diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000000..2e3ae7b383fc --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +# Files of which NthPortal has a lot of knowledge. Wildcards cover tests +*LazyList* @NthPortal +**/scala/util/Using* @NthPortal diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000000..5ace4600a1f2 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b169ac6fcd28..28f53a5d0be9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,6 +9,9 @@ defaults: run: shell: bash +permissions: + contents: read + jobs: build_and_test: name: Test @@ -16,7 +19,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest] - java: [8, 11, 17, 21, 22, 23-ea] + java: [8, 11, 17, 21, 24, 25-ea] runs-on: ${{matrix.os}} steps: - run: git config --global core.autocrlf false @@ -30,6 +33,9 @@ jobs: java-version: ${{matrix.java}} cache: sbt + - name: Setup SBT + uses: sbt/setup-sbt@v1 + - name: Build run: | sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal @@ -37,4 +43,4 @@ jobs: - name: Test run: | STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR - sbt -Dstarr.version=$STARR setupValidateTest test:compile info testAll + sbt -Dstarr.version=$STARR setupValidateTest Test/compile info testAll diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml new file mode 100644 index 000000000000..3549dedc2a21 --- /dev/null +++ b/.github/workflows/cla.yml @@ -0,0 +1,11 @@ +name: "Check Scala CLA" +on: + pull_request: +jobs: + cla-check: + runs-on: ubuntu-latest + steps: + - name: Verify CLA + uses: scala/cla-checker@v1 + with: + author: ${{ github.event.pull_request.user.login }} diff --git 
a/.github/workflows/validate.yml b/.github/workflows/validate.yml new file mode 100644 index 000000000000..098131515f5d --- /dev/null +++ b/.github/workflows/validate.yml @@ -0,0 +1,41 @@ +name: PR validation +on: + pull_request: + +jobs: + validate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: 8 + cache: sbt + - uses: sbt/setup-sbt@v1 + # "mini" bootstrap for PR validation + # "mini" in these senses: + # - it doesn't use the complicated legacy scripts. + # - it doesn't publish to scala-pr-validation-snapshots + # (because we need secrets for that and PRs from forks can't have secrets) + # it is still a true bootstrap. + - name: build + run: sbt -warn setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal + - name: rebuild + run: | + STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + sbt -Dstarr.version=$STARR Test/compile + - name: testAll1 + run: | + STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + sbt -Dstarr.version=$STARR setupValidateTest testAll1 + - name: testAll2 + run: | + STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + sbt -Dstarr.version=$STARR setupValidateTest testAll2 + - name: benchmarks + run: | + STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + sbt -Dstarr.version=$STARR bench/Jmh/compile + - name: build library with Scala 3 + run: sbt -Dscala.build.compileWithDotty=true library/compile diff --git a/.gitignore b/.gitignore index 364aebc66430..61bf3454a8f9 100644 --- a/.gitignore +++ b/.gitignore @@ -12,10 +12,6 @@ # # JARs aren't checked in, they are fetched by sbt # -# We could be more concise with /lib/**/*.jar but that assumes -# a late-model git. 
-# -/lib/ant/*.jar /lib/*.jar /test/files/codelib/*.jar /test/files/lib/*.jar @@ -37,27 +33,38 @@ /bin/ /sandbox/ -# eclipse, intellij -/.classpath -/.project +# intellij /src/intellij*/*.iml /src/intellij*/*.ipr /src/intellij*/*.iws **/.cache /.idea /.settings -metals.sbt + +# vscode +/.vscode # Standard symbolic link to build/quick/bin /qbin # sbt's target directories -target/ +/target/ +/project/**/target/ +/test/macro-annot/target/ +/test/files/target/ +/test/target/ /build-sbt/ local.sbt jitwatch.out +# Used by the restarr/restarrFull commands as target directories +/build-restarr/ +/target-restarr/ + # metals -.metals/ -.bloop/ -.bsp/ +.metals +.bloop +project/**/metals.sbt + +.bsp +.history diff --git a/.idea/icon.png b/.idea/icon.png new file mode 100644 index 000000000000..8280fd4bfc3f Binary files /dev/null and b/.idea/icon.png differ diff --git a/.mailmap b/.mailmap index 595313a5015d..216682f3acfb 100644 --- a/.mailmap +++ b/.mailmap @@ -13,6 +13,7 @@ Alex Cruise A. P. Marki Antonio Cunei Antonio Cunei +Björn Regnell Buraq Emir Caoyuan Deng Chris Hodapp @@ -25,6 +26,7 @@ Daniel C. Sobral Daniel C. Sobral Daniel Esik Daniel Lorch +Darcy Shen Diego E. Alonso Blas Diego E. Alonso Blas Eric Huang @@ -33,16 +35,22 @@ Eugene Burmako Eugene Burmako Eugene Vigdorchik François Garillot +Friendseeker <66892505+Friendseeker@users.noreply.github.com> Geoff Reedy Gilad Hoch Harrison Houghton Ilya Sergei Ingo Maier Ingo Maier +Jamie Thompson +Jan Arne Sparka +Jian Lan Josh Suereth Josh Suereth Julien Eberle Kenji Yoshida <6b656e6a69@gmail.com> +Liang Yan <35164941+liang3zy22@users.noreply.github.com> +Liang Yan Luc Bourlier Luc Bourlier Luc Bourlier @@ -55,6 +63,7 @@ Miguel Garcia Mike Skells Mirco Dotta Mirco Dotta +Mitsuhiro Shibuya Moez A. 
Abdel-Gawad Mohsen Lesani Nada Amin @@ -62,6 +71,8 @@ Nada Amin Nada Amin Natallie Baikevich Nikolay Mihaylov +NthPortal <7505383+NthPortal@users.noreply.github.com> +NthPortal Paolo Giarrusso Pavel Pavlov Philipp Haller @@ -75,6 +86,8 @@ Sebastian Hack Simon Ochsenreither Stepan Koltsov Stéphane Micheloud +Takahashi Osamu +Trey Cahill Unknown Committer Unknown Committer Unknown Committer diff --git a/.travis.yml b/.travis.yml index 4e4b60a8e101..f95bead2a1de 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,70 +1,67 @@ version: ~> 1.0 # needed for imports - import: scala/scala-dev:travis/default.yml +dist: xenial # GPG stuff breaks on bionic; scala/scala-dev#764 language: scala stages: - name: build jobs: - include: - - # full bootstrap and publish - - stage: build - if: type != pull_request - script: - # see comment in `bootstrap_fun` for details on the procedure - # env available in each stage - # - by travis config (see below): secret env vars - # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl - # - by `bootstrap_fun`: publishPrivateTask, ... - - (cd admin && ./init.sh) - - source scripts/common - - source scripts/bootstrap_fun - - determineScalaVersion - - deriveModuleVersions - - removeExistingBuilds $integrationRepoUrl - - if [ ! -z "$STARR_REF" ]; then buildStarr; fi - - buildLocker - - buildQuick - - triggerScalaDist - - # pull request validation (w/ mini-bootstrap) - # "mini" in these senses: - # - it doesn't use the complicated legacy scripts. - # - it doesn't publish to scala-pr-validation-snapshots - # (because we need secrets for that and Travis-CI doesn't give PR jobs access to secrets) - # it is still a true bootstrap. 
- - stage: build - name: "JDK 8 pr validation" - if: type = pull_request - script: - - sbt -warn setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal - - STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR - - sbt -Dstarr.version=$STARR -warn setupValidateTest test:compile info testAll + include: + - stage: build + if: type != pull_request AND repo = scala/scala AND branch = 2.13.x + name: publish (bootstrapped) to scala-integration or sonatype + script: + # see comment in `bootstrap_fun` for details on the procedure + # env available in each stage + # - by travis config (see below): secret env vars + # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl + # - by `bootstrap_fun`: publishPrivateTask, ... + - set -e + - (cd admin && ./init.sh) + - source scripts/common + - source scripts/bootstrap_fun + - determineScalaVersion + - removeExistingBuilds $integrationRepoUrl + - if [ ! -z "$STARR_REF" ]; then buildStarr; fi + - buildLocker + - buildQuick + - triggerScalaDist - # build the spec using jekyll - - stage: build - dist: focal - language: ruby - # ruby 3.x is default, need to upgrade jekyll. using 2.7 for now. 
- rvm: 2.7 - install: - - ruby -v - - gem install bundler -v "< 2.5" #scala-dev#857 - - bundler --version - - bundle install - script: - - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then (cd admin && ./init.sh); fi' + - stage: build + if: type != pull_request AND repo = scala/scala AND branch = 2.13.x + name: language spec + dist: focal + language: ruby + rvm: 2.7 + install: + - ruby -v + - gem install bundler -v "< 2.5" #scala-dev#857 + - bundler --version + - bundle install --path vendor/bundle + # https://travis-ci.community/t/travis-focal-ubuntu-image-uses-now-expired-mongodb-4-4-package/14293/3 + - wget -qO - https://www.mongodb.org/static/pgp/server-4.4.asc | sudo apt-key add - + # cribbed from https://github.com/SebastiaanKlippert/go-wkhtmltopdf/blob/master/.travis.yml + - sudo apt-get update + - sudo apt-get install -y build-essential xorg xfonts-75dpi libpng16-16 libssl1.1 + - wget --quiet "https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.bionic_amd64.deb" + - sudo dpkg -i "wkhtmltox_0.12.6-1.bionic_amd64.deb" + - rm "wkhtmltox_0.12.6-1.bionic_amd64.deb" + script: + - set -e + - (cd admin && ./init.sh) - bundle exec jekyll build -s spec/ -d build/spec - after_success: - - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then ./scripts/travis-publish-spec.sh; fi' + - export JEKYLL_ENV=spec-pdf + - bundle exec jekyll build -s spec/ -d build/spec-pdf + - ./scripts/generate-spec-pdf.sh + after_success: + - ./scripts/travis-publish-spec.sh env: global: - ADOPTOPENJDK=8 - - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh + - secure: "P8EqpZoin/YTnwel9TTxSSAHtXfZ4M262BKXlYUZmjoQsjyvXDAeZ7yAqgAvX5BeRFrGkBToPiE+V60stdWkPKs3+9COw2BDUB1CULBHhRY9Lxordmz0xVhgEfsoH4f6r6wOlIQ9kuaWhmP+JdB/mzOHZhLch9ziPi8O46Z8t4k=" # PRIV_KEY_SECRET, for 
scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue), for triggering scala-dist job diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 05006a325342..635929ce34a4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,25 +4,15 @@ We follow the standard GitHub [fork & pull](https://help.github.com/articles/usi You're always welcome to submit your PR straight away and start the discussion (without reading the rest of this wonderful doc, or the [`README.md`](README.md)). The goal of these notes is to make your experience contributing to Scala as smooth and pleasant as possible. We're happy to guide you through the process once you've submitted your PR. -## The Scala Community - -In 2014, you -- the Scala community -- matched the core team at EPFL in number of commits contributed to Scala 2.11, doubling the percentage of commits from outside EPFL/Lightbend since 2.10. Excellent work! (The split was roughly 25/25/50 for you/EPFL/Lightbend.) - -We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)! - -This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! 
(Feel free to send a PR for this note, send your thoughts to scala/contributors (Gitter) or contributors.scala-lang.org (Discourse), or tweet about it to @adriaanm.) - -By the way, the team at Lightbend is: @adriaanm, @lrytz, @retronym, @SethTisue, and @szeiger. - ## What kind of PR are you submitting? -Regardless of the nature of your Pull Request, we have to ask you to digitally sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala), to protect the OSS nature of the code base. +Regardless of the nature of your Pull Request, we have to ask you to digitally sign the [Scala CLA](https://contribute.akka.io/cla/scala), to protect the OSS nature of the code base. -You don't need to submit separate PRs for 2.12.x and 2.13.x. Any change accepted on 2.12.x will, in time, be merged onto 2.13.x too. (We are no longer accepting PRs for 2.11.x.) +You don't need to submit separate PRs for 2.12.x and 2.13.x. Any change accepted on 2.12.x will, in time, be merged onto 2.13.x too. ### Documentation -Whether you finally decided you couldn't stand that annoying typo anymore, you fixed the outdated code sample in some comment, or you wrote a nice, comprehensive, overview for an under-documented package, some docs for a class or the specifics about a method, your documentation improvement is very much appreciated, and we will do our best to fasttrack it. +Whether you finally decided you couldn't stand that annoying typo anymore, you fixed the outdated code sample in some comment, or you wrote a nice, comprehensive, overview for an under-documented package, some docs for a class or the specifics about a method, your documentation improvement is very much appreciated, and we will do our best to fast-track it. You can make these changes directly in your browser in GitHub, or follow the same process as for code. Up to you! 
@@ -32,21 +22,23 @@ For bigger documentation changes, you may want to poll contributors.scala-lang.o For bigger changes, we do recommend announcing your intentions on contributors.scala-lang.org first, to avoid duplicated effort, or spending a lot of time reworking something we are not able to change at this time in the release cycle, for example. -The kind of code we can accept depends on the life cycle for the release you're targeting. The current maintenance release (2.12.x) cannot break source/binary compatibility, which means public APIs cannot change. It also means we are reluctant to change, e.g., type inference or implicit search, as this can have unforeseen consequences for source compatibility. +The kind of code we can accept depends on the life cycle for the release you're targeting. The current maintenance release (2.13.x) cannot break source/binary compatibility, which means public APIs cannot change. It also means we are reluctant to change, e.g., type inference or implicit search, as this can have unforeseen consequences for source compatibility. #### Bug Fix -At the end of the commit message, include "Fixes scala/bug#NNNN", where https://github.com/scala/bug/issues/NNNN tracks the bug you're fixing. We also recommend naming your branch after the ticket number. +At the end of the PR description, which is autofilled with the commit message if there is only one commit, add the phrase, "Fixes scala/bug#NNNN", where `https://github.com/scala/bug/issues/NNNN` tracks the bug you're fixing. Github will turn your bug number into a link. + +We also recommend naming your branch after the ticket number. Please make sure the ticket's milestone corresponds to the upcoming milestone for the branch your PR targets. The CI automation will automatically assign the milestone after you open the PR. 
#### Enhancement or New Feature -For longer-running development, likely required for this category of code contributions, we suggest you include `topic/` or `wip/` in your branch name, to indicate that this is work in progress, and that others should be prepared to rebase if they branch off your branch. +For longer-running development, likely required for this category of code contributions, we suggest you include `topic/` or `wip/` in your branch name, to indicate that this is work in progress and that others should be prepared to rebase if they branch off your branch. Any language change (including bug fixes) must be accompanied by the relevant updates to the spec, which lives in the same repository for this reason. -A new language feature or other substantial enough language change requires a SIP (Scala Improvement Process) proposal. For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html). +A new language feature or other substantial enough language change requires a SIP (Scala Improvement Process) proposal. For more details on submitting SIPs, see [how to submit a SIP](https://docs.scala-lang.org/sips/sip-submission.html). ## Guidelines @@ -67,6 +59,161 @@ Some characteristics of good tests: * be minimal, deterministic, stable (unaffected by irrelevant changes), easy to understand and review * have minimal dependencies: a compiler bug test should not depend on, e.g., the Scala library +There are mainly three kinds of tests: unit tests, property-based tests, and integration tests. + +#### JUnit + +For unit tests we use JUnit, which can be run from sbt shell as follows: + +``` +root> junit/testQuick +``` + +It might take a few minutes the first time you run `junit/testQuick`, but from the second time onwards +sbt will only run the tests that is affected by the code change since the last run. +See `test/junit/` for examples of JUnit tests. 
+ +JUnit tests will be compiled with the `starr` compiler, and run against the `quick` library. Some JUnit tests (search for `BytecodeTesting`) invoke the compiler programmatically and test its behavior or output, these tests use the `quick` compiler. + +`starr` is the Scala release used to build the compiler and library, usually the last release. `quick` is the result of that compilation. See also ["Build Setup"](https://github.com/scala/scala#build-setup) in the README. + +#### ScalaCheck + +For testing that can benefit from having lots of randomly generated data, property-based testing should be used. +This is run from sbt shell as follows: + +``` +root> scalacheck/testOnly ByOneRangeTest +``` + +See `test/scalacheck/range.scala`. + +#### Partest + +scala/scala comes with a powerful integration testing tool called Partest. +Using Partest you can compile or run some Scala code, and compare it against the expected output. +In the source tree, partests are located under `test/files//`. The main categories are: + +- `pos`: These files must compile successfully. +- `run`: In addition to compiling, `Test.main` is run and its output is compared against the test's `.check` file. +- `neg`: These files must NOT compile, with compiler output matching the expected output in the `.check` file. +- Other categories such as `jvm` behave the same as `run` category. + +To run a single negative test from sbt shell: + +``` +root> partest --verbose test/files/neg/delayed-init-ref.scala +``` + +A test can be either a single `.scala` file or a directory containing multiple `.scala` and `.java` files. +For testing separate compilation, files can be grouped using `_N` suffixes in the filename. 
For example, a test +with files (`A.scala`, `B_1.scala`, `C_1.java`, `Test_2.scala`) does: +``` +scalac A.scala -d out +scalac -cp out B_1.scala C_1.java -d out +javac -cp out C_1.java -d out +scalac -cp out Test_2.scala -d out +scala -cp out Test +``` + +**Flags** + - To specify compiler flags such as `-Werror -Xlint`, you can add a comment at the top of your source file of the form: + `//> using options -Werror -Xlint` + - Similarly, a `// javac: ` comment in a Java source file passes flags to the Java compiler. + - A `// filter: ` comment eliminates output lines that match the filter before comparing to the `.check` file. + - A `// java: ` comment makes a `run` test execute in a separate JVM and passes the additional flags to the `java` command. + - A `// javaVersion ` comment makes partest skip the test if the java version is outside the requested range (e.g. `8`, `15+`, `9 - 11`) + +**Common Usage** + +To test that no warnings are emitted while compiling a `pos` test, use `-Werror`. +That will fail a `pos` test if there are warnings. Note that `pos` tests do not have `.check` files. + +To test that warnings are correctly emitted, use `-Werror` with a `neg` test and `.check` file. +The usual way to create a `.check` file is `partest --update-check`. + +To run all tests in `neg` categories from sbt shell: + +``` +root> partest --neg +``` + +This might take a couple of minutes to complete. But in a few minutes, you could test 1000+ negative examples, +so it's totally worth your time, especially if you are working on changing error messages. +If you have made a bunch of tests fail by tweaking a message, you can update them in bulk +with `partest --update-check --failed`. + +Suppose you're interested in ranges. Here's how you can grep the partests and run them: + +``` +root> partest --grep range +... +Selected 74 tests drawn from 74 tests matching 'range' +... +# starting 13 tests in pos +ok 3 - pos/lookupswitch.scala +ok 4 - pos/rangepos-patmat.scala +... 
+``` + +Another thing you could do is to combine with `--failed` flag to iteratively run +only the failed tests, similar to `testQuick`. + +``` +root> partest --grep range --failed +``` + +To inspect the generated files after running the test, add `--debug`: + +``` +root> partest --debug --verbose test/files/pos/traits.scala +... +# starting 1 test in pos +% scalac pos/traits.scala -d /home/aengelen/dev/scala/test/files/pos/traits-pos.obj +ok 1 - pos/traits.scala +``` + +See `--help` for more info: + +``` +root> partest --help +``` + +Partests are compiled by the bootstrapped `quick` compiler (and `run` partests executed with the `quick` library), +and therefore: + +* if you're working on the compiler, you must write a partest, or a `BytecodeTesting` JUnit test which invokes the compiler programmatically; however +* if you're working on the library, a JUnit and/or ScalaCheck is better. + +If you're working on Partest itself, note that some of its source files are part of Scala's sbt build, and are compiled when sbt is launched, not via its `compile` command. + +#### exploring with REPL + +Before or during the test, you might get better insight into the code by starting a REPL session +using the freshly built scala. To start a REPL session from the sbt shell: + +``` +root> scala +[info] Running scala.tools.nsc.MainGenericRunner -usejavacp +Welcome to Scala 2.13.0-20180304-082722-3debf94 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_151). +Type in expressions for evaluation. Or try :help. + +scala> val r = (0.0 to 2.0 by 0.1) +r: scala.collection.immutable.NumericRange[Double] = NumericRange 0.0 to 2.0 by 0.1 + +scala> r(3) +res0: Double = 0.30000000000000004 + +scala> for { i <- 1 to 20 } { assert(r.toList(i) == r(i), s"$i failed") } +java.lang.AssertionError: assertion failed: 6 failed + at scala.Predef$.assert(Predef.scala:248) + at .$anonfun$res5$1(:1) + at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:151) + ... 
33 elided +``` + +Using this information, you can adjust your test. + ### Documentation This is of course required for new features and enhancements. @@ -79,10 +226,16 @@ Consider updating the package-level doc (in the package object), if appropriate. Please follow these standard code standards, though in moderation (scouts quickly learn to let sleeping dogs lie): -* Don't violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself). -* Follow the [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule). +Don't violate [DRY](https://en.wikipedia.org/wiki/Don%27t_repeat_yourself). +* DRY means: "Don't repeat yourself". +* Every piece of knowledge must have a single, unambiguous, authoritative representation within a system. +* Try to only write functionality or algorithms once and reference them (Abstraction) instead of Copy&Paste + +Follow the [Boy Scout Rule](https://martinfowler.com/bliki/OpportunisticRefactoring.html). +* "Always leave the code behind in a better state than you found it" +* This translates to using any opportunity possible to improve and clean up the code in front of you -Please also have a look at the [Scala Hacker Guide](http://www.scala-lang.org/contribute/hacker-guide.html) by @xeno-by. +Please also have a look at the [Scala Hacker Guide](https://www.scala-lang.org/contribute/hacker-guide.html) by @xeno-by. ### Clean commits, clean history @@ -90,25 +243,27 @@ A pull request should consist of commits with messages that clearly state what p Commit logs should be stated in the active, present tense. -A commit's subject should be 72 characters or less. Overall, think of -the first line of the commit as a description of the action performed +The subject line of a commit message should be no more than 72 characters. +Overall, think of the first line of the commit as a description of the action performed by the commit on the code base, so use the active voice and the present tense. 
That also makes the commit subjects easy to reuse in release notes. -For a bugfix, the end of the commit message should say "Fixes scala/bug#NNNN". +For a bugfix, the end of the PR description (that is, the first comment on the PR) should say, "Fixes scala/bug#NNNN", as mentioned above. + +NOTE: it's best not to add the issue reference to your commit message, as github will pollute the conversation on the ticket with notifications every time you commit. -If a commit purely refactors and is not intended to change behaviour, +If a commit purely refactors and is not intended to change behavior, say so. Backports should be tagged as "[backport]". -When working on maintenance branches (e.g., 2.12.x), include "[nomerge]" +When working on older maintenance branches (namely 2.12.x), include "[nomerge]" if this commit should not be merged forward into the next release branch. Here is standard advice on good commit messages: -http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html +https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html ### Pass Scabot @@ -121,8 +276,7 @@ See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) ### Pass code review Your PR will need to be assigned to one or more reviewers. You can suggest reviewers -yourself; if you're not sure, see the list in [README.md](README.md) or ask on scala/contributors (Gitter) -or contributors.scala-lang.org (Discourse). +yourself; if you're not sure, see the list in [README.md](README.md) or ask on \#scala-contributors (on [Discord](https://discord.com/invite/scala)) or contributors.scala-lang.org (Discourse). To assign a reviewer, add a "review by @reviewer" to the PR description or in a comment on your PR. @@ -136,8 +290,8 @@ and `push -f` to the branch. This is to keep the git history clean. Additional c are OK if they stand on their own. Once all these conditions are met, we will merge your changes -- if we -agree with it! 
We are available on scala/contributors (Gitter) or -contributors.scala-lang.org (Discourse) to discuss changes beforehand, +agree with it! We are available on \#scala-contributors (on [Discord](https://discord.com/invite/scala)) +or contributors.scala-lang.org (Discourse) to discuss changes beforehand, before you put in the coding work. diff --git a/Gemfile b/Gemfile index 6fe508207eef..b248ccc91835 100644 --- a/Gemfile +++ b/Gemfile @@ -1,7 +1,10 @@ # To build the spec on Travis CI source "https://rubygems.org" -gem "jekyll", "3.6.3" +gem "jekyll", "3.9.3" gem "rouge" -# gem 's3_website' -gem "redcarpet", "3.3.2" +gem "redcarpet", "3.6.0" + +# we use redcarpet not kramdown, but current jekyll complains +# if this isn't present?! +gem 'kramdown-parser-gfm' diff --git a/NOTICE b/NOTICE index 22457ecf1a2d..e3d6b3bb0586 100644 --- a/NOTICE +++ b/NOTICE @@ -1,10 +1,10 @@ Scala -Copyright (c) 2002-2024 EPFL -Copyright (c) 2011-2024 Lightbend, Inc. +Copyright (c) 2002-2025 EPFL +Copyright (c) 2011-2025 Lightbend, Inc. dba Akka Scala includes software developed at LAMP/EPFL (https://lamp.epfl.ch/) and -Lightbend, Inc. (https://www.lightbend.com/). +Akka (https://akka.io/). Licensed under the Apache License, Version 2.0 (the "License"). Unless required by applicable law or agreed to in writing, software diff --git a/README.md b/README.md index 944eca3a1379..c039fbc44fc9 100644 --- a/README.md +++ b/README.md @@ -1,64 +1,111 @@ -# Welcome! +# This is Scala 2! Welcome! -This is the official repository for the [Scala Programming Language](http://www.scala-lang.org) +This is the home of the [Scala 2](https://www.scala-lang.org) standard library, compiler, and language spec. +For Scala 3, visit [scala/scala3](https://github.com/scala/scala3). + # How to contribute -To contribute in this repo, please open a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository. 
+Issues and bug reports for Scala 2 are located in [scala/bug](https://github.com/scala/bug). That tracker is also where new contributors may find issues to work on: [good first issues](https://github.com/scala/bug/labels/good%20first%20issue), [help wanted](https://github.com/scala/bug/labels/help%20wanted). + +For coordinating broader efforts, we also use the [scala/scala-dev tracker](https://github.com/scala/scala-dev/issues). -We do have to ask you to sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala) before we can merge any of your work, to protect its open source nature. +To contribute here, please open a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository. -For more information on building and developing the core of Scala, make sure to read -the rest of this README! +Be aware that we can't accept additions to the standard library, only modifications to existing code. Binary compatibility forbids adding new public classes or public methods. Additions are made to [scala-library-next](https://github.com/scala/scala-library-next) instead. -In order to get in touch with other Scala contributors, join -[scala/contributors](https://gitter.im/scala/contributors) (Gitter) or post on -[contributors.scala-lang.org](http://contributors.scala-lang.org) (Discourse). +We require that you sign the [Scala CLA](https://contribute.akka.io/contribute/cla/scala) before we can merge any of your work, to protect Scala's future as open source software. -# Reporting issues +The general workflow is as follows. +1. Find/file an issue in scala/bug (or submit a well-documented PR right away!). +2. Fork the scala/scala repo. +3. Push your changes to a branch in your forked repo. For coding guidelines, go [here](https://github.com/scala/scala#coding-guidelines). +4. Submit a pull request to scala/scala from your forked repo. 
+ +For more information on building and developing the core of Scala, read the rest of this README, especially for [setting up your machine](https://github.com/scala/scala#get-ready-to-contribute)! -Please report bugs at the [scala/bug issue tracker](https://github.com/scala/bug/issues). We use the [scala/scala-dev tracker](https://github.com/scala/scala-dev/issues) for coordinating bigger work items. # Get in touch! +In order to get in touch with other Scala contributors, join the +\#scala-contributors channel on the [Scala Discord](https://discord.com/invite/scala) chat, or post on +[contributors.scala-lang.org](https://contributors.scala-lang.org) (Discourse). + If you need some help with your PR at any time, please feel free to @-mention anyone from the list below, and we will do our best to help you out: | | username | talk to me about... | --------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------| - | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec | - | [`@SethTisue`](https://github.com/SethTisue) | getting started, build, developer docs, community build, Jenkins, library | - | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | - | [`@szeiger`](https://github.com/szeiger) | collections, build | - | [`@lrytz`](https://github.com/lrytz) | back end, optimizer, named & default arguments | + | [`@lrytz`](https://github.com/lrytz) | back end, optimizer, named & default arguments, reporters | + | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, lambdas | + | [`@SethTisue`](https://github.com/SethTisue) | getting started, build, CI, community build, Jenkins, docs, library, REPL | + | [`@dwijnand`](https://github.com/dwijnand) | pattern matcher, MiMa, 
partest | + | [`@som-snytt`](https://github.com/som-snytt) | warnings/lints/errors, REPL, compiler options, compiler internals, partest | | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | | [`@viktorklang`](https://github.com/viktorklang) | concurrency, futures | - | [`@axel22`](https://github.com/axel22) | concurrency, parallel collections, specialization | - | [`@dragos`](https://github.com/dragos) | specialization, back end | - | [`@janekdb`](https://github.com/janekdb) | documentation | - | [`@sjrd`](https://github.com/sjrd) | interactions with Scala.js | + | [`@sjrd`](https://github.com/sjrd) | interactions with Scala.js | + | [`@NthPortal`](https://github.com/NthPortal) | library, concurrency, `scala.math`, `LazyList`, `Using`, warnings | + | [`@bishabosha`](https://github.com/bishabosha) | TASTy reader | + | [`@joroKr21`](https://github.com/joroKr21) | higher-kinded types, implicits, variance | P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list! +# Branches + +Target the oldest branch you would like your changes to end up in. We periodically merge forward from 2.12.x to 2.13.x. Most changes should target 2.13.x, as 2.12.x is now under minimal maintenance. + +If your change is difficult to merge forward, you may be asked to also submit a separate PR targeting the newer branch. + +If your change is version-specific and shouldn't be merged forward, put `[nomerge]` in the PR name. + +If your change is a backport from a newer branch and thus doesn't need to be merged forward, put `[backport]` in the PR name. + +## Choosing a branch + +Most changes should target 2.13.x. We are increasingly reluctant to target 2.12.x unless there is a special reason (e.g. if an especially bad bug is found, or if there is commercial sponsorship). See [Scala 2 maintenance](https://www.scala-lang.org/development/#scala-2-maintenance). 
+ # Repository structure +Most importantly: + ``` scala/ -+--build.sbt The main sbt build script -+--lib/ Pre-compiled libraries for the build ++--build.sbt The main sbt build definition ++--project/ The rest of the sbt build +--src/ All sources +---/library Scala Standard Library +---/reflect Scala Reflection +---/compiler Scala Compiler - +---/intellij IntelliJ project templates -+--spec/ The Scala language specification -+--scripts/ Scripts for the CI jobs (including building releases) +--test/ The Scala test suite +---/files Partest tests +---/junit JUnit tests +---/scalacheck ScalaCheck tests -+--build/ [Generated] Build output directory ++--spec/ The Scala language specification +``` + +but also: + +``` +scala/ + +---/library-aux Scala Auxiliary Library, for bootstrapping and documentation purposes + +---/interactive Scala Interactive Compiler, for clients such as an IDE (aka Presentation Compiler) + +---/intellij IntelliJ project templates + +---/manual Scala's runner scripts "man" (manual) pages + +---/partest Scala's internal parallel testing framework + +---/partest-javaagent Partest's helper java agent + +---/repl Scala REPL core + +---/repl-frontend Scala REPL frontend + +---/scaladoc Scala's documentation tool + +---/scalap Scala's class file decompiler + +---/testkit Scala's unit-testing kit ++--admin/ Scripts for the CI jobs and releasing ++--doc/ Additional licenses and copyrights ++--scripts/ Scripts for the CI jobs and releasing ++--tools/ Scripts useful for local development ++--build/ Build products ++--dist/ Build products ++--target/ Build products ``` # Get ready to contribute @@ -66,13 +113,13 @@ scala/ ## Requirements You need the following tools: - - Java SDK. The baseline version is 8 for both 2.12.x and 2.13.x. It may be possible to use a - later SDK for local development, but the CI will verify against the baseline - version. - - sbt (sbt 0.13 on the 2.12.x branch, sbt 1 on the 2.13.x branch) + - Java SDK. 
The baseline version is 8 for both 2.12.x and 2.13.x. It is almost always fine + to use a later SDK (such as 17 or 21) for local development. CI will verify against the + baseline version. + - sbt MacOS and Linux work. Windows may work if you use Cygwin. Community help with keeping -the build working on Windows is appreciated. +the build working on Windows and documenting any needed setup is appreciated. ## Tools we use @@ -80,26 +127,44 @@ We are grateful for the following OSS licenses: - [JProfiler Java profiler](https://www.ej-technologies.com/products/jprofiler/overview.html) - [YourKit Java Profiler](https://www.yourkit.com/java/profiler/) - [IntelliJ IDEA](https://www.jetbrains.com/idea/download/) + - [![Revved up by Develocity](https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A)](https://develocity.scala-lang.org) ## Build setup ### Basics During ordinary development, a new Scala build is built by the -previously released version. For short we call the previous release -"starr": the stable reference release. Building with starr is -sufficient for most kinds of changes. - -However, a full build of Scala (a *bootstrap*, as performed by our CI) -requires two layers. This guarantees that every Scala version can -build itself. If you change the code generation part of the Scala -compiler, your changes will only show up in the bytecode of the -library and compiler after a bootstrap. See below for how to do a -bootstrap build locally. +previously released version, known as the "reference compiler" or, +slangily, as "STARR" (stable reference release). Building with STARR +is sufficient for most kinds of changes. + +However, a full build of Scala is _bootstrapped_. Bootstrapping has +two steps: first, build with STARR; then, build again using the +freshly built compiler, leaving STARR behind. This guarantees that +every Scala version can build itself. 
+ +If you change the code generation part of the Scala compiler, your +changes will only show up in the bytecode of the library and compiler +after a bootstrap. Our CI does a bootstrapped build. + +**Bootstrapping locally**: To perform a bootstrap, run `restarrFull` +within an sbt session. This will build and publish the Scala +distribution to your local artifact repository and then switch sbt to +use that version as its new `scalaVersion`. You may then revert back +with `reload`. Note `restarrFull` will also write the STARR version +to `buildcharacter.properties` so you can switch back to it with +`restarr` without republishing. This will switch the sbt session to +use the `build-restarr` and `target-restarr` directories instead of +`build` and `target`, which avoids wiping out classfiles and +incremental metadata. IntelliJ will continue to be configured to +compile and run tests using the starr version in +`versions.properties`. For history on how the current scheme was arrived at, see https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion. +**Building with fatal warnings**: To make warnings in the project fatal (i.e. turn them into errors), run `set Global / fatalWarnings := true` in sbt (replace `Global` with the name of a module—such as `reflect`—to only make warnings fatal for that module). To disable fatal warnings again, either `reload` sbt, or run `set Global / fatalWarnings := false` (again, replace `Global` with the name of a module if you only enabled fatal warnings for that module). CI always has fatal warnings enabled. 
+ ### Using the sbt build Once you've started an `sbt` session you can run one of the core commands: @@ -107,30 +172,33 @@ Once you've started an `sbt` session you can run one of the core commands: - `compile` compiles all sub-projects (library, reflect, compiler, scaladoc, etc) - `scala` / `scalac` run the REPL / compiler directly from sbt (accept options / arguments) + - `enableOptimizer` reloads the build with the Scala optimizer enabled. Our releases are built this way. Enable this when working on compiler performance improvements. When the optimizer is enabled the build will be slower and incremental builds can be incorrect. + - `setupPublishCore` runs `enableOptimizer` and configures a version number based on the current Git SHA. Often used as part of bootstrapping: `sbt setupPublishCore publishLocal && sbt -Dstarr.version= testAll` - `dist/mkBin` generates runner scripts (`scala`, `scalac`, etc) in `build/quick/bin` - `dist/mkPack` creates a build in the Scala distribution format in `build/pack` - - `test` runs the JUnit test, `testOnly *immutable.ListTest` runs a subset + - `junit/test` runs the JUnit tests; `junit/testOnly *Foo` runs a subset + - `scalacheck/test` runs scalacheck tests, use `testOnly` to run a subset - `partest` runs partest tests (accepts options, try `partest --help`) - `publishLocal` publishes a distribution locally (can be used as `scalaVersion` in other sbt projects) - - Optionally `set baseVersionSuffix := "-bin-abcd123-SNAPSHOT"` + - Optionally `set baseVersionSuffix := "bin-abcd123-SNAPSHOT"` where `abcd123` is the git hash of the revision being published. You can also - use something custom like `"-bin-mypatch"`. This changes the version number from - `2.12.2-SNAPSHOT` to something more stable (`2.12.2-bin-abcd123-SNAPSHOT`). + use something custom like `"bin-mypatch"`. This changes the version number from + `2.13.2-SNAPSHOT` to something more stable (`2.13.2-bin-abcd123-SNAPSHOT`). 
- Note that the `-bin` string marks the version binary compatible. Using it in - sbt will cause the `scalaBinaryVersion` to be `2.12`. If the version is not - binary compatible, we recommend using `-pre`, e.g., `2.13.0-pre-abcd123-SNAPSHOT`. + sbt will cause the `scalaBinaryVersion` to be `2.13`. If the version is not + binary compatible, we recommend using `-pre`, e.g., `2.14.0-pre-abcd123-SNAPSHOT`. - Optionally `set ThisBuild / Compile / packageDoc / publishArtifact := false` to skip generating / publishing API docs (speeds up the process). If a command results in an error message like `a module is not authorized to depend on -itself`, it may be that a global SBT plugin (such as [Ensime](http://ensime.org/)) is -resulting in a cyclical dependency. Try disabling global SBT plugins (perhaps by -temporarily commenting them out in `~/.sbt/0.13/plugins/plugins.sbt`). +itself`, it may be that a global sbt plugin is causing +a cyclical dependency. Try disabling global sbt plugins (perhaps by +temporarily commenting them out in `~/.sbt/1.0/plugins/plugins.sbt`). #### Sandbox -We recommend to keep local test files in the `sandbox` directory which is listed in +We recommend keeping local test files in the `sandbox` directory which is listed in the `.gitignore` of the Scala repo. #### Incremental compilation @@ -142,37 +210,16 @@ meantime you can: - Use IntelliJ IDEA for incremental compiles (see [IDE Setup](#ide-setup) below) - its incremental compiler is a bit less conservative, but usually correct. -#### Bootstrapping locally - -To perform a bootstrap using sbt - - first a build is published either locally or on a temporary repository, - - then a separate invocation of sbt (using the previously built version as `starr`) - is used to build / publish the actual build. - -Assume the current `starr` version is `2.12.0` (defined in -[versions.properties](versions.properties)) and the current version is `2.12.0-SNAPSHOT` -(defined in [build.sbt](build.sbt)). 
To perform a local bootstrap: - - Run `publishLocal` (you may want to specify a custom version suffix and skip - generating API docs, see above). - - Quit sbt and start a new sbt instance using `sbt -Dstarr.version=` where - `` is the version number you published locally. - - If the version number you published is not binary compatible with the current - `starr`, `set every scalaBinaryVersion := "2.12.0-M4"`. This is not required if - the version you published locally is binary compatible, i.e., if the current - `starr` is a 2.12.x release and not a milestone / RC. - -The last step is required to resolve modules (scala-xml, scala-partest, etc). It -assumes that the module releases for the current `starr` work (in terms of binary -compatibility) with the local starr that you published locally. A full bootstrap -requires re-building the all the modules. On our CI this is handled by the -[bootstrap](scripts/jobs/integrate/bootstrap) script, but it (currently) cannot -be easily executed locally. - ### IDE setup -You may use IntelliJ IDEA (see [src/intellij/README.md](src/intellij/README.md)), -the Scala IDE for Eclipse (see [src/eclipse/README.md](src/eclipse/README.md)), -or ENSIME (see [this page on the ENSIME site](http://ensime.org/editors/)). +We suggest using IntelliJ IDEA (see +[src/intellij/README.md](src/intellij/README.md)). + +[Metals](https://scalameta.org/metals/) may also work, but we don't +yet have instructions or sample configuration for that. A pull request +in this area would be exceedingly welcome. In the meantime, we are +collecting guidance at +[scala/scala-dev#668](https://github.com/scala/scala-dev/issues/668). In order to use IntelliJ's incremental compiler: - run `dist/mkBin` in sbt to get a build and the runner scripts in `build/quick/bin` @@ -190,32 +237,41 @@ It contains useful information on our coding standards, testing, documentation, we use git and GitHub and how to get your code reviewed. 
You may also want to check out the following resources: - - The ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html) + - The ["Scala Hacker Guide"](https://scala-lang.org/contribute/hacker-guide.html) covers some of the same ground as this README, but in greater detail and in a more tutorial style, using a running example. - - [Scala documentation site](http://docs.scala-lang.org) + - [Scala documentation site](https://docs.scala-lang.org) # Scala CI +[![Build Status](https://travis-ci.com/scala/scala.svg?branch=2.13.x)](https://travis-ci.com/scala/scala) + Once you submit a PR your commits will be automatically tested by the Scala CI. -If you see a spurious build failure, you can post `/rebuild` as a PR comment. +Our CI setup is always evolving. See +[scala/scala-dev#751](https://github.com/scala/scala-dev/issues/751) +for more details on how things currently work and how we expect they +might change. + +If you see a spurious failure on Jenkins, you can post `/rebuild` as a PR comment. The [scabot README](https://github.com/scala/scabot) lists all available commands. If you'd like to test your patch before having everything polished for review, -feel free to submit a PR and add the `WIP` label. In case your WIP branch contains +you can have Travis CI build your branch (make sure you have a fork and have Travis CI +enabled for branch builds on it first, and then push your branch). Also +feel free to submit a draft PR. In case your draft branch contains a large number of commits (that you didn't clean up / squash yet for review), consider adding `[ci: last-only]` to the PR title. That way only the last commit -will be tested, saving some energy and CI-resources. Note that inactive WIP PRs +will be tested, saving some energy and CI-resources. Note that inactive draft PRs will be closed eventually, which does not mean the change is being rejected. -CI performs a full bootstrap. 
The first task, `validate-publish-core`, publishes +CI performs a compiler bootstrap. The first task, `validatePublishCore`, publishes a build of your commit to the temporary repository https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots. Note that this build is not yet bootstrapped, its bytecode is built using the -current `starr`. The version number is `2.12.2-bin-abcd123-SNAPSHOT` where `abcd123` +current STARR. The version number is `2.13.2-bin-abcd123-SNAPSHOT` where `abcd123` is the commit hash. For binary incompatible builds, the version number is -`2.13.0-pre-abcd123-SNAPSHOT`. +`2.14.0-pre-abcd123-SNAPSHOT`. You can use Scala builds in the validation repository locally by adding a resolver and specifying the corresponding `scalaVersion`: @@ -223,28 +279,21 @@ and specifying the corresponding `scalaVersion`: ``` $ sbt > set resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/" -> set scalaVersion := "2.12.2-bin-abcd123-SNAPSHOT" +> set scalaVersion := "2.13.17-bin-abcd123-SNAPSHOT" > console ``` -Note that the scala modules are currently not built/published against the -tested version during CI validation. - -## Nightly builds +## "Nightly" builds -The Scala CI builds nightly download releases (including all modules) and publishes -them to the following locations: - - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/?C=M;O=D) - - [2.13.x](http://www.scala-lang.org/files/archive/nightly/2.13.x/?C=M;O=D) +The Scala CI publishes these to +https://scala-ci.typesafe.com/artifactory/scala-integration/ . 
-The CI also publishes nightly API docs: - - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/?C=M;O=D) - - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/2.12.x/) - - [2.13.x](http://www.scala-lang.org/files/archive/nightly/2.13.x/api/?C=M;O=D) - - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.13.x/api/2.13.x/) +Using a nightly build in sbt and other tools is explained on this +[doc page](https://docs.scala-lang.org/overviews/core/nightlies.html). -Using a nightly build in sbt is explained in -[this Stack Overflow answer](http://stackoverflow.com/questions/40622878) +Although we casually refer to these as "nightly" builds, they aren't +actually built nightly, but "mergely". That is to say, a build is +published for every merged PR. ## Scala CI internals diff --git a/admin/init.sh b/admin/init.sh index d75db254fc3e..48a8de627368 100755 --- a/admin/init.sh +++ b/admin/init.sh @@ -1,5 +1,10 @@ #!/bin/bash -e +if [ -z "$GPG_SUBKEY_SECRET" ]; then + echo "GPG_SUBKEY_SECRET is missing/empty, so skipping credentials & gpg setup" + exit +fi + sensitive() { perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-private-repo > ~/.credentials-private-repo perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-sonatype > ~/.credentials-sonatype diff --git a/build.sbt b/build.sbt index 551840283bcc..0bbcb579a297 100644 --- a/build.sbt +++ b/build.sbt @@ -2,19 +2,19 @@ * The new, sbt-based build definition for Scala. * * What you see below is very much work-in-progress. 
The following features are implemented: - * - Compiling all classses for the compiler and library ("compile" in the respective subprojects) + * - Compiling all classes for the compiler and library ("compile" in the respective subprojects) * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/IntegrationTest/test") tests * - Creating build/quick with all compiled classes and launcher scripts ("dist/mkQuick") * - Creating build/pack with all JARs and launcher scripts ("dist/mkPack") * - Building all scaladoc sets ("doc") - * - Publishing ("publishDists" and standard sbt tasks like "publish" and "publishLocal") + * - Publishing (standard sbt tasks like "publish" and "publishLocal") * * You'll notice that this build definition is much more complicated than your typical sbt build. * The main reason is that we are not benefiting from sbt's conventions when it comes project - * layout. For that reason we have to configure a lot more explicitly. I've tried explain in + * layout. For that reason we have to configure a lot more explicitly. I've tried to explain in * comments the less obvious settings. * - * This nicely leads me to explaining goal and non-goals of this build definition. Goals are: + * This nicely leads me to explain the goal and non-goals of this build definition. 
Goals are: * * - to be easy to tweak it in case a bug or small inconsistency is found * - to be super explicit about any departure from standard sbt settings @@ -34,57 +34,34 @@ import scala.build._, VersionUtil._ -// Scala dependencies: -val scalaSwingDep = scalaDep("org.scala-lang.modules", "scala-swing") -val scalaXmlDep = scalaDep("org.scala-lang.modules", "scala-xml") -val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-parser-combinators") - // Non-Scala dependencies: -val junitDep = "junit" % "junit" % "4.12" -val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % Test -val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.18.0" % Test -val jolDep = "org.openjdk.jol" % "jol-core" % "0.16" -val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") -val jlineDep = "jline" % "jline" % versionProps("jline.version") -val jansiDep = "org.fusesource.jansi" % "jansi" % "1.12" -val antDep = "org.apache.ant" % "ant" % "1.10.12" -val testInterfaceDep = "org.scala-sbt" % "test-interface" % "1.0" -val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" - -/** Publish to ./dists/maven-sbt, similar to the Ant build which publishes to ./dists/maven. This - * can be used to compare the output of the sbt and Ant builds during the transition period. Any - * real publishing should be done with sbt's standard `publish` task. 
*/ -lazy val publishDists = taskKey[Unit]("Publish to ./dists/maven-sbt.") - -(Global / credentials) ++= { +val junitDep = "junit" % "junit" % "4.13.2" +val junitInterfaceDep = "com.github.sbt" % "junit-interface" % "0.13.3" % Test +val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.18.1" % Test +val jolDep = "org.openjdk.jol" % "jol-core" % "0.16" +val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") +val jlineDep = "org.jline" % "jline" % versionProps("jline.version") classifier "jdk8" +val testInterfaceDep = "org.scala-sbt" % "test-interface" % "1.0" +val diffUtilsDep = "io.github.java-diff-utils" % "java-diff-utils" % "4.15" +val compilerInterfaceDep = "org.scala-sbt" % "compiler-interface" % "1.10.8" + +val projectFolder = settingKey[String]("subfolder in src when using configureAsSubproject, else the project name") + +// `set Global / fatalWarnings := true` to enable -Werror for the certain modules +// currently, many modules cannot support -Werror; ideally this setting will eventually +// enable -Werror for all modules +val fatalWarnings = settingKey[Boolean]("whether or not warnings should be fatal in the build") + +// enable fatal warnings automatically on CI +Global / fatalWarnings := insideCI.value + +Global / credentials ++= { val file = Path.userHome / ".credentials" if (file.exists && !file.isDirectory) List(Credentials(file)) else Nil } lazy val publishSettings : Seq[Setting[_]] = Seq( - publishDists := { - val artifacts = (publish / packagedArtifacts).value - val ver = VersionUtil.versionProperties.value.canonicalVersion - val log = streams.value.log - val mappings = artifacts.toSeq.map { case (a, f) => - val typeSuffix = a.`type` match { - case "pom" => "-pom.xml" - case "jar" => ".jar" - case "doc" => "-docs.jar" - case tpe => s"-$tpe.${a.extension}" - } - val to = file("dists/maven-sbt") / ver / a.name / (a.name + typeSuffix) - log.info(s"Publishing $f to $to") - (f, to) - } - IO.copy(mappings) - }, - 
credentials ++= { - val file = Path.userHome / ".credentials" - if (file.exists && !file.isDirectory) List(Credentials(file)) - else Nil - }, // Add a "default" Ivy configuration because sbt expects the Scala distribution to have one: ivyConfigurations += Configuration.of("Default", "default", "Default", true, Vector(Configurations.Runtime), true), publishMavenStyle := true @@ -95,16 +72,16 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -(Global / baseVersion) := "2.12.20" -(Global / baseVersionSuffix) := "SNAPSHOT" -(ThisBuild / organization) := "org.scala-lang" -(ThisBuild / homepage) := Some(url("https://www.scala-lang.org")) -(ThisBuild / startYear) := Some(2002) -(ThisBuild / licenses) += (("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0"))) -(ThisBuild / headerLicense) := Some(HeaderLicense.Custom( - s"""Scala (${(ThisBuild / homepage).value.get}) +Global / baseVersion := "2.13.17" +Global / baseVersionSuffix := "SNAPSHOT" +ThisBuild / organization := "org.scala-lang" +ThisBuild / homepage := Some(url("https://www.scala-lang.org")) +ThisBuild / startYear := Some(2002) +ThisBuild / licenses += (("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0"))) +ThisBuild / headerLicense := Some(HeaderLicense.Custom( + s"""Scala (${(ThisBuild/homepage).value.get}) | - |Copyright EPFL and Lightbend, Inc. + |Copyright EPFL and Lightbend, Inc. dba Akka | |Licensed under Apache License 2.0 |(http://www.apache.org/licenses/LICENSE-2.0). 
@@ -114,7 +91,53 @@ globalVersionSettings |""".stripMargin )) -(Global / scalaVersion) := versionProps("starr.version") +// Save MiMa logs +SavedLogs.settings + +Global / scalaVersion := { + if (DottySupport.compileWithDotty) + DottySupport.dottyVersion + else + versionProps("starr.version") +} + +// Run `sbt -Dscala.build.publishDevelocity` to publish build scans to develocity.scala-lang.org +// In Jenkins, the `...publishDevelocity=stage` value is used to set the `JENKINS_STAGE` value of the scan +ThisBuild / develocityConfiguration := { + def pubDev = Option(System.getProperty("scala.build.publishDevelocity")) + val isInsideCI = sys.env.get("JENKINS_URL").exists(_.contains("scala-ci.typesafe.com")) + val config = develocityConfiguration.value + val buildScan = config.buildScan + val buildCache = config.buildCache + config + .withProjectId(ProjectId("scala2")) + .withServer(config.server.withUrl(Some(url("https://develocity.scala-lang.org")))) + .withBuildScan( + buildScan + .withPublishing(Publishing.onlyIf(ctx => pubDev.nonEmpty && ctx.authenticated)) + .withBackgroundUpload(false) + .withTag(if (isInsideCI) "CI" else "Local") + .withTag("2.13") + .withLinks(buildScan.links ++ + sys.env.get("BUILD_URL").map(u => "Jenkins Build" -> url(u)) ++ + sys.env.get("repo_ref").map(sha => "GitHub Commit" -> url(s"https://github.com/scala/scala/commit/$sha")) ++ + sys.env.get("_scabot_pr").map(pr => "GitHub PR " -> url(s"https://github.com/scala/scala/pull/$pr"))) + .withValues(buildScan.values + + ("GITHUB_REPOSITORY" -> "scala/scala") + + ("GITHUB_BRANCH" -> "2.13.x") ++ + pubDev.filterNot(_.isEmpty).map("JENKINS_STAGE" -> _) ++ + sys.env.get("JOB_NAME").map("JENKINS_JOB_NAME" -> _) ++ + sys.env.get("repo_ref").map("GITHUB_SHA" -> _) ++ + sys.env.get("_scabot_pr").map("GITHUB_PR" -> _) ++ + sys.env.get("NODE_NAME").map("JENKINS_NODE" -> _)) + .withObfuscation(buildScan.obfuscation.withIpAddresses(_.map(_ => "0.0.0.0"))) + ) + .withBuildCache( + buildCache + 
.withLocal(buildCache.local.withEnabled(false)) + .withRemote(buildCache.remote.withEnabled(false)) + ) +} lazy val instanceSettings = Seq[Setting[_]]( // we don't cross build Scala itself @@ -142,35 +165,53 @@ lazy val instanceSettings = Seq[Setting[_]]( Quiet.silenceScalaBinaryVersionWarning ) + lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]]( // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, - (Compile / javacOptions) ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), - (Compile / unmanagedJars) := Seq.empty, // no JARs in version control! - (Compile / sourceDirectory) := baseDirectory.value, - (Compile / unmanagedSourceDirectories) := List(baseDirectory.value), - (Compile / unmanagedResourceDirectories) += (ThisBuild / baseDirectory).value / "src" / thisProject.value.id, + projectFolder := thisProject.value.id, // overridden in configureAsSubproject + Compile / javacOptions ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), + Compile / javacOptions ++= ( + if (scala.util.Properties.isJavaAtLeast("20")) + Seq("-Xlint:-options") // allow `-source 1.8` and `-target 1.8` + else + Seq()), + Compile / unmanagedJars := Seq.empty, // no JARs in version control! 
+ Compile / sourceDirectory := baseDirectory.value, + Compile / unmanagedSourceDirectories := List(baseDirectory.value), + Compile / unmanagedResourceDirectories += (ThisBuild / baseDirectory).value / "src" / projectFolder.value, sourcesInBase := false, - (Compile / scalaSource) := (Compile / sourceDirectory).value, - (Compile / javaSource) := (Compile / sourceDirectory).value, - // resources are stored along source files in our current layout - (Compile / resourceDirectory) := (Compile / sourceDirectory).value, + Compile / scalaSource := (Compile / sourceDirectory).value, + // for some reason sbt 1.4 issues unused-settings warnings for this, it seems to me incorrectly + Global / excludeLintKeys ++= Set(scalaSource), // each subproject has to ask specifically for files they want to include - (Compile / unmanagedResources / includeFilter) := NothingFilter, - target := (ThisBuild / baseDirectory).value / "target" / thisProject.value.id, - (Compile / classDirectory) := buildDirectory.value / "quick/classes" / thisProject.value.id, - (Compile / doc / target) := buildDirectory.value / "scaladoc" / thisProject.value.id, + Compile / unmanagedResources / includeFilter := NothingFilter, + target := (ThisBuild / target).value / projectFolder.value, + Compile / classDirectory := buildDirectory.value / "quick/classes" / projectFolder.value, + Compile / doc / target := buildDirectory.value / "scaladoc" / projectFolder.value, // given that classDirectory and doc target are overridden to be _outside_ of target directory, we have // to make sure they are being cleaned properly cleanFiles += (Compile / classDirectory).value, cleanFiles += (Compile / doc / target).value, - (run / fork) := true, - (run / connectInput) := true, - (Compile / scalacOptions) += "-Ywarn-unused:imports", - // work around https://github.com/scala/bug/issues/11534 - Compile / scalacOptions += "-Wconf:cat=unchecked&msg=The outer reference in this type test cannot be checked at run time.:s", - (Compile / doc / 
scalacOptions) ++= Seq( + run / fork := true, + run / connectInput := true, + Compile / scalacOptions ++= Seq("-feature", "-Xlint", + //"-Wunused:patvars", + //"-Wunused:params", + //"-Vprint", + //"-Xmaxerrs", "5", "-Xmaxwarns", "5", // uncomment for ease of development while breaking things + // work around https://github.com/scala/bug/issues/11534 + "-Wconf:cat=unchecked&msg=The outer reference in this type test cannot be checked at run time.:s", + // optimizer warnings at INFO since `-Werror` may be turned on. + // optimizer runs in CI and release builds, though not in local development. + "-Wconf:cat=optimizer:is", + // we use @nowarn for methods that are deprecated in JDK > 8, but CI/release is under JDK 8 + "-Wconf:cat=unused-nowarn:s", + "-Wconf:cat=deprecation&msg=in class Thread :s", + "-Wunnamed-boolean-literal-strict", + ), + Compile / doc / scalacOptions ++= Seq( "-doc-footer", "epfl", "-diagrams", "-implicits", @@ -178,10 +219,25 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories "-doc-version", versionProperties.value.canonicalVersion, "-doc-title", description.value, "-sourcepath", (ThisBuild / baseDirectory).value.toString, - "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH_EXT}#L€{FILE_LINE}" + "-doc-source-url", s"https://github.com/scala/scala/blob/${versionProperties.value.githubTree}/€{FILE_PATH_EXT}#L€{FILE_LINE}" ), + //maxErrors := 10, setIncOptions, - apiURL := Some(url("https://www.scala-lang.org/api/" + versionProperties.value.mavenVersion + "/")), + // http://stackoverflow.com/questions/16934488 + apiMappings ++= { + Option(System.getProperty("sun.boot.class.path")).flatMap { classPath => + classPath.split(java.io.File.pathSeparator).find(_.endsWith(java.io.File.separator + "rt.jar")) + }.map { jarPath => + Map( + file(jarPath) -> url("https://docs.oracle.com/javase/8/docs/api") + ) + }.getOrElse { + streams.value.log.warn("Failed to add bootstrap 
class path of Java to apiMappings") + Map.empty[File,URL] + } + }, + apiURL := None, // set on a per-project basis + autoAPIMappings := true, pomIncludeRepository := { _ => false }, pomExtra := { @@ -198,29 +254,43 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories LAMP/EPFL - Lightbend - Lightbend, Inc. + Akka + Lightbend, Inc. dba Akka }, headerLicense := (ThisBuild / headerLicense).value, // Remove auto-generated manifest attributes - (Compile / packageBin / packageOptions) := Seq.empty, - (Compile / packageSrc / packageOptions) := Seq.empty, + Compile / packageBin / packageOptions := Seq.empty, + Compile / packageSrc / packageOptions := Seq.empty, // Lets us CTRL-C partest without exiting SBT entirely - (Global / cancelable) := true, - - // Don't pick up source files from the project root. - sourcesInBase := false, + Global / cancelable := true, // Don't log process output (e.g. of forked `compiler/runMain ...Main`), just pass it // directly to stdout - (run / outputStrategy) := Some(StdoutOutput) -) ++ removePomDependencies ++ setForkedWorkingDirectory + run / outputStrategy := Some(StdoutOutput) +) ++ removePomDependencies ++ setForkedWorkingDirectory ++ ( + if (DottySupport.compileWithDotty) + DottySupport.commonSettings + else + Seq() +) + +lazy val fatalWarningsSettings = Seq( + Compile / scalacOptions ++= { + if (fatalWarnings.value) Seq("-Werror") + else Nil + }, + Compile / javacOptions ++= { + if (fatalWarnings.value) Seq("-Werror") + else Nil + }, + Compile / doc / scalacOptions -= "-Werror", // there are too many doc errors to enable this right now +) /** Extra post-processing for the published POM files. These are needed to create POMs that - * are equivalent to the ones from the Ant build. In the long term this should be removed and + * are equivalent to the ones from the old Ant build. In the long term this should be removed and * POMs, scaladocs, OSGi manifests, etc. should all use the same metadata. 
*/ def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = { /** Find elements in an XML document by a simple XPath and replace them */ @@ -246,10 +316,28 @@ def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = { ) ++ extra) } } +def ivyDependencyFilter(deps: Seq[(String, String)], scalaBinaryVersion: String) = { + import scala.xml._ + import scala.xml.transform._ + new RuleTransformer(new RewriteRule { + override def transform(node: Node) = node match { + case e: Elem if e.label == "dependency" && { + val org = e.attribute("org").getOrElse("").toString + val name = e.attribute("name").getOrElse("").toString + deps.exists { case (g, a) => + org == g && (name == a || name == (a + "_" + scalaBinaryVersion)) + } + } => Seq.empty + case n => n + } + }) +} + val pomDependencyExclusions = settingKey[Seq[(String, String)]]("List of (groupId, artifactId) pairs to exclude from the POM and ivy.xml") +lazy val fixCsrIvy = taskKey[Unit]("Apply pomDependencyExclusions to coursier ivy") -(Global / pomDependencyExclusions) := Nil +Global / pomDependencyExclusions := Nil /** Remove unwanted dependencies from the POM and ivy.xml. 
*/ lazy val removePomDependencies: Seq[Setting[_]] = Seq( @@ -265,46 +353,59 @@ lazy val removePomDependencies: Seq[Setting[_]] = Seq( e.child.contains({g}) && (e.child.contains({a}) || e.child.contains({a + "_" + scalaBinaryVersion.value})) } => Seq.empty - case n => Seq(n) + case n => n } }).transform(Seq(n2)).head }, + fixCsrIvy := { + // - coursier makes target/sbt-bridge/resolution-cache/org.scala-lang/scala2-sbt-bridge/2.13.12-bin-SNAPSHOT/resolved.xml.xml + // - copied to target/sbt-bridge//ivy-2.13.12-bin-SNAPSHOT.xml + // - copied to ~/.ivy2/local/org.scala-lang/scala2-sbt-bridge/2.13.12-bin-SNAPSHOT/ivys/ivy.xml + import scala.jdk.CollectionConverters._ + import scala.xml._ + val currentProject = csrProject.value + val ivyModule = org.apache.ivy.core.module.id.ModuleRevisionId.newInstance( + currentProject.module.organization.value, + currentProject.module.name.value, + currentProject.version, + currentProject.module.attributes.asJava) + val ivyFile = ivySbt.value.withIvy(streams.value.log)(_.getResolutionCacheManager).getResolvedIvyFileInCache(ivyModule) + val e = ivyDependencyFilter(pomDependencyExclusions.value, scalaBinaryVersion.value) + .transform(Seq(XML.loadFile(ivyFile))).head + XML.save(ivyFile.getAbsolutePath, e, xmlDecl = true) + }, + publishConfiguration := Def.taskDyn { + val pc = publishConfiguration.value + Def.task { + fixCsrIvy.value + pc + } + }.value, + publishLocalConfiguration := Def.taskDyn { + val pc = publishLocalConfiguration.value + Def.task { + fixCsrIvy.value + pc + } + }.value, deliverLocal := { + // this doesn't seem to do anything currently, it probably worked before sbt used coursier import scala.xml._ - import scala.xml.transform._ val f = deliverLocal.value - val deps = pomDependencyExclusions.value - val e = new RuleTransformer(new RewriteRule { - override def transform(node: Node) = node match { - case e: Elem if e.label == "dependency" && { - val org = e.attribute("org").getOrElse("").toString - val name = 
e.attribute("name").getOrElse("").toString - deps.exists { case (g, a) => - org == g && (name == a || name == (a + "_" + scalaBinaryVersion.value)) - } - } => Seq.empty - case n => Seq(n) - } - }).transform(Seq(XML.loadFile(f))).head + val e = ivyDependencyFilter(pomDependencyExclusions.value, scalaBinaryVersion.value) + .transform(Seq(XML.loadFile(f))).head XML.save(f.getAbsolutePath, e, xmlDecl = true) f } ) val disableDocs = Seq[Setting[_]]( - (Compile / doc / sources) := Seq.empty, - (Compile / packageDoc / publishArtifact) := false -) - -val disablePublishing = Seq[Setting[_]]( - publishArtifact := false, - // The above is enough for Maven repos but it doesn't prevent publishing of ivy.xml files - publish := {}, - publishLocal := {}, + Compile / doc / sources := Seq.empty, + Compile / packageDoc / publishArtifact := false ) lazy val setJarLocation: Setting[_] = - (Compile / packageBin / artifactPath) := { + Compile / packageBin / artifactPath := { // two lines below are copied over from sbt's sources: // https://github.com/sbt/sbt/blob/0.13/main/src/main/scala/sbt/Defaults.scala#L628 //val resolvedScalaVersion = ScalaVersion((scalaVersion in artifactName).value, (scalaBinaryVersion in artifactName).value) @@ -318,14 +419,14 @@ lazy val setJarLocation: Setting[_] = lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation def filterDocSources(ff: FileFilter): Seq[Setting[_]] = Seq( - (Compile / doc / sources) ~= (_.filter(ff.accept)), + Compile / doc / sources ~= (_.filter(ff.accept)), // Excluded sources may still be referenced by the included sources, so we add the compiler // output to the scaladoc classpath to resolve them. For the `library` project this is // always required because otherwise the compiler cannot even initialize Definitions without // binaries of the library on the classpath. 
Specifically, we get this error: // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int - (Compile / doc / dependencyClasspath) += (Compile / classDirectory).value, - (Compile / doc) := (Compile / doc).dependsOn((Compile / compile)).value + Compile / doc / dependencyClasspath += (Compile / classDirectory).value, + Compile / doc := (Compile / doc).dependsOn(Compile / compile).value ) def regexFileFilter(s: String): FileFilter = new FileFilter { @@ -335,36 +436,46 @@ def regexFileFilter(s: String): FileFilter = new FileFilter { def setForkedWorkingDirectory: Seq[Setting[_]] = { // When we fork subprocesses, use the base directory as the working directory. - // This“ enables `sbt> partest test/files/run/t1.scala` or `sbt> scalac sandbox/test.scala` + // This enables `sbt> partest test/files/run/t1.scala` or `sbt> scalac sandbox/test.scala` val setting = (Compile / forkOptions) := (Compile / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value) setting ++ inTask(run)(setting) } // This project provides the STARR scalaInstance for bootstrapping -lazy val bootstrap = project in file("target/bootstrap") +lazy val bootstrap = project.in(file("target/bootstrap")).settings(bspEnabled := false) lazy val library = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(Osgi.settings) .settings(AutomaticModuleName.settings("scala.library")) + .settings(fatalWarningsSettings) .settings( name := "scala-library", description := "Scala Standard Library", - compileOrder := CompileOrder.Mixed, // needed for JFunction classes in scala.runtime.java8 - (Compile / scalacOptions) ++= Seq[String]("-sourcepath", (Compile / scalaSource).value.toString), - (Compile / doc/ scalacOptions) ++= { + Compile / scalacOptions ++= Seq("-sourcepath", (Compile / scalaSource).value.toString), + Compile / scalacOptions ++= Seq("-Wconf:msg=method box|method anyValClass:s"), // unused params in patched src + 
Compile / doc / scalacOptions ++= { val libraryAuxDir = (ThisBuild / baseDirectory).value / "src/library-aux" Seq( "-doc-no-compile", libraryAuxDir.toString, "-skip-packages", "scala.concurrent.impl", - "-doc-root-content", (Compile / sourceDirectory).value + "/rootdoc.txt" + "-doc-root-content", (Compile / sourceDirectory).value + "/rootdoc.txt", + //"-required", // placeholder for internal flag ) }, - (Compile / unmanagedResources / includeFilter) := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt", + Compile / console / scalacOptions := { + val opts = (console / scalacOptions).value + val ix = (console / scalacOptions).value.indexOfSlice(Seq[String]("-sourcepath", (Compile / scalaSource).value.toString)) + opts.patch(ix, Nil, 2) + }, + Compile / unmanagedResources / includeFilter := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt", // Include *.txt files in source JAR: - (Compile / packageSrc / mappings) ++= { + Compile / packageSrc / mappings ++= { val base = (Compile / unmanagedResourceDirectories).value - base ** "*.txt" pair Path.relativeTo(base) + (base ** "*.txt" pair Path.relativeTo(base)) ++ { + val auxBase = (ThisBuild / baseDirectory).value / "src/library-aux" + auxBase ** ("*.scala" || "*.java") pair Path.relativeTo(auxBase) + } }, Osgi.headers += "Import-Package" -> "sun.misc;resolution:=optional, *", Osgi.jarlist := true, @@ -373,219 +484,240 @@ lazy val library = configureAsSubproject(project) "/project/description" -> Standard library for the Scala Programming Language, "/project/packaging" -> jar ), - // Remove the dependency on "forkjoin" from the POM because it is included in the JAR: - pomDependencyExclusions += ((organization.value, "forkjoin")), + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/")), MimaFilters.mimaSettings, ) - .settings(filterDocSources("*.scala" -- (regexFileFilter(".*/runtime/.*\\$\\.scala") || - regexFileFilter(".*/runtime/ScalaRunTime\\.scala") || - 
regexFileFilter(".*/runtime/StringAdd\\.scala")))) + .settings(filterDocSources("*.scala" -- regexFileFilter(".*/scala/runtime/.*"))) + .settings( + if (DottySupport.compileWithDotty) + DottySupport.librarySettings + else + Seq() + ) lazy val reflect = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(Osgi.settings) .settings(AutomaticModuleName.settings("scala.reflect")) + .settings(fatalWarningsSettings) .settings( name := "scala-reflect", description := "Scala Reflection Library", Osgi.bundleName := "Scala Reflect", - (Compile / doc / scalacOptions) ++= Seq( - "-skip-packages", "scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io" + Compile / scalacOptions ++= Seq( + "-Wconf:cat=deprecation&msg=early initializers:s", // compiler heavily relies upon early initializers + "-Xlint", + "-feature", + ), + Compile / doc / scalacOptions ++= Seq( + "-skip-packages", "scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io", + "-Xlint:-doc-detached,_", ), Osgi.headers += - "Import-Package" -> ("scala.*;version=\"${range;[==,=+);${ver}}\","+ - "scala.tools.nsc;resolution:=optional;version=\"${range;[==,=+);${ver}}\","+ + "Import-Package" -> (raw"""scala.*;version="$${range;[==,=+);$${ver}}",""" + + raw"""scala.tools.nsc;resolution:=optional;version="$${range;[==,=+);$${ver}}",""" + "*"), fixPom( "/project/name" -> Scala Reflect, "/project/description" -> Reflection Library for the Scala Programming Language, "/project/packaging" -> jar ), + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${projectFolder.value}/")), MimaFilters.mimaSettings, ) .dependsOn(library) -lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") / "src" / "compilerOptionsExporter") - .dependsOn(compiler, reflect, library) - .settings(clearSourceAndResourceDirectories) - .settings(commonSettings) - .settings(disableDocs) - .settings(disablePublishing) - .settings( 
- libraryDependencies ++= { - val jacksonVersion = "2.17.2" - Seq( - "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion, - "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion, - "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, - "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % jacksonVersion, - "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, - ) - } - ) - lazy val compiler = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(generateBuildCharacterFileSettings) .settings(Osgi.settings) .settings(AutomaticModuleName.settings("scala.tools.nsc")) + .settings(fatalWarningsSettings) .settings( name := "scala-compiler", description := "Scala Compiler", - libraryDependencies ++= Seq(antDep, asmDep), - // These are only needed for the POM. - libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional"), + libraryDependencies += asmDep, + libraryDependencies += diffUtilsDep, + // This is only needed for the POM: + // TODO: jline dependency is only needed for the REPL shell, which should move to its own jar + libraryDependencies += jlineDep, buildCharacterPropertiesFile := (Compile / resourceManaged).value / "scala-buildcharacter.properties", - (Compile / resourceGenerators) += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue, + Compile / resourceGenerators += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue, // this a way to make sure that classes from interactive and scaladoc projects // end up in compiler jar. 
note that we need to use LocalProject references // (with strings) to deal with mutual recursion - (Compile / packageBin / products) := + Compile / packageBin / products := (Compile / packageBin / products).value ++ - Seq((Compile / dependencyClasspath).value.find(_.get(moduleID.key).map(id => (id.organization, id.name, id.revision)).contains((asmDep.organization, asmDep.name, asmDep.revision))).get.data) ++ - (LocalProject("interactive") / Compile / packageBin / products).value ++ - (LocalProject("scaladoc") / Compile / packageBin / products).value ++ - (LocalProject("repl") / Compile / packageBin / products).value ++ - (LocalProject("repl-jline") / Compile / packageBin / products).value ++ - (LocalProject("repl-jline-embedded") / Compile / packageBin / products).value, - (Compile / unmanagedResources / includeFilter) := + (Compile / dependencyClasspath).value.filter(_.get(moduleID.key).map(id => (id.organization, id.name, id.revision)) match { + case Some((diffUtilsDep.organization, diffUtilsDep.name, diffUtilsDep.revision)) => true + case Some((asmDep.organization, asmDep.name, asmDep.revision)) => true + case _ => false + }).map(_.data) ++ + (LocalProject("interactive") / Compile / packageBin / products).value ++ + (LocalProject("scaladoc") / Compile / packageBin / products).value ++ + (LocalProject("repl") / Compile / packageBin / products).value ++ + (LocalProject("replFrontend") / Compile / packageBin / products).value, + Compile / unmanagedResources / includeFilter := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" | "*.png" | "*.gif" | "*.gif" | "*.txt", // Also include the selected unmanaged resources and source files from the additional projects in the source JAR: - (Compile / packageSrc / mappings) ++= { + Compile / packageSrc / mappings ++= { val base = (Compile / unmanagedResourceDirectories).value ++ - (LocalProject("interactive") / Compile / unmanagedResourceDirectories).value ++ - (LocalProject("scaladoc") / Compile / 
unmanagedResourceDirectories).value ++ - (LocalProject("repl")/ Compile / unmanagedResourceDirectories).value + (LocalProject("interactive") / Compile / unmanagedResourceDirectories).value ++ + (LocalProject("scaladoc") / Compile / unmanagedResourceDirectories).value ++ + (LocalProject("repl") / Compile / unmanagedResourceDirectories).value ++ + (LocalProject("replFrontend") / Compile / unmanagedResourceDirectories).value base ** ((Compile / unmanagedResources / includeFilter).value || "*.scala" || "*.psd" || "*.ai" || "*.java") pair Path.relativeTo(base) }, // Include the additional projects in the scaladoc JAR: - (Compile / doc / sources) ++= { + Compile / doc / sources ++= { val base = - (LocalProject("interactive") / Compile / unmanagedSourceDirectories).value ++ - (LocalProject("scaladoc") / Compile / unmanagedSourceDirectories).value ++ - (LocalProject("repl") / Compile / unmanagedSourceDirectories).value + (LocalProject("interactive") / Compile / unmanagedSourceDirectories).value ++ + (LocalProject("scaladoc") / Compile / unmanagedSourceDirectories).value ++ + (LocalProject("repl") / Compile / unmanagedSourceDirectories).value ++ + (LocalProject("replFrontend") / Compile / unmanagedSourceDirectories).value ((base ** ("*.scala" || "*.java")) --- (base ** "Scaladoc*ModelTest.scala") // exclude test classes that depend on partest ).get }, - (Compile / doc / scalacOptions) ++= Seq( + Compile / scalacOptions ++= Seq( + //"-Wunused", //"-Wnonunit-statement", + "-Wconf:cat=deprecation&msg=early initializers:s", // compiler heavily relies upon early initializers + ), + Compile / doc / scalacOptions ++= Seq( "-doc-root-content", (Compile / sourceDirectory).value + "/rootdoc.txt" ), Osgi.headers ++= Seq( - "Import-Package" -> ("jline.*;resolution:=optional," + - "org.apache.tools.ant.*;resolution:=optional," + - "scala.xml.*;version=\"${range;[====,====];"+versionNumber("scala-xml")+"}\";resolution:=optional," + - "scala.*;version=\"${range;[==,=+);${ver}}\"," + - 
"*"), + "Import-Package" -> raw"""org.jline.keymap.*;resolution:=optional + |org.jline.reader.*;resolution:=optional + |org.jline.style.*;resolution:=optional + |org.jline.terminal;resolution:=optional + |org.jline.terminal.impl;resolution:=optional + |org.jline.terminal.spi;resolution:=optional + |org.jline.utils;resolution:=optional + |org.jline.builtins;resolution:=optional + |scala.*;version="$${range;[==,=+);$${ver}}" + |*""".stripMargin.linesIterator.mkString(","), "Class-Path" -> "scala-reflect.jar scala-library.jar" ), - // Generate the ScriptEngineFactory service definition. The Ant build does this when building + // Generate the ScriptEngineFactory service definition. The old Ant build did this when building // the JAR but sbt has no support for it and it is easier to do as a resource generator: - generateServiceProviderResources("javax.script.ScriptEngineFactory" -> "scala.tools.nsc.interpreter.Scripted$Factory"), - (Compile / managedResourceDirectories) := Seq((Compile / resourceManaged).value), + generateServiceProviderResources("javax.script.ScriptEngineFactory" -> "scala.tools.nsc.interpreter.shell.Scripted$Factory"), + Compile / managedResourceDirectories := Seq((Compile / resourceManaged).value), fixPom( "/project/name" -> Scala Compiler, "/project/description" -> Compiler for the Scala Programming Language, "/project/packaging" -> jar ), - apiURL := None, - pomDependencyExclusions ++= List(("org.apache.ant", "ant"), ("org.scala-lang.modules", "scala-asm")) + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${projectFolder.value}/")), + pomDependencyExclusions += (("org.scala-lang.modules", "scala-asm")) ) .dependsOn(library, reflect) lazy val interactive = configureAsSubproject(project) .settings(disableDocs) - .settings(disablePublishing) + .settings(fatalWarningsSettings) + .settings(publish / skip := true) .settings( name := "scala-compiler-interactive", - description := "Scala Interactive 
Compiler" + description := "Scala Interactive Compiler", + Compile / scalacOptions ++= Seq("-Wconf:cat=deprecation&msg=early initializers:s"), ) .dependsOn(compiler) lazy val repl = configureAsSubproject(project) .settings(disableDocs) - .settings(disablePublishing) - .settings( - (run / connectInput) := true, - run := (Compile / run).partialInput(" -usejavacp").evaluated // Automatically add this so that `repl/run` works without additional arguments. - ) + .settings(fatalWarningsSettings) + .settings(publish / skip := true) + .settings(Compile / scalacOptions ++= Seq("-Wconf:cat=deprecation&msg=early initializers:s")) .dependsOn(compiler, interactive) -lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "src" / "repl-jline")) +lazy val replFrontend = configureAsSubproject(project, srcdir = Some("repl-frontend")) .settings(disableDocs) - .settings(disablePublishing) + .settings(fatalWarningsSettings) + .settings(publish / skip := true) .settings( libraryDependencies += jlineDep, - name := "scala-repl-jline" + name := "scala-repl-frontend", ) - .dependsOn(repl) - -lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" / "repl-jline-embedded-src-dummy") - .settings(scalaSubprojectSettings) - .settings(disablePublishing) .settings( - name := "scala-repl-jline-embedded", - // There is nothing to compile for this project. Instead we use the compile task to create - // shaded versions of repl-jline and jline.jar. dist/mkBin puts all of quick/repl, - // quick/repl-jline and quick/repl-jline-shaded on the classpath for quick/bin scripts. - // This is different from the Ant build where all parts are combined into quick/repl, but - // it is cleaner because it avoids circular dependencies. 
- (Compile / compile) := (Compile / compile).dependsOn(Def.task { - import java.util.jar._ - import collection.JavaConverters._ - val inputs: Iterator[JarJar.Entry] = { - val repljlineClasses = (replJline / Compile/ products).value.flatMap(base => Path.allSubpaths(base).map(x => (base, x._1))) - val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data - val jarFile = new JarFile(jlineJAR) - val jarEntries = jarFile.entries.asScala.filterNot(_.isDirectory).map(entry => JarJar.JarEntryInput(jarFile, entry)) - def compiledClasses = repljlineClasses.iterator.map { case (base, file) => JarJar.FileInput(base, file) } - (jarEntries ++ compiledClasses).filter(x => - x.name.endsWith(".class") || x.name.endsWith(".properties") || x.name.startsWith("META-INF/native") || x.name.startsWith("META-INF/maven") - ) - } - import JarJar.JarJarConfig._ - val config: Seq[JarJar.JarJarConfig] = Seq( - Rule("org.fusesource.**", "scala.tools.fusesource_embedded.@1"), - Rule("jline.**", "scala.tools.jline_embedded.@1"), - Rule("scala.tools.nsc.interpreter.jline.**", "scala.tools.nsc.interpreter.jline_embedded.@1"), - Keep("scala.tools.**") - ) - val outdir = (Compile / classDirectory).value - JarJar(inputs, outdir, config) - }).value, - (run / connectInput) := true - + run := (Compile / run).partialInput(" -usejavacp").evaluated, // so `replFrontend/run` works + Compile / run / javaOptions += "-Dorg.jline.terminal.output=forced-out", ) - .dependsOn(replJline) + .dependsOn(repl) lazy val scaladoc = configureAsSubproject(project) .settings(disableDocs) - .settings(disablePublishing) + .settings(fatalWarningsSettings) + .settings(publish / skip := true) .settings( name := "scala-compiler-doc", description := "Scala Documentation Generator", - libraryDependencies ++= Seq(scalaXmlDep), - (Compile / unmanagedResources / includeFilter) := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" | "*.woff" | "*.ttf", + Compile / unmanagedResources / 
includeFilter := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" | "*.woff" | "*.ttf", libraryDependencies ++= ScaladocSettings.webjarResources, - (Compile / resourceGenerators) += ScaladocSettings.extractResourcesFromWebjar + Compile / resourceGenerators += ScaladocSettings.extractResourcesFromWebjar, + Compile / scalacOptions ++= Seq( + "-Xlint:-doc-detached,_", + "-feature", + "-Wconf:cat=deprecation&msg=early initializers:s", + ), ) .dependsOn(compiler) +// dependencies on compiler and compiler-interface are "provided" to align with scala3-sbt-bridge +lazy val sbtBridge = configureAsSubproject(project, srcdir = Some("sbt-bridge")) + .settings(Osgi.settings) + .settings(AutomaticModuleName.settings("scala.sbtbridge")) + //.settings(fatalWarningsSettings) + .settings( + name := "scala2-sbt-bridge", + description := "sbt compiler bridge for Scala 2", + libraryDependencies += compilerInterfaceDep % Provided, + Compile / scalacOptions ++= Seq( + "-Wconf:cat=deprecation&msg=early initializers:s", // compiler heavily relies upon early initializers + ), + generateServiceProviderResources("xsbti.compile.CompilerInterface2" -> "scala.tools.xsbt.CompilerBridge"), + generateServiceProviderResources("xsbti.compile.ConsoleInterface1" -> "scala.tools.xsbt.ConsoleBridge"), + generateServiceProviderResources("xsbti.compile.ScaladocInterface2" -> "scala.tools.xsbt.ScaladocBridge"), + generateServiceProviderResources("xsbti.InteractiveConsoleFactory" -> "scala.tools.xsbt.InteractiveConsoleBridgeFactory"), + Compile / managedResourceDirectories := Seq((Compile / resourceManaged).value), + pomDependencyExclusions ++= List((organization.value, "scala-repl-frontend"), (organization.value, "scala-compiler-doc")), + fixPom( + "/project/name" -> Scala 2 sbt Bridge, + "/project/description" -> sbt compiler bridge for Scala 2, + "/project/packaging" -> jar + ), + headerLicense := Some(HeaderLicense.Custom( + s"""Zinc - The incremental compiler for Scala. 
+ |Copyright Scala Center, Lightbend dba Akka, and Mark Harrah + | + |Scala (${(ThisBuild/homepage).value.get}) + |Copyright EPFL and Lightbend, Inc. dba Akka + | + |Licensed under Apache License 2.0 + |(http://www.apache.org/licenses/LICENSE-2.0). + | + |See the NOTICE file distributed with this work for + |additional information regarding copyright ownership. + |""".stripMargin)), + ) + .dependsOn(compiler % Provided, replFrontend, scaladoc) + lazy val scalap = configureAsSubproject(project) + .settings(fatalWarningsSettings) .settings( description := "Scala Bytecode Parser", // Include decoder.properties - (Compile / unmanagedResources / includeFilter) := "*.properties", + Compile / unmanagedResources / includeFilter := "*.properties", fixPom( "/project/name" -> Scalap, "/project/description" -> bytecode analysis tool, "/project/properties" -> scala.xml.Text("") ), headerLicense := Some(HeaderLicense.Custom( - s"""Scala classfile decoder (${(ThisBuild / homepage).value.get}) + s"""Scala classfile decoder (${(ThisBuild/homepage).value.get}) | - |Copyright EPFL and Lightbend, Inc. + |Copyright EPFL and Lightbend, Inc. dba Akka | |Licensed under Apache License 2.0 |(http://www.apache.org/licenses/LICENSE-2.0). @@ -593,59 +725,65 @@ lazy val scalap = configureAsSubproject(project) |See the NOTICE file distributed with this work for |additional information regarding copyright ownership. 
|""".stripMargin)), - (Compile / headerSources) ~= { xs => + Compile / headerSources ~= { xs => val excluded = Set("Memoisable.scala", "Result.scala", "Rule.scala", "Rules.scala", "SeqRule.scala") xs filter { x => !excluded(x.getName) } }, - (Compile / headerResources) := Nil + Compile / headerResources := Nil, ) .dependsOn(compiler) lazy val partest = configureAsSubproject(project) - .dependsOn(library, reflect, compiler, scalap, replJlineEmbedded, scaladoc) + .dependsOn(library, reflect, compiler, replFrontend, scalap, scaladoc, testkit) .settings(Osgi.settings) .settings(AutomaticModuleName.settings("scala.partest")) + .settings(fatalWarningsSettings) .settings( name := "scala-partest", description := "Scala Compiler Testing Tool", - Compile / javacOptions += "-XDenableSunApiLintControl", libraryDependencies ++= List(testInterfaceDep, diffUtilsDep, junitDep), - pomDependencyExclusions ++= List((organization.value, "scala-repl-jline-embedded"), (organization.value, "scala-compiler-doc")), + Compile / scalacOptions ++= Seq( + "-Wconf:cat=deprecation&msg=early initializers:s", // compiler heavily relies upon early initializers + ), + Compile / javacOptions ++= Seq("-XDenableSunApiLintControl", "-Xlint") ++ + (if (fatalWarnings.value) Seq("-Werror") else Seq()), + pomDependencyExclusions ++= List((organization.value, "scala-repl-frontend"), (organization.value, "scala-compiler-doc")), fixPom( "/project/name" -> Scala Partest, "/project/description" -> Scala Compiler Testing Tool, "/project/packaging" -> jar - ), + ) + ) + +lazy val tastytest = configureAsSubproject(project) + .dependsOn(library, reflect, compiler, scaladoc) + .settings(disableDocs) + .settings(fatalWarningsSettings) + .settings(publish / skip := true) + .settings( + name := "scala-tastytest", + description := "Scala TASTy Integration Testing Tool", + libraryDependencies += diffUtilsDep, ) // An instrumented version of BoxesRunTime and ScalaRunTime for partest's "specialized" test category lazy val 
specLib = project.in(file("test") / "instrumented") .dependsOn(library, reflect, compiler) - .settings(clearSourceAndResourceDirectories) .settings(commonSettings) .settings(disableDocs) - .settings(disablePublishing) + .settings(fatalWarningsSettings) .settings( - (Compile / sourceGenerators) += Def.task { + publish / skip := true, + bspEnabled := false, + Compile / sourceGenerators += Def.task { import scala.collection.JavaConverters._ val srcBase = (library / Compile / sourceDirectories).value.head / "scala/runtime" val targetBase = (Compile / sourceManaged).value / "scala/runtime" def patch(srcFile: String, patchFile: String): File = try { - val patchLines: List[String] = IO.readLines(baseDirectory.value / patchFile) - val origLines: List[String] = IO.readLines(srcBase / srcFile) - import difflib.DiffUtils - val p = DiffUtils.parseUnifiedDiff(patchLines.asJava) - val r = DiffUtils.patch(origLines.asJava, p) + val p = difflib.DiffUtils.parseUnifiedDiff(IO.readLines(baseDirectory.value / patchFile).asJava) + val r = difflib.DiffUtils.patch(IO.readLines(srcBase / srcFile).asJava, p) val target = targetBase / srcFile - val patched = r.asScala.toList - IO.writeLines(target, patched) - if (patched == origLines) { - println(p) - println(patchLines.mkString("\n")) - println(origLines.mkString("\n")) - throw new RuntimeException("Patch did not apply any changes! 
" + baseDirectory.value / patchFile + " / " + (srcBase / srcFile)) - } - + IO.writeLines(target, r.asScala) target } catch { case ex: Exception => streams.value.log.error(s"Error patching $srcFile: $ex") @@ -656,69 +794,150 @@ lazy val specLib = project.in(file("test") / "instrumented") patch("BoxesRunTime.java", "boxes.patch"), patch("ScalaRunTime.scala", "srt.patch") ) - }.taskValue + }.taskValue, ) +// The Scala version used by the benchmark suites; leave undefined to use the ambient version. +def benchmarkScalaVersion = System.getProperty("benchmark.scala.version", "") + lazy val bench = project.in(file("test") / "benchmarks") - .dependsOn(library, compiler) - .settings(instanceSettings) + .dependsOn((if (benchmarkScalaVersion == "") Seq[sbt.ClasspathDep[sbt.ProjectReference]](library, compiler) else Nil): _*) + .settings(if (benchmarkScalaVersion == "") instanceSettings else Seq(scalaVersion := benchmarkScalaVersion, crossPaths := false)) .settings(disableDocs) - .settings(disablePublishing) + .settings(publish / skip := true) .enablePlugins(JmhPlugin) .settings( name := "test-benchmarks", - libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.6", + autoScalaLibrary := false, + crossPaths := true, // needed to enable per-scala-version source directories (https://github.com/sbt/sbt/pull/1799) compileOrder := CompileOrder.JavaThenScala, // to allow inlining from Java ("...
is defined in a Java source (mixed compilation), no bytecode is available") + libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.10", + libraryDependencies ++= { + if (benchmarkScalaVersion == "") Nil + else "org.scala-lang" % "scala-compiler" % benchmarkScalaVersion :: Nil + }, + //scalacOptions ++= Seq("-feature", "-opt:inline:scala/**", "-Wopt"), scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), + // Skips JMH source generators during IDE import to avoid needing to compile scala-library during the import + // should not be needed once sbt-jmh 0.4.3 is out (https://github.com/sbt/sbt-jmh/pull/207) + Jmh / bspEnabled := false ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) + +lazy val testkit = configureAsSubproject(project) + .dependsOn(compiler) + .settings(Osgi.settings) + .settings(AutomaticModuleName.settings("scala.testkit")) + .settings(fatalWarningsSettings) + .settings( + name := "scala-testkit", + description := "Scala Compiler Testkit", + libraryDependencies ++= Seq(junitDep, asmDep), + Compile / unmanagedSourceDirectories := List(baseDirectory.value), + fixPom( + "/project/name" -> Scala Testkit, + "/project/description" -> Scala Compiler Testing Tool, + "/project/packaging" -> jar + ) + ) + // Jigsaw: reflective access between modules (`setAccessible(true)`) requires an `opens` directive. // This is enforced by error (not just by warning) since JDK 16. In our tests we use reflective access // from the unnamed package (the classpath) to JDK modules in testing utilities like `assertNotReachable`. 
// `add-exports=jdk.jdeps/com.sun.tools.javap` is tests that use `:javap` in the REPL, see scala/bug#12378 -val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: "--add-exports=jdk.jdeps/com.sun.tools.javap=ALL-UNNAMED" +: +// Also --enable-native-access is needed for jvm/natives.scala +val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: "--add-exports=jdk.jdeps/com.sun.tools.javap=ALL-UNNAMED" +: "--enable-native-access=ALL-UNNAMED" +: Seq("java.util.concurrent.atomic", "java.lang", "java.lang.reflect", "java.net").map(p => s"--add-opens=java.base/$p=ALL-UNNAMED") lazy val junit = project.in(file("test") / "junit") - .dependsOn(library, reflect, compiler, partest, scaladoc) - .settings(clearSourceAndResourceDirectories) + .dependsOn(testkit, compiler, replFrontend, scaladoc, sbtBridge) .settings(commonSettings) .settings(disableDocs) - .settings(disablePublishing) + .settings(fatalWarningsSettings) + .settings(publish / skip := true) .settings( - (Test / fork) := true, - (Test / javaOptions) ++= "-Xss1M" +: addOpensForTesting, + Test / fork := true, + Test / javaOptions ++= "-Xss1M" +: addOpensForTesting, (Test / forkOptions) := (Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), (Test / testOnly / forkOptions) := (Test / testOnly / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), - libraryDependencies ++= Seq(junitDep, junitInterfaceDep, jolDep), + Compile / scalacOptions ++= Seq( + "-Xlint:-valpattern", + "-Wconf:msg=match may not be exhaustive:s", // if we missed a case, all that happens is the test fails + "-Wconf:cat=lint-nullary-unit&site=.*Test:s", // normal unit test style + "-Ypatmat-exhaust-depth", "40", // despite not caring about patmat exhaustiveness, we still get warnings for this + ), + Compile / javacOptions ++= Seq("-Xlint"), + libraryDependencies ++= Seq(junitInterfaceDep, jolDep, diffUtilsDep, compilerInterfaceDep), testOptions += Tests.Argument(TestFrameworks.JUnit, 
"-a", "-v", "-s"), - (Compile / unmanagedSourceDirectories) := Nil, - (Test / unmanagedSourceDirectories) := List(baseDirectory.value), + Compile / unmanagedSourceDirectories := Nil, + Test / unmanagedSourceDirectories := List(baseDirectory.value), Test / headerSources := Nil, ) +lazy val tasty = project.in(file("test") / "tasty") + .settings(commonSettings) + .dependsOn(tastytest) + .settings(disableDocs) + .settings(publish / skip := true) + .settings( + Test / fork := true, + libraryDependencies ++= Seq(junitInterfaceDep, TastySupport.scala3Library), + testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), + Test / testOptions += Tests.Argument( + s"-Dtastytest.src=${baseDirectory.value}", + s"-Dtastytest.packageName=tastytest" + ), + Compile / unmanagedSourceDirectories := Nil, + Test / unmanagedSourceDirectories := List(baseDirectory.value/"test"), + ) + .configs(TastySupport.CompilerClasspath, TastySupport.LibraryClasspath) + .settings( + inConfig(TastySupport.CompilerClasspath)(Defaults.configSettings), + inConfig(TastySupport.LibraryClasspath)(Defaults.configSettings), + libraryDependencies ++= Seq( + TastySupport.scala3Compiler % TastySupport.CompilerClasspath, + TastySupport.scala3Library % TastySupport.LibraryClasspath, + ), + javaOptions ++= { + import java.io.File.pathSeparator + val scalaLibrary = (library / Compile / classDirectory).value.getAbsoluteFile() + val scalaReflect = (reflect / Compile / classDirectory).value.getAbsoluteFile() + val dottyCompiler = (TastySupport.CompilerClasspath / managedClasspath).value.seq.map(_.data) :+ scalaLibrary + val dottyLibrary = (TastySupport.LibraryClasspath / managedClasspath).value.seq.map(_.data) :+ scalaLibrary + Seq( + s"-Dtastytest.classpaths.dottyCompiler=${dottyCompiler.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.dottyLibrary=${dottyLibrary.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.scalaReflect=$scalaReflect", + ) + }, + Compile / scalacOptions ++= Seq( + 
"-Wconf:cat=lint-nullary-unit&site=.*Test:s", // normal unit test style + ), + ) + lazy val scalacheck = project.in(file("test") / "scalacheck") .dependsOn(library, reflect, compiler, scaladoc) - .settings(clearSourceAndResourceDirectories) .settings(commonSettings) + .settings(fatalWarningsSettings) .settings(disableDocs) - .settings(disablePublishing) + .settings(publish / skip := true) .settings( - // enable forking to workaround https://github.com/sbt/sbt/issues/4009 - (Test / fork) := true, - (Test / javaOptions) ++= "-Xss1M" +: addOpensForTesting, - testOptions ++= { - if ((Test / fork).value) Nil - else List(Tests.Cleanup { loader => - sbt.internal.inc.ModuleUtilities.getObject("scala.TestCleanup", loader).asInstanceOf[Runnable].run() - }) - }, - libraryDependencies ++= Seq(scalacheckDep), - (Compile / unmanagedSourceDirectories) := Nil, - (Test / unmanagedSourceDirectories) := List(baseDirectory.value) - ).settings( - // Workaround for https://github.com/sbt/sbt/pull/3985 - List(Keys.test, Keys.testOnly).map(task => (task / parallelExecution) := false) : _* + // Enable forking to workaround https://github.com/sbt/sbt/issues/4009. 
+ Test / fork := true, + // Instead of forking above, it should be possible to set: + // Test / classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.Flat, + Test / javaOptions ++= "-Xss1M" +: addOpensForTesting, + Test / testOptions += Tests.Argument( + // Full stack trace on failure: + "-verbosity", "2" + ), + libraryDependencies ++= Seq(scalacheckDep, junitDep), + Compile / unmanagedSourceDirectories := Nil, + Test / unmanagedSourceDirectories := List(baseDirectory.value), + Compile / scalacOptions ++= Seq( + "-Wconf:msg=match may not be exhaustive:s", // if we missed a case, all that happens is the test fails + "-Wconf:msg=Classes which cannot access Tree:s", // extension is irrelevant to tests + ), ) lazy val osgiTestFelix = osgiTestProject( @@ -731,13 +950,13 @@ lazy val osgiTestEclipse = osgiTestProject( def osgiTestProject(p: Project, framework: ModuleID) = p .dependsOn(library, reflect, compiler) - .settings(clearSourceAndResourceDirectories) .settings(commonSettings) .settings(disableDocs) - .settings(disablePublishing) .settings( - (Test / fork) := true, - (Test / parallelExecution) := false, + publish / skip := true, + bspEnabled := false, + Test / fork := true, + Test / parallelExecution := false, libraryDependencies ++= { val paxExamVersion = "4.11.0" // Last version which supports Java 9+ Seq( @@ -751,18 +970,18 @@ def osgiTestProject(p: Project, framework: ModuleID) = p "ch.qos.logback" % "logback-core" % "1.2.8", "ch.qos.logback" % "logback-classic" % "1.2.8", "org.slf4j" % "slf4j-api" % "1.7.32", - framework % "test" + framework % Test ) }, - (Test / Keys.test) := (Test / Keys.test).dependsOn((Compile / packageBin)).value, - (Test / Keys.testOnly) := (Test / Keys.testOnly).dependsOn((Compile / packageBin)).evaluated, + Test / Keys.test := (Test / Keys.test).dependsOn(Compile / packageBin).value, + Test / Keys.testOnly := (Test / Keys.testOnly).dependsOn(Compile / packageBin).evaluated, testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", 
"-v", "-q"), - (Test / javaOptions) ++= ("-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi") +: addOpensForTesting, - (test / Test / forkOptions) := (test / Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), - (Test / unmanagedSourceDirectories) := List((ThisBuild / baseDirectory).value / "test" / "osgi" / "src"), - (Compile / unmanagedResourceDirectories) := (Test / unmanagedSourceDirectories).value, - (Compile / unmanagedResources / includeFilter) := "*.xml", - (Compile / packageBin) := { // Put the bundle JARs required for the tests into build/osgi + Test / javaOptions ++= ("-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi") +: addOpensForTesting, + Test / Keys.test / forkOptions := (Test / Keys.test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), + Test / unmanagedSourceDirectories := List((ThisBuild / baseDirectory).value / "test" / "osgi" / "src"), + Compile / unmanagedResourceDirectories := (Test / unmanagedSourceDirectories).value, + Compile / unmanagedResources / includeFilter := "*.xml", + Compile / packageBin := { // Put the bundle JARs required for the tests into build/osgi val targetDir = (ThisBuild / buildDirectory).value / "osgi" val mappings = ((dist / mkPack).value / "lib").listFiles.collect { case f if f.getName.startsWith("scala-") && f.getName.endsWith(".jar") => (f, targetDir / f.getName) @@ -773,59 +992,121 @@ def osgiTestProject(p: Project, framework: ModuleID) = p cleanFiles += (ThisBuild / buildDirectory).value / "osgi" ) -lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "partest-javaagent") - .settings(commonSettings) - .settings(generatePropertiesFileSettings) +lazy val verifyScriptedBoilerplate = taskKey[Unit]("Ensure scripted tests have the necessary boilerplate.") + +// Running scripted tests locally +// - `set ThisBuild / Compile / packageDoc / publishArtifact := false` for faster turn around time +// - 
`sbtTest/scripted source-dependencies/scalac-options` to run a single test +// - `set sbtTest/scriptedBufferLog := false` to see sbt log of test +// - add `> set logLevel := Level.Debug` to individual `test` script for debug output +// - uncomment `-agentlib:...` below to attach the debugger while running a test +lazy val sbtTest = project.in(file("test") / "sbt-test") + .enablePlugins(ScriptedPlugin) + .settings(disableDocs) + .settings( + scalaVersion := appConfiguration.value.provider.scalaProvider.version, + publish / skip := true, + bspEnabled := false, + target := (ThisBuild / target).value / thisProject.value.id, + + sbtTestDirectory := baseDirectory.value, + + scriptedBatchExecution := true, // set to `false` to execute each test in a separate sbt instance + scriptedParallelInstances := 2, + + // hide sbt output of scripted tests + scriptedBufferLog := true, + + scriptedLaunchOpts ++= Seq( + "-Dplugin.scalaVersion=" + version.value, + "-Dsbt.boot.directory=" + (target.value / ".sbt-scripted").getAbsolutePath, // Workaround sbt/sbt#3469 + "-Dscripted.common=" + (baseDirectory.value / "common.sbt.template").getAbsolutePath, + // "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005", + ), + + // Pass along ivy home and repositories settings to sbt instances run from the tests + scriptedLaunchOpts ++= { + val repositoryPath = (io.Path.userHome / ".sbt" / "repositories").absolutePath + s"-Dsbt.repository.config=$repositoryPath" :: + ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList + }, + + verifyScriptedBoilerplate := { + import java.nio.file._ + val tests = (baseDirectory.value * "*").get.flatMap(f => (f * "*").get()).filter(_.isDirectory) + for (t <- tests) { + for (script <- (t * ("test" || "pending" || "disabled")).get().headOption) { + val ls = Files.lines(script.toPath) + val setup = ls.findFirst().orElseGet(() => "") + ls.close() + if (setup.trim != "> setup; reload") + throw new MessageOnlyException(s"$script is 
missing test boilerplate; the first needs to be `> setup; reload`") + } + val pluginFile = "project/ScriptedTestPlugin.scala" + if (!(t / pluginFile).exists) + throw new MessageOnlyException(s"$t is missing the file $pluginFile; copy it from any other scripted test") + } + }, + + scripted := scripted.dependsOn( + verifyScriptedBoilerplate, + library / publishLocal, + reflect / publishLocal, + compiler / publishLocal, + sbtBridge / publishLocal, + ).evaluated + ) + +lazy val partestJavaAgent = configureAsSubproject(project, srcdir = Some("partest-javaagent")) + .settings(fatalWarningsSettings) .settings(disableDocs) .settings( libraryDependencies += asmDep, - publishLocal := {}, - publish := {}, + publish / skip := true, // Setting name to "scala-partest-javaagent" so that the jar file gets that name, which the Runner relies on name := "scala-partest-javaagent", description := "Scala Compiler Testing Tool (compiler-specific java agent)", // add required manifest entry - previously included from file - (Compile / packageBin / packageOptions) += + Compile / packageBin / packageOptions += Package.ManifestAttributes( "Premain-Class" -> "scala.tools.partest.javaagent.ProfilingAgent" ), // we need to build this to a JAR exportJars := true ) lazy val test = project - .dependsOn(compiler, interactive, replJlineEmbedded, scalap, partest, partestJavaAgent, scaladoc) + .dependsOn(compiler, interactive, replFrontend, scalap, partest, partestJavaAgent, scaladoc) .disablePlugins(plugins.JUnitXmlReportPlugin) .configs(IntegrationTest) .settings(commonSettings) .settings(disableDocs) - .settings(disablePublishing) + .settings(publish / skip := true) .settings(Defaults.itSettings) .settings( - libraryDependencies ++= Seq(asmDep, scalaXmlDep), + libraryDependencies ++= Seq(asmDep), // no main sources - (Compile / sources) := Seq.empty, + Compile / unmanagedSourceDirectories := Nil, + Compile / sources := Nil, // test sources are compiled in partest run, not here - (IntegrationTest / 
sources) := Seq.empty, - (IntegrationTest / fork) := true, - // enable this in 2.13, when tests pass - //scalacOptions in Compile += "-Yvalidate-pos:parser,typer", - (Compile / scalacOptions) -= "-Ywarn-unused:imports", - (IntegrationTest / javaOptions) ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, - IntegrationTest / javaOptions ++= { if (scala.util.Properties.isJavaAtLeast("18")) List("-Djava.security.manager=allow") else Nil }, - (IntegrationTest / testOptions) += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), + IntegrationTest / unmanagedSourceDirectories := Nil, + IntegrationTest / sources := Nil, + IntegrationTest / fork := true, + Compile / scalacOptions += "-Yvalidate-pos:parser,typer", + IntegrationTest / javaOptions ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, + IntegrationTest / testOptions += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), - (IntegrationTest / testOptions) += Tests.Argument(s"""-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"""), - (IntegrationTest / testOptions) += Tests.Argument("-Dpartest.scalac_opts=" + (Compile / scalacOptions).value.mkString(" ")), + IntegrationTest / testOptions += Tests.Argument(s"-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"), + IntegrationTest / testOptions += Tests.Argument("-Dpartest.scalac_opts=" + (Compile / scalacOptions).value.mkString(" ")), (IntegrationTest / forkOptions) := (IntegrationTest / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), - (IntegrationTest / testOptions) += { + IntegrationTest / testOptions += { val cp = (Test / dependencyClasspath).value val baseDir = (ThisBuild 
/ baseDirectory).value - val instrumentedJar = (LocalProject("specLib") / Compile / packageBin / packagedArtifact).value._2 + val instrumentedJar = (specLib / Compile / packageBin / packagedArtifact).value._2 Tests.Setup { () => - // Copy code.jar (resolved in the otherwise unused scope "test") and instrumented.jar (from specLib)to the location where partest expects them + // Copy instrumented.jar (from specLib)to the location where partest expects it. IO.copyFile(instrumentedJar, baseDir / "test/files/speclib/instrumented.jar") } }, - (IntegrationTest / definedTests) += new sbt.TestDefinition( + IntegrationTest / definedTests += new sbt.TestDefinition( "partest", // marker fingerprint since there are no test classes // to be discovered by sbt: @@ -834,7 +1115,7 @@ lazy val test = project def annotationName = "partest" }, true, Array() ), - (IntegrationTest / executeTests) := { + IntegrationTest / executeTests := { val log = streams.value.log val result = (IntegrationTest / executeTests).value val result2 = (Test / executeTests).value @@ -845,38 +1126,25 @@ lazy val test = project } else result }, - (IntegrationTest / testListeners) += new PartestTestListener(target.value) + IntegrationTest / testListeners += new PartestTestListener(target.value) ) lazy val manual = configureAsSubproject(project) .settings(disableDocs) - .settings(disablePublishing) + .settings(publish / skip := true) + .settings(fatalWarningsSettings) .settings( - libraryDependencies ++= Seq(scalaXmlDep, antDep, "org.scala-lang" % "scala-library" % scalaVersion.value), - (Compile / classDirectory) := (Compile / target).value / "classes" + libraryDependencies += "org.scala-lang" % "scala-library" % scalaVersion.value, + Compile / classDirectory := (Compile / target).value / "classes" ) -lazy val libraryAll = Project("library-all", file(".") / "target" / "library-all-src-dummy") +lazy val scalaDist = Project("scalaDist", file(".") / "target" / "scala-dist-dist-src-dummy") .settings(commonSettings) 
.settings(disableDocs) .settings( - name := "scala-library-all", - (Compile / packageBin / publishArtifact) := false, - (Compile / packageSrc / publishArtifact) := false, - libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, scalaSwingDep), - apiURL := None, - fixPom( - "/project/name" -> Scala Library Powerpack, - "/project/description" -> The Scala Standard Library and Official Modules - ) - ) - .dependsOn(library, reflect) - -lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-dist-src-dummy") - .settings(commonSettings) - .settings(disableDocs) - .settings( - (Compile / packageBin / mappings) ++= { + bspEnabled := false, + name := "scala-dist", + Compile / packageBin / mappings ++= { val binBaseDir = buildDirectory.value / "pack" val binMappings = (dist / mkBin).value.pair(Path.relativeTo(binBaseDir), errorIfNone = false) // With the way the resource files are spread out over the project sources we can't just add @@ -887,7 +1155,7 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di val resMappings = resBaseDir ** ("*.html" | "*.css" | "*.gif" | "*.png") pair (p => Path.relativeTo(resBaseDir)(p).map("doc/tools/" + _)) docMappings ++ resMappings ++ binMappings }, - (Compile / resourceGenerators) += Def.task { + Compile / resourceGenerators += Def.task { val command = "fsc, scala, scalac, scaladoc, scalap" val htmlOut = (Compile / resourceManaged).value / "doc/tools" val manOut = (Compile / resourceManaged).value / "genman" @@ -901,12 +1169,12 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di (manOut ** "*.1" pair Path.rebase(manOut, fixedManOut)).foreach { case (in, out) => // Generated manpages should always use LF only. There doesn't seem to be a good reason // for generating them with the platform EOL first and then converting them but that's - // what the Ant build does. + // what the old Ant build did. 
IO.write(out, IO.readBytes(in).filterNot(_ == '\r')) } (htmlOut ** "*.html").get ++ (fixedManOut ** "*.1").get }.taskValue, - (Compile / managedResourceDirectories) := Seq((Compile / resourceManaged).value), + Compile / managedResourceDirectories := Seq((Compile / resourceManaged).value), libraryDependencies += jlineDep, apiURL := None, fixPom( @@ -914,15 +1182,21 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di "/project/description" -> The Artifacts Distributed with Scala, "/project/packaging" -> jar ), - (Compile / packageSrc / publishArtifact) := false + Compile / packageSrc / publishArtifact := false ) - .dependsOn(libraryAll, compiler, scalap) + .dependsOn(library, reflect, compiler, scalap) -lazy val root: Project = (project in file(".")) +def partestOnly(in: String): Def.Initialize[Task[Unit]] = + (testP / IntegrationTest / testOnly).toTask(" -- --terse " + in) + +def partestDesc(in: String): Def.Initialize[Task[(Result[Unit], String)]] = + partestOnly(in).result map (_ -> s"partest $in") + +lazy val scala2: Project = (project in file(".")) .settings(disableDocs) - .settings(disablePublishing) .settings(generateBuildCharacterFileSettings) .settings( + publish / skip := true, commands ++= ScriptCommands.all, extractBuildCharacterPropertiesFile := { val jar = (bootstrap / scalaInstance).value.allJars.find(_.getName contains "-compiler").get @@ -941,125 +1215,152 @@ lazy val root: Project = (project in file(".")) // source links (could be fixed by shipping these sources with the scaladoc bundles) and scala-js source maps // rely on them being on github. 
commands += Command.command("generateSources") { state => - val dir = (((ThisBuild / baseDirectory).value) / "src" / "library" / "scala") - genprod.main(Array(dir.getPath)) - GenerateAnyVals.run(dir.getAbsoluteFile) + val dir = ((ThisBuild / baseDirectory).value / "src" / "library" / "scala").getAbsoluteFile + genprod.run(dir) + GenerateAnyVals.run(dir) + GenerateFunctionConverters.run(dir) state }, - - testAll := { - val results = ScriptCommands.sequence[(Result[Unit], String)](List( - (junit / Test / Keys.test).result map (_ -> "junit/test"), - (scalacheck / Test / Keys.test).result map (_ -> "scalacheck/test"), - (testP / IntegrationTest / testOnly).toTask(" -- run").result map (_ -> "partest run"), - (testP / IntegrationTest / testOnly).toTask(" -- pos neg jvm").result map (_ -> "partest pos neg jvm"), - (testP / IntegrationTest / testOnly).toTask(" -- res scalap specialized").result map (_ -> "partest res scalap specialized"), - (testP / IntegrationTest / testOnly).toTask(" -- instrumented presentation").result map (_ -> "partest instrumented presentation"), - (testP / IntegrationTest / testOnly).toTask(" -- --srcpath scaladoc").result map (_ -> "partest --srcpath scaladoc"), - (testP / IntegrationTest / testOnly).toTask(" -- --srcpath async").result map (_ -> "partest --srcpath async"), - (osgiTestFelix / Test / Keys.test).result map (_ -> "osgiTestFelix/test"), - (osgiTestEclipse / Test / Keys.test).result map (_ -> "osgiTestEclipse/test"), - (library / mimaReportBinaryIssues).result.map(_ -> "library/mimaReportBinaryIssues"), // doesn't aggregate.. 
- (reflect / mimaReportBinaryIssues).result.map(_ -> "reflect/mimaReportBinaryIssues"), // ..so specify both - (bench / Compile / compile).map(_ => ()).result map (_ -> "bench/compile"), - Def.task(()).dependsOn( // Run these in parallel: - (library / Compile / doc), - (reflect / Compile / doc), - (compiler / Compile / doc), - (scalap / Compile / doc) - ).result map (_ -> "doc") - )).value - val log = streams.value.log - val failed = results.collect { case (Inc(i), d) => (i, d) } - if (failed.nonEmpty) { - def showScopedKey(k: Def.ScopedKey[_]): String = - Vector( - k.scope.project.toOption.map { - case p: ProjectRef => p.project - case p => p - }.map(_ + "/"), - k.scope.config.toOption.map(_.name + ":"), - k.scope.task.toOption.map(_.label + "::") - ).flatten.mkString + k.key - val loggedThis, loggedAny = new scala.collection.mutable.HashSet[String] - def findRootCauses(i: Incomplete, currentTask: String): Vector[(String, Option[Throwable])] = { - val sk = i.node match { - case Some(t: Task[_]) => - t.info.attributes.entries.collectFirst { case e if e.key == Keys.taskDefinitionKey => e.value.asInstanceOf[Def.ScopedKey[_]] } - .map(showScopedKey) - case _ => None - } - val task = sk.getOrElse(currentTask) - val dup = sk.exists(s => !loggedAny.add(s)) - if(sk.exists(s => !loggedThis.add(s))) Vector.empty - else i.directCause match { - case Some(e) => Vector((task, if(dup) None else Some(e))) - case None => i.causes.toVector.flatMap(ch => findRootCauses(ch, task)) - } - } - log.error(s"${failed.size} of ${results.length} test tasks failed:") - failed.foreach { case (i, d) => - log.error(s"- $d") - loggedThis.clear - findRootCauses(i, "").foreach { - case (task, Some(ex)) => log.error(s" - $task failed: $ex") - case (task, None) => log.error(s" - ($task failed)") - } - } - throw new RuntimeException - } + // ../docs.scala-lang/_data/compiler-options.yml + commands += Command.command("generateDocsData") { state => + val dir = (((ThisBuild / baseDirectory).value) / ".." 
/ "docs.scala-lang" / "_data") + val target = if (dir.exists) dir else ((ThisBuild / baseDirectory).value) + GenerateDocsData.run(target.getAbsoluteFile) + state }, + + testJDeps := TestJDeps.testJDepsImpl.value, + testJarSize := TestJarSize.testJarSizeImpl.value, + + // Wasn't sure if findRootCauses would work if I just aggregated testAll1/etc, so a little duplication.. + testAll := runTests(unitTests ::: partests ::: remainingTests).value, + // splitting this in two parts allows them to run in parallel on CI. + // partest takes the longest, so "partest vs. everything else" is a roughly equal split + testAll1 := runTests(unitTests ::: remainingTests).value, + testAll2 := runTests(partests).value, + setIncOptions ) - .aggregate(library, reflect, compiler, compilerOptionsExporter, interactive, repl, replJline, replJlineEmbedded, - scaladoc, scalap, partest, junit, libraryAll, scalaDist).settings( - (Compile / sources) := Seq.empty, + .aggregate(library, reflect, compiler, interactive, repl, replFrontend, sbtBridge, + scaladoc, scalap, testkit, partest, junit, scalacheck, tasty, tastytest, scalaDist).settings( + Compile / sources := Seq.empty, onLoadMessage := s"""|*** Welcome to the sbt build definition for Scala! 
*** |version=${(Global / version).value} scalaVersion=${(Global / scalaVersion).value} |Check README.md for more information.""".stripMargin ) +lazy val clearSavedLogs = SavedLogs.clearSavedLogs.result.map(_ -> "clearSavedLogs") + +lazy val unitTests = List( + ( junit / Test / testOnly).toTask(" -- +v").result.map(_ -> "junit/testOnly -- +v"), + (scalacheck / Test / Keys.test ).result.map(_ -> "scalacheck/test"), +) + +lazy val partests = List( + partestDesc("run"), + partestDesc("pos neg jvm"), + partestDesc("res scalap specialized"), + partestDesc("instrumented presentation"), + partestDesc("--srcpath scaladoc"), + partestDesc("--srcpath macro-annot"), + partestDesc("--srcpath async"), + (tasty / Test / Keys.test).result.map(_ -> "tasty/test"), +) + +lazy val remainingTests = List( + (osgiTestFelix / Test / Keys.test).result.map(_ -> "osgiTestFelix/test"), + (osgiTestEclipse / Test / Keys.test).result.map(_ -> "osgiTestEclipse/test"), + (sbtTest / scripted ).toTask("").result.map(_ -> "sbtTest/scripted"), + (library / mimaReportBinaryIssues ).result.map(_ -> "library/mimaReportBinaryIssues"), // doesn't aggregate.. 
+ (reflect / mimaReportBinaryIssues ).result.map(_ -> "reflect/mimaReportBinaryIssues"), // ..so specify both + (testJDeps ).result.map(_ -> "testJDeps"), + (testJarSize ).result.map(_ -> "testJarSize"), + (bench / Compile / compile).map(_ => ()).result.map(_ -> "bench/compile"), + Def.task(()).dependsOn( // Run these in parallel: + library / Compile / doc, + reflect / Compile / doc, + compiler / Compile / doc, + scalap / Compile / doc, + ).result.map(_ -> "doc") +) + +def runTests(tests: List[Def.Initialize[Task[(Result[Unit], String)]]]) = Def.task { + val results = ScriptCommands.sequence[(Result[Unit], String)](clearSavedLogs :: tests).value + val log = streams.value.log + val failed = results.collect { case (Inc(i), d) => (i, d) } + if (failed.nonEmpty) { + def showScopedKey(k: Def.ScopedKey[_]): String = + Vector( + k.scope.project.toOption.map { case p: ProjectRef => p.project case p => p }.map(_ + "/"), + k.scope.config.toOption.map(_.name + ":"), + k.scope.task.toOption.map(_.label + "::") + ).flatten.mkString + k.key + + val loggedThis, loggedAny = new scala.collection.mutable.HashSet[String] + + def findRootCauses(i: Incomplete, currentTask: String): Vector[(String, Option[Throwable])] = { + val skey = i.node.collect { case t: Task[_] => t.info.attributes.get(taskDefinitionKey) }.flatten + val sk = skey.map(showScopedKey) + val task = sk.getOrElse(currentTask) + val dup = sk.exists(!loggedAny.add(_)) + if (sk.exists(!loggedThis.add(_))) Vector.empty + else i.directCause match { + case Some(e) => Vector((task, if (dup) None else Some(e))) + case None => i.causes.toVector.flatMap(findRootCauses(_, task)) + } + } + + log.error("") + log.error(s"${failed.size} of ${results.length} test tasks failed:") + failed.foreach { case (i, d) => + log.error(s"- $d") + loggedThis.clear() + findRootCauses(i, "").foreach { + case (task, Some(ex)) => log.error(s" - $task failed: $ex") + case (task, None) => log.error(s" - ($task failed)") + } + } + 
SavedLogs.showSavedLogsImpl(log.error(_)) + throw new MessageOnlyException("Failure due to previous errors") + } +} + def setIncOptions = incOptions := { incOptions.value .withRecompileOnMacroDef(Some(Boolean box false).asJava) // macros in library+reflect are hard-wired to implementations with `FastTrack`. } // The following subprojects' binaries are required for building "pack": -lazy val distDependencies = Seq(replJline, replJlineEmbedded, compiler, library, reflect, scalap, scaladoc) +lazy val distDependencies = Seq(replFrontend, compiler, library, reflect, scalap, scaladoc) lazy val dist = (project in file("dist")) .settings(commonSettings) .settings( - libraryDependencies ++= Seq(scalaSwingDep, jlineDep), + bspEnabled := false, + libraryDependencies += jlineDep, mkBin := mkBinImpl.value, mkQuick := Def.task { - val cp = (LocalProject("test") / IntegrationTest / fullClasspath).value + val cp = (testP / IntegrationTest / fullClasspath).value val propsFile = (ThisBuild / buildDirectory).value / "quick" / "partest.properties" val props = new java.util.Properties() props.setProperty("partest.classpath", cp.map(_.data.getAbsolutePath).mkString(sys.props("path.separator"))) IO.write(props, null, propsFile) (ThisBuild / buildDirectory).value / "quick" - }.dependsOn((distDependencies.map((_ / Runtime / products)) :+ mkBin): _*).value, - mkPack := Def.task { (ThisBuild / buildDirectory).value / "pack" }.dependsOn((Compile / packageBin / packagedArtifact), mkBin).value, - target := (ThisBuild / baseDirectory).value / "target" / thisProject.value.id, - (Compile / packageBin) := { - val extraDeps = Set(scalaSwingDep, scalaParserCombinatorsDep, scalaXmlDep) + }.dependsOn((distDependencies.map(_ / Runtime / products) :+ mkBin): _*).value, + mkPack := Def.task { (ThisBuild / buildDirectory).value / "pack" }.dependsOn(Compile / packageBin / packagedArtifact, mkBin).value, + target := (ThisBuild / target).value / projectFolder.value, + Compile / packageBin := { val targetDir = 
(ThisBuild / buildDirectory).value / "pack" / "lib" - def uniqueModule(m: ModuleID) = (m.organization, m.name.replaceFirst("_.*", "")) - val extraModules = extraDeps.map(uniqueModule) - val extraJars = (Compile / externalDependencyClasspath).value.map(a => (a.get(moduleID.key), a.data)).collect { - case (Some(m), f) if extraModules contains uniqueModule(m) => f - } val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data - val mappings = extraJars.map(f => (f, targetDir / f.getName)) :+ ((jlineJAR, targetDir / "jline.jar")) + val mappings = Seq((jlineJAR, targetDir / "jline.jar")) IO.copy(mappings, CopyOptions() withOverwrite true) targetDir }, cleanFiles += (ThisBuild / buildDirectory).value / "quick", cleanFiles += (ThisBuild / buildDirectory).value / "pack", - (Compile / packageBin / packagedArtifact) := + Compile / packageBin / packagedArtifact := (Compile / packageBin / packagedArtifact) - .dependsOn(distDependencies.map((_ / Runtime / packageBin/ packagedArtifact)): _*) + .dependsOn(distDependencies.map(_ / Compile / packageBin / packagedArtifact): _*) .value ) .dependsOn(distDependencies.map(p => p: ClasspathDep[ProjectReference]): _*) @@ -1075,17 +1376,23 @@ lazy val dist = (project in file("dist")) * We pass `project` as an argument which is in fact a macro call. This macro determines * project.id based on the name of the lazy val on the left-hand side. */ -def configureAsSubproject(project: Project): Project = { - val base = file(".") / "src" / project.id +def configureAsSubproject(project: Project, srcdir: Option[String] = None): Project = { + val base = file(".") / "src" / srcdir.getOrElse(project.id) (project in base) .settings(scalaSubprojectSettings) + .settings(generatePropertiesFileSettings) + .settings(projectFolder := srcdir.getOrElse(project.id)) } -lazy val buildDirectory = settingKey[File]("The directory where all build products go. 
By default ./build") lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") lazy val mkQuick = taskKey[File]("Generate a full build, including scripts, in build/quick") lazy val mkPack = taskKey[File]("Generate a full build, including scripts, in build/pack") lazy val testAll = taskKey[Unit]("Run all test tasks sequentially") +lazy val testAll1 = taskKey[Unit]("Run 1/2 test tasks sequentially") +lazy val testAll2 = taskKey[Unit]("Run 2/2 test tasks sequentially") + +val testJDeps = taskKey[Unit]("Run jdeps to check dependencies") +val testJarSize = taskKey[Unit]("Test that jars have the expected size") // Defining these settings is somewhat redundant as we also redefine settings that depend on them. // However, IntelliJ's project import works better when these are set correctly. @@ -1127,16 +1434,16 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { streams.value.log.info(s"Creating scripts in $quickOutDir and $packOutDir") - mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (replJlineEmbedded / Compile / fullClasspath).value) ++ + mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (replFrontend / Compile / fullClasspath).value) ++ mkBin("scalac" , "scala.tools.nsc.Main", (compiler / Compile / fullClasspath).value) ++ - mkBin("fsc" , "scala.tools.nsc.CompileClient", (compiler / Compile / fullClasspath).value) ++ - mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (scaladoc / Compile / fullClasspath).value) ++ - mkBin("scalap" , "scala.tools.scalap.Main", (scalap / Compile / fullClasspath).value) + mkBin("fsc" , "scala.tools.nsc.fsc.CompileClient", (compiler / Compile / fullClasspath).value) ++ + mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (scaladoc / Compile / fullClasspath).value) ++ + mkBin("scalap" , "scala.tools.scalap.Main", (scalap / Compile / fullClasspath).value) } /** Generate service provider definition files under META-INF/services */ def generateServiceProviderResources(services: (String, String)*): 
Setting[_] = - (Compile / resourceGenerators) += Def.task { + Compile / resourceGenerators += Def.task { services.map { case (k, v) => val f = (Compile / resourceManaged).value / "META-INF/services" / k IO.write(f, v + "\n") @@ -1144,8 +1451,6 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] = } }.taskValue -(ThisBuild / buildDirectory) := (ThisBuild / baseDirectory).value / "build" - // Add tab completion to partest commands += Command("partest")(_ => PartestUtil.partestParser((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test")) { (state, parsed) => ("test/IntegrationTest/testOnly -- " + parsed) :: state @@ -1158,7 +1463,7 @@ watchSources ++= PartestUtil.testFilePaths((ThisBuild / baseDirectory).value, (T commands ++= { val commands = List(("scalac", "compiler", "scala.tools.nsc.Main"), - ("scala", "repl-jline-embedded", "scala.tools.nsc.MainGenericRunner"), + ("scala", "replFrontend", "scala.tools.nsc.MainGenericRunner"), ("scaladoc", "scaladoc", "scala.tools.nsc.ScalaDoc")) commands.map { @@ -1193,25 +1498,21 @@ intellij := { buildModule :: List( moduleDeps(bench).value, moduleDeps(compilerP).value, - // moduleDeps(dist).value, // No sources, therefore no module in IntelliJ moduleDeps(interactive).value, moduleDeps(junit).value, moduleDeps(library).value, - // moduleDeps(libraryAll).value, // No sources moduleDeps(manual).value, moduleDeps(partest).value, moduleDeps(partestJavaAgent).value, moduleDeps(reflect).value, moduleDeps(repl).value, - moduleDeps(replJline).value, - // moduleDeps(replJlineEmbedded).value, // No sources - // moduleDeps(root).value, // No sources - // moduleDeps(scalaDist).value, // No sources - moduleDeps(scalacheck, config = Test).value, + moduleDeps(replFrontend).value, + moduleDeps(scalacheck, config = Test).value.copy(_1 = "scalacheck-test"), moduleDeps(scaladoc).value, moduleDeps(scalap).value, + moduleDeps(tastytest).value, moduleDeps(testP).value, - 
moduleDeps(compilerOptionsExporter).value + moduleDeps(testkit).value, ) } @@ -1253,7 +1554,7 @@ intellij := { } override def transform(n: Node): Seq[Node] = n match { - case e @ Elem(_, "library", attrs, _, _, _*) if checkAttrs(attrs) => + case e @ Elem(_, "library", attrs, _, _*) if checkAttrs(attrs) => transformed = true XML.loadString(newContent) case other => @@ -1339,8 +1640,6 @@ def findJar(files: Seq[Attributed[File]], dep: ModuleID): Option[Attributed[File files.find(_.get(moduleID.key).map(extract _) == Some(extract(dep))) } -Global / excludeLintKeys := (Global / excludeLintKeys).value ++ Set(scalaSource, javaSource, resourceDirectory) - { scala.build.TravisOutput.installIfOnTravis() Nil diff --git a/dbuild-meta.json b/dbuild-meta.json deleted file mode 100644 index ca3ce2a1104b..000000000000 --- a/dbuild-meta.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "projects" : [ - { - "artifacts" : [ - { - "extension" : "jar", - "name" : "scala-library", - "organization" : "org.scala-lang" - } - ], - "dependencies" : [], - "name" : "scala-library", - "organization" : "org.scala-lang" - }, - { - "artifacts" : [ - { - "extension" : "jar", - "name" : "scala-reflect", - "organization" : "org.scala-lang" - } - ], - "dependencies" : [ - { - "extension" : "jar", - "name" : "scala-library", - "organization" : "org.scala-lang" - } - ], - "name" : "scala-reflect", - "organization" : "org.scala-lang" - }, - { - "artifacts" : [ - { - "extension" : "jar", - "name" : "scala-compiler", - "organization" : "org.scala-lang" - } - ], - "dependencies" : [ - { - "extension" : "jar", - "name" : "scala-reflect", - "organization" : "org.scala-lang" - }, - { - "extension" : "jar", - "name" : "scala-xml", - "organization" : "org.scala-lang.modules" - } - ], - "name" : "scala-compiler", - "organization" : "org.scala-lang" - }, - { - "artifacts" : [ - { - "extension" : "jar", - "name" : "scalap", - "organization" : "org.scala-lang" - } - ], - "dependencies" : [ - { - "extension" : "jar", - "name" 
: "scala-compiler", - "organization" : "org.scala-lang" - } - ], - "name" : "scalap", - "organization" : "org.scala-lang" - } - ], - "subproj" : [], - "version" : "2.12.0" -} diff --git a/doc/LICENSE.md b/doc/LICENSE.md index 127e95fc5a9e..a85420a7d71c 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -2,9 +2,9 @@ Scala is licensed under the [Apache License Version 2.0](https://www.apache.org/ ## Scala License -Copyright (c) 2002-2024 EPFL +Copyright (c) 2002-2025 EPFL -Copyright (c) 2011-2024 Lightbend, Inc. +Copyright (c) 2011-2025 Lightbend, Inc. dba Akka All rights reserved. @@ -25,23 +25,13 @@ limitations under the License. This software includes projects with the following licenses, which are also included in the `licenses/` directory: -### [Apache License](http://www.apache.org/licenses/LICENSE-2.0.html) -This license is used by the following third-party libraries: - - * jansi - -### [BSD License](http://www.opensource.org/licenses/bsd-license.php) -This license is used by the following third-party libraries: - - * jline - ### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause) This license is used by the following third-party libraries: - * asm + * ASM + * JLine 3 ### [MIT License](http://www.opensource.org/licenses/MIT) This license is used by the following third-party libraries: * jQuery - * tools tooltip diff --git a/doc/License.rtf b/doc/License.rtf index 990df40aa3cb..d92cb8ba249e 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -1,5 +1,5 @@ -{\rtf1\ansi\ansicpg1252\cocoartf1671 -{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;} +{\rtf1\ansi\ansicpg1252\cocoartf2511 +\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;} {\colortbl;\red255\green255\blue255;\red27\green31\blue34;\red10\green77\blue204;\red0\green0\blue0; \red21\green23\blue26;} {\*\expandedcolortbl;;\cssrgb\c14118\c16078\c18039;\cssrgb\c1176\c40000\c83922;\csgray\c0\c0; @@ -9,7 
+9,7 @@ {\list\listtemplateid3\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid201\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid3} {\list\listtemplateid4\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid301\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid4}} {\*\listoverridetable{\listoverride\listid1\listoverridecount0\ls1}{\listoverride\listid2\listoverridecount0\ls2}{\listoverride\listid3\listoverridecount0\ls3}{\listoverride\listid4\listoverridecount0\ls4}} -\paperw11900\paperh16840\margl1440\margr1440\vieww17360\viewh22480\viewkind0 +\paperw11900\paperh16840\margl1440\margr1440\vieww17360\viewh20980\viewkind0 \deftab720 \pard\pardeftab720\sl360\sa320\partightenfactor0 @@ -23,8 +23,8 @@ Scala is licensed under the\'a0{\field{\*\fldinst{HYPERLINK "https://www.apache. \fs48 \cf2 Scala License\ \pard\pardeftab720\sl360\sa320\partightenfactor0 -\f0\b0\fs28 \cf2 Copyright (c) 2002-2024 EPFL\ -Copyright (c) 2011-2024 Lightbend, Inc.\ +\f0\b0\fs28 \cf2 Copyright (c) 2002-2025 EPFL\ +Copyright (c) 2011-2025 Lightbend, Inc. dba Akka\ All rights reserved.\ \pard\pardeftab720\sl360\sa320\partightenfactor0 \cf2 \cb4 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at {\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt http://www.apache.org/licenses/LICENSE-2.0}}.\ @@ -40,17 +40,6 @@ Unless required by applicable law or agreed to in writing, software distributed \f0\b0\fs28 \cf2 This software includes projects with the following licenses, which are also included in the\'a0\cb5 licenses/\cb1 \'a0directory:\ \pard\pardeftab720\sl300\partightenfactor0 -\f1\b \cf3 \ -\pard\pardeftab720\sl360\sa320\partightenfactor0 -{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0.html"}}{\fldrslt \cf3 Apache License}}\cf2 \ -\pard\pardeftab720\sl360\sa320\partightenfactor0 - -\f0\b0 \cf2 This license is used by the following third-party libraries:\ -\pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 -\ls1\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 -jansi\ -\pard\pardeftab720\sl300\partightenfactor0 - \f1\b \cf3 \ \pard\pardeftab720\sl360\sa320\partightenfactor0 {\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/bsd-license.php"}}{\fldrslt \cf3 BSD License}}\cf2 \ @@ -59,7 +48,7 @@ jansi\ \f0\b0 \cf2 This license is used by the following third-party libraries:\ \pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 \ls2\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 -jline\ +JLine 3\ \pard\pardeftab720\sl300\partightenfactor0 \f1\b \cf3 \ @@ -70,7 +59,7 @@ jline\ \f0\b0 \cf2 This license is used by the following third-party libraries:\ \pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 \ls3\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 -asm\ +ASM\ \pard\pardeftab720\sl300\partightenfactor0 \f1\b \cf3 \ @@ -82,6 +71,4 @@ asm\ \pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 \ls4\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 
}\expnd0\expndtw0\kerning0 jQuery\ -\ls4\ilvl0\kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 -tools tooltip\ -} +} \ No newline at end of file diff --git a/doc/README b/doc/README index 3361044f73d4..f7d3d44ab721 100644 --- a/doc/README +++ b/doc/README @@ -9,7 +9,7 @@ We welcome contributions at https://github.com/scala/scala! Scala Tools ----------- -- scala Scala interactive interpreter +- scala Scala REPL (interactive shell) - scalac Scala compiler - fsc Scala resident compiler - scaladoc Scala API documentation generator diff --git a/doc/internal/tastyreader.md b/doc/internal/tastyreader.md new file mode 100644 index 000000000000..8f39f7815438 --- /dev/null +++ b/doc/internal/tastyreader.md @@ -0,0 +1,192 @@ +# TASTy Reader For Scala 2 + +The [**TASTy Reader For Scala 2**](https://scala.epfl.ch/projects.html#tastyScala2), included in the Scala 2.x Compiler will enable usage in Scala `2.13.x` of dependencies that have been compiled with `dotc`, the reference compiler of Scala `3.0`. + +TASTy is an intermediate representation of a Scala program after type checking and term elaboration, such as inference of implicit parameters. When compiling code with Scala 3, a single TASTy document is associated with each pair of root class and companion object. Within a TASTy document, the public API of those roots and any inner classes can be read, in a similar way to pickles in the Scala 2.x series. + +## Working with the code + +### Compiler flags + +- `-Ytasty-reader` enables the support for reading Scala 3's TASTy files. + +- `-Ydebug-tasty` enables rich output when traversing tasty files, important for tracing the history of events when diagnosing errors. + +- `-Ytasty-no-annotations` ignores all annotations on tasty symbols, may be useful for ignoring complex annotations that are unsupported, but will prevent safety checks for pattern matching. 
+ +### Entry Points + +A classfile is assumed to have an associated TASTy file if it has a `TASTY` classfile attribute (not available through +Java reflection). This attribute contains a UUID that matches a UUID in the header of a sibling `.tasty` file of the +same directory as the classfile. This file is then found and the UUIDs are compared in +`scala.tools.nsc.symtab.classfile.ClassfileParser`. +After validation of the header, the tasty file is traversed in `scala.tools.nsc.tasty.TastyUnpickler.unpickle`, which +reads any definitions into the symbol table of the compiler. + +### Concepts in TASTy + +A TASTy document is composed of a header, which contains a magic number `0x5CA1AB1F`, a version number and a UUID. +The TASTy document then is composed of a list of names, followed by customisable "sections". The section we are +interested in for Scala 2 is the "ASTs" section. The ASTs section contains a package definition for the root class and +companion of the tasty file. In TASTy, both terms and types are made of trees, and sometimes trees can be reused in +either term or type position, for example path selections. There are five main concepts in TASTy: + - Name: has many roles + - An identifier associated with a Symbol, + - A cursor to lookup terms or types within the scope of a parent type, including resolving a specific overload, + or distinguishing between a class and its companion object's implementation class. + - To describe the erased signature of a method. + - Flags: an enumerated set of properties for a Symbol, e.g. if it is a Method, Object, Param, etc. + - Symbol: an aggregate of Flags, a Name and a Type, representing the semantic information about a definition + - Type: corresponds to a scala reflect Type, can be lazy + - Term: corresponds to a scala reflect Tree and has a Type. Annotations are represented as Terms + +### Workflow + +A typical workflow for experimenting with the TASTy reader is to: +1) create a workspace directory `$issue`, e.g.
`sandbox/issue` +2) create an output directory `$out`, e.g. `$issue/bin` +3) create a Scala 3 source file `$src3`, e.g. `$issue/FancyColours.scala` +4) compile the Scala 3 source file to `$out`: + - `dotc -d $out $src3` +5) create a Scala 2 source file, `$src2`, that uses some symbols from `$src3`, e.g. `$issue/TestFancyColours.scala` +6) compile the Scala 2 source file, adding any symbols from `$src3` to the classpath: + - `scalac -Ydebug-tasty -d $out -classpath $out $src2` + +Here are some example source files from the above scenario: +```scala +// FancyColours.scala - compile with Scala 3 + +trait Pretty: + self: Colour => + +trait Dull: + self: Colour => + +enum Colour: + case Pink extends Colour with Pretty + case Red extends Colour with Dull +``` +```scala +// TestFancyColours.scala - compile with Scala 2 + +object TestFancyColours { + + def describe(c: Colour) = c match { + case Colour.Pink => "Amazing!" + case Colour.Red => "Yawn..." + } + + def describePretty(c: Pretty) = c match { + case Colour.Pink => "Amazing!" + } + + def describeDull(c: Dull) = c match { + case Colour.Red => "Yawn..." + } + +} +``` + +The [Script Runner](#script-runner) section describes some commands that support this workflow and can be run from sbt; which also handles providing the supported version of `dotc` on the classpath. + +Below is an example of using the [Script Runner](#script-runner) to simplify iterative development of the scenario above: + +1) First, compile the Scala 3 code with `tasty/test:runMain scala.tools.tastytest.Scripted dotc $out $issue/FancyColours.scala`. +2) Next, compile the test code from Scala 2 with `tasty/test:runMain scala.tools.tastytest.Scripted scalac $out $issue/TestFancyColours.scala -Ydebug-tasty`, which will also put the contents of `$out` on the classpath. 
+3) To aid with debugging, inspect the TASTy structure for `Colour` with `tasty/test:runMain scala.tools.tastytest.Scripted dotcd $out/Colour.tasty -print-tasty` + +In the above, relative paths will be calculated from the working directory of `tasty/test`. + +Because these commands are run from sbt, incremental changes can be made to the code for the TASTy reader and then step `2` can be immediately re-run to observe new behaviour of the compiler. + +In the output of the above step `2`, you will see the following snippet, showing progress in traversing TASTy and understanding the definition of `trait Dull`: +```scala +#[trait Dull]: Addr(4) completing Symbol(trait Dull, #6286): +#[trait Dull]: Addr(7) No symbol found at current address, ensuring one exists: +#[trait Dull]: Addr(7) registered Symbol(value , #7240) in trait Dull +#[trait Dull]: Addr(9) Template: reading parameters of trait Dull: +#[trait Dull]: Addr(9) Template: indexing members of trait Dull: +#[trait Dull]: Addr(22) No symbol found at current address, ensuring one exists: +#[trait Dull]: Addr(22) ::: => create DEFDEF +#[trait Dull]: Addr(22) parsed flags Stable | Method +#[trait Dull]: Addr(22) registered Symbol(constructor Dull, #7241) in trait Dull +#[trait Dull]: Addr(9) Template: adding parents of trait Dull: +#[trait Dull]: Addr(9) reading type TYPEREF: +#[trait Dull]: Addr(11) reading type TERMREFpkg: +#[trait Dull]: Addr(13) Template: adding self-type of trait Dull: +#[trait Dull]: Addr(15) reading term IDENTtpt: +#[trait Dull]: Addr(17) reading type TYPEREF: +#[trait Dull]: Addr(19) reading type THIS: +#[trait Dull]: Addr(20) reading type TYPEREFpkg: +#[trait Dull]: Addr(22) Template: self-type is Colour +#[trait Dull]: Addr(22) Template: Updated info of trait Dull extends AnyRef +#[trait Dull]: Addr(4) typeOf(Symbol(trait Dull, #6286)) =:= Dull; owned by package +``` + +### Tagged comments +Comments beginning with `TODO [tasty]:` express concerns specific to the implementation of the TASTy 
reader. These should be considered carefully because of either the disruptive changes they make to the rest of the code base, or as a note that there may be a more correct solution, or as a placeholder to outline missing features of Scala 3 that are not yet backported to Scala 2.x. + +## Testing + +The framework for testing the TASTy reader is contained in the `tastytest` subproject. + +The `tasty` project is an example subproject depending on `tastytest`, used to test the functionality of the TASTy +reader. Test sources are placed in the `test/tasty` directory of this repository and tested with the sbt task +`tasty/test`. Several suites exist that build upon primitives in `tastytest`: +- `run`: test that classes can depend on Scala 3 classes and execute without runtime errors. +- `neg`: assert that scala 2 test sources depending on Scala 3 classes do not compile +- `neg-isolated`: assert that code depending on symbols not on the classpath fails correctly. +- `pos`: The same as `run` except with no runtime checking, useful for validating types while waiting for bytecode to align. +- `pos-false-noannotations`: the same as `pos` but asserting code falsely compiles without warnings or errors when annotations are ignored. + +### Script Runner + +A key tool for working with the tasty reader on individual test cases is `scala.tools.tastytest.Scripted`. It provides several sub commands which share a common implementation with the core of `tastytest`, meaning that the behaviour is identical. +Each sub command is executed with the Dotty standard library and tooling on the classpath, with the version determined by +`TastySupport.dottyCompiler` in the build definition. All relative paths will use the working directory `tasty/test`: + +In the sbt shell the `scripted` runner can be executed by `tasty/test:runMain scala.tools.tastytest.Scripted`, and provides several sub-commands: +- `dotc `: compile a Scala 3 source file, which may depend on classes already compiled in `out`. 
+- `dotcd `: decompile a tasty file, pass `-print-tasty` to see the structure of the ASTs. +- `scalac `: compile a Scala 2 source file, which may depend on classes already compiled in `out`, including those compiled by Scala 3. `args` can be used to pass additional scalac flags, such as `-Ydebug-tasty` +- `runDotty `: execute the static main method of the given class, and providing no arguments. +- `javac `: compile a Java source file matching, which may depend on classes already compiled in `out`. + +### tastytest Runner + +`tastytest` is a testing library for validating that Scala 2 code can correctly depend on classes compiled with `dotc`, the Scala 3 compiler, which outputs TASTy trees. The framework has several suites for testing different scenarios. In each suite kind, the Scala 3 standard library is available to all test sources. `tastytest` does not implement the TestInterface so it is recommended to call its entry points from JUnit, like in `test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala`. + +#### run Suites +A `run` suite tests the runtime behaviour of Scala 2 code that may extend or call into code compiled with `dotc`, and is specified as follows: + + 1) A root source `$src` is declared, e.g. `"run"` + 2) Compile sources in `$src/pre/**/` with the Scala 2 compiler, this step may be used to create helper methods available to Scala 2 and 3 sources, or embedded test runners. + 3) Compile sources in `$src/src-3/**/` with the Dotty compiler. Classes compiled in `(2)` are now on the classpath. + 4) Compile sources in `$src/src-2/**/` with the Scala 2 compiler. Classes compiled in `(2)` and `(3)` are now on the classpath. + 5) All compiled classes are filtered for those with file names that match the regex `(.*Test)\.class`, and have a corresponding source file in `$src/src-2/**/` matching `$1.scala`, where `$1` is the substituted name of the class file. 
The remaining classes are executed sequentially as tests: + - A test class must have a static method named `main` and with descriptor `([Ljava.lang.String;)V`. + - The `out` and `err` print streams of `scala.Console` are intercepted before executing the `main` method. + - A successful test must not output to either stream and not throw any runtime exceptions that are not caught within `main`. + +#### pos Suites +A `pos` suite tests the compilation of Scala 2 code that may extend or call into code compiled with `dotc`, and is specified the same as `run`, except that step `(5)` is skipped. If step `(4)` succeeds then the suite succeeds. + +#### neg Suites +A `neg` suite asserts which Scala 2 code is not compatible with code compiled with `dotc`, and is specified as follows: + 1) A root source `$src` is declared, e.g. `"neg"` + 2) Compile sources in `$src/src-3/**/` with the Dotty compiler. + 3) Source files in `$src/src-2/**/` are filtered for those with names that match the regex `(.*)_fail.scala`, and an optional check file that matches `$1.check` where `$1` is the substituted test name, and the check file is in the same directory. These are sources expected to fail compilation. + 4) Compile scala sources in `$src/src-2/**/` with the Scala 2 compiler. + - Classes compiled in `(2)` are now on the classpath. + - If a Scala source fails compilation, check that it is in the set of expected fail cases, and that there is a corresponding check file that matches the compiler output, else collect in the list of failures. + - If an expected fail case compiles successfully, collect it in the list of failures. + +#### neg-isolated Suites +A `neg-isolated` suite tests the effect of missing transitive dependencies on the classpath that are available to Scala 3 dependencies of Scala 2 sources, but not to those downstream Scala 2 sources, and is specified as follows: + 1) A root source `$src` is declared, e.g. 
`"neg-isolated"` + 2) Compile sources in `$src/src-3-A/**/` with the Dotty compiler. + 3) Compile sources in `$src/src-3-B/**/` with the Dotty compiler. Classes compiled in `(2)` are now on the classpath. + 4) Identical to `neg` step `(3)` + 5) Compile scala sources in `$src/src-2/**/` with the Scala 2 compiler. Test validation behaviour matches `neg` step `(4)`, except for the following caveats: + - Only classes compiled in `(3)` will be on the classpath. Classes compiled in `(2)` are deliberately hidden. + - References to symbols compiled in `(3)` that reference symbols compiled in `(2)` should trigger missing symbol errors due to the missing transitive dependency. diff --git a/doc/licenses/apache_jansi.txt b/doc/licenses/apache_jansi.txt deleted file mode 100644 index 067a5a6a3432..000000000000 --- a/doc/licenses/apache_jansi.txt +++ /dev/null @@ -1,203 +0,0 @@ -Scala includes the JLine library, which includes the Jansi library. - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License.
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/doc/licenses/bsd_asm.txt b/doc/licenses/bsd_asm.txt index 8613cd33a298..a3591e4b406c 100644 --- a/doc/licenses/bsd_asm.txt +++ b/doc/licenses/bsd_asm.txt @@ -28,4 +28,4 @@ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/doc/licenses/bsd_jline.txt b/doc/licenses/bsd_jline.txt index 3e5dba75da38..cf45a50bea5e 100644 --- a/doc/licenses/bsd_jline.txt +++ b/doc/licenses/bsd_jline.txt @@ -1,8 +1,10 @@ -Scala includes the JLine library: +Scala includes the JLine 3 library: -Copyright (c) 2002-2006, Marc Prud'hommeaux +Copyright (c) 2002-2018, the original author or authors. All rights reserved. 
+https://opensource.org/licenses/BSD-3-Clause + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/doc/licenses/mit_tools.tooltip.txt b/doc/licenses/mit_tools.tooltip.txt deleted file mode 100644 index 27a4dbc788a9..000000000000 --- a/doc/licenses/mit_tools.tooltip.txt +++ /dev/null @@ -1,13 +0,0 @@ -Scala includes the Tools Tooltip library: - -Copyright (c) 2009 Tero Piirainen - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
diff --git a/project/AutomaticModuleName.scala b/project/AutomaticModuleName.scala index 9e9bb74ea3bc..6a02bdaf51d7 100644 --- a/project/AutomaticModuleName.scala +++ b/project/AutomaticModuleName.scala @@ -15,8 +15,8 @@ object AutomaticModuleName { def settings(name: String): Seq[Def.Setting[_]] = { val pair = ("Automatic-Module-Name" -> name) Seq( - (Compile / packageBin / packageOptions) += Package.ManifestAttributes(pair), + Compile / packageBin / packageOptions += Package.ManifestAttributes(pair), Osgi.headers += pair ) } -} \ No newline at end of file +} diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala index 3cec68215323..5d4418a6fe0a 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -1,11 +1,20 @@ package scala.build -import sbt._ +import sbt._, Keys._ /** This object defines keys that should be visible with an unqualified name in all .sbt files and the command line */ object BuildSettings extends AutoPlugin { + override def trigger = allRequirements + object autoImport { lazy val baseVersion = settingKey[String]("The base version number from which all others are derived") lazy val baseVersionSuffix = settingKey[String]("Identifies the kind of version to build") + lazy val buildDirectory = settingKey[File]("The directory where all build products go. 
By default ./build") } + import autoImport._ + + override def buildSettings = Def.settings( + ThisBuild / target := (ThisBuild / baseDirectory).value / "target", + ThisBuild / buildDirectory := (ThisBuild / baseDirectory).value / "build", + ) } diff --git a/project/DottySupport.scala b/project/DottySupport.scala new file mode 100644 index 000000000000..37d555440088 --- /dev/null +++ b/project/DottySupport.scala @@ -0,0 +1,136 @@ +package scala.build + +import sbt._ +import sbt.Keys._ +import java.io.File + +import sbt.librarymanagement.{ + DependencyResolution, ScalaModuleInfo, UpdateConfiguration, UnresolvedWarningConfiguration +} + +/** + * Settings to support validation of TastyUnpickler against the release of dotty with the matching TASTy version + */ +object TastySupport { + val supportedTASTyRelease = "3.6.4" // TASTY: 28.6-0 + val scala3Compiler = "org.scala-lang" % "scala3-compiler_3" % supportedTASTyRelease + val scala3Library = "org.scala-lang" % "scala3-library_3" % supportedTASTyRelease + + val CompilerClasspath = Configuration.of("TastySupport.CompilerClasspath", "TastySupport.CompilerClasspath") + val LibraryClasspath = Configuration.of("TastySupport.LibraryClasspath", "TastySupport.LibraryClasspath") +} + +/** Settings needed to compile with Dotty, + * Only active when sbt is started with `sbt -Dscala.build.compileWithDotty=true` + * This is currently only used to check that the standard library compiles with + * Dotty in .travis.yml. 
+ */ +object DottySupport { + val dottyVersion = TastySupport.supportedTASTyRelease + val compileWithDotty: Boolean = + Option(System.getProperty("scala.build.compileWithDotty")).exists(_.toBoolean) + lazy val commonSettings = Seq( + Compile / scalacOptions ++= Seq( + "-language:implicitConversions" // Avoid a million warnings + ) + ) + lazy val librarySettings = Seq( + // Needed to compile scala3-library together with scala-library + compileOrder := CompileOrder.Mixed, + + // Add the scala3-library sources to the sourcepath and disable fatal warnings + Compile / scalacOptions := { + val old = (Compile / scalacOptions).value + val withoutFatalWarnings = old.filterNot(opt => opt == "-Werror" || opt.startsWith("-Wconf")) + + val (beforeSourcepath, "-sourcepath" :: oldSourcepath :: afterSourcePath) = withoutFatalWarnings.span(_ != "-sourcepath") + + val newSourcepath = + ((Compile / sourceManaged).value / "scala3-library-src").getAbsolutePath + + File.pathSeparator + oldSourcepath + + beforeSourcepath ++ ("-sourcepath" :: newSourcepath :: afterSourcePath) + }, + + Compile / scalacOptions ++= Seq( + "-Yerased-terms" // needed to compile scala3-library + ), + + // Some files shouldn't be compiled + unmanagedSources / excludeFilter ~= (old => + old || + "AnyVal.scala" + ), + + // Add the sources of scala3-library to the current project to compile the + // complete standard library of Dotty in one go. + // Adapted from similar code in the scala-js build. 
+ Compile / sourceGenerators += Def.task { + object DottyLibrarySourceFilter extends FileFilter { + def accept(file: File): Boolean = { + val name = file.getName + file.isFile && + (name.endsWith(".scala") || name.endsWith(".java")) && + !Set("AnyKind.scala", "Matchable.scala").contains(name) + } + } + + val s = streams.value + val cacheDir = s.cacheDirectory + val trgDir = (Compile / sourceManaged).value / "scala3-library-src" + + val dottyLibrarySourceJar = fetchSourceJarOf( + dependencyResolution.value, + scalaModuleInfo.value, + updateConfiguration.value, + (update / unresolvedWarningConfiguration).value, + streams.value.log, + scalaOrganization.value %% "scala3-library" % scalaVersion.value) + + FileFunction.cached(cacheDir / s"fetchDottyLibrarySource", + FilesInfo.lastModified, FilesInfo.exists) { dependencies => + s.log.info(s"Unpacking scala3-library sources to $trgDir...") + if (trgDir.exists) + IO.delete(trgDir) + IO.createDirectory(trgDir) + IO.unzip(dottyLibrarySourceJar, trgDir) + + (trgDir ** DottyLibrarySourceFilter).get.toSet + } (Set(dottyLibrarySourceJar)).toSeq + }.taskValue + ) + + /** Fetch source jar for `moduleID` */ + def fetchSourceJarOf( + dependencyRes: DependencyResolution, + scalaInfo: Option[ScalaModuleInfo], + updateConfig: UpdateConfiguration, + warningConfig: UnresolvedWarningConfiguration, + log: Logger, + moduleID: ModuleID): File = { + val sourceClassifiersConfig = sbt.librarymanagement.GetClassifiersConfiguration( + sbt.librarymanagement.GetClassifiersModule( + moduleID, + scalaInfo, + Vector(moduleID), + Vector(Configurations.Default) ++ Configurations.default, + Vector("sources") + ), + Vector.empty, + updateConfig.withArtifactFilter( + librarymanagement.ArtifactTypeFilter.allow(Artifact.DefaultSourceTypes) + ), + Artifact.DefaultSourceTypes.toVector, + Vector.empty + ) + + dependencyRes.updateClassifiers(sourceClassifiersConfig, warningConfig, Vector.empty, log) match { + case Right(report) => + val Vector(jar) = 
report.allFiles + jar + case _ => + throw new MessageOnlyException( + s"Couldn't retrieve `$moduleID`.") + } + } +} diff --git a/project/GenerateAnyVals.scala b/project/GenerateAnyVals.scala index d0dc77a89a57..df9917c21f31 100644 --- a/project/GenerateAnyVals.scala +++ b/project/GenerateAnyVals.scala @@ -9,22 +9,26 @@ trait GenerateAnyValReps { case class Op(op : String, doc : String) - private def companionCoercions(tos: AnyValRep*) = { - tos.toList map (to => - s"implicit def @javaequiv@2${to.javaEquiv}(x: @name@): ${to.name} = x.to${to.name}" - ) - } + private def companionCoercions(deprecated: Boolean, tos: AnyValRep*): List[String] = + tos.toList.flatMap { to => + val code = s"implicit def @javaequiv@2${to.javaEquiv}(x: @name@): ${to.name} = x.to${to.name}" + if (deprecated) + List(s"""@deprecated("Implicit conversion from @name@ to ${to.name} is dangerous because it loses precision. Write `.to${to.name}` instead.", "2.13.1")""", code) + else + List(code) + } + def coercionComment = """/** Language mandated coercions from @name@ to "wider" types. 
*/ import scala.language.implicitConversions""" def implicitCoercions: List[String] = { val coercions = this match { - case B => companionCoercions(S, I, L, F, D) - case S | C => companionCoercions(I, L, F, D) - case I => companionCoercions(L, F, D) - case L => companionCoercions(F, D) - case F => companionCoercions(D) + case B => companionCoercions(deprecated = false, S, I, L, F, D) + case S | C => companionCoercions(deprecated = false, I, L, F, D) + case I => companionCoercions(deprecated = true, F) ++ companionCoercions(deprecated = false, L, D) + case L => companionCoercions(deprecated = true, F, D) + case F => companionCoercions(deprecated = false, D) case _ => Nil } if (coercions.isEmpty) Nil @@ -146,7 +150,9 @@ import scala.language.implicitConversions""" def mkCoercions = numeric map (x => "def to%s: %s".format(x, x)) def mkUnaryOps = unaryOps map (x => "%s\n def unary_%s : %s".format(x.doc, x.op, this opType I)) - def mkStringOps = List("def +(x: String): String") + def mkStringOps = List( + "@deprecated(\"Adding a number and a String is deprecated. Use the string interpolation `s\\\"$num$str\\\"`\", \"2.13.0\")\n def +(x: String): String" + ) def mkShiftOps = ( for (op <- shiftOps ; arg <- List(I, L)) yield { val doc = op.doc + (if (this == L || arg == I) "" else "\n @deprecated(\"shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.\", \"2.12.7\")") @@ -274,7 +280,7 @@ trait GenerateAnyValTemplates { def headerTemplate = """/* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -466,9 +472,18 @@ override def getClass(): Class[Boolean] = ??? 
) def objectLines = interpolate(allCompanions).linesIterator.toList + private def nono = "`Unit` companion object is not allowed in source; instead, use `()` for the unit value" + override def mkObject = s"""@scala.annotation.compileTimeOnly("$nono")\n${super.mkObject}""" + override def boxUnboxInterpolations = Map( - "@boxRunTimeDoc@" -> "", - "@unboxRunTimeDoc@" -> "", + "@boxRunTimeDoc@" -> """ + * This method is not intended for use in source code. + * The runtime representation of this value is platform specific. + *""", + "@unboxRunTimeDoc@" -> """ + * This method is not intended for use in source code. + * The result of successfully unboxing a value is `()`. + *""", "@unboxDoc@" -> "the Unit value ()", "@boxImpl@" -> "scala.runtime.BoxedUnit.UNIT", "@unboxImpl@" -> "x.asInstanceOf[scala.runtime.BoxedUnit]" @@ -488,12 +503,12 @@ override def getClass(): Class[Boolean] = ??? } object GenerateAnyVals { - def run(outDir: java.io.File) { + def run(outDir: java.io.File): Unit = { val av = new GenerateAnyVals av.make() foreach { case (name, code ) => val file = new java.io.File(outDir, name + ".scala") - sbt.IO.write(file, code, java.nio.charset.Charset.forName("UTF-8"), false) + sbt.IO.write(file, code, java.nio.charset.StandardCharsets.UTF_8, false) } } } diff --git a/project/GenerateDocsData.scala b/project/GenerateDocsData.scala new file mode 100644 index 000000000000..bb3f85a05bdb --- /dev/null +++ b/project/GenerateDocsData.scala @@ -0,0 +1,170 @@ +package scala.build + +trait SettingsDescriptorModel { + case class Section(category: String, description: String, options: List[ScalacOption]) + case class ScalacOption( + option: String, + schema: Schema, + description: String, + abbreviations: Seq[String] = Seq.empty, + deprecated: Option[String] = None, + note: Option[String] = None + ) + case class Schema( + `type`: String, + arg: Option[String] = None, + multiple: Option[Boolean] = None, + default: Option[Any] = None, + choices: Seq[Choice] = Seq.empty, + 
min: Option[Any] = None, + max: Option[Any] = None + ) + case class Choice(choice: String, description: Option[String] = None, deprecated: Option[String] = None) + + object Fixup { + private[this] val quoted = """`([^`']+)'""".r + private[this] val htmlTag = """<([^>]+)>""".r + + def markdownifyBackquote(s: String): String = quoted.replaceAllIn(s, "`$1`") + def dehtmlfy(s: String): String = htmlTag.replaceAllIn(s, "$1") + } +} +/** Externalize a descriptor of ScalaSettings in YAML format. + */ +class SettingsDescriptor extends SettingsDescriptorModel { + import scala.tools.nsc.Settings + val settings = new Settings(_ => ???) + import settings._ + import Fixup._ + // Pasted from ./src/compiler/scala/tools/nsc/settings/AbsSettings.scala to avoid bootstrap error. + // The names have been changed to protect the innocent. Also extra categories. + implicit class UpgradedTests(s: Setting) { + import s.{name, deprecationMessage} + def isAdvanced_? = name.startsWith("-X") && name != "-X" + def isPrivate_? = name.startsWith("-Y") && name != "-Y" && !isPreso_? + def isVerbose_? = name.startsWith("-V") && name != "-V" + def isWarning_? = name match { + case "-W" | "-Werror" => false + case "-Xlint" => true + case _ => name.startsWith("-W") + } + def isPreso_? = name.startsWith("-Ypresentation") + def isStandard_? = !isAdvanced_? && !isPrivate_? && !isWarning_? && !isVerbose_? && !isPreso_? + def isDeprecated_? 
= deprecationMessage.isDefined + } + val sections: List[(String, Setting => Boolean, String)] = List( + ("Standard Settings", _.isStandard_?, + "A set of standard options that are supported on the current development environment and will be supported in future releases."), + //("JVM Settings", _.isWarning_?, ""), + //("Plugin Settings", _.isWarning_?, ""), + ("Advanced Settings", _.isAdvanced_?, ""), + ("Verbose Settings", _.isVerbose_?, ""), + ("Private Settings", _.isPrivate_?, ""), + ("Warning Settings", _.isWarning_?, ""), + ("IDE-specific Settings", _.isPreso_?, ""), + ) + def mergeChoice(labels: Seq[String], descriptions: Seq[String]): Seq[Choice] = + labels.zipAll(descriptions, "", "").map { + case (choice, d) => Choice( + choice, + description = Option(d).map(markdownifyBackquote).map(dehtmlfy).filter(_.nonEmpty), + // FIXME + deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => d.toLowerCase.contains("deprecated")) + ) + } + def schema(s: Setting): Schema = s match { + case b: BooleanSetting => Schema("Boolean") + case i: IntSetting => Schema("Int", default = Some(i.default), min = i.range.map(_._1), max = i.range.map(_._2)) + case c: ChoiceSetting => + val choices = mergeChoice(c.choices, c.choicesHelp) + Schema("Choice", arg = Some(c.helpArg).map(dehtmlfy), default = Option(c.default), choices = choices) + case mc: MultiChoiceSetting[_] => + val choices = mergeChoice(mc.choices, mc.descriptions) + Schema("Choice", multiple = Some(true), arg = Some(mc.helpArg).map(dehtmlfy), choices = choices) + case ps: PhasesSetting => Schema("Phases", default = Option(ps.default)) + case px: PrefixSetting => Schema("Prefix") + case sv: ScalaVersionSetting => Schema("ScalaVersion", arg = Some(sv.arg).map(dehtmlfy), default = Some(sv.initial.unparse)) + case pathStr: PathSetting => Schema("Path", arg = Some(pathStr.arg), default = Some(pathStr.default)) + case str: StringSetting => Schema("String", arg = Some(str.arg).map(dehtmlfy), default = Some(str.default)) + case 
ms: MultiStringSetting => Schema("String", multiple = Some(true), arg = Some(ms.arg).map(dehtmlfy)) + } + def option(s: Setting): ScalacOption = + ScalacOption( + option = s.name, + schema = schema(s), + description = dehtmlfy(markdownifyBackquote(s.helpDescription)), + abbreviations = s.abbreviations, + //TODO + deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated")) + ) + def descriptor: String = { + val grouped = sections.map { + case (title, predicate, text) => + val options = visibleSettings.filter(predicate).map(option).toList.sortBy(_.option) + Section(title, text, options) + } + val sb = new StringBuilder + var indent = 0 + def escape(text: String): String = { + text.replaceAllLiterally("\"", "\\\"").replaceAllLiterally("\\u ", "\\\\u ") + } + def element(tag: String, value: String = "", head: Boolean = false): Unit = { + sb.append(" " * indent).append(if (head) "- " else " ") + if (tag.nonEmpty) sb.append(tag).append(":") + if (value.nonEmpty) { + if (tag.nonEmpty) sb.append(" ") + sb.append("\"").append(escape(value)).append("\"") + } + sb.append("\n") + } + def maybe[A](tag: String, value: Option[A]): Unit = value.foreach(v => element(tag, v.toString)) + def maybes[A](tag: String, value: Seq[A], handlers: (A => Unit)*): Unit = + if (value.nonEmpty) { + element(tag) + value.foreach(v => handlers.foreach(h => h(v))) + } + def indented(body: => Unit): Unit = { indent += 1; body; indent -= 1 } + def emit(section: Section): Unit = { + val Section(title, text, options) = section + element("category", title, head = true) + element("description", text) + element("options") + indented { + options.foreach { + case ScalacOption(option, schema, description, abbreviations, deprecated, note) => + element("option", option, head = true) + element("schema") + indented { + import schema._ + element("type", `type`) + maybe("arg", arg) + maybe("multiple", multiple) + maybe("default", default) + maybes("choices", choices, + 
(c: Choice) => indented(element("choice", c.choice, head = true)), + (c: Choice) => indented(maybe("description", c.description)) + ) + maybe("min", min) + maybe("max", max) + } + element("description", description) + maybes("abbreviations", abbreviations, (x: String) => indented(element("", x, head = true))) + } + } + } + grouped.foreach(emit) + sb.toString() + } +} + +object GenerateDocsData { + import java.io.File + import java.nio.charset.StandardCharsets.UTF_8 + + // output a descriptor of compiler options for docs.scala-lang/_data + def run(outDir: File): Unit = { + val file = new File(outDir, "compiler-options.yml") + val res = new SettingsDescriptor().descriptor + sbt.IO.write(file, res, UTF_8, false) + } +} diff --git a/project/GenerateFunctionConverters.scala b/project/GenerateFunctionConverters.scala new file mode 100644 index 000000000000..31ef68cdb3c0 --- /dev/null +++ b/project/GenerateFunctionConverters.scala @@ -0,0 +1,433 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +object GenerateFunctionConverters { + case class Artifact(name: String, content: String) + + val copyright = + s"""/* + | * Scala (https://www.scala-lang.org) + | * + | * Copyright EPFL and Lightbend, Inc. dba Akka + | * + | * Licensed under Apache License 2.0 + | * (http://www.apache.org/licenses/LICENSE-2.0). + | * + | * See the NOTICE file distributed with this work for + | * additional information regarding copyright ownership. + | */ + | + |// GENERATED CODE: DO NOT EDIT. 
+ |""".stripMargin + + val packaging = "package scala.jdk" + + import scala.tools.nsc._ + val settings = new Settings(msg => sys.error(msg)) + def us(cl: ClassLoader): List[String] = cl match { + case ucl: java.net.URLClassLoader => ucl.getURLs.map(u => new java.io.File(u.toURI).getAbsolutePath).toList ::: us(ucl.getParent) + case _ => Nil + } + settings.classpath.value = us(settings.getClass.getClassLoader).mkString(java.io.File.pathSeparator) + val compiler = new Global(settings) + val run = new compiler.Run + + import compiler._, definitions._ + locally { + // make sure `java.lang.Double` prints as `java.lang.Double`, not just `Double` (which resolves to `scala.Double`) + val f = classOf[scala.reflect.internal.Definitions#DefinitionsClass].getDeclaredField("UnqualifiedOwners") + f.setAccessible(true) + f.set(definitions, definitions.UnqualifiedOwners.filter(_.fullNameString != "java.lang")) + } + + def primitiveBox(tp: Type): Type = tp.typeSymbol match { + case UnitClass => BoxedUnitClass.tpe + case ByteClass => BoxedByteClass.tpe + case ShortClass => BoxedShortClass.tpe + case CharClass => BoxedCharacterClass.tpe + case IntClass => BoxedIntClass.tpe + case LongClass => BoxedLongClass.tpe + case FloatClass => BoxedFloatClass.tpe + case DoubleClass => BoxedDoubleClass.tpe + case BooleanClass => BoxedBooleanClass.tpe + case _ => tp + } + + implicit class IndentMe(v: Vector[String]) { + def indent: Vector[String] = v.map(" " + _) + } + + implicit class FlattenMe(v: Vector[Vector[String]]) { + def mkVec(join: String = ""): Vector[String] = { + val vb = Vector.newBuilder[String] + var first = true + v.foreach{ vi => + if (!first) vb += join + first = false + vb ++= vi + } + vb.result() + } + } + + implicit class DoubleFlattenMe(v: Vector[Vector[Vector[String]]]) { + def mkVecVec(join: String = ""): Vector[String] = { + val vb = Vector.newBuilder[String] + var first = true + v.foreach{ vi => + if (!first) { vb += join; vb += join } + first = false + var ifirst = true 
+ vi.foreach{ vj => + if (!ifirst) vb += join + ifirst = false + vb ++= vj + } + } + vb.result() + } + } + + implicit class SplitMyLinesAndStuff(s: String) { + // work around scala/bug#11125 + def toVec = Predef.augmentString(s).lines.toVector + def nonBlank = s.trim.length > 0 + } + + implicit class TreeToText(t: Tree) { + // work around scala/bug#11125 + def text = Predef.augmentString(showCode(t).replace("$", "")).lines.toVector + } + + case class Prioritized(lines: Vector[String], priority: Int) { + def withPriority(i: Int) = copy(priority = i) + } + + case class SamConversionCode( + base: String, + wrappedAsScala: Vector[String], + asScalaAnyVal: Vector[String], + implicitToScala: Vector[String], + asScalaDef: Vector[String], + wrappedAsJava: Vector[String], + asJavaAnyVal: Vector[String], + implicitToJava: Prioritized, + asJavaDef: Vector[String] + ) { + def impls: Vector[Vector[String]] = Vector(wrappedAsScala, asScalaAnyVal, wrappedAsJava, asJavaAnyVal) + def defs: Vector[Vector[String]] = Vector(asScalaDef, asJavaDef) + def withPriority(i: Int): SamConversionCode = copy(implicitToJava = implicitToJava.withPriority(i)) + } + object SamConversionCode { + def apply(scc: SamConversionCode*): (Vector[String], Vector[String], Vector[Vector[String]]) = { + val sccDepthSet = scc.map(_.implicitToJava.priority).toSet + val codes = + { + if (sccDepthSet != (0 to sccDepthSet.max).toSet) { + val sccDepthMap = sccDepthSet.toList.sorted.zipWithIndex.toMap + scc.map(x => x.withPriority(sccDepthMap(x.implicitToJava.priority))) + } + else scc + }.toVector.sortBy(_.base) + def priorityName(n: Int, pure: Boolean = false): String = { + val pre = + if (pure) s"Priority${n}FunctionExtensions" + else s"trait ${priorityName(n, pure = true)}" + if (!pure && n < (sccDepthSet.size-1)) s"$pre extends ${priorityName(n+1, pure = true)}" else pre + } + val impls = + "object FunctionWrappers {" +: { + codes.map(_.impls).mkVecVec().indent + } :+ "}" + val explicitDefs = 
codes.map(_.defs).mkVecVec() + val traits = codes.groupBy(_.implicitToJava.priority).toVector.sortBy(- _._1).map{ case (k,vs) => + s"import language.implicitConversions" +: + "" +: + s"${priorityName(k)} {" +: + s" import FunctionWrappers._" +: + s" " +: + { + vs.map(_.implicitToJava.lines).mkVec().indent ++ + ( + if (k == 0) Vector.fill(3)(" ") ++ codes.map(_.implicitToScala).mkVec().indent + else Vector() + ) + } :+ + s"}" + } + (impls, explicitDefs, traits) + } + } + + private def buildWrappersViaReflection: Seq[SamConversionCode] = { + + val pack: Symbol = rootMirror.getPackageIfDefined("java.util.function") + + case class Jfn(iface: Symbol, sam: Symbol) { + lazy val genericCount = iface.typeParams.length + lazy val name = sam.name.toTermName + lazy val title = iface.name.encoded + lazy val params = sam.info.params + lazy val sig = sam typeSignatureIn iface.info + lazy val pTypes = sig.params.map(_.info) + lazy val rType = sig.resultType + def arity = params.length + } + + val sams = pack.info.decls. + map(d => (d, d.typeSignature.members.filter(_.isAbstract).toList)). 
+ collect{ case (d, m :: Nil) if d.isAbstract => Jfn(d, m) } + + def generate(jfn: Jfn): SamConversionCode = { + def mkRef(tp: Type): Tree = if (tp.typeSymbol.isTypeParameter) Ident(tp.typeSymbol.name.toTypeName) else tq"$tp" + + // Types for the Java SAM and the corresponding Scala function, plus all type parameters + val scalaType = gen.mkAttributedRef(FunctionClass(jfn.arity)) + val javaType = gen.mkAttributedRef(jfn.iface) + val tnParams: List[TypeName] = jfn.iface.typeParams.map(_.name.toTypeName) + val tdParams: List[TypeDef] = tnParams.map(TypeDef(NoMods, _, Nil, EmptyTree)) + val javaTargs: List[Tree] = tdParams.map(_.name).map(Ident(_)) + val scalaTargTps = jfn.pTypes :+ jfn.rType + val scalaTargBoxedTps = scalaTargTps.map(primitiveBox) + val scalaTargs: List[Tree] = scalaTargTps.map(mkRef) + val scalaTargsBoxed: List[Tree] = scalaTargBoxedTps.map(mkRef) + val boxComment = + if (scalaTargTps.map(_.typeSymbol) != scalaTargBoxedTps.map(_.typeSymbol)) + Literal(Constant("primitiveComment")) + else + Literal(Constant("noComment")) + + // Conversion wrappers have three or four components that we need to name + // (1) The wrapper class that wraps a Java SAM as Scala function, or vice versa (ClassN) + // (2) A value class that provides .asJava or .asScala to request the conversion (ValCN) + // (3) A name for an explicit conversion method (DefN) + // (4) An implicit conversion method name (ImpN) that invokes the value class + + // Names for Java conversions to Scala + val j2sClassN = TypeName("FromJava" + jfn.title) + val j2sCompanionN = j2sClassN.toTermName + val j2sValCN = TypeName("Rich" + jfn.title + "As" + scalaType.name.encoded) + val j2sDefN = TermName("asScalaFrom" + jfn.title) + val j2sImpN = TermName("enrichAsScalaFrom" + jfn.title) + + // Names for Scala conversions to Java + val s2jAsJavaTitle = TermName("asJava" + jfn.title) + val s2jClassN = TypeName("AsJava" + jfn.title) + val s2jCompanionN = s2jClassN.toTermName + val s2jValCN = TypeName("Rich" + 
scalaType.name.encoded + "As" + jfn.title) + val s2jDefN = TermName("asJava" + jfn.title) + val s2jImpN = TermName("enrichAsJava" + jfn.title) + + // Argument lists for the function / SAM + val vParams = (jfn.params zip jfn.pTypes).map{ case (p,t) => + ValDef(NoMods, p.name.toTermName, if (t.typeSymbol.isTypeParameter) Ident(t.typeSymbol.name) else gen.mkAttributedRef(t.typeSymbol), EmptyTree) + } + val vParamRefs = vParams.map(_.name).map(Ident(_)) + + val j2sClassTree = + q"""case class $j2sClassN[..$tdParams](jf: $javaType[..$javaTargs]) extends $scalaType[..$scalaTargs] { + def apply(..$vParams) = jf.${jfn.name}(..$vParamRefs) + }""" + + val j2sValCTree = + q"""class $j2sValCN[..$tdParams](private val underlying: $javaType[..$javaTargs]) extends AnyVal { + @inline def asScala: $scalaType[..$scalaTargs] = underlying match { + case $s2jCompanionN(sf) => sf.asInstanceOf[$scalaType[..$scalaTargs]] + case _ => new $j2sClassN[..$tnParams](underlying) + } + }""" + + val j2sDefTree = + q"""@deprecated($boxComment) @inline def $j2sDefN[..$tdParams](jf: $javaType[..$javaTargs]): $scalaType[..$scalaTargsBoxed] = jf match { + case $s2jCompanionN(f) => f.asInstanceOf[$scalaType[..$scalaTargsBoxed]] + case _ => new $j2sClassN[..$tnParams](jf).asInstanceOf[$scalaType[..$scalaTargsBoxed]] + }""" + + val j2sImpTree = + q"""@inline implicit def $j2sImpN[..$tdParams](jf: $javaType[..$javaTargs]): $j2sValCN[..$tnParams] = new $j2sValCN[..$tnParams](jf)""" + + val s2jClassTree = + q"""case class $s2jClassN[..$tdParams](sf: $scalaType[..$scalaTargs]) extends $javaType[..$javaTargs] { + def ${jfn.name}(..$vParams) = sf.apply(..$vParamRefs) + }""" + + val s2jDefTree = + q"""@deprecated($boxComment) @inline def $s2jDefN[..$tdParams](sf: $scalaType[..$scalaTargsBoxed]): $javaType[..$javaTargs] = (sf: AnyRef) match { + case $j2sCompanionN(f) => f.asInstanceOf[$javaType[..$javaTargs]] + case _ => new $s2jClassN[..$tnParams](sf.asInstanceOf[$scalaType[..$scalaTargs]]) + }""" + + // This is 
especially tricky because functions are contravariant in their arguments + // Need to prevent e.g. Any => String from "downcasting" itself to Int => String; we want the more exact conversion + val (s2jImpTree, priority) = + if (jfn.pTypes.forall(! _.isFinalType) && jfn.sig == jfn.sam.typeSignature) + ( + q"""@inline implicit def $s2jImpN[..$tdParams](sf: $scalaType[..$scalaTargs]): $s2jValCN[..$tnParams] = new $s2jValCN[..$tnParams](sf)""", + tdParams.length + ) + else { + // Some types are not generic or are re-used; we had better catch those. + // Making up new type names, so switch everything to TypeName or TypeDef + // Instead of foo[A](f: (Int, A) => Long): Fuu[A] = new Foo[A](f) + // we want foo[X, A](f: (X, A) => Long)(implicit evX: Int =:= X): Fuu[A] = new Foo[A](f.asInstanceOf[(Int, A) => Long]) + // Instead of bar[A](f: A => A): Brr[A] = new Foo[A](f) + // we want bar[A, B](f: A => B)(implicit evB: A =:= B): Brr[A] = new Foo[A](f.asInstanceOf[A => B]) + val An = "A(\\d+)".r + val numberedA = collection.mutable.Set.empty[Int] + val evidences = collection.mutable.ArrayBuffer.empty[(TypeName, TypeName)] + numberedA ++= scalaTargs.map(_.toString).collect{ case An(digits) if (digits.length < 10) => digits.toInt } + val scalafnTnames = (jfn.pTypes :+ jfn.rType).zipWithIndex.map{ + case (pt, i) if (i < jfn.pTypes.length && pt.isFinalType) || (!pt.isFinalType && jfn.pTypes.take(i).exists(_ == pt)) => + val j = Iterator.from(i).dropWhile(numberedA).next + val genericName = TypeName(s"A$j") + numberedA += j + evidences += ((genericName, pt.typeSymbol.name.toTypeName)) + genericName + case (pt, _) => pt.typeSymbol.name.toTypeName + } + val scalafnTdefs = scalafnTnames. + map(TypeDef(NoMods, _, Nil, EmptyTree)). 
+ dropRight(if (jfn.rType.isFinalType) 1 else 0) + val evs = evidences.map{ case (generic, specific) => ValDef(NoMods, TermName("ev"+generic.toString), tq"$generic =:= $specific", EmptyTree) } + val tree = + q"""@inline implicit def $s2jImpN[..$scalafnTdefs](sf: $scalaType[..$scalafnTnames])(implicit ..$evs): $s2jValCN[..$tnParams] = + new $s2jValCN[..$tnParams](sf.asInstanceOf[$scalaType[..$scalaTargs]]) + """ + (tree, tdParams.length) + } + + val s2jValFullNameAsJavaMethodTree = + if (priority > 0) + q"""@inline def $s2jAsJavaTitle: $javaType[..$javaTargs] = underlying match { + case $j2sCompanionN(sf) => sf.asInstanceOf[$javaType[..$javaTargs]] + case _ => new $s2jClassN[..$tnParams](underlying) + }""" + else EmptyTree + + val s2jValCTree = + q"""class $s2jValCN[..$tdParams](private val underlying: $scalaType[..$scalaTargs]) extends AnyVal { + @inline def asJava: $javaType[..$javaTargs] = underlying match { + case $j2sCompanionN(jf) => jf.asInstanceOf[$javaType[..$javaTargs]] + case _ => new $s2jClassN[..$tnParams](underlying) + } + $s2jValFullNameAsJavaMethodTree + }""" + + SamConversionCode( + base = jfn.title, + wrappedAsScala = j2sClassTree.text, + asScalaAnyVal = j2sValCTree.text, + implicitToScala = j2sImpTree.text, + asScalaDef = j2sDefTree.text, + wrappedAsJava = s2jClassTree.text, + asJavaAnyVal = s2jValCTree.text, + implicitToJava = Prioritized(s2jImpTree.text, priority), + asJavaDef = s2jDefTree.text + ) + } + + sams.toSeq.map(generate) + } + + def sourceFile(subPack: String, body: String): String = + s"""$copyright + | + |$packaging$subPack + | + |$body + |""".stripMargin + + def sameText(f: java.io.File, text: String): Boolean = { + val x = scala.io.Source.fromFile(f) + val lines = try { x.getLines().toVector } finally { x.close } + // work around scala/bug#11125 + lines.iterator.filter(_.nonBlank) == Predef.augmentString(text).lines.filter(_.nonBlank) + } + + def write(outDir: java.io.File, artifact: Artifact): Unit = { + val f = 
scala.tools.nsc.io.Path(outDir.getAbsolutePath) / artifact.name + if (!f.exists || !sameText(f.jfile, artifact.content)) { + f.parent.createDirectory(force = true) + f.toFile writeAll artifact.content + } + } + + def run(outDir: java.io.File): Unit = { + val (impls, explicitDefs, defss) = SamConversionCode(buildWrappersViaReflection: _*) + val javaFunConvsNoComments = + s"""/** This object contains methods that convert between Scala and Java function types. + | * + | * The explicit conversion methods defined here are intended to be used in Java code. For Scala + | * code, it is recommended to use the extension methods defined in [[scala.jdk.FunctionConverters]]. + | * + | * For details how the function converters work, see [[scala.jdk.FunctionConverters]]. + | * + | */ + |object FunctionConverters { + | import scala.jdk.FunctionWrappers._ + | + |${explicitDefs.indent.mkString("\n")} + |}""".stripMargin + + // cannot generate comments with quasiquotes + val javaFunConvs = javaFunConvsNoComments.replace(""" @deprecated("primitiveComment") """, + s""" /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + | * primitive type `scala.X` to improve compatibility when using it in Java code (the + | * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + | * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + | * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + | */ + |""".stripMargin + " ").replace("""@deprecated("noComment") """, "") + + val scalaFunConvs = + """/** This object provides extension methods that convert between Scala and Java function types. + | * + | * When writing Java code, use the explicit conversion methods defined in + | * [[javaapi.FunctionConverters]] instead. 
+ | * + | * Using the `.asJava` extension method on a Scala function produces the most specific possible + | * Java function type: + | * + | * {{{ + | * scala> import scala.jdk.FunctionConverters._ + | * scala> val f = (x: Int) => x + 1 + | * + | * scala> val jf1 = f.asJava + | * jf1: java.util.function.IntUnaryOperator = ... + | * }}} + | * + | * More generic Java function types can be created using the corresponding `asJavaXYZ` extension + | * method: + | * + | * {{{ + | * scala> val jf2 = f.asJavaFunction + | * jf2: java.util.function.Function[Int,Int] = ... + | * + | * scala> val jf3 = f.asJavaUnaryOperator + | * jf3: java.util.function.UnaryOperator[Int] = ... + | * }}} + | * + | * Converting a Java function to Scala is done using the `asScala` extension method: + | * + | * {{{ + | * scala> List(1,2,3).map(jf2.asScala) + | * res1: List[Int] = List(2, 3, 4) + | * }}} + | */ + |object FunctionConverters extends Priority0FunctionExtensions""".stripMargin + + write(outDir, Artifact("jdk/javaapi/FunctionConverters.scala", sourceFile(".javaapi", javaFunConvs))) + write(outDir, Artifact("jdk/FunctionConverters.scala", sourceFile("", scalaFunConvs))) + write(outDir, Artifact("jdk/FunctionWrappers.scala", sourceFile("", impls.mkString("\n")))) + write(outDir, Artifact("jdk/FunctionExtensions.scala", sourceFile("", defss.map(_.mkString("\n")).mkString("\n\n\n\n")))) + } +} diff --git a/project/JarJar.scala b/project/JarJar.scala deleted file mode 100644 index 3cb9e4cfffa1..000000000000 --- a/project/JarJar.scala +++ /dev/null @@ -1,94 +0,0 @@ -package scala.build - -import org.pantsbuild.jarjar -import org.pantsbuild.jarjar._ -import org.pantsbuild.jarjar.util._ -import scala.collection.JavaConverters._ -import java.util.jar._ -import java.io._ -import sbt._ - -object JarJar { - sealed abstract class JarJarConfig { - def toPatternElement: PatternElement - } - object JarJarConfig { - case class Rule(pattern: String, result: String) extends JarJarConfig { - def 
toPatternElement: PatternElement = { - val rule = new jarjar.Rule - rule.setPattern(pattern) - rule.setResult(result) - rule - } - } - case class Keep(pattern: String) extends JarJarConfig { - def toPatternElement: PatternElement = { - val keep = new jarjar.Keep - keep.setPattern(pattern) - keep - } - } - } - - sealed abstract class Entry { - def name: String - def time: Long - def data: Array[Byte] - } - - case class JarEntryInput(jarFile: JarFile, entry: JarEntry) extends Entry { - def name = entry.getName.replace('\\', '/') - def time = entry.getTime - def data = sbt.IO.readBytes(jarFile.getInputStream(entry)) - } - case class FileInput(base: File, file: File) extends Entry { - def name = file.relativeTo(base).get.getPath.replace('\\', '/') - def time = file.lastModified - def data = sbt.IO.readBytes(file) - } - - private def newMainProcessor(patterns: java.util.List[PatternElement], verbose: Boolean, skipManifest: Boolean): JarProcessor = { - val cls = Class.forName("org.pantsbuild.jarjar.MainProcessor") - val constructor = cls.getConstructor(classOf[java.util.List[_]], java.lang.Boolean.TYPE, java.lang.Boolean.TYPE) - constructor.setAccessible(true) - constructor.newInstance(patterns, Boolean.box(verbose), Boolean.box(skipManifest)).asInstanceOf[JarProcessor] - } - - def apply(in: Iterator[Entry], outdir: File, - config: Seq[JarJarConfig], verbose: Boolean = false): Seq[File] = { - val patterns = config.map(_.toPatternElement).asJava - val processor = newMainProcessor(patterns, verbose, false) - def process(e: Entry): Option[File] = { - val struct = new EntryStruct() - struct.name = e.name - struct.time = e.time - struct.data = e.data - if (processor.process(struct)) { - if (struct.name.endsWith("/")) None - else { - val f = outdir / struct.name - try { - f.getParentFile.mkdirs() - sbt.IO.write(f, struct.data) - } catch { - case ex: Exception => - throw new IOException(s"Failed to write ${e.name} / ${f.getParentFile} / ${f.getParentFile.exists}", ex) - } - 
Some(f) - } - } - else None - } - val processed = in.flatMap(entry => process(entry)).toSet - val getter = processor.getClass.getDeclaredMethod("getExcludes") - getter.setAccessible(true) - val excludes = getter.invoke(processor).asInstanceOf[java.util.Set[String]].asScala - val excluded = excludes.map { name => - val f: File = outdir / name - if(f.exists && !f.delete()) - throw new IOException("Failed to delete excluded file $f") - f - } - (processed -- excluded).toSeq - } -} diff --git a/project/JitWatch.scala b/project/JitWatch.scala index 84037d6067e8..08b2c03eba0d 100644 --- a/project/JitWatch.scala +++ b/project/JitWatch.scala @@ -34,10 +34,10 @@ object JitWatchFilePlugin extends AutoPlugin { // Transitive sources from the projects that contribute to this classpath. val projects: Seq[ProjectRef] = buildDependencies.value.classpathTransitiveRefs(thisProjectRef.value) :+ thisProjectRef.value - val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> ((project / Keys.artifacts) get settingsData.value).getOrElse(Nil))).toMap - val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => ((project / Keys.name) get settingsData.value).getOrElse("")) + val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> (project / Keys.artifacts get settingsData.value).getOrElse(Nil))).toMap + val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => (project / Keys.name get settingsData.value).getOrElse("")) val transitiveSourceDirectories = projects.flatMap { project => - val projectArtifacts: Seq[Artifact] = ((project / Keys.artifacts) get settingsData.value).getOrElse(Nil) + val projectArtifacts: Seq[Artifact] = (project / Keys.artifacts get settingsData.value).getOrElse(Nil) val matching = projectArtifacts.filter(artifacts.contains(_)) val configs = matching.flatMap(artifact => artifact.configurations).distinct val sourceDirectories: Seq[File] = 
configs.flatMap { configRef => @@ -58,7 +58,7 @@ object JitWatchFilePlugin extends AutoPlugin { // Download and add transitive sources from the classpath val classiferArtifacts: Seq[(ModuleID, Artifact, File)] = updateClassifiers.value.configurations.flatMap(_.details.flatMap(_.modules.flatMap(report => report.artifacts.map(x => (report.module, x._1, x._2))))) - val sourceClassiferArtifacts = classiferArtifacts.filter(tuple => tuple._2.classifier == Some("sources") && dependencyModuleIds.contains(tuple._1)) + val sourceClassiferArtifacts = classiferArtifacts.filter(tuple => tuple._2.classifier.contains("sources") && dependencyModuleIds.contains(tuple._1)) val externalSources = sourceClassiferArtifacts.map(_._3) val internalAndExternalSources = sourceDirectories.value ++ (javaHomeSrc +: (transitiveSourceDirectories ++ transitiveSourceDirectories2).distinct) ++ externalSources diff --git a/project/License.scala b/project/License.scala index baa5fded751c..5cba334dc664 100644 --- a/project/License.scala +++ b/project/License.scala @@ -10,9 +10,9 @@ object License extends AutoPlugin { override def projectSettings: Seq[Def.Setting[_]] = List(packageSrc, packageBin, packageDoc) - .map(pkg => (Compile / pkg / mappings) ++= licenseMapping.value) + .map(task => Compile / task / mappings ++= licenseMapping.value) override def buildSettings: Seq[Def.Setting[_]] = Seq( - licenseMapping := List("LICENSE", "NOTICE").map(fn => (baseDirectory.value / fn) -> fn) + licenseMapping := List("LICENSE", "NOTICE").map(fn => (baseDirectory.value / fn) -> fn), ) -} \ No newline at end of file +} diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 4b3690babd0b..e7351aa1b6d5 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,27 +13,68 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.12.19"), + mimaReferenceVersion := Some("2.13.16"), ) val mimaFilters: 
Seq[ProblemFilter] = Seq[ProblemFilter]( - // KEEP: scala.reflect.internal isn't public API + // KEEP: the reflect internal API isn't public API ProblemFilters.exclude[Problem]("scala.reflect.internal.*"), // KEEP: java.util.Enumeration.asIterator only exists in later JDK versions (11 at least). If you build // with JDK 11 and run MiMa it'll complain IteratorWrapper isn't forwards compatible with 2.13.0 - but we // don't publish the artifact built with JDK 11 anyways - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.Wrappers#IteratorWrapper.asIterator"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.asIterator"), // KEEP: when building on a recent JDK, classes implementing `CharSequence` get a mixin forwarder for // the `isEmpty` default method that was added in JDK 15 ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.SeqCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), - // private[scala] member used by Properties and by REPL - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.Properties.consoleIsTerminal"), + // KEEP: make use of CompletionStage#handle to get a better performance than CompletionStage#whenComplete. 
+ ProblemFilters.exclude[MissingTypesProblem]("scala.concurrent.impl.FutureConvertersImpl$P"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.concurrent.impl.FutureConvertersImpl#P.andThen"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.concurrent.impl.FutureConvertersImpl#P.apply"), + ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.concurrent.impl.FutureConvertersImpl#P.andThen"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.concurrent.impl.FutureConvertersImpl#P.accept"), + ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.concurrent.impl.FutureConvertersImpl#P.andThen"), + + // KEEP: the CommonErrors object is not a public API + ProblemFilters.exclude[MissingClassProblem]("scala.collection.generic.CommonErrors"), + ProblemFilters.exclude[MissingClassProblem]("scala.collection.generic.CommonErrors$"), + + // scala/scala#10937 + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.LazyList#LazyBuilder#DeferredState.eval"), + ProblemFilters.exclude[MissingClassProblem](s"scala.collection.immutable.LazyList$$State"), + ProblemFilters.exclude[MissingClassProblem](s"scala.collection.immutable.LazyList$$State$$"), + ProblemFilters.exclude[MissingClassProblem](s"scala.collection.immutable.LazyList$$State$$Cons"), + ProblemFilters.exclude[MissingClassProblem](s"scala.collection.immutable.LazyList$$State$$Empty$$"), + ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.LazyList$EmptyMarker$"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.LazyList#LazyBuilder#DeferredState.eval"), + ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.LazyList$MidEvaluation$"), + ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.LazyList$Uninitialized$"), + + // scala/scala#11004 + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.api.Annotations#AnnotationApi.argIsDefault"), + // A 
new abstract trait method is not binary compatible in principle, but `AnnotationApi` is only implemented by + // `AnnotationInfo`, both of which are in scala-reflect.jar. So this should never leak. + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.api.Annotations#AnnotationApi.argIsDefault"), + + // scala/scala#10976 + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.meta.defaultArg"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.meta.superArg"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.meta.superFwdArg"), + + ProblemFilters.exclude[MissingClassProblem]("scala.collection.IndexedSeqSlidingIterator"), + ProblemFilters.exclude[NewMixinForwarderProblem]("scala.collection.IndexedSeqOps.sliding"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.collection.mutable.ArrayDequeOps.scala$collection$mutable$ArrayDequeOps$$super$sliding"), + + // new jdk 25 method in CharSequence => mixin forwarders + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.getChars"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.getChars"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.StringBuilder.getChars"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.getChars"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.SeqCharSequence.getChars"), ) override val buildSettings = Seq( diff --git a/project/Osgi.scala b/project/Osgi.scala index ea0308992a67..e745872e76de 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -37,7 +37,7 @@ object Osgi { // our little understood OSGi metadata for now. 
"Export-Package" -> "*;version=${ver};-noimport:=true;-split-package:=merge-first", - "Import-Package" -> "scala.*;version=\"${range;[==,=+);${ver}}\",*", + "Import-Package" -> raw"""scala.*;version="$${range;[==,=+);$${ver}}",*""", "Bundle-Version" -> v, "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.8", "-eclipse" -> "false", @@ -62,9 +62,9 @@ object Osgi { bundleTask(headers.value.toMap, jarlist.value, cp, (Compile / packageBin / artifactPath).value, cp ++ licenseFiles, streams.value) }.value, - (Compile / packageBin / packagedArtifact) := (((Compile / packageBin / artifact).value, bundle.value)), + Compile / packageBin / packagedArtifact := (((Compile / packageBin / artifact).value, bundle.value)), // Also create OSGi source bundles: - (Compile / packageSrc / packageOptions) += Package.ManifestAttributes( + Compile / packageSrc / packageOptions += Package.ManifestAttributes( "Bundle-Name" -> (description.value + " Sources"), "Bundle-SymbolicName" -> (bundleSymbolicName.value + ".source"), "Bundle-Version" -> versionProperties.value.osgiVersion, @@ -85,7 +85,7 @@ object Osgi { def resourceDirectoryRef(f: File) = (if (f.getName endsWith ".jar") "@" else "") + f.getAbsolutePath val includeRes = resourceDirectories.filter(_.exists).map(resourceDirectoryRef).mkString(",") - if (!includeRes.isEmpty) builder.setProperty(INCLUDERESOURCE, includeRes) + if (includeRes.nonEmpty) builder.setProperty(INCLUDERESOURCE, includeRes) builder.getProperties.asScala.foreach { case (k, v) => log.debug(s"bnd: $k: $v") } // builder.build is not thread-safe because it uses a static SimpleDateFormat. This ensures // that all calls to builder.build are serialized. 
diff --git a/project/ParserUtil.scala b/project/ParserUtil.scala index bbd9129dbe80..ea921031c89c 100644 --- a/project/ParserUtil.scala +++ b/project/ParserUtil.scala @@ -6,49 +6,37 @@ import sbt.complete.Parsers._ import sbt.complete._ object ParserUtil { - def notStartingWith(parser: Parser[String], c: Char): Parser[String] = parser & not(c ~> any.*, "value cannot start with " + c + ".") - def concat(p: Parser[(String, String)]): Parser[String] = { - p.map(x => x._1 + x._2) - } - + def notStartingWith(parser: Parser[String], c: Char): Parser[String] = parser & not(c ~> any.*, s"value cannot start with $c.") + def concat(p: Parser[(String, String)]): Parser[String] = p.map { case (a, b) => a + b } def Opt(a: Parser[String]) = a.?.map(_.getOrElse("")) val StringBasicNotStartingWithDash = notStartingWith(StringBasic, '-') - val IsDirectoryFilter = new SimpleFileFilter(_.isDirectory) - val JarOrDirectoryParser = FileParser(GlobFilter("*.jar") || IsDirectoryFilter) + val IsDirectoryFilter = new SimpleFileFilter(_.isDirectory) + val JarOrDirectoryParser = FileParser(GlobFilter("*.jar") || IsDirectoryFilter) + def FileParser(fileFilter: FileFilter, dirFilter: FileFilter = AllPassFilter, base: File = file(".")) = { + val childFilter = IsDirectoryFilter && dirFilter || fileFilter + def ensureSuffix(s: String, suffix: String) = if (s.endsWith(suffix)) s else s"$s$suffix" def matching(prefix: String): List[String] = { - val preFile = file(prefix) - val cwd = base - val parent = Option(preFile.getParentFile).getOrElse(cwd) - if (preFile.exists) { - if (preFile.isDirectory) { - preFile.*(IsDirectoryFilter.&&(dirFilter) || fileFilter).get.map(_.getPath).toList - } else { - List(preFile).filter(fileFilter.accept).map(_.getPath) - } + val prefixFile = new File(prefix) + val prefixIsAbsolute = prefixFile.isAbsolute + val preFile = if (prefixIsAbsolute) prefixFile else new File(base, prefix) + val basePrefix = if (prefixIsAbsolute) "" else ensureSuffix(base.getPath, "/") + def 
relativize(p: String) = p.stripPrefix(basePrefix) + def pathOf(f: File) = if (f.isDirectory && !fileFilter.accept(f)) ensureSuffix(f.getPath, "/") else f.getPath + val finder = if (preFile.isDirectory) { + preFile.glob(childFilter) + } else if (preFile.exists()) { + PathFinder(preFile).filter(fileFilter.accept) + } else { + preFile.getParentFile.glob(GlobFilter(s"${preFile.getName}*") && childFilter) } - else if (parent != null) { - def ensureSuffix(s: String, suffix: String) = if (s.endsWith(suffix)) s else s + suffix - def pathOf(f: File): String = { - val f1 = if (preFile.getParentFile == null) f.relativeTo(cwd).getOrElse(f) else f - if (f1.isDirectory && !fileFilter.accept(f1)) ensureSuffix(f1.getPath, "/") else f1.getPath - } - val childFilter = GlobFilter(preFile.name + "*") && ((IsDirectoryFilter && dirFilter) || fileFilter) - val children = parent.*(childFilter).get - children.map(pathOf).toList - } else Nil + finder.get().toList.map(pathOf).map(relativize) } def displayPath = Completions.single(Completion.displayOnly("")) - token(StringBasic, TokenCompletions.fixed((seen, level) => if (seen.isEmpty) displayPath else matching(seen) match { + token(StringBasic, TokenCompletions.fixed((prefix, _) => if (prefix.isEmpty) displayPath else matching(prefix) match { case Nil => displayPath - case x :: Nil => - if (fileFilter.accept(file(x))) - Completions.strict(Set(Completion.tokenDisplay(x.stripPrefix(seen), x))) - else - Completions.strict(Set(Completion.suggestion(x.stripPrefix(seen)))) - case xs => - Completions.strict(xs.map(x => Completion.tokenDisplay(x.stripPrefix(seen), x)).toSet) + case xs => Completions.strict(xs.map(x => Completion.tokenDisplay(x.stripPrefix(prefix), x)).toSet) })).filter(!_.startsWith("-"), x => x) } -} \ No newline at end of file +} diff --git a/project/PartestTestListener.scala b/project/PartestTestListener.scala index 83d1e82aefd8..f7df4ab3f148 100644 --- a/project/PartestTestListener.scala +++ b/project/PartestTestListener.scala 
@@ -1,7 +1,6 @@ package scala.build import java.io.{File, PrintWriter, StringWriter} -import java.util.concurrent.TimeUnit import sbt.testing.{SuiteSelector, TestSelector} import sbt.{JUnitXmlTestsListener, TestEvent, TestResult, TestsListener, _} @@ -47,7 +46,7 @@ class PartestTestListener(target: File) extends TestsListener { e.fullyQualifiedName() } - for ((group, events) <- event.detail.groupBy(groupOf(_))) { + for ((group, events) <- event.detail.groupBy(groupOf)) { val statii = events.map(_.status()) val errorCount = statii.count(errorStatus.contains) val failCount = statii.count(failStatus.contains) @@ -95,7 +94,7 @@ class PartestTestListener(target: File) extends TestsListener { }} val partestTestReports = target / "test-reports" / "partest" - val xmlFile = (partestTestReports / (group + ".xml")) + val xmlFile = partestTestReports / (group + ".xml") xmlFile.getParentFile.mkdirs() scala.xml.XML.save(xmlFile.getAbsolutePath, xml, "UTF-8", true, null) } diff --git a/project/PartestUtil.scala b/project/PartestUtil.scala index 61fec2045066..01a3db2eb27d 100644 --- a/project/PartestUtil.scala +++ b/project/PartestUtil.scala @@ -3,39 +3,41 @@ package scala.build import sbt._ import sbt.complete._, Parser._, Parsers._ +import ParserUtil.Opt + object PartestUtil { private case class TestFiles(srcPath: String, globalBase: File, testBase: File) { - private val testCaseDir = new SimpleFileFilter(f => f.isDirectory && f.listFiles.nonEmpty && !(f.getParentFile / (f.name + ".res")).exists) - private val testCaseFilter = GlobFilter("*.scala") | GlobFilter("*.java") | GlobFilter("*.res") || testCaseDir - private def testCaseFinder = (testBase / srcPath).*(AllPassFilter).*(testCaseFilter) - private val basePaths = allTestCases.map(_._2.split('/').take(3).mkString("/") + "/").distinct - - def allTestCases = testCaseFinder.pair(io.Path.relativeTo(globalBase)) - def basePathExamples = new FixedSetExamples(basePaths) - private def equiv(f1: File, f2: File) = f1.getCanonicalFile 
== f2.getCanonicalFile + val srcDir = testBase / srcPath // mirror of partest.nest.PathSettings#srcDir + + private val testCaseFile = GlobFilter("*.scala") | GlobFilter("*.java") | GlobFilter("*.res") + private val testCaseDir = new SimpleFileFilter(f => f.isDirectory && f.listFiles().nonEmpty && !(f.getParentFile / (f.getName + ".res")).exists()) + private val testCaseFilter = testCaseFile || testCaseDir + private val testCaseFinder = srcDir * AllPassFilter * testCaseFilter + + def allTestCases = testCaseFinder.pair(Path.relativeTo(globalBase)) + def parentChain(f: File): Iterator[File] = - if (f == null || !f.exists) Iterator() - else Iterator(f) ++ (if (f.getParentFile == null) Nil else parentChain(f.getParentFile)) + if (f == null || !f.exists()) Iterator.empty + else Iterator.single(f) ++ Option(f.getParentFile).iterator.flatMap(parentChain) + def isParentOf(parent: File, f2: File, maxDepth: Int) = - parentChain(f2).take(maxDepth).exists(p1 => equiv(p1, parent)) - def isTestCase(f: File) = { - val grandParent = if (f != null && f.getParentFile != null) f.getParentFile.getParentFile else null - grandParent != null && equiv(grandParent, testBase / srcPath) && testCaseFilter.accept(f) - } - def mayContainTestCase(f: File) = { - isParentOf(testBase / srcPath, f, 2) || isParentOf(f, testBase / srcPath, Int.MaxValue) - } + parentChain(f2).take(maxDepth).exists(equivCanon(_, parent)) + + def isTestCase(f: File) = testCaseFilter.accept(f) && parentChain(f).slice(2, 3).exists(equivCanon(_, srcDir)) + def mayContainTestCase(f: File) = isParentOf(srcDir, f, 2) || isParentOf(f, srcDir, Int.MaxValue) + def equivCanon(f1: File, f2: File) = f1.getCanonicalFile == f2.getCanonicalFile } - def testFilePaths(globalBase: File, testBase: File): Seq[java.io.File] = - (new TestFiles("files", globalBase, testBase)).allTestCases.map(_._1) + def testFilePaths(globalBase: File, testBase: File): Seq[File] = + TestFiles("files", globalBase, testBase).allTestCases.map(_._1) /** A parser 
for the custom `partest` command */ def partestParser(globalBase: File, testBase: File): Parser[String] = { val knownUnaryOptions = List( "--pos", "--neg", "--run", "--jvm", "--res", "--ant", "--scalap", "--specialized", "--instrumented", "--presentation", "--failed", "--update-check", "--no-exec", - "--show-diff", "--show-log", "--verbose", "--terse", "--debug", "--version", "--help") + "--show-diff", "--show-log", "--verbose", "--terse", "--debug", "--realeasy", "--branch", "--version", + "--help") val srcPathOption = "--srcpath" val compilerPathOption = "--compilerpath" val grepOption = "--grep" @@ -43,15 +45,14 @@ object PartestUtil { // HACK: if we parse `--srcpath scaladoc`, we overwrite this var. The parser for test file paths // then lazily creates the examples based on the current value. // TODO is there a cleaner way to do this with sbt's parser infrastructure? - var srcPath = "files" - var _testFiles: TestFiles = null - def testFiles = { - if (_testFiles == null || _testFiles.srcPath != srcPath) _testFiles = new TestFiles(srcPath, globalBase, testBase) - _testFiles - } - val TestPathParser = ParserUtil.FileParser( + var testFiles = TestFiles("files", globalBase, testBase) + def mkTestPathParser(base: File) = ParserUtil.FileParser( new SimpleFileFilter(f => testFiles.isTestCase(f)), - new SimpleFileFilter(f => testFiles.mayContainTestCase(f)), globalBase) + new SimpleFileFilter(f => testFiles.mayContainTestCase(f)), + base, + ) + val TestPath = mkTestPathParser(globalBase) + val KindTestPath = mkTestPathParser(testFiles.srcDir) // allow `--grep "is unchecked" | --grep *t123*, in the spirit of ./bin/partest-ack // superset of the --grep built into partest itself. 
@@ -71,7 +72,7 @@ object PartestUtil { val next = prefix + suffix testFile.getParentFile / next } - val assocFiles = List(".check", ".flags").map(sibling) + val assocFiles = List(".check").map(sibling) val sourceFiles = if (testFile.isFile) List(testFile) else testFile.**(AllPassFilter).get.toList val allFiles = testFile :: assocFiles ::: sourceFiles allFiles.exists(f => f.isFile && re.findFirstIn(IO.read(f)).isDefined) @@ -88,7 +89,7 @@ object PartestUtil { (matchingFileContent ++ matchingFileName).map(_._2).distinct.sorted } - val completion = Completions.strict(Set("", " (for source, flags or checkfile contents)").map(s => Completion.displayOnly(s))) + val completion = Completions.strict(Set("", " (for source or checkfile contents)").map(s => Completion.displayOnly(s))) val tokenCompletion = TokenCompletions.fixed((seen, level) => completion) val globOrPattern = StringBasic.map(expandGrep).flatMap { @@ -100,18 +101,18 @@ object PartestUtil { val SrcPath = ((token(srcPathOption) <~ Space) ~ token(StringBasic.examples(Set("files", "scaladoc", "async")))) map { case opt ~ path => - srcPath = path + testFiles = TestFiles(path, globalBase, testBase) opt + " " + path } - val CompilerPath = ((token(compilerPathOption) <~ Space) ~ token(NotSpace)) map { - case opt ~ path => - opt + " " + path - } + val CompilerPath = ((token(compilerPathOption) <~ Space) ~ token(NotSpace)) map { + case opt ~ path => + opt + " " + path + } - val ScalacOptsParser = (token("-Dpartest.scalac_opts=") ~ token(NotSpace)) map { case opt ~ v => opt + v } + val ScalacOpts = (token("-Dpartest.scalac_opts=") ~ token(NotSpace)) map { case opt ~ v => opt + v } - val P = oneOf(knownUnaryOptions.map(x => token(x))) | SrcPath | CompilerPath | TestPathParser | Grep | ScalacOptsParser - (Space ~> repsep(P, oneOrMore(Space))).map(_.mkString(" ")).?.map(_.getOrElse("")) + val P = oneOf(knownUnaryOptions.map(x => token(x))) | SrcPath | CompilerPath | TestPath | KindTestPath | Grep | ScalacOpts + Opt((Space 
~> repsep(P, oneOrMore(Space))).map(_.mkString(" "))) } } diff --git a/project/Quiet.scala b/project/Quiet.scala index 5e9f37a717a8..6201105008c5 100644 --- a/project/Quiet.scala +++ b/project/Quiet.scala @@ -2,7 +2,6 @@ package scala.build import sbt._ import Keys._ -import java.util.function.Supplier object Quiet { // Workaround sbt issue described: diff --git a/project/SavedLogs.scala b/project/SavedLogs.scala new file mode 100644 index 000000000000..4ec335f4b2bf --- /dev/null +++ b/project/SavedLogs.scala @@ -0,0 +1,52 @@ +package scala.build + +import java.io.{ByteArrayOutputStream, PrintStream} + +import sbt._ +import Keys._ +import sbt.internal.util.ConsoleAppender +import scala.collection.mutable + +/** Save MiMa logs so they don't get lost in lots of debug output */ +object SavedLogs { + val savedLogs = new mutable.HashMap[String, ByteArrayOutputStream] + + val showSavedLogs = TaskKey[Unit]("showSavedLogs", "Print all saved logs to the console") + val clearSavedLogs = TaskKey[Unit]("clearSavedLogs", "Clear all saved logs") + + def showSavedLogsImpl(println: String => Unit): Unit = synchronized { + savedLogs.foreach { + case (k, os) => + val log = new String(os.toByteArray) + if (log.nonEmpty) { + println(s"Saved log of $k:") + println(log) + } + } + } + + def clearSavedLogsImpl(): Unit = synchronized { savedLogs.clear() } + + lazy val settings = Seq[Setting[_]]( + (Global / extraAppenders) := { + val previous = (Global / extraAppenders).value + (key: ScopedKey[_]) => { + key.scope match { + case Scope(Select(ProjectRef(_, p)), _, Select(t), _) if t.label == "mimaReportBinaryIssues" => + val os = new ByteArrayOutputStream + val a = ConsoleAppender(new PrintStream(os, true)) + SavedLogs.synchronized { savedLogs.put(s"$p/${t.label}", os) } + a +: previous(key) + case _ => previous(key) + } + } + }, + + showSavedLogs := { + val log = streams.value.log + showSavedLogsImpl(s => log.info(s)) + }, + + clearSavedLogs := { clearSavedLogsImpl() } + ) +} diff --git 
a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index 91c10cd0921d..d93f777efe55 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -25,12 +25,12 @@ object ScalaOptionParser { def ChoiceSetting(name: String, choices: List[String]): Parser[String] = concat(token(concat(name ~ ":")) ~ token(StringBasic.examples(choices: _*)).map(_.mkString)) def MultiChoiceSetting(name: String, choices: List[String]): Parser[String] = - concat(token(concat(name ~ ":")) ~ rep1sep(token(StringBasic.examples(choices: _*)), token(",")).map(_.mkString)) + concat(token(concat(name ~ ":")) ~ rep1sep(token(StringBasic.examples(choices: _*)), token(",")).map(_.mkString(","))) def PathSetting(name: String): Parser[String] = { - concat(concat(token(name) ~ Space.string) ~ rep1sep(JarOrDirectoryParser.filter(!_.contains(":"), x => x), token(java.io.File.pathSeparator)).map(_.mkString)) + concat(concat(token(name) ~ Space.string) ~ rep1sep(JarOrDirectoryParser.filter(!_.contains(":"), x => x), token(java.io.File.pathSeparator)).map(_.mkString(java.io.File.pathSeparator))) } def FileSetting(name: String): Parser[String] = { - concat(concat(token(name) ~ Space.string) ~ rep1sep(JarOrDirectoryParser.filter(!_.contains(":"), x => x), token(java.io.File.pathSeparator)).map(_.mkString)) + concat(concat(token(name) ~ Space.string) ~ rep1sep(JarOrDirectoryParser.filter(!_.contains(":"), x => x), token(java.io.File.pathSeparator)).map(_.mkString(java.io.File.pathSeparator))) } val Phase = token(NotSpace.examples(phases: _*)) def PhaseSettingParser(name: String): Parser[String] = { @@ -81,25 +81,35 @@ object ScalaOptionParser { P <~ token(OptSpace) } - // TODO retrieve this data programmatically, ala https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala - private def booleanSettingNames = List("-X", "-Xasync", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", 
"-Xfull-lubs", "-Xfuture", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-implicits", "-Xlog-reflective-calls", - "-Xno-forwarders", "-Xno-patmat-analysis", "-Xno-uescape", "-Xnojline", "-Xprint-pos", "-Xprint-types", "-Xprompt", "-Xresident", "-Xshow-phases", "-Xstrict-inference", "-Xverify", "-Y", - "-Ybreak-cycles", "-Ydebug", "-Ycompact-trees", "-YdisableFlatCpCaching", "-Ydoc-debug", - "-Yide-debug", "-Yinfer-argument-types", - "-Yissue-debug", "-Ylog-classpath", "-Ymacro-debug-lite", "-Ymacro-debug-verbose", "-Ymacro-no-expand", - "-Yno-completion", "-Yno-generic-signatures", "-Yno-imports", "-Yno-predef", - "-Yoverride-objects", "-Yoverride-vars", "-Ypatmat-debug", "-Yno-adapted-args", "-Ypartial-unification", "-Ypos-debug", "-Ypresentation-debug", + // TODO retrieve these data programmatically, ala https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala + private def booleanSettingNames = List("-X", "-Xasync", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-reflective-calls", + "-Xnojline", "-Xno-forwarders", "-Xno-patmat-analysis", "-Xnon-strict-patmat-analysis", "-Xprint-pos", "-Xprint-types", "-Xprompt", "-Xresident", "-Xshow-phases", "-Xverify", "-Y", + "-Ybreak-cycles", "-Ydebug", "-Ydebug-type-error", "-Ycompact-trees", "-YdisableFlatCpCaching", "-Ydoc-debug", + "-Yide-debug", + "-Ylog-classpath", "-Ymacro-debug-lite", "-Ymacro-debug-verbose", "-Ymacro-no-expand", + "-Yno-completion", "-Yno-generic-signatures", "-Yno-imports", "-Yno-predef", "-Ymacro-annotations", + "-Ypatmat-debug", "-Yno-adapted-args", "-Ypos-debug", "-Ypresentation-debug", "-Ypresentation-strict", "-Ypresentation-verbose", "-Yquasiquote-debug", "-Yrangepos", "-Yreify-copypaste", "-Yreify-debug", "-Yrepl-class-based", "-Yrepl-sync", "-Yshow-member-pos", "-Yshow-symkinds", 
"-Yshow-symowners", "-Yshow-syms", "-Yshow-trees", "-Yshow-trees-compact", "-Yshow-trees-stringified", "-Ytyper-debug", - "-Ywarn-adapted-args", "-Ywarn-dead-code", "-Ywarn-inaccessible", "-Ywarn-infer-any", "-Ywarn-nullary-override", "-Ywarn-nullary-unit", "-Ywarn-numeric-widen", "-Ywarn-unused-import", "-Ywarn-value-discard", + "-Ywarn-dead-code", "-Ywarn-numeric-widen", "-Ywarn-value-discard", "-Ywarn-extra-implicit", "-Ywarn-self-implicit", + "-V", + "-Vclasspath", "-Vdebug", "-Vdebug-tasty", "-Vdebug-type-error", "-Vdoc", "-Vfree-terms", "-Vfree-types", + "-Vhot-statistics", "-Vide", "-Vimplicit-conversions", "-Vimplicits", "-Vissue", + "-Vmacro", "-Vmacro-lite", "-Vpatmat", "-Vphases", "-Vpos", "-Vprint-pos", + "-Vprint-types", "-Vquasiquote", "-Vreflective-calls", "-Vreify", + "-Vshow-member-pos", "-Vshow-symkinds", "-Vshow-symowners", "-Vsymbols", "-Vtype-diffs", "-Vtyper", + "-W", + "-Wdead-code", "-Werror", "-Wextra-implicit", "-Wnumeric-widen", "-Woctal-literal", + "-Wvalue-discard", "-Wself-implicit", "-deprecation", "-explaintypes", "-feature", "-help", "-no-specialization", "-nobootcp", "-nowarn", "-optimise", "-print", "-unchecked", "-uniqid", "-usejavacp", "-usemanifestcp", "-verbose", "-version") - private def stringSettingNames = List("-Xgenerate-phase-graph", "-Xmain-class", "-Xpluginsdir", "-Xshow-class", "-Xshow-object", "-Xsource-reader", "-Ydump-classes", "-Ygen-asmp", - "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript") - private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp") + private def stringSettingNames = List("-Xjline", "-Xgenerate-phase-graph", "-Xmain-class", "-Xpluginsdir", "-Xshow-class", "-Xshow-object", "-Vshow-object", "-Xsource-reader", "-Ydump-classes", "-Ygen-asmp", + "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", 
"-Xscript", "-Vinline", "-Vopt", "-Vshow-class", "-Vshow-member-pos") + private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp", "-Vprint-args") private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "fields", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "jvm", "terminal") - private val phaseSettings = List("-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint", "-Yvalidate-pos") - private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require", "-Ywarn-unused") - private def intSettingNames = List("-Xmax-classfile-name", "-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion") + private val phaseSettings = List("-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", + "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint", "-Yvalidate-pos", "-Vbrowse", "-Vlog", "-Vprint", "-Vshow") + private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require", "-Ywarn-unused", "-opt-inline-from") + private def intSettingNames = List("-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion") private def choiceSettingNames = Map[String, List[String]]( "-YclasspathImpl" -> List("flat", "recursive"), "-Ydelambdafy" -> List("inline", "method"), @@ -108,10 +118,14 @@ object ScalaOptionParser { "-g" -> List("line", "none", "notailcails", "source", "vars"), "-target" -> targetSettingNames) private def multiChoiceSettingNames = Map[String, List[String]]( - "-Xlint" -> List("adapted-args", "nullary-unit", "inaccessible", "nullary-override", "infer-any", "missing-interpolator", "doc-detached", "private-shadow", "type-parameter-shadow", 
"poly-implicit-overload", "option-implicit", "delayedinit-select", "by-name-right-associative", "package-object-classes", "unsound-match", "stars-align"), + "-Xlint" -> List("adapted-args", "nullary-unit", "inaccessible", "nullary-override", "infer-any", "missing-interpolator", "doc-detached", "private-shadow", "type-parameter-shadow", "poly-implicit-overload", "option-implicit", "delayedinit-select", "package-object-classes", "stars-align", "strict-unsealed-patmat", "constant", "unused", "eta-zero"), "-language" -> List("help", "_", "dynamics", "postfixOps", "reflectiveCalls", "implicitConversions", "higherKinds", "existentials", "experimental.macros"), - "-opt" -> List("l:none", "l:default", "l:method", "l:project", "l:classpath", "unreachable-code", "simplify-jumps", "empty-line-numbers", "empty-labels", "compact-locals", "nullness-tracking", "closure-elimination", "inline-project", "inline-global"), - "-Ystatistics" -> List("parser", "typer", "patmat", "erasure", "cleanup", "jvm") + "-opt" -> List("unreachable-code", "simplify-jumps", "compact-locals", "copy-propagation", "redundant-casts", "box-unbox", "nullness-tracking", "closure-invocations" , "allow-skip-core-module-init", "assume-modules-non-null", "allow-skip-class-loading", "inline", "l:none", "l:default", "l:method", "l:inline", "l:project", "l:classpath"), + "-Ywarn-unused" -> List("imports", "patvars", "privates", "locals", "explicits", "implicits", "params"), + "-Ywarn-macros" -> List("none", "before", "after", "both"), + "-Ystatistics" -> List("parser", "typer", "patmat", "erasure", "cleanup", "jvm"), + "-Vstatistics" -> List("parser", "typer", "patmat", "erasure", "cleanup", "jvm"), + "-Wunused" -> List("imports", "patvars", "privates", "locals", "explicits", "implicits", "nowarn", "linted") ) private def scalaVersionSettings = List("-Xmigration", "-Xsource") @@ -119,12 +133,12 @@ object ScalaOptionParser { private def scalaStringSettingNames = List("-i", "-e") private def scalaBooleanSettingNames 
= List("-nc", "-save") - private def scalaDocBooleanSettingNames = List("-Yuse-stupid-types", "-implicits", "-implicits-debug", "-implicits-show-all", "-implicits-sound-shadowing", "-implicits-hide", "-author", "-diagrams", "-diagrams-debug", "-raw-output", "-no-prefixes", "-no-link-warnings", "-expand-all-types", "-groups") + private def scalaDocBooleanSettingNames = List("-implicits", "-implicits-debug", "-implicits-show-all", "-implicits-sound-shadowing", "-implicits-hide", "-author", "-diagrams", "-diagrams-debug", "-raw-output", "-no-prefixes", "-no-link-warnings", "-expand-all-types", "-groups") private def scalaDocIntSettingNames = List("-diagrams-max-classes", "-diagrams-max-implicits", "-diagrams-dot-timeout", "-diagrams-dot-restart") private def scalaDocChoiceSettingNames = Map("-doc-format" -> List("html")) - private def scaladocStringSettingNames = List("-doc-title", "-doc-version", "-doc-footer", "-doc-no-compile", "-doc-source-url", "-doc-generator", "-skip-packages") + private def scaladocStringSettingNames = List("-doc-title", "-doc-version", "-doc-footer", "-doc-no-compile", "-doc-source-url", "-doc-generator", "-skip-packages", "-jdk-api-doc-base") private def scaladocPathSettingNames = List("-doc-root-content", "-diagrams-dot-path") private def scaladocMultiStringSettingNames = List("-doc-external-doc") - private val targetSettingNames = (5 to 23).flatMap(v => s"$v" :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil).toList + private val targetSettingNames = (8 to 25).map(_.toString).flatMap(v => v :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil).toList } diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala index ace547c6407d..146c81b21d07 100644 --- a/project/ScalaTool.scala +++ b/project/ScalaTool.scala @@ -41,7 +41,7 @@ case class ScalaTool(mainClass: String, def writeScript(file: String, platform: String, rootDir: File, outDir: File): File = { val forWindows = platform match { case "windows" => true case _ => false } - val templatePath = 
s"scala/tools/ant/templates/tool-$platform.tmpl" + val templatePath = s"templates/tool-$platform.tmpl" val suffix = if(forWindows) ".bat" else "" val scriptFile = outDir / s"$file$suffix" val patched = patchedToolScript(IO.read(rootDir / templatePath).replace("\r", ""), forWindows) diff --git a/project/ScaladocSettings.scala b/project/ScaladocSettings.scala index 27424d40be5f..08d1e79882de 100644 --- a/project/ScaladocSettings.scala +++ b/project/ScaladocSettings.scala @@ -1,7 +1,7 @@ package scala.build import sbt._ -import sbt.Keys.{ artifact, dependencyClasspath, moduleID, resourceManaged } +import sbt.Keys.{ artifact, externalDependencyClasspath, moduleID, resourceManaged } object ScaladocSettings { @@ -15,12 +15,14 @@ object ScaladocSettings { s.get(artifact.key).isDefined && s.get(moduleID.key).exists(_.organization == "org.webjars") val dest = (resourceManaged.value / "webjars").getAbsoluteFile IO.createDirectory(dest) - val classpaths = (Compile / dependencyClasspath).value + // externalDependencyClasspath (not dependencyClasspath) to avoid compiling + // upstream projects (library, reflect, compiler) on bsp `buildTarget/resources` + val classpaths = (Compile / externalDependencyClasspath).value val files: Seq[File] = classpaths.filter(isWebjar).flatMap { classpathEntry => val jarFile = classpathEntry.data IO.unzip(jarFile, dest) } - (files ** "*.min.js").get + (files ** "*.min.js").get() } } diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 8716be837e4a..87f046ccbcb4 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -4,6 +4,7 @@ import java.nio.file.Paths import sbt._ import Keys._ +import sbt.complete.Parser._ import sbt.complete.Parsers._ import BuildSettings.autoImport._ @@ -26,10 +27,9 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. 
*/ def setupPublishCoreNonOpt = setup("setupPublishCoreNonOpt") { args => Seq( - (Global / baseVersionSuffix) := "SHA-SNAPSHOT" - ) ++ (args match { - case Seq(url) => publishTarget(url) - case Nil => Nil + Global / baseVersionSuffix := "SHA-SNAPSHOT" + ) ++ (args flatMap { + case url => publishTarget(url) }) ++ noDocs } @@ -37,10 +37,9 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupPublishCore = setup("setupPublishCore") { args => Seq( - (Global / baseVersionSuffix) := "SHA-SNAPSHOT" - ) ++ (args match { - case Seq(url) => publishTarget(url) - case Nil => Nil + Global / baseVersionSuffix := "SHA-SNAPSHOT", + ) ++ (args flatMap { + case url => publishTarget(url) }) ++ noDocs ++ enableOptimizer } @@ -48,10 +47,12 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupValidateTest = setup("setupValidateTest") { args => Seq( + // include sha to prevent multiple builds running on the same jenkins worker from overriding each other + // sbtTest/scripted uses publishLocal + Global / baseVersionSuffix := "SHA-TEST-SNAPSHOT", LocalProject("test") / IntegrationTest / testOptions ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) - ) ++ (args match { - case Seq(url) => Seq((Global / resolvers) += "scala-pr" at url) - case Nil => Nil + ) ++ (args flatMap { + case url => Seq(Global / resolvers += "scala-pr" at url) }) ++ enableOptimizer } @@ -61,8 +62,8 @@ object ScriptCommands { def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - (Global / baseVersion) := ver, - (Global / baseVersionSuffix) := "SPLIT" + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT" ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } @@ -72,9 +73,9 @@ object ScriptCommands { def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(fileOrUrl, ver) => val url = 
fileToUrl(fileOrUrl) Seq( - (Global / baseVersion) := ver, - (Global / baseVersionSuffix) := "SPLIT", - (Global / resolvers) += "scala-pr" at url + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT", + Global / resolvers += "scala-pr" at url ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } @@ -88,9 +89,9 @@ object ScriptCommands { val targetUrl = fileToUrl(targetFileOrUrl) val resolverUrl = fileToUrl(resolverFileOrUrl) Seq( - (Global / baseVersion) := ver, - (Global / baseVersionSuffix) := "SPLIT", - (Global / resolvers) += "scala-pr" at resolverUrl, + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT", + Global / resolvers += "scala-pr" at resolverUrl, LocalProject("test") / IntegrationTest / testOptions ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) ) ++ publishTarget(targetUrl) ++ enableOptimizer } @@ -102,15 +103,15 @@ object ScriptCommands { def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - (Global / baseVersion) := ver, - (Global / baseVersionSuffix) := "SPLIT", - (Global / resolvers) += "scala-pr" at url, - (Global / publishTo) := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - (Global / credentials) ++= { + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT", + Global / resolvers += "scala-pr" at url, + Global / publishTo := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), + Global / credentials ++= { val user = env("SONA_USER") val pass = env("SONA_PASS") if (user != "" && pass != "") - List(Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass)) + List(Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass)) else Nil } // pgpSigningKey and pgpPassphrase are set externally by travis / the bootstrap script, as the sbt-pgp plugin is not enabled by default @@ 
-121,12 +122,17 @@ object ScriptCommands { /** For local dev: sets `scalaVersion` to the version in `/buildcharacter.properties` or the given arg. * Running `reload` will re-read the build files, resetting `scalaVersion`. */ - def restarr = Command("restarr")(_ => (Space ~> StringBasic).?) { (state, s) => - val newVersion = s.getOrElse(readVersionFromPropsFile(state)) - val x = Project.extract(state) - val sv = x.get(Global / scalaVersion) - state.log.info(s"Re-STARR'ing: setting scalaVersion from $sv to $newVersion (`reload` to undo)") - x.appendWithSession(Global / scalaVersion := newVersion, state) // don't use version.value or it'll be a wrong, new value + def restarr = Command("restarr")(_ => (Space ~> token(StringBasic, "scalaVersion")).?) { (state, argSv) => + val x = Project.extract(state) + val oldSv = x.get(Global / scalaVersion) + val newSv = argSv.getOrElse(readVersionFromPropsFile(state)) + state.log.info(s"Re-STARR'ing: setting scalaVersion from $oldSv to $newSv (`reload` to undo; IntelliJ still uses $oldSv)") + val settings = Def.settings( + Global / scalaVersion := newSv, // don't use version.value or it'll be a wrong, new value + ThisBuild / target := (ThisBuild / baseDirectory).value / "target-restarr", + ThisBuild / buildDirectory := (ThisBuild / baseDirectory).value / "build-restarr", + ) + x.appendWithSession(settings, state) } /** For local dev: publishes locally (without optimizing) & then sets the new `scalaVersion`. 
@@ -140,7 +146,10 @@ object ScriptCommands { } private def readVersionFromPropsFile(state: State): String = { - val props = readProps(file("buildcharacter.properties")) + val propsFile = file("buildcharacter.properties") + if (!propsFile.exists()) + throw new MessageOnlyException("No buildcharacter.properties found - try restarrFull") + val props = readProps(propsFile) val newVersion = props("maven.version.number") val fullVersion = props("version.number") state.log.info(s"Read STARR version from buildcharacter.properties: $newVersion (full version: $fullVersion)") @@ -151,7 +160,8 @@ object ScriptCommands { Project.extract(state).appendWithSession(f(seq), state) } - val enableOptimizer = Seq( + private[this] val enableOptimizer = Seq( + //ThisBuild / Compile / scalacOptions ++= Seq("-opt:inline:scala/**") ThisBuild / Compile / scalacOptions ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") ) @@ -164,11 +174,11 @@ object ScriptCommands { val url2 = if(url.startsWith("file:")) url else url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis Seq( - (Global / publishTo) := Some("scala-pr-publish" at url2), - (Global / credentials) ++= { + Global / publishTo := Some("scala-pr-publish" at url2), + Global / credentials ++= { val pass = env("PRIVATE_REPO_PASS") if (pass != "") - List(Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS"))) + List(Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", pass)) else Nil } ) diff --git a/project/TestJDeps.scala b/project/TestJDeps.scala new file mode 100644 index 000000000000..4d76b43d7105 --- /dev/null +++ b/project/TestJDeps.scala @@ -0,0 +1,29 @@ +package scala.build + +import sbt._, Keys._ +import scala.util.Properties.isJavaAtLeast + +object TestJDeps { + val testJDepsImpl: Def.Initialize[Task[Unit]] = Def.task { + val libraryJar = (LocalProject("library") / Compile / packageBin).value + val reflectJar = (LocalProject("reflect") / Compile / 
packageBin).value + val log = streams.value.log + // in JDK 22, the already-deprecated `-P` option to jdeps was removed, + // so we can't do the test. it's fine -- it will be a long, long time + // (probably never) before Scala 2's minimum JVM version is 22+ + if (isJavaAtLeast("22")) + log.info("can't test jdeps on JDK 22+") + else { + // jdeps -s -P build/pack/lib/scala-{library,reflect}.jar | grep -v build/pack | perl -pe 's/.*\((.*)\)$/$1/' | sort -u + val jdepsOut = scala.sys.process.Process("jdeps", Seq("-s", "-P", libraryJar.getPath, reflectJar.getPath)).lineStream + + val profilePart = ".*\\((.*)\\)$".r + val profiles = jdepsOut.collect { + case profilePart(profile) => profile + }.toSet + + if (profiles != Set("compact1")) + throw new RuntimeException(jdepsOut.mkString("Detected dependency outside of compact1:\n", "\n", "")) + } + } +} diff --git a/project/TestJarSize.scala b/project/TestJarSize.scala new file mode 100644 index 000000000000..ffe5fb4c7766 --- /dev/null +++ b/project/TestJarSize.scala @@ -0,0 +1,34 @@ +package scala.build + +import sbt._, Keys._ + +object TestJarSize { + final private case class JarSize(currentBytes: Long, errorThreshold: Double, warnThreshold: Double) + + private val libraryJarSize = JarSize(5926587L, 1.03, 1.015) + private val reflectJarSize = JarSize(3814060L, 1.03, 1.015) + + val testJarSizeImpl: Def.Initialize[Task[Unit]] = Def.task { + Def.unit(testJarSize1("library", libraryJarSize).value) + Def.unit(testJarSize1("reflect", reflectJarSize).value) + } + + private def testJarSize1(projectId: String, jarSize: JarSize): Def.Initialize[Task[Unit]] = Def.task { + import jarSize._ + val log = state.value.log + val jar = (LocalProject(projectId) / Compile / packageBin).value + val actualBytes = jar.length() + if (actualBytes > (currentBytes * errorThreshold)) { + fail(s"The $projectId jar is too big: $actualBytes bytes.") + } else if (actualBytes > (currentBytes * warnThreshold)) { + val percent = (actualBytes - 
currentBytes).toDouble / currentBytes.toDouble + log.warn(s"The $projectId jar is getting too big: $actualBytes bytes or $percent% larger.") + } + } + + private def fail(message: String): Nothing = { + val fail = new MessageOnlyException(message) + fail.setStackTrace(new Array[StackTraceElement](0)) + throw fail + } +} diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index c47a9bf3d7ff..bbb94bfa77eb 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -7,14 +7,13 @@ import java.util.{Date, Locale, Properties, TimeZone} import java.io.{File, FileInputStream, StringWriter} import java.text.SimpleDateFormat import java.time.Instant -import java.time.format.DateTimeFormatter -import java.time.temporal.{TemporalAccessor, TemporalQueries, TemporalQuery} +import java.time.format.DateTimeFormatter.ISO_DATE_TIME import scala.collection.JavaConverters._ import BuildSettings.autoImport._ object VersionUtil { lazy val copyrightString = settingKey[String]("Copyright string.") - lazy val shellWelcomeString = settingKey[String]("Shell welcome banner string.") + lazy val shellBannerString = settingKey[String]("Shell welcome banner string.") lazy val versionProperties = settingKey[Versions]("Version properties.") lazy val gitProperties = settingKey[GitProperties]("Current git information") lazy val buildCharacterPropertiesFile = settingKey[File]("The file which gets generated by generateBuildCharacterPropertiesFile") @@ -24,20 +23,20 @@ object VersionUtil { lazy val globalVersionSettings = Seq[Setting[_]]( // Set the version properties globally (they are the same for all projects) - (Global / versionProperties) := versionPropertiesImpl.value, + Global / versionProperties := versionPropertiesImpl.value, gitProperties := gitPropertiesImpl.value, - (Global / version) := versionProperties.value.mavenVersion + Global / version := versionProperties.value.mavenVersion ) lazy val generatePropertiesFileSettings = Seq[Setting[_]]( - copyrightString := 
"Copyright 2002-2024, LAMP/EPFL and Lightbend, Inc.", - shellWelcomeString := """ + copyrightString := "Copyright 2002-2025, LAMP/EPFL and Lightbend, Inc. dba Akka", + shellBannerString := """ | ________ ___ / / ___ | / __/ __// _ | / / / _ | | __\ \/ /__/ __ |/ /__/ __ | | /____/\___/_/ |_/____/_/ | | - | |/ %s""".stripMargin.linesIterator.drop(1).map(s => s"${ "%n" }${ s }").mkString, - (Compile / resourceGenerators) += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, + | |/ %s""".stripMargin.linesIterator.mkString("%n"), + Compile / resourceGenerators += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value ) @@ -69,8 +68,8 @@ object VersionUtil { val (dateObj, sha) = { try { // Use JGit to get the commit date and SHA - import org.eclipse.jgit.storage.file.FileRepositoryBuilder import org.eclipse.jgit.revwalk.RevWalk + import org.eclipse.jgit.storage.file.FileRepositoryBuilder val db = new FileRepositoryBuilder().findGitDir.build val head = db.resolve("HEAD") if (head eq null) { @@ -79,9 +78,7 @@ object VersionUtil { // Workaround lack of git worktree support in JGit https://bugs.eclipse.org/bugs/show_bug.cgi?id=477475 val sha = List("git", "rev-parse", "HEAD").!!.trim val commitDateIso = List("git", "log", "-1", "--format=%cI", "HEAD").!!.trim - val date = java.util.Date.from(DateTimeFormatter.ISO_DATE_TIME.parse(commitDateIso, new TemporalQuery[Instant] { - override def queryFrom(temporal: TemporalAccessor): Instant = Instant.from(temporal) - })) + val date = Date.from(ISO_DATE_TIME.parse(commitDateIso, Instant.from _)) (date, sha.substring(0, 7)) } catch { case ex: Exception => @@ -130,7 +127,7 @@ object VersionUtil { val (base, suffix) = { val (b, s) = (baseVersion.value, baseVersionSuffix.value) if(s == "SPLIT") { - val split = """([\w+\.]+)(-[\w+\.-]+)??""".r + val split = """([\w+.]+)(-[\w+.-]+)??""".r val split(b2, sOrNull) = b (b2, 
Option(sOrNull).map(_.drop(1)).getOrElse("")) } else (b, s) @@ -145,11 +142,12 @@ object VersionUtil { } val (canonicalV, mavenSuffix, osgiV, release) = suffix match { - case "SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-SNAPSHOT", s"$base.v$date-$sha", false) - case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-$sha-SNAPSHOT", s"$base.v$date-$sha", false) - case "SHA" => (s"$base-$sha", s"-$cross-$sha", s"$base.v$date-$sha", false) - case "" => (s"$base", "", s"$base.v$date-VFINAL-$sha", true) - case _ => (s"$base-$suffix", s"-$suffix", s"$base.v$date-$suffix-$sha", true) + case "SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-SNAPSHOT", s"$base.v$date-$sha", false) + case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-$sha-SNAPSHOT", s"$base.v$date-$sha", false) + case "SHA-TEST-SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-$sha-TEST-SNAPSHOT", s"$base.v$date-$sha", false) + case "SHA" => (s"$base-$sha", s"-$cross-$sha", s"$base.v$date-$sha", false) + case "" => (s"$base", "", s"$base.v$date-VFINAL-$sha", true) + case _ => (s"$base-$suffix", s"-$suffix", s"$base.v$date-$suffix-$sha", true) } @@ -159,7 +157,7 @@ object VersionUtil { private lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task { writeProps(versionProperties.value.toMap ++ Seq( "copyright.string" -> copyrightString.value, - "shell.welcome" -> shellWelcomeString.value + "shell.banner" -> shellBannerString.value ), (Compile / resourceManaged).value / s"${thisProject.value.id}.properties") } @@ -201,17 +199,4 @@ object VersionUtil { val versionProps = readProps(file("versions.properties")) versionProps.map { case (k, v) => (k, sys.props.getOrElse(k, v)) } // allow sys props to override versions.properties } - - /** Get a subproject version number from `versionProps` */ - def versionNumber(name: String): String = - versionProps(s"$name.version.number") - - /** Build a dependency to a Scala module with the given group and artifact ID */ - def scalaDep(group: String, 
artifact: String, versionProp: String = null, scope: String = null, compatibility: String = "binary") = { - val vp = if(versionProp eq null) artifact else versionProp - val m = group % (artifact + "_" + versionProps(s"scala.$compatibility.version")) % versionNumber(vp) - val m2 = if(scope eq null) m else m % scope - // exclusion of the scala-library transitive dependency avoids eviction warnings during `update`: - m2.exclude("org.scala-lang", "*") - } } diff --git a/project/build.properties b/project/build.properties index ee4c672cd0d7..cc68b53f1a30 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.10.1 +sbt.version=1.10.11 diff --git a/project/build.sbt b/project/build.sbt deleted file mode 100644 index 0cfcc9fd4bcb..000000000000 --- a/project/build.sbt +++ /dev/null @@ -1,2 +0,0 @@ -// Add genprod to the build; It should be moved from `src/build` to `project` now that the Ant build is gone -Compile / sources += ((baseDirectory).value.getParentFile / "src" / "build" / "genprod.scala") diff --git a/src/build/genprod.scala b/project/genprod.scala similarity index 79% rename from src/build/genprod.scala rename to project/genprod.scala index f5485a9eb46c..ec144322a083 100644 --- a/src/build/genprod.scala +++ b/project/genprod.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,15 +10,11 @@ * additional information regarding copyright ownership. */ -/** This program generates the ProductN, TupleN, FunctionN, - * and AbstractFunctionN, where 0 <= N <= MaxArity. 
- * - * Usage: scala genprod - * where the argument is the desired output directory - * - * @author Burak Emir, Stephane Micheloud, Geoffrey Washburn, Paul Phillips +/** + * This program generates the ProductN, TupleN, FunctionN, and AbstractFunctionN, + * where 0 <= N <= MaxArity. Usage: sbt generateSources */ -object genprod extends App { +object genprod { final val MaxArity = 22 def arities = (1 to MaxArity).toList @@ -37,6 +33,7 @@ object genprod extends App { def i: Int // arity def typeArgsString(xs: Seq[String]) = xs.mkString("[", ", ", "]") + def typeArgsToTupleSyntacticSugarString(xs: Seq[String]) = xs.mkString("(", ", ", ")") def to = (1 to i).toList def s = if (i == 1) "" else "s" @@ -58,13 +55,14 @@ object genprod extends App { def genprodString = " See scala.Function0 for timestamp." def moreMethods = "" + def companionObject = "" def packageDef = "scala" def imports = "" def header = """/* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -80,20 +78,18 @@ package %s """.trim.format(genprodString, packageDef, imports) } - if (args.length != 1) { - println("please give path of output directory") - sys.exit(-1) - } - val out = args(0) - def writeFile(node: scala.xml.Node) { - import scala.tools.nsc.io._ - val f = Path(out) / node.attributes("name").toString - f.parent.createDirectory(force = true) - f.toFile writeAll node.text + def run(outDir: java.io.File): Unit = { + val out = outDir.getAbsolutePath + def writeFile(node: scala.xml.Node): Unit = { + import scala.tools.nsc.io._ + val f = Path(out) / node.attributes("name").toString + f.parent.createDirectory(force = true) + f.toFile writeAll node.text + } + allfiles foreach writeFile } - - allfiles foreach writeFile } + import genprod._ @@ -102,24 +98,25 @@ import genprod._ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */ object FunctionZero extends Function(0) { - override def genprodString = "\n// genprod generated these sources at: " + new java.util.Date() + override def genprodString = "\n// genprod generated these sources at: " + java.time.Instant.now() override def covariantSpecs = "@specialized(Specializable.Primitives) " - override def descriptiveComment = " " + functionNTemplate.format("javaVersion", "anonfun0", -""" - * val javaVersion = () => sys.props("java.version") + override def descriptiveComment = " " + functionNTemplate.format("greeting", "anonfun0", +raw""" + * val name = "world" + * val greeting = () => s"hello, $$name" * * val anonfun0 = new Function0[String] { - * def apply(): String = sys.props("java.version") + * def apply(): String = s"hello, $$name" * } - * assert(javaVersion() == anonfun0()) + * assert(greeting() == anonfun0()) * """) override def moreMethods = "" } object FunctionOne extends Function(1) { override def classAnnotation = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n" - override def contravariantSpecs = "@specialized(scala.Int, 
scala.Long, scala.Float, scala.Double) " - override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) " + override def contravariantSpecs = "@specialized(Specializable.Arg) " + override def covariantSpecs = "@specialized(Specializable.Return) " override def descriptiveComment = " " + functionNTemplate.format("succ", "anonfun1", """ @@ -149,12 +146,42 @@ object FunctionOne extends Function(1) { * @return a new function `f` such that `f(x) == g(apply(x))` */ @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) } +""" + override def companionObject = +""" +object Function1 { + + implicit final class UnliftOps[A, B] private[Function1](private val f: A => Option[B]) extends AnyVal { + /** Converts an optional function to a partial function. + * + * @example Unlike [[Function.unlift]], this [[UnliftOps.unlift]] method can be used in extractors. + * {{{ + * val of: Int => Option[String] = { i => + * if (i == 2) { + * Some("matched by an optional function") + * } else { + * None + * } + * } + * + * util.Random.nextInt(4) match { + * case of.unlift(m) => // Convert an optional function to a pattern + * println(m) + * case _ => + * println("Not matched") + * } + * }}} + */ + def unlift: PartialFunction[A, B] = Function.unlift(f) + } + +} """ } object FunctionTwo extends Function(2) { - override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Double) " - override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) " + override def contravariantSpecs = "@specialized(Specializable.Args) " + override def covariantSpecs = "@specialized(Specializable.Return) " override def descriptiveComment = " " + functionNTemplate.format("max", "anonfun2", """ @@ -181,17 +208,19 @@ class Function(val i: Int) extends Group("Function") with Arity { def descriptiveComment = "" def functionNTemplate = """ - * In the following 
example, the definition of %s is a - * shorthand for the anonymous class definition %s: + * In the following example, the definition of `%s` is + * shorthand, conceptually, for the anonymous class definition + * `%s`, although the implementation details of how the + * function value is constructed may differ: * * {{{ * object Main extends App {%s} * }}}""" - def toStr() = "\"" + ("" format i) + "\"" + def toStr = "\"" + ("" format i) + "\"" def apply() = { {header} - +{companionObject} /** A function of {i} parameter{s}. *{descriptiveComment} */ @@ -201,7 +230,7 @@ class Function(val i: Int) extends Group("Function") with Arity { */ def apply({funArgs}): R {moreMethods} - override def toString() = {toStr} + override def toString(): String = {toStr} }} } @@ -237,9 +266,10 @@ class Function(val i: Int) extends Group("Function") with Arity { * @return a function `f` such that `f(%s) == f(Tuple%d%s) == apply%s` */ """.format(i, i, commaXs, i, commaXs, commaXs) - def body = "case Tuple%d%s => apply%s".format(i, commaXs, commaXs) + def body = "case (%s) => apply%s".format(commaXs, commaXs) - comment + "\n @annotation.unspecialized def tupled: Tuple%d%s => R = {\n %s\n }".format(i, invariantArgs, body) + comment + "\n @annotation.unspecialized def tupled: (%s) => R = {\n %s\n }".format( + typeArgsToTupleSyntacticSugarString(targs), body) } def curryMethod = { @@ -324,7 +354,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity { final case class {className}{covariantArgs}({fields}) extends {Product.className(i)}{invariantArgs} {{ - override def toString() = "(" + {mkToString} + ")" + override def toString(): String = "(" + {mkToString} + ")" {moreMethods} }} } @@ -356,19 +386,19 @@ object ProductTwo extends Product(2) } class Product(val i: Int) extends Group("Product") with Arity { - val productElementComment = """ + val productElementComment = s""" /** Returns the n-th projection of this product if 0 <= n < productArity, * otherwise throws an 
`IndexOutOfBoundsException`. * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= $i). */ """ def cases = { val xs = for ((x, i) <- mdefs.zipWithIndex) yield "case %d => %s".format(i, x) - val default = "case _ => throw new IndexOutOfBoundsException(n.toString())" + val default = "case _ => throw new IndexOutOfBoundsException(s\"$n is out of bounds (min 0, max " + (i-1) +")\")" "\n" + ((xs ::: List(default)).map(" " + _ + "\n").mkString) } def proj = { @@ -389,17 +419,16 @@ object {className} {{ }} /** {className} is a Cartesian product of {i} component{s}. - * @since 2.3 */ trait {className}{covariantArgs} extends Any with Product {{ /** The arity of this product. * @return {i} */ - override def productArity = {i} + override def productArity: Int = {i} {productElementComment} @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match {{ {cases} }} + override def productElement(n: Int): Any = n match {{ {cases} }} {proj} {moreMethods} diff --git a/project/plugins.sbt b/project/plugins.sbt index f4982adc15f6..d8be38f31bf4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,15 +1,12 @@ scalacOptions ++= Seq( "-unchecked", "-feature", - // "-deprecation", - // "-Xlint:-unused,_", - // "-Werror", - "-Wconf:msg=IntegrationTest .* is deprecated:s,msg=itSettings .* is deprecated:s" -) - -libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.16.0" + "-deprecation", + "-Xlint:-unused,_", + "-Werror", + "-Wconf:msg=IntegrationTest .* is deprecated:s,msg=itSettings .* is deprecated:s") -libraryDependencies += "org.pantsbuild" % "jarjar" % "1.7.2" +libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.17.0" libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bndlib" % "6.1.0" @@ -29,14 +26,16 @@ 
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.4") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.11.9.201909030838-r", - "org.slf4j" % "slf4j-nop" % "1.7.36", - "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" - ) + "org.slf4j" % "slf4j-nop" % "2.0.0", + "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0", +) -concurrentRestrictions in Global := Seq( +Global / concurrentRestrictions := Seq( Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970 ) addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.10.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.7") + +addSbtPlugin("com.gradle" % "sbt-develocity" % "1.2.1") diff --git a/project/project/plugins.sbt b/project/project/plugins.sbt index 980e841c0f6b..ddfa827f9362 100644 --- a/project/project/plugins.sbt +++ b/project/project/plugins.sbt @@ -1 +1 @@ -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.12.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1") diff --git a/project/src/test/scala/PartestUtilTest.scala b/project/src/test/scala/PartestUtilTest.scala new file mode 100644 index 000000000000..e5ba3cc28bdf --- /dev/null +++ b/project/src/test/scala/PartestUtilTest.scala @@ -0,0 +1,69 @@ +package scala.build + +import sbt._ +import sbt.complete._, Completion._ + +import scala.Console.{ GREEN, RED, RESET, err } + +// This doesn't get run automatically +// but it was handy when tweaking PartestUtil +// so it may be handy down the line. +// +// Run it manually like so: +// > reload plugins +// ... +// [info] loading project definition from /d/scala/project +// > Test/runMain scala.build.PartestUtilTest +// [info] running scala.build.PartestUtilTest +// +// = base=/d/scala +// = path=pos/t177 +// + completions partest pos/t177 OK +// + completions partest test/files/pos/t177 OK +// ... 
+object PartestUtilTest { + def main(args: Array[String]): Unit = { + def test(base: File, str: String, expected: Completions) = { + print(s"+ completions partest $str ") + val P = PartestUtil.partestParser(base, base / "test") + val completions = Parser.completions(P, s" $str", 9).filterS(_.nonEmpty) + def pp(c: Completions, pre: String) = c.get.iterator.map(x => s"\n$pre $x)$RESET").mkString + def ppStr = s"($RED-obtained$RESET/$GREEN+expected$RESET):${pp(completions, s"$RED-")}${pp(expected, s"$GREEN+")}" + if (completions == expected) { + println(s"${GREEN}OK$RESET") + //println(s"Completions $ppStr") + } else { + println(s"${RED}KO$RESET") + err.println(s"Completions $ppStr") + throw new AssertionError("Completions mismatch") + } + } + + def prependTokens(completions: Completions, prefix: String) = completions.map { + case t: Token => new Token(s"$prefix${t.display}", t.append) + case x => x + } + + def testGen(base: File, path: String, res: Completions) = { + println(s"= path=$path") + test(base, s"$path", prependTokens(res, "")) + test(base, s"test/files/$path", prependTokens(res, "test/files/")) + test(base, s"./test/files/$path", prependTokens(res, "./test/files/")) + if (base.isAbsolute) // a prerequisite as this is to test absolute path tab-completion + test(base, s"$base/test/files/$path", prependTokens(res, s"$base/test/files/")) + } + + def tests(base: File) = { + println(s"\n= base=$base") + testGen(base, "pos/t177", Completions(Set(token("pos/t177", ".scala")))) + testGen(base, "pos/t177.scala", Completions(Set(suggestion(" ")))) + testGen(base, "pos/t1786", Completions(Set(token("pos/t1786", "-counter.scala"), token("pos/t1786", "-cycle.scala")))) + testGen(base, "pos/t1786.scala", Completions(Set())) + } + + tests(file(sys.props("user.dir"))) + tests(file(".").getAbsoluteFile) + tests(file(".")) + tests(file("./")) + } +} diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index f657665811e1..18a27870ebb0 100644 --- a/scripts/bootstrap_fun 
+++ b/scripts/bootstrap_fun @@ -1,27 +1,11 @@ # Bootstrap procedure # - determine scala version -# - determine module versions # - optionally build a fresh "starr", publish to BOOTSTRAP_REPO_DIR # - build minimal core (aka "locker") of Scala, publish to BOOTSTRAP_REPO_DIR # - build Scala (aka "quick") using locker, publish to scala-integration (or sonatype for releases) # - run tests -# Modules and stages -# - Each stage (starr, locker quick) builds the modules (if no binary compatible version exists) -# - The reason is: the compiler POM depends on the xml module of the previous stage, i.e., the -# locker compiler uses the starr modules. So the locker scaladoc (building the quick compiler) -# runs with a re-built xml, which may be necessary under binary incompatible changes. -# - In the starr / locker stages, the modules are built using the compiler just built at this stage. -# So the locker modules are built using locker, unlike the locker compiler, which is built by starr. -# - The quick (the actual release) compiler POM depends on the locker xml module. Therefore we need -# to use the same Scala version number in locker and quick, so that the modules built in the quick -# stage can be swapped in (quick compiler and modules are released to scala-integration / sonatype). -# - Final quirk: in the quick stage, the modules are built using the locker compiler. The reason: -# the quick compiler lives in scala-integration / sonatype, but there's no xml module there yet -# (we're just about to build it), which we need to run scaladoc. So we use the locker compiler. - - # Specifying the Scala version: # - To build a release (enables publishing to sonatype): # - Specify SCALA_VER_BASE and optionally SCALA_VER_SUFFIX. The version is SCALA_VER=$SCALA_VER_BASE$SCALA_VER_SUFFIX. @@ -32,46 +16,6 @@ # - version number is read from the build.sbt, extended with -[bin|pre]-$sha -# Specifying module versions. We use release versions for modules. 
-# - Module versions are read from the versions.properties file. -# - Set _VER to override the default, e.g. XML_VER="1.0.4". -# - The git revision is set to _REF="v$_VER". Make sure the tag exists (you can't override _REF). - - -# Modules are automatically built if necessary. -# - A module is built if it doesn't exist in the maven repository. Note that the lookup uses two versions: -# - The version of the module (see below how it's determined) -# - The binary version of of the SCALA_VER release that is being built -# - sbt computes the binary version when looking up / building modules (*). Examples: -# - 2.12.0-M1, 2.12.0-RC3: the full version is used -# - 2.12.0, 2.12.1-M1, 2.12.1-RC3, 2.12.1: the binary version 2.12 is used -# -# - Example: assume that `scala-xml_2.11 % 1.0.3` and `scala-xml_2.12.0-M1 % 1.0.3` both exists -# - XML_VER=1.0.3 and SCALA_VER=2.11.7 => no rebuild (binary version remains 2.11) -# - XML_VER=1.0.3 and SCALA_VER=2.12.0-M2 => rebuild (new binary version 2.12.0-M2) -# - XML_VER=1.0.4 and SCALA_VER=2.11.7 => rebuild (new version for the module, not yet on maven) -# NOTE: this is not the recommended way of publishing a module. Instead, prefer to release `scala-xml_2.11 % 1.0.4` -# using the existing scala 2.11.6 compiler before releasing 2.11.7. Sometimes it's necessary though. One -# example was 2.11.1, which contained a fix in the backend (SerialVersionUID was ignored). All modules needed -# to be re-built using the 2.11.1 release, we could not use 2.11.0. We could also not release the modules -# after 2.11.1 was out, because that way the scala-library-all pom of 2.11.1 would depend on the old modules. 
-# -# (*) https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L41 - - -# Binary incompatible changes in Modules: example with Scala 2.11 / 2.12 and scala-parser-combinators -# - The 1.0.x branch on scala-parser-combinators remains binary compatible with 1.0.0 -# - Scala 2.11 will always use 1.0.x releases: we ship scala-parser-combinators with the distribution, -# so we cannot introduce incompatible changes in a minor release. -# - The master branch of scala-parser-combinators contains binary incompatible changes, versioned 1.1.x -# - Scala 2.12 will use 1.1.x releases -# - No changes to the build script required: just put the 1.1.x version number into versions.properties -# -# Note: It's still OK for a module to release a binary incompatible version to maven, for example -# scala-parser-combinators_2.11 % 1.1.0. Users can depend on this in their sbt build. But for the -# distribution (tar/zip archives, scala-library-all) we have to stay on the binary compatible version. - - # Credentials # - `PRIVATE_REPO_PASS` password for `scala-ci` user on scala-ci.typesafe.com/artifactory # - `SONA_USER` / `SONA_PASS` for sonatype @@ -79,115 +23,11 @@ publishPrivateTask=${publishPrivateTask-"publish"} publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} -publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} -forceBuildModules=${forceBuildModules-no} clean="clean" # TESTING leave empty to speed up testing (on jenkins/locally; on travis it's a fresh machine every time) - -docTask() { - # Build the module docs only in the last (quick) stage. The locker scaladoc may be binary - # incompatible with the starr scala-xml (on which it depends, by the pom file) - if [ "$1" = "quick" ]; then - echo "doc" - else - echo "set publishArtifact in (Compile, packageDoc) in ThisBuild := false" - fi -} - -# Oh boy... can't use scaladoc to document scala-xml if scaladoc depends on the same version of scala-xml. 
-# Even if that version is available through the project's resolvers, sbt won't look past this project. -# SOOOOO, we set the version to a dummy (-DOC), generate documentation, -# then set the version to the right one and publish (which won't re-gen the docs). -# Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice. -buildXML() { - if [ "$XML_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) - then echo "Found scala-xml $XML_VER; not building." - else - update scala scala-xml "$XML_REF" && gfxd - doc="$(docTask $1)" - sbtBuild 'set version := "'$XML_VER'-DOC"' $clean "$doc" 'set version := "'$XML_VER'"' test "${buildTasks[@]}" - XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above - fi -} - -# should only be called with publishTasks publishing to artifactory -buildScalaCheck(){ - if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) - then echo "Found scalacheck $SCALACHECK_VER; not building." 
- else - update rickynils scalacheck $SCALACHECK_REF && gfxd - doc="$(docTask $1)" - sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean "$doc" publish # test times out NOTE: never published to sonatype - SCALACHECK_BUILT="yes" - fi -} - -buildModules() { - clearIvyCache - - if [ "$1" = "starr" ]; then - scalaVersionTasks=('set every scalaVersion := "'$STARR_VER'"') - else - scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"') - fi - - if [[ "$1" = "starr" || "$1" == "locker" ]]; then - addResolvers="$addBootstrapResolver" - publishTasks=("set every publishTo := Some(\"scala-bootstrap\" at \"file://$BOOTSTRAP_REPO_DIR\")") - buildTasks=($publishPrivateTask) - else - if [ "$publishToSonatype" == "yes" ]; then - addResolvers="$addBootstrapResolver" # locker compiler builds quick modules, see comment on top of this file - publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' 'set pgpSigningKey := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' "set pgpPassphrase := Some(Array.empty)") - buildTasks=($publishSonatypeTaskModules) - else - addResolvers="$addBootstrapResolver" # locker compiler builds quick modules, see comment on top of this file - publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") - buildTasks=($publishPrivateTask) - fi - fi - - buildXML $1 - # buildScalaCheck $1 - - constructUpdatedModuleVersions $1 - - cd $WORKSPACE -} - - ## BUILD STEPS: -scalaVerToBinary() { - # $1 = SCALA_VER - # $2 = SCALA_VER_BASE - # $3 = SCALA_VER_SUFFIX - - local RE='\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)' - local majMin="$(echo $2 | sed -e "s#$RE#\1.\2#")" - local patch="$(echo $2 | sed -e "s#$RE#\3#")" - - # The 
binary version is majMin (e.g. "2.12") if - # - there's no suffix : 2.12.0, 2.12.1 - # - the suffix starts with "-bin" : 2.12.1-bin-sha, 2.12.1-bin-sha-custom, 2.12.1-bin-SNAPSHOT - # - the suffix is \w+ and patch version is > 0: 2.12.1-M1, 2.12.1-RC2 (also 2.12.1-sha, 2.12.1-SNAPSHOT, which we don't use) - # - # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-pre-sha, 2.12.0-pre-SNAPSHOT - # (also 2.12.0-sha, 2.12.0-SNAPSHOT, which we don't use) - # - # Adapted from sbt: https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L42 - # - # During the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of integration / SNAPSHOT - # versions is the full version, e.g. 2.12.0-pre-sha, so modules are always re-built. - - if [[ "$3" == "" || "${3:0:4}" == "-bin" || ("$patch" != "0" && "$3" =~ ^-[a-zA-Z0-9_]+$) ]]; then - echo "$majMin" - else - echo "$1" - fi -} - determineScalaVersion() { cd $WORKSPACE parseScalaProperties "versions.properties" @@ -208,7 +48,6 @@ determineScalaVersion() { fi SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX" - SCALA_BINARY_VER=$(scalaVerToBinary $SCALA_VER $SCALA_VER_BASE $SCALA_VER_SUFFIX) echo "version=$SCALA_VER" >> $WORKSPACE/jenkins.properties echo "sbtDistVersionOverride=-Dproject.version=$SCALA_VER" >> $WORKSPACE/jenkins.properties @@ -216,19 +55,6 @@ determineScalaVersion() { echo "Building Scala $SCALA_VER." 
} -# determineScalaVersion must have been called (versions.properties is parsed to env vars) -deriveModuleVersions() { - XML_VER=${XML_VER-$scala_xml_version_number} - SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number} - - XML_REF="v$XML_VER" - SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags - - # echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF" - echo "XML = $XML_VER at $XML_REF" - -} - createNetrcFile() { local netrcFile=$HOME/`basename $1`-netrc grep 'host=' $1 | sed 's/host=\(.*\)/machine \1/' > $netrcFile @@ -236,7 +62,7 @@ createNetrcFile() { grep 'password=' $1 | sed 's/password=\(.*\)/password \1/' >> $netrcFile } -# deletes existing artifacts (core and modules) matching the $SCALA_VER from the repository passed as argument +# deletes existing artifacts matching the $SCALA_VER from the repository passed as argument removeExistingBuilds() { local repoUrl=$1 local repoPrefix="https://scala-ci.typesafe.com/artifactory/" @@ -247,7 +73,7 @@ removeExistingBuilds() { createNetrcFile "$HOME/.credentials-private-repo" local netrcFile="$HOME/.credentials-private-repo-netrc" - # "module" is not a scala module (like scala-xml), but an artifact of a boostrap build. the variable + # "module" is not a scala module, but an artifact of a bootstrap build. the variable # contains: "org/scala-lang/modules", "org/scala-lang/scala-compiler", "org/scala-lang/scala-library", ... local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration` @@ -263,35 +89,14 @@ removeExistingBuilds() { fi } -constructUpdatedModuleVersions() { - updatedModuleVersions=() - - # force the new module versions for building the core. these may be different from the values in versions.properties - # if the variables (XML_VER) were provided. in the common case, the values are the same as in versions.properties. 
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER") - # updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER") - - # allow overriding the jline version using a jenkins build parameter - if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi - - if [ "$SCALA_BINARY_VER" = "$SCALA_VER" ]; then - if [ "$1" = "starr" ]; then - binaryVer=$STARR_VER - else - binaryVer=$SCALA_BINARY_VER - fi - updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$binaryVer") - fi -} - pollForStagingReposClosed() { OK=false - for i in $(seq 1 20); do + for i in $(seq 1 10); do OK=true for repo in $1; do if [[ "$(st_stagingRepoStatus $repo)" != "closed" ]]; then - echo "Staging repo $repo not yet closed, waiting 30 seconds ($i / 20)" + echo "Staging repo $repo not yet closed, waiting 30 seconds ($i / 10)" OK=false break fi @@ -301,7 +106,7 @@ pollForStagingReposClosed() { done if [ "$OK" = "false" ]; then - echo "Failed to close staging repos in 10 minutes: $1" + echo "Failed to close staging repos in 5 minutes: $1" exit 1 fi } @@ -341,17 +146,13 @@ buildStarr() { travis_fold_end starr ) SET_STARR=-Dstarr.version=$STARR_VER - - buildModules starr # the locker compiler uses these modules to run scaladoc, see comment on top of this file } #### LOCKER -# for bootstrapping, publish core (or at least smallest subset we can get away with) -# so that we can build modules with this version of Scala and publish them locally -# must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala -# publish more than just core: partest needs scalap -# in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler +# Publish core for bootstrapping. +# TODO: now that we no longer build modules, we can use a version number with a `-locker` suffix. 
+# Before that was not possible because the module artifacts had a pom dependency on that version. buildLocker() { clearIvyCache cd $WORKSPACE @@ -359,12 +160,9 @@ buildLocker() { travis_fold_start locker "Building locker" $SBT_CMD -no-colors $sbtArgs \ $SET_STARR \ - ${updatedModuleVersions[@]} \ "setupBootstrapLocker \"$BOOTSTRAP_REPO_DIR\" $SCALA_VER" \ $clean publish travis_fold_end locker - - buildModules locker } #### QUICK @@ -377,7 +175,6 @@ invokeQuickInternal() { travis_fold_start quick "Building bootstrapped" $SBT_CMD $sbtArgs \ -Dstarr.version=$SCALA_VER \ - ${updatedModuleVersions[@]} \ "$setupCmd" \ "$@" travis_fold_end quick @@ -401,8 +198,6 @@ buildQuick() { invokeQuick $clean publish fi - buildModules quick - closeStagingRepos } diff --git a/scripts/common b/scripts/common index 425fe9fcd575..e00731fcc01b 100644 --- a/scripts/common +++ b/scripts/common @@ -10,10 +10,6 @@ else IVY2_DIR="$WORKSPACE/.ivy2" fi -# used by `sbtResolve` -rm -rf "$WORKSPACE/resolutionScratch_" -mkdir -p "$WORKSPACE/resolutionScratch_" - SBT_VERSION=`grep sbt.version $WORKSPACE/project/build.properties | sed -n 's/sbt.version=\(.*\)/\1/p'` SBT_CMD=${SBT_CMD-sbt} @@ -24,7 +20,6 @@ integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifacto # only used on jenkins sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" -jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} # used by `checkAvailability` TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) @@ -37,8 +32,6 @@ mkdir "${BOOTSTRAP_REPO_DIR}" addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" addBootstrapResolver="set resolvers in Global += \"scala-bootstrap\" at \"file://$BOOTSTRAP_REPO_DIR\"" -# Gets set to addIntegrationResolver or addBootstrapResolver for use in sbtBuild and sbtResolve: -addResolvers="" stApi="https://oss.sonatype.org/service/local" @@ -90,7 +83,7 @@ function checkAvailability () { # Only used on 
Jenkins # Generate a repositories file with all allowed repositories in our build environment. # Takes a variable number of additional repositories as argument. -# See https://www.scala-sbt.org/1.0/docs/Proxy-Repositories.html +# See https://www.scala-sbt.org/1.x/docs/Proxy-Repositories.html function generateRepositoriesConfig() { echo > "$sbtRepositoryConfig" '[repositories]' if [[ $# -gt 0 ]]; then @@ -100,11 +93,9 @@ function generateRepositoriesConfig() { fi cat >> "$sbtRepositoryConfig" << EOF - jcenter-cache: $jcenterCacheUrl local maven-central - typesafe-ivy-releases-boot: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + typesafe-ivy-releases: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext] sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] EOF } @@ -172,34 +163,9 @@ st_stagingRepoClose() { #### sbt tools -sbtBuild() { - travis_fold_start build "Building $(basename $PWD) with $@" - $SBT_CMD -no-colors $sbtArgs "$addResolvers" "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" - travis_fold_end build -} - -sbtResolve() { - cd "$WORKSPACE/resolutionScratch_" - touch build.sbt - # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to. 
- cross=${4-binary} - # echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" - travis_fold_start resolve "Resolving \"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" - $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ - "$addResolvers" \ - "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ - "show update" - res=$? - cd $WORKSPACE - travis_fold_end resolve - return $res -} - clearIvyCache() { travis_fold_start clearIvy "Clearing ivy cache" rm -fv $IVY2_DIR/exclude_classifiers $IVY2_DIR/exclude_classifiers.lock - rm -rfv $IVY2_DIR/cache/org.scala-lang $IVY2_DIR/cache/org.scala-lang.modules - rm -rfv $IVY2_DIR/local/org.scala-lang $IVY2_DIR/local/org.scala-lang.modules if [ -d $IVY2_DIR ]; then find $IVY2_DIR -name "*compiler-interface*$SCALA_VER*" | xargs rm -rfv; fi if [ -d $HOME/.sbt ]; then find $HOME/.sbt -name "*compiler-interface*$SCALA_VER*" | xargs rm -rfv; fi travis_fold_end clearIvy diff --git a/scripts/generate-spec-pdf.bat b/scripts/generate-spec-pdf.bat new file mode 100644 index 000000000000..67ce5525dc71 --- /dev/null +++ b/scripts/generate-spec-pdf.bat @@ -0,0 +1,24 @@ +@ECHO OFF +SETLOCAL EnableDelayedExpansion + +REM NOTES: +REM "toc" -> treated just like another page, its location can be changed +REM "--window-status loaded" -> when window.status is set to "loaded", wkhtmltopdf knows js is loaded + +SET THIS_SCRIPT_DIR=%~dp0 +SET ROOT_DIR=%THIS_SCRIPT_DIR%.. +SET SPEC_SRC_DIR=%ROOT_DIR%\spec +SET SPEC_BUILD_DIR=%ROOT_DIR%\build\spec + +SET WKHTML_OPTS=--print-media-type --window-status loaded --footer-center [page] --javascript-delay 1000 --footer-font-name "Luxi Sans" +SET WKHTML_TOC=toc --xsl-style-sheet %SPEC_SRC_DIR%\spec-toc.xslt + +SET HTML_FILES= +FOR /F "tokens=*" %%a IN ('dir %SPEC_BUILD_DIR%\*.html /B /O:N ^| findstr /v /i "index.*"') DO ( + SET HTML_FILES=!HTML_FILES! 
%SPEC_BUILD_DIR%\%%a +) +ECHO Making Spec.pdf with HTML files: +ECHO %SPEC_BUILD_DIR%\index.html %HTML_FILES% + +REM first goes index.html, then TOC, then rest +wkhtmltopdf %WKHTML_OPTS% %SPEC_BUILD_DIR%\index.html %WKHTML_TOC% %HTML_FILES% %SPEC_BUILD_DIR%\Spec.pdf diff --git a/scripts/generate-spec-pdf.sh b/scripts/generate-spec-pdf.sh new file mode 100755 index 000000000000..c49d0d39d1a6 --- /dev/null +++ b/scripts/generate-spec-pdf.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +set -e +set -v + +# NOTES: +# "toc" -> treated just like another page, its location can be changed +# "--window-status loaded" -> when window.status is set to "loaded", wkhtmltopdf knows js is loaded + +THIS_SCRIPT_DIR=$(dirname $0) +ROOT_DIR=$THIS_SCRIPT_DIR/.. +SPEC_SRC_DIR=$ROOT_DIR/spec +SPEC_BUILD_DIR=$ROOT_DIR/build/spec +SPEC_PDF_BUILD_DIR=$ROOT_DIR/build/spec-pdf +PDF=$SPEC_BUILD_DIR/spec.pdf + +mkdir -p $SPEC_PDF_BUILD_DIR + + +WKHTML_OPTS='--print-media-type --window-status loaded --javascript-delay 1000 --load-error-handling ignore --enable-local-file-access --footer-center [page] --footer-font-name "Luxi Sans"' +WKHTML_TOC="toc --xsl-style-sheet $SPEC_SRC_DIR/spec-toc.xslt" + +# exclude index.html, prepend SPEC_PDF_BUILD_DIR path +HTML_FILES=$(ls $SPEC_PDF_BUILD_DIR | grep -vx 'index.html' | grep '\.html$' | while read line; do echo "$SPEC_PDF_BUILD_DIR/$line"; done) + +echo "Making spec.pdf with HTML files: " +echo $SPEC_PDF_BUILD_DIR/index.html $HTML_FILES + +# first goes index.html, then TOC, then rest +rm -f $PDF +wkhtmltopdf $WKHTML_OPTS $SPEC_PDF_BUILD_DIR/index.html $WKHTML_TOC $HTML_FILES $PDF || true + +# the '|| true' thing is because we get: +# Error: Failed to load http:/, with network status code 3 and http status code 0 - Host not found +# Warning: Failed loading page http: (ignored) +# as long we have `--load-error-handling ignore` we still get a PDF, but we also get a nonzero exit code + +# fail if we didn't get a PDF file out +if [ ! 
-f $PDF ] ; then exit 1 ; fi diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index e936f4106f7d..8524dbb5bd18 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -11,7 +11,6 @@ sbtArgs="-ivy $IVY2_DIR -Dsbt.override.build.repos=true -Dsbt.repository.config= generateRepositoriesConfig $integrationRepoUrl determineScalaVersion -deriveModuleVersions removeExistingBuilds $integrationRepoUrl diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core index ff9cbc2c9c58..e04c91177b55 100755 --- a/scripts/jobs/validate/publish-core +++ b/scripts/jobs/validate/publish-core @@ -28,7 +28,7 @@ case $prDryRun in if $libraryAvailable && $reflectAvailable && $compilerAvailable; then echo "Scala core already built!" else - $SBT -warn "setupPublishCore $prRepoUrl" publish + $SBT -Dscala.build.publishDevelocity=build -warn "setupPublishCore $prRepoUrl" publish fi mv buildcharacter.properties jenkins.properties # parsed by the jenkins job diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index 7dd61c837bcc..e8e94c2e4ed1 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -18,6 +18,7 @@ case $prDryRun in # and run JUnit tests, ScalaCheck tests, partest, OSGi tests, MiMa and scaladoc $SBT \ -Dstarr.version=$scalaVersion \ + -Dscala.build.publishDevelocity=test \ -warn \ "setupValidateTest $prRepoUrl" \ $testExtraArgs \ diff --git a/scripts/readproperties.awk b/scripts/readproperties.awk index 96da94775b77..9384dd5aab36 100644 --- a/scripts/readproperties.awk +++ b/scripts/readproperties.awk @@ -1,4 +1,4 @@ -# Adapted from http://stackoverflow.com/questions/1682442/reading-java-properties-file-from-bash/2318840#2318840 +# Adapted from https://stackoverflow.com/questions/1682442/reading-java-properties-file-from-bash/2318840#2318840 BEGIN { FS="="; n=""; diff --git a/scripts/travis-publish-spec.sh b/scripts/travis-publish-spec.sh index 
fe29ea06783a..3d6a383d0a21 100755 --- a/scripts/travis-publish-spec.sh +++ b/scripts/travis-publish-spec.sh @@ -1,9 +1,13 @@ #!/bin/bash -# based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html - set -e -openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a + +if [ -z "$PRIV_KEY_SECRET" ]; then + echo "PRIV_KEY_SECRET is missing/empty, so skipping publish spec" + exit +fi + +openssl aes-256-cbc -md md5 -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a chmod 600 spec/id_dsa_travis eval "$(ssh-agent)" ssh-add -D @@ -11,4 +15,4 @@ ssh-add spec/id_dsa_travis # the key is restricted using forced commands so that it can only upload to the directory we need here rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ \ - scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.12/ + scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.13/ diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 3af89a807b10..5ec1f9f2ef68 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -6,36 +6,24 @@ chapter: 1 # Lexical Syntax -Scala programs are written using the Unicode Basic Multilingual Plane -(_BMP_) character set; Unicode supplementary characters are not -presently supported. This chapter defines the two modes of Scala's -lexical syntax, the Scala mode and the _XML mode_. If not -otherwise mentioned, the following descriptions of Scala tokens refer -to _Scala mode_, and literal characters ‘c’ refer to the ASCII fragment -`\u0000` – `\u007F`. +Scala source code consists of Unicode text. -In Scala mode, _Unicode escapes_ are replaced by the corresponding -Unicode character with the given hexadecimal code. 
+The nine [Bidirectional explicit formatting](https://www.unicode.org/reports/tr9/#Bidirectional_Character_Types) +characters `\u202a - \u202e` and `\u2066 - \u2069` (inclusive) are forbidden +from appearing in source files. Note that they can be represented using +unicode escapes in string and character literals. -```ebnf -UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ -``` - - +The program text is tokenized as described in this chapter. +See the last section for special support for XML literals, +which are parsed in _XML mode_. To construct tokens, characters are distinguished according to the following classes (Unicode general category given in parentheses): 1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`. 1. Letters, which include lower case letters (`Ll`), upper case letters (`Lu`), - titlecase letters (`Lt`), other letters (`Lo`), letter numerals (`Nl`) and the - two characters `\u0024 ‘$’` and `\u005F ‘_’`. + title case letters (`Lt`), other letters (`Lo`), modifier letters (`Lm`), + letter numerals (`Nl`) and the two characters `\u0024 ‘$’` and `\u005F ‘_’`. 1. Digits `‘0’ | … | ‘9’`. 1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `. 1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``. @@ -54,23 +42,26 @@ plainid ::= upper idrest | varid | op id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ idrest ::= {letter | digit} [‘_’ op] +escapeSeq ::= UnicodeEscape | charEscapeSeq +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ ``` There are three ways to form an identifier. First, an identifier can -start with a letter which can be followed by an arbitrary sequence of +start with a letter, followed by an arbitrary sequence of letters and digits. 
This may be followed by underscore `‘_‘` characters and another string composed of either letters and digits or of operator characters. Second, an identifier can start with an operator character followed by an arbitrary sequence of operator characters. The preceding two forms are called _plain_ identifiers. Finally, an identifier may also be formed by an arbitrary string between -back-quotes (host systems may impose some restrictions on which +backquotes (host systems may impose some restrictions on which strings are legal for identifiers). The identifier then is composed of all characters excluding the backquotes themselves. -As usual, a longest match rule applies. For instance, the string +As usual, the longest match rule applies. For instance, the string ```scala big_bob++=`def` @@ -79,14 +70,42 @@ big_bob++=`def` decomposes into the three identifiers `big_bob`, `++=`, and `def`. +Although `/` is an `opchar`, the sequence of characters `//` or `/*`, +which open a comment, must be enclosed in backquotes when used in an identifier. + +```scala +def `://`(s: String): URI +def `*/*`(d: Double): Double +``` + The rules for pattern matching further distinguish between -_variable identifiers_, which start with a lower case letter, and -_constant identifiers_, which do not. For this purpose, -underscore `‘_‘` is taken as lower case, and the ‘\$’ character -is taken as upper case. +_variable identifiers_, which start with a lower case letter +or `_`, and _constant identifiers_, which do not. + +For this purpose, lower case letters include not only a-z, +but also all characters in Unicode category Ll (lowercase letter), +as well as all letters that have contributory property +Other_Lowercase, except characters in category Nl (letter numerals), +which are never taken as lower case. + +The following are examples of variable identifiers: + +> ```scala +> x maxIndex p2p empty_? 
+> `yield` αρετη _y dot_product_* +> __system _MAX_LEN_ +> ªpple ʰelper +> ``` + +Some examples of constant identifiers are + +> ```scala +> + Object $reserved Džul ǂnûm +> ⅰ_ⅲ Ⅰ_Ⅲ ↁelerious ǃqhàà ʹthatsaletter +> ``` -The ‘\$’ character is reserved for compiler-synthesized identifiers. -User programs should not define identifiers which contain ‘\$’ characters. +The ‘$’ character is reserved for compiler-synthesized identifiers. +User programs should not define identifiers that contain ‘$’ characters. The following names are reserved words instead of being members of the syntactic class `id` of lexical identifiers. @@ -103,7 +122,7 @@ val var while with yield _ : = => <- <: <% >: # @ ``` -The Unicode operators `\u21D2` ‘$\Rightarrow$’ and `\u2190` ‘$\leftarrow$’, which have the ASCII +The Unicode operators `\u21D2` ‘´\Rightarrow´’ and `\u2190` ‘´\leftarrow´’, which have the ASCII equivalents `=>` and `<-`, are also reserved. > Here are examples of identifiers: @@ -184,7 +203,7 @@ printable characters), then two `nl` tokens are inserted. The Scala grammar (given in full [here](13-syntax-summary.html)) contains productions where optional `nl` tokens, but not -semicolons, are accepted. This has the effect that a newline in one of these +semicolons, are accepted. This has the effect that a new line in one of these positions does not terminate an expression or statement. 
These positions can be summarized as follows: @@ -317,6 +336,7 @@ Literal ::= [‘-’] integerLiteral | booleanLiteral | characterLiteral | stringLiteral + | interpolatedString | symbolLiteral | ‘null’ ``` @@ -324,22 +344,23 @@ Literal ::= [‘-’] integerLiteral ### Integer Literals ```ebnf -integerLiteral ::= (decimalNumeral | hexNumeral) +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] -decimalNumeral ::= ‘0’ | nonZeroDigit {digit} +decimalNumeral ::= digit {digit} hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} -digit ::= ‘0’ | nonZeroDigit -nonZeroDigit ::= ‘1’ | … | ‘9’ +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit {binaryDigit} ``` -Integer literals are usually of type `Int`, or of type -`Long` when followed by a `L` or -`l` suffix. Values of type `Int` are all integer +Values of type `Int` are all integer numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive. Values of type `Long` are all integer numbers between $-2\^{63}$ and $2\^{63}-1$, inclusive. A compile-time error occurs if an integer literal denotes a number outside these ranges. +Integer literals are usually of type `Int`, or of type +`Long` when followed by a `L` or `l` suffix. +(Lowercase `l` is deprecated for reasons of legibility.) + However, if the expected type [_pt_](06-expressions.html#expression-typing) of a literal in an expression is either `Byte`, `Short`, or `Char` and the integer number fits in the numeric range defined by the type, @@ -348,12 +369,15 @@ is _pt_. The numeric ranges given by these types are: | | | |----------------|--------------------------| -|`Byte` | $-2\^7$ to $2\^7-1$ | -|`Short` | $-2\^{15}$ to $2\^{15}-1$| -|`Char` | $0$ to $2\^{16}-1$ | +|`Byte` | ´-2\^7´ to ´2\^7-1´ | +|`Short` | ´-2\^{15}´ to ´2\^{15}-1´| +|`Char` | ´0´ to ´2\^{16}-1´ | + +The digits of a numeric literal may be separated by +arbitrarily many underscores for purposes of legibility. 
> ```scala -> 0 21 0xFFFFFFFF -42L +> 0 21_000 0x7F -42L 0xFFFF_FFFF > ``` ### Floating Point Literals @@ -403,37 +427,30 @@ members of type `Boolean`. ### Character Literals ```ebnf -characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ ``` A character literal is a single character enclosed in quotes. The character can be any Unicode character except the single quote delimiter or `\u000A` (LF) or `\u000D` (CR); -or any Unicode character represented by either a -[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences). +or any Unicode character represented by an +[escape sequence](#escape-sequences). > ```scala > 'a' '\u0041' '\n' '\t' > ``` -Note that although Unicode conversion is done early during parsing, -so that Unicode characters are generally equivalent to their escaped -expansion in the source text, literal parsing accepts arbitrary -Unicode escapes, including the character literal `'\u000A'`, -which can also be written using the escape sequence `'\n'`. - ### String Literals ```ebnf stringLiteral ::= ‘"’ {stringElement} ‘"’ -stringElement ::= charNoDoubleQuoteOrNewline | UnicodeEscape | charEscapeSeq +stringElement ::= charNoDoubleQuoteOrNewline | escapeSeq ``` A string literal is a sequence of characters in double quotes. The characters can be any Unicode character except the double quote delimiter or `\u000A` (LF) or `\u000D` (CR); -or any Unicode character represented by either a -[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences). +or any Unicode character represented by an [escape sequence](#escape-sequences). If the string literal contains a double quote character, it must be escaped using `"\""`. @@ -457,8 +474,8 @@ triple quotes `""" ... """`. The sequence of characters is arbitrary, except that it may contain three or more consecutive quote characters only at the very end. 
Characters must not necessarily be printable; newlines or other -control characters are also permitted. Unicode escapes work as everywhere else, but none -of the escape sequences [here](#escape-sequences) are interpreted. +control characters are also permitted. [Escape sequences](#escape-sequences) are +not processed, except for Unicode escapes (this is deprecated since 2.13.2). > ```scala > """the present string @@ -493,14 +510,63 @@ of the escape sequences [here](#escape-sequences) are interpreted. > ``` > > Method `stripMargin` is defined in class -> [scala.collection.immutable.StringLike](http://www.scala-lang.org/api/current/#scala.collection.immutable.StringLike). -> Because there is a predefined -> [implicit conversion](06-expressions.html#implicit-conversions) from `String` to -> `StringLike`, the method is applicable to all strings. +> [scala.collection.StringOps](https://www.scala-lang.org/api/current/scala/collection/StringOps.html#stripMargin:String). + +#### Interpolated string + +```ebnf +interpolatedString ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘$$’ + | ‘$"’ + | ‘$’ alphaid + | ‘$’ BlockExpr +alphaid ::= upper idrest + | varid + +``` + +An interpolated string consists of an identifier starting with a letter immediately +followed by a string literal. There may be no whitespace characters or comments +between the leading identifier and the opening quote `"` of the string. +The string literal in an interpolated string can be standard (single quote) +or multi-line (triple quote). + +Inside an interpolated string none of the usual escape characters are interpreted +no matter whether the string literal is normal (enclosed in single quotes) or +multi-line (enclosed in triple quotes). Note that the sequence `\"` does not +close a normal string literal (enclosed in single quotes). 
+ +There are three forms of dollar sign escape. +The most general form encloses an expression in `${` and `}`, i.e. `${expr}`. +The expression enclosed in the braces that follow the leading `$` character is of +syntactical category BlockExpr. Hence, it can contain multiple statements, +and newlines are significant. Single ‘$’-signs are not permitted in isolation +in an interpolated string. A single ‘$’-sign can still be obtained by doubling the ‘$’ +character: ‘$$’. A single ‘"’-sign can be obtained by the sequence ‘\$"’. + +The simpler form consists of a ‘$’-sign followed by an identifier starting with +a letter and followed only by letters, digits, and underscore characters, e.g., `$id`. +The simpler form is expanded by putting braces around the identifier, +e.g., `$id` is equivalent to `${id}`. In the following, unless we explicitly state otherwise, +we assume that this expansion has already been performed. + +The expanded expression is type checked normally. Usually, `StringContext` will resolve to +the default implementation in the scala package, +but it could also be user-defined. Note that new interpolators can also be added through +implicit conversion of the built-in `scala.StringContext`. + +One could write an extension +```scala +implicit class StringInterpolation(s: StringContext) { + def id(args: Any*) = ??? +} +``` ### Escape Sequences -The following escape sequences are recognized in character and string literals. +The following character escape sequences are recognized in character and string literals. | charEscapeSeq | unicode | name | char | |---------------|----------|-----------------|--------| @@ -513,9 +579,8 @@ The following escape sequences are recognized in character and string literals. | `‘\‘ ‘'‘` | `\u0027` | single quote | `'` | | `‘\‘ ‘\‘` | `\u005c` | backslash | `\` | -A character with Unicode between 0 and 255 may also be represented by -an octal escape, i.e. a backslash `'\'` followed by a -sequence of up to three octal characters. 
+In addition, Unicode escape sequences of the form `\uxxxx`, where each `x` is a hex digit are +recognized in character and string literals. It is a compile time error if a backslash character in a character or string literal does not start a valid escape sequence. @@ -526,16 +591,7 @@ string literal does not start a valid escape sequence. symbolLiteral ::= ‘'’ plainid ``` -A symbol literal `'x` is a shorthand for the expression -`scala.Symbol("x")`. `Symbol` is a [case class](05-classes-and-objects.html#case-classes), -which is defined as follows. - -```scala -package scala -final case class Symbol private (name: String) { - override def toString: String = "'" + name -} -``` +A symbol literal `'x` is deprecated shorthand for the expression `scala.Symbol("x")`. The `apply` method of `Symbol`'s companion object caches weak references to `Symbol`s, thus ensuring that diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md index 76fb68427d39..213c2bee96df 100644 --- a/spec/02-identifiers-names-and-scopes.md +++ b/spec/02-identifiers-names-and-scopes.md @@ -7,22 +7,30 @@ chapter: 2 # Identifiers, Names and Scopes Names in Scala identify types, values, methods, and classes which are -collectively called _entities_. Names are introduced by local +collectively called _entities_. Names are introduced by [definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions), [inheritance](05-classes-and-objects.html#class-members), [import clauses](04-basic-declarations-and-definitions.html#import-clauses), or [package clauses](09-top-level-definitions.html#packagings) which are collectively called _bindings_. -Bindings of different kinds have a precedence defined on them: +Bindings of each kind are assigned a precedence which determines +whether one binding can shadow another: + 1. 
Definitions and declarations that are local, inherited, or made available by a package clause and also defined in the same compilation unit - as the reference, have highest precedence. -1. Explicit imports have next highest precedence. -1. Wildcard imports have next highest precedence. -1. Definitions made available by a package clause, but not also defined in the - same compilation unit as the reference, have lowest precedence. + as the reference to them, have the highest precedence. +1. Explicit imports have the next highest precedence. +1. Wildcard imports have the next highest precedence. +1. Bindings made available by a package clause, + but not also defined in the same compilation unit as the reference to them, + as well as bindings supplied by the compiler but not explicitly written in source code, + have the lowest precedence. There are two different name spaces, one for [types](03-types.html#types) and one for [terms](06-expressions.html#expressions). The same name may designate a @@ -46,32 +54,32 @@ locally { } ``` -A reference to an unqualified (type- or term-) identifier $x$ is bound +A reference to an unqualified (type- or term-) identifier ´x´ is bound by the unique binding, which -- defines an entity with name $x$ in the same namespace as the identifier, and -- shadows all other bindings that define entities with name $x$ in that +- defines an entity with name ´x´ in the same namespace as the identifier, and +- shadows all other bindings that define entities with name ´x´ in that namespace. -It is an error if no such binding exists. If $x$ is bound by an -import clause, then the simple name $x$ is taken to be equivalent to -the qualified name to which $x$ is mapped by the import clause. If $x$ -is bound by a definition or declaration, then $x$ refers to the entity -introduced by that binding. In that case, the type of $x$ is the type +It is an error if no such binding exists. 
If ´x´ is bound by an +import clause, then the simple name ´x´ is taken to be equivalent to +the qualified name to which ´x´ is mapped by the import clause. If ´x´ +is bound by a definition or declaration, then ´x´ refers to the entity +introduced by that binding. In that case, the type of ´x´ is the type of the referenced entity. -A reference to a qualified (type- or term-) identifier $e.x$ refers to -the member of the type $T$ of $e$ which has the name $x$ in the same -namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types). -The type of $e.x$ is the member type of the referenced entity in $T$. +A reference to a qualified (type- or term-) identifier ´e.x´ refers to +the member of the type ´T´ of ´e´ which has the name ´x´ in the same +namespace as the identifier. It is an error if ´T´ is not a [value type](03-types.html#value-types). +The type of ´e.x´ is the member type of the referenced entity in ´T´. Binding precedence implies that the way source is bundled in files affects name resolution. In particular, imported names have higher precedence than names, defined in other files, that might otherwise be visible because they are defined in either the current package or an enclosing package. -Note that a package definition is taken as lowest precedence, since packages -are open and can be defined across arbitrary compilation units. +Note that a binding introduced by a packaging is taken as lowest precedence, +since packages are open and can be defined across arbitrary compilation units. ```scala package util { @@ -83,21 +91,47 @@ package util { } ``` -As a convenience, multiple bindings of a type identifier to the same -underlying type is permitted. This is possible when import clauses introduce -a binding of a member type alias with the same binding precedence, typically -through wildcard imports. This allows redundant type aliases to be imported -without introducing an ambiguity. 
+The compiler supplies bindings from well-known packages and objects, called "root contexts". +The standard locations for these bindings are: + +1. The object `scala.Predef`. +1. The package `scala`. +1. The package `java.lang`. + +These bindings are taken as lowest precedence, so that they are always shadowed +by user code, which may contain competing imports and definitions. + +A binding is available from a root context if it would also be available +using an ordinary import clause. In particular, ordinary access restrictions apply. + +A binding from an earlier root context shadows a binding of the same name from a later one. +For example, `scala.Predef.String` shadows `java.lang.String`, for which it is a type alias. + +Multiple binding of a type identifier to the same underlying type is permitted. +This is possible when import clauses introduce a binding of a member type alias +with the same binding precedence, typically through wildcard imports. +This allows redundant type aliases to be imported without introducing an ambiguity. ```scala object X { type T = annotation.tailrec } object Y { type T = annotation.tailrec } object Z { - import X._, Y._, annotation.{tailrec => T} // OK, all T mean tailrec - @T def f: Int = { f ; 42 } // error, f is not tail recursive + import X._, Y._ // OK, both T mean tailrec + @T def f: Int = { f ; 42 } // the annotation worked: error, f is not tail recursive } ``` +Similarly, imported aliases of names introduced by package statements are permitted: + +```scala +// c.scala +package p { class C } + +// xy.scala +import p._ +package p { class X extends C } // not ambiguous (compiles without the import) +package q { class Y extends C } // requires the import +``` ###### Example Assume the following two definitions of objects named `X` in packages `p` and `q` @@ -120,24 +154,24 @@ precedences between them. 
package p { // `X' bound by package clause import Console._ // `println' bound by wildcard import object Y { - println(s"L4: \$X") // `X' refers to `p.X' here + println(s"L4: $X") // `X' refers to `p.X' here locally { import q._ // `X' bound by wildcard import - println(s"L7: \$X") // `X' refers to `q.X' here + println(s"L7: $X") // `X' refers to `q.X' here import X._ // `x' and `y' bound by wildcard import - println(s"L9: \$x") // `x' refers to `q.X.x' here + println(s"L9: $x") // `x' refers to `q.X.x' here locally { val x = 3 // `x' bound by local definition - println(s"L12: \$x") // `x' refers to constant `3' here + println(s"L12: $x") // `x' refers to constant `3' here locally { import q.X._ // `x' and `y' bound by wildcard import -// println(s"L15: \$x") // reference to `x' is ambiguous here +// println(s"L15: $x") // reference to `x' is ambiguous here import X.y // `y' bound by explicit import - println(s"L17: \$y") // `y' refers to `q.X.y' here + println(s"L17: $y") // `y' refers to `q.X.y' here locally { val x = "abc" // `x' bound by local definition import p.X._ // `x' and `y' bound by wildcard import -// println(s"L21: \$y") // reference to `y' is ambiguous here - println(s"L22: \$x") // `x' refers to string "abc" here +// println(s"L21: $y") // reference to `y' is ambiguous here + println(s"L22: $x") // `x' refers to string "abc" here }}}}}} ``` diff --git a/spec/03-types.md b/spec/03-types.md index 6d8ee3534ec7..3c78b33e571c 100644 --- a/spec/03-types.md +++ b/spec/03-types.md @@ -23,13 +23,14 @@ chapter: 3 | SimpleType ‘#’ id | StableId | Path ‘.’ ‘type’ + | Literal | ‘(’ Types ‘)’ TypeArgs ::= ‘[’ Types ‘]’ Types ::= Type {‘,’ Type} ``` -We distinguish between first-order types and type constructors, which -take type parameters and yield types. A subset of first-order types +We distinguish between proper types and type constructors, which +take type parameters and yield types. 
A subset of proper types called _value types_ represents sets of (first-class) values. Value types are either _concrete_ or _abstract_. @@ -54,7 +55,7 @@ Non-value types capture properties of identifiers that [are not values](#non-value-types). For example, a [type constructor](#type-constructors) does not directly specify a type of values. However, when a type constructor is applied to the correct type -arguments, it yields a first-order type, which may be a value type. +arguments, it yields a proper type, which may be a value type. Non-value types are expressed indirectly in Scala. E.g., a method type is described by writing down a method signature, which in itself is not a real @@ -80,17 +81,17 @@ and in that function form a central role in Scala's type system. A path is one of the following. - The empty path ε (which cannot be written explicitly in user programs). -- $C.$`this`, where $C$ references a class. - The path `this` is taken as a shorthand for $C.$`this` where - $C$ is the name of the class directly enclosing the reference. -- $p.x$ where $p$ is a path and $x$ is a stable member of $p$. +- ´C.´`this`, where ´C´ references a class. + The path `this` is taken as a shorthand for ´C.´`this` where + ´C´ is the name of the class directly enclosing the reference. +- ´p.x´ where ´p´ is a path and ´x´ is a stable member of ´p´. _Stable members_ are packages or members introduced by object definitions or by value definitions of [non-volatile types](#volatile-types). -- $C.$`super`$.x$ or $C.$`super`$[M].x$ - where $C$ references a class and $x$ references a - stable member of the super class or designated parent class $M$ of $C$. - The prefix `super` is taken as a shorthand for $C.$`super` where - $C$ is the name of the class directly enclosing the reference. +- ´C.´`super`´.x´ or ´C.´`super`´[M].x´ + where ´C´ references a class and ´x´ references a + stable member of the super class or designated parent class ´M´ of ´C´. 
+ The prefix `super` is taken as a shorthand for ´C.´`super` where + ´C´ is the name of the class directly enclosing the reference. A _stable identifier_ is a path which ends in an identifier. @@ -102,16 +103,39 @@ forms. ### Singleton Types ```ebnf -SimpleType ::= Path ‘.’ type +SimpleType ::= Path ‘.’ ‘type’ ``` -A _singleton type_ is of the form $p.$`type`, where $p$ is a -path pointing to a value expected to [conform](06-expressions.html#expression-typing) -to `scala.AnyRef`. The type denotes the set of values -consisting of `null` and the value denoted by $p$. +A _singleton type_ is of the form ´p.´`type`. Where ´p´ is a path pointing to a +value which [conforms](06-expressions.html#expression-typing) to +`scala.AnyRef`, the type denotes the set of values consisting of `null` and the +value denoted by ´p´ (i.e., the value ´v´ for which `v eq p`). Where the path +does not conform to `scala.AnyRef` the type denotes the set consisting of only +the value denoted by ´p´. -A _stable type_ is either a singleton type or a type which is -declared to be a subtype of trait `scala.Singleton`. + + +### Literal Types + +```ebnf +SimpleType ::= Literal +``` + +A literal type `lit` is a special kind of singleton type which denotes the +single literal value `lit`. Thus, the type ascription `1: 1` gives the most +precise type to the literal value `1`: the literal type `1`. + +At run time, an expression `e` is considered to have literal type `lit` if `e == lit`. +Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is +determined by evaluating `e == lit`. + +Literal types are available for all types for which there is dedicated syntax +except `Unit`. This includes the numeric types (other than `Byte` and `Short` +which don't currently have syntax), `Boolean`, `Char` and `String`. + +### Stable Types +A _stable type_ is a singleton type, a literal type, +or a type that is declared to be a subtype of trait `scala.Singleton`. 
### Type Projection @@ -119,12 +143,12 @@ declared to be a subtype of trait `scala.Singleton`. SimpleType ::= SimpleType ‘#’ id ``` -A _type projection_ $T$#$x$ references the type member named -$x$ of type $T$. +A _type projection_ ´T´#´x´ references the type member named +´x´ of type ´T´. @@ -137,11 +161,11 @@ SimpleType ::= StableId A _type designator_ refers to a named value type. It can be simple or qualified. All such type designators are shorthands for type projections. -Specifically, the unqualified type name $t$ where $t$ is bound in some -class, object, or package $C$ is taken as a shorthand for -$C.$`this.type#`$t$. If $t$ is -not bound in a class, object, or package, then $t$ is taken as a -shorthand for ε`.type#`$t$. +Specifically, the unqualified type name ´t´ where ´t´ is bound in some +class, object, or package ´C´ is taken as a shorthand for +´C.´`this.type#`´t´. If ´t´ is +not bound in a class, object, or package, then ´t´ is taken as a +shorthand for ε`.type#`´t´. A qualified type designator has the form `p.t` where `p` is a [path](#paths) and _t_ is a type name. Such a type designator is @@ -150,7 +174,7 @@ equivalent to the type projection `p.type#t`. ###### Example Some type designators and their expansions are listed below. We assume -a local type parameter $t$, a value `maintable` +a local type parameter ´t´, a value `maintable` with a type member `Node` and the standard class `scala.Int`, | Designator | Expansion | @@ -167,16 +191,16 @@ SimpleType ::= SimpleType TypeArgs TypeArgs ::= ‘[’ Types ‘]’ ``` -A _parameterized type_ $T[ T_1 , \ldots , T_n ]$ consists of a type -designator $T$ and type parameters $T_1 , \ldots , T_n$ where -$n \geq 1$. $T$ must refer to a type constructor which takes $n$ type -parameters $a_1 , \ldots , a_n$. +A _parameterized type_ ´T[ T_1 , \ldots , T_n ]´ consists of a type +designator ´T´ and type parameters ´T_1 , \ldots , T_n´ where +´n \geq 1´. 
´T´ must refer to a type constructor which takes ´n´ type +parameters ´a_1 , \ldots , a_n´. -Say the type parameters have lower bounds $L_1 , \ldots , L_n$ and -upper bounds $U_1, \ldots, U_n$. The parameterized type is +Say the type parameters have lower bounds ´L_1 , \ldots , L_n´ and +upper bounds ´U_1, \ldots, U_n´. The parameterized type is well-formed if each actual type parameter -_conforms to its bounds_, i.e. $\sigma L_i <: T_i <: \sigma U_i$ where $\sigma$ is the -substitution $[ a_1 := T_1 , \ldots , a_n := T_n ]$. +_conforms to its bounds_, i.e. ´\sigma L_i <: T_i <: \sigma U_i´ where ´\sigma´ is the +substitution ´[ a_1 := T_1 , \ldots , a_n := T_n ]´. ###### Example Parameterized Types @@ -192,7 +216,7 @@ class S[K <: String] { … } class G[M[ Z <: I ], I] { … } ``` -the following parameterized types are well formed: +the following parameterized types are well-formed: ```scala TreeMap[I, String] @@ -227,8 +251,8 @@ G[S, Int] // illegal: S constrains its parameter to SimpleType ::= ‘(’ Types ‘)’ ``` -A _tuple type_ $(T_1 , \ldots , T_n)$ is an alias for the -class `scala.Tuple$n$[$T_1$, … , $T_n$]`, where $n \geq 2$. +A _tuple type_ ´(T_1 , \ldots , T_n)´ is an alias for the +class `scala.Tuple´n´[´T_1´, … , ´T_n´]`, where ´n \geq 2´. Tuple classes are case classes whose fields can be accessed using selectors `_1` , … , `_n`. Their functionality is @@ -238,14 +262,14 @@ standard Scala library (they might also add other methods and implement other traits). 
```scala -case class Tuple$n$[+$T_1$, … , +$T_n$](_1: $T_1$, … , _n: $T_n$) -extends Product_n[$T_1$, … , $T_n$] +case class Tuple´_n´[+´T_1´, … , +´T_n´](_1: ´T_1´, … , _n: ´T_n´) +extends Product´_n´[´T_1´, … , ´T_n´] -trait Product_n[+$T_1$, … , +$T_n$] { - override def productArity = $n$ - def _1: $T_1$ +trait Product´_n´[+´T_1´, … , +´T_n´] { + override def productArity = ´n´ + def _1: ´T_1´ … - def _n: $T_n$ + def _n: ´T_n´ } ``` @@ -255,9 +279,9 @@ trait Product_n[+$T_1$, … , +$T_n$] { AnnotType ::= SimpleType {Annotation} ``` -An _annotated type_ $T$ $a_1, \ldots, a_n$ +An _annotated type_ ´T´ ´a_1, \ldots, a_n´ attaches [annotations](11-annotations.html#user-defined-annotations) -$a_1 , \ldots , a_n$ to the type $T$. +´a_1 , \ldots , a_n´ to the type ´T´. ###### Example @@ -278,12 +302,12 @@ RefineStat ::= Dcl | ``` -A _compound type_ $T_1$ `with` … `with` $T_n \\{ R \\}$ +A _compound type_ ´T_1´ `with` … `with` ´T_n \\{ R \\}´ represents objects with members as given in the component types -$T_1 , \ldots , T_n$ and the refinement $\\{ R \\}$. A refinement -$\\{ R \\}$ contains declarations and type definitions. +´T_1 , \ldots , T_n´ and the refinement ´\\{ R \\}´. A refinement +´\\{ R \\}´ contains declarations and type definitions. If a declaration or definition overrides a declaration or definition in -one of the component types $T_1 , \ldots , T_n$, the usual rules for +one of the component types ´T_1 , \ldots , T_n´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration or definition is said to be “structural” [^2]. @@ -299,11 +323,11 @@ definition within the refinement. This restriction does not apply to the method's result type. If no refinement is given, the empty refinement is implicitly added, -i.e. $T_1$ `with` … `with` $T_n$ is a shorthand for $T_1$ `with` … `with` $T_n \\{\\}$. +i.e. ´T_1´ `with` … `with` ´T_n´ is a shorthand for ´T_1´ `with` … `with` ´T_n \\{\\}´. 
A compound type may also consist of just a refinement -$\\{ R \\}$ with no preceding component types. Such a type is -equivalent to `AnyRef` $\\{ R \\}$. +´\\{ R \\}´ with no preceding component types. Such a type is +equivalent to `AnyRef` ´\\{ R \\}´. ###### Example @@ -343,10 +367,10 @@ a value `callsign` and a `fly` method. InfixType ::= CompoundType {id [nl] CompoundType} ``` -An _infix type_ $T_1$ `op` $T_2$ consists of an infix -operator `op` which gets applied to two type operands $T_1$ and -$T_2$. The type is equivalent to the type application -`op`$[T_1, T_2]$. The infix operator `op` may be an +An _infix type_ ´T_1´ `op` ´T_2´ consists of an infix +operator `op` which gets applied to two type operands ´T_1´ and +´T_2´. The type is equivalent to the type application +`op`´[T_1, T_2]´. The infix operator `op` may be an arbitrary identifier. All type infix operators have the same precedence; parentheses have to @@ -356,13 +380,13 @@ ending in a colon ‘:’ are right-associative; all other operators are left-associative. In a sequence of consecutive type infix operations -$t_0 \, \mathit{op} \, t_1 \, \mathit{op_2} \, \ldots \, \mathit{op_n} \, t_n$, -all operators $\mathit{op}\_1 , \ldots , \mathit{op}\_n$ must have the same +´t_0 \, \mathit{op_1} \, t_1 \, \mathit{op_2} \, \ldots \, \mathit{op_n} \, t_n´, +all operators ´\mathit{op}\_1 , \ldots , \mathit{op}\_n´ must have the same associativity. If they are all left-associative, the sequence is interpreted as -$(\ldots (t_0 \mathit{op_1} t_1) \mathit{op_2} \ldots) \mathit{op_n} t_n$, +´(\ldots (t_0 \mathit{op_1} t_1) \mathit{op_2} \ldots) \mathit{op_n} t_n´, otherwise it is interpreted as -$t_0 \mathit{op_1} (t_1 \mathit{op_2} ( \ldots \mathit{op_n} t_n) \ldots)$. +´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( \ldots \mathit{op_n} t_n) \ldots)´.
### Function Types @@ -372,27 +396,27 @@ FunctionArgs ::= InfixType | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ ``` -The type $(T_1 , \ldots , T_n) \Rightarrow U$ represents the set of function -values that take arguments of types $T1 , \ldots , Tn$ and yield -results of type $U$. In the case of exactly one argument type -$T \Rightarrow U$ is a shorthand for $(T) \Rightarrow U$. -An argument type of the form $\Rightarrow T$ -represents a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters) of type $T$. +The type ´(T_1 , \ldots , T_n) \Rightarrow U´ represents the set of function +values that take arguments of types ´T_1 , \ldots , T_n´ and yield +results of type ´U´. In the case of exactly one argument type +´T \Rightarrow U´ is a shorthand for ´(T) \Rightarrow U´. +An argument type of the form ´\Rightarrow T´ +represents a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters) of type ´T´. Function types associate to the right, e.g. -$S \Rightarrow T \Rightarrow U$ is the same as -$S \Rightarrow (T \Rightarrow U)$. +´S \Rightarrow T \Rightarrow U´ is the same as +´S \Rightarrow (T \Rightarrow U)´. Function types are shorthands for class types that define `apply` -functions. Specifically, the $n$-ary function type -$(T_1 , \ldots , T_n) \Rightarrow U$ is a shorthand for the class type -`Function$_n$[T1 , … , $T_n$, U]`. Such class -types are defined in the Scala library for $n$ between 0 and 22 as follows. +functions. Specifically, the ´n´-ary function type +´(T_1 , \ldots , T_n) \Rightarrow U´ is a shorthand for the class type +`Function´_n´[´T_1´ , … , ´T_n´, ´U´]`. Such class +types are defined in the Scala library for ´n´ between 0 and 22 as follows.
```scala package scala -trait Function_n[-T1 , … , -T$_n$, +R] { - def apply(x1: T1 , … , x$_n$: T$_n$): R +trait Function´_n´[-´T_1´ , … , -´T_n´, +´U´] { + def apply(´x_1´: ´T_1´ , … , ´x_n´: ´T_n´): ´U´ override def toString = "" } ``` @@ -410,63 +434,63 @@ ExistentialDcl ::= ‘type’ TypeDcl | ‘val’ ValDcl ``` -An _existential type_ has the form `$T$ forSome { $Q$ }` -where $Q$ is a sequence of +An _existential type_ has the form `´T´ forSome { ´Q´ }` +where ´Q´ is a sequence of [type declarations](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). Let -$t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n$ -be the types declared in $Q$ (any of the -type parameter sections `[ $\mathit{tps}_i$ ]` might be missing). -The scope of each type $t_i$ includes the type $T$ and the existential clause -$Q$. -The type variables $t_i$ are said to be _bound_ in the type -`$T$ forSome { $Q$ }`. -Type variables which occur in a type $T$ but which are not bound in $T$ are said -to be _free_ in $T$. - -A _type instance_ of `$T$ forSome { $Q$ }` -is a type $\sigma T$ where $\sigma$ is a substitution over $t_1 , \ldots , t_n$ -such that, for each $i$, $\sigma L_i <: \sigma t_i <: \sigma U_i$. -The set of values denoted by the existential type `$T$ forSome {$\,Q\,$}` +´t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n´ +be the types declared in ´Q´ (any of the +type parameter sections `[ ´\mathit{tps}_i´ ]` might be missing). +The scope of each type ´t_i´ includes the type ´T´ and the existential clause +´Q´. +The type variables ´t_i´ are said to be _bound_ in the type +`´T´ forSome { ´Q´ }`. +Type variables which occur in a type ´T´ but which are not bound in ´T´ are said +to be _free_ in ´T´. + +A _type instance_ of `´T´ forSome { ´Q´ }` +is a type ´\sigma T´ where ´\sigma´ is a substitution over ´t_1 , \ldots , t_n´ +such that, for each ´i´, ´\sigma L_i <: \sigma t_i <: \sigma U_i´. 
+The set of values denoted by the existential type `´T´ forSome {´\,Q\,´}` is the union of the set of values of all its type instances. -A _skolemization_ of `$T$ forSome { $Q$ }` is -a type instance $\sigma T$, where $\sigma$ is the substitution -$[t_1'/t_1 , \ldots , t_n'/t_n]$ and each $t_i'$ is a fresh abstract type -with lower bound $\sigma L_i$ and upper bound $\sigma U_i$. +A _skolemization_ of `´T´ forSome { ´Q´ }` is +a type instance ´\sigma T´, where ´\sigma´ is the substitution +´[t_1'/t_1 , \ldots , t_n'/t_n]´ and each ´t_i'´ is a fresh abstract type +with lower bound ´\sigma L_i´ and upper bound ´\sigma U_i´. #### Simplification Rules Existential types obey the following four equivalences: 1. Multiple for-clauses in an existential type can be merged. E.g., -`$T$ forSome { $Q$ } forSome { $Q'$ }` +`´T´ forSome { ´Q´ } forSome { ´Q'´ }` is equivalent to -`$T$ forSome { $Q$ ; $Q'$}`. +`´T´ forSome { ´Q´ ; ´Q'´}`. 1. Unused quantifications can be dropped. E.g., -`$T$ forSome { $Q$ ; $Q'$}` -where none of the types defined in $Q'$ are referred to by $T$ or $Q$, +`´T´ forSome { ´Q´ ; ´Q'´}` +where none of the types defined in ´Q'´ are referred to by ´T´ or ´Q´, is equivalent to -`$T$ forSome {$ Q $}`. +`´T´ forSome {´ Q ´}`. 1. An empty quantification can be dropped. E.g., -`$T$ forSome { }` is equivalent to $T$. -1. An existential type `$T$ forSome { $Q$ }` where $Q$ contains -a clause `type $t[\mathit{tps}] >: L <: U$` is equivalent -to the type `$T'$ forSome { $Q$ }` where $T'$ results from $T$ by replacing -every [covariant occurrence](04-basic-declarations-and-definitions.html#variance-annotations) of $t$ in $T$ by $U$ and by -replacing every contravariant occurrence of $t$ in $T$ by $L$. +`´T´ forSome { }` is equivalent to ´T´. +1. 
An existential type `´T´ forSome { ´Q´ }` where ´Q´ contains +a clause `type ´t[\mathit{tps}] >: L <: U´` is equivalent +to the type `´T'´ forSome { ´Q´ }` where ´T'´ results from ´T´ by replacing +every [covariant occurrence](04-basic-declarations-and-definitions.html#variance-annotations) of ´t´ in ´T´ by ´U´ and by +replacing every contravariant occurrence of ´t´ in ´T´ by ´L´. #### Existential Quantification over Values As a syntactic convenience, the bindings clause in an existential type may also contain -value declarations `val $x$: $T$`. -An existential type `$T$ forSome { $Q$; val $x$: $S\,$;$\,Q'$ }` +value declarations `val ´x´: ´T´`. +An existential type `´T´ forSome { ´Q´; val ´x´: ´S\,´;´\,Q'´ }` is treated as a shorthand for the type -`$T'$ forSome { $Q$; type $t$ <: $S$ with Singleton; $Q'$ }`, where $t$ is a -fresh type name and $T'$ results from $T$ by replacing every occurrence of -`$x$.type` with $t$. +`´T'´ forSome { ´Q´; type ´t´ <: ´S´ with Singleton; ´Q'´ }`, where ´t´ is a +fresh type name and ´T'´ results from ´T´ by replacing every occurrence of +`´x´.type` with ´t´. #### Placeholder Syntax for Existential Types @@ -475,26 +499,26 @@ WildcardType ::= ‘_’ TypeBounds ``` Scala supports a placeholder syntax for existential types. -A _wildcard type_ is of the form `_$\;$>:$\,L\,$<:$\,U$`. Both bound -clauses may be omitted. If a lower bound clause `>:$\,L$` is missing, -`>:$\,$scala.Nothing` -is assumed. If an upper bound clause `<:$\,U$` is missing, -`<:$\,$scala.Any` is assumed. A wildcard type is a shorthand for an +A _wildcard type_ is of the form `_´\;´>:´\,L\,´<:´\,U´`. Both bound +clauses may be omitted. If a lower bound clause `>:´\,L´` is missing, +`>:´\,´scala.Nothing` +is assumed. If an upper bound clause `<:´\,U´` is missing, +`<:´\,´scala.Any` is assumed. A wildcard type is a shorthand for an existentially quantified type variable, where the existential quantification is implicit. 
-A wildcard type must appear as type argument of a parameterized type. -Let $T = p.c[\mathit{targs},T,\mathit{targs}']$ be a parameterized type where -$\mathit{targs}, \mathit{targs}'$ may be empty and -$T$ is a wildcard type `_$\;$>:$\,L\,$<:$\,U$`. Then $T$ is equivalent to the +A wildcard type must appear as a type argument of a parameterized type. +Let ´T = p.c[\mathit{targs},T,\mathit{targs}']´ be a parameterized type where +´\mathit{targs}, \mathit{targs}'´ may be empty and +´T´ is a wildcard type `_´\;´>:´\,L\,´<:´\,U´`. Then ´T´ is equivalent to the existential type ```scala -$p.c[\mathit{targs},t,\mathit{targs}']$ forSome { type $t$ >: $L$ <: $U$ } +´p.c[\mathit{targs},t,\mathit{targs}']´ forSome { type ´t´ >: ´L´ <: ´U´ } ``` -where $t$ is some fresh type variable. +where ´t´ is some fresh type variable. Wildcard types may also appear as parts of [infix types](#infix-types) , [function types](#function-types), or [tuple types](#tuple-types). @@ -564,15 +588,15 @@ report as the internal types of defined identifiers. ### Method Types -A _method type_ is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$ -is a sequence of parameter names and types $(p_1:T_1 , \ldots , p_n:T_n)$ -for some $n \geq 0$ and $U$ is a (value or method) type. This type -represents named methods that take arguments named $p_1 , \ldots , p_n$ -of types $T_1 , \ldots , T_n$ -and that return a result of type $U$. +A _method type_ is denoted internally as ´(\mathit{Ps})U´, where ´(\mathit{Ps})´ +is a sequence of parameter names and types ´(p_1:T_1 , \ldots , p_n:T_n)´ +for some ´n \geq 0´ and ´U´ is a (value or method) type. This type +represents named methods that take arguments named ´p_1 , \ldots , p_n´ +of types ´T_1 , \ldots , T_n´ +and that return a result of type ´U´. -Method types associate to the right: $(\mathit{Ps}\_1)(\mathit{Ps}\_2)U$ is -treated as $(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)$. 
+Method types associate to the right: ´(\mathit{Ps}\_1)(\mathit{Ps}\_2)U´ is +treated as ´(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)´. A special case are types of methods without any parameters. They are written here `=> T`. Parameterless methods name expressions @@ -603,15 +627,15 @@ c: (Int) (String, String) String ### Polymorphic Method Types -A polymorphic method type is denoted internally as `[$\mathit{tps}\,$]$T$` where -`[$\mathit{tps}\,$]` is a type parameter section -`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]` -for some $n \geq 0$ and $T$ is a +A polymorphic method type is denoted internally as `[´\mathit{tps}\,´]´T´` where +`[´\mathit{tps}\,´]` is a type parameter section +`[´a_1´ >: ´L_1´ <: ´U_1 , \ldots , a_n´ >: ´L_n´ <: ´U_n´]` +for some ´n \geq 0´ and ´T´ is a (value or method) type. This type represents named methods that -take type arguments `$S_1 , \ldots , S_n$` which +take type arguments `´S_1 , \ldots , S_n´` which [conform](#parameterized-types) to the lower bounds -`$L_1 , \ldots , L_n$` and the upper bounds -`$U_1 , \ldots , U_n$` and that yield results of type $T$. +`´L_1 , \ldots , L_n´` and the upper bounds +`´U_1 , \ldots , U_n´` and that yield results of type ´T´. ###### Example @@ -632,7 +656,7 @@ union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A] ### Type Constructors A _type constructor_ is represented internally much like a polymorphic method type. 
-`[$\pm$ $a_1$ >: $L_1$ <: $U_1 , \ldots , \pm a_n$ >: $L_n$ <: $U_n$] $T$` +`[´\pm´ ´a_1´ >: ´L_1´ <: ´U_1 , \ldots , \pm a_n´ >: ´L_n´ <: ´U_n´] ´T´` represents a type that is expected by a [type constructor parameter](04-basic-declarations-and-definitions.html#type-parameters) or an [abstract type constructor binding](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) with @@ -657,124 +681,124 @@ anonymous type `[+X] Iterable[X]`, which may be passed to the More than one values or methods are defined in the same scope with the same name, we model -An overloaded type consisting of type alternatives $T_1 \commadots T_n (n \geq 2)$ is denoted internally $T_1 \overload \ldots \overload T_n$. +An overloaded type consisting of type alternatives ´T_1 \commadots T_n (n \geq 2)´ is denoted internally ´T_1 \overload \ldots \overload T_n´. ###### Example ```scala -def println: Unit -def println(s: String): Unit = $\ldots$ -def println(x: Float): Unit = $\ldots$ -def println(x: Float, width: Int): Unit = $\ldots$ -def println[A](x: A)(tostring: A => String): Unit = $\ldots$ +def println(): Unit +def println(s: String): Unit = ´\ldots´ +def println(x: Float): Unit = ´\ldots´ +def println(x: Float, width: Int): Unit = ´\ldots´ +def println[A](x: A)(tostring: A => String): Unit = ´\ldots´ ``` define a single function `println` which has an overloaded type. ``` -println: => Unit $\overload$ - (String) Unit $\overload$ - (Float) Unit $\overload$ - (Float, Int) Unit $\overload$ +println: () Unit ´\overload´ + (String) Unit ´\overload´ + (Float) Unit ´\overload´ + (Float, Int) Unit ´\overload´ [A] (A) (A => String) Unit ``` ###### Example ```scala -def f(x: T): T = $\ldots$ +def f(x: T): T = ´\ldots´ val f = 0 ``` -define a function `f} which has type `(x: T)T $\overload$ Int`. +define a function `f} which has type `(x: T)T ´\overload´ Int`. --> ## Base Types and Member Definitions Types of class members depend on the way the members are referenced. 
Central here are three notions, namely: -1. the notion of the set of base types of a type $T$, -1. the notion of a type $T$ in some class $C$ seen from some - prefix type $S$, -1. the notion of the set of member bindings of some type $T$. +1. the notion of the set of base types of a type ´T´, +1. the notion of a type ´T´ in some class ´C´ seen from some + prefix type ´S´, +1. the notion of the set of member bindings of some type ´T´. These notions are defined mutually recursively as follows. 1. The set of _base types_ of a type is a set of class types, given as follows. - - The base types of a class type $C$ with parents $T_1 , \ldots , T_n$ are - $C$ itself, as well as the base types of the compound type - `$T_1$ with … with $T_n$ { $R$ }`. + - The base types of a class type ´C´ with parents ´T_1 , \ldots , T_n´ are + ´C´ itself, as well as the base types of the compound type + `´T_1´ with … with ´T_n´ { ´R´ }`. - The base types of an aliased type are the base types of its alias. - The base types of an abstract type are the base types of its upper bound. - The base types of a parameterized type - `$C$[$T_1 , \ldots , T_n$]` are the base types - of type $C$, where every occurrence of a type parameter $a_i$ - of $C$ has been replaced by the corresponding parameter type $T_i$. - - The base types of a singleton type `$p$.type` are the base types of - the type of $p$. + `´C´[´T_1 , \ldots , T_n´]` are the base types + of type ´C´, where every occurrence of a type parameter ´a_i´ + of ´C´ has been replaced by the corresponding parameter type ´T_i´. + - The base types of a singleton type `´p´.type` are the base types of + the type of ´p´. - The base types of a compound type - `$T_1$ with $\ldots$ with $T_n$ { $R$ }` + `´T_1´ with ´\ldots´ with ´T_n´ { ´R´ }` are the _reduced union_ of the base - classes of all $T_i$'s. This means: - Let the multi-set $\mathscr{S}$ be the multi-set-union of the - base types of all $T_i$'s. 
- If $\mathscr{S}$ contains several type instances of the same class, say - `$S^i$#$C$[$T^i_1 , \ldots , T^i_n$]` $(i \in I)$, then + classes of all ´T_i´'s. This means: + Let the multi-set ´\mathscr{S}´ be the multi-set-union of the + base types of all ´T_i´'s. + If ´\mathscr{S}´ contains several type instances of the same class, say + `´S^i´#´C´[´T^i_1 , \ldots , T^i_n´]` ´(i \in I)´, then all those instances are replaced by one of them which conforms to all others. It is an error if no such instance exists. It follows that the reduced union, if it exists, produces a set of class types, where different types are instances of different classes. - - The base types of a type selection `$S$#$T$` are - determined as follows. If $T$ is an alias or abstract type, the - previous clauses apply. Otherwise, $T$ must be a (possibly - parameterized) class type, which is defined in some class $B$. Then - the base types of `$S$#$T$` are the base types of $T$ - in $B$ seen from the prefix type $S$. - - The base types of an existential type `$T$ forSome { $Q$ }` are - all types `$S$ forSome { $Q$ }` where $S$ is a base type of $T$. - -1. The notion of a type $T$ _in class $C$ seen from some prefix type $S$_ - makes sense only if the prefix type $S$ - has a type instance of class $C$ as a base type, say - `$S'$#$C$[$T_1 , \ldots , T_n$]`. Then we define as follows. - - If `$S$ = $\epsilon$.type`, then $T$ in $C$ seen from $S$ is - $T$ itself. - - Otherwise, if $S$ is an existential type `$S'$ forSome { $Q$ }`, and - $T$ in $C$ seen from $S'$ is $T'$, - then $T$ in $C$ seen from $S$ is `$T'$ forSome {$\,Q\,$}`. - - Otherwise, if $T$ is the $i$'th type parameter of some class $D$, then - - If $S$ has a base type `$D$[$U_1 , \ldots , U_n$]`, for some type - parameters `[$U_1 , \ldots , U_n$]`, then $T$ in $C$ seen from $S$ - is $U_i$. - - Otherwise, if $C$ is defined in a class $C'$, then - $T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$. 
- - Otherwise, if $C$ is not defined in another class, then - $T$ in $C$ seen from $S$ is $T$ itself. - - Otherwise, if $T$ is the singleton type `$D$.this.type` for some class $D$ + - The base types of a type selection `´S´#´T´` are + determined as follows. If ´T´ is an alias or abstract type, the + previous clauses apply. Otherwise, ´T´ must be a (possibly + parameterized) class type, which is defined in some class ´B´. Then + the base types of `´S´#´T´` are the base types of ´T´ + in ´B´ seen from the prefix type ´S´. + - The base types of an existential type `´T´ forSome { ´Q´ }` are + all types `´S´ forSome { ´Q´ }` where ´S´ is a base type of ´T´. + +1. The notion of a type ´T´ _in class ´C´ seen from some prefix type ´S´_ + makes sense only if the prefix type ´S´ + has a type instance of class ´C´ as a base type, say + `´S'´#´C´[´T_1 , \ldots , T_n´]`. Then we define as follows. + - If `´S´ = ´\epsilon´.type`, then ´T´ in ´C´ seen from ´S´ is + ´T´ itself. + - Otherwise, if ´S´ is an existential type `´S'´ forSome { ´Q´ }`, and + ´T´ in ´C´ seen from ´S'´ is ´T'´, + then ´T´ in ´C´ seen from ´S´ is `´T'´ forSome {´\,Q\,´}`. + - Otherwise, if ´T´ is the ´i´'th type parameter of some class ´D´, then + - If ´S´ has a base type `´D´[´U_1 , \ldots , U_n´]`, for some type + parameters `[´U_1 , \ldots , U_n´]`, then ´T´ in ´C´ seen from ´S´ + is ´U_i´. + - Otherwise, if ´C´ is defined in a class ´C'´, then + ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´. + - Otherwise, if ´C´ is not defined in another class, then + ´T´ in ´C´ seen from ´S´ is ´T´ itself. + - Otherwise, if ´T´ is the singleton type `´D´.this.type` for some class ´D´ then - - If $D$ is a subclass of $C$ and $S$ has a type instance of class $D$ - among its base types, then $T$ in $C$ seen from $S$ is $S$. - - Otherwise, if $C$ is defined in a class $C'$, then - $T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$. 
- - Otherwise, if $C$ is not defined in another class, then - $T$ in $C$ seen from $S$ is $T$ itself. - - If $T$ is some other type, then the described mapping is performed + - If ´D´ is a subclass of ´C´ and ´S´ has a type instance of class ´D´ + among its base types, then ´T´ in ´C´ seen from ´S´ is ´S´. + - Otherwise, if ´C´ is defined in a class ´C'´, then + ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´. + - Otherwise, if ´C´ is not defined in another class, then + ´T´ in ´C´ seen from ´S´ is ´T´ itself. + - If ´T´ is some other type, then the described mapping is performed to all its type components. - If $T$ is a possibly parameterized class type, where $T$'s class - is defined in some other class $D$, and $S$ is some prefix type, - then we use "$T$ seen from $S$" as a shorthand for - "$T$ in $D$ seen from $S$". + If ´T´ is a possibly parameterized class type, where ´T´'s class + is defined in some other class ´D´, and ´S´ is some prefix type, + then we use "´T´ seen from ´S´" as a shorthand for + "´T´ in ´D´ seen from ´S´". -1. The _member bindings_ of a type $T$ are - 1. all bindings $d$ such that there exists a type instance of some class $C$ among the base types of $T$ - and there exists a definition or declaration $d'$ in $C$ - such that $d$ results from $d'$ by replacing every - type $T'$ in $d'$ by $T'$ in $C$ seen from $T$, and +1. The _member bindings_ of a type ´T´ are + 1. all bindings ´d´ such that there exists a type instance of some class ´C´ among the base types of ´T´ + and there exists a definition or declaration ´d'´ in ´C´ + such that ´d´ results from ´d'´ by replacing every + type ´T'´ in ´d'´ by ´T'´ in ´C´ seen from ´T´, and 2. all bindings of the type's [refinement](#compound-types), if it has one. The _definition_ of a type projection `S#T` is the member - binding $d_T$ of the type `T` in `S`. In that case, we also say - that `S#T` _is defined by_ $d_T$. + binding ´d_T´ of the type `T` in `S`. 
In that case, we also say + that `S#T` _is defined by_ ´d_T´. ## Relations between types @@ -782,18 +806,18 @@ We define the following relations between types. | Name | Symbolically | Interpretation | |------------------|----------------|----------------------------------------------------| -| Equivalence | $T \equiv U$ | $T$ and $U$ are interchangeable in all contexts. | -| Conformance | $T <: U$ | Type $T$ conforms to ("is a subtype of") type $U$. | -| Weak Conformance | $T <:_w U$ | Augments conformance for primitive numeric types. | -| Compatibility | | Type $T$ conforms to type $U$ after conversions. | +| Equivalence | ´T \equiv U´ | ´T´ and ´U´ are interchangeable in all contexts. | +| Conformance | ´T <: U´ | Type ´T´ conforms to ("is a subtype of") type ´U´. | +| Weak Conformance | ´T <:_w U´ | Augments conformance for primitive numeric types. | +| Compatibility | | Type ´T´ conforms to type ´U´ after conversions. | ### Equivalence -Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] such that the following holds: +Equivalence ´(\equiv)´ between types is the smallest congruence [^congruence] such that the following holds: -- If $t$ is defined by a type alias `type $t$ = $T$`, then $t$ is equivalent to $T$. -- If a path $p$ has a singleton type `$q$.type`, then `$p$.type $\equiv q$.type`. -- If $O$ is defined by an object definition, and $p$ is a path consisting only of package or object selectors and ending in $O$, then `$O$.this.type $\equiv p$.type`. +- If ´t´ is defined by a type alias `type ´t´ = ´T´`, then ´t´ is equivalent to ´T´. +- If a path ´p´ has a singleton type `´q´.type`, then `´p´.type ´\equiv q´.type`. +- If ´O´ is defined by an object definition, and ´p´ is a path consisting only of package or object selectors and ending in ´O´, then `´O´.this.type ´\equiv p´.type`. 
- Two [compound types](#compound-types) are equivalent if the sequences of their component are pairwise equivalent, and occur in the same order, and their refinements are equivalent. Two refinements are equivalent if they @@ -824,91 +848,92 @@ Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] su ### Conformance -The conformance relation $(<:)$ is the smallest transitive relation that satisfies the following conditions. +The conformance relation ´(<:)´ is the smallest transitive relation that satisfies the following conditions. + -- Conformance includes equivalence. If $T \equiv U$ then $T <: U$. -- For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`. -- For every type constructor $T$ (with any number of type parameters), `scala.Nothing <: $T$ <: scala.Any`. -- For every value type $T$, `scala.Null <: $T$` unless `$T$ <: scala.AnyVal`. -- A type variable or abstract type $t$ conforms to its upper bound and - its lower bound conforms to $t$. +- Conformance includes equivalence. If ´T \equiv U´ then ´T <: U´. +- For every value type ´T´, `scala.Nothing <: ´T´ <: scala.Any`. +- For every type constructor ´T´ (with any number of type parameters), `scala.Nothing <: ´T´ <: scala.Any`. +- For every value type ´T´, `scala.Null <: ´T´` unless `´T´ <: scala.AnyVal`. +- A type variable or abstract type ´t´ conforms to its upper bound and + its lower bound conforms to ´t´. - A class type or parameterized type conforms to any of its base-types. -- A singleton type `$p$.type` conforms to the type of the path $p$. -- A singleton type `$p$.type` conforms to the type `scala.Singleton`. -- A type projection `$T$#$t$` conforms to `$U$#$t$` if $T$ conforms to $U$. -- A parameterized type `$T$[$T_1$ , … , $T_n$]` conforms to - `$T$[$U_1$ , … , $U_n$]` if - the following three conditions hold for $i \in \{ 1 , \ldots , n \}$: - 1. If the $i$'th type parameter of $T$ is declared covariant, then - $T_i <: U_i$. - 1. 
If the $i$'th type parameter of $T$ is declared contravariant, then - $U_i <: T_i$. - 1. If the $i$'th type parameter of $T$ is declared neither covariant - nor contravariant, then $U_i \equiv T_i$. -- A compound type `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` conforms to - each of its component types $T_i$. -- If $T <: U_i$ for $i \in \{ 1 , \ldots , n \}$ and for every - binding $d$ of a type or value $x$ in $R$ there exists a member - binding of $x$ in $T$ which subsumes $d$, then $T$ conforms to the - compound type `$U_1$ with $\ldots$ with $U_n$ {$R\,$}`. -- The existential type `$T$ forSome {$\,Q\,$}` conforms to - $U$ if its [skolemization](#existential-types) - conforms to $U$. -- The type $T$ conforms to the existential type `$U$ forSome {$\,Q\,$}` - if $T$ conforms to one of the [type instances](#existential-types) - of `$U$ forSome {$\,Q\,$}`. +- A singleton type `´p´.type` conforms to the type of the path ´p´. +- A singleton type `´p´.type` conforms to the type `scala.Singleton`. +- A type projection `´T´#´t´` conforms to `´U´#´t´` if ´T´ conforms to ´U´. +- A parameterized type `´T´[´T_1´ , … , ´T_n´]` conforms to + `´T´[´U_1´ , … , ´U_n´]` if + the following three conditions hold for ´i \in \{ 1 , \ldots , n \}´: + 1. If the ´i´'th type parameter of ´T´ is declared covariant, then + ´T_i <: U_i´. + 1. If the ´i´'th type parameter of ´T´ is declared contravariant, then + ´U_i <: T_i´. + 1. If the ´i´'th type parameter of ´T´ is declared neither covariant + nor contravariant, then ´U_i \equiv T_i´. +- A compound type `´T_1´ with ´\ldots´ with ´T_n´ {´R\,´}` conforms to + each of its component types ´T_i´. +- If ´T <: U_i´ for ´i \in \{ 1 , \ldots , n \}´ and for every + binding ´d´ of a type or value ´x´ in ´R´ there exists a member + binding of ´x´ in ´T´ which subsumes ´d´, then ´T´ conforms to the + compound type `´U_1´ with ´\ldots´ with ´U_n´ {´R\,´}`. 
+- The existential type `´T´ forSome {´\,Q\,´}` conforms to + ´U´ if its [skolemization](#existential-types) + conforms to ´U´. +- The type ´T´ conforms to the existential type `´U´ forSome {´\,Q\,´}` + if ´T´ conforms to one of the [type instances](#existential-types) + of `´U´ forSome {´\,Q\,´}`. - If - $T_i \equiv T_i'$ for $i \in \{ 1 , \ldots , n\}$ and $U$ conforms to $U'$ - then the method type $(p_1:T_1 , \ldots , p_n:T_n) U$ conforms to - $(p_1':T_1' , \ldots , p_n':T_n') U'$. + ´T_i \equiv T_i'´ for ´i \in \{ 1 , \ldots , n\}´ and ´U´ conforms to ´U'´ + then the method type ´(p_1:T_1 , \ldots , p_n:T_n) U´ conforms to + ´(p_1':T_1' , \ldots , p_n':T_n') U'´. - The polymorphic type - $[a_1 >: L_1 <: U_1 , \ldots , a_n >: L_n <: U_n] T$ conforms to the + ´[a_1 >: L_1 <: U_1 , \ldots , a_n >: L_n <: U_n] T´ conforms to the polymorphic type - $[a_1 >: L_1' <: U_1' , \ldots , a_n >: L_n' <: U_n'] T'$ if, assuming - $L_1' <: a_1 <: U_1' , \ldots , L_n' <: a_n <: U_n'$ - one has $T <: T'$ and $L_i <: L_i'$ and $U_i' <: U_i$ - for $i \in \{ 1 , \ldots , n \}$. -- Type constructors $T$ and $T'$ follow a similar discipline. We characterize - $T$ and $T'$ by their type parameter clauses - $[a_1 , \ldots , a_n]$ and - $[a_1' , \ldots , a_n']$, where an $a_i$ or $a_i'$ may include a variance - annotation, a higher-order type parameter clause, and bounds. Then, $T$ - conforms to $T'$ if any list $[t_1 , \ldots , t_n]$ -- with declared + ´[a_1 >: L_1' <: U_1' , \ldots , a_n >: L_n' <: U_n'] T'´ if, assuming + ´L_1' <: a_1 <: U_1' , \ldots , L_n' <: a_n <: U_n'´ + one has ´T <: T'´ and ´L_i <: L_i'´ and ´U_i' <: U_i´ + for ´i \in \{ 1 , \ldots , n \}´. +- Type constructors ´T´ and ´T'´ follow a similar discipline. We characterize + ´T´ and ´T'´ by their type parameter clauses + ´[a_1 , \ldots , a_n]´ and + ´[a_1' , \ldots , a_n']´, where an ´a_i´ or ´a_i'´ may include a variance + annotation, a higher-order type parameter clause, and bounds. 
Then, ´T´ + conforms to ´T'´ if any list ´[t_1 , \ldots , t_n]´ -- with declared variances, bounds and higher-order type parameter clauses -- of valid type - arguments for $T'$ is also a valid list of type arguments for $T$ and - $T[t_1 , \ldots , t_n] <: T'[t_1 , \ldots , t_n]$. Note that this entails + arguments for ´T'´ is also a valid list of type arguments for ´T´ and + ´T[t_1 , \ldots , t_n] <: T'[t_1 , \ldots , t_n]´. Note that this entails that: - - The bounds on $a_i$ must be weaker than the corresponding bounds declared - for $a'_i$. - - The variance of $a_i$ must match the variance of $a'_i$, where covariance + - The bounds on ´a_i´ must be weaker than the corresponding bounds declared + for ´a'_i´. + - The variance of ´a_i´ must match the variance of ´a'_i´, where covariance matches covariance, contravariance matches contravariance and any variance matches invariance. - Recursively, these restrictions apply to the corresponding higher-order - type parameter clauses of $a_i$ and $a'_i$. + type parameter clauses of ´a_i´ and ´a'_i´. -A declaration or definition in some compound type of class type $C$ +A declaration or definition in some compound type of class type ´C´ _subsumes_ another declaration of the same name in some compound type or class -type $C'$, if one of the following holds. - -- A value declaration or definition that defines a name $x$ with type $T$ - subsumes a value or method declaration that defines $x$ with type $T'$, provided - $T <: T'$. -- A method declaration or definition that defines a name $x$ with type $T$ - subsumes a method declaration that defines $x$ with type $T'$, provided - $T <: T'$. +type ´C'´, if one of the following holds. + +- A value declaration or definition that defines a name ´x´ with type ´T´ + subsumes a value or method declaration that defines ´x´ with type ´T'´, provided + ´T <: T'´. 
+- A method declaration or definition that defines a name ´x´ with type ´T´ + subsumes a method declaration that defines ´x´ with type ´T'´, provided + ´T <: T'´. - A type alias - `type $t$[$T_1$ , … , $T_n$] = $T$` subsumes a type alias - `type $t$[$T_1$ , … , $T_n$] = $T'$` if $T \equiv T'$. -- A type declaration `type $t$[$T_1$ , … , $T_n$] >: $L$ <: $U$` subsumes - a type declaration `type $t$[$T_1$ , … , $T_n$] >: $L'$ <: $U'$` if - $L' <: L$ and $U <: U'$. -- A type or class definition that binds a type name $t$ subsumes an abstract - type declaration `type t[$T_1$ , … , $T_n$] >: L <: U` if - $L <: t <: U$. + `type ´t´[´T_1´ , … , ´T_n´] = ´T´` subsumes a type alias + `type ´t´[´T_1´ , … , ´T_n´] = ´T'´` if ´T \equiv T'´. +- A type declaration `type ´t´[´T_1´ , … , ´T_n´] >: ´L´ <: ´U´` subsumes + a type declaration `type ´t´[´T_1´ , … , ´T_n´] >: ´L'´ <: ´U'´` if + ´L' <: L´ and ´U <: U'´. +- A type or class definition that binds a type name ´t´ subsumes an abstract + type declaration `type t[´T_1´ , … , ´T_n´] >: L <: U` if + ´L <: t <: U´. #### Least upper bounds and greatest lower bounds -The $(<:)$ relation forms pre-order between types, i.e. it is transitive and reflexive. +The ´(<:)´ relation forms pre-order between types, i.e. it is transitive and reflexive. This allows us to define _least upper bounds_ and _greatest lower bounds_ of a set of types in terms of that order. The least upper bound or greatest lower bound of a set of types does not always exist. For instance, consider the class definitions: @@ -941,26 +966,26 @@ free to pick any one of them. ### Weak Conformance In some situations Scala uses a more general conformance relation. -A type $S$ _weakly conforms_ to a type $T$, written $S <:_w T$, -if $S <: T$ or both $S$ and $T$ are primitive number types and $S$ precedes $T$ in the following ordering. 
+A type ´S´ _weakly conforms_ to a type ´T´, written ´S <:_w T´, +if ´S <: T´ or both ´S´ and ´T´ are primitive number types and ´S´ precedes ´T´ in the following ordering. ```scala -Byte $<:_w$ Short -Short $<:_w$ Int -Char $<:_w$ Int -Int $<:_w$ Long -Long $<:_w$ Float -Float $<:_w$ Double +Byte ´<:_w´ Short +Short ´<:_w´ Int +Char ´<:_w´ Int +Int ´<:_w´ Long +Long ´<:_w´ Float +Float ´<:_w´ Double ``` A _weak least upper bound_ is a least upper bound with respect to weak conformance. ### Compatibility -A type $T$ is _compatible_ to a type $U$ if $T$ (or its corresponding function type) [weakly conforms](#weak-conformance) to $U$ -after applying [eta-expansion](06-expressions.html#eta-expansion). If $T$ is a method type, it's converted to the corresponding function type. If the types do not weakly conform, the following alternatives are checked in order: - - [view application](07-implicits.html#views): there's an implicit view from $T$ to $U$; - - dropping by-name modifiers: if $U$ is of the shape `$=> U'$` (and $T$ is not), `$T <:_w U'$`; - - SAM conversion: if $T$ corresponds to a function type, and $U$ declares a single abstract method whose type [corresponds](06-expressions.html#sam-conversion) to the function type $U'$, `$T <:_w U'$`. +A type ´T´ is _compatible_ to a type ´U´ if ´T´ (or its corresponding function type) [weakly conforms](#weak-conformance) to ´U´ +after applying [eta-expansion](06-expressions.html#eta-expansion). If ´T´ is a method type, it's converted to the corresponding function type. 
If the types do not weakly conform, the following alternatives are checked in order: + - [view application](07-implicits.html#views): there's an implicit view from ´T´ to ´U´; + - dropping by-name modifiers: if ´U´ is of the shape `´=> U'´` (and ´T´ is not), `´T <:_w U'´`; + - SAM conversion: if ´T´ corresponds to a function type, and ´U´ declares a single abstract method whose type [corresponds](06-expressions.html#sam-conversion) to the function type ´U'´, `´T <:_w U'´`. @@ -514,14 +514,14 @@ abstract class P[+A, +B] { ``` With this variance annotation, type instances -of $P$ subtype covariantly with respect to their arguments. +of ´P´ subtype covariantly with respect to their arguments. For instance, ```scala P[IOException, String] <: P[Throwable, AnyRef] ``` -If the members of $P$ are mutable variables, +If the members of ´P´ are mutable variables, the same variance annotation becomes illegal. ```scala @@ -543,7 +543,7 @@ abstract class R[+A, +B](x: A, y: B) { ###### Example -The following variance annotation is illegal, since $a$ appears +The following variance annotation is illegal, since ´a´ appears in contravariant position in the parameter of `append`: ```scala @@ -595,14 +595,14 @@ ParamType ::= Type | Type ‘*’ ``` -A _function declaration_ has the form `def $f\,\mathit{psig}$: $T$`, where -$f$ is the function's name, $\mathit{psig}$ is its parameter -signature and $T$ is its result type. A _function definition_ -`def $f\,\mathit{psig}$: $T$ = $e$` also includes a _function body_ $e$, +A _function declaration_ has the form `def ´f\,\mathit{psig}´: ´T´`, where +´f´ is the function's name, ´\mathit{psig}´ is its parameter +signature and ´T´ is its result type. A _function definition_ +`def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _function body_ ´e´, i.e. an expression which defines the function's result. 
A parameter -signature consists of an optional type parameter clause `[$\mathit{tps}\,$]`, +signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses -`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$)`. Such a declaration or definition +`(´\mathit{ps}_1´)´\ldots´(´\mathit{ps}_n´)`. Such a declaration or definition introduces a value with a (possibly polymorphic) method type whose parameter types and result type are as given. @@ -612,31 +612,31 @@ result type, if one is given. If the function definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the function body. -A _type parameter clause_ $\mathit{tps}$ consists of one or more +A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type declarations](#type-declarations-and-type-aliases), which introduce type parameters, possibly with bounds. The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the function body, if it is present. -A _value parameter clause_ $\mathit{ps}$ consists of zero or more formal -parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value +A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal +parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types. ### Default Arguments Each value parameter declaration may optionally define a default argument. The default argument -expression $e$ is type-checked with an expected type $T'$ obtained -by replacing all occurrences of the function's type parameters in $T$ by +expression ´e´ is type-checked with an expected type ´T'´ obtained +by replacing all occurrences of the function's type parameters in ´T´ by the undefined type. 
-For every parameter $p_{i,j}$ with a default argument a method named -`$f\$$default$\$$n` is generated which computes the default argument -expression. Here, $n$ denotes the parameter's position in the method +For every parameter ´p_{i,j}´ with a default argument a method named +`´f\$´default´\$´n` is generated which computes the default argument +expression. Here, ´n´ denotes the parameter's position in the method declaration. These methods are parametrized by the type parameter clause -`[$\mathit{tps}\,$]` and all value parameter clauses -`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$. -The `$f\$$default$\$$n` methods are inaccessible for user programs. +`[´\mathit{tps}\,´]` and all value parameter clauses +`(´\mathit{ps}_1´)´\ldots´(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´. +The `´f\$´default´\$´n` methods are inaccessible for user programs. ###### Example In the method @@ -651,11 +651,11 @@ and `T` is instantiated to `Int`. The methods computing the default arguments have the form: ```scala -def compare$\$$default$\$$1[T]: Int = 0 -def compare$\$$default$\$$2[T](a: T): T = a +def compare´\$´default´\$´1[T]: Int = 0 +def compare´\$´default´\$´2[T](a: T): T = a ``` -The scope of a formal value parameter name $x$ comprises all subsequent +The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the function body, if they are given. Both type parameter names and value parameter names must be pairwise distinct. @@ -685,17 +685,15 @@ ParamType ::= ‘=>’ Type ``` The type of a value parameter may be prefixed by `=>`, e.g. -`$x$: => $T$`. The type of such a parameter is then the -parameterless method type `=> $T$`. This indicates that the +`´x´: => ´T´`. The type of such a parameter is then the +parameterless method type `=> ´T´`. 
This indicates that the corresponding argument is not evaluated at the point of function application, but instead is evaluated at each use within the function. That is, the argument is evaluated using _call-by-name_. The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case -classes for which a `val` prefix is implicitly generated. The -by-name modifier is also disallowed for -[implicit parameters](07-implicits.html#implicit-parameters). +classes for which a `val` prefix is implicitly generated. ###### Example The declaration @@ -714,22 +712,22 @@ ParamType ::= Type ‘*’ ``` The last value parameter of a parameter section may be suffixed by -`'*'`, e.g. `(..., $x$:$T$*)`. The type of such a +`'*'`, e.g. `(..., ´x´:´T´*)`. The type of such a _repeated_ parameter inside the method is then the sequence type -`scala.Seq[$T$]`. Methods with repeated parameters -`$T$*` take a variable number of arguments of type $T$. -That is, if a method $m$ with type -`($p_1:T_1 , \ldots , p_n:T_n, p_s:S$*)$U$` is applied to arguments -$(e_1 , \ldots , e_k)$ where $k \geq n$, then $m$ is taken in that application -to have type $(p_1:T_1 , \ldots , p_n:T_n, p_s:S , \ldots , p_{s'}S)U$, with -$k - n$ occurrences of type -$S$ where any parameter names beyond $p_s$ are fresh. The only exception to +`scala.Seq[´T´]`. Methods with repeated parameters +`´T´*` take a variable number of arguments of type ´T´. +That is, if a method ´m´ with type +`(´p_1:T_1 , \ldots , p_n:T_n, p_s:S´*)´U´` is applied to arguments +´(e_1 , \ldots , e_k)´ where ´k \geq n´, then ´m´ is taken in that application +to have type ´(p_1:T_1 , \ldots , p_n:T_n, p_s:S , \ldots , p_{s'}:S)U´, with +´k - n´ occurrences of type +´S´ where any parameter names beyond ´p_s´ are fresh. The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type -annotation. 
If $m$ above is applied to arguments -`($e_1 , \ldots , e_n, e'$: _*)`, then the type of $m$ in +annotation. If ´m´ above is applied to arguments +`(´e_1 , \ldots , e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be -`($p_1:T_1, \ldots , p_n:T_n,p_{s}:$scala.Seq[$S$])`. +`(´p_1:T_1, \ldots , p_n:T_n,p_{s}:´scala.Seq[´S´])`. It is not allowed to define any default arguments in a parameter section with a repeated parameter. @@ -785,13 +783,13 @@ Special syntax exists for procedures, i.e. functions that return the `Unit` value `()`. A _procedure declaration_ is a function declaration where the result type is omitted. The result type is then implicitly completed to the -`Unit` type. E.g., `def $f$($\mathit{ps}$)` is equivalent to -`def $f$($\mathit{ps}$): Unit`. +`Unit` type. E.g., `def ´f´(´\mathit{ps}´)` is equivalent to +`def ´f´(´\mathit{ps}´): Unit`. A _procedure definition_ is a function definition where the result type and the equals sign are omitted; its defining expression must be a block. -E.g., `def $f$($\mathit{ps}$) {$\mathit{stats}$}` is equivalent to -`def $f$($\mathit{ps}$): Unit = {$\mathit{stats}$}`. +E.g., `def ´f´(´\mathit{ps}´) {´\mathit{stats}´}` is equivalent to +`def ´f´(´\mathit{ps}´): Unit = {´\mathit{stats}´}`. ###### Example Here is a declaration and a definition of a procedure named `write`: @@ -818,14 +816,14 @@ object Terminal extends Writer { ### Method Return Type Inference -A class member definition $m$ that overrides some other function $m'$ -in a base class of $C$ may leave out the return type, even if it is -recursive. In this case, the return type $R'$ of the overridden -function $m'$, seen as a member of $C$, is taken as the return type of -$m$ for each recursive invocation of $m$. That way, a type $R$ for the -right-hand side of $m$ can be determined, which is then taken as the -return type of $m$. Note that $R$ may be different from $R'$, as long -as $R$ conforms to $R'$. 
+A class member definition ´m´ that overrides some other function ´m'´ +in a base class of ´C´ may leave out the return type, even if it is +recursive. In this case, the return type ´R'´ of the overridden +function ´m'´, seen as a member of ´C´, is taken as the return type of +´m´ for each recursive invocation of ´m´. That way, a type ´R´ for the +right-hand side of ´m´ can be determined, which is then taken as the +return type of ´m´. Note that ´R´ may be different from ´R'´, as long +as ´R´ conforms to ´R'´. ###### Example Assume the following definitions: @@ -842,18 +840,37 @@ class C extends I { Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive. +### Tail-Recursive Call Elimination + +Method definitions which contain self-recursive invocations in tail position +are optimized for stack safety. Self-invocations which are the last operation +before returning from the method are replaced with jumps to the beginning of +the method, much as in a while loop. Sibling-invocations, in which a method +calls itself but with a different instance as receiver, are also optimized. + +This transform is performed automatically by the compiler whenever possible. +A method definition bearing the annotation, `scala.annotation.tailrec`, +will fail to compile if the transform is not possible. (The annotation is intended +for cases where deoptimization would likely result in a stack overflow.) + +```scala +@annotation.tailrec +def sum(xs: List[Int], acc: Int): Int = + xs match { case h :: t => sum(t, acc + h) case _ => acc } +``` + -**Inheriting from Java Types** A template may have a Java class as its superclass and Java interfaces as its -mixins. +**Inheriting from Java Types** -**Template Evaluation** Consider a template `$sc$ with $mt_1$ with $mt_n$ { $\mathit{stats}$ }`. +A template may have a Java class as its superclass and Java interfaces as its mixins. 
+ +**Template Evaluation** + +Consider a template `´sc´ with ´mt_1´ with ´mt_n´ { ´\mathit{stats}´ }`. If this is the template of a [trait](#traits) then its _mixin-evaluation_ -consists of an evaluation of the statement sequence $\mathit{stats}$. +consists of an evaluation of the statement sequence ´\mathit{stats}´. If this is not a template of a trait, then its _evaluation_ consists of the following steps. -- First, the superclass constructor $sc$ is +- First, the superclass constructor ´sc´ is [evaluated](#constructor-invocations). - Then, all base classes in the template's [linearization](#class-linearization) - up to the template's superclass denoted by $sc$ are + up to the template's superclass denoted by ´sc´ are mixin-evaluated. Mixin-evaluation happens in reverse order of occurrence in the linearization. -- Finally the statement sequence $\mathit{stats}\,$ is evaluated. +- Finally, the statement sequence ´\mathit{stats}\,´ is evaluated. ###### Delayed Initialization -The initialization code of an object or class (but not a trait) that follows -the superclass -constructor invocation and the mixin-evaluation of the template's base -classes is passed to a special hook, which is inaccessible from user -code. Normally, that hook simply executes the code that is passed to -it. But templates inheriting the `scala.DelayedInit` trait -can override the hook by re-implementing the `delayedInit` -method, which is defined as follows: +This statement sequence constitutes the initialization code for an object +or class after the superclass constructor invocation and the mixin-evaluation +of the template's base classes as described above. +Normally, this code is passed to a special hook, inaccessible to user code, +which simply executes it. + +However, in objects and classes (but not traits) which extend `scala.DelayedInit`, +the initialization code is passed to a `delayedInit` method which can be +overridden to implement arbitrary semantics. 
```scala -def delayedInit(body: => Unit) +def delayedInit(body: => Unit): Unit ``` ### Constructor Invocations @@ -152,46 +156,47 @@ Constructor invocations define the type, members, and initial state of objects created by an instance creation expression, or of parts of an object's definition which are inherited by a class or object definition. A constructor invocation is a function application -`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`, where $x$ is a -[stable identifier](03-types.html#paths), $c$ is a type name which either designates a -class or defines an alias type for one, $\mathit{targs}$ is a type argument -list, $\mathit{args}_1 , \ldots , \mathit{args}_n$ are argument lists, and there is a +`´x´.´c´[´\mathit{targs}´](´\mathit{args}_1´)´\ldots´(´\mathit{args}_n´)`, where ´x´ is a +[stable identifier](03-types.html#paths), ´c´ is a type name which either designates a +class or defines an alias type for one, ´\mathit{targs}´ is a type argument +list, ´\mathit{args}_1 , \ldots , \mathit{args}_n´ are argument lists, and there is a constructor of that class which is [applicable](06-expressions.html#function-applications) to the given arguments. If the constructor invocation uses named or default arguments, it is transformed into a block expression using the same transformation as described [here](sec:named-default). -The prefix `$x$.` can be omitted. A type argument list -can be given only if the class $c$ takes type parameters. Even then +The prefix `´x´.` can be omitted. A type argument list +can be given only if the class ´c´ takes type parameters. Even then it can be omitted, in which case a type argument list is synthesized using [local type inference](06-expressions.html#local-type-inference). If no explicit arguments are given, an empty list `()` is implicitly supplied. 
An evaluation of a constructor invocation -`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)` +`´x´.´c´[´\mathit{targs}´](´\mathit{args}_1´)´\ldots´(´\mathit{args}_n´)` consists of the following steps: -- First, the prefix $x$ is evaluated. -- Then, the arguments $\mathit{args}_1 , \ldots , \mathit{args}_n$ are evaluated from +- First, the prefix ´x´ is evaluated. +- Then, the arguments ´\mathit{args}_1 , \ldots , \mathit{args}_n´ are evaluated from left to right. - Finally, the class being constructed is initialized by evaluating the - template of the class referred to by $c$. + template of the class referred to by ´c´. ### Class Linearization The classes reachable through transitive closure of the direct -inheritance relation from a class $C$ are called the _base classes_ of $C$. Because of mixins, the inheritance relationship +inheritance relation from a class ´C´ are called the _base classes_ of ´C´. Because of mixins, the inheritance relationship on base classes forms in general a directed acyclic graph. A linearization of this graph is defined as follows. ###### Definition: linearization -Let $C$ be a class with template -`$C_1$ with ... with $C_n$ { $\mathit{stats}$ }`. -The _linearization_ of $C$, $\mathcal{L}(C)$ is defined as follows: - -$$\mathcal{L}(C) = C, \mathcal{L}(C_n) \; \vec{+} \; \ldots \; \vec{+} \; \mathcal{L}(C_1)$$ +Let ´C´ be a class with template +`´C_1´ with ... with ´C_n´ { ´\mathit{stats}´ }`. 
+The _linearization_ of ´C´, ´\mathcal{L}(C)´ is defined as follows: +$$ +\mathcal{L}(C) = C, \mathcal{L}(C_n) \; \vec{+} \; \ldots \; \vec{+} \; \mathcal{L}(C_1) +$$ -Here $\vec{+}$ denotes concatenation where elements of the right operand +Here ´\vec{+}´ denotes concatenation where elements of the right operand replace identical elements of the left operand: $$ @@ -218,8 +223,8 @@ Then the linearization of class `Iter` is ``` Note that the linearization of a class refines the inheritance -relation: if $C$ is a subclass of $D$, then $C$ precedes $D$ in any -linearization where both $C$ and $D$ occur. +relation: if ´C´ is a subclass of ´D´, then ´C´ precedes ´D´ in any +linearization where both ´C´ and ´D´ occur. [Linearization](#definition:-linearization) also satisfies the property that a linearization of a class always contains the linearization of its direct superclass as a suffix. @@ -241,57 +246,57 @@ which is not a suffix of the linearization of `Iter`. ### Class Members -A class $C$ defined by a template `$C_1$ with $\ldots$ with $C_n$ { $\mathit{stats}$ }` +A class ´C´ defined by a template `´C_1´ with ´\ldots´ with ´C_n´ { ´\mathit{stats}´ }` can define members in its statement sequence -$\mathit{stats}$ and can inherit members from all parent classes. Scala +´\mathit{stats}´ and can inherit members from all parent classes. Scala adopts Java and C\#'s conventions for static overloading of methods. It is thus possible that a class defines and/or inherits several methods with the same name. 
To decide whether a defined -member of a class $C$ overrides a member of a parent class, or whether -the two co-exist as overloaded variants in $C$, Scala uses the +member of a class ´C´ overrides a member of a parent class, or whether +the two co-exist as overloaded variants in ´C´, Scala uses the following definition of _matching_ on members: ###### Definition: matching -A member definition $M$ _matches_ a member definition $M'$, if $M$ -and $M'$ bind the same name, and one of following holds. +A member definition ´M´ _matches_ a member definition ´M'´, if ´M´ +and ´M'´ bind the same name, and one of following holds. -1. Neither $M$ nor $M'$ is a method definition. -2. $M$ and $M'$ define both monomorphic methods with equivalent argument types. -3. $M$ defines a parameterless method and $M'$ defines a method +1. Neither ´M´ nor ´M'´ is a method definition. +2. ´M´ and ´M'´ define both monomorphic methods with equivalent argument types. +3. ´M´ defines a parameterless method and ´M'´ defines a method with an empty parameter list `()` or _vice versa_. -4. $M$ and $M'$ define both polymorphic methods with - equal number of argument types $\overline T$, $\overline T'$ +4. ´M´ and ´M'´ define both polymorphic methods with + equal number of argument types ´\overline T´, ´\overline T'´ and equal numbers of type parameters - $\overline t$, $\overline t'$, say, and $\overline T' = [\overline t'/\overline t]\overline T$. + ´\overline t´, ´\overline t'´, say, and ´\overline T' = [\overline t'/\overline t]\overline T´. Member definitions fall into two categories: concrete and abstract. -Members of class $C$ are either _directly defined_ (i.e. they appear in -$C$'s statement sequence $\mathit{stats}$) or they are _inherited_. There are two rules +Members of class ´C´ are either _directly defined_ (i.e. they appear in +´C´'s statement sequence ´\mathit{stats}´) or they are _inherited_. 
There are two rules that determine the set of members of a class, one for each category: -A _concrete member_ of a class $C$ is any concrete definition $M$ in -some class $C_i \in \mathcal{L}(C)$, except if there is a preceding class -$C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines a concrete -member $M'$ matching $M$. +A _concrete member_ of a class ´C´ is any concrete definition ´M´ in +some class ´C_i \in \mathcal{L}(C)´, except if there is a preceding class +´C_j \in \mathcal{L}(C)´ where ´j < i´ which directly defines a concrete +member ´M'´ matching ´M´. -An _abstract member_ of a class $C$ is any abstract definition $M$ -in some class $C_i \in \mathcal{L}(C)$, except if $C$ contains already a -concrete member $M'$ matching $M$, or if there is a preceding class -$C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines an abstract -member $M'$ matching $M$. +An _abstract member_ of a class ´C´ is any abstract definition ´M´ +in some class ´C_i \in \mathcal{L}(C)´, except if ´C´ contains already a +concrete member ´M'´ matching ´M´, or if there is a preceding class +´C_j \in \mathcal{L}(C)´ where ´j < i´ which directly defines an abstract +member ´M'´ matching ´M´. This definition also determines the [overriding](#overriding) relationships -between matching members of a class $C$ and its parents. +between matching members of a class ´C´ and its parents. First, a concrete definition always overrides an abstract definition. -Second, for definitions $M$ and $M$' which are both concrete or both abstract, -$M$ overrides $M'$ if $M$ appears in a class that precedes (in the -linearization of $C$) the class in which $M'$ is defined. +Second, for definitions ´M´ and ´M'´ which are both concrete or both abstract, +´M´ overrides ´M'´ if ´M´ appears in a class that precedes (in the +linearization of ´C´) the class in which ´M'´ is defined. It is an error if a template directly defines two matching members. 
It is also an error if a template contains two members (directly defined @@ -317,31 +322,31 @@ trait `B`. -A member $M$ of class $C$ that [matches](#class-members) -a non-private member $M'$ of a -base class of $C$ is said to _override_ that member. In this case -the binding of the overriding member $M$ must [subsume](03-types.html#conformance) -the binding of the overridden member $M'$. -Furthermore, the following restrictions on modifiers apply to $M$ and -$M'$: - -- $M'$ must not be labeled `final`. -- $M$ must not be [`private`](#modifiers). -- If $M$ is labeled `private[$C$]` for some enclosing class or package $C$, - then $M'$ must be labeled `private[$C'$]` for some class or package $C'$ where - $C'$ equals $C$ or $C'$ is contained in $C$. - - -- If $M$ is labeled `protected`, then $M'$ must also be +A member ´M´ of class ´C´ that [matches](#class-members) +a non-private member ´M'´ of a +base class of ´C´ is said to _override_ that member. In this case +the binding of the overriding member ´M´ must [subsume](03-types.html#conformance) +the binding of the overridden member ´M'´. +Furthermore, the following restrictions on modifiers apply to ´M´ and +´M'´: + +- ´M'´ must not be labeled `final`. +- ´M´ must not be [`private`](#modifiers). +- If ´M´ is labeled `protected`, then ´M'´ must also be labeled `protected`. -- If $M'$ is not an abstract member, then $M$ must be labeled `override`. +- If ´M´ is labeled `private[´C´]` (respectively `protected[´C´]`) + for some enclosing class or package ´C´, + then ´M'´ must be labeled `private[´C'´]` (or, respectively, `protected[´C'´]`) + for some class or package ´C'´ where + ´C'´ equals ´C´ or the companion of ´C´, or ´C'´ is contained in ´C´. +- If ´M'´ is not an abstract member, then ´M´ must be labeled `override`. 
Furthermore, one of two possibilities must hold: - - either $M$ is defined in a subclass of the class where is $M'$ is defined, - - or both $M$ and $M'$ override a third member $M''$ which is defined - in a base class of both the classes containing $M$ and $M'$ -- If $M'$ is [incomplete](#modifiers) in $C$ then $M$ must be + - either ´M´ is defined in a subclass of the class where ´M'´ is defined, + - or both ´M´ and ´M'´ override a third member ´M''´ which is defined + in a base class of both the classes containing ´M´ and ´M'´ +- If ´M'´ is [incomplete](#modifiers) in ´C´ then ´M´ must be labeled `abstract override`. -- If $M$ and $M'$ are both concrete value definitions, then either none +- If ´M´ and ´M'´ are both concrete value definitions, then either none of them is marked `lazy` or both must be marked `lazy`. - A stable member can only be overridden by a stable member. @@ -358,8 +363,8 @@ bound may not override an abstract type member which does not have a volatile upper bound. A special rule concerns parameterless methods. If a parameterless -method defined as `def $f$: $T$ = ...` or `def $f$ = ...` overrides a method of -type $()T'$ which has an empty parameter list, then $f$ is also +method defined as `def ´f´: ´T´ = ...` or `def ´f´ = ...` overrides a method of +type ´()T'´ which has an empty parameter list, then ´f´ is also assumed to have an empty parameter list. An overriding method inherits all default arguments from the definition @@ -392,20 +397,20 @@ class C extends A with B { type T <: C } ### Inheritance Closure -Let $C$ be a class type. The _inheritance closure_ of $C$ is the -smallest set $\mathscr{S}$ of types such that +Let ´C´ be a class type. The _inheritance closure_ of ´C´ is the +smallest set ´\mathscr{S}´ of types such that -- $C$ is in $\mathscr{S}$. -- If $T$ is in $\mathscr{S}$, then every type $T'$ which forms syntactically - a part of $T$ is also in $\mathscr{S}$. 
-- If $T$ is a class type in $\mathscr{S}$, then all [parents](#templates) - of $T$ are also in $\mathscr{S}$. +- ´C´ is in ´\mathscr{S}´. +- If ´T´ is in ´\mathscr{S}´, then every type ´T'´ which forms syntactically + a part of ´T´ is also in ´\mathscr{S}´. +- If ´T´ is a class type in ´\mathscr{S}´, then all [parents](#templates) + of ´T´ are also in ´\mathscr{S}´. It is a static error if the inheritance closure of a class type consists of an infinite number of types. (This restriction is necessary to make subtyping decidable[^kennedy]). -[^kennedy]: Kennedy, Pierce. [On Decidability of Nominal Subtyping with Variance.]( http://research.microsoft.com/pubs/64041/fool2007.pdf) in FOOL 2007 +[^kennedy]: Kennedy, Pierce. [On Decidability of Nominal Subtyping with Variance.]( https://research.microsoft.com/pubs/64041/fool2007.pdf) in FOOL 2007 ### Early Definitions @@ -419,13 +424,13 @@ which serves to define certain field values before the supertype constructor is called. In a template ```scala -{ val $p_1$: $T_1$ = $e_1$ +{ val ´p_1´: ´T_1´ = ´e_1´ ... - val $p_n$: $T_n$ = $e_n$ -} with $sc$ with $mt_1$ with $mt_n$ { $\mathit{stats}$ } + val ´p_n´: ´T_n´ = ´e_n´ +} with ´sc´ with ´mt_1´ with ´mt_n´ { ´\mathit{stats}´ } ``` -The initial pattern definitions of $p_1 , \ldots , p_n$ are called +The initial pattern definitions of ´p_1 , \ldots , p_n´ are called _early definitions_. They define fields which form part of the template. Every early definition must define at least one variable. @@ -434,19 +439,20 @@ An early definition is type-checked and evaluated in the scope which is in effect just before the template being defined, augmented by any type parameters of the enclosing class and by any early definitions preceding the one being defined. In particular, any reference to -`this` in the right-hand side of an early definition refers +`this` in an early definition refers to the identity of `this` just outside the template. 
Consequently, it -is impossible that an early definition refers to the object being -constructed by the template, or refers to one of its fields and +is impossible for an early definition to refer to the object being +constructed by the template, or to refer to one of its fields and methods, except for any other preceding early definition in the same section. Furthermore, references to preceding early definitions -always refer to the value that's defined there, and do not take into account +always refer to the value that's defined there and do not take into account overriding definitions. In other words, a block of early definitions -is evaluated exactly as if it was a local bock containing a number of value +is evaluated exactly as if it were a local block containing a number of value definitions. -Early definitions are evaluated in the order they are being defined -before the superclass constructor of the template is called. +Early definitions are evaluated +before the superclass constructor of the template is called, +in the order they are defined. ###### Example Early definitions are particularly useful for @@ -496,29 +502,32 @@ definition apply to all constituent definitions. The rules governing the validity and meaning of a modifier are as follows. ### `private` -The `private` modifier can be used with any definition or -declaration in a template. Such members can be accessed only from -within the directly enclosing template and its companion module or +The `private` modifier can be used with any definition or declaration in a +template. Private members of a template can be accessed only from within the +directly enclosing template and its companion module or [companion class](#object-definitions). -A `private` modifier can be _qualified_ with an identifier $C$ (e.g. -`private[$C$]`) that must denote a class or package enclosing the definition. +The `private` modifier is also valid for +[top-level](09-top-level-definitions.html#packagings) templates. 
+ +A `private` modifier can be _qualified_ with an identifier ´C´ (e.g. +`private[´C´]`) that must denote a class or package enclosing the definition. Members labeled with such a modifier are accessible respectively only from code -inside the package $C$ or only from code inside the class $C$ and its +inside the package ´C´ or only from code inside the class ´C´ and its [companion module](#object-definitions). A different form of qualification is `private[this]`. A member -$M$ marked with this modifier is called _object-protected_; it can be accessed only from within -the object in which it is defined. That is, a selection $p.M$ is only -legal if the prefix is `this` or `$O$.this`, for some -class $O$ enclosing the reference. In addition, the restrictions for +´M´ marked with this modifier is called _object-private_; it can be accessed only from within +the object in which it is defined. That is, a selection ´p.M´ is only +legal if the prefix is `this` or `´O´.this`, for some +class ´O´ enclosing the reference. In addition, the restrictions for unqualified `private` apply. Members marked private without a qualifier are called _class-private_, whereas members labeled with `private[this]` are called _object-private_. A member _is private_ if it is either class-private or object-private, but not if it is marked -`private[$C$]` where $C$ is an identifier; in the latter +`private[´C´]` where ´C´ is an identifier; in the latter case the member is called _qualified private_. Class-private or object-private members may not be abstract, and may @@ -532,27 +541,27 @@ Protected members of a class can be accessed from within - all templates that have the defining class as a base class, - the companion module of any of those classes. -A `protected` modifier can be qualified with an identifier $C$ (e.g. 
+`protected[´C´]`) that must denote a class or package enclosing the definition. Members labeled with such a modifier are also accessible respectively from all -code inside the package $C$ or from all code inside the class $C$ and its +code inside the package ´C´ or from all code inside the class ´C´ and its [companion module](#object-definitions). -A protected identifier $x$ may be used as a member name in a selection -`$r$.$x$` only if one of the following applies: +A protected identifier ´x´ may be used as a member name in a selection +`´r´.´x´` only if one of the following applies: - The access is within the template defining the member, or, if - a qualification $C$ is given, inside the package $C$, - or the class $C$, or its companion module, or - - $r$ is one of the reserved words `this` and + a qualification ´C´ is given, inside the package ´C´, + or the class ´C´, or its companion module, or + - ´r´ is one of the reserved words `this` and `super`, or - - $r$'s type conforms to a type-instance of the + - ´r´'s type conforms to a type-instance of the class which contains the access. A different form of qualification is `protected[this]`. A member -$M$ marked with this modifier is called _object-protected_; it can be accessed only from within -the object in which it is defined. That is, a selection $p.M$ is only -legal if the prefix is `this` or `$O$.this`, for some -class $O$ enclosing the reference. In addition, the restrictions for +´M´ marked with this modifier is called _object-protected_; it can be accessed only from within +the object in which it is defined. That is, a selection ´p.M´ is only +legal if the prefix is `this` or `´O´.this`, for some +class ´O´ enclosing the reference. In addition, the restrictions for unqualified `protected` apply. ### `override` @@ -567,10 +576,10 @@ The `override` modifier has an additional significance when combined with the `abstract` modifier. That modifier combination is only allowed for value members of traits. 
-We call a member $M$ of a template _incomplete_ if it is either +We call a member ´M´ of a template _incomplete_ if it is either abstract (i.e. defined by a declaration), or it is labeled `abstract` and `override` and -every member overridden by $M$ is again incomplete. +every member overridden by ´M´ is again incomplete. Note that the `abstract override` modifier combination does not influence the concept whether a member is concrete or abstract. A @@ -685,64 +694,64 @@ ClassTemplateOpt ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBo The most general form of class definition is ```scala -class $c$[$\mathit{tps}\,$] $as$ $m$($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) extends $t$ $\quad(n \geq 0)$. +class ´c´[´\mathit{tps}\,´] ´as´ ´m´(´\mathit{ps}_1´)´\ldots´(´\mathit{ps}_n´) extends ´t´ ´\quad(n \geq 0)´. ``` Here, - - $c$ is the name of the class to be defined. - - $\mathit{tps}$ is a non-empty list of type parameters of the class + - ´c´ is the name of the class to be defined. + - ´\mathit{tps}´ is a non-empty list of type parameters of the class being defined. The scope of a type parameter is the whole class definition including the type parameter section itself. It is illegal to define two type parameters with the same name. The type - parameter section `[$\mathit{tps}\,$]` may be omitted. A class with a type + parameter section `[´\mathit{tps}\,´]` may be omitted. A class with a type parameter section is called _polymorphic_, otherwise it is called _monomorphic_. - - $as$ is a possibly empty sequence of + - ´as´ is a possibly empty sequence of [annotations](11-annotations.html#user-defined-annotations). If any annotations are given, they apply to the primary constructor of the class. - - $m$ is an [access modifier](#modifiers) such as + - ´m´ is an [access modifier](#modifiers) such as `private` or `protected`, possibly with a qualification. If such an access modifier is given it applies to the primary constructor of the class. 
- - $(\mathit{ps}\_1)\ldots(\mathit{ps}\_n)$ are formal value parameter clauses for + - ´(\mathit{ps}\_1)\ldots(\mathit{ps}\_n)´ are formal value parameter clauses for the _primary constructor_ of the class. The scope of a formal value parameter includes - all subsequent parameter sections and the template $t$. However, a formal - value parameter may not form part of the types of any of the parent classes or members of the class template $t$. + all subsequent parameter sections and the template ´t´. However, a formal + value parameter may not form part of the types of any of the parent classes or members of the class template ´t´. It is illegal to define two formal value parameters with the same name. If a class has no formal parameter section that is not implicit, an empty parameter section `()` is assumed. - If a formal parameter declaration $x: T$ is preceded by a `val` + If a formal parameter declaration ´x: T´ is preceded by a `val` or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) for this parameter is implicitly added to the class. - The getter introduces a value member $x$ of class $c$ that is defined as an alias of the parameter. - If the introducing keyword is `var`, a setter accessor [`$x$_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class. - In invocation of that setter `$x$_=($e$)` changes the value of the parameter to the result of evaluating $e$. + The getter introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter. + If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class. + An invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´. 
The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s). When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed. A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters). - - $t$ is a [template](#templates) of the form + - ´t´ is a [template](#templates) of the form ```scala - $sc$ with $mt_1$ with $\ldots$ with $mt_m$ { $\mathit{stats}$ } // $m \geq 0$ + ´sc´ with ´mt_1´ with ´\ldots´ with ´mt_m´ { ´\mathit{stats}´ } // ´m \geq 0´ ``` which defines the base classes, behavior and initial state of objects of the class. The extends clause - `extends $sc$ with $mt_1$ with $\ldots$ with $mt_m$` + `extends ´sc´ with ´mt_1´ with ´\ldots´ with ´mt_m´` can be omitted, in which case `extends scala.AnyRef` is assumed. The class body - `{ $\mathit{stats}$ }` may also be omitted, in which case the empty body + `{ ´\mathit{stats}´ }` may also be omitted, in which case the empty body `{}` is assumed. -This class definition defines a type `$c$[$\mathit{tps}\,$]` and a constructor -which when applied to parameters conforming to types $\mathit{ps}$ -initializes instances of type `$c$[$\mathit{tps}\,$]` by evaluating the template -$t$. +This class definition defines a type `´c´[´\mathit{tps}\,´]` and a constructor +which when applied to parameters conforming to types ´\mathit{ps}´ +initializes instances of type `´c´[´\mathit{tps}\,´]` by evaluating the template +´t´. ###### Example – `val` and `var` parameters The following example illustrates `val` and `var` parameters of a class `C`: @@ -780,19 +789,19 @@ SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} A class may have additional constructors besides the primary constructor. These are defined by constructor definitions of the form -`def this($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) = $e$`. 
Such a +`def this(´\mathit{ps}_1´)´\ldots´(´\mathit{ps}_n´) = ´e´`. Such a definition introduces an additional constructor for the enclosing -class, with parameters as given in the formal parameter lists $\mathit{ps}_1 -, \ldots , \mathit{ps}_n$, and whose evaluation is defined by the constructor -expression $e$. The scope of each formal parameter is the subsequent +class, with parameters as given in the formal parameter lists ´\mathit{ps}_1 +, \ldots , \mathit{ps}_n´, and whose evaluation is defined by the constructor +expression ´e´. The scope of each formal parameter is the subsequent parameter sections and the constructor -expression $e$. A constructor expression is either a self constructor -invocation `this($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)` or a block +expression ´e´. A constructor expression is either a self constructor +invocation `this(´\mathit{args}_1´)´\ldots´(´\mathit{args}_n´)` or a block which begins with a self constructor invocation. The self constructor invocation must construct a generic instance of the class. I.e. if the -class in question has name $C$ and type parameters -`[$\mathit{tps}\,$]`, then a self constructor invocation must -generate an instance of `$C$[$\mathit{tps}\,$]`; it is not permitted +class in question has name ´C´ and type parameters +`[´\mathit{tps}\,´]`, then a self constructor invocation must +generate an instance of `´C´[´\mathit{tps}\,´]`; it is not permitted to instantiate formal type parameters. The signature and the self constructor invocation of a constructor @@ -804,12 +813,12 @@ The rest of the constructor expression is type-checked and evaluated as a function body in the current class. -If there are auxiliary constructors of a class $C$, they form together -with $C$'s primary [constructor](#class-definitions) +If there are auxiliary constructors of a class ´C´, they form together +with ´C´'s primary [constructor](#class-definitions) an overloaded constructor definition. 
The usual rules for [overloading resolution](06-expressions.html#overloading-resolution) -apply for constructor invocations of $C$, +apply for constructor invocations of ´C´, including for the self constructor invocations in the constructor expressions themselves. However, unlike other methods, constructors are never inherited. To prevent infinite cycles of constructor @@ -823,16 +832,15 @@ Consider the class definition ```scala class LinkedList[A]() { - var head = _ - var tail = null - def isEmpty = tail != null + var head: A = _ + var tail: LinkedList[A] = null def this(head: A) = { this(); this.head = head } - def this(head: A, tail: List[A]) = { this(head); this.tail = tail } + def this(head: A, tail: LinkedList[A]) = { this(head); this.tail = tail } } ``` This defines a class `LinkedList` with three constructors. The -second constructor constructs an singleton list, while the +second constructor constructs a singleton list, while the third one constructs a list with a given head and tail. ### Case Classes @@ -853,59 +861,62 @@ implicitly added to such a parameter, unless the parameter already carries a `val` or `var` modifier. Hence, an accessor definition for the parameter is [generated](#class-definitions). -A case class definition of `$c$[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$)` with type -parameters $\mathit{tps}$ and value parameters $\mathit{ps}$ implies +A case class definition of `´c´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)´\ldots´(´\mathit{ps}_n´)` with type +parameters ´\mathit{tps}´ and value parameters ´\mathit{ps}´ with type ascriptions ´\mathit{pts}´ implies the definition of a companion object, which serves as an [extractor object](08-pattern-matching.html#extractor-patterns). 
It has the following shape: ```scala -object $c$ { - def apply[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$): $c$[$\mathit{tps}\,$] = new $c$[$\mathit{Ts}\,$]($\mathit{xs}_1\,$)$\ldots$($\mathit{xs}_n$) - def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) = +object ´c´ extends Function´\mathit{n}\,´[´\mathit{pt}_1\,\ldots\,\mathit{pt}_n´\,´c´[´\mathit{tps}\,´]]{ + def apply[´\mathit{tps}\,´](´\mathit{ps}_1\,´)´\ldots´(´\mathit{ps}_n´): ´c´[´\mathit{tps}\,´] = new ´c´[´\mathit{Ts}\,´](´\mathit{xs}_1\,´)´\ldots´(´\mathit{xs}_n´) + def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = if (x eq null) scala.None - else scala.Some($x.\mathit{xs}_{11}, \ldots , x.\mathit{xs}_{1k}$) + else scala.Some(´x.\mathit{xs}_{11}, \ldots , x.\mathit{xs}_{1k}´) } ``` -Here, $\mathit{Ts}$ stands for the vector of types defined in the type -parameter section $\mathit{tps}$, -each $\mathit{xs}\_i$ denotes the parameter names of the parameter -section $\mathit{ps}\_i$, and -$\mathit{xs}\_{11}, \ldots , \mathit{xs}\_{1k}$ denote the names of all parameters -in the first parameter section $\mathit{xs}\_1$. +Here, ´\mathit{Ts}´ stands for the vector of types defined in the type +parameter section ´\mathit{tps}´, +each ´\mathit{xs}\_i´ denotes the parameter names of the parameter +section ´\mathit{ps}\_i´, and +´\mathit{xs}\_{11}, \ldots , \mathit{xs}\_{1k}´ denote the names of all parameters +in the first parameter section ´\mathit{xs}\_1´. If a type parameter section is missing in the class, it is also missing in the `apply` and `unapply` methods. -If the companion object $c$ is already defined, +If the companion object ´c´ is already defined, the `apply` and `unapply` methods are added to the existing object. -If the object $c$ already has a [matching](#definition-matching) +If the object ´c´ already has a [matching](#definition-matching) `apply` (or `unapply`) member, no new definition is added. 
-The definition of `apply` is omitted if class $c$ is `abstract`. +The definition of `apply` is omitted if class ´c´ is `abstract`. + +It will not be modified to extend Function if the existing definition doesn't extend it. If the case class definition contains an empty value parameter list, the `unapply` method returns a `Boolean` instead of an `Option` type and is defined as follows: ```scala -def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) = x ne null +def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = x ne null ``` The name of the `unapply` method is changed to `unapplySeq` if the first -parameter section $\mathit{ps}_1$ of $c$ ends in a +parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters). -A method named `copy` is implicitly added to every case class unless the -class already has a member (directly defined or inherited) with that name, or the -class has a repeated parameter. The method is defined as follows: +A method named `copy` is implicitly added to every case class, unless the +class already has a member with that name, whether directly defined or inherited. +The `copy` method is also omitted if the class is abstract, or if the class has +a repeated parameter. The method is defined as follows: ```scala -def copy[$\mathit{tps}\,$]($\mathit{ps}'_1\,$)$\ldots$($\mathit{ps}'_n$): $c$[$\mathit{tps}\,$] = new $c$[$\mathit{Ts}\,$]($\mathit{xs}_1\,$)$\ldots$($\mathit{xs}_n$) +def copy[´\mathit{tps}\,´](´\mathit{ps}'_1\,´)´\ldots´(´\mathit{ps}'_n´): ´c´[´\mathit{tps}\,´] = new ´c´[´\mathit{Ts}\,´](´\mathit{xs}_1\,´)´\ldots´(´\mathit{xs}_n´) ``` -Again, `$\mathit{Ts}$` stands for the vector of types defined in the type parameter section `$\mathit{tps}$` -and each `$xs_i$` denotes the parameter names of the parameter section `$ps'_i$`. 
The value -parameters `$ps'_{1,j}$` of first parameter list have the form `$x_{1,j}$:$T_{1,j}$=this.$x_{1,j}$`, -the other parameters `$ps'_{i,j}$` of the `copy` method are defined as `$x_{i,j}$:$T_{i,j}$`. -In all cases `$x_{i,j}$` and `$T_{i,j}$` refer to the name and type of the corresponding class parameter -`$\mathit{ps}_{i,j}$`. +Again, `´\mathit{Ts}´` stands for the vector of types defined in the type parameter section `´\mathit{tps}´` +and each `´xs_i´` denotes the parameter names of the parameter section `´ps'_i´`. The value +parameters `´ps'_{1,j}´` of first parameter list have the form `´x_{1,j}´:´T_{1,j}´=this.´x_{1,j}´`, +the other parameters `´ps'_{i,j}´` of the `copy` method are defined as `´x_{i,j}´:´T_{i,j}´`. +In all cases `´x_{i,j}´` and `´T_{i,j}´` refer to the name and type of the corresponding class parameter +`´\mathit{ps}_{i,j}´`. Every case class implicitly overrides some method definitions of class [`scala.AnyRef`](12-the-scala-standard-library.html#root-classes) unless a definition of the same @@ -979,14 +990,14 @@ constructor parameters. Furthermore, no constructor arguments are passed to the superclass of the trait. This is not necessary as traits are initialized after the superclass is initialized. -Assume a trait $D$ defines some aspect of an instance $x$ of type $C$ (i.e. $D$ is a base class of $C$). -Then the _actual supertype_ of $D$ in $x$ is the compound type consisting of all the -base classes in $\mathcal{L}(C)$ that succeed $D$. The actual supertype gives +Assume a trait ´D´ defines some aspect of an instance ´x´ of type ´C´ (i.e. ´D´ is a base class of ´C´). +Then the _actual supertype_ of ´D´ in ´x´ is the compound type consisting of all the +base classes in ´\mathcal{L}(C)´ that succeed ´D´. The actual supertype gives the context for resolving a [`super` reference](06-expressions.html#this-and-super) in a trait. 
Note that the actual supertype depends on the type to which the trait is added in a mixin composition; it is not statically known at the time the trait is defined. -If $D$ is not a trait, then its actual supertype is simply its +If ´D´ is not a trait, then its actual supertype is simply its least proper supertype (which is statically known). ###### Example @@ -1017,7 +1028,7 @@ is undefined for the given key. This class is implemented as follows. ```scala abstract class Table[A, B](defaultValue: B) { def get(key: A): Option[B] - def set(key: A, value: B) + def set(key: A, value: B): Unit def apply(key: A) = get(key) match { case Some(value) => value case None => defaultValue @@ -1029,8 +1040,8 @@ Here is a concrete implementation of the `Table` class. ```scala class ListTable[A, B](defaultValue: B) extends Table[A, B](defaultValue) { - private var elems: List[(A, B)] - def get(key: A) = elems.find(._1.==(key)).map(._2) + private var elems: List[(A, B)] = Nil + def get(key: A) = elems.find(_._1 == key).map(_._2) def set(key: A, value: B) = { elems = (key, value) :: elems } } ``` @@ -1042,7 +1053,7 @@ Here is a trait that prevents concurrent access to the trait SynchronizedTable[A, B] extends Table[A, B] { abstract override def get(key: A): B = synchronized { super.get(key) } - abstract override def set((key: A, value: B) = + abstract override def set(key: A, value: B) = synchronized { super.set(key, value) } } ``` @@ -1060,7 +1071,7 @@ table with strings as keys and integers as values and with a default value `0`: ```scala -object MyTable extends ListTable[String, Int](0) with SynchronizedTable +object MyTable extends ListTable[String, Int](0) with SynchronizedTable[String, Int] ``` The object `MyTable` inherits its `get` and `set` @@ -1077,37 +1088,37 @@ ObjectDef ::= id ClassTemplate An _object definition_ defines a single object of a new class. Its most general form is -`object $m$ extends $t$`. 
Here, -$m$ is the name of the object to be defined, and -$t$ is a [template](#templates) of the form +`object ´m´ extends ´t´`. Here, +´m´ is the name of the object to be defined, and +´t´ is a [template](#templates) of the form ```scala -$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ } +´sc´ with ´mt_1´ with ´\ldots´ with ´mt_n´ { ´\mathit{stats}´ } ``` -which defines the base classes, behavior and initial state of $m$. -The extends clause `extends $sc$ with $mt_1$ with $\ldots$ with $mt_n$` +which defines the base classes, behavior and initial state of ´m´. +The extends clause `extends ´sc´ with ´mt_1´ with ´\ldots´ with ´mt_n´` can be omitted, in which case `extends scala.AnyRef` is assumed. The class body -`{ $\mathit{stats}$ }` may also be omitted, in which case the empty body +`{ ´\mathit{stats}´ }` may also be omitted, in which case the empty body `{}` is assumed. The object definition defines a single object (or: _module_) -conforming to the template $t$. It is roughly equivalent to the +conforming to the template ´t´. It is roughly equivalent to the following definition of a lazy value: ```scala -lazy val $m$ = new $sc$ with $mt_1$ with $\ldots$ with $mt_n$ { this: $m.type$ => $\mathit{stats}$ } +lazy val ´m´ = new ´sc´ with ´mt_1´ with ´\ldots´ with ´mt_n´ { this: ´m.type´ => ´\mathit{stats}´ } ``` Note that the value defined by an object definition is instantiated -lazily. The `new $m$\$cls` constructor is evaluated +lazily. The `new ´m´$cls` constructor is evaluated not at the point of the object definition, but is instead evaluated -the first time $m$ is dereferenced during execution of the program -(which might be never at all). An attempt to dereference $m$ again +the first time ´m´ is dereferenced during execution of the program +(which might be never at all). An attempt to dereference ´m´ again during evaluation of the constructor will lead to an infinite loop or run-time error. 
-Other threads trying to dereference $m$ while the +Other threads trying to dereference ´m´ while the constructor is being evaluated block until evaluation is complete. The expansion given above is not accurate for top-level objects. It @@ -1138,9 +1149,9 @@ name `Point` in the type name space, whereas the object definition defines a name in the term namespace. This technique is applied by the Scala compiler when interpreting a -Java class with static members. Such a class $C$ is conceptually seen -as a pair of a Scala class that contains all instance members of $C$ -and a Scala object that contains all static members of $C$. +Java class with static members. Such a class ´C´ is conceptually seen +as a pair of a Scala class that contains all instance members of ´C´ +and a Scala object that contains all static members of ´C´. Generally, a _companion module_ of a class is an object which has the same name as the class and is defined in the same scope and diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 00437a52f0cf..cb6614baef4c 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -11,7 +11,7 @@ Expr ::= (Bindings | id | ‘_’) ‘=>’ Expr | Expr1 Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] | ‘while’ ‘(’ Expr ‘)’ {nl} Expr - | ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’] [‘finally’ Expr] + | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] | ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’ | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr | ‘throw’ Expr @@ -32,7 +32,7 @@ SimpleExpr1 ::= Literal | Path | ‘_’ | ‘(’ [Exprs] ‘)’ - | SimpleExpr ‘.’ id s + | SimpleExpr ‘.’ id | SimpleExpr TypeArgs | SimpleExpr1 ArgumentExprs | XmlExpr @@ -52,22 +52,22 @@ discussed subsequently in decreasing order of precedence. ## Expression Typing -The typing of expressions is often relative to some _expected type_ (which might be undefined). When we write "expression $e$ is expected to conform to type $T$", we mean: - 1. 
the expected type of $e$ is $T$, and - 2. the type of expression $e$ must conform to $T$. +The typing of expressions is often relative to some _expected type_ (which might be undefined). When we write "expression ´e´ is expected to conform to type ´T´", we mean: + 1. the expected type of ´e´ is ´T´, and + 2. the type of expression ´e´ must conform to ´T´. The following skolemization rule is applied universally for every expression: If the type of an expression would be an existential type -$T$, then the type of the expression is assumed instead to be a -[skolemization](03-types.html#existential-types) of $T$. +´T´, then the type of the expression is assumed instead to be a +[skolemization](03-types.html#existential-types) of ´T´. -Skolemization is reversed by type packing. Assume an expression $e$ of -type $T$ and let $t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n$ be -all the type variables created by skolemization of some part of $e$ which are free in $T$. -Then the _packed type_ of $e$ is +Skolemization is reversed by type packing. Assume an expression ´e´ of +type ´T´ and let ´t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n´ be +all the type variables created by skolemization of some part of ´e´ which are free in ´T´. +Then the _packed type_ of ´e´ is ```scala -$T$ forSome { type $t_1[\mathit{tps}\_1] >: L_1 <: U_1$; $\ldots$; type $t_n[\mathit{tps}\_n] >: L_n <: U_n$ }. +´T´ forSome { type ´t_1[\mathit{tps}\_1] >: L_1 <: U_1´; ´\ldots´; type ´t_n[\mathit{tps}\_n] >: L_n <: U_n´ }. ``` ## Literals @@ -76,8 +76,8 @@ $T$ forSome { type $t_1[\mathit{tps}\_1] >: L_1 <: U_1$; $\ldots$; type $t_n[\ma SimpleExpr ::= Literal ``` -Typing of literals is as described [here](01-lexical-syntax.html#literals); their -evaluation is immediate. +Typing of literals is described along with their [lexical syntax](01-lexical-syntax.html#literals); +their evaluation is immediate. 
## The _Null_ Value @@ -85,12 +85,12 @@ The `null` value is of type `scala.Null`, and thus conforms to every reference t It denotes a reference value which refers to a special `null` object. This object implements methods in class `scala.AnyRef` as follows: -- `eq($x\,$)` and `==($x\,$)` return `true` iff the - argument $x$ is also the "null" object. -- `ne($x\,$)` and `!=($x\,$)` return true iff the +- `eq(´x\,´)` and `==(´x\,´)` return `true` iff the + argument ´x´ is also the "null" object. +- `ne(´x\,´)` and `!=(´x\,´)` return true iff the argument x is not also the "null" object. -- `isInstanceOf[$T\,$]` always returns `false`. -- `asInstanceOf[$T\,$]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type $T$. +- `isInstanceOf[´T\,´]` always returns `false`. +- `asInstanceOf[´T\,´]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type ´T´. - `##` returns ``0``. A reference to any other member of the "null" object causes a @@ -106,47 +106,47 @@ SimpleExpr ::= Path A designator refers to a named term. It can be a _simple name_ or a _selection_. -A simple name $x$ refers to a value as specified +A simple name ´x´ refers to a value as specified [here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes). -If $x$ is bound by a definition or declaration in an enclosing class -or object $C$, it is taken to be equivalent to the selection -`$C$.this.$x$` where $C$ is taken to refer to the class containing $x$ -even if the type name $C$ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the -occurrence of $x$. 
+If ´x´ is bound by a definition or declaration in an enclosing class +or object ´C´, it is taken to be equivalent to the selection +`´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ +even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the +occurrence of ´x´. -If $r$ is a [stable identifier](03-types.html#paths) of type $T$, the selection $r.x$ refers -statically to a term member $m$ of $r$ that is identified in $T$ by -the name $x$. +If ´r´ is a [stable identifier](03-types.html#paths) of type ´T´, the selection ´r.x´ refers +statically to a term member ´m´ of ´r´ that is identified in ´T´ by +the name ´x´. -For other expressions $e$, $e.x$ is typed as -if it was `{ val $y$ = $e$; $y$.$x$ }`, for some fresh name -$y$. +For other expressions ´e´, ´e.x´ is typed as +if it was `{ val ´y´ = ´e´; ´y´.´x´ }`, for some fresh name +´y´. The expected type of a designator's prefix is always undefined. The -type of a designator is the type $T$ of the entity it refers to, with -the following exception: The type of a [path](03-types.html#paths) $p$ +type of a designator is the type ´T´ of the entity it refers to, with +the following exception: The type of a [path](03-types.html#paths) ´p´ which occurs in a context where a [stable type](03-types.html#singleton-types) -is required is the singleton type `$p$.type`. +is required is the singleton type `´p´.type`. The contexts where a stable type is required are those that satisfy one of the following conditions: -1. The path $p$ occurs as the prefix of a selection and it does not +1. The path ´p´ occurs as the prefix of a selection and it does not designate a constant, or -1. The expected type $\mathit{pt}$ is a stable type, or -1. The expected type $\mathit{pt}$ is an abstract type with a stable type as lower - bound, and the type $T$ of the entity referred to by $p$ does not - conform to $\mathit{pt}$, or -1. The path $p$ designates a module. +1. 
The expected type ´\mathit{pt}´ is a stable type, or +1. The expected type ´\mathit{pt}´ is an abstract type with a stable type as lower + bound, and the type ´T´ of the entity referred to by ´p´ does not + conform to ´\mathit{pt}´, or +1. The path ´p´ designates a module. -The selection $e.x$ is evaluated by first evaluating the qualifier -expression $e$, which yields an object $r$, say. The selection's -result is then the member of $r$ that is either defined by $m$ or defined -by a definition overriding $m$. +The selection ´e.x´ is evaluated by first evaluating the qualifier +expression ´e´, which yields an object ´r´, say. The selection's +result is then the member of ´r´ that is either defined by ´m´ or defined +by a definition overriding ´m´. ## This and Super @@ -160,46 +160,46 @@ template or compound type. It stands for the object being defined by the innermost template or compound type enclosing the reference. If this is a compound type, the type of `this` is that compound type. If it is a template of a -class or object definition with simple name $C$, the type of this -is the same as the type of `$C$.this`. +class or object definition with simple name ´C´, the type of this +is the same as the type of `´C´.this`. -The expression `$C$.this` is legal in the statement part of an -enclosing class or object definition with simple name $C$. It +The expression `´C´.this` is legal in the statement part of an +enclosing class or object definition with simple name ´C´. It stands for the object being defined by the innermost such definition. If the expression's expected type is a stable type, or -`$C$.this` occurs as the prefix of a selection, its type is -`$C$.this.type`, otherwise it is the self type of class $C$. +`´C´.this` occurs as the prefix of a selection, its type is +`´C´.this.type`, otherwise it is the self type of class ´C´. 
-A reference `super.$m$` refers statically to a method or type $m$ +A reference `super.´m´` refers statically to a method or type ´m´ in the least proper supertype of the innermost template containing the -reference. It evaluates to the member $m'$ in the actual supertype of -that template which is equal to $m$ or which overrides $m$. The -statically referenced member $m$ must be a type or a +reference. It evaluates to the member ´m'´ in the actual supertype of +that template which is equal to ´m´ or which overrides ´m´. The +statically referenced member ´m´ must be a type or a method. If it is a method, it must be concrete, or the template -containing the reference must have a member $m'$ which overrides $m$ +containing the reference must have a member ´m'´ which overrides ´m´ and which is labeled `abstract override`. -A reference `$C$.super.$m$` refers statically to a method -or type $m$ in the least proper supertype of the innermost enclosing class or -object definition named $C$ which encloses the reference. It evaluates -to the member $m'$ in the actual supertype of that class or object -which is equal to $m$ or which overrides $m$. The -statically referenced member $m$ must be a type or a +A reference `´C´.super.´m´` refers statically to a method +or type ´m´ in the least proper supertype of the innermost enclosing class or +object definition named ´C´ which encloses the reference. It evaluates +to the member ´m'´ in the actual supertype of that class or object +which is equal to ´m´ or which overrides ´m´. The +statically referenced member ´m´ must be a type or a method. If the statically -referenced member $m$ is a method, it must be concrete, or the innermost enclosing -class or object definition named $C$ must have a member $m'$ which -overrides $m$ and which is labeled `abstract override`. 
+referenced member ´m´ is a method, it must be concrete, or the innermost enclosing +class or object definition named ´C´ must have a member ´m'´ which +overrides ´m´ and which is labeled `abstract override`. The `super` prefix may be followed by a trait qualifier -`[$T\,$]`, as in `$C$.super[$T\,$].$x$`. This is +`[´T\,´]`, as in `´C´.super[´T\,´].´x´`. This is called a _static super reference_. In this case, the reference is -to the type or method of $x$ in the parent trait of $C$ whose simple -name is $T$. That member must be uniquely defined. If it is a method, +to the type or method of ´x´ in the parent trait of ´C´ whose simple +name is ´T´. That member must be uniquely defined. If it is a method, it must be concrete. ###### Example @@ -246,56 +246,61 @@ ArgumentExprs ::= ‘(’ [Exprs] ‘)’ Exprs ::= Expr {‘,’ Expr} ``` -An application `$f(e_1 , \ldots , e_m)$` applies the function `$f$` to the argument expressions `$e_1, \ldots , e_m$`. For this expression to be well-typed, the function must be *applicable* to its arguments, which is defined next by case analysis on $f$'s type. +An application `´f(e_1 , \ldots , e_m)´` applies the function `´f´` to the argument expressions `´e_1, \ldots , e_m´`. For this expression to be well-typed, the function must be *applicable* to its arguments, which is defined next by case analysis on ´f´'s type. -If $f$ has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, each argument expression $e_i$ is typed with the corresponding parameter type $T_i$ as expected type. Let $S_i$ be the type of argument $e_i$ $(i = 1 , \ldots , m)$. The method $f$ must be _applicable_ to its arguments $e_1, \ldots , e_n$ of types $S_1 , \ldots , S_n$. We say that an argument expression $e_i$ is a _named_ argument if it has the form `$x_i=e'_i$` and `$x_i$` is one of the parameter names `$p_1, \ldots, p_n$`. 
+If ´f´ has a method type `(´p_1´:´T_1 , \ldots , p_n´:´T_n´)´U´`, each argument expression ´e_i´ is typed with the corresponding parameter type ´T_i´ as expected type. Let ´S_i´ be the type of argument ´e_i´ ´(i = 1 , \ldots , m)´. The method ´f´ must be _applicable_ to its arguments ´e_1, \ldots , e_n´ of types ´S_1 , \ldots , S_n´. We say that an argument expression ´e_i´ is a _named_ argument if it has the form `´x_i=e'_i´` and `´x_i´` is one of the parameter names `´p_1, \ldots, p_n´`. -Once the types $S_i$ have been determined, the method $f$ of the above method type is said to be applicable if all of the following conditions hold: - - for every named argument $p_j=e_i'$ the type $S_i$ is [compatible](03-types.html#compatibility) with the parameter type $T_j$; - - for every positional argument $e_i$ the type $S_i$ is [compatible](03-types.html#compatibility) with $T_i$; - - if the expected type is defined, the result type $U$ is [compatible](03-types.html#compatibility) to it. +Once the types ´S_i´ have been determined, the method ´f´ of the above method type is said to be applicable if all of the following conditions hold: + - for every named argument ´p_j=e_i'´ the type ´S_i´ is [compatible](03-types.html#compatibility) with the parameter type ´T_j´; + - for every positional argument ´e_i´ the type ´S_i´ is [compatible](03-types.html#compatibility) with ´T_i´; + - if the expected type is defined, the result type ´U´ is [compatible](03-types.html#compatibility) to it. -If $f$ is a polymorphic method, [local type inference](#local-type-inference) is used to instantiate $f$'s type parameters. +If ´f´ is a polymorphic method, [local type inference](#local-type-inference) is used to instantiate ´f´'s type parameters. The polymorphic method is applicable if type inference can determine type arguments so that the instantiated method is applicable. -If $f$ has some value type, the application is taken to be equivalent to `$f$.apply($e_1 , \ldots , e_m$)`, -i.e. 
the application of an `apply` method defined by $f$. The value `$f$` is applicable to the given arguments if `$f$.apply` is applicable. +If ´f´ has some value type, the application is taken to be equivalent to `´f´.apply(´e_1 , \ldots , e_m´)`, +i.e. the application of an `apply` method defined by ´f´. The value `´f´` is applicable to the given arguments if `´f´.apply` is applicable. -Evaluation of `$f$($e_1 , \ldots , e_n$)` usually entails evaluation of -$f$ and $e_1 , \ldots , e_n$ in that order. Each argument expression -is converted to the type of its corresponding formal parameter. After -that, the application is rewritten to the function's right hand side, -with actual arguments substituted for formal parameters. The result -of evaluating the rewritten right-hand side is finally converted to -the function's declared result type, if one is given. +The application `´f´(´e_1 , \ldots , e_n´)` evaluates ´f´ and then each argument +´e_1 , \ldots , e_n´ from left to right, except for arguments that correspond to +a by-name parameter (see below). Each argument expression is converted to the +type of its corresponding formal parameter. After that, the application is +rewritten to the function's right hand side, with actual arguments substituted +for formal parameters. The result of evaluating the rewritten right-hand side +is finally converted to the function's declared result type, if one is given. The case of a formal parameter with a parameterless -method type `=>$T$` is treated specially. In this case, the -corresponding actual argument expression $e$ is not evaluated before the +method type `=> ´T´` is treated specially. In this case, the +corresponding actual argument expression ´e´ is not evaluated before the application. Instead, every use of the formal parameter on the -right-hand side of the rewrite rule entails a re-evaluation of $e$. +right-hand side of the rewrite rule entails a re-evaluation of ´e´. 
In other words, the evaluation order for `=>`-parameters is _call-by-name_ whereas the evaluation order for normal parameters is _call-by-value_. -Furthermore, it is required that $e$'s [packed type](#expression-typing) -conforms to the parameter type $T$. +Furthermore, it is required that ´e´'s [packed type](#expression-typing) +conforms to the parameter type ´T´. The behavior of by-name parameters is preserved if the application is transformed into a block due to named or default arguments. In this case, -the local value for that parameter has the form `val $y_i$ = () => $e$` -and the argument passed to the function is `$y_i$()`. +the local value for that parameter has the form `val ´y_i´ = () => ´e´` +and the argument passed to the function is `´y_i´()`. The last argument in an application may be marked as a sequence -argument, e.g. `$e$: _*`. Such an argument must correspond +argument, e.g. `´e´: _*`. Such an argument must correspond to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type -`$S$*` and it must be the only argument matching this +`´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments -must be the same). Furthermore, the type of $e$ must conform to -`scala.Seq[$T$]`, for some type $T$ which conforms to -$S$. In this case, the argument list is transformed by replacing the -sequence $e$ with its elements. When the application uses named +must be the same). Furthermore, the type of ´e´ must conform to +`scala.Seq[´T´]`, for some type ´T´ which conforms to +´S´. In this case, the argument list is transformed by replacing the +sequence ´e´ with its elements. When the application uses named arguments, the vararg parameter has to be specified exactly once. +If only a single argument is supplied, it may be supplied as a block expression +and parentheses can be omitted, in the form `´f´ { block }`. 
This is valid when +`f` has a single formal parameter or when all other formal parameters have +default values. + A function application usually allocates a new frame on the program's run-time stack. However, if a local method or a final method calls itself as its last action, the call is executed using the stack-frame @@ -306,7 +311,7 @@ Assume the following method which computes the sum of a variable number of arguments: ```scala -def sum(xs: Int*) = (0 /: xs) ((x, y) => x + y) +def sum(xs: Int*) = xs.foldLeft(0)((x, y) => x + y) ``` Then @@ -329,85 +334,85 @@ The keyword is ignored. ### Named and Default Arguments -If an application is to use named arguments $p = e$ or default +If an application is to use named arguments ´p = e´ or default arguments, the following conditions must hold. -- For every named argument $p_i = e_i$ which appears left of a positional argument - in the argument list $e_1 \ldots e_m$, the argument position $i$ coincides with - the position of parameter $p_i$ in the parameter list of the applied method. -- The names $x_i$ of all named arguments are pairwise distinct and no named +- For every named argument ´p_i = e_i´ which appears left of a positional argument + in the argument list ´e_1 \ldots e_m´, the argument position ´i´ coincides with + the position of parameter ´p_i´ in the parameter list of the applied method. +- The names ´x_i´ of all named arguments are pairwise distinct and no named argument defines a parameter which is already specified by a positional argument. -- Every formal parameter $p_j:T_j$ which is not specified by either a positional +- Every formal parameter ´p_j:T_j´ which is not specified by either a positional or named argument has a default argument. If the application uses named or default arguments the following transformation is applied to convert it into an application without named or default arguments. 
-If the method $f$ -has the form `$p.m$[$\mathit{targs}$]` it is transformed into the +If the method ´f´ +has the form `´p.m´[´\mathit{targs}´]` it is transformed into the block ```scala -{ val q = $p$ - q.$m$[$\mathit{targs}$] +{ val q = ´p´ + q.´m´[´\mathit{targs}´] } ``` -If the method $f$ is itself an application expression the transformation -is applied recursively on $f$. The result of transforming $f$ is a block of +If the method ´f´ is itself an application expression the transformation +is applied recursively on ´f´. The result of transforming ´f´ is a block of the form ```scala -{ val q = $p$ - val $x_1$ = expr$_1$ - $\ldots$ - val $x_k$ = expr$_k$ - q.$m$[$\mathit{targs}$]($\mathit{args}_1$)$, \ldots ,$($\mathit{args}_l$) +{ val q = ´p´ + val ´x_1´ = expr´_1´ + ´\ldots´ + val ´x_k´ = expr´_k´ + q.´m´[´\mathit{targs}´](´\mathit{args}_1´)´, \ldots ,´(´\mathit{args}_l´) } ``` -where every argument in $(\mathit{args}\_1) , \ldots , (\mathit{args}\_l)$ is a reference to -one of the values $x_1 , \ldots , x_k$. To integrate the current application -into the block, first a value definition using a fresh name $y_i$ is created -for every argument in $e_1 , \ldots , e_m$, which is initialised to $e_i$ for -positional arguments and to $e'_i$ for named arguments of the form -`$x_i=e'_i$`. Then, for every parameter which is not specified -by the argument list, a value definition using a fresh name $z_i$ is created, +where every argument in ´(\mathit{args}\_1) , \ldots , (\mathit{args}\_l)´ is a reference to +one of the values ´x_1 , \ldots , x_k´. To integrate the current application +into the block, first a value definition using a fresh name ´y_i´ is created +for every argument in ´e_1 , \ldots , e_m´, which is initialised to ´e_i´ for +positional arguments and to ´e'_i´ for named arguments of the form +`´x_i=e'_i´`. 
Then, for every parameter which is not specified
+by the argument list, a value definition using a fresh name ´z_i´ is created,
which is initialized using the method computing the
[default argument](04-basic-declarations-and-definitions.html#function-declarations-and-definitions)
of this parameter.

-Let $\mathit{args}$ be a permutation of the generated names $y_i$ and $z_i$ such such that the position of each name matches the position of its corresponding
-parameter in the method type `($p_1:T_1 , \ldots , p_n:T_n$)$U$`.
+Let ´\mathit{args}´ be a permutation of the generated names ´y_i´ and ´z_i´ such that the position of each name matches the position of its corresponding
+parameter in the method type `(´p_1:T_1 , \ldots , p_n:T_n´)´U´`.

The final result of the transformation is a block of the form

```scala
-{ val q = $p$
-  val $x_1$ = expr$_1$
-  $\ldots$
-  val $x_l$ = expr$_k$
-  val $y_1$ = $e_1$
-  $\ldots$
-  val $y_m$ = $e_m$
-  val $z_1$ = $q.m\$default\$i[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)$
-  $\ldots$
-  val $z_d$ = $q.m\$default\$j[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)$
-  q.$m$[$\mathit{targs}$]($\mathit{args}_1$)$, \ldots ,$($\mathit{args}_l$)($\mathit{args}$)
+{ val q = ´p´
+  val ´x_1´ = expr´_1´
+  ´\ldots´
+  val ´x_l´ = expr´_k´
+  val ´y_1´ = ´e_1´
+  ´\ldots´
+  val ´y_m´ = ´e_m´
+  val ´z_1´ = ´q.m\$default\$i[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)´
+  ´\ldots´
+  val ´z_d´ = ´q.m\$default\$j[\mathit{targs}](\mathit{args}_1), \ldots ,(\mathit{args}_l)´
+  q.´m´[´\mathit{targs}´](´\mathit{args}_1´)´, \ldots ,´(´\mathit{args}_l´)(´\mathit{args}´)
}
```

### Signature Polymorphic Methods

-For invocations of signature polymorphic methods of the target platform `$f$($e_1 , \ldots , e_m$)`,
-the invoked method has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call
-site. The parameter types `$T_ , \ldots , T_n$` are the types of the argument expressions
-`$e_1 , \ldots , e_m$`. 
If the declared return type `$R$` of the signature polymorphic method is
-any type other than `scala.AnyRef`, then the return type `$U$` is `$R$`.
-Otherwise, `$U$` is the expected type at the call site. If the expected type is
-undefined then `$U$` is `scala.AnyRef`. The parameter names `$p_1 , \ldots , p_n$` are fresh.
+For invocations of signature polymorphic methods of the target platform `´f´(´e_1 , \ldots , e_m´)`,
+the invoked method has a different method type `(´p_1´:´T_1 , \ldots , p_n´:´T_n´)´U´` at each call
+site. The parameter types `´T_1 , \ldots , T_n´` are the types of the argument expressions
+`´e_1 , \ldots , e_m´`. If the declared return type `´R´` of the signature polymorphic method is
+any type other than `scala.AnyRef`, then the return type `´U´` is `´R´`.
+Otherwise, `´U´` is the expected type at the call site. If the expected type is undefined then
+`´U´` is `scala.AnyRef`. The parameter names `´p_1 , \ldots , p_n´` are fresh.

###### Note

@@ -423,25 +428,25 @@ which had fewer such methods.)
SimpleExpr ::= SimpleExpr1 ‘_’
```

-The expression `$e$ _` is well-formed if $e$ is of method
-type or if $e$ is a call-by-name parameter. If $e$ is a method with
-parameters, `$e$ _` represents $e$ converted to a function
-type by [eta expansion](#eta-expansion). If $e$ is a
+The expression `´e´ _` is well-formed if ´e´ is of method
+type or if ´e´ is a call-by-name parameter. If ´e´ is a method with
+parameters, `´e´ _` represents ´e´ converted to a function
+type by [eta expansion](#eta-expansion-section). If ´e´ is a
parameterless method or call-by-name parameter of type
-`=>$T$`, `$e$ _` represents the function of type
-`() => $T$`, which evaluates $e$ when it is applied to the empty
-parameterlist `()`.
+`=> ´T´`, `´e´ _` represents the function of type
+`() => ´T´`, which evaluates ´e´ when it is applied to the empty
+parameter list `()`. 
###### Example -The method values in the left column are each equivalent to the [eta-expanded expressions](#eta-expansion) on the right. +The method values in the left column are each equivalent to the [eta-expanded expressions](#eta-expansion-section) on the right. | placeholder syntax | eta-expansion | |------------------------------ | ----------------------------------------------------------------------------| |`math.sin _` | `x => math.sin(x)` | |`math.pow _` | `(x1, x2) => math.pow(x1, x2)` | |`val vs = 1 to 9; vs.fold _` | `(z) => (op) => vs.fold(z)(op)` | -|`(1 to 9).fold(z)_` | `{ val eta1 = z; val eta2 = 1 to 9; op => eta2.fold(eta1)(op) }` | -|`Some(1).fold(??? : Int)_` | `{ val eta1 = () => ???; val eta2 = Some(1); op => eta2.fold(eta1())(op) }` | +|`(1 to 9).fold(z)_` | `{ val eta1 = 1 to 9; val eta2 = z; op => eta1.fold(eta2)(op) }` | +|`Some(1).fold(??? : Int)_` | `{ val eta1 = Some(1); val eta2 = () => ???; op => eta1.fold(eta2())(op) }` | Note that a space is necessary between a method name and the trailing underscore because otherwise the underscore would be considered part of the name. @@ -452,20 +457,20 @@ because otherwise the underscore would be considered part of the name. SimpleExpr ::= SimpleExpr TypeArgs ``` -A _type application_ `$e$[$T_1 , \ldots , T_n$]` instantiates -a polymorphic value $e$ of type -`[$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$S$` +A _type application_ `´e´[´T_1 , \ldots , T_n´]` instantiates +a polymorphic value ´e´ of type +`[´a_1´ >: ´L_1´ <: ´U_1, \ldots , a_n´ >: ´L_n´ <: ´U_n´]´S´` with argument types -`$T_1 , \ldots , T_n$`. Every argument type $T_i$ must obey -the corresponding bounds $L_i$ and $U_i$. That is, for each $i = 1 -, \ldots , n$, we must have $\sigma L_i <: T_i <: \sigma -U_i$, where $\sigma$ is the substitution $[a_1 := T_1 , \ldots , a_n -:= T_n]$. The type of the application is $\sigma S$. +`´T_1 , \ldots , T_n´`. 
Every argument type ´T_i´ must obey +the corresponding bounds ´L_i´ and ´U_i´. That is, for each ´i = 1 +, \ldots , n´, we must have ´\sigma L_i <: T_i <: \sigma +U_i´, where ´\sigma´ is the substitution ´[a_1 := T_1 , \ldots , a_n +:= T_n]´. The type of the application is ´\sigma S´. -If the function part $e$ is of some value type, the type application +If the function part ´e´ is of some value type, the type application is taken to be equivalent to -`$e$.apply[$T_1 , \ldots ,$ T$_n$]`, i.e. the application of an `apply` method defined by -$e$. +`´e´.apply[´T_1 , \ldots ,´ T´_n´]`, i.e. the application of an `apply` method defined by +´e´. Type applications can be omitted if [local type inference](#local-type-inference) can infer best type parameters @@ -478,9 +483,9 @@ and the expected result type. SimpleExpr ::= ‘(’ [Exprs] ‘)’ ``` -A _tuple expression_ `($e_1 , \ldots , e_n$)` is an alias +A _tuple expression_ `(´e_1 , \ldots , e_n´)` is an alias for the class instance creation -`scala.Tuple$n$($e_1 , \ldots , e_n$)`, where $n \geq 2$. +`scala.Tuple´n´(´e_1 , \ldots , e_n´)`, where ´n \geq 2´. The empty tuple `()` is the unique value of type `scala.Unit`. @@ -491,43 +496,43 @@ SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody) ``` A _simple instance creation expression_ is of the form -`new $c$` -where $c$ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations). Let $T$ be -the type of $c$. Then $T$ must +`new ´c´` +where ´c´ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations). Let ´T´ be +the type of ´c´. Then ´T´ must denote a (a type instance of) a non-abstract subclass of `scala.AnyRef`. Furthermore, the _concrete self type_ of the expression must conform to the [self type](05-classes-and-objects.html#templates) of the class denoted by -$T$. The concrete self type is normally -$T$, except if the expression `new $c$` appears as the +´T´. 
The concrete self type is normally +´T´, except if the expression `new ´c´` appears as the right hand side of a value definition ```scala -val $x$: $S$ = new $c$ +val ´x´: ´S´ = new ´c´ ``` -(where the type annotation `: $S$` may be missing). +(where the type annotation `: ´S´` may be missing). In the latter case, the concrete self type of the expression is the -compound type `$T$ with $x$.type`. +compound type `´T´ with ´x´.type`. The expression is evaluated by creating a fresh -object of type $T$ which is initialized by evaluating $c$. The -type of the expression is $T$. +object of type ´T´ which is initialized by evaluating ´c´. The +type of the expression is ´T´. A _general instance creation expression_ is of the form -`new $t$` for some [class template](05-classes-and-objects.html#templates) $t$. +`new ´t´` for some [class template](05-classes-and-objects.html#templates) ´t´. Such an expression is equivalent to the block ```scala -{ class $a$ extends $t$; new $a$ } +{ class ´a´ extends ´t´; new ´a´ } ``` -where $a$ is a fresh name of an _anonymous class_ which is +where ´a´ is a fresh name of an _anonymous class_ which is inaccessible to user programs. There is also a shorthand form for creating values of structural -types: If `{$D$}` is a class body, then -`new {$D$}` is equivalent to the general instance creation expression -`new AnyRef{$D$}`. +types: If `{´D´}` is a class body, then +`new {´D´}` is equivalent to the general instance creation expression +`new AnyRef{´D´}`. ###### Example Consider the following structural instance creation expression: @@ -545,10 +550,10 @@ new AnyRef{ def getName() = "aaron" } The latter is in turn a shorthand for the block ```scala -{ class anon\$X extends AnyRef{ def getName() = "aaron" }; new anon\$X } +{ class anon$X extends AnyRef{ def getName() = "aaron" }; new anon$X } ``` -where `anon\$X` is some freshly created name. +where `anon$X` is some freshly created name. 
## Blocks @@ -558,55 +563,58 @@ BlockExpr ::= ‘{’ CaseClauses ‘}’ Block ::= BlockStat {semi BlockStat} [ResultExpr] ``` -A _block expression_ `{$s_1$; $\ldots$; $s_n$; $e\,$}` is -constructed from a sequence of block statements $s_1 , \ldots , s_n$ -and a final expression $e$. The statement sequence may not contain +A _block expression_ `{´s_1´; ´\ldots´; ´s_n´; ´e\,´}` is +constructed from a sequence of block statements ´s_1 , \ldots , s_n´ +and a final expression ´e´. The statement sequence may not contain two definitions or declarations that bind the same name in the same namespace. The final expression can be omitted, in which case the unit value `()` is assumed. -The expected type of the final expression $e$ is the expected +The expected type of the final expression ´e´ is the expected type of the block. The expected type of all preceding statements is undefined. -The type of a block `$s_1$; $\ldots$; $s_n$; $e$` is -`$T$ forSome {$\,Q\,$}`, where $T$ is the type of $e$ and $Q$ +The type of a block `´s_1´; ´\ldots´; ´s_n´; ´e´` is +`´T´ forSome {´\,Q\,´}`, where ´T´ is the type of ´e´ and ´Q´ contains [existential clauses](03-types.html#existential-types) -for every value or type name which is free in $T$ -and which is defined locally in one of the statements $s_1 , \ldots , s_n$. +for every value or type name which is free in ´T´ +and which is defined locally in one of the statements ´s_1 , \ldots , s_n´. We say the existential clause _binds_ the occurrence of the value or type name. Specifically, -- A locally defined type definition `type$\;t = T$` - is bound by the existential clause `type$\;t >: T <: T$`. - It is an error if $t$ carries type parameters. -- A locally defined value definition `val$\;x: T = e$` is - bound by the existential clause `val$\;x: T$`. 
-- A locally defined class definition `class$\;c$ extends$\;t$` - is bound by the existential clause `type$\;c <: T$` where - $T$ is the least class type or refinement type which is a proper - supertype of the type $c$. It is an error if $c$ carries type parameters. -- A locally defined object definition `object$\;x\;$extends$\;t$` - is bound by the existential clause `val$\;x: T$` where - $T$ is the least class type or refinement type which is a proper supertype of the type - `$x$.type`. +- A locally defined type definition `type´\;t = T´` + is bound by the existential clause `type´\;t >: T <: T´`. + It is an error if ´t´ carries type parameters. +- A locally defined value definition `val´\;x: T = e´` is + bound by the existential clause `val´\;x: T´`. +- A locally defined class definition `class´\;c´ extends´\;t´` + is bound by the existential clause `type´\;c <: T´` where + ´T´ is the least class type or refinement type which is a proper + supertype of the type ´c´. It is an error if ´c´ carries type parameters. +- A locally defined object definition `object´\;x\;´extends´\;t´` + is bound by the existential clause `val´\;x: T´` where + ´T´ is the least class type or refinement type which is a proper supertype of the type + `´x´.type`. Evaluation of the block entails evaluation of its statement sequence, followed by an evaluation of the final expression -$e$, which defines the result of the block. +´e´, which defines the result of the block. + +A block expression `{´c_1´; ´\ldots´; ´c_n´}` where ´c_1 , \ldots , c_n´ are +case clauses forms a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions). ###### Example Assuming a class `Ref[T](x: T)`, the block ```scala -{ class C extends B {$\ldots$} ; new Ref(new C) } +{ class C extends B {´\ldots´} ; new Ref(new C) } ``` has the type `Ref[_1] forSome { type _1 <: B }`. 
The block ```scala -{ class C extends B {$\ldots$} ; new C } +{ class C extends B {´\ldots´} ; new C } ``` simply has type `B`, because with the rules [here](03-types.html#simplification-rules) @@ -626,11 +634,12 @@ Expressions can be constructed from operands and operators. ### Prefix Operations -A prefix operation $\mathit{op};e$ consists of a prefix operator $\mathit{op}$, which -must be one of the identifiers ‘`+`’, ‘`-`’, -‘`!`’ or ‘`~`’. The expression $\mathit{op};e$ is +A prefix operation ´\mathit{op};e´ consists of a prefix operator ´\mathit{op}´, which +must be one of the identifiers ‘`+`’, ‘`-`’, ‘`!`’ or ‘`~`’, +which must not be enclosed in backquotes. +The expression ´\mathit{op};e´ is equivalent to the postfix method application -`e.unary_$\mathit{op}$`. +`e.unary_´\mathit{op}´`. @@ -644,7 +653,7 @@ application of the infix operator `sin` to the operands ### Postfix Operations A postfix operator can be an arbitrary identifier. The postfix -operation $e;\mathit{op}$ is interpreted as $e.\mathit{op}$. +operation ´e;\mathit{op}´ is interpreted as ´e.\mathit{op}´. ### Infix Operations @@ -656,7 +665,7 @@ character. Characters are listed below in increasing order of precedence, with characters on the same line having the same precedence. ```scala -(all letters) +(all letters, as defined in [chapter 1](01-lexical-syntax.html), including `_` and `$`) | ^ & @@ -665,7 +674,7 @@ precedence, with characters on the same line having the same precedence. : + - * / % -(all other special characters) +(other operator characters, as defined in [chapter 1](01-lexical-syntax.html), including Unicode categories `Sm` and `So`) ``` That is, operators starting with a letter have lowest precedence, @@ -688,35 +697,34 @@ parts of an expression as follows. expression, then operators with higher precedence bind more closely than operators with lower precedence. 
- If there are consecutive infix - operations $e_0; \mathit{op}\_1; e_1; \mathit{op}\_2 \ldots \mathit{op}\_n; e_n$ - with operators $\mathit{op}\_1 , \ldots , \mathit{op}\_n$ of the same precedence, + operations ´e_0; \mathit{op}\_1; e_1; \mathit{op}\_2 \ldots \mathit{op}\_n; e_n´ + with operators ´\mathit{op}\_1 , \ldots , \mathit{op}\_n´ of the same precedence, then all these operators must have the same associativity. If all operators are left-associative, the sequence is interpreted as - $(\ldots(e_0;\mathit{op}\_1;e_1);\mathit{op}\_2\ldots);\mathit{op}\_n;e_n$. + ´(\ldots(e_0;\mathit{op}\_1;e_1);\mathit{op}\_2\ldots);\mathit{op}\_n;e_n´. Otherwise, if all operators are right-associative, the sequence is interpreted as - $e_0;\mathit{op}\_1;(e_1;\mathit{op}\_2;(\ldots \mathit{op}\_n;e_n)\ldots)$. + ´e_0;\mathit{op}\_1;(e_1;\mathit{op}\_2;(\ldots \mathit{op}\_n;e_n)\ldots)´. - Postfix operators always have lower precedence than infix - operators. E.g. $e_1;\mathit{op}\_1;e_2;\mathit{op}\_2$ is always equivalent to - $(e_1;\mathit{op}\_1;e_2);\mathit{op}\_2$. + operators. E.g. ´e_1;\mathit{op}\_1;e_2;\mathit{op}\_2´ is always equivalent to + ´(e_1;\mathit{op}\_1;e_2);\mathit{op}\_2´. The right-hand operand of a left-associative operator may consist of -several arguments enclosed in parentheses, e.g. $e;\mathit{op};(e_1,\ldots,e_n)$. -This expression is then interpreted as $e.\mathit{op}(e_1,\ldots,e_n)$. +several arguments enclosed in parentheses, e.g. ´e;\mathit{op};(e_1,\ldots,e_n)´. +This expression is then interpreted as ´e.\mathit{op}(e_1,\ldots,e_n)´. A left-associative binary -operation $e_1;\mathit{op};e_2$ is interpreted as $e_1.\mathit{op}(e_2)$. If $\mathit{op}$ is -right-associative, the same operation is interpreted as -`{ val $x$=$e_1$; $e_2$.$\mathit{op}$($x\,$) }`, where $x$ is a fresh -name. +operation ´e_1;\mathit{op};e_2´ is interpreted as ´e_1.\mathit{op}(e_2)´. 
If ´\mathit{op}´ is +right-associative and its parameter is passed by name, the same operation is interpreted as +´e_2.\mathit{op}(e_1)´. If ´\mathit{op}´ is right-associative and its parameter is passed by value, +it is interpreted as `{ val ´x´=´e_1´; ´e_2´.´\mathit{op}´(´x\,´) }`, where ´x´ is a fresh name. ### Assignment Operators An _assignment operator_ is an operator symbol (syntax category `op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character -“`=`”, with the exception of operators for which one of -the following conditions holds: +“`=`”, with the following exceptions: 1. the operator also starts with an equals character, or 1. the operator is one of `(<=)`, `(>=)`, `(!=)`. @@ -725,24 +733,24 @@ Assignment operators are treated specially in that they can be expanded to assignments if no other interpretation is valid. Let's consider an assignment operator such as `+=` in an infix -operation `$l$ += $r$`, where $l$, $r$ are expressions. +operation `´l´ += ´r´`, where ´l´, ´r´ are expressions. This operation can be re-interpreted as an operation which corresponds to the assignment ```scala -$l$ = $l$ + $r$ +´l´ = ´l´ + ´r´ ``` -except that the operation's left-hand-side $l$ is evaluated only once. +except that the operation's left-hand-side ´l´ is evaluated only once. The re-interpretation occurs if the following two conditions are fulfilled. -1. The left-hand-side $l$ does not have a member named +1. The left-hand-side ´l´ does not have a member named `+=`, and also cannot be converted by an [implicit conversion](#implicit-conversions) to a value with a member named `+=`. -1. The assignment `$l$ = $l$ + $r$` is type-correct. - In particular this implies that $l$ refers to a variable or object +1. The assignment `´l´ = ´l´ + ´r´` is type-correct. + In particular this implies that ´l´ refers to a variable or object that can be assigned to, and that is convertible to a value with a member named `+`. 
@@ -752,9 +760,9 @@ The re-interpretation occurs if the following two conditions are fulfilled. Expr1 ::= PostfixExpr ‘:’ CompoundType ``` -The _typed expression_ $e: T$ has type $T$. The type of -expression $e$ is expected to conform to $T$. The result of -the expression is the value of $e$ converted to type $T$. +The _typed expression_ ´e: T´ has type ´T´. The type of +expression ´e´ is expected to conform to ´T´. The result of +the expression is the value of ´e´ converted to type ´T´. ###### Example Here are examples of well-typed and ill-typed expressions. @@ -771,33 +779,37 @@ Here are examples of well-typed and ill-typed expressions. Expr1 ::= PostfixExpr ‘:’ Annotation {Annotation} ``` -An _annotated expression_ `$e$: @$a_1$ $\ldots$ @$a_n$` -attaches [annotations](11-annotations.html#user-defined-annotations) $a_1 , \ldots , a_n$ to the -expression $e$. +An _annotated expression_ `´e´: @´a_1´ ´\ldots´ @´a_n´` +attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1 , \ldots , a_n´ to the +expression ´e´. ## Assignments ```ebnf Expr1 ::= [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr | SimpleExpr1 ArgumentExprs ‘=’ Expr ``` -The interpretation of an assignment to a simple variable `$x$ = $e$` -depends on the definition of $x$. If $x$ denotes a mutable -variable, then the assignment changes the current value of $x$ to be -the result of evaluating the expression $e$. The type of $e$ is -expected to conform to the type of $x$. If $x$ is a parameterless +The interpretation of an assignment to a simple variable `´x´ = ´e´` +depends on the definition of ´x´. If ´x´ denotes a mutable +variable, then the assignment changes the current value of ´x´ to be +the result of evaluating the expression ´e´. The type of ´e´ is +expected to conform to the type of ´x´. 
If ´x´ is a parameterless method defined in some template, and the same template contains a -setter method `$x$_=` as member, then the assignment -`$x$ = $e$` is interpreted as the invocation -`$x$_=($e\,$)` of that setter method. Analogously, an -assignment `$f.x$ = $e$` to a parameterless method $x$ -is interpreted as the invocation `$f.x$_=($e\,$)`. - -An assignment `$f$($\mathit{args}\,$) = $e$` with a method application to the +setter method `´x´_=` as member, then the assignment +`´x´ = ´e´` is interpreted as the invocation +`´x´_=(´e\,´)` of that setter method. Analogously, an +assignment `´f.x´ = ´e´` to a parameterless method ´x´ +is interpreted as the invocation `´f.x´_=(´e\,´)`. +If ´x´ is an application of a unary operator, then the expression +is interpreted as though it were written as the explicit application +`´x´.unary_´\mathit{op}´`, namely, as `´x´.unary_´\mathit{op}´_=(´e\,´)`. + +An assignment `´f´(´\mathit{args}\,´) = ´e´` with a method application to the left of the ‘`=`’ operator is interpreted as -`$f.$update($\mathit{args}$, $e\,$)`, i.e. -the invocation of an `update` method defined by $f$. +`´f.´update(´\mathit{args}´, ´e\,´)`, i.e. +the invocation of an `update` method defined by ´f´. ###### Example Here are some assignment expressions and their equivalent expansions. @@ -866,25 +878,25 @@ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] ``` -The _conditional expression_ `if ($e_1$) $e_2$ else $e_3$` chooses -one of the values of $e_2$ and $e_3$, depending on the -value of $e_1$. The condition $e_1$ is expected to -conform to type `Boolean`. The then-part $e_2$ and the -else-part $e_3$ are both expected to conform to the expected +The _conditional expression_ `if (´e_1´) ´e_2´ else ´e_3´` chooses +one of the values of ´e_2´ and ´e_3´, depending on the +value of ´e_1´. The condition ´e_1´ is expected to +conform to type `Boolean`. 
The then-part ´e_2´ and the +else-part ´e_3´ are both expected to conform to the expected type of the conditional expression. The type of the conditional expression is the [weak least upper bound](03-types.html#weak-conformance) -of the types of $e_2$ and -$e_3$. A semicolon preceding the `else` symbol of a +of the types of ´e_2´ and +´e_3´. A semicolon preceding the `else` symbol of a conditional expression is ignored. The conditional expression is evaluated by evaluating first -$e_1$. If this evaluates to `true`, the result of -evaluating $e_2$ is returned, otherwise the result of -evaluating $e_3$ is returned. +´e_1´. If this evaluates to `true`, the result of +evaluating ´e_2´ is returned, otherwise the result of +evaluating ´e_3´ is returned. A short form of the conditional expression eliminates the -else-part. The conditional expression `if ($e_1$) $e_2$` is -evaluated as if it was `if ($e_1$) $e_2$ else ()`. +else-part. The conditional expression `if (´e_1´) ´e_2´` is +evaluated as if it was `if (´e_1´) ´e_2´ else ()`. ## While Loop Expressions @@ -892,8 +904,8 @@ evaluated as if it was `if ($e_1$) $e_2$ else ()`. Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr ``` -The _while loop expression_ `while ($e_1$) $e_2$` is typed and -evaluated as if it was an application of `whileLoop ($e_1$) ($e_2$)` where +The _while loop expression_ `while (´e_1´) ´e_2´` is typed and +evaluated as if it was an application of `whileLoop (´e_1´) (´e_2´)` where the hypothetical method `whileLoop` is defined as follows. ```scala @@ -907,8 +919,8 @@ def whileLoop(cond: => Boolean)(body: => Unit): Unit = Expr1 ::= ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’ ``` -The _do loop expression_ `do $e_1$ while ($e_2$)` is typed and -evaluated as if it was the expression `($e_1$ ; while ($e_2$) $e_1$)`. +The _do loop expression_ `do ´e_1´ while (´e_2´)` is typed and +evaluated as if it was the expression `(´e_1´ ; while (´e_2´) ´e_1´)`. 
A semicolon preceding the `while` symbol of a do loop expression is ignored. ## For Comprehensions and For Loops @@ -921,91 +933,91 @@ Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pat Guard ::= ‘if’ PostfixExpr ``` -A _for loop_ `for ($\mathit{enums}\,$) $e$` executes expression $e$ -for each binding generated by the enumerators $\mathit{enums}$. -A _for comprehension_ `for ($\mathit{enums}\,$) yield $e$` evaluates -expression $e$ for each binding generated by the enumerators $\mathit{enums}$ +A _for loop_ `for (´\mathit{enums}\,´) ´e´` executes expression ´e´ +for each binding generated by the enumerators ´\mathit{enums}´. +A _for comprehension_ `for (´\mathit{enums}\,´) yield ´e´` evaluates +expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´ and collects the results. An enumerator sequence always starts with a generator; this can be followed by further generators, value definitions, or guards. -A _generator_ `$p$ <- $e$` produces bindings from an expression $e$ which is -matched in some way against pattern $p$. Optionally, `case` can appear in front +A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is +matched in some way against pattern ´p´. Optionally, `case` can appear in front of a generator pattern, this has no meaning in Scala 2 but will be [required in Scala 3 if `p` is not irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). -A _value definition_ `$p$ = $e$` -binds the value name $p$ (or several names in a pattern $p$) to -the result of evaluating the expression $e$. A _guard_ -`if $e$` contains a boolean expression which restricts +A _value definition_ `´p´ = ´e´` +binds the value name ´p´ (or several names in a pattern ´p´) to +the result of evaluating the expression ´e´. A _guard_ +`if ´e´` contains a boolean expression which restricts enumerated bindings. 
The precise meaning of generators and guards is defined by translation to invocations of four methods: `map`, `withFilter`, `flatMap`, and `foreach`. These methods can be implemented in different ways for different carrier types. The translation scheme is as follows. In a first step, every -generator `$p$ <- $e$`, where $p$ is not [irrefutable](08-pattern-matching.html#patterns) -for the type of $e$ is replaced by +generator `´p´ <- ´e´`, where ´p´ is not [irrefutable](08-pattern-matching.html#patterns) +for the type of ´e´ is replaced by ```scala -$p$ <- $e$.withFilter { case $p$ => true; case _ => false } +´p´ <- ´e´.withFilter { case ´p´ => true; case _ => false } ``` Then, the following rules are applied repeatedly until all comprehensions have been eliminated. - A for comprehension - `for ($p$ <- $e\,$) yield $e'$` + `for (´p´ <- ´e\,´) yield ´e'´` is translated to - `$e$.map { case $p$ => $e'$ }`. + `´e´.map { case ´p´ => ´e'´ }`. - A for loop - `for ($p$ <- $e\,$) $e'$` + `for (´p´ <- ´e\,´) ´e'´` is translated to - `$e$.foreach { case $p$ => $e'$ }`. + `´e´.foreach { case ´p´ => ´e'´ }`. 
- A for comprehension ```scala - for ($p$ <- $e$; $p'$ <- $e'; \ldots$) yield $e''$ + for (´p´ <- ´e´; ´p'´ <- ´e'; \ldots´) yield ´e''´ ``` - where `$\ldots$` is a (possibly empty) + where `´\ldots´` is a (possibly empty) sequence of generators, definitions, or guards, is translated to ```scala - $e$.flatMap { case $p$ => for ($p'$ <- $e'; \ldots$) yield $e''$ } + ´e´.flatMap { case ´p´ => for (´p'´ <- ´e'; \ldots´) yield ´e''´ } ``` - A for loop ```scala - for ($p$ <- $e$; $p'$ <- $e'; \ldots$) $e''$ + for (´p´ <- ´e´; ´p'´ <- ´e'; \ldots´) ´e''´ ``` - where `$\ldots$` is a (possibly empty) + where `´\ldots´` is a (possibly empty) sequence of generators, definitions, or guards, is translated to ```scala - $e$.foreach { case $p$ => for ($p'$ <- $e'; \ldots$) $e''$ } + ´e´.foreach { case ´p´ => for (´p'´ <- ´e'; \ldots´) ´e''´ } ``` - - A generator `$p$ <- $e$` followed by a guard - `if $g$` is translated to a single generator - `$p$ <- $e$.withFilter(($x_1 , \ldots , x_n$) => $g\,$)` where - $x_1 , \ldots , x_n$ are the free variables of $p$. + - A generator `´p´ <- ´e´` followed by a guard + `if ´g´` is translated to a single generator + `´p´ <- ´e´.withFilter((´x_1 , \ldots , x_n´) => ´g\,´)` where + ´x_1 , \ldots , x_n´ are the free variables of ´p´. - - A generator `$p$ <- $e$` followed by a value definition - `$p'$ = $e'$` is translated to the following generator of pairs of values, where - $x$ and $x'$ are fresh names: + - A generator `´p´ <- ´e´` followed by a value definition + `´p'´ = ´e'´` is translated to the following generator of pairs of values, where + ´x´ and ´x'´ are fresh names: ```scala - ($p$, $p'$) <- for ($x @ p$ <- $e$) yield { val $x' @ p'$ = $e'$; ($x$, $x'$) } + (´p´, ´p'´) <- for (´x @ p´ <- ´e´) yield { val ´x' @ p'´ = ´e'´; (´x´, ´x'´) } ``` ###### Example -The following code produces all pairs of numbers between $1$ and $n-1$ +The following code produces all pairs of numbers between ´1´ and ´n-1´ whose sums are prime. 
```scala @@ -1071,31 +1083,31 @@ The code above makes use of the fact that `map`, `flatMap`, Expr1 ::= ‘return’ [Expr] ``` -A _return expression_ `return $e$` must occur inside the body of some +A _return expression_ `return ´e´` must occur inside the body of some enclosing user defined method. The innermost enclosing method in a -source program, $m$, must have an explicitly declared result type, and -the type of $e$ must conform to it. +source program, ´m´, must have an explicitly declared result type, and +the type of ´e´ must conform to it. -The return expression evaluates the expression $e$ and returns its -value as the result of $m$. The evaluation of any statements or +The return expression evaluates the expression ´e´ and returns its +value as the result of ´m´. The evaluation of any statements or expressions following the return expression is omitted. The type of a return expression is `scala.Nothing`. -The expression $e$ may be omitted. The return expression +The expression ´e´ may be omitted. The return expression `return` is type-checked and evaluated as if it were `return ()`. Returning from the method from within a nested function may be implemented by throwing and catching a -`scala.runtime.NonLocalReturnException`. Any exception catches +`scala.runtime.NonLocalReturnControl`. Any exception catches between the point of return and the enclosing methods might see and catch that exception. A key comparison makes sure that this exception is only caught by the method instance which is terminated by the return. If the return expression is itself part of an anonymous function, it -is possible that the enclosing method $m$ has already returned +is possible that the enclosing method ´m´ has already returned before the return expression is executed. In that case, the thrown -`scala.runtime.NonLocalReturnException` will not be caught, and will +`scala.runtime.NonLocalReturnControl` will not be caught, and will propagate up the call stack. 
## Throw Expressions @@ -1104,10 +1116,10 @@ propagate up the call stack. Expr1 ::= ‘throw’ Expr ``` -A _throw expression_ `throw $e$` evaluates the expression -$e$. The type of this expression must conform to -`Throwable`. If $e$ evaluates to an exception -reference, evaluation is aborted with the thrown exception. If $e$ +A _throw expression_ `throw ´e´` evaluates the expression +´e´. The type of this expression must conform to +`Throwable`. If ´e´ evaluates to an exception +reference, evaluation is aborted with the thrown exception. If ´e´ evaluates to `null`, evaluation is instead aborted with a `NullPointerException`. If there is an active [`try` expression](#try-expressions) which handles the thrown @@ -1118,88 +1130,88 @@ is `scala.Nothing`. ## Try Expressions ```ebnf -Expr1 ::= ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’] - [‘finally’ Expr] +Expr1 ::= ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] ``` -A _try expression_ is of the form `try { $b$ } catch $h$` -where the handler $h$ is a +A _try expression_ is of the form `try { ´b´ } catch ´h´` +where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions) ```scala -{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ } +{ case ´p_1´ => ´b_1´ ´\ldots´ case ´p_n´ => ´b_n´ } ``` This expression is evaluated by evaluating the block -$b$. If evaluation of $b$ does not cause an exception to be -thrown, the result of $b$ is returned. Otherwise the -handler $h$ is applied to the thrown exception. +´b´. If evaluation of ´b´ does not cause an exception to be +thrown, the result of ´b´ is returned. Otherwise the +handler ´h´ is applied to the thrown exception. If the handler contains a case matching the thrown exception, the first such case is invoked. If the handler contains no case matching the thrown exception, the exception is -re-thrown. +re-thrown. 
More generally, if the handler is a `PartialFunction`, +it is applied only if it is defined at the given exception. -Let $\mathit{pt}$ be the expected type of the try expression. The block -$b$ is expected to conform to $\mathit{pt}$. The handler $h$ -is expected conform to type `scala.PartialFunction[scala.Throwable, $\mathit{pt}\,$]`. +Let ´\mathit{pt}´ be the expected type of the try expression. The block +´b´ is expected to conform to ´\mathit{pt}´. The handler ´h´ +is expected to conform to type `scala.Function[scala.Throwable, ´\mathit{pt}\,´]`. The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance) -of the type of $b$ and the result type of $h$. +of the type of ´b´ and the result type of ´h´. -A try expression `try { $b$ } finally $e$` evaluates the block -$b$. If evaluation of $b$ does not cause an exception to be -thrown, the expression $e$ is evaluated. If an exception is thrown -during evaluation of $e$, the evaluation of the try expression is +A try expression `try { ´b´ } finally ´e´` evaluates the block +´b´. If evaluation of ´b´ does not cause an exception to be +thrown, the expression ´e´ is evaluated. If an exception is thrown +during evaluation of ´e´, the evaluation of the try expression is aborted with the thrown exception. If no exception is thrown during -evaluation of $e$, the result of $b$ is returned as the +evaluation of ´e´, the result of ´b´ is returned as the result of the try expression. -If an exception is thrown during evaluation of $b$, the finally block -$e$ is also evaluated. If another exception $e$ is thrown -during evaluation of $e$, evaluation of the try expression is +If an exception is thrown during evaluation of ´b´, the finally block +´e´ is also evaluated. If another exception ´e´ is thrown +during evaluation of ´e´, evaluation of the try expression is aborted with the thrown exception. 
If no exception is thrown during -evaluation of $e$, the original exception thrown in $b$ is -re-thrown once evaluation of $e$ has completed. The block -$b$ is expected to conform to the expected type of the try -expression. The finally expression $e$ is expected to conform to +evaluation of ´e´, the original exception thrown in ´b´ is +re-thrown once evaluation of ´e´ has completed. The block +´b´ is expected to conform to the expected type of the try +expression. The finally expression ´e´ is expected to conform to type `Unit`. -A try expression `try { $b$ } catch $e_1$ finally $e_2$` +A try expression `try { ´b´ } catch ´e_1´ finally ´e_2´` is a shorthand -for `try { try { $b$ } catch $e_1$ } finally $e_2$`. +for `try { try { ´b´ } catch ´e_1´ } finally ´e_2´`. ## Anonymous Functions ```ebnf -Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr -ResultExpr ::= (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block +Expr ::= (Bindings | [‘implicit’] (id | ‘_’)) ‘=>’ Expr +ResultExpr ::= (Bindings | [‘implicit’] (id | ‘_’) [‘:’ CompoundType]) ‘=>’ Block Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’ Binding ::= (id | ‘_’) [‘:’ Type] ``` -The anonymous function of arity $n$, `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => e` maps parameters $x_i$ of types $T_i$ to a result given by expression $e$. The scope of each formal parameter $x_i$ is $e$. Formal parameters must have pairwise distinct names. +The anonymous function of arity ´n´, `(´x_1´: ´T_1 , \ldots , x_n´: ´T_n´) => e` maps parameters ´x_i´ of types ´T_i´ to a result given by expression ´e´. The scope of each formal parameter ´x_i´ is ´e´. Formal parameters must have pairwise distinct names. -In the case of a single untyped formal parameter, `($x\,$) => $e$` can be abbreviated to `$x$ => $e$`. If an anonymous function `($x$: $T\,$) => $e$` with a single typed parameter appears as the result expression of a block, it can be abbreviated to `$x$: $T$ => e`. 
+In the case of a single untyped formal parameter, `(´x\,´) => ´e´` can be abbreviated to `´x´ => ´e´`. If an anonymous function `(´x´: ´T\,´) => ´e´` with a single typed parameter appears as the result expression of a block, it can be abbreviated to `´x´: ´T´ => e`. A formal parameter may also be a wildcard represented by an underscore `_`. In that case, a fresh name for the parameter is chosen arbitrarily. A named parameter of an anonymous function may be optionally preceded by an `implicit` modifier. In that case the parameter is labeled [`implicit`](07-implicits.html#implicit-parameters-and-views); however the parameter section itself does not count as an [implicit parameter section](07-implicits.html#implicit-parameters). Hence, arguments to anonymous functions always have to be given explicitly. ### Translation -If the expected type of the anonymous function is of the shape `scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]`, or can be [SAM-converted](#sam-conversion) to such a function type, the type `$T_i$` of a parameter `$x_i$` can be omitted, as far as `$S_i$` is defined in the expected type, and `$T_i$ = $S_i$` is assumed. Furthermore, the expected type when type checking $e$ is $R$. +If the expected type of the anonymous function is of the shape `scala.Function´n´[´S_1 , \ldots , S_n´, ´R\,´]`, or can be [SAM-converted](#sam-conversion) to such a function type, the type `´T_i´` of a parameter `´x_i´` can be omitted, as far as `´S_i´` is defined in the expected type, and `´T_i´ = ´S_i´` is assumed. Furthermore, the expected type when type checking ´e´ is ´R´. -If there is no expected type for the function literal, all formal parameter types `$T_i$` must be specified explicitly, and the expected type of $e$ is undefined. The type of the anonymous function is `scala.Function$n$[$T_1 , \ldots , T_n$, $R\,$]`, where $R$ is the [packed type](#expression-typing) of $e$. $R$ must be equivalent to a type which does not refer to any of the formal parameters $x_i$. 
+If there is no expected type for the function literal, all formal parameter types `´T_i´` must be specified explicitly, and the expected type of ´e´ is undefined. The type of the anonymous function is `scala.Function´n´[´T_1 , \ldots , T_n´, ´R\,´]`, where ´R´ is the [packed type](#expression-typing) of ´e´. ´R´ must be equivalent to a type which does not refer to any of the formal parameters ´x_i´. The eventual run-time value of an anonymous function is determined by the expected type: - - a subclass of one of the builtin function types, `scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]` (with $S_i$ and $R$ fully defined), + - a subclass of one of the builtin function types, `scala.Function´n´[´S_1 , \ldots , S_n´, ´R\,´]` (with ´S_i´ and ´R´ fully defined), - a [single-abstract-method (SAM) type](#sam-conversion); - - `PartialFunction[$T$, $U$]`, if the function literal is of the shape `x => x match { $\ldots$ }` + - `PartialFunction[´T´, ´U´]` - some other type. The standard anonymous function evaluates in the same way as the following instance creation expression: ```scala -new scala.Function$n$[$T_1 , \ldots , T_n$, $T$] { - def apply($x_1$: $T_1 , \ldots , x_n$: $T_n$): $T$ = $e$ +new scala.Function´n´[´T_1 , \ldots , T_n´, ´T´] { + def apply(´x_1´: ´T_1 , \ldots , x_n´: ´T_n´): ´T´ = ´e´ } ``` @@ -1207,7 +1219,15 @@ The same evaluation holds for a SAM type, except that the instantiated type is g The underlying platform may provide more efficient ways of constructing these instances, such as Java 8's `invokedynamic` bytecode and `LambdaMetaFactory` class. -A `PartialFunction`'s value receives an additional `isDefinedAt` member, which is derived from the pattern match in the function literal, with each case's body being replaced by `true`, and an added default (if none was given) that evaluates to `false`. +When a `PartialFunction` is required, an additional member `isDefinedAt` +is synthesized, which simply returns `true`. 
+However, if the function literal has the shape `x => x match { ´\ldots´ }`, +then `isDefinedAt` is derived from the pattern match in the following way: +each case from the match expression evaluates to `true`, +and if there is no default case, +a default case is added that evaluates to `false`. +For more details on how that is implemented see +["Pattern Matching Anonymous Functions"](08-pattern-matching.html#pattern-matching-anonymous-functions). ###### Example Examples of anonymous functions: @@ -1220,7 +1240,7 @@ f => g => x => f(g(x)) // Curried function composition (x: Int,y: Int) => x + y // A summation function () => { count += 1; count } // The function which takes an - // empty parameter list $()$, + // empty parameter list ´()´, // increments a non-local variable // `count' and returns the new value. @@ -1240,19 +1260,19 @@ are legal. Such an expression represents an anonymous function where subsequent occurrences of underscores denote successive parameters. Define an _underscore section_ to be an expression of the form -`_:$T$` where $T$ is a type, or else of the form `_`, provided the underscore does not appear as the expression part of a -type ascription `_:$T$`. -An expression $e$ of syntactic category `Expr` _binds_ an underscore section -$u$, if the following two conditions hold: (1) $e$ properly contains $u$, and (2) there is no other expression of syntactic category `Expr` -which is properly contained in $e$ and which itself properly contains $u$. -If an expression $e$ binds underscore sections $u_1 , \ldots , u_n$, in this order, it is equivalent to -the anonymous function `($u'_1$, ... 
$u'_n$) => $e'$` -where each $u_i'$ results from $u_i$ by replacing the underscore with a fresh identifier and -$e'$ results from $e$ by replacing each underscore section $u_i$ by $u_i'$. +If an expression ´e´ binds underscore sections ´u_1 , \ldots , u_n´, in this order, it is equivalent to +the anonymous function `(´u'_1´, ... ´u'_n´) => ´e'´` +where each ´u_i'´ results from ´u_i´ by replacing the underscore with a fresh identifier and +´e'´ results from ´e´ by replacing each underscore section ´u_i´ by ´u_i'´. ###### Example The anonymous functions in the left column use placeholder @@ -1277,15 +1297,15 @@ include at least the expressions of the following forms: - A string literal - A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object) - An element of an enumeration from the underlying platform -- A literal array, of the form `Array$(c_1 , \ldots , c_n)$`, - where all of the $c_i$'s are themselves constant expressions +- A literal array, of the form `Array´(c_1 , \ldots , c_n)´`, + where all of the ´c_i´'s are themselves constant expressions - An identifier defined by a [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions). ## Statements ```ebnf BlockStat ::= Import - | {Annotation} [‘implicit’ | ‘lazy’] Def + | {Annotation} [‘implicit’] [‘lazy’] Def | {Annotation} {LocalModifier} TmplDef | Expr1 | @@ -1300,8 +1320,8 @@ Statements occur as parts of blocks and templates. A _statement_ can be an import, a definition or an expression, or it can be empty. Statements used in the template of a class definition can also be declarations. An expression that is used as a statement can have an -arbitrary value type. An expression statement $e$ is evaluated by -evaluating $e$ and discarding the result of the evaluation. +arbitrary value type. An expression statement ´e´ is evaluated by +evaluating ´e´ and discarding the result of the evaluation. 
@@ -1323,8 +1343,8 @@ available implicit conversions are given in the next two sub-sections. ### Value Conversions The following seven implicit conversions can be applied to an -expression $e$ which has some value type $T$ and which is type-checked with -some expected type $\mathit{pt}$. +expression ´e´ which has some value type ´T´ and which is type-checked with +some expected type ´\mathit{pt}´. ###### Static Overloading Resolution If an expression denotes several possible members of a class, @@ -1332,36 +1352,41 @@ If an expression denotes several possible members of a class, is applied to pick a unique member. ###### Type Instantiation -An expression $e$ of polymorphic type +An expression ´e´ of polymorphic type ```scala -[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ +[´a_1´ >: ´L_1´ <: ´U_1 , \ldots , a_n´ >: ´L_n´ <: ´U_n´]´T´ ``` which does not appear as the function part of -a type application is converted to a type instance of $T$ +a type application is converted to a type instance of ´T´ by determining with [local type inference](#local-type-inference) -instance types `$T_1 , \ldots , T_n$` -for the type variables `$a_1 , \ldots , a_n$` and -implicitly embedding $e$ in the [type application](#type-applications) -`$e$[$T_1 , \ldots , T_n$]`. +instance types `´T_1 , \ldots , T_n´` +for the type variables `´a_1 , \ldots , a_n´` and +implicitly embedding ´e´ in the [type application](#type-applications) +`´e´[´T_1 , \ldots , T_n´]`. ###### Numeric Widening -If $e$ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) +If ´e´ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) to the expected type, it is widened to the expected type using one of the numeric conversion methods `toShort`, `toChar`, `toInt`, `toLong`, -`toFloat`, `toDouble` defined [here](12-the-scala-standard-library.html#numeric-value-types). 
+`toFloat`, `toDouble` defined [in the standard library](12-the-scala-standard-library.html#numeric-value-types). + +Since conversions from `Int` to `Float` and from `Long` to `Float` or `Double` +may incur a loss of precision, those implicit conversions are deprecated. +The conversion is permitted for literals if the original value can be recovered, +that is, if conversion back to the original type produces the original value. ###### Numeric Literal Narrowing If the expected type is `Byte`, `Short` or `Char`, and -the expression $e$ is an integer literal fitting in the range of that +the expression ´e´ is an integer literal fitting in the range of that type, it is converted to the same literal in that type. ###### Value Discarding -If $e$ has some value type and the expected type is `Unit`, -$e$ is converted to the expected type by embedding it in the -term `{ $e$; () }`. +If ´e´ has some value type and the expected type is `Unit`, +´e´ is converted to the expected type by embedding it in the +term `{ ´e´; () }`. ###### SAM conversion An expression `(p1, ..., pN) => body` of function type `(T1, ..., TN) => T` is sam-convertible to the expected type `S` if the following holds: @@ -1382,19 +1407,19 @@ It follows that: - it must be possible to derive a fully-defined type `U` from `S` by inferring any unknown type parameters of `C`. Finally, we impose some implementation restrictions (these may be lifted in future releases): - - `C` must not be nested or local (it must not capture its environment, as that results in a zero-argument constructor) + - `C` must not be nested or local (it must not capture its environment, as that results in a nonzero-argument constructor) - `C`'s constructor must not have an implicit argument list (this simplifies type inference); - `C` must not declare a self type (this simplifies type inference); - `C` must not be `@specialized`. 
###### View Application -If none of the previous conversions applies, and $e$'s type -does not conform to the expected type $\mathit{pt}$, it is attempted to convert -$e$ to the expected type with a [view](07-implicits.html#views). +If none of the previous conversions applies, and ´e´'s type +does not conform to the expected type ´\mathit{pt}´, it is attempted to convert +´e´ to the expected type with a [view](07-implicits.html#views). ###### Selection on `Dynamic` -If none of the previous conversions applies, and $e$ is a prefix -of a selection $e.x$, and $e$'s type conforms to class `scala.Dynamic`, +If none of the previous conversions applies, and ´e´ is a prefix +of a selection ´e.x´, and ´e´'s type conforms to class `scala.Dynamic`, then the selection is rewritten according to the rules for [dynamic member selection](#dynamic-member-selection). @@ -1404,8 +1429,8 @@ The following four implicit conversions can be applied to methods which are not applied to some argument list. ###### Evaluation -A parameterless method $m$ of type `=> $T$` is always converted to -type $T$ by evaluating the expression to which $m$ is bound. +A parameterless method ´m´ of type `=> ´T´` is always converted to +type ´T´ by evaluating the expression to which ´m´ is bound. ###### Implicit Application If the method takes only implicit parameters, implicit @@ -1413,56 +1438,80 @@ arguments are passed following the rules [here](07-implicits.html#implicit-param ###### Eta Expansion Otherwise, if the method is not a constructor, -and the expected type $\mathit{pt}$ is a function type -$(\mathit{Ts}') \Rightarrow T'$, [eta-expansion](#eta-expansion) -is performed on the expression $e$. +and the expected type ´\mathit{pt}´ is a function type, or, +for methods of non-zero arity, a type [sam-convertible](#sam-conversion) to a function type, +´(\mathit{Ts}') \Rightarrow T'´, [eta-expansion](#eta-expansion-section) +is performed on the expression ´e´. 
+ +(The exception for zero-arity methods is to avoid surprises due to unexpected sam conversion.) ###### Empty Application -Otherwise, if $e$ has method type $()T$, it is implicitly applied to the empty -argument list, yielding $e()$. +Otherwise, if ´e´ has method type ´()T´, it is implicitly applied to the empty +argument list, yielding ´e()´. ### Overloading Resolution -If an identifier or selection $e$ references several members of a +If an identifier or selection ´e´ references several members of a class, the context of the reference is used to identify a unique -member. The way this is done depends on whether or not $e$ is used as -a function. Let $\mathscr{A}$ be the set of members referenced by $e$. +member. The way this is done depends on whether or not ´e´ is used as +a function. Let ´\mathscr{A}´ be the set of members referenced by ´e´. -Assume first that $e$ appears as a function in an application, as in -`$e$($e_1 , \ldots , e_m$)`. +Assume first that ´e´ appears as a function in an application, as in +`´e´(´e_1 , \ldots , e_m´)`. One first determines the set of functions that is potentially [applicable](#function-applications) based on the _shape_ of the arguments. -The *shape* of an argument expression $e$, written $\mathit{shape}(e)$, is +The *shape* of an argument expression ´e´, written ´\mathit{shape}(e)´, is a type that is defined as follows: - - For a function expression `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$: (Any $, \ldots ,$ Any) => $\mathit{shape}(b)$`, - where `Any` occurs $n$ times in the argument type. - - For a named argument `$n$ = $e$`: $\mathit{shape}(e)$. + - For a function expression `(´p_1´: ´T_1 , \ldots , p_n´: ´T_n´) => ´b´: (Any ´, \ldots ,´ Any) => ´\mathit{shape}(b)´`, + where `Any` occurs ´n´ times in the argument type. + - For a pattern-matching anonymous function definition `{ case ... }`: `PartialFunction[Any, Nothing]`. + - For a named argument `´n´ = ´e´`: ´\mathit{shape}(e)´. 
- For all other expressions: `Nothing`. -Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are [_applicable_](#function-applications) -to expressions $(e_1 , \ldots , e_n)$ of types $(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))$. -If there is precisely one alternative in $\mathscr{B}$, that alternative is chosen. - -Otherwise, let $S_1 , \ldots , S_m$ be the list of types obtained by typing each argument as follows. -An argument `$e_i$` of the shape `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$` where one of the `$T_i$` is missing, -i.e., a function literal with a missing parameter type, is typed with an expected function type that -propagates the least upper bound of the fully defined types of the corresponding parameters of -the ([SAM-converted](#sam-conversion)) function types specified by the `$i$`th argument type found in each alternative. -All other arguments are typed with an undefined expected type. - -For every member $m$ in $\mathscr{B}$ one determines whether it is applicable -to expressions ($e_1 , \ldots , e_m$) of types $S_1, \ldots , S_m$. - -It is an error if none of the members in $\mathscr{B}$ is applicable. If there is one -single applicable alternative, that alternative is chosen. Otherwise, let $\mathscr{CC}$ +Let ´\mathscr{B}´ be the set of alternatives in ´\mathscr{A}´ that are [_applicable_](#function-applications) +to expressions ´(e_1 , \ldots , e_n)´ of types ´(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))´. +If there is precisely one alternative in ´\mathscr{B}´, that alternative is chosen. + +Otherwise, let ´S_1 , \ldots , S_m´ be the list of types obtained by typing each argument as follows. + +Normally, an argument is typed without an expected type, except when +all alternatives explicitly specify the same parameter type for this argument (a missing parameter type, +due to e.g. 
arity differences, is taken as `NoType`, thus resorting to no expected type), +or when trying to propagate more type information to aid inference of higher-order function parameter types, as explained next. + +The intuition for higher-order function parameter type inference is that all arguments must be of a function-like type +(`PartialFunction`, `FunctionN` or some equivalent [SAM type](#sam-conversion)), +which in turn must define the same set of higher-order argument types, so that they can safely be used as +the expected type of a given argument of the overloaded method, without unduly ruling out any alternatives. +The intent is not to steer overloading resolution, but to preserve enough type information to steer type +inference of the arguments (a function literal or eta-expanded method) to this overloaded method. + +Note that the expected type drives eta-expansion (not performed unless a function-like type is expected), +as well as inference of omitted parameter types of function literals. + +More precisely, an argument `´e_i´` is typed with an expected type that is derived from the `´i´`th argument +type found in each alternative (call these `´T_{ij}´` for alternative `´j´` and argument position `´i´`) when +all `´T_{ij}´` are function types `´(A_{1j},..., A_{nj}) => ?´` (or the equivalent `PartialFunction`, or SAM) +of some arity `´n´`, and their argument types `´A_{kj}´` are identical across all overloads `´j´` for a +given `´k´`. Then, the expected type for `´e_i´` is derived as follows: + - we use `´PartialFunction[A_{1j},..., A_{nj}, ?]´` if for some overload `´j´`, `´T_{ij}´`'s type symbol is `PartialFunction`; + - else, if for some `´j´`, `´T_{ij}´` is `FunctionN`, the expected type is `´FunctionN[A_{1j},..., A_{nj}, ?]´`; + - else, if for all `´j´`, `´T_{ij}´` is a SAM type of the same class, defining argument types `´A_{1j},..., A_{nj}´` + (and a potentially varying result type), the expected type encodes these argument types and the SAM class. 
+ +For every member ´m´ in ´\mathscr{B}´ one determines whether it is applicable +to expressions (´e_1 , \ldots , e_m´) of types ´S_1, \ldots , S_m´. + +It is an error if none of the members in ´\mathscr{B}´ is applicable. If there is one +single applicable alternative, that alternative is chosen. Otherwise, let ´\mathscr{CC}´ be the set of applicable alternatives which don't employ any default argument -in the application to $e_1 , \ldots , e_m$. +in the application to ´e_1 , \ldots , e_m´. -It is again an error if $\mathscr{CC}$ is empty. +It is again an error if ´\mathscr{CC}´ is empty. Otherwise, one chooses the _most specific_ alternative among the alternatives -in $\mathscr{CC}$, according to the following definition of being "as specific as", and +in ´\mathscr{CC}´, according to the following definition of being "as specific as", and "more specific than": -- A parameterized method $m$ of type `($p_1:T_1, \ldots , p_n:T_n$)$U$` is - _as specific as_ some other member $m'$ of type $S$ if $m'$ is [applicable](#function-applications) - to arguments `($p_1 , \ldots , p_n$)` of types $T_1 , \ldots , T_n$. -- A polymorphic method of type `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is - as specific as some other member of type $S$ if $T$ is as specific as $S$ - under the assumption that for $i = 1 , \ldots , n$ each $a_i$ is an abstract type name - bounded from below by $L_i$ and from above by $U_i$. +- A parameterized method ´m´ of type `(´p_1:T_1, \ldots , p_n:T_n´)´U´` is + _as specific as_ some other member ´m'´ of type ´S´ if ´m'´ is [applicable](#function-applications) + to arguments `(´p_1 , \ldots , p_n´)` of types ´T_1 , \ldots , T_last´; + if ´T_n´ denotes a repeated parameter (it has shape ´T*´), and so does ´m'´'s last parameter, + ´T_last´ is taken as ´T´, otherwise ´T_n´ is used directly. 
+- A polymorphic method of type `[´a_1´ >: ´L_1´ <: ´U_1 , \ldots , a_n´ >: ´L_n´ <: ´U_n´]´T´` is + as specific as some other member of type ´S´ if ´T´ is as specific as ´S´ + under the assumption that for ´i = 1 , \ldots , n´ each ´a_i´ is an abstract type name + bounded from below by ´L_i´ and from above by ´U_i´. - A member of any other type is always as specific as a parameterized method or a polymorphic method. -- Given two members of types $T$ and $U$ which are neither parameterized nor polymorphic method types, - the member of type $T$ is as specific as the member of type $U$ if - the existential dual of $T$ conforms to the existential dual of $U$. +- Given two members of types ´T´ and ´U´ which are neither parameterized nor polymorphic method types, + the member of type ´T´ is as specific as the member of type ´U´ if + the existential dual of ´T´ conforms to the existential dual of ´U´. Here, the existential dual of a polymorphic type - `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is - `$T$ forSome { type $a_1$ >: $L_1$ <: $U_1$ $, \ldots ,$ type $a_n$ >: $L_n$ <: $U_n$}`. + `[´a_1´ >: ´L_1´ <: ´U_1 , \ldots , a_n´ >: ´L_n´ <: ´U_n´]´T´` is + `´T´ forSome { type ´a_1´ >: ´L_1´ <: ´U_1´ ´, \ldots ,´ type ´a_n´ >: ´L_n´ <: ´U_n´}`. The existential dual of every other type is the type itself. -The _relative weight_ of an alternative $A$ over an alternative $B$ is a +The _relative weight_ of an alternative ´A´ over an alternative ´B´ is a number from 0 to 2, defined as the sum of -- 1 if $A$ is as specific as $B$, 0 otherwise, and -- 1 if $A$ is defined in a class or object which is derived from the class or object defining $B$, 0 otherwise. +- 1 if ´A´ is as specific as ´B´, 0 otherwise, and +- 1 if ´A´ is defined in a class or object which is derived from the class or object defining ´B´, 0 otherwise. 
-A class or object $C$ is _derived_ from a class or object $D$ if one of +A class or object ´C´ is _derived_ from a class or object ´D´ if one of the following holds: -- $C$ is a subclass of $D$, or -- $C$ is a companion object of a class derived from $D$, or -- $D$ is a companion object of a class from which $C$ is derived. +- ´C´ is a subclass of ´D´, or +- ´C´ is a companion object of a class derived from ´D´, or +- ´D´ is a companion object of a class from which ´C´ is derived. -An alternative $A$ is _more specific_ than an alternative $B$ if -the relative weight of $A$ over $B$ is greater than the relative -weight of $B$ over $A$. +An alternative ´A´ is _more specific_ than an alternative ´B´ if +the relative weight of ´A´ over ´B´ is greater than the relative +weight of ´B´ over ´A´. -It is an error if there is no alternative in $\mathscr{CC}$ which is more -specific than all other alternatives in $\mathscr{CC}$. +It is an error if there is no alternative in ´\mathscr{CC}´ which is more +specific than all other alternatives in ´\mathscr{CC}´. -Assume next that $e$ appears as a function in a type application, as -in `$e$[$\mathit{targs}\,$]`. Then all alternatives in -$\mathscr{A}$ which take the same number of type parameters as there are type -arguments in $\mathit{targs}$ are chosen. It is an error if no such alternative exists. +Assume next that ´e´ appears as a function in a type application, as +in `´e´[´\mathit{targs}\,´]`. Then all alternatives in +´\mathscr{A}´ which take the same number of type parameters as there are type +arguments in ´\mathit{targs}´ are chosen. It is an error if no such alternative exists. If there are several such alternatives, overloading resolution is -applied again to the whole expression `$e$[$\mathit{targs}\,$]`. +applied again to the whole expression `´e´[´\mathit{targs}\,´]`. -Assume finally that $e$ does not appear as a function in either an application or a type application. 
-If an expected type is given, let $\mathscr{B}$ be the set of those alternatives -in $\mathscr{A}$ which are [compatible](03-types.html#compatibility) to it. -Otherwise, let $\mathscr{B}$ be the same as $\mathscr{A}$. -In this last case we choose the most specific alternative among all alternatives in $\mathscr{B}$. -It is an error if there is no alternative in $\mathscr{B}$ which is -more specific than all other alternatives in $\mathscr{B}$. +Assume finally that ´e´ does not appear as a function in either an application or a type application. +If an expected type is given, let ´\mathscr{B}´ be the set of those alternatives +in ´\mathscr{A}´ which are [compatible](03-types.html#compatibility) to it. +Otherwise, let ´\mathscr{B}´ be the same as ´\mathscr{A}´. +In this last case we choose the most specific alternative among all alternatives in ´\mathscr{B}´. +It is an error if there is no alternative in ´\mathscr{B}´ which is +more specific than all other alternatives in ´\mathscr{B}´. ###### Example Consider the following definitions: ```scala class A extends B {} -def f(x: B, y: B) = $\ldots$ -def f(x: A, y: B) = $\ldots$ +def f(x: B, y: B) = ´\ldots´ +def f(x: A, y: B) = ´\ldots´ val a: A val b: B ``` Then the application `f(b, b)` refers to the first -definition of $f$ whereas the application `f(a, a)` +definition of ´f´ whereas the application `f(a, a)` refers to the second. Assume now we add a third overloaded definition ```scala -def f(x: B, y: A) = $\ldots$ +def f(x: B, y: A) = ´\ldots´ ``` Then the application `f(a, a)` is rejected for being ambiguous, since @@ -1552,88 +1603,92 @@ no most specific applicable signature exists. ### Local Type Inference Local type inference infers type arguments to be passed to expressions -of polymorphic type. Say $e$ is of type [$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ +of polymorphic type. 
Say ´e´ is of type [´a_1´ >: ´L_1´ <: ´U_1, \ldots , a_n´ >: ´L_n´ <: ´U_n´]´T´ and no explicit type parameters are given. Local type inference converts this expression to a type -application `$e$[$T_1 , \ldots , T_n$]`. The choice of the -type arguments $T_1 , \ldots , T_n$ depends on the context in which -the expression appears and on the expected type $\mathit{pt}$. +application `´e´[´T_1 , \ldots , T_n´]`. The choice of the +type arguments ´T_1 , \ldots , T_n´ depends on the context in which +the expression appears and on the expected type ´\mathit{pt}´. There are three cases. ###### Case 1: Selections If the expression appears as the prefix of a selection with a name -$x$, then type inference is _deferred_ to the whole expression -$e.x$. That is, if $e.x$ has type $S$, it is now treated as having -type [$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$S$, +´x´, then type inference is _deferred_ to the whole expression +´e.x´. That is, if ´e.x´ has type ´S´, it is now treated as having +type [´a_1´ >: ´L_1´ <: ´U_1 , \ldots , a_n´ >: ´L_n´ <: ´U_n´]´S´, and local type inference is applied in turn to infer type arguments -for $a_1 , \ldots , a_n$, using the context in which $e.x$ appears. +for ´a_1 , \ldots , a_n´, using the context in which ´e.x´ appears. ###### Case 2: Values -If the expression $e$ appears as a value without being applied to +If the expression ´e´ appears as a value without being applied to value arguments, the type arguments are inferred by solving a -constraint system which relates the expression's type $T$ with the -expected type $\mathit{pt}$. Without loss of generality we can assume that -$T$ is a value type; if it is a method type we apply -[eta-expansion](#eta-expansion) to convert it to a function type. 
Solving -means finding a substitution $\sigma$ of types $T_i$ for the type -parameters $a_i$ such that - -- None of the inferred types $T_i$ is a [singleton type](03-types.html#singleton-types) +constraint system which relates the expression's type ´T´ with the +expected type ´\mathit{pt}´. Without loss of generality we can assume that +´T´ is a value type; if it is a method type we apply +[eta-expansion](#eta-expansion-section) to convert it to a function type. Solving +means finding a substitution ´\sigma´ of types ´T_i´ for the type +parameters ´a_i´ such that + +- None of the inferred types ´T_i´ is a [singleton type](03-types.html#singleton-types) + unless it is a singleton type corresponding to an object or a constant value + definition or the corresponding bound ´U_i´ is a subtype of `scala.Singleton`. - All type parameter bounds are respected, i.e. - $\sigma L_i <: \sigma a_i$ and $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$. + ´\sigma L_i <: \sigma a_i´ and ´\sigma a_i <: \sigma U_i´ for ´i = 1 , \ldots , n´. - The expression's type conforms to the expected type, i.e. - $\sigma T <: \sigma \mathit{pt}$. + ´\sigma T <: \sigma \mathit{pt}´. It is a compile time error if no such substitution exists. If several substitutions exist, local-type inference will choose for -each type variable $a_i$ a minimal or maximal type $T_i$ of the -solution space. A _maximal_ type $T_i$ will be chosen if the type -parameter $a_i$ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the -type $T$ of the expression. A _minimal_ type $T_i$ will be chosen +each type variable ´a_i´ a minimal or maximal type ´T_i´ of the +solution space. A _maximal_ type ´T_i´ will be chosen if the type +parameter ´a_i´ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the +type ´T´ of the expression. A _minimal_ type ´T_i´ will be chosen in all other situations, i.e. 
if the variable appears covariantly, -non-variantly or not at all in the type $T$. We call such a substitution -an _optimal solution_ of the given constraint system for the type $T$. +non-variantly or not at all in the type ´T´. We call such a substitution +an _optimal solution_ of the given constraint system for the type ´T´. ###### Case 3: Methods The last case applies if the expression -$e$ appears in an application $e(d_1 , \ldots , d_m)$. In that case -$T$ is a method type $(p_1:R_1 , \ldots , p_m:R_m)T'$. Without loss of -generality we can assume that the result type $T'$ is a value type; if -it is a method type we apply [eta-expansion](#eta-expansion) to -convert it to a function type. One computes first the types $S_j$ of -the argument expressions $d_j$, using two alternative schemes. Each -argument expression $d_j$ is typed first with the expected type $R_j$, -in which the type parameters $a_1 , \ldots , a_n$ are taken as type -constants. If this fails, the argument $d_j$ is typed instead with an -expected type $R_j'$ which results from $R_j$ by replacing every type -parameter in $a_1 , \ldots , a_n$ with _undefined_. +´e´ appears in an application ´e(d_1 , \ldots , d_m)´. In that case +´T´ is a method type ´(p_1:R_1 , \ldots , p_m:R_m)T'´. Without loss of +generality we can assume that the result type ´T'´ is a value type; if +it is a method type we apply [eta-expansion](#eta-expansion-section) to +convert it to a function type. One computes first the types ´S_j´ of +the argument expressions ´d_j´, using two alternative schemes. Each +argument expression ´d_j´ is typed first with the expected type ´R_j´, +in which the type parameters ´a_1 , \ldots , a_n´ are taken as type +constants. If this fails, the argument ´d_j´ is typed instead with an +expected type ´R_j'´ which results from ´R_j´ by replacing every type +parameter in ´a_1 , \ldots , a_n´ with _undefined_. 
In a second step, type arguments are inferred by solving a constraint system which relates the method's type with the expected type -$\mathit{pt}$ and the argument types $S_1 , \ldots , S_m$. Solving the +´\mathit{pt}´ and the argument types ´S_1 , \ldots , S_m´. Solving the constraint system means -finding a substitution $\sigma$ of types $T_i$ for the type parameters -$a_i$ such that - -- None of the inferred types $T_i$ is a [singleton type](03-types.html#singleton-types) -- All type parameter bounds are respected, i.e. $\sigma L_i <: \sigma a_i$ and - $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$. -- The method's result type $T'$ conforms to the expected type, i.e. $\sigma T' <: \sigma \mathit{pt}$. +finding a substitution ´\sigma´ of types ´T_i´ for the type parameters +´a_i´ such that + +- None of the inferred types ´T_i´ is a [singleton type](03-types.html#singleton-types) + unless it is a singleton type corresponding to an object or a constant value + definition or the corresponding bound ´U_i´ is a subtype of `scala.Singleton`. +- All type parameter bounds are respected, i.e. ´\sigma L_i <: \sigma a_i´ and + ´\sigma a_i <: \sigma U_i´ for ´i = 1 , \ldots , n´. +- The method's result type ´T'´ conforms to the expected type, i.e. ´\sigma T' <: \sigma \mathit{pt}´. - Each argument type [weakly conforms](03-types.html#weak-conformance) to the corresponding formal parameter - type, i.e. $\sigma S_j <:_w \sigma R_j$ for $j = 1 , \ldots , m$. + type, i.e. ´\sigma S_j <:_w \sigma R_j´ for ´j = 1 , \ldots , m´. It is a compile time error if no such substitution exists. If several -solutions exist, an optimal one for the type $T'$ is chosen. +solutions exist, an optimal one for the type ´T'´ is chosen. -All or parts of an expected type $\mathit{pt}$ may be undefined. The rules for +All or parts of an expected type ´\mathit{pt}´ may be undefined. 
The rules for [conformance](03-types.html#conformance) are extended to this case by adding -the rule that for any type $T$ the following two statements are always -true: $\mathit{undefined} <: T$ and $T <: \mathit{undefined}$ +the rule that for any type ´T´ the following two statements are always +true: ´\mathit{undefined} <: T´ and ´T <: \mathit{undefined}´ It is possible that no minimal or maximal solution for a type variable -exists, in which case a compile-time error results. Because $<:$ is a +exists, in which case a compile-time error results. Because ´<:´ is a pre-order, it is also possible that a solution set has several optimal solutions for a type. In that case, a Scala compiler is free to pick any one of them. @@ -1683,7 +1738,7 @@ the type parameter `a` of `cons`: ```scala Int <: a? List[scala.Nothing] <: List[a?] -List[a?] <: $\mathit{undefined}$ +List[a?] <: ´\mathit{undefined}´ ``` The optimal solution of this constraint system is @@ -1710,7 +1765,7 @@ First, the argument expressions are typed. The first argument first tried to be typed with expected type `List[a]`. This fails, as `List[Int]` is not a subtype of `List[a]`. Therefore, the second strategy is tried; `xs` is now typed with expected type -`List[$\mathit{undefined}$]`. This succeeds and yields the argument type +`List[´\mathit{undefined}´]`. This succeeds and yields the argument type `List[Int]`. In a second step, one solves the following constraint system for @@ -1719,7 +1774,7 @@ the type parameter `a` of `cons`: ```scala String <: a? List[Int] <: List[a?] -List[a?] <: $\mathit{undefined}$ +List[a?] <: ´\mathit{undefined}´ ``` The optimal solution of this constraint system is @@ -1730,24 +1785,24 @@ a = scala.Any so `scala.Any` is the type inferred for `a`. -### Eta Expansion +### Eta Expansion <a id="eta-expansion-section"></a> _Eta-expansion_ converts an expression of method type to an equivalent expression of function type. It proceeds in two steps. 
-First, one identifies the maximal sub-expressions of $e$; let's -say these are $e_1 , \ldots , e_m$. For each of these, one creates a -fresh name $x_i$. Let $e'$ be the expression resulting from -replacing every maximal subexpression $e_i$ in $e$ by the -corresponding fresh name $x_i$. Second, one creates a fresh name $y_i$ -for every argument type $T_i$ of the method ($i = 1 , \ldots , -n$). The result of eta-conversion is then: +First, one identifies the maximal sub-expressions of ´e´; let's +say these are ´e_1 , \ldots , e_m´. For each of these, one creates a +fresh name ´x_i´. Let ´e'´ be the expression resulting from +replacing every maximal subexpression ´e_i´ in ´e´ by the +corresponding fresh name ´x_i´. Second, one creates a fresh name ´y_i´ +for every argument type ´T_i´ of the method (´i = 1 , \ldots , +n´). The result of eta-conversion is then: ```scala -{ val $x_1$ = $e_1$; - $\ldots$ - val $x_m$ = $e_m$; - ($y_1: T_1 , \ldots , y_n: T_n$) => $e'$($y_1 , \ldots , y_n$) +{ val ´x_1´ = ´e_1´; + ´\ldots´ + val ´x_m´ = ´e_m´; + (´y_1: T_1 , \ldots , y_n: T_n´) => ´e'´(´y_1 , \ldots , y_n´) } ``` @@ -1759,7 +1814,7 @@ a sub-expression of parameterless method type, is not evaluated in the expanded The standard Scala library defines a marker trait `scala.Dynamic`. Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`. 
-The following rewrites are performed, assuming $e$'s type conforms to `scala.Dynamic`, and the original expression does not type check under the normal rules, as specified fully in the relevant subsection of [implicit conversion](#dynamic-member-selection): +The following rewrites are performed, assuming ´e´'s type conforms to `scala.Dynamic`, and the original expression does not type check under the normal rules, as specified fully in the relevant subsection of [implicit conversion](#dynamic-member-selection): * `e.m[Ti](xi)` becomes `e.applyDynamic[Ti]("m")(xi)` * `e.m[Ti]` becomes `e.selectDynamic[Ti]("m")` diff --git a/spec/07-implicits.md b/spec/07-implicits.md index 42b90ef5f102..9592fed410bf 100644 --- a/spec/07-implicits.md +++ b/spec/07-implicits.md @@ -45,47 +45,53 @@ object Monoids { ## Implicit Parameters An _implicit parameter list_ -`(implicit $p_1$,$\ldots$,$p_n$)` of a method marks the parameters $p_1 , \ldots , p_n$ as +`(implicit ´p_1´,´\ldots´,´p_n´)` of a method marks the parameters ´p_1 , \ldots , p_n´ as implicit. A method or constructor can have only one implicit parameter list, and it must be the last parameter list given. -A method with implicit parameters can be applied to arguments just -like a normal method. In this case the `implicit` label has no -effect. However, if such a method misses arguments for its implicit -parameters, such arguments will be automatically provided. +The `implicit` modifier must be included in the first group of modifiers in the parameter list. +For class parameters, order of modifiers is not significant; the following definitions are equivalent: + +```scala +class C()(implicit override val i: Int, j: Int) extends T // preferred style +class C()(override implicit val i: Int, j: Int) extends T +``` + +A method with implicit parameters can be applied to explicit arguments just +as though the parameters were not declared implicit. In that case, missing parameters +can be supplied by default arguments. 
The actual arguments that are eligible to be passed to an implicit -parameter of type $T$ fall into two categories. First, eligible are -all identifiers $x$ that can be accessed at the point of the method +parameter of type ´T´ fall into two categories. First, eligible are +all identifiers ´x´ that can be accessed at the point of the method call without a prefix and that denote an [implicit definition](#the-implicit-modifier) -or an implicit parameter. An eligible -identifier may thus be a local name, or a member of an enclosing -template, or it may be have been made accessible without a prefix -through an [import clause](04-basic-declarations-and-definitions.html#import-clauses). If there are no eligible +or an implicit parameter. To be accessible without a prefix, an identifier +must be a local name, a member of an enclosing template or a name introduced by an +[import clause](04-basic-declarations-and-definitions.html#import-clauses). If there are no eligible identifiers under this rule, then, second, eligible are also all `implicit` members of some object that belongs to the implicit -scope of the implicit parameter's type, $T$. - -The _implicit scope_ of a type $T$ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type. -Here, we say a class $C$ is _associated_ with a type $T$ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of $T$. 
- -The _parts_ of a type $T$ are: - -- if $T$ is a compound type `$T_1$ with $\ldots$ with $T_n$`, - the union of the parts of $T_1 , \ldots , T_n$, as well as $T$ itself; -- if $T$ is a parameterized type `$S$[$T_1 , \ldots , T_n$]`, - the union of the parts of $S$ and $T_1 , \ldots , T_n$; -- if $T$ is a singleton type `$p$.type`, - the parts of the type of $p$; -- if $T$ is a type projection `$S$#$U$`, - the parts of $S$ as well as $T$ itself; -- if $T$ is a type alias, the parts of its expansion; -- if $T$ is an abstract type, the parts of its upper bound; -- if $T$ denotes an implicit conversion to a type with a method with argument types $T_1 , \ldots , T_n$ and result type $U$, - the union of the parts of $T_1 , \ldots , T_n$ and $U$; +scope of the implicit parameter's type, ´T´. + +The _implicit scope_ of a type ´T´ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type. +Here, we say a class ´C´ is _associated_ with a type ´T´ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of ´T´. 
+ +The _parts_ of a type ´T´ are: + +- if ´T´ is a compound type `´T_1´ with ´\ldots´ with ´T_n´`, + the union of the parts of ´T_1 , \ldots , T_n´, as well as ´T´ itself; +- if ´T´ is a parameterized type `´S´[´T_1 , \ldots , T_n´]`, + the union of the parts of ´S´ and ´T_1 , \ldots , T_n´; +- if ´T´ is a singleton type `´p´.type`, + the parts of the type of ´p´; +- if ´T´ is a type projection `´S´#´U´`, + the parts of ´S´ as well as ´T´ itself; +- if ´T´ is a type alias, the parts of its expansion; +- if ´T´ is an abstract type, the parts of its upper bound; +- if ´T´ denotes an implicit conversion to a type with a method with argument types ´T_1 , \ldots , T_n´ and result type ´U´, + the union of the parts of ´T_1 , \ldots , T_n´ and ´U´; - the parts of quantified (existential or universal) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`); -- in all other cases, just $T$ itself. +- in all other cases, just ´T´ itself. Note that packages are internally represented as classes with companion modules to hold the package members. Thus, implicits defined in a package object are part of the implicit scope of a type prefixed by that package. @@ -177,39 +183,123 @@ expansion: sort(arg)(x => magic(x)(x => magic(x)(x => ... ))) ``` -To prevent such infinite expansions, the compiler keeps track of -a stack of “open implicit types” for which implicit arguments are currently being -searched. Whenever an implicit argument for type $T$ is searched, the -“core type” of $T$ is added to the stack. Here, the _core type_ -of $T$ is $T$ with aliases expanded, top-level type [annotations](11-annotations.html#user-defined-annotations) and -[refinements](03-types.html#compound-types) removed, and occurrences -of top-level existentially bound variables replaced by their upper -bounds. The core type is removed from the stack once the search for -the implicit argument either definitely fails or succeeds. 
Everytime a -core type is added to the stack, it is checked that this type does not -dominate any of the other types in the set. - -Here, a core type $T$ _dominates_ a type $U$ if $T$ is -[equivalent](03-types.html#equivalence) -to $U$, or if the top-level type constructors of $T$ and $U$ have a -common element and $T$ is more complex than $U$. - -The set of _top-level type constructors_ $\mathit{ttcs}(T)$ of a type $T$ depends on the form of +Such infinite expansions should be detected and reported as errors, however to support the deliberate +implicit construction of recursive values we allow implicit arguments to be marked as by-name. At call +sites recursive uses of implicit values are permitted if they occur in an implicit by-name argument. + +Consider the following example, + +```scala +trait Foo { + def next: Foo +} + +object Foo { + implicit def foo(implicit rec: Foo): Foo = + new Foo { def next = rec } +} + +val foo = implicitly[Foo] +assert(foo eq foo.next) +``` + +As with the `magic` case above this diverges due to the recursive implicit argument `rec` of method +`foo`. If we mark the implicit argument as by-name, + +```scala +trait Foo { + def next: Foo +} + +object Foo { + implicit def foo(implicit rec: => Foo): Foo = + new Foo { def next = rec } +} + +val foo = implicitly[Foo] +assert(foo eq foo.next) +``` + +the example compiles with the assertion successful. + +When compiled, recursive by-name implicit arguments of this sort are extracted out as val members of a +local synthetic object at call sites as follows, + +```scala +val foo: Foo = scala.Predef.implicitly[Foo]( + { + object LazyDefns$1 { + val rec$1: Foo = Foo.foo(rec$1) + // ^^^^^ + // recursive knot tied here + } + LazyDefns$1.rec$1 + } +) +assert(foo eq foo.next) +``` + +Note that the recursive use of `rec$1` occurs within the by-name argument of `foo` and is consequently +deferred. The desugaring matches what a programmer would do to construct such a recursive value +explicitly. 
+ +To prevent infinite expansions, such as the `magic` example above, the compiler keeps track of a stack +of “open implicit types” for which implicit arguments are currently being searched. Whenever an +implicit argument for type ´T´ is searched, ´T´ is added to the stack paired with the implicit +definition which produces it, and whether it was required to satisfy a by-name implicit argument or +not. The type is removed from the stack once the search for the implicit argument either definitely +fails or succeeds. Every time a type is about to be added to the stack, it is checked against +existing entries which were produced by the same implicit definition and then, + ++ if it is equivalent to some type which is already on the stack and there is a by-name argument between +  that entry and the top of the stack. In this case the search for that type succeeds immediately and +  the implicit argument is compiled as a recursive reference to the found argument. That argument is +  added as an entry in the synthesized implicit dictionary if it has not already been added. ++ otherwise if the _core_ of the type _dominates_ the core of a type already on the stack, then the +  implicit expansion is said to _diverge_ and the search for that type fails immediately. ++ otherwise it is added to the stack paired with the implicit definition which produces it. +  Implicit resolution continues with the implicit arguments of that definition (if any). + +Here, the _core type_ of ´T´ is ´T´ with aliases expanded, +top-level type [annotations](11-annotations.html#user-defined-annotations) and +[refinements](03-types.html#compound-types) removed, and occurrences of top-level existentially bound +variables replaced by their upper bounds. + +A core type ´T´ _dominates_ a type ´U´ if ´T´ is [equivalent](03-types.html#equivalence) to ´U´, +or if the top-level type constructors of ´T´ and ´U´ have a common element and ´T´ is more complex +than ´U´ and the _covering sets_ of ´T´ and ´U´ are equal.
+ +The set of _top-level type constructors_ ´\mathit{ttcs}(T)´ of a type ´T´ depends on the form of the type: -- For a type designator, $\mathit{ttcs}(p.c) ~=~ \{c\}$; -- For a parameterized type, $\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}$; -- For a singleton type, $\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)$, provided $p$ has type $T$; -- For a compound type, `$\mathit{ttcs}(T_1$ with $\ldots$ with $T_n)$` $~=~ \mathit{ttcs}(T_1) \cup \ldots \cup \mathit{ttcs}(T_n)$. +- For a type designator, ´\mathit{ttcs}(p.c) ~=~ \{c\}´; +- For a parameterized type, ´\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}´; +- For a singleton type, ´\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)´, provided ´p´ has type ´T´; +- For a compound type, `´\mathit{ttcs}(T_1´ with ´\ldots´ with ´T_n)´` ´~=~ \mathit{ttcs}(T_1) \cup \ldots \cup \mathit{ttcs}(T_n)´. -The _complexity_ $\operatorname{complexity}(T)$ of a core type is an integer which also depends on the form of +The _complexity_ ´\operatorname{complexity}(T)´ of a core type is an integer which also depends on the form of the type: -- For a type designator, $\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)$ -- For a parameterized type, $\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})$ -- For a singleton type denoting a package $p$, $\operatorname{complexity}(p.type) ~=~ 0$ -- For any other singleton type, $\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)$, provided $p$ has type $T$; -- For a compound type, `$\operatorname{complexity}(T_1$ with $\ldots$ with $T_n)$` $= \Sigma\operatorname{complexity}(T_i)$ +- For a type designator, ´\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)´ +- For a parameterized type, ´\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})´ +- For a singleton type denoting a package ´p´, ´\operatorname{complexity}(p.type) ~=~ 0´ +- For any other 
singleton type, ´\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)´, provided ´p´ has type ´T´; +- For a compound type, `´\operatorname{complexity}(T_1´ with ´\ldots´ with ´T_n)´` ´= \Sigma\operatorname{complexity}(T_i)´ + +The _covering set_ ´\mathit{cs}(T)´ of a type ´T´ is the set of type designators mentioned in a type. +For example, given the following, + +```scala +type A = List[(Int, Int)] +type B = List[(Int, (Int, Int))] +type C = List[(Int, String)] +``` + +the corresponding covering sets are: + +- ´\mathit{cs}(A)´: List, Tuple2, Int +- ´\mathit{cs}(B)´: List, Tuple2, Int +- ´\mathit{cs}(C)´: List, Tuple2, Int, String ###### Example When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`, @@ -223,7 +313,7 @@ Int => Ordered[Int] ``` All types share the common type constructor `scala.Function1`, -but the complexity of the each new type is lower than the complexity of the previous types. +but the complexity of each new type is lower than the complexity of the previous types. Hence, the code typechecks. ###### Example @@ -249,35 +339,35 @@ will issue an error signalling a divergent implicit expansion. ## Views Implicit parameters and methods can also define implicit conversions -called views. A _view_ from type $S$ to type $T$ is +called views. A _view_ from type ´S´ to type ´T´ is defined by an implicit value which has function type -`$S$=>$T$` or `(=>$S$)=>$T$` or by a method convertible to a value of that +`´S´ => ´T´` or `(=> ´S´) => ´T´` or by a method convertible to a value of that type. Views are applied in three situations: -1. If an expression $e$ is of type $T$, and $T$ does not conform to the - expression's expected type $\mathit{pt}$. In this case an implicit $v$ is - searched which is applicable to $e$ and whose result type conforms to - $\mathit{pt}$. The search proceeds as in the case of implicit parameters, - where the implicit scope is the one of `$T$ => $\mathit{pt}$`. 
If - such a view is found, the expression $e$ is converted to - `$v$($e$)`. -1. In a selection $e.m$ with $e$ of type $T$, if the selector $m$ does - not denote an accessible member of $T$. In this case, a view $v$ is searched - which is applicable to $e$ and whose result contains a member named - $m$. The search proceeds as in the case of implicit parameters, where - the implicit scope is the one of $T$. If such a view is found, the - selection $e.m$ is converted to `$v$($e$).$m$`. -1. In a selection $e.m(\mathit{args})$ with $e$ of type $T$, if the selector - $m$ denotes some member(s) of $T$, but none of these members is applicable to the arguments - $\mathit{args}$. In this case a view $v$ is searched which is applicable to $e$ - and whose result contains a method $m$ which is applicable to $\mathit{args}$. +1. If an expression ´e´ is of type ´T´, and ´T´ does not conform to the + expression's expected type ´\mathit{pt}´. In this case an implicit ´v´ is + searched which is applicable to ´e´ and whose result type conforms to + ´\mathit{pt}´. The search proceeds as in the case of implicit parameters, + where the implicit scope is the one of `´T´ => ´\mathit{pt}´`. If + such a view is found, the expression ´e´ is converted to + `´v´(´e´)`. +1. In a selection ´e.m´ with ´e´ of type ´T´, if the selector ´m´ does + not denote an accessible member of ´T´. In this case, a view ´v´ is searched + which is applicable to ´e´ and whose result contains a member named + ´m´. The search proceeds as in the case of implicit parameters, where + the implicit scope is the one of ´T´. If such a view is found, the + selection ´e.m´ is converted to `´v´(´e´).´m´`. +1. In a selection ´e.m(\mathit{args})´ with ´e´ of type ´T´, if the selector + ´m´ denotes some member(s) of ´T´, but none of these members is applicable to the arguments + ´\mathit{args}´. 
In this case a view ´v´ is searched which is applicable to ´e´ + and whose result contains a method ´m´ which is applicable to ´\mathit{args}´. The search proceeds as in the case of implicit parameters, where - the implicit scope is the one of $T$. If such a view is found, the - selection $e.m$ is converted to `$v$($e$).$m(\mathit{args})$`. + the implicit scope is the one of ´T´. If such a view is found, the + selection ´e.m´ is converted to `´v´(´e´).´m(\mathit{args})´`. -The implicit view, if it is found, can accept its argument $e$ as a +The implicit view, if it is found, can accept its argument ´e´ as a call-by-value or as a call-by-name parameter. However, call-by-value implicits take precedence over call-by-name implicits. @@ -322,33 +412,33 @@ method. {‘<%’ Type} {‘:’ Type} ``` -A type parameter $A$ of a method or non-trait class may have one or more view -bounds `$A$ <% $T$`. In this case the type parameter may be -instantiated to any type $S$ which is convertible by application of a -view to the bound $T$. +A type parameter ´A´ of a method or non-trait class may have one or more view +bounds `´A´ <% ´T´`. In this case the type parameter may be +instantiated to any type ´S´ which is convertible by application of a +view to the bound ´T´. -A type parameter $A$ of a method or non-trait class may also have one -or more context bounds `$A$ : $T$`. In this case the type parameter may be -instantiated to any type $S$ for which _evidence_ exists at the -instantiation point that $S$ satisfies the bound $T$. Such evidence -consists of an implicit value with type $T[S]$. +A type parameter ´A´ of a method or non-trait class may also have one +or more context bounds `´A´ : ´T´`. In this case the type parameter may be +instantiated to any type ´S´ for which _evidence_ exists at the +instantiation point that ´S´ satisfies the bound ´T´. Such evidence +consists of an implicit value with type ´T[S]´. 
A method or class containing type parameters with view or context bounds is treated as being equivalent to a method with implicit parameters. Consider first the case of a single parameter with view and/or context bounds such as: ```scala -def $f$[$A$ <% $T_1$ ... <% $T_m$ : $U_1$ : $U_n$]($\mathit{ps}$): $R$ = ... +def ´f´[´A´ <% ´T_1´ ... <% ´T_m´ : ´U_1´ : ´U_n´](´\mathit{ps}´): ´R´ = ... ``` Then the method definition above is expanded to ```scala -def $f$[$A$]($\mathit{ps}$)(implicit $v_1$: $A$ => $T_1$, ..., $v_m$: $A$ => $T_m$, - $w_1$: $U_1$[$A$], ..., $w_n$: $U_n$[$A$]): $R$ = ... +def ´f´[´A´](´\mathit{ps}´)(implicit ´v_1´: ´A´ => ´T_1´, ..., ´v_m´: ´A´ => ´T_m´, + ´w_1´: ´U_1´[´A´], ..., ´w_n´: ´U_n´[´A´]): ´R´ = ... ``` -where the $v_i$ and $w_j$ are fresh names for the newly introduced implicit parameters. These +where the ´v_i´ and ´w_j´ are fresh names for the newly introduced implicit parameters. These parameters are called _evidence parameters_. If a class or method has several view- or context-bounded type parameters, each @@ -365,7 +455,7 @@ For example: ```scala def foo[A: M](implicit b: B): C // expands to: -// def foo[A](implicit evidence$1: M[A], b: B): C +// def foo[A](implicit evidence$1: M[A], b: B): C ``` ###### Example @@ -391,52 +481,52 @@ trait ClassManifest[T] extends OptManifest[T] trait Manifest[T] extends ClassManifest[T] ``` -If an implicit parameter of a method or constructor is of a subtype $M[T]$ of -class `OptManifest[T]`, _a manifest is determined for $M[S]$_, +If an implicit parameter of a method or constructor is of a subtype ´M[T]´ of +class `OptManifest[T]`, _a manifest is determined for ´M[S]´_, according to the following rules. -First if there is already an implicit argument that matches $M[T]$, this +First if there is already an implicit argument that matches ´M[T]´, this argument is selected.
-Otherwise, let $\mathit{Mobj}$ be the companion object `scala.reflect.Manifest` -if $M$ is trait `Manifest`, or be -the companion object `scala.reflect.ClassManifest` otherwise. Let $M'$ be the trait -`Manifest` if $M$ is trait `Manifest`, or be the trait `OptManifest` otherwise. +Otherwise, let ´\mathit{Mobj}´ be the companion object `scala.reflect.Manifest` +if ´M´ is trait `Manifest`, or be +the companion object `scala.reflect.ClassManifest` otherwise. Let ´M'´ be the trait +`Manifest` if ´M´ is trait `Manifest`, or be the trait `OptManifest` otherwise. Then the following rules apply. -1. If $T$ is a value class or one of the classes `Any`, `AnyVal`, `Object`, +1. If ´T´ is a value class or one of the classes `Any`, `AnyVal`, `Object`, `Null`, or `Nothing`, a manifest for it is generated by selecting - the corresponding manifest value `Manifest.$T$`, which exists in the + the corresponding manifest value `Manifest.´T´`, which exists in the `Manifest` module. -1. If $T$ is an instance of `Array[$S$]`, a manifest is generated - with the invocation `$\mathit{Mobj}$.arrayType[S](m)`, where $m$ is the manifest - determined for $M[S]$. -1. If $T$ is some other class type $S$#$C[U_1, \ldots, U_n]$ where the prefix - type $S$ cannot be statically determined from the class $C$, - a manifest is generated with the invocation `$\mathit{Mobj}$.classType[T]($m_0$, classOf[T], $ms$)` - where $m_0$ is the manifest determined for $M'[S]$ and $ms$ are the - manifests determined for $M'[U_1], \ldots, M'[U_n]$. -1. If $T$ is some other class type with type arguments $U_1 , \ldots , U_n$, +1. If ´T´ is an instance of `Array[´S´]`, a manifest is generated + with the invocation `´\mathit{Mobj}´.arrayType[S](m)`, where ´m´ is the manifest + determined for ´M[S]´. +1. 
If ´T´ is some other class type ´S´#´C[U_1, \ldots, U_n]´ where the prefix + type ´S´ cannot be statically determined from the class ´C´, + a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](´m_0´, classOf[T], ´ms´)` + where ´m_0´ is the manifest determined for ´M'[S]´ and ´ms´ are the + manifests determined for ´M'[U_1], \ldots, M'[U_n]´. +1. If ´T´ is some other class type with type arguments ´U_1 , \ldots , U_n´, a manifest is generated - with the invocation `$\mathit{Mobj}$.classType[T](classOf[T], $ms$)` - where $ms$ are the - manifests determined for $M'[U_1] , \ldots , M'[U_n]$. -1. If $T$ is a singleton type `$p$.type`, a manifest is generated with - the invocation `$\mathit{Mobj}$.singleType[T]($p$)` -1. If $T$ is a refined type $T' \{ R \}$, a manifest is generated for $T'$. + with the invocation `´\mathit{Mobj}´.classType[T](classOf[T], ´ms´)` + where ´ms´ are the + manifests determined for ´M'[U_1] , \ldots , M'[U_n]´. +1. If ´T´ is a singleton type `´p´.type`, a manifest is generated with + the invocation `´\mathit{Mobj}´.singleType[T](´p´)` +1. If ´T´ is a refined type ´T' \{ R \}´, a manifest is generated for ´T'´. (That is, refinements are never reflected in manifests). -1. If $T$ is an intersection type - `$T_1$ with $, \ldots ,$ with $T_n$` - where $n > 1$, the result depends on whether a full manifest is +1. If ´T´ is an intersection type + `´T_1´ with ´, \ldots ,´ with ´T_n´` + where ´n > 1´, the result depends on whether a full manifest is to be determined or not. - If $M$ is trait `Manifest`, then + If ´M´ is trait `Manifest`, then a manifest is generated with the invocation - `Manifest.intersectionType[T]($ms$)` where $ms$ are the manifests - determined for $M[T_1] , \ldots , M[T_n]$. - Otherwise, if $M$ is trait `ClassManifest`, + `Manifest.intersectionType[T](´ms´)` where ´ms´ are the manifests + determined for ´M[T_1] , \ldots , M[T_n]´. 
+ Otherwise, if ´M´ is trait `ClassManifest`, then a manifest is generated for the [intersection dominator](03-types.html#type-erasure) - of the types $T_1 , \ldots , T_n$. -1. If $T$ is some other type, then if $M$ is trait `OptManifest`, + of the types ´T_1 , \ldots , T_n´. +1. If ´T´ is some other type, then if ´M´ is trait `OptManifest`, a manifest is generated from the designator `scala.reflect.NoManifest`. - If $M$ is a type different from `OptManifest`, a static error results. + If ´M´ is a type different from `OptManifest`, a static error results. diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md index ecaaa04c2b75..7607b0db85e0 100644 --- a/spec/08-pattern-matching.md +++ b/spec/08-pattern-matching.md @@ -39,12 +39,12 @@ than once in a pattern. Some examples of patterns are: 1. The pattern `ex: IOException` matches all instances of class `IOException`, binding variable `ex` to the instance. - 1. The pattern `Some(x)` matches values of the form `Some($v$)`, - binding `x` to the argument value $v$ of the `Some` constructor. + 1. The pattern `Some(x)` matches values of the form `Some(´v´)`, + binding `x` to the argument value ´v´ of the `Some` constructor. 1. The pattern `(x, _)` matches pairs of values, binding `x` to the first component of the pair. The second component is matched with a wildcard pattern. - 1. The pattern `x :: y :: xs` matches lists of length $\geq 2$, + 1. The pattern `x :: y :: xs` matches lists of length ´\geq 2´, binding `x` to the list's first element, `y` to the list's second element, and `xs` to the remainder. 1. The pattern `1 | 2 | 3` matches the integers between 1 and 3. @@ -60,9 +60,9 @@ patterns. | varid ``` -A _variable pattern_ $x$ is a simple identifier which starts with a +A _variable pattern_ ´x´ is a simple identifier which starts with a lower case letter. It matches any value, and binds the variable name -to that value. The type of $x$ is the expected type of the pattern as +to that value. 
The type of ´x´ is the expected type of the pattern as given from outside. A special case is the wild-card pattern `_` which is treated as if it was a fresh variable on each occurrence. @@ -73,11 +73,11 @@ which is treated as if it was a fresh variable on each occurrence. | ‘_’ ‘:’ TypePat ``` -A _typed pattern_ $x: T$ consists of a pattern variable $x$ and a -type pattern $T$. The type of $x$ is the type pattern $T$, where +A _typed pattern_ ´x: T´ consists of a pattern variable ´x´ and a +type pattern ´T´. The type of ´x´ is the type pattern ´T´, where each type variable and wildcard is replaced by a fresh, unknown type. This pattern matches any value matched by the [type pattern](#type-patterns) -$T$; it binds the variable name to +´T´; it binds the variable name to that value. ### Pattern Binders @@ -86,32 +86,82 @@ that value. Pattern2 ::= varid ‘@’ Pattern3 ``` -A _pattern binder_ `$x$@$p$` consists of a pattern variable $x$ and a -pattern $p$. The type of the variable $x$ is the static type $T$ of the pattern $p$. -This pattern matches any value $v$ matched by the pattern $p$, -provided the run-time type of $v$ is also an instance of $T$, +A _pattern binder_ `´x´@´p´` consists of a pattern variable ´x´ and a +pattern ´p´. The type of the variable ´x´ is the static type ´T´ implied +by the pattern ´p´. +This pattern matches any value ´v´ matched by the pattern ´p´, and it binds the variable name to that value. +A pattern ´p´ _implies_ a type ´T´ if the pattern matches only values of the type ´T´. + ### Literal Patterns ```ebnf SimplePattern ::= Literal ``` -A _literal pattern_ $L$ matches any value that is equal (in terms of -`==`) to the literal $L$. The type of $L$ must conform to the +A _literal pattern_ ´L´ matches any value that is equal (in terms of +`==`) to the literal ´L´. The type of ´L´ must conform to the expected type of the pattern. 
+### Interpolated string patterns + +```ebnf +  Literal  ::=  interpolatedString +``` + +The expansion of interpolated string literals in patterns is the same as +in expressions. If it occurs in a pattern, an interpolated string literal +of either of the forms +``` +id"text0{ pat1 }text1 … { patn }textn" +id"""text0{ pat1 }text1 … { patn }textn""" +``` +is equivalent to: +``` +StringContext("""text0""", …, """textn""").id(pat1, …, patn) +``` +You could define your own `StringContext` to shadow the default one that's +in the `scala` package. + +This expansion is well-typed if the member `id` evaluates to an extractor +object. If the extractor object has `apply` as well as `unapply` or +`unapplySeq` methods, processed strings can be used as either expressions +or patterns. + +Taking XML as an example +```scala +implicit class XMLinterpolation(s: StringContext) = { +    object xml { +        def apply(exprs: Any*) = +            // parse ‘s’ and build an XML tree with ‘exprs’ +            // in the holes +        def unapplySeq(xml: Node): Option[Seq[Node]] = +          // match `s’ against `xml’ tree and produce +          // subtrees in holes +    } +} +``` +Then, XML pattern matching could be expressed like this: +```scala +case xml""" + +      $linktext + +      """ => ... +``` +where linktext is a variable bound by the pattern. + ### Stable Identifier Patterns ```ebnf   SimplePattern  ::=  StableId ‘(’ [Patterns] ‘)’ ``` -A _stable identifier pattern_ is a [stable identifier](03-types.html#paths) $r$. -The type of $r$ must conform to the expected -type of the pattern. The pattern matches any value $v$ such that -`$r$ == $v$` (see [here](12-the-scala-standard-library.html#root-classes)). +A _stable identifier pattern_ is a [stable identifier](03-types.html#paths) ´r´. +The type of ´r´ must conform to the expected +type of the pattern. The pattern matches any value ´v´ such that +`´r´ == ´v´` (see [here](12-the-scala-standard-library.html#root-classes)).
To resolve the syntactic overlap with a variable pattern, a stable identifier pattern may not be a simple name starting with a lower-case @@ -119,27 +169,24 @@ letter. However, it is possible to enclose such a variable name in backquotes; then it is treated as a stable identifier pattern. ###### Example -Consider the following function definition: +Consider the following class definition: ```scala -def f(x: Int, y: Int) = x match { - case y => ... -} -``` - -Here, `y` is a variable pattern, which matches any value. -If we wanted to turn the pattern into a stable identifier pattern, this -can be achieved as follows: - -```scala -def f(x: Int, y: Int) = x match { - case `y` => ... +class C { c => + val x = 42 + val y = 27 + val Z = 8 + def f(x: Int) = x match { + case c.x => 1 // matches 42 + case `y` => 2 // matches 27 + case Z => 3 // matches 8 + case x => 4 // matches any value + } } ``` -Now, the pattern matches the `y` parameter of the enclosing function `f`. -That is, the match succeeds only if the `x` argument and the `y` -argument of `f` are equal. +Here, the first three patterns are stable identifier patterns, while the last +one is a variable pattern. ### Constructor Patterns @@ -147,24 +194,24 @@ argument of `f` are equal. SimplePattern ::= StableId ‘(’ [Patterns] ‘)’ ``` -A _constructor pattern_ is of the form $c(p_1 , \ldots , p_n)$ where $n -\geq 0$. It consists of a stable identifier $c$, followed by element -patterns $p_1 , \ldots , p_n$. The constructor $c$ is a simple or +A _constructor pattern_ is of the form ´c(p_1 , \ldots , p_n)´ where ´n +\geq 0´. It consists of a stable identifier ´c´, followed by element +patterns ´p_1 , \ldots , p_n´. The constructor ´c´ is a simple or qualified name which denotes a [case class](05-classes-and-objects.html#case-classes). 
If the case class is monomorphic, then it must conform to the expected type of the pattern, and the formal -parameter types of $x$'s [primary constructor](05-classes-and-objects.html#class-definitions) -are taken as the expected types of the element patterns $p_1, \ldots , -p_n$. If the case class is polymorphic, then its type parameters are -instantiated so that the instantiation of $c$ conforms to the expected -type of the pattern. The instantiated formal parameter types of $c$'s +parameter types of ´x´'s [primary constructor](05-classes-and-objects.html#class-definitions) +are taken as the expected types of the element patterns ´p_1, \ldots , +p_n´. If the case class is polymorphic, then its type parameters are +instantiated so that the instantiation of ´c´ conforms to the expected +type of the pattern. The instantiated formal parameter types of ´c´'s primary constructor are then taken as the expected types of the -component patterns $p_1, \ldots , p_n$. The pattern matches all -objects created from constructor invocations $c(v_1 , \ldots , v_n)$ -where each element pattern $p_i$ matches the corresponding value -$v_i$. +component patterns ´p_1, \ldots , p_n´. The pattern matches all +objects created from constructor invocations ´c(v_1 , \ldots , v_n)´ +where each element pattern ´p_i´ matches the corresponding value +´v_i´. -A special case arises when $c$'s formal parameter types end in a +A special case arises when ´c´'s formal parameter types end in a repeated parameter. This is further discussed [here](#pattern-sequences). ### Tuple Patterns @@ -173,9 +220,9 @@ repeated parameter. This is further discussed [here](#pattern-sequences). SimplePattern ::= ‘(’ [Patterns] ‘)’ ``` -A _tuple pattern_ `($p_1 , \ldots , p_n$)` is an alias -for the constructor pattern `scala.Tuple$n$($p_1 , \ldots , p_n$)`, -where $n \geq 2$. 
The empty tuple +A _tuple pattern_ `(´p_1 , \ldots , p_n´)` is an alias +for the constructor pattern `scala.Tuple´n´(´p_1 , \ldots , p_n´)`, +where ´n \geq 2´. The empty tuple `()` is the unique value of type `scala.Unit`. ### Extractor Patterns @@ -184,41 +231,49 @@ where $n \geq 2$. The empty tuple SimplePattern ::= StableId ‘(’ [Patterns] ‘)’ ``` -An _extractor pattern_ $x(p_1 , \ldots , p_n)$ where $n \geq 0$ is of +An _extractor pattern_ ´x(p_1 , \ldots , p_n)´ where ´n \geq 0´ is of the same syntactic form as a constructor pattern. However, instead of -a case class, the stable identifier $x$ denotes an object which has a +a case class, the stable identifier ´x´ denotes an object which has a member method named `unapply` or `unapplySeq` that matches the pattern. -An `unapply` method in an object $x$ _matches_ the pattern -$x(p_1 , \ldots , p_n)$ if it takes exactly one argument and one of -the following applies: - -* $n=0$ and `unapply`'s result type is `Boolean`. In this case - the extractor pattern matches all values $v$ for which - `$x$.unapply($v$)` yields `true`. -* $n=1$ and `unapply`'s result type is `Option[$T$]`, for some - type $T$. In this case, the (only) argument pattern $p_1$ is typed in - turn with expected type $T$. The extractor pattern matches then all - values $v$ for which `$x$.unapply($v$)` yields a value of form - `Some($v_1$)`, and $p_1$ matches $v_1$. -* $n>1$ and `unapply`'s result type is - `Option[($T_1 , \ldots , T_n$)]`, for some - types $T_1 , \ldots , T_n$. In this case, the argument patterns $p_1 - , \ldots , p_n$ are typed in turn with expected types $T_1 , \ldots , - T_n$. The extractor pattern matches then all values $v$ for which - `$x$.unapply($v$)` yields a value of form - `Some(($v_1 , \ldots , v_n$))`, and each pattern - $p_i$ matches the corresponding value $v_i$. 
-
-An `unapplySeq` method in an object $x$ matches the pattern
-$x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if it takes exactly one argument
-and its result type is of the form `Option[($T_1 , \ldots , T_m$, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted).
+An extractor pattern cannot match the value `null`. The implementation
+ensures that the `unapply`/`unapplySeq` method is not applied to `null`.
+
+A type is said to be an _extractor type_ for some type `T` if it has a
+method `get` with return type `T`, and a method `isEmpty` with a return type
+that conforms to `Boolean`. `Option[T]` is an extractor type for type `T`.
+
+An `unapply` method in an object ´x´ _matches_ the pattern
+´x(p_1 , \ldots , p_n)´ if it has a single parameter (and, optionally, an
+implicit parameter list) and one of the following applies:
+
+* ´n=0´ and `unapply`'s result type conforms to `Boolean`. In this case
+ the extractor pattern matches all values ´v´ for which
+ `´x´.unapply(´v´)` yields `true`.
+* ´n=1´ and `unapply`'s result type is an extractor type for some
+ type ´T´. In this case, the (only) argument pattern ´p_1´ is typed in
+ turn with expected type ´T´. The extractor pattern matches then all
+ values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields
+ `false`, `´u´.get` yields a value ´v_1´, and ´p_1´ matches ´v_1´.
+* ´n>1´ and `unapply`'s result type is
+ an extractor type for some type ´T´ with members ´\_1 , \ldots , \_n´ returning
+ types ´T_1 , \ldots , T_n´. In this case, the argument patterns ´p_1
+ , \ldots , p_n´ are typed in turn with expected types ´T_1 , \ldots ,
+ T_n´. The extractor pattern matches then all values ´v´ for which
+ `´x´.unapply(´v´)` yields a value ´u´ for which
+ `´u´.isEmpty` yields `false`, `´u´.get` yields some value ´t´, and each pattern
+ ´p_i´ matches the corresponding value ´t._i´ from
+ ´t._1 , \ldots , t._n´.
+ +An `unapplySeq` method in an object ´x´ matches the pattern +´x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)´ if it takes exactly one argument +and its result type is of the form `Option[(´T_1 , \ldots , T_m´, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). This case is further discussed [below](#pattern-sequences). -###### Example -The `Predef` object contains a definition of an -extractor object `Pair`: +###### Example 1 + +If we define an extractor object `Pair`: ```scala object Pair { @@ -238,26 +293,57 @@ val y = x match { } ``` +###### Example 2 + +If we define a class `NameBased` + +```scala +class NameBased[A, B](a: A, b: B) { + def isEmpty = false + def get = this + def _1 = a + def _2 = b +} +``` + +Then `NameBased` is an extractor type for `NameBased` itself, since it has a +member `isEmpty` returning a value of type Boolean, and it has a member `get` +returning a value of type `NameBased`. + +Since it also has members `_1` and `_2`, it can be used in an extractor pattern +with n = 2 as follows: + +```scala +object Extractor { + def unapply(x: Any) = new NameBased(1, "two") +} + +"anything" match { + case Extractor(a, b) => println(s"\$a, \$b") //prints "1, two" +} +``` + + ### Pattern Sequences ```ebnf SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ ``` -A _pattern sequence_ $p_1 , \ldots , p_n$ appears in two contexts. -First, in a constructor pattern $c(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$, where $c$ is a case class which has $m+1$ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`. -Second, in an extractor pattern $x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if the extractor object $x$ does not have an `unapply` method, -but it does define an `unapplySeq` method with a result type conforming to `Option[(T_1, ... , T_m, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). 
The expected type for the patterns $p_i$ is $S$. +A _pattern sequence_ ´p_1 , \ldots , p_n´ appears in two contexts. +First, in a constructor pattern ´c(q_1 , \ldots , q_m, p_1 , \ldots , p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`. +Second, in an extractor pattern ´x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)´ if the extractor object ´x´ does not have an `unapply` method, +but it does define an `unapplySeq` method with a result type that is an extractor type for type `(T_1, ... , T_m, Seq[S])` (if `m = 0`, an extractor type for the type `Seq[S]` is also accepted). The expected type for the patterns ´p_i´ is ´S´. The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`. -Each element pattern $p_i$ is type-checked with -$S$ as expected type, unless it is a sequence wildcard. If a final -sequence wildcard is present, the pattern matches all values $v$ that +Each element pattern ´p_i´ is type-checked with +´S´ as expected type, unless it is a sequence wildcard. If a final +sequence wildcard is present, the pattern matches all values ´v´ that are sequences which start with elements matching patterns -$p_1 , \ldots , p_{n-1}$. If no final sequence wildcard is given, the -pattern matches all values $v$ that are sequences of -length $n$ which consist of elements matching patterns $p_1 , \ldots , -p_n$. +´p_1 , \ldots , p_{n-1}´. If no final sequence wildcard is given, the +pattern matches all values ´v´ that are sequences of +length ´n´ which consist of elements matching patterns ´p_1 , \ldots , +p_n´. ### Infix Operation Patterns @@ -265,14 +351,14 @@ p_n$. Pattern3 ::= SimplePattern {id [nl] SimplePattern} ``` -An _infix operation pattern_ $p;\mathit{op};q$ is a shorthand for the -constructor or extractor pattern $\mathit{op}(p, q)$. 
The precedence and
+An _infix operation pattern_ ´p;\mathit{op};q´ is a shorthand for the
+constructor or extractor pattern ´\mathit{op}(p, q)´. The precedence and
associativity of operators in patterns is the same as in
[expressions](06-expressions.html#prefix,-infix,-and-postfix-operations).

-An infix operation pattern $p;\mathit{op};(q_1 , \ldots , q_n)$ is a
-shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1
-, \ldots , q_n)$.
+An infix operation pattern ´p;\mathit{op};(q_1 , \ldots , q_n)´ is a
+shorthand for the constructor or extractor pattern ´\mathit{op}(p, q_1
+, \ldots , q_n)´.

### Pattern Alternatives

@@ -280,11 +366,11 @@ shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1
Pattern ::= Pattern1 { ‘|’ Pattern1 }
```

-A _pattern alternative_ `$p_1$ | $\ldots$ | $p_n$`
-consists of a number of alternative patterns $p_i$. All alternative
+A _pattern alternative_ `´p_1´ | ´\ldots´ | ´p_n´`
+consists of a number of alternative patterns ´p_i´. All alternative
patterns are type checked with the expected type of the pattern. They
may not bind variables other than wildcards. The alternative pattern
-matches a value $v$ if at least one its alternatives matches $v$.
+matches a value ´v´ if at least one of its alternatives matches ´v´.

### XML Patterns

@@ -308,14 +394,20 @@ type `Seq[A]`.

### Irrefutable Patterns

-A pattern $p$ is _irrefutable_ for a type $T$, if one of the following applies:
-
-1. $p$ is a variable pattern,
-1. $p$ is a typed pattern $x: T'$, and $T <: T'$,
-1. $p$ is a constructor pattern $c(p_1 , \ldots , p_n)$, the type $T$
- is an instance of class $c$, the [primary constructor](05-classes-and-objects.html#class-definitions)
- of type $T$ has argument types $T_1 , \ldots , T_n$, and each $p_i$ is
- irrefutable for $T_i$.
+A pattern ´p´ is _irrefutable_ for a type ´T´, if one of the following applies:
+
+1. ´p´ is a variable pattern,
+1. ´p´ is a typed pattern ´x: T'´, and ´T <: T'´,
+1.
´p´ is a constructor pattern ´c(p_1 , \ldots , p_n)´, the type ´T´
+ is an instance of class ´c´, the [primary constructor](05-classes-and-objects.html#class-definitions)
+ of type ´T´ has argument types ´T_1 , \ldots , T_n´, and each ´p_i´ is
+ irrefutable for ´T_i´.
+1. ´p´ is an extractor pattern for which the extractor type is `Some[´T´]` for
+ some type ´T´
+1. ´p´ is an extractor pattern for which the extractor type's `isEmpty` method
+ is the singleton type `false`
+1. ´p´ is an extractor pattern for which the return type is the singleton type
+ `true`

## Type Patterns

@@ -324,38 +416,42 @@ A pattern $p$ is _irrefutable_ for a type $T$, if one of the following applies:
```
```

Type patterns consist of types, type variables, and wildcards.
-A type pattern $T$ is of one of the following forms:
+A type pattern ´T´ is of one of the following forms:

-* A reference to a class $C$, $p.C$, or `$T$#$C$`. This
+* A reference to a class ´C´, ´p.C´, or `´T´#´C´`. This
 type pattern matches any non-null instance of the given class. Note
 that the prefix of the class, if it exists, is relevant for determining
- class instances. For instance, the pattern $p.C$ matches only
- instances of classes $C$ which were created with the path $p$ as
+ class instances. For instance, the pattern ´p.C´ matches only
+ instances of classes ´C´ which were created with the path ´p´ as
 prefix. This also applies to prefixes which are not given syntactically.
- For example, if $C$ refers to a class defined in the nearest enclosing
- class and is thus equivalent to $this.C$, it is considered to have a prefix.
+ For example, if ´C´ refers to a class defined in the nearest enclosing
+ class and is thus equivalent to ´this.C´, it is considered to have a prefix.

 The bottom types `scala.Nothing` and `scala.Null` cannot be used as type patterns, because they would match nothing in any case.

-* A singleton type `$p$.type`.
This type pattern matches only the value - denoted by the path $p$ (that is, a pattern match involved a - comparison of the matched value with $p$ using method `eq` in class - `AnyRef`). -* A compound type pattern `$T_1$ with $\ldots$ with $T_n$` where each $T_i$ is a +* A singleton type `´p´.type`. This type pattern matches only the value + denoted by the path ´p´ (the `eq` method is used to compare the matched value + to ´p´). + +* A literal type `´lit´`. This type pattern matches only the value + denoted by the literal ´lit´ (the `==` method is used to compare the matched + value to ´lit´). + +* A compound type pattern `´T_1´ with ´\ldots´ with ´T_n´` where each ´T_i´ is a type pattern. This type pattern matches all values that are matched by each of - the type patterns $T_i$. + the type patterns ´T_i´. -* A parameterized type pattern $T[a_1 , \ldots , a_n]$, where the $a_i$ +* A parameterized type pattern ´T[a_1 , \ldots , a_n]´, where the ´a_i´ are type variable patterns or wildcards `_`. - This type pattern matches all values which match $T$ for + This type pattern matches all values which match ´T´ for some arbitrary instantiation of the type variables and wildcards. The bounds or alias type of these type variable are determined as described [here](#type-parameter-inference-in-patterns). -* A parameterized type pattern `scala.Array$[T_1]$`, where - $T_1$ is a type pattern. This type pattern matches any non-null instance - of type `scala.Array$[U_1]$`, where $U_1$ is a type matched by $T_1$. +* A parameterized type pattern `scala.Array´[T_1]´`, where + ´T_1´ is a type pattern. This type pattern matches any non-null instance + of type `scala.Array´[U_1]´`, where ´U_1´ is a type matched by ´T_1´. Types which are not of one of the forms described above are also accepted as type patterns. However, such type patterns will be translated to their @@ -375,17 +471,17 @@ pattern. ### Type parameter inference for typed patterns -Assume a typed pattern $p: T'$. 
Let $T$ result from $T'$ where all wildcards in -$T'$ are renamed to fresh variable names. Let $a_1 , \ldots , a_n$ be -the type variables in $T$. These type variables are considered bound -in the pattern. Let the expected type of the pattern be $\mathit{pt}$. +Assume a typed pattern ´p: T'´. Let ´T´ result from ´T'´ where all wildcards in +´T'´ are renamed to fresh variable names. Let ´a_1 , \ldots , a_n´ be +the type variables in ´T´. These type variables are considered bound +in the pattern. Let the expected type of the pattern be ´\mathit{pt}´. Type parameter inference constructs first a set of subtype constraints over -the type variables $a_i$. The initial constraints set $\mathcal{C}\_0$ reflects -just the bounds of these type variables. That is, assuming $T$ has -bound type variables $a_1 , \ldots , a_n$ which correspond to class -type parameters $a_1' , \ldots , a_n'$ with lower bounds $L_1, \ldots , L_n$ -and upper bounds $U_1 , \ldots , U_n$, $\mathcal{C}_0$ contains the constraints +the type variables ´a_i´. The initial constraints set ´\mathcal{C}\_0´ reflects +just the bounds of these type variables. That is, assuming ´T´ has +bound type variables ´a_1 , \ldots , a_n´ which correspond to class +type parameters ´a_1' , \ldots , a_n'´ with lower bounds ´L_1, \ldots , L_n´ +and upper bounds ´U_1 , \ldots , U_n´, ´\mathcal{C}_0´ contains the constraints $$ \begin{cases} @@ -394,45 +490,45 @@ a_i &<: \sigma U_i & \quad (i = 1, \ldots , n) \\\\ \end{cases} $$ -where $\sigma$ is the substitution $[a_1' := a_1 , \ldots , a_n' :=a_n]$. +where ´\sigma´ is the substitution ´[a_1' := a_1 , \ldots , a_n' :=a_n]´. -The set $\mathcal{C}_0$ is then augmented by further subtype constraints. There are two +The set ´\mathcal{C}_0´ is then augmented by further subtype constraints. There are two cases. 
###### Case 1 -If there exists a substitution $\sigma$ over the type variables $a_i , \ldots , a_n$ such that $\sigma T$ conforms to $\mathit{pt}$, one determines the weakest subtype constraints -$\mathcal{C}\_1$ over the type variables $a_1, \ldots , a_n$ such that $\mathcal{C}\_0 \wedge \mathcal{C}_1$ implies that $T$ conforms to $\mathit{pt}$. +If there exists a substitution ´\sigma´ over the type variables ´a_i , \ldots , a_n´ such that ´\sigma T´ conforms to ´\mathit{pt}´, one determines the weakest subtype constraints +´\mathcal{C}\_1´ over the type variables ´a_1, \ldots , a_n´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}_1´ implies that ´T´ conforms to ´\mathit{pt}´. ###### Case 2 -Otherwise, if $T$ can not be made to conform to $\mathit{pt}$ by +Otherwise, if ´T´ can not be made to conform to ´\mathit{pt}´ by instantiating its type variables, one determines all type variables in -$\mathit{pt}$ which are defined as type parameters of a method enclosing -the pattern. Let the set of such type parameters be $b_1 , \ldots , -b_m$. Let $\mathcal{C}\_0'$ be the subtype constraints reflecting the bounds of the -type variables $b_i$. If $T$ denotes an instance type of a final -class, let $\mathcal{C}\_2$ be the weakest set of subtype constraints over the type -variables $a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that -$\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2$ implies that $T$ conforms to -$\mathit{pt}$. If $T$ does not denote an instance type of a final class, -let $\mathcal{C}\_2$ be the weakest set of subtype constraints over the type variables -$a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that $\mathcal{C}\_0 \wedge -\mathcal{C}\_0' \wedge \mathcal{C}\_2$ implies that it is possible to construct a type -$T'$ which conforms to both $T$ and $\mathit{pt}$. It is a static error if -there is no satisfiable set of constraints $\mathcal{C}\_2$ with this property. 
+´\mathit{pt}´ which are defined as type parameters of a method enclosing +the pattern. Let the set of such type parameters be ´b_1 , \ldots , +b_m´. Let ´\mathcal{C}\_0'´ be the subtype constraints reflecting the bounds of the +type variables ´b_i´. If ´T´ denotes an instance type of a final +class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type +variables ´a_1 , \ldots , a_n´ and ´b_1 , \ldots , b_m´ such that +´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that ´T´ conforms to +´\mathit{pt}´. If ´T´ does not denote an instance type of a final class, +let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables +´a_1 , \ldots , a_n´ and ´b_1 , \ldots , b_m´ such that ´\mathcal{C}\_0 \wedge +\mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that it is possible to construct a type +´T'´ which conforms to both ´T´ and ´\mathit{pt}´. It is a static error if +there is no satisfiable set of constraints ´\mathcal{C}\_2´ with this property. The final step consists in choosing type bounds for the type variables which imply the established constraint system. The process is different for the two cases above. ###### Case 1 -We take $a_i >: L_i <: U_i$ where each $L_i$ is minimal and each $U_i$ is maximal wrt $<:$ such that $a_i >: L_i <: U_i$ for $i = 1, \ldots, n$ implies $\mathcal{C}\_0 \wedge \mathcal{C}\_1$. +We take ´a_i >: L_i <: U_i´ where each ´L_i´ is minimal and each ´U_i´ is maximal wrt ´<:´ such that ´a_i >: L_i <: U_i´ for ´i = 1, \ldots, n´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_1´. ###### Case 2 -We take $a_i >: L_i <: U_i$ and $b\_i >: L_i' <: U_i' $ where each $L_i$ -and $L_j'$ is minimal and each $U_i$ and $U_j'$ is maximal such that -$a_i >: L_i <: U_i$ for $i = 1 , \ldots , n$ and -$b_j >: L_j' <: U_j'$ for $j = 1 , \ldots , m$ -implies $\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2$. 
+We take ´a_i >: L_i <: U_i´ and ´b\_i >: L_i' <: U_i' ´ where each ´L_i´ +and ´L_j'´ is minimal and each ´U_i´ and ´U_j'´ is maximal such that +´a_i >: L_i <: U_i´ for ´i = 1 , \ldots , n´ and +´b_j >: L_j' <: U_j'´ for ´j = 1 , \ldots , m´ +implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2´. In both cases, local type inference is permitted to limit the complexity of inferred bounds. Minimality and maximality of types have @@ -440,10 +536,10 @@ to be understood relative to the set of types of acceptable complexity. ### Type parameter inference for constructor patterns -Assume a constructor pattern $C(p_1 , \ldots , p_n)$ where class $C$ -has type parameters $a_1 , \ldots , a_n$. These type parameters +Assume a constructor pattern ´C(p_1 , \ldots , p_n)´ where class ´C´ +has type parameters ´a_1 , \ldots , a_n´. These type parameters are inferred in the same way as for the typed pattern -`(_: $C[a_1 , \ldots , a_n]$)`. +`(_: ´C[a_1 , \ldots , a_n]´)`. ###### Example Consider the program fragment: @@ -529,54 +625,56 @@ function's declared result type, `Number`. A _pattern matching expression_ ```scala -e match { case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ } +e match { case ´p_1´ => ´b_1´ ´\ldots´ case ´p_n´ => ´b_n´ } ``` -consists of a selector expression $e$ and a number $n > 0$ of -cases. Each case consists of a (possibly guarded) pattern $p_i$ and a -block $b_i$. Each $p_i$ might be complemented by a guard -`if $e$` where $e$ is a boolean expression. +consists of a selector expression ´e´ and a number ´n > 0´ of +cases. Each case consists of a (possibly guarded) pattern ´p_i´ and a +block ´b_i´. Each ´p_i´ might be complemented by a guard +`if ´e´` where ´e´ is a boolean expression. The scope of the pattern -variables in $p_i$ comprises the pattern's guard and the corresponding block $b_i$. +variables in ´p_i´ comprises the pattern's guard and the corresponding block ´b_i´. 
-Let $T$ be the type of the selector expression $e$ and let $a_1
-, \ldots , a_m$ be the type parameters of all methods enclosing
-the pattern matching expression. For every $a_i$, let $L_i$ be its
-lower bound and $U_i$ be its higher bound. Every pattern $p \in \{p_1, , \ldots , p_n\}$
+Let ´T´ be the type of the selector expression ´e´ and let ´a_1
+, \ldots , a_m´ be the type parameters of all methods enclosing
+the pattern matching expression. For every ´a_i´, let ´L_i´ be its
+lower bound and ´U_i´ be its higher bound. Every pattern ´p \in \{p_1 , \ldots , p_n\}´
can be typed in two ways. First, it is attempted
-to type $p$ with $T$ as its expected type. If this fails, $p$ is
-instead typed with a modified expected type $T'$ which results from
-$T$ by replacing every occurrence of a type parameter $a_i$ by
-\mbox{\sl undefined}. If this second step fails also, a compile-time
-error results. If the second step succeeds, let $T_p$ be the type of
-pattern $p$ seen as an expression. One then determines minimal bounds
-$L_11 , \ldots , L_m'$ and maximal bounds $U_1' , \ldots , U_m'$ such
-that for all $i$, $L_i <: L_i'$ and $U_i' <: U_i$ and the following
+to type ´p´ with ´T´ as its expected type. If this fails, ´p´ is
+instead typed with a modified expected type ´T'´ which results from
+´T´ by replacing every occurrence of a type parameter ´a_i´ by
+*undefined*. If this second step fails also, a compile-time
+error results. If the second step succeeds, let ´T_p´ be the type of
+pattern ´p´ seen as an expression. One then determines minimal bounds
+´L_1' , \ldots , L_m'´ and maximal bounds ´U_1' , \ldots , U_m'´ such
+that for all ´i´, ´L_i <: L_i'´ and ´U_i' <: U_i´ and the following
constraint system is satisfied:
-$$L_1 <: a_1 <: U_1\;\wedge\;\ldots\;\wedge\;L_m <: a_m <: U_m \ \Rightarrow\ T_p <: T$$
+$$
+L_1 <: a_1 <: U_1\;\wedge\;\ldots\;\wedge\;L_m <: a_m <: U_m \ \Rightarrow\ T_p <: T
+$$
If no such bounds can be found, a compile time error results.
If such -bounds are found, the pattern matching clause starting with $p$ is -then typed under the assumption that each $a_i$ has lower bound $L_i'$ -instead of $L_i$ and has upper bound $U_i'$ instead of $U_i$. +bounds are found, the pattern matching clause starting with ´p´ is +then typed under the assumption that each ´a_i´ has lower bound ´L_i'´ +instead of ´L_i´ and has upper bound ´U_i'´ instead of ´U_i´. -The expected type of every block $b_i$ is the expected type of the +The expected type of every block ´b_i´ is the expected type of the whole pattern matching expression. The type of the pattern matching expression is then the [weak least upper bound](03-types.html#weak-conformance) of the types of all blocks -$b_i$. +´b_i´. When applying a pattern matching expression to a selector value, patterns are tried in sequence until one is found which matches the -[selector value](#patterns). Say this case is `case $p_i \Rightarrow b_i$`. -The result of the whole expression is the result of evaluating $b_i$, -where all pattern variables of $p_i$ are bound to +[selector value](#patterns). Say this case is `case ´p_i \Rightarrow b_i´`. +The result of the whole expression is the result of evaluating ´b_i´, +where all pattern variables of ´p_i´ are bound to the corresponding parts of the selector value. If no matching pattern is found, a `scala.MatchError` exception is thrown. The pattern in a case may also be followed by a guard suffix -`if e` with a boolean expression $e$. The guard expression is +`if e` with a boolean expression ´e´. The guard expression is evaluated if the preceding pattern in the case matches. If the guard expression evaluates to `true`, the pattern match succeeds as normal. If the guard expression evaluates to `false`, the pattern @@ -644,67 +742,67 @@ conforms to its expected type, `T`. 
An anonymous function can be defined by a sequence of cases ```scala -{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ } +{ case ´p_1´ => ´b_1´ ´\ldots´ case ´p_n´ => ´b_n´ } ``` which appear as an expression without a prior `match`. The expected type of such an expression must in part be defined. It must -be either `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]` for some $k > 0$, -or `scala.PartialFunction[$S_1$, $R$]`, where the -argument type(s) $S_1 , \ldots , S_k$ must be fully determined, but the result type -$R$ may be undetermined. +be either `scala.Function´k´[´S_1 , \ldots , S_k´, ´R´]` for some ´k > 0´, +or `scala.PartialFunction[´S_1´, ´R´]`, where the +argument type(s) ´S_1 , \ldots , S_k´ must be fully determined, but the result type +´R´ may be undetermined. If the expected type is [SAM-convertible](06-expressions.html#sam-conversion) -to `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]`, +to `scala.Function´k´[´S_1 , \ldots , S_k´, ´R´]`, the expression is taken to be equivalent to the anonymous function: ```scala -($x_1: S_1 , \ldots , x_k: S_k$) => ($x_1 , \ldots , x_k$) match { - case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ +(´x_1: S_1 , \ldots , x_k: S_k´) => (´x_1 , \ldots , x_k´) match { + case ´p_1´ => ´b_1´ ´\ldots´ case ´p_n´ => ´b_n´ } ``` -Here, each $x_i$ is a fresh name. +Here, each ´x_i´ is a fresh name. As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where - $T$ is the weak least upper bound of the types of all $b_i$. + ´T´ is the weak least upper bound of the types of all ´b_i´. 
```scala -new scala.Function$k$[$S_1 , \ldots , S_k$, $T$] { - def apply($x_1: S_1 , \ldots , x_k: S_k$): $T$ = ($x_1 , \ldots , x_k$) match { - case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ +new scala.Function´k´[´S_1 , \ldots , S_k´, ´T´] { + def apply(´x_1: S_1 , \ldots , x_k: S_k´): ´T´ = (´x_1 , \ldots , x_k´) match { + case ´p_1´ => ´b_1´ ´\ldots´ case ´p_n´ => ´b_n´ } } ``` -If the expected type is `scala.PartialFunction[$S$, $R$]`, +If the expected type is `scala.PartialFunction[´S´, ´R´]`, the expression is taken to be equivalent to the following instance creation expression: ```scala -new scala.PartialFunction[$S$, $T$] { - def apply($x$: $S$): $T$ = x match { - case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ +new scala.PartialFunction[´S´, ´T´] { + def apply(´x´: ´S´): ´T´ = x match { + case ´p_1´ => ´b_1´ ´\ldots´ case ´p_n´ => ´b_n´ } - def isDefinedAt($x$: $S$): Boolean = { - case $p_1$ => true $\ldots$ case $p_n$ => true + def isDefinedAt(´x´: ´S´): Boolean = { + case ´p_1´ => true ´\ldots´ case ´p_n´ => true case _ => false } } ``` -Here, $x$ is a fresh name and $T$ is the weak least upper bound of the -types of all $b_i$. The final default case in the `isDefinedAt` -method is omitted if one of the patterns $p_1 , \ldots , p_n$ is +Here, ´x´ is a fresh name and ´T´ is the weak least upper bound of the +types of all ´b_i´. The final default case in the `isDefinedAt` +method is omitted if one of the patterns ´p_1 , \ldots , p_n´ is already a variable or wildcard pattern. 
###### Example -Here is a method which uses a fold-left operation -`/:` to compute the scalar product of +Here's an example which uses +`foldLeft` to compute the scalar product of two vectors: ```scala def scalarProduct(xs: Array[Double], ys: Array[Double]) = - (0.0 /: (xs zip ys)) { + (xs zip ys).foldLeft(0.0) { case (a, (b, c)) => a + b * c } ``` diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md index 1c2f7ec85e02..33c436e8d77b 100644 --- a/spec/09-top-level-definitions.md +++ b/spec/09-top-level-definitions.md @@ -26,10 +26,10 @@ package clause. A _compilation unit_ ```scala -package $p_1$; -$\ldots$ -package $p_n$; -$\mathit{stats}$ +package ´p_1´; +´\ldots´ +package ´p_n´; +´\mathit{stats}´ ``` starting with one or more package @@ -37,10 +37,10 @@ clauses is equivalent to a compilation unit consisting of the packaging ```scala -package $p_1$ { $\ldots$ - package $p_n$ { - $\mathit{stats}$ - } $\ldots$ +package ´p_1´ { ´\ldots´ + package ´p_n´ { + ´\mathit{stats}´ + } ´\ldots´ } ``` @@ -64,15 +64,15 @@ objects and packages. Unlike other objects, packages are not introduced by a definition. Instead, the set of members of a package is determined by packagings. -A packaging `package $p$ { $\mathit{ds}$ }` injects all -definitions in $\mathit{ds}$ as members into the package whose qualified name -is $p$. Members of a package are called _top-level_ definitions. -If a definition in $\mathit{ds}$ is labeled `private`, it is +A packaging `package ´p´ { ´\mathit{ds}´ }` injects all +definitions in ´\mathit{ds}´ as members into the package whose qualified name +is ´p´. Members of a package are called _top-level_ definitions. +If a definition in ´\mathit{ds}´ is labeled `private`, it is visible only for other members in the package. -Inside the packaging, all members of package $p$ are visible under their +Inside the packaging, all members of package ´p´ are visible under their simple names. 
However this rule does not extend to members of enclosing -packages of $p$ that are designated by a prefix of the path $p$. +packages of ´p´ that are designated by a prefix of the path ´p´. ```scala package org.net.prj { @@ -84,7 +84,7 @@ all members of package `org.net.prj` are visible under their simple names, but members of packages `org` or `org.net` require explicit qualification or imports. -Selections $p$.$m$ from $p$ as well as imports from $p$ +Selections ´p´.´m´ from ´p´ as well as imports from ´p´ work as for objects. However, unlike other objects, packages may not be used as values. It is illegal to have a package with the same fully qualified name as a module or a class. @@ -100,14 +100,14 @@ are visible to each other without qualification. PackageObject ::= ‘package’ ‘object’ ObjectDef ``` -A _package object_ `package object $p$ extends $t$` adds the -members of template $t$ to the package $p$. There can be only one +A _package object_ `package object ´p´ extends ´t´` adds the +members of template ´t´ to the package ´p´. There can be only one package object per package. The standard naming convention is to place the definition above in a file named `package.scala` that's -located in the directory corresponding to package $p$. +located in the directory corresponding to package ´p´. The package object should not define a member with the same name as -one of the top-level objects or classes defined in package $p$. If +one of the top-level objects or classes defined in package ´p´. If there is a name conflict, the behavior of the program is currently undefined. It is expected that this restriction will be lifted in a future version of Scala. @@ -120,11 +120,17 @@ QualId ::= id {‘.’ id} A reference to a package takes the form of a qualified identifier. Like all other references, package references are relative. That is, -a package reference starting in a name $p$ will be looked up in the -closest enclosing scope that defines a member named $p$. 
+a package reference starting in a name ´p´ will be looked up in the +closest enclosing scope that defines a member named ´p´. -The special predefined name `_root_` refers to the -outermost root package which contains all top-level packages. +If a package name is shadowed, it's possible to refer to its +fully-qualified name by prefixing it with +the special predefined name `_root_`, which refers to the +outermost root package that contains all top-level packages. + +The name `_root_` has this special denotation only when +used as the first element of a qualifier; it is an ordinary +identifier otherwise. ###### Example Consider the following program: @@ -134,11 +140,18 @@ package b { class B } -package a.b { - class A { - val x = new _root_.b.B +package a { + package b { + class A { + val x = new _root_.b.B + } + class C { + import _root_.b._ + def y = new B + } } } + ``` Here, the reference `_root_.b.B` refers to class `B` in the @@ -158,8 +171,8 @@ passed to the `main` method as a parameter of type The `main` method of a program can be directly defined in the object, or it can be inherited. The scala library defines a special class `scala.App` whose body acts as a `main` method. -An objects $m$ inheriting from this class is thus a program, -which executes the initialization code of the object $m$. +An objects ´m´ inheriting from this class is thus a program, +which executes the initialization code of the object ´m´. 
###### Example The following example will create a hello world program by defining diff --git a/spec/10-xml-expressions-and-patterns.md b/spec/10-xml-expressions-and-patterns.md index ea93cc8d8eea..a9b7edb14fff 100644 --- a/spec/10-xml-expressions-and-patterns.md +++ b/spec/10-xml-expressions-and-patterns.md @@ -24,14 +24,13 @@ XmlExpr ::= XmlContent {Element} Well-formedness constraints of the XML specification apply, which means for instance that start tags and end tags must match, and -attributes may only be defined once, with the exception of constraints +attributes may only be defined once, except for constraints related to entity resolution. The following productions describe Scala's extensible markup language, designed as close as possible to the W3C extensible markup language standard. Only the productions for attribute values and character data are changed. -Scala does not support declarations, CDATA sections or processing instructions. -Entity references are not resolved at runtime. +Scala does not support declarations. Entity references are not resolved at runtime. ```ebnf Element ::= EmptyElemTag @@ -76,11 +75,11 @@ AttValue ::= ‘"’ {CharQ | CharRef} ‘"’ ScalaExpr ::= Block -CharData ::= { CharNoRef } $\textit{ without}$ {CharNoRef}‘{’CharB {CharNoRef} - $\textit{ and without}$ {CharNoRef}‘]]>’{CharNoRef} +CharData ::= { CharNoRef } ´\textit{ without}´ {CharNoRef}‘{’CharB {CharNoRef} + ´\textit{ and without}´ {CharNoRef}‘]]>’{CharNoRef} ``` - + XML expressions may contain Scala expressions as attribute values or within nodes. In the latter case, these are embedded using a single opening brace `{` and ended by a closing brace `}`. 
To express a single opening brace
+An annotation of a definition appears in front of that definition. +An annotation of a type appears after that type. +An annotation of an expression appears after that expression, separated by a colon. +More than one annotation clause may apply to an entity. +The order in which these annotations are given does not matter. Examples: @@ -37,83 +36,34 @@ String @local // Type annotation ## Predefined Annotations -### Java Platform Annotations - -The meaning of annotation clauses is implementation-dependent. On the -Java platform, the following annotations have a standard meaning. - - * `@transient` Marks a field to be non-persistent; this is - equivalent to the `transient` - modifier in Java. - - * `@volatile` Marks a field which can change its value - outside the control of the program; this - is equivalent to the `volatile` - modifier in Java. - - * `@SerialVersionUID()` Attaches a serial version identifier (a - `long` constant) to a class. - This is equivalent to a the following field - definition in Java: - - ```java - private final static SerialVersionUID = - ``` - - * `@throws()` A Java compiler checks that a program contains handlers for checked exceptions - by analyzing which checked exceptions can result from execution of a method or - constructor. For each checked exception which is a possible result, the - `throws` - clause for the method or constructor must mention the class of that exception - or one of the superclasses of the class of that exception. - -### Java Beans Annotations - - * `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this - annotation causes getter and setter methods `getX`, `setX` - in the Java bean style to be added in the class containing the - variable. The first letter of the variable appears capitalized after - the `get` or `set`. When the annotation is added to the - definition of an immutable value definition `X`, only a getter is - generated. 
The construction of these methods is part of - code-generation; therefore, these methods become visible only once a - classfile for the containing class is generated. - - * `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.reflect.BeanProperty`, but - the generated getter method is named `isX` instead of `getX`. +Predefined annotations are found in the `scala.annotation` package, and also in the `scala` package. -### Deprecation Annotations - - * `@deprecated(message: , since: )`
- Marks a definition as deprecated. Accesses to the - defined entity will then cause a deprecated warning mentioning the - _message_ `` to be issued from the compiler. - The argument _since_ documents since when the definition should be considered deprecated.
- Deprecated warnings are suppressed in code that belongs itself to a definition - that is labeled deprecated. +### Scala Compiler Annotations - * `@deprecatedName(name: )`
- Marks a formal parameter name as deprecated. Invocations of this entity - using named parameter syntax referring to the deprecated parameter name cause a deprecation warning. + * `@tailrec` Marks a method which must be transformed by the compiler + to eliminate self-recursive invocations in tail position. + It is an error if there are no such invocations, or a recursive call not in tail position. -### Scala Compiler Annotations + * `@switch` Marks the expression submitted to a match as "switchable", + such that the match can be compiled to an efficient form. + The compiler will warn if the type of the expression is not a switchable type. + Certain degenerate matches may remain unoptimized without a warning. * `@unchecked` When applied to the selector of a `match` expression, this attribute suppresses any warnings about non-exhaustive pattern - matches which would otherwise be emitted. For instance, no warnings - would be produced for the method definition below. + matches that would otherwise be emitted. For instance, no warnings + would be produced for the method definition below, or the similar value definition. ```scala def f(x: Option[Int]) = (x: @unchecked) match { - case Some(y) => y + case Some(y) => y } + val Some(y) = x: @unchecked ``` - Without the `@unchecked` annotation, a Scala compiler could - infer that the pattern match is non-exhaustive, and could produce a - warning because `Option` is a `sealed` class. + Without the `@unchecked` annotation, a Scala compiler could infer that the pattern match is non-exhaustive and issue a warning because `Option` is a `sealed` class. - * `@uncheckedStable` When applied a value declaration or definition, it allows the defined + * `@uncheckedStable` When applied to a value definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types). 
For instance, the following member definitions are legal: @@ -128,17 +78,13 @@ Java platform, the following annotations have a standard meaning. would not be a path since its type `A with B` is volatile. Hence, the reference `x.T` would be malformed. - When applied to value declarations or definitions that have non-volatile - types, the annotation has no effect. + When applied to value definitions that have no volatile types, the annotation has no effect. - * `@specialized` When applied to the definition of a type parameter, this annotation causes - the compiler - to generate specialized definitions for primitive types. An optional list of - primitive - types may be given, in which case specialization takes into account only - those types. + * `@specialized` When applied to the definition of a type parameter, this annotation causes the compiler to generate definitions that are specialized for primitive types. + An optional list of primitive types may be given, in which case specialization + takes into account only those types. For instance, the following code would generate specialized traits for - `Unit`, `Int` and `Double` + `Unit`, `Int` and `Double`: ```scala trait Function0[@specialized(Unit, Int, Double) T] { @@ -147,28 +93,86 @@ Java platform, the following annotations have a standard meaning. ``` Whenever the static type of an expression matches a specialized variant of - a definition, the compiler will instead use the specialized version. - See the [specialization sid](http://docs.scala-lang.org/sips/completed/scala-specialization.html) for more details of the implementation. + a definition, the compiler will use the specialized version instead. + See the [specialization SID](https://docs.scala-lang.org/sips/scala-specialization.html) for more details of the implementation. + +### Deprecation Annotations + + * `@deprecated(message: , since: )`
+ Marks a definition as deprecated. Accesses to the + defined entity will then cause a deprecated warning mentioning the + _message_ `` to be issued from the compiler. + The argument _since_ documents since when the definition should be considered deprecated.
+ Deprecated warnings are suppressed in code that belongs itself to a definition + that is labeled deprecated. + + * `@deprecatedName(name: , since: )`
+ Marks a formal parameter name as deprecated. Invocations of this entity + using named parameter syntax referring to the deprecated parameter name cause a deprecation warning. + +### Java Platform Annotations + +The meaning of other annotation clauses is implementation-dependent. On the +Java platform, the following annotations have a standard meaning. + + * `@transient` Marks a field to be non-persistent; this is + equivalent to the `transient` modifier in Java. + + * `@volatile` Marks a field which can change its value + outside the control of the program; this + is equivalent to the `volatile` modifier in Java. + + * `@SerialVersionUID()` Attaches a serial version identifier (a + `long` constant) to a class. + This is equivalent to the following field + definition in Java: + + ```java + private final static SerialVersionUID = + ``` + + * `@throws()` A Java compiler checks that a program contains handlers for checked exceptions + by analyzing which checked exceptions can result from the execution of a method or + constructor. For each checked exception which is a possible result, the + `throws` + clause for the method or constructor must mention the class of that exception + or one of the superclasses of the class of that exception. + +### Java Beans Annotations + + * `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this + annotation causes getter and setter methods `getX`, `setX` + in the Java bean style to be added in the class containing the + variable. The first letter of the variable appears capitalized after + the `get` or `set`. When the annotation is added to the + definition of an immutable value definition `X`, only a getter is + generated. The construction of these methods is part of + code-generation; therefore, these methods become visible only once a + classfile for the containing class is generated. 
+ + * `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.reflect.BeanProperty`, but + the generated getter method is named `isX` instead of `getX`. ## User-defined Annotations -Other annotations may be interpreted by platform- or -application-dependent tools. Class `scala.Annotation` has two -sub-traits which are used to indicate how these annotations are -retained. Instances of an annotation class inheriting from trait -`scala.ClassfileAnnotation` will be stored in the generated class -files. Instances of an annotation class inheriting from trait -`scala.StaticAnnotation` will be visible to the Scala type-checker -in every compilation unit where the annotated symbol is accessed. An -annotation class can inherit from both `scala.ClassfileAnnotation` -and `scala.StaticAnnotation`. If an annotation class inherits from -neither `scala.ClassfileAnnotation` nor -`scala.StaticAnnotation`, its instances are visible only locally -during the compilation run that analyzes them. - -Classes inheriting from `scala.ClassfileAnnotation` may be -subject to further restrictions in order to assure that they can be -mapped to the host environment. In particular, on both the Java and -the .NET platforms, such classes must be toplevel; i.e. they may not -be contained in another class or object. Additionally, on both -Java and .NET, all constructor arguments must be constant expressions. +Other annotations may be interpreted by platform- or application-dependent +tools. The class `scala.annotation.Annotation` is the base class for +user-defined annotations. It has two sub-traits: +- `scala.annotation.StaticAnnotation`: Instances of a subclass of this trait + will be stored in the generated class files, and therefore accessible to + runtime reflection and later compilation runs. 
+- `scala.annotation.ConstantAnnotation`: Instances of a subclass of this trait + may only have arguments which are + [constant expressions](06-expressions.html#constant-expressions), and are + also stored in the generated class files. +- If an annotation class inherits from neither `scala.ConstantAnnotation` nor + `scala.StaticAnnotation`, its instances are visible only locally during the + compilation run that analyzes them. + +## Host-platform Annotations + +The host platform may define its own annotation format. These annotations do not +extend any of the classes in the `scala.annotation` package, but can generally +be used in the same way as Scala annotations. The host platform may impose +additional restrictions on the expressions which are valid as annotation +arguments. diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md index 76165b8a2c45..0caa21fc49e6 100644 --- a/spec/12-the-scala-standard-library.md +++ b/spec/12-the-scala-standard-library.md @@ -28,8 +28,8 @@ values which are not implemented as objects in the underlying host system. User-defined Scala classes which do not explicitly inherit from -`AnyVal` inherit directly or indirectly from `AnyRef`. They can -not inherit from both `AnyRef` and `AnyVal`. +`AnyVal` inherit directly or indirectly from `AnyRef`. They cannot +inherit from both `AnyRef` and `AnyVal`. 
Classes `AnyRef` and `AnyVal` are required to provide only the members declared in class `Any`, but implementations may add @@ -56,10 +56,10 @@ abstract class Any { final def != (that: Any): Boolean = !(this == that) /** Hash code; abstract here */ - def hashCode: Int = $\ldots$ + def hashCode: Int = ´\ldots´ /** Textual representation; abstract here */ - def toString: String = $\ldots$ + def toString: String = ´\ldots´ /** Type test; needs to be inlined to work as given */ def isInstanceOf[a]: Boolean @@ -78,35 +78,35 @@ final class AnyVal extends Any /** The root class of all reference types */ class AnyRef extends Any { def equals(that: Any): Boolean = this eq that - final def eq(that: AnyRef): Boolean = $\ldots$ // reference equality + final def eq(that: AnyRef): Boolean = ´\ldots´ // reference equality final def ne(that: AnyRef): Boolean = !(this eq that) - def hashCode: Int = $\ldots$ // hashCode computed from allocation address - def toString: String = $\ldots$ // toString computed from hashCode and class name + def hashCode: Int = ´\ldots´ // hashCode computed from allocation address + def toString: String = ´\ldots´ // toString computed from hashCode and class name def synchronized[T](body: => T): T // execute `body` in while locking `this`. } ``` -The type test `$x$.isInstanceOf[$T$]` is equivalent to a typed +The type test `´x´.isInstanceOf[´T´]` is equivalent to a typed pattern match ```scala -$x$ match { - case _: $T'$ => true +´x´ match { + case _: ´T'´ => true case _ => false } ``` -where the type $T'$ is the same as $T$ except if $T$ is -of the form $D$ or $D[\mathit{tps}]$ where $D$ is a type member of some outer class $C$. -In this case $T'$ is `$C$#$D$` (or `$C$#$D[tps]$`, respectively), whereas $T$ itself would expand to `$C$.this.$D[tps]$`. +where the type ´T'´ is the same as ´T´ except if ´T´ is +of the form ´D´ or ´D[\mathit{tps}]´ where ´D´ is a type member of some outer class ´C´. 
+In this case ´T'´ is `´C´#´D´` (or `´C´#´D[tps]´`, respectively), whereas ´T´ itself would expand to `´C´.this.´D[tps]´`. In other words, an `isInstanceOf` test does not check that types have the same enclosing instance. -The test `$x$.asInstanceOf[$T$]` is treated specially if $T$ is a +The test `´x´.asInstanceOf[´T´]` is treated specially if ´T´ is a [numeric value type](#value-classes). In this case the cast will be translated to an application of a [conversion method](#numeric-value-types) -`x.to$T$`. For non-numeric values $x$ the operation will raise a +`x.to´T´`. For non-numeric values ´x´ the operation will raise a `ClassCastException`. ## Value Classes @@ -146,14 +146,14 @@ from every numeric value type to all higher-ranked numeric value types. Therefore, lower-ranked types are implicitly converted to higher-ranked types when required by the [context](06-expressions.html#implicit-conversions). -Given two numeric value types $S$ and $T$, the _operation type_ of -$S$ and $T$ is defined as follows: If both $S$ and $T$ are subrange -types then the operation type of $S$ and $T$ is `Int`. Otherwise -the operation type of $S$ and $T$ is the larger of the two types wrt -ranking. Given two numeric values $v$ and $w$ the operation type of -$v$ and $w$ is the operation type of their run-time types. +Given two numeric value types ´S´ and ´T´, the _operation type_ of +´S´ and ´T´ is defined as follows: If both ´S´ and ´T´ are subrange +types then the operation type of ´S´ and ´T´ is `Int`. Otherwise +the operation type of ´S´ and ´T´ is the larger of the two types wrt +ranking. Given two numeric values ´v´ and ´w´ the operation type of +´v´ and ´w´ is the operation type of their run-time types. -Any numeric value type $T$ supports the following methods. +Any numeric value type ´T´ supports the following methods. 
* Comparison methods for equals (`==`), not-equals (`!=`), less-than (`<`), greater-than (`>`), less-than-or-equals @@ -166,14 +166,14 @@ Any numeric value type $T$ supports the following methods. * Arithmetic methods addition (`+`), subtraction (`-`), multiplication (`*`), division (`/`), and remainder (`%`), which each exist in 7 overloaded alternatives. Each - alternative takes a parameter of some numeric value type $U$. Its - result type is the operation type of $T$ and $U$. The operation is + alternative takes a parameter of some numeric value type ´U´. Its + result type is the operation type of ´T´ and ´U´. The operation is evaluated by converting the receiver and its argument to their operation type and performing the given arithmetic operation of that type. * Parameterless arithmetic methods identity (`+`) and negation - (`-`), with result type $T$. The first of these returns the - receiver unchanged, whereas the second returns its negation. + (`-`), with result type ´T´, or `Int` if ´T´ is a subrange type. + The first of these returns the receiver unchanged, whereas the second returns its negation. * Conversion methods `toByte`, `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` which convert the receiver object to the target type, using the rules of @@ -189,21 +189,21 @@ Integer numeric value types support in addition the following operations: {`|`}, and bitwise-exclusive-or (`^`), which each exist in 5 overloaded alternatives. Each alternative takes a parameter of some integer numeric value type. Its result type is the operation type of - $T$ and $U$. The operation is evaluated by converting the receiver and + ´T´ and ´U´. The operation is evaluated by converting the receiver and its argument to their operation type and performing the given bitwise operation of that type. * A parameterless bit-negation method (`~`). Its result type is - the receiver type $T$ or `Int`, whichever is larger. + the receiver type ´T´ or `Int`, whichever is larger. 
The operation is evaluated by converting the receiver to the result type and negating every bit in its value. * Bit-shift methods left-shift (`<<`), arithmetic right-shift (`>>`), and unsigned right-shift (`>>>`). Each of these - methods has two overloaded alternatives, which take a parameter $n$ + methods has two overloaded alternatives, which take a parameter ´n´ of type `Int`, respectively `Long`. The result type of the - operation is the receiver type $T$, or `Int`, whichever is larger. + operation is the receiver type ´T´, or `Int`, whichever is larger. The operation is evaluated by converting the receiver to the result - type and performing the specified shift by $n$ bits. + type and performing the specified shift by ´n´ bits. Numeric value types also implement operations `equals`, `hashCode`, and `toString` from class `Any`. @@ -227,7 +227,7 @@ def equals(other: Any): Boolean = other match { ``` The `hashCode` method returns an integer hashcode that maps equal -numeric values to equal results. It is guaranteed to be the identity for +numeric values to equal results. It is guaranteed to be the identity for type `Int` and for all subrange types. The `toString` method displays its receiver as an integer or @@ -353,29 +353,25 @@ right operand. ### The `Tuple` classes -Scala defines tuple classes `Tuple$n$` for $n = 2 , \ldots , 22$. +Scala defines tuple classes `Tuple´n´` for ´n = 2 , \ldots , 22´. These are defined as follows. ```scala package scala -case class Tuple$n$[+T_1, ..., +T_n](_1: T_1, ..., _$n$: T_$n$) { - def toString = "(" ++ _1 ++ "," ++ $\ldots$ ++ "," ++ _$n$ ++ ")" +case class Tuple´n´[+T_1, ..., +T_n](_1: T_1, ..., _´n´: T_´n´) { + def toString = "(" ++ _1 ++ "," ++ ´\ldots´ ++ "," ++ _´n´ ++ ")" } ``` -The implicitly imported [`Predef`](#the-predef-object) object defines -the names `Pair` as an alias of `Tuple2` and `Triple` -as an alias for `Tuple3`. 
- ### The `Function` Classes -Scala defines function classes `Function$n$` for $n = 1 , \ldots , 22$. +Scala defines function classes `Function´n´` for ´n = 1 , \ldots , 22´. These are defined as follows. ```scala package scala -trait Function$n$[-T_1, ..., -T_$n$, +R] { - def apply(x_1: T_1, ..., x_$n$: T_$n$): R +trait Function´n´[-T_1, ..., -T_´n´, +R] { + def apply(x_1: T_1, ..., x_´n´: T_´n´): R def toString = "" } ``` @@ -401,17 +397,17 @@ informational purposes only: ```scala final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { - def length: Int = $\ldots$ - def apply(i: Int): T = $\ldots$ - def update(i: Int, x: T): Unit = $\ldots$ - override def clone(): Array[T] = $\ldots$ + def length: Int = ´\ldots´ + def apply(i: Int): T = ´\ldots´ + def update(i: Int, x: T): Unit = ´\ldots´ + override def clone(): Array[T] = ´\ldots´ } ``` -If $T$ is not a type parameter or abstract type, the type `Array[T]` +If ´T´ is not a type parameter or abstract type, the type `Array[T]` is represented as the array type `|T|[]` in the underlying host system, where `|T|` is the erasure of `T`. -If $T$ is a type parameter or abstract type, a different representation might be +If ´T´ is a type parameter or abstract type, a different representation might be used (it is `Object` on the Java platform). #### Operations @@ -431,13 +427,13 @@ operations on an array `xs`: Two implicit conversions exist in `Predef` that are frequently applied to arrays: a conversion to `scala.collection.mutable.ArrayOps` and a conversion to -`scala.collection.mutable.WrappedArray` (a subtype of `scala.collection.Seq`). +`scala.collection.mutable.ArraySeq` (a subtype of `scala.collection.Seq`). Both types make many of the standard operations found in the Scala collections API available. 
The conversion to `ArrayOps` is temporary, as all operations -defined on `ArrayOps` return a value of type `Array`, while the conversion to `WrappedArray` -is permanent as all operations return a value of type `WrappedArray`. -The conversion to `ArrayOps` takes priority over the conversion to `WrappedArray`. +defined on `ArrayOps` return a value of type `Array`, while the conversion to `ArraySeq` +is permanent as all operations return a value of type `ArraySeq`. +The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. Because of the tension between parametrized types in Scala and the ad-hoc implementation of arrays in the host-languages, some subtle points @@ -447,10 +443,10 @@ explained in the following. #### Variance Unlike arrays in Java, arrays in Scala are _not_ -co-variant; That is, $S <: T$ does not imply -`Array[$S$] $<:$ Array[$T$]` in Scala. +co-variant; That is, ´S <: T´ does not imply +`Array[´S´] ´<:´ Array[´T´]` in Scala. However, it is possible to cast an array -of $S$ to an array of $T$ if such a cast is permitted in the host +of ´S´ to an array of ´T´ if such a cast is permitted in the host environment. For instance `Array[String]` does not conform to @@ -465,13 +461,13 @@ val xs = new Array[String](2) val ys: Array[Object] = xs.asInstanceOf[Array[Object]] // OK ``` -The instantiation of an array with a polymorphic element type $T$ requires -information about type $T$ at runtime. +The instantiation of an array with a polymorphic element type ´T´ requires +information about type ´T´ at runtime. This information is synthesized by adding a [context bound](07-implicits.html#context-bounds-and-view-bounds) -of `scala.reflect.ClassTag` to type $T$. +of `scala.reflect.ClassTag` to type ´T´. 
An example is the following implementation of method `mkArray`, which creates -an array of an arbitrary type $T$, given a sequence of $T$`s which +an array of an arbitrary type ´T´, given a sequence of ´T´`s which defines its elements: ```scala @@ -487,7 +483,7 @@ def mkArray[T : ClassTag](elems: Seq[T]): Array[T] = { } ``` -If type $T$ is a type for which the host platform offers a specialized array +If type ´T´ is a type for which the host platform offers a specialized array representation, this representation is used. ###### Example @@ -506,46 +502,46 @@ package scala object Array { /** copies array elements from `src` to `dest`. */ def copy(src: AnyRef, srcPos: Int, - dest: AnyRef, destPos: Int, length: Int): Unit = $\ldots$ + dest: AnyRef, destPos: Int, length: Int): Unit = ´\ldots´ /** Returns an array of length 0 */ def empty[T: ClassTag]: Array[T] = /** Create an array with given elements. */ - def apply[T: ClassTag](xs: T*): Array[T] = $\ldots$ + def apply[T: ClassTag](xs: T*): Array[T] = ´\ldots´ /** Creates array with given dimensions */ - def ofDim[T: ClassTag](n1: Int): Array[T] = $\ldots$ + def ofDim[T: ClassTag](n1: Int): Array[T] = ´\ldots´ /** Creates a 2-dimensional array */ - def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = $\ldots$ - $\ldots$ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = ´\ldots´ + ´\ldots´ /** Concatenate all argument arrays into a single array. */ - def concat[T: ClassTag](xss: Array[T]*): Array[T] = $\ldots$ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = ´\ldots´ /** Returns an array that contains the results of some element computation a number * of times. */ - def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = $\ldots$ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = ´\ldots´ /** Returns a two-dimensional array that contains the results of some element * computation a number of times. 
*/ - def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = $\ldots$ - $\ldots$ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = ´\ldots´ + ´\ldots´ /** Returns an array containing values of a given function over a range of integer * values starting from 0. */ - def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = $\ldots$ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = ´\ldots´ /** Returns a two-dimensional array containing values of a given function * over ranges of integer values starting from `0`. */ - def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = $\ldots$ - $\ldots$ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = ´\ldots´ + ´\ldots´ /** Returns an array containing a sequence of increasing integers in a range. */ - def range(start: Int, end: Int): Array[Int] = $\ldots$ + def range(start: Int, end: Int): Array[Int] = ´\ldots´ /** Returns an array containing equally spaced values in some integer interval. */ - def range(start: Int, end: Int, step: Int): Array[Int] = $\ldots$ + def range(start: Int, end: Int, step: Int): Array[Int] = ´\ldots´ /** Returns an array containing repeated applications of a function to a start value. */ - def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = $\ldots$ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = ´\ldots´ /** Enables pattern matching over arrays */ def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = Some(x) @@ -628,9 +624,10 @@ trait Node { ## The `Predef` Object The `Predef` object defines standard functions and type aliases -for Scala programs. It is always implicitly imported, so that all its -defined members are available without qualification. Its definition -for the JVM environment conforms to the following signature: +for Scala programs. 
It is implicitly imported, as described in +[the chapter on name binding](02-identifiers-names-and-scopes.html), +so that all its defined members are available without qualification. +Its definition for the JVM environment conforms to the following signature: ```scala package scala @@ -642,6 +639,12 @@ object Predef { def classOf[T]: Class[T] = null // this is a dummy, classOf is handled by compiler. + // valueOf ----------------------------------------------------------- + + /** Retrieve the single value of a type with a unique inhabitant. */ + @inline def valueOf[T](implicit vt: ValueOf[T]): T {} = vt.value + // instances of the ValueOf type class are provided by the compiler. + // Standard type aliases --------------------------------------------- type String = java.lang.String @@ -671,7 +674,7 @@ object Predef { def optManifest[T](implicit m: OptManifest[T]) = m // Minor variations on identity functions ----------------------------- - def identity[A](x: A): A = x // @see `conforms` for the implicit version + def identity[A](x: A): A = x def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements @@ -709,20 +712,6 @@ object Predef { ``` ```scala - // tupling --------------------------------------------------------- - - type Pair[+A, +B] = Tuple2[A, B] - object Pair { - def apply[A, B](x: A, y: B) = Tuple2(x, y) - def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) - } - - type Triple[+A, +B, +C] = Tuple3[A, B, C] - object Triple { - def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) - def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) - } - // Printing and reading ----------------------------------------------- def print(x: Any) = Console.print(x) @@ -730,21 +719,6 @@ object Predef { def println(x: Any) = Console.println(x) def printf(text: String, xs: Any*) = Console.printf(text.format(xs: _*)) - 
def readLine(): String = Console.readLine() - def readLine(text: String, args: Any*) = Console.readLine(text, args) - def readBoolean() = Console.readBoolean() - def readByte() = Console.readByte() - def readShort() = Console.readShort() - def readChar() = Console.readChar() - def readInt() = Console.readInt() - def readLong() = Console.readLong() - def readFloat() = Console.readFloat() - def readDouble() = Console.readDouble() - def readf(format: String) = Console.readf(format) - def readf1(format: String) = Console.readf1(format) - def readf2(format: String) = Console.readf2(format) - def readf3(format: String) = Console.readf3(format) - // Implicit conversions ------------------------------------------------ ... @@ -766,9 +740,9 @@ The available low-priority implicits include definitions falling into the follow can be implicitly converted to instances of class `runtime.RichInt`. 1. For every array type with elements of primitive type, a wrapper that - takes the arrays of that type to instances of a `runtime.WrappedArray` class. For instance, values of type `Array[Float]` can be implicitly converted to instances of class `runtime.WrappedArray[Float]`. + takes the arrays of that type to instances of a `ArraySeq` class. For instance, values of type `Array[Float]` can be implicitly converted to instances of class `ArraySeq[Float]`. There are also generic array wrappers that take elements - of type `Array[T]` for arbitrary `T` to `WrappedArray`s. + of type `Array[T]` for arbitrary `T` to `ArraySeq`s. 1. An implicit conversion from `String` to `WrappedString`. diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index 0e844bf2af2e..e31f8ec547dc 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -8,12 +8,10 @@ chapter: 13 The following descriptions of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. 
-_Unicode escapes_ are used to represent the Unicode character with the given hexadecimal code: - -```ebnf -UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ -``` +The nine [Bidirectional explicit formatting](https://www.unicode.org/reports/tr9/#Bidirectional_Character_Types) +characters `\u202a - \u202e` and `\u2066 - \u2069` (inclusive) are forbidden +from appearing in source files. Note that they can be represented using +unicode escapes in string and character literals. ## Lexical Syntax @@ -21,57 +19,77 @@ The lexical syntax of Scala is given by the following grammar in EBNF form: ```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ // and Unicode category Lu -lower ::= ‘a’ | … | ‘z’ // and Unicode category Ll -letter ::= upper | lower // and Unicode categories Lo, Lt, Nl +upper ::= ‘A’ | … | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | … | ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower digit ::= ‘0’ | … | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ -opchar ::= // printableChar not matched by (whiteSpace | upper | lower | - // letter | digit | paren | delim | opchar | Unicode_Sm | Unicode_So) -printableChar ::= // all characters in [\u0020, \u007F] inclusive -charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) - +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit 
hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +binaryDigit ::= ‘0’ | ‘1’ +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq op ::= opchar {opchar} varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ plainid ::= upper idrest - | varid - | op + | varid + | op id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ idrest ::= {letter | digit} [‘_’ op] integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] -decimalNumeral ::= ‘0’ | nonZeroDigit {digit} +decimalNumeral ::= digit {digit} hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} -digit ::= ‘0’ | nonZeroDigit -nonZeroDigit ::= ‘1’ | … | ‘9’ +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit {binaryDigit} floatingPointLiteral ::= digit {digit} ‘.’ digit {digit} [exponentPart] [floatType] - | ‘.’ digit {digit} [exponentPart] [floatType] - | digit {digit} exponentPart [floatType] - | digit {digit} [exponentPart] floatType + | ‘.’ digit {digit} [exponentPart] [floatType] + | digit {digit} exponentPart [floatType] + | digit {digit} [exponentPart] floatType exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit} floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ booleanLiteral ::= ‘true’ | ‘false’ -characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ stringLiteral ::= ‘"’ {stringElement} ‘"’ - | ‘"""’ multiLineChars ‘"""’ + | ‘"""’ multiLineChars ‘"""’ stringElement ::= charNoDoubleQuoteOrNewline - | UnicodeEscape - | charEscapeSeq + | escapeSeq multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘\$\$’ 
+ | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr +alphaid ::= upper idrest + | varid + symbolLiteral ::= ‘'’ plainid comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ - | ‘//’ “any sequence of characters up to end of line” + | ‘//’ “any sequence of characters up to end of line” -nl ::= $\mathit{“new line character”}$ +nl ::= ´\mathit{“new line character”}´ semi ::= ‘;’ | nl {nl} ``` @@ -86,6 +104,7 @@ grammar: | booleanLiteral | characterLiteral | stringLiteral + | interpolatedString | symbolLiteral | ‘null’ @@ -127,16 +146,17 @@ grammar: | ‘:’ Annotation {Annotation} | ‘:’ ‘_’ ‘*’ - Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr + Expr ::= (Bindings | [‘implicit’] (id | ‘_’)) ‘=>’ Expr | Expr1 Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] | ‘while’ ‘(’ Expr ‘)’ {nl} Expr - | ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’] [‘finally’ Expr] + | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] | ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’ | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr | ‘throw’ Expr | ‘return’ [Expr] | [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr | SimpleExpr1 ArgumentExprs ‘=’ Expr | PostfixExpr | PostfixExpr Ascription @@ -144,7 +164,8 @@ grammar: PostfixExpr ::= InfixExpr [id [nl]] InfixExpr ::= PrefixExpr | InfixExpr id [nl] InfixExpr - PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr + PrefixExpr ::= [PrefixOperator] SimpleExpr + PrefixOperator ::= ‘-’ | ‘+’ | ‘~’ | ‘!’ SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody) | BlockExpr | SimpleExpr1 [‘_’] @@ -164,12 +185,12 @@ grammar: | ‘{’ Block ‘}’ Block ::= BlockStat {semi BlockStat} [ResultExpr] BlockStat ::= Import - | {Annotation} [‘implicit’ | ‘lazy’] Def + | {Annotation} [‘implicit’] [‘lazy’] Def | {Annotation} {LocalModifier} TmplDef | Expr1 | ResultExpr ::= Expr1 - | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block + | (Bindings | [‘implicit’] (id | ‘_’) [‘:’ CompoundType]) ‘=>’ 
Block Enumerators ::= Generator {semi Generator} Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} @@ -179,10 +200,10 @@ grammar: Guard ::= ‘if’ PostfixExpr Pattern ::= Pattern1 { ‘|’ Pattern1 } - Pattern1 ::= varid ‘:’ TypePat + Pattern1 ::= boundvarid ‘:’ TypePat | ‘_’ ‘:’ TypePat | Pattern2 - Pattern2 ::= varid [‘@’ Pattern3] + Pattern2 ::= id [‘@’ Pattern3] | Pattern3 Pattern3 ::= SimplePattern | SimplePattern { id [nl] SimplePattern } @@ -191,7 +212,7 @@ grammar: | Literal | StableId | StableId ‘(’ [Patterns] ‘)’ - | StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ + | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’ | ‘(’ [Patterns] ‘)’ | XmlPattern Patterns ::= Pattern [‘,’ Patterns] @@ -286,7 +307,7 @@ grammar: ClassParents ::= Constr {‘with’ AnnotType} TraitParents ::= AnnotType {‘with’ AnnotType} Constr ::= AnnotType {ArgumentExprs} - EarlyDefs ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’ + EarlyDefs ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’ EarlyDef ::= {Annotation [nl]} {Modifier} PatVarDef ConstrExpr ::= SelfInvocation diff --git a/spec/14-references.md b/spec/14-references.md index caae5796b248..cc088dfcae11 100644 --- a/spec/14-references.md +++ b/spec/14-references.md @@ -94,7 +94,7 @@ for syntactic definitions?", organization = {EPFL}, month = feb, year = 2007, - note = {http://www.scala-lang.org/docu/manuals.html} + note = {https://www.scala-lang.org/docu/manuals.html} } @Book{odersky:scala-reference, @@ -125,7 +125,7 @@ for syntactic definitions?", year = 2003, month = jan, note = {\hspace*{\fill}\\ - \verb@http://www.cis.upenn.edu/~bcpierce/FOOL/FOOL10.html@} + \verb@https://www.cis.upenn.edu/~bcpierce/FOOL/FOOL10.html@} } %% Misc @@ -135,14 +135,14 @@ for syntactic definitions?", author = {W3C}, title = {Document Object Model ({DOM})}, howpublished = {\hspace*{\fill}\\ - \verb@http://www.w3.org/DOM/@} + \verb@https://www.w3.org/DOM/@} } @Misc{w3c:xml, author = {W3C}, title = {Extensible {M}arkup 
{L}anguage ({XML})}, howpublished = {\hspace*{\fill}\\ - \verb@http://www.w3.org/TR/REC-xml@} + \verb@https://www.w3.org/TR/REC-xml@} } @TechReport{scala-overview-tech-report, @@ -185,7 +185,7 @@ for syntactic definitions?", booktitle = {Proc. FOOL 12}, year = 2005, month = jan, - note = {\verb@http://homepages.inf.ed.ac.uk/wadler/fool@} + note = {\verb@https://homepages.inf.ed.ac.uk/wadler/fool@} } @InProceedings{odersky:scala-experiment, @@ -201,7 +201,7 @@ for syntactic definitions?", year = {2007}, month = jan, note = {FOOL-WOOD '07}, - short = {http://www.cis.upenn.edu/~bcpierce/papers/variance.pdf} + short = {https://www.cis.upenn.edu/~bcpierce/papers/variance.pdf} } --> diff --git a/spec/15-changelog.md b/spec/15-changelog.md index c88408682b60..5d24511ff1cb 100644 --- a/spec/15-changelog.md +++ b/spec/15-changelog.md @@ -6,17 +6,30 @@ chapter: 15 # Changelog -Changes in Version 2.8.0 ------------------------- +This changelog was no longer maintained after version 2.8.0. -#### Trailing commas +A pull request updating this chapter to list the most significant +changes made in more recent Scala versions would be highly welcome. + +Many language changes, especially larger ones, are documented in SIP +(Scala Improvement Process) proposals. Most proposals that were +accepted and implemented have not merged into the main spec. Pull +requests that merge SIPs into the main spec are also highly welcome. 
+ +To find out what has changed in Scala 2 since 2.8.0, you can consult +the following sources: + +* Scala release notes (recent versions): https://github.com/scala/scala/releases +* Scala release notes (older versions): https://scala-lang.org/blog/announcements/ +* Scala release notes (even older versions): presumably findable via search engine +* Spec changelog in version control: https://github.com/scala/scala/commits/2.13.x/spec +* SIPs: https://docs.scala-lang.org/sips/all.html + +## Changes in Version 2.8.0 Trailing commas in expression, argument, type or pattern sequences are no longer supported. -Changes in Version 2.8 ----------------------- - Changed visibility rules for nested packages (where done?) Changed [visibility rules](02-identifiers-names-and-scopes.html) @@ -41,8 +54,7 @@ Clarified differences between [`isInstanceOf` and pattern matches](12-the-scala- Allowed [`implicit` modifier on function literals](06-expressions.html#anonymous-functions) with a single parameter. -Changes in Version 2.7.2 ------------------------- +## Changes in Version 2.7.2 _(10-Nov-2008)_ @@ -61,11 +73,10 @@ A formal parameter to an anonymous function may now be a #### Unicode alternative for left arrow -The Unicode glyph ‘\\(\leftarrow\\)’ \\(`\u2190`\\) is now treated as a reserved +The Unicode glyph ‘´\leftarrow´’ ´`\u2190`´ is now treated as a reserved identifier, equivalent to the ASCII symbol ‘`<-`’. -Changes in Version 2.7.1 ------------------------- +## Changes in Version 2.7.1 _(09-April-2008)_ @@ -88,12 +99,11 @@ interpreted. The contractiveness requirement for [implicit method definitions](07-implicits.html#implicit-parameters) -has been dropped. Instead it is checked for each implicit expansion individually +has been dropped. Instead, it is checked for each implicit expansion individually that the expansion does not result in a cycle or a tree of infinitely growing types. 
-Changes in Version 2.7.0 ------------------------- +## Changes in Version 2.7.0 _(07-Feb-2008)_ @@ -145,8 +155,7 @@ Three restrictions on case classes have been removed. 3. Case classes may now come with companion objects. -Changes in Version 2.6.1 ------------------------- +## Changes in Version 2.6.1 _(30-Nov-2007)_ @@ -169,8 +178,7 @@ Self types can now be introduced without defining an alias name for trait Trait { this: T => ... } } -Changes in Version 2.6 ----------------------- +## Changes in Version 2.6 _(27-July-2007)_ @@ -193,7 +201,7 @@ one may for example write the following existential types It is now possible to define lazy value declarations using the new modifier [`lazy`](04-basic-declarations-and-definitions.html#value-declarations-and-definitions). A `lazy` value definition evaluates its right hand -side \\(e\\) the first time the value is accessed. Example: +side ´e´ the first time the value is accessed. Example: import compat.Platform._ val t0 = currentTime @@ -227,8 +235,7 @@ is a shorthand for new AnyRef{ def getName() = "aaron" } -Changes in Version 2.5 ----------------------- +## Changes in Version 2.5 _(02-May-2007)_ @@ -324,12 +331,11 @@ directly for functions of arities greater than one. Previously, only unary functions could be defined that way. Example: def scalarProduct(xs: Array[Double], ys: Array[Double]) = - (0.0 /: (xs zip ys)) { + (xs zip ys).foldLeft(0.0) { case (a, (b, c)) => a + b * c } -Changes in Version 2.4 ----------------------- +## Changes in Version 2.4 _(09-Mar-2007)_ @@ -337,24 +343,24 @@ _(09-Mar-2007)_ The `private` and `protected` modifiers now accept a [`[this]` qualifier](05-classes-and-objects.html#modifiers). -A definition \\(M\\) which is labelled `private[this]` is private, +A definition ´M´ which is labelled `private[this]` is private, and in addition can be accessed only from within the current object. -That is, the only legal prefixes for \\(M\\) are `this` or `$C$.this`. 
-Analogously, a definition \\(M\\) which is labelled `protected[this]` is +That is, the only legal prefixes for ´M´ are `this` or `´C´.this`. +Analogously, a definition ´M´ which is labelled `protected[this]` is protected, and in addition can be accessed only from within the current object. #### Tuples, revised -The syntax for [tuples](06-expressions.html#tuples) has been changed from \\(\\{…\\}\\) to -\\((…)\\). For any sequence of types \\(T_1 , … , T_n\\), +The syntax for [tuples](06-expressions.html#tuples) has been changed from ´\\{…\\}´ to +´(…)´. For any sequence of types ´T_1 , … , T_n´, -\\((T_1 , … , T_n)\\) is a shorthand for `Tuple$n$[$T_1 , … , T_n$]`. +´(T_1 , … , T_n)´ is a shorthand for `Tuple´n´[´T_1 , … , T_n´]`. -Analogously, for any sequence of expressions or patterns \\(x_1 -, … , x_n\\), +Analogously, for any sequence of expressions or patterns ´x_1 +, … , x_n´, -\\((x_1 , … , x_n)\\) is a shorthand for `Tuple$n$($x_1 , … , x_n$)`. +´(x_1 , … , x_n)´ is a shorthand for `Tuple´n´(´x_1 , … , x_n´)`. #### Access modifiers for primary constructors @@ -417,8 +423,7 @@ It is now possible to [combine operators with assignments] var x: int = 0 x += 1 -Changes in Version 2.3.2 ------------------------- +## Changes in Version 2.3.2 _(23-Jan-2007)_ @@ -456,13 +461,13 @@ Patterns” by Emir, Odersky and Williams. #### Tuples A new [lightweight syntax for tuples](06-expressions.html#tuples) has been introduced. -For any sequence of types \\(T_1 , … , T_n\\), +For any sequence of types ´T_1 , … , T_n´, -\\(\{T_1 , … , T_n \}\\) is a shorthand for `Tuple$n$[$T_1 , … , T_n$]`. +´\{T_1 , … , T_n \}´ is a shorthand for `Tuple´n´[´T_1 , … , T_n´]`. -Analogously, for any sequence of expressions or patterns \\(x_1, … , x_n\\), +Analogously, for any sequence of expressions or patterns ´x_1, … , x_n´, -\\(\{x_1 , … , x_n \}\\) is a shorthand for `Tuple$n$($x_1 , … , x_n$)`. +´\{x_1 , … , x_n \}´ is a shorthand for `Tuple´n´(´x_1 , … , x_n´)`. 
#### Infix operators of greater arities @@ -482,8 +487,7 @@ A new standard attribute [`deprecated`](11-annotations.html#deprecation-annotati is available. If a member definition is marked with this attribute, any reference to the member will cause a “deprecated” warning message to be emitted. -Changes in Version 2.3 ----------------------- +## Changes in Version 2.3 _(23-Nov-2006)_ @@ -493,8 +497,8 @@ A simplified syntax for [methods returning `unit`] (04-basic-declarations-and-definitions.html#procedures) has been introduced. Scala now allows the following shorthands: -`def f(params)` \\(\mbox{for}\\) `def f(params): unit` -`def f(params) { ... }` \\(\mbox{for}\\) `def f(params): unit = { ... }` +`def f(params)` **for** `def f(params): unit` +`def f(params) { ... }` **for** `def f(params): unit = { ... }` #### Type Patterns @@ -517,8 +521,7 @@ hierarchy have changed as follows: The old names are still available as type aliases. -Changes in Version 2.1.8 ------------------------- +## Changes in Version 2.1.8 _(23-Aug-2006)_ @@ -541,15 +544,15 @@ referenced from the companion module of the class and vice versa. The lookup method for [implicit definitions](07-implicits.html#implicit-parameters) has been generalized. -When searching for an implicit definition matching a type \\(T\\), now are considered +When searching for an implicit definition matching a type ´T´, now are considered 1. all identifiers accessible without prefix, and -2. all members of companion modules of classes associated with \\(T\\). +2. all members of companion modules of classes associated with ´T´. (The second clause is more general than before). Here, a class is _associated_ -with a type \\(T\\) if it is referenced by some part of \\(T\\), or if it is a -base class of some part of \\(T\\). +with a type ´T´ if it is referenced by some part of ´T´, or if it is a +base class of some part of ´T´. 
For instance, to find implicit members corresponding to the type HashSet[List[Int], String] @@ -574,8 +577,7 @@ This will match the second case and hence will print “q”. Before, the singleton types were erased to `List`, and therefore the first case would have matched, which is non-sensical. -Changes in Version 2.1.7 ------------------------- +## Changes in Version 2.1.7 _(19-Jul-2006)_ @@ -609,19 +611,17 @@ Legal alternatives are: val f = { x: T => E } val f = (x: T) => E -Changes in Version 2.1.5 ------------------------- +## Changes in Version 2.1.5 _(24-May-2006)_ #### Class Literals There is a new syntax for [class literals](06-expressions.html#literals): -For any class type \\(C\\), `classOf[$C$]` designates the run-time -representation of \\(C\\). +For any class type ´C´, `classOf[´C´]` designates the run-time +representation of ´C´. -Changes in Version 2.0 ----------------------- +## Changes in Version 2.0 _(12-Mar-2006)_ @@ -639,7 +639,7 @@ The following three words are now reserved; they cannot be used as #### Newlines as Statement Separators -[Newlines](http://www.scala-lang.org/files/archive/spec/2.11/) +[Newlines](https://www.scala-lang.org/files/archive/spec/2.11/) can now be used as statement separators in place of semicolons. #### Syntax Restrictions diff --git a/spec/README.md b/spec/README.md index 286b59fe02e2..d748dddedfd2 100644 --- a/spec/README.md +++ b/spec/README.md @@ -8,15 +8,15 @@ Third, we'd like to support different output formats. An html page per chapter w ## Editing -At the time of writing we are using Jekyll 3.3.0 and [Redcarpet 3.3.2](https://github.com/vmg/redcarpet) to generate the html. +We are using Jekyll and [Redcarpet](https://github.com/vmg/redcarpet) to generate the html. Check `Gemfile` for the current versions. -We aim to track the configuration GitHub Pages use but at times differences will arise as GitHub Pages evolves. 
+We aim to track the configuration GitHub Pages uses but differences may arise as GitHub Pages evolves. ## Building -Travis CI builds the spec automatically after every merged pull release and publishes to http://www.scala-lang.org/files/archive/spec/2.12/. +Travis CI builds the spec automatically after every merged pull release and publishes to https://www.scala-lang.org/files/archive/spec/2.13/. To preview locally, run the following commands in the root of your checkout scala/scala: `bundle install` to install Jekyll and `bundle exec jekyll serve -d build/spec/ -s spec/ -w --baseurl=""` to start it, diff --git a/spec/_config.yml b/spec/_config.yml index ad57339382d2..bd1f691c65d0 100644 --- a/spec/_config.yml +++ b/spec/_config.yml @@ -1,6 +1,7 @@ -baseurl: /files/archive/spec/2.12 +baseurl: /files/archive/spec/2.13 latestScalaVersion: 2.13 -thisScalaVersion: 2.12 +thisScalaVersion: 2.13 +versionCompareMessage: "an upcoming" safe: true lsi: false highlighter: false diff --git a/spec/_includes/table-of-contents.yml b/spec/_includes/table-of-contents.yml new file mode 100644 index 000000000000..b70f97da5424 --- /dev/null +++ b/spec/_includes/table-of-contents.yml @@ -0,0 +1,23 @@ + +
+ +

Table of Contents

+ +
    + {% assign sorted_pages = site.pages | sort:"name" %} + {% for post in sorted_pages %} + + {% if post.chapter >= 0 %} +
  1. + {{ post.title }} +
  2. + {% endif %} + {% endfor %} +
+
+ + diff --git a/spec/_includes/version-notice.yml b/spec/_includes/version-notice.yml index 31669682eb4c..5a7286631c11 100644 --- a/spec/_includes/version-notice.yml +++ b/spec/_includes/version-notice.yml @@ -1,3 +1,3 @@ {% if site.thisScalaVersion != site.latestScalaVersion %} -
This is the specification of a previous version of Scala. See the Scala {{ site.latestScalaVersion }} spec.
+
This is the specification of {{ site.versionCompareMessage }} version of Scala. See the Scala {{ site.latestScalaVersion }} spec.
{% endif %} diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml index 5c78a1d09c38..2589a105dff2 100644 --- a/spec/_layouts/default.yml +++ b/spec/_layouts/default.yml @@ -6,16 +6,10 @@ - - + + + + @@ -30,10 +24,10 @@ -
+
- +
{% include version-notice.yml %} {{ content }}
+ diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml index d23af00454bf..1106222bd088 100644 --- a/spec/_layouts/toc.yml +++ b/spec/_layouts/toc.yml @@ -11,6 +11,7 @@ + @@ -27,5 +28,7 @@ {% include version-notice.yml %} {{ content }} + + diff --git a/spec/id_dsa_travis.enc b/spec/id_dsa_travis.enc index 16bbd569dcc7..6709463580af 100644 --- a/spec/id_dsa_travis.enc +++ b/spec/id_dsa_travis.enc @@ -1,68 +1,68 @@ -U2FsdGVkX18jJJg9lNGgRS0cQhIsqc2UqBkuqZ1rEPKDdtU585GIP+ODcQ9dNPel -xguQyy8Y0nU4Op5eJO9q/4Fnlf9cUfPfbKfs6QXBw5vNHL53fuslhhoaFhLRW1og -dBSVq4Kv02HJjtbo/ZBXu8E4ppYoNzmsEbRkICWMmxFIXpQmiIts6TmN3gC9SedE -+EXdALOvYCUxJ5CLhlPz8kNsNBUSLZkeCvREDhUtOzCxTBfZXCZWDNxaNOOVB+ce -s11el19t+o87u7GAGuujvCiwtAWQ9cbxlME0MXp3NROBJ9TzKBWFHBH0LZGFxkR+ -kXn32EqdH9AQOKC4UWtjgtuZuFRlkVyLyAWtxG8hNxRoj4ddDWalg5BW87Fvd8Pl -Z7YErJbNbLufbHCxbdIfgoxWQIrMoHl87er26HLA7Ryzm1jngEwMQJJLfVdetYJB -E220NngADIt/oSXSCfFQKxbXrchZfjRHS47HBsd0/anhBGIKt4Gmmk4B8FtTO8H2 -m8QaVgzPEC+2ap/mi3DFg8LJO9PwJkbWRMAcdI7QXuy0P1wKR3Xnx/JxnVCJtqv6 -ISNdbKlzUAGTZHGFOo+GWjJuzNC6oo/jwjdLDrggEAR2mzqa9n0NG0yuq3xvU+pF -MWUadYBcJ9FwTWbw4BJPsLokmCpqFTjnLm5kaqv8E+Qfo/xcXtWkMwXE3Carbi5k -hXqvqNglYBECrsScnoEgv/R2nGrOE54FX1TGvnPY0e0OSI8dGbcDRNOhura/4KMl -iU3XYzBtxrJ6WI8RVCWOUYYwLUmEfbZZbAvVvSUvys7089RaQNOQQ+jcAyHqX+6A -DKkaA44x3vx5X//81qZMSE/iwLLaCykMjKnnils12mQqqrkfQAW4E8T00s273EV0 -/EyeDIr5gUKOIlhdrNfcKGe9y8+8jZkZe56bjg7TbbLeJf73Gdapk3FXCpxX3UGn -ZqWR8a6b4cwatH4yTnYff5dYA/0OtMm72zyxh7Sze0BPG8o3r0aw6cPFScEeE1fy -1PyR0+gYGlbEWVpoMJa1kByesaNkPHHC9+XnKu/ANxuFRaxs0W65fOGLszCIEnN0 -x96KiUCZYw6KfH3bYtRV47Nrq7H/9nNMdvPAajkRJM/1+Uf9ps9ygVYPGdA+ShNB -Me1tJmobunuacdRrSnfA2VIQTOTzxGDz82CUjJGHYPXo3Pd71EVhY6CL+4Ufgn1s -GZ6aoHKzlG10BOv2j5fEvnkeY1oky2M13Jbi20qQwkrWvKDnvFiQ/HUzZZAzXs3l -rxhBrnA9T9lPfcH3WOqFHI2v629iQvZdqLrw0Gvnz1E13ktiCXhWgjmF3J1PN/t2 -vq7ATZqIlYCelD2frbrzx41Y67qykGU8uDvTOkWDWMYGXzoFZCTW1ldDLvz8x4Pl -aEP6x5CglGQlEVdye9CPXEagl3eEbj3MVPteBMVS51so9DwWXuT9hiUiRhlhY+7G 
-pd7K84fRtxeqJ46/sYaDYXFMwblu/j88V3y7QL2uJESWbtxulFURUppeEnqDqrQD -Y7pe4UoG6FTuBEhP20K7T90j8ieFp4zPd/kd0OYxvln2JVF5AxDLiyJUN/R9UCnq -QTaa3P3cmgBKANsNAQs5GfoDAOmlxEqmFiO9Xpmowvax+8hX8oxLjETaa6t5N0Wp -HQUIJehQvuKJj3du8D4/w6oIsPNLG0fsYu0LH3nsmwlk/DBifUutpZzoFGxZdZSM -Hhy25pFSRlxhlECJ3TcCt/LcX3av5115L0bXDmLwIr6LuiL7sQt0vJRNL+ut2E5n -MMapoKAp4SEbJLLCg8S0Poo189WROd4D/skmzdCj4VDk3fOrWVnfZ2LIrySnyUOP -CUs9LTmce6GzS06DVSlbymSiNnKGJHwGSlfN2f2FKalvgCQYN3PSe1stNNX9TzzE -SdPAowzCf9/9WQnh215trjsjPPz7Pc0Xrh4zm4dM72Ek+v9dqOBpExdtLhF0MdIw -R7ZTMSxDx2GoWTWPO/CIL3U6+q/oO50vCzDrOYBI2z3dbgvgqCBzcvc7IzUhEMgp -UQHleTqTfBGkKSfBYT46+9k332JfDAUqKfElfrlxX3gG3thRYNZeUfxsi5tSD1E0 -wF9X0ST4Ab/hje9maF5UgTAmkHy3mZgsykElTrlWs34/jaKlMKxoNIlbk2WdV7VB -wrrIV1YPRC1/jYRnD35Fltv7drI26+3oDq8df9CK8DrNh6uCEIzZ/ohWIeL0zL2K -mDhwHHZwxj9HSGZWBs7pmDXy0WSb/TIkQ9TAy9Sv3kYJmH6GLV7eyYRrDHZQzDL9 -R6jfz0D4nZE9/hfV9lonaeVo80nyv+qAopMnv8hbiWTuWfmvCGSFr4qrHrkfnJHW -INHl6VVBEaoiX0bgHn+9AcymHy4hmixhmP/8HOFF47BdFiRLYlN9qYZY/jPo/EKF -Z6LIIFFxrQyJEay2k/cZoVeJ/vYgq/n8lV8W1gWhGKQKTNt83FcVFLfzmqKjXx+K -urroGtF2+LiHu1Vf439Z33GtouRAS94/tKKAWahKbDlSZAt8wF2PFq0u5JZdOtq+ -+09UFqkq6xf55w7SMqk7uvNDNVxpJ5k1R8/gYAn2cxTqc9eNJqwb3uKp0lDUDeM/ -nOtUKQjqnuIz/FTCQVgDKSeTiLo51U9Mb6OL8zuCPzZe8MDvRmjDqXNkHGbkINDV -Uw3VzfFPKexpxukwB7dit7Hxc7hRJM7Rg0J0tL5bWH03W642zqffJ2DTsSpNaq8U -Eac3UW0Vyw1utZ6mK+GDQvybIguao9vKt9Qvuiybbf5XUBLlHxOV61fVZLhj2Zes -A8qXr7hR+jozhZ8zMyYhOOPyEbecIjtEyfHzdh+eCW2Oi7jQ23iA1OWuEzi1c7rA -TBaoUpb7SEqEXmKw7GoP5bFBW3zfvAxI577P2mOpmwSFRoGTVIEBxRhPpuHYPnjG -WwhDqLQqZ/fMPzWFz0VpSDgp7RdmtWhSV1TT+SAW799f4bUXpwB5/qHK4XzGMd7G -GDJTrA9bGCmEiSWedQlThcbJzDhXDoslAjZyMPwQqS9OiogMui1olXV+I6HYyyNI -dTqcyFOxe5gbS4oHjjuwjJknOSdKPX6fPMCNGJda9v8u/wzAshrTJJyet33SZpNl -jUAjeEBAWEx4Yb+IaHUtdsDEaJxU0nBhGRJqBQVvhLXfFqo8E5fVj+ji+/Qi2Q3C -wo47ORC61/w9q22JHH4xl3t1QlCt6Bpcry6bO4dwA164sWHtiJ/OA72I7+RvbjlI -FjgBK68Az1Y2F7NG0/WnSOV1ktSWV0zhRYbpRoNq6mE97iT2h4hC6tBcCL4YzQZy -Id1NcbRzcn/fq5NJ+DXoA+dzYhNT9612dasun8qZE83NPHjC90KhvpZ3KrtKvxfR 
-mtTVxAvGSQ5PdI0n4QZVloXBIjv7tp/fYfB+aKwVprr7nBOn+SZIhuPhRaXAT3Uv -+g0q+qKgep7wBozFgP0863gfe7vXbUhTwyXQjbqnh8dWo4fQR7nFYJ/S25c3Ggbj -HcUplLQJ4JZmC9zhD2qCbRiqGe1s6kLRykK9c/GpIfCKFtOJnV0WJRxbSTXv+weG -ctWYHSO/fvmW5SH5ZC6vjCA/fMvX4bZ2LeH/HJMg/v4g05vKriVBBujsSMA5bBRi -+59BkZwdz82LvaPxcooMALJxMbMWxCeOakl8pTXOwg9OWOr2clQUkKFgRMPLuOPs -gIlwTLrWgYIAB5vGE9RqO1J959BjPUVbdO22UBXzoMPx0ERRvzvUyqFWwjayTlQu -40UNaSIdO9U+LtDCX8eRkqBP5LyI0vqlZP4HYIjoCIamYqrxO8AeJV6aYln1G72k -iY7iFmXc0Y0FwXbn1Ud5dwPomOwd1HP4nex7SCDJNhD0w3FaDvsqrPzjTGolDA33 -nmizSx2c8mLnXfu3I8j+WKZbEd4M5UmNnImy0HNYN86sHMZmXH+7e9F7cxKcnHQG -ZeEmPWmVhxSowWC0BvB6OTbSQu6ypSPRYLN4/aWKUA5TlWG6LC3o8ooYwpr/dZX/ -Bz3AmI38kKAL0ZeBmbZF7cQcC5jVL+cZdn6Mh1LxCtqkKFeiU5Cxey2t90tkYpi8 -AZJZdwePL6XcHpOdzDE/4IcxDbEiEdYn/XYG2fGMOqwYblVFoWFbuI08FKcbq8lc -n8dRsfHU3SbtIjtvstldcqPF0MMRroyHe3pLbJfeLwfcey89bv329bWSvVo53Wih -wyByW2Z2wfeVLO6wC52UClpZEIK2WAcDfunrbpP/4AmJq84SXmCwvZ7va7c9Kjnh -7I1zZpE8klFhsyW6WXhwrFF+Uq7jfA+dwe+3AJOiD++H5HFgAW7BNyfmrw5Iqjac +U2FsdGVkX19ePRmShLaiBw8T+ZZjbrD7zejYuQmDFA3U6/CSCjOzJLrQSBViWwH5 +/0BvyYdva00SW9g+soQfXShHlnJUz89ZpQj2Z82ipnebtcgy20jnlsNUdo0FG2aG +tAD3OUNxY+IsCeM7tvym955x7TGjDreygZfMUcVibJNZfk3MKPu1uF7xBD800hQE +1eW21bE2bUZeIMPY3t7ZIIqAH+RbYOir0O/XKoxhdTVgpXDE3ntaGIvLr/rleIyT +nsE5UN5XNP/ONj7hsK3kSSoDHujQ5TxvhF60IGJyXksJEBtM1ZirMG20/SKPuT9C +5ROkA3lOMNFkYiSiQiy4c6uU0ynSdkZ22xiJX6d+qvyhybZsBJhSo4ksE5XbwOjX +0QJ6pro5IT+dq/KQadzlGv/27+trc3Dvf5lnxlYZ0vZDx81/dwFUI0VVLF4CBIo5 +4KBH/b/2lOAkVB9sNpJZoutMh9c4ay6h0rAJC7BzXFxMZSKvDhJmjEUzVDGTgOny +cv6Tpabf/pC+KtqlxQoVq4JTfcGB/TPt7gKE87E4fIUPcBZ36A6NH2slbzNCBuSQ +4h5t2C7e/WPPCFVL5Q+0usLdUaMUoaKeKpDK/LecbOUKcdYfYhoSlgV23ApllsES +YLk9Ldl7sbUx9pVT/suI61CGs/3AVMjKq/l5wemM5T9Y7LYYK1TirEvRL2yZy9Eq +OnCWPA/2j9u13O4ZahHJ+JPp/eQXjPlt++IRk0mF5Ua1mKHWJIFr10SXKy9W/2n8 +b8BVnAaFTdv99vgRTjb0Ic5fYivEzvas/yxv7rA5d/LQ5oLNJrhzOnkQvzFzjGI6 +0N6vBV+1BDFQsnz0vBR7gzV+KhQenVIbyFQsxpp4pzP1N1/QZ/qujD6IiAG3H/DG +kLJc7UO3EfdQ6yRvGYVFsZ4GBBAjtD0y+lUIG7q6y57FvHKKvN+4Y7Yy5IUlfpUE 
+Y4hu5k4oAY9w0EkyOZsSGLMtwZhaLUX/jWgTjXoe8+pp234DIaSBWCsVqj5PHTnZ +6v1fdmpM+rXS4mFpJnegmmv0WG8l4Fk8QfUExxn5VgXkoAPFhYWDFNiFJw4ZRsBX +2KsnUyi4X0UYjAhVya6JVDMD1l42RskZFXKynTaXSXT+fS1zUZlcpXVy9qdOng7Z +UZrMixTt3kFcw2WDULpHqpbmqbbSXqoErZOSquKzAJw0AO81otsSFiCvb7q05Uw6 +Y6mWmPrIYDZJ78CraA1HPtDvmRW0jRes8kQ6BUhHeVZuS8bGvUSzwEwnzUtaF+Gz +XESJ2wKWVRHqyzbY48UHNllYQfReXV1xQ5t6mQeAVYa+sRRBFSXW4QFwIAxjuB2x +0+yBCzoP0DL8hqtrlYOf4ycQvb0xXq+CajTFzO1lD3tzonq9cM8BsAHXaTYyIYd1 +GtaMxhXfEDKAwaB1t9LHfvV31BGvDv+4XVNAc8ru/wVAxbNptVfstkBj1W5Ns7to +W8AmPKASAFddbkbPCYiVVNQnsa+RC9YPu87QQJcD+BiszPGz2LG0L5EOAiis5uFL +e6S3Lvbx2yomW5CaFOOd36zqB6DJmMDsWluHNzrX8BPUeSEcHfqMrM5WV4HhtTsU +c7Rs5fY0uA/J8VBVGUHLBodW4jnRJAVlCXsncRgj50cATwRnhQpCeRAjMsB7NSsF +Fo1wyMz9k/w69efqJKwX3R29zP3bCTTt36FdQ+uB37L+Fgek6FHUpjL8M0dGB4Tc +5y+YO9eUmzs7Lkl6AHEf4rM14yzL/ETJWP+IBjPg9Np3y6auiKKZ5thXSWmCUD05 +HBoBsX1Fk88NpfDzp2mTzFWDm2XWNnBzcSZn8jSLcSj6KAB3j8myun1u5Ah5M3gd +vgm23GoohOODwzhaZzQbFA8J7aVTXQkzshmqtfjLpg4QtpXThgSTZ7pij/UW4pKU +fMEQAFi7uu2mrkQK+V6H/USCL63bpcALSskXrXZkGMOxJZfA1ND6GCKWQPZeVi1h +O3mgbVsRhRZ8F1rGySwP/Z9D0IWQRBotuVWh4rHqoYzafpO1QjVRm/yfVOSbr9yB +ObEFLIv6c+Eu4I8FbM839qLUiufv+8tUsHzVjN+zP14KwDaiVMC3Y56YQp4wqR2B +IAufYraqoP+q8wmFiux9hPDy857sgyXqdPIQy+p0yNuUJl8ZlQYzCgPhNx4pE1P5 +YOoNJ9AsaN0CMI82M6A8thjPLZfFZ+6Nt8jdBipmMe0APq3wfb9NPWlVx3sh0/Bp +cF3y3xQRgRBk8Twq3Imol2cFCsYu8cQNyPxKCQG/NHKVUffXiUoFsBCvg8oGYU0s +mew25XAx9iZ7+/JC0dmeMQ2xOF9dKPnIhcM5rVt8WSFX4IxTawpUAQlN4N6rHFfx +/w2WHInL34zpBDTQqKUWC+AdxVMc9Is8X1Zpv+GoBv3LEHt8GNKRFG6HmTW6sz+v +0aHbvT2jU1iWqDf9icL29MRT0nXuzoZN0Nf69RBjvnTh35gE8r7y5URaBVI0mZkU +ZL5Fohc2mLmzR7Te8B6/eAdov8nkeLPg5CDkq1T7O/R8hpiHGncfkaqbZTv0oUdN +1Hu9Kt12aakem647KnfdhsWifzMv3nY6uT082iXbI9uXvh1WLMp1HZkTeUvARAan +i/VgiO/0+BBTv/XywpJphsy4UfOJ5cTbg2FWQ8f/DsJMqlbsBQeqD+G9j9b7W2Eg +f3XdvLoWhwR0uLCeHyAA+wfzogltXRxavX8c9wmzbkl8X1fYN6aiPJRr45lenvqS +4n2PAj2qX23n8+sI9iH1Af026Nrb/Kvbo9f/gfaQj2Z2WXiGIT0/RGH4Mz3V/mvD +sNKvSVzQ5VEkvxcMtXmkC7AJBYOKSyv0Vp/2ySzltxkghvBrrbq/RX9Cr2iJBZJN 
+RrqfefkT8A7vOI+YSjNRTIrRHTc/UfX+nZldzCPfeh3lU2eKUkappZHsGXda++uN +K7mMrXNoy4yCd5YNTCeLiQorklGxzeCCtoa8C+gDSbJY7HtjkvwbeaXUi7CpNPa3 +0GBPe5bQcK+vsynVgHnGU8qH4VOE7dgDWMjUi5+IdGC26zcsM6VvMArfk93Ny3xX +5AS61/4oMKBAedxVQejMD/xUVdjf6x0U+6gKGIlZFyiNl4kPtY+o0Ok7BkFItsDn +sC9dKRlwrvQlI6uNE3+Rk6R5rQLX1EW4UBnOL7YOOWLypiviB0DKas8FTL6RzWfl +TeZRwDS7nYWXYHSBvexEfDnEbv4Xnncz0gQ42ixmTXWVNNGcS8mNLR8GKpQGKEX7 +t6Bub1GZd8EsdVDkG1EUU3qwk6fx4PgGfqxZ+MgrZOlXhYbHmJTE83IeuYfBbAb3 +2MxbOhYmENigWNRf5S9vRkMr254xDJ1eIAAE3FHqeW1fEPbrHy8M1AS1DKlEoNMI +yW2lcOP0HAuib4sLXTqa8d00h7qiClyy3NCtPwKyUganSzSIKOMO7G+Bbf6gJfhN +VBr58/nj8aUZzKCdJO5U1Hou6/fUPnTltyURrfbe/B0RpMCCoUNcwpfT0VltOEDa +4pDD3Z9lnejSmCKplbWLvEWVPi4muNXg9E08cTnolqQIx0zWTMMZkhmzq3z3hKh9 +F1uLWaZd/dzyIxkHVTujKfyEaOmFH+MDzquHoJFaXtlK2220ARSlTgEBUHfICesA +dtXDw/ipuUCy5GAloUWZDJGz8DwCWBwsl/pN+oXq0SK0kZXcjCn04l/LVikAJjUK +fcAlg3SAkwXW17pocvOfxCF6cBJBcNYi74V5n5GSW0entbx4J3ki4UpEI0OQFGEJ +9alenvjUqJGHRGLjMdhv0YjNX15Ww/eAaBFlm19z7Uf02EuTDx4RuxyODGn/oYUa +NXB0obcO2t9ZLj1KrAgY4mseerdY3jJeh2fk6g2Unbo+RDMtB1fMcyaP2ApCxlZg +GVRYULd8shdCKQTg/5eUcNvVpE66m1EyfreE9XZBLwf35O7Bb1t1Aj56gWHg2raS +gLsdecV+7dDSMm71QNNhLreo1iQ6uKKRM5KATHCbvSzeYSTwGNOzXHYBjEC48RpR +nGn8qNT1s7Ddl6W8/kABN0L3i4dNgAIE10AuJuaJGukr0Wiv/aWookD/lGgB6SlS +EJOqZks1YQC/7gLgYIiYnL1iphHonLclqh+GHCqEONPfql7XwonawtNnPYvGVz20 +XynW1kKiF05CPWsolLhgOj8F4eVeTFEG5qPfELZeK0ADxIkbpWOXnYUWXLn59gby +sdijsfJtmWh5aaESy5iEfBTedGaX60+AntTwoN0ncXuseDorwEo3DrUuObjCi5wL +vhxedV446Do4PEEinUV499CGrMlc+lB2UEn5lJ2Fi1uhakbvhhTLL3zgmhaNlr0u diff --git a/spec/index.md b/spec/index.md index d7e79dafb721..df126db7bd44 100644 --- a/spec/index.md +++ b/spec/index.md @@ -3,20 +3,7 @@ title: Scala Language Specification layout: toc --- -# Table of Contents - -
    - {% assign sorted_pages = site.pages | sort:"name" %} - {% for post in sorted_pages %} - - {% if post.chapter >= 0 %} -
  1. - {{ post.title }} -
  2. - {% endif %} - {% endfor %} -
+{% include table-of-contents.yml %} #### Authors and Contributors @@ -48,7 +35,7 @@ definitions or glue code. Scala has been developed from 2001 in the programming methods laboratory at EPFL. Version 1.0 was released in November 2003. This document describes the second version of the language, which was -released in March 2006. It acts a reference for the language +released in March 2006. It acts as a reference for the language definition and some core library modules. It is not intended to teach Scala or its concepts; for this there are [other documents](14-references.html). diff --git a/spec/public/scripts/main.js b/spec/public/scripts/main.js index f0509aba4142..9ade9c770f1e 100644 --- a/spec/public/scripts/main.js +++ b/spec/public/scripts/main.js @@ -27,27 +27,41 @@ function heading(i, heading, $heading) { } } -$('#toc').toc( - { - 'selectors': 'h1,h2,h3', - 'smoothScrolling': false, - 'chapter': currentChapter(), - 'headerLevel': 1, - 'headerCounts': [-1, currentChapter() - 1, 1, 1], - 'headerText': heading - } -); +// ignore when using wkhtmltopdf, or it won't work... 
+if(window.jekyllEnv !== 'spec-pdf') { + $('#toc').toc( + { + 'selectors': 'h1,h2,h3', + 'smoothScrolling': false, + 'chapter': currentChapter(), + 'headerLevel': 1, + 'headerCounts': [-1, currentChapter() - 1, 1, 1], + 'headerText': heading + } + ); +} // no language auto-detect so that EBNF isn't detected as scala hljs.configure({ languages: [] }); -// syntax highlighting after mathjax is loaded so that mathjax can be used in code blocks -MathJax.Hub.Queue(function () { +// KaTeX configuration +document.addEventListener("DOMContentLoaded", function() { + renderMathInElement(document.body, { + delimiters: [ + {left: "´", right: "´", display: false}, // "display: false" -> inline + {left: "$$", right: "$$", display: true} + ], + ignoredTags: ['script', 'noscript', 'style', 'textarea'], + }); + // syntax highlighting after KaTeX is loaded, + // so that math can be used in code blocks hljs.initHighlighting(); $("pre nobr").addClass("fixws"); -}) + // point when all necessary js is done, so PDF to be rendered + window.status = "loaded"; +}); $("#chapters a").each(function (index) { if (document.location.pathname.endsWith($(this).attr("href"))) diff --git a/spec/public/stylesheets/print.css b/spec/public/stylesheets/print.css index 3fbc5596c055..f0efff28b203 100644 --- a/spec/public/stylesheets/print.css +++ b/spec/public/stylesheets/print.css @@ -2,14 +2,41 @@ body { padding: 0px; - margin: 0.5em; + margin: 0px; } -.anchor, #navigation, .to_top { - display: none; +.anchor, #navigation, .to_top, .version-notice, .hidden-print { + display: none !important; +} + +.print-only { + display: block; } #content-container { width: 100%; float: none; } + +/* no scrollbars, jump to next row.. 
*/ +.highlight pre code { + overflow: hidden; + white-space: pre-wrap; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 0; + padding: 0px 32px; + max-width: none; + min-width: none; + min-height: none; + background-color: #FFF; +} + +/* Avoid clipped headings https://github.com/pdfkit/pdfkit/issues/113#issuecomment-7027798 */ +h2, h3, h4, h5, h6 { + padding: 0px; + margin: 0px; +} diff --git a/spec/public/stylesheets/screen.css b/spec/public/stylesheets/screen.css index 36f4a5a18126..2073613eaea7 100644 --- a/spec/public/stylesheets/screen.css +++ b/spec/public/stylesheets/screen.css @@ -425,7 +425,7 @@ header { color: #FFFFFF; text-decoration: none; font-size: 0.63vw; - padding: 100% 8px; + padding: 100% 5px; } #chapters a:hover, #chapters a:focus, #github:hover, #github:focus { @@ -515,3 +515,7 @@ header { font-weight: bold; text-decoration: underline; } + +.print-only { + display: none; +} diff --git a/spec/spec-toc.xslt b/spec/spec-toc.xslt new file mode 100644 index 000000000000..437b15e3e6f4 --- /dev/null +++ b/spec/spec-toc.xslt @@ -0,0 +1,64 @@ + + + + + + + Table of Contents + + + ./public/stylesheets/fonts.css + + + + +

Table of Contents

+
+ + +
+ +
  • + + + +
      + added to prevent self-closing tags in QtXmlPatterns + +
    +
  • +
    +
    diff --git a/src/build/InnerObjectTestGen.scala b/src/build/InnerObjectTestGen.scala deleted file mode 100644 index e0b889c96982..000000000000 --- a/src/build/InnerObjectTestGen.scala +++ /dev/null @@ -1,308 +0,0 @@ -import scala.collection.mutable - -/** All contexts where objects can be embedded. */ -object Contexts extends Enumeration { - val Class, Object, Trait, Method, PrivateMethod, Anonfun, ClassConstructor, TraitConstructor, LazyVal, Val = Value - - val topLevel = List(Class, Object, Trait) -} - - -/** Test generation of inner objects, trying to cover as many cases as possible. It proceeds - * by progressively adding nesting layers around a 'payload body'. - * - * There are three scenarios (each generating a full combinatorial search): - * - plain object with single-threaded access - * - private object with single-threaded access - * - plain object with multi-threaded access. - * - * Special care is taken to skip problematic cases (or known bugs). For instance, - * it won't generate objects inside lazy vals (leads to deadlock), or objects that - * are initialized in the static constructors (meaning inside 'val' inside a top-level - * object, or equivalent). - * - * Usage: TestGen - * - by default it's 2 levels. Currently, 3-level deep uncovers bugs in the type checker. 
- * - * @author Iulian Dragos - */ -object TestGen { - val testFile = "object-testers-automated.scala" - - val payload = -""" var ObjCounter = 0 - - object Obj { ObjCounter += 1} - Obj // one - - def singleThreadedAccess(x: Any) = { - x == Obj - } - - def runTest { - try { - assert(singleThreadedAccess(Obj)) - assert(ObjCounter == 1, "multiple instances: " + ObjCounter) - println("ok") - } catch { - case e => print("failed "); e.printStackTrace() - } - } -""" - - val payloadPrivate = -""" var ObjCounter = 0 - - private object Obj { ObjCounter += 1} - Obj // one - - def singleThreadedAccess(x: Any) = { - x == Obj - } - - def runTest { - try { - assert(singleThreadedAccess(Obj)) - assert(ObjCounter == 1, "multiple instances: " + ObjCounter) - println("ok") - } catch { - case e => print("failed "); e.printStackTrace() - } - } -""" - - val payloadMT = -""" @volatile var ObjCounter = 0 - - object Obj { ObjCounter += 1} - - def multiThreadedAccess() { - val threads = for (i <- 1 to 5) yield new Thread(new Runnable { - def run = Obj - }) - - threads foreach (_.start()) - threads foreach (_.join()) - } - - def runTest { - try { - multiThreadedAccess() - assert(ObjCounter == 1, "multiple instances: " + ObjCounter) - println("ok") - } catch { - case e => print("multi-threaded failed "); e.printStackTrace() - } - } -""" - - - import Contexts._ - - val template = -""" -%s - -%s - -object Test { - def main(args: Array[String]) { - %s - } -} -""" - - var counter = 0 - def freshName(name: String) = { - counter += 1 - name + counter - } - - val bodies = new mutable.ListBuffer[String] - val triggers = new mutable.ListBuffer[String] - - /** Generate the nesting code. 
*/ - def generate(depth: Int, // how many levels we still need to 'add' around the current body - body: String, // the body of one test, so far - trigger: String, // the code that needs to be invoked to run the test so far - nested: List[Contexts.Value], // the path from the innermost to the outermost context - p: List[Contexts.Value] => Boolean, // a predicate for filtering problematic cases - privateObj: Boolean = false) { // are we using a private object? - - def shouldBeTopLevel = - ((depth == 1) - || (nested.headOption == Some(PrivateMethod)) - || (nested.isEmpty && privateObj)) - - val enums = - if (shouldBeTopLevel) Contexts.topLevel else Contexts.values.toList - - if (depth == 0) { - if (p(nested)) {bodies += body; triggers += trigger } - } else { - for (ctx <- enums) { - val (body1, trigger1) = ctx match { - case Class => - val name = freshName("Class") + "_" + depth - (""" - class %s { - %s - def run { %s } - } - """.format(name, body, trigger), "(new %s).run".format(name)) - - case Trait => - val name = freshName("Trait") + "_" + depth - (""" - trait %s { - %s - def run { %s } - } - """.format(name, body, trigger), "(new %s {}).run".format(name)) - - case Object => - val name = freshName("Object") + "_" + depth - (""" - object %s { - %s - def run { %s } // trigger - } - """.format(name, body, trigger), "%s.run".format(name)) - - case Method => - val name = freshName("method") + "_" + depth - (""" - def %s { - %s - %s // trigger - } - """.format(name, body, trigger), name) - - case PrivateMethod => - val name = freshName("method") + "_" + depth - (""" - private def %s { - %s - %s // trigger - } - """.format(name, body, trigger), name) - - case Val => - val name = freshName("value") + "_" + depth - (""" - val %s = { - %s - %s // trigger - } - """.format(name, body, trigger), name) - - case LazyVal => - val name = freshName("lzvalue") + "_" + depth - (""" - lazy val %s = { - %s - %s // trigger - } - """.format(name, body, trigger), name) - - case Anonfun => 
- val name = freshName("fun") + "_" + depth - (""" - val %s = () => { - %s - %s // trigger - } - """.format(name, body, trigger), name + "()") - - case ClassConstructor => - val name = freshName("Class") + "_" + depth - (""" - class %s { - { // in primary constructor - %s - %s // trigger - } - } - """.format(name, body, trigger), "(new %s)".format(name)) - - case TraitConstructor => - val name = freshName("Trait") + "_" + depth - (""" - trait %s { - { // in primary constructor - %s - %s // trigger - } - } - """.format(name, body, trigger), "(new %s {})".format(name)) - - } - generate(depth - 1, body1, trigger1, ctx :: nested, p) - } - } - } - - /** Only allow multithreaded tests if not inside a static initializer. */ - private def allowMT(structure: List[Contexts.Value]): Boolean = { - var nesting = structure - while ((nesting ne Nil) && nesting.head == Object) { - nesting = nesting.tail - } - if (nesting ne Nil) - !(nesting.head == Val) - else - true - } && !objectInsideLazyVal(structure) - - /** Known bug: object inside lazyval leads to deadlock. */ - private def objectInsideLazyVal(structure: List[Contexts.Value]): Boolean = - structure.contains(LazyVal) - - - def usage() { - val help = -""" - Usage: TestGen - - - how deeply nested should the objects be? default is 2. - (Currently, 3-level deep uncovers bugs in the type checker). - - Test generation of inner objects, trying to cover as many cases as possible. It proceeds - by progressively adding nesting layers around a 'payload body'. - - There are three scenarios (each generating a full combinatorial search): - - plain object with single-threaded access - - private object with single-threaded access - - plain object with multi-threaded access. - - Special care is taken to skip problematic cases (or known bugs). 
For instance, - it won't generate objects inside lazy vals (leads to deadlock), or objects that - are initialized in the static constructors (meaning inside 'val' inside a top-level - object, or equivalent). -""" - - println(help) - System.exit(1) - } - - def main(args: Array[String]) { - if (args.isEmpty || args.contains("-help")) usage() - - val depth = if (args.length < 1) 2 else args(0).toInt - - val header = -""" -/* ================================================================================ - Automatically generated on %tF. Do Not Edit (unless you have to). - (%d-level nesting) - ================================================================================ */ -""".format(new java.util.Date, depth) - - generate(depth, payload, "runTest", List(), x => true) - // private - generate(depth, payloadPrivate, "runTest", List(), x => true, true) - generate(depth, payloadMT, "runTest", List(), allowMT) - - println(template.format(header, bodies.mkString("", "\n", ""), triggers.mkString("", "\n", ""))) - } -} diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala deleted file mode 100644 index 6405650d881c..000000000000 --- a/src/build/dbuild-meta-json-gen.scala +++ /dev/null @@ -1,47 +0,0 @@ -// Use this script to generate dbuild-meta.json - -// To generate the file: -// - check out https://github.com/typesafehub/dbuild -// - run `sbt metadata/console` -// - paste the code below - -// The `version` field is required for the ProjMeta data structure. 
However, dbuild will -// overwrite the version specified here with the version number found in the build.number -// file, so the actual value doesn't matter, see ScalaBuildSystem: -// https://github.com/typesafehub/dbuild/blob/25b087759cc52876712c594ea4172148beea1310/support/src/main/scala/com/typesafe/dbuild/support/scala/ScalaBuildSystem.scala#L351 - -import com.typesafe.dbuild.model._ - -val meta = - ProjMeta(version = "2.12.0", projects = Seq( - Project("scala-library", "org.scala-lang", - Seq(ProjectRef("scala-library", "org.scala-lang")), - Seq.empty), - Project("scala-reflect", "org.scala-lang", - Seq(ProjectRef("scala-reflect", "org.scala-lang")), - Seq(ProjectRef("scala-library", "org.scala-lang"))), - Project("scala-compiler", "org.scala-lang", - Seq(ProjectRef("scala-compiler", "org.scala-lang")), - Seq(ProjectRef("scala-reflect", "org.scala-lang"), - ProjectRef("scala-xml", "org.scala-lang.modules") - )), - - // Project("scala-repl", "org.scala-lang", - // Seq(ProjectRef("scala-repl", "org.scala-lang")), - // Seq(ProjectRef("scala-compiler", "org.scala-lang"))), // jline - - // Project("scala-interactive", "org.scala-lang", - // Seq(ProjectRef("scala-interactive", "org.scala-lang")), - // Seq(ProjectRef("scala-compiler", "org.scala-lang"), ProjectRef("scaladoc", "org.scala-lang"))), - - // Project("scaladoc", "org.scala-lang", - // Seq(ProjectRef("scaladoc", "org.scala-lang")), - // Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"))), - - Project("scalap", "org.scala-lang", - Seq(ProjectRef("scalap", "org.scala-lang")), - Seq(ProjectRef("scala-compiler", "org.scala-lang"))) - - )) - -println(Utils.writeValueFormatted(meta)) diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala index fa0a108ad287..d2dfa1f8fa6c 100644 --- 
a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala +++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,12 +14,13 @@ package scala.reflect.macros package compiler import scala.tools.nsc.Global +import scala.util.{Failure, Success, Try} abstract class DefaultMacroCompiler extends Resolvers with Validators with Errors { val global: Global - import global._ + import global.{Try => _, _} import analyzer._ import treeInfo._ import definitions._ @@ -38,8 +39,9 @@ abstract class DefaultMacroCompiler extends Resolvers /** Resolves a macro impl reference provided in the right-hand side of the given macro definition. * * Acceptable shapes of the right-hand side: - * 1) [].[[]] // vanilla macro impl ref - * 2) [].[[]] // shiny new macro bundle impl ref + * + * 1. `[].[ [] ] // vanilla macro impl ref` + * 1. `[].[ [] ] // shiny new macro bundle impl ref` * * Produces a tree, which represents a reference to a macro implementation if everything goes well, * otherwise reports found errors and returns EmptyTree. The resulting tree should have the following format: @@ -50,10 +52,10 @@ abstract class DefaultMacroCompiler extends Resolvers * or be a dummy instance of a macro bundle (e.g. new MyMacro(???).expand). 
*/ def resolveMacroImpl: Tree = { - def tryCompile(compiler: MacroImplRefCompiler): scala.util.Try[Tree] = { - try { compiler.validateMacroImplRef(); scala.util.Success(compiler.macroImplRef) } - catch { case ex: MacroImplResolutionException => scala.util.Failure(ex) } - } + def tryCompile(compiler: MacroImplRefCompiler): Try[Tree] = + try { compiler.validateMacroImplRef(); Success(compiler.macroImplRef) } + catch { case ex: MacroImplResolutionException => Failure(ex) } + def wrong() = Try(MacroBundleWrongShapeError()) val vanillaImplRef = MacroImplRefCompiler(macroDdef.rhs.duplicate, isImplBundle = false) val (maybeBundleRef, methName, targs) = macroDdef.rhs.duplicate match { case Applied(Select(Applied(RefTree(qual, bundleName), _, Nil), methName), targs, Nil) => @@ -68,9 +70,16 @@ abstract class DefaultMacroCompiler extends Resolvers isImplBundle = true ) val vanillaResult = tryCompile(vanillaImplRef) - val bundleResult = tryCompile(bundleImplRef) + val bundleResult = + typer.silent(_.typedTypeConstructor(maybeBundleRef)) match { + case SilentResultValue(result) if looksLikeMacroBundleType(result.tpe) => + val bundle = result.tpe.typeSymbol + if (isMacroBundleType(bundle.tpe)) tryCompile(bundleImplRef) + else wrong() + case _ => wrong() + } - def ensureUnambiguousSuccess() = { + def ensureUnambiguousSuccess(): Unit = { // we now face a hard choice of whether to report ambiguity: // 1) when there are eponymous methods in both bundle and object // 2) when both references to eponymous methods are resolved successfully @@ -99,6 +108,7 @@ abstract class DefaultMacroCompiler extends Resolvers try { if (vanillaResult.isSuccess || bundleResult.isSuccess) ensureUnambiguousSuccess() if (vanillaResult.isFailure && bundleResult.isFailure) reportMostAppropriateFailure() + //else // TODO vanillaResult.orElse(bundleResult).get } catch { case MacroImplResolutionException(pos, msg) => diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala 
b/src/compiler/scala/reflect/macros/compiler/Errors.scala index c0757d4f0f9c..8dc8a73c7f69 100644 --- a/src/compiler/scala/reflect/macros/compiler/Errors.scala +++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,7 @@ package scala.reflect.macros package compiler -import scala.compat.Platform.EOL +import scala.annotation.tailrec import scala.reflect.macros.util.Traces trait Errors extends Traces { @@ -47,7 +47,7 @@ trait Errors extends Traces { private def replClassBasedMacroAddendum(isReplClassBased: Boolean): String = if (isReplClassBased) - "\nnote: macro definition is not supported in the REPL when using -Yrepl-classbased." + "\nnote: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false." else "" @@ -60,7 +60,7 @@ trait Errors extends Traces { trait Error { self: MacroImplRefCompiler => - // sanity check errors + // check errors def MacroImplReferenceWrongShapeError(isReplClassBased: Boolean = false) = implRefError( "macro implementation reference has wrong shape. 
required:\n"+ @@ -68,7 +68,7 @@ trait Errors extends Traces { "macro [].[[]]" + replClassBasedMacroAddendum(isReplClassBased)) def MacroImplWrongNumberOfTypeArgumentsError() = { - val diagnostic = if (macroImpl.typeParams.length > targs.length) "has too few type arguments" else "has too many arguments" + val diagnostic = if (macroImpl.typeParams.sizeCompare(targs) > 0) "has too few type arguments" else "has too many arguments" implRefError(s"macro implementation reference $diagnostic for " + treeSymTypeMsg(macroImplRef)) } @@ -100,7 +100,7 @@ trait Errors extends Traces { private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean, untype: Boolean) = { def preprocess(tpe: Type) = if (untype) untypeMetalevel(tpe) else tpe - var pssPart = (pss map (ps => ps map (p => p.defStringSeenAs(preprocess(p.info))) mkString ("(", ", ", ")"))).mkString + var pssPart = pss.map(_.map(p => p.defStringSeenAs(preprocess(p.info))).mkString("(", ", ", ")")).mkString if (abbreviate) pssPart = abbreviateCoreAliases(pssPart) var retPart = preprocess(restpe).toString if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart) @@ -112,8 +112,9 @@ trait Errors extends Traces { private def checkConforms(slot: String, rtpe: Type, atpe: Type) = { val verbose = macroDebugVerbose + @tailrec def check(rtpe: Type, atpe: Type): Boolean = { - def success() = { if (verbose) println(rtpe + " <: " + atpe + "?" 
+ EOL + "true"); true } + def success() = { if (verbose) println(f"$rtpe <: $atpe?%ntrue"); true } (rtpe, atpe) match { case _ if rtpe eq atpe => success() case (TypeRef(_, RepeatedParamClass, rtpe :: Nil), TypeRef(_, RepeatedParamClass, atpe :: Nil)) => check(rtpe, atpe) diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala index d1a2f0ba433b..0906331643a6 100644 --- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala +++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -34,8 +34,8 @@ trait Resolvers { lazy val (macroImplRef, isBlackbox, macroImplOwner, macroImpl, targs) = typer.silent(_.typed(markMacroImplRef(untypedMacroImplRef)), reportAmbiguousErrors = false) match { case SilentResultValue(macroImplRef @ MacroImplReference(_, isBlackbox, owner, meth, targs)) => (macroImplRef, isBlackbox, owner, meth, targs) - case SilentResultValue(macroImplRef) => MacroImplReferenceWrongShapeError() - case SilentTypeError(err) => abort(err.errPos, err.errMsg) + case SilentResultValue(_) => MacroImplReferenceWrongShapeError() + case ste: SilentTypeError => abort(ste.err.errPos, ste.err.errMsg) } } } diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala index 25e7a4cd4c03..99f3bcddf9a4 100644 --- a/src/compiler/scala/reflect/macros/compiler/Validators.scala +++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -27,24 +27,23 @@ trait Validators { self: MacroImplRefCompiler => def validateMacroImplRef() = { - sanityCheck() - if (macroImpl != Predef_???) checkMacroDefMacroImplCorrespondence() - } - - private def sanityCheck() = { - if (!macroImpl.isMethod) MacroImplReferenceWrongShapeError() - if (macroImpl.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError() - if (!macroImpl.isPublic) MacroImplNotPublicError() - if (macroImpl.isOverloaded) MacroImplOverloadedError() - val implicitParams = aparamss.flatten filter (_.isImplicit) - if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams) - val effectiveOwner = if (isImplMethod) macroImplOwner else macroImplOwner.owner - val effectivelyStatic = effectiveOwner.isStaticOwner || effectiveOwner.moduleClass.isStaticOwner - val correctBundleness = if (isImplMethod) macroImplOwner.isModuleClass else macroImplOwner.isClass && !macroImplOwner.isModuleClass - if (!effectivelyStatic || !correctBundleness) { - val isReplClassBased = settings.Yreplclassbased.value && effectiveOwner.enclosingTopLevelClass.isInterpreterWrapper - MacroImplReferenceWrongShapeError(isReplClassBased) + def confidenceCheck() = { + if (!macroImpl.isMethod) MacroImplReferenceWrongShapeError() + if (macroImpl.typeParams.sizeCompare(targs) != 0) MacroImplWrongNumberOfTypeArgumentsError() + if (!macroImpl.isPublic) MacroImplNotPublicError() + if (macroImpl.isOverloaded) MacroImplOverloadedError() + val implicitParams = aparamss.flatten filter (_.isImplicit) + if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams) + val effectiveOwner = if (isImplMethod) macroImplOwner else macroImplOwner.owner + val effectivelyStatic = effectiveOwner.isStaticOwner || effectiveOwner.moduleClass.isStaticOwner + val correctBundleness = if (isImplMethod) macroImplOwner.isModuleClass else macroImplOwner.isClass && !macroImplOwner.isModuleClass + if (!effectivelyStatic || !correctBundleness) { + val isReplClassBased = 
settings.Yreplclassbased.value && effectiveOwner.enclosingTopLevelClass.isInterpreterWrapper + MacroImplReferenceWrongShapeError(isReplClassBased) + } } + confidenceCheck() + if (macroImpl != Predef_???) checkMacroDefMacroImplCorrespondence() } private def checkMacroDefMacroImplCorrespondence() = { @@ -53,16 +52,16 @@ trait Validators { // we only check strict correspondence between value parameterss // type parameters of macro defs and macro impls don't have to coincide with each other - if (aparamss.length != rparamss.length) MacroImplParamssMismatchError() - foreach2(aparamss, rparamss)((aparams, rparams) => { - if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams) - if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams) - }) + if (aparamss.sizeCompare(rparamss) != 0) MacroImplParamssMismatchError() + foreach2(aparamss, rparamss) { (aparams, rparams) => + if (aparams.sizeCompare(rparams) < 0) MacroImplMissingParamsError(aparams, rparams) + if (rparams.sizeCompare(aparams) < 0) MacroImplExtraParamsError(aparams, rparams) + } try { - // cannot fuse this map2 and the map2 above because if aparamss.flatten != rparamss.flatten + // cannot fuse this foreach2 and the foreach2 above because if aparamss.flatten != rparamss.flatten // then `atpeToRtpe` is going to fail with an unsound substitution - map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => { + foreach2(aparamss.flatten, rparamss.flatten) { (aparam, rparam) => if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam) if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam) val aparamtpe = aparam.tpe match { @@ -70,7 +69,7 @@ trait Validators { case tpe => tpe } checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam) - }) + } checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret) @@ -79,7 +78,7 @@ trait Validators { val boundsOk = 
typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, "")) boundsOk match { case SilentResultValue(true) => // do nothing, success - case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams) + case SilentResultValue(false) | _: SilentTypeError => MacroImplTargMismatchError(atargs, atparams) } } catch { case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex) @@ -128,7 +127,7 @@ trait Validators { */ private lazy val macroImplSig: MacroImplSig = { val tparams = macroImpl.typeParams - val paramss = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol) + val paramss = transformTypeTagEvidenceParams(macroImplRef, (_, _) => NoSymbol) val ret = macroImpl.info.finalResultType MacroImplSig(tparams, paramss, ret) } @@ -171,7 +170,7 @@ trait Validators { else mmap(macroDdef.vparamss)(param) val macroDefRet = if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe - else computeMacroDefTypeFromMacroImplRef(macroDdef, macroImplRef) orElse AnyTpe + else AnyTpe val implReturnType = sigma(increaseMetalevel(ctxPrefix, macroDefRet)) object SigmaTypeMap extends TypeMap { @@ -186,7 +185,7 @@ trait Validators { def apply(tp: Type): Type = tp match { case TypeRef(pre, sym, args) => val pre1 = mapPrefix(pre) - val args1 = mapOverArgs(args, sym.typeParams) + val args1 = args mapConserve this if ((pre eq pre1) && (args eq args1)) tp else typeRef(pre1, sym, args1) case _ => diff --git a/src/compiler/scala/reflect/macros/contexts/Aliases.scala b/src/compiler/scala/reflect/macros/contexts/Aliases.scala index 5035d2e99ce2..fc6762dc625c 100644 --- a/src/compiler/scala/reflect/macros/contexts/Aliases.scala +++ b/src/compiler/scala/reflect/macros/contexts/Aliases.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -44,4 +44,4 @@ trait Aliases { implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) { def toImplicitCandidate = ImplicitCandidate(oi.info.pre, oi.info.sym, oi.pt, oi.tree) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala index e0c1b71ca95a..369fbe1cc61f 100644 --- a/src/compiler/scala/reflect/macros/contexts/Context.scala +++ b/src/compiler/scala/reflect/macros/contexts/Context.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala index 694aff3232ff..50b528c05b6d 100644 --- a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala +++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -23,6 +23,7 @@ trait Enclosures { private lazy val site = callsiteTyper.context private def lenientEnclosure[T <: Tree : ClassTag]: Tree = site.nextEnclosing(c => classTag[T].runtimeClass.isInstance(c.tree)).tree + @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.13.4") private def strictEnclosure[T <: Tree : ClassTag]: T = site.nextEnclosing(c => classTag[T].runtimeClass.isInstance(c.tree)) match { case analyzer.NoContext => throw EnclosureException(classTag[T].runtimeClass, site.enclosingContextChain map (_.tree)) case cx => cx.tree.asInstanceOf[T] @@ -31,18 +32,23 @@ trait Enclosures { val macroApplication: Tree = expandee def enclosingPackage: PackageDef = site.nextEnclosing(_.tree.isInstanceOf[PackageDef]).tree.asInstanceOf[PackageDef] lazy val enclosingClass: Tree = lenientEnclosure[ImplDef] + @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.13.4") def enclosingImpl: ImplDef = strictEnclosure[ImplDef] + @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.13.4") def enclosingTemplate: Template = strictEnclosure[Template] lazy val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate) private val analyzerOpenMacros = universe.analyzer.openMacros val enclosingMacros: List[Context] = this :: analyzerOpenMacros // include self lazy val enclosingMethod: Tree = lenientEnclosure[DefDef] + @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.13.4") def enclosingDef: DefDef = strictEnclosure[DefDef] lazy val enclosingPosition: Position = if (this.macroApplication.pos ne NoPosition) this.macroApplication.pos else { analyzerOpenMacros.collectFirst { case x if x.macroApplication.pos ne NoPosition => x.macroApplication.pos }.getOrElse(NoPosition) } + @deprecated("c.enclosingTree-style APIs are now deprecated; 
consult the scaladoc for more information", "2.13.4") val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit + @deprecated("c.enclosingTree-style APIs are now deprecated; consult the scaladoc for more information", "2.13.4") val enclosingRun: Run = universe.currentRun } diff --git a/src/compiler/scala/reflect/macros/contexts/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala index 74f1d7ed3878..6ff18a15644f 100644 --- a/src/compiler/scala/reflect/macros/contexts/Evals.scala +++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,12 +24,18 @@ trait Evals { private lazy val evalImporter = ru.internal.createImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }] def eval[T](expr: Expr[T]): T = { + def specialK(x: Any) = + x match { + case _: global.TypeRef => true + case _: global.Symbol => true + case _ => false + } expr.tree match { - case global.Literal(global.Constant(value)) => + case global.Literal(global.Constant(value)) if !specialK(value) => value.asInstanceOf[T] case _ => val imported = evalImporter.importTree(expr.tree) evalToolBox.eval(imported).asInstanceOf[T] } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala index 857386f1ceca..8bac593b1628 100644 --- a/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala +++ b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala index b456933603c4..292663c2fdb1 100644 --- a/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala +++ b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,14 +21,14 @@ trait FrontEnds { def echo(pos: Position, msg: String): Unit = universe.reporter.echo(pos, msg) - @deprecated("Use echo, info messages are always forced", since="2.12.13") + @deprecated("Use echo, info messages are always forced", since="2.13.0") def info(pos: Position, msg: String, force: Boolean): Unit = universe.reporter.echo(pos, msg) def hasWarnings: Boolean = universe.reporter.hasErrors def hasErrors: Boolean = universe.reporter.hasErrors - // TODO: add WarningCategory parameter in 2.14 (not binary compatible) + // TODO: add WarningCategory parameter (not binary compatible) def warning(pos: Position, msg: String): Unit = callsiteTyper.context.warning(pos, msg, WarningCategory.Other) def error(pos: Position, msg: String): Unit = callsiteTyper.context.error(pos, msg) diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala index c6dfc56d62e4..9d0f1ca47391 100644 --- a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala +++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/compiler/scala/reflect/macros/contexts/Internals.scala b/src/compiler/scala/reflect/macros/contexts/Internals.scala index 2601273568f2..e7c0e726be13 100644 --- a/src/compiler/scala/reflect/macros/contexts/Internals.scala +++ b/src/compiler/scala/reflect/macros/contexts/Internals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,7 +21,7 @@ trait Internals extends scala.tools.nsc.transform.TypingTransformers { lazy val internal: ContextInternalApi = new global.SymbolTableInternal with ContextInternalApi { val enclosingOwner = callsiteTyper.context.owner - class HofTransformer(hof: (Tree, TransformApi) => Tree) extends Transformer { + class HofTransformer(hof: (Tree, TransformApi) => Tree) extends AstTransformer { val api = new TransformApi { def recur(tree: Tree): Tree = hof(tree, this) def default(tree: Tree): Tree = superTransform(tree) diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala index a39794dfcd42..00d1ce7dd4bb 100644 --- a/src/compiler/scala/reflect/macros/contexts/Names.scala +++ b/src/compiler/scala/reflect/macros/contexts/Names.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -45,11 +45,11 @@ trait Names { // // TODO: hopefully scala/bug#7823 will provide an ultimate answer to this problem. // In the meanwhile I will also keep open the original issue: scala/bug#6879 "c.freshName is broken". 
- val prefix = if (name.endsWith("$")) name else name + "$" // scala/bug#8425 - val sortOfUniqueSuffix = freshNameCreator.newName(nme.FRESH_SUFFIX) + val prefix: String = if (name.endsWith("$")) name else name + "$" // scala/bug#8425 + val sortOfUniqueSuffix: Any = freshNameCreator.newName(nme.FRESH_SUFFIX) prefix + sortOfUniqueSuffix } def freshName[NameType <: Name](name: NameType): NameType = name.mapName(freshName(_)).asInstanceOf[NameType] -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala index 22bbc34e8ac0..2084e0e748de 100644 --- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -27,12 +27,12 @@ trait Parsers { val oldReporter = global.reporter try { global.reporter = sreporter - val parser = newUnitParser(new CompilationUnit(newSourceFile(code, "")) { + val parser = newUnitParser(new global.CompilationUnit(newSourceFile(code, "")) { override implicit val fresh: FreshNameCreator = currentFreshNameCreator }) val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages()) sreporter.infos.foreach { - case Info(pos, msg, Reporter.ERROR) => throw ParseException(pos, msg) + case Info(pos, msg, Reporter.ERROR, _) => throw ParseException(pos, msg) case _ => } tree diff --git a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala index b9dc58295dca..b8fe0ac6f510 100644 --- a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,14 +21,14 @@ trait Reifiers { import definitions._ def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree = { - assert(ExprClass != NoSymbol) + assert(ExprClass != NoSymbol, "Missing ExprClass") val result = scala.reflect.reify.`package`.reifyTree(self.universe)(callsiteTyper, universe, mirror, tree) logFreeVars(enclosingPosition, result) result } def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree = { - assert(TypeTagsClass != NoSymbol) + assert(TypeTagsClass != NoSymbol, "Missing TypeTagsClass") val result = scala.reflect.reify.`package`.reifyType(self.universe)(callsiteTyper, universe, mirror, tpe, concrete) logFreeVars(enclosingPosition, result) result @@ -41,7 +41,7 @@ trait Reifiers { scala.reflect.reify.`package`.reifyEnclosingRuntimeClass(universe)(callsiteTyper) def unreifyTree(tree: Tree): Tree = { - assert(ExprSplice != NoSymbol) + assert(ExprSplice != NoSymbol, "Missing ExprSplice") Select(tree, ExprSplice) } @@ -67,18 +67,18 @@ trait Reifiers { def logFreeVars(symtab: SymbolTable): Unit = // logging free vars only when they are untyped prevents avalanches of duplicate messages symtab.syms map (sym => symtab.symDef(sym)) foreach { - case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null => + case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms.value && binding.tpe == null => reporter.echo(position, s"free term: ${showRaw(binding)} $origin") - case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null => + case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes.value && binding.tpe == null => reporter.echo(position, s"free type: ${showRaw(binding)} $origin") case _ => // do nothing } - if (universe.settings.logFreeTerms || 
universe.settings.logFreeTypes) - reification match { + if (universe.settings.logFreeTerms.value || universe.settings.logFreeTypes.value) + (reification: @unchecked) match { case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab) - case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab) + case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab) } } } diff --git a/src/compiler/scala/reflect/macros/contexts/Traces.scala b/src/compiler/scala/reflect/macros/contexts/Traces.scala index 6487adec728d..49a4202a9102 100644 --- a/src/compiler/scala/reflect/macros/contexts/Traces.scala +++ b/src/compiler/scala/reflect/macros/contexts/Traces.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala index a36f530af32d..f5510145b22d 100644 --- a/src/compiler/scala/reflect/macros/contexts/Typers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -39,7 +39,7 @@ trait Typers { case universe.analyzer.SilentResultValue(result) => macroLogVerbose(result) result - case error@universe.analyzer.SilentTypeError(_) => + case error: universe.analyzer.SilentTypeError => macroLogVerbose(error.err.errMsg) if (!silent) throw new TypecheckException(error.err.errPos, error.err.errMsg) universe.EmptyTree @@ -53,13 +53,13 @@ trait Typers { def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = { macroLogVerbose(s"inferring implicit value of type $pt, macros = ${!withMacrosDisabled}") - universe.analyzer.inferImplicit(universe.EmptyTree, pt, false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg)) + universe.analyzer.inferImplicit(universe.EmptyTree, pt, isView = false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg)) } def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = { macroLogVerbose(s"inferring implicit view from $from to $to for $tree, macros = ${!withMacrosDisabled}") val viewTpe = universe.appliedType(universe.definitions.FunctionClass(1).toTypeConstructor, List(from, to)) - universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg)) + universe.analyzer.inferImplicit(tree, viewTpe, isView = true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg)) } def resetLocalAttrs(tree: Tree): Tree = universe.resetAttrs(universe.duplicateAndKeepPositions(tree)) diff --git a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala index d41e2993f18e..232a484df032 100644 --- 
a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala +++ b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,4 +16,4 @@ package runtime import scala.reflect.internal.util.Position import scala.util.control.ControlThrowable -class AbortMacroException(val pos: Position, val msg: String) extends Throwable(msg) with ControlThrowable \ No newline at end of file +class AbortMacroException(val pos: Position, val msg: String) extends ControlThrowable(msg) diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala index 944e2b917745..51facd4b713e 100644 --- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -39,7 +39,7 @@ trait JavaReflectionRuntimes { case _ => false } - val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor) + val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor): @unchecked args => { val implObj = bundleCtor.newInstance(args.c) implMeth.invoke(implObj, args.others.asInstanceOf[Seq[AnyRef]]: _*) @@ -53,4 +53,4 @@ trait JavaReflectionRuntimes { } } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 66589f76f509..e3d2fb17054d 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,6 @@ package scala.reflect.macros package runtime - import scala.reflect.internal.Flags._ import scala.reflect.runtime.ReflectionUtils @@ -37,7 +36,7 @@ trait MacroRuntimes extends JavaReflectionRuntimes { /** Default implementation of `macroRuntime`. 
* Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroRuntime for more details) */ - private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime] + private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]() def standardMacroRuntime(expandee: Tree): MacroRuntime = { val macroDef = expandee.symbol macroLogVerbose(s"looking for macro implementation: $macroDef") diff --git a/src/compiler/scala/reflect/macros/runtime/package.scala b/src/compiler/scala/reflect/macros/runtime/package.scala index e580d2eb12b1..f7804661257a 100644 --- a/src/compiler/scala/reflect/macros/runtime/package.scala +++ b/src/compiler/scala/reflect/macros/runtime/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,4 +14,4 @@ package scala.reflect.macros package object runtime { type Context = scala.reflect.macros.contexts.Context -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala index 81e8be07597e..16dc123cd9f9 100644 --- a/src/compiler/scala/reflect/macros/util/Helpers.scala +++ b/src/compiler/scala/reflect/macros/util/Helpers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -39,7 +39,7 @@ trait Helpers { import runDefinitions._ val MacroContextUniverse = definitions.MacroContextUniverse - val treeInfo.MacroImplReference(isBundle, _, _, macroImpl, _) = macroImplRef + val treeInfo.MacroImplReference(isBundle, _, _, macroImpl, _) = macroImplRef: @unchecked val paramss = macroImpl.paramss val ContextParam = paramss match { case Nil | _ :+ Nil => NoSymbol // no implicit parameters in the signature => nothing to do diff --git a/src/compiler/scala/reflect/macros/util/Traces.scala b/src/compiler/scala/reflect/macros/util/Traces.scala index 1bee131b8e0e..d2865b98b7f8 100644 --- a/src/compiler/scala/reflect/macros/util/Traces.scala +++ b/src/compiler/scala/reflect/macros/util/Traces.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,6 +18,6 @@ trait Traces { val macroDebugLite = globalSettings.YmacrodebugLite.value val macroDebugVerbose = globalSettings.YmacrodebugVerbose.value - @inline final def macroLogLite(msg: => Any) { if (macroDebugLite || macroDebugVerbose) println(msg) } - @inline final def macroLogVerbose(msg: => Any) { if (macroDebugVerbose) println(msg) } + @inline final def macroLogLite(msg: => Any): Unit = { if (macroDebugLite || macroDebugVerbose) println(msg) } + @inline final def macroLogVerbose(msg: => Any): Unit = { if (macroDebugVerbose) println(msg) } } diff --git a/src/compiler/scala/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala index c9039fcbaaff..fcf340297660 100644 --- a/src/compiler/scala/reflect/quasiquotes/Holes.scala +++ b/src/compiler/scala/reflect/quasiquotes/Holes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,7 +17,7 @@ import scala.reflect.internal.Flags._ import scala.reflect.macros.TypecheckException class Rank private[Rank](val value: Int) extends AnyVal { - def pred = { assert(value - 1 >= 0); new Rank(value - 1) } + def pred = { assert(value - 1 >= 0, "Rank must be positive"); new Rank(value - 1) } def succ = new Rank(value + 1) override def toString = if (value == 0) "no dots" else "." * (value + 1) } @@ -72,7 +72,7 @@ trait Holes { self: Quasiquotes => * It packs together a rank, pre-reified tree representation * (possibly preprocessed) and position. */ - abstract class Hole { + sealed abstract class Hole { val tree: Tree val pos: Position val rank: Rank @@ -193,13 +193,14 @@ trait Holes { self: Quasiquotes => case Bind(pname, inner @ Bind(_, Typed(Ident(nme.WILDCARD), tpt))) => (pname, inner.pos, Some(tpt)) case Bind(pname, inner @ Typed(Ident(nme.WILDCARD), tpt)) => (pname, inner.pos, Some(tpt)) case Bind(pname, inner) => (pname, inner.pos, None) + case x => throw new MatchError(x) } val treeNoUnlift = Bind(placeholderName, Ident(nme.WILDCARD)) lazy val tree = tptopt.map { tpt => val TypeDef(_, _, _, typedTpt) = - try c.typecheck(TypeDef(NoMods, TypeName("T"), Nil, tpt)) - catch { case TypecheckException(pos, msg) => c.abort(pos.asInstanceOf[c.Position], msg) } + (try c.typecheck(TypeDef(NoMods, TypeName("T"), Nil, tpt)) + catch { case TypecheckException(pos, msg) => c.abort(pos.asInstanceOf[c.Position], msg) }): @unchecked val tpe = typedTpt.tpe val (iterableRank, _) = stripIterable(tpe) if (iterableRank.value < rank.value) @@ -244,9 +245,10 @@ trait Holes { self: Quasiquotes => val helperName = rank match { case DotDot => nme.UnliftListElementwise case DotDotDot => nme.UnliftListOfListsElementwise + case x => throw new MatchError(x) } val lifter = inferUnliftable(tpe) - assert(helperName.isTermName) + assert(helperName.isTermName, "Must be a term") // q"val 
$name: $u.internal.reificationSupport.${helperName.toTypeName} = $u.internal.reificationSupport.$helperName($lifter)" ValDef(NoMods, name, AppliedTypeTree(Select(Select(Select(u, nme.internal), nme.reificationSupport), helperName.toTypeName), List(TypeTree(tpe))), diff --git a/src/compiler/scala/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala index 785751eea814..da1991a0b0e6 100644 --- a/src/compiler/scala/reflect/quasiquotes/Parsers.scala +++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -42,15 +42,16 @@ trait Parsers { self: Quasiquotes => val posMapList = posMap.toList def containsOffset(start: Int, end: Int) = start <= offset && offset < end def fallbackPosition = posMapList match { - case (pos1, (start1, end1)) :: _ if start1 > offset => pos1 - case _ :+ ((pos2, (start2, end2))) if end2 <= offset => pos2.withPoint(pos2.point + (end2 - start2)) + case (pos1, (start1, _)) :: _ if start1 > offset => pos1 + case _ :+ ((pos2, (start2, end2))) if end2 <= offset => pos2.withPoint(pos2.point + (end2 - start2)) + case x => throw new MatchError(x) } posMapList.sliding(2).collect { - case (pos1, (start1, end1)) :: _ if containsOffset(start1, end1) => (pos1, offset - start1) - case (pos1, (start1, end1)) :: (pos2, (start2, _)) :: _ if containsOffset(end1, start2) => (pos1, end1 - start1) - case _ :: (pos2, (start2, end2)) :: _ if containsOffset(start2, end2) => (pos2, offset - start2) - }.map { case (pos, offset) => - pos.withPoint(pos.point + offset) + case (pos1, (start1, end1)) :: _ if containsOffset(start1, end1) => (pos1, offset - start1) + case (pos1, (start1, end1)) :: (_, (start2, _)) :: _ if containsOffset(end1, start2) => (pos1, end1 - start1) + case _ :: (pos2, (start2, end2)) :: _ 
if containsOffset(start2, end2) => (pos2, offset - start2) + }.map { + case (pos, offset) => pos.withPoint(pos.point + offset) }.toList.headOption.getOrElse(fallbackPosition) } @@ -69,8 +70,8 @@ trait Parsers { self: Quasiquotes => override implicit lazy val fresh: FreshNameCreator = new FreshNameCreator(nme.QUASIQUOTE_PREFIX) // Do not check for tuple arity. The placeholders can support arbitrary tuple sizes. - override def makeSafeTupleTerm(trees: List[Tree], offset: Offset): Tree = treeBuilder.makeTupleTerm(trees) - override def makeSafeTupleType(trees: List[Tree], offset: Offset): Tree = treeBuilder.makeTupleType(trees) + override def makeSafeTupleTerm(trees: List[Tree]): Tree = treeBuilder.makeTupleTerm(trees) + override def makeSafeTupleType(trees: List[Tree]): Tree = treeBuilder.makeTupleType(trees) override val treeBuilder = new ParserTreeBuilder { override implicit def fresh: FreshNameCreator = parser.fresh @@ -91,7 +92,7 @@ trait Parsers { self: Quasiquotes => case _ => gen.mkBlock(stats, doFlatten = true) } case nme.unapply => gen.mkBlock(stats, doFlatten = false) - case other => global.abort("unreachable") + case _ => global.abort("unreachable") } // tq"$a => $b" @@ -106,7 +107,7 @@ trait Parsers { self: Quasiquotes => import treeBuilder.{global => _, unit => _} // q"def foo($x)" - override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = + override def param(owner: Name, implicitmod: Long, caseParam: Boolean): ValDef = if (isHole && lookingAhead { in.token == COMMA || in.token == RPAREN }) { ParamPlaceholder(implicitmod, ident()) } else super.param(owner, implicitmod, caseParam) @@ -232,7 +233,7 @@ trait Parsers { self: Quasiquotes => object ForEnumeratorParser extends Parser { def entryPoint = { parser => val enums = parser.enumerator(isFirst = false, allowNestedIf = false) - assert(enums.length == 1) + assert(enums.length == 1, "Require one enumerator") implodePatDefs(enums.head) } } diff --git 
a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala index 8fd2b4a53efe..fc4b03e19d9c 100644 --- a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala +++ b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -32,14 +32,14 @@ trait Placeholders { self: Quasiquotes => lazy val code = { val sb = new StringBuilder() - def appendPart(value: String, pos: Position) = { + def appendPart(value: String, pos: Position): Unit = { val start = sb.length sb.append(value) val end = sb.length posMap += pos -> ((start, end)) } - def appendHole(tree: Tree, rank: Rank) = { + def appendHole(tree: Tree, rank: Rank): Unit = { val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX)) sb.append(placeholderName) val holeTree = @@ -68,7 +68,7 @@ trait Placeholders { self: Quasiquotes => object holeMap { private val underlying = mutable.LinkedHashMap.empty[String, Hole] private val accessed = mutable.Set.empty[String] - def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_)) + def unused: Set[Name] = (underlying.keys.toSet diff accessed).map(TermName(_)) def contains(key: Name): Boolean = underlying.contains(key.toString) def apply(key: Name): Hole = { val skey = key.toString @@ -76,14 +76,13 @@ trait Placeholders { self: Quasiquotes => accessed += skey value } - def update(key: Name, hole: Hole) = + def update(key: Name, hole: Hole): Unit = underlying += key.toString -> hole def get(key: Name): Option[Hole] = { val skey = key.toString - underlying.get(skey).map { v => - accessed += skey - v - } + val res = underlying.get(skey) + res.foreach(_ => accessed += skey) + res } def keysIterator: Iterator[TermName] = underlying.keysIterator.map(TermName(_)) } 
diff --git a/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala index f112e7ccade5..16d28d9126bd 100644 --- a/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala +++ b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala index f316500ec20d..7143dd977c36 100644 --- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala +++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,6 @@ package scala.reflect package quasiquotes -import java.lang.UnsupportedOperationException import scala.reflect.reify.{Reifier => ReflectReifier} import scala.reflect.internal.Flags._ @@ -71,8 +70,8 @@ trait Reifiers { self: Quasiquotes => if (isReifyingExpressions) { val freshdefs = nameMap.iterator.map { case (origname, names) => - assert(names.size == 1) - val FreshName(prefix) = origname + assert(names.size == 1, "Require one name") + val FreshName(prefix) = origname: @unchecked val nameTypeName = if (origname.isTermName) tpnme.TermName else tpnme.TypeName val freshName = if (origname.isTermName) nme.freshTermName else nme.freshTypeName // q"val ${names.head}: $u.$nameTypeName = $u.internal.reificationSupport.$freshName($prefix)" @@ -86,7 +85,7 @@ trait Reifiers { self: Quasiquotes => val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false } val cases = if(isVarPattern) { - val Ident(name) :: Nil = freevars + val Ident(name) :: Nil = freevars: @unchecked // cq"$name: $treeType => $SomeModule($name)" :: Nil CaseDef(Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(treeType))), EmptyTree, Apply(Ident(SomeModule), List(Ident(name)))) :: Nil @@ -104,9 +103,9 @@ trait Reifiers { self: Quasiquotes => } val guard = nameMap.collect { case (_, nameset) if nameset.size >= 2 => - nameset.toList.sliding(2).map { case List(n1, n2) => - // q"$n1 == $n2" - Apply(Select(Ident(n1), nme.EQ), List(Ident(n2))) + nameset.toList.sliding(2).map { + case List(n1, n2) => Apply(Select(Ident(n1), nme.EQ), List(Ident(n2))) // q"$n1 == $n2" + case x => throw new MatchError(x) } }.flatten.reduceOption[Tree] { (l, r) => // q"$l && $r" @@ -320,7 +319,7 @@ trait Reifiers { self: Quasiquotes => */ def group[T](lst: List[T])(similar: (T, T) => Boolean) = lst.foldLeft[List[List[T]]](List()) { case (Nil, el) => List(List(el)) - case (ll :+ (last @ (lastinit :+ lastel)), el) if similar(lastel, el) => ll :+ (last :+ el) + case (ll :+ (last @ _ :+ 
lastel), el) if similar(lastel, el) => ll :+ (last :+ el) case (ll, el) => ll :+ List(el) } @@ -364,7 +363,7 @@ trait Reifiers { self: Quasiquotes => case ParamPlaceholder(Hole(tree, DotDot)) => tree case SyntacticPatDef(mods, pat, tpt, rhs) => reifyBuildCall(nme.SyntacticPatDef, mods, pat, tpt, rhs) - case SyntacticValDef(mods, p @ Placeholder(h: ApplyHole), tpt, rhs) if h.tpe <:< treeType => + case SyntacticValDef(mods, Placeholder(h: ApplyHole), tpt, rhs) if h.tpe <:< treeType => mirrorBuildCall(nme.SyntacticPatDef, reify(mods), h.tree, reify(tpt), reify(rhs)) } @@ -422,7 +421,7 @@ trait Reifiers { self: Quasiquotes => case List(elem) if fill.isDefinedAt(elem) => fill(elem) case elems => mkList(elems.map(fallback)) } - val head :: tail = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) } + val head :: tail = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }: @unchecked tail.foldLeft[Tree](reifyGroup(head)) { (tree, lst) => Apply(Select(tree, nme.PLUSPLUS), List(reifyGroup(lst))) } } @@ -435,6 +434,7 @@ trait Reifiers { self: Quasiquotes => } val (mods, flags) = modsPlaceholders.map { case ModsPlaceholder(hole: ApplyHole) => hole + case x => throw new MatchError(x) }.partition { hole => if (hole.tpe <:< modsType) true else if (hole.tpe <:< flagsType) false @@ -485,7 +485,7 @@ trait Reifiers { self: Quasiquotes => val mods = m.annotations.collect { case ModsPlaceholder(hole: UnapplyHole) => hole } mods match { case hole :: Nil => - if (m.annotations.length != 1) c.abort(hole.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers") + if (m.annotations.lengthIs != 1) c.abort(hole.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers") ensureNoExplicitFlags(m, hole.pos) hole.treeNoUnlift case _ :: hole :: _ => diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index 012eca623c19..80326d0ba4ac 100644 
--- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,10 +10,12 @@ * additional information regarding copyright ownership. */ -package scala.reflect.reify +package scala.reflect +package reify -import scala.reflect.macros.ReificationException -import scala.reflect.macros.UnexpectedReificationException +import internal.util.StringContextStripMarginOps +import macros.ReificationException +import macros.UnexpectedReificationException trait Errors { self: Reifier => diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala index 9f89d420c3e3..cacca6b692fa 100644 --- a/src/compiler/scala/reflect/reify/Phases.scala +++ b/src/compiler/scala/reflect/reify/Phases.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -37,7 +37,7 @@ trait Phases extends Reshape if (reifyDebug) println("[reshape phase]") tree = reshape.transform(tree) if (reifyDebug) println("[interlude]") - if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)) + if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)) if (reifyDebug) println("[calculate phase]") calculate.traverse(tree) diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index bbc3a0de8849..f1e37b059703 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,9 +19,6 @@ import scala.reflect.reify.utils.Utils /** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type. * See more info in the comments to `reify` in scala.reflect.api.Universe. 
- * - * @author Martin Odersky - * @since 2.10 */ abstract class Reifier extends States with Phases @@ -70,7 +67,7 @@ abstract class Reifier extends States val result = reifee match { case tree: Tree => - reifyTrace("reifying = ")(if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) + reifyTrace("reifying = ")(if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) reifyTrace("reifee is located at: ")(tree.pos) reifyTrace("universe = ")(universe) reifyTrace("mirror = ")(mirror) @@ -80,7 +77,7 @@ abstract class Reifier extends States val rtree = pipeline(tree) val tpe = typer.packedType(tree, NoSymbol) - val ReifiedType(_, _, tpeSymtab, _, rtpe, tpeReificationIsConcrete) = `package`.reifyType(global)(typer, universe, mirror, tpe, concrete = false) + val ReifiedType(_, _, tpeSymtab, _, rtpe, tpeReificationIsConcrete) = `package`.reifyType(global)(typer, universe, mirror, tpe, concrete = false): @unchecked state.reificationIsConcrete &= tpeReificationIsConcrete state.symtab ++= tpeSymtab ReifiedTree(universe, mirror, symtab, rtree, tpe, rtpe, reificationIsConcrete) diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala index c24d8752fd28..a8a7dbc1241a 100644 --- a/src/compiler/scala/reflect/reify/States.scala +++ b/src/compiler/scala/reflect/reify/States.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,7 +24,7 @@ trait States { * might cause subsequent reification (e.g. when filling in signatures and annotations for syms). 
* * This is a mess in the face of nested reifications, splices and inlining of thereof, - * so I made `SymbolTable` immutable, which brought a significant amount of sanity. + * so I made `SymbolTable` immutable, which brought a significant amount of confidence. * * However that wasn't enough. Sure, symbol table became immutable, but the reifier still needed * to mutate its `symtab` field during reification. This caused nasty desyncs between the table being encoded diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index b829183e3711..b31d2201fe2f 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ b/src/compiler/scala/reflect/reify/Taggers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -78,37 +78,35 @@ abstract class Taggers { translatingReificationErrors(materializer) } try c.typecheck(result) - catch { case terr @ TypecheckException(pos, msg) => failTag(result, terr) } + catch { case terr: TypecheckException => failTag(result, terr) } } def materializeExpr(universe: Tree, mirror: Tree, expr: Tree): Tree = { val result = translatingReificationErrors(c.reifyTree(universe, mirror, expr)) try c.typecheck(result) - catch { case terr @ TypecheckException(pos, msg) => failExpr(result, terr) } + catch { case terr: TypecheckException => failExpr(result, terr) } } private def translatingReificationErrors(materializer: => Tree): Tree = { try materializer catch { case ReificationException(pos, msg) => - c.abort(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling - case UnexpectedReificationException(pos, err, cause) if cause != null => + c.abort(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the confidence of exception handling + case 
UnexpectedReificationException(_, _, cause) if cause != null => throw cause } } private def failTag(result: Tree, reason: Any): Nothing = { - val Apply(TypeApply(fun, List(tpeTree)), _) = c.macroApplication + val Apply(TypeApply(fun, List(tpeTree)), _) = c.macroApplication: @unchecked val tpe = tpeTree.tpe - val PolyType(_, MethodType(_, tagTpe)) = fun.tpe + val PolyType(_, MethodType(_, tagTpe)) = fun.tpe: @unchecked val tagModule = tagTpe.typeSymbol.companionSymbol - if (c.compilerSettings.contains("-Xlog-implicits")) - c.echo(c.enclosingPosition, s"cannot materialize ${tagModule.name}[$tpe] as $result because:\n$reason") c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe)) } private def failExpr(result: Tree, reason: Any): Nothing = { - val Apply(_, expr :: Nil) = c.macroApplication + val Apply(_, expr :: Nil) = c.macroApplication: @unchecked c.abort(c.enclosingPosition, s"Cannot materialize $expr as $result because:\n$reason") } } diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala index 83356aa19c23..a7864168cc9d 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -51,7 +51,7 @@ trait GenAnnotationInfos { } // if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important - val Apply(Select(New(tpt), name), args) = annotationToTree(ann) + val Apply(Select(New(tpt), name), args) = annotationToTree(ann): @unchecked val reifiedAtp = mirrorCall(nme.Select, mirrorCall(nme.New, mirrorCall(nme.TypeTree, reifyType(tpt.tpe))), reify(name)) val reifiedAnnRepr = mirrorCall(nme.Apply, reifiedAtp, reifyList(args)) mirrorFactoryCall(nme.Annotation, reifiedAnnRepr) diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala index d083eb17daf0..0ae7bd3effce 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala index 429ee203027d..5d78f2eeea32 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -23,6 +23,6 @@ trait GenPositions { // but I can hardly imagine when one would need a position that points to the reified code // usually reified trees are used to compose macro expansions or to be fed to the runtime compiler // however both macros and toolboxes have their own means to report errors in synthetic trees - def reifyPosition(pos: Position): Tree = - reifyMirrorObject(NoPosition) + @annotation.nowarn + def reifyPosition(pos: Position): Tree = reifyMirrorObject(NoPosition) } diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala index cac858d57f9f..f3217c23ab8e 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -184,7 +184,7 @@ trait GenSymbols { val reification = reificode(sym) import reification.{name, binding} val tree = reification.tree updateAttachment ReifyBindingAttachment(binding) - state.symtab += (sym, name.toTermName, tree) + state.symtab = state.symtab.add(sym, name.toTermName, tree) } fromSymtab } diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index 2949cff99817..a3a1040298b3 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -21,7 +21,7 @@ trait GenTrees { // unfortunately, these are necessary to reify AnnotatedTypes // I'd gladly get rid of them, but I don't fancy making a metaprogramming API that doesn't work with annotated types - // luckily for our sanity, these vars are mutated only within a very restricted code execution path + // luckily for our confidence, these vars are mutated only within a very restricted code execution path def reifyTreeSymbols: Boolean = state.reifyTreeSymbols def reifyTreeTypes: Boolean = state.reifyTreeTypes @@ -117,7 +117,7 @@ trait GenTrees { abort("free var local to the reifee, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym)) state.symtab ++= inlinedSymtab rtree - case tree => + case _ => val migrated = Apply(Select(splicee, nme.in), List(Ident(nme.MIRROR_SHORT))) Select(migrated, nme.tree) } diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala index d3bcaf7676ca..f555607c7022 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -46,35 +46,35 @@ trait GenTypes { if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic) Select(Select(reify(tsym), nme.asType), nme.toTypeConstructor) else tpe match { - case tpe : NoType.type => + case tpe: NoType.type => reifyMirrorObject(tpe) - case tpe : NoPrefix.type => + case tpe: NoPrefix.type => reifyMirrorObject(tpe) - case tpe @ ThisType(root) if root.isRoot => + case ThisType(root) if root.isRoot => mirrorBuildCall(nme.thisPrefix, mirrorMirrorSelect(nme.RootClass)) - case tpe @ ThisType(empty) if empty.isEmptyPackageClass => + case ThisType(empty) if empty.isEmptyPackageClass => mirrorBuildCall(nme.thisPrefix, mirrorMirrorSelect(nme.EmptyPackageClass)) - case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic => + case ThisType(clazz) if clazz.isModuleClass && clazz.isStatic => val module = reify(clazz.sourceModule) val moduleClass = Select(Select(module, nme.asModule), nme.moduleClass) mirrorBuildCall(nme.ThisType, moduleClass) - case tpe @ ThisType(sym) => + case ThisType(sym) => reifyBuildCall(nme.ThisType, sym) - case tpe @ SuperType(thistpe, supertpe) => + case SuperType(thistpe, supertpe) => reifyBuildCall(nme.SuperType, thistpe, supertpe) - case tpe @ SingleType(pre, sym) => + case SingleType(pre, sym) => reifyBuildCall(nme.SingleType, pre, sym) - case tpe @ ConstantType(value) => + case ConstantType(value) => mirrorBuildCall(nme.ConstantType, reifyProduct(value)) - case tpe @ TypeRef(pre, sym, args) => + case TypeRef(pre, sym, args) => reifyBuildCall(nme.TypeRef, pre, sym, args) - case tpe @ TypeBounds(lo, hi) => + case TypeBounds(lo, hi) => reifyBuildCall(nme.TypeBounds, lo, hi) - case tpe @ NullaryMethodType(restpe) => + case NullaryMethodType(restpe) => reifyBuildCall(nme.NullaryMethodType, restpe) - case tpe @ AnnotatedType(anns, underlying) => + case tpe: AnnotatedType => reifyAnnotatedType(tpe) - case _ => + case tpe => reifyToughType(tpe) } } @@ -88,7 +88,7 @@ trait GenTypes { val tagFlavor = if (concrete) 
tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString // if this fails, it might produce the dreaded "erroneous or inaccessible type" error - // to find out the whereabouts of the error run scalac with -Ydebug + // to find out the whereabouts of the error run scalac with -Vdebug if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe)) val result = typer.resolveTypeTag(defaultErrorPosition, universe.tpe, tpe, concrete = concrete, allowMaterialization = false) match { @@ -136,7 +136,7 @@ trait GenTypes { val result = typer.silent(silentTyper => silentTyper.context.withMacrosDisabled(searchForManifest(silentTyper))) result match { case analyzer.SilentResultValue(result) => result - case analyzer.SilentTypeError(_) => EmptyTree + case _: analyzer.SilentTypeError => EmptyTree } } @@ -165,8 +165,9 @@ trait GenTypes { * I.e. we can compile the code that involves `ru.Type`, but we cannot serialize an instance of `ru.Type`. */ private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match { - case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential => + case TypeRef(pre @ SingleType(_, _), sym, args) if sym.isAbstractType && !sym.isExistential => mirrorBuildCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args)) + case x => throw new MatchError(x) } /** Reify an annotated type, i.e. 
the one that makes us deal with AnnotationInfos */ @@ -186,18 +187,18 @@ trait GenTypes { tpe match { case tpe @ RefinedType(parents, decls) => - reifySymDef(tpe.typeSymbol) + List(tpe.typeSymbol).foreach(reifySymDef) mirrorBuildCall(nme.RefinedType, reify(parents), reifyScope(decls), reify(tpe.typeSymbol)) - case tpe @ ExistentialType(tparams, underlying) => - tparams foreach reifySymDef + case ExistentialType(tparams, underlying) => + tparams.foreach(reifySymDef) reifyBuildCall(nme.ExistentialType, tparams, underlying) case tpe @ ClassInfoType(parents, decls, clazz) => - reifySymDef(clazz) + List(clazz).foreach(reifySymDef) mirrorBuildCall(nme.ClassInfoType, reify(parents), reifyScope(decls), reify(tpe.typeSymbol)) - case tpe @ MethodType(params, restpe) => + case MethodType(params, restpe) => params foreach reifySymDef reifyBuildCall(nme.MethodType, params, restpe) - case tpe @ PolyType(tparams, underlying) => + case PolyType(tparams, underlying) => tparams foreach reifySymDef reifyBuildCall(nme.PolyType, tparams, underlying) case _ => diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index 5a7b7450b435..19f8a84a742e 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,6 +13,8 @@ package scala.reflect.reify package codegen +import scala.annotation.tailrec + trait GenUtils { self: Reifier => @@ -42,7 +44,7 @@ trait GenUtils { def call(fname: String, args: Tree*): Tree = Apply(termPath(fname), args.toList) - def mirrorSelect(name: String): Tree = termPath(nme.UNIVERSE_PREFIX + name) + def mirrorSelect(name: String): Tree = termPath(nme.UNIVERSE_PREFIX.decoded + name) def mirrorSelect(name: TermName): Tree = mirrorSelect(name.toString) def mirrorMirrorSelect(name: TermName): Tree = @@ -88,7 +90,7 @@ trait GenUtils { val lastName = mkName(parts.last) if (prefixParts.isEmpty) Ident(lastName) else { - val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _)) + val prefixTree = prefixParts.tail.foldLeft(Ident(prefixParts.head): Tree)(Select(_, _)) Select(prefixTree, lastName) } } @@ -112,9 +114,10 @@ trait GenUtils { case _ => false } - def isCrossStageTypeBearer(tree: Tree): Boolean = tree match { + @tailrec + final def isCrossStageTypeBearer(tree: Tree): Boolean = tree match { case TypeApply(hk, _) => isCrossStageTypeBearer(hk) - case Select(sym @ Select(_, ctor), nme.apply) if ctor == nme.WeakTypeTag || ctor == nme.TypeTag || ctor == nme.Expr => true + case Select(Select(_, nme.WeakTypeTag|nme.TypeTag|nme.Expr), nme.apply) => true case _ => false } diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index b647e9d202df..3c4f85abc9b7 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -72,7 +72,7 @@ package object reify { if (tpe.isSpliceable) { val classTagInScope = typer0.resolveClassTag(enclosingMacroPosition, tpe, allowMaterialization = false) if (!classTagInScope.isEmpty) return Select(classTagInScope, nme.runtimeClass) - if (concrete) throw ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe)) + if (concrete) throw ReificationException(enclosingMacroPosition, s"type $tpe is an unresolved spliceable type") } tpe.dealiasWiden match { diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala index a79d3a47d130..b472e7e261ac 100644 --- a/src/compiler/scala/reflect/reify/phases/Calculate.scala +++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,7 +19,10 @@ trait Calculate { import global._ implicit class RichCalculateSymbol(sym: Symbol) { - def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) } + def metalevel: Int = { + assert(sym != null && sym != NoSymbol, "Missing symbol") + localSymbols.getOrElse(sym, 0) + } def isLocalToReifee = (localSymbols contains sym) // todo. how do I account for local skolems? } @@ -38,9 +41,9 @@ trait Calculate { } /** - * Merely traverses the reifiee and records symbols local to the reifee along with their metalevels. + * Merely traverses the target and records symbols local to the reifee along with their metalevels. 
*/ - val calculate = new Traverser { + val calculate: Traverser = new Traverser { // see the explanation of metalevels in `Metalevels` var currMetalevel = 1 @@ -57,8 +60,8 @@ trait Calculate { bindRelatedSymbol(tree.symbol.moduleClass, "moduleClass") bindRelatedSymbol(tree.symbol.companionClass, "companionClass") bindRelatedSymbol(tree.symbol.companionModule, "companionModule") - Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") } - Some(tree) collect { case labelDef: LabelDef => labelDef.params foreach (param => bindRelatedSymbol(param.symbol, "labelParam")) } + tree.symbol match { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") case _ => } + tree match { case labelDef: LabelDef => labelDef.params.foreach(param => bindRelatedSymbol(param.symbol, "labelParam")) case _ => } def bindRelatedSymbol(related: Symbol, name: String): Unit = if (related != null && related != NoSymbol) { if (reifyDebug) println("boundSym (" + name + "): " + related) diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index c1f3af723d93..88d768c31e4e 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -72,7 +72,7 @@ trait Metalevels { * * Hence some bindings become cross-stage, which is not bad per se (in fact, some cross-stage bindings have sane semantics, as in the example above). * However this affects freevars, since they are delicate inter-dimensional beings that refer to both current and next planes of existence. 
- * When splicing tears the fabric of the reality apart, some freevars have to go single-dimensional to retain their sanity. + * When splicing tears the fabric of the reality apart, some freevars have to go single-dimensional to retain their confidence. * * Example 2. Consider the following snippet: * @@ -113,7 +113,7 @@ trait Metalevels { * The reasoning from Example 2 still holds here - we do need to inline the freevar that refers to x. * However, we must not touch anything inside the splice'd block, because it's not getting reified. */ - val metalevels = new Transformer { + val metalevels: AstTransformer = new AstTransformer { var insideSplice = false val inlineableBindings = mutable.Map[TermName, Tree]() diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala index 02cf4ec09bf8..5c9472823492 100644 --- a/src/compiler/scala/reflect/reify/phases/Reify.scala +++ b/src/compiler/scala/reflect/reify/phases/Reify.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 7c6eb5d5d2ed..b04a64575798 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,6 +13,7 @@ package scala.reflect.reify package phases +import scala.annotation.{tailrec, unused} import scala.tools.nsc.symtab.Flags._ trait Reshape { @@ -36,7 +37,7 @@ trait Reshape { * * Transforming Annotated(annot, expr) into Typed(expr, TypeTree(Annotated(annot, _)) * * Non-idempotencies of the typechecker: https://github.com/scala/bug/issues/5464 */ - val reshape = new Transformer { + val reshape: AstTransformer = new AstTransformer { var currentSymbol: Symbol = NoSymbol override def transform(tree0: Tree) = { @@ -189,20 +190,21 @@ trait Reshape { CompoundTypeTree(Template(parents1, self, stats1)) } + @tailrec private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match { - case ty @ Typed(expr1, tpt) => + case ty @ Typed(_, tpt) => if (reifyDebug) println("reify typed: " + tree) val original = tpt match { case tt @ TypeTree() => tt.original case tpt => tpt } val annotatedArg = { + @tailrec def loop(tree: Tree): Tree = tree match { - case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2) - case annotated1 @ Annotated(ann, arg) => arg + case Annotated(_, annotated2 @ Annotated(_, _)) => loop(annotated2) + case Annotated(_, arg) => arg case _ => EmptyTree } - loop(original) } if (annotatedArg != EmptyTree) { @@ -217,12 +219,13 @@ trait Reshape { if (reifyDebug) println("verdict: wasn't annotated, reify as usual") ty } - case at @ Annotated(annot, arg) => + case at @ Annotated(_, arg) => if (reifyDebug) println("reify type annotations for: " + tree) assert(at.tpe.isInstanceOf[AnnotatedType], "%s (%s)".format(at.tpe, at.tpe.kind)) val annot1 = toPreTyperAnnotation(at.tpe.asInstanceOf[AnnotatedType].annotations(0)) if (reifyDebug) println("originals are: " + annot1) Annotated(annot1, arg).copyAttrs(at) + case x => throw new MatchError(x) } /** Restore pre-typer representation of an annotation. 
@@ -236,18 +239,18 @@ trait Reshape { def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = (jann: @unchecked) match { case LiteralAnnotArg(const) => Literal(const) case ArrayAnnotArg(arr) => Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation) - case NestedAnnotArg(ann) => toPreTyperAnnotation(ann) + case NestedAnnotArg(naa) => toPreTyperAnnotation(naa) } - ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) } + ann.assocs map { case (nme, arg) => NamedArg(Ident(nme), toScalaAnnotation(arg)) } } def extractOriginal: PartialFunction[Tree, Tree] = { case Apply(Select(New(tpt), _), _) => tpt } - assert(extractOriginal.isDefinedAt(ann.original), showRaw(ann.original)) + assert(extractOriginal.isDefinedAt(ann.original), s"$ann has unexpected original ${showRaw(ann.original)}" ) New(TypeTree(ann.atp) setOriginal extractOriginal(ann.original), List(args)) } - private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = { + private def trimAccessors(@unused deff: Tree, stats: List[Tree]): List[Tree] = { val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]() stats collect { case ddef: DefDef => ddef } foreach (defdef => { @@ -278,7 +281,10 @@ trait Reshape { var flags1 = flags & ~LOCAL if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE val privateWithin1 = ddef.mods.privateWithin - val annotations1 = accessors(vdef).foldLeft(annotations)((curr, acc) => curr ++ (acc.symbol.annotations map toPreTyperAnnotation)) + val annotations1 = + accessors(vdef).foldLeft(annotations){ (curr, acc) => + curr ++ (acc.symbol.annotations.filterNot(_ == UnmappableAnnotation ).map(toPreTyperAnnotation)) + } Modifiers(flags1, privateWithin1, annotations1) setPositions mods.positions } else { mods @@ -304,7 +310,7 @@ trait Reshape { stats1 } - private def trimSyntheticCaseClassMembers(deff: Tree, 
stats: List[Tree]): List[Tree] = + private def trimSyntheticCaseClassMembers(@unused deff: Tree, stats: List[Tree]): List[Tree] = stats filterNot (memberDef => memberDef.isDef && { val isSynthetic = memberDef.symbol.isSynthetic // this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass) diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala index 317407865f07..e47933e0016c 100644 --- a/src/compiler/scala/reflect/reify/utils/Extractors.scala +++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -70,7 +70,7 @@ trait Extractors { } var prev = symtab var next = loop(symtab) - while (next.syms.length < prev.syms.length) { + while (next.syms.sizeCompare(prev.syms) < 0) { prev = next next = loop(prev) } @@ -105,7 +105,7 @@ trait Extractors { } // if we're reifying a MethodType, we can't use it as a type argument for TypeTag ctor - // http://groups.google.com/group/scala-internals/browse_thread/thread/2d7bb85bfcdb2e2 + // https://groups.google.com/group/scala-internals/browse_thread/thread/2d7bb85bfcdb2e2 private def mkTarg(tpe: Type): Tree = ( if ((tpe eq null) || !isUseableAsTypeArg(tpe)) TypeTree(AnyTpe) else TypeTree(tpe) @@ -213,8 +213,8 @@ trait Extractors { object FreeRef { def unapply(tree: Tree): Option[(Tree, TermName)] = tree match { - case Apply(Select(Select(Select(uref @ Ident(_), internal), rs), mkIdent), List(Ident(name: TermName))) - if internal == nme.internal && rs == nme.reificationSupport && mkIdent == nme.mkIdent && name.startsWith(nme.REIFY_FREE_PREFIX) => + case Apply(Select(Select(Select(uref @ Ident(_), nme.internal), nme.reificationSupport), nme.mkIdent), List(Ident(name: 
TermName))) + if name.startsWith(nme.REIFY_FREE_PREFIX) => Some((uref, name)) case _ => None @@ -242,8 +242,8 @@ trait Extractors { object TypeRefToFreeType { def unapply(tree: Tree): Option[TermName] = tree match { - case Apply(Select(Select(uref @ Ident(_), typeRef), apply), List(Select(_, noSymbol), Ident(freeType: TermName), nil)) - if (uref.name == nme.UNIVERSE_SHORT && typeRef == nme.TypeRef && noSymbol == nme.NoSymbol && freeType.startsWith(nme.REIFY_FREE_PREFIX)) => + case Apply(Select(Select(Ident(nme.UNIVERSE_SHORT), nme.TypeRef), apply@_), List(Select(_, nme.NoSymbol), Ident(freeType: TermName), _)) + if freeType.startsWith(nme.REIFY_FREE_PREFIX) => Some(freeType) case _ => None diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index ad11ae8c74f0..b3bfa987038f 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,8 @@ package scala.reflect.reify package utils -import scala.compat.Platform.EOL +import java.lang.System.{lineSeparator => EOL} +import scala.util.matching.Regex.quoteReplacement trait NodePrinters { self: Utils => @@ -31,7 +32,7 @@ trait NodePrinters { // Rolling a full-fledged, robust TreePrinter would be several times more code. // Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier. 
val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2 - val (List(universe, mirror), reification0) = lines + val (List(universe, mirror), reification0) = lines: @unchecked val reification = (for (line <- reification0) yield { var s = line substring 2 s = s.replace(nme.UNIVERSE_PREFIX.toString, "") @@ -50,20 +51,20 @@ trait NodePrinters { val annotations = m.group(3) if (buf.nonEmpty || annotations != "") - buf.append("List(" + annotations + ")") + buf += s"List($annotations)" val privateWithin = "" + m.group(2) if (buf.nonEmpty || privateWithin != "") - buf.append("TypeName(\"" + privateWithin + "\")") + buf += {"TypeName(\"" + privateWithin + "\")"} val bits = m.group(1) if (buf.nonEmpty || bits != "0L") { flagsAreUsed = true - buf.append(show(bits.toLong)) + buf += show(bits.toLong) } val replacement = "Modifiers(" + buf.reverse.mkString(", ") + ")" - java.util.regex.Matcher.quoteReplacement(replacement) + quoteReplacement(replacement) }) s }) diff --git a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala index e36622925186..454e6ca669e2 100644 --- a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala +++ b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -23,8 +23,8 @@ trait StdAttachments { def reifyBinding(tree: Tree): Tree = tree.attachments.get[ReifyBindingAttachment] match { case Some(ReifyBindingAttachment(binding)) => binding - case other => Ident(NoSymbol) + case _ => Ident(NoSymbol) } case class ReifyAliasAttachment(sym: Symbol, alias: TermName) -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 1a22e5b2573f..2b3af81f0367 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,8 +13,9 @@ package scala.reflect.reify package utils -import scala.collection._ -import scala.compat.Platform.EOL +import scala.annotation._ +import scala.collection.{immutable, mutable}, mutable.{ArrayBuffer, ListBuffer} +import java.lang.System.{lineSeparator => EOL} trait SymbolTables { self: Utils => @@ -22,7 +23,7 @@ trait SymbolTables { import global._ class SymbolTable private[SymbolTable] ( - private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](), + private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap.empty[Symbol, Tree], private[SymbolTable] val aliases: List[(Symbol, TermName)] = List[(Symbol, TermName)](), private[SymbolTable] val original: Option[List[Tree]] = None) { @@ -36,12 +37,13 @@ trait SymbolTables { case Some(FreeDef(_, name, _, _, _)) => name case Some(SymDef(_, name, _, _)) => name case None => nme.EMPTY + case x => throw new MatchError(x) } def symAliases(sym: Symbol): List[TermName] = symName(sym) match { case name if name.isEmpty => Nil - case _ => (aliases.distinct groupBy (_._1) mapValues (_ map 
(_._2)))(sym) + case _ => (aliases.distinct.groupMap(_._1)(_._2))(sym) } def symBinding(sym: Symbol): Tree = @@ -49,6 +51,7 @@ trait SymbolTables { case Some(FreeDef(_, _, binding, _, _)) => binding case Some(SymDef(_, _, _, _)) => throw new UnsupportedOperationException(s"${symtab(sym)} is a symdef, hence it doesn't have a binding") case None => EmptyTree + case x => throw new MatchError(x) } def symRef(sym: Symbol): Tree = @@ -56,18 +59,20 @@ trait SymbolTables { case Some(FreeDef(_, name, binding, _, _)) => Ident(name) updateAttachment binding case Some(SymDef(_, name, _, _)) => Ident(name) updateAttachment ReifyBindingAttachment(Ident(sym)) case None => EmptyTree + case x => throw new MatchError(x) } + @deprecated("use add instead", since="2.13.3") def +(sym: Symbol, name: TermName, reification: Tree): SymbolTable = add(sym, name, reification) def +(symDef: Tree): SymbolTable = add(symDef) - def ++(symDefs: TraversableOnce[Tree]): SymbolTable = (this /: symDefs)((symtab, symDef) => symtab.add(symDef)) + def ++(symDefs: IterableOnce[Tree]): SymbolTable = symDefs.iterator.foldLeft(this)((symtab, symDef) => symtab.add(symDef)) def ++(symtab: SymbolTable): SymbolTable = { val updated = this ++ symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases ++ symtab.aliases) } def -(sym: Symbol): SymbolTable = remove(sym) def -(name: TermName): SymbolTable = remove(name) def -(symDef: Tree): SymbolTable = remove(reifyBinding(symDef).symbol) - def --(syms: GenTraversableOnce[Symbol]): SymbolTable = (this /: syms)((symtab, sym) => symtab.remove(sym)) - def --(names: Iterable[TermName]): SymbolTable = (this /: names)((symtab, name) => symtab.remove(name)) - def --(symDefs: TraversableOnce[Tree]): SymbolTable = this -- (symDefs map (reifyBinding(_))) + def --(syms: IterableOnce[Symbol]): SymbolTable = syms.iterator.foldLeft(this)((symtab, sym) => symtab.remove(sym)) + def --(names: List[TermName]): SymbolTable = names.foldLeft(this)((symtab, name) => 
symtab.remove(name)) + def --(symDefs: Iterable[Tree]): SymbolTable = this -- (symDefs map (reifyBinding(_))) def --(symtab: SymbolTable): SymbolTable = { val updated = this -- symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases diff symtab.aliases) } def filterSyms(p: Symbol => Boolean): SymbolTable = this -- (syms filterNot p) def filterAliases(p: (Symbol, TermName) => Boolean): SymbolTable = this -- (aliases filterNot (tuple => p(tuple._1, tuple._2)) map (_._2)) @@ -77,14 +82,15 @@ trait SymbolTables { assert(sym != NoSymbol, showRaw(symDef)) val name = symDef match { case FreeDef(_, name, _, _, _) => name - case SymDef(_, name, _, _) => name + case SymDef(_, name, _, _) => name + case x => throw new MatchError(x) } val newSymtab = if (!(symtab contains sym)) symtab + (sym -> symDef) else symtab val newAliases = aliases :+ (sym -> name) new SymbolTable(newSymtab, newAliases) } - private def add(sym: Symbol, name0: TermName, reification: Tree): SymbolTable = { + def add(@unused sym: Symbol, name0: TermName, reification: Tree): SymbolTable = { def freshName(name0: TermName): TermName = { var name = name0.toString name = name.replace(".type", "$type") @@ -107,7 +113,7 @@ trait SymbolTables { val newAliases = aliases filter (_._2 != name) newSymtab = newSymtab filter { case ((sym, _)) => newAliases exists (_._1 == sym) } newSymtab = newSymtab map { case ((sym, tree)) => - val ValDef(mods, primaryName, tpt, rhs) = tree + val ValDef(mods, primaryName, tpt, rhs) = tree: @unchecked val tree1 = if (!(newAliases contains ((sym, primaryName)))) { val primaryName1 = newAliases.find(_._1 == sym).get._2 @@ -128,10 +134,11 @@ trait SymbolTables { s"""symtab = [$symtabString], aliases = [$aliasesString]${if (original.isDefined) ", has original" else ""}""" } + @nowarn // spurious unused buf.type def debugString: String = { val buf = new StringBuilder buf.append("symbol table = " + (if (syms.length == 0) "" else "")).append(EOL) - syms foreach (sym => 
buf.append(symDef(sym)).append(EOL)) + syms.foreach(sym => buf.append(symDef(sym)).append(EOL)) buf.delete(buf.length - EOL.length, buf.length) buf.toString } @@ -161,8 +168,8 @@ trait SymbolTables { reifier.state.symtab = symtab0.asInstanceOf[reifier.SymbolTable] def currtab = reifier.symtab.asInstanceOf[SymbolTable] try { - val cumulativeSymtab = mutable.ArrayBuffer[Tree](symtab0.symtab.values.toList: _*) - val cumulativeAliases = mutable.ArrayBuffer[(Symbol, TermName)](symtab0.aliases: _*) + val cumulativeSymtab = ArrayBuffer[Tree](symtab0.symtab.values.toList: _*) + val cumulativeAliases = ArrayBuffer[(Symbol, TermName)](symtab0.aliases: _*) def fillInSymbol(sym: Symbol): Tree = { if (reifyDebug) println("Filling in: %s (%s)".format(sym, sym.accurateKindString)) @@ -198,7 +205,7 @@ trait SymbolTables { } val withAliases = cumulativeSymtab flatMap (entry => { - val result = mutable.ListBuffer[Tree]() + val result = ListBuffer[Tree]() result += entry val sym = reifyBinding(entry).symbol if (sym != NoSymbol) diff --git a/src/compiler/scala/reflect/reify/utils/Utils.scala b/src/compiler/scala/reflect/reify/utils/Utils.scala index a609a336f201..585ab45c3afe 100644 --- a/src/compiler/scala/reflect/reify/utils/Utils.scala +++ b/src/compiler/scala/reflect/reify/utils/Utils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -30,4 +30,4 @@ trait Utils extends NodePrinters val reifyDebug = global.settings.Yreifydebug.value val reifyCopypaste = global.settings.Yreifycopypaste.value val reifyTrace = scala.tools.nsc.util.trace when reifyDebug -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/ant/ClassloadVerify.scala b/src/compiler/scala/tools/ant/ClassloadVerify.scala deleted file mode 100644 index acc15d5f3437..000000000000 --- a/src/compiler/scala/tools/ant/ClassloadVerify.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant - -import org.apache.tools.ant.Project -import org.apache.tools.ant.types.{Path, Reference} -import scala.collection.JavaConverters._ -import scala.tools.util.VerifyClass - -class ClassloadVerify extends ScalaMatchingTask { - - /** The class path to use for this compilation. */ - protected var classpath: Option[Path] = None - - /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `classpath`. 
*/ - def setClasspath(input: Path) { - classpath = Some(input) - } - - def setClasspathref(input: Reference) { - val p = new Path(getProject()) - p.setRefid(input) - classpath = Some(p) - } - - private def getClasspath: Array[String] = classpath match { - case None => buildError("Member 'classpath' is empty.") - case Some(x) => x.list.toArray - } - - override def execute(): Unit = { - val results = VerifyClass.run(getClasspath).asScala - results foreach (r => log("Checking: " + r, Project.MSG_DEBUG)) - val errors = for((name, error) <- results; if error != null) yield (name,error) - if(errors.isEmpty) { - // TODO - Log success - log("Classload verification succeeded with " + results.size + " classes.", Project.MSG_INFO) - } else { - for((name, error) <- errors) { - log(name + " failed verification with: " + error, Project.MSG_ERR) - } - buildError(errors.size + " classload verification errors on " + results.size + " classes.") - } - } - -} diff --git a/src/compiler/scala/tools/ant/FastScalac.scala b/src/compiler/scala/tools/ant/FastScalac.scala deleted file mode 100644 index b8bf3a053f75..000000000000 --- a/src/compiler/scala/tools/ant/FastScalac.scala +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant - -import org.apache.tools.ant.AntClassLoader -import org.apache.tools.ant.taskdefs.Java -import org.apache.tools.ant.types.Path - -import scala.tools.nsc.Settings -import scala.tools.nsc.io.File -import scala.tools.nsc.settings.FscSettings -import scala.reflect.internal.util.ScalaClassLoader - -/** An Ant task to compile with the fast Scala compiler (`fsc`). 
- * - * In addition to the attributes shared with the `Scalac` task, this task - * also accepts the following attributes: - * - `reset` - * - `server` - * - `shutdown` - * - `ipv4` - * - `maxIdle` - * - * @author Stephane Micheloud - */ -class FastScalac extends Scalac { - - private var resetCaches: Boolean = false - - private var serverAddr: Option[String] = None - - private var shutdownServer: Boolean = false - - private var useIPv4: Boolean = false - - private var idleMinutes: Option[Int] = None - -/*============================================================================*\ -** Properties setters ** -\*============================================================================*/ - - /** Sets the `reset` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value for `reset`. - */ - def setReset(input: Boolean) { resetCaches = input } - - /** Sets the `server` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value for `server`. - */ - def setServer(input: String) { serverAddr = Some(input) } - - /** Sets the `shutdown` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value for `shutdown`. - */ - def setShutdown(input: Boolean) { shutdownServer = input } - - /** Sets the `ipv4` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value for `ipv4`. - */ - def setIPv4(input: Boolean) { useIPv4 = input } - - /** Sets the `maxIdle` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value for `maxIdle`. - */ - def setMaxIdle(input: Int) { if (0 <= input) idleMinutes = Some(input) } - -/*============================================================================*\ -** The execute method ** -\*============================================================================*/ - - override protected def newSettings(error: String=>Unit): Settings = - new FscSettings(error) - - /** Performs the compilation. 
*/ - override def execute() { - val (settings, sourceFiles, javaOnly) = initialize - if (sourceFiles.isEmpty || javaOnly) - return - - // initialize fsc specific settings - val s = settings.asInstanceOf[FscSettings] // safe (newSettings) - s.reset.value = resetCaches - if (!serverAddr.isEmpty) s.server.value = serverAddr.get - s.shutdown.value = shutdownServer - s.preferIPv4.value = useIPv4 - if (!idleMinutes.isEmpty) s.idleMins.value = idleMinutes.get - - val stringSettings = - List( - /*scalac*/ - s.bootclasspath, s.classpath, s.extdirs, s.dependencyfile, s.encoding, - s.outdir, s.sourcepath, - /*fsc*/ - s.server - ) filter (_.value != "") flatMap (x => List(x.name, x.value)) - - val choiceSettings = - List( - /*scalac*/ - s.debuginfo, s.target - ) filter (x => x.value != x.default) map (x => s"${x.name}:${x.value}") - - val booleanSettings = - List( - /*scalac*/ - s.debug, s.deprecation, s.explaintypes, s.nospecialization, s.nowarn, - s.optimise, s.unchecked, s.usejavacp, s.verbose, - /*fsc*/ - s.preferIPv4, s.reset, s.shutdown - ) filter (_.value) map (_.name) - - val intSettings = - List( - /*fsc*/ - s.idleMins - ) filter (x => x.value != x.default) flatMap (x => List(x.name, x.value.toString)) - - val phaseSetting = { - val s = settings.log - if (s.value.isEmpty) Nil - else List(s"${s.name}:${s.value.mkString(",")}") - } - - val fscOptions = - stringSettings ::: choiceSettings ::: booleanSettings ::: intSettings ::: phaseSetting - - val java = new Java(this) - java setFork true - // use same default memory options as in fsc script - java.createJvmarg() setValue "-Xmx256M" - java.createJvmarg() setValue "-Xms32M" - val scalacPath: Path = { - val path = new Path(getProject) - if (compilerPath.isDefined) path add compilerPath.get - else getClass.getClassLoader match { - case cl: AntClassLoader => - path add new Path(getProject, cl.getClasspath) - case _ => - buildError("Compilation failed because of an internal compiler error; see the error output for details.") 
- } - path - } - java.createJvmarg() setValue ("-Xbootclasspath/a:"+scalacPath) - s.jvmargs.value foreach (java.createJvmarg() setValue _) - - val scalaHome: String = try { - val url = ScalaClassLoader.originOfClass(classOf[FastScalac]).get - File(url.getFile).jfile.getParentFile.getParentFile.getAbsolutePath - } catch { - case _: Throwable => - buildError("Compilation failed because of an internal compiler error; couldn't determine value for -Dscala.home=") - } - java.createJvmarg() setValue "-Dscala.usejavacp=true" - java.createJvmarg() setValue ("-Dscala.home="+scalaHome) - s.defines.value foreach (java.createJvmarg() setValue _) - - java setClassname "scala.tools.nsc.MainGenericRunner" - java.createArg() setValue "scala.tools.nsc.CompileClient" - - // Encode scalac/javac args for use in a file to be read back via "@file.txt" - def encodeScalacArgsFile(t: Traversable[String]) = t map { s => - if(s.find(c => c <= ' ' || "\"'\\".contains(c)).isDefined) - "\"" + s.flatMap(c => (if(c == '"' || c == '\\') "\\" else "") + c ) + "\"" - else s - } mkString "\n" - - // dump the arguments to a file and do "java @file" - val tempArgFile = File.makeTemp("fastscalac") - val tokens = fscOptions ++ (sourceFiles map (_.getPath)) - tempArgFile writeAll encodeScalacArgsFile(tokens) - - val paths = List(Some(tempArgFile.toAbsolute.path), argfile).flatten map (_.toString) - val res = execWithArgFiles(java, paths) - - if (failonerror && res != 0) - buildError("Compilation failed because of an internal compiler error; see the error output for details.") - } -} diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala deleted file mode 100644 index d265a7f01ef8..000000000000 --- a/src/compiler/scala/tools/ant/Same.scala +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package tools.ant - -import java.io.{File, FileInputStream} - -import org.apache.tools.ant.{BuildException, Project} -import org.apache.tools.ant.util.{FileNameMapper, IdentityMapper} - -import org.apache.tools.ant.types.Mapper - -/** An Ant task that, for a set of files, tests them for byte-to-byte - * equality with one or more other files. - * - * This task supports the following parameters as attributes: - * - `dir` - * - `todir` - * - `resultproperty` (a property to be set when all tested files pairs are - * equal, if not set, the task will fail instead), - * - `failing` (whether to stop if all files are not equal). - * - * It also support the following nested elements: - * - `mapper` (a mapper from original files to test files). - * - * This task itself defines a fileset that represents the set of original files. - * - * @author Gilles Dubochet - * @version 1.0 */ -@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask { -/*============================================================================*\ -** Ant user-properties ** -\*============================================================================*/ - - private var origin: Option[File] = None - private var destination: Option[File] = None - - private var resultProperty: Option[String] = None - private var failing: Boolean = false - - private var mapperElement: Option[Mapper] = None - -/*============================================================================*\ -** Properties setters ** -\*============================================================================*/ - - def setDir(input: File) = - origin = Some(input) - - def setTodir(input: File) = - destination = Some(input) - - def setResultproperty(input: String) = - resultProperty = Some(input) - - def setFailondifferent(input: Boolean) = - failing = input - - def createMapper(): Mapper = 
- if (mapperElement.isEmpty) { - val mapper = new Mapper(getProject) - mapperElement = Some(mapper) - mapper - } - else throw new BuildException("Cannot define more than one mapper", getLocation) - - def add(fileNameMapper: FileNameMapper) = - createMapper().add(fileNameMapper) - -/*============================================================================*\ -** Properties getters ** -\*============================================================================*/ - - private def getMapper: FileNameMapper = mapperElement match { - case None => - new IdentityMapper() - case Some(me) => - me.getImplementation - } - -/*============================================================================*\ -** Support methods ** -\*============================================================================*/ - - private var allEqualNow = true - - /** Tests if all mandatory attributes are set and valid. */ - private def validateAttributes() = { - if (origin.isEmpty) sys.error("Mandatory attribute 'dir' is not set.") - if (destination.isEmpty) sys.error("Mandatory attribute 'todir' is not set.") - } - - private def reportDiff(f1: File, f2: File) = { - allEqualNow = false - log("File '" + f1 + "' is different from correspondant.") - } - - private def reportMissing(f1: File) = { - allEqualNow = false - log("File '" + f1 + "' has no correspondant.") - } - -/*============================================================================*\ -** The big execute method ** -\*============================================================================*/ - - override def execute() = { - validateAttributes() - val mapper = getMapper - allEqualNow = true - val originNames: Array[String] = getDirectoryScanner(origin.get).getIncludedFiles - val bufferSize = 1024 - val originBuffer = new Array[Byte](bufferSize) - val destBuffer = new Array[Byte](bufferSize) - for ( - originName: String <- originNames; - destName: String <- mapper.mapFileName(originName) - ) { - 
//println("originName="+originName) - //println("destName ="+destName) - var equalNow = true - val originFile = new File(origin.get, originName) - val destFile = new File(destination.get, destName) - if (originFile.canRead && destFile.canRead) { - val originStream = new FileInputStream(originFile) - val destStream = new FileInputStream(destFile) - var originRemaining = originStream.read(originBuffer) - var destRemaining = destStream.read(destBuffer) - while (originRemaining > 0 && equalNow) { - if (originRemaining == destRemaining) - for (idx <- 0 until originRemaining) - equalNow = equalNow && (originBuffer(idx) == destBuffer(idx)) - else - equalNow = false - originRemaining = originStream.read(originBuffer) - destRemaining = destStream.read(destBuffer) - } - if (destRemaining > 0) - equalNow = false - if (!equalNow) - reportDiff(originFile, destFile) - originStream.close - destStream.close - } - else reportMissing(originFile) - } - if (!allEqualNow) - if (failing) - sys.error("There were differences between '" + origin.get + "' and '" + destination.get + "'") - else - log("There were differences between '" + origin.get + "' and '" + destination.get + "'") - else { - if (!resultProperty.isEmpty) - getProject.setProperty(resultProperty.get, "yes") - log("All files in '" + origin.get + "' and '" + destination.get + "' are equal", Project.MSG_VERBOSE) - } - } - -} diff --git a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala deleted file mode 100644 index b9fe9b4d91a6..000000000000 --- a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.ant - -import org.apache.tools.ant.{ Task, BuildException } -import org.apache.tools.ant.taskdefs.MatchingTask - -trait ScalaTask { - self: Task => - - /** Generates a build error. Error location will be the - * current task in the ant file. - * - * @param message A message describing the error. - * @throws BuildException A build error exception thrown in every case. - */ - protected def buildError(message: String): Nothing = - throw new BuildException(message, getLocation()) -} - -abstract class ScalaMatchingTask extends MatchingTask with ScalaTask diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala deleted file mode 100644 index f2ff15d355f7..000000000000 --- a/src/compiler/scala/tools/ant/ScalaTool.scala +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant - -import java.io.{File, FileWriter} -import org.apache.tools.ant.types.{Path, Reference} - -/** An Ant task that generates a shell or batch script to execute a - * Scala program. - * - * This task can take the following parameters as attributes: - * - `file` (mandatory), - * - `class` (mandatory), - * - `platforms`, - * - `classpath`, - * - `properties`, - * - `javaflags`, - * - `toolflags`. 
- * - * @author Gilles Dubochet - * @version 1.1 - */ -class ScalaTool extends ScalaMatchingTask { - - private def emptyPath = new Path(getProject) - -/*============================================================================*\ -** Ant user-properties ** -\*============================================================================*/ - - abstract class PermissibleValue { - val values: List[String] - def isPermissible(value: String): Boolean = - (value == "") || values.exists(_.startsWith(value)) - } - - /** Defines valid values for the platforms property. */ - object Platforms extends PermissibleValue { - val values = List("unix", "windows") - } - - /** The path to the exec script file. `".bat"` will be appended for the - * Windows BAT file, if generated. */ - private var file: Option[File] = None - - /** The main class to run. */ - private var mainClass: Option[String] = None - - /** Supported platforms for the script. Either `"unix"` or `"windows"`. - * Defaults to both. */ - private var platforms: List[String] = List("unix", "windows") - - /** An (optional) path to all JARs that this script depend on. Paths must be - * relative to the scala home directory. If not set, all JAR archives and - * folders in `"lib/"` are automatically added. */ - private var classpath: List[String] = Nil - - /** An (optional) path to JARs that this script depends on relative to the - * ant project's `basedir`. */ - private var classpathPath: Path = emptyPath - - /** Comma-separated Java system properties to pass to the JRE. Properties - * are formatted as `name=value`. Properties `scala.home`, `scala.tool.name` - * and `scala.tool.version` are always set. */ - private var properties: List[(String, String)] = Nil - - /** Additional flags passed to the JRE (`"java [javaFlags] class"`). */ - private var javaFlags: String = "" - - /** Additional flags passed to the tool (`"java class [toolFlags]"`). - * Can only be set when a main class is defined. 
*/ - private var toolFlags: String = "" - -/*============================================================================*\ -** Properties setters ** -\*============================================================================*/ - - /** Sets the file attribute. */ - def setFile(input: File) = - file = Some(input) - - /** Sets the main class attribute. */ - def setClass(input: String) = - mainClass = Some(input) - - /** Sets the platforms attribute. */ - def setPlatforms(input: String) = { - platforms = input.split(",").toList.flatMap { s: String => - val st = s.trim - if (Platforms.isPermissible(st)) - (if (input != "") List(st) else Nil) - else { - buildError("Platform " + st + " does not exist.") - } - } - } - - /** Sets the classpath with which to run the tool. - * - * Note that this mechanism of setting the classpath is generally preferred - * for general purpose scripts, as this does not assume all elements are - * relative to the Ant `basedir`. Additionally, the platform specific - * demarcation of any script variables (e.g. `${SCALA_HOME}` or - * `%SCALA_HOME%`) can be specified in a platform independent way (e.g. - * `@SCALA_HOME@`) and automatically translated for you. - */ - def setClassPath(input: String) { - classpath = classpath ::: input.split(",").toList - } - - /** - * A special method that allows ant classpath path definitions to be nested - * within this ant task. - */ - def createClassPath: Path = classpathPath.createPath() - - /** - * Adds an Ant Path reference to the tool's classpath. - * Note that all entries in the path must exist either relative to the project - * basedir or with an absolute path to a file in the filesystem. As a result, - * this is not a mechanism for setting the classpath for more general use scripts. - */ - def setClassPathRef(input: Reference) { - val tmpPath = emptyPath - tmpPath.setRefid(input) - classpath = classpath ::: tmpPath.list.toList - } - - /** Sets JVM properties that will be set whilst running the tool. 
*/ - def setProperties(input: String) = { - properties = input.split(",").toList.flatMap { s: String => - val st = s.trim - val stArray = st.split("=", 2) - if (stArray.length == 2) { - if (input != "") List((stArray(0), stArray(1))) else Nil - } - else - buildError("Property " + st + " is not formatted properly.") - } - } - - /** Sets flags to be passed to the Java interpreter. */ - def setJavaflags(input: String) = - javaFlags = input.trim - - /** Sets flags to be passed to the tool. */ - def setToolflags(input: String) = - toolFlags = input.trim - -/*============================================================================*\ -** Properties getters ** -\*============================================================================*/ - - /** Gets the value of the classpath attribute in a Scala-friendly form. - * @return The class path as a list of files. */ - private def getUnixclasspath: String = - transposeVariableMarkup(classpath.mkString("", ":", "").replace('\\', '/'), "${", "}") - - /** Gets the value of the classpath attribute in a Scala-friendly form. - * @return The class path as a list of files. 
*/ - private def getWinclasspath: String = - transposeVariableMarkup(classpath.mkString("", ";", "").replace('/', '\\'), "%", "%") - - private def getProperties: String = - properties.map({ - case (name,value) => "-D" + name + "=\"" + value + "\"" - }).mkString("", " ", "") - -/*============================================================================*\ -** Compilation and support methods ** -\*============================================================================*/ - - // XXX encoding and generalize - private def getResourceAsCharStream(clazz: Class[_], resource: String): Stream[Char] = { - val stream = clazz.getClassLoader() getResourceAsStream resource - if (stream == null) Stream.empty - else Stream continually stream.read() takeWhile (_ != -1) map (_.asInstanceOf[Char]) - } - - // Converts a variable like @SCALA_HOME@ to ${SCALA_HOME} when pre = "${" and post = "}" - private def transposeVariableMarkup(text: String, pre: String, post: String) : String = { - val chars = scala.io.Source.fromString(text) - val builder = new StringBuilder() - - while (chars.hasNext) { - val char = chars.next() - if (char == '@') { - var char = chars.next() - val token = new StringBuilder() - while (chars.hasNext && char != '@') { - token.append(char) - char = chars.next() - } - if (token.toString == "") - builder.append('@') - else - builder.append(pre + token.toString + post) - } else builder.append(char) - } - builder.toString - } - - private def readAndPatchResource(resource: String, tokens: Map[String, String]): String = { - val chars = getResourceAsCharStream(this.getClass, resource).iterator - val builder = new StringBuilder() - - while (chars.hasNext) { - val char = chars.next() - if (char == '@') { - var char = chars.next() - val token = new StringBuilder() - while (chars.hasNext && char != '@') { - token.append(char) - char = chars.next() - } - if (tokens.contains(token.toString)) - builder.append(tokens(token.toString)) - else if (token.toString == "") - 
builder.append('@') - else - builder.append("@" + token.toString + "@") - } else builder.append(char) - } - builder.toString - } - - private def writeFile(file: File, content: String) = - if (file.exists() && !file.canWrite()) - buildError("File " + file + " is not writable") - else { - val writer = new FileWriter(file, false) - writer write content - writer.close() - } - -/*============================================================================*\ -** The big execute method ** -\*============================================================================*/ - - /** Performs the tool creation. */ - override def execute() = { - // Tests if all mandatory attributes are set and valid. - if (file.isEmpty) buildError("Attribute 'file' is not set.") - if (mainClass.isEmpty) buildError("Main class must be set.") - val resourceRoot = "scala/tools/ant/templates/" - val patches = Map ( - ("class", mainClass.get), - ("properties", getProperties), - ("javaflags", javaFlags), - ("toolflags", toolFlags) - ) - // Consolidate Paths into classpath - classpath = classpath ::: classpathPath.list.toList - // Generate the scripts - if (platforms contains "unix") { - val unixPatches = patches + (("classpath", getUnixclasspath)) - val unixTemplateResource = resourceRoot + "tool-unix.tmpl" - val unixTemplate = readAndPatchResource(unixTemplateResource, unixPatches) - writeFile(file.get, unixTemplate) - } - if (platforms contains "windows") { - val winPatches = patches + (("classpath", getWinclasspath)) - val winTemplateResource = resourceRoot + "tool-windows.tmpl" - val winTemplate = readAndPatchResource(winTemplateResource, winPatches) - writeFile(new File(file.get.getAbsolutePath() + ".bat"), winTemplate) - } - } - -} diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala deleted file mode 100644 index 9a7523feb518..000000000000 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ /dev/null @@ -1,705 +0,0 @@ -/* - * Scala 
(https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant - -import java.io.{File, PrintWriter, BufferedWriter, FileWriter} - -import org.apache.tools.ant.{ Project, AntClassLoader} -import org.apache.tools.ant.taskdefs.Java -import org.apache.tools.ant.types.{Path, Reference} -import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper, SourceFileScanner} -import org.apache.tools.ant.util.facade.{FacadeTaskHelper, ImplementationSpecificArgument} - -import scala.tools.nsc.{Global, Settings, CompilerCommand} -import scala.tools.nsc.io.{Path => SPath} -import scala.tools.nsc.reporters.{ ConsoleReporter, Reporter } -import scala.tools.nsc.settings.StandardScalaSettings - -/** An Ant task to compile with the new Scala compiler (NSC). - * - * This task can take the following parameters as attributes: - * - `srcdir` (mandatory), - * - `srcref`, - * - `destdir`, - * - `classpath`, - * - `classpathref`, - * - `sourcepath`, - * - `sourcepathref`, - * - `bootclasspath`, - * - `bootclasspathref`, - * - `extdirs`, - * - `extdirsref`, - * - `argfile`, - * - `dependencyfile`, - * - `encoding`, - * - `target`, - * - `force`, - * - `fork`, - * - `logging`, - * - `logphase`, - * - `debuginfo`, - * - `addparams`, - * - `explaintypes`, - * - `deprecation`, - * - `nobootcp`, - * - `nowarn`, - * - `optimise`, - * - `unchecked`, - * - `usejavacp`, - * - `failonerror`, - * - `scalacdebugging`, - * - * It also takes the following parameters as nested elements: - * - `src` (for `srcdir`), - * - `classpath`, - * - `sourcepath`, - * - `bootclasspath`, - * - `extdirs`, - * - `compilerarg`. 
- * - * @author Gilles Dubochet, Stephane Micheloud - */ -class Scalac extends ScalaMatchingTask with ScalacShared { - - /** The unique Ant file utilities instance to use in this task. */ - private val fileUtils = FileUtils.getFileUtils() - -/*============================================================================*\ -** Ant user-properties ** -\*============================================================================*/ - - abstract class PermissibleValue { - val values: List[String] - def isPermissible(value: String): Boolean = - (value == "") || values.exists(_.startsWith(value)) - } - - /** Defines valid values for the logging property. */ - object LoggingLevel extends PermissibleValue { - val values = List("none", "verbose", "debug") - } - - /** Defines valid values for properties that refer to compiler phases. */ - object CompilerPhase extends PermissibleValue { - val values = List("namer", "typer", "pickler", "refchecks", - "uncurry", "tailcalls", "specialize", "explicitouter", - "erasure", "fields", "lambdalift", "constructors", - "flatten", "mixin", "delambdafy", "cleanup", - "jvm", "terminal") - } - - /** Defines valid values for the `target` property. */ - object Target extends PermissibleValue { - val values = StandardScalaSettings.AllPermissibleTargetValues - } - - /** Defines valid values for the `deprecation` and `unchecked` properties. */ - object Flag extends PermissibleValue { - val values = List("yes", "no", "on", "off", "true", "false") - def toBoolean(flag: String) = - if (flag == "yes" || flag == "on" || flag == "true") Some(true) - else if (flag == "no" || flag == "off" || flag == "false") Some(false) - else None - } - - /** The directories that contain source files to compile. */ - protected var origin: Option[Path] = None - /** The directory to put the compiled files in. */ - protected var destination: Option[File] = None - - /** The class path to use for this compilation. 
*/ - protected var classpath: Option[Path] = None - /** The source path to use for this compilation. */ - protected var sourcepath: Option[Path] = None - /** The boot class path to use for this compilation. */ - protected var bootclasspath: Option[Path] = None - /** The path to use when finding scalac - *only used for forking!* */ - protected var compilerPath: Option[Path] = None - /** The external extensions path to use for this compilation. */ - protected var extdirs: Option[Path] = None - - protected var argfile: Option[File] = None - /** The dependency tracking file. */ - protected var dependencyfile: Option[File] = None - /** The character encoding of the files to compile. */ - protected var encoding: Option[String] = None - - // the targeted backend - protected var backend: Option[String] = None - - /** Whether to force compilation of all files or not. */ - protected var force: Boolean = false - /** Whether to fork the execution of scalac */ - protected var fork : Boolean = false - /** If forking, these are the arguments to the JVM */ - protected var jvmArgs : Option[String] = None - /** How much logging output to print. Either none (default), - * verbose or debug. */ - protected var logging: Option[String] = None - /** Which compilation phases should be logged during compilation. */ - protected var logPhase: List[String] = Nil - - /** Instruct the compiler to generate debugging information */ - protected var debugInfo: Option[String] = None - /** Instruct the compiler to use additional parameters */ - protected var addParams: String = "" - /** Instruct the compiler to explain type errors in more detail. */ - protected var explaintypes: Option[Boolean] = None - /** Instruct the compiler to generate deprecation information. */ - protected var deprecation: Option[Boolean] = None - /** Instruct the compiler to not use the boot classpath for the scala jars. */ - protected var nobootcp: Option[Boolean] = None - /** Instruct the compiler to generate no warnings. 
*/ - protected var nowarn: Option[Boolean] = None - /** Instruct the compiler to run optimizations. */ - protected var optimise: Option[Boolean] = None - /** Instruct the compiler to generate unchecked information. */ - protected var unchecked: Option[Boolean] = None - /** Instruct the compiler to use `java.class.path` in classpath resolution. */ - protected var usejavacp: Option[Boolean] = None - /** Indicates whether compilation errors will fail the build; defaults to true. */ - protected var failonerror: Boolean = true - - /** Prints out the files being compiled by the scalac ant task - * (not only the number of files). */ - protected var scalacDebugging: Boolean = false - - /** Encapsulates implementation of specific command line arguments. */ - protected var scalacCompilerArgs = new FacadeTaskHelper("compilerarg") - - /** Helpers */ - private def setOrAppend(old: Option[Path], arg: Path): Option[Path] = old match { - case Some(x) => x append arg ; Some(x) - case None => Some(arg) - } - private def pathAsList(p: Option[Path], name: String): List[File] = p match { - case None => buildError("Member '" + name + "' is empty.") - case Some(x) => x.list.toList map nameToFile - } - private def createNewPath(getter: () => Option[Path], setter: (Option[Path]) => Unit) = { - if (getter().isEmpty) - setter(Some(new Path(getProject))) - - getter().get.createPath() - } - - private def plural(xs: List[Any]) = if (xs.size > 1) "s" else "" - private def plural(x: Int) = if (x > 1) "s" else "" - -/*============================================================================*\ -** Properties setters ** -\*============================================================================*/ - - - /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `origin`. */ - def setSrcdir(input: Path) { - origin = setOrAppend(origin, input) - } - - /** Sets the `origin` as a nested src Ant parameter. - * @return An origin path to be configured. 
*/ - def createSrc(): Path = createNewPath(origin _, p => origin = p) - - /** Sets the `origin` as an external reference Ant parameter. - * @param input A reference to an origin path. */ - def setSrcref(input: Reference) = - createSrc().setRefid(input) - - /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `destination`. */ - def setDestdir(input: File) { destination = Some(input) } - - /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `classpath`. */ - def setClasspath(input: Path) { - classpath = setOrAppend(classpath, input) - } - /** Sets the `compilerPath` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `compilerPath`. */ - def setCompilerPath(input: Path) { - compilerPath = setOrAppend(compilerPath, input) - } - - def createCompilerPath: Path = createNewPath(compilerPath _, p => compilerPath = p) - - /** Sets the `compilerpathref` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `compilerpathref`. */ - def setCompilerPathRef(input: Reference) { - createCompilerPath.setRefid(input) - } - - /** Sets the `classpath` as a nested classpath Ant parameter. - * @return A class path to be configured. */ - def createClasspath(): Path = createNewPath(classpath _, p => classpath = p) - - /** Sets the `classpath` as an external reference Ant parameter. - * @param input A reference to a class path. */ - def setClasspathref(input: Reference) { - createClasspath().setRefid(input) - } - - /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `sourcepath`. */ - def setSourcepath(input: Path) { - sourcepath = setOrAppend(sourcepath, input) - } - - /** Sets the `sourcepath` as a nested sourcepath Ant parameter. - * @return A source path to be configured. 
*/ - def createSourcepath(): Path = createNewPath(sourcepath _, p => sourcepath = p) - - /** Sets the `sourcepath` as an external reference Ant parameter. - * @param input A reference to a source path. */ - def setSourcepathref(input: Reference) { - createSourcepath().setRefid(input) - } - - /** Sets the boot classpath attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `bootclasspath`. */ - def setBootclasspath(input: Path) { - bootclasspath = setOrAppend(bootclasspath, input) - } - - /** Sets the `bootclasspath` as a nested bootclasspath Ant parameter. - * @return A source path to be configured. */ - def createBootclasspath(): Path = createNewPath(bootclasspath _, p => bootclasspath = p) - - /** Sets the `bootclasspath` as an external reference Ant - * parameter. - * @param input A reference to a source path. */ - def setBootclasspathref(input: Reference) = - createBootclasspath().setRefid(input) - - /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `extdirs`. */ - def setExtdirs(input: Path) { - extdirs = setOrAppend(extdirs, input) - } - - /** Sets the `extdirs` as a nested extdirs Ant parameter. - * @return An extensions path to be configured. */ - def createExtdirs(): Path = createNewPath(extdirs _, p => extdirs = p) - - /** Sets the `extdirs` as an external reference Ant parameter. - * @param input A reference to an extensions path. */ - def setExtdirsref(input: Reference) = - createExtdirs().setRefid(input) - - /** Sets the `argfile` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `argfile`. */ - def setArgfile(input: File) { - argfile = Some(input) - } - - /** Sets the `dependencyfile` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `dependencyfile`. */ - def setDependencyfile(input: File) { - dependencyfile = Some(input) - } - - /** Sets the `encoding` attribute. 
Used by [[http://ant.apache.org Ant]]. - * @param input The value of `encoding`. */ - def setEncoding(input: String) { - encoding = Some(input) - } - - /** Sets the `target` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `target`. */ - def setTarget(input: String): Unit = - if (Target.isPermissible(input)) backend = Some(input) - else buildError("Unknown target '" + input + "'") - - /** Sets the `force` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `force`. */ - def setForce(input: Boolean) { force = input } - - /** Sets the `fork` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `fork`. */ - def setFork(input : Boolean) { fork = input } - /** - * Sets the `jvmargs` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `jvmargs` - */ - def setJvmargs(input : String) { - jvmArgs = Some(input) - } - - /** Sets the logging level attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `logging`. */ - def setLogging(input: String) { - if (LoggingLevel.isPermissible(input)) logging = Some(input) - else buildError("Logging level '" + input + "' does not exist.") - } - - /** Sets the `logphase` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `logPhase`. */ - def setLogPhase(input: String) { - logPhase = input.split(",").toList.flatMap { s: String => - val st = s.trim() - if (CompilerPhase.isPermissible(st)) - (if (input != "") List(st) else Nil) - else { - buildError("Phase " + st + " in log does not exist.") - } - } - } - - /** Set the `debug` info attribute. - * @param input The value for `debug`. */ - def setDebuginfo(input: String) { debugInfo = Some(input) } - - /** Set the `addparams` info attribute. - * @param input The value for `addparams`. */ - def setAddparams(input: String) { addParams = input } - - /** Set the `explaintypes` info attribute. 
- * @param input One of the flags `yes/no` or `on/off`. */ - def setExplaintypes(input: String) { - explaintypes = Flag toBoolean input orElse buildError("Unknown explaintypes flag '" + input + "'") - } - - /** Set the `deprecation` info attribute. - * @param input One of the flags `yes/no` or `on/off`. */ - def setDeprecation(input: String) { - deprecation = Flag toBoolean input orElse buildError("Unknown deprecation flag '" + input + "'") - } - - /** Set the `nobootcp` info attribute. - * @param input One of the flags `yes/no` or `on/off`. */ - def setNobootcp(input: String) { - nobootcp = Flag toBoolean input orElse buildError("Unknown nobootcp flag '" + input + "'") - } - - /** Set the `nowarn` info attribute. - * @param input One of the flags `yes/no` or `on/off`. */ - def setNowarn(input: String) { - nowarn = Flag toBoolean input orElse buildError("Unknown nowarn flag '" + input + "'") - } - - /** Set the `optimise` info attribute. - * @param input One of the flags `yes/no` or `on/off`. */ - def setOptimise(input: String) { - optimise = Flag toBoolean input orElse buildError("Unknown optimisation flag '" + input + "'") - } - - /** Set the `unchecked` info attribute. - * @param input One of the flags `yes/no` or `on/off`. */ - def setUnchecked(input: String) { - unchecked = Flag toBoolean input orElse buildError("Unknown unchecked flag '" + input + "'") - } - - /** Set the `usejavacp` info attribute. - * @param input One of the flags `yes/no` or `on/off`. */ - def setUsejavacp(input: String) { - usejavacp = Flag toBoolean input orElse buildError("Unknown usejavacp flag '" + input + "'") - } - - /** Sets the `failonerror` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value for `failonerror`. */ - def setFailonerror(input: Boolean) { failonerror = input } - - /** Set the `scalacdebugging` info attribute. If set to - * `'''true'''`, the scalac ant task will print out the filenames - * being compiled. 
- * @param input The specified flag */ - def setScalacdebugging(input: Boolean) { scalacDebugging = input } - - /** Sets the `compilerarg` as a nested compilerarg Ant parameter. - * @return A compiler argument to be configured. */ - def createCompilerArg(): ImplementationSpecificArgument = { - val arg = new ImplementationSpecificArgument() - scalacCompilerArgs addImplementationArgument arg - arg - } - -/*============================================================================*\ -** Properties getters ** -\*============================================================================*/ - - /** Gets the value of the `classpath` attribute in a - * Scala-friendly form. - * @return The class path as a list of files. */ - protected def getClasspath: List[File] = pathAsList(classpath, "classpath") - - /** Gets the value of the `origin` attribute in a - * Scala-friendly form. - * @return The origin path as a list of files. */ - protected def getOrigin: List[File] = pathAsList(origin, "origin") - - /** Gets the value of the `destination` attribute in a - * Scala-friendly form. - * @return The destination as a file. */ - protected def getDestination: File = - if (destination.isEmpty) buildError("Member 'destination' is empty.") - else existing(getProject resolveFile destination.get.toString) - - /** Gets the value of the `sourcepath` attribute in a - * Scala-friendly form. - * @return The source path as a list of files. */ - protected def getSourcepath: List[File] = pathAsList(sourcepath, "sourcepath") - - /** Gets the value of the `bootclasspath` attribute in a - * Scala-friendly form. - * @return The boot class path as a list of files. */ - protected def getBootclasspath: List[File] = pathAsList(bootclasspath, "bootclasspath") - - /** Gets the value of the `extdirs` attribute in a - * Scala-friendly form. - * @return The extensions path as a list of files. 
*/ - protected def getExtdirs: List[File] = pathAsList(extdirs, "extdirs") - -/*============================================================================*\ -** Compilation and support methods ** -\*============================================================================*/ - - /** Transforms a string name into a file relative to the provided base - * directory. - * @param base A file pointing to the location relative to which the name - * will be resolved. - * @param name A relative or absolute path to the file as a string. - * @return A file created from the name and the base file. */ - protected def nameToFile(base: File)(name: String): File = - existing(fileUtils.resolveFile(base, name)) - - /** Transforms a string name into a file relative to the build root - * directory. - * @param name A relative or absolute path to the file as a string. - * @return A file created from the name. */ - protected def nameToFile(name: String): File = - existing(getProject resolveFile name) - - /** Tests if a file exists and prints a warning in case it doesn't. Always - * returns the file, even if it doesn't exist. - * @param file A file to test for existence. - * @return The same file. */ - protected def existing(file: File): File = { - if (!file.exists) - log("Element '" + file.toString + "' does not exist.", - Project.MSG_WARN) - file - } - - /** Transforms a path into a Scalac-readable string. - * @param path A path to convert. - * @return A string-representation of the path like `a.jar:b.jar`. */ - protected def asString(path: List[File]): String = - path.map(asString) mkString File.pathSeparator - - /** Transforms a file into a Scalac-readable string. - * @param file A file to convert. - * @return A string-representation of the file like `/x/k/a.scala`. 
*/ - protected def asString(file: File): String = - file.getAbsolutePath() - -/*============================================================================*\ -** Hooks for variants of Scala ** -\*============================================================================*/ - - protected def newSettings(error: String=>Unit): Settings = - new Settings(error) - - protected def newGlobal(settings: Settings, reporter: Reporter) = - Global(settings, reporter) - -/*============================================================================*\ -** The big execute method ** -\*============================================================================*/ - - /** Initializes settings and source files */ - protected def initialize: (Settings, List[File], Boolean) = { - if (scalacDebugging) - log("Base directory is `%s`".format(SPath("").normalize)) - - // Tests if all mandatory attributes are set and valid. - if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.") - if (!destination.isEmpty && !destination.get.isDirectory()) - buildError("Attribute 'destdir' does not refer to an existing directory.") - if (destination.isEmpty) destination = Some(getOrigin.head) - - val mapper = new GlobPatternMapper() - mapper setTo "*.class" - mapper setFrom "*.scala" - - var javaOnly = true - - def getOriginFiles(originDir: File) = { - val includedFiles = getDirectoryScanner(originDir).getIncludedFiles - val javaFiles = includedFiles filter (_ endsWith ".java") - val scalaFiles = { - val xs = includedFiles filter (_ endsWith ".scala") - if (force) xs - else new SourceFileScanner(this).restrict(xs, originDir, destination.get, mapper) - } - - javaOnly = javaOnly && (scalaFiles.length == 0) - val list = (scalaFiles ++ javaFiles).toList - - if (scalacDebugging && !list.isEmpty) - log("Compiling source file%s: %s to %s".format( - plural(list), - list.mkString(", "), - getDestination.toString - )) - else if (!list.isEmpty) { - val str = - if (javaFiles.isEmpty) "%d source 
file%s".format(list.length, plural(list)) - else "%d scala and %d java source files".format(scalaFiles.length, javaFiles.length) - log(s"Compiling $str to $getDestination") - } - else log("No files selected for compilation", Project.MSG_VERBOSE) - - list - } - - // Scans source directories to build up a compile lists. - // If force is false, only files were the .class file in destination is - // older than the .scala file will be used. - val sourceFiles: List[File] = - for (originDir <- getOrigin ; originFile <- getOriginFiles(originDir)) yield { - log(originFile, Project.MSG_DEBUG) - nameToFile(originDir)(originFile) - } - - // Builds-up the compilation settings for Scalac with the existing Ant - // parameters. - val settings = newSettings(buildError) - settings.outdir.value = asString(destination.get) - if (!classpath.isEmpty) - settings.classpath.value = asString(getClasspath) - if (!sourcepath.isEmpty) - settings.sourcepath.value = asString(getSourcepath) - if (!bootclasspath.isEmpty) - settings.bootclasspath.value = asString(getBootclasspath) - if (!extdirs.isEmpty) settings.extdirs.value = asString(getExtdirs) - if (!dependencyfile.isEmpty) - settings.dependencyfile.value = asString(dependencyfile.get) - if (!encoding.isEmpty) settings.encoding.value = encoding.get - if (!backend.isEmpty) settings.target.value = backend.get - if (!logging.isEmpty && logging.get == "verbose") - settings.verbose.value = true - else if (!logging.isEmpty && logging.get == "debug") { - settings.verbose.value = true - settings.debug.value = true - } - if (!logPhase.isEmpty) settings.log.value = logPhase - if (!debugInfo.isEmpty) settings.debuginfo.value = debugInfo.get - if (!explaintypes.isEmpty) settings.explaintypes.value = explaintypes.get - if (!deprecation.isEmpty) settings.deprecation.value = deprecation.get - if (!nobootcp.isEmpty) settings.nobootcp.value = nobootcp.get - if (!nowarn.isEmpty) settings.nowarn.value = nowarn.get - if (!optimise.isEmpty) 
settings.optimise.value = optimise.get - if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get - if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get - - val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J") - if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList - val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D") - if (!defines.isEmpty) settings.defines.value = defines.toList - - log("Scalac params = '" + addParams + "'", Project.MSG_DEBUG) - - // let CompilerCommand processes all params - val command = new CompilerCommand(settings.splitParams(addParams), settings) - - // resolve dependenciesFile path from project's basedir, so call from other project works. - // the dependenciesFile may be relative path to basedir or absolute path, in either case, the following code - // will return correct answer. - command.settings.dependenciesFile.value match { - case "none" => - case x => - val depFilePath = SPath(x) - command.settings.dependenciesFile.value = SPath(getProject.getBaseDir).normalize.resolve(depFilePath).path - } - - (command.settings, sourceFiles, javaOnly) - } - - override def execute() { - val (settings, sourceFiles, javaOnly) = initialize - if (sourceFiles.isEmpty || javaOnly) - return - - if (fork) executeFork(settings, sourceFiles) // TODO - Error - else executeInternal(settings, sourceFiles) - } - - protected def executeFork(settings: Settings, sourceFiles: List[File]) { - val java = new Java(this) - java setFork true - // using 'setLine' creates multiple arguments out of a space-separated string - jvmArgs foreach { java.createJvmarg() setLine _ } - - // use user-provided path or retrieve from classloader - // TODO - Allow user to override the compiler classpath - val scalacPath: Path = { - val path = new Path(getProject) - if (compilerPath.isDefined) path add compilerPath.get - else getClass.getClassLoader match { - case cl: AntClassLoader => path add new Path(getProject, cl.getClasspath) - 
case _ => buildError("Cannot determine default classpath for scalac, please specify one!") - } - path - } - - java setClasspath scalacPath - java setClassname MainClass - - // Write all settings to a temporary file - def writeSettings(): File = { - def escapeArgument(arg : String) = if (arg matches ".*\\s.*") '"' + arg + '"' else arg - val file = File.createTempFile("scalac-ant-",".args") - file.deleteOnExit() - val out = new PrintWriter(new BufferedWriter(new FileWriter(file))) - - try { - for (setting <- settings.visibleSettings ; arg <- setting.unparse) - out println escapeArgument(arg) - for (file <- sourceFiles) - out println escapeArgument(file.getAbsolutePath) - } - finally out.close() - - file - } - val res = execWithArgFiles(java, List(writeSettings().getAbsolutePath)) - if (failonerror && res != 0) - buildError("Compilation failed because of an internal compiler error;"+ - " see the error output for details.") - } - - /** Performs the compilation. */ - protected def executeInternal(settings: Settings, sourceFiles : List[File]) { - val reporter = new ConsoleReporter(settings) - val compiler = newGlobal(settings, reporter) // compiles the actual code - - try new compiler.Run compile (sourceFiles map (_.toString)) - catch { - case ex: Throwable => - ex.printStackTrace() - val msg = if (ex.getMessage == null) "no error message provided" else ex.getMessage - buildError("Compile failed because of an internal compiler error (" + msg + "); see the error output for details.") - } - - reporter.finish() - if (reporter.hasErrors) { - val msg = "Compile failed with %d error%s; see the compiler error output for details.".format( - reporter.errorCount, plural(reporter.errorCount)) - if (failonerror) buildError(msg) else log(msg) - } - else if (reporter.warningCount > 0) - log("Compile succeeded with %d warning%s; see the compiler output for details.".format( - reporter.warningCount, plural(reporter.warningCount))) - } -} diff --git 
a/src/compiler/scala/tools/ant/ScalacShared.scala b/src/compiler/scala/tools/ant/ScalacShared.scala deleted file mode 100644 index cb1c91cc7b64..000000000000 --- a/src/compiler/scala/tools/ant/ScalacShared.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant - -import org.apache.tools.ant.Project -import org.apache.tools.ant.taskdefs.Java -import scala.tools.nsc.io - -trait ScalacShared extends ScalaMatchingTask { - val MainClass = "scala.tools.nsc.Main" - - def execWithArgFiles(java: Java, paths: List[String]) = { - paths foreach (p => java.createArg() setValue ("@"+ p)) - - val debugString = paths map (x => " (@ = '%s')".format(io.File(x).slurp())) mkString "" - log(java.getCommandLine.getCommandline.mkString("", " ", debugString), Project.MSG_VERBOSE) - java.executeJava() - } -} diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml deleted file mode 100644 index e3c3e370c6f7..000000000000 --- a/src/compiler/scala/tools/ant/antlib.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala deleted file mode 100644 index bce500fc19cb..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/Break.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package tools.ant.sabbus - -import org.apache.tools.ant.Task - -class Break extends Task { - - def setId(input: String) { - id = Some(input) - } - - private var id: Option[String] = None - - override def execute() { - if (id.isEmpty) sys.error("Attribute 'id' is not set") - Compilers.break(id.get) - } - -} diff --git a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala deleted file mode 100644 index 081cb10861f0..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant.sabbus - -case class CompilationFailure(message: String, cause: Exception) extends Exception(message, cause) diff --git a/src/compiler/scala/tools/ant/sabbus/Compiler.scala b/src/compiler/scala/tools/ant/sabbus/Compiler.scala deleted file mode 100644 index 64252ff5eb6a..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/Compiler.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.ant.sabbus - -import java.io.File -import java.net.URL -import java.lang.reflect.InvocationTargetException -import scala.reflect.internal.util.ScalaClassLoader - -class Compiler(classpath: Array[URL], val settings: Settings) -{ - val foreignCompilerName: String = "scala.tools.ant.sabbus.ForeignCompiler" - private lazy val classLoader = ScalaClassLoader fromURLs classpath - private lazy val foreignCompiler: AnyRef = classLoader create foreignCompilerName - - private def settingsArray: Array[String] = settings.toArgs.toArray - foreignInvoke("args_$eq", Array(classOf[Array[String]]), Array(settingsArray)) - - private def foreignInvoke(method: String, types: Array[Class[_]], args: Array[AnyRef]) = - try foreignCompiler.getClass.getMethod(method, types: _*).invoke(foreignCompiler, args: _*) - catch { - case e: InvocationTargetException => throw e.getCause - } - - def compile(files: Array[File]): (Int, Int) = //(errors, warnings) - try { - foreignInvoke("args_$eq", Array(classOf[Array[String]]), Array(settingsArray)) - val result = - foreignInvoke("compile", Array(classOf[Array[File]]), Array(files)).asInstanceOf[Int] - (result >> 16, result & 0x00FF) - } - catch { - case ex: Exception => throw CompilationFailure(ex.getMessage, ex) - } -} diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala deleted file mode 100644 index 4da9b81be51e..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.ant.sabbus - -import java.net.URL - -object Compilers extends scala.collection.DefaultMap[String, Compiler] { - - val debug = false - - private val container = new scala.collection.mutable.HashMap[String, Compiler] - - def iterator = container.iterator - - def get(id: String) = container.get(id) - - override def size = container.size - - def make(id: String, classpath: Array[URL], settings: Settings): Compiler = { - if (debug) println("Making compiler " + id) - if (debug) println(" memory before: " + freeMemoryString) - val comp = new Compiler(classpath, settings) - container(id) = comp - if (debug) println(" memory after: " + freeMemoryString) - comp - } - - def break(id: String): Null = { - if (debug) println("Breaking compiler " + id) - if (debug) println(" memory before: " + freeMemoryString) - container -= id - System.gc() - if (debug) println(" memory after: " + freeMemoryString) - null - } - - private def freeMemoryString: String = - f"${Runtime.getRuntime.freeMemory/1048576.0}%10.2f MB" -} diff --git a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala deleted file mode 100644 index fd8f3a9fc233..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.ant.sabbus - -import java.io.File - -import scala.tools.nsc._ -import scala.tools.nsc.reporters.ConsoleReporter - -class ForeignCompiler { - - private var argsBuffer: Array[String] = null - def args: Array[String] = argsBuffer - def args_=(a: Array[String]) { - argsBuffer = a - nsc - } - - private val error: (String => Nothing) = { msg => throw new Exception(msg) } - - private def settings = new scala.tools.nsc.Settings(error) - - private lazy val reporter = new ConsoleReporter(settings) - - private lazy val nsc: Global = { - try { - val command = new CompilerCommand(args.toList, settings) - new Global(command.settings, reporter) - } - catch { - case ex @ FatalError(msg) => - throw new Exception(msg, ex) - } - } - - def compile(files: Array[File]): Int = { - val command = new CompilerCommand(files.toList map (_.toString), settings) - (new nsc.Run) compile command.files - reporter.errorCount << 16 | reporter.warningCount - } - -} diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala deleted file mode 100644 index f14ca934eaf6..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/Make.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package tools.ant.sabbus - -import java.io.File -import org.apache.tools.ant.Task - -class Make extends Task with TaskArgs { - override def execute() { - if (id.isEmpty) sys.error("Mandatory attribute 'id' is not set.") - if (compilerPath.isEmpty) sys.error("Mandatory attribute 'compilerpath' is not set.") - val settings = new Settings - if (!destinationDir.isEmpty) settings.d = destinationDir.get - if (!compTarget.isEmpty) settings.target = compTarget.get - if (!compilationPath.isEmpty) settings.classpath = compilationPath.get - if (!sourcePath.isEmpty) settings.sourcepath = sourcePath.get - settings.extraParams = extraArgsFlat - Compilers.make(id.get, (compilerPath.get.list.map{ path => new File(path).toURI.toURL }), settings) - } -} diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala deleted file mode 100644 index bd3c350290dc..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package tools.ant -package sabbus - -import java.io.File -import org.apache.tools.ant.Project -import org.apache.tools.ant.taskdefs.Java -import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner } -import org.apache.tools.ant.BuildException -import scala.tools.nsc.io -import scala.reflect.internal.util.ScalaClassLoader - -/** An Ant task to compile with the new Scala compiler (NSC). - * - * This task can take the following parameters as attributes: - * - `srcdir` (mandatory), - * - `failonerror`, - * - `timeout`, - * - `jvmargs`, - * - `argfile`, - * - `params`. 
- * - * It also takes the following parameters as nested elements: - * - `src` (for `srcdir`), - * - `classpath`, - * - `sourcepath`, - * - `bootclasspath`, - * - `extdirs`, - * - `compilerarg`. - * - * @author Gilles Dubochet - */ -class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs { - - private def originOfThis: String = - ScalaClassLoader.originOfClass(classOf[ScalacFork]) map (_.toString) getOrElse "" - - /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `sourceDir`. */ - def setSrcdir(input: File) { - sourceDir = Some(input) - } - - /** Sets the `failonerror` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `failOnError`. */ - def setFailOnError(input: Boolean) { - failOnError = input - } - - /** Sets the `timeout` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `timeout`. */ - def setTimeout(input: Long) { - timeout = Some(input) - } - - /** Sets the `jvmargs` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `jvmArgs`. */ - def setJvmArgs(input: String) { - jvmArgs = Some(input) - } - - /** Sets the `argfile` attribute. Used by [[http://ant.apache.org Ant]]. - * @param input The value of `argfile`. 
*/ - def setArgfile(input: File) { - argfile = Some(input) - } - - private var sourceDir: Option[File] = None - private var failOnError: Boolean = true - private var timeout: Option[Long] = None - private var jvmArgs: Option[String] = None - private var argfile: Option[File] = None - - private def createMapper() = { - val mapper = new GlobPatternMapper() - val extension = "*.class" - mapper setTo extension - mapper setFrom "*.scala" - - mapper - } - - override def execute() { - def plural(x: Int) = if (x > 1) "s" else "" - - log("Executing ant task scalacfork, origin: %s".format(originOfThis), Project.MSG_VERBOSE) - - val compilerPath = this.compilerPath getOrElse sys.error("Mandatory attribute 'compilerpath' is not set.") - val sourceDir = this.sourceDir getOrElse sys.error("Mandatory attribute 'srcdir' is not set.") - val destinationDir = this.destinationDir getOrElse sys.error("Mandatory attribute 'destdir' is not set.") - - val settings = new Settings - settings.d = destinationDir - - compTarget foreach (settings.target = _) - compilationPath foreach (settings.classpath = _) - sourcePath foreach (settings.sourcepath = _) - settings.extraParams = extraArgsFlat - - val mapper = createMapper() - - val includedFiles: Array[File] = - new SourceFileScanner(this).restrict( - getDirectoryScanner(sourceDir).getIncludedFiles, - sourceDir, - destinationDir, - mapper - ) map (x => new File(sourceDir, x)) - - /* Nothing to do. 
*/ - if (includedFiles.isEmpty && argfile.isEmpty) - return - - if (includedFiles.nonEmpty) - log("Compiling %d file%s to %s".format(includedFiles.length, plural(includedFiles.length), destinationDir)) - - argfile foreach (x => log("Using argfile file: @" + x)) - - val java = new Java(this) // set this as owner - java setFork true - // using 'setLine' creates multiple arguments out of a space-separated string - jvmArgs foreach (java.createJvmarg() setLine _) - timeout foreach (java setTimeout _) - - java setClasspath compilerPath - java setClassname MainClass - - // Encode scalac/javac args for use in a file to be read back via "@file.txt" - def encodeScalacArgsFile(t: Traversable[String]) = t map { s => - if(s.find(c => c <= ' ' || "\"'\\".contains(c)).isDefined) - "\"" + s.flatMap(c => (if(c == '"' || c == '\\') "\\" else "") + c ) + "\"" - else s - } mkString "\n" - - // dump the arguments to a file and do "java @file" - val tempArgFile = io.File.makeTemp("scalacfork") - val tokens = settings.toArgs ++ (includedFiles map (_.getPath)) - tempArgFile writeAll encodeScalacArgsFile(tokens) - - val paths = List(Some(tempArgFile.toAbsolute.path), argfile).flatten map (_.toString) - val res = execWithArgFiles(java, paths) - - if (failOnError && res != 0) - throw new BuildException("Compilation failed because of an internal compiler error;"+ - " see the error output for details.") - } -} diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala deleted file mode 100644 index 768b3a009122..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/Settings.scala +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.ant.sabbus - -import java.io.File - -import org.apache.tools.ant.types.Path - -class Settings { - - private var gBf: Option[String] = None - def g = gBf.get - def g_=(s: String): this.type = { gBf = Some(s); this } - - private var uncheckedBf: Boolean = false - def unchecked = uncheckedBf - def unchecked_=(b: Boolean): this.type = { uncheckedBf = b; this } - - private var classpathBf: Option[Path] = None - def classpath = classpathBf.get - def classpath_=(p: Path): this.type = { classpathBf = Some(p); this } - - private var sourcepathBf: Option[Path] = None - def sourcepath = sourcepathBf.get - def sourcepath_=(p: Path): this.type = { sourcepathBf = Some(p); this } - - private var sourcedirBf: Option[File] = None - def sourcedir = sourcedirBf.get - def sourcedir_=(p: File): this.type = { sourcedirBf = Some(p); this } - - private var bootclasspathBf: Option[Path] = None - def bootclasspath = bootclasspathBf.get - def bootclasspath_=(p: Path): this.type = { bootclasspathBf = Some(p); this } - - private var extdirsBf: Option[Path] = None - def extdirs = extdirsBf.get - def extdirs_=(p: Path): this.type = { extdirsBf = Some(p); this } - - private var dBf: Option[File] = None - def d = dBf.get - def d_=(f: File): this.type = { dBf = Some(f); this } - - private var encodingBf: Option[String] = None - def encoding = encodingBf.get - def encoding_=(s: String): this.type = { encodingBf = Some(s); this } - - private var targetBf: Option[String] = None - def target = targetBf.get - def target_=(s: String): this.type = { targetBf = Some(s); this } - - private var optimiseBf: Boolean = false - def optimise = optimiseBf - def optimise_=(b: Boolean) { optimiseBf = b } - - private var extraParamsBf: Seq[String] = Seq() - def extraParams = extraParamsBf - def extraParams_=(s: Seq[String]): this.type = { extraParamsBf = s; this } - - def toArgs: List[String] = - (if (!gBf.isEmpty) "-g:"+g :: Nil else Nil) ::: - (if (uncheckedBf) "-unchecked" :: Nil else 
Nil) ::: - (if (!classpathBf.isEmpty) "-classpath" :: classpath.toString :: Nil else Nil) ::: - (if (!sourcepathBf.isEmpty) "-sourcepath" :: sourcepath.toString :: Nil else Nil) ::: - (if (!sourcedirBf.isEmpty) "-Xsourcedir" :: sourcedir.toString :: Nil else Nil) ::: - (if (!bootclasspathBf.isEmpty) "-bootclasspath" :: bootclasspath.toString :: Nil else Nil) ::: - (if (!extdirsBf.isEmpty) "-extdirs" :: extdirs.toString :: Nil else Nil) ::: - (if (!dBf.isEmpty) "-d" :: d.getAbsolutePath :: Nil else Nil) ::: - (if (!encodingBf.isEmpty) "-encoding" :: encoding :: Nil else Nil) ::: - (if (!targetBf.isEmpty) "-target:"+target :: Nil else Nil) ::: - (if (optimiseBf) "-optimise" :: Nil else Nil) ::: - extraParamsBf.toList - - override def equals(that: Any): Boolean = that match { - case cs: Settings => - this.gBf == cs.gBf && - this.uncheckedBf == cs.uncheckedBf && - this.classpathBf == cs.classpathBf && - this.sourcepathBf == cs.sourcepathBf && - this.sourcedirBf == cs.sourcedirBf && - this.bootclasspathBf == cs.bootclasspathBf && - this.extdirsBf == cs.extdirsBf && - this.dBf == cs.dBf && - this.encodingBf == cs.encodingBf && - this.targetBf == cs.targetBf && - this.optimiseBf == cs.optimiseBf && - this.extraParamsBf == cs.extraParamsBf - case _ => false - } - - override lazy val hashCode: Int = Seq[Any]( - gBf, - uncheckedBf, - classpathBf, - sourcepathBf, - sourcedirBf, - bootclasspathBf, - extdirsBf, - dBf, - encodingBf, - targetBf, - optimiseBf, - extraParamsBf - ).## -} diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala deleted file mode 100644 index 531014dc3d40..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.ant.sabbus - -import java.io.File -import org.apache.tools.ant.Task -import org.apache.tools.ant.types.{Path, Reference} -import org.apache.tools.ant.types.Commandline.Argument - -trait CompilationPathProperty { - this: Task => - - protected var compilationPath: Option[Path] = None - - def setCompilationPath(input: Path) { - if (compilationPath.isEmpty) compilationPath = Some(input) - else compilationPath.get.append(input) - } - - def createCompilationPath: Path = { - if (compilationPath.isEmpty) compilationPath = Some(new Path(getProject())) - compilationPath.get.createPath() - } - - def setCompilationPathRef(input: Reference) { - createCompilationPath.setRefid(input) - } -} - -trait TaskArgs extends CompilationPathProperty { - this: Task => - - def setId(input: String) { - id = Some(input) - } - - def setParams(input: String) { - extraArgs ++= input.split(' ').map { s => val a = new Argument; a.setValue(s); a } - } - - def createCompilerArg(): Argument = { - val a = new Argument - extraArgs :+= a - a - } - - def setTarget(input: String) { - compTarget = Some(input) - } - - def setSrcPath(input: Path) { - if (sourcePath.isEmpty) sourcePath = Some(input) - else sourcePath.get.append(input) - } - - def createSrcPath: Path = { - if (sourcePath.isEmpty) sourcePath = Some(new Path(getProject())) - sourcePath.get.createPath() - } - - def setSrcPathRef(input: Reference) { - createSrcPath.setRefid(input) - } - - def setCompilerPath(input: Path) { - if (compilerPath.isEmpty) compilerPath = Some(input) - else compilerPath.get.append(input) - } - - def createCompilerPath: Path = { - if (compilerPath.isEmpty) compilerPath = Some(new Path(getProject())) - compilerPath.get.createPath() - } - - def setCompilerPathRef(input: Reference) { - createCompilerPath.setRefid(input) - } - - def setDestdir(input: File) { - destinationDir = 
Some(input) - } - - protected var id: Option[String] = None - protected var extraArgs: Seq[Argument] = Seq() - protected var compTarget: Option[String] = None - protected var sourcePath: Option[Path] = None - protected var compilerPath: Option[Path] = None - protected var destinationDir: Option[File] = None - - def extraArgsFlat: Seq[String] = extraArgs flatMap { a => - val parts = a.getParts - if(parts eq null) Seq[String]() else parts.toSeq - } -} diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala deleted file mode 100644 index 1021ca7614ca..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/Use.scala +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package tools.ant -package sabbus - -import java.io.File - -import org.apache.tools.ant.util.{GlobPatternMapper, SourceFileScanner} - -class Use extends ScalaMatchingTask { - - def setId(input: String) { - id = Some(input) - } - - def setSrcdir(input: File) { - sourceDir = Some(input) - } - - def setDestdir(input: File) { - destinationDir = Some(input) - } - - def setFailOnError(input: Boolean) { - failOnError = input - } - - private var id: Option[String] = None - private var sourceDir: Option[File] = None - private var destinationDir: Option[File] = None - private var failOnError: Boolean = true - - override def execute() { - if (id.isEmpty) sys.error("Mandatory attribute 'id' is not set.") - if (sourceDir.isEmpty) sys.error("Mandatory attribute 'srcdir' is not set.") - val compiler = Compilers(id.get) - if (!destinationDir.isEmpty) compiler.settings.d = destinationDir.get - val mapper = new GlobPatternMapper() - mapper.setTo("*.class") - 
mapper.setFrom("*.scala") - val includedFiles: Array[File] = - new SourceFileScanner(this).restrict( - getDirectoryScanner(sourceDir.get).getIncludedFiles, - sourceDir.get, - compiler.settings.d, - mapper - ) map (new File(sourceDir.get, _)) - if (includedFiles.length > 0) - try { - log("Compiling " + includedFiles.length + " file" + (if (includedFiles.length > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath) - val (errors, warnings) = compiler.compile(includedFiles) - if (errors > 0) - sys.error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".") - else if (warnings > 0) - log("Compilation succeeded with " + warnings + " warning" + (if (warnings > 1) "s" else "") + ".") - } - catch { - case CompilationFailure(msg, ex) => - ex.printStackTrace - val errorMsg = - "Compilation failed because of an internal compiler error (" + msg + "); see the error output for details." - if (failOnError) sys.error(errorMsg) else log(errorMsg) - } - } - -} diff --git a/src/compiler/scala/tools/ant/sabbus/antlib.xml b/src/compiler/scala/tools/ant/sabbus/antlib.xml deleted file mode 100644 index 0a598bd70190..000000000000 --- a/src/compiler/scala/tools/ant/sabbus/antlib.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala deleted file mode 100644 index d87fbc1fe843..000000000000 --- a/src/compiler/scala/tools/cmd/CommandLine.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools -package cmd - -import scala.collection.mutable.ListBuffer - -trait CommandLineConfig { - def enforceArity: Boolean = true - def onlyKnownOptions: Boolean = true -} - -/** An instance of a command line, parsed according to a Spec. - */ -class CommandLine(val spec: Reference, val originalArgs: List[String]) extends CommandLineConfig { - def this(spec: Reference, line: String) = this(spec, CommandLineParser tokenize line) - def this(spec: Reference, args: Array[String]) = this(spec, args.toList) - - import spec.{ isUnaryOption, isBinaryOption, isExpandOption } - - val Terminator = "--" - val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true - - def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption) - def errorFn(msg: String) = println(msg) - - /** argMap is option -> argument (or "true" if it is a unary argument) - * residualArgs are what is left after removing the options and their args. - */ - lazy val (argMap, residualArgs): (Map[String, String], List[String]) = { - val residualBuffer = new ListBuffer[String] - - def loop(args: List[String]): Map[String, String] = { - def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() } - - /* Returns Some(List(args)) if this option expands to an - * argument list and it's not returning only the same arg. - */ - def expand(s1: String) = { - if (isExpandOption(s1)) { - val s2 = spec expandArg s1 - if (s2 == List(s1)) None - else Some(s2) - } - else None - } - - /* Assumes known options have all been ruled out already. 
*/ - def isUnknown(opt: String) = - onlyKnownOptions && (opt startsWith "-") && { - errorFn(s"Option '$opt' not recognized.") - true - } - - args match { - case Nil => Map() - case Terminator :: xs => residual(xs) - case x :: Nil => - expand(x) foreach (exp => return loop(exp)) - if (isBinaryOption(x) && enforceArity) - errorFn(s"Option '$x' requires argument, found EOF instead.") - - if (isUnaryOption(x)) mapForUnary(x) - else if (isUnknown(x)) Map() - else residual(args) - - case x1 :: x2 :: xs => - expand(x1) foreach (exp => return loop(exp ++ args.tail)) - - if (x2 == Terminator) mapForUnary(x1) ++ residual(xs) - else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail) - else if (isBinaryOption(x1)) Map(fromOpt(x1) -> x2) ++ loop(xs) - else if (isUnknown(x1)) loop(args.tail) - else residual(List(x1)) ++ loop(args.tail) - } - } - - (loop(originalArgs), residualBuffer map stripQuotes toList) - } - - def apply(arg: String) = argMap(arg) - def get(arg: String) = argMap get arg - def isSet(arg: String) = argMap contains arg - - def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse - - override def toString() = argMap.toString + " " + residualArgs.toString -} diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala deleted file mode 100644 index 5fcc59314bb2..000000000000 --- a/src/compiler/scala/tools/cmd/CommandLineParser.scala +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.cmd - -import scala.annotation.tailrec - -/** A simple enough command line parser. 
- */ -object CommandLineParser { - private final val DQ = '"' - private final val SQ = '\'' - - /** Split the line into tokens separated by whitespace or quotes. - * - * @return either an error message or reverse list of tokens - */ - private def tokens(in: String) = { - import Character.isWhitespace - import java.lang.{StringBuilder => Builder} - import collection.mutable.ArrayBuffer - - var accum: List[String] = Nil - var pos = 0 - var start = 0 - val qpos = new ArrayBuffer[Int](16) // positions of paired quotes - - def cur: Int = if (done) -1 else in.charAt(pos) - def bump() = pos += 1 - def done = pos >= in.length - - def skipToQuote(q: Int) = { - var escaped = false - def terminal = in.charAt(pos) match { - case _ if escaped => escaped = false ; false - case '\\' => escaped = true ; false - case `q` => true - case _ => false - } - while (!done && !terminal) pos += 1 - !done - } - def skipToDelim(): Boolean = - cur match { - case q @ (DQ | SQ) => { qpos.append(pos); bump(); skipToQuote(q) } && { qpos.append(pos); bump(); skipToDelim() } - case -1 => true - case c if isWhitespace(c) => true - case _ => bump(); skipToDelim() - } - def skipWhitespace() = while (isWhitespace(cur)) pos += 1 - def copyText() = { - val buf = new Builder - var p = start - var i = 0 - while (p < pos) { - if (i >= qpos.size) { - buf.append(in, p, pos) - p = pos - } else if (p == qpos(i)) { - buf.append(in, qpos(i)+1, qpos(i+1)) - p = qpos(i+1)+1 - i += 2 - } else { - buf.append(in, p, qpos(i)) - p = qpos(i) - } - } - buf.toString - } - def text() = { - val res = - if (qpos.isEmpty) in.substring(start, pos) - else if (qpos(0) == start && qpos(1) == pos) in.substring(start+1, pos-1) - else copyText() - qpos.clear() - res - } - def badquote = Left("Unmatched quote") - - @tailrec def loop(): Either[String, List[String]] = { - skipWhitespace() - start = pos - if (done) Right(accum) - else if (!skipToDelim()) badquote - else { - accum = text() :: accum - loop() - } - } - loop() - } - - class 
ParseException(msg: String) extends RuntimeException(msg) - - def tokenize(line: String, errorFn: String => Unit): List[String] = - tokens(line) match { - case Right(args) => args.reverse - case Left(msg) => errorFn(msg) ; Nil - } - - def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) -} diff --git a/src/compiler/scala/tools/cmd/Instance.scala b/src/compiler/scala/tools/cmd/Instance.scala deleted file mode 100644 index fefce38f5bf3..000000000000 --- a/src/compiler/scala/tools/cmd/Instance.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools -package cmd - -/** The trait mixed into each instance of a specification. - * - * @see Reference - */ -trait Instance extends Spec { - def parsed: CommandLine - - protected def help(str: => String): Unit = () - - def isSet(s: String) = parsed isSet toOpt(s) - def originalArgs = parsed.originalArgs // the full original list - def residualArgs = parsed.residualArgs // only args which were not options or args to options - - type OptionMagic = Opt.Instance - protected implicit def optionMagicAdditions(name: String) = new Opt.Instance(programInfo, parsed, name) -} diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala deleted file mode 100644 index 59eda1d6983b..000000000000 --- a/src/compiler/scala/tools/cmd/package.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package tools - -package object cmd { - def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } - - // make some language features in this package compile without warning - implicit def implicitConversions = scala.language.implicitConversions - implicit def postfixOps = scala.language.postfixOps - - private[cmd] def debug(msg: String): Unit = println(msg) - - def runAndExit(body: => Unit): Nothing = { - body - sys.exit(0) - } - - def toOpt(s: String): String = if (s startsWith "--") s else "--" + s - def fromOpt(s: String): String = s stripPrefix "--" - def toArgs(line: String): List[String] = CommandLineParser tokenize line - def fromArgs(args: List[String]): String = args mkString " " - - def stripQuotes(s: String): String = { - def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c - if (List('"', '\'') exists isQuotedBy) s.tail.init else s - } -} diff --git a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala index 1e9349e94417..2b1819c678d5 100644 --- a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala +++ b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -28,7 +28,7 @@ object ClassPathMemoryConsumptionTester { private class MainRetainsGlobal extends scala.tools.nsc.MainClass { var retainedGlobal: Global = _ - override def doCompile(compiler: Global) { + override def doCompile(compiler: Global): Unit = { retainedGlobal = compiler super.doCompile(compiler) } @@ -42,7 +42,7 @@ object ClassPathMemoryConsumptionTester { private def doTest(args: Array[String]) = { val settings = loadSettings(args.toList) - val mains = (1 to settings.requiredInstances.value) map (_ => new MainRetainsGlobal) + val mains = (1 to settings.requiredInstances.value).map(_ => new MainRetainsGlobal) // we need original settings without additional params to be able to use them later val baseArgs = argsWithoutRequiredInstances(args) @@ -50,7 +50,7 @@ object ClassPathMemoryConsumptionTester { println(s"Loading classpath ${settings.requiredInstances.value} times") val startTime = System.currentTimeMillis() - mains map (_.process(baseArgs)) + mains.foreach(_.process(baseArgs)) val elapsed = System.currentTimeMillis() - startTime println(s"Operation finished - elapsed $elapsed ms") @@ -74,7 +74,7 @@ object ClassPathMemoryConsumptionTester { val settings = new TestSettings() settings.processArguments(args, processAll = true) if (settings.classpath.isDefault) - settings.classpath.value = sys.props("java.class.path") + settings.classpath.value = System.getProperty("java.class.path", ".") settings } diff --git a/src/compiler/scala/tools/nsc/CloseableRegistry.scala b/src/compiler/scala/tools/nsc/CloseableRegistry.scala index 9812a2136263..ba5ddcf47564 100644 --- a/src/compiler/scala/tools/nsc/CloseableRegistry.scala +++ b/src/compiler/scala/tools/nsc/CloseableRegistry.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,12 +12,14 @@ package scala.tools.nsc +import java.io.Closeable + import scala.util.control.NonFatal /** Registry for resources to close when `Global` is closed */ -final class CloseableRegistry { - private[this] var closeables: List[java.io.Closeable] = Nil - final def registerClosable(c: java.io.Closeable): Unit = { +final class CloseableRegistry extends Closeable { + private[this] var closeables: List[Closeable] = Nil + final def registerCloseable(c: Closeable): Unit = { closeables ::= c } diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index b1fcd1b558d5..a4cb1e81ea5e 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,9 +12,9 @@ package scala.tools.nsc -import scala.collection.mutable -import scala.collection.mutable.{LinkedHashSet, ListBuffer} +import scala.annotation.nowarn import scala.reflect.internal.util.{FreshNameCreator, NoSourceFile, SourceFile} +import scala.collection.mutable, mutable.ArrayDeque trait CompilationUnits { global: Global => @@ -39,6 +39,7 @@ trait CompilationUnits { global: Global => /** One unit of compilation that has been submitted to the compiler. * It typically corresponds to a single file of source code. It includes * error-reporting hooks. 
*/ + @nowarn("""cat=deprecation&origin=scala\.reflect\.macros\.Universe\.CompilationUnitContextApi""") class CompilationUnit(val source: SourceFile, freshNameCreator: FreshNameCreator) extends CompilationUnitContextApi { self => def this(source: SourceFile) = this(source, new FreshNameCreator) /** the fresh name creator */ @@ -102,12 +103,12 @@ trait CompilationUnits { global: Global => /** Synthetic definitions generated by namer, eliminated by typer. */ object synthetics { - private val map = mutable.AnyRefMap[Symbol, Tree]() - def update(sym: Symbol, tree: Tree) { + private val map = mutable.HashMap[Symbol, Tree]() + def update(sym: Symbol, tree: Tree): Unit = { debuglog(s"adding synthetic ($sym, $tree) to $self") map.update(sym, tree) } - def -=(sym: Symbol) { + def -=(sym: Symbol): Unit = { debuglog(s"removing synthetic $sym from $self") map -= sym } @@ -122,10 +123,12 @@ trait CompilationUnits { global: Global => // namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result // is cached here and re-used in typedDefDef / typedValDef // Also used to cache imports type-checked by namer. 
- val transformed = new mutable.AnyRefMap[Tree, Tree] + val transformed = new mutable.HashMap[Tree, Tree] /** things to check at end of compilation unit */ - val toCheck = new ListBuffer[() => Unit] + val toCheck = ArrayDeque.empty[CompilationUnit.ToCheck] + private[nsc] def addPostUnitCheck(check: CompilationUnit.ToCheckAfterUnit): Unit = toCheck.append(check) + private[nsc] def addPostTyperCheck(check: CompilationUnit.ToCheckAfterTyper): Unit = toCheck.append(check) /** The features that were already checked for this unit */ var checkedFeatures = Set[Symbol]() @@ -140,11 +143,17 @@ trait CompilationUnits { global: Global => def targetPos: Position = NoPosition /** For sbt compatibility (https://github.com/scala/scala/pull/4588) */ - val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet + val icode: mutable.LinkedHashSet[icodes.IClass] = new mutable.LinkedHashSet /** Is this about a .java source file? */ val isJava: Boolean = source.isJava override def toString() = source.toString() } + + object CompilationUnit { + sealed trait ToCheck { def apply(): Unit } + trait ToCheckAfterUnit extends ToCheck + trait ToCheckAfterTyper extends ToCheck + } } diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala deleted file mode 100644 index 67c6824962b1..000000000000 --- a/src/compiler/scala/tools/nsc/CompileClient.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package tools.nsc - -import settings.FscSettings -import scala.tools.util.CompileOutputCommon -import scala.sys.SystemProperties.preferIPv4Stack - -/** The client part of the fsc offline compiler. 
Instead of compiling - * things itself, it send requests to a CompileServer. - */ -class StandardCompileClient extends HasCompileSocket with CompileOutputCommon { - lazy val compileSocket: CompileSocket = CompileSocket - - val versionMsg = "Fast " + Properties.versionMsg - var verbose = false - - def process(args: Array[String]): Boolean = { - // Trying to get out in front of the log messages in case we're - // going from verbose to not verbose. - verbose = (args contains "-verbose") - - val settings = new FscSettings(Console.println) - val command = new OfflineCompilerCommand(args.toList, settings) - val shutdown = settings.shutdown.value - val extraVmArgs = if (settings.preferIPv4) List("-D%s=true".format(preferIPv4Stack.key)) else Nil - - val vmArgs = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs - val fscArgs = args.toList ++ command.extraFscArgs - - if (settings.version) { - Console println versionMsg - return true - } - - info(versionMsg) - info(args.mkString("[Given arguments: ", " ", "]")) - info(fscArgs.mkString("[Transformed arguments: ", " ", "]")) - info(vmArgs.mkString("[VM arguments: ", " ", "]")) - - val socket = - if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value) - else compileSocket.getSocket(settings.server.value) - - socket match { - case Some(sock) => compileOnServer(sock, fscArgs) - case _ => - echo( - if (shutdown) "[No compilation server running.]" - else "Compilation failed." 
- ) - shutdown - } - } -} - -object CompileClient extends StandardCompileClient { - def main(args: Array[String]): Unit = sys exit { - try { if (process(args)) 0 else 1 } - catch { case _: Exception => 1 } - } -} - diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 6da3b3cb20b8..c74f5efd9637 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,9 +12,6 @@ package scala.tools.nsc -import java.nio.file.Files - - /** A class representing command line info for scalac */ class CompilerCommand(arguments: List[String], val settings: Settings) { def this(arguments: List[String], error: String => Unit) = this(arguments, new Settings(error)) @@ -34,77 +31,88 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { /** A descriptive alias for version and help messages. */ def cmdDesc = "compiler" - private def explainAdvanced = "\n" + """ - |-- Notes on option parsing -- - |Boolean settings are always false unless set. - |Where multiple values are accepted, they should be comma-separated. - | example: -Xplugin:option1,option2 - | means one or a comma-separated list of: - | (partial) phase names, phase ids, phase id ranges, or the string "all". - | example: -Xprint:all prints all phases. - | example: -Xprint:expl,24-26 prints phases explicitouter, closelim, dce, jvm. - | example: -Xprint:-4 prints only the phases up to typer. 
+ private def explainAdvanced = """ + |-- Note -- + |Boolean settings generally are false unless set: -Xdev -Xcheck-init:true -Xprompt:false + |Multi-valued settings are comma-separated: -Xlint:infer-any,unused,-missing-interpolator + |Phases are a list of names, ids, or ranges of ids: -Vprint:parser,typer,5-10 -Ylog:-4 + |Use _ to enable all: -language:_ -Vprint:_ | - """.stripMargin.trim + "\n" + """.stripMargin.trim def shortUsage = "Usage: %s " format cmdName /** Creates a help message for a subset of options based on cond */ - def createUsageMsg(cond: Setting => Boolean): String = { - val baseList = (settings.visibleSettings filter cond).toList sortBy (_.name) - val width = (baseList map (_.helpSyntax.length)).max - def format(s: String) = ("%-" + width + "s") format s + def optionsMessage(cond: Setting => Boolean): String = { + val iswarning = cond(settings.warnUnused) // sordid check for if we're building -W warning help, to include lint and unused + val baseList = settings.visibleSettings.filter(cond).toList.sortBy(_.name) + val (deprecateds, theRest) = baseList.partition(_.isDeprecated) + + def columnOneWidth(s: Setting): Int = + if (iswarning && (s == settings.lint || s == settings.warnUnused)) + s.asInstanceOf[settings.MultiChoiceSetting[_]].choices.map(c => s"${s.name}:$c".length).max + else + s.helpSyntax.length + val width = baseList.map(columnOneWidth).max + val columnOneFormat = s"%-${width}s" + def format(s: String) = columnOneFormat.format(s) + def layout(c1: String, c2: String) = s"${format(c1)} ${c2}" def helpStr(s: Setting) = { - val str = format(s.helpSyntax) + " " + s.helpDescription + val str = layout(s.helpSyntax, s.helpDescription) val suffix = s.deprecationMessage match { case Some(msg) => "\n" + format("") + " deprecated: " + msg case _ => "" } str + suffix } - val debugs = baseList filter (_.isForDebug) - val deprecateds = baseList filter (_.isDeprecated) - val theRest = baseList filterNot (debugs.toSet ++ deprecateds) - - def 
sstring(msg: String, xs: List[Setting]) = - if (xs.isEmpty) None else Some(msg :: xs.map(helpStr) mkString "\n ") - - List( - sstring("", theRest), - sstring("\nAdditional debug settings:", debugs), - sstring("\nDeprecated settings:", deprecateds) - ).flatten mkString "\n" - } - def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting => Boolean): String = { - val prefix = List( - Some(shortUsage), - Some(explainAdvanced) filter (_ => shouldExplain), - Some(label + " options include:") - ).flatten mkString "\n" + def appendDescriptions(sb: StringBuilder, msg: String, xs: List[Setting]): Unit = + if (!xs.isEmpty) { + val ss = xs.flatMap { s => + if (iswarning && (s == settings.lint || s == settings.warnUnused)) { + val mcs = s.asInstanceOf[settings.MultiChoiceSetting[_]] + mcs.choices.map(c => s"${s.name}:$c").zipAll(mcs.descriptions, "", "").map { + case (c, d) => layout(c, d) + } + } else + List(helpStr(s)) + } + sb.append(msg) + for (each <- ss) sb.append(" ").append(each).append("\n") + } - prefix + createUsageMsg(cond) + val sb = new StringBuilder() + appendDescriptions(sb, "", theRest) + appendDescriptions(sb, "\nDeprecated settings:\n", deprecateds) + sb.toString + } + + def createUsageMsg(label: String, explain: Boolean = true)(cond: Setting => Boolean): String = { + val explained = if (explain) s"\n$explainAdvanced" else "" + s"$shortUsage\n\n$label options:\n${optionsMessage(cond)}${explained}\n" } /** Messages explaining usage and options */ - def usageMsg = createUsageMsg("where possible standard", shouldExplain = false, _.isStandard) - def xusageMsg = createUsageMsg("Possible advanced", shouldExplain = true, _.isAdvanced) - def yusageMsg = createUsageMsg("Possible private", shouldExplain = true, _.isPrivate) + def usageMsg = createUsageMsg("Standard", explain = false)(_.isStandard) + def vusageMsg = createUsageMsg("Verbose")(_.isVerbose) + def wusageMsg = createUsageMsg("Warnings")(_.isWarning) + def xusageMsg = createUsageMsg("Available 
advanced")(_.isAdvanced) + def yusageMsg = createUsageMsg("Available private")(_.isPrivate) /** For info settings, compiler should just print a message and quit. */ def shouldStopWithInfo = settings.isInfo def getInfoMessage(global: Global): String = { import settings._ - import Properties.{ versionString, copyrightString } //versionFor - def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString" - - if (version) versionFor(cmdDesc) - else if (help) usageMsg + global.pluginOptionsHelp - else if (Xhelp) xusageMsg - else if (Yhelp) yusageMsg - else if (showPlugins) global.pluginDescriptions - else if (showPhases) global.phaseDescriptions + ( + + if (version.value) Properties.versionFor(cmdDesc) + else if (help.value) usageMsg + global.pluginOptionsHelp + else if (Vhelp.value) vusageMsg + else if (Whelp.value) wusageMsg + else if (Xhelp.value) xusageMsg + else if (Yhelp.value) yusageMsg + else if (showPlugins.value) global.pluginDescriptions + else if (showPhases.value) global.phaseDescriptions + ( if (settings.isDebug) "\n" + global.phaseFlagDescriptions else "" ) else if (genPhaseGraph.isSetByUser) { @@ -114,18 +122,16 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { else allSettings.valuesIterator.filter(_.isHelping).map(_.help).mkString("\n\n") } - /** - * Expands all arguments starting with @ to the contents of the - * file named like each argument. - */ + /** Expands all arguments starting with @ to the contents of the file named like each argument. 
*/ def expandArg(arg: String): List[String] = { - def stripComment(s: String) = s takeWhile (_ != '#') - import java.nio.file._ - import collection.JavaConverters._ - val file = Paths.get(arg stripPrefix "@") + import java.nio.file.{Files, Paths} + import scala.jdk.CollectionConverters._ + def stripComment(s: String) = s.takeWhile(_ != '#').trim() + val file = Paths.get(arg.stripPrefix("@")) if (!Files.exists(file)) - throw new java.io.FileNotFoundException("argument file %s could not be found" format file) - settings splitParams (Files.readAllLines(file).asScala map stripComment mkString " ") + throw new java.io.FileNotFoundException(s"argument file $file could not be found") + val lines = Files.readAllLines(file).asScala.map(stripComment).filterNot(_.isEmpty).toList + lines.flatMap(settings.splitParams) } // override this if you don't want arguments processed here diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala index d311471190be..2b250c96e49f 100644 --- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala +++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,17 +17,16 @@ import java.io.Writer /** A Writer that writes onto the Scala Console. 
* * @author Lex Spoon - * @version 1.0 */ class ConsoleWriter extends Writer { def close() = flush() def flush() = Console.flush() - def write(cbuf: Array[Char], off: Int, len: Int) { + def write(cbuf: Array[Char], off: Int, len: Int): Unit = { if (len > 0) write(new String(cbuf.slice(off, off+len))) } - override def write(str: String) { Console.print(str) } + override def write(str: String): Unit = { Console.print(str) } } diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala index 1d89f8195c21..88cfc4576149 100644 --- a/src/compiler/scala/tools/nsc/Driver.scala +++ b/src/compiler/scala/tools/nsc/Driver.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,9 +13,10 @@ package scala package tools.nsc -import Properties.{ versionMsg, residentPromptString } +import Properties.{versionMsg, residentPromptString} import scala.reflect.internal.util.FakePos import scala.tools.nsc.reporters.Reporter +import scala.tools.util.SystemExit abstract class Driver { @@ -27,12 +28,12 @@ abstract class Driver { /** Forward errors to the (current) reporter. */ protected def scalacError(msg: String): Unit = { - reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information") + reporter.error(FakePos("scalac"), s"$msg\n scalac -help gives more information") } /** True to continue compilation. 
*/ protected def processSettingsHook(): Boolean = { - if (settings.version) { reporter echo versionMsg ; false } + if (settings.version.value) { reporter echo versionMsg ; false } else !reporter.hasErrors } @@ -51,13 +52,13 @@ abstract class Driver { def process(args: Array[String]): Boolean = { val ss = new Settings(scalacError) - reporter = Reporter(ss) // for reporting early config errors, before compiler is constructed + reporter = Reporter(ss) command = new CompilerCommand(args.toList, ss) settings = command.settings if (processSettingsHook()) { val compiler = newCompiler() - reporter = compiler.reporter // adopt the configured reporter + reporter = compiler.reporter // adopt the compiler's reporter, which may be custom try { if (reporter.hasErrors) reporter.flush() @@ -66,6 +67,7 @@ abstract class Driver { else doCompile(compiler) } catch { + case _: SystemExit => // user requested to bail case ex: Throwable => compiler.reportThrowable(ex) ex match { @@ -77,5 +79,5 @@ abstract class Driver { !reporter.hasErrors } - def main(args: Array[String]): Unit = sys.exit(if (process(args)) 0 else 1) + def main(args: Array[String]): Unit = System.exit(if (process(args)) 0 else 1) } diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala index 443c9bbf400a..33e8a716b26f 100644 --- a/src/compiler/scala/tools/nsc/EvalLoop.scala +++ b/src/compiler/scala/tools/nsc/EvalLoop.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -19,8 +19,8 @@ import java.io.EOFException trait EvalLoop { def prompt: String - def loop(action: (String) => Unit) { - @tailrec def inner() { + def loop(action: (String) => Unit): Unit = { + @tailrec def inner(): Unit = { Console.print(prompt) val line = try StdIn.readLine() catch { case _: EOFException => null } if (line != null && line != "") { diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala index 9bfd798240b6..6afed6a77509 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -45,7 +45,7 @@ extends CompilerCommand(args, settings) { val f = io.File(target) if (!f.hasExtension("class", "jar", "zip") && f.canRead) AsScript else { - Console.err.println("No such file or class on classpath: " + target) + settings.errorFn("No such file or class on classpath: " + target) Error } } @@ -94,17 +94,13 @@ Other startup options: -i preload before starting the REPL -I preload , enforcing line-by-line interpretation - -e execute as if entered in the REPL + -e execute as if it were in a source file -save save the compiled script in a jar for future use - -nc no compilation daemon: do not use the fsc offline compiler If the runner does not correctly guess how to run the target: - -howtorun what to run (default: guess) - -When running a script or using -e, an already running compilation daemon -(fsc) is used, or a new one started on demand. 
Use the -nc option to -create a fresh compiler instead.%n""" + -howtorun what to run (default: guess) +%n""" } object GenericRunnerCommand { diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index c729ea8fdbd2..8e5753095217 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -34,7 +34,7 @@ class GenericRunnerSettings(error: String => Unit, pathFactory: PathFactory) ext "how", "how to run the specified code", List("object", "script", "jar", "repl", "guess"), - "guess") + "guess") withAbbreviation "--how-to-run" val loadfiles = MultiStringSetting( @@ -58,13 +58,12 @@ class GenericRunnerSettings(error: String => Unit, pathFactory: PathFactory) ext val save = BooleanSetting( "-save", - "save the compiled script (assumes the code is a script)") withAbbreviation "-savecompiled" + "save the compiled script (assumes the code is a script)") withAbbreviation "-savecompiled" withAbbreviation "--save" + @deprecated("check Yscriptrunner instead", since="2.13.0") val nc = BooleanSetting( "-nc", - "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" withPostSetHook((x: BooleanSetting) => {_useCompDaemon = !x.value }) - - - private[this] var _useCompDaemon = true - def useCompDaemon: Boolean = _useCompDaemon + "do not use the legacy fsc compilation daemon").withAbbreviation("-nocompdaemon").withAbbreviation("--no-compilation-daemon") + .withDeprecationMessage("scripts use cold compilation by default; use -Yscriptrunner for custom behavior") + .withPostSetHook { x: BooleanSetting => Yscriptrunner.value = if (x.value) "default" else "resident" } } diff --git 
a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index ccafaddef75e..e816ea58f1b1 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,12 +16,13 @@ package nsc import java.io.{Closeable, FileNotFoundException, IOException} import java.net.URL -import java.nio.charset._ +import java.nio.charset.{Charset, CharsetDecoder, IllegalCharsetNameException, StandardCharsets, UnsupportedCharsetException}, StandardCharsets.UTF_8 +import scala.annotation._ import scala.collection.{immutable, mutable} import scala.reflect.ClassTag import scala.reflect.internal.pickling.PickleBuffer -import scala.reflect.internal.util.{BatchSourceFile, FreshNameCreator, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile} +import scala.reflect.internal.util.{BatchSourceFile, FreshNameCreator, NoSourceFile, ScriptSourceFile, SourceFile} import scala.reflect.internal.{Reporter => InternalReporter} import scala.tools.nsc.Reporting.WarningCategory import scala.tools.nsc.ast.parser._ @@ -39,7 +40,7 @@ import scala.tools.nsc.transform._ import scala.tools.nsc.transform.async.AsyncPhase import scala.tools.nsc.transform.patmat.PatternMatching import scala.tools.nsc.typechecker._ -import scala.tools.nsc.util.{ClassPath, returning} +import scala.tools.nsc.util.ClassPath class Global(var currentSettings: Settings, reporter0: Reporter) extends SymbolTable @@ -57,7 +58,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // the mirror -------------------------------------------------- override def isCompilerUniverse = true - override val useOffsetPositions = !currentSettings.Yrangepos + override val useOffsetPositions = !currentSettings.Yrangepos.value type 
RuntimeClass = java.lang.Class[_] implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass]) @@ -80,10 +81,19 @@ class Global(var currentSettings: Settings, reporter0: Reporter) import definitions.findNamedMember def findMemberFromRoot(fullName: Name): Symbol = rootMirror.findMemberFromRoot(fullName) + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = { + // presentation compiler uses `compileLate` which doesn't advance `globalPhase`, so `isPast` is false. + // therefore checking `isAtPhaseAfter` as well. + val forceNow = force || isPast(currentRun.namerPhase) || isRunGlobalInitialized && isAtPhaseAfter(currentRun.namerPhase) + if (forceNow) super.openPackageModule(pkgClass, force = true) + else analyzer.packageObjects.deferredOpen.addOne(pkgClass) + } // alternate constructors ------------------------------------------ override def settings = currentSettings + override def isSymbolLockTracingEnabled = settings.cyclic.value + private[this] var currentReporter: FilteringReporter = null locally { reporter = reporter0 } @@ -101,7 +111,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) this(new Settings(err => reporter.error(null, err)), reporter) def this(settings: Settings) = - this(settings, Global.reporter(settings)) + this(settings, Reporter(settings)) def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase @@ -130,16 +140,18 @@ class Global(var currentSettings: Settings, reporter0: Reporter) type ThisPlatform = JavaPlatform { val global: Global.this.type } lazy val platform: ThisPlatform = new GlobalPlatform - /** The classpath used by inliner's bytecode repository. - * If --release is used, swap the ctsym for jrt. - * REPL adds a classpath entry containing products of previous runs. 
(scala/bug#8779) - * @param base the class path to augment, nominally `this.classPath` or `platform.classPath` + /* Create a class path for the backend, based on the given class path. + * Used to make classes available to the inliner's bytecode repository. + * + * In particular, if ct.sym is used for compilation, replace it with jrt. + * + * See ReplGlobal, which appends a classpath entry containing products of previous runs. (Fixes scala/bug#8779.) */ def optimizerClassPath(base: ClassPath): ClassPath = base match { case AggregateClassPath(entries) if entries.head.isInstanceOf[CtSymClassPath] => - JrtClassPath(release = None, unsafe = None, closeableRegistry) match { - case jrt :: _ => AggregateClassPath(jrt +: entries.drop(1)) + JrtClassPath(release = None, settings.systemPathValue, unsafe = None, closeableRegistry) match { + case jrt :: Nil => AggregateClassPath(entries.drop(1).prepended(jrt)) case _ => base } case _ => base @@ -212,21 +224,29 @@ class Global(var currentSettings: Settings, reporter0: Reporter) var lastPrintedSource: String = "" infolevel = InfoLevel.Verbose - def showUnit(unit: CompilationUnit) { - print(" // " + unit.source) + def showUnit(unit: CompilationUnit): Unit = { + print(s" // ${unit.source}") if (unit.body == null) println(": tree is null") else { - val source = util.stringFromWriter(w => newTreePrinter(w) print unit.body) + val source = util.stringFromWriter(w => newTreePrinter(w).print(unit.body)) // treePrinter show unit.body if (lastPrintedSource == source) - println(": tree is unchanged since " + lastPrintedPhase) + println(s": tree is unchanged since $lastPrintedPhase") else { - lastPrintedPhase = phase.prev // since we're running inside "exitingPhase" + println() + if (settings.showTreeDiff) { + import scala.jdk.CollectionConverters._ + import com.github.difflib.{DiffUtils, UnifiedDiffUtils} + val diff = DiffUtils.diff(lastPrintedSource.linesIterator.toList.asJava, source.linesIterator.toList.asJava) + val unified = 
UnifiedDiffUtils.generateUnifiedDiff(lastPrintedPhase.name, phase.prev.name, lastPrintedSource.linesIterator.toList.asJava, diff, 1).asScala + unified.foreach(println) + } + else + println(source) + println() + lastPrintedPhase = phase.prev // since we're running inside "exitingPhase" lastPrintedSource = source - println("") - println(source) - println("") } } } @@ -249,7 +269,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) t } finally { propCnt = propCnt-1 - assert(propCnt >= 0) + assert(propCnt >= 0, "Bad propCnt") } } @@ -267,10 +287,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Called every time an AST node is successfully typechecked in typerPhase. */ - def signalDone(context: analyzer.Context, old: Tree, result: Tree) {} + def signalDone(context: analyzer.Context, old: Tree, result: Tree): Unit = {} /** Called from parser, which signals hereby that a method definition has been parsed. */ - def signalParseProgress(pos: Position) {} + def signalParseProgress(pos: Position): Unit = {} /** Called by ScaladocAnalyzer when a doc comment has been parsed. */ def signalParsedDocComment(comment: String, pos: Position) = { @@ -282,13 +302,13 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Register new context; called for every created context */ - def registerContext(c: analyzer.Context) { + def registerContext(c: analyzer.Context): Unit = { lastSeenContext = c } /** Register top level class (called on entering the class) */ - def registerTopLevelSym(sym: Symbol) {} + def registerTopLevelSym(sym: Symbol): Unit = {} // ------------------ Debugging ------------------------------------- @@ -304,7 +324,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) * to make them visually distinct. 
*/ @inline final override def devWarning(msg: => String): Unit = devWarning(NoPosition, msg) - @inline final def devWarning(pos: Position, msg: => String) { + @inline final def devWarning(pos: Position, msg: => String): Unit = { def pos_s = if (pos eq NoPosition) "" else s" [@ $pos]" if (isDeveloper) runReporting.warning(pos, "!!! " + msg, WarningCategory.OtherDebug, site = "") @@ -318,7 +338,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) (settings.log containsPhase globalPhase) || (settings.log containsPhase phase) ) // Over 200 closure objects are eliminated by inlining this. - @inline final def log(msg: => AnyRef) { + @inline final def log(msg: => AnyRef): Unit = { if (shouldLogAtThisPhase) inform(s"[log $globalPhase$atPhaseStackMessage] $msg") } @@ -358,7 +378,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def ccon = Class.forName(name).getConstructor(classOf[CharsetDecoder], classOf[InternalReporter]) try Some(ccon.newInstance(charset.newDecoder(), reporter).asInstanceOf[SourceReader]) - catch { case ex: Throwable => + catch { case _: Throwable => globalError("exception while trying to instantiate source reader '" + name + "'") None } @@ -369,7 +389,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } } - if (settings.verbose || settings.Ylogcp) + if (settings.verbose.value || settings.Ylogcp.value) reporter.echo( s"[search path for source files: ${classPath.asSourcePathString}]\n" + s"[search path for class files: ${classPath.asClassPathString}]" @@ -405,7 +425,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) abstract class GlobalPhase(prev: Phase) extends Phase(prev) { phaseWithId(id) = this - def run() { + def run(): Unit = { echoPhaseSummary(this) val units = currentRun.units while (units.hasNext) @@ -421,20 +441,19 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Is current phase cancelled on this unit? 
*/ def cancelled(unit: CompilationUnit) = { if (Thread.interrupted()) reporter.cancelled = true - val isCanceled = reporter.cancelled - isCanceled || unit.isJava && shouldSkipThisPhaseForJava + reporter.cancelled || unit.isJava && shouldSkipThisPhaseForJava } private def beforeUnit(unit: CompilationUnit): Unit = { if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source - if (settings.isDebug && (settings.verbose || currentRun.size < 5)) + if (settings.isDebug && (settings.verbose.value || currentRun.size < 5)) inform("[running phase " + name + " on " + unit + "]") } - @deprecated - final def withCurrentUnit(unit: CompilationUnit)(task: => Unit) { + @deprecated("Unused, inlined in applyPhase", since="2.13") + final def withCurrentUnit(unit: CompilationUnit)(task: => Unit): Unit = { beforeUnit(unit) if (!cancelled(unit)) { currentRun.informUnitStarting(this, unit) @@ -444,7 +463,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } @inline - final def withCurrentUnitNoLog(unit: CompilationUnit)(task: => Unit) { + @deprecated("Unused, see withCurrentUnit", since="2.13") + final def withCurrentUnitNoLog(unit: CompilationUnit)(task: => Unit): Unit = { val unit0 = currentUnit try { currentRun.currentUnit = unit @@ -491,23 +511,15 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // I only changed analyzer. 
// // factory for phases: namer, packageobjects, typer - lazy val analyzer = new { - val global: Global.this.type = Global.this - } with Analyzer - - // phaseName = "patmat" - object patmat extends { - val global: Global.this.type = Global.this - val runsAfter = List("typer") - val runsRightAfter = None - // patmat doesn't need to be right after typer, as long as we run before superaccessors - // (sbt does need to run right after typer, so don't conflict) - } with PatternMatching + lazy val analyzer = + if (settings.YmacroAnnotations.value) new { val global: Global.this.type = Global.this } with Analyzer with MacroAnnotationNamers + else new { val global: Global.this.type = Global.this } with Analyzer // phaseName = "superaccessors" object superAccessors extends { val global: Global.this.type = Global.this - val runsAfter = List("patmat") + val runsAfter = List("typer") + // sbt needs to run right after typer, so don't conflict val runsRightAfter = None } with SuperAccessors @@ -532,10 +544,20 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val runsRightAfter = None } with RefChecks + // phaseName = "patmat" + object patmat extends { + val global: Global.this.type = Global.this + // patmat does not need to run before the superaccessors phase, because + // patmat never emits `this.x` where `x` is a ParamAccessor. + // (However, patmat does need to run before outer accessors generation). 
+ val runsAfter = List("refchecks") + val runsRightAfter = None + } with PatternMatching + // phaseName = "uncurry" override object uncurry extends { val global: Global.this.type = Global.this - val runsAfter = List("refchecks") + val runsAfter = List("patmat") val runsRightAfter = None } with UnCurry @@ -561,7 +583,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // phaseName = "specialize" object specializeTypes extends { val global: Global.this.type = Global.this - val runsAfter = List("") + val runsAfter = Nil val runsRightAfter = Some("tailcalls") } with SpecializeTypes @@ -575,14 +597,14 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // phaseName = "erasure" override object erasure extends { val global: Global.this.type = Global.this - val runsAfter = List("explicitouter") + val runsAfter = Nil val runsRightAfter = Some("explicitouter") } with Erasure // phaseName = "posterasure" override object postErasure extends { val global: Global.this.type = Global.this - val runsAfter = List("erasure") + val runsAfter = Nil val runsRightAfter = Some("erasure") } with PostErasure @@ -647,7 +669,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val global: Global.this.type = Global.this } with SubComponent { val phaseName = "terminal" - val runsAfter = List("jvm") + val runsAfter = Nil val runsRightAfter = None override val terminal = true @@ -656,7 +678,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } private class TerminalPhase(prev: Phase) extends GlobalPhase(prev) { def name = phaseName - def apply(unit: CompilationUnit) {} + def apply(unit: CompilationUnit): Unit = {} } } @@ -676,16 +698,16 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Add the internal compiler phases to the phases set. * This implementation creates a description map at the same time. 
*/ - protected def computeInternalPhases(): Unit = { + protected def computeInternalPhases(): Unit = // Note: this fits -Xshow-phases into 80 column width, which is // desirable to preserve. - val phs = List( + List( syntaxAnalyzer -> "parse source into ASTs, perform simple desugaring", analyzer.namerFactory -> "resolve names, attach symbols to named trees", analyzer.packageObjects -> "load package objects", analyzer.typerFactory -> "the meat and potatoes: type the trees", - patmat -> "translate match expressions", superAccessors -> "add super accessors in traits and nested classes", + patmat -> "translate match expressions", extensionMethods -> "add extension methods for inline classes", pickler -> "serialize symbol tables", refChecks -> "reference/override checking, translate nested objects", @@ -704,23 +726,20 @@ class Global(var currentSettings: Settings, reporter0: Reporter) cleanup -> "platform-specific cleanups, generate reflective calls", terminal -> "the last phase during a compilation run" ) + .foreach((addToPhasesSet _).tupled) - phs foreach (addToPhasesSet _).tupled - } // This is slightly inelegant but it avoids adding a new member to SubComponent, - // and attractive -Xshow-phases output is unlikely if the descs span 20 files anyway. + // and attractive -Vphases output is unlikely if the descs span 20 files anyway. private val otherPhaseDescriptions = Map( "flatten" -> "eliminate inner classes", "jvm" -> "generate JVM bytecode" ) withDefaultValue "" - protected def computePlatformPhases() = platform.platformPhases foreach { sub => - addToPhasesSet(sub, otherPhaseDescriptions(sub.phaseName)) - } + protected def computePlatformPhases() = platform.platformPhases.foreach(p => addToPhasesSet(p, otherPhaseDescriptions(p.phaseName))) - // sequences the phase assembly + // compute the order in which phases will run; subclasses may override the template methods used here. 
protected def computePhaseDescriptors: List[SubComponent] = { - /** Allow phases to opt out of the phase assembly. */ + /* Allow phases to opt out of the phase assembly. */ def cullPhases(phases: List[SubComponent]) = { val enabled = if (settings.isDebug && settings.isInfo) phases else phases filter (_.enabled) def isEnabled(q: String) = enabled exists (_.phaseName == q) @@ -741,7 +760,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) protected lazy val phasesSet = new mutable.HashSet[SubComponent] protected lazy val phasesDescMap = new mutable.HashMap[SubComponent, String] withDefaultValue "" - protected def addToPhasesSet(sub: SubComponent, descr: String) { + protected def addToPhasesSet(sub: SubComponent, descr: String): Unit = { phasesSet += sub phasesDescMap(sub) = descr } @@ -749,13 +768,13 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** The names of the phases. */ lazy val phaseNames = { new Run // force some initialization - phaseDescriptors map (_.phaseName) + phaseDescriptors.map(_.phaseName) } - /** A description of the phases that will run in this configuration, or all if -Ydebug. */ + /** A description of the phases that will run in this configuration, or all if -Vdebug. */ def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.isDebug, phasesDescMap) - /** Summary of the per-phase values of nextFlags and newFlags, shown under -Xshow-phases -Ydebug. */ + /** Summary of the per-phase values of nextFlags and newFlags, shown under -Vphases -Vdebug. 
*/ def phaseFlagDescriptions: String = { def fmt(ph: SubComponent) = { def fstr1 = if (ph.phaseNewFlags == 0L) "" else "[START] " + Flags.flagsToString(ph.phaseNewFlags) @@ -779,10 +798,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private def phaseHelp(title: String, elliptically: Boolean, describe: SubComponent => String): String = { val Limit = 16 // phase names should not be absurdly long val MaxCol = 80 // because some of us edit on green screens - val maxName = phaseNames map (_.length) max + val maxName = phaseNames.map(_.length).max val width = maxName min Limit val maxDesc = MaxCol - (width + 6) // descriptions not novels - val fmt = if (settings.verbose || !elliptically) s"%${maxName}s %2s %s%n" + val fmt = if (settings.verbose.value || !elliptically) s"%${maxName}s %2s %s%n" else s"%${width}.${width}s %2s %.${maxDesc}s%n" val line1 = fmt.format("phase name", "id", title) @@ -796,7 +815,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) else if (max < 4) s.take(max) else s.take(max - 3) + "..." ) - override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) { + override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int): Unit = { val p = foreshortened(s, precision) val w = if (width > 0 && p.length < width) { import FormattableFlags.LEFT_JUSTIFY @@ -829,13 +848,12 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Returns List of (phase, value) pairs, including only those * where the value compares unequal to the previous phase's value. 
*/ - def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests + def afterEachPhase[T](op: => T): List[(Phase, T)] = // used in tests phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) => val value = exitingPhase(ph)(op) if (res.nonEmpty && res.head._2 == value) res else ((ph, value)) :: res - } reverse - } + }.reverse // ------------ REPL utilities --------------------------------- @@ -925,7 +943,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) invalidated, failed) } } - def show(msg: String, syms: scala.collection.Traversable[Symbol]) = + def show(msg: String, syms: scala.collection.Iterable[Symbol]) = if (syms.nonEmpty) informProgress(s"$msg: ${syms map (_.fullName) mkString ","}") show("invalidated packages", invalidated) @@ -960,7 +978,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) cp.packages(parent).exists(_.name == fullPackageName) } - def invalidateOrRemove(pkg: ClassSymbol) = { + def invalidateOrRemove(pkg: ClassSymbol): Unit = { if (packageExists(fullClasspath)) pkg setInfo new loaders.PackageLoader(fullPackageName, fullClasspath) else @@ -1021,16 +1039,16 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private[this] var curFreshNameCreator: FreshNameCreator = null private[scala] def currentFreshNameCreator_=(fresh: FreshNameCreator): Unit = curFreshNameCreator = fresh - def isGlobalInitialized = ( - definitions.isDefinitionsInitialized - && rootMirror.isMirrorInitialized - ) + def isGlobalInitialized = definitions.isDefinitionsInitialized && rootMirror.isMirrorInitialized + private def isRunGlobalInitialized = (curRun ne null) && isGlobalInitialized + override def isPastTyper = isPast(currentRun.typerPhase) - def isPast(phase: Phase) = ( - (curRun ne null) - && isGlobalInitialized // defense against init order issues - && (globalPhase.id > phase.id) - ) + def isBeforeErasure = isBefore(currentRun.erasurePhase) + def isPast(phase: 
Phase) = isRunGlobalInitialized && (globalPhase.id > phase.id) + def isBefore(phase: Phase) = isRunGlobalInitialized && (phase match { + case NoPhase => true // if phase is NoPhase then that phase ain't comin', so we're "before it" + case _ => globalPhase.id < phase.id + }) // TODO - trim these to the absolute minimum. @inline final def exitingErasure[T](op: => T): T = exitingPhase(currentRun.erasurePhase)(op) @@ -1114,7 +1132,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def echoPhaseSummary(ph: Phase) = { /* Only output a summary message under debug if we aren't echoing each file. */ - if (settings.isDebug && !(settings.verbose || currentRun.size < 5)) + if (settings.isDebug && !(settings.verbose.value || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } @@ -1140,8 +1158,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) override protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = curRun.profiler.afterCompletion(root, associatedFile) - /** A Run is a single execution of the compiler on a set of units. - */ + /** A Run is a single execution of the compiler on a set of units. */ + @nowarn("""cat=deprecation&origin=scala\.reflect\.macros\.Universe\.RunContextApi""") class Run extends RunContextApi with RunReporting with RunParsing { /** Have been running into too many init order issues with Run * during erroneous conditions. Moved all these vals up to the @@ -1154,11 +1172,26 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val profiler: Profiler = Profiler(settings) keepPhaseStack = settings.log.isSetByUser - // We hit these checks regularly. They shouldn't change inside the same run, so cache the comparisons here. 
- val isScala211: Boolean = settings.isScala211 - val isScala212: Boolean = settings.isScala212 - val isScala213: Boolean = settings.isScala213 - val isScala3: Boolean = settings.isScala3 + val isScala3: Boolean = settings.isScala3: @nowarn + + object sourceFeatures { + private val s = settings + private val o = s.sourceFeatures + import s.XsourceFeatures.contains + def caseApplyCopyAccess = isScala3 && contains(o.caseApplyCopyAccess) + def caseCompanionFunction = isScala3 && contains(o.caseCompanionFunction) + def caseCopyByName = isScala3 && contains(o.caseCopyByName) + def inferOverride = isScala3 && contains(o.inferOverride) + def noInferStructural = isScala3 && contains(o.noInferStructural) + def any2StringAdd = isScala3 && contains(o.any2StringAdd) + def unicodeEscapesRaw = isScala3 && contains(o.unicodeEscapesRaw) + def stringContextScope = isScala3 && contains(o.stringContextScope) + def leadingInfix = isScala3 && contains(o.leadingInfix) + def packagePrefixImplicits = isScala3 && contains(o.packagePrefixImplicits) + def implicitResolution = isScala3 && contains(o.implicitResolution) || settings.Yscala3ImplicitResolution.value + def doubleDefinitions = isScala3 && contains(o.doubleDefinitions) + def etaExpandAlways = isScala3 && contains(o.etaExpandAlways) + } // used in sbt def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings @@ -1168,19 +1201,19 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private class SyncedCompilationBuffer { self => private val underlying = new mutable.ArrayBuffer[CompilationUnit] def size = synchronized { underlying.size } - def +=(cu: CompilationUnit): this.type = { synchronized { underlying += cu }; this } - def head: CompilationUnit = synchronized{ underlying.head } + def +=(cu: CompilationUnit): this.type = synchronized { underlying += cu; this } + def head: CompilationUnit = synchronized { underlying.head } def apply(i: Int): CompilationUnit = synchronized { underlying(i) } def 
iterator: Iterator[CompilationUnit] = new collection.AbstractIterator[CompilationUnit] { private var used = 0 - def hasNext = self.synchronized{ used < underlying.size } - def next = self.synchronized { + def hasNext = self.synchronized { used < underlying.size } + def next() = self.synchronized { if (!hasNext) throw new NoSuchElementException("next on empty Iterator") used += 1 underlying(used-1) } } - def toList: List[CompilationUnit] = synchronized{ underlying.toList } + def toList: List[CompilationUnit] = synchronized { underlying.toList } } private val unitbuf = new SyncedCompilationBuffer @@ -1191,7 +1224,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val symSource = new mutable.HashMap[Symbol, AbstractFile] /** A map from compiled top-level symbols to their picklers */ - val symData = new mutable.AnyRefMap[Symbol, PickleBuffer] + val symData = new mutable.HashMap[Symbol, PickleBuffer] private var phasec: Int = 0 // phases completed private var unitc: Int = 0 // units completed this phase @@ -1244,7 +1277,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } else skippable || !pd.enabled } - val phs = phaseDescriptors takeWhile unstoppable filterNot skippable + val phs = phaseDescriptors.takeWhile(unstoppable).filterNot(skippable) // Ensure there is a terminal phase at the end, since -Ystop may have limited the phases. if (phs.isEmpty || !phs.last.terminal) { val t = if (phaseDescriptors.last.terminal) phaseDescriptors.last else terminal @@ -1252,9 +1285,14 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } else phs } // Create phases and link them together. We supply the previous, and the ctor sets prev.next. 
- val last = components.foldLeft(NoPhase: Phase)((prev, c) => c newPhase prev) - // rewind (Iterator.iterate(last)(_.prev) dropWhile (_.prev ne NoPhase)).next - val first = { var p = last ; while (p.prev ne NoPhase) p = p.prev ; p } + val phaseList = { + val last = components.foldLeft(NoPhase: Phase)((prev, c) => c.newPhase(prev)) + Iterator.iterate(last)(_.prev).takeWhile(_ != NoPhase).toList.reverse + } + nextFrom = Array.tabulate(phaseList.maxBy(_.id).id)(infoTransformers.nextFrom(_)) + //println(s"nextFrom: ${scala.runtime.ScalaRunTime.stringOf(nextFrom.map(_.pid))}") + //println(s"phaseList: ${scala.runtime.ScalaRunTime.stringOf(phaseList.map(_.name))}") + val first = phaseList.head val ss = settings // As a final courtesy, see if the settings make any sense at all. @@ -1262,27 +1300,26 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // doesn't select a unique phase, that might be surprising too. def checkPhaseSettings(including: Boolean, specs: Seq[String]*) = { def isRange(s: String) = s.forall(c => c.isDigit || c == '-') - def isSpecial(s: String) = (s == "all" || isRange(s)) - val setting = new ss.PhasesSetting("fake","fake") - for (p <- specs.flatten.to[Set]) { - setting.value = List(p) - val count = ( - if (including) first.iterator count (setting containsPhase _) - else phaseDescriptors count (setting contains _.phaseName) - ) + def isMulti(s: String) = s == "_" || s == "all" || isRange(s) || s.startsWith("~") + val tester = new ss.PhasesSetting("fake","fake") + for (p <- specs.flatten.to(Set)) { + tester.value = List(p) + val count = + if (including) first.iterator.count(tester.containsPhase(_)) + else phaseDescriptors.count(pd => tester.contains(pd.phaseName) || tester.contains(s"~${pd.phaseName}")) if (count == 0) runReporting.warning(NoPosition, s"'$p' specifies no phase", WarningCategory.Other, site = "") - if (count > 1 && !isSpecial(p)) runReporting.warning(NoPosition, s"'$p' selects $count phases", WarningCategory.Other, site 
= "") - if (!including && isSpecial(p)) globalError(s"-Yskip and -Ystop values must name phases: '$p'") - setting.clear() + if (count > 1 && !isMulti(p)) runReporting.warning(NoPosition, s"'$p' selects $count phases", WarningCategory.Other, site = "") + if (!including && isMulti(p)) globalError(s"-Yskip and -Ystop values must name phases: '$p'") + tester.clear() } } // phases that are excluded; for historical reasons, these settings only select by phase name val exclusions = List(ss.stopBefore, ss.stopAfter, ss.skip) val inclusions = ss.visibleSettings collect { - case s: ss.PhasesSetting if !(exclusions contains s) => s.value + case s: ss.PhasesSetting if !exclusions.contains(s) => s.value } checkPhaseSettings(including = true, inclusions.toSeq: _*) - checkPhaseSettings(including = false, exclusions map (_.value): _*) + checkPhaseSettings(including = false, exclusions.map(_.value): _*) // Report the overhead of statistics measurements per every run if (settings.areStatisticsEnabled && settings.Ystatistics.value.nonEmpty) @@ -1300,18 +1337,18 @@ class Global(var currentSettings: Settings, reporter0: Reporter) * @param current number of "progress units" completed * @param total total number of "progress units" in run */ - def progress(current: Int, total: Int) {} + def progress(current: Int, total: Int): Unit = {} /** * For subclasses to override. Called when `phase` is about to be run on `unit`. * Variables are passed explicitly to indicate that `globalPhase` and `currentUnit` have been set. 
*/ - def informUnitStarting(phase: Phase, unit: CompilationUnit) { } + def informUnitStarting(phase: Phase, unit: CompilationUnit): Unit = { } /** take note that phase is completed * (for progress reporting) */ - def advancePhase() { + def advancePhase(): Unit = { unitc = 0 phasec += 1 refreshProgress() @@ -1319,7 +1356,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** take note that a phase on a unit is completed * (for progress reporting) */ - def advanceUnit() { + def advanceUnit(): Unit = { unitc += 1 refreshProgress() } @@ -1371,11 +1408,11 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** add unit to be compiled in this run */ - private def addUnit(unit: CompilationUnit) { + private def addUnit(unit: CompilationUnit): Unit = { unitbuf += unit compiledFiles += unit.source.file.path } - private def warnDeprecatedAndConflictingSettings() { + private def warnDeprecatedAndConflictingSettings(): Unit = { // issue warnings for any usage of deprecated settings settings.userSetSettings filter (_.isDeprecated) foreach { s => runReporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get, "", "", "") @@ -1397,7 +1434,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody! // Here we work around that wrinkle by claiming that a pre-initialized member is compiled in // *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`. 
- def compiles(sym: Symbol): Boolean = + @tailrec + final def compiles(sym: Symbol): Boolean = if (sym == NoSymbol) false else if (symSource.isDefinedAt(sym)) true else if (!sym.isTopLevel) compiles(sym.originalEnclosingTopLevelClassOrDummy) @@ -1412,7 +1450,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // --------------- Compilation methods ---------------------------- - protected def runCheckers() { + protected def runCheckers(): Unit = { val toCheck = globalPhase.prev val canCheck = toCheck.checkable val fmt = if (canCheck) "[Now checking: %s]" else "[Not checkable: %s]" @@ -1428,13 +1466,14 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private def showMembers() = { // Allows for syntax like scalac -Xshow-class Random@erasure,typer + @nowarn def splitClassAndPhase(str: String, term: Boolean): Name = { def mkName(s: String) = if (term) newTermName(s) else newTypeName(s) (str indexOf '@') match { case -1 => mkName(str) case idx => val phasePart = str drop (idx + 1) - settings.Yshow.tryToSetColon(phasePart split ',' toList) + settings.Yshow.tryToSetColon(phasePart.split(',').toList) mkName(str take idx) } } @@ -1450,13 +1489,14 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val global: Global.this.type = Global.this lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x)) def snapshot() = { - inform("\n[[symbol layout at end of " + phase + "]]") + println(s"\n[[symbol layout at end of $phase]]") exitingPhase(phase) { trackers foreach { t => t.snapshot() - inform(t.show("Heading from " + phase.prev.name + " to " + phase.name)) + println(t.show(s"Heading from ${phase.prev.name} to ${phase.name}")) } } + println() } } @@ -1464,46 +1504,44 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. 
*/ val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions - private def printArgs(sources: List[SourceFile]): Unit = { - if (settings.printArgs.isSetByUser) { - val argsFile = (settings.recreateArgs ::: sources.map(_.file.absolute.toString())).mkString("", "\n", "\n") - settings.printArgs.value match { + private def printArgs(sources: List[SourceFile]): Unit = + settings.printArgs.valueSetByUser foreach { value => + def quote(s: String) = if (s.charAt(0) != '"' && s.contains(' ')) "\"" + s + "\"" else s + val allArgs = settings.recreateArgs ::: sources.map(_.file.absolute.toString()) + val argsFile = allArgs.map(quote).mkString("", "\n", "\n") + value match { case "-" => reporter.echo(argsFile) case pathString => import java.nio.file._ val path = Paths.get(pathString) - Files.write(path, argsFile.getBytes(Charset.forName("UTF-8"))) - reporter.echo("Compiler arguments written to: " + path) + Files.write(path, argsFile.getBytes(UTF_8)) + reporter.echo(s"Compiler arguments written to: $path") } } - } - /** Compile list of source files, - * unless there is a problem already, - * such as a plugin was passed a bad option. + /** Compile a list of source files, unless there is a problem already, e.g., a plugin was passed a bad option. 
*/ def compileSources(sources: List[SourceFile]): Unit = if (!reporter.hasErrors) { printArgs(sources) - def checkDeprecations() = { warnDeprecatedAndConflictingSettings() reporting.summarizeErrors() } - - val units = sources map scripted map (file => new CompilationUnit(file, warningFreshNameCreator)) - - units match { + sources match { case Nil => checkDeprecations() // nothing to compile, report deprecated options - case _ => compileUnits(units, firstPhase) + case _ => + val units = sources.map(src => new CompilationUnit(scripted(src), warningFreshNameCreator)) + compileUnits(units) } } private final val GlobalPhaseName = "global (synthetic)" protected final val totalCompileTime = statistics.newTimer("#total compile time", GlobalPhaseName) - def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) - private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { + def compileUnits(units: List[CompilationUnit], fromPhase: Phase = firstPhase): Unit = + compileUnitsInternal(units, fromPhase) + private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase): Unit = { units foreach addUnit reporter.reset() warnDeprecatedAndConflictingSettings() @@ -1519,7 +1557,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val profileBefore=profiler.beforePhase(phase) try globalPhase.run() - catch { case _: InterruptedException => reporter match { case cancelable: reporters.Reporter => cancelable.cancelled = true case _ => } } + catch { case _: InterruptedException => reporter.cancelled = true } finally if (timePhases) statistics.stopTimer(phaseTimer, startPhase) else () profiler.afterPhase(phase, profileBefore) @@ -1527,14 +1565,14 @@ class Global(var currentSettings: Settings, reporter0: Reporter) informTime(globalPhase.description, phaseTimer.nanos) // progress update - if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) { + 
if (settings.Xprint.containsPhase(globalPhase) || settings.printLate.value && runIsAt(cleanupPhase)) { // print trees - if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll() + if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) nodePrinters.printAll() else printAllUnits() } // print the symbols presently attached to AST nodes - if (settings.Yshowsyms) + if (settings.Yshowsyms.value) trackerFactory.snapshot() // print members @@ -1542,10 +1580,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) showMembers() // browse trees with swing tree viewer - if (settings.browse containsPhase globalPhase) - treeBrowser browse (phase.name, units) + if (settings.browse.containsPhase(globalPhase)) + treeBrowser.browse(phase.name, units) - if ((settings.Yvalidatepos containsPhase globalPhase) && !reporter.hasErrors) + if (!reporter.hasErrors && settings.Yvalidatepos.containsPhase(globalPhase)) currentRun.units.foreach(unit => validatePositions(unit.body)) // move the pointer @@ -1556,16 +1594,15 @@ class Global(var currentSettings: Settings, reporter0: Reporter) runCheckers() // output collected statistics - if (settings.areStatisticsEnabled && settings.Ystatistics.contains(phase.name)) + if (settings.YstatisticsEnabled.value && settings.Ystatistics.contains(phase.name)) printStatisticsFor(phase) - if (!globalPhase.hasNext || reporter.hasErrors) - runReporting.warnUnusedSuppressions() - advancePhase() } profiler.finished() + runReporting.runFinished(hasErrors = reporter.hasErrors) + reporting.summarizeErrors() // val allNamesArray: Array[String] = allNames().map(_.toString).toArray.sorted @@ -1598,10 +1635,12 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // Clear any sets or maps created via perRunCaches. 
perRunCaches.clearAll() + if (settings.verbose.value) + println("Name table size after compilation: " + nameTableSize + " chars") } /** Compile list of abstract files. */ - def compileFiles(files: List[AbstractFile]) { + def compileFiles(files: List[AbstractFile]): Unit = { try { val snap = profiler.beforePhase(Global.InitPhase) val sources = files map getSourceFile @@ -1609,25 +1648,28 @@ class Global(var currentSettings: Settings, reporter0: Reporter) compileSources(sources) } catch { - case ex: InterruptedException => reporter match { case cancelable: reporters.Reporter => cancelable.cancelled = true case _ => } + case _: InterruptedException => reporter.cancelled = true case ex: IOException => globalError(ex.getMessage()) } } /** Compile list of files given by their names */ - def compile(filenames: List[String]) { + def compile(filenames: List[String]): Unit = { try { val snap = profiler.beforePhase(Global.InitPhase) val sources: List[SourceFile] = - if (settings.script.isSetByUser && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time")) - else filenames map getSourceFile + if (settings.script.isSetByUser && filenames.size > 1) { + globalError("can only compile one script at a time") + Nil + } + else filenames.map(getSourceFile) profiler.afterPhase(Global.InitPhase, snap) compileSources(sources) } catch { - case ex: InterruptedException => reporter match { case cancelable: reporters.Reporter => cancelable.cancelled = true case _ => } + case _: InterruptedException => reporter.cancelled = true case ex: IOException => globalError(ex.getMessage()) } } @@ -1641,14 +1683,13 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Compile abstract file until `globalPhase`, but at least * to phase "namer". 
*/ - def compileLate(file: AbstractFile) { + def compileLate(file: AbstractFile): Unit = { if (!compiledFiles(file.path)) compileLate(new CompilationUnit(scripted(getSourceFile(file)))) } - /** Compile abstract file until `globalPhase`, but at least to phase "namer". - */ - def compileLate(unit: CompilationUnit) { + /** Compile the unit until `globalPhase`, but at least to phase "typer". */ + def compileLate(unit: CompilationUnit): Unit = { addUnit(unit) if (firstPhase ne null) { // we might get here during initialization, is a source is newer than the binary @@ -1661,8 +1702,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Reset package class to state at typer (not sure what this is needed for?) */ + @tailrec private def resetPackageClass(pclazz: Symbol): Unit = if (typerPhase != NoPhase) { - enteringPhase(firstPhase) { + enteringPhase[Unit](firstPhase) { pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info)) } if (!pclazz.isRoot) resetPackageClass(pclazz.owner) @@ -1672,14 +1714,14 @@ class Global(var currentSettings: Settings, reporter0: Reporter) List(statistics.retainedCount, statistics.retainedByType) private val parserStats = { import statistics.treeNodeCount - if (settings.YhotStatisticsEnabled) treeNodeCount :: hotCounters + if (settings.YhotStatisticsEnabled.value) treeNodeCount :: hotCounters else List(treeNodeCount) } final def printStatisticsFor(phase: Phase) = { inform("*** Cumulative statistics at phase " + phase) - if (settings.YhotStatisticsEnabled) { + if (settings.YhotStatisticsEnabled.value) { // High overhead, only enable retained stats under hot stats statistics.retainedCount.value = 0 for (c <- statistics.retainedByType.keys) @@ -1692,17 +1734,15 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val quants: Iterable[statistics.Quantity] = if (phase.name == "parser") parserStats - else if (settings.YhotStatisticsEnabled) statistics.allQuantities + else if (settings.YhotStatisticsEnabled.value) 
statistics.allQuantities else statistics.allQuantities.filterNot(q => hotCounters.contains(q)) for (q <- quants if q.showAt(phase.name)) inform(q.line) } } // class Run - def printAllUnits() { - print("[[syntax trees at end of %25s]]".format(phase)) - exitingPhase(phase)(currentRun.units foreach { unit => - nodePrinters showUnit unit - }) + def printAllUnits(): Unit = { + print(f"[[syntax trees at end of $phase%25s]]") + exitingPhase(phase)(currentRun.units.foreach(nodePrinters.showUnit(_))) } /** We resolve the class/object ambiguity by passing a type/term name. @@ -1721,7 +1761,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val syms = findMemberFromRoot(fullName) match { // The name as given was not found, so we'll sift through every symbol in // the run looking for plausible matches. - case NoSymbol => phased(currentRun.symSource.keys map (sym => findNamedMember(fullName, sym)) filterNot (_ == NoSymbol) toList) + case NoSymbol => phased(currentRun.symSource.keys.map(findNamedMember(fullName, _)).filterNot(_ == NoSymbol).toList) // The name as given matched, so show only that. 
case sym => List(sym) } @@ -1750,17 +1790,11 @@ class Global(var currentSettings: Settings, reporter0: Reporter) object Global { def apply(settings: Settings, reporter: Reporter): Global = new Global(settings, reporter) - def apply(settings: Settings): Global = new Global(settings, reporter(settings)) - - private def reporter(settings: Settings): FilteringReporter = { - //val loader = ScalaClassLoader(getClass.getClassLoader) // apply does not make delegate - val loader = new ClassLoader(getClass.getClassLoader) with ScalaClassLoader - loader.create[FilteringReporter](settings.reporter.value, settings.errorFn)(settings) - } + def apply(settings: Settings): Global = new Global(settings, Reporter(settings)) private object InitPhase extends Phase(null) { def name = "" override def keepsTypeParams = false - def run() { throw new Error("InitPhase.run") } + def run(): Unit = { throw new Error("InitPhase.run") } } } diff --git a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala index ad1cee5d8ac8..6d507ea6e9d2 100644 --- a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index 0a10667687f5..8a51321016e0 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,22 +13,20 @@ package scala.tools package nsc -import scala.language.postfixOps - /** The main class for NSC, a compiler for the programming * language Scala. */ class MainClass extends Driver with EvalLoop { def resident(compiler: Global): Unit = loop { line => - val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError)) + val command = new CompilerCommand(line.split("\\s+").toList, new Settings(scalacError)) compiler.reporter.reset() new compiler.Run() compile command.files } override def newCompiler(): Global = Global(settings) - override def doCompile(compiler: Global) { - if (settings.resident) resident(compiler) + override def doCompile(compiler: Global): Unit = { + if (settings.resident.value) resident(compiler) else super.doCompile(compiler) } } diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index faeea4e99ace..d82a29d3cca8 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,6 @@ package scala.tools.nsc - /** The main class for NSC, a compiler for the programming * language Scala. 
*/ @@ -30,8 +29,8 @@ object MainBench extends Driver with EvalLoop { var start = System.nanoTime() for (i <- 0 until NIter) { if (i == NIter-1) { - theCompiler.settings.Ystatistics.value = List("all") - theCompiler.settings.YhotStatistics.value = true + theCompiler.settings.Ystatistics.value = List("all") + theCompiler.settings.YhotStatisticsEnabled.value = true } process(args) val end = System.nanoTime() diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index 19da94f879dd..5a9317c7fc22 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,7 +22,7 @@ object MainTokenMetric { private var reporter: ConsoleReporter = _ - def tokenMetric(compiler: Global, fnames: List[String]) { + def tokenMetric(compiler: Global, fnames: List[String]): Unit = { import compiler.CompilationUnit import compiler.syntaxAnalyzer.UnitScanner import ast.parser.Tokens.EOF @@ -41,8 +41,8 @@ object MainTokenMetric { Console.println(totale.toString()+" total") } - def process(args: Array[String]) { - val settings = new Settings(sys.error) + def process(args: Array[String]): Unit = { + val settings = new Settings(msg => throw new RuntimeException(msg)) reporter = new ConsoleReporter(settings) val command = new CompilerCommand(args.toList, settings) try { @@ -56,9 +56,9 @@ object MainTokenMetric { } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { process(args) - sys.exit(if (reporter.hasErrors) 1 else 0) + System.exit(if (reporter.hasErrors) 1 else 0) } } diff --git a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala index 
e4ab36c35225..11ef67fe93af 100644 --- a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala +++ b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,7 @@ import java.io.{Writer, PrintWriter} class NewLinePrintWriter(out: Writer, autoFlush: Boolean) extends PrintWriter(out, autoFlush) { - def this(out: Writer) = this(out, false) - override def println() { print("\n"); flush() } + def this(out: Writer) = this(out, autoFlush = false) + override def println(): Unit = { print("\n"); flush() } } diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index 3e1e4bb92291..0e09f1908e1a 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,34 +13,33 @@ package scala.tools.nsc import java.net.URL -import util.Exceptional.unwrap +import util.Exceptional.rootCause import scala.reflect.internal.util.ScalaClassLoader +import scala.util.control.NonFatal trait CommonRunner { /** Run a given object, specified by name, using a * specified classpath and argument list. 
* - * @throws ClassNotFoundException - * @throws NoSuchMethodException - * @throws InvocationTargetException + * @throws java.lang.ClassNotFoundException + * @throws java.lang.NoSuchMethodException + * @throws java.lang.reflect.InvocationTargetException */ - def run(urls: Seq[URL], objectName: String, arguments: Seq[String]) { + def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = { import scala.reflect.internal.util.RichClassLoader._ ScalaClassLoader.fromURLsParallelCapable(urls).run(objectName, arguments) } - /** Catches exceptions enumerated by run (in the case of InvocationTargetException, - * unwrapping it) and returns it any thrown in Left(x). + /** Catches any non-fatal exception thrown by run (in the case of InvocationTargetException, + * unwrapping it) and returns it in an Option. */ - def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = { - try { run(urls, objectName, arguments) ; Right(true) } - catch { case e: Throwable => Left(unwrap(e)) } - } + def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Option[Throwable] = + try { run(urls, objectName, arguments) ; None } + catch { case NonFatal(e) => Some(rootCause(e)) } } /** An object that runs another object specified by name. * * @author Lex Spoon - * @version 1.1, 2007/7/13 */ -object ObjectRunner extends CommonRunner { } +object ObjectRunner extends CommonRunner diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala index e2aee496ef83..2820a106dfaf 100644 --- a/src/compiler/scala/tools/nsc/Parsing.scala +++ b/src/compiler/scala/tools/nsc/Parsing.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,7 @@ package scala package tools.nsc import scala.reflect.internal.Positions +import scala.reflect.internal.util.CodeAction /** Similar to Reporting: gather global functionality specific to parsing. */ @@ -35,8 +36,8 @@ trait Parsing { self : Positions with Reporting => } def incompleteHandled = incompleteHandler != null - def incompleteInputError(pos: Position, msg: String): Unit = + def incompleteInputError(pos: Position, msg: String, actions: List[CodeAction] = Nil): Unit = if (incompleteHandled) incompleteHandler(pos, msg) - else reporter.error(pos, msg) + else reporter.error(pos, msg, actions) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 9332866f1ad8..fa7420ce580f 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,304 +12,278 @@ package scala.tools.nsc -import scala.collection.mutable -import scala.language.postfixOps -import scala.tools.nsc.Reporting.WarningCategory +import java.util.concurrent.atomic.AtomicInteger +import scala.collection.mutable, mutable.ArrayDeque, mutable.ListBuffer +import scala.reflect.io.{File, Path} +import scala.util.chaining._ -/** Converts an unordered morass of components into an order that - * satisfies their mutual constraints. - * @see SIP 00002. You have read SIP 00002? +/** Sorts the global phasesSet according to SubComponent constraints. 
*/ trait PhaseAssembly { - self: Global => + this: Global => - /** - * Aux data structure for solving the constraint system - * The dependency graph container with helper methods for node and edge creation + /** Called by Global#computePhaseDescriptors to compute phase order. + * + * The phases to assemble are provided by `phasesSet`, which must contain + * an `initial` phase. If no phase is `terminal`, then `global.terminal` is added. */ - private[nsc] class DependencyGraph { - - /** Simple edge with to and from refs */ - case class Edge(var frm: Node, var to: Node, var hard: Boolean) - - /** - * Simple node with name and object ref for the phase object, - * also sets of in and out going dependencies - */ - case class Node(name: String) { - val phasename = name - var phaseobj: Option[List[SubComponent]] = None - val after = new mutable.HashSet[Edge]() - var before = new mutable.HashSet[Edge]() - var visited = false - var level = 0 - - def allPhaseNames(): String = phaseobj match { - case None => phasename - case Some(lst) => lst.map(_.phaseName).reduceLeft(_+","+_) - } + def computePhaseAssembly(): List[SubComponent] = { + require(phasesSet.exists(phase => phase.initial || phase.phaseName == DependencyGraph.Parser), "Missing initial phase") + if (!phasesSet.exists(phase => phase.terminal || phase.phaseName == DependencyGraph.Terminal)) { + phasesSet.add(terminal) + reporter.warning(NoPosition, "Added default terminal phase") } + val warn = !settings.isScaladoc || settings.isDebug || settings.showPhases.value + val graph = DependencyGraph(phasesSet, warn) + for (n <- settings.genPhaseGraph.valueSetByUser; d <- settings.outputDirs.getSingleOutput if !d.isVirtual) + DependencyGraph.graphToDotFile(graph, Path(d.file) / File(s"$n.dot")) + graph.compilerPhaseList().tap(_ => graph.warnings.foreach(msg => reporter.warning(NoPosition, msg))) + } +} - val nodes = new mutable.HashMap[String,Node]() - val edges = new mutable.HashSet[Edge]() +/** A graph with the given number 
of vertices. + * + * Each vertex is labeled with its phase name. + */ +class DependencyGraph(order: Int, start: String, val components: Map[String, SubComponent]) { + import DependencyGraph.{FollowsNow, Weight} - /** Given a phase object, get the node for this phase object. If the - * node object does not exist, then create it. - */ - def getNodeByPhase(phs: SubComponent): Node = { - val node: Node = getNodeByPhase(phs.phaseName) - node.phaseobj match { - case None => - node.phaseobj = Some(List[SubComponent](phs)) - case _ => - } - node - } + //private final val debugging = false - /* Given the name of a phase object, get the node for that name. If the - * node object does not exits, then create it. - */ - def getNodeByPhase(name: String): Node = - nodes.getOrElseUpdate(name, Node(name)) + private var messages: List[String] = Nil + def warning(message: String): Unit = messages ::= message + def warnings: List[String] = messages.reverse.tap(_ => messages = Nil) - /* Connect the frm and to nodes with an edge and make it soft. - * Also add the edge object to the set of edges, and to the dependency - * list of the nodes - */ - def softConnectNodes(frm: Node, to: Node) { - val e = Edge(frm, to, false) - this.edges += e + /** For ith vertex, its outgoing edges. */ + private val adjacency: Array[List[Edge]] = Array.fill(order)(Nil) - frm.after += e - to.before += e - } + /** Directed edge. */ + private case class Edge(from: Int, to: Int, weight: Weight) - /* Connect the frm and to nodes with an edge and make it hard. 
- * Also add the edge object to the set of edges, and to the dependency - * list of the nodes - */ - def hardConnectNodes(frm: Node, to: Node) { - val e = Edge(frm, to, true) - this.edges += e + // phase names and their vertex index + private val nodeCount = new AtomicInteger + private val nodes = mutable.HashMap.empty[String, Int] // name to index + private val names = Array.ofDim[String](order) // index to name - frm.after += e - to.before += e + /** Add the edge between named phases, where `to` follows `from`. + */ + private def addEdge(from: String, to: String, weight: Weight): Unit = { + def getNode(name: String): Int = { + def installName(name: String, n: Int): Unit = + if (n >= names.length) throw new FatalError(names.mkString(s"Extra name $name; names [",",","]")) + else names(n) = name + nodes.getOrElseUpdate(name, nodeCount.getAndIncrement().tap(installName(name, _))) } + val v = getNode(from) + val w = getNode(to) + adjacency(v).find(_.to == w) match { + case None => + adjacency(v) ::= Edge(from = v, to = w, weight) + case Some(_) if weight == FollowsNow => // retain runsRightAfter if there is a competing constraint + adjacency(v) = Edge(from = v, to = w, weight) :: adjacency(v).filterNot(_.to == w) + case _ => + } + } - /* Given the entire graph, collect the phase objects at each level, where the phase - * names are sorted alphabetical at each level, into the compiler phase list - */ - def compilerPhaseList(): List[SubComponent] = - nodes.values.toList filter (_.level > 0) sortBy (x => (x.level, x.phasename)) flatMap (_.phaseobj) flatten - - /* Test if there are cycles in the graph, assign levels to the nodes - * and collapse hard links into nodes - */ - def collapseHardLinksAndLevels(node: Node, lvl: Int) { - if (node.visited) { - dump("phase-cycle") - throw new FatalError(s"Cycle in phase dependencies detected at ${node.phasename}, created phase-cycle.dot") + /** Find unreachable vertices. 
+ * Input must be acyclic and a vertex can have only one outgoing FollowsNow edge. + */ + private def validate(warn: Boolean): Set[String] = if (order == 1) Set.empty else { + def checkFollowsNow(v: Int): Unit = + adjacency(v).foldLeft(-1) { (w, e) => + if (e.weight != FollowsNow) w + else if (w == -1) e.to + else throw new FatalError(s"Phases ${names(w)} and ${names(e.to)} both immediately follow ${names(v)}") } - - val initLevel = node.level - val levelUp = initLevel < lvl - if (levelUp) { - node.level = lvl + val seen = Array.ofDim[Boolean](order) + val onPath = Array.ofDim[Boolean](order) + val stack = mutable.Stack.empty[(Int, List[Edge])] // a vertex and list of edges remaining to follow + def walk(): Unit = { + nodes(start).tap { start => + stack.push(start -> adjacency(start)) } - if (initLevel != 0) { - if (!levelUp) { - // no need to revisit - node.visited = false - return + while (!stack.isEmpty) { + val (v, edges) = stack.pop() + if (!seen(v)) { + checkFollowsNow(v) + seen(v) = true } - } - var befores = node.before - def hasHardLinks() = befores.exists(_.hard) - while (hasHardLinks()) { - for (hl <- befores) { - if (hl.hard) { - node.phaseobj = Some(node.phaseobj.get ++ hl.frm.phaseobj.get) - node.before = hl.frm.before - nodes -= hl.frm.phasename - edges -= hl - for (edge <- node.before) edge.to = node - } + onPath(v) = true + edges match { + case Edge(_, to, _) :: edges => + if (onPath(to)) { + var path = v :: to :: Nil + while (path.head != to) + path ::= stack.pop()._1 + throw new FatalError(s"Phases form a cycle: ${path.map(names(_)).mkString(" -> ")}") + } + stack.push(v -> edges) + if (!seen(to)) + stack.push(to -> adjacency(to)) + case _ => onPath(v) = false } - befores = node.before } - node.visited = true + } + walk() + names.iterator.zipWithIndex.collect { case (n, i) if !seen(i) => + if (warn) warning(s"Dropping phase ${names(i)}, it is not reachable from $start") + n + }.toSet + } - for (edge <- node.before) { - collapseHardLinksAndLevels( 
edge.frm, lvl + 1) - } + def compilerPhaseList(): List[SubComponent] = if (order == 1) List(components(start)) else { + // distance from source to each vertex + val distance = Array.fill[Int](order)(Int.MinValue) - node.visited = false - } + // incoming edge terminating in each vertex for the current path + val edgeTo = Array.ofDim[Edge](order) - /* Find all edges in the given graph that are hard links. For each hard link we - * need to check that it's the only dependency. If not, then we will promote the - * other dependencies down - */ - def validateAndEnforceHardlinks() { - var hardlinks = edges.filter(_.hard) - for (hl <- hardlinks) { - if (hl.frm.after.size > 1) { - dump("phase-order") - throw new FatalError(s"Phase ${hl.frm.phasename} can't follow ${hl.to.phasename}, created phase-order.dot") - } - } + // whether vertex is on the queue + val enqueued = Array.ofDim[Boolean](order) - var rerun = true - while (rerun) { - rerun = false - hardlinks = edges.filter(_.hard) - for (hl <- hardlinks) { - val sanity = Nil ++ hl.to.before.filter(_.hard) - if (sanity.length == 0) { - throw new FatalError("There is no runs right after dependency, where there should be one! 
This is not supposed to happen!") - } else if (sanity.length > 1) { - dump("phase-order") - val following = (sanity map (_.frm.phasename)).sorted mkString "," - throw new FatalError(s"Multiple phases want to run right after ${sanity.head.to.phasename}; followers: $following; created phase-order.dot") - } else { + // vertices to process + val queue = mutable.Queue.empty[Int] - val promote = hl.to.before.filter(e => (!e.hard)) - hl.to.before.clear() - sanity foreach (edge => hl.to.before += edge) - for (edge <- promote) { - rerun = true - informProgress( - "promote the dependency of " + edge.frm.phasename + - ": " + edge.to.phasename + " => " + hl.frm.phasename) - edge.to = hl.frm - hl.frm.before += edge - } + def enqueue(v: Int): Unit = if (!enqueued(v)) queue.enqueue(v).tap(_ => enqueued(v) = true) + + def dequeue(): Int = queue.dequeue().tap(v => enqueued(v) = false) + + //def namedEdge(e: Edge): String = if (e == null) "[no edge]" else s"${names(e.from)} ${if (e.weight == FollowsNow) "=" else "-"}> ${names(e.to)}" + + /** Remove a vertex from the queue and check outgoing edges: + * if an edge improves (increases) the distance at the terminal, + * record that as the new incoming edge and enqueue that vertex + * to propagate updates. 
+ */ + def relax(): Unit = { + nodes(start).tap { start => + distance(start) = 0 + enqueue(start) + } + while (!queue.isEmpty) { + val v = dequeue() + //if (debugging) println(s"deq ${names(v)}") + for (e <- adjacency(v)) { + val w = e.to + /* cannot happen as `runsRightAfter: Option[String]` is the only way to introduce a `FollowsNow` + val e2 = edgeTo(w) + if (e.weight == FollowsNow && e2 != null && e2.weight == FollowsNow && e.from != e2.from) + throw new FatalError(s"${names(w)} cannot follow right after both ${names(e.from)} and ${names(e2.from)}") + */ + if (distance(w) < distance(v) + e.weight) { + distance(w) = distance(v) + e.weight + edgeTo(w) = e + enqueue(w) + //if (debugging) println(s"update ${namedEdge(e)} dist = ${distance(w)}, enq ${names(w)}") } } } + //if (debugging) edgeTo.foreach(e => println(namedEdge(e))) } - - /** Remove all nodes in the given graph, that have no phase object - * Make sure to clean up all edges when removing the node object - * `Inform` with warnings, if an external phase has a - * dependency on something that is dropped. + /** Put the vertices in a linear order. + * + * `Follows` edges increase the level, `FollowsNow` don't. + * Partition by "level" or distance from start. + * Partition the level into "anchors" that follow a node in the previous level, and "followers" (nodes + * with a `FollowsNow` edge). + * Starting at the "ends", build the chains of `FollowsNow` nodes within the level. Each chain leads to an anchor. + * The anchors are sorted by name, then the chains are flattened. 
*/ - def removeDanglingNodes() { - for (node <- nodes.values filter (_.phaseobj.isEmpty)) { - val msg = "dropping dependency on node with no phase object: "+node.phasename - informProgress(msg) - nodes -= node.phasename - - for (edge <- node.before) { - edges -= edge - edge.frm.after -= edge - if (edge.frm.phaseobj exists (lsc => !lsc.head.internal)) - runReporting.warning(NoPosition, msg, WarningCategory.Other, site = "") + def traverse(): List[SubComponent] = { + def componentOf(i: Int) = components(names(i)) + def sortComponents(c: SubComponent, d: SubComponent): Boolean = + // sort by name only, like the old implementation (pre scala/scala#10687) + /*c.internal && !d.internal ||*/ c.phaseName.compareTo(d.phaseName) < 0 + def sortVertex(i: Int, j: Int): Boolean = sortComponents(componentOf(i), componentOf(j)) + + distance.zipWithIndex.groupBy(_._1).toList.sortBy(_._1) + .flatMap { case (_, dis) => + val vs = dis.map { case (_, i) => i } + val (anchors, followers) = vs.partition(v => edgeTo(v) == null || edgeTo(v).weight != FollowsNow) + //if (debugging) println(s"d=$d, anchors=${anchors.toList.map(n => names(n))}, followers=${followers.toList.map(n => names(n))}") + if (followers.isEmpty) + anchors.toList.map(componentOf).sortWith(sortComponents) + else { + val froms = followers.map(v => edgeTo(v).from).toSet + val ends = followers.iterator.filterNot(froms).toList + val chains: Array[ArrayDeque[Int]] = anchors.map(ArrayDeque(_)) + def drill(v: Int, path: List[Int]): Unit = + edgeTo(v) match { + case e if e != null && e.weight == FollowsNow => drill(e.from, v :: path) + case _ => chains.find(_.apply(0) == v).foreach(deque => path.foreach(deque.append)) + } + ends.foreach(drill(_, Nil)) + chains.sortWith((p, q) => sortVertex(p(0), q(0))).toList.flatten.map(componentOf) } } } - - def dump(title: String = "phase-assembly") = graphToDotFile(this, s"$title.dot") - } - - - /** Called by Global#computePhaseDescriptors to compute phase order. 
*/ - def computePhaseAssembly(): List[SubComponent] = { - - // Add all phases in the set to the graph - val graph = phasesSetToDepGraph(phasesSet) - - val dot = settings.genPhaseGraph.valueSetByUser - - // Output the phase dependency graph at this stage - def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot")) - - dump(1) - - // Remove nodes without phaseobj - graph.removeDanglingNodes() - - dump(2) - - // Validate and Enforce hardlinks / runsRightAfter and promote nodes down the tree - graph.validateAndEnforceHardlinks() - - dump(3) - - // test for cycles, assign levels and collapse hard links into nodes - graph.collapseHardLinksAndLevels(graph.getNodeByPhase("parser"), 1) - - dump(4) - - // assemble the compiler - graph.compilerPhaseList() + relax() + traverse() } - - /** Given the phases set, will build a dependency graph from the phases set - * Using the aux. method of the DependencyGraph to create nodes and edges. +} +object DependencyGraph { + + type Weight = Int + final val FollowsNow = 0 + final val Follows = 1 + + final val Parser = "parser" + final val Terminal = "terminal" + + /** Create a DependencyGraph from the given phases. The graph must be acyclic. + * + * A component must be declared as "initial". + * If no phase is "initial" but a phase is named "parser", it is taken as initial. + * If no phase is "terminal" but a phase is named "terminal", it is taken as terminal. + * Warnings are issued for invalid constraints (runsAfter / runsRightAfter / runsBefore) if `warn` is true. + * Components without a valid "runsAfter" or "runsRightAfter" are dropped with an "unreachable" warning. 
*/ - private[nsc] def phasesSetToDepGraph(phsSet: Iterable[SubComponent]): DependencyGraph = { - val graph = new DependencyGraph() - - for (phs <- phsSet) { - - val fromnode = graph.getNodeByPhase(phs) - - phs.runsRightAfter match { - case None => - for (phsname <- phs.runsAfter) { - if (phsname != "terminal") { - val tonode = graph.getNodeByPhase(phsname) - graph.softConnectNodes(fromnode, tonode) - } else { - globalError("[phase assembly, after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]") - } - } - for (phsname <- phs.runsBefore) { - if (phsname != "parser") { - val tonode = graph.getNodeByPhase(phsname) - graph.softConnectNodes(tonode, fromnode) - } else { - globalError("[phase assembly, before dependency on parser phase not allowed: " + phsname + " => "+ fromnode.phasename + "]") - } - } - case Some(phsname) => - if (phsname != "terminal") { - val tonode = graph.getNodeByPhase(phsname) - graph.hardConnectNodes(fromnode, tonode) - } else { - globalError("[phase assembly, right after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]") - } + def apply(phases: Iterable[SubComponent], warn: Boolean = true): DependencyGraph = { + val start = phases.find(_.initial) + .orElse(phases.find(_.phaseName == Parser)) + .getOrElse(throw new AssertionError("Missing initial component")) + val end = phases.find(_.terminal) + .orElse(phases.find(_.phaseName == Terminal)) + .getOrElse(throw new AssertionError("Missing terminal component")) + val graph = new DependencyGraph(phases.size, start.phaseName, phases.map(p => p.phaseName -> p).toMap) + def phaseTypo(name: String) = + if (graph.components.contains(name)) "" + else graph.components.keysIterator.filter(util.EditDistance.levenshtein(name, _) < 3).toList match { + case Nil => "" + case close => s" - did you mean ${util.StringUtil.oxford(close, "or")}?" 
} + for (p <- phases) { + require(p.phaseName.nonEmpty, "Phase name must be non-empty.") + def checkConstraint(name: String, constraint: String): Boolean = + graph.components.contains(name).tap(ok => if (!ok && warn) graph.warning(s"No phase `$name` for ${p.phaseName}.$constraint${phaseTypo(name)}")) + for (after <- p.runsRightAfter if after.nonEmpty && checkConstraint(after, "runsRightAfter")) + graph.addEdge(after, p.phaseName, FollowsNow) + for (after <- p.runsAfter if after.nonEmpty && !p.runsRightAfter.contains(after) && checkConstraint(after, "runsAfter")) + graph.addEdge(after, p.phaseName, Follows) + for (before <- p.runsBefore if before.nonEmpty && checkConstraint(before, "runsBefore")) + graph.addEdge(p.phaseName, before, Follows) + // Add "runsBefore terminal" to phases without (or with invalid) runsBefore + if (p != end || p == end && p == start) + if (!p.runsBefore.exists(graph.components.contains)) + graph.addEdge(p.phaseName, end.phaseName, Follows) } - graph + val unreachable = graph.validate(warn) + if (unreachable.isEmpty) graph + else apply(phases.filterNot(p => unreachable(p.phaseName)), warn).tap(res => graph.warnings.foreach(res.warning)) } - /* This is a helper method, that given a dependency graph will generate a graphviz dot - * file showing its structure. - * Plug-in supplied phases are marked as green nodes and hard links are marked as blue edges. + /** Emit a graphviz dot file for the graph. + * Plug-in supplied phases are marked as green nodes and hard links are marked as blue edges. 
*/ - private def graphToDotFile(graph: DependencyGraph, filename: String) { - val sbuf = new StringBuilder - val extnodes = new mutable.HashSet[graph.Node]() - val fatnodes = new mutable.HashSet[graph.Node]() - sbuf.append("digraph G {\n") - for (edge <- graph.edges) { - sbuf.append("\"" + edge.frm.allPhaseNames + "(" + edge.frm.level + ")" + "\"->\"" + edge.to.allPhaseNames + "(" + edge.to.level + ")" + "\"") - if (!edge.frm.phaseobj.get.head.internal) extnodes += edge.frm - edge.frm.phaseobj foreach (phobjs => if (phobjs.tail.nonEmpty) fatnodes += edge.frm ) - edge.to.phaseobj foreach (phobjs => if (phobjs.tail.nonEmpty) fatnodes += edge.to ) - val color = if (edge.hard) "#0000ff" else "#000000" - sbuf.append(s""" [color="$color"]\n""") - } - for (node <- extnodes) { - sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#00ff00\"]\n") - } - for (node <- fatnodes) { - sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#0000ff\"]\n") - } - sbuf.append("}\n") - import reflect.io._ - for (d <- settings.outputDirs.getSingleOutput if !d.isVirtual) Path(d.file) / File(filename) writeAll sbuf.toString + def graphToDotFile(graph: DependencyGraph, file: File): Unit = { + def color(hex: String) = s""" [color="#$hex"]""" + val sb = ListBuffer.empty[String] + sb.addOne("digraph G {") + for (edges <- graph.adjacency; e <- edges) + sb.addOne(s"${graph.names(e.from)} -> ${graph.names(e.to)}${if (e.weight == FollowsNow) color("0000ff") else ""}") + for (n <- graph.names) + sb.addOne(s"${n}${if (graph.components(n).internal) "" else color("00ff00")}") + sb.addOne("}") + file.printlnAll(sb.toList: _*) } } diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala index b566b4ae98c5..6d53127e849f 100644 --- a/src/compiler/scala/tools/nsc/PickleExtractor.scala +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - 
* Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,10 +12,11 @@ package scala.tools.nsc +import java.nio.charset.StandardCharsets.UTF_8 import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} -import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter, collectionAsScalaIterableConverter} +import scala.jdk.CollectionConverters._ import scala.reflect.internal.pickling.ByteCodecs import scala.reflect.io.RootPath import scala.tools.asm.tree.ClassNode @@ -91,13 +92,13 @@ object PickleExtractor { if (input.visibleAnnotations != null) { input.visibleAnnotations.asScala.foreach { node => if (node.desc == "Lscala/reflect/ScalaSignature;") { - val Array("bytes", data: String) = node.values.toArray() - val bytes = data.getBytes(java.nio.charset.StandardCharsets.UTF_8) + val Array("bytes", data: String) = node.values.toArray(): @unchecked + val bytes = data.getBytes(UTF_8) val len = ByteCodecs.decode(bytes) pickleData = bytes.take(len) } else if (node.desc == "Lscala/reflect/ScalaLongSignature;") { - val Array("bytes", data: java.util.Collection[String @unchecked]) = node.values.toArray() - val encoded = data.asScala.toArray flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + val Array("bytes", data: java.util.Collection[String @unchecked]) = node.values.toArray(): @unchecked + val encoded = data.asScala.toArray.flatMap(_.getBytes(UTF_8)) val len = ByteCodecs.decode(encoded) pickleData = encoded.take(len) } diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 3fe5d7853cb8..2cad5a2eb15e 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and 
Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,7 @@ package scala.tools.nsc import java.io.File import java.lang.Thread.UncaughtExceptionHandler +import java.nio.charset.StandardCharsets.UTF_8 import java.nio.file.{Files, Path, Paths} import java.util.concurrent.ConcurrentHashMap import java.util.{Collections, Locale} @@ -21,10 +22,13 @@ import java.util.{Collections, Locale} import javax.tools.Diagnostic.Kind import javax.tools.{Diagnostic, DiagnosticListener, JavaFileObject, ToolProvider} -import scala.collection.JavaConverters._ -import scala.collection.{immutable, mutable, parallel} +import scala.annotation.nowarn +import scala.collection.{immutable, mutable} +import scala.collection.immutable.ArraySeq.unsafeWrapArray import scala.concurrent._ import scala.concurrent.duration.Duration +import scala.jdk.CollectionConverters._ +import scala.math.Ordering.Double.TotalOrdering import scala.reflect.internal.util.{BatchSourceFile, FakePos, NoPosition, Position} import scala.reflect.io.PlainNioFile import scala.tools.nsc.PipelineMain.{OutlineTypePipeline, Pipeline, Traditional} @@ -41,10 +45,10 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val newExtension = if (useJars) ".jar" else "" val root = file.getRoot // An empty component on Unix, just the drive letter on Windows - val validRootPathComponent = root.toString.replaceAllLiterally("/", "").replaceAllLiterally(":", "") + val validRootPathComponent = root.toString.replace("/", "").replace(":", "") val result = changeExtension(pickleCache.resolve(validRootPathComponent).resolve(root.relativize(file)).normalize(), newExtension) if (useJars) Files.createDirectories(result.getParent) - strippedAndExportedClassPath.put(file.toRealPath().normalize(), result) + strippedAndExportedClassPath.update(file.toRealPath().normalize(), result) result } @@ -63,7 +67,7 @@ class 
PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe reporter.echo(NoPosition, msg) } private def reporterError(pos: Position, msg: String): Unit = synchronized { - reporter.echo(msg) + reporter.error(pos, msg) } private object handler extends UncaughtExceptionHandler { @@ -73,7 +77,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe } } - implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) + implicit val executor: ExecutionContext = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) def changeExtension(p: Path, newExtension: String): Path = { val fileName = p.getFileName.toString val changedFileName = fileName.lastIndexOf('.') match { @@ -83,6 +87,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe p.getParent.resolve(changedFileName) } + @nowarn("cat=lint-inaccessible") def writeDotFile(logDir: Path, dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { val builder = new java.lang.StringBuilder() builder.append("digraph projects {\n") @@ -97,7 +102,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe } builder.append("}\n") val path = logDir.resolve("projects.dot") - Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + Files.write(path, builder.toString.getBytes(UTF_8)) reporterEcho("Wrote project dependency graph to: " + path.toAbsolutePath) } @@ -116,7 +121,6 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val projects: List[Task] = argFiles.toList.map(commandFor) if (reporter.hasErrors) return false - val numProjects = projects.size val produces = mutable.LinkedHashMap[Path, Task]() for (p <- projects) { produces(p.outputDir) = p @@ -158,20 +162,21 @@ class 
PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe def sequenceFailSlow[A](fs: Seq[Future[A]]): Future[Seq[A]] = { Future.traverse(fs)(_.transform(tr => Success(tr.toEither))).map { results => - results.collect { case Left(throwable) => throwable }.toList match { + val (failures, successes) = results.partitionMap(identity) + failures.toList match { case head :: rest => rest.foreach(head.addSuppressed(_)); throw head - case _ => results.collect { case Right(value) => value } + case _ => successes } } } def awaitDone(): Unit = { - val allFutures: immutable.Seq[Future[Any]] = projects.flatMap(_.futures) + val allFutures: immutable.Seq[Future[_]] = projects.flatMap(_.futures) val numAllFutures = allFutures.size val awaitAllFutures: Future[_] = sequenceFailSlow(allFutures) var lastNumCompleted = allFutures.count(_.isCompleted) while (true) try { - Await.result(awaitAllFutures, Duration(60, "s")) + Await.ready(awaitAllFutures, Duration(60, "s")) timer.stop() val numCompleted = allFutures.count(_.isCompleted) reporterEcho(s"PROGRESS: $numCompleted / $numAllFutures") @@ -194,7 +199,6 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe } } } - strategy match { case OutlineTypePipeline => projects.foreach { p: Task => @@ -321,6 +325,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe trace.append("""{"traceEvents": [""") val sb = new mutable.StringBuilder(trace) + @annotation.nowarn("cat=deprecation") def durationEvent(name: String, cat: String, t: Timer): String = { s"""{"name": "$name", "cat": "$cat", "ph": "X", "ts": ${(t.startMicros).toLong}, "dur": ${(t.durationMicros).toLong}, "pid": 0, "tid": ${t.thread.getId}}""" } @@ -345,7 +350,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") val traceFile = logDir.resolve(s"build-${label}.trace") - Files.write(traceFile, 
trace.toString.getBytes()) + Files.write(traceFile, trace.toString.getBytes(UTF_8)) reporterEcho("Chrome trace written to " + traceFile.toAbsolutePath) } @@ -644,7 +649,7 @@ object PipelineMain { def defaultSettings: PipelineSettings = { val strategies = List(OutlineTypePipeline, Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get - val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) + val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", java.lang.Runtime.getRuntime.availableProcessors()) val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") @@ -661,7 +666,7 @@ object PipelineMain { case Array(path) if Files.isDirectory(Paths.get(path)) => Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList case _ => - args.map(Paths.get(_)) + unsafeWrapArray(args.map(Paths.get(_))) } val main = new PipelineMainClass(argFiles, defaultSettings) val result = main.process() @@ -678,9 +683,9 @@ object PipelineMain { //// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList // val argsFiles = List(Paths.get("/Users/jz/code/guardian-frontend/common/target/compile.args")) // val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") -// for (_ <- 1 to 20; n <- List(parallel.availableProcessors); strat <- List(OutlineTypePipeline)) { +// for (_ <- 1 to 20; n <- List(parallel.availableProcessors); start <- List(OutlineTypePipeline)) { // i += 1 -// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars) +// val main = new PipelineMainClass(start + "-" + i, 
n, start, argsFiles, useJars) // println(s"====== ITERATION $i=======") // val result = main.process() // if (!result) diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index a9f345f0d180..b286c4764a57 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,15 +30,12 @@ object Properties extends scala.util.PropertiesTrait { """Welcome to Scala %1$#s (%3$s, Java %2$s). |Type in expressions for evaluation. Or try :help.""".stripMargin ) + def shellBannerString = scalaPropOrElse("shell.banner", shellWelcomeString) // message to display at EOF (which by default ends with // a newline so as not to break the user's terminal) def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator") - // derived values - @deprecated("Emacs support is fully handled by JLine, this will be removed in next release", "2.12.6") - def isEmacsShell = propOrEmpty("env.emacs") != "" - // Where we keep fsc's state (ports/redirection) lazy val scalacDir = (Path(Properties.userHome) / ".scalac").createDirectory(force = false) } diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index ab7a3b1b1955..0af9f21a4608 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,22 +14,26 @@ package scala package tools package nsc +import java.io.IOException +import java.nio.charset.Charset +import java.nio.file.{Files, Path, Paths} import java.util.regex.PatternSyntaxException +import scala.annotation.{nowarn, tailrec} import scala.collection.mutable import scala.reflect.internal import scala.reflect.internal.util.StringOps.countElementsAsString -import scala.reflect.internal.util.{NoSourceFile, Position, SourceFile} +import scala.reflect.internal.util.{CodeAction, NoSourceFile, Position, ReplBatchSourceFile, SourceFile, TextEdit} import scala.tools.nsc.Reporting.Version.{NonParseableVersion, ParseableVersion} import scala.tools.nsc.Reporting._ -import scala.util.matching.{ Regex, UnanchoredRegex } +import scala.util.matching.Regex /** Provides delegates to the reporter doing the actual work. - * PerRunReporting implements per-Run stateful info tracking and reporting + * PerRunReporting implements per-Run stateful info tracking and reporting */ -trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions with CompilationUnits with scala.reflect.internal.Symbols => +trait Reporting extends internal.Reporting { self: ast.Positions with CompilationUnits with internal.Symbols => def settings: Settings - @deprecated("use `globalError` instead") + @deprecated("use `globalError` instead", since = "2.13.4") def error(msg: String): Unit = globalError(msg) // a new instance of this class is created for every Run (access the current instance via `runReporting`) @@ -48,50 +52,143 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w else "" globalError(s"Failed to parse `-Wconf` configuration: ${settings.Wconf.value}\n${msgs.mkString("\n")}$multiHelp") WConf(Nil) - case Right(c) => c + case Right(conf) => + // default is "cat=deprecation:ws,cat=feature:ws,cat=optimizer:ws" + // under -deprecation, "cat=deprecation:w", but under -deprecation:false or -nowarn, "cat=deprecation:s" + // similarly 
for -feature, -Wopt (?) + val needsDefaultAdjustment = settings.deprecation.isSetByUser + val adjusted = + if (needsDefaultAdjustment || settings.nowarn.value) { + val (user, defaults) = conf.filters.splitAt(conf.filters.length - settings.WconfDefault.length) + val Deprecation = MessageFilter.Category(WarningCategory.Deprecation) + val action = if (settings.deprecation.value) Action.Warning else Action.Silent + val fixed = defaults.map { + case (cat @ Deprecation :: Nil, Action.WarningSummary) => cat -> action + case other => other + } + conf.copy(filters = user ::: fixed) + } + else conf + // configure any:s for -nowarn or cat=scala3-migration:e for -Xsource:3 + def Migration = MessageFilter.Category(WarningCategory.Scala3Migration) + if (settings.nowarn.value) + adjusted.copy(filters = adjusted.filters :+ (MessageFilter.Any :: Nil, Action.Silent)) + else if (settings.isScala3: @nowarn) + adjusted.copy(filters = adjusted.filters :+ (Migration :: Nil, Action.Error)) + else adjusted + } + + private lazy val quickfixFilters = { + if (settings.quickfix.isSetByUser && settings.quickfix.value.isEmpty) { + globalError(s"Missing message filter for `-quickfix`; see `-quickfix:help` or use `-quickfix:any` to apply all available quick fixes.") + Nil + } else if (settings.quickFixSilent) { + Nil + } else { + val parsed = settings.quickfix.value.map(WConf.parseFilter(_, rootDirPrefix)) + val msgs = parsed.collect { case Left(msg) => msg } + if (msgs.nonEmpty) { + globalError(s"Failed to parse `-quickfix` filters: ${settings.quickfix.value.mkString(",")}\n${msgs.mkString("\n")}") + Nil + } else parsed.collect { case Right(f) => f } + } } + private val skipRewriteAction = Set(Action.WarningSummary, Action.InfoSummary, Action.Silent) + + private def registerTextEdit(m: Message): Boolean = + m.actions.exists(_.edits.nonEmpty) && + quickfixFilters.exists(_.matches(m)) && { + m.actions.foreach(action => textEdits.addAll(action.edits)) + true + } + + private def 
registerErrorTextEdit(pos: Position, msg: String, actions: List[CodeAction]): Boolean = + actions.exists(_.edits.nonEmpty) && + quickfixFilters.exists { + case MessageFilter.Any => true + case mp: MessageFilter.MessagePattern => mp.check(msg) + case sp: MessageFilter.SourcePattern => sp.check(pos) + case _ => false + } && { + actions.foreach(action => textEdits.addAll(action.edits)) + true + } + private val summarizedWarnings: mutable.Map[WarningCategory, mutable.LinkedHashMap[Position, Message]] = mutable.HashMap.empty private val summarizedInfos: mutable.Map[WarningCategory, mutable.LinkedHashMap[Position, Message]] = mutable.HashMap.empty + /** + * The REPL creates two SourceFile instances for each line: + * - `requestFromLine` -> `parse` + * - Class Request has field `unit` initialized with a new SourceFile - note that the content is different (`paddedLine`) + * + * ReplBatchSourceFile has a reference to the first source file and we make sure to consistently use this + * one for the warning suspension / suppression hash maps. + */ + private def repSrc(s: SourceFile) = s match { + case r: ReplBatchSourceFile => r.parserSource + case _ => s + } + private val suppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty private val suppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty private val suspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Message]] = mutable.LinkedHashMap.empty - private def isSuppressed(warning: Message): Boolean = - suppressions.getOrElse(warning.pos.source, Nil).find(_.matches(warning)) match { - case Some(s) => s.markUsed(); true - case _ => false + private val textEdits: mutable.Set[TextEdit] = mutable.Set.empty + + // Used in REPL. The old run is used for parsing. Don't discard its suspended warnings. 
+ def initFrom(old: PerRunReporting): Unit = { + suspendedMessages ++= old.suspendedMessages + } + + def clearSuspendedMessages(): Unit = { + suspendedMessages.clear() + } + + private def nowarnAction(warning: Message): Action = + suppressions.getOrElse(repSrc(warning.pos.source), Nil).find(_.matches(warning)) match { + case Some(s) => + s.markUsed() + if (s.verbose) Action.WarningVerbose else Action.Silent + case _ => + Action.Warning } - def clearSuppressionsComplete(sourceFile: SourceFile): Unit = suppressionsComplete -= sourceFile + def clearSuppressionsComplete(sourceFile: SourceFile): Unit = suppressionsComplete -= repSrc(sourceFile) def addSuppression(sup: Suppression): Unit = { val source = sup.annotPos.source - suppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup + suppressions.getOrElseUpdate(repSrc(source), mutable.ListBuffer.empty) += sup } def suppressionExists(pos: Position): Boolean = - suppressions.getOrElse(pos.source, Nil).exists(_.annotPos.point == pos.point) - - def warnUnusedSuppressions(): Unit = { - // if we stop before typer completes (errors in parser, Ystop), report all suspended messages - suspendedMessages.valuesIterator.foreach(_.foreach(issueWarning)) - if (settings.warnUnusedNowarn && !settings.isScaladoc) { // scaladoc doesn't run all phases, so not all warnings are emitted - val sources = suppressions.keysIterator.toList - for (source <- sources; sups <- suppressions.remove(source); sup <- sups.reverse) { - if (!sup.used) - issueWarning(Message.Plain(sup.annotPos, "@nowarn annotation does not suppress any warnings", WarningCategory.UnusedNowarn, "")) - } - } + suppressions.getOrElse(repSrc(pos.source), Nil).exists(_.annotPos.point == pos.point) + + def runFinished(hasErrors: Boolean): Unit = { + // report suspended messages (in case the run finished before typer) + suspendedMessages.valuesIterator.foreach(_.foreach(issueWarning(_))) + + // report unused nowarns only if all phases are done. 
scaladoc doesn't run all phases. + if (!hasErrors && settings.warnUnusedNowarn && !settings.isScaladoc) + for { + source <- suppressions.keysIterator.toList + sups <- suppressions.remove(source) + sup <- sups.reverse + } if (!sup.used && !sup.synthetic) issueWarning(Message.Plain(sup.annotPos, "@nowarn annotation does not suppress any warnings", WarningCategory.UnusedNowarn, "", Nil)) + + // apply quick fixes + quickfix(textEdits) + textEdits.clear() } def reportSuspendedMessages(unit: CompilationUnit): Unit = { + val src = repSrc(unit.source) // sort suppressions. they are not added in any particular order because of lazy type completion - for (sups <- suppressions.get(unit.source)) - suppressions(unit.source) = sups.sortBy(sup => 0 - sup.start) - suppressionsComplete += unit.source - suspendedMessages.remove(unit.source).foreach(_.foreach(issueIfNotSuppressed)) + for (sups <- suppressions.get(src)) + suppressions(src) = sups.sortBy(sup => 0 - sup.start) + suppressionsComplete += src + suspendedMessages.remove(src).foreach(_.foreach(issueIfNotSuppressed)) } private def summaryMap(action: Action, category: WarningCategory) = { @@ -102,33 +199,56 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w sm.getOrElseUpdate(category, mutable.LinkedHashMap.empty) } - private def issueWarning(warning: Message): Unit = { - def verbose = warning match { - case Message.Deprecation(_, msg, site, origin, version) => s"[${warning.category.name} @ $site | origin=$origin | version=${version.filterString}] $msg" - case Message.Plain(_, msg, category, site) => s"[${category.name} @ $site] $msg" + private def issueWarning(warning: Message, verbose: Boolean = false): Unit = { + val action = if (verbose) Action.WarningVerbose else wconf.action(warning) + + val quickfixed = { + if (!skipRewriteAction(action) && registerTextEdit(warning)) s"[rewritten by -quickfix] ${warning.msg}" + else if (warning.actions.exists(_.edits.nonEmpty) && 
!settings.quickFixSilent) s"${warning.msg} [quickfixable]" + else warning.msg + } + + def helpMsg(level: String, isError: Boolean = false) = { + def ifNonEmpty(kind: String, filter: String) = if (filter.nonEmpty) s", $kind=$filter" else "" + def maybeSite = ifNonEmpty("site", warning.site) + def maybeOrigin = warning match { + case Message.Deprecation(_, _, _, origin, version, _) => + ifNonEmpty("origin", origin) + ifNonEmpty("version", version.filterString) + case _ => "" + } + def filterHelp = s"msg=, cat=${warning.category.name}${maybeSite}${maybeOrigin}" + def scala3migration = + if (isError && warning.category == WarningCategory.Scala3Migration) + "\nScala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress." + else "" + s"$quickfixed$scala3migration\nApplicable -Wconf / @nowarn filters for this $level: $filterHelp" } - wconf.action(warning) match { - case Action.Error => reporter.error(warning.pos, warning.msg) - case Action.Warning => reporter.warning(warning.pos, warning.msg) - case Action.WarningVerbose => reporter.warning(warning.pos, verbose) - case Action.Info => reporter.echo(warning.pos, warning.msg) - case Action.InfoVerbose => reporter.echo(warning.pos, verbose) + + action match { + case Action.Error => reporter.error(warning.pos, helpMsg("fatal warning", isError = true), warning.actions) + case Action.Warning => reporter.warning(warning.pos, quickfixed, warning.actions) + case Action.WarningVerbose => reporter.warning(warning.pos, helpMsg("warning"), warning.actions) + case Action.Info => reporter.echo(warning.pos, quickfixed, warning.actions) + case Action.InfoVerbose => reporter.echo(warning.pos, helpMsg("message"), warning.actions) case a @ (Action.WarningSummary | Action.InfoSummary) => val m = summaryMap(a, warning.category.summaryCategory) - if (!m.contains(warning.pos)) m.put(warning.pos, warning) + if (!m.contains(warning.pos)) m.addOne((warning.pos, warning)) case 
Action.Silent => } } def shouldSuspend(warning: Message): Boolean = - warning.pos.source != NoSourceFile && !suppressionsComplete(warning.pos.source) + warning.pos.source != NoSourceFile && !suppressionsComplete(repSrc(warning.pos.source)) def issueIfNotSuppressed(warning: Message): Unit = if (shouldSuspend(warning)) - suspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning - else { - if (!isSuppressed(warning)) + suspendedMessages.getOrElseUpdate(repSrc(warning.pos.source), mutable.LinkedHashSet.empty) += warning + else nowarnAction(warning) match { + case Action.Warning => issueWarning(warning) + case Action.WarningVerbose => + issueWarning(warning, verbose = true) + case _ => } private def summarize(action: Action, category: WarningCategory): Unit = { @@ -181,28 +301,37 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w } private def siteName(sym: Symbol) = if (sym.exists) { + def skipAnon(s: Symbol, res: Symbol): Symbol = + if (s.isRootSymbol || s == NoSymbol) res + else if (s.isAnonymousClass || s.isLocalDummy) skipAnon(s.effectiveOwner, s.effectiveOwner) + else skipAnon(s.effectiveOwner, res) // Similar to fullNameString, but don't jump to enclosing class. Keep full chain of symbols. def impl(s: Symbol): String = if (s.isRootSymbol || s == NoSymbol) s.nameString else if (s.owner.isEffectiveRoot) s.nameString else impl(s.effectiveOwner) + "." 
+ s.nameString - impl(sym) + impl(skipAnon(sym, sym)) } else "" - def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit = - issueIfNotSuppressed(Message.Deprecation(pos, msg, site, origin, Version.fromString(since))) + override def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String, actions: List[CodeAction] = Nil): Unit = + issueIfNotSuppressed(Message.Deprecation(pos, msg, site, origin, Version.fromString(since), actions)) + // multiple overloads cannot have default args + def deprecationWarning(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String, actions: List[CodeAction]): Unit = + deprecationWarning(pos, msg, since, siteName(site), siteName(origin), actions) def deprecationWarning(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Unit = - deprecationWarning(pos, msg, since, siteName(site), siteName(origin)) + deprecationWarning(pos, msg, since, siteName(site), siteName(origin), actions = Nil) def deprecationWarning(pos: Position, origin: Symbol, site: Symbol): Unit = { val version = origin.deprecationVersion.getOrElse("") val since = if (version.isEmpty) version else s" (since $version)" - val message = origin.deprecationMessage.map(": " + _).getOrElse("") + val message = origin.deprecationMessage.filter(!_.isEmpty).map(": " + _).getOrElse("") deprecationWarning(pos, origin, site, s"$origin${origin.locationString} is deprecated$since$message", version) } private[this] var reportedFeature = Set[Symbol]() + protected def featureReported(featureTrait: Symbol): Unit = reportedFeature += featureTrait + // we don't have access to runDefinitions here, so mapping from strings instead of feature symbols private val featureCategory: Map[String, WarningCategory.Feature] = { import WarningCategory._ @@ -219,39 +348,54 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w def featureWarning(pos: Position, 
featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Unit = { val req = if (required) "needs to" else "should" - val fqname = "scala.language." + featureName - val explain = ( - if (reportedFeature contains featureTrait) "" else - s""" - |---- - |This can be achieved by adding the import clause 'import $fqname' - |or by setting the compiler option -language:$featureName. - |See the Scaladoc for value $fqname for a discussion - |why the feature $req be explicitly enabled.""".stripMargin - ) + val fqname = s"scala.language.$featureName" + val explain = + if (reportedFeature contains featureTrait) "" + else sm"""| + |This can be achieved by adding the import clause 'import $fqname' + |or by setting the compiler option -language:$featureName. + |See the Scaladoc for value $fqname for a discussion + |why the feature $req be explicitly enabled.""" reportedFeature += featureTrait - val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct) - // maybe pos.source.file.file.getParentFile.getName or Path(source.file.file).parent.name - def parentFileName(source: internal.util.SourceFile) = - Option(java.nio.file.Paths.get(source.path).getParent).map(_.getFileName.toString) - // don't error on postfix in pre-0.13.18 xsbt/Compat.scala - def isSbtCompat = (featureName == "postfixOps" - && pos.source.file.name == "Compat.scala" - && parentFileName(pos.source).getOrElse("") == "xsbt" - && Thread.currentThread.getStackTrace.exists(_.getClassName.startsWith("sbt.")) - ) - if (required && !isSbtCompat) reporter.error(pos, msg) - else warning(pos, msg, featureCategory(featureTrait.nameString), site) - } - - // Used in the optimizer where we don't have no symbols, the site string is created from the class internal name and method name. 
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain".replace("#", construct) + // on postfix error, include interesting infix warning + def isXfix = featureName == "postfixOps" && suspendedMessages.get(repSrc(pos.source)).map(_.exists(w => pos.includes(w.pos))).getOrElse(false) + if (required) { + val amended = if (isXfix) s"$msg\n${suspendedMessages(repSrc(pos.source)).filter(pos includes _.pos).map(_.msg).mkString("\n")}" else msg + reporter.error(pos, amended) + } else warning(pos, msg, featureCategory(featureTrait.nameString), site) + } + + // Used in the optimizer where we don't have symbols, the site string is created from the class internal name and method name. + def warning(pos: Position, msg: String, category: WarningCategory, site: String, actions: List[CodeAction]): Unit = + issueIfNotSuppressed(Message.Plain(pos, msg, category, site, actions)) def warning(pos: Position, msg: String, category: WarningCategory, site: String): Unit = - issueIfNotSuppressed(Message.Plain(pos, msg, category, site)) + warning(pos, msg, category, site, actions = Nil) // Preferred over the overload above whenever a site symbol is available - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = - warning(pos, msg, category, siteName(site)) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, actions: List[CodeAction] = Nil): Unit = + warning(pos, msg, category, siteName(site), actions) + + // Provide an origin for the warning. 
+ def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, origin: String, actions: List[CodeAction]): Unit = + issueIfNotSuppressed(Message.Origin(pos, msg, category, siteName(site), origin, actions)) + // convenience overload for source compatibility + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, origin: String): Unit = + warning(pos, msg, category, site, origin, actions = Nil) + + def codeAction(title: String, pos: Position, newText: String, desc: String, expected: Option[(String, CompilationUnit)] = None) = + CodeAction(title, pos, newText, desc, expected.forall(e => e._1 == e._2.source.sourceAt(pos))) + + // Remember CodeActions that match `-quickfix` and report the error through the reporter + def error(pos: Position, msg: String, actions: List[CodeAction]): Unit = { + val quickfixed = { + if (registerErrorTextEdit(pos, msg, actions)) s"[rewritten by -quickfix] $msg" + else if (actions.exists(_.edits.nonEmpty) && !settings.quickFixSilent) s"$msg [quickfixable]" + else msg + } + reporter.error(pos, quickfixed, actions) + } // used by Global.deprecationWarnings, which is used by sbt def deprecationWarnings: List[(Position, String)] = summaryMap(Action.WarningSummary, WarningCategory.Deprecation).toList.map(p => (p._1, p._2.msg)) @@ -269,20 +413,112 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w var seenMacroExpansionsFallingBack = false // i.e., summarize warnings - def summarizeErrors(): Unit = if (!reporter.hasErrors && !settings.nowarn) { - for (c <- summarizedWarnings.keys.toList.sortBy(_.name)) + def summarizeErrors(): Unit = if (!reporter.hasErrors) { + val warnOK = !settings.nowarn.value + if (warnOK) for (c <- summarizedWarnings.keys.toList.sortBy(_.name)) summarize(Action.WarningSummary, c) for (c <- summarizedInfos.keys.toList.sortBy(_.name)) summarize(Action.InfoSummary, c) - if (seenMacroExpansionsFallingBack) + if (warnOK && 
seenMacroExpansionsFallingBack) warning(NoPosition, "some macros could not be expanded and code fell back to overridden methods;"+ "\nrecompiling with generated classfiles on the classpath might help.", WarningCategory.Other, site = "") // todo: migrationWarnings - if (settings.fatalWarnings && reporter.hasWarnings) - reporter.error(NoPosition, "No warnings can be incurred under -Xfatal-warnings.") + if (warnOK && settings.fatalWarnings.value && reporter.hasWarnings) + reporter.error(NoPosition, "No warnings can be incurred under -Werror.") + } + + private object quickfix { + + private def nofix(msg: String): Unit = nofixAt(NoPosition, msg) + private def nofixAt(pos: Position, msg: String): Unit = + issueWarning(Message.Plain(pos, msg, WarningCategory.Other, site = "", actions = Nil)) + + /** Source code at a position. Either a line with caret (offset), else the code at the range position. */ + def codeOf(pos: Position, source: SourceFile): String = + if (pos.start < pos.end) new String(source.content.slice(pos.start, pos.end)) + else { + val line = source.offsetToLine(pos.point) + val code = source.lines(line).next() + val caret = " " * (pos.point - source.lineToOffset(line)) + "^" + s"$code\n$caret" + } + + def checkNoOverlap(patches: List[TextEdit], source: SourceFile): Boolean = { + var ok = true + for (List(p1, p2) <- patches.sliding(2) if p1.position.end > p2.position.start) { + ok = false + val msg = + s"""overlapping quick fixes in ${source.file.file.getAbsolutePath}: + | + |add `${p1.newText}` at + |${codeOf(p1.position, source)} + | + |add `${p2.newText}` at + |${codeOf(p2.position, source)}""".stripMargin.trim + nofixAt(p1.position, msg) + } + ok + } + + def underlyingFile(source: SourceFile): Option[Path] = { + val fileClass = source.file.getClass.getName + val p = if (fileClass.endsWith("xsbt.ZincVirtualFile")) { + import scala.language.reflectiveCalls + val path = source.file.asInstanceOf[ {def underlying(): {def id(): String}}].underlying().id() + 
Some(Paths.get(path)) + } else + Option(source.file.file).map(_.toPath) + val r = p.filter(Files.exists(_)) + if (r.isEmpty) + nofix(s"Failed to apply quick fixes, file does not exist: ${source.file}") + r + } + + val encoding = Charset.forName(settings.encoding.value) + + def insertEdits(sourceChars: Array[Char], edits: List[TextEdit], file: Path): Array[Byte] = { + val patchedChars = new Array[Char](sourceChars.length + edits.iterator.map(_.delta).sum) + @tailrec + def loop(edits: List[TextEdit], inIdx: Int, outIdx: Int): Unit = { + def copy(upTo: Int): Int = { + val untouched = upTo - inIdx + System.arraycopy(sourceChars, inIdx, patchedChars, outIdx, untouched) + outIdx + untouched + } + edits match { + case e :: edits => + val outNew = copy(e.position.start) + e.newText.copyToArray(patchedChars, outNew) + loop(edits, e.position.end, outNew + e.newText.length) + case _ => + val outNew = copy(sourceChars.length) + if (outNew != patchedChars.length) + nofix(s"Unexpected content length when applying quick fixes; verify the changes to ${file.toFile.getAbsolutePath}") + } + } + loop(edits, 0, 0) + new String(patchedChars).getBytes(encoding) + } + + def apply(edits: mutable.Set[TextEdit]): Unit = + for ((source, edits) <- edits.groupBy(_.position.source).view.mapValues(_.toList.sortBy(_.position.start))) + if (checkNoOverlap(edits, source)) + underlyingFile(source).foreach { file => + val sourceChars = new String(Files.readAllBytes(file), encoding).toCharArray + val lastPos = edits.last.position + val trimmed = // SourceFile.content can add a NL, so trim any edit position past EOF + if (lastPos.start >= sourceChars.length) edits.filterNot(_.position.start >= sourceChars.length) + else if (lastPos.end > sourceChars.length) edits.init :+ edits.last.copy(position = lastPos.withEnd(sourceChars.length)) + else edits + try Files.write(file, insertEdits(sourceChars, trimmed, file)) + catch { + case e: IOException => + nofix(s"Failed to apply quick fixes to 
${file.toFile.getAbsolutePath}\n${e.getMessage}") + } + } } } } @@ -293,109 +529,161 @@ object Reporting { def msg: String def category: WarningCategory def site: String // sym.FullName of the location where the warning is positioned, may be empty + def actions: List[CodeAction] } object Message { - final case class Plain(pos: Position, msg: String, category: WarningCategory, site: String) extends Message + // an ordinary Message has a `category` for filtering and the `site` where it was issued + final case class Plain(pos: Position, msg: String, category: WarningCategory, site: String, actions: List[CodeAction]) extends Message + + // a Plain message with an `origin` which should not be empty. For example, the origin of an unused import is the fully-qualified selection + final case class Origin(pos: Position, msg: String, category: WarningCategory, site: String, origin: String, actions: List[CodeAction]) extends Message // `site` and `origin` may be empty - final case class Deprecation(pos: Position, msg: String, site: String, origin: String, since: Version) extends Message { + final case class Deprecation(pos: Position, msg: String, site: String, origin: String, since: Version, actions: List[CodeAction]) extends Message { def category: WarningCategory = WarningCategory.Deprecation } } - sealed trait WarningCategory { - lazy val name: String = { - val objectName = this.getClass.getName.split('$').last - WarningCategory.insertDash.replaceAllIn(objectName, "-") - .stripPrefix("-") - .stripSuffix("-") - .toLowerCase - } - + sealed class WarningCategory { def includes(o: WarningCategory): Boolean = this eq o def summaryCategory: WarningCategory = this + lazy val name: String = WarningCategory.nameOf(this) + override def toString = name } object WarningCategory { - private val insertDash = "(?=[A-Z][a-z])".r - - var all: mutable.Map[String, WarningCategory] = mutable.Map.empty - private def add(c: WarningCategory): Unit = all += ((c.name, c)) - - object Deprecation 
extends WarningCategory; add(Deprecation) - - object Unchecked extends WarningCategory; add(Unchecked) - - object Optimizer extends WarningCategory; add(Optimizer) - - object Scaladoc extends WarningCategory; add(Scaladoc) - - object JavaSource extends WarningCategory; add(JavaSource) - - sealed trait Other extends WarningCategory { override def summaryCategory: WarningCategory = Other } - object Other extends Other { override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Other] }; add(Other) - object OtherShadowing extends Other; add(OtherShadowing) - object OtherPureStatement extends Other; add(OtherPureStatement) - object OtherMigration extends Other; add(OtherMigration) - object OtherMatchAnalysis extends WarningCategory; add(OtherMatchAnalysis) - object OtherDebug extends WarningCategory; add(OtherDebug) - object OtherNonCooperativeEquals extends Other; add(OtherNonCooperativeEquals) - - sealed trait WFlag extends WarningCategory { override def summaryCategory: WarningCategory = WFlag } - object WFlag extends WFlag { override def includes(o: WarningCategory): Boolean = o.isInstanceOf[WFlag] }; add(WFlag) - object WFlagDeadCode extends WFlag; add(WFlagDeadCode) - object WFlagExtraImplicit extends WFlag; add(WFlagExtraImplicit) - object WFlagNumericWiden extends WFlag; add(WFlagNumericWiden) - object WFlagSelfImplicit extends WFlag; add(WFlagSelfImplicit) - object WFlagValueDiscard extends WFlag; add(WFlagValueDiscard) - - sealed trait Unused extends WarningCategory { override def summaryCategory: WarningCategory = Unused } - object Unused extends Unused { override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Unused] }; add(Unused) - object UnusedImports extends Unused; add(UnusedImports) - object UnusedPatVars extends Unused; add(UnusedPatVars) - object UnusedPrivates extends Unused; add(UnusedPrivates) - object UnusedLocals extends Unused; add(UnusedLocals) - object UnusedParams extends Unused; add(UnusedParams) - object UnusedNowarn 
extends Unused; add(UnusedNowarn) - - sealed trait Lint extends WarningCategory { override def summaryCategory: WarningCategory = Lint } - object Lint extends Lint { override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Lint] }; add(Lint) - object LintAdaptedArgs extends Lint; add(LintAdaptedArgs) - object LintNullaryUnit extends Lint; add(LintNullaryUnit) - object LintInaccessible extends Lint; add(LintInaccessible) - object LintNullaryOverride extends Lint; add(LintNullaryOverride) - object LintInferAny extends Lint; add(LintInferAny) - object LintMissingInterpolator extends Lint; add(LintMissingInterpolator) - object LintDocDetached extends Lint; add(LintDocDetached) - object LintPrivateShadow extends Lint; add(LintPrivateShadow) - object LintTypeParameterShadow extends Lint; add(LintTypeParameterShadow) - object LintPolyImplicitOverload extends Lint; add(LintPolyImplicitOverload) - object LintOptionImplicit extends Lint; add(LintOptionImplicit) - object LintDelayedinitSelect extends Lint; add(LintDelayedinitSelect) - object LintPackageObjectClasses extends Lint; add(LintPackageObjectClasses) - object LintStarsAlign extends Lint; add(LintStarsAlign) - object LintConstant extends Lint; add(LintConstant) - object LintNonlocalReturn extends Lint; add(LintNonlocalReturn) - object LintImplicitNotFound extends Lint; add(LintImplicitNotFound) - object LintSerial extends Lint; add(LintSerial) - object LintEtaZero extends Lint; add(LintEtaZero) - object LintEtaSam extends Lint; add(LintEtaSam) - object LintDeprecation extends Lint; add(LintDeprecation) - object LintIntDivToFloat extends Lint; add(LintIntDivToFloat) - object LintBynameImplicit extends Lint; add(LintBynameImplicit) - object LintRecurseWithDefault extends Lint; add(LintRecurseWithDefault) - object LintUnitSpecialization extends Lint; add(LintUnitSpecialization) - - sealed trait Feature extends WarningCategory { override def summaryCategory: WarningCategory = Feature } - object Feature extends 
Feature { override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Feature] }; add(Feature) - object FeatureDynamics extends Feature; add(FeatureDynamics) - object FeatureExistentials extends Feature; add(FeatureExistentials) - object FeatureHigherKinds extends Feature; add(FeatureHigherKinds) - object FeatureImplicitConversions extends Feature; add(FeatureImplicitConversions) - object FeaturePostfixOps extends Feature; add(FeaturePostfixOps) - object FeatureReflectiveCalls extends Feature; add(FeatureReflectiveCalls) - object FeatureMacros extends Feature; add(FeatureMacros) + private val camels = "(?=[A-Z][a-z])".r + private def hyphenated(s: String): String = camels.split(s).mkString("-").toLowerCase + + private val _all: mutable.Map[String, WarningCategory] = mutable.Map.empty + def all: collection.Map[String, WarningCategory] = _all + + // Add all WarningCategory members to all, by category name derived from field name. + private def adderall(): Unit = + for (f <- getClass.getDeclaredFields if classOf[WarningCategory].isAssignableFrom(f.getType)) + _all.put(hyphenated(f.getName), f.get(this).asInstanceOf[WarningCategory]) + .foreach(_ => throw new AssertionError(s"warning category '${f.getName}' added twice")) + + private def nameOf(w: WarningCategory): String = + getClass.getDeclaredFields.find(_.get(this) eq w) match { + case Some(f) => hyphenated(f.getName) + case _ => hyphenated(w.getClass.getName).toLowerCase + } + + private def apply(): WarningCategory = new WarningCategory + + // "top-level" categories + val Deprecation, Unchecked, Optimizer, Scaladoc, JavaSource, Scala3Migration = WarningCategory() + + // miscellaneous warnings that are grouped together in summaries + sealed class Other extends WarningCategory { + override def summaryCategory: WarningCategory = Other + } + val Other = new Other { + override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Other] + } + private def other(): Other = new Other + val OtherShadowing, + 
OtherPureStatement, + OtherMigration, // API annotation + OtherMatchAnalysis, + OtherDebug, + OtherNullaryOverride, + OtherNonCooperativeEquals, + OtherImplicitType + = other() + + // categories corresponding to -W settings, such as -Wvalue-discard + sealed class WFlag extends WarningCategory { + override def summaryCategory: WarningCategory = WFlag + } + val WFlag = new WFlag { + override def includes(o: WarningCategory): Boolean = o.isInstanceOf[WFlag] + } + private def wflag(): WFlag = new WFlag + val WFlagDeadCode, + WFlagExtraImplicit, + WFlagNumericWiden, + WFlagSelfImplicit, + WFlagUnnamedBooleanLiteral, + WFlagTostringInterpolated, + WFlagValueDiscard + = wflag() + + sealed class Unused extends WarningCategory { + override def summaryCategory: WarningCategory = Unused + } + val Unused = new Unused { + override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Unused] + } + private def unused(): Unused = new Unused + val UnusedImports, + UnusedPatVars, + UnusedPrivates, + UnusedLocals, + UnusedParams, + UnusedNowarn + = unused() + + sealed class Lint extends WarningCategory { + override def summaryCategory: WarningCategory = Lint + } + val Lint = new Lint { + override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Lint] + } + private def lint(): Lint = new Lint + val LintAdaptedArgs, + LintNullaryUnit, + LintInaccessible, + LintStructuralType, + LintInferAny, + LintMissingInterpolator, + LintDocDetached, + LintPrivateShadow, + LintTypeParameterShadow, + LintPolyImplicitOverload, + LintOptionImplicit, + LintDelayedinitSelect, + LintPackageObjectClasses, + LintStarsAlign, + LintConstant, + LintNonlocalReturn, + LintImplicitNotFound, + LintSerial, + LintEtaZero, + LintEtaSam, + LintDeprecation, + LintBynameImplicit, + LintRecurseWithDefault, + LintUnitSpecialization, + LintPerformance, + LintIntDivToFloat, + LintUniversalMethods, + LintCloneable, + LintOverload, + LintNumericMethods + = lint() + + sealed class Feature extends 
WarningCategory { + override def summaryCategory: WarningCategory = Feature + } + val Feature = new Feature { + override def includes(o: WarningCategory): Boolean = o.isInstanceOf[Feature] + } + private def feature(): Feature = new Feature + val FeatureDynamics, + FeatureExistentials, + FeatureHigherKinds, + FeatureImplicitConversions, + FeaturePostfixOps, + FeatureReflectiveCalls, + FeatureMacros + = feature() + + locally { + adderall() + } } sealed trait Version { @@ -465,7 +753,7 @@ object Reporting { } object MessageFilter { - object Any extends MessageFilter { + case object Any extends MessageFilter { def matches(message: Message): Boolean = true } @@ -474,48 +762,46 @@ object Reporting { } final case class MessagePattern(pattern: Regex) extends MessageFilter { - def matches(message: Message): Boolean = pattern.findFirstIn(message.msg).nonEmpty + def check(msg: String) = pattern.findFirstIn(msg).nonEmpty + def matches(message: Message): Boolean = check(message.msg) } final case class SitePattern(pattern: Regex) extends MessageFilter { - def matches(message: Message): Boolean = regexMatches(pattern, message.site) + def matches(message: Message): Boolean = pattern.matches(message.site) } final case class SourcePattern(pattern: Regex) extends MessageFilter { private[this] val cache = mutable.Map.empty[SourceFile, Boolean] - def matches(message: Message): Boolean = cache.getOrElseUpdate(message.pos.source, { - val sourcePath = message.pos.source.file.canonicalPath.replace("\\", "/") + def check(pos: Position) = cache.getOrElseUpdate(pos.source, { + val sourcePath = pos.source.file.canonicalPath.replace("\\", "/") pattern.findFirstIn(sourcePath).nonEmpty }) + def matches(message: Message): Boolean = check(message.pos) } final case class DeprecatedOrigin(pattern: Regex) extends MessageFilter { def matches(message: Message): Boolean = message match { - case m: Message.Deprecation => regexMatches(pattern, m.origin) + case m: Message.Deprecation => 
pattern.matches(m.origin) + case m: Message.Origin => pattern.matches(m.origin) case _ => false } } final case class DeprecatedSince(comp: Int, version: ParseableVersion) extends MessageFilter { def matches(message: Message): Boolean = message match { - case Message.Deprecation(_, _, _, _, mv: ParseableVersion) => + case Message.Deprecation(_, _, _, _, mv: ParseableVersion, _) => if (comp == -1) mv.smaller(version) else if (comp == 0) mv.same(version) else mv.greater(version) case _ => false } } - - // local copy of 2.13's Regex#matches (forward bincompat..) - private def regexMatches(r: Regex, s: CharSequence) = runMatcher(r, r.pattern.matcher(s)) - private def runMatcher(r: Regex, m: java.util.regex.Matcher) = r match { - case _: UnanchoredRegex => m.find() - case _ => m.matches() - } } - sealed trait Action + sealed trait Action { + override def toString = s"Action[${getClass.getSimpleName.stripSuffix("$")}]" + } object Action { object Error extends Action @@ -556,7 +842,7 @@ object Reporting { regex(s.substring(5)).map(SitePattern) } else if (s.startsWith("origin=")) { regex(s.substring(7)).map(DeprecatedOrigin) - } else if(s.startsWith("since")) { + } else if (s.startsWith("since")) { def fail = Left(s"invalid since filter: `$s`; required shape: `since<1.2.3`, `since=3.2`, `since>2`") if (s.length < 6) fail else { @@ -598,36 +884,26 @@ object Reporting { case "info-summary" | "is" => Right(InfoSummary) case "info-verbose" | "iv" => Right(InfoVerbose) case "silent" | "s" => Right(Silent) - case _ => Left(List(s"unknonw action: `$s`")) + case _ => Left(List(s"unknown action: `$s`")) } if (setting.isEmpty) Right(WConf(Nil)) else { val parsedConfs: List[Either[List[String], (List[MessageFilter], Action)]] = setting.map(conf => { val parts = conf.split("[&:]") // TODO: don't split on escaped \& - val (ms, fs) = separateE(parts.view.init.map(parseFilter(_, rootDir)).toList) + val (ms, fs) = parts.view.init.map(parseFilter(_, rootDir)).toList.partitionMap(identity) 
if (ms.nonEmpty) Left(ms) else if (fs.isEmpty) Left(List("no filters or no action defined")) else parseAction(parts.last).map((fs, _)) }) - val (ms, fs) = separateE(parsedConfs) + val (ms, fs) = parsedConfs.partitionMap(identity) if (ms.nonEmpty) Left(ms.flatten) else Right(WConf(fs)) } } - - // aka xs.partitionMap(identity) in 2.13 - private def separateE[A, B](xs: List[Either[A, B]]): (List[A], List[B]) = { - import mutable.ListBuffer - val (a, b) = xs.foldLeft((new ListBuffer[A], new ListBuffer[B])) { - case ((a, b), Left(x)) => (a += x, b) - case ((a, b), Right(x)) => (a, b += x) - } - (a.toList, b.toList) - } } - case class Suppression(annotPos: Position, filters: List[MessageFilter], start: Int, end: Int) { + case class Suppression(annotPos: Position, filters: List[MessageFilter], start: Int, end: Int, synthetic: Boolean = false, verbose: Boolean = false) { private[this] var _used = false def used: Boolean = _used def markUsed(): Unit = { _used = true } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index f74e488da7cf..9af40d88c239 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,14 +10,15 @@ * additional information regarding copyright ownership. 
*/ -package scala -package tools.nsc +package scala.tools.nsc -import io.{Directory, File, Path} +import scala.reflect.io.{ AbstractFile, Directory, File, Path } +import scala.tools.nsc.classpath.ClassPathFactory +import scala.tools.nsc.io.Jar +import scala.tools.nsc.reporters.{ ConsoleReporter, Reporter } +import scala.util.chaining._ +import scala.util.control.NonFatal import java.io.IOException -import scala.tools.nsc.classpath.DirectoryClassPath -import scala.tools.nsc.reporters.{Reporter,ConsoleReporter} -import util.Exceptional.unwrap /** An object that runs Scala code in script files. * @@ -41,192 +42,168 @@ import util.Exceptional.unwrap * }}} * * @author Lex Spoon - * @version 1.0, 15/05/2006 * @todo It would be better if error output went to stderr instead * of stdout... */ -class ScriptRunner extends HasCompileSocket { - lazy val compileSocket = CompileSocket +trait ScriptRunner { + /** Run a script file by name, with the given arguments. + * @return optionally an error, None for success + */ + def runScript(script: String, scriptArgs: List[String]): Option[Throwable] - /** Default name to use for the wrapped script */ - val defaultScriptMain = "Main" + /** Run the script text as supplied, with the given arguments. 
+ * @return optionally an error, None for success + */ + def runScriptText(script: String, scriptArgs: List[String]): Option[Throwable] +} - /** Pick a main object name from the specified settings */ - def scriptMain(settings: Settings) = settings.script.value match { - case "" => defaultScriptMain - case x => x +class DefaultScriptRunner(settings: GenericRunnerSettings) extends AbstractScriptRunner(settings) { + protected def doCompile(scriptFile: String) = { + val reporter = new ConsoleReporter(settings) + val compiler = newGlobal(settings, reporter) + if (settings.pastefiles.value.nonEmpty) new compiler.Run().compile(settings.pastefiles.value) + // Setting settings.script.value informs the compiler this is not a self-contained compilation unit. + settings.script.value = mainClass + new compiler.Run().compile(scriptFile :: Nil) + !reporter.hasErrors } - /** Choose a jar filename to hold the compiled version of a script. */ - private def jarFileFor(scriptFile: String)= File( - if (scriptFile endsWith ".jar") scriptFile - else scriptFile.stripSuffix(".scala") + ".jar" - ) + protected def newGlobal(settings: Settings, reporter: Reporter) = Global(settings, reporter) +} - /** Compile a script using the fsc compilation daemon. 
- */ - private def compileWithDaemon(settings: GenericRunnerSettings, scriptFileIn: String) = { - val scriptFile = Path(scriptFileIn).toAbsolute.path - val compSettingNames = new Settings(sys.error).visibleSettings map (_.name) - val compSettings = settings.visibleSettings filter (compSettingNames contains _.name) - val coreCompArgs = compSettings flatMap (_.unparse) - val compArgs = coreCompArgs ++ List("-Xscript", scriptMain(settings), scriptFile) - - // TODO: untangle this mess of top-level objects with their own little view of the mutable world of settings - compileSocket.verbose = settings.verbose.value - - compileSocket getOrCreateSocket "" match { - case Some(sock) => compileOnServer(sock, compArgs) - case _ => false - } - } +abstract class AbstractScriptRunner(settings: GenericRunnerSettings) extends ScriptRunner { - protected def newGlobal(settings: Settings, reporter: Reporter) = - Global(settings, reporter) + /** Do compile the given script file, returning true for success. */ + protected def doCompile(scriptFile: String): Boolean + + protected final def mainClass: String = settings.script.value /** Compile a script and then run the specified closure with * a classpath for the compiled script. * * @return true if compilation and the handler succeeds, false otherwise. */ - private def withCompiledScript( - settings: GenericRunnerSettings, - scriptFile: String) - (handler: String => Boolean): Boolean = - { - def mainClass = scriptMain(settings) - - /* Compiles the script file, and returns the directory with the compiled - * class files, if the compilation succeeded. 
- */ - def compile: Option[Directory] = { - val compiledPath = Directory makeTemp "scalascript" + private def withCompiledScript(scriptFile: String)(handler: String => Option[Throwable]): Option[Throwable] = { - // delete the directory after the user code has finished - sys.addShutdownHook(compiledPath.deleteRecursively()) + /* Compiles the script file, with the output set to either + * the user-specified location (jar or dir), or a temp dir. + * Returns the output location on success. + */ + def compile: Option[Path] = { + val outpath = + if (settings.outdir.isSetByUser) + Path(settings.outdir.value) + else + Directory.makeTemp("scalascript").tap { tmp => + // delete the directory after the user code has finished + Runtime.getRuntime.addShutdownHook(new Thread(() => tmp.deleteRecursively())) + settings.outdir.value = tmp.path + } - settings.outdir.value = compiledPath.path + if (doCompile(scriptFile)) Some(outpath) else None + } - if (!settings.useCompDaemon) { - /* Setting settings.script.value informs the compiler this is not a - * self contained compilation unit. - */ - settings.script.value = mainClass - val reporter = new ConsoleReporter(settings) - val compiler = newGlobal(settings, reporter) + def hasClassToRun(location: Path): Boolean = { + val cp = ClassPathFactory.newClassPath(AbstractFile.getDirectory(location), settings) + cp.findClass(mainClass).isDefined + } - new compiler.Run compile List(scriptFile) - if (reporter.hasErrors) None else Some(compiledPath) + // under -save, compile to a jar, specified either by -d or based on script name. + // if -d specifies a dir, assemble the jar by hand. 
+ def withLatestJar(): Option[Throwable] = { + val outputToJar = settings.outdir.value.endsWith(".jar") + def stripped = List(".scala", ".sc").find(scriptFile.endsWith).map(scriptFile.stripSuffix).getOrElse(scriptFile) + val jarFile = File( + if (outputToJar) settings.outdir.value + else s"$stripped.jar".tap(j => if (!settings.outdir.isSetByUser) settings.outdir.value = j) + ) + def jarOK = jarFile.canRead && jarFile.isFresher(File(scriptFile)) + + def recompile(): Option[Throwable] = { + jarFile.delete() + + compile match { + case Some(compiledPath) => + if (hasClassToRun(compiledPath)) { + // user -d mydir -save means assemble script.jar, don't delete mydir + if (!Jar.isJarOrZip(compiledPath)) { + try { + Jar.create(jarFile, compiledPath.toDirectory, mainClass) + None + } catch { + case NonFatal(e) => jarFile.delete() ; Some(e) + } + } else None + } else Some(NoScriptError) + case _ => Some(ScriptCompileError) + } } - else if (compileWithDaemon(settings, scriptFile)) Some(compiledPath) - else None - } - def hasClassToRun(d: Directory): Boolean = { - val cp = DirectoryClassPath(d.jfile) - cp.findClass(mainClass).isDefined + val err = if (!jarOK) recompile() else None + err orElse handler(jarFile.toAbsolute.path) filterNot { case NoScriptError => true case _ => false } } - /* The script runner calls sys.exit to communicate a return value, but this must + /* The script runner calls System.exit to communicate a return value, but this must * not take place until there are no non-daemon threads running. Tickets #1955, #2006. 
*/ util.waitingForThreads { - if (settings.save) { - val jarFile = jarFileFor(scriptFile) - def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile)) - - def recompile() = { - jarFile.delete() - - compile match { - case Some(compiledPath) => - if (!hasClassToRun(compiledPath)) { - // it compiled ok, but there is nothing to run; - // running an empty script should succeed - true - } else { - try io.Jar.create(jarFile, compiledPath, mainClass) - catch { case _: Exception => jarFile.delete() } - - if (jarOK) { - compiledPath.deleteRecursively() - handler(jarFile.toAbsolute.path) - } - // jar failed; run directly from the class files - else handler(compiledPath.path) - } - case _ => false - } + // either update the jar or don't use a cache jar at all, just use the class files, if they exist + if (settings.save.value) withLatestJar() + else { + compile match { + case Some(cp) if hasClassToRun(cp) => handler(cp.path) + case Some(_) => None + case _ => Some(ScriptCompileError) } - - if (jarOK) handler(jarFile.toAbsolute.path) // pre-compiled jar is current - else recompile() // jar old - recompile the script. } - // don't use a cache jar at all--just use the class files, if they exist - else compile exists (cp => !hasClassToRun(cp) || handler(cp.path)) } } - /** Run a script after it has been compiled + /** Run a script after it has been compiled. Prints any exceptions. 
* * @return true if execution succeeded, false otherwise */ - private def runCompiled( - settings: GenericRunnerSettings, - compiledLocation: String, - scriptArgs: List[String]): Boolean = - { + private def runCompiled(compiledLocation: String, scriptArgs: List[String]): Option[Throwable] = { val cp = File(compiledLocation).toURL +: settings.classpathURLs - ObjectRunner.runAndCatch(cp, scriptMain(settings), scriptArgs) match { - case Left(ex) => ex.printStackTrace() ; false - case _ => true - } + ObjectRunner.runAndCatch(cp, mainClass, scriptArgs) } - /** Run a script file with the specified arguments and compilation settings. - * - * @return true if compilation and execution succeeded, false otherwise. - */ - def runScript(settings: GenericRunnerSettings, scriptFile: String, scriptArgs: List[String]): Boolean = { - def checkedScript = { - val f = File(scriptFile) - if (!f.exists) throw new IOException(s"no such file: $scriptFile") - if (!f.canRead) throw new IOException(s"can't read: $scriptFile") - if (f.isDirectory) throw new IOException(s"can't compile a directory: $scriptFile") - if (!settings.nc && !f.isFile) throw new IOException(s"compile server requires a regular file: $scriptFile") - scriptFile - } - withCompiledScript(settings, checkedScript) { runCompiled(settings, _, scriptArgs) } + final def runScript(scriptFile: String, scriptArgs: List[String]): Option[Throwable] = { + val f = File(scriptFile) + def usingCompilationServer = settings.Yscriptrunner.valueSetByUser.map(_ != "default").getOrElse(false) + if (!f.exists) Some(new IOException(s"no such file: $scriptFile")) + else if (!f.canRead) Some(new IOException(s"can't read: $scriptFile")) + else if (f.isDirectory) Some(new IOException(s"can't compile a directory: $scriptFile")) + else if (!f.isFile && usingCompilationServer) Some(new IOException(s"compile server requires a regular file: $scriptFile")) + else withCompiledScript(scriptFile) { runCompiled(_, scriptArgs) } } - /** Calls runScript and 
catches the enumerated exceptions, routing - * them to Left(ex) if thrown. - */ - def runScriptAndCatch( - settings: GenericRunnerSettings, - scriptFile: String, - scriptArgs: List[String]): Either[Throwable, Boolean] = - { - try Right(runScript(settings, scriptFile, scriptArgs)) - catch { case e: Throwable => Left(unwrap(e)) } - } - - /** Run a command - * - * @return true if compilation and execution succeeded, false otherwise. - */ - def runCommand( - settings: GenericRunnerSettings, - command: String, - scriptArgs: List[String]): Boolean = - { + final def runScriptText(command: String, scriptArgs: List[String]): Option[Throwable] = { val scriptFile = File.makeTemp("scalacmd", ".scala") // save the command to the file scriptFile writeAll command - try withCompiledScript(settings, scriptFile.path) { runCompiled(settings, _, scriptArgs) } + try withCompiledScript(scriptFile.path) { runCompiled(_, scriptArgs) } + catch { + case NonFatal(e) => Some(e) + } finally scriptFile.delete() // in case there was a compilation error } } -object ScriptRunner extends ScriptRunner { } +object ScriptRunner { + import scala.reflect.internal.util.ScalaClassLoader + + def apply(settings: GenericRunnerSettings): ScriptRunner = + settings.Yscriptrunner.value match { + case "default" => new DefaultScriptRunner(settings) + case "resident" => new fsc.ResidentScriptRunner(settings) + case "shutdown" => new fsc.DaemonKiller(settings) + case custom => + val loader = new ClassLoader(getClass.getClassLoader) with ScalaClassLoader + loader.create[ScriptRunner](custom, settings.errorFn)(settings) + } +} + +object ScriptCompileError extends scala.util.control.ControlThrowable +object NoScriptError extends scala.util.control.ControlThrowable diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala index 4c43b89dacd5..098223aed46d 100644 --- a/src/compiler/scala/tools/nsc/Settings.scala +++ b/src/compiler/scala/tools/nsc/Settings.scala @@ -1,7 +1,7 @@ 
/* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala index 9ecb41f81fe1..d44ad38484bb 100644 --- a/src/compiler/scala/tools/nsc/SubComponent.scala +++ b/src/compiler/scala/tools/nsc/SubComponent.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,7 @@ package scala.tools.nsc import scala.ref.WeakReference -/** An nsc sub-component. +/** Compilation is split into phases and the sub-components of the compiler define such phases. * * @author Martin Odersky */ @@ -50,9 +50,14 @@ abstract class SubComponent { /** True if this phase runs after all other phases. Usually, `terminal`. */ val terminal: Boolean = false - /** SubComponent are added to a HashSet and two phases are the same if they have the same name */ + /** SubComponents are added to a HashSet and two phases are the same if they have the same name. */ override def hashCode() = phaseName.hashCode() + override def equals(other: Any) = other match { + case other: SubComponent => phaseName.equals(other.phaseName) + case _ => false + } + /** New flags defined by the phase which are not valid before */ def phaseNewFlags: Long = 0 @@ -68,7 +73,7 @@ abstract class SubComponent { @inline final def beforeOwnPhase[T](op: => T) = global.enteringPhase(ownPhase)(op) @inline final def afterOwnPhase[T](op: => T) = global.exitingPhase(ownPhase)(op) - /** The phase corresponding to this subcomponent in the current compiler run */ + /** The phase corresponding to this subcomponent in the current compiler run. 
*/ def ownPhase: Phase = { val cache = ownPhaseCache.underlying.get if (cache != null && ownPhaseRunId == global.currentRunId) @@ -81,9 +86,6 @@ abstract class SubComponent { } } - /** The phase defined by this subcomponent. Can be called only after phase is installed by newPhase. */ - // lazy val ownPhase: Phase = global.currentRun.phaseNamed(phaseName) - /** A standard phase template */ abstract class StdPhase(prev: Phase) extends global.GlobalPhase(prev) { def name = phaseName diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index b9e93e2bcde6..5a33f9ce81fb 100644 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,6 +13,7 @@ package scala.tools.nsc package ast +import scala.annotation.tailrec import symtab._ import util.DocStrings._ import scala.collection.mutable @@ -20,7 +21,6 @@ import scala.tools.nsc.Reporting.WarningCategory /* * @author Martin Odersky - * @version 1.0 */ trait DocComments { self: Global => @@ -35,7 +35,7 @@ trait DocComments { self: Global => */ val docComments = mutable.WeakHashMap[Symbol, DocComment]() - def clearDocComments() { + def clearDocComments(): Unit = { cookedDocComments.clear() docComments.clear() defs.clear() @@ -58,16 +58,24 @@ trait DocComments { self: Global => * since r23926. 
*/ private def allInheritedOverriddenSymbols(sym: Symbol): List[Symbol] = { - if (!sym.owner.isClass) Nil - else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) + val getter: Symbol = sym.getter + val symOrGetter = getter.orElse(sym) + if (symOrGetter.owner.isClass) + symOrGetter.owner.ancestors + .flatMap { ancestor => + symOrGetter.overriddenSymbol(ancestor) match { + case NoSymbol => Nil + case matching => List(matching) + } + } + else Nil } - def fillDocComment(sym: Symbol, comment: DocComment) { + def fillDocComment(sym: Symbol, comment: DocComment): Unit = { docComments(sym) = comment comment.defineVariables(sym) } - def replaceInheritDocToInheritdoc(docStr: String):String = { docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") } @@ -88,7 +96,7 @@ trait DocComments { self: Global => // scala/bug#8210 - The warning would be false negative when this symbol is a setter if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter) runReporting.warning(sym.pos, s"The comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.", WarningCategory.Scaladoc, sym) - ownComment.replaceAllLiterally("@inheritdoc", "") + ownComment.replace("@inheritdoc", "") case Some(sc) => if (ownComment == "") sc else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) @@ -143,8 +151,7 @@ trait DocComments { self: Global => /** The cooked doc comment of an overridden symbol */ protected def superComment(sym: Symbol): Option[String] = { - val getter: Symbol = sym.getter - allInheritedOverriddenSymbols(getter.orElse(sym)).iterator + allInheritedOverriddenSymbols(sym).iterator .map(cookedDocComment(_)) .find(_ != "") } @@ -184,7 +191,7 @@ trait DocComments { self: Global => } def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { - case Some((start, end)) => + case Some((_, end)) => if (end > tocopy) tocopy = end case None => srcSec match { @@ -246,7 +253,7 @@ trait DocComments { self: 
Global => if (childSection.indexOf("@inheritdoc") == -1) childSection else - childSection.replaceAllLiterally("@inheritdoc", parentSection) + childSection.replace("@inheritdoc", parentSection) def getParentSection(section: (Int, Int)): String = { @@ -257,13 +264,11 @@ trait DocComments { self: Global => def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = paramMap.get(param) match { - case Some(section) => - // Cleanup the section tag and parameter - val sectionTextBounds = extractSectionText(parent, section) - cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) + case Some(paramSection) => + val (start, end) = extractSectionText(parent, paramSection) + cleanupSectionText(parent.substring(start, end)) // Cleanup the section tag and parameter case None => - reporter.echo(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym + - " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") + reporter.echo(sym.pos, s"""The "$getSectionHeader" annotation of the $sym comment contains @inheritdoc, but the corresponding section in the parent is not defined.""") "" } @@ -293,28 +298,41 @@ trait DocComments { self: Global => out.toString } - /** Maps symbols to the variable -> replacement maps that are defined - * in their doc comments + /** Maps symbols to the `variable -> replacement` maps that are defined + * in their doc comments. */ - private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map() + private val defs = mutable.HashMap.empty[Symbol, Map[String, String]].withDefaultValue(Map()) - /** Lookup definition of variable. + /** Look up definition of variable. + * + * - For a module, try the companion class first. + * - For classes with a self type, search on that basis. + * - Search for definitions on base classes, then on enclosing elements. 
* * @param vble The variable for which a definition is searched * @param site The class for which doc comments are generated */ - def lookupVariable(vble: String, site: Symbol): Option[String] = site match { - case NoSymbol => None - case _ => - val searchList = - if (site.isModule) site :: site.info.baseClasses - else site.info.baseClasses - - searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { - case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) - case res => res orElse lookupVariable(vble, site.owner) + @tailrec + final def lookupVariable(vble: String, site: Symbol): Option[String] = + if (site == NoSymbol) None + else { + val searchList = { + var bases = List.empty[Symbol] + def include(k: Symbol): Unit = bases ::= k + def examine(k: Symbol): Unit = { + val bs = if (k.hasSelfType) k.typeOfThis.baseClasses else k.baseClasses + bs.foreach(include) + } + if (site.isModule) examine(site.companionClass) + examine(site) + bases.reverse.distinct } - } + searchList.collectFirst { case x if defs(x).contains(vble) => defs(x)(vble) } match { + case Some(str) if str.startsWith("$") => lookupVariable(str.tail, site) + case s @ Some(str) => defs(site) += vble -> str; s + case None => lookupVariable(vble, site.owner) + } + } /** Expand variable occurrences in string `str`, until a fix point is reached or * an expandLimit is exceeded. 
@@ -327,6 +345,7 @@ trait DocComments { self: Global => protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = { val expandLimit = 10 + @tailrec def expandInternal(str: String, depth: Int): String = { if (depth >= expandLimit) throw new ExpansionLimitExceeded(str) @@ -342,7 +361,7 @@ trait DocComments { self: Global => else { val vstart = idx idx = skipVariable(str, idx + 1) - def replaceWith(repl: String) { + def replaceWith(repl: String): Unit = { out append str.substring(copied, vstart) out append repl copied = idx @@ -361,13 +380,13 @@ trait DocComments { self: Global => case Some(replacement) => replaceWith(replacement) case None => val pos = docCommentPos(sym) - val loc = pos withPoint (pos.start + vstart + 1) + val loc = if (pos.isDefined) pos.withPoint(pos.start + vstart + 1) else NoPosition runReporting.warning(loc, s"Variable $vname undefined in comment for $sym in $site", WarningCategory.Scaladoc, sym) } } } } - if (out.length == 0) str + if (out.isEmpty) str else { out append str.substring(copied) expandInternal(out.toString, depth + 1) @@ -376,10 +395,9 @@ trait DocComments { self: Global => // We suppressed expanding \$ throughout the recursion, and now we // need to replace \$ with $ so it looks as intended. - expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$") + expandInternal(initialStr, 0).replace("""\$""", "$") } - // !!! todo: inherit from Comment? 
case class DocComment(raw: String, pos: Position = NoPosition, codePos: Position = NoPosition) { /** Returns: @@ -409,6 +427,8 @@ trait DocComments { self: Global => val comment = "/** " + raw.substring(commentStart, end) + "*/" val commentPos = subPos(commentStart, end) + runReporting.deprecationWarning(codePos, "The @usecase tag is deprecated, instead use the @example tag to document the usage of your API", "2.13.0", site = "", origin = "") + UseCase(DocComment(comment, commentPos, codePos), code, codePos) } @@ -417,23 +437,20 @@ trait DocComments { self: Global => else { val start1 = pos.start + start val end1 = pos.start + end - pos withStart start1 withPoint start1 withEnd end1 + pos.copyRange(start1, start1, end1) } - def defineVariables(sym: Symbol) = { - val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r - - defs(sym) ++= defines.map { - str => { - val start = skipWhitespace(str, "@define".length) - val (key, value) = str.splitAt(skipVariable(str, start)) - key.drop(start) -> value + def defineVariables(sym: Symbol) = + defs(sym) ++= defines.map { str => + val start = skipWhitespace(str, "@define".length) + str.splitAt(skipVariable(str, start)) match { + case (key, DocComment.Trim(value)) => variableName(key.drop(start)) -> value.replaceAll("\\s+\\*+$", "") + case x => throw new MatchError(x) } - } map { - case (key, Trim(value)) => - variableName(key) -> value.replaceAll("\\s+\\*+$", "") } - } + } + object DocComment { + private val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r } case class UseCase(comment: DocComment, body: String, pos: Position) { @@ -451,8 +468,8 @@ trait DocComments { self: Global => def getSite(name: Name): Type = { def findIn(sites: List[Symbol]): Type = sites match { - case List() => NoType - case site :: sites1 => select(site.thisType, name, findIn(sites1)) + case site1 :: sites1 => select(site1.thisType, name, findIn(sites1)) + case _ => NoType } // Previously, searching was taking place *only* in the current package and in the root 
package // now we're looking for it everywhere in the hierarchy, so we'll be able to link variable expansions like @@ -489,7 +506,7 @@ trait DocComments { self: Global => case _ => (getSite(partnames.head), partnames.tail) } - val result = (start /: rest)(select(_, _, NoType)) + val result = rest.foldLeft(start)(select(_, _, NoType)) if (result == NoType) runReporting.warning( comment.codePos, @@ -529,6 +546,7 @@ trait DocComments { self: Global => (typeRef(NoPrefix, alias, Nil), false) } + @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[(Type, Boolean)]): (Type, Boolean) = if (from.isEmpty) (sym.tpe, false) else if (from.head == sym) to.head @@ -536,16 +554,14 @@ trait DocComments { self: Global => val substAliases = new TypeMap { def apply(tp: Type) = mapOver(tp) match { - case tp1 @ TypeRef(pre, sym, args) if (sym.name.length > 1 && sym.name.startChar == '$') => + case tp1 @ TypeRef(_, sym, args) if sym.name.length > 1 && sym.name.startChar == '$' => subst(sym, aliases, aliasExpansions) match { - case (TypeRef(pre1, sym1, _), canNormalize) => - val tpe = typeRef(pre1, sym1, args) + case (TypeRef(pre, sym, _), canNormalize) => + val tpe = typeRef(pre, sym, args) if (canNormalize) tpe.normalize else tpe - case _ => - tp1 + case _ => tp1 } - case tp1 => - tp1 + case tp1 => tp1 } } @@ -557,5 +573,5 @@ trait DocComments { self: Global => } } - class ExpansionLimitExceeded(str: String) extends Exception + class ExpansionLimitExceeded(str: String) extends Exception(str) } diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala index 0c43f37b0fa3..3f5b06281197 100644 --- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala +++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,9 +13,8 @@ package scala.tools.nsc package ast -import scala.compat.Platform.EOL +import java.lang.System.{lineSeparator => EOL} import symtab.Flags._ -import scala.language.postfixOps import scala.reflect.internal.util.ListOfNil /** The object `nodePrinter` converts the internal tree @@ -40,7 +39,7 @@ abstract class NodePrinters { } trait DefaultPrintAST extends PrintAST { - val printPos = settings.Xprintpos || settings.Yposdebug + val printPos = settings.Xprintpos.value || settings.Yposdebug.value def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name) def showDefTreeName(tree: DefTree) = showName(tree.name) @@ -67,8 +66,8 @@ abstract class NodePrinters { def showAttributes(tree: Tree): String = { if (infolevel == InfoLevel.Quiet) "" else { - try { List(showSymbol(tree), showType(tree)) filterNot (_ == "") mkString ", " trim } - catch { case ex: Throwable => "sym= " + ex.getMessage } + try List(showSymbol(tree), showType(tree)).filterNot(_ == "").mkString(", ").trim + catch { case ex: Throwable => s"sym= ${ex.getMessage}" } } } } @@ -90,7 +89,7 @@ abstract class NodePrinters { tree match { case SelectFromTypeTree(qual, name) => showRefTreeName(qual) + "#" + showName(name) case Select(qual, name) => showRefTreeName(qual) + "." 
+ showName(name) - case id @ Ident(name) => showNameAndPos(id) + case id: Ident => showNameAndPos(id) case _ => "" + tree } } @@ -108,8 +107,8 @@ abstract class NodePrinters { def stringify(tree: Tree): String = { buf.clear() - if (settings.XshowtreesStringified) buf.append(tree.toString + EOL) - if (settings.XshowtreesCompact) { + if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL) + if (settings.XshowtreesCompact.value) { buf.append(showRaw(tree, printIds = settings.uniqid, printTypes = settings.printtypes)) } else { level = 0 @@ -117,7 +116,7 @@ abstract class NodePrinters { } buf.toString } - def traverseAny(x: Any) { + def traverseAny(x: Any): Unit = { x match { case t: Tree => traverse(t) case xs: List[_] => printMultiline("List", "")(xs foreach traverseAny) @@ -125,8 +124,9 @@ abstract class NodePrinters { } } def println(s: String) = printLine(s, "") + def print(s: String) = buf.append(s) - def printLine(value: String, comment: String) { + def printLine(value: String, comment: String): Unit = { buf append " " * level buf append value if (comment != "") { @@ -152,7 +152,7 @@ abstract class NodePrinters { } str.toString } - def printModifiers(tree: MemberDef) { + def printModifiers(tree: MemberDef): Unit = { // scala/bug#5885: by default this won't print annotations of not yet initialized symbols val annots0 = tree.symbol.annotations match { case Nil => tree.mods.annotations @@ -169,14 +169,14 @@ abstract class NodePrinters { println(flagString + annots) } - def applyCommon(tree: Tree, fun: Tree, args: List[Tree]) { + def applyCommon(tree: Tree, fun: Tree, args: List[Tree]): Unit = { printMultiline(tree) { traverse(fun) traverseList("Nil", "argument")(args) } } - def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]) { + def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]): Unit = { printMultiline(tree) { traverse(fun) traverseList("[]", "type argument")(args) @@ -184,10 +184,10 @@ abstract class NodePrinters { } def 
treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix - def printMultiline(tree: Tree)(body: => Unit) { + def printMultiline(tree: Tree)(body: => Unit): Unit = { printMultiline(treePrefix(tree), showAttributes(tree))(body) } - def printMultiline(prefix: String, comment: String)(body: => Unit) { + def printMultiline(prefix: String, comment: String)(body: => Unit): Unit = { printLine(prefix + "(", comment) indent(body) println(")") @@ -199,23 +199,23 @@ abstract class NodePrinters { finally level -= 1 } - def traverseList(ifEmpty: String, what: String)(trees: List[Tree]) { + def traverseList(ifEmpty: String, what: String)(trees: List[Tree]): Unit = { if (trees.isEmpty) println(ifEmpty) else if (trees.tail.isEmpty) traverse(trees.head) else { - printLine("", trees.length + " " + what + "s") + printLine("", "" + trees.length + " " + what + "s") trees foreach traverse } } - def printSingle(tree: Tree, name: Name) { + def printSingle(tree: Tree, name: Name): Unit = { println(treePrefix(tree) + "(" + showName(name) + ")" + showAttributes(tree)) } - def traverse(tree: Tree) { - showPosition(tree) + def traverse(tree: Tree): Unit = { + print(showPosition(tree)) tree match { case ApplyDynamic(fun, args) => applyCommon(tree, fun, args) @@ -235,7 +235,6 @@ abstract class NodePrinters { case ld @ LabelDef(name, params, rhs) => printMultiline(tree) { - showNameAndPos(ld) traverseList("()", "params")(params) traverse(rhs) } @@ -295,7 +294,7 @@ abstract class NodePrinters { printLine("", "1 parameter list") ps foreach traverse case pss => - printLine("", pss.length + " parameter lists") + printLine("", "" + pss.length + " parameter lists") pss foreach (ps => traverseList("()", "parameter")(ps)) } traverse(tpt) @@ -359,18 +358,18 @@ abstract class NodePrinters { tree match { case t: RefTree => println(showRefTree(t)) case t if t.productArity == 0 => println(treePrefix(t)) - case t => printMultiline(tree)(tree.productIterator foreach traverseAny) + case _ => 
printMultiline(tree)(tree.productIterator foreach traverseAny) } } } } - def printUnit(unit: CompilationUnit) { + def printUnit(unit: CompilationUnit): Unit = { print("// Scala source: " + unit.source + "\n") println(Option(unit.body) map (x => nodeToString(x) + "\n") getOrElse "") } - def printAll() { + def printAll(): Unit = { print("[[syntax trees at end of " + phase + "]]") global.currentRun.units foreach printUnit } diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index ceab1abdcff9..3c7060d87c72 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,7 +20,7 @@ trait Positions extends scala.reflect.internal.Positions { class ValidatingPosAssigner extends PosAssigner { var pos: Position = _ - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { if (t eq EmptyTree) () else if (t.pos == NoPosition) super.traverse(t setPos pos) else if (globalPhase.id <= currentRun.picklerPhase.id) { @@ -39,6 +39,6 @@ trait Positions extends scala.reflect.internal.Positions { } override protected[this] lazy val posAssigner: PosAssigner = - if (settings.Yrangepos && settings.isDebug || settings.Yposdebug) new ValidatingPosAssigner + if (settings.Yrangepos.value && settings.isDebug || settings.Yposdebug.value) new ValidatingPosAssigner else new DefaultPosAssigner } diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index c12993c01603..254871fbfa92 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and 
Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,13 +13,20 @@ package scala.tools.nsc package ast -import java.io.{ OutputStream, PrintWriter } +import java.io.{OutputStream, PrintWriter} + +import scala.annotation.nowarn trait Printers extends scala.reflect.internal.Printers { this: Global => import treeInfo.{ IsTrue, IsFalse } - class TreePrinter(out: PrintWriter) extends super.TreePrinter(out) { + @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.ast\.Printers\.TreePrinter""") + final type AstTreePrinter = TreePrinter + + @nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use AstTreePrinter instead", since = "2.13.4") + class TreePrinter(out: PrintWriter) extends InternalTreePrinter(out) { override def print(args: Any*): Unit = args foreach { case tree: Tree => @@ -27,7 +34,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => printTree( if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) { tree match { - case ClassDef(_, _, _, impl @ Template(ps, noSelfType, body)) + case ClassDef(_, _, _, impl @ Template(ps, `noSelfType`, body)) if (tree.symbol.thisSym != tree.symbol) => ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body)) case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl) @@ -50,13 +57,13 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => } // overflow cases missing from TreePrinter in scala.reflect.api - override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match { + override def xprintTree(treePrinter: InternalTreePrinter, tree: Tree) = tree match { case DocDef(comment, definition) => treePrinter.print(comment.raw) treePrinter.println() treePrinter.print(definition) - case TypeTreeWithDeferredRefCheck() => + case _: TypeTreeWithDeferredRefCheck => treePrinter.print("") case SelectFromArray(qualifier, 
name, _) => @@ -69,8 +76,8 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => /** A tree printer which is stingier about vertical whitespace and unnecessary * punctuation than the standard one. */ - class CompactTreePrinter(out: PrintWriter) extends TreePrinter(out) { - override def printRow(ts: List[Tree], start: String, sep: String, end: String) { + class CompactTreePrinter(out: PrintWriter) extends AstTreePrinter(out) { + override def printRow(ts: List[Tree], start: String, sep: String, end: String): Unit = { print(start) printSeq(ts)(print(_))(print(sep)) print(end) @@ -143,8 +150,8 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => // if a Block only continues one actual statement, just print it. case Block(stats, expr) => allStatements(tree) match { - case List(x) => printTree(x) - case xs => s() + case List(x) => printTree(x) + case _ => s() } // We get a lot of this stuff @@ -161,25 +168,26 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => def ifIndented(x: Tree) = { indent() ; println() ; printTree(x) ; undent() } - - val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements print("if ("); print(cond); print(") ") - thenStmts match { + allStatements(thenp) match { case List(x: If) => ifIndented(x) case List(x) => printTree(x) case _ => printTree(thenp) } - if (elseStmts.nonEmpty) { + def printElse(elsep: Tree) = { print(" else") indent() ; println() - elseStmts match { - case List(x) => printTree(x) - case _ => printTree(elsep) - } + printTree(elsep) undent() ; println() } + + allStatements(elsep) match { + case Nil => + case List(x) => printElse(x) + case _ => printElse(elsep) + } case _ => s() } } @@ -187,14 +195,14 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymowners, settings.Yshowsymkinds) def asCompactString(t: 
Tree): String = render(t, newCompactTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymowners, settings.Yshowsymkinds) - def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true, true) + def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, printTypes = true, printIds = true, printOwners = true, printKinds = true) - def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) + def newStandardTreePrinter(writer: PrintWriter): AstTreePrinter = new AstTreePrinter(writer) def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer) - override def newTreePrinter(writer: PrintWriter): TreePrinter = - if (settings.Ycompacttrees) newCompactTreePrinter(writer) + override def newTreePrinter(writer: PrintWriter): AstTreePrinter = + if (settings.Ycompacttrees.value) newCompactTreePrinter(writer) else newStandardTreePrinter(writer) - override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream)) - override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter)) + override def newTreePrinter(stream: OutputStream): AstTreePrinter = newTreePrinter(new PrintWriter(stream)) + override def newTreePrinter(): AstTreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter)) } diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 875c65fe5092..a309eecfe874 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,29 +15,26 @@ package tools.nsc package ast import scala.language.implicitConversions - import java.awt.{List => _, _} import java.awt.event._ -import java.io.StringWriter - +import java.io.{StringWriter, Writer} import javax.swing._ import javax.swing.event.TreeModelListener import javax.swing.tree._ - -import scala.concurrent.Lock -import scala.text._ +import java.util.concurrent.CountDownLatch +import scala.annotation.{nowarn, tailrec} /** * Tree browsers can show the AST in a graphical and interactive * way, useful for debugging and understanding. * * @author Iulian Dragos - * @version 1.0 */ abstract class TreeBrowsers { val global: Global import global._ import nme.EMPTY + import TreeBrowsers._ val borderSize = 10 @@ -62,20 +59,16 @@ abstract class TreeBrowsers { /** print the whole program */ def browse(pName: String, units: List[CompilationUnit]): Unit = { - var unitList: List[UnitTree] = Nil - - for (i <- units) - unitList = UnitTree(i) :: unitList - val tm = new ASTTreeModel(ProgramTree(unitList)) - - val frame = new BrowserFrame(pName) - frame.setTreeModel(tm) - - val lock = new Lock() - frame.createFrame(lock) - + val latch = new CountDownLatch(1) + SwingUtilities.invokeAndWait {() => + val unitList = units.map(UnitTree(_)) + val tm = new ASTTreeModel(ProgramTree(unitList)) + val frame = new BrowserFrame(pName) + frame.setTreeModel(tm) + frame.createFrame(latch) + } // wait for the frame to be closed - lock.acquire() + latch.await() } } @@ -125,7 +118,6 @@ abstract class TreeBrowsers { * displaying information * * @author Iulian Dragos - * @version 1.0 */ class BrowserFrame(phaseName: String = "unknown") { try { @@ -169,20 +161,19 @@ abstract class TreeBrowsers { /** Create a frame that displays the AST. * - * @param lock The lock is used in order to stop the compilation thread + * @param latch The latch is used in order to stop the compilation thread * until the user is done with the tree inspection. 
Swing creates its * own threads when the frame is packed, and therefore execution * would continue. However, this is not what we want, as the tree and * especially symbols/types would change while the window is visible. */ - def createFrame(lock: Lock): Unit = { - lock.acquire() // keep the lock until the user closes the window + def createFrame(latch: CountDownLatch): Unit = { frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE) frame.addWindowListener(new WindowAdapter() { /** Release the lock, so compilation may resume after the window is closed. */ - override def windowClosed(e: WindowEvent): Unit = lock.release() + override def windowClosed(e: WindowEvent): Unit = latch.countDown() }) jTree = new JTree(treeModel) { @@ -220,11 +211,12 @@ abstract class TreeBrowsers { frame.getContentPane().add(splitPane) frame.pack() frame.setVisible(true) + splitPane.setDividerLocation(0.5) } class ASTMenuBar extends JMenuBar { - val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask() - val shiftKey = InputEvent.SHIFT_MASK + val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(): @nowarn("cat=deprecation") // deprecated since JDK 10, replacement only available in 10+ + val shiftKey = InputEvent.SHIFT_DOWN_MASK val jmFile = new JMenu("File") // val jmiSaveImage = new JMenuItem( // new AbstractAction("Save Tree Image") { @@ -243,7 +235,7 @@ abstract class TreeBrowsers { val jmiCancel = new JMenuItem ( new AbstractAction("Cancel Compilation") { putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false)) - override def actionPerformed(e: ActionEvent) { + override def actionPerformed(e: ActionEvent): Unit = { closeWindow() global.currentRun.cancel() } @@ -264,7 +256,7 @@ abstract class TreeBrowsers { val jmiExpand = new JMenuItem( new AbstractAction("Expand All Nodes") { putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_E, menuKey, false)) - override def actionPerformed(e: ActionEvent) { + 
override def actionPerformed(e: ActionEvent): Unit = { expandAll(jTree) } } @@ -273,7 +265,7 @@ abstract class TreeBrowsers { val jmiCollapse = new JMenuItem( new AbstractAction("Collapse All Nodes") { putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_L, menuKey, false)) - override def actionPerformed(e: ActionEvent) { + override def actionPerformed(e: ActionEvent): Unit = { collapseAll(jTree) } } @@ -282,7 +274,7 @@ abstract class TreeBrowsers { val jmiGoto = new JMenuItem( new AbstractAction("Go to unit") { putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_N, menuKey, false)) - override def actionPerformed(actionEvent: ActionEvent) { + override def actionPerformed(actionEvent: ActionEvent): Unit = { val query = JOptionPane.showInputDialog("Go to unit:", frame.getOwner) if (query ne null) { // "Cancel" returns null val units = treeModel.program.units @@ -290,7 +282,7 @@ abstract class TreeBrowsers { // skip through 1-ary trees def expando(tree: Tree): List[Tree] = tree.children match { case only :: Nil => only :: expando(only) - case other => tree :: Nil + case _ => tree :: Nil } val path = new TreePath((treeModel.getRoot :: unit :: expando(unit.unit.body)).toArray[AnyRef]) // targ necessary to disambiguate Object and Object[] ctors @@ -473,10 +465,10 @@ abstract class TreeBrowsers { case Super(qualif, mix) => List(qualif) - case This(qualif) => + case This(_) => Nil - case Select(qualif, selector) => + case Select(qualif, _) => List(qualif) case Ident(name) => @@ -494,7 +486,7 @@ abstract class TreeBrowsers { case SingletonTypeTree(ref) => List(ref) - case SelectFromTypeTree(qualif, selector) => + case SelectFromTypeTree(qualif, _) => List(qualif) case CompoundTypeTree(templ) => @@ -520,6 +512,8 @@ abstract class TreeBrowsers { case Star(t) => List(t) + + case x => throw new MatchError(x) } /** Return a textual representation of this t's symbol */ @@ -625,13 +619,13 @@ abstract class TreeBrowsers { toDocument(hi) :: ")") ) - 
case RefinedType(parents, defs) => + case RefinedType(parents, _) => Document.group( Document.nest(4, "RefinedType(" :/: toDocument(parents) :: ")") ) - case ClassInfoType(parents, defs, clazz) => + case ClassInfoType(parents, _, clazz) => Document.group( Document.nest(4,"ClassInfoType(" :/: toDocument(parents) :: ", " :/: @@ -692,8 +686,96 @@ abstract class TreeBrowsers { toDocument(erased) :: ")")) case _ => - sys.error("Unknown case: " + t.toString +", "+ t.getClass) + abort("Unknown case: " + t.toString +", "+ t.getClass) } } } + +object TreeBrowsers { + case object DocNil extends Document + case object DocBreak extends Document + case class DocText(txt: String) extends Document + case class DocGroup(doc: Document) extends Document + case class DocNest(indent: Int, doc: Document) extends Document + case class DocCons(hd: Document, tl: Document) extends Document + + /** + * A basic pretty-printing library, based on Lindig's strict version + * of Wadler's adaptation of Hughes' pretty-printer. + * + * @author Michel Schinz + */ + sealed abstract class Document { + def ::(hd: Document): Document = DocCons(hd, this) + def ::(hd: String): Document = DocCons(DocText(hd), this) + def :/:(hd: Document): Document = hd :: DocBreak :: this + def :/:(hd: String): Document = hd :: DocBreak :: this + + /** + * Format this document on `writer` and try to set line + * breaks so that the result fits in `width` columns. 
+ */ + def format(width: Int, writer: Writer): Unit = { + type FmtState = (Int, Boolean, Document) + + @tailrec + def fits(w: Int, state: List[FmtState]): Boolean = state match { + case _ if w < 0 => false + case List() => true + case (_, _, DocNil) :: z => fits(w, z) + case (i, b, DocCons(h, t)) :: z => fits(w, (i, b, h) :: (i, b, t) :: z) + case (_, _, DocText(t)) :: z => fits(w - t.length(), z) + case (i, b, DocNest(ii, d)) :: z => fits(w, (i + ii, b, d) :: z) + case (_, false, DocBreak) :: z => fits(w - 1, z) + case (_, true, DocBreak) :: _ => true + case (i, _, DocGroup(d)) :: z => fits(w, (i, false, d) :: z) + } + + def spaces(n: Int): Unit = { + var rem = n + while (rem >= 16) { writer.write(" ") ; rem -= 16 } + if (rem >= 8) { writer.write(" ") ; rem -= 8 } + if (rem >= 4) { writer.write(" ") ; rem -= 4 } + if (rem >= 2) { writer.write(" ") ; rem -= 2 } + if (rem == 1) { writer.write(" ") } + } + + @tailrec + def fmt(k: Int, state: List[FmtState]): Unit = state match { + case List() => () + case (_, _, DocNil) :: z => fmt(k, z) + case (i, b, DocCons(h, t)) :: z => fmt(k, (i, b, h) :: (i, b, t) :: z) + case (_, _, DocText(t)) :: z => writer.write(t) ; fmt(k + t.length(), z) + case (i, b, DocNest(ii, d)) :: z => fmt(k, (i + ii, b, d) :: z) + case (i, true, DocBreak) :: z => writer.write("\n") ; spaces(i) ; fmt(i, z) + case (_, false, DocBreak) :: z => writer.write(" ") ; fmt(k + 1, z) + case (i, _, DocGroup(d)) :: z => fmt(k, (i, !fits(width - k, (i, false, d) :: z), d) :: z) + case _ => () + } + + fmt(0, (0, false, DocGroup(this)) :: Nil) + } + } + + object Document { + /** The empty document */ + def empty = DocNil + + /** A break, which will either be turned into a space or a line break */ + def break = DocBreak + + /** A document consisting of some text literal */ + def text(s: String): Document = DocText(s) + + /** + * A group, whose components will either be printed with all breaks + * rendered as spaces, or with all breaks rendered as line breaks. 
+ */ + def group(d: Document): Document = DocGroup(d) + + /** A nested document, which will be indented as specified. */ + def nest(i: Int, d: Document): Document = DocNest(i, d) + } + +} diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index e539bba97e91..811d91043b5d 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,6 +13,7 @@ package scala.tools.nsc package ast +import scala.annotation.unused import scala.language.implicitConversions /** A DSL for generating scala code. The goal is that the @@ -31,8 +32,6 @@ trait TreeDSL { def nullSafe[T](f: Tree => Tree, ifNull: Tree): Tree => Tree = tree => IF (tree MEMBER_== NULL) THEN ifNull ELSE f(tree) - def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f) - object LIT extends (Any => Literal) { def typed(x: Any) = apply(x) setType ConstantType(Constant(x)) def apply(x: Any) = Literal(Constant(x)) @@ -66,25 +65,27 @@ trait TreeDSL { * a member called nme.EQ. Not sure if that should happen, but we can be * robust by dragging in Any regardless. 
*/ - def MEMBER_== (other: Tree) = { - val opSym = if (target.tpe == null) NoSymbol else target.tpe member nme.EQ - if (opSym == NoSymbol) ANY_==(other) - else fn(target, opSym, other) - } + def MEMBER_== (other: Tree) = fn(target, (if (target.tpe == null) NoSymbol else target.tpe member nme.EQ).orElse(Any_==), other) def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe) def ANY_== (other: Tree) = fn(target, Any_==, other) def ANY_!= (other: Tree) = fn(target, Any_!=, other) - def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) - def OBJ_NE (other: Tree) = fn(target, Object_ne, other) + def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) + def OBJ_NE (other: Tree) = fn(target, Object_ne, other) + def OBJ_== (other: Tree) = fn(target, Object_equals, other) + def OBJ_## = fn(target, Object_hashCode) def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other) def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other) def INT_- (other: Tree) = fn(target, getMember(IntClass, nme.MINUS), other) // generic operations on ByteClass, IntClass, LongClass + @unused("avoid warning for multiple parameters") def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other) + @unused("avoid warning for multiple parameters") def GEN_& (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.AND), other) + @unused("avoid warning for multiple parameters") def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other) + @unused("avoid warning for multiple parameters") def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other) /** Apply, Select, Match **/ @@ -128,8 +129,10 @@ trait TreeDSL { } class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) { def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin) - def ENDTRY = Try(body, catches, fin) + def FINALLY(end: END.type) = Try(body, catches, fin) + def FINALLY(fin1: Tree) = Try(body, catches, fin1) } + 
object END def CASE(pat: Tree): CaseStart = new CaseStart(pat, EmptyTree) def DEFAULT: CaseStart = new CaseStart(Ident(nme.WILDCARD), EmptyTree) @@ -137,7 +140,17 @@ trait TreeDSL { def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) def NOT(tree: Tree) = Select(tree, Boolean_not) - def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd + def AND(guards: Tree*) = { + def binaryTreeAnd(tests: Seq[Tree]): Tree = tests match{ + case Seq() => EmptyTree + case Seq(single) => single + case multiple => + val (before, after) = multiple.splitAt(tests.size / 2) + gen.mkAnd(binaryTreeAnd(before), binaryTreeAnd(after)) + } + + binaryTreeAnd(guards) + } def IF(tree: Tree) = new IfStart(tree, EmptyTree) def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) @@ -145,8 +158,8 @@ trait TreeDSL { def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, gen.mkTuple(xs.toList)) /** Typed trees from symbols. */ - def REF(sym: Symbol) = gen.mkAttributedRef(sym) - def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym) + def REF(sym: Symbol): RefTree = gen.mkAttributedRef(sym) + def REF(pre: Type, sym: Symbol): RefTree = gen.mkAttributedRef(pre, sym) /** Implicits - some of these should probably disappear **/ implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 09107b3ba43e..b9f2062bac97 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,10 +13,11 @@ package scala.tools.nsc package ast +import scala.annotation.{tailrec, unused} import scala.collection.mutable.ListBuffer +import scala.reflect.internal.util.{FreshNameCreator, ListOfNil} +import scala.util.chaining._ import symtab.Flags._ -import scala.language.postfixOps -import scala.reflect.internal.util.FreshNameCreator /** XXX to resolve: TreeGen only assumes global is a SymbolTable, but * TreeDSL at the moment expects a Global. Can we get by with SymbolTable? @@ -38,7 +39,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { def mkImport(qualSym: Symbol, name: Name, toName: Name): Import = mkImportFromSelector(qualSym, ImportSelector(name, 0, toName, 0) :: Nil) - private def mkImportFromSelector(qualSym: Symbol, selector: List[ImportSelector]): Import = { + def mkImportFromSelector(qualSym: Symbol, selector: List[ImportSelector]): Import = { assert(qualSym ne null, this) val qual = gen.mkAttributedStableRef(qualSym) val importSym = ( @@ -79,8 +80,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { def mkRuntimeCall(meth: Name, targs: List[Type], args: List[Tree]): Tree = mkMethodCall(ScalaRunTimeModule, meth, targs, args) - def mkSysErrorCall(message: String): Tree = - mkMethodCall(Sys_error, List(Literal(Constant(message)))) + def mkThrowNewRuntimeException(message: String) = + Throw(RuntimeExceptionClass.tpe, Literal(Constant(message))) /** A creator for a call to a scala.reflect.Manifest or ClassManifest factory method. 
* @@ -105,7 +106,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { def mkAppliedTypeForCase(clazz: Symbol): Tree = { val numParams = clazz.typeParams.size if (clazz.typeParams.isEmpty) Ident(clazz) - else AppliedTypeTree(Ident(clazz), 1 to numParams map (_ => Bind(tpnme.WILDCARD, EmptyTree)) toList) + else AppliedTypeTree(Ident(clazz), (1 to numParams).map(_ => Bind(tpnme.WILDCARD, EmptyTree)).toList) } def mkBindForCase(patVar: Symbol, clazz: Symbol, targs: List[Type]): Tree = { Bind(patVar, Typed(Ident(nme.WILDCARD), @@ -128,16 +129,14 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { /** Make forwarder to method `target`, passing all parameters in `params` */ def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) = - (target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg)) + vparamss.foldLeft(target)((fn, vparams) => Apply(fn, vparams map paramToArg)) - /** Applies a wrapArray call to an array, making it a WrappedArray. - * Don't let a reference type parameter be inferred, in case it's a singleton: - * apply the element type directly. - */ - def mkWrapArray(tree: Tree, elemtp: Type) = { + /** Applies a wrapArray call to an array, making it an immutable ArraySeq suitable for Scala varargs. + * Don't let a reference type parameter be inferred, in case it's a singleton: apply the element type directly. 
+ */ + def mkWrapVarargsArray(tree: Tree, elemtp: Type) = { mkMethodCall( - PredefModule, - wrapArrayMethodName(elemtp), + wrapVarargsArrayMethod(elemtp), if (isPrimitiveValueType(elemtp)) Nil else List(elemtp), List(tree) ) @@ -152,8 +151,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { */ override def mkCast(tree: Tree, pt: Type): Tree = { debuglog("casting " + tree + ":" + tree.tpe + " to " + pt + " at phase: " + phase) - assert(!tree.tpe.isInstanceOf[MethodType], tree) - assert(!pt.isInstanceOf[MethodType], tree) atPos(tree.pos) { mkAsInstanceOf(tree, pt, any = !phase.next.erasedTypes, wrapInApply = isAtPhaseAfter(currentRun.uncurryPhase)) } @@ -162,10 +159,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree def mkCastPreservingAnnotations(tree: Tree, pt: Type) = - Typed(mkCast(tree, pt.withoutAnnotations.dealias), TypeTree(pt)) - // ^^^ I think we should either normalize or do nothing, but the half measure of dealias does not make sense, - // as the logic behind a cast operates on the fully normalized type, not just on a dealiased type (think refinements with type aliases). - // It would be ok to do nothing here, because erasure will convert the type to something that can be casted anyway. + Typed(mkCast(tree, pt.withoutAnnotations), TypeTree(pt)) /** Generate a cast for tree Tree representing Array with * elem type elemtp to expected type pt. @@ -179,8 +173,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { /** Translate names in Select/Ident nodes to type names. 
*/ def convertToTypeName(tree: Tree): Option[RefTree] = tree match { - case Select(qual, name) => Some(Select(qual, name.toTypeName)) - case Ident(name) => Some(Ident(name.toTypeName)) + case Select(qual, name) => Some(Select(qual, name.toTypeName).setAttachments(tree.attachments)) + case Ident(name) => Some(Ident(name.toTypeName).setAttachments(tree.attachments)) case _ => None } @@ -270,6 +264,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // used to create the lifted method that holds a function's body def mkLiftedFunctionBodyMethod(localTyper: global.analyzer.Typer)(owner: global.Symbol, fun: global.Function) = { + @tailrec def nonLocalEnclosingMember(sym: Symbol): Symbol = { if (sym.isLocalDummy) sym.enclClass.primaryConstructor else if (sym.isLocalToBlock) nonLocalEnclosingMember(sym.originalOwner) @@ -308,7 +303,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { * @param name name for the new method * @param additionalFlags flags to be put on the method in addition to FINAL */ - private def mkMethodForFunctionBody(localTyper: analyzer.Typer) + private def mkMethodForFunctionBody(@unused localTyper: analyzer.Typer) (owner: Symbol, fun: Function, name: TermName) (methParamProtos: List[Symbol] = fun.vparams.map(_.symbol), resTp: Type = functionResultType(fun.tpe), @@ -326,9 +321,9 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // Rewrite This(enclModuleClass) to Ident(enclModuleClass) to avoid unnecessary capture of the module // class, which might hamper serializability. 
// - // Analagous to this special case in ExplicitOuter: https://github.com/scala/scala/blob/d2d33ddf8c/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala#L410-L412 + // Analogous to this special case in ExplicitOuter: https://github.com/scala/scala/blob/d2d33ddf8c/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala#L410-L412 // that understands that such references shouldn't give rise to outer params. - val enclosingStaticModules = owner.enclClassChain.filter(x => !x.hasPackageFlag && x.isModuleClass && x.isStatic) + val enclosingStaticModules = owner.ownersIterator.filter(x => !x.hasPackageFlag && x.isModuleClass && x.isStatic) enclosingStaticModules.foldLeft(tree)((tree, moduleClass) => tree.substituteThis(moduleClass, gen.mkAttributedIdent(moduleClass.sourceModule)) ) } @@ -361,6 +356,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { val selfParamSym = newSym.newSyntheticValueParam(newSym.owner.typeConstructor, nme.SELF).setFlag(ARTIFACT) newSym.updateInfo(newSym.info match { case mt @ MethodType(params, res) => copyMethodType(mt, selfParamSym :: params, res) + case x => throw new MatchError(x) }) val selfParam = ValDef(selfParamSym) val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // scala/scala-dev#186 intentionally leaving Ident($this) is unpositioned @@ -369,16 +365,22 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } def expandFunction(localTyper: analyzer.Typer)(fun: Function, inConstructorFlag: Long): Tree = { - val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) - val parents = if (isFunctionType(fun.tpe)) { - anonClass addAnnotation SerialVersionUIDAnnotation - addSerializable(abstractFunctionType(fun.vparams.map(_.symbol.tpe), fun.body.tpe.deconst)) - } else { - if (fun.tpe.typeSymbol.isSubClass(JavaSerializableClass)) - anonClass addAnnotation SerialVersionUIDAnnotation - fun.tpe :: Nil + val anonClass 
= fun.symbol.owner.newAnonymousFunctionClass(fun.pos, inConstructorFlag) + val typeSym = fun.tpe.typeSymbol + val isFunction = isFunctionSymbol(typeSym) + if (isFunction || typeSym.isSubClass(SerializableClass)) + anonClass.addAnnotation(SerialVersionUIDAnnotation) + + val rScope = newScope + def parents(tp: Type): List[Type] = tp match { + case RefinedType(ps, scope) => + assert(scope.forall(_.isType), s"Cannot expand function of type $tp") + ps.flatMap(parents).tap(_ => scope.foreach(rScope.enter)) + case _ => + if (!isFunction) tp :: Nil + else addSerializable(abstractFunctionType(fun.vparams.map(_.symbol.tpe), fun.body.tpe.deconst)) } - anonClass setInfo ClassInfoType(parents, newScope, anonClass) + anonClass.setInfo(ClassInfoType(parents(fun.tpe), rScope, anonClass)) // The original owner is used in the backend for the EnclosingMethod attribute. If fun is // nested in a value-class method, its owner was already changed to the extension method. @@ -391,9 +393,12 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { localTyper.typedPos(fun.pos) { Block( ClassDef(anonClass, NoMods, ListOfNil, List(samDef), fun.pos), - Typed(New(anonClass.tpe), TypeTree(fun.tpe))) + Typed(New(anonClass.tpe), TypeTree(fun.tpe)), + ) } } override def isPatVarWarnable = settings.warnUnusedPatVars + + override def isVarDefWarnable = settings.lintValPatterns } diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index fa336c0b64f2..a16dbfe4260a 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,12 +13,13 @@ package scala.tools.nsc package ast +import scala.reflect.internal.MacroAnnotionTreeInfo + /** This class ... * * @author Martin Odersky - * @version 1.0 */ -abstract class TreeInfo extends scala.reflect.internal.TreeInfo { +abstract class TreeInfo extends scala.reflect.internal.TreeInfo with MacroAnnotionTreeInfo { val global: Global import global._ import definitions._ @@ -61,8 +62,8 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { // new B(v). Returns B and v. object Box { def unapply(t: Tree): Option[(Tree, Type)] = t match { - case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType)) - case _ => None + case Apply(Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType)) + case _ => None } } // (new B(v)).unbox. returns v. @@ -93,7 +94,7 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { case _ => super.isInterfaceMember(tree) } - override def isConstructorWithDefault(t: Tree) = t match { + override def isConstructorWithDefault(t: Tree): Boolean = t match { case DocDef(_, definition) => isConstructorWithDefault(definition) case _ => super.isConstructorWithDefault(t) } @@ -117,7 +118,7 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { def unapply(tree: Apply) = tree match { case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == definitions.ArrayClass => tpt.tpe match { - case erasure.GenericArray(level, componentType) => Some(level, componentType, arg) + case erasure.GenericArray(level, componentType) => Some((level, componentType, arg)) case _ => None } case _ => None diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index fc6d769d8474..1c8ff6a8fd70 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright 
EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,35 +14,65 @@ package scala.tools.nsc package ast import scala.reflect.ClassTag -import scala.compat.Platform.EOL +import java.lang.System.lineSeparator + +import scala.annotation.nowarn trait Trees extends scala.reflect.internal.Trees { self: Global => // --- additional cases -------------------------------------------------------- /** Only used during parsing */ - case class Parens(args: List[Tree]) extends Tree + case class Parens(args: List[Tree]) extends Tree { + override def traverse(traverser: Traverser): Unit = { + traverser.traverseTrees(args) + } + } /** Documented definition, eliminated by analyzer */ case class DocDef(comment: DocComment, definition: Tree) extends Tree { override def symbol: Symbol = definition.symbol - override def symbol_=(sym: Symbol) { definition.symbol = sym } + override def symbol_=(sym: Symbol): Unit = { definition.symbol = sym } override def isDef = definition.isDef override def isTerm = definition.isTerm override def isType = definition.isType + override def transform(transformer: ApiTransformer): Tree = + transformer.treeCopy.DocDef(this, comment, transformer.transform(definition)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(definition) + } } /** Array selection ` . ` only used during erasure */ case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type) - extends RefTree with TermTree + extends RefTree with TermTree { + override def transform(transformer: ApiTransformer): Tree = + transformer.treeCopy.SelectFromArray( + this, transformer.transform(qualifier), name, erasure) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(qualifier) + } + } /** Derived value class injection (equivalent to: `new C(arg)` after erasure); only used during erasure. * The class `C` is stored as a tree attachment. 
*/ case class InjectDerivedValue(arg: Tree) - extends SymTree with TermTree + extends SymTree with TermTree { + override def transform(transformer: ApiTransformer): Tree = + transformer.treeCopy.InjectDerivedValue(this, transformer.transform(arg)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(arg) + } + } /** emitted by typer, eliminated by refchecks */ - case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree + case class TypeTreeWithDeferredRefCheck(precheck: TypeTree)(val check: () => TypeTree) extends TypTree { + override def transform(transformer: ApiTransformer): Tree = + transformer.treeCopy.TypeTreeWithDeferredRefCheck(this) + override def traverse(traverser: Traverser): Unit = { + // (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check) + } + } // --- factory methods ---------------------------------------------------------- @@ -82,21 +112,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => // --- additional cases in operations ---------------------------------- - override protected def xtraverse(traverser: Traverser, tree: Tree): Unit = tree match { - case Parens(ts) => - traverser.traverseTrees(ts) - case DocDef(comment, definition) => - traverser.traverse(definition) - case SelectFromArray(qualifier, selector, erasure) => - traverser.traverse(qualifier) - case InjectDerivedValue(arg) => - traverser.traverse(arg) - case TypeTreeWithDeferredRefCheck() => - // (and rewrap the result? how to update the deferred check? 
would need to store wrapped tree instead of returning it from check) - case _ => super.xtraverse(traverser, tree) - } - - trait TreeCopier extends super.InternalTreeCopierOps { + trait TreeCopier extends InternalTreeCopierOps { def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue @@ -104,9 +120,14 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => } implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier]) - def newStrictTreeCopier: TreeCopier = new StrictTreeCopier - def newLazyTreeCopier: TreeCopier = new LazyTreeCopier + def newStrictTreeCopier: TreeCopier = new StrictAstTreeCopier + def newLazyTreeCopier: TreeCopier = new LazyAstTreeCopier + @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.ast\.Trees\.StrictTreeCopier""") + final type StrictAstTreeCopier = StrictTreeCopier + + @nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use StrictAstTreeCopier instead", since = "2.13.4") class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier { def DocDef(tree: Tree, comment: DocComment, definition: Tree) = new DocDef(comment, definition).copyAttrs(tree) @@ -115,10 +136,16 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => def InjectDerivedValue(tree: Tree, arg: Tree) = new InjectDerivedValue(arg).copyAttrs(tree) def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match { - case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree) + case dc@TypeTreeWithDeferredRefCheck(prechk) => new TypeTreeWithDeferredRefCheck(prechk)(dc.check).copyAttrs(tree) + case x => throw new MatchError(x) } } + @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.ast\.Trees\.LazyTreeCopier""") + final type LazyAstTreeCopier = LazyTreeCopier + + 
@nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use LazyAstTreeCopier instead", since = "2.13.4") class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier { def DocDef(tree: Tree, comment: DocComment, definition: Tree) = tree match { case t @ DocDef(comment0, definition0) @@ -136,13 +163,22 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => case _ => this.treeCopy.InjectDerivedValue(tree, arg) } def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match { - case t @ TypeTreeWithDeferredRefCheck() => t + case t: TypeTreeWithDeferredRefCheck => t case _ => this.treeCopy.TypeTreeWithDeferredRefCheck(tree) } } - class Transformer extends super.Transformer { - def transformUnit(unit: CompilationUnit) { + type ApiTransformer = super.Transformer + + // TODO: uncomment when deprecating the below + // @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.ast\.Trees\.Transformer""") + final type AstTransformer = Transformer + + // TODO: deprecate when we can cleanly cross-compile without warnings + // @deprecated("use AstTransformer instead", since = "2.13.4") + @nowarn("msg=shadowing a nested class of a parent is deprecated") + class Transformer extends InternalTransformer { + def transformUnit(unit: CompilationUnit): Unit = { try unit.body = transform(unit.body) catch { case ex: Exception => @@ -153,7 +189,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => } // used when a phase is disabled - object noopTransformer extends Transformer { + object noopTransformer extends AstTransformer { override def transformUnit(unit: CompilationUnit): Unit = {} } @@ -166,8 +202,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => case InjectDerivedValue(arg) => transformer.treeCopy.InjectDerivedValue( tree, transformer.transform(arg)) - case TypeTreeWithDeferredRefCheck() => + case _: TypeTreeWithDeferredRefCheck => transformer.treeCopy.TypeTreeWithDeferredRefCheck(tree) + case _ => 
super.xtransform(transformer, tree) } // Finally, no one uses resetAllAttrs anymore, so I'm removing it from the compiler. @@ -216,15 +253,15 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => val locals = util.HashSet[Symbol](8) val orderedLocals = scala.collection.mutable.ListBuffer[Symbol]() - def registerLocal(sym: Symbol) { + def registerLocal(sym: Symbol): Unit = { if (sym != null && sym != NoSymbol) { - if (debug && !(locals contains sym)) orderedLocals append sym + if (debug && !(locals contains sym)) orderedLocals += sym locals addEntry sym } } - class MarkLocals extends self.Traverser { - def markLocal(tree: Tree) { + class MarkLocals extends self.InternalTraverser { + def markLocal(tree: Tree): Unit = { if (tree.symbol != null && tree.symbol != NoSymbol) { val sym = tree.symbol registerLocal(sym) @@ -242,22 +279,19 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => override def traverse(tree: Tree) = { tree match { - case _: DefTree | Function(_, _) | Template(_, _, _) => - markLocal(tree) - case _ => - tree + case _: DefTree | Function(_, _) | Template(_, _, _) => markLocal(tree) + case _ => } - - super.traverse(tree) + tree.traverse(this) } } - class Transformer extends self.Transformer { + class ResetTransformer extends AstTransformer { override def transform(tree: Tree): Tree = { if (leaveAlone != null && leaveAlone(tree)) tree - else - super.transform { + else { + val tree1 = { tree match { case tree if !tree.canHaveAttrs => tree @@ -312,6 +346,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => dupl.clearType() } } + tree1.transform(this) + } } } @@ -319,12 +355,14 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => new MarkLocals().traverse(x) if (debug) { - assert(locals.size == orderedLocals.size) - val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString EOL + assert(locals.size == orderedLocals.size, "Incongruent ordered locals") + val msg = 
orderedLocals.toList.filter{_ != NoSymbol} + .map(" " + _) + .mkString(lineSeparator) trace("locals (%d total): %n".format(orderedLocals.size))(msg) } - new Transformer().transform(x) + new ResetTransformer().transform(x) } } @@ -332,7 +370,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => case Parens(expr) (only used during parsing) case DocDef(comment, defn) => (eliminated by typer) - case TypeTreeWithDeferredRefCheck() => (created and eliminated by typer) + case TypeTreeWithDeferredRefCheck(prechk) => (created by typer and eliminated by refchecks) case SelectFromArray(_, _, _) => (created and eliminated by erasure) case InjectDerivedValue(_) => (created and eliminated by erasure) diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala index 8fbdec3db35c..fa5aa4bf3324 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala index 4838d59b7cfc..56dfdbc3217c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,4 +18,4 @@ package ast.parser * @param inserted If true, brace needs to be inserted, otherwise brace needs to be deleted. 
*/ case class BracePatch(off: Int, inserted: Boolean) -extends Patch(off, if (inserted) Insertion("{") else Deletion(1)) \ No newline at end of file +extends Patch(off, if (inserted) Insertion("{") else Deletion(1)) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Change.scala b/src/compiler/scala/tools/nsc/ast/parser/Change.scala index 664cc9879c47..dd3003c112bd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Change.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Change.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala index 090c517054f7..e935ff77b54b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -51,7 +51,7 @@ abstract class CommonTokens { // J: PUBLIC = 42 final val PROTECTED = 43 final val PRIVATE = 44 - // S: SEALED = 45 + final val SEALED = 45 // J: contextual keyword final val ABSTRACT = 46 // J: DEFAULT = 47 // J: STATIC = 48 diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index ceb556498661..e0a21b66a788 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,7 @@ package ast.parser import scala.annotation.tailrec import scala.collection.mutable +import scala.collection.BufferedIterator import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable import scala.tools.nsc.util.CharArrayReader @@ -83,7 +84,7 @@ trait MarkupParsers { var tmppos : Position = NoPosition def ch = input.ch /** this method assign the next character to ch and advances in input */ - def nextch() { input.nextChar() } + def nextch(): Unit = { input.nextChar() } protected def ch_returning_nextch: Char = { val result = ch; input.nextChar(); result @@ -136,7 +137,7 @@ trait MarkupParsers { try handle.parseAttribute(r2p(start, mid, curOffset), tmp) catch { - case e: RuntimeException => + case _: RuntimeException => errorAndResult("error parsing attribute value", parser.errorTermTree) } @@ -187,13 +188,13 @@ trait MarkupParsers { def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = { def append(text: String): Unit = { val tree = handle.text(pos, text) - ts append tree + ts += tree } val clean = if (preserveWS) txt else { val sb = new StringBuilder() txt foreach { c => - if (!isSpace(c)) sb append c - else if (sb.isEmpty || !isSpace(sb.last)) sb append ' ' + if (!isSpace(c)) sb += c + else if (sb.isEmpty || !isSpace(sb.last)) sb += ' ' } sb.toString.trim } @@ -201,9 +202,9 @@ trait MarkupParsers { } /** adds entity/character to ts as side-effect - * @precond ch == '&' + * @note Pre-condition: ch == '&' */ - def content_AMP(ts: ArrayBuffer[Tree]) { + def content_AMP(ts: ArrayBuffer[Tree]): Unit = { nextch() val toAppend = ch match { case '#' => // CharacterRef @@ -217,15 +218,15 @@ trait MarkupParsers { handle.entityRef(tmppos, n) } - ts append toAppend + ts += toAppend } /** - * @precond ch == '{' - * @postcond: xEmbeddedBlock == false! 
+ * @note Pre-condition: ch == '{' + * @note Post-condition: xEmbeddedBlock == false! */ def content_BRACE(p: Position, ts: ArrayBuffer[Tree]): Unit = - if (xCheckEmbeddedBlock) ts append xEmbeddedExpr + if (xCheckEmbeddedBlock) ts += xEmbeddedExpr else appendText(p, ts, xText) /** At an open angle-bracket, detects an end tag @@ -240,7 +241,7 @@ trait MarkupParsers { case '?' => nextch() ; xProcInstr // PI case _ => element // child node } - ts append toAppend + ts += toAppend false } @@ -249,7 +250,7 @@ trait MarkupParsers { val coalescing = settings.XxmlSettings.isCoalescing @tailrec def loopContent(): Unit = if (xEmbeddedBlock) { - ts append xEmbeddedExpr + ts += xEmbeddedExpr loopContent() } else { tmppos = o2p(curOffset) @@ -338,7 +339,7 @@ trait MarkupParsers { if (charComingAfter(nextch()) == '}') nextch() else errorBraces() } - buf append ch + buf += ch nextch() } while (!(ch == SU || xCheckEmbeddedBlock || ch == '<' || ch == '&')) buf.toString @@ -393,7 +394,7 @@ trait MarkupParsers { handle.makeXMLseq(r2p(start, start, curOffset), ts) } else { - assert(ts.length == 1) + assert(ts.length == 1, "Require one tree") ts(0) } }, @@ -435,7 +436,7 @@ trait MarkupParsers { def xScalaPatterns: List[Tree] = escapeToScala(parser.xmlSeqPatterns(), "pattern") def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(pos, str) - def reportSyntaxError(str: String) { + def reportSyntaxError(str: String): Unit = { reportSyntaxError(curOffset, "in XML literal: " + str) nextch() } @@ -461,7 +462,7 @@ trait MarkupParsers { else ch match { case '<' => // tag nextch() - if (ch != '/') ts append xPattern // child + if (ch != '/') ts += xPattern // child else return false // terminate case '{' if xCheckEmbeddedBlock => // embedded Scala patterns, if not double brace diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 4a20532de104..66fbcfe3659f 100644 --- 
a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,12 +16,14 @@ package scala.tools.nsc package ast.parser +import scala.annotation.tailrec import scala.collection.mutable -import mutable.ListBuffer +import scala.collection.mutable.ListBuffer +import scala.reflect.internal.util.{CodeAction, FreshNameCreator, ListOfNil, Position, SourceFile} import scala.reflect.internal.{Precedence, ModifierFlags => Flags} -import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Position, SourceFile} -import Tokens._ import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.ast.parser.Tokens._ +import scala.util.chaining._ /** Historical note: JavaParsers started life as a direct copy of Parsers * but at a time when that Parsers had been replaced by a different one. @@ -32,7 +34,8 @@ import scala.tools.nsc.Reporting.WarningCategory * the beginnings of a campaign against this latest incursion by Cutty * McPastington and his army of very similar soldiers. 
*/ -trait ParsersCommon extends ScannersCommon { self => +trait ParsersCommon extends ScannersCommon { + self => val global : Global // the use of currentUnit in the parser should be avoided as it might // cause unexpected behaviour when you work with two units at the @@ -47,7 +50,7 @@ trait ParsersCommon extends ScannersCommon { self => */ abstract class ParserCommon { val in: ScannerCommon - def deprecationWarning(off: Offset, msg: String, since: String): Unit + def deprecationWarning(off: Offset, msg: String, since: String, actions: List[CodeAction] = Nil): Unit def accept(token: Token): Int /** Methods inParensOrError and similar take a second argument which, should @@ -55,54 +58,51 @@ trait ParsersCommon extends ScannersCommon { self => * instead of the contents of the groupers. However in all cases accept(LPAREN) * will be called, so a parse error will still result. If the grouping is * optional, in.token should be tested before calling these methods. + * + * Skip trailing comma is pushed down to scanner because this abstract parser + * doesn't have token info. 
*/ - @inline final def inParens[T](body: => T): T = { - accept(LPAREN) - val ret = body - accept(RPAREN) - ret + @inline final def inGroupers[T](left: Token)(body: => T): T = { + accept(left) + try body + finally { + in.skipTrailingComma(left + 1) + accept(left + 1) + } } - @inline final def inParensOrError[T](body: => T, alt: T): T = - if (in.token == LPAREN) inParens(body) - else { accept(LPAREN) ; alt } - - @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit) + @inline final def inParens[T](body: => T): T = inGroupers(LPAREN)(body) + @inline final def inParensOrError[T](body: => T, alt: T): T = if (in.token == LPAREN) inParens(body) else { accept(LPAREN) ; alt } + @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit) @inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil) - @inline final def inBraces[T](body: => T): T = { - accept(LBRACE) - val ret = body - accept(RBRACE) - ret - } - @inline final def inBracesOrError[T](body: => T, alt: T): T = - if (in.token == LBRACE) inBraces(body) - else { accept(LBRACE) ; alt } - + @inline final def inBraces[T](body: => T): T = inGroupers(LBRACE)(body) + @inline final def inBracesOrError[T](body: => T, alt: T): T = if (in.token == LBRACE) inBraces(body) else { accept(LBRACE) ; alt } @inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil) - @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit) - @inline final def dropAnyBraces[T](body: => T): T = - if (in.token == LBRACE) inBraces(body) - else body + @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit) + @inline final def dropAnyBraces[T](body: => T): T = if (in.token == LBRACE) inBraces(body) else body - @inline final def inBrackets[T](body: => T): T = { - accept(LBRACKET) - val ret = body - accept(RBRACKET) - ret - } + @inline final def 
inBrackets[T](body: => T): T = inGroupers(LBRACKET)(body) /** Creates an actual Parens node (only used during parsing.) */ @inline final def makeParens(body: => List[Tree]): Parens = Parens(inParens(if (in.token == RPAREN) Nil else body)) - /** {{{ part { `sep` part } }}}, or if sepFirst is true, {{{ { `sep` part } }}}. */ - final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = { + /** {{{ { `sep` part } }}}. */ + def tokenSeparated[T](separator: Token, part: => T): List[T] = { val ts = new ListBuffer[T] - if (!sepFirst) + ts += part + + while (in.token == separator) { + in.nextToken() ts += part + } + ts.toList + } + /** {{{ { `sep` part } }}}. */ + def separatedToken[T](separator: Token, part: => T): List[T] = { + val ts = new ListBuffer[T] while (in.token == separator) { in.nextToken() ts += part @@ -111,8 +111,7 @@ trait ParsersCommon extends ScannersCommon { self => } /** {{{ tokenSeparated }}}, with the separator fixed to commas. */ - @inline final def commaSeparated[T](part: => T): List[T] = - tokenSeparated(COMMA, sepFirst = false, part) + @inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, part) } } @@ -157,7 +156,7 @@ self => val global: Global import global._ - case class OpInfo(lhs: Tree, operator: TermName, targs: List[Tree], offset: Offset) { + case class OpInfo(lhs: Tree, operator: TermName, targs: List[Tree], operatorPos: Position, targsPos: Position) { def precedence = Precedence(operator.toString) } @@ -178,11 +177,11 @@ self => def unit = global.currentUnit // suppress warnings; silent abort on errors - def warning(offset: Offset, msg: String, category: WarningCategory): Unit = () - def deprecationWarning(offset: Offset, msg: String, since: String): Unit = () + def warning(offset: Offset, msg: String, category: WarningCategory, actions: List[CodeAction]): Unit = () + def deprecationWarning(offset: Offset, msg: String, since: String, actions: List[CodeAction]): Unit = () - def 
syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg) - def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg) + def syntaxError(offset: Offset, msg: String, actions: List[CodeAction]): Unit = throw new MalformedInput(offset, msg) + def incompleteInputError(msg: String, actions: List[CodeAction]): Unit = throw new MalformedInput(source.content.length - 1, msg) object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices val global: self.global.type = self.global @@ -220,7 +219,7 @@ self => override def blockExpr(): Tree = skipBraces(EmptyTree) - override def templateBody(isPre: Boolean) = skipBraces((noSelfType, EmptyTree.asList)) + override def templateBody() = skipBraces((noSelfType, EmptyTree.asList)) } class UnitParser(override val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { uself => @@ -228,12 +227,12 @@ self => override def newScanner() = new UnitScanner(unit, patches) - override def warning(offset: Offset, msg: String, category: WarningCategory): Unit = - runReporting.warning(o2p(offset), msg, category, site = "") + override def warning(offset: Offset, msg: String, category: WarningCategory, actions: List[CodeAction]): Unit = + runReporting.warning(o2p(offset), msg, category, site = "", actions) - override def deprecationWarning(offset: Offset, msg: String, since: String): Unit = + override def deprecationWarning(offset: Offset, msg: String, since: String, actions: List[CodeAction]): Unit = // we cannot provide a `site` in the parser, there's no context telling us where we are - runReporting.deprecationWarning(o2p(offset), msg, since, site = "", origin = "") + runReporting.deprecationWarning(o2p(offset), msg, since, site = "", origin = "", actions) private var smartParsing = false @inline private def withSmartParsing[T](body: => T): T = { @@ -244,20 +243,20 @@ self => } def withPatches(patches: 
List[BracePatch]): UnitParser = new UnitParser(unit, patches) - val syntaxErrors = new ListBuffer[(Int, String)] + val syntaxErrors = new ListBuffer[(Int, String, List[CodeAction])] def showSyntaxErrors() = - for ((offset, msg) <- syntaxErrors) - reporter.error(o2p(offset), msg) + for ((offset, msg, actions) <- syntaxErrors) + runReporting.error(o2p(offset), msg, actions) - override def syntaxError(offset: Offset, msg: String): Unit = { - if (smartParsing) syntaxErrors += ((offset, msg)) - else reporter.error(o2p(offset), msg) + override def syntaxError(offset: Offset, msg: String, actions: List[CodeAction]): Unit = { + if (smartParsing) syntaxErrors += ((offset, msg, actions)) + else runReporting.error(o2p(offset), msg, actions) } - override def incompleteInputError(msg: String): Unit = { + override def incompleteInputError(msg: String, actions: List[CodeAction]): Unit = { val offset = source.content.length - 1 - if (smartParsing) syntaxErrors += ((offset, msg)) - else currentRun.parsing.incompleteInputError(o2p(offset), msg) + if (smartParsing) syntaxErrors += ((offset, msg, actions)) + else currentRun.parsing.incompleteInputError(o2p(offset), msg, actions) } /** parse unit. If there are unbalanced braces, @@ -266,9 +265,10 @@ self => def smartParse(): Tree = withSmartParsing { val firstTry = parse() if (syntaxErrors.isEmpty) firstTry - else in.healBraces() match { - case Nil => showSyntaxErrors() ; firstTry - case patches => (this withPatches patches).parse() + else { + val patches = in.healBraces() + if (!patches.isEmpty) withPatches(patches).parse() + else { showSyntaxErrors(); firstTry } } } } @@ -278,6 +278,14 @@ self => final val InBlock: Location = 1 final val InTemplate: Location = 2 + type ParamOwner = Int + object ParamOwner { + final val Class = 0 + final val Type = 1 + final val TypeParam = 2 // unused + final val Def = 3 + } + // These symbols may not yet be loaded (e.g. in the ide) so don't go // through definitions to obtain the names. 
lazy val ScalaValueClassNames = Seq(tpnme.AnyVal, @@ -303,8 +311,12 @@ self => */ @inline final def lookingAhead[T](body: => T): T = { val saved = new ScannerData {} copyFrom in + val seps = in.sepRegions in.nextToken() - try body finally in copyFrom saved + try body finally { + in.sepRegions = seps + in.copyFrom(saved) + } } class ParserTreeBuilder extends TreeBuilder { @@ -313,7 +325,7 @@ self => def source = parser.source } val treeBuilder = new ParserTreeBuilder - import treeBuilder.{global => _, unit => _, source => _, fresh => _, _} + import treeBuilder.{fresh => _, global => _, source => _, unit => _, _} implicit def fresh: FreshNameCreator = unit.fresh @@ -368,16 +380,21 @@ self => * by compilationUnit(). */ def scriptBody(): Tree = { - val stmts = parseStats() - def mainModuleName = newTermName(settings.script.value) + // remain backwards-compatible if -Xscript was set but not reasonably + settings.script.value match { + case null | "" => settings.script.value = "Main" + case _ => + } + + val stmts = parseStats() /* If there is only a single object template in the file and it has a * suitable main method, we will use it rather than building another object * around it. Since objects are loaded lazily the whole script would have * been a no-op, so we're not taking much liberty. */ - def searchForMain(): Tree = { + def searchForMain(mainModuleName: Name): Tree = { import PartialFunction.cond /* Have to be fairly liberal about what constitutes a main method since @@ -393,30 +410,24 @@ self => case Template(parents, _, _) => parents.exists(cond(_) { case Ident(tpnme.App) => true }) case _ => false } - /* We allow only one main module. */ + // We allow only one main module. 
var seenModule = false var disallowed = EmptyTree: Tree val newStmts = stmts.map { case md @ ModuleDef(mods, name, template) if !seenModule && (isApp(template) || md.exists(isMainMethod)) => seenModule = true - /* This slightly hacky situation arises because we have no way to communicate - * back to the scriptrunner what the name of the program is. Even if we were - * willing to take the sketchy route of settings.script.value = progName, that - * does not work when using fsc. And to find out in advance would impose a - * whole additional parse. So instead, if the actual object's name differs from - * what the script is expecting, we transform it to match. - */ + // If we detect a main module with an arbitrary name, rename it to the expected name. if (name == mainModuleName) md else treeCopy.ModuleDef(md, mods, mainModuleName, template) case md @ ModuleDef(_, _, _) => md case cd @ ClassDef(_, _, _, _) => cd case t @ Import(_, _) => t case t => - /* If we see anything but the above, fail. */ + // If we see anything but the above, fail. if (disallowed.isEmpty) disallowed = t EmptyTree } - if (disallowed.isEmpty) makeEmptyPackage(0, newStmts) + if (seenModule && disallowed.isEmpty) makeEmptyPackage(0, newStmts) else { if (seenModule) warning(disallowed.pos.point, "Script has a main object but statement is disallowed", WarningCategory.Other) @@ -424,24 +435,24 @@ self => } } - def mainModule: Tree = - if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain)) searchForMain() else EmptyTree + // pick up object specified by `-Xscript Main` + def mainModule: Tree = settings.script.valueSetByUser.map(name => searchForMain(TermName(name))).getOrElse(EmptyTree) + /* Here we are building an AST representing the following source fiction, + * where `moduleName` is from -Xscript (defaults to "Main") and are + * the result of parsing the script file. 
+ * + * {{{ + * object moduleName { + * def main(args: Array[String]): Unit = + * new AnyRef { + * stmts + * } + * } + * }}} + */ def repackaged: Tree = { - /* Here we are building an AST representing the following source fiction, - * where `moduleName` is from -Xscript (defaults to "Main") and are - * the result of parsing the script file. - * - * {{{ - * object moduleName { - * def main(args: Array[String]): Unit = - * new AnyRef { - * stmts - * } - * } - * }}} - */ - def emptyInit = DefDef( + val emptyInit = DefDef( NoMods, nme.CONSTRUCTOR, Nil, @@ -451,25 +462,26 @@ self => ) // def main - def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String))) - def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree)) - def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts)) + val mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String))) + val mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree)) + val mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts)) // object Main - def moduleName = newTermName(ScriptRunner scriptMain settings) - def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef)) - def moduleDef = ModuleDef(NoMods, moduleName, moduleBody) + val moduleName = TermName(settings.script.value) + val moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef)) + val moduleDef = ModuleDef(NoMods, moduleName, moduleBody) // package { ... } makeEmptyPackage(0, moduleDef :: Nil) } + // either there is an entry point (a main method either detected or specified) or wrap it up mainModule orElse repackaged } /* --------------- PLACEHOLDERS ------------------------------------------- */ - /** The implicit parameters introduced by `_` in the current expression. 
+ /** The parameters introduced by `_` "placeholder syntax" in the current expression. * Parameters appear in reverse order. */ var placeholderParams: List[ValDef] = Nil @@ -519,9 +531,10 @@ self => t } - def isWildcard(t: Tree): Boolean = t match { - case Ident(name1) => !placeholderParams.isEmpty && name1 == placeholderParams.head.name - case Typed(t1, _) => isWildcard(t1) + @tailrec + final def isWildcard(t: Tree): Boolean = t match { + case Ident(name1) => !placeholderParams.isEmpty && name1 == placeholderParams.head.name + case Typed(t1, _) => isWildcard(t1) case Annotated(t1, _) => isWildcard(t1) case _ => false } @@ -566,37 +579,63 @@ self => in.nextToken() } } - def warning(offset: Offset, msg: String, category: WarningCategory): Unit - def incompleteInputError(msg: String): Unit - def syntaxError(offset: Offset, msg: String): Unit + + def warning(offset: Offset, msg: String, category: WarningCategory, actions: List[CodeAction] = Nil): Unit + + def incompleteInputError(msg: String, actions: List[CodeAction] = Nil): Unit + + def syntaxError(offset: Offset, msg: String, actions: List[CodeAction] = Nil): Unit private def syntaxError(pos: Position, msg: String, skipIt: Boolean): Unit = - syntaxError(pos pointOrElse in.offset, msg, skipIt) + syntaxError(pos, msg, skipIt, actions = Nil) + private def syntaxError(pos: Position, msg: String, skipIt: Boolean, actions: List[CodeAction]): Unit = + syntaxError(pos pointOrElse in.offset, msg, skipIt, actions) + def syntaxError(msg: String, skipIt: Boolean): Unit = - syntaxError(in.offset, msg, skipIt) + syntaxError(msg, skipIt, actions = Nil) + def syntaxError(msg: String, skipIt: Boolean, actions: List[CodeAction]): Unit = + syntaxError(in.offset, msg, skipIt, actions) - def syntaxError(offset: Offset, msg: String, skipIt: Boolean): Unit = { + def syntaxError(offset: Offset, msg: String, skipIt: Boolean): Unit = + syntaxError(offset, msg, skipIt, actions = Nil) + def syntaxError(offset: Offset, msg: String, skipIt: 
Boolean, actions: List[CodeAction]): Unit = { if (offset > lastErrorOffset) { - syntaxError(offset, msg) + syntaxError(offset, msg, actions) lastErrorOffset = in.offset // no more errors on this token. } if (skipIt) skip(UNDEF) } - def warning(msg: String, category: WarningCategory): Unit = warning(in.offset, msg, category) + def warning(msg: String, category: WarningCategory): Unit = + warning(in.offset, msg, category, actions = Nil) + def warning(msg: String, category: WarningCategory, actions: List[CodeAction]): Unit = + warning(in.offset, msg, category, actions) - def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean): Unit = { + def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean, actions: List[CodeAction] = Nil): Unit = { if (in.token == EOF) - incompleteInputError(msg) + incompleteInputError(msg, actions) else - syntaxError(in.offset, msg, skipIt) + syntaxError(in.offset, msg, skipIt, actions) } - def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = { - syntaxErrorOrIncomplete(msg, skipIt) + def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean, actions: List[CodeAction] = Nil)(and: T): T = { + syntaxErrorOrIncomplete(msg, skipIt, actions) and } + // warn under -Xsource:3; otherwise if since is nonEmpty, issue a deprecation + def migrationWarning(offset: Offset, msg: String, since: String = "", actions: List[CodeAction] = Nil): Unit = + if (currentRun.isScala3) warning(offset, msg, WarningCategory.Scala3Migration, actions) + else if (!since.isEmpty) deprecationWarning(offset, msg, since, actions) + + // deprecation or migration under -Xsource:3, with different messages + def migrationWarning(offset: Offset, depr: => String, migr: => String, since: String, actions: String => List[CodeAction]): Unit = { + val msg = if (currentRun.isScala3) migr else depr + migrationWarning(offset, msg, since, actions(msg)) + } + def migrationWarning(offset: Offset, depr: => String, migr: => String, since: String): Unit = + 
migrationWarning(offset, depr, migr, since, (_: String) => Nil) + def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found." def expectedMsg(token: Token): String = in.token match { @@ -623,7 +662,7 @@ self => /** {{{ * semi = nl {nl} | `;` - * nl = `\n' // where allowed + * nl = `\n` // where allowed * }}} */ def acceptStatSep(): Unit = in.token match { @@ -698,13 +737,26 @@ self => def isIdentExcept(except: Name) = isIdent && in.name != except def isIdentOf(name: Name) = isIdent && in.name == name - def isUnaryOp = isIdent && raw.isUnary(in.name) + def isUnaryOp = isRawIdent && raw.isUnary(in.name) def isRawStar = isRawIdent && in.name == raw.STAR def isRawBar = isRawIdent && in.name == raw.BAR def isRawIdent = in.token == IDENTIFIER def isWildcardType = in.token == USCORE || isScala3WildcardType def isScala3WildcardType = isRawIdent && in.name == raw.QMARK + def checkQMarkDefinition() = + if (isScala3WildcardType) { + val msg = "using `?` as a type name requires backticks." + syntaxError(in.offset, msg, + runReporting.codeAction("add backticks", r2p(in.offset, in.offset, in.offset + 1), "`?`", msg, expected = Some(("?", unit)))) + } + + def checkKeywordDefinition() = + if (isRawIdent && scala3Keywords.contains(in.name)) { + val msg = s"Wrap `${in.name}` in backticks to use it as an identifier, it will become a keyword in Scala 3." 
+ deprecationWarning(in.offset, msg, "2.13.7", + runReporting.codeAction("add backticks", r2p(in.offset, in.offset, in.offset + in.name.length), s"`${in.name}`", msg, expected = Some((in.name.toString, unit)))) + } def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -712,34 +764,25 @@ self => def isLiteralToken(token: Token) = token match { case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true - case _ => false + case _ => false } def isLiteral = isLiteralToken(in.token) - def isSimpleExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match { - case IDENTIFIER | BACKQUOTED_IDENT | - THIS | SUPER | NEW | USCORE | - LPAREN | LBRACE | XMLSTART => true - case _ => false - }) - - def isSimpleExprIntro: Boolean = isExprIntroToken(in.token) - def isExprIntroToken(token: Token): Boolean = !isValidSoftModifier && (isLiteralToken(token) || (token match { - case IDENTIFIER | BACKQUOTED_IDENT | - THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | - DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true - case _ => false - })) + case IDENTIFIER | BACKQUOTED_IDENT | + THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | + DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true + case _ => false + })) def isExprIntro: Boolean = isExprIntroToken(in.token) - def isTypeIntroToken(token: Token): Boolean = token match { + def isTypeIntroToken(token: Token): Boolean = (isLiteralToken(token) && token != NULL) || (token match { case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER | USCORE | LPAREN | AT => true case _ => false - } + }) def isStatSeqEnd = in.token == RBRACE || in.token == EOF @@ -770,14 +813,23 @@ self => /** Convert tree to formal parameter list. 
*/ def convertToParams(tree: Tree): List[ValDef] = tree match { - case Parens(ts) => ts map convertToParam - case _ => List(convertToParam(tree)) + case Parens(ts) => ts.map(convertToParam) + case Typed(Ident(_), _) => + val msg = "parentheses are required around the parameter of a lambda" + val wrn = sm"""|$msg + |Use '-Wconf:msg=lambda-parens:s' to silence this warning.""" + def actions = + if (tree.pos.isRange) runReporting.codeAction("add parentheses", tree.pos, s"(${unit.source.sourceAt(tree.pos)})", msg) + else Nil + migrationWarning(tree.pos.point, wrn, /*since="2.13.11",*/ actions = actions) + List(convertToParam(tree)) + case _ => List(convertToParam(tree)) } /** Convert tree to formal parameter. */ def convertToParam(tree: Tree): ValDef = atPos(tree.pos) { def removeAsPlaceholder(name: Name): Unit = { - placeholderParams = placeholderParams filter (_.name != name) + placeholderParams = placeholderParams.filter(_.name != name) } def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end)) def propagateNoWarnAttachment(from: Tree, to: ValDef): to.type = @@ -813,67 +865,76 @@ self => errorTypeTree } } - @inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part) - def readAnnots(part: => Tree): List[Tree] = tokenSeparated(AT, sepFirst = true, part) + + /** {{{ part { `sep` part } }}}. */ + override final def tokenSeparated[T](separator: Token, part: => T): List[T] = { + val ts = ListBuffer.empty[T].addOne(part) + var done = in.token != separator + while (!done) { + val skippable = separator == COMMA && in.sepRegions.nonEmpty && in.isTrailingComma(in.sepRegions.head) + if (!skippable) { + in.nextToken() + ts += part + } + done = (in.token != separator) || skippable + } + ts.toList + } + + /** {{{ { `sep` part } }}}. 
*/ + override final def separatedToken[T](separator: Token, part: => T): List[T] = { + require(separator != COMMA, "separator cannot be a comma") + val ts = ListBuffer.empty[T] + while (in.token == separator) { + in.nextToken() + ts += part + } + ts.toList + } + + @inline final def caseSeparated[T](part: => T): List[T] = separatedToken(CASE, part) + def readAnnots(part: => Tree): List[Tree] = separatedToken(AT, part) /** Create a tuple type Tree. If the arity is not supported, a syntax error is emitted. */ - def makeSafeTupleType(elems: List[Tree], offset: Offset) = { - if (checkTupleSize(elems, offset)) makeTupleType(elems) + def makeSafeTupleType(elems: List[Tree]) = { + if (checkTupleSize(elems)) makeTupleType(elems) else makeTupleType(Nil) // create a dummy node; makeTupleType(elems) would fail } /** Create a tuple term Tree. If the arity is not supported, a syntax error is emitted. */ - def makeSafeTupleTerm(elems: List[Tree], offset: Offset) = { - checkTupleSize(elems, offset) + def makeSafeTupleTerm(elems: List[Tree]) = { + checkTupleSize(elems) makeTupleTerm(elems) } - private[this] def checkTupleSize(elems: List[Tree], offset: Offset): Boolean = + /** Create a function Tree. If the arity is not supported, a syntax error is emitted. 
*/ + def makeSafeFunctionType(argtpes: List[Tree], restpe: Tree) = { + if (checkFunctionArity(argtpes)) makeFunctionTypeTree(argtpes, restpe) + else makeFunctionTypeTree(Nil, restpe) // create a dummy node + } + + private[this] def checkTupleSize(elems: List[Tree]): Boolean = elems.lengthCompare(definitions.MaxTupleArity) <= 0 || { - val msg = s"too many elements for tuple: ${elems.length}, allowed: ${definitions.MaxTupleArity}" - syntaxError(offset, msg, skipIt = false) + val firstInvalidElem = elems(definitions.MaxTupleArity) + val msg = s"tuples may not have more than ${definitions.MaxFunctionArity} elements, but ${elems.length} given" + syntaxError(firstInvalidElem.pos, msg, skipIt = false) + false + } + + private[this] def checkFunctionArity(argtpes: List[Tree]): Boolean = + argtpes.lengthCompare(definitions.MaxFunctionArity) <= 0 || { + val firstInvalidArg = argtpes(definitions.MaxFunctionArity) + val msg = s"function values may not have more than ${definitions.MaxFunctionArity} parameters, but ${argtpes.length} given" + syntaxError(firstInvalidArg.pos, msg, skipIt = false) false } /** Strip the artificial `Parens` node to create a tuple term Tree. */ def stripParens(t: Tree) = t match { - case Parens(ts) => atPos(t.pos) { makeSafeTupleTerm(ts, t.pos.point) } + case Parens(ts) => atPos(t.pos) { makeSafeTupleTerm(ts) } case _ => t } - /** Create tree representing (unencoded) binary operation expression or pattern. 
*/ - def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = { - require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), - s"Incompatible args to makeBinop: !isExpr but targs=$targs") - - def mkSelection(t: Tree) = { - val pos = opPos union t.pos - val sel = atPos(pos)(Select(stripParens(t), op.encode)) - if (targs.isEmpty) sel - else atPos(pos union targs.last.pos withPoint pos.point) { - TypeApply(sel, targs) - } - } - def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args - val arguments = right match { - case Parens(args) => mkNamed(args) - case _ => right :: Nil - } - def mkApply(fun: Tree, args: List[Tree]) = Apply(fun, args).updateAttachment(InfixAttachment) - if (isExpr) { - if (treeInfo.isLeftAssoc(op)) { - mkApply(mkSelection(left), arguments) - } else { - val x = freshTermName() - Block( - List(ValDef(Modifiers(symtab.Flags.SYNTHETIC | symtab.Flags.ARTIFACT), x, TypeTree(), stripParens(left))), - mkApply(mkSelection(right), List(atPos(left.pos.makeTransparent)(Ident(x))))) - } - } else { - mkApply(Ident(op.encode), stripParens(left) :: arguments) - } - } - /** Is current ident a `*`, and is it followed by a `)` or `, )`? 
*/ def followingIsScala3Vararg(): Boolean = currentRun.isScala3 && isRawStar && lookingAhead { @@ -900,47 +961,94 @@ self => private def headPrecedence = opHead.precedence private def popOpInfo(): OpInfo = try opHead finally opstack = opstack.tail private def pushOpInfo(top: Tree): Unit = { - val name = in.name - val offset = in.offset + val name = in.name + val nameStart = in.offset ident() + val operatorPos = Position.range(source, nameStart, nameStart, in.lastOffset) //offset + operator.length) + val targsStart = in.offset val targs = if (in.token == LBRACKET) exprTypeArgs() else Nil - val opinfo = OpInfo(top, name, targs, offset) + val targsPos = if (targs.nonEmpty) Position.range(source, targsStart, targsStart, in.lastOffset) else NoPosition + val opinfo = OpInfo(top, name, targs, operatorPos, targsPos) opstack ::= opinfo } - def checkHeadAssoc(leftAssoc: Boolean) = checkAssoc(opHead.offset, opHead.operator, leftAssoc) + def checkHeadAssoc(leftAssoc: Boolean) = checkAssoc(opHead.operatorPos.point, opHead.operator, leftAssoc) def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) = ( - if (treeInfo.isLeftAssoc(op) != leftAssoc) + if (nme.isLeftAssoc(op) != leftAssoc) syntaxError(offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false) ) def finishPostfixOp(start: Int, base: List[OpInfo], opinfo: OpInfo): Tree = { if (opinfo.targs.nonEmpty) - syntaxError(opinfo.offset, "type application is not allowed for postfix operators") + syntaxError(opinfo.targsPos.point, "type application is not allowed for postfix operators") val lhs = reduceExprStack(base, opinfo.lhs) - makePostfixSelect(if (lhs.pos.isDefined) lhs.pos.start else start, opinfo.offset, stripParens(lhs), opinfo.operator) + val at = if (lhs.pos.isDefined) lhs.pos.start else start + atPos(opinfo.operatorPos.withStart(at)) { + Select(stripParens(lhs), opinfo.operator.encode).updateAttachment(PostfixAttachment) + } } - def finishBinaryOp(isExpr: Boolean, 
opinfo: OpInfo, rhs: Tree): Tree = { - import opinfo._ - val operatorPos: Position = Position.range(rhs.pos.source, offset, offset, offset + operator.length) - val pos = lhs.pos.union(rhs.pos).union(operatorPos).withEnd(in.lastOffset).withPoint(offset) - - atPos(pos)(makeBinop(isExpr, lhs, operator, rhs, operatorPos, opinfo.targs)) + /** Create tree representing (unencoded) binary operation expression or pattern. */ + def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, right: Tree): Tree = { + import opinfo.{lhs => left, operator, targs, operatorPos, targsPos} + val pos = operatorPos.union(left.pos).union(right.pos).withEnd(in.lastOffset) + + if (targs.nonEmpty) { + require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Binary op !isExpr but targs=$targs") + val qual = unit.source.sourceAt(left.pos) + val fun = s"${CodeAction.maybeWrapInParens(qual)}.${unit.source.sourceAt(operatorPos.withEnd(right.pos.start))}" + val fix = s"${fun.trim}${CodeAction.wrapInParens(unit.source.sourceAt(right.pos))}" + val msg = "type application is not allowed for infix operators" + // omit since="2.13.11" to avoid deprecation + migrationWarning(targsPos.point, msg, actions = runReporting.codeAction("use selection", pos, fix, msg)) + } + val rightAssoc = !nme.isLeftAssoc(operator) + def mkSelection(t: Tree) = { + // if it's right-associative, `targs` are between `op` and `t` so make the pos transparent + val selPos = operatorPos.union(t.pos).makeTransparentIf(rightAssoc) + val sel = atPos(selPos)(Select(stripParens(t), operator.encode)) + if (targs.isEmpty) sel + else atPos(selPos.union(targsPos).makeTransparentIf(rightAssoc)) { TypeApply(sel, targs) } + } + def mkNamed(args: List[Tree]) = if (!isExpr) args else + args.map(treeInfo.assignmentToMaybeNamedArg(_)) + .tap(res => if (currentRun.isScala3 && args.lengthCompare(1) == 0 && (args.head ne res.head)) + deprecationWarning(args.head.pos.point, "named argument is deprecated for infix syntax", since="2.13.16")) + var 
isMultiarg = false + val arguments = right match { + case Parens(Nil) => literalUnit :: Nil + case Parens(args @ (_ :: Nil)) => mkNamed(args) + case Parens(args) => isMultiarg = true; mkNamed(args) + case _ => right :: Nil + } + def mkApply(fun: Tree, args: List[Tree]) = + Apply(fun, args) + .updateAttachment(InfixAttachment) + .tap(apply => if (isMultiarg) apply.updateAttachment(MultiargInfixAttachment)) + atPos(pos) { + if (!isExpr) + mkApply(Ident(operator.encode), stripParens(left) :: arguments) + else if (!rightAssoc) + mkApply(mkSelection(left), arguments) + else { + import symtab.Flags._ + val x = freshTermName(nme.RIGHT_ASSOC_OP_PREFIX) + val liftedArg = atPos(left.pos) { + ValDef(Modifiers(FINAL | SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left)) + } + val apply = mkApply(mkSelection(right), List(Ident(x) setPos left.pos.focus)) + Block(liftedArg :: Nil, apply) + } + } } - def reduceExprStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = true, base, top) - def reducePatternStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = false, base, top) + def reduceExprStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = true, base, top) def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree): Tree = { val opPrecedence = if (isIdent) Precedence(in.name.toString) else Precedence(0) - val leftAssoc = !isIdent || (treeInfo isLeftAssoc in.name) + val leftAssoc = !isIdent || nme.isLeftAssoc(in.name) - reduceStack(isExpr, base, top, opPrecedence, leftAssoc) - } - - def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree, opPrecedence: Precedence, leftAssoc: Boolean): Tree = { def isDone = opstack == base def lowerPrecedence = !isDone && (opPrecedence < headPrecedence) def samePrecedence = !isDone && (opPrecedence == headPrecedence) @@ -949,10 +1057,11 @@ self => if (samePrecedence) checkHeadAssoc(leftAssoc) + @tailrec def loop(top: Tree): Tree = if (canReduce) { val info = popOpInfo() if (!isExpr && 
info.targs.nonEmpty) { - syntaxError(info.offset, "type application is not allowed in pattern") + syntaxError(info.targsPos.point, "type application is not allowed in pattern") info.targs.foreach(_.setType(ErrorType)) } loop(finishBinaryOp(isExpr, info, top)) @@ -967,7 +1076,7 @@ self => * called: either in a pattern context or not. Formerly, this was * threaded through numerous methods as boolean isPattern. */ - trait PatternContextSensitive { + sealed trait PatternContextSensitive { /** {{{ * ArgType ::= Type * }}} @@ -975,29 +1084,29 @@ self => def argType(): Tree def functionArgType(): Tree + // () must be () => R; (types) could be tuple or (types) => R private def tupleInfixType(start: Offset) = { - in.nextToken() - if (in.token == RPAREN) { - in.nextToken() - atPos(start, accept(ARROW)) { makeFunctionTypeTree(Nil, typ()) } + require(in.token == LPAREN, "first token must be a left parenthesis") + val ts = inParens { if (in.token == RPAREN) Nil else functionTypes() } + if (in.token == ARROW) + atPos(start, in.skipToken()) { makeSafeFunctionType(ts, typ()) } + else if (ts.isEmpty) { + val msg = "Illegal literal type (), use Unit instead" + syntaxError(start, msg, + runReporting.codeAction("use `Unit`", r2p(start, start, start + 2), "Unit", msg, expected = Some(("()", unit)))) + EmptyTree } else { - val ts = functionTypes() - accept(RPAREN) - if (in.token == ARROW) - atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) } - else { - ts foreach checkNotByNameOrVarargs - val tuple = atPos(start) { makeSafeTupleType(ts, start) } - val tpt = infixTypeRest( - compoundTypeRest( - annotTypeRest( - simpleTypeRest( - tuple))), - InfixMode.FirstOp - ) - if (currentRun.isScala3) andType(tpt) else tpt - } + ts foreach checkNotByNameOrVarargs + val tuple = atPos(start) { makeSafeTupleType(ts) } + val tpt = infixTypeRest( + compoundTypeRest( + annotTypeRest( + simpleTypeRest( + tuple))), + InfixMode.FirstOp + ) + if (currentRun.isScala3) andType(tpt) else tpt } } 
private def makeExistentialTypeTree(t: Tree) = { @@ -1012,10 +1121,10 @@ self => } /** {{{ - * Type ::= InfixType `=>' Type - * | `(' [`=>' Type] `)' `=>' Type + * Type ::= InfixType `=>` Type + * | `(` [`=>` Type] `)` `=>` Type * | InfixType [ExistentialClause] - * ExistentialClause ::= forSome `{' ExistentialDcl {semi ExistentialDcl}} `}' + * ExistentialClause ::= forSome `{` ExistentialDcl {semi ExistentialDcl} `}` * ExistentialDcl ::= type TypeDcl | val ValDcl * }}} */ @@ -1033,7 +1142,7 @@ self => } /** {{{ - * TypeArgs ::= `[' ArgType {`,' ArgType} `]' + * TypeArgs ::= `[` ArgType {`,` ArgType} `]` * }}} */ def typeArgs(): List[Tree] = inBrackets(types()) @@ -1046,33 +1155,53 @@ self => /** {{{ * SimpleType ::= SimpleType TypeArgs - * | SimpleType `#' Id + * | SimpleType `#` Id * | StableId - * | Path `.' type - * | `(' Types `)' + * | Path `.` type + * | Literal + * | `(` Types `)` * | WildcardType * }}} */ def simpleType(): Tree = { - val start = in.offset - simpleTypeRest(in.token match { - case LPAREN => atPos(start)(makeSafeTupleType(inParens(types()), start)) - case _ => - if (currentRun.isScala3 && (in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { - val start = in.offset - val identName = in.name.encode.append("_").toTypeName - in.nextToken() - in.nextToken() - atPos(start)(Ident(identName)) - } else if (isWildcardType) { - val scala3Wildcard = isScala3WildcardType - wildcardType(in.skipToken(), scala3Wildcard) - } else - path(thisOK = false, typeOK = true) match { - case r @ SingletonTypeTree(_) => r - case r => convertToTypeId(r) + if (isLiteralToken(in.token) && in.token != NULL) + atPos(in.offset)(SingletonTypeTree(literal())) + else if (in.name == raw.MINUS && lookingAhead(isNumericLit)) { + val start = in.offset + in.nextToken() + atPos(start)(SingletonTypeTree(literal(isNegated = true, start = start))) + } else { + val start = in.offset + simpleTypeRest(in.token match { + case LPAREN => + if 
(lookingAhead(in.token == RPAREN)) { + in.nextToken() + in.nextToken() + val msg = "Illegal literal type (), use Unit instead" + syntaxError(start, msg, + runReporting.codeAction("use `Unit`", r2p(start, start, start + 2), "Unit", msg, expected = Some(("()", unit)))) + EmptyTree } - }) + else + atPos(start)(makeSafeTupleType(inParens(types()))) + case _ => + if (currentRun.isScala3 && (in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { + val start = in.offset + val identName = in.name.encode.append("_").toTypeName + in.nextToken() + in.nextToken() + atPos(start)(Ident(identName)) + } else if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType + wildcardType(in.skipToken(), scala3Wildcard) + } else { + path(thisOK = false, typeOK = true) match { + case r @ SingletonTypeTree(_) => r + case r => convertToTypeId(r) + } + } + }) + } } private def typeProjection(t: Tree): Tree = { @@ -1082,7 +1211,8 @@ self => val point = if (name == tpnme.ERROR) hashOffset else nameOffset atPos(t.pos.start, point)(SelectFromTypeTree(t, name)) } - def simpleTypeRest(t: Tree): Tree = in.token match { + @tailrec + final def simpleTypeRest(t: Tree): Tree = in.token match { case HASH => simpleTypeRest(typeProjection(t)) case LBRACKET => simpleTypeRest(atPos(t.pos.start, t.pos.point)(AppliedTypeTree(t, typeArgs()))) case _ => t @@ -1137,7 +1267,7 @@ self => } else EmptyTree def asInfix = { val opOffset = in.offset - val leftAssoc = treeInfo.isLeftAssoc(in.name) + val leftAssoc = nme.isLeftAssoc(in.name) if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp) val tycon = atPos(opOffset) { Ident(identForType()) } @@ -1156,7 +1286,7 @@ self => val parents = ListBuffer.empty[Tree] var otherInfixOp: Tree = EmptyTree def collect(tpt: Tree): Unit = tpt match { - case AppliedTypeTree(op @ Ident(tpnme.AND), List(left, right)) => + case AppliedTypeTree(Ident(tpnme.AND), List(left, right)) => collect(left) 
collect(right) case AppliedTypeTree(op, args) if args.exists(arg => arg.pos.start < op.pos.point) => @@ -1192,7 +1322,7 @@ self => } /** {{{ - * Types ::= Type {`,' Type} + * Types ::= Type {`,` Type} * }}} */ def types(): List[Tree] = commaSeparated(argType()) @@ -1213,15 +1343,17 @@ self => def rawIdent(): Name = try in.name finally in.nextToken() /** For when it's known already to be a type name. */ - def identForType(): TypeName = ident().toTypeName - def identForType(skipIt: Boolean): TypeName = ident(skipIt).toTypeName + def identForType(): TypeName = identForType(skipIt = true) + def identForType(skipIt: Boolean): TypeName = { + checkQMarkDefinition() + ident(skipIt).toTypeName + } def identOrMacro(): Name = if (isMacro) rawIdent() else ident() def selector(start: Offset, t0: Tree): Tree = { val t = stripParens(t0) val point = if (isIdent) in.offset else in.lastOffset //scala/bug#8459 - //assert(t.pos.isDefined, t) if (t != EmptyTree) Select(t, ident(skipIt = false)) setPos r2p(start, point, in.lastOffset) else @@ -1230,8 +1362,8 @@ self => /** {{{ * Path ::= StableId - * | [Ident `.'] this - * AnnotType ::= Path [`.' 
type] + * | [Ident `.`] this + * AnnotType ::= Path [`.` type] * }}} */ def path(thisOK: Boolean, typeOK: Boolean): Tree = { @@ -1270,6 +1402,7 @@ self => t = selector(start, t) if (in.token == DOT) t = selectors(start, t, typeOK, in.skipToken()) } else { + if (name == nme.ROOTPKG) t.updateAttachment(RootSelection) t = selectors(start, t, typeOK, dotOffset) } } @@ -1277,7 +1410,8 @@ self => t } - def selectors(start: Offset, t: Tree, typeOK: Boolean, dotOffset: Offset): Tree = + @tailrec + final def selectors(start: Offset, t: Tree, typeOK: Boolean, dotOffset: Offset): Tree = if (typeOK && in.token == TYPE) { in.nextToken() atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) } @@ -1289,7 +1423,7 @@ self => } /** {{{ - * MixinQualifier ::= `[' Id `]' + * MixinQualifier ::= `[` Id `]` * }}} */ def mixinQualifierOpt(): TypeName = @@ -1298,21 +1432,24 @@ self => /** {{{ * StableId ::= Id - * | Path `.' Id - * | [id `.'] super [`[' id `]']`.' id + * | Path `.` Id + * | [id `.`] super [`[` id `]`]`.` id * }}} */ def stableId(): Tree = - path(thisOK = false, typeOK = false) + path(thisOK = true, typeOK = false) /** {{{ - * QualId ::= Id {`.' Id} + * QualId ::= Id {`.` Id} * }}} */ def qualId(): Tree = { val start = in.offset val id = atPos(start) { Ident(ident()) } - if (in.token == DOT) { selectors(start, id, typeOK = false, in.skipToken()) } + if (in.token == DOT) { + if (id.name == nme.ROOTPKG) id.updateAttachment(RootSelection) + selectors(start, id, typeOK = false, in.skipToken()) + } else id } /** Calls `qualId()` and manages some package state. 
*/ @@ -1337,10 +1474,16 @@ self => */ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) { def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken() - if (in.token == SYMBOLLIT) + if (in.token == INTERPOLATIONID) { + if (inPattern) interpolatedString(inPattern) + else withPlaceholders(interpolatedString(inPattern), isAny = true) // interpolator params are Any* by definition + } + else if (in.token == SYMBOLLIT) { + val msg = s"""symbol literal is deprecated; use Symbol("${in.strVal}") instead""" + deprecationWarning(in.offset, msg, "2.13.0", + runReporting.codeAction("replace symbol literal", r2p(in.offset, in.offset, in.offset + 1 + in.strVal.length), s"""Symbol("${in.strVal}")""", msg, expected = Some((s"'${in.strVal}", unit)))) Apply(scalaDot(nme.Symbol), List(finish(in.strVal))) - else if (in.token == INTERPOLATIONID) - interpolatedString(inPattern = inPattern) + } else finish(in.token match { case CHARLIT => in.charVal case INTLIT => in.intVal(isNegated).toInt @@ -1358,7 +1501,7 @@ self => /** Handle placeholder syntax. * If evaluating the tree produces placeholders, then make it a function. 
*/ - private def withPlaceholders(tree: =>Tree, isAny: Boolean): Tree = { + private def withPlaceholders(tree: => Tree, isAny: Boolean): Tree = { val savedPlaceholderParams = placeholderParams placeholderParams = List() var res = tree @@ -1386,41 +1529,33 @@ self => } private def interpolatedString(inPattern: Boolean): Tree = { - def errpolation() = syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected", - skipIt = true)(EmptyTree) - // Like Swiss cheese, with holes - def stringCheese: Tree = atPos(in.offset) { - val start = in.offset - val interpolator = in.name.encoded // ident() for INTERPOLATIONID - - val partsBuf = new ListBuffer[Tree] - val exprsBuf = new ListBuffer[Tree] - in.nextToken() - while (in.token == STRINGPART) { - partsBuf += literal() - exprsBuf += ( - if (inPattern) dropAnyBraces(pattern()) - else in.token match { - case IDENTIFIER => atPos(in.offset)(Ident(ident())) - //case USCORE => freshPlaceholder() // ifonly etapolation - case LBRACE => expr() // dropAnyBraces(expr0(Local)) - case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) - case _ => errpolation() - } - ) - } - if (in.token == STRINGLIT) partsBuf += literal() - - // Documenting that it is intentional that the ident is not rooted for purposes of virtualization - //val t1 = atPos(o2p(start)) { Select(Select (Ident(nme.ROOTPKG), nme.scala_), nme.StringContext) } - val t1 = atPos(o2p(start)) { Ident(nme.StringContext) } - val t2 = atPos(start) { Apply(t1, partsBuf.toList) } - t2 setPos t2.pos.makeTransparent - val t3 = Select(t2, interpolator) setPos t2.pos - atPos(start) { Apply(t3, exprsBuf.toList) } + val start = in.offset + val interpolator = in.name.encoded // ident() for INTERPOLATIONID + val partsBuf = ListBuffer.empty[Tree] + val exprsBuf = ListBuffer.empty[Tree] + in.nextToken() + while (in.token == STRINGPART) { + partsBuf += literal() + exprsBuf += ( + if (inPattern) dropAnyBraces(pattern()) + else in.token match { + case IDENTIFIER 
=> atPos(in.offset)(Ident(ident())) + case LBRACE => expr() + case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) + case _ => syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected", skipIt = true)(EmptyTree) + } + ) } - if (inPattern) stringCheese - else withPlaceholders(stringCheese, isAny = true) // string interpolator params are Any* by definition + if (in.token == STRINGLIT) partsBuf += literal() + + // Scala 2 allowed uprooted Ident for purposes of virtualization + val t1 = + if (currentRun.sourceFeatures.stringContextScope) atPos(o2p(start)) { Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.StringContextName) } + else atPos(o2p(start)) { Ident(nme.StringContextName).updateAttachment(VirtualStringContext) } + val t2 = atPos(start) { Apply(t1, partsBuf.toList) } updateAttachment InterpolatedString + t2 setPos t2.pos.makeTransparent + val t3 = Select(t2, interpolator) setPos t2.pos + atPos(start) { Apply(t3, exprsBuf.toList) } updateAttachment InterpolatedString } /* ------------- NEW LINES ------------------------------------------------- */ @@ -1447,7 +1582,7 @@ self => /* ------------- TYPES ---------------------------------------------------- */ /** {{{ - * TypedOpt ::= [`:' Type] + * TypedOpt ::= [`:` Type] * }}} */ def typedOpt(): Tree = @@ -1459,10 +1594,10 @@ self => else startInfixType() def annotTypeRest(t: Tree): Tree = - (t /: annotations(skipNewLines = false)) (makeAnnotated) + annotations(skipNewLines = false).foldLeft(t)(makeAnnotated) /** {{{ - * WildcardType ::= `_' TypeBounds + * WildcardType ::= `_` TypeBounds * }}} */ def wildcardType(start: Offset, qmark: Boolean) = { @@ -1481,6 +1616,11 @@ self => in.nextToken() val r = expr() accept(RPAREN) + if (isWildcard(r)) + placeholderParams.head.tpt match { + case TypeTree() => placeholderParams.head.updateAttachment(BooleanParameterType) + case _ => + } r } else { accept(LPAREN) @@ -1494,26 +1634,26 @@ self => def statement(location: Location): Tree 
= expr(location) // !!! still needed? /** {{{ - * Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr + * Expr ::= (Bindings | [`implicit`] Id | `_`) `=>` Expr * | Expr1 - * ResultExpr ::= (Bindings | Id `:' CompoundType) `=>' Block + * ResultExpr ::= (Bindings | Id `:` CompoundType) `=>` Block * | Expr1 - * Expr1 ::= if `(' Expr `)' {nl} Expr [[semi] else Expr] - * | try (`{' Block `}' | Expr) [catch `{' CaseClauses `}'] [finally Expr] - * | while `(' Expr `)' {nl} Expr - * | do Expr [semi] while `(' Expr `)' - * | for (`(' Enumerators `)' | `{' Enumerators `}') {nl} [yield] Expr + * Expr1 ::= if `(` Expr `)` {nl} Expr [[semi] else Expr] + * | try (`{` Block `}` | Expr) [catch `{` CaseClauses `}`] [finally Expr] + * | while `(` Expr `)` {nl} Expr + * | do Expr [semi] while `(` Expr `)` + * | for (`(` Enumerators `)` | `{` Enumerators `}`) {nl} [yield] Expr * | throw Expr * | return [Expr] - * | [SimpleExpr `.'] Id `=' Expr - * | SimpleExpr1 ArgumentExprs `=' Expr + * | [SimpleExpr `.`] Id `=` Expr + * | SimpleExpr1 ArgumentExprs `=` Expr * | PostfixExpr Ascription - * | PostfixExpr match `{' CaseClauses `}' - * Bindings ::= `(' [Binding {`,' Binding}] `)' - * Binding ::= (Id | `_') [`:' Type] - * Ascription ::= `:' CompoundType - * | `:' Annotation {Annotation} - * | `:' `_' `*' + * | PostfixExpr match `{` CaseClauses `}` + * Bindings ::= `(` [Binding {`,` Binding}] `)` + * Binding ::= (Id | `_`) [`:` Type] + * Ascription ::= `:` CompoundType + * | `:` Annotation {Annotation} + * | `:` `_` `*` * }}} */ def expr(): Tree = expr(Local) @@ -1526,34 +1666,36 @@ self => val cond = condExpr() newLinesOpt() val thenp = expr() - val elsep = if (in.token == ELSE) { in.nextToken(); expr() } - else literalUnit + val elsep = + if (in.token == ELSE) { + in.nextToken() + expr() + } + else { + // user asked to silence warnings on unibranch if; also suppresses value discard + if (settings.warnNonUnitIf.isSetByUser && !settings.warnNonUnitIf.value) { + thenp match { + case 
Block(_, res) => res.updateAttachment(TypedExpectingUnitAttachment) + case _ => () + } + thenp.updateAttachment(TypedExpectingUnitAttachment) + } + literalUnit + } If(cond, thenp, elsep) } parseIf case TRY => def parseTry = atPos(in.skipToken()) { - val body = in.token match { - case LBRACE => inBracesOrUnit(block()) - case LPAREN => inParensOrUnit(expr()) - case _ => expr() - } - def catchFromExpr() = List(makeCatchFromExpr(expr())) - val catches: List[CaseDef] = - if (in.token != CATCH) Nil - else { - in.nextToken() - if (in.token != LBRACE) catchFromExpr() - else inBracesOrNil { - if (in.token == CASE) caseClauses() - else catchFromExpr() - } - } + val body = expr() + val handler: List[CaseDef] = + if (in.token == CATCH) { in.nextToken(); makeMatchFromExpr(expr()) } + else Nil val finalizer = in.token match { - case FINALLY => in.nextToken(); expr() - case _ => EmptyTree + case FINALLY => in.nextToken() ; expr() + case _ => EmptyTree } - Try(body, catches, finalizer) + Try(body, handler, finalizer) } parseTry case WHILE => @@ -1586,12 +1728,13 @@ self => if (in.token == LBRACE) inBracesOrNil(enumerators()) else inParensOrNil(enumerators()) newLinesOpt() - if (in.token == YIELD) { - in.nextToken() - gen.mkFor(enums, gen.Yield(expr())) - } else { - gen.mkFor(enums, expr()) - } + val body = + if (in.token == YIELD) { + in.nextToken() + gen.Yield(expr()) + } else + expr() + gen.mkFor(enums, body) } def adjustStart(tree: Tree) = if (tree.pos.isRange && start < tree.pos.start) @@ -1613,55 +1756,57 @@ self => case IMPLICIT => implicitClosure(in.skipToken(), location) case _ => - def parseOther = { + def parseOther: Tree = { var t = postfixExpr() - if (in.token == EQUALS) { - t match { - case Ident(_) | Select(_, _) | Apply(_, _) => - t = atPos(t.pos.start, in.skipToken()) { gen.mkAssign(t, expr()) } - case _ => - } - } else if (in.token == COLON) { - t = stripParens(t) - val colonPos = in.skipToken() - if (in.token == USCORE) { - //todo: need to handle case where 
USCORE is a wildcard in a type - val uscorePos = in.skipToken() - if (isIdent && in.name == nme.STAR) { - in.nextToken() - t = atPos(t.pos.start, colonPos) { - Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) }) - } - } else { - syntaxErrorOrIncomplete("`*' expected", skipIt = true) + in.token match { + case EQUALS => + t match { + case Ident(_) | Select(_, _) | Apply(_, _) => + t = atPos(t.pos.start, in.skipToken()) { gen.mkAssign(t, expr()) } + case _ => } - } else if (isAnnotation) { - t = (t /: annotations(skipNewLines = false))(makeAnnotated) - } else { - t = atPos(t.pos.start, colonPos) { - val tpt = typeOrInfixType(location) - if (isWildcard(t)) - (placeholderParams: @unchecked) match { - case (vd @ ValDef(mods, name, _, _)) :: rest => - placeholderParams = treeCopy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest + case COLON => + t = stripParens(t) + val colonPos = in.skipToken() + if (in.token == USCORE) { + //todo: need to handle case where USCORE is a wildcard in a type + val uscorePos = in.skipToken() + if (isIdent && in.name == nme.STAR) { + in.nextToken() + t = atPos(t.pos.start, colonPos) { + Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) }) } - // this does not correspond to syntax, but is necessary to - // accept closures. We might restrict closures to be between {...} only. 
- Typed(t, tpt) + } + else syntaxErrorOrIncomplete("`*` expected", skipIt = true) } - } - } else if (in.token == MATCH) { - t = atPos(t.pos.start, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses()))) + else if (isAnnotation) + t = annotations(skipNewLines = false).foldLeft(t)(makeAnnotated) + else + t = atPos(t.pos.start, colonPos) { + val tpt = typeOrInfixType(location) + // for placeholder syntax `(_: Int) + 1`; function literal `(_: Int) => 42` uses `t` below + if (isWildcard(t)) + (placeholderParams: @unchecked) match { + case (vd @ ValDef(mods, name, _, _)) :: rest => + placeholderParams = treeCopy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest + } + // this does not correspond to syntax, but is necessary to accept closures. See below & convertToParam. + Typed(t, tpt) + } + case MATCH => + t = atPos(t.pos.start, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses()))) + case _ => } - // in order to allow anonymous functions as statements (as opposed to expressions) inside - // templates, we have to disambiguate them from self type declarations - bug #1565 - // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which - // may be impossible to distinguish from a self-type and so remains an error. (See #1564) + // disambiguate between self types "x: Int =>" and orphan function literals "(x: Int) => ???" + // "(this: Int) =>" is parsed as an erroneous function literal but emits special guidance on + // what's probably intended. 
def lhsIsTypedParamList() = t match { - case Parens(xs) if xs.forall(isTypedParam) => true - case _ => false + case Parens(List(Typed(This(_), _))) => reporter.error(t.pos, "self-type annotation may not be in parentheses"); false + case Parens(xs) => xs.forall(isTypedParam) + case _ => false } - if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) { + + if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList())) { t = atPos(t.pos.start, in.skipToken()) { Function(convertToParams(t), if (location != InBlock) expr() else block()) } @@ -1674,18 +1819,18 @@ self => def isTypedParam(t: Tree) = t.isInstanceOf[Typed] /** {{{ - * Expr ::= implicit Id => Expr + * Expr ::= implicit Id `=>` Expr * }}} */ - def implicitClosure(start: Offset, location: Location): Tree = { val param0 = convertToParam { atPos(in.offset) { - Ident(ident()) match { - case expr if in.token == COLON => - in.nextToken() ; Typed(expr, typeOrInfixType(location)) - case expr => expr + val p = stripParens(postfixExpr()) //if (in.token == USCORE) freshPlaceholder() else Ident(ident()) + if (in.token == COLON) { + in.nextToken() + Typed(p, typeOrInfixType(location)) } + else p } } val param = copyValDef(param0)(mods = param0.mods | Flags.IMPLICIT) @@ -1705,6 +1850,7 @@ self => val start = in.offset val base = opstack + @tailrec def loop(top: Tree): Tree = if (!isIdent || followingIsScala3Vararg()) top else { pushOpInfo(reduceExprStack(base, top)) newLineOptWhenFollowing(isExprIntroToken) @@ -1725,38 +1871,37 @@ self => } /** {{{ - * PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr + * PrefixExpr ::= [`-` | `+` | `~` | `!`] SimpleExpr * }}} */ - def prefixExpr(): Tree = { + def prefixExpr(): Tree = if (isUnaryOp) { val start = in.offset atPos(start) { - if (lookingAhead(isSimpleExprIntro)) { + if (lookingAhead(isExprIntro)) { val namePos = in.offset val uname = nme.toUnaryName(rawIdent().toTermName) if (uname == nme.UNARY_- && isNumericLit) - /* start at the -, not the 
number */ + // start at the -, not the number simpleExprRest(start, literal(isNegated = true, start = namePos), canApply = true) else Select(stripParens(simpleExpr()), uname) } else simpleExpr() } - } - else simpleExpr() - } + } else simpleExpr() + def xmlLiteral(): Tree /** {{{ * SimpleExpr ::= new (ClassTemplate | TemplateBody) * | BlockExpr - * | SimpleExpr1 [`_'] + * | SimpleExpr1 [`_`] * SimpleExpr1 ::= literal * | xLiteral * | Path - * | `(' [Exprs] `)' - * | SimpleExpr `.' Id + * | `(` [Exprs] `)` + * | SimpleExpr `.` Id * | SimpleExpr TypeArgs * | SimpleExpr1 ArgumentExprs * }}} @@ -1792,7 +1937,8 @@ self => simpleExprRest(start, t, canApply = canApply) } - def simpleExprRest(start: Offset, t: Tree, canApply: Boolean): Tree = { + @tailrec + final def simpleExprRest(start: Offset, t: Tree, canApply: Boolean): Tree = { if (canApply) newLineOptWhenFollowedBy(LBRACE) in.token match { case DOT => @@ -1801,7 +1947,7 @@ self => case LBRACKET => val t1 = stripParens(t) t1 match { - case Ident(_) | Select(_, _) | Apply(_, _) => + case Ident(_) | Select(_, _) | Apply(_, _) | Literal(_) => var app: Tree = t1 while (in.token == LBRACKET) app = atPos(t.pos.start, in.offset)(TypeApply(app, exprTypeArgs())) @@ -1837,9 +1983,14 @@ self => * }}} */ def argumentExprs(): List[Tree] = { - def args(): List[Tree] = commaSeparated( - if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr() - ) + def args(): List[Tree] = commaSeparated { + val checkNamedArg = isIdent + expr() match { + case t @ Assign(id: Ident, rhs) if checkNamedArg => atPos(t.pos)(NamedArg(id, rhs)) + case t @ Literal(Constant(_: Boolean)) => t.updateAttachment(UnnamedArg) + case t => t + } + } in.token match { case LBRACE => List(blockExpr()) case LPAREN => inParens { @@ -1859,7 +2010,7 @@ self => } /** {{{ - * BlockExpr ::= `{' (CaseClauses | Block) `}' + * BlockExpr ::= `{` (CaseClauses | Block) `}` * }}} */ def blockExpr(): Tree = atPos(in.offset) { @@ -1886,7 +2037,7 @@ self => /** {{{ * 
CaseClauses ::= CaseClause {CaseClause} - * CaseClause ::= case Pattern [Guard] `=>' Block + * CaseClause ::= case Pattern [Guard] `=>` Block * }}} */ def caseClauses(): List[CaseDef] = { @@ -1913,7 +2064,7 @@ self => * Enumerators ::= Generator {semi Enumerator} * Enumerator ::= Generator * | Guard - * | val Pattern1 `=' Expr + * | Pattern1 `=` Expr * }}} */ def enumerators(): List[Tree] = { @@ -1926,12 +2077,16 @@ self => enums.toList } - def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true): List[Tree] = - if (in.token == IF && !isFirst) makeFilter(in.offset, guard()) :: Nil + def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true): List[Tree] = { + def loop(): List[Tree] = + if (in.token != IF) Nil + else makeFilter(in.offset, guard()) :: loop() + if (in.token == IF && !isFirst) loop() else generator(!isFirst, allowNestedIf) + } /** {{{ - * Generator ::= [`case'] Pattern1 (`<-' | `=') Expr [Guard] + * Generator ::= [`case`] Pattern1 (`<-` | `=`) Expr [Guard] * }}} */ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = { @@ -1941,6 +2096,7 @@ self => in.skipCASE() val hasVal = in.token == VAL + val valOffset = in.offset if (hasVal) in.nextToken() @@ -1949,8 +2105,15 @@ self => val hasEq = in.token == EQUALS if (hasVal) { - if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated", "2.10.0") - else syntaxError(in.offset, "val in for comprehension must be followed by assignment") + def actions(msg: String) = runReporting.codeAction("remove `val` keyword", r2p(valOffset, valOffset, valOffset + 4), "", msg, expected = Some(("val ", unit))) + def msg(what: String, instead: String): String = s"`val` keyword in for comprehension is $what: $instead" + if (hasEq) { + val without = "instead, bind the value without `val`" + migrationWarning(in.offset, msg("deprecated", without), msg("unsupported", without), since="2.10.0", actions=actions(_)) + } else { + val m = msg("unsupported", "just remove `val`") 
+ syntaxError(in.offset, m, actions(m)) + } } if (hasEq && eqOK && !hasCase) in.nextToken() @@ -1978,40 +2141,37 @@ self => /** Methods which implicitly propagate whether the initial call took * place in a context where sequences are allowed. Formerly, this * was threaded through methods as boolean seqOK. + * @param isSequenceOK is a sequence pattern _* allowed? + * @param isXML are we in an XML pattern? */ - trait SeqContextSensitive extends PatternContextSensitive { - // is a sequence pattern _* allowed? - def isSequenceOK: Boolean - - // are we in an XML pattern? - def isXML: Boolean = false - - def functionArgType(): Tree = argType() - def argType(): Tree = { + final class SeqContextSensitive(val isSequenceOK: Boolean, val isXML: Boolean) extends PatternContextSensitive { + final def functionArgType(): Tree = argType() + final def argType(): Tree = { val start = in.offset if (isWildcardType) { val scala3Wildcard = isScala3WildcardType in.nextToken() if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start, scala3Wildcard) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } - } else - typ() match { - case Ident(name: TypeName) if nme.isVariableName(name) => + } else { + this.typ() match { + case t @ Ident(name: TypeName) if nme.isVariableName(name) && !t.hasAttachment[BackquotedIdentifierAttachment.type] => atPos(start) { Bind(name, EmptyTree) } case t => t } + } } /** {{{ - * Patterns ::= Pattern { `,' Pattern } - * SeqPatterns ::= SeqPattern { `,' SeqPattern } + * Patterns ::= Pattern { `,` Pattern } + * SeqPatterns ::= SeqPattern { `,` SeqPattern } * }}} */ def patterns(): List[Tree] = commaSeparated(pattern()) /** {{{ - * Pattern ::= Pattern1 { `|' Pattern1 } - * SeqPattern ::= SeqPattern1 { `|' SeqPattern1 } + * Pattern ::= Pattern1 { `|` Pattern1 } + * SeqPattern ::= SeqPattern1 { `|` SeqPattern1 } * }}} */ def pattern(): Tree = { @@ -2027,47 +2187,42 @@ self => } /** {{{ - * Pattern1 ::= boundvarid `:' TypePat - * | `_' `:' TypePat + * 
Pattern1 ::= boundvarid `:` TypePat + * | `_` `:` TypePat * | Pattern2 - * SeqPattern1 ::= boundvarid `:' TypePat - * | `_' `:' TypePat + * SeqPattern1 ::= boundvarid `:` TypePat + * | `_` `:` TypePat * | [SeqPattern2] * }}} */ def pattern1(): Tree = pattern2() match { case p @ Ident(name) if in.token == COLON => - if (nme.isVariableName(name)) { - p.removeAttachment[BackquotedIdentifierAttachment.type] - atPos(p.pos.start, in.skipToken())(Typed(p, compoundType())) - } else { - syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)") - p - } + if (!nme.isVariableName(name)) + syntaxError(p.pos.point, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)") + p.removeAttachment[BackquotedIdentifierAttachment.type] + atPos(p.pos.start, in.skipToken())(Typed(p, compoundType())) case p => p } /** {{{ - * Pattern2 ::= id @ Pattern3 - * | `_' @ Pattern3 + * Pattern2 ::= id `@` Pattern3 + * | `_` `@` Pattern3 * | Pattern3 * }}} */ - def pattern2(): Tree = (pattern3(), in.token) match { - case (Ident(nme.WILDCARD), AT) => - in.nextToken() - pattern3() - case (p @ Ident(name), AT) => + def pattern2(): Tree = pattern3() match { + case p @ Ident(name) if in.token == AT => in.nextToken() val body = pattern3() - atPos(p.pos.start, p.pos.start, body.pos.end) { + if (name == nme.WILDCARD) body + else atPos(p.pos.start, p.pos.start, body.pos.end) { val t = Bind(name, body) body match { - case Ident(nme.WILDCARD) if settings.warnUnusedPatVars => t updateAttachment NoWarnAttachment + case Ident(nme.WILDCARD) if settings.warnUnusedPatVars || settings.warnPatternShadow => t.updateAttachment(NoWarnAttachment) case _ => t } } - case (p, _) => p + case p => p } /** {{{ @@ -2076,26 +2231,31 @@ self => * }}} */ def pattern3(): Tree = { - val top = simplePattern(badPattern3) + val top = simplePattern(() => badPattern3()) val base = opstack // See scala/bug#3189, scala/bug#4832 for motivation. Cf scala/bug#3480 for counter-motivation. 
def isCloseDelim = in.token match { case RBRACE => isXML case RPAREN => !isXML + case COMMA => !isXML && in.isTrailingComma(RPAREN) case _ => false } - def checkWildStar: Tree = top match { - case Ident(nme.WILDCARD) if isSequenceOK && isRawStar && lookingAhead(isCloseDelim) => - atPos(top.pos.start, in.skipToken()) { Star(top) } - case Ident(name) if isSequenceOK && followingIsScala3Vararg() => - atPos(top.pos.start) { - Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) + def checkWildStar: Tree = + if (isSequenceOK) { + top match { + case Ident(nme.WILDCARD) if isRawStar && lookingAhead(isCloseDelim) => + atPos(top.pos.start, in.skipToken()) { Star(top) } + case Ident(name) if followingIsScala3Vararg() => + atPos(top.pos.start) { + Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) + } + case _ => EmptyTree } - case _ => - EmptyTree - } - def loop(top: Tree): Tree = reducePatternStack(base, top) match { - case next if isIdent && !isRawBar => pushOpInfo(next) ; loop(simplePattern(badPattern3)) + } + else EmptyTree + @tailrec + def loop(top: Tree): Tree = reduceStack(isExpr = false, base, top) match { + case next if isIdent && !isRawBar => pushOpInfo(next); loop(simplePattern(() => badPattern3())) case next => next } checkWildStar orElse stripParens(loop(top)) @@ -2106,9 +2266,9 @@ self => def isDelimiter = in.token == RPAREN || in.token == RBRACE def isCommaOrDelimiter = isComma || isDelimiter val (isUnderscore, isStar) = opstack match { - case OpInfo(Ident(nme.WILDCARD), nme.STAR, _, _) :: _ => (true, true) - case OpInfo(_, nme.STAR, _, _) :: _ => (false, true) - case _ => (false, false) + case OpInfo(Ident(nme.WILDCARD), nme.STAR, _, _, _) :: _ => (true, true) + case OpInfo(_, nme.STAR, _, _, _) :: _ => (false, true) + case _ => (false, false) } def isSeqPatternClose = isUnderscore && isStar && isSequenceOK && isDelimiter val preamble = "bad simple pattern:" @@ -2128,21 +2288,19 @@ self => /** {{{ * SimplePattern ::= varid - * | `_' + 
* | `_` * | literal * | XmlPattern - * | StableId /[TypeArgs]/ [`(' [Patterns] `)'] - * | StableId [`(' [Patterns] `)'] - * | StableId [`(' [Patterns] `,' [varid `@'] `_' `*' `)'] - * | `(' [Patterns] `)' + * | StableId /[TypeArgs]/ [`(` [Patterns] `)`] + * | StableId [`(` [Patterns] `)`] + * | StableId [`(` [Patterns] `,` [varid `@`] `_` `*` `)`] + * | `(` [Patterns] `)` * }}} * * XXX: Hook for IDE */ - def simplePattern(): Tree = ( - // simple diagnostics for this entry point - simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree)) - ) + def simplePattern(): Tree = + simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree)) // simple diagnostics for this entry point def simplePattern(onError: () => Tree): Tree = { val start = in.offset in.token match { @@ -2181,23 +2339,19 @@ self => } } /** The implementation of the context sensitive methods for parsing outside of patterns. */ - object outPattern extends PatternContextSensitive { - def argType(): Tree = typ() - def functionArgType(): Tree = paramType(useStartAsPosition = true) + final val outPattern = new PatternContextSensitive { + def argType(): Tree = this.typ() + def functionArgType(): Tree = paramType(repeatedParameterOK = false, useStartAsPosition = true) } /** The implementation for parsing inside of patterns at points where sequences are allowed. */ - object seqOK extends SeqContextSensitive { - val isSequenceOK = true - } + final val seqOK = new SeqContextSensitive(isSequenceOK = true, isXML = false) + /** The implementation for parsing inside of patterns at points where sequences are disallowed. */ - object noSeq extends SeqContextSensitive { - val isSequenceOK = false - } + final val noSeq = new SeqContextSensitive(isSequenceOK = false, isXML = false) + /** For use from xml pattern, where sequence is allowed and encouraged. 
*/ - object xmlSeqOK extends SeqContextSensitive { - val isSequenceOK = true - override val isXML = true - } + final val xmlSeqOK = new SeqContextSensitive(isSequenceOK = true, isXML = true) + /** These are default entry points into the pattern context sensitive methods: * they are all initiated from non-pattern context. */ @@ -2222,6 +2376,7 @@ self => /** Drop `private` modifier when followed by a qualifier. * Contract `abstract` and `override` to ABSOVERRIDE */ + @tailrec private def normalizeModifiers(mods: Modifiers): Modifiers = if (mods.isPrivate && mods.hasAccessBoundary) normalizeModifiers(mods &~ Flags.PRIVATE) @@ -2233,14 +2388,14 @@ self => private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = { if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false) in.nextToken() - (mods | mod) withPosition (mod, pos) + (mods | mod).withPosition(mod, pos) } private def tokenRange(token: TokenData) = r2p(token.offset, token.offset, token.offset + token.name.length - 1) /** {{{ - * AccessQualifier ::= `[' (Id | this) `]' + * AccessQualifier ::= `[` (Id | this) `]` * }}} */ def accessQualifierOpt(mods: Modifiers): Modifiers = { @@ -2273,8 +2428,11 @@ self => */ def accessModifierOpt(): Modifiers = normalizeModifiers { in.token match { - case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m))) - case _ => NoMods + case m @ (PRIVATE | PROTECTED) => + in.nextToken() + accessQualifierOpt(Modifiers(flagTokens(m))) + case _ => + NoMods } } @@ -2286,6 +2444,7 @@ self => * }}} */ def modifiers(): Modifiers = normalizeModifiers { + @tailrec def loop(mods: Modifiers): Modifiers = in.token match { case PRIVATE | PROTECTED => loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in)))) @@ -2309,6 +2468,7 @@ self => * }}} */ def localModifiers(): Modifiers = { + @tailrec def loop(mods: Modifiers): Modifiers = if (isLocalModifier) loop(addMod(mods, flagTokens(in.token), 
tokenRange(in))) else mods @@ -2317,8 +2477,8 @@ self => } /** {{{ - * Annotations ::= {`@' SimpleType {ArgumentExprs}} - * ConsrAnnotations ::= {`@' SimpleType ArgumentExprs} + * Annotations ::= {`@` SimpleType {ArgumentExprs}} + * ConstrAnnotations ::= {`@` SimpleType ArgumentExprs} * }}} */ def annotations(skipNewLines: Boolean): List[Tree] = readAnnots { @@ -2339,14 +2499,14 @@ self => /* -------- PARAMETERS ------------------------------------------- */ /** {{{ - * ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)'] - * ParamClause ::= [nl] `(' [Params] `)' - * Params ::= Param {`,' Param} - * Param ::= {Annotation} Id [`:' ParamType] [`=' Expr] - * ClassParamClauses ::= {ClassParamClause} [[nl] `(' implicit ClassParams `)'] - * ClassParamClause ::= [nl] `(' [ClassParams] `)' - * ClassParams ::= ClassParam {`,' ClassParam} - * ClassParam ::= {Annotation} [{Modifier} (`val' | `var')] Id [`:' ParamType] [`=' Expr] + * ParamClauses ::= {ParamClause} [[nl] `(` implicit Params `)`] + * ParamClause ::= [nl] `(` [Params] `)` + * Params ::= Param {`,` Param} + * Param ::= {Annotation} Id [`:` ParamType] [`=` Expr] + * ClassParamClauses ::= {ClassParamClause} [[nl] `(` implicit ClassParams `)`] + * ClassParamClause ::= [nl] `(` [ClassParams] `)` + * ClassParams ::= ClassParam {`,` ClassParam} + * ClassParam ::= {Annotation} [{Modifier} (`val` | `var`)] Id [`:` ParamType] [`=` Expr] * }}} */ def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = { @@ -2354,7 +2514,7 @@ self => var implicitOffset = -1 var warnAt = -1 var caseParam = ofCaseClass - val vds = new ListBuffer[List[ValDef]] + val vds = ListBuffer.empty[List[ValDef]] val start = in.offset def paramClause(): List[ValDef] = if (in.token == RPAREN) Nil else { val implicitmod = @@ -2368,9 +2528,7 @@ self => } newLineOptWhenFollowedBy(LPAREN) while (in.token == LPAREN) { - in.nextToken() - vds += paramClause() - accept(RPAREN) + vds += inParens(paramClause()) 
caseParam = false newLineOptWhenFollowedBy(LPAREN) } @@ -2383,43 +2541,35 @@ self => if (vds.isEmpty) syntaxError(start, s"case classes must have a parameter list; try 'case class $name()' or 'case object $name'") else if (vds.head.nonEmpty && vds.head.head.mods.isImplicit) { - if (currentRun.isScala213) - syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") - else { - deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class $name()$elliptical'", "2.12.2") - vds.insert(0, List.empty[ValDef]) - vds(1) = vds(1).map(vd => copyValDef(vd)(mods = vd.mods & ~Flags.CASEACCESSOR)) - if (implicitSection != -1) implicitSection += 1 - } + syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") } } if (implicitSection != -1 && implicitSection != vds.length - 1) syntaxError(implicitOffset, "an implicit parameter section must be last") if (warnAt != -1) syntaxError(warnAt, "multiple implicit parameter sections are not allowed") - else if (settings.warnExtraImplicit) { + else if (settings.warnExtraImplicit.value) { // guard against anomalous class C(private implicit val x: Int)(implicit s: String) val ttl = vds.count { case ValDef(mods, _, _, _) :: _ => mods.isImplicit ; case _ => false } if (ttl > 1) warning(in.offset, s"$ttl parameter sections are effectively implicit", WarningCategory.WFlagExtraImplicit) } val result = vds.toList - if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) { + if (owner == nme.CONSTRUCTOR && (result.isEmpty || result.head.take(1).exists(_.mods.isImplicit))) in.token match { case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", skipIt = false) case EOF => incompleteInputError("auxiliary constructor needs non-implicit parameter list") case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter 
list", skipIt = false) } - } addEvidenceParams(owner, result, contextBounds) } /** {{{ - * ParamType ::= Type | `=>' Type | Type `*' + * ParamType ::= Type | `=>` Type | Type `*` * }}} */ - def paramType(): Tree = paramType(useStartAsPosition = false) - def paramType(useStartAsPosition: Boolean): Tree = { + def paramType(): Tree = paramType(repeatedParameterOK = true, useStartAsPosition = false) + def paramType(repeatedParameterOK: Boolean, useStartAsPosition: Boolean): Tree = { val start = in.offset in.token match { case ARROW => @@ -2429,14 +2579,15 @@ self => val t = typ() if (isRawStar) { in.nextToken() - if (useStartAsPosition) atPos(start)(repeatedApplication(t)) + if (!repeatedParameterOK) { syntaxError("repeated parameters are only allowed in method signatures; use Seq instead", skipIt = false) ; t } + else if (useStartAsPosition) atPos(start)(repeatedApplication(t)) else atPos(t.pos.start, t.pos.point)(repeatedApplication(t)) } else t } } - def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = { + def param(owner: Name, implicitmod: Long, caseParam: Boolean): ValDef = { val start = in.offset val annots = annotations(skipNewLines = false) var mods = Modifiers(Flags.PARAM) @@ -2445,7 +2596,7 @@ self => if (mods.isLazy) syntaxError("lazy modifier not allowed here. 
Use call-by-name parameters instead", skipIt = false) in.token match { case v @ (VAL | VAR) => - mods = mods withPosition (in.token.toLong, tokenRange(in)) + mods = mods.withPosition(in.token.toLong, tokenRange(in)) if (v == VAR) mods |= Flags.MUTABLE in.nextToken() case _ => @@ -2455,18 +2606,16 @@ self => if (caseParam) mods |= Flags.CASEACCESSOR } val nameOffset = in.offset + checkKeywordDefinition() val name = ident() - var bynamemod = 0 - val tpt = - if ((settings.YmethodInfer && !owner.isTypeName) && in.token != COLON) { - TypeTree() - } else { // XX-METHOD-INFER + var bynamemod = 0L + val tpt = { accept(COLON) if (in.token == ARROW) { if (owner.isTypeName && !mods.isLocalToThis) syntaxError( in.offset, - (if (mods.isMutable) "`var'" else "`val'") + + (if (mods.isMutable) "`var`" else "`val`") + " parameters may not be call-by-name", skipIt = false) else bynamemod = Flags.BYNAMEPARAM } @@ -2479,21 +2628,22 @@ self => expr() } else EmptyTree atPos(start, if (name == nme.ERROR) start else nameOffset) { - ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default) + ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name.toTermName, tpt, default) } } /** {{{ * TypeParamClauseOpt ::= [TypeParamClause] - * TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]'] - * VariantTypeParam ::= {Annotation} [`+' | `-'] TypeParam + * TypeParamClause ::= `[` VariantTypeParam {`,` VariantTypeParam} `]`] + * VariantTypeParam ::= {Annotation} [`+` | `-`] TypeParam * FunTypeParamClauseOpt ::= [FunTypeParamClause] - * FunTypeParamClause ::= `[' TypeParam {`,' TypeParam} `]'] - * TypeParam ::= Id TypeParamClauseOpt TypeBounds {<% Type} {":" Type} + * FunTypeParamClause ::= `[` TypeParam {`,` TypeParam} `]`] + * TypeParam ::= Id TypeParamClauseOpt TypeBounds {`<%` Type} {`:` Type} * }}} */ - def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree]): List[TypeDef] = { + def typeParamClauseOpt(owner: 
Name, contextBoundBuf: ListBuffer[Tree], ownerKind: ParamOwner): List[TypeDef] = { def typeParam(ms: Modifiers): TypeDef = { + val isAbstractOwner = ownerKind == ParamOwner.Type //|| ownerKind == ParamOwner.TypeParam var mods = ms | Flags.PARAM val start = in.offset if (owner.isTypeName && isIdent) { @@ -2506,40 +2656,51 @@ self => } } val nameOffset = in.offset - // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite - val pname: TypeName = wildcardOrIdent().toTypeName + checkQMarkDefinition() + checkKeywordDefinition() + val pname: TypeName = + if (in.token == USCORE) { + if (!isAbstractOwner) + migrationWarning(in.offset, "Top-level wildcard is not allowed", since = "2.13.7") + in.nextToken() + freshTypeName("_$$") + } + else ident(skipIt = false).toTypeName val param = atPos(start, nameOffset) { - val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now + val tparams = typeParamClauseOpt(pname, null, ParamOwner.Type) // @M TODO null --> no higher-order context bounds for now TypeDef(mods, pname, tparams, typeBounds()) } if (contextBoundBuf ne null) { + def msg(what: String) = s"""view bounds are $what; use an implicit parameter instead. + | example: instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`""".stripMargin while (in.token == VIEWBOUND) { - val msg = "Use an implicit parameter instead.\nExample: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`." - if (settings.future) - deprecationWarning(in.offset, s"View bounds are deprecated. 
$msg", "2.12.0") + migrationWarning(in.offset, msg("deprecated"), msg("unsupported"), since = "2.12.0") contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ())) } while (in.token == COLON) { - contextBoundBuf += atPos(in.skipToken()) { - AppliedTypeTree(typ(), List(Ident(pname))) + in.nextToken() + val colonBound = typ() + contextBoundBuf += atPos(colonBound.pos) { + AppliedTypeTree(colonBound, List(Ident(pname))) } } } param } - newLineOptWhenFollowedBy(LBRACKET) if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(skipNewLines = true)))) else Nil } /** {{{ - * TypeBounds ::= [`>:' Type] [`<:' Type] + * TypeBounds ::= [`>:` Type] [`<:` Type] * }}} */ def typeBounds(): TypeBoundsTree = { - val lo = bound(SUPERTYPE) - val hi = bound(SUBTYPE) - val t = TypeBoundsTree(lo, hi) + val t = checkNoEscapingPlaceholders { + val lo = bound(SUPERTYPE) + val hi = bound(SUBTYPE) + TypeBoundsTree(lo, hi) + } val defined = List(t.hi, t.lo) filter (_.pos.isDefined) if (defined.nonEmpty) @@ -2554,7 +2715,7 @@ self => /** {{{ - * Import ::= import ImportExpr {`,' ImportExpr} + * Import ::= import ImportExpr {`,` ImportExpr} * }}} */ def importClause(): List[Tree] = { @@ -2563,13 +2724,13 @@ self => case Nil => Nil case t :: rest => // The first import should start at the position of the keyword. - t.setPos(t.pos.withStart(offset)) + if (t.pos.isRange) t.setPos(t.pos.withStart(offset)) t :: rest } } /** {{{ - * ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors) + * ImportExpr ::= StableId `.` (Id | `_` | ImportSelectors) * }}} */ def importExpr(): Tree = { @@ -2582,20 +2743,21 @@ self => accept(DOT) result } - /* Walks down import `foo.bar.baz.{ ... }` until it ends at a + /* Walks down import `foo.bar.baz.{ ... }` until it ends at * an underscore, a left brace, or an undotted identifier. 
*/ def loop(expr: Tree): Tree = { expr setPos expr.pos.makeTransparent val selectors: List[ImportSelector] = in.token match { case USCORE => - List(importSelector()) // import foo.bar._ - case IDENTIFIER if currentRun.isScala3 && in.name == raw.STAR => - List(importSelector()) // import foo.bar.* - case LBRACE => - importSelectors() // import foo.bar.{ x, y, z } + List(wildImportSelector()) // import foo.bar._ + case IDENTIFIER if currentRun.isScala3 && (in.name == raw.STAR || in.name == nme.`given`) => + if (in.name == raw.STAR) List(wildImportSelector()) // import foo.bar.* + else List(importSelector()) // import foo.bar.given + case LBRACE => + importSelectors() // import foo.bar.{x, y, z, given, *} case _ => - if (settings.isScala3 && lookingAhead { isRawIdent && in.name == nme.as }) + if (currentRun.isScala3 && lookingAhead { isRawIdent && in.name == nme.as }) List(importSelector()) // import foo.bar as baz else { val nameOffset = in.offset @@ -2606,7 +2768,7 @@ self => in.nextToken() return loop(t) } - // import foo.bar.Baz; + // import foo.Bar else List(makeImportSelector(name, nameOffset)) } } @@ -2623,64 +2785,85 @@ self => else syntaxError(in.lastOffset, s". expected", skipIt = false) if (in.token == THIS) thisDotted(id.name.toTypeName) - else id + else { + if (id.name == nme.ROOTPKG) id.updateAttachment(RootSelection) + id + } }) } /** {{{ - * ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}' + * ImportSelectors ::= `{` {ImportSelector `,`} (ImportSelector | `_`) `}` * }}} */ def importSelectors(): List[ImportSelector] = { - val selectors0 = inBracesOrNil(commaSeparated(importSelector())) - - // Treat an import of `*, given` or `given, *` as if it was an import of `*` - // since the former in Scala 3 has the same semantics as the latter in Scala 2. 
- val selectors = - if (currentRun.isScala3 && selectors0.exists(_.name eq nme.WILDCARD)) - selectors0.filterNot(sel => sel.name == nme.`given` && sel.rename == sel.name) - else - selectors0 - - selectors.init foreach { - case ImportSelector(nme.WILDCARD, pos, _, _) => syntaxError(pos, "Wildcard import must be in last position") - case _ => () + def isWilder(sel: ImportSelector) = sel.isWildcard || sel.isGiven + // error on duplicate target names, import x.{a=>z, b=>z}, and fix import x.{given, *} to x._ + def checkSelectors(xs: List[ImportSelector]): List[ImportSelector] = xs match { + case h :: t => + // wildcards must come last, and for -Xsource:3, accept trailing given and/or *, converting {given, *} to * + if (isWilder(h)) { + val wildcard = + if (t.exists(!isWilder(_))) { + syntaxError(h.namePos, "wildcard import must be in last position") + h + } + else t match { + case Nil => h + case other :: Nil if h.isWildcard != other.isWildcard => + if (h.isWildcard) h else other + case _ => + val (wilds, givens) = xs.partition(_.isWildcard) + val dupes = if (wilds.length > 1) wilds else givens + syntaxError(dupes(1).namePos, "duplicate wildcard selector") + h + } + wildcard :: Nil + } + else { + if (!h.isMask) + t.find(_.rename == h.rename).foreach { duplicate => + val msg = + if (h.isRename || duplicate.isRename) + if (h.name == duplicate.name) s"${h.name} is renamed twice to ${h.rename}" + else s"${h.rename} is an ambiguous name on import" + else s"${h.rename} is imported twice" + syntaxError(duplicate.renamePos, msg) + } + h :: checkSelectors(t) + } + case _ => Nil } - selectors + checkSelectors(inBracesOrNil(commaSeparated(importSelector()))) } - def wildcardOrIdent() = { - if (in.token == USCORE || currentRun.isScala3 && isRawStar) { in.nextToken() ; nme.WILDCARD } + def wildcardOrIdent() = + if (in.token == USCORE || currentRun.isScala3 && isRawStar) { in.nextToken(); nme.WILDCARD } else ident() - } /** {{{ - * ImportSelector ::= Id [`=>' Id | `=>' `_'] + * 
ImportSelector ::= Id [`=>` Id | `=>` `_`] * }}} */ def importSelector(): ImportSelector = { - val start = in.offset - val name = - if (currentRun.isScala3 && isRawIdent && in.name == raw.STAR) { - in.nextToken() - nme.WILDCARD - } - else wildcardOrIdent() - var renameOffset = -1 - val rename = - if (in.token == ARROW || (currentRun.isScala3 && isRawIdent && in.name == nme.as)) { - in.nextToken() - renameOffset = in.offset - if (name == nme.WILDCARD) syntaxError(renameOffset, "Wildcard import cannot be renamed") - wildcardOrIdent() - } - else if (name == nme.WILDCARD) null - else { - renameOffset = start - name - } + val start = in.offset + val bbq = in.token == BACKQUOTED_IDENT + val name = wildcardOrIdent() + if (in.token == ARROW || (currentRun.isScala3 && isRawIdent && in.name == nme.as)) { + in.nextToken() + if (name == nme.WILDCARD && !bbq) syntaxError(in.offset, "Wildcard import cannot be renamed") + val renamePos = in.offset + ImportSelector(name, start, rename = wildcardOrIdent(), renamePos = renamePos) + } + else if (name == nme.WILDCARD && !bbq) ImportSelector.wildAt(start) + else if (currentRun.isScala3 && name == nme.`given` && !bbq) ImportSelector.givenAt(start) + else makeImportSelector(name, start) + } - ImportSelector(name, start, rename, renameOffset) + def wildImportSelector(): ImportSelector = { + val selector = ImportSelector.wildAt(in.offset) + in.nextToken() + selector } /** {{{ @@ -2700,13 +2883,13 @@ self => syntaxError("lazy not allowed here. 
Only vals can be lazy", skipIt = false) in.token match { case VAL => - patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in))) + patDefOrDcl(pos, mods.withPosition(VAL, tokenRange(in))) case VAR => - patDefOrDcl(pos, (mods | Flags.MUTABLE) withPosition (VAR, tokenRange(in))) + patDefOrDcl(pos, (mods | Flags.MUTABLE).withPosition(VAR, tokenRange(in))) case DEF => - List(funDefOrDcl(pos, mods withPosition(DEF, tokenRange(in)))) + List(funDefOrDcl(pos, mods.withPosition(DEF, tokenRange(in)))) case TYPE => - List(typeDefOrDcl(pos, mods withPosition(TYPE, tokenRange(in)))) + List(typeDefOrDcl(pos, mods.withPosition(TYPE, tokenRange(in)))) case _ => List(tmplDef(pos, mods)) } @@ -2714,64 +2897,100 @@ self => private def caseAwareTokenOffset = if (in.token == CASECLASS || in.token == CASEOBJECT) in.prev.offset else in.offset - def nonLocalDefOrDcl : List[Tree] = { + def nonLocalDefOrDcl: List[Tree] = { val annots = annotations(skipNewLines = true) defOrDcl(caseAwareTokenOffset, modifiers() withAnnotations annots) } /** {{{ - * PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr - * ValDcl ::= Id {`,' Id} `:' Type - * VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_' + * PatDef ::= Pattern2 {`,` Pattern2} [`:` Type] `=` Expr + * ValDcl ::= Id {`,` Id} `:` Type + * VarDef ::= PatDef | Id {`,` Id} `:` Type `=` `_` * }}} */ - def patDefOrDcl(pos : Int, mods: Modifiers): List[Tree] = { - var newmods = mods - in.nextToken() - val lhs = commaSeparated(stripParens(noSeq.pattern2())) - val tp = typedOpt() - - val (rhs, rhsPos) = - if (tp.isEmpty || in.token == EQUALS) { - accept(EQUALS) - if (!tp.isEmpty && newmods.isMutable && - (lhs.toList forall (_.isInstanceOf[Ident])) && in.token == USCORE) { - val start = in.skipToken() - newmods = newmods | Flags.DEFAULTINIT - (EmptyTree, r2p(start, in.offset)) - } else { - val t = expr() - (t, t.pos) - } - } else { - newmods = newmods | Flags.DEFERRED - (EmptyTree, NoPosition) + def patDefOrDcl(start: Int, mods: Modifiers): List[Tree] = 
{ + def mkDefs(mods: Modifiers, pat: Tree, rhs: Tree, rhsPos: Position, defPos: Position, isMulti: Boolean) = { + val trees = makePatDef(mods, pat, rhs, rhsPos) + def fixPoint(d: Tree, transparent: Boolean): Unit = { + val p = defPos.withPoint(d.pos.start) + d.setPos(if (transparent) p.makeTransparent else p) } - def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = { - val trees = { - val pat = if (tp.isEmpty) p else Typed(p, tp) setPos (p.pos union tp.pos) - makePatDef(newmods, pat, rhs, rhsPos) + trees match { + case d :: Nil => fixPoint(d, transparent = isMulti) + case trees => trees.tail.foreach(fixPoint(_, transparent = true)) // skip match expr } - if (newmods.isDeferred) { + if (mods.isDeferred) trees match { - case List(ValDef(_, _, _, EmptyTree)) => - if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false) - case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false) + case ValDef(_, _, _, EmptyTree) :: Nil => + if (mods.isLazy) syntaxError(pat.pos, "lazy values may not be abstract", skipIt = false) + else () + case _ => syntaxError(pat.pos, "pattern definition may not be abstract", skipIt = false) } - } trees } - val trees = (lhs.toList.init flatMap (mkDefs(_, tp.duplicate, rhs.duplicate))) ::: mkDefs(lhs.last, tp, rhs) - val hd = trees.head - hd setPos hd.pos.withStart(pos) - ensureNonOverlapping(hd, trees.tail) - trees + // begin + in.nextToken() + checkKeywordDefinition() + val lhs: List[Tree] = commaSeparated { + val nameStart = in.offset + noSeq.pattern2() match { + case t @ Ident(_) => + val namePos = NamePos(r2p(nameStart, nameStart)) + stripParens(t).updateAttachment(namePos) + case t => stripParens(t) + } + } + val tp = typedOpt() + val (rhs, rhsPos, newmods) = + if (!tp.isEmpty && in.token != EQUALS) + (EmptyTree, NoPosition, mods | Flags.DEFERRED) + else { + accept(EQUALS) + expr() match { + case x if !tp.isEmpty && mods.isMutable && lhs.forall(_.isInstanceOf[Ident]) && isWildcard(x) 
=> + tp match { + case SingletonTypeTree(Literal(Constant(_))) => + syntaxError(tp.pos, "default initialization prohibited for literal-typed vars", skipIt = false) + case _ => + } + placeholderParams = placeholderParams.tail + (EmptyTree, x.pos, mods | Flags.DEFAULTINIT) + case x => (x, x.pos, mods) + } + } + // each valdef gets transparent defPos with point at name and NamePos + val lhsPos = wrappingPos(lhs) + val defPos = + if (lhsPos.isRange) lhsPos.copyRange(start = start, end = in.lastOffset) + else o2p(start) + def typedPat(pat: Tree, tp: Tree, isLast: Boolean): Tree = + if (tp.isEmpty) pat + else Typed(pat, tp) + .setPos { + if (isLast) pat.pos | tp.pos + else ((pat.pos | tp.pos).makeTransparent) // pos may extend over other patterns + } + def expandPatDefs(lhs: List[Tree], expansion: List[Tree], isMulti: Boolean): List[Tree] = + lhs match { + case pat :: Nil => + // reuse tree on last (or only) expansion + expansion ::: mkDefs(newmods, typedPat(pat, tp, isLast = true), rhs, rhsPos, defPos, isMulti) + case pat :: lhs => + val ts = mkDefs(newmods, typedPat(pat, tp.duplicate, isLast = false), rhs.duplicate, rhsPos, defPos, isMulti) + expandPatDefs(lhs, expansion = expansion ::: ts, isMulti) + case x => throw new MatchError(x) // lhs must not be empty + } + expandPatDefs(lhs, expansion = Nil, lhs.lengthCompare(1) != 0) + .tap(trees => + if (trees.lengthCompare(1) > 0) + trees.foreach(_.updateAttachment[MultiDefAttachment.type](MultiDefAttachment)) + ) } /** {{{ * VarDef ::= PatDef - * | Id {`,' Id} `:' Type `=' `_' - * VarDcl ::= Id {`,' Id} `:' Type + * | Id {`,` Id} `:` Type `=` `_` + * VarDcl ::= Id {`,` Id} `:` Type * }}} def varDefOrDcl(mods: Modifiers): List[Tree] = { var newmods = mods | Flags.MUTABLE @@ -2797,57 +3016,69 @@ self => */ /** {{{ - * FunDef ::= FunSig [`:' Type] `=' [`macro'] Expr - * | FunSig [nl] `{' Block `}' - * | `this' ParamClause ParamClauses - * (`=' ConstrExpr | [nl] ConstrBlock) - * FunDcl ::= FunSig [`:' Type] + * FunDef ::= 
FunSig [`:` Type] `=` [`macro`] Expr + * | FunSig [nl] `{` Block `}` + * | `this` ParamClause ParamClauses + * (`=` ConstrExpr | [nl] ConstrBlock) + * FunDcl ::= FunSig [`:` Type] * FunSig ::= id [FunTypeParamClause] ParamClauses * }}} */ - def funDefOrDcl(start : Int, mods: Modifiers): Tree = { + def funDefOrDcl(start: Int, mods: Modifiers): Tree = { in.nextToken() if (in.token == THIS) { + def missingEquals() = { + val msg = "procedure syntax is deprecated for constructors: add `=`, as in method definition" + migrationWarning(in.lastOffset, msg, since = "2.13.2", actions = runReporting.codeAction("replace procedure syntax", o2p(in.lastOffset), " =", msg)) + } atPos(start, in.skipToken()) { - val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false) + val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds.map(_.duplicate), ofCaseClass = false) newLineOptWhenFollowedBy(LBRACE) - val rhs = in.token match { - case LBRACE => atPos(in.offset) { constrBlock(vparamss) } - case _ => accept(EQUALS) ; atPos(in.offset) { constrExpr(vparamss) } - } + val rhs = + if (in.token == LBRACE) { + missingEquals(); atPos(in.offset) { constrBlock() } + } + else { + accept(EQUALS) ; atPos(in.offset) { constrExpr() } + } DefDef(mods, nme.CONSTRUCTOR, List(), vparamss, TypeTree(), rhs) } } else { val nameOffset = in.offset + checkKeywordDefinition() val name = identOrMacro() funDefRest(start, nameOffset, mods, name) } } def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = { - val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) { + def orStart(p: Offset) = if (name.toTermName == nme.ERROR) start else p + val namePos = NamePos(r2p(orStart(nameOffset), orStart(nameOffset))) + val result = atPos(start, orStart(nameOffset)) { var newmods = mods // contextBoundBuf is for context bounded type parameters of the form // [T : B] or [T : => B]; it contains the equivalent implicit 
parameter type, // i.e. (B[T] or T => B) val contextBoundBuf = new ListBuffer[Tree] - val tparams = typeParamClauseOpt(name, contextBoundBuf) + val tparams = typeParamClauseOpt(name, contextBoundBuf, ParamOwner.Def) val vparamss = paramClauses(name, contextBoundBuf.toList, ofCaseClass = false) newLineOptWhenFollowedBy(LBRACE) var restype = fromWithinReturnType(typedOpt()) + def msg(what: String, instead: String) = + s"procedure syntax is $what: instead, add `$instead` to explicitly declare `$name`'s return type" + def declActions(msg: String) = runReporting.codeAction("add result type", o2p(in.lastOffset), ": Unit", msg) + def defnActions(msg: String) = runReporting.codeAction("replace procedure syntax", o2p(in.lastOffset), ": Unit =", msg) val rhs = if (isStatSep || in.token == RBRACE) { if (restype.isEmpty) { - if (settings.future) - deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.", "2.12.0") + migrationWarning(in.lastOffset, msg("deprecated", ": Unit"), msg("unsupported", ": Unit"), since = "2.13.0", actions = declActions) restype = scalaUnitConstr } newmods |= Flags.DEFERRED EmptyTree } else if (restype.isEmpty && in.token == LBRACE) { - if (settings.future) - deprecationWarning(in.offset, s"Procedure syntax is deprecated. 
Convert procedure `$name` to method by adding `: Unit =`.", "2.12.0") + migrationWarning(in.offset, msg("deprecated", ": Unit ="), msg("unsupported", ": Unit ="), since = "2.13.0", actions = defnActions) restype = scalaUnitConstr blockExpr() } else { @@ -2862,7 +3093,21 @@ self => } expr() } - DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs) + if (nme.isEncodedUnary(name) && vparamss.nonEmpty) { + def instead = DefDef(newmods, name.toTermName.decodedName, tparams, vparamss.drop(1), restype, rhs) + def unaryMsg(what: String) = s"unary prefix operator definition with empty parameter list is $what: instead, remove () to declare as `$instead`" + def action(msg: String) = { + val o = nameOffset + name.decode.length + runReporting.codeAction("remove ()", r2p(o, o, o + 2), "", msg, expected = Some(("()", unit))) + } + def warnNilary() = migrationWarning(nameOffset, unaryMsg("deprecated"), unaryMsg("unsupported"), since = "2.13.4", actions = action) + vparamss match { + case List(List()) => warnNilary() + case List(List(), x :: _) if x.mods.isImplicit => warnNilary() + case _ => // ok + } + } + DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs).updateAttachment(namePos) } signalParseProgress(result.pos) result @@ -2873,15 +3118,15 @@ self => * | ConstrBlock * }}} */ - def constrExpr(vparamss: List[List[ValDef]]): Tree = - if (in.token == LBRACE) constrBlock(vparamss) - else Block(selfInvocation(vparamss) :: Nil, literalUnit) + def constrExpr(): Tree = + if (in.token == LBRACE) constrBlock() + else Block(selfInvocation() :: Nil, literalUnit) /** {{{ * SelfInvocation ::= this ArgumentExprs {ArgumentExprs} * }}} */ - def selfInvocation(vparamss: List[List[ValDef]]): Tree = + def selfInvocation(): Tree = atPos(accept(THIS)) { newLineOptWhenFollowedBy(LBRACE) var t = Apply(Ident(nme.CONSTRUCTOR), argumentExprs()) @@ -2890,17 +3135,16 @@ self => t = Apply(t, argumentExprs()) newLineOptWhenFollowedBy(LBRACE) } - if (classContextBounds.isEmpty) t - 
else Apply(t, vparamss.last.map(vp => Ident(vp.name))) + t } /** {{{ - * ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}' + * ConstrBlock ::= `{` SelfInvocation {semi BlockStat} `}` * }}} */ - def constrBlock(vparamss: List[List[ValDef]]): Tree = + def constrBlock(): Tree = atPos(in.skipToken()) { - val stats = selfInvocation(vparamss) :: { + val stats = selfInvocation() :: { if (isStatSep) { in.nextToken(); blockStatSeq() } else Nil } @@ -2909,31 +3153,34 @@ self => } /** {{{ - * TypeDef ::= type Id [TypeParamClause] `=' Type - * | FunSig `=' Expr + * TypeDef ::= type Id [TypeParamClause] `=` Type + * | FunSig `=` Expr * TypeDcl ::= type Id [TypeParamClause] TypeBounds * }}} */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { in.nextToken() newLinesOpt() - atPos(start, in.offset) { - val name = identForType() + checkKeywordDefinition() + val nameOffset = in.offset + val name = identForType() + val namePos = NamePos(r2p(nameOffset, nameOffset)) + atPos(start, nameOffset) { // @M! a type alias as well as an abstract type may declare type parameters - val tparams = typeParamClauseOpt(name, null) + val tparams = typeParamClauseOpt(name, null, ParamOwner.Type) in.token match { case EQUALS => in.nextToken() TypeDef(mods, name, tparams, typ()) - case t if t == SUPERTYPE || t == SUBTYPE || t == COMMA || t == RBRACE || isStatSep(t) => + case SEMI | NEWLINE | NEWLINES | SUPERTYPE | SUBTYPE | RBRACE | EOF => TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds()) case _ => - syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)( + syntaxErrorOrIncompleteAnd("`=`, `>:`, or `<:` expected", skipIt = true)( // assume a dummy type def so as to have somewhere to stash the annotations TypeDef(mods, tpnme.ERROR, Nil, EmptyTree) ) } - } + }.updateAttachment(namePos) } /** Hook for IDE, for top-level classes/objects. 
*/ @@ -2954,15 +3201,15 @@ self => if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false) in.token match { case TRAIT => - classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in))) + classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT).withPosition(Flags.TRAIT, tokenRange(in))) case CLASS => classDef(pos, mods) case CASECLASS => - classDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'class', thus take prev*/))) + classDef(pos, (mods | Flags.CASE).withPosition(Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'class', thus take prev*/))) case OBJECT => objectDef(pos, mods) case CASEOBJECT => - objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/))) + objectDef(pos, (mods | Flags.CASE).withPosition(Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/))) case _ => syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)( // assume a class definition so as to have somewhere to stash the annotations @@ -2972,24 +3219,30 @@ self => } /** {{{ - * ClassDef ::= Id [TypeParamClause] {Annotation} + * ClassDef ::= Id [TypeParamClause] ConstrAnnotations * [AccessModifier] ClassParamClauses RequiresTypeOpt ClassTemplateOpt * TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt * }}} */ def classDef(start: Offset, mods: Modifiers): ClassDef = { + def isAfterLineEnd: Boolean = in.lastOffset < in.lineStartOffset && (in.lineStartOffset <= in.offset || in.lastOffset < in.lastLineStartOffset && in.lastLineStartOffset <= in.offset) in.nextToken() + checkKeywordDefinition() val nameOffset = in.offset val name = identForType() - atPos(start, if (name == tpnme.ERROR) start else nameOffset) { + if (currentRun.isScala3 && in.token == LBRACKET && isAfterLineEnd) + migrationWarning(in.offset, "type parameters should not follow 
newline", since = "2.13.7") + def orStart(p: Offset) = if (name == tpnme.ERROR) start else p + val namePos = NamePos(r2p(orStart(nameOffset), orStart(nameOffset))) + atPos(start, orStart(nameOffset)) { savingClassContextBounds { val contextBoundBuf = new ListBuffer[Tree] - val tparams = typeParamClauseOpt(name, contextBoundBuf) + val tparams = typeParamClauseOpt(name, contextBoundBuf, ParamOwner.Class) classContextBounds = contextBoundBuf.toList val tstart = (in.offset :: classContextBounds.map(_.pos.start)).min if (!classContextBounds.isEmpty && mods.isTrait) { - val viewBoundsExist = if (settings.future) "" else " nor view bounds `<% ...'" - syntaxError(s"traits cannot have type parameters with context bounds `: ...'$viewBoundsExist", skipIt = false) + val viewBoundsExist = if (currentRun.isScala3) "" else " nor view bounds `<% ...`" + syntaxError(s"traits cannot have type parameters with context bounds `: ...`$viewBoundsExist", skipIt = false) classContextBounds = List() } val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil @@ -2997,7 +3250,7 @@ self => if (mods.isTrait) (Modifiers(Flags.TRAIT), List()) else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase)) val template = templateOpt(mods, name, constrMods withAnnotations constrAnnots, vparamss, tstart) - val result = gen.mkClassDef(mods, name, tparams, template) + val result = gen.mkClassDef(mods, name, tparams, template).updateAttachment(namePos) // Context bounds generate implicit parameters (part of the template) with types // from tparams: we need to ensure these don't overlap if (!classContextBounds.isEmpty) @@ -3011,14 +3264,17 @@ self => * ObjectDef ::= Id ClassTemplateOpt * }}} */ - def objectDef(start: Offset, mods: Modifiers): ModuleDef = { + def objectDef(start: Offset, mods: Modifiers, isPackageObject: Boolean = false): ModuleDef = { in.nextToken() val nameOffset = in.offset + checkKeywordDefinition() val name = ident() val tstart = 
in.offset - atPos(start, if (name == nme.ERROR) start else nameOffset) { - val template = templateOpt(mods, name, NoMods, Nil, tstart) - ModuleDef(mods, name.toTermName, template) + def orStart(p: Offset) = if (name == tpnme.ERROR) start else p + val namePos = NamePos(r2p(orStart(nameOffset), orStart(nameOffset))) + atPos(start, orStart(nameOffset)) { + val template = templateOpt(mods, if (isPackageObject) nme.PACKAGEkw else name, NoMods, Nil, tstart) + ModuleDef(mods, name.toTermName, template).updateAttachment(namePos) } } @@ -3034,29 +3290,21 @@ self => * }}} */ def packageObjectDef(start: Offset): PackageDef = { - val defn = objectDef(in.offset, NoMods) + val defn = objectDef(in.offset, NoMods, isPackageObject = true) val pidPos = o2p(defn.pos.start) val pkgPos = r2p(start, pidPos.point) gen.mkPackageObject(defn, pidPos, pkgPos) } - def packageOrPackageObject(start: Offset): Tree = ( - if (in.token == OBJECT) - joinComment(packageObjectDef(start) :: Nil).head + + def packageOrPackageObject(start: Offset): Tree = + if (in.token == OBJECT) joinComment(packageObjectDef(start) :: Nil).head else { - in.flushDoc + in.flushDoc() makePackaging(start, pkgQualId(), inBracesOrNil(topStatSeq())) } - ) - // TODO - eliminate this and use "def packageObjectDef" (see call site of this - // method for small elaboration.) 
- def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match { - case ModuleDef(mods, name, impl) => - makePackaging( - start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl))) - } /** {{{ - * ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType} + * ClassParents ::= AnnotType {`(` [Exprs] `)`} {with AnnotType} * TraitParents ::= AnnotType {with AnnotType} * }}} */ @@ -3066,7 +3314,7 @@ self => val start = in.offset val parent = startAnnotType() parents += (in.token match { - case LPAREN => atPos(start)((parent /: multipleArgumentExprs())(Apply.apply)) + case LPAREN => atPos(start)(multipleArgumentExprs().foldLeft(parent)(Apply.apply)) case _ => parent }) } @@ -3078,16 +3326,21 @@ self => /** {{{ * ClassTemplate ::= [EarlyDefs with] ClassParents [TemplateBody] * TraitTemplate ::= [EarlyDefs with] TraitParents [TemplateBody] - * EarlyDefs ::= `{' [EarlyDef {semi EarlyDef}] `}' + * EarlyDefs ::= `{` [EarlyDef {semi EarlyDef}] `}` * EarlyDef ::= Annotations Modifiers PatDef * }}} */ def template(): (List[Tree], ValDef, List[Tree]) = { newLineOptWhenFollowedBy(LBRACE) if (in.token == LBRACE) { + val braceOffset = in.offset // @S: pre template body cannot stub like post body can! - val (self, body) = templateBody(isPre = true) + val (self, body) = templateBody() if (in.token == WITH && (self eq noSelfType)) { + val advice = + if (currentRun.isScala3) "use trait parameters instead." + else "they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits." 
+ migrationWarning(braceOffset, s"early initializers are deprecated; $advice", since = "2.13.0") val earlyDefs: List[Tree] = body.map(ensureEarlyDef).filter(_.nonEmpty) in.nextToken() val parents = templateParents() @@ -3107,7 +3360,8 @@ self => case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred => copyValDef(vdef)(mods = mods | Flags.PRESUPER) case tdef @ TypeDef(mods, name, tparams, rhs) => - deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.", "2.11.0") + def msg(what: String): String = s"early type members are $what: move them to the regular body; the semantics are the same" + migrationWarning(tdef.pos.point, msg("deprecated"), msg("unsupported"), since = "2.11.0") treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs) case docdef @ DocDef(comm, rhs) => treeCopy.DocDef(docdef, comm, rhs) @@ -3119,17 +3373,19 @@ self => } /** {{{ - * ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody] + * ClassTemplateOpt ::= `extends` ClassTemplate | [[`extends`] TemplateBody] * TraitTemplateOpt ::= TraitExtends TraitTemplate | [[TraitExtends] TemplateBody] - * TraitExtends ::= `extends' | `<:' (deprecated) + * TraitExtends ::= `extends` | `<:` (deprecated) * }}} */ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = { def deprecatedUsage(): Boolean = { - deprecationWarning(in.offset, "Using `<:` for `extends` is deprecated", since = "2.12.5") + val msg = "Using `<:` for `extends` is deprecated" + deprecationWarning(in.offset, msg, since = "2.12.5", + runReporting.codeAction("use `extends`", r2p(in.offset, in.offset, in.offset + 2), "extends", msg, expected = Some(("<:", unit)))) true } - val (parents, self, body) = ( + val (parents, self, body) = if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait && deprecatedUsage()) { in.nextToken() template() @@ -3139,40 +3395,37 @@ self => 
val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName) (List(), self, body) } - ) - def anyvalConstructor() = ( - // Not a well-formed constructor, has to be finished later - see note - // regarding AnyVal constructor in AddInterfaces. - DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) - ) - val parentPos = o2p(in.offset) - val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart + // Not a well-formed constructor, has to be finished later - see note + // regarding AnyVal constructor in AddInterfaces. + def anyvalConstructor() = DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) + // tstart is the offset of the token after `class C[A]` (which may be LPAREN, EXTENDS, LBRACE). + // if there is no template body, then tstart may be in the next program element, so back up to just after the `class C[A]`. + val templateOffset = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart + val templatePos = o2p(templateOffset) - atPos(tstart1) { + atPos(templateOffset) { // Exclude only the 9 primitives plus AnyVal. 
if (inScalaRootPackage && ScalaValueClassNames.contains(name)) - Template(parents, self, anyvalConstructor :: body) + Template(parents, self, anyvalConstructor() :: body) else - gen.mkTemplate(gen.mkParents(mods, parents, parentPos), - self, constrMods, vparamss, body, o2p(tstart)) + gen.mkTemplate(gen.mkParents(mods, parents, templatePos), self, constrMods, vparamss, body, templatePos) } } /* -------- TEMPLATES ------------------------------------------- */ /** {{{ - * TemplateBody ::= [nl] `{' TemplateStatSeq `}' + * TemplateBody ::= [nl] `{` TemplateStatSeq `}` * }}} - * @param isPre specifies whether in early initializer (true) or not (false) */ - def templateBody(isPre: Boolean) = inBraces(templateStatSeq(isPre = isPre)) match { - case (self, Nil) => (self, EmptyTree.asList) - case result => result + def templateBody() = inBraces(templateStatSeq()) match { + case (selfTypeVal, Nil) => (selfTypeVal, EmptyTree.asList) + case result => result } def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = { newLineOptWhenFollowedBy(LBRACE) if (in.token == LBRACE) { - templateBody(isPre = false) + templateBody() } else { if (in.token == LPAREN) { if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true) @@ -3183,7 +3436,7 @@ self => } /** {{{ - * Refinement ::= [nl] `{' RefineStat {semi RefineStat} `}' + * Refinement ::= [nl] `{` RefineStat {semi RefineStat} `}` * }}} */ def refinement(): List[Tree] = inBraces(refineStatSeq()) @@ -3193,6 +3446,7 @@ self => /** Create a tree representing a packaging. 
*/ def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match { case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats)) + case x => throw new MatchError(x) } def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = ( @@ -3215,7 +3469,7 @@ self => * TopStatSeq ::= TopStat {semi TopStat} * TopStat ::= Annotations Modifiers TmplDef * | Packaging - * | package object objectDef + * | package object ObjectDef * | Import * | * }}} @@ -3225,22 +3479,21 @@ self => case PACKAGE => packageOrPackageObject(in.skipToken()) :: Nil case IMPORT => - in.flushDoc + in.flushDoc() importClause() case _ if isAnnotation || isTemplateIntro || isModifier || isValidSoftModifier => joinComment(topLevelTmplDef :: Nil) } /** {{{ - * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStats + * TemplateStatSeq ::= [id [`:` Type] `=>`] TemplateStats * }}} - * @param isPre specifies whether in early initializer (true) or not (false) */ - def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders { + def templateStatSeq(): (ValDef, List[Tree]) = { var self: ValDef = noSelfType var firstOpt: Option[Tree] = None - if (isExprIntro) { - in.flushDoc + if (isExprIntro) checkNoEscapingPlaceholders { + in.flushDoc() val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed. 
if (in.token == ARROW) { first match { @@ -3272,15 +3525,15 @@ self => * | * }}} */ - def templateStats(): List[Tree] = statSeq(templateStat) + def templateStats(): List[Tree] = checkNoEscapingPlaceholders { statSeq(templateStat) } def templateStat: PartialFunction[Token, List[Tree]] = { case IMPORT => - in.flushDoc + in.flushDoc() importClause() case _ if isDefIntro || isModifier || isAnnotation || isValidSoftModifier => joinComment(nonLocalDefOrDcl) case _ if isExprIntro => - in.flushDoc + in.flushDoc() statement(InTemplate) :: Nil } @@ -3308,7 +3561,7 @@ self => } else if (!isStatSep) { syntaxErrorOrIncomplete( "illegal start of declaration"+ - (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)" + (if (inFunReturnType) " (possible cause: missing `=` in front of current method body)" else ""), skipIt = true) Nil } else Nil @@ -3327,10 +3580,10 @@ self => } */ - def localDef(implicitMod: Int): List[Tree] = { + def localDef(implicitMod: Long): List[Tree] = { val annots = annotations(skipNewLines = true) val pos = in.offset - val mods = (localModifiers() | implicitMod.toLong) withAnnotations annots + val mods = (localModifiers() | implicitMod) withAnnotations annots val defs = if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods) else List(tmplDef(pos, mods)) @@ -3361,7 +3614,7 @@ self => else if (isDefIntro || isLocalModifier || isAnnotation) { if (in.token == IMPLICIT) { val start = in.skipToken() - if (isIdent) stats += implicitClosure(start, InBlock) + if (isIdent || in.token == USCORE) stats += implicitClosure(start, InBlock) else stats ++= localDef(Flags.IMPLICIT) } else { stats ++= localDef(0) @@ -3395,16 +3648,14 @@ self => if (in.token == PACKAGE) { in.nextToken() if (in.token == OBJECT) { - // TODO - this next line is supposed to be - // ts += packageObjectDef(start) - // but this broke a scaladoc test (run/diagrams-filtering.scala) somehow. 
- ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods)))) + // note that joinComment is a hook method for scaladoc that takes a CBN arg -- tested by run/diagrams-filtering.scala + ts ++= joinComment(List(packageObjectDef(start))) if (in.token != EOF) { acceptStatSep() ts ++= topStatSeq() } } else { - in.flushDoc + in.flushDoc() val pkg = pkgQualId() if (in.token == EOF) { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala index 618d594a7fe3..67a632af1cdf 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,5 +12,7 @@ package scala.tools.nsc.ast.parser -class Patch(off: Int, change: Change) +import scala.annotation.unused + +class Patch(@unused off: Int, @unused change: Change) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index fbd051eeea1c..99edf1e42dc5 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,18 +13,17 @@ package scala.tools.nsc package ast.parser -import scala.tools.nsc.util.{ CharArrayReader, CharArrayReaderData } -import scala.reflect.internal.util._ -import scala.reflect.internal.Chars._ -import Tokens._ import scala.annotation.{switch, tailrec} -import scala.collection.mutable -import mutable.{ArrayBuffer, ListBuffer} +import scala.collection.mutable, mutable.{ArrayBuffer, ListBuffer} +import scala.reflect.internal.Chars._ +import scala.reflect.internal.util._ +import scala.tools.nsc.Reporting.WarningCategory, WarningCategory.Scala3Migration import scala.tools.nsc.ast.parser.xml.Utility.isNameStart +import scala.tools.nsc.settings.ScalaVersion +import scala.tools.nsc.util.{CharArrayReader, CharArrayReaderData} +import Tokens._ import java.lang.StringBuilder -import scala.tools.nsc.Reporting.WarningCategory - object Cbuf { final val TargetCapacity = 256 @@ -75,7 +74,10 @@ trait ScannersCommon { def error(off: Offset, msg: String): Unit def incompleteInputError(off: Offset, msg: String): Unit def warning(off: Offset, msg: String, category: WarningCategory): Unit - def deprecationWarning(off: Offset, msg: String, since: String): Unit + def deprecationWarning(off: Offset, msg: String, since: String, actions: List[CodeAction] = Nil): Unit + + // advance past COMMA NEWLINE RBRACE (to whichever token is the matching close bracket) + def skipTrailingComma(right: Token): Boolean = false } // Hooks for ScaladocUnitScanner and ScaladocJavaUnitScanner @@ -100,7 +102,7 @@ trait ScannersCommon { } def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = { - val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) } + val names = keywords.sortBy(_._1.start).map { case (k, v) => (k.start, v) } val low = names.head._1 val high = names.last._1 val arr = Array.fill(high - low + 1)(defaultToken) @@ -165,7 +167,54 @@ trait Scanners extends ScannersCommon { } abstract class Scanner extends 
CharArrayReader with TokenData with ScannerData with ScannerCommon with DocScanner { - private def isDigit(c: Char) = java.lang.Character isDigit c + def unit: CompilationUnit + + /** A switch whether operators at the start of lines can be infix operators. */ + private var allowLeadingInfixOperators = true + + private def isDigit(c: Char) = Character.isDigit(c) + + import Character.{isHighSurrogate, isLowSurrogate, isUnicodeIdentifierPart, isUnicodeIdentifierStart, isValidCodePoint, toCodePoint} + + // given char (ch) is high surrogate followed by low, codepoint passes predicate. + // true means supplementary chars were put to buffer. + // strict to require low surrogate (if not in string literal). + private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = + isHighSurrogate(high) && { + var res = false + val low = lookaheadReader.getc() + if (isLowSurrogate(low)) { + val codePoint = toCodePoint(high, low) + if (isValidCodePoint(codePoint)) { + if (test(codePoint)) { + putChar(high) + putChar(low) + nextChar() + nextChar() + res = true + } + } + else syntaxError(f"illegal character '\\u$high%04x\\u$low%04x'") + } + else if (!strict) { + putChar(high) + nextChar() + res = true + } + else syntaxError(f"illegal character '\\u$high%04x' missing low surrogate") + res + } + private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = + isHighSurrogate(ch) && { + val hi = ch + val r = lookaheadReader + r.nextRawChar() + val lo = r.ch + isLowSurrogate(lo) && { + val codepoint = toCodePoint(hi, lo) + isValidCodePoint(codepoint) && f(codepoint) + } + } private var openComments = 0 final protected def putCommentChar(): Unit = { processCommentChar(); nextChar() } @@ -215,8 +264,8 @@ trait Scanners extends ScannersCommon { } } - /** @pre ch == '/' - * Returns true if a comment was skipped. + /** Returns true if a comment was skipped. 
+ * @note Pre-condition: ch == '/' */ final def skipComment(): Boolean = ch match { case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; finishDocComment(); true @@ -236,14 +285,11 @@ trait Scanners extends ScannersCommon { /** A character buffer for literals */ - val cbuf = new StringBuilder + val cbuf = Cbuf.create() /** append Unicode character to "cbuf" buffer */ - protected def putChar(c: Char): Unit = { -// assert(cbuf.size < 10000, cbuf) - cbuf.append(c) - } + protected def putChar(c: Char): Unit = cbuf.append(c) /** Determines whether this scanner should emit identifier deprecation warnings, * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala. @@ -252,7 +298,7 @@ trait Scanners extends ScannersCommon { /** Clear buffer and set name and token */ private def finishNamed(idtoken: Token = IDENTIFIER): Unit = { - name = newTermName(cbuf.toString) + name = newTermName(cbuf.toCharArray) cbuf.clear() token = idtoken if (idtoken == IDENTIFIER) { @@ -304,11 +350,22 @@ trait Scanners extends ScannersCommon { /** Are we in a `${ }` block? such that RBRACE exits back into multiline string. */ private def inMultiLineInterpolatedExpression = { sepRegions match { - case RBRACE :: STRINGLIT :: STRINGPART :: rest => true + case RBRACE :: STRINGLIT :: STRINGPART :: _ => true case _ => false } } + def lookingAhead[A](body: => A): A = { + val saved = new ScannerData {} copyFrom this + val aLIO = allowLeadingInfixOperators + allowLeadingInfixOperators = false + nextToken() + try body finally { + this copyFrom saved + allowLeadingInfixOperators = aLIO + } + } + /** read next token and return last offset */ def skipToken(): Offset = { @@ -317,6 +374,21 @@ trait Scanners extends ScannersCommon { off } + // used by parser to distinguish pattern P(_*, p) from trailing comma. + // EOF is accepted for REPL, which can't look ahead past the current line. 
+ def isTrailingComma(right: Token): Boolean = + token == COMMA && lookingAhead(afterLineEnd() && token == right || token == EOF) + + override def skipTrailingComma(right: Token): Boolean = + if (token == COMMA) { + // SIP-27 Trailing Comma (multi-line only) support + // If a comma is followed by a new line & then a closing paren, bracket or brace + // then it is a trailing comma and is ignored + val saved = new ScannerData {} copyFrom this + fetchToken() + (afterLineEnd() && token == right || token == EOF) || { copyFrom(saved) ; false } + } else false + /** Allow an otherwise deprecated ident here */ private var allowIdent: Name = nme.EMPTY @@ -331,6 +403,39 @@ trait Scanners extends ScannersCommon { } } + // Adapt sepRegions according to last token + def adjustSepRegions(lastToken: Token): Unit = (lastToken: @switch) match { + case LPAREN => + sepRegions = RPAREN :: sepRegions + case LBRACKET => + sepRegions = RBRACKET :: sepRegions + case LBRACE => + sepRegions = RBRACE :: sepRegions + case CASE => + sepRegions = ARROW :: sepRegions + case RBRACE => + while (!sepRegions.isEmpty && sepRegions.head != RBRACE) + sepRegions = sepRegions.tail + if (!sepRegions.isEmpty) + sepRegions = sepRegions.tail + + discardDocBuffer() + case RBRACKET | RPAREN => + if (!sepRegions.isEmpty && sepRegions.head == lastToken) + sepRegions = sepRegions.tail + + discardDocBuffer() + case ARROW => + if (!sepRegions.isEmpty && sepRegions.head == lastToken) + sepRegions = sepRegions.tail + case STRINGLIT => + if (inMultiLineInterpolation) + sepRegions = sepRegions.tail.tail + else if (inStringInterpolation) + sepRegions = sepRegions.tail + case _ => + } + /** Advance beyond a case token without marking the CASE in sepRegions. * This method should be called to skip beyond CASE tokens that are * not part of matches, i.e. no ARROW is expected after them. 
@@ -341,42 +446,14 @@ trait Scanners extends ScannersCommon { sepRegions = sepRegions.tail } + /** True to warn about migration change in infix syntax. */ + private val infixMigration = settings.Xmigration.value <= ScalaVersion("2.13.2") + /** Produce next token, filling TokenData fields of Scanner. */ def nextToken(): Unit = { val lastToken = token - // Adapt sepRegions according to last token - (lastToken: @switch) match { - case LPAREN => - sepRegions = RPAREN :: sepRegions - case LBRACKET => - sepRegions = RBRACKET :: sepRegions - case LBRACE => - sepRegions = RBRACE :: sepRegions - case CASE => - sepRegions = ARROW :: sepRegions - case RBRACE => - while (!sepRegions.isEmpty && sepRegions.head != RBRACE) - sepRegions = sepRegions.tail - if (!sepRegions.isEmpty) - sepRegions = sepRegions.tail - - discardDocBuffer() - case RBRACKET | RPAREN => - if (!sepRegions.isEmpty && sepRegions.head == lastToken) - sepRegions = sepRegions.tail - - discardDocBuffer() - case ARROW => - if (!sepRegions.isEmpty && sepRegions.head == lastToken) - sepRegions = sepRegions.tail - case STRINGLIT => - if (inMultiLineInterpolation) - sepRegions = sepRegions.tail.tail - else if (inStringInterpolation) - sepRegions = sepRegions.tail - case _ => - } + adjustSepRegions(lastToken) // Read a token or copy it from `next` tokenData if (next.token == EMPTY) { @@ -396,6 +473,51 @@ trait Scanners extends ScannersCommon { next.token = EMPTY } + def isSimpleExprIntroToken(token: Token): Boolean = token match { + case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | + STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL | // literals + IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER | NEW | USCORE | + LPAREN | LBRACE | XMLSTART => true + case _ => false + } + + def insertNL(nl: Token): Unit = { + next.copyFrom(this) + // todo: make offset line-end of previous line? 
+ offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset + token = nl + } + + def isOperator: Boolean = token == BACKQUOTED_IDENT || token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1)) + + /* A leading infix operator must be followed by a lexically suitable expression. + * Usually any simple expr will do. However, a backquoted identifier may serve as + * either an op or a reference. So the additional constraint is that the following + * token can't be an assignment operator. (Dotty disallows binary ops, hence the + * test for unary.) See run/multiLineOps.scala for 42 + `x` on 3 lines, where + + * is not leading infix because backquoted x is non-unary op. + */ + def followedByInfixRHS: Boolean = { + //def isCandidateInfixRHS: Boolean = isSimpleExprIntroToken(token) && (!isOperator || nme.raw.isUnary(name) || token == BACKQUOTED_IDENT) + def isAssignmentOperator: Boolean = + name.endsWith('=') && !name.startsWith('=') && isOperatorPart(name.startChar) && + (name.length != 2 || (name.startChar match { case '!' | '<' | '>' => false case _ => true })) + def isCandidateInfixRHS: Boolean = isSimpleExprIntroToken(token) && (!isOperator || token == BACKQUOTED_IDENT || !isAssignmentOperator) + lookingAhead { + isCandidateInfixRHS || token == NEWLINE && { nextToken() ; isCandidateInfixRHS } + } + } + + /* A leading symbolic or backquoted identifier is treated as an infix operator + * if it is followed by at least one ' ' and a token on the same line + * that can start an expression. + */ + def isLeadingInfixOperator = + allowLeadingInfixOperators && + isOperator && + (isWhitespace(ch) || ch == LF) && + followedByInfixRHS + /* Insert NEWLINE or NEWLINES if * - we are after a newline * - we are within a { ... 
} or on toplevel (wrt sepRegions) @@ -404,12 +526,23 @@ trait Scanners extends ScannersCommon { */ if (!applyBracePatch() && afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) && (sepRegions.isEmpty || sepRegions.head == RBRACE)) { - next copyFrom this - offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset - token = if (pastBlankLine()) NEWLINES else NEWLINE + if (pastBlankLine()) insertNL(NEWLINES) + else if (!isLeadingInfixOperator) insertNL(NEWLINE) + else if (!currentRun.sourceFeatures.leadingInfix) { + val msg = + sm"""Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). + |To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator.""" + if (currentRun.isScala3) warning(offset, msg, Scala3Migration) + else if (infixMigration) deprecationWarning(msg, "2.13.2") + insertNL(NEWLINE) + } } + postProcessToken() +// print("["+this+"]") + } // end nextToken - // Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE + // Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE + def postProcessToken(): Unit = if (token == CASE) { prev copyFrom this val nextLastOffset = charOffset - 1 @@ -436,22 +569,8 @@ trait Scanners extends ScannersCommon { next copyFrom this this copyFrom prev } - } else if (token == COMMA) { - // SIP-27 Trailing Comma (multi-line only) support - // If a comma is followed by a new line & then a closing paren, bracket or brace - // then it is a trailing comma and is ignored - val saved = new ScannerData {} copyFrom this - fetchToken() - if (afterLineEnd() && (token == RPAREN || token == RBRACKET || token == RBRACE)) { - /* skip the trailing comma */ - } else if (token == EOF) { // e.g. 
when the REPL is parsing "val List(x, y, _*," - /* skip the trailing comma */ - } else this copyFrom saved } -// print("["+this+"]") - } - /** Is current token first one after a newline? */ private def afterLineEnd(): Boolean = lastOffset < lineStartOffset && @@ -483,6 +602,7 @@ trait Scanners extends ScannersCommon { /** read next token, filling TokenData fields of Scanner. */ + @tailrec protected final def fetchToken(): Unit = { offset = charOffset - 1 (ch: @switch) match { @@ -505,8 +625,7 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getIdentRest() - if (ch == '"' && token == IDENTIFIER) - token = INTERPOLATIONID + if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID case '<' => // is XMLSTART? def fetchLT() = { val last = if (charOffset >= 2) buf(charOffset - 2) else ' ' @@ -541,8 +660,11 @@ trait Scanners extends ScannersCommon { nextChar() ch match { case 'x' | 'X' => base = 16 ; nextChar() - case _ => base = 8 // single decimal zero, perhaps + case 'b' | 'B' => base = 2 ; nextChar() + case _ => base = 10 ; putChar('0') } + if (base != 10 && !isNumberSeparator(ch) && digit2int(ch, base) < 0) + syntaxError("invalid literal number") } fetchLeadingZero() getNumber() @@ -610,30 +732,51 @@ trait Scanners extends ScannersCommon { } syntaxError(msg) } + /** Either at closing quote of charlit + * or run the op and take it as a (deprecated) Symbol identifier. 
+ */ + def charLitOrSymbolAfter(op: () => Unit): Unit = + if (ch == '\'') { + nextChar() + token = CHARLIT + setStrVal() + } else { + op() + token = SYMBOLLIT + strVal = name.toString + } def fetchSingleQuote() = { nextChar() - if (isIdentifierStart(ch)) - charLitOr(getIdentRest) - else if (isOperatorPart(ch) && (ch != '\\')) - charLitOr(getOperatorRest) - else if (!isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) { + if (isIdentifierStart(ch)) { + putChar(ch) + nextChar() + charLitOrSymbolAfter(() => getIdentRest()) + } + else if (isOperatorPart(ch) && (ch != '\\')) { + putChar(ch) + nextChar() + charLitOrSymbolAfter(() => getOperatorRest()) + } + else if (!isAtEnd && (ch != SU && ch != CR && ch != LF)) { val isEmptyCharLit = (ch == '\'') getLitChar() if (ch == '\'') { - if (isEmptyCharLit && currentRun.isScala213) + if (isEmptyCharLit) syntaxError("empty character literal (use '\\'' for single quote)") else { - if (isEmptyCharLit) - deprecationWarning("deprecated syntax for character literal (use '\\'' for single quote)", "2.12.2") nextChar() - token = CHARLIT - setStrVal() + if (cbuf.length != 1) + syntaxError("illegal codepoint in Char constant: " + cbuf.toString.map(c => f"\\u$c%04x").mkString("'", "", "'")) + else { + token = CHARLIT + setStrVal() + } } - } else if (isEmptyCharLit) { + } + else if (isEmptyCharLit) syntaxError("empty character literal") - } else { + else unclosedCharLit() - } } else unclosedCharLit() } @@ -665,7 +808,7 @@ trait Scanners extends ScannersCommon { case SU => if (isAtEnd) { bidiChars.foreach { case (char, offset) => - syntaxError(offset, f"found unicode bidirectional character '\\u$char%04x'; use a unicode escape instead") + syntaxError(offset, f"found unicode bidirectional character '\\u$char%04x'; in a string or character literal, use a unicode escape instead") } token = EOF } @@ -676,19 +819,31 @@ trait Scanners extends ScannersCommon { case _ => def fetchOther() = { if (ch == '\u21D2') { + val msg = "The 
unicode arrow `⇒` is deprecated, use `=>` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code." + deprecationWarning(msg, "2.13.0", + runReporting.codeAction("replace unicode arrow", unit.position(offset).withEnd(offset + 1), "=>", msg, expected = Some(("⇒", unit)))) nextChar(); token = ARROW } else if (ch == '\u2190') { + val msg = "The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code." + deprecationWarning(msg, "2.13.0", + runReporting.codeAction("replace unicode arrow", unit.position(offset).withEnd(offset + 1), "<-", msg, expected = Some(("←", unit)))) nextChar(); token = LARROW - } else if (Character.isUnicodeIdentifierStart(ch)) { + } else if (isUnicodeIdentifierStart(ch)) { putChar(ch) nextChar() getIdentRest() + if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID } else if (isSpecial(ch)) { putChar(ch) nextChar() getOperatorRest() + } else if (isSupplementary(ch, isUnicodeIdentifierStart)) { + getIdentRest() + if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID + } else if (isSupplementary(ch, isSpecial)) { + getOperatorRest() } else { - syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'") + syntaxError(f"illegal character '\\u$ch%04x'") nextChar() } } @@ -729,6 +884,7 @@ trait Scanners extends ScannersCommon { else syntaxError("unclosed quoted identifier") } + @tailrec private def getIdentRest(): Unit = (ch: @switch) match { case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | @@ -751,19 +907,24 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getIdentOrOperatorRest() - case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true! + case ' ' | LF | // optimize for common whitespace + SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true! 
finishNamed() case _ => - if (Character.isUnicodeIdentifierPart(ch)) { + if (isUnicodeIdentifierPart(ch)) { putChar(ch) nextChar() getIdentRest() - } else { - finishNamed() } + else if (isSupplementary(ch, isUnicodeIdentifierPart)) + getIdentRest() + else + finishNamed() } + @tailrec private def getOperatorRest(): Unit = (ch: @switch) match { + case ' ' | LF => finishNamed() // optimize case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -775,24 +936,12 @@ trait Scanners extends ScannersCommon { else { putChar('/'); getOperatorRest() } case _ => if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() } + else if (isSupplementary(ch, isSpecial)) getOperatorRest() else finishNamed() } - private def getIdentOrOperatorRest(): Unit = { - if (isIdentifierPart(ch)) - getIdentRest() - else ch match { - case '~' | '!' | '@' | '#' | '%' | - '^' | '*' | '+' | '-' | '<' | - '>' | '?' | ':' | '=' | '&' | - '|' | '\\' | '/' => - getOperatorRest() - case _ => - if (isSpecial(ch)) getOperatorRest() - else finishNamed() - } - } - + private def getIdentOrOperatorRest(): Unit = + if (isIdentifierPart(ch) || isSupplementary(ch, isIdentifierPart)) getIdentRest() else getOperatorRest() // Literals ----------------------------------------------------------------- @@ -805,13 +954,39 @@ trait Scanners extends ScannersCommon { } else unclosedStringLit() } - private def unclosedStringLit(): Unit = syntaxError("unclosed string literal") + private def unclosedStringLit(seenEscapedQuoteInInterpolation: Boolean = false): Unit = { + val note = + if (seenEscapedQuoteInInterpolation) "; note that `\\\"` no longer closes single-quoted interpolated string literals since 2.13.6, you can use a triple-quoted string instead" + else "" + syntaxError(s"unclosed string literal$note") + } + + private def replaceUnicodeEscapesInTriple(): Unit = + if (strVal != null) + try { + val processed = StringContext.processUnicode(strVal) + if (processed != strVal && 
!currentRun.sourceFeatures.unicodeEscapesRaw) { + val diffPosition = processed.zip(strVal).zipWithIndex.collectFirst { case ((r, o), i) if r != o => i }.getOrElse(processed.length - 1) + val pos = offset + 3 + diffPosition + def msg(what: String) = s"Unicode escapes in triple quoted strings are $what; use the literal character instead" + if (currentRun.isScala3) + warning(pos, msg("ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw)"), WarningCategory.Scala3Migration) + else + deprecationWarning(pos, msg("deprecated"), since="2.13.2") + strVal = processed + } + } catch { + case ue: StringContext.InvalidUnicodeEscapeException => + if (!currentRun.sourceFeatures.unicodeEscapesRaw) + syntaxError(offset + 3 + ue.index, ue.getMessage()) + } @tailrec private def getRawStringLit(): Unit = { if (ch == '\"') { nextRawChar() if (isTripleQuote()) { setStrVal() + replaceUnicodeEscapesInTriple() token = STRINGLIT } else getRawStringLit() @@ -824,7 +999,8 @@ trait Scanners extends ScannersCommon { } } - @tailrec private def getStringPart(multiLine: Boolean): Unit = { + // for interpolated strings + @tailrec private def getStringPart(multiLine: Boolean, seenEscapedQuote: Boolean = false): Unit = { def finishStringPart() = { setStrVal() token = STRINGPART @@ -838,18 +1014,46 @@ trait Scanners extends ScannersCommon { setStrVal() token = STRINGLIT } else - getStringPart(multiLine) + getStringPart(multiLine, seenEscapedQuote) } else { nextChar() setStrVal() token = STRINGLIT } + } else if (ch == '\\' && !multiLine) { + putChar(ch) + nextRawChar() + val q = ch == '"' + if (q || ch == '\\') { + putChar(ch) + nextRawChar() + } + getStringPart(multiLine, seenEscapedQuote || q) } else if (ch == '$') { + @tailrec def getInterpolatedIdentRest(): Unit = + if (ch != SU && isUnicodeIdentifierPart(ch)) { + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else if (atSupplementary(ch, isUnicodeIdentifierPart)) { + putChar(ch) + nextRawChar() + putChar(ch) + 
nextRawChar() + getInterpolatedIdentRest() + } else { + next.token = IDENTIFIER + next.name = newTermName(cbuf.toCharArray) + cbuf.clear() + val idx = next.name.start - kwOffset + if (idx >= 0 && idx < kwArray.length) + next.token = kwArray(idx) + } nextRawChar() - if (ch == '$') { + if (ch == '$' || ch == '"') { putChar(ch) nextRawChar() - getStringPart(multiLine) + getStringPart(multiLine, seenEscapedQuote) } else if (ch == '{') { finishStringPart() nextRawChar() @@ -858,34 +1062,31 @@ trait Scanners extends ScannersCommon { finishStringPart() nextRawChar() next.token = USCORE - } else if (Character.isUnicodeIdentifierStart(ch)) { + } else if (isUnicodeIdentifierStart(ch)) { finishStringPart() - do { - putChar(ch) - nextRawChar() - } while (ch != SU && Character.isUnicodeIdentifierPart(ch)) - next.token = IDENTIFIER - next.name = newTermName(cbuf.toString) - cbuf.clear() - val idx = next.name.start - kwOffset - if (idx >= 0 && idx < kwArray.length) { - next.token = kwArray(idx) - } + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else if (atSupplementary(ch, isUnicodeIdentifierStart)) { + finishStringPart() + getInterpolatedIdentRest() } else { - syntaxError(s"invalid string interpolation $$$ch, expected: $$$$, $$identifier or $${expression}") + val expectations = "$$, $\", $identifier or ${expression}" + syntaxError(charOffset - 2, s"invalid string interpolation $$$ch, expected: $expectations") + putChar('$') + getStringPart(multiLine, seenEscapedQuote) // consume rest of interpolation, taking $ as literal } } else { - val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF))) - if (isUnclosedLiteral) { + val isUnclosedLiteral = (ch == SU || (!multiLine && (ch == CR || ch == LF))) + if (isUnclosedLiteral) if (multiLine) incompleteInputError("unclosed multi-line string literal") else - unclosedStringLit() - } + unclosedStringLit(seenEscapedQuote) else { putChar(ch) nextRawChar() - getStringPart(multiLine) + 
getStringPart(multiLine, seenEscapedQuote) } } } @@ -915,58 +1116,90 @@ trait Scanners extends ScannersCommon { false } - /** copy current character into cbuf, interpreting any escape sequences, - * and advance to next character. + /** Copy current character into cbuf, interpreting any escape sequences, + * and advance to next character. Surrogate pairs are consumed (see check + * at fetchSingleQuote), but orphan surrogate is allowed. */ protected def getLitChar(): Unit = if (ch == '\\') { nextChar() - if ('0' <= ch && ch <= '7') { - val start = charOffset - 2 - val leadch: Char = ch - var oct: Int = digit2int(ch, 8) - nextChar() - if ('0' <= ch && ch <= '7') { - oct = oct * 8 + digit2int(ch, 8) - nextChar() - if (leadch <= '3' && '0' <= ch && ch <= '7') { - oct = oct * 8 + digit2int(ch, 8) - nextChar() - } - } - val alt = if (oct == LF) "\\n" else "\\u%04x" format oct - def msg(what: String) = s"Octal escape literals are $what, use $alt instead." - if (settings.future) - syntaxError(start, msg("unsupported")) - else - deprecationWarning(start, msg("deprecated"), "2.11.0") - putChar(oct.toChar) - } else { - ch match { - case 'b' => putChar('\b') - case 't' => putChar('\t') - case 'n' => putChar('\n') - case 'f' => putChar('\f') - case 'r' => putChar('\r') - case '\"' => putChar('\"') - case '\'' => putChar('\'') - case '\\' => putChar('\\') - case _ => invalidEscape() - } - nextChar() - } - } else { + charEscape() + } else if (!isSupplementary(ch, _ => true, strict = false)) { putChar(ch) nextChar() } + private def charEscape(): Unit = { + var bump = true + ch match { + case 'b' => putChar('\b') + case 't' => putChar('\t') + case 'n' => putChar('\n') + case 'f' => putChar('\f') + case 'r' => putChar('\r') + case '\"' => putChar('\"') + case '\'' => putChar('\'') + case '\\' => putChar('\\') + case 'u' => bump = uEscape() + case x if '0' <= x && x <= '7' => bump = octalEscape() + case _ => invalidEscape() + } + if (bump) nextChar() + } + + private def uEscape(): 
Boolean = { + while (ch == 'u') nextChar() + var codepoint = 0 + var digitsRead = 0 + while (digitsRead < 4) { + if (digitsRead > 0) nextChar() + val digit = digit2int(ch, 16) + digitsRead += 1 + if (digit >= 0) { + codepoint = codepoint << 4 + codepoint += digit + } + else { + invalidUnicodeEscape(digitsRead) + return false + } + } + val found = codepoint.asInstanceOf[Char] + putChar(found) + true + } + + private def octalEscape(): Boolean = { + val start = charOffset - 2 + val leadch: Char = ch + var oct: Int = digit2int(ch, 8) + nextChar() + if ('0' <= ch && ch <= '7') { + oct = oct * 8 + digit2int(ch, 8) + nextChar() + if (leadch <= '3' && '0' <= ch && ch <= '7') { + oct = oct * 8 + digit2int(ch, 8) + nextChar() + } + } + val alt = if (oct == LF) "\\n" else f"\\u$oct%04x" + syntaxError(start, s"octal escape literals are unsupported: use $alt instead") + putChar(oct.toChar) + false + } + protected def invalidEscape(): Unit = { syntaxError(charOffset - 1, "invalid escape character") putChar(ch) } + protected def invalidUnicodeEscape(n: Int): Unit = { + syntaxError(charOffset - n, "invalid unicode escape") + putChar(ch) + } + private def getLitChars(delimiter: Char) = { - while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) + while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF)) getLitChar() } @@ -974,28 +1207,31 @@ trait Scanners extends ScannersCommon { * if one is present. 
*/ protected def getFraction(): Unit = { - token = DOUBLELIT - while ('0' <= ch && ch <= '9') { + while ('0' <= ch && ch <= '9' || isNumberSeparator(ch)) { putChar(ch) nextChar() } + checkNoTrailingSeparator() if (ch == 'e' || ch == 'E') { val lookahead = lookaheadReader lookahead.nextChar() if (lookahead.ch == '+' || lookahead.ch == '-') { lookahead.nextChar() } - if ('0' <= lookahead.ch && lookahead.ch <= '9') { + if ('0' <= lookahead.ch && lookahead.ch <= '9' || isNumberSeparator(lookahead.ch)) { putChar(ch) nextChar() if (ch == '+' || ch == '-') { putChar(ch) nextChar() } - while ('0' <= ch && ch <= '9') { + if (isNumberSeparator(ch)) + syntaxError(offset + cbuf.length, "illegal separator") + while ('0' <= ch && ch <= '9' || isNumberSeparator(ch)) { putChar(ch) nextChar() } + checkNoTrailingSeparator() } token = DOUBLELIT } @@ -1007,72 +1243,75 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() token = FLOATLIT - } + } else + token = DOUBLELIT checkNoLetter() setStrVal() } - /** Convert current strVal to char value + /** Convert current strVal to char value. */ - def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0 + def charVal: Char = if (!strVal.isEmpty) strVal.charAt(0) else 0 /** Convert current strVal, base to long value. * This is tricky because of max negative value. * - * Conversions in base 10 and 16 are supported. As a permanent migration - * path, attempts to write base 8 literals except `0` emit a verbose error. + * Conversions in base 2, 10 and 16 are supported. + * Number separators are skipped on the fly. */ def intVal(negated: Boolean): Long = { - def malformed: Long = { - if (base == 8) syntaxError("Decimal integer literals may not have a leading zero. 
(Octal syntax is obsolete.)") - else syntaxError("malformed integer number") - 0 - } - def tooBig: Long = { - syntaxError("integer number too large") - 0 - } def intConvert: Long = { - val len = strVal.length - if (len == 0) { - if (base != 8) syntaxError("missing integer number") // e.g., 0x; - 0 - } else { + def convertIt: Long = { + def malformed: Long = { syntaxError("malformed integer number") ; 0 } + def tooBig: Long = { syntaxError("integer number too large") ; 0 } val divider = if (base == 10) 1 else 2 val limit: Long = if (token == LONGLIT) Long.MaxValue else Int.MaxValue @tailrec def convert(value: Long, i: Int): Long = - if (i >= len) value + if (i >= strVal.length) value else { - val d = digit2int(strVal charAt i, base) - if (d < 0) - malformed - else if (value < 0 || - limit / (base / divider) < value || - limit - (d / divider) < value * (base / divider) && - !(negated && limit == value * base - 1 + d)) - tooBig - else - convert(value * base + d, i + 1) + val c = strVal.charAt(i) + if (isNumberSeparator(c)) convert(value, i + 1) + else { + val d = digit2int(c, base) + if (d < 0) + malformed + else if (value < 0 || + limit / (base / divider) < value || + limit - (d / divider) < value * (base / divider) && + !(negated && limit == value * base - 1 + d)) + tooBig + else + convert(value * base + d, i + 1) + } } val result = convert(0, 0) - if (base == 8) malformed else if (negated) -result else result + if (negated) -result else result + } + if (strVal.isEmpty) { + syntaxError("missing integer number") // e.g., 0x; previous error shadows this one + 0L + } else { + if (settings.warnOctalLiteral.value && base == 10 && strVal.charAt(0) == '0' && strVal.length() > 1) + deprecationWarning("Decimal integer literals should not have a leading zero. 
(Octal syntax is obsolete.)", since="2.10") + convertIt } } if (token == CHARLIT && !negated) charVal.toLong else intConvert } - def intVal: Long = intVal(negated = false) + @`inline` def intVal: Long = intVal(negated = false) private val zeroFloat = raw"[0.]+(?:[eE][+-]?[0-9]+)?[fFdD]?".r /** Convert current strVal, base to float value. */ def floatVal(negated: Boolean): Float = { + val text = removeNumberSeparators(strVal) try { - val value: Float = java.lang.Float.parseFloat(strVal) + val value: Float = java.lang.Float.parseFloat(text) if (value > Float.MaxValue) syntaxError("floating point number too large") - if (value == 0.0f && !zeroFloat.pattern.matcher(strVal).matches) + if (value == 0.0f && !zeroFloat.pattern.matcher(text).matches) syntaxError("floating point number too small") if (negated) -value else value } catch { @@ -1082,16 +1321,17 @@ trait Scanners extends ScannersCommon { } } - def floatVal: Float = floatVal(negated = false) + @`inline` def floatVal: Float = floatVal(negated = false) /** Convert current strVal, base to double value. 
*/ def doubleVal(negated: Boolean): Double = { + val text = removeNumberSeparators(strVal) try { - val value: Double = java.lang.Double.parseDouble(strVal) + val value: Double = java.lang.Double.parseDouble(text) if (value > Double.MaxValue) syntaxError("double precision floating point number too large") - if (value == 0.0d && !zeroFloat.pattern.matcher(strVal).matches) + if (value == 0.0d && !zeroFloat.pattern.matcher(text).matches) syntaxError("double precision floating point number too small") if (negated) -value else value } catch { @@ -1101,63 +1341,62 @@ trait Scanners extends ScannersCommon { } } - def doubleVal: Double = doubleVal(negated = false) + @`inline` def doubleVal: Double = doubleVal(negated = false) - def checkNoLetter(): Unit = { - if (isIdentifierPart(ch) && ch >= ' ') - syntaxError("Invalid literal number") - } + @`inline` def checkNoLetter(): Unit = if (isIdentifierPart(ch) && ch >= ' ') syntaxError("invalid literal number") + + @`inline` private def isNumberSeparator(c: Char): Boolean = c == '_' + + @`inline` private def removeNumberSeparators(s: String): String = if (s.indexOf('_') > 0) s.replace("_", "") else s + + @`inline` private def numberOffset = offset + (if (base == 10) 0 else 2) + + // disallow trailing numeric separator char + def checkNoTrailingSeparator(): Unit = + if (!cbuf.isEmpty && isNumberSeparator(cbuf.last)) + syntaxError(numberOffset + cbuf.length - 1, "illegal separator") /** Read a number into strVal. * - * The `base` can be 8, 10 or 16, where base 8 flags a leading zero. - * For ints, base 8 is legal only for the case of exactly one zero. + * The `base` can be 2, 10 or 16. 
*/ protected def getNumber(): Unit = { - // consume digits of a radix - def consumeDigits(radix: Int): Unit = - while (digit2int(ch, radix) >= 0) { + // consume digits of the current radix + def consumeDigits(): Unit = + while (isNumberSeparator(ch) || digit2int(ch, base) >= 0) { putChar(ch) nextChar() } - // adding decimal point is always OK because `Double valueOf "0."` is OK + // at dot with digit following def restOfNonIntegralNumber(): Unit = { putChar('.') - if (ch == '.') nextChar() + nextChar() getFraction() } - // after int: 5e7f, 42L, 42.toDouble but not 42b. Repair 0d. + // 1l is an acknowledged bad practice + def lintel(): Unit = { + val msg = "Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead" + val o = numberOffset + cbuf.length + if (ch == 'l') deprecationWarning(o, msg, since="2.13.0", + runReporting.codeAction("use uppercase L", unit.position(o).withEnd(o + 1), "L", msg, expected = Some(("l", unit)))) + } + // after int: 5e7f, 42L, 42.toDouble but not 42b. def restOfNumber(): Unit = { ch match { case 'e' | 'E' | 'f' | 'F' | - 'd' | 'D' => if (cbuf.isEmpty) putChar('0'); restOfNonIntegralNumber() - case 'l' | 'L' => token = LONGLIT ; setStrVal() ; nextChar() + 'd' | 'D' => getFraction() + case 'l' | 'L' => lintel() ; token = LONGLIT ; setStrVal() ; nextChar() case _ => token = INTLIT ; setStrVal() ; checkNoLetter() } } // consume leading digits, provisionally an Int - consumeDigits(if (base == 16) 16 else 10) + consumeDigits() - val detectedFloat: Boolean = base != 16 && ch == '.' 
&& isDigit(lookaheadReader.getc) - if (detectedFloat) restOfNonIntegralNumber() else restOfNumber() - } + checkNoTrailingSeparator() - /** Parse character literal if current character is followed by \', - * or follow with given op and return a symbol literal token - */ - def charLitOr(op: () => Unit): Unit = { - putChar(ch) - nextChar() - if (ch == '\'') { - nextChar() - token = CHARLIT - setStrVal() - } else { - op() - token = SYMBOLLIT - strVal = name.toString - } + val detectedFloat: Boolean = base == 10 && ch == '.' && isDigit(lookaheadReader.getc()) + if (detectedFloat) restOfNonIntegralNumber() else restOfNumber() } // Errors ----------------------------------------------------------------- @@ -1171,7 +1410,8 @@ trait Scanners extends ScannersCommon { /** generate an error at the current token offset */ def syntaxError(msg: String): Unit = syntaxError(offset, msg) - def deprecationWarning(msg: String, since: String): Unit = deprecationWarning(offset, msg, since) + def deprecationWarning(msg: String, since: String): Unit = deprecationWarning(msg, since, Nil) + def deprecationWarning(msg: String, since: String, actions: List[CodeAction]): Unit = deprecationWarning(offset, msg, since, actions) /** signal an error where the input ended in the middle of a token */ def incompleteInputError(msg: String): Unit = { @@ -1286,7 +1526,7 @@ trait Scanners extends ScannersCommon { nme.HASHkw -> HASH, nme.ATkw -> AT, nme.MACROkw -> IDENTIFIER, - nme.THENkw -> IDENTIFIER) + ) private var kwOffset: Offset = -1 private val kwArray: Array[Token] = { @@ -1299,6 +1539,8 @@ trait Scanners extends ScannersCommon { final val softModifierNames = Set(nme.open, nme.infix) + final val scala3Keywords = Set(nme.`enum`, nme.`export`, nme.`given`, nme.`then`) + // Token representation ---------------------------------------------------- /** Returns the string representation of given token. 
*/ @@ -1325,7 +1567,7 @@ trait Scanners extends ScannersCommon { case COMMA => "','" case CASECLASS => "case class" case CASEOBJECT => "case object" - case XMLSTART => "$XMLSTART$<" + case XMLSTART => s"$$XMLSTART$$<" case _ => (token2name get token) match { case Some(name) => "'" + name + "'" @@ -1339,25 +1581,30 @@ trait Scanners extends ScannersCommon { * Useful for looking inside source files that are not currently compiled to see what's there */ class SourceFileScanner(val source: SourceFile) extends Scanner { + def unit = global.currentUnit + val buf = source.content - override val decodeUni: Boolean = !settings.nouescape // suppress warnings, throw exception on errors def warning(off: Offset, msg: String, category: WarningCategory): Unit = () - def deprecationWarning(off: Offset, msg: String, since: String): Unit = () + def deprecationWarning(off: Offset, msg: String, since: String, actions: List[CodeAction]): Unit = () def error(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) } /** A scanner over a given compilation unit */ - class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { + class UnitScanner(override val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) - override def warning(off: Offset, msg: String, category: WarningCategory): Unit = runReporting.warning(unit.position(off), msg, category, site = "") - override def deprecationWarning(off: Offset, msg: String, since: String) = runReporting.deprecationWarning(unit.position(off), msg, since, site = "", origin = "") - override def error(off: Offset, msg: String) = reporter.error(unit.position(off), msg) - override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg) + override 
def warning(off: Offset, msg: String, category: WarningCategory): Unit = + runReporting.warning(unit.position(off), msg, category, site = "") + override def deprecationWarning(off: Offset, msg: String, since: String, actions: List[CodeAction]) = + runReporting.deprecationWarning(unit.position(off), msg, since, site = "", origin = "", actions) + override def error(off: Offset, msg: String) = + reporter.error(unit.position(off), msg) + override def incompleteInputError(off: Offset, msg: String) = + currentRun.parsing.incompleteInputError(unit.position(off), msg) private var bracePatches: List[BracePatch] = patches @@ -1390,7 +1637,7 @@ trait Scanners extends ScannersCommon { // println("applying brace patch "+offset)//DEBUG if (patch.inserted) { next copyFrom this - error(offset, "Missing closing brace `}' assumed here") + error(offset, "Missing closing brace `}` assumed here") token = RBRACE true } else { @@ -1408,7 +1655,7 @@ trait Scanners extends ScannersCommon { /** The source code with braces and line starts annotated with [NN] showing the index */ private def markedSource = { val code = unit.source.content - val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet + val braces = code.indices.filter(idx => "{}\n" contains code(idx)).toSet val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx)) mapped.mkString("") } @@ -1427,7 +1674,7 @@ trait Scanners extends ScannersCommon { var lineCount = 1 var lastOffset = 0 var indent = 0 - val oldBalance = scala.collection.mutable.Map[Int, Int]() + val oldBalance = mutable.Map[Int, Int]() def markBalance() = for ((k, v) <- balance) oldBalance(k) = v markBalance() @@ -1514,6 +1761,7 @@ trait Scanners extends ScannersCommon { var tabSeen = false def line(offset: Offset): Int = { + @tailrec def findLine(lo: Int, hi: Int): Int = { val mid = (lo + hi) / 2 if (offset < lineStart(mid)) findLine(lo, mid - 1) @@ -1544,10 +1792,10 @@ trait Scanners extends ScannersCommon { 
def insertRBrace(): List[BracePatch] = { def insert(bps: List[BracePair]): List[BracePatch] = bps match { case List() => patches - case (bp @ BracePair(loff, lindent, roff, rindent, nested)) :: bps1 => + case BracePair(loff, lindent, roff, rindent, nested) :: bps1 => if (lindent <= rindent) insert(bps1) else { -// println("patch inside "+bp+"/"+line(loff)+"/"+lineStart(line(loff))+"/"+lindent"/"+rindent)//DEBUG +// println("patch inside "+bps.head+"/"+line(loff)+"/"+lineStart(line(loff))+"/"+lindent"/"+rindent)//DEBUG val patches1 = insert(nested) if (patches1 ne patches) patches1 else { diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index 0f4135afbd39..b9aeaa629b85 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,10 +13,11 @@ package scala.tools.nsc package ast.parser +import scala.annotation.unused import scala.collection.mutable -import symtab.Flags.MUTABLE import scala.reflect.internal.util.ListOfNil import scala.reflect.internal.util.StringOps.splitWhere +import symtab.Flags.MUTABLE /** This class builds instance of `Tree` that represent XML. * @@ -27,41 +28,40 @@ import scala.reflect.internal.util.StringOps.splitWhere * who understands this part better wants to give it a shot, please do! 
* * @author Burak Emir - * @version 1.0 */ -abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { +abstract class SymbolicXMLBuilder(@unused p: Parsers#Parser, @unused preserveWS: Boolean) { val global: Global import global._ private[parser] var isPattern: Boolean = _ private object xmltypes extends TypeNames { - val _Comment: NameType = "Comment" - val _Elem: NameType = "Elem" - val _EntityRef: NameType = "EntityRef" - val _Group: NameType = "Group" - val _MetaData: NameType = "MetaData" - val _NamespaceBinding: NameType = "NamespaceBinding" - val _NodeBuffer: NameType = "NodeBuffer" - val _PCData: NameType = "PCData" - val _PrefixedAttribute: NameType = "PrefixedAttribute" - val _ProcInstr: NameType = "ProcInstr" - val _Text: NameType = "Text" - val _Unparsed: NameType = "Unparsed" - val _UnprefixedAttribute: NameType = "UnprefixedAttribute" + val _Comment: NameType = nameType("Comment") + val _Elem: NameType = nameType("Elem") + val _EntityRef: NameType = nameType("EntityRef") + val _Group: NameType = nameType("Group") + val _MetaData: NameType = nameType("MetaData") + val _NamespaceBinding: NameType = nameType("NamespaceBinding") + val _NodeBuffer: NameType = nameType("NodeBuffer") + val _PCData: NameType = nameType("PCData") + val _PrefixedAttribute: NameType = nameType("PrefixedAttribute") + val _ProcInstr: NameType = nameType("ProcInstr") + val _Text: NameType = nameType("Text") + val _Unparsed: NameType = nameType("Unparsed") + val _UnprefixedAttribute: NameType = nameType("UnprefixedAttribute") } private object xmlterms extends TermNames { - val _Null: NameType = "Null" - val __Elem: NameType = "Elem" - val _PCData: NameType = "PCData" - val __Text: NameType = "Text" - val _buf: NameType = "$buf" - val _md: NameType = "$md" - val _plus: NameType = "$amp$plus" - val _scope: NameType = "$scope" - val _tmpscope: NameType = "$tmpscope" - val _xml: NameType = "xml" + val _Null: NameType = nameType("Null") + val __Elem: NameType = 
nameType("Elem") + val _PCData: NameType = nameType("PCData") + val __Text: NameType = nameType("Text") + val _buf: NameType = nameType("$buf") + val _md: NameType = nameType("$md") + val _plus: NameType = nameType("$amp$plus") + val _scope: NameType = nameType("$scope") + val _tmpscope: NameType = nameType("$tmpscope") + val _xml: NameType = nameType("xml") } import xmltypes.{ @@ -107,7 +107,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { attrs: Tree, scope: Tree, empty: Boolean, - children: Seq[Tree]): Tree = + children: scala.collection.Seq[Tree]): Tree = { def starArgs = if (children.isEmpty) Nil @@ -144,8 +144,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { protected def Comment(txt: Tree) = New(_scala_xml_Comment, LL(txt)) protected def ProcInstr(target: Tree, txt: Tree) = New(_scala_xml_ProcInstr, LL(target, txt)) - /** @todo: attributes */ - def makeXMLpat(pos: Position, n: String, args: Seq[Tree]): Tree = { + /** @todo attributes */ + def makeXMLpat(pos: Position, n: String, args: scala.collection.Seq[Tree]): Tree = { val (prepat, labpat) = splitPrefix(n) match { case (Some(pre), rest) => (const(pre), const(rest)) case _ => (wild, const(n)) @@ -157,7 +157,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { case _: Literal => makeTextPat(t) case _ => t } - protected def convertToTextPat(buf: Seq[Tree]): List[Tree] = + protected def convertToTextPat(buf: scala.collection.Seq[Tree]): List[Tree] = (buf map convertToTextPat).toList def parseAttribute(pos: Position, s: String): Tree = { @@ -176,9 +176,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { } /** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. 
*/ - def makeXMLseq(pos: Position, args: Seq[Tree]) = { - val buffer = ValDef(NoMods, _buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil)) - val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t))) + def makeXMLseq(pos: Position, args: scala.collection.Seq[Tree]) = { + val buffer = atPos(pos)(ValDef(NoMods, _buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil))) + val applies = args.filterNot(isEmptyText).map(t => atPos(t.pos)(Apply(Select(Ident(_buf), _plus), List(t)))) atPos(pos)( gen.mkBlock(buffer :: applies.toList ::: List(Ident(_buf))) ) } @@ -190,13 +190,13 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { } /** Various node constructions. */ - def group(pos: Position, args: Seq[Tree]): Tree = + def group(pos: Position, args: scala.collection.Seq[Tree]): Tree = atPos(pos)( New(_scala_xml_Group, LL(makeXMLseq(pos, args))) ) def unparsed(pos: Position, str: String): Tree = atPos(pos)( New(_scala_xml_Unparsed, LL(const(str))) ) - def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = { + def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: scala.collection.Seq[Tree]): Tree = { def handleNamespaceBinding(pre: String, z: String): Tree = { def mkAssign(t: Tree): Tree = Assign( Ident(_tmpscope), diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index 308abe7f3972..4183cc163715 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,7 @@ package ast.parser import javac._ -/** An nsc sub-component. +/** The compiler sub-component that defines the parser phase, which converts source code into Trees. */ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParsers with Scanners with JavaParsers with JavaScanners { import global._ @@ -38,12 +38,12 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse md.mods.isSynthetic || md.mods.isParamAccessor || nme.isConstructorName(md.name) - || (md.name containsName nme.ANON_CLASS_NAME) + || md.name.containsName(nme.ANON_CLASS_NAME) ) override def traverse(t: Tree): Unit = t match { case md: MemberDef if prune(md) => - case md @ PackageDef(_, stats) => traverseTrees(stats) + case PackageDef(_, stats) => traverseTrees(stats) case md: ImplDef => onMember(md) ; lower(traverseTrees(md.impl.body)) case md: ValOrDefDef => onMember(md) ; lower(traverse(md.rhs)) case _ => super.traverse(t) @@ -74,7 +74,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse def onMember(md: MemberDef) = println(outputFn(md)) // It recognizes "sed" and "anything else". 
- def show(style: String) { + def show(style: String): Unit = { if (style == "sed") { outputFn = outputForSed traverse(unit.body) @@ -98,7 +98,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse override val checkable = false override val keepsTypeParams = false - def apply(unit: CompilationUnit) { + def apply(unit: CompilationUnit): Unit = { informProgress("parsing " + unit) // if the body is already filled in, don't overwrite it // otherwise compileLate is going to overwrite bodies of synthetic source files @@ -106,7 +106,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse unit.body = initialUnitBody(unit) if (settings.Ymemberpos.isSetByUser) - new MemberPosReporter(unit) show (style = settings.Ymemberpos.value) + new MemberPosReporter(unit).show(style = settings.Ymemberpos.value) } } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala index 56dbf3db7494..09a61d5680fd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -28,7 +28,6 @@ object Tokens extends CommonTokens { /** modifiers */ final val IMPLICIT = 40 final val OVERRIDE = 41 - final val SEALED = 45 final val LAZY = 55 final val MACRO = 57 diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index c99b8b5490d5..8f0eb3c13e1d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -55,11 +55,6 @@ abstract class TreeBuilder { def makeSelfDef(name: TermName, tpt: Tree): ValDef = ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree) - /** Tree for `od op`, start is start0 if od.pos is borked. */ - def makePostfixSelect(start: Int, end: Int, od: Tree, op: Name): Tree = { - atPos(r2p(start, end, end + op.length)) { Select(od, op.encode) }.updateAttachment(PostfixAttachment) - } - /** Create tree representing a while loop */ def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = { val lname = freshTermName(nme.WHILE_PREFIX) @@ -91,16 +86,27 @@ abstract class TreeBuilder { /** Create tree for a pattern alternative */ def makeAlternative(ts: List[Tree]): Tree = { def alternatives(t: Tree): List[Tree] = t match { - case Alternative(ts) => ts - case _ => List(t) + case Alternative(alts) => alts + case _ => List(t) } - Alternative(ts flatMap alternatives) + Alternative(ts.flatMap(alternatives)) } /** Create tree for case definition rhs> */ def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef = CaseDef(gen.patvarTransformer.transform(pat), guard, rhs) + /** At parser, rejigger non-case catch expression. + * + * Match is eliminated by unwrapping. Other expression + * becomes a single CaseDef with empty pattern and + * expr tree as RHS. 
+ */ + def makeMatchFromExpr(catchExpr: Tree): List[CaseDef] = catchExpr match { + case Match(EmptyTree, cases) => cases + case _ => CaseDef(EmptyTree, EmptyTree, catchExpr) :: Nil + } + /** Creates tree representing: * { case x: Throwable => * val catchFn = catchExpr @@ -123,6 +129,18 @@ abstract class TreeBuilder { makeCaseDef(pat, EmptyTree, body) } + /** Creates tree representing: + * { case x: Throwable => catchExpr(x) } + */ + def makeCatchFromFunc(catchFn: Tree): CaseDef = { + val binder = freshTermName() + val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable))) + val body = atPos(catchFn.pos.makeTransparent)(Block( + Apply(Select(catchFn, nme.apply), List(Ident(binder))), + )) + makeCaseDef(pat, EmptyTree, body) + } + /** Create a tree representing the function type (argtpes) => restpe */ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = gen.mkFunctionTypeTree(argtpes, restpe) @@ -131,11 +149,11 @@ abstract class TreeBuilder { if (contextBounds.isEmpty) vparamss else { val mods = Modifiers(if (owner.isTypeName) PARAMACCESSOR | LOCAL | PRIVATE else PARAM) - def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT | SYNTHETIC, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree) - val evidenceParams = contextBounds map makeEvidenceParam + def makeEvidenceParam(tpt: Tree) = atPos(tpt.pos)(ValDef(mods | IMPLICIT | SYNTHETIC, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)) + val evidenceParams = contextBounds.map(makeEvidenceParam) - val vparamssLast = if(vparamss.nonEmpty) vparamss.last else Nil - if(vparamssLast.nonEmpty && vparamssLast.head.mods.hasFlag(IMPLICIT)) + val vparamssLast = if (vparamss.nonEmpty) vparamss.last else Nil + if (vparamssLast.nonEmpty && vparamssLast.head.mods.hasFlag(IMPLICIT)) vparamss.init ::: List(evidenceParams ::: vparamssLast) else vparamss ::: List(evidenceParams) diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala 
b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala index 7c197f174224..158c1d4ec3d2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,6 +12,8 @@ package scala.tools.nsc.ast.parser.xml +import scala.collection.BufferedIterator + /** This is not a public trait - it contains common code shared * between the library level XML parser and the compiler's. * All members should be accessed through those. @@ -20,7 +22,7 @@ private[scala] trait MarkupParserCommon { import Utility._ import scala.reflect.internal.Chars.SU - protected def unreachable = scala.sys.error("Cannot be reached.") + protected def unreachable = throw new IllegalStateException("Cannot be reached.") type PositionType // Int, Position type ElementType // NodeSeq, Tree @@ -69,7 +71,7 @@ private[scala] trait MarkupParserCommon { /** [42] '<' xmlEndTag ::= '<' '/' Name S? 
'>' */ - def xEndTag(startName: String) { + def xEndTag(startName: String): Unit = { xToken('/') if (xName != startName) errorNoEnd(startName) @@ -149,11 +151,11 @@ private[scala] trait MarkupParserCommon { x } - def xToken(that: Char) { + def xToken(that: Char): Unit = { if (ch == that) nextch() else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch)) } - def xToken(that: Seq[Char]) { that foreach xToken } + def xToken(that: Iterable[Char]): Unit = {that foreach xToken } /** scan [S] '=' [S]*/ def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() } @@ -166,9 +168,6 @@ private[scala] trait MarkupParserCommon { if (isSpace(ch)) { nextch(); xSpaceOpt() } else xHandleError(ch, "whitespace expected") - /** Apply a function and return the passed value */ - def returning[T](x: T)(f: T => Unit): T = { f(x); x } - /** Execute body with a variable saved and restored after execution */ def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = { val saved = getter diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala index 911ae51fee96..54dba43e83a1 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index 9cbdf1dcadab..7656eec9e704 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -57,7 +57,7 @@ trait JavaPlatform extends Platform { */ def isMaybeBoxed(sym: Symbol) = { (sym == ObjectClass) || - (sym == JavaSerializableClass) || + (sym == SerializableClass) || (sym == ComparableClass) || (sym isNonBottomSubClass BoxedNumberClass) || (sym isNonBottomSubClass BoxedCharacterClass) || diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index a69e79d4c4f1..c701ba581b8f 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,7 +26,7 @@ trait Platform { private[nsc] def classPath: ClassPath /** Update classpath with a substitution that maps entries to entries */ - def updateClassPath(subst: Map[ClassPath, ClassPath]) + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit /** Any platform-specific phases. */ def platformPhases: List[SubComponent] @@ -41,7 +41,7 @@ trait Platform { * Tells whether a class with both a binary and a source representation * (found in classpath and in sourcepath) should be re-compiled. Behaves * on the JVM similar to javac, i.e. if the source file is newer than the classfile, - * a re-compile is triggered. On .NET by contrast classfiles always take precedence. + * a re-compile is triggered. 
*/ def needCompile(bin: AbstractFile, src: AbstractFile): Boolean } diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala index ab739e1868bb..716a7e598dd6 100644 --- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala +++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -199,7 +199,7 @@ abstract class ScalaPrimitives { private val primitives: mutable.Map[Symbol, Int] = new mutable.HashMap() /** Initialize the primitive map */ - def init() { + def init(): Unit = { primitives.clear() // scala.Any addPrimitive(Any_==, EQ) @@ -442,12 +442,12 @@ abstract class ScalaPrimitives { } /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int) { + def addPrimitive(s: Symbol, code: Int): Unit = { assert(!(primitives contains s), "Duplicate primitive " + s) primitives(s) = code } - def addPrimitives(cls: Symbol, method: Name, code: Int) { + def addPrimitives(cls: Symbol, method: Name, code: Int): Unit = { val alts = (cls.info member method).alternatives if (alts.isEmpty) inform(s"Unknown primitive method $cls.$method") @@ -554,7 +554,7 @@ abstract class ScalaPrimitives { val arrayParent = tpe :: tpe.parents collectFirst { case TypeRef(_, ArrayClass, elem :: Nil) => elem } - arrayParent getOrElse sys.error(fun.fullName + " : " + (tpe :: tpe.baseTypeSeq.toList).mkString(", ")) + arrayParent getOrElse abort(fun.fullName + " : " + (tpe :: tpe.baseTypeSeq.toList).mkString(", ")) } code match { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala index 403001f4515b..45bf38465b39 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala +++ 
b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,7 @@ package scala.tools.nsc.backend.jvm import java.io.{PrintWriter, StringWriter} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.tools.asm.tree._ import scala.tools.asm.util._ import scala.tools.asm.{Attribute, ClassReader, ClassWriter} @@ -96,7 +96,7 @@ object AsmUtils { node } - def main(args: Array[String]): Unit = args.par.foreach { classFileName => + def main(args: Array[String]): Unit = args /*.par*/.foreach { classFileName => val node = zapScalaClassAttrs(sortClassMembers(classFromBytes(classBytes(classFileName)))) val pw = new PrintWriter(classFileName + ".asm") @@ -168,7 +168,7 @@ object AsmUtils { /** * Returns a human-readable representation of the given instruction sequence. */ - def textify(insns: InsnList): String = textify(insns.iterator().asScala) + def textify(insns: InsnList): String = textify(insns.iterator.asScala) /** * Run ASM's CheckClassAdapter over a class. Returns None if no problem is found, otherwise diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index df90a9096170..d6d858e85840 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,19 +13,19 @@ package scala.tools.nsc package backend.jvm -import scala.annotation.switch +import scala.annotation.{ switch, tailrec } import scala.collection.mutable.ListBuffer +import scala.reflect.internal.Flags import scala.tools.asm import scala.tools.asm.Opcodes -import scala.tools.asm.tree.{MethodInsnNode, MethodNode} -import scala.tools.nsc.backend.jvm.BCodeHelpers.{InvokeStyle, TestOp} +import scala.tools.asm.tree.{ InvokeDynamicInsnNode, MethodInsnNode, MethodNode } +import scala.tools.nsc.backend.jvm.BCodeHelpers.{ InvokeStyle, TestOp } import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.backend.jvm.GenBCode._ /* * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 + * @author Miguel Garcia, https://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ * */ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { @@ -33,7 +33,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { import bTypes._ import coreBTypes._ import definitions._ - import genBCode.postProcessor.backendUtils.addIndyLambdaImplMethod + import genBCode.postProcessor.backendUtils.{addIndyLambdaImplMethod, classfileVersion} import genBCode.postProcessor.callGraph.{inlineAnnotatedCallsites, noInlineAnnotatedCallsites} /* @@ -42,9 +42,9 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) { /* ---------------- helper utils for generating methods and code ---------------- */ - def emit(opc: Int) { mnode.visitInsn(opc) } + def emit(opc: Int): Unit = { mnode.visitInsn(opc) } - def emitZeroOf(tk: BType) { + def emitZeroOf(tk: BType): Unit = { tk match { case BOOL => bc.boolconst(false) case BYTE | @@ -64,7 +64,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * Two main cases: `tree` is an assignment, * otherwise an `adapt()` to UNIT is performed if needed. 
*/ - def genStat(tree: Tree) { + def genStat(tree: Tree): Unit = { lineNumber(tree) tree match { case Assign(lhs @ Select(qual, _), rhs) => @@ -79,30 +79,32 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case Assign(lhs, rhs) => val s = lhs.symbol val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - genLoad(rhs, tk) - lineNumber(tree) - bc.store(idx, tk) + + rhs match { + case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && x.isShortRange => + lineNumber(tree) + bc.iinc(idx, x.intValue) + + case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => + lineNumber(tree) + bc.iinc(idx, -x.intValue) + + case _ => + genLoad(rhs, tk) + lineNumber(tree) + bc.store(idx, tk) + } case _ => genLoad(tree, UNIT) } } - def genThrow(expr: Tree): BType = { - val thrownKind = tpeTK(expr) - // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable. - // Similarly for scala.Nothing (again, as defined in src/library-aux). - assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(jlThrowableRef).get) - genLoad(expr, thrownKind) - lineNumber(expr) - emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level. - - srNothingRef // always returns the same, the invoker should know :) - } - /* Generate code for primitive arithmetic operations. */ def genArithmeticOp(tree: Tree, code: Int): BType = { - val Apply(fun @ Select(larg, _), args) = tree + val Apply(fun @ Select(larg, _), args) = tree: @unchecked var resKind = tpeTK(larg) assert(resKind.isNumericType || (resKind == BOOL), @@ -157,7 +159,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { /* Generate primitive array operations. 
*/ def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = { - val Apply(Select(arrayObj, _), args) = tree + val Apply(Select(arrayObj, _), args) = tree: @unchecked val k = tpeTK(arrayObj) genLoad(arrayObj, k) val elementType = typeOfArrayOp.getOrElse(code, abort(s"Unknown operation on arrays: $tree code: $code")) @@ -171,7 +173,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = k.asArrayBType.componentType bc.aload(elementType) } else if (scalaPrimitives.isArraySet(code)) { - val List(a1, a2) = args + val List(a1, a2) = args: @unchecked genLoad(a1, INT) genLoad(a2, elementType) generatedType = UNIT @@ -227,7 +229,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genPrimitiveOp(tree: Apply, expectedType: BType): BType = { val sym = tree.symbol - val Apply(fun @ Select(receiver, _), _) = tree + val Apply(fun @ Select(receiver, _), _) = tree: @unchecked val code = scalaPrimitives.getPrimitive(sym, receiver.tpe) import scalaPrimitives.{isArithmeticOp, isArrayOp, isComparisonOp, isLogicalOp} @@ -264,7 +266,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { ) } - def genLoad(tree: Tree) { + def genLoad(tree: Tree): Unit = { genLoad(tree, tpeTK(tree)) } @@ -285,7 +287,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { generatedDest = dest case ValDef(_, nme.THIS, _, _) => - debuglog("skipping trivial assign to _$this: " + tree) + debuglog(s"skipping trivial assign to ${nme.THIS}: $tree") case ValDef(_, _, _, rhs) => val sym = tree.symbol @@ -333,24 +335,25 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { bc goTo label generatedDest = LoadDestination.Jump(label) case JumpDestination.LoadArgTo(paramType, jumpDest) => - val List(arg) = args + assert(args.sizeIs == 1, s"unexpected argument count for LoadArgTo label $sym") + val arg = args.head genLoadTo(arg, paramType, jumpDest) generatedDest = jumpDest } - case app : Apply => + case app: Apply => generatedType = genApply(app, 
expectedType) - case app @ ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: staticAndDynamicArgs) => + case ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: staticAndDynamicArgs) => val numDynamicArgs = qual.symbol.info.params.length val (staticArgs, dynamicArgs) = staticAndDynamicArgs.splitAt(staticAndDynamicArgs.length - numDynamicArgs) val bootstrapDescriptor = staticHandleFromSymbol(bootstrapMethodRef) - val bootstrapArgs = staticArgs.map({case t @ Literal(c: Constant) => bootstrapMethodArg(c, t.pos)}) - val descriptor = methodBTypeFromMethodType(qual.symbol.info, false) + val bootstrapArgs = staticArgs.map({case t @ Literal(c: Constant) => bootstrapMethodArg(c, t.pos) case x => throw new MatchError(x)}) + val descriptor = methodBTypeFromMethodType(qual.symbol.info, isConstructor=false) genLoadArguments(dynamicArgs, qual.symbol.info.params.map(param => typeToBType(param.info))) mnode.visitInvokeDynamicInsn(qual.symbol.name.encoded, descriptor.descriptor, bootstrapDescriptor, bootstrapArgs : _*) - case ApplyDynamic(qual, args) => sys.error("No invokedynamic support yet.") + case ApplyDynamic(qual, args) => abort("No invokedynamic support yet.") case This(qual) => val symIsModuleClass = tree.symbol.isModuleClass @@ -370,7 +373,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } - case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) => + case Select(Ident(nme.EMPTY_PACKAGE_NAME), _) => assert(tree.symbol.isModule, s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.pos}") genLoadModule(tree) @@ -378,7 +381,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val sym = tree.symbol generatedType = symInfoTK(sym) val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier - def genLoadQualUnlessElidable() { if (!qualSafeToElide) { genLoadQualifier(tree) } } + def genLoadQualUnlessElidable(): Unit = { if (!qualSafeToElide) { genLoadQualifier(tree, drop = true) } } // 
receiverClass is used in the bytecode to access the field. using sym.owner may lead to IllegalAccessError, scala/bug#4283 def receiverClass = qualifier.tpe.typeSymbol if (sym.isModule) { @@ -458,7 +461,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val thrownKind = expectedType // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable. // Similarly for scala.Nothing (again, as defined in src/library-aux). - assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(jlThrowableRef).get) + assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(jlThrowableRef).get, "Require throwable") emit(asm.Opcodes.ATHROW) } } @@ -496,36 +499,37 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * must-single-thread * Otherwise it's safe to call from multiple threads. */ - def genConstant(const: Constant) { + def genConstant(const: Constant): Unit = { + (const.tag: @switch) match { - case BooleanTag => bc.boolconst(const.booleanValue) + case BooleanTag => bc.boolconst(const.booleanValue) - case ByteTag => bc.iconst(const.byteValue) - case ShortTag => bc.iconst(const.shortValue) - case CharTag => bc.iconst(const.charValue) - case IntTag => bc.iconst(const.intValue) + case ByteTag => bc.iconst(const.byteValue) + case ShortTag => bc.iconst(const.shortValue) + case CharTag => bc.iconst(const.charValue) + case IntTag => bc.iconst(const.intValue) - case LongTag => bc.lconst(const.longValue) - case FloatTag => bc.fconst(const.floatValue) - case DoubleTag => bc.dconst(const.doubleValue) + case LongTag => bc.lconst(const.longValue) + case FloatTag => bc.fconst(const.floatValue) + case DoubleTag => bc.dconst(const.doubleValue) - case UnitTag => () + case UnitTag => () - case StringTag => + case StringTag => assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` 
mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag - case NullTag => emit(asm.Opcodes.ACONST_NULL) + case NullTag => emit(asm.Opcodes.ACONST_NULL) - case ClazzTag => + case ClazzTag => val tp = typeToBType(const.typeValue) // classOf[Int] is transformed to Integer.TYPE by CleanUp assert(!tp.isPrimitive, s"expected class type in classOf[T], found primitive type $tp") mnode.visitLdcInsn(tp.toASMType) - case EnumTag => - val sym = const.symbolValue + case EnumTag => + val sym = const.symbolValue val ownerName = internalName(sym.owner) val fieldName = sym.javaSimpleName.toString val fieldDesc = typeToBType(sym.tpe.underlying).descriptor @@ -545,20 +549,22 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { // no need to call index() over lblDf.params, on first access that magic happens (moreover, no LocalVariableTable entries needed for them). // If we get inside genLabelDefTo, no one has or will register a non-regular jump destination for this LabelDef - val JumpDestination.Regular(label) = getJumpDestOrCreate(lblDf.symbol) - markProgramPoint(label) - lineNumber(lblDf) - genLoadTo(lblDf.rhs, expectedType, dest) + (getJumpDestOrCreate(lblDf.symbol): @unchecked) match { + case JumpDestination.Regular(label) => + markProgramPoint(label) + lineNumber(lblDf) + genLoadTo(lblDf.rhs, expectedType, dest) + } } - private def genReturn(r: Return) { + private def genReturn(r: Return): Unit = { val Return(expr) = r cleanups match { case Nil => // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run, and reset `shouldEmitCleanup`. 
genLoadTo(expr, returnType, LoadDestination.Return) - case nextCleanup :: rest => + case nextCleanup :: _ => genLoad(expr, returnType) lineNumber(r) val saveReturnValue = (returnType != UNIT) @@ -590,7 +596,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case _ => abort(s"Unexpected type application $fun[sym: ${sym.fullName}] in: $app") } - val Select(obj, _) = fun + val Select(obj, _) = fun: @unchecked val l = tpeTK(obj) val r = tpeTK(targs.head) @@ -629,28 +635,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = genTypeApply() case Apply(fun @ Select(sup @ Super(superQual, _), _), args) => - def initModule() { - // we initialize the MODULE$ field immediately after the super ctor - if (!isModuleInitialized && - jMethodName == INSTANCE_CONSTRUCTOR_NAME && - fun.symbol.javaSimpleName.toString == INSTANCE_CONSTRUCTOR_NAME && - isStaticModuleClass(claszSymbol)) { - isModuleInitialized = true - mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) - mnode.visitFieldInsn( - asm.Opcodes.PUTSTATIC, - thisBType.internalName, - strMODULE_INSTANCE_FIELD, - thisBTypeDescriptor - ) - } - } - - // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not jsut ALOAD_0) + // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) genLoad(superQual) genLoadArguments(args, paramTKs(app)) generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.pos, sup.tpe.typeSymbol) - initModule() // 'new' constructor call: Note: since constructors are // thought to return an instance of what they construct, @@ -677,7 +665,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize) * however the above does not enter a TypeName for each nested arrays in chrs. 
*/ - for (i <- args.length until dims) elemKind = ArrayBType(elemKind) + for (_ <- args.length until dims) elemKind = ArrayBType(elemKind) } argsSize match { case 1 => bc newarray elemKind @@ -734,7 +722,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { if (invokeStyle.hasInstance) genLoadQualifier(fun) genLoadArguments(args, paramTKs(app)) - val Select(qual, _) = fun // fun is a Select, also checked in genLoadQualifier + val Select(qual, _) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier if (sym == definitions.Array_clone) { // Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call // as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac. @@ -746,7 +734,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { // // Note that using `Object.clone()` would work as well, but only because the JVM // relaxes protected access specifically if the receiver is an array: - // http://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439 + // https://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439 // Example: `class C { override def clone(): Object = "hi" }` // Emitting `def f(c: C) = c.clone()` as `Object.clone()` gives a VerifyError. val target: String = tpeTK(qual).asRefBType.classOrArrayType @@ -773,10 +761,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { // Check if the Apply tree has an InlineAnnotatedAttachment, added by the typer // for callsites marked `f(): @inline/noinline`. For nullary calls, the attachment // is on the Select node (not on the Apply node added by UnCurry). 
+ @tailrec def recordInlineAnnotated(t: Tree): Unit = { if (t.hasAttachment[InlineAnnotatedAttachment]) lastInsn match { case m: MethodInsnNode => - if (app.hasAttachment[NoInlineCallsiteAttachment.type]) noInlineAnnotatedCallsites += m + if (t.hasAttachment[NoInlineCallsiteAttachment.type]) noInlineAnnotatedCallsites += m else inlineAnnotatedCallsites += m case _ => } else t match { @@ -793,7 +782,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } // end of genApply() private def genArrayValue(av: ArrayValue): BType = { - val ArrayValue(tpt @ TypeTree(), elems) = av + val ArrayValue(tpt @ TypeTree(), elems) = (av: @unchecked) val elmKind = tpeTK(tpt) val generatedType = ArrayBType(elmKind) @@ -845,7 +834,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { var switchBlocks: List[Tuple2[asm.Label, Tree]] = Nil // collect switch blocks and their keys, but don't emit yet any switch-block. - for (caze @ CaseDef(pat, guard, body) <- tree.cases) { + for (CaseDef(pat, guard, body) <- tree.cases) { assert(guard == EmptyTree, guard) val switchBlockPoint = new asm.Label switchBlocks ::= ((switchBlockPoint, body)) @@ -990,13 +979,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } - def adapt(from: BType, to: BType) { - if (!from.conformsTo(to).get) { - to match { - case UNIT => bc drop from - case _ => bc.emitT2T(from, to) - } - } else if (from.isNothingType) { + def adapt(from: BType, to: BType): Unit = { + if (from.isNothingType) { /* There are two possibilities for from.isNothingType: emitting a "throw e" expressions and * loading a (phantom) value of type Nothing. * @@ -1009,7 +993,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * Note: The two verifiers (old: type inference, new: type checking) have different * requirements. 
Very briefly: * - * Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at + * Old (https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at * each program point, no matter what branches were taken to get there * - Stack is same size and has same typed values * - Local and stack values need to have consistent types @@ -1017,7 +1001,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * instructions after an ATHROW. For example, there can be another ATHROW (without * loading another throwable first). * - * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) + * New (https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) * - Requires consistent stack map frames. GenBCode always generates stack frames. * - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting * correct frames after an ATHROW is probably complex, so ASM uses the following strategy: @@ -1056,26 +1040,30 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { */ if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) { bc drop from - emit(asm.Opcodes.ACONST_NULL) + if (to != UNIT) + emit(asm.Opcodes.ACONST_NULL) + } else if (to == UNIT) { + bc drop from + } + } else if (!from.conformsTo(to).get) { + to match { + case UNIT => bc drop from + case _ => bc.emitT2T(from, to) } - } - else (from, to) match { - case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG) - case _ => () } } /* Emit code to Load the qualifier of `tree` on top of the stack. */ - def genLoadQualifier(tree: Tree) { + def genLoadQualifier(tree: Tree, drop: Boolean = false): Unit = { lineNumber(tree) tree match { - case Select(qualifier, _) => genLoad(qualifier) + case Select(qualifier, _) => genLoad(qualifier, if (drop) UNIT else tpeTK(qualifier)) case _ => abort(s"Unknown qualifier $tree") } } /* Generate code that loads args into label parameters. 
*/ - def genLoadLabelArguments(args: List[Tree], lblDef: LabelDef, gotoPos: Position) { + def genLoadLabelArguments(args: List[Tree], lblDef: LabelDef, gotoPos: Position): Unit = { val aps = { val params: List[Symbol] = lblDef.params.map(_.symbol) @@ -1103,16 +1091,14 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } - def genLoadArguments(args: List[Tree], btpes: List[BType]) { - foreach2(args, btpes) { case (arg, btpe) => genLoad(arg, btpe) } - } + def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = foreach2(args, btpes)(genLoad) def genLoadModule(tree: Tree): BType = { val module = ( if (!tree.symbol.isPackageClass) tree.symbol else tree.symbol.info.packageObject match { case NoSymbol => abort(s"scala/bug#5604: Cannot use package as value: $tree") - case s => abort(s"scala/bug#5604: found package class where package object expected: $tree") + case _ => abort(s"scala/bug#5604: found package class where package object expected: $tree") } ) lineNumber(tree) @@ -1120,22 +1106,30 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { symInfoTK(module) } - def genLoadModule(module: Symbol) { + def genLoadModule(module: Symbol): Unit = { def inStaticMethod = methSymbol != null && methSymbol.isStaticMember if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) { mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) } else { val mbt = symInfoTK(module).asClassBType - mnode.visitFieldInsn( - asm.Opcodes.GETSTATIC, - mbt.internalName /* + "$" */ , - strMODULE_INSTANCE_FIELD, - mbt.descriptor // for nostalgics: typeToBType(module.tpe).descriptor - ) + def visitAccess(container: ClassBType, name: String): Unit = { + mnode.visitFieldInsn( + asm.Opcodes.GETSTATIC, + container.internalName, + name, + mbt.descriptor + ) + } + if (module.isScala3Defined && module.hasAttachment[DottyEnumSingleton.type]) { // TODO [tasty]: dotty enum singletons are not modules. 
+ val enumCompanion = symInfoTK(module.originalOwner).asClassBType + visitAccess(enumCompanion, module.rawname.toString) + } else { + visitAccess(mbt, strMODULE_INSTANCE_FIELD) + } } } - def genConversion(from: BType, to: BType, cast: Boolean) { + def genConversion(from: BType, to: BType, cast: Boolean): Unit = { if (cast) { bc.emitT2T(from, to) } else { bc drop from @@ -1143,7 +1137,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } - def genCast(to: RefBType, cast: Boolean) { + def genCast(to: RefBType, cast: Boolean): Unit = { if (cast) { bc checkCast to } else { bc isInstance to } } @@ -1152,7 +1146,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def isPrimitive(fun: Symbol): Boolean = scalaPrimitives.isPrimitive(fun) /* Generate coercion denoted by "code" */ - def genCoercion(code: Int) { + def genCoercion(code: Int): Unit = { import scalaPrimitives._ (code: @switch) match { case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () @@ -1163,44 +1157,110 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } + /* Generate string concatenation + * + * On JDK 8: create and append using `StringBuilder` + * On JDK 9+: use `invokedynamic` with `StringConcatFactory` + */ def genStringConcat(tree: Tree): BType = { lineNumber(tree) liftStringConcat(tree) match { - // Optimization for expressions of the form "" + x. We can avoid the StringBuilder. + // Optimization for expressions of the form "" + x case List(Literal(Constant("")), arg) => genLoad(arg, ObjectRef) genCallMethod(String_valueOf, InvokeStyle.Static, arg.pos) case concatenations => - val approxBuilderSize = concatenations.map { - case Literal(Constant(s: String)) => s.length - case Literal(c @ Constant(value)) if c.isNonUnitAnyVal => String.valueOf(c).length - case _ => - // could add some guess based on types of primitive args. - // or, we could stringify all the args onto the stack, compute the exact size of - // the stringbuffer. 
- // or, just let http://openjdk.java.net/jeps/280 (or a re-implementation thereof in our 2.13.x stdlib) do all the hard work at link time - 0 - }.sum - bc.genStartConcat(tree.pos, approxBuilderSize) - def isEmptyString(t: Tree) = t match { - case Literal(Constant("")) => true - case _ => false - } - for (elem <- concatenations if !isEmptyString(elem)) { - val loadedElem = elem match { + + val concatArguments = concatenations.view + .filter { + case Literal(Constant("")) => false // empty strings are no-ops in concatenation + case _ => true + } + .map { case Apply(boxOp, value :: Nil) if currentRun.runDefinitions.isBox(boxOp.symbol) => // Eliminate boxing of primitive values. Boxing is introduced by erasure because // there's only a single synthetic `+` method "added" to the string class. value + case other => other + } + .toList + + // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower + if (classfileVersion.get < asm.Opcodes.V9) { + + // Estimate capacity needed for the string builder + val approxBuilderSize = concatArguments.view.map { + case Literal(Constant(s: String)) => s.length + case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length + case _ => 0 + }.sum + bc.genNewStringBuilder(tree.pos, approxBuilderSize) + + for (elem <- concatArguments) { + val elemType = tpeTK(elem) + genLoad(elem, elemType) + bc.genStringBuilderAppend(elemType, elem.pos) + } + bc.genStringBuilderEnd(tree.pos) + } else { - case _ => elem + /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. 
If + * the string concatenation is longer (unlikely), we spill into multiple calls + */ + val MaxIndySlots = 200 + val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument + val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant + + val recipe = new StringBuilder() + val argTypes = Seq.newBuilder[asm.Type] + val constVals = Seq.newBuilder[String] + var totalArgSlots = 0 + var countConcats = 1 // ie. 1 + how many times we spilled + + for (elem <- concatArguments) { + val tpe = tpeTK(elem) + val elemSlots = tpe.size + + // Unlikely spill case + if (totalArgSlots + elemSlots >= MaxIndySlots) { + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + countConcats += 1 + totalArgSlots = 0 + recipe.setLength(0) + argTypes.clear() + constVals.clear() + } + + elem match { + case Literal(Constant(s: String)) => + if (s.contains(TagArg) || s.contains(TagConst)) { + totalArgSlots += elemSlots + recipe.append(TagConst) + constVals += s + } else { + recipe.append(s) + } + + case _ => + totalArgSlots += elemSlots + recipe.append(TagArg) + val tpe = tpeTK(elem) + argTypes += tpe.toASMType + genLoad(elem, tpe) + } + } + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + + // If we spilled, generate one final concat + if (countConcats > 1) { + bc.genIndyStringConcat( + TagArg.toString * countConcats, + Seq.fill(countConcats)(StringRef.toASMType), + Seq.empty + ) } - val elemType = tpeTK(loadedElem) - genLoad(loadedElem, elemType) - bc.genConcat(elemType, loadedElem.pos) } - bc.genEndConcat(tree.pos) } StringRef } @@ -1255,7 +1315,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { import InvokeStyle._ if (style == Super) { if (receiverClass.isTrait && !method.isJavaDefined) { - val staticDesc = MethodBType(typeToBType(method.owner.info) :: bmType.argumentTypes, bmType.returnType).descriptor + val args = new Array[BType](bmType.argumentTypes.length + 1) + args(0) = 
typeToBType(method.owner.info) + bmType.argumentTypes.copyToArray(args, 1) + val staticDesc = MethodBType(args, bmType.returnType).descriptor val staticName = traitSuperAccessorName(method) bc.invokestatic(receiverName, staticName, staticDesc, isInterface, pos) } else { @@ -1267,9 +1330,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } else { val opc = style match { - case Static => Opcodes.INVOKESTATIC - case Special => Opcodes.INVOKESPECIAL - case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL + case Static => Opcodes.INVOKESTATIC + case Special => Opcodes.INVOKESPECIAL + case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL + case x @ Super => throw new MatchError(x) // ?!? + case x => throw new MatchError(x) } bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface, pos) } @@ -1291,7 +1356,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val result = ListBuffer[Tree]() def loop(tree: Tree): Unit = { tree match { - case Apply(fun@Select(larg, method), rarg :: Nil) + case Apply(fun@Select(larg, _), rarg :: Nil) if (isPrimitive(fun.symbol) && scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT) => loop(larg) @@ -1305,7 +1370,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ - private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false) { + @tailrec + private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { if (targetIfNoJump == success) genCJUMP(failure, success, op.negate, tk, targetIfNoJump, negated = !negated) else { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT @@ -1326,7 +1392,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ - private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false) { + @tailrec + private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate, tk, targetIfNoJump, negated = !negated) else { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT @@ -1335,6 +1402,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { op match { // references are only compared with EQ and NE case TestOp.EQ => bc emitIFNULL success case TestOp.NE => bc emitIFNONNULL success + case x => throw new MatchError(x) } } else { def useCmpG = if (negated) op == TestOp.GT || op == TestOp.GE else op == TestOp.LT || op == TestOp.LE @@ -1377,9 +1445,9 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * Generate code for conditional expressions. * The jump targets success/failure of the test are `then-target` and `else-target` resp. 
*/ - private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label) { + private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { - def genComparisonOp(l: Tree, r: Tree, code: Int) { + def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = { val op = testOpForPrimitive(code) val nonNullSide = if (scalaPrimitives.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null if (nonNullSide != null) { @@ -1406,10 +1474,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { import scalaPrimitives._ // lhs and rhs of test - lazy val Select(lhs, _) = fun + lazy val Select(lhs, _) = fun: @unchecked val rhs = if (args.isEmpty) EmptyTree else args.head // args.isEmpty only for ZNOT - def genZandOrZor(and: Boolean) { + def genZandOrZor(and: Boolean): Unit = { // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited). val keepGoing = new asm.Label @@ -1467,7 +1535,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * @param l left-hand-side of the '==' * @param r right-hand-side of the '==' */ - def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label, pos: Position) { + def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label, pos: Position): Unit = { /* True if the equality comparison is between values that require the use of the rich equality * comparator (scala.runtime.BoxesRunTime.equals). 
This is the case when either side of the @@ -1545,41 +1613,56 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genLoadTry(tree: Try): BType def genInvokeDynamicLambda(canLMF: delambdafy.LambdaMetaFactoryCapable) = { - import canLMF.{ lambdaTarget => originalTarget, _ } + import canLMF._ - val lambdaTarget = originalTarget.attachments.get[JustMethodReference].map(_.lambdaTarget).getOrElse(originalTarget) + val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC) def asmType(sym: Symbol) = classBTypeFromSymbol(sym).toASMType - val isInterface = lambdaTarget.owner.isTrait || lambdaTarget.owner.hasJavaAnnotationFlag - val tag = - if (lambdaTarget.isStaticMember) Opcodes.H_INVOKESTATIC - else if (lambdaTarget.isPrivate) Opcodes.H_INVOKESPECIAL - //else if (lambdaTarget.isClassConstructor) Opcodes.H_NEWINVOKESPECIAL // to invoke Foo::new directly - else if (isInterface) Opcodes.H_INVOKEINTERFACE - else Opcodes.H_INVOKEVIRTUAL + val isInterface = lambdaTarget.owner.isTrait val implMethodHandle = - new asm.Handle( - tag, + new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE else asm.Opcodes.H_INVOKEVIRTUAL, classBTypeFromSymbol(lambdaTarget.owner).internalName, lambdaTarget.name.toString, methodBTypeFromSymbol(lambdaTarget).descriptor, /* itf = */ isInterface) - val (capturedParams, lambdaParams) = originalTarget.paramss.head.splitAt(originalTarget.paramss.head.length - arity) - val invokedType = asm.Type.getMethodDescriptor(asmType(functionalInterface), capturedParams.map(sym => typeToBType(sym.info).toASMType): _*) - val constrainedType = MethodBType(lambdaParams.map(p => typeToBType(p.tpe)), typeToBType(lambdaTarget.tpe.resultType)).toASMType + val lambdaTargetParamss = lambdaTarget.paramss + val numCaptured = lambdaTargetParamss.head.length - arity + val invokedType = { + val numArgs = if (isStaticMethod) numCaptured else 1 + numCaptured + val argsArray: Array[asm.Type] = 
new Array[asm.Type](numArgs) + var i = 0 + if (! isStaticMethod) { + argsArray(0) = typeToBType(lambdaTarget.owner.info).toASMType + i = 1 + } + var xs = lambdaTargetParamss.head + while (i < numArgs && (!xs.isEmpty)) { + argsArray(i) = typeToBType(xs.head.info).toASMType + i += 1 + xs = xs.tail + } + asm.Type.getMethodDescriptor(asmType(functionalInterface), argsArray:_*) + } + val lambdaParams = lambdaTargetParamss.head.drop(numCaptured) + val lambdaParamsBTypes = BType.newArray(lambdaParams.size) + mapToArray(lambdaParams, lambdaParamsBTypes, 0)(symTpeToBType) + val constrainedType = MethodBType(lambdaParamsBTypes, typeToBType(lambdaTarget.tpe.resultType)).toASMType val samMethodType = methodBTypeFromSymbol(sam).toASMType - val markers = if (addScalaSerializableMarker) classBTypeFromSymbol(definitions.SerializableClass).toASMType :: Nil else Nil val overriddenMethods = bridges.map(b => methodBTypeFromSymbol(b).toASMType) - visitInvokeDynamicInsnLMF(bc.jmethod, sam.name.toString, invokedType, samMethodType, implMethodHandle, constrainedType, overriddenMethods, isSerializable, markers) - if (isSerializable) - addIndyLambdaImplMethod(cnode.name, implMethodHandle) + visitInvokeDynamicInsnLMF(bc.jmethod, sam.name.toString, invokedType, samMethodType, implMethodHandle, constrainedType, overriddenMethods, isSerializable) + if (isSerializable) { + val indy = bc.jmethod.instructions.getLast.asInstanceOf[InvokeDynamicInsnNode] + addIndyLambdaImplMethod(cnode.name, bc.jmethod, indy, implMethodHandle) + } } } + private val symTpeToBType = (p: Symbol) => typeToBType(p.tpe) // OPT hoisted to save allocation + private def visitInvokeDynamicInsnLMF(jmethod: MethodNode, samName: String, invokedType: String, samMethodType: asm.Type, implMethodHandle: asm.Handle, instantiatedMethodType: asm.Type, overriddenMethodTypes: Seq[asm.Type], - serializable: Boolean, markerInterfaces: Seq[asm.Type]): Unit = { - import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_MARKERS, 
FLAG_SERIALIZABLE} + serializable: Boolean): Unit = { + import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_SERIALIZABLE} // scala/bug#10334: make sure that a lambda object for `T => U` has a method `apply(T)U`, not only the `(Object)Object` // version. Using the lambda a structural type `{def apply(t: T): U}` causes a reflective lookup for this method. val needsGenericBridge = samMethodType != instantiatedMethodType @@ -1592,12 +1675,28 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { else overriddenMethodTypes ).distinct.filterNot(_ == samMethodType) - /* We're saving on precious BSM arg slots by not passing 0 as the bridge count */ - val bridgeArgs = if (bridges.nonEmpty) Int.box(bridges.length) +: bridges else Nil - def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0 - val flags = FLAG_MARKERS | flagIf(serializable, FLAG_SERIALIZABLE) | flagIf(bridges.nonEmpty, FLAG_BRIDGES) - val bsmArgs = Seq(samMethodType, implMethodHandle, instantiatedMethodType, Int.box(flags), Int.box(markerInterfaces.length)) ++ markerInterfaces ++ bridgeArgs + val flags = flagIf(serializable, FLAG_SERIALIZABLE) | flagIf(bridges.nonEmpty, FLAG_BRIDGES) + val bsmArgs: Array[AnyRef] = { + val len = if (bridges.isEmpty) 0 else 1 + bridges.length + val bsmArgsArray = new Array[AnyRef](4+len) + bsmArgsArray(0) = samMethodType + bsmArgsArray(1) = implMethodHandle + bsmArgsArray(2) = instantiatedMethodType + bsmArgsArray(3) = Int.box(flags) + if (! 
bridges.isEmpty) { + /* We're saving on precious BSM arg slots by not passing 0 as the bridge count */ + bsmArgsArray(4) = Int.box(bridges.length) + var i = 0 + var bs = bridges + while (i < len-1 && !bs.isEmpty){ + bsmArgsArray(i+5) = bs.head + i += 1 + bs = bs.tail + } + } + bsmArgsArray + } jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryAltMetafactoryHandle, bsmArgs: _*) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index c39ba4684909..eecbe0dc5105 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,26 +14,26 @@ package scala package tools.nsc package backend.jvm -import scala.tools.asm -import GenBCode._ -import BackendReporting._ -import scala.tools.asm.ClassWriter -import scala.tools.nsc.reporters.NoReporter -import PartialFunction.cond +import scala.PartialFunction.cond +import scala.annotation.{tailrec, unused} +import scala.tools.asm, asm.{ClassWriter, Label} import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.backend.jvm.BCodeHelpers.ScalaSigBytes +import scala.tools.nsc.backend.jvm.BackendReporting._ +import scala.tools.nsc.reporters.NoReporter +import scala.util.chaining._ /* * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. 
* - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 + * @author Miguel Garcia, https://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ * */ abstract class BCodeHelpers extends BCodeIdiomatic { import global._ - import definitions._ import bTypes._ import coreBTypes._ + import definitions._ import genBCode.postProcessor.backendUtils /** @@ -64,9 +64,9 @@ abstract class BCodeHelpers extends BCodeIdiomatic { def needsStaticImplMethod(sym: Symbol) = sym.hasAttachment[global.mixer.NeedStaticImpl.type] final def traitSuperAccessorName(sym: Symbol): String = { - val name = sym.javaSimpleName - if (sym.isMixinConstructor) name.toString - else name + nme.NAME_JOIN_STRING + val nameString = sym.javaSimpleName.toString + if (sym.isMixinConstructor) nameString + else nameString + nme.NAME_JOIN_STRING } /** @@ -131,7 +131,8 @@ abstract class BCodeHelpers extends BCodeIdiomatic { } } - def nextEnclosingClass(sym: Symbol): Symbol = + @tailrec + final def nextEnclosingClass(sym: Symbol): Symbol = if (sym.isClass) sym else nextEnclosingClass(nextEnclosing(sym)) @@ -173,6 +174,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { exitingPickler(enclCls.isDerivedValueClass) && method.owner != enclCls } + @tailrec def enclosingMethod(sym: Symbol): Option[Symbol] = { if (sym.isClass || sym == NoSymbol) None else if (sym.isMethod && !sym.isGetter) { @@ -226,7 +228,8 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * object T { def f { object U } } * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here. 
*/ - def isOriginallyStaticOwner(sym: Symbol): Boolean = + @tailrec + final def isOriginallyStaticOwner(sym: Symbol): Boolean = sym.isPackageClass || sym.isModuleClass && isOriginallyStaticOwner(sym.originalOwner) /** @@ -251,64 +254,81 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread + * + * TODO: make this next claim true, if possible + * by generating valid main methods as static in module classes + * not sure what the jvm allows here + * + " You can still run the program by calling it as " + sym.javaSimpleName + " instead." */ object isJavaEntryPoint { /* * must-single-thread */ - def apply(sym: Symbol, csymCompUnit: CompilationUnit): Boolean = { - def fail(msg: String, pos: Position = sym.pos) = { - runReporting.warning(pos, - s"""${sym.name} has a main method with parameter type Array[String], but ${sym.fullName('.')} will not be a runnable program. - | Reason: $msg""".stripMargin, - // TODO: make this next claim true, if possible - // by generating valid main methods as static in module classes - // not sure what the jvm allows here - // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead." - WarningCategory.Other, - sym) - false - } - def failNoForwarder(msg: String) = { - fail(s"$msg, which means no static forwarder can be generated.\n") - } - val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil + def apply(sym: Symbol, @unused csymCompUnit: CompilationUnit, mainClass: Option[String]): Boolean = sym.hasModuleFlag && { + val warn = mainClass.fold(true)(_ == sym.fullNameString) + def warnBadMain(msg: String, pos: Position): Unit = if (warn) runReporting.warning(pos, + s"""|not a valid main method for ${sym.fullName('.')}, + | because $msg. 
+ | To define an entry point, please define the main method as: + | def main(args: Array[String]): Unit + |""".stripMargin, + WarningCategory.Other, + sym) + def warnNoForwarder(msg: String, hasExact: Boolean, mainly: Type) = if (warn) runReporting.warning(sym.pos, + s"""|${sym.name.decoded} has a ${if (hasExact) "valid " else ""}main method${if (mainly != NoType) " "+mainly else ""}, + | but ${sym.fullName('.')} will not have an entry point on the JVM. + | Reason: $msg, which means no static forwarder can be generated. + |""".stripMargin, + WarningCategory.Other, + sym) + val possibles = sym.tpe.nonPrivateMember(nme.main).alternatives val hasApproximate = possibles.exists(m => cond(m.info) { case MethodType(p :: Nil, _) => p.tpe.typeSymbol == definitions.ArrayClass }) - // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. - hasApproximate && { - // Before erasure so we can identify generic mains. - enteringErasure { - val companion = sym.linkedClassOfClass - if (definitions.hasJavaMainMethod(companion)) - failNoForwarder("companion contains its own main method") + // Before erasure so we can identify generic mains. 
+ def check(): Boolean = enteringErasure { + val companion = sym.linkedClassOfClass + val exactly = possibles.find(definitions.isJavaMainMethod) + val hasExact = exactly.isDefined + def alternate = if (possibles.size == 1) possibles.head.info else NoType + + val companionAdvice = + if (companion.isTrait) + Some("companion is a trait") + else if (definitions.hasJavaMainMethod(companion)) + Some("companion contains its own main method") else if (companion.tpe.member(nme.main) != NoSymbol) // this is only because forwarders aren't smart enough yet - failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)") - else if (companion.isTrait) - failNoForwarder("companion is a trait") - // Now either succeed, or issue some additional warnings for things which look like - // attempts to be java main methods. - else (possibles exists definitions.isJavaMainMethod) || { - possibles exists { m => - m.info match { - case PolyType(_, _) => - fail("main methods cannot be generic.") - case MethodType(params, res) => - if (res.typeSymbol :: params exists (_.isAbstractType)) - fail("main methods cannot refer to type parameters or abstract types.", m.pos) - else - definitions.isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos) - case tp => - fail(s"don't know what this is: $tp", m.pos) - } + Some("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)") + else + None + + // some additional warnings for things which look like attempts to be java main methods. 
+ val mainAdvice = + if (hasExact) Nil + else possibles.map { m => + val msg = m.info match { + case PolyType(_, _) => + "main methods cannot be generic" + case MethodType(params, res) if res.typeSymbol :: params exists (_.isAbstractType) => + "main methods cannot refer to type parameters or abstract types" + case MethodType(param :: Nil, _) if definitions.isArrayOfSymbol(param.tpe, StringClass) => + "main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result" + case MethodType(_, _) => + "main methods must have the exact signature `(Array[String]): Unit`" + case tp => + s"don't know what this is: $tp" } + (msg, m) } - } + + companionAdvice.foreach(msg => warnNoForwarder(msg, hasExact, exactly.fold(alternate)(_.info))) + mainAdvice.foreach { case (msg, m) => warnBadMain(msg, m.pos) } + companionAdvice.isEmpty && mainAdvice.isEmpty } + // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. 
+ hasApproximate && check() } - } /* @@ -338,8 +358,8 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only * i.e., the pickle is contained in a custom annotation, see: * (1) `addAnnotations()`, - * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10 - * (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5 + * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, https://www.scala-lang.org/sid/10 + * (3) SID # 5 - Internals of Scala Annotations, https://www.scala-lang.org/sid/5 * That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9) * other than both ending up encoded as attributes (JVMS 4.7) * (with the caveat that the "ScalaSig" attribute is associated to some classes, @@ -348,7 +368,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ trait BCPickles { - import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } + import scala.reflect.internal.pickling.{PickleBuffer, PickleFormat} val versionPickle = { val vp = new PickleBuffer(new Array[Byte](16), -1, 0) @@ -384,7 +404,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * can-multi-thread */ def pickleMarkerForeign = { - createJAttribute(tpnme.ScalaATTR.toString, Array.emptyByteArray, 0, 0) + createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0) } /* Returns a ScalaSignature annotation if it must be added to this class, none otherwise. 
@@ -408,12 +428,19 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * * must-single-thread */ - def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = { + def getAnnotPickle(@unused jclassName: String, sym: Symbol): Option[AnnotationInfo] = { currentRun.symData get sym match { case Some(pickle) if !sym.isModuleClass => // pickles for module classes are in the companion / mirror class val scalaAnnot = { - val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex)) - AnnotationInfo(sigBytes.sigAnnot, Nil, (nme.bytes, sigBytes) :: Nil) + val sigBytes = new ScalaSigBytes(pickle.bytes.take(pickle.writeIndex)) + val (annTp, arg) = if (sigBytes.fitsInOneString) { + val tp = definitions.ScalaSignatureAnnotation.tpe + (tp, LiteralAnnotArg(Constant(sigBytes.strEncode))) + } else { + val tp = definitions.ScalaLongSignatureAnnotation.tpe + (tp, ArrayAnnotArg(sigBytes.arrEncode.map(s => LiteralAnnotArg(Constant(s))))) + } + AnnotationInfo(annTp, Nil, (nme.bytes, arg) :: Nil) } currentRun.symData -= sym currentRun.symData -= sym.companionSymbol @@ -447,7 +474,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /** * Annotations are not processed by the compilation pipeline like ordinary trees. Instead, the - * typer extracts them into [[AnnotationInfo]] objects which are attached to the corresponding + * typer extracts them into [[scala.reflect.internal.AnnotationInfos.AnnotationInfo]] objects which are attached to the corresponding * symbol (sym.annotations) or type (as an AnnotatedType, eliminated by erasure). * * For Scala annotations this is OK: they are stored in the pickle and ignored by the backend. 
@@ -474,7 +501,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ private def shouldEmitAnnotation(annot: AnnotationInfo) = { annot.symbol.initialize.isJavaDefined && - annot.matches(ClassfileAnnotationClass) && retentionPolicyOf(annot) != AnnotationRetentionPolicySourceValue && annot.args.isEmpty } @@ -496,59 +522,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value }).getOrElse(AnnotationRetentionPolicyClassValue) - def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { - val ca = new Array[Char](bytes.length) - var idx = 0 - while(idx < bytes.length) { - val b: Byte = bytes(idx) - assert((b & ~0x7f) == 0) - ca(idx) = b.asInstanceOf[Char] - idx += 1 - } - ca - } - - final def arrEncode(sb: ScalaSigBytes): Array[String] = { - var strs: List[String] = Nil - val bSeven: Array[Byte] = sb.sevenBitsMayBeZero - // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure) - var prevOffset = 0 - var offset = 0 - var encLength = 0 - while(offset < bSeven.length) { - val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1) - val newEncLength = encLength.toLong + deltaEncLength - if(newEncLength >= 65535) { - val ba = bSeven.slice(prevOffset, offset) - strs ::= new java.lang.String(ubytesToCharArray(ba)) - encLength = 0 - prevOffset = offset - } else { - encLength += deltaEncLength - offset += 1 - } - } - if(prevOffset < offset) { - assert(offset == bSeven.length) - val ba = bSeven.slice(prevOffset, offset) - strs ::= new java.lang.String(ubytesToCharArray(ba)) - } - assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict? 
- strs.reverse.toArray - } - - /* - * can-multi-thread - */ - private def strEncode(sb: ScalaSigBytes): String = { - val ca = ubytesToCharArray(sb.sevenBitsMayBeZero) - new java.lang.String(ca) - // debug val bvA = new asm.ByteVector; bvA.putUTF8(s) - // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes) - // debug assert(enc(idx) == bvA.getByte(idx + 2)) - // debug assert(bvA.getLength == enc.size + 2) - } - /* * For arg a LiteralAnnotArg(constt) with const.tag in {ClazzTag, EnumTag} * as well as for arg a NestedAnnotArg @@ -557,7 +530,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ def emitArgument(av: asm.AnnotationVisitor, name: String, - arg: ClassfileAnnotArg) { + arg: ClassfileAnnotArg): Unit = { (arg: @unchecked) match { case LiteralAnnotArg(const) => @@ -576,17 +549,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { } } - case sb @ ScalaSigBytes(bytes) => - // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files) - // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure. - if (sb.fitsInOneString) { - av.visit(name, strEncode(sb)) - } else { - val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name) - for(arg <- arrEncode(sb)) { arrAnnotV.visit(name, arg) } - arrAnnotV.visitEnd() - } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. - case ArrayAnnotArg(args) => val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name) for(arg <- args) { emitArgument(arrAnnotV, null, arg) } @@ -606,7 +568,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * must-single-thread * but not necessarily always. 
*/ - def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) { + def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]): Unit = { for ((name, value) <- assocs) { emitArgument(av, name.toString(), value) } @@ -616,7 +578,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread */ - def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) { + def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]): Unit = { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) @@ -628,7 +590,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread */ - def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) { + def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]): Unit = { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) @@ -640,7 +602,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread */ - def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) { + def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]): Unit = { for(annot <- annotations; if shouldEmitAnnotation(annot)) { val AnnotationInfo(typ, args, assocs) = annot assert(args.isEmpty, args) @@ -652,7 +614,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread */ - def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) { + def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]): Unit = { val annotationss = pannotss map (_ filter shouldEmitAnnotation) if (annotationss forall (_.isEmpty)) return for ((annots, idx) <- annotationss.zipWithIndex; @@ -672,7 +634,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { 
var access = asm.Opcodes.ACC_FINAL if (param.isArtifact) access |= asm.Opcodes.ACC_SYNTHETIC - jmethod.visitParameter(param.name.decoded, access) + jmethod.visitParameter(param.name.encoded, access) } } } // end of trait BCAnnotGen @@ -688,7 +650,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { // without it. This is particularly bad because the availability of // generic information could disappear as a consequence of a seemingly // unrelated change. - settings.Ynogenericsig + settings.Ynogenericsig.value || sym.isArtifact || sym.isLiftedMethod || sym.isBridge @@ -729,7 +691,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { catch { case _: Throwable => false } } - if (settings.Xverify) { + if (settings.Xverify.value) { // Run the signature parser to catch bogus signatures. val isValidSignature = wrap { // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser) @@ -776,38 +738,12 @@ abstract class BCodeHelpers extends BCodeIdiomatic { trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen { - /* Adds a @remote annotation, actual use unknown. - * - * Invoked from genMethod() and addForwarder(). - * - * must-single-thread - */ - def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) { - def hasThrowsRemoteException = meth.annotations.exists { - case ThrownException(exc) => exc.typeSymbol == definitions.RemoteExceptionClass - case _ => false - } - val needsAnnotation = { - (isRemoteClass || - isRemote(meth) && isJMethodPublic - ) && !hasThrowsRemoteException - } - if (needsAnnotation) { - val c = Constant(definitions.RemoteExceptionClass.tpe) - val arg = Literal(c) setType c.tpe - meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe :: Nil), arg) - } - } /* Add a forwarder for method m. Used only from addForwarders(). 
* * must-single-thread */ - private def addForwarder( - isRemoteClass: Boolean, - jclass: asm.ClassVisitor, - moduleClass: Symbol, - m: Symbol): Unit = { + private def addForwarder(jclass: asm.ClassVisitor, moduleClass: Symbol, m: Symbol): Unit = { def staticForwarderGenericSignature: String = { // scala/bug#3452 Static forwarder generation uses the same erased signature as the method if forwards to. // By rights, it should use the signature as-seen-from the module class, and add suitable @@ -823,7 +759,9 @@ abstract class BCodeHelpers extends BCodeIdiomatic { val moduleName = internalName(moduleClass) val methodInfo = moduleClass.thisType.memberInfo(m) - val paramJavaTypes: List[BType] = methodInfo.paramTypes map typeToBType + val paramTypes = methodInfo.paramTypes + val paramJavaTypes = BType.newArray(paramTypes.length) + mapToArray(paramTypes, paramJavaTypes, 0)(typeToBType) // val paramNames = 0 until paramJavaTypes.length map ("x_" + _) /* Forwarders must not be marked final, @@ -831,14 +769,15 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * and we don't know what classes might be subclassing the companion class. See scala/bug#4827. */ // TODO: evaluate the other flags we might be dropping on the floor here. + // TODO: ACC_SYNTHETIC ? val flags = GenBCode.PublicStatic | (if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0) | (if (m.isDeprecated) asm.Opcodes.ACC_DEPRECATED else 0) // TODO needed? 
for(ann <- m.annotations) { ann.symbol.initialize } val jgensig = staticForwarderGenericSignature - addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m) - val (throws, others) = m.annotations partition (_.symbol == definitions.ThrowsClass) + + val (throws, others) = partitionConserve(m.annotations)(_.symbol == definitions.ThrowsClass) val thrownExceptions: List[String] = getExceptions(throws) val jReturnType = typeToBType(methodInfo.resultType) @@ -858,6 +797,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitCode() + val codeStart: Label = new Label().tap(mirrorMethod.visitLabel) mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, classBTypeFromSymbol(moduleClass).descriptor) var index = 0 @@ -869,6 +809,13 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, methodBTypeFromSymbol(m).descriptor, false) mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) + val codeEnd = new Label().tap(mirrorMethod.visitLabel) + + methodInfo.params.lazyZip(paramJavaTypes).foldLeft(0) { + case (idx, (p, tp)) => + mirrorMethod.visitLocalVariable(p.name.encoded, tp.descriptor, null, codeStart, codeEnd, idx) + idx + tp.size + } mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments mirrorMethod.visitEnd() @@ -882,7 +829,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * * must-single-thread */ - def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) { + def addForwarders(jclass: asm.ClassVisitor, @unused jclassName: String, moduleClass: Symbol): Unit = { assert(moduleClass.isModuleClass, moduleClass) val linkedClass = moduleClass.companionClass @@ -900,7 +847,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { val excl = m.isDeferred || m.isConstructor || m.hasAccessBoundary || { val o = m.owner; (o eq ObjectClass) || (o 
eq AnyRefClass) || (o eq AnyClass) } || conflictingNames(m.name) - if (!excl) addForwarder(isRemoteClass, jclass, moduleClass, m) + if (!excl) addForwarder(jclass, moduleClass, m) } } @@ -932,18 +879,20 @@ abstract class BCodeHelpers extends BCodeIdiomatic { val MIN_SWITCH_DENSITY = 0.7 /* - * Add public static final field serialVersionUID with value `id` + * Add private static final field serialVersionUID with value `id`. * * can-multi-thread */ - def addSerialVUID(id: Long, jclass: asm.ClassVisitor) { + def addSerialVUID(id: Long, jclass: asm.ClassVisitor): Unit = { // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)` + // private for ease of binary compatibility (docs for java.io.Serializable + // claim that the access modifier can be anything we want). jclass.visitField( - GenBCode.PublicStaticFinal, + GenBCode.PrivateStaticFinal, "serialVersionUID", "J", null, // no java-generic-signature - new java.lang.Long(id) + java.lang.Long.valueOf(id) ).visitEnd() } } // end of trait BCClassGen @@ -967,7 +916,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * must-single-thread */ def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { - assert(moduleClass.isModuleClass) + assert(moduleClass.isModuleClass, "Require module class") assert(moduleClass.companionClass == NoSymbol, moduleClass) val bType = mirrorClassClassBType(moduleClass) @@ -988,7 +937,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa) - addForwarders(isRemote(moduleClass), mirrorClass, bType.internalName, moduleClass) + addForwarders(mirrorClass, bType.internalName, moduleClass) mirrorClass.visitEnd() @@ -999,114 +948,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { } // end of class JMirrorBuilder - /* builder of bean info classes */ - class JBeanInfoBuilder 
extends BCInnerClassGen { - - /* - * Generate a bean info class that describes the given class. - * - * @author Ross Judson (ross.judson@soletta.com) - * - * must-single-thread - */ - def genBeanInfoClass(cls: Symbol, cunit: CompilationUnit, fieldSymbols: List[Symbol], methodSymbols: List[Symbol]): asm.tree.ClassNode = { - - def javaSimpleName(s: Symbol): String = { s.javaSimpleName.toString } - - val beanInfoType = beanInfoClassClassBType(cls) - - val beanInfoClass = new asm.tree.ClassNode - beanInfoClass.visit( - backendUtils.classfileVersion.get, - beanInfoType.info.get.flags, - beanInfoType.internalName, - null, // no java-generic-signature - sbScalaBeanInfoRef.internalName, - EMPTY_STRING_ARRAY - ) - - beanInfoClass.visitSource( - cunit.source.toString, - null /* SourceDebugExtension */ - ) - - var fieldList = List[String]() - - for (f <- fieldSymbols if f.hasGetter; - g = f.getterIn(cls); - s = f.setterIn(cls); - if g.isPublic && !(f.name startsWith "$") - ) { - // inserting $outer breaks the bean - fieldList = javaSimpleName(f) :: javaSimpleName(g) :: (if (s != NoSymbol) javaSimpleName(s) else null) :: fieldList - } - - val methodList: List[String] = - for (m <- methodSymbols - if !m.isConstructor && - m.isPublic && - !(m.name startsWith "$") && - !m.isGetter && - !m.isSetter) - yield javaSimpleName(m) - - val constructor = beanInfoClass.visitMethod( - asm.Opcodes.ACC_PUBLIC, - INSTANCE_CONSTRUCTOR_NAME, - "()V", - null, // no java-generic-signature - EMPTY_STRING_ARRAY // no throwable exceptions - ) - - val stringArrayJType: BType = ArrayBType(StringRef) - val conJType: BType = MethodBType( - classBTypeFromSymbol(definitions.ClassClass) :: stringArrayJType :: stringArrayJType :: Nil, - UNIT - ) - - def push(lst: List[String]) { - var fi = 0 - for (f <- lst) { - constructor.visitInsn(asm.Opcodes.DUP) - constructor.visitLdcInsn(new java.lang.Integer(fi)) - if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) } - else { constructor.visitLdcInsn(f) } 
- constructor.visitInsn(StringRef.typedOpcode(asm.Opcodes.IASTORE)) - fi += 1 - } - } - - constructor.visitCode() - - constructor.visitVarInsn(asm.Opcodes.ALOAD, 0) - // push the class - constructor.visitLdcInsn(classBTypeFromSymbol(cls).toASMType) - - // push the string array of field information - constructor.visitLdcInsn(new java.lang.Integer(fieldList.length)) - constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringRef.internalName) - push(fieldList) - - // push the string array of method information - constructor.visitLdcInsn(new java.lang.Integer(methodList.length)) - constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringRef.internalName) - push(methodList) - - // invoke the superclass constructor, which will do the - // necessary java reflection and create Method objects. - constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.descriptor, false) - constructor.visitInsn(asm.Opcodes.RETURN) - - constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments - constructor.visitEnd() - - beanInfoClass.visitEnd() - - beanInfoClass - } - - } // end of class JBeanInfoBuilder - trait JAndroidBuilder { self: BCInnerClassGen => @@ -1127,7 +968,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread */ - def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) { + def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String): Unit = { val androidCreatorType = classBTypeFromSymbol(AndroidCreatorClass) val tdesc_creator = androidCreatorType.descriptor @@ -1150,7 +991,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { ) // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator; - val bt = MethodBType(Nil, androidCreatorType) + val bt = MethodBType(BType.emptyArray, androidCreatorType) clinit.visitMethodInsn( asm.Opcodes.INVOKEVIRTUAL, moduleName, @@ -1180,7 +1021,7 @@ object 
BCodeHelpers { /** * Valid flags for InnerClass attribute entry. - * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 + * See https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 */ val INNER_CLASSES_FLAGS = { asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | @@ -1198,6 +1039,7 @@ object BCodeHelpers { case GE => LT case GT => LE case LE => GT + case x => throw new MatchError(x) } def opcodeIF = asm.Opcodes.IFEQ + op def opcodeIFICMP = asm.Opcodes.IF_ICMPEQ + op @@ -1229,4 +1071,103 @@ object BCodeHelpers { val Special = new InvokeStyle(2) // InvokeSpecial (private methods, constructors) val Super = new InvokeStyle(3) // InvokeSpecial (super calls) } + + /** + * Helpers for encoding a Scala signature (array of bytes) into a String or, if too large, an + * array of Strings. + * + * The encoding is as described in [[scala.reflect.internal.pickling.ByteCodecs]]. However, the + * special encoding of 0x00 as 0xC0 0x80 is not done here, as the resulting String(s) are passed + * as annotation argument to ASM, which will perform this step. + */ + final class ScalaSigBytes(bytes: Array[Byte]) { + import scala.reflect.internal.pickling.ByteCodecs + + override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]") + + /** + * The data in `bytes` mapped to 7-bit bytes and then each element incremented by 1 (modulo 0x80). + * This implements parts of the encoding documented in [[scala.reflect.internal.pickling.ByteCodecs]]. 0x00 values are NOT + * mapped to the overlong encoding (0xC0 0x80) but left as-is. + * When creating a String from this array and writing it to a classfile as annotation argument + * using ASM, the ASM library will replace 0x00 values by the overlong encoding. So the data in + * the classfile will have the format documented in [[scala.reflect.internal.pickling.ByteCodecs]]. 
+ */ + lazy val sevenBitsMayBeZero: Array[Byte] = mapToNextModSevenBits(ByteCodecs.encode8to7(bytes)) + + private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = { + var i = 0 + val srclen = src.length + while (i < srclen) { + val in = src(i) + src(i) = if (in == 0x7f) 0.toByte else (in + 1).toByte + i += 1 + } + src + } + + /* In order to store a byte array (the pickle) using a bytecode-level annotation, + * the most compact representation is used (which happens to be string-constant and not byte array as one would expect). + * However, a String constant in a classfile annotation is limited to a maximum of 65535 characters. + * Method `fitsInOneString` tells us whether the pickle can be held by a single classfile-annotation of string-type. + * Otherwise an array of strings will be used. + */ + def fitsInOneString: Boolean = { + // due to escaping, a zero byte in a classfile-annotation of string-type takes actually two characters. + var i = 0 + var numZeros = 0 + while (i < sevenBitsMayBeZero.length) { + if (sevenBitsMayBeZero(i) == 0) numZeros += 1 + i += 1 + } + (sevenBitsMayBeZero.length + numZeros) <= 65535 + } + + def strEncode: String = new java.lang.String(ubytesToCharArray(sevenBitsMayBeZero)) + + def arrEncode: Array[String] = { + var strs: List[String] = Nil + val bSeven: Array[Byte] = sevenBitsMayBeZero + // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure) + var prevOffset = 0 + var offset = 0 + var encLength = 0 + while (offset < bSeven.length) { + val deltaEncLength = if (bSeven(offset) == 0) 2 else 1 + val newEncLength = encLength + deltaEncLength + if (newEncLength >= 65535) { + val ba = bSeven.slice(prevOffset, offset) + strs ::= new java.lang.String(ubytesToCharArray(ba)) + encLength = 0 + prevOffset = offset + } else { + encLength += deltaEncLength + offset += 1 + } + } + if (prevOffset < offset) { + assert(offset == bSeven.length) + val ba = 
bSeven.slice(prevOffset, offset) + strs ::= new java.lang.String(ubytesToCharArray(ba)) + } + assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict? + strs.reverse.toArray + } + + /** + * Maps an array of bytes 1:1 to an array of characters, ensuring that each byte is 7-bit. + * Therefore no charset is required. + */ + private def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { + val ca = new Array[Char](bytes.length) + var idx = 0 + while(idx < bytes.length) { + val b: Byte = bytes(idx) + assert((b & ~0x7f) == 0) + ca(idx) = b.toChar + idx += 1 + } + ca + } + } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index c583baaa9973..89d87386ba8e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,7 @@ package scala.tools.nsc package backend.jvm -import scala.annotation.switch +import scala.annotation.{switch, tailrec} import scala.collection.mutable import scala.tools.asm import scala.tools.asm.tree.MethodInsnNode @@ -23,8 +23,7 @@ import scala.tools.nsc.backend.jvm.GenBCode._ /* * A high-level facade to the ASM API for bytecode generation. 
* - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 + * @author Miguel Garcia, https://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ * */ abstract class BCodeIdiomatic { @@ -41,7 +40,7 @@ abstract class BCodeIdiomatic { val EMPTY_STRING_ARRAY = Array.empty[String] val EMPTY_INT_ARRAY = Array.empty[Int] val EMPTY_LABEL_ARRAY = Array.empty[asm.Label] - val EMPTY_BTYPE_ARRAY = Array.empty[BType] + val EMPTY_BTYPE_ARRAY = BType.emptyArray /* can-multi-thread */ final def mkArray(xs: List[BType]): Array[BType] = { @@ -104,14 +103,14 @@ abstract class BCodeIdiomatic { import asm.Opcodes - final def emit(opc: Int) { jmethod.visitInsn(opc) } + final def emit(opc: Int): Unit = { jmethod.visitInsn(opc) } final def genPrimitiveNot(bType: BType): Unit = { if (bType.isIntSizedType) { emit(Opcodes.ICONST_M1) emit(Opcodes.IXOR) } else if (bType == LONG) { - jmethod.visitLdcInsn(new java.lang.Long(-1)) + jmethod.visitLdcInsn(java.lang.Long.valueOf(-1)) jmethod.visitInsn(Opcodes.LXOR) } else { abort(s"Impossible to negate a $bType") @@ -121,7 +120,7 @@ abstract class BCodeIdiomatic { /* * can-multi-thread */ - final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType) { + final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType): Unit = { import scalaPrimitives.{AND, OR, XOR} @@ -150,7 +149,7 @@ abstract class BCodeIdiomatic { /* * can-multi-thread */ - final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType) { + final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType): Unit = { import scalaPrimitives.{ASR, LSL, LSR} @@ -176,10 +175,11 @@ abstract class BCodeIdiomatic { } // end of method genPrimitiveShift() - /* + /* Creates a new `StringBuilder` instance with the requested capacity + * * can-multi-thread */ - final def genStartConcat(pos: Position, size: Int): Unit = { + final def genNewStringBuilder(pos: Position, size: Int): Unit = { jmethod.visitTypeInsn(Opcodes.NEW, 
JavaStringBuilderClassName) jmethod.visitInsn(Opcodes.DUP) jmethod.visitLdcInsn(Integer.valueOf(size)) @@ -192,10 +192,11 @@ abstract class BCodeIdiomatic { ) } - /* + /* Issue a call to `StringBuilder#append` for the right element type + * * can-multi-thread */ - def genConcat(elemType: BType, pos: Position): Unit = { + final def genStringBuilderAppend(elemType: BType, pos: Position): Unit = { val paramType: BType = elemType match { case ct: ClassBType if ct.isSubtypeOf(StringRef).get => StringRef case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef).get => jlStringBufferRef @@ -206,18 +207,46 @@ abstract class BCodeIdiomatic { // jlStringBuilder does not have overloads for byte and short, but we can just use the int version case BYTE | SHORT => INT case pt: PrimitiveBType => pt + case x @ MethodBType(_, _) => throw new MatchError(x) } - val bt = MethodBType(List(paramType), jlStringBuilderRef) + val bt = MethodBType(Array(paramType), jlStringBuilderRef) invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor, pos) } - /* + /* Extract the built `String` from the `StringBuilder` + *: * can-multi-thread */ - final def genEndConcat(pos: Position): Unit = { + final def genStringBuilderEnd(pos: Position): Unit = { invokevirtual(JavaStringBuilderClassName, "toString", "()Ljava/lang/String;", pos) } + /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` + * (only works for JDK 9+) + * + * can-multi-thread + */ + final def genIndyStringConcat( + recipe: String, + argTypes: Seq[asm.Type], + constants: Seq[String] + ): Unit = { + jmethod.visitInvokeDynamicInsn( + "makeConcatWithConstants", + asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), + new asm.Handle( + asm.Opcodes.H_INVOKESTATIC, + jliStringConcatFactoryRef.internalName, + "makeConcatWithConstants", + List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, StringRef, ArrayBType(ObjectRef)) + .map(_.descriptor) + .mkString("(", "", 
s")${jliCallSiteRef.descriptor}"), + false + ), + (recipe +: constants):_* + ) + } + /* * Emits one or more conversion instructions based on the types given as arguments. * @@ -226,14 +255,15 @@ abstract class BCodeIdiomatic { * * can-multi-thread */ - final def emitT2T(from: BType, to: BType) { + @tailrec + final def emitT2T(from: BType, to: BType): Unit = { assert( from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType, s"Cannot emit primitive conversion from $from to $to - ${global.currentUnit}" ) - def pickOne(opcs: Array[Int]) { // TODO index on to.sort + def pickOne(opcs: Array[Int]): Unit = { // TODO index on to.sort val chosen = (to: @unchecked) match { case BYTE => opcs(0) case SHORT => opcs(1) @@ -248,6 +278,7 @@ abstract class BCodeIdiomatic { if (from == to) { return } // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) + // TODO: it seems in the jvm a bool is an int, so it should be treated the same as byte (for example) assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") // We're done with BOOL already @@ -287,13 +318,13 @@ abstract class BCodeIdiomatic { } // end of emitT2T() // can-multi-thread - final def boolconst(b: Boolean) { + final def boolconst(b: Boolean): Unit = { if (b) emit(Opcodes.ICONST_1) else emit(Opcodes.ICONST_0) } // can-multi-thread - final def iconst(cst: Int) { + final def iconst(cst: Int): Unit = { if (cst.toByte == cst) { if (cst >= -1 && cst <= 5) { emit(Opcodes.ICONST_0 + cst) @@ -302,47 +333,47 @@ abstract class BCodeIdiomatic { } else if (cst.toShort == cst) { jmethod.visitIntInsn(Opcodes.SIPUSH, cst) } else { - jmethod.visitLdcInsn(new Integer(cst)) + jmethod.visitLdcInsn(Integer.valueOf(cst)) } } // can-multi-thread - final def lconst(cst: Long) { + final def lconst(cst: Long): Unit = { if (cst == 0L || cst == 1L) { emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int]) } else { - jmethod.visitLdcInsn(new java.lang.Long(cst)) + jmethod.visitLdcInsn(java.lang.Long.valueOf(cst)) } } // 
can-multi-thread - final def fconst(cst: Float) { + final def fconst(cst: Float): Unit = { val bits: Int = java.lang.Float.floatToRawIntBits(cst) if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2 emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int]) } else { - jmethod.visitLdcInsn(new java.lang.Float(cst)) + jmethod.visitLdcInsn(java.lang.Float.valueOf(cst)) } } // can-multi-thread - final def dconst(cst: Double) { + final def dconst(cst: Double): Unit = { val bits: Long = java.lang.Double.doubleToRawLongBits(cst) if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int]) } else { - jmethod.visitLdcInsn(new java.lang.Double(cst)) + jmethod.visitLdcInsn(java.lang.Double.valueOf(cst)) } } // can-multi-thread - final def newarray(elem: BType) { + final def newarray(elem: BType): Unit = { elem match { case c: RefBType => /* phantom type at play in `Array(null)`, scala/bug#1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. 
*/ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType) case _ => - assert(elem.isNonVoidPrimitiveType) + assert(elem.isNonVoidPrimitiveType, "Require primitive") val rand = { // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" (elem: @unchecked) match { @@ -361,18 +392,19 @@ abstract class BCodeIdiomatic { } - final def load( idx: Int, tk: BType) { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread - final def store(idx: Int, tk: BType) { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread + final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread + final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread + final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread - final def aload( tk: BType) { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread - final def astore(tk: BType) { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread + final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread + final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread - final def neg(tk: BType) { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread - final def add(tk: BType) { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread - final def sub(tk: BType) { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread - final def mul(tk: BType) { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread - final def div(tk: BType) { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread - final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread + final def neg(tk: BType): Unit = { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // 
can-multi-thread + final def add(tk: BType): Unit = { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread + final def sub(tk: BType): Unit = { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread + final def mul(tk: BType): Unit = { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread + final def div(tk: BType): Unit = { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread + final def rem(tk: BType): Unit = { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread // can-multi-thread final def invokespecial(owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = { @@ -398,24 +430,24 @@ abstract class BCodeIdiomatic { } // can-multi-thread - final def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) } + final def goTo(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.GOTO, label) } // can-multi-thread - final def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) } + final def emitIF(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIF, label) } // can-multi-thread - final def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) } + final def emitIF_ICMP(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) } // can-multi-thread - final def emitIF_ACMP(cond: TestOp, label: asm.Label) { + final def emitIF_ACMP(cond: TestOp, label: asm.Label): Unit = { assert((cond == TestOp.EQ) || (cond == TestOp.NE), cond) val opc = (if (cond == TestOp.EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE) jmethod.visitJumpInsn(opc, label) } // can-multi-thread - final def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) } + final def emitIFNONNULL(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) } // can-multi-thread - final def emitIFNULL (label: asm.Label) { 
jmethod.visitJumpInsn(Opcodes.IFNULL, label) } + final def emitIFNULL (label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNULL, label) } // can-multi-thread - final def emitRETURN(tk: BType) { + final def emitRETURN(tk: BType): Unit = { if (tk == UNIT) { emit(Opcodes.RETURN) } else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) } } @@ -424,8 +456,8 @@ abstract class BCodeIdiomatic { * * can-multi-thread */ - final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) { - assert(keys.length == branches.length) + final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double): Unit = { + assert(keys.length == branches.length, s"Bad branches, have ${branches.length}, wanted ${keys.length}") // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only. // Similar to what javac emits for a switch statement consisting only of a default case. @@ -500,7 +532,7 @@ abstract class BCodeIdiomatic { // don't make private otherwise inlining will suffer // can-multi-thread - final def emitVarInsn(opc: Int, idx: Int, tk: BType) { + final def emitVarInsn(opc: Int, idx: Int, tk: BType): Unit = { assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc) jmethod.visitVarInsn(tk.typedOpcode(opc), idx) } @@ -508,7 +540,7 @@ abstract class BCodeIdiomatic { // ---------------- array load and store ---------------- // can-multi-thread - final def emitTypeBased(opcs: Array[Int], tk: BType) { + final def emitTypeBased(opcs: Array[Int], tk: BType): Unit = { assert(tk != UNIT, tk) val opc = { if (tk.isRef) { opcs(0) } @@ -533,7 +565,7 @@ abstract class BCodeIdiomatic { // ---------------- primitive operations ---------------- // can-multi-thread - final def emitPrimitive(opcs: Array[Int], tk: BType) { + final def emitPrimitive(opcs: Array[Int], tk: BType): Unit = { val opc = { // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could 
not emit switch for @switch annotated match" tk match { @@ -547,10 +579,10 @@ abstract class BCodeIdiomatic { } // can-multi-thread - final def drop(tk: BType) { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } + final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } // can-multi-thread - final def dup(tk: BType) { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } + final def dup(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } // ---------------- type checks and casts ---------------- @@ -641,8 +673,8 @@ abstract class BCodeIdiomatic { * The entry-value for a LabelDef entry-key always contains the entry-key. * */ - class LabelDefsFinder(rhs: Tree) extends Traverser { - val result = mutable.AnyRefMap.empty[Tree, List[LabelDef]] + class LabelDefsFinder(rhs: Tree) extends InternalTraverser { + val result = mutable.HashMap.empty[Tree, List[LabelDef]] var acc: List[LabelDef] = Nil var directResult: List[LabelDef] = Nil @@ -652,10 +684,10 @@ abstract class BCodeIdiomatic { /* * can-multi-thread */ - override def traverse(tree: Tree) { + override def traverse(tree: Tree): Unit = { val saved = acc acc = Nil - super.traverse(tree) + tree.traverse(this) // acc contains all LabelDefs found under (but not at) `tree` tree match { case lblDf: LabelDef => acc ::= lblDf @@ -672,13 +704,13 @@ abstract class BCodeIdiomatic { } implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) { - @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) { mnode.instructions.foreachInsn(f) } + @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { mnode.instructions.foreachInsn(f) } } implicit class InsnIterInsnList(lst: asm.tree.InsnList) { - @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) { - val insnIter = lst.iterator() + @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { + val insnIter = lst.iterator while 
(insnIter.hasNext) { f(insnIter.next()) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 2be31e33cb4e..b169e0e9d645 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,17 +14,15 @@ package scala.tools.nsc package backend package jvm +import scala.annotation.unused import scala.collection.{immutable, mutable} -import scala.tools.nsc.symtab._ import scala.tools.asm +import scala.tools.nsc.symtab._ import GenBCode._ import BackendReporting._ /* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * + * @author Miguel Garcia, https://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ */ abstract class BCodeSkelBuilder extends BCodeHelpers { import global._ @@ -85,7 +83,6 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { var claszSymbol: Symbol = null var isCZParcelable = false var isCZStaticModule = false - var isCZRemote = false /* ---------------- idiomatic way to ask questions to typer ---------------- */ @@ -99,32 +96,76 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { def tpeTK(tree: Tree): BType = typeToBType(tree.tpe) + @annotation.unused + private def canAssignModuleInClinit(cd: ClassDef, sym: Symbol): Boolean = { + import global.definitions._ + val parentsArePure = claszSymbol.parentSymbols.forall(sym => sym == ObjectClass || isFunctionSymbol(sym) || isAbstractFunctionSymbol(sym) || sym == definitions.SerializableClass) + def isPureConstructor(dd: DefDef): Boolean = { + dd.rhs match { + case Block(stats, _) => treeInfo.isSuperConstrCall(stats.last) + case _ 
=> false + } + } + def constructorsArePure = cd.impl.body.iterator.collect { + case dd: DefDef if dd.symbol.isConstructor => dd + }.forall(isPureConstructor) + parentsArePure && constructorsArePure + } + /* ---------------- helper utils for generating classes and fields ---------------- */ - def genPlainClass(cd: ClassDef) { + def genPlainClass(cd0: ClassDef): Unit = { assert(cnode == null, "GenBCode detected nested methods.") - claszSymbol = cd.symbol + claszSymbol = cd0.symbol isCZParcelable = isAndroidParcelableClass(claszSymbol) isCZStaticModule = isStaticModuleClass(claszSymbol) - isCZRemote = isRemote(claszSymbol) thisBType = classBTypeFromSymbol(claszSymbol) thisBTypeDescriptor = thisBType.descriptor - cnode = new ClassNode1() initJClass(cnode) + val cd = if (isCZStaticModule) { + // Move statements from the primary constructor following the superclass constructor call to + // a newly synthesised tree representing the "", which also assigns the MODULE$ field. + // Because the assignments to both the module instance fields, and the fields of the module itself + // are in the , these fields can be static + final. - val hasStaticCtor = methodSymbols(cd) exists (_.isStaticConstructor) - if (!hasStaticCtor) { - // but needs one ... - if (isCZStaticModule || isCZParcelable) { - fabricateStaticInit() + // TODO should we do this transformation earlier, say in Constructors? Or would that just cause + // pain for scala-{js, native}? 
+ + for (f <- fieldSymbols(claszSymbol)) { + f.setFlag(Flags.STATIC) } - } + val constructorDefDef = treeInfo.firstConstructor(cd0.impl.body).asInstanceOf[DefDef] + val (uptoSuperStats, remainingConstrStats) = treeInfo.splitAtSuper(constructorDefDef.rhs.asInstanceOf[Block].stats, classOnly = true) + val clInitSymbol = claszSymbol.newMethod(nme.CLASS_CONSTRUCTOR, claszSymbol.pos, Flags.STATIC).setInfo(NullaryMethodType(definitions.UnitTpe)) + + // We don't need to enter this field into the decls of claszSymbol.info as this is added manually to the generated class + // in addModuleInstanceField. TODO: try adding it to the decls and making the usual field generation do the right thing. + val moduleField = claszSymbol.newValue(nme.MODULE_INSTANCE_FIELD, claszSymbol.pos, Flags.STATIC | Flags.PRIVATE).setInfo(claszSymbol.tpeHK) + + val callConstructor = NewFromConstructor(claszSymbol.primaryConstructor).setType(claszSymbol.tpeHK) + val assignModuleField = Assign(global.gen.mkAttributedRef(moduleField).setType(claszSymbol.tpeHK), callConstructor).setType(definitions.UnitTpe) + val remainingConstrStatsSubst = remainingConstrStats.map(_.substituteThis(claszSymbol, global.gen.mkAttributedRef(claszSymbol.sourceModule)).changeOwner(claszSymbol.primaryConstructor -> clInitSymbol)) + val clinit = DefDef(clInitSymbol, Block(assignModuleField :: remainingConstrStatsSubst, Literal(Constant(())).setType(definitions.UnitTpe)).setType(definitions.UnitTpe)) + deriveClassDef(cd0)(tmpl => deriveTemplate(tmpl)(body => + clinit :: body.map { + case `constructorDefDef` => copyDefDef(constructorDefDef)(rhs = Block(uptoSuperStats, constructorDefDef.rhs.asInstanceOf[Block].expr)) + case tree => tree + } + )) + } else cd0 + + val hasStaticCtor = methodSymbols(cd) exists (_.isStaticConstructor) + if (!hasStaticCtor && isCZParcelable) fabricateStaticInitAndroid() val optSerial: Option[Long] = serialVUID(claszSymbol) - if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)} + /* 
serialVersionUID can't be put on interfaces (it's a private field). + * this is fine because it wouldn't do anything anyways. */ + if (optSerial.isDefined && !claszSymbol.isTrait) { + addSerialVUID(optSerial.get, cnode) + } addClassFields() @@ -141,7 +182,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { /* * must-single-thread */ - private def initJClass(jclass: asm.ClassVisitor) { + private def initJClass(@unused jclass: asm.ClassVisitor): Unit = { val bType = classBTypeFromSymbol(claszSymbol) val superClass = bType.info.get.superClass.getOrElse(ObjectRef).internalName @@ -174,7 +215,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { } else { - if (!settings.noForwarders) { + if (!settings.noForwarders.value) { val lmoc = claszSymbol.companionModule // add static forwarders if there are no name conflicts; see bugs #363 and #1735 if (lmoc != NoSymbol) { @@ -184,7 +225,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { } if (isCandidateForForwarders) { log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'") - addForwarders(isRemote(claszSymbol), cnode, thisBType.internalName, lmoc.moduleClass) + addForwarders(cnode, thisBType.internalName, lmoc.moduleClass) } } } @@ -198,15 +239,9 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { /* * can-multi-thread */ - private def addModuleInstanceField() { + private def addModuleInstanceField(): Unit = { // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED - // scala/scala-dev#194: - // This can't be FINAL on JVM 1.9+ because we assign it from within the - // instance constructor, not from directly. Assignment from , - // after the constructor has completely finished, seems like the principled - // thing to do, but it would change behaviour when "benign" cyclic references - // between modules exist. 
- val mods = GenBCode.PublicStatic + val mods = GenBCode.PublicStaticFinal val fv = cnode.visitField(mods, strMODULE_INSTANCE_FIELD, @@ -218,10 +253,13 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { fv.visitEnd() } + protected def assignModuleInstanceField(meth: asm.MethodVisitor): Unit = { + meth.visitFieldInsn(asm.Opcodes.PUTSTATIC, thisBType.internalName, strMODULE_INSTANCE_FIELD, thisBType.descriptor) + } /* * must-single-thread */ - private def fabricateStaticInit() { + private def fabricateStaticInitAndroid(): Unit = { val clinit: asm.MethodVisitor = cnode.visitMethod( GenBCode.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED @@ -232,20 +270,14 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { ) clinit.visitCode() - /* "legacy static initialization" */ - if (isCZStaticModule) { - clinit.visitTypeInsn(asm.Opcodes.NEW, thisBType.internalName) - clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, - thisBType.internalName, INSTANCE_CONSTRUCTOR_NAME, "()V", false) - } if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisBType.internalName) } - clinit.visitInsn(asm.Opcodes.RETURN) + clinit.visitInsn(asm.Opcodes.RETURN) clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments clinit.visitEnd() } - def addClassFields() { + def addClassFields(): Unit = { for (f <- fieldSymbols(claszSymbol)) { val javagensig = getGenericSignature(f, claszSymbol) val flags = javaFieldFlags(f) @@ -338,10 +370,10 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`. 
*/ var cleanups: List[asm.Label] = Nil - def registerCleanup(finCleanup: asm.Label) { + def registerCleanup(finCleanup: asm.Label): Unit = { if (finCleanup != null) { cleanups = finCleanup :: cleanups } } - def unregisterCleanup(finCleanup: asm.Label) { + def unregisterCleanup(finCleanup: asm.Label): Unit = { if (finCleanup != null) { assert(cleanups.head eq finCleanup, s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup") @@ -365,18 +397,18 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { */ object locals { - private val slots = mutable.AnyRefMap.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth)) + private val slots = mutable.HashMap.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth)) private var nxtIdx = -1 // next available index for local-var - def reset(isStaticMethod: Boolean) { + def reset(isStaticMethod: Boolean): Unit = { slots.clear() nxtIdx = if (isStaticMethod) 0 else 1 } - def contains(locSym: Symbol): Boolean = { slots.contains(locSym) } + def contains(locSym: Symbol): Boolean = slots.contains(locSym) - def apply(locSym: Symbol): Local = { slots.apply(locSym) } + def apply(locSym: Symbol): Local = slots(locSym) /* Make a fresh local variable, ensuring a unique name. * The invoker must make sure inner classes are tracked for the sym's tpe. @@ -387,14 +419,10 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { locSym } - def makeLocal(locSym: Symbol): Local = { - makeLocal(locSym, symInfoTK(locSym)) - } + def makeLocal(locSym: Symbol): Local = makeLocal(locSym, symInfoTK(locSym)) - def getOrMakeLocal(locSym: Symbol): Local = { - // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map. - slots.getOrElse(locSym, makeLocal(locSym)) - } + // `getOrElse` has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map. 
+ def getOrMakeLocal(locSym: Symbol): Local = slots.getOrElse(locSym, makeLocal(locSym)) private def makeLocal(sym: Symbol, tk: BType): Local = { assert(nxtIdx != -1, "not a valid start index") @@ -408,16 +436,15 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { } // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. - def store(locSym: Symbol) { + def store(locSym: Symbol): Unit = { val Local(tk, _, idx, _) = slots(locSym) bc.store(idx, tk) } - def load(locSym: Symbol) { + def load(locSym: Symbol): Unit = { val Local(tk, _, idx, _) = slots(locSym) bc.load(idx, tk) } - } /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */ @@ -460,15 +487,14 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { pp } } - def markProgramPoint(lbl: asm.Label) { + def markProgramPoint(lbl: asm.Label): Unit = { val skip = (lbl == null) || isAtProgramPoint(lbl) if (!skip) { mnode visitLabel lbl } } def isAtProgramPoint(lbl: asm.Label): Boolean = { (lastInsn match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } ) } - def lineNumber(tree: Tree) { - if (!emitLines || !tree.pos.isDefined) return + def lineNumber(tree: Tree): Unit = if (emitLines && tree.pos.isDefined && !tree.hasAttachment[SyntheticUnitAttachment.type]) { val nr = tree.pos.finalPosition.line if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr @@ -483,7 +509,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { } // on entering a method - def resetMethodBookkeeping(dd: DefDef) { + def resetMethodBookkeeping(dd: DefDef): Unit = { locals.reset(isStaticMethod = methSymbol.isStaticMember) jumpDest = immutable.Map.empty // populate labelDefsAtOrUnder @@ -504,7 +530,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */ - def gen(tree: Tree) { + def gen(tree: Tree): Unit = { tree match { case 
EmptyTree => () @@ -522,7 +548,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { val forwarderDefDef = { val dd1 = global.gen.mkStatic(deriveDefDef(dd)(_ => EmptyTree), newTermName(traitSuperAccessorName(sym)), _.cloneSymbol.withoutAnnotations) dd1.symbol.setFlag(Flags.ARTIFACT).resetFlag(Flags.OVERRIDE) - val selfParam :: realParams = dd1.vparamss.head.map(_.symbol) + val selfParam :: realParams = dd1.vparamss.head.map(_.symbol): @unchecked deriveDefDef(dd1)(_ => atPos(dd1.pos)( Apply(Select(global.gen.mkAttributedIdent(selfParam).setType(sym.owner.typeConstructor), dd.symbol), @@ -543,11 +569,11 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { /* * must-single-thread */ - def initJMethod(flags: Int, params: List[Symbol]) { + def initJMethod(flags: Int, params: List[Symbol]): Unit = { val jgensig = getGenericSignature(methSymbol, claszSymbol) - addRemoteExceptionAnnot(isCZRemote, hasPublicBitSet(flags), methSymbol) - val (excs, others) = methSymbol.annotations partition (_.symbol == definitions.ThrowsClass) + + val (excs, others) = partitionConserve(methSymbol.annotations)(_.symbol == definitions.ThrowsClass) val thrownExceptions: List[String] = getExceptions(excs) val bytecodeName = @@ -565,15 +591,13 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { emitParamNames(mnode, params) emitAnnotations(mnode, others) - emitParamAnnotations(mnode, params.map(_.annotations)) + if (params.exists(_.annotations.nonEmpty)) + emitParamAnnotations(mnode, params.map(_.annotations)) } // end of method initJMethod - - def genDefDef(dd: DefDef) { - // the only method whose implementation is not emitted: getClass() - if (definitions.isGetClass(dd.symbol)) { return } - if (dd.symbol.hasAttachment[JustMethodReference]) { return } + // the only method whose implementation is not emitted: getClass() + def genDefDef(dd: DefDef): Unit = if (!definitions.isGetClass(dd.symbol)) { assert(mnode == null, "GenBCode detected nested method.") methSymbol = dd.symbol @@ 
-587,12 +611,23 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { val DefDef(_, _, _, vparamss, _, rhs) = dd assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss") val params = if (vparamss.isEmpty) Nil else vparamss.head - for (p <- params) { locals.makeLocal(p.symbol) } + for (p <- params) locals.makeLocal(p.symbol) // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug") - if (params.size > MaximumJvmParameters) { - // scala/bug#7324 - reporter.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.") + // scala/bug#7324 + // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.3.3 + // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.11 + // https://docs.oracle.com/javase/specs/jvms/se20/html/jvms-4.html#jvms-4.11 + val paramsLength = params.foldLeft(0) { (sum, p) => + val i = p.symbol.info.typeSymbol match { + case definitions.LongClass | definitions.DoubleClass => 2 + case _ => 1 + } + sum + i + } + if (paramsLength > MaximumJvmParameters) { + val info = if (paramsLength == params.length) "" else " (Long and Double count as 2)" + reporter.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters$info.") return } @@ -617,15 +652,17 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { * When duplicating a finally-contained LabelDef, another program-point is needed for the copy (each such copy has its own asm.Label), * but the same vars (given by the LabelDef's params) can be reused, * because no LabelDef ends up nested within itself after such duplication. + * + * The tail-calls xform results in symbols shared btw method-params and labelDef-params, thus the guard below. 
*/ - for(ld <- labelDefsAtOrUnder.getOrElse(dd.rhs, Nil); ldp <- ld.params; if !locals.contains(ldp.symbol)) { - // the tail-calls xform results in symbols shared btw method-params and labelDef-params, thus the guard above. - locals.makeLocal(ldp.symbol) - } + for { + ld <- labelDefsAtOrUnder.getOrElse(dd.rhs, Nil) + ldp <- ld.params + } if (!locals.contains(ldp.symbol)) locals.makeLocal(ldp.symbol) if (!isAbstractMethod && !isNative) { - def emitNormalMethodBody() { + def emitNormalMethodBody(): Unit = { val veryFirstProgramPoint = currProgramPoint() if (rhs == EmptyTree) { @@ -674,12 +711,12 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { * * TODO document, explain interplay with `fabricateStaticInit()` */ - private def appendToStaticCtor(dd: DefDef) { + private def appendToStaticCtor(@unused dd: DefDef): Unit = { def insertBefore( location: asm.tree.AbstractInsnNode, i0: asm.tree.AbstractInsnNode, - i1: asm.tree.AbstractInsnNode) { + i1: asm.tree.AbstractInsnNode): Unit = { if (i0 != null) { mnode.instructions.insertBefore(location, i0.clone(null)) mnode.instructions.insertBefore(location, i1.clone(null)) @@ -737,14 +774,14 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { } - def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false) { + def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false): Unit = { val Local(tk, name, idx, isSynth) = locals(sym) if (force || !isSynth) { mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx) } } - def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination) + def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit } // end of class PlainSkelBuilder diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala index 601d1eb40d02..ebffa98620a5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala 
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,8 +20,7 @@ import scala.tools.asm /* * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 + * @author Miguel Garcia, https://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ * */ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { @@ -117,7 +116,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { * Useful to avoid emitting an empty try-block being protected by exception handlers, * which results in "java.lang.ClassFormatError: Illegal exception table range". See scala/bug#6102. */ - def nopIfNeeded(lbl: asm.Label) { + def nopIfNeeded(lbl: asm.Label): Unit = { val noInstructionEmitted = isAtProgramPoint(lbl) if (noInstructionEmitted) { emit(asm.Opcodes.NOP) } } @@ -194,6 +193,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody) case Bind(_, _) => BoundEH (pat.symbol, caseBody) + case _ => throw new Exception(s"Unexpected try case pattern tree $pat of class ${pat.shortClass}") } } @@ -244,7 +244,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { val endTryBody = currProgramPoint() bc goTo postHandlers - /** + /* * A return within a `try` or `catch` block where a `finally` is present ("early return") * emits a store of the result to a local, jump to a "cleanup" version of the `finally` block, * and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]). 
@@ -376,7 +376,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { } // end of genLoadTry() /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. */ - private def pendingCleanups() { + private def pendingCleanups(): Unit = { cleanups match { case Nil => if (earlyReturnVar != null) { @@ -392,7 +392,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { } } - def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType) { + def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType): Unit = { val excInternalName: String = if (excType == null) null else excType.internalName @@ -401,7 +401,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { } /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */ - def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean) { + def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean): Unit = { var saved: immutable.Map[ /* LabelDef */ Symbol, JumpDestination ] = null if (isDuplicate) { saved = jumpDest @@ -421,7 +421,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { /* Does this tree have a try-catch block? */ def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] } - trait EHClause + sealed trait EHClause case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 23eacc7e5c27..2303edbb8fae 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,10 +14,12 @@ package scala.tools.nsc package backend.jvm import java.{util => ju} +import java.lang.{StringBuilder, ThreadLocal} -import scala.collection.concurrent -import scala.tools.asm -import scala.tools.asm.Opcodes +import scala.annotation.tailrec +import scala.collection.SortedMap +import scala.collection.immutable.ArraySeq.unsafeWrapArray +import scala.tools.asm, asm.Opcodes import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, InternalName} import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.backend.jvm.opt._ @@ -53,23 +55,12 @@ abstract class BTypes { // Note usage should be private to this file, except for tests val classBTypeCache: ju.concurrent.ConcurrentHashMap[InternalName, ClassBType] = recordPerRunJavaMapCache(new ju.concurrent.ConcurrentHashMap[InternalName, ClassBType]) - + object BType { + val emptyArray = Array[BType]() + def newArray(n: Int): Array[BType] = if (n == 0) emptyArray else new Array[BType](n) + } sealed abstract class BType { - override def toString: String = { - val builder = new java.lang.StringBuilder(64) - buildString(builder) - builder.toString - } - final def buildString(builder: java.lang.StringBuilder): Unit = this match { - case p: PrimitiveBType => builder.append(p.desc) - case ClassBType(internalName) => builder.append('L').append(internalName).append(';') - case ArrayBType(component) => builder.append('['); component.buildString(builder) - case MethodBType(args, res) => - builder.append('(') - args.foreach(_.buildString(builder)) - builder.append(')') - res.buildString(builder) - } + override def toString: String = BTypeExporter.btypeToString(this) /** * @return The Java descriptor of this type. 
Examples: @@ -123,39 +114,33 @@ abstract class BTypes { case ArrayBType(component) => if (other == ObjectRef || other == jlCloneableRef || other == jiSerializableRef) true else other match { - case ArrayBType(otherComponent) => component.conformsTo(otherComponent).orThrow + case ArrayBType(otherComponent) => + // Array[Short]().isInstanceOf[Array[Int]] is false + // but Array[String]().isInstanceOf[Array[Object]] is true + if (component.isPrimitive || otherComponent.isPrimitive) component == otherComponent + else component.conformsTo(otherComponent).orThrow case _ => false } case classType: ClassBType => - if (isBoxed) { - if (other.isBoxed) this == other - else if (other == ObjectRef) true - else other match { - case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType).orThrow // e.g., java/lang/Double conforms to java/lang/Number - case _ => false - } - } else if (isNullType) { - if (other.isNothingType) false - else if (other.isPrimitive) false - else true // Null conforms to all classes (except Nothing) and arrays. - } else if (isNothingType) { - true - } else other match { + // Quick test for ObjectRef to make a common case fast + other == ObjectRef || (other match { case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType).orThrow - // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case - case _ => - // isNothingType || // documentation only, because `if (isNothingType)` above covers this case - false - } + case _ => false + }) - case UNIT => - other == UNIT - case BOOL | BYTE | SHORT | CHAR => - this == other || other == INT || other == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt(). case _ => - assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other") - this == other + // there are no bool/byte/short/char primitives at runtime, they are represented as ints. 
+ // instructions like i2s are used to truncate, the result is again an int. conformsTo + // returns true for conversions that don't need a truncating instruction. see also emitT2T. + // note that for primitive arrays, Array[Short]().isInstanceOf[Array[Int]] is false. + this == other || ((this, other) match { + case (BOOL, BYTE | SHORT | INT) => true + case (BYTE, SHORT | INT) => true + case (SHORT, INT) => true + case (CHAR, INT) => true + case _ => false + }) } })) @@ -296,9 +281,12 @@ abstract class BTypes { } case LONG => - if (other.isIntegralType) LONG - else if (other.isRealType) other - else uncomparable + other match { + case INT | BYTE | LONG | CHAR | SHORT => LONG + case DOUBLE => DOUBLE + case FLOAT => FLOAT + case _ => uncomparable + } case FLOAT => if (other == DOUBLE) DOUBLE @@ -347,8 +335,8 @@ abstract class BTypes { * * In this summary, "class" means "class or interface". * - * JLS: http://docs.oracle.com/javase/specs/jls/se8/html/index.html - * JVMS: http://docs.oracle.com/javase/specs/jvms/se8/html/index.html + * JLS: https://docs.oracle.com/javase/specs/jls/se8/html/index.html + * JVMS: https://docs.oracle.com/javase/specs/jvms/se8/html/index.html * * Terminology * ----------- @@ -614,9 +602,9 @@ abstract class BTypes { def info: Either[NoClassBTypeInfo, ClassInfo] = { if (_info eq null) // synchronization required to ensure the apply is finished - // which populates info. ClassBType doesnt escape apart from via the map + // which populates info. ClassBType does not escape apart from via the map // and the object mutex is locked prior to insertion. 
See apply - this.synchronized() + this.synchronized {} assert(_info != null, s"ClassBType.info not yet assigned: $this") _info } @@ -651,10 +639,18 @@ abstract class BTypes { def isInterface: Either[NoClassBTypeInfo, Boolean] = info.map(i => (i.flags & asm.Opcodes.ACC_INTERFACE) != 0) - def superClassesTransitive: Either[NoClassBTypeInfo, List[ClassBType]] = info.flatMap(i => i.superClass match { - case None => Right(Nil) - case Some(sc) => sc.superClassesTransitive.map(sc :: _) - }) + /** The super class chain of this type, starting with Object, ending with `this`. */ + def superClassesChain: Either[NoClassBTypeInfo, List[ClassBType]] = try { + var res = List(this) + var sc = info.orThrow.superClass + while (sc.nonEmpty) { + res ::= sc.get + sc = sc.get.info.orThrow.superClass + } + Right(res) + } catch { + case Invalid(noInfo: NoClassBTypeInfo) => Left(noInfo) + } /** * The prefix of the internal name until the last '/', or the empty string. @@ -680,7 +676,7 @@ abstract class BTypes { } def innerClassAttributeEntry: Either[NoClassBTypeInfo, Option[InnerClassEntry]] = info.map(i => i.nestedInfo.force map { - case NestedInfo(_, outerName, innerName, isStaticNestedClass, exitingTyperPrivate) => + case NestedInfo(_, outerName, innerName, isStaticNestedClass, enteringTyperPrivate) => // the static flag in the InnerClass table has a special meaning, see InnerClass comment def adjustStatic(flags: Int): Int = ( flags & ~Opcodes.ACC_STATIC | (if (isStaticNestedClass) Opcodes.ACC_STATIC else 0) @@ -689,7 +685,7 @@ abstract class BTypes { internalName, outerName.orNull, innerName.orNull, - flags = adjustStatic(if (exitingTyperPrivate) (i.flags & ~Opcodes.ACC_PUBLIC) | Opcodes.ACC_PRIVATE else i.flags) + flags = adjustStatic(if (enteringTyperPrivate) (i.flags & ~Opcodes.ACC_PUBLIC) | Opcodes.ACC_PRIVATE else i.flags) ) }) @@ -725,9 +721,9 @@ abstract class BTypes { /** * Finding the least upper bound in agreement with the bytecode verifier * Background: - * 
http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + * https://xavierleroy.org/publi/bytecode-verification-JAR.pdf * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - * https://github.com/scala/bug/issues/3872 + * https://github.com/scala/bug/issues/3872#issuecomment-292386375 */ def jvmWiseLUB(other: ClassBType): Either[NoClassBTypeInfo, ClassBType] = { def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType @@ -748,14 +744,7 @@ abstract class BTypes { if (this.isSubtypeOf(other).orThrow) other else ObjectRef case _ => - // TODO @lry I don't really understand the reasoning here. - // Both this and other are classes. The code takes (transitively) all superclasses and - // finds the first common one. - // MOST LIKELY the answer can be found here, see the comments and links by Miguel: - // - https://github.com/scala/bug/issues/3872 - // @jz Wouldn't it be better to walk the superclass chain of both types in reverse (starting from Object), and - // finding the last common link? 
That would be O(N), whereas this looks O(N^2) - firstCommonSuffix(this :: this.superClassesTransitive.orThrow, other :: other.superClassesTransitive.orThrow) + firstCommonSuffix(superClassesChain.orThrow, other.superClassesChain.orThrow) } assert(isNotNullOrNothing(res), s"jvmWiseLUB computed: $res") @@ -764,17 +753,16 @@ abstract class BTypes { } private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = { - var chainA = as - var chainB = bs - var fcs: ClassBType = null - do { - if (chainB contains chainA.head) fcs = chainA.head - else if (chainA contains chainB.head) fcs = chainB.head - else { - chainA = chainA.tail - chainB = chainB.tail - } - } while (fcs == null) + // assert(as.head == ObjectRef, as.head) + // assert(bs.head == ObjectRef, bs.head) + var chainA = as.tail + var chainB = bs.tail + var fcs = ObjectRef + while (chainA.nonEmpty && chainB.nonEmpty && chainA.head == chainB.head) { + fcs = chainA.head + chainA = chainA.tail + chainB = chainB.tail + } fcs } @@ -809,9 +797,23 @@ abstract class BTypes { "scala/Null", "scala/Nothing" ) - def unapply(cr:ClassBType) = Some(cr.internalName) + def unapply(cr: ClassBType): Some[InternalName] = Some(cr.internalName) - def apply(internalName: InternalName, fromSymbol: Boolean)(init: (ClassBType) => Either[NoClassBTypeInfo, ClassInfo]) = { + /** + * Retrieve the `ClassBType` for the class with the given internal name, creating the entry if it doesn't + * already exist + * + * @param internalName The name of the class + * @param t A value that will be passed to the `init` function. For efficiency, callers should use this + * value rather than capturing it in the `init` lambda, allowing that lambda to be hoisted. + * @param fromSymbol Is this type being initialized from a `Symbol`, rather than from byte code? + * @param init Function to initialize the info of this `BType`. 
During execution of this function, + * code _may_ reenter into `apply(internalName, ...)` and retrieve the initializing + * `ClassBType`. + * @tparam T The type of the state that will be threaded into the `init` function. + * @return The `ClassBType` + */ + final def apply[T](internalName: InternalName, t: T, fromSymbol: Boolean)(init: (ClassBType, T) => Either[NoClassBTypeInfo, ClassInfo]): ClassBType = { val cached = classBTypeCache.get(internalName) if (cached ne null) cached else { @@ -823,10 +825,10 @@ abstract class BTypes { newRes.synchronized { classBTypeCache.putIfAbsent(internalName, newRes) match { case null => - newRes._info = init(newRes) + newRes._info = init(newRes, t) newRes.checkInfoConsistency() newRes - case old => + case old => old } } @@ -876,7 +878,7 @@ abstract class BTypes { outerName: Option[String], innerName: Option[String], isStaticNestedClass: Boolean, - exitingTyperPrivate: Boolean) + enteringTyperPrivate: Boolean) /** * This class holds the data for an entry in the InnerClass table. 
See the InnerClass summary @@ -897,13 +899,44 @@ abstract class BTypes { case _ => 1 } + @tailrec def elementType: BType = componentType match { case a: ArrayBType => a.elementType case t => t } } - final case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType + final case class MethodBType(argumentTypes: Array[BType], returnType: BType) extends BType + + object BTypeExporter extends AutoCloseable { + private[this] val builderTL: ThreadLocal[StringBuilder] = new ThreadLocal[StringBuilder](){ + override protected def initialValue: StringBuilder = new StringBuilder(64) + } + + final def btypeToString(btype: BType): String = { + val builder = builderTL.get() + builder.setLength(0) + appendBType(builder, btype) + builder.toString + } + + final def appendBType(builder: StringBuilder, btype: BType): Unit = btype match { + case p: PrimitiveBType => builder.append(p.desc) + case ClassBType(internalName) => builder.append('L').append(internalName).append(';') + case ArrayBType(component) => builder.append('['); appendBType(builder, component) + case MethodBType(args, res) => + builder.append('(') + args.foreach(appendBType(builder, _)) + builder.append(')') + appendBType(builder, res) + } + def close(): Unit = { + // This will eagerly remove the thread local from the calling thread's ThreadLocalMap. It won't + // do the same for other threads used by `-Ybackend-parallelism=N`, but in practice this doesn't + // matter as that thread pool is shutdown at the end of compilation. + builderTL.remove() + } + } /* Some definitions that are required for the implementation of BTypes. 
They are abstract because * initializing them requires information from types / symbols, which is not accessible here in @@ -1104,17 +1137,21 @@ object BTypes { */ final case class InlineInfo(isEffectivelyFinal: Boolean, sam: Option[String], - methodInfos: Map[(String, String), MethodInlineInfo], + methodInfos: collection.SortedMap[(String, String), MethodInlineInfo], warning: Option[ClassInlineInfoWarning]) { lazy val methodInfosSorted: IndexedSeq[((String, String), MethodInlineInfo)] = { val result = new Array[((String, String), MethodInlineInfo)](methodInfos.size) - methodInfos.copyToArray(result) + var i = 0 + methodInfos.foreachEntry { (ownerAndName, info) => + result(i) = (ownerAndName, info) + i += 1 + } scala.util.Sorting.quickSort(result)(Ordering.by(_._1)) - result + unsafeWrapArray(result) } } - val EmptyInlineInfo = InlineInfo(false, None, Map.empty, None) + val EmptyInlineInfo = InlineInfo(isEffectivelyFinal = false, sam = None, methodInfos = SortedMap.empty, warning = None) /** * Metadata about a method, used by the inliner. 
@@ -1123,19 +1160,19 @@ object BTypes { * @param annotatedInline True if the method is annotated `@inline` * @param annotatedNoInline True if the method is annotated `@noinline` */ - final case class MethodInlineInfo(effectivelyFinal: Boolean, - annotatedInline: Boolean, - annotatedNoInline: Boolean) + final case class MethodInlineInfo(effectivelyFinal: Boolean = false, + annotatedInline: Boolean = false, + annotatedNoInline: Boolean = false) // no static way (without symbol table instance) to get to nme.ScalaATTR / ScalaSignatureATTR val ScalaAttributeName = "Scala" val ScalaSigAttributeName = "ScalaSig" // when inlining, local variable names of the callee are prefixed with the name of the callee method - val InlinedLocalVariablePrefixMaxLenght = 128 + val InlinedLocalVariablePrefixMaxLength = 128 } -object FlatConcurrentHashMap { - import collection.JavaConverters._ - def empty[K,V]: concurrent.Map[K,V] = - new java.util.concurrent.ConcurrentHashMap[K,V].asScala + +final class ClearableJConcurrentHashMap[K, V] extends scala.collection.mutable.Clearable { + val map = new java.util.concurrent.ConcurrentHashMap[K,V] + override def clear(): Unit = map.clear() } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index a49cd9e7ec65..ebc7c99d27bc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,9 @@ package scala.tools.nsc.backend.jvm -import scala.annotation.switch -import scala.collection.JavaConverters._ +import scala.annotation.{switch, unused} +import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.tools.asm.Opcodes import scala.tools.asm.tree.{ClassNode, InnerClassNode} import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, InternalName, MethodInlineInfo} @@ -38,7 +39,13 @@ abstract class BTypesFromClassfile { * * This method supports both descriptors and internal names. */ - def bTypeForDescriptorOrInternalNameFromClassfile(desc: String): BType = (desc(0): @switch) match { + def bTypeForDescriptorOrInternalNameFromClassfile(descOrIntN: String): BType = (descOrIntN(0): @switch) match { + case '[' => ArrayBType(bTypeForDescriptorFromClassfile(descOrIntN.substring(1))) + case 'L' if descOrIntN.last == ';' => bTypeForDescriptorFromClassfile(descOrIntN) + case _ => classBTypeFromParsedClassfile(descOrIntN) + } + + def bTypeForDescriptorFromClassfile(desc: String): BType = (desc(0): @switch) match { case 'V' => UNIT case 'Z' => BOOL case 'C' => CHAR @@ -48,17 +55,17 @@ abstract class BTypesFromClassfile { case 'F' => FLOAT case 'J' => LONG case 'D' => DOUBLE - case '[' => ArrayBType(bTypeForDescriptorOrInternalNameFromClassfile(desc.substring(1))) + case '[' => ArrayBType(bTypeForDescriptorFromClassfile(desc.substring(1))) case 'L' if desc.last == ';' => classBTypeFromParsedClassfile(desc.substring(1, desc.length - 1)) - case _ => classBTypeFromParsedClassfile(desc) + case _ => throw new IllegalArgumentException(s"Not a descriptor: $desc") } /** - * Parse the classfile for `internalName` and construct the [[ClassBType]]. If the classfile cannot + * Parse the classfile for `internalName` and construct the [[BTypes.ClassBType]]. If the classfile cannot * be found in the `byteCodeRepository`, the `info` of the resulting ClassBType is undefined. 
*/ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = { - ClassBType(internalName, fromSymbol = false) { res: ClassBType => + ClassBType(internalName, internalName, fromSymbol = false) { (res: ClassBType, internalName) => byteCodeRepository.classNode(internalName) match { case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) case Right(c) => computeClassInfoFromClassNode(c, res) @@ -67,15 +74,15 @@ abstract class BTypesFromClassfile { } /** - * Construct the [[ClassBType]] for a parsed classfile. + * Construct the [[BTypes.ClassBType]] for a parsed classfile. */ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = { - ClassBType(classNode.name, fromSymbol = false) { res: ClassBType => + ClassBType(classNode.name, classNode, fromSymbol = false) { (res: ClassBType, classNode) => computeClassInfoFromClassNode(classNode, res) } } - private def computeClassInfoFromClassNode(classNode: ClassNode, classBType: ClassBType): Right[Nothing, ClassInfo] = { + private def computeClassInfoFromClassNode(classNode: ClassNode, @unused classBType: ClassBType): Right[Nothing, ClassInfo] = { val superClass = classNode.superName match { case null => assert(classNode.name == ObjectRef.internalName, s"class with missing super type: ${classNode.name}") @@ -86,7 +93,7 @@ abstract class BTypesFromClassfile { val flags = classNode.access - /** + /* * Find all nested classes of classNode. The innerClasses attribute contains all nested classes * that are declared inside classNode or used in the bytecode of classNode. So some of them are * nested in some other class than classNode, and we need to filter them. 
@@ -105,9 +112,9 @@ abstract class BTypesFromClassfile { }) } - def nestedClasses: List[ClassBType] = classNode.innerClasses.asScala.collect({ + def nestedClasses: List[ClassBType] = classNode.innerClasses.asScala.iterator.collect({ case i if nestedInCurrentClass(i) => classBTypeFromParsedClassfile(i.name) - })(collection.breakOut) + }).toList // if classNode is a nested class, it has an innerClass attribute for itself. in this // case we build the NestedInfo. @@ -129,7 +136,7 @@ abstract class BTypesFromClassfile { val inlineInfo = inlineInfoFromClassfile(classNode) - val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut) + val interfaces: List[ClassBType] = classNode.interfaces.asScala.iterator.map(classBTypeFromParsedClassfile).toList Right(ClassInfo(superClass, interfaces, flags, Lazy.withoutLock(nestedClasses), Lazy.withoutLock(nestedInfo), inlineInfo)) } @@ -156,13 +163,15 @@ abstract class BTypesFromClassfile { // require special handling. Excluding is OK because they are never inlined. // Here we are parsing from a classfile and we don't need to do anything special. Many of these // primitives don't even exist, for example Any.isInstanceOf. 
- val methodInfos:Map[(String, String),MethodInlineInfo] = classNode.methods.asScala.map(methodNode => { + val methodInfos = new mutable.TreeMap[(String, String), MethodInlineInfo]() + classNode.methods.forEach(methodNode => { val info = MethodInlineInfo( effectivelyFinal = BytecodeUtils.isFinalMethod(methodNode), annotatedInline = false, annotatedNoInline = false) - ((methodNode.name, methodNode.desc), info) - })(scala.collection.breakOut) + methodInfos((methodNode.name, methodNode.desc)) = info + }) + InlineInfo( isEffectivelyFinal = BytecodeUtils.isFinalClass(classNode), sam = inlinerHeuristics.javaSam(classNode.name), diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index a49c8604bc22..7cce66038f97 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -91,9 +91,9 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") + // note: classSym can be scala.Array, see https://github.com/scala/bug/issues/12225#issuecomment-729687859 if (global.settings.isDebug) { - // OPT these assertions have too much performance overhead to run unconditionally - assertClassNotArrayNotPrimitive(classSym) + // OPT this assertion has too much performance overhead to run unconditionally assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym") } @@ -103,14 +103,18 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val internalName = classSym.javaBinaryNameString // The new ClassBType is added to the map via its apply, before we set its info. This // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. - ClassBType(internalName, fromSymbol = true) { res:ClassBType => - if (completeSilentlyAndCheckErroneous(classSym)) - Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) - else computeClassInfo(classSym, res) - } + val btype = ClassBType.apply(internalName, classSym, fromSymbol = true)(classBTypeFromSymbolInit) + if (currentRun.compiles(classSym)) + assert(btype.fromSymbol, s"ClassBType for class being compiled was already created from a classfile: ${classSym.fullName}") + btype } } + private val classBTypeFromSymbolInit = (res: ClassBType, classSym: Symbol) => + if (completeSilentlyAndCheckErroneous(classSym)) + Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) + else computeClassInfo(classSym, res) + /** * Builds a [[MethodBType]] for a method symbol. 
*/ @@ -126,22 +130,26 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val resultType: BType = if (isConstructor) UNIT else typeToBType(tpe.resultType) - MethodBType(tpe.paramTypes map typeToBType, resultType) + val params = tpe.params + // OPT allocation hotspot + val paramBTypes = BType.newArray(params.length) + mapToArray(params, paramBTypes, 0)(param => typeToBType(param.tpe)) + MethodBType(paramBTypes, resultType) } - def bootstrapMethodArg(t: Constant, pos: Position): AnyRef = t match { - case Constant(mt: Type) => + def bootstrapMethodArg(t: Constant, pos: Position): AnyRef = t.value match { + case mt: Type => transformedType(mt) match { case mt1: MethodType => methodBTypeFromMethodType(mt1, isConstructor = false).toASMType - case t => - typeToBType(t).toASMType + case transformed => + typeToBType(transformed).toASMType } - case c @ Constant(sym: Symbol) if sym.owner.isJavaDefined && sym.isStaticMember => staticHandleFromSymbol(sym) - case c @ Constant(sym: Symbol) => handleFromMethodSymbol(sym) - case c @ Constant(value: String) => value - case c @ Constant(value) if c.isNonUnitAnyVal => c.value.asInstanceOf[AnyRef] - case _ => reporter.error(pos, "Unable to convert static argument of ApplyDynamic into a classfile constant: " + t); null + case sym: Symbol if sym.owner.isJavaDefined && sym.isStaticMember => staticHandleFromSymbol(sym) + case sym: Symbol => handleFromMethodSymbol(sym) + case value: String => value + case value if t.isNonUnitAnyVal => value.asInstanceOf[AnyRef] + case _ => reporter.error(pos, s"Unable to convert static argument of ApplyDynamic into a classfile constant: $t"); null } def staticHandleFromSymbol(sym: Symbol): asm.Handle = { @@ -179,16 +187,19 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { final def typeToBType(t: Type): BType = { import definitions.ArrayClass - /** + /* * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. 
* The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. */ def primitiveOrClassToBType(sym: Symbol): BType = { assertClassNotArray(sym) - primitiveTypeToBType.getOrElse(sym, classBTypeFromSymbol(sym)) + primitiveTypeToBType.getOrElse(sym, null) match { + case null => classBTypeFromSymbol(sym) + case res => res + } } - /** + /* * When compiling Array.scala, the type parameter T is not erased and shows up in method * signatures, e.g. `def apply(i: Int): T`. A TypeRef for T is replaced by ObjectRef. */ @@ -222,8 +233,9 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { case SingleType(_, sym) => primitiveOrClassToBType(sym) case ConstantType(_) => typeToBType(t.underlying) case RefinedType(parents, _) => parents.map(typeToBType(_).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b).get) - case AnnotatedType(_, t) => typeToBType(t) - case ExistentialType(_, t) => typeToBType(t) + case AnnotatedType(_, at) => typeToBType(at) + case ExistentialType(_, et) => typeToBType(et) + case x => throw new MatchError(x) } } } @@ -233,32 +245,13 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(sym != definitions.ArrayClass || isCompilingArray, sym) } - def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { - assertClassNotArray(sym) - assert(!primitiveTypeToBType.contains(sym) || isCompilingPrimitive, sym) - } - def implementedInterfaces(classSym: Symbol): List[Symbol] = { - // Additional interface parents based on annotations and other cues - def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match { - case RemoteAttr => Some(RemoteInterfaceClass.tpe) - case _ => None - } - - // scala/bug#9393: java annotations are interfaces, but the classfile / java source parsers make them look like classes. 
- def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait || sym.hasJavaAnnotationFlag - val classParents = { - val parents = classSym.info.parents - // scala/bug#9393: the classfile / java source parsers add Annotation and ClassfileAnnotation to the - // parents of a java annotations. undo this for the backend (where we need classfile-level information). - if (classSym.hasJavaAnnotationFlag) parents.filterNot(c => c.typeSymbol == ClassfileAnnotationClass || c.typeSymbol == AnnotationClass) - else parents - } + def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait - val allParents = classParents ++ classSym.annotations.flatMap(newParentForAnnotation) + val classParents = classSym.info.parents - val minimizedParents = if (classSym.isJavaDefined) allParents else erasure.minimizeParents(classSym, allParents) + val minimizedParents = if (classSym.isJavaDefined) classParents else erasure.minimizeParents(classSym, classParents) // We keep the superClass when computing minimizeParents to eliminate more interfaces. // Example: T can be eliminated from D // trait T @@ -283,17 +276,17 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { * * Specialized classes are always considered top-level, see comment in BTypes. */ - private def memberClassesForInnerClassTable(classSymbol: Symbol): List[Symbol] = classSymbol.info.decls.collect({ + private def memberClassesForInnerClassTable(classSymbol: Symbol): List[Symbol] = List.from(classSymbol.info.decls.iterator.collect({ case sym if sym.isClass && !considerAsTopLevelImplementationArtifact(sym) => sym case sym if sym.isModule && !considerAsTopLevelImplementationArtifact(sym) => val r = exitingPickler(sym.moduleClass) assert(r != NoSymbol, sym.fullLocationString) r - })(collection.breakOut) + })) private def computeClassInfo(classSym: Symbol, classBType: ClassBType): Right[Nothing, ClassInfo] = { - /** + /* * Reconstruct the classfile flags from a Java defined class symbol. 
* * The implementation of this method is slightly different from `javaFlags` in BTypesFromSymbols. @@ -312,24 +305,16 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def javaClassfileFlags(classSym: Symbol): Int = { assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}") import asm.Opcodes._ - def enumFlags = ACC_ENUM | { - // Java enums have the `ACC_ABSTRACT` flag if they have a deferred method. - // We cannot trust `hasAbstractFlag`: the ClassfileParser adds `ABSTRACT` and `SEALED` to all - // Java enums for exhaustiveness checking. - val hasAbstractMethod = classSym.info.decls.exists(s => s.isMethod && s.isDeferred) - if (hasAbstractMethod) ACC_ABSTRACT else 0 - } - // scala/bug#9393: the classfile / java source parser make java annotation symbols look like classes. - // here we recover the actual classfile flags. - ( if (classSym.hasJavaAnnotationFlag) ACC_ANNOTATION | ACC_INTERFACE | ACC_ABSTRACT else 0) | - ( if (classSym.isPublic) ACC_PUBLIC else 0) | - ( if (classSym.isFinal) ACC_FINAL else 0) | + // scala/bug#9393: the classfile / java source parser make java annotation symbols look like classes. + // here we recover the actual classfile flags. + (if (classSym.hasJavaAnnotationFlag) ACC_ANNOTATION | ACC_INTERFACE | ACC_ABSTRACT else 0) | + (if (classSym.isPublic) ACC_PUBLIC else 0) | + (if (classSym.isFinal) ACC_FINAL else 0) | // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces.) 
- ( if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER) | - // for Java enums, we cannot trust `hasAbstractFlag` (see comment in enumFlags)) - ( if (!classSym.hasJavaEnumFlag && classSym.hasAbstractFlag) ACC_ABSTRACT else 0) | - ( if (classSym.isArtifact) ACC_SYNTHETIC else 0) | - ( if (classSym.hasJavaEnumFlag) enumFlags else 0) + (if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER) | + (if (classSym.hasAbstractFlag) ACC_ABSTRACT else 0) | + (if (classSym.isArtifact) ACC_SYNTHETIC else 0) | + (if (classSym.hasJavaEnumFlag) ACC_ENUM else 0) } // Check for hasAnnotationFlag for scala/bug#9393: the classfile / java source parsers add @@ -433,21 +418,23 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { nestedClasses ++ companionModuleMembers } - /** + /* * For nested java classes, the scala compiler creates both a class and a module (and therefore * a module class) symbol. For example, in `class A { class B {} }`, the nestedClassSymbols * for A contain both the class B and the module class B. * Here we get rid of the module class B, making sure that the class B is present. */ - def nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => { - if (s.isJavaDefined && s.isModuleClass) { - // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that - // returns NoSymbol, so it doesn't work. - val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) - assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") - false - } else true - }) + def nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter { s => + val ok = !(s.isJavaDefined && s.isModuleClass) && !s.hasPackageFlag + if (!ok) + if (!s.hasPackageFlag) { + // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that + // returns NoSymbol, so it doesn't work. 
+ val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) + assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") + } + ok + } val shouldBeLazy = classSym.isJavaDefined || !currentRun.compiles(classSym) @@ -525,10 +512,10 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { // phase travel necessary: after flatten, the name includes the name of outer classes. // if some outer name contains $anon, a non-anon class is considered anon. if (exitingPickler(innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction)) None - else Some(innerClassSym.rawname + innerClassSym.moduleSuffix) // moduleSuffix for module classes + else Some(s"${innerClassSym.rawname}${innerClassSym.moduleSuffix}") // moduleSuffix for module classes } - Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass, exitingTyper(innerClassSym.isPrivate))) + Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass, enteringTyper(innerClassSym.isPrivate))) } /** @@ -564,7 +551,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { } /** - * Build the [[InlineInfo]] for a class symbol. + * Build the [[scala.tools.nsc.backend.jvm.BTypes.InlineInfo]] for a class symbol. */ def buildInlineInfoFromClassSymbol(classSym: Symbol): InlineInfo = { val isEffectivelyFinal = classSym.isEffectivelyFinal @@ -579,7 +566,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { // empty parameter list in uncurry and would therefore be picked as SAM. 
// Similarly, the fields phases adds abstract trait setters, which should not be considered // abstract for SAMs (they do disqualify the SAM from LMF treatment, - // but an anonymous subclasss can be spun up by scalac after making just the single abstract method concrete) + // but an anonymous subclass can be spun up by scalac after making just the single abstract method concrete) val samSym = exitingPickler(definitions.samOf(classSym.tpe)) if (samSym == NoSymbol) None else Some(samSym.javaSimpleName.toString + methodBTypeFromSymbol(samSym).descriptor) @@ -601,13 +588,13 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]]. - val methodInlineInfos = methods.flatMap({ - case methodSym => + val methodInlineInfos = new collection.mutable.TreeMap[(String, String), MethodInlineInfo]() + methods.foreach { + methodSym => if (completeSilentlyAndCheckErroneous(methodSym)) { // Happens due to scala/bug#9111. Just don't provide any MethodInlineInfo for that method, we don't need fail the compiler. 
if (!classSym.isJavaDefined) devWarning("scala/bug#9111 should only be possible for Java classes") warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName)) - Nil } else { val name = methodSym.javaSimpleName.toString // same as in genDefDef val signature = (name, methodBTypeFromSymbol(methodSym).descriptor) @@ -630,20 +617,20 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val selfParam = methodSym.newSyntheticValueParam(methodSym.owner.typeConstructor, nme.SELF) val staticMethodType = methodSym.info match { case mt@MethodType(params, res) => copyMethodType(mt, selfParam :: params, res) + case x => throw new MatchError(x) } val staticMethodSignature = (staticName, methodBTypeFromMethodType(staticMethodType, isConstructor = false).descriptor) val staticMethodInfo = MethodInlineInfo( effectivelyFinal = true, annotatedInline = info.annotatedInline, annotatedNoInline = info.annotatedNoInline) - if (methodSym.isMixinConstructor) - (staticMethodSignature, staticMethodInfo) :: Nil - else - (signature, info) :: (staticMethodSignature, staticMethodInfo) :: Nil + methodInlineInfos(staticMethodSignature) = staticMethodInfo + if (!methodSym.isMixinConstructor) + methodInlineInfos(signature) = info } else - (signature, info) :: Nil + methodInlineInfos(signature) = info } - }).toMap + } InlineInfo(isEffectivelyFinal, sam, methodInlineInfos, warning) } @@ -656,7 +643,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = { assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym") val internalName = moduleClassSym.javaBinaryNameString.stripSuffix(nme.MODULE_SUFFIX_STRING) - ClassBType(internalName, fromSymbol = true) { c: ClassBType => + ClassBType(internalName, moduleClassSym, fromSymbol = true) { (_: ClassBType, moduleClassSym) => val shouldBeLazy = moduleClassSym.isJavaDefined || 
!currentRun.compiles(moduleClassSym) val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) Right(ClassInfo( @@ -669,19 +656,6 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { } } - def beanInfoClassClassBType(mainClass: Symbol): ClassBType = { - val internalName = mainClass.javaBinaryNameString + "BeanInfo" - ClassBType(internalName, fromSymbol = true) { c: ClassBType => - Right(ClassInfo( - superClass = Some(sbScalaBeanInfoRef), - interfaces = Nil, - flags = javaFlags(mainClass), - nestedClasses = Lazy.eagerNil, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo)) - } - } - /** * True for module classes of package level objects. The backend will generate a mirror class for * such objects. @@ -700,9 +674,6 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { isOriginallyStaticOwner(sym.originalOwner) // isStaticModuleClass is a source-level property, see comment on isOriginallyStaticOwner } - // legacy, to be removed when the @remote annotation gets removed - final def isRemote(s: Symbol) = s hasAnnotation definitions.RemoteAttr - final def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0 /** * Return the Java modifiers for the given symbol. @@ -736,11 +707,12 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { // Note that the presence of the `FINAL` flag on a symbol does not correspond 1:1 to emitting // ACC_FINAL in bytecode. // - // Top-level modules are marked ACC_FINAL in bytecode (even without the FINAL flag). Nested - // objects don't get the flag to allow overriding (under -Yoverride-objects, scala/bug#5676). + // Top-level modules are marked ACC_FINAL in bytecode (even without the FINAL flag). + // Currently, nested objects don't get the flag (originally, to allow overriding under the now-removed -Yoverride-objects, scala/bug#5676). 
+ // TODO: give nested objects the ACC_FINAL flag again, since we won't let them be overridden // // For fields, only eager val fields can receive ACC_FINAL. vars or lazy vals can't: - // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3 + // Source: https://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3 // "Another problem is that the specification allows aggressive // optimization of final fields. Within a thread, it is permissible to // reorder reads of a final field with those modifications of a final diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala index 26012df1e04f..69ebfc21c0bd 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -36,16 +36,10 @@ object BackendReporting { def assertionError(message: String): Nothing = throw new AssertionError(message) implicit class RightBiasedEither[A, B](val v: Either[A, B]) extends AnyVal { - def withFilter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match { - case Left(_) => v - case Right(e) => if (f(e)) v else Left(empty) // scalaz.\/ requires an implicit Monoid m to get m.empty - } + def withFilter(f: B => Boolean)(implicit empty: A): Either[A, B] = v.filterOrElse(f, empty) /** Get the value, fail with an assertion if this is an error. */ - def get: B = { - assert(v.isRight, v.left.get) - v.right.get - } + def get: B = v.fold(a => assertionError(s"$a"), identity) /** * Get the right value of an `Either` by throwing a potential error message. Can simplify the @@ -56,10 +50,7 @@ object BackendReporting { * eitherOne.orThrow .... 
eitherTwo.orThrow ... eitherThree.orThrow * } */ - def orThrow: B = v match { - case Left(m) => throw Invalid(m) - case Right(t) => t - } + def orThrow: B = v.fold(a => throw Invalid(a), identity) } case class Invalid[A](e: A) extends ControlThrowable @@ -73,11 +64,13 @@ object BackendReporting { def emitWarning(settings: CompilerSettings): Boolean } - // Method withFilter in RightBiasedEither requires an implicit empty value. Taking the value here - // in scope allows for-comprehensions that desugar into withFilter calls (for example when using a - // tuple de-constructor). - implicit object emptyOptimizerWarning extends OptimizerWarning { - def emitWarning(settings: CompilerSettings): Boolean = false + object OptimizerWarning { + // Method withFilter in RightBiasedEither requires an implicit empty value. Taking the value here + // in scope allows for-comprehensions that desugar into withFilter calls (for example when using a + // tuple de-constructor). + implicit val emptyOptimizerWarning: OptimizerWarning = new OptimizerWarning { + def emitWarning(settings: CompilerSettings): Boolean = false + } } sealed trait MissingBytecodeWarning extends OptimizerWarning { @@ -116,11 +109,11 @@ object BackendReporting { } } - case class ClassNotFound(internalName: InternalName, definedInJavaSource: Boolean) extends MissingBytecodeWarning - case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning { + final case class ClassNotFound(internalName: InternalName, definedInJavaSource: Boolean) extends MissingBytecodeWarning + final case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning { def isArrayMethod = ownerInternalNameOrArrayDescriptor.charAt(0) == '[' } - case class FieldNotFound(name: String, descriptor: String, ownerInternalName: 
InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning + final case class FieldNotFound(name: String, descriptor: String, ownerInternalName: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning sealed trait NoClassBTypeInfo extends OptimizerWarning { override def toString = this match { @@ -137,8 +130,8 @@ object BackendReporting { } } - case class NoClassBTypeInfoMissingBytecode(cause: MissingBytecodeWarning) extends NoClassBTypeInfo - case class NoClassBTypeInfoClassSymbolInfoFailedSI9111(classFullName: String) extends NoClassBTypeInfo + final case class NoClassBTypeInfoMissingBytecode(cause: MissingBytecodeWarning) extends NoClassBTypeInfo + final case class NoClassBTypeInfoClassSymbolInfoFailedSI9111(classFullName: String) extends NoClassBTypeInfo /** * Used in the CallGraph for nodes where an issue occurred determining the callee information. @@ -172,9 +165,9 @@ object BackendReporting { } } - case class MethodInlineInfoIncomplete(declarationClass: InternalName, name: String, descriptor: String, cause: ClassInlineInfoWarning) extends CalleeInfoWarning - case class MethodInlineInfoMissing(declarationClass: InternalName, name: String, descriptor: String, cause: Option[ClassInlineInfoWarning]) extends CalleeInfoWarning - case class MethodInlineInfoError(declarationClass: InternalName, name: String, descriptor: String, cause: NoClassBTypeInfo) extends CalleeInfoWarning + final case class MethodInlineInfoIncomplete(declarationClass: InternalName, name: String, descriptor: String, cause: ClassInlineInfoWarning) extends CalleeInfoWarning + final case class MethodInlineInfoMissing(declarationClass: InternalName, name: String, descriptor: String, cause: Option[ClassInlineInfoWarning]) extends CalleeInfoWarning + final case class MethodInlineInfoError(declarationClass: InternalName, name: String, descriptor: String, cause: NoClassBTypeInfo) extends CalleeInfoWarning sealed trait CannotInlineWarning extends 
OptimizerWarning { def calleeDeclarationClass: InternalName @@ -192,21 +185,27 @@ object BackendReporting { val reason = this match { case CalleeNotFinal(_, _, _, _) => s"The method is not final and may be overridden." - case IllegalAccessInstruction(_, _, _, _, callsiteClass, instruction) => - s"The callee $calleeMethodSig contains the instruction ${AsmUtils.textify(instruction)}" + + case IllegalAccessInstructions(_, _, _, _, callsiteClass, instructions) => + val suffix = if (instructions.lengthCompare(1) > 0) "s" else "" + s"The callee $calleeMethodSig contains the instruction$suffix ${instructions.map(AsmUtils.textify).mkString(", ")}" + s"\nthat would cause an IllegalAccessError when inlined into class $callsiteClass." case IllegalAccessCheckFailed(_, _, _, _, callsiteClass, instruction, cause) => - s"Failed to check if $calleeMethodSig can be safely inlined to $callsiteClass without causing an IllegalAccessError. Checking instruction ${AsmUtils.textify(instruction)} failed:\n" + cause + sm"""|Failed to check if $calleeMethodSig can be safely inlined to $callsiteClass without causing an IllegalAccessError. + |Checking failed for instruction ${AsmUtils.textify(instruction)}: + |$cause""" case MethodWithHandlerCalledOnNonEmptyStack(_, _, _, _, callsiteClass, callsiteName, callsiteDesc) => - s"""The operand stack at the callsite in ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)} contains more values than the - |arguments expected by the callee $calleeMethodSig. These values would be discarded - |when entering an exception handler declared in the inlined method.""".stripMargin + sm"""|The operand stack at the callsite in ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)} contains more values than the + |arguments expected by the callee $calleeMethodSig. 
These values would be discarded + |when entering an exception handler declared in the inlined method.""" case SynchronizedMethod(_, _, _, _) => s"Method $calleeMethodSig cannot be inlined because it is synchronized." + case _: NoBytecode => + s"Method $calleeMethodSig cannot be inlined because it does not have any instructions, even though it is not abstract. The class may come from a signature jar file (such as a Bazel 'hjar')." + case StrictfpMismatch(_, _, _, _, callsiteClass, callsiteName, callsiteDesc) => s"""The callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)} |does not have the same strictfp mode as the callee $calleeMethodSig. @@ -225,22 +224,23 @@ object BackendReporting { annotatedInline && settings.optWarningEmitAtInlineFailed } } - case class CalleeNotFinal(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning - case class IllegalAccessInstruction(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, - callsiteClass: InternalName, instruction: AbstractInsnNode) extends CannotInlineWarning - case class IllegalAccessCheckFailed(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, + final case class CalleeNotFinal(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning + final case class IllegalAccessInstructions(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, + callsiteClass: InternalName, instructions: List[AbstractInsnNode]) extends CannotInlineWarning + final case class IllegalAccessCheckFailed(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, callsiteClass: InternalName, instruction: AbstractInsnNode, cause: OptimizerWarning) extends CannotInlineWarning - case class 
MethodWithHandlerCalledOnNonEmptyStack(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, + final case class MethodWithHandlerCalledOnNonEmptyStack(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning - case class SynchronizedMethod(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning - case class StrictfpMismatch(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, + final case class SynchronizedMethod(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning + final case class NoBytecode(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning + final case class StrictfpMismatch(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning case class ResultingMethodTooLarge(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean, callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning // TODO: this should be a subtype of CannotInlineWarning // but at the place where it's created (in findIllegalAccess) we don't have the necessary data (calleeName, calleeDescriptor). - case object UnknownInvokeDynamicInstruction extends OptimizerWarning { + final case object UnknownInvokeDynamicInstruction extends OptimizerWarning { override def toString = "The callee contains an InvokeDynamic instruction with an unknown bootstrap method (not a LambdaMetaFactory)." 
def emitWarning(settings: CompilerSettings): Boolean = settings.optWarningEmitAnyInlineFailed } @@ -264,8 +264,8 @@ object BackendReporting { s"The closure body invocation cannot be rewritten because the target method is not accessible in class $callsiteClass." } } - case class RewriteClosureAccessCheckFailed(pos: Position, cause: OptimizerWarning) extends RewriteClosureApplyToClosureBodyFailed - case class RewriteClosureIllegalAccess(pos: Position, callsiteClass: InternalName) extends RewriteClosureApplyToClosureBodyFailed + final case class RewriteClosureAccessCheckFailed(pos: Position, cause: OptimizerWarning) extends RewriteClosureApplyToClosureBodyFailed + final case class RewriteClosureIllegalAccess(pos: Position, callsiteClass: InternalName) extends RewriteClosureApplyToClosureBodyFailed /** * Used in the InlineInfo of a ClassBType, when some issue occurred obtaining the inline information. @@ -293,8 +293,8 @@ object BackendReporting { } } - case class NoInlineInfoAttribute(internalName: InternalName) extends ClassInlineInfoWarning - case class ClassSymbolInfoFailureSI9111(classFullName: String) extends ClassInlineInfoWarning - case class ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass: ClassNotFound) extends ClassInlineInfoWarning - case class UnknownScalaInlineInfoVersion(internalName: InternalName, version: Int) extends ClassInlineInfoWarning + final case class NoInlineInfoAttribute(internalName: InternalName) extends ClassInlineInfoWarning + final case class ClassSymbolInfoFailureSI9111(classFullName: String) extends ClassInlineInfoWarning + final case class ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass: ClassNotFound) extends ClassInlineInfoWarning + final case class UnknownScalaInlineInfoVersion(internalName: InternalName, version: Int) extends ClassInlineInfoWarning } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala index 
6388a41bd4b1..b0ee4f2e044d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java index 5a4874d7d90e..0159edd7af54 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 82dabf72cdd8..781d528a9762 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,7 @@ package scala.tools.nsc.backend.jvm import java.io.{DataOutputStream, IOException} import java.nio.ByteBuffer import java.nio.channels.{ClosedByInterruptException, FileChannel} -import java.nio.charset.StandardCharsets +import java.nio.charset.StandardCharsets.UTF_8 import java.nio.file._ import java.nio.file.attribute.FileAttribute import java.util @@ -29,6 +29,7 @@ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.plugins.{OutputFileWriter, Plugin} import scala.tools.nsc.util.JarFactory +import scala.util.chaining._ abstract class ClassfileWriters { val postProcessor: PostProcessor @@ -43,11 +44,11 @@ abstract class ClassfileWriters { * * Operations are threadsafe. */ - sealed trait ClassfileWriter extends OutputFileWriter { + sealed trait ClassfileWriter extends OutputFileWriter with AutoCloseable { /** * Write a classfile */ - def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile) + def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit /** * Close the writer. Behavior is undefined after a call to `close`. 
@@ -77,18 +78,20 @@ abstract class ClassfileWriters { val basicClassWriter = settings.outputDirs.getSingleOutput match { case Some(dest) => new SingleClassWriter(FileWriter(global, dest, jarManifestMainClass)) case None => - val distinctOutputs: Set[AbstractFile] = settings.outputDirs.outputs.map(_._2)(scala.collection.breakOut) + val distinctOutputs: Set[AbstractFile] = settings.outputDirs.outputs.iterator.map(_._2).toSet if (distinctOutputs.size == 1) new SingleClassWriter(FileWriter(global, distinctOutputs.head, jarManifestMainClass)) else { val sourceToOutput: Map[AbstractFile, AbstractFile] = global.currentRun.units.map(unit => (unit.source.file, frontendAccess.compilerSettings.outputDirectory(unit.source.file))).toMap - new MultiClassWriter(sourceToOutput, distinctOutputs.map { output: AbstractFile => output -> FileWriter(global, output, jarManifestMainClass) }(scala.collection.breakOut)) + new MultiClassWriter(sourceToOutput, distinctOutputs.iterator.map { output: AbstractFile => output -> FileWriter(global, output, jarManifestMainClass) }.toMap) } } - val withAdditionalFormats = if (settings.Ygenasmp.valueSetByUser.isEmpty && settings.Ydumpclasses.valueSetByUser.isEmpty) basicClassWriter else { - val asmp = settings.Ygenasmp.valueSetByUser map { dir: String => FileWriter(global, new PlainNioFile(getDirectory(dir)), None) } - val dump = settings.Ydumpclasses.valueSetByUser map { dir: String => FileWriter(global, new PlainNioFile(getDirectory(dir)), None) } - new DebugClassWriter(basicClassWriter, asmp, dump) + val withAdditionalFormats = { + def maybeDir(dir: Option[String]): Option[Path] = dir.map(getDirectory).filter(path => Files.exists(path).tap(ok => if (!ok) frontendAccess.backendReporting.error(NoPosition, s"Output dir does not exist: $path"))) + def writer(out: Path) = FileWriter(global, new PlainNioFile(out), None) + val List(asmp, dump) = List(settings.Ygenasmp, settings.Ydumpclasses).map(s => maybeDir(s.valueSetByUser).map(writer)): @unchecked + 
if (asmp.isEmpty && dump.isEmpty) basicClassWriter + else new DebugClassWriter(basicClassWriter, asmp, dump) } val enableStats = settings.areStatisticsEnabled && settings.YaddBackendThreads.value == 1 @@ -130,7 +133,7 @@ abstract class ClassfileWriters { override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { basic.writeClass(className, bytes, sourceFile) asmp.foreach { writer => - val asmBytes = AsmUtils.textify(AsmUtils.readClass(bytes)).getBytes(StandardCharsets.UTF_8) + val asmBytes = AsmUtils.textify(AsmUtils.readClass(bytes)).getBytes(UTF_8) writer.writeFile(classRelativePath(className, ".asm"), asmBytes) } dump.foreach { writer => @@ -171,32 +174,36 @@ abstract class ClassfileWriters { } object FileWriter { - def apply(global: Global, file: AbstractFile, jarManifestMainClass: Option[String]): FileWriter = { - if (file hasExtension "jar") { + def apply(global: Global, file: AbstractFile, jarManifestMainClass: Option[String]): FileWriter = + if (file.hasExtension("jar")) { val jarCompressionLevel = global.settings.YjarCompressionLevel.value - val jarFactory = Class.forName(global.settings.YjarFactory.value).asSubclass(classOf[JarFactory]).newInstance() + val jarFactory = + Class.forName(global.settings.YjarFactory.value) + .asSubclass(classOf[JarFactory]) + .getDeclaredConstructor().newInstance() new JarEntryWriter(file, jarManifestMainClass, jarCompressionLevel, jarFactory, global.plugins) - } else if (file.isVirtual) { - new VirtualFileWriter(file) - } else if (file.isDirectory) { - new DirEntryWriter(file.file.toPath) - } else { - throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") } - } + else if (file.isVirtual) new VirtualFileWriter(file) + else if (file.isDirectory) new DirEntryWriter(file.file.toPath) + else throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") } private final class JarEntryWriter(file: 
AbstractFile, mainClass: Option[String], compressionLevel: Int, jarFactory: JarFactory, plugins: List[Plugin]) extends FileWriter { //keep these imports local - avoid confusion with scala naming - import java.util.jar.Attributes.Name + import java.util.jar.Attributes.Name.{MANIFEST_VERSION, MAIN_CLASS} import java.util.jar.{JarOutputStream, Manifest} val storeOnly = compressionLevel == Deflater.NO_COMPRESSION val jarWriter: JarOutputStream = { - val manifest = new Manifest() - mainClass foreach { c => manifest.getMainAttributes.put(Name.MAIN_CLASS, c) } - plugins foreach (_.augmentManifest(file, manifest)) + import scala.util.Properties._ + val manifest = new Manifest + val attrs = manifest.getMainAttributes + attrs.put(MANIFEST_VERSION, "1.0") + attrs.put(ScalaCompilerVersion, versionNumberString) + mainClass.foreach(c => attrs.put(MAIN_CLASS, c)) + plugins.foreach(_.augmentManifest(file, manifest)) + val jar = jarFactory.createJarOutputStream(file, manifest) jar.setLevel(compressionLevel) if (storeOnly) jar.setMethod(ZipOutputStream.STORED) @@ -228,14 +235,22 @@ abstract class ClassfileWriters { } private final class DirEntryWriter(base: Path) extends FileWriter { + import scala.util.Properties.{isWin => isWindows} val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() val noAttributes = Array.empty[FileAttribute[_]] - private val isWindows = scala.util.Properties.isWin + + private def checkName(component: Path): Unit = if (isWindows) { + val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r + val name = component.toString + def warnSpecial(): Unit = frontendAccess.backendReporting.warning(NoPosition, s"path component is special Windows device: ${name}") + specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial()) + } def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { import java.lang.Boolean.TRUE val parent = filePath.getParent if (!builtPaths.containsKey(parent)) { + 
parent.iterator.forEachRemaining(checkName) try Files.createDirectories(parent, noAttributes: _*) catch { case e: FileAlreadyExistsException => @@ -250,9 +265,10 @@ abstract class ClassfileWriters { current = current.getParent } } + checkName(filePath.getFileName()) } - // the common case is that we are are creating a new file, and on MS Windows the create and truncate is expensive + // the common case is that we are creating a new file, and on MS Windows the create and truncate is expensive // because there is not an options in the windows API that corresponds to this so the truncate is applied as a separate call // even if the file is new. // as this is rare, its best to always try to create a new file, and it that fails, then open with truncate if that fails diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 28ff39917c02..045916faeaab 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,12 +26,10 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { // TODO: do we really need a new instance per run? Is there state that depends on the compiler frontend (symbols, types, settings)? private[this] lazy val mirrorCodeGen: LazyVar[CodeGenImpl.JMirrorBuilder] = perRunLazy(this)(new CodeGenImpl.JMirrorBuilder()) - private[this] lazy val beanInfoCodeGen: LazyVar[CodeGenImpl.JBeanInfoBuilder] = perRunLazy(this)(new CodeGenImpl.JBeanInfoBuilder()) - /** - * Generate ASM ClassNodes for classes found in a compilation unit. The resulting classes are - * passed to the `genBCode.generatedClassHandler`. 
- */ + * Generate ASM ClassNodes for classes found in a compilation unit. The resulting classes are + * passed to the `genBCode.generatedClassHandler`. + */ def genUnit(unit: CompilationUnit): Unit = { val generatedClasses = ListBuffer.empty[GeneratedClass] @@ -49,10 +47,6 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { else log(s"No mirror class for module with linked class: ${sym.fullName}") } - if (sym hasAnnotation coreBTypes.BeanInfoAttr) { - val beanClassNode = genBeanInfoClass(cd, unit) - generatedClasses += GeneratedClass(beanClassNode, fullSymbolName, position, isArtifact = true) - } } catch { case ex: InterruptedException => throw ex case ex: Throwable => @@ -64,6 +58,7 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { case EmptyTree => () case PackageDef(_, stats) => stats foreach genClassDefs case cd: ClassDef => frontendAccess.frontendSynch(genClassDef(cd)) + case x => throw new MatchError(x) } statistics.timed(statistics.bcodeGenStat) { @@ -86,10 +81,6 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { mirrorCodeGen.get.genMirrorClass(classSym, unit) } - def genBeanInfoClass(cd: ClassDef, unit: CompilationUnit): ClassNode = { - val sym = cd.symbol - beanInfoCodeGen.get.genBeanInfoClass(sym, unit, CodeGenImpl.fieldSymbols(sym), CodeGenImpl.methodSymbols(cd)) - } private def addSbtIClassShim(cd: ClassDef): Unit = { // shim for SBT, see https://github.com/sbt/sbt/issues/2076 diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index 17d548af5cdd..95c50f184001 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -175,7 +175,7 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { private[this] lazy val _jlCloneableRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(JavaCloneableClass)) // java/lang/Cloneable def jiSerializableRef : ClassBType = _jiSerializableRef.get - private[this] lazy val _jiSerializableRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(JavaSerializableClass)) // java/io/Serializable + private[this] lazy val _jiSerializableRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(SerializableClass)) // java/io/Serializable def jlClassCastExceptionRef : ClassBType = _jlClassCastExceptionRef.get private[this] lazy val _jlClassCastExceptionRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(ClassCastExceptionClass)) // java/lang/ClassCastException @@ -189,9 +189,6 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { def juHashMapRef : ClassBType = _juHashMapRef.get private[this] lazy val _juHashMapRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(JavaUtilHashMap)) // java/util/HashMap - def sbScalaBeanInfoRef : ClassBType = _sbScalaBeanInfoRef.get - private[this] lazy val _sbScalaBeanInfoRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(requiredClass[scala.beans.ScalaBeanInfo])) - def jliSerializedLambdaRef : ClassBType = _jliSerializedLambdaRef.get private[this] lazy val _jliSerializedLambdaRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda])) @@ -213,6 +210,9 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { def jliLambdaMetafactoryRef : ClassBType = _jliLambdaMetafactoryRef.get private[this] lazy val _jliLambdaMetafactoryRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory])) + def jliStringConcatFactoryRef : ClassBType = _jliStringConcatFactoryRef.get + 
private[this] lazy val _jliStringConcatFactoryRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(getRequiredClass("java.lang.invoke.StringConcatFactory"))) + def srBoxesRunTimeRef : ClassBType = _srBoxesRunTimeRef.get private[this] lazy val _srBoxesRunTimeRef : LazyVar[ClassBType] = runLazy(classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime])) @@ -236,20 +236,20 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { } private def srBoxesRuntimeMethods(getName: (String, String) => String): Map[BType, MethodNameAndType] = { - ScalaValueClassesNoUnit.map(primitive => { + Map.from(ScalaValueClassesNoUnit.iterator.map(primitive => { val bType = primitiveTypeToBType(primitive) val name = newTermName(getName(primitive.name.toString, boxedClass(primitive).name.toString)) (bType, methodNameAndType(BoxesRunTimeClass, name)) - })(collection.breakOut) + })) } // Z -> MethodNameAndType(boxToBoolean,(Z)Ljava/lang/Boolean;) def srBoxesRuntimeBoxToMethods: Map[BType, MethodNameAndType] = _srBoxesRuntimeBoxToMethods.get - private[this] lazy val _srBoxesRuntimeBoxToMethods: LazyVar[Map[BType, MethodNameAndType]] = runLazy(srBoxesRuntimeMethods((primitive, boxed) => "boxTo" + boxed)) + private[this] lazy val _srBoxesRuntimeBoxToMethods: LazyVar[Map[BType, MethodNameAndType]] = runLazy(srBoxesRuntimeMethods((_, boxed) => "boxTo" + boxed)) // Z -> MethodNameAndType(unboxToBoolean,(Ljava/lang/Object;)Z) def srBoxesRuntimeUnboxToMethods: Map[BType, MethodNameAndType] = _srBoxesRuntimeUnboxToMethods.get - private[this] lazy val _srBoxesRuntimeUnboxToMethods: LazyVar[Map[BType, MethodNameAndType]] = runLazy(srBoxesRuntimeMethods((primitive, boxed) => "unboxTo" + primitive)) + private[this] lazy val _srBoxesRuntimeUnboxToMethods: LazyVar[Map[BType, MethodNameAndType]] = runLazy(srBoxesRuntimeMethods((primitive, _) => "unboxTo" + primitive)) private def singleParamOfClass(cls: Symbol) = (s: Symbol) => s.paramss match { case List(List(param)) => 
param.info.typeSymbol == cls @@ -259,29 +259,29 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { // java/lang/Boolean -> MethodNameAndType(valueOf,(Z)Ljava/lang/Boolean;) def javaBoxMethods: Map[InternalName, MethodNameAndType] = _javaBoxMethods.get private[this] lazy val _javaBoxMethods: LazyVar[Map[InternalName, MethodNameAndType]] = runLazy { - ScalaValueClassesNoUnit.map(primitive => { + Map.from(ScalaValueClassesNoUnit.iterator.map(primitive => { val boxed = boxedClass(primitive) val method = methodNameAndType(boxed, newTermName("valueOf"), static = true, filterOverload = singleParamOfClass(primitive)) (classBTypeFromSymbol(boxed).internalName, method) - })(collection.breakOut) + })) } // java/lang/Boolean -> MethodNameAndType(booleanValue,()Z) def javaUnboxMethods: Map[InternalName, MethodNameAndType] = _javaUnboxMethods.get private[this] lazy val _javaUnboxMethods: LazyVar[Map[InternalName, MethodNameAndType]] = runLazy { - ScalaValueClassesNoUnit.map(primitive => { + Map.from(ScalaValueClassesNoUnit.iterator.map(primitive => { val boxed = boxedClass(primitive) val name = primitive.name.toString.toLowerCase + "Value" (classBTypeFromSymbol(boxed).internalName, methodNameAndType(boxed, newTermName(name))) - })(collection.breakOut) + })) } private def predefBoxingMethods(getName: (String, String) => String): Map[String, MethodBType] = { - ScalaValueClassesNoUnit.map(primitive => { + Map.from(ScalaValueClassesNoUnit.iterator.map(primitive => { val boxed = boxedClass(primitive) val name = getName(primitive.name.toString, boxed.name.toString) (name, methodNameAndType(PredefModule.moduleClass, newTermName(name)).methodType) - })(collection.breakOut) + })) } // boolean2Boolean -> (Z)Ljava/lang/Boolean; @@ -293,8 +293,8 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { private[this] lazy val _predefAutoUnboxMethods: LazyVar[Map[String, MethodBType]] = runLazy(predefBoxingMethods((primitive, boxed) => boxed + "2" + 
primitive.toLowerCase)) private def staticRefMethods(name: Name): Map[InternalName, MethodNameAndType] = { - allRefClasses.map(refClass => - (classBTypeFromSymbol(refClass).internalName, methodNameAndType(refClass, name, static = true)))(collection.breakOut) + Map.from(allRefClasses.iterator.map(refClass => + (classBTypeFromSymbol(refClass).internalName, methodNameAndType(refClass, name, static = true)))) } // scala/runtime/BooleanRef -> MethodNameAndType(create,(Z)Lscala/runtime/BooleanRef;) @@ -308,14 +308,14 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { // java/lang/Boolean -> MethodNameAndType(,(Z)V) def primitiveBoxConstructors: Map[InternalName, MethodNameAndType] = _primitiveBoxConstructors.get private[this] lazy val _primitiveBoxConstructors: LazyVar[Map[InternalName, MethodNameAndType]] = runLazy { - ScalaValueClassesNoUnit.map(primitive => { + Map.from(ScalaValueClassesNoUnit.iterator.map(primitive => { val boxed = boxedClass(primitive) (classBTypeFromSymbol(boxed).internalName, methodNameAndType(boxed, nme.CONSTRUCTOR, filterOverload = singleParamOfClass(primitive))) - })(collection.breakOut) + })) } private def nonOverloadedConstructors(classes: Iterable[Symbol]): Map[InternalName, MethodNameAndType] = { - classes.map(cls => (classBTypeFromSymbol(cls).internalName, methodNameAndType(cls, nme.CONSTRUCTOR)))(collection.breakOut) + Map.from(classes.iterator.map(cls => (classBTypeFromSymbol(cls).internalName, methodNameAndType(cls, nme.CONSTRUCTOR)))) } // scala/runtime/BooleanRef -> MethodNameAndType(,(Z)V) @@ -363,9 +363,6 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { def AndroidCreatorClass: Symbol = _AndroidCreatorClass.get private[this] lazy val _AndroidCreatorClass: LazyVar[Symbol] = runLazy(getClassIfDefined("android.os.Parcelable$Creator")) - def BeanInfoAttr: Symbol = _BeanInfoAttr.get - private[this] lazy val _BeanInfoAttr: LazyVar[Symbol] = runLazy(requiredClass[scala.beans.BeanInfo]) - /* 
The Object => String overload. */ def String_valueOf: Symbol = _String_valueOf.get private[this] lazy val _String_valueOf: LazyVar[Symbol] = runLazy { @@ -380,7 +377,7 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { new Handle(Opcodes.H_INVOKESTATIC, coreBTypes.jliLambdaMetafactoryRef.internalName, sn.Metafactory.toString, MethodBType( - List( + Array( coreBTypes.jliMethodHandlesLookupRef, coreBTypes.StringRef, coreBTypes.jliMethodTypeRef, @@ -397,7 +394,7 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { new Handle(Opcodes.H_INVOKESTATIC, coreBTypes.jliLambdaMetafactoryRef.internalName, sn.AltMetafactory.toString, MethodBType( - List( + Array( coreBTypes.jliMethodHandlesLookupRef, coreBTypes.StringRef, coreBTypes.jliMethodTypeRef, @@ -412,7 +409,7 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { new Handle(Opcodes.H_INVOKESTATIC, coreBTypes.srLambdaDeserialize.internalName, sn.Bootstrap.toString, MethodBType( - List( + Array( coreBTypes.jliMethodHandlesLookupRef, coreBTypes.StringRef, coreBTypes.jliMethodTypeRef, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 1e8fc8dc45c9..62e8c75c106a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,7 @@ package scala.tools.nsc package backend package jvm +import scala.annotation.nowarn import scala.tools.asm.Opcodes /** @@ -60,6 +61,7 @@ abstract class GenBCode extends SubComponent { val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { val bTypes: self.bTypes.type = self.bTypes } with PostProcessor + @nowarn("cat=lint-inaccessible") var generatedClassHandler: GeneratedClassHandler = _ val phaseName = "jvm" @@ -69,8 +71,6 @@ abstract class GenBCode extends SubComponent { class BCodePhase(prev: Phase) extends StdPhase(prev) { override def description = "Generate bytecode from ASTs using the ASM library" - erasedTypes = true - def apply(unit: CompilationUnit): Unit = codeGen.genUnit(unit) override def run(): Unit = { @@ -104,16 +104,19 @@ abstract class GenBCode extends SubComponent { } } - private def close(): Unit = { - Option(postProcessor.classfileWriter).foreach(_.close()) - Option(generatedClassHandler).foreach(_.close()) - } + private def close(): Unit = + List[AutoCloseable]( + postProcessor.classfileWriter, + generatedClassHandler, + bTypes.BTypeExporter, + ).filter(_ ne null).foreach(_.close()) } } object GenBCode { final val PublicStatic = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC final val PublicStaticFinal = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL + final val PrivateStaticFinal = Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL val CLASS_CONSTRUCTOR_NAME = "" val INSTANCE_CONSTRUCTOR_NAME = "" diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index cf25c348dfbe..a637192797a6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,13 +30,13 @@ import scala.util.control.NonFatal * Interface to handle post-processing and classfile writing (see [[PostProcessor]]) of generated * classes, potentially in parallel. */ -private[jvm] sealed trait GeneratedClassHandler { +private[jvm] sealed trait GeneratedClassHandler extends AutoCloseable { val postProcessor: PostProcessor /** * Pass the result of code generation for a compilation unit to this handler for post-processing */ - def process(unit: GeneratedCompilationUnit) + def process(unit: GeneratedCompilationUnit): Unit /** * If running in parallel, block until all generated classes are handled @@ -60,7 +60,13 @@ private[jvm] object GeneratedClassHandler { case maxThreads => if (settings.areStatisticsEnabled) - runReporting.warning(NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing", WarningCategory.Other, site = "") + runReporting.warning( + NoPosition, + "JVM statistics are not reliable with multi-threaded JVM class writing.\n" + + "To collect compiler statistics remove the " + settings.YaddBackendThreads.name + " setting.", + WarningCategory.Other, + site = "" + ) val additionalThreads = maxThreads - 1 // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes // a new task to be executed on the main thread, which provides back-pressure. @@ -146,7 +152,7 @@ private[jvm] object GeneratedClassHandler { } } - /** + /* * Go through each task in submission order, wait for it to finish and report its messages. * When finding task that has not completed, steal work from the executor's queue and run * it on the main thread (which we are on here), until the task is done. 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java index b119ed90625a..5242dd5872bb 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java +++ b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java index 0c8cfbd3a889..82babe00c31d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java +++ b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala b/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala index 69eb97565d44..6147213165a8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -25,7 +25,7 @@ trait PerRunInit { // so the back end may initialise them in parallel, and ListBuffer is not threadsafe private val inits = ListBuffer.empty[() => Unit] - def perRunInit(init: => Unit): Unit = inits.synchronized (inits += (() => init)) + def perRunInit(init: => Unit): Unit = inits.synchronized[Unit](inits += (() => init)) def initialize(): Unit = inits.synchronized(inits.foreach(_.apply())) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index b3517ba2d3cd..29bbcd1d8638 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,12 +15,14 @@ package backend.jvm import java.util.concurrent.ConcurrentHashMap -import scala.reflect.internal.util.{NoPosition, Position, StringContextStripMarginOps} +import scala.collection.mutable +import scala.reflect.internal.util.{NoPosition, Position} import scala.reflect.io.AbstractFile -import scala.tools.asm.ClassWriter +import scala.tools.asm.{ByteVector, ClassVisitor, ClassWriter, MethodVisitor, Opcodes} import scala.tools.asm.tree.ClassNode import scala.tools.nsc.backend.jvm.analysis.BackendUtils import scala.tools.nsc.backend.jvm.opt._ +import scala.util.control.NonFatal /** * Implements late stages of the backend that don't depend on a Global instance, i.e., @@ -62,9 +64,9 @@ abstract class PostProcessor extends PerRunInit { val bytes = try { if (!clazz.isArtifact) { localOptimizations(classNode) - backendUtils.onIndyLambdaImplMethodIfPresent(internalName) { - methods => if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) - } + val indyLambdaBodyMethods = 
backendUtils.indyLambdaBodyMethods(internalName) + if (indyLambdaBodyMethods.nonEmpty) + backendUtils.addLambdaDeserialize(classNode, indyLambdaBodyMethods) } warnCaseInsensitiveOverwrite(clazz) @@ -76,6 +78,7 @@ abstract class PostProcessor extends PerRunInit { // TODO fail fast rather than continuing to write the rest of the class files? if (frontendAccess.compilerSettings.debug) ex.printStackTrace() backendReporting.error(NoPosition, s"Error while emitting $internalName\n${ex.getMessage}") + if (NonFatal(ex)) checkConformance(classNode) null } @@ -87,6 +90,37 @@ abstract class PostProcessor extends PerRunInit { } } + // Provide more useful messaging when class writing fails. -Xverify verifies always. + private def checkConformance(classNode: ClassNode): Unit = { + val visitor = new ClassVisitor(Opcodes.ASM9) { + override def visitMethod(access: Int, name: String, descriptor: String, signature: String, exceptions: Array[String]) = { + val site = s"Method $name" + checkString(site, "name")(name) + checkString(site, "descriptor")(descriptor) + checkString(site, "signature")(signature) + new MethodVisitor(Opcodes.ASM9) { + override def visitLdcInsn(value: Object): Unit = { + value match { + case value: String => + checkString(site, "String constant")(value) + case _ => + } + } + } + } + private final val max = 0xFFFF + private final val threshold = 0xFFFF / 6 // char never expands to more than 6 bytes + private def checkString(site: String, which: String)(s: String): Unit = if (s != null && s.length > threshold) { + def fail() = backendReporting.error(NoPosition, s"$site in class ${classNode.name} has a bad $which of length ${s.length}${ if (frontendAccess.compilerSettings.debug) s":\n$s" else "" }") + if (s.length > max) fail() + else + try new ByteVector(s.length).putUTF8(s) + catch { case _: IllegalArgumentException => fail() } + } + } + classNode.accept(visitor) + } + private def warnCaseInsensitiveOverwrite(clazz: GeneratedClass): Unit = { val name = 
clazz.classNode.name val lowercaseJavaClassName = name.toLowerCase @@ -107,7 +141,7 @@ abstract class PostProcessor extends PerRunInit { } } - def runGlobalOptimizations(generatedUnits: Traversable[GeneratedCompilationUnit]): Unit = { + def runGlobalOptimizations(generatedUnits: Iterable[GeneratedCompilationUnit]): Unit = { // add classes to the bytecode repo before building the call graph: the latter needs to // look up classes and methods in the code repo. if (compilerSettings.optAddToBytecodeRepository) { @@ -119,9 +153,9 @@ abstract class PostProcessor extends PerRunInit { callGraph.addClass(c.classNode) } if (compilerSettings.optInlinerEnabled) - inliner.runInliner() - if (compilerSettings.optClosureInvocations) - closureOptimizer.rewriteClosureApplyInvocations() + inliner.runInlinerAndClosureOptimizer() + else if (compilerSettings.optClosureInvocations) + closureOptimizer.rewriteClosureApplyInvocations(None, mutable.Map.empty) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 5d50da611ed4..081ccd08a656 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,14 +13,14 @@ package scala.tools.nsc package backend.jvm -import scala.collection.generic.Clearable +import scala.collection.mutable.Clearable import scala.reflect.internal.util.{JavaClearable, Position, Statistics} import scala.reflect.io.AbstractFile +import scala.tools.nsc.Reporting.WarningCategory import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.util.chaining._ import java.util.{Collection => JCollection, Map => JMap} -import scala.tools.nsc.Reporting.WarningCategory - /** * Functionality needed in the post-processor whose implementation depends on the compiler * frontend. All methods are synchronized. @@ -67,10 +67,9 @@ object PostProcessorFrontendAccess { def optAddToBytecodeRepository: Boolean def optBuildCallGraph: Boolean + def optUseAnalyzerCache: Boolean def optNone: Boolean - def optLClasspath: Boolean - def optLProject: Boolean def optUnreachableCode: Boolean def optNullnessTracking: Boolean @@ -80,6 +79,9 @@ object PostProcessorFrontendAccess { def optSimplifyJumps: Boolean def optCompactLocals: Boolean def optClosureInvocations: Boolean + def optAllowSkipCoreModuleInit: Boolean + def optAssumeModulesNonNull: Boolean + def optAllowSkipClassLoading: Boolean def optInlinerEnabled: Boolean def optInlineFrom: List[String] @@ -110,7 +112,7 @@ object PostProcessorFrontendAccess { final class BufferingBackendReporting extends BackendReporting { // We optimise access to the buffered reports for the common case - that there are no warning/errors to report - // We could use a listBuffer etc - but that would be extra allocation in the common case + // We could use a ListBuffer etc - but that would be extra allocation in the common case // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and // consumed in another private var bufferedReports = List.empty[Report] @@ -184,7 +186,13 @@ object PostProcessorFrontendAccess { @inline def debug: Boolean = s.isDebug - val target: String = 
s.targetValue + val target: String = s.targetValue.tap { value => + s.releaseValue.foreach { release => + if (value.toInt < release.toInt) + directBackendReporting.warning(NoPosition, + s"target platform version $value is older than the release version $release") + } + } private val singleOutDir = s.outputDirs.getSingleOutput // the call to `outputDirFor` should be frontendSynch'd, but we assume that the setting is not mutated during the backend @@ -192,10 +200,9 @@ object PostProcessorFrontendAccess { val optAddToBytecodeRepository: Boolean = s.optAddToBytecodeRepository val optBuildCallGraph: Boolean = s.optBuildCallGraph + val optUseAnalyzerCache: Boolean = s.optUseAnalyzerCache val optNone: Boolean = s.optNone - val optLClasspath: Boolean = s.optLClasspath - val optLProject: Boolean = s.optLProject val optUnreachableCode: Boolean = s.optUnreachableCode val optNullnessTracking: Boolean = s.optNullnessTracking @@ -205,9 +212,12 @@ object PostProcessorFrontendAccess { val optSimplifyJumps: Boolean = s.optSimplifyJumps val optCompactLocals: Boolean = s.optCompactLocals val optClosureInvocations: Boolean = s.optClosureInvocations + val optAllowSkipCoreModuleInit: Boolean = s.optAllowSkipCoreModuleInit + val optAssumeModulesNonNull: Boolean = s.optAssumeModulesNonNull + val optAllowSkipClassLoading: Boolean = s.optAllowSkipClassLoading val optInlinerEnabled: Boolean = s.optInlinerEnabled - val optInlineFrom: List[String] = s.optInlineFrom.value + val optInlineFrom: List[String] = s.optInlineFrom val optInlineHeuristics: String = s.YoptInlineHeuristics.value val optWarningNoInlineMixed: Boolean = s.optWarningNoInlineMixed @@ -269,9 +279,9 @@ object PostProcessorFrontendAccess { def getEntryPoints: List[String] = frontendSynch(cleanup.getEntryPoints) def javaDefinedClasses: Set[InternalName] = frontendSynch { - currentRun.symSource.keys.collect{ + currentRun.symSource.keys.iterator.collect{ case sym if sym.isJavaDefined => sym.javaBinaryNameString - 
}(scala.collection.breakOut) + }.toSet } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingAnalyzer.scala new file mode 100644 index 000000000000..2ace193be5cf --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingAnalyzer.scala @@ -0,0 +1,688 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package backend.jvm +package analysis + +import scala.annotation.switch +import scala.collection.AbstractIterator +import scala.collection.mutable +import scala.tools.asm.Opcodes +import scala.tools.asm.tree._ +import scala.tools.asm.tree.analysis._ +import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.analysis.AliasSet.SmallBitSet +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ + +/** + * A subclass of Frame that tracks aliasing of values stored in local variables and on the stack. + * + * Note: an analysis tracking aliases is roughly 5x slower than a usual analysis (assuming a simple + * value domain with a fast merge function). For example, nullness analysis is roughly 5x slower + * than a BasicValue analysis. + * + * See the doc of package object `analysis` for some notes on the performance of alias analysis. 
+ */ +class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) { + import Opcodes._ + + // Auxiliary constructor required for implementing `AliasingAnalyzer.newFrame` + def this(src: Frame[_ <: V]) = { + this(src.getLocals, src.getMaxStackSize) + init(src) + } + + override def toString: String = super.toString + " - " + aliases.toList.filter(s => s != null && s.size > 1).map(_.toString).distinct.mkString(",") + + /** + * For every value the set of values that are aliases of it. + * + * Invariants: + * - If `aliases(i) == null` then i has no aliases. This is equivalent to having + * `aliases(i) == SingletonSet(i)`. + * - If `aliases(i) != null` then `aliases(i) contains i`. + * - If `aliases(i) contains j` then `aliases(i) eq aliases(j)`, i.e., they are references to the + * same (mutable) AliasSet. + */ + val aliases: Array[AliasSet] = new Array[AliasSet](getLocals + getMaxStackSize) + + /** + * The set of aliased values for a given entry in the `values` array. + */ + def aliasesOf(entry: Int): AliasSet = { + if (aliases(entry) != null) aliases(entry) + else { + val init = new AliasSet(new AliasSet.SmallBitSet(entry, -1, -1, -1), 1) + aliases(entry) = init + init + } + } + + /** + * Define a new alias. For example, an assignment + * b = a + * adds b to the set of aliases of a. + */ + private def newAlias(assignee: Int, source: Int): Unit = { + removeAlias(assignee) + val sourceAliases = aliasesOf(source) + sourceAliases += assignee + aliases(assignee) = sourceAliases + } + + /** + * Remove an alias. For example, an assignment + * a = someUnknownValue() + * removes a from its former alias set. + * As another example, stack values are removed from their alias sets when being consumed. + */ + private def removeAlias(assignee: Int): Unit = { + if (aliases(assignee) != null) { + aliases(assignee) -= assignee + aliases(assignee) = null + } + } + + /** + * Define the alias set for a given value. 
+ */ + private def setAliasSet(assignee: Int, set: AliasSet): Unit = { + if (aliases(assignee) != null) { + aliases(assignee) -= assignee + } + aliases(assignee) = set + } + + override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = { + // Make the extension methods easier to use (otherwise we have to repeat `this`.stackTop) + def stackTop: Int = this.stackTop + def peekStack(n: Int): V = this.peekStack(n) + + val prodCons = InstructionStackEffect.forAsmAnalysis(insn, this) // needs to be called before super.execute, see its doc + val consumed = InstructionStackEffect.cons(prodCons) + val produced = InstructionStackEffect.prod(prodCons) + + super.execute(insn, interpreter) + + (insn.getOpcode: @switch) match { + case ILOAD | LLOAD | FLOAD | DLOAD | ALOAD => + newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`) + + case IINC => + removeAlias(insn.asInstanceOf[IincInsnNode].`var`) + + case DUP => + val top = stackTop + newAlias(assignee = top, source = top - 1) + + case DUP_X1 => + val top = stackTop + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + newAlias(assignee = top - 2, source = top) + + case DUP_X2 => + // Check if the second element on the stack is size 2 + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup_x2 + val isSize2 = peekStack(1).getSize == 2 + val top = stackTop + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + if (isSize2) { + // Size 2 values on the stack only take one slot in the `values` array + newAlias(assignee = top - 2, source = top) + } else { + newAlias(assignee = top - 2, source = top - 3) + newAlias(assignee = top - 3, source = top) + } + + case DUP2 => + val isSize2 = peekStack(0).getSize == 2 + val top = stackTop + if (isSize2) { + newAlias(assignee = top, source = top - 1) + } else { + newAlias(assignee = top - 1, source = top - 3) + newAlias(assignee = top, source 
= top - 2) + } + + case DUP2_X1 => + val isSize2 = peekStack(0).getSize == 2 + val top = stackTop + if (isSize2) { + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + newAlias(assignee = top - 2, source = top) + } else { + newAlias(assignee = top, source = top - 2) + newAlias(assignee = top - 1, source = top - 3) + newAlias(assignee = top - 2, source = top - 4) + newAlias(assignee = top - 4, source = top) + newAlias(assignee = top - 5, source = top - 1) + } + + case DUP2_X2 => + val top = stackTop + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup2_x2 + val v1isSize2 = peekStack(0).getSize == 2 + if (v1isSize2) { + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + val v2isSize2 = peekStack(1).getSize == 2 + if (v2isSize2) { + // Form 4 + newAlias(assignee = top - 2, source = top) + } else { + // Form 2 + newAlias(assignee = top - 2, source = top - 3) + newAlias(assignee = top - 3, source = top) + } + } else { + newAlias(assignee = top, source = top - 2) + newAlias(assignee = top - 1, source = top - 3) + newAlias(assignee = top - 2, source = top - 4) + val v3isSize2 = peekStack(2).getSize == 2 + if (v3isSize2) { + // Form 3 + newAlias(assignee = top - 3, source = top) + newAlias(assignee = top - 4, source = top - 1) + } else { + // Form 1 + newAlias(assignee = top - 3, source = top - 5) + newAlias(assignee = top - 4, source = top) + newAlias(assignee = top - 5, source = top - 1) + } + } + + case SWAP => + // could be written more elegantly with higher-order combinators, but thinking of performance + val top = stackTop + + def moveNextToTop(): Unit = { + val nextAliases = aliases(top - 1) + aliases(top) = nextAliases + nextAliases -= (top - 1) + nextAliases += top + } + + if (aliases(top) != null) { + val topAliases = aliases(top) + if (aliases(top - 1) != null) moveNextToTop() + else aliases(top) = null + // move top to next + aliases(top - 1) = 
topAliases + topAliases -= top + topAliases += (top - 1) + } else { + if (aliases(top - 1) != null) { + moveNextToTop() + aliases(top - 1) = null + } + } + + case opcode => + (opcode: @switch) match { + case ISTORE | LSTORE | FSTORE | DSTORE | ASTORE => + // not a separate case: we re-use the code below that removes the consumed stack value from alias sets + val stackTopBefore = stackTop - produced + consumed + val local = insn.asInstanceOf[VarInsnNode].`var` + newAlias(assignee = local, source = stackTopBefore) + // if the value written is size 2, it overwrites the subsequent slot, which is then no + // longer an alias of anything. see the corresponding case in `Frame.execute`. + if (getLocal(local).getSize == 2) + removeAlias(local + 1) + + // if the value at the preceding index is size 2, it is no longer valid, so we remove its + // aliasing. see corresponding case in `Frame.execute` + if (local > 0) { + val precedingValue = getLocal(local - 1) + if (precedingValue != null && precedingValue.getSize == 2) + removeAlias(local - 1) + } + + case _ => + } + + // Remove consumed stack values from aliasing sets. + // Example: iadd + // - before: local1, local2, stack1, consumed1, consumed2 + // - after: local1, local2, stack1, produced1 // stackTop = 3 + val firstConsumed = stackTop - produced + 1 // firstConsumed = 3 + for (i <- 0 until consumed) + removeAlias(firstConsumed + i) // remove aliases for 3 and 4 + } + } + + /** + * When entering an exception handler, all values are dropped from the stack (and the exception + * value is pushed). The ASM analyzer invokes `firstHandlerInstructionFrame.clearStack()`. To + * ensure consistent aliasing sets, we need to remove the dropped values from aliasing sets. + */ + override def clearStack(): Unit = { + var i = getLocals + val end = i + getStackSize + while (i < end) { + removeAlias(i) + i += 1 + } + super.clearStack() + } + + /** + * Merge the AliasingFrame `other` into this AliasingFrame. 
+ * + * Aliases that are common in both frames are kept. Example: + * + * var x, y = null + * if (...) { + * x = a + * y = a // (x, y, a) are aliases + * } else { + * x = a + * y = b // (x, a) and (y, b) + * } + * [...] // (x, a) -- merge of ((x, y, a)) and ((x, a), (y, b)) + */ + override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = { + // merge is the main performance hot spot of a data flow analysis. + + // in nullness analysis, super.merge (which actually merges the nullness values) takes 20% of + // the overall analysis time. + val valuesChanged = super.merge(other, interpreter) + + // in nullness analysis, merging the alias sets takes ~55% of the analysis time. therefore, this + // code has been heavily optimized. most of the time is spent in the `hasNext` method of the + // andNotIterator, see its comment. + + var aliasesChanged = false + val aliasingOther = other.asInstanceOf[AliasingFrame[_]] + + val numValues = getLocals + getStackSize + // assume (a, b) are aliases both in this frame, and the other frame. when merging the alias set + // for a, we already see that a and b will be aliases in the final result. so we can skip over + // merging the alias set for b. in this case, while merging the sets for a, knownOk(b) will be + // set to `true`. + val knownOk = new Array[Boolean](numValues) + var i = 0 + while (i < numValues) { + if (!knownOk(i)) { + val thisAliases = this.aliases(i) + val otherAliases = aliasingOther.aliases(i) + if (thisAliases != null) { + if (otherAliases == null) { + if (thisAliases.size > 1) { + aliasesChanged = true + removeAlias(i) + } + } else { + // The iterator yields elements that are in `thisAliases` but not in `otherAliases`. + // As a side-effect, for every index `i` that is in both alias sets, the iterator sets + // `knownOk(i) = true`: the alias sets for these values don't need to be merged again. 
+ val thisNotOtherIt = AliasSet.andNotIterator(thisAliases, otherAliases, knownOk) + if (thisNotOtherIt.hasNext) { + aliasesChanged = true + val newSet = AliasSet.empty + while (thisNotOtherIt.hasNext) { + val next = thisNotOtherIt.next() + newSet += next + setAliasSet(next, newSet) + } + } + } + } + } + i += 1 + } + + valuesChanged || aliasesChanged + } + + private def min(s: SmallBitSet) = { + var r = s.a + if ( s.b < r) r = s.b + if (s.c != -1 && s.c < r) r = s.c + if (s.d != -1 && s.d < r) r = s.d + r + } + + override def init(src: Frame[_ <: V]): Frame[V] = { + super.init(src) // very quick (just an arraycopy) + System.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliases, 0, aliases, 0, aliases.length) // also quick + + val newSets = mutable.HashMap.empty[AliasSet, AliasSet] + + // the rest of this method (cloning alias sets) is the second performance˙hotspot (next to + // AliasingFrame.merge). for nullness, it takes ~20% of the analysis time. + // the difficulty here is that we have to clone the alias sets correctly. if two values a, b are + // aliases, then aliases(a) eq aliases(b). we need to make sure to use the same clone for the + // two values. + + var i = 0 + while (i < aliases.length) { + val set = aliases(i) + if (set != null) { + // size cannot be 0 - alias sets are always at least singletons. + // for sets of size 1-4, don't use the `newSets` map - lookup / update is slow + if (set.size == 1) { + aliases(i) = null + } else if (set.size <= 4) { + val small = set.set.asInstanceOf[AliasSet.SmallBitSet] + val firstOfSet = i == min(small) + if (firstOfSet) { + val newSet = set.clone() + aliases(small.a) = newSet + aliases(small.b) = newSet + if (small.c != -1) aliases(small.c) = newSet + if (small.d != -1) aliases(small.d) = newSet + } + } else { + // the actual hot spot is the hash map operations here: this is where almost all of the 20% + // mentioned above is spent. 
+ // i also benchmarked an alternative implementation: keep an array of booleans for indexes + // that already contain the cloned set. iterate through all elements of the cloned set and + // assign the cloned set. this approach is 50% slower than using a hash map. + if (newSets contains set) aliases(i) = newSets(set) + else { + val newSet = set.clone() + newSets(set) = newSet + aliases(i) = newSet + } + } + } + i += 1 + } + this + } +} + +/** + * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis + * needs to track aliases, but doesn't require a more specific Frame subclass. + */ +class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer[V](interpreter) { + override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack) + override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src) +} + +// Marker trait for AsmAnalyzers that use AliasingFrame +trait AliasingAsmAnalyzerMarker + +class BasicAliasingAnalyzer(methodNode: MethodNode, classInternalName: InternalName) + extends AsmAnalyzer[BasicValue](methodNode, classInternalName, new AliasingAnalyzer(new BasicInterpreter)) + with AliasingAsmAnalyzerMarker + +/** + * An iterator over Int (required to prevent boxing the result of next). + */ +abstract class IntIterator extends AbstractIterator[Int] { + def hasNext: Boolean + def next(): Int +} + +/** + * An efficient mutable bit set. + * + * @param set Either a SmallBitSet or an Array[Long] + * @param size The size of the set, useful for performance of certain operations + */ +class AliasSet(var set: Object /*SmallBitSet | Array[Long]*/, var size: Int) { + import AliasSet._ + + override def toString: String = iterator.toSet.mkString("<", ",", ">") + + /** + * An iterator for the elements of this bit set. Note that only one iterator can be used at a + * time. Also make sure not to change the underlying AliasSet during iteration. 
+ */ + def iterator: IntIterator = andNotIterator(this, empty, null) + + def +=(value: Int): Unit = (set: @unchecked) match { + case s: SmallBitSet => (size: @switch) match { + case 0 => s.a = value; size = 1 + case 1 => if (value != s.a) { s.b = value; size = 2 } + case 2 => if (value != s.a && value != s.b) { s.c = value; size = 3 } + case 3 => if (value != s.a && value != s.b && value != s.c) { s.d = value; size = 4 } + case 4 => + if (value != s.a && value != s.b && value != s.c && value != s.d) { + this.set = bsEmpty + this.size = 0 + bsAdd(this, s.a) + bsAdd(this, s.b) + bsAdd(this, s.c) + bsAdd(this, s.d) + bsAdd(this, value) + } + } + case _: Array[Long] => + bsAdd(this, value) + } + + def -=(value: Int): Unit = (set: @unchecked) match { + case s: SmallBitSet => (size: @switch) match { + case 0 => + case 1 => + if (value == s.a) { s.a = -1; size = 0 } + case 2 => + if (value == s.a) { s.a = s.b; s.b = -1; size = 1 } + else if (value == s.b) { s.b = -1; size = 1 } + case 3 => + if (value == s.a) { s.a = s.b; s.b = s.c; s.c = -1; size = 2 } + else if (value == s.b) { s.b = s.c; s.c = -1; size = 2 } + else if (value == s.c) { s.c = -1; size = 2 } + case 4 => + if (value == s.a) { s.a = s.b; s.b = s.c; s.c = s.d; s.d = -1; size = 3 } + else if (value == s.b) { s.b = s.c; s.c = s.d; s.d = -1; size = 3 } + else if (value == s.c) { s.c = s.d; s.d = -1; size = 3 } + else if (value == s.d) { s.d = -1; size = 3 } + } + case _: Array[Long] => + bsRemove(this, value) + if (this.size == 4) + this.set = bsToSmall(this.set.asInstanceOf[Array[Long]]) + } + + override def clone(): AliasSet = { + val resSet: Object = (set: @unchecked) match { + case s: SmallBitSet => new SmallBitSet(s.a, s.b, s.c, s.d) + case bits: Array[Long] => bits.clone() + } + new AliasSet(resSet, this.size) + } +} + +object AliasSet { + def empty = new AliasSet(new SmallBitSet(-1, -1, -1, -1), 0) + + final class SmallBitSet(var a: Int, var b: Int, var c: Int, var d: Int) { + override def toString = 
s"($a, $b, $c, $d)" + } + + def bsEmpty: Array[Long] = new Array[Long](1) + + private def bsEnsureCapacity(set: Array[Long], index: Int): Array[Long] = { + if (index < set.length) set + else { + var newLength = set.length + while (index >= newLength) newLength *= 2 + val newSet = new Array[Long](newLength) + Array.copy(set, 0, newSet, 0, set.length) + newSet + } + } + + def bsAdd(set: AliasSet, bit: Int): Unit = { + val bits = set.set.asInstanceOf[Array[Long]] + val index = bit >> 6 + val resSet = bsEnsureCapacity(bits, index) + val before = resSet(index) + val result = before | (1L << bit) + if (result != before) { + resSet(index) = result + set.set = resSet + set.size += 1 + } + } + + def bsRemove(set: AliasSet, bit: Int): Unit = { + val bits = set.set.asInstanceOf[Array[Long]] + val index = bit >> 6 + if (index < bits.length) { + val before = bits(index) + val result = before & ~(1L << bit) + if (result != before) { + bits(index) = result + set.size -= 1 + } + } + } + + def bsContains(set: Array[Long], bit: Int): Boolean = { + val index = bit >> 6 + bit >= 0 && index < set.length && (set(index) & (1L << bit)) != 0L + } + +// var sizesHist: Array[Int] = new Array[Int](1000) + + /** + * Convert a bit array to a SmallBitSet. Requires the bit array to contain exactly four bits. + */ + def bsToSmall(bits: Array[Long]): SmallBitSet = { + var a = -1 + var b = -1 + var c = -1 + var i = 0 + val end = bits.length * 64 + while (i < end) { + if (bsContains(bits, i)) { + if (a == -1) a = i + else if (b == -1) b = i + else if (c == -1) c = i + else return new SmallBitSet(a, b, c, i) + } + i += 1 + } + null + } + + /** + * An iterator that yields the elements that are in one bit set and not in another (&~). 
+ */ + private class AndNotIt(setA: AliasSet, setB: AliasSet, thisAndOther: Array[Boolean]) extends IntIterator { + // values in the first bit set + private var a, b, c, d = -1 + private var xs: Array[Long] = null + + // values in the second bit set + private var notA, notB, notC, notD = -1 + private var notXs: Array[Long] = null + + // holds the next value of `x`, `y` or `z` that should be returned. assigned in hasNext + private var abcdNext = -1 + + // counts through elements in the `xs` bit set + private var i = 0 + // true if the current value of `i` should be returned by this iterator + private var iValid = false + + (setA.set: @unchecked) match { + case s: SmallBitSet => a = s.a; b = s.b; c = s.c; d = s.d + case bits: Array[Long] => xs = bits + } + + (setB.set: @unchecked) match { + case s: SmallBitSet => notA = s.a; notB = s.b; notC = s.c; notD = s.d + case bits: Array[Long] => notXs = bits + } + + // for each value that exists both in this AND (&) the other bit, `thisAndOther` is set to true. + // hacky side-effect, used for performance of AliasingFrame.merge. + private def setThisAndOther(x: Int) = if (thisAndOther != null) thisAndOther(x) = true + + private def checkABCD(x: Int, num: Int): Boolean = { + // assert(x == a && num == 1 || x == b && num == 2 || ...) + x != -1 && { + val otherHasA = x == notA || x == notB || x == notC || x == notD || (notXs != null && bsContains(notXs, x)) + if (otherHasA) setThisAndOther(x) + else abcdNext = x + (num: @switch) match { + case 1 => a = -1 + case 2 => b = -1 + case 3 => c = -1 + case 4 => d = -1 + } + !otherHasA + } + } + + // main performance hot spot + private def checkXs = { + (xs != null) && { + val end = xs.length * 64 + + while (i < end && { + val index = i >> 6 + if (xs(index) == 0L) { // boom. for nullness, this saves 35% of the overall analysis time. 
+ i = ((index + 1) << 6) - 1 // -1 required because i is incremented in the loop body + true + } else { + val mask = 1L << i + // if (mask > xs(index)) we could also advance i to the next value, but that didn't pay off in benchmarks + val thisHasI = (xs(index) & mask) != 0L + !thisHasI || { + val otherHasI = i == notA || i == notB || i == notC || i == notD || (notXs != null && index < notXs.length && (notXs(index) & mask) != 0L) + if (otherHasI) setThisAndOther(i) + otherHasI + } + } + }) i += 1 + + iValid = i < end + iValid + } + } + + // this is the main hot spot of alias analysis. for nullness, 38% of the overall analysis time + // is spent here. within hasNext, almost the entire time is spent in `checkXs`. + // + def hasNext: Boolean = iValid || abcdNext != -1 || checkABCD(a, 1) || checkABCD(b, 2) || checkABCD(c, 3) || checkABCD(d, 4) || checkXs + + def next(): Int = { + if (hasNext) { + if (abcdNext != -1) { + val r = abcdNext; abcdNext = -1; r + } else { + val r = i; i += 1; iValid = false; r + } + } else Iterator.empty.next() + } + } + +// The number of bits in a bit array. Useful for debugging. +// def bsSize(bits: Array[Long]) = { +// var r = 0 +// var i = 0 +// while (i < bits.length) { +// r += java.lang.Long.bitCount(bits(i)) +// i += 1 +// } +// r +// } + + /** + * An iterator returning the elements in a that are not also in b (a &~ b). + * + * If `thisAndOther` is non-null, the iterator sets thisAndOther(i) to true for every value that + * is both in a and b (&). 
+ */ + def andNotIterator(a: AliasSet, b: AliasSet, thisAndOther: Array[Boolean]): IntIterator = new AndNotIt(a, b, thisAndOther) +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala deleted file mode 100644 index fd616da13af4..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala +++ /dev/null @@ -1,674 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package backend.jvm -package analysis - -import scala.annotation.switch -import scala.collection.mutable -import scala.tools.asm.Opcodes -import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis.{Analyzer, Value, Frame, Interpreter} -import opt.BytecodeUtils._ -import AliasSet.SmallBitSet - -/** - * A subclass of Frame that tracks aliasing of values stored in local variables and on the stack. - * - * Note: an analysis tracking aliases is roughly 5x slower than a usual analysis (assuming a simple - * value domain with a fast merge function). For example, nullness analysis is roughly 5x slower - * than a BasicValue analysis. - * - * See the doc of package object `analysis` for some notes on the performance of alias analysis. 
- */ -class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) { - import Opcodes._ - - // Auxiliary constructor required for implementing `AliasingAnalyzer.newFrame` - def this(src: Frame[_ <: V]) { - this(src.getLocals, src.getMaxStackSize) - init(src) - } - - override def toString: String = super.toString + " - " + aliases.toList.filter(s => s != null && s.size > 1).map(_.toString).distinct.mkString(",") - - /** - * For every value the set of values that are aliases of it. - * - * Invariants: - * - If `aliases(i) == null` then i has no aliases. This is equivalent to having - * `aliases(i) == SingletonSet(i)`. - * - If `aliases(i) != null` then `aliases(i) contains i`. - * - If `aliases(i) contains j` then `aliases(i) eq aliases(j)`, i.e., they are references to the - * same (mutable) AliasSet. - */ - val aliases: Array[AliasSet] = new Array[AliasSet](getLocals + getMaxStackSize) - - /** - * The set of aliased values for a given entry in the `values` array. - */ - def aliasesOf(entry: Int): AliasSet = { - if (aliases(entry) != null) aliases(entry) - else { - val init = new AliasSet(new AliasSet.SmallBitSet(entry, -1, -1, -1), 1) - aliases(entry) = init - init - } - } - - /** - * Define a new alias. For example, an assignment - * b = a - * adds b to the set of aliases of a. - */ - private def newAlias(assignee: Int, source: Int): Unit = { - removeAlias(assignee) - val sourceAliases = aliasesOf(source) - sourceAliases += assignee - aliases(assignee) = sourceAliases - } - - /** - * Remove an alias. For example, an assignment - * a = someUnknownValue() - * removes a from its former alias set. - * As another example, stack values are removed from their alias sets when being consumed. - */ - private def removeAlias(assignee: Int): Unit = { - if (aliases(assignee) != null) { - aliases(assignee) -= assignee - aliases(assignee) = null - } - } - - /** - * Define the alias set for a given value. 
- */ - private def setAliasSet(assignee: Int, set: AliasSet): Unit = { - if (aliases(assignee) != null) { - aliases(assignee) -= assignee - } - aliases(assignee) = set - } - - override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = { - // Make the extension methods easier to use (otherwise we have to repeat `this`.stackTop) - def stackTop: Int = this.stackTop - def peekStack(n: Int): V = this.peekStack(n) - - val prodCons = InstructionStackEffect.forAsmAnalysis(insn, this) // needs to be called before super.execute, see its doc - val consumed = InstructionStackEffect.cons(prodCons) - val produced = InstructionStackEffect.prod(prodCons) - - super.execute(insn, interpreter) - - (insn.getOpcode: @switch) match { - case ILOAD | LLOAD | FLOAD | DLOAD | ALOAD => - newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`) - - case DUP => - val top = stackTop - newAlias(assignee = top, source = top - 1) - - case DUP_X1 => - val top = stackTop - newAlias(assignee = top, source = top - 1) - newAlias(assignee = top - 1, source = top - 2) - newAlias(assignee = top - 2, source = top) - - case DUP_X2 => - // Check if the second element on the stack is size 2 - // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup_x2 - val isSize2 = peekStack(1).getSize == 2 - val top = stackTop - newAlias(assignee = top, source = top - 1) - newAlias(assignee = top - 1, source = top - 2) - if (isSize2) { - // Size 2 values on the stack only take one slot in the `values` array - newAlias(assignee = top - 2, source = top) - } else { - newAlias(assignee = top - 2, source = top - 3) - newAlias(assignee = top - 3, source = top) - } - - case DUP2 => - val isSize2 = peekStack(0).getSize == 2 - val top = stackTop - if (isSize2) { - newAlias(assignee = top, source = top - 1) - } else { - newAlias(assignee = top - 1, source = top - 3) - newAlias(assignee = top, source = top - 2) - } - - case DUP2_X1 => - val isSize2 = 
peekStack(0).getSize == 2 - val top = stackTop - if (isSize2) { - newAlias(assignee = top, source = top - 1) - newAlias(assignee = top - 1, source = top - 2) - newAlias(assignee = top - 2, source = top) - } else { - newAlias(assignee = top, source = top - 2) - newAlias(assignee = top - 1, source = top - 3) - newAlias(assignee = top - 2, source = top - 4) - newAlias(assignee = top - 4, source = top) - newAlias(assignee = top - 5, source = top - 1) - } - - case DUP2_X2 => - val top = stackTop - // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup2_x2 - val v1isSize2 = peekStack(0).getSize == 2 - if (v1isSize2) { - newAlias(assignee = top, source = top - 1) - newAlias(assignee = top - 1, source = top - 2) - val v2isSize2 = peekStack(1).getSize == 2 - if (v2isSize2) { - // Form 4 - newAlias(assignee = top - 2, source = top) - } else { - // Form 2 - newAlias(assignee = top - 2, source = top - 3) - newAlias(assignee = top - 3, source = top) - } - } else { - newAlias(assignee = top, source = top - 2) - newAlias(assignee = top - 1, source = top - 3) - newAlias(assignee = top - 2, source = top - 4) - val v3isSize2 = peekStack(2).getSize == 2 - if (v3isSize2) { - // Form 3 - newAlias(assignee = top - 3, source = top) - newAlias(assignee = top - 4, source = top - 1) - } else { - // Form 1 - newAlias(assignee = top - 3, source = top - 5) - newAlias(assignee = top - 4, source = top) - newAlias(assignee = top - 5, source = top - 1) - } - } - - case SWAP => - // could be written more elegantly with higher-order combinators, but thinking of performance - val top = stackTop - - def moveNextToTop(): Unit = { - val nextAliases = aliases(top - 1) - aliases(top) = nextAliases - nextAliases -= (top - 1) - nextAliases += top - } - - if (aliases(top) != null) { - val topAliases = aliases(top) - if (aliases(top - 1) != null) moveNextToTop() - else aliases(top) = null - // move top to next - aliases(top - 1) = topAliases - topAliases -= top - topAliases += (top - 
1) - } else { - if (aliases(top - 1) != null) { - moveNextToTop() - aliases(top - 1) = null - } - } - - case opcode => - (opcode: @switch) match { - case ISTORE | LSTORE | FSTORE | DSTORE | ASTORE => - // not a separate case: we re-use the code below that removes the consumed stack value from alias sets - val stackTopBefore = stackTop - produced + consumed - val local = insn.asInstanceOf[VarInsnNode].`var` - newAlias(assignee = local, source = stackTopBefore) - // if the value written is size 2, it overwrites the subsequent slot, which is then no - // longer an alias of anything. see the corresponding case in `Frame.execute`. - if (getLocal(local).getSize == 2) - removeAlias(local + 1) - - // if the value at the preceding index is size 2, it is no longer valid, so we remove its - // aliasing. see corresponding case in `Frame.execute` - if (local > 0) { - val precedingValue = getLocal(local - 1) - if (precedingValue != null && precedingValue.getSize == 2) - removeAlias(local - 1) - } - - case _ => - } - - // Remove consumed stack values from aliasing sets. - // Example: iadd - // - before: local1, local2, stack1, consumed1, consumed2 - // - after: local1, local2, stack1, produced1 // stackTop = 3 - val firstConsumed = stackTop - produced + 1 // firstConsumed = 3 - for (i <- 0 until consumed) - removeAlias(firstConsumed + i) // remove aliases for 3 and 4 - } - } - - /** - * When entering an exception handler, all values are dropped from the stack (and the exception - * value is pushed). The ASM analyzer invokes `firstHandlerInstructionFrame.clearStack()`. To - * ensure consistent aliasing sets, we need to remove the dropped values from aliasing sets. - */ - override def clearStack(): Unit = { - var i = getLocals - val end = i + getStackSize - while (i < end) { - removeAlias(i) - i += 1 - } - super.clearStack() - } - - /** - * Merge the AliasingFrame `other` into this AliasingFrame. - * - * Aliases that are common in both frames are kept. 
Example: - * - * var x, y = null - * if (...) { - * x = a - * y = a // (x, y, a) are aliases - * } else { - * x = a - * y = b // (x, a) and (y, b) - * } - * [...] // (x, a) -- merge of ((x, y, a)) and ((x, a), (y, b)) - */ - override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = { - // merge is the main performance hot spot of a data flow analysis. - - // in nullness analysis, super.merge (which actually merges the nullness values) takes 20% of - // the overall analysis time. - val valuesChanged = super.merge(other, interpreter) - - // in nullness analysis, merging the alias sets takes ~55% of the analysis time. therefore, this - // code has been heavily optimized. most of the time is spent in the `hasNext` method of the - // andNotIterator, see its comment. - - var aliasesChanged = false - val aliasingOther = other.asInstanceOf[AliasingFrame[_]] - - val numValues = getLocals + getStackSize - // assume (a, b) are aliases both in this frame, and the other frame. when merging the alias set - // for a, we already see that a and b will be aliases in the final result. so we can skip over - // merging the alias set for b. in this case, while merging the sets for a, knownOk(b) will be - // set to `true`. - val knownOk = new Array[Boolean](numValues) - var i = 0 - while (i < numValues) { - if (!knownOk(i)) { - val thisAliases = this.aliases(i) - val otherAliases = aliasingOther.aliases(i) - if (thisAliases != null) { - if (otherAliases == null) { - if (thisAliases.size > 1) { - aliasesChanged = true - removeAlias(i) - } - } else { - // The iterator yields elements that are in `thisAliases` but not in `otherAliases`. - // As a side-effect, for every index `i` that is in both alias sets, the iterator sets - // `knownOk(i) = true`: the alias sets for these values don't need to be merged again. 
- val thisNotOtherIt = AliasSet.andNotIterator(thisAliases, otherAliases, knownOk) - if (thisNotOtherIt.hasNext) { - aliasesChanged = true - val newSet = AliasSet.empty - while (thisNotOtherIt.hasNext) { - val next = thisNotOtherIt.next() - newSet += next - setAliasSet(next, newSet) - } - } - } - } - } - i += 1 - } - - valuesChanged || aliasesChanged - } - - private def min(s: SmallBitSet) = { - var r = s.a - if ( s.b < r) r = s.b - if (s.c != -1 && s.c < r) r = s.c - if (s.d != -1 && s.d < r) r = s.d - r - } - - override def init(src: Frame[_ <: V]): Frame[V] = { - super.init(src) // very quick (just an arraycopy) - System.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliases, 0, aliases, 0, aliases.length) // also quick - - val newSets = mutable.HashMap.empty[AliasSet, AliasSet] - - // the rest of this method (cloning alias sets) is the second performance˙hotspot (next to - // AliasingFrame.merge). for nullness, it takes ~20% of the analysis time. - // the difficulty here is that we have to clone the alias sets correctly. if two values a, b are - // aliases, then aliases(a) eq aliases(b). we need to make sure to use the same clone for the - // two values. - - var i = 0 - while (i < aliases.length) { - val set = aliases(i) - if (set != null) { - // size cannot be 0 - alias sets are always at least singletons. - // for sets of size 1-4, don't use the `newSets` map - lookup / update is slow - if (set.size == 1) { - aliases(i) = null - } else if (set.size <= 4) { - val small = set.set.asInstanceOf[AliasSet.SmallBitSet] - val firstOfSet = i == min(small) - if (firstOfSet) { - val newSet = set.clone() - aliases(small.a) = newSet - aliases(small.b) = newSet - if (small.c != -1) aliases(small.c) = newSet - if (small.d != -1) aliases(small.d) = newSet - } - } else { - // the actual hot spot is the hash map operations here: this is where almost all of the 20% - // mentioned above is spent. 
- // i also benchmarked an alternative implementation: keep an array of booleans for indexes - // that already contain the cloned set. iterate through all elements of the cloned set and - // assign the cloned set. this approach is 50% slower than using a hash map. - if (newSets contains set) aliases(i) = newSets(set) - else { - val newSet = set.clone() - newSets(set) = newSet - aliases(i) = newSet - } - } - } - i += 1 - } - this - } -} - -/** - * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis - * needs to track aliases, but doesn't require a more specific Frame subclass. - */ -class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer[V](interpreter) { - override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack) - override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src) -} - -/** - * An iterator over Int (required to prevent boxing the result of next). - */ -abstract class IntIterator extends Iterator[Int] { - def hasNext: Boolean - def next(): Int -} - -/** - * An efficient mutable bit set. - * - * @param set Either a SmallBitSet or an Array[Long] - * @param size The size of the set, useful for performance of certain operations - */ -class AliasSet(var set: Object /*SmallBitSet | Array[Long]*/, var size: Int) { - import AliasSet._ - - override def toString: String = iterator.toSet.mkString("<", ",", ">") - - /** - * An iterator for the elements of this bit set. Note that only one iterator can be used at a - * time. Also make sure not to change the underlying AliasSet during iteration. 
- */ - def iterator: IntIterator = andNotIterator(this, empty, null) - - def +=(value: Int): Unit = this.set match { - case s: SmallBitSet => (size: @switch) match { - case 0 => s.a = value; size = 1 - case 1 => if (value != s.a) { s.b = value; size = 2 } - case 2 => if (value != s.a && value != s.b) { s.c = value; size = 3 } - case 3 => if (value != s.a && value != s.b && value != s.c) { s.d = value; size = 4 } - case 4 => - if (value != s.a && value != s.b && value != s.c && value != s.d) { - this.set = bsEmpty - this.size = 0 - bsAdd(this, s.a) - bsAdd(this, s.b) - bsAdd(this, s.c) - bsAdd(this, s.d) - bsAdd(this, value) - } - } - case bits: Array[Long] => - bsAdd(this, value) - } - - def -=(value: Int): Unit = this.set match { - case s: SmallBitSet => (size: @switch) match { - case 0 => - case 1 => - if (value == s.a) { s.a = -1; size = 0 } - case 2 => - if (value == s.a) { s.a = s.b; s.b = -1; size = 1 } - else if (value == s.b) { s.b = -1; size = 1 } - case 3 => - if (value == s.a) { s.a = s.b; s.b = s.c; s.c = -1; size = 2 } - else if (value == s.b) { s.b = s.c; s.c = -1; size = 2 } - else if (value == s.c) { s.c = -1; size = 2 } - case 4 => - if (value == s.a) { s.a = s.b; s.b = s.c; s.c = s.d; s.d = -1; size = 3 } - else if (value == s.b) { s.b = s.c; s.c = s.d; s.d = -1; size = 3 } - else if (value == s.c) { s.c = s.d; s.d = -1; size = 3 } - else if (value == s.d) { s.d = -1; size = 3 } - } - case bits: Array[Long] => - bsRemove(this, value) - if (this.size == 4) - this.set = bsToSmall(this.set.asInstanceOf[Array[Long]]) - } - - override def clone(): AliasSet = { - val resSet = this.set match { - case s: SmallBitSet => new SmallBitSet(s.a, s.b, s.c, s.d) - case bits: Array[Long] => bits.clone() - } - new AliasSet(resSet, this.size) - } -} - -object AliasSet { - def empty = new AliasSet(new SmallBitSet(-1, -1, -1, -1), 0) - - final class SmallBitSet(var a: Int, var b: Int, var c: Int, var d: Int) { - override def toString = s"($a, $b, $c, $d)" - } - - def 
bsEmpty: Array[Long] = new Array[Long](1) - - private def bsEnsureCapacity(set: Array[Long], index: Int): Array[Long] = { - if (index < set.length) set - else { - var newLength = set.length - while (index >= newLength) newLength *= 2 - java.util.Arrays.copyOf(set, newLength) - } - } - - def bsAdd(set: AliasSet, bit: Int): Unit = { - val bits = set.set.asInstanceOf[Array[Long]] - val index = bit >> 6 - val resSet = bsEnsureCapacity(bits, index) - val before = resSet(index) - val result = before | (1l << bit) - if (result != before) { - resSet(index) = result - set.set = resSet - set.size += 1 - } - } - - def bsRemove(set: AliasSet, bit: Int): Unit = { - val bits = set.set.asInstanceOf[Array[Long]] - val index = bit >> 6 - if (index < bits.length) { - val before = bits(index) - val result = before & ~(1l << bit) - if (result != before) { - bits(index) = result - set.size -= 1 - } - } - } - - def bsContains(set: Array[Long], bit: Int): Boolean = { - val index = bit >> 6 - bit >= 0 && index < set.length && (set(index) & (1L << bit)) != 0L - } - -// var sizesHist: Array[Int] = new Array[Int](1000) - - /** - * Convert a bit array to a SmallBitSet. Requires the bit array to contain exactly four bits. - */ - def bsToSmall(bits: Array[Long]): SmallBitSet = { - var a = -1 - var b = -1 - var c = -1 - var i = 0 - val end = bits.length * 64 - while (i < end) { - if (bsContains(bits, i)) { - if (a == -1) a = i - else if (b == -1) b = i - else if (c == -1) c = i - else return new SmallBitSet(a, b, c, i) - } - i += 1 - } - null - } - - /** - * An iterator that yields the elements that are in one bit set and not in another (&~). 
- */ - private class AndNotIt(setA: AliasSet, setB: AliasSet, thisAndOther: Array[Boolean]) extends IntIterator { - // values in the first bit set - private var a, b, c, d = -1 - private var xs: Array[Long] = null - - // values in the second bit set - private var notA, notB, notC, notD = -1 - private var notXs: Array[Long] = null - - // holds the next value of `x`, `y` or `z` that should be returned. assigned in hasNext - private var abcdNext = -1 - - // counts through elements in the `xs` bit set - private var i = 0 - // true if the current value of `i` should be returned by this iterator - private var iValid = false - - setA.set match { - case s: SmallBitSet => a = s.a; b = s.b; c = s.c; d = s.d - case bits: Array[Long] => xs = bits - } - - setB.set match { - case s: SmallBitSet => notA = s.a; notB = s.b; notC = s.c; notD = s.d - case bits: Array[Long] => notXs = bits - } - - // for each value that exists both in this AND (&) the other bit, `thisAndOther` is set to true. - // hacky side-effect, used for performance of AliasingFrame.merge. - private def setThisAndOther(x: Int) = if (thisAndOther != null) thisAndOther(x) = true - - private def checkABCD(x: Int, num: Int): Boolean = { - // assert(x == a && num == 1 || x == b && num == 2 || ...) - x != -1 && { - val otherHasA = x == notA || x == notB || x == notC || x == notD || (notXs != null && bsContains(notXs, x)) - if (otherHasA) setThisAndOther(x) - else abcdNext = x - (num: @switch) match { - case 1 => a = -1 - case 2 => b = -1 - case 3 => c = -1 - case 4 => d = -1 - } - !otherHasA - } - } - - // main performance hot spot - private def checkXs = { - (xs != null) && { - val end = xs.length * 64 - - while (i < end && { - val index = i >> 6 - if (xs(index) == 0l) { // boom. for nullness, this saves 35% of the overall analysis time. 
- i = ((index + 1) << 6) - 1 // -1 required because i is incremented in the loop body - true - } else { - val mask = 1l << i - // if (mask > xs(index)) we could also advance i to the next value, but that didn't pay off in benchmarks - val thisHasI = (xs(index) & mask) != 0l - !thisHasI || { - val otherHasI = i == notA || i == notB || i == notC || i == notD || (notXs != null && index < notXs.length && (notXs(index) & mask) != 0l) - if (otherHasI) setThisAndOther(i) - otherHasI - } - } - }) i += 1 - - iValid = i < end - iValid - } - } - - // this is the main hot spot of alias analysis. for nullness, 38% of the overall analysis time - // is spent here. within hasNext, almost the entire time is spent in `checkXs`. - // - def hasNext: Boolean = iValid || abcdNext != -1 || checkABCD(a, 1) || checkABCD(b, 2) || checkABCD(c, 3) || checkABCD(d, 4) || checkXs - - def next(): Int = { - if (hasNext) { - if (abcdNext != -1) { - val r = abcdNext; abcdNext = -1; r - } else { - val r = i; i += 1; iValid = false; r - } - } else Iterator.empty.next() - } - } - -// The number of bits in a bit array. Useful for debugging. -// def bsSize(bits: Array[Long]) = { -// var r = 0 -// var i = 0 -// while (i < bits.length) { -// r += java.lang.Long.bitCount(bits(i)) -// i += 1 -// } -// r -// } - - /** - * An iterator returning the elements in a that are not also in b (a &~ b). - * - * If `thisAndOther` is non-null, the iterator sets thisAndOther(i) to true for every value that - * is both in a and b (&). 
- */ - def andNotIterator(a: AliasSet, b: AliasSet, thisAndOther: Array[Boolean]): IntIterator = new AndNotIt(a, b, thisAndOther) -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AsmAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AsmAnalyzer.scala new file mode 100644 index 000000000000..8eed053541ad --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AsmAnalyzer.scala @@ -0,0 +1,59 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.backend.jvm.analysis + +import scala.tools.asm.tree.analysis._ +import scala.tools.asm.tree.{AbstractInsnNode, MethodNode} +import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.analysis.BackendUtils.computeMaxLocalsMaxStack +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ + + +/** + * A wrapper to make ASM's Analyzer a bit easier to use. + */ +abstract class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, val analyzer: Analyzer[V]) { + computeMaxLocalsMaxStack(methodNode) + try { + analyzer.analyze(classInternalName, methodNode) + } catch { + case ae: AnalyzerException => + throw new AnalyzerException(null, "While processing " + classInternalName + "." + methodNode.name, ae) + } + def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) +} + +class BasicAnalyzer(methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer[BasicValue](methodNode, classInternalName, new Analyzer(new BasicInterpreter)) + +/** + * See the doc comment on package object `analysis` for a discussion on performance. 
+ */ +object AsmAnalyzer { + // jvm limit is 65535 for both number of instructions and number of locals + + private def size(method: MethodNode) = { + val ml = BackendUtils.maxLocals(method) + method.instructions.size.toLong * ml * ml + } + + // with the limits below, analysis should not take more than one second + + private val nullnessSizeLimit = 5000L * 600L * 600L // 5000 insns, 600 locals + private val basicValueSizeLimit = 9000L * 1000L * 1000L + private val sourceValueSizeLimit = 8000L * 950L * 950L + + def sizeOKForAliasing(method: MethodNode): Boolean = size(method) < nullnessSizeLimit + def sizeOKForNullness(method: MethodNode): Boolean = size(method) < nullnessSizeLimit + def sizeOKForBasicValue(method: MethodNode): Boolean = size(method) < basicValueSizeLimit + def sizeOKForSourceValue(method: MethodNode): Boolean = size(method) < sourceValueSizeLimit +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index b7e94df5d48d..e83c8cef1b17 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,32 +14,28 @@ package scala.tools.nsc package backend.jvm package analysis -import java.lang.invoke.LambdaMetafactory - -import scala.annotation.{switch, tailrec} -import scala.collection.mutable -import scala.collection.JavaConverters._ import java.util.concurrent.ConcurrentHashMap +import scala.annotation.{ switch, tailrec } +import scala.collection.immutable.BitSet +import scala.collection.immutable.ArraySeq.unsafeWrapArray +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import scala.reflect.internal.util.Position import scala.tools.asm import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis._ -import scala.tools.asm.{Handle, Type} +import scala.tools.asm.{ Handle, Opcodes, Type } import scala.tools.nsc.backend.jvm.BTypes._ import scala.tools.nsc.backend.jvm.GenBCode._ import scala.tools.nsc.backend.jvm.analysis.BackendUtils._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ -import scala.util.control.{NoStackTrace, NonFatal} +import scala.util.control.{ NoStackTrace, NonFatal } /** * This component hosts tools and utilities used in the backend that require access to a `BTypes` * instance. * - * One example is the AsmAnalyzer class, which runs `computeMaxLocalsMaxStack` on the methodNode to - * be analyzed. This method in turn lives inside the BTypes assembly because it queries the per-run - * cache `maxLocalsMaxStackComputed` defined in there. - * * TODO: move out of `analysis` package? */ abstract class BackendUtils extends PerRunInit { @@ -47,20 +43,18 @@ abstract class BackendUtils extends PerRunInit { import postProcessor.{bTypes, bTypesFromClassfile, callGraph} import bTypes._ - import callGraph.ClosureInstantiation import coreBTypes._ import frontendAccess.{compilerSettings, recordPerRunJavaMapCache} /** * Classes with indyLambda closure instantiations where the SAM type is serializable (e.g. Scala's - * FunctionN) need a `$deserializeLambda$` method. 
This map contains classes for which such a + * FunctionN) need a `\$deserializeLambda\$` method. This map contains classes for which such a * method has been generated. It is used during ordinary code generation, as well as during * inlining: when inlining an indyLambda instruction into a class, we need to make sure the class * has the method. */ - private val indyLambdaImplMethods: ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunJavaMapCache{ - new ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] - } + private val indyLambdaImplMethods: ConcurrentHashMap[InternalName, mutable.Map[MethodNode, mutable.Map[InvokeDynamicInsnNode, asm.Handle]]] = + recordPerRunJavaMapCache(new ConcurrentHashMap) // unused objects created by these constructors are eliminated by pushPop private[this] lazy val sideEffectFreeConstructors: LazyVar[Set[(String, String)]] = perRunLazy(this) { @@ -68,85 +62,41 @@ abstract class BackendUtils extends PerRunInit { primitiveBoxConstructors.map(ownerDesc).toSet ++ srRefConstructors.map(ownerDesc) ++ tupleClassConstructors.map(ownerDesc) ++ Set( - (ObjectRef.internalName, MethodBType(Nil, UNIT).descriptor), - (StringRef.internalName, MethodBType(Nil, UNIT).descriptor), - (StringRef.internalName, MethodBType(List(StringRef), UNIT).descriptor), - (StringRef.internalName, MethodBType(List(ArrayBType(CHAR)), UNIT).descriptor)) + (ObjectRef.internalName, MethodBType(BType.emptyArray, UNIT).descriptor), + (StringRef.internalName, MethodBType(BType.emptyArray, UNIT).descriptor), + (StringRef.internalName, MethodBType(Array(StringRef), UNIT).descriptor), + (StringRef.internalName, MethodBType(Array(ArrayBType(CHAR)), UNIT).descriptor)) } private[this] lazy val classesOfSideEffectFreeConstructors: LazyVar[Set[String]] = perRunLazy(this)(sideEffectFreeConstructors.get.map(_._1)) lazy val classfileVersion: LazyVar[Int] = perRunLazy(this)(compilerSettings.target match { - case "8" => asm.Opcodes.V1_8 - case "9" 
=> asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15 - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - case "20" => asm.Opcodes.V20 - case "21" => asm.Opcodes.V21 - case "22" => asm.Opcodes.V22 - case "23" => asm.Opcodes.V23 - // to be continued... - }) - + case "8" => asm.Opcodes.V1_8 + case "9" => asm.Opcodes.V9 + case "10" => asm.Opcodes.V10 + case "11" => asm.Opcodes.V11 + case "12" => asm.Opcodes.V12 + case "13" => asm.Opcodes.V13 + case "14" => asm.Opcodes.V14 + case "15" => asm.Opcodes.V15 + case "16" => asm.Opcodes.V16 + case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 + case "19" => asm.Opcodes.V19 + case "20" => asm.Opcodes.V20 + case "21" => asm.Opcodes.V21 + case "22" => asm.Opcodes.V22 + case "23" => asm.Opcodes.V23 + case "24" => asm.Opcodes.V24 + case "25" => asm.Opcodes.V25 + // to be continued... + }) - lazy val majorVersion: LazyVar[Int] = perRunLazy(this)(classfileVersion.get & 0xFF) - - lazy val emitStackMapFrame: LazyVar[Boolean] = perRunLazy(this)(majorVersion.get >= 50) lazy val extraProc: LazyVar[Int] = perRunLazy(this)( - asm.ClassWriter.COMPUTE_MAXS | - (if (emitStackMapFrame.get) asm.ClassWriter.COMPUTE_FRAMES else 0) + asm.ClassWriter.COMPUTE_MAXS | asm.ClassWriter.COMPUTE_FRAMES ) - /** - * A wrapper to make ASM's Analyzer a bit easier to use. - */ - class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, val analyzer: Analyzer[V] = new Analyzer(new BasicInterpreter)) { - computeMaxLocalsMaxStack(methodNode) - try { - analyzer.analyze(classInternalName, methodNode) - } catch { - case ae: AnalyzerException => - throw new AnalyzerException(null, "While processing " + classInternalName + "." 
+ methodNode.name, ae) - } - def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) - } - - /** - * See the doc comment on package object `analysis` for a discussion on performance. - */ - object AsmAnalyzer { - // jvm limit is 65535 for both number of instructions and number of locals - - private def size(method: MethodNode) = { - val ml = maxLocals(method) - method.instructions.size.toLong * ml * ml - } - - // with the limits below, analysis should not take more than one second - - private val nullnessSizeLimit = 5000l * 600l * 600l // 5000 insns, 600 locals - private val basicValueSizeLimit = 9000l * 1000l * 1000l - private val sourceValueSizeLimit = 8000l * 950l * 950l - - def sizeOKForAliasing(method: MethodNode): Boolean = size(method) < nullnessSizeLimit - def sizeOKForNullness(method: MethodNode): Boolean = size(method) < nullnessSizeLimit - def sizeOKForBasicValue(method: MethodNode): Boolean = size(method) < basicValueSizeLimit - def sizeOKForSourceValue(method: MethodNode): Boolean = size(method) < sourceValueSizeLimit - } - - class ProdConsAnalyzer(val methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer(methodNode, classInternalName, new Analyzer(new InitialProducerSourceInterpreter)) with ProdConsAnalyzerImpl - - class NonLubbingTypeFlowAnalyzer(val methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer(methodNode, classInternalName, new Analyzer(new NonLubbingTypeFlowInterpreter)) - /* * Add: * @@ -178,11 +128,11 @@ abstract class BackendUtils extends PerRunInit { // stack map frames and invokes the `getCommonSuperClass` method. This method expects all // ClassBTypes mentioned in the source code to exist in the map. 
- val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + val serlamObjDesc = MethodBType(Array(jliSerializedLambdaRef), ObjectRef).descriptor val implMethodsArray = implMethods.toArray val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]) { + def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { mv.visitVarInsn(ALOAD, 0) mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, lambdaDeserializeBootstrapHandle, targetMethods: _*) } @@ -201,51 +151,57 @@ abstract class BackendUtils extends PerRunInit { } for ((label, i) <- initialLabels.iterator.zipWithIndex) { mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i)) + emitLambdaDeserializeIndy(unsafeWrapArray(groups(i))) mv.visitInsn(ARETURN) } mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1)) + emitLambdaDeserializeIndy(unsafeWrapArray(groups(numGroups - 1))) mv.visitInsn(ARETURN) } /** * Clone the instructions in `methodNode` into a new [[InsnList]], mapping labels according to - * the `labelMap`. Returns the new instruction list and a map from old to new instructions, and - * a list of lambda implementation methods references by invokedynamic[LambdaMetafactory] for a - * serializable SAM types. + * the `labelMap`. + * + * For invocation instructions, set the callGraph.callsitePositions to the `callsitePos`. 
+ * + * Returns + * - the new instruction list + * - a map from old to new instructions + * - a bit set containing local variable indices that are stored into */ - def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], List[Handle]) = { + def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], callsitePos: Position, keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], mutable.BitSet) = { val javaLabelMap = labelMap.asJava val result = new InsnList var map = Map.empty[AbstractInsnNode, AbstractInsnNode] - val inlinedTargetHandles = mutable.ListBuffer[Handle]() + val writtenLocals = mutable.BitSet.empty for (ins <- methodNode.instructions.iterator.asScala) { - ins match { - case callGraph.LambdaMetaFactoryCall(indy, _, _, _) => indy.bsmArgs match { - case Array(_, targetHandle: Handle, _, flags: Integer, xs@_*) if (flags.intValue & LambdaMetafactory.FLAG_SERIALIZABLE) != 0 => - inlinedTargetHandles += targetHandle - case _ => - } - case _ => - } - if (keepLineNumbers || !ins.isInstanceOf[LineNumberNode]) { + if (keepLineNumbers || ins.getType != AbstractInsnNode.LINE) { val cloned = ins.clone(javaLabelMap) + if (ins.getType == AbstractInsnNode.METHOD_INSN) { + val mi = ins.asInstanceOf[MethodInsnNode] + val clonedMi = cloned.asInstanceOf[MethodInsnNode] + callGraph.callsitePositions(clonedMi) = callsitePos + if (callGraph.inlineAnnotatedCallsites(mi)) + callGraph.inlineAnnotatedCallsites += clonedMi + if (callGraph.noInlineAnnotatedCallsites(mi)) + callGraph.noInlineAnnotatedCallsites += clonedMi + if (callGraph.staticallyResolvedInvokespecial(mi)) + callGraph.staticallyResolvedInvokespecial += clonedMi + } else if (isStore(ins)) { + val vi = ins.asInstanceOf[VarInsnNode] + writtenLocals += vi.`var` + } result add cloned map += ((ins, cloned)) } } - (result, map, inlinedTargetHandles.toList) + (result, map, 
writtenLocals) } def getBoxedUnit: FieldInsnNode = new FieldInsnNode(GETSTATIC, srBoxedUnitRef.internalName, "UNIT", srBoxedUnitRef.descriptor) - private val anonfunAdaptedName = """.*\$anonfun\$.*\$\d+\$adapted""".r - def hasAdaptedImplMethod(closureInit: ClosureInstantiation): Boolean = { - anonfunAdaptedName.pattern.matcher(closureInit.lambdaMetaFactoryCall.implMethod.getName).matches - } - - private def primitiveAsmTypeToBType(primitiveType: Type): PrimitiveBType = (primitiveType.getSort: @switch) match { + def primitiveAsmTypeToBType(primitiveType: Type): PrimitiveBType = (primitiveType.getSort: @switch) match { case Type.BOOLEAN => BOOL case Type.BYTE => BYTE case Type.CHAR => CHAR @@ -313,22 +269,31 @@ abstract class BackendUtils extends PerRunInit { def runtimeRefClassBoxedType(refClass: InternalName): Type = Type.getArgumentTypes(srRefCreateMethods(refClass).methodType.descriptor)(0) - def isSideEffectFreeCall(insn: MethodInsnNode): Boolean = { - isScalaBox(insn) || isScalaUnbox(insn) || - isJavaBox(insn) || // not java unbox, it may NPE - isSideEffectFreeConstructorCall(insn) + def isSideEffectFreeCall(mi: MethodInsnNode): Boolean = { + isScalaBox(mi) || // not Scala unbox, it may CCE + isJavaBox(mi) || // not Java unbox, it may NPE + isSideEffectFreeConstructorCall(mi) || + isClassTagApply(mi) } + // methods that are known to return a non-null result def isNonNullMethodInvocation(mi: MethodInsnNode): Boolean = { - isJavaBox(mi) || isScalaBox(mi) || isPredefAutoBox(mi) || isRefCreate(mi) || isRefZero(mi) + isJavaBox(mi) || isScalaBox(mi) || isPredefAutoBox(mi) || isRefCreate(mi) || isRefZero(mi) || isClassTagApply(mi) } - def isModuleLoad(insn: AbstractInsnNode, moduleName: InternalName): Boolean = insn match { - case fi: FieldInsnNode => fi.getOpcode == GETSTATIC && fi.owner == moduleName && fi.name == "MODULE$" && fi.desc == ("L" + moduleName + ";") - case _ => false - } + lazy val modulesAllowSkipInitialization: Set[InternalName] = + if 
(!compilerSettings.optAllowSkipCoreModuleInit) Set.empty + else Set( + "scala/Predef$", + "scala/runtime/ScalaRunTime$", + "scala/reflect/ClassTag$", + "scala/reflect/ManifestFactory$", + "scala/Array$", + "scala/collection/ArrayOps$", + "scala/collection/StringOps$", + ) ++ primitiveTypes.keysIterator - def isPredefLoad(insn: AbstractInsnNode) = isModuleLoad(insn, PredefRef.internalName) + def isPredefLoad(insn: AbstractInsnNode): Boolean = isModuleLoad(insn, _ == PredefRef.internalName) def isPrimitiveBoxConstructor(insn: MethodInsnNode): Boolean = calleeInMap(insn, primitiveBoxConstructors) def isRuntimeRefConstructor(insn: MethodInsnNode): Boolean = calleeInMap(insn, srRefConstructors) @@ -339,20 +304,108 @@ abstract class BackendUtils extends PerRunInit { insn.name == INSTANCE_CONSTRUCTOR_NAME && sideEffectFreeConstructors.get((insn.owner, insn.desc)) } - def isNewForSideEffectFreeConstructor(insn: AbstractInsnNode) = { + def isNewForSideEffectFreeConstructor(insn: AbstractInsnNode): Boolean = { insn.getOpcode == NEW && { val ti = insn.asInstanceOf[TypeInsnNode] classesOfSideEffectFreeConstructors.get.contains(ti.desc) } } - def isBoxedUnit(insn: AbstractInsnNode) = { + def isBoxedUnit(insn: AbstractInsnNode): Boolean = { insn.getOpcode == GETSTATIC && { val fi = insn.asInstanceOf[FieldInsnNode] fi.owner == srBoxedUnitRef.internalName && fi.name == "UNIT" && fi.desc == srBoxedUnitRef.descriptor } } + def isTraitSuperAccessor(method: MethodNode, owner: ClassBType): Boolean = { + owner.isInterface.get && + isSyntheticMethod(method) && + method.name.endsWith("$") && + isStaticMethod(method) && + findSingleCall(method, mi => mi.itf && mi.getOpcode == INVOKESPECIAL && mi.name + "$" == method.name).nonEmpty + } + + def isMixinForwarder(method: MethodNode, owner: ClassBType): Boolean = { + !owner.isInterface.get && + // isSyntheticMethod(method) && // mixin forwarders are not synthetic it seems + !isStaticMethod(method) && + findSingleCall(method, mi => mi.itf && 
mi.getOpcode == INVOKESTATIC && mi.name == method.name + "$").nonEmpty + } + + def isTraitSuperAccessorOrMixinForwarder(method: MethodNode, owner: ClassBType): Boolean = { + isTraitSuperAccessor(method, owner) || isMixinForwarder(method, owner) + } + + private val nonForwarderInstructionTypes: BitSet = { + import AbstractInsnNode._ + BitSet(FIELD_INSN, INVOKE_DYNAMIC_INSN, JUMP_INSN, IINC_INSN, TABLESWITCH_INSN, LOOKUPSWITCH_INSN) + } + + /** + * Identify forwarders, aliases, anonfun\$adapted methods, bridges, trivial methods (x + y), etc + * Returns + * -1 : no match + * 1 : trivial (no method calls), but not field getters + * 2 : factory + * 3 : forwarder with boxing adaptation + * 4 : generic forwarder / alias + * + * TODO: should delay some checks to `canInline` (during inlining) + * problem is: here we don't have access to the callee / accessed field, so we can't check accessibility + * - INVOKESPECIAL is not the only way to call private methods, INVOKESTATIC is also possible + * - the body of the callee can change between here (we're in inliner heuristics) and the point + * when we actually inline it (code may have been inlined into the callee) + * - methods accessing a public field could be inlined. on the other hand, methods accessing a private + * static field should not be inlined. + */ + def looksLikeForwarderOrFactoryOrTrivial(method: MethodNode, owner: InternalName, allowPrivateCalls: Boolean): Int = { + val paramTypes = Type.getArgumentTypes(method.desc) + val numPrimitives = paramTypes.count(_.getSort < Type.ARRAY) + (if (Type.getReturnType(method.desc).getSort < Type.ARRAY) 1 else 0) + + val maxSize = + 3 + // forwardee call, return + paramTypes.length + // param load + numPrimitives * 2 + // box / unbox call, for example Predef.int2Integer + paramTypes.length + 2 // some slack: +1 for each parameter, receiver, return value. allow things like casts. 
+ + if (method.instructions.iterator.asScala.count(_.getOpcode > 0) > maxSize) return -1 + + var numBoxConv = 0 + var numCallsOrNew = 0 + var callMi: MethodInsnNode = null + val it = method.instructions.iterator + while (it.hasNext && numCallsOrNew < 2) { + val i = it.next() + val t = i.getType + if (t == AbstractInsnNode.METHOD_INSN) { + val mi = i.asInstanceOf[MethodInsnNode] + if (!allowPrivateCalls && i.getOpcode == INVOKESPECIAL && mi.name != GenBCode.INSTANCE_CONSTRUCTOR_NAME) { + numCallsOrNew = 2 // stop here: don't inline forwarders with a private or super call + } else { + if (isScalaBox(mi) || isScalaUnbox(mi) || isPredefAutoBox(mi) || isPredefAutoUnbox(mi) || isJavaBox(mi) || isJavaUnbox(mi)) + numBoxConv += 1 + else { + numCallsOrNew += 1 + callMi = mi + } + } + } else if (nonForwarderInstructionTypes(t)) { + if (i.getOpcode == GETSTATIC) { + if (!allowPrivateCalls && owner == i.asInstanceOf[FieldInsnNode].owner) + numCallsOrNew = 2 // stop here: not forwarder or trivial + } else { + numCallsOrNew = 2 // stop here: not forwarder or trivial + } + } + } + if (numCallsOrNew > 1 || numBoxConv > paramTypes.length + 1) -1 + else if (numCallsOrNew == 0) if (numBoxConv == 0) 1 else 3 + else if (callMi.name == GenBCode.INSTANCE_CONSTRUCTOR_NAME) 2 + else if (numBoxConv > 0) 3 + else 4 + } + private class Collector extends NestedClassesCollector[ClassBType](nestedOnly = true) { def declaredNestedClasses(internalName: InternalName): List[ClassBType] = bTypesFromClassfile.classBTypeFromParsedClassfile(internalName).info.get.nestedClasses.force @@ -388,7 +441,7 @@ abstract class BackendUtils extends PerRunInit { * * can-multi-thread */ - final def addInnerClasses(jclass: asm.tree.ClassNode, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]) { + final def addInnerClasses(jclass: asm.tree.ClassNode, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { // sorting ensures nested classes are listed after their 
enclosing class thus satisfying the Eclipse Java compiler val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) allNestedClasses ++= declaredInnerClasses @@ -396,52 +449,188 @@ abstract class BackendUtils extends PerRunInit { for (nestedClass <- allNestedClasses) { // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. - val Some(e) = nestedClass.innerClassAttributeEntry.get + val Some(e) = nestedClass.innerClassAttributeEntry.get: @unchecked jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) } } - def onIndyLambdaImplMethodIfPresent(hostClass: InternalName) (action : mutable.LinkedHashSet[asm.Handle] => Unit): Unit = + def onIndyLambdaImplMethodIfPresent[T](hostClass: InternalName)(action: mutable.Map[MethodNode, mutable.Map[InvokeDynamicInsnNode, asm.Handle]] => T): Option[T] = indyLambdaImplMethods.get(hostClass) match { - case null => - case xs => xs.synchronized(action(xs)) + case null => None + case methods => Some(methods.synchronized(action(methods))) } - def onIndyLambdaImplMethod[T](hostClass: InternalName) (action: mutable.LinkedHashSet[asm.Handle] => T): T ={ - val methods = indyLambdaImplMethods.computeIfAbsent(hostClass, (_) => mutable.LinkedHashSet[asm.Handle]()) + def onIndyLambdaImplMethod[T](hostClass: InternalName)(action: mutable.Map[MethodNode, mutable.Map[InvokeDynamicInsnNode, asm.Handle]] => T): T = { + val methods = indyLambdaImplMethods.computeIfAbsent(hostClass, _ => mutable.Map.empty) + methods.synchronized(action(methods)) + } + + def addIndyLambdaImplMethod(hostClass: InternalName, method: MethodNode, indy: InvokeDynamicInsnNode, handle: asm.Handle): Unit = { + onIndyLambdaImplMethod(hostClass)(_.getOrElseUpdate(method, mutable.Map.empty)(indy) = handle) + } - methods.synchronized (action(methods)) + def removeIndyLambdaImplMethod(hostClass: InternalName, method: MethodNode, indy: InvokeDynamicInsnNode): Unit = { + 
onIndyLambdaImplMethodIfPresent(hostClass)(_.get(method).foreach(_.remove(indy))) } - /** - * add methods - * @return the added methods. Note the order is undefined + /** + * The methods used as lambda bodies for IndyLambda instructions within `hostClass`. Note that + * the methods are not necessarily defined within the `hostClass` (when an IndyLambda is inlined + * into a different class). */ - def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Seq[asm.Handle] = { - if (handle.isEmpty) Nil else onIndyLambdaImplMethod(hostClass) { - case set => - if (set.isEmpty) { - set ++= handle - handle - } else { - var added = List.empty[asm.Handle] - handle foreach { h => if (set.add(h)) added ::= h } - added - } - } + def indyLambdaBodyMethods(hostClass: InternalName): mutable.SortedSet[Handle] = { + val res = mutable.TreeSet.empty[Handle](handleOrdering) + onIndyLambdaImplMethodIfPresent(hostClass)(methods => res addAll methods.valuesIterator.flatMap(_.valuesIterator)) + res } - def addIndyLambdaImplMethod(hostClass: InternalName, handle: asm.Handle): Boolean = { - onIndyLambdaImplMethod(hostClass) { - _ add handle - } + /** + * The methods used as lambda bodies for IndyLambda instructions within `method` of `hostClass`. 
+ */ + def indyLambdaBodyMethods(hostClass: InternalName, method: MethodNode): Map[InvokeDynamicInsnNode, Handle] = { + onIndyLambdaImplMethodIfPresent(hostClass)(ms => ms.getOrElse(method, Nil).toMap).getOrElse(Map.empty) } - def removeIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = { - if (handle.nonEmpty) - onIndyLambdaImplMethodIfPresent(hostClass) { - _ --= handle - } + // not in `backendReporting` since there we don't have access to the `Callsite` class + def optimizerWarningSiteString(cs: callGraph.Callsite): String = + frontendAccess.backendReporting.siteString(cs.callsiteClass.internalName, cs.callsiteMethod.name) +} + +object BackendUtils { + /** + * A pseudo-flag, added MethodNodes whose maxLocals / maxStack are computed. This allows invoking + * `computeMaxLocalsMaxStack` whenever running an analyzer but performing the actual computation + * only when necessary. + * + * The largest JVM flag (as of JDK 8) is ACC_MANDATED (0x8000), however the asm framework uses + * the same trick and defines some pseudo flags + * - ACC_DEPRECATED = 0x20000 + * - ACC_SYNTHETIC_ATTRIBUTE = 0x40000 + * - ACC_CONSTRUCTOR = 0x80000 + * + * I haven't seen the value picked here in use anywhere. We make sure to remove the flag when + * it's no longer needed. + */ + private val ACC_MAXS_COMPUTED = 0x1000000 + def isMaxsComputed(method: MethodNode) = (method.access & ACC_MAXS_COMPUTED) != 0 + def setMaxsComputed(method: MethodNode) = method.access |= ACC_MAXS_COMPUTED + def clearMaxsComputed(method: MethodNode) = method.access &= ~ACC_MAXS_COMPUTED + + /** + * A pseudo-flag indicating if a MethodNode's unreachable code has been eliminated. + * + * The ASM Analyzer class does not compute any frame information for unreachable instructions. + * Transformations that use an analyzer (including inlining) therefore require unreachable code + * to be eliminated. + * + * This flag allows running dead code elimination whenever an analyzer is used. 
If the method + * is already optimized, DCE can return early. + */ + private val ACC_DCE_DONE = 0x2000000 + def isDceDone(method: MethodNode) = (method.access & ACC_DCE_DONE) != 0 + def setDceDone(method: MethodNode) = method.access |= ACC_DCE_DONE + def clearDceDone(method: MethodNode) = method.access &= ~ACC_DCE_DONE + + private val LABEL_REACHABLE_STATUS = 0x1000000 + private def isLabelFlagSet(l: LabelNode1, f: Int): Boolean = (l.flags & f) != 0 + + private def setLabelFlag(l: LabelNode1, f: Int): Unit = { + l.flags |= f + } + + private def clearLabelFlag(l: LabelNode1, f: Int): Unit = { + l.flags &= ~f + } + def isLabelReachable(label: LabelNode) = isLabelFlagSet(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) + def setLabelReachable(label: LabelNode) = setLabelFlag(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) + def clearLabelReachable(label: LabelNode) = clearLabelFlag(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) + + final case class LambdaMetaFactoryCall(indy: InvokeDynamicInsnNode, samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type) + + object LambdaMetaFactoryCall { + val lambdaMetaFactoryMetafactoryHandle = new Handle( + Opcodes.H_INVOKESTATIC, + "java/lang/invoke/LambdaMetafactory", + "metafactory", + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;", + /* itf = */ false) + + val lambdaMetaFactoryAltMetafactoryHandle = new Handle( + Opcodes.H_INVOKESTATIC, + "java/lang/invoke/LambdaMetafactory", + "altMetafactory", + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;", + /* itf = */ false) + + def unapply(insn: AbstractInsnNode): Option[(InvokeDynamicInsnNode, Type, Handle, Type, Array[Type])] = insn match { + case indy: InvokeDynamicInsnNode if indy.bsm == 
lambdaMetaFactoryMetafactoryHandle || indy.bsm == lambdaMetaFactoryAltMetafactoryHandle => + indy.bsmArgs match { + case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, _@_*) => + // LambdaMetaFactory performs a number of automatic adaptations when invoking the lambda + // implementation method (casting, boxing, unboxing, and primitive widening, see Javadoc). + // + // The closure optimizer supports only one of those adaptations: it will cast arguments + // to the correct type when re-writing a closure call to the body method. Example: + // + // val fun: String => String = l => l + // val l = List("") + // fun(l.head) + // + // The samMethodType of Function1 is `(Object)Object`, while the instantiatedMethodType + // is `(String)String`. The return type of `List.head` is `Object`. + // + // The implMethod has the signature `C$anonfun(String)String`. + // + // At the closure callsite, we have an `INVOKEINTERFACE Function1.apply (Object)Object`, + // so the object returned by `List.head` can be directly passed into the call (no cast). + // + // The closure object will cast the object to String before passing it to the implMethod. + // + // When re-writing the closure callsite to the implMethod, we have to insert a cast. + // + // The check below ensures that + // (1) the implMethod type has the expected signature (captured types plus argument types + // from instantiatedMethodType) + // (2) the receiver of the implMethod matches the first captured type, if any, otherwise + // the first parameter type of instantiatedMethodType + // (3) all parameters that are not the same in samMethodType and instantiatedMethodType + // are reference types, so that we can insert casts to perform the same adaptation + // that the closure object would. 
+ + val isStatic = implMethod.getTag == Opcodes.H_INVOKESTATIC + val indyParamTypes = Type.getArgumentTypes(indy.desc) + val instantiatedMethodArgTypes = instantiatedMethodType.getArgumentTypes + + val (receiverType, expectedImplMethodType) = + if (isStatic) { + val paramTypes = indyParamTypes ++ instantiatedMethodArgTypes + (None, Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) + } else if (implMethod.getTag == H_NEWINVOKESPECIAL) { + (Some(instantiatedMethodType.getReturnType), Type.getMethodType(Type.VOID_TYPE, instantiatedMethodArgTypes: _*)) + } else { + if (indyParamTypes.nonEmpty) { + val paramTypes = indyParamTypes.tail ++ instantiatedMethodArgTypes + (Some(indyParamTypes(0)), Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) + } else { + val paramTypes = instantiatedMethodArgTypes.tail + (Some(instantiatedMethodArgTypes(0)), Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) + } + } + + val isIndyLambda = ( + Type.getType(implMethod.getDesc) == expectedImplMethodType // (1) + && receiverType.forall(rt => implMethod.getOwner == rt.getInternalName) // (2) + && samMethodType.getArgumentTypes.corresponds(instantiatedMethodArgTypes)((samArgType, instArgType) => + samArgType == instArgType || isReference(samArgType) && isReference(instArgType)) // (3) + ) + + if (isIndyLambda) Some((indy, samMethodType, implMethod, instantiatedMethodType, indyParamTypes)) + else None + + case _ => None + } + case _ => None + } } def maxLocals(method: MethodNode): Int = { @@ -485,8 +674,11 @@ abstract class BackendUtils extends PerRunInit { var queue = new Array[Int](8) var top = -1 def enq(i: Int): Unit = { - if (top == queue.length - 1) - queue = java.util.Arrays.copyOf(queue, queue.length * 2) + if (top == queue.length - 1) { + val nq = new Array[Int](queue.length * 2) + Array.copy(queue, 0, nq, 0, queue.length) + queue = nq + } top += 1 queue(top) = i } @@ -511,7 +703,7 @@ abstract class BackendUtils 
extends PerRunInit { } } - val tcbIt = method.tryCatchBlocks.iterator() + val tcbIt = method.tryCatchBlocks.iterator while (tcbIt.hasNext) { val tcb = tcbIt.next() enqInsn(tcb.handler, 1) @@ -525,7 +717,7 @@ abstract class BackendUtils extends PerRunInit { * * However, the JVM spec does not require subroutines to `RET x` to their caller, they could return back to an * outer subroutine caller (nested subroutines), or `RETURN`, or use a static jump. Static analysis of subroutines - * is therefore complex (http://www21.in.tum.de/~kleing/papers/KleinW-TPHOLS03.pdf). + * is therefore complex (https://www21.in.tum.de/~kleing/papers/KleinW-TPHOLS03.pdf). * * The asm.Analyzer however makes the assumption that subroutines only occur in the shape emitted by early * javac, i.e., `RET` always returns to the next enclosing caller. So we do that as well. @@ -601,60 +793,6 @@ abstract class BackendUtils extends PerRunInit { } } - // not in `backendReporting` since there we don't have access to the `Callsite` class - def optimizerWarningSiteString(cs: callGraph.Callsite): String = - frontendAccess.backendReporting.siteString(cs.callsiteClass.internalName, cs.callsiteMethod.name) -} - -object BackendUtils { - /** - * A pseudo-flag, added MethodNodes whose maxLocals / maxStack are computed. This allows invoking - * `computeMaxLocalsMaxStack` whenever running an analyzer but performing the actual computation - * only when necessary. - * - * The largest JVM flag (as of JDK 8) is ACC_MANDATED (0x8000), however the asm framework uses - * the same trick and defines some pseudo flags - * - ACC_DEPRECATED = 0x20000 - * - ACC_SYNTHETIC_ATTRIBUTE = 0x40000 - * - ACC_CONSTRUCTOR = 0x80000 - * - * I haven't seen the value picked here in use anywhere. We make sure to remove the flag when - * it's no longer needed. 
- */ - private val ACC_MAXS_COMPUTED = 0x1000000 - def isMaxsComputed(method: MethodNode) = (method.access & ACC_MAXS_COMPUTED) != 0 - def setMaxsComputed(method: MethodNode) = method.access |= ACC_MAXS_COMPUTED - def clearMaxsComputed(method: MethodNode) = method.access &= ~ACC_MAXS_COMPUTED - - /** - * A pseudo-flag indicating if a MethodNode's unreachable code has been eliminated. - * - * The ASM Analyzer class does not compute any frame information for unreachable instructions. - * Transformations that use an analyzer (including inlining) therefore require unreachable code - * to be eliminated. - * - * This flag allows running dead code elimination whenever an analyzer is used. If the method - * is already optimized, DCE can return early. - */ - private val ACC_DCE_DONE = 0x2000000 - def isDceDone(method: MethodNode) = (method.access & ACC_DCE_DONE) != 0 - def setDceDone(method: MethodNode) = method.access |= ACC_DCE_DONE - def clearDceDone(method: MethodNode) = method.access &= ~ACC_DCE_DONE - - private val LABEL_REACHABLE_STATUS = 0x1000000 - private def isLabelFlagSet(l: LabelNode1, f: Int): Boolean = (l.flags & f) != 0 - - private def setLabelFlag(l: LabelNode1, f: Int): Unit = { - l.flags |= f - } - - private def clearLabelFlag(l: LabelNode1, f: Int): Unit = { - l.flags &= ~f - } - def isLabelReachable(label: LabelNode) = isLabelFlagSet(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) - def setLabelReachable(label: LabelNode) = setLabelFlag(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) - def clearLabelReachable(label: LabelNode) = clearLabelFlag(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) - abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { val declaredInnerClasses = mutable.Set.empty[T] @@ -709,7 +847,7 @@ object BackendUtils { m.exceptions.asScala foreach visitInternalName for (tcb <- m.tryCatchBlocks.asScala) visitInternalName(tcb.`type`) - val iter = 
m.instructions.iterator() + val iter = m.instructions.iterator while (iter.hasNext) iter.next() match { case ti: TypeInsnNode => visitInternalNameOrArrayReference(ti.desc) case fi: FieldInsnNode => visitInternalNameOrArrayReference(fi.owner); visitDescriptor(fi.desc) @@ -724,8 +862,13 @@ object BackendUtils { } } - def visitInternalName(internalName: InternalName): Unit = if (internalName != null) { - for (c <- getClassIfNested(internalName)) + private def containsChar(s: String, offset: Int, length: Int, char: Char): Boolean = { + val ix = s.indexOf(char, offset) + !(ix == -1 || ix >= offset + length) + } + + def visitInternalName(internalName: String, offset: Int, length: Int): Unit = if (internalName != null && containsChar(internalName, offset, length, '$')) { + for (c <- getClassIfNested(internalName.substring(offset, length))) if (!declaredInnerClasses.contains(c)) referredInnerClasses += c } @@ -736,7 +879,7 @@ object BackendUtils { def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) { val bracket = ref.lastIndexOf('[') if (bracket == -1) visitInternalName(ref) - else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref.substring(bracket + 2, ref.length - 1)) + else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref, bracket + 2, ref.length - 1) } // we are only interested in the class references in the descriptor, so we can skip over @@ -750,14 +893,14 @@ object BackendUtils { var seenDollar = false while ({val ch = desc.charAt(i); seenDollar ||= (ch == '$'); ch != ';'}) i += 1 if (seenDollar) - visitInternalName(desc.substring(start, i)) + visitInternalName(desc, start, i) } // skips over '[', ')', primitives i += 1 } case 'L' => - visitInternalName(desc.substring(1, desc.length - 1)) + visitInternalName(desc, 1, desc.length - 1) case '[' => visitInternalNameOrArrayReference(desc) @@ -789,7 +932,8 @@ object BackendUtils { } abstract class GenericSignatureVisitor(nestedOnly: Boolean) { - def 
visitInternalName(internalName: InternalName): Unit + final def visitInternalName(internalName: String): Unit = visitInternalName(internalName, 0, if (internalName eq null) 0 else internalName.length) + def visitInternalName(internalName: String, offset: Int, length: Int): Unit def raiseError(msg: String, sig: String, e: Option[Throwable] = None): Unit @@ -955,6 +1099,117 @@ object BackendUtils { } } } + + object handleOrdering extends Ordering[Handle] { + override def compare(x: Handle, y: Handle): Int = { + if (x eq y) return 0 + + val t = Ordering.Int.compare(x.getTag, y.getTag) + if (t != 0) return t + + val i = Ordering.Boolean.compare(x.isInterface, y.isInterface) + if (x.isInterface != y.isInterface) return i + + val o = x.getOwner compareTo y.getOwner + if (o != 0) return o + + val n = x.getName compareTo y.getName + if (n != 0) return n + + x.getDesc compareTo y.getDesc + } + } + + def isArrayGetLength(mi: MethodInsnNode): Boolean = mi.owner == "java/lang/reflect/Array" && mi.name == "getLength" && mi.desc == "(Ljava/lang/Object;)I" + + // If argument i of the method is null-checked, the bit `i+1` of the result is 1 + def argumentsNullCheckedByCallee(mi: MethodInsnNode): Long = { + if (isArrayGetLength(mi)) 1 + else 0 + } + + def classTagNewArrayArg(mi: MethodInsnNode, prodCons: ProdConsAnalyzer): InternalName = { + if (mi.name == "newArray" && mi.owner == "scala/reflect/ClassTag" && mi.desc == "(I)Ljava/lang/Object;") { + val prods = prodCons.initialProducersForValueAt(mi, prodCons.frameAt(mi).stackTop - 1) + if (prods.size == 1) prods.head match { + case ctApply: MethodInsnNode => + if (ctApply.name == "apply" && ctApply.owner == "scala/reflect/ClassTag$" && ctApply.desc == "(Ljava/lang/Class;)Lscala/reflect/ClassTag;") { + val clsProd = prodCons.initialProducersForValueAt(ctApply, prodCons.frameAt(ctApply).stackTop) + if (clsProd.size == 1) clsProd.head match { + case ldc: LdcInsnNode => + ldc.cst match { + case tp: Type if tp.getSort == Type.OBJECT || 
tp.getSort == Type.ARRAY => + return tp.getInternalName + case _ => + } + case _ => + } + } + case _ => + } + } + null + } + + // Check for an Array.getLength(x) call where x is statically known to be of array type + def isArrayGetLengthOnStaticallyKnownArray(mi: MethodInsnNode, typeAnalyzer: NonLubbingTypeFlowAnalyzer): Boolean = { + isArrayGetLength(mi) && { + val f = typeAnalyzer.frameAt(mi) + f.getValue(f.stackTop).getType.getSort == Type.ARRAY + } + } + + def getClassOnStaticallyKnownPrimitiveArray(mi: MethodInsnNode, typeAnalyzer: NonLubbingTypeFlowAnalyzer): Type = { + if (mi.name == "getClass" && mi.owner == "java/lang/Object" && mi.desc == "()Ljava/lang/Class;") { + val f = typeAnalyzer.frameAt(mi) + val tp = f.getValue(f.stackTop).getType + if (tp.getSort == Type.ARRAY) { + if (tp.getElementType.getSort != Type.OBJECT) + return tp + } + } + null + } + + lazy val primitiveTypes: Map[String, Type] = Map( + ("Unit", Type.VOID_TYPE), + ("Boolean", Type.BOOLEAN_TYPE), + ("Char", Type.CHAR_TYPE), + ("Byte", Type.BYTE_TYPE), + ("Short", Type.SHORT_TYPE), + ("Int", Type.INT_TYPE), + ("Float", Type.FLOAT_TYPE), + ("Long", Type.LONG_TYPE), + ("Double", Type.DOUBLE_TYPE)) + + private val primitiveManifestApplies: Map[String, String] = primitiveTypes map { + case (k, _) => (k, s"()Lscala/reflect/ManifestFactory$$${k}Manifest;") + } + + def isClassTagApply(mi: MethodInsnNode): Boolean = { + mi.owner == "scala/reflect/ClassTag$" && { + mi.name == "apply" && mi.desc == "(Ljava/lang/Class;)Lscala/reflect/ClassTag;" || + primitiveManifestApplies.get(mi.name).contains(mi.desc) + } + } + + def isModuleLoad(insn: AbstractInsnNode, nameMatches: InternalName => Boolean): Boolean = insn match { + case fi: FieldInsnNode => + fi.getOpcode == GETSTATIC && + nameMatches(fi.owner) && + fi.name == "MODULE$" && + fi.desc.length == fi.owner.length + 2 && + fi.desc.regionMatches(1, fi.owner, 0, fi.owner.length) + case _ => false + } + + def isRuntimeArrayLoadOrUpdate(insn: 
AbstractInsnNode): Boolean = insn.getOpcode == Opcodes.INVOKEVIRTUAL && { + val mi = insn.asInstanceOf[MethodInsnNode] + mi.owner == "scala/runtime/ScalaRunTime$" && { + mi.name == "array_apply" && mi.desc == "(Ljava/lang/Object;I)Ljava/lang/Object;" || + mi.name == "array_update" && mi.desc == "(Ljava/lang/Object;ILjava/lang/Object;)V" + } + } } // For performance (`Char => Boolean` is not specialized) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala index f0c21f090269..b5da4522f923 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index e23afd8a4a03..bea98549a151 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -18,10 +18,11 @@ import java.util import scala.annotation.switch import scala.tools.asm.tree.analysis._ -import scala.tools.asm.tree.{AbstractInsnNode, LdcInsnNode, MethodInsnNode, MethodNode, LabelNode} +import scala.tools.asm.tree._ import scala.tools.asm.{Opcodes, Type} -import scala.tools.nsc.backend.jvm.opt.BytecodeUtils +import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ +import scala.tools.nsc.backend.jvm.analysis.BackendUtils._ /** * See the package object `analysis` for details on the ASM analysis framework. @@ -37,11 +38,6 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ * call). However, the receiver is an value on the stack and consumed while interpreting the * instruction - so we can only gain some knowledge if we know that the receiver was an alias of * some other local variable or stack slot. Therefore we use the AliasingFrame class. - * - * TODO: - * Finally, we'd also like to exploit the knowledge gained from `if (x == null)` tests: x is known - * to be null in one branch, not null in the other. This will make use of alias tracking as well. - * We still have to figure out how to do this exactly in the analyzer framework. 
*/ /** @@ -63,6 +59,8 @@ sealed abstract class NullnessValue(final val isSize2: Boolean) extends Value { } final override def equals(other: Any) = this eq other.asInstanceOf[Object] + + def invert: NullnessValue = if (this == NullValue) NotNullValue else if (this == NotNullValue) NullValue else this } object NullValue extends NullnessValue(isSize2 = false) { override def toString = "Null" } @@ -72,10 +70,10 @@ object NotNullValue extends NullnessValue(isSize2 = false) { override def toStr object NullnessValue { def unknown(isSize2: Boolean) = if (isSize2) UnknownValue2 else UnknownValue1 - def unknown(insn: AbstractInsnNode) = if (BytecodeUtils.instructionResultSize(insn) == 2) UnknownValue2 else UnknownValue1 + def unknown(insn: AbstractInsnNode) = if (instructionResultSize(insn) == 2) UnknownValue2 else UnknownValue1 } -final class NullnessInterpreter(knownNonNullInvocation: MethodInsnNode => Boolean, method: MethodNode) extends Interpreter[NullnessValue](Opcodes.ASM5) { +final class NullnessInterpreter(knownNonNullInvocation: MethodInsnNode => Boolean, modulesNonNull: Boolean, method: MethodNode) extends Interpreter[NullnessValue](Opcodes.ASM5) { def newValue(tp: Type): NullnessValue = { // ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter, // which is provided by the framework. 
@@ -95,7 +93,7 @@ final class NullnessInterpreter(knownNonNullInvocation: MethodInsnNode => Boolea val isThis = local == 0 && (isInstanceMethod || { method.parameters != null && !method.parameters.isEmpty && { val p = method.parameters.get(0) - (p.access & Opcodes.ACC_SYNTHETIC) != 0 && p.name == "$this" + (p.access & Opcodes.ACC_SYNTHETIC) != 0 && p.name == s"$$this" } }) if (isThis) NotNullValue @@ -110,6 +108,11 @@ final class NullnessInterpreter(knownNonNullInvocation: MethodInsnNode => Boolea case _ => NullnessValue.unknown(insn) } + case Opcodes.GETSTATIC => + val fi = insn.asInstanceOf[FieldInsnNode] + if (modulesNonNull && isModuleLoad(fi, _ == fi.owner)) NotNullValue + else NullnessValue.unknown(insn) + // for Opcodes.NEW, we use Unknown. The value will become NotNull after the constructor call. case _ => NullnessValue.unknown(insn) } @@ -149,7 +152,7 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal private[this] var ifNullAliases: AliasSet = null // Auxiliary constructor required for implementing `NullnessAnalyzer.newFrame` - def this(src: Frame[_ <: NullnessValue]) { + def this(src: Frame[_ <: NullnessValue]) = { this(src.getLocals, src.getMaxStackSize) init(src) } @@ -214,6 +217,21 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal val numArgs = Type.getArgumentTypes(desc).length aliasesOf(this.stackTop - numArgs) + case INVOKESTATIC => + var nullChecked = BackendUtils.argumentsNullCheckedByCallee(insn.asInstanceOf[MethodInsnNode]) + var i = 0 + var res: AliasSet = null + while (nullChecked > 0) { + if ((nullChecked & 1L) != 0) { + val a = aliasesOf(this.stackTop - i) + if (res == null) res = a + else a.iterator.foreach(res.+=) + } + i += 1 + nullChecked >>= 1 + } + res + case ARRAYLENGTH | MONITORENTER | MONITOREXIT => @@ -230,11 +248,13 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal } } -/** - * This class is required to override the `newFrame` 
methods, which makes makes sure the analyzer - * uses NullnessFrames. - */ -class NullnessAnalyzer(knownNonNullInvocation: MethodInsnNode => Boolean, method: MethodNode) extends Analyzer[NullnessValue](new NullnessInterpreter(knownNonNullInvocation, method)) { +class NullnessAnalyzerImpl(methodNode: MethodNode, knownNonNullInvocation: MethodInsnNode => Boolean, modulesNonNull: Boolean) + extends Analyzer[NullnessValue](new NullnessInterpreter(knownNonNullInvocation, modulesNonNull, methodNode)) { + // override the `newFrame` methods to make sure the analyzer uses NullnessFrames. override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack) override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src) } + +class NullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName, knownNonNullInvocation: MethodInsnNode => Boolean, modulesNonNull: Boolean) + extends AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzerImpl(methodNode, knownNonNullInvocation, modulesNonNull)) + with AliasingAsmAnalyzerMarker diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala new file mode 100644 index 000000000000..9a2316cdaa95 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala @@ -0,0 +1,477 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package backend.jvm +package analysis + +import java.util + +import scala.annotation.switch +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import scala.tools.asm.Opcodes._ +import scala.tools.asm.tree._ +import scala.tools.asm.tree.analysis._ +import scala.tools.asm.{MethodVisitor, Type} +import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ + +/** + * This class provides additional queries over ASM's built-in `SourceValue` analysis. + * + * The analysis computes for each value in a frame a set of source instructions, which are the + * potential producers. Most instructions produce either nothing or a stack value. For example, + * a `LOAD` instruction is the producer of the value pushed onto the stack. The exception are + * `STORE` instructions, which produce a new value for a local variable slot, so they are used + * as producers for the value they stored. + * + * Note that pseudo-instructions are used as initial producers for parameters and local variables. + * See the documentation on class InitialProducer. + * + * This class implements the following queries over the data computed by the SourceValue analysis: + * + * - producersForValueAt(insn, slot) + * - consumersOfValueAt(insn, slot) + * + * - producersForInputsOf(insn) + * - consumersOfOutputsFrom(insn) + * + * - initialProducersForValueAt(insn, slot) + * - ultimateConsumersOfValueAt(insn, slot) + * + * - initialProducersForInputsOf(insn) + * - ultimateConsumersOfOutputsFrom(insn) + * + * The following operations are considered as copying operations: + * - xLOAD, xSTORE + * - DUP, DUP2, DUP_X1, DUP_X2, DUP2_X1, DUP2_X2 + * - SWAP + * - CHECKCAST + * + * If ever needed, we could introduce a mode where primitive conversions (l2i) are considered as + * copying operations. 
+ * + * Note on performance: the data flow analysis (SourceValue / SourceInterpreter, provided by ASM) + * is roughly 2-3x slower than a simple analysis (like BasicValue). The reason is that the merge + * function (merging producer sets) is more complex than merging simple basic values. + * See also the doc comment in the package object `analysis`. + */ +class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer(methodNode, classInternalName, new Analyzer(new InitialProducerSourceInterpreter)) { + /** + * Returns the potential producer instructions of a (local or stack) value in the frame of `insn`. + * This method simply returns the producer information computed by the SourceValue analysis. + */ + def producersForValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { + frameAt(insn).getValue(slot).insns.asScala.toSet + } + + /** + * Returns the potential consumer instructions of a (local or stack) value in the frame of `insn`. + * This is the counterpart of `producersForValueAt`. + */ + def consumersOfValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { + producersForValueAt(insn, slot).flatMap[AbstractInsnNode](prod => { + val outputNumber = outputValueSlots(prod).indexOf(slot) + _consumersOfOutputsFrom.get(prod).map(v => { + v(outputNumber) + }).getOrElse(Set.empty) + }) + } + + /** + * Returns the potential producer instructions of any of the values consumed by `insn`. 
+ */ + def producersForInputsOf(insn: AbstractInsnNode): Set[AbstractInsnNode] = { + inputValues(insn).iterator.flatMap(v => v.insns.asScala).toSet + } + + def consumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match { + case _: UninitializedLocalProducer => Set.empty + case ParameterProducer(local) => consumersOfValueAt(methodNode.instructions.getFirst, local) + case ExceptionProducer(handlerLabel, handlerStackTop) => consumersOfValueAt(handlerLabel, handlerStackTop) + case _ => + _consumersOfOutputsFrom.get(insn).map(v => Set.from[AbstractInsnNode](v.indices.iterator.flatMap(v.apply))).getOrElse(Set.empty) + } + + /** + * Returns the potential initial producer instructions of a value in the frame of `insn`. + * + * Unlike `producersForValueAt`, producers are tracked through copying instructions such as STORE + * and LOAD. If the producer of the value is a LOAD, then the producers of the stored value(s) are + * returned instead. + */ + def initialProducersForValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { + def initialProducers(insn: AbstractInsnNode, producedSlot: Int): Set[AbstractInsnNode] = { + if (isCopyOperation(insn)) { + val key = (insn, producedSlot) + _initialProducersCache.getOrElseUpdate(key, { + // prevent infinite recursion if an instruction is its own producer or consumer + // see cyclicProdCons in ProdConsAnalyzerTest + _initialProducersCache(key) = Set.empty + val (sourceValue, sourceValueSlot) = copyOperationSourceValue(insn, producedSlot) + sourceValue.insns.iterator.asScala.flatMap(initialProducers(_, sourceValueSlot)).toSet + }) + } else { + Set(insn) + } + } + producersForValueAt(insn, slot).flatMap(initialProducers(_, slot)) + } + + /** + * Returns the potential ultimate consumers of a value in the frame of `insn`. Consumers are + * tracked through copying operations such as STORE and LOAD. 
+ */ + def ultimateConsumersOfValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { + def ultimateConsumers(insn: AbstractInsnNode, consumedSlot: Int): Set[AbstractInsnNode] = { + if (isCopyOperation(insn)) { + val key = (insn, consumedSlot) + _ultimateConsumersCache.getOrElseUpdate(key, { + // prevent infinite recursion if an instruction is its own producer or consumer + // see cyclicProdCons in ProdConsAnalyzerTest + _ultimateConsumersCache(key) = Set.empty + for { + producedSlot <- copyOperationProducedValueSlots(insn, consumedSlot) + consumer <- consumersOfValueAt(insn.getNext, producedSlot) + ultimateConsumer <- ultimateConsumers(consumer, producedSlot) + } yield ultimateConsumer + }) + } else { + Set(insn) + } + } + consumersOfValueAt(insn, slot).flatMap(ultimateConsumers(_, slot)) + } + + def initialProducersForInputsOf(insn: AbstractInsnNode): Set[AbstractInsnNode] = { + inputValueSlots(insn).flatMap(slot => initialProducersForValueAt(insn, slot)).toSet + } + + def ultimateConsumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match { + case _: UninitializedLocalProducer => Set.empty + case _ => + lazy val next = insn match { + case _: ParameterProducer => methodNode.instructions.getFirst + case ExceptionProducer(handlerLabel, _) => handlerLabel + case _ => insn.getNext + } + outputValueSlots(insn).flatMap(slot => ultimateConsumersOfValueAt(next, slot)).toSet + } + + private def isCopyOperation(insn: AbstractInsnNode): Boolean = { + isLoadOrStore(insn) || { + (insn.getOpcode: @switch) match { + case DUP | DUP_X1 | DUP_X2 | DUP2 | DUP2_X1 | DUP2_X2 | SWAP | CHECKCAST => true + case _ => false + } + } + } + + /** + * Returns the value and its frame slot that `copyOp` copies into `producedSlot`. 
+ * + * Example: + * - copyOp = DUP_X1, assume it produces slots 2,3,4 + * - producedSlot = 3 + * - the result is the value at slot 2 in the frame of `copyOp` + */ + private def copyOperationSourceValue(copyOp: AbstractInsnNode, producedSlot: Int): (SourceValue, Int) = { + val frame = frameAt(copyOp) + + // Index of the produced value. Example: DUP_X1 produces 3 values, so producedIndex is 0, 1 or 2, + // where 0 corresponds to the lowest value on the stack. + def producedIndex(numConsumed: Int) = { + val numUsedSlotsBeforeCopy = frame.stackTop + 1 + producedSlot - (numUsedSlotsBeforeCopy - numConsumed) + } + + def stackValue(n: Int) = (frame.peekStack(n), frame.stackTop - n) + + def dupX1Case = (producedIndex(2): @switch) match { + case 0 | 2 => stackValue(0) + case 1 => stackValue(1) + } + + // Form 1 of dup_x2 + def dupX2Case = (producedIndex(3): @switch) match { + case 0 | 3 => stackValue(0) + case 1 => stackValue(2) + case 2 => stackValue(1) + } + + // Form 1 of dup2_x1 + def dup2X1Case = (producedIndex(3): @switch) match { + case 0 | 3 => stackValue(1) + case 1 | 4 => stackValue(0) + case 2 => stackValue(2) + } + + if (isLoad(copyOp)) { + val slot = copyOp.asInstanceOf[VarInsnNode].`var` + (frame.getLocal(slot), slot) + } else if (isStore(copyOp)) { + stackValue(0) + } else (copyOp.getOpcode: @switch) match { + case DUP => + stackValue(0) // the current stack top is the source of both produced values + + case DUP_X1 => + dupX1Case + + case DUP_X2 => + if (frame.peekStack(1).getSize == 2) dupX1Case + else dupX2Case + + case DUP2 => + if (frame.peekStack(0).getSize == 2) stackValue(0) + else { + (producedIndex(2): @switch) match { + case 0 | 2 => stackValue(1) + case 1 | 3 => stackValue(0) + } + } + + case DUP2_X1 => + if (frame.peekStack(0).getSize == 2) dupX1Case + else dup2X1Case + + case DUP2_X2 => + val v1isSize2 = frame.peekStack(0).getSize == 2 + if (v1isSize2) { + val v2isSize2 = frame.peekStack(1).getSize == 2 + if (v2isSize2) dupX1Case // Form 4 + 
else dupX2Case // Form 2 + } else { + val v3isSize2 = frame.peekStack(2).getSize == 2 + if (v3isSize2) dup2X1Case // Form 3 + else { + // Form 1 + (producedIndex(4): @switch) match { + case 0 | 4 => stackValue(1) + case 1 | 5 => stackValue(0) + case 2 => stackValue(3) + case 3 => stackValue(2) + } + } + } + + case SWAP => + if (producedIndex(2) == 0) stackValue(0) + else stackValue(1) + + case CHECKCAST => + stackValue(0) + } + } + + /** + * Returns the value slots into which `copyOp` copies the value at `consumedSlot`. + * + * Example: + * - copyOp = DUP_X1, assume it consumes slots 2,3 and produces 2,3,4 + * - if consumedSlot == 2, the result is Set(3) + * - if consumedSlot == 3, the result is Set(2, 4) + */ + private def copyOperationProducedValueSlots(copyOp: AbstractInsnNode, consumedSlot: Int): Set[Int] = { + if (isStore(copyOp)) Set(copyOp.asInstanceOf[VarInsnNode].`var`) + else { + val nextFrame = frameAt(copyOp.getNext) + val top = nextFrame.stackTop + + // Index of the consumed value. Example: DUP_X1 consumes two values, so consumedIndex is + // 0 or 1, where 0 corresponds to the lower value on the stack. 
+ def consumedIndex(numProduced: Int) = { + val numUsedSlotsAfterCopy = top + 1 + consumedSlot - (numUsedSlotsAfterCopy - numProduced) + } + + def dupX1Case = (consumedIndex(3): @switch) match { + case 0 => Set(top - 1) + case 1 => Set(top - 2, top) + } + + def dupX2Case = (consumedIndex(4): @switch) match { + case 0 => Set(top - 2) + case 1 => Set(top - 1) + case 2 => Set(top - 3, top) + } + + def dup2X1Case = (consumedIndex(5): @switch) match { + case 0 => Set(top - 2) + case 1 => Set(top - 4, top - 1) + case 2 => Set(top - 3, top) + } + + if (isLoad(copyOp)) Set(top) + else (copyOp.getOpcode: @switch) match { + case DUP => + Set(top - 1, top) + + case DUP_X1 => + dupX1Case + + case DUP_X2 => + if (nextFrame.peekStack(1).getSize == 2) dupX1Case + else dupX2Case + + case DUP2 => + if (nextFrame.peekStack(0).getSize == 2) Set(top - 1, top) + else (consumedIndex(4): @switch) match { + case 0 => Set(top - 3, top - 1) + case 1 => Set(top - 2, top) + } + + case DUP2_X1 => + if (nextFrame.peekStack(0).getSize == 2) dupX1Case + else dup2X1Case + + case DUP2_X2 => + val v1isSize2 = nextFrame.peekStack(0).getSize == 2 + if (v1isSize2) { + val v2isSize2 = nextFrame.peekStack(1).getSize == 2 + if (v2isSize2) dupX1Case // Form 4 + else dupX2Case // Form 2 + } else { + val v3isSize2 = nextFrame.peekStack(2).getSize == 2 + if (v3isSize2) dup2X1Case // Form 3 + else { + // Form 1 + (consumedIndex(6): @switch) match { + case 0 => Set(top - 3) + case 1 => Set(top - 2) + case 2 => Set(top - 5, top - 1) + case 3 => Set(top - 4, top) + } + } + } + + case SWAP => + if (consumedIndex(2) == 0) Set(top) + else Set(top - 1) + + case CHECKCAST => + Set(top) + } + } + } + + /** Returns the frame values consumed by executing `insn`. */ + private def inputValues(insn: AbstractInsnNode): Seq[SourceValue] = { + lazy val frame = frameAt(insn) + inputValueSlots(insn) map frame.getValue + } + + /** Returns the frame slots holding the values consumed by executing `insn`. 
*/ + private def inputValueSlots(insn: AbstractInsnNode): Seq[Int] = { + if (insn.getOpcode == -1) return Seq.empty + if (isLoad(insn)) { + Seq(insn.asInstanceOf[VarInsnNode].`var`) + } else if (insn.getOpcode == IINC) { + Seq(insn.asInstanceOf[IincInsnNode].`var`) + } else { + val frame = frameAt(insn) + val prodCons = InstructionStackEffect.forAsmAnalysis(insn, frame) + val stackSize = frame.getLocals + frame.getStackSize + (stackSize - InstructionStackEffect.cons(prodCons)) until stackSize + } + } + + /** Returns the frame slots holding the values produced by executing `insn`. */ + private def outputValueSlots(insn: AbstractInsnNode): Seq[Int] = insn match { + case ParameterProducer(local) => Seq(local) + case UninitializedLocalProducer(local) => Seq(local) + case ExceptionProducer(_, stackTop) => Seq(stackTop) + case _ => + if (insn.getOpcode == -1) return Seq.empty + if (isStore(insn)) { + Seq(insn.asInstanceOf[VarInsnNode].`var`) + } else if (insn.getOpcode == IINC) { + Seq(insn.asInstanceOf[IincInsnNode].`var`) + } else { + val frame = frameAt(insn) + val prodCons = InstructionStackEffect.forAsmAnalysis(insn, frame) + val nextFrame = frameAt(insn.getNext) + val stackSize = nextFrame.getLocals + nextFrame.getStackSize + (stackSize - InstructionStackEffect.prod(prodCons)) until stackSize + } + } + + /** For each instruction, a set of potential consumers of the produced values. 
*/ + private lazy val _consumersOfOutputsFrom: Map[AbstractInsnNode, Vector[Set[AbstractInsnNode]]] = { + var res = Map.empty[AbstractInsnNode, Vector[Set[AbstractInsnNode]]] + for { + insn <- methodNode.instructions.iterator.asScala + frame = frameAt(insn) + i <- inputValueSlots(insn) + producer <- frame.getValue(i).insns.asScala + } { + val producedSlots = outputValueSlots(producer) + val currentConsumers = res.getOrElse(producer, Vector.fill(producedSlots.size)(Set.empty[AbstractInsnNode])) + val outputIndex = producedSlots.indexOf(i) + res = res.updated(producer, currentConsumers.updated(outputIndex, currentConsumers(outputIndex) + insn)) + } + res + } + + private val _initialProducersCache: mutable.HashMap[(AbstractInsnNode, Int), Set[AbstractInsnNode]] = mutable.HashMap.empty + private val _ultimateConsumersCache: mutable.HashMap[(AbstractInsnNode, Int), Set[AbstractInsnNode]] = mutable.HashMap.empty +} + +/** + * A class for pseudo-instructions representing the initial producers of local values that have + * no producer instruction in the method: + * - parameters, including `this` + * - uninitialized local variables + * - exception values in handlers + * + * The ASM built-in SourceValue analysis yields an empty producers set for such values. This leads + * to ambiguities. Example (in Java one can re-assign parameter): + * + * int foo(int a) { + * if (a == 0) a = 1; + * return a; + * } + * + * In the first frame of the method, the SourceValue for parameter `a` gives an empty set of + * producer instructions. + * + * In the frame of the `IRETURN` instruction, the SourceValue for parameter `a` lists a single + * producer instruction: the `ISTORE 1`. This makes it look as if there was a single producer for + * `a`, where in fact it might still hold the parameter's initial value. 
+ */ +abstract class InitialProducer extends AbstractInsnNode(-1) { + override def getType: Int = throw new UnsupportedOperationException + override def clone(labels: util.Map[LabelNode, LabelNode]): AbstractInsnNode = throw new UnsupportedOperationException + override def accept(cv: MethodVisitor): Unit = throw new UnsupportedOperationException +} + +case class ParameterProducer(local: Int) extends InitialProducer +case class UninitializedLocalProducer(local: Int) extends InitialProducer +case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerStackTop: Int) extends InitialProducer + +class InitialProducerSourceInterpreter extends SourceInterpreter(scala.tools.asm.Opcodes.ASM7) { + override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): SourceValue = { + new SourceValue(tp.getSize, ParameterProducer(local)) + } + + override def newEmptyValue(local: Int): SourceValue = { + new SourceValue(1, UninitializedLocalProducer(local)) + } + + override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[SourceValue], exceptionType: Type): SourceValue = { + // -1 to go from the number of locals to the (0-based) index of the last local + // +1 because this value is about to be pushed onto `handlerFrame` + val handlerStackTop = handlerFrame.getLocals - 1 + 1 + new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerStackTop)) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala deleted file mode 100644 index 71d815518d03..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala +++ /dev/null @@ -1,482 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package backend.jvm -package analysis - -import java.util - -import scala.annotation.switch -import scala.collection.mutable -import scala.tools.asm.{Type, MethodVisitor} -import scala.tools.asm.Opcodes._ -import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis._ - -import opt.BytecodeUtils._ - -import scala.collection.JavaConverters._ - -/** - * This class provides additional queries over ASM's built-in `SourceValue` analysis. - * - * The analysis computes for each value in a frame a set of source instructions, which are the - * potential producers. Most instructions produce either nothing or a stack value. For example, - * a `LOAD` instruction is the producer of the value pushed onto the stack. The exception are - * `STORE` instructions, which produce a new value for a local variable slot, so they are used - * as producers for the value they stored. - * - * Note that pseudo-instructions are used as initial producers for parameters and local variables. - * See the documentation on class InitialProducer. - * - * This class implements the following queries over the data computed by the SourceValue analysis: - * - * - producersForValueAt(insn, slot) - * - consumersOfValueAt(insn, slot) - * - * - producersForInputsOf(insn) - * - consumersOfOutputsFrom(insn) - * - * - initialProducersForValueAt(insn, slot) - * - ultimateConsumersOfValueAt(insn, slot) - * - * - initialProducersForInputsOf(insn) - * - ultimateConsumersOfOutputsFrom(insn) - * - * The following operations are considered as copying operations: - * - xLOAD, xSTORE - * - DUP, DUP2, DUP_X1, DUP_X2, DUP2_X1, DUP2_X2 - * - SWAP - * - CHECKCAST - * - * If ever needed, we could introduce a mode where primitive conversions (l2i) are considered as - * copying operations. 
- * - * Note on performance: thee data flow analysis (SourceValue / SourceInterpreter, provided by ASM) - * is roughly 2-3x slower than a simple analysis (like BasicValue). The reason is that the merge - * function (merging producer sets) is more complex than merging simple basic values. - * See also the doc comment in the package object `analysis`. - */ -trait ProdConsAnalyzerImpl { - val methodNode: MethodNode - - def frameAt(insn: AbstractInsnNode): Frame[SourceValue] - - /** - * Returns the potential producer instructions of a (local or stack) value in the frame of `insn`. - * This method simply returns the producer information computed by the SourceValue analysis. - */ - def producersForValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { - frameAt(insn).getValue(slot).insns.asScala.toSet - } - - /** - * Returns the potential consumer instructions of a (local or stack) value in the frame of `insn`. - * This is the counterpart of `producersForValueAt`. - */ - def consumersOfValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { - producersForValueAt(insn, slot).flatMap(prod => { - val outputNumber = outputValueSlots(prod).indexOf(slot) - _consumersOfOutputsFrom.get(prod).map(v => { - v(outputNumber) - }).getOrElse(Set.empty) - }) - } - - /** - * Returns the potential producer instructions of any of the values consumed by `insn`. 
- */ - def producersForInputsOf(insn: AbstractInsnNode): Set[AbstractInsnNode] = { - inputValues(insn).iterator.flatMap(v => v.insns.asScala).toSet - } - - def consumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match { - case _: UninitializedLocalProducer => Set.empty - case ParameterProducer(local) => consumersOfValueAt(methodNode.instructions.getFirst, local) - case ExceptionProducer(handlerLabel, handlerStackTop) => consumersOfValueAt(handlerLabel, handlerStackTop) - case _ => - _consumersOfOutputsFrom.get(insn).map(v => v.indices.flatMap(v.apply)(collection.breakOut): Set[AbstractInsnNode]).getOrElse(Set.empty) - } - - /** - * Returns the potential initial producer instructions of a value in the frame of `insn`. - * - * Unlike `producersForValueAt`, producers are tracked through copying instructions such as STORE - * and LOAD. If the producer of the value is a LOAD, then the producers of the stored value(s) are - * returned instead. - */ - def initialProducersForValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { - def initialProducers(insn: AbstractInsnNode, producedSlot: Int): Set[AbstractInsnNode] = { - if (isCopyOperation(insn)) { - val key = (insn, producedSlot) - _initialProducersCache.getOrElseUpdate(key, { - // prevent infinite recursion if an instruction is its own producer or consumer - // see cyclicProdCons in ProdConsAnalyzerTest - _initialProducersCache(key) = Set.empty - val (sourceValue, sourceValueSlot) = copyOperationSourceValue(insn, producedSlot) - sourceValue.insns.iterator.asScala.flatMap(initialProducers(_, sourceValueSlot)).toSet - }) - } else { - Set(insn) - } - } - producersForValueAt(insn, slot).flatMap(initialProducers(_, slot)) - } - - /** - * Returns the potential ultimate consumers of a value in the frame of `insn`. Consumers are - * tracked through copying operations such as SOTRE and LOAD. 
- */ - def ultimateConsumersOfValueAt(insn: AbstractInsnNode, slot: Int): Set[AbstractInsnNode] = { - def ultimateConsumers(insn: AbstractInsnNode, consumedSlot: Int): Set[AbstractInsnNode] = { - if (isCopyOperation(insn)) { - val key = (insn, consumedSlot) - _ultimateConsumersCache.getOrElseUpdate(key, { - // prevent infinite recursion if an instruction is its own producer or consumer - // see cyclicProdCons in ProdConsAnalyzerTest - _ultimateConsumersCache(key) = Set.empty - for { - producedSlot <- copyOperationProducedValueSlots(insn, consumedSlot) - consumer <- consumersOfValueAt(insn.getNext, producedSlot) - ultimateConsumer <- ultimateConsumers(consumer, producedSlot) - } yield ultimateConsumer - }) - } else { - Set(insn) - } - } - consumersOfValueAt(insn, slot).flatMap(ultimateConsumers(_, slot)) - } - - def initialProducersForInputsOf(insn: AbstractInsnNode): Set[AbstractInsnNode] = { - inputValueSlots(insn).flatMap(slot => initialProducersForValueAt(insn, slot)).toSet - } - - def ultimateConsumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match { - case _: UninitializedLocalProducer => Set.empty - case _ => - lazy val next = insn match { - case _: ParameterProducer => methodNode.instructions.getFirst - case ExceptionProducer(handlerLabel, _) => handlerLabel - case _ => insn.getNext - } - outputValueSlots(insn).flatMap(slot => ultimateConsumersOfValueAt(next, slot)).toSet - } - - private def isCopyOperation(insn: AbstractInsnNode): Boolean = { - isLoadOrStore(insn) || { - (insn.getOpcode: @switch) match { - case DUP | DUP_X1 | DUP_X2 | DUP2 | DUP2_X1 | DUP2_X2 | SWAP | CHECKCAST => true - case _ => false - } - } - } - - /** - * Returns the value and its frame slot that `copyOp` copies into `producedSlot`. 
- * - * Example: - * - copyOp = DUP_X1, assume it produces slots 2,3,4 - * - producedSlot = 3 - * - the result is the value at slot 2 in the frame of `copyOp` - */ - private def copyOperationSourceValue(copyOp: AbstractInsnNode, producedSlot: Int): (SourceValue, Int) = { - val frame = frameAt(copyOp) - - // Index of the produced value. Example: DUP_X1 produces 3 values, so producedIndex is 0, 1 or 2, - // where 0 corresponds to the lowest value on the stack. - def producedIndex(numConsumed: Int) = { - val numUsedSlotsBeforeCopy = frame.stackTop + 1 - producedSlot - (numUsedSlotsBeforeCopy - numConsumed) - } - - def stackValue(n: Int) = (frame.peekStack(n), frame.stackTop - n) - - def dupX1Case = (producedIndex(2): @switch) match { - case 0 | 2 => stackValue(0) - case 1 => stackValue(1) - } - - // Form 1 of dup_x2 - def dupX2Case = (producedIndex(3): @switch) match { - case 0 | 3 => stackValue(0) - case 1 => stackValue(2) - case 2 => stackValue(1) - } - - // Form 1 of dup2_x1 - def dup2X1Case = (producedIndex(3): @switch) match { - case 0 | 3 => stackValue(1) - case 1 | 4 => stackValue(0) - case 2 => stackValue(2) - } - - if (isLoad(copyOp)) { - val slot = copyOp.asInstanceOf[VarInsnNode].`var` - (frame.getLocal(slot), slot) - } else if (isStore(copyOp)) { - stackValue(0) - } else (copyOp.getOpcode: @switch) match { - case DUP => - stackValue(0) // the current stack top is the source of both produced values - - case DUP_X1 => - dupX1Case - - case DUP_X2 => - if (frame.peekStack(1).getSize == 2) dupX1Case - else dupX2Case - - case DUP2 => - if (frame.peekStack(0).getSize == 2) stackValue(0) - else { - (producedIndex(2): @switch) match { - case 0 | 2 => stackValue(1) - case 1 | 3 => stackValue(0) - } - } - - case DUP2_X1 => - if (frame.peekStack(0).getSize == 2) dupX1Case - else dup2X1Case - - case DUP2_X2 => - val v1isSize2 = frame.peekStack(0).getSize == 2 - if (v1isSize2) { - val v2isSize2 = frame.peekStack(1).getSize == 2 - if (v2isSize2) dupX1Case // Form 4 - 
else dupX2Case // Form 2 - } else { - val v3isSize2 = frame.peekStack(2).getSize == 2 - if (v3isSize2) dup2X1Case // Form 3 - else { - // Form 1 - (producedIndex(4): @switch) match { - case 0 | 4 => stackValue(1) - case 1 | 5 => stackValue(0) - case 2 => stackValue(3) - case 3 => stackValue(2) - } - } - } - - case SWAP => - if (producedIndex(2) == 0) stackValue(0) - else stackValue(1) - - case CHECKCAST => - stackValue(0) - } - } - - /** - * Returns the value slots into which `copyOp` copies the value at `consumedSlot`. - * - * Example: - * - copyOp = DUP_X1, assume it consumes slots 2,3 and produces 2,3,4 - * - if consumedSlot == 2, the result is Set(3) - * - if consumedSlot == 3, the result is Set(2, 4) - */ - private def copyOperationProducedValueSlots(copyOp: AbstractInsnNode, consumedSlot: Int): Set[Int] = { - if (isStore(copyOp)) Set(copyOp.asInstanceOf[VarInsnNode].`var`) - else { - val nextFrame = frameAt(copyOp.getNext) - val top = nextFrame.stackTop - - // Index of the consumed value. Example: DUP_X1 consumes two values, so consumedIndex is - // 0 or 1, where 0 corresponds to the lower value on the stack. 
- def consumedIndex(numProduced: Int) = { - val numUsedSlotsAfterCopy = top + 1 - consumedSlot - (numUsedSlotsAfterCopy - numProduced) - } - - def dupX1Case = (consumedIndex(3): @switch) match { - case 0 => Set(top - 1) - case 1 => Set(top - 2, top) - } - - def dupX2Case = (consumedIndex(4): @switch) match { - case 0 => Set(top - 2) - case 1 => Set(top - 1) - case 2 => Set(top - 3, top) - } - - def dup2X1Case = (consumedIndex(5): @switch) match { - case 0 => Set(top - 2) - case 1 => Set(top - 4, top - 1) - case 2 => Set(top - 3, top) - } - - if (isLoad(copyOp)) Set(top) - else (copyOp.getOpcode: @switch) match { - case DUP => - Set(top - 1, top) - - case DUP_X1 => - dupX1Case - - case DUP_X2 => - if (nextFrame.peekStack(1).getSize == 2) dupX1Case - else dupX2Case - - case DUP2 => - if (nextFrame.peekStack(0).getSize == 2) Set(top - 1, top) - else (consumedIndex(4): @switch) match { - case 0 => Set(top - 3, top - 1) - case 1 => Set(top - 2, top) - } - - case DUP2_X1 => - if (nextFrame.peekStack(0).getSize == 2) dupX1Case - else dup2X1Case - - case DUP2_X2 => - val v1isSize2 = nextFrame.peekStack(0).getSize == 2 - if (v1isSize2) { - val v2isSize2 = nextFrame.peekStack(1).getSize == 2 - if (v2isSize2) dupX1Case // Form 4 - else dupX2Case // Form 2 - } else { - val v3isSize2 = nextFrame.peekStack(2).getSize == 2 - if (v3isSize2) dup2X1Case // Form 3 - else { - // Form 1 - (consumedIndex(6): @switch) match { - case 0 => Set(top - 3) - case 1 => Set(top - 2) - case 2 => Set(top - 5, top - 1) - case 3 => Set(top - 4, top) - } - } - } - - case SWAP => - if (consumedIndex(2) == 0) Set(top) - else Set(top - 1) - - case CHECKCAST => - Set(top) - } - } - } - - /** Returns the frame values consumed by executing `insn`. */ - private def inputValues(insn: AbstractInsnNode): Seq[SourceValue] = { - lazy val frame = frameAt(insn) - inputValueSlots(insn) map frame.getValue - } - - /** Returns the frame slots holding the values consumed by executing `insn`. 
*/ - private def inputValueSlots(insn: AbstractInsnNode): Seq[Int] = { - if (insn.getOpcode == -1) return Seq.empty - if (isLoad(insn)) { - Seq(insn.asInstanceOf[VarInsnNode].`var`) - } else if (insn.getOpcode == IINC) { - Seq(insn.asInstanceOf[IincInsnNode].`var`) - } else { - val frame = frameAt(insn) - val prodCons = InstructionStackEffect.forAsmAnalysis(insn, frame) - val stackSize = frame.getLocals + frame.getStackSize - (stackSize - InstructionStackEffect.cons(prodCons)) until stackSize - } - } - - /** Returns the frame slots holding the values produced by executing `insn`. */ - private def outputValueSlots(insn: AbstractInsnNode): Seq[Int] = insn match { - case ParameterProducer(local) => Seq(local) - case UninitializedLocalProducer(local) => Seq(local) - case ExceptionProducer(_, stackTop) => Seq(stackTop) - case _ => - if (insn.getOpcode == -1) return Seq.empty - if (isStore(insn)) { - Seq(insn.asInstanceOf[VarInsnNode].`var`) - } else if (insn.getOpcode == IINC) { - Seq(insn.asInstanceOf[IincInsnNode].`var`) - } else { - val frame = frameAt(insn) - val prodCons = InstructionStackEffect.forAsmAnalysis(insn, frame) - val nextFrame = frameAt(insn.getNext) - val stackSize = nextFrame.getLocals + nextFrame.getStackSize - (stackSize - InstructionStackEffect.prod(prodCons)) until stackSize - } - } - - /** For each instruction, a set of potential consumers of the produced values. 
*/ - private lazy val _consumersOfOutputsFrom: Map[AbstractInsnNode, Vector[Set[AbstractInsnNode]]] = { - var res = Map.empty[AbstractInsnNode, Vector[Set[AbstractInsnNode]]] - for { - insn <- methodNode.instructions.iterator.asScala - frame = frameAt(insn) - i <- inputValueSlots(insn) - producer <- frame.getValue(i).insns.asScala - } { - val producedSlots = outputValueSlots(producer) - val currentConsumers = res.getOrElse(producer, Vector.fill(producedSlots.size)(Set.empty[AbstractInsnNode])) - val outputIndex = producedSlots.indexOf(i) - res = res.updated(producer, currentConsumers.updated(outputIndex, currentConsumers(outputIndex) + insn)) - } - res - } - - private val _initialProducersCache: mutable.AnyRefMap[(AbstractInsnNode, Int), Set[AbstractInsnNode]] = mutable.AnyRefMap.empty - private val _ultimateConsumersCache: mutable.AnyRefMap[(AbstractInsnNode, Int), Set[AbstractInsnNode]] = mutable.AnyRefMap.empty -} - -/** - * A class for pseudo-instructions representing the initial producers of local values that have - * no producer instruction in the method: - * - parameters, including `this` - * - uninitialized local variables - * - exception values in handlers - * - * The ASM built-in SourceValue analysis yields an empty producers set for such values. This leads - * to ambiguities. Example (in Java one can re-assign parameter): - * - * int foo(int a) { - * if (a == 0) a = 1; - * return a; - * } - * - * In the first frame of the method, the SourceValue for parameter `a` gives an empty set of - * producer instructions. - * - * In the frame of the `IRETURN` instruction, the SourceValue for parameter `a` lists a single - * producer instruction: the `ISTORE 1`. This makes it look as if there was a single producer for - * `a`, where in fact it might still hold the parameter's initial value. 
- */ -abstract class InitialProducer extends AbstractInsnNode(-1) { - override def getType: Int = throw new UnsupportedOperationException - override def clone(labels: util.Map[LabelNode, LabelNode]): AbstractInsnNode = throw new UnsupportedOperationException - override def accept(cv: MethodVisitor): Unit = throw new UnsupportedOperationException -} - -case class ParameterProducer(local: Int) extends InitialProducer -case class UninitializedLocalProducer(local: Int) extends InitialProducer -case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerStackTop: Int) extends InitialProducer - -class InitialProducerSourceInterpreter extends SourceInterpreter(scala.tools.asm.Opcodes.ASM7) { - override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): SourceValue = { - new SourceValue(tp.getSize, ParameterProducer(local)) - } - - override def newEmptyValue(local: Int): SourceValue = { - new SourceValue(1, UninitializedLocalProducer(local)) - } - - override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[SourceValue], exceptionType: Type): SourceValue = { - // -1 to go from the number of locals to the (0-based) index of the last local - // +1 because this value is about to be pushed onto `handlerFrame` - val handlerStackTop = handlerFrame.getLocals - 1 + 1 - new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerStackTop)) - } -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowAnalyzer.scala new file mode 100644 index 000000000000..0fa174c63ab9 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowAnalyzer.scala @@ -0,0 +1,136 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package backend.jvm +package analysis + +import scala.annotation.tailrec +import scala.tools.asm.{Opcodes, Type} +import scala.tools.asm.tree.{AbstractInsnNode, InsnNode, MethodNode} +import scala.tools.asm.tree.analysis.{Analyzer, BasicInterpreter, BasicValue} +import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.analysis.TypeFlowInterpreter._ +import scala.tools.nsc.backend.jvm.analysis.BackendUtils.LambdaMetaFactoryCall +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ + +abstract class TypeFlowInterpreter extends BasicInterpreter(scala.tools.asm.Opcodes.ASM7) { + override def newParameterValue(isInstanceMethod: Boolean, local: Int, tpe: Type): BasicValue = + new ParamValue(local, tpe) + + override def newValue(tp: Type): BasicValue = { + if (tp == null) UninitializedValue + else if (isRef(tp)) new SpecialAwareBasicValue(tp) + else super.newValue(tp) + } + + def isRef(tp: Type): Boolean = tp != null && (tp.getSort match { + case Type.OBJECT | Type.ARRAY => true + case _ => false + }) + + override def binaryOperation(insn: AbstractInsnNode, value1: BasicValue, value2: BasicValue): BasicValue = insn.getOpcode match { + case Opcodes.AALOAD => new AaloadValue(insn.asInstanceOf[InsnNode]) // see [[AaloadValue]] + case _ => super.binaryOperation(insn, value1, value2) + } + + override def naryOperation(insn: AbstractInsnNode, values: java.util.List[_ <: BasicValue]): BasicValue = { + val v = super.naryOperation(insn, values) + insn.getOpcode match { + case Opcodes.INVOKEDYNAMIC => insn match { + case LambdaMetaFactoryCall(_, _, _, _, _) => new LMFValue(v.getType) + case _ => v + } + case _ => v + } + } + + def refLub(a: BasicValue, b: BasicValue): BasicValue + + @tailrec + override final def merge(a: BasicValue, b: BasicValue): BasicValue = { + if (a == b) a + else if 
(a.isInstanceOf[SpecialValue] || b.isInstanceOf[SpecialValue]) merge(new SpecialAwareBasicValue(a.getType), new SpecialAwareBasicValue(b.getType)) + else if (isRef(a.getType) && isRef(b.getType)) refLub(a, b) + else UninitializedValue + } +} + +object TypeFlowInterpreter { + // Marker trait for BasicValue subclasses that add a special meaning on top of the value's `getType`. + trait SpecialValue + + private val obj = Type.getObjectType("java/lang/Object") + + // A BasicValue with equality that knows about special versions + class SpecialAwareBasicValue(tpe: Type) extends BasicValue(tpe) { + override def equals(other: Any): Boolean = { + this match { + case tav: AaloadValue => other match { + case oav: AaloadValue => tav.aaload == oav.aaload + case _ => false + } + case _: LMFValue => other.isInstanceOf[LMFValue] && super.equals(other) + case pv: ParamValue => other.isInstanceOf[ParamValue] && pv.local == other.asInstanceOf[ParamValue].local && super.equals(other) + case _ => !other.isInstanceOf[SpecialValue] && super.equals(other) // A non-special value cannot equal a special value + } + } + + override def hashCode: Int = this match { + case av: AaloadValue => av.aaload.hashCode + case pv: ParamValue => pv.local + super.hashCode + case _ => super.hashCode + } + } + + val ObjectValue = new SpecialAwareBasicValue(BasicValue.REFERENCE_VALUE.getType) + val UninitializedValue = new SpecialAwareBasicValue(null) + + // In the interpreter, visiting an AALOAD, we don't know the type of the array + // just by looking at the instruction. By using an AaloadValue for the value produced + // by the AALOAD, we can go back to the AALOAD instruction and get the type of its input + // (once the analysis is done). See preciseAaloadTypeDesc. + // Note that the merge / refLub function discards AaloadValue instances (unless the merged values + // denote the same AALOAD instruction), so if a value may have other producers than the AALOAD, + // we just get Object. 
+ class AaloadValue(val aaload: InsnNode) extends SpecialAwareBasicValue(obj) with SpecialValue + + // Note: merging two LMFValue with the same underlying type gives a LMFValue, but if the + // underlying types differ, the merge is just a BasicValue + class LMFValue(tpe: Type) extends SpecialAwareBasicValue(tpe) with SpecialValue + + // Note: merging two ParamValue with the same underlying type gives a ParamValue, but if the + // underlying types differ, the merge is just a BasicValue + class ParamValue(val local: Int, tpe: Type) extends SpecialAwareBasicValue(tpe) with SpecialValue +} + +/** + * A [[TypeFlowInterpreter]] which collapses LUBs of non-equal reference types to Object. + * This could be made more precise by looking up ClassBTypes for the two reference types and using + * the `jvmWiseLUB` method. + */ +class NonLubbingTypeFlowInterpreter extends TypeFlowInterpreter { + def refLub(a: BasicValue, b: BasicValue): BasicValue = ObjectValue +} + +class NonLubbingTypeFlowAnalyzer(methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer(methodNode, classInternalName, new Analyzer(new NonLubbingTypeFlowInterpreter)) { + // see [[AaloadValue]] + def preciseAaloadTypeDesc(value: BasicValue): String = value match { + case aaloadValue: AaloadValue => + val f = frameAt(aaloadValue.aaload) + val arrDesc = f.getValue(f.stackTop - 1).getType.getDescriptor + // TODO make it safe in case we don't get an array type + arrDesc.substring(1) // drop `[` + + case _ => value.getType.getDescriptor + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala deleted file mode 100644 index baa4450c5bb3..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package backend.jvm -package analysis - -import scala.tools.asm.Type -import scala.tools.asm.tree.analysis.{BasicValue, BasicInterpreter} - -abstract class TypeFlowInterpreter extends BasicInterpreter(scala.tools.asm.Opcodes.ASM7) { - override def newValue(tp: Type) = { - if (tp == null) super.newValue(tp) - else if (isRef(tp)) new BasicValue(tp) - else super.newValue(tp) - } - - def isRef(tp: Type) = tp != null && (tp.getSort match { - case Type.OBJECT | Type.ARRAY => true - case _ => false - }) - - def refLub(a: BasicValue, b: BasicValue): BasicValue - - override def merge(a: BasicValue, b: BasicValue): BasicValue = { - if (a == b) a - else if (isRef(a.getType) && isRef(b.getType)) refLub(a, b) - else BasicValue.UNINITIALIZED_VALUE - } -} - -/** - * A [[TypeFlowInterpreter]] which collapses LUBs of non-equal reference types to Object. - * This could be made more precise by looking up ClassBTypes for the two reference types and using - * the `jvmWiseLUB` method. - */ -class NonLubbingTypeFlowInterpreter extends TypeFlowInterpreter { - def refLub(a: BasicValue, b: BasicValue): BasicValue = BasicValue.REFERENCE_VALUE // java/lang/Object -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala index 9ba60e975c3a..b5b8f6e61795 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala index ec7a46300a65..faa89a1ad68d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,13 +14,15 @@ package scala.tools.nsc package backend.jvm package opt -import scala.annotation.tailrec -import scala.collection.JavaConverters._ +import scala.annotation.{tailrec, unused} +import scala.collection.AbstractIterator import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.tools.asm.Opcodes._ import scala.tools.asm.Type import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.analysis.{AsmAnalyzer, BackendUtils, ProdConsAnalyzer} import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class BoxUnbox { @@ -188,7 +190,7 @@ abstract class BoxUnbox { lazy val prodCons = new ProdConsAnalyzer(method, owner) - var nextLocal = backendUtils.maxLocals(method) + var nextLocal = BackendUtils.maxLocals(method) def getLocal(size: Int) = { val r = nextLocal nextLocal += size @@ -197,9 +199,9 @@ abstract class BoxUnbox { var maxStackGrowth = 0 - /** Method M1 for eliminating box-unbox pairs (see doc comment in the beginning of this file) */ + /* Method M1 for eliminating box-unbox pairs (see doc comment in the beginning of this file) */ def replaceBoxOperationsSingleCreation(creation: BoxCreation, finalCons: Set[BoxConsumer], boxKind: BoxKind, keepBox: Boolean): Unit = { - /** + /* * If the box is eliminated, all copy operations (loads, stores, others) of the box need to * be removed. 
This method returns all copy operations that should be removed. * @@ -233,20 +235,20 @@ abstract class BoxUnbox { }) if (canRewrite) { - val localSlots: Vector[(Int, Type)] = boxKind.boxedTypes.map(tp => (getLocal(tp.getSize), tp))(collection.breakOut) + val localSlots = Vector.from[(Int, Type)](boxKind.boxedTypes.iterator.map(tp => (getLocal(tp.getSize), tp))) // store boxed value(s) into localSlots - val storeOps = localSlots.toList reverseMap { case (slot, tp) => + val storeOps = localSlots.reverseIterator map { case (slot, tp) => new VarInsnNode(tp.getOpcode(ISTORE), slot) - } + } to(List) val storeInitialValues = creation.loadInitialValues match { case Some(ops) => ops ::: storeOps case None => storeOps } if (keepBox) { - val loadOps: List[VarInsnNode] = localSlots.map({ case (slot, tp) => + val loadOps = List.from[VarInsnNode](localSlots.iterator.map({ case (slot, tp) => new VarInsnNode(tp.getOpcode(ILOAD), slot) - })(collection.breakOut) + })) toInsertBefore(creation.valuesConsumer) = storeInitialValues ::: loadOps } else { toReplace(creation.valuesConsumer) = storeInitialValues @@ -278,9 +280,9 @@ abstract class BoxUnbox { } } - /** Method M2 for eliminating box-unbox pairs (see doc comment in the beginning of this file) */ + /* Method M2 for eliminating box-unbox pairs (see doc comment in the beginning of this file) */ def replaceBoxOperationsMultipleCreations(allCreations: Set[BoxCreation], allConsumers: Set[BoxConsumer], boxKind: BoxKind): Unit = { - /** + /* * If a single-value size-1 box is eliminated, local variables slots holding the box are * reused to hold the unboxed value. In case there's an entry for that local variable in the * method's local variables table (debug info), adapt the type. @@ -308,7 +310,7 @@ abstract class BoxUnbox { } } - /** Remove box creations - leave the boxed value(s) on the stack instead. */ + /* Remove box creations - leave the boxed value(s) on the stack instead. 
*/ def replaceCreationOps(): Unit = { for (creation <- allCreations) creation.loadInitialValues match { case None => @@ -320,7 +322,7 @@ abstract class BoxUnbox { } } - /** + /* * Replace a value extraction operation. For a single-value box, the extraction operation can * just be removed. An extraction from a multi-value box is replaced by POP operations for the * non-used values, and an xSTORE / xLOAD for the extracted value. Example: tuple3._2 becomes @@ -342,7 +344,7 @@ abstract class BoxUnbox { for (extraction <- allConsumers) { val replacementOps = extraction match { case Drop(_) => - boxKind.boxedTypes.reverseMap(t => getPop(t.getSize)) + boxKind.boxedTypes.reverseIterator.map(t => getPop(t.getSize)).toList case _ => val valueIndex = boxKind.extractedValueIndex(extraction) if (valueIndex == 0) { @@ -350,7 +352,7 @@ abstract class BoxUnbox { pops ::: extraction.postExtractionAdaptationOps(boxKind.boxedTypes.head) } else { var loadOps: List[AbstractInsnNode] = null - val consumeStack = boxKind.boxedTypes.zipWithIndex reverseMap { + val consumeStack = boxKind.boxedTypes.zipWithIndex.reverseIterator.map { case (tp, i) => if (i == valueIndex) { val resultSlot = getLocal(tp.getSize) @@ -359,7 +361,7 @@ abstract class BoxUnbox { } else { getPop(tp.getSize) } - } + }.to(List) consumeStack ::: loadOps } } @@ -416,6 +418,8 @@ abstract class BoxUnbox { } } + // We don't need to worry about CallGraph.closureInstantiations and + // BackendUtils.indyLambdaImplMethods, the removed instructions are not IndyLambdas def removeFromCallGraph(insn: AbstractInsnNode): Unit = insn match { case mi: MethodInsnNode => callGraph.removeCallsite(mi, method) case _ => @@ -436,8 +440,9 @@ abstract class BoxUnbox { } method.maxLocals = nextLocal - method.maxStack = backendUtils.maxStack(method) + maxStackGrowth - toInsertBefore.nonEmpty || toReplace.nonEmpty || toDelete.nonEmpty + method.maxStack = BackendUtils.maxStack(method) + maxStackGrowth + val changed = toInsertBefore.nonEmpty || 
toReplace.nonEmpty || toDelete.nonEmpty + changed } } @@ -530,7 +535,7 @@ abstract class BoxUnbox { new VarInsnNode(opc, tp._2) } val locs = newLocals(vi.`var`) - replacements += vi -> (if (isLoad) locs.map(typedVarOp) else locs.reverseMap(typedVarOp)) + replacements += vi -> (if (isLoad) locs.map(typedVarOp) else locs.map(typedVarOp).reverse) case copyOp => if (copyOp.getOpcode == DUP && valueTypes.lengthCompare(1) == 0) { @@ -547,8 +552,8 @@ abstract class BoxUnbox { * For a set of box creation operations and a corresponding set of box consumer operations, * this iterator returns all copy operations (load, store, dup) that are in between. */ - class CopyOpsIterator(initialCreations: Set[BoxCreation], finalCons: Set[BoxConsumer], prodCons: ProdConsAnalyzer) extends Iterator[AbstractInsnNode] { - private val queue = mutable.Queue.empty[AbstractInsnNode] ++ initialCreations.iterator.flatMap(_.boxConsumers(prodCons, ultimate = false)) + class CopyOpsIterator(initialCreations: Set[BoxCreation], finalCons: Set[BoxConsumer], prodCons: ProdConsAnalyzer) extends AbstractIterator[AbstractInsnNode] { + private val queue = mutable.Queue.empty[AbstractInsnNode] ++= initialCreations.iterator.flatMap(_.boxConsumers(prodCons, ultimate = false)) // a single copy operation can consume multiple producers: val a = if (b) box(1) else box(2). // the `ASTORE a` has two producers (the two box operations). we need to handle it only once. 
@@ -762,7 +767,7 @@ abstract class BoxUnbox { } } - def checkRefConsumer(insn: AbstractInsnNode, kind: Ref, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = insn match { + def checkRefConsumer(insn: AbstractInsnNode, kind: Ref, @unused prodCons: ProdConsAnalyzer): Option[BoxConsumer] = insn match { case fi: FieldInsnNode if fi.owner == kind.refClass && fi.name == "elem" => if (fi.getOpcode == GETFIELD) Some(StaticGetterOrInstanceRead(fi)) else if (fi.getOpcode == PUTFIELD) Some(StaticSetterOrInstanceWrite(fi)) @@ -843,7 +848,7 @@ abstract class BoxUnbox { } private val getterIndexPattern = "_(\\d{1,2}).*".r - def tupleGetterIndex(getterName: String) = getterName match { case getterIndexPattern(i) => i.toInt - 1 } + def tupleGetterIndex(getterName: String) = getterName match { case getterIndexPattern(i) => i.toInt - 1 case x => throw new MatchError(x) } } // TODO: add more diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 0605631acd1f..9fc54055cb8f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,15 +14,16 @@ package scala.tools.nsc package backend.jvm package opt - -import scala.collection.JavaConverters._ +import scala.annotation.nowarn import scala.collection.{concurrent, mutable} +import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.NoPosition import scala.tools.asm import scala.tools.asm.{Attribute, Type} import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ +import scala.tools.nsc.backend.jvm.analysis.BackendUtils.LambdaMetaFactoryCall import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ /** @@ -32,7 +33,7 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class ByteCodeRepository extends PerRunInit { val postProcessor: PostProcessor - import postProcessor.{bTypes, bTypesFromClassfile, callGraph} + import postProcessor.{bTypes, bTypesFromClassfile} import bTypes._ import frontendAccess.{backendReporting, backendClassPath, recordPerRunCache} @@ -55,7 +56,8 @@ abstract class ByteCodeRepository extends PerRunInit { * Note - although this is typed a mutable.Map, individual simple get and put operations are threadsafe as the * underlying data structure is synchronized. 
*/ - val parsedClasses: mutable.Map[InternalName, Either[ClassNotFound, ClassNode]] = recordPerRunCache(LruMap[InternalName, Either[ClassNotFound, ClassNode]](maxCacheSize, true)) + val parsedClasses: mutable.Map[InternalName, Either[ClassNotFound, ClassNode]] = + recordPerRunCache(FifoCache[InternalName, Either[ClassNotFound, ClassNode]](maxCacheSize, threadsafe = true)) /** * Contains the internal names of all classes that are defined in Java source files of the current @@ -188,7 +190,7 @@ abstract class ByteCodeRepository extends PerRunInit { case Some(m) => Right(Some((m, owner.name))) case _ => if (owner.superName == null) Right(None) - else classNode(owner.superName).flatMap(findInSuperClasses(_, isInterface(owner))) + else classNode(owner.superName).flatMap(findInSuperClasses(_, publicInstanceOnly = isInterface(owner))) } } } @@ -197,6 +199,7 @@ abstract class ByteCodeRepository extends PerRunInit { val visited = mutable.Set.empty[InternalName] val found = mutable.ListBuffer.empty[(MethodNode, ClassNode)] + @nowarn("cat=lint-nonlocal-return") def findIn(owner: ClassNode): Option[ClassNotFound] = { for (i <- owner.interfaces.asScala if !visited(i)) classNode(i) match { case Left(e) => return Some(e) @@ -210,50 +213,45 @@ abstract class ByteCodeRepository extends PerRunInit { None } - def findSpecific = { - val result = - if (found.size <= 1) found.headOption - else { - val maxSpecific = found.filterNot({ - case (method, owner) => - val ownerTp = bTypesFromClassfile.classBTypeFromClassNode(owner) - found exists { - case (other, otherOwner) => - (other ne method) && { - val otherTp = bTypesFromClassfile.classBTypeFromClassNode(otherOwner) - otherTp.isSubtypeOf(ownerTp).get - } - } - }) - // (*) note that if there's no single, non-abstract, maximally-specific method, the jvm - // method resolution (jvms-5.4.3.3) returns any of the non-private, non-static parent - // methods at random (abstract or concrete). 
- // we chose not to do this here, to prevent the inliner from potentially inlining the - // wrong method. in other words, we guarantee that a concrete method is only returned if - // it resolves deterministically. - // however, there may be multiple abstract methods inherited. in this case we *do* want - // to return a result to allow performing accessibility checks in the inliner. note that - // for accessibility it does not matter which of these methods is return, as they are all - // non-private (i.e., public, protected is not possible, jvms-4.1). - // the remaining case (when there's no max-specific method, but some non-abstract one) - // does not occur in bytecode generated by scalac or javac. we return no result in this - // case. this may at worst prevent some optimizations from happening. - val nonAbs = maxSpecific.filterNot(p => isAbstractMethod(p._1)) - if (nonAbs.lengthCompare(1) == 0) nonAbs.headOption - else { - val foundNonAbs = found.filterNot(p => isAbstractMethod(p._1)) - if (foundNonAbs.lengthCompare(1) == 0) foundNonAbs.headOption - else if (foundNonAbs.isEmpty) found.headOption // (*) - else None - } - } - // end result - Right(result.map(p => (p._1, p._2.name))) - } - findIn(initialOwner) match { case Some(cnf) => Left(cnf) - case _ => findSpecific + case _ => + val result = + if (found.sizeIs <= 1) found.headOption + else { + val maxSpecific = found.filterNot { + case (method, owner) => + val ownerTp = bTypesFromClassfile.classBTypeFromClassNode(owner) + found.exists { + case (other, otherOwner) => + (other ne method) && { + val otherTp = bTypesFromClassfile.classBTypeFromClassNode(otherOwner) + otherTp.isSubtypeOf(ownerTp).get + } + } + } + // (*) if there's no single, non-abstract, maximally-specific method, JVM method resolution + // (jvms-5.4.3.3) returns any of the non-private, non-static parent methods arbitrarily + // (abstract or concrete). + // we chose not to do this here, to prevent inlining the wrong method.
in other words, + // a concrete method is only returned if it resolves deterministically. + // if there's no non-abstract method, we *do* want to return a result to allow performing + // accessibility checks in the inliner. for accessibility it does not matter which of these + // methods is returned, they are all public (protected is not possible, jvms-4.1). + // TODO: it would be cleaner to make `methodNode` return a list of methods and deal + // with it at the call sites, but it's a bigger refactoring that affects the + // `CallGraph`. in any case, it should not occur in Scala bytecode as we emit mixin + // forwarders. + val nonAbs = maxSpecific.filterNot(p => isAbstractMethod(p._1)) + if (nonAbs.sizeIs == 1) nonAbs.headOption + else { + val foundNonAbs = found.filterNot(p => isAbstractMethod(p._1)) + if (foundNonAbs.sizeIs == 1) foundNonAbs.headOption + else if (foundNonAbs.isEmpty) found.headOption // (*) + else None + } + } + Right(result.map(p => (p._1, p._2.name))) } } @@ -263,6 +261,7 @@ abstract class ByteCodeRepository extends PerRunInit { } else { def notFound(cnf: Option[ClassNotFound]) = Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, cnf)) val res: Either[ClassNotFound, Option[(MethodNode, InternalName)]] = classNode(ownerInternalNameOrArrayDescriptor).flatMap(c => + // TODO: if `c` is an interface, should directly go to `findInInterfaces` findInSuperClasses(c) flatMap { case None => findInInterfaces(c) case res => Right(res) @@ -285,8 +284,8 @@ abstract class ByteCodeRepository extends PerRunInit { case AbstractInsnNode.LINE => iter.remove() case AbstractInsnNode.INVOKE_DYNAMIC_INSN => insn match { - case callGraph.LambdaMetaFactoryCall(_, _, implMethod, _) => - postProcessor.backendUtils.addIndyLambdaImplMethod(classNode.name, implMethod) + case LambdaMetaFactoryCall(indy, _, implMethod, _, _) => + postProcessor.backendUtils.addIndyLambdaImplMethod(classNode.name, m, indy, implMethod) case _ => } case _ => @@ -315,7 +314,7 
@@ abstract class ByteCodeRepository extends PerRunInit { // TODO: we need to remove them also for classes that are not parsed from classfiles, why not simplify and do it once when inlining? // OR: instead of skipping line numbers for inlined code, use write a SourceDebugExtension // attribute that contains JSR-45 data that encodes debugging info. - // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.11 + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.11 // https://jcp.org/aboutJava/communityprocess/final/jsr045/index.html removeLineNumbersAndAddLMFImplMethods(classNode) Some(classNode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 835c7f36ebdd..fa45ab167870 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,8 +14,9 @@ package scala.tools.nsc package backend.jvm package opt -import scala.annotation.{tailrec, switch} -import scala.collection.JavaConverters._ +import scala.annotation.{switch, tailrec} +import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.Collections._ import scala.tools.asm.Opcodes._ import scala.tools.asm.commons.CodeSizeEvaluator @@ -27,7 +28,7 @@ import scala.tools.nsc.backend.jvm.analysis.InstructionStackEffect object BytecodeUtils { - // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.9.1 + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.9.1 final val maxJVMMethodSize = 65535 // 5% margin, more than enough for the instructions added by the inliner (store / load args, null check for instance methods) @@ -93,18 +94,18 @@ object BytecodeUtils { def isLoadOrStore(instruction: AbstractInsnNode): Boolean = isLoad(instruction) || isStore(instruction) - def isNonVirtualCall(instruction: AbstractInsnNode): Boolean = { - val op = instruction.getOpcode - op == INVOKESPECIAL || op == INVOKESTATIC + def isStaticCall(instruction: AbstractInsnNode): Boolean = { + instruction.getOpcode == INVOKESTATIC } def isVirtualCall(instruction: AbstractInsnNode): Boolean = { val op = instruction.getOpcode - op == INVOKEVIRTUAL || op == INVOKEINTERFACE + // invokespecial + op == INVOKESPECIAL || op == INVOKEVIRTUAL || op == INVOKEINTERFACE } def isCall(instruction: AbstractInsnNode): Boolean = { - isNonVirtualCall(instruction) || isVirtualCall(instruction) + isStaticCall(instruction) || isVirtualCall(instruction) } def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0 @@ -127,6 +128,8 @@ object BytecodeUtils { def isVarargsMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_VARARGS) != 0 + def isSyntheticMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_SYNTHETIC) != 0 + // cross-jdk def 
hasCallerSensitiveAnnotation(methodNode: MethodNode): Boolean = methodNode.visibleAnnotations != null && @@ -142,7 +145,7 @@ object BytecodeUtils { def isStrictfpMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_STRICT) != 0 - def isReference(t: Type) = t.getSort == Type.OBJECT || t.getSort == Type.ARRAY + def isReference(t: Type): Boolean = t.getSort == Type.OBJECT || t.getSort == Type.ARRAY /** Find the nearest preceding node to `insn` which is executable (i.e., not a label / line number) * and which is not selected by `stopBefore`. */ @@ -174,6 +177,23 @@ object BytecodeUtils { else nextExecutableInstructionOrLabel(next) } + def findSingleCall(method: MethodNode, such: MethodInsnNode => Boolean): Option[MethodInsnNode] = { + @tailrec def noMoreInvoke(insn: AbstractInsnNode): Boolean = { + insn == null || (!insn.isInstanceOf[MethodInsnNode] && noMoreInvoke(insn.getNext)) + } + @tailrec def find(insn: AbstractInsnNode): Option[MethodInsnNode] = { + if (insn == null) None + else insn match { + case mi: MethodInsnNode => + if (such(mi) && noMoreInvoke(insn.getNext)) Some(mi) + else None + case _ => + find(insn.getNext) + } + } + find(method.instructions.getFirst) + } + def sameTargetExecutableInstruction(a: JumpInsnNode, b: JumpInsnNode): Boolean = { // Compare next executable instead of the labels. 
Identifies a, b as the same target: // LabelNode(a) @@ -182,7 +202,7 @@ object BytecodeUtils { nextExecutableInstruction(a.label) == nextExecutableInstruction(b.label) } - def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode) { + def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode): Unit = { val instructions = method.instructions val op = jump.getOpcode if ((op >= IFEQ && op <= IFLE) || op == IFNULL || op == IFNONNULL) { @@ -271,7 +291,7 @@ object BytecodeUtils { if (l == from) list.set(i, to) } } - reference match { + (reference: @unchecked) match { case jump: JumpInsnNode => jump.label = to case line: LineNumberNode => line.start = to case switch: LookupSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to @@ -286,7 +306,7 @@ object BytecodeUtils { } } - def codeSizeOKForInlining(caller: MethodNode, callee: MethodNode): Boolean = { + def callsiteTooLargeAfterInlining(caller: MethodNode, callee: MethodNode): Boolean = { // Looking at the implementation of CodeSizeEvaluator, all instructions except tableswitch and // lookupswitch are <= 8 bytes. These should be rare enough for 8 to be an OK rough upper bound. def roughUpperBound(methodNode: MethodNode): Int = methodNode.instructions.size * 8 @@ -302,7 +322,7 @@ object BytecodeUtils { } def cloneLabels(methodNode: MethodNode): Map[LabelNode, LabelNode] = { - methodNode.instructions.iterator().asScala.collect({ + methodNode.instructions.iterator.asScala.collect({ case labelNode: LabelNode => (labelNode, newLabelNode) }).toMap } @@ -321,33 +341,38 @@ object BytecodeUtils { * Clone the local variable descriptors of `methodNode` and map their `start` and `end` labels * according to the `labelMap`. 
*/ - def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], calleeMethodName: String, shift: Int): List[LocalVariableNode] = { - methodNode.localVariables.iterator().asScala.map(localVariable => { - val name = - if (calleeMethodName.length + localVariable.name.length < BTypes.InlinedLocalVariablePrefixMaxLenght) { - calleeMethodName + "_" + localVariable.name - } else { - val parts = localVariable.name.split("_").toVector - val (methNames, varName) = (calleeMethodName +: parts.init, parts.last) - // keep at least 5 characters per method name - val maxNumMethNames = BTypes.InlinedLocalVariablePrefixMaxLenght / 5 - val usedMethNames = - if (methNames.length < maxNumMethNames) methNames - else { - val half = maxNumMethNames / 2 - methNames.take(half) ++ methNames.takeRight(half) - } - val charsPerMethod = BTypes.InlinedLocalVariablePrefixMaxLenght / usedMethNames.length - usedMethNames.foldLeft("")((res, methName) => res + methName.take(charsPerMethod) + "_") + varName - } - new LocalVariableNode( - name, - localVariable.desc, - localVariable.signature, - labelMap(localVariable.start), - labelMap(localVariable.end), - localVariable.index + shift) - }).toList + def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], calleeMethodName: String, localIndexMap: Int => Int): List[LocalVariableNode] = { + val res = mutable.ListBuffer.empty[LocalVariableNode] + for (localVariable <- methodNode.localVariables.iterator.asScala) { + val newIdx = localIndexMap(localVariable.index) + if (newIdx >= 0) { + val name = + if (calleeMethodName.length + localVariable.name.length < BTypes.InlinedLocalVariablePrefixMaxLength) { + calleeMethodName + "_" + localVariable.name + } else { + val parts = localVariable.name.split("_").toVector + val (methNames, varName) = (calleeMethodName +: parts.init, parts.last) + // keep at least 5 characters per method name + val maxNumMethNames = BTypes.InlinedLocalVariablePrefixMaxLength / 5 
+ val usedMethNames = + if (methNames.length < maxNumMethNames) methNames + else { + val half = maxNumMethNames / 2 + methNames.take(half) ++ methNames.takeRight(half) + } + val charsPerMethod = BTypes.InlinedLocalVariablePrefixMaxLength / usedMethNames.length + usedMethNames.foldLeft("")((res, methName) => res + methName.take(charsPerMethod) + "_") + varName + } + res += new LocalVariableNode( + name, + localVariable.desc, + localVariable.signature, + labelMap(localVariable.start), + labelMap(localVariable.end), + newIdx) + } + } + res.toList } /** @@ -355,7 +380,7 @@ object BytecodeUtils { * labels according to the `labelMap`. */ def cloneTryCatchBlockNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode]): List[TryCatchBlockNode] = { - methodNode.tryCatchBlocks.iterator().asScala.map(tryCatch => new TryCatchBlockNode( + methodNode.tryCatchBlocks.iterator.asScala.map(tryCatch => new TryCatchBlockNode( labelMap(tryCatch.start), labelMap(tryCatch.end), labelMap(tryCatch.handler), diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index 9fba23c046ab..0f57f7f628d7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,15 +14,17 @@ package scala.tools.nsc package backend.jvm package opt -import scala.collection.JavaConverters._ import scala.collection.concurrent.TrieMap import scala.collection.immutable.IntMap import scala.collection.{concurrent, mutable} +import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.{NoPosition, Position} import scala.tools.asm.tree._ -import scala.tools.asm.{Handle, Opcodes, Type} +import scala.tools.asm.{Opcodes, Type} import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ +import scala.tools.nsc.backend.jvm.analysis.BackendUtils.LambdaMetaFactoryCall +import scala.tools.nsc.backend.jvm.analysis.TypeFlowInterpreter.{LMFValue, ParamValue} import scala.tools.nsc.backend.jvm.analysis._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ @@ -32,8 +34,7 @@ abstract class CallGraph { import postProcessor._ import bTypes._ import bTypesFromClassfile._ - import backendUtils._ - import frontendAccess.{compilerSettings, recordPerRunCache} + import frontendAccess.recordPerRunCache /** * The call graph contains the callsites in the program being compiled. @@ -83,10 +84,19 @@ abstract class CallGraph { //currently single threaded access only val noInlineAnnotatedCallsites: mutable.Set[MethodInsnNode] = recordPerRunCache(mutable.Set.empty) + // Contains `INVOKESPECIAL` instructions that were cloned by the inliner and need to be resolved + // statically by the call graph. See Inliner.maybeInlinedLater. 
+ val staticallyResolvedInvokespecial: mutable.Set[MethodInsnNode] = recordPerRunCache(mutable.Set.empty) + + def isStaticCallsite(call: MethodInsnNode): Boolean = { + val opc = call.getOpcode + opc == Opcodes.INVOKESTATIC || opc == Opcodes.INVOKESPECIAL && staticallyResolvedInvokespecial(call) + } + def removeCallsite(invocation: MethodInsnNode, methodNode: MethodNode): Option[Callsite] = { val methodCallsites = callsites(methodNode) val newCallsites = methodCallsites - invocation - if (newCallsites.isEmpty) callsites.remove(methodNode) + if (newCallsites.isEmpty) callsites.subtractOne(methodNode) else callsites(methodNode) = newCallsites methodCallsites.get(invocation) } @@ -97,72 +107,73 @@ abstract class CallGraph { } def containsCallsite(callsite: Callsite): Boolean = callsites(callsite.callsiteMethod) contains callsite.callsiteInstruction - def findCallSite(method: MethodNode, call: MethodInsnNode): Option[Callsite] = callsites.getOrElse(method, Map.empty).get(call) def removeClosureInstantiation(indy: InvokeDynamicInsnNode, methodNode: MethodNode): Option[ClosureInstantiation] = { val methodClosureInits = closureInstantiations(methodNode) val newClosureInits = methodClosureInits - indy - if (newClosureInits.isEmpty) closureInstantiations.remove(methodNode) + if (newClosureInits.isEmpty) closureInstantiations.subtractOne(methodNode) else closureInstantiations(methodNode) = newClosureInits methodClosureInits.get(indy) } - def addClosureInstantiation(closureInit: ClosureInstantiation) = { - val methodClosureInits = closureInstantiations(closureInit.ownerMethod) - closureInstantiations(closureInit.ownerMethod) = methodClosureInits + (closureInit.lambdaMetaFactoryCall.indy -> closureInit) - } - def addClass(classNode: ClassNode): Unit = { val classType = classBTypeFromClassNode(classNode) classNode.methods.asScala.foreach(addMethod(_, classType)) } - def addIfMissing(methodNode: MethodNode, definingClass: ClassBType): Unit = { - if 
(!callsites.contains(methodNode)) addMethod(methodNode, definingClass) + def refresh(methodNode: MethodNode, definingClass: ClassBType): Unit = { + callsites.subtractOne(methodNode) + closureInstantiations.subtractOne(methodNode) + // callsitePositions, inlineAnnotatedCallsites, noInlineAnnotatedCallsites, staticallyResolvedInvokespecial + // are left unchanged. They contain individual instructions, the state for those remains valid in case + // the inliner performs a rollback. + addMethod(methodNode, definingClass) } def addMethod(methodNode: MethodNode, definingClass: ClassBType): Unit = { - if (!BytecodeUtils.isAbstractMethod(methodNode) && !BytecodeUtils.isNativeMethod(methodNode)) { - // TODO: run dataflow analyses to make the call graph more precise - // - producers to get forwarded parameters (ForwardedParam) - // - typeAnalysis for more precise argument types, more precise callee - - // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance - // call is known to be not-null, in which case we don't have to emit a null check when inlining. - // It is also used to get the stack height at the call site. 
- - val analyzer = { - if (compilerSettings.optNullnessTracking && AsmAnalyzer.sizeOKForNullness(methodNode)) { - Some(new AsmAnalyzer(methodNode, definingClass.internalName, new NullnessAnalyzer(backendUtils.isNonNullMethodInvocation, methodNode))) - } else if (AsmAnalyzer.sizeOKForBasicValue(methodNode)) { - Some(new AsmAnalyzer(methodNode, definingClass.internalName)) - } else None - } - - // if the method is too large to run an analyzer, it is not added to the call graph - if (analyzer.nonEmpty) { - val Some(a) = analyzer - def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = a.analyzer match { - case nullnessAnalyzer: NullnessAnalyzer => - val frame = nullnessAnalyzer.frameAt(call, methodNode) - frame.getStack(frame.getStackSize - 1 - numArgs) eq NotNullValue - case _ => false - } - - var methodCallsites = Map.empty[MethodInsnNode, Callsite] - var methodClosureInstantiations = Map.empty[InvokeDynamicInsnNode, ClosureInstantiation] - - // lazy so it is only computed if actually used by computeArgInfos - lazy val prodCons = new ProdConsAnalyzer(methodNode, definingClass.internalName) + if (!BytecodeUtils.isAbstractMethod(methodNode) && !BytecodeUtils.isNativeMethod(methodNode) && AsmAnalyzer.sizeOKForBasicValue(methodNode)) { + lazy val typeAnalyzer = new NonLubbingTypeFlowAnalyzer(methodNode, definingClass.internalName) + + var methodCallsites = Map.empty[MethodInsnNode, Callsite] + var methodClosureInstantiations = Map.empty[InvokeDynamicInsnNode, ClosureInstantiation] + + methodNode.instructions.iterator.asScala foreach { + case call: MethodInsnNode if typeAnalyzer.frameAt(call) != null => // skips over unreachable code + // JVMS 6.5 invokespecial: " If all of the following are true, let C be the direct superclass of the current class" + def isSuperCall: Boolean = + call.getOpcode == Opcodes.INVOKESPECIAL && + call.name != GenBCode.INSTANCE_CONSTRUCTOR_NAME && { + val owner = call.owner + definingClass.internalName != owner && { + var nextSuper 
= definingClass.info.get.superClass + while (nextSuper.nonEmpty) { + if (nextSuper.get.internalName == owner) return true + nextSuper = nextSuper.get.info.get.superClass + } + false + } + } + val paramTps = FLazy(Type.getArgumentTypes(call.desc)) + // This is the type where method lookup starts (implemented in byteCodeRepository.methodNode) + val preciseOwner = + if (isStaticCallsite(call)) call.owner + else if (isSuperCall) definingClass.info.get.superClass.get.internalName + else if (call.getOpcode == Opcodes.INVOKESPECIAL) call.owner + else { + // invokevirtual, invokeinterface: start search at the type of the receiver + val f = typeAnalyzer.frameAt(call) + // Not Type.getArgumentsAndReturnSizes: in asm.Frame, size-2 values use a single stack slot + val numParams = paramTps.get.length + f.peekStack(numParams).getType.getInternalName + } - methodNode.instructions.iterator.asScala foreach { - case call: MethodInsnNode if a.frameAt(call) != null => // skips over unreachable code - val callee: Either[OptimizerWarning, Callee] = for { - (method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)] + val callee: Either[OptimizerWarning, Callee] = { + for { + (method, declarationClass) <- byteCodeRepository.methodNode(preciseOwner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)] (declarationClassNode, calleeSourceFilePath) <- byteCodeRepository.classNodeAndSourceFilePath(declarationClass): Either[OptimizerWarning, (ClassNode, Option[String])] } yield { val declarationClassBType = classBTypeFromClassNode(declarationClassNode) - val info = analyzeCallsite(method, declarationClassBType, call, calleeSourceFilePath) + val info = analyzeCallsite(method, declarationClassBType, call, paramTps, calleeSourceFilePath, definingClass) import info._ Callee( callee = method, @@ -174,102 +185,87 @@ abstract class CallGraph { samParamTypes = info.samParamTypes, 
calleeInfoWarning = warning) } + } - val argInfos = computeArgInfos(callee, call, prodCons) - - val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || { - val numArgs = Type.getArgumentTypes(call.desc).length - receiverNotNullByAnalysis(call, numArgs) - } - - methodCallsites += call -> Callsite( - callsiteInstruction = call, - callsiteMethod = methodNode, - callsiteClass = definingClass, - callee = callee, - argInfos = argInfos, - callsiteStackHeight = a.frameAt(call).getStackSize, - receiverKnownNotNull = receiverNotNull, - callsitePosition = callsitePositions.getOrElse(call, NoPosition), - annotatedInline = inlineAnnotatedCallsites(call), - annotatedNoInline = noInlineAnnotatedCallsites(call) - ) - - case LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType) if a.frameAt(indy) != null => - val lmf = LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType) - val capturedArgInfos = computeCapturedArgInfos(lmf, prodCons) - methodClosureInstantiations += indy -> ClosureInstantiation( - lmf, - methodNode, - definingClass, - capturedArgInfos) - - case _ => - } + val argInfos = computeArgInfos(callee, call, paramTps, typeAnalyzer) + + // A nullness analysis could be used to prevent emitting unnecessary receiver null checks + // when inlining non-static callsites. However, LocalOpt's nullness cleanup will also do + // it after the fact, so we can avoid running the nullness analysis when building the call + // graph (or when inlining). 
+ val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC + + methodCallsites += call -> Callsite( + callsiteInstruction = call, + callsiteMethod = methodNode, + callsiteClass = definingClass, + callee = callee, + argInfos = argInfos, + callsiteStackHeight = typeAnalyzer.frameAt(call).getStackSize, + receiverKnownNotNull = receiverNotNull, + callsitePosition = callsitePositions.getOrElse(call, NoPosition), + annotatedInline = inlineAnnotatedCallsites(call), + annotatedNoInline = noInlineAnnotatedCallsites(call) + ) + + case LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType, indyParamTypes) if typeAnalyzer.frameAt(indy) != null => + val lmf = LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType) + val capturedArgInfos = computeCapturedArgInfos(lmf, indyParamTypes, typeAnalyzer) + methodClosureInstantiations += indy -> ClosureInstantiation( + lmf, + methodNode, + definingClass, + capturedArgInfos) - callsites(methodNode) = methodCallsites - closureInstantiations(methodNode) = methodClosureInstantiations + case _ => } + + callsites(methodNode) = methodCallsites + closureInstantiations(methodNode) = methodClosureInstantiations } } - def computeArgInfos(callee: Either[OptimizerWarning, Callee], callsiteInsn: MethodInsnNode, prodCons: => ProdConsAnalyzer): IntMap[ArgInfo] = { + def computeArgInfos(callee: Either[OptimizerWarning, Callee], callsiteInsn: MethodInsnNode, paramTps: FLazy[Array[Type]], typeAnalyzer: NonLubbingTypeFlowAnalyzer): IntMap[ArgInfo] = { if (callee.isLeft) IntMap.empty else { - lazy val numArgs = Type.getArgumentTypes(callsiteInsn.desc).length + (if (callsiteInsn.getOpcode == Opcodes.INVOKESTATIC) 0 else 1) - argInfosForSams(callee.get.samParamTypes, callsiteInsn, numArgs, prodCons) + val numArgs = FLazy(paramTps.get.length + (if (callsiteInsn.getOpcode == Opcodes.INVOKESTATIC) 0 else 1)) + argInfosForSams(callee.get.samParamTypes, callsiteInsn, numArgs, typeAnalyzer) } } - def 
computeCapturedArgInfos(lmf: LambdaMetaFactoryCall, prodCons: => ProdConsAnalyzer): IntMap[ArgInfo] = { - val capturedSams = capturedSamTypes(lmf) - val numCaptures = Type.getArgumentTypes(lmf.indy.desc).length - argInfosForSams(capturedSams, lmf.indy, numCaptures, prodCons) + def computeCapturedArgInfos(lmf: LambdaMetaFactoryCall, indyParamTypes: Array[Type], typeAnalyzer: NonLubbingTypeFlowAnalyzer): IntMap[ArgInfo] = { + val capturedTypes = indyParamTypes.map(t => bTypeForDescriptorFromClassfile(t.getDescriptor)) + val capturedSams = samTypes(capturedTypes) + argInfosForSams(capturedSams, lmf.indy, FLazy(indyParamTypes.length), typeAnalyzer) } - private def argInfosForSams(sams: IntMap[ClassBType], consumerInsn: AbstractInsnNode, numConsumed: => Int, prodCons: => ProdConsAnalyzer): IntMap[ArgInfo] = { - // TODO: use type analysis instead of ProdCons - should be more efficient - // some random thoughts: - // - assign special types to parameters and indy-lambda-functions to track them - // - upcast should not change type flow analysis: don't lose information. - // - can we do something about factory calls? Foo(x) for case class foo gives a Foo. - // inline the factory? analysis across method boundary? 
- - // assign to a lazy val to prevent repeated evaluation of the by-name arg - lazy val prodConsI = prodCons - lazy val firstConsumedSlot = { - val consumerFrame = prodConsI.frameAt(consumerInsn) - consumerFrame.stackTop - numConsumed + 1 - } - sams flatMap { + private def argInfosForSams(sams: IntMap[ClassBType], consumerInsn: AbstractInsnNode, numConsumed: FLazy[Int], typeAnalyzer: NonLubbingTypeFlowAnalyzer): IntMap[ArgInfo] = { + lazy val consumerFrame = typeAnalyzer.frameAt(consumerInsn) + lazy val firstConsumedSlot = consumerFrame.stackTop - numConsumed.get + 1 + val samInfos: IntMap[ArgInfo] = sams flatMap { case (index, _) => - val prods = prodConsI.initialProducersForValueAt(consumerInsn, firstConsumedSlot + index) - if (prods.size != 1) None - else { - val argInfo = prods.head match { - case LambdaMetaFactoryCall(_, _, _, _) => Some(FunctionLiteral) - case ParameterProducer(local) => Some(ForwardedParam(local)) - case _ => None - } - argInfo.map((index, _)) + val argInfo = consumerFrame.getValue(firstConsumedSlot + index) match { + case _: LMFValue => Some(FunctionLiteral) + case p: ParamValue => Some(ForwardedParam(p.local)) + case _ => None } + argInfo.map((index, _)) } + val isArrayLoadOrUpdateOnKnownArray = BackendUtils.isRuntimeArrayLoadOrUpdate(consumerInsn) && + consumerFrame.getValue(firstConsumedSlot + 1).getType.getSort == Type.ARRAY + if (isArrayLoadOrUpdateOnKnownArray) samInfos.updated(1, StaticallyKnownArray) + else samInfos } - def samParamTypes(methodNode: MethodNode, receiverType: ClassBType): IntMap[ClassBType] = { + def samParamTypes(methodNode: MethodNode, paramTps: Array[Type], receiverType: ClassBType): IntMap[ClassBType] = { val paramTypes = { - val params = Type.getMethodType(methodNode.desc).getArgumentTypes.map(t => bTypeForDescriptorOrInternalNameFromClassfile(t.getDescriptor)) + val params = paramTps.map(t => bTypeForDescriptorFromClassfile(t.getDescriptor)) val isStatic = BytecodeUtils.isStaticMethod(methodNode) if (isStatic) 
params else receiverType +: params } samTypes(paramTypes) } - def capturedSamTypes(lmf: LambdaMetaFactoryCall): IntMap[ClassBType] = { - val capturedTypes = Type.getArgumentTypes(lmf.indy.desc).map(t => bTypeForDescriptorOrInternalNameFromClassfile(t.getDescriptor)) - samTypes(capturedTypes) - } - private def samTypes(types: Array[BType]): IntMap[ClassBType] = { var res = IntMap.empty[ClassBType] for (i <- types.indices) { @@ -283,18 +279,38 @@ abstract class CallGraph { res } + final class FLazy[@specialized(Int) T](_init: => T) { + private[this] var init = () => _init + private[this] var v: T = _ + def get: T = { + if (init != null) { + v = init() + init = null + } + v + } + } + + object FLazy { + def apply[T](init: => T): FLazy[T] = new FLazy(init) + } + /** * Just a named tuple used as return type of `analyzeCallsite`. */ - private case class CallsiteInfo(isStaticallyResolved: Boolean, sourceFilePath: Option[String], - annotatedInline: Boolean, annotatedNoInline: Boolean, - samParamTypes: IntMap[ClassBType], - warning: Option[CalleeInfoWarning]) + private case class CallsiteInfo( + isStaticallyResolved: Boolean = false, + sourceFilePath: Option[String] = None, + annotatedInline: Boolean = false, + annotatedNoInline: Boolean = false, + samParamTypes: IntMap[ClassBType] = IntMap.empty, + warning: Option[CalleeInfoWarning], + ) /** * Analyze a callsite and gather meta-data that can be used for inlining decisions. 
*/ - private def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, call: MethodInsnNode, calleeSourceFilePath: Option[String]): CallsiteInfo = { + private def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, call: MethodInsnNode, paramTps: FLazy[Array[Type]], calleeSourceFilePath: Option[String], callsiteClass: ClassBType): CallsiteInfo = { val methodSignature = (calleeMethodNode.name, calleeMethodNode.desc) try { @@ -304,10 +320,15 @@ abstract class CallGraph { calleeDeclarationClassBType.info.orThrow.inlineInfo.methodInfos.get(methodSignature) match { case Some(methodInlineInfo) => val receiverType = classBTypeFromParsedClassfile(call.owner) - // (1) A non-final method can be safe to inline if the receiver type is a final subclass. Example: + // (1) Special case for trait super accessors. trait T { def f = 1 } generates a static + // method t$ which calls `invokespecial T.f`. Even if `f` is not final, this call will + // always resolve to `T.f`. This is a (very) special case. Otherwise, `invokespecial` + // is only used for private methods, constructors and super calls. + // + // (2) A non-final method can be safe to inline if the receiver type is a final subclass. Example: // class A { @inline def f = 1 }; object B extends A; B.f // can be inlined // - // TODO: (1) doesn't cover the following example: + // TODO: (2) doesn't cover the following example: // trait TravLike { def map = ... } // sealed trait List extends TravLike { ... } // assume map is not overridden // final case class :: / final case object Nil @@ -321,9 +342,10 @@ abstract class CallGraph { // TODO: type analysis can render more calls statically resolved. Example: // new A.f // can be inlined, the receiver type is known to be exactly A. 
val isStaticallyResolved: Boolean = { - isNonVirtualCall(call) || // scala/scala-dev#86: super calls (invokespecial) can be inlined -- TODO: check if that's still needed, and if it's correct: scala-dev#143 - methodInlineInfo.effectivelyFinal || - receiverType.info.orThrow.inlineInfo.isEffectivelyFinal // (1) + isStaticCallsite(call) || + (call.getOpcode == Opcodes.INVOKESPECIAL && receiverType == callsiteClass) || // (1) + methodInlineInfo.effectivelyFinal || + receiverType.info.orThrow.inlineInfo.isEffectivelyFinal // (2) } val warning = calleeDeclarationClassBType.info.orThrow.inlineInfo.warning.map( @@ -334,17 +356,17 @@ abstract class CallGraph { sourceFilePath = calleeSourceFilePath, annotatedInline = methodInlineInfo.annotatedInline, annotatedNoInline = methodInlineInfo.annotatedNoInline, - samParamTypes = samParamTypes(calleeMethodNode, receiverType), + samParamTypes = samParamTypes(calleeMethodNode, paramTps.get, receiverType), warning = warning) case None => val warning = MethodInlineInfoMissing(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, calleeDeclarationClassBType.info.orThrow.inlineInfo.warning) - CallsiteInfo(false, None, false, false, IntMap.empty, Some(warning)) + CallsiteInfo(warning = Some(warning)) } } catch { case Invalid(noInfo: NoClassBTypeInfo) => val warning = MethodInlineInfoError(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, noInfo) - CallsiteInfo(false, None, false, false, IntMap.empty, Some(warning)) + CallsiteInfo(warning = Some(warning)) } } @@ -366,12 +388,6 @@ abstract class CallGraph { callee: Either[OptimizerWarning, Callee], argInfos: IntMap[ArgInfo], callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position, annotatedInline: Boolean, annotatedNoInline: Boolean) { - /** - * Contains callsites that were created during inlining by cloning this callsite. 
Used to find - * corresponding callsites when inlining post-inline requests. - */ - val inlinedClones = mutable.Set.empty[ClonedCallsite] - // an annotation at the callsite takes precedence over an annotation at the definition site def isInlineAnnotated = annotatedInline || (callee.get.annotatedInline && !annotatedNoInline) def isNoInlineAnnotated = annotatedNoInline || (callee.get.annotatedNoInline && !annotatedInline) @@ -383,14 +399,13 @@ abstract class CallGraph { s" in ${callsiteClass.internalName}.${callsiteMethod.name}${callsiteMethod.desc}" } - final case class ClonedCallsite(callsite: Callsite, clonedWhenInlining: Callsite) - /** * Information about invocation arguments, obtained through data flow analysis of the callsite method. */ sealed trait ArgInfo case object FunctionLiteral extends ArgInfo final case class ForwardedParam(index: Int) extends ArgInfo + case object StaticallyKnownArray extends ArgInfo // final case class ArgTypeInfo(argType: BType, isPrecise: Boolean, knownNotNull: Boolean) extends ArgInfo // can be extended, e.g., with constant types @@ -432,83 +447,6 @@ abstract class CallGraph { * graph when re-writing a closure invocation to the body method. */ final case class ClosureInstantiation(lambdaMetaFactoryCall: LambdaMetaFactoryCall, ownerMethod: MethodNode, ownerClass: ClassBType, capturedArgInfos: IntMap[ArgInfo]) { - /** - * Contains closure instantiations that were created during inlining by cloning this instantiation. 
- */ - val inlinedClones = mutable.Set.empty[ClosureInstantiation] override def toString = s"ClosureInstantiation($lambdaMetaFactoryCall, ${ownerMethod.name + ownerMethod.desc}, $ownerClass)" } - final case class LambdaMetaFactoryCall(indy: InvokeDynamicInsnNode, samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type) - - object LambdaMetaFactoryCall { - def unapply(insn: AbstractInsnNode): Option[(InvokeDynamicInsnNode, Type, Handle, Type)] = insn match { - case indy: InvokeDynamicInsnNode if indy.bsm == coreBTypes.lambdaMetaFactoryMetafactoryHandle || indy.bsm == coreBTypes.lambdaMetaFactoryAltMetafactoryHandle => - indy.bsmArgs match { - case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, _@_*) => - // LambdaMetaFactory performs a number of automatic adaptations when invoking the lambda - // implementation method (casting, boxing, unboxing, and primitive widening, see Javadoc). - // - // The closure optimizer supports only one of those adaptations: it will cast arguments - // to the correct type when re-writing a closure call to the body method. Example: - // - // val fun: String => String = l => l - // val l = List("") - // fun(l.head) - // - // The samMethodType of Function1 is `(Object)Object`, while the instantiatedMethodType - // is `(String)String`. The return type of `List.head` is `Object`. - // - // The implMethod has the signature `C$anonfun(String)String`. - // - // At the closure callsite, we have an `INVOKEINTERFACE Function1.apply (Object)Object`, - // so the object returned by `List.head` can be directly passed into the call (no cast). - // - // The closure object will cast the object to String before passing it to the implMethod. - // - // When re-writing the closure callsite to the implMethod, we have to insert a cast. 
- // - // The check below ensures that - // (1) the implMethod type has the expected signature (captured types plus argument types - // from instantiatedMethodType) - // (2) the receiver of the implMethod matches the first captured type, if any, otherwise - // the first parameter type of instantiatedMethodType - // (3) all parameters that are not the same in samMethodType and instantiatedMethodType - // are reference types, so that we can insert casts to perform the same adaptation - // that the closure object would. - - val isStatic = implMethod.getTag == Opcodes.H_INVOKESTATIC - val indyParamTypes = Type.getArgumentTypes(indy.desc) - val instantiatedMethodArgTypes = instantiatedMethodType.getArgumentTypes - - val (receiverType, expectedImplMethodType) = - if (isStatic) { - val paramTypes = indyParamTypes ++ instantiatedMethodArgTypes - (None, Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) - } else if (implMethod.getTag == Opcodes.H_NEWINVOKESPECIAL) { - (Some(instantiatedMethodType.getReturnType), Type.getMethodType(Type.VOID_TYPE, instantiatedMethodArgTypes: _*)) - } else { - if (indyParamTypes.nonEmpty) { - val paramTypes = indyParamTypes.tail ++ instantiatedMethodArgTypes - (Some(indyParamTypes(0)), Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) - } else { - val paramTypes = instantiatedMethodArgTypes.tail - (Some(instantiatedMethodArgTypes(0)), Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) - } - } - - val isIndyLambda = ( - Type.getType(implMethod.getDesc) == expectedImplMethodType // (1) - && receiverType.forall(rt => implMethod.getOwner == rt.getInternalName) // (2) - && samMethodType.getArgumentTypes.corresponds(instantiatedMethodArgTypes)((samArgType, instArgType) => - samArgType == instArgType || isReference(samArgType) && isReference(instArgType)) // (3) - ) - - if (isIndyLambda) Some((indy, samMethodType, implMethod, instantiatedMethodType)) - else None - - case _ => None 
- } - case _ => None - } - } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala index d554e609e24d..ac8615dbedd4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,16 +15,16 @@ package backend.jvm package opt import scala.annotation.switch -import scala.collection.JavaConverters._ import scala.collection.immutable.IntMap import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.NoPosition import scala.tools.asm.Opcodes._ import scala.tools.asm.Type import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ -import scala.tools.nsc.backend.jvm.analysis.BackendUtils +import scala.tools.nsc.backend.jvm.analysis.{AsmAnalyzer, BackendUtils, ProdConsAnalyzer} import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class ClosureOptimizer { @@ -42,6 +42,8 @@ abstract class ClosureOptimizer { private object closureInitOrdering extends Ordering[ClosureInstantiation] { override def compare(x: ClosureInstantiation, y: ClosureInstantiation): Int = { + if (x eq y) return 0 + val cls = x.ownerClass.internalName compareTo y.ownerClass.internalName if (cls != 0) return cls @@ -84,8 +86,12 @@ abstract class ClosureOptimizer { * [load captured values from locals] * [load argument values from locals] * [invoke the closure body method] + * + * @param methods The methods to check for rewrites. If not defined, check all methods with closure + * instantiations. + * @return The changed methods. 
The order of the resulting sequence is deterministic. */ - def rewriteClosureApplyInvocations(): Unit = { + def rewriteClosureApplyInvocations(methods: Option[Iterable[MethodNode]], inlinerState: mutable.Map[MethodNode, inliner.MethodInlinerState]): mutable.LinkedHashSet[MethodNode] = { // sort all closure invocations to rewrite to ensure bytecode stability val toRewrite = mutable.TreeMap.empty[ClosureInstantiation, mutable.ArrayBuffer[(MethodInsnNode, Int)]](closureInitOrdering) @@ -94,18 +100,20 @@ abstract class ClosureOptimizer { callsites += ((invocation, stackHeight)) } + // the `toList` prevents modifying closureInstantiations while iterating it. + // minimalRemoveUnreachableCode (called in the loop) removes elements + val methodsToRewrite = methods.getOrElse(closureInstantiations.keysIterator.toList) + // For each closure instantiation find callsites of the closure and add them to the toRewrite // buffer (cannot change a method's bytecode while still looking for further invocations to // rewrite, the frame indices of the ProdCons analysis would get out of date). If a callsite - // cannot be rewritten, for example because the lambda body method is not accessible, issue a - // warning. The `toList` in the next line prevents modifying closureInstantiations while - // iterating it: minimalRemoveUnreachableCode (called in the loop) removes elements. - for (method <- closureInstantiations.keysIterator.toList if AsmAnalyzer.sizeOKForBasicValue(method)) closureInstantiations.get(method) match { + // cannot be rewritten, e.g., because the lambda body method is not accessible, issue a warning. + for (method <- methodsToRewrite if AsmAnalyzer.sizeOKForBasicValue(method)) closureInstantiations.get(method) match { case Some(closureInitsBeforeDCE) if closureInitsBeforeDCE.nonEmpty => val ownerClass = closureInitsBeforeDCE.head._2.ownerClass.internalName // Advanced ProdCons queries (initialProducersForValueAt) expect no unreachable code. 
- localOpt.minimalRemoveUnreachableCode(method) + localOpt.minimalRemoveUnreachableCode(method, ownerClass) if (AsmAnalyzer.sizeOKForSourceValue(method)) closureInstantiations.get(method) match { case Some(closureInits) => @@ -126,12 +134,25 @@ abstract class ClosureOptimizer { case _ => } + val changedMethods = mutable.LinkedHashSet.empty[MethodNode] + var previousMethod: MethodNode = null + for ((closureInit, invocations) <- toRewrite) { // Local variables that hold the captured values and the closure invocation arguments. val (localsForCapturedValues, argumentLocalsList) = localsForClosureRewrite(closureInit) for ((invocation, stackHeight) <- invocations) rewriteClosureApplyInvocation(closureInit, invocation, stackHeight, localsForCapturedValues, argumentLocalsList) + + // toInit is sorted by `closureInitOrdering`, so multiple closure inits within a method are next to each other + if (closureInit.ownerMethod != previousMethod) { + previousMethod = closureInit.ownerMethod + changedMethods += previousMethod + val state = inlinerState.getOrElseUpdate(previousMethod, new inliner.MethodInlinerState) + state.inlineLog.logClosureRewrite(closureInit, invocations, invocations.headOption.flatMap(p => state.outerCallsite(p._1))) + } } + + changedMethods } /** @@ -148,7 +169,7 @@ abstract class ClosureOptimizer { // allocate locals for storing the arguments of the closure apply callsites. // if there are multiple callsites, the same locals are re-used. 
val argTypes = closureInit.lambdaMetaFactoryCall.samMethodType.getArgumentTypes - val firstArgLocal = backendUtils.maxLocals(ownerMethod) + val firstArgLocal = BackendUtils.maxLocals(ownerMethod) val argLocals = LocalsList.fromTypes(firstArgLocal, argTypes) ownerMethod.maxLocals = firstArgLocal + argLocals.size @@ -184,7 +205,7 @@ abstract class ClosureOptimizer { case _ => Right(prodCons.frameAt(invocation).getStackSize) } - stackSize.right.map((invocation, _)) + stackSize.map((invocation, _)) }).toList } @@ -332,7 +353,7 @@ abstract class ClosureOptimizer { // One slot per value is correct for long / double, see comment in the `analysis` package object. val numCapturedValues = localsForCapturedValues.locals.length val invocationStackHeight = stackHeight + numCapturedValues - 1 + (if (isNew) 2 else 0) // -1 because the closure is gone - if (invocationStackHeight > backendUtils.maxStack(ownerMethod)) + if (invocationStackHeight > BackendUtils.maxStack(ownerMethod)) ownerMethod.maxStack = invocationStackHeight // replace the callsite with a new call to the body method @@ -383,7 +404,7 @@ abstract class ClosureOptimizer { sourceFilePath = sourceFilePath, annotatedInline = false, annotatedNoInline = false, - samParamTypes = callGraph.samParamTypes(bodyMethodNode, bodyDeclClassType), + samParamTypes = callGraph.samParamTypes(bodyMethodNode, Type.getArgumentTypes(bodyMethodNode.desc), bodyDeclClassType), calleeInfoWarning = None) }) val argInfos = closureInit.capturedArgInfos ++ originalCallsite.map(cs => cs.argInfos map { @@ -411,9 +432,6 @@ abstract class ClosureOptimizer { // Rewriting a closure invocation may render code unreachable. For example, the body method of // (x: T) => ??? has return type Nothing$, and an ATHROW is added (see fixLoadedNothingOrNullValue). 
BackendUtils.clearDceDone(ownerMethod) - - if (hasAdaptedImplMethod(closureInit) && inliner.canInlineCallsite(bodyMethodCallsite).isEmpty) - inliner.inlineCallsite(bodyMethodCallsite) } /** @@ -423,7 +441,7 @@ abstract class ClosureOptimizer { private def storeCaptures(closureInit: ClosureInstantiation): LocalsList = { val indy = closureInit.lambdaMetaFactoryCall.indy val capturedTypes = Type.getArgumentTypes(indy.desc) - val firstCaptureLocal = backendUtils.maxLocals(closureInit.ownerMethod) + val firstCaptureLocal = BackendUtils.maxLocals(closureInit.ownerMethod) // This could be optimized: in many cases the captured values are produced by LOAD instructions. // If the variable is not modified within the method, we could avoid introducing yet another @@ -489,13 +507,13 @@ abstract class ClosureOptimizer { */ def fromTypes(firstLocal: Int, types: Array[Type]): LocalsList = { var sizeTwoOffset = 0 - val locals: List[Local] = types.indices.map(i => { + val locals = List.from[Local](types.indices.iterator.map(i => { // The ASM method `type.getOpcode` returns the opcode for operating on a value of `type`. val offset = types(i).getOpcode(ILOAD) - ILOAD val local = Local(firstLocal + i + sizeTwoOffset, offset) if (local.size == 2) sizeTwoOffset += 1 local - })(collection.breakOut) + })) LocalsList(locals) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala index 7e2b3ced0b9c..f3775c03a4ef 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,20 +15,21 @@ package backend.jvm package opt import scala.annotation.{switch, tailrec} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.tools.asm.Opcodes._ import scala.tools.asm.Type import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis.BasicInterpreter import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.analysis.BackendUtils.{LambdaMetaFactoryCall, _} import scala.tools.nsc.backend.jvm.analysis._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class CopyProp { val postProcessor: PostProcessor - import postProcessor.{backendUtils, callGraph} + import postProcessor.{backendUtils, callGraph, bTypes} + import postProcessor.bTypes.frontendAccess.compilerSettings import backendUtils._ @@ -36,13 +37,13 @@ abstract class CopyProp { * For every `xLOAD n`, find all local variable slots that are aliases of `n` using an * AliasingAnalyzer and change the instruction to `xLOAD m` where `m` is the smallest alias. * This leaves behind potentially stale `xSTORE n` instructions, which are then eliminated - * by [[eliminateStaleStores]]. + * by [[eliminateStaleStoresAndRewriteSomeIntrinsics]]. */ def copyPropagation(method: MethodNode, owner: InternalName): Boolean = { AsmAnalyzer.sizeOKForAliasing(method) && { var changed = false val numParams = parametersSize(method) - lazy val aliasAnalysis = new AsmAnalyzer(method, owner, new AliasingAnalyzer(new BasicInterpreter)) + lazy val aliasAnalysis = new BasicAliasingAnalyzer(method, owner) // Remember locals that are used in a `LOAD` instruction. Assume a program has two LOADs: // @@ -53,7 +54,7 @@ abstract class CopyProp { // // In this example, we should change the second load from 1 to 3, which might render the // local variable 1 unused. 
- val knownUsed = new Array[Boolean](backendUtils.maxLocals(method)) + val knownUsed = new Array[Boolean](BackendUtils.maxLocals(method)) def usedOrMinAlias(it: IntIterator, init: Int): Int = { if (knownUsed(init)) init @@ -93,29 +94,45 @@ abstract class CopyProp { * Eliminate `xSTORE` instructions that have no consumer. If the instruction can be completely * eliminated, it is replaced by a POP. The [[eliminatePushPop]] cleans up unnecessary POPs. * + * Also rewrites some intrinsics (done here because a ProdCons analysis is available): + * - `ClassTag(classOf[X]).newArray` is rewritten to `new Array[X]` + * + * Finally there's an interesting special case that complements the inliner heuristics. After + * the rewrite above, if the `new Array[X]` is used in a `ScalaRuntime.array_apply/update` call, + * inline that method. These methods have a big pattern match for all primitive array types, and + * we only inline them if we statically know the array type. In this case, all the non-matching + * branches are later eliminated by `eliminateRedundantCastsAndRewriteSomeIntrinsics`. + * * Note that an `ASOTRE` can not always be eliminated: it removes a reference to the object that * is currently stored in that local, which potentially frees it for GC (scala/bug#5313). Therefore - * we replace such stores by `POP; ACONST_NULL; ASTORE x`. + * we replace such stores by `POP; ACONST_NULL; ASTORE x` - except if the store precedes an + * `xRETURN`, in which case it can be removed. + * + * Returns (staleStoreRemoved, intrinsicRewritten, callInlined). 
*/ - def eliminateStaleStores(method: MethodNode, owner: InternalName): Boolean = { - AsmAnalyzer.sizeOKForSourceValue(method) && { + def eliminateStaleStoresAndRewriteSomeIntrinsics(method: MethodNode, owner: InternalName): (Boolean, Boolean, Boolean) = { + if (!AsmAnalyzer.sizeOKForSourceValue(method)) (false, false, false) else { lazy val prodCons = new ProdConsAnalyzer(method, owner) def hasNoCons(varIns: AbstractInsnNode, slot: Int) = prodCons.consumersOfValueAt(varIns.getNext, slot).isEmpty - // insns to delete: IINC that have no consumer - val toDelete = mutable.ArrayBuffer.empty[IincInsnNode] - - // xSTORE insns to be replaced by POP or POP2 - val storesToDrop = mutable.ArrayBuffer.empty[VarInsnNode] + def popFor(vi: VarInsnNode): AbstractInsnNode = getPop(if (isSize2LoadOrStore(vi.getOpcode)) 2 else 1) // ASTORE insn that have no consumer. // - if the local is not live, the store is replaced by POP // - otherwise, pop the argument value and store NULL instead. Unless the boolean field is // `true`: then the store argument is already known to be ACONST_NULL. 
- val toNullOut = mutable.ArrayBuffer.empty[(VarInsnNode, Boolean)] + val toNullOut = mutable.Map.empty[VarInsnNode, Boolean] + + val toReplace = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]] - // `true` for variables that are known to be live - val liveVars = new Array[Boolean](backendUtils.maxLocals(method)) + val returns = mutable.Set.empty[AbstractInsnNode] + + val toInline = mutable.Set.empty[MethodInsnNode] + + // `true` for variables that are known to be live and hold non-primitives + val liveRefVars = new Array[Boolean](BackendUtils.maxLocals(method)) + + val firstLocalIndex = parametersSize(method) val it = method.instructions.iterator while (it.hasNext) it.next() match { @@ -128,36 +145,94 @@ abstract class CopyProp { case _ => false }) } - if (canElim) storesToDrop += vi + if (canElim) toReplace(vi) = List(popFor(vi)) else { val prods = prodCons.producersForValueAt(vi, prodCons.frameAt(vi).stackTop) val isStoreNull = prods.size == 1 && prods.head.getOpcode == ACONST_NULL - toNullOut += ((vi, isStoreNull)) + toNullOut(vi) = isStoreNull } case ii: IincInsnNode if hasNoCons(ii, ii.`var`) => - toDelete += ii + toReplace(ii) = Nil case vi: VarInsnNode => - liveVars(vi.`var`) = true + val opc = vi.getOpcode + val markAsLive = opc == ALOAD || opc == ASTORE && ( + // a store makes the variable live if it's a parameter, or if a non-null value if stored + vi.`var` < firstLocalIndex || prodCons.initialProducersForInputsOf(vi).exists(_.getOpcode != ACONST_NULL) + ) + if (markAsLive) + liveRefVars(vi.`var`) = true + + case mi: MethodInsnNode => + // rewrite `ClassTag(classOf[X]).newArray` to `new Array[X]` + val newArrayCls = BackendUtils.classTagNewArrayArg(mi, prodCons) + if (newArrayCls != null) { + val receiverProds = prodCons.producersForValueAt(mi, prodCons.frameAt(mi).stackTop - 1) + if (receiverProds.size == 1) { + toReplace(receiverProds.head) = List(receiverProds.head, getPop(1)) + toReplace(mi) = List(new TypeInsnNode(ANEWARRAY, newArrayCls)) 
+ toInline ++= prodCons.ultimateConsumersOfOutputsFrom(mi).collect({case i if isRuntimeArrayLoadOrUpdate(i) => i.asInstanceOf[MethodInsnNode]}) + } + } - case ii: IincInsnNode => - liveVars(ii.`var`) = true + case insn => + if (isReturn(insn)) returns += insn + } - case _ => + def isTrailing(insn: AbstractInsnNode) = insn != null && { + import scala.tools.asm.tree.AbstractInsnNode._ + insn.getType match { + case METHOD_INSN | INVOKE_DYNAMIC_INSN | JUMP_INSN | TABLESWITCH_INSN | LOOKUPSWITCH_INSN => false + case _ => true + } } - def replaceByPop(vi: VarInsnNode): Unit = { - val size = if (isSize2LoadOrStore(vi.getOpcode)) 2 else 1 - method.instructions.set(vi, getPop(size)) + // stale stores that precede a return can be removed, there's no need to null them out. the + // references are released for gc when the method returns. this also cleans up unnecessary + // `ACONST_NULL; ASTORE x` created by the inliner (for locals of the inlined method). + for (ret <- returns) { + var i = ret + while (isTrailing(i)) { + if (i.getType == AbstractInsnNode.VAR_INSN) { + val vi = i.asInstanceOf[VarInsnNode] + if (toNullOut.remove(vi).nonEmpty) + toReplace(vi) = List(popFor(vi)) + } + i = i.getPrevious + } } - toDelete foreach method.instructions.remove + var staleStoreRemoved = toNullOut.nonEmpty + var intrinsicRewritten = false + val callInlined = toInline.nonEmpty - storesToDrop foreach replaceByPop + for ((i, nis) <- toReplace) { + i.getType match { + case AbstractInsnNode.VAR_INSN | AbstractInsnNode.IINC_INSN => staleStoreRemoved = true + case AbstractInsnNode.METHOD_INSN => intrinsicRewritten = true + case _ => + } + // the original instruction `i` may appear (once) in `nis`. 
+ var insertBefore = i + var insertAfter: AbstractInsnNode = null + for (ni <- nis) { + if (ni eq i) { + insertBefore = null + insertAfter = i + } else if (insertBefore != null) + method.instructions.insertBefore(insertBefore, ni) + else { + method.instructions.insert(insertAfter, ni) + insertAfter = ni + } + } + if (insertBefore != null) + method.instructions.remove(i) + } for ((vi, isStoreNull) <- toNullOut) { - if (!liveVars(vi.`var`)) replaceByPop(vi) // can drop `ASTORE x` where x has only dead stores + if (!liveRefVars(vi.`var`)) method.instructions.set(vi, popFor(vi)) // can drop `ASTORE x` where x has only dead stores else { if (!isStoreNull) { val prev = vi.getPrevious @@ -167,7 +242,18 @@ abstract class CopyProp { } } - toDelete.nonEmpty || storesToDrop.nonEmpty || toNullOut.nonEmpty + if (toInline.nonEmpty) { + import postProcessor._ + val methodCallsites = callGraph.callsites(method) + var css = toInline.flatMap(methodCallsites.get).toList.sorted(inliner.callsiteOrdering) + while (css.nonEmpty) { + val cs = css.head + css = css.tail + inliner.inlineCallsite(cs, None, updateCallGraph = css.isEmpty) + } + } + + (staleStoreRemoved, intrinsicRewritten, callInlined) } } @@ -182,9 +268,11 @@ abstract class CopyProp { * NEW scala/Tuple1; DUP; ALOAD 0; INVOKESPECIAL scala/Tuple1.; POP * The POP has a single producer (the DUP), it's easy to eliminate these two. A special case * is needed to eliminate the INVOKESPECIAL and NEW. + * + * Returns (pushPopChanged, castAdded, nullCheckAdded) */ - def eliminatePushPop(method: MethodNode, owner: InternalName): Boolean = { - AsmAnalyzer.sizeOKForSourceValue(method) && { + def eliminatePushPop(method: MethodNode, owner: InternalName): (Boolean, Boolean, Boolean) = { + if (!AsmAnalyzer.sizeOKForSourceValue(method)) (false, false, false) else { // A queue of instructions producing a value that has to be eliminated. 
If possible, the // instruction (and its inputs) will be removed, otherwise a POP is inserted after val queue = mutable.Queue.empty[ProducedValue] @@ -195,13 +283,16 @@ abstract class CopyProp { // running the ProdConsAnalyzer only once.) val toRemove = mutable.Set.empty[AbstractInsnNode] // instructions to insert before some instruction - val toInsertBefore = mutable.Map.empty[AbstractInsnNode, List[InsnNode]] + val toInsertBefore = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]] // an instruction to insert after some instruction val toInsertAfter = mutable.Map.empty[AbstractInsnNode, AbstractInsnNode] + var castAdded = false + var nullCheckAdded = false + lazy val prodCons = new ProdConsAnalyzer(method, owner) - /** + /* * Returns the producers for the stack value `inputSlot` consumed by `cons`, if the consumer * instruction is the only consumer for all of these producers. * @@ -209,7 +300,7 @@ abstract class CopyProp { * block, this method returns Set.empty. */ def producersIfSingleConsumer(cons: AbstractInsnNode, inputSlot: Int): Set[AbstractInsnNode] = { - /** + /* * True if the values produced by `prod` are all the same. Most instructions produce a single * value. DUP and DUP2 (with a size-2 input) produce two equivalent values. However, there * are some exotic instructions that produce multiple non-equal values (DUP_X1, SWAP, ...). @@ -247,7 +338,7 @@ abstract class CopyProp { if (singleConsumer) prods else Set.empty } - /** + /* * For a POP instruction that is the single consumer of its producers, remove the POP and * enqueue the producers. */ @@ -260,7 +351,7 @@ abstract class CopyProp { } } - /** + /* * Traverse the method in its initial state and collect all POP instructions and side-effect * free constructor invocations that can be eliminated. */ @@ -281,7 +372,7 @@ abstract class CopyProp { } } - /** + /* * Eliminate the `numArgs` inputs of the instruction `prod` (which was eliminated). 
For * each input value * - if the `prod` instruction is the single consumer, enqueue the producers of the input @@ -294,7 +385,7 @@ abstract class CopyProp { if (stackOffset >= 0) { val prods = producersIfSingleConsumer(prod, frame.stackTop - stackOffset) val nSize = frame.peekStack(stackOffset).getSize - if (prods.isEmpty) pops append getPop(nSize) + if (prods.isEmpty) pops += getPop(nSize) else queue ++= prods.map(ProducedValue(_, nSize)) handle(stackOffset - 1) } @@ -303,12 +394,11 @@ abstract class CopyProp { if (pops.nonEmpty) toInsertBefore(prod) = pops.toList } - /** - * Eliminate LMF `indy` and its inputs. - */ + /* Eliminate LMF `indy` and its inputs. */ def handleClosureInst(indy: InvokeDynamicInsnNode): Unit = { toRemove += indy callGraph.removeClosureInstantiation(indy, method) + removeIndyLambdaImplMethod(owner, method, indy) handleInputs(indy, Type.getArgumentTypes(indy.desc).length) } @@ -352,8 +442,7 @@ abstract class CopyProp { handleInputs(prod, 1) case GETFIELD | GETSTATIC => - // TODO eliminate side-effect free module loads (https://github.com/scala/scala-dev/issues/16) - if (isBoxedUnit(prod)) toRemove += prod + if (isBoxedUnit(prod) || isModuleLoad(prod, modulesAllowSkipInitialization)) toRemove += prod else popAfterProd() // keep potential class initialization (static field) or NPE (instance field) case INVOKEVIRTUAL | INVOKESPECIAL | INVOKESTATIC | INVOKEINTERFACE => @@ -363,12 +452,31 @@ abstract class CopyProp { callGraph.removeCallsite(methodInsn, method) val receiver = if (methodInsn.getOpcode == INVOKESTATIC) 0 else 1 handleInputs(prod, Type.getArgumentTypes(methodInsn.desc).length + receiver) + } else if (isScalaUnbox(methodInsn)) { + val tp = primitiveAsmTypeToBType(Type.getReturnType(methodInsn.desc)) + val boxTp = bTypes.coreBTypes.boxedClassOfPrimitive(tp) + toInsertBefore(methodInsn) = List(new TypeInsnNode(CHECKCAST, boxTp.internalName), new InsnNode(POP)) + toRemove += prod + callGraph.removeCallsite(methodInsn, method) + 
castAdded = true + } else if (isJavaUnbox(methodInsn)) { + val nullCheck = mutable.ListBuffer.empty[AbstractInsnNode] + val nonNullLabel = newLabelNode + nullCheck += new JumpInsnNode(IFNONNULL, nonNullLabel) + nullCheck += new InsnNode(ACONST_NULL) + nullCheck += new InsnNode(ATHROW) + nullCheck += nonNullLabel + toInsertBefore(methodInsn) = nullCheck.toList + toRemove += prod + callGraph.removeCallsite(methodInsn, method) + method.maxStack = math.max(BackendUtils.maxStack(method), prodCons.frameAt(methodInsn).getStackSize + 1) + nullCheckAdded = true } else popAfterProd() case INVOKEDYNAMIC => prod match { - case callGraph.LambdaMetaFactoryCall(indy, _, _, _) => handleClosureInst(indy) + case LambdaMetaFactoryCall(indy, _, _, _, _) => handleClosureInst(indy) case _ => popAfterProd() } @@ -376,13 +484,14 @@ abstract class CopyProp { if (isNewForSideEffectFreeConstructor(prod)) toRemove += prod else popAfterProd() - case LDC => prod.asInstanceOf[LdcInsnNode].cst match { + case LDC => + prod.asInstanceOf[LdcInsnNode].cst match { case _: java.lang.Integer | _: java.lang.Float | _: java.lang.Long | _: java.lang.Double | _: String => toRemove += prod case _ => - // don't remove class literals, method types, method handles: keep a potential NoClassDefFoundError - popAfterProd() + if (compilerSettings.optAllowSkipClassLoading) toRemove += prod + else popAfterProd() } case MULTIANEWARRAY => @@ -443,8 +552,7 @@ abstract class CopyProp { toInsertAfter foreach { case (target, insn) => nextExecutableInstructionOrLabel(target) match { - // `insn` is of type `InsnNode`, so we only need to check the Opcode when comparing to another instruction - case Some(next) if next.getOpcode == insn.getOpcode && toRemove(next) => + case Some(next) if insn.getType == AbstractInsnNode.INSN && next.getOpcode == insn.getOpcode && toRemove(next) => // Inserting and removing a POP at the same place should not enable `changed`. 
This happens // when a POP directly follows a producer that cannot be eliminated, e.g. INVOKESTATIC A.m ()I; POP // The POP is initially added to `toRemove`, and the `INVOKESTATIC` producer is added to the queue. @@ -465,7 +573,7 @@ abstract class CopyProp { changed = true method.instructions.remove(insn) } - changed + (changed, castAdded, nullCheckAdded) } } @@ -491,15 +599,17 @@ abstract class CopyProp { * Analyzer on the method, making it more efficient. * * This method also removes `ACONST_NULL; ASTORE n` if the local n is not live. This pattern is - * introduced by [[eliminateStaleStores]]. + * introduced by [[eliminateStaleStoresAndRewriteSomeIntrinsics]]. * * The implementation is a little tricky to support the following case: * ISTORE 1; ISTORE 2; ILOAD 2; ACONST_NULL; ASTORE 3; ILOAD 1 * The outer store-load pair can be removed if two the inner pairs can be. */ def eliminateStoreLoad(method: MethodNode): Boolean = { + // TODO: use copyProp once we have cached analyses? or is the analysis invalidated anyway because instructions are deleted / changed? + // if we cache them anyway, we can use an analysis if it exists in the cache, and skip otherwise. 
val removePairs = mutable.Set.empty[RemovePair] - val liveVars = new Array[Boolean](backendUtils.maxLocals(method)) + val liveVars = new Array[Boolean](BackendUtils.maxLocals(method)) val liveLabels = mutable.Set.empty[LabelNode] def mkRemovePair(store: VarInsnNode, other: AbstractInsnNode, depends: List[RemovePairDependency]): RemovePair = { @@ -519,11 +629,11 @@ abstract class CopyProp { val pairStartStack = new mutable.Stack[(AbstractInsnNode, mutable.ListBuffer[RemovePairDependency])] - def push(insn: AbstractInsnNode) = { + def push(insn: AbstractInsnNode): Unit = { pairStartStack push ((insn, mutable.ListBuffer.empty)) } - def addDepends(dependency: RemovePairDependency) = if (pairStartStack.nonEmpty) { + def addDepends(dependency: RemovePairDependency): Unit = if (pairStartStack.nonEmpty) { val (_, depends) = pairStartStack.top depends += dependency } @@ -535,7 +645,7 @@ abstract class CopyProp { } } - /** + /* * Try to pair `insn` with its correspondent on the stack * - if the stack top is a store and `insn` is a corresponding load, create a pair * - otherwise, check the two top stack values for `null; store`. 
if it matches, create @@ -550,7 +660,7 @@ abstract class CopyProp { @tailrec def tryPairing(): Unit = { if (completesStackTop(insn)) { - val (store: VarInsnNode, depends) = pairStartStack.pop() + val (store: VarInsnNode, depends) = pairStartStack.pop(): @unchecked addDepends(mkRemovePair(store, insn, depends.toList)) } else if (pairStartStack.nonEmpty) { val (top, topDepends) = pairStartStack.pop() @@ -633,11 +743,12 @@ abstract class CopyProp { method.instructions.remove(removePair.other) } - removePairs.nonEmpty + val changed = removePairs.nonEmpty + changed } } -trait RemovePairDependency +sealed trait RemovePairDependency case class RemovePair(store: VarInsnNode, other: AbstractInsnNode, depends: List[RemovePairDependency]) extends RemovePairDependency { override def toString = s"<${AsmUtils textify store},${AsmUtils textify other}> [$depends]" } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/FifoCache.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/FifoCache.scala new file mode 100644 index 000000000000..887d5e63fd60 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/FifoCache.scala @@ -0,0 +1,56 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.backend.jvm.opt + +import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue} +import java.util.{LinkedHashMap, Map => JMap} +import scala.collection.mutable +import scala.jdk.CollectionConverters._ + +object FifoCache { + def apply[K,V](maxSize: Int, threadsafe: Boolean): mutable.Map[K,V] = { + require(maxSize > 0) + if (threadsafe) new ConcFifoCache(maxSize) else new FifoCache[K, V](maxSize).asScala + } + + private class FifoCache[K, V](maxSize: Int) extends LinkedHashMap[K,V] { + override def removeEldestEntry(eldest: JMap.Entry[K, V]): Boolean = { + size() > maxSize + } + } + + private class ConcFifoCache[K, V](maxSize: Int) extends mutable.Map[K,V] { + private val cache: ConcurrentHashMap[K, V] = new ConcurrentHashMap() + private val queue: ConcurrentLinkedQueue[K] = new ConcurrentLinkedQueue() + + def get(key: K): Option[V] = Option(cache.get(key)) + + def subtractOne(key: K): this.type = { + cache.remove(key) + queue.remove(key) + this + } + + def addOne(elem: (K, V)): this.type = { + while (cache.size() >= maxSize) { + val oldest = queue.poll() + if (oldest != null) cache.remove(oldest) + } + queue.add(elem._1) + cache.put(elem._1, elem._2) + this + } + + def iterator: Iterator[(K, V)] = cache.entrySet.iterator.asScala.map(e => (e.getKey, e.getValue)) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala index 492f472af75d..c2a832ba5bf0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,7 @@ package scala.tools.nsc package backend.jvm package opt +import scala.collection.mutable import scala.tools.asm._ import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo} import scala.tools.nsc.backend.jvm.BackendReporting.UnknownScalaInlineInfoVersion @@ -67,19 +68,20 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI } // The method count fits in a short (the methods_count in a classfile is also a short) - result.putShort(inlineInfo.methodInfosSorted.size) + result.putShort(inlineInfo.methodInfos.size) // Sort the methodInfos for stability of classfiles - for (((name, desc), info) <- inlineInfo.methodInfosSorted) { - result.putShort(cw.newUTF8(name)) - result.putShort(cw.newUTF8(desc)) - - var inlineInfo = 0 - if (info.effectivelyFinal) inlineInfo |= 1 - // inlineInfo |= 2 // no longer written - if (info.annotatedInline) inlineInfo |= 4 - if (info.annotatedNoInline) inlineInfo |= 8 - result.putByte(inlineInfo) + inlineInfo.methodInfos.foreachEntry { + case ((name, desc), info) => + result.putShort(cw.newUTF8(name)) + result.putShort(cw.newUTF8(desc)) + + var inlineInfo = 0 + if (info.effectivelyFinal) inlineInfo |= 1 + // inlineInfo |= 2 // no longer written + if (info.annotatedInline) inlineInfo |= 4 + if (info.annotatedNoInline) inlineInfo |= 8 + result.putByte(inlineInfo) } result } @@ -114,7 +116,8 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI } val numEntries = nextShort() - val infos = (0 until numEntries).map(_ => { + val infos = new mutable.TreeMap[(String, String), MethodInlineInfo] + (0 until numEntries).foreach{ _ => val name = nextUTF8() val desc = nextUTF8() @@ -123,8 +126,8 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI // = (inlineInfo & 2) != 0 // no longer used val isInline = (inlineInfo & 4) != 0 val isNoInline = (inlineInfo & 8) != 0 - ((name, desc), MethodInlineInfo(isFinal, isInline, isNoInline)) - 
}).toMap + infos((name, desc)) = MethodInlineInfo(isFinal, isInline, isNoInline) + } val info = InlineInfo(isFinal, sam, infos, None) InlineInfoAttribute(info) @@ -163,4 +166,4 @@ object InlineInfoAttribute { * In order to instruct the ASM framework to deserialize the ScalaInlineInfo attribute, we need * to pass a prototype instance when running the class reader. */ -object InlineInfoAttributePrototype extends InlineInfoAttribute(InlineInfo(false, null, null, null)) +object InlineInfoAttributePrototype extends InlineInfoAttribute(InlineInfo(isEffectivelyFinal = false, sam = null, methodInfos = null, warning = null)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index ea6dd1487f95..a1cb4d09d826 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,15 +15,18 @@ package backend.jvm package opt import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.tools.asm import scala.tools.asm.Opcodes._ +import scala.tools.asm.Type import scala.tools.asm.tree._ +import scala.tools.asm.tree.analysis.Value import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ -import scala.tools.nsc.backend.jvm.analysis.BackendUtils +import scala.tools.nsc.backend.jvm.analysis._ +import scala.tools.nsc.backend.jvm.analysis.BackendUtils.LambdaMetaFactoryCall import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class Inliner { @@ -37,112 +40,454 @@ abstract class Inliner { import frontendAccess.{backendReporting, compilerSettings} import inlinerHeuristics._ - sealed trait InlineLog { - def request: InlineRequest - } - final case class InlineLogSuccess(request: InlineRequest, sizeBefore: Int, sizeInlined: Int) extends InlineLog { - var downstreamLog: mutable.Buffer[InlineLog] = mutable.ListBuffer.empty + // A callsite that was inlined and the IllegalAccessInstructions warning that was delayed. + // The inliner speculatively inlines a callsite even if the method then has instructions that would + // cause an IllegalAccessError in the target class. If all of those instructions are eliminated + // (by inlining) in a later round, everything is fine. Otherwise the method is reverted. + final case class InlinedCallsite(eliminatedCallsite: Callsite, warning: Option[IllegalAccessInstructions]) { + // If this InlinedCallsite has a warning about a given instruction, return a copy where the warning + // only contains that instruction. 
+ def filterForWarning(insn: AbstractInsnNode): Option[InlinedCallsite] = warning match { + case Some(w) if w.instructions.contains(insn) => Some(this.copy(warning = Some(w.copy(instructions = List(insn))))) + case _ => None + } } - final case class InlineLogFail(request: InlineRequest, warning: CannotInlineWarning) extends InlineLog - final case class InlineLogRollback(request: InlineRequest, warnings: List[CannotInlineWarning]) extends InlineLog - object InlineLog { - private def shouldLog(request: InlineRequest): Boolean = compilerSettings.optLogInline match { - case Some(v) => - def matchesName = { - val prefix = v match { - case "_" => "" - case p => p + // The state accumulated across inlining rounds for a single MethodNode + final class MethodInlinerState { + // Instructions that were copied into a method and would cause an IllegalAccess. They need to + // be inlined in a later round, otherwise the method is rolled back to its original state. + val illegalAccessInstructions = mutable.Set.empty[AbstractInsnNode] + + // A map from invocation instructions that were copied (inlined) into this method to the + // inlined callsite from which they originate. + // Note: entries are not removed from this map, even if an inlined callsite gets inlined in a + // later round. This allows re-constructing the inline chain. 
+ val inlinedCalls = mutable.Map.empty[AbstractInsnNode, InlinedCallsite] + + var undoLog: UndoLog = NoUndoLogging + + var inlineLog = new InlineLog + + override def clone(): MethodInlinerState = { + val r = new MethodInlinerState + r.illegalAccessInstructions ++= illegalAccessInstructions + r.inlinedCalls ++= inlinedCalls + // The clone references the same InlineLog, so no logs are discarded when rolling back + r.inlineLog = inlineLog + // Skip undoLog: clone() is only called when undoLog == NoUndoLogging + r + } + + def outerCallsite(call: AbstractInsnNode): Option[Callsite] = inlinedCalls.get(call).map(_.eliminatedCallsite) + + // The chain of inlined callsites that that lead to some (call) instruction. Don't include + // synthetic forwarders if skipForwarders is true (don't show those in inliner warnings, as they + // don't show up in the source code). + // Also used to detect inlining cycles. + def inlineChain(call: AbstractInsnNode, skipForwarders: Boolean): List[Callsite] = { + @tailrec def impl(insn: AbstractInsnNode, res: List[Callsite]): List[Callsite] = inlinedCalls.get(insn) match { + case Some(inlinedCallsite) => + val cs = inlinedCallsite.eliminatedCallsite + val res1 = if (skipForwarders && backendUtils.isTraitSuperAccessorOrMixinForwarder(cs.callee.get.callee, cs.callee.get.calleeDeclarationClass)) res else cs :: res + impl(cs.callsiteInstruction, res1) + case _ => + res + } + impl(call, Nil) + } + + // In a chain of inlined calls which lead to some (call) instruction, return the root `InlinedCallsite` + // which has a delayed warning . When inlining `call` fails, warn about the root instruction instead of + // the downstream inline request that tried to eliminate an illegalAccess instruction. + // This method skips over forwarders. For example in `trait T { def m = ... }; class A extends T` + // the inline chain is `A.m (mixin forwarder) - T.m$ (static accessor) - T.m`. The method returns + // `T.m` (even if the root callsite is `A.m`. 
+ // If the chain has only forwarders, `returnForwarderIfNoOther` determines whether to return `None` + // or the last inlined forwarder. + def rootInlinedCallsiteWithWarning(call: AbstractInsnNode, returnForwarderIfNoOther: Boolean): Option[InlinedCallsite] = { + def isForwarder(callsite: Callsite) = backendUtils.isTraitSuperAccessorOrMixinForwarder(callsite.callee.get.callee, callsite.callee.get.calleeDeclarationClass) + def result(res: Option[InlinedCallsite]) = res match { + case Some(r) if returnForwarderIfNoOther || !isForwarder(r.eliminatedCallsite) => res + case _ => None + } + @tailrec def impl(insn: AbstractInsnNode, res: Option[InlinedCallsite]): Option[InlinedCallsite] = inlinedCalls.get(insn) match { + case Some(inlinedCallsite) => + val w = inlinedCallsite.filterForWarning(insn) + if (w.isEmpty) result(res) + else { + val cs = inlinedCallsite.eliminatedCallsite + // The returned InlinedCallsite can be a forwarder if that forwarder was the initial callsite in the method + val nextRes = if (isForwarder(cs) && res.nonEmpty && !isForwarder(res.get.eliminatedCallsite)) res else w + impl(cs.callsiteInstruction, nextRes) } - val name: String = request.callsite.callsiteClass.internalName + "." + request.callsite.callsiteMethod.name - name startsWith prefix + + case _ => result(res) + } + impl(call, None) + } + } + + final class InlineLog { + import InlineLog._ + + private var _active = false + + var roots: mutable.ArrayBuffer[InlineLogResult] = null + var downstream: mutable.HashMap[Callsite, mutable.ArrayBuffer[InlineLogResult]] = null + var callsiteInfo: String = null + + // A bit of a hack.. We check the -Yopt-log-inline flag when logging the first inline request. + // Because the InlineLog is part of the MethodInlinerState, subsequent requests will all be + // for the same callsite class / method. At the point where the MethodInlinerState is created + // we don't have access to the enclosing class. 
+ private def active(callsiteClass: ClassBType, callsiteMethod: MethodNode): Boolean = { + if (roots == null) { + compilerSettings.optLogInline match { + case Some("_") => _active = true + case Some(prefix) => _active = s"${callsiteClass.internalName}.${callsiteMethod.name}" startsWith prefix + case _ => _active = false } - upstream != null || (isTopLevel && matchesName) + if (_active) { + roots = mutable.ArrayBuffer.empty[InlineLogResult] + downstream = mutable.HashMap.empty[Callsite, mutable.ArrayBuffer[InlineLogResult]] + callsiteInfo = s"Inlining into ${callsiteClass.internalName}.${callsiteMethod.name}" + } + } + _active + } - case _ => false + private def active(callsite: Callsite): Boolean = active(callsite.callsiteClass, callsite.callsiteMethod) + + private def bufferForOuter(outer: Option[Callsite]) = outer match { + case Some(o) => downstream.getOrElse(o, roots) + case _ => roots } - // indexed by callsite method - private val logs = mutable.Map.empty[MethodNode, mutable.LinkedHashSet[InlineLog]] + def logSuccess(request: InlineRequest, sizeBefore: Int, sizeAfter: Int, outer: Option[Callsite]) = if (active(request.callsite)) { + bufferForOuter(outer) += InlineLogSuccess(request, sizeBefore, sizeAfter) + downstream(request.callsite) = mutable.ArrayBuffer.empty + } + + def logClosureRewrite(closureInit: ClosureInstantiation, invocations: mutable.ArrayBuffer[(MethodInsnNode, Int)], outer: Option[Callsite]) = if (active(closureInit.ownerClass, closureInit.ownerMethod)) { + bufferForOuter(outer) += InlineLogRewrite(closureInit, invocations.map(_._1).toList) + } - private var upstream: InlineLogSuccess = _ - private var isTopLevel = true + def logFail(request: InlineRequest, warning: CannotInlineWarning, outer: Option[Callsite]) = if (active(request.callsite)) { + bufferForOuter(outer) += InlineLogFail(request, warning) + } + + def logRollback(callsite: Callsite, reason: String, outer: Option[Callsite]) = if (active(callsite)) { + bufferForOuter(outer) += 
InlineLogRollback(reason) + } - def withInlineLogging[T](request: InlineRequest)(inlineRequest: => Unit)(inlinePost: => T): T = { - def doInlinePost(): T = { - val savedIsTopLevel = isTopLevel - isTopLevel = false - try inlinePost - finally isTopLevel = savedIsTopLevel + def nonEmpty = roots != null + + def print(): Unit = if (roots != null) { + def printChildren(indent: Int, callsite: Callsite): Unit = downstream.get(callsite) match { + case Some(logs) => logs.foreach(l => printLog(indent, l)) + case _ => } - if (shouldLog(request)) { - val sizeBefore = request.callsite.callsiteMethod.instructions.size - inlineRequest - val log = InlineLogSuccess(request, sizeBefore, request.callsite.callee.get.callee.instructions.size) - apply(log) - - val savedUpstream = upstream - upstream = log - try doInlinePost() - finally upstream = savedUpstream - } else { - inlineRequest - doInlinePost() + def printLog(indent: Int, log: InlineLogResult): Unit = { + println(log.entryString(indent)) + log match { + case s: InlineLogSuccess => printChildren(indent + 1, s.request.callsite) + case _ => + } + } + roots.size match { + case 0 => + case 1 => + Console.print(callsiteInfo) + Console.print(": ") + printLog(0, roots(0)) + case _ => + println(callsiteInfo) + for (log <- roots) printLog(1, log) } } + } - def apply(log: => InlineLog): Unit = if (shouldLog(log.request)) { - if (upstream != null) upstream.downstreamLog += log - else { - val methodLogs = logs.getOrElseUpdate(log.request.callsite.callsiteMethod, mutable.LinkedHashSet.empty) - methodLogs += log + object InlineLog { + sealed trait InlineLogResult { + def entryString(indent: Int): String = { + def calleeString(r: InlineRequest) = { + val callee = r.callsite.callee.get + callee.calleeDeclarationClass.internalName + "." + callee.callee.name + } + val indentString = " " * indent + this match { + case InlineLogSuccess(r, sizeBefore, sizeAfter) => + s"${indentString}inlined ${calleeString(r)} (${r.logText}). 
Before: $sizeBefore ins, after: $sizeAfter ins." + + case InlineLogRewrite(closureInit, invocations) => + s"${indentString}rewrote invocations of closure allocated in ${closureInit.ownerClass.internalName}.${closureInit.ownerMethod.name} with body ${closureInit.lambdaMetaFactoryCall.implMethod.getName}: ${invocations.map(AsmUtils.textify).mkString(", ")}" + + case InlineLogFail(r, w) => + s"${indentString}failed ${calleeString(r)} (${r.logText}). ${w.toString.replace('\n', ' ')}" + + case InlineLogRollback(reason) => + s"${indentString}rolled back: $reason." + } } } + final case class InlineLogSuccess(request: InlineRequest, sizeBefore: Int, sizeAfter: Int) extends InlineLogResult + final case class InlineLogRewrite(closureInit: ClosureInstantiation, invocations: List[MethodInsnNode]) extends InlineLogResult + final case class InlineLogFail(request: InlineRequest, warning: CannotInlineWarning) extends InlineLogResult + final case class InlineLogRollback(reason: String) extends InlineLogResult + } - def entryString(log: InlineLog, indent: Int = 0): String = { - val callee = log.request.callsite.callee.get - val calleeString = callee.calleeDeclarationClass.internalName + "." + callee.callee.name - val indentString = " " * indent - log match { - case s @ InlineLogSuccess(_, sizeBefore, sizeInlined) => - val self = s"${indentString}inlined $calleeString. Before: $sizeBefore ins, inlined: $sizeInlined ins." - if (s.downstreamLog.isEmpty) self - else s.downstreamLog.iterator.map(entryString(_, indent + 2)).mkString(self + "\n", "\n", "") + // True if all instructions (they would cause an IllegalAccessError otherwise) can potentially be + // inlined in a later inlining round. + // Note that this method has a side effect. It allows inlining `INVOKESPECIAL` calls of static + // super accessors that we emit in traits. The inlined calls are marked in the call graph as + // `staticallyResolvedInvokespecial`. 
When looking up the MethodNode for the cloned `INVOKESPECIAL`, + // the call graph will always return the corresponding method in the trait. + def maybeInlinedLater(callsite: Callsite, insns: List[AbstractInsnNode]): Boolean = { + insns.forall({ + case mi: MethodInsnNode => + (mi.getOpcode != INVOKESPECIAL) || { + // Special handling for invokespecial T.f that appears within T, and T defines f. + // Such an instruction can be inlined into a different class, but it needs to be inlined in + // turn in a later inlining round. + // The call graph needs to treat it specially: the normal dynamic lookup needs to be + // avoided, it needs to resolve to T.f, no matter in which class the invocation appears. + def hasMethod(c: ClassNode): Boolean = { + val r = c.methods.iterator.asScala.exists(m => m.name == mi.name && m.desc == mi.desc) + if (r) callGraph.staticallyResolvedInvokespecial += mi + r + } + + mi.name != GenBCode.INSTANCE_CONSTRUCTOR_NAME && + mi.owner == callsite.callee.get.calleeDeclarationClass.internalName && + byteCodeRepository.classNode(mi.owner).map(hasMethod).getOrElse(false) + } + case _ => false + }) + } - case InlineLogFail(_, w) => - s"${indentString}failed $calleeString. ${w.toString.replace('\n', ' ')}" + def runInlinerAndClosureOptimizer(): Unit = { + val runClosureOptimizer = compilerSettings.optClosureInvocations + var round = 0 + var changedByClosureOptimizer = mutable.LinkedHashSet.empty[MethodNode] - case InlineLogRollback(_, _) => - s"${indentString}rolling back, nested inline failed." 
+ val inlinerState = mutable.Map.empty[MethodNode, MethodInlinerState] + + // Don't try again to inline failed callsites + val failedToInline = mutable.Set.empty[MethodInsnNode] + + while (round < 10 && (round == 0 || changedByClosureOptimizer.nonEmpty)) { + val specificMethodsForInlining = if (round == 0) None else Some(changedByClosureOptimizer) + val changedByInliner = runInliner(specificMethodsForInlining, inlinerState, failedToInline) + + if (runClosureOptimizer) { + val specificMethodsForClosureRewriting = if (round == 0) None else Some(changedByInliner) + // TODO: remove cast by moving `MethodInlinerState` and other classes from inliner to a separate PostProcessor component + changedByClosureOptimizer = closureOptimizer.rewriteClosureApplyInvocations(specificMethodsForClosureRewriting, inlinerState.asInstanceOf[mutable.Map[MethodNode, postProcessor.closureOptimizer.postProcessor.inliner.MethodInlinerState]]) } - } - def print(): Unit = if (compilerSettings.optLogInline.isDefined) { - val byClassAndMethod: List[(InternalName, mutable.Map[MethodNode, mutable.LinkedHashSet[InlineLog]])] = { - logs. - groupBy(_._2.head.request.callsite.callsiteClass.internalName). 
- toList.sortBy(_._1) + var logs = List.empty[(MethodNode, InlineLog)] + for (m <- inlinerState.keySet if !changedByClosureOptimizer(m)) { + val log = inlinerState.remove(m).get.inlineLog + if (log.nonEmpty) logs ::= ((m, log)) } - for { - (c, methodLogs) <- byClassAndMethod - (m, mLogs) <- methodLogs.toList.sortBy(_._1.name) - mLog <- mLogs // insertion order - } { - println(s"Inline into $c.${m.name}: ${entryString(mLog)}") + if (logs.nonEmpty) { + // Deterministic inline log + val sortedLogs = logs.sorted(Ordering.by[(MethodNode, InlineLog), (String, String)](p => (p._1.name, p._1.desc))) + sortedLogs.foreach(_._2.print()) } + + round += 1 } } - def runInliner(): Unit = { - for (request <- collectAndOrderInlineRequests) { - val Right(callee) = request.callsite.callee // collectAndOrderInlineRequests returns callsites with a known callee - val warnings = inline(request) - for (warning <- warnings) { - if (warning.emitWarning(compilerSettings)) - backendReporting.optimizerWarning(request.callsite.callsitePosition, warning.toString, site = backendUtils.optimizerWarningSiteString(request.callsite)) + /** + * @param methods The methods to check for callsites to inline. If not defined, check all methods. + * @return The set of changed methods, in no deterministic order. + */ + def runInliner(methods: Option[mutable.LinkedHashSet[MethodNode]], inlinerState: mutable.Map[MethodNode, MethodInlinerState], failed: mutable.Set[MethodInsnNode]): Iterable[MethodNode] = { + // Inline requests are grouped by method for performance: we only update the call graph (which + // runs analyzers) once all callsites are inlined. 
+ val requests: mutable.Queue[(MethodNode, List[InlineRequest])] = + if (methods.isEmpty) collectAndOrderInlineRequests + else mutable.Queue.empty + + // Methods that were changed (inlined into), they will be checked for more callsites to inline + val changedMethods = { + val r = mutable.Queue.empty[MethodNode] + methods.foreach(r.addAll) + r + } + + var changedMethodHasIllegalAccess = false + + // TODO: remove those that were rolled back to their original form? + val overallChangedMethods = mutable.Set.empty[MethodNode] + + // Show chain of inlines that lead to a failure in inliner warnings + def inlineChainSuffix(callsite: Callsite, chain: List[Callsite]): String = + if (chain.isEmpty) "" else + s""" + |Note that this callsite was itself inlined into ${BackendReporting.methodSignature(callsite.callsiteClass.internalName, callsite.callsiteMethod)} + |by inlining the following methods: + |${chain.map(cs => BackendReporting.methodSignature(cs.callee.get.calleeDeclarationClass.internalName, cs.callee.get.callee)).mkString(" - ", "\n - ", "")}""".stripMargin + + while (requests.nonEmpty || changedMethods.nonEmpty) { + // First inline all requests that were initially collected. Then check methods that changed + // for more callsites to inline. + // Alternatively, we could find more callsites directly after inlining the initial requests + // of a method, before inlining into other methods. But that could cause work duplication. If + // a callee is inlined before the inliner has run on it, the inliner needs to do the work on + // both the callee and the cloned version(s). + // Exception: if, after inlining, `m` has instructions that would cause an IllegalAccessError, + // continue inlining into `m`. These instructions might get inlined as well, otherwise `m` is + // rolled back. This avoid cloning the illegal instructions in case `m` itself gets inlined. 
+ if (requests.nonEmpty && !changedMethodHasIllegalAccess) { + val (method, rs) = requests.dequeue() + val state = inlinerState.getOrElseUpdate(method, new MethodInlinerState) + var changed = false + + def doInline(r: InlineRequest, aliasFrame: AliasingFrame[Value], w: Option[IllegalAccessInstructions]): Map[AbstractInsnNode, AbstractInsnNode] = { + val sizeBefore = method.instructions.size // cheap (a field read) + val instructionMap = inlineCallsite(r.callsite, Some(aliasFrame), updateCallGraph = false) + val inlined = InlinedCallsite(r.callsite, w.map(iw => iw.copy(instructions = iw.instructions.map(instructionMap)))) + instructionMap.valuesIterator foreach { + case mi: MethodInsnNode => state.inlinedCalls(mi) = inlined + case _ => + } + for (warn <- w; ins <- warn.instructions) { + state.illegalAccessInstructions += instructionMap(ins) + } + val callInsn = r.callsite.callsiteInstruction + state.illegalAccessInstructions.subtractOne(callInsn) + if (state.illegalAccessInstructions.isEmpty) + state.undoLog = NoUndoLogging + state.inlineLog.logSuccess(r, sizeBefore, method.instructions.size, state.outerCallsite(r.callsite.callsiteInstruction)) + changed = true + instructionMap + } + + val rsWithAliasFrames = { + val cs = rs.head.callsite + val a = new BasicAliasingAnalyzer(cs.callsiteMethod, cs.callsiteClass.internalName) + rs.map(r => (r, a.frameAt(r.callsite.callsiteInstruction).asInstanceOf[AliasingFrame[Value]])) + } + + var currentMethodRolledBack = false + + for ((r, aliasFrame) <- rsWithAliasFrames) if (!currentMethodRolledBack) { + canInlineCallsite(r.callsite) match { + case None => + doInline(r, aliasFrame, None) + + case Some(w: IllegalAccessInstructions) if maybeInlinedLater(r.callsite, w.instructions) => + if (state.undoLog == NoUndoLogging) { + val undo = new UndoLog() + val currentState = state.clone() + // undo actions for the method and global state + undo.saveMethodState(r.callsite.callsiteClass, method) + undo { + // undo actions for the state of 
the inliner loop + failed += r.callsite.callsiteInstruction + inlinerState(method) = currentState + // method is not in changedMethods in both places where `rollback` is invoked + changedMethods.enqueue(method) + } + state.undoLog = undo + } + doInline(r, aliasFrame, Some(w)) + + case Some(w) => + val callInsn = r.callsite.callsiteInstruction + + state.inlineLog.logFail(r, w, state.outerCallsite(r.callsite.callsiteInstruction)) + + if (state.illegalAccessInstructions(callInsn)) { + state.inlineLog.logRollback(r.callsite, "The callsite could not be inlined, keeping it would cause an IllegalAccessError", state.outerCallsite(r.callsite.callsiteInstruction)) + state.undoLog.rollback() + currentMethodRolledBack = true + } + + state.rootInlinedCallsiteWithWarning(r.callsite.callsiteInstruction, returnForwarderIfNoOther = false) match { + case Some(inlinedCallsite) => + val rw = inlinedCallsite.warning.get + if (rw.emitWarning(compilerSettings)) { + backendReporting.optimizerWarning( + inlinedCallsite.eliminatedCallsite.callsitePosition, + rw.toString + inlineChainSuffix(r.callsite, state.inlineChain(inlinedCallsite.eliminatedCallsite.callsiteInstruction, skipForwarders = true)), + backendUtils.optimizerWarningSiteString(inlinedCallsite.eliminatedCallsite)) + } + case _ => + if (w.emitWarning(compilerSettings)) + backendReporting.optimizerWarning( + r.callsite.callsitePosition, + w.toString + inlineChainSuffix(r.callsite, state.inlineChain(r.callsite.callsiteInstruction, skipForwarders = true)), + backendUtils.optimizerWarningSiteString(r.callsite)) + } + } + } + + if (changed) { + callGraph.refresh(method, rs.head.callsite.callsiteClass) + if (state.illegalAccessInstructions.nonEmpty) { + changedMethods.prepend(method) + changedMethodHasIllegalAccess = true + } else + changedMethods.enqueue(method) + overallChangedMethods += method + } + + } else { + // look at all callsites in a methods again, also those that were previously not selected for + // inlining. 
after inlining, types might get more precise and make a callsite inlineable. + val method = changedMethods.dequeue() + val state = inlinerState.getOrElseUpdate(method, new MethodInlinerState) + + def isLoop(call: MethodInsnNode, callee: Callee): Boolean = + callee.callee == method || { + state.inlineChain(call, skipForwarders = false).exists(_.callee.get.callee == callee.callee) + } + + val rs = mutable.ListBuffer.empty[InlineRequest] + callGraph.callsites(method).valuesIterator foreach { + // Don't inline: recursive calls, callsites that failed inlining before + case cs: Callsite if !failed(cs.callsiteInstruction) && cs.callee.isRight && !isLoop(cs.callsiteInstruction, cs.callee.get) => + inlineRequest(cs) match { + case Some(Right(req)) => rs += req + case _ => + } + case _ => + } + val newRequests = selectRequestsForMethodSize(method, rs.toList.sorted(inlineRequestOrdering), mutable.Map.empty) + + state.illegalAccessInstructions.find(insn => newRequests.forall(_.callsite.callsiteInstruction != insn)) match { + case None => + // why prepend: see changedMethodHasIllegalAccess + if (newRequests.nonEmpty) requests.prepend(method -> newRequests) + + case Some(notInlinedIllegalInsn) => + state.undoLog.rollback() + state.rootInlinedCallsiteWithWarning(notInlinedIllegalInsn, returnForwarderIfNoOther = true) match { + case Some(inlinedCallsite) => + val callsite = inlinedCallsite.eliminatedCallsite + val w = inlinedCallsite.warning.get + state.inlineLog.logRollback(callsite, s"Instruction ${AsmUtils.textify(notInlinedIllegalInsn)} would cause an IllegalAccessError, and is not selected for (or failed) inlining", state.outerCallsite(notInlinedIllegalInsn)) + if (w.emitWarning(compilerSettings)) + backendReporting.optimizerWarning( + callsite.callsitePosition, + w.toString + inlineChainSuffix(callsite, state.inlineChain(callsite.callsiteInstruction, skipForwarders = true)), + backendUtils.optimizerWarningSiteString(callsite)) + case _ => + // TODO: replace by dev warning 
after testing + assert(false, "should not happen") + } + } + + changedMethodHasIllegalAccess = false } } - InlineLog.print() + + overallChangedMethods } /** @@ -150,48 +495,48 @@ abstract class Inliner { * - Always remove the same request when breaking inlining cycles * - Perform inlinings in a consistent order */ - object callsiteOrdering extends Ordering[InlineRequest] { - override def compare(x: InlineRequest, y: InlineRequest): Int = { - val xCs = x.callsite - val yCs = y.callsite - val cls = xCs.callsiteClass.internalName compareTo yCs.callsiteClass.internalName + object callsiteOrdering extends Ordering[Callsite] { + override def compare(x: Callsite, y: Callsite): Int = { + if (x eq y) return 0 + + val cls = x.callsiteClass.internalName compareTo y.callsiteClass.internalName if (cls != 0) return cls - val name = xCs.callsiteMethod.name compareTo yCs.callsiteMethod.name + val name = x.callsiteMethod.name compareTo y.callsiteMethod.name if (name != 0) return name - val desc = xCs.callsiteMethod.desc compareTo yCs.callsiteMethod.desc + val desc = x.callsiteMethod.desc compareTo y.callsiteMethod.desc if (desc != 0) return desc def pos(c: Callsite) = c.callsiteMethod.instructions.indexOf(c.callsiteInstruction) - pos(xCs) - pos(yCs) + pos(x) - pos(y) } } + val inlineRequestOrdering = Ordering.by[InlineRequest, Callsite](_.callsite)(callsiteOrdering) + /** - * Returns the callsites that can be inlined. Ensures that the returned inline request graph does - * not contain cycles. + * Returns the callsites that can be inlined, grouped by method. Ensures that the returned inline + * request graph does not contain cycles. * * The resulting list is sorted such that the leaves of the inline request graph are on the left. * Once these leaves are inlined, the successive elements will be leaves, etc. 
*/ - private def collectAndOrderInlineRequests: List[InlineRequest] = { + private def collectAndOrderInlineRequests: mutable.Queue[(MethodNode, List[InlineRequest])] = { val requestsByMethod = selectCallsitesForInlining withDefaultValue Set.empty val elided = mutable.Set.empty[InlineRequest] def nonElidedRequests(methodNode: MethodNode): Set[InlineRequest] = requestsByMethod(methodNode) diff elided - def allCallees(r: InlineRequest): Set[MethodNode] = r.post.flatMap(allCallees).toSet + r.callsite.callee.get.callee - - /** + /* * Break cycles in the inline request graph by removing callsites. * * The list `requests` is traversed left-to-right, removing those callsites that are part of a * cycle. Elided callsites are also removed from the `inlineRequestsForMethod` map. */ - def breakInlineCycles: List[InlineRequest] = { + def breakInlineCycles: List[(MethodNode, List[InlineRequest])] = { // is there a path of inline requests from start to goal? - def isReachable(start: Set[MethodNode], goal: MethodNode): Boolean = { + def isReachable(start: MethodNode, goal: MethodNode): Boolean = { @tailrec def reachableImpl(check: Set[MethodNode], visited: Set[MethodNode]): Boolean = { if (check.isEmpty) false else { @@ -199,120 +544,108 @@ abstract class Inliner { if (x == goal) true else if (visited(x)) reachableImpl(check - x, visited) else { - val callees = nonElidedRequests(x).flatMap(allCallees) + val callees = nonElidedRequests(x).map(_.callsite.callee.get.callee) reachableImpl(check - x ++ callees, visited + x) } } } - reachableImpl(start, Set.empty) + reachableImpl(Set(start), Set.empty) } - val result = new mutable.ListBuffer[InlineRequest]() val requests = requestsByMethod.valuesIterator.flatten.toArray // sort the inline requests to ensure that removing requests is deterministic - java.util.Arrays.sort(requests, callsiteOrdering) + // Callsites within the same method are next to each other in the sorted array. 
+ java.util.Arrays.sort(requests, inlineRequestOrdering) + + val result = new mutable.ListBuffer[(MethodNode, List[InlineRequest])]() + var currentMethod: MethodNode = null + val currentMethodRequests = mutable.ListBuffer.empty[InlineRequest] for (r <- requests) { // is there a chain of inlining requests that would inline the callsite method into the callee? - if (isReachable(allCallees(r), r.callsite.callsiteMethod)) + if (isReachable(r.callsite.callee.get.callee, r.callsite.callsiteMethod)) elided += r - else - result += r - () + else { + val m = r.callsite.callsiteMethod + if (m == currentMethod) { + currentMethodRequests += r + } else { + if (currentMethod != null) + result += ((currentMethod, currentMethodRequests.toList)) + currentMethod = m + currentMethodRequests.clear() + currentMethodRequests += r + } + } } + if (currentMethod != null) + result += ((currentMethod, currentMethodRequests.toList)) result.toList } // sort the remaining inline requests such that the leaves appear first, then those requests // that become leaves, etc. 
- def leavesFirst(requests: List[InlineRequest], visited: Set[InlineRequest] = Set.empty): List[InlineRequest] = { - if (requests.isEmpty) Nil - else { - val (leaves, others) = requests.partition(r => { - val inlineRequestsForCallees = allCallees(r).flatMap(nonElidedRequests) - inlineRequestsForCallees.forall(visited) - }) - assert(leaves.nonEmpty, requests) - leaves ::: leavesFirst(others, visited ++ leaves) - } - } + def leavesFirst(requests: List[(MethodNode, List[InlineRequest])]): mutable.Queue[(MethodNode, List[InlineRequest])] = { + val result = mutable.Queue.empty[(MethodNode, List[InlineRequest])] + val visited = mutable.Set.empty[MethodNode] + + @tailrec def impl(toAdd: List[(MethodNode, List[InlineRequest])]): Unit = + if (toAdd.nonEmpty) { + val rest = mutable.ListBuffer.empty[(MethodNode, List[InlineRequest])] + toAdd.foreach { case r @ (_, rs) => + val callees = rs.iterator.map(_.callsite.callee.get.callee) + if (callees.forall(c => visited(c) || nonElidedRequests(c).isEmpty)) { + result += r + visited += r._1 + } else + rest += r + } + impl(rest.toList) + } - leavesFirst(breakInlineCycles) - } + impl(requests) + result + } - /** - * Given an InlineRequest(mainCallsite, post = List(postCallsite)), the postCallsite is a callsite - * in the method `mainCallsite.callee`. Once the mainCallsite is inlined into the target method - * (mainCallsite.callsiteMethod), we need to find the cloned callsite that corresponds to the - * postCallsite so we can inline that into the target method as well. - * - * However, it is possible that there is no cloned callsite at all that corresponds to the - * postCallsite, for example if the corresponding callsite already inlined. 
Example: - * - * def a() = 1 - * def b() = a() + 2 - * def c() = b() + 3 - * def d() = c() + 4 - * - * We have the following callsite objects in the call graph: - * - * c1 = a() in b - * c2 = b() in c - * c3 = c() in d - * - * Assume we have the following inline request - * r = InlineRequest(c3, - * post = List(InlineRequest(c2, - * post = List(InlineRequest(c1, post = Nil))))) - * - * But before inlining r, assume a separate InlineRequest(c2, post = Nil) is inlined first. We get - * - * c1' = a() in c // added to the call graph - * c1.inlinedClones += (c1' at c2) // remember that c1' was created when inlining c2 - * ~c2~ // c2 is removed from the call graph - * - * If we now inline r, we first inline c3. We get - * - * c1'' = a() in d // added to call graph - * c1'.inlinedClones += (c1'' at c3) // remember that c1'' was created when inlining c3 - * ~c3~ - * - * Now we continue with the post-requests for r, i.e. c2. - * - we try to find the clone of c2 that was created when inlining c3 - but there is none. 
c2 - * was already inlined before - * - we continue with the post-request of c2: c1 - * - we search for the callsite of c1 that was cloned when inlining c2, we find c1' - * - recursively we search for the callsite of c1' that was cloned when inlining c3, we find c1'' - * - so we create an inline request for c1'' - */ - def adaptPostRequestForMainCallsite(post: InlineRequest, mainCallsite: Callsite): List[InlineRequest] = { - def impl(post: InlineRequest, at: Callsite): List[InlineRequest] = { - post.callsite.inlinedClones.find(_.clonedWhenInlining == at) match { - case Some(clonedCallsite) => - List(InlineRequest(clonedCallsite.callsite, post.post, post.reason)) - case None => - post.post.flatMap(impl(_, post.callsite)).flatMap(impl(_, at)) - } + val sortedRequests = leavesFirst(breakInlineCycles) + val methodSizes = mutable.Map.empty[MethodNode, Int] + val result = mutable.Queue.empty[(MethodNode, List[InlineRequest])] + for ((method, rs) <- sortedRequests) { + val sizeOkRs = selectRequestsForMethodSize(method, rs, methodSizes) + if (sizeOkRs.nonEmpty) + result += ((method, sizeOkRs)) } - impl(post, mainCallsite) + result } class UndoLog(active: Boolean = true) { import java.util.{ArrayList => JArrayList} private var actions = List.empty[() => Unit] - private var methodStateSaved = false def apply(a: => Unit): Unit = if (active) actions = (() => a) :: actions def rollback(): Unit = if (active) actions.foreach(_.apply()) - def saveMethodState(methodNode: MethodNode): Unit = if (active && !methodStateSaved) { - methodStateSaved = true + def saveMethodState(ownerClass: ClassBType, methodNode: MethodNode): Unit = if (active) { val currentInstructions = methodNode.instructions.toArray val currentLocalVariables = new JArrayList(methodNode.localVariables) val currentTryCatchBlocks = new JArrayList(methodNode.tryCatchBlocks) val currentMaxLocals = methodNode.maxLocals val currentMaxStack = methodNode.maxStack + val currentIndyLambdaBodyMethods = 
indyLambdaBodyMethods(ownerClass.internalName, methodNode) + + // Instead of saving / restoring the CallGraph's callsites / closureInstantiations, we call + // callGraph.refresh on rollback. The call graph might not be up to date at the point where + // we save the method state, because it might be in the middle of inlining some callsites of + // that method. The call graph is only updated at the end (in the inliner loop). + // We don't save / restore the CallGraph's + // - callsitePositions + // - inlineAnnotatedCallsites + // - noInlineAnnotatedCallsites + // - staticallyResolvedInvokespecial + // These contain instructions, and we never remove from them. So when rolling back a method's + // instruction list, the old instructions are still in there. + apply { // `methodNode.instructions.clear()` doesn't work: it keeps the `prev` / `next` / `index` of // instruction nodes. `instructions.removeAll(true)` would work, but is not public. @@ -327,50 +660,19 @@ abstract class Inliner { methodNode.maxLocals = currentMaxLocals methodNode.maxStack = currentMaxStack - } - } - } - val NoUndoLogging = new UndoLog(active = false) + BackendUtils.clearDceDone(methodNode) + callGraph.refresh(methodNode, ownerClass) - /** - * Inline the callsite of an inlining request and its post-inlining requests. - * - * @return An inliner warning for each callsite that could not be inlined. 
- */ - def inline(request: InlineRequest, undo: UndoLog = NoUndoLogging): List[CannotInlineWarning] = { - def doInline(undo: UndoLog, callRollback: Boolean = false): List[CannotInlineWarning] = { - InlineLog.withInlineLogging(request) { - inlineCallsite(request.callsite, undo) - } { - val postRequests = request.post.flatMap(adaptPostRequestForMainCallsite(_, request.callsite)) - val warnings = postRequests.flatMap(inline(_, undo)) - if (callRollback && warnings.nonEmpty) { - undo.rollback() - InlineLog(InlineLogRollback(request, warnings)) - } - warnings + onIndyLambdaImplMethodIfPresent(ownerClass.internalName)(_.subtractOne(methodNode)) + if (currentIndyLambdaBodyMethods.nonEmpty) + onIndyLambdaImplMethod(ownerClass.internalName)(ms => ms(methodNode) = mutable.Map.empty ++= currentIndyLambdaBodyMethods) } } - - def inlinedByPost(insns: List[AbstractInsnNode]): Boolean = - insns.nonEmpty && insns.forall(ins => request.post.exists(_.callsite.callsiteInstruction == ins)) - - canInlineCallsite(request.callsite) match { - case None => - doInline(undo) - - case Some((_, illegalAccessInsns)) if inlinedByPost(illegalAccessInsns) => - // speculatively inline, roll back if an illegalAccessInsn cannot be eliminated - if (undo == NoUndoLogging) doInline(new UndoLog(), callRollback = true) - else doInline(undo) - - case Some((w, _)) => - InlineLog(InlineLogFail(request, w)) - List(w) - } } + val NoUndoLogging = new UndoLog(active = false) + /** * Copy and adapt the instructions of a method to a callsite. * @@ -381,21 +683,23 @@ abstract class Inliner { * @return A map associating instruction nodes of the callee with the corresponding cloned * instruction in the callsite method. 
*/ - def inlineCallsite(callsite: Callsite, undo: UndoLog = NoUndoLogging): Unit = { + def inlineCallsite(callsite: Callsite, aliasFrame: Option[AliasingFrame[Value]] = None, updateCallGraph: Boolean = true): Map[AbstractInsnNode, AbstractInsnNode] = { import callsite._ - val Right(callsiteCallee) = callsite.callee + val Right(callsiteCallee) = callsite.callee: @unchecked import callsiteCallee.{callee, calleeDeclarationClass, sourceFilePath} + val isStatic = isStaticMethod(callee) + // Inlining requires the callee not to have unreachable code, the analyzer used below should not // return any `null` frames. Note that inlining a method can create unreachable code. Example: // def f = throw e // def g = f; println() // println is unreachable after inlining f // If we have an inline request for a call to g, and f has been already inlined into g, we // need to run DCE on g's body before inlining g. - localOpt.minimalRemoveUnreachableCode(callee) + localOpt.minimalRemoveUnreachableCode(callee, calleeDeclarationClass.internalName) // If the callsite was eliminated by DCE, do nothing. 
- if (!callGraph.containsCallsite(callsite)) return + if (!callGraph.containsCallsite(callsite)) return Map.empty // New labels for the cloned instructions val labelsMap = cloneLabels(callee) @@ -406,40 +710,118 @@ abstract class Inliner { } case _ => false } - val (clonedInstructions, instructionMap, targetHandles) = cloneInstructions(callee, labelsMap, keepLineNumbers = sameSourceFile) + val (clonedInstructions, instructionMap, writtenLocals) = cloneInstructions(callee, labelsMap, callsitePosition, keepLineNumbers = sameSourceFile) + + val refLocals = mutable.BitSet.empty - // local vars in the callee are shifted by the number of locals at the callsite - val localVarShift = backendUtils.maxLocals(callsiteMethod) + val calleAsmType = asm.Type.getMethodType(callee.desc) + val calleeParamTypes = calleAsmType.getArgumentTypes + + val f = aliasFrame.getOrElse({ + val aliasAnalysis = new BasicAliasingAnalyzer(callsiteMethod, callsiteClass.internalName) + aliasAnalysis.frameAt(callsiteInstruction).asInstanceOf[AliasingFrame[Value]] + }) + + //// find out for which argument values on the stack there is already a local variable //// + + val calleeFirstNonParamSlot = BytecodeUtils.parametersSize(callee) + + // Maps callee-local-variable-index to callsite-local-variable-index. 
+ val calleeParamLocals = new Array[Int](calleeFirstNonParamSlot) + + // Counter for stack slots at the callsite holding the arguments (1 slot also for long / double) + var callsiteStackSlot = f.getLocals + f.getStackSize - calleeParamTypes.length - (if (isStatic) 0 else 1) + // Counter for param slots of the callee (long / double use 2 slots) + var calleeParamSlot = 0 + var nextLocalIndex = BackendUtils.maxLocals(callsiteMethod) + + val numLocals = f.getLocals + + // used later, but computed here + var skipReceiverNullCheck = receiverKnownNotNull || isStatic + + val paramSizes = (if (isStatic) Iterator.empty else Iterator(1)) ++ calleeParamTypes.iterator.map(_.getSize) + for (paramSize <- paramSizes) { + val min = f.aliasesOf(callsiteStackSlot).iterator.min + if (calleeParamSlot == 0 && !isStatic && min == 0) + skipReceiverNullCheck = true // no need to null-check `this` + val isWritten = writtenLocals(calleeParamSlot) || paramSize == 2 && writtenLocals(calleeParamSlot + 1) + if (min < numLocals && !isWritten) { + calleeParamLocals(calleeParamSlot) = min + } else { + calleeParamLocals(calleeParamSlot) = nextLocalIndex + nextLocalIndex += paramSize + } + if (paramSize == 2) + calleeParamLocals(calleeParamSlot + 1) = calleeParamLocals(calleeParamSlot) + 1 + callsiteStackSlot += 1 + calleeParamSlot += paramSize + } + + val numSavedParamSlots = BackendUtils.maxLocals(callsiteMethod) + calleeFirstNonParamSlot - nextLocalIndex + + // local var indices in the callee are adjusted + val localVarShift = BackendUtils.maxLocals(callsiteMethod) - numSavedParamSlots clonedInstructions.iterator.asScala foreach { - case varInstruction: VarInsnNode => varInstruction.`var` += localVarShift - case iinc: IincInsnNode => iinc.`var` += localVarShift - case _ => () + case varInstruction: VarInsnNode => + if (varInstruction.`var` < calleeParamLocals.length) + varInstruction.`var` = calleeParamLocals(varInstruction.`var`) + else { + varInstruction.`var` += localVarShift + if 
(varInstruction.getOpcode == ASTORE) refLocals += varInstruction.`var` + } + case iinc: IincInsnNode => + iinc.`var` += localVarShift + case _ => } // add a STORE instruction for each expected argument, including for THIS instance if any val argStores = new InsnList - var nextLocalIndex = backendUtils.maxLocals(callsiteMethod) - if (!isStaticMethod(callee)) { - if (!receiverKnownNotNull) { - argStores.add(new InsnNode(DUP)) + val nullOutLocals = new InsnList + val numCallsiteLocals = BackendUtils.maxLocals(callsiteMethod) + calleeParamSlot = 0 + if (!isStatic) { + def addNullCheck(): Unit = { val nonNullLabel = newLabelNode argStores.add(new JumpInsnNode(IFNONNULL, nonNullLabel)) argStores.add(new InsnNode(ACONST_NULL)) argStores.add(new InsnNode(ATHROW)) argStores.add(nonNullLabel) } - argStores.add(new VarInsnNode(ASTORE, nextLocalIndex)) - nextLocalIndex += 1 + val argLocalSlot = calleeParamLocals(calleeParamSlot) + if (argLocalSlot >= numCallsiteLocals) { + if (!skipReceiverNullCheck) { + argStores.add(new InsnNode(DUP)) + addNullCheck() + } + argStores.add(new VarInsnNode(ASTORE, argLocalSlot)) + nullOutLocals.add(new InsnNode(ACONST_NULL)) + nullOutLocals.add(new VarInsnNode(ASTORE, argLocalSlot)) + } else if (skipReceiverNullCheck) { + argStores.add(getPop(1)) + } else { + addNullCheck() + } + calleeParamSlot += 1 } - // We just use an asm.Type here, no need to create the MethodBType. 
- val calleAsmType = asm.Type.getMethodType(callee.desc) - val calleeParamTypes = calleAsmType.getArgumentTypes - for(argTp <- calleeParamTypes) { - val opc = argTp.getOpcode(ISTORE) // returns the correct xSTORE instruction for argTp - argStores.insert(new VarInsnNode(opc, nextLocalIndex)) // "insert" is "prepend" - the last argument is on the top of the stack - nextLocalIndex += argTp.getSize + val argLocalSlot = calleeParamLocals(calleeParamSlot) + if (argLocalSlot >= numCallsiteLocals) { + val opc = argTp.getOpcode(ISTORE) // returns the correct xSTORE instruction for argTp + argStores.insert(new VarInsnNode(opc, argLocalSlot)) // "insert" is "prepend" - the last argument is on the top of the stack + if (opc == ASTORE) { + nullOutLocals.add(new InsnNode(ACONST_NULL)) + nullOutLocals.add(new VarInsnNode(ASTORE, argLocalSlot)) + } + } else + argStores.insert(getPop(argTp.getSize)) + calleeParamSlot += argTp.getSize + } + + for (i <- refLocals) { + nullOutLocals.add(new InsnNode(ACONST_NULL)) + nullOutLocals.add(new VarInsnNode(ASTORE, i)) } clonedInstructions.insert(argStores) @@ -469,8 +851,10 @@ abstract class Inliner { val returnType = calleAsmType.getReturnType val hasReturnValue = returnType.getSort != asm.Type.VOID - val returnValueIndex = backendUtils.maxLocals(callsiteMethod) + backendUtils.maxLocals(callee) - nextLocalIndex += returnType.getSize + // Use a fresh slot for the return value. We could re-use local variable slot of the inlined + // code, but this makes some cleanups (in LocalOpt) fail / generate less clean code. + val returnValueIndex = BackendUtils.maxLocals(callsiteMethod) + BackendUtils.maxLocals(callee) - numSavedParamSlots + var needNullOutReturnValue: Boolean = false def returnValueStore(returnInstruction: AbstractInsnNode) = { val opc = returnInstruction.getOpcode match { @@ -487,9 +871,9 @@ abstract class Inliner { // of the values on the stack. // We don't need to worry about the method being too large for running an analysis. 
Callsites of // large methods are not added to the call graph. - val analyzer = new AsmAnalyzer(callee, calleeDeclarationClass.internalName) + val analyzer = new BasicAnalyzer(callee, calleeDeclarationClass.internalName) - for (originalReturn <- callee.instructions.iterator().asScala if isReturn(originalReturn)) { + for (originalReturn <- callee.instructions.iterator.asScala if isReturn(originalReturn)) { val frame = analyzer.frameAt(originalReturn) var stackHeight = frame.getStackSize @@ -512,6 +896,8 @@ abstract class Inliner { if (hasReturnValue) { returnReplacement add returnValueStore(originalReturn) stackHeight -= 1 + if (originalReturn.getOpcode() == ARETURN) + needNullOutReturnValue = true } // drop the rest of the stack @@ -527,85 +913,78 @@ abstract class Inliner { clonedInstructions.remove(inlinedReturn) } - undo.saveMethodState(callsiteMethod) + if (needNullOutReturnValue) { + nullOutLocals.add(new InsnNode(ACONST_NULL)) + nullOutLocals.add(new VarInsnNode(ASTORE, returnValueIndex)) + } + + val hasNullOutInsn = nullOutLocals.size > 0 // save here, the next line sets the size to 0 + clonedInstructions.add(nullOutLocals) callsiteMethod.instructions.insert(callsiteInstruction, clonedInstructions) callsiteMethod.instructions.remove(callsiteInstruction) - callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name, localVarShift).asJava) + val localIndexMap: Int => Int = oldIdx => { + if (oldIdx < 0) oldIdx + else if (oldIdx >= calleeParamLocals.length) oldIdx + localVarShift + else { + val newIdx = calleeParamLocals(oldIdx) + if (newIdx >= numCallsiteLocals) newIdx + else -1 // don't copy a local variable entry for params where an existing local of the callsite is re-used + } + } + callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name, localIndexMap).asJava) // prepend the handlers of the callee. 
the order of handlers matters: when an exception is thrown // at some instruction, the first handler guarding that instruction and having a matching exception // type is executed. prepending the callee's handlers makes sure to test those handlers first if // an exception is thrown in the inlined code. callsiteMethod.tryCatchBlocks.addAll(0, cloneTryCatchBlockNodes(callee, labelsMap).asJava) - callsiteMethod.maxLocals = backendUtils.maxLocals(callsiteMethod) + returnType.getSize + backendUtils.maxLocals(callee) + callsiteMethod.maxLocals = BackendUtils.maxLocals(callsiteMethod) + BackendUtils.maxLocals(callee) - numSavedParamSlots + returnType.getSize val maxStackOfInlinedCode = { // One slot per value is correct for long / double, see comment in the `analysis` package object. - val numStoredArgs = calleeParamTypes.length + (if (isStaticMethod(callee)) 0 else 1) - backendUtils.maxStack(callee) + callsiteStackHeight - numStoredArgs + val numStoredArgs = calleeParamTypes.length + (if (isStatic) 0 else 1) + BackendUtils.maxStack(callee) + callsiteStackHeight - numStoredArgs } val stackHeightAtNullCheck = { - // When adding a null check for the receiver, a DUP is inserted, which might cause a new maxStack. - // If the callsite has other argument values than the receiver on the stack, these are pop'ed - // and stored into locals before the null check, so in that case the maxStack doesn't grow. - val stackSlotForNullCheck = if (!isStaticMethod(callee) && !receiverKnownNotNull && calleeParamTypes.isEmpty) 1 else 0 + val stackSlotForNullCheck = + if (!skipReceiverNullCheck && calleeParamTypes.isEmpty) { + // When adding a null check for the receiver, a DUP is inserted, which might cause a new maxStack. + // If the callsite has other argument values than the receiver on the stack, these are pop'ed + // and stored into locals before the null check, so in that case the maxStack doesn't grow. 
+ 1 + } + else if (hasNullOutInsn) { + // after the return value is loaded, local variables and the return local variable are + // nulled out, which means `null` is loaded to the stack. the max stack height is the + // callsite stack height +1 (receiver consumed, result produced, null loaded), but +2 + // for static calls + if (isStatic) 2 else 1 + } + else 0 callsiteStackHeight + stackSlotForNullCheck } - callsiteMethod.maxStack = math.max(backendUtils.maxStack(callsiteMethod), math.max(stackHeightAtNullCheck, maxStackOfInlinedCode)) - - val added = addIndyLambdaImplMethod(callsiteClass.internalName, targetHandles) - undo { removeIndyLambdaImplMethod(callsiteClass.internalName, added) } + callsiteMethod.maxStack = math.max(BackendUtils.maxStack(callsiteMethod), math.max(stackHeightAtNullCheck, maxStackOfInlinedCode)) - callGraph.addIfMissing(callee, calleeDeclarationClass) - - def mapArgInfo(argInfo: (Int, ArgInfo)): Option[(Int, ArgInfo)] = argInfo match { - case lit @ (_, FunctionLiteral) => Some(lit) - case (argIndex, ForwardedParam(paramIndex)) => callsite.argInfos.get(paramIndex).map((argIndex, _)) - } - - // Add all invocation instructions and closure instantiations that were inlined to the call graph - callGraph.callsites(callee).valuesIterator foreach { originalCallsite => - val newCallsiteIns = instructionMap(originalCallsite.callsiteInstruction).asInstanceOf[MethodInsnNode] - val argInfos = originalCallsite.argInfos flatMap mapArgInfo - val newCallsite = originalCallsite.copy( - callsiteInstruction = newCallsiteIns, - callsiteMethod = callsiteMethod, - callsiteClass = callsiteClass, - argInfos = argInfos, - callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight - ) - val clonedCallsite = ClonedCallsite(newCallsite, callsite) - originalCallsite.inlinedClones += clonedCallsite - callGraph.addCallsite(newCallsite) - undo { - originalCallsite.inlinedClones -= clonedCallsite - callGraph.removeCallsite(newCallsite.callsiteInstruction, 
newCallsite.callsiteMethod) + lazy val callsiteLambdaBodyMethods = onIndyLambdaImplMethod(callsiteClass.internalName)(_.getOrElseUpdate(callsiteMethod, mutable.Map.empty)) + onIndyLambdaImplMethodIfPresent(calleeDeclarationClass.internalName)(methods => methods.getOrElse(callee, Nil) foreach { + case (indy, handle) => instructionMap.get(indy) match { + case Some(clonedIndy: InvokeDynamicInsnNode) => + callsiteLambdaBodyMethods(clonedIndy) = handle + case _ => } - } + }) - callGraph.closureInstantiations(callee).valuesIterator foreach { originalClosureInit => - val newIndy = instructionMap(originalClosureInit.lambdaMetaFactoryCall.indy).asInstanceOf[InvokeDynamicInsnNode] - val capturedArgInfos = originalClosureInit.capturedArgInfos flatMap mapArgInfo - val newClosureInit = ClosureInstantiation( - originalClosureInit.lambdaMetaFactoryCall.copy(indy = newIndy), - callsiteMethod, - callsiteClass, - capturedArgInfos) - originalClosureInit.inlinedClones += newClosureInit - callGraph.addClosureInstantiation(newClosureInit) - undo { - callGraph.removeClosureInstantiation(newClosureInit.lambdaMetaFactoryCall.indy, newClosureInit.ownerMethod) - } - } + // Don't remove the inlined instruction from callsitePositions, inlineAnnotatedCallsites so that + // the information is still there in case the method is rolled back (UndoLog). - // Remove the elided invocation from the call graph - callGraph.removeCallsite(callsiteInstruction, callsiteMethod) - undo { callGraph.addCallsite(callsite) } + if (updateCallGraph) callGraph.refresh(callsiteMethod, callsiteClass) // Inlining a method body can render some code unreachable, see example above in this method. 
BackendUtils.clearDceDone(callsiteMethod) + + instructionMap } /** @@ -620,7 +999,7 @@ abstract class Inliner { */ def earlyCanInlineCheck(callsite: Callsite): Option[CannotInlineWarning] = { import callsite.{callsiteClass, callsiteMethod} - val Right(callsiteCallee) = callsite.callee + val Right(callsiteCallee) = callsite.callee: @unchecked import callsiteCallee.{callee, calleeDeclarationClass} if (isSynchronizedMethod(callee)) { @@ -631,6 +1010,8 @@ abstract class Inliner { Some(StrictfpMismatch( calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated, callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc)) + } else if (callee.instructions.size == 0) { + Some(NoBytecode(calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated)) } else None } @@ -652,9 +1033,9 @@ abstract class Inliner { * - `Some((message, instructions))` if inlining `instructions` into the callsite method would * cause an IllegalAccessError */ - def canInlineCallsite(callsite: Callsite): Option[(CannotInlineWarning, List[AbstractInsnNode])] = { + def canInlineCallsite(callsite: Callsite): Option[CannotInlineWarning] = { import callsite.{callsiteClass, callsiteInstruction, callsiteMethod, callsiteStackHeight} - val Right(callsiteCallee) = callsite.callee + val Right(callsiteCallee) = callsite.callee: @unchecked import callsiteCallee.{callee, calleeDeclarationClass} def calleeDesc = s"${callee.name} of type ${callee.desc} in ${calleeDeclarationClass.internalName}" @@ -663,7 +1044,7 @@ abstract class Inliner { assert(callsiteInstruction.desc == callee.desc, methodMismatch) assert(!isConstructor(callee), s"Constructors cannot be inlined: $calleeDesc") assert(!BytecodeUtils.isAbstractMethod(callee), s"Callee is abstract: $calleeDesc") - assert(callsiteMethod.instructions.contains(callsiteInstruction), s"Callsite ${textify(callsiteInstruction)} is not an instruction of $calleeDesc") + 
assert(callsiteMethod.instructions.contains(callsiteInstruction), s"Callsite ${textify(callsiteInstruction)} is not an instruction of $callsiteClass.${callsiteMethod.name}${callsiteMethod.desc}") // When an exception is thrown, the stack is cleared before jumping to the handler. When // inlining a method that catches an exception, all values that were on the stack before the @@ -682,31 +1063,31 @@ abstract class Inliner { callsiteStackHeight > expectedArgs } - if (codeSizeOKForInlining(callsiteMethod, callee)) { + if (callsiteTooLargeAfterInlining(callsiteMethod, callee)) { val warning = ResultingMethodTooLarge( calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated, callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc) - Some((warning, Nil)) + Some(warning) } else if (!callee.tryCatchBlocks.isEmpty && stackHasNonParameters) { val warning = MethodWithHandlerCalledOnNonEmptyStack( calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated, callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc) - Some((warning, Nil)) + Some(warning) } else findIllegalAccess(callee.instructions, calleeDeclarationClass, callsiteClass) match { case Right(Nil) => None case Right(illegalAccessInsns) => - val warning = IllegalAccessInstruction( + val warning = IllegalAccessInstructions( calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated, - callsiteClass.internalName, illegalAccessInsns.head) - Some((warning, illegalAccessInsns)) + callsiteClass.internalName, illegalAccessInsns) + Some(warning) case Left((illegalAccessIns, cause)) => val warning = IllegalAccessCheckFailed( calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated, callsiteClass.internalName, illegalAccessIns, cause) - Some((warning, Nil)) + Some(warning) } } @@ -715,7 +1096,8 @@ abstract class Inliner { * (A1) C is public * (A2) C and D are members of the 
same run-time package */ - def classIsAccessible(accessed: BType, from: ClassBType): Either[OptimizerWarning, Boolean] = (accessed: @unchecked) match { + @tailrec + final def classIsAccessible(accessed: BType, from: ClassBType): Either[OptimizerWarning, Boolean] = (accessed: @unchecked) match { // TODO: A2 requires "same run-time package", which seems to be package + classloader (JVMS 5.3.). is the below ok? case c: ClassBType => c.isPublic.map(_ || c.packageInternalName == from.packageInternalName) case a: ArrayBType => classIsAccessible(a.elementType, from) @@ -723,7 +1105,7 @@ abstract class Inliner { } /** - * Check if a member reference is accessible from the [[destinationClass]], as defined in the + * Check if a member reference is accessible from the `destinationClass`, as defined in the * JVMS 5.4.4. Note that the class name in a field / method reference is not necessarily the * class in which the member is declared: * @@ -803,7 +1185,7 @@ abstract class Inliner { * error occurred */ def findIllegalAccess(instructions: InsnList, calleeDeclarationClass: ClassBType, destinationClass: ClassBType): Either[(AbstractInsnNode, OptimizerWarning), List[AbstractInsnNode]] = { - /** + /* * Check if `instruction` can be transplanted to `destinationClass`. 
* * If the instruction references a class, method or field that cannot be found in the @@ -832,12 +1214,18 @@ abstract class Inliner { fieldDeclClass = classBTypeFromParsedClassfile(fieldDeclClassNode) res <- memberIsAccessible(fieldNode.access, fieldDeclClass, fieldRefClass, destinationClass) } yield { + // ensure the result ClassBType is cached (for stack map frame calculation) + if (res) bTypeForDescriptorFromClassfile(fi.desc) res } case mi: MethodInsnNode => - if (mi.owner.charAt(0) == '[') Right(true) // array methods are accessible - else { + if (mi.owner.charAt(0) == '[') { + // ensure the result ClassBType is cached (for stack map frame calculation) + if (mi.name == "getClass") bTypeForDescriptorFromClassfile("Ljava/lang/Class;") + // array methods are accessible + Right(true) + } else { def canInlineCall(opcode: Int, methodFlags: Int, methodDeclClass: ClassBType, methodRefClass: ClassBType): Either[OptimizerWarning, Boolean] = { opcode match { case INVOKESPECIAL if mi.name != GenBCode.INSTANCE_CONSTRUCTOR_NAME => @@ -856,16 +1244,20 @@ abstract class Inliner { methodDeclClass = classBTypeFromParsedClassfile(methodDeclClassNode) res <- canInlineCall(mi.getOpcode, methodNode.access, methodDeclClass, methodRefClass) } yield { + // ensure the result ClassBType is cached (for stack map frame calculation) + if (res) bTypeForDescriptorFromClassfile(Type.getReturnType(mi.desc).getDescriptor) res } } case _: InvokeDynamicInsnNode if destinationClass == calleeDeclarationClass => - // within the same class, any indy instruction can be inlined - Right(true) + // Within the same class, any indy instruction can be inlined. Since that class is currently + // being emitted, we don't need to worry about caching BTypes (for stack map frame calculation). + // The necessary BTypes were cached during code gen. + Right(true) // does the InvokeDynamicInsnNode call LambdaMetaFactory? 
- case LambdaMetaFactoryCall(_, _, implMethod, _) => + case LambdaMetaFactoryCall(indy, _, implMethod, _, _) => // an indy instr points to a "call site specifier" (CSP) [1] // - a reference to a bootstrap method [2] // - bootstrap method name @@ -897,10 +1289,10 @@ abstract class Inliner { // - the receiver is the target of the CallSite // - the other argument values are those that were on the operand stack at the indy instruction (indyLambda: the captured values) // - // [1] http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.4.10 - // [2] http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.23 - // [3] http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokedynamic - // [4] http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-5.html#jvms-5.4.3 + // [1] https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.4.10 + // [2] https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.23 + // [3] https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokedynamic + // [4] https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-5.html#jvms-5.4.3 // We cannot generically check if an `invokedynamic` instruction can be safely inlined into // a different class, that depends on the bootstrap method. The Lookup object passed to the @@ -917,6 +1309,8 @@ abstract class Inliner { methodDeclClass = classBTypeFromParsedClassfile(methodDeclClassNode) res <- memberIsAccessible(methodNode.access, methodDeclClass, methodRefClass, destinationClass) } yield { + // ensure the result ClassBType is cached (for stack map frame calculation) + if (res) bTypeForDescriptorFromClassfile(Type.getReturnType(indy.desc).getDescriptor) res } @@ -924,6 +1318,7 @@ abstract class Inliner { case ci: LdcInsnNode => ci.cst match { case t: asm.Type => classIsAccessible(bTypeForDescriptorOrInternalNameFromClassfile(t.getInternalName), destinationClass) + // TODO: method handle -- check if method accessible? 
case _ => Right(true) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala index 576ceaf92a68..fb094ed18331 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,12 +17,14 @@ package opt import java.util.regex.Pattern import scala.annotation.tailrec -import scala.collection.JavaConverters._ -import scala.tools.asm.Opcodes -import scala.tools.asm.tree.{AbstractInsnNode, MethodInsnNode, MethodNode} +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import scala.tools.asm.Type +import scala.tools.asm.tree.MethodNode import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting.{CalleeNotFinal, OptimizerWarning} -import scala.tools.nsc.backend.jvm.opt.InlinerHeuristics.InlineSourceMatcher +import scala.tools.nsc.backend.jvm.analysis.BackendUtils +import scala.tools.nsc.backend.jvm.opt.InlinerHeuristics._ abstract class InlinerHeuristics extends PerRunInit { val postProcessor: PostProcessor @@ -34,16 +36,54 @@ abstract class InlinerHeuristics extends PerRunInit { lazy val inlineSourceMatcher: LazyVar[InlineSourceMatcher] = perRunLazy(this)(new InlineSourceMatcher(compilerSettings.optInlineFrom)) - final case class InlineRequest(callsite: Callsite, post: List[InlineRequest], reason: String) { - // invariant: all post inline requests denote callsites in the callee of the main callsite - for (pr <- post) assert(pr.callsite.callsiteMethod == callsite.callee.get.callee, s"Callsite method mismatch: main $callsite - post ${pr.callsite}") + final case class 
InlineRequest(callsite: Callsite, reason: InlineReason) { + // non-null if `-Yopt-log-inline` is active, it explains why the callsite was selected for inlining + def logText: String = + if (compilerSettings.optLogInline.isEmpty) null + else if (compilerSettings.optInlineHeuristics == "everything") "-Yopt-inline-heuristics:everything is enabled" + else { + val callee = callsite.callee.get + reason match { + case AnnotatedInline => + val what = if (callee.annotatedInline) "callee" else "callsite" + s"the $what is annotated `@inline`" + case HigherOrderWithLiteral | HigherOrderWithForwardedParam => + val paramNames = Option(callee.callee.parameters).map(_.asScala.map(_.name).toVector) + def param(i: Int) = { + def syn = s"" + paramNames.fold(syn)(v => v.applyOrElse(i, (_: Int) => syn)) + } + def samInfo(i: Int, sam: String, arg: String) = s"the argument for parameter (${param(i)}: $sam) is a $arg" + val argInfos = for ((i, sam) <- callee.samParamTypes; info <- callsite.argInfos.get(i).iterator) yield { + val argKind = info match { + case FunctionLiteral => "function literal" + case ForwardedParam(_) => "parameter of the callsite method" + case StaticallyKnownArray => "" // should not happen, just included to avoid potential crash + } + samInfo(i, sam.internalName.split('/').last, argKind) + } + s"the callee is a higher-order method, ${argInfos.mkString(", ")}" + case SyntheticForwarder => + "the callee is a synthetic forwarder method" + case TrivialMethod => + "the callee is a small trivial method" + case FactoryMethod => + "the callee is a factory method" + case BoxingForwarder => + "the callee is a forwarder method with boxing adaptation" + case GenericForwarder => + "the callee is a forwarder or alias method" + case RefParam => + "the callee has a Ref type parameter" + case KnownArrayOp => + "ScalaRuntime.array_apply and array_update are inlined if the array has a statically known type" + } + } } - def canInlineFromSource(sourceFilePath: Option[String], 
calleeDeclarationClass: InternalName) = { - compilerSettings.optLClasspath || - compilerSettings.optLProject && sourceFilePath.isDefined || - inlineSourceMatcher.get.allowFromSources && sourceFilePath.isDefined || - inlineSourceMatcher.get.allow(calleeDeclarationClass) + def canInlineFromSource(sourceFilePath: Option[String], calleeDeclarationClass: InternalName): Boolean = { + inlineSourceMatcher.get.allowFromSources && sourceFilePath.isDefined || + inlineSourceMatcher.get.allow(calleeDeclarationClass) } /** @@ -64,7 +104,7 @@ abstract class InlinerHeuristics extends PerRunInit { var requests = Set.empty[InlineRequest] callGraph.callsites(methodNode).valuesIterator foreach { case callsite @ Callsite(_, _, _, Right(Callee(callee, _, _, _, _, _, _, callsiteWarning)), _, _, _, pos, _, _) => - inlineRequest(callsite, requests) match { + inlineRequest(callsite) match { case Some(Right(req)) => requests += req case Some(Left(w)) => @@ -85,64 +125,54 @@ abstract class InlinerHeuristics extends PerRunInit { }).filterNot(_._2.isEmpty).toMap } - private def findSingleCall(method: MethodNode, such: MethodInsnNode => Boolean): Option[MethodInsnNode] = { - @tailrec def noMoreInvoke(insn: AbstractInsnNode): Boolean = { - insn == null || (!insn.isInstanceOf[MethodInsnNode] && noMoreInvoke(insn.getNext)) - } - @tailrec def find(insn: AbstractInsnNode): Option[MethodInsnNode] = { - if (insn == null) None - else insn match { - case mi: MethodInsnNode => - if (such(mi) && noMoreInvoke(insn.getNext)) Some(mi) - else None - case _ => - find(insn.getNext) + val maxSize = 3000 + val mediumSize = 2000 + val smallSize = 1000 + + def selectRequestsForMethodSize(method: MethodNode, requests: List[InlineRequest], methodSizes: mutable.Map[MethodNode, Int]): List[InlineRequest] = { + val byReason = requests.groupBy(_.reason) + var size = method.instructions.size + val res = mutable.ListBuffer.empty[InlineRequest] + def include(kind: InlineReason, limit: Int): Unit = { + var rs = 
byReason.getOrElse(kind, Nil) + while (rs.nonEmpty && size < limit) { + val r = rs.head + rs = rs.tail + val callee = r.callsite.callee.get.callee + val cSize = methodSizes.getOrElse(callee, callee.instructions.size) + if (size + cSize < limit) { + res += r + size += cSize + } } } - find(method.instructions.getFirst) - } - - private def traitStaticSuperAccessorName(s: String) = s + "$" - - private def traitMethodInvocation(method: MethodNode): Option[MethodInsnNode] = - findSingleCall(method, mi => mi.itf && mi.getOpcode == Opcodes.INVOKESPECIAL && traitStaticSuperAccessorName(mi.name) == method.name) - - private def superAccessorInvocation(method: MethodNode): Option[MethodInsnNode] = - findSingleCall(method, mi => mi.itf && mi.getOpcode == Opcodes.INVOKESTATIC && mi.name == traitStaticSuperAccessorName(method.name)) - - private def isTraitSuperAccessor(method: MethodNode, owner: ClassBType): Boolean = { - owner.isInterface == Right(true) && - BytecodeUtils.isStaticMethod(method) && - traitMethodInvocation(method).nonEmpty - } - - private def isMixinForwarder(method: MethodNode, owner: ClassBType): Boolean = { - owner.isInterface == Right(false) && - !BytecodeUtils.isStaticMethod(method) && - superAccessorInvocation(method).nonEmpty - } - - private def isTraitSuperAccessorOrMixinForwarder(method: MethodNode, owner: ClassBType): Boolean = { - isTraitSuperAccessor(method, owner) || isMixinForwarder(method, owner) + include(AnnotatedInline, maxSize) + include(SyntheticForwarder, maxSize) + include(KnownArrayOp, maxSize) + include(HigherOrderWithLiteral, maxSize) + include(HigherOrderWithForwardedParam, mediumSize) + include(RefParam, mediumSize) + include(BoxingForwarder, mediumSize) + include(FactoryMethod, mediumSize) + include(GenericForwarder, smallSize) + include(TrivialMethod, smallSize) + methodSizes(method) = size + res.toList } - /** * Returns the inline request for a callsite if the callsite should be inlined according to the * current heuristics 
(`-Yopt-inline-heuristics`). * - * The resulting inline request may contain post-inlining requests of callsites that in turn are - * also selected as individual inlining requests. - * * @return `None` if this callsite should not be inlined according to the active heuristic - * `Some(Left)` if the callsite cannot be inlined (for example because that would cause - * an IllegalAccessError) but should be according to the heuristic - * TODO: what if a downstream inline request would cause an IAE and we don't create an - * InlineRequest for the original callsite? new subclass of OptimizerWarning. - * `Some(Right)` if the callsite should be and can be inlined + * `Some(Left)` if the callsite should be inlined according to the heuristic, but cannot + * be inlined according to an early, incomplete check (see earlyCanInlineCheck) + * `Some(Right)` if the callsite should be inlined (it's still possible that the callsite + * cannot be inlined in the end, for example if it contains instructions that would + * cause an IllegalAccessError in the new class; this is checked in the inliner) */ - def inlineRequest(callsite: Callsite, selectedRequestsForCallee: Set[InlineRequest]): Option[Either[OptimizerWarning, InlineRequest]] = { - def requestIfCanInline(callsite: Callsite, reason: String): Option[Either[OptimizerWarning, InlineRequest]] = { + def inlineRequest(callsite: Callsite): Option[Either[OptimizerWarning, InlineRequest]] = { + def requestIfCanInline(callsite: Callsite, reason: InlineReason): Option[Either[OptimizerWarning, InlineRequest]] = { val callee = callsite.callee.get if (!callee.safeToInline) { if (callsite.isInlineAnnotated && callee.canInlineFromSource) { @@ -158,78 +188,103 @@ abstract class InlinerHeuristics extends PerRunInit { callsite.isInlineAnnotated))) } else None } else inliner.earlyCanInlineCheck(callsite) match { - case Some(w) => Some(Left(w)) + case Some(w) => + Some(Left(w)) case None => - val postInlineRequest: List[InlineRequest] = { - val postCall 
= - if (isTraitSuperAccessor(callee.callee, callee.calleeDeclarationClass)) { - // scala-dev#259: when inlining a trait super accessor, also inline the callsite to the default method - val implName = callee.callee.name.dropRight(1) - findSingleCall(callee.callee, mi => mi.itf && mi.getOpcode == Opcodes.INVOKESPECIAL && mi.name == implName) - } else { - // scala-dev#259: when inlining a mixin forwarder, also inline the callsite to the static super accessor - superAccessorInvocation(callee.callee) - } - postCall.flatMap(call => { - callGraph.addIfMissing(callee.callee, callee.calleeDeclarationClass) - val maybeCallsite = callGraph.findCallSite(callee.callee, call) - maybeCallsite.flatMap(requestIfCanInline(_, reason).flatMap(_.right.toOption)) - }).toList - } - Some(Right(InlineRequest(callsite, postInlineRequest, reason))) + Some(Right(InlineRequest(callsite, reason))) } } - // scala-dev#259: don't inline into static accessors and mixin forwarders - if (isTraitSuperAccessorOrMixinForwarder(callsite.callsiteMethod, callsite.callsiteClass)) None + // don't inline into synthetic forwarders (anonfun-adapted methods, bridges, etc). the heuristics + // will instead inline such methods at callsite. however, *do* inline into user-written forwarders + // or aliases, because otherwise it's too confusing for users looking at generated code, they will + // write a small test method and think the inliner doesn't work correctly. + val isGeneratedForwarder = + BytecodeUtils.isSyntheticMethod(callsite.callsiteMethod) && backendUtils.looksLikeForwarderOrFactoryOrTrivial(callsite.callsiteMethod, callsite.callsiteClass.internalName, allowPrivateCalls = true) > 0 || + backendUtils.isMixinForwarder(callsite.callsiteMethod, callsite.callsiteClass) // seems mixin forwarders are not synthetic... 
+ + if (isGeneratedForwarder) None else { val callee = callsite.callee.get compilerSettings.optInlineHeuristics match { case "everything" => - val reason = if (compilerSettings.optLogInline.isDefined) "the inline strategy is \"everything\"" else null - requestIfCanInline(callsite, reason) + requestIfCanInline(callsite, AnnotatedInline) case "at-inline-annotated" => - def reason = if (!compilerSettings.optLogInline.isDefined) null else { - val what = if (callee.annotatedInline) "callee" else "callsite" - s"the $what is annotated `@inline`" - } - if (callsite.isInlineAnnotated && !callsite.isNoInlineAnnotated) requestIfCanInline(callsite, reason) + if (callsite.isInlineAnnotated && !callsite.isNoInlineAnnotated) requestIfCanInline(callsite, AnnotatedInline) else None case "default" => - def reason = if (!compilerSettings.optLogInline.isDefined) null else { - if (callsite.isInlineAnnotated) { - val what = if (callee.annotatedInline) "callee" else "callsite" - s"the $what is annotated `@inline`" - } else { - val paramNames = Option(callee.callee.parameters).map(_.asScala.map(_.name).toVector) - def param(i: Int) = { - def syn = s"" - paramNames.fold(syn)(v => v.applyOrElse(i, (_: Int) => syn)) + def shouldInlineAnnotated = if (callsite.isInlineAnnotated) Some(AnnotatedInline) else None + + def shouldInlineHO = Option { + if (callee.samParamTypes.isEmpty) null + else { + val samArgs = callee.samParamTypes flatMap { + case (index, _) => Option.option2Iterable(callsite.argInfos.get(index)) + } + if (samArgs.isEmpty) null + else if (samArgs.exists(_ == FunctionLiteral)) HigherOrderWithLiteral + else HigherOrderWithForwardedParam + } + } + + def shouldInlineRefParam = + if (Type.getArgumentTypes(callee.callee.desc).exists(tp => coreBTypes.srRefCreateMethods.contains(tp.getInternalName))) Some(RefParam) + else None + + def shouldInlineArrayOp = + if (BackendUtils.isRuntimeArrayLoadOrUpdate(callsite.callsiteInstruction) && 
callsite.argInfos.get(1).contains(StaticallyKnownArray)) Some(KnownArrayOp) + else None + + def shouldInlineForwarder = Option { + // trait super accessors are excluded here because they contain an `invokespecial` of the default method in the trait. + // this instruction would have different semantics if inlined into some other class. + // we *do* inline trait super accessors if selected by a different heuristic. in this case, the `invokespecial` is then + // inlined in turn (chosen by the same heuristic), or the code is rolled back. but we don't inline them just because + // they are forwarders. + val isTraitSuperAccessor = backendUtils.isTraitSuperAccessor(callee.callee, callee.calleeDeclarationClass) + if (isTraitSuperAccessor) { + // inline static trait super accessors if the corresponding trait method is a forwarder or trivial (scala-dev#618) + { + val css = callGraph.callsites(callee.callee) + if (css.sizeIs == 1) css.head._2 else null + } match { + case null => null + case traitMethodCallsite => + val tmCallee = traitMethodCallsite.callee.get + val traitMethodForwarderKind = backendUtils.looksLikeForwarderOrFactoryOrTrivial( + tmCallee.callee, tmCallee.calleeDeclarationClass.internalName, allowPrivateCalls = false) + if (traitMethodForwarderKind > 0) GenericForwarder + else null } - def samInfo(i: Int, sam: String, arg: String) = s"the argument for parameter (${param(i)}: $sam) is a $arg" - val argInfos = for ((i, sam) <- callee.samParamTypes; info <- callsite.argInfos.get(i)) yield { - val argKind = info match { - case FunctionLiteral => "function literal" - case ForwardedParam(_) => "parameter of the callsite method" - } - samInfo(i, sam.internalName.split('/').last, argKind) + } + else { + val forwarderKind = backendUtils.looksLikeForwarderOrFactoryOrTrivial(callee.callee, callee.calleeDeclarationClass.internalName, allowPrivateCalls = false) + if (forwarderKind < 0) + null + else if (BytecodeUtils.isSyntheticMethod(callee.callee) || 
backendUtils.isMixinForwarder(callee.callee, callee.calleeDeclarationClass)) + SyntheticForwarder + else forwarderKind match { + case 1 => TrivialMethod + case 2 => FactoryMethod + case 3 => BoxingForwarder + case 4 => GenericForwarder } - s"the callee is a higher-order method, ${argInfos.mkString(", ")}" } } - def shouldInlineHO = callee.samParamTypes.nonEmpty && (callee.samParamTypes exists { - case (index, _) => callsite.argInfos.contains(index) - }) - if (!callsite.isNoInlineAnnotated && (callsite.isInlineAnnotated || shouldInlineHO)) requestIfCanInline(callsite, reason) - else None + + if (callsite.isNoInlineAnnotated) None + else { + val reason = shouldInlineAnnotated orElse shouldInlineHO orElse shouldInlineRefParam orElse shouldInlineArrayOp orElse shouldInlineForwarder + reason.flatMap(r => requestIfCanInline(callsite, r)) + } } } } /* - // using http://lihaoyi.github.io/Ammonite/ + // using https://lihaoyi.github.io/Ammonite/ load.ivy("com.google.guava" % "guava" % "18.0") val javaUtilFunctionClasses = { @@ -362,6 +417,18 @@ abstract class InlinerHeuristics extends PerRunInit { } object InlinerHeuristics { + sealed trait InlineReason + case object AnnotatedInline extends InlineReason + case object SyntheticForwarder extends InlineReason + case object TrivialMethod extends InlineReason + case object FactoryMethod extends InlineReason + case object BoxingForwarder extends InlineReason + case object GenericForwarder extends InlineReason + case object RefParam extends InlineReason + case object KnownArrayOp extends InlineReason + case object HigherOrderWithLiteral extends InlineReason + case object HigherOrderWithForwardedParam extends InlineReason + class InlineSourceMatcher(inlineFromSetting: List[String]) { // `terminal` is true if all remaining entries are of the same negation as this one case class Entry(pattern: Pattern, negated: Boolean, terminal: Boolean) { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala 
b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 9a143117791c..1a03b1a83bd9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,8 +15,8 @@ package backend.jvm package opt import scala.annotation.{switch, tailrec} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.tools.asm.Opcodes._ import scala.tools.asm.Type import scala.tools.asm.tree._ @@ -71,25 +71,32 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ * + enables downstream: * - stale stores (a stored value may not be loaded anymore) * - store-load pairs (a load n may now be right after a store n) - * + NOTE: copy propagation is only executed once, in the first fixpoint loop iteration. none of - * the other optimizations enables further copy prop. we still run it as part of the loop - * because it requires unreachable code to be eliminated. * - * stale stores (replace STORE by POP) + * stale stores (replace STORE by POP), rewrites `ClassTag(x).newArray`, inlines `array_apply/update` + * + enables UPSTREAM: + * - nullness optimizations (newArray rewrite or inlining may turn things non-null) * + enables downstream: * - push-pop (the new pop may be the single consumer for an instruction) + * - redundant casts (because rewrites `newArray`, the array type gets more precise) * - * redundant casts: eliminates casts that are statically known to succeed (uses type propagation) + * redundant casts and rewrite some intrinsics: eliminates casts that are statically known to + * succeed (uses type propagation), rewrites instanceof checks, rewrites intrinsics. 
* + enables UPSTREAM: * - box-unbox elimination (a removed checkcast may be a box consumer) + * - copy propagation (a removed checkcast may turn an upcasted local variable into an alias) * + enables downstream: - * - push-pop for closure allocation elimination (every indyLambda is followed by a checkcast, see scala/bug#9540) + * - push-pop for closure allocation elimination (every indyLambda is followed by a checkcast, + * see scala/bug#9540) + * - redundant casts (changing an instanceof to true/false removes branches and can make types + * of other values more precise) * * push-pop (when a POP is the only consumer of a value, remove the POP and its producer) * + enables UPSTREAM: * - stale stores (if a LOAD is removed, a corresponding STORE may become stale) * - box-unbox elimination (push-pop may eliminate a closure allocation, rendering a captured * box non-escaping) + * - redundant casts (Int.unbox(x) is replaced by `x.asInstanceOf[Integer]; pop`) + * - nullness (`x.intValue` is replaced by `if (x == null) throw null`) * + enables downstream: * - store-load pairs (a variable may become non-live) * - stale handlers (push-pop removes code) @@ -169,7 +176,7 @@ abstract class LocalOpt { * * @return A set containing the eliminated instructions */ - def minimalRemoveUnreachableCode(method: MethodNode): Boolean = { + def minimalRemoveUnreachableCode(method: MethodNode, ownerClassName: InternalName): Boolean = { // In principle, for the inliner, a single removeUnreachableCodeImpl would be enough. But that // would potentially leave behind stale handlers (empty try block) which is not legal in the // classfile. So we run both removeUnreachableCodeImpl and removeEmptyExceptionHandlers. @@ -180,7 +187,7 @@ abstract class LocalOpt { // handlers, see scaladoc of def methodOptimizations. Removing an live handler may render more // code unreachable and therefore requires running another round. 
def removalRound(): Boolean = { - val insnsRemoved = removeUnreachableCodeImpl(method) + val insnsRemoved = removeUnreachableCodeImpl(method, ownerClassName) if (insnsRemoved) { val removeHandlersResult = removeEmptyExceptionHandlers(method) if (removeHandlersResult.liveHandlerRemoved) removalRound() @@ -260,7 +267,7 @@ abstract class LocalOpt { currentTrace = after } - /** + /* * Runs the optimizations that depend on each other in a loop until reaching a fixpoint. See * comment in class [[LocalOpt]]. * @@ -270,10 +277,11 @@ abstract class LocalOpt { requestNullness: Boolean, requestDCE: Boolean, requestBoxUnbox: Boolean, + requestCopyProp: Boolean, requestStaleStores: Boolean, + requestRedundantCasts: Boolean, requestPushPop: Boolean, requestStoreLoad: Boolean, - firstIteration: Boolean, maxRecursion: Int = 10): (Boolean, Boolean) = { if (maxRecursion == 0) return (false, false) @@ -290,7 +298,7 @@ abstract class LocalOpt { val runDCE = (compilerSettings.optUnreachableCode && (requestDCE || nullnessOptChanged)) || compilerSettings.optBoxUnbox || compilerSettings.optCopyPropagation - val codeRemoved = if (runDCE) removeUnreachableCodeImpl(method) else false + val codeRemoved = if (runDCE) removeUnreachableCodeImpl(method, ownerClassName) else false traceIfChanged("dce") // BOX-UNBOX @@ -299,23 +307,23 @@ abstract class LocalOpt { traceIfChanged("boxUnbox") // COPY PROPAGATION - val runCopyProp = compilerSettings.optCopyPropagation && (firstIteration || boxUnboxChanged) + val runCopyProp = compilerSettings.optCopyPropagation && (requestCopyProp || boxUnboxChanged) val copyPropChanged = runCopyProp && copyPropagation(method, ownerClassName) traceIfChanged("copyProp") // STALE STORES val runStaleStores = compilerSettings.optCopyPropagation && (requestStaleStores || nullnessOptChanged || codeRemoved || boxUnboxChanged || copyPropChanged) - val storesRemoved = runStaleStores && eliminateStaleStores(method, ownerClassName) + val (storesRemoved, 
intrinsicRewrittenByStaleStores, callInlinedByStaleStores) = if (!runStaleStores) (false, false, false) else eliminateStaleStoresAndRewriteSomeIntrinsics(method, ownerClassName) traceIfChanged("staleStores") // REDUNDANT CASTS - val runRedundantCasts = compilerSettings.optRedundantCasts && (firstIteration || boxUnboxChanged) - val castRemoved = runRedundantCasts && eliminateRedundantCasts(method, ownerClassName) + val runRedundantCasts = compilerSettings.optRedundantCasts && (requestRedundantCasts || boxUnboxChanged || intrinsicRewrittenByStaleStores || callInlinedByStaleStores) + val (typeInsnChanged, intrinsicRewrittenByCasts) = if (!runRedundantCasts) (false, false) else eliminateRedundantCastsAndRewriteSomeIntrinsics(method, ownerClassName) traceIfChanged("redundantCasts") // PUSH-POP - val runPushPop = compilerSettings.optCopyPropagation && (requestPushPop || firstIteration || storesRemoved || castRemoved) - val pushPopRemoved = runPushPop && eliminatePushPop(method, ownerClassName) + val runPushPop = compilerSettings.optCopyPropagation && (requestPushPop || storesRemoved || typeInsnChanged) + val (pushPopRemoved, pushPopCastAdded, pushPopNullCheckAdded) = if (!runPushPop) (false, false, false) else eliminatePushPop(method, ownerClassName) traceIfChanged("pushPop") // STORE-LOAD PAIRS @@ -334,22 +342,25 @@ abstract class LocalOpt { traceIfChanged("simplifyJumps") // See doc comment in the beginning of this file (optimizations marked UPSTREAM) - val runNullnessAgain = boxUnboxChanged + val runNullnessAgain = boxUnboxChanged || callInlinedByStaleStores || pushPopNullCheckAdded val runDCEAgain = removeHandlersResult.liveHandlerRemoved || jumpsChanged - val runBoxUnboxAgain = boxUnboxChanged || castRemoved || pushPopRemoved || removeHandlersResult.liveHandlerRemoved + val runBoxUnboxAgain = boxUnboxChanged || typeInsnChanged || pushPopRemoved || removeHandlersResult.liveHandlerRemoved + val runCopyPropAgain = typeInsnChanged val runStaleStoresAgain = 
pushPopRemoved + val runRedundantCastsAgain = typeInsnChanged || pushPopCastAdded val runPushPopAgain = jumpsChanged val runStoreLoadAgain = jumpsChanged - val runAgain = runNullnessAgain || runDCEAgain || runBoxUnboxAgain || pushPopRemoved || runStaleStoresAgain || runPushPopAgain || runStoreLoadAgain + val runAgain = runNullnessAgain || runDCEAgain || runBoxUnboxAgain || runCopyPropAgain || runStaleStoresAgain || runRedundantCastsAgain || runPushPopAgain || runStoreLoadAgain val downstreamRequireEliminateUnusedLocals = runAgain && removalRound( requestNullness = runNullnessAgain, requestDCE = runDCEAgain, requestBoxUnbox = runBoxUnboxAgain, + requestCopyProp = runCopyPropAgain, requestStaleStores = runStaleStoresAgain, + requestRedundantCasts = runRedundantCastsAgain, requestPushPop = runPushPopAgain, requestStoreLoad = runStoreLoadAgain, - firstIteration = false, maxRecursion = maxRecursion - 1)._2 val requireEliminateUnusedLocals = downstreamRequireEliminateUnusedLocals || @@ -360,20 +371,21 @@ abstract class LocalOpt { storeLoadRemoved || removeHandlersResult.handlerRemoved - val codeChanged = nullnessOptChanged || codeRemoved || boxUnboxChanged || castRemoved || copyPropChanged || storesRemoved || pushPopRemoved || storeLoadRemoved || removeHandlersResult.handlerRemoved || jumpsChanged + val codeChanged = nullnessOptChanged || codeRemoved || boxUnboxChanged || copyPropChanged || storesRemoved || intrinsicRewrittenByStaleStores || callInlinedByStaleStores || typeInsnChanged || intrinsicRewrittenByCasts || pushPopRemoved || storeLoadRemoved || removeHandlersResult.handlerRemoved || jumpsChanged (codeChanged, requireEliminateUnusedLocals) } - // we run DCE even if the method is already in the `unreachableCodeEliminated` map: the DCE - // here is more thorough than `minimalRemoveUnreachableCode` that run before inlining. + // we run DCE even if `isDceDone(method)`: the DCE here is more thorough than + // `minimalRemoveUnreachableCode` that run before inlining. 
val (nullnessDceBoxesCastsCopypropPushpopOrJumpsChanged, requireEliminateUnusedLocals) = removalRound( requestNullness = true, requestDCE = true, requestBoxUnbox = true, + requestCopyProp = true, requestStaleStores = true, + requestRedundantCasts = true, requestPushPop = true, - requestStoreLoad = true, - firstIteration = true) + requestStoreLoad = true) if (compilerSettings.optUnreachableCode) BackendUtils.setDceDone(method) @@ -415,7 +427,7 @@ abstract class LocalOpt { */ def nullnessOptimizations(method: MethodNode, ownerClassName: InternalName): Boolean = { AsmAnalyzer.sizeOKForNullness(method) && { - lazy val nullnessAnalyzer = new AsmAnalyzer(method, ownerClassName, new NullnessAnalyzer(backendUtils.isNonNullMethodInvocation, method)) + lazy val nullnessAnalyzer = new NullnessAnalyzer(method, ownerClassName, backendUtils.isNonNullMethodInvocation, compilerSettings.optAssumeModulesNonNull) // When running nullness optimizations the method may still have unreachable code. Analyzer // frames of unreachable instructions are `null`. 
@@ -430,7 +442,7 @@ abstract class LocalOpt { // cannot change instructions while iterating, it gets the analysis out of synch (indexed by instructions) val toReplace = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]] - val it = method.instructions.iterator() + val it = method.instructions.iterator while (it.hasNext) it.next() match { case vi: VarInsnNode if isNull(vi, vi.`var`) => if (vi.getOpcode == ALOAD) @@ -482,6 +494,8 @@ abstract class LocalOpt { case _ => } + // We don't need to worry about CallGraph.closureInstantiations and + // BackendUtils.indyLambdaImplMethods, the removed instructions are not IndyLambdas def removeFromCallGraph(insn: AbstractInsnNode): Unit = insn match { case mi: MethodInsnNode => callGraph.removeCallsite(mi, method) case _ => @@ -493,7 +507,8 @@ abstract class LocalOpt { removeFromCallGraph(oldOp) } - toReplace.nonEmpty + val changed = toReplace.nonEmpty + changed } } @@ -503,15 +518,18 @@ abstract class LocalOpt { * When this method returns, each `labelNode.getLabel` has a status set whether the label is live * or not. This can be queried using `BackendUtils.isLabelReachable`. */ - def removeUnreachableCodeImpl(method: MethodNode): Boolean = { + def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: InternalName): Boolean = { val size = method.instructions.size // queue of instruction indices where analysis should start var queue = new Array[Int](8) var top = -1 def enq(i: Int): Unit = { - if (top == queue.length - 1) - queue = java.util.Arrays.copyOf(queue, queue.length * 2) + if (top == queue.length - 1) { + val nq = new Array[Int](queue.length * 2) + Array.copy(queue, 0, nq, 0, queue.length) + queue = nq + } top += 1 queue(top) = i } @@ -556,7 +574,7 @@ abstract class LocalOpt { * * However, the JVM spec does not require subroutines to `RET x` to their caller, they could return back to an * outer subroutine caller (nested subroutines), or `RETURN`, or use a static jump. 
Static analysis of subroutines - * is therefore complex (http://www21.in.tum.de/~kleing/papers/KleinW-TPHOLS03.pdf). + * is therefore complex (https://www21.in.tum.de/~kleing/papers/KleinW-TPHOLS03.pdf). * * The asm.Analyzer however makes the assumption that subroutines only occur in the shape emitted by early * javac, i.e., `RET` always returns to the next enclosing caller. So we do that as well. @@ -566,7 +584,7 @@ abstract class LocalOpt { while (top != -1) { val insnIndex = deq() val insn = method.instructions.get(insnIndex) - visited.add(insnIndex) + visited.addOne(insnIndex) if (insn.getOpcode == -1) { // frames, labels, line numbers enqInsnIndex(insnIndex + 1) @@ -629,7 +647,9 @@ abstract class LocalOpt { changed = true insn match { case invocation: MethodInsnNode => callGraph.removeCallsite(invocation, method) - case indy: InvokeDynamicInsnNode => callGraph.removeClosureInstantiation(indy, method) + case indy: InvokeDynamicInsnNode => + callGraph.removeClosureInstantiation(indy, method) + removeIndyLambdaImplMethod(ownerClassName, method, indy) case _ => } } @@ -646,40 +666,126 @@ abstract class LocalOpt { * Eliminate `CHECKCAST` instructions that are statically known to succeed. This is safe if the * tested object is null: `null.asInstanceOf` always succeeds. * + * Replace `INSTANCEOF` instructions with `ICONST_0/1` if the result is statically known. + * + * Since this optimization runs a type analysis, we use it to rewrite some intrinsic method calls + * - `java.lang.reflect.Arrays.getLength(x)` when `x` is statically known to be an array: + * rewrite to `ARRAYLENGTH` + * - `x.getClass` when `x` is statically known to be a primitive array. Rewrite to `LDC`. + * * The type of the tested object is determined using a NonLubbingTypeFlowAnalyzer. Note that this * analysis collapses LUBs of non-equal references types to Object for simplicity. Example: * given `B <: A <: Object`, the cast in `(if (..) new B else new A).asInstanceOf[A]` would not * be eliminated. 
* - * Note: we cannot replace `INSTANCEOF` tests by only looking at the types, `null.isInstanceOf` - * always returns false, so we'd also need nullness information. + * Note: to rewrite `INSTANCEOF` tests, we also run a nullness analyzer. We need to know nullness + * because `null.isInstanceOf` is always `false`. + * + * Returns two booleans (typeInsnChanged, intrinsicRewritten) */ - def eliminateRedundantCasts(method: MethodNode, owner: InternalName): Boolean = { - AsmAnalyzer.sizeOKForBasicValue(method) && { - def isSubType(aRefDesc: String, bClass: InternalName): Boolean = aRefDesc == bClass || bClass == ObjectRef.internalName || { - (bTypeForDescriptorOrInternalNameFromClassfile(aRefDesc) conformsTo classBTypeFromParsedClassfile(bClass)).getOrElse(false) + def eliminateRedundantCastsAndRewriteSomeIntrinsics(method: MethodNode, owner: InternalName): (Boolean, Boolean) = if (!AsmAnalyzer.sizeOKForNullness(method)) (false, false) else { + def isSubType(aDescOrIntN: String, bDescOrIntN: String): Boolean = { + // Neither a nor b may be descriptors for primitive types. INSTANCEOF and CHECKCAST require + // - "objectref must be of type reference" and + // - "constant pool item must be a class, array, or interface type" + // However we may get a mix of descriptors and internal names. The typeAnalyzer returns + // descriptors (`Lfoo/C;`), the descriptor in a TypeInsn is an internal name (`foo/C`) or an + // array descriptor. 
+ def sameClass(a: String, b: String) = { + a == b || + a.length - 2 == b.length && a(0) == 'L' && a.last == ';' && a.regionMatches(1, b, 0, b.length) || + b.length - 2 == a.length && b(0) == 'L' && b.last == ';' && b.regionMatches(1, a, 0, a.length) + } + sameClass(aDescOrIntN, bDescOrIntN) || sameClass(bDescOrIntN, ObjectRef.internalName) || + bTypeForDescriptorOrInternalNameFromClassfile(aDescOrIntN).conformsTo(bTypeForDescriptorOrInternalNameFromClassfile(bDescOrIntN)).getOrElse(false) + } + + // precondition: !isSubType(aDescOrIntN, bDescOrIntN) + def isUnrelated(aDescOrIntN: String, bDescOrIntN: String): Boolean = { + @tailrec + def impl(aTp: BType, bTp: BType): Boolean = { + ((aTp, bTp): @unchecked) match { + case (aa: ArrayBType, ba: ArrayBType) => + impl(aa.elementType, ba.elementType) + case (act: ClassBType, bct: ClassBType) => + val noItf = act.isInterface.flatMap(aIf => bct.isInterface.map(bIf => !aIf && !bIf)).getOrElse(false) + noItf && !bct.conformsTo(act).getOrElse(true) + case (_: PrimitiveBType, _: RefBType) | (_: RefBType, _: PrimitiveBType) => + true + case (_: PrimitiveBType, _: PrimitiveBType) => + // note that this case happens for array element types. [S does not conform to [I. 
+ aTp != bTp + case _ => + false + } } + impl( + bTypeForDescriptorOrInternalNameFromClassfile(aDescOrIntN), + bTypeForDescriptorOrInternalNameFromClassfile(bDescOrIntN)) + } - lazy val typeAnalyzer = new NonLubbingTypeFlowAnalyzer(method, owner) + lazy val typeAnalyzer = new NonLubbingTypeFlowAnalyzer(method, owner) + lazy val nullnessAnalyzer = new NullnessAnalyzer(method, owner, backendUtils.isNonNullMethodInvocation, compilerSettings.optAssumeModulesNonNull) - // cannot remove instructions while iterating, it gets the analysis out of synch (indexed by instructions) - val toRemove = mutable.Set.empty[TypeInsnNode] + // cannot remove instructions while iterating, it gets the analysis out of synch (indexed by instructions) + val toReplace = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]] - val it = method.instructions.iterator() - while (it.hasNext) it.next() match { - case ti: TypeInsnNode if ti.getOpcode == CHECKCAST => - val frame = typeAnalyzer.frameAt(ti) - val valueTp = frame.getValue(frame.stackTop) - if (valueTp.isReference && isSubType(valueTp.getType.getDescriptor, ti.desc)) { - toRemove += ti + val it = method.instructions.iterator + while (it.hasNext) it.next() match { + case ti: TypeInsnNode => + val opc = ti.getOpcode + if (opc == CHECKCAST || opc == INSTANCEOF) { + lazy val valueNullness = { + val frame = nullnessAnalyzer.frameAt(ti) + frame.getValue(frame.stackTop) + } + if (opc == INSTANCEOF && valueNullness == NullValue) { + toReplace(ti) = List(getPop(1), new InsnNode(ICONST_0)) + } else { + val valueDesc = typeAnalyzer.preciseAaloadTypeDesc({ + val frame = typeAnalyzer.frameAt(ti) + frame.getValue(frame.stackTop) + }) + if (isSubType(valueDesc, ti.desc)) { + if (opc == CHECKCAST) { + toReplace(ti) = Nil + } else if (valueNullness == NotNullValue) { + toReplace(ti) = List(getPop(1), new InsnNode(ICONST_1)) + } + } else if (opc == INSTANCEOF && isUnrelated(valueDesc, ti.desc)) { + // the two types are unrelated, so the instance check 
is known to fail + toReplace(ti) = List(getPop(1), new InsnNode(ICONST_0)) + } } + } - case _ => - } + case mi: MethodInsnNode => + // Rewrite some known method invocations + if (BackendUtils.isArrayGetLengthOnStaticallyKnownArray(mi, typeAnalyzer)) { + // Array.getLength(x) where x is known to be an array + toReplace(mi) = List(new InsnNode(ARRAYLENGTH)) + } else { + // x.getClass where x is statically known to be a primitive array + val getClassTp = BackendUtils.getClassOnStaticallyKnownPrimitiveArray(mi, typeAnalyzer) + if (getClassTp != null) { + toReplace(mi) = List(getPop(1), new LdcInsnNode(getClassTp)) + } + } + + case _ => + } - toRemove foreach method.instructions.remove - toRemove.nonEmpty + var typeInsnChanged = false + var intrinsicRewritten = false + + for ((oldOp, newOp) <- toReplace) { + if (oldOp.isInstanceOf[TypeInsnNode]) typeInsnChanged = true + else if (oldOp.isInstanceOf[MethodInsnNode]) intrinsicRewritten = true + for (n <- newOp) method.instructions.insertBefore(oldOp, n) + method.instructions.remove(oldOp) } + + (typeInsnChanged, intrinsicRewritten) } } @@ -690,7 +796,7 @@ object LocalOptImpls { * * There are no executable instructions that we can assume don't throw (eg ILOAD). The JVM spec * basically says that a VirtualMachineError may be thrown at any time: - * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.3 + * https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.3 * * Note that no instructions are eliminated. * @@ -700,7 +806,8 @@ object LocalOptImpls { * before, so that `BackendUtils.isLabelReachable` gives a correct answer. */ def removeEmptyExceptionHandlers(method: MethodNode): RemoveHandlersResult = { - /** True if there exists code between start and end. */ + /* True if there exists code between start and end. 
*/ + @tailrec def containsExecutableCode(start: AbstractInsnNode, end: LabelNode): Boolean = { start != end && ((start.getOpcode: @switch) match { // FrameNode, LabelNode and LineNumberNode have opcode == -1. @@ -711,7 +818,7 @@ object LocalOptImpls { var result: RemoveHandlersResult = RemoveHandlersResult.NoneRemoved - val handlersIter = method.tryCatchBlocks.iterator() + val handlersIter = method.tryCatchBlocks.iterator while (handlersIter.hasNext) { val handler = handlersIter.next() if (!containsExecutableCode(handler.start, handler.end)) { @@ -758,7 +865,7 @@ object LocalOptImpls { } val initialNumVars = method.localVariables.size - val localsIter = method.localVariables.iterator() + val localsIter = method.localVariables.iterator while (localsIter.hasNext) { val local = localsIter.next() val index = local.index @@ -798,7 +905,7 @@ object LocalOptImpls { // Ensure the length of `renumber`. Unused variable indices are mapped to -1. val minLength = if (isWide) index + 2 else index + 1 - for (i <- renumber.length until minLength) renumber += -1 + for (_ <- renumber.length until minLength) renumber += -1 renumber(index) = index if (isWide) renumber(index + 1) = index @@ -810,7 +917,7 @@ object LocalOptImpls { val firstLocalIndex = parametersSize(method) for (i <- 0 until firstLocalIndex) renumber += i // parameters and `this` are always used. 
- method.instructions.iterator().asScala foreach { + method.instructions.iterator.asScala foreach { case VarInstruction(varIns, slot) => addVar(varIns, slot) case _ => } @@ -831,12 +938,13 @@ object LocalOptImpls { else { // update variable instructions according to the renumber table method.maxLocals = nextIndex - method.instructions.iterator().asScala.foreach { + method.instructions.iterator.asScala.foreach { case VarInstruction(varIns, slot) => val oldIndex = slot if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex) varIns match { case vi: VarInsnNode => vi.`var` = renumber(slot) case ii: IincInsnNode => ii.`var` = renumber(slot) + case x => throw new MatchError(x) } case _ => } @@ -854,15 +962,16 @@ object LocalOptImpls { * lexically preceding label declaration. */ def removeEmptyLineNumbers(method: MethodNode): Boolean = { + @tailrec def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match { case null => true - case l: LineNumberNode => true + case _: LineNumberNode => true case n if n.getOpcode >= 0 => false case n => isEmpty(n) } val initialSize = method.instructions.size - val iterator = method.instructions.iterator() + val iterator = method.instructions.iterator var previousLabel: LabelNode = null while (iterator.hasNext) { iterator.next match { @@ -882,6 +991,7 @@ object LocalOptImpls { * Apply various simplifications to branching instructions. 
*/ def simplifyJumps(method: MethodNode): Boolean = { + val allHandlers = method.tryCatchBlocks.asScala.toSet // A set of all exception handlers that guard the current instruction, required for simplifyGotoReturn @@ -889,7 +999,7 @@ object LocalOptImpls { val jumpInsns = mutable.LinkedHashMap.empty[JumpInsnNode, Boolean] - for (insn <- method.instructions.iterator().asScala) insn match { + for (insn <- method.instructions.iterator.asScala) insn match { case l: LabelNode => activeHandlers ++= allHandlers.filter(_.start == l) activeHandlers = activeHandlers.filter(_.end != l) @@ -909,7 +1019,7 @@ object LocalOptImpls { } def removeJumpFromMap(jump: JumpInsnNode) = { - jumpInsns.remove(jump) + jumpInsns.subtractOne(jump) _jumpTargets = null } @@ -918,7 +1028,7 @@ object LocalOptImpls { removeJumpFromMap(jump) } - /** + /* * Removes a conditional jump if it is followed by a GOTO to the same destination. * * CondJump l; [nops]; GOTO l; [...] @@ -939,7 +1049,7 @@ object LocalOptImpls { case _ => false } - /** + /* * Replace jumps to a sequence of GOTO instructions by a jump to the final destination. * * {{{ @@ -961,7 +1071,7 @@ object LocalOptImpls { case _ => false } - /** + /* * Eliminates unnecessary jump instructions * * {{{ @@ -979,7 +1089,7 @@ object LocalOptImpls { case _ => false } - /** + /* * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch * and eliminates the GOTO. * @@ -1010,7 +1120,7 @@ object LocalOptImpls { case _ => false } - /** + /* * Inlines xRETURN and ATHROW * * {{{ @@ -1021,7 +1131,7 @@ object LocalOptImpls { * inlining is only done if the GOTO instruction is not part of a try block, otherwise the * rewrite might change the behavior. 
For xRETURN, the reason is that return instructions may throw * an IllegalMonitorStateException, as described here: - * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return + * https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return */ def simplifyGotoReturn(instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match { case Goto(jump) => @@ -1038,20 +1148,20 @@ object LocalOptImpls { case _ => false }) - /** - * Replace conditional jump instructions with GOTO or NOP if statically known to be true or false. - * - * {{{ - * ICONST_0; IFEQ l; - * => ICONST_0; POP; GOTO l; - * - * ICONST_1; IFEQ l; - * => ICONST_1; POP; - * }}} - * - * Note that the LOAD/POP pairs will be removed later by `eliminatePushPop`, and the code between - * the GOTO and `l` will be removed by DCE (if it's not jumped into from somewhere else). - */ + /* + * Replace conditional jump instructions with GOTO or NOP if statically known to be true or false. + * + * {{{ + * ICONST_0; IFEQ l; + * => ICONST_0; POP; GOTO l; + * + * ICONST_1; IFEQ l; + * => ICONST_1; POP; + * }}} + * + * Note that the LOAD/POP pairs will be removed later by `eliminatePushPop`, and the code between + * the GOTO and `l` will be removed by DCE (if it's not jumped into from somewhere else). + */ def simplifyConstantConditions(instruction: AbstractInsnNode): Boolean = { def replace(jump: JumpInsnNode, success: Boolean): Boolean = { if (success) method.instructions.insert(jump, new JumpInsnNode(GOTO, jump.label)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala deleted file mode 100644 index 6d49db505438..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc.backend.jvm.opt - -import scala.collection.mutable.Map -import scala.collection.JavaConverters._ -import java.util.{LinkedHashMap, Collections, Map => JMap} - -object LruMap{ - def apply[K,V](maxSize:Int, threadsafe:Boolean): Map[K,V] = { - require (maxSize > 0) - val basic = new LruMapImpl[K,V](maxSize) - val threaded = if (threadsafe) Collections.synchronizedMap(basic) else basic - - threaded.asScala - } - - private class LruMapImpl[K,V](maxSize: Int) extends LinkedHashMap[K,V] { - override def removeEldestEntry(eldest: JMap.Entry[K, V]): Boolean = { - size() > maxSize - } - } -} diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala index c9b29c316897..857f1220cee6 100644 --- a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,7 @@ package scala.tools.nsc.classpath import java.net.URL +import scala.collection.immutable.ArraySeq.unsafeWrapArray import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.FatalError import scala.reflect.io.AbstractFile @@ -29,7 +30,7 @@ import scala.tools.nsc.util.{ClassPath, ClassRepresentation, EfficientClassPath} */ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { case Some(x) => x } @@ -39,7 +40,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) } - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. + // This method is performance sensitive as it is used by sbt's ExtractDependencies phase. override def findClass(className: String): Option[ClassRepresentation] = { // workaround a performance bug in exiting versions of Zinc. 
// https://github.com/sbt/zinc/issues/757 @@ -53,7 +54,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { } if (noByteCode) None else { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) val packageName = PackageName(pkg) def findEntry(isSource: Boolean): Option[ClassRepresentation] = { @@ -116,7 +117,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { } } - val distinctPackages: Seq[PackageEntry] = if (packages == null) Nil else packages.toArray(new Array[PackageEntry](packages.size())) + val distinctPackages: Seq[PackageEntry] = if (packages == null) Nil else unsafeWrapArray(packages.toArray(new Array[PackageEntry](packages.size()))) val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) ClassPathEntries(distinctPackages, distinctClassesAndSources) } @@ -126,7 +127,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { * creates an entry containing both of them. If there would be more than one class or source * entries for the same class it always would use the first entry of each type found on a classpath. 
*/ - private def mergeClassesAndSources(entries: Seq[ClassRepresentation]): Seq[ClassRepresentation] = { + private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { var count = 0 val indices = new java.util.HashMap[String, Int]((entries.size * 1.25).toInt) val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) @@ -149,7 +150,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { count += 1 } } - if (mergedEntries isEmpty) Nil else mergedEntries.toIndexedSeq + if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq } private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { @@ -161,7 +162,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { } { if (seenNames.add(entry.name)) entriesBuffer += entry } - if (entriesBuffer isEmpty) Nil else entriesBuffer.toIndexedSeq + if (entriesBuffer.isEmpty) Nil else entriesBuffer.toIndexedSeq } } diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala index 54aa37e6cb71..ffbd6f850032 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,15 +12,16 @@ package scala.tools.nsc.classpath +import scala.annotation.unused import scala.reflect.io.AbstractFile import scala.tools.nsc.util.ClassRepresentation -case class ClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepresentation]) +case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) object ClassPathEntries { import scala.language.implicitConversions // to have working unzip method - implicit def entry2Tuple(entry: ClassPathEntries): (Seq[PackageEntry], Seq[ClassRepresentation]) = (entry.packages, entry.classesAndSources) + implicit def entry2Tuple(entry: ClassPathEntries): (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (entry.packages, entry.classesAndSources) val empty = ClassPathEntries(Seq.empty, Seq.empty) } @@ -78,10 +79,10 @@ private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry private[nsc] trait NoSourcePaths { final def asSourcePathString: String = "" - final private[nsc] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty + final private[nsc] def sources(@unused inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty } private[nsc] trait NoClassPaths { final def findClassFile(className: String): Option[AbstractFile] = None - private[nsc] final def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty + private[nsc] final def classes(@unused inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty } diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala index 9e27c45e6ff0..f463c9e8aa5a 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,7 @@ package scala.tools.nsc.classpath import scala.reflect.io.{AbstractFile, VirtualDirectory} import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils.AbstractFileOps -import scala.tools.nsc.util.ClassPath +import scala.tools.nsc.util.ClassPath, ClassPath.{expandDir, expandPath} /** * Provides factory methods for classpath. When creating classpath instances for a given path, @@ -23,9 +23,6 @@ import scala.tools.nsc.util.ClassPath */ class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry) { - @deprecated("for bincompat in 2.12.x series", "2.12.9") // TODO remove from 2.13.x - def this(settings: Settings) = this(settings, new CloseableRegistry) - /** * Create a new classpath based on the abstract file. */ @@ -40,11 +37,6 @@ class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry dir <- Option(settings.pathFactory.getDirectory(file)) } yield createSourcePath(dir) - - def expandPath(path: String, expandStar: Boolean = true): List[String] = scala.tools.nsc.util.ClassPath.expandPath(path, expandStar) - - def expandDir(extdir: String): List[String] = scala.tools.nsc.util.ClassPath.expandDir(extdir) - def contentsOfDirsInPath(path: String): List[ClassPath] = for { dir <- expandPath(path, expandStar = false) @@ -53,18 +45,18 @@ class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry } yield newClassPath(entry) def classesInExpandedPath(path: String): IndexedSeq[ClassPath] = - classesInPathImpl(path, expand = true).toIndexedSeq + classesInPathImpl(path, expandStar = true).toIndexedSeq - def classesInPath(path: String) = classesInPathImpl(path, expand = false) + def classesInPath(path: String): List[ClassPath] = classesInPathImpl(path, expandStar = false) - def classesInManifest(useManifestClassPath: Boolean) = + def classesInManifest(useManifestClassPath: 
Boolean): List[ClassPath] = if (useManifestClassPath) scala.tools.nsc.util.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) else Nil // Internal - protected def classesInPathImpl(path: String, expand: Boolean) = + protected def classesInPathImpl(path: String, expandStar: Boolean): List[ClassPath] = for { - file <- expandPath(path, expand) + file <- expandPath(path, expandStar) dir <- { def asImage = if (file.endsWith(".jimage")) Some(settings.pathFactory.getFile(file)) else None Option(settings.pathFactory.getDirectory(file)).orElse(asImage) @@ -77,13 +69,10 @@ class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry else if (file.isDirectory) DirectorySourcePath(file.file) else - sys.error(s"Unsupported sourcepath element: $file") + throw new IllegalArgumentException(s"Unsupported sourcepath element: $file") } object ClassPathFactory { - @deprecated("for bincompat in 2.12.x series", "2.12.9") // TODO remove from 2.13.x - def newClassPath(file: AbstractFile, settings: Settings): ClassPath = - newClassPath(file, settings, new CloseableRegistry) def newClassPath(file: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry): ClassPath = file match { case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) case _ => @@ -92,6 +81,6 @@ object ClassPathFactory { else if (file.isDirectory) DirectoryClassPath(file.file) else - sys.error(s"Unsupported classpath element: $file") + throw new IllegalArgumentException(s"Unsupported classpath element: $file") } } diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index e027748343fb..ab821e4e6752 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,8 +15,9 @@ package scala.tools.nsc.classpath import java.io.{Closeable, File} import java.net.{URI, URL} import java.nio.file._ +import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.internal.JDK9Reflectors import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile} import scala.tools.nsc.CloseableRegistry @@ -41,12 +42,13 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientCla protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] protected def getSubDir(dirName: String): Option[F] protected def listChildren(dir: F, filter: Option[F => Boolean] = None): Array[F] + protected def hasChild(dir: F, name: String): Boolean protected def getName(f: F): String protected def toAbstractFile(f: F): AbstractFile protected def isPackage(f: F): Boolean protected def createFileEntry(file: AbstractFile): FileEntryType - protected def isMatchingFile(f: F): Boolean + protected def isMatchingFile(f: F, siblingExists: String => Boolean): Boolean private def getDirectory(forPackage: PackageName): Option[F] = { if (forPackage.isRoot) { @@ -64,14 +66,18 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientCla case None => emptyFiles case Some(directory) => listChildren(directory, Some(isPackage)) } - nestedDirs.map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) + scala.collection.immutable.ArraySeq.unsafeWrapArray( + nestedDirs.map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) + ) } protected def files(inPackage: PackageName): Seq[FileEntryType] = { val dirForPackage = getDirectory(inPackage) val files: Array[F] = dirForPackage match { case None => emptyFiles - case Some(directory) => listChildren(directory, Some(isMatchingFile)) + case Some(directory) => + val hasCh = 
hasChild(directory, _) + listChildren(directory, Some(f => isMatchingFile(f, hasCh))) } files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq } @@ -81,10 +87,11 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientCla dirForPackage match { case None => case Some(directory) => + val hasCh = hasChild(directory, _) for (file <- listChildren(directory)) { if (isPackage(file)) onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) - else if (isMatchingFile(file)) + else if (isMatchingFile(file, hasCh)) onClassesAndSources(createFileEntry(toAbstractFile(file))) } } @@ -119,6 +126,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo java.util.Arrays.sort(listing, (o1: File, o2: File) => o1.getName.compareTo(o2.getName)) listing } + protected def hasChild(dir: File, name: String): Boolean = new File(dir, name).isFile protected def getName(f: File): String = f.getName protected def toAbstractFile(f: File): AbstractFile = new PlainFile(new scala.reflect.io.File(f)) protected def isPackage(f: File): Boolean = f.isPackage @@ -130,9 +138,9 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo } object JrtClassPath { - private val jrtClassPathCache = new FileBasedCache[Unit, JrtClassPath]() + private val jrtClassPathCache = new FileBasedCache[Option[String], JrtClassPath]() private val ctSymClassPathCache = new FileBasedCache[String, CtSymClassPath]() - def apply(release: Option[String], unsafe: Option[List[String]], closeableRegistry: CloseableRegistry): List[ClassPath] = + def apply(release: Option[String], systemPath: Option[String], unsafe: Option[List[String]], closeableRegistry: CloseableRegistry): List[ClassPath] = if (!isJavaAtLeast("9")) Nil else { // TODO escalate errors once we're sure they are fatal @@ -148,30 +156,36 @@ object JrtClassPath { val ct = createCt(version, closeableRegistry) unsafe match { case Some(pkgs) if pkgs.nonEmpty => - 
createJrt(closeableRegistry) match { - case Nil => ct - case jrt :: _ => ct :+ new FilteringJrtClassPath(jrt, pkgs: _*) + createJrt(systemPath, closeableRegistry) match { + case Nil => ct + case jrts => ct.appended(new FilteringJrtClassPath(jrts.head, pkgs: _*)) } case _ => ct } case _ => - createJrt(closeableRegistry) + createJrt(systemPath, closeableRegistry) } } private def createCt(v: String, closeableRegistry: CloseableRegistry): List[ClassPath] = try { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) Nil - else List( - ctSymClassPathCache.getOrCreate(v, ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, checkStamps = true) - ) + else { + val classPath = ctSymClassPathCache.getOrCreate(v, ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, checkStamps = true) + List(classPath) + } } catch { case NonFatal(_) => Nil } - private def createJrt(closeableRegistry: CloseableRegistry): List[JrtClassPath] = + private def createJrt(systemPath: Option[String], closeableRegistry: CloseableRegistry): List[JrtClassPath] = try { - val fs = FileSystems.getFileSystem(URI.create("jrt:/")) - val classPath = jrtClassPathCache.getOrCreate((), Nil, () => new JrtClassPath(fs), closeableRegistry, checkStamps = false) + val classPath = jrtClassPathCache.getOrCreate(systemPath, Nil, () => { + val fs = systemPath match { + case Some(javaHome) => FileSystems.newFileSystem(URI.create("jrt:/"), Collections.singletonMap("java.home", javaHome)) + case None => FileSystems.getFileSystem(URI.create("jrt:/")) + } + new JrtClassPath(fs, systemPath.isDefined) + }, closeableRegistry, checkStamps = false) List(classPath) } catch { case _: ProviderNotFoundException | _: FileSystemNotFoundException => Nil @@ -199,7 +213,7 @@ final class FilteringJrtClassPath(delegate: JrtClassPath, allowed: String*) exte * * The implementation assumes that no classes exist in the empty package. 
*/ -final class JrtClassPath(fs: FileSystem) extends ClassPath with NoSourcePaths { +final class JrtClassPath(fs: FileSystem, closeFS: Boolean) extends ClassPath with NoSourcePaths with Closeable { type F = Path private val dir: Path = fs.getPath("/packages") @@ -245,6 +259,9 @@ final class JrtClassPath(fs: FileSystem) extends ClassPath with NoSourcePaths { }.take(1).toList.headOption } } + + def close(): Unit = + if (closeFS) fs.close() } /** @@ -266,7 +283,7 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas // e.g. "java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { - val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() + val index = collection.mutable.HashMap[String, collection.mutable.ListBuffer[Path]]() val isJava12OrHigher = isJavaAtLeast("12") rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 @@ -325,7 +342,8 @@ case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileE } protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: File): Boolean = f.isClass + protected def isMatchingFile(f: File, siblingExists: String => Boolean): Boolean = + f.isClass && !(f.getName.endsWith(".class") && siblingExists(classNameToTasty(f.getName))) private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) } @@ -334,13 +352,13 @@ case class DirectorySourcePath(dir: File) extends JFileDirectoryLookup[SourceFil def asSourcePathString: String = asClassPathString protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) - protected def isMatchingFile(f: File): Boolean = endsScalaOrJava(f.getName) + protected def 
isMatchingFile(f: File, siblingExists: String => Boolean): Boolean = endsScalaOrJava(f.getName) override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl private def findSourceFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) - val sourceFile = Stream("scala", "java") + val sourceFile = Iterator("scala", "java") .map(ext => new File(s"$dir/$relativePath.$ext")) .collectFirst { case file if file.exists() => file } diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index da6505613706..d47c5db08c4c 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,11 +24,10 @@ object FileUtils { implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && (file.hasExtension("class") || file.hasExtension("sig")) + def isClass: Boolean = !file.isDirectory && (file.hasExtension("class") || file.hasExtension("sig") || file.hasExtension("tasty")) def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) - // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
def isJarOrZip: Boolean = file.isInstanceOf[ZipArchive] || !file.isDirectory && (file.hasExtension("jar") || file.hasExtension("zip")) /** @@ -47,6 +46,7 @@ object FileUtils { private val SUFFIX_SCALA = ".scala" private val SUFFIX_JAVA = ".java" private val SUFFIX_SIG = ".sig" + private val SUFFIX_TASTY = ".tasty" def stripSourceExtension(fileName: String): String = { if (endsScala(fileName)) stripClassExtension(fileName) @@ -58,8 +58,33 @@ object FileUtils { @inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length + def classNameToTasty(fileName: String): String = { + // TODO [tasty]: Dotty really wants to special-case standalone objects + // i.e. their classfile will end with `$`, but the tasty file will not. + // however then it needs to escape `Null$`, `Nothing$`, and `$` + // because these are "legitimate" classes with `$` in their name. + // It seems its not actually necessary to drop these files, + // as the classfile parser will not complain about them, + // however, it could increase efficiency to follow dotty + // and drop them anyway. 
+ // Scala 3 also prevents compilation of `object Foo` and `class Foo$` in the same package + // See test/tasty/run/src-2/tastytest/TestRuntimeSpecialClasses.scala for a test case + val isStandaloneObjectHeuristic = ( + fileName.lastIndexOf('$') == fileName.length - 7 + && fileName != "Null$.class" + && fileName != "Nothing$.class" + && fileName != "$.class" + ) + val className = + if (isStandaloneObjectHeuristic) + fileName.stripSuffix("$.class") + else + fileName.stripSuffix(".class") + className + SUFFIX_TASTY + } + def endsClass(fileName: String): Boolean = - ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) + ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) || fileName.endsWith(SUFFIX_TASTY) def endsScalaOrJava(fileName: String): Boolean = endsScala(fileName) || endsJava(fileName) diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala index c589bcc6598a..dcb6eced0372 100644 --- a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala index 40eb63200b37..ca895f6c0862 100644 --- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,7 @@ package scala.tools.nsc.classpath import scala.tools.nsc.util.ClassRepresentation import scala.reflect.io.{AbstractFile, VirtualDirectory} import FileUtils._ -import java.net.URL +import java.net.{URI, URL} import scala.reflect.internal.util.AbstractFileClassLoader import scala.tools.nsc.util.ClassPath @@ -25,28 +25,30 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def emptyFiles: Array[AbstractFile] = Array.empty protected def getSubDir(packageDirName: String): Option[AbstractFile] = - Option(AbstractFileClassLoader.lookupPath(dir)(packageDirName.split('/'), directory = true)) + Option(AbstractFileClassLoader.lookupPath(dir)(packageDirName.split('/').toIndexedSeq, directory = true)) protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match { case Some(f) => dir.iterator.filter(f).toArray case _ => dir.toArray } + protected def hasChild(dir: AbstractFile, name: String): Boolean = dir.lookupName(name, directory = false) != null + def getName(f: AbstractFile): String = f.name def toAbstractFile(f: AbstractFile): AbstractFile = f def isPackage(f: AbstractFile): Boolean = f.isPackage // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL("file://_VIRTUAL_/" + dir.name)) + def asURLs: Seq[URL] = Seq(new URI("file://_VIRTUAL_/" + dir.name).toURL) def asClassPathStrings: Seq[String] = Seq(dir.path) - override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl def findClassFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) + ".class" - Option(AbstractFileClassLoader.lookupPath(dir)(relativePath split '/', directory = false)) + Option(AbstractFileClassLoader.lookupPath(dir)(relativePath.split('/').toIndexedSeq, directory = false)) } private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] 
= files(inPackage) protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass + protected def isMatchingFile(f: AbstractFile, siblingExists: String => Boolean): Boolean = + f.isClass && !(f.hasExtension("class") && siblingExists(classNameToTasty(f.name))) } diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 39854624ecdd..43a24e0f4d26 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,6 +19,7 @@ import java.nio.file.attribute.{BasicFileAttributes, FileTime} import java.util.{Timer, TimerTask} import java.util.concurrent.atomic.AtomicInteger import scala.annotation.tailrec +import scala.collection.mutable import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.tools.nsc.{CloseableRegistry, Settings} @@ -40,10 +41,10 @@ sealed trait ZipAndJarFileLookupFactory { cache.checkCacheability(zipFile.toURL :: Nil, checkStamps = true, disableCache = disabled) match { case Left(_) => val result: ClassPath with Closeable = createForZipFile(zipFile, zipSettings) - closeableRegistry.registerClosable(result) + closeableRegistry.registerCloseable(result) result - case Right(Seq(path)) => - cache.getOrCreate(zipSettings, List(path), () => createForZipFile(zipFile, zipSettings), closeableRegistry, checkStamps = true) + case Right(paths) => + cache.getOrCreate(zipSettings, paths, () => createForZipFile(zipFile, zipSettings), 
closeableRegistry, checkStamps = true) } } @@ -63,7 +64,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) file(PackageName(pkg), simpleClassName + ".class").map(_.file) } - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. + // This method is performance sensitive as it is used by sbt's ExtractDependencies phase. override def findClass(className: String): Option[ClassRepresentation] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) file(PackageName(pkg), simpleClassName + ".class") @@ -72,7 +73,9 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass + override protected def isRequiredFileType(file: AbstractFile, siblingExists: String => Boolean): Boolean = { + file.isClass && !(file.hasExtension("class") && siblingExists(classNameToTasty(file.name))) + } } /** @@ -108,32 +111,28 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * when we need subpackages of a given package or its classes, we traverse once and cache only packages. * Classes for given package can be then easily loaded when they are needed. 
*/ - private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = { - val packages = collection.mutable.HashMap[String, PackageFileInfo]() - - def getSubpackages(dir: AbstractFile): List[AbstractFile] = - (for (file <- dir if file.isPackage) yield file)(collection.breakOut) - - @tailrec - def traverse(packagePrefix: String, - filesForPrefix: List[AbstractFile], - subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { - case pkgFile :: remainingFiles => - val subpackages = getSubpackages(pkgFile) - val fullPkgName = packagePrefix + pkgFile.name - packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages)) - val newPackagePrefix = fullPkgName + "." - subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) - traverse(packagePrefix, remainingFiles, subpackagesQueue) - case Nil if subpackagesQueue.nonEmpty => + private lazy val cachedPackages: mutable.HashMap[String, PackageFileInfo] = { + val packages = mutable.HashMap[String, PackageFileInfo]() + def getSubpackages(dir: AbstractFile): List[AbstractFile] = dir.iterator.filter(_.isPackage).toList + def traverse(subpackagesQueue: mutable.Queue[PackageInfo]): Unit = + while (subpackagesQueue.nonEmpty) { val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() - traverse(packagePrefix, filesForPrefix, subpackagesQueue) - case _ => - } - + @tailrec def loop(remainingFiles: List[AbstractFile]): Unit = remainingFiles match { + case pkgFile :: rest => + val subpackages = getSubpackages(pkgFile) + val fullPkgName = packagePrefix + pkgFile.name + packages.update(fullPkgName, PackageFileInfo(pkgFile, subpackages)) + val newPackagePrefix = fullPkgName + "." 
+ subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) + loop(rest) + case Nil => + } + loop(filesForPrefix) + } val subpackages = getSubpackages(file) - packages.put(ClassPath.RootPackage, PackageFileInfo(file, subpackages)) - traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) + packages.update(ClassPath.RootPackage, PackageFileInfo(file, subpackages)) + val infos = mutable.Queue(PackageInfo(ClassPath.RootPackage, subpackages)) + traverse(infos) packages } @@ -145,11 +144,9 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { case None => Seq.empty - case Some(PackageFileInfo(pkg, _)) => - (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut) + case Some(PackageFileInfo(pkg, _)) => pkg.iterator.filter(_.isClass).map(ClassFileEntryImpl(_)).toList } - override private[nsc] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) override private[nsc] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) } @@ -187,7 +184,7 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override private[nsc] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource + override protected def isRequiredFileType(file: AbstractFile, siblingExists: String => Boolean): Boolean = file.isScalaOrJavaSource } override protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) @@ -195,16 +192,17 @@ object ZipAndJarSourcePathFactory extends 
ZipAndJarFileLookupFactory { final class FileBasedCache[K, T] { import java.nio.file.Path + private val NoFileKey = new Object + private case class Stamp(lastModified: FileTime, size: Long, fileKey: Object) private case class Entry(k: K, stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) var timerTask: TimerTask = null - def cancelTimer(): Unit = { + def cancelTimer(): Unit = timerTask match { case null => - case t => t.cancel() + case task => task.cancel() } - } } private val cache = collection.mutable.Map.empty[(K, Seq[Path]), Entry] @@ -228,7 +226,7 @@ final class FileBasedCache[K, T] { override def run(): Unit = { cache.synchronized { if (e.referenceCount.compareAndSet(0, -1)) { - cache.remove(key) + cache.subtractOne(key) cl.close() } } @@ -254,7 +252,7 @@ final class FileBasedCache[K, T] { if (disableCache) Left("caching is disabled due to a policy setting") else if (!checkStamps) Right(paths) else { - val nonJarZips = urlsAndFiles.filter { case (url, file) => file == null || !Jar.isJarOrZip(file.file) } + val nonJarZips = urlsAndFiles.filter { case (_, file) => file == null || !Jar.isJarOrZip(file.file) } if (nonJarZips.nonEmpty) Left(s"caching is disabled because of the following classpath elements: ${nonJarZips.map(_._1).mkString(", ")}.") else Right(paths) } @@ -267,9 +265,9 @@ final class FileBasedCache[K, T] { val lastModified = attrs.lastModifiedTime() // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp val fileKey = attrs.fileKey() - Stamp(lastModified, attrs.size(), fileKey) + Stamp(lastModified, attrs.size(), if (fileKey == null) NoFileKey else fileKey) } catch { - case ex: java.nio.file.NoSuchFileException => + case _: java.nio.file.NoSuchFileException => // Dummy stamp for (currently) non-existent file. 
Stamp(FileTime.fromMillis(0), -1, new Object) } @@ -282,7 +280,7 @@ final class FileBasedCache[K, T] { // Cache hit val count = e.referenceCount.incrementAndGet() assert(count > 0, (stamps, count)) - closeableRegistry.registerClosable(referenceCountDecrementer(e, (k1, paths))) + closeableRegistry.registerCloseable(referenceCountDecrementer(e, (k1, paths))) cached } else { // Cache miss: we found an entry but the underlying files have been modified @@ -293,19 +291,20 @@ final class FileBasedCache[K, T] { } else { // TODO: What do do here? Maybe add to a list of closeables polled by a cleanup thread? } + case x => throw new MatchError(x) } val value = create() val entry = Entry(k, stamps, value) - cache.put(key, entry) - closeableRegistry.registerClosable(referenceCountDecrementer(entry, key)) + cache.update(key, entry) + closeableRegistry.registerCloseable(referenceCountDecrementer(entry, key)) value } case _ => // Cache miss val value = create() val entry = Entry(k, stamps, value) - cache.put(key, entry) - closeableRegistry.registerClosable(referenceCountDecrementer(entry, key)) + cache.update(key, entry) + closeableRegistry.registerCloseable(referenceCountDecrementer(entry, key)) value } } diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 294bdbe49345..e6f9b6cc717a 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,6 @@ package scala.tools.nsc.classpath import java.io.{Closeable, File} import java.net.URL -import scala.collection.Seq import scala.reflect.io.AbstractFile import scala.reflect.io.FileZipArchive import FileUtils.AbstractFileOps @@ -47,15 +46,19 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie protected def files(inPackage: PackageName): Seq[FileEntryType] = for { dirEntry <- findDirEntry(inPackage).toSeq - entry <- dirEntry.iterator if isRequiredFileType(entry) + entry <- dirEntry.iterator if isRequiredFileType(entry, dirEntry.entries.contains) } yield createFileEntry(entry) protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage) - entry <- Option(dirEntry.lookupName(name, directory = false)) - if isRequiredFileType(entry) - } yield createFileEntry(entry) + findDirEntry(inPackage) match { + case Some(dirEntry) => + val entry = dirEntry.lookupName(name, directory = false) + if (entry != null) + Some(createFileEntry(entry)) + else + None + case _ => None + } override private[nsc] def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined @@ -65,7 +68,7 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie for (entry <- dirEntry.iterator) { if (entry.isPackage) onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) - else if (isRequiredFileType(entry)) + else if (isRequiredFileType(entry, dirEntry.entries.contains)) onClassesAndSources(createFileEntry(entry)) } case None => @@ -78,6 +81,6 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType - protected def isRequiredFileType(file: AbstractFile): Boolean + protected def isRequiredFileType(file: AbstractFile, siblingExists: String => Boolean): Boolean } diff --git a/src/compiler/scala/tools/nsc/fsc/CompileClient.scala 
b/src/compiler/scala/tools/nsc/fsc/CompileClient.scala new file mode 100644 index 000000000000..00bea3d0ddab --- /dev/null +++ b/src/compiler/scala/tools/nsc/fsc/CompileClient.scala @@ -0,0 +1,68 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.fsc + +import scala.util.Properties + +/** The client part of the fsc offline compiler. Instead of compiling + * things itself, it send requests to a CompileServer. + */ +class StandardCompileClient extends HasCompileSocket with CompileOutputCommon { + lazy val compileSocket: CompileSocket = CompileSocket + + val versionMsg = "Fast " + Properties.versionMsg + var verbose = false + + def process(args: Array[String]): Boolean = { + // Trying to get out in front of the log messages in case we're + // going from verbose to not verbose. 
+ verbose = (args contains "-verbose") + + val settings = new FscSettings(Console.println) + val command = new OfflineCompilerCommand(args.toList, settings) + val shutdown = settings.shutdown.value + val extraVmArgs = if (settings.preferIPv4.value) List(s"-Djava.net.preferIPv4Stack=true") else Nil + + val vmArgs = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs + val fscArgs = args.toList ++ command.extraFscArgs + + if (settings.version.value) { + Console println versionMsg + return true + } + + info(versionMsg) + info(args.mkString("[Given arguments: ", " ", "]")) + info(fscArgs.mkString("[Transformed arguments: ", " ", "]")) + info(vmArgs.mkString("[VM arguments: ", " ", "]")) + + val socket = Option(settings.server.value).filter(_.nonEmpty) + .map(compileSocket.getSocket) + .getOrElse( + compileSocket.getOrCreateSocket(vmArgs.mkString(" "), !shutdown, settings.port.value) + ) + + socket match { + case Some(sock) => compileOnServer(sock, fscArgs) + case _ if shutdown => echo("[No compilation server running.]") ; true + case _ => echo("Compilation failed.") ; false + } + } +} + +object CompileClient extends StandardCompileClient { + def main(args: Array[String]): Unit = { + val ok = try process(args) catch { case _: Exception => false } + System.exit(if (ok) 0 else 1) + } +} diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/fsc/CompileServer.scala similarity index 89% rename from src/compiler/scala/tools/nsc/CompileServer.scala rename to src/compiler/scala/tools/nsc/fsc/CompileServer.scala index d7e6cbc8d211..8a49b7eff6d6 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/fsc/CompileServer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -11,14 +11,15 @@ */ package scala.tools.nsc +package fsc import java.io.PrintStream import scala.reflect.internal.util.FakePos +import scala.reflect.internal.FatalError import scala.tools.nsc.io.Directory import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} -import scala.tools.nsc.settings.FscSettings -import scala.tools.util.SocketServer +import scala.util.Properties /** * The server part of the fsc offline compiler. It awaits compilation @@ -26,7 +27,6 @@ import scala.tools.util.SocketServer * that it can respond more quickly. * * @author Martin Odersky - * @version 1.0 */ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { lazy val compileSocket: CompileSocket = CompileSocket @@ -43,18 +43,12 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { private val runtime = Runtime.getRuntime() import runtime.{freeMemory, maxMemory, totalMemory} - /** Create a new compiler instance */ - def newGlobal(settings: Settings, reporter: Reporter) = - new Global(settings, reporter) { - override def inform(pos: Position, msg: String) = out.println(msg) - } - - override def timeout() { + override def timeout(): Unit = { if (!compileSocket.portFile(port).exists) fatal("port file no longer exists; skipping cleanup") } - def printMemoryStats() { + def printMemoryStats(): Unit = { def mb(bytes: Long) = "%10.2fMB".format(bytes / 1048576.0) info("New session: total memory = %s, max memory = %s, free memory = %s".format( mb(totalMemory), mb(maxMemory), mb(freeMemory))) @@ -75,21 +69,21 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { def unequalSettings(s1: Settings, s2: Settings): Set[Settings#Setting] = { val ignoreSettings = Set("-d", "-encoding", "-currentDir") def trim (s: Settings): Set[Settings#Setting] = ( - s.userSetSettings.toSet[Settings#Setting] filterNot (ss => ignoreSettings exists (ss respondsTo _)) + s.userSetSettings.toSet[Settings#Setting] filterNot (ss => 
ignoreSettings.exists(ss respondsTo _)) ) val ss1 = trim(s1) val ss2 = trim(s2) - (ss1 union ss2) -- (ss1 intersect ss2) + (ss1 union ss2) diff (ss1 intersect ss2) } - def session() { + def session(): Unit = { val password = compileSocket getPassword port val guessedPassword = in.readLine() val input = in.readLine() def fscError(msg: String): Unit = out println ( - FakePos("fsc") + msg + "\n fsc -help gives more information" + "" + FakePos("fsc") + msg + "\n fsc -help gives more information" ) if (input == null || password != guessedPassword) return @@ -139,6 +133,8 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { } unequal.isEmpty } + /* Create a new compiler instance */ + def newGlobal(settings: Settings, reporter: Reporter) = Global(settings, reporter) if (command.shouldStopWithInfo) reporter.echo(command.getInfoMessage(newGlobal(newSettings, reporter))) @@ -156,7 +152,7 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { val c = compiler try new c.Run() compile command.files catch { - case ex @ FatalError(msg) => + case FatalError(msg) => reporter.error(null, "fatal error: " + msg) clearCompiler() case ex: Throwable => @@ -166,6 +162,7 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { throw ex } } + reporter.flush() reporter.finish() if (isMemoryFullEnough()) { info("Nulling out compiler due to memory utilization.") @@ -174,7 +171,6 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { } } - object CompileServer { /** A directory holding redirected output */ //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory() @@ -182,8 +178,7 @@ object CompileServer { private def createRedirect(dir: Directory, filename: String) = new PrintStream((dir / filename).createFile().bufferedOutput()) - def main(args: Array[String]) = - execute(() => (), args) + def main(args: Array[String]) = execute(() => (), args) /** * The 
server's main loop. @@ -195,15 +190,15 @@ object CompileServer { * until the callback is finished. Callbacks should be kept simple and clients should not try to * interact with the server while the callback is processing. */ - def execute(startupCallback : () => Unit, args: Array[String]) { + def execute(startupCallback : () => Unit, args: Array[String]): Unit = { val debug = args contains "-v" var port = 0 val i = args.indexOf("-p") if (i >= 0 && args.length > i + 1) { - scala.util.control.Exception.ignoring(classOf[NumberFormatException]) { - port = args(i + 1).toInt - } + scala.util.control.Exception.ignoring(classOf[NumberFormatException]) { + port = args(i + 1).toInt + } } // Create instance rather than extend to pass a port parameter. diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/fsc/CompileSocket.scala similarity index 86% rename from src/compiler/scala/tools/nsc/CompileSocket.scala rename to src/compiler/scala/tools/nsc/fsc/CompileSocket.scala index 2886b1c9f05f..b64f09567620 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/fsc/CompileSocket.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,19 +10,20 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools.nsc +package scala.tools.nsc.fsc import java.math.BigInteger +import java.nio.charset.StandardCharsets.UTF_8 import java.security.SecureRandom +import scala.annotation.tailrec import scala.io.Codec import scala.reflect.internal.util.OwnerOnlyChmod import scala.reflect.internal.util.StringOps.splitWhere -import scala.sys.process._ import scala.tools.nsc.Properties.scalacDir -import scala.tools.nsc.io.{File, Socket} -import scala.tools.util.CompileOutputCommon +import scala.tools.nsc.io.File import scala.util.control.NonFatal +import scala.util.Properties trait HasCompileSocket { def compileSocket: CompileSocket @@ -38,6 +39,7 @@ trait HasCompileSocket { out println (compileSocket getPassword sock.getPort()) out println (args mkString "\u0000") + @tailrec def loop(): Boolean = in.readLine() match { case null => noErrors case line => @@ -74,7 +76,7 @@ class CompileSocket extends CompileOutputCommon { } /** The class name of the scala compile server */ - protected val serverClass = "scala.tools.nsc.CompileServer" + protected val serverClass = CompileServer.getClass.getName.init protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) /* A directory holding port identification files */ @@ -88,15 +90,11 @@ class CompileSocket extends CompileOutputCommon { Seq(vmCommand) ++ vmArgs ++ Seq(serverClass) ++ serverClassArgs filterNot (_ == "") /** Start a new server. 
*/ - private def startNewServer(vmArgs: String) = { + private def startNewServer(vmArgs: String): Unit = { val cmd = serverCommand((vmArgs split " ").toSeq) - info("[Executing command: %s]" format cmd.mkString(" ")) + info(s"[Executing command: ${cmd.mkString(" ")}]") - // Hiding inadequate daemonized implementation from public API for now - Process(cmd) match { - case x: ProcessBuilder.AbstractBuilder => x.daemonized().run() - case x => x.run() - } + new java.lang.ProcessBuilder(cmd.toArray: _*).start() } /** The port identification file */ @@ -104,12 +102,11 @@ class CompileSocket extends CompileOutputCommon { /** Poll for a server port number; return -1 if none exists yet */ private def pollPort(): Int = if (fixPort > 0) { - if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1 + if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1 } else portsDir.list.toList match { case Nil => -1 - case x :: xs => try x.name.toInt catch { - case e: Exception => x.delete() - throw e + case x :: _ => try x.name.toInt catch { + case e: Exception => x.delete() ; throw e } } @@ -143,7 +140,7 @@ class CompileSocket extends CompileOutputCommon { val file = portFile(port) // 128 bits of delicious randomness, suitable for printing with println over a socket, // and storage in a file -- see getPassword - val secretDigits = new BigInteger(128, new SecureRandom()).toString.getBytes("UTF-8") + val secretDigits = new BigInteger(128, new SecureRandom()).toString.getBytes(UTF_8) try OwnerOnlyChmod.chmodFileAndWrite(file.jfile.toPath, secretDigits) catch chmodFailHandler(s"Cannot create file: ${file}") @@ -162,9 +159,10 @@ class CompileSocket extends CompileOutputCommon { val retryDelay = 50L val maxAttempts = (maxMillis / retryDelay).toInt + @tailrec def getsock(attempts: Int): Option[Socket] = attempts match { - case 0 => warn("Unable to establish connection to compilation daemon") ; None - case num => + case 0 => warn("Unable to establish 
connection to compilation daemon") ; None + case _ => val port = if (create) getPort(vmArgs) else pollPort() if (port < 0) return None @@ -186,13 +184,8 @@ class CompileSocket extends CompileOutputCommon { getsock(maxAttempts) } - // XXX way past time for this to be central - def parseInt(x: String): Option[Int] = - try { Some(x.toInt) } - catch { case _: NumberFormatException => None } - def getSocket(serverAdr: String): Option[Socket] = ( - for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield + for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- portStr.toIntOption) yield getSocket(name, port) ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr) @@ -236,9 +229,6 @@ class CompileSocket extends CompileOutputCommon { catch chmodFailHandler(s"Failed to change permissions on $dir. The compilation daemon requires a secure directory; use -nc to disable the daemon.") dir } - } - -object CompileSocket extends CompileSocket { -} +object CompileSocket extends CompileSocket diff --git a/src/compiler/scala/tools/nsc/fsc/FscSettings.scala b/src/compiler/scala/tools/nsc/fsc/FscSettings.scala new file mode 100644 index 000000000000..6261c0d00208 --- /dev/null +++ b/src/compiler/scala/tools/nsc/fsc/FscSettings.scala @@ -0,0 +1,70 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.fsc + +import scala.tools.nsc.Settings +import scala.tools.nsc.util.ClassPath +import scala.reflect.io.{AbstractFile, Path} +import scala.tools.nsc.settings.{DefaultPathFactory, PathFactory} + +class FscSettings(error: String => Unit, pathFactory: PathFactory = DefaultPathFactory) extends Settings(error, pathFactory) { + outer => + + locally { + disable(prompt) + disable(resident) + } + + val currentDir = StringSetting ("-current-dir", "path", "Base directory for resolving relative paths", "").internalOnly() withAbbreviation "--current-directory" + val reset = BooleanSetting("-reset", "Reset compile server caches") withAbbreviation "--reset" + val shutdown = BooleanSetting("-shutdown", "Shutdown compile server") withAbbreviation "--shutdown" + val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "") withAbbreviation "--server" + val port = IntSetting ("-port", "Search and start compile server in given port only", + 0, Some((0, Int.MaxValue)), (_: String) => None) withAbbreviation "--port" + val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket") withAbbreviation "--ipv4" + val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)", + 30, Some((0, Int.MaxValue)), (_: String) => None) withAbbreviation "--max-idle" + + // For improved help output, separating fsc options from the others. + def fscSpecific = Set[Settings#Setting]( + currentDir, reset, shutdown, server, port, preferIPv4, idleMins + ) + val isFscSpecific: String => Boolean = fscSpecific map (_.name) + + /** If a setting (other than a PathSetting) represents a path or paths. + * For use in absolutization. 
+ */ + private def holdsPath = Set[Settings#Setting](outdir, dependencyfile, pluginsDir) + + override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = { + val (r, args) = super.processArguments(arguments, processAll) + // we need to ensure the files specified with relative locations are absolutized based on the currentDir + (r, args map {a => absolutizePath(a)}) + } + + /** + * Take an individual path and if it's not absolute turns it into an absolute path based on currentDir. + * If it's already absolute then it's left alone. + */ + private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path + + /** All user set settings rewritten with absolute paths based on currentDir */ + def absolutize(): Unit = { + userSetSettings foreach { + case p: OutputSetting => outputDirs.setSingleOutput(AbstractFile.getDirectory(absolutizePath(p.value))) + case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath) + case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value) + case _ => () + } + } +} diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/fsc/OfflineCompilerCommand.scala similarity index 80% rename from src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala rename to src/compiler/scala/tools/nsc/fsc/OfflineCompilerCommand.scala index a36715067a03..428bc8209ab9 100644 --- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/fsc/OfflineCompilerCommand.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,11 +10,11 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools.nsc +package scala.tools.nsc.fsc -import settings.FscSettings -import io.Directory -import Properties.isWin +import scala.tools.nsc.CompilerCommand +import scala.reflect.io.Directory +import scala.util.Properties.isWin /** A compiler command for the offline compiler. * @@ -46,8 +46,8 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext override def cmdName = "fsc" override def usageMsg = ( - createUsageMsg("where possible fsc", shouldExplain = false, x => x.isStandard && settings.isFscSpecific(x.name)) + + createUsageMsg("where possible fsc", explain = false)(x => x.isStandard && settings.isFscSpecific(x.name)) + "\n\nStandard scalac options also available:" + - createUsageMsg(x => x.isStandard && !settings.isFscSpecific(x.name)) + optionsMessage(x => x.isStandard && !settings.isFscSpecific(x.name)) ) } diff --git a/src/compiler/scala/tools/nsc/fsc/ResidentScriptRunner.scala b/src/compiler/scala/tools/nsc/fsc/ResidentScriptRunner.scala new file mode 100644 index 000000000000..b32503b0523c --- /dev/null +++ b/src/compiler/scala/tools/nsc/fsc/ResidentScriptRunner.scala @@ -0,0 +1,54 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package fsc + +import scala.annotation.unused +import scala.reflect.io.Path +import scala.util.control.NonFatal + +class ResidentScriptRunner(settings: GenericRunnerSettings) extends AbstractScriptRunner(settings) with HasCompileSocket { + lazy val compileSocket = CompileSocket + + /** Compile a script using the fsc compilation daemon. 
+ */ + protected def doCompile(scriptFile: String) = { + val scriptPath = Path(scriptFile).toAbsolute.path + val compSettingNames = new Settings(msg => throw new RuntimeException(msg)).visibleSettings.toList map (_.name) + val compSettings = settings.visibleSettings.toList filter (compSettingNames contains _.name) + val coreCompArgs = compSettings flatMap (_.unparse) + val compArgs = coreCompArgs ++ List("-Xscript", mainClass, scriptPath) + + // TODO: untangle this mess of top-level objects with their own little view of the mutable world of settings + compileSocket.verbose = settings.verbose.value + + compileSocket getOrCreateSocket "" match { + case Some(sock) => compileOnServer(sock, compArgs) + case _ => false + } + } +} + +final class DaemonKiller(@unused settings: GenericRunnerSettings) extends ScriptRunner { + def runScript(script: String, scriptArgs: List[String]) = shutdownDaemon() + + def runScriptText(script: String, scriptArgs: List[String]) = shutdownDaemon() + + private def shutdownDaemon() = + try { + new StandardCompileClient().process(Array("-shutdown")) + None + } catch { + case NonFatal(t) => Some(t) + } +} diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/fsc/Socket.scala similarity index 85% rename from src/compiler/scala/tools/nsc/io/Socket.scala rename to src/compiler/scala/tools/nsc/fsc/Socket.scala index 0953f3dfa7c6..3636c02632bd 100644 --- a/src/compiler/scala/tools/nsc/io/Socket.scala +++ b/src/compiler/scala/tools/nsc/fsc/Socket.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,13 +10,12 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools.nsc -package io +package scala.tools.nsc.fsc -import java.io.{ IOException, InputStreamReader, BufferedReader, PrintWriter, Closeable } -import java.io.{ BufferedOutputStream, BufferedReader } -import java.net.{ InetAddress, Socket => JSocket } +import java.io.{BufferedOutputStream, BufferedReader, Closeable, InputStreamReader, IOException, PrintWriter} +import java.net.{InetAddress, Socket => JSocket} import scala.io.Codec +import scala.reflect.io.Streamable /** A skeletal only-as-much-as-I-need Socket wrapper. */ @@ -44,7 +43,7 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable { def close() = jsocket.close() def printWriter() = new PrintWriter(outputStream(), true) - def bufferedReader(implicit codec: Codec) = new BufferedReader(new InputStreamReader(inputStream())) + def bufferedReader(implicit codec: Codec) = new BufferedReader(new InputStreamReader(inputStream(), codec.decoder)) def bufferedOutput(size: Int) = new BufferedOutputStream(outputStream(), size) /** Creates an InputStream and applies the closure, automatically closing it on completion. diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/nsc/fsc/SocketServer.scala similarity index 92% rename from src/compiler/scala/tools/util/SocketServer.scala rename to src/compiler/scala/tools/nsc/fsc/SocketServer.scala index dd13be8538a7..cc3c3b09b6c4 100644 --- a/src/compiler/scala/tools/util/SocketServer.scala +++ b/src/compiler/scala/tools/nsc/fsc/SocketServer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,13 +10,12 @@ * additional information regarding copyright ownership. 
*/ -package scala -package tools.util +package scala.tools.nsc.fsc -import java.net.{ServerSocket, SocketException, SocketTimeoutException} +import scala.tools.util.SystemExit import java.io.{BufferedReader, PrintStream, PrintWriter} - -import scala.tools.nsc.io.Socket +import java.net.{ServerSocket, SocketException, SocketTimeoutException} +import scala.annotation.tailrec trait CompileOutputCommon { def verbose: Boolean @@ -36,7 +35,6 @@ trait CompileOutputCommon { * communication for the fast Scala compiler. * * @author Martin Odersky - * @version 1.0 */ abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon { def shutdown: Boolean @@ -86,10 +84,11 @@ abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon { } } - def run() { + def run(): Unit = { info("Starting SocketServer run() loop.") - def loop() { + @tailrec + def loop(): Unit = { acceptBox.either match { case Right(clientSocket) => try doSession(clientSocket) @@ -99,7 +98,7 @@ abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon { timeout() return case _ => - warn("Accept on port %d failed") + warn(s"Accept on port $port failed") } if (!shutdown) loop() diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 66deaed0ee9c..8503d5f4a802 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,13 +13,15 @@ package scala.tools.nsc package io -import scala.language.postfixOps - -import java.io.{ InputStream, OutputStream, DataOutputStream } +import java.io.{DataOutputStream, InputStream, OutputStream} import java.util.jar._ -import scala.collection.JavaConverters._ + +import scala.jdk.CollectionConverters._ import Attributes.Name +import scala.annotation.tailrec +import scala.collection.AbstractIterable + // Attributes.Name instances: // // static Attributes.Name CLASS_PATH @@ -40,7 +42,7 @@ import Attributes.Name // static Attributes.Name SPECIFICATION_VENDOR // static Attributes.Name SPECIFICATION_VERSION -class Jar(file: File) extends Iterable[JarEntry] { +class Jar(file: File) extends AbstractIterable[JarEntry] { def this(jfile: JFile) = this(File(jfile)) def this(path: String) = this(File(path)) @@ -49,9 +51,9 @@ class Jar(file: File) extends Iterable[JarEntry] { def mainClass = manifest map (f => f(Name.MAIN_CLASS)) /** The manifest-defined classpath String if available. 
*/ def classPathString: Option[String] = - for (m <- manifest ; cp <- m.attrs get Name.CLASS_PATH) yield cp + for (m <- manifest ; cp <- m.attrs.get(Name.CLASS_PATH)) yield cp def classPathElements: List[String] = classPathString match { - case Some(s) => s split "\\s+" toList + case Some(s) => s.split("\\s+").toList case _ => Nil } @@ -62,9 +64,9 @@ class Jar(file: File) extends Iterable[JarEntry] { jarFile getEntry name match { case null => f(None) case entry => - val in = Some(jarFile getInputStream entry) + val in = Some(jarFile.getInputStream(entry)) try f(in) - finally in map (_.close()) + finally in.foreach(_.close()) } try apply() finally jarFile.close() } @@ -82,6 +84,7 @@ class Jar(file: File) extends Iterable[JarEntry] { Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f } override def iterator: Iterator[JarEntry] = this.toList.iterator + override def isEmpty: Boolean = iterator.isEmpty override def toString = "" + file } @@ -98,29 +101,30 @@ class JarWriter(val file: File, val manifest: Manifest) { new DataOutputStream(out) } - def writeAllFrom(dir: Directory) { + def writeAllFrom(dir: Directory): Unit = { try dir.list foreach (x => addEntry(x, "")) finally out.close() } - def addStream(entry: JarEntry, in: InputStream) { + def addStream(entry: JarEntry, in: InputStream): Unit = { out putNextEntry entry try transfer(in, out) finally out.closeEntry() } - def addFile(file: File, prefix: String) { + def addFile(file: File, prefix: String): Unit = { val entry = new JarEntry(prefix + file.name) addStream(entry, file.inputStream()) } - def addEntry(entry: Path, prefix: String) { + def addEntry(entry: Path, prefix: String): Unit = { if (entry.isFile) addFile(entry.toFile, prefix) else addDirectory(entry.toDirectory, prefix + entry.name + "/") } - def addDirectory(entry: Directory, prefix: String) { + def addDirectory(entry: Directory, prefix: String): Unit = { entry.list foreach (p => addEntry(p, prefix)) } private def transfer(in: 
InputStream, out: OutputStream) = { val buf = new Array[Byte](10240) + @tailrec def loop(): Unit = in.read(buf, 0, buf.length) match { case -1 => in.close() case n => out.write(buf, 0, n) ; loop() @@ -162,7 +166,7 @@ object Jar { def update(key: Attributes.Name, value: String) = attrs.put(key, value) } - // See http://docs.oracle.com/javase/7/docs/api/java/nio/file/Path.html + // See https://docs.oracle.com/javase/8/docs/api/java/nio/file/Path.html // for some ideas. private val ZipMagicNumber = List[Byte](80, 75, 3, 4) private def magicNumberIsZip(f: Path) = f.toFile.bytes().take(4).toList == ZipMagicNumber @@ -170,7 +174,7 @@ object Jar { // file exists and either has name.jar or magic number def isJarOrZip(f: Path): Boolean = f.isFile && (Path.isExtensionJarOrZip(f.name) || magicNumberIsZip(f)) - def create(file: File, sourceDir: Directory, mainClass: String) { + def create(file: File, sourceDir: Directory, mainClass: String): Unit = { val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass) writer writeAllFrom sourceDir } diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index b254d7f1def2..082fb6af5261 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,6 +17,7 @@ import java.io.{FileInputStream, IOException} import java.nio.{ByteBuffer, CharBuffer} import java.nio.channels.{Channels, ClosedByInterruptException, ReadableByteChannel} import java.nio.charset.{CharsetDecoder, CoderResult} +import scala.annotation.tailrec import scala.reflect.internal.Reporter /** This class implements methods to read and decode source files. 
*/ @@ -116,6 +117,7 @@ object SourceReader { * argument indicates whether the byte buffer contains the last * chunk of the input file. */ + @tailrec def decode(decoder: CharsetDecoder, bytes: ByteBuffer, chars: CharBuffer, endOfInput: Boolean): CharBuffer = { @@ -135,6 +137,7 @@ object SourceReader { * allocating bigger ones if necessary and then flips and returns * the last allocated char buffer. */ + @tailrec def flush(decoder: CharsetDecoder, chars: CharBuffer): CharBuffer = { val result: CoderResult = decoder.flush(chars) if (result.isUnderflow()) { diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index 3a0502ae6169..aa88275dcec4 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 0eab75d9e43d..be88aa4207e4 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,13 +16,15 @@ package scala.tools.nsc package javac -import scala.collection.mutable.ListBuffer import symtab.Flags import JavaTokens._ +import scala.annotation._ +import scala.collection.mutable +import scala.collection.mutable.ListBuffer import scala.language.implicitConversions -import scala.reflect.internal.util.Position -import scala.reflect.internal.util.ListOfNil +import scala.reflect.internal.util.{CodeAction, ListOfNil, Position} import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val global : Global @@ -36,7 +38,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def freshName(prefix: String): Name = freshTermName(prefix) def freshTermName(prefix: String): TermName = unit.freshTermName(prefix) def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix) - def deprecationWarning(off: Int, msg: String, since: String) = runReporting.deprecationWarning(off, msg, since, site = "", origin = "") + def deprecationWarning(off: Int, msg: String, since: String, actions: List[CodeAction]) = runReporting.deprecationWarning(off, msg, since, site = "", origin = "", actions) implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset) def warning(pos : Int, msg : String) : Unit = runReporting.warning(pos, msg, WarningCategory.JavaSource, site = "") def syntaxError(pos: Int, msg: String) : Unit = reporter.error(pos, msg) @@ -44,6 +46,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { abstract class JavaParser extends ParserCommon { val in: JavaScanner + def unit: CompilationUnit def freshName(prefix : String): Name protected implicit def i2p(offset : Int) : Position @@ -64,7 +67,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { private var lastErrorPos : Int = -1 - protected def skip() { + protected def skip(): Unit = { var nparens = 0 var nbraces = 0 while (true) 
{ @@ -90,11 +93,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def warning(pos : Int, msg : String) : Unit def syntaxError(pos: Int, msg: String) : Unit - def syntaxError(msg: String, skipIt: Boolean) { + def syntaxError(msg: String, skipIt: Boolean): Unit = { syntaxError(in.currentPos, msg, skipIt) } - def syntaxError(pos: Int, msg: String, skipIt: Boolean) { + def syntaxError(pos: Int, msg: String, skipIt: Boolean): Unit = { if (pos > lastErrorPos) { syntaxError(pos, msg) // no more errors on this token. @@ -117,6 +120,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def javaLangObject(): Tree = javaLangDot(tpnme.Object) + def javaLangRecord(): Tree = javaLangDot(tpnme.Record) + def arrayOf(tpt: Tree) = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) @@ -149,7 +154,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { // ------------- general parsing --------------------------- /** skip parent or brace enclosed sequence of things */ - def skipAhead() { + def skipAhead(): Unit = { var nparens = 0 var nbraces = 0 do { @@ -171,7 +176,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } while (in.token != EOF && (nparens > 0 || nbraces > 0)) } - def skipTo(tokens: Int*) { + def skipTo(tokens: Int*): Unit = { while (!(tokens contains in.token) && in.token != EOF) { if (in.token == LBRACE) { skipAhead(); accept(RBRACE) } else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) } @@ -196,7 +201,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { pos } - def acceptClosingAngle() { + def acceptClosingAngle(): Unit = { val closers: PartialFunction[Int, Int] = { case GTGTGTEQ => GTGTEQ case GTGTGT => GTGT @@ -262,7 +267,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { t } - def optArrayBrackets(tpt: Tree): Tree = + @tailrec + final def optArrayBrackets(tpt: Tree): Tree = if (in.token == LBRACKET) { val tpt1 = 
atPos(in.pos) { arrayOf(tpt) } in.nextToken() @@ -291,7 +297,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (in.token == FINAL) in.nextToken() if (in.token == IDENTIFIER) { var t = typeArgs(atPos(in.currentPos)(Ident(ident()))) - // typeSelect generates Select nodes is the lhs is an Ident or Select, + // typeSelect generates Select nodes if the lhs is an Ident or Select, // SelectFromTypeTree otherwise. See #3567. // Select nodes can be later // converted in the typechecker to SelectFromTypeTree if the class @@ -300,8 +306,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case Ident(_) | Select(_, _) => Select(t, name) case _ => SelectFromTypeTree(t, name.toTypeName) } + if (in.token == DOT) + t.updateAttachment(RootSelection) while (in.token == DOT) { in.nextToken() + annotations() // TODO: fix scala/bug#9883 (JSR 308) t = typeArgs(atPos(in.currentPos)(typeSelect(t, ident()))) } convertToTypeId(t) @@ -317,7 +326,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (in.token == QMARK) { val pos = in.currentPos in.nextToken() - val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree + val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else Ident(definitions.ObjectClass) val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree val tdef = atPos(pos) { TypeDef( @@ -334,7 +343,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (in.token == LT) { in.nextToken() val t1 = convertToTypeId(t) - val args = repsep(typeArg, COMMA) + val args = repsep(() => typeArg(), COMMA) acceptClosingAngle() atPos(t1.pos) { val t2: Tree = AppliedTypeTree(t1, args) @@ -369,7 +378,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { * // We only support a subset of the Java syntax that can form constant expressions. 
* // https://docs.oracle.com/javase/specs/jls/se14/html/jls-15.html#jls-15.29 * // - * // Luckily, we can just parse matching `(` and `)` to find our way to the end of the the argument list. + * // Luckily, we can just parse matching `(` and `)` to find our way to the end of the argument list. * // and drop the arguments until we implement full support for Java constant expressions * // * ConditionalExpressionSubset := Literal @@ -381,7 +390,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { * ElementValueList ::= ElementValue {`,` ElementValue} */ def annotation(): Tree = { - object LiteralK { def unapply(token: Token) = tryLiteral() } + object LiteralK { def unapply(@unused token: Token) = tryLiteral() } def elementValue(): Tree = in.token match { case LiteralK(k) => in.nextToken(); atPos(in.currentPos)(Literal(k)) @@ -439,16 +448,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } } - def modifiers(inInterface: Boolean): Modifiers = { + def modifiers(inInterface: Boolean, annots0: List[Tree] = Nil): Modifiers = { var flags: Long = Flags.JAVA // assumed true unless we see public/private/protected var isPackageAccess = true - var annots: List[Tree] = Nil + var annots: List[Tree] = annots0 def addAnnot(sym: Symbol) = annots :+= New(sym.tpe) while (true) { in.token match { - case AT if (in.lookaheadToken != INTERFACE) => + case AT if in.lookaheadToken != INTERFACE => in.nextToken() val annot = annotation() if (annot.nonEmpty) annots :+= annot @@ -489,11 +498,39 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case SYNCHRONIZED => in.nextToken() case _ => - val privateWithin: TypeName = - if (isPackageAccess && !inInterface) thisPackageName - else tpnme.EMPTY - - return Modifiers(flags, privateWithin) withAnnotations annots + val unsealed = 0L // no flag for UNSEALED + def consume(added: FlagSet): false = { in.nextToken(); flags |= added; false } + def lookingAhead(s: String): Boolean = { + 
import scala.reflect.internal.Chars._ + var i = 0 + val n = s.length + val lookahead = in.in.lookahead + while (i < n && lookahead.ch != SU) { + if (lookahead.ch != s.charAt(i)) return false + lookahead.next() + i += 1 + } + i == n && Character.isWhitespace(lookahead.ch) + } + val done = (in.token != IDENTIFIER) || ( + in.name match { + case nme.javaRestrictedIdentifiers.SEALED => consume(Flags.SEALED) + case nme.javaRestrictedIdentifiers.UNSEALED => consume(unsealed) + case nme.javaRestrictedIdentifiers.NON => + !lookingAhead("-sealed") || { + in.nextToken() + in.nextToken() + consume(unsealed) + } + case _ => true + } + ) + if (done) { + val privateWithin: TypeName = + if (isPackageAccess && !inInterface) thisPackageName + else tpnme.EMPTY + return Modifiers(flags, privateWithin) withAnnotations annots + } } } abort("should not be here") @@ -502,7 +539,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def typeParams(): List[TypeDef] = if (in.token == LT) { in.nextToken() - val tparams = repsep(typeParam, COMMA) + val tparams = repsep(() => typeParam(), COMMA) acceptClosingAngle() tparams } else List() @@ -529,7 +566,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def formalParams(): List[ValDef] = { accept(LPAREN) - val vparams = if (in.token == RPAREN) List() else repsep(formalParam, COMMA) + val vparams = if (in.token == RPAREN) List() else repsep(() => formalParam(), COMMA) accept(RPAREN) vparams } @@ -547,10 +584,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM, typeNames.EMPTY, anns), t, ident().toTermName) } - def optThrows() { + def optThrows(): Unit = { if (in.token == THROWS) { in.nextToken() - repsep(typ, COMMA) + repsep(() => typ(), COMMA) } } @@ -562,6 +599,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def definesInterface(token: Int) = token == INTERFACE || token == AT + /** If the 
next token is the identifier "record", convert it into a proper + * token. Technically, "record" is just a restricted identifier. However, + * once we've figured out that it is in a position where it identifies a + * "record" class, it is much more convenient to promote it to a token. + */ + def adaptRecordIdentifier(): Unit = { + if (in.token == IDENTIFIER && in.name == nme.javaRestrictedIdentifiers.RECORD) + in.token = RECORD + } + def termDecl(mods: Modifiers, parentToken: Int): List[Tree] = { val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams() else List() @@ -585,6 +632,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { DefDef(mods, nme.CONSTRUCTOR, tparams, List(vparams), TypeTree(), methodBody()) } } + } else if (in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) { + // compact constructor + methodBody() + List.empty } else { var mods1 = mods if (mods hasFlag Flags.ABSTRACT) mods1 = mods &~ Flags.ABSTRACT | Flags.DEFERRED @@ -595,7 +646,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val vparams = formalParams() if (!isVoid) rtpt = optArrayBrackets(rtpt) optThrows() - val isConcreteInterfaceMethod = !inInterface || (mods hasFlag Flags.JAVA_DEFAULTMETHOD) || (mods hasFlag Flags.STATIC) + val isConcreteInterfaceMethod = !inInterface || (mods hasFlag Flags.JAVA_DEFAULTMETHOD) || (mods hasFlag Flags.STATIC) || (mods hasFlag Flags.PRIVATE) val bodyOk = !(mods1 hasFlag Flags.DEFERRED) && isConcreteInterfaceMethod val body = if (bodyOk && in.token == LBRACE) { @@ -674,7 +725,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def varDecl(pos: Position, mods: Modifiers, tpt: Tree, name: TermName): ValDef = { val tpt1 = optArrayBrackets(tpt) - /** Tries to detect final static literals syntactically and returns a constant type replacement */ + /* Tries to detect final static literals syntactically and returns a constant type 
replacement */ def optConstantTpe(): Tree = { def constantTpe(const: Constant): Tree = TypeTree(ConstantType(const)) @@ -719,11 +770,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } } - def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = in.token match { - case CLASS | ENUM | INTERFACE | AT => - typeDecl(if (definesInterface(parentToken)) mods | Flags.STATIC else mods) - case _ => - termDecl(mods, parentToken) + def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = { + in.token match { + case CLASS | ENUM | RECORD | INTERFACE | AT => + typeDecl(mods) + case _ => + termDecl(mods, parentToken) + } } def makeCompanionObject(cdef: ClassDef, statics: List[Tree]): Tree = @@ -732,11 +785,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { makeTemplate(List(), statics)) } - def importCompanionObject(cdef: ClassDef): Tree = - atPos(cdef.pos) { - Import(Ident(cdef.name.toTermName), ImportSelector.wildList) - } - def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] = List(makeCompanionObject(cdef, statics), cdef) @@ -744,6 +792,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(IMPORT) val pos = in.currentPos val buf = new ListBuffer[Name] + @tailrec def collectIdents() : Int = { if (in.token == ASTERISK) { val starOffset = in.pos @@ -764,14 +813,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val lastnameOffset = collectIdents() accept(SEMI) val names = buf.toList - if (names.length < 2) { + if (names.lengthIs < 2) { syntaxError(pos, "illegal import", skipIt = false) List() } else { - val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _)) + val qual = names.tail.init.foldLeft(Ident(names.head): Tree)(Select(_, _)) val lastname = names.last val selector = lastname match { - case nme.WILDCARD => ImportSelector(lastname, lastnameOffset, null, -1) + case nme.WILDCARD => ImportSelector.wildAt(lastnameOffset) 
case _ => ImportSelector(lastname, lastnameOffset, lastname, lastnameOffset) } List(atPos(pos)(Import(qual, List(selector)))) @@ -781,12 +830,20 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def interfacesOpt() = if (in.token == IMPLEMENTS) { in.nextToken() - repsep(typ, COMMA) + repsep(() => typ(), COMMA) } else { List() } + def permitsOpt() = + if (in.token == IDENTIFIER && in.name == nme.javaRestrictedIdentifiers.PERMITS) { + in.nextToken() + repsep(() => typ(), COMMA) + } + else Nil + def classDecl(mods: Modifiers): List[Tree] = { + if (mods.hasFlag(SEALED)) patmat.javaClassesByUnit(unit.source) = mutable.Set.empty accept(CLASS) val pos = in.currentPos val name = identForType() @@ -799,13 +856,56 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { javaLangObject() } val interfaces = interfacesOpt() - val (statics, body) = typeBody(CLASS, name) + val permits = permitsOpt() + val (statics, body) = typeBody(CLASS) addCompanionObject(statics, atPos(pos) { ClassDef(mods, name, tparams, makeTemplate(superclass :: interfaces, body)) + .tap(cd => if (permits.nonEmpty) cd.updateAttachment(PermittedSubclasses(permits))) + }) + } + + def recordDecl(mods: Modifiers): List[Tree] = { + accept(RECORD) + val pos = in.currentPos + val name = identForType() + val tparams = typeParams() + val header = formalParams() + val superclass = javaLangRecord() + val interfaces = interfacesOpt() + val (statics, body) = typeBody(RECORD) + + // Generate accessors, if not already explicitly specified. Record bodies tend to be trivial. + val existing = body.iterator.collect { case DefDef(_, name, Nil, ListOfNil, _, _) => name }.toSet + val accessors = header.iterator + .collect { + case ValDef(mods, name, tpt, _) if !existing(name) => + DefDef(Modifiers(Flags.JAVA).withAnnotations(mods.annotations), name, tparams = Nil, vparamss = ListOfNil, tpt.duplicate, blankExpr) + } + .toList + + // Generate canonical constructor. 
During parsing this is done unconditionally but the symbol + // is unlinked in Namer if it is found to clash with a manually specified constructor. + val canonicalCtor = DefDef( + mods | Flags.SYNTHETIC, + nme.CONSTRUCTOR, + List(), + List(header.map(_.duplicate)), + TypeTree(), + blankExpr + ) + + addCompanionObject(statics, atPos(pos) { + ClassDef( + mods | Flags.FINAL, + name, + tparams, + makeTemplate(superclass :: interfaces, canonicalCtor :: accessors ::: body) + ) }) } def interfaceDecl(mods: Modifiers): List[Tree] = { + if (mods.hasFlag(SEALED)) patmat.javaClassesByUnit(unit.source) = mutable.Set.empty accept(INTERFACE) val pos = in.currentPos val name = identForType() @@ -813,26 +913,28 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val parents = if (in.token == EXTENDS) { in.nextToken() - repsep(typ, COMMA) + repsep(() => typ(), COMMA) } else { List(javaLangObject()) } - val (statics, body) = typeBody(INTERFACE, name) + val permits = permitsOpt() + val (statics, body) = typeBody(INTERFACE) addCompanionObject(statics, atPos(pos) { ClassDef(mods | Flags.TRAIT | Flags.INTERFACE | Flags.ABSTRACT, name, tparams, makeTemplate(parents, body)) + .tap(cd => if (permits.nonEmpty) cd.updateAttachment(PermittedSubclasses(permits))) }) } - def typeBody(leadingToken: Int, parentName: Name): (List[Tree], List[Tree]) = { + def typeBody(leadingToken: Int): (List[Tree], List[Tree]) = { accept(LBRACE) - val defs = typeBodyDecls(leadingToken, parentName) + val defs = typeBodyDecls(leadingToken) accept(RBRACE) defs } - def typeBodyDecls(parentToken: Int, parentName: Name): (List[Tree], List[Tree]) = { + def typeBodyDecls(parentToken: Int): (List[Tree], List[Tree]) = { val inInterface = definesInterface(parentToken) val statics = new ListBuffer[Tree] val members = new ListBuffer[Tree] @@ -844,9 +946,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } else if (in.token == SEMI) { in.nextToken() } else { - if (in.token == 
ENUM || definesInterface(in.token)) mods |= Flags.STATIC + // See "14.3. Local Class and Interface Declarations" + adaptRecordIdentifier() + if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) + mods |= Flags.STATIC val decls = joinComment(memberDecl(mods, parentToken)) + @tailrec def isDefDef(tree: Tree): Boolean = tree match { case _: DefDef => true case DocDef(_, defn) => isDefDef(defn) @@ -861,20 +967,19 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } (statics.toList, members.toList) } - def annotationParents = List( - gen.scalaAnnotationDot(tpnme.Annotation), - Select(javaLangDot(nme.annotation), tpnme.Annotation), - gen.scalaAnnotationDot(tpnme.ClassfileAnnotation) - ) + def annotationParents = Select(javaLangDot(nme.annotation), tpnme.Annotation) :: Nil def annotationDecl(mods: Modifiers): List[Tree] = { accept(AT) accept(INTERFACE) val pos = in.currentPos val name = identForType() - val (statics, body) = typeBody(AT, name) + val (statics, body) = typeBody(AT) val templ = makeTemplate(annotationParents, body) addCompanionObject(statics, atPos(pos) { - ClassDef(mods | Flags.JAVA_ANNOTATION, name, List(), templ) + import Flags._ + ClassDef( + mods | JAVA_ANNOTATION | TRAIT | INTERFACE | ABSTRACT, + name, List(), templ) }) } @@ -887,7 +992,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(LBRACE) val buf = new ListBuffer[Tree] var enumIsFinal = true - def parseEnumConsts() { + @tailrec + def parseEnumConsts(): Unit = { if (in.token != RBRACE && in.token != SEMI && in.token != EOF) { val (const, hasClassBody) = enumConst(enumType) buf += const @@ -904,7 +1010,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val (statics, body) = if (in.token == SEMI) { in.nextToken() - typeBodyDecls(ENUM, name) + typeBodyDecls(ENUM) } else { (List(), List()) } @@ -922,11 +1028,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(RBRACE) val 
superclazz = AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType)) - val finalFlag = if (enumIsFinal) Flags.FINAL else 0l + val hasAbstractMember = body.exists { + case m: MemberDef => m.mods.hasFlag(Flags.DEFERRED) + case _ => false + } + val finalFlag = if (enumIsFinal) Flags.FINAL else 0L + val abstractFlag = if (hasAbstractMember) Flags.ABSTRACT else 0L addCompanionObject(consts ::: statics ::: predefs, atPos(pos) { - // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking. See also ClassfileParser. - // This is a bit of a hack and requires excluding the ABSTRACT flag in the backend, see method javaClassfileFlags. - ClassDef(mods | Flags.JAVA_ENUM | Flags.SEALED | Flags.ABSTRACT | finalFlag, name, List(), + ClassDef(mods | Flags.JAVA_ENUM | Flags.SEALED | abstractFlag | finalFlag, name, List(), makeTemplate(superclazz :: interfaces, body)) }) } @@ -952,12 +1061,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { (res, hasClassBody) } - def typeDecl(mods: Modifiers): List[Tree] = in.token match { - case ENUM => joinComment(enumDecl(mods)) - case INTERFACE => joinComment(interfaceDecl(mods)) - case AT => annotationDecl(mods) - case CLASS => joinComment(classDecl(mods)) - case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + def typeDecl(mods: Modifiers): List[Tree] = { + adaptRecordIdentifier() + in.token match { + case ENUM => joinComment(enumDecl(mods)) + case INTERFACE => joinComment(interfaceDecl(mods)) + case AT => annotationDecl(mods) + case CLASS => joinComment(classDecl(mods)) + case RECORD => joinComment(recordDecl(mods)) + case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + } } def tryLiteral(negate: Boolean = false): Option[Constant] = { @@ -976,28 +1089,34 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { else Some(Constant(l)) } - /** CompilationUnit ::= 
[package QualId semi] TopStatSeq + /** CompilationUnit ::= [[Annotation] package QualId semi] {Import} {TypeDecl} //TopStatSeq */ def compilationUnit(): Tree = { + val buf = ListBuffer.empty[Tree] var pos = in.currentPos + val leadingAnnots = if (in.token == AT) annotations() else Nil val pkg: RefTree = - if (in.token == AT || in.token == PACKAGE) { - annotations() // TODO: put these somewhere? - pos = in.currentPos + if (in.token == PACKAGE) { + if (!leadingAnnots.isEmpty) { // TODO: put these somewhere? + //if (unit.source.file.name != "package-info.java") + // syntaxError(pos, "package annotations must be in file package-info.java") + pos = in.currentPos + } accept(PACKAGE) - val pkg = qualId().asInstanceOf[RefTree] - accept(SEMI) - pkg - } else { + qualId().asInstanceOf[RefTree].tap(_ => accept(SEMI)) + } + else { + if (!leadingAnnots.isEmpty) + buf ++= typeDecl(modifiers(inInterface = false, annots0 = leadingAnnots)) Ident(nme.EMPTY_PACKAGE_NAME) } thisPackageName = gen.convertToTypeName(pkg) match { case Some(t) => t.name.toTypeName case _ => tpnme.EMPTY } - val buf = new ListBuffer[Tree] - while (in.token == IMPORT) - buf ++= importDecl() + if (buf.isEmpty) + while (in.token == IMPORT) + buf ++= importDecl() while (in.token != EOF && in.token != RBRACE) { while (in.token == SEMI) in.nextToken() if (in.token != EOF) diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index de4e71af799c..a7341dc228d5 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -19,6 +19,7 @@ import scala.reflect.internal.Chars._ import JavaTokens._ import scala.annotation.{switch, tailrec} import scala.language.implicitConversions +import scala.collection.immutable.ArraySeq import scala.tools.nsc.Reporting.WarningCategory // Todo merge these better with Scanners @@ -163,61 +164,59 @@ trait JavaScanners extends ast.parser.ScannersCommon { case STRINGLIT => "string literal" case EOF => "eof" case ERROR => "something" - case AMP => "`&'" - case AMPAMP => "`&&'" - case AMPEQ => "`&='" - case ASTERISK => "`*'" - case ASTERISKEQ => "`*='" - case AT => "`@'" - case BANG => "`!'" - case BANGEQ => "`!='" - case BAR => "`|'" - case BARBAR => "`||'" - case BAREQ => "`|='" - case COLON => "`:'" - case COMMA => "`,'" - case DOT => "`.'" - case DOTDOTDOT => "`...'" - case EQEQ => "`=='" - case EQUALS => "`='" - case GT => "`>'" - case GTEQ => "`>='" - case GTGT => "`>>'" - case GTGTEQ => "`>>='" - case GTGTGT => "`>>>'" - case GTGTGTEQ => "`>>>='" - case HAT => "`^'" - case HATEQ => "`^='" - case LBRACE => "`{'" - case LBRACKET => "`['" - case LPAREN => "`('" - case LT => "`<'" - case LTEQ => "`<='" - case LTLT => "`<<'" - case LTLTEQ => "`<<='" - case MINUS => "`-'" - case MINUSEQ => "`-='" - case MINUSMINUS => "`--'" - case PERCENT => "`%'" - case PERCENTEQ => "`%='" - case PLUS => "`+'" - case PLUSEQ => "`+='" - case PLUSPLUS => "`++'" - case QMARK => "`?'" - case RBRACE => "`}'" - case RBRACKET => "`]'" - case RPAREN => "`)'" - case SEMI => "`;'" - case SLASH => "`/'" - case SLASHEQ => "`/='" - case TILDE => "`~'" + case AMP => "`&`" + case AMPAMP => "`&&`" + case AMPEQ => "`&=`" + case ASTERISK => "`*`" + case ASTERISKEQ => "`*=`" + case AT => "`@`" + case BANG => "`!`" + case BANGEQ => "`!=`" + case BAR => "`|`" + case BARBAR => "`||`" + case BAREQ => "`|=`" + case COLON => "`:`" + case COMMA => "`,`" + case DOT => "`.`" + case DOTDOTDOT => "`...`" + case EQEQ => "`==`" + case EQUALS => "`=`" + case GT => "`>`" + case GTEQ => "`>=`" + case 
GTGT => "`>>`" + case GTGTEQ => "`>>=`" + case GTGTGT => "`>>>`" + case GTGTGTEQ => "`>>>=`" + case HAT => "`^`" + case HATEQ => "`^=`" + case LBRACE => "`{`" + case LBRACKET => "`[`" + case LPAREN => "`(`" + case LT => "`<`" + case LTEQ => "`<=`" + case LTLT => "`<<`" + case LTLTEQ => "`<<=`" + case MINUS => "`-`" + case MINUSEQ => "`-=`" + case MINUSMINUS => "`--`" + case PERCENT => "`%`" + case PERCENTEQ => "`%=`" + case PLUS => "`+`" + case PLUSEQ => "`+=`" + case PLUSPLUS => "`++`" + case QMARK => "`?`" + case RBRACE => "`}`" + case RBRACKET => "`]`" + case RPAREN => "`)`" + case SEMI => "`;`" + case SLASH => "`/`" + case SLASHEQ => "`/=`" + case TILDE => "`~`" case _ => - try ("`" + tokenName(token) + "'") + try s"`${tokenName(token)}`" catch { - case _: ArrayIndexOutOfBoundsException => - "`<" + token + ">'" - case _: NullPointerException => - "`<(" + token + ")>'" + case _: ArrayIndexOutOfBoundsException => s"`<$token>`" + case _: NullPointerException => s"`<($token)>`" } } } @@ -238,10 +237,13 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** append Unicode character to "lit" buffer */ - protected def putChar(c: Char) { cbuf.append(c) } + protected def putChar(c: Char): Unit = { cbuf.append(c) } + + /** Remove the last N characters from the buffer */ + private def popNChars(n: Int): Unit = if (n > 0) cbuf.setLength(cbuf.length - n) /** Clear buffer and set name */ - private def setName() { + private def setName(): Unit = { name = newTermName(cbuf.toString()) cbuf.setLength(0) } @@ -255,7 +257,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { // Get next token ------------------------------------------------------------ - def nextToken() { + def nextToken(): Unit = { if (next.token == EMPTY) { fetchToken() } @@ -276,7 +278,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** read next token */ - private def fetchToken() { + private def fetchToken(): Unit = { if (token == EOF) return lastPos = in.cpos - 1 while (true) { @@ 
-323,15 +325,26 @@ trait JavaScanners extends ast.parser.ScannersCommon { case '\"' => in.next() - while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { - getlitch() - } - if (in.ch == '\"') { - token = STRINGLIT - setName() - in.next() + if (in.ch != '\"') { // "..." non-empty string literal + while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { + getlitch() + } + if (in.ch == '\"') { + token = STRINGLIT + setName() + in.next() + } else { + syntaxError("unclosed string literal") + } } else { - syntaxError("unclosed string literal") + in.next() + if (in.ch != '\"') { // "" empty string literal + token = STRINGLIT + setName() + } else { + in.next() + getTextBlock() + } } return @@ -521,7 +534,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { if (in.ch == '.') { in.next() token = DOTDOTDOT - } else syntaxError("`.' character expected") + } else syntaxError("`.` character expected") } return @@ -620,7 +633,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { // Identifiers --------------------------------------------------------------- - private def getIdentRest() { + private def getIdentRest(): Unit = { while (true) { (in.ch: @switch) match { case 'A' | 'B' | 'C' | 'D' | 'E' | @@ -665,48 +678,179 @@ trait JavaScanners extends ast.parser.ScannersCommon { // Literals ----------------------------------------------------------------- /** read next character in character or string literal: - */ - protected def getlitch() = - if (in.ch == '\\') { + * + * @param scanOnly skip emitting errors or adding to the literal buffer + * @param inTextBlock is this for a text block? 
+ */ + protected def getlitch(scanOnly: Boolean = false, inTextBlock: Boolean = false): Unit = + getlitch(in = in, scanOnly = scanOnly, inTextBlock = inTextBlock) + + private def getlitch(in: JavaCharArrayReader, scanOnly: Boolean, inTextBlock: Boolean): Unit = { + def octal: Char = { + val leadch: Char = in.ch + var oct: Int = digit2int(in.ch, 8) in.next() if ('0' <= in.ch && in.ch <= '7') { - val leadch: Char = in.ch - var oct: Int = digit2int(in.ch, 8) + oct = oct * 8 + digit2int(in.ch, 8) in.next() - if ('0' <= in.ch && in.ch <= '7') { + if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') { oct = oct * 8 + digit2int(in.ch, 8) in.next() - if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') { - oct = oct * 8 + digit2int(in.ch, 8) - in.next() - } } - putChar(oct.asInstanceOf[Char]) - } else { - in.ch match { - case 'b' => putChar('\b') - case 't' => putChar('\t') - case 'n' => putChar('\n') - case 'f' => putChar('\f') - case 'r' => putChar('\r') - case '\"' => putChar('\"') - case '\'' => putChar('\'') - case '\\' => putChar('\\') + } + oct.asInstanceOf[Char] + } // end octal + def greatEscape: Char = { + in.next() + if ('0' <= in.ch && in.ch <= '7') octal + else { + val x = in.ch match { + case 'b' => '\b' + case 's' => ' ' + case 't' => '\t' + case 'n' => '\n' + case 'f' => '\f' + case 'r' => '\r' + case '\"' => '\"' + case '\'' => '\'' + case '\\' => '\\' + case CR | LF if inTextBlock => + if (!scanOnly) in.next() + 0.toChar case _ => - syntaxError(in.cpos - 1, "invalid escape character") - putChar(in.ch) + if (!scanOnly) syntaxError(in.cpos - 1, "invalid escape character") + in.ch } + if (x != 0) in.next() + x + } + } // end greatEscape + // begin getlitch + val c: Char = + if (in.ch == '\\') greatEscape + else { + val res = in.ch in.next() + res } - } else { - putChar(in.ch) + if (c != 0 && !scanOnly) putChar(c) + } // end getlitch + + /** read a triple-quote delimited text block, starting after the first three + * double quotes + */ + private def 
getTextBlock(): Unit = { + // Open delimiter is followed by optional space, then a newline + while (in.ch == ' ' || in.ch == '\t' || in.ch == FF) { in.next() } + if (in.ch != LF && in.ch != CR) { // CR-LF is already normalized into LF by `JavaCharArrayReader` + syntaxError("illegal text block open delimiter sequence, missing line terminator") + return + } + in.next() + + /* Do a lookahead scan over the full text block to: + * - compute common white space prefix + * - find the offset where the text block ends + */ + var commonWhiteSpacePrefix = Int.MaxValue + var blockEndOffset = 0 + var blockClosed = false + var lineWhiteSpacePrefix = 0 + var lineIsOnlyWhitespace = true + val lookahead = in.lookahead + while (!blockClosed && (lookahead.isUnicode || lookahead.ch != SU)) { + if (lookahead.ch == '\"') { // Potential end of the block + lookahead.next() + if (lookahead.ch == '\"') { + lookahead.next() + if (lookahead.ch == '\"') { + blockClosed = true + commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix + blockEndOffset = lookahead.cpos - 2 + } + } + + // Not the end of the block - just a single or double " character + if (!blockClosed) { + lineIsOnlyWhitespace = false + } + } else if (lookahead.ch == CR || lookahead.ch == LF) { // new line in the block + lookahead.next() + if (!lineIsOnlyWhitespace) { + commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix + } + lineWhiteSpacePrefix = 0 + lineIsOnlyWhitespace = true + } else if (lineIsOnlyWhitespace && Character.isWhitespace(lookahead.ch)) { // extend white space prefix + lookahead.next() + lineWhiteSpacePrefix += 1 + } else { + lineIsOnlyWhitespace = false + getlitch(lookahead, scanOnly = true, inTextBlock = true) + } + } + + // Bail out if the block never did have an end + if (!blockClosed) { + syntaxError("unclosed text block") + return + } + + // Second pass: construct the literal string value this time + while (in.cpos < blockEndOffset) { + // Drop the line's leading 
whitespace + var remainingPrefix = commonWhiteSpacePrefix + while (remainingPrefix > 0 && in.ch != CR && in.ch != LF && in.cpos < blockEndOffset) { + in.next() + remainingPrefix -= 1 + } + + var trailingWhitespaceLength = 0 + var escapedNewline = false // Does the line end with `\`? + while (in.ch != CR && in.ch != LF && in.cpos < blockEndOffset && !escapedNewline) { + if (Character.isWhitespace(in.ch)) { + trailingWhitespaceLength += 1 + } else { + trailingWhitespaceLength = 0 + } + + // Detect if the line is about to end with `\` + if (in.ch == '\\' && { + val lookahead = in.copy + lookahead.next() + lookahead.ch == CR || lookahead.ch == LF + }) { + escapedNewline = true + } + + getlitch(scanOnly = false, inTextBlock = true) + } + + // Drop the line's trailing whitespace + popNChars(trailingWhitespaceLength) + + // Normalize line terminators + if ((in.ch == CR || in.ch == LF) && !escapedNewline) { + in.next() + putChar('\n') + } + } + + token = STRINGLIT + setName() + + // Trailing """ + in.next() + in.next() + in.next() + } /** read fractional part and exponent of floating point number * if one is present. 
*/ - protected def getFraction() { + protected def getFraction(): Unit = { token = DOUBLELIT while ('0' <= in.ch && in.ch <= '9') { putChar(in.ch) @@ -795,7 +939,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { } /** read a number into name and set base */ - protected def getNumber() { + protected def getNumber(): Unit = { while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) { putChar(in.ch) in.next() @@ -835,17 +979,17 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** generate an error at the given position */ - def syntaxError(pos: Int, msg: String) { + def syntaxError(pos: Int, msg: String): Unit = { error(pos, msg) token = ERROR } /** generate an error at the current token position */ - def syntaxError(msg: String) { syntaxError(pos, msg) } + def syntaxError(msg: String): Unit = { syntaxError(pos, msg) } /** signal an error where the input ended in the middle of a token */ - def incompleteInputError(msg: String) { + def incompleteInputError(msg: String): Unit = { incompleteInputError(pos, msg) token = EOF } @@ -875,19 +1019,19 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** INIT: read lookahead character and token. 
*/ - def init() { + def init(): Unit = { in.next() nextToken() } } class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { - in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) + in = new JavaCharArrayReader(new ArraySeq.ofChar(unit.source.content), decodeUni = true, syntaxError) init() def error(pos: Int, msg: String) = reporter.error(pos, msg) def incompleteInputError(pos: Int, msg: String) = currentRun.parsing.incompleteInputError(pos, msg) def warning(pos: Int, msg: String, category: WarningCategory) = runReporting.warning(pos, msg, category, site = "") - def deprecationWarning(pos: Int, msg: String, since: String) = runReporting.deprecationWarning(pos, msg, since, site = "", origin = "") + def deprecationWarning(pos: Int, msg: String, since: String, actions: List[CodeAction]) = runReporting.deprecationWarning(pos, msg, since, site = "", origin = "", actions) implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos) } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index 855fe19e6706..6f296bc9e672 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -20,6 +20,7 @@ object JavaTokens extends ast.parser.CommonTokens { /** identifiers */ final val IDENTIFIER = 10 + final val RECORD = 12 // restricted identifier, so not lexed directly def isIdentifier(code: Int) = code == IDENTIFIER @@ -37,6 +38,7 @@ object JavaTokens extends ast.parser.CommonTokens { final val NATIVE = 53 final val STRICTFP = 54 final val THROWS = 56 + final val UNSEALED = 57 // contextual keyword /** templates */ final val INTERFACE = 66 diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala index 46cd59b63625..0326aba538ac 100644 --- a/src/compiler/scala/tools/nsc/package.scala +++ b/src/compiler/scala/tools/nsc/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,6 +12,8 @@ package scala.tools +import scala.reflect.internal.util.StringContextStripMarginOps + package object nsc { type Mode = scala.reflect.internal.Mode val Mode = scala.reflect.internal.Mode @@ -32,4 +34,8 @@ package object nsc { @deprecated("Use scala.reflect.internal.util.ListOfNil", "2.11.0") lazy val ListOfNil = scala.reflect.internal.util.ListOfNil + + /** Adds the `sm` interpolator to a [[scala.StringContext]]. + */ + implicit val `strip margin`: StringContext => StringContextStripMarginOps = StringContextStripMarginOps } diff --git a/src/compiler/scala/tools/nsc/plugins/OutputFileWriter.scala b/src/compiler/scala/tools/nsc/plugins/OutputFileWriter.scala index 6cac633cd2ac..5a86eeef433b 100644 --- a/src/compiler/scala/tools/nsc/plugins/OutputFileWriter.scala +++ b/src/compiler/scala/tools/nsc/plugins/OutputFileWriter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,5 +15,5 @@ package scala.tools.nsc.plugins import scala.reflect.io.AbstractFile trait OutputFileWriter { - def writeFile(relativeName: String, data: Array[Byte], outputDir: AbstractFile) + def writeFile(relativeName: String, data: Array[Byte], outputDir: AbstractFile): Unit } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 2836fd4f03ea..9ba109da6c5f 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,7 +30,6 @@ import scala.util.{Failure, Success, Try} * }}} * * @author Lex Spoon - * @version 1.0, 2007-5-21 */ abstract class Plugin { /** The name of this plugin */ @@ -123,7 +122,7 @@ object Plugin { try { Success[AnyClass](loader loadClass classname) } catch { - case NonFatal(e) => + case NonFatal (_) => Failure(new PluginLoadException(classname, s"Error: unable to load class: $classname")) case e: NoClassDefFoundError => Failure(new PluginLoadException(classname, s"Error: class not found: ${e.getMessage} required by $classname")) @@ -141,22 +140,16 @@ object Plugin { paths: List[List[Path]], dirs: List[Path], ignoring: List[String], - findPluginClassloader: Seq[Path] => ClassLoader, - ): List[Try[AnyClass]] = { - - def targeted(targets: List[List[Path]]) = targets.map { path => - val loader = findPluginClassloader(path) + findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] = + { + def pluginResource(classpath: List[Path], loader: ClassLoader) = loader.getResource(PluginXML) match { - case null => Failure(new MissingPluginException(path)) + case null => Failure(new 
MissingPluginException(classpath)) case url => val inputStream = url.openStream - try { - Try((PluginDescription.fromXML(inputStream), loader)) - } finally { - inputStream.close() - } + try Try((PluginDescription.fromXML(inputStream), loader)) finally inputStream.close() } - } + def targeted(targets: List[List[Path]]) = targets.filter(_.nonEmpty).map(classpath => pluginResource(classpath, findPluginClassloader(classpath))) def dirList(dir: Path) = if (dir.isDirectory) dir.toDirectory.files.filter(Jar.isJarOrZip).toList.sortBy(_.name) else Nil // ask plugin loaders for plugin resources, but ignore if none in -Xpluginsdir @@ -164,10 +157,10 @@ object Plugin { val seen = mutable.HashSet[String]() val enabled = fromLoaders map { - case Success((pd, loader)) if seen(pd.classname) => + case Success((pd, _)) if seen(pd.classname) => // a nod to scala/bug#7494, take the plugin classes distinctly Failure(new PluginLoadException(pd.name, s"Ignoring duplicate plugin ${pd.name} (${pd.classname})")) - case Success((pd, loader)) if ignoring contains pd.name => + case Success((pd, _)) if ignoring contains pd.name => Failure(new PluginLoadException(pd.name, s"Disabling plugin ${pd.name}")) case Success((pd, loader)) => seen += pd.classname @@ -181,9 +174,8 @@ object Plugin { /** Instantiate a plugin class, given the class and * the compiler it is to be used in. 
*/ - def instantiate(clazz: AnyClass, global: Global): Plugin = { - (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin] - } + def instantiate(clazz: AnyClass, global: Global): Plugin = + clazz.getConstructor(classOf[Global]).newInstance(global).asInstanceOf[Plugin] } class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause) { diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala index 1424a0420be6..989bab940ffa 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,7 +16,6 @@ package plugins /** A component that is part of a Plugin. * * @author Lex Spoon - * @version 1.1, 2009/1/2 * Updated 2009/1/2 by Anders Bach Nielsen: Added features to implement SIP 00002 */ abstract class PluginComponent extends SubComponent { @@ -27,7 +26,7 @@ abstract class PluginComponent extends SubComponent { /** Only plugins are granted a reprieve from specifying whether they follow. */ val runsRightAfter: Option[String] = None - /** Useful for -Xshow-phases. */ + /** Useful for -Vphases. */ def description: String = "" } diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala index 83d5d238bde5..e31a1cc269f2 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,15 +13,11 @@ package scala.tools.nsc package plugins -import scala.reflect.internal.util.StringContextStripMarginOps - /** A description of a compiler plugin, suitable for serialization * to XML for inclusion in the plugin's .jar file. * * @author Lex Spoon - * @version 1.0, 2007-5-21 * @author Adriaan Moors - * @version 2.0, 2013 * @param name A short name of the plugin, used to identify it in * various contexts. The phase defined by the plugin * should have the same name. @@ -39,11 +35,6 @@ case class PluginDescription(name: String, classname: String) { } /** Utilities for the PluginDescription class. - * - * @author Lex Spoon - * @version 1.0, 2007-5-21 - * @author Adriaan Moors - * @version 2.0, 2013 */ object PluginDescription { private def text(ns: org.w3c.dom.NodeList): String = diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 6cb3d205f109..6f79043e6330 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,6 @@ package scala.tools.nsc package plugins - import java.net.URL import java.util @@ -21,14 +20,11 @@ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.Path import scala.tools.nsc.Reporting.WarningCategory import scala.tools.nsc.plugins.Plugin.pluginClassLoadersCache +import scala.tools.nsc.typechecker.Macros import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults /** Support for run-time loading of compiler plugins. 
- * - * @author Lex Spoon - * @version 1.1, 2009/1/2 - * Updated 2009/1/2 by Anders Bach Nielsen: Added features to implement SIP 00002 */ trait Plugins { global: Global => @@ -101,12 +97,12 @@ trait Plugins { global: Global => val cache = pluginClassLoadersCache val checkStamps = policy == settings.CachePolicy.LastModified.name cache.checkCacheability(classpath.map(_.toURL), checkStamps, disableCache) match { - case Left(msg) => + case Left(_) => val loader = newLoader() - closeableRegistry.registerClosable(loader) + closeableRegistry.registerCloseable(loader) loader case Right(paths) => - cache.getOrCreate((), classpath.map(_.jfile.toPath()), newLoader, closeableRegistry, checkStamps) + cache.getOrCreate((), paths, newLoader, closeableRegistry, checkStamps) } } @@ -118,32 +114,27 @@ trait Plugins { global: Global => */ protected def loadPlugins(): List[Plugin] = { // remove any with conflicting names or subcomponent names - def pick( - plugins: List[Plugin], - plugNames: Set[String], - phaseNames: Set[String]): List[Plugin] = - { - if (plugins.isEmpty) return Nil // early return - - val plug :: tail = plugins - val plugPhaseNames = Set(plug.components map (_.phaseName): _*) - def withoutPlug = pick(tail, plugNames, plugPhaseNames) - def withPlug = plug :: pick(tail, plugNames + plug.name, phaseNames ++ plugPhaseNames) - lazy val commonPhases = phaseNames intersect plugPhaseNames - - def note(msg: String): Unit = if (settings.verbose) inform(msg format plug.name) - def fail(msg: String) = { note(msg) ; withoutPlug } - - if (plugNames contains plug.name) - fail("[skipping a repeated plugin: %s]") - else if (settings.disable.value contains plug.name) - fail("[disabling plugin: %s]") - else if (!commonPhases.isEmpty) - fail("[skipping plugin %s because it repeats phase names: " + (commonPhases mkString ", ") + "]") - else { - note("[loaded plugin %s]") - withPlug - } + def pick(plugins: List[Plugin], plugNames: Set[String], phaseNames: Set[String]): List[Plugin] = 
plugins match { + case Nil => Nil // early return + case plug :: tail => + val plugPhaseNames = Set(plug.components map (_.phaseName): _*) + def withoutPlug = pick(tail, plugNames, plugPhaseNames) + def withPlug = plug :: pick(tail, plugNames + plug.name, phaseNames ++ plugPhaseNames) + lazy val commonPhases = phaseNames intersect plugPhaseNames + + def note(msg: String): Unit = if (settings.verbose.value) inform(msg format plug.name) + def fail(msg: String) = { note(msg) ; withoutPlug } + + if (plugNames contains plug.name) + fail("[skipping a repeated plugin: %s]") + else if (settings.disable.value contains plug.name) + fail("[disabling plugin: %s]") + else if (!commonPhases.isEmpty) + fail("[skipping plugin %s because it repeats phase names: " + (commonPhases mkString ", ") + "]") + else { + note("[loaded plugin %s]") + withPlug + } } val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet) @@ -180,4 +171,38 @@ trait Plugins { global: Global => (for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield { "\nOptions for plugin '%s':\n%s\n".format(plug.name, help) }).mkString + + /** Obtains a `ClassLoader` instance used for macro expansion. + * + * By default a new `ScalaClassLoader` is created using the classpath + * from global and the classloader of self as parent. + * + * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
+ */ + def findMacroClassLoader(): ClassLoader = { + val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { + for { + file <- ClassPath.expandPath(settings.YmacroClasspath.value, expandStar = true) + af <- Option(settings.pathFactory.getDirectory(file)) + } yield af.file.toURI.toURL + } else global.classPath.asURLs + def newLoader: () => ScalaClassLoader.URLClassLoader = () => { + analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) + ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) + } + + val policy = settings.YcacheMacroClassLoader.value + val cache = Macros.macroClassLoadersCache + val disableCache = policy == settings.CachePolicy.None.name + val checkStamps = policy == settings.CachePolicy.LastModified.name + cache.checkCacheability(classpath, checkStamps, disableCache) match { + case Left(msg) => + analyzer.macroLogVerbose(s"macro classloader: $msg.") + val loader = newLoader() + closeableRegistry.registerCloseable(loader) + loader + case Right(paths) => + cache.getOrCreate((), paths, newLoader, closeableRegistry, checkStamps) + } + } } diff --git a/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java b/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java index 1d5cf4bc3e4e..5409a19f280a 100644 --- a/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java +++ b/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -260,13 +260,14 @@ public SunThreadMxBean(ThreadMXBean underlying) { super(underlying); this.real = underlying; try { - getThreadUserTimeMethod = real.getClass().getMethod("getThreadUserTime", long[].class); - isThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("isThreadAllocatedMemoryEnabled"); - setThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); - getThreadAllocatedBytesMethod1 = real.getClass().getMethod("getThreadAllocatedBytes", Long.TYPE); - getThreadAllocatedBytesMethod2 = real.getClass().getMethod("getThreadAllocatedBytes", long[].class); - isThreadAllocatedMemorySupportedMethod = real.getClass().getMethod("isThreadAllocatedMemorySupported"); - getThreadCpuTimeMethod = real.getClass().getMethod("getThreadCpuTime", long[].class); + Class cls = Class.forName("com.sun.management.ThreadMXBean"); + getThreadUserTimeMethod = cls.getMethod("getThreadUserTime", long[].class); + isThreadAllocatedMemoryEnabledMethod = cls.getMethod("isThreadAllocatedMemoryEnabled"); + setThreadAllocatedMemoryEnabledMethod = cls.getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); + getThreadAllocatedBytesMethod1 = cls.getMethod("getThreadAllocatedBytes", Long.TYPE); + getThreadAllocatedBytesMethod2 = cls.getMethod("getThreadAllocatedBytes", long[].class); + isThreadAllocatedMemorySupportedMethod = cls.getMethod("isThreadAllocatedMemorySupported"); + getThreadCpuTimeMethod = cls.getMethod("getThreadCpuTime", long[].class); getThreadUserTimeMethod.setAccessible(true); isThreadAllocatedMemoryEnabledMethod.setAccessible(true); diff --git a/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java b/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java index b8ee01090407..e89231f63e2e 100644 --- a/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java +++ b/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL 
and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index e0b3de2240ab..eca33695cd86 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,6 +22,7 @@ import java.util.concurrent.atomic.AtomicInteger import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} +import scala.annotation.{nowarn, unused} import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.util.ChromeTrace import scala.reflect.io.AbstractFile @@ -29,7 +30,7 @@ import scala.tools.nsc.{Global, Phase, Settings} object Profiler { def apply(settings: Settings):Profiler = - if (!settings.YprofileEnabled) NoOpProfiler + if (!settings.YprofileEnabled.value) NoOpProfiler else { val reporter = settings.YprofileDestination.value match { case _ if !settings.YprofileDestination.isSetByUser => NoOpProfileReporter @@ -117,7 +118,7 @@ private [profile] object NoOpProfiler extends Profiler { override def finished(): Unit = () } private [profile] object RealProfiler { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val runtimeMx = ManagementFactory.getRuntimeMXBean val memoryMx = ManagementFactory.getMemoryMXBean val gcMx = ManagementFactory.getGarbageCollectorMXBeans.asScala.toList @@ -126,10 +127,10 @@ private [profile] object RealProfiler { val threadMx = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val 
idGen = new AtomicInteger() + lazy val allPlugins = ServiceLoader.load(classOf[ProfilerPlugin]).iterator.asScala.toList - lazy val allPlugins = ServiceLoader.load(classOf[ProfilerPlugin]).iterator().asScala.toList - - private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { + @annotation.nowarn("cat=deprecation") + private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { val current = Thread.currentThread() val allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId) ProfileSnap( @@ -173,7 +174,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S reporter.reportBackground(this, threadRange) } - def outDir = settings.outputDirs.getSingleOutput.getOrElse(settings.outputDirs.outputs.head._2.file).toString + def outDir = settings.outputDirs.getSingleOutput.map(_.path).getOrElse(settings.outputDirs.outputs.head._2.path) RealProfiler.gcMx foreach { case emitter: NotificationEmitter => emitter.addNotificationListener(this, null, null) @@ -182,9 +183,9 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val active = RealProfiler.allPlugins map (_.generate(this, settings)) - private def doGC: Unit = { + private def doGC(): Unit = { System.gc() - System.runFinalization() + System.runFinalization(): @nowarn("cat=deprecation") // since Java 18 } reporter.header(this) @@ -194,7 +195,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S //we may miss a GC event if gc is occurring as we call this RealProfiler.gcMx foreach { case emitter: NotificationEmitter => emitter.removeNotificationListener(this) - case gc => + case _ => } reporter.close(this) if (chromeTrace != null) { @@ -216,7 +217,10 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S import java.lang.{Integer => jInt} val reportNs = System.nanoTime() val data = notification.getUserData + //val seq = notification.getSequenceNumber + //val message 
= notification.getMessage val tpe = notification.getType + //val time= notification.getTimeStamp data match { case cd: CompositeData if tpe == "com.sun.management.gc.notification" => val name = cd.get("gcName").toString @@ -232,6 +236,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S gcEvents += gcEvent } reporter.reportGc(gcEvent) + case x => throw new MatchError(x) } } @@ -239,7 +244,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S assert(mainThread eq Thread.currentThread()) if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Phase, phase.name) if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) - doGC + doGC() if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook start") ExternalToolHook.before() @@ -257,7 +262,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S ExternalToolHook.after() } val finalSnap = if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) { - doGC + doGC() initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Phase, phase.name) @@ -330,13 +335,12 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S } } - private def completionName(root: Global#Symbol, associatedFile: AbstractFile): String = { + private def completionName(root: Global#Symbol, @unused associatedFile: AbstractFile): String = if (root.hasPackageFlag || root.isTopLevel) root.javaBinaryNameString else { val enclosing = root.enclosingTopLevelClass enclosing.javaBinaryNameString + "::" + root.rawname.toString } - } } object EventType extends Enumeration { @@ -350,12 +354,16 @@ object EventType extends Enumeration { } sealed trait ProfileReporter { + @nowarn("cat=lint-inaccessible") def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit + @nowarn("cat=lint-inaccessible") def 
reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit def reportGc(data: GcEventData): Unit + @nowarn("cat=lint-inaccessible") def header(profiler: RealProfiler) :Unit + @nowarn("cat=lint-inaccessible") def close(profiler: RealProfiler) :Unit } @@ -380,18 +388,22 @@ object NoOpProfileReporter extends ProfileReporter { } class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { + @nowarn("cat=lint-inaccessible") override def header(profiler: RealProfiler): Unit = { out.println(s"info, ${profiler.id}, version, 2, output, ${profiler.outDir}") out.println(s"header(main/background),startNs,endNs,runId,phaseId,phaseName,purpose,task-count,threadId,threadName,runNs,idleNs,cpuTimeNs,userTimeNs,allocatedByte,heapSize") out.println(s"header(GC),startNs,endNs,startMs,endMs,name,action,cause,threads") } + @nowarn("cat=lint-inaccessible") override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = { reportCommon(EventType.BACKGROUND, profiler, threadRange) } + @nowarn("cat=lint-inaccessible") override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = { reportCommon(EventType.MAIN, profiler, threadRange) } + @annotation.nowarn("cat=deprecation") private def reportCommon(tpe:EventType.value, profiler: RealProfiler, threadRange: ProfileRange): Unit = { out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.name},${threadRange.purpose},${threadRange.taskCount},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${threadRange.end.heapBytes} ") } @@ -402,6 +414,7 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, 
${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } + @nowarn("cat=lint-inaccessible") override def close(profiler: RealProfiler): Unit = { out.flush() out.close() diff --git a/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala b/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala index 97073f448259..4c830d8ff214 100644 --- a/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala +++ b/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,6 +12,7 @@ package scala.tools.nsc.profile +import scala.annotation.nowarn import scala.tools.nsc.{Phase, Settings} /** @@ -28,6 +29,7 @@ trait ProfilerPlugin { * @param settings the setting for the current compile * @return the run specific profiler, that will receive updates as the compile progresses */ + @nowarn("cat=lint-inaccessible") def generate(profiler: RealProfiler, settings: Settings): ProfilerPluginRun } @@ -38,7 +40,7 @@ trait ProfilerPluginRun { /** called before a phase */ def beforePhase(phase: Phase): Unit - /** called afer a phase a phase */ + /** called after a phase */ def afterPhase(phase: Phase): Unit /** called when the compile run completes */ diff --git a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala index 641526a1de48..b8d5486de873 100644 --- a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala +++ b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,6 +16,7 @@ import java.util.concurrent.ThreadPoolExecutor.AbortPolicy import java.util.concurrent._ import java.util.concurrent.atomic.AtomicInteger +import scala.annotation._ import scala.tools.nsc.{Global, Phase} sealed trait ThreadPoolFactory { @@ -44,7 +45,7 @@ object ThreadPoolFactory { private def childGroup(name: String) = new ThreadGroup(baseGroup, name) // Invoked when a new `Worker` is created, see `CommonThreadFactory.newThread` - protected def wrapWorker(worker: Runnable, shortId: String): Runnable = worker + protected def wrapWorker(worker: Runnable, @unused shortId: String): Runnable = worker protected final class CommonThreadFactory( shortId: String, @@ -151,4 +152,4 @@ object ThreadPoolFactory { } } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index e0f7e1c3f576..490e3d505b03 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,21 +15,22 @@ package tools.nsc package reporters import java.io.{BufferedReader, PrintWriter} -import scala.reflect.internal.util.Position +import scala.reflect.internal.util.{CodeAction, Position} /** This class implements a Reporter that displays messages on a text console. 
*/ class ConsoleReporter(val settings: Settings, val reader: BufferedReader, val writer: PrintWriter, val echoWriter: PrintWriter) extends FilteringReporter with PrintReporter { def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true), new PrintWriter(Console.out, true)) def this(settings: Settings, reader: BufferedReader, writer: PrintWriter) = this(settings, reader, writer, writer) - def doReport(pos: Position, msg: String, severity: Severity): Unit = display(pos, msg, severity) + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = display(pos, msg, severity) override def finish(): Unit = { - import reflect.internal.util.StringOps.{countElementsAsString => countAs} - if (!settings.nowarn && hasWarnings) - echo(s"${countAs(warningCount, WARNING.toString.toLowerCase)} found") + import reflect.internal.util.StringOps.countElementsAsString + if (hasWarnings && !settings.nowarn.value) + writer.println(countElementsAsString(warningCount, WARNING.toString.toLowerCase)) if (hasErrors) - echo(s"${countAs(errorCount, ERROR.toString.toLowerCase)} found") + writer.println(countElementsAsString(errorCount, ERROR.toString.toLowerCase)) + writer.flush() super.finish() } } diff --git a/src/compiler/scala/tools/nsc/reporters/ForwardingReporter.scala b/src/compiler/scala/tools/nsc/reporters/ForwardingReporter.scala index bca541105a5c..1529893b39e1 100644 --- a/src/compiler/scala/tools/nsc/reporters/ForwardingReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ForwardingReporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,7 +12,7 @@ package scala.tools.nsc.reporters import scala.reflect.internal.settings.MutableSettings -import scala.reflect.internal.util.Position +import scala.reflect.internal.util.{CodeAction, Position} import scala.tools.nsc.Settings @@ -20,7 +20,7 @@ import scala.tools.nsc.Settings * customize error reporting. * {{{ * val myReporter = new ForwardingReporter(global.reporter) { - * override def doReport(pos: Position, msg: String, severity: Severity): Unit = { ... } + * override def doReport(pos: Position, msg: String, severity: Severity, actions: List[Action]): Unit = { ... } * } * global.reporter = myReporter * }}} @@ -28,7 +28,8 @@ import scala.tools.nsc.Settings class ForwardingReporter(delegate: FilteringReporter) extends FilteringReporter { def settings: Settings = delegate.settings - def doReport(pos: Position, msg: String, severity: Severity): Unit = delegate.doReport(pos, msg, severity) + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = + delegate.doReport(pos, msg, severity, actions) override def filter(pos: Position, msg: String, severity: Severity): Int = delegate.filter(pos, msg, severity) @@ -56,7 +57,8 @@ class ForwardingReporter(delegate: FilteringReporter) extends FilteringReporter * maxerrs and do position filtering. 
*/ class MakeFilteringForwardingReporter(delegate: Reporter, val settings: Settings) extends FilteringReporter { - def doReport(pos: Position, msg: String, severity: Severity): Unit = delegate.nonProtectedInfo0(pos, msg, severity) + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = + delegate.doReport(pos, msg, severity, actions) override def increment(severity: Severity): Unit = delegate.increment(severity) diff --git a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala index b59a0444d134..73af7d1ee729 100644 --- a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,11 +12,11 @@ package scala.tools.nsc.reporters -import scala.reflect.internal.util.Position +import scala.reflect.internal.util.{CodeAction, Position} import scala.tools.nsc.Settings /** A reporter that ignores reports. */ class NoReporter(val settings: Settings) extends FilteringReporter { - def doReport(pos: Position, msg: String, severity: Severity): Unit = () + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = () } diff --git a/src/compiler/scala/tools/nsc/reporters/PrintReporter.scala b/src/compiler/scala/tools/nsc/reporters/PrintReporter.scala index 163c8b72f2c0..af0306ecef2b 100644 --- a/src/compiler/scala/tools/nsc/reporters/PrintReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/PrintReporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -33,17 +33,17 @@ trait PrintReporter extends internal.Reporter { /** Whether a short file name should be displayed before errors */ var shortname: Boolean = false - protected def clabel(severity: Severity): String = severity match { + private def clabel(severity: Severity): String = severity match { case internal.Reporter.ERROR => "error: " case internal.Reporter.WARNING => "warning: " case _ => "" } /** Prints the warning or error message. */ - protected def printMessage(msg: String): Unit = { + private def printMessage(msg: String): Unit = { writer.println(trimTrailing(msg)) writer.flush() - if (settings.prompt) displayPrompt() + if (settings.prompt.value) displayPrompt() } /** Prints the message to the echoWriter, which is usually stdout. */ @@ -54,7 +54,7 @@ trait PrintReporter extends internal.Reporter { /** Format a message and emit it. */ protected def display(pos: Position, msg: String, severity: Severity): Unit = { - val text = formatMessage(pos, s"${clabel(severity)}${Reporter.explanation(msg)}", shortname) + val text = formatMessage(pos, s"${clabel(severity)}${msg}", shortname) severity match { case internal.Reporter.INFO => echoMessage(text) case _ => printMessage(text) diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 15be43940c9e..a8bcb6e93305 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,6 +13,7 @@ package scala.tools.nsc package reporters +import scala.annotation.{nowarn, unused} import scala.collection.mutable import scala.reflect.internal import scala.reflect.internal.util.{Position, ScalaClassLoader} @@ -23,12 +24,8 @@ import scala.reflect.internal.util.{Position, ScalaClassLoader} */ abstract class Reporter extends internal.Reporter { // used by sbt - @deprecated("Use echo, as internal.Reporter does not support unforced info", since="2.12.13") - final def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force = true) - - // allow calling info0 in MakeFilteringForwardingReporter - private[reporters] final def nonProtectedInfo0(pos: Position, msg: String, severity: Severity): Unit = - info0(pos, msg, severity, force = true) + @deprecated("Use echo, as internal.Reporter does not support unforced info", since="2.13.0") + final def info(pos: Position, msg: String, @unused force: Boolean): Unit = info0(pos, msg, INFO, force = true) // overridden by sbt, IDE -- should not be in the reporting interface // (IDE receives comments from ScaladocAnalyzer using this hook method) @@ -58,38 +55,30 @@ object Reporter { val loader = new ClassLoader(getClass.getClassLoader) with ScalaClassLoader loader.create[FilteringReporter](settings.reporter.value, settings.errorFn)(settings) } - - /** Take the message with its explanation, if it has one. */ - def explanation(msg: String): String = splitting(msg, explaining = true) - - /** Take the message without its explanation, if it has one. */ - def stripExplanation(msg: String): String = splitting(msg, explaining = false) - - /** Split a message into a prefix and an optional explanation that follows a line starting with `"----"`. 
*/ - private def splitting(msg: String, explaining: Boolean): String = - if (msg != null && msg.indexOf("\n----") > 0) { - val (err, exp) = msg.linesIterator.span(!_.startsWith("----")) - if (explaining) (err ++ exp.drop(1)).mkString("\n") else err.mkString("\n") - } else { - msg - } } /** The reporter used in a Global instance. * * It filters messages based on - * - settings.nowarn * - settings.maxerrs / settings.maxwarns * - positions (only one error at a position, no duplicate messages on a position) */ abstract class FilteringReporter extends Reporter { def settings: Settings + @deprecatedOverriding("override the `doReport` overload (defined in reflect.internal.Reporter) instead", "2.13.12") + @deprecated("use the `doReport` overload instead", "2.13.12") + def doReport(pos: Position, msg: String, severity: Severity): Unit = doReport(pos, msg, severity, Nil) + // this should be the abstract method all the way up in reflect.internal.Reporter, but sbt compat - def doReport(pos: Position, msg: String, severity: Severity): Unit + // the abstract override is commented-out to maintain binary compatibility for FilteringReporter subclasses + // override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit - @deprecatedOverriding("override doReport instead", "2.12.13") // overridden in scalameta for example - protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = doReport(pos, msg, severity) + @deprecatedOverriding("override `doReport` instead", "2.13.1") // overridden in scalameta for example + @nowarn("cat=deprecation") + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = + // call the deprecated overload to support existing FilteringReporter subclasses (they override that overload) + doReport(pos, msg, severity) private lazy val positions = mutable.Map[Position, Severity]() withDefaultValue INFO private lazy val messages = mutable.Map[Position, 
List[String]]() withDefaultValue Nil @@ -106,8 +95,8 @@ abstract class FilteringReporter extends Reporter { } // Invoked when an error or warning is filtered by position. @inline def suppress = { - if (settings.prompt) doReport(pos, msg, severity) - else if (settings.isDebug) doReport(pos, s"[ suppressed ] $msg", severity) + if (settings.prompt.value) doReport(pos, msg, severity, Nil) + else if (settings.isDebug) doReport(pos, s"[ suppressed ] $msg", severity, Nil) Suppress } if (!duplicateOk(pos, severity, msg)) suppress else if (!maxOk) Count else Display @@ -134,15 +123,20 @@ abstract class FilteringReporter extends Reporter { } if (show) { positions(fpos) = severity - messages(fpos) ::= Reporter.stripExplanation(msg) // ignore explanatory suffix for suppressing duplicates + messages(fpos) ::= stripQuickfixable(msg) } show } } + private def stripQuickfixable(msg: String): String = { + val i = msg.indexOf(" [quickfixable]") + if (i > 0) msg.substring(0, i) else msg + } + override def reset(): Unit = { super.reset() positions.clear() messages.clear() - } + } } diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala index 77027a99b55b..1fa79cd19a93 100644 --- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,23 +16,25 @@ package reporters import scala.annotation.unchecked.uncheckedStable import scala.collection.mutable import scala.reflect.internal.Reporter.Severity -import scala.reflect.internal.util.Position +import scala.reflect.internal.util.{CodeAction, Position} /** This class implements a Reporter that stores its reports in the set `infos`. 
*/ class StoreReporter(val settings: Settings) extends FilteringReporter { - @deprecated("use the constructor with a `Settings` parameter", "2.12.13") + @deprecated("use the constructor with a `Settings` parameter", since = "2.13.1") def this() = this(new Settings()) - @deprecated("use StoreReporter.Info") // used in scalameta for example + @deprecated("use StoreReporter.Info", since = "2.13.0") // used in scalameta for example type Info = StoreReporter.Info - @deprecated("use StoreReporter.Info") + @deprecated("use StoreReporter.Info", since = "2.13.0") @uncheckedStable def Info: StoreReporter.Info.type = StoreReporter.Info val infos = new mutable.LinkedHashSet[StoreReporter.Info] - def doReport(pos: Position, msg: String, severity: Severity): Unit = - infos += StoreReporter.Info(pos, msg, severity) + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = { + val info = StoreReporter.Info(pos, msg, severity, actions) + infos += info + } override def reset(): Unit = { super.reset() @@ -40,7 +42,7 @@ class StoreReporter(val settings: Settings) extends FilteringReporter { } } object StoreReporter { - case class Info(pos: Position, msg: String, severity: Severity) { - override def toString: String = s"pos: $pos $msg $severity" + case class Info(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]) { + override def toString: String = s"pos: $pos $msg $severity${if (actions.isEmpty) "" else actions}" } } diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index f4d37fdacacd..7f56cdcb2144 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,7 +20,7 @@ package settings trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { type Setting <: AbsSetting // Fix to the concrete Setting type - type ResultOfTryToSet // List[String] in mutable, (Settings, List[String]) in immutable + type ResultOfTryToSet // List[String] in MutableSettings def errorFn: String => Unit protected def allSettings: scala.collection.Map[String, Setting] @@ -66,14 +66,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { /* For tools which need to populate lists of available choices */ def choices : List[String] = Nil - /** In mutable Settings, these return the same object with a var set. - * In immutable, of course they will return a new object, which means - * we can't use "this.type", at least not in a non-casty manner, which - * is unfortunate because we lose type information without it. - * - * ...but now they're this.type because of #3462. The immutable - * side doesn't exist yet anyway. - */ def withAbbreviation(name: String): this.type def withHelpSyntax(help: String): this.type def withDeprecationMessage(msg: String): this.type @@ -92,8 +84,8 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { this } - /** Issue error and return */ - def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x } + /** Issue error and return the value. */ + def errorAndValue[A](msg: String, x: A): A = { errorFn(msg) ; x } /** If this method returns true, print the [[help]] message and exit. */ def isHelping: Boolean = false @@ -101,19 +93,17 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { /** The help message to be printed if [[isHelping]]. */ def help: String = "" - /** After correct Setting has been selected, tryToSet is called with the - * remainder of the command line. It consumes any applicable arguments and - * returns the unconsumed ones. 
+ /** Setting is presented the remaining command line arguments. + * It should consume any applicable args and return the rest, + * or `None` on error. */ protected[nsc] def tryToSet(args: List[String]): Option[ResultOfTryToSet] - /** Commands which can take lists of arguments in form -Xfoo:bar,baz override - * this method and accept them as a list. It returns List[String] for - * consistency with tryToSet, and should return its incoming arguments - * unmodified on failure, and Nil on success. + /** Setting is presented arguments in form -Xfoo:bar,baz. + * It should consume all the arguments and return an empty list, + * or `None` on error. Unconsumed args may error. */ - protected[nsc] def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] = - errorAndValue("'%s' does not accept multiple arguments" format name, None) + protected[nsc] def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] /** Attempt to set from a properties file style property value. * Currently used by Eclipse SDT only. @@ -121,13 +111,17 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { */ def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil) - /** These categorizations are so the help output shows -X and -P among - * the standard options and -Y among the advanced options. + /** Standard options are shown on the `-help` output, + * advanced on `-X`, private on `-Y`, warning on `-W`, verbose on `-V`. + * + * The single char options themselves, including `-P`, are explained on `-help`. + * Additionally, `-Werror` is on `-help` and `-Xlint` on `-W`. */ - def isAdvanced = name match { case "-Y" => true ; case "-X" => false ; case _ => name startsWith "-X" } - def isPrivate = name match { case "-Y" => false ; case _ => name startsWith "-Y" } - def isStandard = !isAdvanced && !isPrivate - def isForDebug = name endsWith "-debug" // by convention, i.e. 
-Ytyper-debug + def isAdvanced = name.startsWith("-X") && name != "-X" + def isPrivate = name.startsWith("-Y") && name != "-Y" + def isVerbose = name.startsWith("-V") && name != "-V" + def isWarning = name.startsWith("-W") && name != "-W" || name == "-Xlint" + def isStandard = !isAdvanced && !isPrivate && !isWarning && !isVerbose || name == "-Werror" def isDeprecated = deprecationMessage.isDefined def compare(that: Setting): Int = name compare that.name diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala deleted file mode 100644 index 7b4569ed4299..000000000000 --- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools -package nsc -package settings - -import util.ClassPath -import io.{ Path, AbstractFile } - -class FscSettings(error: String => Unit, pathFactory: PathFactory = DefaultPathFactory) extends Settings(error, pathFactory) { - outer => - - locally { - disable(prompt) - disable(resident) - } - - val currentDir = StringSetting ("-current-dir", "path", "Base directory for resolving relative paths", "").internalOnly() - val reset = BooleanSetting("-reset", "Reset compile server caches") - val shutdown = BooleanSetting("-shutdown", "Shutdown compile server") - val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "") - val port = IntSetting ("-port", "Search and start compile server in given port only", - 0, Some((0, Int.MaxValue)), (_: String) => None) - val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket") - val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)", - 30, Some((0, Int.MaxValue)), (_: String) => None) - - // For improved help output, separating fsc options from the others. - def fscSpecific = Set[Settings#Setting]( - currentDir, reset, shutdown, server, port, preferIPv4, idleMins - ) - val isFscSpecific: String => Boolean = fscSpecific map (_.name) - - /** If a setting (other than a PathSetting) represents a path or paths. - * For use in absolutization. - */ - private def holdsPath = Set[Settings#Setting](d, dependencyfile, pluginsDir) - - override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = { - val (r, args) = super.processArguments(arguments, processAll) - // we need to ensure the files specified with relative locations are absolutized based on the currentDir - (r, args map {a => absolutizePath(a)}) - } - - /** - * Take an individual path and if it's not absolute turns it into an absolute path based on currentDir. 
- * If it's already absolute then it's left alone. - */ - private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path - - /** All user set settings rewritten with absolute paths based on currentDir */ - def absolutize() { - userSetSettings foreach { - case p: OutputSetting => outputDirs.setSingleOutput(AbstractFile.getDirectory(absolutizePath(p.value))) - case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath) - case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value) - case _ => () - } - } -} diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 160fd68b1a5a..1cd44c846d15 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,19 +16,20 @@ package scala.tools package nsc package settings -import io.{ AbstractFile, Path, PlainFile, VirtualDirectory } -import scala.collection.generic.Clearable +import io.{AbstractFile, Path, PlainFile, VirtualDirectory} +import scala.annotation.tailrec +import scala.collection.mutable.Clearable import scala.io.Source -import scala.reflect.internal.util.{ SomeOfNil, StringOps } -import scala.reflect.{ ClassTag, classTag } +import scala.reflect.internal.util.{SomeOfNil, StringOps} +import scala.reflect.{ClassTag, classTag} +import scala.sys.process.{Parser => CommandLineParser} /** A mutable Settings object. 
*/ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) extends scala.reflect.internal.settings.MutableSettings with AbsSettings - with ScalaSettings - with Mutable { + with ScalaSettings { def this(errorFn: String => Unit) = this(errorFn, DefaultPathFactory) type ResultOfTryToSet = List[String] @@ -38,7 +39,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) settings } - def copyInto(settings: MutableSettings) { + def copyInto(settings: MutableSettings): Unit = { allSettings.valuesIterator foreach { thisSetting => val otherSetting = settings.allSettings.get(thisSetting.name) otherSetting foreach { otherSetting => @@ -67,28 +68,22 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) * Returns (success, List of unprocessed arguments) */ def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = { + @tailrec def loop(args: List[String], residualArgs: List[String]): (Boolean, List[String]) = args match { - case Nil => - (checkDependencies, residualArgs) - case "--" :: xs => - (checkDependencies, xs) + case Nil => (checkDependencies, residualArgs) + case "--" :: xs => (checkDependencies, xs) // discard empties, sometimes they appear because of ant or etc. // but discard carefully, because an empty string is valid as an argument // to an option, e.g. -cp "" . So we discard them only when they appear // where an option should be, not where an argument to an option should be. 
- case "" :: xs => - loop(xs, residualArgs) - case x :: xs => - if (x startsWith "-") { - parseParams(args) match { - case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args) - case newArgs => loop(newArgs, residualArgs) - } + case "" :: xs => loop(xs, residualArgs) + case (x @ Optionlike()) :: _ => + parseParams(args) match { + case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args) + case newArgs => loop(newArgs, residualArgs) } - else if (processAll) - loop(xs, residualArgs :+ x) - else - (checkDependencies, args) + case x :: xs if processAll => loop(xs, residualArgs :+ x) + case _ => (checkDependencies, args) } loop(arguments, Nil) } @@ -117,7 +112,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) /** Split the given line into parameters. */ - def splitParams(line: String) = cmd.CommandLineParser.tokenize(line, errorFn) + def splitParams(line: String) = CommandLineParser.tokenize(line, errorFn) /** Returns any unprocessed arguments. 
*/ @@ -128,11 +123,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) args: List[String], setter: (Setting) => (List[String] => Option[List[String]]) ): Option[List[String]] = - lookupSetting(cmd) match { - //case None => errorFn("Parameter '" + cmd + "' is not recognised by Scalac.") ; None - case None => None //error reported in processArguments - case Some(cmd) => setter(cmd)(args) - } + lookupSetting(cmd).flatMap(setter(_)(args)) // -Xfoo: clears Clearables def clearIfExists(cmd: String): Option[List[String]] = lookupSetting(cmd) match { @@ -145,52 +136,38 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) // the entire arg is consumed, so return None for failure // any non-Nil return value means failure and we return s unmodified def parseColonArg(s: String): Option[List[String]] = - if (s endsWith ":") { + if (s endsWith ":") clearIfExists(s.init) - } else { - for { - (p, args) <- StringOps.splitWhere(s, _ == ':', doDropIndex = true) - rest <- tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _) - } yield rest - } + else + StringOps.splitWhere(s, _ == ':', doDropIndex = true).flatMap { + // p:arg:a,b,c is taken as arg with selections a,b,c for a multichoice setting + case (p, args) if args.contains(":") && lookupSetting(p).map(_.isInstanceOf[MultiChoiceSetting[_]]).getOrElse(false) => tryToSetIfExists(p, List(args), (s: Setting) => s.tryToSetColon(_)) + case (p, args) => tryToSetIfExists(p, args.split(",").toList, (s: Setting) => s.tryToSetColon(_)) + } // if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo") def parseNormalArg(p: String, args: List[String]): Option[List[String]] = - tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _) + tryToSetIfExists(p, args, (s: Setting) => s.tryToSet(_)) args match { - case Nil => Nil - case arg :: rest => - if (!arg.startsWith("-")) { - errorFn("Argument '" + arg + "' does not start with '-'.") - args - } - else if (arg == 
"-") { - errorFn("'-' is not a valid argument.") - args - } - else { - // we dispatch differently based on the appearance of p: - // 1) If it matches a prefix setting it is sent there directly. - // 2) If it has a : it is presumed to be -Xfoo:bar,baz - // 3) Otherwise, the whole string should be a command name - // - // Internally we use Option[List[String]] to discover error, - // but the outside expects our arguments back unchanged on failure - val prefix = prefixSettings find (_ respondsTo arg) - if (prefix.isDefined) { - prefix.get tryToSet args - rest - } - else if (arg contains ":") parseColonArg(arg) match { - case Some(_) => rest - case None => args - } - else parseNormalArg(arg, rest) match { - case Some(xs) => xs - case None => args - } + case Nil => Nil + case "-" :: _ => errorFn("'-' is not a valid argument.") ; args + case (arg @ Optionlike()) :: rest => + // we dispatch differently based on the appearance of p: + // 1) If it matches a prefix setting it is sent there directly. + // 2) If it has a : it is presumed to be -Xfoo:bar,baz + // 3) Otherwise, the whole string should be a command name + // + // Internally we use Option[List[String]] to discover error, + // but the outside expects our arguments back unchanged on failure + val prefix = prefixSettings.find(_ respondsTo arg) + prefix.map { setting => setting.tryToSet(args); rest } + .orElse { + if (arg contains ":") parseColonArg(arg).map(_ => rest) + else parseNormalArg(arg, rest) } + .getOrElse(args) + case arg :: _ => errorFn(s"Argument '$arg' does not start with '-'.") ; args } } @@ -205,7 +182,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) * The class loader for `T` should provide resources `app.class.path` * and `boot.class.path`. 
These resources should contain the application * and boot classpaths in the same form as would be passed on the command line.*/ - def embeddedDefaults(loader: ClassLoader) { + def embeddedDefaults(loader: ClassLoader): Unit = { explicitParentLoader = Option(loader) // for the Interpreter parentClassLoader getClasspath("app", loader) foreach { classpath.value = _ } getClasspath("boot", loader) foreach { bootclasspath append _ } @@ -217,9 +194,10 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) /** Retrieves the contents of resource "${id}.class.path" from `loader` * (wrapped in Some) or None if the resource does not exist.*/ private def getClasspath(id: String, loader: ClassLoader): Option[String] = - Option(loader).flatMap(ld => Option(ld.getResource(id + ".class.path"))).map { cp => - Source.fromURL(cp).mkString - } + for { + ld <- Option(loader) + r <- Option(ld.getResource(s"$id.class.path")) + } yield Source.fromURL(r).mkString // a wrapper for all Setting creators to keep our list up to date private def add[T <: Setting](s: T): T = { @@ -234,13 +212,13 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) add(new IntSetting(name, descr, default, range, parser)) def MultiStringSetting(name: String, arg: String, descr: String, default: List[String] = Nil, helpText: Option[String] = None, prepend: Boolean = false) = add(new MultiStringSetting(name, arg, descr, default, helpText, prepend)) - def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]] = None) = - add(new MultiChoiceSetting[E](name, helpArg, descr, domain, default)) - def OutputSetting(outputDirs: OutputDirs, default: String) = { outputDirs.setSingleOutput(default); add(new OutputSetting(default)) } + def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]] = None, helpText: 
Option[String] = None) = + add(new MultiChoiceSetting[E](name, helpArg, descr, domain, default, helpText)) + def OutputSetting(default: String) = add(new OutputSetting(default)) def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default)) - def StringSetting(name: String, arg: String, descr: String, default: String, helpText: Option[String] = None) = add(new StringSetting(name, arg, descr, default, helpText)) - def ScalaVersionSetting(name: String, arg: String, descr: String, initial: ScalaVersion, default: Option[ScalaVersion] = None) = - add(new ScalaVersionSetting(name, arg, descr, initial, default)) + def StringSetting(name: String, arg: String, descr: String, default: String = "", helpText: Option[String] = None) = add(new StringSetting(name, arg, descr, default, helpText)) + def ScalaVersionSetting(name: String, arg: String, descr: String, initial: ScalaVersion, default: Option[ScalaVersion] = None, helpText: Option[String] = None) = + add(new ScalaVersionSetting(name, arg, descr, initial, default, helpText)) def PathSetting(name: String, descr: String, default: String): PathSetting = { val prepend = StringSetting(name + "/p", "", "", "").internalOnly() val append = StringSetting(name + "/a", "", "", "").internalOnly() @@ -286,19 +264,19 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) else throw new FatalError(s"$name does not exist or is not a directory") } - def getSingleOutput: Option[AbstractFile] = singleOutDir - /** Set the single output directory. From now on, all files will * be dumped in there, regardless of previous calls to 'add'. */ def setSingleOutput(outDir: String): Unit = setSingleOutput(checkDirOrJar(outDir)) + def getSingleOutput: Option[AbstractFile] = singleOutDir + /** Set the single output directory. From now on, all files will * be dumped in there, regardless of previous calls to 'add'. 
*/ def setSingleOutput(dir: AbstractFile): Unit = singleOutDir = Some(dir) - def add(src: AbstractFile, dst: AbstractFile) { + def add(src: AbstractFile, dst: AbstractFile): Unit = { singleOutDir = None outputDirs ::= ((src, dst)) } @@ -308,14 +286,11 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) /** Return the output directory for the given file. */ - def outputDirFor(src: AbstractFile): AbstractFile = { - def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = src.path.startsWith(srcDir.path) - - singleOutDir.getOrElse(outputs.find((isBelow _).tupled) match { + def outputDirFor(src: AbstractFile): AbstractFile = + singleOutDir.getOrElse(outputs.find { case (srcDir, _) => src.path.startsWith(srcDir.path) } match { case Some((_, d)) => d case _ => throw new FatalError(s"Could not find an output directory for ${src.path} in ${outputs}") }) - } /** Return the source file path(s) which correspond to the given * classfile path and SourceFile attribute value, subject to the @@ -334,41 +309,35 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) * output directory there will be two or more candidate source file * paths. 
*/ - def srcFilesFor(classFile : AbstractFile, srcPath : String) : List[AbstractFile] = { - def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = classFile.path.startsWith(outDir.path) - + def srcFilesFor(classFile : AbstractFile, srcPath : String) : List[AbstractFile] = singleOutDir match { - case Some(d) => - d match { - case _: VirtualDirectory | _: io.ZipArchive => Nil - case _ => List(d.lookupPathUnchecked(srcPath, directory = false)) - } + case Some(_: VirtualDirectory | _: io.ZipArchive) => Nil + case Some(d) => List(d.lookupPathUnchecked(srcPath, directory = false)) case None => - (outputs filter (isBelow _).tupled) match { + outputs.filter { case (_, outDir) => classFile.path.startsWith(outDir.path) } match { case Nil => Nil case matches => matches.map(_._1.lookupPathUnchecked(srcPath, directory = false)) } } - } } /** A base class for settings of all types. * Subclasses each define a `value` field of the appropriate type. */ - abstract class Setting(val name: String, val helpDescription: String) extends AbsSetting with SettingValue with Mutable { + abstract class Setting(val name: String, val helpDescription: String) extends AbsSetting with SettingValue { /** Will be called after this Setting is set for any extra work. 
*/ - private var _postSetHook: this.type => Unit = (x: this.type) => () + private[this] var _postSetHook: this.type => Unit = (_: this.type) => () override def postSetHook(): Unit = _postSetHook(this) def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this } /** The syntax defining this setting in a help string */ - private var _helpSyntax = name + private[this] var _helpSyntax = name override def helpSyntax: String = _helpSyntax def withHelpSyntax(s: String): this.type = { _helpSyntax = s ; this } /** Abbreviations for this setting */ - private var _abbreviations: List[String] = Nil + private[this] var _abbreviations: List[String] = Nil override def abbreviations = _abbreviations def withAbbreviation(s: String): this.type = { _abbreviations ++= List(s) ; this } @@ -377,9 +346,11 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) override def dependencies = dependency.toList def dependsOn(s: Setting, value: String): this.type = { dependency = Some((s, value)); this } - private var _deprecationMessage: Option[String] = None + private[this] var _deprecationMessage: Option[String] = None override def deprecationMessage = _deprecationMessage def withDeprecationMessage(msg: String): this.type = { _deprecationMessage = Some(msg) ; this } + + def reset(): Unit } /** A setting represented by an integer. 
*/ @@ -420,20 +391,23 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) // Ensure that the default value is actually valid assert(isInputValid(default)) - def parseArgument(x: String): Option[Int] = { - parser(x) orElse { - try { Some(x.toInt) } - catch { case _: NumberFormatException => None } - } - } + def parseArgument(x: String): Option[Int] = parser(x) orElse x.toIntOption def errorMsg() = errorFn(s"invalid setting for $name $getValidText") - def tryToSet(args: List[String]) = - if (args.isEmpty) errorAndValue("missing argument", None) - else parseArgument(args.head) match { - case Some(i) => value = i ; Some(args.tail) - case None => errorMsg() ; None + def tryToSet(args: List[String]): Option[ResultOfTryToSet] = + args match { + case h :: rest => + parseArgument(h) match { + case Some(i) => value = i; Some(rest) + case None => errorMsg(); None + } + case Nil => errorAndValue("missing argument", None) + } + def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] = + args match { + case Nil | _ :: Nil => tryToSet(args) + case _ => errorAndValue("too many arguments", None) } def unparse: List[String] = @@ -441,6 +415,8 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) else List(name, value.toString) withHelpSyntax(s"$name ") + + override def reset() = v = default } /** A setting that is a boolean flag, with default as specified. */ @@ -465,6 +441,10 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) } else errorAndValue(s"'$x' is not a valid choice for '$name'", None) case _ => errorAndValue(s"'$name' accepts only one boolean value", None) } + override def reset() = { + v = default + setByUser = false + } } /** A special setting for accumulating arguments like -Dfoo=bar. 
*/ @@ -483,8 +463,10 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) case _ => None } + def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] = errorAndValue(s"bad argument for $name", None) override def respondsTo(token: String) = token startsWith prefix def unparse: List[String] = value + override def reset() = v = Nil } /** A setting represented by a string, (`default` unless set) */ @@ -502,19 +484,21 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) withHelpSyntax(name + " <" + arg + ">") def tryToSet(args: List[String]) = args match { - case Nil => errorAndValue("missing argument", None) - case x :: xs => - if (helpText.nonEmpty && x == "help") - sawHelp = true - else - value = x - Some(xs) + case Nil | Optionlike() :: _ => errorAndValue(s"missing argument for $name", None) + case "help" :: rest if helpText.nonEmpty => sawHelp = true ; Some(rest) + case h :: rest => value = h ; Some(rest) } + def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] = + args match { + case Nil | _ :: Nil => tryToSet(args) + case _ => errorAndValue("too many arguments", None) + } def unparse: List[String] = if (value == default) Nil else List(name, value) override def isHelping: Boolean = sawHelp override def help = helpText.get + override def reset() = v = default } /** A setting represented by a Scala version. @@ -526,14 +510,16 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) val arg: String, descr: String, val initial: ScalaVersion, - default: Option[ScalaVersion]) + default: Option[ScalaVersion], + helpText: Option[String]) extends Setting(name, descr) { type T = ScalaVersion protected var v: T = initial + protected var sawHelp: Boolean = false // This method is invoked if there are no colonated args. In this case the default value is // used. No arguments are consumed. 
- override def tryToSet(args: List[String]) = { + def tryToSet(args: List[String]) = { default match { case Some(d) => value = d case None => errorFn(s"$name requires an argument, the syntax is $helpSyntax") @@ -541,14 +527,21 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) Some(args) } - override def tryToSetColon(args: List[String]) = args match { + def tryToSetColon(args: List[String]) = args match { + case "help" :: rest if helpText.nonEmpty => sawHelp = true; Some(rest) case x :: xs => value = ScalaVersion(x, errorFn); Some(xs) case nil => Some(nil) } def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}") + override def isHelping: Boolean = sawHelp + + override def help = helpText.get + withHelpSyntax(s"${name}:<${arg}>") + + override def reset() = v = initial } class PathSetting private[nsc]( @@ -568,10 +561,11 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) super.value, appendPath.value ) + override def reset() = () } /** Set the output directory for all sources. */ - class OutputSetting private[nsc](default: String) extends StringSetting("-d", "directory|jar", "destination for generated classfiles.", default, None) + class OutputSetting private[nsc](default: String) extends StringSetting("-d", "directory|jar", "Destination for generated artifacts.", default, None) /** * Each [[MultiChoiceSetting]] takes a MultiChoiceEnumeration as domain. The enumeration may @@ -590,7 +584,10 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) * not present in the multiChoiceSetting.value set, only their expansion. 
*/ abstract class MultiChoiceEnumeration extends Enumeration { - case class Choice(name: String, help: String = "", expandsTo: List[Choice] = Nil) extends Val(name) + case class Choice(name: String, help: String = "", expandsTo: List[Choice] = Nil, requiresSelections: Boolean = false) extends Val(name) { + var selections: List[String] = Nil + } + def wildcardChoices: ValueSet = values.filter { case c: Choice => c.expandsTo.isEmpty case _ => true } } /** @@ -619,13 +616,14 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) val helpArg: String, descr: String, val domain: E, - val default: Option[List[String]] - ) extends Setting(name, s"$descr: `_' for all, `$name:help' to list choices.") with Clearable { + val default: Option[List[String]], + val helpText: Option[String] + ) extends Setting(name, descr) with Clearable { - withHelpSyntax(s"$name:<_,$helpArg,-$helpArg>") + withHelpSyntax(s"$name:<${helpArg}s>") object ChoiceOrVal { - def unapply(a: domain.Value): Option[(String, String, List[domain.Choice])] = a match { + def unapply(a: domain.Value): Some[(String, String, List[domain.Choice])] = a match { case c: domain.Choice => Some((c.name, c.help, c.expandsTo)) case v: domain.Value => Some((v.toString, "", Nil)) } @@ -644,10 +642,11 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) private var sawAll = false private def badChoice(s: String) = errorFn(s"'$s' is not a valid choice for '$name'") - private def isChoice(s: String) = (s == "_") || (choices contains pos(s)) + private def isChoice(s: String) = s == "_" || choices.contains(pos(s)) + private def choiceOf(s: String): domain.Choice = domain.withName(pos(s)).asInstanceOf[domain.Choice] - private def pos(s: String) = s stripPrefix "-" - private def isPos(s: String) = !(s startsWith "-") + private def pos(s: String) = s.stripPrefix("-") + private def isPos(s: String) = !s.startsWith("-") override val choices: List[String] = domain.values.toList 
map { case ChoiceOrVal(name, _, _) => name @@ -659,14 +658,14 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) case _ => "" } - /** (Re)compute from current yeas, nays, wildcard status. */ + /** (Re)compute from current yeas, nays, wildcard status. Assign option value. */ def compute() = { def simple(v: domain.Value) = v match { - case ChoiceOrVal(_, _, Nil) => true - case _ => false + case c: domain.Choice => c.expandsTo.isEmpty + case _ => true } - /** + /* * Expand an expanding option, if necessary recursively. Expanding options are not included in * the result (consistent with "_", which is not in `value` either). * @@ -678,8 +677,8 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) } // yeas from _ or expansions are weak: an explicit nay will disable them - val weakYeas = if (sawAll) domain.values filter simple else expand(yeas filterNot simple) - value = (yeas filter simple) | (weakYeas &~ nays) + val weakYeas = if (sawAll) domain.wildcardChoices else expand(yeas.filterNot(simple)) + value = yeas.filter(simple) | (weakYeas &~ nays) } /** Add a named choice to the multichoice value. */ @@ -690,10 +689,10 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) sawAll = true compute() case _ if isPos(arg) => - yeas += domain withName arg + yeas += domain.withName(arg) compute() case _ => - val choice = domain withName pos(arg) + val choice = domain.withName(pos(arg)) choice match { case ChoiceOrVal(_, _, _ :: _) => errorFn(s"'${pos(arg)}' cannot be negated, it enables other arguments") case _ => @@ -702,44 +701,71 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) compute() } + // refine a choice with selections. 
-opt:inline:** + def add(arg: String, selections: List[String]): Unit = { + add(arg) + domain.withName(arg).asInstanceOf[domain.Choice].selections ++= selections + } + def tryToSet(args: List[String]) = tryToSetArgs(args, halting = true) - override def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false) + def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false) override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide /** Try to set args, handling "help" and default. * The "halting" parameter means args were "-option a b c -else" so halt * on "-else" or other non-choice. Otherwise, args were "-option:a,b,c,d", * so process all and report non-choices as errors. + * + * If a choice is seen as colonated, then set the choice selections: + * "-option:choice:selection1,selection2" + * * @param args args to process * @param halting stop on non-arg */ private def tryToSetArgs(args: List[String], halting: Boolean) = { - val added = collection.mutable.ListBuffer.empty[String] - - def tryArg(arg: String) = arg match { - case "help" => sawHelp = true - case s if isChoice(s) => added += s // this case also adds "_" - case s => badChoice(s) - } - def loop(args: List[String]): List[String] = args match { - case arg :: _ if halting && (!isPos(arg) || !isChoice(arg)) => args - case arg :: rest => tryArg(arg) ; loop(rest) - case Nil => Nil - } - val rest = loop(args) - - // if no arg consumed, use defaults or error; otherwise, add what they added - if (rest.size == args.size) default match { - case Some(defaults) => defaults foreach add - case None => errorFn(s"'$name' requires an option. 
See '$name:help'.") - } else { - added foreach add + val colonnade = raw"([^:]+):(.*)".r + var count = 0 + val rest = { + @tailrec + def loop(args: List[String]): List[String] = args match { + case "help" :: rest => + sawHelp = true + count += 1 + loop(rest) + case arg :: rest => + val (argx, selections) = arg match { + case colonnade(x, y) => (x, y) + case _ => (arg, "") + } + if (halting && (!isPos(argx) || !isChoice(argx))) + args + else { + if (isChoice(argx)) { + if (selections.nonEmpty) add(argx, selections.split(",").toList) + else if (argx != "_" && isPos(argx) && choiceOf(argx).requiresSelections) errorFn(s"'$argx' requires '$argx:'. See '$name:help'.") + else add(argx) // this case also adds "_" + postSetHook() // support -opt:l:method + } + else + badChoice(argx) + count += 1 + loop(rest) + } + case _ => Nil + } + loop(args) } + // if no arg applied, use defaults or error; otherwise, add what they added + if (count == 0) + default match { + case Some(defaults) => defaults.foreach(add) + case None => errorFn(s"'$name' requires an option. See '$name:help'.") + } Some(rest) } - def contains(choice: domain.Value): Boolean = value contains choice + def contains(choice: domain.Value): Boolean = value.contains(choice) // programmatically. 
def enable(choice: domain.Value): Unit = { nays -= choice ; yeas += choice ; compute() } @@ -762,18 +788,20 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) case _ => default } val orelse = verboseDefault.map(_.mkString(f"%nDefault: ", ", ", f"%n")).getOrElse("") - choices.zipAll(descriptions, "", "").map(describe).mkString(f"${descr}%n", f"%n", orelse) + choices.zipAll(descriptions, "", "").map(describe).mkString(f"${helpText.getOrElse(descr)}%n", f"%n", orelse) } - def clear(): Unit = { + def clear(): Unit = { + domain.values.foreach { case c: domain.Choice => c.selections = Nil ; case _ => } v = domain.ValueSet.empty yeas = domain.ValueSet.empty nays = domain.ValueSet.empty sawAll = false sawHelp = false } - def unparse: List[String] = value.toList map (s => s"$name:$s") + def unparse: List[String] = value.toList.map(s => s"$name:$s") def contains(s: String) = domain.values.find(_.toString == s).exists(value.contains) + override def reset() = clear() } /** A setting that accumulates all strings supplied to it, @@ -793,23 +821,22 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) withHelpSyntax(name + ":<" + arg + ">") - // try to set. halting means halt at first non-arg + // try to set. halting means halt at first non-arg i.e. 
at next option protected def tryToSetArgs(args: List[String], halting: Boolean) = { - def loop(args: List[String]): List[String] = args match { - case arg :: rest => - if (halting && (arg startsWith "-")) args - else { - if (helpText.isDefined && arg == "help") sawHelp = true - else if (prepend) value ::= arg - else value ++= List(arg) - loop(rest) - } - case Nil => Nil + @tailrec + def loop(seen: List[String], args: List[String]): (List[String], List[String]) = args match { + case Optionlike() :: _ if halting => (seen, args) + case "help" :: args if helpText.isDefined => sawHelp = true; loop(seen, args) + case head :: args => loop(head :: seen, args) + case Nil => (seen, Nil) } - Some(loop(if (prepend) args.reverse else args)) + val (seen, rest) = loop(Nil, args) + if (prepend) value = value.prependedAll(seen) + else value = value.appendedAll(seen.reverse) + Some(rest) } def tryToSet(args: List[String]) = tryToSetArgs(args, halting = true) - override def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false) + def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false) override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide def clear(): Unit = (v = Nil) @@ -819,6 +846,11 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) override def isHelping: Boolean = sawHelp override def help = helpText.get + + override def reset() = { + v = default + setByUser = false + } } /** A setting represented by a string in a given set of `choices`, @@ -857,22 +889,24 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) def tryToSet(args: List[String]) = args match { - case Nil => errorAndValue(usageErrorMessage, None) - case arg :: rest => tryToSetColon(List(arg)).map(_ => rest) + case Nil | Optionlike() :: _ => errorAndValue(usageErrorMessage, None) + case arg :: rest => tryToSetColon(List(arg)).map(_ => rest) } - override def 
tryToSetColon(args: List[String]) = args map _preSetHook match { + def tryToSetColon(args: List[String]) = args map _preSetHook match { case Nil => errorAndValue(usageErrorMessage, None) case List("help") => sawHelp = true; SomeOfNil case List(x) if choices contains x => value = x ; SomeOfNil case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None) - case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None) + case _ => errorAndValue("'" + name + "' does not accept multiple arguments.", None) } def unparse: List[String] = if (value == default) Nil else List(name + ":" + value) override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide withHelpSyntax(name + ":<" + helpArg + ">") + + override def reset() = v = default } private def mkPhasesHelp(descr: String, default: String) = { @@ -882,9 +916,8 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) } /** A setting represented by a list of strings which should be prefixes of - * phase names. This is not checked here, however. Alternatively the string - * `"all"` can be used to represent all phases. - * (the empty list, unless set) + * phase names. This is not checked here, however. Alternatively, underscore + * can be used to indicate all phases. 
*/ class PhasesSetting private[nsc]( name: String, @@ -897,14 +930,13 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) private[this] var _v: T = Nil private[this] var _numbs: List[(Int,Int)] = Nil private[this] var _names: T = Nil - //protected var v: T = Nil protected def v: T = _v protected def v_=(t: T): Unit = { // throws NumberFormat on bad range (like -5-6) - def asRange(s: String): (Int,Int) = (s indexOf '-') match { + def asRange(s: String): (Int,Int) = s.indexOf('-') match { case -1 => (s.toInt, s.toInt) case 0 => (-1, s.tail.toInt) - case i if s.last == '-' => (s.init.toInt, Int.MaxValue) + case _ if s.last == '-' => (s.init.toInt, Int.MaxValue) case i => (s.take(i).toInt, s.drop(i+1).toInt) } val numsAndStrs = t filter (_.nonEmpty) partition (_ forall (ch => ch.isDigit || ch == '-')) @@ -912,7 +944,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) _names = numsAndStrs._2 _v = t } - override def value = if (v contains "all") List("all") else super.value // i.e., v + override def value = if (v contains "_") List("_") else super.value // i.e., v private def numericValues = _numbs private def stringValues = _names private def phaseIdTest(i: Int): Boolean = numericValues exists (_ match { @@ -925,7 +957,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) private def splitDefault = default.split(',').toList - override def tryToSetColon(args: List[String]) = try { + def tryToSetColon(args: List[String]) = try { args match { case Nil => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(splitDefault) @@ -935,20 +967,27 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) def clear(): Unit = (v = Nil) - // we slightly abuse the usual meaning of "contains" here by returning - // true if our phase list contains "all", regardless of the incoming argument + /* True if the named phase is selected. 
+ * + * A setting value "_" or "all" selects all phases by name. + */ def contains(phName: String) = doAllPhases || containsName(phName) - def containsName(phName: String) = stringValues exists (phName startsWith _) + /* True if the given phase name matches the selection, possibly as prefixed "~name". */ + def containsName(phName: String) = stringValues.exists(phName.startsWith(_)) def containsId(phaseId: Int) = phaseIdTest(phaseId) - def containsPhase(ph: Phase) = contains(ph.name) || containsId(ph.id) + /* True if the phase is selected by name or "all", or by id, or by prefixed "~name". */ + def containsPhase(ph: Phase) = contains(ph.name) || containsId(ph.id) || containsName(s"~${ph.name}") || + ph.next != null && containsName(s"~${ph.next.name}") // null if called during construction - def doAllPhases = stringValues contains "all" - def unparse: List[String] = value map (name + ":" + _) + def doAllPhases = stringValues.exists(s => s == "_" || s == "all") + def unparse: List[String] = value.map(v => s"$name:$v") withHelpSyntax( if (default == "") name + ":" else name + "[:phases]" ) + + override def reset() = clear() } /** Internal use - syntax enhancements. */ @@ -961,3 +1000,7 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) import scala.language.implicitConversions protected implicit def installEnableSettings[T <: BooleanSetting](s: T): EnableSettings[T] = new EnableSettings(s) } + +private object Optionlike { + def unapply(s: String): Boolean = s.startsWith("-") && s != "-" +} diff --git a/src/compiler/scala/tools/nsc/settings/PathFactory.scala b/src/compiler/scala/tools/nsc/settings/PathFactory.scala index eac093a750b0..d16b8de36009 100644 --- a/src/compiler/scala/tools/nsc/settings/PathFactory.scala +++ b/src/compiler/scala/tools/nsc/settings/PathFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index b7bed9d95d03..57b41f3a5255 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,12 +19,13 @@ package settings import java.util.zip.Deflater -import scala.language.existentials -import scala.annotation.elidable -import scala.tools.util.PathResolver.Defaults +import scala.annotation.{elidable, nowarn} import scala.collection.mutable +import scala.language.existentials import scala.reflect.internal.util.StatisticsStatics import scala.tools.nsc.util.DefaultJarFactory +import scala.tools.util.PathResolver.Defaults +import scala.util.chaining._ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSettings => @@ -35,16 +36,10 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett * defaults to the value of CLASSPATH env var if it is set, as in Java, * or else to `"."` for the current user directory. */ - protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".") - - /** Enabled under -Xexperimental. */ - protected def experimentalSettings = List[BooleanSetting](YpartialUnification) - - /** Enabled under -Xfuture. */ - protected def futureSettings = List[BooleanSetting]() + protected def defaultClasspath = Option(System.getenv("CLASSPATH")).getOrElse(".") /** If any of these settings is enabled, the compiler should print a message and exit. 
*/ - def infoSettings = List[Setting](version, help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph) + def infoSettings = List[Setting](version, help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph) /** Is an info setting set? Any -option:help? */ def isInfo = infoSettings.exists(_.isSetByUser) || allSettings.valuesIterator.exists(_.isHelping) @@ -54,28 +49,28 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") - /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "") - val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.") + /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "") withAbbreviation "--tool-class-path" + val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.") withAbbreviation "--no-boot-class-path" /** * Standard settings */ // argfiles is only for the help message /*val argfiles = */ BooleanSetting("@", "A text file containing compiler arguments (options and source files)") - val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp" - val d = OutputSetting (outputDirs, ".").withPostSetHook(s => try outputDirs.setSingleOutput(s.value) catch { case FatalError(msg) => errorFn(msg) }) + val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp" withAbbreviation "--class-path" + val outdir = OutputSetting (".").withPostSetHook(s => try outputDirs.setSingleOutput(s.value) catch { case FatalError(msg) => errorFn(msg) }).tap(_.postSetHook()) - val nospecialization = BooleanSetting("-no-specialization", "Ignore @specialize annotations.") + val nospecialization 
= BooleanSetting("-no-specialization", "Ignore @specialize annotations.") withAbbreviation "--no-specialization" // Would be nice to build this dynamically from scala.languageFeature. // The two requirements: delay error checking until you have symbols, and let compiler command build option-specific help. object languageFeatures extends MultiChoiceEnumeration { val dynamics = Choice("dynamics", "Allow direct or indirect subclasses of scala.Dynamic") - val postfixOps = Choice("postfixOps", "Allow postfix operator notation, such as `1 to 10 toList'") - val reflectiveCalls = Choice("reflectiveCalls", "Allow reflective access to members of structural types") - val implicitConversions = Choice("implicitConversions", "Allow definition of implicit functions called views") - val higherKinds = Choice("higherKinds", "Allow higher-kinded types") val existentials = Choice("existentials", "Existential types (besides wildcard types) can be written and inferred") + val higherKinds = Choice("higherKinds", "Allow higher-kinded types") + val implicitConversions = Choice("implicitConversions", "Allow definition of implicit functions called views") + val postfixOps = Choice("postfixOps", "Allow postfix operator notation, such as `1 to 10 toList` (not recommended)") + val reflectiveCalls = Choice("reflectiveCalls", "Allow reflective access to members of structural types") val macros = Choice("experimental.macros", "Allow macro definition (besides implementation and application)") } val language = { @@ -85,77 +80,165 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett helpArg = "feature", descr = description, domain = languageFeatures - ) + ) withAbbreviation "--language" } - /* - * The previous "-source" option is intended to be used mainly - * though this helper. 
- */ - private[this] val version211 = ScalaVersion("2.11.0") - def isScala211: Boolean = source.value >= version211 - private[this] val version212 = ScalaVersion("2.12.0") - def isScala212: Boolean = source.value >= version212 - private[this] val version213 = ScalaVersion("2.13.0") - def isScala213: Boolean = source.value >= version213 - private[this] val version3 = ScalaVersion("3.0.0") - def isScala3: Boolean = source.value >= version3 - /** * -X "Advanced" settings */ val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.") val async = BooleanSetting ("-Xasync", "Enable the async phase for scala.async.Async.{async,await}.") val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.") - val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss").withPostSetHook(s => if (s.value) StatisticsStatics.enableDeveloperAndDeoptimize()) + val developer = BooleanSetting ("-Xdev", "Issue warnings about anything which seems amiss in compiler internals. 
Intended for compiler developers").withPostSetHook(s => if (s.value) StatisticsStatics.enableDeveloperAndDeoptimize()) val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.") andThen (flag => if (flag) elidebelow.value = elidable.ASSERTION + 1) val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument", elidable.MINIMUM, None, elidable.byName get _) val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.") - val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "") - val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.") - val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.") - val logReflectiveCalls = BooleanSetting ("-Xlog-reflective-calls", "Print a message when a reflective method call is generated") - val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.") - val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.") - val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None) + val genPhaseGraph = StringSetting ("-Vphase-graph", arg="file", descr="Generate phase graph to -*.dot.").withAbbreviation("-Xgenerate-phase-graph") val maxerrs = IntSetting ("-Xmaxerrs", "Maximum errors to print", 100, None, _ => None) val maxwarns = IntSetting ("-Xmaxwarns", "Maximum warnings to print", 100, None, _ => None) val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", initial = NoScalaVersion, default = 
Some(AnyScalaVersion)) - val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.") val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.") + val Xjline = ChoiceSetting ( + name = "-Xjline", + helpArg = "mode", + descr = "Select JLine mode.", + choices = List("emacs", "vi", "off"), + default = "emacs", + choicesHelp = List( + "emacs key bindings.", + "vi key bindings", + "No JLine editing.")) + .withDeprecationMessage("Replaced by use of '~/.inputrc'. Set 'editing-mode' to 'vi', 'emacs' or 'dumb'") + val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode.") val plugin = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") val disable = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") val require = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) - val Xprint = PhasesSetting ("-Xprint", "Print out program after") - .withAbbreviation ("-Vprint") - val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.") - val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option).") - val printArgs = StringSetting ("-Xprint-args", "file", "Print all compiler arguments to the specified location. 
Use - to echo to the reporter.", "-") val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option).") val resident = BooleanSetting ("-Xresident", "Compiler stays resident: read source filenames from standard input.") - val script = StringSetting ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "") + val script = StringSetting ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "Main") val mainClass = StringSetting ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") - val Xshowcls = StringSetting ("-Xshow-class", "class", "Show internal representation of class.", "") - val Xshowobj = StringSetting ("-Xshow-object", "object", "Show internal representation of object.", "") - val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.") - .withAbbreviation ("-Vphases") val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "") val reporter = StringSetting ("-Xreporter", "classname", "Specify a custom subclass of FilteringReporter for compiler messages.", "scala.tools.nsc.reporters.ConsoleReporter") - val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types") - val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see scala/bug#8126.", initial = ScalaVersion("2.12")) + private val XsourceHelp = + sm"""|-Xsource:3 is for migrating a codebase. -Xsource-features can be added for + |cross-building to adopt certain Scala 3 behaviors. + | + |See also "Scala 2 with -Xsource:3" on docs.scala-lang.org. + | + |-Xsource:3 issues migration warnings in category `cat=scala3-migration`, + |which are promoted to errors by default using a `-Wconf` configuration. 
+ |Examples of promoted warnings: + | * Implicit definitions must have an explicit type + | * (x: Any) + "" is deprecated + | * An empty argument list is not adapted to the unit value + | * Member classes cannot shadow a same-named class defined in a parent + | * Presence or absence of parentheses in overrides must match exactly + | + |Certain benign syntax features are enabled: + | * case C(xs*) => + | * A & B type intersection + | * import p.* + | * import p.m as n + | * import p.{given, *} + | * Eta-expansion `x.m` of methods without trailing `_` + | + |The following constructs emit a migration warning under -Xsource:3. To adopt + |Scala 3 semantics, see `-Xsource-features:help`. + |${sourceFeatures.values.toList.collect { case c: sourceFeatures.Choice if c.expandsTo.isEmpty => c.help }.map(h => s" * $h").mkString("\n")} + |""" + @nowarn("cat=deprecation") + val source = ScalaVersionSetting ("-Xsource", "version", "Enable warnings and features for a future version.", initial = ScalaVersion("2.13"), helpText = Some(XsourceHelp)).withPostSetHook { s => + if (s.value.unparse == "3.0.0-cross") + XsourceFeatures.tryToSet(List("_")) + if (s.value < ScalaVersion("3")) + if (s.value >= ScalaVersion("2.14")) + s.withDeprecationMessage("instead of -Xsource:2.14, use -Xsource:3 and optionally -Xsource-features").value = ScalaVersion("3") + else if (s.value < ScalaVersion("2.13")) + errorFn.apply(s"-Xsource must be at least the current major version (${ScalaVersion("2.13").versionString})") + } + + private val scala3Version = ScalaVersion("3") + @deprecated("Use currentRun.isScala3 instead", since="2.13.9") + def isScala3 = source.value >= scala3Version + + // buffet of features available under -Xsource:3 + object sourceFeatures extends MultiChoiceEnumeration { + // Changes affecting binary encoding + val caseApplyCopyAccess = Choice("case-apply-copy-access", "Constructor modifiers are used for apply / copy methods of case classes. 
[bin]") + val caseCompanionFunction = Choice("case-companion-function", "Synthetic case companion objects no longer extend FunctionN. [bin]") + val caseCopyByName = Choice("case-copy-by-name", "Synthesize case copy method with by-name parameters. [bin]") + val inferOverride = Choice("infer-override", "Inferred type of member uses type of overridden member. [bin]") + val noInferStructural = Choice("no-infer-structural", "Definitions with an inferred type never have a structural type. [bin]") + + // Other semantic changes + val any2StringAdd = Choice("any2stringadd", "Implicit `any2stringadd` is never inferred.") + val unicodeEscapesRaw = Choice("unicode-escapes-raw", "Don't process unicode escapes in triple quoted strings and raw interpolations.") + val stringContextScope = Choice("string-context-scope", "String interpolations always desugar to scala.StringContext.") + val leadingInfix = Choice("leading-infix", "Leading infix operators continue the previous line.") + val packagePrefixImplicits = Choice("package-prefix-implicits", "The package prefix p is no longer part of the implicit search scope for type p.A.") + val implicitResolution = Choice("implicit-resolution", "Use Scala-3-style downwards comparisons for implicit search and overloading resolution (see github.com/scala/scala/pull/6037).") + val doubleDefinitions = Choice("double-definitions", "Correctly disallow double definitions differing in empty parens.") + val etaExpandAlways = Choice("eta-expand-always", "Eta-expand even if the expected type is not a function type.") + + val v13_13_choices = List(caseApplyCopyAccess, caseCompanionFunction, inferOverride, any2StringAdd, unicodeEscapesRaw, stringContextScope, leadingInfix, packagePrefixImplicits) + + val v13_13 = Choice( + "v2.13.13", + v13_13_choices.mkString("", ",", "."), + expandsTo = v13_13_choices) + + val v13_14_choices = implicitResolution :: v13_13_choices + val v13_14 = Choice( + "v2.13.14", + "v2.13.13 plus implicit-resolution", + expandsTo = 
v13_14_choices) + + val v13_15_choices = doubleDefinitions :: v13_14_choices + val v13_15 = Choice( + "v2.13.15", + "v2.13.14 plus double-definitions", + expandsTo = v13_15_choices) + + val v13_17_choices = etaExpandAlways :: noInferStructural :: v13_15_choices + val v13_17 = Choice( + "v2.13.17", + "v2.13.15 plus no-infer-structural, eta-expand-always", + expandsTo = v13_17_choices) + } + val XsourceFeatures = MultiChoiceSetting( + name = "-Xsource-features", + helpArg = "feature", + descr = "Enable Scala 3 features under -Xsource:3: `-Xsource-features:help` for details.", + domain = sourceFeatures, + helpText = Some( + sm"""Enable Scala 3 features under -Xsource:3. + | + |Instead of `-Xsource-features:_`, it is recommended to enable individual features. + |Features can also be removed from a feature group by prefixing with `-`; + |for example, `-Xsource-features:v2.13.14,-case-companion-function`. + |Listing features explicitly ensures new semantic changes in future Scala versions are + |not silently adopted; new features can be enabled after auditing migration warnings. + | + |`-Xsource:3-cross` is a shorthand for `-Xsource:3 -Xsource-features:_`. + | + |Features marked with [bin] affect the binary encoding. Enabling them in a project + |with existing releases for Scala 2.13 can break binary compatibility. + | + |Available features: + |""") + ) val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. 
Also, ignore @switch annotation.") - val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.") val XmixinForceForwarders = ChoiceSetting( name = "-Xmixin-force-forwarders", helpArg = "mode", - descr = "Generate forwarder methods in classes inhering concrete methods from traits.", + descr = "Generate forwarder methods in classes inheriting concrete methods from traits.", choices = List("true", "junit", "false"), default = "true", choicesHelp = List( @@ -168,6 +251,8 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett def isAtLeastJunit = isTruthy || XmixinForceForwarders.value == "junit" } + val nonStrictPatmatAnalysis = BooleanSetting("-Xnon-strict-patmat-analysis", "Disable strict exhaustivity analysis, which assumes guards are false and refutable extractors don't match") + // XML parsing options object XxmlSettings extends MultiChoiceEnumeration { val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes") @@ -186,79 +271,111 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett def debuginfo = g def dependenciesFile = dependencyfile def nowarnings = nowarn - def outdir = d + @deprecated("Use outdir instead.", since="2.13.2") + def d = outdir def printLate = print /** * -Y "Private" settings */ - val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.") - val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.") val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.") val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during typing") - val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after") val check = PhasesSetting ("-Ycheck", "Check the tree at the end of") - val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after") 
val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.") val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.") - val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.").withPostSetHook(s => if (s.value) StatisticsStatics.enableDebugAndDeoptimize()) val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts.", List("package", "object", "error"), "error") - val log = PhasesSetting ("-Ylog", "Log operations during") - val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.") val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") val noimports = BooleanSetting ("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + .withPostSetHook(bs => if (bs.value) imports.value = Nil) val nopredef = BooleanSetting ("-Yno-predef", "Compile without importing Predef.") - val noAdaptedArgs = BooleanSetting ("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.") + .withPostSetHook(bs => if (bs.value && !noimports.value) imports.value = "java.lang" :: "scala" :: Nil) + val imports = MultiStringSetting(name="-Yimports", arg="import", descr="Custom root imports, default is `java.lang,scala,scala.Predef`.", helpText=Some( + sm"""|Specify a list of packages and objects to import from as "root" imports. + |Root imports form the root context in which all Scala source is evaluated. + |The names supplied to `-Yimports` must be fully-qualified. + | + |For example, the default scala.Predef results in an `import scala.Predef._`. + |Ordinary access and scoping rules apply. Root imports increase the scoping + |depth, so that later root imports shadow earlier ones. 
In addition, + |names bound by root imports have lowest binding precedence, so that they + |cannot induce ambiguities in user code, where definitions and imports + |always have a higher precedence. Root imports are imports of last resort. + | + |By convention, an explicit import from a root import object such as + |Predef disables that root import for the current source file. The import + |is disabled when the import expression is compiled, so, also by convention, + |the import should be placed early in source code order. The textual name + |in the import does not need to match the value of `-Yimports`; the import + |works in the usual way, subject to renames and name binding precedence. + | + """ + )) val Yrecursion = IntSetting ("-Yrecursion", "Set recursion depth used when locking symbols.", 0, Some((0, Int.MaxValue)), (_: String) => None) - val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.") + + val YprintTrees = ChoiceSetting( + name = "-Yprint-trees", + helpArg = "style", + descr = "How to print trees when -Vprint is enabled.", + choices = List("text", "compact", "format", "text+format", "diff"), + default = "text" + ).withPostSetHook(pt => pt.value match { + case "compact" => XshowtreesCompact.value = true + case "format" => Xshowtrees.value = true + case "text+format" => XshowtreesStringified.value = true + case _ => + }) + + def showTreeDiff: Boolean = YprintTrees.value == "diff" + val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Vprint:) Print detailed ASTs in formatted form.").internalOnly() val XshowtreesCompact - = BooleanSetting ("-Yshow-trees-compact", "(Requires -Xprint:) Print detailed ASTs in compact form.") + = BooleanSetting ("-Yshow-trees-compact", "(Requires -Vprint:) Print detailed ASTs in compact form.").internalOnly() val XshowtreesStringified - = BooleanSetting ("-Yshow-trees-stringified", "(Requires -Xprint:) Print stringifications along with detailed ASTs.") - val 
Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.") - val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.") - val Yshowsymowners = BooleanSetting ("-Yshow-symowners", "Print owner identifiers next to symbol names.") + = BooleanSetting ("-Yshow-trees-stringified", "(Requires -Vprint:) Print stringifications along with detailed ASTs.").internalOnly() + val skip = PhasesSetting ("-Yskip", "Skip") val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "") val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat val stopBefore = PhasesSetting ("-Ystop-before", "Stop before") - val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.") + val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.", default = true) val Yvalidatepos = PhasesSetting ("-Yvalidate-pos", s"Validate positions after the given phases (implies ${Yrangepos.name})") withPostSetHook (_ => Yrangepos.value = true) - val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", s"Show start and end positions of members (implies ${Yrangepos.name})", "") withPostSetHook (_ => Yrangepos.value = true) val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler.", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal) - val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. 
Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None) val YmacroFresh = BooleanSetting ("-Ymacro-global-fresh-names", "Should fresh names in macros be unique across all compilation units") - val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") - val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects") - val YreplMagicImport = BooleanSetting ("-Yrepl-use-magic-imports", "In the code that wraps REPL snippets, use magic imports rather than nesting wrapper object/classes") + val YmacroAnnotations = BooleanSetting ("-Ymacro-annotations", "Enable support for macro annotations, formerly in macro paradise.") + val YtastyNoAnnotations = BooleanSetting("-Ytasty-no-annotations", "Disable support for reading annotations from TASTy, this will prevent safety features such as pattern match exhaustivity and reachability analysis.") + val YtastyReader = BooleanSetting("-Ytasty-reader", "Enable support for reading Scala 3's TASTy files, allowing consumption of libraries compiled with Scala 3 (provided they don't use any Scala 3 only features).") + val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects", default = true) + val YreplMagicImport = BooleanSetting ("-Yrepl-use-magic-imports", "In the code that wraps REPL snippets, use magic imports rather than nesting wrapper object/classes", default = true) val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") - val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") - val YdisableFlatCpCaching = BooleanSetting 
("-Ydisable-flat-cp-caching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.").withAbbreviation("-YdisableFlatCpCaching") + @deprecated("Unused setting will be removed", since="2.13") + val Yreplsync = new BooleanSetting ("-Yrepl-sync", "Legacy setting for sbt compatibility, unused.", default = false).internalOnly() + val Yscriptrunner = StringSetting ("-Yscriptrunner", "classname", "Specify a scala.tools.nsc.ScriptRunner (default, resident, shutdown, or a class name).", "default") + val YdisableFlatCpCaching = BooleanSetting ("-Yno-flat-classpath-cache", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.").withAbbreviation("-YdisableFlatCpCaching") // Zinc adds YdisableFlatCpCaching automatically for straight-to-JAR compilation, this is a way to override that choice. - val YforceFlatCpCaching = BooleanSetting ("-Yforce-flat-cp-caching", "Force caching flat classpath representation of classpath elements from jars across compiler instances. Has precedence over: " + YdisableFlatCpCaching.name).internalOnly() + val YforceFlatCpCaching = BooleanSetting ("-Yforce-flat-cp-cache", "Force caching flat classpath representation of classpath elements from jars across compiler instances. Has precedence over: " + YdisableFlatCpCaching.name).internalOnly() val YcachePluginClassLoader = CachePolicy.setting("plugin", "compiler plugins") val YcacheMacroClassLoader = CachePolicy.setting("macro", "macros") - val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") - val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") - val Youtline = BooleanSetting ("-Youtline", "Don't compile method bodies. 
Use together with `-Ystop-afer:pickler to generate the pickled signatures for all source files.").internalOnly() + val YmacroClasspath = PathSetting ("-Ymacro-classpath", "The classpath used to reflectively load macro implementations, default is the compilation classpath.", "") + + val Youtline = BooleanSetting ("-Youtline", "Don't compile method bodies. Use together with `-Ystop-after:pickler` to generate the pickled signatures for all source files.").internalOnly() val unsafe = MultiStringSetting("-Yrelease", "packages", "Expose platform packages hidden under --release") val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() - val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method", "method-ref"), "method") - val YmacroClasspath = PathSetting ("-Ymacro-classpath", "The classpath used to reflectively load macro implementations, default is the compilation classpath.", "") + val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") // Allows a specialised jar to be written. 
For instance one that provides stable hashing of content, or customisation of the file storage val YjarFactory = StringSetting ("-YjarFactory", "classname", "factory for jar files", classOf[DefaultJarFactory].getName) - val YaddBackendThreads = IntSetting ("-Ybackend-parallelism", "maximum worker threads for backend", 1, Some((1,16)), (x: String) => None ) - val YmaxQueue = IntSetting ("-Ybackend-worker-queue", "backend threads worker queue size", 0, Some((0,1000)), (x: String) => None ) + val YaddBackendThreads = IntSetting ("-Ybackend-parallelism", "maximum worker threads for backend", 1, Some((1,16)), (_: String) => None ) + val YmaxQueue = IntSetting ("-Ybackend-worker-queue", "backend threads worker queue size", 0, Some((0,1000)), (_: String) => None ) val YjarCompressionLevel = IntSetting("-Yjar-compression-level", "compression level to use when writing jar files", - Deflater.DEFAULT_COMPRESSION, Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)), (x: String) => None) + Deflater.DEFAULT_COMPRESSION, Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)), (_: String) => None) val YpickleJava = BooleanSetting("-Ypickle-java", "Pickler phase should compute pickles for .java defined symbols for use by build tools").internalOnly() val YpickleWrite = StringSetting("-Ypickle-write", "directory|jar", "destination for generated .sig files containing type signatures.", "", None).internalOnly() val YpickleWriteApiOnly = BooleanSetting("-Ypickle-write-api-only", "Exclude private members (other than those material to subclass compilation, such as private trait vals) from generated .sig files containing type signatures.").internalOnly() val YtrackDependencies = BooleanSetting("-Ytrack-dependencies", "Record references to in unit.depends. 
Deprecated feature that supports SBT 0.13 with incOptions.withNameHashing(false) only.", default = true) + val Yscala3ImplicitResolution = BooleanSetting("-Yscala3-implicit-resolution", "Use Scala-3-style downwards comparisons for implicit search and overloading resolution (see github.com/scala/scala/pull/6037).") + .withDeprecationMessage("Use -Xsource:3 -Xsource-features:implicit-resolution instead") sealed abstract class CachePolicy(val name: String, val help: String) object CachePolicy { @@ -267,49 +384,48 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett object LastModified extends CachePolicy("last-modified", "Cache class loader, using file last-modified time to invalidate") object Always extends CachePolicy("always", "Cache class loader with no invalidation") // TODO Jorge to add new policy. Think about whether there is a benefit to the user on offering this as a separate policy or unifying with the previous one. - // object ZipMetadata extends CachePolicy("zip-metadata", "Cache classloade, using file last-modified time, then ZIP file metadata to invalidate") + // object ZipMetadata extends CachePolicy("zip-metadata", "Cache classloader, using file last-modified time, then ZIP file metadata to invalidate") def values: List[CachePolicy] = List(None, LastModified, Always) } object optChoices extends MultiChoiceEnumeration { - val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers guarding no instructions, redundant metadata (debug information, line numbers).") - val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") - val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.") - val copyPropagation = Choice("copy-propagation", "Eliminate redundant local variables and unused values (including closures). 
Enables unreachable-code.") - val redundantCasts = Choice("redundant-casts", "Eliminate redundant casts using a type propagation analysis.") - val boxUnbox = Choice("box-unbox", "Eliminate box-unbox pairs within the same method (also tuples, xRefs, value class instances). Enables unreachable-code.") - val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.") - val closureInvocations = Choice("closure-invocations" , "Rewrite closure invocations to the implementation method.") - val inline = Choice("inline", "Inline method invocations according to -Yopt-inline-heuristics and -opt-inline-from.") - - // note: unlike the other optimizer levels, "l:none" appears up in the `opt.value` set because it's not an expanding option (expandsTo is empty) - val lNone = Choice("l:none", - "Disable optimizations. Takes precedence: `-opt:l:none,+box-unbox` / `-opt:l:none -opt:box-unbox` don't enable box-unbox.") - - private val defaultChoices = List(unreachableCode) + val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers guarding no instructions, redundant metadata (debug information, line numbers).") + val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") + val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.") + val copyPropagation = Choice("copy-propagation", "Eliminate redundant local variables and unused values (including closures). Enables unreachable-code.") + val redundantCasts = Choice("redundant-casts", "Eliminate redundant casts using a type propagation analysis.") + val boxUnbox = Choice("box-unbox", "Eliminate box-unbox pairs within the same method (also tuples, xRefs, value class instances). 
Enables unreachable-code.") + val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.") + val closureInvocations = Choice("closure-invocations" , "Rewrite closure invocations to the implementation method.") + val allowSkipCoreModuleInit = Choice("allow-skip-core-module-init", "Allow eliminating unused module loads for core modules of the standard library (e.g., Predef, ClassTag).") + val assumeModulesNonNull = Choice("assume-modules-non-null", "Assume loading a module never results in null (happens if the module is accessed in its super constructor).") + val allowSkipClassLoading = Choice("allow-skip-class-loading", "Allow optimizations that can skip or delay class loading.") + val ell = Choice("l", "Deprecated l:none, l:default, l:method, l:inline.") + + // none is not an expanding option. It is excluded from -opt:_ below. + val lNone = Choice("none", "Disable all optimizations, including explicit options.") + + val defaultOptimizations = List(unreachableCode) val lDefault = Choice( - "l:default", - "Enable default optimizations: " + defaultChoices.mkString("", ",", "."), - expandsTo = defaultChoices) + "default", + defaultOptimizations.mkString("Enable default optimizations: ", ",", "."), + expandsTo = defaultOptimizations) - private val methodChoices = List(unreachableCode, simplifyJumps, compactLocals, copyPropagation, redundantCasts, boxUnbox, nullnessTracking, closureInvocations) + val localOptimizations = List(simplifyJumps, compactLocals, copyPropagation, redundantCasts, boxUnbox, nullnessTracking, closureInvocations, allowSkipCoreModuleInit, assumeModulesNonNull, allowSkipClassLoading) val lMethod = Choice( - "l:method", - "Enable intra-method optimizations: " + methodChoices.mkString("", ",", "."), - expandsTo = methodChoices) - - private val inlineChoices = List(lMethod, inline) - val lInline = Choice("l:inline", - "Enable cross-method optimizations (note: inlining requires 
-opt-inline-from): " + inlineChoices.mkString("", ",", "."), - expandsTo = inlineChoices) - - val lProject = Choice( - "l:project", - "[deprecated, use -opt:l:inline, -opt-inline-from] Enable cross-method optimizations within the current project.") - - val lClasspath = Choice( - "l:classpath", - "[deprecated, use -opt:l:inline, -opt-inline-from] Enable cross-method optimizations across the entire classpath.") + "local", + (defaultOptimizations ::: localOptimizations).mkString("Enable intra-method optimizations: ", ",", "."), + expandsTo = defaultOptimizations ::: localOptimizations) + + val inlineFrom = Choice( + "inline", + s"Inline method invocations and enable all optimizations; specify where to inline from, as shown below. See also -Yopt-inline-heuristics.\n$inlineHelp", + expandsTo = defaultOptimizations ::: localOptimizations, + requiresSelections = true + ) + + // "none" is excluded from wildcard expansion so that -opt:_ does not disable all settings + override def wildcardChoices = super.wildcardChoices.filter(_ ne lNone) } // We don't use the `default` parameter of `MultiChoiceSetting`: it specifies the default values @@ -318,19 +434,28 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val opt = MultiChoiceSetting( name = "-opt", helpArg = "optimization", - descr = "Enable optimizations", - domain = optChoices).withPostSetHook(s => { - import optChoices._ - if (!s.value.contains(inline) && (s.value.contains(lProject) || s.value.contains(lClasspath))) - s.enable(lInline) - }) + descr = "Enable optimizations: `-opt:local`, `-opt:inline:`; `-opt:help` for details.", + domain = optChoices, + ).withPostSetHook { ss => + // kludge alert: will be invoked twice, with selections available 2nd time + // for -opt:l:method reset the ell selections then enable local + if (ss.contains(optChoices.ell) && optChoices.ell.selections.nonEmpty) { + val todo = optChoices.ell.selections.map { + case "none" => "none" + case "default" => 
"default" + case "method" => "local" + case "inline" => "local" // enable all except inline, see -opt-inline-from + } + optChoices.ell.selections = Nil + ss.tryToSetColon(todo) + } + } - private def optEnabled(choice: optChoices.Choice) = { - !opt.contains(optChoices.lNone) && { + private def optEnabled(choice: optChoices.Choice) = + !optNone && { opt.contains(choice) || !opt.isSetByUser && optChoices.lDefault.expandsTo.contains(choice) } - } def optNone = opt.contains(optChoices.lNone) def optUnreachableCode = optEnabled(optChoices.unreachableCode) @@ -341,21 +466,21 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett def optBoxUnbox = optEnabled(optChoices.boxUnbox) def optNullnessTracking = optEnabled(optChoices.nullnessTracking) def optClosureInvocations = optEnabled(optChoices.closureInvocations) - def optInlinerEnabled = optEnabled(optChoices.inline) - - // deprecated inliner levels - def optLProject = optEnabled(optChoices.lProject) - def optLClasspath = optEnabled(optChoices.lClasspath) + def optAllowSkipCoreModuleInit = optEnabled(optChoices.allowSkipCoreModuleInit) + def optAssumeModulesNonNull = optEnabled(optChoices.assumeModulesNonNull) + def optAllowSkipClassLoading = optEnabled(optChoices.allowSkipClassLoading) + def optInlinerEnabled = !optInlineFrom.isEmpty && !optNone def optBuildCallGraph = optInlinerEnabled || optClosureInvocations def optAddToBytecodeRepository = optBuildCallGraph || optInlinerEnabled || optClosureInvocations + def optUseAnalyzerCache = opt.isSetByUser && !optNone && (optBuildCallGraph || opt.value.size > 1) - val optInlineFrom = MultiStringSetting( - "-opt-inline-from", - "patterns", - "Patterns for classfile names from which to allow inlining, `help` for details.", - helpText = Some( - """Patterns for classfile names from which the inliner is allowed to pull in code. 
+ def optInlineFrom: List[String] = optChoices.inlineFrom.selections + + def inlineHelp = + """ + |Inlining requires a list of patterns defining where code can be inlined from: `-opt:inline:p1,p2`. + | | * Matches classes in the empty package | ** All classes | a.C Class a.C @@ -368,13 +493,22 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett | Classes defined in source files compiled in the current compilation, either | passed explicitly to the compiler or picked up from the `-sourcepath` | - |The setting accepts a list of patterns: `-opt-inline-from:p1,p2`. The setting can be passed - |multiple times, the list of patterns gets extended. A leading `!` marks a pattern excluding. - |The last matching pattern defines whether a classfile is included or excluded (default: excluded). - |For example, `a.**,!a.b.**` includes classes in a and sub-packages, but not in a.b and sub-packages. + |`-opt:inline:p` may be specified multiple times to extend the list of patterns. + |A leading `!` means exclude anything that matches the pattern. The last matching pattern wins. + |For example, `a.**,!a.b.**` includes classes in `a` and sub-packages, but not in `a.b` and sub-packages. | - |Note: on the command-line you might need to quote patterns containing `*` to prevent the shell - |from expanding it to a list of files in the current directory.""".stripMargin)) + |When patterns are supplied on a command line, it is usually necessary to quote special shell characters + |such as `*`, `<`, `>`, and `$`: `'-opt:inline:p.*,!p.C$D' '-opt:inline:'`. 
+ |Quoting may not be needed in a build file.""".stripMargin + + @deprecated("Deprecated alias", since="2.13.8") + val xoptInlineFrom = MultiStringSetting( + "-opt-inline-from", + "patterns", + "Patterns for classfile names from which to allow inlining, `help` for details.", + helpText = Some(inlineHelp)) + //.withDeprecationMessage("use -opt:inline:**") + .withPostSetHook(from => opt.add("inline", from.value)) val YoptInlineHeuristics = ChoiceSetting( name = "-Yopt-inline-heuristics", @@ -394,15 +528,26 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett } val optWarnings = MultiChoiceSetting( - name = "-opt-warnings", + name = "-Wopt", helpArg = "warning", - descr = "Enable optimizer warnings", + descr = "Enable optimizer warnings, `help` for details.", domain = optWarningsChoices, - default = Some(List(optWarningsChoices.atInlineFailed.name))) withPostSetHook { _ => + default = Some(List(optWarningsChoices.atInlineFailed.name)) + ).withPostSetHook { _ => // no need to set `Wconf` to `silent` if optWarnings is none, since no warnings are reported if (optWarningsSummaryOnly) Wconf.tryToSet(List(s"cat=optimizer:ws")) else Wconf.tryToSet(List(s"cat=optimizer:w")) } + @deprecated("Deprecated alias", since="2.13.8") + val xoptWarnings = MultiChoiceSetting( + name = "-opt-warnings", + helpArg = "warning", + descr = "Enable optimizer warnings, `help` for details.", + domain = optWarningsChoices, + default = Some(List(optWarningsChoices.atInlineFailed.name)) + ).withPostSetHook { ow => + optWarnings.value = ow.value + }//.withDeprecationMessage("Use -Wopt instead.") def optWarningsSummaryOnly: Boolean = optWarnings.value subsetOf Set(optWarningsChoices.none, optWarningsChoices.atInlineFailedSummary) @@ -416,22 +561,6 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett def optWarningNoInlineMissingBytecode = optWarnings.contains(optWarningsChoices.noInlineMissingBytecode) def 
optWarningNoInlineMissingScalaInlineInfoAttr = optWarnings.contains(optWarningsChoices.noInlineMissingScalaInlineInfoAttr) - val YoptTrace = StringSetting("-Yopt-trace", "package/Class.method", "Trace the optimizer progress for methods; `_` to print all, prefix match to select.", "") - - val YoptLogInline = StringSetting("-Yopt-log-inline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "") - - val Ystatistics = PhasesSetting("-Ystatistics", "Print compiler statistics for specific phases (implies `-Ycollect-statistics`)", "parser,typer,patmat,erasure,cleanup,jvm") - .withPostSetHook(s => if (s.value.nonEmpty) YcollectStatistics.value = true) - - val YcollectStatistics = BooleanSetting("-Ycollect-statistics", "Collect cold statistics (quietly, unless `-Ystatistics` is set)") - .withPostSetHook(s => if (s.value) StatisticsStatics.enableColdStatsAndDeoptimize()) - - val YhotStatistics = BooleanSetting("-Yhot-statistics-enabled", s"Enable `${Ystatistics.name}` to print hot statistics.") - .withPostSetHook(s => if (s && YstatisticsEnabled) StatisticsStatics.enableHotStatsAndDeoptimize()) - - override def YstatisticsEnabled = YcollectStatistics.value - override def YhotStatisticsEnabled = YhotStatistics.value - val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") val YprofileDestination = StringSetting("-Yprofile-destination", "file", "Profiling output - specify a file or `-` for console.", ""). withPostSetHook( _ => YprofileEnabled.value = true ) @@ -442,34 +571,88 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val YprofileRunGcBetweenPhases = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or all", "_"). withPostSetHook( _ => YprofileEnabled.value = true ) - - - /** Area-specific debug output. 
- */ - val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.") - val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.") - val Yissuedebug = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.") - val YmacrodebugLite = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.") - val YmacrodebugVerbose = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.") - val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.") - val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.") - val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.") - val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.") val YpatmatExhaustdepth = IntSetting("-Ypatmat-exhaust-depth", "off", 20, Some((10, Int.MaxValue)), str => Some(if(str.equalsIgnoreCase("off")) Int.MaxValue else str.toInt)) - val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.") + + /** + * -V "Verbose" settings + */ + val Vhelp = BooleanSetting("-V", "Print a synopsis of verbose options.") + val browse = PhasesSetting("-Vbrowse", "Browse the abstract syntax tree after") withAbbreviation "-Ybrowse" + val cyclic = BooleanSetting("-Vcyclic", "Debug cyclic reference error.") + val debug = BooleanSetting("-Vdebug", "Increase the quantity of debugging output.") withAbbreviation "-Ydebug" withPostSetHook (s => if (s.value) StatisticsStatics.enableDebugAndDeoptimize()) + val YdebugTasty = BooleanSetting("-Vdebug-tasty", "Increase the quantity of debugging output when unpickling tasty.") withAbbreviation "-Ydebug-tasty" + val VdebugTypeError = BooleanSetting("-Vdebug-type-error", "Print the stack trace when any error is caught.") withAbbreviation "-Ydebug-type-error" + 
val Ydocdebug = BooleanSetting("-Vdoc", "Trace scaladoc activity.") withAbbreviation "-Ydoc-debug" + val Yidedebug = BooleanSetting("-Vide", "Generate, validate and output trees using the interactive compiler.") withAbbreviation "-Yide-debug" +// val Yissuedebug = BooleanSetting("-Vissue", "Print stack traces when a context issues an error.") withAbbreviation "-Yissue-debug" + val log = PhasesSetting("-Vlog", "Log operations during") withAbbreviation "-Ylog" + val Ylogcp = BooleanSetting("-Vclasspath", "Output information about what classpath is being applied.") withAbbreviation "-Ylog-classpath" + val YmacrodebugLite = BooleanSetting("-Vmacro-lite", "Trace macro activities with less output.") withAbbreviation "-Ymacro-debug-lite" + val YmacrodebugVerbose = BooleanSetting("-Vmacro", "Trace macro activities: compilation, generation of synthetics, classloading, expansion, exceptions.") withAbbreviation "-Ymacro-debug-verbose" + val YoptTrace = StringSetting("-Vopt", "package/Class.method", "Trace the optimizer progress for methods; `_` to print all, prefix match to select.", "") + .withAbbreviation("-Yopt-trace") + val YoptLogInline = StringSetting("-Vinline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "") + .withAbbreviation("-Yopt-log-inline") + val Ypatmatdebug = BooleanSetting("-Vpatmat", "Trace pattern matching translation.") withAbbreviation "-Ypatmat-debug" + val showPhases = BooleanSetting("-Vphases", "Print a synopsis of compiler phases.") + .withAbbreviation("-Xshow-phases") + val Yposdebug = BooleanSetting("-Vpos", "Trace position validation.") withAbbreviation "-Ypos-debug" + val Xprint = PhasesSetting("-Vprint", "Print out program after (or ~phase for before and after)", "typer") + .withAbbreviation("-Xprint") + val Xprintpos = BooleanSetting("-Vprint-pos", "Print tree positions, as offsets.") + .withAbbreviation("-Xprint-pos") + val printtypes = BooleanSetting("-Vprint-types", "Print tree 
types (debugging option).") + .withAbbreviation("-Xprint-types") + val printArgs = StringSetting("-Vprint-args", "file", "Print all compiler arguments to the specified location. Use - to echo to the reporter.", "-") + .withAbbreviation("-Xprint-args") + val Yquasiquotedebug = BooleanSetting("-Vquasiquote", "Trace quasiquotations.") withAbbreviation "-Yquasiquote-debug" + val Yreifydebug = BooleanSetting("-Vreify", "Trace reification.") withAbbreviation "-Yreify-debug" + val Yshow = PhasesSetting ("-Vshow", "(Requires -Xshow-class or -Xshow-object) Show after") + .withAbbreviation("-Yshow") + val Xshowcls = StringSetting("-Vshow-class", "class", "Show internal representation of class.", "") + .withAbbreviation("-Xshow-class") + val Xshowobj = StringSetting("-Vshow-object", "object", "Show internal representation of object.", "") + .withAbbreviation("-Xshow-object") + val Ymemberpos = StringSetting("-Vshow-member-pos", "output style", s"Show start and end positions of members (implies ${Yrangepos.name})", "") + .withPostSetHook(_ => Yrangepos.value = true) + .withAbbreviation("-Yshow-member-pos") + val Yshowsymkinds = BooleanSetting ("-Vshow-symkinds", "Print abbreviated symbol kinds next to symbol names.") + .withAbbreviation("-Yshow-symkinds") + val Yshowsymowners = BooleanSetting ("-Vshow-symowners", "Print owner identifiers next to symbol names.") + .withAbbreviation("-Yshow-symowners") + val Ystatistics = PhasesSetting("-Vstatistics", "Print compiler statistics for specific phases (implies `-Ycollect-statistics`)", "parser,typer,patmat,erasure,cleanup,jvm") + .withPostSetHook(s => if (s.value.nonEmpty) YstatisticsEnabled.value = true) + .withAbbreviation("-Ystatistics") + val YstatisticsEnabled = BooleanSetting("-Ystatistics-enabled", "Internal setting, indicating that statistics are enabled for some phase.").internalOnly().withPostSetHook(s => if (s.value) StatisticsStatics.enableColdStatsAndDeoptimize()) + val YhotStatisticsEnabled = 
BooleanSetting("-Vhot-statistics", s"Enable `${Ystatistics.name}` to also print hot statistics.") + .withAbbreviation("-Yhot-statistics").withPostSetHook(s => if (s.value && YstatisticsEnabled.value) StatisticsStatics.enableHotStatsAndDeoptimize()) + val YcollectStatistics = BooleanSetting("-Ycollect-statistics", "Collect cold statistics (quietly, unless `-Vstatistics` is set)") + .withPostSetHook(s => if (s.value) YstatisticsEnabled.value = true) + val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" + val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" + val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.").withAbbreviation("-Xlog-implicits") + val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") + val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) + val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") + val logImplicitConv = BooleanSetting("-Vimplicit-conversions", "Print a message whenever an implicit conversion is inserted.") + .withAbbreviation("-Xlog-implicit-conversions") + val logReflectiveCalls = BooleanSetting("-Vreflective-calls", "Print a message when a reflective method call is generated") + .withAbbreviation("-Xlog-reflective-calls") + val logFreeTerms = BooleanSetting("-Vfree-terms", "Print a message when reification creates a free term.") + .withAbbreviation("-Xlog-free-terms") + val logFreeTypes = BooleanSetting("-Vfree-types", "Print a message when reification resorts to generating a free type.") + .withAbbreviation("-Xlog-free-types") /** Groups of Settings. 
*/ - val future = BooleanSetting("-Xfuture", "Turn on future language features.") enablingIfNotSetByUser futureSettings - val optimise = BooleanSetting("-optimise", "Compiler flag for the optimizer in Scala 2.11") - .withAbbreviation("-optimize") - .withDeprecationMessage("In 2.12, -optimise enables -opt:l:inline -opt-inline-from:**. Check -opt:help for using the Scala 2.12 optimizer.") - .withPostSetHook(_ => { - opt.enable(optChoices.lInline) - optInlineFrom.value = List("**") - }) - val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enablingIfNotSetByUser experimentalSettings + val future = BooleanSetting("-Xfuture", "Replaced by -Xsource.").withDeprecationMessage("Not used since 2.13.") + val optimise = BooleanSetting("-optimize", "Enables optimizations.") + .withAbbreviation("-optimise") + .withDeprecationMessage("Since 2.12, enables -opt:inline:**. This can be dangerous.") + .withPostSetHook(_ => opt.add("inline", List("**"))) + val Xexperimental = BooleanSetting("-Xexperimental", "Former graveyard for language-forking extensions.") + .withDeprecationMessage("Not used since 2.13.") // Feature extensions val XmacroSettings = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.") @@ -503,22 +686,13 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett } def conflictWarning: Option[String] = { - // See cd878232b5 for an example how to warn about conflicting settings - - /* - def checkSomeConflict: Option[String] = ... + @nowarn("cat=deprecation") + def sourceFeatures: Option[String] = + Option.when(XsourceFeatures.value.nonEmpty && !isScala3)(s"${XsourceFeatures.name} requires -Xsource:3") - List(/* checkSomeConflict, ... */).flatten match { + List(sourceFeatures).flatten match { case Nil => None case warnings => Some("Conflicting compiler settings were detected. 
Some settings will be ignored.\n" + warnings.mkString("\n")) } - */ - - if (opt.value.contains(optChoices.lProject)) - Some("-opt:l:project is deprecated, use -opt:l:inline and -opt-inline-from") - else if (opt.value.contains(optChoices.lClasspath)) - Some("-opt:l:classpath is deprecated, use -opt:l:inline and -opt-inline-from") - else - None } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala index 7870ac960f6c..a773dadab0a9 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,36 +15,47 @@ package scala package tools.nsc.settings -/** - * Represents a single Scala version in a manner that - * supports easy comparison and sorting. +/** Represents a single Scala version in a manner that + * supports easy comparison and sorting. + * + * A version can be `Specific`, `Maximal`, or `Minimal`. */ sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { def unparse: String + def versionString: String = unparse } -/** - * A scala version that sorts higher than all actual versions - */ -case object NoScalaVersion extends ScalaVersion { - def unparse = "none" - - def compare(that: ScalaVersion): Int = that match { - case NoScalaVersion => 0 +/** A scala version that sorts higher than all actual versions. */ +sealed abstract class MaximalScalaVersion extends ScalaVersion { + final def compare(that: ScalaVersion): Int = that match { + case _: MaximalScalaVersion => 0 case _ => 1 } } -/** - * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion - * may or may not be a released version - i.e. 
this same class is used to represent - * final, release candidate, milestone, and development builds. The build argument is used - * to segregate builds +/** A scala version that sorts lower than all actual versions. */ +sealed abstract class MinimalScalaVersion extends ScalaVersion { + final def compare(that: ScalaVersion): Int = that match { + case _: MinimalScalaVersion => 0 + case _ => -1 + } +} + +/** If "no version" is specified, assume a maximal version, "the latest". */ +case object NoScalaVersion extends MaximalScalaVersion { + def unparse = "none" +} + +/** A specific Scala version, not one of the magic min/max versions. + * + * A SpecificScalaVersion may or may not be a released version. + * The `build` parameter specifies final, release candidate, milestone, and development builds. */ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { def unparse = s"${major}.${minor}.${rev}${build.unparse}" + override def versionString = s"${major}.${minor}.${rev}" - def compare(that: ScalaVersion): Int = that match { + final def compare(that: ScalaVersion): Int = that match { case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these // comparisons a lot so I'm using brute force direct style code @@ -54,38 +65,31 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu else if (minor > thatMinor) 1 else if (rev < thatRev) -1 else if (rev > thatRev) 1 - else build compare thatBuild - case AnyScalaVersion => 1 - case NoScalaVersion => -1 + else build.compare(thatBuild) + case _: MinimalScalaVersion => 1 + case _: MaximalScalaVersion => -1 } } -/** - * A Scala version that sorts lower than all actual versions +/** A Scala version that sorts lower than all actual versions. 
*/ -case object AnyScalaVersion extends ScalaVersion { +case object AnyScalaVersion extends MinimalScalaVersion { def unparse = "any" - - def compare(that: ScalaVersion): Int = that match { - case AnyScalaVersion => 0 - case _ => -1 - } } -/** - * Factory methods for producing ScalaVersions +/** Factory methods for producing ScalaVersions. */ object ScalaVersion { private val dot = """\.""" private val dash = "-" private val vchar = """\d""" //"[^-+.]" - private val vpat = s"(?s)($vchar+)(?:$dot($vchar+)(?:$dot($vchar+)(?:$dash(.*))?)?)?".r + private val vpat = s"(?s)($vchar+)(?:$dot($vchar+)(?:$dot($vchar+))?)?(?:$dash(.+))?".r private val rcpat = """(?i)rc(\d*)""".r private val mspat = """(?i)m(\d*)""".r def apply(versionString: String, errorHandler: String => Unit): ScalaVersion = { def error() = errorHandler( - s"Bad version (${versionString}) not major[.minor[.revision[-suffix]]]" + s"Bad version (${versionString}) not major[.minor[.revision]][-suffix]" ) def toInt(s: String) = s match { @@ -101,12 +105,11 @@ object ScalaVersion { } versionString match { - case "none" => NoScalaVersion - case "" => NoScalaVersion - case "any" => AnyScalaVersion + case "none" | "" => NoScalaVersion + case "any" => AnyScalaVersion case vpat(majorS, minorS, revS, buildS) => SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS)) - case _ => error() ; AnyScalaVersion + case _ => error(); AnyScalaVersion } } @@ -118,78 +121,67 @@ object ScalaVersion { */ val current = apply(util.Properties.versionNumberString) - /** - * The 2.8.0 version. 
- */ - val twoDotEight = SpecificScalaVersion(2, 8, 0, Final) + implicit class `not in Ordered`(private val v: ScalaVersion) extends AnyVal { + def min(other: ScalaVersion): ScalaVersion = if (v <= other) v else other + def max(other: ScalaVersion): ScalaVersion = if (v >= other) v else other + } } -/** - * Represents the data after the dash in major.minor.rev-build +/** Represents the data after the dash in major.minor.rev-build. + * + * In order, Development, Final, RC, Milestone. The order is "newest to oldest". */ -abstract class ScalaBuild extends Ordered[ScalaBuild] { - /** - * Return a version of this build information that can be parsed back into the - * same ScalaBuild - */ +sealed abstract class ScalaBuild extends Ordered[ScalaBuild] { + /** Return a version of this build information that can be parsed back into the same ScalaBuild. */ def unparse: String + + final def compare(that: ScalaBuild) = buildOrdering.compare(this, that) } -/** - * A development, test, integration, snapshot or other "unofficial" build +private object buildOrdering extends Ordering[ScalaBuild] { + override def compare(x: ScalaBuild, y: ScalaBuild): Int = + x match { + case Development(id) => + y match { + // sorting by id is pragmatic but not meaningful, such as "cross" < "migration" + case Development(thatId) => id.compare(thatId) + case _ => 1 // otherwise, newer than official builds, which is incorrect on the "build timeline" + } + case Milestone(n) => + y match { + case Milestone(thatN) => n - thatN // compare two milestones based on their milestone numbers + case _ => -1 // a milestone is older than anything other than another milestone + } + case RC(n) => + y match { + case RC(thatN) => n - thatN // compare two rcs based on their RC numbers + case Milestone(_) => 1 // an rc is older than anything other than a milestone or another rc + case _ => -1 + } + case Final => + y match { + case Final => 0 // a final is newer than anything other than a development build or another 
final + case Development(_) => -1 + case _ => 1 + } + } +} +/** A development, test, integration, snapshot or other "unofficial" build. */ case class Development(id: String) extends ScalaBuild { def unparse = s"-${id}" - - def compare(that: ScalaBuild) = that match { - // sorting two development builds based on id is reasonably valid for two versions created with the same schema - // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions - // this is a pragmatic compromise - case Development(thatId) => id compare thatId - // assume a development build is newer than anything else, that's not really true, but good luck - // mapping development build versions to other build types - case _ => 1 - } } -/** - * A final final +/** A final final. */ case object Final extends ScalaBuild { def unparse = "" - - def compare(that: ScalaBuild) = that match { - case Final => 0 - // a final is newer than anything other than a development build or another final - case Development(_) => -1 - case _ => 1 - } } - -/** - * A candidate for final release +/** A candidate for final release. */ case class RC(n: Int) extends ScalaBuild { def unparse = s"-RC${n}" - - def compare(that: ScalaBuild) = that match { - // compare two rcs based on their RC numbers - case RC(thatN) => n - thatN - // an rc is older than anything other than a milestone or another rc - case Milestone(_) => 1 - case _ => -1 - } } - -/** - * An intermediate release +/** An intermediate release. 
*/ case class Milestone(n: Int) extends ScalaBuild { def unparse = s"-M${n}" - - def compare(that: ScalaBuild) = that match { - // compare two milestones based on their milestone numbers - case Milestone(thatN) => n - thatN - // a milestone is older than anything other than another milestone - case _ => -1 - - } } diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 28e48417106a..19f4bd987210 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,98 +13,104 @@ package scala.tools.nsc package settings -import scala.tools.nsc.settings.StandardScalaSettings._ import scala.tools.util.PathResolver.Defaults import scala.util.Properties.{isJavaAtLeast, javaSpecVersion} -/** Settings which aren't behind a -X, -Y, or -P option. +/** Settings which aren't behind a -V, -W, -X, -Y, or -P option. * When possible, the val and the option have identical names. - * The abstract settings are commented as to why they are as yet - * implemented in MutableSettings rather than mutation-generically. */ trait StandardScalaSettings { _: MutableSettings => - // Switched to MutableSettings so: - // 1. deprecation/feature/etc can access Wconf - // 2. and they have withPostSetHook methods + + import StandardScalaSettings._ /** Path related settings. 
*/ - val bootclasspath = PathSetting ("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath) + val bootclasspath = PathSetting ("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath) withAbbreviation "--boot-class-path" val classpath: PathSetting // is mutated directly in various places (thus inspiring this very effort) - val d: OutputSetting // depends on mutable OutputDirs class - val extdirs = PathSetting ("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs) - val javabootclasspath = PathSetting ("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath) - val javaextdirs = PathSetting ("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs) - val sourcepath = PathSetting ("-sourcepath", "Specify location(s) of source files.", "") // Defaults.scalaSourcePath + val extdirs = PathSetting ("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs) withAbbreviation "--extension-directories" + val javabootclasspath = PathSetting ("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath) withAbbreviation "--java-boot-class-path" + val javaextdirs = PathSetting ("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs) withAbbreviation "--java-extension-directories" + val sourcepath = PathSetting ("-sourcepath", "Specify location(s) of source files.", "") withAbbreviation "--source-path" // Defaults.scalaSourcePath val rootdir = PathSetting ("-rootdir", "The absolute path of the project root directory, usually the git/scm checkout. Used by -Wconf.", "") withAbbreviation "--root-directory" + val systemPath = PathSetting ("-system", "Override location of Java system modules", "") withAbbreviation "--system" /** Other settings. 
*/ - val dependencyfile = StringSetting ("-dependencyfile", "file", "Set dependency tracking file.", ".scala_dependencies") - val deprecation = BooleanSetting ("-deprecation", "Emit warning and location for usages of deprecated APIs. See also -Wconf.") withAbbreviation "--deprecation" withPostSetHook { s => - if (s.value) Wconf.tryToSet(List(s"cat=deprecation:w")) - else Wconf.tryToSet(List(s"cat=deprecation:s")) - } - val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding) - val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail.") + val dependencyfile = StringSetting ("-dependencyfile", "file", "Set dependency tracking file.", ".scala_dependencies") withAbbreviation "--dependency-file" + val deprecation = BooleanSetting ("-deprecation", "Emit warning and location for usages of deprecated APIs. See also -Wconf.").withAbbreviation("--deprecation") + val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding) withAbbreviation "--encoding" + val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail.") withAbbreviation "--explain-types" val feature = BooleanSetting ("-feature", "Emit warning and location for usages of features that should be imported explicitly. 
See also -Wconf.") withAbbreviation "--feature" withPostSetHook { s => if (s.value) Wconf.tryToSet(List(s"cat=feature:w")) else Wconf.tryToSet(List(s"cat=feature:s")) } val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars") - val help = BooleanSetting ("-help", "Print a synopsis of standard options") - val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.") withPostSetHook { s => if (s) maxwarns.value = 0 } + val help = BooleanSetting ("-help", "Print a synopsis of standard options") withAbbreviation "--help" withAbbreviation("-h") + val nowarn = BooleanSetting("-nowarn", "Silence warnings. (-Wconf:any:s)") + .withAbbreviation("--no-warnings") + .withPostSetHook(s => if (s.value) maxwarns.value = 0) val optimise: BooleanSetting // depends on post hook which mutates other settings - val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") + val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") withAbbreviation "--print" + val quickfix = MultiStringSetting( + "-quickfix", + "filters", + "Apply quick fixes provided by the compiler for warnings and errors to source files", + helpText = Some( + """Apply quick fixes provided by the compiler for warnings and errors to source files. + |Syntax: -quickfix:<filter>,...,<filter> + | + |<filter> syntax is the same as for configurable warnings, see `-Wconf:help`. Examples: + | -quickfix:any apply all available quick fixes + | -quickfix:msg=Auto-application apply quick fixes where the message contains "Auto-application" + | + |Use `-Wconf:any:warning-verbose` to display applicable message filters with each warning. + | + |Use `-quickfix:silent` to omit the `[quickfixable]` tag in compiler messages. 
+ |""".stripMargin), + prepend = true) + def quickFixSilent: Boolean = quickfix.value == List("silent") val release = ChoiceSetting("-release", "release", "Compile for a version of the Java API and target class file.", AllTargetVersions, normalizeTarget(javaSpecVersion)) - .withPostSetHook { setting => - val current = setting.value.toInt - if (!isJavaAtLeast("9") && current > 8) errorFn.apply("-release is only supported on JVM 9 and higher") - if (target.valueSetByUser.map(_.toInt > current).getOrElse(false)) errorFn("-release cannot be less than -target") - } - .withAbbreviation("--release") - .withAbbreviation("-java-output-version") + .withPostSetHook { setting => + val current = setting.value.toInt + if (!isJavaAtLeast("9") && current > 8) errorFn.apply("-release is only supported on JVM 9 and higher") + if (target.valueSetByUser.map(_.toInt > current).getOrElse(false)) errorFn("-release cannot be less than -target") + if (systemPath.isSetByUser) errorFn("-release cannot be used with -system") + //target.value = setting.value // this would trigger deprecation + } + .withAbbreviation("--release") + .withAbbreviation("-java-output-version") def releaseValue: Option[String] = release.valueSetByUser + def systemPathValue: Option[String] = systemPath.valueSetByUser val target = - ChoiceSetting("-target", "target", "Target platform for class files. 
Target < 8 is deprecated; target > 8 uses 8.", - AllTargetVersions, DefaultTargetVersion, AllTargetVersions.map(v => if (v.toInt <= 8) s"uses $v" else "unsupported, uses default 8")) - .withPreSetHook(normalizeTarget) - .withPostSetHook { setting => - if (releaseValue.map(_.toInt < setting.value.toInt).getOrElse(false)) - errorFn("-release cannot be less than -target") - if (!setting.deprecationMessage.isDefined) - if (setting.value.toInt > MaxSupportedTargetVersion) { - setting.withDeprecationMessage(s"Scala 2.12 cannot emit valid class files for targets newer than $MaxSupportedTargetVersion; this is possible with Scala 2.13. Use -release to compile against a specific version of the platform API.") - setting.value = DefaultTargetVersion - } else if (setting.value.toInt < MinSupportedTargetVersion) { - setting.withDeprecationMessage(s"${setting.name}:${setting.value} is deprecated, forcing use of $DefaultTargetVersion") - setting.value = DefaultTargetVersion - } - } - .withAbbreviation("--target") - // Unlike 2.13, don't use `releaseValue.getOrElse(target.value)`, because 2.12 doesn't have a fix for scala-dev#408 - def targetValue: String = target.value + ChoiceSetting("-target", "target", "Target platform for object files.", AllTargetVersions, "8") + .withPreSetHook(normalizeTarget) + .withPostSetHook { setting => + if (releaseValue.map(_.toInt < setting.value.toInt).getOrElse(false)) errorFn("-release cannot be less than -target") + } + .withAbbreviation("--target") + // .withAbbreviation("--Xtarget") + // .withAbbreviation("-Xtarget") + .withAbbreviation("-Xunchecked-java-output-version") + def targetValue: String = target.valueSetByUser.orElse(releaseValue).getOrElse(target.value) val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions. 
See also -Wconf.") withAbbreviation "--unchecked" withPostSetHook { s => if (s.value) Wconf.tryToSet(List(s"cat=unchecked:w")) else Wconf.tryToSet(List(s"cat=unchecked:s")) } - val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") - val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.") - val usemanifestcp = BooleanSetting ("-usemanifestcp", "Utilize the manifest in classpath resolution.") - val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.") - val version = BooleanSetting ("-version", "Print product version and exit.") + val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") withAbbreviation "--unique-id" + val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.") withAbbreviation "--use-java-class-path" + val usemanifestcp = BooleanSetting ("-usemanifestcp", "Utilize the manifest in classpath resolution.") withAbbreviation "--use-manifest-class-path" + val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.") withAbbreviation "--verbose" + val version = BooleanSetting ("-version", "Print product version and exit.") withAbbreviation "--version" // Support passe prefixes of -target values: // - `jvm-` (from back when we also had `msil`) // - `1.` (from back when Java 2 was a possibility) + // Otherwise, `-release` could be `IntSetting`. 
private def normalizeTarget(in: String): String = { - val oldTarget = raw"1\.([5-8])".r - val oldJvm = raw"jvm-1\.([5-8])".r val jvmish = raw"jvm-(\d*)".r in match { - case oldJvm(n) => n - case oldTarget(n) => n + case "1.8" | "jvm-1.8" => "8" case jvmish(n) => n case n => n } @@ -112,17 +118,12 @@ trait StandardScalaSettings { _: MutableSettings => } object StandardScalaSettings { - // not final in case some separately compiled client code wanted to depend on updated values - val MinTargetVersion = 5 - val MinSupportedTargetVersion = 8 + val MinTargetVersion = 8 val MaxTargetVersion = ScalaVersion(javaSpecVersion) match { case SpecificScalaVersion(1, minor, _, _) => minor case SpecificScalaVersion(major, _, _, _) => major - case _ => 23 + case _ => 25 } - val MaxSupportedTargetVersion = 8 - val DefaultTargetVersion = "8" - private val AllTargetVersions = (MinTargetVersion to MaxTargetVersion).map(_.toString).toList - val AllPermissibleTargetValues: List[String] = AllTargetVersions.flatMap(v => v :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil) + private val AllTargetVersions = (MinTargetVersion to MaxTargetVersion).map(_.toString).to(List) } diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 3f0ae6357e66..57b1a687ce7d 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,7 @@ package scala.tools package nsc package settings +import scala.annotation.nowarn import scala.tools.nsc.Reporting.WarningCategory /** Settings influencing the printing of warnings. 
@@ -21,11 +22,12 @@ import scala.tools.nsc.Reporting.WarningCategory trait Warnings { self: MutableSettings => + val Whelp = BooleanSetting("-W", "Print a synopsis of warning options.") + // Warning semantics. - val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.") - .withAbbreviation("-Werror") + val fatalWarnings = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.") withAbbreviation "-Xfatal-warnings" - private val WconfDefault = List("cat=deprecation:ws", "cat=feature:ws", "cat=optimizer:ws") + val WconfDefault = List("cat=deprecation:ws", "cat=feature:ws", "cat=optimizer:ws") // Note: user-defined settings are added on the right, but the value is reversed before // it's parsed, so that later defined settings take precedence. val Wconf = MultiStringSetting( @@ -38,8 +40,8 @@ trait Warnings { |Syntax: -Wconf::,:,... |multiple are combined with &, i.e., &...& | - |Note: Run with `-Wconf:any:warning-verbose` to print warnings with their category, site, - |and (for deprecations) origin and since-version. + |Use the `@nowarn("verbose")` / `@nowarn("v")` annotation or `-Wconf:any:warning-verbose` + |to print applicable message filters with every warning. | | | - Any message: any @@ -71,7 +73,7 @@ trait Warnings { | - error / e | - warning / w | - warning-summary / ws (summary with the number of warnings, like for deprecations) - | - warning-verbose / wv (show warning category and site) + | - warning-verbose / wv (show applicable filters with each warning) | - info / i (infos are not counted as warnings and don't affect `-Werror`) | - info-summary / is | - info-verbose / iv @@ -80,8 +82,13 @@ trait Warnings { |The default configuration is: | -Wconf:${WconfDefault.mkString(",")} | - |User-defined configurations are added to the left. The leftmost rule matching - |a warning message defines the action. 
+ |Under -Xsource:3-cross, the category of scala3-migration warnings are errors by default: + | -Wconf:cat=scala3-migration:e + |Under -Xsource:3-migration, they are warnings: + | -Wconf:cat=scala3-migration:w + | + |User-defined configurations override previous settings, such that the last matching + |configuration defines the action for a given diagnostic message. | |Examples: | - change every warning into an error: -Wconf:any:error @@ -96,23 +103,48 @@ trait Warnings { |to prevent the shell from expanding patterns.""".stripMargin), prepend = true) - // Non-lint warnings. -- TODO turn into MultiChoiceEnumeration + // Non-lint warnings. val warnMacros = ChoiceSetting( - name = "-Ywarn-macros", + name = "-Wmacros", helpArg = "mode", descr = "Enable lint warnings on macro expansions.", - choices = List("none", "before", "after", "both"), - default = "before", + choices = List("none", "before", "after", "both", "default"), + default = "default", choicesHelp = List( "Do not inspect expansions or their original trees when generating unused symbol warnings.", "Only inspect unexpanded user-written code for unused symbols.", "Only inspect expanded trees when generating unused symbol warnings.", - "Inspect both user-written code and expanded trees when generating unused symbol warnings." 
+ "Inspect both user-written code and expanded trees when generating unused symbol warnings.", + "Only inspect unexpanded user-written code for unused symbols but include usages in expansions.", ) + ) withAbbreviation "-Ywarn-macros" + val warnDeadCode = BooleanSetting("-Wdead-code", "Warn when dead code is identified.") withAbbreviation "-Ywarn-dead-code" + val warnNonUnitIf = BooleanSetting("-Wnonunit-if", "Warn when if statements are non-Unit expressions, enabled by -Wnonunit-statement.") + import scala.language.existentials + val warnNonUnitStatement = BooleanSetting("-Wnonunit-statement", "Warn when block statements are non-Unit expressions.") + .enablingIfNotSetByUser(warnNonUnitIf :: Nil) + val warnValueDiscard = BooleanSetting("-Wvalue-discard", "Warn when non-Unit expression results are unused.") withAbbreviation "-Ywarn-value-discard" + val warnNumericWiden = BooleanSetting("-Wnumeric-widen", "Warn when numerics are widened.") withAbbreviation "-Ywarn-numeric-widen" + val warnOctalLiteral = BooleanSetting("-Woctal-literal", "Warn on obsolete octal syntax.") withAbbreviation "-Ywarn-octal-literal" + val warnUnnamedBoolean = BooleanSetting("-Wunnamed-boolean-literal", "Warn about unnamed boolean literals if there is more than one or defaults are used, unless parameter has @deprecatedName.") + val warnUnnamedStrict = BooleanSetting("-Wunnamed-boolean-literal-strict", "Warn about all unnamed boolean literals, unless parameter has @deprecatedName or the method has a single leading boolean parameter.").enabling(warnUnnamedBoolean :: Nil) + val warnToString = BooleanSetting("-Wtostring-interpolated", "Warn when a standard interpolator uses toString.") + val warnMultiargInfix = BooleanSetting("-Wmultiarg-infix", "Infix operator was defined or used with multiarg operand.") + def multiargInfix = warnMultiargInfix.value + + object PerformanceWarnings extends MultiChoiceEnumeration { + val Captured = Choice("captured", "Modification of var in closure causes boxing.") 
+ val NonlocalReturn = Choice("nonlocal-return", "A return statement used an exception for flow control.") + } + val warnPerformance = MultiChoiceSetting( + name = "-Wperformance", + helpArg = "warning", + descr = "Enable or disable specific lints for performance", + domain = PerformanceWarnings, + default = Some(List("_")) ) - val warnDeadCode = BooleanSetting("-Ywarn-dead-code", "Warn when dead code is identified.") - val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.") - val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.") + def warnCaptured = warnPerformance.contains(PerformanceWarnings.Captured) + def warnNonlocalReturn = warnPerformance.contains(PerformanceWarnings.NonlocalReturn) object UnusedWarnings extends MultiChoiceEnumeration { val Imports = Choice("imports", "Warn if an import selector is not referenced.") @@ -121,36 +153,35 @@ trait Warnings { val Locals = Choice("locals", "Warn if a local definition is unused.") val Explicits = Choice("explicits", "Warn if an explicit parameter is unused.") val Implicits = Choice("implicits", "Warn if an implicit parameter is unused.") + val Synthetics = Choice("synthetics", "Warn if a synthetic implicit parameter (context bound) is unused.") val Nowarn = Choice("nowarn", "Warn if a @nowarn annotation does not suppress any warnings.") - val Params = Choice("params", "Enable -Ywarn-unused:explicits,implicits.", expandsTo = List(Explicits, Implicits)) + val Params = Choice("params", "Enable -Wunused:explicits,implicits,synthetics.", expandsTo = List(Explicits, Implicits, Synthetics)) val Linted = Choice("linted", "-Xlint:unused.", expandsTo = List(Imports, Privates, Locals, Implicits, Nowarn)) } // The -Ywarn-unused warning group. 
val warnUnused = MultiChoiceSetting( - name = "-Ywarn-unused", + name = "-Wunused", helpArg = "warning", - descr = "Enable or disable specific `unused' warnings", + descr = "Enable or disable specific `unused` warnings", domain = UnusedWarnings, default = Some(List("_")) - ) + ) withAbbreviation "-Ywarn-unused" def warnUnusedImport = warnUnused contains UnusedWarnings.Imports def warnUnusedPatVars = warnUnused contains UnusedWarnings.PatVars def warnUnusedPrivates = warnUnused contains UnusedWarnings.Privates def warnUnusedLocals = warnUnused contains UnusedWarnings.Locals - def warnUnusedParams = warnUnusedExplicits || warnUnusedImplicits + def warnUnusedParams = warnUnusedExplicits || warnUnusedImplicits || warnUnusedSynthetics def warnUnusedExplicits = warnUnused contains UnusedWarnings.Explicits def warnUnusedImplicits = warnUnused contains UnusedWarnings.Implicits + def warnUnusedSynthetics = warnUnused contains UnusedWarnings.Synthetics def warnUnusedNowarn = warnUnused contains UnusedWarnings.Nowarn - BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.") withPostSetHook { s => - warnUnused.add(s"${if (s) "" else "-"}imports") - } //withDeprecationMessage s"Enable -Ywarn-unused:imports" - - val warnExtraImplicit = BooleanSetting("-Ywarn-extra-implicit", "Warn when more than one implicit parameter section is defined.") + val warnExtraImplicit = BooleanSetting("-Wextra-implicit", "Warn when more than one implicit parameter section is defined.") withAbbreviation "-Ywarn-extra-implicit" - val warnSelfImplicit = BooleanSetting("-Ywarn-self-implicit", "Warn when an implicit resolves to an enclosing self-definition.") + @deprecated("Use lintImplicitRecursion", since="2.13.3") + val warnSelfImplicit = BooleanSetting("-Wself-implicit", "An implicit resolves to an enclosing definition.") withAbbreviation "-Ywarn-self-implicit" withDeprecationMessage "Use -Xlint:implicit-recursion" // Experimental lint warnings that are turned off, but which could be 
turned on programmatically. // They are not activated by -Xlint and can't be enabled on the command line because they are not @@ -161,29 +192,44 @@ trait Warnings { // Lint warnings object LintWarnings extends MultiChoiceEnumeration { - class LintWarning(name: String, help: String, val yAliased: Boolean) extends Choice(name, help) - def LintWarning(name: String, help: String, yAliased: Boolean = false) = new LintWarning(name, help, yAliased) - - val AdaptedArgs = LintWarning("adapted-args", "Warn if an argument list is modified to match the receiver.", true) - val NullaryUnit = LintWarning("nullary-unit", "Warn when nullary methods return Unit.", true) - val Inaccessible = LintWarning("inaccessible", "Warn about inaccessible types in method signatures.", true) - val NullaryOverride = LintWarning("nullary-override", "Warn when non-nullary `def f()' overrides nullary `def f'.", true) - val InferAny = LintWarning("infer-any", "Warn when a type argument is inferred to be `Any`.", true) + class LintWarning(name: String, help: String) extends Choice(name, help) + def LintWarning(name: String, help: String) = new LintWarning(name, help) + + val AdaptedArgs = LintWarning("adapted-args", "An argument list was modified to match the receiver.") + val NullaryUnit = LintWarning("nullary-unit", "`def f: Unit` looks like an accessor; add parens to look side-effecting.") + val Inaccessible = LintWarning("inaccessible", "Warn about inaccessible types in method signatures.") + val InferStructural = LintWarning("infer-structural", "Warn on definitions with an inferred structural type.") + val InferAny = LintWarning("infer-any", "A type argument was inferred as Any.") val MissingInterpolator = LintWarning("missing-interpolator", "A string literal appears to be missing an interpolator id.") - val DocDetached = LintWarning("doc-detached", "A Scaladoc comment appears to be detached from its element.") + val DocDetached = LintWarning("doc-detached", "When running scaladoc, warn if a doc 
comment is discarded.") val PrivateShadow = LintWarning("private-shadow", "A private field (or class parameter) shadows a superclass field.") val TypeParameterShadow = LintWarning("type-parameter-shadow", "A local type parameter shadows a type already in scope.") val PolyImplicitOverload = LintWarning("poly-implicit-overload", "Parameterized overloaded implicit methods are not visible as view bounds.") - val OptionImplicit = LintWarning("option-implicit", "Option.apply used implicit view.") + val OptionImplicit = LintWarning("option-implicit", "Option.apply used an implicit view.") val DelayedInitSelect = LintWarning("delayedinit-select", "Selecting member of DelayedInit.") - val ByNameRightAssociative = LintWarning("by-name-right-associative", "By-name parameter of right associative operator.") val PackageObjectClasses = LintWarning("package-object-classes", "Class or object defined in package object.") - val UnsoundMatch = LintWarning("unsound-match", "Pattern match may not be typesafe.") - val StarsAlign = LintWarning("stars-align", "Pattern sequence wildcard must align with sequence component.") - val Constant = LintWarning("constant", "Evaluation of a constant arithmetic expression results in an error.") - val Unused = LintWarning("unused", "Enable -Ywarn-unused:imports,privates,locals,implicits,nowarn.") + val StarsAlign = LintWarning("stars-align", "In a pattern, a sequence wildcard `_*` should match all of a repeated parameter.") + val StrictUnsealedPatMat = LintWarning("strict-unsealed-patmat", "Pattern match on an unsealed class without a catch-all.") + val Constant = LintWarning("constant", "Evaluation of a constant arithmetic expression resulted in an error.") + val Unused = LintWarning("unused", "Enable -Wunused:imports,privates,locals,implicits,nowarn.") + val NonlocalReturn = LintWarning("nonlocal-return", "A return statement used an exception for flow control.") + val ImplicitNotFound = LintWarning("implicit-not-found", "Check @implicitNotFound and 
@implicitAmbiguous messages.") + val Serial = LintWarning("serial", "@SerialVersionUID on traits and non-serializable classes.") + val ValPattern = LintWarning("valpattern", "Enable pattern checks in val definitions.") + val EtaZero = LintWarning("eta-zero", "Usage `f` of parameterless `def f()` resulted in eta-expansion, not empty application `f()`.") + val EtaSam = LintWarning("eta-sam", "A method reference was eta-expanded but the expected SAM type was not annotated @FunctionalInterface.") val Deprecation = LintWarning("deprecation", "Enable -deprecation and also check @deprecated annotations.") + val ByNameImplicit = LintWarning("byname-implicit", "Block adapted by implicit with by-name parameter.") + val RecurseWithDefault = LintWarning("recurse-with-default", "Recursive call used default argument.") + val UnitSpecialization = LintWarning("unit-special", "Warn for specialization of Unit in parameter position.") + val ImplicitRecursion = LintWarning("implicit-recursion", "Implicit resolves to an enclosing definition.") + val UniversalMethods = LintWarning("universal-methods", "Dubious usage of member of `Any` or `AnyRef`.") + val NumericMethods = LintWarning("numeric-methods", "Dubious usages, such as `42.isNaN`.") + val ArgDiscard = LintWarning("arg-discard", "-Wvalue-discard for adapted arguments.") val IntDivToFloat = LintWarning("int-div-to-float", "Warn when an integer division is converted (widened) to floating point: `(someInt / 2): Double`.") + val PatternShadow = LintWarning("pattern-shadow", "Pattern variable id is also a term in scope.") + val CloneableObject = LintWarning("cloneable", "Modules (objects) should not be Cloneable.") + val DubiousOverload = LintWarning("overload", "Overload differs only in an implicit parameter.") def allLintWarnings = values.toSeq.asInstanceOf[Seq[LintWarning]] } @@ -192,7 +238,7 @@ trait Warnings { def warnAdaptedArgs = lint contains AdaptedArgs def warnNullaryUnit = lint contains NullaryUnit def warnInaccessible = 
lint contains Inaccessible - def warnNullaryOverride = lint contains NullaryOverride + def warnInferStructural = lint contains InferStructural def warnInferAny = lint contains InferAny def warnMissingInterpolator = lint contains MissingInterpolator def warnDocDetached = lint contains DocDetached @@ -201,52 +247,48 @@ trait Warnings { def warnPolyImplicitOverload = lint contains PolyImplicitOverload def warnOptionImplicit = lint contains OptionImplicit def warnDelayedInit = lint contains DelayedInitSelect - def warnByNameRightAssociative = lint contains ByNameRightAssociative def warnPackageObjectClasses = lint contains PackageObjectClasses - def warnUnsoundMatch = lint contains UnsoundMatch + def warnStrictUnsealedPatMat = lint contains StrictUnsealedPatMat def warnStarsAlign = lint contains StarsAlign def warnConstant = lint contains Constant def lintUnused = lint contains Unused + def lintImplicitNotFound = lint contains ImplicitNotFound + def warnSerialization = lint contains Serial + def lintValPatterns = lint contains ValPattern + def warnEtaZero = lint contains EtaZero + def warnEtaSam = lint contains EtaSam def lintDeprecation = lint contains Deprecation - def lintIntDivToFloat = lint contains IntDivToFloat - - // Lint warnings that are currently -Y, but deprecated in that usage - @deprecated("Use warnAdaptedArgs", since="2.11.2") - def YwarnAdaptedArgs = warnAdaptedArgs - @deprecated("Use warnNullaryUnit", since="2.11.2") - def YwarnNullaryUnit = warnNullaryUnit - @deprecated("Use warnInaccessible", since="2.11.2") - def YwarnInaccessible = warnInaccessible - @deprecated("Use warnNullaryOverride", since="2.11.2") - def YwarnNullaryOverride = warnNullaryOverride - @deprecated("Use warnInferAny", since="2.11.2") - def YwarnInferAny = warnInferAny + def warnByNameImplicit = lint contains ByNameImplicit + def warnRecurseWithDefault = lint contains RecurseWithDefault + def unitSpecialization = lint contains UnitSpecialization + def lintImplicitRecursion = 
lint.contains(ImplicitRecursion) || (warnSelfImplicit.value: @nowarn("cat=deprecation")) + def lintUniversalMethods = lint.contains(UniversalMethods) + def lintNumericMethods = lint.contains(NumericMethods) + def lintArgDiscard = lint.contains(ArgDiscard) + def lintIntDivToFloat = lint.contains(IntDivToFloat) + def warnPatternShadow = lint.contains(PatternShadow) + def warnCloneableObject = lint.contains(CloneableObject) + def warnDubiousOverload = lint.contains(DubiousOverload) // The Xlint warning group. val lint = MultiChoiceSetting( name = "-Xlint", helpArg = "warning", - descr = "Enable or disable specific warnings", + descr = "Enable recommended warnings", domain = LintWarnings, default = Some(List("_")) ).withPostSetHook { s => if (s contains Unused) warnUnused.enable(UnusedWarnings.Linted) else warnUnused.disable(UnusedWarnings.Linted) if (s.contains(Deprecation) && deprecation.isDefault) deprecation.value = true + if (s.contains(NonlocalReturn)) warnPerformance.enable(PerformanceWarnings.NonlocalReturn) + else warnPerformance.disable(PerformanceWarnings.NonlocalReturn) } - allLintWarnings foreach { - case w if w.yAliased => - BooleanSetting(s"-Ywarn-${w.name}", {w.help}) withPostSetHook { s => - lint.add(if (s) w.name else s"-${w.name}") - } // withDeprecationMessage s"Enable -Xlint:${c._1}" - case _ => - } - - private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.") - // Backward compatibility. 
@deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt + + private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.") @deprecated("This option is being removed", "2.11.0") def Xchecknull = warnSelectNullable // used by ide @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide } diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala index 6314a0fe7955..289640c04eb0 100644 --- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -54,7 +54,7 @@ abstract class BrowsingLoaders extends GlobalSymbolLoaders { val memberSourceFile = member.sourceFile if (memberSourceFile != null) { if (existingSourceFile != memberSourceFile) - globalError(member+"is defined twice,"+ + globalError(""+member+"is defined twice,"+ "\n in "+existingSourceFile+ "\n and also in "+memberSourceFile) } @@ -66,7 +66,7 @@ abstract class BrowsingLoaders extends GlobalSymbolLoaders { /** Browse the top-level of given abstract file `src` and enter * any encountered top-level classes and modules in `root` */ - def browseTopLevel(root: Symbol, src: AbstractFile) { + def browseTopLevel(root: Symbol, src: AbstractFile): Unit = { class BrowserTraverser extends Traverser { var packagePrefix = "" @@ -92,8 +92,8 @@ abstract class BrowsingLoaders extends GlobalSymbolLoaders { } override def traverse(tree: Tree): Unit = tree match { - case PackageDef(pkg, body) => - inPackagePrefix(pkg) { body foreach traverse } + case PackageDef(pid, stats) => + inPackagePrefix(pid) { stats.foreach(traverse) } case ClassDef(_, name, 
_, _) => if (packagePrefix == root.fullName) { @@ -125,7 +125,7 @@ abstract class BrowsingLoaders extends GlobalSymbolLoaders { /** Enter top-level symbols from a source file */ - override def enterToplevelsFromSource(root: Symbol, name: TermName, src: AbstractFile) { + override def enterToplevelsFromSource(root: Symbol, name: TermName, src: AbstractFile): Unit = { try { if (root.isEffectiveRoot || !src.name.endsWith(".scala")) // RootClass or EmptyPackageClass super.enterToplevelsFromSource(root, name, src) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index c9b845313bb9..c193040c2400 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,16 +16,16 @@ package symtab import classfile.{ClassfileParser, ReusableDataReader} import java.io.IOException +import scala.annotation._ import scala.reflect.internal.MissingRequirementError -import scala.reflect.io.{AbstractFile, NoAbstractFile} -import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.reflect.internal.util.ReusableInstance +import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.util.{ClassPath, ClassRepresentation} /** This class ... 
* * @author Martin Odersky - * @version 1.0 */ abstract class SymbolLoaders { val symbolTable: symtab.SymbolTable { @@ -51,7 +51,7 @@ abstract class SymbolLoaders { // forwards to runReporting.warning, but we don't have global in scope here def warning(pos: Position, msg: String, category: WarningCategory, site: String): Unit - protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = { + protected def enterIfNew(owner: Symbol, member: Symbol, @unused completer: SymbolLoader): Symbol = { assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name) owner.info.decls enter member member @@ -138,7 +138,7 @@ abstract class SymbolLoaders { /** Enter class and module with given `name` into scope of `root` * and give them `completer` as type. */ - def enterClassAndModule(root: Symbol, name: TermName, getCompleter: (ClassSymbol, ModuleSymbol) => SymbolLoader) { + def enterClassAndModule(root: Symbol, name: TermName, getCompleter: (ClassSymbol, ModuleSymbol) => SymbolLoader): Unit = { val clazz0 = root.newClass(name.toTypeName) val module0 = root.newModule(name) val completer = getCompleter(clazz0, module0) @@ -168,7 +168,7 @@ abstract class SymbolLoaders { * with source completer for given `src` as type. * (overridden in interactive.Global). 
*/ - def enterToplevelsFromSource(root: Symbol, name: TermName, src: AbstractFile) { + def enterToplevelsFromSource(root: Symbol, name: TermName, src: AbstractFile): Unit = { enterClassAndModule(root, name, (_, _) => new SourcefileLoader(src)) } @@ -185,14 +185,14 @@ abstract class SymbolLoaders { /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep` */ - def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation) { + def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation): Unit = { ((classRep.binary, classRep.source) : @unchecked) match { case (Some(bin), Some(src)) if platform.needCompile(bin, src) && !binaryOnly(owner, nameOf(classRep)) => - if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path) + if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path) enterToplevelsFromSource(owner, nameOf(classRep), src) case (None, Some(src)) => - if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path) + if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path) enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => enterClassAndModule(owner, nameOf(classRep), new ClassfileLoader(bin, _, _)) @@ -232,7 +232,7 @@ abstract class SymbolLoaders { private var ok = false - private def setSource(sym: Symbol) { + private def setSource(sym: Symbol): Unit = { sourcefile foreach (sf => sym match { case cls: ClassSymbol => cls.associatedFile = sf case mod: ModuleSymbol => mod.moduleClass.associatedFile = sf @@ -240,16 +240,16 @@ abstract class SymbolLoaders { }) } - override def complete(root: Symbol) { + override def complete(root: Symbol): Unit = { val assocFile = associatedFile(root) currentRunProfilerBeforeCompletion(root, assocFile) try { try { - val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - val 
currentphase = phase - doComplete(root) - phase = currentphase - if (settings.verbose) informTime("loaded " + description, start) + informingProgress("loaded " + description) { + val currentphase = phase + try doComplete(root) + finally phase = currentphase + } ok = true setSource(root) setSource(root.companionSymbol) // module -> class, class -> module @@ -266,7 +266,7 @@ abstract class SymbolLoaders { } } - override def load(root: Symbol) { complete(root) } + override def load(root: Symbol): Unit = { complete(root) } private def markAbsent(sym: Symbol): Unit = { val tpe: Type = if (ok) NoType else ErrorType @@ -274,7 +274,7 @@ abstract class SymbolLoaders { if (sym != NoSymbol) sym setInfo tpe } - private def initRoot(root: Symbol) { + private def initRoot(root: Symbol): Unit = { if (root.rawInfo == this) List(root, root.moduleClass) foreach markAbsent else if (root.isClass && !root.isModuleClass) @@ -291,7 +291,7 @@ abstract class SymbolLoaders { s"package loader $shownPackageName" } - protected def doComplete(root: Symbol) { + protected def doComplete(root: Symbol): Unit = { assert(root.isPackageClass, root) root.setInfo(new PackageClassInfoType(newScope, root)) @@ -314,7 +314,7 @@ abstract class SymbolLoaders { } } } - private lazy val classFileDataReader: ReusableInstance[ReusableDataReader] = new ReusableInstance[ReusableDataReader](() => new ReusableDataReader(), enabled = isCompilerUniverse) + private lazy val classFileDataReader: ReusableInstance[ReusableDataReader] = ReusableInstance[ReusableDataReader](new ReusableDataReader(), initialSize = 1, enabled = isCompilerUniverse) class ClassfileLoader(val classfile: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol) extends SymbolLoader with FlagAssigningCompleter { private object classfileParser extends { val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable @@ -359,7 +359,7 @@ abstract class SymbolLoaders { object moduleClassLoader extends SymbolLoader with 
FlagAssigningCompleter { protected def description = "module class loader" - protected def doComplete(root: Symbol) { root.sourceModule.initialize } + protected def doComplete(root: Symbol): Unit = { root.sourceModule.initialize } override def associatedFile(self: Symbol): AbstractFile = { val sourceModule = self.sourceModule sourceModule.rawInfo match { diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala index d562c715e493..2220065ea955 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index 7642e496c9f2..50b901372762 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,6 @@ package scala.tools.nsc package symtab import scala.language.implicitConversions -import scala.language.postfixOps /** Printing the symbol graph (for those symbols attached to an AST node) * after each phase. 
@@ -53,8 +52,8 @@ trait SymbolTrackers { def dropSymbol(sym: Symbol) = sym.ownerChain exists (_ hasFlag Flags.SPECIALIZED) def symbolSnapshot(unit: CompilationUnit): Map[Symbol, Set[Tree]] = { - if (unit.body == null) Map() - else unit.body filter containsSymbol groupBy (_.symbol) mapValues (_.toSet) toMap + if (unit.body == null) Map.empty + else unit.body.filter(containsSymbol).groupBy(_.symbol).view.mapValues(_.toSet).toMap } def apply(unit: CompilationUnit) = new SymbolTracker( () => symbolSnapshot(unit) filterNot { case (k, _) => dropSymbol(k) } @@ -138,7 +137,7 @@ trait SymbolTrackers { val s = sym.defString take 240 if (s.length == 240) s + "..." else s } - else sym + changedOwnerString + flagSummaryString + else "" + sym + changedOwnerString + flagSummaryString ) def flatten = children.foldLeft(Set(root))(_ ++ _.flatten) @@ -148,7 +147,7 @@ trait SymbolTrackers { else { indicatorString + indent + symString(root) + ( if (children.isEmpty) "" - else children map (c => c.indentString(indent + " ")) mkString ("\n", "\n", "") + else children.map(_.indentString(indent + " ")).mkString("\n", "\n", "") ) } } @@ -176,28 +175,26 @@ trait SymbolTrackers { val change = Change(added, removed, prevMap, owners, flags) prevMap = currentMap - prevOwners = current map (s => (s, s.owner)) toMap; - prevFlags = current map (s => (s, (s.flags & flagsMask))) toMap; + prevOwners = current.map(s => (s, s.owner)).toMap + prevFlags = current.map(s => (s, (s.flags & flagsMask))).toMap history = change :: history } def show(label: String): String = { val hierarchy = Node(current) val Change(_, removed, symMap, _, _) = history.head def detailString(sym: Symbol) = { - val ownerString = sym.ownerChain splitAt 3 match { - case (front, back) => - val xs = if (back.isEmpty) front else front :+ "..." 
- xs mkString " -> " - } - val treeStrings = symMap(sym) map { t => - "%10s: %s".format(t.shortClass, t) + val ownerString = { + val (few, rest) = sym.ownersIterator.splitAt(3) + val ellipsis = Iterator("...").filter(_ => rest.hasNext) + few.map(_.toString).concat(ellipsis).mkString(" -> ") } + val treeStrings = symMap(sym).map(t => f"${t.shortClass}%10s: $t") - ownerString :: treeStrings mkString "\n" + (ownerString :: treeStrings).mkString("\n") } - def removedString = (removed: List[Symbol]).zipWithIndex map { + def removedString = (removed: List[Symbol]).zipWithIndex.map { case (t, i) => "(%2s) ".format(i + 1) + detailString(t) - } mkString "\n" + }.mkString("\n") "" + hierarchy + ( if (removed.isEmpty) "" diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 389a22dfe5f0..0080d97908d4 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,13 +24,10 @@ import scala.tools.nsc.io.AbstractFile * This class reads files byte per byte. 
Only used by ClassFileParser * * @author Philippe Altherr - * @version 1.0, 23/03/2004 */ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { - @deprecated("Use other constructor", "2.12.13") - def this(file: AbstractFile) { - this(file.toByteArray) - } + @deprecated("Use other constructor", "2.13.0") + def this(file: AbstractFile) = this(file.toByteArray) /** the current input pointer */ @@ -68,9 +65,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { def getByte(mybp: Int): Byte = buf(mybp) - def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = System.arraycopy(buf, mybp, bytes, 0, bytes.length) - } /** extract a character at position bp from buf */ @@ -96,9 +92,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { */ def getDouble(mybp: Int): Double = longBitsToDouble(getLong(mybp)) - def getUTF(mybp: Int, len: Int): String = { + def getUTF(mybp: Int, len: Int): String = new DataInputStream(new ByteArrayInputStream(buf, mybp, len)).readUTF - } /** skip next 'n' bytes */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 2c5bb30a9e93..1fcc7d69e05d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,27 +15,28 @@ package tools.nsc package symtab package classfile -import java.io.{File, IOException} +import java.io.IOException import java.lang.Integer.toHexString -import scala.collection.{immutable, mutable} -import scala.collection.mutable.{ArrayBuffer, ListBuffer} -import scala.annotation.switch +import scala.annotation._ +import scala.collection.{immutable, mutable}, mutable.{ArrayBuffer, ListBuffer} import scala.reflect.internal.JavaAccFlags import scala.reflect.internal.pickling.ByteCodecs import scala.reflect.internal.util.ReusableInstance import scala.reflect.io.NoAbstractFile import scala.tools.nsc.Reporting.WarningCategory -import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.util.ClassPath +import scala.tools.nsc.tasty.{TastyUniverse, TastyUnpickler} import scala.util.control.NonFatal /** This abstract class implements a class file parser. * * @author Martin Odersky - * @version 1.0 */ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { + import ClassfileParser._ + val symbolTable: SymbolTable { def settings: Settings } @@ -75,6 +76,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { protected var classTParams = Map[Name,Symbol]() protected var srcfile0 : Option[AbstractFile] = None protected def moduleClass: Symbol = staticModule.moduleClass + private var YtastyReader = false private def ownerForFlags(jflags: JavaAccFlags) = if (jflags.isStatic) moduleClass else clazz @@ -89,20 +91,26 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { protected final def u4(): Int = in.nextInt protected final def s1(): Int = in.nextByte.toInt // sign-extend the byte to int - protected final def s2(): Int = (in.nextByte.toInt << 8) | u1 // sign-extend and shift the first byte, or with the unsigned second byte + protected final def s2(): Int = (in.nextByte.toInt << 8) | u1() // sign-extend and shift the first byte, or with the 
unsigned second byte private def readInnerClassFlags() = readClassFlags() - private def readClassFlags() = JavaAccFlags classFlags u2 - private def readMethodFlags() = JavaAccFlags methodFlags u2 - private def readFieldFlags() = JavaAccFlags fieldFlags u2 + private def readClassFlags() = JavaAccFlags classFlags u2() + private def readMethodFlags() = JavaAccFlags methodFlags u2() + private def readFieldFlags() = JavaAccFlags fieldFlags u2() private def readTypeName() = readName().toTypeName - private def readName() = pool.getName(u2).name - private def readType() = pool getType u2 + private def readName() = pool.getName(u2()).name + @annotation.unused + private def readType() = pool getType u2() private object unpickler extends scala.reflect.internal.pickling.UnPickler { val symbolTable: ClassfileParser.this.symbolTable.type = ClassfileParser.this.symbolTable } + object TastyUniverse extends TastyUniverse { + type SymbolTable = ClassfileParser.this.symbolTable.type + val symbolTable: SymbolTable = ClassfileParser.this.symbolTable + } + private def handleMissing(e: MissingRequirementError) = { if (settings.isDebug) e.printStackTrace throw new IOException(s"Missing dependency '${e.req}', required by $file") @@ -148,12 +156,24 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { this.clazz = clazz this.staticModule = module this.isScala = false + this.YtastyReader = settings.YtastyReader.value - val magic = in.getInt(in.bp) - if (magic != JAVA_MAGIC && file.name.endsWith(".sig")) { + val isJavaMagic = in.getInt(in.bp) == JAVA_MAGIC + if (!isJavaMagic && file.name.endsWith(".sig")) { currentClass = clazz.javaClassName isScala = true unpickler.unpickle(in.buf.take(file.sizeOption.get), 0, clazz, staticModule, file.name) + } else if (!isJavaMagic && file.name.endsWith(".tasty")) { + if (!YtastyReader) + MissingRequirementError.signal(s"Add -Ytasty-reader to scalac options to parse the TASTy in $file") + + // TODO [tasty]: it seems tests don't 
fail if we remove this, but previously this + // was added for the following reason: + // > Force scala.AnyRef, otherwise we get "error: Symbol AnyRef is missing from the classpath" + AnyRefClass + + val bytes = in.buf.take(file.sizeOption.get) + TastyUnpickler.unpickle(TastyUniverse)(bytes, clazz, staticModule, file.path) } else { parseHeader() this.pool = new ConstantPool @@ -165,12 +185,12 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } } - private def parseHeader() { - val magic = u4 + private def parseHeader(): Unit = { + val magic = u4() if (magic != JAVA_MAGIC) abort(s"class file ${file} has wrong magic number 0x${toHexString(magic)}") - val minor, major = u2 + val minor, major = u2() if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION) abort(s"class file ${file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") } @@ -183,18 +203,17 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } } - def getClassSymbol(name: String): Symbol = { + def getClassSymbol(name: String): Symbol = name match { - case name if name.endsWith(nme.MODULE_SUFFIX_STRING) => rootMirror getModuleByName newTermName(name).dropModule - case name => classNameToSymbol(name) + case name if name.endsWith(nme.MODULE_SUFFIX_STRING) => rootMirror.getModuleByName(name.stripSuffix(nme.MODULE_SUFFIX_STRING)) + case name => classNameToSymbol(name) } - } /** * Constructor of this class should not be called directly, use `newConstantPool` instead. 
*/ protected class ConstantPool { - protected val len = u2 + protected val len = u2() protected val starts = new Array[Int](len) protected val values = new Array[AnyRef](len) protected val internalized = new Array[NameOrString](len) @@ -205,8 +224,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { while (i < starts.length) { starts(i) = in.bp i += 1 - (u1: @switch) match { - case CONSTANT_UTF8 | CONSTANT_UNICODE => in skip u2 + (u1(): @switch) match { + case CONSTANT_UTF8 | CONSTANT_UNICODE => in skip u2() case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => in skip 2 case CONSTANT_MODULE | CONSTANT_PACKAGE => in skip 2 case CONSTANT_METHODHANDLE => in skip 3 @@ -349,19 +368,28 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { arr } - def getBytes(index: Int): Array[Byte] = ( + /** + * Get an array of bytes stored in the classfile as a string. The data is encoded in the format + * described in object [[scala.reflect.internal.pickling.ByteCodecs]]. Used for the ScalaSignature annotation argument. + */ + def getBytes(index: Int): Array[Byte] = { if (index <= 0 || len <= index) errorBadIndex(index) else values(index) match { case xs: Array[Byte] => xs - case _ => + case _ => val start = firstExpecting(index, CONSTANT_UTF8) - val len = (in getChar start).toInt + val len = (in getChar start).toInt val bytes = new Array[Byte](len) in.getBytes(start + 2, bytes) recordAtIndex(getSubArray(bytes), index) } - ) + } + /** + * Get an array of bytes stored in the classfile as an array of strings. The data is encoded in + * the format described in object [[scala.reflect.internal.pickling.ByteCodecs]]. Used for the ScalaLongSignature annotation + * argument. 
+ */ def getBytes(indices: List[Int]): Array[Byte] = { val head = indices.head values(head) match { @@ -398,7 +426,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // - better owner than `NoSymbol` // - remove eager warning val msg = s"Class $name not found - continuing with a stub." - if ((!settings.isScaladoc) && (settings.verbose || settings.isDeveloper)) + if ((!settings.isScaladoc) && (settings.verbose.value || settings.isDeveloper)) loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) NoSymbol.newStubSymbol(name.toTypeName, msg) } @@ -463,11 +491,11 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { lookupClass(name) } - def parseClass() { + def parseClass(): Unit = { unpickleOrParseInnerClasses() val jflags = readClassFlags() - val classNameIndex = u2 + val classNameIndex = u2() currentClass = pool.getClassName(classNameIndex).value // Ensure that (top-level) classfiles are in the correct directory @@ -479,18 +507,9 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { if (!c.isInstanceOf[StubSymbol] && c != clazz) mismatchError(c) } - // TODO: remove after the next 2.13 milestone - // A bug in the backend caused classes ending in `$` do get only a Scala marker attribute - // instead of a ScalaSig and a Signature annotaiton. This went unnoticed because isScalaRaw - // classes were parsed like Java classes. The below covers the cases in the std lib. 
- def isNothingOrNull = { - val n = clazz.fullName.toString - n == "scala.runtime.Nothing$" || n == "scala.runtime.Null$" - } - if (isScala) { () // We're done - } else if (isScalaRaw && !isNothingOrNull) { + } else if (isScalaRaw) { val decls = clazz.enclosingPackage.info.decls for (c <- List(clazz, staticModule, staticModule.moduleClass)) { c.setInfo(NoType) @@ -506,10 +525,10 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { staticScope = newScope val staticInfo = ClassInfoType(List(), staticScope, moduleClass) - val parentIndex = u2 + val parentIndex = u2() val parentName = if (parentIndex == 0) null else pool.getClassName(parentIndex) - val ifaceCount = u2 - val ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClassName(u2) + val ifaceCount = u2() + val ifaces = List.fill(ifaceCount.toInt)(pool.getSuperClassName(index = u2())) val completer = new ClassTypeCompleter(clazz.name, jflags, parentName, ifaces) enterOwnInnerClasses() @@ -530,9 +549,9 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { parseAttributes(clazz, completer) in.bp = fieldsStartBp - 0 until u2 foreach (_ => parseField()) - 0 until u2 foreach (_ => parseMethod()) - val needsConstructor = (sflags & JAVA_ANNOTATION) != 0 + u2() times parseField() + u2() times parseMethod() + val needsConstructor = (sflags & JAVA_ANNOTATION) != 0L if (needsConstructor) instanceScope enter clazz.newClassConstructor(NoPosition) @@ -546,26 +565,26 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } /** Add type parameters of enclosing classes */ - def addEnclosingTParams(clazz: Symbol) { + def addEnclosingTParams(clazz: Symbol): Unit = { var sym = clazz.owner while (sym.isClass && !sym.isModuleClass) { for (t <- sym.tpe.typeArgs) - classTParams = classTParams + (t.typeSymbol.name -> t.typeSymbol) + classTParams += (t.typeSymbol.name -> t.typeSymbol) sym = sym.owner } } - def parseField() { + def 
parseField(): Unit = { val jflags = readFieldFlags() val sflags = jflags.toScalaFlags if ((sflags & PRIVATE) != 0L) { in.skip(4); skipAttributes() } else { - val name = readName() - val lazyInfo = new MemberTypeCompleter(name, jflags, pool.getExternalName(u2).value) - val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags) + val name = readName() + val lazyInfo = new MemberTypeCompleter(name, jflags, pool.getExternalName(u2()).value) + val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags) // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR @@ -579,23 +598,17 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { getScope(jflags) enter sym // sealed java enums - if (jflags.isEnum) { - val enumClass = sym.owner.linkedClassOfClass - enumClass match { + if (jflags.isEnum) + sym.owner.linkedClassOfClass match { case NoSymbol => devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.") - case linked => - if (!linked.isSealed) - // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking. See also JavaParsers. - // This is a bit of a hack and requires excluding the ABSTRACT flag in the backend, see method javaClassfileFlags. 
- linked setFlag (SEALED | ABSTRACT) - linked addChild sym + case enumClass => + enumClass.addChild(sym) } - } } } - def parseMethod() { + def parseMethod(): Unit = { val jflags = readMethodFlags() if (jflags.isPrivate) { in.skip(4); skipAttributes() @@ -605,7 +618,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags) // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR - val lazyInfo = new MemberTypeCompleter(name, jflags, pool.getExternalName(u2).value) + val lazyInfo = new MemberTypeCompleter(name, jflags, pool.getExternalName(u2()).value) sym.info = lazyInfo propagatePackageBoundary(jflags, sym) parseAttributes(sym, lazyInfo) @@ -618,7 +631,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { val sigChars = sig.toCharArray var index = 0 val end = sig.length - def accept(ch: Char) { + def accept(ch: Char): Unit = { assert(sig.charAt(index) == ch, (sig.charAt(index), ch)) index += 1 } @@ -641,10 +654,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case BOOL_TAG => BooleanTpe case 'L' => def processInner(tp: Type): Type = tp match { - case TypeRef(pre, sym, args) if (!sym.isStatic) => - typeRef(processInner(pre.widen), sym, args) - case _ => - tp + case TypeRef(pre, sym, args) if !sym.isStatic => typeRef(processInner(pre.widen), sym, args) + case _ => tp } def processClassType(tp: Type): Type = tp match { case TypeRef(pre, classSym, args) => @@ -658,14 +669,14 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case variance @ ('+' | '-' | '*') => index += 1 val bounds = variance match { - case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs))) + case '+' => TypeBounds.upper(sig2type(tparams, skiptvs)) case '-' => val tp = sig2type(tparams, skiptvs) - // sig2type seems to return AnyClass regardless of the 
situation: - // we don't want Any as a LOWER bound. - if (tp.typeSymbol == AnyClass) TypeBounds.empty - else TypeBounds.lower(tp) - case '*' => TypeBounds.empty + // Interpret `sig2type` returning `Any` as "no bounds"; + // morally equivalent to TypeBounds.empty, but we're representing Java code, so use ObjectTpeJava for AnyTpe. + if (tp.typeSymbol == AnyClass) TypeBounds.upper(definitions.ObjectTpeJava) + else TypeBounds(tp, definitions.ObjectTpeJava) + case '*' => TypeBounds.upper(definitions.ObjectTpeJava) } val newtparam = sym.newExistential(newTypeName("?"+i), sym.pos) setInfo bounds existentials += newtparam @@ -692,9 +703,12 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { tp } - val classSym = classNameToSymbol(subName(c => c == ';' || c == '<')) - assert(!classSym.isOverloaded, classSym.alternatives) - var tpe = processClassType(processInner(classSym.tpe_*)) + val classTpe = { + val classSym = classNameToSymbol(subName(c => c == ';' || c == '<')) + assert(!classSym.isOverloaded, classSym.alternatives) + if (classSym eq ObjectClass) ObjectTpeJava else classSym.tpe_* + } + var tpe = processClassType(processInner(classTpe)) while (sig.charAt(index) == '.') { accept('.') val name = newTypeName(subName(c => c == ';' || c == '<' || c == '.')) @@ -711,10 +725,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // make unbounded Array[T] where T is a type variable into Array[T with Object] // (this is necessary because such arrays have a representation which is incompatible // with arrays of primitive types. 
- // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object - // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail // see also RestrictJavaArraysMap (when compiling java sources directly) - if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe)) { + if (elemtp.typeSymbol.isAbstractType && elemtp.upperBound =:= ObjectTpe) { elemtp = intersectionType(List(elemtp, ObjectTpe)) } @@ -724,7 +736,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { assert(sym ne null, sig) val paramtypes = new ListBuffer[Type]() while (sig.charAt(index) != ')') { - paramtypes += objToAny(sig2type(tparams, skiptvs)) + paramtypes += sig2type(tparams, skiptvs) } index += 1 val restype = if (sym != null && sym.isClassConstructor) { @@ -732,7 +744,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { clazz.tpe_* } else sig2type(tparams, skiptvs) - JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype) + MethodType(sym.newSyntheticValueParams(paramtypes.toList), restype) case 'T' => val n = newTypeName(subName(';'.==)) index += 1 @@ -746,7 +758,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { while (sig.charAt(index) == ':') { index += 1 if (sig.charAt(index) != ':') // guard against empty class bound - ts += objToAny(sig2type(tparams, skiptvs)) + ts += sig2type(tparams, skiptvs) } TypeBounds.upper(intersectionType(ts.toList, sym)) } @@ -782,7 +794,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { classTParams = tparams val parents = new ListBuffer[Type]() while (index < end) { - parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter + val parent = sig2type(tparams, skiptvs = false) // here the variance doesn't matter + parents += (if (parent == ObjectTpeJava) ObjectTpe else parent) } 
ClassInfoType(parents.toList, instanceScope, sym) } @@ -795,10 +808,10 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { private def parseAttributes(sym: symbolTable.Symbol, completer: JavaTypeCompleter): Unit = { def parseAttribute(): Unit = { val attrName = readTypeName() - val attrLen = u4 + val attrLen = u4() attrName match { case tpnme.SignatureATTR => - val sigIndex = u2 + val sigIndex = u2() val sig = pool.getExternalName(sigIndex) assert(sym.rawInfo == completer, sym) completer.sig = sig.value @@ -812,15 +825,17 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case tpnme.DeprecatedATTR => in.skip(attrLen) - if (sym == clazz) + if (!sym.hasAnnotation(JavaDeprecatedAttr)) + sym.addAnnotation(JavaDeprecatedAttr) + if (sym == clazz && !staticModule.hasAnnotation(JavaDeprecatedAttr)) staticModule.addAnnotation(JavaDeprecatedAttr) case tpnme.ConstantValueATTR => - completer.constant = pool.getConstant(u2) + completer.constant = pool.getConstant(u2()) case tpnme.MethodParametersATTR => def readParamNames(): Unit = { - val paramCount = u1 + val paramCount = u1() val paramNames = new Array[NameOrString](paramCount) val paramNameAccess = new Array[Int](paramCount) var i = 0 @@ -829,7 +844,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case 0 => null // may occur on JDK 21+, as per scala/bug#12783 case index => pool.getExternalName(index) } - paramNameAccess(i) = u2 + paramNameAccess(i) = u2() i += 1 } completer.paramNames = new ParamNames(paramNames, paramNameAccess) @@ -841,16 +856,10 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { in.skip(attrLen) case tpnme.RuntimeAnnotationATTR => - val numAnnots = u2 - val annots = new ListBuffer[AnnotationInfo] - for (n <- 0 until numAnnots; annot <- parseAnnotation(u2)) - annots += annot - /* `sym.withAnnotations(annots)`, like `sym.addAnnotation(annot)`, prepends, - * so if we parsed in 
classfile order we would wind up with the annotations - * in reverse order in `sym.annotations`. Instead we just read them out the - * other way around, for now. TODO: sym.addAnnotation add to the end? - */ - sym.setAnnotations(sym.annotations ::: annots.toList) + val numAnnots = u2() + numAnnots times { + parseAnnotation(u2()).foreach(addUniqueAnnotation(sym, _)) + } // TODO 1: parse runtime visible annotations on parameters // case tpnme.RuntimeParamAnnotationATTR @@ -862,8 +871,9 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { parseExceptions(attrLen, completer) case tpnme.SourceFileATTR => - if (forInteractive && settings.YpresentationLocateSourceFile) { - // opt: disable this code by default for performance reasons. + /* + if (forInteractive) { + // opt: disable this code in the batch compiler for performance reasons. // it appears to be looking for the .java source file mentioned in this attribute // in the output directories of scalac. // @@ -882,14 +892,27 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } srcfile0 = settings.outputDirs.srcFilesFor(file, srcpath).find(_.exists) } else in.skip(attrLen) + */ + in.skip(attrLen) case tpnme.CodeATTR => if (sym.owner.isInterface) { sym setFlag JAVA_DEFAULTMETHOD - log(s"$sym in ${sym.owner} is a java8+ default method.") + debuglog(s"$sym in ${sym.owner} is a java8+ default method.") } in.skip(attrLen) + case tpnme.PermittedSubclassesATTR => + sym.setFlag(SEALED) + val numberOfClasses = u2() + numberOfClasses times { + val k = pool.getClassSymbol(u2()) + completer match { + case ctc: ClassTypeCompleter => ctc.permittedSubclasses ::= k // sym.addChild(k) + case _ => + } + } + case _ => in.skip(attrLen) } @@ -899,21 +922,22 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { * Parse the "Exceptions" attribute which denotes the exceptions * thrown by a method. 
*/ - def parseExceptions(len: Int, completer: JavaTypeCompleter): Unit = { - val nClasses = u2 - for (n <- 0 until nClasses) { + def parseExceptions(@unused len: Int, completer: JavaTypeCompleter): Unit = { + val nClasses = u2() + for (_ <- 0 until nClasses) { // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (scala/bug#7065) - val cls = pool.getClassName(u2) + val cls = pool.getClassName(u2()) completer.exceptions ::= cls } } // begin parseAttributes - for (i <- 0 until u2) parseAttribute() + u2() times parseAttribute() } + def parseAnnotArg(): Option[ClassfileAnnotArg] = { - val tag = u1 - val index = u2 + val tag = u1() + val index = u2() tag match { case STRING_TAG => Some(LiteralAnnotArg(Constant(pool.getName(index).value))) @@ -941,15 +965,16 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case ARRAY_TAG => val arr = new ArrayBuffer[ClassfileAnnotArg]() var hasError = false - for (i <- 0 until index) + index times { parseAnnotArg() match { case Some(c) => arr += c case None => hasError = true } + } if (hasError) None else Some(ArrayAnnotArg(arr.toArray)) case ANNOTATION_TAG => - parseAnnotation(index) map (NestedAnnotArg(_)) + parseAnnotation(index).map(NestedAnnotArg(_)) } } @@ -960,10 +985,10 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { */ def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try { val attrType = pool.getType(attrNameIndex) - val nargs = u2 + val nargs = u2() val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)] var hasError = false - for (i <- 0 until nargs) { + nargs times { val name = readName() parseAnnotArg() match { case Some(c) => nvpairs += ((name, c)) @@ -990,7 +1015,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { * if the corresponding flag is set in `flags`. 
*/ def addJavaFlagsAnnotations(sym: Symbol, flags: JavaAccFlags): Unit = - flags.toScalaAnnotations(symbolTable) foreach (ann => sym.addAnnotation(ann)) + flags.toScalaAnnotations(symbolTable).foreach(sym.addAnnotation(_)) /** Enter own inner classes in the right scope. It needs the scopes to be set up, * and implicitly current class' superclasses. @@ -999,7 +1024,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { def className(name: String): String = name.substring(name.lastIndexOf('.') + 1, name.length) - def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) { + def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile): Unit = { def jflags = entry.jflags val name = entry.originalName val sflags = jflags.toScalaFlags @@ -1023,10 +1048,10 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { cls.associatedFile = file mod.moduleClass.associatedFile = file - /** - * need to set privateWithin here because the classfile of a nested protected class is public in bytecode, - * so propagatePackageBoundary will not set it when the symbols are completed - */ + /* + * need to set privateWithin here because the classfile of a nested protected class is public in bytecode, + * so propagatePackageBoundary will not set it when the symbols are completed + */ if (jflags.isProtected) { cls.privateWithin = cls.enclosingPackage mod.privateWithin = cls.enclosingPackage @@ -1067,7 +1092,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { * * Expects `in.bp` to point to the `access_flags` entry, restores the old `bp`. 
*/ - def unpickleOrParseInnerClasses() { + def unpickleOrParseInnerClasses(): Unit = { val oldbp = in.bp in.skip(4) // access_flags, this_class skipSuperclasses() @@ -1077,11 +1102,11 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { var innersStart = -1 var runtimeAnnotStart = -1 - val numAttrs = u2 + val numAttrs = u2() var i = 0 while (i < numAttrs) { val attrName = readTypeName() - val attrLen = u4 + val attrLen = u4() attrName match { case tpnme.ScalaSignatureATTR => isScala = true @@ -1089,6 +1114,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case tpnme.ScalaATTR => isScalaRaw = true i = numAttrs + case tpnme.TASTYATTR => + MissingRequirementError.notFound(s"TASTy file for associated class file $file") case tpnme.InnerClassesATTR => innersStart = in.bp case tpnme.RuntimeAnnotationATTR => @@ -1102,32 +1129,32 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { if (isScala) { def parseScalaSigBytes(): Array[Byte] = { - val tag = u1 + val tag = u1() assert(tag == STRING_TAG, tag) - pool.getBytes(u2) + pool.getBytes(u2()) } def parseScalaLongSigBytes(): Array[Byte] = { - val tag = u1 + val tag = u1() assert(tag == ARRAY_TAG, tag) - val stringCount = u2 + val stringCount = u2() val entries = - for (i <- 0 until stringCount) yield { - val stag = u1 + for (_ <- 0 until stringCount) yield { + val stag = u1() assert(stag == STRING_TAG, stag) - u2 + u2() } pool.getBytes(entries.toList) } def checkScalaSigAnnotArg() = { - val numArgs = u2 + val numArgs = u2() assert(numArgs == 1, s"ScalaSignature has $numArgs arguments") val name = readName() assert(name == nme.bytes, s"ScalaSignature argument has name $name") } - def skipAnnotArg(): Unit = u1 match { + def skipAnnotArg(): Unit = u1() match { case STRING_TAG | BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | INT_TAG | LONG_TAG | FLOAT_TAG | DOUBLE_TAG | CLASS_TAG => in.skip(2) @@ -1136,8 +1163,8 @@ abstract class 
ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { in.skip(4) case ARRAY_TAG => - val num = u2 - for (i <- 0 until num) skipAnnotArg() + val num = u2() + num times skipAnnotArg() case ANNOTATION_TAG => in.skip(2) // type @@ -1145,8 +1172,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } def skipAnnotArgs() = { - val numArgs = u2 - for (i <- 0 until numArgs) { + val numArgs = u2() + numArgs times { in.skip(2) skipAnnotArg() } @@ -1157,18 +1184,21 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { assert(runtimeAnnotStart != -1, s"No RuntimeVisibleAnnotations in classfile with ScalaSignature attribute: $clazz") in.bp = runtimeAnnotStart - val numAnnots = u2 + val numAnnots = u2() var i = 0 var bytes: Array[Byte] = null - while (i < numAnnots && bytes == null) pool.getType(u2) match { - case SigTpe => - checkScalaSigAnnotArg() - bytes = parseScalaSigBytes() - case LongSigTpe => - checkScalaSigAnnotArg() - bytes = parseScalaLongSigBytes() - case _ => - skipAnnotArgs() + while (i < numAnnots && bytes == null) { + pool.getType(u2()) match { + case SigTpe => + checkScalaSigAnnotArg() + bytes = parseScalaSigBytes() + case LongSigTpe => + checkScalaSigAnnotArg() + bytes = parseScalaLongSigBytes() + case _ => + skipAnnotArgs() + } + i += 1 } AnyRefClass // Force scala.AnyRef, otherwise we get "error: Symbol AnyRef is missing from the classpath" @@ -1176,9 +1206,9 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { unpickler.unpickle(bytes, 0, clazz, staticModule, file.name) } else if (!isScalaRaw && innersStart != -1) { in.bp = innersStart - val entries = u2 - for (i <- 0 until entries) { - val innerIndex, outerIndex, nameIndex = u2 + val entries = u2() + entries times { + val innerIndex, outerIndex, nameIndex = u2() val jflags = readInnerClassFlags() if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) innerClasses add 
InnerClassEntry(pool.getClassName(innerIndex), pool.getClassName(outerIndex), pool.getName(nameIndex), jflags) @@ -1244,16 +1274,16 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter { - override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") } + override def complete(sym: Symbol): Unit = { throw new AssertionError("cyclic type dereferencing") } } class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter { - override def complete(sym: Symbol) { + override def complete(sym: Symbol): Unit = { sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun) } } // on JDK 21+, `names` may include nulls, as per scala/bug#12783 private class ParamNames(val names: Array[NameOrString], val access: Array[Int]) { - assert(names.length == access.length) + assert(names.length == access.length, "Require as many names as access") def length = names.length } private abstract class JavaTypeCompleter extends LazyType { @@ -1262,18 +1292,21 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { var paramNames: ParamNames = _ var exceptions: List[NameOrString] = Nil } - private final class ClassTypeCompleter(name: Name, jflags: JavaAccFlags, parent: NameOrString, ifaces: List[NameOrString]) extends JavaTypeCompleter { + private final class ClassTypeCompleter(@unused name: Name, @unused jflags: JavaAccFlags, parent: NameOrString, ifaces: List[NameOrString]) extends JavaTypeCompleter { + var permittedSubclasses: List[symbolTable.Symbol] = Nil override def complete(sym: symbolTable.Symbol): Unit = { val info = if (sig != null) sigToType(sym, sig) else { - val superType = - if (parent == null) AnyClass.tpe_* - else if (jflags.isAnnotation) { u2; AnnotationClass.tpe } - else getClassSymbol(parent.value).tpe_* - var ifacesTypes = ifaces.filterNot(_ eq 
null).map(x => getClassSymbol(x.value).tpe_*) - if (jflags.isAnnotation) ifacesTypes ::= ClassfileAnnotationClass.tpe - ClassInfoType(superType :: ifacesTypes, instanceScope, clazz) + val superTpe = if (parent == null) definitions.AnyClass.tpe_* else getClassSymbol(parent.value).tpe_* + val superTpe1 = if (superTpe == ObjectTpeJava) ObjectTpe else superTpe + val ifacesTypes = ifaces.filterNot(_ eq null).map(x => getClassSymbol(x.value).tpe_*) + ClassInfoType(superTpe1 :: ifacesTypes, instanceScope, clazz) } sym.setInfo(info) + // enum children are its enum fields, so don't register subclasses (which are listed as permitted) + if (!sym.hasJavaEnumFlag) + for (k <- permittedSubclasses) + if (k.parentSymbols.contains(sym)) + sym.addChild(k) } } @@ -1283,7 +1316,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { override def complete(sym: symbolTable.Symbol): Unit = { def descriptorInfo = sigToType(sym, descriptor) val hasOuterParam = (name == nme.CONSTRUCTOR) && (descriptorInfo match { - case MethodType(params, restpe) => + case MethodType(params, _) => // if this is a non-static inner class, remove the explicit outer parameter innerClasses getEntry currentClass match { case Some(entry) if !entry.jflags.isStatic => @@ -1306,10 +1339,10 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { sigToType(sym, sig) } else if (name == nme.CONSTRUCTOR) { descriptorInfo match { - case MethodType(params, restpe) => + case MethodType(params, _) => val paramsNoOuter = if (hasOuterParam) params.tail else params val newParams = paramsNoOuter match { - case (init :+ tail) if jflags.isSynthetic => + case init :+ _ if jflags.isSynthetic => // scala/bug#7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which // are added when an inner class needs to access a private constructor. 
init @@ -1377,17 +1410,17 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } } - def skipAttributes() { - var attrCount: Int = u2 + def skipAttributes(): Unit = { + var attrCount: Int = u2() while (attrCount > 0) { in skip 2 - in skip u4 + in skip u4() attrCount -= 1 } } - def skipMembers() { - var memberCount: Int = u2 + def skipMembers(): Unit = { + var memberCount: Int = u2() while (memberCount > 0) { in skip 6 skipAttributes() @@ -1395,12 +1428,42 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } } - def skipSuperclasses() { + def skipSuperclasses(): Unit = { in.skip(2) // superclass - val ifaces = u2 + val ifaces = u2() in.skip(2 * ifaces) } protected def getScope(flags: JavaAccFlags): Scope = if (flags.isStatic) staticScope else instanceScope + + // Append annotation. For Java deprecation, prefer an annotation with values (since, etc). + private def addUniqueAnnotation(symbol: Symbol, annot: AnnotationInfo): symbol.type = + if (annot.atp.typeSymbol == JavaDeprecatedAttr) { + def ensureDepr(sym: Symbol): sym.type = { + if (sym.hasAnnotation(JavaDeprecatedAttr)) + if (List(0, 1).exists(annot.constantAtIndex(_).isDefined)) + sym.setAnnotations { + def drop(cur: AnnotationInfo): Boolean = cur.atp.typeSymbol == JavaDeprecatedAttr + sym.annotations.foldRight(annot :: Nil)((a, all) => if (drop(a)) all else a :: all) + } + else sym + else sym.addAnnotation(annot) + } + if (symbol == clazz) + ensureDepr(staticModule) + ensureDepr(symbol) + } + else symbol.addAnnotation(annot) +} +object ClassfileParser { + private implicit class GoodTimes(private val n: Int) extends AnyVal { + def times(body: => Unit): Unit = { + var i = n + while (i > 0) { + body + i -= 1 + } + } + } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala index 9caf87a2c694..58691e7fcdbe 100644 --- 
a/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 4b4a075af69c..79556be4ba15 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,14 +24,14 @@ import scala.reflect.internal.util.shortClassOfInstance import scala.collection.mutable import PickleFormat._ import Flags._ +import scala.annotation.{nowarn, tailrec} /** * Serialize a top-level module and/or class. * - * @see [[PickleFormat]] for symbol table attribute format. + * @see [[scala.reflect.internal.pickling.PickleFormat PickleFormat]] for symbol table attribute format. 
* * @author Martin Odersky - * @version 1.0 */ abstract class Pickler extends SubComponent { import global._ @@ -68,6 +68,7 @@ abstract class Pickler extends SubComponent { else None + @nowarn("cat=lint-nonlocal-return") def apply(unit: CompilationUnit): Unit = { def pickle(tree: Tree): Unit = { tree match { @@ -88,7 +89,7 @@ abstract class Pickler extends SubComponent { if (writeToSigFile) writeSigFile(sym, pickle) } - if (sigWriter.isDefined && settings.YpickleWriteApiOnly) { + if (sigWriter.isDefined && settings.YpickleWriteApiOnly.value) { pickle(noPrivates = false, writeToSymData = true, writeToSigFile = false) pickle(noPrivates = true, writeToSymData = false, writeToSigFile = true) } else { @@ -141,7 +142,7 @@ abstract class Pickler extends SubComponent { private def closeSigWriter(): Unit = { sigWriter.foreach { writer => writer.close() - if (settings.verbose) + if (settings.verbose.value) reporter.echo(NoPosition, "[sig files written]") } } @@ -149,7 +150,7 @@ abstract class Pickler extends SubComponent { override protected def shouldSkipThisPhaseForJava: Boolean = !settings.YpickleJava.value } - type Index = mutable.AnyRefMap[AnyRef, Int] // a map from objects (symbols, types, names, ...) to indices into Entries + type Index = mutable.HashMap[AnyRef, Int] // a map from objects (symbols, types, names, ...) 
to indices into Entries type Entries = Array[AnyRef] final val InitEntriesSize = 256 @@ -157,7 +158,7 @@ abstract class Pickler extends SubComponent { private[this] var _entries: Entries = _ final def initPickle(root: Symbol, noPrivates: Boolean)(f: Pickle => Unit): Pickle = { - if (_index eq null) { _index = new Index(InitEntriesSize) } + if (_index eq null) { _index = new Index(InitEntriesSize, mutable.HashMap.defaultLoadFactor) } if (_entries eq null) { _entries = new Entries(InitEntriesSize) } val pickle = new Pickle(root, _index, _entries, noPrivates) try f(pickle) finally { pickle.close(); _index.clear(); fill(_entries, null) } @@ -177,15 +178,16 @@ abstract class Pickler extends SubComponent { private def isRootSym(sym: Symbol) = sym.name.toTermName == rootName && sym.owner == rootOwner - /** Returns usually symbol's owner, but picks classfile root instead - * for existentially bound variables that have a non-local owner. - * Question: Should this be done for refinement class symbols as well? - * - * Note: tree pickling also finds its way here; e.g. in scala/bug#7501 the pickling - * of trees in annotation arguments considers the parameter symbol of a method - * called in such a tree as "local". The condition `sym.isValueParameter` was - * added to fix that bug, but there may be a better way. - */ + /** Usually `sym.owner`, except when `sym` is pickle-local, while `sym.owner` is not. + * + * In the latter case, the alternative owner is the pickle root, + * or a non-class owner of root (so that term-owned parameters remain term-owned). + * + * Note: tree pickling also finds its way here; e.g. in scala/bug#7501 the pickling + * of trees in annotation arguments considers the parameter symbol of a method + * called in such a tree as "local". The condition `sym.isValueParameter` was + * added to fix that bug, but there may be a better way. 
+ */ private def localizedOwner(sym: Symbol) = if (isLocalToPickle(sym) && !isRootSym(sym) && !isLocalToPickle(sym.owner)) // don't use a class as the localized owner for type parameters that are not owned by a class: those are not instantiated by asSeenFrom @@ -198,6 +200,7 @@ abstract class Pickler extends SubComponent { * anyway? This is the case if symbol is a refinement class, * an existentially bound variable, or a higher-order type parameter. */ + @tailrec private def isLocalToPickle(sym: Symbol): Boolean = (sym != NoSymbol) && !sym.isPackageClass && ( isRootSym(sym) || sym.isRefinementClass @@ -272,7 +275,7 @@ abstract class Pickler extends SubComponent { /** Store symbol in index. If symbol is local, also store everything it references. */ - def putSymbol(sym0: Symbol) { + def putSymbol(sym0: Symbol): Unit = { val sym = deskolemize(sym0) if (putEntry(sym)) { @@ -301,7 +304,7 @@ abstract class Pickler extends SubComponent { putChildren(sym, children.toList sortBy (_.sealedSortName)) } - for (annot <- (sym.annotations filter (ann => ann.isStatic && !ann.isErroneous)).reverse) + for (annot <- sym.annotations.filter(ann => ann.isStatic && !ann.isErroneous)) putAnnotation(sym, annot) } else if (sym != NoSymbol) { @@ -359,14 +362,14 @@ abstract class Pickler extends SubComponent { throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")") } } - private def putTypes(tps: List[Type]) { tps foreach putType } + private def putTypes(tps: List[Type]): Unit = { tps foreach putType } private object putTreeTraverser extends Traverser { // Only used when pickling trees, i.e. 
in an argument of some Annotation // annotations in Modifiers are removed by the typechecker override def traverseModifiers(mods: Modifiers): Unit = if (putEntry(mods)) putEntry(mods.privateWithin) override def traverseName(name: Name): Unit = putEntry(name) - override def traverseConstant(const: Constant): Unit = putEntry(const) + override def traverseConstant(const: Constant): Unit = putConstant(const) override def traverse(tree: Tree): Unit = putTree(tree) def put(tree: Tree): Unit = { @@ -378,14 +381,14 @@ abstract class Pickler extends SubComponent { super.traverse(tree) } } - private def putTree(tree: Tree) { + private def putTree(tree: Tree): Unit = { if (putEntry(tree)) putTreeTraverser put tree } /** Store a constant in map index, along with anything it references. */ - private def putConstant(c: Constant) { + private def putConstant(c: Constant): Unit = { if (putEntry(c)) { if (c.tag == StringTag) putEntry(newTermName(c.stringValue)) else if (c.tag == ClazzTag) putType(c.typeValue) @@ -393,33 +396,35 @@ abstract class Pickler extends SubComponent { } } - private def putChildren(sym: Symbol, children: List[Symbol]) { + private def putChildren(sym: Symbol, children: List[Symbol]): Unit = { putEntry(sym -> children) children foreach putSymbol } /** used in putSymbol only, i.e. annotations on definitions, not on types */ - private def putAnnotation(sym: Symbol, annot: AnnotationInfo) { + private def putAnnotation(sym: Symbol, annot: AnnotationInfo): Unit = { // if an annotation with the same arguments is applied to the // same symbol multiple times, it's only pickled once. 
if (putEntry(sym -> annot)) putAnnotationBody(annot) } - private def putAnnotation(annot: AnnotationInfo) { + private def putAnnotation(annot: AnnotationInfo): Unit = { if (putEntry(annot)) putAnnotationBody(annot) } /** Puts the members of an AnnotationInfo */ - private def putAnnotationBody(annot: AnnotationInfo) { - def putAnnotArg(arg: Tree) { + private def putAnnotationBody(annot: AnnotationInfo): Unit = { + def putAnnotArg(arg: Tree): Unit = { arg match { - case Literal(c) => putConstant(c) + // Keep Literal with an AnnotatedType. Used in AnnotationInfo.argIsDefault. Allow `null` to prevent NPEs: + // Literal(Constant(v)) is used eg in compiler plugins, it produces a tree with `tpe == null`. + case Literal(c) if arg.tpe == null || arg.tpe.isInstanceOf[ConstantType] => putConstant(c) case _ => putTree(arg) } } - def putClassfileAnnotArg(carg: ClassfileAnnotArg) { + def putClassfileAnnotArg(carg: ClassfileAnnotArg): Unit = { (carg: @unchecked) match { case LiteralAnnotArg(const) => putConstant(const) case ArrayAnnotArg(args) => if (putEntry(carg)) args foreach putClassfileAnnotArg @@ -439,20 +444,20 @@ abstract class Pickler extends SubComponent { /** Write a reference to object, i.e., the object's number in the map index. */ - private def writeRef(ref: AnyRef) { + private def writeRef(ref: AnyRef): Unit = { assert(index ne null, this) writeNat(index(deskolemizeTypeSymbols(ref))) } private def writeRefs(refs: List[AnyRef]): Unit = refs foreach writeRef - private def writeRefsWithLength(refs: List[AnyRef]) { + private def writeRefsWithLength(refs: List[AnyRef]): Unit = { writeNat(refs.length) writeRefs(refs) } /** Write name, owner, flags, and info of a symbol. 
*/ - private def writeSymInfo(sym: Symbol) { + private def writeSymInfo(sym: Symbol): Unit = { writeRef(sym.name) writeRef(localizedOwner(sym)) writeLongNat((rawToPickledFlags(sym.rawflags & PickledFlags))) @@ -461,18 +466,20 @@ abstract class Pickler extends SubComponent { } /** Write a name in UTF8 format. */ - private def writeName(name: Name) { + private def writeName(name: Name): Unit = { ensureCapacity(name.length * 3) val utfBytes = Codec toUTF8 name.toString - scala.compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length) + System.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length) writeIndex += utfBytes.length } /** Write an annotation */ - private def writeAnnotation(annot: AnnotationInfo) { - def writeAnnotArg(arg: Tree) { + private def writeAnnotation(annot: AnnotationInfo): Unit = { + def writeAnnotArg(arg: Tree): Unit = { arg match { - case Literal(c) => writeRef(c) + // Keep Literal with an AnnotatedType. Used in AnnotationInfo.argIsDefault. Allow `null` to prevent NPEs: + // Literal(Constant(v)) is used eg in compiler plugins, it produces a tree with `tpe == null`. 
+ case Literal(c) if arg.tpe == null || arg.tpe.isInstanceOf[ConstantType] => writeRef(c) case _ => writeRef(arg) } } @@ -486,7 +493,7 @@ abstract class Pickler extends SubComponent { } /** Write a ClassfileAnnotArg (argument to classfile annotation) */ - def writeClassfileAnnotArg(carg: ClassfileAnnotArg) { + def writeClassfileAnnotArg(carg: ClassfileAnnotArg): Unit = { (carg: @unchecked) match { case LiteralAnnotArg(const) => writeRef(const) case ArrayAnnotArg(args) => writeRef(carg) @@ -523,8 +530,8 @@ abstract class Pickler extends SubComponent { } /** Write an entry */ - private def writeEntry(entry: AnyRef) { - def writeLocalSymbolBody(sym: Symbol) { + private def writeEntry(entry: AnyRef): Unit = { + def writeLocalSymbolBody(sym: Symbol): Unit = { writeSymInfo(sym) sym match { case _: ClassSymbol if sym.hasSelfType => writeRef(sym.typeOfThis) @@ -532,13 +539,13 @@ abstract class Pickler extends SubComponent { case _ => } } - def writeExtSymbolBody(sym: Symbol) { + def writeExtSymbolBody(sym: Symbol): Unit = { val name = if (sym.isModuleClass) sym.name.toTermName else sym.name writeRef(name) if (!sym.owner.isRoot) writeRef(sym.owner) } - def writeSymbolBody(sym: Symbol) { + def writeSymbolBody(sym: Symbol): Unit = { if (sym ne NoSymbol) { if (isLocalToPickle(sym)) writeLocalSymbolBody(sym) @@ -566,9 +573,10 @@ abstract class Pickler extends SubComponent { case StaticallyAnnotatedType(annots, tp) => writeRef(tp) ; writeRefs(annots) case AnnotatedType(_, tp) => writeTypeBody(tp) // write the underlying type if there are no static annotations case CompoundType(parents, _, clazz) => writeRef(clazz); writeRefs(parents) + case x => throw new MatchError(x) } - def writeTreeBody(tree: Tree) { + def writeTreeBody(tree: Tree): Unit = { writeNat(picklerSubTag(tree)) if (!tree.isEmpty) writeTreeBodyTraverser traverse tree @@ -581,17 +589,17 @@ abstract class Pickler extends SubComponent { case StringTag => writeRef(newTermName(c.stringValue)) case ClazzTag => 
writeRef(c.typeValue) case EnumTag => writeRef(c.symbolValue) - case tag => if (ByteTag <= tag && tag <= LongTag) writeLong(c.longValue) + case ctag => if (ByteTag <= ctag && ctag <= LongTag) writeLong(c.longValue) } - def writeModifiers(mods: Modifiers) { + def writeModifiers(mods: Modifiers): Unit = { val pflags = rawToPickledFlags(mods.flags) writeNat((pflags >> 32).toInt) writeNat((pflags & 0xFFFFFFFF).toInt) writeRef(mods.privateWithin) } - def writeSymbolTuple(target: Symbol, other: Any) { + def writeSymbolTuple(target: Symbol, other: Any): Unit = { writeRef(target) other match { case annot: AnnotationInfo => writeAnnotation(annot) @@ -628,8 +636,8 @@ abstract class Pickler extends SubComponent { } /** Write byte array */ - final def writeArray() { - assert(writeIndex == 0) + final def writeArray(): Unit = { + assert(writeIndex == 0, "Index must be zero") assert(index ne null, this) writeNat(MajorVersion) writeNat(MinorVersion) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala index f781d7cd8a50..46adc4c37d97 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala index ffe00c3c13bf..4e6f22601ea0 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/symtab/package.scala b/src/compiler/scala/tools/nsc/symtab/package.scala index 7a1dedec3fa3..6fd5a1673c07 100644 --- a/src/compiler/scala/tools/nsc/symtab/package.scala +++ b/src/compiler/scala/tools/nsc/symtab/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala new file mode 100644 index 000000000000..1e0991d55cff --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala @@ -0,0 +1,60 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.tasty + +import scala.language.implicitConversions + +import ForceKinds._ + +object ForceKinds { + + /** When forcing the companion of a module */ + final val DeepForce: ForceKinds.Single = of(1 << 1) + /** When forcing the owner of a symbol */ + final val CompleteOwner: ForceKinds.Single = of(1 << 2) + /** When forcing an overloaded signature */ + final val OverloadedSym: ForceKinds.Single = of(1 << 3) + /** When forcing a symbol that will be copied */ + final val CopySym: ForceKinds.Single = of(1 << 4) + /** When forcing the underlying symbol of some type space */ + final val SpaceForce: ForceKinds.Single = of(1 << 5) + /** When forcing the enum singleton from its "fake" module class */ + final val EnumProxy: ForceKinds.Single = of(1 << 6) + + private def of(mask: Int): ForceKinds.Single = new ForceKinds.Single(mask) + + class Single(val toInt: Int) extends AnyVal { mode => + def |(single: ForceKinds.Single): ForceKinds = new ForceKinds(toInt | single.toInt) + } + + @inline implicit def single2ForceKinds(single: ForceKinds.Single): ForceKinds = new ForceKinds(single.toInt) + +} + +/**A static type representing a bitset of modes that are for debugging why a symbol may have been forced + */ +class ForceKinds(val toInt: Int) extends AnyVal { + def is(single: ForceKinds.Single): Boolean = (toInt & single.toInt) == single.toInt + def |(single: ForceKinds.Single): ForceKinds = new ForceKinds(toInt | single.toInt) + + def describe: List[String] = { + var xs = List.empty[String] + if (is(DeepForce)) xs ::= "deep" + if (is(CompleteOwner)) xs ::= "class owner is required" + if (is(OverloadedSym)) xs ::= "overload resolution" + if (is(CopySym)) xs ::= "copying its info" + if (is(SpaceForce)) xs ::= "space" + if (is(EnumProxy)) xs ::= "forcing enum value from fake object" + xs + } +} diff --git a/src/compiler/scala/tools/nsc/tasty/TastyModes.scala b/src/compiler/scala/tools/nsc/tasty/TastyModes.scala new file mode 100644 index 
000000000000..5faab5f982d5 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/TastyModes.scala @@ -0,0 +1,75 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty + +import scala.collection.mutable + +/**A static type representing a bitset of modes that affect the interpretation of a TASTy file, + * such as distinguishing between reading the parents of a class, or an annotation tree. + */ +object TastyModes { + + final val EmptyTastyMode: TastyMode = TastyMode(0) + /** When reading the parents of a class template */ + final val ReadParents: TastyMode = TastyMode(1 << 0) + /** When reading trees of an annotation */ + final val ReadAnnotation: TastyMode = TastyMode(1 << 1) + /** When reading the outermost tree of an term */ + final val OuterTerm: TastyMode = TastyMode(1 << 2) + /** When reading statements in a sequence */ + final val IndexStats: TastyMode = TastyMode(1 << 3) + /** When reading a macro definition body */ + final val ReadMacro: TastyMode = TastyMode(1 << 4) + /** When not at the package scope */ + final val InnerScope: TastyMode = TastyMode(1 << 5) + /** When reading the tree of an Opaque type */ + final val OpaqueTypeDef: TastyMode = TastyMode(1 << 6) + /** When reading trees of an annotation */ + final val ReadAnnotationCtor: TastyMode = TastyMode(1 << 7) + /** When reading a TASTy file produced from a Java source file (file has JAVAattr attribute) */ + final val ReadJava: TastyMode = TastyMode(1 << 8) + + /** The union of `IndexStats` and `InnerScope` */ + final val IndexScopedStats: TastyMode = IndexStats | InnerScope + + final val ReadAnnotTopLevel: TastyMode = ReadAnnotation | ReadAnnotationCtor + + case class TastyMode(val toInt: Int) extends AnyVal { mode 
=> + + def |(other: TastyMode): TastyMode = TastyMode(toInt | other.toInt) + def &(mask: TastyMode): TastyMode = TastyMode(toInt & mask.toInt) + def &~(mask: TastyMode): TastyMode = TastyMode(toInt & ~mask.toInt) + def is(mask: TastyMode): Boolean = (this & mask) == mask + def isOneOf(mask: TastyMode): Boolean = (this & mask).nonEmpty + def nonEmpty: Boolean = toInt != 0 + + def debug: String = { + if (mode == EmptyTastyMode) "EmptyTastyMode" + else { + val sb = mutable.ArrayBuffer.empty[String] + if (mode.is(ReadParents)) sb += "ReadParents" + if (mode.is(ReadAnnotation)) sb += "ReadAnnotation" + if (mode.is(OuterTerm)) sb += "OuterTerm" + if (mode.is(IndexStats)) sb += "IndexStats" + if (mode.is(ReadMacro)) sb += "ReadMacro" + if (mode.is(InnerScope)) sb += "InnerScope" + if (mode.is(OpaqueTypeDef)) sb += "OpaqueTypeDef" + if (mode.is(ReadAnnotationCtor)) sb += "ReadAnnotationCtor" + if (mode.is(ReadJava)) sb += "ReadJava" + sb.mkString(" | ") + } + } + + } + +} diff --git a/src/compiler/scala/tools/nsc/tasty/TastyUniverse.scala b/src/compiler/scala/tools/nsc/tasty/TastyUniverse.scala new file mode 100644 index 000000000000..4c9de6986024 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/TastyUniverse.scala @@ -0,0 +1,27 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty + +import bridge._ + +/**A facade to `scala.tools.nsc.symbtab.SymbolTable`, providing operations that map from the language of TASTy to the + * nsc compiler, e.g. to create trees, resolve types and symbols. 
+ */ +abstract class TastyUniverse extends TastyCore + with FlagOps + with TypeOps + with AnnotationOps + with ContextOps + with SymbolOps + with NameOps + with TreeOps diff --git a/src/compiler/scala/tools/nsc/tasty/TastyUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TastyUnpickler.scala new file mode 100644 index 000000000000..ac0a0b1104c2 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/TastyUnpickler.scala @@ -0,0 +1,215 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty + +import scala.collection.mutable +import scala.tools.tasty.{ErasedTypeRef, Signature, TastyName, TastyReader, TastyRefs} +import scala.tools.tasty.{AttributeUnpickler, Attributes} +import scala.tools.tasty.{TastyFormat, TastyHeaderUnpickler, TastyVersion, UnpicklerConfig} +import TastyFormat.NameTags._ +import TastyRefs.NameRef +import TastyName._ +import scala.reflect.io.AbstractFile + +/**The entry point to TASTy unpickling for nsc, initialises a `TastyUniverse#Context` with the root symbols of a + * top-level class, then parses the header and names from a TASTy file, before entering symbols from the `ASTs` section + * with `TreeUnpickler` + */ +object TastyUnpickler { + + /** Unpickle symbol table information descending from a class and/or singleton object root + * from an array of bytes. 
+ * @param tasty the interface that translates TASTy operations into symbol table operations + * @param classRoot the top-level class which is unpickled + * @param objectRoot the top-level singleton object which is unpickled + * @param filename filename associated with bytearray, only used for error messages + */ + def unpickle[Tasty <: TastyUniverse](tasty: Tasty)(bytes: Array[Byte], classRoot: tasty.Symbol, objectRoot: tasty.Symbol, filename: String): Unit = { + import tasty._ + implicit val ctx: Context = new InitialContext(classRoot, AbstractFile.getFile(filename)) + + ctx.log(s"Unpickling $filename") + + def enter(treeUnpickler: TreeUnpickler[tasty.type])(implicit ctx: Context): Unit = { + treeUnpickler.enterTopLevel(classRoot, objectRoot) + } + + val unpickler = new TastyUnpickler[tasty.type](new TastyReader(bytes))(tasty) + unpickler.readHeader() + unpickler.readNames() + val Some(astReader) = unpickler.readSection(TastyFormat.ASTsSection): @unchecked + + val attributes = unpickler + .readSection(TastyFormat.AttributesSection) + .map(AttributeUnpickler.attributes) + .getOrElse(Attributes.empty) + + val treeUnpickler = new TreeUnpickler[tasty.type](astReader, unpickler.nameAtRef)(tasty) + val ctx0 = if (attributes.isJava) ctx.addMode(TastyModes.ReadJava) else ctx + enter(treeUnpickler)(ctx0) + } + + private final class Table[T] extends (NameRef => T) { + private[this] val names = new mutable.ArrayBuffer[T] + def add(name: T): mutable.ArrayBuffer[T] = names += name + def apply(ref: NameRef): T = names(ref.index) + def size: Int = names.size + } + + trait Scala2CompilerConfig extends UnpicklerConfig { + + /** When Scala 3 is in an RC phase for a new minor version, we put here the TASTy of that Minor, + * otherwise it should be empty. 
+ */ + final val toolOverrides: List[TastyVersion] = List() + + private def asScala3Compiler(version: TastyVersion): String = + if (version.major == 28) { + // scala 3.x.y series + if (version.experimental > 0) + // scenario here is someone using 3.4.0 to read 3.4.1-RC1-NIGHTLY, in this case, we should show 3.4 nightly. + s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler" + else s"a Scala 3.${version.minor}.0 compiler or newer" + } + else if (version.experimental > 0) "the same Scala compiler" // unknown major version, just say same + else "a more recent Scala compiler" // unknown major version, just say later + + /** The description of the upgraded scala compiler that can read the given TASTy version */ + final def upgradeReaderHowTo(version: TastyVersion): String = + if (version.major == 28) { + // scala 3.x.y series + if (version.experimental > 0) + // scenario here is someone using 2.13.12 to read 3.4.1-RC1-NIGHTLY, in this case + // Scala 2.13 can not read it. + s"either use a stable version of the library, or try from the same Scala 3.x nightly or snapshot compiler" + else "use the latest Scala 2.13.x compiler" // happy path, they have stable TASTy, but this 2.13.x is too old. 
+ } + else if (version.experimental > 0) "use the same Scala compiler" // unknown major version, just say same + else "use a more recent Scala compiler" // unknown major version, just say later + + /** The description of the upgraded scala compiler that can produce the given TASTy version */ + final def upgradedProducerTool(version: TastyVersion): String = asScala3Compiler(version) + + final def recompileAdditionalInfo: String = """ + | Usually this means that the library dependency containing this file should be updated.""".stripMargin + + final def upgradeAdditionalInfo(fileVersion: TastyVersion): String = + if (fileVersion.isExperimental && toolVersion.experimental == 0) { + """ + | Note that Scala 2.13.x is only configured to read stable TASTy.""".stripMargin + } + else "" + } + + /** A config for the TASTy reader of a scala 2 compiler */ + val scala2CompilerConfig: UnpicklerConfig = new Scala2CompilerConfig with UnpicklerConfig.DefaultTastyVersion {} +} + +import TastyUnpickler._ + +private class TastyUnpickler[Tasty <: TastyUniverse](reader: TastyReader)(implicit tasty: Tasty) { self => + import tasty.{Context, assert} + import reader._ + + private[this] val nameTable = new Table[TastyName] + + def nameAtRef: NameRef => TastyName = nameTable + + private def readName(): TastyName = nameTable(readNameRef()) + + private def readParamSig(): Signature.ParamSig[ErasedTypeRef] = { + val ref = readInt() + if (ref < 0) + Left(ref.abs) + else { + Right(ErasedTypeRef(nameTable(NameRef(ref)))) + } + } + + private def readNameContents()(implicit ctx: Context): TastyName = { + val tag = readByte() + val length = readNat() + val start = currentAddr + val end = start + length + def debugName(name: TastyName): name.type = { + ctx.log(s"${nameTable.size}: ${name.debug}") + name + } + def readSignedRest(original: TastyName, target: TastyName): TastyName = { + val result = ErasedTypeRef(readName()) + val paramsSig = until(end)(readParamSig()) + val sig = Signature(paramsSig, 
result) + debugName(SignedName(original, sig, target)) + } + val result = tag match { + case UTF8 => + goto(end) + debugName(SimpleName(new String(bytes.slice(start.index, start.index + length), "UTF-8"))) + case tag @ (QUALIFIED | EXPANDED | EXPANDPREFIX) => + val sep = tag match { + case QUALIFIED => PathSep + case EXPANDED => ExpandedSep + case EXPANDPREFIX => ExpandPrefixSep + } + debugName(QualifiedName(readName(), sep, readName().asSimpleName)) + case UNIQUE => + val separator = readName().asSimpleName + val num = readNat() + val originals = until(end)(readName()) + val original = if (originals.isEmpty) TastyName.Empty else originals.head + debugName(UniqueName(original, separator, num)) + case DEFAULTGETTER => + debugName(DefaultName(readName(), readNat())) + case TARGETSIGNED => + val original = readName() + val target = readName() + readSignedRest(original, target) + case SIGNED => + val original = readName() + readSignedRest(original, original) + case OBJECTCLASS => + debugName(ObjectName(readName())) + case BODYRETAINER => + debugName(SuffixName(readName(), BodyRetainerSuffix)) + case INLINEACCESSOR | SUPERACCESSOR => + val prefix = tag match { + case INLINEACCESSOR => InlinePrefix + case SUPERACCESSOR => SuperPrefix + } + debugName(PrefixName(prefix, readName())) + case _ => + val qual = readName() + sys.error(s"at NameRef(${nameTable.size}): name `${qual.debug}` is qualified by unknown tag $tag") + } + assert(currentAddr == end, s"bad name ${result.debug} $start $currentAddr $end") + result + } + + def readHeader(): Unit = new TastyHeaderUnpickler(scala2CompilerConfig, reader).readHeader() + + def readNames()(implicit ctx: Context): Unit = { + ctx.log(s"reading names:") + doUntil(readEnd()) { nameTable.add(readNameContents()) } + } + + def readSection(name: String): Option[TastyReader] = { + while (!isAtEnd) { + val secName = readName().asSimpleName.raw + val secEnd = readEnd() + val curr = currentAddr + goto(secEnd) + if (name == secName) return 
Some(new TastyReader(bytes, curr.index, secEnd.index, curr.index)) + } + None + } +} diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala new file mode 100644 index 000000000000..e43da99adff0 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -0,0 +1,1396 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty + +import scala.tools.tasty.{TastyRefs, TastyReader, TastyName, TastyFormat, TastyFlags} +import TastyRefs._, TastyFlags._, TastyFormat._ +import ForceKinds._ + +import scala.annotation.{switch, unused} +import scala.collection.mutable +import scala.reflect.io.AbstractFile +import scala.reflect.internal.Variance +import scala.util.chaining._ +import scala.collection.immutable.ArraySeq + +/**`TreeUnpickler` is responsible for traversing all trees in the "ASTs" section of a TASTy file, which represent the + * definitions inside the classfile associated with the root class/module. `TreeUnpickler` will enter the public api + * of the TASTy file into the symbolTable of `TastyUniverse`. "Public API" includes annotations when they are + * simple trees. 
+ * + * Where possible, `TreeUnpickler` should not directly manipulate values created by the symbolTable, but use + * operations provided by `TastyUniverse` + * @param reader the reader from which to unpickle + * @param nameAtRef an index of names from the tasty file of this unpickler + * @param tasty the handle on the `TastyUniverse` + */ +class TreeUnpickler[Tasty <: TastyUniverse]( + reader: TastyReader, + nameAtRef: NameRef => TastyName)(implicit + val tasty: Tasty) { self => + import tasty._ + import TreeUnpickler._ + import MaybeCycle._ + import TastyModes._ + + @inline + final protected def unsupportedWhen(cond: Boolean, msg: => String)(implicit ctx: Context): Unit = + if (cond) unsupportedError(msg) + + /** A map from addresses of definition entries to the symbols they define */ + private val symAtAddr = new mutable.HashMap[Addr, Symbol] + + /** A temporary map from addresses of definition entries to the trees they define. + * Used to remember trees of symbols that are created by a completion. Emptied + * once the tree is inlined into a larger tree. + */ + private val cycleAtAddr = new mutable.HashMap[Addr, MaybeCycle] + + /** A map from addresses of type entries to the types they define. + * Currently only populated for types that might be recursively referenced + * from within themselves (i.e. RecTypes, LambdaTypes). + */ + private val typeAtAddr = new mutable.HashMap[Addr, Type] + + /** The root symbol denotation which are defined by the Tasty file associated with this + * TreeUnpickler. Set by `enterTopLevel`. + */ + private[this] var roots: Set[Symbol] = _ + + /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. 
*/ + private[this] var ownerTree: OwnerTree = _ + + //---------------- unpickling trees ---------------------------------------------------------------------------------- + + private def registerSym(addr: Addr, sym: Symbol, rejected: Boolean)(implicit ctx: Context) = { + assert(!(rejected && isSymbol(sym)), "expected no symbol when rejected") + ctx.log( + if (isSymbol(sym)) s"$addr registered ${showSym(sym)}" + else s"$addr registering symbol was rejected" + ) + symAtAddr(addr) = sym + } + + /** Enter all toplevel classes and objects into their scopes + */ + def enterTopLevel(classRoot: Symbol, objectRoot: Symbol)(implicit ctx: Context): Unit = { + this.roots = Set(objectRoot, classRoot) + val rdr = new TreeReader(reader).fork + ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr) + def indexTopLevel()(implicit ctx: Context): Unit = rdr.indexStats(reader.endAddr) + if (rdr.isTopLevel) { + inIndexScopedStatsContext { ctx0 => + ctx0.trace(traceTopLevel(classRoot, objectRoot)) { + indexTopLevel()(ctx0) + } + } + } + } + + private def traceTopLevel(classRoot: Symbol, objectRoot: Symbol) = TraceInfo[Unit]( + query = s"reading top level roots", + qual = s"${showSym(classRoot)}, ${showSym(objectRoot)}", + res = _ => "entered top level roots" + ) + + /** A completer that captures the current position and context, which then uses the position to discover the symbol + * to compute the info for. 
+ */ + class Completer( + isClass: Boolean, + reader: TastyReader, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends TastyCompleter(isClass, tflags) { + + private val symAddr = reader.currentAddr + + private def fork(reader: TastyReader): TastyReader = reader.subReader(reader.startAddr, reader.endAddr) + + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + // implicit assertion that the completion is done by the same mirror that loaded owner + require(symAtAddr(symAddr) eq sym) + cycleAtAddr(symAddr) = ctx.withPhaseNoLater("pickler") { ctx0 => + new TreeReader(fork(reader)).readIndexedMember()(ctx0) // fork here so that cycles start at the same address + } + } + + } + + class TreeReader(val reader: TastyReader) { + import reader._ + + def forkAt(start: Addr): TreeReader = new TreeReader(subReader(start, endAddr)) + def fork: TreeReader = forkAt(currentAddr) + + def skipParentTree(tag: Int): Unit = if (tag != SPLITCLAUSE) skipTree(tag) + def skipParentTree(): Unit = skipParentTree(readByte()) + + def skipTree(tag: Int): Unit = + if (tag >= firstLengthTreeTag) goto(readEnd()) + else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() } + else if (tag >= firstASTTreeTag) skipTree() + else if (tag >= firstNatTreeTag) readNat() + + def skipTree(): Unit = skipTree(readByte()) + + def skipParams(): Unit = + while ({ + val tag = nextByte + tag == PARAM || tag == TYPEPARAM || tag == EMPTYCLAUSE || tag == SPLITCLAUSE + }) skipTree() + + /** Record all directly nested definitions and templates in current tree + * as `OwnerTree`s in `buf`. + * A complication concerns member definitions. These are lexically nested in a + * Template node, but need to be listed separately in the OwnerTree of the enclosing class + * in order not to confuse owner chains. 
+ */ + def scanTree(buf: mutable.ListBuffer[OwnerTree], mode: MemberDefMode): Unit = { + val start = currentAddr + val tag = readByte() + tag match { + case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | TEMPLATE => + val end = readEnd() + for (_ <- 0 until numRefs(tag)) readNat() + if (tag === TEMPLATE) { + // Read all member definitions now, whereas non-members are children of + // template's owner tree. + val nonMemberReader = fork + scanTrees(buf, end, MemberDefsOnly) + buf += new OwnerTree(start, tag, nonMemberReader, end) + } + else if (mode != NoMemberDefs) + buf += new OwnerTree(start, tag, fork, end) + goto(end) + case tag => + if (mode === MemberDefsOnly) skipTree(tag) + else if (tag >= firstLengthTreeTag) { + val end = readEnd() + val nrefs = numRefs(tag) + if (nrefs < 0) { + for (_ <- nrefs until 0) scanTree(buf, AllDefs) + goto(end) + } + else { + for (_ <- 0 until nrefs) readNat() + if (tag === BIND) { + // a Bind is never the owner of anything, so we set `end = start` + buf += new OwnerTree(start, tag, fork, end = start) + } + + scanTrees(buf, end, AllDefs) + } + } + else if (tag >= firstNatASTTreeTag) { readNat(); scanTree(buf, AllDefs) } + else if (tag >= firstASTTreeTag) scanTree(buf, AllDefs) + else if (tag >= firstNatTreeTag) readNat() + } + } + + /** Record all directly nested definitions and templates between current address and `end` + * as `OwnerTree`s in `buf` + */ + def scanTrees(buf: mutable.ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode): Unit = { + while (currentAddr.index < end.index) scanTree(buf, mode) + assert(currentAddr.index === end.index) + } + + /** The next tag, following through SHARED tags */ + def nextUnsharedTag: Int = { + val tag = nextByte + if (tag === SHAREDtype || tag === SHAREDterm) { + val lookAhead = fork + lookAhead.reader.readByte() + forkAt(lookAhead.reader.readAddr()).nextUnsharedTag + } + else tag + } + + def readTastyName(): TastyName = nameAtRef(readNameRef()) + +// ------ Reading types 
----------------------------------------------------- + + /** Read names in an interleaved sequence of types/bounds and (parameter) names, + * possibly followed by a sequence of modifiers. + */ + def readParamNamesAndMods(end: Addr): (ArraySeq[TastyName], TastyFlagSet) = { + val names = + collectWhile(currentAddr != end && !isModifierTag(nextByte)) { + skipTree() + readTastyName() + } + var mods = EmptyTastyFlags + while (currentAddr != end) { // avoid boxing the mods + readByte() match { + case IMPLICIT => mods |= Implicit + case ERASED => mods |= Erased + case GIVEN => mods |= Given + } + } + (names.to(ArraySeq), mods) + } + + /** Read `n` parameter types or bounds which are interleaved with names */ + def readParamTypes(ps: ArraySeq[Symbol])(implicit ctx: Context): ArraySeq[Type] = { + def inner(ps1: Iterator[Symbol], buf: mutable.ArrayBuffer[Type]): ArraySeq[Type] = { + if (ps1.isEmpty) buf.to(ArraySeq) + else { + val p = ps1.next() + val rest = ps1 + val localCtx = ctx.withOwner(p) + val t = readType()(localCtx) + readNat() // skip name + inner(rest, buf += t) + } + } + inner(ps.iterator, new mutable.ArrayBuffer) + } + + /** Read reference to definition and return symbol created at that definition */ + def readSymRef()(implicit ctx: Context): Symbol = symbolAt(readAddr()) + + /** The symbol at given address; create a new one if none exists yet */ + def symbolAt(addr: Addr)(implicit ctx: Context): Symbol = symAtAddr.get(addr) match { + case Some(sym) => + sym + case None => + ctx.trace(traceForwardReference(addr)) { + val ctxAtOwner = ctx.withOwner(ownerTree.findOwner(addr)) + forkAt(addr).createSymbol()(ctxAtOwner) + } + } + + private def traceForwardReference(addr: Addr) = TraceInfo[Symbol]( + query = s"creating forward reference", + qual = s"at $addr", + res = sym => s"$addr forward reference to ${showSym(sym)}" + ) + + /** The symbol defined by current definition */ + def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match { 
+ case Some(sym) => + assert(ctx.owner === sym.owner, s"owner discrepancy for ${showSym(sym)}, expected: ${showSym(ctx.owner)}, found: ${showSym(sym.owner)}") + sym + case None => + ctx.trace(traceCurrentSymbol(currentAddr)) { + createSymbol() + } + } + + private def traceCurrentSymbol(addr: Addr) = TraceInfo[Symbol]( + query = "create symbol at current address", + qual = s"$addr", + res = sym => if (!isSymbol(sym)) s"evicted symbol at $addr" else s"created ${showSym(sym)} at $addr" + ) + + def readConstant(tag: Int)(implicit ctx: Context): Constant = (tag: @switch) match { + case UNITconst => + tpd.Constant(()) + case TRUEconst => + tpd.Constant(true) + case FALSEconst => + tpd.Constant(false) + case BYTEconst => + tpd.Constant(readInt().toByte) + case SHORTconst => + tpd.Constant(readInt().toShort) + case CHARconst => + tpd.Constant(readNat().toChar) + case INTconst => + tpd.Constant(readInt()) + case LONGconst => + tpd.Constant(readLongInt()) + case FLOATconst => + tpd.Constant(java.lang.Float.intBitsToFloat(readInt())) + case DOUBLEconst => + tpd.Constant(java.lang.Double.longBitsToDouble(readLongInt())) + case STRINGconst => + tpd.Constant(readTastyName().asSimpleName.raw) + case NULLconst => + tpd.Constant(null) + case CLASSconst => + tpd.Constant(readType()) + } + + /** Read a type */ + def readType()(implicit ctx: Context): Type = { + val start = currentAddr + val tag = readByte() + + def traceReadType = TraceInfo[Type]( + query = "reading type", + qual = s"${astTagToString(tag)} $start", + res = tpe => s"exit ${showType(tpe)} ${astTagToString(tag)} $start" + ) + + def registeringTypeWith[T](tp: Type, op: => T): T = { + typeAtAddr(start) = tp + op + } + + def readLengthType(): Type = { + val end = readEnd() + + def readMethodic[N <: TastyName]( + factory: LambdaFactory[N], + parseFlags: FlagSets.FlagParser, + nameMap: TastyName => N + )(implicit ctx: Context): Type = { + val result = typeAtAddr.getOrElse(start, { + // TODO [tasty]: can we share 
LambdaTypes/RecType/RefinedType safely + // under a new context owner? (aka when referenced by a `SHAREDtype`). + // So far this has been safe to do, but perhaps with macros comparing the + // owners of the symbols of PolyTypes maybe not? + // one concrete example where TypeLambdaType is shared between two unrelated classes: + // - test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala + val nameReader = fork + nameReader.skipTree() // skip result + val paramReader = nameReader.fork + val (paramNames, mods) = nameReader.readParamNamesAndMods(end) + LambdaFactory.parse(factory, paramNames.map(nameMap), parseFlags(mods)(ctx))( + ps => paramReader.readParamTypes(ps), + () => readType(), + pt => typeAtAddr(start) = pt, // register the lambda so that we can access its parameters + ) + }) + goto(end) + result + } + + def readVariances(tp: Type): Type = tp match { + case tp: LambdaPolyType if currentAddr != end => + val vs = until(end) { + readByte() match { + case STABLE => Variance.Invariant + case COVARIANT => Variance.Covariant + case CONTRAVARIANT => Variance.Contravariant + } + } + tp.withVariances(vs) + case _ => tp + } + + val result = + (tag: @switch) match { + case TERMREFin => + defn.TermRefIn(name = readTastyName(), prefix = readType(), space = readType()) + case TYPEREFin => + defn.TypeRefIn( + name = readTastyName().toTypeName, prefix = readType(), space = readType()) + case REFINEDtype => + var name = readTastyName() + val parent = readType() + if (nextUnsharedTag === TYPEBOUNDS) name = name.toTypeName + ctx.enterRefinement(parent)(refinedCtx => + defn.RefinedType(parent, name, refinedCtx.owner, readType()) + ) + case APPLIEDtype => defn.AppliedType(readType(), until(end)(readType())) + case TYPEBOUNDS => + val lo = readType() + if (nothingButMods(end)) readVariances(lo) + else defn.TypeBounds(lo, readVariances(readType())) + case ANNOTATEDtype => defn.AnnotatedType(readType(), readTerm()(ctx.addMode(ReadAnnotTopLevel))) + case ANDtype => 
defn.IntersectionType(readType(), readType()) + case ORtype => unionIsUnsupported + case SUPERtype => defn.SuperType(readType(), readType()) + case MATCHtype | MATCHCASEtype => matchTypeIsUnsupported + case POLYtype => readMethodic(PolyTypeLambda, FlagSets.addDeferred, _.toTypeName) + case METHODtype => readMethodic(MethodTermLambda, FlagSets.parseMethod, id) + case TYPELAMBDAtype => readMethodic(HKTypeLambda, FlagSets.addDeferred, _.toTypeName) + case PARAMtype => defn.ParamRef(readTypeRef(), readNat()) // reference to a parameter within a LambdaType + case FLEXIBLEtype => + // dotty would wrap the inner type in FlexibleType (with lower bound >: tpe | Null), + // but we can leave as-is - as Scala 2 does not have explicit nulls. + readType() + } + assert(currentAddr === end, s"$start $currentAddr $end ${astTagToString(tag)}") + result + } + + def readSimpleType(): Type = { + (tag: @switch) match { + case TYPEREFdirect => defn.NamedType(defn.NoPrefix, readSymRef()) + case TERMREFdirect => defn.NamedType(defn.NoPrefix, readSymRef()) + case TYPEREFsymbol | TERMREFsymbol => defn.NamedType(sym = readSymRef(), prefix = readType()) + case TYPEREFpkg => defn.NamedType(defn.NoPrefix, sym = readPackageRef().objectImplementation) + case TERMREFpkg => defn.NamedType(defn.NoPrefix, sym = readPackageRef()) + case TYPEREF => defn.TypeRef(name = readTastyName().toTypeName, prefix = readType()) + case TERMREF => defn.TermRef(name = readTastyName(), prefix = readType()) + case THIS => defn.ThisType(readType()) + case RECtype => + typeAtAddr.get(start) match { + case Some(tp) => + skipTree(tag) + tp + case None => + defn.RecType(rt => + registeringTypeWith(rt, readType()(ctx.withOwner(rt.refinementClass))) + ).tap(typeAtAddr(start) = _) + } + case RECthis => defn.RecThis(readTypeRef()) + case SHAREDtype => + val ref = readAddr() + // TODO [tasty]: there are enough cases now to suggest that ultimately + // nsc is not designed around cached types, e.g. 
+ // - unique symbols for wildcard type arguments + // - context-sensitive substitutions of types we need to make, + // which may propagate incorrect semantics to other sites if the type is shared + // so we should probably remove this, + // however as of writing removing caching breaks `TastyTestJUnit.run`, + // so further investigation is needed. + typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) + case BYNAMEtype => defn.ByNameType(readType()) + case _ => defn.ConstantType(readConstant(tag)) + } + } + ctx.traceV(traceReadType) { + if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() + } + } + + private def readPackageRef()(implicit ctx: Context): Symbol = { + ctx.requiredPackage(readTastyName()) + } + + def readTypeRef(): Type = typeAtAddr(readAddr()) + +// ------ Reading definitions ----------------------------------------------------- + + private def nothingButMods(end: Addr): Boolean = + currentAddr === end || isModifierTag(nextByte) + + private def normalizeName(isType: Boolean, name: TastyName)(implicit ctx: Context): TastyName = { + val prior = if (ctx.owner.isTrait && name === TastyName.Constructor) TastyName.MixinConstructor else name + if (isType) prior.toTypeName else prior + } + + private def addInferredFlags(tag: Int, tastyFlags: TastyFlagSet, name: TastyName, isAbsType: Boolean, isClass: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): TastyFlagSet = { + var flags = tastyFlags + if (flags.is(Given)) + flags |= Implicit + val lacksDefinition = + rhsIsEmpty && + name.isTermName && !name.isConstructorName && !flags.isOneOf(FlagSets.TermParamOrAccessor) || + isAbsType || + flags.is(Opaque) && !isClass + if (lacksDefinition && tag != PARAM) flags |= Deferred + if (isClass && flags.is(Trait)) flags |= Abstract + if (tag === DEFDEF) { + flags |= Method + if (name.isDefaultName) + flags |= HasDefault // this corresponds to DEFAULTPARAM + if (ctx.isJava && !lacksDefinition && ctx.owner.is(Trait) && !name.isConstructorName) + flags 
|= HasDefault // will be replaced by JAVA_DEFAULTMETHOD + } + if (tag === VALDEF) { + if (flags.is(Inline) || ctx.owner.is(Trait)) + flags |= FieldAccessor + if (flags.not(Mutable)) + flags |= Stable + if (flags.is(Case | Enum)) // singleton enum case + flags |= Object | Stable // encode as a module (this needs to be corrected in bytecode) + } + if (ctx.owner.isClass) { + if (tag === TYPEPARAM) flags |= Param + else if (tag === PARAM) { + flags |= ParamSetter | FieldAccessor | Stable + if (!rhsIsEmpty) // param alias + flags |= Method + } + } + else if (isParamTag(tag)) flags |= Param + if (flags.is(Object)) flags |= (if (tag === VALDEF) FlagSets.Creation.ObjectDef else FlagSets.Creation.ObjectClassDef) + flags + } + + def isAbstractType(@unused ttag: Int)(implicit ctx: Context): Boolean = nextUnsharedTag match { + case LAMBDAtpt => + val rdr = fork + rdr.reader.readByte() // tag + rdr.reader.readNat() // length + rdr.skipParams() // tparams + rdr.isAbstractType(rdr.nextUnsharedTag) + case TYPEBOUNDS | TYPEBOUNDStpt => true + case _ => false + } + + /** Create symbol of definition node and enter in symAtAddr map + * @return the created symbol + */ + def createSymbol()(implicit ctx: Context): Symbol = nextByte match { + case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM => + createMemberSymbol() + case TEMPLATE => + val localDummy = ctx.newLocalDummy + registerSym(currentAddr, localDummy, rejected = false) + localDummy + case tag => + assert(tag != BIND, "bind pattern symbol creation from TASTy") + throw new Error(s"illegal createSymbol at $currentAddr, tag = $tag") + } + + /** Create symbol of member definition or parameter node and enter in symAtAddr map + * @return the created symbol + */ + def createMemberSymbol()(implicit ctx: Context): Symbol = { + + def rejectSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Boolean = { + def isPureMixinCtor = + name == TastyName.MixinConstructor && owner.isTrait && flags.is(Stable) + def isInvisible = + 
flags.is(Invisible) + + isPureMixinCtor || isInvisible + } + + val start = currentAddr + val tag = readByte() + def isTypeTag = tag === TYPEDEF || tag === TYPEPARAM + val end = readEnd() + val parsedName: TastyName = readTastyName() + ctx.log(s"${astTagToString(tag)} ${parsedName.debug} in ${location(ctx.owner)}") + skipParams() + val ttag = nextUnsharedTag + val isAbsType = isAbstractType(ttag) + val isClass = ttag === TEMPLATE + val templateStart = currentAddr + skipTree() // tpt + val rhsIsEmpty = nothingButMods(end) + if (!rhsIsEmpty) skipTree() + val (parsedFlags0, annotations, privateWithin) = + readModifiers(end, readTypedAnnot, readTypedWithin, noSymbol) + val name = normalizeName(isTypeTag, parsedName) + val flags = addInferredFlags(tag, parsedFlags0, name, isAbsType, isClass, rhsIsEmpty) + def mkCompleter = new Completer(isClass, subReader(start, end), flags)(ctx.retractMode(IndexScopedStats)) + def isTypeParameter = flags.is(Param) && isTypeTag + def canEnterInClass = !isTypeParameter + ctx.log { + val privateFlag = { + if (isSymbol(privateWithin)) { + if (flags.is(Protected)) s"Protected[$privateWithin]" + else s"Private[$privateWithin]" + } + else { + "" + } + } + val debugFlags = { + if (privateFlag.nonEmpty) { + val flags0 = flags &~ Protected + val rest = if (!flags0) "" else s" ${flags0.debug}" + privateFlag + rest + } + else flags.debug + } + s"""$start parsed flags $debugFlags""" + } + val rejected = rejectSymbol(ctx.owner, name, flags) + val sym = { + if (tag === TYPEPARAM && ctx.owner.isConstructor) { + // TASTy encodes type parameters for constructors + // nsc only has class type parameters + val tparam = ctx.findOuterClassTypeParameter(name.toTypeName) + ctx.log(s"$start reusing class type param ${showSym(tparam)}") + tparam + } + else { + ctx.findRootSymbol(roots, name) match { + case Some(rootd) => + roots -= rootd + if (rejected) { + ctx.evict(rootd) + noSymbol + } + else { + ctx.redefineSymbol(rootd, mkCompleter, privateWithin) + 
ctx.log(s"$start replaced info of root ${showSym(rootd)}") + rootd + } + case _ => + if (rejected) noSymbol + else if (isClass) ctx.delayClassCompletion(ctx.owner, name.toTypeName, mkCompleter, privateWithin) + else ctx.delayCompletion(ctx.owner, name, mkCompleter, privateWithin) + } + } + } + registerSym(start, sym, rejected) + if (isSymbol(sym)) { + if (tag == VALDEF && flags.is(FlagSets.SingletonEnum)) + ctx.markAsEnumSingleton(sym) + if (canEnterInClass && ctx.owner.isClass) + ctx.enterIfUnseen(sym) + if (isClass) { + ctx.log(s"$templateStart indexing params (may be empty):") + val localCtx = ctx.withOwner(sym) + forkAt(templateStart).indexTemplateParams()(localCtx) + } + ctx.adjustAnnotations(sym, annotations) + } + goto(start) + sym + } + + /** Read modifier list into triplet of flags, annotations and a privateWithin + * boundary symbol. + */ + def readModifiers[WithinType] + (end: Addr, readAnnot: Context => DeferredAnnotation, readWithin: Context => WithinType, defaultWithin: WithinType) + (implicit ctx: Context): (TastyFlagSet, List[DeferredAnnotation], WithinType) = { + var flags = EmptyTastyFlags + var annotFns: List[DeferredAnnotation] = Nil + var privateWithin = defaultWithin + while (currentAddr.index != end.index) { + def addFlag(flag: TastyFlagSet) = { + flags |= flag + readByte() + } + nextByte match { + case PRIVATE => addFlag(Private) + case PROTECTED => addFlag(Protected) + case ABSTRACT => + readByte() + nextByte match { + case OVERRIDE => addFlag(AbsOverride) + case _ => flags |= Abstract + } + case FINAL => addFlag(Final) + case SEALED => addFlag(Sealed) + case CASE => addFlag(Case) + case IMPLICIT => addFlag(Implicit) + case ERASED => addFlag(Erased) + case LAZY => addFlag(Lazy) + case OVERRIDE => addFlag(Override) + case INLINE => addFlag(Inline) + case INLINEPROXY => addFlag(InlineProxy) + case MACRO => addFlag(Macro) + case OPAQUE => addFlag(Opaque) + case STATIC => addFlag(Static) + case OBJECT => addFlag(Object) + case TRAIT => 
addFlag(Trait) + case TRANSPARENT => addFlag(Transparent) + case INFIX => addFlag(Infix) + case ENUM => addFlag(Enum) + case LOCAL => addFlag(Local) + case SYNTHETIC => addFlag(Synthetic) + case ARTIFACT => addFlag(Artifact) + case MUTABLE => addFlag(Mutable) + case FIELDaccessor => addFlag(FieldAccessor) + case CASEaccessor => addFlag(CaseAccessor) + case COVARIANT => addFlag(Covariant) + case CONTRAVARIANT => addFlag(Contravariant) + case HASDEFAULT => addFlag(HasDefault) + case STABLE => addFlag(Stable) + case EXTENSION => addFlag(Extension) + case GIVEN => addFlag(Given) + case PARAMsetter => addFlag(ParamSetter) + case PARAMalias => addFlag(ParamAlias) + case EXPORTED => addFlag(Exported) + case OPEN => addFlag(Open) + case INVISIBLE => addFlag(Invisible) + case TRACKED => addFlag(Tracked) + case PRIVATEqualified => + readByte() + privateWithin = readWithin(ctx) + case PROTECTEDqualified => + addFlag(Protected) + privateWithin = readWithin(ctx) + case ANNOTATION => + annotFns = readAnnot(ctx) :: annotFns + case tag => + assert(false, s"illegal modifier tag ${astTagToString(tag)} at $currentAddr, end = $end") + } + } + (flags, if (ctx.ignoreAnnotations) Nil else annotFns.reverse, privateWithin) + } + + private val readTypedWithin: Context => Symbol = implicit ctx => readType().typeSymbolDirect + + private val readTypedAnnot: Context => DeferredAnnotation = { implicit ctx => + val annotCtx = ctx.addMode(ReadAnnotTopLevel) + val start = currentAddr + readByte() // tag + val end = readEnd() + val annotSym = readType()(annotCtx).typeSymbolDirect + val annotStart = currentAddr + ctx.log(s"$annotStart collected annotation ${showSym(annotSym)}, starting at $start, ending at $end") + val mkTree = readLaterWithOwner(end, rdr => ctx => + ctx.trace(traceAnnotation(annotStart, annotSym, ctx.owner)(ctx)) { + rdr.readTerm()(ctx) + } + )(annotCtx.retractMode(IndexScopedStats)) + DeferredAnnotation.fromTree(annotSym)(mkTree) + } + + private def traceAnnotation(annotStart: 
Addr, annotSym: Symbol, annotee: Symbol)(implicit ctx: Context) = TraceInfo[Tree]( + query = s"reading annotation tree", + qual = s"${showSym(annotSym)} at $annotStart", + res = atree => s"annotation of ${showSym(annotee)} = ${showTree(atree)}" + ) + + /** Create symbols for the definitions in the statement sequence between + * current address and `end`. + */ + def indexStats(end: Addr)(implicit ctx: Context): Unit = { + while (currentAddr.index < end.index) { + nextByte match { + case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM => + symbolAtCurrent() + skipTree() + case IMPORT | EXPORT => + skipTree() + case PACKAGE => + processPackage(end => implicit ctx => indexStats(end)) + case _ => + skipTree() + } + } + assert(currentAddr.index === end.index) + } + + /** Process package with given operation `op`. The operation takes as arguments + * - an end address, + * - a context which has the processed package as owner + */ + def processPackage[T](op: Addr => Context => T)(implicit ctx: Context): T = { + readByte() // tag + val end = readEnd() + val tpe = readType() + op(end)(ctx.withOwner(tpe.typeSymbolDirect.objectImplementation)) + } + + /** Create symbols the longest consecutive sequence of parameters with given + * `tag` starting at current address. + */ + def indexParams(tag: Int)(implicit ctx: Context): Unit = { + while (nextByte === tag) { + symbolAtCurrent() + skipTree() + } + } + + /** Create symbols for all type and value parameters of template starting + * at current address. 
+ */ + def indexTemplateParams()(implicit ctx: Context): Unit = { + assert(readByte() === TEMPLATE) + readEnd() + indexParams(TYPEPARAM) + indexParams(PARAM) + } + + def readIndexedMember()(implicit ctx: Context): NoCycle = cycleAtAddr.remove(currentAddr) match { + case Some(maybeCycle) => + assert(maybeCycle ne Tombstone, s"Cyclic reference while unpickling definition at address ${currentAddr.index} in file ${ctx.source}") + skipTree() + maybeCycle.asInstanceOf[NoCycle] + case _ => + val start = currentAddr + cycleAtAddr(start) = Tombstone + val noCycle = initializeMember() + cycleAtAddr.remove(start) + noCycle + } + + private def initializeMember()(implicit ctx: Context): NoCycle = { + val symAddr = currentAddr + val tag = readByte() + val end = readEnd() + val tname = readTastyName() + val sym = symAtAddr(symAddr) + + def readParamss()(implicit ctx: Context): List[List[NoCycle]] = { + def readRest() = { + if (nextByte == SPLITCLAUSE) readByte() + readParamss() + } + nextByte match { + case PARAM => readParams[NoCycle](PARAM) :: readRest() + case TYPEPARAM => readParams[NoCycle](TYPEPARAM) :: readRest() + case EMPTYCLAUSE => readByte(); Nil :: readRest() + case _ => Nil + } + } + + def checkUnsupportedFlags(unsupported: TastyFlagSet)(implicit ctx: Context): Unit = { + unsupportedWhen(unsupported.hasFlags, s"${showTasty(unsupported)} ${sym.kindString} $tname") + } + + def DefDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { + val isMacro = repr.tflags.is(Erased | Macro) + val supportedFlags = Extension | Exported | Infix | Given | optFlag(isMacro)(Erased) + checkUnsupportedFlags(repr.unsupportedFlags &~ supportedFlags) + val isCtor = sym.isConstructor + val paramss = readParamss()(localCtx) + val typeClause = { + // A type parameter list must be non-empty and with type symbols + val first = paramss.take(1) + if (first.exists(_.headOption.exists(nc => symFromNoCycle(nc).isType))) first.head else Nil + } + val valueClauses = paramss.drop(if 
(typeClause.isEmpty) 0 else 1) + val typeParams = typeClause.map(symFromNoCycle) + val vparamss = { + val vparamSymss = valueClauses.map(_.map(symFromNoCycle)) + // A value parameter list may be empty, or filled with term symbols + val hasTypeParams = vparamSymss.exists(_.headOption.exists(_.isType)) + unsupportedWhen(hasTypeParams, { + val noun = ( + if (isCtor) "constructor" + else if (repr.unsupportedFlags.is(Extension)) "extension method" + else "method" + ) + s"$noun with unmergeable type parameters: $tname" + }) + vparamSymss + } + val tpt = readTpt()(localCtx) + if (isMacro) { + val impl = tpd.Macro(readTerm()(ctx.addMode(ReadMacro))) + val annot = symbolTable.AnnotationInfo( + atp = symbolTable.definitions.MacroImplLocationAnnotation.tpe, + args = List(impl), + assocs = Nil + ) + sym.addAnnotation(annot) + } + val valueParamss = normalizeIfConstructor(sym.enclClass, vparamss, valueClauses, isCtor) + val resType = effectiveResultType(sym, tpt.tpe) + ctx.setInfo(sym, defn.DefDefType(if (isCtor) Nil else typeParams, valueParamss, resType)) + } + + def ValDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { + // valdef in TASTy is either a singleton object or a method forwarder to a local value. 
+ checkUnsupportedFlags(repr.unsupportedFlags &~ (Enum | Extension | Exported | Given)) + val tpe = readTpt()(localCtx).tpe + ctx.setInfo(sym, + if (repr.tflags.is(FlagSets.SingletonEnum)) { + ctx.completeEnumSingleton(sym, tpe) + defn.NamedType(sym.owner.thisPrefix, sym.objectImplementation) + } + else if (ctx.isJava && repr.tflags.is(FlagSets.JavaEnumCase)) defn.ConstantType(tpd.Constant(sym)) + else if (!ctx.isJava && sym.isFinal && isConstantType(tpe)) defn.InlineExprType(tpe) + else if (sym.isMethod) defn.ExprType(tpe) + else tpe + ) + } + + def TypeDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { + val allowedShared = Enum | Opaque | Infix | Given + val allowedTypeFlags = allowedShared | Exported + val allowedClassFlags = allowedShared | Open | Transparent | Tracked + if (sym.isClass) { + checkUnsupportedFlags(repr.unsupportedFlags &~ allowedClassFlags) + sym.owner.ensureCompleted(CompleteOwner) + readTemplate()(localCtx) + } + else { + checkUnsupportedFlags(repr.unsupportedFlags &~ allowedTypeFlags) + sym.info = defn.InitialTypeInfo // needed to avoid cyclic references when unpickling rhs, see dotty_i3816.scala + val rhs = readTpt()(if (repr.tflags.is(Opaque)) localCtx.addMode(OpaqueTypeDef) else localCtx) + val info = + if (repr.tflags.is(Opaque)) { + val (info, alias) = defn.OpaqueTypeToBounds(rhs.tpe) + ctx.markAsOpaqueType(sym, alias) + info + } + else rhs.tpe + ctx.setInfo(sym, defn.NormalisedBounds(info, sym)) + if (sym.is(Param)) sym.reset(Private | Protected) + } + } + + def TermParam(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { + checkUnsupportedFlags(repr.unsupportedFlags &~ (ParamAlias | Exported | Given | Tracked)) + val tpt = readTpt()(localCtx) + ctx.setInfo(sym, + if (nothingButMods(end) && sym.not(ParamSetter)) tpt.tpe + else defn.ExprType(tpt.tpe)) + } + + def initialize(localCtx: Context)(implicit ctx: Context) = ctx.trace(traceCompletion(symAddr, sym)) { + sym.rawInfo match { + case repr: 
TastyRepr => + tag match { + case DEFDEF => DefDef(repr, localCtx) + case VALDEF => ValDef(repr, localCtx) + case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) + case PARAM => TermParam(repr, localCtx) + } + repr.tflags + case _ => // nothing to do here (assume correctly initalised) + ctx.log(s"${showSym(sym)} is already initialised, in owner ${showSym(sym.owner)}") + EmptyTastyFlags + } + } + + try { + val localCtx = ctx.withOwner(sym) + val tflags = { + if (sym.isClass) { + inIndexScopedStatsContext(localCtx0 => initialize(localCtx0)(ctx))(localCtx) + } + else { + initialize(localCtx) + } + } + NoCycle(at = symAddr, tflags) + } + catch ctx.onCompletionError(sym) + finally goto(end) + } + + private def traceCompletion(addr: Addr, sym: Symbol)(implicit ctx: Context) = TraceInfo[TastyFlagSet]( + query = "begin completion", + qual = s"${showSym(sym)} in context ${showSym(ctx.owner)} $addr", + res = _ => s"completed ${showSym(sym)}: ${showType(sym.info)}" + ) + + private def readTemplate()(implicit ctx: Context): Unit = { + val start = currentAddr + val cls = ctx.enterClassCompletion() + val localDummy = symbolAtCurrent() + assert(readByte() === TEMPLATE) + val end = readEnd() + + def traceCompleteParams = TraceInfo[List[Symbol]]( + query = "force template parameters", + qual = s"${showSym(cls)} $currentAddr", + res = _ => "forced template parameters" + ) + + def traceIndexMembers = TraceInfo[Unit]( + query = "index template body", + qual = s"${showSym(cls)} $currentAddr", + res = _ => "indexed template body" + ) + + def traceCollectParents = TraceInfo[List[Type]]( + query = "collect template parents", + qual = s"${showSym(cls)} $currentAddr", + res = { parentTypes => + val addendum = parentTypes.map(lzyShow).mkString(s"`${cls.fullName} extends ", " with ", "`") + s"collected template parents $addendum" + } + ) + + def traceReadSelf = TraceInfo[Type]( + query = "reading template self-type", + qual = s"${showSym(cls)} $currentAddr", + res = tpe => s"template self-type 
is $tpe" + ) + + def completeParameters()(implicit ctx: Context): List[Symbol] = ctx.trace(traceCompleteParams) { + val tparams = readIndexedParams[NoCycle](TYPEPARAM).map(symFromNoCycle) + if (tparams.nonEmpty) { + cls.info = defn.PolyType(tparams, cls.info) + } + readIndexedParams[NoCycle](PARAM) // skip value parameters + tparams + } + + def indexMembers()(implicit ctx: Context): Unit = ctx.trace(traceIndexMembers) { + val bodyIndexer = fork + while ({val tag = bodyIndexer.reader.nextByte; tag != DEFDEF && tag != SPLITCLAUSE}) + bodyIndexer.skipParentTree() // skip until primary ctor + bodyIndexer.indexStats(end) + } + + def collectParents()(implicit ctx: Context): List[Type] = ctx.trace(traceCollectParents) { + val parentCtx = ctx.withOwner(localDummy).addMode(ReadParents) + val parentWithOuter = parentCtx.addMode(OuterTerm) + collectWhile({val tag = nextByte; tag != SELFDEF && tag != DEFDEF && tag != SPLITCLAUSE}) { + defn.adjustParent( + nextUnsharedTag match { + case APPLY | TYPEAPPLY | BLOCK => readTerm()(parentWithOuter).tpe + case _ => readTpt()(parentCtx).tpe + } + ) + } + } + + def addSelfDef()(implicit ctx: Context): Unit = { + val selfTpe = ctx.trace(traceReadSelf) { + readByte() // read SELFDEF tag + readLongNat() // skip Name + readTpt().tpe + } + cls.typeOfThis = selfTpe + } + + def setInfoWithParents(tparams: List[Symbol], parentTypes: List[Type])(implicit ctx: Context): Unit = { + val info = { + val classInfo = defn.ClassInfoType(parentTypes, cls) + // TODO [tasty]: if support opaque types, refine the self type with any opaque members here + if (tparams.isEmpty) classInfo + else defn.PolyType(tparams, classInfo) + } + ctx.setInfo(cls, info) + } + + def traverseTemplate()(implicit ctx: Context): Unit = { + val tparams = completeParameters() + indexMembers() + val parents = collectParents() + if (nextByte === SELFDEF) { + addSelfDef() + } + if (nextByte === SPLITCLAUSE) { + assert(ctx.isJava, s"unexpected SPLITCLAUSE at $start") + } + 
setInfoWithParents(tparams, ctx.processParents(cls, parents)) + } + + traverseTemplate() + + } + + def isTopLevel: Boolean = nextByte === IMPORT || nextByte === PACKAGE + + def readIndexedStatAsSym(@unused exprOwner: Symbol)(implicit ctx: Context): NoCycle = nextByte match { + case TYPEDEF | VALDEF | DEFDEF => + readIndexedMember() + case IMPORT => + unsupportedTermTreeError("import statement") + case EXPORT => + unsupportedTermTreeError("export statement") + case PACKAGE => + unsupportedTermTreeError("package statement") + case _ => + skipTree() // readTerm()(ctx.withOwner(exprOwner)) + NoCycle(at = NoAddr, tflags = EmptyTastyFlags) + } + + def readIndexedStatsAsSyms(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[NoCycle] = + until(end)(readIndexedStatAsSym(exprOwner)) + + def readStatsAsSyms(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[NoCycle] = { + def forkAndIndexStats(implicit ctx: Context): Unit = fork.indexStats(end) + inIndexStatsContext(forkAndIndexStats(_)) + readIndexedStatsAsSyms(exprOwner, end) + } + + def readIndexedParams[T <: MaybeCycle /*MemberDef*/](tag: Int)(implicit ctx: Context): List[T] = + collectWhile(nextByte === tag) { readIndexedMember().asInstanceOf[T] } + + def readParams[T <: MaybeCycle /*MemberDef*/](tag: Int)(implicit ctx: Context): List[T] = { + if (nextByte == tag) { + fork.indexParams(tag) + readIndexedParams(tag) + } + else { + Nil + } + } + +// ------ Reading trees ----------------------------------------------------- + + def readTerm()(implicit ctx: Context): Tree = { + val start = currentAddr + val tag = readByte() + + def traceReadTerm = TraceInfo[Tree]( + query = "reading term", + qual = s"${astTagToString(tag)} $start", + res = tree => s"exit term (`${showTree(tree)}`: ${showType(tree.tpe)}) ${astTagToString(tag)} $start" + ) + + def inParentCtor = ctx.mode.is(ReadParents | OuterTerm) + + def readPathTerm(): Tree = { + goto(start) + tpd.PathTree(readType()) + } + + def readQualId(): 
(TastyName.TypeName, Type) = { + val qual = readTerm() + (qual.typeIdent, defn.ThisType(qual.tpe)) + } + + def completeSelectType(name: TastyName.TypeName)(implicit ctx: Context): Tree = + completeSelect(name) + + def completeSelect(name: TastyName)(implicit ctx: Context): Tree = + tpd.Select(readTerm(), name) + + def completeSelectionParent(name: TastyName)(implicit ctx: Context): Tree = { + assert(name.isSignedConstructor, s"Parent of ${ctx.owner} is not a constructor.") + readTerm() // just need the type of the parent + } + + def readSimpleTerm(): Tree = tag match { + case SHAREDterm => forkAt(readAddr()).readTerm() + case IDENT => tpd.Ident(readTastyName())(readType()) + case IDENTtpt => tpd.Ident(readTastyName().toTypeName)(readType()) + case SELECT => + if (inParentCtor) completeSelectionParent(readTastyName()) + else completeSelect(readTastyName()) + case SELECTtpt => completeSelectType(readTastyName().toTypeName) + case QUALTHIS => + val (qual, tref) = readQualId() + tpd.This(qual)(tref) + case NEW => tpd.New(readTpt()) + case SINGLETONtpt => tpd.SingletonTypeTree(readTerm()) + case BYNAMEtpt => tpd.ByNameTypeTree(readTpt()) + case NAMEDARG => tpd.NamedArg(readTastyName(), readTerm()) + case THROW => unsupportedTermTreeError("throw clause") + case _ => readPathTerm() + } + + def readLengthTerm(): Tree = { + val end = readEnd() + val result = + (tag: @switch) match { + case SELECTin => + val name = readTastyName() + val qual = readTerm() + if (inParentCtor) { + assert(name.isSignedConstructor, s"Parent of ${ctx.owner} is not a constructor.") + skipTree() + qual + } + else { + tpd.Select(readType())(qual, name) + } + case SUPER => + val qual = readTerm() + val (mixId, mixTpe) = ifBefore(end)(readQualId(), (TastyName.EmptyTpe, defn.NoType)) + tpd.Super(qual, mixId)(mixTpe) + case APPLY => + val fn = readTerm() + if (inParentCtor) { + until(end)(skipTree()) + tpd.TypeTree(fnResult(fn.tpe)) + } else { + val argsCtx = ctx.argumentCtx(fn) + tpd.Apply(fn, 
until(end)(readTerm()(argsCtx))) + } + case TYPEAPPLY => tpd.TypeApply(readTerm(), until(end)(readTpt())) + case APPLYsigpoly => + // this is skipped if it appears in parents, so only affects forced annotation trees + signaturePolymorphicIsUnsupported + case TYPED => tpd.Typed(readTerm(), readTpt()) + case IF => + if (nextByte === INLINE) unsupportedTermTreeError("inline conditional expression") + else tpd.If(readTerm(), readTerm(), readTerm()) // if is ok if its parts are made of constants/paths + case REPEATED => + val elemtpt = readTpt() + tpd.SeqLiteral(until(end)(readTerm()), elemtpt) + case REFINEDtpt => + val refineCls = symAtAddr.getOrElse(start, ctx.newRefinementClassSymbol) + registerSym(start, refineCls, rejected = false) + typeAtAddr(start) = refineCls.ref + val parent = readTpt() + ctx.withOwner(refineCls).enterRefinement(parent.tpe) { refinedCtx => + readStatsAsSyms(refineCls, end)(refinedCtx) + tpd.RefinedTypeTree(parent, Nil, refineCls) + } + case APPLIEDtpt => + // If we do directly a tpd.AppliedType tree we might get a + // wrong number of arguments in some scenarios reading F-bounded + // types. This came up in #137 of collection strawman. 
+ tpd.AppliedTypeTree(readTpt(), until(end)(readTpt())) + case ANNOTATEDtpt => tpd.Annotated(readTpt(), readTerm()(ctx.addMode(ReadAnnotTopLevel))) + case LAMBDAtpt => tpd.LambdaTypeTree(readParams[NoCycle](TYPEPARAM).map(symFromNoCycle), readTpt()) + case MATCHtpt => matchTypeIsUnsupported + case TYPEBOUNDStpt => + val lo = readTpt() + val hi = if (currentAddr == end) lo else readTpt() + + val alias = { + if (currentAddr == end) { + untpd.EmptyTree + } + else { + assert(ctx.mode.is(OpaqueTypeDef)) + readTpt()(ctx.retractMode(OpaqueTypeDef)) + } + } + + tpd.TypeBoundsTree(lo, hi, alias) + case BLOCK => + if (inParentCtor | ctx.mode.is(ReadMacro)) { + val exprReader = fork + skipTree() + until(end)(skipTree()) //val stats = readStats(ctx.owner, end) + exprReader.readTerm() + } + else unsupportedTermTreeError("block expression") + case ASSIGN => unsupportedTermTreeError("assignment expression") + case LAMBDA => unsupportedTermTreeError("anonymous function literal") + case MATCH => unsupportedTermTreeError("match expression") + case RETURN => unsupportedTermTreeError("return statement") + case WHILE => unsupportedTermTreeError("loop statement") + case TRY => unsupportedTermTreeError("try expression") + case BIND => unsupportedTermTreeError("bind pattern") + case ALTERNATIVE => unsupportedTermTreeError("pattern alternative") + case UNAPPLY => unsupportedTermTreeError("unapply pattern") + case INLINED => unsupportedTermTreeError("inlined expression") + case SELECTouter => metaprogrammingIsUnsupported // only within inline + case QUOTE => abortQuote + case SPLICE => abortSplice + case QUOTEPATTERN => abortQuotePattern + case SPLICEPATTERN => abortSplicePattern + case HOLE => abortMacroHole + case _ => readPathTerm() + } + assert(currentAddr === end, s"$start $currentAddr $end ${astTagToString(tag)}") + result + } + + ctx.traceV(traceReadTerm) { + if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() // dotty sets span of tree to start + } + } + + def 
readTpt()(implicit ctx: Context): Tree = { + val tpt: Tree = nextByte match { + case SHAREDterm => + readByte() + forkAt(readAddr()).readTpt() + case BLOCK => // BLOCK appears in type position when quoting a type, but only in the body of a method + metaprogrammingIsUnsupported + case HOLE => abortMacroHole + case tag => + if (isTypeTreeTag(tag)) readTerm()(ctx.retractMode(OuterTerm)) + else { + val tp = readType() + if (isTypeType(tp)) tpd.TypeTree(tp) else untpd.EmptyTree + } + } + tpt + } + + /** + * A HOLE should never appear in TASTy for a top level class, only in quotes. + */ + private def abortMacroHole[T]: T = abortWith(msg = "Scala 3 macro hole in pickled TASTy") + private def abortQuote[T]: T = abortWith(msg = "Scala 3 quoted expression in pickled TASTy") + private def abortSplice[T]: T = abortWith(msg = "Scala 3 quoted splice in pickled TASTy") + private def abortQuotePattern[T]: T = abortWith(msg = "Scala 3 quoted pattern in pickled TASTy") + private def abortSplicePattern[T]: T = abortWith(msg = "Scala 3 quoted pattern splice in pickled TASTy") + + private def signaturePolymorphicIsUnsupported[T](implicit ctx: Context): T = + unsupportedTermTreeError("signature polymorphic application") + + private def metaprogrammingIsUnsupported[T](implicit ctx: Context): T = + unsupportedError("Scala 3 metaprogramming features") + + def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader => Context => T)(implicit ctx: Context): Symbol => Context => T = { + val localReader = fork + goto(end) + owner => ctx0 => readWith(localReader, owner, ctx.mode, ctx.source, op)(ctx0) + } + + } + + def readWith[T <: AnyRef]( + treader: TreeReader, + owner: Symbol, + mode: TastyMode, + source: AbstractFile, + op: TreeReader => Context => T)( + implicit ctx: Context + ): T = ctx.trace[T](traceReadWith(treader, mode, owner)) { + ctx.withPhaseNoLater("pickler") { ctx0 => + op(treader)(ctx0 + .withOwner(owner) + .withMode(mode) + .withSource(source) + ) + } + } + + private def 
traceReadWith[T](treader: TreeReader, mode: TastyMode, owner: Symbol) = TraceInfo[T]( + query = "read within owner", + qual = s"${showSym(owner)} with modes `${mode.debug}` at ${treader.reader.currentAddr}", + res = _ => s"exiting sub reader" + ) + + /** A lazy datastructure that records how definitions are nested in TASTY data. + * The structure is lazy because it needs to be computed only for forward references + * to symbols that happen before the referenced symbol is created (see `symbolAt`). + * Such forward references are rare. + * + * @param addr The address of tree representing an owning definition, NoAddr for root tree + * @param tag The tag at `addr`. Used to determine which subtrees to scan for children + * (i.e. if `tag` is template, don't scan member defs, as these belong already + * to enclosing class). + * @param reader The reader to be used for scanning for children + * @param end The end of the owning definition + */ + class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr) { + + private var myChildren: List[OwnerTree] = _ + + /** All definitions that have the definition at `addr` as closest enclosing definition */ + def children: List[OwnerTree] = { + if (myChildren === null) myChildren = { + val buf = new mutable.ListBuffer[OwnerTree] + reader.scanTrees(buf, end, if (tag === TEMPLATE) NoMemberDefs else AllDefs) + buf.toList + } + myChildren + } + + /** Find the owner of definition at `addr` */ + def findOwner(addr: Addr)(implicit ctx: Context): Symbol = { + def search(cs: List[OwnerTree], current: Symbol): Symbol = + try cs match { + case ot :: cs1 => + if (ot.addr.index === addr.index) { + assert(isSymbol(current), s"no symbol at $addr") + current + } + else if (ot.addr.index < addr.index && addr.index < ot.end.index) + search(ot.children, reader.symbolAt(ot.addr)) + else + search(cs1, current) + case Nil => + throw new TreeWithoutOwner + } + catch { + case ex: TreeWithoutOwner => + ctx.log(s"no owner for $addr among $cs%, 
%") // pickling.println + throw ex + } + try search(children, noSymbol).tap(owner => ctx.log(s"$addr within owner ${showSym(owner)} do:")) + catch { + case ex: TreeWithoutOwner => + ctx.log(s"ownerTree = $ownerTree") // pickling.println + throw ex + } + } + + override def toString: String = + s"OwnerTree(${addr.index}, ${end.index}, ${if (myChildren === null) "?" else myChildren.mkString(" ")})" + } + + def symFromNoCycle(noCycle: NoCycle): Symbol = symAtAddr(noCycle.at) +} + +object TreeUnpickler { + + sealed trait MaybeCycle + object MaybeCycle { + case class NoCycle(at: Addr, tflags: TastyFlagSet) extends MaybeCycle + case object Tombstone extends MaybeCycle + } + + /** An enumeration indicating which subtrees should be added to an OwnerTree. */ + type MemberDefMode = Int + final val MemberDefsOnly = 0 // add only member defs; skip other statements + final val NoMemberDefs = 1 // add only statements that are not member defs + final val AllDefs = 2 // add everything + + class TreeWithoutOwner extends Exception +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala new file mode 100644 index 000000000000..6162028ba6ac --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala @@ -0,0 +1,85 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.tasty.bridge + +import scala.tools.nsc.tasty.TastyUniverse + +/** Adds support for creating annotations from Trees */ +trait AnnotationOps { self: TastyUniverse => + import self.{symbolTable => u} + + trait ShowKind[T] { + def showKind(annot: String, t: T)(implicit ctx: Context): String + } + + object ShowKind { + implicit object ShowSymbol extends ShowKind[u.Symbol] { + def showKind(annot: String, t: u.Symbol)(implicit ctx: Context): String = s"$annot ${location(t)}" + } + implicit object ShowType extends ShowKind[u.Type] { + def showKind(annot: String, t: u.Type)(implicit ctx: Context): String = + s"type ${showType(t, wrap = false)} $annot of ${location(ctx.owner)}" + } + } + + private[bridge] final def mkAnnotation[T: ShowKind](tree: Tree, annotee: T)(implicit ctx: Context): u.Annotation = { + def go(tpargs: List[Type], args: List[List[Tree]], tree: Tree): u.Annotation = tree match { + case u.Select(u.New(tpt), u.nme.CONSTRUCTOR) => + val atp = if (tpargs.isEmpty) tpt.tpe else u.appliedType(tpt.tpe, tpargs) + if (args.lengthIs > 1) { + val soFar = s"@${atp.typeSymbol.name.toString}${args.map(_.mkString("(", ", ", ")")).mkString("")}" + u.reporter.warning(u.NoPosition, + "Implementation limitation: multiple argument lists on annotations are\n"+ + "currently not supported; ignoring arguments " + args(1) + " on\n"+ + s"${implicitly[ShowKind[T]].showKind(soFar, annotee)}") + } + u.AnnotationInfo(atp, args.headOption.getOrElse(Nil), Nil) + case u.TypeApply(pre, newTpArgs) if tpargs.isEmpty => + go(newTpArgs.map(_.tpe), args, pre) + case u.Apply(pre, Nil) => // skip the empty term param list + go(tpargs, args, pre) + case u.Apply(pre, newArgs) => + go(tpargs, newArgs :: args, pre) + case _ => + throw new Exception(s"unexpected annotation kind from TASTy: ${u.showRaw(tree)}") + } + tree match { + case u.New(tpt) => + // this is to handle incorrectly formatted annotations in dotty - https://github.com/scala/scala3/issues/10113 + 
u.AnnotationInfo(tpt.tpe, Nil, Nil) + case _ => + go(Nil, Nil, tree) + } + } + + sealed abstract class DeferredAnnotation(annotSym: Symbol) { + + protected def eager(annotee: Symbol)(implicit ctx: Context): u.AnnotationInfo + private[bridge] final def lzy(annotee: Symbol)(implicit ctx: Context): u.LazyAnnotationInfo = { + u.AnnotationInfo.lazily(annotSym, eager(annotee)) + } + } + + object DeferredAnnotation { + + def fromTree(annotSym: Symbol)(tree: Symbol => Context => Tree): DeferredAnnotation = { + new DeferredAnnotation(annotSym) { + protected final def eager(annotee: Symbol)(implicit ctx: Context): u.AnnotationInfo = { + val atree = tree(annotee)(ctx) + mkAnnotation(atree, annotee) + } + } + } + } + +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala new file mode 100644 index 000000000000..9d9220ab6bdd --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -0,0 +1,805 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.annotation._ +import scala.collection.mutable +import scala.reflect.internal.MissingRequirementError +import scala.reflect.io.AbstractFile +import scala.tools.tasty.{TastyName, TastyFlags}, TastyFlags._, TastyName.ObjectName +import scala.tools.nsc.tasty.{TastyUniverse, TastyModes, SafeEq}, TastyModes._ +import scala.tools.nsc.tasty.{cyan, yellow, magenta, blue, green} + +import scala.util.chaining._ + + +/**This contains the definition for `Context`, along with standard error throwing capabilities with user friendly + * formatted errors that can change their output depending on the context mode. 
+ */ +trait ContextOps { self: TastyUniverse => + import self.{symbolTable => u} + + private def describeOwner(owner: Symbol)(implicit ctx: Context): String = { + val kind = + if (owner.isOneOf(Param | ParamSetter)) { + if (owner.isType) "type parameter" + else "parameter" + } + else { + owner.kindString + } + s"$kind ${owner.nameString}" + } + + def boundsString(owner: Symbol)(implicit ctx: Context): String = { + if (owner.isType) s"bounds of $owner" + else if (owner.isOneOf(Param | ParamSetter)) s"parameter $owner" + else "result" + } + + @inline final def unsupportedTermTreeError[T](noun: String)(implicit ctx: Context): T = + unsupportedError( + if (ctx.mode.is(ReadAnnotation)) s"$noun in an annotation of ${describeOwner(ctx.owner)}; note that complex trees are not yet supported for Annotations" + else noun + ) + + @inline final def unsupportedError[T](noun: String)(implicit ctx: Context): T = { + typeError(unsupportedMessage(noun)) + } + + @inline final def unsupportedMessage(noun: String)(implicit ctx: Context): String = { + s"Unsupported Scala 3 $noun; found in ${location(ctx.globallyVisibleOwner)}." + } + + final def location(owner: Symbol)(implicit ctx: Context): String = { + if (!isSymbol(owner)) + "" + else if (owner.isClass || owner.isPackageClass || owner.isPackageObjectOrClass) + s"${owner.kindString} ${owner.fullNameString}" + else + s"${describeOwner(owner)} in ${location(owner.owner)}" + } + + @inline final def typeError[T](msg: String): T = throw new u.TypeError(msg) + + final def abortWith[T](msg: String): T = { + u.assert(false, msg) + ??? 
+ } + + @inline final def assert(assertion: Boolean, msg: => Any): Unit = + u.assert(assertion, msg) + + @inline final def assert(assertion: Boolean): Unit = + u.assert(assertion, "") + + private final def findObject(owner: Symbol, name: u.Name): Symbol = { + val scope = + if (owner != null && owner.isClass) owner.rawInfo.decls + else u.EmptyScope + val it = scope.lookupAll(name).withFilter(_.isModule) + if (it.hasNext) it.next() + else u.NoSymbol //throw new AssertionError(s"no module $name in ${location(owner)}") + } + + /**Perform an operation within a context that has the mode `IndexStats` will force any collected annotations + * afterwards */ + def inIndexStatsContext[T](op: Context => T)(implicit ctx: Context): T = { + val statsCtx = ctx.addMode(IndexStats) + try op(statsCtx) + finally statsCtx.initialContext.forceAnnotations() + } + + /** Perform an operation within a context that has the mode `InnerScope` will enter any inline methods afterwards */ + def inInnerScopeContext[T](op: Context => T)(implicit ctx: Context): T = { + val innerCtx = ctx.addMode(InnerScope) + try op(innerCtx) + finally innerCtx.initialContext.enterLatentDefs(innerCtx.owner) + } + + + /** an aggregate of `inInnerScopeContext` within `inIndexStatsContext` */ + def inIndexScopedStatsContext[T](op: Context => T)(implicit ctx: Context): T = { + inIndexStatsContext(inInnerScopeContext(op)(_))(ctx) + } + + /**Analyses critical annotations, critical annotations will be forced as they are necessary to + * the reading of TASTy. E.g. `scala.annotation.internal.Child` is a critical annotation that + * must be forced to add its first type argument as a sealed child. 
+ */ + private def analyseAnnotations(sym: Symbol)(implicit ctx: Context): Unit = { + + def inOwner[T](op: Context => T): T = op(ctx.withOwner(sym.owner)) + + def lookupChild(childTpe: Type): Symbol = { + val child = symOfType(childTpe) + assert(isSymbol(child), s"did not find symbol of sealed child ${showType(childTpe)}") + if (child.isClass || child.isJava && child.isJavaEnum) { + child + } + else { + assert(child.isModule, s"sealed child was not class, object, or java enum case ${showSym(child)}") + child.moduleClass + } + } + + var problematic: List[String] = Nil + + for (annot <- sym.annotations) { + if (annot.symbol === defn.ChildAnnot) { + val child = { + val child0 = lookupChild(annot.tpe.typeArgs.head) + if (child0 eq sym) { + // dotty represents a local sealed child of `C` with a child annotation + // that directly references `C`, this causes an infinite loop in + // `sealedDescendants`. See the tests: + // - test/tasty/neg/src-3/dottyi3149/dotty_i3149.scala + // - test/tasty/neg/src-2/Testdotty_i3149_fail.scala + // TODO [tasty] - fix assumption in compiler that sealed children cannot + // contain the parent class + ctx.newLocalSealedChildProxy(sym) + } + else { + child0 + } + } + ctx.log(s"adding sealed child ${showSym(child)} to ${showSym(sym)}") + sym.addChild(child) + } + if ((annot.symbol eq defn.TargetNameAnnotationClass) || + (annot.symbol eq defn.StaticMethodAnnotationClass)) { + problematic ::= inOwner { implicit ctx => + annot.completeInfo() // these should be safe to force + unsupportedMessage(s"annotation on $sym: @$annot") + } + } + if (annot.symbol === defn.AnnotationDefaultClass) { // Scala 3 has a different annotation for default values + import scala.reflect.internal.ModifierFlags + assert(sym.owner.hasAllFlags(ModifierFlags.JAVA | ModifierFlags.JAVA_ANNOTATION)) + sym.addAnnotation(u.definitions.AnnotationDefaultAttr) // Scala 2 expects this to be present + } + } + if (problematic.nonEmpty) { + 
sym.removeAnnotation(u.definitions.CompileTimeOnlyAttr) + sym.addAnnotation(u.definitions.CompileTimeOnlyAttr, u.Literal(u.Constant(problematic.head))) + } + } + + final case class TraceInfo[-T](query: String, qual: String, res: T => String, modifiers: List[String] = Nil) + + trait TraceFrame { + def parent: TraceFrame + def id: String + } + + /**Maintains state through traversal of a TASTy file, such as the outer scope of the defintion being traversed, the + * traversal mode, and the root owners and source path for the TASTy file. + * It also provides all operations for manipulation of the symbol table, such as creating/updating symbols and + * updating their types. + */ + sealed abstract class Context { thisCtx => + + protected implicit final def implyThisCtx: thisCtx.type = thisCtx + + /** JAVAattr is necessary to support pipelining in Zinc, we have to set Java erasure semantics if found. + * To support this we also need to support TASTy-only classpaths, see https://github.com/scala/scala3/pull/17594 + * For a test case, see test/tasty/run-pipelined + */ + def isJava: Boolean = mode.is(ReadJava) + + /**Associates the annotations with the symbol, and will force their evaluation if not reading statements.*/ + def adjustAnnotations(sym: Symbol, annots: List[DeferredAnnotation]): Unit = { + if (annots.nonEmpty) { + if (mode.is(IndexStats)) { + log(s"lazily adding annotations to ${showSym(sym)}") + initialContext.stageSymbolToForceAnnots(sym.setAnnotations(annots.map(_.lzy(sym)))) + } + else { + log(s"eagerly adding annotations to ${showSym(sym)}") + analyseAnnotations(sym.setAnnotations(annots.map(_.lzy(sym)))) + } + } + } + + final def globallyVisibleOwner: Symbol = owner.logicallyEnclosingMember + + final def ignoreAnnotations: Boolean = u.settings.YtastyNoAnnotations.value + + def requiresLatentEntry(decl: Symbol): Boolean = decl.isScala3Inline + + def canEnterOverload(decl: Symbol): Boolean = { + !(decl.isModule && isSymbol(findObject(thisCtx.owner, decl.name))) 
+ } + + final def log(str: => String): Unit = { + if (u.settings.YdebugTasty.value) { + logImpl(str) + } + } + + private final def logImpl(str: => String): Unit = u.reporter.echo( + pos = u.NoPosition, + msg = str + .linesIterator + .map(line => s"${blue(s"${showSymStable(classRoot)}:")} $line") + .mkString(System.lineSeparator) + ) + + @inline final def trace[T](info: => TraceInfo[T])(op: => T): T = { + + def addInfo(i: TraceInfo[T], op: => T)(frame: TraceFrame): T = { + val id0 = frame.id + val modStr = ( + if (i.modifiers.isEmpty) "" + else " " + green(i.modifiers.mkString("[", ",", "]")) + ) + logImpl(s"${yellow(id0)} ${cyan(s"<<< ${i.query}:")} ${magenta(i.qual)}$modStr") + op.tap(eval => logImpl(s"${yellow(id0)} ${cyan(s">>>")} ${magenta(i.res(eval))}$modStr")) + } + + if (u.settings.YdebugTasty.value) initialContext.subTrace(addInfo(info, op)) + else op + } + + /** Trace only when `-Vdebug` is set + */ + @inline final def traceV[T](info: => TraceInfo[T])(op: => T): T = { + if (u.settings.debug.value) { + trace(info)(op) + } + else op + } + + def owner: Symbol + def source: AbstractFile + def mode: TastyMode + + private final def loadingMirror: u.Mirror = u.mirrorThatLoaded(owner) + + final def requiredPackage(fullname: TastyName): Symbol = fullname match { + case TastyName.Root | TastyName.RootPkg => loadingMirror.RootPackage + case TastyName.EmptyPkg => loadingMirror.EmptyPackage + case fullname => + symOrDependencyError(isObject = false, isPackage = true, fullname)(loadingMirror.getPackage(encodeTermName(fullname).toString)) + } + + private def symOrDependencyError(isObject: Boolean, isPackage: Boolean, fullname: TastyName)(sym: => Symbol): Symbol = { + try sym + catch { + case _: MissingRequirementError => + val kind = if (isObject) "object" else if (isPackage) "package" else "class" + val addendum = if (mode.is(ReadAnnotation)) s" whilst reading annotation of $owner" else "" + val msg = + s"could not find $kind ${fullname.source}$addendum; perhaps it is 
missing from the classpath." + typeError(msg) + } + } + + final lazy val classRoot: Symbol = initialContext.topLevelClass + + final def newLocalDummy: Symbol = owner.newLocalDummy(u.NoPosition) + + final def newWildcard(info: Type): Symbol = + owner.newTypeParameter( + name = u.freshTypeName("_$")(u.currentFreshNameCreator), + pos = u.NoPosition, + newFlags = FlagSets.Creation.wildcard(isJava) + ).setInfo(info) + + final def newConstructor(owner: Symbol, info: Type): Symbol = unsafeNewSymbol( + owner = owner, + name = TastyName.Constructor, + flags = Method, + info = info + ) + + final def newLocalSealedChildProxy(cls: Symbol): Symbol = { + val tflags = Private | Local + unsafeNewClassSymbol( + owner = cls, + typeName = TastyName.SimpleName(cls.fullName('$') + "$$localSealedChildProxy").toTypeName, + flags = tflags, + info = defn.LocalSealedChildProxyInfo(cls, tflags), + privateWithin = u.NoSymbol + ) + } + + final def newLambdaParameter(tname: TastyName, flags: TastyFlagSet, idx: Int, infoDb: Int => Type): Symbol = { + val flags1 = flags | Param + unsafeNewSymbol( + owner = owner, + name = tname, + flags = flags1, + info = defn.LambdaParamInfo(flags1, idx, infoDb) + ) + } + + final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { + import TastyName.TypeName + + def isSameRoot(root: Symbol, selector: u.Name): Boolean = + (root.owner `eq` this.owner) && selector === root.name + + val selector = encodeTastyName(name) + roots.find(isSameRoot(_,selector)).map(found => + name match { + case TypeName(_: ObjectName) => found.linkedClassOfClass + case _ => found + } + ) + } + + final def findOuterClassTypeParameter(name: TastyName.TypeName): Symbol = { + val selector: u.Name = encodeTypeName(name) + owner.owner.typeParams.find(selector === _.name).getOrElse { + throw new AssertionError(s"${owner.owner} has no type params.") + } + } + + final def newRefinementSymbol(parent: Type, owner: Symbol, name: TastyName, tpe: Type): Symbol = { + val 
overridden = parent.member(encodeTastyName(name)) + val isOverride = isSymbol(overridden) + var flags = EmptyTastyFlags + if (isOverride && overridden.isType) flags |= Override + val info = { + if (name.isTermName) { + flags |= Method | Deferred + tpe match { + case u.TypeRef(_, u.definitions.ByNameParamClass, arg :: Nil) => // nullary method + u.NullaryMethodType(arg) + case u.PolyType(tparams, res) if res.paramss.isEmpty => u.PolyType(tparams, u.NullaryMethodType(res)) + case _:u.MethodType | _:u.PolyType => tpe + case _ => // val, which is not stable if structural. Dotty does not support vars + if (isOverride && overridden.is(Stable)) flags |= Stable + u.NullaryMethodType(tpe) + } + } + else { + if (tpe.isInstanceOf[u.TypeBounds]) flags |= Deferred + tpe + } + } + unsafeNewSymbol(owner, name, flags, info) + } + + /** Guards the creation of an object val by checking for an existing definition in the owner's scope + */ + final def delayCompletion(owner: Symbol, name: TastyName, completer: TastyCompleter, privateWithin: Symbol = noSymbol): Symbol = { + def default() = unsafeNewSymbol(owner, name, completer.tflags, completer, privateWithin) + if (completer.tflags.is(Object)) { + val sourceObject = findObject(owner, encodeTermName(name)) + if (isSymbol(sourceObject)) + redefineSymbol(sourceObject, completer, privateWithin) + else + default() + } + else { + default() + } + } + + /** Guards the creation of an object class by checking for an existing definition in the owner's scope + */ + final def delayClassCompletion(owner: Symbol, typeName: TastyName.TypeName, completer: TastyCompleter, privateWithin: Symbol): Symbol = { + def default() = unsafeNewClassSymbol(owner, typeName, completer.tflags, completer, privateWithin) + if (completer.tflags.is(Object)) { + val sourceObject = findObject(owner, encodeTermName(typeName.toTermName)) + if (isSymbol(sourceObject)) + redefineSymbol(sourceObject.objectImplementation, completer, privateWithin) + else + default() + } + else { 
+ default() + } + } + + def evict(sym: Symbol): Unit = { + if (isSymbol(sym)) { + sym.owner.rawInfo.decls.unlink(sym) + sym.info = u.NoType + } + } + + final def enterIfUnseen(sym: Symbol): Unit = { + val decl = declaringSymbolOf(sym) + if (mode.is(IndexScopedStats)) + initialContext.collectLatentEvidence(owner, decl) + if (!requiresLatentEntry(decl)) + enterIfUnseen0(owner.rawInfo.decls, decl) + } + + protected final def enterIfUnseen0(decls: u.Scope, decl: Symbol): Unit = { + if (allowsOverload(decl) || decl.isParamGetter) { + if (canEnterOverload(decl)) { + decls.enter(decl) + } + } + else { + decls.enterIfNew(decl) + } + } + + /** Unsafe to call for creation of a object val, prefer `delayCompletion` if info is a LazyType + */ + private def unsafeNewSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet, info: Type, privateWithin: Symbol = noSymbol): Symbol = + unsafeSetInfoAndPrivate(unsafeNewUntypedSymbol(owner, name, flags), info, privateWithin) + + /** Unsafe to call for creation of a object class, prefer `delayClassCompletion` if info is a LazyType + */ + private def unsafeNewClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet, info: Type, privateWithin: Symbol): Symbol = + unsafeSetInfoAndPrivate(unsafeNewUntypedClassSymbol(owner, typeName, flags), info, privateWithin) + + private final def unsafeNewUntypedSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Symbol = { + if (flags.isOneOf(Param | ParamSetter)) { + if (name.isTypeName) { + owner.newTypeParameter(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags, isJava)) + } + else { + if (owner.isClass && flags.is(FlagSets.FieldGetter)) { + val fieldFlags = flags &~ FlagSets.FieldGetter | FlagSets.LocalField + val termName = encodeTermName(name) + val getter = owner.newMethodSymbol(termName, u.NoPosition, newSymbolFlagSet(flags, isJava)) + val fieldSym = owner.newValue(termName, u.NoPosition, newSymbolFlagSet(fieldFlags, isJava)) + fieldSym.info = 
defn.CopyInfo(getter, fieldFlags) + owner.rawInfo.decls.enter(fieldSym) + getter + } + else { + owner.newValueParameter(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags, isJava)) + } + } + } + else if (flags.is(FlagSets.Creation.ObjectDef)) { + val isEnum = flags.is(FlagSets.SingletonEnum) + if (!isEnum) { + log(s"!!! visited module value $name first") + } + val module = owner.newModule(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags, isJava)) + module.moduleClass.info = + if (isEnum) defn.SingletonEnumClassInfo(module, flags) + else defn.DefaultInfo + module + } + else if (name.isTypeName) { + owner.newTypeSymbol(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags, isJava)) + } + else if (name === TastyName.Constructor) { + owner.newConstructor(u.NoPosition, newSymbolFlagSet(flags &~ Stable, isJava)) + } + else if (name === TastyName.MixinConstructor) { + owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, newSymbolFlagSet(flags &~ Stable, isJava)) + } + else if (isJava && flags.not(Method)) { + owner.newValue(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags, isJava)) + } + else { + owner.newMethodSymbol(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags, isJava)) + } + } + + private final def unsafeNewUntypedClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet): Symbol = { + if (flags.is(FlagSets.Creation.ObjectClassDef)) { + log(s"!!! 
visited module class $typeName first") + val module = owner.newModule(encodeTermName(typeName), u.NoPosition, FlagSets.Creation.initial(isJava)) + module.info = defn.DefaultInfo + module.moduleClass.flags = newSymbolFlagSet(flags, isJava) + module.moduleClass + } + else { + owner.newClassSymbol(encodeTypeName(typeName), u.NoPosition, newSymbolFlagSet(flags, isJava)) + } + } + + final def enterClassCompletion(): Symbol = { + val cls = globallyVisibleOwner.asClass + val assumedSelfSym = { + if (cls.is(Object) && cls.owner.isClass) { + cls.sourceModule + } + else { + u.NoSymbol + } + } + cls.info = u.ClassInfoType(cls.repr.parents, cls.repr.decls, assumedSelfSym) + cls + } + + /** sets up value class machinery */ + final def processParents(cls: Symbol, parentTypes: List[Type]): parentTypes.type = { + if (parentTypes.head.typeSymbolDirect === u.definitions.AnyValClass) { + // TODO [tasty]: please reconsider if there is some shared optimised logic that can be triggered instead. + withPhaseNoLater("extmethods") { _ => + // duplicated from scala.tools.nsc.transform.ExtensionMethods + cls.primaryConstructor.makeNotPrivate(noSymbol) + for (decl <- cls.info.decls if decl.isMethod) { + if (decl.isParamAccessor) decl.makeNotPrivate(cls) + if (!decl.isClassConstructor) { + val extensionMeth = decl.newExtensionMethodSymbol(cls.companion, u.NoPosition) + extensionMeth setInfo u.extensionMethInfo(cls, extensionMeth, decl.info, cls) + } + } + } + } + else if (isJava && parentTypes.exists(_.typeSymbolDirect === defn.JavaAnnotationClass)) { + import scala.reflect.internal.ModifierFlags + //sys.error(s"Java annotations are not supported in TASTy $cls: $parentTypes, ${parentTypes.map(_.typeSymbolDirect)}, ${parentTypes.map(_.typeSymbol)}") + cls.setFlag(ModifierFlags.JAVA_ANNOTATION) + cls.info.decls.enter(cls.newClassConstructor(u.NoPosition)) + } + parentTypes + } + + private[bridge] final def resetFlag0(symbol: Symbol, flags: u.FlagSet): symbol.type = + symbol.resetFlag(flags) + + 
final def completeEnumSingleton(sym: Symbol, tpe: Type): Unit = { + val moduleCls = sym.moduleClass + val moduleClsFlags = FlagSets.withAccess( + flags = FlagSets.Creation.ObjectClassDef, + inheritedAccess = sym.repr.tflags + ) + val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) + val ctor = newConstructor(moduleCls, selfTpe) + moduleCls.typeOfThis = selfTpe + moduleCls.flags = newSymbolFlagSet(moduleClsFlags, isJava = false) + moduleCls.info = defn.ClassInfoType(intersectionParts(tpe), ctor :: Nil, moduleCls) + moduleCls.privateWithin = sym.privateWithin + } + + final def redefineSymbol(symbol: Symbol, completer: TastyCompleter, privateWithin: Symbol): symbol.type = { + symbol.flags = newSymbolFlagSet(completer.tflags, isJava) + unsafeSetInfoAndPrivate(symbol, completer, privateWithin) + } + + private def unsafeSetInfoAndPrivate(symbol: Symbol, info: Type, privateWithin: Symbol): symbol.type = { + symbol.privateWithin = privateWithin + symbol.info = info + symbol + } + + /** Determines the owner of a refinement in the current context by the following steps: + * 1) if the owner of this context is a refinement symbol, we are in a recursive RefinedType. Ensure that the + * context owner is initialised with the parent and reuse it. + * 2) if the parent is also a RefinedType, then we will flatten the nested structure by reusing its owner + * 3) the parent is not a RefinedType, and we are not in an enclosing RefinedType, so create a new RefinementClassSymbol. 
+ * The Parent alongside the RefinedType owner are passed to the given operation + */ + final def enterRefinement[T](parent: Type)(op: Context => T): T = { + val clazz = owner match { + case enclosing: u.RefinementClassSymbol => + if (!enclosing.hasRawInfo) mkRefinedTypeWith(parent :: Nil, enclosing, u.newScope) + enclosing + case _ => parent match { + case nested: u.RefinedType => nested.typeSymbol + case _ => newRefinementClassSymbol + } + } + op(withOwner(clazz)) + } + + final def newRefinementClassSymbol: Symbol = owner.newRefinementClass(u.NoPosition) + + final def argumentCtx(fn: Tree): Context = + if (fn.symbol.isPrimaryConstructor) retractMode(ReadAnnotationCtor) else thisCtx + + final def setInfo(sym: Symbol, info: Type): Unit = sym.info = info + + final def markAsEnumSingleton(sym: Symbol): Unit = + sym.updateAttachment(u.DottyEnumSingleton) + + final def markAsOpaqueType(sym: Symbol, alias: Type): Unit = + sym.updateAttachment(new u.DottyOpaqueTypeAlias(alias)) + + final def onCompletionError[T](sym: Symbol): PartialFunction[Throwable, T] = { + case err: u.TypeError => + sym.info = u.ErrorType + throw err + } + + @tailrec + final def initialContext: InitialContext = this match { + case ctx: InitialContext => ctx + case ctx: FreshContext => ctx.outer.initialContext + } + + final def withOwner(owner: Symbol): Context = + if (owner `ne` this.owner) freshSymbol(owner) else this + + final def withNewScope: Context = + freshSymbol(newLocalDummy) + + final def freshSymbol(owner: Symbol): FreshContext = new FreshContext(owner, this, this.mode) + final def freshMode(mode: TastyMode): FreshContext = new FreshContext(this.owner, this, mode) + final def fresh: FreshContext = new FreshContext(this.owner, this, this.mode) + + final def addMode(mode: TastyMode): Context = + if (!this.mode.is(mode)) freshMode(this.mode | mode) + else this + + final def retractMode(mode: TastyMode): Context = + if (this.mode.isOneOf(mode)) freshMode(this.mode &~ mode) + else this + + 
final def withMode(mode: TastyMode): Context = + if (mode != this.mode) freshMode(mode) + else this + + final def withSource(source: AbstractFile): Context = + if (source `ne` this.source) fresh.atSource(source) + else this + + final def withPhaseNoLater[T](phase: String)(op: Context => T): T = + u.enteringPhaseNotLaterThan[T](u.findPhaseWithName(phase))(op(this)) + } + + + final class InitialContext(val topLevelClass: Symbol, val source: AbstractFile) extends Context { + def mode: TastyMode = EmptyTastyMode + def owner: Symbol = topLevelClass.owner + + private class TraceFrameImpl(val worker: Int, val parent: TraceFrameImpl) extends TraceFrame { + + var nextChild: Int = 0 + + val id: String = { + val buf = mutable.ArrayDeque.empty[Int] + var cur = this + while (cur.worker != -1) { + buf.prepend(cur.worker) + cur = cur.parent + } + buf.mkString("[", " ", ")") + } + + } + + private[this] var _trace: TraceFrameImpl = new TraceFrameImpl(worker = -1, parent = null) + + private[ContextOps] def subTrace[T](op: TraceFrame => T): T = { + val parent = _trace + val child = new TraceFrameImpl(worker = parent.nextChild, parent) + _trace = child + try op(child) + finally { + parent.nextChild += 1 + _trace = parent + } + } + + private[this] var mySymbolsToForceAnnots: mutable.LinkedHashSet[Symbol] = _ + + private[ContextOps] def stageSymbolToForceAnnots(sym: Symbol): Unit = { + if (sym.annotations.nonEmpty) { + if (mySymbolsToForceAnnots == null) { + mySymbolsToForceAnnots = mutable.LinkedHashSet.empty + } + mySymbolsToForceAnnots += sym + } + } + + /** Force any lazy annotations collected from declaration statements directly in this scope. + * + * It is important to call this *after* indexing statements in a scope, otherwise calling + * `ownertree.findOwner` can fail, this is because `ownertree.findOwner` cannot traverse a definition tree at + * a given address before a symbol has been registered to that address. 
+ */ + private[ContextOps] def forceAnnotations(): Unit = { + if (mySymbolsToForceAnnots != null) { + val toForce = mySymbolsToForceAnnots.toList + mySymbolsToForceAnnots.clear() + for (sym <- toForce) { + trace(traceForceAnnotations(sym)) { + analyseAnnotations(sym) + } + } + assert(mySymbolsToForceAnnots.isEmpty, "more symbols added while forcing") + } + } + + private def traceForceAnnotations(sym: Symbol) = TraceInfo[Unit]( + query = "forcing annotations of symbol", + qual = s"${showSym(sym)}", + res = _ => s"annotations were forced on ${showSym(sym)}" + ) + + private[this] var myInlineDefs: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null + private[this] var myMacros: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null + private[this] var myTraitParamAccessors: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null + + /** Collect evidence from definitions that is required by `enterLatentDefs`. */ + private[ContextOps] def collectLatentEvidence(owner: Symbol, sym: Symbol): Unit = { + + def macroMap() = { + if (myMacros == null) myMacros = mutable.HashMap.empty + myMacros + } + + def inlineMap() = { + if (myInlineDefs == null) myInlineDefs = mutable.HashMap.empty + myInlineDefs + } + + def traitParamAccessors() = { + if (myTraitParamAccessors == null) myTraitParamAccessors = mutable.HashMap.empty + myTraitParamAccessors + } + + def append(map: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]])(owner: Symbol, sym: Symbol) = + map.getOrElseUpdate(owner, mutable.ArrayBuffer.empty) += sym + + if (sym.isScala2Macro) append(macroMap())(owner, sym) + else if (sym.isScala3Inline) append(inlineMap())(owner, sym) + else if (sym.isTraitParamAccessor) append(traitParamAccessors())(owner, sym) + + } + + /**Should be called after indexing all symbols in the given owners scope. 
+ * + * Enters qualifying definitions into the given owners scope, according to the following rules: + * - an `inline macro` method (Scala 3 macro) without a corresponding `erased macro` method (Scala 2 macro). + * + * Reports illegal definitions: + * - trait constructors with parameters + * + * @param cls should be a class symbol associated with a non-empty scope + */ + private[ContextOps] def enterLatentDefs(cls: Symbol): Unit = { + + def macroDefs(cls: Symbol): Option[Iterable[Symbol]] = { + if (myMacros != null) myMacros.remove(cls) + else None + } + + def inlineDefs(cls: Symbol): Option[Iterable[Symbol]] = { + if (myInlineDefs != null) myInlineDefs.remove(cls) + else None + } + + def traitParamAccessors(cls: Symbol): Option[Iterable[Symbol]] = { + if (myTraitParamAccessors != null) myTraitParamAccessors.remove(cls) + else None + } + + def enterInlineDefs(cls: Symbol, decls: u.Scope): Unit = { + val macros = macroDefs(cls).getOrElse(Iterable.empty) + val defs = inlineDefs(cls).getOrElse(Iterable.empty) + + for (d <- defs if !macros.exists(_.name == d.name)) + enterIfUnseen0(decls, d) + } + + def reportParameterizedTrait(cls: Symbol, @unused decls: u.Scope): Unit = { + val traitParams = traitParamAccessors(cls).getOrElse(Iterable.empty) + if (traitParams.nonEmpty) { + log { + val parameters = traitParams.map(_.nameString) + s"parameterized trait ${parameters.mkString(s"${cls.nameString}(", ", ", ")")}" + } + cls.updateAttachment(new u.DottyParameterisedTrait(traitParams.toList)) + } + } + + val decls = cls.rawInfo.decls + enterInlineDefs(cls, decls) + reportParameterizedTrait(cls, decls) + + } + } + + final class FreshContext(val owner: Symbol, val outer: Context, val mode: TastyMode) extends Context { + private[this] var mySource: AbstractFile = null + def atSource(source: AbstractFile): this.type = { mySource = source ; this } + def source: AbstractFile = if (mySource == null) outer.source else mySource + } +} diff --git 
a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala new file mode 100644 index 000000000000..ebc7718a2162 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -0,0 +1,146 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.tools.tasty.TastyFlags._ +import scala.tools.nsc.tasty.TastyUniverse +import scala.reflect.internal.{Flags, ModifierFlags} + +/** Handles encoding of `TastyFlagSet` to `scala.reflect` flags and witnessing which flags do not map directly + * from TASTy. + */ +trait FlagOps { self: TastyUniverse => + import self.{symbolTable => u} + + object FlagSets { + + val TastyOnlyFlags: TastyFlagSet = ( + Erased | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent + | Enum | Infix | Open | ParamAlias | Invisible | Tracked + ) + + type FlagParser = TastyFlagSet => Context => TastyFlagSet + + val addDeferred: FlagParser = flags => _ => flags | Deferred + val parseMethod: FlagParser = { mods0 => implicit ctx => + var mods = EmptyTastyFlags + if (mods0.is(Erased)) erasedRefinementIsUnsupported[Unit] + if (mods0.isOneOf(Given | Implicit)) mods |= Implicit + mods + } + + object Creation { + val ObjectDef: TastyFlagSet = Object | Lazy | Final | Stable + val ObjectClassDef: TastyFlagSet = Object | Final + def wildcard(isJava: Boolean): u.FlagSet = newSymbolFlagSetFromEncoded(Flags.EXISTENTIAL, isJava) + def initial(isJava: Boolean): u.FlagSet = newSymbolFlagSet(EmptyTastyFlags, isJava) + } + def withAccess(flags: TastyFlagSet, inheritedAccess: TastyFlagSet): TastyFlagSet = + flags | (inheritedAccess & (Private | Local | Protected)) + val 
SingletonEnum: TastyFlagSet = Case | Static | Enum | Stable + val JavaEnumCase: TastyFlagSet = Static | Enum // beware overlap with Scala enum + val TermParamOrAccessor: TastyFlagSet = Param | ParamSetter + val FieldGetter: TastyFlagSet = FieldAccessor | Stable + val ParamGetter: TastyFlagSet = FieldGetter | ParamSetter + val LocalField: TastyFlagSet = Private | Local + val Scala2Macro: TastyFlagSet = Erased | Macro + } + + /** For purpose of symbol initialisation, encode a `TastyFlagSet` as a `symbolTable.FlagSet`. */ + private[bridge] def newSymbolFlagSet(tflags: TastyFlagSet, isJava: Boolean): u.FlagSet = + newSymbolFlagSetFromEncoded(unsafeEncodeTastyFlagSet(tflags, isJava), isJava) + + private[bridge] def newSymbolFlagSetFromEncoded(flags: u.FlagSet, isJava: Boolean): u.FlagSet = + flags | (if (isJava) ModifierFlags.JAVA else ModifierFlags.SCALA3X) + + implicit final class SymbolFlagOps(val sym: Symbol) { + def reset(tflags: TastyFlagSet)(implicit ctx: Context): sym.type = + ctx.resetFlag0(sym, unsafeEncodeTastyFlagSet(tflags, ctx.isJava)) + def isOneOf(mask: TastyFlagSet)(implicit ctx: Context): Boolean = + sym.hasFlag(unsafeEncodeTastyFlagSet(mask, ctx.isJava)) + def is(mask: TastyFlagSet)(implicit ctx: Context): Boolean = + sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask, ctx.isJava)) + def is(mask: TastyFlagSet, butNot: TastyFlagSet)(implicit ctx: Context): Boolean = + if (butNot.hasFlags) + is(mask) && not(butNot) + else + is(mask) + def not(mask: TastyFlagSet)(implicit ctx: Context): Boolean = + sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask, ctx.isJava)) + } + + /** encodes a `TastyFlagSet` as a `symbolTable.FlagSet`, the flags in `FlagSets.TastyOnlyFlags` are ignored. 
+ * @note Do not use directly to initialise symbol flags, use `newSymbolFlagSet` + */ + private def unsafeEncodeTastyFlagSet(tflags: TastyFlagSet, isJava: Boolean): u.FlagSet = { + import u.Flag + var flags = u.NoFlags + // JAVA FLAGS + if (isJava && tflags.is(Enum)) flags |= ModifierFlags.JAVA_ENUM + if (isJava && tflags.is(Trait)) flags |= ModifierFlags.INTERFACE | ModifierFlags.ABSTRACT + if (isJava && tflags.is(HasDefault)) flags |= ModifierFlags.JAVA_DEFAULTMETHOD + // STANDARD FLAGS + if (tflags.is(Private)) flags |= Flag.PRIVATE + if (tflags.is(Protected)) flags |= Flag.PROTECTED + if (tflags.is(AbsOverride)) flags |= Flag.ABSOVERRIDE + if (tflags.is(Abstract)) flags |= Flag.ABSTRACT + if (tflags.is(Final)) flags |= Flag.FINAL + if (tflags.is(Sealed)) flags |= Flag.SEALED + if (tflags.is(Case)) flags |= Flag.CASE + if (tflags.is(Implicit)) flags |= ModifierFlags.IMPLICIT + if (tflags.is(Lazy)) flags |= Flag.LAZY + if (tflags.is(Macro)) flags |= Flag.MACRO + if (tflags.is(Override)) flags |= Flag.OVERRIDE + if (tflags.is(Static)) flags |= ModifierFlags.STATIC + if (tflags.is(Object)) flags |= Flags.MODULE + if (tflags.is(Trait)) flags |= Flag.TRAIT + if (tflags.is(Local)) flags |= Flag.LOCAL + if (tflags.is(Synthetic)) flags |= Flag.SYNTHETIC + if (tflags.is(Artifact)) flags |= Flag.ARTIFACT + if (tflags.is(Mutable)) flags |= Flag.MUTABLE + if (tflags.is(FieldAccessor)) flags |= Flags.ACCESSOR + if (tflags.is(CaseAccessor)) flags |= Flag.CASEACCESSOR + if (tflags.is(Covariant)) flags |= Flag.COVARIANT + if (tflags.is(Contravariant)) flags |= Flag.CONTRAVARIANT + if (tflags.is(HasDefault) && !isJava) flags |= Flag.DEFAULTPARAM + if (tflags.is(Stable)) flags |= Flag.STABLE + if (tflags.is(ParamSetter)) flags |= Flag.PARAMACCESSOR + if (tflags.is(Param)) flags |= Flag.PARAM + if (tflags.is(Deferred)) flags |= Flag.DEFERRED + if (tflags.is(Method)) flags |= Flags.METHOD + flags + } + + def showTasty(flags: TastyFlagSet): String = { // keep up to date with 
FlagSets.TastyOnlyFlags + val tflags = flags & FlagSets.TastyOnlyFlags + if (!tflags) "EmptyTastyFlags" + else { + val sb = collection.mutable.ArrayBuffer.empty[String] + if (flags.is(Erased)) sb += "erased" + if (flags.is(Inline)) sb += "inline" + if (flags.is(InlineProxy)) sb += "" + if (flags.is(Opaque)) sb += "opaque" + if (flags.is(Extension)) sb += "" + if (flags.is(Given)) sb += "given" + if (flags.is(Exported)) sb += "" + if (flags.is(Transparent)) sb += "transparent" + if (flags.is(Enum)) sb += "enum" + if (flags.is(Open)) sb += "open" + if (flags.is(ParamAlias)) sb += "" + if (flags.is(Infix)) sb += "infix" + if (flags.is(Invisible)) sb += "" + if (flags.is(Tracked)) sb += "" + sb.mkString(" | ") + } + } +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/NameOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/NameOps.scala new file mode 100644 index 000000000000..b05cdfed9855 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/NameOps.scala @@ -0,0 +1,67 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.tools.tasty.TastyName + +import scala.tools.nsc.tasty.TastyUniverse + +/**This layer handles encoding of [[TastyName]] to [[symbolTable.Name]], escaping any specially handled names. + * Also contains definitions of names for handling special compiler internal symbols from TASTy. 
+ */ +trait NameOps { self: TastyUniverse => + import self.{symbolTable => u} + import TastyName._ + + private def encodeAsTermName(tastyName: TastyName): u.TermName = tastyName match { + case Empty => u.termNames.EMPTY + case Constructor => u.nme.CONSTRUCTOR + case EmptyPkg => u.nme.EMPTY_PACKAGE_NAME + case Root => u.nme.ROOT + case WildcardName() => u.nme.WILDCARD + case name => u.TermName(name.encoded) + } + + private def encodeAsTypeName(tastyName: TypeName): u.TypeName = tastyName match { + case RepeatedClass => u.tpnme.REPEATED_PARAM_CLASS_NAME + case name => encodeAsTermName(name.toTermName).toTypeName + } + + def encodeTypeName(name: TypeName): u.TypeName = encodeAsTypeName(name) + def encodeTermName(name: TastyName): u.TermName = encodeAsTermName(name.stripSignedPart) + + def encodeTastyName(name: TastyName): u.Name = name match { + case name: TypeName => encodeTypeName(name) + case name => encodeTermName(name) + } + + object tpnme { + + final val Object: String = "Object" + final val Or: String = "|" + final val And: String = "&" + final val AnyKind: String = "AnyKind" + final val TupleCons: String = "*:" + final val Tuple: String = "Tuple" + final val Matchable: String = "Matchable" + + val ErasedFunctionN = raw"ErasedFunction(\d+)".r + val ErasedContextFunctionN = raw"ErasedContextFunction(\d+)".r + val ContextFunctionN = raw"ContextFunction(\d+)".r + val FunctionN = raw"Function(\d+)".r + + final val ErrorType: TypeName = TastyName.SimpleName("").toTypeName + + } + +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala new file mode 100644 index 000000000000..8ff67983b336 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -0,0 +1,277 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.annotation._ +import scala.tools.nsc.tasty.{SafeEq, TastyUniverse, ForceKinds, TastyModes}, TastyModes._, ForceKinds._ +import scala.tools.tasty.{TastyName, Signature, TastyFlags}, TastyName.SignedName, Signature.MethodSignature, TastyFlags._ +import scala.tools.tasty.ErasedTypeRef + +import scala.tools.nsc.tasty.TreeUnpickler.MaybeCycle.NoCycle + +/**This layer deals with selecting a member symbol from a type using a `TastyName`, + * also contains factories for making type references to symbols. + */ +trait SymbolOps { self: TastyUniverse => + import self.{symbolTable => u} + + @inline final def noSymbol: Symbol = u.NoSymbol + @inline final def isSymbol(sym: Symbol): Boolean = sym ne u.NoSymbol + + final def allowsOverload(sym: Symbol) = ( // TODO [tasty]: taken from Namer. Added module symbols + (sym.isSourceMethod || sym.isModule) && sym.owner.isClass && !sym.isTopLevel + ) + + final def declaringSymbolOf(sym: Symbol): Symbol = + if (sym.isModuleClass) sym.sourceModule else sym + + private final def deepComplete(space: Type)(implicit ctx: Context): Unit = { + symOfType(space) match { + case u.NoSymbol => + ctx.log(s"could not retrieve symbol from type ${showType(space)}") + case termSym if termSym.isTerm => + if (termSym.is(Object)) { + termSym.ensureCompleted(SpaceForce) + termSym.moduleClass.ensureCompleted(DeepForce | SpaceForce) + } + else { + ctx.log(s"deep complete on non-module term ${showSym(termSym)}, not taking action") + } + case typeSym => + typeSym.ensureCompleted(SpaceForce) + } + } + + /** Fetch the symbol of a path type without forcing the symbol, + * `NoSymbol` if not a path. 
+ */ + @tailrec + private[bridge] final def symOfType(tpe: Type): Symbol = tpe match { + case tpe: u.TypeRef => tpe.sym + case tpe: u.SingleType => tpe.sym + case tpe: u.ThisType => tpe.sym + case tpe: u.ConstantType => symOfType(tpe.value.tpe) + case tpe: u.ClassInfoType => tpe.typeSymbol + case tpe: u.RefinedType0 => tpe.typeSymbol + case tpe: u.ExistentialType => symOfType(tpe.underlying) + case _ => u.NoSymbol + } + + implicit final class SymbolDecorator(val sym: Symbol) { + + def isScala3Inline: Boolean = repr.tflags.is(Inline) + def isScala2Macro: Boolean = repr.tflags.is(FlagSets.Scala2Macro) + def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.tflags.is(FieldAccessor|ParamSetter) + + def isParamGetter: Boolean = + sym.isMethod && repr.tflags.is(FlagSets.ParamGetter) + + /** A computed property that should only be called on a symbol which is known to have been initialised by the + * Tasty Unpickler and is not yet completed. + * + * @todo adapt callsites and type so that this property is more safe to call (barring mutation from uncontrolled code) + */ + def repr: TastyRepr = { + try sym.rawInfo.asInstanceOf[TastyRepr] + catch { + case _: ClassCastException => + val raw = u.showRaw(sym.rawInfo) + val tastyRepr = u.typeOf[TastyRepr] + throw new AssertionError(s"$sym is already completed. 
Expected $tastyRepr, is $raw.") + } + } + + def ensureCompleted(forceKinds: ForceKinds)(implicit ctx: Context): Unit = { + val raw = sym.rawInfo + if (raw.isInstanceOf[u.LazyType]) { + ctx.trace(traceForceInfo(sym, forceKinds)) { + sym.info + sym.annotations.foreach(_.completeInfo()) + } + } else { + assert(!raw.isInstanceOf[TastyRepr], s"${showSym(sym)} has incorrectly initialised info $raw") + } + } + + private def traceForceInfo( + sym: Symbol, + forceKinds: ForceKinds + )(implicit ctx: Context) = TraceInfo[Unit]( + query = "force symbol info", + qual = s"${showSym(sym)} in context ${showSym(ctx.owner)}", + res = _ => s"${showSym(sym)} was forced", + modifiers = forceKinds.describe + ) + + def objectImplementation: Symbol = sym.moduleClass + def sourceObject: Symbol = sym.sourceModule + def ref: Type = u.appliedType(sym, Nil) + def safeOwner: Symbol = if (sym.owner eq sym) sym else sym.owner + } + + /** Is this symbol annotated with `scala.annotation.experimental`? */ + def symIsExperimental(sym: Symbol) = sym.hasAnnotation(defn.ExperimentalAnnotationClass) + + /** if isConstructor, make sure it has one non-implicit parameter list */ + def normalizeIfConstructor(@unused owner: Symbol, termParamss: List[List[Symbol]], paramClauses: List[List[NoCycle]], isConstructor: Boolean): List[List[Symbol]] = + if (!isConstructor) termParamss + else + paramClauses match { + case (vparam :: _) :: _ if vparam.tflags.is(Implicit, butNot=Given) => Nil :: termParamss + case _ => + if (paramClauses.forall(paramClause => paramClause.nonEmpty && paramClause.head.tflags.is(Given))) { + termParamss :+ Nil + } else { + termParamss + } + } + + private[bridge] def lookupSymbol(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { + deepComplete(space) + tname match { + case SignedName(qual, sig, target) => lookupSigned(space, qual, sig.map(_.encode), target) + case _ => lookupSimple(space, tname) + } + } + + private def lookupSimple(space: Type, tname: TastyName)(implicit 
ctx: Context): Symbol = { + // TODO [tasty]: dotty uses accessibleDenot which asserts that `fetched.isAccessibleFrom(pre)`, + // or else filters for non private. + // There should be an investigation to see what code makes that false, and what is an equivalent check. + val member = { + if (tname.isTypeName) { + val asTerm = tname.toTermName + if (asTerm.isObjectName) space.member(encodeTermName(asTerm)).moduleClass + else { + val selector = encodeTastyName(tname) + def lookInTypeCtor = + space.typeConstructor.typeParams.filter(selector === _.name).headOption.getOrElse(noSymbol) + space.member(selector).orElse(lookInTypeCtor) + } + } + else { + val firstTry = space.member(encodeTermName(tname)) + if (firstTry.isOverloaded) firstTry.filter(!_.isPrivateLocal) + else firstTry + } + } + if (isSymbol(member) && hasType(member)) member + else if (ctx.isJava && space.termSymbol.isModule && !space.termSymbol.hasPackageFlag) { + // TODO [tasty]: remove this workaround for https://github.com/scala/scala3/issues/19619 + // Use heuristic that we are accidentally looking in the static scope for some class/object, + // when really we should be looking in the instance scope. In this case, we should be always looking for + // the class and not the object, so we convert to type name and look in the companion. + // + // we have the added bonus that we are looking for an inner class defined in the same TASTy file, + // so there should be no cross-file issues. 
+ val space0 = space.typeSymbol.companionClass.typeOfThis + val tname0 = tname.toTypeName + + val secondTry = lookupSymbol(space0, tname0) + if (secondTry.isClass) secondTry // avoid type parameters + else errorMissing(space0, tname0) + } + else errorMissing(space, tname) + } + + private def hasType(member: Symbol)(implicit ctx: Context) = { + ctx.mode.is(ReadAnnotation) || ctx.mode.is(ReadMacro) && (member.info `ne` u.NoType) || (member.rawInfo `ne` u.NoType) + } + + private def errorMissing[T](space: Type, tname: TastyName)(implicit ctx: Context) = { + val kind = if (tname.isTypeName) "type" else "term" + def typeToString(tpe: Type) = { + def isPath(pre: Type) = + pre.isInstanceOf[u.SingletonType] || pre.termSymbol.isModule || pre.typeSymbol.isModuleClass + def inner(sb: StringBuilder, tpe: Type): StringBuilder = tpe match { + case u.ThisType(cls) => + val isPackage = cls.hasPackageFlag + sb append cls.fullNameString append (if (isPackage) "" else ".this") + case u.SingleType(pre, sym) => + if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.nameString append ".type" + else inner(sb, pre) append '.' append sym.nameString append ".type" + case u.TypeRef(pre, sym, _) => + val sep = if (isPath(pre)) "." 
else "#" + if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.nameString + else inner(sb, pre) append sep append sym.nameString + case tpe => sb append tpe + } + inner(new StringBuilder(), tpe).toString + } + def addendum(name: String) = { + if (ctx.mode.is(ReadParents)) s"$kind in parents of ${location(if (ctx.owner.isLocalDummy) ctx.owner.owner else ctx.owner)}: $name" + else s"$kind required by ${location(ctx.owner)}: $name" + } + val missing = addendum(s"${typeToString(space)}.$tname") + typeError(s"can't find $missing; perhaps it is missing from the classpath.") + } + + private def lookupSigned( + space: Type, + qual: TastyName, + sig: MethodSignature[ErasedTypeRef], + target: TastyName + )(implicit ctx: Context): Symbol = { + if (target ne qual) { + unsupportedError(s"selection of method $qual with @targetName(" + '"' + target + '"' + ")") + } + else { + ctx.trace(traceOverload(space, qual, sig)) { + val member = space.member(encodeTermName(qual)) + if (!(isSymbol(member) && hasType(member))) errorMissing(space, qual) + val (tyParamCount, paramRefs) = { + val (tyParamCounts, params) = sig.params.partitionMap(identity) + if (tyParamCounts.length > 1) { + unsupportedError(s"method with unmergeable type parameters: $qual") + } + (tyParamCounts.headOption.getOrElse(0), params) + } + def compareSym(sym: Symbol): Boolean = sym match { + case sym: u.MethodSymbol => + sym.ensureCompleted(OverloadedSym) + // TODO [tasty]: we should cache signatures for symbols and compare against `sig` + val meth0 = u.unwrapWrapperTypes(sym.tpe.asSeenFrom(space, sym.owner)) + val paramSyms = meth0.paramss.flatten + val resTpe = meth0.finalResultType + val sameParamSize = paramSyms.length === paramRefs.length + def sameTyParamSize = tyParamCount === ({ + // the signature of a class/mixin constructor includes + // type parameters, in nsc these come from the parent. 
+ val tyParamOwner = if (qual.isConstructorName) member.owner else sym + tyParamOwner.typeParams.length + }) + def sameParams = paramSyms.lazyZip(paramRefs).forall({ + case (paramSym, paramRef) => sameErasure(sym)(paramSym.tpe, paramRef) + }) + sameParamSize && sameTyParamSize && sameParams && sameErasure(sym)(resTpe, sig.result) + case _ => + ctx.log(s"""! member[$space]("$qual") ${showSym(sym)} is not a method""") + false + } + member.asTerm.alternatives.find(compareSym).getOrElse( + typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}") + ) + } + } + } + + private def traceOverload(space: Type, tname: TastyName, sig: MethodSignature[ErasedTypeRef]) = TraceInfo[Symbol]( + query = s"looking for overload", + qual = s"symbolOf[$space] @@ $tname: ${showSig(sig)}", + res = overload => s"selected overload ${showSym(overload)}" + ) + + def showSig(sig: MethodSignature[ErasedTypeRef]): String = sig.map(_.signature).show + def showSym(sym: Symbol): String = s"`(#${sym.id}) ${sym.accurateKindString} ${sym.name}`" + def showSymStable(sym: Symbol): String = s"#[${sym.id}, ${sym.name}]" +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala new file mode 100644 index 000000000000..a3021c417849 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.tools.nsc +import nsc.symtab, nsc.tasty.TastyUniverse + +/**The base of the `TastyUniverse` cake, providing aliases to types from `scala.reflect` at the same import level + * as new TASTy specific types. 
+ */ +abstract class TastyCore { self: TastyUniverse => + import self.{symbolTable => u} + + // Compiler Entry Point + type SymbolTable <: symtab.SymbolTable { def settings: nsc.Settings } + val symbolTable: SymbolTable + + // Misc + type Symbol = u.Symbol + type Type = u.Type + type Tree = u.Tree + type Constant = u.Constant + + private val Identity = (x: Any) => x + + def id[T]: T => T = Identity.asInstanceOf[T => T] + +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala new file mode 100644 index 000000000000..957fa37be8d6 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -0,0 +1,204 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.tools.nsc.tasty.{TastyUniverse, TastyModes}, TastyModes._ + +import scala.tools.tasty.TastyName +import scala.reflect.internal.Flags + +/**This layer adds factories that construct typed `scala.reflect` Trees in the shapes that TASTy expects + */ +trait TreeOps { self: TastyUniverse => + import self.{symbolTable => u} + + object untpd { + final val EmptyTree: Tree = u.EmptyTree + } + + private class TastyIdent(val tname: TastyName) extends u.Ident(encodeTastyName(tname)) + + implicit class TreeDecorator(val tree: Tree) { + def typeIdent: TastyName.TypeName = tree match { + case tree: TastyIdent => tree.tname.toTypeName + case _ => TastyName.EmptyTpe + } + } + + def showTree(tree: Tree)(implicit ctx: Context): String = { + // here we want to avoid forcing the symbols of type trees, + // so instead substitute the type tree with an Identifier + // of the `showType`, which does not force. 
+ val tree1 = tree.transform(new u.Transformer { + override def transform(tree: Tree) = tree match { + case tree: u.TypeTree => u.Ident(s"${showType(tree.tpe, wrap = false)}") // ident prints its name directly + case tree => super.transform(tree) + } + }) + u.show(tree1) + } + + object tpd { + + @inline final def Constant(value: Any): Constant = + u.Constant(value) + + @inline final def Ident(name: TastyName)(tpe: Type): Tree = + new TastyIdent(name).setType(tpe) + + @inline final def Select(qual: Tree, name: TastyName)(implicit ctx: Context): Tree = + selectImpl(qual, name)(implicit ctx => lookupTypeFrom(qual.tpe)(qual.tpe, name)) + + @inline final def Select(owner: Type)(qual: Tree, name: TastyName)(implicit ctx: Context): Tree = + selectImpl(qual, name)(implicit ctx => lookupTypeFrom(owner)(qual.tpe, name)) + + private def selectImpl(qual: Tree, name: TastyName)(lookup: Context => Type)(implicit ctx: Context): Tree = { + + def selectName(qual: Tree, name: TastyName)(lookup: Context => Type) = + u.Select(qual, encodeTastyName(name)).setType(lookup(ctx)) + + def selectCtor(qual: Tree) = + u.Select(qual, u.nme.CONSTRUCTOR).setType(qual.tpe.typeSymbol.primaryConstructor.tpe) + + if (ctx.mode.is(ReadAnnotationCtor) && name.isSignedConstructor) + selectCtor(qual) + else + selectName(qual, name)(lookup) + + } + + @inline final def This(qual: TastyName.TypeName)(tpe: Type): Tree = + u.This(encodeTypeName(qual)).setType(tpe) + + @inline final def New(tpt: Tree): Tree = + u.New(tpt).setType(safeClassType(tpt.tpe)) + + @inline final def SingletonTypeTree(ref: Tree): Tree = + u.SingletonTypeTree(ref).setType(ref.tpe) + + @inline final def ByNameTypeTree(arg: Tree): Tree = + u.gen.mkFunctionTypeTree(Nil, arg).setType(u.definitions.byNameType(arg.tpe)) + + @inline final def NamedArg(name: TastyName, value: Tree): Tree = + u.NamedArg(u.Ident(encodeTastyName(name)), value).setType(value.tpe) + + def Super(qual: Tree, mixId: TastyName.TypeName)(mixTpe: Type): Tree = { + val 
owntype = ( + if (!mixId.isEmpty) mixTpe + else u.intersectionType(qual.tpe.parents) + ) + u.Super(qual, encodeTypeName(mixId)).setType(u.SuperType(qual.tpe, owntype)) + } + + def PathTree(tpe: Type): Tree = tpe match { + case _:u.TypeRef | _:u.SingleType => u.TypeTree(tpe) + case path: u.ThisType => u.This(path.sym.name.toTypeName).setType(path) + case path: u.ConstantType => u.Literal(path.value).setType(tpe) + case x => throw new MatchError(x) + } + + @inline final def TypeTree(tp: Type): Tree = u.TypeTree(tp) + + @inline final def LambdaTypeTree(tparams: List[Symbol], body: Tree): Tree = + u.TypeTree(defn.LambdaFromParams(tparams, body.tpe)) + + def Macro(impl: Tree): Tree = impl match { + case tree @ u.TypeApply(qual, args) => + u.TypeApply(Macro(qual), args).setType(tree.tpe) + case tree @ u.Select(pre, sel) => + val sym = if (sel.isTermName) tree.tpe.termSymbol else tree.tpe.typeSymbol + u.Select(Macro(pre), sym).setType(tree.tpe) + case tree: u.TypeTree if tree.tpe.prefix !== u.NoType => + val sym = tree.tpe match { + case u.SingleType(_, sym) => sym + case u.TypeRef(_, sym, _) => sym + case u.ThisType(sym) => sym + case x => throw new MatchError(x) + } + if (tree.tpe.prefix === u.NoPrefix && (sym.hasFlag(Flags.PACKAGE) && !sym.isPackageObjectOrClass || sym.isLocalToBlock)) { + if (sym.isLocalToBlock) u.Ident(sym).setType(tree.tpe) + else u.This(sym).setType(tree.tpe) + } + else { + u.Select(Macro(u.TypeTree(tree.tpe.prefix)), sym).setType(tree.tpe) + } + case tree => + tree + } + + @inline final def Typed(expr: Tree, tpt: Tree): Tree = u.Typed(expr, tpt).setType(tpt.tpe) + + @inline final def Apply(fun: Tree, args: List[Tree]): Tree = u.Apply(fun, args).setType(fnResult(fun.tpe)) + + def TypeApply(fun: Tree, args: List[Tree]): Tree = { + if (u.definitions.isPredefMemberNamed(fun.tpe.termSymbol, u.TermName("classOf"))) { + assert(args.length == 1 && !fun.tpe.termSymbol.isOverloaded) + u.Literal(Constant(args.head.tpe)) + } + else { + u.TypeApply(fun, 
args).setType(tyconResult(fun.tpe, args.map(_.tpe))) + } + } + + def If(cond: Tree, thenp: Tree, elsep: Tree): Tree = + u.If(cond, thenp, elsep).setType( + if (elsep === u.EmptyTree) u.definitions.UnitTpe + else u.lub(thenp.tpe :: elsep.tpe :: Nil) + ) + + @inline final def SeqLiteral(trees: List[Tree], tpt: Tree): Tree = u.ArrayValue(tpt, trees).setType(tpt.tpe) + + def AppliedTypeTree(tpt: Tree, args: List[Tree])(implicit ctx: Context): Tree = { + if (tpt.tpe === AndTpe) { + u.CompoundTypeTree(u.Template(args, u.noSelfType, Nil)).setType(u.intersectionType(args.map(_.tpe))) + } + else if (ctx.isJava && u.definitions.isScalaRepeatedParamType(tpt.tpe)) { + u.AppliedTypeTree(tpt, args).setType(u.definitions.javaRepeatedType(args.head.tpe)) + } + else { + u.AppliedTypeTree(tpt, args).setType(defn.AppliedType(tpt.tpe, args.map(_.tpe))) + } + } + + def Annotated(tpt: Tree, annot: Tree)(implicit ctx: Context): Tree = { + if (annot.tpe.typeSymbol === defn.RepeatedAnnot + && tpt.tpe.typeSymbol.isSubClass(u.definitions.SeqClass) + && tpt.tpe.typeArgs.length == 1) { + if (ctx.isJava) tpd.TypeTree(u.definitions.javaRepeatedType(tpt.tpe.typeArgs.head)) + else tpd.TypeTree(u.definitions.scalaRepeatedType(tpt.tpe.typeArgs.head)) + } + else { + u.Annotated(annot, tpt).setType(defn.AnnotatedType(tpt.tpe, annot)) + } + } + + def RefinedTypeTree(parent: Tree, decls: List[Tree], refinedCls: Symbol)(implicit ctx: Context): Tree = { + refinedCls.info.parents.head match { + case defn.PolyFunctionType() => + val polyType = refinedCls.info.decls.map(_.tpe).headOption.fold(defn.NoType)(x => x) + polyFuncIsUnsupported(polyType) + case _ => + u.CompoundTypeTree(u.Template(parent :: Nil, u.noSelfType, decls)).setType(refinedCls.info) + } + } + + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree): Tree = { + val tpe = alias match { + case untpd.EmptyTree => u.TypeBounds(lo.tpe, hi.tpe) + case alias => new OpaqueTypeBounds(lo.tpe, hi.tpe, alias.tpe) + } + u.TypeBoundsTree(lo, hi).setType(tpe) 
+ } + } + +} diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala new file mode 100644 index 000000000000..b10929e769af --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -0,0 +1,932 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty.bridge + +import scala.tools.nsc.tasty.{TastyUniverse, SafeEq, TastyModes, ForceKinds}, TastyModes._, ForceKinds._ + +import scala.tools.tasty.{TastyName, ErasedTypeRef, TastyFlags}, TastyFlags._ + +import scala.reflect.internal.Variance +import scala.util.chaining._ + +import scala.collection.mutable +import scala.collection.immutable.ArraySeq + +import scala.reflect.internal.Flags + +/**This layer adds factories that construct `scala.reflect` Types in the shapes that TASTy expects. + * Additionally provides operations to select a type from a type, or a type from a type with an additional prefix, + * using a `TastyName`. 
+ */ +trait TypeOps { self: TastyUniverse => + import self.{symbolTable => u} + + @inline final def mergeableParams(t: Type, u: Type): Boolean = + t.typeParams.size == u.typeParams.size + + /** `*:` erases to either TupleXXL or Product */ + @inline final def genTupleIsUnsupported[T](name: String)(implicit ctx: Context): T = unsupportedError(s"generic tuple type $name in ${boundsString(ctx.owner)}") + @inline final def fnIsUnsupported[T](kind: String => String, tpeStr: String)(implicit ctx: Context): T = unsupportedError(s"${kind("function type")} in ${boundsString(ctx.owner)}: $tpeStr") + @inline final def bigFnIsUnsupported[T](tpeStr: String)(implicit ctx: Context): T = fnIsUnsupported(ft => s"$ft with more than 22 parameters", tpeStr) + @inline final def ctxFnIsUnsupported[T](tpeStr: String)(implicit ctx: Context): T = fnIsUnsupported(ft => s"context $ft", tpeStr) + @inline final def erasedFnIsUnsupported[T](tpeStr: String)(implicit ctx: Context): T = fnIsUnsupported(ft => s"erased $ft", tpeStr) + @inline final def erasedCtxFnIsUnsupported[T](tpeStr: String)(implicit ctx: Context): T = fnIsUnsupported(ft => s"erased context $ft", tpeStr) + @inline final def unionIsUnsupported[T](implicit ctx: Context): T = unsupportedError(s"union in ${boundsString(ctx.owner)}") + @inline final def matchTypeIsUnsupported[T](implicit ctx: Context): T = unsupportedError(s"match type in ${boundsString(ctx.owner)}") + @inline final def erasedRefinementIsUnsupported[T](implicit ctx: Context): T = unsupportedError(s"erased modifier in refinement of ${ctx.owner}") + @inline final def polyFuncIsUnsupported[T](tpe: Type)(implicit ctx: Context): T = unsupportedError(s"polymorphic function type in ${boundsString(ctx.owner)}: $tpe") + + @inline final def isConstantType(tpe: Type): Boolean = tpe.isInstanceOf[u.ConstantType] + + @inline final def isTypeType(tpe: Type): Boolean = !((tpe `eq` u.ErrorType) || (tpe `eq` u.NoType)) + + private object UnmergablePolyBounds { + def unapply(tpe: 
u.TypeBounds): Boolean = tpe match { + case u.TypeBounds(lo: u.PolyType, hi: u.PolyType) => !mergeableParams(lo,hi) + case _ => false + } + } + + def lzyShow(tpe: Type): String = { + val sym = symOfType(tpe) + if (isSymbol(sym)) { + val args = tpe.typeArgs + s"${sym.fullName}${if (args.nonEmpty) args.map(lzyShow).mkString("[", ",", "]") else ""}" + } + else { + s"${tpe.typeSymbolDirect.fullName}" + } + } + + def showType(tpe: Type, wrap: Boolean = true)(implicit ctx: Context): String = { + def prefixed(prefix: String)(op: => String) = { + val raw = op + if (wrap) s"""$prefix"$raw"""" + else raw + } + def parameterised(tparams: List[Symbol], prefix: String)(f: String => String) = prefixed(prefix) { + f(if (tparams.isEmpty) "" else tparams.map(p => s"${p.name}").mkString("[", ", ", "]")) + } + def cls(tparams: List[Symbol], tpe: u.ClassInfoType) = parameterised(tparams, "cls") { paramStr => + s"$paramStr${tpe.typeSymbol.fullName}$paramStr" + } + def meth(tparams: List[Symbol], tpe: u.MethodType) = parameterised(tparams, "meth") { paramStr => + s"$paramStr$tpe" + } + def preStr(pre: Type): String = { + val preSym = symOfType(pre) + val thisStr = { + if (pre.isInstanceOf[u.ThisType] && !pre.typeSymbol.isPackageClass && !pre.typeSymbol.isModuleClass) + ".this" + else + "" + } + if (isSymbol(preSym)) s"${preSym.fullName}$thisStr." 
else "" + } + tpe match { + case tpe: u.ClassInfoType => cls(Nil, tpe) + case u.PolyType(tparams, tpe: u.ClassInfoType) => cls(tparams, tpe) + case u.PolyType(tparams, tpe: u.MethodType) => meth(tparams, tpe) + case tpe: u.MethodType => meth(Nil, tpe) + case tpe: u.ThisType => prefixed("path") { s"${tpe.sym.fullName}.this" } + + case tpe: u.SingleType => + prefixed("path") { + if (tpe.sym.isModule) tpe.sym.fullName + ".type" + else s"${preStr(tpe.pre)}${tpe.sym.name}.type" + } + + case tpe: u.TypeRef => + if (tpe.sym.is(Object)) prefixed("path") { + s"${tpe.sym.fullName}.type" + } + else prefixed("tpelazy") { + val pre = preStr(tpe.pre) + val argsStrs = tpe.args.map(showType(_, wrap = false)) + val argsStr = if (argsStrs.nonEmpty) argsStrs.mkString("[", ", ", "]") else "" + s"$pre${tpe.sym.name}$argsStr" + } + + case tpe: u.TypeBounds => prefixed("tpebounds") { s"$tpe"} + + case tpe => prefixed("tpe") { s"$tpe" } + } + } + + def fnResult(fn: Type): Type = fn.dealiasWiden.finalResultType + def tyconResult(tycon: Type, args: List[Type]): Type = tycon.resultType.substituteTypes(tycon.typeParams, args) + + /** return a type that can be used as a class type, e.g. 
in parents of another class, or as the type of new */ + def safeClassType(tpe: Type): Type = tpe match { + case tpe: LambdaPolyType => tpe.toNested + case tpe => tpe + } + + def emptyTypeBounds: Type = u.TypeBounds.empty + + def intersectionParts(tpe: Type): List[Type] = tpe match { + case tpe: u.RefinedType => tpe.parents + case tpe => tpe :: Nil + } + + object defn { + + final val ChildAnnot: Symbol = u.definitions.ChildAnnotationClass + final val RepeatedAnnot: Symbol = u.definitions.RepeatedAnnotationClass + final val TargetNameAnnotationClass: Symbol = u.definitions.TargetNameAnnotationClass + final val StaticMethodAnnotationClass: Symbol = u.definitions.StaticMethodAnnotationClass + final val ExperimentalAnnotationClass: Symbol = u.definitions.ExperimentalAnnotationClass + final val AnnotationDefaultClass: Symbol = u.definitions.AnnotationDefaultClass + final val JavaAnnotationClass: Symbol = u.definitions.JavaAnnotationClass + + object PolyFunctionType { + + val PolyFunctionClass: Symbol = u.definitions.PolyFunctionClass + + def unapply(tpe: Type): Boolean = tpe match { + case polyfnRef: u.TypeRef => polyfnRef.sym eq PolyFunctionClass + case _ => false + } + + } + + final val NoType: Type = u.NoType + final val NoPrefix: Type = u.NoPrefix + + final val ObjectTpe: Type = u.definitions.ObjectTpe + final val ObjectTpeJava: Type = u.definitions.ObjectTpeJava + + def adjustParent(tp: Type)(implicit ctx: Context): Type = { + val tpe = tp.dealias + if (ctx.isJava && (tpe eq ObjectTpeJava)) ObjectTpe + else if (tpe.typeSymbolDirect === u.definitions.ObjectClass) u.definitions.AnyRefTpe + else tpe + } + + /** Represents a symbol that has been initialised by TastyUnpickler, but can not be in a state of completion + * because its definition has not yet been seen. 
+ */ + object DefaultInfo extends TastyRepr { + override def isTrivial: Boolean = true + def tflags: TastyFlagSet = EmptyTastyFlags + } + + private[bridge] def CopyInfo(underlying: u.TermSymbol, tflags: TastyFlagSet)(implicit ctx: Context): TastyRepr = + new CopyCompleter(underlying, tflags) + + private[bridge] def SingletonEnumClassInfo( + enumValue: u.TermSymbol, + originalFlagSet: TastyFlagSet + )(implicit ctx: Context): TastyRepr = + new SingletonEnumModuleClassCompleter(enumValue, originalFlagSet) + + private[bridge] def LocalSealedChildProxyInfo(parent: Symbol, tflags: TastyFlagSet)(implicit ctx: Context): Type = + new LocalSealedChildProxyCompleter(parent, tflags) + + private[bridge] def LambdaParamInfo( + tflags: TastyFlagSet, + idx: Int, + infoDb: Int => Type + )(implicit ctx: Context): Type = + new LambdaParamCompleter(tflags, idx, infoDb) + + def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { + case u.PolyType(tparams, tpe) => + val (bounds, alias) = OpaqueTypeToBounds(tpe) + (u.PolyType(tparams, bounds), u.PolyType(tparams, alias)) + + case tpe: OpaqueTypeBounds => (tpe, tpe.alias) + + case _ => + // An alias opaque type is defined as IDENTtpt with a simple type, so has no bounds + (u.TypeBounds.empty, tpe) + + } + def ByNameType(arg: Type): Type = u.definitions.byNameType(arg) + def TypeBounds(lo: Type, hi: Type): Type = u.TypeBounds.apply(lo, hi) + def InitialTypeInfo: Type = u.TypeBounds.empty + def SingleType(pre: Type, sym: Symbol): Type = u.singleType(pre, sym) + def ExprType(res: Type): Type = u.NullaryMethodType(res) + def InlineExprType(res: Type): Type = res match { + case u.ConstantType(value) => u.NullaryMethodType(u.FoldableConstantType(value)) + case x => throw new MatchError(x) + } + def PolyType(params: List[Symbol], res: Type): Type = u.PolyType(params, res) + def ClassInfoType(parents: List[Type], clazz: Symbol): Type = u.ClassInfoType(parents, clazz.rawInfo.decls, clazz.asType) + def ClassInfoType(parents: List[Type], 
decls: List[Symbol], clazz: Symbol): Type = u.ClassInfoType(parents, u.newScopeWith(decls:_*), clazz.asType) + def ThisType(tpe: Type): Type = u.ThisType(symOfType(tpe)) + def ConstantType(c: Constant): Type = u.ConstantType(c) + def IntersectionType(tps: Type*): Type = u.intersectionType(tps.toList) + def IntersectionType(tps: List[Type]): Type = u.intersectionType(tps) + + def AnnotatedType(tpe: Type, annot: Tree)(implicit ctx: Context): Type = tpe match { + case u.AnnotatedType(annots, tpe) => u.AnnotatedType(annots :+ mkAnnotation(annot, tpe), tpe) + case _ => u.AnnotatedType(mkAnnotation(annot, tpe) :: Nil , tpe) + } + + def SuperType(thisTpe: Type, superTpe: Type): Type = u.SuperType(thisTpe, superTpe) + def LambdaFromParams(typeParams: List[Symbol], ret: Type): Type = u.PolyType(typeParams, lambdaResultType(ret)) + def RecType(run: RecType => Type)(implicit ctx: Context): Type = new RecType(run).parent + def RecThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis + + /** The method type corresponding to given parameters and result type */ + def DefDefType(typeParams: List[Symbol], valueParamss: List[List[Symbol]], resultType: Type): Type = { + var tpe = valueParamss.foldRight(resultType)((ts, res) => u.MethodType(ts, res)) + if (valueParamss.isEmpty) tpe = u.NullaryMethodType(tpe) + if (typeParams.nonEmpty) tpe = u.PolyType(typeParams, tpe) + tpe + } + + def RefinedType(parent: Type, name: TastyName, refinedCls: Symbol, tpe: Type)(implicit ctx: Context): Type = { + val decl = ctx.newRefinementSymbol(parent, refinedCls, name, tpe) + parent match { + case defn.PolyFunctionType() => + polyFuncIsUnsupported(tpe) + case nested: u.RefinedType => + mkRefinedTypeWith(nested.parents, refinedCls, nested.decls.cloneScope.tap(_.enter(decl))) + case _ => + mkRefinedTypeWith(parent :: Nil, refinedCls, u.newScopeWith(decl)) + } + } + + def NormalisedBounds(tpe: Type, sym: Symbol)(implicit ctx: Context): Type = tpe match { + case bounds @ UnmergablePolyBounds() => + 
unsupportedError(s"diverging higher kinded bounds: $sym$bounds") + case tpe: u.TypeBounds => normaliseBounds(tpe) + case tpe => tpe + } + + def AppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type = { + + def formatFnType(arrow: String, isErased: Boolean, arity: Int, args: List[Type]): String = { + val len = args.length + assert(len == arity + 1) // tasty should be type checked already + val res = args.last + val params = args.init + val paramsBody = { + val body = params.mkString(",") + if (isErased) s"erased $body" else body + } + val argList = if (len == 2) paramsBody else s"($paramsBody)" + s"$argList $arrow $res" + } + + def typeRefUncurried(tycon: Type, args: List[Type]): Type = tycon match { + case tycon: u.TypeRef if tycon.typeArgs.nonEmpty => + unsupportedError(s"curried type application $tycon[${args.mkString(",")}]") + case ContextFunctionType(n) => ctxFnIsUnsupported(formatFnType("?=>", isErased = false, n, args)) + case ErasedContextFunctionType(n) => erasedCtxFnIsUnsupported(formatFnType("?=>", isErased = true, n, args)) + case ErasedFunctionType(n) => erasedFnIsUnsupported(formatFnType("=>", isErased = true, n, args)) + case FunctionXXLType(n) => bigFnIsUnsupported(formatFnType("=>", isErased = false, n, args)) + case _ => + if (ctx.isJava && tycon.typeSymbol === u.definitions.ArrayClass) { + val arg0 = args.head + val arg1 = + arg0 match { + case arg0: u.RefinedType if arg0.parents.exists(_ eq ObjectTpeJava) => + // TODO [tasty]: in theory we could add more Modes to context to + // detect this situation and not perform the substitution ahead of time, + // however this does not work with SHAREDtype which caches. 
+ val parents1 = arg0.parents.map(tpe => + if (tpe eq ObjectTpeJava) ObjectTpe else tpe + ) + IntersectionType(parents1) + case _ => + arg0 + } + + val args1 = if (arg1 eq arg0) args else arg1 :: Nil + u.appliedType(tycon, args1) + } else { + u.appliedType(tycon, args) + } + } + + if (args.exists(tpe => tpe.isInstanceOf[u.TypeBounds] | tpe.isInstanceOf[LambdaPolyType])) { + val syms = mutable.ListBuffer.empty[Symbol] + def bindWildcards(tpe: Type) = tpe match { + case tpe: u.TypeBounds => ctx.newWildcard(tpe).tap(syms += _).pipe(_.ref) + case tpe: LambdaPolyType => tpe.toNested + case tpe => tpe + } + val args1 = args.map(bindWildcards) + if (syms.isEmpty) typeRefUncurried(tycon, args1) + else u.ExistentialType(syms.toList, typeRefUncurried(tycon, args1)) + } + else { + typeRefUncurried(tycon, args) + } + + } + + def ParamRef(binder: Type, idx: Int): Type = + binder.asInstanceOf[LambdaType].lambdaParams(idx).ref + + def NamedType(prefix: Type, sym: Symbol)(implicit ctx: Context): Type = { + if (ctx.isJava && sym.isClass && sym.isJavaDefined) { + def processInner(tp: Type): Type = tp match { + case u.TypeRef(pre, sym, args) if !sym.isStatic => u.typeRef(processInner(pre.widen), sym, args) + case _ => tp + } + prefix match { + case _: u.TypeRef => processInner(u.typeRef(prefix, sym, Nil)) // e.g. 
prefix is `Foo[Int]` + case _ => processInner(sym.tpeHK) // ignore prefix otherwise + } + } + else if (sym.isType) { + prefix match { + case _: u.ThisType if !sym.isTypeParameter => u.typeRef(prefix, sym, Nil) + case _:u.SingleType | _:u.RefinedType => u.typeRef(prefix, sym, Nil) + case _ => u.appliedType(sym, Nil) + } + } + else { // is a term + if (sym.hasAllFlags(Flags.PackageFlags)) { + u.typeRef(u.NoPrefix, sym, Nil) + } else { + u.singleType(prefix, sym) + } + } + } + + def TypeRef(prefix: Type, name: TastyName.TypeName)(implicit ctx: Context): Type = + TypeRefIn(prefix, prefix, name) + + def TypeRefIn(prefix: Type, space: Type, name: TastyName.TypeName)(implicit ctx: Context): Type = { + import scala.tools.tasty.TastyName._ + + def doLookup = lookupTypeFrom(space)(prefix, name) + + val preSym = prefix.typeSymbol + + // we escape some types in the scala package especially + if (preSym === u.definitions.ScalaPackage) { + name match { + case TypeName(SimpleName(raw @ SyntheticScala3Type())) => raw match { + case tpnme.And => AndTpe + case tpnme.Or => unionIsUnsupported + case tpnme.ContextFunctionN(n) => ContextFunctionType(n.toInt) + case tpnme.FunctionN(n) if (n.toInt > 22) => FunctionXXLType(n.toInt) + case tpnme.TupleCons => genTupleIsUnsupported("scala.*:") + case tpnme.Tuple if !ctx.mode.is(ReadParents) => genTupleIsUnsupported("scala.Tuple") + case tpnme.AnyKind => u.definitions.AnyTpe + case tpnme.Matchable => u.definitions.AnyTpe + case tpnme.ErasedContextFunctionN(n) if n.toInt > 0 => ErasedContextFunctionType(n.toInt) + case tpnme.ErasedFunctionN(n) => ErasedFunctionType(n.toInt) + case _ => doLookup + } + + case _ => doLookup + } + } + else { + if (ctx.isJava && preSym === u.definitions.JavaLangPackage) { + name match { + case TypeName(SimpleName(tpnme.Object)) => ObjectTpeJava // Object =:= scala.Any in Java. 
+ case _ => doLookup + } + } else { + doLookup + } + } + } + + def TermRef(prefix: Type, name: TastyName)(implicit ctx: Context): Type = + TermRefIn(prefix, prefix, name) + + def TermRefIn(prefix: Type, space: Type, name: TastyName)(implicit ctx: Context): Type = + lookupTypeFrom(space)(prefix, name.toTermName) + + } + + private[bridge] def mkRefinedTypeWith(parents: List[Type], clazz: Symbol, decls: u.Scope): Type = + u.RefinedType.apply(parents, decls, clazz).tap(clazz.info = _) + + private def normaliseIfBounds(tpe: Type): Type = tpe match { + case tpe: u.TypeBounds => normaliseBounds(tpe) + case tpe => tpe + } + + private def normaliseBounds(bounds: u.TypeBounds): Type = { + val u.TypeBounds(lo, hi) = bounds + if (lo.isHigherKinded && hi.isHigherKinded) { + if (mergeableParams(lo, hi)) { + val nuLo = lo.resultType.upperBound.subst(lo.typeParams, hi.typeParams.map(_.ref)) + lo.typeParams.foreach { sym => + sym.owner.rawInfo.decls.unlink(sym) + sym.owner.rawInfo.members.unlink(sym) + sym.owner = noSymbol + } + u.PolyType(hi.typeParams, u.TypeBounds(nuLo, hi.resultType.upperBound)) + } + else bounds match { + case u.TypeBounds(lo: LambdaPolyType, hi: LambdaPolyType) => u.TypeBounds(lo.toNested,hi.toNested) + case _ => bounds + } + } + else if (hi.isHigherKinded) + u.PolyType(hi.typeParams, u.TypeBounds(lo.upperBound, hi.resultType.upperBound)) + else if (lo.isHigherKinded) + u.PolyType(lo.typeParams, u.TypeBounds(lo.resultType.upperBound, hi.upperBound)) + else + bounds + } + + private[bridge] def sameErasure(sym: Symbol)(tpe: Type, ref: ErasedTypeRef) = + NameErasure.sigName(tpe, sym) === ref + + /** This is a port from Dotty of transforming a Method type to an ErasedTypeRef + */ + private object NameErasure { + + def isRepeatedParam(self: Type): Boolean = + self.typeSymbol eq u.definitions.RepeatedParamClass + + /** Translate a type of the form From[T] to either To[T] or To[? <: T] (if `wildcardArg` is set). Keep other types as they are. 
+ * `from` and `to` must be static classes, both with one type parameter, and the same variance. + * Do the same for by name types => From[T] and => To[T] + */ + def translateParameterized(self: Type)(from: u.ClassSymbol, to: u.ClassSymbol, wildcardArg: Boolean): Type = self match { + case u.NullaryMethodType(tp) => + u.NullaryMethodType(translateParameterized(tp)(from, to, wildcardArg = false)) + case _ => + if (self.typeSymbol.isSubClass(from)) { + def elemType(tp: Type): Type = tp.dealiasWiden match { + // case tp: AndOrType => tp.derivedAndOrType(elemType(tp.tp1), elemType(tp.tp2)) + case tp: u.RefinedType => u.intersectionType(tp.parents.map(elemType)) + case _ => tp.baseType(from).typeArgs.head + } + val arg = elemType(self) + val arg1 = if (wildcardArg) u.TypeBounds.upper(arg) else arg + u.appliedType(to, arg1 :: Nil) + } + else self + } + + def translateFromRepeated(self: Type)(toArray: Boolean): Type = { + val seqClass = if (toArray) u.definitions.ArrayClass else u.definitions.SeqClass + if (isRepeatedParam(self)) + // We want `Array[? <: T]` because arrays aren't covariant until after + // erasure. See `tests/pos/i5140`. 
+ translateParameterized(self)(u.definitions.RepeatedParamClass, seqClass, wildcardArg = toArray) + else self + } + + def sigName(tp: Type, sym: Symbol): ErasedTypeRef = { + val normTp = translateFromRepeated(tp)(toArray = sym.isJavaDefined) + erasedSigName( + u.erasure.erasure(sym)(normTp) + ) + } + + private def erasedSigName(erased: Type): ErasedTypeRef = erased match { + case erased: u.ExistentialType => erasedSigName(erased.underlying) + case erased: u.TypeRef => + import TastyName._ + if (!isSymbol(erased.sym)) + typeError(s"missing: ${erased.prefix}, ${erased.sym.name}") + var dims = 0 + var clazzRef: Type = erased + while (clazzRef.typeArgs.nonEmpty && clazzRef.typeSymbol.isSubClass(u.definitions.ArrayClass)) { + dims += 1 + clazzRef = clazzRef.typeArgs.head + } + def unpeelName(acc: List[TastyName], tpe: Type): List[TastyName] = { + def mkRef(sym: Symbol) = { + val name = SimpleName(sym.name.toString) + if (sym.isModuleClass && !sym.isPackageClass) ObjectName(name) + else name + } + def rec(pre: Type) = + (pre ne u.NoPrefix) && (pre ne u.NoType) && (pre.typeSymbol != u.rootMirror.RootClass) + tpe match { + case u.TypeRef(pre, sym, _) => + val ref = mkRef(sym) + if (rec(pre)) unpeelName(ref :: acc, pre) + else ref :: acc + case tpe @ u.ThisType(sym) => + val ref = mkRef(sym) + val pre = tpe.prefix + if (rec(pre)) unpeelName(ref :: acc, pre) + else ref :: acc + case x => throw new MatchError(x) + } + } + val name = (unpeelName(Nil, clazzRef): @unchecked) match { + case single :: Nil => single + case base :: rest => rest.foldLeft(base)((acc, n) => n match { + case ObjectName(base) => ObjectName(QualifiedName(acc, PathSep, base.asSimpleName)) + case name => QualifiedName(acc, PathSep, name.asSimpleName) + }) + } + ErasedTypeRef(name.toTypeName, dims) + case u.ErrorType => + ErasedTypeRef(tpnme.ErrorType, 0) + case x => throw new MatchError(x) + } + + } + + /** A synthetic type `scala.&` which accepts two type arguments, representing an intersection type + * 
@see https://github.com/scala/scala3/issues/7688 + */ + case object AndTpe extends Type + + case class ErasedFunctionType(arity: Int) extends Type { + assert(arity > 0) + } + + case class ErasedContextFunctionType(arity: Int) extends Type { + assert(arity > 0) + } + + case class ContextFunctionType(arity: Int) extends Type { + assert(arity > 0) + } + + case class FunctionXXLType(arity: Int) extends Type { + assert(arity > 22) + } + + private val SyntheticScala3Type = + raw"^(?:&|\||AnyKind|(?:Erased)?(?:Context)?Function\d+|\*:|Tuple|Matchable)$$".r + + sealed abstract trait TastyRepr extends u.Type { + def tflags: TastyFlagSet + final def unsupportedFlags: TastyFlagSet = tflags & FlagSets.TastyOnlyFlags + } + + abstract class TastyCompleter( + isClass: Boolean, + tflags: TastyFlagSet + )(implicit capturedCtx: Context) + extends BaseTastyCompleter(tflags) { + override final val decls: u.Scope = if (isClass) u.newScope else u.EmptyScope + } + + private[TypeOps] class CopyCompleter( + underlying: u.TermSymbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + underlying.ensureCompleted(CopySym) + sym.info = underlying.tpe + underlying.attachments.all.foreach(sym.updateAttachment(_)) + } + } + + /** This completer ensures that if the "fake" singleton enum module class + * is completed first, that it completes the module symbol which + * then completes the module class. 
+ */ + private[TypeOps] class SingletonEnumModuleClassCompleter( + enumValue: u.TermSymbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + enumValue.ensureCompleted(EnumProxy) + } + } + + private[TypeOps] class LocalSealedChildProxyCompleter( + parent: Symbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + sym.info = defn.ClassInfoType(parent.tpe_* :: Nil, sym) // TODO [tasty]: check if tpe_* forces + } + } + + private[TypeOps] final class LambdaParamCompleter( + flags: TastyFlagSet, + idx: Int, + infoDb: Int => Type, + )(implicit ctx: Context) + extends BaseTastyCompleter(flags) { + override def computeInfo(denot: Symbol)(implicit ctx: Context): Unit = + denot.info = infoDb(idx) + } + + abstract class BaseTastyCompleter( + final val tflags: TastyFlagSet + )(implicit capturedCtx: Context) + extends u.LazyType + with TastyRepr + with u.FlagAgnosticCompleter { + + override final def load(sym: Symbol): Unit = + complete(sym) + + override final def complete(sym: Symbol): Unit = + // we do have to capture Context here as complete is triggered outside of our control + // TODO [tasty]: perhaps Context can be redesigned so it can be reconstructed from a lightweight representation. 
+ computeInfo(sym)(capturedCtx) + + /**Compute and set the info for the symbol in the given Context + */ + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit + } + + private[bridge] def lookupTypeFrom(owner: Type)(pre: Type, tname: TastyName)(implicit ctx: Context): Type = + defn.NamedType(pre, lookupSymbol(owner, tname)) + + private def lambdaResultType(resType: Type): Type = resType match { + case res: LambdaPolyType => res.toNested + case res => res + } + + final class LambdaPolyType(typeParams: List[Symbol], val resType: Type) extends u.PolyType(typeParams, LambdaPolyType.addLower(resType)) { + def toNested: u.PolyType = resType match { + case _: u.TypeBounds => this + case _ => u.PolyType(typeParams, resType) + } + def withVariances(variances: List[Variance]): this.type = { + typeParams.lazyZip(variances).foreach { (sym, variance) => // TODO [tasty]: should this be cloned instead? + variance match { + case Variance.Covariant => sym.flags |= Flags.COVARIANT + case Variance.Contravariant => sym.flags |= Flags.CONTRAVARIANT + case _ => () + } + } + this + } + } + + object LambdaPolyType { + private def addLower(tpe: Type): u.TypeBounds = tpe match { + case tpe: u.TypeBounds => tpe + case tpe => u.TypeBounds.upper(tpe) + } + } + + private[bridge] final class OpaqueTypeBounds(lo: Type, hi: Type, val alias: Type) extends u.TypeBounds(lo, hi) + + /** The given type, unless `sym` is a constructor, in which case the + * type of the constructed instance is returned + */ + def effectiveResultType(sym: Symbol, givenTp: Type): Type = + if (sym.name == u.nme.CONSTRUCTOR) sym.owner.tpe + else givenTp + + /** Lazy thread unsafe non-nullable value that can not be re-entered */ + private[bridge] final class SyncRef[A](private var compute: () => A) { + private var out: A = _ + private var entered: Boolean = false + + def apply(): A = { + if (entered) { + assert(out != null, "cyclic completion of SyncRef") + } + else { + entered = true + val result = compute() + compute = 
null + assert(result != null, "SyncRef is non-nullable") + out = result + } + out + } + } + + object MethodTermLambda extends TermLambdaFactory { + + type ThisLambda = MethodTermLambda + + protected def apply( + params: ArraySeq[TastyName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new MethodTermLambda(params, paramInfosOp, resultTypeOp, flags, registerCallback) + } + + } + + private[TypeOps] final class MethodTermLambda( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet, + registerCallback: Type => Unit, + )(implicit ctx: Context) + extends TermLambda("MethodTermLambda")(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { + + protected def canonical(ps: List[Symbol], res: Type): Type = u.MethodType(ps, res) + + override def canEqual(that: Any): Boolean = that.isInstanceOf[MethodTermLambda] + } + + object HKTypeLambda extends TypeLambdaFactory { + + type ThisLambda = HKTypeLambda + + protected def apply( + params: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new HKTypeLambda(params, flags, paramInfosOp, resultTypeOp, registerCallback) + } + } + + private[TypeOps] final class HKTypeLambda( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit + )(implicit ctx: Context) + extends TypeLambda("HKTypeLambda")(paramTNames, flags, paramInfosOp, resultTypeOp)(registerCallback) { + + final override protected def normaliseResult(resType: Type): Type = lambdaResultType(resType) + + protected def canonical(ps: List[Symbol], res: Type): Type = new 
LambdaPolyType(ps, res) + + override def canEqual(that: Any): Boolean = that.isInstanceOf[HKTypeLambda] + } + + object PolyTypeLambda extends TypeLambdaFactory { + + type ThisLambda = PolyTypeLambda + + protected def apply( + params: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new PolyTypeLambda(params, flags, paramInfosOp, resultTypeOp, registerCallback) + } + } + + private[TypeOps] final class PolyTypeLambda( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit + )(implicit ctx: Context) + extends TypeLambda("PolyTypeLambda")(paramTNames, flags, paramInfosOp, resultTypeOp)(registerCallback) { + + protected def canonical(ps: List[Symbol], res: Type): Type = u.PolyType(ps, res) + + override def canEqual(that: Any): Boolean = that.isInstanceOf[PolyTypeLambda] + } + + private[TypeOps] abstract class TypeLambda( + kind: String)( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type)( + registerCallback: Type => Unit + )(implicit ctx: Context) + extends LambdaType(kind)(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { + final override def typeParams: List[Symbol] = lambdaParams.toList + final protected def normaliseParam(info: Type): Type = normaliseIfBounds(info) + } + + private[TypeOps] abstract class TermLambda( + kind: String)( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet)( + registerCallback: Type => Unit + )(implicit ctx: Context) + extends LambdaType(kind)(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { + final override def params: List[Symbol] = 
lambdaParams.toList + final protected def normaliseParam(info: Type): Type = info + } + + private[TypeOps] abstract class LambdaType( + kind: String)( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet)( + registerCallback: Type => Unit + )(implicit ctx: Context) extends AbstractLambdaType(kind) { + + protected def normaliseParam(info: Type): Type + protected def normaliseResult(resType: Type): Type = resType + + final val lambdaParams: ArraySeq[Symbol] = { + val paramInfoDb = new SyncRef(() => paramInfosOp(this.lambdaParams)) + def infoAt(idx: Int) = normaliseParam(paramInfoDb()(idx)) + + paramTNames.zipWithIndex.map { case (tname, idx) => + ctx.newLambdaParameter(tname, flags, idx, infoAt) + } + } + + registerCallback(this) + + final val resType: Type = normaliseResult(resultTypeOp()) + + } + + private[TypeOps] abstract class AbstractLambdaType(override val productPrefix: String) + extends Type + with Product + with Serializable { + + def lambdaParams: ArraySeq[Symbol] + def resType: Type + + final override def etaExpand: Type = { + lambdaParams.foreach(_.info) // force locally + canonical(lambdaParams.toList, resType) + } + + protected def canonical(ps: List[Symbol], res: Type): Type + + override final def productArity: Int = 2 + + override final def productElement(n: Int): Any = n match { + case 0 => lambdaParams + case 1 => resType + case _ => throw new IndexOutOfBoundsException(n.toString) + } + + override final def equals(that: Any): Boolean = that match { + case that: AbstractLambdaType => + (that.canEqual(self) + && that.lambdaParams == lambdaParams + && that.resType == resType) + case _ => false + } + + } + + abstract class LambdaFactory[N <: TastyName] { + + type ThisLambda <: LambdaType + + protected def apply( + params: ArraySeq[N], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, 
+ )(implicit ctx: Context): ThisLambda + + } + + object LambdaFactory { + final def parse[N <: TastyName]( + factory: LambdaFactory[N], + params: ArraySeq[N], + flags: TastyFlagSet)( + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): Type = + factory(params, flags, paramInfosOp, resultTypeOp, registerCallback) + .etaExpand // turn the LambdaType into something the compiler understands + .tap(registerCallback) // we should replace the type at start as it has been expanded + } + + abstract class TermLambdaFactory extends LambdaFactory[TastyName] + abstract class TypeLambdaFactory extends LambdaFactory[TastyName.TypeName] + + private[TypeOps] final class RecType(run: RecType => Type)(implicit ctx: Context) extends Type with Product { + + override val productPrefix = "RecType" + override val productArity = 2 + + val refinementClass = ctx.newRefinementClassSymbol + val recThis: Type = u.ThisType(refinementClass) + val parent: Type = run(this) + + def canEqual(that: Any): Boolean = that.isInstanceOf[RecType] + def productElement(n: Int): Any = n match { + case 0 => if (parent == null) "" else parent + case 1 => hashCode + case _ => throw new IndexOutOfBoundsException(n.toString) + } + + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + override def safeToString: String = s"RecType(rt @ $hashCode => ${if (parent == null) "" else parent})" + + } + +} diff --git a/src/compiler/scala/tools/nsc/tasty/package.scala b/src/compiler/scala/tools/nsc/tasty/package.scala new file mode 100644 index 000000000000..df5f040af058 --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/package.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc + +import annotation.unchecked.uncheckedVariance + +package object tasty { + + /** Adds equality operators asserting at compiletime that the RHS is a subtype of the LHS. */ + implicit final class SafeEq[-T](private val t: T @uncheckedVariance) extends AnyVal { + @inline final def ===(u: T): Boolean = t == u + @inline final def !==(u: T): Boolean = t != u + } + + def cyan(str: String): String = Console.CYAN + str + Console.RESET + def yellow(str: String): String = Console.YELLOW + str + Console.RESET + def magenta(str: String): String = Console.MAGENTA + str + Console.RESET + def red(str: String): String = Console.RED + str + Console.RESET + def green(str: String): String = Console.GREEN + str + Console.RESET + def blue(str: String): String = Console.BLUE + str + Console.RESET + +} diff --git a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala index 8e26e3689af3..e4089db46958 100644 --- a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala +++ b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,8 +10,6 @@ * additional information regarding copyright ownership. 
*/ -// Copyright 2005-2017 LAMP/EPFL and Lightbend, Inc - package scala.tools.nsc package transform @@ -86,15 +84,17 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { if (sym.isSetter) setterBody(sym, sym.getterIn(clazz)) else getterBody(sym) protected def getterBody(getter: Symbol): Tree = { - assert(getter.isGetter) - assert(getter.hasFlag(PARAMACCESSOR)) + assert(getter.isGetter, s"$getter must be a getter") + assert(getter.hasFlag(PARAMACCESSOR), s"$getter must be an accessor") fieldAccess(getter) } protected def setterBody(setter: Symbol, getter: Symbol): Tree = { assert(getter.hasFlag(PARAMACCESSOR), s"missing implementation for non-paramaccessor $setter in $clazz") - + // scala-dev#408: fields for locals captured in a trait are non-final. The lambdalift phase adds the + // ConstructorNeedsFence attachment to the primary constructor of the class to ensure safe publication. + setter.accessed.setFlag(MUTABLE) Assign(fieldAccess(setter), Ident(setter.firstParam)) } @@ -118,7 +118,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { // TODO: better way to communicate from info transform to tree transform? 
- private[this] val _bitmapInfo = perRunCaches.newMap[Symbol, BitmapInfo] + private[this] val _bitmapInfo = perRunCaches.newMap[Symbol, BitmapInfo]() private[this] val _slowPathFor = perRunCaches.newMap[Symbol, Symbol]() def checkedAccessorSymbolSynth(clz: Symbol): CheckedAccessorSymbolSynth = @@ -200,10 +200,10 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { bitmapSyms } - fields groupBy bitmapCategory flatMap { - case (category, fields) if category != nme.NO_NAME && fields.nonEmpty => allocateBitmaps(fields, category) + fields.groupBy(bitmapCategory).flatMap { + case (category, fields) if category != nme.NO_NAME && fields.nonEmpty => allocateBitmaps(fields, category): Iterable[Symbol] case _ => Nil - } toList + }.toList } def slowPathFor(lzyVal: Symbol): Symbol = _slowPathFor(lzyVal) @@ -236,7 +236,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { */ def mkTest(bm: BitmapInfo, equalToZero: Boolean = true): Tree = if (bm.isBoolean) - if (equalToZero) NOT(bm.select(thisRef)) else bm.select(thisRef) + if (equalToZero) Apply(NOT(bm.select(thisRef)), Nil) else bm.select(thisRef) else Apply(bm.member(bm.applyToMask(thisRef, nme.AND), if (equalToZero) nme.EQ else nme.NE), List(ZERO)) @@ -256,7 +256,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { /** * The compute method (slow path) looks like: * - * ``` + * {{{ * def l\$compute() = { * synchronized(this) { * if ((bitmap\$n & MASK) == 0) { @@ -270,7 +270,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { * this.fn = null * l\$ * } - * ``` + * }}} * * `bitmap\$n` is a byte, int or long value acting as a bitmap of initialized values. * The kind of the bitmap determines how many bit indicators for lazy vals are stored in it. 
@@ -304,7 +304,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { // The lazy accessor delegates to the compute method if needed, otherwise just accesses the var (it was initialized previously) // `if ((bitmap&n & MASK) == 0) this.l$compute() else l$` - val accessorRhs = If(needsInit, Apply(Select(thisRef, slowPathSym), Nil), selectVar) + val accessorRhs = fields.castHack(If(needsInit, Apply(Select(thisRef, slowPathSym), Nil), selectVar), lazyVar.info) afterOwnPhase { // so that we can assign to vals Thicket(List((DefDef(slowPathSym, slowPathRhs)), DefDef(lazyAccessor, accessorRhs)) map typedPos(lazyAccessor.pos.focus)) @@ -315,7 +315,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { class SynthInitCheckedAccessorsIn(clazz: Symbol) extends SynthCheckedAccessorsTreesInClass(clazz) { // Add statements to the body of a constructor to set the 'init' bit for each field initialized in the constructor - private object addInitBitsTransformer extends Transformer { + private object addInitBitsTransformer extends AstTransformer { override def transformStats(stats: List[Tree], exprOwner: Symbol) = { val checkedStats = stats flatMap { // Mark field as initialized after an assignment @@ -326,7 +326,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { // TODO is this case ever hit? constructors does not generate Assigns with EmptyTree for the rhs AFAICT // !!! Ident(self) is never referenced, is it supposed to be confirming // that self is anything in particular? 
- case Apply(lhs@Select(Ident(self), _), EmptyTree.asList) if lhs.symbol.isSetter => Nil + case Apply(lhs@Select(Ident(_), _), EmptyTree.asList) if lhs.symbol.isSetter => Nil case stat => List(stat) } @@ -355,7 +355,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { else rhs private def mkCheckedAccessorRhs(retVal: Tree, pos: Position, bitmap: BitmapInfo): Tree = { - val msg = s"Uninitialized field: ${clazz.sourceFile}: ${pos.line}" + val msg = s"Uninitialized field: ${clazz.sourceFile.name}: ${pos.line}" val result = IF(mkTest(bitmap, equalToZero = false)). THEN(retVal). diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 3cec99c6f01f..b80330d8bc0a 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,28 +15,28 @@ package transform import symtab._ import Flags._ -import scala.collection._ +import scala.collection.mutable, mutable.{Buffer, ListBuffer} import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { import global._ import definitions._ import CODE._ - import treeInfo.StripCast + import treeInfo.{ SYNTH_CASE_FLAGS, isDefaultCase, StripCast } - /** the following two members override abstract members in Transform */ val phaseName: String = "cleanup" /* used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */ private val entryPoints = perRunCaches.newSet[Symbol]() // : List[Symbol] = Nil def getEntryPoints: List[String] = entryPoints.toList.map(_.fullName('.')).sorted - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new CleanUpTransformer(unit) class CleanUpTransformer(unit: CompilationUnit) extends StaticsTransformer { - private val newStaticMembers = mutable.Buffer.empty[Tree] - private val newStaticInits = mutable.Buffer.empty[Tree] + private val newStaticMembers = Buffer.empty[Tree] + private val newStaticInits = Buffer.empty[Tree] private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol] private var transformListApplyLimit = 8 private def reducingTransformListApply[A](depth: Int)(body: => A): A = { @@ -45,13 +45,13 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { try body finally transformListApplyLimit = saved } - private def clearStatics() { + private def clearStatics(): Unit = { newStaticMembers.clear() newStaticInits.clear() symbolsStoredAsStatic.clear() } private def transformTemplate(tree: Tree) = { - val Template(_, _, body) = tree + val Template(_, _, body) = tree: @unchecked clearStatics() val newBody = transformTrees(body) val templ = deriveTemplate(tree)(_ => 
transformTrees(newStaticMembers.toList) ::: newBody) @@ -62,9 +62,6 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix) - //private val classConstantMeth = new HashMap[String, Symbol] - //private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)] - private var localTyper: analyzer.Typer = null private def typedWithPos(pos: Position)(tree: Tree) = @@ -83,12 +80,12 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { def transformApplyDynamic(ad: ApplyDynamic) = { val qual0 = ad.qual val params = ad.args - if (settings.logReflectiveCalls) + if (settings.logReflectiveCalls.value) reporter.echo(ad.pos, "method invocation uses reflection") val typedPos = typedWithPos(ad.pos) _ - assert(ad.symbol.isPublic) + assert(ad.symbol.isPublic, "Must be public") var qual: Tree = qual0 /* ### CREATING THE METHOD CACHE ### */ @@ -99,7 +96,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { methSym setInfoAndEnter MethodType(params, MethodClass.tpe) val methDef = typedPos(DefDef(methSym, forBody(methSym, params.head))) - newStaticMembers append transform(methDef) + newStaticMembers += transform(methDef) methSym } @@ -223,8 +220,8 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { case _ => false } def typesMatchUpdate = paramTypes match { - case List(tp1, tp2) => (tp1 <:< IntTpe) && isMaybeUnit - case _ => false + case List(tp1, _) => (tp1 <:< IntTpe) && isMaybeUnit + case _ => false } (methSym.name == nme.length && params.isEmpty) || @@ -259,7 +256,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { // reflective method call machinery val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...) 
def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol - def lookup = Apply(cache, List(qual1() GETCLASS())) // get Method object from cache + def lookup = Apply(cache, List(qual1().GETCLASS())) // get Method object from cache def invokeArgs = ArrayValue(TypeTree(ObjectTpe), params) // args for invocation def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...) @@ -269,19 +266,18 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil)) // try { method.invoke } catch { case e: InvocationTargetExceptionClass => throw e.getCause() } - fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY) + fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } FINALLY END) } /* A possible primitive method call, represented by methods in BoxesRunTime. */ def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args) - def genValueCallWithTest = { + def genValueCallWithTest = getPrimitiveReplacementForStructuralCall(methSym.name) match { case Some((operator, test)) => IF (test(qual1())) THEN genValueCall(operator) ELSE genDefaultCall case _ => genDefaultCall } - } /* A native Array call. */ def genArrayCall = fixResult( @@ -290,6 +286,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2)) case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1))) case nme.clone_ => REF(arrayCloneMethod) APPLY List(args(0)) + case x => throw new MatchError(x) }, mustBeUnit = methSym.name == nme.update ) @@ -299,14 +296,14 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { * so we have to generate both kinds of code. 
*/ def genArrayCallWithTest = - IF ((qual1() GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall + IF ((qual1().GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall - localTyper typed ( + localTyper.typed { if (isMaybeBoxed && isJavaValueMethod) genValueCallWithTest else if (isArrayMethodSignature && isDefinitelyArray) genArrayCall else if (isArrayMethodSignature && isMaybeArray) genArrayCallWithTest else genDefaultCall - ) + } } } @@ -356,6 +353,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } case NoType => abort(ad.symbol.toString) + case x => throw new MatchError(x) } typedPos { val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe @@ -390,114 +388,142 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } } - override def transform(tree: Tree): Tree = tree match { - case _: ClassDef if genBCode.codeGen.CodeGenImpl.isJavaEntryPoint(tree.symbol, currentUnit) => - // collecting symbols for entry points here (as opposed to GenBCode where they are used) - // has the advantage of saving an additional pass over all ClassDefs. - entryPoints += tree.symbol - super.transform(tree) + object StringsPattern { + def unapply(arg: Tree): Option[List[String]] = arg match { + case Literal(Constant(value: String)) => Some(value :: Nil) + case Literal(Constant(null)) => Some(null :: Nil) + case Alternative(alts) => traverseOpt(alts)(unapply).map(_.flatten) + case _ => None + } + } - /* Transforms dynamic calls (i.e. calls to methods that are undefined - * in the erased type space) to -- dynamically -- unsafe calls using - * reflection. This is used for structural sub-typing of refinement - * types, but may be used for other dynamic calls in the future. - * For 'a.f(b)' it will generate something like: - * 'a.getClass(). - * ' getMethod("f", Array(classOf[b.type])). - * ' invoke(a, Array(b)) - * plus all the necessary casting/boxing/etc. 
machinery required - * for type-compatibility (see fixResult). - * - * USAGE CONTRACT: - * There are a number of assumptions made on the way a dynamic apply - * is used. Assumptions relative to type are handled by the erasure - * phase. - * - The applied arguments are compatible with AnyRef, which means - * that an argument tree typed as AnyVal has already been extended - * with the necessary boxing calls. This implies that passed - * arguments might not be strictly compatible with the method's - * parameter types (a boxed integer while int is expected). - * - The expected return type is an AnyRef, even when the method's - * return type is an AnyVal. This means that the tree containing the - * call has already been extended with the necessary unboxing calls - * (or is happy with the boxed type). - * - The type-checker has prevented dynamic applies on methods which - * parameter's erased types are not statically known at the call site. - * This is necessary to allow dispatching the call to the correct - * method (dispatching on parameters is static in Scala). In practice, - * this limitation only arises when the called method is defined as a - * refinement, where the refinement defines a parameter based on a - * type variable. */ + private def transformStringSwitch(sw: Match): Tree = { import CODE._ + // tree shape assumption justified by the codegen in MatchOptimization + val Match(Typed(selTree0, _), cases) = sw: @unchecked + // usually `selTree0` is an `Ident` or `Literal`, but Xasync may transform the scrutinee local into a state + // machine field (scala/bug#12686). `evalOnce` introduces another local if needed (not for Ident / Literal). 
+ gen.evalOnce(selTree0, currentOwner, unit) { selTree => + def selArg = selTree() match { + case x: Ident => REF(x.symbol) + case x: Literal => x + case x => throw new MatchError(x) + } - case tree: ApplyDynamic if tree.symbol.owner.isRefinementClass => - transformApplyDynamic(tree) + val newSel = selTree() match { + case x: Ident => atPos(x.symbol.pos)(IF(x.symbol OBJ_EQ NULL) THEN ZERO ELSE selArg.OBJ_##) + case x: Literal => atPos(x.pos)(if (x.value.value == null) ZERO else selArg.OBJ_##) + case x => throw new MatchError(x) + } + val restpe = sw.tpe + val resUnit = restpe =:= UnitTpe + val swPos = sw.pos.focus + + /* From this: + * string match { case "AaAa" => 1 case "BBBB" | "c" => 2 case _ => 3 } + * Generate this: + * string.## match { + * case 2031744 => + * if ("AaAa" equals string) goto matchEnd (1) + * else if ("BBBB" equals string) goto case2 + * else goto defaultCase + * case 99 => + * if ("c" equals string) goto case2 + * else goto defaultCase + * case _ => goto defaultCase + * } + * case2: goto matchEnd (2) + * defaultCase: goto matchEnd (3) // or `goto matchEnd (throw new MatchError(string))` if no default was given + * matchEnd(res: Int): res + * Extra labels are added for alternative patterns branches, since multiple branches in the + * resulting switch may need to correspond to a single case body. + */ - /* Some cleanup transformations add members to templates (classes, traits, etc). - * When inside a template (i.e. the body of one of its members), two maps - * (newStaticMembers and newStaticInits) are available in the tree transformer. Any mapping from - * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newStaticMembers once the - * transformation of the template is finished will be added as a member to the - * template. Any mapping from a symbol to a tree that is in newStaticInits, will be added - * as a statement of the form "symbol = tree" to the beginning of the default - * constructor. 
*/ - case Template(parents, self, body) => - localTyper = typer.atOwner(tree, currentClass) - transformTemplate(tree) + val labels = ListBuffer.empty[LabelDef] + var defaultCaseBody = Throw(New(MatchErrorClass.tpe_*, selArg)): Tree - case Literal(c) if c.tag == ClazzTag => - val tpe = c.typeValue - typedWithPos(tree.pos) { - if (isPrimitiveValueClass(tpe.typeSymbol)) { - if (tpe.typeSymbol == UnitClass) - REF(BoxedUnit_TYPE) - else - Select(REF(boxedModule(tpe.typeSymbol)), nme.TYPE_) - } + def LABEL(name: String) = currentOwner.newLabel(unit.freshTermName(name), swPos).setFlag(SYNTH_CASE_FLAGS) - else tree + def newCase() = LABEL("case").setInfo(MethodType(Nil, restpe)) + + val defaultCase = LABEL("defaultCase").setInfo(MethodType(Nil, restpe)) + val matchEnd = LABEL("matchEnd").tap { lab => + // genbcode isn't thrilled about seeing labels with Unit arguments, so `success`'s type is one of + // `${sw.tpe} => ${sw.tpe}` or `() => Unit` depending. + lab.setInfo(MethodType(if (resUnit) Nil else List(lab.newSyntheticValueParam(restpe)), restpe)) + } + + def goto(sym: Symbol, params: Tree*) = REF(sym) APPLY (params: _*) + + def gotoEnd(body: Tree) = if (resUnit) BLOCK(body, goto(matchEnd)) else goto(matchEnd, body) + + val casesByHash = cases.flatMap { + case cd@CaseDef(StringsPattern(strs), _, body) => + val jump = newCase() // always create a label so when its used it matches the source case (e.g. 
`case4()`) + strs match { + case str :: Nil => List((str, gotoEnd(body), cd.pat.pos)) + case _ => + labels += LabelDef(jump, Nil, gotoEnd(body)) + strs.map((_, goto(jump), cd.pat.pos)) + } + case cd if isDefaultCase(cd) => defaultCaseBody = gotoEnd(cd.body); None + case cd => globalError(s"unhandled in switch: $cd"); None + }.groupBy(_._1.##) + + val newCases = casesByHash.toList.sortBy(_._1).map { + case (hash, cases) => + val newBody = cases.foldRight(atPos(swPos)(goto(defaultCase): Tree)) { + case ((null, rhs, pos), next) => atPos(pos)(IF(NULL OBJ_EQ selArg) THEN rhs ELSE next) + case ((str, rhs, pos), next) => atPos(pos)(IF(LIT(str) OBJ_== selArg) THEN rhs ELSE next) + } + CASE(LIT(hash)) ==> newBody } - /* - * This transformation should identify Scala symbol invocations in the tree and replace them - * with references to a statically cached instance. - * - * The reasoning behind this transformation is the following. Symbols get interned - they are stored - * in a global map which is protected with a lock. The reason for this is making equality checks - * quicker. But calling Symbol.apply, although it does return a unique symbol, accesses a locked object, - * making symbol access slow. To solve this, the unique symbol from the global symbol map in Symbol - * is accessed only once during class loading, and after that, the unique symbol is in the statically - * initialized call site returned by invokedynamic. Hence, it is cheap to both reach the unique symbol - * and do equality checks on it. - * - * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler - * have little in common. 
- */ - case Apply(fn @ Select(qual, _), (arg @ Literal(Constant(symname: String))) :: Nil) - if treeInfo.isQualifierSafeToElide(qual) && fn.symbol == Symbol_apply && !currentClass.isTrait => - - super.transform(treeCopy.ApplyDynamic(tree, atPos(fn.pos)(Ident(SymbolLiteral_dummy).setType(SymbolLiteral_dummy.info)), LIT(SymbolLiteral_bootstrap) :: arg :: Nil)) + labels += LabelDef(defaultCase, Nil, defaultCaseBody) + labels += LabelDef(matchEnd, matchEnd.info.params, matchEnd.info.params.headOption.fold(UNIT: Tree)(REF)) + + val stats = Match(newSel, newCases :+ (DEFAULT ==> goto(defaultCase))) :: labels.toList + + val res = Block(stats: _*) + typedWithPos(sw.pos)(res) + } + } + + // transform scrutinee of all matches to switchable types (ints, strings) + def transformSwitch(sw: Match): Tree = { + sw.selector.tpe.widen match { + case IntTpe => sw // can switch directly on ints + case StringTpe => transformStringSwitch(sw) + case _ => globalError(s"unhandled switch scrutinee type ${sw.selector.tpe}: $sw"); sw + } + } + + def transformApply(tree: Apply, fun: Tree, args: List[Tree]): Tree = tree match { + case Apply(Select(qual, nm), Nil) if nm == nme.Nil && qual.symbol == ScalaPackageObject => + typedWithPos(tree.pos)(gen.mkNil) // Drop the TypeApply, which was used in Erasure to make `synchronized { ... } ` erase like `...` // (and to avoid boxing the argument to the polymorphic `synchronized` method). - case app@Apply(TypeApply(fun, _), args) if fun.symbol == Object_synchronized => - super.transform(treeCopy.Apply(app, fun, args)) + case Apply(TypeApply(sync, _), _) if sync.symbol == Object_synchronized => + treeCopy.Apply(tree, sync, args).transform(this) - // Replaces `Array(Predef.wrapArray(ArrayValue(...).$asInstanceOf[...]), )` + // Replaces `Array(.wrapArray(ArrayValue(...).$asInstanceOf[...]), )` // with just `ArrayValue(...).$asInstanceOf[...]` // // See scala/bug#6611; we must *only* do this for literal vararg arrays. 
- case Apply(appMeth, Apply(wrapRefArrayMeth, (arg @ StripCast(ArrayValue(elemtpt, elems))) :: Nil) :: classTagEvidence :: Nil) - if (wrapRefArrayMeth.symbol == currentRun.runDefinitions.Predef_genericWrapRefArray || wrapRefArrayMeth.symbol == currentRun.runDefinitions.Predef_wrapRefArray) && appMeth.symbol == ArrayModule_genericApply && - !elemtpt.tpe.typeSymbol.isBottomClass && !elemtpt.tpe.typeSymbol.isPrimitiveValueClass /* can happen via specialization.*/ => + case Apply(appMeth @ Select(appMethQual, _), Apply(wrapRefArrayMeth, (arg @ StripCast(ArrayValue(elemtpt, elems))) :: Nil) :: classTagEvidence :: Nil) + if (wrapRefArrayMeth.symbol == currentRun.runDefinitions.wrapVarargsRefArrayMethod || wrapRefArrayMeth.symbol == currentRun.runDefinitions.genericWrapVarargsRefArrayMethod) && + appMeth.symbol == ArrayModule_genericApply && treeInfo.isQualifierSafeToElide(appMethQual) && + !elemtpt.tpe.typeSymbol.isBottomClass && !elemtpt.tpe.typeSymbol.isPrimitiveValueClass /* can happen via specialization.*/ + => classTagEvidence.attachments.get[analyzer.MacroExpansionAttachment] match { case Some(att) if att.expandee.symbol.name == nme.materializeClassTag && tree.isInstanceOf[ApplyToImplicitArgs] => super.transform(arg) - case _ => - localTyper.typedPos(tree.pos) { - gen.evalOnce(classTagEvidence, currentOwner, unit) { ev => - val arr = localTyper.typedPos(tree.pos)(gen.mkMethodCall(classTagEvidence, definitions.ClassTagClass.info.decl(nme.newArray), Nil, Literal(Constant(elems.size)) :: Nil)) + case _ => + typedWithPos(tree.pos) { + gen.evalOnce(classTagEvidence, currentOwner, unit) { _ => + val arr = typedWithPos(tree.pos)(gen.mkMethodCall(classTagEvidence, definitions.ClassTagClass.info.decl(nme.newArray), Nil, Literal(Constant(elems.size)) :: Nil)) gen.evalOnce(arr, currentOwner, unit) { arr => - val stats = mutable.ListBuffer[Tree]() + val stats = ListBuffer[Tree]() foreachWithIndex(elems) { (elem, i) => stats += 
gen.mkMethodCall(gen.mkAttributedRef(definitions.ScalaRunTimeModule), currentRun.runDefinitions.arrayUpdateMethod, Nil, arr() :: Literal(Constant(i)) :: elem :: Nil) @@ -507,24 +533,37 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } } } - case Apply(appMeth, elem0 :: Apply(wrapArrayMeth, (rest @ ArrayValue(elemtpt, _)) :: Nil) :: Nil) - if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => - super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems)) - - // List(a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) - // Seq(a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) - case Apply(Select(appQual, nme.apply), List(Apply(wrapArrayMeth, List(StripCast(rest @ ArrayValue(_, _)))))) - if wrapArrayMeth.symbol == currentRun.runDefinitions.Predef_wrapRefArray && { - val sym = appQual.symbol - sym == ListModule || sym == SeqModule || sym == ISeqModule - } && rest.elems.length < transformListApplyLimit => - val consed = rest.elems.reverse.foldLeft(gen.mkAttributedRef(NilModule): Tree)( - (acc, elem) => New(ConsClass, elem, acc) - ) + + case Apply(appMeth @ Select(appMethQual, _), elem0 :: Apply(wrapArrayMeth, (rest @ ArrayValue(elemtpt, _)) :: Nil) :: Nil) + if wrapArrayMeth.symbol == wrapVarargsArrayMethod(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) && treeInfo.isQualifierSafeToElide(appMethQual) => + treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems).transform(this) + + // See scala/bug#12201, should be rewritten as Primitive Array. 
+ // Match Array + case Apply(appMeth @ Select(appMethQual, _), Apply(wrapRefArrayMeth, StripCast(ArrayValue(elemtpt, elems)) :: Nil) :: _ :: Nil) + if appMeth.symbol == ArrayModule_genericApply && treeInfo.isQualifierSafeToElide(appMethQual) && currentRun.runDefinitions.primitiveWrapArrayMethod.contains(wrapRefArrayMeth.symbol) => + typedWithPos(elemtpt.pos)( + ArrayValue(TypeTree(elemtpt.tpe), elems) + ).transform(this) + + case Apply(appMeth @ Select(appMethQual, _), elem :: (nil: RefTree) :: Nil) + if nil.symbol == NilModule && appMeth.symbol == ArrayModule_apply(elem.tpe.widen) && treeInfo.isExprSafeToInline(nil) && treeInfo.isQualifierSafeToElide(appMethQual) => + typedWithPos(elem.pos)( + ArrayValue(TypeTree(elem.tpe), elem :: Nil) + ).transform(this) + + // (a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) but only for reference types + case StripCast(Apply(appMeth @ Select(_, _), List(Apply(wrapArrayMeth, List(StripCast(rest @ ArrayValue(elemtpt, _))))))) + if wrapArrayMeth.symbol == currentRun.runDefinitions.wrapVarargsRefArrayMethod + && currentRun.runDefinitions.isSeqApply(appMeth) // includes List + && rest.elems.lengthIs < transformListApplyLimit + => + val consed = rest.elems.foldRight(gen.mkAttributedRef(NilModule): Tree)(New(ConsClass, _, _)) // Limiting extra stack frames consumed by generated code reducingTransformListApply(rest.elems.length) { - super.transform(localTyper.typedPos(tree.pos)(consed)) + super.transform(typedWithPos(tree.pos)(consed)) } + //methods on Double //new Predef.doubleToDouble(x).isNaN() -> java.lang.Double.isNaN(x) //new Predef.doubleToDouble(x).isInfinite() -> java.lang.Double.isInfinity(x) @@ -551,10 +590,11 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { // boolean2Boolean // case Apply(Select(Apply(boxing @ Select(qual, _), params), methodName), Nil) - if currentRun.runDefinitions.PreDef_primitives2Primitives.contains(boxing.symbol) && + if 
currentRun.runDefinitions.PreDef_primitives2Primitives.contains(boxing.symbol) && params.size == 1 && allPrimitiveMethodsToRewrite.contains(methodName) && - treeInfo.isExprSafeToInline(qual) => + treeInfo.isExprSafeToInline(qual) + => val newTree = if (doubleAndFloatRedirectMethods.contains(methodName)) { val cls = @@ -567,7 +607,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } else { gen.mkMethodCall(Select(params.head, javaNumberConversions(methodName)), Nil) } - super.transform(localTyper.typedPos(tree.pos)(newTree)) + super.transform(typedWithPos(tree.pos)(newTree)) //(x:Int).hashCode is transformed to scala.Int.box(x).hashCode() //(x:Int).toString is transformed to scala.Int.box(x).toString() @@ -577,7 +617,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { // scala.Int.box(x).toString() -> java.lang.Integer.toString(x) // similarly for all primitive types case Apply(Select(Apply(box @ Select(boxer, _), params), methodName), Nil) - if objectMethods.contains(methodName) && + if objectMethods.contains(methodName) && params.size == 1 && currentRun.runDefinitions.isBox(box.symbol) && treeInfo.isExprSafeToInline(boxer) @@ -585,29 +625,114 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { val target = boxedClass(boxer.symbol.companion) val targetMethod = target.companionModule.info.decl(methodName) val newTree = gen.mkMethodCall(targetMethod, params) - super.transform(localTyper.typedPos(tree.pos)(newTree)) + super.transform(typedWithPos(tree.pos)(newTree)) // Seq() ~> Nil (note: List() ~> Nil is rewritten in the Typer) - case Apply(Select(appQual, nme.apply), List(nil)) - if nil.symbol == NilModule && { - val sym = appQual.symbol - sym == ListModule || sym == SeqModule || sym == ISeqModule - } => + case Apply(Select(_, _), List(nil)) + if currentRun.runDefinitions.isNil(nil.symbol) && currentRun.runDefinitions.isSeqApply(fun) => gen.mkAttributedRef(NilModule) - // Seq.empty ~> Nil - 
case Apply(Select(appQual, nme.empty), Nil) - if { - val sym = appQual.symbol - sym == SeqModule || sym == ISeqModule - } => - gen.mkAttributedRef(NilModule) + /* This transformation should identify Scala symbol invocations in the tree and replace them + * with references to a statically cached instance. + * + * The reasoning behind this transformation is the following. Symbols get interned - they are stored + * in a global map which is protected with a lock. The reason for this is making equality checks + * quicker. But calling Symbol.apply, although it does return a unique symbol, accesses a locked object, + * making symbol access slow. To solve this, the unique symbol from the global symbol map in Symbol + * is accessed only once during class loading, and after that, the unique symbol is in the statically + * initialized call site returned by invokedynamic. Hence, it is cheap to both reach the unique symbol + * and do equality checks on it. + * + * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler + * have little in common. + */ + case Apply(Select(qual, _), (arg @ Literal(Constant(_: String))) :: Nil) + if fun.symbol == Symbol_apply && !currentClass.isTrait && treeInfo.isQualifierSafeToElide(qual) => + + treeCopy.ApplyDynamic(tree, atPos(fun.pos)(Ident(SymbolLiteral_dummy).setType(SymbolLiteral_dummy.info)), LIT(SymbolLiteral_bootstrap) :: arg :: Nil).transform(this) + + case _ => + super.transform(tree) + } + + override def transform(tree: Tree): Tree = tree match { + case _: ClassDef if genBCode.codeGen.CodeGenImpl.isJavaEntryPoint(tree.symbol, currentUnit, settings.mainClass.valueSetByUser.map(_.toString)) => + // collecting symbols for entry points here (as opposed to GenBCode where they are used) + // has the advantage of saving an additional pass over all ClassDefs. + entryPoints += tree.symbol + tree.transform(this) + + /* Transforms dynamic calls (i.e. 
calls to methods that are undefined + * in the erased type space) to -- dynamically -- unsafe calls using + * reflection. This is used for structural sub-typing of refinement + * types, but may be used for other dynamic calls in the future. + * For 'a.f(b)' it will generate something like: + * 'a.getClass(). + * ' getMethod("f", Array(classOf[b.type])). + * ' invoke(a, Array(b)) + * plus all the necessary casting/boxing/etc. machinery required + * for type-compatibility (see fixResult). + * + * USAGE CONTRACT: + * There are a number of assumptions made on the way a dynamic apply + * is used. Assumptions relative to type are handled by the erasure + * phase. + * - The applied arguments are compatible with AnyRef, which means + * that an argument tree typed as AnyVal has already been extended + * with the necessary boxing calls. This implies that passed + * arguments might not be strictly compatible with the method's + * parameter types (a boxed integer while int is expected). + * - The expected return type is an AnyRef, even when the method's + * return type is an AnyVal. This means that the tree containing the + * call has already been extended with the necessary unboxing calls + * (or is happy with the boxed type). + * - The type-checker has prevented dynamic applies on methods which + * parameter's erased types are not statically known at the call site. + * This is necessary to allow dispatching the call to the correct + * method (dispatching on parameters is static in Scala). In practice, + * this limitation only arises when the called method is defined as a + * refinement, where the refinement defines a parameter based on a + * type variable. */ + + case tree: ApplyDynamic if tree.symbol.owner.isRefinementClass => + transformApplyDynamic(tree) + + /* Some cleanup transformations add members to templates (classes, traits, etc). + * When inside a template (i.e. 
the body of one of its members), two maps + * (newStaticMembers and newStaticInits) are available in the tree transformer. Any mapping from + * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newStaticMembers once the + * transformation of the template is finished will be added as a member to the + * template. Any mapping from a symbol to a tree that is in newStaticInits, will be added + * as a statement of the form "symbol = tree" to the beginning of the default + * constructor. */ + case Template(parents, self, body) => + localTyper = typer.atOwner(tree, currentClass) + transformTemplate(tree) + + case Literal(c) if c.tag == ClazzTag => + val tpe = c.typeValue + typedWithPos(tree.pos) { + if (isPrimitiveValueClass(tpe.typeSymbol)) { + if (tpe.typeSymbol == UnitClass) + REF(BoxedUnit_TYPE) + else + Select(REF(boxedModule(tpe.typeSymbol)), nme.TYPE_) + } + + else tree + } + + case t @ Apply(fun, args) => + transformApply(t, fun, args) + + case switch: Match => + super.transform(transformSwitch(switch)) case _ => super.transform(tree) } - } // CleanUpTransformer + } // end CleanUpTransformer private val objectMethods = Map[Name, TermName]( diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index f13dc73c19e3..cde7623ec881 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,8 @@ package scala.tools.nsc package transform -import scala.collection.mutable +import scala.annotation._ +import scala.collection.mutable, mutable.ListBuffer import scala.reflect.internal.util.ListOfNil import scala.tools.nsc.Reporting.WarningCategory import symtab.Flags._ @@ -28,8 +29,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme /** the following two members override abstract members in Transform */ val phaseName: String = "constructors" - protected def newTransformer(unit: CompilationUnit): Transformer = - new ConstructorTransformer(unit) + protected def newTransformer(unit: CompilationUnit): AstTransformer = new ConstructorTransformer(unit) private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]() private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]() @@ -39,7 +39,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme * Inspect for obvious out-of-order initialization; concrete, eager vals or vars, declared in this class, * for which a reference to the member precedes its definition. 
*/ - private def checkUninitializedReads(cd: ClassDef) { + private def checkUninitializedReads(cd: ClassDef): Unit = { val stats = cd.impl.body val clazz = cd.symbol @@ -81,10 +81,10 @@ abstract class Constructors extends Statics with Transform with TypingTransforme override def transform(tree: Tree): Tree = { tree match { - case cd @ ClassDef(mods0, name0, tparams0, impl0) if !isPrimitiveValueClass(cd.symbol) && cd.symbol.primaryConstructor != NoSymbol => - if(cd.symbol eq AnyValClass) { + case cd @ ClassDef(mods0, name0, tparams0, impl0) + if !isPrimitiveValueClass(cd.symbol) && cd.symbol.primaryConstructor != NoSymbol => + if (cd.symbol eq AnyValClass) cd - } else { checkUninitializedReads(cd) val tplTransformer = new TemplateTransformer(unit, impl0) @@ -154,11 +154,11 @@ abstract class Constructors extends Statics with Transform with TypingTransforme * Finally, the whole affair of eliding is avoided for DelayedInit subclasses, * given that for them usually nothing gets elided anyway. * That's a consequence from re-locating the post-super-calls statements from their original location - * (the primary constructor) into a dedicated synthetic method that an anon-closure may invoke, as required by DelayedInit. - * + * (the primary constructor) into a dedicated synthetic method that an anon-closure may invoke, + * as required by DelayedInit. 
*/ private trait OmittablesHelper { - def computeOmittableAccessors(clazz: Symbol, defs: List[Tree], auxConstructors: List[Tree], constructor: List[Tree]): Set[Symbol] = { + def computeOmittableAccessors(clazz: Symbol, defs: List[Tree], auxConstructors: List[Tree], @unused constructor: List[Tree]): Set[Symbol] = { val decls = clazz.info.decls.toSet val isEffectivelyFinal = clazz.isEffectivelyFinal @@ -177,7 +177,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme val omittables = mutable.Set.empty[Symbol] ++ (decls filter (sym => omittableParamAcc(sym) || omittableOuterAcc(sym))) // the closure only captures isEffectivelyFinal // no point traversing further once omittables is empty, all candidates ruled out already. - object detectUsages extends Traverser { + object detectUsages extends InternalTraverser { lazy val bodyOfOuterAccessor = defs.collect{ case dd: DefDef if omittableOuterAcc(dd.symbol) => dd.symbol -> dd.rhs }.toMap override def traverse(tree: Tree): Unit = @@ -187,8 +187,8 @@ abstract class Constructors extends Statics with Transform with TypingTransforme case _: DefDef if (sym.owner eq clazz) && omittableOuterAcc(sym) => // don't mark as "needed" the field supporting this outer-accessor (not just yet) case _: Select if omittables(sym) => omittables -= sym // mark usage bodyOfOuterAccessor get sym foreach traverse // recurse to mark as needed the field supporting the outer-accessor-method - super.traverse(tree) - case _ => super.traverse(tree) + tree.traverse(this) + case _ => tree.traverse(this) } } } @@ -250,7 +250,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme * * @return the DefDef for (c) above * - * */ + */ private trait DelayedInitHelper extends ConstructorTransformerBase { private def delayedEndpointDef(stats: List[Tree]): DefDef = { val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$") @@ -272,16 +272,10 @@ abstract 
class Constructors extends Statics with Transform with TypingTransforme closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass) - val outerField: TermSymbol = ( - closureClass - newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR) - setInfoAndEnter clazz.tpe - ) - val applyMethod: MethodSymbol = ( - closureClass - newMethod(nme.apply, impl.pos, FINAL) - setInfoAndEnter MethodType(Nil, ObjectTpe) - ) + val outerField: TermSymbol = + closureClass.newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR) setInfoAndEnter clazz.tpe + val applyMethod: MethodSymbol = + closureClass.newMethod(nme.apply, impl.pos, FINAL) setInfoAndEnter MethodType(Nil, ObjectTpe) val outerFieldDef = ValDef(outerField) val closureClassTyper = localTyper.atOwner(closureClass) val applyMethodTyper = closureClassTyper.atOwner(applyMethod) @@ -312,7 +306,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme } /** For a DelayedInit subclass, wrap remainingConstrStats into a DelayedInit closure. */ - def delayedInitDefsAndConstrStats(defs: List[Tree], remainingConstrStats: List[Tree]): (List[Tree], List[Tree]) = { + def delayedInitDefsAndConstrStats(@unused defs: List[Tree], remainingConstrStats: List[Tree]): (List[Tree], List[Tree]) = { val delayedHook = delayedEndpointDef(remainingConstrStats) val delayedHookSym = delayedHook.symbol.asInstanceOf[MethodSymbol] @@ -342,7 +336,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme * `specializedStats` are replaced by the specialized assignment. 
*/ private def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = { - val specBuf = new mutable.ListBuffer[Tree] + val specBuf = ListBuffer.empty[Tree] specBuf ++= specializedStats def specializedAssignFor(sym: Symbol): Option[Tree] = @@ -359,9 +353,9 @@ abstract class Constructors extends Statics with Transform with TypingTransforme */ def rewriteArrayUpdate(tree: Tree): Tree = { val arrayUpdateMethod = currentRun.runDefinitions.arrayUpdateMethod - val adapter = new Transformer { + val adapter = new AstTransformer { override def transform(t: Tree): Tree = t match { - case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod => + case Apply(fun @ Select(_, _), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod => localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v))) case _ => super.transform(t) } @@ -369,39 +363,41 @@ abstract class Constructors extends Statics with Transform with TypingTransforme adapter.transform(tree) } + def rewriteUnspecialized(assignee: Symbol, stat: Tree): Tree = { + assert(ctorParams(genericClazz).length == primaryConstrParams.length, "Bad param len") + // this is just to make private fields public + (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, addressFields = true))(stat) + // also make assigned fields mutable so they don't end up final in bytecode + // and mark the specialized class constructor for a release fence addition + if (assignee.isField) + assignee.setFlag(MUTABLE) + + val rewritten = rewriteArrayUpdate(stat) + // statements coming from the original class need retyping in the current context + debuglog("retyping " + rewritten) + val duplicator = new specializeTypes.Duplicator(Map.empty) + val context = localTyper.context1.asInstanceOf[duplicator.Context] + duplicator.retyped(context, rewritten, genericClazz, clazz, Map.empty) + } + log("merging: " + 
originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n")) - for (s <- originalStats; stat = s.duplicate) yield { + for (stat <- originalStats) yield { log("merge: looking at " + stat) - val stat1 = stat match { - case Assign(sel @ Select(This(_), field), _) => - specializedAssignFor(sel.symbol).getOrElse(stat) - case _ => stat - } - if (stat1 ne stat) { - log("replaced " + stat + " with " + stat1) - specBuf -= stat1 + stat.duplicate match { + case assign @ Assign(select @ Select(This(_), _), _) => + val assignee = select.symbol + specializedAssignFor(assignee) match { + case Some(specialized) => + log("replaced " + assign + " with " + specialized) + specBuf -= specialized + specialized + case None => + rewriteUnspecialized(assignee, assign) + } + case other => + rewriteUnspecialized(NoSymbol, other) } - - if (stat1 eq stat) { - assert(ctorParams(genericClazz).length == primaryConstrParams.length) - // this is just to make private fields public - (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat1) - - val stat2 = rewriteArrayUpdate(stat1) - // statements coming from the original class need retyping in the current context - debuglog("retyping " + stat2) - - val d = new specializeTypes.Duplicator(Map[Symbol, Type]()) - d.retyped(localTyper.context1.asInstanceOf[d.Context], - stat2, - genericClazz, - clazz, - Map.empty) - } else - stat1 } -// if (specBuf.nonEmpty) -// println("residual specialized constructor statements: " + specBuf) } /* Add an 'if' around the statements coming after the super constructor. 
This @@ -462,19 +458,20 @@ abstract class Constructors extends Statics with Transform with TypingTransforme { protected def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree) - val clazz = impl.symbol.owner // the transformed class - - val isDelayedInitSubclass = clazz isSubClass DelayedInitClass - - private val stats = impl.body // the transformed template body + override val clazz = impl.symbol.owner // the transformed class + private val stats = impl.body // the transformed template body + private val isDelayedInitSubclass = clazz isSubClass DelayedInitClass // find and dissect primary constructor - private val (primaryConstr, _primaryConstrParams, primaryConstrBody) = stats collectFirst { - case dd@DefDef(_, _, _, vps :: Nil, _, rhs: Block) if dd.symbol.isPrimaryConstructor => (dd, vps map (_.symbol), rhs) - } getOrElse { - abort("no constructor in template: impl = " + impl) - } - + private val (primaryConstr, _primaryConstrParams, primaryConstrBody) = + stats.collectFirst { + case dd @ DefDef(_, _, _, vps :: Nil, _, rhs: Block) + if dd.symbol.isPrimaryConstructor => + (dd, vps.map(_.symbol), rhs) + } + .getOrElse { + abort("no constructor in template: impl = " + impl) + } def primaryConstrParams = _primaryConstrParams def usesSpecializedField = intoConstructor.usesSpecializedField @@ -482,9 +479,9 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // The constructor parameter corresponding to an accessor def parameter(acc: Symbol): Symbol = { //works around the edge case where unexpandedName over-unexpands shenanigans like literal $$ or `$#` - def unexpanded = parameterNamed(acc.unexpandedName.getterName) + val unexpanded = parameterNamed(acc.unexpandedName.getterName) def expanded = parameterNamed(acc.getterName) - (if (unexpanded.isRight) unexpanded else expanded).swap.map(abort).merge + unexpanded.orElse(expanded).swap.map(abort).merge } // The constructor parameter with given getter name. 
This means the parameter name @@ -497,16 +494,15 @@ abstract class Constructors extends Statics with Transform with TypingTransforme } // A transformer for expressions that go into the constructor - object intoConstructor extends Transformer { - /* - * `usesSpecializedField` makes a difference in deciding whether constructor-statements - * should be guarded in a `guardSpecializedFieldInit` class, ie in a class that's the generic super-class of - * one or more specialized sub-classes. - * - * Given that `usesSpecializedField` isn't read for any other purpose than the one described above, - * we skip setting `usesSpecializedField` in case the current class isn't `guardSpecializedFieldInit` to start with. - * That way, trips to a map in `specializeTypes` are saved. - */ + object intoConstructor extends AstTransformer { + /* `usesSpecializedField` makes a difference in deciding whether constructor-statements + * should be guarded in a `guardSpecializedFieldInit` class, ie in a class that's the generic super-class of + * one or more specialized sub-classes. + * + * Given that `usesSpecializedField` isn't read for any other purpose than the one described above, + * we skip setting `usesSpecializedField` in case the current class isn't `guardSpecializedFieldInit` + * to start with. That way, trips to a map in `specializeTypes` are saved. 
+ */ var usesSpecializedField: Boolean = false private def isParamRef(sym: Symbol) = sym.isParamAccessor && sym.owner == clazz @@ -519,8 +515,9 @@ abstract class Constructors extends Statics with Transform with TypingTransforme !sym.isVariable ) - /* - * whether `sym` denotes a param-accessor (ie in a class a PARAMACCESSOR field, or in a trait a method with same flag) + /* whether `sym` denotes a param-accessor + * (ie in a class a PARAMACCESSOR field, or in a trait a method with same flag) + * * that fulfills all of: * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and * (b) isn't subject to specialization. We might be processing statements for: @@ -535,7 +532,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // references to parameter accessor methods of own class become references to parameters // outer accessors become references to $outer parameter // println(s"to param ref in $clazz for ${tree.symbol} ${tree.symbol.debugFlagString} / ${tree.symbol.outerSource} / ${canBeSupplanted(tree.symbol)}") - if (clazz.isTrait && !(tree.symbol hasAllFlags (ACCESSOR | PARAMACCESSOR))) + if (clazz.isTrait && !tree.symbol.hasAllFlags(ACCESSOR | PARAMACCESSOR)) super.transform(tree) else if (canBeSupplanted(tree.symbol)) gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos @@ -598,7 +595,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme * - `classInitStats`: statements that go into the class initializer */ class Triage { - private val defBuf, auxConstructorBuf, constrPrefixBuf, constrStatBuf, classInitStatBuf = new mutable.ListBuffer[Tree] + private val defBuf, auxConstructorBuf, constrPrefixBuf, constrStatBuf, classInitStatBuf = ListBuffer.empty[Tree] triage() @@ -610,7 +607,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme private def triage() = { // Constant typed vals are not memoized. 
- def memoizeValue(sym: Symbol) = !sym.info.resultType.isInstanceOf[ConstantType] + def memoizeValue(sym: Symbol) = enteringErasure(!sym.info.resultType.isInstanceOf[FoldableConstantType]) // The early initialized field definitions of the class (these are the class members) val presupers = treeInfo.preSuperFields(stats) @@ -621,8 +618,15 @@ abstract class Constructors extends Statics with Transform with TypingTransforme stat match { case ValDef(mods, name, _, _) if mods.hasFlag(PRESUPER) => // TODO trait presupers // stat is the constructor-local definition of the field value - val fields = presupers filter (_.getterName == name) - assert(fields.length == 1, s"expected exactly one field by name $name in $presupers of $clazz's early initializers") + val fields = presupers.filter { v => + val nm = + if (v.symbol.isPrivateLocal && v.symbol.hasFlag(EXPANDEDNAME)) + v.symbol.unexpandedName.dropLocal + else + v.getterName + nm == name + } + assert(fields.length == 1, s"expected exactly one field by name $name in $presupers of $clazz's early initializers but saw $fields") val to = fields.head.symbol if (memoizeValue(to)) constrStatBuf += mkAssign(to, Ident(stat.symbol)) @@ -673,7 +677,8 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // - the constructor, before the super call (early initialized or a parameter accessor), // - the constructor, after the super call (regular val). 
case vd: ValDef => - if (vd.rhs eq EmptyTree) { defBuf += vd } + if (vd.rhs eq EmptyTree) + defBuf += vd else { val emitField = memoizeValue(statSym) @@ -685,14 +690,22 @@ abstract class Constructors extends Statics with Transform with TypingTransforme case dd: DefDef => // either move the RHS to ctor (for getter of stored field) or just drop it (for corresponding setter) - def shouldMoveRHS = - clazz.isTrait && statSym.isAccessor && !statSym.isLazy && !statSym.isSpecialized && (statSym.isSetter || memoizeValue(statSym)) - - if ((dd.rhs eq EmptyTree) || !shouldMoveRHS) { defBuf += dd } - else { - if (statSym.isGetter) moveEffectToCtor(dd.mods, dd.rhs, statSym.asTerm.referenced orElse statSym.setterIn(clazz)) - defBuf += deriveDefDef(stat)(_ => EmptyTree) - } + def shouldMoveRHS = ( + (dd.rhs ne EmptyTree) + && clazz.isTrait + && statSym.isAccessor + && !statSym.isLazy + && !statSym.isSpecialized + && (statSym.isSetter || memoizeValue(statSym)) + ) + val toMove = + if (shouldMoveRHS) { + if (statSym.isGetter) + moveEffectToCtor(dd.mods, dd.rhs, statSym.asTerm.referenced.orElse(statSym.setterIn(clazz))) + deriveDefDef(stat)(_ => EmptyTree) + } + else dd + defBuf += toMove // all other statements go into the constructor case _ => @@ -721,19 +734,22 @@ abstract class Constructors extends Statics with Transform with TypingTransforme else clazz.constrParamAccessors // Initialize all parameters fields that must be kept. - val paramInits = paramAccessors filterNot omittableSym map { acc => + val paramInits = paramAccessors.filterNot(omittableSym).map { acc => // Check for conflicting field mixed in for a val/var defined in a parent trait (neg/t1960.scala). // Since the fields phase has already mixed in fields, we can just look for // an existing decl with the local variant of our paramaccessor's name. 
// - // TODO: mangle the constructor parameter name (it can only be used internally), though we probably first need more robust name mangling + // TODO: mangle the constructor parameter name (it can only be used internally), + // though we probably first need more robust name mangling // sometimes acc is a field with a local name (when it's a val/var constructor param) --> exclude the `acc` itself when looking for conflicting decl // sometimes it's not (just a constructor param) --> any conflicting decl is a problem val conflict = clazz.info.decl(acc.name.localName).filter(sym => sym ne acc) if (conflict ne NoSymbol) { val orig = exitingTyper(clazz.info.nonPrivateMember(acc.name).filter(_ hasFlag ACCESSOR)) - reporter.error(acc.pos, s"parameter '${acc.name}' requires field but conflicts with ${(orig orElse conflict).fullLocationString}") + reporter.error(acc.pos, s"parameter '${acc.name}' requires field but conflicts with ${ + orig.orElse(conflict).fullLocationString + }") } val accSetter = @@ -743,18 +759,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme copyParam(accSetter, parameter(acc)) } - // Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) - def splitAtSuper(stats: List[Tree]) = { - def isConstr(tree: Tree): Boolean = tree match { - case Block(_, expr) => isConstr(expr) // scala/bug#6481 account for named argument blocks - case _ => (tree.symbol ne null) && tree.symbol.isConstructor - } - val (pre, rest0) = stats span (!isConstr(_)) - val (supercalls, rest) = rest0 span (isConstr(_)) - (pre ::: supercalls, rest) - } - - val (uptoSuperStats, remainingConstrStats) = splitAtSuper(constructorStats) + val (uptoSuperStats, remainingConstrStats) = treeInfo.splitAtSuper(constructorStats, classOnly = false) /* TODO: XXX This condition (`isDelayedInitSubclass && remainingConstrStats.nonEmpty`) is not correct: * remainingConstrStats.nonEmpty excludes too much, @@ -764,25 +769,38 
@@ abstract class Constructors extends Statics with Transform with TypingTransforme * See test case files/run/bug4680.scala, the output of which is wrong in many * particulars. */ + var needFenceForDelayedInit = false val (delayedHookDefs, remainingConstrStatsDelayedInit) = if (isDelayedInitSubclass && remainingConstrStats.nonEmpty) { remainingConstrStats foreach { - case Assign(lhs, _ ) => lhs.symbol.setFlag(MUTABLE) // delayed init fields cannot be final, scala/bug#11412 + case Assign(lhs, _ ) => + lhs.symbol.setFlag(MUTABLE) // delayed init fields cannot be final, scala/bug#11412 + needFenceForDelayedInit = true case _ => } delayedInitDefsAndConstrStats(defs, remainingConstrStats) } else (Nil, remainingConstrStats) + val specializedStats = guardSpecializedInitializer(remainingConstrStatsDelayedInit) + val fence = if (needFenceForDelayedInit || clazz.primaryConstructor.hasAttachment[ConstructorNeedsFence.type]) { + val tree = localTyper.typedPos(clazz.primaryConstructor.pos)(gen.mkMethodCall(RuntimeStaticsModule, nme.releaseFence, Nil)) + tree :: Nil + } else Nil + // Assemble final constructor - val primaryConstructor = deriveDefDef(primaryConstr)(_ => { + val primaryConstructor = deriveDefDef(primaryConstr) { _ => treeCopy.Block( primaryConstrBody, - paramInits ::: constructorPrefix ::: uptoSuperStats ::: guardSpecializedInitializer(remainingConstrStatsDelayedInit), - primaryConstrBody.expr) - }) + paramInits ::: constructorPrefix ::: uptoSuperStats ::: specializedStats ::: fence, + primaryConstrBody.expr + ) + } - if ((exitingPickler(clazz.isAnonymousClass) || clazz.originalOwner.isTerm) && omittableAccessor.exists(_.isOuterField) && !constructorStats.exists(_.exists { case i: Ident if i.symbol.isOuterParam => true; case _ => false})) + if ((exitingPickler(clazz.isAnonymousClass) || clazz.originalOwner.isTerm) + && omittableAccessor.exists(_.isOuterField) + && !constructorStats.exists(_.exists { case i: Ident if i.symbol.isOuterParam => true case _ => false }) 
+ ) primaryConstructor.symbol.updateAttachment(OuterArgCanBeElided) val constructors = primaryConstructor :: auxConstructors @@ -794,10 +812,10 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // Eliminate all field/accessor definitions that can be dropped from template // We never eliminate delayed hooks or the constructors, so, only filter `defs`. - val prunedStats = (defs filterNot omittableStat) ::: delayedHookDefs ::: constructors + val prunedStats = defs.filterNot(omittableStat) ::: delayedHookDefs ::: constructors val statsWithInitChecks = - if (settings.checkInit) { + if (settings.checkInit.value) { val addChecks = new SynthInitCheckedAccessorsIn(currentOwner) prunedStats mapConserve { case dd: DefDef if addChecks.needsWrapping(dd) => deriveDefDef(dd)(addChecks.wrapRhsWithInitChecks(dd.symbol)) diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 8c0c4e6854dc..59daa51171ae 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,8 @@ package transform import symtab._ import Flags._ -import scala.collection._ +import scala.annotation._ +import scala.collection.mutable /** * This transformer is responsible for preparing Function nodes for runtime, @@ -40,12 +41,11 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre /** the following two members override abstract members in Transform */ val phaseName: String = "delambdafy" - final case class LambdaMetaFactoryCapable(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol, bridges: List[Symbol], isSerializable: Boolean, addScalaSerializableMarker: Boolean) + final case class LambdaMetaFactoryCapable(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol, bridges: List[Symbol], isSerializable: Boolean) - /** - * Get the symbol of the target lifted lambda body method from a function. I.e. if - * the function is {args => anonfun(args)} then this method returns anonfun's symbol - */ + /** Get the symbol of the target lifted lambda body method from a function. I.e. 
if + * the function is {args => anonfun(args)} then this method returns anonfun's symbol + */ private def targetMethod(fun: Function): Symbol = fun match { case Function(_, Apply(target, _)) => target.symbol case _ => @@ -54,7 +54,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = { - if (settings.Ydelambdafy.value != "inline") new Phase(prev) + if (settings.Ydelambdafy.value == "method") new Phase(prev) else new SkipPhase(prev) } @@ -62,13 +62,13 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre def apply(unit: global.CompilationUnit): Unit = () } - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new DelambdafyTransformer(unit) class DelambdafyTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { // we need to know which methods refer to the 'this' reference so that we can determine which lambdas need access to it // TODO: this looks expensive, so I made it a lazy val. Can we make it more pay-as-you-go / optimize for common shapes? 
- private[this] lazy val methodReferencesThis: Set[Symbol] = + private[this] lazy val methodReferencesThis: collection.Set[Symbol] = (new ThisReferringMethodsTraverser).methodReferencesThisIn(unit.body) private def mkLambdaMetaFactoryCall(fun: Function, target: Symbol, functionalInterface: Symbol, samUserDefined: Symbol, userSamCls: Symbol, isSpecialized: Boolean): Tree = { @@ -110,8 +110,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // no need for adaptation when the implemented sam is of a specialized built-in function type val lambdaTarget = if (isSpecialized) target else createBoxingBridgeMethodIfNeeded(fun, target, functionalInterface, sam) - val isSerializable = samUserDefined == NoSymbol || functionalInterface.isNonBottomSubClass(definitions.JavaSerializableClass) - val addScalaSerializableMarker = samUserDefined == NoSymbol + val isSerializable = samUserDefined == NoSymbol || functionalInterface.isNonBottomSubClass(definitions.SerializableClass) val samBridges = logResultIf[List[Symbol]](s"will add SAM bridges for $fun", _.nonEmpty) { userSamCls.fold[List[Symbol]](Nil) { @@ -129,13 +128,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // see https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/LambdaMetafactory.html // instantiatedMethodType is derived from lambdaTarget's signature // samMethodType is derived from samOf(functionalInterface)'s signature - apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, fun.vparams.length, functionalInterface, sam, samBridges, isSerializable, addScalaSerializableMarker)) - - if (lambdaTarget != target) { - // A boxing bridge is needed, so the lambda isn't just a method reference :( - // Drop the annotation added in "pretransform" so that the backend doesn't drop it! 
- target.removeAttachment[JustMethodReference] - } + apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, fun.vparams.length, functionalInterface, sam, samBridges, isSerializable)) apply } @@ -155,7 +148,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } // determine which lambda target to use with java's LMF -- create a new one if scala-specific boxing is required - def createBoxingBridgeMethodIfNeeded(fun: Function, target: Symbol, functionalInterface: Symbol, sam: Symbol): Symbol = { + def createBoxingBridgeMethodIfNeeded(fun: Function, target: Symbol, @unused functionalInterface: Symbol, sam: Symbol): Symbol = { val oldClass = fun.symbol.enclClass val pos = fun.pos @@ -166,26 +159,28 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val samParamTypes = exitingErasure(sam.info.paramTypes) val samResultType = exitingErasure(sam.info.resultType) - /** How to satisfy the linking invariants of https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/LambdaMetafactory.html - * - * Given samMethodType: (U1..Un)Ru and function type T1,..., Tn => Rt (the target method created by uncurry) - * - * Do we need a bridge, or can we use the original lambda target for implMethod: ( A1..An)Ra - * (We can ignore capture here.) - * - * If, for i=1..N: - * Ai =:= Ui || (Ai <:< Ui <:< AnyRef) - * Ru =:= void || (Ra =:= Ru || (Ra <:< AnyRef, Ru <:< AnyRef)) - * - * We can use the target method as-is -- if not, we create a bridging one that uses the types closest - * to the target method that still meet the above requirements. - */ + /* How to satisfy the linking invariants of https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/LambdaMetafactory.html + * + * Given samMethodType: (U1..Un)Ru and function type T1,..., Tn => Rt (the target method created by uncurry) + * + * Do we need a bridge, or can we use the original lambda target for implMethod: ( A1..An)Ra + * (We can ignore capture here.) 
+ * + * If, for i=1..N: + * Ai =:= Ui || (Ai <:< Ui <:< AnyRef) + * Ru =:= void || (Ra =:= Ru || (Ra <:< AnyRef, Ru <:< AnyRef)) + * + * We can use the target method as-is -- if not, we create a bridging one that uses the types closest + * to the target method that still meet the above requirements. + */ val resTpOk = ( samResultType =:= UnitTpe || functionResultType =:= samResultType - || (isReferenceType(samResultType) && isReferenceType(functionResultType))) // yes, this is what the spec says -- no further correspondence required - if (resTpOk && (samParamTypes corresponds functionParamTypes){ (samParamTp, funParamTp) => - funParamTp =:= samParamTp || (isReferenceType(funParamTp) && isReferenceType(samParamTp) && funParamTp <:< samParamTp) }) target + || (isReferenceType(samResultType) && isReferenceType(functionResultType))) // per spec, no further correspondence required + def paramTpsOk = samParamTypes.corresponds(functionParamTypes)((samParamTp, funParamTp) => + funParamTp =:= samParamTp || + (isReferenceType(funParamTp) && isReferenceType(samParamTp) && funParamTp <:< samParamTp)) + if (resTpOk && paramTpsOk) target else { // We have to construct a new lambda target that bridges to the one created by uncurry. // The bridge must satisfy the above invariants, while also minimizing adaptation on our end. @@ -200,7 +195,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // which means the function's parameter -- even if it expects a value class -- will need to be // boxed on the generic call to the sam method. 
- val bridgeParamTypes = map2(samParamTypes, functionParamTypes){ (samParamTp, funParamTp) => + val bridgeParamTypes = map2(samParamTypes, functionParamTypes) { (samParamTp, funParamTp) => if (isReferenceType(samParamTp) && funParamTp <:< samParamTp) funParamTp else postErasure.elimErasedValueType(samParamTp) } @@ -249,9 +244,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre gen.mkMethodCall(Select(gen.mkAttributedThis(oldClass), target), capturedArgRefs ::: functionArgRefs) } + val forwarderResultType = + if (samResultType.isInstanceOf[ErasedValueType] && functionResultType.isInstanceOf[ErasedValueType]) bridgeResultType + else functionResultType val bridge = postErasure.newTransformer(unit).transform(DefDef(methSym, List(bridgeParams.map(ValDef(_))), - adaptToType(forwarderCall setType functionResultType, bridgeResultType))).asInstanceOf[DefDef] + adaptToType(forwarderCall.setType(forwarderResultType), bridgeResultType))).asInstanceOf[DefDef] boxingBridgeMethods += bridge bridge.symbol @@ -261,7 +259,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre private def transformFunction(originalFunction: Function): Tree = { val target = targetMethod(originalFunction) - assert(target.hasFlag(Flags.STATIC)) + assert(target.hasFlag(Flags.STATIC), "static") target.setFlag(notPRIVATE) val funSym = originalFunction.tpe.typeSymbolDirect @@ -300,35 +298,17 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre case Template(_, _, _) => def pretransform(tree: Tree): Tree = tree match { case dd: DefDef if dd.symbol.isDelambdafyTarget => - val ddef = if (!dd.symbol.hasFlag(STATIC) && methodReferencesThis(dd.symbol)) { + if (!dd.symbol.hasFlag(STATIC) && methodReferencesThis(dd.symbol)) { gen.mkStatic(dd, dd.symbol.name, sym => sym) } else { dd.symbol.setFlag(STATIC) dd } - if (settings.Ydelambdafy.value == "method-ref") { - // e.g. 
`def $anonfun$f$1(x$1: Foo): Unit = x$1.bar()` - // x$1.bar() is the Select, with x$1 as both the arg and the select.qualifier - def justMethRef(arg: ValDef, sel: Select) = ( - sel.symbol.isMethod // must be a method - && sel.qualifier.symbol == arg.symbol // the method must be on the first arg - && !sel.symbol.owner.isPrimitiveValueClass // can't involve primitives (boxing/specialisation) - && sel.symbol.owner != ArrayClass // ... or arrays - ) - ddef match { - case DefDef(_, _, _, List(arg :: Nil), _, Apply(sel @ Select(_: Ident, _), Nil)) if justMethRef(arg, sel) => - // Store the lambdaTarget while we still have access to the def's body (tree) - // (and now that we're post erasure, so we don't store a reference to un-erased Any) - ddef.symbol.updateAttachment(JustMethodReference(sel.symbol)) - case _ => - } - } - ddef case t => t } try { // during this call boxingBridgeMethods will be populated from the Function case - val Template(parents, self, body) = super.transform(deriveTemplate(tree)(_.mapConserve(pretransform))) + val Template(parents, self, body) = super.transform(deriveTemplate(tree)(_.mapConserve(pretransform))): @unchecked Template(parents, self, body ++ boxingBridgeMethods) } finally boxingBridgeMethods.clear() case dd: DefDef if dd.symbol.isLiftedMethod && !dd.symbol.isDelambdafyTarget => @@ -353,7 +333,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // A traverser that finds symbols used but not defined in the given Tree // TODO freeVarTraverser in LambdaLift does a very similar task. 
With some // analysis this could probably be unified with it - class FreeVarTraverser extends Traverser { + class FreeVarTraverser extends InternalTraverser { val freeVars = mutable.LinkedHashSet[Symbol]() val declared = mutable.LinkedHashSet[Symbol]() @@ -370,7 +350,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre if ((sym != NoSymbol) && sym.isLocalToBlock && sym.isTerm && !sym.isMethod && !declared.contains(sym)) freeVars += sym case _ => } - super.traverse(tree) + tree.traverse(this) } } @@ -383,14 +363,14 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } // finds all methods that reference 'this' - class ThisReferringMethodsTraverser extends Traverser { + class ThisReferringMethodsTraverser extends InternalTraverser { // the set of methods that refer to this - private val thisReferringMethods = mutable.Set[Symbol]() + private val thisReferringMethods = mutable.Set.empty[Symbol] // the set of lifted lambda body methods that each method refers to - private val liftedMethodReferences = mutable.Map[Symbol, Set[Symbol]]().withDefault(_ => mutable.Set()) + private val liftedMethodReferences = mutable.Map.empty[Symbol, mutable.Set[Symbol]] - def methodReferencesThisIn(tree: Tree) = { + def methodReferencesThisIn(tree: Tree): collection.Set[Symbol] = { traverse(tree) liftedMethodReferences.keys foreach refersToThis @@ -406,7 +386,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre else { seen += symbol (thisReferringMethods contains symbol) || - (liftedMethodReferences(symbol) exists loop) && { + (liftedMethodReferences.contains(symbol) && liftedMethodReferences(symbol).exists(loop)) && { // add it early to memoize debuglog(s"$symbol indirectly refers to 'this'") thisReferringMethods += symbol @@ -426,18 +406,18 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // we don't expect defs within defs. 
At this phase trees should be very flat if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.") currentMethod = tree.symbol - super.traverse(tree) + tree.traverse(this) currentMethod = NoSymbol case fun@Function(_, _) => // we don't drill into functions because at the beginning of this phase they will always refer to 'this'. // They'll be of the form {(args...) => this.anonfun(args...)} // but we do need to make note of the lifted body method in case it refers to 'this' - if (currentMethod.exists) liftedMethodReferences(currentMethod) += targetMethod(fun) + if (currentMethod.exists) liftedMethodReferences.getOrElseUpdate(currentMethod, mutable.Set()) += targetMethod(fun) case Apply(sel @ Select(This(_), _), args) if sel.symbol.isLiftedMethod => - if (currentMethod.exists) liftedMethodReferences(currentMethod) += sel.symbol + if (currentMethod.exists) liftedMethodReferences.getOrElseUpdate(currentMethod, mutable.Set()) += sel.symbol super.traverseTrees(args) case Apply(fun, outer :: rest) if shouldElideOuterArg(fun.symbol, outer) => - super.traverse(fun) + fun.traverse(this) super.traverseTrees(rest) case This(_) => if (currentMethod.exists && tree.symbol == currentMethod.enclClass) { @@ -447,7 +427,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre case _: ClassDef if !tree.symbol.isTopLevel => case _: DefDef => case _ => - super.traverse(tree) + tree.traverse(this) } } } diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index feebb50ab09e..77d568b2e6c2 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,8 +13,9 @@ package scala.tools.nsc package transform +import scala.annotation._ import scala.reflect.internal.ClassfileConstants._ -import scala.collection.{ mutable, immutable } +import scala.collection.{immutable, mutable} import symtab._ import Flags._ import scala.reflect.internal.Mode._ @@ -32,67 +33,74 @@ abstract class Erasure extends InfoTransform val phaseName: String = "erasure" - val requiredDirectInterfaces = perRunCaches.newAnyRefMap[Symbol, mutable.Set[Symbol]]() + val requiredDirectInterfaces = perRunCaches.newMap[Symbol, mutable.Set[Symbol]]() - def newTransformer(unit: CompilationUnit): Transformer = + def newTransformer(unit: CompilationUnit): AstTransformer = new ErasureTransformer(unit) override def keepsTypeParams = false // -------- erasure on types -------------------------------------------------------- - // convert a numeric with a toXXX method + // convert a numeric with a toNNN method def numericConversion(tree: Tree, numericSym: Symbol): Tree = { val mname = newTermName("to" + numericSym.name) val conversion = tree.tpe member mname - assert(conversion != NoSymbol, tree + " => " + numericSym) + assert(conversion != NoSymbol, s"$tree => $numericSym") atPos(tree.pos)(Apply(Select(tree, conversion), Nil)) } private object NeedsSigCollector { - private val NeedsSigCollector_true = new NeedsSigCollector(true) - private val NeedsSigCollector_false = new NeedsSigCollector(false) + private val NeedsSigCollector_true = new NeedsSigCollector(isClassConstructor = true) + private val NeedsSigCollector_false = new NeedsSigCollector(isClassConstructor = false) def apply(isClassConstructor: Boolean) = if (isClassConstructor) NeedsSigCollector_true else NeedsSigCollector_false } - private class NeedsSigCollector(isClassConstructor: Boolean) extends TypeCollector(false) { - def traverse(tp: Type): Unit = + private class 
NeedsSigCollector(isClassConstructor: Boolean) extends TypeCollector(initial = false) { + def apply(tp: Type): Unit = if (!result) { tp match { case st: SubType => - traverse(st.supertype) + apply(st.supertype) case TypeRef(pre, sym, args) => - if (sym == ArrayClass) args foreach traverse + if (sym == ArrayClass) untilApply(args) else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true - else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585 - else if (!sym.isTopLevel) traverse(pre) - case PolyType(_, _) | ExistentialType(_, _) => - result = true + else if (sym.isClass) apply(rebindInnerClass(pre, sym)) // #2585 + else if (!sym.isTopLevel) apply(pre) + case PolyType(_, _) | ExistentialType(_, _) => result = true case RefinedType(parents, _) => - parents foreach traverse + untilApply(parents) case ClassInfoType(parents, _, _) => - parents foreach traverse + untilApply(parents) case AnnotatedType(_, atp) => - traverse(atp) + apply(atp) case MethodType(params, resultType) => if (isClassConstructor) { val sigParams = params match { case head :: tail if head.isOuterParam => tail case _ => params } - mapOver(sigParams) + this.foldOver(sigParams) // skip the result type, it is Void in the signature. 
} else { - mapOver(tp) + tp.foldOver(this) } case _ => - mapOver(tp) + tp.foldOver(this) } } + @tailrec + private def untilApply(ts: List[Type]): Unit = + ts match { + case t :: ts if !result => + apply(t) + untilApply(ts) + case _ => + } } - override protected def verifyJavaErasure = settings.Xverify || settings.isDebug - def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { + override protected def verifyJavaErasure = settings.Xverify.value || settings.isDebug + def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig.value && { def needs(tp: Type) = NeedsSigCollector(sym.isClassConstructor).collect(tp) needs(tp) || throwsArgs.exists(needs) } @@ -104,19 +112,19 @@ abstract class Erasure extends InfoTransform private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol) = ( !sym.isHigherOrderTypeParameter && sym.isTypeParameterOrSkolem && ( - (initialSymbol.enclClassChain.exists(sym isNestedIn _)) || - (initialSymbol.isMethod && initialSymbol.typeParams.contains(sym)) + (initialSymbol.isMethod && initialSymbol.typeParams.contains(sym)) || + (initialSymbol.ownersIterator.exists(encl => encl.isClass && !encl.hasPackageFlag && sym.isNestedIn(encl))) ) ) - /** This object is only used for sanity testing when -check:genjvm is set. + /** This object is only used for confidence checking when -check:genjvm is set. * In that case we make sure that the erasure of the `normalized` type * is the same as the erased type that's generated. Normalization means * unboxing some primitive types and further simplifications as they are done in jsig. 
*/ - val prepareSigMap = new TypeMap { + val prepareSigMap: TypeMap = new TypeMap { def squashBoxed(tp: Type): Type = tp.dealiasWiden match { - case t @ RefinedType(parents, decls) => + case RefinedType(parents, decls) => val parents1 = parents mapConserve squashBoxed if (parents1 eq parents) tp else RefinedType(parents1, decls) @@ -224,7 +232,7 @@ abstract class Erasure extends InfoTransform // a signature should always start with a class def ensureClassAsFirstParent(tps: List[Type]) = tps match { case Nil => ObjectTpe :: Nil - case head :: tail if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps + case head :: _ if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps case _ => tps } @@ -240,7 +248,7 @@ abstract class Erasure extends InfoTransform } def boxedSig(tp: Type): Unit = jsig(tp, unboxedVCs = false) def boundsSig(bounds: List[Type]): Unit = { - val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait) + val (isTrait, isClass) = partitionConserve(bounds)(_.typeSymbol.isTrait) isClass match { case Nil => builder.append(':') // + boxedSig(ObjectTpe) case x :: _ => builder.append(':'); boxedSig(x) @@ -269,6 +277,7 @@ abstract class Erasure extends InfoTransform @noinline def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, unboxedVCs: Boolean = true): Unit = { + @inline def jsig1(tp0: Type) = jsig(tp0, existentiallyBound = Nil, toplevel = false, unboxedVCs = true) val tp = tp0.dealias tp match { case st: SubType => @@ -289,17 +298,17 @@ abstract class Erasure extends InfoTransform } else builder.append('*') } else tp match { - case PolyType(_, res) => + case PolyType(_, _) => builder.append('*') // scala/bug#7932 case _ => boxedSig(tp) } - def classSig: Unit = { + def classSig(): Unit = { markClassUsed(sym) val preRebound = pre.baseType(sym.owner) // #2585 if (needsJavaSig(sym, preRebound, Nil)) { val i = builder.length() - jsig(preRebound, existentiallyBound) + jsig(preRebound, existentiallyBound, toplevel = 
false, unboxedVCs = true) if (builder.charAt(i) == 'L') { builder.delete(builder.length() - 1, builder.length())// delete ';' // If the prefix is a module, drop the '$'. Classes (or modules) nested in modules @@ -328,10 +337,10 @@ abstract class Erasure extends InfoTransform // If args isEmpty, Array is being used as a type constructor if (sym == ArrayClass && args.nonEmpty) { - if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectTpe) + if (unboundedGenericArrayLevel(tp) == 1) jsig1(ObjectTpe) else { builder.append(ARRAY_TAG) - args.foreach(jsig(_)) + args.foreach(jsig1(_)) } } else if (isTypeParameterInSig(sym, sym0)) { @@ -339,32 +348,32 @@ abstract class Erasure extends InfoTransform builder.append(TVAR_TAG).append(sym.name).append(';') } else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) - jsig(ObjectTpe) + jsig1(ObjectTpe) else if (sym == UnitClass) - jsig(BoxedUnitTpe) + jsig1(BoxedUnitTpe) else if (sym == NothingClass) - jsig(RuntimeNothingClass.tpe) + jsig1(RuntimeNothingClass.tpe) else if (sym == NullClass) - jsig(RuntimeNullClass.tpe) + jsig1(RuntimeNullClass.tpe) else if (isPrimitiveValueClass(sym)) { - if (!unboxedVCs) jsig(ObjectTpe) - else if (sym == UnitClass) jsig(BoxedUnitTpe) + if (!unboxedVCs) jsig1(ObjectTpe) + else if (sym == UnitClass) jsig1(BoxedUnitTpe) else builder.append(abbrvTag(sym)) } else if (sym.isDerivedValueClass) { if (unboxedVCs) { val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType - jsig(unboxedSeen, existentiallyBound, toplevel) - } else classSig + jsig(unboxedSeen, existentiallyBound, toplevel, unboxedVCs = true) + } else classSig() } else if (sym.isClass) - classSig + classSig() else jsig(erasure(sym0)(tp), existentiallyBound, toplevel, unboxedVCs) case PolyType(tparams, restpe) => - assert(tparams.nonEmpty) + assert(tparams.nonEmpty, tparams) if (toplevel) polyParamSig(tparams) - jsig(restpe) + jsig1(restpe) case MethodType(params, restpe) => builder.append('(') @@ -380,14 
+389,14 @@ abstract class Erasure extends InfoTransform builder.append('['); att.typeParamRef case _ => p.tpe } - jsig(tp) + jsig1(tp) } }) builder.append(')') - if (restpe.typeSymbol == UnitClass || sym0.isConstructor) builder.append(VOID_TAG) else jsig(restpe) + if (restpe.typeSymbol == UnitClass || sym0.isConstructor) builder.append(VOID_TAG) else jsig1(restpe) case RefinedType(parents, decls) => - jsig(intersectionDominator(parents), unboxedVCs = unboxedVCs) + jsig(intersectionDominator(parents), existentiallyBound = Nil, toplevel = false, unboxedVCs = unboxedVCs) case ClassInfoType(parents, _, _) => superSig(tp.typeSymbol, parents) case AnnotatedType(_, atp) => @@ -398,7 +407,7 @@ abstract class Erasure extends InfoTransform case _ => val etp = erasure(sym0)(tp) if (etp eq tp) throw new UnknownSig - else jsig(etp) + else jsig1(etp) } } val throwsArgs = sym0.annotations flatMap ThrownException.unapply @@ -411,7 +420,7 @@ abstract class Erasure extends InfoTransform } Some(builder.toString) } - catch { case ex: UnknownSig => None } + catch { case _: UnknownSig => None } } else None } @@ -442,8 +451,10 @@ abstract class Erasure extends InfoTransform case Block(stats, expr) => // needs `hasSymbolField` check because `supercall` could be a block (named / default args) - val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER)) + val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER)): @unchecked treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr) + + case x => throw new MatchError(x) } } @@ -457,6 +468,7 @@ abstract class Erasure extends InfoTransform case PolyType(_, _) => mapOver(tp) case MethodType(_, _) => mapOver(tp) // nullarymethod was eliminated during uncurry case ConstantType(Constant(_: Type)) => ClassClass.tpe // all classOfs erase to Class + case ConstantType(value) => value.tpe.deconst case _ => tp.deconst } } @@ -471,43 +483,45 @@ abstract class 
Erasure extends InfoTransform override def newTyper(context: Context) = new Eraser(context) - class EnterBridges(unit: CompilationUnit, root: Symbol) { - - class BridgesCursor(root: Symbol) extends overridingPairs.Cursor(root) { - override def parents = root.info.firstParent :: Nil - // Varargs bridges may need generic bridges due to the non-repeated part of the signature of the involved methods. - // The vararg bridge is generated during refchecks (probably to simplify override checking), - // but then the resulting varargs "bridge" method may itself need an actual erasure bridge. - // TODO: like javac, generate just one bridge method that wraps Seq <-> varargs and does erasure-induced casts - override def exclude(sym: Symbol) = !sym.isMethod || super.exclude(sym) - } - + class EnterBridges(@unused unit: CompilationUnit, root: Symbol) { val site = root.thisType val bridgesScope = newScope val bridgeTarget = mutable.HashMap[Symbol, Symbol]() - val opc = enteringExplicitOuter { new BridgesCursor(root) } + val opc = enteringExplicitOuter { new overridingPairs.BridgesCursor(root) } def computeAndEnter(): Unit = { while (opc.hasNext) { if (enteringExplicitOuter(!opc.low.isDeferred)) - checkPair(opc. currentPair) + checkPair(opc.currentPair) opc.next() } } /** Check that a bridge only overrides members that are also overridden by the original member. - * This test is necessary only for members that have a value class in their type. - * Such members are special because their types after erasure and after post-erasure differ/. + * + * That is, check that the signature of the bridge method does not accidentally override some + * other method, possibly written by the user or inherited. + * + * As an optimization, only perform this check for susceptible bridges. + * + * This test is necessary for members that have a value class in their type. + * Such members are special because their types after erasure and after post-erasure differ. 
* This means we generate them after erasure, but the post-erasure transform might introduce * a name clash. The present method guards against these name clashes. * + * A bridge might also introduce a signature that accidentally matches an existing method. + * In that case, it will have a parameter erased to the upper bound of a type parameter + * in the signature of the member overridden by the original member. (That upper bound might + * be `Object`.) By contrast, erasure of a parameter `List[A]` would already have induced an error + * if there were a matching member. + * * @param member The original member * @param other The overridden symbol for which the bridge was generated * @param bridge The bridge */ - def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = { + def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): scala.collection.Seq[(Position, String)] = { def fulldef(sym: Symbol) = if (sym == NoSymbol) sym.toString else s"$sym: ${sym.tpe} in ${sym.owner}" @@ -522,7 +536,7 @@ abstract class Erasure extends InfoTransform } for (bc <- root.baseClasses) { if (settings.isDebug) - exitingPostErasure(println( + exitingPostErasure(debuglog( sm"""check bridge overrides in $bc |${bc.info.nonPrivateDecl(bridge.name)} |${site.memberType(bridge)} @@ -549,69 +563,65 @@ abstract class Erasure extends InfoTransform /** TODO - work through this logic with a fine-toothed comb, incorporating * into SymbolPairs where appropriate. 
*/ - def checkPair(pair: SymbolPair) { + def checkPair(pair: SymbolPair): Unit = { import pair._ val member = low val other = high val otpe = highErased - val bridgeNeeded = exitingErasure ( + val bridgeNeeded = exitingErasure { + def hasBridge = { + var e = bridgesScope.lookupEntry(member.name) + while ((e ne null) && !(e.sym.tpe =:= otpe && bridgeTarget(e.sym) == member)) + e = bridgesScope.lookupNextEntry(e) + e ne null + } !member.isMacro && !(other.tpe =:= member.tpe) && !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) && - { var e = bridgesScope.lookupEntry(member.name) - while ((e ne null) && !((e.sym.tpe =:= otpe) && (bridgeTarget(e.sym) == member))) - e = bridgesScope.lookupNextEntry(e) - (e eq null) + !hasBridge + } + def addBridgeIfOK(): Unit = { + var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | FINAL) + // If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we + // end up with two module symbols with the same name in the same scope, which is surprising + // when implementing later phases. 
+ if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | STABLE) + val bridge = other.cloneSymbolImpl(root, newFlags).setPos(root.pos).setAnnotations(member.annotations) + + debuglog(s"generating bridge from $other (${flagsToString(newFlags)}): ${otpe}${other.locationString} to $member: ${specialErasure(root)(member.tpe)}${member.locationString}") + + // the parameter symbols need to have the new owner + bridge setInfo (otpe cloneInfo bridge) + bridgeTarget(bridge) = member + + val shouldAdd = { + val sigContainsValueClass = member.tpe.exists(_.typeSymbol.isDerivedValueClass) + def bridgeMayClash = other.paramss.exists(_.exists(_.tpe match { case TypeRef(_, r, _) => r.isTypeParameter case _ => false })) + def bridgeIsAOK = checkBridgeOverrides(member, other, bridge) match { + case Nil => true + case _ if member.owner.isAnonymousClass => resolveAnonymousBridgeClash(member, bridge); true + case es => for ((pos, msg) <- es) reporter.error(pos, msg); false + } + !sigContainsValueClass && !bridgeMayClash || bridgeIsAOK } - ) - if (!bridgeNeeded) - return - - var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY) - // If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we - // end up with two module symbols with the same name in the same scope, which is surprising - // when implementing later phases. 
- if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | STABLE) - val bridge = other.cloneSymbolImpl(root, newFlags).setPos(root.pos).setAnnotations(member.annotations) - - debuglog("generating bridge from %s (%s): %s to %s: %s".format( - other, flagsToString(newFlags), - otpe + other.locationString, member, - specialErasure(root)(member.tpe) + member.locationString) - ) - - // the parameter symbols need to have the new owner - bridge setInfo (otpe cloneInfo bridge) - bridgeTarget(bridge) = member - - def sigContainsValueClass = (member.tpe exists (_.typeSymbol.isDerivedValueClass)) + if (shouldAdd) { + exitingErasure(root.info.decls enter bridge) - val shouldAdd = ( - !sigContainsValueClass - || (checkBridgeOverrides(member, other, bridge) match { - case Nil => true - case es if member.owner.isAnonymousClass => resolveAnonymousBridgeClash(member, bridge); true - case es => for ((pos, msg) <- es) reporter.error(pos, msg); false - }) - ) - - if (shouldAdd) { - exitingErasure(root.info.decls enter bridge) - - bridgesScope enter bridge - addBridge(bridge, member, other) - //bridges ::= makeBridgeDefDef(bridge, member, other) + bridgesScope enter bridge + addBridge(bridge, member, other) // GenerateBridges.addBridge bridges ::= makeBridgeDefDef(bridge, member, other) + } } + if (bridgeNeeded) addBridgeIfOK() } - protected def addBridge(bridge: Symbol, member: Symbol, other: Symbol) {} // hook for GenerateBridges + protected def addBridge(bridge: Symbol, member: Symbol, other: Symbol): Unit = {} // hook for GenerateBridges } class GenerateBridges(unit: CompilationUnit, root: Symbol) extends EnterBridges(unit, root) { - var bridges = List.empty[Tree] - var toBeRemoved = immutable.Set.empty[Symbol] + var bridges = List[Tree]() + var toBeRemoved = immutable.Set.empty[Symbol] def generate(): (List[Tree], immutable.Set[Symbol]) = { super.computeAndEnter() @@ -653,16 +663,15 @@ abstract class Erasure extends InfoTransform } else bridgingCall } val rhs = member.tpe 
match { - case MethodType(Nil, ConstantType(c)) => Literal(c) + case MethodType(Nil, FoldableConstantType(c)) => Literal(c) case _ => - val sel: Tree = Select(This(root), member) - val bridgingCall = (sel /: bridge.paramss)((fun, vparams) => Apply(fun, vparams map Ident)) + val sel: Tree = gen.mkAttributedSelect(gen.mkAttributedThis(root), member) + val bridgingCall = bridge.paramss.foldLeft(sel)((fun, vparams) => Apply(fun, vparams map Ident)) maybeWrap(bridgingCall) } DefDef(bridge, rhs) } - } /** The modifier typer which retypes with erased types. */ @@ -711,13 +720,13 @@ abstract class Erasure extends InfoTransform } } else treeCopy.Apply(tree, treeCopy.TypeApply(ta, treeCopy.Select(sel, qual1, name), List(targ)), List()) - case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List()) + case Apply(TypeApply(Select(_, _), List(targ)), List()) if tree.symbol == Any_isInstanceOf => targ.tpe match { case ErasedValueType(clazz, _) => targ.setType(clazz.tpe) case _ => } - tree + tree case Select(qual, name) => if (tree.symbol == NoSymbol) { tree @@ -731,14 +740,18 @@ abstract class Erasure extends InfoTransform else if (tree.symbol.owner == AnyClass) adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, tree.symbol.name)))) else { - var qual1 = typedQualifier(qual) - if ((isPrimitiveValueType(qual1.tpe) && !isPrimitiveValueMember(tree.symbol)) || - isErasedValueType(qual1.tpe)) - qual1 = box(qual1) - else if (!isPrimitiveValueType(qual1.tpe) && isPrimitiveValueMember(tree.symbol)) - qual1 = unbox(qual1, tree.symbol.owner.tpe) + val qual1 = { + val qual0 = typedQualifier(qual) + val isPrimitive = isPrimitiveValueType(qual0.tpe) + if (isPrimitive && !isPrimitiveValueMember(tree.symbol) || isErasedValueType(qual0.tpe)) + box(qual0) + else if (!isPrimitive && isPrimitiveValueMember(tree.symbol)) + unbox(qual0, tree.symbol.owner.tpe) + else + qual0 + } - def selectFrom(qual: Tree) = treeCopy.Select(tree, qual, name) + def selectFrom(fromQual: Tree) = 
treeCopy.Select(tree, fromQual, name) if (isPrimitiveValueMember(tree.symbol) && !isPrimitiveValueType(qual1.tpe)) { tree.symbol = NoSymbol @@ -787,9 +800,10 @@ abstract class Erasure extends InfoTransform /** A replacement for the standard typer's `typed1` method. */ override def typed1(tree: Tree, mode: Mode, pt: Type): Tree = { - val tree1 = try { + val tree1 = try tree match { - case DefDef(_,_,_,_,_,_) if tree.symbol.isClassConstructor && tree.symbol.isPrimaryConstructor && tree.symbol.owner != ArrayClass => + case tree: DefDef + if tree.symbol.isClassConstructor && tree.symbol.isPrimaryConstructor && tree.symbol.owner != ArrayClass => super.typed1(deriveDefDef(tree)(addMixinConstructorCalls(_, tree.symbol.owner)), mode, pt) // (3) case Template(parents, self, body) => val parents1 = tree.symbol.owner.info.parents map (t => TypeTree(t) setPos tree.pos) @@ -812,16 +826,14 @@ abstract class Erasure extends InfoTransform case _ => super.typed1(adaptMember(tree), mode, pt) } - } catch { + catch { case er: TypeError => Console.println("exception when typing " + tree+"/"+tree.getClass) Console.println(er.msg + " in file " + context.owner.sourceFile) er.printStackTrace abort("unrecoverable error") case ex: Exception => - //if (settings.debug.value) - try Console.println("exception when typing " + tree) - finally throw ex + try Console.println(s"exception when typing $tree") catch identity: @nowarn throw ex } @@ -838,7 +850,6 @@ abstract class Erasure extends InfoTransform case Some(SAMFunction(samTp, _, _)) => fun setType specialScalaErasure(samTp) case _ => fun } - case If(cond, thenp, elsep) => treeCopy.If(tree1, cond, adaptBranch(thenp), adaptBranch(elsep)) case Match(selector, cases) => @@ -850,7 +861,7 @@ abstract class Erasure extends InfoTransform val first = tree1.symbol.alternatives.head val firstTpe = first.tpe val sym1 = tree1.symbol.filter { - alt => alt == first || !(firstTpe looselyMatches alt.tpe) + alt => alt == first || 
!firstTpe.looselyMatches(alt.tpe) } if (tree.symbol ne sym1) { tree1 setSymbol sym1 setType sym1.tpe @@ -864,10 +875,10 @@ abstract class Erasure extends InfoTransform } /** The erasure transformer */ - class ErasureTransformer(unit: CompilationUnit) extends Transformer { - import overridingPairs.Cursor + class ErasureTransformer(unit: CompilationUnit) extends AstTransformer { + import overridingPairs.PairsCursor - private def doubleDefError(pair: SymbolPair) { + private def doubleDefError(pair: SymbolPair): Unit = { import pair._ if (!pair.isErroneous) { @@ -876,13 +887,15 @@ abstract class Erasure extends InfoTransform else if (low.owner == base) "name clash between defined and inherited member" else "name clash between inherited members" ) - val when = if (exitingRefchecks(lowType matches highType)) "" else " after erasure: " + exitingPostErasure(highType) + val when = + if (exitingRefchecks(lowType matches highType)) "" + else s" after erasure: ${exitingPostErasure(highType)}" reporter.error(pos, - s"""|$what: - |${exitingRefchecks(highString)} and - |${exitingRefchecks(lowString)} - |have same type$when""".trim.stripMargin + sm"""|$what: + |${exitingRefchecks(highString)} and + |${exitingRefchecks(lowString)} + |have same type$when""" ) } low setInfo ErrorType @@ -892,7 +905,7 @@ abstract class Erasure extends InfoTransform exitingPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro /** TODO - adapt SymbolPairs so it can be used here. 
*/ - private def checkNoDeclaredDoubleDefs(base: Symbol) { + private def checkNoDeclaredDoubleDefs(base: Symbol): Unit = { val decls = base.info.decls // scala/bug#8010 force infos, otherwise makeNotPrivate in ExplicitOuter info transformer can trigger @@ -922,7 +935,7 @@ abstract class Erasure extends InfoTransform } } - private class DoubleDefsCursor(root: Symbol) extends Cursor(root) { + private class DoubleDefsCursor(root: Symbol) extends PairsCursor(root) { // specialized members have no type history before 'specialize', causing double def errors for curried defs override def exclude(sym: Symbol): Boolean = ( sym.isType @@ -940,7 +953,7 @@ abstract class Erasure extends InfoTransform * - A template inherits two members `m` with different types, * but their erased types are the same. */ - private def checkNoDoubleDefs(root: Symbol) { + private def checkNoDoubleDefs(root: Symbol): Unit = { checkNoDeclaredDoubleDefs(root) def isErasureDoubleDef(pair: SymbolPair) = { import pair._ @@ -995,31 +1008,64 @@ abstract class Erasure extends InfoTransform * - Remove all instance creations new C(arg) where C is an inlined class. * - Reset all other type attributes to null, thus enforcing a retyping. */ - private val preTransformer = new TypingTransformer(unit) { + private val preTransformer: TypingTransformer = new TypingTransformer(unit) { // Work around some incomplete path unification :( there are similar casts in SpecializeTypes def context: Context = localTyper.context.asInstanceOf[Context] + // TODO: since the spec defines instanceOf checks in terms of pattern matching, + // this extractor should share code with TypeTestTreeMaker. The corresponding + // code is somewhat buried in and entangled with the pattern matching mechanics + // which makes this fiddly to do now. 
+ object SingletonInstanceCheck { + def unapply(pt: Type): Option[(TermSymbol, Tree)] = { + def containsSingleton(tp: Type): Boolean = + tp.dealias match { + case SingleType(_, _) | ConstantType(_) | ThisType(_) | SuperType(_, _) => true + case RefinedType(parents, _) => parents.exists(containsSingleton) + case _ => false + } + if(containsSingleton(pt)) { + val cmpOp = if (pt.typeSymbol.isSubClass(AnyValClass)) Any_equals else Object_eq + val cmpArg = gen.mkAttributedQualifier(pt) + Some((cmpOp, cmpArg)) + } else None + } + } + private def preEraseNormalApply(tree: Apply) = { val fn = tree.fun val args = tree.args def qualifier = fn match { - case Select(qual, _) => qual + case Select(qual, _) => qual case TypeApply(Select(qual, _), _) => qual + case x => throw new MatchError(x) } + + // TODO: this should share logic with TypeTestTreeMaker in the pattern matcher, + // since `x.isInstanceOf[T]` is specified as the pattern match. The corresponding + // code is somewhat buried in and entangled with the pattern matching mechanics + // which makes this fiddly to do now. def preEraseAsInstanceOf = { (fn: @unchecked) match { case TypeApply(Select(qual, _), List(targ)) => - if (qual.tpe <:< targ.tpe) - atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) } - else if (isNumericValueClass(qual.tpe.typeSymbol) && isNumericValueClass(targ.tpe.typeSymbol)) - atPos(tree.pos)(numericConversion(qual, targ.tpe.typeSymbol)) - else - tree + targ.tpe match { + case argTp if qual.tpe <:< argTp => + atPos(tree.pos) { Typed(qual, TypeTree(argTp)) } + case argTp if isNumericValueClass(qual.tpe.typeSymbol) && isNumericValueClass(argTp.typeSymbol) => + atPos(tree.pos)(numericConversion(qual, argTp.typeSymbol)) + case _ => + tree + } } // todo: also handle the case where the singleton type is buried in a compound } + // TODO: this should share logic with TypeTestTreeMaker in the pattern matcher, + // since `x.isInstanceOf[T]` is specified as the pattern match. 
The corresponding + // code is somewhat buried in and entangled with the pattern matching mechanics + // which makes this fiddly to do now. + // `x match { case _: T => true case _ => false }` (modulo numeric conversion) def preEraseIsInstanceOf = { fn match { case TypeApply(sel @ Select(qual, name), List(targ)) => @@ -1033,12 +1079,9 @@ abstract class Erasure extends InfoTransform List(TypeTree(tp) setPos targ.pos)) setPos fn.pos, List()) setPos tree.pos targ.tpe match { - case SingleType(_, _) | ThisType(_) | SuperType(_, _) => - val cmpOp = if (targ.tpe <:< AnyValTpe) Any_equals else Object_eq - atPos(tree.pos) { - Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe))) - } - case RefinedType(parents, decls) if (parents.length >= 2) => + case SingletonInstanceCheck(cmpOp, cmpArg) => + atPos(tree.pos) { Apply(Select(cmpArg, cmpOp), List(qual)) } + case RefinedType(parents, decls) if (parents.lengthIs >= 2) => gen.evalOnce(qual, currentOwner, localTyper.fresh) { q => // Optimization: don't generate isInstanceOf tests if the static type // conforms, because it always succeeds. (Or at least it had better.) @@ -1053,8 +1096,14 @@ abstract class Erasure extends InfoTransform parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd } } - case _ => - tree + case TypeRef(_, SingletonClass, _) => + atPos(tree.pos) { + if(qual.tpe <:< AnyRefTpe) + Apply(Select(qual, Object_ne), List(Literal(Constant(null)) setType NullTpe)) + else + Literal(Constant(true)) + } + case _ => tree } case _ => tree } @@ -1078,8 +1127,8 @@ abstract class Erasure extends InfoTransform private def preEraseApply(tree: Apply) = { tree.fun match { case TypeApply(fun @ Select(qual, name), args @ List(arg)) - if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) && - unboundedGenericArrayLevel(arg.tpe) > 0) => // !!! todo: simplify by having GenericArray also extract trees + if isTypeTestSymbol(fun.symbol) && + unboundedGenericArrayLevel(arg.tpe) > 0 => // !!! 
todo: simplify by having GenericArray also extract trees val level = unboundedGenericArrayLevel(arg.tpe) def isArrayTest(arg: Tree) = gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level)))) @@ -1151,7 +1200,7 @@ abstract class Erasure extends InfoTransform global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen)))) } else if (primitiveGetClassMethods.contains(fn.symbol)) { // if we got here then we're trying to send a primitive getClass method to either - // a) an Any, in which cage Object_getClass works because Any erases to object. Or + // a) an Any, in which case Object_getClass works because Any erases to object. Or // // b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent // of the refinement is a primitive and another is AnyRef. In that case @@ -1166,7 +1215,7 @@ abstract class Erasure extends InfoTransform } else qual match { case New(tpt) if name == nme.CONSTRUCTOR && tpt.tpe.typeSymbol.isDerivedValueClass => // println("inject derived: "+arg+" "+tpt.tpe) - val List(arg) = args + val List(arg) = args: @unchecked val attachment = new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef]) InjectDerivedValue(arg) updateAttachment attachment case _ => @@ -1182,10 +1231,11 @@ abstract class Erasure extends InfoTransform case tree: Apply => preEraseApply(tree) - case TypeApply(fun, args) if (fun.symbol.owner != AnyClass && - fun.symbol != Object_asInstanceOf && - fun.symbol != Object_isInstanceOf && - fun.symbol != Object_synchronized) => + case TypeApply(fun, args) + if fun.symbol.owner != AnyClass + && fun.symbol != Object_asInstanceOf + && fun.symbol != Object_isInstanceOf + && fun.symbol != Object_synchronized => // leave all other type tests/type casts, remove all other type applications preErase(fun) @@ -1243,9 +1293,11 @@ abstract class Erasure extends InfoTransform if (qualSym != owner) tree.updateAttachment(new QualTypeSymAttachment(qualSym)) - } else 
if (!isJvmAccessible(owner, context)) { + } else if (!isJvmAccessible(owner, context) || + // scala/bug#13007: isJvmAccessible is true for a protected java method, even if accessed through erased self type + sym.isJavaDefined && sym.isProtected && !qual.tpe.typeSymbol.isSubClass(sym.owner)) { val qualSym = qual.tpe.typeSymbol - if (qualSym != owner && isJvmAccessible(qualSym, context) && definesMemberAfterErasure(qualSym, sym)) + if (qualSym != owner && isJvmAccessible(qualSym, context) && (definesMemberAfterErasure(qualSym, sym) || sym.overridingSymbol(qualSym).exists)) tree.updateAttachment(new QualTypeSymAttachment(qualSym)) else reporter.error(tree.pos, s"Unable to emit reference to ${sym.fullLocationString}, $owner is not accessible in ${context.enclClass.owner}") @@ -1259,7 +1311,7 @@ abstract class Erasure extends InfoTransform treeCopy.Template(tree, parents, noSelfType, addBridgesToTemplate(body, currentOwner)) case Match(selector, cases) => - Match(Typed(selector, TypeTree(selector.tpe)), cases) + treeCopy.Match(tree, Typed(selector, TypeTree(selector.tpe)), cases) case Literal(ct) => // We remove the original tree attachments in pre-erasure to free up memory @@ -1268,7 +1320,6 @@ abstract class Erasure extends InfoTransform if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != definitions.UnitClass) { val typeValue = ct.typeValue.dealiasWiden val erased = erasure(typeValue.typeSymbol) applyInArray typeValue - treeCopy.Literal(cleanLiteral, Constant(erased)) } else cleanLiteral @@ -1310,6 +1361,16 @@ abstract class Erasure extends InfoTransform case ArrayValue(elemtpt, trees) => treeCopy.ArrayValue( tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType() + case ValDef(_, _, tpt, rhs) => + val vd1 = super.transform(tree1).clearType().asInstanceOf[ValDef] + vd1.tpt.tpe match { + case FoldableConstantType(_) if !vd1.rhs.isInstanceOf[Literal] => + val deconst = vd1.tpt.tpe.deconst + vd1.tpt setType deconst + 
tree1.symbol.setInfo(deconst) + case _ => + } + vd1 case DefDef(_, _, _, _, tpt, _) => // TODO: move this in some post-processing transform in the fields phase? if (fields.symbolAnnotationsTargetFieldAndGetter(tree.symbol)) @@ -1317,7 +1378,7 @@ abstract class Erasure extends InfoTransform try super.transform(tree1).clearType() finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType - case ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: _) => + case ApplyDynamic(_, Literal(Constant(_: Symbol)) :: _) => tree case _: Apply if tree1 ne tree => /* some Apply trees get replaced (in `preEraseApply`) with one of @@ -1343,7 +1404,7 @@ abstract class Erasure extends InfoTransform } } - final def resolveAnonymousBridgeClash(sym: Symbol, bridge: Symbol) { + final def resolveAnonymousBridgeClash(sym: Symbol, bridge: Symbol): Unit = { // TODO reinstate this after Delambdafy generates anonymous classes that meet this requirement. // require(sym.owner.isAnonymousClass, sym.owner) log(s"Expanding name of ${sym.debugLocationString} as it clashes with bridge. Renaming deemed safe because the owner is anonymous.") diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 7ba5b20e5ae1..700bcf275d60 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,13 +16,13 @@ package transform import symtab._ import Flags.{CASE => _, _} +import scala.annotation.{nowarn, tailrec} import scala.collection.mutable.ListBuffer import scala.tools.nsc.Reporting.WarningCategory /** This class ... 
* * @author Martin Odersky - * @version 1.0 */ abstract class ExplicitOuter extends InfoTransform with TypingTransformers @@ -41,7 +41,7 @@ abstract class ExplicitOuter extends InfoTransform /** This class does not change linearization */ override def changesBaseClasses = false - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new ExplicitOuterTransformer(unit) /** Is given clazz an inner class? */ @@ -75,7 +75,7 @@ abstract class ExplicitOuter extends InfoTransform result } - class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer { + class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends AstTransformer { override def transform(tree: Tree) = tree match { case Bind(_, body) if toRemove(tree.symbol) => super.transform(body) case _ => super.transform(tree) @@ -111,8 +111,8 @@ abstract class ExplicitOuter extends InfoTransform * * {{{ * class C { - * trait T { C.this } // C$T$$$outer$ : C - * object T extends T { C.this } // C$T$$$outer$ : C.this.type + * trait T { C.this } // C\$T\$\$\$outer\$ : C + * object T extends T { C.this } // C\$T\$\$\$outer\$ : C.this.type * } * }}} * @@ -210,8 +210,8 @@ abstract class ExplicitOuter extends InfoTransform * values for outer parameters of constructors. * The class provides methods for referencing via outer. */ - abstract class OuterPathTransformer(initLocalTyper: analyzer.Typer) extends TypingTransformer(initLocalTyper) with UnderConstructionTransformer { - def this(unit: CompilationUnit) { this(newRootLocalTyper(unit)) } + abstract class OuterPathTransformer(initLocalTyper: analyzer.Typer) extends TypingTransformer(initLocalTyper) { + def this(unit: CompilationUnit) = this(newRootLocalTyper(unit)) /** The directly enclosing outer parameter, if we are in a constructor */ protected var outerParam: Symbol = NoSymbol @@ -266,11 +266,12 @@ abstract class ExplicitOuter extends InfoTransform } /** The path - *
    `base'.$outer$$C1 ... .$outer$$Cn
    + *
    `base`.\$outer\$\$C1 ... .\$outer\$\$Cn
    * which refers to the outer instance of class to of * value base. The result is typed but not positioned. */ - protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = { + @tailrec + protected final def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = { //Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe) if (from == to) base else { @@ -280,6 +281,13 @@ abstract class ExplicitOuter extends InfoTransform } } + + /** The stack of class symbols in which a call to this() or to the super + * constructor, or early definition is active + */ + protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz + protected val selfOrSuperCalls = collection.mutable.Stack[Symbol]() + override def transform(tree: Tree): Tree = { def sym = tree.symbol val savedOuterParam = outerParam @@ -292,7 +300,13 @@ abstract class ExplicitOuter extends InfoTransform assert(outerParam.name startsWith nme.OUTER, outerParam.name) case _ => } - super.transform(tree) + if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree)) { + selfOrSuperCalls push currentOwner.owner + val transformed = super.transform(tree) + selfOrSuperCalls.pop() + transformed + } else + super.transform(tree) } finally outerParam = savedOuterParam } @@ -352,7 +366,7 @@ abstract class ExplicitOuter extends InfoTransform * * @param mixinClass The mixin class which defines the abstract outer * accessor which is implemented by the generated one. 
- * @pre mixinClass is an inner class + * @note Pre-condition: `mixinClass` is an inner class */ def mixinOuterAccessorDef(mixinClass: Symbol): Tree = { val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass @@ -378,7 +392,7 @@ abstract class ExplicitOuter extends InfoTransform if (sym.isProtected && !sym.isJavaDefined) sym setFlag notPROTECTED } tree match { - case Template(parents, self, decls) => + case Template(_, _, _) => val newDefs = new ListBuffer[Tree] atOwner(tree, currentOwner) { if (!currentClass.isInterface) { @@ -401,22 +415,17 @@ abstract class ExplicitOuter extends InfoTransform ) case DefDef(_, _, _, vparamss, _, rhs) => if (sym.isClassConstructor) { - rhs match { - case Literal(_) => - sys.error("unexpected case") //todo: remove - case _ => - val clazz = sym.owner - val vparamss1 = - if (isInner(clazz)) { // (4) - if (isUnderConstruction(clazz.outerClass)) { - reporter.error(tree.pos, s"Implementation restriction: ${clazz.fullLocationString} requires premature access to ${clazz.outerClass}.") - } - val outerParam = - sym.newValueParameter(nme.OUTER, sym.pos, ARTIFACT) setInfo clazz.outerClass.thisType - ((ValDef(outerParam) setType NoType) :: vparamss.head) :: vparamss.tail - } else vparamss - super.transform(copyDefDef(tree)(vparamss = vparamss1)) - } + val clazz = sym.owner + val vparamss1 = + if (isInner(clazz)) { // (4) + if (isUnderConstruction(clazz.outerClass)) { + reporter.error(tree.pos, s"Implementation restriction: ${clazz.fullLocationString} requires premature access to ${clazz.outerClass}.") + } + val outerParam = + sym.newValueParameter(nme.OUTER, sym.pos, ARTIFACT) setInfo clazz.outerClass.thisType + ((ValDef(outerParam) setType NoType) :: vparamss.head) :: vparamss.tail + } else vparamss + super.transform(copyDefDef(tree)(vparamss = vparamss1)) } else super.transform(tree) @@ -456,10 +465,10 @@ abstract class ExplicitOuter extends InfoTransform }) super.transform(treeCopy.Apply(tree, sel, outerVal :: args)) - // for 
the new pattern matcher + // for the pattern matcher // base..eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE // TODO remove the synthetic `` method from outerFor?? - case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) => + case Apply(eqsel@Select(Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) => val outerFor = sel.symbol.owner val acc = outerAccessor(outerFor) @@ -479,6 +488,31 @@ abstract class ExplicitOuter extends InfoTransform transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args)) } + // (t12312) C.this.a().X().isInstanceOf[C.this.a.X.type]() --> + // D.this.$outer().a().X().isInstanceOf[D.this.$outer.a.X.type]() + case TypeApply(fun, targs) => + val rewriteTypeToExplicitOuter = new TypeMap { typeMap => + def apply(tp: Type) = tp match { + case ThisType(sym) if sym != currentClass && !(sym.hasModuleFlag && sym.isStatic) => + var cls = currentClass + var tpe = cls.thisType + do { + tpe = singleType(tpe, outerAccessor(cls)) + cls = cls.outerClass + } while (cls != NoSymbol && sym != cls) + tpe.mapOver(typeMap) + case tp => tp.mapOver(typeMap) + } + } + val fun2 = transform(fun) + val targs2 = targs.mapConserve { targ0 => + val targ = transform(targ0) + val targTp = targ.tpe + val targTp2 = rewriteTypeToExplicitOuter(targTp.dealias) + if (targTp eq targTp2) targ else TypeTree(targTp2).setOriginal(targ) + } + treeCopy.TypeApply(tree, fun2, targs2) + case _ => val x = super.transform(tree) if (x.tpe eq null) x @@ -487,15 +521,19 @@ abstract class ExplicitOuter extends InfoTransform } /** The transformation method for whole compilation units */ - override def transformUnit(unit: CompilationUnit) { + override def transformUnit(unit: CompilationUnit): Unit = { exitingExplicitOuter(super.transformUnit(unit)) } } - override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = - new Phase(prev) + override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = new 
OuterPhase(prev) + + @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.transform\.ExplicitOuter\.Phase""") + final type OuterPhase = Phase - class Phase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) { + @nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use OuterPhase instead", since = "2.13.4") + class Phase(prev: scala.tools.nsc.Phase) extends InfoPhase(prev) { override val checkable = false } } diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index a8621a0da282..1b976e6cc164 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,8 +13,7 @@ package scala.tools.nsc package transform -import symtab._ -import Flags._ +import scala.annotation.tailrec import scala.collection.mutable /** @@ -22,7 +21,6 @@ import scala.collection.mutable * methods in a value class, except parameter or super accessors, or constructors. * * @author Martin Odersky - * @version 2.10 */ abstract class ExtensionMethods extends Transform with TypingTransformers { @@ -32,42 +30,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { /** the following two members override abstract members in Transform */ val phaseName: String = "extmethods" - def newTransformer(unit: CompilationUnit): Transformer = + def newTransformer(unit: CompilationUnit): AstTransformer = new Extender(unit) - /** Generate stream of possible names for the extension version of given instance method `imeth`. - * If the method is not overloaded, this stream consists of just "extension$imeth". 
- * If the method is overloaded, the stream has as first element "extensionX$imeth", where X is the - * index of imeth in the sequence of overloaded alternatives with the same name. This choice will - * always be picked as the name of the generated extension method. - * After this first choice, all other possible indices in the range of 0 until the number - * of overloaded alternatives are returned. The secondary choices are used to find a matching method - * in `extensionMethod` if the first name has the wrong type. We thereby gain a level of insensitivity - * of how overloaded types are ordered between phases and picklings. - */ - private def extensionNames(imeth: Symbol): Stream[Name] = { - val decl = imeth.owner.info.decl(imeth.name) - - // Bridge generation is done at phase `erasure`, but new scopes are only generated - // for the phase after that. So bridges are visible in earlier phases. - // - // `info.member(imeth.name)` filters these out, but we need to use `decl` - // to restrict ourselves to members defined in the current class, so we - // must do the filtering here. - val declTypeNoBridge = decl.filter(sym => !sym.isBridge).tpe - - declTypeNoBridge match { - case OverloadedType(_, alts) => - val index = alts indexOf imeth - assert(index >= 0, alts+" does not contain "+imeth) - def altName(index: Int) = newTermName(imeth.name+"$extension"+index) - altName(index) #:: ((0 until alts.length).toStream filter (index != _) map altName) - case tpe => - assert(tpe != NoType, imeth.name+" not found in "+imeth.owner+"'s decls: "+imeth.owner.info.decls) - Stream(newTermName(imeth.name+"$extension")) - } - } - private def companionModuleForce(sym: Symbol) = { sym.andAlso(_.owner.initialize) // See scala/bug#6976. `companionModule` only calls `rawInfo`. (Why?) sym.companionModule @@ -76,7 +41,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { /** Return the extension method that corresponds to given instance method `meth`. 
*/ def extensionMethod(imeth: Symbol): Symbol = enteringPhase(currentRun.refchecksPhase) { val companionInfo = companionModuleForce(imeth.owner).info - val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists) + val candidates = companionInfo.decl(imeth.name.extensionName).alternatives val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe) assert(matching.nonEmpty, sm"""|no extension method found for: @@ -85,13 +50,11 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { | | Candidates: | - | ${candidates.map(c => c.name+":"+c.tpe).mkString("\n")} + | ${candidates.map(c => s"${c.name}:${c.tpe}").mkString("\n")} | | Candidates (signatures normalized): | - | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")} - | - | Eligible Names: ${extensionNames(imeth).mkString(",")}" """) + | ${candidates.map(c => s"${c.name}:${normalize(c.tpe, imeth.owner)}").mkString("\n")}" """) matching.head } @@ -133,7 +96,8 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { class Extender(unit: CompilationUnit) extends TypingTransformer(unit) { private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]() - def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit = + @tailrec + final def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit = if (seen contains clazz) reporter.error(pos, "value class may not unbox to itself") else { @@ -141,62 +105,12 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed) } - /** We will need to clone the info of the original method (which obtains clones - * of the method type parameters), clone the type parameters of the value class, - * and create a new polymethod with the union of all those type parameters, with - * their infos adjusted to be consistent with their new 
home. Example: - * - * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal { - * def baz[B >: A](x: B): List[B] = x :: xs - * // baz has to be transformed into this extension method, where - * // A is cloned from class Foo and B is cloned from method baz: - * // def extension\$baz[B >: A <: Any, A >: Nothing <: AnyRef](\$this: Foo[A])(x: B): List[B] - * } - * - * TODO: factor out the logic for consolidating type parameters from a class - * and a method for re-use elsewhere, because nobody will get this right without - * some higher level facilities. - */ - def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = { - val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth - // Start with the class type parameters - clones will be method type parameters - // so must drop their variance. - val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) - - val thisParamType = appliedType(clazz, tparamsFromClass.map(_.tpeHK)) - val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType - val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) - val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam) - - def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass) - def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass) - - // We can't substitute symbols on the entire polytype because we - // need to modify the bounds of the cloned type parameters, but we - // don't want to substitute for the cloned type parameters themselves. 
- val tparams = tparamsFromMethod ::: tparamsFromClass - tparams foreach (_ modifyInfo fixtparam) - GenPolyType(tparams, fixres(resultType)) - - // For reference, calling fix on the GenPolyType plays out like this: - // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966] - // do not conform to method extension$baz#16148's type parameter bounds - // - // And the difference is visible here. See how B is bounded from below by A#16149 - // in both cases, but in the failing case, the other type parameter has turned into - // a different A. (What is that A? It is a clone of the original A created in - // SubstMap during the call to substSym, but I am not clear on all the particulars.) - // - // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] - // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] - } - override def transform(tree: Tree): Tree = { tree match { case Template(_, _, _) => if (currentOwner.isDerivedValueClass) { - /* This is currently redundant since value classes may not - wrap over other value classes anyway. + /* This is currently redundant since value classes may not + wrap over other value classes anyway. 
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */ extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree] currentOwner.primaryConstructor.makeNotPrivate(NoSymbol) @@ -213,27 +127,13 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { val origParams = vparamss.flatten map (_.symbol) val companion = origThis.companionModule - def makeExtensionMethodSymbol = { - val extensionName = extensionNames(origMeth).head.toTermName - val extensionMeth = ( - companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL) - setAnnotations origMeth.annotations - ) - defineOriginalOwner(extensionMeth, origMeth.owner) - // @strictfp on class means strictfp on all methods, but `setAnnotations` won't copy it - if (origMeth.isStrictFP && !extensionMeth.hasAnnotation(ScalaStrictFPAttr)) - extensionMeth.addAnnotation(ScalaStrictFPAttr) - origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now. 
- companion.info.decls.enter(extensionMeth) - } - - val extensionMeth = makeExtensionMethodSymbol - val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis) + val extensionMeth = origMeth.newExtensionMethodSymbol(companion, tree.pos.focus) + val newInfo = extensionMethInfo(currentOwner, extensionMeth, origMeth.info, origThis) extensionMeth setInfo newInfo log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}") - val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo + val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo: @unchecked val extensionParams = allParameters(extensionMono) val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos) @@ -245,14 +145,14 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { .changeOwner(origMeth, extensionMeth) new SubstututeRecursion(origMeth, extensionMeth, unit).transform(tree) } - val castBody = - if (extensionBody.tpe <:< extensionMono.finalResultType) - extensionBody - else - gen.mkCastPreservingAnnotations(extensionBody, extensionMono.finalResultType) // scala/bug#7818 e.g. mismatched existential skolems + + val resultType = extensionMono.finalResultType + val castBody = // scala/bug#7818 e.g. mismatched existential skolems + if (extensionBody.tpe <:< resultType) extensionBody + else gen.mkCastPreservingAnnotations(extensionBody, resultType) // Record the extension method. Later, in `Extender#transformStats`, these will be added to the companion object. - extensionDefs(companion) += DefDef(extensionMeth, castBody) + extensionDefs(companion) += newDefDef(extensionMeth, castBody)(tpt = TypeTree(resultType)) // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this) // which leaves the actual argument application for extensionCall. 
diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 32446dc4f710..59821f87ee03 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,8 @@ package scala.tools.nsc package transform -import scala.annotation.tailrec +import scala.annotation._ +import scala.reflect.internal.util.ListOfNil import symtab.Flags._ @@ -85,7 +86,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor /** the following two members override abstract members in Transform */ val phaseName: String = "fields" - protected def newTransformer(unit: CompilationUnit): Transformer = new FieldsTransformer(unit) + protected def newTransformer(unit: CompilationUnit): AstTransformer = new FieldsTransformer(unit) override def transformInfo(sym: Symbol, tp: Type): Type = if (sym.isJavaDefined || sym.isPackageClass || !sym.isClass) tp else synthFieldsAndAccessors(tp) @@ -118,16 +119,21 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } // TODO: add MIXEDIN (see e.g., `accessed` on `Symbol`) - private def setMixedinAccessorFlags(orig: Symbol, cloneInSubclass: Symbol): Unit = + private def setMixedinAccessorFlags(@unused orig: Symbol, cloneInSubclass: Symbol): Unit = cloneInSubclass setFlag OVERRIDE | NEEDS_TREES resetFlag DEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS - private def setFieldFlags(accessor: Symbol, fieldInSubclass: TermSymbol): Unit = - fieldInSubclass setFlag (NEEDS_TREES | - PrivateLocal - | (accessor getFlag MUTABLE | LAZY | DEFAULTINIT) - | (if (accessor hasFlag STABLE) 0 else MUTABLE) - ) - + private def setFieldFlags(accessor: Symbol, fieldInSubclass: 
TermSymbol): Unit = { + // Since initialization is performed (lexically) outside of the constructor (in the trait setter), + // we have to make the field mutable starting with classfile format 53 + // (it was never allowed, but the verifier enforces this now). + fieldInSubclass.setFlag(NEEDS_TREES | PrivateLocal | MUTABLE | accessor.getFlag(LAZY | DEFAULTINIT)) + if (accessor.hasFlag(STABLE)) { + // If the field is for an immutable val, make sure it's safely published + val isInStaticModule = fieldInSubclass.owner.isModuleClass && fieldInSubclass.owner.sourceModule.isStaticModule + if (!isInStaticModule) // the lock is enough. + fieldInSubclass.owner.primaryConstructor.updateAttachment(ConstructorNeedsFence) + } + } def checkAndClearOverriddenTraitSetter(setter: Symbol) = checkAndClear(OVERRIDDEN_TRAIT_SETTER)(setter) def checkAndClearNeedsTrees(setter: Symbol) = checkAndClear(NEEDS_TREES)(setter) @@ -164,7 +170,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // Note that a strict unit-typed val does receive a field, because we cannot omit the write to the field // (well, we could emit it for non-@volatile ones, if I understand the memory model correctly, // but that seems pretty edge-casey) - val constantTyped = tp.isInstanceOf[ConstantType] + val constantTyped = tp.isInstanceOf[FoldableConstantType] } private def fieldTypeForGetterIn(getter: Symbol, pre: Type): Type = getter.info.finalResultType.asSeenFrom(pre, getter.owner) @@ -186,8 +192,8 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // (in traits, getters must also hold annotations that target the underlying field, // because the latter won't be created until the trait is mixed into a class) // TODO do bean getters need special treatment to suppress field-targeting annotations in traits? 
- def dropFieldAnnotationsFromGetter(sym: Symbol) = - sym setAnnotations (sym.annotations filter AnnotationInfo.mkFilter(GetterTargetClass, defaultRetention = false)) + def dropFieldAnnotationsFromGetter(sym: Symbol): Unit = + sym.setAnnotations(sym.annotations.filter(AnnotationInfo.mkFilter(GetterTargetClass, defaultRetention = false))) def symbolAnnotationsTargetFieldAndGetter(sym: Symbol): Boolean = sym.isGetter && (sym.isLazy || sym.owner.isTrait) @@ -204,7 +210,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // can't use the referenced field since it already tracks the module's moduleClass - private[this] val moduleOrLazyVarOf = perRunCaches.newMap[Symbol, Symbol] + private[this] val moduleOrLazyVarOf = perRunCaches.newMap[Symbol, Symbol]() // TODO: can we drop FINAL? In any case, since these variables are MUTABLE, they cannot and will // not be emitted as ACC_FINAL. They are FINAL in the Scala sense, though: cannot be overridden. @@ -219,25 +225,25 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor def needsInit = Apply(Select(moduleVarRef, Object_eq), List(CODE.NULL)) val init = Assign(moduleVarRef, gen.newModule(module, moduleVar.info)) - /** double-checked locking following https://shipilev.net/blog/2014/safe-public-construction/#_safe_publication - * - * public class SafeDCLFactory { - * private volatile Singleton instance; - * - * public Singleton get() { - * if (instance == null) { // check 1 - * synchronized(this) { - * if (instance == null) { // check 2 - * instance = new Singleton(); - * } - * } - * } - * return instance; - * } - * } - * - * TODO: optimize using local variable? 
- */ + /* double-checked locking following https://shipilev.net/blog/2014/safe-public-construction/#_safe_publication + * + * public class SafeDCLFactory { + * private volatile Singleton instance; + * + * public Singleton get() { + * if (instance == null) { // check 1 + * synchronized(this) { + * if (instance == null) { // check 2 + * instance = new Singleton(); + * } + * } + * } + * return instance; + * } + * } + * + * TODO: optimize using local variable? + */ val computeName = nme.newLazyValSlowComputeName(module.name) val computeMethod = DefDef(NoMods, computeName, Nil, ListOfNil, TypeTree(UnitTpe), gen.mkSynchronized(monitorHolder)(If(needsInit, init, EmptyTree))) Block(computeMethod :: If(needsInit, Apply(Ident(computeName), Nil), EmptyTree) :: Nil, @@ -303,10 +309,10 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor private def newSuperLazy(lazyCallingSuper: Symbol, site: Type, lazyVar: Symbol) = { lazyCallingSuper.asTerm.referenced = lazyVar - val tp = site.memberInfo(lazyCallingSuper) + val tp = resultTypeMemberOfDeconst(site, lazyCallingSuper) - lazyVar setInfo tp.resultType - lazyCallingSuper setInfo tp + lazyVar setInfo tp + lazyCallingSuper setInfo MethodType(Nil, tp) } private def classNeedsInfoTransform(cls: Symbol): Boolean = { @@ -353,7 +359,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor if ((member hasFlag STABLE) && !(member hasFlag LAZY)) newDecls += newTraitSetter(member, clazz) } - } else if (member hasFlag MODULE) { + } else if (member.hasFlag(MODULE)) { nonStaticModuleToMethod(member) member setFlag NEEDS_TREES @@ -361,15 +367,15 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } } - if (newDecls nonEmpty) { + if (newDecls.nonEmpty) { val allDecls = newScope - origDecls foreach allDecls.enter - newDecls foreach allDecls.enter + origDecls.foreach(allDecls.enter(_)) + newDecls .foreach(allDecls.enter(_)) ClassInfoType(parents, allDecls, 
clazz) } else tp - case tp@ClassInfoType(parents, oldDecls, clazz) if !classNeedsInfoTransform(clazz) => tp + case tp@ClassInfoType(_, _, clazz) if !classNeedsInfoTransform(clazz) => tp // mix in fields & accessors for all mixed in traits case tp@ClassInfoType(parents, oldDecls, clazz) => @@ -387,7 +393,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor def newModuleVarMember(module: Symbol): TermSymbol = { val moduleVar = (clazz.newVariable(nme.moduleVarName(module.name.toTermName), module.pos.focus, MODULEVAR | ModuleOrLazyFieldFlags) - setInfo site.memberType(module).resultType + setInfo resultTypeMemberOfDeconst(site, module) addAnnotation VolatileAttr) if (module.hasAnnotation(TransientAttr)) moduleVar.addAnnotation(TransientAttr) @@ -398,7 +404,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } def newLazyVarMember(member: Symbol): TermSymbol = - Fields.this.newLazyVarMember(clazz, member, site.memberType(member).resultType) + Fields.this.newLazyVarMember(clazz, member, resultTypeMemberOfDeconst(site, member)) // a module does not need treatment here if it's static, unless it has a matching member in a superclass // a non-static method needs a module var @@ -431,12 +437,12 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // println(s"expanded modules for $clazz: $expandedModules") // afterOwnPhase, so traits receive trait setters for vals (needs to be at finest grain to avoid looping) - val synthInSubclass = - clazz.mixinClasses.flatMap(mixin => afterOwnPhase{mixin.info}.decls.toList.filter(accessorImplementedInSubclass)) + val synthInSubclass: List[Symbol] = + clazz.mixinClasses.flatMap(mixin => afterOwnPhase(mixin.info).decls.toList.filter(accessorImplementedInSubclass)) // mixin field accessors -- // invariant: (accessorsMaybeNeedingImpl, mixedInAccessorAndFields).zipped.forall(case (acc, clone :: _) => `clone` is clone of `acc` case _ => true) - val 
mixedInAccessorAndFields = synthInSubclass.map{ member => + val mixedInAccessorAndFields: List[List[Symbol]] = synthInSubclass.map{ member => def cloneAccessor() = { val clonedAccessor = (member cloneSymbol clazz) setPos clazz.pos setMixedinAccessorFlags(member, clonedAccessor) @@ -445,7 +451,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor if (symbolAnnotationsTargetFieldAndGetter(member)) // this simplifies to member.isGetter, but the full formulation really ties the triage together dropFieldAnnotationsFromGetter(clonedAccessor) - // if we don't cloneInfo, method argument symbols are shared between trait and subclasses --> lambalift proxy crash + // if we don't cloneInfo, method argument symbols are shared between trait and subclasses --> lambdalift proxy crash // TODO: use derive symbol variant? // println(s"cloning accessor $member to $clazz") // start at uncurry so that we preserve that part of the history where an accessor has a NullaryMethodType @@ -500,7 +506,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val newDecls = // under -Xcheckinit we generate all kinds of bitmaps, even when there are no lazy vals - if (expandedModulesAndLazyVals.isEmpty && mixedInAccessorAndFields.isEmpty && !settings.checkInit) + if (expandedModulesAndLazyVals.isEmpty && mixedInAccessorAndFields.isEmpty && !settings.checkInit.value) oldDecls.filterNot(omittableField) else { // must not alter `decls` directly @@ -530,6 +536,12 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } + // Deconst the result type of the getter (not relevant for modules, but we'll just be consistent), + // since we're after typers -- it's between unnecessary and wrong to keep constant types for result types of methods + // constant folding has already happened during typer. 
We can keep literal types around until erasure, + // but we shouldn't assume expressions of these types to be pure/constant foldable. (See pos/t10768.scala) + private def resultTypeMemberOfDeconst(site: Type, acc: Symbol) = site.memberType(acc).resultType.deconst + // done by uncurry's info transformer // instead of forcing every member's info to run said transformer, duplicate the flag update logic... def nonStaticModuleToMethod(module: Symbol): Unit = @@ -563,7 +575,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor * Desugar a local `lazy val x: Int = rhs` * or a local `object x { ...}` (the rhs will be instantiating the module's class) into: * - * ``` + * {{{ * val x\$lzy = new scala.runtime.LazyInt() * def x\$lzycompute(): Int = * x\$lzy.synchronized { @@ -571,7 +583,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor * else x\$lzy.initialize(rhs) // for a Unit-typed lazy val, this becomes `{ rhs ; x\$lzy.initialize() }` to avoid passing around BoxedUnit * } * def x(): Int = if (x\$lzy.initialized()) x\$lzy.value() else x\$lzycompute() - * ``` + * }}} * * The expansion is the same for local lazy vals and local objects, * except for the suffix of the underlying val's name (\$lzy or \$module) @@ -581,7 +593,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor import scala.reflect.{NameTransformer => nx} val owner = lazySym.owner - val lazyValType = lazySym.tpe.resultType + val lazyValType = lazySym.info.resultType.deconst val refClass = lazyHolders.getOrElse(lazyValType.typeSymbol, LazyRefClass) val isUnit = refClass == LazyUnitClass val refTpe = if (refClass != LazyRefClass) refClass.tpe else appliedType(refClass.typeConstructor, List(lazyValType)) @@ -602,9 +614,15 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // LazyUnit does not have a `value` member val valueSym = if (isUnit) NoSymbol else refTpe.member(nme.value) - def 
initialized = Select(Ident(holderSym), initializedSym) + def refineLiteral(tree: Tree): Tree = + lazyValType match { + case _: ConstantType => gen.mkAsInstanceOf(tree, lazyValType) + case _ => tree + } + + def initialized = Apply(Select(Ident(holderSym), initializedSym), Nil) def initialize = Select(Ident(holderSym), initializeSym) - def getValue = if (isUnit) UNIT else Apply(Select(Ident(holderSym), valueSym), Nil) + def getValue = if (isUnit) UNIT else refineLiteral(Apply(Select(Ident(holderSym), valueSym), Nil)) val computerSym = owner.newMethod(lazyName append nme.LAZY_SLOW_SUFFIX, pos, ARTIFACT | PRIVATE) setInfo MethodType(Nil, lazyValType) @@ -614,11 +632,12 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val computer = mkAccessor(computerSym)(gen.mkSynchronized(Ident(holderSym))( If(initialized, getValue, if (isUnit) Block(rhsAtComputer :: Nil, Apply(initialize, Nil)) - else Apply(initialize, rhsAtComputer :: Nil)))) + else refineLiteral(Apply(initialize, rhsAtComputer :: Nil))))) val accessor = mkAccessor(lazySym)( - If(initialized, getValue, - Apply(Ident(computerSym), Nil))) + refineLiteral( + If(initialized, getValue, + Apply(Ident(computerSym), Nil)))) // do last! // remove STABLE: prevent replacing accessor call of type Unit by BoxedUnit.UNIT in erasure @@ -677,7 +696,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val synthAccessorInClass = new SynthLazyAccessorsIn(clazz) def superLazy(getter: Symbol): Tree = { - assert(!clazz.isTrait) + assert(!clazz.isTrait, clazz) // this contortion was the only way I can get the super select to be type checked correctly.. // TODO: why does SelectSuper not work? 
val selectSuper = Select(Super(This(clazz), tpnme.EMPTY), getter.name) @@ -688,14 +707,14 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor synthAccessorInClass.expandLazyClassMember(lazyVar, getter, rhs) } - (afterOwnPhase { clazz.info.decls } toList) filter checkAndClearNeedsTrees map { + afterOwnPhase(clazz.info.decls).toList.filter(checkAndClearNeedsTrees).map { case module if module hasAllFlags (MODULE | METHOD) => moduleAccessorBody(module) case getter if getter hasAllFlags (LAZY | METHOD) => superLazy(getter) case setter if setter.isSetter => setterBody(setter) case getter if getter.hasFlag(ACCESSOR) => getterBody(getter) case field if !(field hasFlag METHOD) => mkTypedValDef(field) // vals/vars and module vars (cannot have flags PACKAGE | JAVA since those never receive NEEDS_TREES) case _ => EmptyTree - } filterNot (_ == EmptyTree) // there will likely be many EmptyTrees, but perhaps no thicket blocks that need expanding + }.filterNot(_ == EmptyTree) // there will likely be many EmptyTrees, but perhaps no thicket blocks that need expanding } def rhsAtOwner(stat: ValOrDefDef, newOwner: Symbol): Tree = @@ -754,7 +773,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val cd = super.transform(ClassDef(statSym.moduleClass, impl) setType NoType) if (currOwner.isClass) cd else { // local module -- symbols cannot be generated by info transformer, so do it all here - val Block(stats, _) = mkLazyLocalDef(statSym, gen.newModule(statSym, statSym.info.resultType)) + val Block(stats, _) = mkLazyLocalDef(statSym, gen.newModule(statSym, statSym.info.resultType)): @unchecked Thicket(cd :: stats) } diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index ec1a9861162b..01cf08418f98 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -1,7 +1,7 @@ /* * Scala 
(https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -39,14 +39,14 @@ abstract class Flatten extends InfoTransform { if (old.nonEmpty) debuglog(s"In scope of ${sym.owner}, unlinked $old_s") } - private def liftClass(sym: Symbol) { + private def liftClass(sym: Symbol): Unit = { if (!sym.isLifted) { sym setFlag LIFTED debuglog("re-enter " + sym.fullLocationString) replaceSymbolInCurrentScope(sym) } } - private def liftSymbol(sym: Symbol) { + private def liftSymbol(sym: Symbol): Unit = { liftClass(sym) } // This is a short-term measure partially working around objects being @@ -116,9 +116,9 @@ abstract class Flatten extends InfoTransform { def transformInfo(sym: Symbol, tp: Type): Type = flattened(tp) - protected def newTransformer(unit: CompilationUnit): Transformer = new Flattener + protected def newTransformer(unit: CompilationUnit): AstTransformer = new Flattener - class Flattener extends Transformer { + class Flattener extends AstTransformer { /** Buffers for lifted out classes */ private val liftedDefs = perRunCaches.newMap[Symbol, ListBuffer[Tree]]() @@ -126,10 +126,10 @@ abstract class Flatten extends InfoTransform { tree match { case PackageDef(_, _) => liftedDefs(tree.symbol.moduleClass) = new ListBuffer - super.transform(tree) + tree.transform(this) case Template(_, _, _) if tree.symbol.isDefinedInPackage => liftedDefs(tree.symbol.owner) = new ListBuffer - super.transform(tree) + tree.transform(this) case ClassDef(_, _, _, _) if tree.symbol.isNestedClass => // scala/bug#5508 Ordering important. In `object O { trait A { trait B } }`, we want `B` to appear after `A` in // the sequence of lifted trees in the enclosing package. Why does this matter? 
Currently, mixin @@ -142,12 +142,12 @@ abstract class Flatten extends InfoTransform { // - move the accessor creation to the Mixin info transformer val liftedBuffer = liftedDefs(tree.symbol.enclosingTopLevelClass.owner) val index = liftedBuffer.length - liftedBuffer.insert(index, super.transform(tree)) + liftedBuffer.insert(index, tree.transform(this)) if (tree.symbol.sourceModule.isStaticModule) removeSymbolInCurrentScope(tree.symbol.sourceModule) EmptyTree case _ => - super.transform(tree) + tree.transform(this) } } diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala index 66ad8f319ecd..f38a645ce56a 100644 --- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,6 +13,8 @@ package scala.tools.nsc package transform +import scala.annotation.nowarn + /** * An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent. * The symbol info is transformed assuming it is consistent right before this phase. 
@@ -29,11 +31,16 @@ trait InfoTransform extends Transform { def transformInfo(sym: Symbol, tpe: Type): Type override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = - new Phase(prev) + new InfoPhase(prev) protected def changesBaseClasses = true protected def keepsTypeParams = true + @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.transform\.InfoTransform\.Phase""") + final type InfoPhase = Phase + + @nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use InfoPhase instead", since = "2.13.4") class Phase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) { override val keepsTypeParams = InfoTransform.this.keepsTypeParams diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 0de2c369b8ab..cf5266fd953e 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,8 +15,10 @@ package transform import symtab._ import Flags._ +import scala.annotation.tailrec import scala.collection.mutable -import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet } +import scala.collection.mutable.{LinkedHashMap, LinkedHashSet} +import scala.tools.nsc.Reporting.WarningCategory.LintPerformance abstract class LambdaLift extends InfoTransform { import global._ @@ -52,7 +54,7 @@ abstract class LambdaLift extends InfoTransform { if (sym.isCapturedVariable) capturedVariableType(sym, tpe = lifted(tp), erasedTypes = true) else lifted(tp) - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new LambdaLifter(unit) class LambdaLifter(unit: CompilationUnit) extends explicitOuter.OuterPathTransformer(unit) { @@ -122,6 +124,7 @@ abstract class LambdaLift extends InfoTransform { * `logicallyEnclosingMember` in this case to return a temporary symbol corresponding to that * method. */ + @tailrec private def logicallyEnclosingMember(sym: Symbol): Symbol = { if (sym.isLocalDummy) { val enclClass = sym.enclClass @@ -186,15 +189,15 @@ abstract class LambdaLift extends InfoTransform { } } - private def markCalled(sym: Symbol, owner: Symbol) { + private def markCalled(sym: Symbol, owner: Symbol): Unit = { // println(s"mark called: $sym of ${sym.owner} is called by $owner") symSet(called, owner) += sym if (sym.enclClass != owner.enclClass) calledFromInner += sym } /** The traverse function */ - private val freeVarTraverser = new Traverser { - override def traverse(tree: Tree) { + private val freeVarTraverser = new InternalTraverser { + override def traverse(tree: Tree): Unit = { // try { //debug val sym = tree.symbol tree match { @@ -212,7 +215,7 @@ abstract class LambdaLift extends InfoTransform { } case Ident(name) => if (sym == NoSymbol) { - assert(name == nme.WILDCARD) + assert(name == nme.WILDCARD, name) } else if (sym.isLocalToBlock) { val owner = 
logicallyEnclosingMember(currentOwner) if (sym.isTerm && !sym.isMethod) markFree(sym, owner) @@ -224,7 +227,7 @@ abstract class LambdaLift extends InfoTransform { markCalled(sym, logicallyEnclosingMember(currentOwner)) case _ => } - super.traverse(tree) + tree.traverse(this) // } catch {//debug // case ex: Throwable => // Console.println(s"$ex while traversing $tree") @@ -238,7 +241,7 @@ abstract class LambdaLift extends InfoTransform { * value/variable/let that are free in some function or class, and to * all class/function symbols that are owned by some function. */ - private def computeFreeVars() { + private def computeFreeVars(): Unit = { freeVarTraverser.traverse(unit.body) do { @@ -247,7 +250,7 @@ abstract class LambdaLift extends InfoTransform { markFree(fv, caller) } while (changedFreeVars) - def renameSym(sym: Symbol) { + def renameSym(sym: Symbol): Unit = { val originalName = sym.name sym setName newName(sym) debuglog("renaming in %s: %s => %s".format(sym.owner.fullLocationString, originalName, sym.name)) @@ -261,9 +264,9 @@ abstract class LambdaLift extends InfoTransform { val join = nme.NAME_JOIN_STRING if (sym.isAnonymousFunction && sym.owner.isMethod) { - freshen(sym.name + join + nme.ensureNonAnon(sym.owner.name.toString) + join) + freshen("" + sym.name + join + nme.ensureNonAnon(sym.owner.name.toString) + join) } else { - val name = freshen(sym.name + join) + val name = freshen(s"${sym.name}${join}") // scala/bug#5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?) 
// Generating a unique name, mangled with the enclosing full class name (including // package - subclass might have the same name), avoids a VerifyError in the case @@ -289,10 +292,14 @@ abstract class LambdaLift extends InfoTransform { proxies(owner) = for (fv <- freeValues.toList) yield { val proxyName = proxyNames.getOrElse(fv, fv.name) - debuglog(s"new proxy ${proxyName} in ${owner.fullLocationString}") + debuglog(s"new proxy $proxyName in ${owner.fullLocationString}") val proxy = if (owner.isTrait) { - val accessorFlags = newFlags.toLong | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS + val accessorFlags = newFlags | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS + // scala-dev#408: fields for locals captured in a trait are non-final (created in mixin), as they are + // assigned in a trait setter. For safe publication, subclass constructors need to call releaseFence. + // We need to add the attachment in lambdalift, as mixin runs after the constructors phase. + owner.children.foreach(_.primaryConstructor.updateAttachment(ConstructorNeedsFence)) // TODO do we need to preserve pre-erasure info for the accessors (and a NullaryMethodType for the getter)? 
// can't have a field in the trait, so add a setter @@ -302,7 +309,7 @@ abstract class LambdaLift extends InfoTransform { // the getter serves as the proxy -- entered below owner.newMethod(proxyName.getterName, fv.pos, accessorFlags | STABLE) setInfo MethodType(Nil, fv.info) } else - owner.newValue(proxyName.toTermName, fv.pos, newFlags.toLong | PrivateLocal) setInfo fv.info + owner.newValue(proxyName.toTermName, fv.pos, newFlags | PrivateLocal) setInfo fv.info if (owner.isClass) owner.info.decls enter proxy proxy @@ -377,7 +384,7 @@ abstract class LambdaLift extends InfoTransform { } } - def freeArgsOrNil(sym: Symbol) = free.getOrElse(sym, Nil).toList + def freeArgsOrNil(sym: Symbol) = free.getOrElse[Iterable[Symbol]](sym, Nil).toList private def freeArgs(sym: Symbol): List[Symbol] = freeArgsOrNil(sym) @@ -486,25 +493,23 @@ abstract class LambdaLift extends InfoTransform { if (sym.isLocalToBlock) liftDef(withFreeParams) else withFreeParams - case ValDef(mods, name, tpt, rhs) => - if (sym.isCapturedVariable) { - val tpt1 = TypeTree(sym.tpe) setPos tpt.pos - - val refTypeSym = sym.tpe.typeSymbol - - val factoryCall = typer.typedPos(rhs.pos) { - rhs match { - case EmptyTree => - val zeroMSym = refZeroMethod(refTypeSym) - gen.mkMethodCall(zeroMSym, Nil) - case arg => - val createMSym = refCreateMethod(refTypeSym) - gen.mkMethodCall(createMSym, arg :: Nil) - } + case ValDef(mods, name, tpt, rhs) if sym.isCapturedVariable => + val tpt1 = TypeTree(sym.tpe) setPos tpt.pos + val refTypeSym = sym.tpe.typeSymbol + val factoryCall = typer.typedPos(rhs.pos) { + rhs match { + case EmptyTree => + val zeroMSym = refZeroMethod(refTypeSym) + gen.mkMethodCall(zeroMSym, Nil) + case arg => + val createMSym = refCreateMethod(refTypeSym) + gen.mkMethodCall(createMSym, arg :: Nil) } - - treeCopy.ValDef(tree, mods, name, tpt1, factoryCall) - } else tree + } + if (settings.warnCaptured) + runReporting.warning(tree.pos, s"Modification of variable $name within a closure causes it to be 
boxed.", LintPerformance, sym) + treeCopy.ValDef(tree, mods, name, tpt1, factoryCall) + case ValDef(_, _, _, _) => tree case Return(expr) => assert(sym == currentMethod, sym) tree @@ -513,7 +518,7 @@ abstract class LambdaLift extends InfoTransform { case Assign(Apply(TypeApply(sel @ Select(qual, _), _), List()), rhs) => // eliminate casts introduced by selecting a captured variable field // on the lhs of an assignment. - assert(sel.symbol == Object_asInstanceOf) + assert(sel.symbol == Object_asInstanceOf, "asInstanceOf") treeCopy.Assign(tree, qual, rhs) case Ident(name) => val tree1 = @@ -532,7 +537,7 @@ abstract class LambdaLift extends InfoTransform { } else tree1 case Block(stats, expr0) => - val (lzyVals, rest) = stats partition { + val (lzyVals, rest) = partitionConserve(stats) { case stat: ValDef => stat.symbol.isLazy || stat.symbol.isModuleVar case _ => false } @@ -559,7 +564,7 @@ abstract class LambdaLift extends InfoTransform { def addLifted(stat: Tree): Tree = stat match { case ClassDef(_, _, _, _) => val lifted = liftedDefs remove stat.symbol match { - case Some(xs) => xs reverseMap addLifted + case Some(xs) => xs.reverseIterator.map(addLifted).toList case _ => log("unexpectedly no lifted defs for " + stat.symbol) ; Nil } deriveClassDef(stat)(impl => deriveTemplate(impl)(_ ::: lifted)) @@ -572,7 +577,7 @@ abstract class LambdaLift extends InfoTransform { super.transformStats(stats, exprOwner) map addLifted } - override def transformUnit(unit: CompilationUnit) { + override def transformUnit(unit: CompilationUnit): Unit = { computeFreeVars() afterOwnPhase { super.transformUnit(unit) @@ -584,7 +589,7 @@ abstract class LambdaLift extends InfoTransform { private def addFree[A](sym: Symbol, free: List[A], original: List[A]): List[A] = { val prependFree = ( !sym.isConstructor // this condition is redundant for now. 
It will be needed if we remove the second condition in 2.12.x - && (settings.Ydelambdafy.value != "inline" && sym.isDelambdafyTarget) // scala/bug#8359 Makes the lambda body a viable as the target MethodHandle for a call to LambdaMetafactory + && (settings.Ydelambdafy.value == "method" && sym.isDelambdafyTarget) // scala/bug#8359 Makes the lambda body a viable as the target MethodHandle for a call to LambdaMetafactory ) if (prependFree) free ::: original else original ::: free diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 4f3eed01a0d3..447e125eb9a4 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,10 +17,11 @@ import symtab._ import Flags._ import scala.annotation.tailrec import scala.collection.mutable +import scala.reflect.NameTransformer import scala.reflect.internal.util.ListOfNil -abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthesis { +abstract class Mixin extends Transform with ast.TreeDSL with AccessorSynthesis { import global._ import definitions._ import CODE._ @@ -32,7 +33,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes /** Some trait methods need to be implemented in subclasses, so they cannot be private. * * We used to publicize during explicitouter (for some reason), so the condition is a bit more involved now it's done here - * (need to exclude lambdaLIFTED methods, as they do no exist during explicitouter and thus did not need to be excluded...) + * (need to exclude lambdaLIFTED methods, as they do not exist during explicitouter and thus did not need to be excluded...) 
* * They may be protected, now that traits are compiled 1:1 to interfaces. * The same disclaimers about mapping Scala's notion of visibility to Java's apply: @@ -99,24 +100,41 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes /** Returns the symbol that is accessed by a super-accessor in a mixin composition. * * @param base The class in which everything is mixed together - * @param member The symbol statically referred to by the superaccessor in the trait + * @param acc The symbol statically referred to by the superaccessor in the trait * @param mixinClass The mixin class that produced the superaccessor */ - private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol = + private def rebindSuper(base: Symbol, acc: Symbol, mixinClass: Symbol): Symbol = exitingSpecialize { - var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail + val site = base.thisType + // the specialized version T$sp of a trait T will have a super accessor that has the same alias + // as the super accessor in trait T; we must rebind super + // from the vantage point of the original trait T, not the specialized T$sp + // (it's inserted in the base class seq late in the game and doesn't count as a super class in the super-call scheme) + val superTargetClass = if (mixinClass.isSpecialized) unspecializedSymbol(mixinClass) else mixinClass + var bcs = base.info.baseClasses.dropWhile(superTargetClass != _).tail var sym: Symbol = NoSymbol - debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe + - " " + mixinClass + " " + base.info.baseClasses + "/" + bcs) - while (!bcs.isEmpty && sym == NoSymbol) { - if (settings.isDebug) { - val other = bcs.head.info.nonPrivateDecl(member.name) - debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe + - " " + other.isDeferred) - } - sym = member.matchingSymbol(bcs.head, base.thisType).suchThat(sym => !sym.hasFlag(DEFERRED | BRIDGE)) + + // println(s"starting rebindsuper 
$base mixing in from $mixinClass: $acc : ${acc.tpe} of ${acc.owner} ; looking for super in $bcs (all bases: ${base.info.baseClasses})") + + // don't rebind to specialized members unless we're looking for the super of a specialized member, + // since we can't jump back and forth between the unspecialized name and specialized one + // (So we jump into the non-specialized world and stay there until we hit our super.) + val likeSpecialized = if (acc.isSpecialized) 0 else SPECIALIZED + + while (sym == NoSymbol && bcs.nonEmpty) { + sym = acc.matchingSymbol(bcs.head, site).suchThat(sym => !sym.hasFlag(DEFERRED | BRIDGE | likeSpecialized)) bcs = bcs.tail } + + // println(s"rebound $base from $mixinClass to $sym in ${sym.owner} ($bcs)") + + // Having a matching symbol is not enough: its info should also be a subtype + // of the superaccessor's type, see test/files/run/t11351.scala + if ((sym ne acc) && sym.exists && !(sym.isErroneous || (site.memberInfo(sym) <:< site.memberInfo(acc)))) + reporter.error(base.pos, s"illegal trait super target found for $acc required by $mixinClass;" + + s"\n found : ${exitingTyper{sym.defStringSeenAs(site.memberInfo(sym))}} in ${sym.owner};" + + s"\n expected: ${exitingTyper{acc.defStringSeenAs(site.memberInfo(acc))}} in ${acc.owner}") + sym } @@ -174,12 +192,38 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes // Optimize: no need if mixinClass has no typeparams. // !!! JZ Really? What about the effect of abstract types, prefix? 
if (mixinClass.typeParams.isEmpty) sym - else sym modifyInfo (_ => forwarderInfo) + else { + sym modifyInfo (_ => forwarderInfo) + avoidTypeParamShadowing(mixinMember, sym) + sym + } } newSym } - def publicizeTraitMethods(clazz: Symbol) { + // scala/bug#11523 rename method type parameters that shadow enclosing class type parameters in the host class + // of the mixin forwarder + private def avoidTypeParamShadowing(mixinMember: Symbol, forwarder: Symbol): Unit = { + def isForwarderTparam(sym: Symbol) = { + val owner = sym.owner + // TODO fix forwarder's info should not refer to tparams of mixinMember, fix cloning in caller! + // try forwarderInfo.cloneInfo(sym) + owner == forwarder || owner == mixinMember + } + + val symTparams: mutable.Map[Name, Symbol] = mutable.Map.from(forwarder.typeParams.iterator.map(t => (t.name, t))) + forwarder.info.foreach { + case TypeRef(_, tparam, _) if tparam.isTypeParameter && !isForwarderTparam(tparam) => + symTparams.get(tparam.name).foreach{ symTparam => + debuglog(s"Renaming ${symTparam} (owned by ${symTparam.owner}, a mixin forwarder hosted in ${forwarder.enclClass.fullNameString}) to avoid shadowing enclosing type parameter of ${tparam.owner.fullNameString})") + symTparam.name = symTparam.name.append(NameTransformer.NAME_JOIN_STRING) + symTparams.remove(tparam.name) // only rename once + } + case _ => + } + } + + def publicizeTraitMethods(clazz: Symbol): Unit = { if (treatedClassInfos(clazz) != clazz.info) { treatedClassInfos(clazz) = clazz.info assert(phase == currentRun.mixinPhase, phase) @@ -188,8 +232,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes if (member.isMethod) publicizeTraitMethod(member) else { assert(member.isTerm && !member.isDeferred, member) - // disable assert to support compiling against code compiled by an older compiler (until we re-starr) - // assert(member hasFlag PRESUPER, s"unexpected $member in $clazz ${member.debugFlagString}") + assert(member hasFlag PRESUPER, 
s"unexpected $member in $clazz ${member.debugFlagString}") clazz.info.decls.unlink(member) } @@ -198,6 +241,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes } } + def registerRequiredDirectInterface(alias: Symbol, clazz: Symbol, msg: Type => String): Unit = { + val owner = alias.owner + if (owner.isJavaDefined && owner.isInterface) { + if (!clazz.parentSymbolsIterator.contains(owner)) { + val suggestedParent = exitingTyper(clazz.info.baseType(owner)) + reporter.error(clazz.pos, msg(suggestedParent)) + } else + erasure.requiredDirectInterfaces.getOrElseUpdate(clazz, mutable.Set.empty) += owner + } + } + /** Add all members to be mixed in into a (non-trait-) class * These are: * for every mixin trait T that is not also inherited by the superclass: @@ -209,7 +263,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes * - for every super accessor in T, add an implementation of that accessor * - for every module in T, add a module */ - def addMixedinMembers(clazz: Symbol, unit: CompilationUnit) { + def addMixedinMembers(clazz: Symbol, unit: CompilationUnit): Unit = { def cloneAndAddMixinMember(mixinClass: Symbol, mixinMember: Symbol): Symbol = ( cloneAndAddMember(mixinClass, mixinMember, clazz) setPos clazz.pos @@ -217,84 +271,79 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes ) /* Mix in members of implementation class mixinClass into class clazz */ - def mixinTraitForwarders(mixinClass: Symbol) { - for (member <- mixinClass.info.decls ; if isImplementedStatically(member)) { - member overridingSymbol clazz match { - case NoSymbol => - val isMemberOfClazz = clazz.info.findMember(member.name, 0, 0L, stableOnly = false).alternatives.contains(member) - if (isMemberOfClazz) { - def genForwarder(required: Boolean): Unit = { - val owner = member.owner - val isJavaInterface = owner.isJavaDefined && owner.isInterface - if (isJavaInterface && 
!clazz.parentSymbolsIterator.contains(owner)) { - if (required) { - val text = s"Unable to implement a mixin forwarder for $member in $clazz unless interface ${owner.name} is directly extended by $clazz." - reporter.error(clazz.pos, text) - } - } else { - if (isJavaInterface) - erasure.requiredDirectInterfaces.getOrElseUpdate(clazz, mutable.Set.empty) += owner - cloneAndAddMixinMember(mixinClass, member).asInstanceOf[TermSymbol] setAlias member - } + def mixinTraitForwarders(mixinClass: Symbol): Unit = { + def isMemberOfClass(member: Symbol): Boolean = + clazz.info.findMember(member.name, 0, 0L, stableOnly = false).alternatives.contains(member) + for (member <- mixinClass.info.decls) + if (isImplementedStatically(member) && member.overridingSymbol(clazz) == NoSymbol && isMemberOfClass(member)) { + def genForwarder(required: Boolean): Unit = { + val owner = member.owner + val isJavaInterface = owner.isJavaDefined && owner.isInterface + if (isJavaInterface && !clazz.parentSymbolsIterator.contains(owner)) { + if (required) { + val text = s"Unable to implement a mixin forwarder for $member in $clazz unless interface ${owner.name} is directly extended by $clazz." + reporter.error(clazz.pos, text) } + } else { + if (isJavaInterface) + erasure.requiredDirectInterfaces.getOrElseUpdate(clazz, mutable.Set.empty) += owner + cloneAndAddMixinMember(mixinClass, member).asInstanceOf[TermSymbol] setAlias member + } + } - // `member` is a concrete method defined in `mixinClass`, which is a base class of - // `clazz`, and the method is not overridden in `clazz`. A forwarder is needed if: - // - // - A non-trait base class of `clazz` defines a matching method. Example: - // class C {def f: Int}; trait T extends C {def f = 1}; class D extends T - // Even if C.f is abstract, the forwarder in D is needed, otherwise the JVM would - // resolve `D.f` to `C.f`, see jvms-6.5.invokevirtual. 
- // - // - There exists another concrete, matching method in a parent interface `p` of - // `clazz`, and the `mixinClass` does not itself extend `p`. In this case the - // forwarder is needed to disambiguate. Example: - // trait T1 {def f = 1}; trait T2 extends T1 {override def f = 2}; class C extends T2 - // In C we don't need a forwarder for f because T2 extends T1, so the JVM resolves - // C.f to T2.f non-ambiguously. See jvms-5.4.3.3, "maximally-specific method". - // trait U1 {def f = 1}; trait U2 {self:U1 => override def f = 2}; class D extends U2 - // In D the forwarder is needed, the interfaces U1 and U2 are unrelated at the JVM - // level. - - @tailrec - def existsCompetingMethod(baseClasses: List[Symbol]): Boolean = baseClasses match { - case baseClass :: rest => - if (baseClass ne mixinClass) { - val m = member.overriddenSymbol(baseClass) - val isCompeting = m.exists && { - !m.owner.isTraitOrInterface || - (!m.isDeferred && !mixinClass.isNonBottomSubClass(m.owner)) - } - isCompeting || existsCompetingMethod(rest) - } else existsCompetingMethod(rest) - - case _ => false - } + // `member` is a concrete method defined in `mixinClass`, which is a base class of + // `clazz`, and the method is not overridden in `clazz`. A forwarder is needed if: + // + // - A non-trait base class of `clazz` defines a matching method. Example: + // class C {def f: Int}; trait T extends C {def f = 1}; class D extends T + // Even if C.f is abstract, the forwarder in D is needed, otherwise the JVM would + // resolve `D.f` to `C.f`, see jvms-6.5.invokevirtual. + // + // - There exists another concrete, matching method in a parent interface `p` of + // `clazz`, and the `mixinClass` does not itself extend `p`. In this case the + // forwarder is needed to disambiguate. Example: + // trait T1 {def f = 1}; trait T2 extends T1 {override def f = 2}; class C extends T2 + // In C we don't need a forwarder for f because T2 extends T1, so the JVM resolves + // C.f to T2.f non-ambiguously. 
See jvms-5.4.3.3, "maximally-specific method". + // trait U1 {def f = 1}; trait U2 {self:U1 => override def f = 2}; class D extends U2 + // In D the forwarder is needed, the interfaces U1 and U2 are unrelated at the JVM + // level. + + @tailrec + def existsCompetingMethod(baseClasses: List[Symbol]): Boolean = baseClasses match { + case baseClass :: rest => + if (baseClass ne mixinClass) { + val m = member.overriddenSymbol(baseClass) + val isCompeting = m.exists && { + !m.owner.isTraitOrInterface || + (!m.isDeferred && !mixinClass.isNonBottomSubClass(m.owner)) + } + isCompeting || existsCompetingMethod(rest) + } else existsCompetingMethod(rest) - def generateJUnitForwarder: Boolean = { - settings.mixinForwarderChoices.isAtLeastJunit && - member.annotations.nonEmpty && - JUnitAnnotations.exists(annot => annot.exists && member.hasAnnotation(annot)) - } + case _ => false + } - def generateSerializationForwarder: Boolean = { - (member.name == nme.readResolve || member.name == nme.writeReplace) && member.info.paramss == ListOfNil - } + def generateJUnitForwarder: Boolean = { + settings.mixinForwarderChoices.isAtLeastJunit && + member.annotations.nonEmpty && + JUnitAnnotations.exists(annot => annot.exists && member.hasAnnotation(annot)) + } - if (existsCompetingMethod(clazz.baseClasses) || generateJUnitForwarder || generateSerializationForwarder) - genForwarder(required = true) - else if (settings.mixinForwarderChoices.isTruthy) - genForwarder(required = false) - } + def generateSerializationForwarder: Boolean = { + (member.name == nme.readResolve || member.name == nme.writeReplace) && member.info.paramss == ListOfNil + } - case _ => + if (existsCompetingMethod(clazz.baseClasses) || generateJUnitForwarder || generateSerializationForwarder) + genForwarder(required = true) + else if (settings.mixinForwarderChoices.isTruthy) + genForwarder(required = false) } - } } /* Mix in members of trait mixinClass into class clazz. 
*/ - def mixinTraitMembers(mixinClass: Symbol) { + def mixinTraitMembers(mixinClass: Symbol): Unit = { // For all members of a trait's interface do: for (mixinMember <- mixinClass.info.decls) { if (mixinMember.hasFlag(SUPERACCESSOR)) { // mixin super accessors @@ -306,13 +355,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes reporter.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format( mixinMember.alias, mixinClass)) case alias1 => - if (alias1.owner.isJavaDefined && alias1.owner.isInterface) { - if (!clazz.parentSymbolsIterator.contains(alias1.owner)) { - val suggestedParent = exitingTyper(clazz.info.baseType(alias1.owner)) - reporter.error(clazz.pos, s"Unable to implement a super accessor required by trait ${mixinClass.name} unless $suggestedParent is directly extended by $clazz.") - } else - erasure.requiredDirectInterfaces.getOrElseUpdate(clazz, mutable.Set.empty) += alias1.owner - } + registerRequiredDirectInterface(alias1, clazz, parent => + s"Unable to implement a super accessor required by trait ${mixinClass.name} unless $parent is directly extended by $clazz.") + if (alias1.isValue && !alias1.isMethod || alias1.isAccessor) + reporter.error(clazz.pos, s"parent $mixinClass has a super call to method ${mixinMember.alias.fullNameString}, which binds to the value ${alias1.fullNameString}. 
Super calls can only target methods.") superAccessor.asInstanceOf[TermSymbol] setAlias alias1 } } @@ -358,7 +404,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes // first complete the superclass with mixed in members addMixedinMembers(clazz.superClass, unit) - for (mc <- clazz.mixinClasses ; if mc.isTrait) { + for (mc <- clazz.mixinClasses if mc.isTrait) { // @SEAN: adding trait tracking so we don't have to recompile transitive closures unit.registerDependency(mc) publicizeTraitMethods(mc) @@ -367,14 +413,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes } } - override def transformInfo(sym: Symbol, tp: Type): Type = tp - // --------- term transformation ----------------------------------------------- - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new MixinTransformer(unit) - class MixinTransformer(unit : CompilationUnit) extends Transformer with AccessorTreeSynthesis { + class MixinTransformer(unit : CompilationUnit) extends AstTransformer with AccessorTreeSynthesis { /** The typer */ private var localTyper: erasure.Typer = _ protected def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree) @@ -383,7 +427,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes private val rootContext = erasure.NoContext.make(EmptyTree, rootMirror.RootClass, newScope) - private val nullables = mutable.AnyRefMap[Symbol, Map[Symbol, List[Symbol]]]() + private val nullables = mutable.HashMap[Symbol, Map[Symbol, List[Symbol]]]() /** The first transform; called in a pre-order traversal at phase mixin * (that is, every node is processed before its children). 
@@ -443,8 +487,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes val singleUseFields: Map[Symbol, List[Symbol]] = { val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil - object SingleUseTraverser extends Traverser { - override def traverse(tree: Tree) { + object SingleUseTraverser extends InternalTraverser { + override def traverse(tree: Tree): Unit = { tree match { // assignment targets don't count as a dereference -- only check the rhs case Assign(_, rhs) => traverse(rhs) @@ -458,8 +502,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes // println("added use in: " + currentOwner + " -- " + tree) usedIn(sym) ::= currentOwner } - super.traverse(tree) - case _ => super.traverse(tree) + tree.traverse(this) + case _ => tree.traverse(this) } } } @@ -482,7 +526,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes // invert the map to see which fields can be nulled for each non-transient lazy val for ((field, users) <- singleUseFields; lazyFld <- users) map(lazyFld) += field - map.mapValues(_.toList sortBy (_.id)).toMap + map.view.mapValues(_.toList.sortBy(_.id)).toMap } } @@ -541,7 +585,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes */ def completeSuperAccessor(stat: Tree) = stat match { case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor => - val body = atPos(stat.pos)(Apply(SuperSelect(clazz, stat.symbol.alias), vparams map (v => Ident(v.symbol)))) + val alias = stat.symbol.alias + debuglog(s"implementing super accessor in $clazz for ${stat.symbol} --> ${alias.owner} . 
${alias}") + registerRequiredDirectInterface(alias, clazz, parent => + s"Unable to implement a super accessor, $parent needs to be directly extended by $clazz.") + val body = atPos(stat.pos)(Apply(SuperSelect(clazz, alias), vparams map (v => Ident(v.symbol)))) val pt = stat.symbol.tpe.resultType copyDefDef(stat)(rhs = enteringMixin(transform(localTyper.typed(body, pt)))) @@ -568,7 +616,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes val sym = tree.symbol tree match { - case templ @ Template(parents, self, body) => + case Template(parents, self, body) => // change parents of templates to conform to parents in the symbol info val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos) @@ -608,7 +656,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes */ override def transform(tree: Tree): Tree = { val saved = localTyper - val tree1 = super.transform(preTransform(tree)) + val tree1 = preTransform(tree).transform(this) // localTyper needed when not flattening inner classes. parts after an // inner class will otherwise be typechecked with a wrong scope try exitingMixin(postTransform(tree1)) diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 5f8b2a026e44..c2a495e2d0e4 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,6 +13,7 @@ package scala.tools.nsc package transform +import scala.annotation.nowarn import scala.reflect.internal.SymbolPairs /** A class that yields a kind of iterator (`Cursor`), @@ -24,27 +25,66 @@ import scala.reflect.internal.SymbolPairs abstract class OverridingPairs extends SymbolPairs { import global._ - class Cursor(base: Symbol) extends super.Cursor(base) { - private lazy val isScala213 = settings.isScala213 + // TODO: uncomment when deprecating the below + // @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.transform\.OverridingPairs\.Cursor""") + final type PairsCursor = Cursor + // TODO: deprecate when we can cleanly cross-compile without warnings + // @deprecated("use PairsCursor instead", since = "2.13.4") + @nowarn("msg=shadowing a nested class of a parent is deprecated") + class Cursor(base: Symbol) extends super.Cursor(base) { /** Symbols to exclude: Here these are constructors and private/artifact symbols, * including bridges. But it may be refined in subclasses. */ - override protected def exclude(sym: Symbol) = { - sym.isPrivateLocal && (sym.isParamAccessor || !isScala213) || - sym.isArtifact || - sym.isConstructor || - (sym.isPrivate && sym.owner != base) // Privates aren't inherited. Needed for pos/t7475a.scala - } + override protected def exclude(sym: Symbol) = ( + (sym.isPrivateLocal && sym.isParamAccessor) + || sym.isArtifact + || sym.isConstructor + || (sym.isPrivate && sym.owner != base) // Privates aren't inherited. Needed for pos/t7475a.scala + ) /** Types always match. Term symbols match if their member types * relative to `self` match. */ override protected def matches(high: Symbol) = low.isType || ( (low.owner != high.owner) // don't try to form pairs from overloaded members + && !bothJavaOwnedAndEitherIsField(low, high) && !high.isPrivate // private or private[this] members never are overridden && !exclude(low) // this admits private, as one can't have a private member that matches a less-private member. 
&& (lowMemberType matches (self memberType high)) ) // TODO we don't call exclude(high), should we? + + override protected def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = { + // Two Java-defined methods can be skipped if javac will check the overrides. Skipping is actually necessary to + // avoid false errors, as Java doesn't have Scala's linearization rules and subtyping rules + // (`Array[String] <:< Array[Object]`). However, when a Java interface is mixed into a Scala class, mixed-in + // methods need to go through override checking (neg/t12394, neg/t12380). + lowClass.isJavaDefined && highClass.isJavaDefined && { // skip if both are java-defined, and + lowClass.isNonBottomSubClass(highClass) || { // - low <:< high, which means they are overrides in Java and javac is doing the check; or + base.info.parents.tail.forall(p => { // - every mixin parent is unrelated to (not a subclass of) low and high, i.e., + val psym = p.typeSymbol // we're not mixing in high or low, both are coming from the superclass + !psym.isNonBottomSubClass(lowClass) && !psym.isNonBottomSubClass(highClass) + }) + } + } + } + } + + private def bothJavaOwnedAndEitherIsField(low: Symbol, high: Symbol): Boolean = { + low.owner.isJavaDefined && high.owner.isJavaDefined && + (low.isField || high.isField) + } + + final class BridgesCursor(base: Symbol) extends PairsCursor(base) { + // Varargs bridges may need generic bridges due to the non-repeated part of the signature of the involved methods. + // The vararg bridge is generated during refchecks (probably to simplify override checking), + // but then the resulting varargs "bridge" method may itself need an actual erasure bridge. + // TODO: like javac, generate just one bridge method that wraps Seq <-> varargs and does erasure-induced casts + override def exclude(sym: Symbol) = !sym.isMethod || super.exclude(sym) + + // Skip if the (non-trait) class in `parents` is a subclass of the owners of both low and high. 
+ // Correctness of bridge generation relies on visiting each such class only once. + override def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = + nonTraitParent.isNonBottomSubClass(lowClass) && nonTraitParent.isNonBottomSubClass(highClass) } } diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala index 9eb381e76851..f24f52477b68 100644 --- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,7 +24,7 @@ trait PostErasure extends InfoTransform with TypingTransformers with scala.refle val phaseName: String = "posterasure" - def newTransformer(unit: CompilationUnit): Transformer = new PostErasureTransformer(unit) + def newTransformer(unit: CompilationUnit): AstTransformer = new PostErasureTransformer(unit) override def changesBaseClasses = false class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { @@ -44,7 +44,7 @@ trait PostErasure extends InfoTransform with TypingTransformers with scala.refle case AsInstanceOf(v, tpe) if v.tpe <:< tpe => finish(v) // x.asInstanceOf[X] ==> x case ValueClass.BoxAndUnbox(v) => finish(v) // (new B(v)).unbox ==> v case ValueClass.BoxAndCompare(v1, op, v2) => binop(v1, op, v2) // new B(v1) == new B(v2) ==> v1 == v2 - case tree => tree + case transformed => transformed } } } diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala index 26e0347be4f5..761a427b3f7b 100644 --- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala @@ -1,7 +1,7 @@ /* * Scala 
(https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,6 +13,8 @@ package scala.tools.nsc package transform +import scala.annotation._ + /** A sample transform. */ abstract class SampleTransform extends Transform { @@ -24,10 +26,10 @@ abstract class SampleTransform extends Transform { /** the following two members override abstract members in Transform */ val phaseName: String = "sample-phase" - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new SampleTransformer(unit) - class SampleTransformer(unit: CompilationUnit) extends Transformer { + class SampleTransformer(@unused unit: CompilationUnit) extends AstTransformer { override def transform(tree: Tree): Tree = { val tree1 = super.transform(tree); // transformers always maintain `currentOwner`. 
@@ -36,14 +38,15 @@ abstract class SampleTransform extends Transform { expr case Block(defs, sup @ Super(qual, mix)) => // A hypothetical transformation, which replaces // {super} by {super.sample} - treeCopy.Block( // `copy` is the usual lazy tree copier - tree1, defs, + treeCopy.Block( // `treeCopy` is a lazy tree copier + tree1, + defs, typed( // `typed` assigns types to its tree argument atPos(tree1.pos)( // `atPos` fills in position of its tree argument Select( // The `Select` factory method is defined in class `Trees` sup, currentOwner.newValue( // creates a new term symbol owned by `currentOwner` - newTermName("sample"), // The standard term name creator + TermName("sample"), // The standard term name creator tree1.pos))))) case _ => tree1 diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index a5c117af8486..ce3ea681f147 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,9 +14,11 @@ package scala package tools.nsc package transform -import scala.collection.{ immutable, mutable } +import scala.annotation._ +import scala.collection.mutable, mutable.{Buffer, HashMap, ListBuffer} import scala.tools.nsc.symtab.Flags import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ /** Specialize code on types. 
* @@ -72,17 +74,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { override def changesBaseClasses = true override def keepsTypeParams = true - type TypeEnv = immutable.Map[Symbol, Type] - def emptyEnv: TypeEnv = Map[Symbol, Type]() + type TypeEnv = Map[Symbol, Type] + def emptyEnv: TypeEnv = Map.empty[Symbol, Type] private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name) - - /** TODO - this is a lot of maps. - */ - /** For a given class and concrete type arguments, give its specialized class */ - val specializedClass = perRunCaches.newAnyRefMap[Symbol, mutable.AnyRefMap[TypeEnv, Symbol]] + val specializedClass = perRunCaches.newAnyRefMap[Symbol, HashMap[TypeEnv, Symbol]]() + + // read-only map, where missing value defaults to empty immutable.Map + def specializationOf(sym: Symbol) = specializedClass.getOrElse(sym, Map.empty[TypeEnv, Symbol]) /** Map a method symbol to a list of its specialized overloads in the same class. 
*/ private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil @@ -107,29 +108,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def specializedOn(sym: Symbol): List[Symbol] = { val GroupOfSpecializable = currentRun.runDefinitions.GroupOfSpecializable - sym getAnnotation SpecializedClass match { - case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol) - case Some(ann @ AnnotationInfo(_, args, _)) => { - args map (_.tpe) flatMap { tp => - tp baseType GroupOfSpecializable match { - case TypeRef(_, GroupOfSpecializable, arg :: Nil) => - arg.typeArgs map (_.typeSymbol) - case _ => - tp.typeSymbol :: Nil - } - } + def expandGroup(tp: Type): List[Symbol] = + tp.baseType(GroupOfSpecializable) match { + case TypeRef(_, GroupOfSpecializable, arg :: Nil) => arg.typeArgs.map(_.typeSymbol) + case _ => tp.typeSymbol :: Nil } - case _ => Nil + sym.getAnnotation(SpecializedClass) match { + case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol) + case Some(AnnotationInfo(_, args, _)) => args.map(_.tpe).flatMap(expandGroup) + case _ => Nil } } - @annotation.tailrec private def findSymbol[T](candidates: List[T], f: T => Symbol): Symbol = { - if (candidates.isEmpty) NoSymbol - else f(candidates.head) match { - case NoSymbol => findSymbol(candidates.tail, f) - case sym => sym - } - } private def hasNewParents(tree: Tree) = { val parents = tree.symbol.info.parents val prev = enteringPrevPhase(tree.symbol.info.parents) @@ -154,9 +144,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * the given args. Expects the lists to have the same length. 
*/ def fromSpecialization(sym: Symbol, args: List[Type]): TypeEnv = { - ifDebug(assert(sym.info.typeParams.length == args.length, sym + " args: " + args)) + ifDebug(assert(sym.info.typeParams.sizeCompare(args) == 0, "" + sym + " args: " + args)) - emptyEnv ++ collectMap2(sym.info.typeParams, args)((k, v) => k.isSpecialized) + emptyEnv ++ collectMap2(sym.info.typeParams, args)((k, _) => k.isSpecialized) } /** Does typeenv `t1` include `t2`? All type variables in `t1` @@ -172,8 +162,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } /** Reduce the given environment to contain mappings only for type variables in tps. */ - def restrict(env: TypeEnv, tps: immutable.Set[Symbol]): TypeEnv = - env.filterKeys(tps).toMap + def restrict(env: TypeEnv, tps: Set[Symbol]): TypeEnv = + env.view.filterKeys(tps).toMap /** Is the given environment a valid specialization for sym? * It is valid if each binding is from a @specialized type parameter in sym (or its owner) @@ -203,7 +193,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Just to mark uncheckable */ override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = new SpecializationPhase(prev) - class SpecializationPhase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) { + class SpecializationPhase(prev: scala.tools.nsc.Phase) extends InfoPhase(prev) { override def checkable = false override def run(): Unit = { super.run() @@ -230,10 +220,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new SpecializationTransformer(unit) - abstract class SpecializedInfo { + sealed abstract class SpecializedInfo { def target: Symbol /** Are type bounds of @specialized type parameters of 'target' now in 'env'? 
*/ @@ -260,13 +250,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def target = t } - /** Symbol is a special overload of the super accessor. */ + /** Symbol is a special overload of the super accessor. Treated like an abstract method with no specialized overload. */ case class SpecialSuperAccessor(t: Symbol) extends SpecializedInfo { def target = t } /** Symbol is a specialized accessor for the `target` field. */ - case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { } + case class SpecializedAccessor(target: Symbol) extends SpecializedInfo /** Symbol is a specialized method whose body should be the target's method body. */ case class Implementation(target: Symbol) extends SpecializedInfo @@ -293,7 +283,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("degenerate: " + target + " stv tparams: " + stvTypeParams + " stv info: " + stvResult) - (stvTypeParams -- stvResult).nonEmpty + (stvTypeParams diff stvResult).nonEmpty } } @@ -336,7 +326,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefTpe else tp ) - specializedClass.getOrElse(sym, Map.empty[TypeEnv, Symbol]).get(TypeEnv.fromSpecialization(sym, args1)) match { + specializationOf(sym).get(TypeEnv.fromSpecialization(sym, args1)) match { case Some(sym1) => typeRef(pre1, sym1, survivingArgs(sym, args)) case None => typeRef(pre1, sym, args) } @@ -346,12 +336,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def specializedFunctionName(sym: Symbol, args: List[Type]) = exitingSpecialize { require(isFunctionSymbol(sym), sym) - val env: TypeEnv = TypeEnv.fromSpecialization(sym, args) - specializedClass.getOrElse(sym, Map.empty[TypeEnv, Symbol]).get(env) match { - case Some(x) => - x.name - case None => - sym.name + specializationOf(sym).get(TypeEnv.fromSpecialization(sym, args)) match { + case Some(x) => 
x.name + case None => sym.name } } @@ -370,7 +357,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { specializedName(sym.name, tvars, env) } - private def specializedName(name: Name, tvars: immutable.Set[Symbol], env: TypeEnv): TermName = { + private def specializedName(name: Name, tvars: Set[Symbol], env: TypeEnv): TermName = { val (methparams, others) = tvars.toList sortBy ("" + _.name) partition (_.owner.isMethod) // debuglog("specName(" + sym + ") env: " + env + " tvars: " + tvars) @@ -395,7 +382,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } ) - lazy val specializableTypes = ScalaValueClasses.map(_.tpe).sorted + private lazy val specializableTypes = ScalaValueClasses.map(_.tpe).sorted /** If the symbol is the companion of a value class, the value class. * Otherwise, AnyRef. @@ -452,27 +439,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = ( !hasUnspecializableAnnotation(sym) && ( specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty - || sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized)) + || sym.isClassConstructor && sym.enclClass.typeParams.exists(_.isSpecialized) || isNormalizedMember(sym) && info(sym).typeBoundsIn(env) ) ) private def hasUnspecializableAnnotation(sym: Symbol): Boolean = - sym.ownersIterator.exists(_ hasAnnotation UnspecializedClass) + sym.ownersIterator.exists(_.hasAnnotation(UnspecializedClass)) - def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists { + def isNormalizedMember(m: Symbol) = m.isSpecialized && info.get(m).exists { case NormalizedMember(_) => true case _ => false - }) - def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = { - val result = new mutable.ListBuffer[Symbol]() - tpes.foreach(tp => specializedTypeVarsBuffer(tp, result)) - if (result.isEmpty) 
immutable.Set.empty else result.toSet } - def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = { - val result = new mutable.ListBuffer[Symbol]() + def specializedTypeVars(tpes: List[Type]): Set[Symbol] = { + val result = ListBuffer.empty[Symbol] + tpes.foreach(specializedTypeVarsBuffer(_, result)) + result.toSet + } + def specializedTypeVars(sym: Symbol): Set[Symbol] = { + val result = ListBuffer.empty[Symbol] specializedTypeVarsBuffer(sym, result) - if (result.isEmpty) immutable.Set.empty else result.toSet + result.toSet } /** Return the set of @specialized type variables mentioned by the given type. @@ -481,16 +468,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * - as arguments to type constructors in @specialized positions * (arrays are considered as Array[@specialized T]) */ - def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = { - val result = new mutable.ListBuffer[Symbol]() + def specializedTypeVars(tpe: Type): Set[Symbol] = { + val result = ListBuffer.empty[Symbol] specializedTypeVarsBuffer(tpe, result) - if (result.isEmpty) immutable.Set.empty else result.toSet + result.toSet } - def specializedTypeVarsBuffer(sym: Symbol, result: mutable.Buffer[Symbol]): Unit = ( + def specializedTypeVarsBuffer(sym: Symbol, result: Buffer[Symbol]): Unit = if (!neverHasTypeParameters(sym)) enteringTyper(specializedTypeVarsBuffer(sym.info, result)) - ) /** Return the set of @specialized type variables mentioned by the given type. 
* It only counts type variables that appear: @@ -498,7 +484,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * - as arguments to type constructors in @specialized positions * (arrays are considered as Array[@specialized T]) */ - def specializedTypeVarsBuffer(tpe: Type, result: mutable.Buffer[Symbol]): Unit = tpe match { + def specializedTypeVarsBuffer(tpe: Type, result: Buffer[Symbol]): Unit = tpe match { case TypeRef(pre, sym, args) => if (sym.isAliasType) specializedTypeVarsBuffer(tpe.dealiasWiden, result) @@ -509,8 +495,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { else if (!args.isEmpty) enteringTyper { foreach2(sym.typeParams, args) { (tp, arg) => - if (tp.isSpecialized) + if (tp.isSpecialized) { specializedTypeVarsBuffer(arg, result) + } else if (sym == ValueOfClass) { // scala/bug#11489, we only update it for ValueOf + arg.typeSymbol.annotations.foreach { + case lzai: LazyAnnotationInfo if lzai.symbol == SpecializedClass => + specializedTypeVarsBuffer(arg, result) + case _ => + } + } } } case PolyType(tparams, resTpe) => specializedTypeVarsBuffer(resTpe, result); tparams.foreach(sym => specializedTypeVarsBuffer(sym.info, result)) @@ -538,7 +531,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Cleans the anyrefSpecCache of all type parameter symbols of a class. */ - private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) { + private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]): Unit = { // remove class type parameters and those of normalized members. clazz :: decls foreach (anyrefSpecCache remove _) } @@ -575,17 +568,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Maps AnyRef bindings from a raw environment (holding AnyRefs) into type parameters from * the specialized symbol (class (specialization) or member (normalization)), leaves everything else as-is. 
*/ - private def mapAnyRefsInSpecSym(env: TypeEnv, origsym: Symbol, specsym: Symbol): TypeEnv = env map { - case (sym, AnyRefTpe) if sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym)) - case x => x + private def mapAnyRefsInSpecSym(env: TypeEnv, origsym: Symbol, specsym: Symbol): TypeEnv = env transform { + case (sym, AnyRefTpe) if sym.owner == origsym => typeParamSubAnyRef(sym, specsym) + case (_, v) => v } /** Maps AnyRef bindings from a raw environment (holding AnyRefs) into type parameters from * the original class, leaves everything else as-is. */ - private def mapAnyRefsInOrigCls(env: TypeEnv, origcls: Symbol): TypeEnv = env map { - case (sym, AnyRefTpe) if sym.owner == origcls => (sym, sym.tpe) - case x => x + private def mapAnyRefsInOrigCls(env: TypeEnv, origcls: Symbol): TypeEnv = env transform { + case (sym, AnyRefTpe) if sym.owner == origcls => sym.tpe + case (_, v) => v } /** Specialize 'clazz', in the environment `outerEnv`. The outer @@ -597,17 +590,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * each combination of `stps`. */ def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = { - def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = { + def toSpecializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = { /* It gets hard to follow all the clazz and cls, and specializedClass * was both already used for a map and mucho long. So "sClass" is the * specialized subclass of "clazz" throughout this file. */ - val clazzName = specializedName(clazz, env0).toTypeName // scala/bug#5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is // to force .info on them, as their lazy type will be evaluated and the symbols will be eliminated. 
Unfortunately // evaluating the info after creating the specialized class will mess the specialized class signature, so we'd - // better unlink the the class-file backed symbol before creating the new class symbol + // better unlink the class-file backed symbol before creating the new class symbol val bytecodeClazz = clazz.owner.info.decl(clazzName) // debuglog("Specializing " + clazz + ", but found " + bytecodeClazz + " already there") def unlink(sym: Symbol): Unit = if (sym != NoSymbol) { @@ -620,18 +612,25 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { unlink(companionModule.moduleClass) unlink(companionModule) - val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE) - sClass.setAnnotations(clazz.annotations) // scala/bug#8574 important that the subclass picks up @SerialVersionUID, @strictfp, etc. + val sClass = { + val sc = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE) + sc.setAnnotations(clazz.annotations) + sc + } def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) = member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName) - sClass.associatedFile = clazz.sourceFile - currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin + clazz.sourceFile match { + case null => + case file => + sClass.associatedFile = file + currentRun.symSource(sClass) = file // needed later on by mixin + } val env = mapAnyRefsInSpecSym(env0, clazz, sClass) typeEnv(sClass) = env - this.specializedClass.getOrElseUpdate(clazz, new mutable.AnyRefMap()).update(env0, sClass) + specializedClass.getOrElseUpdate(clazz, HashMap.empty).update(env0, sClass) val decls1 = newScope // declarations of the newly specialized class 'sClass' var oldClassTParams: List[Symbol] = Nil // original unspecialized type parameters @@ -645,7 +644,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // log("new 
tparams " + newClassTParams.zip(newClassTParams map {s => (s.tpe, s.tpe.upperBound)}) + ", in env: " + env) def applyContext(tpe: Type) = - subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe)) + subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams.map(_.tpeHK)) /* Return a list of specialized parents to be re-mixed in a specialized subclass. * Assuming env = [T -> Int] and @@ -703,7 +702,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ def enterMember(sym: Symbol): Symbol = { typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment - sym modifyInfo (_.substThis(clazz, sClass).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe))) + sym.modifyInfo(_.substThis(clazz, sClass).instantiateTypeParams(oldClassTParams, newClassTParams.map(_.tpeHK))) // we remove any default parameters. At this point, they have been all // resolved by the type checker. Later on, erasure re-typechecks everything and // chokes if it finds default parameters for specialized members, even though @@ -740,118 +739,139 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { enterMember(om) } - for (m <- normMembers ; if needsSpecialization(outerEnv ++ env, m) && satisfiable(fullEnv)) { - if (!m.isDeferred) - addConcreteSpecMethod(m) - // specialized members have to be overridable. 
- if (m.isPrivate) - m.resetFlag(PRIVATE).setFlag(PROTECTED) - - if (m.isConstructor) { - val specCtor = enterMember(cloneInSpecializedClass(m, x => x)) - info(specCtor) = Forward(m) - } - else if (isNormalizedMember(m)) { // methods added by normalization - val NormalizedMember(original) = info(m) - if (nonConflicting(env ++ typeEnv(m))) { - if (info(m).degenerate) { - debuglog("degenerate normalized member " + m.defString) - val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED)) - - info(specMember) = Implementation(original) - typeEnv(specMember) = env ++ typeEnv(m) - } else { - val om = forwardToOverload(m) - debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))) - } + @tailrec def isTraitValSetter(sym: Symbol): Boolean = + sym.isSetter && sym.getterIn(sym.owner).isStable && + (sym.hasFlag(SYNTHESIZE_IMPL_IN_SUBCLASS) || isTraitValSetter(sym.nextOverriddenSymbol)) + + for (m <- normMembers) { + if (!needsSpecialization(fullEnv, m)) { + if (m.isValue && !m.isMutable && !m.isMethod && !m.isDeferred && !m.isLazy && !m.isParamAccessor) { + // non-specialized `val` fields are made mutable (in Constructors) and assigned from the + // constructors of specialized subclasses. See PR scala/scala#9704. 
+ clazz.primaryConstructor.updateAttachment(ConstructorNeedsFence) + sClass.primaryConstructor.updateAttachment(ConstructorNeedsFence) } - else - debuglog("conflicting env for " + m + " env: " + env) - } - else if (m.isDeferred && m.isSpecialized) { // abstract methods - val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED)) - // debuglog("deferred " + specMember.fullName + " remains abstract") - - info(specMember) = Abstract(specMember) - // was: new Forward(specMember) { - // override def target = m.owner.info.member(specializedName(m, env)) - // } - } else if (!sClass.isTrait && m.isMethod && !m.hasAccessorFlag) { // other concrete methods - // log("other concrete " + m) - forwardToOverload(m) - - } else if (!sClass.isTrait && m.isMethod && m.hasFlag(LAZY)) { - forwardToOverload(m) - - } else if (m.isValue && !m.isMethod) { // concrete value definition - def mkAccessor(field: Symbol, name: Name) = { - val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) - // we rely on the super class to initialize param accessors - val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) - info(sym) = SpecializedAccessor(field) - sym + } else if (satisfiable(fullEnv)) { + if (!m.isDeferred) + addConcreteSpecMethod(m) + // specialized members have to be overridable. 
+ if (m.isPrivate) + m.resetFlag(PRIVATE).setFlag(PROTECTED) + + if (m.isConstructor) { + val specCtor = enterMember(cloneInSpecializedClass(m, x => x)) + info(specCtor) = Forward(m) } - def overrideIn(clazz: Symbol, sym: Symbol) = { - val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR) - val sym1 = sym.cloneSymbol(clazz, newFlags) - sym1 modifyInfo (_ asSeenFrom (clazz.tpe, sym1.owner)) + else if (isNormalizedMember(m)) { // methods added by normalization + val NormalizedMember(original) = info(m): @unchecked + if (nonConflicting(env ++ typeEnv(m))) { + if (info(m).degenerate) { + debuglog("degenerate normalized member " + m.defString) + val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED)) + + info(specMember) = Implementation(original) + typeEnv(specMember) = env ++ typeEnv(m) + } else { + val om = forwardToOverload(m) + debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))) + } + } + else + debuglog("conflicting env for " + m + " env: " + env) } - val specVal = specializedOverload(sClass, m, env) + else if (m.isDeferred && m.isSpecialized) { // abstract methods + val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED)) + // debuglog("deferred " + specMember.fullName + " remains abstract") + + info(specMember) = Abstract(specMember) + // was: new Forward(specMember) { + // override def target = m.owner.info.member(specializedName(m, env)) + // } + } else if (m.hasFlag(SUPERACCESSOR)) { // basically same as abstract case + // we don't emit a specialized overload for the super accessor because we can't jump back and forth + // between specialized and non-specialized methods during an invokespecial for the super call, + // so, we must jump immediately into the non-specialized world to find our super + val specMember = enterMember(cloneInSpecializedClass(m, f => f)) + + // rebindSuper in mixins knows how to rejigger this + // (basically it skips this specialized class in 
the base class seq, and then also never rebinds to a specialized method) + specMember.asInstanceOf[TermSymbol].referenced = m.alias + + info(specMember) = SpecialSuperAccessor(specMember) + } else if (m.isMethod && !m.isDeferred && (!m.isAccessor || m.isLazy || isTraitValSetter(m))) { // other concrete methods + forwardToOverload(m) + } else if (m.isValue && !m.isMethod) { // concrete value definition + def mkAccessor(field: Symbol, name: Name) = { + val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) + // we rely on the super class to initialize param accessors + val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) + info(sym) = SpecializedAccessor(field) + sym + } + + def overrideIn(clazz: Symbol, sym: Symbol) = { + val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR) + val sym1 = sym.cloneSymbol(clazz, newFlags) + sym1.modifyInfo(_.asSeenFrom(clazz.tpe, sym1.owner)) + } - addConcreteSpecMethod(m) - specVal.asInstanceOf[TermSymbol].setAlias(m) + val specVal = specializedOverload(sClass, m, env) - enterMember(specVal) - // create accessors + addConcreteSpecMethod(m) + specVal.asInstanceOf[TermSymbol].setAlias(m) - if (m.isLazy) { - // no getters needed (we'll specialize the compute method and accessor separately), can stay private - // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private - // (the implementation needs it to be visible while duplicating and retypechecking, - // but it really could be private in bytecode) - specVal.setFlag(PRIVATE) - } - else if (nme.isLocalName(m.name)) { - val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info) - val origGetter = overrideIn(sClass, m.getterIn(clazz)) - info(origGetter) = Forward(specGetter) - enterMember(specGetter) - enterMember(origGetter) - debuglog("specialize accessor in %s: %s -> %s".format(sClass.name.decode, origGetter.name.decode, 
specGetter.name.decode)) - - clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa => - val cfaGetter = overrideIn(sClass, cfa) - info(cfaGetter) = SpecializedAccessor(specVal) - enterMember(cfaGetter) - debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode)) + enterMember(specVal) + // create accessors + + if (m.isLazy) { + // no getters needed (we'll specialize the compute method and accessor separately), can stay private + // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private + // (the implementation needs it to be visible while duplicating and retypechecking, + // but it really could be private in bytecode) + specVal.setFlag(PRIVATE) } + else if (nme.isLocalName(m.name)) { + val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info) + val origGetter = overrideIn(sClass, m.getterIn(clazz)) + info(origGetter) = Forward(specGetter) + enterMember(specGetter) + enterMember(origGetter) + debuglog(s"specialize accessor in ${sClass.name.decode}: ${origGetter.name.decode} -> ${specGetter.name.decode}") + + clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa => + val cfaGetter = overrideIn(sClass, cfa) + info(cfaGetter) = SpecializedAccessor(specVal) + enterMember(cfaGetter) + debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode)) + } - if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) { - val specSetter = mkAccessor(specVal, specGetter.setterName) - .resetFlag(STABLE) - specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)), - UnitTpe)) - val origSetter = overrideIn(sClass, m.setterIn(clazz)) - info(origSetter) = Forward(specSetter) - enterMember(specSetter) - enterMember(origSetter) + if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) { + val specSetter = mkAccessor(specVal, specGetter.setterName) + .resetFlag(STABLE) + 
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)), + UnitTpe)) + val origSetter = overrideIn(sClass, m.setterIn(clazz)) + info(origSetter) = Forward(specSetter) + enterMember(specSetter) + enterMember(origSetter) + } + } + else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses + m.resetFlag(PRIVATE) + specVal.resetFlag(PRIVATE) + debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format( + m.name.decode, specVal.name.decode)) } } - else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses - m.resetFlag(PRIVATE) - specVal.resetFlag(PRIVATE) - debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format( - m.name.decode, specVal.name.decode)) + else if (m.isClass) { + val specClass: Symbol = cloneInSpecializedClass(m, x => x) + typeEnv(specClass) = fullEnv + specClass setName specializedName(specClass, fullEnv).toTypeName + enterMember(specClass) + debuglog("entered specialized class " + specClass.fullName) + info(specClass) = SpecializedInnerClass(m, fullEnv) } } - else if (m.isClass) { - val specClass: Symbol = cloneInSpecializedClass(m, x => x) - typeEnv(specClass) = fullEnv - specClass setName specializedName(specClass, fullEnv).toTypeName - enterMember(specClass) - debuglog("entered specialized class " + specClass.fullName) - info(specClass) = SpecializedInnerClass(m, fullEnv) - } } sClass } @@ -864,7 +884,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (ms.nonEmpty && clazz.isTrait && clazz.isInterface) clazz.resetFlag(INTERFACE) - if (normalizedMember.isMethod) { + if (normalizedMember.isMethod && !normalizedMember.isScala3Defined) { val newTpe = subst(outerEnv, normalizedMember.info) // only do it when necessary, otherwise the method type might be at a later phase already if (newTpe != normalizedMember.info) { 
@@ -876,10 +896,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - val subclasses = specializations(clazz.info.typeParams) filter satisfiable + val subclasses = specializations(clazz.info.typeParams).filter(satisfiable(_)) subclasses foreach { env => - val spc = specializedClass(env, decls1) + val spc = toSpecializedClass(env, decls1) val existing = clazz.owner.info.decl(spc.name) // a symbol for the specialized class already exists if there's a classfile for it. @@ -905,7 +925,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = { sym :: ( - if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil + if (!sym.isMethod || sym.isScala3Defined || enteringTyper(sym.typeParams.isEmpty)) Nil else if (sym.hasDefault) { /* Specializing default getters is useless, also see scala/bug#7329 . */ sym.resetFlag(SPECIALIZED) @@ -923,7 +943,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { runReporting.warning(sym.pos, "%s %s unused or used in non-specializable positions.".format( unusedStvars.mkString("", ", ", ""), - if (unusedStvars.length == 1) "is" else "are"), + if (unusedStvars.lengthIs == 1) "is" else "are"), WarningCategory.Other, sym) unusedStvars foreach (_ removeAnnotation SpecializedClass) @@ -949,7 +969,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { tps1 foreach (_ modifyInfo (_.instantiateTypeParams(keys, vals))) // the cloneInfo is necessary so that method parameter symbols are cloned at the new owner - val methodType = sym.info.resultType.instantiateTypeParams(keys ++ tps, vals ++ tps1.map(_.tpe)).cloneInfo(specMember) + val methodType = sym.info.resultType.instantiateTypeParams(keys ++ tps, vals ++ tps1.map(_.tpeHK)).cloneInfo(specMember) specMember setInfo GenPolyType(tps1, methodType) debuglog("%s expands to %s in 
%s".format(sym, specMember.name.decode, pp(env))) @@ -963,13 +983,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // concise printing of type env private def pp(env: TypeEnv): String = { - env.toList.sortBy(_._1.name) map { + env.toList.sortBy(_._1.name).map { case (k, v) => val vsym = v.typeSymbol if (k == vsym) "" + k.name - else k.name + ":" + vsym.name - - } mkString ("env(", ", ", ")") + else "" + k.name + ":" + vsym.name + }.mkString("env(", ", ", ")") } /** Specialize member `m` w.r.t. to the outer environment and the type @@ -1000,7 +1019,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { specMember } - if (!sym.isMethod || sym.isConstructor || hasUnspecializableAnnotation(sym)) { + if (!sym.isMethod || sym.isConstructor || hasUnspecializableAnnotation(sym) || sym.isSuperAccessor + || sym.isScala3Defined) { // Scala 3 does not have specialised methods yet. + // ) { Nil } else { val stvars = specializedTypeVars(sym) @@ -1043,10 +1064,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * * m overrides a method whose type contains specialized type variables * * there is a valid specialization environment that maps the overridden method type to m's type. */ + @nowarn("cat=lint-nonlocal-return") def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = { - def checkOverriddenTParams(overridden: Symbol) { + def checkOverriddenTParams(overridden: Symbol): Unit = { foreach2(overridden.info.typeParams, overriding.info.typeParams) { (baseTvar, derivedTvar) => - val missing = concreteTypes(baseTvar).toSet -- concreteTypes(derivedTvar).toSet + val missing = concreteTypes(baseTvar).toSet diff concreteTypes(derivedTvar).toSet if (missing.nonEmpty) { reporter.error(derivedTvar.pos, "Type parameter has to be specialized at least for the same types as in the overridden method. 
Missing " @@ -1065,7 +1087,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (currentRun compiles overriding) checkOverriddenTParams(overridden) - val env = unify(overridden.info, overriding.info, emptyEnv, false, true) + val env = unify(overridden.info, overriding.info, emptyEnv, tparams = true) def atNext = exitingSpecialize(overridden.owner.info.decl(specializedName(overridden, env))) if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) { @@ -1080,11 +1102,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { (clazz.info.decls flatMap { overriding => needsSpecialOverride(overriding) match { case (NoSymbol, _) => + // run/t4996.scala, see the amazing commit message in 9733f56 if (overriding.isSuperAccessor) { val alias = overriding.alias debuglog(s"checking special overload for super accessor: ${overriding.fullName}, alias for ${alias.fullName}") needsSpecialOverride(alias) match { - case nope @ (NoSymbol, _) => None + case (NoSymbol, _) => None case (overridden, env) => val om = specializedOverload(clazz, overriding, env, overridden) om.setName(nme.superName(om.name)) @@ -1097,7 +1120,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } else None case (overridden, env) => - val om = specializedOverload(clazz, overridden, env) + val om = specializedOverload(clazz, overriding, env, overridden) clazz.info.decls.enter(om) foreachWithIndex(om.paramss) { (params, i) => foreachWithIndex(params) { (param, j) => @@ -1152,7 +1175,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * * If `tparams` is true, then the methods tries to unify over type params in polytypes as well. 
*/ - private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean, tparams: Boolean = false): TypeEnv = (tp1, tp2) match { + private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean = false, tparams: Boolean = false): TypeEnv = (tp1, tp2) match { case (TypeRef(_, sym1, _), _) if sym1.isSpecialized => debuglog(s"Unify $tp1, $tp2") if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1)) @@ -1163,7 +1186,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { unifyError(tp1, tp2) else env - case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) => + case (TypeRef(_, _, args1), TypeRef(_, _, args2)) => if (args1.nonEmpty || args2.nonEmpty) debuglog(s"Unify types $tp1 and $tp2") @@ -1184,26 +1207,26 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog(s"Unify polytypes $tp1 and $tp2") if (strict && tparams1.length != tparams2.length) unifyError(tp1, tp2) - else if (tparams && tparams1.length == tparams2.length) { + else if (tparams && tparams1.sizeCompare(tparams2) == 0) { val env1 = unifyAux(res1, res2, env, strict) if (tparams1.isEmpty) env1 else foldLeft2(tparams1, tparams2)(env1){ (e, tp1, tp2) => unifyAux(tp1.info, tp2.info, e, strict) } } else - unify(res1, res2, env, strict) + unify(res1, res2, env, strict = strict, tparams = false) case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => val env1 = unifyAux(lo1, lo2, env, strict) unifyAux(hi1, hi2, env1, strict) - case (PolyType(_, res), other) => unify(res, other, env, strict) + case (PolyType(_, res), other) => unify(res, other, env, strict, tparams = false) case (ThisType(_), ThisType(_)) => env - case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict) - case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict) - case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict) - case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict) + case (_, SingleType(_, _)) => 
unify(tp1, tp2.underlying, env, strict, tparams = false) + case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict, tparams = false) + case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict, tparams = false) + case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict, tparams = false) case (RefinedType(_, _), RefinedType(_, _)) => env - case (AnnotatedType(_, tp1), tp2) => unify(tp2, tp1, env, strict) - case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict) + case (AnnotatedType(_, tp1), tp2) => unify(tp2, tp1, env, strict, tparams = false) + case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict, tparams = false) case _ => debuglog(s"don't know how to unify $tp1 [${tp1.getClass}] with $tp2 [${tp2.getClass}]") env @@ -1216,9 +1239,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } private def unifyAux(arg1: Type, arg2: Type, env: TypeEnv, strict: Boolean): TypeEnv = - if (!strict) unify(arg1, arg2, env, strict) + if (!strict) unify(arg1, arg2, env, strict, tparams = false) else { - val nenv = unify(arg1, arg2, emptyEnv, strict) + val nenv = unify(arg1, arg2, emptyEnv, strict, tparams = false) if (env.keySet.intersect(nenv.keySet).isEmpty) env ++ nenv else { debuglog(s"could not unify: u($arg1, $arg2) yields $nenv, env: $env") @@ -1266,7 +1289,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * If it is a 'no-specialization' run, it is applied only to loaded symbols. 
*/ override def transformInfo(sym: Symbol, tpe: Type): Type = { - if (settings.nospecialization && currentRun.compiles(sym)) { + if (settings.nospecialization.value && currentRun.compiles(sym)) { tpe } else tpe.resultType match { case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) => @@ -1313,8 +1336,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * that does not fall within the bounds, but whose bounds contain * type variables that are @specialized, (that could become satisfiable). */ - def satisfiable(env: TypeEnv): Boolean = satisfiable(env, false) - def satisfiable(env: TypeEnv, warnings: Boolean): Boolean = { + def satisfiable(env: TypeEnv, warnings: Boolean = false): Boolean = { def matches(tpe1: Type, tpe2: Type): Boolean = (tpe2 == AnyTpe) || { // opt for common case of unbounded type parameter val t1 = subst(env, tpe1) val t2 = subst(env, tpe2) @@ -1349,8 +1371,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // log(t2 + ", " + specializedTypeVars(t2)) // log("unify: " + unify(t1, t2, env, false, false) + " in " + env) if (t1 <:< t2) noconstraints - else if (specializedTypeVars(t1).nonEmpty) Some(unify(t1, t2, env, false, false) -- env.keys) - else if (specializedTypeVars(t2).nonEmpty) Some(unify(t2, t1, env, false, false) -- env.keys) + else if (specializedTypeVars(t1).nonEmpty) Some(unify(t1, t2, env) -- env.keys) + else if (specializedTypeVars(t2).nonEmpty) Some(unify(t2, t1, env) -- env.keys) else None } @@ -1370,6 +1392,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private val (castfrom, castto) = casts.unzip private object CastMap extends SubstTypeMap(castfrom.toList, castto.toList) + @nowarn("""cat=deprecation&origin=scala\.tools\.nsc\.transform\.SpecializeTypes\.Duplicator\.BodyDuplicator""") + final type SpecializeBodyDuplicator = BodyDuplicator + + @nowarn("msg=shadowing a nested class of a parent is 
deprecated") + @deprecated("use SpecializeBodyDuplicator instead", since = "2.13.4") class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) { override def castType(tree: Tree, pt: Type): Tree = { tree modifyType fixType @@ -1385,7 +1412,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context) + private class SpecializeNamer(context: Context) extends Namer(context) { + // Avoid entering synthetic trees during specialization because the duplicated trees already contain them. + override def enterSyntheticSym(tree: Tree): Symbol = tree.symbol + } + + override protected def newBodyDuplicator(context: Context): SpecializeBodyDuplicator = + new SpecializeBodyDuplicator(context) + + override def newNamer(context: Context): Namer = + new SpecializeNamer(context) } /** Introduced to fix scala/bug#7343: Phase ordering problem between Duplicators and Specialization. @@ -1393,7 +1429,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * the new info then guides the tree changes. But if a symbol is created during duplication, * which runs after specialization, its info is not visited and thus the corresponding tree * is not specialized. One manifestation is the following: - * ``` + * {{{ * object Test { * class Parent[@specialized(Int) T] * @@ -1406,7 +1442,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * ... * } * } - * ``` + * }}} * We fix this by forcing duplication to take place before specialization. * * Note: The constructors phase (which also uses duplication) comes after erasure and uses the @@ -1451,7 +1487,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * in order to be accessible from specialized subclasses. 
*/ override def transform(tree: Tree): Tree = tree match { - case Select(qual, name) => + case Select(_, _) => val sym = tree.symbol if (sym.isPrivate) debuglog( "seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format( @@ -1482,7 +1518,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { class SpecializationTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { - override def transformUnit(unit: CompilationUnit): Unit = if (!settings.nospecialization) { + override def transformUnit(unit: CompilationUnit): Unit = if (!settings.nospecialization.value) { informProgress("specializing " + unit) try { exitingSpecialize(super.transformUnit(unit)) @@ -1493,14 +1529,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } /** Map a specializable method to its rhs, when not deferred. */ - val body = new mutable.AnyRefMap[Symbol, Tree]() + val body = HashMap.empty[Symbol, Tree] /** Map a specializable method to its value parameter symbols. */ - val parameters = new mutable.AnyRefMap[Symbol, List[Symbol]]() + val parameters = HashMap.empty[Symbol, List[Symbol]] /** Collect method bodies that are concrete specialized methods. */ - class CollectMethodBodies extends Traverser { + class CollectMethodBodies extends InternalTraverser { override def traverse(tree: Tree) = tree match { case DefDef(_, _, _, vparams :: Nil, _, rhs) => if (concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor) { @@ -1516,7 +1552,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // log("!!! adding body of a valdef " + tree.symbol + ": " + rhs) //super.traverse(tree) case _ => - super.traverse(tree) + tree.traverse(this) } } @@ -1526,7 +1562,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case PolyType(_, resTpe) => debuglog(s"Conformance for anyref - polytype with result type: $resTpe and $treeType\nOrig. 
sym.: $origSymbol") try { - val e = unify(origSymbol.tpe, memberType, emptyEnv, true) + val e = unify(origSymbol.tpe, memberType, emptyEnv, strict = true, tparams = false) debuglog(s"obtained env: $e") e.keySet == env.keySet } catch { @@ -1543,12 +1579,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val symbol = tree.symbol /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */ def specSym(qual: Tree): Symbol = { - val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) + val env = unify(symbol.tpe, tree.tpe, emptyEnv, strict = false, tparams = false) def isMatch(member: Symbol) = { val memberType = qual.tpe memberType member val residualTreeType = tree match { - case TypeApply(fun, targs) if fun.symbol == symbol => + case TypeApply(fun, _) if fun.symbol == symbol => // scala/bug#6308 Handle methods with only some type parameters specialized. // drop the specialized type parameters from the PolyType, and // substitute in the type environment. @@ -1588,7 +1624,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case _ => copySelect } else { - val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) + val env = unify(symbol.tpe, tree.tpe, emptyEnv, strict = false, tparams = false) overloads(symbol) find (_ matchesEnv env) match { case Some(Overload(member, _)) => typedOp(member) case _ => @@ -1601,17 +1637,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - /** Computes residual type parameters after rewiring, like "String" in the following example: - * ``` - * def specMe[@specialized T, U](t: T, u: U) = ??? - * specMe[Int, String](1, "2") => specMe\$mIc\$sp[String](1, "2") - * ``` + /* Computes residual type parameters after rewiring, like "String" in the following example: + * {{{ + * def specMe[@specialized T, U](t: T, u: U) = ??? 
+ * specMe[Int, String](1, "2") => specMe\$mIc\$sp[String](1, "2") + * }}} */ - def computeResidualTypeVars(baseTree: Tree, specMember: Symbol, specTree: Tree, baseTargs: List[Tree], env: TypeEnv): Tree = { + def computeResidualTypeVars(@unused baseTree: Tree, specMember: Symbol, specTree: Tree, baseTargs: List[Tree], env: TypeEnv): Tree = { val residualTargs = symbol.info.typeParams zip baseTargs collect { case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ } - ifDebug(assert(residualTargs.length == specMember.info.typeParams.length, + ifDebug(assert(residualTargs.sizeCompare(specMember.info.typeParams) == 0, "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env)) ) @@ -1660,7 +1696,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case specMember => debuglog("found " + specMember.fullName) val targs1 = targs // OPT: avoid ObjectRef due to capture of patmat var in by-name expression - ifDebug(assert(symbol.info.typeParams.length == targs1.length, symbol.info.typeParams + " / " + targs1)) + ifDebug(assert(symbol.info.typeParams.sizeCompare(targs1) == 0, "" + symbol.info.typeParams + " / " + targs)) val env = typeEnv(specMember) computeResidualTypeVars(tree, specMember, gen.mkAttributedSelect(qual1, specMember), targs, env) @@ -1671,8 +1707,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // def foo[@specialized T](t: T): T = t // foo(3) // TypeApply(Ident(foo), List(Int)) => foo$mIc$sp(3) // } - case TypeApply(sel @ Ident(name), targs) if name != nme.CONSTRUCTOR => - val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) + case TypeApply(Ident(name), targs) if name != nme.CONSTRUCTOR => + val env = unify(symbol.tpe, tree.tpe, emptyEnv, strict = false, tparams = false) if (env.isEmpty) super.transform(tree) else { overloads(symbol) find (_ matchesEnv env) match { @@ -1698,19 +1734,17 @@ abstract class SpecializeTypes 
extends InfoTransform with TypingTransformers { case Template(parents, self, body) => def transformTemplate = { - val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed) - if (!symbol.isPackageClass) - (new CollectMethodBodies)(tree) - val parents1 = map2Conserve(parents, currentOwner.info.parents)((parent, tpe) => - parent match { - case tt @ TypeTree() if tpe eq tt.tpe => tt - case _ => TypeTree(tpe) setPos parent.pos - }) - - treeCopy.Template(tree, - parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ , - self, - atOwner(currentOwner)(transformTrees(body ::: specMembers))) + val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: implSpecClasses(body).map(localTyper.typed) + if (!symbol.isPackageClass) + new CollectMethodBodies()(tree) + // currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos) + val parents1 = map2Conserve(parents, currentOwner.info.parents)((parent, tpe) => + parent match { + case tt @ TypeTree() if tpe eq tt.tpe => tt + case _ => TypeTree(tpe) setPos parent.pos + } + ) + treeCopy.Template(tree, parents1, self, atOwner(currentOwner)(transformTrees(body ::: specMembers))) } transformTemplate @@ -1719,8 +1753,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val vparamss = ddef.vparamss if (symbol.isConstructor) { val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, vparamss, symbol.owner)) + def check(fwd: Tree): Unit = if (settings.unitSpecialization) { + val Apply(_, args) = fwd: @unchecked + args.zip(vparamss.flatten).find { + case (arg, param) if (arg.tpe =:= UnitTpe) && param.symbol.name.endsWith(nme.SPECIALIZED_SUFFIX) => + val msg = "Class parameter is specialized for type Unit. Consider using `@specialized(Specializable.Arg)` instead." 
+ runReporting.warning(arg.pos, msg, WarningCategory.LintUnitSpecialization, param.symbol.owner) + true + case _ => false + }: Unit + } if (symbol.isPrimaryConstructor) - localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(()))))) + localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(()))))).tap(_ => check(t)) else // duplicate the original constructor duplicateBody(ddef, info(symbol).target) } @@ -1740,7 +1784,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("implementation: " + tree1) deriveDefDef(tree1)(transform) case _ => - deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called.")) + deriveDefDef(tree)(_ => localTyper typed gen.mkThrowNewRuntimeException("Fatal error in code generation: this should never be called.")) } case SpecialOverride(target) => @@ -1787,14 +1831,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("abstract: " + targ) localTyper.typed(deriveDefDef(tree)(rhs => rhs)) - case SpecialSuperAccessor(targ) => - debuglog("special super accessor: " + targ + " for " + tree) + case SpecialSuperAccessor(_) => // same as abstract method + debuglog(s"special super accessor: $tree with $symbol -> ${symbol.alias} in ${symbol.alias.owner} (in $currentClass)") localTyper.typed(deriveDefDef(tree)(rhs => rhs)) + + case x @ SpecializedInnerClass(_, _) => throw new MatchError(x) // ?!? 
} - } + } // end transformDefDef expandInnerNormalizedMembers(transformDefDef(ddef)) - case ddef @ DefDef(_, _, _, _, _, _) => + case DefDef(_, _, _, _, _, _) => val tree1 = expandInnerNormalizedMembers(tree) super.transform(tree1) @@ -1838,7 +1884,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val owner = sym.owner val norm = normalizeMember(owner, sym, emptyEnv) - if (norm.length > 1) { + if (norm.lengthIs > 1) { // record the body for duplication body(sym) = rhs parameters(sym) = vparams.map(_.symbol) @@ -1846,7 +1892,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // flag. nobody has to see this anyway :) sym.setFlag(SPECIALIZED) // create empty bodies for specializations - localTyper.typed(Block(norm.tail.map(sym => DefDef(sym, { vparamss: List[List[Symbol]] => EmptyTree })), ddef)) + localTyper.typed(Block(norm.tail.map(sym => DefDef(sym, (_: List[List[Symbol]]) => EmptyTree)), ddef)) } else tree case _ => @@ -1883,34 +1929,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def addBody(tree: DefDef, source: Symbol): DefDef = { val symbol = tree.symbol debuglog("specializing body of" + symbol.defString) - val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree + val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree: @unchecked val env = typeEnv(symbol) - val origtparams = source.typeParams.filter(tparam => !env.contains(tparam) || !isPrimitiveValueType(env(tparam))) - if (origtparams.nonEmpty || symbol.typeParams.nonEmpty) - debuglog("substituting " + origtparams + " for " + symbol.typeParams) + + val srcVparams = parameters(source) + val srcTparams = source.typeParams.filter(tparam => !env.contains(tparam) || !isPrimitiveValueType(env(tparam))) + if (settings.isDebug && (srcTparams.nonEmpty || symbol.typeParams.nonEmpty)) + debuglog("substituting " + srcTparams + " for " + symbol.typeParams) // skolemize type parameters - val oldtparams = 
tparams map (_.symbol) - val newtparams = deriveFreshSkolems(oldtparams) - map2(tparams, newtparams)(_ setSymbol _) + val oldTparams = tparams.map(_.symbol) + val newTparams = deriveFreshSkolems(oldTparams) + map2(tparams, newTparams)(_ setSymbol _) // create fresh symbols for value parameters to hold the skolem types - val newSyms = cloneSymbolsAtOwnerAndModify(vparams map (_.symbol), symbol, _.substSym(oldtparams, newtparams)) + val oldVparams = vparams.map(_.symbol) + val newVparams = cloneSymbolsAtOwnerAndModify(oldVparams, symbol, _.substSym(oldTparams, newTparams)) + + val srcParams = srcVparams ::: srcTparams + val oldParams = oldVparams ::: oldTparams + val newParams = newVparams ::: newTparams // replace value and type parameters of the old method with the new ones // log("Adding body for " + tree.symbol + " - origtparams: " + origtparams + "; tparams: " + tparams) // log("Type vars of: " + source + ": " + source.typeParams) // log("Type env of: " + tree.symbol + ": " + boundTvars) // log("newtparams: " + newtparams) - val symSubstituter = new ImplementationAdapter( - parameters(source) ::: origtparams, - newSyms ::: newtparams, - source.enclClass, - false) // don't make private fields public - - val newBody = symSubstituter(body(source).duplicate) - tpt modifyType (_.substSym(oldtparams, newtparams)) - copyDefDef(tree)(vparamss = List(newSyms map ValDef.apply), rhs = newBody) + // don't make private fields public + val substituter = new ImplementationAdapter(srcParams, newParams, source.enclClass, addressFields = false) + val newRhs = substituter(body(source).duplicate) + tpt.modifyType(_.substSym(oldParams, newParams)) + copyDefDef(tree)(vparamss = newVparams.map(ValDef.apply) :: Nil, rhs = newRhs) } /** Create trees for specialized members of 'sClass', based on the @@ -1920,7 +1969,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // add special overrides first // if (!specializedClass.hasFlag(SPECIALIZED)) // for (m <- 
specialOverrides(specializedClass)) specializedClass.info.decls.enter(m) - val mbrs = new mutable.ListBuffer[Tree] + val mbrs = ListBuffer.empty[Tree] var hasSpecializedFields = false for (m <- sClass.info.decls @@ -1938,18 +1987,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { ) ) // param accessors for private members (the others are inherited from the generic class) - if (m.isPrimaryConstructor) { - for (param <- vparams ; if sClass.info.nonPrivateMember(param.name) == NoSymbol) { - val acc = param.cloneSymbol(sClass, param.flags | PARAMACCESSOR | PRIVATE) + if (m.isPrimaryConstructor) + for (param <- vparams if sClass.info.nonPrivateMember(param.name) == NoSymbol) { + val acc = param.cloneSymbol(sClass, param.flags | PARAMACCESSOR | PrivateLocal) sClass.info.decls.enter(acc) mbrs += ValDef(acc, EmptyTree).setType(NoType).setPos(m.pos) } - } - // ctor mbrs += DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef.apply), EmptyTree) } else { - mbrs += DefDef(m, { paramss: List[List[Symbol]] => EmptyTree }) + mbrs += DefDef(m, (_: List[List[Symbol]]) => EmptyTree) } } else if (m.isValue) { mbrs += ValDef(m).setType(NoType) @@ -1970,30 +2017,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } /** Create specialized class definitions */ - def implSpecClasses(trees: List[Tree]): List[Tree] = { - trees flatMap { + def implSpecClasses(trees: List[Tree]): List[Tree] = + trees.flatMap { case tree @ ClassDef(_, _, _, impl) => - tree.symbol.info // force specialization - specializedClass.getOrNull(tree.symbol) match { - case null => Nil - case map => - val sym1 = tree.symbol - map.iterator.map { - case (env, specCls) => - debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env)) - val parents = specCls.info.parents.map(TypeTree) - ClassDef(specCls, atPos(impl.pos)(Template(parents, noSelfType, List())) - .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos - }.toList - 
} + val sym1 = tree.symbol + sym1.info // force specialization + val specMap = specializationOf(sym1) + if (specMap.isEmpty) Nil + else specMap.iterator.map { + case (env, specCls) => + debuglog(s"created synthetic class: $specCls of $sym1 in ${pp(env)}") + val parents = specCls.info.parents.map(TypeTree) + ClassDef(specCls, atPos(impl.pos)(Template(parents, noSelfType, List())) + .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos + }.toList case _ => Nil - } sortBy (_.name.decoded) - } + }.sortBy(_.name.decoded) } private def forwardCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = { val argss = mmap(paramss)(x => Ident(x.symbol)) - atPos(pos) { (receiver /: argss) (Apply.apply) } + atPos(pos) { argss.foldLeft(receiver)(Apply.apply) } } /** Forward to the generic class constructor. If the current class initializes @@ -2034,7 +2078,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * - there is a getter for the specialized field in the same class */ def initializesSpecializedField(f: Symbol) = ( - (f.name endsWith nme.SPECIALIZED_SUFFIX) + f.name.endsWith(nme.SPECIALIZED_SUFFIX) && clazz.info.member(f.unexpandedName).isPublic && clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol ) @@ -2045,7 +2089,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { else Ident(x.symbol) ) - atPos(pos) { (receiver /: argss) (Apply.apply) } + atPos(pos) { argss.foldLeft(receiver)(Apply.apply) } } /** Add method m to the set of symbols for which we need an implementation tree @@ -2054,7 +2098,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * @note This field is part of the specializeTypes subcomponent, so any symbols * that here are not garbage collected at the end of a compiler run! 
*/ - def addConcreteSpecMethod(m: Symbol) { + def addConcreteSpecMethod(m: Symbol): Unit = { if (currentRun.compiles(m)) concreteSpecMethods += m } diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala index 6c19fda625ef..345ad4a4c9bb 100644 --- a/src/compiler/scala/tools/nsc/transform/Statics.scala +++ b/src/compiler/scala/tools/nsc/transform/Statics.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,7 +16,7 @@ package transform abstract class Statics extends Transform with ast.TreeDSL { import global._ - trait StaticsTransformer extends Transformer { + trait StaticsTransformer extends AstTransformer { /** generate a static constructor with symbol fields inits, or an augmented existing static ctor */ def staticConstructor(body: List[Tree], localTyper: analyzer.Typer, pos: Position)(newStaticInits: List[Tree]): Tree = @@ -32,6 +32,7 @@ abstract class Statics extends Transform with ast.TreeDSL { case term: TermTree => // need to create a new block with inits and the old term treeCopy.Block(term, newStaticInits, term) + case x => throw new MatchError(x) } } getOrElse { // create new static ctor diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 507285efccc4..29aa7a91db3e 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,11 +16,11 @@ package transform import symtab.Flags import Flags.SYNTHETIC +import scala.annotation._ /** Perform tail recursive call elimination. * * @author Iulian Dragos - * @version 1.0 */ abstract class TailCalls extends Transform { import global._ // the global environment @@ -29,20 +29,10 @@ abstract class TailCalls extends Transform { val phaseName: String = "tailcalls" - def newTransformer(unit: CompilationUnit): Transformer = - new TailCallElimination(unit) - - /** Create a new phase which applies transformer */ - override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = new Phase(prev) + override def enabled = settings.debuginfo.value != "notailcalls" - /** The phase defined by this transform */ - class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) { - def apply(unit: global.CompilationUnit) { - if (!(settings.debuginfo.value == "notailcalls")) { - newTransformer(unit).transformUnit(unit) - } - } - } + def newTransformer(unit: CompilationUnit): AstTransformer = + new TailCallElimination(unit) import treeInfo.hasSynthCaseSymbol @@ -50,7 +40,6 @@ abstract class TailCalls extends Transform { * A Tail Call Transformer * * @author Erik Stenman, Iulian Dragos - * @version 1.1 * * What it does: *

    @@ -95,11 +84,11 @@ abstract class TailCalls extends Transform { * parameter lists exist. *

    */ - class TailCallElimination(unit: CompilationUnit) extends Transformer { + class TailCallElimination(@unused unit: CompilationUnit) extends AstTransformer { private def defaultReason = "it contains a recursive call not in tail position" private val failPositions = perRunCaches.newMap[TailContext, Position]() withDefault (_.methodPos) private val failReasons = perRunCaches.newMap[TailContext, String]() withDefaultValue defaultReason - private def tailrecFailure(ctx: TailContext) { + private def tailrecFailure(ctx: TailContext): Unit = { val method = ctx.method val failReason = failReasons(ctx) val failPos = failPositions(ctx) @@ -137,9 +126,10 @@ abstract class TailCalls extends Transform { } override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}" - final def noTailContext() = clonedTailContext(false) - final def yesTailContext() = clonedTailContext(true) - protected def clonedTailContext(tailPos: Boolean): TailContext = this match { + final def noTailContext() = clonedTailContext(tailPos = false) + final def yesTailContext() = clonedTailContext(tailPos = true) + @tailrec + protected final def clonedTailContext(tailPos: Boolean): TailContext = this match { case _ if this.tailPos == tailPos => this case clone: ClonedTailContext => clone.that.clonedTailContext(tailPos) case _ => new ClonedTailContext(this, tailPos) @@ -174,7 +164,7 @@ abstract class TailCalls extends Transform { val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis) label setInfo MethodType(thisParam :: method.tpe.params, method.tpe_*.finalResultType) if (isEligible) - label substInfo (method.tpe.typeParams, tparams) + label.substInfo(method.tpe.typeParams, tparams) label } @@ -285,9 +275,9 @@ abstract class TailCalls extends Transform { import runDefinitions.{Boolean_or, Boolean_and} tree match { - case dd: DefDef if tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass) => + case _: DefDef if tree.symbol.isLazy 
&& tree.symbol.hasAnnotation(TailrecClass) => reporter.error(tree.pos, "lazy vals are not tailcall transformed") - super.transform(tree) + tree.transform(this) case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) => val newCtx = new DefDefTailContext(dd) @@ -297,7 +287,7 @@ abstract class TailCalls extends Transform { debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}") val newRHS = transform(rhs0, newCtx) - deriveDefDef(tree) { rhs => + deriveDefDef(tree) { _ => if (newCtx.isTransformed) { /* We have rewritten the tree, but there may be nested recursive calls remaining. * If @tailrec is given we need to fail those now. @@ -340,7 +330,7 @@ abstract class TailCalls extends Transform { ) // a translated casedef - case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) => + case LabelDef(_, _, _) if hasSynthCaseSymbol(tree) => deriveLabelDef(tree)(transform) case Block(stats, expr) => @@ -352,7 +342,7 @@ abstract class TailCalls extends Transform { case CaseDef(pat, guard, body) => // CaseDefs are already translated and guards were moved into the body. // If this was not the case, guards would have to be transformed here as well. 
- assert(guard.isEmpty) + assert(guard.isEmpty, "empty guard") deriveCaseDef(tree)(transform) case If(cond, thenp, elsep) => @@ -407,13 +397,14 @@ abstract class TailCalls extends Transform { case Apply(fun, args) => rewriteApply(fun, fun, Nil, args) case Alternative(_) | Star(_) | Bind(_, _) => - sys.error("We should've never gotten inside a pattern") + assert(false, "We should've never gotten inside a pattern") + tree case Select(qual, name) => treeCopy.Select(tree, noTailTransform(qual), name) case EmptyTree | Super(_, _) | This(_) | Ident(_) | Literal(_) | Function(_, _) | TypeTree() => tree case _ => - super.transform(tree) + tree.transform(this) } } @@ -480,7 +471,7 @@ abstract class TailCalls extends Transform { traverseNoTail(selector) traverseTrees(cases) - case dd @ DefDef(_, _, _, _, _, _) => // we are run per-method + case DefDef(_, _, _, _, _, _) => // we are run per-method case Block(stats, expr) => traverseTreesNoTail(stats) diff --git a/src/compiler/scala/tools/nsc/transform/Transform.scala b/src/compiler/scala/tools/nsc/transform/Transform.scala index 3bf69c53795b..14c1a9e8d0db 100644 --- a/src/compiler/scala/tools/nsc/transform/Transform.scala +++ b/src/compiler/scala/tools/nsc/transform/Transform.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,29 +13,23 @@ package scala.tools.nsc package transform -/**

    - * A base class for transforms. - *

    - *

    - * A transform contains a compiler phase which applies a tree transformer. - *

    +/** A base class for transforms. + * A transform contains a compiler phase that applies a tree transformer. * * @author Martin Odersky - * @version 1.0 */ trait Transform extends SubComponent { /** The transformer factory */ - protected def newTransformer(unit: global.CompilationUnit): global.Transformer + protected def newTransformer(unit: global.CompilationUnit): global.AstTransformer /** Create a new phase which applies transformer */ def newPhase(prev: scala.tools.nsc.Phase): StdPhase = new Phase(prev) /** The phase defined by this transform */ class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) { - def apply(unit: global.CompilationUnit) { + def apply(unit: global.CompilationUnit): Unit = { newTransformer(unit).transformUnit(unit) } } } - diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index cc3be2be91c5..4342f9d22bf5 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -40,7 +40,6 @@ trait TypeAdaptingTransformer { self: TreeDSL => } } - private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol) final def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner) final def isMethodTypeWithEmptyParams(tpe: Type) = tpe.isInstanceOf[MethodType] && tpe.params.isEmpty final def applyMethodWithEmptyParams(qual: Tree) = Apply(qual, List()) setPos qual.pos setType qual.tpe.resultType @@ -52,6 +51,13 @@ trait TypeAdaptingTransformer { self: TreeDSL => case LabelDef(_, _, _) => val ldef = deriveLabelDef(tree)(box) ldef setType ldef.rhs.tpe + case Apply(fun @ Ident(_), _) if fun.symbol.isLabel => + // don't box around label jumps, scala/bug#13043 + // need to set the tree type to avoid looping in `adaptToType` + tree.setType(tree.tpe match { + case ErasedValueType(clazz, _) => clazz.tpe + case _ => ObjectTpe + }) case _ => val tree1 = tree.tpe match { case ErasedValueType(clazz, _) => New(clazz, cast(tree, underlyingOfValueClass(clazz))) @@ -59,11 +65,10 @@ trait TypeAdaptingTransformer { self: TreeDSL => case UnitClass => if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT) else BLOCK(tree, REF(BoxedUnit_UNIT)) - case NothingClass => tree // a non-terminating expression doesn't need boxing case x => - assert(x != ArrayClass) + assert(x != ArrayClass, "array") tree match { - case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) => + case Apply(_, List(arg)) if isSafelyRemovableUnbox(tree, arg) => arg case _ => (REF(currentRun.runDefinitions.boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe @@ -88,17 +93,17 @@ trait TypeAdaptingTransformer { self: TreeDSL => if (treeInfo isExprSafeToInline side) value else BLOCK(side, value) val tree1 = pt match { - case ErasedValueType(clazz, BoxedUnitTpe) => - cast(preservingSideEffects(tree, REF(BoxedUnit_UNIT)), pt) + case ErasedValueType(_, BoxedUnitTpe) => cast(preservingSideEffects(tree, REF(BoxedUnit_UNIT)), pt) case 
ErasedValueType(clazz, underlying) => cast(unboxValueClass(tree, clazz, underlying), pt) case _ => pt.typeSymbol match { case UnitClass => preservingSideEffects(tree, UNIT) - case x => - assert(x != ArrayClass) - // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type - Apply(currentRun.runDefinitions.unboxMethod(pt.typeSymbol), tree) + case ArrayClass => assert(pt.typeSymbol != ArrayClass, "array") ; tree + case _ => + val unboxer = currentRun.runDefinitions.unboxMethod(pt.typeSymbol) + if (settings.isDeveloper) assert(boxedClass(pt.typeSymbol).tpe <:< tree.tpe, s"${tree.tpe} is not a boxed ${pt}") + Apply(unboxer, tree) // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type } } typedPos(tree.pos)(tree1) @@ -113,7 +118,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => /** Generate a synthetic cast operation from tree.tpe to pt. * - * @pre pt eq pt.normalize + * @note Pre-condition: pt eq pt.normalize */ final def cast(tree: Tree, pt: Type): Tree = { if (settings.isDebug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { @@ -127,6 +132,9 @@ trait TypeAdaptingTransformer { self: TreeDSL => } if (pt =:= UnitTpe) { // See scala/bug#4731 for one example of how this occurs. + // TODO: that initial fix was quite symptomatic (the real problem was that it allowed an illegal override, + // which resulted in types being so out of whack that'd case something to unit where we shouldn't), + // so I'm not sure this case actually still arises. log("Attempted to cast to Unit: " + tree) tree.duplicate setType pt } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) { @@ -140,6 +148,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => // Push the cast into the RHS of matchEnd LabelDefs. 
ld.symbol.modifyInfo { case MethodType(params, _) => MethodType(params, pt) + case x => throw new MatchError(x) } deriveLabelDef(ld)(rhs => cast(rhs, pt)).setType(pt) case _ => @@ -156,7 +165,6 @@ trait TypeAdaptingTransformer { self: TreeDSL => */ @tailrec final def adaptToType(tree: Tree, pt: Type): Tree = { val tpe = tree.tpe - if ((tpe eq pt) || tpe <:< pt) tree else if (tpe.isInstanceOf[ErasedValueType]) adaptToType(box(tree), pt) // what if pt is an erased value type? else if (pt.isInstanceOf[ErasedValueType]) adaptToType(unbox(tree, pt), pt) diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index 7dd656a3e286..55d70009aaf6 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -29,7 +29,7 @@ trait TypingTransformers { else // TODO: AM: should some phases use a regular rootContext instead of a post-typer one?? 
analyzer.newTyper(analyzer.rootContextPostTyper(unit, EmptyTree)) - abstract class TypingTransformer(initLocalTyper: global.analyzer.Typer) extends Transformer { + abstract class TypingTransformer(initLocalTyper: global.analyzer.Typer) extends AstTransformer { def this(unit: CompilationUnit) = this(newRootLocalTyper(unit)) var localTyper: analyzer.Typer = initLocalTyper currentOwner = localTyper.context.owner @@ -50,11 +50,11 @@ trait TypingTransformers { tree match { case Template(_, _, _) => // enter template into context chain - atOwner(currentOwner) { super.transform(tree) } + atOwner(currentOwner) { tree.transform(this) } case PackageDef(_, _) => - atOwner(tree.symbol) { super.transform(tree) } + atOwner(tree.symbol) { tree.transform(this) } case _ => - super.transform(tree) + tree.transform(this) } } def transformAtOwner(owner: Symbol, tree: Tree): Tree = atOwner(tree, owner) { transform(tree) } diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index a90d9aa701eb..9ea2dbc75603 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,16 +14,14 @@ package scala package tools.nsc package transform +import scala.PartialFunction.cond import scala.annotation.tailrec -import symtab.Flags._ import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.reflect.internal.util.ListOfNil import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.symtab.Flags._ -import PartialFunction.cond - -/* */ /** - uncurry all symbol and tree types (@see UnCurryPhase) -- this includes normalizing all proper types. * - for every curried parameter list: (ps_1) ... 
(ps_n) ==> (ps_1, ..., ps_n) * - for every curried application: f(args_1)...(args_n) ==> f(args_1, ..., args_n) @@ -59,19 +57,18 @@ import PartialFunction.cond * - remove calls to elidable methods and replace their bodies with NOPs when elide-below * requires it */ -/* */ abstract class UnCurry extends InfoTransform with scala.reflect.internal.transform.UnCurry with TypingTransformers with ast.TreeDSL { val global: Global // need to repeat here because otherwise last mixin defines global as // SymbolTable. If we had DOT this would not be an issue - import global._ // the global environment - import definitions._ // standard classes and methods import CODE._ + import global._ + import definitions._ val phaseName: String = "uncurry" - def newTransformer(unit: CompilationUnit): Transformer = new UnCurryTransformer(unit) + def newTransformer(unit: CompilationUnit): AstTransformer = new UnCurryTransformer(unit) override def changesBaseClasses = false // ------ Type transformation -------------------------------------------------------- @@ -197,7 +194,7 @@ abstract class UnCurry extends InfoTransform import treeInfo.{catchesThrowable, isSyntheticCase} for { - Try(t, catches, _) <- body + Try(_, catches, _) <- body cdef <- catches if catchesThrowable(cdef) && !isSyntheticCase(cdef) } { @@ -244,21 +241,22 @@ abstract class UnCurry extends InfoTransform val typedNewFun = localTyper.typedPos(fun.pos)(Block(liftedMethod :: Nil, super.transform(newFun))) if (mustExpand) { - val Block(stats, expr : Function) = typedNewFun + val Block(stats, expr : Function) = typedNewFun: @unchecked treeCopy.Block(typedNewFun, stats, gen.expandFunction(localTyper)(expr, inConstructorFlag)) } else { typedNewFun } } - def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = { + def transformArgs(pos: Position, fun: Symbol, args: List[Tree], params: List[Symbol]): List[Tree] = { val isJava = fun.isJavaDefined - def transformVarargs(varargsElemType: Type) = { + + def 
transformVarargs(varargsElemType: Type): List[Tree] = { def mkArrayValue(ts: List[Tree], elemtp: Type) = ArrayValue(TypeTree(elemtp), ts) setType arrayType(elemtp) // when calling into scala varargs, make sure it's a sequence. - def arrayToSequence(tree: Tree, elemtp: Type) = { + def arrayToSequence(tree: Tree, elemtp: Type, copy: Boolean): Tree = { exitingUncurry { localTyper.typedPos(pos) { val pt = arrayType(elemtp) @@ -266,15 +264,21 @@ abstract class UnCurry extends InfoTransform if (tree.tpe <:< pt) tree else gen.mkCastArray(tree, elemtp, pt) - gen.mkWrapArray(adaptedTree, elemtp) + if(copy) { + runReporting.deprecationWarning(tree.pos, NoSymbol, currentOwner, + "Passing an explicit array value to a Scala varargs method is deprecated (since 2.13.0) and will result in a defensive copy; "+ + "Use the more efficient non-copying ArraySeq.unsafeWrapArray or an explicit toIndexedSeq call", "2.13.0") + gen.mkMethodCall(PredefModule, nme.copyArrayToImmutableIndexedSeq, List(elemtp), List(adaptedTree)) + } else gen.mkWrapVarargsArray(adaptedTree, elemtp) } } } // when calling into java varargs, make sure it's an array - see bug #1360 - def sequenceToArray(tree: Tree) = { + def sequenceToArray(tree: Tree): Tree = { val toArraySym = tree.tpe member nme.toArray - assert(toArraySym != NoSymbol) + assert(toArraySym != NoSymbol, "toArray") + @tailrec def getClassTag(tp: Type): Tree = { val tag = localTyper.resolveClassTag(tree.pos, tp) // Don't want bottom types getting any further than this (scala/bug#4024) @@ -283,15 +287,15 @@ abstract class UnCurry extends InfoTransform else if (tp.upperBound ne tp) getClassTag(tp.upperBound) else localTyper.TyperErrorGen.MissingClassTagError(tree, tp) } - def traversableClassTag(tpe: Type): Tree = { - (tpe baseType TraversableClass).typeArgs match { + def iterableClassTag(tpe: Type): Tree = { + (tpe baseType IterableClass).typeArgs match { case targ :: _ => getClassTag(targ) case _ => EmptyTree } } exitingUncurry { 
localTyper.typedPos(pos) { - gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe))) + gen.mkMethodCall(tree, toArraySym, Nil, List(iterableClassTag(tree.tpe))) } } } @@ -304,19 +308,21 @@ abstract class UnCurry extends InfoTransform val javaStyleVarArgs = isJavaVarArgsMethod(fun) var suffix: Tree = if (treeInfo isWildcardStarArgList args) { - val Typed(tree, _) = args.last + val Typed(tree, _) = args.last: @unchecked if (javaStyleVarArgs) if (tree.tpe.typeSymbol == ArrayClass) tree else sequenceToArray(tree) else if (tree.tpe.typeSymbol isSubClass SeqClass) tree - else arrayToSequence(tree, varargsElemType) + else arrayToSequence(tree, varargsElemType, copy = true) // existing array, make a defensive copy } else { - def mkArray = mkArrayValue(args drop (formals.length - 1), varargsElemType) + def mkArray = mkArrayValue(args drop (params.length - 1), varargsElemType) + // if args.length < params.length the repeated argument is empty + def emptyVarargs = compareLengths(args, params) < 0 if (javaStyleVarArgs) mkArray - else if (args.isEmpty) gen.mkNil // avoid needlessly double-wrapping an empty argument list - else arrayToSequence(mkArray, varargsElemType) + else if (emptyVarargs) gen.mkNil // avoid needlessly double-wrapping an empty argument list + else arrayToSequence(mkArray, varargsElemType, copy = false) // fresh array, no need to copy } exitingUncurry { @@ -327,18 +333,22 @@ abstract class UnCurry extends InfoTransform } } } - args.take(formals.length - 1) :+ (suffix setType formals.last) + val args1 = ListBuffer[Tree]() + args1 ++= args.iterator.take(params.length - 1) + args1 += suffix setType params.last.info + args1.toList } - val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args + val isVarargs = isVarArgsList(params) + val args1 = if (isVarargs) transformVarargs(params.last.info.typeArgs.head.widen) else args - map2(formals, args1) { (formal, arg) => - if (!isByNameParamType(formal)) arg + 
map2Conserve(args1, params) { (arg, param) => + if (!isByNameParamType(param.info)) arg else if (isByNameRef(arg)) { // thunk does not need to be forced because it's a reference to a by-name arg passed to a by-name param byNameArgs += arg arg setType functionType(Nil, arg.tpe) } else { - log(s"Argument '$arg' at line ${arg.pos.line} is $formal from ${fun.fullName}") + log(s"Argument '$arg' at line ${arg.pos.line} is ${param.info} from ${fun.fullName}") def canUseDirectly(qual: Tree) = qual.tpe.typeSymbol.isSubClass(FunctionClass(0)) && treeInfo.isExprSafeToInline(qual) arg match { // don't add a thunk for by-name argument if argument already is an application of @@ -388,7 +398,7 @@ abstract class UnCurry extends InfoTransform * all lambda impl methods as static. */ private def translateSynchronized(tree: Tree) = tree match { - case dd @ DefDef(_, _, _, _, _, Apply(fn, body :: Nil)) if isSelfSynchronized(dd) => + case dd @ DefDef(_, _, _, _, _, Apply(_, body :: Nil)) if isSelfSynchronized(dd) => log("Translating " + dd.symbol.defString + " into synchronized method") dd.symbol setFlag SYNCHRONIZED deriveDefDef(dd)(_ => body) @@ -431,17 +441,17 @@ abstract class UnCurry extends InfoTransform def isLiftedLambdaMethod(funSym: Symbol) = funSym.isArtifact && funSym.name.containsName(nme.ANON_FUN_NAME) && funSym.isLocalToBlock - def checkIsElisible(sym: Symbol): Boolean = - (sym ne null) && sym.elisionLevel.exists { level => + def checkIsElidable(sym: Symbol): Boolean = (sym ne null) && sym.elisionLevel.exists { level => if (sym.isMethod) level < settings.elidebelow.value else { - if (currentRun.isScala213) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable!") + // TODO: report error? It's already done in RefChecks. 
https://github.com/scala/scala/pull/5539#issuecomment-331376887 + reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") false } } val result = - if (checkIsElisible(sym)) + if (checkIsElidable(sym)) replaceElidableTree(tree) else translateSynchronized(tree) match { case dd @ DefDef(mods, name, tparams, _, tpt, rhs) => @@ -480,11 +490,22 @@ abstract class UnCurry extends InfoTransform else super.transform(tree) + case sel: Select if sel.qualifier.tpe.typeSymbol.isDerivedValueClass => + // `c.f` where `c` is a value class is translated to `C.f$extension(c)` (value class member) or + // `new C(c).f()` (universal trait member). In both cases, `try` within `c` needs a lift. + withNeedLift(needLift = true) { super.transform(tree) } + case Apply(fn, args) => - val needLift = needTryLift || !fn.symbol.isLabel // scala/bug#6749, no need to lift in args to label jumps. + // Read the param symbols before `transform(fn)`, because UnCurry replaces T* by Seq[T] (see DesugaredParameterType). + // The call to `transformArgs` below needs `formals` that still have varargs. 
+ val fnParams = fn.tpe.params + val transformedFn = transform(fn) + // scala/bug#6479: no need to lift in args to label jumps + // scala/bug#11127: boolean && / || are emitted using jumps, the lhs stack value is consumed by the conditional jump + val noReceiverOnStack = fn.symbol.isLabel || fn.symbol == currentRun.runDefinitions.Boolean_and || fn.symbol == currentRun.runDefinitions.Boolean_or + val needLift = needTryLift || !noReceiverOnStack withNeedLift(needLift) { - val formals = fn.tpe.paramTypes - treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals))) + treeCopy.Apply(tree, transformedFn, transformTrees(transformArgs(tree.pos, fn.symbol, args, fnParams))) } case Assign(_: RefTree, _) => @@ -555,15 +576,6 @@ abstract class UnCurry extends InfoTransform tree } - @tailrec def isThrowable(pat: Tree): Boolean = pat match { - case Typed(Ident(nme.WILDCARD), tpt) => - tpt.tpe =:= ThrowableTpe - case Bind(_, pat) => - isThrowable(pat) - case _ => - false - } - tree match { /* Some uncurry post transformations add members to templates. 
* @@ -599,7 +611,7 @@ abstract class UnCurry extends InfoTransform val literalRhsIfConst = if (newParamss.head.isEmpty) { // We know newParamss.length == 1 from above ddSym.info.resultType match { - case tp@ConstantType(value) => Literal(value) setType tp setPos newRhs.pos // inlining of gen.mkAttributedQualifier(tp) + case tp@FoldableConstantType(value) => Literal(value) setType tp setPos newRhs.pos // inlining of gen.mkAttributedQualifier(tp) case _ => newRhs } } else newRhs @@ -635,6 +647,8 @@ abstract class UnCurry extends InfoTransform applyUnary() case ret @ Return(expr) if isNonLocalReturn(ret) => log(s"non-local return from ${currentOwner.enclMethod} to ${ret.symbol}") + if (settings.warnNonlocalReturn) + runReporting.warning(ret.pos, s"return statement uses an exception to pass control to the caller of the enclosing named ${ret.symbol}", WarningCategory.LintNonlocalReturn, ret.symbol) atPos(ret.pos)(nonLocalReturnThrow(expr, ret.symbol)) case TypeTree() => tree @@ -685,7 +699,7 @@ abstract class UnCurry extends InfoTransform * @return (newVparamss, newRhs) */ def erase(dd: DefDef): (List[List[ValDef]], Tree) = { - import dd.{ vparamss, rhs } + import dd.{rhs, vparamss} val (allParams, packedParamsSyms, tempVals): (List[ValDef], List[Symbol], List[ValDef]) = { val allParamsBuf: ListBuffer[ValDef] = ListBuffer.empty @@ -719,29 +733,29 @@ abstract class UnCurry extends InfoTransform val tempVal: ValDef = { // scala/bug#9442: using the "uncurry-erased" type (the one after the uncurry phase) can lead to incorrect // tree transformations. For example, compiling: - // ``` + // // def foo(c: Ctx)(l: c.Tree): Unit = { // val l2: c.Tree = l // } - // ``` + // // Results in the following AST: - // ``` + // // def foo(c: Ctx, l: Ctx#Tree): Unit = { // val l$1: Ctx#Tree = l.asInstanceOf[Ctx#Tree] // val l2: c.Tree = l$1 // no, not really, it's not. // } - // ``` + // // Of course, this is incorrect, since `l$1` has type `Ctx#Tree`, which is not a subtype of `c.Tree`. 
// // So what we need to do is to use the pre-uncurry type when creating `l$1`, which is `c.Tree` and is // correct. Now, there are two additional problems: // 1. when varargs and byname params are involved, the uncurry transformation desugars these special // cases to actual typerefs, eg: - // ``` - // T* ~> Seq[T] (Scala-defined varargs) - // T* ~> Array[T] (Java-defined varargs) - // =>T ~> Function0[T] (by name params) - // ``` + // + // T* ~> Seq[T] (Scala-defined varargs) + // T* ~> Array[T] (Java-defined varargs) + // => T ~> Function0[T] (by name params) + // // we use the DesugaredParameterType object (defined in scala.reflect.internal.transform.UnCurry) // to redo this desugaring manually here // 2. the type needs to be normalized, since `gen.mkCast` checks this (no HK here, just aliases have @@ -754,7 +768,7 @@ abstract class UnCurry extends InfoTransform tpe } val info = info0.normalize - val tempValName = unit freshTermName (p.name.toStringWithSuffix("$")) + val tempValName = unit.freshTermName(p.name.toStringWithSuffix("$")) val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(info) atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), info))) } @@ -765,7 +779,7 @@ abstract class UnCurry extends InfoTransform val viter = vparamss.iterator.flatten val piter = dd.symbol.info.paramss.iterator.flatten while (viter.hasNext && piter.hasNext) - addParamTransform(viter.next, piter.next) + addParamTransform(viter.next(), piter.next()) (allParamsBuf.toList, packedParamsSymsBuf.toList, tempValsBuf.toList) } @@ -824,11 +838,11 @@ abstract class UnCurry extends InfoTransform val theTyper = typer.atOwner(dd, currentClass) val forwTree = theTyper.typedPos(dd.pos) { - val seqArgs = map3(newPs, oldPs, isRepeated)((param, oldParam, isRep) => { + val seqArgs = map3(newPs, oldPs, isRepeated)((param, _, isRep) => { if (!isRep) Ident(param) else { val parTp = elementType(ArrayClass, param.tpe) - val wrap = gen.mkWrapArray(Ident(param), 
parTp) + val wrap = gen.mkWrapVarargsArray(Ident(param), parTp) param.attachments.get[TypeParamVarargsAttachment] match { case Some(TypeParamVarargsAttachment(tp)) => gen.mkCast(wrap, seqType(tp)) case _ => wrap diff --git a/src/compiler/scala/tools/nsc/transform/async/AnfTransform.scala b/src/compiler/scala/tools/nsc/transform/async/AnfTransform.scala index 3220388036c5..eaf866b6a162 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AnfTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AnfTransform.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -131,26 +131,23 @@ private[async] trait AnfTransform extends TransformUtils { case ld: LabelDef if ld.tpe.typeSymbol == definitions.BoxedUnitClass => currentStats += ld literalBoxedUnit - case ld: LabelDef if ld.tpe.typeSymbol == definitions.UnitClass => + case ld: LabelDef if ld.tpe.typeSymbol == definitions.UnitClass => currentStats += ld literalUnit - case expr => expr + case expr1 => + expr1 } case ValDef(mods, name, tpt, rhs) => atOwner(tree.symbol) { - // Capture current cursor of a non-empty `stats` buffer so we can efficiently restrict the + // Capture size of `stats` buffer so we can efficiently restrict the // `changeOwner` to the newly added items... - var statsIterator = if (currentStats.isEmpty) null else currentStats.iterator + val oldItemsCount = currentStats.length val expr = atOwner(currentOwner.owner)(transform(rhs)) - // But, ListBuffer.empty.iterator doesn't reflect later mutation. Luckily we can just start - // from the beginning of the buffer - if (statsIterator == null) statsIterator = currentStats.iterator - // Definitions within stats lifted out of the `ValDef` rhs should no longer be owned by the // the ValDef. 
- statsIterator.foreach(_.changeOwner((currentOwner, currentOwner.owner))) + currentStats.iterator.drop(oldItemsCount).foreach(_.changeOwner((currentOwner, currentOwner.owner))) val expr1 = if (isUnitType(expr.tpe)) { currentStats += expr literalBoxedUnit @@ -180,7 +177,7 @@ private[async] trait AnfTransform extends TransformUtils { treeCopy.Match(tree, scrutExpr, casesWithAssign) } - case ld@LabelDef(name, params, rhs) => + case LabelDef(name, params, rhs) => treeCopy.LabelDef(tree, name, params, transformNewControlFlowBlock(rhs)) case t@Typed(expr, tpt) => transform(expr).setType(t.tpe) @@ -208,7 +205,10 @@ private[async] trait AnfTransform extends TransformUtils { if (isPatMatGeneratedJump(tree)) assignUnitType(tree) if (!needsResultVar || isUnitType(tree.tpe) || (tree.tpe =:= definitions.NothingTpe)) { - core(NoSymbol) + if (tree.tpe =:= definitions.BoxedUnitTpe) { + currentStats += assignUnitType(core(NoSymbol)) + literalBoxedUnit + } else core(NoSymbol) } else { val varDef = defineVar(nameSource(), tree.tpe, tree.pos) currentStats += varDef @@ -270,7 +270,8 @@ private[async] trait AnfTransform extends TransformUtils { onTail(ts) case i => val group = new Array[T](i + 1) - ts.copyToArray(group) + @annotation.unused val copied = ts.copyToArray(group) + //assert(copied == group.length, s"$copied != ${group.length}") onGroup(group) foreachGroupsEndingWith(ts.drop(i + 1))(isGroupEnd, onGroup, onTail) } @@ -317,11 +318,11 @@ private[async] trait AnfTransform extends TransformUtils { val statsExpr0: ArrayBuffer[Tree] = new ArrayBuffer[Tree](statsExpr.length) statsExpr.reverseIterator.foreach { - case ld@LabelDef(_, param :: Nil, _) => + case ld@LabelDef(_, _ :: Nil, _) => val (ld1, after) = modifyLabelDef(ld) statsExpr0 += after statsExpr0 += ld1 - case a@ValDef(mods, name, tpt, ld@LabelDef(_, param :: Nil, _)) => + case a@ValDef(mods, name, tpt, ld@LabelDef(_, _ :: Nil, _)) => val (ld1, after) = modifyLabelDef(ld) statsExpr0 += treeCopy.ValDef(a, mods, name, tpt, 
after) statsExpr0 += ld1 diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncAnalysis.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncAnalysis.scala index 201e474628a4..5eed85764497 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncAnalysis.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncNames.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncNames.scala index 82fd4dc35a2a..957b60ca2b4b 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncNames.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncNames.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -70,7 +70,7 @@ final class AsyncNames[U <: reflect.internal.Names with Singleton](val u: U) { if (seenPrefixes.contains(name)) { TermName(freshNameCreator.newName(name.toStringWithSuffix("$"))) } else { - seenPrefixes.add(name) + seenPrefixes.addOne(name) name } } @@ -78,7 +78,7 @@ final class AsyncNames[U <: reflect.internal.Names with Singleton](val u: U) { if (seenPrefixes.contains(name)) { TypeName(freshNameCreator.newName(name.toStringWithSuffix("$"))) } else { - seenPrefixes.add(name) + seenPrefixes.addOne(name) name } } diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index 05e6ac28fd57..306346910ac6 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,7 +14,7 @@ package scala.tools.nsc.transform.async import scala.collection.mutable import scala.tools.nsc.transform.{Transform, TypingTransformers} -import scala.reflect.internal.util.{SourceFile, NoSourceFile} +import scala.reflect.internal.util.{NoSourceFile, ReplBatchSourceFile, SourceFile} abstract class AsyncPhase extends Transform with TypingTransformers with AnfTransform with Lifter with LiveVariables { self => @@ -25,14 +25,12 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran protected[async] val tracing = new Tracing val phaseName: String = "async" - override def enabled: Boolean = settings.async + override def enabled: Boolean = settings.async.value private final case class AsyncAttachment(awaitSymbol: Symbol, postAnfTransform: Block => Block, stateDiagram: ((Symbol, Tree) => Option[String => Unit]), allowExceptionsToPropagate: Boolean) extends PlainAttachment - def hasAsyncAttachment(dd: DefDef) = dd.hasAttachment[AsyncAttachment] - // Optimization: avoid the transform altogether if there are no async blocks in a unit. 
private val sourceFilesToTransform = perRunCaches.newSet[SourceFile]() private val awaits: mutable.Set[Symbol] = perRunCaches.newSet[Symbol]() @@ -44,26 +42,27 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran final def markForAsyncTransform(owner: Symbol, method: DefDef, awaitMethod: Symbol, config: Map[String, AnyRef]): DefDef = { val pos = owner.pos - if (!settings.async) + if (!settings.async.value) reporter.warning(pos, s"${settings.async.name} must be enabled for async transformation.") sourceFilesToTransform += pos.source val postAnfTransform = config.getOrElse("postAnfTransform", (x: Block) => x).asInstanceOf[Block => Block] - val stateDiagram = config.getOrElse("stateDiagram", (sym: Symbol, tree: Tree) => None).asInstanceOf[(Symbol, Tree) => Option[String => Unit]] + val stateDiagram = config.getOrElse("stateDiagram", (_: Symbol, _: Tree) => None).asInstanceOf[(Symbol, Tree) => Option[String => Unit]] val allowExceptionsToPropagate = config.contains("allowExceptionsToPropagate") method.updateAttachment(new AsyncAttachment(awaitMethod, postAnfTransform, stateDiagram, allowExceptionsToPropagate)) + method.updateAttachment(ForceMatchDesugar) // Wrap in `{ expr: Any }` to force value class boxing before calling `completeSuccess`, see test/async/run/value-class.scala deriveDefDef(method) { rhs => - Block(Apply(gen.mkAttributedRef(definitions.Predef_locally), rhs :: Nil), Literal(Constant(()))) + Block(Apply(gen.mkAttributedRef(definitions.Predef_locally), rhs :: Nil).updateAttachment(TypedExpectingUnitAttachment), Literal(Constant(()))) }.updateAttachment(ChangeOwnerAttachment(owner)) } - def newTransformer(unit: CompilationUnit): Transformer = new AsyncTransformer(unit) + def newTransformer(unit: CompilationUnit): AstTransformer = new AsyncTransformer(unit) private def compileTimeOnlyPrefix: String = "[async] " /** Should refchecks defer reporting `@compileTimeOnly` errors for `sym` and instead let this phase issue the warning 
* if they survive the async tranform? */ - private[scala] def deferCompileTimeOnlyError(sym: Symbol): Boolean = settings.async && { + private[scala] def deferCompileTimeOnlyError(sym: Symbol): Boolean = settings.async.value && { awaits.contains(sym) || { val msg = sym.compileTimeOnlyMessage.getOrElse("") val shouldDefer = @@ -76,12 +75,16 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran // TOOD: figure out how to make the root-level async built-in macro sufficiently configurable: // replace the ExecutionContext implicit arg with an AsyncContext implicit that also specifies the type of the Future/Awaitable/Node/...? final class AsyncTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { - private lazy val liftableMap = new mutable.AnyRefMap[Symbol, (Symbol, List[Tree])]() + private lazy val liftableMap = new mutable.HashMap[Symbol, (Symbol, List[Tree])]() override def transformUnit(unit: CompilationUnit): Unit = { - if (settings.async) { + if (settings.async.value) { // NoSourceFile can happen for, e.g., toolbox compilation; overestimate by always transforming them. 
See test/async/jvm/toolbox.scala - val shouldTransform = unit.source == NoSourceFile || sourceFilesToTransform.contains(unit.source) + val shouldTransform = unit.source == NoSourceFile || sourceFilesToTransform(unit.source) || (unit.source match { + // scala/bug#13050, see also `PerRunReporting.repSrc` + case r: ReplBatchSourceFile => sourceFilesToTransform(r.parserSource) + case _ => false + }) if (shouldTransform) super.transformUnit(unit) if (awaits.exists(_.isInitialized)) { unit.body.foreach { @@ -117,14 +120,14 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran deriveTemplate(impl)(liftedTrees ::: _) }) } - assert(localTyper.context.owner == cd.symbol.owner) + assert(localTyper.context.owner == cd.symbol.owner, "local typer context's owner must be ClassDef symbol's owner") val withFields = new UseFields(localTyper, cd.symbol, applySym, liftedSyms, NoSymbol).transform(cd1) withFields case dd: DefDef if dd.hasAttachment[AsyncAttachment] => val asyncAttachment = dd.getAndRemoveAttachment[AsyncAttachment].get val asyncBody = (dd.rhs: @unchecked) match { - case blk@Block(Apply(qual, body :: Nil) :: Nil, Literal(Constant(()))) => body + case Block(Apply(_, body :: Nil) :: Nil, Literal(Constant(()))) => body } atOwner(dd, dd.symbol) { @@ -151,8 +154,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran currentTransformState = saved } } - case tree => - tree + case transformed => transformed } private def asyncTransform(asyncBody: Tree): (Tree, List[Tree]) = { @@ -210,7 +212,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran private class UseFields(initLocalTyper: analyzer.Typer, stateMachineClass: Symbol, applySym: Symbol, liftedSyms: Set[Symbol], selfSym: Symbol) extends explicitOuter.OuterPathTransformer(initLocalTyper) { private def fieldSel(tree: Tree) = { - assert(currentOwner != NoSymbol) + assert(currentOwner != NoSymbol, "currentOwner cannot be NoSymbol") val 
outerOrThis = if (selfSym != NoSymbol) gen.mkAttributedIdent(selfSym) diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala index d6c54d6c1315..202a1ef6990e 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -49,7 +49,7 @@ trait AsyncTransformStates extends TypingTransformers { lazy val stateCompleteFailure: Symbol = stateMachineMember(TermName("completeFailure")) lazy val stateGetCompleted: Symbol = stateMachineMember(TermName("getCompleted")) lazy val stateTryGet: Symbol = stateMachineMember(TermName("tryGet")) - lazy val whileLabel: Symbol = applySym.newLabel(nme.WHILE_PREFIX).setInfo(MethodType(Nil, definitions.UnitTpe)) + lazy val whileLabel: Symbol = applySym.newLabel(TermName(nme.WHILE_PREFIX)).setInfo(MethodType(Nil, definitions.UnitTpe)) lazy val tryGetIsIdentity: Boolean = exitingTyper { stateTryGet.info.finalResultType.termSymbol == stateTryGet.firstParam diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 1c693cf852ac..8436020f6c50 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -21,7 +21,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { private def stateAssigner = currentTransformState.stateAssigner private def labelDefStates = currentTransformState.labelDefStates - private object replaceResidualJumpsWithStateTransitions extends Transformer { + private object replaceResidualJumpsWithStateTransitions extends AstTransformer { override def transform(tree: Tree): Tree = { // TODO: This is only needed for Scala.js compatibility. // See https://github.com/scala/scala/pull/8816#issuecomment-640725321 @@ -35,7 +35,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { Block(StateTransitionStyle.UpdateAndContinue.trees(state, new StateSet), typedCurrentPos(literalUnit)).setType(definitions.UnitTpe) case None => ap } - case tree => tree + case transformed => transformed } } } @@ -70,7 +70,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { stat => stats1 += stat if (stat.attachments.containsElement(StateTransitionTree)) { - assert(!foundStateTransition) + assert(!foundStateTransition, "cannot find more than one state transition") foundStateTransition = true // Insert post-state null assignments immediately after the state transition addNullAssigments(postNulls) @@ -125,7 +125,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { private[this] var built: Boolean = false /** Build the state using the accumulated `stats` followed by a state transition. */ def build(nextState: Int, style: StateTransitionStyle): AsyncState = { - assert(!built) + assert(!built, "cannot build after already built") built = true // Record whether this state was free of meaningful stats (excluding unit literals which creep in after // the ANF and state machine transforms and the state transition code added below. 
@@ -198,8 +198,8 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { addStats() - private def addState(state: AsyncState): AsyncState = { - assert(building) + private def addState(state: AsyncState): state.type = { + assert(building, "must be building to add state") assert(!statesMap.contains(state.state), "Duplicate state: " + state) statesMap(state.state) = state state @@ -214,7 +214,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { addState(stateBuilder.build(endState, style = style)) } if (isRoot && currState != endState) { - addState(new AsyncState(Nil, endState, Array(), true)) + addState(new AsyncState(Nil, endState, Array(), isEmpty = true)) } } @@ -312,7 +312,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { buildStateAndOpenNextState(afterLabelState, style = StateTransitionStyle.None) } } else if (containsAwait(rhs)) { - // A while loop containg an await. We assuming that the the backward branch is reachable across the async + // A while loop containing an await. We assume that the backward branch is reachable across the async 
// // In theory we could avoid creating this state in code like: @@ -337,7 +337,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { private def buildNestedStatesFirstForInlining(nestedTree: Tree, endState: Int): (AsyncState, List[AsyncState]) = { val (nestedStats, nestedExpr) = statsAndExpr(nestedTree) val nestedBuilder = new AsyncBlockBuilder(nestedStats, nestedExpr, currState, endState, StateTransitionStyle.None, Some(this)) - val ((inlinedState :: Nil), nestedStates) = nestedBuilder.build.partition(_.state == currState) + val (inlinedState :: Nil, nestedStates) = nestedBuilder.build.partition(_.state == currState): @unchecked inlinedState.nextStates.foreach(stateBuilder.nextStates += _) (inlinedState, nestedStates) } @@ -370,7 +370,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { addState(state) loop(rest, needsAfterState || state.nextStates.contains(afterState)) } - loop(nestedStates, false) + loop(nestedStates, needsAfterState = false) } } @@ -396,8 +396,8 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { val blockBuilder = new AsyncBlockBuilder(stats, expr, startState, endState, startToEndUpdateStyle = StateTransitionStyle.Update) new AsyncBlock { - private val switchIds = mutable.AnyRefMap[Integer, Integer]() - private val emptyReplacements = mutable.AnyRefMap[Integer, Integer]() + private val switchIds = mutable.HashMap[Integer, Integer]() + private val emptyReplacements = mutable.HashMap[Integer, Integer]() private def switchIdOf(state: Integer) = switchIds(emptyReplacements.getOrElse(state, state)) // render with http://graphviz.it/#/new @@ -411,7 +411,13 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { (line: String) => builder.append(br) // TODO Wrap with CDATA instead? 
- builder.append(line.replaceAllLiterally("&", "&amp;").replaceAllLiterally("\"", "&quot;").replaceAllLiterally("<", "&lt;").replaceAllLiterally(">", "&gt;").replaceAllLiterally(" ", "&nbsp;")) + builder.append( + line.replace("&", "&amp;") + .replace("\"", "&quot;") + .replace("<", "&lt;") + .replace(">", "&gt;") + .replace(" ", "&nbsp;") + ) } builder.append(br) builder.append("") @@ -421,8 +427,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { def stateLabel(s: Int) = { if (s == 0) "INITIAL" else if (s == StateAssigner.Terminal) "TERMINAL" else (if (compactStates) switchIdOf(s) else s).toString } - val length = states.size - for ((state, i) <- asyncStates.zipWithIndex) { + for (state <- asyncStates) { dotBuilder.append(s"""${stateLabel(state.state)} [label=""").append("<") def show(t: Tree): String = { (t match { @@ -482,10 +487,10 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { // Filter out dead or trivial states. private def filterStates(all: List[AsyncState]): List[AsyncState] = if (compactStates) { - val ((initial :: Nil), rest) = all.partition(_.state == blockBuilder.startState) + val (initial :: Nil, rest) = all.partition(_.state == blockBuilder.startState): @unchecked val map = all.iterator.map(x => (x.state, x)).toMap val seen = mutable.HashSet[Int]() - seen.add(all.last.state) + seen.addOne(all.last.state) def followEmptyState(state: AsyncState): AsyncState = if (state.isEmpty && state.nextStates.size == 1) { val next = state.nextStates(0) if (next == blockBuilder.endState) state @@ -501,7 +506,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { } def loop(state: AsyncState): Unit = { if (!emptyReplacements.contains(state.state)) - seen.add(state.state) + seen.addOne(state.state) for (i <- state.nextStates if !seen.contains(i) && i != StateAssigner.Terminal) { loop(map(i)) } @@ -517,10 +522,10 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { live } else all - private val compactStateTransform = new Transformer { + private val 
compactStateTransform: AstTransformer = new AstTransformer { val transformState = currentTransformState override def transform(tree: Tree): Tree = tree match { - case as @ Apply(qual: Select, (lit @ Literal(Constant(i: Integer))) :: Nil) if qual.symbol == transformState.stateSetter && compactStates => + case Apply(qual: Select, (lit @ Literal(Constant(i: Integer))) :: Nil) if qual.symbol == transformState.stateSetter && compactStates => val replacement = switchIdOf(i) treeCopy.Apply(tree, qual, treeCopy.Literal(lit, Constant(replacement)):: Nil) case _: Match | _: CaseDef | _: Block | _: If | _: LabelDef => @@ -627,7 +632,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { } } - /** Update the state variable and jump to the the while loop that encloses the state machine. */ + /** Update the state variable and jump to the while loop that encloses the state machine. */ case object UpdateAndContinue extends StateTransitionStyle { def trees(nextState: Int, stateSet: StateSet): List[Tree] = { stateSet += nextState diff --git a/src/compiler/scala/tools/nsc/transform/async/Lifter.scala b/src/compiler/scala/tools/nsc/transform/async/Lifter.scala index c9183527b745..b7ae5ec924d6 100644 --- a/src/compiler/scala/tools/nsc/transform/async/Lifter.scala +++ b/src/compiler/scala/tools/nsc/transform/async/Lifter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,7 +12,7 @@ package scala.tools.nsc.transform.async -import scala.collection.mutable +import scala.collection.{mutable, Seq => CSeq} import scala.reflect.internal.Flags._ trait Lifter extends ExprBuilder { @@ -28,7 +28,7 @@ trait Lifter extends ExprBuilder { object companionship { private val companions = collection.mutable.Map[Symbol, Symbol]() - def record(classes: Seq[Symbol], moduleClasses: Seq[Symbol]): Unit = { + def record(classes: CSeq[Symbol], moduleClasses: CSeq[Symbol]): Unit = { // Keep note of local companions so we rename them consistently when lifting. for { cd <- classes @@ -69,7 +69,7 @@ trait Lifter extends ExprBuilder { } } companionship.record(classesBuffer, moduleClassesBuffer) - assert(!expr.isInstanceOf[ClassDef]) + assert(!expr.isInstanceOf[ClassDef], "expression cannot be a class def") super.traverse(tree) case _ => super.traverse(tree) @@ -95,7 +95,7 @@ trait Lifter extends ExprBuilder { // The definitions trees val symToTree: mutable.LinkedHashMap[Symbol, Tree] = defs.map { - case (k, v) => (k.symbol, k) + case (k, _) => (k.symbol, k) } // The direct references of each definition tree @@ -182,10 +182,11 @@ trait Lifter extends ExprBuilder { case NoSymbol => sym.setName(currentTransformState.name.freshen(sym.name.toTypeName)) sym.setName(sym.name.toTypeName) - case classSymbol => // will be renamed by above. + case classSymbol@_ => // will be renamed by above. } treeCopy.ClassDef(cd, Modifiers(sym.flags), sym.name, tparams, impl) } + case x => throw new MatchError(x) } atPos(t.pos)(treeLifted) }.toList diff --git a/src/compiler/scala/tools/nsc/transform/async/LiveVariables.scala b/src/compiler/scala/tools/nsc/transform/async/LiveVariables.scala index c5431adba1c3..82a5bef32ba0 100644 --- a/src/compiler/scala/tools/nsc/transform/async/LiveVariables.scala +++ b/src/compiler/scala/tools/nsc/transform/async/LiveVariables.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,6 +12,8 @@ package scala.tools.nsc.transform.async +import scala.annotation._ +import scala.collection.immutable.ArraySeq import scala.collection.mutable import scala.reflect.internal.Flags._ @@ -27,7 +29,7 @@ trait LiveVariables extends ExprBuilder { * @param liftables the lifted fields * @return a map which indicates fields which are used for the final time in each state. */ - def fieldsToNullOut(asyncStates: List[AsyncState], finalState: AsyncState, + def fieldsToNullOut(asyncStates: List[AsyncState], @unused finalState: AsyncState, liftables: List[Tree]): mutable.LinkedHashMap[Int, (mutable.LinkedHashSet[Symbol], mutable.LinkedHashSet[Symbol])] = { val liftedSyms = mutable.LinkedHashSet[Symbol]() @@ -46,7 +48,7 @@ trait LiveVariables extends ExprBuilder { liftedSyms -= sym } - /** + /* * Traverse statements of an `AsyncState`, collect `Ident`-s referring to lifted fields. 
* * @param as a state of an `async` expression @@ -110,7 +112,7 @@ trait LiveVariables extends ExprBuilder { g.finish() } - graph.lastReferences[Int](liftedSyms.toArray[Symbol])(_.t.state) + graph.lastReferences[Int](ArraySeq.unsafeWrapArray(liftedSyms.toArray[Symbol]))(_.t.state) } private final class Graph[T] { @@ -177,7 +179,7 @@ trait LiveVariables extends ExprBuilder { } private var finished = false def finish(): this.type = { - assert(!finished) + assert(!finished, "cannot finish when already finished") for (node <- nodes.valuesIterator) { foreachWithIndex(node.succTs) {(succT, i) => val succ = nodes(succT) @@ -189,7 +191,7 @@ trait LiveVariables extends ExprBuilder { this } def lastReferences[K](syms: IndexedSeq[Symbol])(keyMapping: Node => K): mutable.LinkedHashMap[K, (mutable.LinkedHashSet[Symbol], mutable.LinkedHashSet[Symbol])] = { - assert(finished) + assert(finished, "lastReferences before finished") val symIndices: Map[Symbol, Int] = syms.zipWithIndex.toMap val nodeValues = nodes.values.toArray nodeValues.foreach { node => @@ -225,11 +227,11 @@ trait LiveVariables extends ExprBuilder { result } } - mutable.LinkedHashMap(nodeValues.map { x => + mutable.LinkedHashMap(ArraySeq.unsafeWrapArray(nodeValues.map { x => val pre = toSymSet(x.deadOnEntryLiveOnPredecessorExit) val post = toSymSet(x.deadOnExitLiveOnEntry) (keyMapping(x), (pre, post)) - }: _*) + }): _*) } } } diff --git a/src/compiler/scala/tools/nsc/transform/async/StateAssigner.scala b/src/compiler/scala/tools/nsc/transform/async/StateAssigner.scala index f5971c5a3671..39feb481b84d 100644 --- a/src/compiler/scala/tools/nsc/transform/async/StateAssigner.scala +++ b/src/compiler/scala/tools/nsc/transform/async/StateAssigner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/compiler/scala/tools/nsc/transform/async/StateSet.scala b/src/compiler/scala/tools/nsc/transform/async/StateSet.scala index 62c8aebb87b9..cef46b1addba 100644 --- a/src/compiler/scala/tools/nsc/transform/async/StateSet.scala +++ b/src/compiler/scala/tools/nsc/transform/async/StateSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,7 @@ package scala.tools.nsc.transform.async import java.util import java.util.function.{Consumer, IntConsumer} -import scala.collection.JavaConverters.{asScalaIteratorConverter, iterableAsScalaIterableConverter} +import scala.jdk.CollectionConverters._ // Set for StateIds, which are either small positive integers or -symbolID. final class StateSet { diff --git a/src/compiler/scala/tools/nsc/transform/async/TransformUtils.scala b/src/compiler/scala/tools/nsc/transform/async/TransformUtils.scala index 58886dae2247..05204a8440fe 100644 --- a/src/compiler/scala/tools/nsc/transform/async/TransformUtils.scala +++ b/src/compiler/scala/tools/nsc/transform/async/TransformUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -56,7 +56,7 @@ private[async] trait TransformUtils extends AsyncTransformStates { case trees @ (init :+ last) => val pos = trees.map(_.pos).reduceLeft(_ union _) Block(init, last).setType(last.tpe).setPos(pos) - case Nil => + case _ => throw new MatchError(trees) } @@ -78,6 +78,7 @@ private[async] trait TransformUtils extends AsyncTransformStates { case MatchEnd(ld) => ld.symbol.modifyInfo { case MethodType(params, _) => MethodType(params, exprType) + case x => throw new MatchError(x) } treeCopy.LabelDef(ld, ld.name, ld.params, deriveTree(ld.rhs, exprType)(deriveExpr)).setType(exprType) case _ => @@ -200,7 +201,7 @@ private[async] trait TransformUtils extends AsyncTransformStates { t.setAttachments(t.attachments.addElement(NoAwait)) } - val stack = mutable.ArrayStack[Tree]() + val stack = mutable.Stack[Tree]() override def traverse(tree: Tree): Unit = { stack.push(tree) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index ec37415ae1a6..a39ea17d7d1a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,14 +13,16 @@ package scala package tools.nsc.transform.patmat -import scala.language.postfixOps -import scala.collection.mutable -import scala.reflect.internal.util.{HashSet, Position} +import scala.collection.immutable.ArraySeq +import scala.collection.{IterableOps, mutable} +import scala.reflect.internal.util.Collections._ +import scala.reflect.internal.util.HashSet +import scala.tools.nsc.transform.patmat.Logic.LogicLinkedHashSet -trait Logic extends Debugging { +trait Logic extends Debugging { import global._ - private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max + private def max(xs: Seq[Int]) = if (xs.isEmpty) 0 else xs.max private def alignedColumns(cols: Seq[Any]): Seq[String] = { def toString(x: Any) = if (x == null) "" else x.toString if (cols.isEmpty || cols.tails.isEmpty) cols map toString @@ -50,7 +52,7 @@ trait Logic extends Debugging { type Type type Tree - class Prop + sealed abstract class Prop final case class Eq(p: Var, q: Const) extends Prop type Const @@ -112,16 +114,21 @@ trait Logic extends Debugging { def implications: List[(Sym, List[Sym], List[Sym])] } + // Using LogicLinkedHashSet (a custom mutable.LinkedHashSet subclass) to ensure deterministic exhaustivity + // messages. immutable.ListSet was too slow (concatenate cost? scala/bug#12499). 
+ // would be nice to statically check whether a prop is equational or pure, // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop) - final case class And(ops: Set[Prop]) extends Prop + final case class And(ops: LogicLinkedHashSet[Prop]) extends Prop object And { - def apply(ops: Prop*) = new And(ops.toSet) + def apply(ps: Prop*) = create(ps) + def create(ps: Iterable[Prop]) = new And(ps.to(LogicLinkedHashSet)) } - final case class Or(ops: Set[Prop]) extends Prop + final case class Or(ops: LogicLinkedHashSet[Prop]) extends Prop object Or { - def apply(ops: Prop*) = new Or(ops.toSet) + def apply(ps: Prop*) = create(ps) + def create(ps: Iterable[Prop]) = new Or(ps.to(LogicLinkedHashSet)) } final case class Not(a: Prop) extends Prop @@ -156,12 +163,21 @@ trait Logic extends Debugging { val newSym = new Sym(variable, const) (uniques findEntryOrUpdate newSym) } - def nextSymId = {_symId += 1; _symId}; private var _symId = 0 + def nextSymId = {_symId += 1; _symId}; private[this] var _symId = 0 implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id) } - def /\(props: Iterable[Prop]) = if (props.isEmpty) True else And(props.toSeq: _*) - def \/(props: Iterable[Prop]) = if (props.isEmpty) False else Or(props.toSeq: _*) + def /\(props: Iterable[Prop]) = props match { + case _ if props.isEmpty => True + case _ if props.sizeIs == 1 => props.head + case _ => And.create(props) + } + + def \/(props: Iterable[Prop]) = props match { + case _ if props.isEmpty => False + case _ if props.sizeIs == 1 => props.head + case _ => Or.create(props) + } /** * Simplifies propositional formula according to the following rules: @@ -175,11 +191,11 @@ trait Logic extends Debugging { * * Complexity: DFS over formula tree * - * See http://www.decision-procedures.org/slides/propositional_logic-2x3.pdf + * See https://www.decision-procedures.org/slides/propositional_logic-2x3.pdf */ def simplify(f: Prop): Prop = { - def 
hasImpureAtom(ops0: collection.Iterable[Prop]): Boolean = { + def hasImpureAtom(ops0: Iterable[Prop]): Boolean = { // HOT method, imperative rewrite of: // ops.combinations(2).exists { // case Seq(a, Not(b)) if a == b => true @@ -203,18 +219,19 @@ trait Logic extends Debugging { else if (size == 2) { // Specialized versions for size 2+3 val it = ops0.iterator val result = checkPair(it.next(), it.next()) - assert(!it.hasNext) + assert(!it.hasNext, "iterator must be empty") result } else if (size == 3) { val it = ops0.iterator val a = it.next() val b = it.next() val c = it.next() - assert(!it.hasNext) + assert(!it.hasNext, "iterator must be empty") checkPair(a, b) || checkPair(a, c) || checkPair(b, c) } else { val ops = new Array[Prop](size) - ops0.copyToArray(ops) + @annotation.unused val copied = ops0.copyToArray(ops) + //assert(copied == ops.length, "") var i = 0 val len = ops.length while (i < len - 1) { @@ -229,81 +246,81 @@ trait Logic extends Debugging { } } + def mapConserve[CC[X] <: IterableOps[X, CC, CC[X]], A <: AnyRef](s: CC[A])(f: A => A): CC[A] = { + var changed = false + val s1 = s.map {a => + val a1 = f(a) + if (a1 ne a) changed = true + a1 + } + if (changed) s1 else s + } + // push negation inside formula def negationNormalFormNot(p: Prop): Prop = p match { - case And(ops) => Or(ops.map(negationNormalFormNot)) // De Morgan - case Or(ops) => And(ops.map(negationNormalFormNot)) // De Morgan - case Not(p) => negationNormalForm(p) - case True => False - case False => True - case s: Sym => Not(s) + case And(ops) => Or(mapConserve(ops)(negationNormalFormNot)) // De Morgan + case Or(ops) => And(mapConserve(ops)(negationNormalFormNot)) // De Morgan + case Not(p) => negationNormalForm(p) + case True => False + case False => True + case s: Sym => Not(s) + case Eq(_, _) => Not(p) + case p @ AtMostOne(_) => Not(p) } def negationNormalForm(p: Prop): Prop = p match { - case And(ops) => And(ops.map(negationNormalForm)) - case Or(ops) => 
Or(ops.map(negationNormalForm)) + case And(ops) => + val ops1 = mapConserve(ops)(negationNormalForm) + if (ops1 eq ops) p else And(ops1) + case Or(ops) => + val ops1 = mapConserve(ops)(negationNormalForm) + if (ops1 eq ops) p else Or(ops1) case Not(negated) => negationNormalFormNot(negated) case True | False | (_: Sym) + | (_: Eq) | (_: AtMostOne) => p } - def simplifyProp(p: Prop): Prop = p match { - case And(fv) => - // recurse for nested And (pulls all Ands up) - // build up Set in order to remove duplicates - val opsFlattenedBuilder = collection.immutable.Set.newBuilder[Prop] - for (prop <- fv) { - val simplified = simplifyProp(prop) - if (simplified != True) { // ignore `True` - simplified match { - case And(fv) => fv.foreach(opsFlattenedBuilder += _) - case f => opsFlattenedBuilder += f - } - } - } - val opsFlattened = opsFlattenedBuilder.result() - - if (opsFlattened.contains(False) || hasImpureAtom(opsFlattened)) { - False - } else { - opsFlattened.size match { - case 0 => True - case 1 => opsFlattened.head - case _ => new And(opsFlattened) - } - } - case Or(fv) => - // recurse for nested Or (pulls all Ors up) - // build up Set in order to remove duplicates - val opsFlattenedBuilder = collection.immutable.Set.newBuilder[Prop] - for (prop <- fv) { - val simplified = simplifyProp(prop) - if (simplified != False) { // ignore `False` - simplified match { - case Or(fv) => fv.foreach(opsFlattenedBuilder += _) - case f => opsFlattenedBuilder += f - } - } + def simplifyAnd(ps: Iterable[Prop]): Prop = { + // recurse for nested And (pulls all Ands up) + // build up Set in order to remove duplicates + val props = LogicLinkedHashSet.empty[Prop] + for (prop <- ps) { + simplifyProp(prop) match { + case True => // ignore `True` + case And(fv) => fv.foreach(props += _) + case f => props += f } - val opsFlattened = opsFlattenedBuilder.result() - - if (opsFlattened.contains(True) || hasImpureAtom(opsFlattened)) { - True - } else { - opsFlattened.size match { - case 0 => 
False - case 1 => opsFlattened.head - case _ => new Or(opsFlattened) - } + } + + if (props.contains(False) || hasImpureAtom(props)) False + else /\(props) + } + + def simplifyOr(ps: Iterable[Prop]): Prop = { + // recurse for nested Or (pulls all Ors up) + // build up Set in order to remove duplicates + val props = LogicLinkedHashSet.empty[Prop] + for (prop <- ps) { + simplifyProp(prop) match { + case False => // ignore `False` + case Or(fv) => props ++= fv + case f => props += f } - case Not(Not(a)) => - simplify(a) - case Not(p) => - Not(simplify(p)) - case p => - p + } + + if (props.contains(True) || hasImpureAtom(props)) True + else \/(props) + } + + def simplifyProp(p: Prop): Prop = p match { + case And(ps) => simplifyAnd(ps) + case Or(ps) => simplifyOr(ps) + case Not(Not(a)) => simplify(a) + case Not(p) => Not(simplify(p)) + case p => p } val nnf = negationNormalForm(f) @@ -325,20 +342,20 @@ trait Logic extends Debugging { def applySymbol(x: Sym): Unit = {} } - def gatherVariables(p: Prop): Set[Var] = { - val vars = new mutable.HashSet[Var]() + def gatherVariables(p: Prop): collection.Set[Var] = { + val vars = new LogicLinkedHashSet[Var]() (new PropTraverser { override def applyVar(v: Var) = vars += v })(p) - vars.toSet + vars } - def gatherSymbols(p: Prop): Set[Sym] = { - val syms = new mutable.HashSet[Sym]() + def gatherSymbols(p: Prop): collection.Set[Sym] = { + val syms = new LogicLinkedHashSet[Sym]() (new PropTraverser { override def applySymbol(s: Sym) = syms += s })(p) - syms.toSet + syms } trait PropMap { @@ -367,10 +384,8 @@ trait Logic extends Debugging { } - // TODO: remove since deprecated - val budgetProp = scala.sys.Prop[String]("scalac.patmat.analysisBudget") - if (budgetProp.isSet) { - reportWarning(s"Please remove -D${budgetProp.key}, it is ignored.") + if (System.getProperty("scalac.patmat.analysisBudget") != null) { + reportWarning(s"Please remove -Dscalac.patmat.analysisBudget, it is ignored.") } // convert finite domain propositional logic 
with subtyping to pure boolean propositional logic @@ -379,7 +394,7 @@ trait Logic extends Debugging { // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain // in a prelude (the equality axioms) - // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain + // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiable types in its domain // 2. for each variable V in props, and each constant C it is compared to, // compute which assignments imply each other (as in the example above: V = 1 implies V = Int) // and which assignments are mutually exclusive (V = String implies -(V = Int)) @@ -394,7 +409,7 @@ trait Logic extends Debugging { def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null - val vars = new mutable.HashSet[Var] + val vars = new LogicLinkedHashSet[Var] object gatherEqualities extends PropTraverser { override def apply(p: Prop) = p match { @@ -416,17 +431,30 @@ trait Logic extends Debugging { val pure = props map (p => rewriteEqualsToProp(p)) - val eqAxioms = mutable.ArrayBuffer[Prop]() + val eqAxioms = ArraySeq.newBuilder[Prop] @inline def addAxiom(p: Prop) = eqAxioms += p debug.patmat("removeVarEq vars: "+ vars) vars.foreach { v => + val isScrutineeVar = v == vars.head + // if v.domainSyms.isEmpty, we must consider the domain to be infinite // otherwise, since the domain fully partitions the type of the value, // exactly one of the types (and whatever it implies, imposed separately) must be chosen // consider X ::= A | B | C, and A => B // coverage is formulated as: A \/ B \/ C and the implications are - v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) } + v.domainSyms foreach { dsyms => + 
// if the domain is known to be empty + // only add that axiom if the var is the scrutinee + // which has the effect of wiping out the whole formula + // (because `\/(Set.empty) == False`) + // otherwise it's just a subvariable of a match expression + // which has no domain (i.e. an unreachable branch) + // but it shouldn't wipe out the whole exhaustivity check + // neg/t8511 vs (pos/t6146 or neg/virtpatmat_exhaust_compound.scala) + if (isScrutineeVar || dsyms.nonEmpty) + addAxiom(\/(dsyms)) + } // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type, // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom @@ -458,18 +486,19 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { } } - debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}") + val eqAxiomsSeq = eqAxioms.result() + debug.patmat(s"eqAxioms:\n${eqAxiomsSeq.mkString("\n")}") debug.patmat(s"pure:${pure.mkString("\n")}") if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) - (And(eqAxioms: _*), pure) + (And(eqAxiomsSeq: _*), pure) } type Solvable def propToSolvable(p: Prop): Solvable = { - val (eqAxiom, pure :: Nil) = removeVarEq(List(p), modelNull = false) + val (eqAxiom, pure :: Nil) = removeVarEq(List(p), modelNull = false): @unchecked eqFreePropToSolvable(And(eqAxiom, pure)) } @@ -481,12 +510,37 @@ trait Logic extends Debugging { final case class Solution(model: Model, unassigned: List[Sym]) - def findModelFor(solvable: Solvable): Model + def hasModel(solvable: Solvable): Boolean def findAllModelsFor(solvable: Solvable, sym: Symbol = NoSymbol): List[Solution] } } +object Logic { + import scala.annotation.nowarn + import scala.collection.mutable.{Growable, GrowableBuilder, SetOps} + import scala.collection.{IterableFactory, IterableFactoryDefaults, StrictOptimizedIterableOps} + + // Local subclass because we can't override `addAll` in the collections (bin compat), see PR 
scala/scala#10361 + @nowarn("msg=inheritance from class LinkedHashSet") + class LogicLinkedHashSet[A] extends mutable.LinkedHashSet[A] + with SetOps[A, LogicLinkedHashSet, LogicLinkedHashSet[A]] + with StrictOptimizedIterableOps[A, LogicLinkedHashSet, LogicLinkedHashSet[A]] + with IterableFactoryDefaults[A, LogicLinkedHashSet] { + override def iterableFactory: IterableFactory[LogicLinkedHashSet] = LogicLinkedHashSet + override def addAll(xs: IterableOnce[A]): this.type = { + sizeHint(xs) + super.addAll(xs) + } + } + + object LogicLinkedHashSet extends IterableFactory[LogicLinkedHashSet] { + override def from[A](source: IterableOnce[A]): LogicLinkedHashSet[A] = Growable.from(empty[A], source) + override def empty[A]: LogicLinkedHashSet[A] = new LogicLinkedHashSet[A] + override def newBuilder[A]: mutable.Builder[A, LogicLinkedHashSet[A]] = new GrowableBuilder(empty[A]) + } +} + trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis { type Type = global.Type @@ -497,12 +551,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() } object Var extends VarExtractor { - private var _nextId = 0 + private[this] var _nextId = 0 def nextId = {_nextId += 1; _nextId} def resetUniques() = {_nextId = 0; uniques.clear()} private val uniques = new mutable.HashMap[Tree, Var] - def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe)) + def apply(x: Tree): Var = uniques.getOrElseUpdate(x, new Var(x, x.tpe)) def unapply(v: Var) = Some(v.path) } class Var(val path: Tree, staticTp: Type) extends AbsVar { @@ -532,7 +586,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { val subConsts = enumerateSubtypes(staticTp, grouped = false) .headOption.map { tps => - tps.toSet[Type].map{ tp => + tps.to(scala.collection.immutable.ListSet).map { tp => val 
domainC = TypeConst(tp) registerEquality(domainC) domainC @@ -553,18 +607,18 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { val subtypes = enumerateSubtypes(staticTp, grouped = true) subtypes.map { subTypes => - val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).toSet + val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).to(scala.collection.immutable.ListSet) if (mayBeNull) syms + symForEqualsTo(NullConst) else syms }.filter(_.nonEmpty) } // populate equalitySyms // don't care about the result, but want only one fresh symbol per distinct constant c - def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))} + def registerEquality(c: Const): Unit = { ensureCanModify() ; symForEqualsTo.getOrElseUpdate(c, Sym(this, c)) } // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness) // (registerEquality(c) must have been called prior, either when constructing the domain or from outside) - def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)} + def propForEqualsTo(c: Const): Prop = { observed() ; symForEqualsTo.getOrElse(c, False) } // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]p /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C) @@ -595,7 +649,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { // else debug.patmat("NOT implies: "+(lower, upper)) - /** Does V=A preclude V=B? + /* Does V=A preclude V=B? * * (0) A or B must be in the domain to draw any conclusions. 
* @@ -670,7 +724,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A) // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula) val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const))) - val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const)) + val (excluded, notExcluded) = partitionConserve(todo)(b => excludes(sym.const, b.const)) val implied = notExcluded filter (b => implies(sym.const, b.const)) debug.patmat("eq axioms for: "+ sym.const) @@ -689,13 +743,14 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable)) // don't access until all potential equalities have been registered using registerEquality - private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList} + private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList.sortBy(_.toString) } // don't call until all equalities have been registered and registerNull has been called (if needed) def describe = { + val consts = symForEqualsTo.keys.toSeq.sortBy(_.toString) def domain_s = domain match { - case Some(d) => d mkString (" ::= ", " | ", "// "+ symForEqualsTo.keys) - case _ => symForEqualsTo.keys mkString (" ::= ", " | ", " | ...") + case Some(d) => d.mkString(" ::= ", " | ", "// " + consts) + case _ => consts.mkString(" ::= ", " | ", " | ...") } s"$this: ${staticTp}${domain_s} // = $path" } @@ -703,8 +758,8 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { } - import global.{ConstantType, SingletonType, Literal, Ident, singleType, TypeBounds, NoSymbol} import global.definitions._ + import global.{ConstantType, Ident, Literal, NoSymbol, SingletonType, TypeBounds, 
singleType} // all our variables range over types @@ -713,25 +768,23 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { object Const { def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()} - private var _nextTypeId = 0 + private[this] var _nextTypeId = 0 def nextTypeId = {_nextTypeId += 1; _nextTypeId} - private var _nextValueId = 0 + private[this] var _nextValueId = 0 def nextValueId = {_nextValueId += 1; _nextValueId} private val uniques = new mutable.HashMap[Type, Const] private[TreesAndTypesDomain] def unique(tp: Type, mkFresh: => Const): Const = - uniques.get(tp).getOrElse( - uniques.find {case (oldTp, oldC) => oldTp =:= tp} match { - case Some((_, c)) => - debug.patmat("unique const: "+ ((tp, c))) - c - case _ => - val fresh = mkFresh - debug.patmat("uniqued const: "+ ((tp, fresh))) - uniques(tp) = fresh - fresh + uniques.getOrElse(tp, { + // normalize to increase the chance of structural equality and reduce the cost of =:= + val normalized = tp.map(_.normalize) + uniques.getOrElseUpdate(normalized, { + val const = uniques.keysIterator.find(_ =:= normalized).fold(mkFresh)(uniques) + debug.patmat(s"unique const: ${tp -> const}") + const }) + }) private val trees = mutable.HashSet.empty[Tree] @@ -781,7 +834,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { // (At least conceptually: `true` is an instance of class `Boolean`) private def widenToClass(tp: Type): Type = if (tp.typeSymbol.isClass) tp - else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp) + else if (tp.baseClasses.isEmpty) AnyTpe else tp.baseType(tp.baseClasses.head) object TypeConst extends TypeConstExtractor { @@ -828,14 +881,14 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { else ConstantType(c) case Ident(_) if p.symbol.isStable => // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type - // for Selects, which are handled by the 
next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala) + // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see neg/virtpatmat_unreach_select.scala) singleType(tp.prefix, p.symbol) case _ => Const.uniqueTpForTree(p) } val toString = - if (hasStableSymbol(p)) p.symbol.name.toString // tp.toString + if (p.hasSymbolField && p.symbol.isStable) p.symbol.name.toString // tp.toString else p.toString //+"#"+ id Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index bd67d273ef5c..6180ff4bb5f2 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,6 +12,7 @@ package scala.tools.nsc.transform.patmat +import scala.annotation._ import scala.collection.mutable import scala.tools.nsc.Reporting.WarningCategory @@ -22,52 +23,29 @@ trait TreeAndTypeAnalysis extends Debugging { /** Compute the type T implied for a value `v` matched by a pattern `pat` (with expected type `pt`). * - * Usually, this is the pattern's type because pattern matching implies instance-of checks. + * Usually, this is the pattern's type because pattern matching implies instance-of checks. * - * However, Stable Identifier and Literal patterns are matched using `==`, - * which does not imply a type for the binder that binds the matched value. 
+ * However, Stable Identifier and Literal patterns are matched using `==`, + * which does not imply a type for the binder that binds the matched value. + * E.g., in `case x@Nil => x `, all we know about `x` is that it satisfies `Nil == x`, which could be anything. + * A type pattern with a literal type works the same as the corresponding literal pattern. + * A literal pattern with a Boolean or Unit pattern does enforce that the respective value (`true`, `false`, `()`) + * was matched, so in those cases, the pattern type is assumed. * - * See scala/bug#1503, scala/bug#5024: don't cast binders to types we're not sure they have + * The other patterns imply type tests, so we can safely deduce that the binder has + * the pattern's type when the pattern matches. + * Concretely, a literal, type pattern, a case class (the constructor's result type) + * or extractor (the unapply's argument type) all imply type tests. * - * TODO: update spec as follows (deviation between `**`): - * - * A pattern binder x@p consists of a pattern variable x and a pattern p. - * The type of the variable x is the static type T **IMPLIED BY** the pattern p. - * This pattern matches any value v matched by the pattern p - * **Deleted: , provided the run-time type of v is also an instance of T, ** - * and it binds the variable name to that value. - * - * Addition: - * A pattern `p` _implies_ a type `T` if the pattern matches only values of the type `T`. 
+ * See scala/bug#1503, scala/bug#5024: don't cast binders to types we're not sure they have */ - def binderTypeImpliedByPattern(pat: Tree, pt: Type, binder: Symbol): Type = + def binderTypeImpliedByPattern(pat: Tree, pt: Type): Type = pat match { - // because `==` decides whether these patterns match, stable identifier patterns (ident or selection) - // do not contribute any type information (beyond the pattern's expected type) - // e.g., in case x@Nil => x --> all we know about `x` is that it satisfies Nil == x, which could be anything - case Ident(_) | Select(_, _) => - if (settings.future) pt - else { - // TODO: don't warn unless this unsound assumption is actually used in a cast - // I tried annotating the type returned here with an internal annotation (`pat.tpe withAnnotation UnsoundAssumptionAnnotation`), - // and catching it in the patmat backend when used in a cast (because that would signal the unsound assumption was used), - // but the annotation didn't bubble up... - // This is a pretty poor approximation. 
- def unsoundAssumptionUsed = binder.name != nme.WILDCARD && !(pt <:< pat.tpe) - if (settings.warnUnsoundMatch && unsoundAssumptionUsed) - reporter.warning(pat.pos, - sm"""The value matched by $pat is bound to ${binder.name}, which may be used under the - |unsound assumption that it has type ${pat.tpe}, whereas we can only safely - |count on it having type $pt, as the pattern is matched using `==` (see scala/bug#1503).""") - - pat.tpe - } - - - // the other patterns imply type tests, so we can safely assume the binder has the pattern's type when the pattern matches - // concretely, a literal, type pattern, a case class (the constructor's result type) or extractor (the unapply's argument type) all imply type tests - // (and, inductively, an alternative) - case _ => pat.tpe + case _ if pat.tpe <:< BooleanTpe || pat.tpe <:< UnitTpe || pat.tpe <:< StringTpe + => pat.tpe + case Ident(_) | Select(_, _) | Literal(_) => pt + case Typed(_, _) if pat.tpe.isInstanceOf[ConstantType] => pt + case _ => pat.tpe } // we use subtyping as a model for implication between instanceof tests @@ -89,105 +67,97 @@ trait TreeAndTypeAnalysis extends Debugging { } def equivalentTree(a: Tree, b: Tree): Boolean = (a, b) match { - case (Select(qual1, _), Select(qual2, _)) => equivalentTree(qual1, qual2) && a.symbol == b.symbol - case (Ident(_), Ident(_)) => a.symbol == b.symbol - case (Literal(c1), Literal(c2)) => c1 == c2 - case (This(_), This(_)) => a.symbol == b.symbol - case (Apply(fun1, args1), Apply(fun2, args2)) => equivalentTree(fun1, fun2) && args1.corresponds(args2)(equivalentTree) - // Those are the only cases we need to handle in the pattern matcher - case _ => false + case (Select(qual1, _), Select(qual2, _)) => equivalentTree(qual1, qual2) && a.symbol == b.symbol + case (Ident(_), Ident(_)) => a.symbol == b.symbol + case (Literal(c1), Literal(c2)) => c1 == c2 + case (This(_), This(_)) => a.symbol == b.symbol + case (Apply(fun1, args1), Apply(fun2, args2)) => equivalentTree(fun1, 
fun2) && args1.corresponds(args2)(equivalentTree) + case (TypeApply(fun1, args1), TypeApply(fun2, args2)) => equivalentTree(fun1, fun2) && args1.corresponds(args2)(equivalentTree) + case (a @ TypeTree(), b @ TypeTree()) => a.tpe =:= b.tpe + case _ => false // Those are the only cases we need to handle in the pattern matcher } trait CheckableTreeAndTypeAnalysis { val typer: Typer - // TODO: domain of other feasibly enumerable built-in types (char?) - def enumerateSubtypes(tp: Type, grouped: Boolean): List[List[Type]] = - tp.typeSymbol match { - // TODO case _ if tp.isTupleType => // recurse into component types? - case UnitClass if !grouped => - List(List(UnitTpe)) - case BooleanClass if !grouped => - List(ConstantTrue :: ConstantFalse :: Nil) - // TODO case _ if tp.isTupleType => // recurse into component types - case modSym: ModuleClassSymbol if !grouped => - List(List(tp)) - case sym: RefinementClassSymbol => - val parentSubtypes = tp.parents.flatMap(parent => enumerateSubtypes(parent, grouped)) - if (parentSubtypes exists (_.nonEmpty)) { - // If any of the parents is enumerable, then the refinement type is enumerable. - // We must only include subtypes of the parents that conform to `tp`. - // See neg/virtpatmat_exhaust_compound.scala for an example. - parentSubtypes map (_.filter(_ <:< tp)) - } - else Nil - // make sure it's not a primitive, else (5: Byte) match { case 5 => ... 
} sees no Byte - case sym if sym.isSealed => - - val tpApprox = analyzer.approximateAbstracts(tp) - val pre = tpApprox.prefix - - def filterChildren(children: List[Symbol]): List[Type] = { - children flatMap { sym => - // have to filter out children which cannot match: see ticket #3683 for an example - // compare to the fully known type `tp` (modulo abstract types), - // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String] - // however, must approximate abstract types in - - val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner) - val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType)) - val subTpApprox = analyzer.approximateAbstracts(subTp) // TODO: needed? - // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox)) - if (subTpApprox <:< tpApprox) Some(checkableType(subTp)) - else None - } + def enumerateSubtypes(tp: Type, grouped: Boolean): List[List[Type]] = tp.typeSymbol match { + case UnitClass => List(List(UnitTpe)) + case BooleanClass => List(List(ConstantTrue, ConstantFalse)) + case sym if sym.isModuleClass => List(List(tp)) + case sym if sym.isRefinementClass => enumerateRefinement(tp, grouped) + case sym if sym.isSealed => enumerateSealed(tp, grouped) + case sym if sym.isCase => List(List(tp)) + case sym if sym.isTypeSkolem => enumerateSubtypes(sym.info.upperBound, grouped) // pos/t12277 + case sym => debug.patmatResult(s"enum unsealed tp=$tp sym=$sym")(Nil) + } + + private def enumerateRefinement(tp: Type, grouped: Boolean) = { + val parentSubtypes = tp.parents.flatMap(parent => enumerateSubtypes(parent, grouped)) + if (parentSubtypes.exists(_.nonEmpty)) { + // If any of the parents is enumerable, then the refinement type is enumerable. + // We must only include subtypes of the parents that conform to `tpApprox`. + // See neg/virtpatmat_exhaust_compound.scala and pos/t9657.scala for examples. 
+ val approximateTypeSkolemsToUpperBound = new TypeMap { // from approximateAbstracts + def apply(tp: Type): Type = tp.dealiasWiden match { + case TypeRef(_, sym, _) if sym.isTypeSkolem => tp.upperBound + case _ => mapOver(tp) } + } + val tpApprox = approximateTypeSkolemsToUpperBound(tp) + parentSubtypes.map(_.filter(_ <:< tpApprox)) + } else Nil + } - if(grouped) { - def enumerateChildren(sym: Symbol) = { - sym.sealedChildren.toList - .sortBy(_.sealedSortName) - .filterNot(x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)) - } + private def enumerateSealed(tp: Type, grouped: Boolean): List[List[Type]] = { + val tpApprox = analyzer.approximateAbstracts(tp) + val pre = tp.prefix + val sym = tp.typeSymbol + + def subclassesToSubtypes(syms: List[Symbol]): List[Type] = syms.flatMap { sym => + // have to filter out children which cannot match: see ticket #3683 for an example + // compare to the fully known type `tp` (modulo abstract types), + // so that we can rule out stuff like: + // sealed trait X[T]; class XInt extends X[Int] + // XInt not valid when enumerating X[String] + // however, must also approximate abstract types + val memberType = nestedMemberType(sym, pre, tp.typeSymbol.owner) + val subTp = appliedType(memberType, WildcardType.fillList(sym.typeParams.length)) + val subTpApprox = analyzer.approximateAbstracts(subTp) + if (subTpApprox <:< tpApprox) Some(checkableType(subTp)) else None + } - // enumerate only direct subclasses, - // subclasses of subclasses are enumerated in the next iteration - // and added to a new group - def groupChildren(wl: List[Symbol], - acc: List[List[Type]]): List[List[Type]] = wl match { - case hd :: tl => - val children = enumerateChildren(hd) - // put each trait in a new group, since traits could belong to the same - // group as a derived class - val (traits, nonTraits) = children.partition(_.isTrait) - val filtered = (traits.map(List(_)) ++ List(nonTraits)).map(filterChildren) - groupChildren(tl ++ children, acc 
++ filtered) - case Nil => acc - } + def filterAndSortChildren(children: Set[Symbol]) = { + // symbols which are both sealed and abstract need not be covered themselves, + // because all of their children must be and they cannot otherwise be created. + val children1 = children.toList + .filterNot(child => child.isSealed && (child.isAbstractClass || child.hasJavaEnumFlag)) + .sortBy(_.sealedSortName) + children1.filterNot { child => + // remove private abstract children that are superclasses of other children, for example in t6159 drop X2 + child.isPrivate && child.isAbstractClass && children1.exists(sym => (sym ne child) && sym.isSubClass(child)) + } + } - groupChildren(sym :: Nil, Nil) - } else { - val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")( - // symbols which are both sealed and abstract need not be covered themselves, because - // all of their children must be and they cannot otherwise be created. - sym.sealedDescendants.toList - sortBy (_.sealedSortName) - filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)) - ) - - List(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") { - // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic - filterChildren(subclasses) - }) - } - case sym if sym.isCase => - List(List(tp)) + @tailrec def groupChildren(wl: List[Symbol], acc: List[List[Symbol]]): List[List[Symbol]] = wl match { + case Nil => acc + case hd :: tl => + val children = filterAndSortChildren(hd.sealedChildren) + // put each trait in a new group since traits could belong to the same group as a derived class + val (traits, nonTraits) = children.partition(_.isTrait) + groupChildren(tl ::: children, acc ::: traits.map(List(_)).appended(nonTraits)) + } - case sym => - debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))) - Nil + val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses") { + if (grouped) 
groupChildren(List(sym), Nil) + else List(filterAndSortChildren(sym.sealedDescendants)) } + debug.patmatResult(s"enum $sym sealed tp=$tp tpApprox=$tpApprox, subtypes") { + // A valid subtype is turned into a checkable type, as we are entering the realm of the dynamic + subclasses.map(subclassesToSubtypes) + } + } + // approximate a type to the static type that is fully checkable at run time, // hiding statically known but dynamically uncheckable information using existential quantification // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time) @@ -201,24 +171,19 @@ trait TreeAndTypeAnalysis extends Debugging { // See neg/t6771b.scala for elaboration def apply(tp: Type): Type = tp.dealias match { case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) => - TypeRef(pre, sym, args map (_ => WildcardType)) + TypeRef(pre, sym, WildcardType.fillList(args.length)) case _ => mapOver(tp) } } - val result = typeArgsToWildcardsExceptArray(tp) - debug.patmatResult(s"checkableType($tp)")(result) + debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp)) } - // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed) - // we consider tuple types with at least one component of a checkable type as a checkable type + // A type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed) + // A tuple of all uncheckable types is uncheckable def uncheckableType(tp: Type): Boolean = { - val checkable = { - if (isTupleType(tp)) tupleComponents(tp).exists(tp => !uncheckableType(tp)) - else enumerateSubtypes(tp, grouped = false).nonEmpty - } - // if (!checkable) debug.patmat("deemed uncheckable: "+ tp) - !checkable + if (isTupleType(tp)) tupleComponents(tp).forall(uncheckableType) + else enumerateSubtypes(tp, grouped = false).isEmpty } } } @@ -236,13 +201,6 @@ trait MatchApproximation extends 
TreeAndTypeAnalysis with ScalaLogic with MatchT var currId = 0 } case class Test(prop: Prop, treeMaker: TreeMaker) { - // private val reusedBy = new mutable.HashSet[Test] - var reuses: Option[Test] = None - def registerReuseBy(later: Test): Unit = { - assert(later.reuses.isEmpty, later.reuses) - // reusedBy += later - later.reuses = Some(this) - } val id = { Test.currId += 1; Test.currId} override def toString = s"T${id}C($prop)" } @@ -256,17 +214,18 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT private[this] val uniqueTypeProps = new mutable.HashMap[(Tree, Type), Eq] def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop = - uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs))) + uniqueEqualityProps.getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs))) def uniqueNonNullProp (testedPath: Tree): Prop = - uniqueNonNullProps getOrElseUpdate(testedPath, Not(Eq(Var(testedPath), NullConst))) + uniqueNonNullProps.getOrElseUpdate(testedPath, Not(Eq(Var(testedPath), NullConst))) def uniqueTypeProp(testedPath: Tree, pt: Type): Prop = - uniqueTypeProps getOrElseUpdate((testedPath, pt), Eq(Var(testedPath), TypeConst(checkableType(pt)))) + uniqueTypeProps.getOrElseUpdate((testedPath, pt), Eq(Var(testedPath), TypeConst(checkableType(pt)))) // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively) - private val pointsToBound = mutable.HashSet(root) - private val trees = mutable.HashSet.empty[Tree] + private val pointsToBound = mutable.HashSet(root) + private val trees = mutable.HashSet.empty[Tree] + private val extractBinders = mutable.HashMap.empty[Tree, Symbol] // the substitution that renames variables to variables in pointsToBound private var normalize: Substitution = EmptySubstitution @@ -309,7 +268,21 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT // 
binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders abstract class TreeMakerToProp extends (TreeMaker => Prop) { // requires(if (!substitutionComputed)) - def updateSubstitution(subst: Substitution): Unit = { + def updateSubstitution(tm: TreeMaker): Unit = { + val subst = tm.subPatternsAsSubstitution + + tm match { + case x @ ExtractorTreeMaker(_, None, binder) => + val extractor = accumSubst(normalize(x.extractor)) + extractBinders.collectFirst { + case (t, reuseBinder) if equivalentTree(t, extractor) => reuseBinder + } match { + case Some(reuseBinder) => normalize >>= Substitution(binder, binderToUniqueTree(reuseBinder)) + case None => extractBinders(extractor) = binder + } + case _ => + } + // find part of substitution that replaces bound symbols by new symbols, and reverse that part // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal @@ -334,7 +307,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT val okSubst = Substitution(unboundFrom.toList, unboundTo.toList) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway foreach2(okSubst.from, okSubst.to){(f, t) => - if (pointsToBound exists (sym => t.exists(_.symbol == sym))) + if (pointsToBound.exists(sym => t.exists(_.symbol == sym)) || tm.isInstanceOf[ExtractorTreeMaker]) pointsToBound += f } // debug.patmat("pointsToBound: "+ pointsToBound) @@ -355,53 +328,80 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT * TODO: don't ignore outer-checks */ def apply(tm: TreeMaker): Prop = { - if (!substitutionComputed) updateSubstitution(tm.subPatternsAsSubstitution) + if (!substitutionComputed) updateSubstitution(tm) tm match { - case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) => - object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy { - type Result = Prop - def 
and(a: Result, b: Result) = And(a, b) - def withOuterTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType) - def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T) - val p = binderToUniqueTree(b); And(uniqueNonNullProp(p), uniqueTypeProp(p, uniqueTp(pt))) - } - def nonNullTest(testedBinder: Symbol) = uniqueNonNullProp(binderToUniqueTree(testedBinder)) - def equalsTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat)) - // rewrite eq test to type test against the singleton type `pat.tpe`; unrelated to == (uniqueEqualityProp), could be null - def eqTest(pat: Tree, testedBinder: Symbol) = uniqueTypeProp(binderToUniqueTree(testedBinder), uniqueTp(pat.tpe)) - def tru = True - } - ttm.renderCondition(condStrategy) + case ttm @ TypeTestTreeMaker(_, _, _, _) => ttm.renderCondition(condStrategy) case EqualityTestTreeMaker(prevBinder, patTree, _) => uniqueEqualityProp(binderToUniqueTree(prevBinder), unique(patTree)) case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map this))) case ProductExtractorTreeMaker(testedBinder, None) => uniqueNonNullProp(binderToUniqueTree(testedBinder)) case SubstOnlyTreeMaker(_, _) => True - case GuardTreeMaker(guard) => - guard.tpe match { - case ConstantTrue => True - case ConstantFalse => False - case _ => handleUnknown(tm) - } - case ExtractorTreeMaker(_, _, _) | - ProductExtractorTreeMaker(_, _) | - BodyTreeMaker(_, _) => handleUnknown(tm) + case NonNullTestTreeMaker(prevBinder, _, _) => uniqueNonNullProp(binderToUniqueTree(prevBinder)) + case GuardTreeMaker(guard) if guard.tpe == ConstantTrue => True + case GuardTreeMaker(guard) if guard.tpe == ConstantFalse => False + case _ => handleUnknown(tm) } } } + object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy { + type Result = Prop + def and(a: Result, b: Result) = And(a, b) + def 
withOuterTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType) + def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T) + val p = binderToUniqueTree(b); And(uniqueNonNullProp(p), uniqueTypeProp(p, uniqueTp(pt))) + } + def nonNullTest(testedBinder: Symbol) = uniqueNonNullProp(binderToUniqueTree(testedBinder)) + def equalsTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat)) + // rewrite eq test to type test against the singleton type `pat.tpe`; unrelated to == (uniqueEqualityProp), could be null + def eqTest(pat: Tree, testedBinder: Symbol) = uniqueTypeProp(binderToUniqueTree(testedBinder), uniqueTp(pat.tpe)) + def tru = True + } + + + private def isIrrefutabilityProof(sym: Symbol): Boolean = { + sym.isMethod && sym.name == nme.isEmpty && { + // ConstantFalse is foldable but in joint compilation (bug?) this will be a literal type + // with case using `==` rather than `=:=` we need to do this instead. neg/t12240.scala + sym.tpe.finalResultType match { + case c: ConstantType => c.value == Constant(false) + case _ => false + } + } + } + // will an extractor with unapply method of methodtype `tp` always succeed? + // note: this assumes the other side-conditions implied by the extractor are met + // (argument of the right type, length check succeeds for unapplySeq,...) + private def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match { + //Some(x) is irrefutable + case TypeRef(_, SomeClass, _) => true + //name based pattern matching checks for constant false `isEmpty`. 
+ case TypeRef(_, res, _) => res.tpe.members.exists(isIrrefutabilityProof) + //`true.type` is irrefutable for boolean extractors + case c: ConstantType => c.value == Constant(true) + case _ => false + } private val irrefutableExtractor: PartialFunction[TreeMaker, Prop] = { - // the extra condition is None, the extractor's result indicates it always succeeds, + // if the extra condition is None, the extractor's result indicates it always succeeds, // (the potential type-test for the argument is represented by a separate TypeTestTreeMaker) - case IrrefutableExtractorTreeMaker(_, _) => True + case ExtractorTreeMaker(extractor, None, _) if irrefutableExtractorType(extractor.tpe) => True + // Otherwise, if we call the pattern irrefutable here, these conditions + // are no longer checked and considered true in exhaustiveness and + // reachability checking. + // Therefore, the below case alone would treat too much "irrefutable" + // that really isn't. Something similar is needed (perhaps elsewhere) + // to check whether a set of unapplySeq's with all arities is together + // exhaustive + //case p @ ExtractorTreeMaker(extractor, Some(conditions), _) if irrefutableExtractorType(extractor.tpe) => True } // special-case: interpret pattern `List()` as `Nil` + // as of 2.13, List.unapply returns an UnapplySeqWrapper (rather than a List) + // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea... 
private val rewriteListPattern: PartialFunction[TreeMaker, Prop] = { case p @ ExtractorTreeMaker(_, _, testedBinder) - if testedBinder.tpe.typeSymbol == ListClass && p.checkedLength == Some(0) => + if testedBinder.tpe.typeSymbol == UnapplySeqWrapperClass && p.checkedLength == Some(0) => uniqueEqualityProp(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe)) } val fullRewrite = (irrefutableExtractor orElse rewriteListPattern) @@ -461,12 +461,13 @@ trait MatchAnalysis extends MatchApproximation { // the case is reachable if there is a model for -P /\ C, // thus, the case is unreachable if there is no model for -(-P /\ C), // or, equivalently, P \/ -C, or C => P - def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = { + def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], @unused pt: Type): Option[Int] = { + debug.patmat("reachability analysis") val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null // use the same approximator so we share variables, // but need different conditions depending on whether we're conservatively looking for failure or success - // don't rewrite List-like patterns, as List() and Nil need to distinguished for unreachability + // don't rewrite List-like patterns, as List() and Nil need to be distinguished for unreachability val approx = new TreeMakersToProps(prevBinder) def approximate(default: Prop) = approx.approximateMatch(cases, approx.onUnknown { tm => approx.refutableRewrite.applyOrElse(tm, (_: TreeMaker) => default ) @@ -501,13 +502,8 @@ trait MatchAnalysis extends MatchApproximation { else { prefix += prefHead current = current.tail - val and = And((current.head +: prefix): _*) - val model = findModelFor(eqFreePropToSolvable(and)) - - // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix)) - // if (NoModel ne model) debug.patmat("reached: "+ 
modelString(model)) - - reachable = NoModel ne model + val and = And((current.head +: prefix).toIndexedSeq: _*) + reachable = hasModel(eqFreePropToSolvable(and)) } } @@ -523,21 +519,26 @@ trait MatchAnalysis extends MatchApproximation { // exhaustivity - def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else { + def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], @unused pt: Type): List[String] = if (!settings.warnStrictUnsealedPatMat && uncheckableType(prevBinder.info)) Nil else { + debug.patmat("exhaustiveness analysis") // customize TreeMakersToProps (which turns a tree of tree makers into a more abstract DAG of tests) // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`, // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive - // - back off (to avoid crying exhaustive too often) when: - // - there are guards --> - // - there are extractor calls (that we can't secretly/soundly) rewrite + // - back off (to avoid crying exhaustive too often) in unhandled cases val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null var backoff = false + val strict = !settings.nonStrictPatmatAnalysis.value val approx = new TreeMakersToProps(prevBinder) val symbolicCases = approx.approximateMatch(cases, approx.onUnknown { tm => approx.fullRewrite.applyOrElse[TreeMaker, Prop](tm, { case BodyTreeMaker(_, _) => True // irrelevant -- will be discarded by symbolCase later - case _ => // debug.patmat("backing off due to "+ tm) + case ExtractorTreeMaker(_, _, _) + | ProductExtractorTreeMaker(_, _) + | GuardTreeMaker(_) if strict => + False + case _ => + debug.patmat("backing off due to "+ tm) backoff = true False }) @@ -571,18 +572,15 @@ trait MatchAnalysis extends MatchApproximation { val matchFailModels = findAllModelsFor(propToSolvable(matchFails), prevBinder) 
val scrutVar = Var(prevBinderTree) - val counterExamples = { - matchFailModels.flatMap { - model => - val varAssignments = expandModel(model) - varAssignments.flatMap(modelToCounterExample(scrutVar) _) - } - } + val counterExamples = matchFailModels.iterator.flatMap { model => + expandModel(model).flatMap(modelToCounterExample(scrutVar)) + }.take(AnalysisBudget.maxDPLLdepth).toList // sorting before pruning is important here in order to // keep neg/t7020.scala stable // since e.g. List(_, _) would cover List(1, _) - val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString) + // and make sure the strings are distinct, see Shmeez & TestSequence06 in run/patmatnew.scala + val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString).distinct if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) pruned @@ -632,7 +630,7 @@ trait MatchAnalysis extends MatchApproximation { override def coveredBy(other: CounterExample): Boolean = other match { case other@ListExample(_) => - this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b}) + this == other || ((elems.sizeCompare(other.elems) == 0) && (elems zip other.elems).forall{case (a, b) => a coveredBy b}) case _ => super.coveredBy(other) } @@ -644,7 +642,7 @@ trait MatchAnalysis extends MatchApproximation { override def coveredBy(other: CounterExample): Boolean = other match { case TupleExample(otherArgs) => - this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b}) + this == other || ((ctorArgs.sizeCompare(otherArgs) == 0) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b}) case _ => super.coveredBy(other) } } @@ -655,20 +653,21 @@ trait MatchAnalysis extends MatchApproximation { case object WildcardExample extends CounterExample { override def toString = "_" } case object NoExample extends 
CounterExample { override def toString = "??" } + type VarAssignment = Map[Var, (Seq[Const], Seq[Const])] + // returns a mapping from variable to // equal and notEqual symbols - def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] = - model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs => + def modelToVarAssignment(model: Model): VarAssignment = + model.toSeq.groupBy(_._1.variable).view.mapValues{ xs => val (trues, falses) = xs.partition(_._2) (trues map (_._1.const), falses map (_._1.const)) // should never be more than one value in trues... - } + }.to(Map) - def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) = + def varAssignmentString(varAssignment: VarAssignment) = varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) => - val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")" - v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment - }.mkString("\n") + s"$v(=${v.path}: ${v.staticTpCheckable}) == ${trues.mkString("(", ", ", ")")} != (${falses.mkString(", ")})" + }.mkString("\n") /** * The models we get from the DPLL solver need to be mapped back to counter examples. @@ -700,7 +699,7 @@ trait MatchAnalysis extends MatchApproximation { * Only one of these symbols can be set to true, * since `V2` can at most be equal to one of {2,6,5,4,7}. 
*/ - def expandModel(solution: Solution): List[Map[Var, (Seq[Const], Seq[Const])]] = { + def expandModel(solution: Solution): List[VarAssignment] = { val model = solution.model @@ -717,7 +716,7 @@ trait MatchAnalysis extends MatchApproximation { val groupedByVar: Map[Var, List[Sym]] = solution.unassigned.groupBy(_.variable) val expanded = for { - (variable, syms) <- groupedByVar.toList + (variable, syms) <- groupedByVar.toList.sortBy(_._1.toString) } yield { val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil) @@ -733,7 +732,7 @@ trait MatchAnalysis extends MatchApproximation { // a list counter example could contain wildcards: e.g. `List(_,_)` val allEqual = addVarAssignment(syms.map(_.const), Nil) - if(equal.isEmpty) { + if (equal.isEmpty) { val oneHot = for { s <- syms } yield { @@ -745,38 +744,39 @@ trait MatchAnalysis extends MatchApproximation { } } - if (expanded.isEmpty) { - List(varAssignment) - } else { - // we need the Cartesian product here, - // since we want to report all missing cases - // (i.e., combinations) - val cartesianProd = expanded.reduceLeft((xs, ys) => - for {map1 <- xs - map2 <- ys} yield { - map1 ++ map2 - }) - - // add expanded variables - // note that we can just use `++` - // since the Maps have disjoint keySets - for { - m <- cartesianProd - } yield { - varAssignment ++ m + // we need the Cartesian product here, + // since we want to report all missing cases + // (i.e., combinations) + @tailrec def loop(acc: List[VarAssignment], in: List[List[VarAssignment]]): List[VarAssignment] = { + if (acc.sizeIs > AnalysisBudget.maxDPLLdepth) acc.take(AnalysisBudget.maxDPLLdepth) + else in match { + case vs :: vss => loop(for (map1 <- acc; map2 <- vs) yield map1 ++ map2, vss) + case _ => acc } } + expanded match { + case head :: tail => + val cartesianProd = loop(head, tail) + // add expanded variables + // note that we can just use `++` + // since the Maps have disjoint keySets + for (m <- cartesianProd) yield varAssignment ++ 
m + case _ => List(varAssignment) + } } // return constructor call when the model is a true counter example // (the variables don't take into account type information derived from other variables, // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), // since we didn't realize the tail of the outer cons was a Nil) - def modelToCounterExample(scrutVar: Var)(varAssignment: Map[Var, (Seq[Const], Seq[Const])]): Option[CounterExample] = { + def modelToCounterExample(scrutVar: Var)(varAssignment: VarAssignment): Option[CounterExample] = { + val strict = !settings.nonStrictPatmatAnalysis.value + // chop a path into a list of symbols def chop(path: Tree): List[Symbol] = path match { case Ident(_) => List(path.symbol) case Select(pre, name) => chop(pre) :+ path.symbol + case Apply(fun, args) => chop(fun) :+ path.symbol case _ => // debug.patmat("don't know how to chop "+ path) Nil @@ -788,7 +788,7 @@ trait MatchAnalysis extends MatchApproximation { object VariableAssignment { private def findVar(path: List[Symbol]) = path match { case List(root) if root == scrutVar.path.symbol => Some(scrutVar) - case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1) + case _ => varAssignment.find{case (v, _) => chop(v.path) == path}.map(_._1) } private val uniques = new mutable.HashMap[Var, VariableAssignment] @@ -818,7 +818,7 @@ trait MatchAnalysis extends MatchApproximation { // node in the tree that describes how to construct a counter-example case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const]) { - private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty + private val fields: mutable.LinkedHashMap[Symbol, VariableAssignment] = mutable.LinkedHashMap.empty // need to prune since the model now incorporates all super types of a constant (needed for reachability) private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne 
subsumed) && instanceOfTpImplies(better.tp, subsumed.tp))) private lazy val inSameDomain = uniqueEqualTo forall (const => variable.domainSyms.exists(_.exists(_.const.tp =:= const.tp))) @@ -828,7 +828,7 @@ trait MatchAnalysis extends MatchApproximation { private lazy val cls = ctor.safeOwner private lazy val caseFieldAccs = cls.caseFieldAccessors - def addField(symbol: Symbol, assign: VariableAssignment) { + def addField(symbol: Symbol, assign: VariableAssignment): Unit = { // scala/bug#7669 Only register this field if if this class contains it. val shouldConstrainField = !symbol.isCaseAccessor || caseFieldAccs.contains(symbol) if (shouldConstrainField) fields(symbol) = assign @@ -876,7 +876,7 @@ trait MatchAnalysis extends MatchApproximation { case args => args }.map(ListExample) case _ if isTupleSymbol(cls) => args(brevity = true).map(TupleExample) - case _ if cls.isSealed && cls.isAbstractClass => + case _ if cls.isSealed && (cls.isAbstractClass || cls.hasJavaEnumFlag) => // don't report sealed abstract classes, since // 1) they can't be instantiated // 2) we are already reporting any missing subclass (since we know the full domain) @@ -900,7 +900,8 @@ trait MatchAnalysis extends MatchApproximation { // if uniqueEqualTo contains more than one symbol of the same domain // then we can safely ignore these counter examples since we will eventually encounter // both counter examples separately - case _ if inSameDomain => None + // ... 
in strict mode, consider variable assignment as a wild counter-example + case _ if inSameDomain => if (strict) Some(WildcardExample) else None // not a valid counter-example, possibly since we have a definite type but there was a field mismatch // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive @@ -913,7 +914,7 @@ trait MatchAnalysis extends MatchApproximation { } // slurp in information from other variables - varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) } + varAssignment.keys.toSeq.sortBy(_.toString).foreach(v => if (v != scrutVar) VariableAssignment(v)) // this is the variable we want a counter example for VariableAssignment(scrutVar).toCounterExample() diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index ec26625c1f2c..b7c38b023188 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,26 +12,18 @@ package scala.tools.nsc.transform.patmat -import scala.language.postfixOps - import scala.tools.nsc.symtab.Flags.SYNTHETIC -import scala.reflect.internal.util.Position -/** Factory methods used by TreeMakers to make the actual trees. - * - * We have two modes in which to emit trees: optimized (the default) - * and pure (aka "virtualized": match is parametric in its monad). - */ +/** Factory methods used by TreeMakers to make the actual trees. 
*/ trait MatchCodeGen extends Interface { import global._ - import definitions._ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // generate actual trees /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait CodegenCore extends MatchMonadInterface { private var ctr = 0 - def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)} + def freshName(prefix: String) = { ctr += 1; newTermName(s"$prefix$ctr") } // assert(owner ne null); assert(owner ne NoSymbol) def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = @@ -61,6 +53,7 @@ trait MatchCodeGen extends Interface { def flatMap(prev: Tree, b: Symbol, next: Tree): Tree def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree def flatMapGuard(cond: Tree, next: Tree): Tree + def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree def ifThenElseZero(c: Tree, thenp: Tree): Tree = { val z = zero thenp match { @@ -79,18 +72,7 @@ trait MatchCodeGen extends Interface { def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i)) - - // Right now this blindly calls drop on the result of the unapplySeq - // unless it verifiably has no drop method (this is the case in particular - // with Array.) You should not actually have to write a method called drop - // for name-based matching, but this was an expedient route for the basics. 
- def drop(tgt: Tree)(n: Int): Tree = { - def callDirect = fn(tgt, nme.drop, LIT(n)) - def callRuntime = Apply(REF(currentRun.runDefinitions.traversableDropMethod), tgt :: LIT(n) :: Nil) - def needsRuntime = (tgt.tpe ne null) && (elementTypeFromDrop(tgt.tpe) == NoType) - - if (needsRuntime) callRuntime else callDirect - } + def drop(tgt: Tree)(n: Int): Tree = fn(tgt, nme.drop, LIT(n)) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) @@ -106,44 +88,6 @@ trait MatchCodeGen extends Interface { } } - trait PureMatchMonadInterface extends MatchMonadInterface { - val matchStrategy: Tree - import CODE._ - def _match(n: Name): SelectStart = matchStrategy DOT n - - // TODO: error message - private lazy val oneType = typer.typedOperator(_match(vpmName.one)).tpe - override def pureType(tp: Type): Type = firstParamType(appliedType(oneType, tp :: Nil)) - } - - trait PureCodegen extends CodegenCore with PureMatchMonadInterface { - def codegen: AbsCodegen = pureCodegen - - object pureCodegen extends CommonCodegen with Casegen { import CODE._ - //// methods in MatchingStrategy (the monad companion) -- used directly in translation - // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`) - // TODO: consider catchAll, or virtualized matching will break in exception handlers - def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = - _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse)) - - // __match.one(`res`) - def one(res: Tree): Tree = (_match(vpmName.one)) (res) - // __match.zero - protected def zero: Tree = _match(vpmName.zero) - // __match.guard(`c`, `then`) - def guard(c: Tree, thenp: Tree): Tree = _match(vpmName.guard) APPLY (c, thenp) - - //// methods in the monad instance -- used directly in translation - // `prev`.flatMap(`b` => 
`next`) - def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next)) - // `thisCase`.orElse(`elseCase`) - def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase) - // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`) - def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next) - // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`) - def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitTpe)), next) - } - } trait OptimizedCodegen extends CodegenCore with TypedSubstitution with MatchMonadInterface { override def codegen: AbsCodegen = optimizedCodegen @@ -177,11 +121,11 @@ trait MatchCodeGen extends Interface { // must compute catchAll after caseLabels (side-effects nextCase) // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default) // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd - val catchAllDef = matchFailGen map { matchFailGen => + val catchAllDef = matchFailGen.map { matchFailGen => val scrutRef = scrutSym.fold(EmptyTree: Tree)(REF) // for alternatives - LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef))) - } toList // at most 1 element + LabelDef(_currCase, Nil, matchEnd APPLY matchFailGen(scrutRef)) + }.toList // at most 1 element // scrutSym == NoSymbol when generating an alternatives matcher val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives @@ -203,8 +147,8 @@ trait MatchCodeGen extends Interface { // only used to wrap the RHS of a body // res: T // returns MatchMonad[T] - def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply - protected def zero: Tree = nextCase APPLY () + def one(res: Tree): Tree = 
matchEnd.APPLY(res) // a jump to a case label is special-cased in typedApply + protected def zero: Tree = nextCase.APPLY() // prev: MatchMonad[T] // b: T @@ -216,8 +160,8 @@ trait MatchCodeGen extends Interface { ValDef(prevSym, prev), // must be isEmpty and get as we don't control the target of the call (prev is an extractor call) ifThenElseZero( - NOT(prevSym DOT vpmName.isEmpty), - Substitution(b, prevSym DOT vpmName.get)(next) + NOT(prevSym DOT nme.isEmpty), + Substitution(b, prevSym DOT nme.get)(next) ) ) } @@ -245,7 +189,7 @@ trait MatchCodeGen extends Interface { def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = ifThenElseZero(cond, BLOCK( - condSym === mkTRUE, + condSym === TRUE, nextBinder === res, next )) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala index 0b5c089dbfc7..61f14d404088 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index ce98521778f5..358da87057c9 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.tools.nsc.transform.patmat -import scala.language.postfixOps - +import scala.annotation._ import scala.collection.mutable import scala.tools.nsc.symtab.Flags.{MUTABLE, STABLE} import scala.tools.nsc.Reporting.WarningCategory @@ -23,8 +22,7 @@ import scala.tools.nsc.Reporting.WarningCategory * The patmat translation doesn't rely on this, so it could be disabled in principle. * - well, not quite: the backend crashes if we emit duplicates in switches (e.g. scala/bug#7290) */ -// TODO: split out match analysis -trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { +trait MatchOptimization extends MatchTreeMaking with MatchApproximation { import global._ import global.definitions._ @@ -37,47 +35,53 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { * the variable is floated up so that its scope includes all of the program that shares it * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree) */ - def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position): List[List[TreeMaker]] = { + def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], @unused pt: Type, selectorPos: Position): List[List[TreeMaker]] = { debug.patmat("before CSE:") showTreeMakers(cases) val testss = approximateMatchConservative(prevBinder, cases) // interpret: - val dependencies = new mutable.LinkedHashMap[Test, Set[Prop]] - val tested = new mutable.HashSet[Prop] + val dependencies = new mutable.LinkedHashMap[Test, mutable.LinkedHashSet[Prop]] + val tested = new mutable.LinkedHashSet[Prop] + val reusesMap = new mutable.LinkedHashMap[Int, Test] + val reusesTest = { (test: Test) => reusesMap.get(test.id) } + val registerReuseBy = { (priorTest: Test, later: Test) => + assert(!reusesMap.contains(later.id), reusesMap(later.id)) + reusesMap(later.id) = priorTest + } // TODO: use SAT solver 
instead of hashconsing props and approximating implication by subset/equality def storeDependencies(test: Test) = { val cond = test.prop def simplify(c: Prop): Set[Prop] = c match { - case And(ops) => ops.toSet flatMap simplify + case And(ops) => ops.flatMap(simplify).toSet case Or(ops) => Set(False) // TODO: make more precise - case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering + case Not(Eq(Var(_), NullConst)) => Set.empty // not worth remembering + case True => Set.empty // same case _ => Set(c) } val conds = simplify(cond) if (conds(False)) false // stop when we encounter a definite "no" or a "not sure" else { - val nonTrivial = conds - True - if (nonTrivial nonEmpty) { - tested ++= nonTrivial + if (!conds.isEmpty) { + tested ++= conds // is there an earlier test that checks our condition and whose dependencies are implied by ours? dependencies find { case (priorTest, deps) => - ((simplify(priorTest.prop) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly - (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions - ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested + ((simplify(priorTest.prop) == conds) || // our conditions are implied by priorTest if it checks the same thing directly + (conds subsetOf deps) // or if it depends on a superset of our conditions + ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested } foreach { case (priorTest, _) => // if so, note the dependency in both tests - priorTest registerReuseBy test + registerReuseBy(priorTest, test) } - dependencies(test) = tested.toSet // copies + dependencies(test) = tested.clone() } true } @@ -103,20 +107,20 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { val collapsed = testss map { tests => // map tests to the 
equivalent list of treemakers, replacing shared prefixes by a reusing treemaker // if there's no sharing, simply map to the tree makers corresponding to the tests - var currDeps = Set[Prop]() + var currDeps = mutable.LinkedHashSet.empty[Prop] val (sharedPrefix, suffix) = tests span { test => (test.prop == True) || (for( - reusedTest <- test.reuses; + reusedTest <- reusesTest(test); nextDeps <- dependencies.get(reusedTest); - diff <- (nextDeps -- currDeps).headOption; - _ <- Some(currDeps = nextDeps)) + diff <- (nextDeps diff currDeps).headOption; + _ <- Some({ currDeps = nextDeps })) yield diff).nonEmpty } val collapsedTreeMakers = if (sharedPrefix.isEmpty) None else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%) - for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match { + for (test <- sharedPrefix; reusedTest <- reusesTest(test)) reusedTest.treeMaker match { case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM, selectorPos) case _ => } @@ -126,9 +130,8 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // if the shared prefix contains interesting conditions (!= True) // and the last of such interesting shared conditions reuses another treemaker's test // replace the whole sharedPrefix by a ReusingCondTreeMaker - for (lastShared <- sharedPrefix.reverse.dropWhile(_.prop == True).headOption; - lastReused <- lastShared.reuses) - yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker) + for (lastShared <- sharedPrefix.reverse.dropWhile(_.prop == True).headOption; _ <- reusesTest(lastShared)) + yield ReusingCondTreeMaker(sharedPrefix, reusesTest, reusedOrOrig) :: suffix.map(_.treeMaker) } collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains True-tests, which are dropped above) @@ -155,13 +158,13 @@ trait MatchOptimization extends MatchTreeMaking 
with MatchAnalysis { } // TODO: finer-grained duplication - def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen) - atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) + def chainBefore(next: Tree)(casegen: Casegen): Tree = + atPos(pos)(casegen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) override def toString = "Memo"+((nextBinder.name, storedCond.name, cond, res, substitution)) } - case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._ + case class ReusingCondTreeMaker(sharedPrefix: List[Test], reusesTest: Test => Option[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._ val pos = sharedPrefix.last.treeMaker.pos lazy val localSubstitution = { @@ -169,7 +172,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { var mostRecentReusedMaker: ReusedCondTreeMaker = null def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder))) val (from, to) = sharedPrefix.flatMap { dropped => - dropped.reuses.map(test => toReused(test.treeMaker)).foreach { + reusesTest(dropped).map(test => toReused(test.treeMaker)).foreach { case reusedMaker: ReusedCondTreeMaker => mostRecentReusedMaker = reusedMaker case _ => @@ -190,7 +193,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _) } - lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head + lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => reusesTest(tm) map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head def 
chainBefore(next: Tree)(casegen: Casegen): Tree = { // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, @@ -201,24 +204,11 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } } - - //// DCE -// trait DeadCodeElimination extends TreeMakers { -// // TODO: non-trivial dead-code elimination -// // e.g., the following match should compile to a simple instanceof: -// // case class Ident(name: String) -// // for (Ident(name) <- ts) println(name) -// def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { -// // do minimal DCE -// cases -// } -// } - - //// SWITCHES -- TODO: operate on Tests rather than TreeMakers + //// SWITCHES trait SwitchEmission extends TreeMakers with MatchMonadInterface { import treeInfo.isGuardedCase - def inAsync: Boolean + def inForceDesugar: Boolean abstract class SwitchMaker { abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] } @@ -292,16 +282,17 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { if (isDefault || !canJump) defaultBody else Apply(Ident(defaultLabel), Nil) - val guardedBody = same.foldRight(jumpToDefault){ + val guardedBody = same.foldRight(jumpToDefault) { // the last case may be unguarded (we know it's the last one since fold's accum == jumpToDefault) // --> replace jumpToDefault by the unguarded case's body - case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b - case (cd@CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els) + case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b + case (cd @ CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els) + case x => throw new MatchError(x) } // if the cases that we're going to collapse bind variables, // must replace them by the single binder introduced by the collapsed case - val binders = same.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol} + val 
binders = same.collect { case CaseDef(x @ Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol } val (pat, guardedBodySubst) = if (binders.isEmpty) (commonPattern, guardedBody) else { @@ -314,7 +305,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // the patterns in same are equal (according to caseEquals) // we can thus safely pick the first one arbitrarily, provided we correct binding val origPatWithoutBind = commonPattern match { - case Bind(b, orig) => orig + case Bind(_, orig) => orig case o => o } // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above) @@ -322,7 +313,8 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { (Bind(binder, origPatWithoutBind), unifiedBody) } - atPos(commonPattern.pos)(CaseDef(pat, EmptyTree, guardedBodySubst)) + val samePos = wrappingPos(same.flatMap(k => List(k.pat, k.body))) + atPos(samePos)(CaseDef(pat, EmptyTree, guardedBodySubst)) } // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless) @@ -398,18 +390,22 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy case (Ident(nme.WILDCARD), _) => true // type-switch for catch - case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)), - Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => instanceOfTpImplies(tpY.tpe, tpX.tpe) - case _ => false + case (Typed(Ident(nme.WILDCARD), tpX), Typed(Ident(nme.WILDCARD), tpY)) => instanceOfTpImplies(tpY.tpe, tpX.tpe) + // peel off binders -- they don't influence matching + case (Bind(_, x), Bind(_, y)) => patternImplies(x)(y) + case (Bind(_, x), y) => patternImplies(x)(y) + case (x, Bind(_, y)) => patternImplies(x)(y) + case _ => false } private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase) // must do this before removing guards from cases and collapsing (scala/bug#6011, scala/bug#6048) - private def unreachableCase(cases: List[CaseDef]): 
Option[CaseDef] = { + def unreachableCase(cases: List[CaseDef]): Option[CaseDef] = { + @tailrec def loop(cases: List[CaseDef]): Option[CaseDef] = cases match { case head :: next :: _ if isDefault(head) => Some(next) // subsumed by the next case, but faster - case head :: rest if !isGuardedCase(head) || head.guard.tpe =:= ConstantTrue => rest find caseImplies(head) orElse loop(rest) + case head :: rest if !isGuardedCase(head) || head.guard.tpe =:= ConstantTrue => rest find caseImplies(head) match { case s @ Some(_) => s case None => loop(rest) } case head :: _ if head.guard.tpe =:= ConstantFalse => Some(head) case _ :: rest => loop(rest) case _ => None @@ -469,7 +465,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // a switch with duplicate cases yields a verify error, // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch // (even though the verify error would disappear, the behaviour would change) - val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty + val allReachable = unreachableCase(caseDefsWithGuards).map(cd => reportUnreachable(cd.body.pos)).isEmpty if (!allReachable) Nil else if (noGuards(caseDefsWithGuards)) { @@ -485,9 +481,12 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { else { def wrapInDefaultLabelDef(cd: CaseDef): CaseDef = if (needDefaultLabel) deriveCaseDef(cd){ b => - // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala - defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt) - LabelDef(defaultLabel, Nil, b) + // If `b` is synthesized in SwitchMaker (by `collapseGuardedCases` or by `defaultCase`) + // it is not yet typed. In order to assign the correct type to the label, type the case body. + // See scala/scala#10926, pos/t13060.scala. 
+ val b1 = if (b.tpe == null) typer.typed(b, pt) else b + defaultLabel setInfo MethodType(Nil, b1.tpe.deconst) + LabelDef(defaultLabel, Nil, b1) } else cd val last = collapsed.last @@ -500,19 +499,33 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } } - class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker { - val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe) + class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker { import CODE._ + val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe, StringTpe) val alternativesSupported = true - val canJump = !inAsync + val canJump = !inForceDesugar // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))` // The tree itself can be a literal, an ident, a selection, ... object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match { - case ConstantType(const) if const.isIntRange => - Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches + case const: ConstantType => + if (const.value.isIntRange) + Some(LIT(const.value.intValue) setPos pat.pos) + else if (const.value.tag == StringTag) + Some(LIT(const.value.stringValue) setPos pat.pos) + else if (const.value.tag == NullTag) + Some(LIT(null) setPos pat.pos) + else None case _ => None }} + def scrutRef(scrut: Symbol): Tree = scrut.tpe.dealiasWiden match { + case subInt if subInt =:= IntTpe => + REF(scrut) + case subInt if definitions.isNumericSubClass(subInt.typeSymbol, IntClass) => + REF(scrut) DOT nme.toInt + case _ => REF(scrut) + } + object SwitchableTreeMaker extends SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] = x match { case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const) @@ -526,8 +539,10 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } 
def defaultSym: Symbol = scrutSym - def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse Throw(MatchErrorClass.tpe, REF(scrutSym)) } - def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) { + def defaultBody: Tree = matchFailGenOverride + .map(gen => gen(REF(scrutSym))) + .getOrElse(Throw(MatchErrorClass.tpe, REF(scrutSym))) + def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { atPos(body.pos) { (DEFAULT IF guard) ==> body }} } @@ -535,17 +550,14 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked) // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result - if (regularSwitchMaker.switchableTpe(dealiasWiden(scrutSym.tpe))) { + if (regularSwitchMaker.switchableTpe(scrutSym.tpe.dealiasWiden)) { val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt) - if (caseDefsWithDefault isEmpty) None // not worth emitting a switch. + if (caseDefsWithDefault.isEmpty) None // not worth emitting a switch. 
else { // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut) - val scrutToInt: Tree = - if (scrutSym.tpe =:= IntTpe) REF(scrutSym) - else (REF(scrutSym) DOT (nme.toInt)) Some(BLOCK( ValDef(scrutSym, scrut), - Match(scrutToInt, caseDefsWithDefault) // a switch + Match(regularSwitchMaker.scrutRef(scrutSym), caseDefsWithDefault) // a switch )) } } else None @@ -582,10 +594,12 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { }} } + override def unreachableTypeSwitchCase(cases: List[CaseDef]): Option[CaseDef] = typeSwitchMaker.unreachableCase(cases) + // TODO: drop null checks override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = { val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt) - if (caseDefsWithDefault isEmpty) None + if (caseDefsWithDefault.isEmpty) None else Some(caseDefsWithDefault) } } @@ -594,13 +608,8 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { with SwitchEmission with CommonSubconditionElimination { override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position): (List[List[TreeMaker]], List[Tree]) = { - // TODO: do CSE on result of doDCE(prevBinder, cases, pt) val optCases = doCSE(prevBinder, cases, pt, selectorPos) - val toHoist = ( - for (treeMakers <- optCases) - yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist} - ).flatten.flatten.toList - (optCases, toHoist) + (optCases, optCases.flatMap(flatCollect(_) { case tm: ReusedCondTreeMaker => tm.treesToHoist })) } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index f18dc348c973..3f508e006b14 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -1,7 
+1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,9 +10,10 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc.transform.patmat +package scala.tools.nsc +package transform.patmat -import scala.language.postfixOps +import scala.annotation._ /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers. */ @@ -28,8 +29,6 @@ trait MatchTranslation { private def setVarInfo(sym: Symbol, info: Type) = sym setInfo debug.patmatResult(s"changing ${sym.defString} to")(repeatedToSeq(info)) - private def hasSym(t: Tree) = t.symbol != null && t.symbol != NoSymbol - trait MatchTranslator extends TreeMakers with TreeMakerWarnings { import typer.context def selectorPos: Position @@ -60,8 +59,8 @@ trait MatchTranslation { object SymbolBound { def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match { - case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr) - case _ => None + case Bind(_, expr) if tree.hasExistingSymbol => Some(tree.symbol -> expr) + case _ => None } } @@ -69,13 +68,12 @@ trait MatchTranslation { private lazy val extractor = ExtractorCall(tree) def pos = tree.pos - def tpe = binder.info.dealiasWiden // the type of the variable bound to the pattern + def tpe = binder.info.dealias // the type of the variable bound to the pattern def pt = unbound match { - case Star(tpt) => this glbWith seqType(tpt.tpe) + case Star(tpt) => seqType(tpt.tpe) case TypeBound(tpe) => tpe - case tree => tree.tpe + case unbound => unbound.tpe } - def glbWith(other: Type) = if (currentRun.isScala213) other else glb(tpe :: other :: Nil).normalize object SymbolAndTypeBound { def unapply(tree: Tree): Option[(Symbol, Type)] = tree match { @@ -97,10 +95,10 @@ trait MatchTranslation { private def bindingStep(sub: Symbol, 
subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern)) private def equalityTestStep() = step(EqualityTestTreeMaker(binder, tree, pos))() - private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, glbWith(subPt))(pos))() + private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, subPt)(pos))() private def alternativesStep(alts: List[Tree]) = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))() private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate()) - private def noStep() = step()() + private def noStep() = step(DummyTreeMaker)() private def unsupportedPatternMsg = sm""" |unsupported pattern: ${tree.shortClass} / $this (this is a scalac bug.) @@ -115,20 +113,21 @@ trait MatchTranslation { val (makers, unappBinder) = { val paramType = extractor.expectedExtractedType // Statically conforms to paramType - if (tpe <:< paramType) (treeMakers(binder, false, pos), binder) + if (tpe <:< paramType) { + // enforce all extractor patterns to be non-null + val nonNullTest = NonNullTestTreeMaker(binder, paramType, pos) + val unappBinder = nonNullTest.nextBinder + (nonNullTest :: treeMakers(unappBinder, pos), unappBinder) + } else { // chain a type-testing extractor before the actual extractor call // it tests the type, checks the outer pointer and casts to the expected type // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder) val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true) - val binderKnownNonNull = typeTest impliesBinderNonNull binder - - // check whether typetest implies binder is not null, - // even though the eventual null check will be on typeTest.nextBinder - // it'll be equal to binder casted 
to paramType anyway (and the type test is on binder) + // binder is known non-null because the type test would not succeed on `null` val unappBinder = typeTest.nextBinder - (typeTest :: treeMakers(unappBinder, binderKnownNonNull, pos), unappBinder) + (typeTest :: treeMakers(unappBinder, pos), unappBinder) } } @@ -181,25 +180,19 @@ trait MatchTranslation { override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")") } - /** Implement a pattern match by turning its cases (including the implicit failure case) - * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator. - * - * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape - * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))` - * - * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed + /** NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed * thus, you must typecheck the result (and that will in turn translate nested matches) - * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch) + * this could probably be optimized... 
*/ def translateMatch(match_ : Match): Tree = { val Match(selector, cases) = match_ val (nonSyntheticCases, defaultOverride) = cases match { - case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((scrut: Tree) => last.body))) + case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((_: Tree) => last.body))) case _ => (cases, None) } - if (!settings.XnoPatmatAnalysis) checkMatchVariablePatterns(nonSyntheticCases) + if (!settings.XnoPatmatAnalysis.value) checkMatchVariablePatterns(nonSyntheticCases) // we don't transform after uncurry // (that would require more sophistication when generating trees, @@ -207,11 +200,11 @@ trait MatchTranslation { if (phase.id >= currentRun.uncurryPhase.id) devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases") - debug.patmat("translating "+ cases.mkString("{", "\n", "}")) + debug.patmat(cases.mkString("translating {", "\n", "}")) val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatNanos) else null - val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations)) + val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.withoutAnnotations)) // when one of the internal cps-type-state annotations is present, strip all CPS annotations val origPt = removeCPSFromPt(match_.tpe) @@ -220,10 +213,10 @@ trait MatchTranslation { val pt = repeatedToSeq(origPt) // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner)) - val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS + val selectorSym = freshSym(selector.pos, selectorTp) setFlag treeInfo.SYNTH_CASE_FLAGS - // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental - val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride) + // pt = Any* occurs when compiling 
test/files/pos/annotDepMethType.scala + val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride, getSuppression(selector)) if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined @@ -236,8 +229,12 @@ trait MatchTranslation { // unlike translateMatch, we type our result before returning it def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] = // if they're already simple enough to be handled by the back-end, we're done - if (caseDefs forall treeInfo.isCatchCase) caseDefs - else { + if (caseDefs forall treeInfo.isCatchCase) { + // well, we do need to look for unreachable cases + if (!settings.XnoPatmatAnalysis.value) unreachableTypeSwitchCase(caseDefs).foreach(cd => reportUnreachable(cd.body.pos)) + + caseDefs + } else { val swatches = { // switch-catches // scala/bug#7459 must duplicate here as we haven't committed to switch emission, and just figuring out // if we can ends up mutating `caseDefs` down in the use of `substituteSymbols` in @@ -245,29 +242,33 @@ trait MatchTranslation { val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef => // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) - val caseScrutSym = freshSym(caseDef.pat.pos, pureType(ThrowableTpe)) - (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution)) + val caseScrutSym = freshSym(caseDef.pat.pos, ThrowableTpe) + (caseScrutSym, translateCase(caseScrutSym, pt)(caseDef)) } for(cases <- emitTypeSwitch(bindersAndCases, pt).toList if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? 
e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end - cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef] + cse <- cases) yield fixerUpper(matchOwner, pos)(cse) } val catches = if (swatches.nonEmpty) swatches else { - val scrutSym = freshSym(caseDefs.head.pat.pos, pureType(ThrowableTpe)) - val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))} - - val exSym = freshSym(pos, pureType(ThrowableTpe), "ex") - - List( - atPos(pos) { - CaseDef( - Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping? - EmptyTree, - combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, selectorPos, matchOwner, Some(scrut => Throw(REF(exSym)))) - ) - }) + val scrutSym = freshSym(caseDefs.head.pat.pos, ThrowableTpe) + val cases = caseDefs.map(translateCase(scrutSym, pt)) + val casesPos = wrappingPos(caseDefs) + + val exSym = freshSym(pos, ThrowableTpe, "ex") + val suppression = + if (settings.XnoPatmatAnalysis.value) Suppression.FullSuppression + else Suppression.NoSuppression.copy(suppressExhaustive = true) // try/catches needn't be exhaustive + + val combo = combineCases(REF(exSym), scrutSym, cases, pt, selectorPos, matchOwner, Some(_ => Throw(REF(exSym))), suppression) + List(atPos(casesPos) { + CaseDef( + Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping? + EmptyTree, + combo + ) + }) } typer.typedCases(catches, ThrowableTpe, WildcardType) @@ -301,9 +302,10 @@ trait MatchTranslation { * a function that will take care of binding and substitution of the next ast (to the right). 
* */ - def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = { + def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef): List[TreeMaker] = { val CaseDef(pattern, guard, body) = caseDef - translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt) + val treeMakers = translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt) + propagateSubstitution(treeMakers, EmptySubstitution) } def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate() @@ -312,11 +314,7 @@ trait MatchTranslation { if (guard == EmptyTree) Nil else List(GuardTreeMaker(guard)) - // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one), - // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand? - // to enable this, probably need to move away from Option to a monad specific to pattern-match, - // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad - // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference + // TODO: body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account def translateBody(body: Tree, matchPt: Type): TreeMaker = BodyTreeMaker(body, matchPt) @@ -373,16 +371,14 @@ trait MatchTranslation { def apply(tree: Tree): ExtractorCall = tree match { case UnApply(unfun@Unapplied(fun), args) => new ExtractorCallRegular(fun, args)(unfun) // extractor case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class + case x => throw new MatchError(x) } } abstract class ExtractorCall(fun: 
Tree, args: List[Tree]) extends ExtractorAlignment(fun, args)(context) { /** Create the TreeMaker that embodies this extractor call - * - * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null - * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder */ - def treeMakers(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] + def treeMakers(binder: Symbol, pos: Position): List[TreeMaker] // `subPatBinders` are the variables bound by this pattern in the following patterns // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is) @@ -397,29 +393,29 @@ trait MatchTranslation { } // never store these in local variables (for PreserveSubPatBinders) - lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet + lazy val ignoredSubPatBinders: Set[Symbol] = (subPatBinders zip args).collect { case (b, PatternBoundToUnderscore()) => b }.toSet // there are `productArity` non-seq elements in the tuple. 
protected def firstIndexingBinder = productArity protected def expectedLength = elementArity protected def lastIndexingBinder = nonStarArity - 1 - private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList - private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder)) - private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder))(expectedLength) :: Nil + private def productElemsToN(binder: Symbol, n: Int): List[Tree] = if (n == 0) Nil else List.tabulate(n)(i => tupleSel(binder)(i + 1)) + private def genTake(binder: Symbol, n: Int): List[Tree] = if (n == 0) Nil else List.tabulate(n)(codegen index seqTree(binder, forceImmutable = false)) + private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder, forceImmutable = false))(n) :: Nil // codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList - protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1) - protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i) + protected def seqTree(binder: Symbol, @unused forceImmutable: Boolean) = tupleSel(binder)(firstIndexingBinder + 1) + protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i) // the trees that select the subpatterns on the extractor's result, // referenced by `binder` protected def subPatRefsSeq(binder: Symbol): List[Tree] = { - def lastTrees: List[Tree] = ( + def lastTrees: List[Tree] = { if (!isStar) Nil - else if (expectedLength == 0) seqTree(binder) :: Nil + else if (expectedLength == 0) seqTree(binder, forceImmutable = true) :: Nil else genDrop(binder, expectedLength) - ) + } // this error-condition has already been checked by checkStarPatOK: // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) @@ 
-435,10 +431,10 @@ trait MatchTranslation { // the trees that select the subpatterns on the extractor's result, referenced by `binder` // require (nbSubPats > 0 && (!lastIsStar || isSeq)) - protected def subPatRefs(binder: Symbol): List[Tree] = ( + protected def subPatRefs(binder: Symbol): List[Tree] = { if (totalArity > 0 && isSeq) subPatRefsSeq(binder) else productElemsToN(binder, totalArity) - ) + } private def compareInts(t1: Tree, t2: Tree) = gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil) @@ -449,9 +445,12 @@ trait MatchTranslation { // `binder.lengthCompare(expectedLength)` // ...if binder has a lengthCompare method, otherwise // `scala.math.signum(binder.length - expectedLength)` - def checkExpectedLength = lengthCompareSym match { - case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength)) - case lencmp => (seqTree(binder) DOT lencmp)(LIT(expectedLength)) + def checkExpectedLength = { + val tree = seqTree(binder, forceImmutable = false) + val typedTree = typer.typed(tree) + val lengthCompareSym = typedTree.tpe.member(nme.lengthCompare) + if (lengthCompareSym == NoSymbol) compareInts(Select(typedTree, nme.length), LIT(expectedLength)) + else (typedTree DOT lengthCompareSym)(LIT(expectedLength)) } // the comparison to perform @@ -462,7 +461,7 @@ trait MatchTranslation { else _ INT_== _ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero` - (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO) + (seqTree(binder, forceImmutable = false) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO) } def checkedLength: Option[Int] = @@ -478,10 +477,8 @@ trait MatchTranslation { /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary - * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null - * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not 
do a (redundant) null check on binder */ - def treeMakers(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] = { + def treeMakers(binder: Symbol, pos: Position): List[TreeMaker] = { val paramAccessors = expectedExtractedType.typeSymbol.constrParamAccessors val numParams = paramAccessors.length def paramAccessorAt(subPatIndex: Int) = paramAccessors(math.min(subPatIndex, numParams - 1)) @@ -502,7 +499,7 @@ trait MatchTranslation { ) // checks binder ne null before chaining to the next extractor - ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders) :: Nil + ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, ignoredSubPatBinders) :: Nil } // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component @@ -529,21 +526,20 @@ trait MatchTranslation { /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary - * `binderKnownNonNull` is not used in this subclass * * TODO: implement review feedback by @retronym: * Passing the pair of values around suggests: * case class Binder(sym: Symbol, knownNotNull: Boolean). * Perhaps it hasn't reached critical mass, but it would already clean things up a touch. 
*/ - def treeMakers(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] = { + def treeMakers(patBinderOrCasted: Symbol, pos: Position): List[TreeMaker] = { // the extractor call (applied to the binder bound by the flatMap corresponding // to the previous (i.e., enclosing/outer) pattern) val (extractorApply, needsSubst) = spliceApply(pos, patBinderOrCasted) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely // wrong when isSeq, and resultInMonad should always be correct since it comes // directly from the extractor's result type - val binder = freshSym(pos, pureType(resultInMonad(patBinderOrCasted))) + val binder = freshSym(pos, resultInMonad(patBinderOrCasted)) val potentiallyMutableBinders: Set[Symbol] = if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !isSeq) Set.empty @@ -567,9 +563,13 @@ trait MatchTranslation { extractorTreeMaker :: Nil } - override protected def seqTree(binder: Symbol): Tree = - if (firstIndexingBinder == 0) REF(binder) - else super.seqTree(binder) + override protected def seqTree(binder: Symbol, forceImmutable: Boolean): Tree = + if (firstIndexingBinder == 0) { + val ref = REF(binder) + if (forceImmutable && !binder.tpe.typeSymbol.isNonBottomSubClass(SeqClass)) Select(ref, nme.toSeq) + else ref + } + else super.seqTree(binder, forceImmutable) // the trees that select the subpatterns on the extractor's result, referenced by `binder` // require (totalArity > 0 && (!lastIsStar || isSeq)) @@ -579,7 +579,7 @@ trait MatchTranslation { protected def spliceApply(pos: Position, binder: Symbol): (Tree, Boolean) = { var needsSubst = false - object splice extends Transformer { + object splice extends AstTransformer { def binderRef(pos: Position): Tree = REF(binder) setPos pos override def transform(t: Tree) = t match { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala 
index 0b4a699b197b..5416476c7c33 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,25 +12,22 @@ package scala.tools.nsc.transform.patmat -import scala.language.postfixOps - +import scala.annotation._ import scala.collection.mutable import scala.tools.nsc.symtab.Flags.{SYNTHETIC, ARTIFACT} import scala.tools.nsc.Reporting.WarningCategory /** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen. * - * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions, - * mostly agnostic to whether we're in optimized/pure (virtualized) mode. + * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions. 
*/ trait MatchTreeMaking extends MatchCodeGen with Debugging { - import global._ - import definitions._ + import global._, definitions._, CODE._ - final case class Suppression(suppressExhaustive: Boolean, suppressUnreachable: Boolean) + final case class Suppression private (suppressExhaustive: Boolean, suppressUnreachable: Boolean) object Suppression { - val NoSuppression = Suppression(false, false) - val FullSuppression = Suppression(true, true) + val NoSuppression = new Suppression(suppressExhaustive=false, suppressUnreachable=false) + val FullSuppression = new Suppression(suppressExhaustive=true, suppressUnreachable=true) } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -47,6 +44,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = None + // Exposed separately from emitTypeSwitch, so that we can do the analysis for simple cases where we skip emitTypeSwitch + def unreachableTypeSwitchCase(cases: List[CaseDef]): Option[CaseDef] = + None + abstract class TreeMaker { def pos: Position @@ -90,6 +91,15 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { protected val localSubstitution: Substitution = EmptySubstitution } + /** A dummy tree maker used to mark wildcard patterns. + * This is later used to back off from exhaustivity checking. 
+ */ + case object DummyTreeMaker extends TreeMaker with NoNewBinders { + def pos = EmptyTree.pos + + def chainBefore(next: Tree)(casegen: Casegen): Tree = next + } + case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders { def pos = tree.pos @@ -142,15 +152,15 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // mutable case class fields need to be stored regardless (scala/bug#5158, scala/bug#6070) -- see override in ProductExtractorTreeMaker // sub patterns bound to wildcard (_) are never stored as they can't be referenced // dirty debuggers will have to get dirty to see the wildcards - lazy val storedBinders: Set[Symbol] = - (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders + private lazy val storedBinders: Set[Symbol] = + (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders diff ignoredSubPatBinders // e.g., mutable fields of a case class in ProductExtractorTreeMaker def extraStoredBinders: Set[Symbol] def emitVars = storedBinders.nonEmpty - private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) } + private lazy val (stored, substed) = subPatBinders.lazyZip(subPatRefs).partition{ case (sym, _) => storedBinders(sym) } protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs) else { @@ -199,6 +209,31 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { } } + /** + * Make a TreeMaker that performs null check. + * This is called prior to extractor call. 
+ */ + case class NonNullTestTreeMaker( + prevBinder: Symbol, + expectedTp: Type, + override val pos: Position) extends FunTreeMaker { + override lazy val nextBinder = prevBinder.asTerm // just passing through + val nextBinderTp = nextBinder.info.widen + + val nullCheck = REF(prevBinder) OBJ_NE NULL + lazy val localSubstitution = Substitution(Nil, Nil) + + def skipNullTest = isPrimitiveValueType(expectedTp) || expectedTp.typeSymbol.isDerivedValueClass + + def chainBefore(next: Tree)(casegen: Casegen): Tree = + atPos(pos) { + if (skipNullTest) next + else casegen.ifThenElseZero(nullCheck, next) + } + + override def toString = s"NN(${prevBinder.name})" + } + /** * Make a TreeMaker that will result in an extractor call specified by `extractor` * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing @@ -240,7 +275,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { bindSubPats(substitution(next)) } atPos(extractor.pos)( - if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext) + if (extractorReturnsBoolean) casegen.flatMapCond(extractor, UNIT, nextBinder, condAndNext) else casegen.flatMap(extractor, nextBinder, condAndNext) ) } @@ -274,24 +309,16 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { val subPatBinders: List[Symbol], val subPatRefs: List[Tree], val mutableBinders: List[Symbol], - binderKnownNonNull: Boolean, val ignoredSubPatBinders: Set[Symbol] ) extends FunTreeMaker with PreserveSubPatBinders { - import CODE._ val nextBinder = prevBinder // just passing through // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (scala/bug#5158, scala/bug#6070) def extraStoredBinders: Set[Symbol] = mutableBinders.toSet def chainBefore(next: Tree)(casegen: Casegen): Tree = { - val nullCheck = REF(prevBinder) OBJ_NE NULL - val cond = - if (binderKnownNonNull) extraCond - else (extraCond map (nullCheck AND _) - orElse 
Some(nullCheck)) - - cond match { + extraCond match { case Some(cond) => casegen.ifThenElseZero(cond, bindSubPats(substitution(next))) case _ => @@ -299,26 +326,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { } } - override def toString = "P"+((prevBinder.name, extraCond getOrElse "", localSubstitution)) - } - - object IrrefutableExtractorTreeMaker { - // will an extractor with unapply method of methodtype `tp` always succeed? - // note: this assumes the other side-conditions implied by the extractor are met - // (argument of the right type, length check succeeds for unapplySeq,...) - def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match { - case TypeRef(_, SomeClass, _) => true - // probably not useful since this type won't be inferred nor can it be written down (yet) - case ConstantTrue => true - case _ => false - } - - def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match { - case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) => - Some((extractor, nextBinder)) - case _ => - None - } + override def toString = s"P(${prevBinder.name}, ${extraCond.fold("")(_.toString)}, ${localSubstitution})" } object TypeTestTreeMaker { @@ -327,7 +335,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { trait TypeTestCondStrategy { type Result - def withOuterTest(orig: Result)(testedBinder: Symbol, expectedTp: Type): Result = orig + def withOuterTest(orig: Result)(@unused testedBinder: Symbol, @unused expectedTp: Type): Result = orig // TODO: can probably always widen def typeTest(testedBinder: Symbol, expectedTp: Type): Result def nonNullTest(testedBinder: Symbol): Result @@ -337,23 +345,21 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def tru: Result } - object treeCondStrategy extends TypeTestCondStrategy { import CODE._ + object treeCondStrategy extends TypeTestCondStrategy { type Result = Tree def and(a: Result, b: Result): Result = a AND b - 
def tru = mkTRUE + def tru = TRUE def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp) def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder) def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat override def withOuterTest(orig: Tree)(testedBinder: Symbol, expectedTp: Type): Tree = { - val expectedPrefix = expectedTp.prefix - val testedPrefix = testedBinder.info.prefix - // Check if a type is defined in a static location. Unlike `tp.isStatic` before `flatten`, // this also includes methods and (possibly nested) objects inside of methods. def definedInStaticLocation(tp: Type): Boolean = { + @tailrec def isStatic(tp: Type): Boolean = if (tp == NoType || tp.typeSymbol.isPackageClass || tp == NoPrefix || nme.isReplWrapperName(tp.typeSymbol.name)) true else if (tp.typeSymbol.isModuleClass) isStatic(tp.prefix) @@ -361,20 +367,88 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { tp.typeSymbol.owner == tp.prefix.typeSymbol && isStatic(tp.prefix) } - if ((expectedPrefix eq NoPrefix) - || expectedTp.typeSymbol.isJava - || definedInStaticLocation(expectedTp) - || testedPrefix =:= expectedPrefix) orig - else gen.mkAttributedQualifierIfPossible(expectedPrefix) match { - case None => orig - case Some(expectedOuterRef) => - // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` - // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` - // if there's an outer accessor, otherwise the condition becomes `true` - // TODO: centralize logic whether there's an outer accessor and use here? 
- val synthOuterGetter = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix - val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef - and(orig, outerTest) + // In `def foo(a: b.B) = a match { case _: p.P }` + // testedBinder.symbol.info = b.B + // expectedTp = p.P + + expectedTp.dealias match { + case RefinedType(Nil, _) => orig + case rt@RefinedType(parent :: rest, scope) => + // If the pattern type is refined type, emit outer tests for each component. + withOuterTest(withOuterTest(orig)(testedBinder, parent))(testedBinder, copyRefinedType(rt, rest, scope)) + case expectedTp => + val expectedClass = expectedTp.typeSymbol + // .typeSymbol dealiases, so look at the prefix of the base type at the dealiased symbol, + // not of expectedTp itself. + val expectedPrefix = expectedTp.baseType(expectedClass).prefix + + + // Given `(a: x.B) match { case _: x.P }` where P is subclass of B, is it possible + // that a value conforms to both x.B and x1.P where `x ne x1`? + // + // To answer this, we create a new prefix based on a fresh symbol and check the + // base type of TypeRef(freshPrefix, typePatternSymbol (P), args) at the binder + // symbol (B). If that is prefixed by the fresh symbol, they are statically the + // same. + // + // It is not sufficient to show that x.P is a subtype of x.B, as this + // would incorrectly elide the outer test in: + // + // class P extends p1.B + // def test(b: p1.B) = b match { case _: p1.P } + // test(new p2.P) + def prefixAligns: Boolean = { + expectedTp match { + case TypeRef(pre, _, _) if !pre.isStable => // e.g. 
_: Outer#Inner + false + case TypeRef(pre, sym, args) => + val testedBinderClass = testedBinder.info.baseClasses.find { sym => + sym.isClass && !sym.isRefinementClass + }.getOrElse(NoSymbol) + val testedBinderType = testedBinder.info.baseType(testedBinderClass) + + val testedPrefixIsExpectedTypePrefix = pre =:= testedBinderType.prefix + val testedPrefixAndExpectedPrefixAreStaticallyIdentical: Boolean = { + def check(freshPrefix: Type): Boolean = { + val expectedTpFromFreshPrefix = TypeRef(freshPrefix, sym, args) + val baseTypeFromFreshPrefix = expectedTpFromFreshPrefix.baseType(testedBinderClass) + freshPrefix eq baseTypeFromFreshPrefix.prefix + } + pre match { + case ThisType(thissym) => + check(ThisType(thissym.cloneSymbol(thissym.owner))) + case _ => + pre.termSymbol match { + case NoSymbol => false + case preSym => + val freshPreSym = preSym.cloneSymbol(preSym.owner).setInfo(preSym.info) + check(singleType(pre.prefix, freshPreSym)) + } + } + + } + testedPrefixAndExpectedPrefixAreStaticallyIdentical && testedPrefixIsExpectedTypePrefix + case _ => + false + } + } + + if ((expectedPrefix eq NoPrefix) + || expectedTp.typeSymbol.isJava + || definedInStaticLocation(expectedTp) + || testedBinder.info <:< expectedTp + || prefixAligns) orig + else gen.mkAttributedQualifierIfPossible(expectedPrefix) match { + case None => orig + case Some(expectedOuterRef) => + // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` + // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` + // if there's an outer accessor, otherwise the condition becomes `true` + // TODO: centralize logic whether there's an outer accessor and use here? 
+ val synthOuterGetter = expectedTp.typeSymbol.newMethod(nme.OUTER_SYNTH, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix + val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef + and(orig, outerTest) + } } } } @@ -390,17 +464,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false def tru = true } - - def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy { - type Result = Boolean - - def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder - def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder - def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null - def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null - def and(a: Result, b: Result): Result = a || b - def tru = false - } } /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations) @@ -435,7 +498,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // a `prevBinder` is expected to have type `expectedTp` // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null` - // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false") + // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne` will always yield false") def renderCondition(cs: TypeTestCondStrategy): cs.Result = { import cs._ @@ -477,9 +540,14 @@ trait MatchTreeMaking extends 
MatchCodeGen with Debugging { // - Scala's arrays are invariant (so we don't drop type tests unsoundly) if (extractorArgTypeTest) mkDefault else expectedTp match { - case SingleType(_, sym) => mkEqTest(gen.mkAttributedQualifier(expectedTp)) // scala/bug#4577, scala/bug#4897 - case ThisType(sym) if sym.isModule => and(mkEqualsTest(CODE.REF(sym)), mkTypeTest) // must use == to support e.g. List() == Nil - case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(CODE.NULL)) + case SingleType(_, sym) => + val expected = gen.mkAttributedQualifier(expectedTp) // scala/bug#4577, scala/bug#4897 + if (expectedTp <:< AnyRefTpe) mkEqTest(expected) + else mkEqualsTest(expected) + // Should revisit if we end up lifting `eq`'s definition to `Any`, as discussed here: + // https://groups.google.com/d/msg/scala-internals/jsVlJI4H5OQ/8emZWRmgzcoJ + case ThisType(sym) if sym.isModule => and(mkEqualsTest(REF(sym)), mkTypeTest) // must use == to support e.g. List() == Nil + case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(NULL)) case ConstantType(const) => mkEqualsTest(expTp(Literal(const))) case ThisType(sym) => mkEqTest(expTp(This(sym))) case _ => mkDefault @@ -492,8 +560,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // is this purely a type test, e.g. 
no outer check, no equality tests (used in switch emission) def isPureTypeTest = renderCondition(pureTypeTestChecker) - def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder)) - override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp)) } @@ -521,10 +587,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // one alternative may still generate multiple trees (e.g., an extractor call + equality test) // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers val combinedAlts = altss map (altTreeMakers => - ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen)) + ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE)))(casegen)) ) - val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => mkFALSE)) + val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(_ => FALSE)) codegenAlt.ifThenElseZero(findAltMatcher, substitution(next)) } } @@ -544,88 +610,119 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) + def removeDummy(makers: List[TreeMaker]) = makers filterNot (_ == DummyTreeMaker) // a foldLeft to accumulate the localSubstitution left-to-right // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { var accumSubst: Substitution = initial - treeMakers foreach { maker => + removeDummy(treeMakers) foreach { maker => maker incorporateOuterSubstitution accumSubst accumSubst = maker.substitution } - removeSubstOnly(treeMakers) + 
removeSubstOnly(removeDummy(treeMakers)) } - // calls propagateSubstitution on the treemakers - def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = { - // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them - val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) - combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, selectorPos, owner, matchFailGenOverride) + def getSuppression(scrut: Tree): Suppression = + if (settings.XnoPatmatAnalysis.value) Suppression.FullSuppression + else scrut match { + case Typed(tree, tpt) => + val suppressExhaustive = tpt.tpe.hasAnnotation(UncheckedClass) + val suppressUnreachable = tree match { + // scala/bug#7183 don't warn for withFilter's that turn out to be irrefutable. + case Ident(name) => name.startsWith(nme.CHECK_IF_REFUTABLE_STRING) + case _ => false + } + Suppression(suppressExhaustive, suppressUnreachable) + case _ => Suppression.NoSuppression + } + + def requiresSwitch(scrut: Tree, cases: List[List[TreeMaker]]): Boolean = { + if (settings.XnoPatmatAnalysis.value) false + else scrut match { + case Typed(_, tpt) => + val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe) + // matches with two or fewer cases need not apply for switchiness (if-then-else will do) + // `case 1 | 2` is considered as two cases. 
+ def exceedsTwoCasesOrAlts = { + // avoids traversing the entire list if there are more than 3 elements + def lengthMax3(cases: List[List[TreeMaker]]): Int = cases match { + case _ :: _ :: _ :: _ => 3 + case cases => cases.map { + case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts) + case _ => 1 + }.sum + } + lengthMax3(cases) > 2 + } + hasSwitchAnnotation && exceedsTwoCasesOrAlts + case _ => false + } + } + + // See the use of RegularSwitchMaker by SwitchEmission#emitSwitch, which this code emulates or duplicates. + private object Switchable { + val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe, StringTpe) + + def apply(scrutSym: Symbol, cases: List[List[TreeMaker]]): Boolean = switchableTpe(scrutSym.tpe.dealiasWiden) && { + def switchable(tms: List[TreeMaker]): Boolean = + tms.forall { + case EqualityTestTreeMaker(_, SwitchablePattern(), _) => true + case AlternativesTreeMaker(_, altss, _) => Switchable(scrutSym, altss) + case BodyTreeMaker(_, _) => true + case _ => false + } + cases.forall(switchable) + } + + object SwitchablePattern { + def unapply(pat: Tree): Boolean = pat.tpe match { + case const: ConstantType => const.value.isIntRange || const.value.tag == StringTag || const.value.tag == NullTag + case _ => false + } + } } // pt is the fully defined type of the cases (either pt or the lub of the types of the cases) - def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, - selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = + def combineCases( + scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, + selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree], + suppression: Suppression, + ): Tree = fixerUpper(owner, scrut.pos) { def matchFailGen = matchFailGenOverride orElse Some(Throw(MatchErrorClass.tpe, _: Tree)) - debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", 
"\n", "}"))) + debug.patmat("combining cases: "+ (cases.map(_.mkString(" >> ")).mkString("{", "\n", "}"))) - val (suppression, requireSwitch): (Suppression, Boolean) = - if (settings.XnoPatmatAnalysis) (Suppression.FullSuppression, false) - else scrut match { - case Typed(tree, tpt) => - val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass - val suppressUnreachable = tree match { - case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // scala/bug#7183 don't warn for withFilter's that turn out to be irrefutable. - case _ => false - } - val suppression = Suppression(suppressExhaustive, suppressUnreachable) - val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe) - // matches with two or fewer cases need not apply for switchiness (if-then-else will do) - // `case 1 | 2` is considered as two cases. - def exceedsTwoCasesOrAlts = { - // avoids traversing the entire list if there are more than 3 elements - def lengthMax3[T](l: List[T]): Int = l match { - case a :: b :: c :: _ => 3 - case cases => - cases.map({ - case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts) - case c => 1 - }).sum - } - lengthMax3(casesNoSubstOnly) > 2 - } - val requireSwitch = hasSwitchAnnotation && exceedsTwoCasesOrAlts - (suppression, requireSwitch) - case _ => - (Suppression.NoSuppression, false) - } + emitSwitch(scrut, scrutSym, cases, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse { + if (requiresSwitch(scrut, cases)) + typer.context.warning(scrut.pos, "could not emit switch for @switch annotated match", WarningCategory.OtherMatchAnalysis) - emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse{ - if (requireSwitch) typer.context.warning(scrut.pos, "could not emit switch for @switch annotated match", WarningCategory.OtherMatchAnalysis) + // If cases are switchable, suppress warning for exhaustivity. 
+ // The switch was not emitted, probably because there aren't enough cases. + val suppression1 = + if (Switchable(scrutSym, cases)) suppression.copy(suppressExhaustive = true) + else suppression - if (casesNoSubstOnly nonEmpty) { - // before optimizing, check casesNoSubstOnly for presence of a default case, + if (!cases.isEmpty) { + // before optimizing, check cases for presence of a default case, // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one // exhaustivity and reachability must be checked before optimization as well // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op) // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking - val synthCatchAll = - if (casesNoSubstOnly.nonEmpty && { - val nonTrivLast = casesNoSubstOnly.last - nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker] - }) None - else matchFailGen + val synthCatchAll = cases match { + case _ :+ Seq(_: BodyTreeMaker, _*) => None + case _ => matchFailGen + } - analyzeCases(scrutSym, casesNoSubstOnly, pt, suppression) + analyzeCases(scrutSym, cases, pt, suppression1) - val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, selectorPos) + val (optimizedCases, toHoist) = optimizeCases(scrutSym, cases, pt, selectorPos) - val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll) + val matchRes = codegen.matcher(scrut, scrutSym, pt)(optimizedCases map combineExtractors, synthCatchAll) - if (toHoist isEmpty) matchRes else Block(toHoist, matchRes) + if (toHoist.isEmpty) matchRes else Block(toHoist, matchRes) } else { codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen) } @@ -635,10 +732,10 @@ trait MatchTreeMaking extends 
MatchCodeGen with Debugging { // TODO: do this during tree construction, but that will require tracking the current owner in treemakers // TODO: assign more fine-grained positions // fixes symbol nesting, assigns positions - protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser { + protected def fixerUpper(origOwner: Symbol, pos: Position) = new InternalTraverser { currentOwner = origOwner - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { if (t != EmptyTree && t.pos == NoPosition) { t.setPos(pos) } @@ -655,10 +752,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { d.symbol.moduleClass andAlso (_.owner = currentOwner) d.symbol.owner = currentOwner // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) => - debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))) + // debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))) case _ => } - super.traverse(t) + t.traverse(this) } // override def apply diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala index 3aa53ba38599..439bbdeca938 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -52,26 +52,25 @@ trait MatchWarnings { // However this is a pain (at least the way I'm going about it) // and I have to think these detailed errors are primarily useful // for beginners, not people writing nested pattern matches. 
- def checkMatchVariablePatterns(cases: List[CaseDef]) { + def checkMatchVariablePatterns(cases: List[CaseDef]): Unit = { // A string describing the first variable pattern var vpat: String = null // Using an iterator so we can recognize the last case val it = cases.iterator - def addendum(pat: Tree) = { + def addendum(pat: Tree) = matchingSymbolInScope(pat) match { case NoSymbol => "" case sym => - val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in" + val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else s"$sym in" s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>" } - } while (it.hasNext) { val cdef = it.next() // If a default case has been seen, then every succeeding case is unreachable. if (vpat != null) - typer.context.warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat), WarningCategory.OtherMatchAnalysis) // TODO: make configurable whether this is an error + typer.context.warning(cdef.body.pos, s"unreachable code due to $vpat${addendum(cdef.pat)}", WarningCategory.OtherMatchAnalysis) // TODO: make configurable whether this is an error // If this is a default case and more cases follow, warn about this one so // we have a reason to mention its pattern variable name and any corresponding // symbol in scope. Errors will follow from the remaining cases, at least diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index 6985aa283833..472231e7ca0d 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,9 +16,10 @@ package nsc package transform package patmat -import scala.tools.nsc.typechecker.Contexts import scala.reflect.internal.util import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.typechecker.Contexts +import scala.util.chaining._ /** An 'extractor' can be a case class or an unapply or unapplySeq method. * @@ -121,15 +122,13 @@ trait PatternExpansion { else tps.map(_.substSym(List(unapplySelector), List(extractedBinder))) val withoutStar = productTypes ::: List.fill(elementArity)(elementType) - replaceUnapplySelector(if (isStar) withoutStar :+ sequenceType else withoutStar) + replaceUnapplySelector(if (isStar) withoutStar :+ seqType(elementType) else withoutStar) } - def lengthCompareSym = sequenceType member nme.lengthCompare - // rest is private private val isUnapply = fun.symbol.name == nme.unapply private val isUnapplySeq = fun.symbol.name == nme.unapplySeq - private def isBooleanUnapply = isUnapply && unapplyResultType() =:= BooleanTpe + private def isBooleanUnapply = isUnapply && unapplyResultType().typeSymbol == definitions.BooleanClass private def isRepeatedCaseClass = caseCtorParamTypes.exists(tpes => tpes.nonEmpty && isScalaRepeatedParamType(tpes.last)) private def caseCtorParamTypes: Option[List[Type]] = @@ -165,7 +164,7 @@ trait PatternExpansion { val res = resultOfGetInMonad() // Can't only check for _1 thanks to pos/t796. if (res.hasNonPrivateMember(nme._1) && res.hasNonPrivateMember(nme._2)) - Some(Stream.from(1).map(n => res.nonPrivateMember(newTermName("_" + n))). + Some(LazyList.from(1).map(n => res.nonPrivateMember(newTermName("_" + n))). takeWhile(m => m.isMethod && m.paramLists.isEmpty).toList.map(m => res.memberType(m).resultType)) else None } @@ -187,7 +186,6 @@ trait PatternExpansion { // scala/bug#9029 A pattern with arity-1 that doesn't match the arity of // the Product-like result of the `get` method, will match that result in its entirety. 
// - // ``` // warning: there was one deprecation warning; re-run with -deprecation for details // scala> object Extractor { def unapply(a: Any): Option[(Int, String)] = Some((1, "2")) } // defined object Extractor @@ -196,32 +194,29 @@ trait PatternExpansion { // // scala> "" match { case Extractor(xy : (Int, String)) => } // warning: there was one deprecation warning; re-run with -deprecation for details - // ``` else if (totalArity == 1 && equivConstrParamTypes.tail.nonEmpty) { warnPatternTupling() (if (tupleValuedUnapply) tupleType(equivConstrParamTypes) else resultOfGetInMonad()) :: Nil } else equivConstrParamTypes - private def notRepeated = (NoType, NoType, NoType) - private val (elementType, sequenceType, repeatedType) = + private def notRepeated = (NoType, NoType) + private val (elementType, repeatedType) = // case class C() is deprecated, but still need to defend against equivConstrParamTypes.isEmpty if (isUnapply || equivConstrParamTypes.isEmpty) notRepeated else { val lastParamTp = equivConstrParamTypes.last - if (isUnapplySeq) { - val elementTp = - elementTypeFromHead(lastParamTp) orElse - elementTypeFromApply(lastParamTp) orElse - definitions.elementType(ArrayClass, lastParamTp) - - (elementTp, lastParamTp, scalaRepeatedType(elementTp)) - } else { + if (isUnapplySeq) + elementTypeFromApply(lastParamTp) match { + case NoType => notRepeated.tap(_ => + err(s"${unapplyResultType()} is not a valid result type of an unapplySeq method of an extractor.")) + case elementTp => (elementTp, scalaRepeatedType(elementTp)) + } + else definitions.elementType(RepeatedParamClass, lastParamTp) match { - case NoType => notRepeated - case elementTp => (elementTp, seqType(elementTp), lastParamTp) + case NoType => notRepeated + case elementTp => (elementTp, lastParamTp) } - } } // errors & warnings @@ -241,12 +236,12 @@ trait PatternExpansion { } private def arityError(mismatch: String) = { - val isErroneous = (productTypes contains NoType) && !(isSeq && (sequenceType ne 
NoType)) + val isErroneous = (productTypes contains NoType) && !(isSeq && (elementType ne NoType)) val offeringString = if (isErroneous) "" else productTypes match { case tps if isSeq => (tps.map(_.toString) :+ s"${elementType}*").mkString("(", ", ", ")") case Nil => "Boolean" - case tp :: Nil => tp + case tp :: Nil => tp.toString case tps => tps.mkString("(", ", ", ")") } val offerString = if (isErroneous) "" else s" offering $offeringString" @@ -256,7 +251,11 @@ trait PatternExpansion { // emit error/warning on mismatch if (isStar && !isSeq) err("Star pattern must correspond with varargs or unapplySeq") - else if (equivConstrParamTypes == List(NoType)) err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") + else if (equivConstrParamTypes == List(NoType)) + if (unapplyResultType().isNothing) + err(s"${fun.symbol.owner} can't be used as an extractor: The result type of an ${fun.symbol.name} method may not be Nothing") + else + err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") else if (elementArity < 0) arityError("not enough") else if (elementArity > 0 && !isSeq) arityError("too many") else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn( diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 60904b4144e3..cfc92ec8c4cc 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,16 +12,15 @@ package scala.tools.nsc.transform.patmat +import scala.annotation.tailrec +import scala.collection.mutable import scala.collection.mutable.ListBuffer +import scala.reflect.internal.{Mode, Types} +import scala.reflect.internal.util.{SourceFile, Statistics} import scala.tools.nsc.Global import scala.tools.nsc.ast -import scala.language.postfixOps +import scala.tools.nsc.transform.{Transform, TypingTransformers} import scala.tools.nsc.Reporting.WarningCategory -import scala.tools.nsc.transform.TypingTransformers -import scala.tools.nsc.transform.Transform -import scala.reflect.internal.util.Statistics -import scala.reflect.internal.{Mode, Types} -import scala.reflect.internal.util.Position /** Translate pattern matching. * @@ -60,61 +59,67 @@ trait PatternMatching extends Transform val phaseName: String = "patmat" - def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit) + /** Symbols to force for determining children of sealed Java classes. */ + val javaClassesByUnit = perRunCaches.newMap[SourceFile, mutable.Set[Symbol]]() + + def newTransformer(unit: CompilationUnit): AstTransformer = new MatchTransformer(unit) class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { - private var inAsync = false + private var inForceDesugar = false override def transform(tree: Tree): Tree = tree match { - case dd: DefDef if async.hasAsyncAttachment(dd) => - val wasInAsync = inAsync + case dd: DefDef if dd.hasAttachment[ForceMatchDesugar.type] || dd.symbol.hasAttachment[ForceMatchDesugar.type] => + val wasInForceDesugar = inForceDesugar try { - inAsync = true + inForceDesugar = true super.transform(dd) } finally - inAsync = wasInAsync + inForceDesugar = wasInForceDesugar + + case CaseDef(UnApply(Apply(Select(qual, nme.unapply), Ident(nme.SELECTOR_DUMMY) :: Nil), (bind@Bind(name, Ident(nme.WILDCARD))) :: Nil), guard, body) + if guard.isEmpty && qual.symbol == definitions.NonFatalModule => + 
transform(treeCopy.CaseDef( + tree, + treeCopy.Bind(bind, name, Typed(Ident(nme.WILDCARD), TypeTree(definitions.ThrowableTpe)).setType(definitions.ThrowableTpe)), + localTyper.typed(atPos(tree.pos)(Apply(gen.mkAttributedRef(definitions.NonFatal_apply), List(Ident(bind.symbol))))), + body)) case Match(sel, cases) => val origTp = tree.tpe + // setType origTp intended for CPS -- TODO: is it necessary? - val translated = translator(sel.pos).translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]])) + val translated = translator(sel.pos).translateMatch(treeCopy.Match(tree, transform(sel), transformCaseDefs(cases))) try { // Keep 2.12 behaviour of using wildcard expected type, recomputing the LUB, then throwing it away for the continuations plugins // but for the rest of us pass in top as the expected type to avoid waste. val pt = if (origTp <:< definitions.AnyTpe) definitions.AnyTpe else WildcardType localTyper.typed(translated, pt) match { - case b @ Block(stats, m: Match) => + case b @ Block(_, m: Match) => b.setType(origTp) m.setType(origTp) b - case tree => tree setType origTp + case t => t.setType(origTp) } } catch { - case x: (Types#TypeError) => + case x: Types#TypeError => // TODO: this should never happen; error should've been reported during type checking - reporter.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg) + reporter.error(tree.pos, s"error during expansion of this match (this is a scalac bug).\nThe underlying error was: ${x.msg}") translated } case Try(block, catches, finalizer) => val selectorPos = catches.headOption.getOrElse(EmptyTree).orElse(finalizer).pos.focusEnd - treeCopy.Try(tree, transform(block), translator(selectorPos).translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer)) + treeCopy.Try(tree, transform(block), translator(selectorPos).translateTry(transformCaseDefs(catches), 
tree.tpe, tree.pos), transform(finalizer)) case _ => super.transform(tree) } - // TODO: only instantiate new match translator when localTyper has changed - // override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A - // as this is the only time TypingTransformer changes it def translator(selectorPos: Position): MatchTranslator with CodegenCore = { - new OptimizingMatchTranslator(localTyper, selectorPos, inAsync) + new OptimizingMatchTranslator(localTyper, selectorPos, inForceDesugar) } - } - class PureMatchTranslator(val typer: analyzer.Typer, val matchStrategy: Tree, val selectorPos: Position) extends MatchTranslator with PureCodegen { - def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position) = (cases, Nil) - def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {} } - class OptimizingMatchTranslator(val typer: analyzer.Typer, val selectorPos: Position, val inAsync: Boolean) + + class OptimizingMatchTranslator(val typer: analyzer.Typer, val selectorPos: Position, val inForceDesugar: Boolean) extends MatchTranslator with MatchOptimizer with MatchAnalyzer @@ -139,78 +144,20 @@ trait Interface extends ast.TreeDSL { import global._ import analyzer.Typer - // 2.10/2.11 compatibility - protected final def dealiasWiden(tp: Type) = tp.dealiasWiden - protected final def mkTRUE = CODE.TRUE - protected final def mkFALSE = CODE.FALSE - protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable - - object vpmName { - val one = newTermName("one") - val flatMap = newTermName("flatMap") - val get = newTermName("get") - val guard = newTermName("guard") - val isEmpty = newTermName("isEmpty") - val orElse = newTermName("orElse") - val outer = newTermName("") - val runOrElse = newTermName("runOrElse") - val zero = newTermName("zero") - val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern 
matching... - - def counted(str: String, i: Int) = newTermName(str + i) - } - -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// talking to userland -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - /** Interface with user-defined match monad? - * if there's a __match in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below: - - {{{ - type Matcher[P[_], M[+_], A] = { - def flatMap[B](f: P[A] => M[B]): M[B] - def orElse[B >: A](alternative: => M[B]): M[B] - } - - abstract class MatchStrategy[P[_], M[+_]] { - // runs the matcher on the given input - def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U] - - def zero: M[Nothing] - def one[T](x: P[T]): M[T] - def guard[T](cond: P[Boolean], then: => P[T]): M[T] - } - }}} - - * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`) - - - * if no __match is found, we assume the following implementation (and generate optimized code accordingly) - - {{{ - object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] { - def zero = None - def one[T](x: T) = Some(x) - // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted - def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None - def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x)) - } - }}} - - */ trait MatchMonadInterface { val typer: Typer val matchOwner = typer.context.owner - def pureType(tp: Type): Type = tp def reportUnreachable(pos: Position) = typer.context.warning(pos, "unreachable code", WarningCategory.OtherMatchAnalysis) def reportMissingCases(pos: Position, counterExamples: List[String]) = { - val ceString = - if 
(counterExamples.tail.isEmpty) "input: " + counterExamples.head - else "inputs: " + counterExamples.mkString(", ") + val ceString = counterExamples match { + case Nil => "" // never occurs, but not carried in the type + case "_" :: Nil => "" + case ex :: Nil => s"\nIt would fail on the following input: $ex" + case exs => s"\nIt would fail on the following inputs: ${exs.mkString(", ")}" + } - typer.context.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString, WarningCategory.OtherMatchAnalysis) + typer.context.warning(pos, s"match may not be exhaustive.$ceString", WarningCategory.OtherMatchAnalysis) } } @@ -223,11 +170,11 @@ trait Interface extends ast.TreeDSL { def apply(from: Symbol, to: Tree): Substitution = new Substitution(from :: Nil, to :: Nil) // requires sameLength(from, to) def apply(from: List[Symbol], to: List[Tree]): Substitution = - if (from nonEmpty) new Substitution(from, to) else EmptySubstitution + if (from.isEmpty) EmptySubstitution else new Substitution(from, to) } class Substitution(val from: List[Symbol], val to: List[Tree]) { - import global.{Transformer, Ident, NoType, TypeTree, SingleType} + import global.{AstTransformer, Ident, NoType, TypeTree, SingleType} private def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode) lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe) @@ -238,22 +185,19 @@ trait Interface extends ast.TreeDSL { // according to -Ystatistics 10% of translateMatch's time is spent in this method... 
// since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst - val checkType = new TypeCollector[Boolean](false) { - def traverse(tp: Type) { + val checkType = new TypeCollector[Boolean](initial = false) { + override def apply(tp: Type): Unit = if (!result) { tp match { - case SingleType(_, sym) => - if (from contains sym) { - global.devWarningIf(to.exists(!_.isInstanceOf[Ident])) { - s"Unexpected substitution of non-Ident into TypeTree, subst= $this" - } - result = true + case SingleType(_, sym) if from contains sym => + global.devWarningIf(to.exists(!_.isInstanceOf[Ident])) { + s"Unexpected substitution of non-Ident into TypeTree, subst= $this" } + result = true case _ => + tp.foldOver(this) } - mapOver(tp) } - } } val containsSym = tree.exists { case i@Ident(_) => from contains i.symbol @@ -263,7 +207,7 @@ trait Interface extends ast.TreeDSL { case _ => false } - object substIdentsForTrees extends Transformer { + object substIdentsForTrees extends AstTransformer { private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = if (origTp == null || origTp == NoType) to // important: only type when actually substituting and when original tree was typed @@ -272,6 +216,7 @@ trait Interface extends ast.TreeDSL { override def transform(tree: Tree): Tree = { + @tailrec def subst(from: List[Symbol], to: List[Tree]): Tree = if (from.isEmpty) tree else if (tree.symbol == from.head) typedIfOrigTyped(typedStable(to.head).setPos(tree.pos), tree.tpe) @@ -279,7 +224,7 @@ trait Interface extends ast.TreeDSL { val tree1 = tree match { case Ident(_) => subst(from, to) - case _ => super.transform(tree) + case _ => tree.transform(this) } tree1 match { case _: DefTree => @@ -313,7 +258,7 @@ trait Interface extends ast.TreeDSL { } new Substitution(newFrom.prependToList(other.from), newTo.prependToList(other.to.mapConserve(apply))) } - override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")") + 
override def toString = from.map(_.name).zip(to).mkString("Substitution(", ", ", ")") } object EmptySubstitution extends Substitution(Nil, Nil) { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index ba82f14d1063..9dbd871030a0 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,37 +12,36 @@ package scala.tools.nsc.transform.patmat -import scala.collection.mutable.ArrayBuffer -import scala.collection.{immutable,mutable} +import scala.annotation.{tailrec, unused} +import scala.collection.{immutable, mutable}, mutable.ArrayBuffer -// a literal is a (possibly negated) variable -class Lit(val v: Int) extends AnyVal { - def unary_- : Lit = Lit(-v) - - def variable: Int = Math.abs(v) - - def positive = v >= 0 - - override def toString(): String = s"Lit#$v" -} - -object Lit { - def apply(v: Int): Lit = new Lit(v) - - implicit val LitOrdering: Ordering[Lit] = Ordering.by(_.v) -} - -/** Solve pattern matcher exhaustivity problem via DPLL. - */ +/** Solve pattern matcher exhaustivity problem via DPLL. 
*/ trait Solving extends Logic { import global._ trait CNF extends PropositionalLogic { + // a literal is a (possibly negated) variable + type Lit <: LitApi + trait LitApi { + def unary_- : Lit + } + + def Lit: LitModule + trait LitModule { + def apply(v: Int): Lit + } + + type Clause = Set[Lit] - type Clause = Set[Lit] + val NoClauses: Array[Clause] = Array() + val ArrayOfFalse: Array[Clause] = Array(clause()) // a clause is a disjunction of distinct literals - def clause(l: Lit*): Clause = l.toSet + def clause(): Clause = Set.empty + def clause(l: Lit): Clause = Set.empty + l + def clause(l: Lit, l2: Lit): Clause = Set.empty + l + l2 + def clause(l: Lit, l2: Lit, ls: Lit*): Clause = Set.empty + l + l2 ++ ls + def clause(ls: IterableOnce[Lit]): Clause = Set.from(ls) /** Conjunctive normal form (of a Boolean formula). * A formula in this form is amenable to a SAT solver @@ -50,17 +49,16 @@ trait Solving extends Logic { */ type Cnf = Array[Clause] - class SymbolMapping(symbols: Set[Sym]) { + class SymbolMapping(symbols: collection.Set[Sym]) { val variableForSymbol: Map[Sym, Int] = { - symbols.zipWithIndex.map { + symbols.iterator.zipWithIndex.map { case (sym, i) => sym -> (i + 1) }.toMap } val symForVar: Map[Int, Sym] = variableForSymbol.map(_.swap) - val relevantVars: immutable.BitSet = - symForVar.keySet.map(math.abs)(collection.breakOut) + val relevantVars = symForVar.keysIterator.map(math.abs).to(immutable.BitSet) def lit(sym: Sym): Lit = Lit(variableForSymbol(sym)) @@ -71,7 +69,8 @@ trait Solving extends Logic { final case class Solvable(cnf: Cnf, symbolMapping: SymbolMapping) { def ++(other: Solvable) = { - require(this.symbolMapping eq other.symbolMapping) + require(this.symbolMapping eq other.symbolMapping, + "this and other must have the same symbol mapping (same reference)") Solvable(cnf ++ other.cnf, symbolMapping) } @@ -108,7 +107,7 @@ trait Solving extends Logic { def isConst(l: Lit): Boolean = l == constTrue || l == constFalse - def 
addClauseProcessed(clause: Clause) { + def addClauseProcessed(clause: Clause): Unit = { if (clause.nonEmpty) { buff += clause } @@ -155,27 +154,18 @@ trait Solving extends Logic { def convert(p: Prop): Option[Lit] = { p match { - case And(fv) => - Some(and(fv.flatMap(convert))) - case Or(fv) => - Some(or(fv.flatMap(convert))) - case Not(a) => - convert(a).map(not) - case sym: Sym => - Some(convertSym(sym)) - case True => - Some(constTrue) - case False => - Some(constFalse) - case AtMostOne(ops) => - atMostOne(ops) - None - case _: Eq => - throw new MatchError(p) + case And(fv) => Some(and(fv.flatMap(convert))) + case Or(fv) => Some(or(fv.flatMap(convert))) + case Not(a) => convert(a).map(not) + case sym: Sym => Some(convertSym(sym)) + case True => Some(constTrue) + case False => Some(constFalse) + case AtMostOne(ops) => atMostOne(ops) ; None + case _: Eq => throw new MatchError(p) } } - def and(bv: Set[Lit]): Lit = { + def and(bv: collection.Set[Lit]): Lit = { if (bv.isEmpty) { // this case can actually happen because `removeVarEq` could add no constraints constTrue @@ -187,14 +177,14 @@ trait Solving extends Logic { // op1 /\ op2 /\ ... /\ opx <==> // (o -> op1) /\ (o -> op2) ... (o -> opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o) // (!o \/ op1) /\ (!o \/ op2) ... (!o \/ opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o) - val new_bv = bv - constTrue // ignore `True` + val new_bv = bv.toSet - constTrue // ignore `True` val o = newLiteral() // auxiliary Tseitin variable new_bv.foreach(op => addClauseProcessed(clause(op, -o))) o } } - def or(bv: Set[Lit]): Lit = { + def or(bv: collection.Set[Lit]): Lit = { if (bv.isEmpty) { constFalse } else if (bv.size == 1) { @@ -205,7 +195,7 @@ trait Solving extends Logic { // op1 \/ op2 \/ ... \/ opx <==> // (op1 -> o) /\ (op2 -> o) ... (opx -> o) /\ (op1 \/ op2 \/... \/ opx \/ !o) // (!op1 \/ o) /\ (!op2 \/ o) ... (!opx \/ o) /\ (op1 \/ op2 \/... 
\/ opx \/ !o) - val new_bv = bv - constFalse // ignore `False` + val new_bv = bv.toSet - constFalse // ignore `False` val o = newLiteral() // auxiliary Tseitin variable addClauseProcessed(new_bv + (-o)) o @@ -215,13 +205,13 @@ trait Solving extends Logic { // no need for auxiliary variable def not(a: Lit): Lit = -a - /** + /* * This encoding adds 3n-4 variables auxiliary variables * to encode that at most 1 symbol can be set. * See also "Towards an Optimal CNF Encoding of Boolean Cardinality Constraints" * http://www.carstensinz.de/papers/CP-2005.pdf */ - def atMostOne(ops: List[Sym]) { + def atMostOne(ops: List[Sym]): Unit = { (ops: @unchecked) match { case hd :: Nil => convertSym(hd) case x1 :: tail => @@ -233,7 +223,7 @@ trait Solving extends Logic { @inline def /\(a: Lit, b: Lit) = addClauseProcessed(clause(a, b)) - val (mid, xn :: Nil) = tail.splitAt(tail.size - 1) + val (mid, xn :: Nil) = tail.splitAt(tail.size - 1): @unchecked // 1 <= x1,...,xn <==> // @@ -291,8 +281,8 @@ trait Solving extends Logic { None } cl.map(Array(_)) - case True => Some(Array()) // empty, no clauses needed - case False => Some(Array(clause())) // empty clause can't be satisfied + case True => Some(NoClauses) // empty, no clauses needed + case False => Some(ArrayOfFalse) // empty clause can't be satisfied case ToLiteral(lit) => Some(Array(clause(lit))) case _ => None } @@ -349,31 +339,44 @@ trait Solving extends Logic { val cnfExtractor = new AlreadyInCNF(symbolMapping) val cnfTransformer = new TransformToCnf(symbolMapping) - def cnfFor(prop: Prop): Solvable = { + def cnfFor(prop: Prop): Solvable = prop match { - case cnfExtractor.ToCnf(solvable) => - // this is needed because t6942 would generate too many clauses with Tseitin - // already in CNF, just add clauses - solvable - case p => - cnfTransformer.apply(p) + // this is needed because t6942 would generate too many clauses with Tseitin + // already in CNF, just add clauses + case cnfExtractor.ToCnf(solvable) => solvable + case 
prop => cnfTransformer.apply(prop) } - } simplified match { case And(props) => // scala/bug#6942: // CNF(P1 /\ ... /\ PN) == CNF(P1) ++ CNF(...) ++ CNF(PN) - props.iterator.map(cnfFor).reduce(_ ++ _) - case p => - cnfFor(p) + val cnfs = new Array[Solvable](props.size) + @unused val copied = props.iterator.map(x => cnfFor(x)).copyToArray(cnfs) + //assert(copied == cnfs.length, "") + new Solvable(cnfs.flatten[Clause](_.cnf, reflect.classTag[Clause]), cnfs.head.symbolMapping) + case simplified => cnfFor(simplified) } } } // simple solver using DPLL + // adapted from https://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat) trait Solver extends CNF { - import scala.collection.mutable.ArrayBuffer + case class Lit(v: Int) extends LitApi { + private lazy val negated: Lit = Lit(-v) + + def unary_- : Lit = negated + def variable: Int = Math.abs(v) + def positive: Boolean = v >= 0 + + override def toString = s"Lit#$v" + override def hashCode = v + } + + object Lit extends LitModule { + def apply(v: Int): Lit = new Lit(v) + } def cnfString(f: Array[Clause]): String = { val lits: Array[List[String]] = f map (_.map(_.toString).toList) @@ -382,8 +385,6 @@ trait Solving extends Logic { aligned } - // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat) - // empty set of clauses is trivially satisfied val EmptyModel = Map.empty[Sym, Boolean] @@ -393,57 +394,60 @@ trait Solving extends Logic { val NoModel: Model = null // this model contains the auxiliary variables as well - type TseitinModel = Set[Lit] - val EmptyTseitinModel = Set.empty[Lit] + type TseitinModel = List[Lit] val NoTseitinModel: TseitinModel = null // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??) 
def findAllModelsFor(solvable: Solvable, owner: Symbol): List[Solution] = { - debug.patmat("find all models for\n"+ cnfString(solvable.cnf)) + import solvable.{ cnf, symbolMapping }, symbolMapping.{ symForVar, relevantVars } + debug.patmat(s"find all models for\n${cnfString(cnf)}") // we must take all vars from non simplified formula // otherwise if we get `T` as formula, we don't expand the variables // that are not in the formula... - val relevantVars: immutable.BitSet = solvable.symbolMapping.relevantVars // debug.patmat("vars "+ vars) // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True) // (i.e. the blocking clause - used for ALL-SAT) - def negateModel(m: TseitinModel) = { + def negateModel(m: TseitinModel): TseitinModel = { // filter out auxiliary Tseitin variables - val relevantLits = m.filter(l => relevantVars.contains(l.variable)) - relevantLits.map(lit => -lit) + m.filter(lit => relevantVars.contains(lit.variable)).map(lit => -lit) } - final case class TseitinSolution(model: TseitinModel, unassigned: List[Int]) { - def projectToSolution(symForVar: Map[Int, Sym]) = Solution(projectToModel(model, symForVar), unassigned map symForVar) + def newSolution(model: TseitinModel, unassigned: List[Int]): Solution = { + val newModel: Model = if (model eq NoTseitinModel) NoModel else { + model.iterator.collect { + case lit if symForVar.isDefinedAt(lit.variable) => (symForVar(lit.variable), lit.positive) + }.to(scala.collection.immutable.ListMap) + } + Solution(newModel, unassigned.map(symForVar)) } + @tailrec def findAllModels(clauses: Array[Clause], - models: List[TseitinSolution], - recursionDepthAllowed: Int = AnalysisBudget.maxDPLLdepth): List[TseitinSolution]= + models: List[Solution], + recursionDepthAllowed: Int = AnalysisBudget.maxDPLLdepth): List[Solution] = { if (recursionDepthAllowed == 0) { uncheckedWarning(owner.pos, AnalysisBudget.recursionDepthReached, owner) models } else { - debug.patmat("find 
all models for\n" + cnfString(clauses)) + debug.patmat(s"find all models for\n${cnfString(clauses)}") val model = findTseitinModelFor(clauses) // if we found a solution, conjunct the formula with the model's negation and recurse - if (model ne NoTseitinModel) { + if (model eq NoTseitinModel) models else { // note that we should not expand the auxiliary variables (from Tseitin transformation) // since they are existentially quantified in the final solution - val unassigned: List[Int] = (relevantVars -- model.map(lit => lit.variable)).toList - debug.patmat("unassigned "+ unassigned +" in "+ model) + val unassigned: List[Int] = relevantVars.filterNot(x => model.exists(lit => x == lit.variable)).toList.sorted + debug.patmat(s"unassigned $unassigned in $model") - val solution = TseitinSolution(model, unassigned) - val negated = negateModel(model) + val solution = newSolution(model, unassigned) + val negated = negateModel(model).to(scala.collection.immutable.ListSet) findAllModels(clauses :+ negated, solution :: models, recursionDepthAllowed - 1) } - else models } + } - val tseitinSolutions = findAllModels(solvable.cnf, Nil) - tseitinSolutions.map(_.projectToSolution(solvable.symbolMapping.symForVar)) + findAllModels(solvable.cnf, Nil) } /** Drop trivially true clauses, simplify others by dropping negation of `unitLit`. @@ -452,56 +456,43 @@ trait Solving extends Logic { * Clauses can be simplified by dropping the negation of the literal we're making true * (since False \/ X == X) */ - private def dropUnit(clauses: Array[Clause], unitLit: Lit): Array[Clause] = { + private def dropUnit(clauses: Array[Clause], unitLit: Lit): Unit = { val negated = -unitLit - - // Avoid value class boxing inside the loop. Ugly, I know, but this alone contributes 3% of allocations - // in some builds! 
- val unitLitBoxed = (unitLit: Any) - val negatedBoxed = (negated: Any) - - val simplified = new ArrayBuffer[Clause](clauses.length) - var changed = false - clauses foreach { - case trivial if trivial.asInstanceOf[Set[Any]].contains(unitLitBoxed) => - changed = true - // drop - case clause => - val withoutNegated = (clause.asInstanceOf[Set[Any]] - negatedBoxed).asInstanceOf[Clause] - changed ||= withoutNegated ne clause - simplified += withoutNegated + var i, j = 0 + while (i < clauses.length) { + val clause = clauses(i) + if (clause == null) return + clauses(i) = null + if (!clause.contains(unitLit)) { + clauses(j) = clause.excl(negated) + j += 1 + } + i += 1 } - if (changed) - simplified.toArray - else - clauses } - def findModelFor(solvable: Solvable): Model = { - projectToModel(findTseitinModelFor(solvable.cnf), solvable.symbolMapping.symForVar) - } + def hasModel(solvable: Solvable): Boolean = findTseitinModelFor(solvable.cnf) != NoTseitinModel def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { - debug.patmat(s"DPLL\n${cnfString(clauses)}") - val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null - val satisfiableWithModel = findTseitinModel0((clauses, Set.empty[Lit]) :: Nil) + debug.patmat(s"DPLL\n${cnfString(clauses)}") + val satisfiableWithModel = findTseitinModel0((java.util.Arrays.copyOf(clauses, clauses.length), Nil) :: Nil) if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } - type TseitinSearch = List[(Array[Clause], Set[Lit])] + type TseitinSearch = List[(Array[Clause], List[Lit])] - /** An implementation of the DPLL algorithm for checking statisfiability + /** An implementation of the DPLL algorithm for checking satisfiability * of a Boolean formula in CNF (conjunctive normal form). 
* * This is a backtracking, depth-first algorithm, which searches a * (conceptual) decision tree the nodes of which represent assignments * of truth values to variables. The algorithm works like so: * - * - If there are any empty clauses, the formula is unsatisifable. + * - If there are any empty clauses, the formula is unsatisfiable. * - If there are no clauses, the formula is trivially satisfiable. * - If there is a clause with a single positive (rsp. negated) variable * in it, any solution must assign it the value `true` (rsp. `false`). @@ -520,65 +511,92 @@ trait Solving extends Logic { * * See also [[https://en.wikipedia.org/wiki/DPLL_algorithm]]. * - * This implementation uses a `List` to reify the seach stack, thus making + * This implementation uses a `List` to reify the search stack, thus making * it run in constant stack space. The stack is composed of pairs of * `(remaining clauses, variable assignments)`, and depth-first search * is achieved by using a stack rather than a queue. 
* */ - @annotation.tailrec private def findTseitinModel0(state: TseitinSearch): TseitinModel = { - state match { + val pos = new java.util.BitSet() + val neg = new java.util.BitSet() + @tailrec def loop(state: TseitinSearch): TseitinModel = state match { case Nil => NoTseitinModel case (clauses, assignments) :: rest => - if (clauses.isEmpty) assignments - else if (clauses exists (_.isEmpty)) findTseitinModel0(rest) - else clauses.find(_.size == 1) match { - case Some(unitClause) => - val unitLit = unitClause.head - findTseitinModel0((dropUnit(clauses, unitLit), assignments + unitLit) :: rest) - case _ => + if (clauses.isEmpty || clauses.head == null) assignments + else { + var i = 0 + var emptyIndex = -1 + var unitIndex = -1 + while (i < clauses.length && emptyIndex == -1) { + val clause = clauses(i) + if (clause != null) { + clause.size match { + case 0 => emptyIndex = i + case 1 if unitIndex == -1 => + unitIndex = i + case _ => + } + } + i += 1 + } + if (emptyIndex != -1) + loop(rest) + else if (unitIndex != -1) { + val unitLit = clauses(unitIndex).head + dropUnit(clauses, unitLit) + val tuples: TseitinSearch = (clauses, unitLit :: assignments) :: rest + loop(tuples) + } else { // partition symbols according to whether they appear in positive and/or negative literals - val pos = new mutable.BitSet() - val neg = new mutable.BitSet() - mforeach(clauses)(lit => if (lit.positive) pos += lit.variable else neg += lit.variable) + pos.clear() + neg.clear() + for (clause <- clauses) { + if (clause != null) { + clause.foreach { lit: Lit => + if (lit.positive) pos.set(lit.variable) else neg.set(lit.variable) + } + } + } // appearing only in either positive/negative positions - val pures = pos ^ neg - if (pures.nonEmpty) { - val pureVar = pures.head + pos.xor(neg) + val pures = pos + + if (!pures.isEmpty) { + val pureVar = pures.nextSetBit(0) // turn it back into a literal // (since equality on literals is in terms of equality // of the underlying symbol and its positivity, 
simply construct a new Lit) - val pureLit = Lit(if (neg(pureVar)) -pureVar else pureVar) + val pureLit: Lit = Lit(if (neg.get(pureVar)) -pureVar else pureVar) // debug.patmat("pure: "+ pureLit +" pures: "+ pures) - val simplified = clauses.filterNot(_.contains(pureLit)) - findTseitinModel0((simplified, assignments + pureLit) :: rest) + val simplified = clauses.filterNot(clause => clause != null && clause.contains(pureLit)) + loop((simplified, pureLit :: assignments) :: rest) } else { - val split = clauses.head.head + val split = clauses.find(_ != null).get.head // debug.patmat("split: "+ split) - val pos = (clauses :+ clause(split), assignments) - val neg = (clauses :+ clause(-split), assignments) - findTseitinModel0(pos :: neg :: rest) + var i = 0 + var nullIndex = -1 + while (i < clauses.length && nullIndex == -1) { + if (clauses(i) eq null) nullIndex = i + i += 1 + } + + val effectiveLength = if (nullIndex == -1) clauses.length else nullIndex + val posClauses = java.util.Arrays.copyOf(clauses, effectiveLength + 1) + val negClauses = java.util.Arrays.copyOf(clauses, effectiveLength + 1) + posClauses(effectiveLength) = Set.empty[Lit] + split + negClauses(effectiveLength) = Set.empty[Lit] + (-split) + + val pos = (posClauses, assignments) + val neg = (negClauses, assignments) + loop(pos :: neg :: rest) } + } } } + loop(state) } - - private def projectToModel(model: TseitinModel, symForVar: Map[Int, Sym]): Model = - if (model == NoTseitinModel) NoModel - else if (model == EmptyTseitinModel) EmptyModel - else { - val mappedModels = model.toList collect { - case lit if symForVar isDefinedAt lit.variable => (symForVar(lit.variable), lit.positive) - } - if (mappedModels.isEmpty) { - // could get an empty model if mappedModels is a constant like `True` - EmptyModel - } else { - mappedModels.toMap - } - } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 6a420048c142..4863b010d7f5 
100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,13 +15,11 @@ package typechecker import scala.tools.nsc.Reporting.WarningCategory -/** This trait provides logic for assessing the validity of argument +/** A provider of the logic for assessing the validity of argument * adaptations, such as tupling, unit-insertion, widening, etc. Such * logic is spread around the compiler, without much ability on the * part of the user to tighten the potentially dangerous bits. * - * TODO: unifying/consolidating said logic under consistent management. - * * @author Paul Phillips */ trait Adaptations { @@ -38,6 +36,10 @@ trait Adaptations { case Apply(_, arg :: Nil) => arg case _ => EmptyTree } + def isInfix = t match { + case Apply(_, _ :: Nil) => t.hasAttachment[MultiargInfixAttachment.type] + case _ => false + } def callString = ( ( if (t.symbol.isConstructor) "new " else "" ) + ( t.symbol.owner.decodedName ) + @@ -74,25 +76,40 @@ trait Adaptations { || t.symbol.name == nme.NE ) } - - if (settings.noAdaptedArgs) - context.warning(t.pos, adaptWarningMessage("No automatic adaptation here: use explicit parentheses."), WarningCategory.LintAdaptedArgs) - else if (args.isEmpty) { - if (settings.future) - context.error(t.pos, adaptWarningMessage("Adaptation of argument list by inserting () has been removed.", showAdaptation = false)) - else { - val msg = "Adaptation of argument list by inserting () is deprecated: " + ( - if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous." 
- else "this is unlikely to be what you want.") - context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg), "2.11.0") + @inline def msg(what: String): String = s"adaptation of an empty argument list by inserting () $what" + @inline def deprecatedAdaptation: true = { + val twist = + if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous" + else "this is unlikely to be what you want" + val text = s"${msg("is deprecated")}: ${twist}" + if (currentRun.isScala3) + currentRun.reporting.warning(t.pos, adaptWarningMessage(text), WarningCategory.Scala3Migration, t.symbol) + else + context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(text), "2.11.0") + true // keep adaptation + } + @inline def warnAdaptation: true = { + def discardedArgs = t match { + case Apply(_, Block(Apply(TypeApply(Select(adapter, _), _), adapted) :: Nil, expr) :: Nil) => + isTupleSymbol(adapter.symbol.companion) && expr.tpe == UnitTpe && adapted == args + case _ => false } - } else if (settings.warnAdaptedArgs) - context.warning(t.pos, adaptWarningMessage( - s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want."), + if (settings.lintArgDiscard && discardedArgs) context.warning(t.pos, adaptWarningMessage( + s"adapted the argument list to expected Unit type: arguments will be discarded"), WarningCategory.LintAdaptedArgs) - - // return `true` if the adaptation should be kept - !(settings.noAdaptedArgs || (args.isEmpty && settings.future)) + else if (settings.warnAdaptedArgs && !isInfix) { + val msg = adaptWarningMessage( + s"adapted the argument list to the expected ${args.size}-tuple: add additional parens instead") + val pos = wrappingPos(args) + context.warning(t.pos, msg, WarningCategory.LintAdaptedArgs, + runReporting.codeAction("add wrapping parentheses", pos, s"(${pos.source.sourceAt(pos)})", msg)) + } + true // keep adaptation + } + if (args.nonEmpty) + warnAdaptation + else + deprecatedAdaptation } } } diff 
--git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b56fba420fe0..3ca7b7a1538d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,11 @@ package scala.tools.nsc package typechecker -/** The main attribution phase. +import scala.collection.mutable +import scala.collection.mutable.ArrayDeque +import scala.reflect.internal.util.JavaClearable + +/** Defines the sub-components for the namer, packageobjects, and typer phases. */ trait Analyzer extends AnyRef with Contexts @@ -29,7 +33,9 @@ trait Analyzer extends AnyRef with TypeDiagnostics with ContextErrors with StdAttachments + with MacroAnnotationAttachments with AnalyzerPlugins + with ImportTracking { val global : Global import global._ @@ -44,15 +50,20 @@ trait Analyzer extends AnyRef override val checkable = false override def keepsTypeParams = false - def apply(unit: CompilationUnit) { - newNamer(rootContext(unit)).enterSym(unit.body) - } + def apply(unit: CompilationUnit): Unit = newNamer(rootContext(unit)).enterSym(unit.body) } } object packageObjects extends { val global: Analyzer.this.global.type = Analyzer.this.global } with SubComponent { + val deferredOpen: mutable.Set[Symbol] = { + import scala.jdk.CollectionConverters._ + // This will throw a ConcurrentModificationException if we mutate during iteration + val javaSet = new java.util.LinkedHashSet[Symbol]() + perRunCaches.recordCache(JavaClearable.forCollection(javaSet)) + javaSet.asScala + } val phaseName = "packageobjects" val runsAfter = List[String]() val runsRightAfter= Some("namer") @@ -61,20 +72,35 @@ trait Analyzer extends AnyRef override val checkable = 
false import global._ - val openPackageObjectsTraverser = new Traverser { + val openPackageObjectsTraverser = new InternalTraverser { override def traverse(tree: Tree): Unit = tree match { case ModuleDef(_, _, _) => if (tree.symbol.name == nme.PACKAGEkw) { + // we've actually got a source file + deferredOpen.subtractOne(tree.symbol.owner) + openPackageModule(tree.symbol, tree.symbol.owner) } case ClassDef(_, _, _, _) => () // make it fast - case _ => super.traverse(tree) + case _ => tree.traverse(this) } } - def apply(unit: CompilationUnit) { + def apply(unit: CompilationUnit): Unit = { openPackageObjectsTraverser(unit.body) } + + override def run(): Unit = { + super.run() + + for (sym <- deferredOpen.toVector) { + if (deferredOpen.remove(sym)) { + // this can remove entries from `deferredOpen`, hence the copy to a vector + // and the check of `remove` return value + openPackageModule(sym) + } + } + } } } @@ -88,42 +114,51 @@ trait Analyzer extends AnyRef def newPhase(prev: Phase): StdPhase = new TyperPhase(prev) final class TyperPhase(prev: Phase) extends StdPhase(prev) { override def keepsTypeParams = false - override def shouldSkipThisPhaseForJava: Boolean = !(settings.YpickleJava || createJavadoc) + override def shouldSkipThisPhaseForJava: Boolean = !settings.YpickleJava.value && !createJavadoc resetTyper() // the log accumulates entries over time, even though it should not (Adriaan, Martin said so). // Lacking a better fix, we clear it here (before the phase is created, meaning for each // compiler run). This is good enough for the resident compiler, which was the most affected. 
undoLog.clear() + private val toCheckAfterTyper = ArrayDeque.empty[CompilationUnit.ToCheckAfterTyper] + def addCheckAfterTyper(check: CompilationUnit.ToCheckAfterTyper): Unit = toCheckAfterTyper.append(check) override def run(): Unit = { val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) val units = currentRun.units + while (units.hasNext) { applyPhase(units.next()) undoLog.clear() } finishComputeParamAlias() + try while (toCheckAfterTyper.nonEmpty) toCheckAfterTyper.removeHead().apply() + finally toCheckAfterTyper.clearAndShrink() // defensive measure in case the bookkeeping in deferred macro expansion is buggy clearDelayed() if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.typerNanos, start) } - - def apply(unit: CompilationUnit) { + def apply(unit: CompilationUnit): Unit = { try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) // interactive typed may finish by throwing a `TyperResult` if (!settings.Youtline.value) { - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) + while (unit.toCheck.nonEmpty) { + unit.toCheck.removeHead() match { + case now: CompilationUnit.ToCheckAfterUnit => now() + case later: CompilationUnit.ToCheckAfterTyper => addCheckAfterTyper(later) + } + } + if (!settings.isScaladoc && settings.warnUnusedImport) warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) + if (!settings.isScaladoc && settings.warnUnused.isSetByUser) new checkUnused(typer).apply(unit) } } finally { runReporting.reportSuspendedMessages(unit) - unit.toCheck.clear() + unit.toCheck.clearAndShrink() } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index 66aff8e440b2..f9bf03de6fd2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,13 +13,15 @@ package scala.tools.nsc package typechecker +import scala.annotation._ + /** * @author Lukas Rytz - * @version 1.0 */ -trait AnalyzerPlugins { self: Analyzer => +trait AnalyzerPlugins { self: Analyzer with splain.SplainData => import global._ + @nowarn trait AnalyzerPlugin { /** * Selectively activate this analyzer plugin, e.g. according to the compiler phase. @@ -55,7 +57,8 @@ trait AnalyzerPlugins { self: Analyzer => /** * Let analyzer plugins change the types assigned to definitions. For definitions that have * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the - * type is inferred by typing the definition's righthand side. + * type is inferred by typing the definition's righthand side, or from the overridden + * member under `-Xsource-features`. * * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt` * TypeTree of the definition (for DefDef and ValDef). @@ -175,11 +178,21 @@ trait AnalyzerPlugins { self: Analyzer => * Access the implicit search result from Scalac's typechecker. * * The motivation of this method is to allow analyzer plugins to control when/where - * implicit search results are returned, and inspec them for data capturing purposes. + * implicit search results are returned, and inspect them for data capturing purposes. * * @param result The result to a given implicit search. */ def pluginsNotifyImplicitSearchResult(result: SearchResult): Unit = () + + /** + * Construct a custom error message for implicit parameters that could not be resolved. 
+ * + * @param param The implicit parameter that was resolved + * @param errors The chain of intermediate implicits that lead to this error + * @param previous The error message constructed by the previous analyzer plugin, or the builtin default + */ + def noImplicitFoundError(param: Symbol, errors: List[ImplicitError], previous: String): String = + previous } /** @@ -187,6 +200,7 @@ trait AnalyzerPlugins { self: Analyzer => * or something else if the plugin knows better that the implementation provided in scala-compiler.jar. * If multiple plugins return a non-empty result, it's going to be a compilation error. */ + @nowarn trait MacroPlugin { /** * Selectively activate this analyzer plugin, e.g. according to the compiler phase. @@ -309,7 +323,7 @@ trait AnalyzerPlugins { self: Analyzer => private var analyzerPlugins: List[AnalyzerPlugin] = Nil /** Registers a new analyzer plugin */ - def addAnalyzerPlugin(plugin: AnalyzerPlugin) { + def addAnalyzerPlugin(plugin: AnalyzerPlugin): Unit = { if (!analyzerPlugins.contains(plugin)) analyzerPlugins = plugin :: analyzerPlugins } @@ -391,11 +405,18 @@ trait AnalyzerPlugins { self: Analyzer => def accumulate = (_, p) => p.pluginsNotifyImplicitSearchResult(result) }) + /** @see AnalyzerPlugin.noImplicitFoundError */ + def pluginsNoImplicitFoundError(param: Symbol, errors: List[ImplicitError], initial: String): String = + invoke(new CumulativeOp[String] { + def default = initial + def accumulate = (previous, p) => p.noImplicitFoundError(param, errors, previous) + }) + /** A list of registered macro plugins */ private var macroPlugins: List[MacroPlugin] = Nil /** Registers a new macro plugin */ - def addMacroPlugin(plugin: MacroPlugin) { + def addMacroPlugin(plugin: MacroPlugin): Unit = { if (!macroPlugins.contains(plugin)) macroPlugins = plugin :: macroPlugins } @@ -418,7 +439,7 @@ trait AnalyzerPlugins { self: Analyzer => if (plugin.isActive()) { op.custom(plugin) match { case None => - case s @ Some(custom) => + case s @ 
Some(_) => if (result.isDefined) { typer.context.error(op.position, s"both $resultPlugin and $plugin want to ${op.description}") op.default diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 0b325ffd65dc..7696f381ade2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,9 +13,6 @@ package scala.tools.nsc package typechecker -import Checkability._ -import scala.language.postfixOps -import scala.collection.mutable.ListBuffer import scala.tools.nsc.Reporting.WarningCategory /** On pattern matcher checkability: @@ -40,7 +37,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * There are four possibilities to consider: * [P1] X will always conform to P - * [P2] x will never conform to P + * [P2] x will never be a P, because it is an X * [P3] X will conform to P if some runtime test is true * [P4] X cannot be checked against P * @@ -53,7 +50,7 @@ import scala.tools.nsc.Reporting.WarningCategory * which is essentially the intersection of X and |P|, where |P| is * the erasure of P. If XR <: P, then no warning is emitted. * - * We evaluate "X with conform to P" by checking `X <: P_wild`, where + * We evaluate "X will conform to P" by checking `X <: P_wild`, where * P_wild is the result of substituting wildcard types in place of * pattern type variables. This is intentionally stricter than * (X matchesPattern P), see scala/bug#8597 for motivating test cases. 
@@ -77,7 +74,22 @@ trait Checkable { import global._ import definitions._ - import CheckabilityChecker.{ isNeverSubType, isNeverSubClass } + + type Checkability = Int + object Checkability { + final val StaticallyTrue = 0 + final val StaticallyFalse = 1 + final val RuntimeCheckable = 2 + final val Uncheckable = 3 + final val CheckabilityError = 4 + lazy val describe: (Checkability => String) = List( + "statically true", + "statically false", + "runtime checkable", + "uncheckable", + "error", + ) + } /** The applied type of class 'to' after inferring anything * possible from the knowledge that 'to' must also be of the @@ -116,63 +128,30 @@ trait Checkable { appliedType(to, resArgs) } - private def isUnwarnableTypeArgSymbol(sym: Symbol) = ( - (!settings.isScala213 && sym.isTypeParameter) || // dummy - sym.name.toTermName == nme.WILDCARD || // don't warn for `case l: List[_]`. Here, `List[_]` is a TypeRef, the arg refers an abstract type symbol `_` - nme.isVariableName(sym.name) // don't warn for `x.isInstanceOf[List[_]]`. 
Here, `List[_]` is an existential, quantified sym has `isVariableName` - ) - private def isUnwarnableTypeArg(arg: Type) = ( - uncheckedOk(arg) // @unchecked T - || isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439 - ) - private def uncheckedOk(tp: Type) = tp hasAnnotation UncheckedClass - - private def typeArgsInTopLevelType(tp: Type): List[Type] = { - val res: ListBuffer[Type] = ListBuffer.empty[Type] - def add(t: Type) = if (!isUnwarnableTypeArg(t)) res += t - def loop(tp: Type): Unit = tp match { - case RefinedType(parents, _) => - parents foreach loop - case TypeRef(_, ArrayClass, arg :: Nil) => - if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) - case TypeRef(pre, sym, args) => - loop(pre) - args.foreach(add) - case ExistentialType(tparams, underlying) => - tparams.foreach(tp => add(tp.tpe)) - loop(underlying) - case _ => () - } - loop(tp) - res.toList - } + private def uncheckedOk(tp: Type) = tp.hasAnnotation(UncheckedClass) private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = { - def typeVarToWildcard(tp: Type) = { - // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala - if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp - } + // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala + def typeVarToWildcard(tp: Type) = if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp val pattTpWild = pattTp.map(typeVarToWildcard) scrut <:< pattTpWild } - private class CheckabilityChecker(val X: Type, val P: Type) { + private class CheckabilityChecker(val X: Type, val P: Type, isRecheck: Boolean = false) { + import Checkability._ + import erasure.GenericArray def Xsym = X.typeSymbol def Psym = P.typeSymbol - def PErased = { + def PErased = P match { - case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P) - case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) + case GenericArray(_, core) => 
existentialAbstraction(core.typeSymbol :: Nil, P) + case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) } - } - def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) - - - // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean] - def P1 = scrutConformsToPatternType(X, P) - def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) - def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) - def P4 = !(P1 || P2 || P3) + def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) + def P1 = scrutConformsToPatternType(X, P) + def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) + def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) + def P4 = !(P1 || P2 || P3) def summaryString = f""" |Checking checkability of (x: $X) against pattern $P @@ -182,58 +161,61 @@ trait Checkable { |[P4] $P4%-6s None of the above // !(P1 || P2 || P3) """.stripMargin.trim - val result = ( + val result: Checkability = if (X.isErroneous || P.isErroneous) CheckabilityError else if (P1) StaticallyTrue else if (P2) StaticallyFalse else if (P3) RuntimeCheckable - else if (uncheckableType == NoType) { - // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type - debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString) + else if (uncheckableType != NoType) Uncheckable + else { // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type + debuglog(s"Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n$summaryString") CheckabilityError } - else Uncheckable - ) - lazy val uncheckableType = if (Psym.isAbstractType) P else { - val possibles = typeArgsInTopLevelType(P).toSet - val opt = possibles find { targ => + // collect type args which are candidates for warning because uncheckable + private def typeArgsInTopLevelType(tp: Type): Set[Type] = { + def isUnwarnableTypeArg(arg: 
Type) = + uncheckedOk(arg) || { // @unchecked T + val sym = arg.typeSymbolDirect // has to be direct: see pos/t1439 + sym.name.toTermName == nme.WILDCARD || // don't warn for `case l: List[_]`. Here, `List[_]` is a TypeRef, the arg refers an abstract type symbol `_` + nme.isVariableName(sym.name) // don't warn for `x.isInstanceOf[List[_]]`. Here, `List[_]` is an existential, quantified sym has `isVariableName` + } + var res: Set[Type] = Set.empty[Type] + def add(t: Type): Unit = if (!isUnwarnableTypeArg(t)) res += t + def loop(tp: Type): Unit = tp match { + case RefinedType(parents, _) => + parents.foreach(loop) + case TypeRef(_, ArrayClass, arg :: Nil) => + if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) + case TypeRef(pre, sym, args) => + loop(pre) + args.foreach(add) + case ExistentialType(tparams, underlying) => + tparams.foreach(tp => add(tp.tpe)) + loop(underlying) + case _ => () + } + loop(tp) + res + } + lazy val (uncheckableType, uncheckableCard) = + if (Psym.isAbstractType) (P, 1) + else { + val possibles = typeArgsInTopLevelType(P) // Create a derived type with every possibly uncheckable type replaced // with a WildcardType, except for 'targ'. If !(XR <: derived) then // 'targ' is uncheckable. 
- val derived = P map (tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp) - !(XR <:< derived) + def candidate(targ: Type) = { + val derived = P.map(tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp) + !(XR <:< derived) + } + val opt = possibles.find(candidate) + opt.map(res => (res, possibles.iterator.map(candidate).take(2).size)).getOrElse((NoType, 0)) } - opt getOrElse NoType - } def neverSubClass = isNeverSubClass(Xsym, Psym) def neverMatches = result == StaticallyFalse def isUncheckable = result == Uncheckable def isCheckable = !isUncheckable - def uncheckableMessage = uncheckableType match { - case NoType => "something" - case tp @ RefinedType(_, _) => "refinement " + tp - case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name - case tp => "non-variable type argument " + tp - } - } - - /** X, P, [P1], etc. are all explained at the top of the file. - */ - private object CheckabilityChecker { - /** Are these symbols classes with no subclass relationship? */ - def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( - sym1.isClass - && sym2.isClass - && !(sym1 isSubClass sym2) - && !(sym2 isSubClass sym1) - ) - /** Are all children of these symbols pairwise irreconcilable? */ - def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { - val sc1 = sym1.sealedChildren - val sc2 = sym2.sealedChildren - sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) - } /** Is it impossible for the given symbols to be parents in the same class? * This means given A and B, can there be an instance of A with B? This is the @@ -241,25 +223,38 @@ trait Checkable { * additional conditions holds: * - either A or B is effectively final * - neither A nor B is a trait (i.e. 
both are actual classes, not eligible for mixin) - * - both A and B are sealed/final, and every possible pairing of their children is irreconcilable + * - either A or B is sealed/final, and every possible pairing of their children (or themselves) is irreconcilable * - * TODO: the last two conditions of the last possibility (that the symbols are not of + * The last two conditions of the last possibility (that the symbols are not of * classes being compiled in the current run) are because this currently runs too early, * and .children returns Nil for sealed classes because their children will not be - * populated until typer. It was too difficult to move things around for the moment, - * so I will consult with moors about the optimal time to be doing this. + * populated until typer. As a workaround, in this case, this check is performed a second + * time at the end of typer. #6537, #12414 */ - def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && ( + def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = { + // Are these symbols classes with no subclass relationship? + def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( + sym1.isClass + && sym2.isClass + && !sym1.isSubClass(sym2) + && !sym2.isSubClass(sym1) + ) + // Are all children of these symbols pairwise irreconcilable? 
+ def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { + val sc1 = if (isSealedOrFinal(sym1)) sym1.sealedChildren else Set(sym1) + val sc2 = if (isSealedOrFinal(sym2)) sym2.sealedChildren else Set(sym2) + sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) + } + areUnrelatedClasses(sym1, sym2) && ( isEffectivelyFinal(sym1) // initialization important || isEffectivelyFinal(sym2) || !sym1.isTrait && !sym2.isTrait - || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2) - ) + || (isSealedOrFinal(sym1) || isSealedOrFinal(sym2)) && allChildrenAreIrreconcilable(sym1, sym2) && (isRecheck || !currentRun.compiles(sym1) && !currentRun.compiles(sym2)) + ) + } private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal - private def isEffectivelyFinal(sym: Symbol): Boolean = ( - // initialization important - sym.initialize.isEffectivelyFinalOrNotOverridden - ) + // initialization important + private def isEffectivelyFinal(sym: Symbol): Boolean = sym.initialize.isEffectivelyFinalOrNotOverridden def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2) @@ -279,9 +274,9 @@ trait Checkable { case _ => false } - // Important to dealias at any entry point (this is the only one at this writing.) + // Important to dealias at any entry point (this is the only one at this writing but cf isNeverSubClass.) 
def isNeverSubType(tp1: Type, tp2: Type): Boolean = /*logResult(s"isNeverSubType($tp1, $tp2)")*/((tp1.dealias, tp2.dealias) match { - case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) => + case (TypeRef(_, sym1, _), TypeRef(_, sym2, args2)) => isNeverSubClass(sym1, sym2) || { (sym1 isSubClass sym2) && { val tp1seen = tp1 baseType sym2 @@ -297,14 +292,13 @@ trait Checkable { def isUncheckable(P0: Type) = !isCheckable(P0) - def isCheckable(P0: Type): Boolean = ( + def isCheckable(P0: Type): Boolean = uncheckedOk(P0) || (P0.widen match { case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false case RefinedType(_, decls) if !decls.isEmpty => false - case RefinedType(parents, _) => parents forall isCheckable - case p => new CheckabilityChecker(AnyTpe, p) isCheckable + case RefinedType(parents, _) => parents.forall(isCheckable) + case p => new CheckabilityChecker(AnyTpe, p).isCheckable }) - ) /** TODO: much better error positions. * Kind of stuck right now because they just pass us the one tree. @@ -312,54 +306,77 @@ trait Checkable { * * Instead of the canRemedy flag, annotate uncheckable types that have become checkable because of the availability of a class tag? 
*/ - def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false) { - if (uncheckedOk(P0)) return - def where = if (inPattern) "pattern " else "" + def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false): Unit = if (!uncheckedOk(P0)) { + import Checkability._ - // singleton types not considered here, dealias the pattern for SI-XXXX - val P = P0.dealiasWiden - val X = X0.widen + if (P0.typeSymbol == SingletonClass) + context.warning(tree.pos, s"fruitless type test: every non-null value will be a Singleton dynamically", WarningCategory.Other) + else { + // singleton types not considered here, dealias the pattern + val P = P0.dealiasWiden + val X = X0.widen - def PString = if (P eq P0) P.toString else s"$P (the underlying of $P0)" + def PString = if (P eq P0) P.toString else s"$P (the underlying of $P0)" - P match { - // Prohibit top-level type tests for these, but they are ok nested (e.g. case Foldable[Nothing] => ... ) - case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => - InferErrorGen.TypePatternOrIsInstanceTestError(tree, P) - // If top-level abstract types can be checked using a classtag extractor, don't warn about them - case TypeRef(_, sym, _) if sym.isAbstractType && canRemedy => - ; - // Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet. - case RefinedType(_, decls) if !decls.isEmpty => - context.warning(tree.pos, s"a pattern match on a refinement type is unchecked", WarningCategory.Unchecked) - case RefinedType(parents, _) => - parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy)) - case _ => - val checker = new CheckabilityChecker(X, P) - if (checker.result == RuntimeCheckable) - log(checker.summaryString) + P match { + // Prohibit top-level type tests for these, but they are ok nested (e.g. case Foldable[Nothing] => ... 
) + case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => + InferErrorGen.TypePatternOrIsInstanceTestError(tree, P) + // If top-level abstract types can be checked using a classtag extractor, don't warn about them + case TypeRef(_, sym, _) if sym.isAbstractType && canRemedy => + ; + // Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet. + case RefinedType(_, decls) if !decls.isEmpty => + context.warning(tree.pos, s"a pattern match on a refinement type is unchecked", WarningCategory.Unchecked) + case RefinedType(parents, _) => + parents.foreach(checkCheckable(tree, _, X, inPattern, canRemedy)) + case _ => + val checker = new CheckabilityChecker(X, P) + if (checker.result == RuntimeCheckable) + log(checker.summaryString) - if (checker.neverMatches) { - val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)" - context.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum", WarningCategory.Other) - } - else if (checker.isUncheckable) { - val msg = ( - if (checker.uncheckableType =:= P) s"abstract type $where$PString" - else s"${checker.uncheckableMessage} in type $where$PString" - ) - context.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure", WarningCategory.Unchecked) - } + def neverMatchesWarning(result: CheckabilityChecker) = { + val addendum = if (result.neverSubClass) "" else " (but still might match its erasure)" + context.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum", WarningCategory.Other) + } + if (checker.neverMatches) + neverMatchesWarning(checker) + else if (checker.isUncheckable) { + def uncheckableMessage = checker.uncheckableType match { + case NoType => "something" + case tp @ RefinedType(_, _) => "refinement " + tp + case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name + case tp => "non-variable type argument " + tp + } + val msg = { + val where = 
if (inPattern) "pattern " else "" + if (checker.uncheckableCard == 2) + s"the type test for $where$PString cannot be checked at runtime because it has type parameters eliminated by erasure" + else { + val thing = + if (checker.uncheckableType =:= P) s"abstract type $where$PString" + else s"$uncheckableMessage in type $where$PString" + s"$thing is unchecked since it is eliminated by erasure" + } + } + context.warning(tree.pos, msg, WarningCategory.Unchecked) + } + else if (checker.result == RuntimeCheckable) { + // register deferred checking for sealed types in current run + def recheckFruitless(): Unit = { + val rechecker = new CheckabilityChecker(X, P, isRecheck = true) + if (rechecker.neverMatches) neverMatchesWarning(rechecker) + } + def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal + val Xsym = X.typeSymbol + val Psym = P.typeSymbol + if ((isSealedOrFinal(Xsym) || isSealedOrFinal(Psym)) && (currentRun.compiles(Xsym) || currentRun.compiles(Psym))) { + debuglog(s"deferred recheckFruitless($X, $P)") + context.unit.addPostTyperCheck(() => recheckFruitless()) + } + } + } } } } } - -private[typechecker] final class Checkability(val value: Int) extends AnyVal { } -private[typechecker] object Checkability { - val StaticallyTrue = new Checkability(0) - val StaticallyFalse = new Checkability(1) - val RuntimeCheckable = new Checkability(2) - val Uncheckable = new Checkability(3) - val CheckabilityError = new Checkability(4) -} diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala index e4214bc0777b..d97ef571b721 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,37 +17,56 @@ package typechecker import java.lang.ArithmeticException import scala.tools.nsc.Reporting.WarningCategory +import scala.util.control.ControlThrowable /** This class ... * * @author Martin Odersky - * @version 1.0 */ abstract class ConstantFolder { val global: Global import global._ - /** If tree is a constant operation, replace with result. */ - def apply(tree: Tree, site: Symbol): Tree = fold(tree, tree match { - case Apply(Select(Literal(x), op), List(Literal(y))) => foldBinop(op, x, y) - case Select(Literal(x), op) => foldUnop(op, x) - case _ => null - }, site) - - /** If tree is a constant value that can be converted to type `pt`, perform - * the conversion. - */ - def apply(tree: Tree, pt: Type, site: Symbol): Tree = fold(apply(tree, site), tree.tpe match { - case ConstantType(x) => x convertTo pt - case _ => null - }, site) + val foldableUnaryOps: Set[Name] = nme.isEncodedUnary ++ List(nme.toChar, nme.toInt, nme.toLong, nme.toFloat, nme.toDouble) + + // We can fold side effect free terms and their types + object FoldableTerm { + @inline private def effectless(sym: Symbol): Boolean = sym != null && !sym.isLazy && (sym.isVal || sym.isGetter && sym.accessed.isVal) + + def unapply(tree: Tree): Option[Constant] = tree match { + case Literal(x) => Some(x) + case term if effectless(term.symbol) => extractConstant(term.tpe) + case _ => None + } + } - private def fold(tree: Tree, compX: => Constant, site: Symbol): Tree = + // We can fold the types of side effecting terms, but not the terms themselves + object ConstantTerm { + def unapply(tree: Tree): Option[Constant] = extractConstant(tree.tpe) + } + + private def extractConstant(tpe: Type): Option[Constant] = + tpe match { + case ConstantType(x) => Some(x) + case st: SingleType => + st.underlying match { + case ConstantType(x) => Some(x) + case _ => None + } + case _ => None + } + + /** If tree is a 
constant operation, replace with result. */ + def apply(tree: Tree, site: Symbol): Tree = if (isPastTyper) tree else try { - val x = compX - if ((x ne null) && x.tag != UnitTag) tree setType ConstantType(x) - else tree + tree match { + case Apply(Select(FoldableTerm(x), op), List(FoldableTerm(y))) => fold(tree, safelyFoldBinop(tree, site)(op, x, y), foldable = true) + case Apply(Select(ConstantTerm(x), op), List(ConstantTerm(y))) => fold(tree, safelyFoldBinop(tree, site)(op, x, y), foldable = false) + case Select(FoldableTerm(x), op) => fold(tree, foldUnop(op, x), foldable = true) + case Select(ConstantTerm(x), op) => fold(tree, foldUnop(op, x), foldable = false) + case _ => tree + } } catch { case e: ArithmeticException => if (settings.warnConstant) @@ -55,27 +74,63 @@ abstract class ConstantFolder { tree } - private def foldUnop(op: Name, x: Constant): Constant = (op, x.tag) match { - case (nme.UNARY_!, BooleanTag) => Constant(!x.booleanValue) - - case (nme.UNARY_~ , IntTag ) => Constant(~x.intValue) - case (nme.UNARY_~ , LongTag ) => Constant(~x.longValue) - - case (nme.UNARY_+ , IntTag ) => Constant(+x.intValue) - case (nme.UNARY_+ , LongTag ) => Constant(+x.longValue) - case (nme.UNARY_+ , FloatTag ) => Constant(+x.floatValue) - case (nme.UNARY_+ , DoubleTag ) => Constant(+x.doubleValue) + /** If tree is a constant value that can be converted to type `pt`, perform the conversion. + */ + def apply(tree: Tree, pt: Type, site: Symbol): Tree = { + val orig = apply(tree, site) + orig.tpe match { + case tp@ConstantType(x) => fold(orig, x.convertTo(pt), foldable = isConstantType(tp)) + case _ => orig + } + } - case (nme.UNARY_- , IntTag ) => Constant(-x.intValue) - case (nme.UNARY_- , LongTag ) => Constant(-x.longValue) - case (nme.UNARY_- , FloatTag ) => Constant(-x.floatValue) - case (nme.UNARY_- , DoubleTag ) => Constant(-x.doubleValue) + /** Set the computed constant type. 
+ */ + private def fold(orig: Tree, folded: Constant, foldable: Boolean): Tree = + if (folded == null || folded.tag == UnitTag) orig + else orig.setType { + if (foldable) FoldableConstantType(folded) + else LiteralType(folded) + } - case _ => null + private def foldUnop(op: Name, x: Constant): Constant = { + val N = nme + import N._ + val value: Any = op match { + case UNARY_! => if (x.tag == BooleanTag) !x.booleanValue else null + case UNARY_~ => x.tag match { + case IntTag => ~x.intValue + case LongTag => ~x.longValue + case _ => null + } + case UNARY_+ => x.tag match { + case IntTag => +x.intValue + case LongTag => +x.longValue + case FloatTag => +x.floatValue + case DoubleTag => +x.doubleValue + case _ => null + } + case UNARY_- => x.tag match { + case IntTag => -x.intValue + case LongTag => -x.longValue + case FloatTag => -x.floatValue + case DoubleTag => -x.doubleValue + case _ => null + } + case _ if x.isNumeric => op match { + case `toChar` => x.charValue + case `toInt` => x.intValue + case `toLong` => x.longValue + case `toFloat` => x.floatValue + case `toDouble` => x.doubleValue + case _ => null + } + case _ => null + } + if (value != null) Constant(value) else null } - /** These are local helpers to keep foldBinop from overly taxing the - * optimizer. + /** These are local helpers to keep foldBinop from overly taxing the optimizer. 
*/ private def foldBooleanOp(op: Name, x: Constant, y: Constant): Constant = op match { case nme.ZOR => Constant(x.booleanValue | y.booleanValue) @@ -100,10 +155,18 @@ abstract class ConstantFolder { case nme.GT => Constant(x.intValue > y.intValue) case nme.LE => Constant(x.intValue <= y.intValue) case nme.GE => Constant(x.intValue >= y.intValue) - case nme.ADD => Constant(x.intValue + y.intValue) - case nme.SUB => Constant(x.intValue - y.intValue) - case nme.MUL => Constant(x.intValue * y.intValue) - case nme.DIV => Constant(x.intValue / y.intValue) + case nme.ADD => Constant(safely(Math.addExact(x.intValue, y.intValue), x.intValue + y.intValue)) + case nme.SUB => Constant(safely(Math.subtractExact(x.intValue, y.intValue), x.intValue - y.intValue)) + case nme.MUL => Constant(safely(Math.multiplyExact(x.intValue, y.intValue), x.intValue * y.intValue)) + case nme.DIV => + val xd = x.intValue + val yd = y.intValue + val value = + if (yd == 0) xd / yd // Math.divideExact(xd, yd) // de-optimize + else if (yd == -1 && xd == Int.MinValue) + safely(throw new ArithmeticException("integer overflow"), xd / yd) + else xd / yd + Constant(value) case nme.MOD => Constant(x.intValue % y.intValue) case _ => null } @@ -112,13 +175,13 @@ abstract class ConstantFolder { case nme.XOR => Constant(x.longValue ^ y.longValue) case nme.AND => Constant(x.longValue & y.longValue) case nme.LSL if x.tag <= IntTag - => Constant(x.intValue << y.longValue) + => Constant(x.intValue << y.longValue.toInt) case nme.LSL => Constant(x.longValue << y.longValue) case nme.LSR if x.tag <= IntTag - => Constant(x.intValue >>> y.longValue) + => Constant(x.intValue >>> y.longValue.toInt) case nme.LSR => Constant(x.longValue >>> y.longValue) case nme.ASR if x.tag <= IntTag - => Constant(x.intValue >> y.longValue) + => Constant(x.intValue >> y.longValue.toInt) case nme.ASR => Constant(x.longValue >> y.longValue) case nme.EQ => Constant(x.longValue == y.longValue) case nme.NE => Constant(x.longValue != 
y.longValue) @@ -126,10 +189,18 @@ abstract class ConstantFolder { case nme.GT => Constant(x.longValue > y.longValue) case nme.LE => Constant(x.longValue <= y.longValue) case nme.GE => Constant(x.longValue >= y.longValue) - case nme.ADD => Constant(x.longValue + y.longValue) - case nme.SUB => Constant(x.longValue - y.longValue) - case nme.MUL => Constant(x.longValue * y.longValue) - case nme.DIV => Constant(x.longValue / y.longValue) + case nme.ADD => Constant(safely(Math.addExact(x.longValue, y.longValue), x.longValue + y.longValue)) + case nme.SUB => Constant(safely(Math.subtractExact(x.longValue, y.longValue), x.longValue - y.longValue)) + case nme.MUL => Constant(safely(Math.multiplyExact(x.longValue, y.longValue), x.longValue * y.longValue)) + case nme.DIV => + val xd = x.longValue + val yd = y.longValue + val value = + if (yd == 0) xd / yd // Math.divideExact(xd, yd) // de-optimize + else if (yd == -1 && xd == Long.MinValue) + safely(throw new ArithmeticException("long overflow"), xd / yd) + else xd / yd + Constant(value) case nme.MOD => Constant(x.longValue % y.longValue) case _ => null } @@ -178,4 +249,16 @@ abstract class ConstantFolder { case _ => null } } + private def safelyFoldBinop(tree: Tree, site: Symbol)(op: Name, x: Constant, y: Constant): Constant = + try foldBinop(op, x, y) + catch { + case e: ConstFoldException => + if (settings.warnConstant) + runReporting.warning(tree.pos, s"Evaluation of a constant expression results in an arithmetic error: ${e.getMessage}, using ${e.value}", WarningCategory.LintConstant, site) + Constant(e.value) + } + private def safely[A](exact: => A, inexact: A): A = + try exact + catch { case e: ArithmeticException => throw new ConstFoldException(e.getMessage, inexact) } + private class ConstFoldException(msg: String, val value: Any) extends ControlThrowable(msg) } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 
37ce9f3e95ad..8e50e47307ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,26 +13,35 @@ package scala.tools.nsc package typechecker -import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString } -import scala.compat.Platform.EOL +import java.lang.System.{lineSeparator => EOL} +import scala.PartialFunction.cond +import scala.annotation._ +import scala.reflect.internal.util.{CodeAction, NoSourceFile} +import scala.reflect.internal.util.StringOps.{countAsString, countElementsAsString} +import scala.reflect.io.NoAbstractFile import scala.reflect.runtime.ReflectionUtils import scala.reflect.macros.runtime.AbortMacroException -import scala.util.control.NonFatal import scala.tools.nsc.Reporting.WarningCategory import scala.tools.nsc.util.stackTraceString -import scala.reflect.io.NoAbstractFile -import scala.reflect.internal.util.NoSourceFile +import scala.util.control.{ControlThrowable, NonFatal} -trait ContextErrors { +trait ContextErrors extends splain.SplainErrors { self: Analyzer => import global._ import definitions._ + final case class ContextWarning(pos: Position, msg: String, cat: WarningCategory, sym: Symbol, actions: List[CodeAction]) + sealed abstract class AbsTypeError { def errPos: Position def errMsg: String override def toString() = "[Type error at:" + errPos + "] " + errMsg + + // To include code actions in type errors, add a field to the corresponding case class + // override val actions: List[CodeAction] = Nil + // See TypeErrorWrapper for example + def actions: List[CodeAction] = Nil } abstract class AbsAmbiguousTypeError extends AbsTypeError @@ -50,17 +59,9 @@ trait ContextErrors { def errPos = 
underlyingTree.pos } - case class NormalTypeError(underlyingTree: Tree, errMsg: String) + case class NormalTypeError(underlyingTree: Tree, errMsg: String, override val actions: List[CodeAction] = Nil) extends TreeTypeError - /** - * Marks a TypeError that was constructed from a CyclicReference (under silent). - * This is used for named arguments, where we need to know if an assignment expression - * failed with a cyclic reference or some other type error. - */ - class NormalTypeErrorFromCyclicReference(underlyingTree: Tree, errMsg: String) - extends NormalTypeError(underlyingTree, errMsg) - case class AccessTypeError(underlyingTree: Tree, errMsg: String) extends TreeTypeError @@ -70,7 +71,7 @@ trait ContextErrors { def errPos = underlyingSym.pos } - case class TypeErrorWrapper(ex: TypeError) + case class TypeErrorWrapper(ex: TypeError, override val actions: List[CodeAction] = Nil) extends AbsTypeError { def errMsg = ex.msg def errPos = ex.pos @@ -103,21 +104,19 @@ trait ContextErrors { extends AbsTypeError object ErrorUtils { - def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) { - issueTypeError(NormalTypeError(tree, msg)) + def issueNormalTypeError(tree: Tree, msg: String, actions: List[CodeAction] = Nil)(implicit context: Context): Unit = { + issueTypeError(NormalTypeError(tree, msg, actions)) } - def issueSymbolTypeError(sym: Symbol, msg: String)(implicit context: Context) { + def issueSymbolTypeError(sym: Symbol, msg: String)(implicit context: Context): Unit = { issueTypeError(SymbolTypeError(sym, msg)) } - def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } + def issueTypeError(err: AbsTypeError)(implicit context: Context): Unit = { context.issue(err) } + // OPT: avoid error string creation for errors that won't see the light of day def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && currentRun.isScala213) - // OPT: 
avoid error string creation for errors that won't see the light of day, but predicate - // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 - "type mismatch" + if (!context.openImplicits.isEmpty && !settings.Vimplicits.value) "type mismatch" else "type mismatch" + foundReqMsg(found, req) } @@ -137,7 +136,7 @@ trait ContextErrors { else s"$name extends Any, not AnyRef" ) - if (isPrimitiveValueType(found) || isTrivialTopType(tp)) "" else "\n" + + if (isPrimitiveValueType(found) || isTrivialTopType(tp)) "" else sm"""|Note that $what. |Such types can participate in value classes, but instances |cannot appear in singleton types or in reference comparisons.""" @@ -158,21 +157,68 @@ trait ContextErrors { def MacroCantExpandIncompatibleMacrosError(internalMessage: String) = MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage) + /** The implicit not found message from the annotation, and whether it's a supplement message or not. 
*/ + def NoImplicitFoundAnnotation(tree: Tree, param: Symbol): (Boolean, String) = + param match { + case ImplicitNotFoundMsg(msg) => (false, msg.formatParameterMessage(tree)) + case _ => + val paramTp = param.tpe + paramTp.typeSymbolDirect match { + case ImplicitNotFoundMsg(msg) => (false, msg.formatDefSiteMessage(paramTp)) + case _ => + val supplement = param.baseClasses.collectFirst { + case ImplicitNotFoundMsg(msg) => s" (${msg.formatDefSiteMessage(paramTp)})" + }.getOrElse("") + true -> supplement + } + } + def NoImplicitFoundError(tree: Tree, param: Symbol)(implicit context: Context): Unit = { - def errMsg = { + val (isSupplement, annotationMsg) = NoImplicitFoundAnnotation(tree, param) + def defaultErrMsg = { val paramName = param.name val paramTp = param.tpe - def evOrParam = ( + def evOrParam = if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type" else - s"parameter $paramName:") - paramTp.typeSymbolDirect match { - case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp) - case _ => s"could not find implicit value for $evOrParam $paramTp" + s"parameter $paramName:" + if (isSupplement) s"could not find implicit value for $evOrParam $paramTp$annotationMsg" + else annotationMsg + } + val errMsg = splainPushOrReportNotFound(tree, param, annotationMsg) + issueNormalTypeError(tree, if (errMsg.isEmpty) defaultErrMsg else errMsg) + } + + private def InferredImplicitErrorImpl(tree: Tree, inferred: Type, cx: Context, isTyper: Boolean): Unit = { + val sym = tree.symbol + def err(): Unit = { + val msg = + s"Implicit definition ${if (currentRun.isScala3) "must" else "should"} have explicit type${ + if (!inferred.isErroneous) s" (inferred $inferred)" else "" + }" + val namePos = if (sym.isAccessor && sym.accessed.pos.isDefined) sym.accessed.pos else sym.pos //tree.asInstanceOf[NameTree].namePos + val src = namePos.source + val pos = if (src.sourceAt(namePos) != tree.symbol.decodedName) None else { + val declEnd = + if (sym.isAccessor) 
namePos.end + else { + val vdd = tree.asInstanceOf[ValOrDefDef] + val eql = src.indexWhere(_ == '=', start = vdd.rhs.pos.start, step = -1) + src.indexWhere(!_.isWhitespace, start = eql - 1, step = -1) + 1 + } + Some(declEnd).filter(_ > 0).map(src.position(_)) } + val action = pos.map(p => runReporting.codeAction("insert explicit type", p, s": $inferred", msg)).getOrElse(Nil) + if (currentRun.isScala3) cx.warning(tree.pos, msg, WarningCategory.Scala3Migration, action) + else cx.warning(tree.pos, msg, WarningCategory.OtherImplicitType, action) + } + // Defer warning field of class until typing getter (which is marked implicit) + if (sym.isImplicit) { + if (!sym.isLocalToBlock) err() } - issueNormalTypeError(tree, errMsg) + else if (!isTyper && sym.isField && !sym.isLocalToBlock) + sym.updateAttachment(FieldTypeInferred) } trait TyperContextErrors { @@ -183,7 +229,7 @@ trait ContextErrors { object TyperErrorGen { implicit val contextTyperErrorGen: Context = infer.getContext - def UnstableTreeError(tree: Tree) = { + def UnstableTreeError(tree: Tree): tree.type = { def addendum = { "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile." } @@ -193,10 +239,11 @@ trait ContextErrors { setError(tree) } - def AdaptTypeError(tree: Tree, found: Type, req: Type) = { + def AdaptTypeError(tree: Tree, found: Type, req: Type): Tree = { // scala/bug#3971 unwrapping to the outermost Apply helps prevent confusion with the // error message point. 
def callee = { + @tailrec def unwrap(t: Tree): Tree = t match { case Apply(app: Apply, _) => unwrap(app) case _ => t @@ -204,16 +251,24 @@ trait ContextErrors { unwrap(tree) } + def issueError(foundType: Type): Tree = { + assert(!foundType.isErroneous, s"AdaptTypeError - foundType is Erroneous: $foundType") + assert(!req.isErroneous, s"AdaptTypeError - req is Erroneous: $req") + issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(context, foundType, req))) + infer.explainTypes(foundType, req) + setError(tree) + } + // If the expected type is a refinement type, and the found type is a refinement or an anon // class, we can greatly improve the error message by retyping the tree to recover the actual // members present, then display along with the expected members. This is done here because // this is the last point where we still have access to the original tree, rather than just // the found/req types. - val foundType: Type = req.dealiasWiden match { + req.dealiasWiden match { case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => val retyped = typed (tree.duplicate.clearType()) - val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic && !sym.isErroneous) - if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found + val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) + if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) issueError(found) else { // The members arrive marked private, presumably because there was no // expected type and so they're considered members of an anon class. @@ -221,16 +276,13 @@ trait ContextErrors { // TODO: if any of the found parents match up with required parents after normalization, // print the error so that they match. The major beneficiary there would be // java.lang.Object vs. AnyRef. 
- refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos) + val refined = refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos) + // If the refinement type of an anonymous class is erroneous, the errors will be issued at its definition. + if (found.typeSymbol.isAnonymousClass && refined.isErroneous) tree else issueError(refined) } case _ => - found + issueError(found) } - assert(!foundType.isErroneous, s"AdaptTypeError - foundType is Erroneous: $foundType") - assert(!req.isErroneous, s"AdaptTypeError - req is Erroneous: $req") - - issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(context, foundType, req))) - infer.explainTypes(foundType, req) } def WithFilterError(tree: Tree, ex: AbsTypeError) = { @@ -244,11 +296,31 @@ trait ContextErrors { setError(templ) } + def AuxConstrInConstantAnnotation(constr: Tree, clazz: Symbol) = + issueNormalTypeError(constr, s"$clazz cannot have auxiliary constructors because it extends ConstantAnnotation") + + def ConstantAnnotationNeedsSingleArgumentList(constr: Tree, clazz: Symbol) = + issueNormalTypeError(constr, s"$clazz needs to have exactly one argument list because it extends ConstantAnnotation") + + + private def formatTraitWithParams(parent: Symbol, paramSyms: List[Symbol]): String = { + val params = paramSyms.map(param => s"${param.name}: ${param.info}").mkString("(", ", ", ")") + s"$parent$params" + } + // additional parentTypes errors - def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) = - issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments") + def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) = { + val msg = parent.attachments.get[DottyParameterisedTrait] match { + case Some(holder) => + val prettyParent = formatTraitWithParams(parent, holder.params) + s"$prettyParent is an illegal Scala 3 parameterized trait; so can not take constructor arguments" + case _ => s"$parent is a trait; does not take 
constructor arguments" - def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) = + } + issueNormalTypeError(arg, msg) + } + + def ConstrArgsInParentOfTraitError(arg: Tree, @unused parent: Symbol) = issueNormalTypeError(arg, "parents of traits may not have parameters") def MissingTypeArgumentsParentTpeError(supertpt: Tree) = @@ -258,13 +330,15 @@ trait ContextErrors { def AmbiguousIdentError(tree: Tree, name: Name, msg: String) = NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg) - def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = { - NormalTypeError(tree, "not found: "+decodeWithKind(name, owner)) + def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, inPattern: Boolean, hidden: Boolean) = { + val help = if (inPattern && name.isTermName) s"\nIdentifiers ${if (name.charAt(0).isUpper) "that begin with uppercase" else "enclosed in backticks"} are not pattern variables but match the value in scope." else "" + val path = if (hidden) s"\nA ${if (name.isTermName) "value" else "class"} on the class path is shadowed by a companion artifact currently compiled; companions must be compiled together." 
else "" + NormalTypeError(tree, s"not found: ${decodeWithKind(name, owner)}$help$path") } // typedAppliedTypeTree def AppliedTypeNoParametersError(tree: Tree, errTpe: Type) = { - issueNormalTypeError(tree, errTpe + " does not take type parameters") + issueNormalTypeError(tree, s"$errTpe does not take type parameters") setError(tree) } @@ -298,16 +372,13 @@ trait ContextErrors { issueNormalTypeError(param, "*-parameter must come last") def StarWithDefaultError(meth: Symbol) = - issueSymbolTypeError(meth, "a parameter section with a `*'-parameter is not allowed to have default arguments") + issueSymbolTypeError(meth, "a parameter section with a `*`-parameter is not allowed to have default arguments") def InvalidConstructorDefError(ddef: Tree) = issueNormalTypeError(ddef, "constructor definition not allowed here") - def ImplicitByNameError(param: Symbol) = - issueSymbolTypeError(param, "implicit parameters may not be call-by-name") - def DeprecatedParamNameError(param: Symbol, name: Name) = - issueSymbolTypeError(param, "deprecated parameter name "+ name +" has to be distinct from any other parameter name (deprecated or not).") + issueSymbolTypeError(param, s"deprecated parameter name $name has to be distinct from any other parameter name (deprecated or not).") // analyzeSuperConsructor def SuperConstrReferenceError(tree: Tree) = @@ -349,40 +420,96 @@ trait ContextErrors { //typedSuper def MixinMissingParentClassNameError(tree: Tree, mix: Name, clazz: Symbol) = - issueNormalTypeError(tree, mix+" does not name a parent class of "+clazz) + issueNormalTypeError(tree, s"$mix does not name a parent class of $clazz") def AmbiguousParentClassError(tree: Tree) = issueNormalTypeError(tree, "ambiguous parent class qualifier") - //typedSelect - def NotAMemberError(sel: Tree, qual: Tree, name: Name) = { - def errMsg = { + //typedSelect or checkSelector + def NotAMemberError(sel: Tree /*Select|Import*/, qual: Tree, name: Name, cx: Context) = { + import util.EditDistance, 
util.StringUtil.oxford + def errMsg: String = { + val editThreshold = 3 + val maxSuggestions = 4 + val owner = qual.tpe.typeSymbol val target = qual.tpe.widen def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else "" def nameString = decodeWithKind(name, owner) /* Illuminating some common situations and errors a bit further. */ def addendum = { - val companion = { - if (name.isTermName && owner.isPackageClass) { - target.member(name.toTypeName) match { - case NoSymbol => "" - case sym => "\nNote: %s exists, but it has no companion object.".format(sym) - } + @inline def orEmpty(cond: Boolean)(s: => String) = if (cond) s else "" + val companionSymbol: Symbol = { + if (name.isTermName && owner.isPackageClass) + target.member(name.toTypeName) + else NoSymbol + } + val companion = orEmpty(companionSymbol != NoSymbol)(s"note: $companionSymbol exists, but it has no companion object.") + // find out all the names available under target within ~2 edit distances + lazy val alternatives: List[(Int, String)] = { + val x = name.decode + // effectively suppress comparison ops, but if they say <= and there is >=, offer it + def isEncodedComparison(n: Name) = n match { + case nme.EQ | nme.NE | nme.LT | nme.GT | nme.LE | nme.GE => true + case _ => false + } + val nameIsComparison = isEncodedComparison(name) + if (context.openImplicits.nonEmpty || x.length < 2) Nil + else { + target.members.iterator + .filter(sym => sym.isTerm == name.isTermName && + !sym.isConstructor && + !nme.isLocalName(sym.name) && + isEncodedComparison(sym.name) == nameIsComparison && + sym.name != nme.EQ && sym.name != nme.NE && + cx.isAccessible(sym, target)) + .map(_.name.decode) + .filter { n => + math.abs(n.length - x.length) <= editThreshold && + n != x && + !n.contains("$") + } + .map(n => (EditDistance.levenshtein(n, x), n)) + .filter { case (d, n) => + val nset = n.endsWith("_=") + val xset = x.endsWith("_=") + val (n1, x1) = if (nset && xset) (n.dropRight(2), x.dropRight(2)) 
else (n, x) + def contained = x1.forall(c => n1.indexOf(c) >= 0) + !(nset ^ xset) && d <= editThreshold && (d <= n1.length/2 && d <= x1.length/2 || contained) + } + .toList.sorted } - else "" } - val semicolon = ( - if (linePrecedes(qual, sel)) - "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?" - else - "" - ) - val notAnyRef = ( - if (ObjectClass.info.member(name).exists) notAnyRefMessage(target) - else "" - ) - companion + notAnyRef + semicolon + val altStr: String = + orEmpty(companionSymbol == NoSymbol && alternatives.nonEmpty) { + val d0 = alternatives.head._1 + val (best0, rest0) = alternatives.span(_._1 == d0) + val best = best0.map(_._2).distinct + val rest = rest0.map(_._2).distinct + val more = (maxSuggestions - best.length) max 0 + val add1 = orEmpty(more > 0 && rest.nonEmpty)(s" or perhaps ${oxford(rest.take(more), "or")}?") + val add2 = orEmpty(best.length > maxSuggestions || rest.length > more)(" or...?") + s"did you mean ${oxford(best.take(maxSuggestions), "or")}?$add1$add2" + } + val semicolon = orEmpty(linePrecedes(qual, sel))(s"possible cause: maybe a semicolon is missing before `$nameString`?") + val notAnyRef = orEmpty(ObjectClass.info.member(name).exists)(notAnyRefMessage(target)) + val javaRules = orEmpty(owner.isClass && !owner.hasPackageFlag) { + owner.baseClasses.iterator.filter(bc => bc.ne(ObjectClass) && bc.isJavaDefined) + .map { bc => cx.javaFindMember(bc.info, name, _.isStaticMember) match { + case (_, NoSymbol) if name.isTermName => + cx.javaFindMember(bc.info, name.toTypeName, _.isStaticMember) + case res => res + } + } + .find(_._2 ne NoSymbol) match { + case Some((jtype, jmember)) => + val more = sm"""Static Java members belong to companion objects in Scala; + |they are not inherited, even by subclasses defined in Java.""" + s"did you mean ${jtype.typeSymbol.fullName}.${jmember.name}? 
$more" + case _ => "" + } + } + List(companion, altStr, notAnyRef, semicolon, javaRules).filter("" != _).map("\n" + _).mkString } def targetStr = targetKindString + target.directObjectString withAddendum(qual.pos)( @@ -390,14 +517,21 @@ trait ContextErrors { else s"$nameString is not a member of $targetStr$addendum" ) } - issueNormalTypeError(sel, errMsg) + sel match { + case tree: Import => // selector name is unique; use it to improve position + tree.selectors.find(_.hasName(name)) match { + case Some(badsel) => issueTypeError(PosAndMsgTypeError(tree.posOf(badsel), errMsg)) + case _ => issueNormalTypeError(sel, errMsg) + } + case _ => issueNormalTypeError(sel, errMsg) + } // the error has to be set for the copied tree, otherwise // the error remains persistent across multiple compilations // and causes problems //setError(sel) } - def SelectWithUnderlyingError(sel: Tree, err: AbsTypeError) = { + def SelectWithUnderlyingError(sel: Tree, err: AbsTypeError): sel.type = { // if there's no position, this is likely the result of a MissingRequirementError // use the position of the selection we failed to type check to report the original message if (err.errPos == NoPosition) issueNormalTypeError(sel, err.errMsg) @@ -407,25 +541,16 @@ trait ContextErrors { //typedNew def IsAbstractError(tree: Tree, sym: Symbol) = { - issueNormalTypeError(tree, sym + " is abstract; cannot be instantiated") + issueNormalTypeError(tree, s"$sym is abstract; cannot be instantiated") setError(tree) } - def DoesNotConformToSelfTypeError(tree: Tree, sym: Symbol, tpe0: Type) = { - issueNormalTypeError(tree, sym + " cannot be instantiated because it does not conform to its self-type " + tpe0) - setError(tree) + def DoesNotExtendAnnotation(tree: Tree, sym: Symbol) = { + NormalTypeError(tree, s"$sym does not extend ${AnnotationClass.fullName}") } - //typedEta - private def mkUnderscoreNullaryEtaMessage(what: String) = - s"Methods without a parameter list and by-name params can $what be converted to 
functions as `m _`, " + - "write a function literal `() => m` instead" - - final val UnderscoreNullaryEtaWarnMsg = mkUnderscoreNullaryEtaMessage("no longer") - final val UnderscoreNullaryEtaErrorMsg = mkUnderscoreNullaryEtaMessage("not") - - def UnderscoreNullaryEtaError(tree: Tree) = { - issueNormalTypeError(tree, UnderscoreNullaryEtaErrorMsg) + def DoesNotConformToSelfTypeError(tree: Tree, sym: Symbol, tpe0: Type) = { + issueNormalTypeError(tree, s"$sym cannot be instantiated because it does not conform to its self-type $tpe0") setError(tree) } @@ -441,7 +566,7 @@ trait ContextErrors { } def ReturnWithoutTypeError(tree: Tree, owner: Symbol) = { - issueNormalTypeError(tree, owner + " has return statement; needs result type") + issueNormalTypeError(tree, s"$owner has return statement; needs result type") setError(tree) } @@ -456,8 +581,8 @@ trait ContextErrors { issueNormalTypeError(tree, "_* may only come last") //typedFunction - def MaxFunctionArityError(fun: Tree) = { - issueNormalTypeError(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters") + def MaxFunctionArityError(fun: Tree, why: String) = { + issueNormalTypeError(fun, s"functions may not have more than ${definitions.MaxFunctionArity} parameters$why") setError(fun) } @@ -490,7 +615,7 @@ trait ContextErrors { "Expected type was: " + pt.toLongString) def ConstructorsOrderError(tree: Tree) = { - issueNormalTypeError(tree, "called constructor's definition must precede calling constructor's definition") + issueNormalTypeError(tree, "self constructor invocation must refer to a constructor definition which precedes it, to prevent infinite cycles") setError(tree) } @@ -512,7 +637,7 @@ trait ContextErrors { NormalTypeError(tree, "annotation argument cannot be null") def ArrayConstantsError(tree: Tree) = - NormalTypeError(tree, "Array constants have to be specified using the `Array(...)' factory method") + NormalTypeError(tree, "Array constants have to be specified using the 
`Array(...)` factory method") def ArrayConstantsTypeMismatchError(tree: Tree, pt: Type) = NormalTypeError(tree, "found array constant, expected argument of type " + pt) @@ -521,7 +646,7 @@ trait ContextErrors { NormalTypeError(tree, "expected annotation of type " + expected + ", found " + found) def MultipleArgumentListForAnnotationError(tree: Tree) = - NormalTypeError(tree, "multiple argument lists on classfile annotation") + NormalTypeError(tree, "multiple argument lists on Java annotation") def UnknownAnnotationNameError(tree: Tree, name: Name) = NormalTypeError(tree, "unknown annotation argument name: " + name) @@ -530,7 +655,7 @@ trait ContextErrors { NormalTypeError(tree, "duplicate value for annotation argument " + name) def ClassfileAnnotationsAsNamedArgsError(tree: Tree) = - NormalTypeError(tree, "classfile annotation arguments have to be supplied as named arguments") + NormalTypeError(tree, "arguments to Java annotations have to be supplied as named arguments") def AnnotationMissingArgError(tree: Tree, annType: Type, sym: Symbol) = NormalTypeError(tree, "annotation " + annType.typeSymbol.fullName + " is missing argument " + sym.name) @@ -560,10 +685,10 @@ trait ContextErrors { // doTypeApply //tryNamesDefaults - def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) = + def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, @unused fun: Tree) = NormalTypeError(tree, "macro applications do not support named and/or default arguments") - def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree, formals: List[Type], args: List[Tree], namelessArgs: List[Tree], argPos: Array[Int]) = { + def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree, formals: List[Type], args: List[Tree], argPos: Array[Int]) = { val expected = formals.size val supplied = args.size // pick a caret. 
For f(k=1,i=2,j=3), argPos[0,-1,1] b/c `k=1` taken as arg0 @@ -572,31 +697,20 @@ trait ContextErrors { if (i < 0) tree else args(i min (supplied - 1)) } val msg = { - val badappl = { - val excess = supplied - expected - val target = treeSymTypeMsg(fun) - - if (expected == 0) s"no arguments allowed for nullary $target" - else if (excess < 3 && expected <= 5) s"too many arguments ($supplied) for $target" - else if (expected > 10) s"$supplied arguments but expected $expected for $target" - else { - val more = - if (excess == 1) "one more argument" - else if (excess > 0) s"$excess more arguments" - else "too many arguments" - s"$more than can be applied to $target" - } - } - val unknowns = (namelessArgs zip args) collect { - case (_: Assign, AssignOrNamedArg(Ident(name), _)) => name + val target = treeSymTypeMsg(fun) + def isAutoTuplable = AnyRefTpe <:< (if (formals.head.typeSymbol.isTypeParameter) formals.head.upperBound else formals.head) + + expected match { + case 0 => + args match { + case (c @ Literal(Constant(()))) :: Nil if c.hasAttachment[SyntheticUnitAttachment.type] => + s"can't supply unit value with infix notation because nullary $target takes no arguments; use dotted invocation instead: ${show(treeCopy.Apply(tree, fun, Nil))}" + case _ => s"no arguments allowed for nullary $target" + } + case 1 if isTupleType(formals.head) => s"too many arguments (found $supplied, expected ${formals.head.typeArgs.size}-tuple) for $target" + case 1 if supplied > MaxTupleArity && isAutoTuplable => s"too many arguments (found $supplied, which exceeds the largest Tuple) for $target" + case _ => s"too many arguments (found $supplied, expected $expected) for $target" } - val suppl = - unknowns.size match { - case 0 => "" - case 1 => s"\nNote that '${unknowns.head}' is not a parameter name of the invoked method." 
- case _ => unknowns.mkString("\nNote that '", "', '", "' are not parameter names of the invoked method.") - } - s"${badappl}${suppl}" } NormalTypeError(excessive, msg) } @@ -617,11 +731,10 @@ trait ContextErrors { def NotEnoughArgsError(tree: Tree, fun: Tree, missing: List[Symbol]) = { val notEnoughArgumentsMsg = { val suffix = if (missing.isEmpty) "" else { - val keep = missing take 3 map (_.name) + val keep = missing.take(3).map(_.name) val ess = if (missing.tail.isEmpty) "" else "s" - f".%nUnspecified value parameter$ess ${ - keep.mkString("", ", ", if ((missing drop 3).nonEmpty) "..." else ".") - }" + val dots = if (missing.drop(3).nonEmpty) "..." else "." + keep.mkString(s".\nUnspecified value parameter$ess ", ", ", dots) } s"not enough arguments for ${ treeSymTypeMsg(fun) }$suffix" } @@ -642,11 +755,11 @@ trait ContextErrors { NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun)) def ApplyWithoutArgsError(tree: Tree, fun: Tree) = - NormalTypeError(tree, fun.tpe+" does not take parameters") + NormalTypeError(tree, s"${fun.tpe} does not take parameters") // Dynamic def DynamicVarArgUnsupported(tree: Tree, name: Name) = { - issueNormalTypeError(tree, name + " does not support passing a vararg parameter") + issueNormalTypeError(tree, s"$name does not support passing a vararg parameter") setError(tree) } @@ -657,12 +770,12 @@ trait ContextErrors { } //checkClassType - def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = { + def TypeNotAStablePrefixError(tpt: Tree, pre: Type): tpt.type = { issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix") setError(tpt) } - def ClassTypeRequiredError(tree: Tree, found: AnyRef) = { + def ClassTypeRequiredError(tree: Tree, found: AnyRef): tree.type = { issueNormalTypeError(tree, "class type required but "+found+" found") setError(tree) } @@ -674,11 +787,22 @@ trait ContextErrors { "\n is not a subclass of the super"+parentSym+ "\n of the mixin " + mixin) + def ParentIsScala3TraitError(parent: 
Tree, + parentSym: Symbol, params: List[Symbol], mixin: Symbol) = { + val parentWithCtor = formatTraitWithParams(parentSym, params) + val mixinMsg = { + if (mixin eq parentSym) " parameterized mixin "+mixin + else " parameterized super"+parentSym+"\n of the mixin "+mixin + } + NormalTypeError(parent, "illegal inheritance;"+mixinMsg+ + "\n is defined in Scala 3 as " + parentWithCtor) + } + def ParentNotATraitMixinError(parent: Tree, mixin: Symbol) = - NormalTypeError(parent, mixin+" needs to be a trait to be mixed in") + NormalTypeError(parent, s"$mixin needs to be a trait to be mixed in") def ParentFinalInheritanceError(parent: Tree, mixin: Symbol) = - NormalTypeError(parent, "illegal inheritance from final "+mixin) + NormalTypeError(parent, s"illegal inheritance from final $mixin") def ParentSelfTypeConformanceError(parent: Tree, selfType: Type) = NormalTypeError(parent, @@ -686,47 +810,85 @@ trait ContextErrors { parent +"'s selftype "+parent.tpe.typeOfThis) def ParentInheritedTwiceError(parent: Tree, parentSym: Symbol) = - NormalTypeError(parent, parentSym+" is inherited twice") + NormalTypeError(parent, s"$parentSym is inherited twice") //adapt def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = { val f = meth.name.decoded - val paf = s"$f(${ meth.asMethod.paramLists map (_ map (_ => "_") mkString ",") mkString ")(" })" - val advice = s""" - |Unapplied methods are only converted to functions when a function type is expected. 
- |You can make this conversion explicit by writing `$f _` or `$paf` instead of `$f`.""".stripMargin + val paf = s"$f(${ meth.asMethod.paramLists.map(_.map(_ => "_").mkString(",")).mkString(")(") })" + val feature = if (!currentRun.isScala3) "" else + sm"""| + |Use -Xsource-features:eta-expand-always to convert even if the expected type is not a function type.""" + val advice = + if (meth.isConstructor || meth.info.params.lengthIs > definitions.MaxFunctionArity) "" + else + sm"""| + |Unapplied methods are only converted to functions when a function type is expected.$feature + |You can make this conversion explicit by writing `$f _` or `$paf` instead of `$f`.""" + val help = { + def memberTypesOf(qualTpe: Type, name: Name): List[Type] = { + val m = qualTpe.member(name) + if (m.isOverloaded) + m.alternatives.map(qualTpe.memberType(_)) + else + List(qualTpe.memberType(meth)) + } + val (qualTpe, memberTypes) = tree match { + case Select(qualifier, name) => (qualifier.tpe, memberTypesOf(qualifier.tpe, name)) + case treeInfo.Applied(Select(qualifier, name), _, _) => (qualifier.tpe, memberTypesOf(qualifier.tpe, name)) + case _ => (NoType, Nil) + } + memberTypes match { + case tp :: Nil => s" of type $tp" + case Nil => s" of type ${meth.info}" + case ov => + sm"""| + |with overloaded members in ${qualTpe.dealiasWiden} + | ${ov.map(show(_)).sorted.mkString("\n ")}""" + } + } val message = if (meth.isMacro) MacroTooFewArgumentListsMessage - else s"""missing argument list for ${meth.fullLocationString}${ - if (!meth.isConstructor) advice else "" - }""" + else s"""missing argument list for ${meth.fullLocationString}${help}${advice}""" issueNormalTypeError(tree, message) setError(tree) } def MissingTypeParametersError(tree: Tree) = { - issueNormalTypeError(tree, tree.symbol+" takes type parameters") + issueNormalTypeError(tree, s"${tree.symbol} takes type parameters") setError(tree) } def KindArityMismatchError(tree: Tree, pt: Type) = { issueNormalTypeError(tree, - tree.tpe+" 
takes "+countElementsAsString(tree.tpe.typeParams.length, "type parameter")+ - ", expected: "+countAsString(pt.typeParams.length)) + s"${tree.tpe} takes ${countElementsAsString(tree.tpe.typeParams.length, "type parameter")}, expected: ${countAsString(pt.typeParams.length)}") setError(tree) } def CaseClassConstructorError(tree: Tree, baseMessage: String) = { - val addendum = directUnapplyMember(tree.symbol.info) match { - case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list" - case _ => "" + import UnapplyMemberResult._ + val addendum = { + def contextualize(sym: Symbol, because: String) = + s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor$because" + val sym = directUnapplyMember(tree.symbol.info) + validateUnapplyMember(sym.info) match { + case NoParams => + contextualize(sym, ": an unapply method must accept a single argument") + case MultiParams => + contextualize(sym, " as it has more than one (non-implicit) parameter") + case MultiParamss => + contextualize(sym, " due to its second non-implicit parameter list") + case VarArgs => + contextualize(sym, " since it is a varargs method") + case _ => "" + } } issueNormalTypeError(tree, baseMessage + addendum) setError(tree) } def ConstructorPrefixError(tree: Tree, restpe: Type) = { - issueNormalTypeError(tree, restpe.prefix+" is not a legal prefix for a constructor") + issueNormalTypeError(tree, s"${restpe.prefix} is not a legal prefix for a constructor") setError(tree) } @@ -756,8 +918,6 @@ trait ContextErrors { } // cases where we do not necessarily return trees - def DependentMethodTpeConversionToFunctionError(tree: Tree, tp: Type) = - issueNormalTypeError(tree, "method with dependent type "+tp+" cannot be converted to function value") //checkStarPatOK def StarPatternWithVarargParametersError(tree: Tree) = @@ -766,16 +926,14 @@ trait 
ContextErrors { def FinitaryError(tparam: Symbol) = issueSymbolTypeError(tparam, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive") - def QualifyingClassError(tree: Tree, qual: Name) = { + def QualifyingClassError(tree: Tree, qual: Name) = issueNormalTypeError(tree, - if (qual.isEmpty) tree + " can be used only in a class, object, or template" - else qual + " is not an enclosing class") - setError(tree) - } + if (qual.isEmpty) s"$tree can be used only in a class, object, or template" + else s"$qual is not an enclosing class") // def stabilize def NotAValueError(tree: Tree, sym: Symbol) = { - issueNormalTypeError(tree, sym.kindString + " " + sym.fullName + " is not a value") + issueNormalTypeError(tree, s"${sym.kindString} ${sym.fullName} is not a value") setError(tree) } @@ -795,12 +953,10 @@ trait ContextErrors { } // cyclic errors + def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) = issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0)) - def CyclicReferenceError(errPos: Position, tp: Type, lockedSym: Symbol) = - issueTypeError(PosAndMsgTypeError(errPos, s"illegal cyclic reference involving $tp and $lockedSym")) - // macro-related errors (also see MacroErrors below) def MacroEtaError(tree: Tree) = { @@ -813,7 +969,7 @@ trait ContextErrors { } - case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable + case object MacroExpansionException extends ControlThrowable protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = { def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg @@ -886,15 +1042,12 @@ trait ContextErrors { } def MacroFreeSymbolError(expandee: Tree, sym: FreeSymbol) = { - def template(kind: String) = ( - s"Macro expansion contains free $kind variable %s. Have you forgotten to use %s? 
" - + s"If you have troubles tracking free $kind variables, consider using -Xlog-free-${kind}s" - ) - val forgotten = ( - if (sym.isTerm) "splice when splicing this variable into a reifee" - else "c.WeakTypeTag annotation for this type parameter" + val kind = sym.name.nameKind + val name = s"${sym.name} ${sym.origin}" + val forgotten = if (sym.isTerm) "splice when splicing this variable into a reifee" else "c.WeakTypeTag annotation for this type parameter" + macroExpansionError(expandee, + s"Macro expansion contains free $kind variable $name. Have you forgotten to use $forgotten? If you have troubles tracking free $kind variables, consider using -Xlog-free-${kind}s" ) - macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten)) } def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = { @@ -907,12 +1060,73 @@ trait ContextErrors { s"macro must return a compiler-specific $expected; returned value is " + ( if (expanded == null) "null" else if (isPathMismatch) s"$actual, but it doesn't belong to this compiler's universe" - else "of " + expanded.getClass + else s"of ${expanded.getClass}" )) } def MacroImplementationNotFoundError(expandee: Tree) = macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name)) + + def MacroAnnotationShapeError(clazz: Symbol) = { + val sym = clazz.info.member(nme.macroTransform) + var actualSignature = sym.toString + if (sym.isOverloaded) actualSignature += "(...) = ..." 
+ else if (sym.isMethod) { + if (sym.typeParams.nonEmpty) { + def showTparam(tparam: Symbol) = + tparam.typeSignature match { + case tpe @ TypeBounds(_, _) => s"${tparam.name}$tpe" + case _ => tparam.name + } + def showTparams(tparams: List[Symbol]) = "[" + (tparams map showTparam mkString ", ") + "]" + actualSignature += showTparams(sym.typeParams) + } + if (sym.paramss.nonEmpty) { + def showParam(param: Symbol) = s"${param.name}: ${param.typeSignature}" + def showParams(params: List[Symbol]) = { + val s_mods = if (params.nonEmpty && params(0).hasFlag(scala.reflect.internal.Flags.IMPLICIT)) "implicit " else "" + val s_params = params map showParam mkString ", " + "(" + s_mods + s_params + ")" + } + def showParamss(paramss: List[List[Symbol]]) = paramss map showParams mkString "" + actualSignature += showParamss(sym.paramss) + } + if (sym.isTermMacro) actualSignature = actualSignature.replace("macro method", "def") + " = macro ..." + else actualSignature = actualSignature.replace("method", "def") + " = ..." + } + issueSymbolTypeError(clazz, s""" + |macro annotation has wrong shape: + | required: def macroTransform(annottees: Any*) = macro ... 
+ | found : $actualSignature + """.trim.stripMargin) + } + + def MacroAnnotationMustBeStaticError(clazz: Symbol) = + issueSymbolTypeError(clazz, "macro annotation must extend scala.annotation.StaticAnnotation") + + def MacroAnnotationCannotBeInheritedError(clazz: Symbol) = + issueSymbolTypeError(clazz, "macro annotation cannot be @Inherited") + + def MacroAnnotationCannotBeMemberError(clazz: Symbol) = + issueSymbolTypeError(clazz, "macro annotation cannot be a member of another class") + + def MacroAnnotationNotExpandedMessage: String = + "macro annotation could not be expanded (since these are experimental, you must enable them with -Ymacro-annotations)" + + def MacroAnnotationOnlyDefinitionError(ann: Tree) = + issueNormalTypeError(ann, "macro annotations can only be put on definitions") + + def MacroAnnotationTopLevelClassWithCompanionBadExpansion(ann: Tree) = + issueNormalTypeError(ann, "top-level class with companion can only expand into a block consisting in eponymous companions") + + def MacroAnnotationTopLevelClassWithoutCompanionBadExpansion(ann: Tree) = + issueNormalTypeError(ann, "top-level class without companion can only expand either into an eponymous class or into a block consisting in eponymous companions") + + def MacroAnnotationTopLevelModuleBadExpansion(ann: Tree) = + issueNormalTypeError(ann, "top-level object can only expand into an eponymous object") + + def InferredImplicitError(tree: Tree, inferred: Type, cx: Context): Unit = + InferredImplicitErrorImpl(tree, inferred, cx, isTyper = true) } /** This file will be the death of me. 
*/ @@ -926,7 +1140,9 @@ trait ContextErrors { self: Inferencer => private def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = { - def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")") + def asParams(xs: List[Any]) = + if (xs.isEmpty && tree.symbol.isConstructor) "no arguments" + else xs.mkString("(", ", ", ")") def resType = if (pt.isWildcard) "" else " with expected result type " + pt def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt @@ -939,7 +1155,7 @@ trait ContextErrors { object InferErrorGen { - implicit val contextInferErrorGen = getContext + implicit val contextInferErrorGen: Context = getContext object PolyAlternativeErrorKind extends Enumeration { type ErrorType = Value @@ -958,29 +1174,31 @@ trait ContextErrors { val ambiguousBuffered = !context.ambiguousErrors if (validTargets || ambiguousBuffered) context.issueAmbiguousError( - if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) { - val methodName = nme.defaultGetterToMethod(sym1.name) + if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) AmbiguousTypeError(sym1.enclClass.pos, - s"in ${sym1.enclClass}, multiple overloaded alternatives of $methodName define default arguments") - - } else { + s"in ${sym1.enclClass}, multiple overloaded alternatives of ${ + nme.defaultGetterToMethod(sym1.name) + } define default arguments" + ) + else AmbiguousTypeError(pos, - "ambiguous reference to overloaded definition,\n" + - s"both ${sym1.fullLocationString} of type ${pre.memberType(sym1)}\n" + - s"and ${sym2.fullLocationString} of type ${pre.memberType(sym2)}\n" + - s"match $rest") - }) + sm"""|ambiguous reference to overloaded definition, + |both ${sym1.fullLocationString} of type ${pre.memberType(sym1)} + |and ${sym2.fullLocationString} of type ${pre.memberType(sym2)} + |match $rest""" + ) + ) } def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError = AccessError(tree, sym, 
ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation) def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String): AbsTypeError = { - def errMsg = { - val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString + val errMsg = { + val location = if (sym.isClassConstructor) s"in $owner0" else s"as a member of ${pre.widen.directObjectString}" + val from = s" from ${owner0.fullLocationString}" - underlyingSymbol(sym).fullLocationString + " cannot be accessed in " + - location + explanation + underlyingSymbol(sym).fullLocationString + " cannot be accessed " + location + from + explanation } AccessTypeError(tree, errMsg) } @@ -992,7 +1210,7 @@ trait ContextErrors { "\n --- because ---\n" + msg) // TODO: no test case - def NoConstructorInstanceError(tree: Tree, restpe: Type, pt: Type, msg: String) = { + def NoConstructorInstanceError(tree: Tree, restpe: Type, pt: Type, msg: String): Unit = { issueNormalTypeError(tree, "constructor of type " + restpe + " cannot be uniquely instantiated to expected type " + pt + @@ -1009,9 +1227,44 @@ trait ContextErrors { // side-effect on the tree, break the overloaded type cycle in infer private def setErrorOnLastTry(lastTry: Boolean, tree: Tree) = if (lastTry) setError(tree) + def widenArgs(argtpes: List[Type], params0: List[Symbol], params1: List[Symbol]): List[Type] = + argtpes.zipWithIndex map { + case (nt@NamedType(name, tp), _) => // a named argument + (tp, params0.find(_.name == name).map(_.tpe), params1.find(_.name == name).map(_.tpe)) match { + case (ConstantType(_), Some(ConstantType(_)), _) => nt + case (ConstantType(_), _, Some(ConstantType(_))) => nt + case (ct: ConstantType, _, _) => NamedType(name, ct.widen) + case _ => nt + } + case (ct: ConstantType, pos) => + (params0.lift(pos).map(_.tpe), params1.lift(pos).map(_.tpe)) match { + case (Some(ConstantType(_)), _) => ct + case (_, Some(ConstantType(_))) => ct + case _ => ct.widen + } + case (tpe, _) => tpe + } + 
+ def NoMatchingAlternative(tree: Tree, alts: List[Symbol], argTpes: List[Type], pt: Type) = { + val msg = " does not match arguments " + issueNormalTypeError(tree, applyErrorMsg(tree, msg, argTpes, pt)) + } + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type, lastTry: Boolean) = { + val alts = alternatives(tree) + val widenedArgtpes = widenArgs(argtpes, alts.head.params, alts.tail.head.params) + val proscription = + if (tree.symbol.isConstructor) " cannot be invoked with " + else " cannot be applied to " + val junkNames = { + val bads = argtpes.collect { + case NamedType(name, _) if !alts.exists(cond(_) { case MethodType(params, _) => params.exists(_.name == name) }) => name.decoded + } + if (bads.isEmpty) "" else bads.mkString(" [which have no such parameter ", ",", "]") + } + issueNormalTypeError(tree, - applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) + applyErrorMsg(tree, junkNames + proscription, widenedArgtpes, pt)) // since inferMethodAlternative modifies the state of the tree // we have to set the type of tree to ErrorType only in the very last // fallback action that is done in the inference. 
@@ -1019,19 +1272,17 @@ trait ContextErrors { setErrorOnLastTry(lastTry, tree) } + // If erroneous, do not even try further attempts because they should all fail + // even if this is not the last attempt (because of the SO lurking beyond the horizon) def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, - firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = { - - if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) { - val msg0 = - "argument types " + argtpes.mkString("(", ",", ")") + - (if (pt == WildcardType) "" else " and expected result type " + pt) + firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = + if (argtpes.exists(_.isErroneous) || pt.isErroneous) setError(tree) else { + def paramsOrEmpty(f: Symbol) = if (f.isMethod) f.asMethod.tpe.params else Nil + val widenedArgtpes = widenArgs(argtpes, paramsOrEmpty(best), paramsOrEmpty(firstCompeting)) + val msg0 = widenedArgtpes.mkString("argument types (", ",", if (pt == WildcardType) ")" else s") and expected result type $pt") issueAmbiguousTypeErrorUnlessErroneous(tree.pos, pre, best, firstCompeting, msg0) setErrorOnLastTry(lastTry, tree) - } else setError(tree) // do not even try further attempts because they should all fail - // even if this is not the last attempt (because of the SO's possibility on the horizon) - - } + } def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = { issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(context, tree.symbol.tpe, pt))) @@ -1053,22 +1304,22 @@ trait ContextErrors { kindErrors.toList.mkString("\n", ", ", "")) } - private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = { + private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean): String = { if (explaintypes) { - val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, 
targs).bounds) + val bounds = tparams.map(_.info.instantiateTypeParams(tparams, targs).bounds) foreach2(targs, bounds)((targ, bound) => explainTypes(bound.lo, targ)) foreach2(targs, bounds)((targ, bound) => explainTypes(targ, bound.hi)) } + def bracketed(items: List[_]) = items.mkString("[", ",", "]") + val bounds = tparams.headOption.map(h => s"${h.owner}'s type parameter bounds ${bracketed(tparams.map(_.defString))}").getOrElse("empty type parameter list") - prefix + "type arguments " + targs.mkString("[", ",", "]") + - " do not conform to " + tparams.head.owner + "'s type parameter bounds " + - (tparams map (_.defString)).mkString("[", ",", "]") + s"${prefix}type arguments ${bracketed(targs)} do not conform to ${bounds}" } def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type], - tparams: List[Symbol], kindErrors: List[String]) = + tparams: List[Symbol], @unused kindErrors: List[String]) = issueNormalTypeError(tree, - NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes)) + NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value)) //substExpr def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) = @@ -1103,6 +1354,7 @@ trait ContextErrors { "type arguments " + argtypes.mkString("[", ",", "]") + " conform to the bounds of none of the overloaded alternatives of\n "+sym+ ": "+sym.info + case x => throw new MatchError(x) } issueNormalTypeError(tree, msg) () @@ -1115,7 +1367,7 @@ trait ContextErrors { object NamerErrorGen { - implicit val contextNamerErrorGen = context + implicit val contextNamerErrorGen: Context = context object SymValidateErrors extends Enumeration { val ImplicitConstr, ImplicitNotTermOrClass, ImplicitAtToplevel, @@ -1124,17 +1376,12 @@ trait ContextErrors { ByNameParameter, AbstractVar = Value } - object DuplicatesErrorKinds extends Enumeration { - val RenamedTwice, AppearsTwice = Value - } - import SymValidateErrors._ - import DuplicatesErrorKinds._ 
import symtab.Flags def TypeSigError(tree: Tree, ex: TypeError) = { ex match { - case CyclicReference(_, _) if tree.symbol.isTermMacro => + case CyclicReference(_, _, _) if tree.symbol.isTermMacro => // say, we have a macro def `foo` and its macro impl `impl` // if impl: 1) omits return type, 2) has anything implicit in its body, 3) sees foo // @@ -1147,19 +1394,18 @@ trait ContextErrors { // hence we (together with reportTypeError in TypeDiagnostics) make sure that this CyclicReference // evades all the handlers on its way and successfully reaches `isCyclicOrErroneous` in Implicits throw ex - case c @ CyclicReference(sym, info: TypeCompleter) => - val error = new NormalTypeErrorFromCyclicReference(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage) - issueTypeError(error) + case CyclicReference(sym, info: TypeCompleter, trace) => + issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree, trace, tree.pos).getOrElse(ex.getMessage)) case _ => contextNamerErrorGen.issue(TypeErrorWithUnderlyingTree(tree, ex)) } } def GetterDefinedTwiceError(getter: Symbol) = - issueSymbolTypeError(getter, getter+" is defined twice") + issueSymbolTypeError(getter, s"$getter is defined twice") def ValOrVarWithSetterSuffixError(tree: Tree) = - issueNormalTypeError(tree, "Names of vals or vars may not end in `_='") + issueNormalTypeError(tree, "Names of vals or vars may not end in `_=`") def PrivateThisCaseClassParameterError(tree: Tree) = issueNormalTypeError(tree, "private[this] not allowed for case class parameters") @@ -1168,10 +1414,10 @@ trait ContextErrors { issueNormalTypeError(tree, "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import") def BeanPropertyAnnotationFieldWithoutLetterError(tree: Tree) = - issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to fields that start with a letter") + issueNormalTypeError(tree, "`BeanProperty` annotation can be applied only to 
fields that start with a letter") def BeanPropertyAnnotationPrivateFieldError(tree: Tree) = - issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to non-private fields") + issueNormalTypeError(tree, "`BeanProperty` annotation can be applied only to non-private fields") def DoubleDefError(currentSym: Symbol, prevSym: Symbol) = { val s1 = if (prevSym.isModule) "case class companion " else "" @@ -1179,10 +1425,10 @@ trait ContextErrors { val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym val where = if (currentSym.isTopLevel != prevSym.isTopLevel) { val inOrOut = if (prevSym.isTopLevel) "outside of" else "in" - " %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name) + s" $inOrOut package object ${prevSym.effectiveOwner.name}" } else "" - issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3 + where) + issueSymbolTypeError(currentSym, s"${prevSym.name} is already defined as $s2$s3$where") } def MissingParameterOrValTypeError(vparam: Tree) = @@ -1191,76 +1437,42 @@ trait ContextErrors { def ParentSealedInheritanceError(parent: Tree, psym: Symbol) = NormalTypeError(parent, "illegal inheritance from sealed " + psym ) - def RootImportError(tree: Tree) = - issueNormalTypeError(tree, "_root_ cannot be imported") - - def SymbolValidationError(sym: Symbol, errKind: SymValidateErrors.Value) { + def SymbolValidationError(sym: Symbol, errKind: SymValidateErrors.Value): Unit = { val msg = errKind match { - case ImplicitConstr => - "`implicit' modifier not allowed for constructors" - - case ImplicitNotTermOrClass => - "`implicit' modifier can be used only for values, variables, methods and classes" - - case ImplicitAtToplevel => - "`implicit' modifier cannot be used for top-level objects" - - case OverrideClass => - "`override' modifier not allowed for classes" - - case SealedNonClass => - "`sealed' modifier can be used only for classes" - - case AbstractNonClass => - "`abstract' modifier 
can be used only for classes; it should be omitted for abstract members" - - case OverrideConstr => - "`override' modifier not allowed for constructors" - - case AbstractOverride => - "`abstract override' modifier only allowed for members of traits" - - case AbstractOverrideOnTypeMember => - "`abstract override' modifier not allowed for type members" - - case LazyAndEarlyInit => - "`lazy' definitions may not be initialized early" - - case ByNameParameter => - "pass-by-name arguments not allowed for case class parameters" - - case AbstractVar => - "only traits and abstract classes can have declared but undefined members" + abstractVarMessage(sym) - + case ImplicitConstr => "`implicit` modifier not allowed for constructors" + case ImplicitNotTermOrClass => "`implicit` modifier can be used only for values, variables, methods and classes" + case ImplicitAtToplevel => "`implicit` modifier cannot be used for top-level objects" + case OverrideClass => "`override` modifier not allowed for classes" + case SealedNonClass => "`sealed` modifier can be used only for classes" + case AbstractNonClass => "`abstract` modifier can be used only for classes; it should be omitted for abstract members" + case OverrideConstr => "`override` modifier not allowed for constructors" + case AbstractOverride => "`abstract override` modifier only allowed for members of traits" + case AbstractOverrideOnTypeMember => "`abstract override` modifier not allowed for type members" + case LazyAndEarlyInit => "`lazy` definitions may not be initialized early" + case ByNameParameter => "pass-by-name arguments not allowed for case class parameters" + case AbstractVar => "only traits and abstract classes can have declared but undefined members" + abstractVarMessage(sym) + case x => throw new MatchError(x) } issueSymbolTypeError(sym, msg) } - def AbstractMemberWithModiferError(sym: Symbol, flag: Int) = - issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag.toLong) + " 
modifier") + def AbstractMemberWithModiferError(sym: Symbol, flag: Long) = + issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier") - def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) = + def IllegalModifierCombination(sym: Symbol, flag1: Long, flag2: Long) = issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format( - Flags.flagsToString(flag1.toLong), Flags.flagsToString(flag2.toLong), sym)) + Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym)) def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = { val errorAddendum = ": parameter may only be referenced in a subsequent parameter section" issueSymbolTypeError(sym, "illegal dependent method type" + errorAddendum)(context) } - - def DuplicatesError(tree: Tree, name: Name, kind: DuplicatesErrorKinds.Value) = { - val msg = kind match { - case RenamedTwice => - "is renamed twice" - case AppearsTwice => - "appears twice as a target of a renaming" - } - - issueNormalTypeError(tree, name.decode + " " + msg) - } } + + def InferredImplicitError(tree: Tree, inferred: Type, cx: Context): Unit = + InferredImplicitErrorImpl(tree, inferred, cx, isTyper = false) } trait ImplicitsContextErrors { @@ -1278,7 +1490,7 @@ trait ContextErrors { | $pre2 ${info2.sym.fullLocationString} of type ${info2.tpe} | $trailer""" def viewMsg = { - val found :: req :: _ = pt.typeArgs + val found :: req :: _ = pt.typeArgs: @unchecked def explanation = { val sym = found.typeSymbol // Explain some common situations a bit more clearly. Some other @@ -1306,16 +1518,18 @@ trait ContextErrors { ) } + // Note that treeInfo.Applied always matches, it just returns Nil when no application was found... 
def treeTypeArgs(annotatedTree: Tree): List[String] = annotatedTree match { - case TypeApply(_, args) => args.map(_.toString) case Block(_, Function(_, treeInfo.Applied(_, targs, _))) => targs.map(_.toString) // eta expansion, see neg/t9527b.scala + case Function(_, treeInfo.Applied(_, targs, _)) => targs.map(_.toString) // eta expansion, see neg/t9527b.scala + case treeInfo.Applied(_, targs, _) => targs.map(_.toString) case _ => Nil } context0.issueAmbiguousError(AmbiguousImplicitTypeError(tree, (info1.sym, info2.sym) match { - case (ImplicitAmbiguousMsg(msg), _) => msg.format(treeTypeArgs(tree1)) - case (_, ImplicitAmbiguousMsg(msg)) => msg.format(treeTypeArgs(tree2)) + case (ImplicitAmbiguousMsg(msg), _) => msg.formatDefSiteMessage(treeTypeArgs(tree1)) + case (_, ImplicitAmbiguousMsg(msg)) => msg.formatDefSiteMessage(treeTypeArgs(tree2)) case (_, _) if isView => viewMsg case (_, _) => s"ambiguous implicit values:\n${coreMsg}match expected type $pt" } @@ -1341,24 +1555,9 @@ trait ContextErrors { issueSymbolTypeError(sym, errMsg) } - def AmbiguousReferenceInNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = { - if (!arg.isErroneous) { // check if name clash wasn't reported already - issueNormalTypeError(arg, - "reference to "+ name +" is ambiguous; it is both a method parameter "+ - "and a variable in scope.") - setError(arg) - } else arg - } - - def WarnAfterNonSilentRecursiveInference(param: Symbol, arg: Tree)(implicit context: Context) = { - val note = "failed to determine if '"+ param.name + " = ...' is a named argument or an assignment expression.\n"+ - "an explicit type is required for the definition mentioned in the error message above." 
- context.warning(arg.pos, note, WarningCategory.Other) - } - - def UnknownParameterNameNamesDefaultError(arg: Tree, name: Name, isVariableInScope: Boolean)(implicit context: Context) = { + def UnknownParameterNameNamesDefaultError(arg: Tree, name: Name, warnVariableInScope: Boolean)(implicit context: Context) = { val suffix = - if (isVariableInScope) + if (warnVariableInScope) s"\nNote that assignments in argument position are no longer allowed since Scala 2.13.\nTo express the assignment expression, wrap it in brackets, e.g., `{ $name = ... }`." else "" issueNormalTypeError(arg, s"unknown parameter name: $name$suffix") diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 629765cf7606..854498ee0a61 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,19 +14,19 @@ package scala.tools.nsc package typechecker import scala.annotation.tailrec -import scala.collection.{immutable, mutable} -import scala.reflect.internal.util.{ReusableInstance, shortClassOfInstance, SomeOfNil} +import scala.collection.mutable +import scala.reflect.internal.util.{CodeAction, ReusableInstance, shortClassOfInstance, ListOfNil, SomeOfNil} import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ /** * @author Martin Odersky - * @version 1.0 */ -trait Contexts { self: Analyzer => +trait Contexts { self: Analyzer with ImportTracking => import global._ - import definitions.{ JavaLangPackage, ScalaPackage, PredefModule, ScalaXmlTopScope, ScalaXmlPackage } - + import definitions.{JavaLangPackage, ScalaPackage, PredefModule, ScalaXmlTopScope, ScalaXmlPackage} import ContextMode._ + import scala.reflect.internal.Flags._ protected def onTreeCheckerError(pos: Position, msg: String): Unit = () @@ -37,9 +37,7 @@ trait Contexts { self: Analyzer => enclClass = this enclMethod = this - override def nextEnclosing(p: Context => Boolean): Context = this override def enclosingContextChain: List[Context] = Nil - override def implicitss: List[List[ImplicitInfo]] = Nil override def imports: List[ImportInfo] = Nil override def firstImport: Option[ImportInfo] = None override def toString = "NoContext" @@ -52,86 +50,83 @@ trait Contexts { self: Analyzer => } private lazy val NoJavaMemberFound = (NoType, NoSymbol) - def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) = - LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2") - def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) = - LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp") - def ambiguousWithEnclosing(outer: Symbol, inherited: Symbol, currentClass: Symbol) = - if (!currentRun.isScala213 || !outer.exists || inherited.isImplicit) None else { - val outer1 = outer.alternatives.head - val inherited1 = 
inherited.alternatives.head - val classDesc = if (currentClass.isAnonymousClass) "anonymous class" else currentClass.toString - val parent = currentClass.parentSymbols.find(_.isNonBottomSubClass(inherited1.owner)).getOrElse(NoSymbol) - val inherit = if (parent.exists && parent != inherited1.owner) s", inherited through parent $parent" else "" - val message = - s"""it is both defined in the enclosing ${outer1.owner} and inherited in the enclosing $classDesc as $inherited1 (defined in ${inherited1.ownsString}$inherit) - |In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. - |Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.${outer1.name}`.""".stripMargin - if (currentRun.isScala3) - Some(LookupAmbiguous(message)) - else { - // passing the message to `typedIdent` as attachment, we don't have the position here to report the warning - inherited.updateAttachment( - LookupAmbiguityWarning( - s"""reference to ${outer1.name} is ambiguous; - |$message - |Or use `-Wconf:msg=legacy-binding:s` to silence this warning.""".stripMargin)) - None - } - } private lazy val startContext = NoContext.make( Template(List(), noSelfType, List()) setSymbol global.NoSymbol setType global.NoType, rootMirror.RootClass, rootMirror.RootClass.info.decls ) - private lazy val allUsedSelectors = - mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set() - private lazy val allImportInfos = - mutable.Map[CompilationUnit, List[(ImportInfo, Symbol)]]() withDefaultValue Nil - - def warnUnusedImports(unit: CompilationUnit) = if (!unit.isJava) { - for (imps <- allImportInfos.remove(unit)) { - for ((imp, owner) <- imps.distinct.reverse) { - val used = allUsedSelectors(imp) - for (sel <- imp.tree.selectors if !isMaskImport(sel) && !used(sel)) - runReporting.warning(imp.posOf(sel), "Unused import", WarningCategory.UnusedImports, site = owner) - } - allUsedSelectors --= imps.iterator.map(_._1) - } - } + var 
lastAccessCheckDetails: String = "" - def isMaskImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename == nme.WILDCARD - def isIndividualImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename != nme.WILDCARD - def isWildcardImport(s: ImportSelector): Boolean = s.name == nme.WILDCARD + val rootImportsCached = perRunCaches.newMap[CompilationUnit, List[Symbol]]() - var lastAccessCheckDetails: String = "" + val excludedRootImportsCached = perRunCaches.newMap[CompilationUnit, List[Symbol]]() + + // register an import for the narrow purpose of excluding root imports of predef modules + def registerImport(ctx: Context, imp: Import): Unit = { + val sym = imp.expr.symbol + if (sym != null && !sym.hasPackageFlag && ctx.enclosingNonImportContext.owner.hasPackageFlag && rootImports(ctx.unit).contains(sym)) { + var current = excludedRootImportsCached.get(ctx.unit).getOrElse(Nil) + current = sym :: current + excludedRootImportsCached += ctx.unit -> current + } + } - /** List of symbols to import from in a root context. Typically that - * is `java.lang`, `scala`, and [[scala.Predef]], in that order. Exceptions: + /** List of symbols to import from in a root context. By default, that + * is `java.lang`, `scala`, and [[scala.Predef]], in that order. * - * - if option `-Yno-imports` is given, nothing is imported - * - if the unit is java defined, only `java.lang` is imported - * - if option `-Yno-predef` is given, if the unit body has an import of Predef + * - if option `-Yimports` is supplied, then that specifies the preamble imports + * - if the unit body has an import of Predef * among its leading imports, or if the tree is [[scala.Predef]], `Predef` is not imported. + * Similarly for any module among the preamble imports. + * - if the unit is java defined, only `java.lang` is imported + * + * The root imports for a unit are cached. 
*/ protected def rootImports(unit: CompilationUnit): List[Symbol] = { assert(definitions.isDefinitionsInitialized, "definitions uninitialized") - if (settings.noimports) Nil - else if (unit.isJava) RootImports.javaList - else if (settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) { - // scala/bug#8258 Needed for the presentation compiler using -sourcepath, otherwise cycles can occur. See the commit - // message for this ticket for an example. - debuglog("Omitted import of Predef._ for " + unit) - RootImports.javaAndScalaList + if (unit.isJava) RootImports.javaList + else rootImportsCached.get(unit).getOrElse { + val calculated = defaultRootImports + rootImportsCached += unit -> calculated + calculated } - else RootImports.completeList } + private def defaultRootImports: List[Symbol] = + if (settings.imports.isSetByUser) + settings.imports.value.map { + case "java.lang" => JavaLangPackage + case "scala" => ScalaPackage + case "scala.Predef" => PredefModule + case name => + import rootMirror.{getModuleIfDefined, getPackageObjectIfDefined, getPackageIfDefined} + getModuleIfDefined(name) orElse + getPackageObjectIfDefined(name) orElse + getPackageIfDefined(name) orElse { + // force package objects in prefixes + def force(pkg: String, next: String): String = { + val full = if (pkg.isEmpty) next else s"$pkg.$next" + val sym = getPackageIfDefined(full) + if (sym != NoSymbol) openPackageModule(sym, force = true) + full + } + name.split('.').toList.init.foldLeft("")(force) + getModuleIfDefined(name) + } orElse NoSymbol.tap(_ => globalError(s"bad preamble import $name")) + } + else RootImports.completeList def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, throwing: Boolean = false, checking: Boolean = false): Context = { - val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym))) + val rootImportsContext = rootImports(unit).foldLeft(startContext) { (c, sym) => + val imp = + if ((sym eq PredefModule) 
&& currentRun.sourceFeatures.any2StringAdd) + gen.mkImportFromSelector(sym, ImportSelector.mask(nme.any2stringadd) :: ImportSelector.wildList) + else + gen.mkWildcardImport(sym) + c.make(tree = imp, unit = unit) + } // there must be a scala.xml package when xml literals were parsed in this unit if (unit.hasXml && ScalaXmlPackage == NoSymbol) @@ -144,16 +139,13 @@ trait Contexts { self: Analyzer => if (!unit.hasXml || ScalaXmlTopScope == NoSymbol) rootImportsContext else rootImportsContext.make(gen.mkImport(ScalaXmlPackage, nme.TopScope, nme.dollarScope)) - val c = contextWithXML.make(tree, unit = unit) - - c.initRootContext(throwing, checking) - c + contextWithXML.make(tree, unit = unit).tap(_.initRootContext(throwing, checking)) } def rootContextPostTyper(unit: CompilationUnit, tree: Tree = EmptyTree): Context = rootContext(unit, tree, throwing = true) - def resetContexts() { + def resetContexts(): Unit = { startContext.enclosingContextChain foreach { context => context.tree match { case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol) @@ -202,9 +194,9 @@ trait Contexts { self: Analyzer => * applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to * a function type with/without implicit views. * - * When the error policies entail error/warning buffering, the mutable [[ReportBuffer]] records + * When the error policies entail error/warning buffering, the mutable [[ContextReporter]] records * everything that is issued. It is important to note, that child Contexts created with `make` - * "inherit" the very same `ReportBuffer` instance, whereas children spawned through `makeSilent` + * "inherit" the very same `ContextReporter` instance, whereas children spawned through `makeSilent` * receive a separate, fresh buffer. 
* * @param tree Tree associated with this context @@ -231,13 +223,13 @@ trait Contexts { self: Analyzer => var contextMode: ContextMode = ContextMode.DefaultMode /** Update all modes in `mask` to `value` */ - def update(mask: ContextMode, value: Boolean) { + def update(mask: ContextMode, value: Boolean): Unit = { contextMode = contextMode.set(value, mask) } /** Set all modes in the mask `enable` to true, and all in `disable` to false. */ def set(enable: ContextMode = NOmode, disable: ContextMode = NOmode): this.type = { - contextMode = contextMode.set(true, enable).set(false, disable) + contextMode = (contextMode | enable) &~ disable this } @@ -252,10 +244,7 @@ trait Contexts { self: Analyzer => */ var enclMethod: Context = _ - /** Variance relative to enclosing class */ - var variance: Variance = Variance.Invariant - - private var _undetparams: List[Symbol] = List() + private[this] var _undetparams: List[Symbol] = List() protected def outerDepth = if (outerIsNoContext) 0 else outer.depth @@ -267,12 +256,162 @@ trait Contexts { self: Analyzer => /** A root import is never unused and always bumps context depth. 
(e.g scala._ / Predef._ and magic REPL imports) */ def isRootImport: Boolean = false + var pendingStabilizers: List[Tree] = Nil + /** Types for which implicit arguments are currently searched */ var openImplicits: List[OpenImplicit] = List() final def isSearchingForImplicitParam: Boolean = { openImplicits.nonEmpty && openImplicits.exists(x => !x.isView) } + private type ImplicitDict = List[(Type, (Symbol, Tree))] + private var implicitDictionary: ImplicitDict = null + + @tailrec final def implicitRootContext: Context = { + if(implicitDictionary != null) this + else if(outerIsNoContext || outer.openImplicits.isEmpty) { + implicitDictionary = Nil + this + } else outer.implicitRootContext + } + + private def linkImpl(tpe: Type): Tree = { + val sym = + implicitDictionary.find(_._1 =:= tpe) match { + case Some((_, (sym, _))) => sym + case None => + val fresh = freshNameCreatorFor(this) + val vname = newTermName(fresh.newName("rec$")) + val vsym = owner.newValue(vname, newFlags = FINAL | SYNTHETIC) setInfo tpe + implicitDictionary +:= ((tpe, (vsym, EmptyTree))) + vsym + } + gen.mkAttributedRef(sym) setType tpe + } + + final def linkByNameImplicit(tpe: Type): Tree = implicitRootContext.linkImpl(tpe) + + private def refImpl(tpe: Type): Tree = + implicitDictionary.find(_._1 =:= tpe) match { + case Some((_, (sym, _))) => + gen.mkAttributedRef(sym) setType tpe + case None => + EmptyTree + } + + final def refByNameImplicit(tpe: Type): Tree = implicitRootContext.refImpl(tpe) + + private def defineImpl(tpe: Type, result: SearchResult): SearchResult = { + @tailrec + def patch(d: ImplicitDict, acc: ImplicitDict): (ImplicitDict, SearchResult) = d match { + case Nil => (implicitDictionary, result) + case (tp, (sym, EmptyTree)) :: tl if tp =:= tpe => + val ref = gen.mkAttributedRef(sym) setType tpe + val res = new SearchResult(ref, result.subst, result.undetparams) + (acc reverse_::: ((tpe, (sym, result.tree)) :: tl), res) + case hd :: tl => + patch(tl, hd :: acc) + } + + val (d, 
res) = patch(implicitDictionary, Nil) + implicitDictionary = d + res + } + + def defineByNameImplicit(tpe: Type, result: SearchResult): SearchResult = implicitRootContext.defineImpl(tpe, result) + + def emitImplicitDictionary(result: SearchResult): SearchResult = + if(implicitDictionary == null || implicitDictionary.isEmpty || result.tree == EmptyTree) result + else { + val typer = newTyper(this) + + @tailrec + def prune(trees: List[Tree], pending: List[(Symbol, Tree)], acc: List[(Symbol, Tree)]): List[(Symbol, Tree)] = pending match { + case Nil => acc + case ps => + val (in, out) = ps.partition { case (vsym, _) => trees.exists(_.exists(_.symbol == vsym)) } + if (in.isEmpty) acc + else prune(in.map(_._2) ++ trees, out, in ++ acc) + } + + val pruned = prune(List(result.tree), implicitDictionary.map(_._2), Nil) + if (pruned.isEmpty) result + else if (pruned.exists(_._2 == EmptyTree)) SearchFailure + else { + val pos = result.tree.pos + val (dictClassSym, dictClass0) = { + val cname = newTypeName(typer.fresh.newName("LazyDefns$")) + val parents = addSerializable(definitions.AnyRefTpe) + val csym = owner.newClass(cname, pos, FINAL | SYNTHETIC) + csym.setInfo(ClassInfoType(parents, newScope, csym)) + + val vdefs = pruned.map { case (vsym, rhs) => + changeNonLocalOwners(rhs, vsym) + // We want the normal mechanism for generating accessors during + // typechecking to be applied, so we don't create symbols for + // these ValDefs ourselves. 
+ atPos(pos)(ValDef(Modifiers(FINAL | SYNTHETIC), vsym.name.toTermName, TypeTree(rhs.tpe), rhs)) + } + + val cdef = { + val cdef0 = ClassDef(csym, NoMods, ListOfNil, vdefs, pos) + typer.namer.enterSym(cdef0) + typer.typed(cdef0) + } + + (csym, cdef) + } + + val dictTpe = dictClassSym.tpe_* + + val preSyms = pruned.map(_._1) + val postSyms = preSyms.map(vsym => dictTpe.decl(vsym.name)) + + val symMap = (preSyms zip postSyms).toMap + + val dictClass = { + class DictionarySubstituter extends TreeSymSubstituter(preSyms, postSyms) { + override def transform(tree: Tree): Tree = { + if (tree.hasExistingSymbol) { + val sym = tree.symbol + symMap.get(sym.owner).foreach(sym.owner = _) + } + super.transform(tree) + } + } + (new DictionarySubstituter)(dictClass0) + } + + val dictSym = { + val vname = newTermName(typer.fresh.newName("lazyDefns$")) + owner.newValue(vname, pos, FINAL | SYNTHETIC).setInfo(dictTpe) + } + + val dict = { + val rhs = atPos(pos)(Apply(Select(New(Ident(dictClassSym)), nme.CONSTRUCTOR), List())) + val vdef0 = ValDef(dictSym, rhs) + typer.namer.enterSym(vdef0) + typer.typed(vdef0) + } + + val resultTree = { + class ReferenceSubstituter extends TreeSymSubstituter(preSyms, postSyms) { + override def transform(tree: Tree): Tree = tree match { + case i: Ident if symMap.contains(i.symbol) => + super.transform(atPos(i.pos)(treeCopy.Select(i, gen.mkAttributedRef(dictSym), i.name))) + + case _ => + super.transform(tree) + } + } + (new ReferenceSubstituter)(result.tree) + } + + val resultBlock = atPos(pos.focus)(Block(dictClass, dict, resultTree).setType(resultTree.tpe)) + new SearchResult(resultBlock, result.subst, result.undetparams) + } + } + var prefix: Type = NoPrefix def inSuperInit_=(value: Boolean) = this(SuperInit) = value @@ -299,18 +438,16 @@ trait Contexts { self: Analyzer => def inSecondTry_=(value: Boolean) = this(SecondTry) = value def inReturnExpr = this(ReturnExpr) def inTypeConstructorAllowed = this(TypeConstructorAllowed) + def inAnnotation = 
this(TypingAnnotation) def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode - /** To enrich error messages involving default arguments. - When extending the notion, group diagnostics in an object. */ - var diagUsedDefaults: Boolean = false - /** Saved type bounds for type parameters which are narrowed in a GADT. */ var savedTypeBounds: List[(Symbol, Type)] = List() /** The next enclosing context (potentially `this`) that is owned by a class or method */ - def enclClassOrMethod: Context = + @tailrec + final def enclClassOrMethod: Context = if (!owner.exists || owner.isClass || owner.isMethod) this else outer.enclClassOrMethod @@ -398,11 +535,11 @@ trait Contexts { self: Analyzer => inSilentMode { try { set(disable = ImplicitsEnabled | EnrichmentEnabled) // restored by inSilentMode - tryOnce(false) + tryOnce(isLastTry = false) reporter.hasErrors } catch { - case ex: CyclicReference => throw ex - case ex: TypeError => true // recoverable cyclic references? + case e: CyclicReference => throw e + case _: TypeError => true // recoverable cyclic references? 
} } } else true @@ -410,7 +547,7 @@ trait Contexts { self: Analyzer => // do last try if try with implicits enabled failed // (or if it was not attempted because they were disabled) if (doLastTry) - tryOnce(true) + tryOnce(isLastTry = true) } } @@ -436,6 +573,7 @@ trait Contexts { self: Analyzer => @inline final def withinSuperInit[T](op: => T): T = withMode(enabled = SuperInit)(op) @inline final def withinSecondTry[T](op: => T): T = withMode(enabled = SecondTry)(op) @inline final def withinPatAlternative[T](op: => T): T = withMode(enabled = PatternAlternative)(op) + @inline final def withinAnnotation[T](op: => T): T = withMode(enabled = TypingAnnotation)(op) @inline final def withSuppressDeadArgWarning[T](suppress: Boolean)(op: => T): T = if (suppress) withMode(enabled = SuppressDeadArgWarning)(op) else withMode(disabled = SuppressDeadArgWarning)(op) @@ -462,7 +600,7 @@ trait Contexts { self: Analyzer => val savedContextMode = contextMode val savedReporter = reporter - setAmbiguousErrors(false) + setAmbiguousErrors(report = false) _reporter = new BufferingReporter try expr @@ -519,8 +657,6 @@ trait Contexts { self: Analyzer => new Context(tree, owner, scope, unit, this, innerDepth(isRootImport = false), reporter) // Fields that are directly propagated - c.variance = variance - c.diagUsedDefaults = diagUsedDefaults c.openImplicits = openImplicits c.contextMode = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below. 
@@ -536,10 +672,16 @@ trait Contexts { self: Analyzer => c(TypeConstructorAllowed) = false registerContext(c.asInstanceOf[analyzer.Context]) - debuglog("[context] ++ " + c.unit + " / " + (if (tree == null) "" else tree.summaryString)) + debuglog(s"[context] ++ ${c.unit} / ${if (tree == null) "" else tree.summaryString}") c } + def makeImportContext(tree: Import): Context = + make(tree).tap { ctx => + if (settings.warnUnusedImport && openMacros.isEmpty && !ctx.isRootImport && !ctx.outer.owner.isInterpreterWrapper) + recordImportContext(ctx) + } + /** Use reporter (possibly buffered) for errors/warnings and enable implicit conversion **/ def initRootContext(throwing: Boolean = false, checking: Boolean = false): Unit = { _reporter = @@ -571,7 +713,7 @@ trait Contexts { self: Analyzer => def makeNonSilent(newtree: Tree): Context = { val c = make(newtree, reporter = reporter.makeImmediate) - c.setAmbiguousErrors(true) + c.setAmbiguousErrors(report = true) c } @@ -579,6 +721,7 @@ trait Contexts { self: Analyzer => def makeImplicit(reportAmbiguousErrors: Boolean) = { val c = makeSilent(reportAmbiguousErrors) c(ImplicitsEnabled | EnrichmentEnabled) = false + c(InImplicitSearch) = true c } @@ -599,8 +742,8 @@ trait Contexts { self: Analyzer => val argContext = baseContext.makeNewScope(tree, owner, reporter = this.reporter) argContext.contextMode = contextMode argContext.inSelfSuperCall = true - def enterElems(c: Context) { - def enterLocalElems(e: ScopeEntry) { + def enterElems(c: Context): Unit = { + def enterLocalElems(e: ScopeEntry): Unit = { if (e != null && e.owner == c.scope) { enterLocalElems(e.next) argContext.scope enter e.sym @@ -622,15 +765,25 @@ trait Contexts { self: Analyzer => // /** Issue/buffer/throw the given type error according to the current mode for error reporting. 
*/ - private[typechecker] def issue(err: AbsTypeError) = reporter.issue(err)(this) + private[typechecker] def issue(err: AbsTypeError) = reporter.issue(err)(this) + /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */ private[typechecker] def issueAmbiguousError(err: AbsAmbiguousTypeError) = reporter.issueAmbiguousError(err)(this) + /** Issue/throw the given error message according to the current mode for error reporting. */ - def error(pos: Position, msg: String) = reporter.error(fixPosition(pos), msg) + def error(pos: Position, msg: String, actions: List[CodeAction] = Nil) = + reporter.errorAndDumpIfDebug(fixPosition(pos), msg, actions) + /** Issue/throw the given error message according to the current mode for error reporting. */ - def warning(pos: Position, msg: String, category: WarningCategory) = reporter.warning(fixPosition(pos), msg, category, owner) - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol) = reporter.warning(fixPosition(pos), msg, category, site) - def echo(pos: Position, msg: String) = reporter.echo(fixPosition(pos), msg) + def warning(pos: Position, msg: String, category: WarningCategory, actions: List[CodeAction] = Nil): Unit = + reporter.warning(fixPosition(pos), msg, category, owner, actions) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, actions: List[CodeAction]): Unit = + reporter.warning(fixPosition(pos), msg, category, site, actions) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = + warning(pos, msg, category, site, Nil) + + def echo(pos: Position, msg: String) = reporter.echo(fixPosition(pos), msg) + def fixPosition(pos: Position): Position = pos match { case NoPosition => nextEnclosing(_.tree.pos != NoPosition).tree.pos case _ => pos @@ -638,9 +791,8 @@ trait Contexts { self: Analyzer => // TODO: buffer deprecations under silent (route through ContextReporter, store 
in BufferingReporter) - def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit = - runReporting.deprecationWarning(fixPosition(pos), sym, owner, msg, since) - + def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String, actions: List[CodeAction] = Nil): Unit = + runReporting.deprecationWarning(fixPosition(pos), sym, owner, msg, since, actions) def deprecationWarning(pos: Position, sym: Symbol): Unit = runReporting.deprecationWarning(fixPosition(pos), sym, owner) @@ -648,27 +800,9 @@ trait Contexts { self: Analyzer => runReporting.featureWarning(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) - // nextOuter determines which context is searched next for implicits - // (after `this`, which contributes `newImplicits` below.) In - // most cases, it is simply the outer context: if we're owned by - // a constructor, the actual current context and the conceptual - // context are different when it comes to scoping. The current - // conceptual scope is the context enclosing the blocks which - // represent the constructor body (TODO: why is there more than one - // such block in the outer chain?) - private def nextOuter = { - // Drop the constructor body blocks, which come in varying numbers. 
- // -- If the first statement is in the constructor, scopingCtx == (constructor definition) - // -- Otherwise, scopingCtx == (the class which contains the constructor) - val scopingCtx = - if (owner.isConstructor) nextEnclosing(c => !c.tree.isInstanceOf[Block]) - else this - - scopingCtx.outer - } - - def nextEnclosing(p: Context => Boolean): Context = - if (p(this)) this else outer.nextEnclosing(p) + @tailrec + final def nextEnclosing(p: Context => Boolean): Context = + if (this eq NoContext) this else if (p(this)) this else outer.nextEnclosing(p) final def outermostContextAtCurrentPos: Context = { var pos = tree.pos @@ -690,9 +824,9 @@ trait Contexts { self: Analyzer => case x: Import => "" + x case Template(parents, `noSelfType`, body) => val pstr = if ((parents eq null) || parents.isEmpty) "Nil" else parents mkString " " - val bstr = if (body eq null) "" else body.length + " stats" + val bstr = if (body eq null) "" else "" + body.length + " stats" s"""Template($pstr, _, $bstr)""" - case x => s"${tree.shortClass}${treeIdString}:${treeTruncated}" + case _ => s"${tree.shortClass}${treeIdString}:${treeTruncated}" } override def toString = @@ -832,7 +966,7 @@ trait Contexts { self: Analyzer => // Type bound management // - def pushTypeBounds(sym: Symbol) { + def pushTypeBounds(sym: Symbol): Unit = { sym.info match { case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb") case info => devWarning(s"Something other than a TypeBounds seen in pushTypeBounds: $info is a ${shortClassOfInstance(info)}") @@ -875,7 +1009,8 @@ trait Contexts { self: Analyzer => private var implicitsCache: List[ImplicitInfo] = null private var implicitsRunId = NoRunId - def resetCache() { + @tailrec + final def resetCache(): Unit = { implicitsRunId = NoRunId implicitsCache = null if (outer != null && outer != this) outer.resetCache() @@ -890,42 +1025,48 @@ trait Contexts { self: Analyzer => isAccessible(sym, pre) && !(imported && { val e = scope.lookupEntry(name) - (e ne 
null) && (e.owner == scope) && (!currentRun.isScala212 || e.sym.exists) + (e ne null) && (e.owner == scope) && e.sym.exists }) /** Do something with the symbols with name `name` imported via the import in `imp`, * if any such symbol is accessible from this context and is a qualifying implicit. */ - private def withQualifyingImplicitAlternatives(imp: ImportInfo, name: Name, pre: Type)(f: Symbol => Unit) = for { - sym <- importedAccessibleSymbol(imp, name, requireExplicit = false, record = false).alternatives - if isQualifyingImplicit(name, sym, pre, imported = true) - } f(sym) + private def withQualifyingImplicitAlternatives(imp: ImportInfo, name: Name, pre: Type)(f: Symbol => Unit) = { + val imported = importedAccessibleSymbol(imp, imp.importedSymbol(name)) + if (imported.isOverloaded) { + for (sym <- imported.alternatives) + if (isQualifyingImplicit(name, sym, pre, imported = true)) + f(sym) + } + else if (isQualifyingImplicit(name, imported, pre, imported = true)) + f(imported) + } - private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] = - for (sym <- syms.toList if isQualifyingImplicit(sym.name, sym, pre, imported)) yield - new ImplicitInfo(sym.name, pre, sym) + private def collectImplicits(syms: Scope, pre: Type): List[ImplicitInfo] = + for (sym <- syms.toList if isQualifyingImplicit(sym.name, sym, pre, imported = false)) + yield new ImplicitInfo(sym.name, pre, sym, inPackagePrefix = false) - private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = { + private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = if (isExcludedRootImport(imp)) List() else { val qual = imp.qual val pre = qual.tpe def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match { case List() => List() - case List(ImportSelector(nme.WILDCARD, _, _, _)) => + case sel :: _ if sel.isWildcard || sel.isGiven => // Using pre.implicitMembers seems to exposes a problem with out-dated symbols in the IDE, // 
see the example in https://www.assembla.com/spaces/scala-ide/tickets/1002552#/activity/ticket // I haven't been able to boil that down the an automated test yet. // Looking up implicit members in the package, rather than package object, here is at least // consistent with what is done just below for named imports. - collectImplicits(qual.tpe.implicitMembers, pre, imported = true) - case ImportSelector(from, _, to, _) :: sels1 => - var impls = collect(sels1) filter (info => info.name != from) - if (to != nme.WILDCARD) { + for (sym <- qual.tpe.implicitMembers.toList if isQualifyingImplicit(sym.name, sym, pre, imported = true)) + yield new ImplicitInfo(sym.name, pre, sym, importInfo = imp, importSelector = sel) + case (sel @ ImportSelector(from, _, to, _)) :: sels1 => + var impls = collect(sels1).filter(_.name != from) + if (!sel.isMask) withQualifyingImplicitAlternatives(imp, to, pre) { sym => - impls = new ImplicitInfo(to, pre, sym) :: impls + impls = new ImplicitInfo(to, pre, sym, importInfo = imp, importSelector = sel) :: impls } - } impls } //debuglog("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//DEBUG @@ -938,38 +1079,47 @@ trait Contexts { self: Analyzer => * `implicitss` will return implicit conversions defined inside the class. These are * filtered out later by `eligibleInfos` (scala/bug#4270 / 9129cfe9), as they don't type-check. 
*/ - def implicitss: List[List[ImplicitInfo]] = { - val nextOuter = this.nextOuter - def withOuter(is: List[ImplicitInfo]): List[List[ImplicitInfo]] = - is match { - case Nil => nextOuter.implicitss - case _ => is :: nextOuter.implicitss + final def implicitss: List[List[ImplicitInfo]] = implicitssImpl(NoSymbol) + + private def implicitssImpl(skipClass: Symbol): List[List[ImplicitInfo]] = { + if (this == NoContext) Nil + else if (owner == skipClass) outer.implicitssImpl(NoSymbol) + else { + def withOuter(is: List[ImplicitInfo]): List[List[ImplicitInfo]] = { + // In a constructor super call, the members of the constructed class are not in scope. We + // need to skip over the context of that class when searching for implicits. See PR #8441. + val nextSkipClass = if (owner.isPrimaryConstructor && inSelfSuperCall) owner.owner else skipClass + is match { + case Nil => outer.implicitssImpl(nextSkipClass) + case _ => is :: outer.implicitssImpl(nextSkipClass) + } } - val CycleMarker = NoRunId - 1 - if (implicitsRunId == CycleMarker) { - debuglog(s"cycle while collecting implicits at owner ${owner}, probably due to an implicit without an explicit return type. Continuing with implicits from enclosing contexts.") - withOuter(Nil) - } else if (implicitsRunId != currentRunId) { - implicitsRunId = CycleMarker - implicits(nextOuter) match { - case None => - implicitsRunId = NoRunId - withOuter(Nil) - case Some(is) => - implicitsRunId = currentRunId - implicitsCache = is - withOuter(is) + val CycleMarker = NoRunId - 1 + if (implicitsRunId == CycleMarker) { + debuglog(s"cycle while collecting implicits at owner ${owner}, probably due to an implicit without an explicit return type. 
Continuing with implicits from enclosing contexts.") + withOuter(Nil) + } else if (implicitsRunId != currentRunId) { + implicitsRunId = CycleMarker + implicits match { + case None => + implicitsRunId = NoRunId + withOuter(Nil) + case Some(is) => + implicitsRunId = currentRunId + implicitsCache = is + withOuter(is) + } } + else withOuter(implicitsCache) } - else withOuter(implicitsCache) } /** @return None if a cycle is detected, or Some(infos) containing the in-scope implicits at this context */ - private def implicits(nextOuter: Context): Option[List[ImplicitInfo]] = { + private def implicits: Option[List[ImplicitInfo]] = { val firstImport = this.firstImport if (unit.isJava) SomeOfNil - else if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) { + else if (owner != outer.owner && owner.isClass && !owner.isPackageClass) { if (!owner.isInitialized) None else savingEnclClass(this) { // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List( // remedied nonetheless. Some(collectImplicits(owner.thisType.implicitMembers, owner.thisType)) } - } else if (scope != nextOuter.scope && !owner.isPackageClass) { + } else if (scope != outer.scope && !owner.isPackageClass) { debuglog("collect local implicits " + scope.toList)//DEBUG Some(collectImplicits(scope, NoPrefix)) - } else if (firstImport != nextOuter.firstImport) { + } else if (firstImport != outer.firstImport) { if (isDeveloper) - assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports)) + assert(imports.tail.headOption == outer.firstImport, (imports, outer.imports)) Some(collectImplicitImports(firstImport.get)) } else if (owner.isPackageClass) { // the corresponding package object may contain implicit members. 
@@ -1005,8 +1155,8 @@ trait Contexts { self: Analyzer => val imp1Explicit = imp1 isExplicitImport name val imp2Explicit = imp2 isExplicitImport name val ambiguous = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit - val imp1Symbol = (imp1 importedSymbol name).initialize filter (s => isAccessible(s, imp1.qual.tpe, superAccess = false)) - val imp2Symbol = (imp2 importedSymbol name).initialize filter (s => isAccessible(s, imp2.qual.tpe, superAccess = false)) + val imp1Symbol = imp1.importedSymbol(name).initialize.filter(isAccessible(_, imp1.qual.tpe, superAccess = false)) + val imp2Symbol = imp2.importedSymbol(name).initialize.filter(isAccessible(_, imp2.qual.tpe, superAccess = false)) // The types of the qualifiers from which the ambiguous imports come. // If the ambiguous name is a value, these must be the same. @@ -1017,12 +1167,6 @@ trait Contexts { self: Analyzer => def mt1 = t1 memberType imp1Symbol def mt2 = t2 memberType imp2Symbol - def characterize = List( - s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}", - s"member type 1: $mt1", - s"member type 2: $mt2" - ).mkString("\n ") - if (!ambiguous || !imp2Symbol.exists) Some(imp1) else if (!imp1Symbol.exists) Some(imp2) else ( @@ -1042,17 +1186,31 @@ trait Contexts { self: Analyzer => Some(imp1) } else { - log(s"Import is genuinely ambiguous:\n " + characterize) + log(s"""Import is genuinely ambiguous: + | types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2} + | member type 1: $mt1 + | member type 2: $mt2""".stripMargin) None } ) } - /** The symbol with name `name` imported via the import in `imp`, - * if any such symbol is accessible from this context. 
+ def isPackageOwnedInDifferentUnit(s: Symbol): Boolean = + if (s.isOverloaded) s.alternatives.exists(isPackageOwnedInDifferentUnit) + else (s.isDefinedInPackage && ( + !currentRun.compiles(s) + || unit.exists && s.sourceFile != unit.source.file) + ) + + /** If the given import is permitted, fetch the symbol and filter for accessibility. + * Tests `exists` to complete SymbolLoaders, which sets the symbol's access flags (scala/bug#12736) */ - private[Contexts] def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean, record: Boolean): Symbol = - imp.importedSymbol(name, requireExplicit, record) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) + private[Contexts] def importedAccessibleSymbol(imp: ImportInfo, sym: => Symbol): Symbol = + if (isExcludedRootImport(imp)) NoSymbol + else sym.filter(s => s.exists && isAccessible(s, imp.qual.tpe, superAccess = false)) + + private def isExcludedRootImport(imp: ImportInfo): Boolean = + imp.isRootImport && excludedRootImportsCached.get(unit).exists(_.contains(imp.qual.symbol)) private[Contexts] def requiresQualifier(s: Symbol): Boolean = ( s.owner.isClass @@ -1106,11 +1264,23 @@ trait Contexts { self: Analyzer => } final def javaFindMember(pre: Type, name: Name, qualifies: Symbol => Boolean): (Type, Symbol) = { - val sym = pre.member(name).filter(qualifies) val preSym = pre.typeSymbol + val sym = { + def asModule = + if (name.isTypeName && nme.isModuleName(name)) + pre.member(name.dropModule.toTermName) match { + case nope @ NoSymbol => nope + case member => member.filter(qualifies).moduleClass + } + else NoSymbol + pre.member(name) match { + case NoSymbol => asModule + case member => member.filter(qualifies) + } + } if (sym.exists || preSym.isPackageClass || !preSym.isClass) (pre, sym) else { - // In Java code, static innner classes, which we model as members of the companion object, + // In Java code, static inner classes, which we model as members of the companion object, // can be 
referenced from an ident in a subclass or by a selection prefixed by the subclass. val toSearch = if (preSym.isModuleClass) companionSymbolOf(pre.typeSymbol.sourceModule, this).baseClasses else preSym.baseClasses toSearch.iterator.map { bc => @@ -1118,7 +1288,8 @@ trait Contexts { self: Analyzer => val found = pre1.decl(name) found.filter(qualifies) match { case NoSymbol => - val pre2 = companionSymbolOf(pre1.typeSymbol, this).typeOfThis + val companionModule = companionSymbolOf(pre1.typeSymbol, this) + val pre2 = companionModule.typeOfThis val found = pre2.decl(name).filter(qualifies) found match { case NoSymbol => NoJavaMemberFound @@ -1130,15 +1301,14 @@ trait Contexts { self: Analyzer => } } - - private def isReplImportWrapperImport(tree: Tree): Boolean = { + // detect magic REPL imports (used to manage visibility) + private def isReplImportWrapperImport(tree: Tree): Boolean = tree match { case Import(expr, selector :: Nil) => // Just a syntactic check to avoid forcing typechecking of imports selector.name.string_==(nme.INTERPRETER_IMPORT_LEVEL_UP) && owner.enclosingTopLevelClass.isInterpreterWrapper case _ => false } - } } //class Context @@ -1155,6 +1325,35 @@ trait Contexts { self: Analyzer => private[this] var pre: Type = _ // the prefix type of defSym, if a class member private[this] var cx: Context = _ // the context under consideration private[this] var symbolDepth: Int = _ // the depth of the directly found symbol + private[this] var foundInPrefix: Boolean = _ // the symbol was found in pre + private[this] var foundInSuper: Boolean = _ // the symbol was found super of context class (inherited) + + def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) = + LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2") + def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) = + LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp") + def ambiguousDefinitions(outer: Symbol, inherited: Symbol, 
foundInSuper: Boolean, classOfInherited: Symbol, currentClass: Symbol) = + if (foundInSuper) { + if (inherited.isImplicit) None + else { + val outer1 = outer.alternatives.head + val inherited1 = inherited.alternatives.head + val classDesc = if (classOfInherited.isAnonymousClass) "anonymous class" else classOfInherited.toString + val parent = classOfInherited.parentSymbols.find(_.isNonBottomSubClass(inherited1.owner)).getOrElse(NoSymbol) + val inherit = if (parent.exists && parent != inherited1.owner) s", inherited through parent $parent" else "" + val fix = if (classOfInherited != currentClass) s"${classOfInherited.name}.this." else "this." + val message = + sm"""|it is both defined in the enclosing ${outer1.owner} and inherited in the enclosing $classDesc as $inherited1 (defined in ${inherited1.ownsString}$inherit) + |In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. + |Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `${fix}${outer1.name}`.""" + inherited.updateAttachment(LookupAmbiguityWarning( + sm"""|reference to ${outer1.name} is ambiguous; + |$message + |Or use `-Wconf:msg=legacy-binding:s` to silence this warning.""", fix)) + } + None + } else + Some(LookupAmbiguous(s"it is both defined in ${outer.owner} and available as ${inherited.fullLocationString}")) def apply(thisContext: Context, name: Name)(qualifies: Symbol => Boolean): NameLookup = { lookupError = null @@ -1163,6 +1362,8 @@ trait Contexts { self: Analyzer => pre = NoPrefix cx = thisContext symbolDepth = -1 + foundInPrefix = false + foundInSuper = false def finish(qual: Tree, sym: Symbol): NameLookup = ( if (lookupError ne null) lookupError @@ -1172,20 +1373,15 @@ trait Contexts { self: Analyzer => case _ => LookupSucceeded(qual, sym) } ) - def finishDefSym(sym: Symbol, pre0: Type): NameLookup = - if (!thisContext.unit.isJava && thisContext.requiresQualifier(sym)) - finish(gen.mkAttributedQualifier(pre0), sym) - else - 
finish(EmptyTree, sym) + def finishDefSym(sym: Symbol, pre0: Type): NameLookup = { + val qual = + if (!thisContext.unit.isJava && thisContext.requiresQualifier(sym)) gen.mkAttributedQualifier(pre0) + else EmptyTree + finish(qual, sym) + } - def isPackageOwnedInDifferentUnit(s: Symbol) = ( - s.isDefinedInPackage && ( - !currentRun.compiles(s) - || thisContext.unit.exists && s.sourceFile != thisContext.unit.source.file - ) - ) - def lookupInPrefix(name: Name) = { - if (thisContext.unit.isJava) { + def lookupInPrefix(name: Name): Symbol = + if (thisContext.unit.isJava) thisContext.javaFindMember(pre, name, qualifies) match { case (_, NoSymbol) => NoSymbol @@ -1193,10 +1389,8 @@ trait Contexts { self: Analyzer => pre = pre1 sym } - } else { + else pre.member(name).filter(qualifies) - } - } def accessibleInPrefix(s: Symbol) = thisContext.isAccessible(s, pre, superAccess = false) @@ -1204,7 +1398,7 @@ trait Contexts { self: Analyzer => def searchPrefix = { cx = cx.enclClass val found0 = lookupInPrefix(name) - val found1 = found0 filter accessibleInPrefix + val found1 = found0.filter(accessibleInPrefix) if (found0.exists && !found1.exists && inaccessible == null) inaccessible = LookupInaccessible(found0, analyzer.lastAccessCheckDetails) @@ -1250,128 +1444,177 @@ trait Contexts { self: Analyzer => return finishDefSym(constructorSym, cx.enclClass.prefix) } - var foundInSuper: Boolean = false - var outerDefSym: Symbol = NoSymbol - // cx.scope eq null arises during FixInvalidSyms in Duplicators - while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) { - pre = cx.enclClass.prefix - defSym = lookupInScope(cx.owner, cx.enclClass.prefix, cx.scope) match { - case NoSymbol => - val prefixSym = searchPrefix - if (currentRun.isScala213 && prefixSym.exists && prefixSym.alternatives.forall(_.owner != cx.owner)) - foundInSuper = true - prefixSym - case found => - found + def nextDefinition(lastDef: Symbol, lastPre: Type): Unit = { + var inPrefix = false + defSym = 
NoSymbol + while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) { + pre = cx.enclClass.prefix + defSym = lookupInScope(cx.owner, pre, cx.scope) match { + case NoSymbol => inPrefix = true; searchPrefix + case found => inPrefix = false; found + } + if (!defSym.exists) cx = cx.outer // push further outward } - if (!defSym.exists) - cx = cx.outer // push further outward + if ((defSym.isAliasType || lastDef.isAliasType) && pre.memberType(defSym) =:= lastPre.memberType(lastDef)) + defSym = NoSymbol + if (defSym.isStable && lastDef.isStable && + (lastPre.memberType(lastDef).termSymbol == defSym || pre.memberType(defSym).termSymbol == lastDef)) + defSym = NoSymbol + foundInPrefix = inPrefix && defSym.exists + foundInSuper = foundInPrefix && defSym.alternatives.forall(_.owner != cx.owner) } + nextDefinition(NoSymbol, NoPrefix) if (symbolDepth < 0) symbolDepth = cx.depth - def checkAmbiguousWithEnclosing(): Unit = if (foundInSuper && !thisContext.unit.isJava) { - val defPre = pre - val defCx = cx - val defDepth = symbolDepth - - while ((cx ne NoContext) && (cx.owner == defCx.owner || cx.depth >= symbolDepth)) cx = cx.outer - - while ((cx ne NoContext) && (cx.scope ne null)) { - pre = cx.enclClass.prefix - val next = lookupInScope(cx.owner, cx.enclClass.prefix, cx.scope).orElse(searchPrefix).filter(_.owner == cx.owner) - if (next.exists && thisContext.unit.exists && next.sourceFile == thisContext.unit.source.file) { - outerDefSym = next - cx = NoContext - } else - cx = cx.outer - } - if (outerDefSym.exists) { - if (outerDefSym == defSym) - outerDefSym = NoSymbol - else if ((defSym.isAliasType || outerDefSym.isAliasType) && defPre.memberType(defSym) =:= pre.memberType(outerDefSym)) - outerDefSym = NoSymbol - else if (defSym.isStable && outerDefSym.isStable && - (pre.memberType(outerDefSym).termSymbol == defSym || defPre.memberType(defSym).termSymbol == outerDefSym)) - outerDefSym = NoSymbol - } - - pre = defPre - cx = defCx - symbolDepth = defDepth - } - 
checkAmbiguousWithEnclosing() - + var impSel: ImportSelector = null var impSym: Symbol = NoSymbol val importCursor = new ImportCursor(thisContext, name) import importCursor.{imp1, imp2} - def lookupImport(imp: ImportInfo, requireExplicit: Boolean) = - thisContext.importedAccessibleSymbol(imp, name, requireExplicit, record = true) filter qualifies - - var importLookupFor213MigrationWarning = false - def depthOk213 = { - currentRun.isScala213 && !thisContext.unit.isJava && !cx(ContextMode.InPackageClauseName) && defSym.exists && isPackageOwnedInDifferentUnit(defSym) && { - importLookupFor213MigrationWarning = true - true + // The symbol resolved by the given import for `name`, paired with the selector that was used. + // If `requireExplicit`, then only "named" or "specific" selectors are considered. + // In addition, the symbol must be accessible (in the current context) and satisfy the `qualifies` predicate. + def lookupImport(imp: ImportInfo, requireExplicit: Boolean): (ImportSelector, Symbol) = { + val pair @ (sel, sym) = imp.importedSelectedSymbol(name, requireExplicit) + if (sym == NoSymbol) pair + else { + val sym1 = thisContext.importedAccessibleSymbol(imp, sym).filter(qualifies) + if (sym1 eq sym) pair + else (sel, sym1) } } - // Java: A single-type-import declaration d in a compilation unit c of package p - // that imports a type named n shadows, throughout c, the declarations of: - // - // 1) any top level type named n declared in another compilation unit of p - // - // A type-import-on-demand declaration never causes any other declaration to be shadowed. - // - // Scala: Bindings of different kinds have a precedence defined on them: - // - // 1) Definitions and declarations that are local, inherited, or made available by a - // package clause in the same compilation unit where the definition occurs have - // highest precedence. - // 2) Explicit imports have next highest precedence. 
- def depthOk(imp: ImportInfo) = { - imp.depth > symbolDepth || - (thisContext.unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth) || - depthOk213 - } - - while (!impSym.exists && importCursor.imp1Exists && depthOk(importCursor.imp1)) { - impSym = lookupImport(imp1, requireExplicit = false) - if (!impSym.exists) - importCursor.advanceImp1Imp2() - } + /* Java: A single-type-import declaration d in a compilation unit c of package p + * that imports a type named n shadows, throughout c, the declarations of: + * + * 1) any top level type named n declared in another compilation unit of p + * + * A type-import-on-demand declaration never causes any other declaration to be shadowed. + * + * Scala: Bindings of different kinds have a defined precedence order: + * + * 1) Definitions and declarations in lexical scope have the highest precedence. + * 1b) Definitions and declarations that are either inherited, or made + * available by a package clause and also defined in the same compilation unit + * as the reference to them, have the next highest precedence. + * 2) Explicit imports have next highest precedence. + * 3) Wildcard imports have next highest precedence. + * 4) Bindings made available by a package clause, + * but not also defined in the same compilation unit as the reference to them, + * as well as bindings supplied by the compiler but not explicitly written in source code, + * have the lowest precedence. + */ + + /* Level 4 (see above) */ + def foreignDefined = defSym.exists && thisContext.isPackageOwnedInDifferentUnit(defSym) // SI-2458 + + // Find the first candidate import + def advanceCursorToNextImport(): Unit = { + val defIsLevel4 = foreignDefined + // can the import at this depth compete with the definition? + // If not, we can stop inspecting outer scopes (including more imports). + // A competing import can either shadow the definition or render it ambiguous. 
+ // + @inline def importCanShadowAtDepth(imp: ImportInfo) = { + @inline def importCompetesWithDefinition = + if (thisContext.unit.isJava) imp.depth == symbolDepth && defIsLevel4 + else defIsLevel4 + !cx(ContextMode.InPackageClauseName) && + (imp.depth > symbolDepth || importCompetesWithDefinition) + } - if (impSym.exists && importLookupFor213MigrationWarning) { - if (impSym != defSym && imp1.depth >= symbolDepth && !imp1.isExplicitImport(name)) { - val msg = - s"""This wildcard import imports ${impSym.fullName}, which is shadowed by ${defSym.fullName}. - |This is not according to the language specification and has changed in Scala 2.13, where ${impSym.fullName} takes precedence. - |To keep the same meaning in 2.12 and 2.13, un-import ${name} by adding `$name => _` to the import list.""".stripMargin - runReporting.warning(imp1.pos, msg, WarningCategory.Other, "") + while (!impSym.exists && importCursor.imp1Exists && importCanShadowAtDepth(importCursor.imp1)) { + val javaRule = thisContext.unit.isJava && defIsLevel4 + val (sel, sym) = lookupImport(imp1, requireExplicit = javaRule) + impSel = sel + impSym = sym + if (!impSym.exists) + importCursor.advanceImp1Imp2() } - impSym = NoSymbol } - - if (defSym.exists && impSym.exists) { - // imported symbols take precedence over package-owned symbols in different compilation units. - if (isPackageOwnedInDifferentUnit(defSym)) - defSym = NoSymbol + advanceCursorToNextImport() + + val preferDef: Boolean = defSym.exists && (!impSym.exists || { + // Does the import just import the defined symbol? + def reconcileAmbiguousImportAndDef: Boolean = { + val res = impSym == defSym + if (res) log(s"Suppressing ambiguous import, taking $defSym for $name") + res + } + // 4) root imported symbols have same (lowest) precedence as package-owned symbols in different compilation units. 
+ if (imp1.depth < symbolDepth && imp1.isRootImport && foreignDefined) + true + // 4) imported symbols have higher precedence than package-owned symbols in different compilation units. + // except that in Java, the import must be "explicit" (level 2) + else if (thisContext.unit.isJava && imp1.depth == symbolDepth && foreignDefined) + !importCursor.imp1Explicit + else if (!thisContext.unit.isJava && imp1.depth >= symbolDepth && foreignDefined) + false // Defined symbols take precedence over erroneous imports. else if (impSym.isError || impSym.name == nme.CONSTRUCTOR) - impSym = NoSymbol + true + // Try to reconcile them before giving up + else if (reconcileAmbiguousImportAndDef) + true // Otherwise they are irreconcilably ambiguous else return ambiguousDefnAndImport(defSym.alternatives.head.owner, imp1) + }) + + // If the defSym is at 4, and there is a def at 1b in scope due to packaging, then the reference is ambiguous. + // Also if defSym is at 1b inherited, the reference can be rendered ambiguous by a def at 1a in scope. 
+ val possiblyAmbiguousDefinition = + foundInSuper && cx.owner.isClass || + foreignDefined && !defSym.hasPackageFlag + if (possiblyAmbiguousDefinition && !thisContext.unit.isJava) { + val defSym0 = defSym + val pre0 = pre + val cx0 = cx + val depth0 = symbolDepth + val wasFoundInSuper = foundInSuper + val foundCompetingSymbol: () => Boolean = + if (foreignDefined) + // if the first found symbol (defSym0) is level 4 (foreignDefined), a lower level (1 or 1b) defSym is competing + () => defSym.exists && !foreignDefined + else { + // if defSym0 is level 1 or 1b, another defSym is competing if defined in an outer scope in the same file + () => defSym.exists && !(pre.typeSymbol.isPackageClass && !defSym.owner.isPackageClass) && !foundInSuper && !foreignDefined + // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + // defined in package object (or inherited into package object) + } + while ((cx ne NoContext) && cx.depth >= symbolDepth) cx = cx.outer + if (wasFoundInSuper) + while ((cx ne NoContext) && (cx.owner eq cx0.owner)) cx = cx.outer + var done = false + while (!done) { + nextDefinition(defSym0, pre0) + done = (cx eq NoContext) || foundCompetingSymbol() + if (!done && (cx ne NoContext)) cx = cx.outer + } + val nonOverlapping = defSym.exists && { + if (defSym.isOverloaded || defSym0.isOverloaded) !defSym.alternatives.exists(defSym0.alternatives.contains) + else defSym ne defSym0 + } + if (nonOverlapping) { + val ambiguity = + if (preferDef) ambiguousDefinitions(defSym, defSym0, wasFoundInSuper, cx0.enclClass.owner, thisContext.enclClass.owner) + else Some(ambiguousDefnAndImport(owner = defSym.owner, imp1)) + if (ambiguity.nonEmpty) return ambiguity.get + } + defSym = defSym0 + pre = pre0 + cx = cx0 + symbolDepth = depth0 } + if (preferDef) impSym = NoSymbol else defSym = NoSymbol + // At this point only one or the other of defSym and impSym might be set. 
- if (defSym.exists) { - val ambiguity = ambiguousWithEnclosing(outerDefSym, defSym, cx.enclClass.owner) - ambiguity.getOrElse(finishDefSym(defSym, pre)) - } else if (impSym.exists) { + if (defSym.exists) finishDefSym(defSym, pre) + else if (impSym.exists) { // If we find a competitor imp2 which imports the same name, possible outcomes are: // // - same depth, imp1 wild, imp2 explicit: imp2 wins, drop imp1 @@ -1384,15 +1627,11 @@ trait Contexts { self: Analyzer => // symbol (e.g. import foo.X followed by import foo._) then we discard imp2 // and proceed. If we cannot, issue an ambiguity error. while (lookupError == null && importCursor.keepLooking) { - // If not at the same depth, limit the lookup to explicit imports. - // This is desirable from a performance standpoint (compare to - // filtering after the fact) but also necessary to keep the unused - // import check from being misled by symbol lookups which are not - // actually used. - val other = lookupImport(imp2, requireExplicit = !importCursor.sameDepth) - - @inline def imp1wins() { importCursor.advanceImp2() } - @inline def imp2wins() { impSym = other; importCursor.advanceImp1Imp2() } + // If not at the same depth, only an explicit import can induce an ambiguity. + val (sel, other) = lookupImport(imp2, requireExplicit = !importCursor.sameDepth) + + @inline def imp1wins(): Unit = { importCursor.advanceImp2() } + @inline def imp2wins(): Unit = { impSel = sel ; impSym = other ; importCursor.advanceImp1Imp2() } if (!other.exists) // imp1 wins; drop imp2 and continue. imp1wins() else if (importCursor.imp2Wins) // imp2 wins; drop imp1 and continue. 
@@ -1402,24 +1641,30 @@ trait Contexts { self: Analyzer => case _ => lookupError = ambiguousImports(imp1, imp2) } } - // optimization: don't write out package prefixes - finish(duplicateAndResetPos.transform(imp1.qual), impSym) + + // the choice has been made + if (lookupError == null) { + // implicit searcher decides when import was used + if (thisContext.contextMode.inNone(InImplicitSearch)) + imp1.recordUsage(impSel, impSym) + + // optimization: don't write out package prefixes + finish(duplicateAndResetPos.transform(imp1.qual), impSym) + } + else finish(EmptyTree, NoSymbol) } else finish(EmptyTree, NoSymbol) } } /** A `Context` focussed on an `Import` tree */ - final class ImportContext(tree: Tree, owner: Symbol, scope: Scope, + final class ImportContext private[Contexts] ( + tree: Tree, owner: Symbol, scope: Scope, unit: CompilationUnit, outer: Context, override val isRootImport: Boolean, depth: Int, reporter: ContextReporter) extends Context(tree, owner, scope, unit, outer, depth, reporter) { - private[this] val impInfo: ImportInfo = { - val info = new ImportInfo(tree.asInstanceOf[Import], outerDepth) - if (settings.warnUnusedImport && openMacros.isEmpty && !isRootImport) // excludes java.lang/scala/Predef imports - allImportInfos(unit) ::= (info, owner) - info - } + private[this] val impInfo: ImportInfo = new ImportInfo(tree.asInstanceOf[Import], outerDepth, isRootImport) + override final def imports = impInfo :: super.imports override final def firstImport = Some(impInfo) override final def importOrNull = impInfo @@ -1438,19 +1683,23 @@ trait Contexts { self: Analyzer => * * To handle nested contexts, reporters share buffers. 
TODO: only buffer in BufferingReporter, emit immediately in ImmediateReporter */ - abstract class ContextReporter(private[this] var _errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, private[this] var _warningBuffer: mutable.LinkedHashSet[(Position, String, WarningCategory, Symbol)] = null) { - type Error = AbsTypeError - type Warning = (Position, String, WarningCategory, Symbol) - - def issue(err: AbsTypeError)(implicit context: Context): Unit = error(context.fixPosition(err.errPos), addDiagString(err.errMsg)) + abstract class ContextReporter(private[this] var _errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, private[this] var _warningBuffer: mutable.LinkedHashSet[ContextWarning] = null) { + def issue(err: AbsTypeError)(implicit context: Context): Unit = errorAndDumpIfDebug(context.fixPosition(err.errPos), addDiagString(err.errMsg), err.actions) - def echo(msg: String): Unit = echo(NoPosition, msg) + def echo(msg: String): Unit = echo(NoPosition, msg) def echo(pos: Position, msg: String): Unit = reporter.echo(pos, msg) - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = - runReporting.warning(pos, msg, category, site) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, actions: List[CodeAction] = Nil): Unit = + runReporting.warning(pos, msg, category, site, actions) - def error(pos: Position, msg: String): Unit + def error(pos: Position, msg: String, actions: List[CodeAction]): Unit + + final def errorAndDumpIfDebug(pos: Position, msg: String, actions: List[CodeAction]): Unit = { + error(pos, msg, actions) + if (settings.VdebugTypeError.value) { + Thread.dumpStack() + } + } protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = () @@ -1465,7 +1714,7 @@ trait Contexts { self: Analyzer => * - else, let this context reporter decide */ final def issueAmbiguousError(err: AbsAmbiguousTypeError)(implicit context: Context): Unit = - if (context.ambiguousErrors) 
reporter.error(context.fixPosition(err.errPos), addDiagString(err.errMsg)) // force reporting... see TODO above + if (context.ambiguousErrors) reporter.error(context.fixPosition(err.errPos), addDiagString(err.errMsg), err.actions) // force reporting... see TODO above else handleSuppressedAmbiguous(err) @inline final def withFreshErrorBuffer[T](expr: => T): T = { @@ -1483,7 +1732,7 @@ trait Contexts { self: Analyzer => if (target.isBuffering) { target ++= errors } else { - errors.foreach(e => target.error(e.errPos, e.errMsg)) + errors.foreach(e => target.errorAndDumpIfDebug(e.errPos, e.errMsg, e.actions)) } // TODO: is clearAllErrors necessary? (no tests failed when dropping it) // NOTE: even though `this ne target`, it may still be that `target.errorBuffer eq _errorBuffer`, @@ -1501,7 +1750,7 @@ trait Contexts { self: Analyzer => // Implicit relies on this most heavily, but there you know reporter.isInstanceOf[BufferingReporter] // can we encode this statically? - // have to pass in context because multiple contexts may share the same ReportBuffer + // have to pass in context because multiple contexts may share the same ContextReporter def reportFirstDivergentError(fun: Tree, param: Symbol, paramTp: Type)(implicit context: Context): Unit = errors.collectFirst { case dte: DivergentImplicitTypeError => dte @@ -1511,8 +1760,8 @@ trait Contexts { self: Analyzer => // no need to issue the problem again if we are still in silent mode if (context.reportErrors) { context.issue(divergent.withPt(paramTp)) - errorBuffer.retain { - case dte: DivergentImplicitTypeError => false + errorBuffer.filterInPlace { + case _: DivergentImplicitTypeError => false case _ => true } } @@ -1521,7 +1770,7 @@ trait Contexts { self: Analyzer => } def retainDivergentErrorsExcept(saved: DivergentImplicitTypeError) = - errorBuffer.retain { + errorBuffer.filterInPlace { case err: DivergentImplicitTypeError => err ne saved case _ => false } @@ -1537,29 +1786,29 @@ trait Contexts { self: Analyzer => 
protected def addDiagString(msg: String)(implicit context: Context): String = { val diagUsedDefaultsMsg = "Error occurred in an application involving default arguments." - if (context.diagUsedDefaults && !(msg endsWith diagUsedDefaultsMsg)) msg + "\n" + diagUsedDefaultsMsg + if (context.contextMode.inAny(ContextMode.DiagUsedDefaults) && !(msg endsWith diagUsedDefaultsMsg)) msg + "\n" + diagUsedDefaultsMsg else msg } final def emitWarnings() = if (_warningBuffer != null) { _warningBuffer foreach { - case (pos, msg, category, site) => runReporting.warning(pos, msg, category, site) + case ContextWarning(pos, msg, category, site, actions) => runReporting.warning(pos, msg, category, site, actions) } _warningBuffer = null } // [JZ] Contexts, pre- the scala/bug#7345 refactor, avoided allocating the buffers until needed. This // is replicated here out of conservatism. - private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results. - final protected def errorBuffer = { if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer } + private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results. 
+ final protected def errorBuffer = { if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer } final protected def warningBuffer = { if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer } - final def errors: immutable.Seq[Error] = errorBuffer.toVector - final def warnings: immutable.Seq[Warning] = warningBuffer.toVector + final def errors: Seq[AbsTypeError] = errorBuffer.toVector + final def warnings: Seq[ContextWarning] = warningBuffer.toVector final def firstError: Option[AbsTypeError] = errorBuffer.headOption // TODO: remove ++= and clearAll* entirely in favor of more high-level combinators like withFreshErrorBuffer - final private[typechecker] def ++=(errors: Traversable[AbsTypeError]): Unit = errorBuffer ++= errors + final private[typechecker] def ++=(errors: Iterable[AbsTypeError]): Unit = errorBuffer ++= errors // null references to buffers instead of clearing them, // as the buffers may be shared between different reporters @@ -1567,22 +1816,22 @@ trait Contexts { self: Analyzer => final def clearAllErrors(): Unit = { _errorBuffer = null } } - private[typechecker] class ImmediateReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[(Position, String, WarningCategory, Symbol)] = null) extends ContextReporter(_errorBuffer, _warningBuffer) { + private[typechecker] class ImmediateReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[ContextWarning] = null) extends ContextReporter(_errorBuffer, _warningBuffer) { override def makeBuffering: ContextReporter = new BufferingReporter(errorBuffer, warningBuffer) - def error(pos: Position, msg: String): Unit = reporter.error(pos, msg) + def error(pos: Position, msg: String, actions: List[CodeAction]): Unit = runReporting.error(pos, msg, actions) } - private[typechecker] class BufferingReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: 
mutable.LinkedHashSet[(Position, String, WarningCategory, Symbol)] = null) extends ContextReporter(_errorBuffer, _warningBuffer) { + private[typechecker] class BufferingReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[ContextWarning] = null) extends ContextReporter(_errorBuffer, _warningBuffer) { override def isBuffering = true override def issue(err: AbsTypeError)(implicit context: Context): Unit = errorBuffer += err // this used to throw new TypeError(pos, msg) -- buffering lets us report more errors (test/files/neg/macro-basic-mamdmi) // the old throwing behavior was relied on by diagnostics in manifestOfType - def error(pos: Position, msg: String): Unit = errorBuffer += TypeErrorWrapper(new TypeError(pos, msg)) + def error(pos: Position, msg: String, actions: List[CodeAction]): Unit = errorBuffer += TypeErrorWrapper(new TypeError(pos, msg), actions) - override def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = - warningBuffer += ((pos, msg, category, site)) + override def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, actions: List[CodeAction]): Unit = + warningBuffer += ContextWarning(pos, msg, category, site, actions) override protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = errorBuffer += err @@ -1596,105 +1845,121 @@ trait Contexts { self: Analyzer => */ private[typechecker] class ThrowingReporter extends ContextReporter { override def isThrowing = true - def error(pos: Position, msg: String): Unit = throw new TypeError(pos, msg) + def error(pos: Position, msg: String, actions: List[CodeAction]): Unit = throw new TypeError(pos, msg) } /** Used during a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? 
*/ private[typechecker] class CheckingReporter extends ContextReporter { - def error(pos: Position, msg: String): Unit = onTreeCheckerError(pos, msg) + def error(pos: Position, msg: String, actions: List[CodeAction]): Unit = onTreeCheckerError(pos, msg) } - class ImportInfo(val tree: Import, val depth: Int) { + class ImportInfo(val tree: Import, val depth: Int, val isRootImport: Boolean) { def pos = tree.pos - def posOf(sel: ImportSelector) = - if (sel.namePos >= 0) tree.pos withPoint sel.namePos else tree.pos + def posOf(sel: ImportSelector) = tree.posOf(sel) /** The prefix expression */ def qual: Tree = tree.symbol.info match { case ImportType(expr) => expr case ErrorType => tree setType NoType // fix for #2870 - case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug + case bad => throw new FatalError(s"symbol ${tree.symbol} has bad type: ${bad}") } /** Is name imported explicitly, not via wildcard? */ - def isExplicitImport(name: Name): Boolean = - tree.selectors exists (_.rename == name.toTermName) + def isExplicitImport(name: Name): Boolean = tree.selectors.exists(_.introduces(name)) /** The symbol with name `name` imported from import clause `tree`. */ - def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false, record = true) - - private def recordUsage(sel: ImportSelector, result: Symbol): Unit = { - debuglog(s"In $this at ${ pos.source.file.name }:${ posOf(sel).line }, selector '${ selectorString(sel) - }' resolved to ${ - if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" - else s"(expr=${tree.expr}, ${result.fullLocationString})" - }") - allUsedSelectors(this) += sel - } + def importedSymbol(name: Name): Symbol = importedSelectedSymbol(name, requireExplicit = false)._2 /** If requireExplicit is true, wildcard imports are not considered. 
*/ - def importedSymbol(name: Name, requireExplicit: Boolean, record: Boolean): Symbol = { + def importedSelectedSymbol(name: Name, requireExplicit: Boolean): (ImportSelector, Symbol) = { var result: Symbol = NoSymbol var renamed = false var selectors = tree.selectors @inline def current = selectors.head - while ((selectors ne Nil) && result == NoSymbol) { - def sameName(name: Name, other: Name) = { - (name eq other) || (name ne null) && name.start == other.start && name.length == other.length - } - def lookup(target: Name): Symbol = { - if (pos.source.isJava) { - val (_, sym) = NoContext.javaFindMember(qual.tpe, target, _ => true) - // We don't need to propagate the new prefix back out to the result of `Context.lookupSymbol` - // because typechecking .java sources doesn't need it. - sym + def maybeNonLocalMember(nom: Name): Symbol = + if (qual.tpe.isError) NoSymbol + // We don't need to propagate the new prefix back out to the result of `Context.lookupSymbol` + // because typechecking .java sources doesn't need it. 
+ else if (pos.source.isJava) NoContext.javaFindMember(qual.tpe, nom, _ => true)._2 + else { + val tp = qual.tpe + // opening package objects is delayed (scala/scala#9661), but that can lead to missing symbols for + // package object types that are forced early through Definitions; see scala/bug#12740 / scala/scala#10333 + if (phase.id < currentRun.typerPhase.id) { + val sym = tp.typeSymbol + if (sym.hasPackageFlag && analyzer.packageObjects.deferredOpen.remove(sym)) + openPackageModule(sym) } - else qual.tpe nonLocalMember target + tp.nonLocalMember(nom) } - if (sameName(current.rename, name)) { - val target = current.name asTypeOf name - result = lookup(target) - } else if (sameName(current.name, name)) + while ((selectors ne Nil) && result == NoSymbol) { + if (current.introduces(name)) + result = maybeNonLocalMember(current.name.asTypeOf(name)) + else if (!current.isWildcard && !current.isGiven && current.hasName(name)) renamed = true - else if (current.name == nme.WILDCARD && !renamed && !requireExplicit) - result = lookup(name) - + else if (!renamed && !requireExplicit) + if (current.isWildcard) + result = maybeNonLocalMember(name) + else if (current.isGiven) + result = maybeNonLocalMember(name).filter(_.isImplicit) + .orElse(maybeNonLocalMember(name.toTypeName).filter(_.isImplicit)) if (result == NoSymbol) selectors = selectors.tail } - if (record && settings.warnUnusedImport && selectors.nonEmpty && result != NoSymbol && pos != NoPosition) - recordUsage(current, result) - // Harden against the fallout from bugs like scala/bug#6745 + // Harden against the fallout from bugs like scala/bug#6745 and #5389 + // Enforce no importing universal members from root import Predef modules. // // [JZ] I considered issuing a devWarning and moving the // check inside the above loop, as I believe that // this always represents a mistake on the part of // the caller. 
- if (definitions isImportable result) result - else NoSymbol + result.filter(sym => + if (isRootImport) !definitions.isUnimportableUnlessRenamed(sym) + else definitions.isImportable(sym) + ) match { + case _: NoSymbol => TupleOfNullAndNoSymbol + case _ => (current, result) + } } - private def selectorString(s: ImportSelector): String = { - if (s.name == nme.WILDCARD && s.rename == null) "_" - else if (s.name == s.rename) "" + s.name - else s.name + " => " + s.rename + + def fullSelectorString(s: ImportSelector): String = + s"${if (qual.tpe.isError) tree.toString else qual.tpe.typeSymbol.fullName}.${selectorString(s)}" + + private def selectorString(s: ImportSelector): String = + if (s.isWildcard) "_" + else if (s.isRename) s"${s.name} => ${s.rename}" + else s.name.decoded + + /** Optionally record that a selector was used to import the given symbol. */ + def recordUsage(sel: ImportSelector, result: Symbol): Unit = { + debuglog(s"In $this at ${ pos.source.file.name }:${ posOf(sel).line }, selector '${ selectorString(sel) + }' resolved to ${ + if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" + else s"(expr=${tree.expr}, ${result.fullLocationString})" + }") + if (settings.warnUnusedImport && !isRootImport && result != NoSymbol && pos != NoPosition) + recordImportUsage(this, sel) } def allImportedSymbols: Iterable[Symbol] = - importableMembers(qual.tpe) flatMap (transformImport(tree.selectors, _)) + importableMembers(qual.tpe).flatMap(transformImport(tree.selectors, _)) + @tailrec private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match { - case List() => List() - case List(ImportSelector(nme.WILDCARD, _, _, _)) => List(sym) - case ImportSelector(from, _, to, _) :: _ if from == (if (from.isTermName) sym.name.toTermName else sym.name.toTypeName) => - if (to == nme.WILDCARD) List() + case Nil => Nil + case sel :: Nil if sel.isWildcard => + if (isRootImport && definitions.isUnimportableUnlessRenamed(sym)) Nil + 
else List(sym) + case (sel @ ImportSelector(from, _, to, _)) :: _ if from == (if (from.isTermName) sym.name.toTermName else sym.name.toTypeName) => + if (sel.isMask) Nil else List(sym.cloneSymbol(sym.owner, sym.rawflags, to)) case _ :: rest => transformImport(rest, sym) } override def hashCode = tree.## override def equals(other: Any) = other match { - case that: ImportInfo => (tree == that.tree) + case that: ImportInfo => tree == that.tree case _ => false } override def toString = tree.toString @@ -1728,9 +1993,11 @@ trait Contexts { self: Analyzer => def sameDepth: Boolean = imp1.depth == imp2.depth private def imp2Exists = imp2Ctx.importOrNull != null - private def imp1Explicit = imp1 isExplicitImport name + def imp1Explicit = imp1 isExplicitImport name private def imp2Explicit = imp2 isExplicitImport name } + + private val TupleOfNullAndNoSymbol = (null, NoSymbol) } object ContextMode { @@ -1793,7 +2060,17 @@ object ContextMode { /** Were default arguments used? */ final val DiagUsedDefaults: ContextMode = 1 << 18 - final val InPackageClauseName: ContextMode = 1 << 19 + /** Are we currently typing the core or args of an annotation? + * When set, Java annotations may be instantiated directly. + */ + final val TypingAnnotation: ContextMode = 1 << 19 + + final val InPackageClauseName: ContextMode = 1 << 20 + + /** Context created with makeImplicit, for use in implicit search. + * Controls whether import elements are marked used on lookup. + */ + final val InImplicitSearch: ContextMode = 1 << 21 /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode. 
* To mimic the sticky mode behavior, when captain stickyfingers @@ -1819,8 +2096,9 @@ object ContextMode { SuperInit -> "SuperInit", SecondTry -> "SecondTry", TypeConstructorAllowed -> "TypeConstructorAllowed", + DiagUsedDefaults -> "DiagUsedDefaults", SuppressDeadArgWarning -> "SuppressDeadArgWarning", - DiagUsedDefaults -> "DiagUsedDefaults" + TypingAnnotation -> "TypingAnnotation", ) } @@ -1842,5 +2120,5 @@ final class ContextMode private (val bits: Int) extends AnyVal { override def toString = if (bits == 0) "NOmode" - else (contextModeNameMap filterKeys inAll).values.toList.sorted mkString " " + else contextModeNameMap.view.filterKeys(inAll).values.toList.sorted.mkString(" ") } diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 3069d4818f9d..f4270492cfc1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 577cb04f2b38..efd2717a04eb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,14 +13,14 @@ package scala.tools.nsc package typechecker -import scala.tools.nsc.symtab.Flags +import scala.annotation._ import scala.collection.mutable +import scala.tools.nsc.symtab.Flags /** Duplicate trees and re-type check them, taking care to replace * and create fresh symbols for new local definitions. * * @author Iulian Dragos - * @version 1.0 */ abstract class Duplicators extends Analyzer { import global._ @@ -49,7 +49,7 @@ abstract class Duplicators extends Analyzer { override def newTyper(context: Context): Typer = newBodyDuplicator(context) - private def resetClassOwners() { + private def resetClassOwners(): Unit = { oldClassOwner = null newClassOwner = null } @@ -59,7 +59,7 @@ abstract class Duplicators extends Analyzer { private var envSubstitution: SubstTypeMap = _ private class SubstSkolemsTypeMap(from: List[Symbol], to: List[Type]) extends SubstTypeMap(from, to) { - protected override def matches(sym1: Symbol, sym2: Symbol) = + override protected def matches(sym1: Symbol, sym2: Symbol) = if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1 else sym1 eq sym2 } @@ -84,14 +84,14 @@ abstract class Duplicators extends Analyzer { val sym1 = ( context.scope lookup sym.name orElse { // try harder (look in outer scopes) - // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but - // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen) + // with virtpatmat, this could happen when the sym was referenced in the scope of a LabelDef but + // was defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen) BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol) } filter (_ ne sym) ) if (sym1.exists) { debuglog(s"fixing $sym -> $sym1") - typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams)) + typeRef(NoPrefix, sym1, args mapConserve this) } else super.mapOver(tpe) @@ -99,7 +99,7 @@ abstract class Duplicators extends Analyzer { 
val newsym = updateSym(sym) if (newsym ne sym) { debuglog("fixing " + sym + " -> " + newsym) - typeRef(mapOver(pre), newsym, mapOverArgs(args, newsym.typeParams)) + typeRef(mapOver(pre), newsym, args mapConserve this) } else super.mapOver(tpe) @@ -142,7 +142,7 @@ abstract class Duplicators extends Analyzer { else sym - private def invalidate(tree: Tree, owner: Symbol = NoSymbol) { + private def invalidate(tree: Tree, owner: Symbol = NoSymbol): Unit = { debuglog(s"attempting to invalidate symbol = ${tree.symbol}") if ((tree.isDef || tree.isInstanceOf[Function]) && tree.symbol != NoSymbol) { debuglog("invalid " + tree.symbol) @@ -189,14 +189,14 @@ abstract class Duplicators extends Analyzer { } } - private def invalidateAll(stats: List[Tree], owner: Symbol = NoSymbol) { + private def invalidateAll(stats: List[Tree], owner: Symbol = NoSymbol): Unit = { stats.foreach(invalidate(_, owner)) } /** Optionally cast this tree into some other type, if required. * Unless overridden, just returns the tree. */ - def castType(tree: Tree, pt: Type): Tree = tree + def castType(tree: Tree, @unused pt: Type): Tree = tree /** Special typer method for re-type checking trees. It expects a typed tree. * Returns a typed tree that has fresh symbols for all definitions in the original tree. @@ -261,7 +261,7 @@ abstract class Duplicators extends Analyzer { ldef.clearType() // is this LabelDef generated by tailcalls? 
- val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS) + val isTailLabel = (ldef.params.lengthIs >= 1) && (ldef.params.head.name == nme.THIS) // the typer does not create the symbols for a LabelDef's params, so unless they were created before we need // to do it manually here -- but for the tailcalls-generated labels, ValDefs are created before the LabelDef, @@ -294,7 +294,7 @@ abstract class Duplicators extends Analyzer { tree.symbol = updateSym(origtreesym) super.typed(tree.clearType(), mode, pt) - case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) => + case Select(th @ This(_), _) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) => // We use the symbol name instead of the tree name because the symbol // may have been name mangled, rendering the tree name obsolete. // ...but you can't just do a Select on a name because if the symbol is @@ -314,7 +314,7 @@ abstract class Duplicators extends Analyzer { case ((alt, tpe)) :: Nil => log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString") Select(This(newClassOwner), alt) - case xs => + case _ => alts filter (alt => (alt.paramss corresponds tree.symbol.paramss)(_.size == _.size)) match { case alt :: Nil => log(s"Resorted to parameter list arity to disambiguate to $alt\n Overload was: $memberString") @@ -355,7 +355,7 @@ abstract class Duplicators extends Analyzer { val scrutTpe = scrut1.tpe.widen val cases1 = { if (scrutTpe.isFinalType) cases filter { - case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) => + case CaseDef(Bind(_, Typed(_, tpt)), EmptyTree, body) => // the typed pattern is not incompatible with the scrutinee type scrutTpe matchesPattern fixType(tpt.tpe) case CaseDef(Typed(_, tpt), EmptyTree, body) => diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index 
44d0fe15dd1f..d50636f6c514 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,12 +14,12 @@ package scala.tools.nsc package typechecker import scala.collection.mutable.ListBuffer +import scala.reflect.internal.util.FreshNameCreator import symtab.Flags._ /** This trait ... * * @author Martin Odersky - * @version 1.0 */ trait EtaExpansion { self: Analyzer => import global._ @@ -30,7 +30,7 @@ trait EtaExpansion { self: Analyzer => * the target of the application and its supplied arguments if needed (they are not stable), * and then wraps a Function that abstracts over the missing arguments. * - * ``` + * {{{ * { * private synthetic val eta\$f = p.f // if p is not stable * ... @@ -38,18 +38,19 @@ trait EtaExpansion { self: Analyzer => * ... * (ps_1 => ... => ps_m => eta\$f([es_1])...([es_m])(ps_1)...(ps_m)) * } - * ``` + * }}} * - * This is called from instantiateToMethodType after type checking `tree`, - * and we realize we have a method type, where a function type (builtin or SAM) is expected. + * This is called from typedEtaExpansion, which itself is called from + * - instantiateToMethodType (for a naked method reference), or + * - typedEta (when type checking a method value, `m _`). 
* **/ - def etaExpand(unit: CompilationUnit, tree: Tree, typer: Typer): Tree = { + def etaExpand(tree: Tree, owner: Symbol)(implicit creator: FreshNameCreator): Tree = { val tpe = tree.tpe var cnt = 0 // for NoPosition def freshName() = { cnt += 1 - freshTermName("eta$" + (cnt - 1) + "$")(typer.fresh) + freshTermName("eta$" + (cnt - 1) + "$") } val defs = new ListBuffer[Tree] @@ -62,16 +63,22 @@ trait EtaExpansion { self: Analyzer => else { val vname: Name = freshName() // Problem with ticket #2351 here + val valSym = owner.newValue(vname.toTermName, tree.pos.focus, SYNTHETIC) defs += atPos(tree.pos) { val rhs = if (byName) { - val res = typer.typed(Function(List(), tree)) - new ChangeOwnerTraverser(typer.context.owner, res.symbol) traverse tree // scala/bug#6274 - res - } else tree - ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), rhs) + val funSym = valSym.newAnonymousFunctionValue(tree.pos.focus) + val tree1 = tree.changeOwner(owner -> funSym) + val funType = definitions.functionType(Nil, tree1.tpe) + funSym.setInfo(funType) + Function(List(), tree1).setSymbol(funSym).setType(funType) + } else { + tree.changeOwner(owner -> valSym) + } + valSym.setInfo(rhs.tpe) + ValDef(valSym, rhs) } atPos(tree.pos.focus) { - if (byName) Apply(Ident(vname), List()) else Ident(vname) + if (byName) Apply(Ident(valSym), List()) else Ident(valSym) } } val tree1 = tree match { @@ -87,11 +94,12 @@ trait EtaExpansion { self: Analyzer => liftoutPrefix(fun) case Apply(fn, args) => val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift + val liftedFn = liftoutPrefix(fn) // scala/bug#11465: lift fn before args val newArgs = mapWithIndex(args) { (arg, i) => // with repeated params, there might be more or fewer args than params liftout(arg, byName(i).getOrElse(false)) } - treeCopy.Apply(tree, liftoutPrefix(fn), newArgs).clearType() + treeCopy.Apply(tree, liftedFn, newArgs).clearType() case TypeApply(fn, args) => 
treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType() case Select(qual, name) => @@ -99,6 +107,7 @@ trait EtaExpansion { self: Analyzer => treeCopy.Select(tree, liftout(qual, byName = false), name).clearType() setSymbol NoSymbol case Ident(name) => tree + case x => throw new MatchError(x) } if (tree1 ne tree) tree1 setPos tree1.pos.makeTransparent tree1 @@ -127,6 +136,9 @@ trait EtaExpansion { self: Analyzer => } val tree1 = liftoutPrefix(tree) - atPos(tree.pos)(Block(defs.toList, expand(tree1, tpe))) + val expansion = expand(tree1, tpe) + + if (defs.isEmpty) expansion + else atPos(tree.pos)(Block(defs.toList, expansion)) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index b45cc86f4d91..89b75bd3eb67 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,22 +19,20 @@ package scala package tools.nsc package typechecker -import scala.annotation.tailrec -import scala.collection.mutable -import mutable.{LinkedHashMap, ListBuffer} -import scala.util.matching.Regex -import symtab.Flags._ +import scala.annotation.{nowarn, tailrec} +import scala.collection.mutable, mutable.{LinkedHashMap, ListBuffer} +import scala.language.implicitConversions import scala.reflect.internal.util.{ReusableInstance, Statistics, TriState} import scala.reflect.internal.TypesStats -import scala.language.implicitConversions -import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.Reporting.WarningCategory.{Scala3Migration, WFlagSelfImplicit} +import symtab.Flags._ +import PartialFunction.cond /** This trait provides methods to find various kinds of implicits. 
* * @author Martin Odersky - * @version 1.0 */ -trait Implicits { +trait Implicits extends splain.SplainData { self: Analyzer => import global._ @@ -42,6 +40,7 @@ trait Implicits { import statistics._ import typingStack.printTyping import typeDebug._ + import scala.util.matching.Regex.Match // standard usage def inferImplicitFor(pt: Type, tree: Tree, context: Context, reportAmbiguous: Boolean = true): SearchResult = @@ -104,12 +103,16 @@ trait Implicits { val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeImpl) else null val start = if (settings.areStatisticsEnabled) statistics.startTimer(implicitNanos) else null if (shouldPrint) - typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) + typingStack.printTyping(tree, s"typing implicit: $tree ${context.undetparamsString}") val implicitSearchContext = context.makeImplicit(reportAmbiguous) - val search = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos) + ImplicitErrors.startSearch(pt) + val dpt = if (isView) pt else dropByName(pt) + val isByName = dpt ne pt + val search = new ImplicitSearch(tree, dpt, isView, implicitSearchContext, pos, isByName) pluginsNotifyImplicitSearch(search) val result = search.bestImplicit pluginsNotifyImplicitSearchResult(result) + ImplicitErrors.finishSearch(result.isSuccess, pt) if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.reporter.hasErrors) implicitSearchContext.reporter.propagateImplicitTypeErrorsTo(context.reporter) @@ -124,15 +127,70 @@ trait Implicits { if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) - if (result.isSuccess && settings.warnSelfImplicit && result.tree.symbol != null) { - val s = - if (result.tree.symbol.isAccessor) result.tree.symbol.accessed - else if (result.tree.symbol.isModule) result.tree.symbol.moduleClass - else 
result.tree.symbol - if (s != NoSymbol && context.owner.hasTransOwner(s)) - context.warning(result.tree.pos, s"Implicit resolves to enclosing ${result.tree.symbol}", WarningCategory.WFlagSelfImplicit) + if (result.isSuccess) { + val rts = { + val infoSym = if (result.implicitInfo != null) result.implicitInfo.sym else NoSymbol + infoSym.orElse { + val rts0 = result.tree.symbol + if (rts0 != null) rts0 else NoSymbol + } + } + if (settings.lintImplicitRecursion) { + val target = + if (rts.isAccessor) rts.accessed + else if (rts.isModule) rts.moduleClass + else rts + def wrapped = { + val sym = tree match { + case NamedApplyBlock(i) => i.original.symbol + case t => t.symbol + } + if (sym == null) "expression" else if (sym.isMethod) s"result of $sym" else sym.toString + } + val rtsIsImplicitWrapper = isView && rts.isMethod && rts.isSynthetic && rts.isImplicit + def isSelfEnrichment(encl: Symbol): Boolean = + Option(tree.symbol).exists(s => s.isParamAccessor && s.owner == encl && !encl.isDerivedValueClass) + def targetsUniversalMember(target: => Type): Boolean = cond(pt) { + case TypeRef(pre, sym, _ :: RefinedType(WildcardType :: Nil, decls) :: Nil) => + sym == FunctionClass(1) && + decls.exists(d => d.isMethod && d.info == WildcardType && isUniversalMember(target.member(d.name))) + } + def targetsImplicitWrapper(encl: Symbol): Boolean = + encl.owner == rts.owner && encl.isClass && encl.isImplicit && encl.name == rts.name.toTypeName + if (target != NoSymbol) + context.owner.ownersIterator + .find(encl => encl == target || rtsIsImplicitWrapper && targetsImplicitWrapper(encl)) + .foreach { encl => + var doWarn = false + var help = "" + if (!encl.isClass) { + doWarn = true + if (encl.isMethod && targetsUniversalMember(encl.info.finalResultType)) + help = s"; the conversion adds a member of AnyRef to $wrapped" + } + else if (encl.isModuleClass) { + doWarn = true + } + else if (isSelfEnrichment(encl)) { + doWarn = true + help = s"; the enrichment wraps $wrapped" + } + else if 
(targetsUniversalMember(encl.info)) { + doWarn = true + help = s"; the conversion adds a member of AnyRef to $wrapped" + } + if (doWarn) + context.warning(result.tree.pos, s"Implicit resolves to enclosing $encl$help", WFlagSelfImplicit) + } + } + if (result.inPackagePrefix && currentRun.isScala3) { + val msg = + s"""Implicit $rts was found in a package prefix of the required type, which is not part of the implicit scope in Scala 3 (or with -Xsource-features:package-prefix-implicits). + |For migration, add `import ${rts.fullNameString}`.""".stripMargin + context.warning(result.tree.pos, msg, Scala3Migration) + } } - result + implicitSearchContext.emitImplicitDictionary(result) } /** A friendly wrapper over inferImplicit to be used in macro contexts and toolboxes. @@ -145,7 +203,7 @@ trait Implicits { if (result.isFailure && !silent) { val err = context.reporter.firstError val errPos = err.map(_.errPos).getOrElse(pos) - val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits") + val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. 
to find out the reason, turn on -Vimplicits") onError(errPos, errMsg) } result.tree @@ -167,7 +225,7 @@ trait Implicits { val tvars = tpars map (TypeVar untouchable _) val tpSubsted = tp.subst(tpars, tvars) - val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyTpe), true, context.makeImplicit(reportAmbiguousErrors = false)) + val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyTpe), isView = true, context.makeImplicit(reportAmbiguousErrors = false), isByNamePt = false) search.allImplicitsPoly(tvars) } @@ -180,32 +238,22 @@ trait Implicits { private val infoMapCache = new LinkedHashMap[Symbol, InfoMap] private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]() private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 } + private val shadowerUseOldImplementation = java.lang.Boolean.getBoolean("scalac.implicit.shadow.old") - def resetImplicits() { + def resetImplicits(): Unit = { implicitsCache.clear() infoMapCache.clear() improvesCache.clear() } - /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards. - * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate de Bruijn index types - * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`, - * so we have to approximate (otherwise it is excluded a priori). - */ - private def depoly(tp: Type): Type = tp match { - case PolyType(tparams, restpe) => deriveTypeWithWildcards(tparams)(ApproximateDependentMap(restpe)) - case _ => ApproximateDependentMap(tp) - } - /** The result of an implicit search * @param tree The tree representing the implicit * @param subst A substituter that represents the undetermined type parameters * that were instantiated by the winning implicit. 
* @param undetparams undetermined type parameters */ - class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) { - override def toString = "SearchResult(%s, %s)".format(tree, - if (subst.isEmpty) "" else subst) + class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol], val inPackagePrefix: Boolean = false, val implicitInfo: ImplicitInfo = null) { + override def toString = s"SearchResult($tree, ${if (subst.isEmpty) "" else subst}, $inPackagePrefix)" def isFailure = false def isAmbiguousFailure = false @@ -232,9 +280,9 @@ trait Implicits { * @param pre The prefix type of the implicit * @param sym The symbol of the implicit */ - class ImplicitInfo(val name: Name, val pre: Type, val sym: Symbol) { + class ImplicitInfo(val name: Name, val pre: Type, val sym: Symbol, val inPackagePrefix: Boolean = false, val importInfo: ImportInfo = null, val importSelector: ImportSelector = null) { private[this] var tpeCache: Type = null - private[this] var tpeDepolyCache : Type = null + private[this] var depolyCache: Type = null private[this] var isErroneousCache: TriState = TriState.Unknown /** Computes member type of implicit from prefix `pre` (cached). */ @@ -243,12 +291,29 @@ trait Implicits { tpeCache } - final def tpeDepoly: Type = { - if (tpeDepolyCache eq null) tpeDepolyCache = depoly(tpe) - tpeDepolyCache + /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards. + * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate de Bruijn index types + * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`, + * so we have to approximate (otherwise it is excluded a priori). 
+ */ + final def depoly: Type = { + if (depolyCache eq null) { + depolyCache = tpe match { + case PolyType(tparams, restpe) => deriveTypeWithWildcards(tparams)(ApproximateDependentMap(restpe)) + case _ => ApproximateDependentMap(tpe) + } + } + depolyCache + } + + def dependsOnPrefix: Boolean = pre match { + case SingleType(pre0, _) => tpe.exists(_ =:= pre0) + case _ => false } - final def isCyclicOrErroneous: Boolean = + def isSearchedPrefix: Boolean = name == null && sym == NoSymbol + + def isCyclicOrErroneous: Boolean = if(sym.hasFlag(LOCKED)) true else { if(!isErroneousCache.isKnown) @@ -267,7 +332,7 @@ trait Implicits { /** Does type `tp` contain an Error type as parameter or result? */ private final def containsError(tp: Type): Boolean = tp match { - case PolyType(tparams, restpe) => + case PolyType(_, restpe) => containsError(restpe) case NullaryMethodType(restpe) => containsError(restpe) @@ -291,10 +356,7 @@ trait Implicits { import scala.util.hashing.MurmurHash3._ finalizeHash(mix(mix(productSeed, name.##), sym.##), 2) } - override def toString = ( - if (tpeCache eq null) name + ": ?" - else name + ": " + tpe - ) + override def toString = s"$name: ${ if (tpeCache eq null) "?" else tpe.toString }" } /** A class which is used to track pending implicits to prevent infinite implicit searches. 
@@ -306,11 +368,16 @@ trait Implicits { private def isView_=(value: Boolean): Unit = _isView = value private[this] var _isView: Boolean = false + + def isByName: Boolean = _isByName + private def isByName_=(value: Boolean): Unit = _isByName = value + private[this] var _isByName: Boolean = false } object OpenImplicit { - def apply(info: ImplicitInfo, pt: Type, tree: Tree, isView: Boolean): OpenImplicit = { + def apply(info: ImplicitInfo, pt: Type, tree: Tree, isView: Boolean, isByName: Boolean): OpenImplicit = { val result = new OpenImplicit(info, pt, tree) result.isView = isView + result.isByName = isByName result } } @@ -323,6 +390,8 @@ trait Implicits { override def hashCode = 1 } + def SearchedPrefixImplicitInfo(pre: Type) = new ImplicitInfo(null, pre, NoSymbol) + /** A constructor for types ?{ def/type name: tp }, used in infer view to member * searches. */ @@ -337,6 +406,8 @@ trait Implicits { result } + // TODO: use ProtoType for HasMember/HasMethodMatching + /** An extractor for types of the form ? { name: ? 
} */ object HasMember { @@ -397,11 +468,11 @@ trait Implicits { * (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument) * If it's set to NoPosition, then position-based services will use `tree.pos` */ - class ImplicitSearch(val tree: Tree, val pt: Type, val isView: Boolean, val context0: Context, val pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors { + class ImplicitSearch(val tree: Tree, val pt: Type, val isView: Boolean, val context0: Context, val pos0: Position = NoPosition, val isByNamePt: Boolean = false) extends Typer(context0) with ImplicitsContextErrors { val searchId = implicitSearchId() private def typingLog(what: String, msg: => String) = { if (printingOk(tree)) - typingStack.printTyping(f"[search #$searchId] $what $msg") + typingStack.printTyping(s"[search #$searchId] $what $msg") } import infer._ @@ -428,8 +499,8 @@ trait Implicits { def pos = if (pos0 != NoPosition) pos0 else tree.pos @inline final def failure(what: Any, reason: => String, pos: Position = this.pos): SearchResult = { - if (settings.XlogImplicits) - reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason) + if (settings.debug.value) + reporter.echo(pos, s"$what is not a valid implicit value for $pt because:\n$reason") SearchFailure } /** Is implicit info `info1` better than implicit info `info2`? @@ -452,10 +523,9 @@ trait Implicits { def isPlausiblyCompatible(tp: Type, pt: Type) = checkCompatibility(fast = true, tp, pt) def normSubType(tp: Type, pt: Type) = checkCompatibility(fast = false, tp, pt) - /** Does type `dtor` dominate type `dted`? - * This is the case if the stripped cores `dtor1` and `dted1` of both types are - * the same wrt `=:=`, or if they overlap and the complexity of `dtor1` is higher - * than the complexity of `dted1`. + /** Does stripped core type `dtor` dominate the stripped core type `dted`? 
+ * This is the case if both types are the same wrt `=:=`, or if they overlap + * and the complexity of `dtor` is higher than the complexity of `dted`. * The _stripped core_ of a type is the type where * - all refinements and annotations are dropped, * - all universal and existential quantification is eliminated @@ -467,40 +537,72 @@ trait Implicits { * if one or both are intersection types with a pair of overlapping parent types. */ private def dominates(dtor: Type, dted: Type): Boolean = { - def core(tp: Type): Type = tp.dealiasWiden match { - case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner) - case AnnotatedType(annots, tp) => core(tp) - case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.upperBound))) - case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.upperBound))) - case _ => tp - } - def stripped(tp: Type): Type = { - // `t.typeSymbol` returns the symbol of the normalized type. If that normalized type - // is a `PolyType`, the symbol of the result type is collected. This is precisely - // what we require for scala/bug#5318. 
- val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol - deriveTypeWithWildcards(syms.distinct)(tp) - } @annotation.tailrec def sumComplexity(acc: Int, xs: List[Type]): Int = xs match { case h :: t => sumComplexity(acc + complexity(h), t) - case _: Nil.type => acc + case _ => acc } + def complexity(tp: Type): Int = tp.dealias match { case NoPrefix => 0 - case SingleType(pre, sym) => if (sym.hasPackageFlag) 0 else complexity(tp.dealiasWiden) + case SingleType(_, sym) => if (sym.hasPackageFlag) 0 else complexity(tp.dealiasWiden) case ThisType(sym) => if (sym.hasPackageFlag) 0 else 1 - case TypeRef(pre, sym, args) => 1 + complexity(pre) + sumComplexity(0, args) + case TypeRef(pre, _, args) => 1 + complexity(pre) + sumComplexity(0, args) case RefinedType(parents, _) => 1 + sumComplexity(0, parents) case _ => 1 } + def overlaps(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match { case (RefinedType(parents, _), _) => parents exists (overlaps(_, tp2)) case (_, RefinedType(parents, _)) => parents exists (overlaps(tp1, _)) case _ => tp1.typeSymbol == tp2.typeSymbol } - val dtor1 = stripped(core(dtor)) - val dted1 = stripped(core(dted)) - overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1)) + + overlaps(dtor, dted) && { + complexity(dtor) compareTo complexity(dted) match { + case 0 => dtor =:= dted + case cmp => cmp > 0 + } + } + } + + private def core(tp: Type): Type = tp.dealiasWiden match { + case RefinedType(parents, _) => intersectionType(parents map core, tp.typeSymbol.owner) + case AnnotatedType(_, underlying) => core(underlying) + case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.upperBound))) + case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.upperBound))) + case TypeRef(pre, sym, args) => + val coreArgs = args.mapConserve(core) + if (coreArgs eq args) tp + else typeRef(pre, sym, coreArgs) + case _ => tp + } + + private 
def stripped(tp: Type): Type = { + // `t.typeSymbol` returns the symbol of the normalized type. If that normalized type + // is a `PolyType`, the symbol of the result type is collected. This is precisely + // what we require for scala/bug#5318. + val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol + deriveTypeWithWildcards(syms.distinct)(tp) + } + + private object AllSymbols extends TypeCollector(Set[Symbol](NoSymbol)) { + def apply(tp: Type): Unit = tp match { + case SingleType(pre, sym) => + result += tp.typeSymbol + result += sym + apply(pre) + apply(tp.dealiasWiden) + case ThisType(sym) => + result += sym + case TypeRef(pre, sym, args) => + result += sym + apply(pre) + args.foreach(apply) + case RefinedType(parents, _) => + parents.foreach(apply) + case _ => + tp.foldOver(this) + } } /** The expected type with all undetermined type parameters replaced with wildcards. */ @@ -520,35 +622,77 @@ trait Implicits { // e.g. we have `class Foo(val bar: Bar)` and `class Bar(val x: Int)` // then it's quite reasonable for the macro writer to synthesize Complex[Foo] by calling `inferImplicitValue(typeOf[Complex[Bar])` // however if we didn't insert the `info.sym.isMacro` check here, then under some circumstances - // (e.g. as described here http://groups.google.com/group/scala-internals/browse_thread/thread/545462b377b0ac0a) + // (e.g. as described here https://groups.google.com/group/scala-internals/browse_thread/thread/545462b377b0ac0a) // `dominates` might decide that `Bar` dominates `Foo` and therefore a recursive implicit search should be prohibited // now when we yield control of divergent expansions to the macro writer, what happens next? 
// in the worst case, if the macro writer is careless, we'll get a StackOverflowException from repeated macro calls // otherwise, the macro writer could check `c.openMacros` and `c.openImplicits` and do `c.abort` when expansions are deemed to be divergent // upon receiving `c.abort` the typechecker will decide that the corresponding implicit search has failed // which will fail the entire stack of implicit searches, producing a nice error message provided by the programmer - val existsDominatedImplicit = tree != EmptyTree && context.openImplicits.exists { - case OpenImplicit(nfo, tp, tree1) => !nfo.sym.isMacro && tree1.symbol == tree.symbol && dominates(pt, tp) - } + val existsDominatedImplicit: Boolean = + if (tree == EmptyTree) false + else { + lazy val ptStripped = stripped(core(pt)) + lazy val ptStrippedSyms = AllSymbols.collect(ptStripped) - if(existsDominatedImplicit) { - //println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG - DivergentSearchFailure - } else { - try { - context.openImplicits = OpenImplicit(info, pt, tree, isView) :: context.openImplicits - // println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG - val result = typedImplicit0(info, ptChecked, isLocalToCallsite) - if (result.isDivergent) { - //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG - if (context.openImplicits.tail.isEmpty && !pt.isErroneous) - DivergingImplicitExpansionError(tree, pt, info.sym)(context) - } - result - } finally { - context.openImplicits = context.openImplicits.tail - } - } + // Are all the symbols of the stripped core of dominating pt contained in the stripped core of tp? 
+ def coversDominatingPt(tp: Type): Boolean = { + val tpStripped = stripped(core(tp)) + dominates(ptStripped, tpStripped) && ptStrippedSyms == AllSymbols.collect(tpStripped) + } + + @tailrec + def loop(ois: List[OpenImplicit], belowByName: Boolean): Boolean = ois match { + case Nil => false + case (hd @ OpenImplicit(info1, tp, tree1)) :: tl => + val possiblyDominated = !info1.sym.isMacro && tree1.symbol == tree.symbol + if (possiblyDominated && belowByName && tp =:= pt) false + else if (possiblyDominated && coversDominatingPt(tp)) true + else loop(tl, hd.isByName || belowByName) + } + + loop(context.openImplicits, this.isByNamePt) + } + + if(existsDominatedImplicit) { + //println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG + DivergentSearchFailure + } else { + val ref = context.refByNameImplicit(pt) + if(ref != EmptyTree) + new SearchResult(ref, EmptyTreeTypeSubstituter, Nil, inPackagePrefix = info.inPackagePrefix) + else { + @tailrec + def loop(ois: List[OpenImplicit], isByName: Boolean): Option[OpenImplicit] = + ois match { + case hd :: _ if (isByName || hd.isByName) && hd.pt <:< pt => Some(hd) + case hd :: tl => loop(tl, isByName || hd.isByName) + case _ => None + } + + val recursiveImplicit: Option[OpenImplicit] = loop(context.openImplicits, isByNamePt) + + recursiveImplicit match { + case Some(rec) => + val ref = atPos(pos.focus)(context.linkByNameImplicit(rec.pt)) + new SearchResult(ref, EmptyTreeTypeSubstituter, Nil, inPackagePrefix = info.inPackagePrefix) + case None => + try { + context.openImplicits = OpenImplicit(info, pt, tree, isView, isByNamePt) :: context.openImplicits + //println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG + val result = typedImplicit0(info, ptChecked, isLocalToCallsite) + if (result.isDivergent) { + //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG + if (context.openImplicits.tail.isEmpty && !pt.isErroneous) + 
DivergingImplicitExpansionError(tree, pt, info.sym)(context) + result + } else context.defineByNameImplicit(pt, result) + } finally { + context.openImplicits = context.openImplicits.tail + } + } + } + } } /** Does type `tp` match expected type `pt` @@ -571,13 +715,13 @@ trait Implicits { result } private def matchesPt(info: ImplicitInfo): Boolean = ( - info.isStablePrefix && matchesPt(info.tpeDepoly, wildPt, Nil) + info.isStablePrefix && matchesPt(info.depoly, wildPt, Nil) ) private def matchesPtView(tp: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean = tp match { case MethodType(p :: _, restpe) if p.isImplicit => matchesPtView(restpe, ptarg, ptres, undet) case MethodType(p :: Nil, restpe) => matchesArgRes(p.tpe, restpe, ptarg, ptres, undet) - case ExistentialType(_, qtpe) => matchesPtView(normalize(qtpe), ptarg, ptres, undet) + case ExistentialType(_, qtpe) => matchesPtView(methodToExpressionTp(qtpe), ptarg, ptres, undet) case Function1(arg1, res1) => matchesArgRes(arg1, res1, ptarg, ptres, undet) case _ => false } @@ -606,6 +750,9 @@ trait Implicits { if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch1) false } else { + // we can't usefully prune views any further because we would need to type an application + // of the view to the term as is done in the computation of itree2 in typedImplicit1. + tvars.foreach(_.constr.stopWideningIfPrecluded()) val targs = solvedTypes(tvars, allUndetparams, varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) val tpSubst = deriveTypeWithWildcards(adjusted.undetParams)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) @@ -614,11 +761,11 @@ trait Implicits { false } else true } - } catch { - case _: NoInstance => false - } - case _ => true - } + } catch { + case _: NoInstance => false + } + case _ => true + } } /** Capturing the overlap between isPlausiblyCompatible and normSubType. 
@@ -633,8 +780,8 @@ trait Implicits { if (mt.isImplicit) loop(restpe, pt) else pt match { - case tr @ TypeRef(pre, sym, args) => - if (sym.isAliasType) loop(tp, pt.normalize) // OPT .normalize caches internally and means the same as .dealias for non higher-kinded TypeRefs + case TypeRef(pre, sym, args) => + if (sym.isAliasType) loop(tp, pt.dealias) else if (sym.isAbstractType) loop(tp, pt.lowerBound) else { val ptFunctionArity = functionArity(pt) @@ -649,17 +796,14 @@ trait Implicits { as = as.tail } } else { - while (ps.nonEmpty && as.nonEmpty) { + while (!(ps.isEmpty || as.isEmpty)) { if (!(as.head <:< ps.head.tpe)) return false ps = ps.tail as = as.tail } } - ps.isEmpty && as.nonEmpty && { - val lastArg = as.head - as.tail.isEmpty && loop(restpe, lastArg) - } + ps.isEmpty && !as.isEmpty && as.tail.isEmpty && loop(restpe, as.head) } } @@ -667,12 +811,20 @@ trait Implicits { } case NullaryMethodType(restpe) => loop(restpe, pt) case PolyType(_, restpe) => loop(restpe, pt) - case ExistentialType(_, qtpe) => if (fast) loop(qtpe, pt) else normalize(tp) <:< pt // is !fast case needed?? - case _ => if (fast) isPlausiblySubType(tp, pt) else tp <:< pt + case ExistentialType(_, qtpe) => if (fast) loop(qtpe, pt) else methodToExpressionTp(tp) <:< pt // is !fast case needed?? 
+ case _ => (if (fast) isPlausiblySubType(tp, pt) else tp <:< pt) && { + pt match { + case RefinedType(_, syms) if !syms.isEmpty => + syms.reverseIterator.exists(x => context.isAccessible(tp.nonPrivateMember(x.name), tp)) + case _ => + true + } + } } loop(tp0, pt0) } + @annotation.unused private def isImpossibleSubType(tp1: Type, tp2: Type): Boolean = !isPlausiblySubType(tp1, tp2) private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = tp1.dealiasWiden match { @@ -740,30 +892,27 @@ trait Implicits { @inline def fail(reason: => String): SearchResult = failure(itree0, reason) def fallback = typed1(itree1, EXPRmode, wildPt) try { + // try to infer implicit parameters immediately in order to: + // 1) guide type inference for implicit views + // 2) discard ineligible views right away instead of risking spurious ambiguous implicits + // + // this is an improvement of the state of the art that brings consistency to implicit resolution rules + // (and also helps fundep materialization to be applicable to implicit views) + // + // there's one caveat though. 
we need to turn this behavior off for scaladoc + // because scaladoc usually doesn't know the entire story + // and is just interested in views that are potentially applicable + // for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???` + // then Scaladoc will give us something of type `C[T]`, and it would like to know + // that `conv` is potentially available under such and such conditions val itree2 = if (!isView) fallback else pt match { case Function1(arg1, arg2) => - typed1( + val applied = typed1( atPos(itree0.pos)(Apply(itree1, Ident(nme.argument).setType(approximate(arg1)) :: Nil)), EXPRmode, approximate(arg2) - ) match { - // try to infer implicit parameters immediately in order to: - // 1) guide type inference for implicit views - // 2) discard ineligible views right away instead of risking spurious ambiguous implicits - // - // this is an improvement of the state of the art that brings consistency to implicit resolution rules - // (and also helps fundep materialization to be applicable to implicit views) - // - // there's one caveat though. we need to turn this behavior off for scaladoc - // because scaladoc usually doesn't know the entire story - // and is just interested in views that are potentially applicable - // for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???` - // then Scaladoc will give us something of type `C[T]`, and it would like to know - // that `conv` is potentially available under such and such conditions - case tree if isImplicitMethodType(tree.tpe) && !isScaladoc => - applyImplicitArgs(tree) - case tree => tree - } + ) + if (isImplicitMethodType(applied.tpe) && !isScaladoc) applyImplicitArgs(applied) else applied case _ => fallback } context.reporter.firstError match { // using match rather than foreach to avoid non local return. @@ -812,10 +961,12 @@ trait Implicits { // In case we stepped on a macro along the way, the macro was expanded during the call to adapt. 
Along the way, // any type parameters that were instantiated were NOT yet checked for bounds, so we need to repeat the above // bounds check on the expandee tree - itree3.attachments.get[MacroExpansionAttachment] match { - case Some(MacroExpansionAttachment(exp @ TypeApply(fun, targs), _)) => - checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targs.map(_.tpe), "inferred ") - case _ => () + itree3.attachments.get[MacroExpansionAttachment].foreach { + case MacroExpansionAttachment(exp @ TypeApply(fun, targs), _) => + val targTpes = mapList(targs)(_.tpe) + val withinBounds = checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targTpes, "inferred ") + if (!withinBounds) splainPushNonconformantBonds(pt, tree, targTpes, undetParams, None) + case _ => } context.reporter.firstError match { @@ -829,6 +980,7 @@ trait Implicits { // prototype == WildcardType: want to remove all inferred Nothings val adjusted = adjustTypeArgs(undetParams, tvars, targs) import adjusted.{okParams, okArgs} + enhanceBounds(okParams, okArgs, undetParams) val subst: TreeTypeSubstituter = if (okParams.isEmpty) EmptyTreeTypeSubstituter @@ -860,9 +1012,10 @@ trait Implicits { context.reporter.firstError match { case Some(err) => + splainPushImplicitSearchFailure(itree3, pt, err) fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => - val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) + val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams, inPackagePrefix = info.inPackagePrefix) if (settings.areStatisticsEnabled) statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result @@ -931,12 +1084,13 @@ trait Implicits { private def isIneligible(info: ImplicitInfo) = ( info.isCyclicOrErroneous - || isView && (info.sym eq Predef_conforms) // as an implicit conversion, Predef.$conforms is a no-op, so exclude it + || isView 
&& ((info.sym eq Predef_conforms) || (info.sym eq SubType_refl)) // as implicit conversions, Predef.$conforms and <:<.refl are no-op, so exclude them || (!context.macrosEnabled && info.sym.isTermMacro) ) /** True if a given ImplicitInfo (already known isValid) is eligible. */ + @nowarn("cat=lint-inaccessible") def survives(info: ImplicitInfo, shadower: Shadower) = ( !isIneligible(info) // cyclic, erroneous, shadowed, or specially excluded && isPlausiblyCompatible(info.tpe, wildPt) // optimization to avoid matchesPt @@ -958,11 +1112,11 @@ trait Implicits { object DivergentImplicitRecovery { private var divergentError: Option[DivergentImplicitTypeError] = None - private def saveDivergent(err: DivergentImplicitTypeError) { + private def saveDivergent(err: DivergentImplicitTypeError): Unit = { if (divergentError.isEmpty) divergentError = Some(err) } - def issueSavedDivergentError() { + def issueSavedDivergentError(): Unit = { divergentError foreach (err => context.issue(err)) } @@ -993,15 +1147,12 @@ trait Implicits { /** Sorted list of eligible implicits. */ - private def eligibleOld = Shadower.using(isLocalToCallsite){ shadower => - val matches = iss flatMap { is => + private def eligibleOld = Shadower.using(isLocalToCallsite) { shadower => + iss flatMap { is => val result = is filter (info => checkValid(info.sym) && survives(info, shadower)) shadower addInfos is result } - - // most frequent one first - matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) } /** Sorted list of eligible implicits. @@ -1069,18 +1220,15 @@ trait Implicits { } if (removed) matches.removeIf(_ == null) // remove for real now. } - // most frequent one first. Sort in-place. 
- matches.sort(((x, y) => java.lang.Integer.compare(y.info.useCount(isView), x.info.useCount(isView)))) val result = new ListBuffer[ImplicitInfo] matches.forEach(x => result += x.info) result.toList } } - val eligible = if (shadowerUseOldImplementation) eligibleOld else eligibleNew - + val eligible: List[ImplicitInfo] = if (shadowerUseOldImplementation) eligibleOld else eligibleNew if (eligible.nonEmpty) - printTyping(tree, eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}") + printTyping(tree, s"${eligible.size} eligible for pt=$pt at ${fullSiteString(context)}") /** Faster implicit search. Overall idea: * - prune aggressively @@ -1101,19 +1249,28 @@ trait Implicits { firstPending == alt || ( try improves(firstPending, alt) catch { - case e: CyclicReference => + case _: CyclicReference => devWarning(s"Discarding $firstPending during implicit search due to cyclic reference.") true } ) - val mark = undoLog.log - val typedFirstPending = - if(isView || wildPtNotInstantiable || matchesPtInst(firstPending)) - typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) - else SearchFailure - if (typedFirstPending.isFailure && currentRun.isScala213) + val savedInfos = undetParams.map(_.info) + val typedFirstPending = { + try { + if (isView || wildPtNotInstantiable || matchesPtInst(firstPending)) { + val res = typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) + if (res.isFailure) res + else + new SearchResult(res.tree, res.subst, res.undetparams, res.inPackagePrefix, implicitInfo = firstPending) + } + else SearchFailure + } finally { + foreach2(undetParams, savedInfos){ (up, si) => up.setInfo(si) } + } + } + if (typedFirstPending.isFailure) undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass the errors to `DivergentImplicitRecovery` so that it can note @@ -1128,7 +1285,9 @@ trait Implicits { val pendingImprovingBest = undoLog undo { otherPending filterNot 
firstPendingImproves } - rankImplicits(pendingImprovingBest, (newBest, firstPending) :: acc) + + if (pt.typeSymbol.hasAnnotation(definitions.LanguageFeatureAnnot)) (newBest, firstPending):: Nil + else rankImplicits(pendingImprovingBest, (newBest, firstPending) :: acc) } } @@ -1162,9 +1321,11 @@ trait Implicits { if (invalidImplicits.nonEmpty) setAddendum(pos, () => - s"\n Note: implicit ${invalidImplicits.head} is not applicable here because it comes after the application point and it lacks an explicit result type" + s"\n Note: implicit ${invalidImplicits.head} is not applicable here because it comes after the application point and it lacks an explicit result type.${if (invalidImplicits.head.isModule) " An object can be written as a lazy val with an explicit type." else ""}" ) } + else if (best.implicitInfo != null && best.implicitInfo.importInfo != null) + best.implicitInfo.importInfo.recordUsage(best.implicitInfo.importSelector, best.tree.symbol) best } @@ -1199,7 +1360,7 @@ trait Implicits { */ def searchImplicit(implicitInfoss: Infoss, isLocalToCallsite: Boolean): SearchResult = if (implicitInfoss.forall(_.isEmpty)) SearchFailure - else new ImplicitComputation(implicitInfoss, isLocalToCallsite) findBest() + else new ImplicitComputation(implicitInfoss, isLocalToCallsite).findBest() /** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to * the implicit infos in the companion objects of these class symbols C. @@ -1214,38 +1375,49 @@ trait Implicits { * bound, the implicits infos which are members of these companion objects. */ private def companionImplicitMap(tp: Type): InfoMap = { - val isScala213 = currentRun.isScala213 /* Populate implicit info map by traversing all parts of type `tp`. * Parameters as for `getParts`. 
*/ def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.HashSet[Type], pending: Set[Symbol]) = tp match { - case TypeRef(pre, sym, args) => - infoMap get sym match { - case Some(infos1) => - infos1 match { - case head :: _ if !(pre =:= head.pre.prefix) => - log(s"Ignoring implicit members of $pre#$sym as it is also visible via another prefix: ${infos1.head.pre.prefix}") - infoMap(sym) = List() // ambiguous prefix - ignore implicit members - case _ => + case TypeRef(pre, sym, _) => + val symInfos = infoMap.getOrElse(sym, Nil) + if(!symInfos.exists(pre =:= _.pre.prefix)) { + if (symInfos.exists(_.isSearchedPrefix)) + infoMap(sym) = SearchedPrefixImplicitInfo(pre) :: symInfos + else if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) { + val (pre1, inPackagePrefix) = + if (sym.isPackageClass) (sym.packageObject.typeOfThis, true) + else (singleType(pre, companionSymbolOf(sym, context)), false) + val preInfos = { + if (currentRun.sourceFeatures.packagePrefixImplicits && inPackagePrefix) Iterator.empty + else pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem, inPackagePrefix = inPackagePrefix)) } - case None => - if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) { - val pre1 = - if (sym.isPackageClass) sym.packageObject.typeOfThis - else singleType(pre, companionSymbolOf(sym, context)) - val infos = pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem)).toList - if (infos.nonEmpty) - infoMap(sym) = infos - } - val bts = tp.baseTypeSeq - var i = 1 - while (i < bts.length) { - getParts(bts(i)) - i += 1 + val mergedInfos = if (symInfos.isEmpty) preInfos else { + if (shouldLogAtThisPhase && symInfos.exists(!_.dependsOnPrefix)) log { + val nonDepInfos = symInfos.iterator.filterNot(_.dependsOnPrefix).mkString("(", ", ", ")") + val prefix = symInfos.head.pre.prefix + s"Implicit members $nonDepInfos of $pre#$sym which are also visible via another prefix: $prefix" + } + + (symInfos.iterator ++ 
preInfos).filter(_.dependsOnPrefix) } - getParts(pre) + + if (mergedInfos.hasNext) + infoMap(sym) = mergedInfos.toList + else + infoMap(sym) = List(SearchedPrefixImplicitInfo(pre)) } + // Only strip annotations on the infrequent path + val bts = (if (symInfos.isEmpty) tp else tp.map(_.withoutAnnotations)).baseTypeSeq + var i = 1 + while (i < bts.length) { + getParts(bts(i)) + i += 1 + } + getParts(pre) + } + case x => throw new MatchError(x) } /* Populate implicit info map by traversing all parts of type `tp`. @@ -1256,40 +1428,29 @@ trait Implicits { * @param pending The set of static symbols for which we are currently trying to collect their parts * in order to cache them in infoMapCache */ - def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.HashSet[Type], pending: Set[Symbol]) { + def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.HashSet[Type], pending: Set[Symbol]): Unit = { if (seen add tp) tp match { case TypeRef(pre, sym, args) => - if (sym.isClass && !sym.isRoot && - (isScala213 || !sym.isAnonOrRefinementClass)) { - if (sym.isStatic && !(pending contains sym)) - infoMap ++= { - infoMapCache get sym match { - case Some(imap) => imap - case None => - val result = new InfoMap - getClassParts(sym.tpeHK)(result, new mutable.HashSet(), pending + sym) - infoMapCache(sym) = result - result - } - } + if (sym.isClass && !sym.isRoot) { + if (sym.isStatic && !pending.contains(sym)) + infoMap ++= infoMapCache.getOrElseUpdate(sym, { + val result = new InfoMap + getClassParts(sym.tpeHK)(result, new mutable.HashSet, pending + sym) + result + }) else getClassParts(tp) - args foreach getParts + args.foreach(getParts) } else if (sym.isAliasType) { getParts(tp.normalize) // scala/bug#7180 Normalize needed to expand HK type refs } else if (sym.isAbstractType) { // SLS 2.12, section 7.2: - // - if `T` is an abstract type, the parts of its upper bound; getParts(tp.upperBound) - - if (isScala213) { - // - if `T` is a parameterized type `S[T1,…,Tn]`, the 
union of the parts of `S` and `T1,…,Tn` - args foreach getParts - - // - if `T` is a type projection `S#U`, the parts of `S` as well as `T` itself; - getParts(pre) - } + // - if `T` is a parameterized type `S[T1,…,Tn]`, the union of the parts of `S` and `T1,…,Tn` + args.foreach(getParts) + // - if `T` is a type projection `S#U`, the parts of `S` as well as `T` itself; + getParts(pre) } case ThisType(_) => getParts(tp.widen) @@ -1306,14 +1467,22 @@ trait Implicits { getParts(t) case PolyType(_, t) => getParts(t) + // not needed, a view's expected type is normalized in typer by normalizeProtoForView: + // case proto: OverloadedArgProto => getParts(proto.underlying) case _ => } } val infoMap = new InfoMap getParts(tp)(infoMap, new mutable.HashSet(), Set()) + val emptyInfos = mutable.ArrayBuffer[Symbol]() + infoMap.foreachEntry { (k, v) => + if (v.exists(_.isSearchedPrefix)) + emptyInfos.addOne(k) + } + emptyInfos.foreach(infoMap.remove) if (infoMap.nonEmpty) - printTyping(tree, infoMap.size + " implicits in companion scope") + printTyping(tree, "" + infoMap.size + " implicits in companion scope") infoMap } @@ -1341,7 +1510,7 @@ trait Implicits { if (settings.areStatisticsEnabled) statistics.stopTimer(subtypeETNanos, start) implicitsCache(pt) = implicitInfoss1 if (implicitsCache.size >= sizeLimit) - implicitsCache -= implicitsCache.keysIterator.next + implicitsCache -= implicitsCache.keysIterator.next() implicitInfoss1 } } @@ -1392,17 +1561,17 @@ trait Implicits { // so that if we find one, we could convert it to whatever universe we need by the means of the `in` method // if no tag is found in scope, we end up here, where we ask someone to materialize the tag for us // however, since the original search was about a tag with no particular prefix, we cannot proceed - // this situation happens very often, so emitting an error message here (even if only for -Xlog-implicits) would be too much + // this situation happens very often, so emitting an error message here (even 
if only for -Vimplicits) would be too much //return failure(tp, "tag error: unsupported prefix type %s (%s)".format(pre, pre.kind)) return SearchFailure } ) // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) - if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) + if (settings.debug.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) if (context.macrosEnabled) success(materializer) // don't call `failure` here. if macros are disabled, we just fail silently - // otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled" + // otherwise -Vimplicits/-Vdebug will spam the long with zillions of "macros are disabled" // this is ugly but temporary, since all this code will be removed once I fix implicit macros else SearchFailure } @@ -1444,7 +1613,7 @@ trait Implicits { // can't generate a reference to a value that's abstracted over by an existential if (containsExistential(tp1)) EmptyTree else manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1)) - case ConstantType(value) => + case ConstantType(_) => manifestOfType(tp1.deconst, FullManifestClass) case TypeRef(pre, sym, args) => if (isPrimitiveValueClass(sym) || isPhantomClass(sym)) { @@ -1484,7 +1653,7 @@ trait Implicits { if (hasLength(parents, 1)) findManifest(parents.head) else if (full) manifestFactoryCall("intersectionType", tp, parents map findSubManifest: _*) else mot(erasure.intersectionDominator(parents), from, to) - case ExistentialType(tparams, result) => + case ExistentialType(_, _) => mot(tp1.skolemizeExistential, from, to) case _ => EmptyTree @@ -1524,6 +1693,22 @@ trait Implicits { } } + /** Creates a tree that will produce a ValueOf 
instance for the requested type. + * An EmptyTree is returned if materialization fails. + */ + private def valueOfType(tp: Type): SearchResult = { + def success(t: Tree) = wrapResult(Apply(Select(New(gen.scalaDot(tpnme.ValueOf)), nme.CONSTRUCTOR), List(t))) + + tp.dealias match { + case ConstantType(c: Constant) => success(Literal(c)) + case SingleType(p, v) => success(gen.mkAttributedRef(p, v) setType tp) + case ThisType(sym) => success(gen.mkAttributedThis(sym) setType tp) + case UnitTpe => success(Literal(Constant(()))) + case TypeRef(pre, sym, Nil) if sym.isModuleClass => success(gen.mkAttributedRef(pre, sym.sourceModule) setType tp) + case _ => SearchFailure + } + } + def wrapResult(tree: Tree): SearchResult = if (tree == EmptyTree) SearchFailure else new SearchResult(atPos(pos.focus)(tree), EmptyTreeTypeSubstituter, Nil) @@ -1535,9 +1720,12 @@ trait Implicits { case TypeRef(_, sym, _) if sym.isAbstractType => materializeImplicit(pt.dealias.lowerBound) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.lowerBound == pt) case pt @ TypeRef(pre, sym, arg :: Nil) => + // TODO: is there any way in which an OverloadedArgProto could sneak into one of these special expected types for implicit search? 
+ // As the outer expected type, it's normalized in typer by normalizeProtoForView sym match { case sym if ManifestSymbols(sym) => manifestOfType(arg, sym) case sym if TagSymbols(sym) => tagOfType(pre, arg, sym) + case ValueOfClass => valueOfType(arg) // as of late ClassManifest is an alias of ClassTag // hence we need to take extra care when performing dealiasing // because it might destroy the flavor of the manifest requested by the user @@ -1626,9 +1814,9 @@ trait Implicits { val outSym = out.typeSymbol val fail = - if (out.annotations.isEmpty && (outSym == ObjectClass || (currentRun.isScala211 && outSym == AnyValClass))) + if (out.annotations.isEmpty && (outSym == ObjectClass || outSym == AnyValClass)) maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than $out") - else if (currentRun.isScala211 && in.annotations.isEmpty && in.typeSymbol == NullClass) + else if (in.annotations.isEmpty && in.typeSymbol == NullClass) maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion") else false @@ -1677,7 +1865,7 @@ trait Implicits { } class ImplicitAnnotationMsg(f: Symbol => Option[String], clazz: Symbol, annotationName: String) { - def unapply(sym: Symbol): Option[(Message)] = f(sym) match { + def unapply(sym: Symbol): Option[Message] = f(sym) match { case Some(m) => Some(new Message(sym, m, annotationName)) case None if sym.isAliasType => // perform exactly one step of dealiasing @@ -1701,34 +1889,88 @@ trait Implicits { object ImplicitAmbiguousMsg extends ImplicitAnnotationMsg(_.implicitAmbiguousMsg, ImplicitAmbiguousClass, "implicitAmbiguous") class Message(sym: Symbol, msg: String, annotationName: String) { - // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html + import scala.util.matching.Regex.{quoteReplacement, Groups} + // https://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html private val Intersobralator = 
"""\$\{\s*([^}\s]+)\s*\}""".r private def interpolate(text: String, vars: Map[String, String]) = - Intersobralator.replaceAllIn(text, (_: Regex.Match) match { - case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "") + Intersobralator.replaceAllIn(text, (_: Match) match { + case Groups(v) => quoteReplacement(vars.getOrElse(v, "")) // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw) + case x => throw new MatchError(x) }) - private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName) + def referencedTypeParams: List[String] = Intersobralator.findAllMatchIn(msg).map(_.group(1)).distinct.toList + + private def symTypeParamNames: List[String] = sym.typeParams.map(_.decodedName) + + def lookupTypeParam(name: String): Symbol = { + val n = newTypeName(name) + var r: Symbol = NoSymbol + var o = sym.owner + while (r == NoSymbol && o != NoSymbol) { + o.typeParams.find(_.name == n) match { + case Some(p) => r = p + case _ => + do { o = o.owner } while (!(o.isClass || o.isMethod || o == NoSymbol)) + } + } + r + } + private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs - def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString)) + def formatDefSiteMessage(paramTp: Type): String = + formatDefSiteMessage(typeArgsAtSym(paramTp).map(_.toString)) + + def formatDefSiteMessage(typeArgs: List[String]): String = + interpolate(msg, Map(symTypeParamNames.zip(typeArgs): _*)) + + def formatParameterMessage(fun: Tree): String = { + val paramNames = referencedTypeParams + val paramSyms = paramNames.map(lookupTypeParam).filterNot(_ == NoSymbol) + val paramTypeRefs = paramSyms.map(_.typeConstructor.etaExpand) // make polytypes for type constructors -- we'll abbreviate them below + val prefix = fun match { + case treeInfo.Applied(Select(qual, _), _, _) => qual.tpe + case _ => NoType + } + + val argTypes1 = if (prefix == NoType) paramTypeRefs else 
paramTypeRefs.map(_.asSeenFrom(prefix, fun.symbol.owner)) + val argTypes2 = fun match { + case treeInfo.Applied(_, targs, _) => argTypes1.map(_.instantiateTypeParams(fun.symbol.info.typeParams, targs.map(_.tpe))) + case _ => argTypes1 + } - def format(typeArgs: List[String]): String = - interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc? + val argTypes = argTypes2.map { + case PolyType(tps, tr@TypeRef(_, _, tprefs)) => + if (tps.corresponds(tprefs)((p, r) => p == r.typeSymbol)) tr.typeConstructor.toString + else { + val freshTpars = tps.mapConserve { p => + if (p.unexpandedName == tpnme.WILDCARD) p.cloneSymbol.setName(newTypeName("?T" + tps.indexOf(p))) + else p + } + freshTpars.map(_.name).mkString("[", ", ", "] -> ") + tr.instantiateTypeParams(tps, freshTpars.map(_.typeConstructor)).toString + } + case tp => tp.toString + } + interpolate(msg, Map(paramNames.zip(argTypes): _*)) + } def validate: Option[String] = { - val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet - val decls = typeParamNames.toSet + val refs = referencedTypeParams + val isMessageOnParameter = sym.isParameter + val decls = + if (isMessageOnParameter) referencedTypeParams.filterNot(p => lookupTypeParam(p) == NoSymbol) + else symTypeParamNames.distinct - (refs &~ decls) match { + refs.diff(decls) match { case s if s.isEmpty => None case unboundNames => val singular = unboundNames.size == 1 val ess = if (singular) "" else "s" val bee = if (singular) "is" else "are" - Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @$annotationName annotation $bee not defined by $sym.") + val where = if (isMessageOnParameter) s"in scope" else s"defined by $sym" + Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @$annotationName annotation $bee not $where.") } } } @@ -1738,7 +1980,7 @@ trait Implicits { def isShadowed(name: Name): 
Boolean } object Shadower { - private[this] val localShadowerCache = new ReusableInstance[LocalShadower](() => new LocalShadower, enabled = isCompilerUniverse) + private[this] val localShadowerCache = ReusableInstance[LocalShadower](new LocalShadower, enabled = isCompilerUniverse) def using[T](local: Boolean)(f: Shadower => T): T = if (local) localShadowerCache.using { shadower => diff --git a/src/compiler/scala/tools/nsc/typechecker/ImportTracking.scala b/src/compiler/scala/tools/nsc/typechecker/ImportTracking.scala new file mode 100644 index 000000000000..cbad8fa6ce01 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/ImportTracking.scala @@ -0,0 +1,192 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker + +import scala.annotation.nowarn +import scala.collection.mutable +import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace} +import scala.reflect.internal.util.CodeAction +import scala.tools.nsc.Reporting.WarningCategory + +/** Track import clauses and usages for -Wunused:imports reporting. + */ +trait ImportTracking { self: Analyzer => + import global._ + + // Associate info with info at import keyword, plus owner for warning filtering. 
`import a.x, b.x` -> `(b, a, owner)` + private type TrackedInfo = (ImportInfo, ImportInfo, Symbol) + + private val usedSelectors = mutable.Map.empty[ImportInfo, mutable.Set[ImportSelector]] + private val importInfos = mutable.Map.empty[CompilationUnit, List[TrackedInfo]].withDefaultValue(Nil) + + def recordImportUsage(info: ImportInfo, sel: ImportSelector): Unit = usedSelectors.get(info) match { + case Some(sels) => sels.addOne(sel) + case None => usedSelectors.put(info, mutable.Set(sel)) + } + + def recordImportContext(ctx: Context): Unit = ctx.firstImport.foreach { info => + val keyword = + if (info.pos.start != info.pos.point) info + else ctx.imports.find(p => p.pos.isDefined && p.pos.start != p.pos.point).getOrElse(info) + importInfos(ctx.unit) ::= (info, keyword, ctx.owner) : @nowarn + } + + def warnUnusedImports(unit: CompilationUnit): Unit = if (!unit.isJava) { + def checkDeprecatedElementInPath(selector: ImportSelector, info: ImportInfo): String = { + def msg(sym: Symbol) = sym.deprecationMessage.map(": " + _).getOrElse("") + def badName(name: Name) = + info.qual.tpe.member(name) match { + case m if m.isDeprecated => Some(s" of deprecated $m${msg(m)}") + case _ => None + } + val badSelected = + if (!selector.isMask && selector.isSpecific) badName(selector.name).orElse(badName(selector.name.toTypeName)) + else None + def badFrom = { + val sym = info.qual.symbol + if (sym.isDeprecated) Some(s" from deprecated $sym${msg(sym)}") else None + } + badSelected.orElse(badFrom).getOrElse("") + } + def warnUnusedSelections(infos: List[TrackedInfo]): Unit = { + type Culled = (ImportSelector, TrackedInfo) + def keyInfoOfTracked(info: TrackedInfo): ImportInfo = info._2 + def keyInfoOfCulled(culled: Culled): ImportInfo = keyInfoOfTracked(culled._2) + def infoOfCulled(culled: Culled): ImportInfo = culled._2._1 + val unused: List[Culled] = + infos.flatMap { + case (tracked @ (info, _, _)) => + val used = usedSelectors.remove(info).getOrElse(mutable.Set.empty) + 
info.tree.selectors.collect { + case selector if !selector.isMask && !used(selector) => selector -> tracked + } + }.sortBy { case (_, (info, _, _)) => info.pos.start } // stable sort on info.pos preserves order of selectors + def emit(culled: Culled, actions: List[CodeAction]): Unit = culled match { + case (selector, (info, _, owner)) => + val pos = info.posOf(selector) + val origin = info.fullSelectorString(selector) + val addendum = checkDeprecatedElementInPath(selector, info) + runReporting.warning(pos, s"Unused import$addendum", WarningCategory.UnusedImports, owner, origin, actions) + } + // If the rest of the line is blank, include it in the final edit position. (Delete trailing whitespace.) + // If replacement is empty, and the prefix of the line is also blank, then include that, too. (Del blank line.) + def editPosAt(pos: Position, replacement: String): Position = { + val content = pos.source.content + val prev = content.lastIndexWhere(c => !isWhitespace(c), end = pos.start - 1) + val emptyLeft = prev < 0 || isLineBreakChar(content(prev)) + val next = content.indexWhere(c => !isWhitespace(c), from = pos.end) + val emptyRight = next < 0 || isLineBreakChar(content(next)) + val deleteLine = emptyLeft && emptyRight && replacement.isEmpty + val bump = if (deleteLine) 1 else 0 + val p1 = if (next >= 0 && emptyRight) pos.withEnd(next + bump) else pos + val p2 = if (deleteLine) p1.withStart(prev + 1) else p1 + p2 + } + def isSingleSelector(infos: List[TrackedInfo]): Boolean = infos match { + case (info, _, _) :: Nil => info.tree.selectors.size == 1 + case _ => false + } + def emitEdits(): Unit = { + def edit(pos: Position, replacement: String) = + runReporting.codeAction("unused import", editPosAt(pos, replacement), replacement, desc = "remove import") + def delete(pos: Position) = edit(pos, replacement = "") + + val statements = infos.groupBy(keyInfoOfTracked) // keyInfo -> tracked infos in statement + + unused.groupBy(keyInfoOfCulled).foreach { // keyInfo -> 
culled selectors in statement + case (keyInfo, culled :: Nil) if isSingleSelector(statements(keyInfo)) => // import a.x + emit(culled, actions = delete(keyInfo.pos)) // just one warning with delete + case (keyInfo, culleds) => // import a.x, b.{y, z} + val tracking = culleds.groupBy(infoOfCulled) // info -> Culled selectors (group by import clause) + val deleting = tracking.view.mapValues(_.map(_._1)).toMap // info -> selectors to remove: b.{y, z} -> y + val existing = statements(keyInfo).map(_._1).sortBy(_.tree.pos.start) // infos for a, b + val (editing, keeping) = existing.partition(deleting.contains(_)) // deleting = info has a selector to del + val (removing, updating) = editing.partition(info => info.tree.selectors.length == deleting(info).size) + if (keeping.isEmpty && updating.isEmpty) { // all clauses are removed in the current statement + // existing.flatMap(tracking) + val ordered = culleds.sortBy(_._1.namePos) + ordered.init.foreach(emit(_, actions = Nil)) // emit warnings for N-1 selectors + val imports = existing.map(_.tree) + val editPos = wrappingPos(imports.head.pos, imports) // reconstitute range of import statement + emit(ordered.last, actions = delete(editPos)) // at Nth selector, delete the statement + } + else + foreachWithIndex(existing) { (info, i) => + if (removing.contains(info)) { + val toEmit = tracking(info).sortBy(_._1.namePos) + toEmit.init.foreach(emit(_, actions = Nil)) // emit warnings for N-1 selectors for clause + // normally, delete from start of this clause to start of next clause: a.x, b.{y, z} from a to b + // but if this is the last clause, then also delete the comma following the last undeleted clause. 
+ // also if this is the first clause, start includes the keyword, so advance it to the name (point) + val n = existing.size + val editPos = { + val p0 = info.tree.pos.withStart(info.tree.pos.point) + if (i == n - 1) p0 + else p0.withEnd(existing(i + 1).tree.pos.start) + } + val actions = + if (n > 1 && i == n - 1) { + val prev = existing.lastIndexWhere(!deleting.contains(_)) + val prevPos = existing(prev).tree.pos + val commaPos = prevPos.copyRange(start = prevPos.end, end = existing(prev + 1).tree.pos.start) + delete(commaPos) ++ delete(editPos) + } + else delete(editPos) + emit(toEmit.last, actions) // at Nth selector, delete the clause (and maybe a comma) + } + else if (updating.contains(info)) { + val toEmit = tracking(info).sortBy(_._1.namePos) + val remaining = info.tree.selectors.filter(!deleting(info).contains(_)) + if (remaining.size == 1) { // reformat without braces if remaining selector a.x + toEmit.init.foreach(emit(_, actions = Nil)) + val editPos = info.tree.pos.withStart(info.tree.pos.point) // exclude import keyword if i == 0 + val revised = info.tree.copy(selectors = remaining) + emit(toEmit.last, edit(editPos, revised.toString.stripPrefix("import "))) // exclude the keyword + } + else { + // emit an edit at each change to preserve formatting. + // there are multiple selectors, comma-separated in braces {x, y => w, z}. + // delete from start of name to start of next name, + // except at last selector, where it's necessary to delete a preceding comma. + // find the previous selector that is not deleted, and delete from its comma to start of next name. 
+ val selectors = info.tree.selectors + val infoPos = info.tree.pos + val last = selectors.last + val content = infoPos.source.content + toEmit.foreach { case culled @ (selector, (_, _, _)) => + if (selector != last) { + val index = selectors.indexWhere(_ == selector) + val editPos = infoPos.copyRange(start = selector.namePos, end = selectors(index + 1).namePos) + emit(culled, delete(editPos)) + } + else { + // info.tree.pos.end is one char after rbrace + val prev = selectors.lastIndexWhere(remaining.contains(_)) + val comma = content.indexWhere(_ == ',', from = selectors(prev).namePos) + val commaPos = infoPos.copyRange(start = comma, end = selectors(prev + 1).namePos) + val editPos = infoPos.copyRange(start = selector.namePos, end = info.tree.pos.end - 1) + emit(culled, delete(commaPos) ++ delete(editPos)) + } + } + } + } + } + } + } + if (settings.quickfix.isSetByUser && !settings.quickFixSilent) emitEdits() + else unused.foreach(emit(_, actions = Nil)) + } + importInfos.remove(unit).foreach(warnUnusedSelections) + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index b40e666e2c6b..820729d21c73 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,17 +13,15 @@ package scala.tools.nsc package typechecker -import scala.collection.mutable.ListBuffer -import scala.collection.immutable +import scala.collection.{immutable, mutable}, mutable.ListBuffer +import scala.reflect.internal.Depth import scala.util.control.ControlThrowable import symtab.Flags._ -import scala.reflect.internal.Depth import scala.tools.nsc.Reporting.WarningCategory /** This trait contains methods related to type parameter inference. * * @author Martin Odersky - * @version 1.0 */ trait Infer extends Checkable { self: Analyzer => @@ -50,15 +48,58 @@ trait Infer extends Checkable { (removeRepeated || numFormals != numArgs) && isVarArgTypes(formals1) ) - def lastType = formals1.last.dealiasWiden.typeArgs.head - def expanded(n: Int) = (1 to n).toList map (_ => lastType) - if (expandLast) - formals1.init ::: expanded(numArgs - numFormals + 1) - else + if (expandLast) { + // extract the T from T* + val lastType = formals1.last.dealiasWiden.typeArgs.head + + val n = numArgs - numFormals + 1 + // Optimized version of: formals1.init ::: List.fill(n)(lastType) + val result = mutable.ListBuffer[Type]() + var fs = formals1 + while ((fs ne Nil) && (fs.tail ne Nil)) { + result.addOne(fs.head) + fs = fs.tail + } + result.prependToList(fillList(n)(lastType)) + } else formals1 } + // @requires sam == samOf(samTp) + def instantiateSamFromFunction(funTp: Type, samTp: Type, sam: Symbol) = { + val samClassSym = samTp.typeSymbol + + // the unknowns + val tparams = samClassSym.typeParams + + if (tparams.isEmpty) samTp + else { + // ... 
as typevars + val tvars = tparams map freshVar + + // we're trying to fully define the type arguments for this type constructor + val samTyCon = samClassSym.typeConstructor + + val ptVars = appliedType(samTyCon, tvars) + + // carry over info from pt + ptVars <:< samTp + + val samInfoWithTVars = ptVars.memberInfo(sam) + + // use function type subtyping, not method type subtyping (the latter is invariant in argument types) + funTp <:< functionType(samInfoWithTVars.paramTypes, samInfoWithTVars.finalResultType) + + // solve constraints tracked by tvars + val targs = solvedTypes(tvars, tparams, varianceInType(sam.info), upper = false, lubDepth(sam.info :: Nil)) + + debuglog(s"sam infer: $samTp --> ${appliedType(samTyCon, targs)} by ${funTp} <:< $samInfoWithTVars --> $targs for $tparams") + + appliedType(samTyCon, targs) + } + } + /** Sorts the alternatives according to the given comparison function. * Returns a list containing the best alternative as well as any which * the best fails to improve upon. 
@@ -67,7 +108,6 @@ trait Infer extends Checkable { def improves(sym1: Symbol, sym2: Symbol) = ( (sym2 eq NoSymbol) || sym2.isError - || (sym2 hasAnnotation BridgeClass) || isBetter(sym1, sym2) ) @@ -91,7 +131,7 @@ trait Infer extends Checkable { */ def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam) - class NoInstance(msg: String) extends Throwable(msg) with ControlThrowable { } + class NoInstance(msg: String) extends ControlThrowable(msg) private class DeferredNoInstance(getmsg: () => String) extends NoInstance("") { override def getMessage(): String = getmsg() } @@ -113,9 +153,9 @@ trait Infer extends Checkable { finally excludedVars -= tv } def apply(tp: Type): Type = tp match { - case WildcardType | BoundedWildcardType(_) | NoType => throw new NoInstance("undetermined type") - case tv: TypeVar if !tv.untouchable => applyTypeVar(tv) - case _ => mapOver(tp) + case _: ProtoType | NoType => throw new NoInstance("undetermined type") + case tv: TypeVar if !tv.untouchable => applyTypeVar(tv) + case _ => mapOver(tp) } } @@ -125,13 +165,13 @@ trait Infer extends Checkable { /** Is type fully defined, i.e. no embedded anytypes or wildcards in it? 
*/ private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match { - case WildcardType | BoundedWildcardType(_) | NoType => false - case NoPrefix | ThisType(_) | ConstantType(_) => true - case TypeRef(pre, _, args) => isFullyDefined(pre) && (args forall isFullyDefined) - case SingleType(pre, _) => isFullyDefined(pre) - case RefinedType(ts, _) => ts forall isFullyDefined - case TypeVar(_, constr) if constr.inst == NoType => false - case _ => falseIfNoInstance({ instantiate(tp) ; true }) + case _: ProtoType | NoType => false + case NoPrefix | ThisType(_) | ConstantType(_) => true + case TypeRef(pre, _, args) => isFullyDefined(pre) && (args forall isFullyDefined) + case SingleType(pre, _) => isFullyDefined(pre) + case RefinedType(ts, _) => ts forall isFullyDefined + case TypeVar(_, constr) if constr.inst == NoType => false + case _ => falseIfNoInstance { instantiate(tp); true } } /** Solve constraint collected in types `tvars`. @@ -161,25 +201,7 @@ trait Infer extends Checkable { case _ => tp } - /** Automatically perform the following conversions on expression types: - * A method type becomes the corresponding function type. - * A nullary method type becomes its result type. - * Implicit parameters are skipped. - * This method seems to be performance critical. 
- */ - def normalize(tp: Type): Type = tp match { - case PolyType(_, restpe) => - logResult(sm"""|Normalizing PolyType in infer: - | was: $restpe - | now""")(normalize(restpe)) - case mt @ MethodType(_, restpe) if mt.isImplicit => normalize(restpe) - case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => - if (phase.erasedTypes) FunctionClass(mt.params.length).tpe - else functionType(mt.paramTypes, normalize(restpe)) - case NullaryMethodType(restpe) => normalize(restpe) - case ExistentialType(tparams, qtpe) => newExistentialType(tparams, normalize(qtpe)) - case _ => tp // @MAT aliases already handled by subtyping - } + private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR) private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR) @@ -190,7 +212,7 @@ trait Infer extends Checkable { import InferErrorGen._ /* -- Error Messages --------------------------------------------------- */ - def setError[T <: Tree](tree: T): T = { + def setError[T <: Tree](tree: T): tree.type = { // scala/bug#7388, one can incur a cycle calling sym.toString // (but it'd be nicer if that weren't so) def name = { @@ -245,7 +267,7 @@ trait Infer extends Checkable { /** Check that `sym` is defined and accessible as a member of * tree `site` with type `pre` in current context. - * @PP: In case it's not abundantly obvious to anyone who might read + * @note PP: In case it's not abundantly obvious to anyone who might read * this, the method does a lot more than "check" these things, as does * nearly every method in the compiler, so don't act all shocked. 
* This particular example "checks" its way to assigning both the @@ -256,7 +278,7 @@ trait Infer extends Checkable { * Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre, * since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck) */ - def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = { + def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree, isJava: Boolean): Tree = { def malformed(ex: MalformedType, instance: Type): Type = { val what = if (ex.msg contains "malformed type") "is malformed" else s"contains a ${ex.msg}" val message = s"\n because its instance type $instance $what" @@ -264,7 +286,7 @@ trait Infer extends Checkable { ErrorUtils.issueTypeError(error)(context) ErrorType } - def accessible = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) match { + def accessible = sym.filter(context.isAccessible(_, pre, site.isInstanceOf[Super])) match { case NoSymbol if sym.isJavaDefined && context.unit.isJava => sym // don't try to second guess Java; see #4402 case sym1 => sym1 } @@ -275,9 +297,9 @@ trait Infer extends Checkable { tree setSymbol sym setType ErrorType else accessible match { case NoSymbol => checkAccessibleError(tree, sym, pre, site) - case sym if context.owner.isTermMacro && (sym hasFlag LOCKED) => throw CyclicReference(sym, CheckAccessibleMacroCycle) - case sym => - val sym1 = if (sym.isTerm) sym.cookJavaRawInfo() else sym // xform java rawtypes into existentials + case acc if context.owner.isTermMacro && (acc hasFlag LOCKED) => throw CyclicReference(acc, CheckAccessibleMacroCycle) + case acc => + val sym1 = if (acc.isTerm) acc.cookJavaRawInfo() else acc // xform java rawtypes into existentials val owntype = ( try pre memberType sym1 catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) } @@ -287,7 +309,9 @@ trait Infer extends Checkable { // OPT: avoid lambda 
allocation and Type.map for super constructor calls case _: SuperType if !sym.isConstructor && !owntype.isInstanceOf[OverloadedType] => owntype map ((tp: Type) => if (tp eq pre) site.symbol.thisType else tp) - case _ => owntype + case _ => + if ((owntype eq ObjectTpe) && isJava) ObjectTpeJava + else owntype } ) } @@ -295,11 +319,11 @@ trait Infer extends Checkable { /** "Compatible" means conforming after conversions. * "Raising to a thunk" is not implicit; therefore, for purposes of applicability and - * specificity, an arg type `A` is considered compatible with cbn formal parameter type `=>A`. + * specificity, an arg type `A` is considered compatible with cbn formal parameter type `=> A`. * For this behavior, the type `pt` must have cbn params preserved; for instance, `formalTypes(removeByName = false)`. * - * `isAsSpecific` no longer prefers A by testing applicability to A for both m(A) and m(=>A) - * since that induces a tie between m(=>A) and m(=>A,B*) [scala/bug#3761] + * `isAsSpecific` no longer prefers A by testing applicability to A for both m(A) and m(=> A) + * since that induces a tie between m(=> A) and m(=> A, B*) [scala/bug#3761] */ private def isCompatible(tp: Type, pt: Type): Boolean = { def isCompatibleByName(tp: Type, pt: Type): Boolean = ( @@ -307,20 +331,28 @@ trait Infer extends Checkable { && !isByNameParamType(tp) && isCompatible(tp, dropByName(pt)) ) - def isCompatibleSam(tp: Type, pt: Type): Boolean = (definitions.isFunctionType(tp) || tp.isInstanceOf[MethodType] || tp.isInstanceOf[PolyType]) && { - val samFun = typer.samToFunctionType(pt) + def isCompatibleSam(tp: Type, pt: Type): Boolean = (definitions.isFunctionType(tp) || definitions.isPartialFunctionType(tp) || tp.isInstanceOf[MethodType] || tp.isInstanceOf[PolyType]) && { + val samFun = samToFunctionType(pt) (samFun ne NoType) && isCompatible(tp, samFun) } - val tp1 = normalize(tp) + // can only compare if both types are repeated or neither is (T* is not actually a first-class type, 
even though it has a BTS and thus participates in subtyping) + (!isRepeatedParamType(tp) || isRepeatedParamType(pt)) && { + val tp1 = methodToExpressionTp(tp) - ( (tp1 weak_<:< pt) - || isCoercible(tp1, pt) - || isCompatibleByName(tp, pt) - || isCompatibleSam(tp, pt) - ) + ((tp1 weak_<:< pt) + || isCoercible(tp1, pt) + || isCompatibleByName(tp, pt) + || isCompatibleSam(tp, pt) + ) + } + } + + def isCompatibleArgs(tps: List[Type], pts: List[Type]) = { + val res = (tps corresponds pts)(isCompatible) +// println(s"isCompatibleArgs $res : $tps <:< $pts") + res } - def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible) def isWeaklyCompatible(tp: Type, pt: Type): Boolean = { def isCompatibleNoParamsMethod = tp match { @@ -361,9 +393,8 @@ trait Infer extends Checkable { } override def apply(tp: Type): Type = mapOver(tp) match { - case WildcardType => addTypeParam(TypeBounds.empty) - case BoundedWildcardType(bounds) => addTypeParam(bounds) - case tp => tp + case pt: ProtoType => addTypeParam(pt.toBounds) + case t => t } } val tp1 = typeMap(tp) @@ -378,22 +409,27 @@ trait Infer extends Checkable { * conforms to `pt`, return null. */ private def exprTypeArgs(tvars: List[TypeVar], tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] = { - def restpeInst = restpe.instantiateTypeParams(tparams, tvars) - def conforms = if (useWeaklyCompatible) isWeaklyCompatible(restpeInst, pt) else isCompatible(restpeInst, pt) - // If the restpe is an implicit method, and the expected type is fully defined - // optimize type variables wrt to the implicit formals only; ignore the result type. 
- // See test pos/jesper.scala - def variance = restpe match { - case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe) - case _ => restpe - } - def solve() = solvedTypes(tvars, tparams, varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil)) + val resTpVars = restpe.instantiateTypeParams(tparams, tvars) + + if (if (useWeaklyCompatible) isWeaklyCompatible(resTpVars, pt) else isCompatible(resTpVars, pt)) { + // If conforms has just solved a tvar as a singleton type against pt, then we need to + // prevent it from being widened later by adjustTypeArgs + tvars.foreach(_.constr.stopWideningIfPrecluded()) + + // If the restpe is an implicit method, and the expected type is fully defined + // optimize type variables wrt to the implicit formals only; ignore the result type. + // See test pos/jesper.scala + val variance = restpe match { + case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe) + case _ => restpe + } - if (conforms) - try solve() catch { case _: NoInstance => null } - else + try solvedTypes(tvars, tparams, varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil)) + catch { case _: NoInstance => null } + } else null } + /** Overload which allocates fresh type vars. * The other one exists because apparently inferExprInstance needs access to the typevars * after the call, and it's wasteful to return a tuple and throw it away almost every time. @@ -437,10 +473,10 @@ trait Infer extends Checkable { if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt)) map2(tparams, tvars)((tparam, tvar) => try instantiateToBound(tvar, varianceInTypes(formals)(tparam)) - catch { case ex: NoInstance => WildcardType } + catch { case _: NoInstance => WildcardType } ) else - tvars map (_ => WildcardType) + WildcardType.fillList(tvars.length) } /** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params. 
@@ -507,17 +543,17 @@ trait Infer extends Checkable { */ def methTypeArgs(fn: Tree, tparams: List[Symbol], formals: List[Type], restpe: Type, argtpes: List[Type], pt: Type): AdjustedTypeArgs = { - val tvars = tparams map freshVar if (!sameLength(formals, argtpes)) throw new NoInstance("parameter lists differ in length") + val tvars = tparams.map(freshVar) val restpeInst = restpe.instantiateTypeParams(tparams, tvars) // first check if typevars can be fully defined from the expected type. // The return value isn't used so I'm making it obvious that this side // effects, because a function called "isXXX" is not the most obvious // side effecter. - isConservativelyCompatible(restpeInst, pt) + isConservativelyCompatible(restpeInst, pt): Unit // Return value unused with the following explanation: // @@ -540,28 +576,22 @@ trait Infer extends Checkable { // Note that isCompatible side-effects: subtype checks involving typevars // are recorded in the typevar's bounds (see TypeConstraint) - if (!isCompatible(tp1, pt1)) { + if (!isCompatible(tp1, pt1)) throw new DeferredNoInstance(() => "argument expression's type is not compatible with formal parameter type" + foundReqMsg(tp1, pt1)) - } } val targs = solvedTypes(tvars, tparams, varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes)) - // Can warn about inferring Any/AnyVal as long as they don't appear - // explicitly anywhere amongst the formal, argument, result, or expected type. - // ...or lower bound of a type param, since they're asking for it. 
- def canWarnAboutAny = { - val loBounds = tparams map (_.info.lowerBound) - def containsAny(t: Type) = (t contains AnyClass) || (t contains AnyValClass) - val hasAny = pt :: restpe :: formals ::: argtpes ::: loBounds exists (_.dealiasWidenChain exists containsAny) - !hasAny - } - if (settings.warnInferAny && context.reportErrors && !fn.isEmpty && canWarnAboutAny) { - targs.foreach(_.typeSymbol match { - case sym @ (AnyClass | AnyValClass) => - context.warning(fn.pos, s"a type was inferred to be `${sym.name}`; this may indicate a programming error.", WarningCategory.LintInferAny) - case _ => - }) - } + // Any "top type" in the constraint mitigates the warning, instead of a precise match such as: + // !tvar.constr.loBounds.contains(targ) + // For example, don't require this arg, where the lub of `AnyRef` and `V` yields the `Any`. + // this.forall(kv => map.getOrElse[Any](kv._1, Map.DefaultSentinelFn()) == kv._2) + if (settings.warnInferAny && !fn.isEmpty) + foreach2(targs, tvars) { (targ, tvar) => + if (topTypes.contains(targ.typeSymbol) && + !tvar.constr.loBounds.exists(t => topTypes.contains(t.typeSymbol)) && + !tvar.constr.hiBounds.exists(t => topTypes.contains(t.typeSymbol))) + context.warning(fn.pos, s"a type was inferred to be `${targ.typeSymbol.name}`; this may indicate a programming error.", WarningCategory.LintInferAny) + } adjustTypeArgs(tparams, tvars, targs, restpe) } @@ -685,28 +715,44 @@ trait Infer extends Checkable { } /** The type of an argument list after being coerced to a tuple. - * @pre: the argument list is eligible for tuple conversion. + * @note Pre-condition: The argument list is eligible for tuple conversion. 
*/ - private def typeAfterTupleConversion(argtpes: List[Type]): Type = ( + private def typeAfterTupleConversion(argtpes: List[Type]): Type = if (argtpes.isEmpty) UnitTpe // aka "Tuple0" else tupleType(argtpes map { case NamedType(name, tp) => UnitTpe // not a named arg - only assignments here case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_* case tp => tp }) - ) /** If the argument list needs to be tupled for the parameter list, - * a list containing the type of the tuple. Otherwise, the original - * argument list. - */ + * a list containing the type of the tuple. Otherwise, the original + * argument list. + * + * NOTE: we have to exclude repeated parameter types for overloading resolution like this: + * def f[T](x: T): T = x + * def f[T](x: T, xs: T*): T = x + * + * In the process of deciding which ones is more specific, isApplicableToMethod would otherwise try T' = (T, T*) + */ def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = { - if (eligibleForTupleConversion(formals, argtpes.size)) + if (!argtpes.exists(isRepeatedParamType) && eligibleForTupleConversion(formals, argtpes.size)) typeAfterTupleConversion(argtpes) :: Nil else argtpes } + // This is primarily a duplicate of enhanceBounds in typedAppliedTypeTree + // modified to use updateInfo rather than setInfo to avoid wiping out + // type history. 
+ def enhanceBounds(okparams: List[Symbol], okargs: List[Type], undets: List[Symbol]): Unit = + undets.foreach { undet => + val bounds = undet.info.bounds + val substBounds = bounds.subst(okparams, okargs) + if(bounds ne substBounds) + undet.updateInfo(substBounds) + } + private def isApplicableToMethod(undetparams: List[Symbol], mt: MethodType, argtpes0: List[Type], pt: Type): Boolean = { val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false) def missingArgs = missingParams[Type](argtpes0, mt.params, x => Some(x) collect { case NamedType(n, _) => n }) @@ -724,6 +770,7 @@ trait Infer extends Checkable { val restpe = mt resultType args val adjusted = methTypeArgs(EmptyTree, undetparams, formals, restpe, args, pt) import adjusted.{okParams, okArgs, undetParams} + enhanceBounds(okParams, okArgs, undetParams) val restpeInst = restpe.instantiateTypeParams(okParams, okArgs) // #2665: must use weak conformance, not regular one (follow the monomorphic case above) exprTypeArgs(undetParams, restpeInst, pt, useWeaklyCompatible = true) match { @@ -742,12 +789,14 @@ trait Infer extends Checkable { case (_, pos, _) if !allArgsArePositional(pos) && !sameLength(formals, mt.params) => false // different length lists and all args not positional case (args, pos, _) => typesCompatible(reorderArgs(args, pos)) } - compareLengths(argtpes0, formals) match { + val res = compareLengths(argtpes0, formals) match { case 0 if containsNamedType(argtpes0) => reorderedTypesCompatible // right number of args, wrong order case 0 => typesCompatible(argtpes0) // fast track if no named arguments are used case x if x > 0 => tryWithArgs(argsTupled) // too many args, try tupling case _ => tryWithArgs(argsPlusDefaults) // too few args, try adding defaults or tupling } + // println(s"isApplicableToMethod $res : $mt --> $formals to $argtpes0 for $pt under $undetparams") + res } /** Is there an instantiation of free type variables `undetparams` such that @@ -759,17 +808,20 @@ trait 
Infer extends Checkable { * type is set to `Unit`, i.e. the corresponding argument is treated as * an assignment expression (@see checkNames). */ - private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = ( - ftpe match { - case OverloadedType(pre, alts) => alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt)) - case ExistentialType(_, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt) - case mt @ MethodType(_, _) => isApplicableToMethod(undetparams, mt, argtpes0, pt) - case NullaryMethodType(restpe) => isApplicable(undetparams, restpe, argtpes0, pt) - case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, restpe)((tps1, res1) => isApplicable(tps1 ::: undetparams, res1, argtpes0, pt)) - case ErrorType => true - case _ => false - } - ) + private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = { + val res = + ftpe match { + case OverloadedType(pre, alts) => alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt)) + case ExistentialType(_, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt) + case mt@MethodType(_, _) => isApplicableToMethod(undetparams, mt, argtpes0, pt) + case NullaryMethodType(restpe) => isApplicable(undetparams, restpe, argtpes0, pt) + case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, restpe)((tps1, res1) => isApplicable(tps1 ::: undetparams, res1, argtpes0, pt)) + case ErrorType => true + case _ => false + } +// println(s"isApplicable $res : $ftpe to $argtpes0 for $pt under $undetparams") + res + } /** * Are arguments of the given types applicable to `ftpe`? 
Type argument inference @@ -794,38 +846,70 @@ trait Infer extends Checkable { * @see SLS (sec:overloading-resolution) */ def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = { - def checkIsApplicable(argtpes: List[Type]) = isApplicable(Nil, ftpe2, argtpes, WildcardType) - def bothAreVarargs = isVarArgsList(ftpe1.params) && isVarArgsList(ftpe2.params) - def onRight = ftpe2 match { - case OverloadedType(pre, alts) => alts forall (alt => isAsSpecific(ftpe1, pre memberType alt)) - case et: ExistentialType => et.withTypeVars(isAsSpecific(ftpe1, _)) - case mt @ MethodType(_, restpe) => !mt.isImplicit || isAsSpecific(ftpe1, restpe) - case NullaryMethodType(res) => isAsSpecific(ftpe1, res) - case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(ftpe1, PolyType(tparams, restpe)) - case PolyType(tparams, mt @ MethodType(_, restpe)) => !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, restpe)) - case _ => isAsSpecificValueType(ftpe1, ftpe2, Nil, Nil) + def checkIsApplicable(mt: MethodType) = { + val paramTypes = mt.paramTypes + val aligned = + if (isRepeatedParamType(paramTypes.last) && isVarArgsList(ftpe2.params)) paramTypes.init :+ repeatedToSingle(paramTypes.last) + else paramTypes + isApplicable(Nil, ftpe2, aligned, WildcardType) } + + val res = ftpe1 match { case OverloadedType(pre, alts) => alts exists (alt => isAsSpecific(pre memberType alt, ftpe2)) case et: ExistentialType => isAsSpecific(et.skolemizeExistential, ftpe2) case NullaryMethodType(restpe) => isAsSpecific(restpe, ftpe2) case mt @ MethodType(_, restpe) if mt.isImplicit => isAsSpecific(restpe, ftpe2) - case mt @ MethodType(_, _) if bothAreVarargs => checkIsApplicable(mt.paramTypes mapConserve repeatedToSingle) - case mt @ MethodType(params, _) if params.nonEmpty => checkIsApplicable(mt.paramTypes) + case mt @ MethodType(params, _) if params.nonEmpty => checkIsApplicable(mt) case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(PolyType(tparams, restpe), ftpe2) case 
PolyType(tparams, mt @ MethodType(_, restpe)) if mt.isImplicit => isAsSpecific(PolyType(tparams, restpe), ftpe2) - case PolyType(_, mt @ MethodType(params, _)) if params.nonEmpty => checkIsApplicable(mt.paramTypes) + case PolyType(_, mt @ MethodType(params, _)) if params.nonEmpty => checkIsApplicable(mt) case ErrorType => true - case _ => onRight + case _ => + ftpe2 match { + case OverloadedType(pre, alts) => alts forall (alt => isAsSpecific(ftpe1, pre memberType alt)) + case et: ExistentialType => et.withTypeVars(isAsSpecific(ftpe1, _)) + case mt @ MethodType(_, restpe) => !mt.isImplicit || isAsSpecific(ftpe1, restpe) + case NullaryMethodType(res) => isAsSpecific(ftpe1, res) + case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(ftpe1, PolyType(tparams, restpe)) + case PolyType(tparams, mt @ MethodType(_, restpe)) => !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, restpe)) + case _ => isAsSpecificValueType(ftpe1, ftpe2, Nil, Nil) + } } + // println(s"isAsSpecific $res $ftpe1 - $ftpe2") + res } + private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = tpe1 match { case PolyType(tparams1, rtpe1) => isAsSpecificValueType(rtpe1, tpe2, undef1 ::: tparams1, undef2) case _ => tpe2 match { case PolyType(tparams2, rtpe2) => isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2) - case _ => existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2) + case _ if !currentRun.sourceFeatures.implicitResolution => + existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2) + case _ => + // Backport of fix for https://github.com/scala/bug/issues/2509 + // from Dotty https://github.com/scala/scala3/commit/89540268e6c49fb92b9ca61249e46bb59981bf5a + // + // Note that as of https://github.com/scala/scala3/commit/b9f3084205bc9fcbd2a5181d3f0e539e2a20253a + // Dotty flips variances throughout, not just at the top level. We follow that behaviour here. 
+ + val e1 = existentialAbstraction(undef1, tpe1) + val e2 = existentialAbstraction(undef2, tpe2) + + val flip = new VariancedTypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(pre, sym, args) if variance > 0 && sym.typeParams.exists(_.isContravariant) => + mapOver(TypeRef(pre, sym.flipped, args)) + case _ => + mapOver(tp) + } + } + + val bt = e1.baseType(e2.typeSymbol) + val lhs = if(bt != NoType) bt else e1 + flip(lhs) <:< flip(e2) } } @@ -848,6 +932,9 @@ trait Infer extends Checkable { || isProperSubClassOrObject(sym1.safeOwner, sym2.owner) ) + // Note that this doesn't consider undetparams -- any type params in `ftpe1/2` need to be bound by their type (i.e. in a PolyType) + // since constructors of poly classes do not have their own polytype in their infos, this must be fixed up + // before calling this method (see memberTypeForSpecificity) def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type, sym1: Symbol, sym2: Symbol): Boolean = { // ftpe1 / ftpe2 are OverloadedTypes (possibly with one single alternative) if they // denote the type of an "apply" member method (see "followApply") @@ -902,7 +989,7 @@ trait Infer extends Checkable { * first to `strictPt` and then, if this fails, to `lenientPt`. If both * attempts fail, an error is produced. 
*/ - def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) { + def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type): Unit = { printTyping(tree, s"inferring arg instance based on pt0=$strictPt, pt1=$lenientPt") var targs = exprTypeArgs(undetparams, tree.tpe, strictPt, useWeaklyCompatible = false) if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt)) @@ -933,11 +1020,13 @@ trait Infer extends Checkable { } else { val adjusted = adjustTypeArgs(tparams, tvars, targsStrict) import adjusted.{okParams, okArgs, undetParams} + enhanceBounds(okParams, okArgs, undetParams) def solved_s = map2(okParams, okArgs)((p, a) => s"$p=$a") mkString "," def undet_s = undetParams match { case Nil => "" case ps => ps.mkString(", undet=", ",", "") } + printTyping(tree, s"infer solved $solved_s$undet_s") substExpr(tree, okParams, okArgs, pt) undetParams @@ -947,7 +1036,7 @@ trait Infer extends Checkable { /** Substitute free type variables `undetparams` of polymorphic argument * expression `tree` to `targs`, Error if `targs` is null. */ - private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) { + private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type): Unit = { if (targs eq null) { if (!tree.tpe.isErroneous && !pt.isErroneous) PolymorphicExpressionInstantiationError(tree, undetparams, pt) @@ -970,7 +1059,11 @@ trait Infer extends Checkable { */ def inferMethodInstance(fn: Tree, undetParams: List[Symbol], args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match { - case mt @ MethodType(params0, _) => + case mt: MethodType => + // If we can't infer the type parameters, we can recover in `tryTypedApply` with an implicit conversion, + // but only when implicit conversions are enabled. In that case we have to infer the type parameters again. 
+ def noInstanceResult = if (context.implicitsEnabled) undetParams else Nil + try { val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 val formals = formalTypes(mt.paramTypes, args.length) @@ -988,18 +1081,19 @@ trait Infer extends Checkable { adjusted.undetParams match { case Nil => Nil case xs => - // #3890 + // scala/bug#3890 val xs1 = treeSubst.typeMap mapOver xs if (xs ne xs1) new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args - + enhanceBounds(adjusted.okParams, adjusted.okArgs, xs1) xs1 } - } else Nil - } - catch ifNoInstance { msg => - NoMethodInstanceError(fn, args, msg); List() + } else noInstanceResult + } catch ifNoInstance { msg => + NoMethodInstanceError(fn, args, msg) + noInstanceResult } + case x => throw new MatchError(x) } /** Substitute free type variables `undetparams` of type constructor @@ -1009,7 +1103,7 @@ trait Infer extends Checkable { * @param undetparams the undetermined type parameters * @param pt0 the expected result type of the instance */ - def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) { + def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type): Unit = { val pt = abstractTypesToBounds(pt0) val ptparams = freeTypeParamsOfTerms(pt) val ctorTp = tree.tpe @@ -1049,7 +1143,7 @@ trait Infer extends Checkable { def inferForApproxPt = if (isFullyDefined(pt)) { - inferFor(pt.instantiateTypeParams(ptparams, ptparams map (x => WildcardType))) flatMap { targs => + inferFor(pt.instantiateTypeParams(ptparams, WildcardType.fillList(ptparams.length))) flatMap { targs => val ctorTpInst = tree.tpe.instantiateTypeParams(undetparams, targs) val resTpInst = skipImplicit(ctorTpInst.finalResultType) val ptvars = @@ -1082,23 +1176,7 @@ trait Infer extends Checkable { } } - @inline - private[this] def instBounds(tvar: TypeVar): TypeBounds = { - val tparam = tvar.origin.typeSymbol - val instType = toOrigin(tvar.constr.inst) - val lo = tparam.info.lowerBound - val hi = 
tparam.info.upperBound - val ifd = isFullyDefined(instType) - val loBounds = if (ifd) List(instType) else tvar.constr.loBounds - val hiBounds = if (ifd) List(instType) else tvar.constr.hiBounds - TypeBounds( - lub(lo :: loBounds map toOrigin), - glb(hi :: hiBounds map toOrigin) - ) - } - - @inline - private[this] def isInstantiatable(tvars: List[TypeVar]) = { + def isInstantiatable(tvars: List[TypeVar]) = { val tvars1 = tvars map (_.cloneInternal) // Note: right now it's not clear that solving is complete, or how it can be made complete! // So we should come back to this and investigate. @@ -1108,13 +1186,23 @@ trait Infer extends Checkable { // this is quite nasty: it destructively changes the info of the syms of e.g., method type params // (see #3692, where the type param T's bounds were set to > : T <: T, so that parts looped) // the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the mean time - private[this] def instantiateTypeVar(tvar: TypeVar): Unit = { - val tparam = tvar.origin.typeSymbol - val tpinfo = tparam.info - val lo0 = tpinfo.lowerBound - val hi0 = tpinfo.upperBound - val tb @ TypeBounds(lo1, hi1) = instBounds(tvar) - val enclCase = context.enclosingCaseDef + def instantiateTypeVar(tvar: TypeVar): Unit = { + val tparam = tvar.origin.typeSymbol + val tparams = cloneSymbols(tvar.typeParams) + val targs = tparams.map(_.tpeHK) + val instType = if (!tvar.instValid) Nil else { + val inst = toOrigin(genPolyType(tparams, appliedType(tvar.inst, targs))) + if (isFullyDefined(inst)) List(inst) else Nil + } + + def instBounds(bounds: List[Type]) = + if (instType.isEmpty) bounds.map(toOrigin) else instType + + val lo0 = tparam.info.lowerBound + val hi0 = tparam.info.upperBound + val lo1 = lub(toOrigin(lo0) :: instBounds(tvar.constr.loBounds)) + val hi1 = glb(toOrigin(hi0) :: instBounds(tvar.constr.hiBounds)) + val enclCase = context.enclosingCaseDef def enclCase_s = enclCase.toString.replaceAll("\\n", " ").take(60) if 
(enclCase.savedTypeBounds.nonEmpty) log( @@ -1131,6 +1219,7 @@ trait Infer extends Checkable { log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds") else { enclCase pushTypeBounds tparam + val tb = genPolyType(tparams, appliedType(TypeBounds(lo1, hi1), targs)) tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb) } } @@ -1171,7 +1260,7 @@ trait Infer extends Checkable { return ErrorType } - checkCheckable(tree0, if (isUnapply && settings.isScala213) typer.applyTypeToWildcards(pattp) else pattp, pt, inPattern = true, canRemedy) + checkCheckable(tree0, if (isUnapply) typer.applyTypeToWildcards(pattp) else pattp, pt, inPattern = true, canRemedy = canRemedy) if (pattp <:< pt) () else { debuglog("free type params (1) = " + tpparams) @@ -1199,7 +1288,7 @@ trait Infer extends Checkable { } } tvars foreach instantiateTypeVar - invalidateTreeTpeCaches(tree0, tvars.map(_.origin.typeSymbol)) + invalidateTreeTpeCaches(tree0, tvars.map(_.origin.typeSymbol).toSet) } /* If the scrutinee has free type parameters but the pattern does not, * we have to flip the arguments so the expected type is treated as more @@ -1248,42 +1337,82 @@ trait Infer extends Checkable { /* -- Overload Resolution ---------------------------------------------- */ + /** Adjust polymorphic class's constructor info to be polymorphic as well + * + * Normal polymorphic methods have a PolyType as their info, but a constructor reuses the type params of the class. + * We wrap them in a PolyType here so that we get consistent behavior in determining specificity. + * + * @param pre + * @param sym must not be overloaded! 
+ * @return `pre memberType sym`, unless `sym` is a polymorphic class's constructor that we're invoking using `new`, + * in which case a `PolyType` is wrapped around the ctor's info + * (since a self-constructor invocation `this(...)` cannot supply type params, we do not wrap the type params then) + */ + private def memberTypeForSpecificity(pre: Type, sym: Symbol, tree: Tree) = { + // Need to add type params for a polymorphic constructor invoked using `new C(...)` (but not `this(...)`) + val tparsToAdd = + tree match { + case Select(New(_), _) => sym.owner.info.typeParams // for a well-formed program, we know `sym.isConstructor` + case _ => Nil + } + + if (tparsToAdd.isEmpty) pre memberType sym + // Need to make sure tparsToAdd are owned by sym (the constructor), and not the class (`sym.owner`). + // Otherwise, asSeenFrom will rewrite them to the corresponding symbols in `pre` (the new this type for `sym.owner`). + else createFromClonedSymbolsAtOwner(tparsToAdd, sym, sym.info)(PolyType(_, _)).asSeenFrom(pre, sym.owner) + } + /** Assign `tree` the symbol and type of the alternative which * matches prototype `pt`, if it exists. * If several alternatives match `pt`, take parameterless one. * If no alternative matches `pt`, take the parameterless one anyway. + * (There may be more than one parameterless alternative, in particular, + * badly overloaded default args or case class elements. These are detected elsewhere.) 
*/ def inferExprAlternative(tree: Tree, pt: Type): Tree = { val c = context class InferTwice(pre: Type, alts: List[Symbol]) extends c.TryTwice { def tryOnce(isSecondTry: Boolean): Unit = { - val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt)) + val alts0 = alts.filter(alt => isWeaklyCompatible(pre.memberType(alt), pt)) val alts1 = if (alts0.isEmpty) alts else alts0 val bests = bestAlternatives(alts1) { (sym1, sym2) => - val tp1 = pre memberType sym1 - val tp2 = pre memberType sym2 + val tp1 = memberTypeForSpecificity(pre, sym1, tree) + val tp2 = memberTypeForSpecificity(pre, sym2, tree) ( (tp2 eq ErrorType) || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt) || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2) ) } - // todo: missing test case for bests.isEmpty + def finish(s: Symbol): Unit = tree.setSymbol(s).setType(pre.memberType(s)) + def paramlessOr(error: => Unit): Unit = { + val paramless = + if (isSecondTry) + alts.find { alt => val ps = alt.info.paramss; ps.isEmpty || ps.tail.isEmpty && ps.head.isEmpty } + else None + paramless match { + case Some(alt) => finish(alt) + case None => error + } + } bests match { - case best :: Nil => tree setSymbol best setType (pre memberType best) + case best :: Nil => + finish(best) case best :: competing :: _ if alts0.nonEmpty => - // scala/bug#6912 Don't give up and leave an OverloadedType on the tree. - // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try - // unless an error is issued. We're not issuing an error, in the assumption that it would be - // spurious in light of the erroneous expected type - if (pt.isErroneous) setError(tree) - else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry) - case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry) + // If erroneous expected type, don't issue spurious error and don't `tryTwice` again with implicits. 
+ // scala/bug#6912 except it does not loop + paramlessOr { + if (pt.isErroneous) setError(tree) + else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry) + } + case _ if bests.isEmpty || alts0.isEmpty => + paramlessOr(NoBestExprAlternativeError(tree, pt, isSecondTry)) + case _ => } } } tree.tpe match { - case OverloadedType(pre, alts) => (new InferTwice(pre, alts)).apply() ; tree + case OverloadedType(pre, alts) => (new InferTwice(pre, alts)).apply(); tree case _ => tree } } @@ -1358,7 +1487,7 @@ trait Infer extends Checkable { * the type is replaces by `Unit`, i.e. the argument is treated as an * assignment expression. * - * @pre tree.tpe is an OverloadedType. + * @note Pre-condition `tree.tpe` is an `OverloadedType`. */ def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = { // This potentially makes up to four attempts: tryOnce may execute @@ -1366,25 +1495,27 @@ trait Infer extends Checkable { // with pt = WildcardType if it fails with pt != WildcardType. 
val c = context class InferMethodAlternativeTwice extends c.TryTwice { - private[this] val OverloadedType(pre, alts) = tree.tpe + private[this] val OverloadedType(pre, alts) = tree.tpe: @unchecked private[this] var varargsStar = false private[this] val argtpes = argtpes0 mapConserve { case RepeatedType(tp) => varargsStar = true ; tp case tp => tp } - private def followType(sym: Symbol) = followApply(pre memberType sym) + private def followType(sym: Symbol) = followApply(memberTypeForSpecificity(pre, sym, tree)) // separate method to help the inliner private def isAltApplicable(pt: Type)(alt: Symbol) = context inSilentMode { isApplicable(undetparams, followType(alt), argtpes, pt) && !context.reporter.hasErrors } private def rankAlternatives(sym1: Symbol, sym2: Symbol) = isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2) private def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = { val applicable = overloadsToConsiderBySpecificity(alts filter isAltApplicable(pt), argtpes, varargsStar) + // println(s"bestForExpectedType($argtpes, $pt): $alts -app-> ${alts filter isAltApplicable(pt)} -arity-> $applicable") val ranked = bestAlternatives(applicable)(rankAlternatives) + def finish(s: Symbol): Unit = tree.setSymbol(s).setType(pre.memberType(s)) ranked match { case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous - case best :: Nil => tree setSymbol best setType (pre memberType best) // success - case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed - case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType + case best :: _ => finish(best) + case _ if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed + case _ => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType } } @@ -1404,7 +1535,7 @@ trait Infer extends Checkable { * 
If no such polymorphic alternative exist, error. */ def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = { - val OverloadedType(pre, alts) = tree.tpe + val OverloadedType(pre, alts) = tree.tpe: @unchecked // Alternatives with a matching length type parameter list val matchingLength = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes)) def allMonoAlts = alts forall (_.typeParams.isEmpty) @@ -1416,11 +1547,10 @@ trait Infer extends Checkable { def fail() = PolyAlternativeError(tree, argtypes, matchingLength, errorKind) def finish(sym: Symbol, tpe: Type) = tree setSymbol sym setType tpe // Alternatives which conform to bounds - def checkWithinBounds(sym: Symbol) = sym.alternatives match { - case Nil if argtypes.exists(_.isErroneous) => - case Nil => fail() - case alt :: Nil => finish(alt, pre memberType alt) - case alts @ (hd :: _) => + def checkWithinBounds(sym: Symbol): Unit = sym.alternatives match { + case Nil => if (!argtypes.exists(_.isErroneous)) fail() + case alt :: Nil => finish(alt, pre memberType alt) + case alts @ hd :: _ => log(s"Attaching AntiPolyType-carrying overloaded type to $sym") // Multiple alternatives which are within bounds; spin up an // overloaded type which carries an "AntiPolyType" as a prefix. 
@@ -1432,14 +1562,18 @@ trait Infer extends Checkable { matchingLength.alternatives match { case Nil => fail() case alt :: Nil => finish(alt, pre memberType alt) - case _ => checkWithinBounds(matchingLength filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))) + case _ => + checkWithinBounds(matchingLength.filter { alt => + isWithinBounds(pre, alt.owner, alt.typeParams, argtypes) && + kindsConform(alt.typeParams, argtypes, pre, alt.owner) + }) } } } object toOrigin extends TypeMap { def apply(tp: Type): Type = tp match { - case TypeVar(origin, _) => origin + case TypeVar(origin, _) => appliedType(origin, tp.typeArgs) case _ => mapOver(tp) } } @@ -1451,5 +1585,7 @@ trait Infer extends Checkable { } } + private lazy val topTypes: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, AnyRefClass) + final case class AdjustedTypeArgs(okParams: List[Symbol], okArgs: List[Type], undetParams: List[Symbol], allArgs: List[Type]) } diff --git a/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala b/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala new file mode 100644 index 000000000000..7af94b66eabd --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala @@ -0,0 +1,801 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.typechecker + +import scala.tools.nsc.Reporting.WarningCategory + +// imported from scalamacros/paradise +trait MacroAnnotationNamers { self: Analyzer => + import global._ + import scala.reflect.internal.Flags._ + import scala.reflect.internal.Mode._ + + override def newNamer(context: Context): Namer = new MacroAnnotationNamer(context) + + class MacroAnnotationNamer(context: Context) extends Namer(context) { + import NamerErrorGen._ + import typer.TyperErrorGen._ + + override def standardEnterSym(tree: Tree): Context = { + def dispatch() = { + var returnContext = context + tree match { + case DocDef(_, mdef) => + enterSym(mdef) + case tree @ Import(_, _) => + createAssignAndEnterSymbol(tree) + finishSymbol(tree) + returnContext = context.makeImportContext(tree) + case tree: MemberDef => + createAssignAndEnterSymbol(tree) + finishSymbol(tree) + case _ => + } + returnContext + } + tree.symbol match { + case NoSymbol => try dispatch() catch typeErrorHandler(tree, context) + case sym => enterExistingSym(sym, tree) + } + } + + protected def createAssignAndEnterSymbol(tree: Tree, mask: Long = -1L): Symbol = { + def coreCreateAssignAndEnterSymbol = { + val sym = tree match { + case PackageDef(pid, _) => createPackageSymbol(tree.pos, pid) // package symbols are entered elsewhere + case imp: Import => createImportSymbol(imp) // import symbols are dummies, no need to enter them anywhere + case mdef: MemberDef => enterInScope(setPrivateWithin(mdef, createMemberSymbol(mdef, mdef.name, mask))) + case _ => abort("Unexpected tree: " + tree) + } + if (isPastTyper) sym.name.toTermName match { + case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => + case _ => + tree match { + case _: DefDef => log("[+symbol] " + sym.debugLocationString) + case _ => + } + } + tree.symbol = sym + sym + } + def deriveSymbolFromSource(tree: Tree)(pf: PartialFunction[Tree, Symbol]): Symbol = { + val sym = pf(tree) + // can't do this in 
coreCreateAssignAndEnterSymbol + // because then we won't get to update sources for redefinitions + // this might be crucial when we have classfiles of the definition we're currently compiling + attachSource(sym, tree) + sym + } + deriveSymbolFromSource(tree) { + case cdef @ ClassDef(mods, name, _, _) => + val existing = context.scope.lookup(name) + val isRedefinition = ( + existing.isType + && existing.isTopLevel + && context.scope == existing.owner.info.decls + && ( + currentRun.canRedefine(existing) || + isExpanded(existing) + ) + ) + val clazz: Symbol = { + if (isRedefinition) { + updatePosFlags(existing, cdef.pos, mods.flags) + setPrivateWithin(cdef, existing) + clearRenamedCaseAccessors(existing) + cdef.symbol = existing + existing + } + else coreCreateAssignAndEnterSymbol setFlag inConstructorFlag + } + if (clazz.isClass && clazz.isTopLevel) { + if (clazz.sourceFile != null && clazz.sourceFile != contextFile) + devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile") + + clazz.associatedFile = contextFile + if (clazz.sourceFile != null) { + assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile) + currentRun.symSource(clazz) = clazz.sourceFile + } + registerTopLevelSym(clazz) + assert(clazz.name.toString.indexOf('(') < 0, clazz.name) // ) + } + clazz + case mdef @ ModuleDef(mods, name, _) => + var m: Symbol = context.scope lookupModule name + val moduleFlags = mods.flags | MODULE + // TODO: inCurrentScope(m) check that's present in vanilla Namer is omitted here + // this fixes SI-3772, but may break something else - I didn't have time to look into that + if (m.isModule && !m.hasPackageFlag && (currentRun.canRedefine(m) || m.isSynthetic || isExpanded(m))) { + // This code accounts for the way the package objects found in the classpath are opened up + // early by the completer of the package itself. 
If the `packageobjects` phase then finds + // the same package object in sources, we have to clean the slate and remove package object + // members from the package class. + // + // TODO SI-4695 Pursue the approach in https://github.com/scala/scala/pull/2789 that avoids + // opening up the package object on the classpath at all if one exists in source. + if (m.isPackageObject) { + val packageScope = m.enclosingPackageClass.rawInfo.decls + packageScope.filter(_.owner != m.enclosingPackageClass).toList.foreach(packageScope unlink _) + } + updatePosFlags(m, mdef.pos, moduleFlags) + setPrivateWithin(mdef, m) + m.moduleClass andAlso (setPrivateWithin(mdef, _)) + context.unit.synthetics -= m + mdef.symbol = m + } + else { + m = coreCreateAssignAndEnterSymbol + m.moduleClass setFlag moduleClassFlags(moduleFlags) + setPrivateWithin(mdef, m.moduleClass) + } + m.moduleClass setInfo namerOf(m).moduleClassTypeCompleter(mdef) + if (m.isTopLevel && !m.hasPackageFlag) { + m.moduleClass.associatedFile = contextFile + currentRun.symSource(m) = m.moduleClass.sourceFile + registerTopLevelSym(m) + } + m + case _ => + coreCreateAssignAndEnterSymbol + } + } + + // reimplemented to integrate with weakEnsureCompanionObject + override def standardEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = { + val m = patchedCompanionSymbolOf(cdef.symbol, context) + + if (m != NoSymbol && currentRun.compiles(m) && !isWeak(m)) m + else unmarkWeak(enterSyntheticSym(atPos(cdef.pos.focus)(creator(cdef)))) + } + + /** Does the same as `ensureCompanionObject`, but also makes sure that the returned symbol destroys itself + * if noone ends up using it (either by calling `ensureCompanionObject` or by `finishSymbol`). 
+ */ + // TODO: deduplicate + protected def weakEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = { + val m = patchedCompanionSymbolOf(cdef.symbol, context) + if (m != NoSymbol && currentRun.compiles(m)) m + else { + val existsVal = context.tree.children.find { + case ValDef(_, term, _, _) if cdef.getterName == term => true + case _ => false + } + if (existsVal.isDefined) NoSymbol else { + val mdef = atPos(cdef.pos.focus)(creator(cdef)) + enterSym(mdef) + markWeak(mdef.symbol) + } + } + } + + protected def finishSymbol(tree: Tree): Unit = { + // annotations on parameters expand together with their owners + // therefore when we actually get to enter the parameters, we shouldn't even bother checking + // TODO: we don't handle primary ctors that might get spuriously marked as maybe expandees because of primary paramss + val aprioriNotExpandable = (context.tree, tree) match { + case (ClassDef(_, _, _, _), TypeDef(_, _, _, _)) => true + case (Template(_, _, _), ValDef(mods, _, _, _)) if mods.isParamAccessor => true + // vparamss of primary ctors are entered in `enterValueParams`, which doesn't call us + case (DefDef(_, _, _, _, _, _), TypeDef(_, _, _, _)) => true + // vparamss of normal methods are also entered in `enterValueParams`, which doesn't call us + case (TypeDef(_, _, _, _), TypeDef(_, _, _, _)) => true + case _ => false + } + + if (aprioriNotExpandable) finishSymbolNotExpandee(tree) + else { + treeInfo.getAnnotationZippers(tree) match { + case Nil => finishSymbolNotExpandee(tree) + case zippers => finishSymbolMaybeExpandee(tree, zippers) + } + + // this will only show companions defined above ourselves + // so when finishing `class C` in `{ class C; object C }` + // we won't see `object C` in `companion` - we will see NoSymbol + // that's the limitation of how namer works, but nevertheless it's not a problem for us + // because if finishing `class C` doesn't set up the things, finishing `object C` will + val sym 
= tree.symbol + val companion = patchedCompanionSymbolOf(sym, context) + + tree match { + // TODO: should we also support annotations on modules expanding companion classes? + case tree @ ClassDef(_, _, _, _) if isMaybeExpandee(sym) => + val wasExpanded = isExpanded(companion) + val m = weakEnsureCompanionObject(tree) + finishSymbolMaybeExpandeeCompanion(attachedSource(m), m, sym) + if (wasExpanded) markExpanded(m) // why is this necessary? see files/run/macro-annotation-recursive-class + // TODO: in general, this first call to FSMEC usually only brings grief + // can we get rid of it completely without having to sweep its results under the carpet? + case tree @ ModuleDef(_, _, _) if isMaybeExpandee(companion) => + finishSymbolMaybeExpandeeCompanion(tree, sym, companion) + case _ => + } + } + } + + protected def finishSymbolNotExpandee(tree: Tree): Unit = { + val sym = tree.symbol + def savingLock[T](op: => T): T = { + val wasLocked = sym.hasFlag(LOCKED) + val result = op + if (wasLocked) sym.setFlag(LOCKED) + result + } + + savingLock { + tree match { + case tree @ PackageDef(_, _) => + newNamer(context.make(tree, sym.moduleClass, sym.info.decls)).enterSyms(tree.stats) + case tree @ ClassDef(mods, name, tparams, impl) => + val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size + // not entering + tree.symbol setInfo completerOf(tree) + + if (mods.isCase) { + val m = ensureCompanionObject(tree, caseModuleDef) + m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree)) + } + val hasDefault = impl.body exists treeInfo.isConstructorWithDefault + if (hasDefault) { + val m = ensureCompanionObject(tree) + m.updateAttachment(new ConstructorDefaultsAttachment(tree, null)) + } + val owner = tree.symbol.owner + if (settings.warnPackageObjectClasses && owner.isPackageObjectClass && !mods.isImplicit) { + context.warning(tree.pos, + "it is not recommended to define classes/objects inside of package objects.\n" + + "If possible, define " + 
tree.symbol + " in " + owner.skipPackageObject + " instead.", + WarningCategory.LintPackageObjectClasses) + } + // Suggested location only. + if (mods.isImplicit) { + if (primaryConstructorArity == 1) { + log("enter implicit wrapper "+tree+", owner = "+owner) + enterImplicitWrapper(tree) + } + else reporter.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter") + } + validateCompanionDefs(tree) + case tree @ ModuleDef(_, _, _) => + unmarkWeak(sym) + sym setInfo completerOf(tree) + validateCompanionDefs(tree) + case tree @ ValDef(_, _, _, _) => + val isScala = !context.unit.isJava + if (isScala) { + if (nme.isSetterName(tree.name)) ValOrVarWithSetterSuffixError(tree) + if (tree.mods.isPrivateLocal && tree.mods.isCaseAccessor) PrivateThisCaseClassParameterError(tree) + } + if (isScala && deriveAccessors(tree)) { + // when refactoring enterSym, I needed to decouple symbol creation and various syntheses + // so that annotation expansion mechanism could be installed in-between of those + // it went well except for one thing - ValDef symbol creation is very closely tied to syntheses + // because depending on whether the ValDef is a val, var or a lazy val, different symbols need to be generated + // since I didn't have much time (and, back then, much understanding), I just decided to create dummies + // that live only to stand in as potential annottees and get destroyed if any sort of synthesis is necessary + // TODO: this is obviously ugly and needs to be fixed + context.scope.unlink(tree.symbol) + tree.symbol setInfo NoType + enterGetterSetter(tree) + } else { + tree.symbol setInfo completerOf(tree) + } + if (isEnumConstant(tree)) + tree.symbol setInfo ConstantType(Constant(tree.symbol)) + case tree @ DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => + if (mexists(tree.vparamss)(_.mods.hasDefault)) + enterDefaultGetters(tree.symbol, tree, tree.vparamss, tree.tparams) + sym setInfo completerOf(tree) + case tree @ DefDef(mods, name, tparams, _, 
_, _) => + if (mexists(tree.vparamss)(_.mods.hasDefault)) + enterDefaultGetters(tree.symbol, tree, tree.vparamss, tree.tparams) + + val completer = + if (sym hasFlag SYNTHETIC) { + if (name == nme.copy) copyMethodCompleter(tree) + else if (sym hasFlag CASE) applyUnapplyMethodCompleter(tree, context) + else completerOf(tree) + } else completerOf(tree) + sym setInfo completer + case tree @ TypeDef(_, _, _, _) => + sym setInfo completerOf(tree) + case tree @ Import(_, _) => + namerOf(tree.symbol) importTypeCompleter tree + case x => throw new MatchError(x) + } + } + } + + // we have several occasions when so called "maybe expandees" need special care + // ("maybe expandees" = annotated members, which might or might not be annotated with a macro expansion) + // 1) (when called by Symbol.info) trigger the MaybeExpandeeCompleter and then immediately recur into a fresh completer + // if we don't recur, we're doomed to fail, because there are only so many retries that Symbol.info can tolerate + // and this retry threshold is already fine-tuned to the current chain of completers, which makes MaybeExpandeeCompleter one too many + // 2) (when called by expandMacroAnnotations from templateSig or typedBlock) in this situation noone needs us to fully complete + // the underlying symbol. just making sure that we don't have any annotations to expand is the least and the most we should do. 
+ // if we're overeager like in mode #1, we might easily induce cyclic reference errors (like in tests/run/macro-annotations-packageobject) + // 3) (when called by Symbol.typeParams) this one is different from Symbol.info, because it calls load, not complete + // from what I understand, this separation exists because it takes much less effort to figure out tparams rather than the full signature + // for example, vanilla completers assigned in namer are created with typeParams already known + // you can see for yourself in the distinction between monoTypeCompleter and PolyTypeCompleter + // therefore, just as with Symbol.info we need to trigger the MaybeExpandeeCompleter + // and then not forget to recur into the fresh completer's load, again because of the retry limit baked into Symbol.typeParams + // 4) TODO: (when called by Symbol.unsafeTypeParams) figure out what's the deal with them + // existence of this method profoundly scares me, even though I never had a problem with it + abstract class MaybeExpandeeCompleter(val tree: Tree) extends LockingTypeCompleter with FlagAssigningCompleter { + def destroy(syms: Symbol*) = { + for (sym <- syms) { + context.unit.synthetics -= sym + context.scope.unlink(sym) + sym setInfo NoType + sym.moduleClass setInfo NoType + sym.removeAttachment[SymbolCompleterAttachment] + } + } + + def complete(sym: Symbol, onlyExpansions: Boolean) = { + lockedCount += 1 + try completeImpl(sym, onlyExpansions) + finally lockedCount -= 1 + } + + override def completeImpl(sym: Symbol): Unit = { + completeImpl(sym, onlyExpansions = false) + } + + def completeImpl(sym: Symbol, onlyExpansions: Boolean): Unit = { + val thisCompleter = sym.rawInfo + maybeExpand() + assert(sym.rawInfo != thisCompleter, s"${sym.accurateKindString} ${sym.rawname}#${sym.id} with $kind") + if (onlyExpansions) sym.rawInfo.completeOnlyExpansions(sym) + else sym.rawInfo.complete(sym) + } + + override def load(sym: Symbol): Unit = { + this.completeOnlyExpansions(sym) + 
sym.rawInfo.load(sym) + } + + def maybeExpand(): Unit // TODO: should I also pass `sym` here? + } + + abstract class MaybeExpandeeCompanionCompleter(tree: Tree) extends MaybeExpandeeCompleter(tree) + + private implicit class RichType(tpe: Type) { + def completeOnlyExpansions(sym: Symbol) = tpe match { + case mec: MacroAnnotationNamer#MaybeExpandeeCompleter => mec.complete(sym, onlyExpansions = true) + case _ => + } + } + + protected def finishSymbolMaybeExpandee(tree: Tree, annZippers: List[treeInfo.AnnotationZipper]): Unit = { + val sym = tree.symbol + unmarkWeak(sym) + markMaybeExpandee(sym) + sym.setInfo(new MaybeExpandeeCompleter(tree) { + override def kind = s"maybeExpandeeCompleter for ${sym.accurateKindString} ${sym.rawname}#${sym.id}" + override def maybeExpand(): Unit = { + val companion = if (this.tree.isInstanceOf[ClassDef]) patchedCompanionSymbolOf(sym, context) else NoSymbol + + def maybeExpand(annotation: Tree, annottee: Tree, maybeExpandee: Tree): Option[List[Tree]] = + if (context.macrosEnabled) { // TODO: when is this bit flipped -- can we pull this check out farther? 
+ val treeInfo.Applied(Select(New(tpt), nme.CONSTRUCTOR), _, _) = annotation: @unchecked + val mann = probeMacroAnnotation(context, tpt) + if (mann.isClass && mann.hasFlag(MACRO)) { + assert(!currentRun.compiles(mann), mann) + val annm = prepareAnnotationMacro(annotation, mann, sym, annottee, maybeExpandee) + expandAnnotationMacro(this.tree, annm) + // if we encounter an error, we just return None, so that other macro annotations can proceed + // this is unlike macroExpand1 when any error in an expandee blocks expansions + // there it's necessary in order not to exacerbate typer errors + // but when manning we aren't in typer, so we don't have to do as macroExpand1 does + // and also there's a good reason not to ban other macro annotations + // if we do ban them, we might get spurious compilation errors from non-existent members that could've been generated + } else None + } else None + + annZippers.iterator.flatMap(annz => maybeExpand(annz.annotation, annz.annottee, annz.owner)).nextOption() match { + case Some(expanded) => + // TODO: The workaround employed in https://github.com/scalamacros/paradise/issues/19 + // no longer works because of the REPL refactoring in 2.13.0-M2. + // See https://github.com/scalamacros/paradise/issues/102 for discussion. + //tellReplAboutExpansion(sym, companion, expanded) + + markExpanded(sym) + markExpanded(companion) + // expansion brings new trees, probably wildly different from current ones. what do we do? 
+ // the most robust thing would be to destroy ourselves (us and our companion), but we can't do that at top level + // therefore at top level we don't destroy, but rather rely on enterSyms to redefine ourselves + // however when nested we go all out + // TODO: unlinking distorts the order of symbols in scope + // note however that trees (calculated by expandMacroAnnotations) will be generated in correct order + if (!sym.isTopLevel) destroy(sym, companion) + enterSyms(expanded) // TODO: we can't reliably expand into imports, because they won't be accounted by definitions below us + case None => + markNotExpandable(sym) + finishSymbolNotExpandee(this.tree) + } + + // take care of the companion if it's no longer needed + // we can't do this in companion's completer, because that one isn't guaranteed to ever be called + val expandedWithoutCompanion = isExpanded(sym) && attachedExpansion(companion).map(_.isEmpty).getOrElse(false) + val companionHasReemerged = expandedWithoutCompanion && sym.isTopLevel && !isWeak(companion) + val notExpandableWeakCompanion = isNotExpandable(sym) && isWeak(companion) + if ((expandedWithoutCompanion && !companionHasReemerged) || notExpandableWeakCompanion) destroy(companion) + } + }) + } + + // how do we make sure that this completer falls back to the vanilla completer if the companion ends up not expanding? 
+ // well, if a module symbol has a maybeExpandee companion then the last two calls to its setInfo will be one of: + // * non-FSMEC completer for the module and then FSMEC => fallback should call native completer + // * FSMEC from enterSyntheticSym for a phantom module and then FSMEC again => fallback should do nothing + // now it's easy to see that both are correctly handled here + protected def finishSymbolMaybeExpandeeCompanion(tree: Tree, m: Symbol, c: Symbol): Unit = { + val worthBackingUp = !m.rawInfo.isInstanceOf[MacroAnnotationNamer#MaybeExpandeeCompanionCompleter] + if (worthBackingUp) backupCompleter(m) + markMaybeExpandee(m) + m.setInfo(new MaybeExpandeeCompanionCompleter(tree) { + override def kind = s"maybeExpandeeCompanionCompleter for ${m.rawname}#${m.id}" + override def maybeExpand(): Unit = { + c.rawInfo.completeOnlyExpansions(c) + // this is a very tricky part of annotation expansion + // because now, after deferring to our companion's judgement for a while, we have to ourselves figure out: + // 1) whether we should start completing on our own + // 2) if we should do it on our own, then how exactly + // 1 is easy. If our companion's expansion has destroyed us (or hasn't materialized us if we were weak) + // then we no longer care and we silently go into oblivion. Otherwise, we should take care of ourselves. + // 2 is hard, because we have two distinct situations to handle: + // 2a) isExpanded(c) is true, which means that our companion has just expanded + // 2b) isNotExpandable(c) is true, which means that our companion has just been deemed unexpandable + // 2a is simple, because it means that we don't have to do anything, as we've either got destroyed + // or we've got entered in `enterSyms(expanded)` that follows expansions. + // 2b is tricky, because it means that we need to fall back to the most recent non-FSMEC completer. 
+ // The hardest part here is that we can't just get to the completer that was preceding `this` as m.rawInfo + // (otherwise we run into issue #9, for more details see history of this change). Instead we need to track m's type history. + val destroyedDuringExpansion = m.rawInfo == NoType + val failedToMaterializeDuringExpansion = isWeak(m) + val aliveAndKicking = !destroyedDuringExpansion && !failedToMaterializeDuringExpansion + if (aliveAndKicking && isNotExpandable(c)) { + if (worthBackingUp) restoreCompleter(m) + val maybeExpandee = m.rawInfo.isInstanceOf[MacroAnnotationNamer#MaybeExpandeeCompleter] + if (maybeExpandee) markMaybeExpandee(m) else markNotExpandable(m) + } + } + }) + } + + // mostly copy/pasted and adapted from typedIdent + // adaptations = ignore error reporting + ignore java + don't force symbols being compiled + // the last requirement leads to us being imprecise in some situation wrt normal name resolution + // but that's okay, since it's the only way for manns to remain modular and not to cripple normal annotations + protected def probeMacroAnnotation(context: Context, tpt: Tree): Symbol = { + // SAFE HELPERS (can't cause unnecessary completions) + def reallyExists(sym: Symbol) = { if (newTyper(context).isStale(sym)) sym.setInfo(NoType); exists(sym) } + def qualifies(sym: Symbol): Boolean = sym.hasRawInfo && reallyExists(sym) + + // UNSAFE HELPERS (need to guard against unnecessary completions) + def canDefineMann(sym: Symbol): Boolean = !currentRun.compiles(sym) + def exists(sym: Symbol) = if (canDefineMann(sym)) sym.exists else false + def importedSymbol(imp: ImportInfo, name: Name): Symbol = { // TODO: be more precise in reproducing importSig and importedSymbol + val impContext = context.enclosingContextChain.find(_.tree.symbol == imp.tree.symbol).get + val sym = imp.tree.cached("importQualProbe", probeMacroAnnotation(impContext.outer, imp.tree.expr)) + val pre = if (reallyExists(sym) && isAccessible(impContext, sym)) sym.tpe else NoType + 
var result: Symbol = NoSymbol + var renamed = false + var selectors = imp.tree.selectors + def current = selectors.head + while (selectors != Nil && result == NoSymbol) { + if (current.introduces(name)) + result = nonLocalMember(pre, if (name.isTypeName) current.name.toTypeName else current.name) + else if (selectors.head.name == name.toTermName) + renamed = true + else if (current.isWildcard && !renamed) + result = nonLocalMember(pre, name) + if (result == NoSymbol) + selectors = selectors.tail + } + if (settings.warnUnusedImport && selectors.nonEmpty && result != NoSymbol && imp.pos != NoPosition) { + val m_recordUsage = imp.getClass.getDeclaredMethods().find(_.getName == "recordUsage").get + m_recordUsage.setAccessible(true) + m_recordUsage.invoke(imp, current, result) + } + if (definitions isImportable result) result + else NoSymbol + } + // def isAccessible(cx: Context, sym: Symbol) = if (canDefineMann(cx.owner)) cx.isAccessible(sym, cx.prefix, superAccess = false) else false + def isAccessible(cx: Context, sym: Symbol) = true // TODO: sorry, it's 2am, and I can't figure this out + def member(tpe: Type, name: Name) = if (canDefineMann(tpe.typeSymbol)) tpe.member(name) else NoSymbol + def nonLocalMember(tpe: Type, name: Name) = if (canDefineMann(tpe.typeSymbol)) tpe.nonLocalMember(name) else NoSymbol + + if (tpt.hasSymbolField && tpt.symbol != NoSymbol) tpt.symbol + else tpt match { + case Ident(name) => + + // STEP 1: RESOLVE THE NAME IN SCOPE + var defSym: Symbol = NoSymbol + var defEntry: ScopeEntry = null + var cx = context + while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { + defEntry = cx.scope.lookupEntry(name) + if ((defEntry ne null) && qualifies(defEntry.sym)) defSym = defEntry.sym + else { + cx = cx.enclClass + val foundSym = member(cx.prefix, name) filter qualifies + defSym = foundSym filter (isAccessible(cx, _)) + if (defSym == NoSymbol) cx = cx.outer + } + } + if (defSym == NoSymbol && settings.exposeEmptyPackage.value) { + 
defSym = rootMirror.EmptyPackageClass.info member name + } + + // STEP 2: RESOLVE THE NAME IN IMPORTS + val symDepth = if (defEntry eq null) cx.depth + else cx.depth - ({ + if (cx.scope ne null) cx.scope.nestingLevel + else 0 // TODO: fix this in toolboxes, not hack around here + } - defEntry.owner.nestingLevel) + var impSym: Symbol = NoSymbol + var imports = context.imports + while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) { + impSym = importedSymbol(imports.head, name) + if (!exists(impSym)) imports = imports.tail + } + + // FIXME: repl hack. somehow imports that come from repl are doubled + // e.g. after `import $line7.$read.$iw.$iw.foo` you'll have another identical `import $line7.$read.$iw.$iw.foo` + // this is a crude workaround for the issue + imports match { + case fst :: snd :: _ if exists(impSym) && fst == snd => imports = imports.tail + case _ => // do nothing + } + + // STEP 3: TRY TO RESOLVE AMBIGUITIES + if (exists(defSym) && exists(impSym)) { + if (defSym.isDefinedInPackage && + (!currentRun.compiles(defSym) || + context.unit.exists && defSym.sourceFile != context.unit.source.file)) + defSym = NoSymbol + else if (impSym.isError || impSym.name == nme.CONSTRUCTOR) + impSym = NoSymbol + } + if (!exists(defSym) && exists(impSym)) { + var impSym1: Symbol = NoSymbol + var imports1 = imports.tail + while (!imports1.isEmpty && + (!imports.head.isExplicitImport(name) || + imports1.head.depth == imports.head.depth)) { + impSym1 = importedSymbol(imports1.head, name) + if (reallyExists(impSym1)) { + if (imports1.head.isExplicitImport(name)) { + if (imports.head.isExplicitImport(name) || + imports1.head.depth != imports.head.depth) return NoSymbol // was possibly fixable ambiguous import + impSym = impSym1 + imports = imports1 + } else if (!imports.head.isExplicitImport(name) && + imports1.head.depth == imports.head.depth) return NoSymbol // was possibly fixable ambiguous import + } + imports1 = imports1.tail + } + } + + // STEP 
4: DEAL WITH WHAT WE HAVE + if (exists(defSym) && !exists(impSym)) defSym + else if (exists(defSym) && exists(impSym)) NoSymbol // was ambiguous import + else if (!exists(defSym) && exists(impSym)) impSym + else { + val lastTry = rootMirror.missingHook(rootMirror.RootClass, name) + if (lastTry != NoSymbol && isAccessible(context, lastTry)) lastTry + else NoSymbol + } + case Select(qualtree, name) => // TODO: be more precise wrt typedSelect + val qual = probeMacroAnnotation(context, qualtree) + val sym = if (canDefineMann(qual)) member(qual.tpe, name) else NoSymbol + if (reallyExists(sym) && isAccessible(context, sym)) sym else NoSymbol + case AppliedTypeTree(tpt, _) => // https://github.com/scalamacros/paradise/issues/2: expand manns with type parameters + probeMacroAnnotation(context, tpt) + case _ => + NoSymbol + } + } + + // see https://github.com/scalamacros/paradise/issues/7 + // also see https://github.com/scalamacros/paradise/issues/64 + protected def patchedCompanionSymbolOf(original: Symbol, ctx: Context): Symbol = if (original == NoSymbol) NoSymbol else { + val owner = original.owner + // SI-7264 Force the info of owners from previous compilation runs. + // Doing this generally would trigger cycles; that's what we also + // use the lower-level scan through the current Context as a fall back. 
+ if (!currentRun.compiles(owner) && + // NOTE: the following three lines of code are added to work around #7 + !owner.enclosingTopLevelClass.isRefinementClass && + !owner.ownerChain.exists(_.isLocalDummy) && + owner.ownerChain.forall(!currentRun.compiles(_))) { + owner.initialize + } + original.companionSymbol orElse { + implicit class PatchedContext(ctx: Context) { + trait PatchedLookupResult { def suchThat(criterion: Symbol => Boolean): Symbol } + def patchedLookup(name: Name, expectedOwner: Symbol) = new PatchedLookupResult { + override def suchThat(criterion: Symbol => Boolean): Symbol = { + var res: Symbol = NoSymbol + var ctx = PatchedContext.this.ctx + while (res == NoSymbol && ctx.outer != ctx) { + // NOTE: original implementation says `val s = ctx.scope lookup name` + // but we can't use it, because Scope.lookup returns wrong results when the lookup is ambiguous + // and that triggers https://github.com/scalamacros/paradise/issues/64 + val s = { + val lookupResult = ctx.scope.lookupAll(name).filter(criterion).toList + lookupResult match { + case Nil => NoSymbol + case List(unique) => unique + case _ => abort(s"unexpected multiple results for a companion symbol lookup for $original#{$original.id}") + } + } + if (s != NoSymbol && s.owner == expectedOwner) + res = s + else + ctx = ctx.outer + } + res + } + } + } + ctx.patchedLookup(original.name.companionName, owner).suchThat(sym => + (original.isTerm || sym.hasModuleFlag) && + (sym isCoDefinedWith original) + ) + } + } + + protected def prepareAnnotationMacro(ann: Tree, mann: Symbol, sym: Symbol, annottee: Tree, expandee: Tree): Tree = { + val companion = if (expandee.isInstanceOf[ClassDef]) patchedCompanionSymbolOf(sym, context) else NoSymbol + val companionSource = if (!isWeak(companion)) attachedSource(companion) else EmptyTree + val expandees = List(annottee, expandee, companionSource).distinct.filterNot(_.isEmpty) + val safeExpandees = expandees.map(expandee => 
duplicateAndKeepPositions(expandee)).map(_.setSymbol(NoSymbol)) + val prefix = Select(ann, nme.macroTransform) setSymbol mann.info.member(nme.macroTransform) setPos ann.pos + Apply(prefix, safeExpandees) setPos ann.pos + } + + protected def expandAnnotationMacro(original: Tree, expandee: Tree): Option[List[Tree]] = { + val sym = original.symbol + val companion = if (original.isInstanceOf[ClassDef]) patchedCompanionSymbolOf(sym, context) else NoSymbol + val wasWeak = isWeak(companion) + val wasTransient = companion == NoSymbol || companion.isSynthetic + def rollThroughImports(context: Context): Context = { + if (context.isInstanceOf[ImportContext]) rollThroughImports(context.outer) + else context + } + val typer = { + // expanding at top level => allow the macro to see everything + if (sym.isTopLevel) newTyper(context) + // expanding at template level => only allow to see outside of the enclosing class + // we have to skip two contexts: + // 1) the Template context that hosts members + // 2) the ImplDef context that hosts type params (and just them?) + // upd. 
actually, i don't think we should skip the second context + // that doesn't buy us absolutely anything wrt robustness + else if (sym.owner.isClass) newTyper(rollThroughImports(context).outer) + // expanding at block level => only allow to see outside of the block + else newTyper(rollThroughImports(context).outer) + } + def onlyIfExpansionAllowed[T](expand: => Option[T]): Option[T] = { + if (settings.Ymacroexpand.value == settings.MacroExpand.None) None + else { + val oldYmacroexpand = settings.Ymacroexpand.value + try { settings.Ymacroexpand.value = settings.MacroExpand.Normal; expand } + catch { case ex: Exception => settings.Ymacroexpand.value = oldYmacroexpand; throw ex } + } + } + def expand(): Option[Tree] = (new DefMacroExpander(typer, expandee, NOmode, WildcardType) { + override def onSuccess(expanded: Tree) = expanded + })(expandee) match { + case tree if tree.isErroneous => None + case tree => Some(tree) + } + def extract(expanded: Tree): List[Tree] = expanded match { + case Block(stats, Literal(Constant(()))) => stats // ugh + case tree => List(tree) + } + def validate(expanded: List[Tree]): Option[List[Tree]] = { + if (sym.owner.isPackageClass) { + original match { + case ClassDef(_, originalName, _, _) => + expanded match { + case (expandedClass @ ClassDef(_, className, _, _)) :: Nil + if className == originalName && wasWeak => + attachExpansion(sym, List(expandedClass)) + attachExpansion(companion, Nil) + Some(expanded) + case (expandedCompanion @ ModuleDef(_, moduleName, _)) :: (expandedClass @ ClassDef(_, className, _, _)) :: Nil + if className == originalName && moduleName == originalName.toTermName => + attachExpansion(sym, if (wasWeak) List(expandedClass, expandedCompanion) else List(expandedClass)) + attachExpansion(companion, List(expandedCompanion)) + Some(expanded) + case (expandedClass @ ClassDef(_, className, _, _)) :: (expandedCompanion @ ModuleDef(_, moduleName, _)) :: Nil + if className == originalName && moduleName == 
originalName.toTermName => + attachExpansion(sym, if (wasWeak) List(expandedClass, expandedCompanion) else List(expandedClass)) + attachExpansion(companion, List(expandedCompanion)) + Some(expanded) + case _ => + if (wasWeak) MacroAnnotationTopLevelClassWithoutCompanionBadExpansion(expandee) + else MacroAnnotationTopLevelClassWithCompanionBadExpansion(expandee) + None + } + case ModuleDef(_, originalName, _) => + expanded match { + case (expandedModule @ ModuleDef(_, expandedName, _)) :: Nil if expandedName == originalName => + attachExpansion(sym, List(expandedModule)) + Some(expanded) + case _ => + MacroAnnotationTopLevelModuleBadExpansion(expandee) + None + } + case x => throw new MatchError(x) + } + } else { + if (wasTransient) { + attachExpansion(sym, expanded) + attachExpansion(companion, Nil) + } else { + def companionRelated(tree: Tree) = tree.isInstanceOf[ModuleDef] && tree.asInstanceOf[ModuleDef].name == companion.name + val (forCompanion, forSym) = expanded.partition(companionRelated) + attachExpansion(sym, forSym) + attachExpansion(companion, forCompanion) + } + Some(expanded) + } + } + for { + lowlevelExpansion <- onlyIfExpansionAllowed(expand()) + expansion <- Some(extract(lowlevelExpansion)) + duplicated = expansion.map(duplicateAndKeepPositions) + validatedExpansion <- validate(duplicated) + } yield validatedExpansion + } + + override def expandMacroAnnotations(stats: List[Tree]): List[Tree] = { + def mightNeedTransform(stat: Tree): Boolean = stat match { + case stat: DocDef => mightNeedTransform(stat.definition) + case stat: MemberDef => isMaybeExpandee(stat.symbol) || hasAttachedExpansion(stat.symbol) + case _ => false + } + def rewrapAfterTransform(stat: Tree, transformed: List[Tree]): List[Tree] = (stat, transformed) match { + case (stat @ DocDef(comment, _), List(transformed: MemberDef)) => List(treeCopy.DocDef(stat, comment, transformed)) + case (DocDef(_, _), List(transformed: DocDef)) => List(transformed) + case (_, Nil | List(_: MemberDef)) 
=> transformed + case (_, unexpected) => unexpected // NOTE: who knows how people are already using macro annotations, so it's scary to fail here + } + if (phase.id > currentRun.typerPhase.id || !stats.exists(mightNeedTransform)) stats + else stats.flatMap { stat => + if (mightNeedTransform(stat)) { + val sym = stat.symbol + assert(sym != NoSymbol, (sym, stat)) + if (isMaybeExpandee(sym)) { + def assert(what: Boolean) = Predef.assert(what, s"${sym.accurateKindString} ${sym.rawname}#${sym.id} with ${sym.rawInfo.kind}") + assert(sym.rawInfo.isInstanceOf[MacroAnnotationNamer#MaybeExpandeeCompleter]) + sym.rawInfo.completeOnlyExpansions(sym) + assert(!sym.rawInfo.isInstanceOf[MacroAnnotationNamer#MaybeExpandeeCompleter]) + } + val derivedTrees = attachedExpansion(sym).getOrElse(List(stat)) + val (me, others) = derivedTrees.partition(_.symbol == sym) + rewrapAfterTransform(stat, me) ++ expandMacroAnnotations(others) + } else { + List(stat) + } + } + } + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index d419e724324b..d58ac096241c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,17 +14,19 @@ package scala.tools.nsc package typechecker import java.lang.Math.min + import symtab.Flags._ -import scala.reflect.internal.util.ScalaClassLoader -import scala.reflect.runtime.ReflectionUtils -import scala.reflect.internal.util.Statistics +import scala.annotation._ import scala.reflect.internal.TypesStats -import scala.reflect.macros.util._ -import scala.util.control.ControlThrowable import scala.reflect.internal.util.ListOfNil -import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes} +import scala.reflect.internal.util.ScalaClassLoader +import scala.reflect.internal.util.Statistics import scala.reflect.macros.compiler.DefaultMacroCompiler +import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes} +import scala.reflect.macros.util._ +import scala.reflect.runtime.ReflectionUtils import scala.tools.reflect.FastTrack +import scala.util.control.ControlThrowable import scala.util.control.NonFatal import Fingerprint._ @@ -64,42 +66,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings - /** Obtains a `ClassLoader` instance used for macro expansion. - * - * By default a new `ScalaClassLoader` is created using the classpath - * from global and the classloader of self as parent. - * - * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
- */ - protected def findMacroClassLoader(): ClassLoader = { - import java.net.URL - - val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { - for { - file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) - af <- Option(settings.pathFactory.getDirectory(file)) - } yield af.file.toURI.toURL - } else global.classPath.asURLs - def newLoader: () => ScalaClassLoader.URLClassLoader = () => { - analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) - } - - val policy = settings.YcacheMacroClassLoader.value - val cache = Macros.macroClassLoadersCache - val disableCache = policy == settings.CachePolicy.None.name - val checkStamps = policy == settings.CachePolicy.LastModified.name - cache.checkCacheability(classpath, checkStamps, disableCache) match { - case Left(msg) => - analyzer.macroLogVerbose(s"macro classloader: $msg.") - val loader = newLoader() - closeableRegistry.registerClosable(loader) - loader - case Right(paths) => - cache.getOrCreate((), paths, newLoader, closeableRegistry, checkStamps) - } - } - /** `MacroImplBinding` and its companion module are responsible for * serialization/deserialization of macro def -> impl bindings. 
* @@ -163,6 +129,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { * * We will have the following annotation added on the macro definition `foo`: * + * {{{ * @scala.reflect.macros.internal.macroImpl( * `macro`( * "macroEngine" = , @@ -171,31 +138,32 @@ trait Macros extends MacroRuntimes with Traces with Helpers { * "signature" = List(Other), * "methodName" = "impl", * "className" = "Macros$")) + * }}} */ def macroEngine = "v7.0 (implemented in Scala 2.11.0-M8)" object MacroImplBinding { - def pickleAtom(obj: Any): Tree = - obj match { - case list: List[_] => Apply(Ident(ListModule), list map pickleAtom) - case s: String => Literal(Constant(s)) - case d: Double => Literal(Constant(d)) - case b: Boolean => Literal(Constant(b)) - case f: Fingerprint => Literal(Constant(f.value)) - } + def pickleAtom(obj: Any): Tree = obj match { + case list: List[_] => Apply(Ident(ListModule), list map pickleAtom) + case s: String => Literal(Constant(s)) + case d: Double => Literal(Constant(d)) + case b: Boolean => Literal(Constant(b)) + case f: Fingerprint => Literal(Constant(f.value)) + case x => throw new MatchError(x) + } - def unpickleAtom(tree: Tree): Any = - tree match { - case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom - case Literal(Constant(s: String)) => s - case Literal(Constant(d: Double)) => d - case Literal(Constant(b: Boolean)) => b - case Literal(Constant(i: Int)) => Fingerprint(i) - } + def unpickleAtom(tree: Tree): Any = tree match { + case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom + case Literal(Constant(s: String)) => s + case Literal(Constant(d: Double)) => d + case Literal(Constant(b: Boolean)) => b + case Literal(Constant(i: Int)) => Fingerprint(i) + case x => throw new MatchError(x) + } - def pickle(macroImplRef: Tree): Tree = { + def extractMacroBindingImpl(macroImplRef: Tree): MacroImplBinding = { val runDefinitions = currentRun.runDefinitions import 
runDefinitions._ - val MacroImplReference(isBundle, isBlackbox, owner, macroImpl, targs) = macroImplRef + val MacroImplReference(isBundle, isBlackbox, owner, macroImpl, targs) = (macroImplRef: @unchecked) // todo. refactor when fixing scala/bug#5498 def className: String = { @@ -219,16 +187,24 @@ trait Macros extends MacroRuntimes with Traces with Helpers { case _ => Other } - val transformed = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => tparam) + val transformed = transformTypeTagEvidenceParams(macroImplRef, (_, tparam) => tparam) mmap(transformed)(p => if (p.isTerm) fingerprint(p.info) else Tagged(p.paramPos)) } + MacroImplBinding(isBundle, isBlackbox, className, macroImpl.name.toString, signature, targs map (_.duplicate)) + } + + def pickle(macroImplRef: Tree): Tree = { + + val MacroImplBinding(isBundle, isBlackbox, className, methodName, signature, targs) = + extractMacroBindingImpl(macroImplRef) + val payload = List[(String, Any)]( "macroEngine" -> macroEngine, "isBundle" -> isBundle, "isBlackbox" -> isBlackbox, "className" -> className, - "methodName" -> macroImpl.name.toString, + "methodName" -> methodName, "signature" -> signature ) @@ -238,19 +214,19 @@ trait Macros extends MacroRuntimes with Traces with Helpers { // I just named it "macro", because it's macro-related, but I could as well name it "foobar" val nucleus = Ident(newTermName("macro")) val wrapped = Apply(nucleus, payload map { case (k, v) => Assign(pickleAtom(k), pickleAtom(v)) }) - val pickle = gen.mkTypeApply(wrapped, targs map (_.duplicate)) + val pickle = gen.mkTypeApply(wrapped, targs) // assign NoType to all freshly created AST nodes // otherwise pickler will choke on tree.tpe being null // there's another gotcha // if you don't assign a ConstantType to a constant // then pickling will crash - new Transformer { + new AstTransformer { override def transform(tree: Tree) = { tree match { - case Literal(const @ Constant(x)) if tree.tpe == null => tree setType 
ConstantType(const) - case _ if tree.tpe == null => tree setType NoType - case _ => ; + case Literal(const @ Constant(_)) if tree.tpe == null => tree.setType(ConstantType(const)) + case _ if tree.tpe == null => tree.setType(NoType) + case _ => } super.transform(tree) } @@ -263,8 +239,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { case TypeApply(wrapped, targs) => (wrapped, targs) case wrapped => (wrapped, Nil) } - val Apply(_, pickledPayload) = wrapped - val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap + val Apply(_, pickledPayload) = wrapped: @unchecked + val payload = pickledPayload.map { case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) case x => throw new MatchError(x) }.toMap // TODO: refactor error handling: fail always throws a TypeError, // and uses global state (analyzer.lastTreeToTyper) to determine the position for the error @@ -315,10 +291,14 @@ trait Macros extends MacroRuntimes with Traces with Helpers { macroImplBindingCache.getOrElseUpdate(macroDef, macroDef.getAnnotation(MacroImplAnnotation) collect { case AnnotationInfo(_, List(pickle), _) => MacroImplBinding.unpickle(pickle) + } orElse { + macroDef.getAnnotation(MacroImplLocationAnnotation) collect { + case AnnotationInfo(_, List(macroImplRef), _) => MacroImplBinding.extractMacroBindingImpl(macroImplRef) + } } ) } - private val macroImplBindingCache = perRunCaches.newAnyRefMap[Symbol, Option[MacroImplBinding]]() + private val macroImplBindingCache = perRunCaches.newMap[Symbol, Option[MacroImplBinding]]() def isBlackbox(expandee: Tree): Boolean = isBlackbox(dissectCore(expandee).symbol) def isBlackbox(macroDef: Symbol): Boolean = pluginsIsBlackbox(macroDef) @@ -332,49 +312,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { fastTrackBoxity orElse bindingBoxity getOrElse false } - def computeMacroDefTypeFromMacroImplRef(macroDdef: DefDef, macroImplRef: Tree): Type = { - macroImplRef match { - case 
MacroImplReference(_, _, _, macroImpl, targs) => - // Step I. Transform c.Expr[T] to T and everything else to Any - var runtimeType = decreaseMetalevel(macroImpl.info.finalResultType) - - // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body - runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, targs map (_.tpe)) - - // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY - def unsigma(tpe: Type): Type = - transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol) match { - case (implCtxParam :: Nil) :: implParamss => - val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap - object UnsigmaTypeMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(pre, sym, args) => - val pre1 = pre match { - case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue => - ThisType(macroDdef.symbol.owner) - case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue => - implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre - case _ => - pre - } - val args1 = args map mapOver - TypeRef(pre1, sym, args1) - case _ => - mapOver(tp) - } - } - - UnsigmaTypeMap(tpe) - case _ => - tpe - } - - unsigma(runtimeType) - case _ => - ErrorType - } - } - /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method or a top-level macro bundle, * and that that method is signature-wise compatible with the given macro definition. 
* @@ -411,7 +348,18 @@ trait Macros extends MacroRuntimes with Traces with Helpers { val macroDdef: self.global.DefDef = macroDdef1 } with DefaultMacroCompiler val macroImplRef = macroCompiler.resolveMacroImpl - if (macroImplRef.isEmpty) fail() else success(macroImplRef) + if (macroImplRef.isEmpty) fail() else { + def hasTypeTag = { + val marker = NoSymbol.newErrorValue(TermName("restricted")) + val xformed = transformTypeTagEvidenceParams(macroImplRef, (_, _) => marker) + xformed.nonEmpty && xformed.last.contains(marker) + } + if (macroDdef.name == nme.macroTransform && hasTypeTag) { + typer.context.error(macroDdef.pos, "implementation restriction: macro annotation impls cannot have typetag context bounds " + + "(consider taking apart c.macroApplication and manually calling c.typecheck on the type arguments)") + fail() + } else success(macroImplRef) + } } } } @@ -452,8 +400,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { import typer.TyperErrorGen._ val isNullaryArgsEmptyParams = argss.isEmpty && paramss == ListOfNil - if (paramss.length < argss.length) MacroTooManyArgumentListsError(expandee) - if (paramss.length > argss.length && !isNullaryArgsEmptyParams) MacroTooFewArgumentListsError(expandee) + if (paramss.sizeCompare(argss) < 0) MacroTooManyArgumentListsError(expandee) + if (paramss.sizeCompare(argss) > 0 && !isNullaryArgsEmptyParams) MacroTooFewArgumentListsError(expandee) val macroImplArgs: List[Any] = if (fastTrack contains macroDef) { @@ -472,10 +420,10 @@ trait Macros extends MacroRuntimes with Traces with Helpers { val trees = map3(argss, paramss, signature)((args, defParams, implParams) => { val isVarargs = isVarArgsList(defParams) if (isVarargs) { - if (defParams.length > args.length + 1) MacroTooFewArgumentsError(expandee) + if (defParams.lengthIs > (args.length + 1)) MacroTooFewArgumentsError(expandee) } else { - if (defParams.length < args.length) MacroTooManyArgumentsError(expandee) - if (defParams.length > args.length) 
MacroTooFewArgumentsError(expandee) + if (defParams.sizeCompare(args) < 0) MacroTooManyArgumentsError(expandee) + if (defParams.sizeCompare(args) > 0) MacroTooFewArgumentsError(expandee) } val wrappedArgs = mapWithIndex(args)((arg, j) => { @@ -602,7 +550,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def onSuppressed(expandee: Tree): Tree = expandee def onDelayed(expanded: Tree): Tree = expanded def onSkipped(expanded: Tree): Tree = expanded - def onFailure(expanded: Tree): Tree = { typer.infer.setError(expandee); expandee } + def onFailure(@unused expanded: Tree): Tree = { typer.infer.setError(expandee); expandee } def apply(desugared: Tree): Tree = { if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee) @@ -656,8 +604,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } expanded match { case Success(expanded) => - // duplicate expanded tree to avoid structural sharing in macro-genrated trees - // see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc + // duplicate expanded tree to avoid structural sharing in macro-generated trees + // see https://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc val expanded1 = try onSuccess(positionsToOffset(duplicateAndKeepPositions(expanded))) finally popMacroContext() if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1) if (settings.Ymacroexpand.value == settings.MacroExpand.Discard && !typer.context.isSearchingForImplicitParam) { @@ -692,7 +640,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { else { // approximation is necessary for whitebox macros to guide type inference // read more in the comments for onDelayed below - val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol } + val undetparams = tp collect { case tp1 if tp1.typeSymbol.isTypeParameter => tp1.typeSymbol } deriveTypeWithWildcards(undetparams)(tp) } } @@ -810,19 
+758,21 @@ trait Macros extends MacroRuntimes with Traces with Helpers { expander(expandee) } - sealed abstract class MacroStatus(val result: Tree) - case class Success(expanded: Tree) extends MacroStatus(expanded) - case class Fallback(fallback: Tree) extends MacroStatus(fallback) { runReporting.seenMacroExpansionsFallingBack = true } - case class Delayed(delayed: Tree) extends MacroStatus(delayed) - case class Skipped(skipped: Tree) extends MacroStatus(skipped) - case class Failure(failure: Tree) extends MacroStatus(failure) - def Delay(expanded: Tree) = Delayed(expanded) - def Skip(expanded: Tree) = Skipped(expanded) + private sealed abstract class MacroStatus(val result: Tree) + private case class Success(expanded: Tree) extends MacroStatus(expanded) + private case class Fallback(fallback: Tree) extends MacroStatus(fallback) { + runReporting.seenMacroExpansionsFallingBack = true + } + private case class Delayed(delayed: Tree) extends MacroStatus(delayed) + private case class Skipped(skipped: Tree) extends MacroStatus(skipped) + private case class Failure(failure: Tree) extends MacroStatus(failure) + private def Delay(expanded: Tree) = Delayed(expanded) + private def Skip(expanded: Tree) = Skipped(expanded) /** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded * Meant for internal use within the macro infrastructure, don't use it elsewhere. 
*/ - def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = { + private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = { val wasDelayed = isDelayed(expandee) val undetparams = calculateUndetparams(expandee) val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty @@ -858,13 +808,13 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } expanded match { case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree) - case expanded: Tree if expandee.symbol.isTermMacro => validateResultingTree(expanded) + case expanded: Tree if expandee.symbol.isTermMacro => validateResultingTree(expanded) case _ => MacroExpansionHasInvalidTypeError(expandee, expanded) } } catch { - case ex: Throwable => + case t: Throwable => if (openMacros.nonEmpty) popMacroContext() // weirdly we started popping on an empty stack when refactoring fatalWarnings logic - val realex = ReflectionUtils.unwrapThrowable(ex) + val realex = ReflectionUtils.unwrapThrowable(t) realex match { case ex: InterruptedException => throw ex case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex) @@ -882,17 +832,16 @@ trait Macros extends MacroRuntimes with Traces with Helpers { /** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded * Meant for internal use within the macro infrastructure, don't use it elsewhere. 
*/ - def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = { + private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = { import typer.TyperErrorGen._ val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee) macroLogLite(s"falling back to: $fallbackSym") - def mkFallbackTree(tree: Tree): Tree = { - tree match { - case Select(qual, name) => Select(qual, name) setPos tree.pos setSymbol fallbackSym - case Apply(fn, args) => Apply(mkFallbackTree(fn), args) setPos tree.pos - case TypeApply(fn, args) => TypeApply(mkFallbackTree(fn), args) setPos tree.pos - } + def mkFallbackTree(tree: Tree): Tree = tree match { + case Select(qual, name) => Select(qual, name) setPos tree.pos setSymbol fallbackSym + case Apply(fn, args) => Apply(mkFallbackTree(fn), args) setPos tree.pos + case TypeApply(fn, args) => TypeApply(mkFallbackTree(fn), args) setPos tree.pos + case x => throw new MatchError(x) } Fallback(mkFallbackTree(expandee)) } @@ -911,7 +860,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { */ var hasPendingMacroExpansions = false // JZ this is never reset to false. What is its purpose? Should it not be stored in Context? def typerShouldExpandDeferredMacros: Boolean = hasPendingMacroExpansions && !delayed.isEmpty - private val forced = perRunCaches.newWeakSet[Tree] + private val forced = perRunCaches.newWeakSet[Tree]() private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Symbol]]() private def isDelayed(expandee: Tree) = !delayed.isEmpty && (delayed contains expandee) def clearDelayed(): Unit = delayed.clear() @@ -953,7 +902,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { * See the documentation for `macroExpand` for more information. */ def macroExpandAll(typer: Typer, expandee: Tree): Tree = - new Transformer { + new AstTransformer { override def transform(tree: Tree) = super.transform(tree match { // todo. 
expansion should work from the inside out case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty && !tree.isErroneous => diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index a58e6073b654..d513fa6bf567 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -84,10 +84,19 @@ trait MethodSynthesis { def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree = createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident))) - def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = { + def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree): Tree = { + def dflt(arg: Tree) = currentRun.runDefinitions.RuntimeStatics_ioobe match { + case NoSymbol => + // Support running the compiler with an older library on the classpath + Throw(IndexOutOfBoundsExceptionClass.tpe_*, fn(arg, nme.toString_)) + case ioobeSym => + val ioobeTypeApply = TypeApply(gen.mkAttributedRef(ioobeSym), List(TypeTree(returnType))) + Apply(ioobeTypeApply, List(arg)) + } + createMethod(name, List(IntTpe), returnType) { m => val arg0 = Ident(m.firstParam) - val default = DEFAULT ==> Throw(IndexOutOfBoundsExceptionClass.tpe_*, fn(arg0, nme.toString_)) + val default = DEFAULT ==> dflt(arg0) val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default Match(arg0, cases) @@ -129,9 +138,10 @@ trait MethodSynthesis { // populate synthetics for this unit with trees that will later be added by the typer // we get here when entering the 
symbol for the valdef, so its rhs has not yet been type checked def enterGetterSetter(tree: ValDef): Unit = { + val sympos = tree.namePos val fieldSym = if (noFieldFor(tree, owner)) NoSymbol - else owner.newValue(tree.name append NameTransformer.LOCAL_SUFFIX_STRING, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal) + else owner.newValue(tree.name append NameTransformer.LOCAL_SUFFIX_STRING, sympos, tree.mods.flags & FieldFlags | PrivateLocal) val getter = Getter(tree) val getterSym = getter.createSym @@ -141,7 +151,7 @@ trait MethodSynthesis { // // scala/bug#10009 the tree's modifiers can be temporarily out of sync with the new symbol's flags. // typedValDef corrects this later on. - tree.symbol = fieldSym orElse (getterSym setPos tree.pos) + tree.symbol = fieldSym orElse getterSym.setPos(sympos) val namer = namerOf(tree.symbol) @@ -224,9 +234,8 @@ trait MethodSynthesis { } } - def enterImplicitWrapper(classDef: ClassDef): Unit = { - val methDef = factoryMeth(classDef.mods & AccessFlags | METHOD | IMPLICIT | SYNTHETIC, classDef.name.toTermName, classDef) + val methDef = factoryMeth(classDef.mods & (AccessFlags | FINAL) | METHOD | IMPLICIT | SYNTHETIC, classDef.name.toTermName, classDef) val methSym = enterInScope(assignMemberSymbol(methDef)) context.unit.synthetics(methSym) = methDef @@ -239,15 +248,14 @@ trait MethodSynthesis { methSym setInfo implicitFactoryMethodCompleter(methDef, classDef.symbol) } - trait DerivedAccessor { def tree: ValDef def derivedName: TermName def derivedFlags: Long def derivedTree(sym: Symbol): Tree - def derivedPos = tree.pos.focus - def createSym = createMethod(tree, derivedName, derivedPos, derivedFlags) + def symPos = tree.namePos + def createSym = createMethod(tree, derivedName, symPos, derivedFlags) } case class Getter(tree: ValDef) extends DerivedAccessor { @@ -263,7 +271,7 @@ trait MethodSynthesis { if (noFieldFor(tree, owner)) tree.rhs // context.unit.transformed.getOrElse(tree.rhs, tree.rhs) else 
Select(This(tree.symbol.enclClass), tree.symbol) - newDefDef(derivedSym, rhs)(tparams = Nil, vparamss = Nil, tpt = tpt) + newDefDefAt(derivedPos)(derivedSym, rhs)(tparams = Nil, vparamss = Nil, tpt = tpt) } // derivedSym setPos tree.pos @@ -294,10 +302,8 @@ trait MethodSynthesis { if (noFieldFor(tree, owner)) EmptyTree else Assign(Select(This(tree.symbol.enclClass), tree.symbol), Ident(setterParam)) - newDefDef(derivedSym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt) - + newDefDefAt(derivedPos)(derivedSym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt) } } - } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index e4996fc5293c..036491205b2e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,17 +13,16 @@ package scala.tools.nsc package typechecker -import scala.annotation.tailrec +import scala.annotation._ import scala.collection.mutable import symtab.Flags._ -import scala.language.postfixOps import scala.reflect.internal.util.ListOfNil import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ /** This trait declares methods to create symbols and to enter them into scopes. * * @author Martin Odersky - * @version 1.0 */ trait Namers extends MethodSynthesis { self: Analyzer => @@ -34,7 +33,7 @@ trait Namers extends MethodSynthesis { /** Replaces any Idents for which cond is true with fresh TypeTrees(). * Does the same for any trees containing EmptyTrees. 
*/ - private class TypeTreeSubstituter(cond: Name => Boolean) extends Transformer { + private class TypeTreeSubstituter(cond: Name => Boolean) extends AstTransformer { override def transform(tree: Tree): Tree = tree match { case Ident(name) if cond(name) => TypeTree() case _ => super.transform(tree) @@ -58,7 +57,9 @@ trait Namers extends MethodSynthesis { abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer => // overridden by the presentation compiler - def saveDefaultGetter(meth: Symbol, default: Symbol) { } + def saveDefaultGetter(meth: Symbol, default: Symbol): Unit = { } + + def expandMacroAnnotations(stats: List[Tree]): List[Tree] = stats import NamerErrorGen._ val typer = newTyper(context) @@ -144,34 +145,41 @@ trait Namers extends MethodSynthesis { vd.mods.hasAllFlags(JAVA_ENUM | STABLE | STATIC) && ownerHasEnumFlag } - def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T = - if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym - else sym setPrivateWithin typer.qualifyingClass(tree, mods.privateWithin, packageOK = true) + def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): sym.type = + if (sym.isPrivateLocal) sym + else { + val qualClass = if (mods.hasAccessBoundary) + typer.qualifyingClass(tree, mods.privateWithin, packageOK = true, immediate = false) + else + NoSymbol + sym setPrivateWithin qualClass + } - def setPrivateWithin(tree: MemberDef, sym: Symbol): Symbol = + def setPrivateWithin(tree: MemberDef, sym: Symbol): sym.type = setPrivateWithin(tree, sym, tree.mods) def inConstructorFlag: Long = { - var c = context - def inTermOwnedContext = c.owner.isTerm && !c.owner.isAnonymousFunction - def constructorNonSuffix = c.owner.isConstructor && !c.inConstructorSuffix - def earlyInit = c.owner.isEarlyInitialized - while (inTermOwnedContext) { - if (constructorNonSuffix || earlyInit) return INCONSTRUCTOR - c = c.outer - } - 0L + @tailrec def go(context: Context): Long = + 
if (context eq NoContext) 0L else { + val owner = context.owner + if (!owner.isTerm || owner.isAnonymousFunction) 0L + else if (owner.isConstructor) if (context.inConstructorSuffix) 0L else INCONSTRUCTOR + else if (owner.isEarlyInitialized) INCONSTRUCTOR + else go(context.outer) + } + + go(context) } def moduleClassFlags(moduleFlags: Long) = (moduleFlags & ModuleToClassFlags) | inConstructorFlag - def updatePosFlags(sym: Symbol, pos: Position, flags: Long): Symbol = { + def updatePosFlags(sym: Symbol, pos: Position, flags: Long): sym.type = { debuglog("[overwrite] " + sym) val newFlags = (sym.flags & LOCKED) | flags // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef // over this mutated symbol, and we witness a stale cache for `parents`. - invalidateCaches(sym.rawInfo, sym :: sym.moduleClass :: Nil) + invalidateCaches(sym.rawInfo, Set(sym, sym.moduleClass)) sym reset NoType setFlag newFlags setPos pos sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags))) @@ -198,26 +206,6 @@ trait Namers extends MethodSynthesis { else innerNamer } - // FIXME - this logic needs to be thoroughly explained - // and justified. I know it's wrong with respect to package - // objects, but I think it's also wrong in other ways. - protected def conflict(newS: Symbol, oldS: Symbol) = ( - ( !oldS.isSourceMethod - || nme.isSetterName(newS.name) - || newS.isTopLevel - ) && - !( // @M: allow repeated use of `_` for higher-order type params - (newS.owner.isTypeParameter || newS.owner.isAbstractType) - // FIXME: name comparisons not successful, are these underscores - // sometimes nme.WILDCARD and sometimes tpnme.WILDCARD? 
- && (newS.name string_== nme.WILDCARD) - ) - ) - - private def allowsOverload(sym: Symbol) = ( - sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel - ) - private def inCurrentScope(m: Symbol): Boolean = { if (owner.isClass) owner == m.owner else context.scope.lookupSymbolEntry(m) match { @@ -227,46 +215,44 @@ trait Namers extends MethodSynthesis { } /** Enter symbol into context's scope and return symbol itself */ - def enterInScope(sym: Symbol): Symbol = enterInScope(sym, context.scope) - + def enterInScope(sym: Symbol): sym.type = enterInScope(sym, context.scope) + + // There is nothing which reconciles a package's scope with + // the package object's scope. This is the source of many bugs + // with e.g. defining a case class in a package object. When + // compiling against classes, the class symbol is created in the + // package and in the package object, and the conflict is undetected. + // There is also a non-deterministic outcome for situations like + // an object with the same name as a method in the package object. /** Enter symbol into given scope and return symbol itself */ - def enterInScope(sym: Symbol, scope: Scope): Symbol = { - // FIXME - this is broken in a number of ways. - // - // 1) If "sym" allows overloading, that is not itself sufficient to skip - // the check, because "prev.sym" also must allow overloading. - // - // 2) There is nothing which reconciles a package's scope with - // the package object's scope. This is the source of many bugs - // with e.g. defining a case class in a package object. When - // compiling against classes, the class symbol is created in the - // package and in the package object, and the conflict is undetected. - // There is also a non-deterministic outcome for situations like - // an object with the same name as a method in the package object. 
- - // allow for overloaded methods - if (!allowsOverload(sym)) { - val prev = scope.lookupEntry(sym.name) - if ((prev ne null) && prev.owner == scope && conflict(sym, prev.sym)) { - if (sym.isSynthetic || prev.sym.isSynthetic) { - handleSyntheticNameConflict(sym, prev.sym) - handleSyntheticNameConflict(prev.sym, sym) - } - DoubleDefError(sym, prev.sym) - sym setInfo ErrorType - scope unlink prev.sym // let them co-exist... - // FIXME: The comment "let them co-exist" is confusing given that the - // line it comments unlinks one of them. What does it intend? - } - } + def enterInScope(sym: Symbol, scope: Scope): sym.type = { if (sym.isModule && sym.isSynthetic && sym.owner.isClass && !sym.isTopLevel) { val entry = scope.lookupEntry(sym.name.toTypeName) if (entry eq null) scope enter sym else scope.enterBefore(sym, entry) - } else - scope enter sym + } else { + val disallowsOverload = !(sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel) + if (disallowsOverload) { + val prev = scope.lookupEntry(sym.name) + val dde = + (prev ne null) && prev.owner == scope && + (!prev.sym.isSourceMethod || nme.isSetterName(sym.name) || sym.isTopLevel) && + !((sym.owner.isTypeParameter || sym.owner.isAbstractType) && (sym.name string_== nme.WILDCARD)) + // @M: allow repeated use of `_` for higher-order type params + if (dde) { + if (sym.isSynthetic || prev.sym.isSynthetic) { + handleSyntheticNameConflict(sym, prev.sym) + handleSyntheticNameConflict(prev.sym, sym) + } + DoubleDefError(sym, prev.sym) + sym.setInfo(ErrorType) + scope.unlink(prev.sym) // retain the new erroneous symbol in scope (was for IDE); see #scala/bug#2779 + } + } + scope.enter(sym) + } } /** Logic to handle name conflicts of synthetically generated symbols @@ -293,7 +279,7 @@ trait Namers extends MethodSynthesis { case tree @ DefDef(_, _, _, _, _, _) => enterDefDef(tree) case tree @ TypeDef(_, _, _, _) => enterTypeDef(tree) case DocDef(_, defn) => enterSym(defn) - case tree @ Import(_, _) => 
enterImport(tree); returnContext = context.make(tree) + case tree @ Import(_, _) => enterImport(tree); returnContext = context.makeImportContext(tree) case _ => } returnContext @@ -334,7 +320,7 @@ trait Namers extends MethodSynthesis { * the flags to keep. */ def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = { - val pos = tree.pos + val pos = tree.namePos val isParameter = tree.mods.isParameter val flags = tree.mods.flags & mask @@ -353,13 +339,14 @@ trait Namers extends MethodSynthesis { } def createImportSymbol(tree: Import) = - NoSymbol.newImport(tree.pos) setInfo (namerOf(tree.symbol) importTypeCompleter tree) + NoSymbol.newImport(tree.pos).setInfo(namerOf(tree.symbol).importTypeCompleter(tree)) /** All PackageClassInfoTypes come from here. */ def createPackageSymbol(pos: Position, pid: RefTree): Symbol = { val pkgOwner = pid match { case Ident(_) => if (owner.isEmptyPackageClass) rootMirror.RootClass else owner case Select(qual: RefTree, _) => createPackageSymbol(pos, qual).moduleClass + case x => throw new MatchError(x) } val existing = pkgOwner.info.decls.lookup(pid.name) @@ -376,7 +363,7 @@ trait Namers extends MethodSynthesis { } } - private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = { + private def enterClassSymbol(@unused tree: ClassDef, clazz: ClassSymbol): Symbol = { var sourceFile = clazz.sourceFile if (sourceFile != null && sourceFile != contextFile) devWarning(s"Source file mismatch in $clazz: ${sourceFile} vs. $contextFile") @@ -418,60 +405,47 @@ trait Namers extends MethodSynthesis { /** Given a ClassDef or ModuleDef, verifies there isn't a companion which * has been defined in a separate file. 
*/ - def validateCompanionDefs(tree: ImplDef) { - val sym = tree.symbol orElse { return } - val ctx = if (context.owner.isPackageObjectClass) context.outer else context - val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name - val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name - val fails = ( - module.isModule - && clazz.isClass - && !module.isSynthetic - && !clazz.isSynthetic - && (clazz.sourceFile ne null) - && (module.sourceFile ne null) - && !(module isCoDefinedWith clazz) - && module.exists - && clazz.exists - && (currentRun.compiles(clazz) == currentRun.compiles(module)) - ) - if (fails) { - reporter.error(tree.pos, ( - s"Companions '$clazz' and '$module' must be defined in same file:\n" - + s" Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}") + def validateCompanionDefs(tree: ImplDef): Unit = { + val sym = tree.symbol + if (sym != NoSymbol) { + val ctx = if (context.owner.isPackageObjectClass) context.outer else context + val module = if (sym.isModule) sym else ctx.scope.lookupModule(tree.name) + val clazz = if (sym.isClass) sym else ctx.scope.lookupClass(tree.name) + val fails = ( + module.isModule + && clazz.isClass + && !module.isSynthetic + && !clazz.isSynthetic + && (clazz.sourceFile ne null) + && (module.sourceFile ne null) + && !module.isCoDefinedWith(clazz) + && module.exists + && clazz.exists + && currentRun.compiles(clazz) == currentRun.compiles(module) ) + if (fails) reporter.error(tree.pos, + sm"""|Companions '$clazz' and '$module' must be defined in same file: + | Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}""") } } - def enterModuleDef(tree: ModuleDef) = { + def enterModuleDef(tree: ModuleDef): Unit = { val sym = enterModuleSymbol(tree) sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree) sym setInfo completerOf(tree) validateCompanionDefs(tree) - sym } /** Enter a module symbol. 
*/ - def enterModuleSymbol(tree : ModuleDef): Symbol = { + def enterModuleSymbol(tree: ModuleDef): Symbol = { val moduleFlags = tree.mods.flags | MODULE val existingModule = context.scope lookupModule tree.name if (existingModule.isModule && !existingModule.hasPackageFlag && inCurrentScope(existingModule) && (currentRun.canRedefine(existingModule) || existingModule.isSynthetic)) { - // This code accounts for the way the package objects found in the classpath are opened up - // early by the completer of the package itself. If the `packageobjects` phase then finds - // the same package object in sources, we have to clean the slate and remove package object - // members from the package class. - // - // TODO scala/bug#4695 Pursue the approach in https://github.com/scala/scala/pull/2789 that avoids - // opening up the package object on the classpath at all if one exists in source. - if (existingModule.isPackageObject) { - val packageScope = existingModule.enclosingPackageClass.rawInfo.decls - packageScope.foreach(mem => if (mem.owner != existingModule.enclosingPackageClass) packageScope unlink mem) - } updatePosFlags(existingModule, tree.pos, moduleFlags) setPrivateWithin(tree, existingModule) - existingModule.moduleClass andAlso (setPrivateWithin(tree, _)) + existingModule.moduleClass.andAlso(setPrivateWithin(tree, _)) context.unit.synthetics -= existingModule tree.symbol = existingModule } @@ -484,6 +458,7 @@ trait Namers extends MethodSynthesis { val m = tree.symbol if (m.isTopLevel && !m.hasPackageFlag) { + // TODO: I've seen crashes where m.moduleClass == NoSymbol m.moduleClass.associatedFile = contextFile currentRun.symSource(m) = m.moduleClass.sourceFile registerTopLevelSym(m) @@ -491,14 +466,14 @@ trait Namers extends MethodSynthesis { m } - def enterSyms(trees: List[Tree]): Namer = { + def enterSyms(trees: List[Tree]): Unit = trees.foldLeft(this: Namer) { (namer, t) => val ctx = namer enterSym t // for Import trees, enterSym returns a changed context, so we 
need a new namer if (ctx eq namer.context) namer else newNamer(ctx) } - } + def applicableTypeParams(owner: Symbol): List[Symbol] = if (owner.isTerm || owner.isPackageClass) Nil else applicableTypeParams(owner.owner) ::: owner.typeParams @@ -540,7 +515,6 @@ trait Namers extends MethodSynthesis { } private def checkSelectors(tree: Import): Unit = { - import DuplicatesErrorKinds._ val Import(expr, selectors) = tree val base = expr.tpe @@ -550,9 +524,10 @@ trait Namers extends MethodSynthesis { def check(to: Name): Unit = { val e = context.scope.lookupEntry(to) - if (e != null && e.owner == context.scope && e.sym.exists) - typer.permanentlyHiddenWarning(pos, to0, e.sym) - else if (context ne context.enclClass) { + if (e != null && e.owner == context.scope && e.sym.exists) { + if (!context.isPackageOwnedInDifferentUnit(e.sym)) + typer.permanentlyHiddenWarning(pos, to0, e.sym) + } else if (context ne context.enclClass) { val defSym = context.prefix.member(to) filter ( sym => sym.exists && context.isAccessible(sym, context.prefix, superAccess = false)) @@ -560,9 +535,9 @@ trait Namers extends MethodSynthesis { } } if (!tree.symbol.isSynthetic && expr.symbol != null && !expr.symbol.isInterpreterWrapper) { - if (base.member(from) != NoSymbol) + if (base.member(from).exists) check(to0) - if (base.member(from.toTypeName) != NoSymbol) + if (base.member(from.toTypeName).exists) check(to0.toTypeName) } } @@ -577,12 +552,12 @@ trait Namers extends MethodSynthesis { lookup(original.toTermName) != NoSymbol || lookup(original.toTypeName) != NoSymbol } - if (from != nme.WILDCARD && base != ErrorType) { + if (!s.isWildcard && !s.isGiven && base != ErrorType) { val okay = isValid(from, base) || context.unit.isJava && ( // Java code... 
(nme.isModuleName(from) && isValid(from.dropModule, base)) // - importing Scala module classes || isValid(from, base.companion) // - importing type members from types ) - if (!okay) typer.TyperErrorGen.NotAMemberError(tree, expr, from) + if (!okay) typer.TyperErrorGen.NotAMemberError(tree, expr, from, context.outer) // Setting the position at the import means that if there is // more than one hidden name, the second will not be warned. @@ -592,29 +567,13 @@ trait Namers extends MethodSynthesis { // so don't warn for them. There is a corresponding special treatment // in the shadowing rules in typedIdent to (scala/bug#7232). In any case, // we shouldn't be emitting warnings for .java source files. - if (!context.unit.isJava) - checkNotRedundant(tree.pos withPoint fromPos, from, to) - } - } - - selectors foreach checkSelector - - def noDuplicates(): Unit = { - @inline def isRename(hd: ImportSelector): Boolean = - hd.rename != null && hd.rename != nme.WILDCARD && hd.rename != hd.name - def loop(xs: List[ImportSelector]): Unit = xs match { - case Nil => () - case hd :: tl => - if (hd.name != nme.WILDCARD && tl.exists(x => ! 
(x.name == nme.WILDCARD) && x.name == hd.name)) - DuplicatesError(tree, hd.name, RenamedTwice) - else if (isRename(hd) && tl.exists(x => isRename(hd) && x.rename == hd.rename)) - DuplicatesError(tree, hd.rename, AppearsTwice) - else loop(tl) + if (!context.unit.isJava) { + val at = if (tree.pos.isRange) tree.pos.withPoint(fromPos) else tree.pos + checkNotRedundant(at, from, to) + } } - loop(selectors) } - // checks on the whole set - noDuplicates() + selectors.foreach(checkSelector) } def copyMethodCompleter(copyDef: DefDef): TypeCompleter = { @@ -624,7 +583,7 @@ trait Namers extends MethodSynthesis { def assignParamTypes(copyDef: DefDef, sym: Symbol): Unit = { val clazz = sym.owner val constructorType = clazz.primaryConstructor.tpe - val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol)) + val subst = SubstSymMap(clazz.typeParams, copyDef.tparams.map(_.symbol)) val classParamss = constructorType.paramss foreach2(copyDef.vparamss, classParamss)((copyParams, classParams) => @@ -680,7 +639,7 @@ trait Namers extends MethodSynthesis { val userDefined = ownerInfo.memberBasedOnName(sym.name, BridgeFlags | SYNTHETIC) (userDefined != NoSymbol) && { - assert(userDefined != sym) + assert(userDefined != sym, "userDefined symbol cannot be the same as symbol of which it is a member") val alts = userDefined.alternatives // could be just the one, if this member isn't overloaded // don't compute any further `memberInfo`s if there's an error somewhere alts.exists(_.isErroneous) || { @@ -697,7 +656,7 @@ trait Namers extends MethodSynthesis { // There are two ways in which we exclude the symbol from being added in typedStats::addSynthetics, // because we don't know when the completer runs with respect to this loop in addSynthetics // for (sym <- scope) - // for (tree <- context.unit.synthetics.get(sym) if shouldAdd(sym)) { + // for (tree <- context.unit.synthetics.get(sym) if shouldAdd(sym)) // if (!sym.initialize.hasFlag(IS_ERROR)) // newStats += 
typedStat(tree) // If we're already in the loop, set the IS_ERROR flag and trigger the condition `sym.initialize.hasFlag(IS_ERROR)` @@ -734,7 +693,7 @@ trait Namers extends MethodSynthesis { /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`. * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds: - * !tree.symbol.isAbstractType || { tparams.forall(_.symbol == NoSymbol) + * !tree.symbol.isAbstractType || tparams.forall(_.symbol == NoSymbol) * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter. */ if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ? @@ -755,31 +714,32 @@ trait Namers extends MethodSynthesis { else assignAndEnterFinishedSymbol(tree) if (isEnumConstant(tree)) { + val annots = annotSig(tree.mods.annotations, tree, _ => true) + if (annots.nonEmpty) annotate(tree.symbol, annots) tree.symbol setInfo ConstantType(Constant(tree.symbol)) tree.symbol.owner.linkedClassOfClass addChild tree.symbol } } - def enterPackage(tree: PackageDef) { + def enterPackage(tree: PackageDef): Unit = { val sym = createPackageSymbol(tree.pos, tree.pid) tree.symbol = sym newNamer(context.make(tree, sym.moduleClass, sym.info.decls)) enterSyms tree.stats } - private def enterImport(tree: Import) = { + private def enterImport(tree: Import): Unit = { val sym = createImportSymbol(tree) tree.symbol = sym } - def enterTypeDef(tree: TypeDef) = assignAndEnterFinishedSymbol(tree) + def enterTypeDef(tree: TypeDef): Unit = assignAndEnterFinishedSymbol(tree) def enterDefDef(tree: DefDef): Unit = { tree match { case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => assignAndEnterFinishedSymbol(tree) case DefDef(mods, name, _, _, _, _) => - val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0 - val sym = 
enterInScope(assignMemberSymbol(tree)) setFlag bridgeFlag + val sym = enterInScope(assignMemberSymbol(tree)) val completer = if (sym hasFlag SYNTHETIC) { @@ -794,12 +754,14 @@ trait Namers extends MethodSynthesis { enterDefaultGetters(tree.symbol, tree, tree.vparamss, tree.tparams) } - def enterClassDef(tree: ClassDef) { + def enterClassDef(tree: ClassDef): Unit = { val ClassDef(mods, _, _, impl) = tree val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size tree.symbol = enterClassSymbol(tree) tree.symbol setInfo completerOf(tree) + if (tree.symbol.isJava) patmat.javaClassesByUnit.get(tree.symbol.pos.source).foreach(_.addOne(tree.symbol)) + if (mods.isCase) { val m = ensureCompanionObject(tree, caseModuleDef) m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree)) @@ -829,10 +791,10 @@ trait Namers extends MethodSynthesis { } // Hooks which are overridden in the presentation compiler - def enterExistingSym(sym: Symbol, tree: Tree): Context = { + def enterExistingSym(@unused sym: Symbol, @unused tree: Tree): Context = { this.context } - def enterIfNotThere(sym: Symbol) { } + def enterIfNotThere(sym: Symbol): Unit = () def enterSyntheticSym(tree: Tree): Symbol = { enterSym(tree) @@ -842,13 +804,14 @@ trait Namers extends MethodSynthesis { // --- Lazy Type Assignment -------------------------------------------------- + @nowarn("cat=lint-nonlocal-return") def findCyclicalLowerBound(tp: Type): Symbol = { tp match { case TypeBounds(lo, _) => // check that lower bound is not an F-bound // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed - for (tp1 @ TypeRef(_, sym, _) <- lo) { - if (settings.breakCycles) { + for (TypeRef(_, sym, _) <- lo) { + if (settings.breakCycles.value) { if (!sym.maybeInitialize) { log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}") return sym @@ -864,12 +827,6 @@ trait Namers extends MethodSynthesis { def monoTypeCompleter(tree: MemberDef) = new MonoTypeCompleter(tree) 
class MonoTypeCompleter(tree: MemberDef) extends TypeCompleterBase(tree) { override def completeImpl(sym: Symbol): Unit = { - // this early test is there to avoid infinite baseTypes when - // adding setters and getters --> bug798 - // It is a def in an attempt to provide some insulation against - // uninitialized symbols misleading us. It is not a certainty - // this accomplishes anything, but performance is a non-consideration - // on these flag checks so it can't hurt. def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential val annotations = annotSig(tree.mods.annotations, tree, _ => true) @@ -888,12 +845,6 @@ trait Namers extends MethodSynthesis { sym.setInfo(if (!sym.isJavaDefined) tp else RestrictJavaArraysMap(tp)) - if (needsCycleCheck) { - log(s"Needs cycle check: ${sym.debugLocationString}") - if (!typer.checkNonCyclic(tree.pos, tp)) - sym setInfo ErrorType - } - validate(sym) } } @@ -936,9 +887,11 @@ trait Namers extends MethodSynthesis { val annots = if (mods.annotations.isEmpty) Nil else { - val annotSigs = annotSig(mods.annotations, tree, _ => true) - if (isGetter) filterAccessorAnnots(annotSigs, tree) // if this is really a getter, retain annots targeting either field/getter - else annotSigs filter annotationFilter(FieldTargetClass, !mods.isParamAccessor) + // if this is really a getter, retain annots targeting either field/getter + val pred: AnnotationInfo => Boolean = + if (isGetter) accessorAnnotsFilter(tree.mods) + else annotationFilter(FieldTargetClass, !mods.isParamAccessor) + annotSig(mods.annotations, tree, pred) } // must use typeSig, not memberSig (TODO: when do we need to switch namers?) 
@@ -946,6 +899,8 @@ trait Namers extends MethodSynthesis { fieldOrGetterSym setInfo (if (isGetter) NullaryMethodType(sig) else sig) + checkBeanAnnot(tree, annots) + validate(fieldOrGetterSym) } } @@ -984,17 +939,17 @@ trait Namers extends MethodSynthesis { val mods = valDef.mods val annots = if (mods.annotations.isEmpty) Nil - else filterAccessorAnnots(annotSig(mods.annotations, valDef, _ => true), valDef, isSetter, isBean) + else annotSig(mods.annotations, valDef, accessorAnnotsFilter(valDef.mods, isSetter, isBean)) // for a setter, call memberSig to attribute the parameter (for a bean, we always use the regular method sig completer since they receive method types) // for a regular getter, make sure it gets a NullaryMethodType (also, no need to recompute it: we already have the valSig) val sig = - if (isSetter || isBean) typeSig(ddef, annots) - else { - if (annots.nonEmpty) annotate(accessorSym, annots) + if (isSetter || isBean) typeSig(ddef, annots) + else { + if (annots.nonEmpty) annotate(accessorSym, annots) - NullaryMethodType(valSig) - } + NullaryMethodType(valSig) + } accessorSym setInfo pluginsTypeSigAccessor(sig, typer, valDef, accessorSym) @@ -1002,6 +957,7 @@ trait Namers extends MethodSynthesis { if (isSetter) ddef.rhs.setType(ErrorType) else GetterDefinedTwiceError(accessorSym) + validate(accessorSym) case _ => @@ -1010,27 +966,28 @@ trait Namers extends MethodSynthesis { } } + private def checkBeanAnnot(tree: ValDef, annotSigs: List[AnnotationInfo]) = { + val mods = tree.mods + // neg/t3403: check that we didn't get a sneaky type alias/renamed import that we couldn't detect + // because we only look at names during synthesis (in deriveBeanAccessors) + // (TODO: can we look at symbols earlier?) 
+ val hasNamedBeanAnnots = (mods hasAnnotationNamed tpnme.BeanPropertyAnnot) || (mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot) + if (!hasNamedBeanAnnots && annotSigs.exists(ann => (ann.matches(BeanPropertyAttr)) || ann.matches(BooleanBeanPropertyAttr))) + BeanPropertyAnnotationLimitationError(tree) + } + // see scala.annotation.meta's package class for more info // Annotations on ValDefs can be targeted towards the following: field, getter, setter, beanGetter, beanSetter, param. // The defaults are: // - (`val`-, `var`- or plain) constructor parameter annotations end up on the parameter, not on any other entity. - // - val/var member annotations solely end up on the underlying field, except in traits and for all lazy vals (@since 2.12), + // - val/var member annotations solely end up on the underlying field, except in traits and for all lazy vals, // where there is no field, and the getter thus holds annotations targeting both getter & field. // As soon as there is a field/getter (in subclasses mixing in the trait, or after expanding the lazy val during the fields phase), // we triage the annotations. // // TODO: these defaults can be surprising for annotations not meant for accessors/fields -- should we revisit? // (In order to have `@foo val X` result in the X getter being annotated with `@foo`, foo needs to be meta-annotated with @getter) - private def filterAccessorAnnots(annotSigs: List[global.AnnotationInfo], tree: global.ValDef, isSetter: Boolean = false, isBean: Boolean = false): List[AnnotationInfo] = { - val mods = tree.mods - if (!isBean) { - // neg/t3403: check that we didn't get a sneaky type alias/renamed import that we couldn't detect because we only look at names during synthesis - // (TODO: can we look at symbols earlier?) 
- if (!((mods hasAnnotationNamed tpnme.BeanPropertyAnnot) || (mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot)) - && annotSigs.exists(ann => (ann.matches(BeanPropertyAttr)) || ann.matches(BooleanBeanPropertyAttr))) - BeanPropertyAnnotationLimitationError(tree) - } - + private def accessorAnnotsFilter(mods: Modifiers, isSetter: Boolean = false, isBean: Boolean = false): AnnotationInfo => Boolean = { val canTriageAnnotations = isSetter || !fields.getterTreeAnnotationsTargetFieldAndGetter(owner, mods) def filterAccessorAnnotations: AnnotationInfo => Boolean = @@ -1047,10 +1004,9 @@ trait Namers extends MethodSynthesis { annotationFilter(FieldTargetClass, defaultRetention = true)(ann) || annotationFilter(BeanGetterTargetClass, defaultRetention = true)(ann)) - annotSigs filter (if (isBean) filterBeanAccessorAnnotations else filterAccessorAnnotations) + if (isBean) filterBeanAccessorAnnotations else filterAccessorAnnotations } - def selfTypeCompleter(tree: Tree) = new SelfTypeCompleter(tree) class SelfTypeCompleter(tree: Tree) extends TypeCompleterBase(tree) { override def completeImpl(sym: Symbol): Unit = { @@ -1062,65 +1018,145 @@ trait Namers extends MethodSynthesis { } } - /** This method has a big impact on the eventual compiled code. - * At this point many values have the most specific possible - * type (e.g. in val x = 42, x's type is Int(42), not Int) but - * most need to be widened to avoid undesirable propagation of - * those singleton types. 
+ private def refersToSymbolLessAccessibleThan(tp: Type, sym: Symbol): Boolean = { + val accessibilityReference = + if (sym.isValue && sym.owner.isClass && sym.isPrivate) sym.getterIn(sym.owner) + else sym + + @tailrec def loop(tp: Type): Boolean = tp match { + case SingleType(pre, sym) => sym.isLessAccessibleThan(accessibilityReference) || loop(pre) + case ThisType(sym) => sym.isLessAccessibleThan(accessibilityReference) + case p: SimpleTypeProxy => loop(p.underlying) + case _ => false + } + loop(tp) + } + + /* + * This method has a big impact on the eventual compiled code. + * At this point many values have the most specific possible + * type (e.g. in val x = 42, x's type is Int(42), not Int) but + * most need to be widened (which deconsts) to avoid undesirable + * propagation of those singleton types. * - * However, the compilation of pattern matches into switch - * statements depends on constant folding, which will only take - * place for those values which aren't widened. The "final" - * modifier is the present means of signaling that a constant - * value should not be widened, so it has a use even in situations - * whether it is otherwise redundant (such as in a singleton.) + * However, the compilation of pattern matches into switch + * statements depends on constant folding, which will only take + * place for those values which aren't deconsted. The "final" + * modifier is the present means of signaling that a constant + * value should not deconsted, so it has a use even in situations + * whether it is otherwise redundant (such as in a singleton.) 
*/ private def widenIfNecessary(sym: Symbol, tpe: Type, pt: Type): Type = { - val getter = - if (sym.isValue && sym.owner.isClass && sym.isPrivate) - sym.getterIn(sym.owner) - else sym - def isHidden(tp: Type): Boolean = tp match { - case SingleType(pre, sym) => - (sym isLessAccessibleThan getter) || isHidden(pre) - case ThisType(sym) => - sym isLessAccessibleThan getter - case p: SimpleTypeProxy => - isHidden(p.underlying) - case _ => - false - } - val shouldWiden = ( - !tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo" - && (tpe.widen <:< pt) // Don't widen our way out of conforming to pt - && ( sym.isVariable - || sym.hasFlag(ACCESSOR) && !sym.hasFlag(STABLE) - || sym.isMethod && !sym.hasFlag(ACCESSOR) - || isHidden(tpe) - ) - ) - dropIllegalStarTypes( - if (shouldWiden) tpe.widen - else if (sym.isFinal && !sym.isLazy) tpe // "final val" allowed to retain constant type - else tpe.deconst - ) + // Are we inferring the result type of a stable symbol, whose type doesn't refer to a hidden symbol? + // If we refer to an inaccessible symbol, let's hope widening will result in an expressible type. + // (A LiteralType should be widened because it's too precise for a definition's type.) + val mayKeepSingletonType = + tpe match { + case ConstantType(_) | AnnotatedType(_, ConstantType(_)) => false + case _ => sym.isStable && !refersToSymbolLessAccessibleThan(tpe, sym) + } + + // Only final vals may be constant folded, so deconst inferred type of other members. + @inline def keepSingleton = if (sym.isFinal) tpe else tpe.deconst + + // Only widen if the definition can't keep its inferred singleton type, + // (Also keep singleton type if so indicated by the expected type `pt` + // OPT: 99.99% of the time, `pt` will be `WildcardType`). 
+ if (mayKeepSingletonType || (sym.isFinal && sym.isVal && !sym.isLazy) || ((pt ne WildcardType) && !(tpe.widen <:< pt)) || sym.isDefaultGetter) keepSingleton + else tpe.widen } + /** Computes the type of the body in a ValDef or DefDef, and * assigns the type to the tpt's node. Returns the type. + * + * Under `-Xsource-features`, use `pt`, the type of the overridden member. + * But preserve the precise type of a whitebox macro. + * For `def f = macro g`, here we see `def f = xp(g)` the expansion, + * not the `isMacro` case: `openMacros` will be nonEmpty. + * For `def m = f`, retrieve the typed RHS and check if it is an expansion; + * in that case, check if the expandee `f` is whitebox and preserve + * the precise type if it is. The user must provide an explicit type + * to "opt out" of the inferred narrow type; in Scala 3, they would + * inline the def to "opt in". */ private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = { + class CheckOrDropStructural(drop: Boolean, rhsTpe: Type) extends TypeMap { + override def apply(tp: Type): Type = tp match { + case rt: RefinedType => + val sym = tree.symbol + val warns = rt.decls.filter(_.isOnlyRefinementMember) + if (warns.nonEmpty) { + if (drop) { + val keep = rt.decls.toList.filterNot(warns.toSet) + if (keep.isEmpty && rt.parents.sizeIs == 1) rt.parents.head + else { + val res = refinedType(rt.parents, rt.typeSymbol) + keep.foreach(res.decls.enter) + res + } + } else { + val cat = if (currentRun.isScala3) WarningCategory.Scala3Migration else WarningCategory.LintStructuralType + val msg = + if (currentRun.isScala3) s"in Scala 3 (or with -Xsource-features:no-infer-structural), $sym will no longer have a structural type" + else s"$sym has an inferred structural type" + context.warning(sym.pos, + s"""$msg: $rhsTpe + | members that can be accessed with a reflective call: ${warns.mkString(",")}""".stripMargin, + cat) + rt + } + } else rt + case _ => + mapOver(tp) + } + } val rhsTpe = tree match { - 
case ddef: DefDef if tree.symbol.isTermMacro => defnTyper.computeMacroDefType(ddef, pt) + case ddef: DefDef if tree.symbol.isTermMacro => defnTyper.computeMacroDefType(ddef, pt) // unreached, see methodSig case _ => defnTyper.computeType(tree.rhs, pt) } - - val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt) - tree.tpt defineType defnTpe setPos tree.pos.focus + val nonStructural = if (!tree.symbol.isLocalToBlock && (currentRun.isScala3 || settings.warnInferStructural)) + new CheckOrDropStructural(currentRun.sourceFeatures.noInferStructural, rhsTpe)(rhsTpe) + else rhsTpe + tree.tpt.defineType { + // infer from overridden symbol, contingent on Xsource; exclude constants and whitebox macros + val inferOverridden = currentRun.isScala3 && + !pt.isWildcard && pt != NoType && !pt.isErroneous && + !(tree.isInstanceOf[ValDef] && tree.symbol.isFinal && isConstantType(nonStructural)) && + openMacros.isEmpty && { + context.unit.transformed.get(tree.rhs) match { + case Some(t) if t.hasAttachment[MacroExpansionAttachment] => + val xp = macroExpandee(t) + xp.symbol == null || isBlackbox(xp.symbol) + case _ => true + } + } + val legacy = dropIllegalStarTypes(widenIfNecessary(tree.symbol, nonStructural, pt)) + // <:< check as a workaround for scala/bug#12968 + def warnIfInferenceChanged(): Unit = if (!(legacy =:= pt || legacy <:< pt && pt <:< legacy)) { + val pts = pt.toString + val leg = legacy.toString + val help = if (pts != leg) s" instead of $leg" else "" + val msg = s"in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to $pts$help" + val src = tree.pos.source + val pos = { + val eql = src.indexWhere(_ == '=', start = tree.rhs.pos.start, step = -1) + val declEnd = src.indexWhere(!_.isWhitespace, start = eql - 1, step = -1) + 1 + Some(declEnd).filter(_ > 0).map(src.position) + } + val action = pos.map(p => runReporting.codeAction("add explicit type", p.focus, s": $leg", msg)).getOrElse(Nil) + runReporting.warning(tree.pos, msg, 
WarningCategory.Scala3Migration, tree.symbol, action) + } + if (inferOverridden && currentRun.sourceFeatures.inferOverride) pt + else { + if (inferOverridden) warnIfInferenceChanged() + legacy.tap(InferredImplicitError(tree, _, context)) + } + }.setPos(tree.pos.focus) tree.tpt.tpe } // owner is the class with the self type - def enterSelf(self: ValDef) { + def enterSelf(self: ValDef): Unit = { val ValDef(_, name, tpt, _) = self if (self eq noSelfType) return @@ -1146,34 +1182,38 @@ trait Namers extends MethodSynthesis { private def templateSig(templ: Template): Type = { val clazz = context.owner - val parentTrees = typer.typedParentTypes(templ) - val pending = mutable.ListBuffer[AbsTypeError]() parentTrees foreach { tpt => val ptpe = tpt.tpe - if (!ptpe.isError) { + if (!ptpe.isError && !phase.erasedTypes) { val psym = ptpe.typeSymbol - val sameSourceFile = context.unit.source.file == psym.sourceFile - - if (psym.isSealed && !phase.erasedTypes) - if (sameSourceFile) - psym addChild context.owner + if (psym.isSealed) { + val sameSourceFile = context.unit.source.file == psym.sourceFile + val okChild = + if (psym.isJava) + psym.attachments.get[PermittedSubclassSymbols] match { + case Some(permitted) => permitted.permits.exists(_ == clazz) + case _ => sameSourceFile + } + else + sameSourceFile + if (okChild) + psym.addChild(clazz) else pending += ParentSealedInheritanceError(tpt, psym) - if (psym.isLocalToBlock && psym.isClass && !phase.erasedTypes) - psym addChild context.owner + } + if (psym.isLocalToBlock && psym.isClass) + psym.addChild(clazz) } } pending.foreach(ErrorUtils.issueTypeError) - def checkParent(tpt: Tree): Type = { - if (tpt.tpe.isError) AnyRefTpe - else tpt.tpe + val parents = { + def checkParent(tpt: Tree): Type = if (tpt.tpe.isError) AnyRefTpe else tpt.tpe + parentTrees map checkParent } - val parents = parentTrees map checkParent - enterSelf(templ.self) val decls = newScope @@ -1182,13 +1222,12 @@ trait Namers extends MethodSynthesis { // add apply 
and unapply methods to companion objects of case classes, // unless they exist already; here, "clazz" is the module class - if (clazz.isModuleClass) { + if (clazz.isModuleClass) clazz.attachments.get[ClassForCaseCompanionAttachment] foreach { cma => val cdef = cma.caseClass assert(cdef.mods.isCase, "expected case class: "+ cdef) addApplyUnapply(cdef, templateNamer) } - } // add the copy method to case classes; this needs to be done here, not in SyntheticMethods, because // the namer phase must traverse this copy method to create default getters for its parameters. @@ -1198,7 +1237,7 @@ trait Namers extends MethodSynthesis { val modClass = companionSymbolOf(clazz, context).moduleClass modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma => val cdef = cma.caseClass - def hasCopy = (decls containsName nme.copy) || parents.exists(_ member nme.copy exists) + def hasCopy = decls.containsName(nme.copy) || parents.exists { p => val ov = p.member(nme.copy); ov.exists && !ov.isDeferred } // scala/bug#5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name if (cdef.symbol == clazz && !hasCopy) @@ -1219,7 +1258,9 @@ trait Namers extends MethodSynthesis { } cda.companionModuleClassNamer = templateNamer } + val classTp = ClassInfoType(parents, decls, clazz) + templateNamer.expandMacroAnnotations(templ.body) pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType) } @@ -1230,7 +1271,13 @@ trait Namers extends MethodSynthesis { val resultType = templateSig(impl) val res = GenPolyType(tparams0, resultType) + val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType) + cdef.getAndRemoveAttachment[PermittedSubclasses].foreach { permitted => + clazz.updateAttachment[PermittedSubclassSymbols] { + PermittedSubclassSymbols(permitted.permits.map(typer.typed(_, Mode.NOmode).symbol)) + } + } // Already assign the type to the class symbol (monoTypeCompleter will do it again). // Allows isDerivedValueClass to look at the info. 
@@ -1241,6 +1288,10 @@ trait Namers extends MethodSynthesis { // Don't force the owner's info lest we create cycles as in scala/bug#6357. enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef) } + + if (settings.YmacroAnnotations.value && treeInfo.isMacroAnnotation(cdef)) + typer.typedMacroAnnotation(cdef) + pluginsTp } @@ -1261,12 +1312,12 @@ trait Namers extends MethodSynthesis { // make a java method type if meth.isJavaDefined private def methodTypeFor(meth: Symbol, vparamSymss: List[List[Symbol]], restpe: Type) = { - def makeJavaMethodType(vparams: List[Symbol], restpe: Type) = { - vparams foreach (p => p setInfo objToAny(p.tpe)) - JavaMethodType(vparams, restpe) + def makeMethodType(vparams: List[Symbol], restpe: Type) = { + vparams foreach (p => p setInfo p.tpe) + MethodType(vparams, restpe) } if (vparamSymss.isEmpty) NullaryMethodType(restpe) - else if (meth.isJavaDefined) vparamSymss.foldRight(restpe)(makeJavaMethodType) + else if (meth.isJavaDefined) vparamSymss.foldRight(restpe)(makeMethodType) else vparamSymss.foldRight(restpe)(MethodType(_, _)) } @@ -1300,7 +1351,6 @@ trait Namers extends MethodSynthesis { val tparamSyms = typer.reenterTypeParams(tparams) val tparamSkolems = tparams.map(_.symbol) - /* * Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type. 
* All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter, @@ -1312,7 +1362,6 @@ trait Namers extends MethodSynthesis { def deskolemizedPolySig(vparamSymss: List[List[Symbol]], restpe: Type) = GenPolyType(tparamSyms, methodTypeFor(meth, vparamSymss, restpe).substSym(tparamSkolems, tparamSyms)) - if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) { tpt defineType context.enclClass.owner.tpe_* tpt setPos meth.pos.focus @@ -1332,12 +1381,9 @@ trait Namers extends MethodSynthesis { tptTyped.tpe } - // ignore missing types unless we can look to overridden method to recover the missing information val canOverride = methOwner.isClass && !meth.isConstructor val inferResTp = canOverride && tpt.isEmpty - val inferArgTp = canOverride && settings.YmethodInfer && mexists(vparamss)(_.tpt.isEmpty) - /* * Find the overridden method that matches a schematic method type, @@ -1350,21 +1396,15 @@ trait Namers extends MethodSynthesis { * * NOTE: mutates info of symbol of vparamss that don't specify a type */ - val methodSigApproxUnknownArgs: () => Type = - if (!inferArgTp) () => deskolemizedPolySig(vparamSymss, resTpGiven) - else () => { - // for all params without type set WildcardType - mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType) - // must wait to call deskolemizedPolySig until we've temporarily set the WildcardType info for the vparamSymss - // (Otherwise, valDefSig will complain about missing argument types.) 
- deskolemizedPolySig(vparamSymss, resTpGiven) - } + def methodSigApproxUnknownArgs(): Type = + deskolemizedPolySig(vparamSymss, resTpGiven) // Must be lazy about the schema to avoid cycles in neg/t5093.scala - val overridden = + def computeOverridden(immediate: Boolean) = if (!canOverride) NoSymbol - else safeNextOverriddenSymbolLazySchema(meth, methodSigApproxUnknownArgs) + else safeNextOverriddenSymbolLazySchema(meth, methodSigApproxUnknownArgs _, immediate) + val overridden = computeOverridden(immediate = false) /* * If `meth` doesn't have an explicit return type, extract the return type from the method * overridden by `meth` (if there's an unique one). This type is later used as the expected @@ -1373,12 +1413,9 @@ trait Namers extends MethodSynthesis { * * If the result type is missing, assign a MethodType to `meth` that's constructed using this return type. * This allows omitting the result type for recursive methods. - * - * Missing parameter types are also recovered from the overridden method (by mutating the info of their symbols). - * (The parser accepts missing parameter types under -Yinfer-argument-types.) */ val resTpFromOverride = - if (!(inferArgTp || inferResTp) || overridden == NoSymbol || overridden.isOverloaded) resTpGiven + if (!inferResTp || overridden == NoSymbol || overridden.isOverloaded) resTpGiven else { overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials @@ -1388,23 +1425,6 @@ trait Namers extends MethodSynthesis { case mt => (Nil, mt) } - // try to derive empty parameter types from the overridden method's argument types - if (inferArgTp) { - val overriddenSyms = overriddenTparams ++ overridden.paramss.flatten - val ourSyms = tparamSkolems ++ vparamSymss.flatten - foreach2(vparamss, overridden.paramss) { foreach2(_, _) { (vparam, overriddenParam) => - // println(s"infer ${vparam.symbol} from ${overriddenParam}? 
${vparam.tpt}") - if (vparam.tpt.isEmpty) { - val overriddenParamTp = overriddenParam.tpe.substSym(overriddenSyms, ourSyms) - // println(s"inferred ${vparam.symbol} : $overriddenParamTp") - // references to type parameters in overriddenParamTp link to the type skolems, so the - // assigned type is consistent with the other / existing parameter types in vparamSymss. - vparam.symbol setInfo overriddenParamTp - vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus - } - }} - } - @tailrec @inline def applyFully(tp: Type, paramss: List[List[Symbol]]): Type = if (paramss.isEmpty) tp match { case NullaryMethodType(rtpe) => rtpe @@ -1451,7 +1471,7 @@ trait Namers extends MethodSynthesis { val resTp = { // When return type is inferred, we don't just use resTpFromOverride -- it must be packed and widened. - // Here, C.f has type String: + // Here, C.f has type String (unless -Xsource-features:infer-override): // trait T { def f: Object }; class C extends T { def f = "" } // using resTpFromOverride as expected type allows for the following (C.f has type A): // trait T { def f: A }; class C extends T { implicit def b2a(t: B): A = ???; def f = new B } @@ -1465,18 +1485,34 @@ trait Namers extends MethodSynthesis { } // Add a () parameter section if this overrides some method with () parameters - val vparamSymssOrEmptyParamsFromOverride = - if (overridden != NoSymbol && vparamSymss.isEmpty && overridden.alternatives.exists(_.info.isInstanceOf[MethodType])) ListOfNil // NOTE: must check `.info.isInstanceOf[MethodType]`, not `.isMethod`! - else vparamSymss + val vparamSymssOrEmptyParamsFromOverride = { + // check the first override for paren purposes + def overridesNilary: Boolean = { + val toCheck = if (currentRun.isScala3) computeOverridden(immediate = true) else overridden + // must check `.info.isInstanceOf[MethodType]`, not `.isMethod`, to exclude NullaryMethodType. + // Note that the matching MethodType of a NullaryMethodType must be nilary not nelary. 
+ toCheck != NoSymbol && toCheck.alternatives.exists(_.info.isInstanceOf[MethodType]) + } + if (vparamSymss.isEmpty && overridesNilary) { + meth.updateAttachment(NullaryOverrideAdapted) + ListOfNil + } else vparamSymss + } val methSig = deskolemizedPolySig(vparamSymssOrEmptyParamsFromOverride, resTp) - pluginsTypeSig(methSig, typer, ddef, resTpGiven) + val unlink = methOwner.isJava && meth.isSynthetic && meth.isConstructor && methOwner.superClass == JavaRecordClass && + methOwner.info.decl(meth.name).alternatives.exists(c => c != meth && c.tpe.matches(methSig)) + if (unlink) { + methOwner.info.decls.unlink(meth) + ErrorType + } else + pluginsTypeSig(methSig, typer, ddef, resTpGiven) } /** * For every default argument, insert a method symbol computing that default */ - def enterDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef]) { + def enterDefaultGetters(meth: Symbol, @unused ddef: DefDef, vparamss: List[List[ValDef]], @unused tparams: List[TypeDef]): Unit = { val methOwner = meth.owner val search = DefaultGetterNamerSearch(context, meth, initCompanionModule = false) var posCounter = 1 @@ -1517,14 +1553,14 @@ trait Namers extends MethodSynthesis { * typechecked, the corresponding param would not yet have the "defaultparam" * flag. 
*/ - private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overridden: Symbol): Unit = { - val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(deriveDefDef(ddef)(_ => EmptyTree).duplicate) + private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], @unused tparams: List[TypeDef], overridden: Symbol): Unit = { + val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(deriveDefDef(ddef)(_ => EmptyTree).duplicate): @unchecked // having defs here is important to make sure that there's no sneaky tree sharing // in methods with multiple default parameters - def rtparams = rtparams0.map(_.duplicate) + def rtparams = rtparams0.map(_.duplicate) def rvparamss = rvparamss0.map(_.map(_.duplicate)) - val search = DefaultGetterNamerSearch(context, meth, initCompanionModule = true) - val overrides = overridden != NoSymbol && !overridden.isOverloaded + val search = DefaultGetterNamerSearch(context, meth, initCompanionModule = true) + val overrides = overridden != NoSymbol && !overridden.isOverloaded // value parameters of the base class (whose defaults might be overridden) var baseParamss = (vparamss, overridden.tpe.paramss) match { // match empty and missing parameter list @@ -1634,11 +1670,11 @@ trait Namers extends MethodSynthesis { else new DefaultMethodInOwningScope(c, meth) } private abstract class DefaultGetterNamerSearch { - def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree) + def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree): Unit def createAndEnter(f: Symbol => Symbol): Unit } - private class DefaultGetterInCompanion(c: Context, meth: Symbol, initCompanionModule: Boolean) extends DefaultGetterNamerSearch { + private class DefaultGetterInCompanion(@unused c: Context, meth: Symbol, initCompanionModule: Boolean) extends DefaultGetterNamerSearch { private val module = companionSymbolOf(meth.owner, context) if 
(initCompanionModule) module.initialize private val cda: Option[ConstructorDefaultsAttachment] = module.attachments.get[ConstructorDefaultsAttachment] @@ -1665,7 +1701,7 @@ trait Namers extends MethodSynthesis { moduleNamer match { case Some(namer) => val cdef = attachment.classWithDefault - val ClassDef(_, _, rtparams, _) = resetAttrs(deriveClassDef(cdef)(_ => Template(Nil, noSelfType, Nil)).duplicate) + val ClassDef(_, _, rtparams, _) = resetAttrs(deriveClassDef(cdef)(_ => Template(Nil, noSelfType, Nil)).duplicate): @unchecked val defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT))) val tree = create(namer, defTparams) namer.enterSyntheticSym(tree) @@ -1676,7 +1712,7 @@ trait Namers extends MethodSynthesis { } } - private class DefaultMethodInOwningScope(c: Context, meth: Symbol) extends DefaultGetterNamerSearch { + private class DefaultMethodInOwningScope(@unused c: Context, meth: Symbol) extends DefaultGetterNamerSearch { private lazy val ownerNamer: Namer = { val ctx = context.nextEnclosing(c => c.scope.toList.contains(meth)) // TODO use lookup rather than toList.contains assert(ctx != NoContext, meth) @@ -1691,82 +1727,94 @@ trait Namers extends MethodSynthesis { } } - private def valDefSig(vdef: ValDef) = { + private def valDefSig(vdef: ValDef): Type = { val ValDef(_, _, tpt, rhs) = vdef - val result = - if (tpt.isEmpty) { - if (rhs.isEmpty) { - MissingParameterOrValTypeError(tpt) - ErrorType - } else { - // enterGetterSetter assigns the getter's symbol to a ValDef when there's no underlying field - // (a deferred val or most vals defined in a trait -- see Field.noFieldFor) - val isGetter = vdef.symbol hasFlag ACCESSOR - - val pt = { - val valOwner = owner.owner - // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out - - if (!currentRun.isScala212 || !valOwner.isClass) WildcardType - else { - // normalize to getter so that we correctly consider a val overriding a def - 
// (a val's name ends in a " ", so can't compare to def) - val overridingSym = if (isGetter) vdef.symbol else vdef.symbol.getterIn(valOwner) + def inferredValTpt: Type = { + // enterGetterSetter assigns the getter's symbol to a ValDef when there's no underlying field + // (a deferred val or most vals defined in a trait -- see Field.noFieldFor) + val isGetter = vdef.symbol hasFlag ACCESSOR + + val pt: Type = { + val valOwner = owner.owner + if (!valOwner.isClass) WildcardType + else { + // normalize to getter so that we correctly consider a val overriding a def + // (a val's name ends in a " ", so can't compare to def) + val overridingSym = if (isGetter) vdef.symbol else vdef.symbol.getterIn(valOwner) - // We're called from an accessorTypeCompleter, which is completing the info for the accessor's symbol, - // which may or may not be `vdef.symbol` (see isGetter above) - val overridden = safeNextOverriddenSymbol(overridingSym) + // We're called from an accessorTypeCompleter, which is completing the info for the accessor's symbol, + // which may or may not be `vdef.symbol` (see isGetter above) + val overridden = safeNextOverriddenSymbol(overridingSym) - if (overridden == NoSymbol || overridden.isOverloaded) WildcardType - else valOwner.thisType.memberType(overridden).resultType - } - } + if (overridden == NoSymbol || overridden.isOverloaded) WildcardType + else valOwner.thisType.memberType(overridden).resultType + } + } - def patchSymInfo(tp: Type): Unit = - if (pt ne WildcardType) // no patching up to do if we didn't infer a prototype - vdef.symbol setInfo (if (isGetter) NullaryMethodType(tp) else tp) + def patchSymInfo(tp: Type): Unit = + if (pt ne WildcardType) // no patching up to do if we didn't infer a prototype + vdef.symbol.setInfo { if (isGetter) NullaryMethodType(tp) else tp } - patchSymInfo(pt) + patchSymInfo(pt) - // derives the val's result type from type checking its rhs under the expected type `pt` - // vdef.tpt is mutated, and `vdef.tpt.tpe` is 
`assignTypeToTree`'s result - val tptFromRhsUnderPt = assignTypeToTree(vdef, typer, pt) + if (vdef.hasAttachment[MultiDefAttachment.type]) + vdef.symbol.updateAttachment[MultiDefAttachment.type](MultiDefAttachment) - // need to re-align with assignTypeToTree, as the type we're returning from valDefSig (tptFromRhsUnderPt) - // may actually go to the accessor, not the valdef (and if assignTypeToTree returns a subtype of `pt`, - // we would be out of synch between field and its accessors), and thus the type completer won't - // fix the symbol's info for us -- we set it to tmpInfo above, which may need to be improved to tptFromRhsUnderPt - if (!isGetter) patchSymInfo(tptFromRhsUnderPt) + // derives the val's result type from type checking its rhs under the expected type `pt` + // vdef.tpt is mutated, and `vdef.tpt.tpe` is `assignTypeToTree`'s result + val tptFromRhsUnderPt = assignTypeToTree(vdef, typer, pt) - tptFromRhsUnderPt - } + // need to re-align with assignTypeToTree, as the type we're returning from valDefSig (tptFromRhsUnderPt) + // may actually go to the accessor, not the valdef (and if assignTypeToTree returns a subtype of `pt`, + // we would be out of synch between field and its accessors), and thus the type completer won't + // fix the symbol's info for us -- we set it to tmpInfo above, which may need to be improved to tptFromRhsUnderPt + if (!isGetter) patchSymInfo(tptFromRhsUnderPt) + + tptFromRhsUnderPt + } + val result: Type = + if (tpt.isEmpty) { + if (rhs.isEmpty) { MissingParameterOrValTypeError(tpt); ErrorType } + else inferredValTpt } else { val tptTyped = typer.typedType(tpt) context.unit.transformed(tpt) = tptTyped tptTyped.tpe } - // println(s"val: $result / ${vdef.tpt.tpe} / ") - pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result) } // Pretend we're an erroneous symbol, for now, so that we match while finding the overridden symbol, // but are not considered during implicit search. 
- private def safeNextOverriddenSymbol(sym: Symbol, schema: Type = ErrorType): Symbol = { + // `immediate` for immediate override only, not narrowest override + private def safeNextOverriddenSymbol(sym: Symbol, schema: Type = ErrorType, immediate: Boolean = false): Symbol = { val savedInfo = sym.rawInfo val savedFlags = sym.rawflags try { sym setInfo schema - sym.nextOverriddenSymbol + // pick the overridden symbol with narrowest type; dotty uses intersection + if (!immediate && currentRun.isScala3) { + def typeOf(s: Symbol): Type = { + val t = if (s.isMethod) s.asMethod.returnType else s.tpe + t.asSeenFrom(sym.owner.thisType, s.owner) + } + sym.allOverriddenSymbols match { + case Nil => NoSymbol + case overridden :: candidates => + candidates.foldLeft(overridden)((acc, o) => if (typeOf(o) <:< typeOf(acc)) o else acc) + } + } + else + sym.nextOverriddenSymbol } finally { sym setInfo savedInfo // setInfo resets the LOCKED flag, so restore saved flags as well sym.rawflags = savedFlags } } - private def safeNextOverriddenSymbolLazySchema(sym: Symbol, schema: () => Type): Symbol = - safeNextOverriddenSymbol(sym, new LazyType { override def complete(sym: Symbol): Unit = sym setInfo schema() }) + private def safeNextOverriddenSymbolLazySchema(sym: Symbol, schema: () => Type, immediate: Boolean): Symbol = + safeNextOverriddenSymbol(sym, new LazyType { override def complete(sym: Symbol): Unit = sym setInfo schema() }, immediate) //@M! 
an abstract type definition (abstract type member/type parameter) @@ -1776,12 +1824,9 @@ trait Namers extends MethodSynthesis { // log("typeDefSig(" + tpsym + ", " + tparams + ")") val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef) val tp = typer.typedType(rhs).tpe match { - case TypeBounds(lt, rt) if (lt.isError || rt.isError) => - TypeBounds.empty - case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) => - TypeBounds(lt, objToAny(rt)) - case tp => - tp + case TypeBounds(lt, rt) if lt.isError || rt.isError => TypeBounds.empty + case TypeBounds(lt, rt) if tdef.symbol.hasFlag(JAVA) => TypeBounds(lt, rt) + case tp => tp } // see neg/bug1275, #3419 // used to do a rudimentary kind check here to ensure overriding in refinements @@ -1808,9 +1853,6 @@ trait Namers extends MethodSynthesis { val Import(expr, selectors) = imp val expr1 = typer.typedQualifier(expr) - if (expr1.symbol != null && expr1.symbol.isRootPackage) - RootImportError(imp) - if (expr1.isErrorTyped) ErrorType else { @@ -1826,6 +1868,7 @@ trait Namers extends MethodSynthesis { val newImport = treeCopy.Import(imp, expr1, selectors) checkSelectors(newImport) context.unit.transformed(imp) = newImport + registerImport(context, newImport) // copy symbol and type attributes back into old expression // so that the structure builder will find it. expr setSymbol expr1.symbol setType expr1.tpe @@ -1845,7 +1888,7 @@ trait Namers extends MethodSynthesis { * @param cdef is the class definition of the case class * @param namer is the namer of the module class (the comp. 
obj) */ - def addApplyUnapply(cdef: ClassDef, namer: Namer) { + def addApplyUnapply(cdef: ClassDef, namer: Namer): Unit = { if (!cdef.symbol.hasAbstractFlag) namer.enterSyntheticSym(caseModuleApplyMeth(cdef)) @@ -1854,7 +1897,7 @@ trait Namers extends MethodSynthesis { namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef)) } - def addCopyMethod(cdef: ClassDef, namer: Namer) { + def addCopyMethod(cdef: ClassDef, namer: Namer): Unit = { caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym } @@ -1869,6 +1912,7 @@ trait Namers extends MethodSynthesis { try tree match { case member: MemberDef => createNamer(tree).memberSig(member) case imp: Import => importSig(imp) + case x => throw new MatchError(x) } catch typeErrorHandler(tree, ErrorType) } @@ -1880,7 +1924,7 @@ trait Namers extends MethodSynthesis { * or may not be visible. */ def annotSig(annotations: List[Tree], annotee: Tree, pred: AnnotationInfo => Boolean): List[AnnotationInfo] = - annotations filterNot (_ eq null) map { ann => + annotations.filterNot(_ eq null).map { ann => val ctx = typer.context // need to be lazy, #1782. enteringTyper to allow inferView in annotation args, scala/bug#5892. 
def computeInfo: AnnotationInfo = enteringTyper { @@ -1920,7 +1964,7 @@ trait Namers extends MethodSynthesis { case tdef: TypeDef => typeDefSig(tdef) case cdef: ClassDef => classSig(cdef) case mdef: ModuleDef => moduleSig(mdef) - // skip PackageDef + case x: PackageDef => throw new MatchError(x) // skip PackageDef } def includeParent(tpe: Type, parent: Symbol): Type = tpe match { @@ -1933,20 +1977,6 @@ trait Namers extends MethodSynthesis { tpe } - class LogTransitions[S](onEnter: S => String, onExit: S => String) { - @inline final def apply[T](entity: S)(body: => T): T = { - if (settings.isDebug) { - log(onEnter(entity)) - try body - finally log(onExit(entity)) - } else body - } - } - private val logDefinition = new LogTransitions[Symbol]( - sym => "[define] >> " + sym.flagString + " " + sym.fullLocationString, - sym => "[define] << " + sym - ) - /** Convert Java generic array type T[] to (T with Object)[] * (this is necessary because such arrays have a representation which is incompatible * with arrays of primitive types.) 
@@ -1973,12 +2003,12 @@ trait Namers extends MethodSynthesis { * - `def` modifier never for parameters of case classes * - declarations only in mixins or abstract classes (when not @native) */ - def validate(sym: Symbol) { + def validate(sym: Symbol): Unit = { import SymValidateErrors._ def fail(kind: SymValidateErrors.Value) = SymbolValidationError(sym, kind) - def checkNoConflict(flag1: Int, flag2: Int) = { - if (sym hasAllFlags flag1.toLong | flag2) + def checkNoConflict(flag1: Long, flag2: Long) = { + if (sym hasAllFlags flag1 | flag2) IllegalModifierCombination(sym, flag1, flag2) } if (sym.isImplicit) { @@ -2016,7 +2046,7 @@ trait Namers extends MethodSynthesis { checkNoConflict(ABSTRACT, FINAL) if (sym.isDeferred) { - def checkWithDeferred(flag: Int) = { + def checkWithDeferred(flag: Long) = { if (sym hasFlag flag) AbstractMemberWithModiferError(sym, flag) } @@ -2049,20 +2079,18 @@ trait Namers extends MethodSynthesis { // checkNoConflict(PRIVATE, OVERRIDE) // this one leads to bad error messages like #4174, so catch in refchecks // checkNoConflict(PRIVATE, FINAL) // can't do this because FINAL also means compile-time constant // checkNoConflict(ABSTRACT, FINAL) // this one gives a bad error for non-@inline classes which extend AnyVal - // @PP: I added this as a sanity check because these flags are supposed to be - // converted to ABSOVERRIDE before arriving here. + // @PP: I added this as a check because these flags are supposed to be converted to ABSOVERRIDE before arriving here. 
checkNoConflict(ABSTRACT, OVERRIDE) } } abstract class TypeCompleter extends LazyType { - val tree: Tree - override def forceDirectSuperclasses: Unit = { + def tree: Tree + override def forceDirectSuperclasses(): Unit = tree.foreach { case dt: DefTree => global.withPropagateCyclicReferences(Option(dt.symbol).map(_.maybeInitialize)) case _ => } - } } @deprecated("Instantiate TypeCompleterBase (for monomorphic, non-wrapping completer) or CompleterWrapper directly.", "2.12.2") @@ -2154,7 +2182,7 @@ trait Namers extends MethodSynthesis { case _ => mapOver(tp) } - def check(vparamss: List[List[Symbol]]) { + def check(vparamss: List[List[Symbol]]): Unit = { for (vps <- vparamss) { for (p <- vps) this(p.info) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 822583029945..e22b682b98ca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,14 +13,13 @@ package scala.tools.nsc package typechecker -import symtab.Flags._ import scala.collection.mutable import scala.reflect.ClassTag -import PartialFunction.{ cond => when } +import symtab.Flags._ +import PartialFunction.cond /** * @author Lukas Rytz - * @version 1.0 */ trait NamesDefaults { self: Analyzer => @@ -56,20 +55,24 @@ trait NamesDefaults { self: Analyzer => qual: Option[Tree], targs: List[Tree], vargss: List[List[Tree]], - blockTyper: Typer - ) { } + blockTyper: Typer, + original: Tree, + ) object NamedApplyBlock { private[this] val tag = reflect.classTag[NamedApplyInfo] + def namedApplyInfo(t: Tree): Option[NamedApplyInfo] = t.attachments.get[NamedApplyInfo](tag) def unapply(b: Tree): Option[NamedApplyInfo] = b match { - case _: Block => b.attachments.get[NamedApplyInfo](tag) + case _: Block => namedApplyInfo(b) case _ => None } + def apply(stats: List[Tree], expr: Tree)(nai: NamedApplyInfo): Block = + Block(stats, expr.updateAttachment(nai)).updateAttachment(nai) } - private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name } + private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case NamedArg(Ident(name), _) => name } def isNamedArg(arg: Tree) = arg match { - case AssignOrNamedArg(Ident(_), _) => true - case _ => false + case NamedArg(Ident(_), _) => true + case _ => false } /** @param pos maps indices from old to new */ @@ -82,7 +85,7 @@ trait NamesDefaults { self: Analyzer => /** @param pos maps indices from new to old (!) 
*/ private def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = { val argsArray = args.toArray - (argsArray.indices map (i => argsArray(pos(i)))).toList + argsArray.indices.map(i => argsArray(pos(i))).toList } /** returns `true` if every element is equal to its index */ @@ -118,8 +121,8 @@ trait NamesDefaults { self: Analyzer => * @param mode the mode to use for calling typer.doTypedApply * @param pt the expected type for calling typer.doTypedApply * - * @param tree: the function application tree - * @argPos: a function mapping arguments from their current position to the + * @param tree the function application tree + * @param argPos a function mapping arguments from their current position to the * position specified by the method type. example: * def foo(a: Int, b: String) * foo(b = "1", a = 2) @@ -185,11 +188,12 @@ trait NamesDefaults { self: Analyzer => // never used for constructor calls, they always have a stable qualifier def blockWithQualifier(qual: Tree, selected: Name) = { - val sym = blockTyper.context.owner.newValue(freshTermName(nme.QUAL_PREFIX)(typer.fresh), newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent) + val sym = blockTyper.context.owner.newValue(freshTermName(nme.QUAL_PREFIX)(typer.fresh), newFlags = ARTIFACT) + .setInfo(uncheckedBounds(qual.tpe)) + .setPos(qual.pos.makeTransparent) blockTyper.context.scope enter sym val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType) - // it stays in Vegas: scala/bug#5720, scala/bug#5727 - qual changeOwner (blockTyper.context.owner, sym) + qual.changeOwner(blockTyper.context.owner, sym) // scala/bug#5720, scala/bug#5727 val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name))) val baseFunTransformed = atPos(baseFun.pos.makeTransparent) { @@ -197,24 +201,23 @@ trait NamesDefaults { self: Analyzer => // assigning the correct method symbol, typedSelect will just assign the type. 
the reason // to still call 'typed' is to correctly infer singleton types, scala/bug#5259. val selectPos = - if(qual.pos.isRange && baseFun1.pos.isRange) qual.pos.union(baseFun1.pos).withStart(Math.min(qual.pos.end, baseFun1.pos.end)) + if (qual.pos.isRange && baseFun1.pos.isRange) + if (qual.pos == baseFun1.pos) qual.pos + else baseFun1.pos.union(qual.pos).withStart(Math.min(qual.pos.end, baseFun1.pos.end)) // use basefun point; why isn't start always qual.pos.end else baseFun1.pos val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos)) if (funTargs.isEmpty) f else TypeApply(f, funTargs).setType(baseFun.tpe) } - val b = Block(List(vd), baseFunTransformed) - .setType(baseFunTransformed.tpe).setPos(baseFun.pos.makeTransparent) - b.updateAttachment(NamedApplyInfo(Some(newQual), defaultTargs, Nil, blockTyper)) - b + NamedApplyBlock(List(vd), baseFunTransformed)(NamedApplyInfo(Some(newQual), defaultTargs, Nil, blockTyper, tree)) + .setType(baseFunTransformed.tpe) + .setPos(baseFun.pos.makeTransparent) } - def blockWithoutQualifier(defaultQual: Option[Tree]) = { - val b = atPos(baseFun.pos)(Block(Nil, baseFun).setType(baseFun.tpe)) - b.updateAttachment(NamedApplyInfo(defaultQual, defaultTargs, Nil, blockTyper)) - b - } + def blockWithoutQualifier(defaultQual: Option[Tree]) = + atPos(baseFun.pos)(NamedApplyBlock(Nil, baseFun)(NamedApplyInfo(defaultQual, defaultTargs, Nil, blockTyper, tree))) + .setType(baseFun.tpe) def moduleQual(pos: Position, classType: Type) = { // prefix does 'normalize', which fixes #3384 @@ -278,6 +281,8 @@ trait NamesDefaults { self: Analyzer => blockWithoutQualifier(Some(qual.duplicate)) else blockWithQualifier(qual, name) + + case x => throw new MatchError(x) } } @@ -301,18 +306,15 @@ trait NamesDefaults { self: Analyzer => case _ => val byName = isByNameParamType(paramTpe) val repeated = isScalaRepeatedParamType(paramTpe) - val argTpe = ( - if (repeated) arg match { + // TODO In 83c9c764b, we 
tried to a stable type here to fix scala/bug#7234. But the resulting TypeTree over a + // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (scala/bug#7516), + // which is important for (at least) macros. + val argTpe = + arg match { + case _ if !repeated => arg.tpe case WildcardStarArg(expr) => expr.tpe - case _ => seqType(arg.tpe) + case _ => seqType(arg.tpe.widen) // avoid constant type } - else { - // TODO In 83c9c764b, we tried to a stable type here to fix scala/bug#7234. But the resulting TypeTree over a - // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (scala/bug#7516), - // which is important for (at least) macros. - arg.tpe - } - ).widen // have to widen or types inferred from literal defaults will be singletons val s = context.owner.newValue(freshTermName(nme.NAMEDARG_PREFIX)(typer.fresh), arg.pos, newFlags = ARTIFACT) setInfo { val tp = if (byName) functionType(Nil, argTpe) else argTpe uncheckedBounds(tp) @@ -329,10 +331,11 @@ trait NamesDefaults { self: Analyzer => res } else { new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502 - if (repeated) arg match { + arg match { + case _ if !repeated => arg case WildcardStarArg(expr) => expr - case _ => blockTyper typed gen.mkSeqApply(resetAttrs(arg)) - } else arg + case _ => blockTyper.typed(gen.mkSeqApply(resetAttrs(arg))) + } } Some(atPos(body.pos)(ValDef(sym, body).setType(NoType))) } @@ -340,50 +343,61 @@ trait NamesDefaults { self: Analyzer => // begin transform tree match { - case NamedApplyBlock(info) => tree + case NamedApplyBlock(_) => tree // `fun` is typed. `namelessArgs` might be typed or not, if they are types are kept. 
case Apply(fun, namelessArgs) => val transformedFun = transformNamedApplication(typer, mode, pt)(fun, x => x) if (transformedFun.isErroneous) setError(tree) else { - val NamedApplyBlock(NamedApplyInfo(qual, targs, vargss, blockTyper)) = transformedFun - val Block(stats, funOnly) = transformedFun + val NamedApplyBlock(NamedApplyInfo(qual, targs, vargss, blockTyper, _)) = transformedFun: @unchecked + val Block(stats, funOnly) = transformedFun: @unchecked // type the application without names; put the arguments in definition-site order val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt) typedApp match { - case Apply(expr, typedArgs) if (typedApp :: typedArgs).exists(_.isErrorTyped) => + case Apply(_, typedArgs) if (typedApp :: typedArgs).exists(_.isErrorTyped) => setError(tree) // bail out with and erroneous Apply *or* erroneous arguments, see scala/bug#7238, scala/bug#7509 case Apply(expr, typedArgs) => - // Extract the typed arguments, restore the call-site evaluation order (using - // ValDef's in the block), change the arguments to these local values. 
- - // typedArgs: definition-site order - val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false) - // valDefs: call-site order - val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos), - reorderArgsInv(formals, argPos), - blockTyper) - // refArgs: definition-site order again - val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match { - case None => origArg - case Some(vDef) => - val ref = gen.mkAttributedRef(vDef.symbol) - atPos(vDef.pos.focus) { - // for by-name parameters, the local value is a nullary function returning the argument - tpe.typeSymbol match { - case ByNameParamClass => Apply(ref, Nil) - case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR)) - case _ => ref + val isAnnot = mode.in(Mode.ANNOTmode) && { + val s = funOnly.symbol + s != null && s.isConstructor && s.owner.isNonBottomSubClass(AnnotationClass) + } + + if (isAnnot) { + NamedApplyBlock(stats, typedApp)(NamedApplyInfo(qual, targs, vargss :+ typedArgs, blockTyper, tree)) + .setType(typedApp.tpe) + .setPos(tree.pos.makeTransparent) + } else { + // Extract the typed arguments, restore the call-site evaluation order (using + // ValDef's in the block), change the arguments to these local values. 
+ + // typedArgs: definition-site order + val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false) + // valDefs: call-site order + val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos), + reorderArgsInv(formals, argPos), + blockTyper) + // refArgs: definition-site order again + val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match { + case None => origArg + case Some(vDef) => + val ref = gen.mkAttributedRef(vDef.symbol) + atPos(vDef.pos.focus) { + // for by-name parameters, the local value is a nullary function returning the argument + tpe.typeSymbol match { + case ByNameParamClass => Apply(ref, Nil) + case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR)) + case _ => origArg.attachments.get[UnnamedArg.type].foreach(ref.updateAttachment); ref + } } - } - }) - // cannot call blockTyper.typedBlock here, because the method expr might be partially applied only - val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt) - res.setPos(res.pos.makeTransparent) - val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent) - block.updateAttachment(NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)) - block + }) + // cannot call blockTyper.typedBlock here, because the method expr might be partially applied only + val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt) + res.setPos(res.pos.makeTransparent) + NamedApplyBlock(stats ::: valDefs.flatten, res)(NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper, tree)) + .setType(res.tpe) + .setPos(tree.pos.makeTransparent) + } case _ => tree } } @@ -406,7 +420,7 @@ trait NamesDefaults { self: Analyzer => */ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name]): (List[Symbol], Boolean) = { // The argument list contains first a mix of positional args and named args that are on the - // right parameter 
position, and then a number or named args on different positions. + // right parameter position, and then a number of named args on different positions. // collect all named arguments whose position does not match the parameter they define val namedArgsOnChangedPosition = args.zip(params) dropWhile { @@ -414,9 +428,8 @@ trait NamesDefaults { self: Analyzer => val n = argName(arg) // drop the argument if // - it's not named, or - // - it's named, but defines the parameter on its current position, or - // - it's named, but none of the parameter names matches (treated as a positional argument, an assignment expression) - n.isEmpty || n.get == param.name || params.forall(_.name != n.get) + // - it's named, but defines the parameter on its current position + n.isEmpty || n.get == param.name } map (_._1) val paramsWithoutPositionalArg = params.drop(args.length - namedArgsOnChangedPosition.length) @@ -434,6 +447,8 @@ trait NamesDefaults { self: Analyzer => * Extend the argument list `givenArgs` with default arguments. Defaults are added * as named arguments calling the corresponding default getter. * + * Returns the extended arg list and missing parameters if any. 
+ * * Example: given * def foo(x: Int = 2, y: String = "def") * foo(y = "lt") @@ -441,32 +456,43 @@ trait NamesDefaults { self: Analyzer => */ def addDefaults(givenArgs: List[Tree], qual: Option[Tree], targs: List[Tree], previousArgss: List[List[Tree]], params: List[Symbol], - pos: scala.reflect.internal.util.Position, context: Context): (List[Tree], List[Symbol]) = { + pos: scala.reflect.internal.util.Position, context: Context, mode : Mode): (List[Tree], List[Symbol]) = { if (givenArgs.length < params.length) { val (missing, positional) = missingParams(givenArgs, params, nameOfNamedArg) - if (missing forall (_.hasDefault)) { - val defaultArgs = missing flatMap (p => { - val defGetter = defaultGetter(p, context) - // TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope) - if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649 - else { - var default1: Tree = qual match { - case Some(q) => gen.mkAttributedSelect(q.duplicate, defGetter) - case None => gen.mkAttributedRef(defGetter) - + if (missing.forall(_.hasDefault)) { + val defaultArgs = missing flatMap { p => + val annDefault = + if (mode.in(Mode.ANNOTmode) && p.owner.isConstructor && p.enclClass.isNonBottomSubClass(AnnotationClass) && !p.enclClass.isNonBottomSubClass(ConstantAnnotationClass)) + p.getAnnotation(DefaultArgAttr).flatMap(_.args.headOption).map(dflt => atPos(pos) { + // The `arg.tpe` is tagged with the `@defaultArg` annotation, see AnnotationInfo.argIsDefault + val arg = dflt.duplicate.setType(dflt.tpe.withAnnotation(AnnotationInfo(DefaultArgAttr.tpe, Nil, Nil))) + if (positional) arg + else NamedArg(Ident(p.name), arg) + }) + else None + annDefault orElse { + val defGetter = defaultGetter(p, context) + // TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope) + if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649 + else { + var default1: Tree = qual 
match { + case Some(q) => gen.mkAttributedSelect(q.duplicate, defGetter) + case None => gen.mkAttributedRef(defGetter) + + } + default1 = if (targs.isEmpty) default1 + else TypeApply(default1, targs.map(_.duplicate)) + val default2 = previousArgss.foldLeft(default1)((tree, args) => + Apply(tree, args.map(_.duplicate))) + Some(atPos(pos) { + if (positional) default2 + else NamedArg(Ident(p.name), default2) + }) } - default1 = if (targs.isEmpty) default1 - else TypeApply(default1, targs.map(_.duplicate)) - val default2 = (default1 /: previousArgss)((tree, args) => - Apply(tree, args.map(_.duplicate))) - Some(atPos(pos) { - if (positional) default2 - else AssignOrNamedArg(Ident(p.name), default2) - }) } - }) + } (givenArgs ::: defaultArgs, Nil) - } else (givenArgs, missing filterNot (_.hasDefault)) + } else (givenArgs, missing.filterNot(_.hasDefault)) } else (givenArgs, Nil) } @@ -477,9 +503,33 @@ trait NamesDefaults { self: Analyzer => def defaultGetter(param: Symbol, context: Context): Symbol = { val i = param.owner.paramss.flatten.indexWhere(p => p.name == param.name) + 1 if (i > 0) { - val defGetterName = nme.defaultGetterName(param.owner.name, i) - if (param.owner.isConstructor) { - val mod = companionSymbolOf(param.owner.owner, context) + + def isScala3SyntheticApply(meth: Symbol): Boolean = { + // According to rules in Scala 3, a synthetic method named `apply` + // should use `` as the prefix of its default getters, + // i.e. reuse the constructor's default getters. 
+ // We add some more precision - also verify that `apply` + // is defined in a module which has a case class companion + + def isModuleWithCaseClassCompanion(owner: Symbol) = ( + owner.isModuleClass + && linkedClassOfClassOf(owner, context).isCaseClass + ) + + (meth.isScala3Defined + && meth.isSynthetic + && meth.name == nme.apply + && isModuleWithCaseClassCompanion(meth.owner)) + } + + val scala3SynthApply = isScala3SyntheticApply(param.owner) + val defGetterName = { + val methodName = if (scala3SynthApply) nme.CONSTRUCTOR else param.owner.name + nme.defaultGetterName(methodName, i) + } + if (scala3SynthApply || param.owner.isConstructor) { + val scope = param.owner.owner + val mod = if (scala3SynthApply) scope else companionSymbolOf(scope, context) mod.info.member(defGetterName) } else { @@ -494,93 +544,17 @@ trait NamesDefaults { self: Analyzer => } else NoSymbol } - def isVariableInScope(context: Context, name: Name): Boolean = { - context.lookupSymbol(name, _.isVariable).isSuccess - } - - /** A full type check is very expensive; let's make sure there's a name - * somewhere which could potentially be ambiguous before we go that route. - */ - private def isAmbiguousAssignment(typer: Typer, param: Symbol, arg: Tree) = { - import typer.context - (context isNameInScope param.name) && { - // for named arguments, check whether the assignment expression would - // typecheck. if it does, report an ambiguous error. - val paramtpe = param.tpe.cloneInfo(param) - // replace type parameters by wildcard. in the below example we need to - // typecheck (x = 1) with wildcard (not T) so that it succeeds. 
- // def f[T](x: T) = x - // var x = 0 - // f(x = 1) << "x = 1" typechecks with expected type WildcardType - val udp = context.undetparams - context.savingUndeterminedTypeParams(reportAmbiguous = false) { - val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) { - override def apply(tp: Type): Type = super.apply(dropByName(tp)) - } - // This throws an exception which is caught in `tryTypedApply` (as it - // uses `silent`) - unfortunately, tryTypedApply recovers from the - // exception if you use errorTree(arg, ...) and conforms is allowed as - // a view (see tryImplicit in Implicits) because it tries to produce a - // new qualifier (if the old one was P, the new one will be - // conforms.apply(P)), and if that works, it pretends nothing happened. - // - // To make sure tryTypedApply fails, we would like to pass EmptyTree - // instead of arg, but can't do that because eventually setType(ErrorType) - // is called, and EmptyTree can only be typed NoType. Thus we need to - // disable conforms as a view... - val errsBefore = reporter.errorCount - try typer.silent { tpr => - val res = tpr.typed(arg.duplicate, subst(paramtpe)) - // better warning for scala/bug#5044: if `silent` was not actually silent give a hint to the user - // [H]: the reason why `silent` is not silent is because the cyclic reference exception is - // thrown in a context completely different from `context` here. The exception happens while - // completing the type, and TypeCompleter is created/run with a non-silent Namer `context` - // and there is at the moment no way to connect the two unless we go through some global state. - if (errsBefore < reporter.errorCount) - WarnAfterNonSilentRecursiveInference(param, arg)(context) - res - } match { - case SilentResultValue(t) => - !t.isErroneous // #4041 - case SilentTypeError(e: NormalTypeErrorFromCyclicReference) => - // If we end up here, the CyclicReference was reported in a silent context. 
This can - // happen for local definitions, when the completer for a definition is created during - // type checking in silent mode. ContextErrors.TypeSigError catches that cyclic reference - // and transforms it into a NormalTypeErrorFromCyclicReference. - // The cycle needs to be reported, because the program cannot be typed: we don't know - // if we have an assignment or a named arg. - context.issue(e) - // 'err = true' is required because we're in a silent context - WarnAfterNonSilentRecursiveInference(param, arg)(context) - false - case _ => - // We got a type error, so it cannot be an assignment (it doesn't type check as one). - false - } - catch { - // `silent` only catches and returns TypeErrors which are not - // CyclicReferences. Fix for #3685 - case cr @ CyclicReference(sym, _) => - (sym.name == param.name) && sym.accessedOrSelf.isVariable && { - NameClashError(sym, arg)(context) - true - } - } - } - } - } - /** Removes name assignments from args. Additionally, returns an array mapping * argument indices from call-site-order to definition-site-order. * * Verifies that names are not specified twice, and positional args don't appear after named ones. 
*/ def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = { - implicit val context0 = typer.context + implicit val context0: Context = typer.context def matchesName(param: Symbol, name: Name, argIndex: Int) = { def warn(msg: String, since: String) = context0.deprecationWarning(args(argIndex).pos, param, msg, since) def checkDeprecation(anonOK: Boolean) = - when (param.deprecatedParamName) { + cond(param.deprecatedParamName) { case Some(`name`) => true case Some(nme.NO_NAME) => anonOK } @@ -588,11 +562,11 @@ trait NamesDefaults { self: Analyzer => def since = if (version.isEmpty) version else s" (since $version)" def checkName = { val res = param.name == name - if (res && checkDeprecation(true)) warn(s"naming parameter $name is deprecated$since.", version) + if (res && checkDeprecation(anonOK = true)) warn(s"naming parameter $name is deprecated$since.", version) res } def checkAltName = { - val res = checkDeprecation(false) + val res = checkDeprecation(anonOK = false) if (res) warn(s"the parameter name $name is deprecated$since: use ${param.name} instead", version) res } @@ -602,35 +576,17 @@ trait NamesDefaults { self: Analyzer => val argPos = Array.fill(args.length)(-1) val namelessArgs = { var positionalAllowed = true - def stripNamedArg(arg: AssignOrNamedArg, argIndex: Int): Tree = { - val AssignOrNamedArg(Ident(name), rhs) = arg - def invokesDefault: Boolean = rhs match { - case Select(_, f) => f.startsWith(nme.DEFAULT_GETTER_INIT_STRING) || f.indexOf(nme.DEFAULT_GETTER_STRING) >= 0 - case _ => false - } - params indexWhere (p => matchesName(p, name, argIndex)) match { - case -1 if positionalAllowed && !currentRun.isScala213 => - if (isVariableInScope(context0, name)) { - // only issue the deprecation warning if `name` is in scope, this avoids the warning when mis-spelling a parameter name. 
- context0.deprecationWarning( - arg.pos, - context0.owner, - s"assignments in argument position are deprecated in favor of named arguments. Wrap the assignment in brackets, e.g., `{ $name = ... }`.", - "2.12.4") - } - // prevent isNamed from being true when calling doTypedApply recursively, - // treat the arg as an assignment of type Unit - Assign(arg.lhs, rhs) setPos arg.pos + def stripNamedArg(arg: NamedArg, argIndex: Int): Tree = { + val NamedArg(Ident(name), rhs) = arg: @unchecked + params.indexWhere(p => matchesName(p, name, argIndex)) match { case -1 => - UnknownParameterNameNamesDefaultError(arg, name, isVariableInScope(context0, name)) + UnknownParameterNameNamesDefaultError(arg, name, warnVariableInScope = context0.lookupSymbol(name, _.isVariable).isSuccess) case paramPos if argPos contains paramPos => val existingArgIndex = argPos.indexWhere(_ == paramPos) val otherName = Some(args(paramPos)) collect { - case AssignOrNamedArg(Ident(oName), _) if oName != name => oName + case NamedArg(Ident(oName), _) if oName != name => oName } DoubleParamNamesDefaultError(arg, name, existingArgIndex+1, otherName) - case paramPos if !currentRun.isScala213 && !invokesDefault && isAmbiguousAssignment(typer, params(paramPos), arg) => - AmbiguousReferenceInNamesDefaultError(arg, name) case paramPos if paramPos != argIndex => positionalAllowed = false // named arg is not in original parameter order: require names after this argPos(argIndex) = paramPos // fix up the arg position @@ -639,16 +595,14 @@ trait NamesDefaults { self: Analyzer => } } mapWithIndex(args) { - case (arg: AssignOrNamedArg, argIndex) => + case (arg: NamedArg, argIndex) => val t = stripNamedArg(arg, argIndex) if (!t.isErroneous && argPos(argIndex) < 0) argPos(argIndex) = argIndex t - case (arg, argIndex) => - if (positionalAllowed) { - argPos(argIndex) = argIndex - arg - } else - PositionalAfterNamedNamesDefaultError(arg) + case (arg, argIndex) if positionalAllowed => + argPos(argIndex) = argIndex + arg + 
case (arg, _) => PositionalAfterNamedNamesDefaultError(arg) } } (namelessArgs, argPos) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 0f7f6bd14466..45cf52bce8ee 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,15 +19,17 @@ import scala.collection.mutable import symtab.Flags import Mode._ - /** +/** + * A pattern match such as: * - * A pattern match such as - * - * x match { case Foo(a, b) => ...} + * {{{ + * x match { case Foo(a, b) => ...} + * }}} * * Might match an instance of any of the following definitions of Foo. * Note the analogous treatment between case classes and unapplies. * + * {{{ * case class Foo(xs: Int*) * case class Foo(a: Int, xs: Int*) * case class Foo(a: Int, b: Int) @@ -37,8 +39,8 @@ import Mode._ * object Foo { def unapplySeq(x: Any): Option[(Int, Seq[Int])] } * object Foo { def unapply(x: Any): Option[(Int, Int)] } * object Foo { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] } + * }}} */ - trait PatternTypers { self: Analyzer => @@ -53,12 +55,10 @@ trait PatternTypers { // If the tree's symbol's type does not define an extractor, maybe the tree's type does. // this is the case when we encounter an arbitrary tree as the target of an unapply call - // (rather than something that looks like a constructor call.) (for now, this only happens - // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become - // more common place) + // (rather than something that looks like a constructor call.) 
+ // (happens due to wrapClassTagUnapply) private def hasUnapplyMember(tpe: Type): Boolean = reallyExists(unapplyMember(tpe)) private def hasUnapplyMember(sym: Symbol): Boolean = hasUnapplyMember(sym.tpe_*) - private def hasUnapplyMember(fun: Tree): Boolean = hasUnapplyMember(fun.symbol) || hasUnapplyMember(fun.tpe) // ad-hoc overloading resolution to deal with unapplies and case class constructors // If some but not all alternatives survive filtering the tree's symbol with `p`, @@ -90,6 +90,21 @@ trait PatternTypers { val member = unapplyMember(fun.tpe) def resultType = (fun.tpe memberType member).finalResultType def isEmptyType = resultOfIsEmpty(resultType) + + def useConstructor = ( + // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala + // Use the case class constructor if (after canElide + isCase) the unapply method: + // (1) doesn't exist, e.g. case classes with 23+ params. run/case-class-23.scala + // (2) is the synthetic case class one, i.e. not user redefined. pos/t11252.scala + // (3a) is overloaded and the synthetic case class one is still present (i.e. not suppressed) pos/t12250.scala + // (3b) the scrutinee type is the case class (not a subtype). pos/overloaded-unapply.scala vs pos/t12250b.scala + canElide && caseClass.isCase && ( + member == NoSymbol // (1) + || member.isSynthetic // (2) + || (member.alternatives.exists(_.isSynthetic) && caseClass.tpe =:= pt) // (3a)(3b) + ) + ) + def isOkay = ( resultType.isErroneous || (resultType <:< BooleanTpe) @@ -98,13 +113,13 @@ trait PatternTypers { || member.isOverloaded // the whole overloading situation is over the rails ) - // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala - // A case class with 23+ params has no unapply method. - // A case class constructor may be overloaded with unapply methods in the companion. 
- if (canElide && caseClass.isCase && !member.isOverloaded) + // if we're already failing, no need to emit another error here + if (fun.tpe.isErroneous) + fun + else if (useConstructor) logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt)) else if (!reallyExists(member)) - CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member") + CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have a valid unapply/unapplySeq member") else if (isOkay) fun else if (isEmptyType == NoType) @@ -143,22 +158,25 @@ trait PatternTypers { } protected def typedStarInPattern(tree: Tree, mode: Mode, pt: Type) = { - val Typed(expr, tpt) = tree + val Typed(expr, tpt) = tree: @unchecked val exprTyped = typed(expr, mode) val baseClass = exprTyped.tpe.typeSymbol match { case ArrayClass => ArrayClass case NothingClass => NothingClass + case NullClass => NullClass case _ => SeqClass } val starType = baseClass match { case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt) case ArrayClass => boundedArrayType(pt) + case NullClass => seqType(NothingTpe) case _ => seqType(pt) } val exprAdapted = adapt(exprTyped, mode, starType) - exprAdapted.tpe baseType baseClass match { + exprAdapted.tpe.baseType(baseClass) match { case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp case _ if baseClass eq NothingClass => exprAdapted + case _ if baseClass eq NullClass => treeCopy.Typed(tree, exprAdapted, tpt.setType(NothingTpe)).setType(NothingTpe) case _ => setError(tree) } } @@ -176,7 +194,7 @@ trait PatternTypers { case _ => extractor.nonEmpty } - val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy, isUnapply = false) + val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy = canRemedy, isUnapply = false) val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType 
ownType extractor match { @@ -184,7 +202,8 @@ trait PatternTypers { case _ => wrapClassTagUnapply(treeTyped, extractor, tpe) } } - private class VariantToSkolemMap extends TypeMap(trackVariance = true) { + + private class VariantToSkolemMap extends VariancedTypeMap { private val skolemBuffer = mutable.ListBuffer[TypeSymbol]() // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see scala/bug#5189 @@ -192,21 +211,21 @@ trait PatternTypers { def eligible(tparam: Symbol) = ( tparam.isTypeParameterOrSkolem && tparam.owner.isTerm - && (settings.strictInference || !variance.isInvariant) + && !variance.isInvariant ) def skolems = try skolemBuffer.toList finally skolemBuffer.clear() def apply(tp: Type): Type = mapOver(tp) match { - case tp @ TypeRef(NoPrefix, tpSym, Nil) if eligible(tpSym) => - val bounds = ( - if (variance.isInvariant) tpSym.tpeHK.bounds - else if (variance.isPositive) TypeBounds.upper(tpSym.tpeHK) - else TypeBounds.lower(tpSym.tpeHK) + case TypeRef(NoPrefix, tpSym, Nil) if eligible(tpSym) => + val bounds = genPolyType(tpSym.typeParams, + if (variance.isInvariant) tpSym.tpe.bounds + else if (variance.isPositive) TypeBounds.upper(tpSym.tpe) + else TypeBounds.lower(tpSym.tpe) ) // origin must be the type param so we can deskolemize val skolem = context.owner.newGADTSkolem(freshTypeName("?" + tpSym.name), tpSym, bounds) skolemBuffer += skolem - logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*) + logResult(s"Created gadt skolem $skolem: ${skolem.tpeHK} to stand in for $tpSym")(skolem.tpeHK) case tp1 => tp1 } } @@ -239,29 +258,34 @@ trait PatternTypers { * see test/files/../t5189*.scala */ private def convertToCaseConstructor(tree: Tree, caseClass: Symbol, ptIn: Type): Tree = { - // TODO scala/bug#7886 / scala/bug#5900 This is well intentioned but doesn't quite hit the nail on the head. - // For now, I've put it completely behind -Xstrict-inference. 
- val untrustworthyPt = settings.strictInference && ( - ptIn =:= AnyTpe - || ptIn =:= NothingTpe - || ptIn.typeSymbol != caseClass - ) val variantToSkolem = new VariantToSkolemMap - val caseClassType = tree.tpe.prefix memberType caseClass - val caseConstructorType = caseClassType memberType caseClass.primaryConstructor - val tree1 = TypeTree(caseConstructorType) setOriginal tree - val pt = if (untrustworthyPt) caseClassType else ptIn + + // `caseClassType` is the prefix from which we're seeing the constructor info, so it must be kind *. + // Need the `initialize` call to make sure we see any type params. + val caseClassType = caseClass.initialize.tpe_*.asSeenFrom(tree.tpe.prefix, caseClass.owner) + assert(!caseClassType.isHigherKinded, s"Unexpected type constructor $caseClassType") + + // If the case class is polymorphic, need to capture those type params in the type that we relativize using asSeenFrom, + // as they may also be sensitive to the prefix (see test/files/pos/t11103.scala). + // Note that undetParams may thus be different from caseClass.typeParams. + // (For a monomorphic case class, GenPolyType will not create/destruct a PolyType.) 
+ val GenPolyType(undetparams, caseConstructorType) = + GenPolyType(caseClass.typeParams, caseClass.primaryConstructor.info).asSeenFrom(caseClassType, caseClass) + + // log(s"convertToCaseConstructor(${tree.tpe}, $caseClass, $ptIn) // $caseClassType // ${caseConstructorType.typeParams.map(_.info)}") + + val tree1 = TypeTree(caseConstructorType) setOriginal tree // have to open up the existential and put the skolems in scope // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance) - val ptSafe = logResult(s"case constructor from (${tree.summaryString}, $caseClassType, $pt)")(variantToSkolem(pt)) + val ptSafe = logResult(s"case constructor from (${tree.summaryString}, $caseClassType, $ptIn)")(variantToSkolem(ptIn)) val freeVars = variantToSkolem.skolems // use "tree" for the context, not context.tree: don't make another CaseDef context, // as instantiateTypeVar's bounds would end up there val ctorContext = context.makeNewScope(tree, context.owner) - freeVars foreach ctorContext.scope.enter - newTyper(ctorContext).infer.inferConstructorInstance(tree1, caseClass.typeParams, ptSafe) + freeVars.foreach(ctorContext.scope.enter(_)) + newTyper(ctorContext).infer.inferConstructorInstance(tree1, undetparams, ptSafe) // simplify types without losing safety, // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems @@ -303,13 +327,12 @@ trait PatternTypers { case OverloadedType(_, _) => OverloadedUnapplyError(funOverloadResolved); ErrorType case _ => UnapplyWithSingleArgError(funOverloadResolved); ErrorType } - val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree)) - val unapplyContext = context.makeNewScope(context.tree, context.owner) - freeVars foreach unapplyContext.scope.enter - val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy, isUnapply = 
true) + val unapplyContext = context.makeNewScope(tree, context.owner) + freeVars.foreach(unapplyContext.scope.enter(_)) + val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy = canRemedy, isUnapply = true) // turn any unresolved type variables in freevars into existential skolems - val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) + val skolems = freeVars.map(fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) pattp.substSym(freeVars, skolems) } } @@ -377,10 +400,7 @@ trait PatternTypers { } // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) // but at least make a proper type before passing it elsewhere - val pt1 = if (settings.isScala213) applyTypeToWildcards(pt.dealiasWiden) else pt.dealiasWiden match { - case tr@TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies - case pt1 => pt1 - } + val pt1 = applyTypeToWildcards(pt.dealiasWiden) if (isCheckable(pt1)) EmptyTree else resolveClassTag(pos, pt1) match { case tree if unapplyMember(tree.tpe).exists => tree diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 8270e998529b..253779e3e5af 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,42 +13,32 @@ package scala.tools.nsc package typechecker -import scala.language.postfixOps - +import scala.annotation._ import scala.collection.mutable import scala.collection.mutable.ListBuffer -import scala.tools.nsc.Reporting.WarningCategory +import scala.reflect.internal.util.CodeAction +import scala.tools.nsc.Reporting.WarningCategory, WarningCategory.{LintOverload} import scala.tools.nsc.settings.ScalaVersion import scala.tools.nsc.settings.NoScalaVersion import symtab.Flags._ import transform.Transform - -/**

    - * Post-attribution checking and transformation. - *

    - *

    - * This phase performs the following checks. - *

    - *
      - *
    • All overrides conform to rules.
    • - *
    • All type arguments conform to bounds.
    • - *
    • All type variable uses conform to variance annotations.
    • - *
    • No forward reference to a term symbol extends beyond a value definition.
    • - *
    - *

    - * It performs the following transformations. - *

    - *
      - *
    • Local modules are replaced by variables and classes
    • - *
    • Calls to case factory methods are replaced by new's.
    • - *
    • Eliminate branches in a conditional if the condition is a constant
    • - *
    +/** Post-attribution checking and transformation. * - * @author Martin Odersky - * @version 1.0 + * This phase checks the following postconditions: + * + * - All overrides conform to rules. + * - All type arguments conform to bounds. + * - Every use of a type variable conforms to the variance annotation of that variable. + * - No forward reference to a term symbol extends beyond a value definition. * - * @todo Check whether we always check type parameter bounds. + * It performs the following transformations: + * + * - Local modules are replaced by variables and classes. + * - Calls to case factory methods are replaced by new's. + * - Eliminate branches in a conditional if the condition is a constant. + * + * @author Martin Odersky */ abstract class RefChecks extends Transform { @@ -65,41 +55,20 @@ abstract class RefChecks extends Transform { def newTransformer(unit: CompilationUnit): RefCheckTransformer = new RefCheckTransformer(unit) - val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) - val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) + val toJavaRepeatedParam = SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) + val toScalaRepeatedParam = SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) def accessFlagsToString(sym: Symbol) = flagsToString( sym getFlag (PRIVATE | PROTECTED), if (sym.hasAccessBoundary) "" + sym.privateWithin.name else "" ) - def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type, isModuleOverride: Boolean): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match { - case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) => - rtp1 <:< rtp2 - case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) => - rtp1 <:< rtp2 - - // all this module business would be so much simpler if we moduled^w modelled a module as a class and an accessor, like we do for fields - case (TypeRef(_, sym, _), _) if sym.isModuleClass => - 
overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix, isModuleOverride) - case (_, TypeRef(_, sym, _)) if sym.isModuleClass => - overridesTypeInPrefix(tp1, NullaryMethodType(tp2), prefix, isModuleOverride) - - case _ => - def classBoundAsSeen(tp: Type) = tp.typeSymbol.classBound.asSeenFrom(prefix, tp.typeSymbol.owner) - (tp1 <:< tp2) || isModuleOverride && ( - // Object override check. This requires that both the overridden and the overriding member are object - // definitions. The overriding module type is allowed to replace the original one with the same name - // as long as it conform to the original non-singleton type. - tp1.typeSymbol.isModuleClass && tp2.typeSymbol.isModuleClass && { - val cb1 = classBoundAsSeen(tp1) - val cb2 = classBoundAsSeen(tp2) - (cb1 <:< cb2) && { - log("Allowing %s to override %s because %s <:< %s".format(tp1, tp2, cb1, cb2)) - true - } - } - ) + def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match { + case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) => rtp1 <:< rtp2 + case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) => rtp1 <:< rtp2 + case (TypeRef(_, sym, _), _) if sym.isModuleClass => overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix) + case (_, TypeRef(_, sym, _)) if sym.isModuleClass => overridesTypeInPrefix(tp1, NullaryMethodType(tp2), prefix) + case _ => tp1 <:< tp2 } private val separatelyCompiledScalaSuperclass = perRunCaches.newAnyRefMap[Symbol, Unit]() @@ -111,20 +80,40 @@ abstract class RefChecks extends Transform { false } - class RefCheckTransformer(unit: CompilationUnit) extends Transformer { + class RefCheckTransformer(@unused unit: CompilationUnit) extends AstTransformer { + private final val indent = " " var localTyper: analyzer.Typer = typer var currentApplication: Tree = EmptyTree + var inAnnotation: Boolean = false var inPattern: Boolean = false @inline final def savingInPattern[A](body: => A): A = { val saved = inPattern 
try body finally inPattern = saved } - var checkedCombinations = Set[List[Type]]() + // Track symbols of the refinement's parents and the base at which we've checked them, + // as well as the entire refinement type seen at that base. + // No need to check the same symbols again in a base that's a subclass of a previously checked base + private val checkedCombinations = mutable.Map[List[Symbol], (Symbol, Type)]() + private def notYetCheckedOrAdd(rt: RefinedType, currentBase: Symbol) = { + val seen = checkedCombinations.get(rt.parents.map(_.typeSymbol)).exists { + case (prevBase, prevTp) => + val isSub = (currentBase, prevBase) match { + case (cRef: RefinementClassSymbol, pRef: RefinementClassSymbol) => + cRef.info.parents.map(_.typeSymbol) == pRef.info.parents.map(_.typeSymbol) + case _ => + currentBase.isSubClass(prevBase) + } + val sameTp = rt =:= prevTp.asSeenFrom(currentBase.thisType, prevBase) + isSub && sameTp + } + if (!seen) checkedCombinations.addOne((rt.parents.map(_.typeSymbol), (currentBase, rt))) + !seen + } - private def refchecksWarning(pos: Position, msg: String, cat: WarningCategory): Unit = - runReporting.warning(pos, msg, cat, currentOwner) + private def refchecksWarning(pos: Position, msg: String, cat: WarningCategory, actions: List[CodeAction] = Nil): Unit = + runReporting.warning(pos, msg, cat, currentOwner, actions) // only one overloaded alternative is allowed to define default arguments private def checkOverloadedRestrictions(clazz: Symbol, defaultClass: Symbol): Unit = { @@ -138,16 +127,7 @@ abstract class RefChecks extends Transform { defaultMethodNames.toList.distinct foreach { name => val methods = clazz.info.findMember(name, 0L, requiredFlags = METHOD, stableOnly = false).alternatives - def hasDefaultParam(tpe: Type): Boolean = tpe match { - case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe) - case _ => false - } - val haveDefaults = methods filter ( - if (currentRun.isScala211) - (sym => 
mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name)) - else - (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name)) - ) + val haveDefaults = methods.filter(sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name)) if (haveDefaults.lengthCompare(1) > 0) { val owners = haveDefaults map (_.owner) @@ -166,11 +146,9 @@ abstract class RefChecks extends Transform { } // Check for doomed attempt to overload applyDynamic - if (clazz isSubClass DynamicClass) { - for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) { + if (clazz.isSubClass(DynamicClass)) + for ((_, m1 :: _ :: _) <- clazz.info.member(nme.applyDynamic).alternatives.groupBy(_.typeParams.length)) reporter.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)") - } - } // This has become noisy with implicit classes. 
if (settings.isDeveloper && settings.warnPolyImplicitOverload) { @@ -182,6 +160,73 @@ abstract class RefChecks extends Transform { }) } } + private def checkDubiousOverloads(clazz: Symbol): Unit = if (settings.warnDubiousOverload) { + // nullary members or methods with leading implicit params + def ofInterest(tp: Type): Boolean = tp match { + case mt: MethodType => mt.isImplicit + case PolyType(_, rt) => ofInterest(rt) + case _ => true // includes NullaryMethodType + } + // takes no value parameters + def isNullary(tp: Type): Boolean = tp match { + case _: MethodType => false + case PolyType(_, rt) => isNullary(rt) + case _ => true // includes NullaryMethodType + } + def warnDubious(sym: Symbol, alts: List[Symbol]): Unit = { + val usage = if (sym.isMethod && !sym.isGetter) "Calls to parameterless" else "Usages of" + val simpl = "a single implicit parameter list" + val suffix = alts.filter(_ != sym).map(_.defString) match { + case impl :: Nil => s"$impl, which has $simpl." + case impls => + sm"""|overloads which have $simpl: + | ${impls.mkString("\n ")}""" + } + val warnAt = + if (sym.owner == clazz) sym.pos + else + alts.find(_.owner == clazz) match { + case Some(conflict) => conflict.pos + case _ => clazz.pos + } + refchecksWarning(warnAt, s"$usage $sym will be easy to mistake for calls to $suffix", LintOverload) + } + val byName = + clazz.info.members + .reverseIterator + .filter(m => ofInterest(m.info)) + .toList + .groupBy(_.name.dropLocal) + def isCompetitive(syms: List[Symbol], sawNlly: Boolean, sawNonNlly: Boolean): Boolean = + sawNlly && sawNonNlly || (syms match { + case sym :: syms => + if (!sawNlly && isNullary(sym.info)) isCompetitive(syms, sawNlly = true, sawNonNlly) + else if (!sawNonNlly && !isNullary(sym.info)) isCompetitive(syms, sawNlly, sawNonNlly = true) + else isCompetitive(syms, sawNlly, sawNonNlly) + case _ => false + }) + for ((_, syms) <- byName if syms.lengthCompare(1) > 0 && isCompetitive(syms, sawNlly=false, sawNonNlly=false)) { + val 
(nullaries, alts) = syms.partition(sym => isNullary(sym.info)) + //assert(!alts.isEmpty) + nullaries match { + case nullary :: Nil => warnDubious(nullary, syms) + case nullaries => + //assert(!nullaries.isEmpty) + val dealiased = + nullaries.find(_.isPrivateLocal) match { + case Some(local) => + nullaries.find(sym => sym.isAccessor && sym.accessed == local) match { + case Some(accessor) => nullaries.filter(_ != local) // drop local if it has an accessor + case _ => nullaries + } + case _ => nullaries + } + // there are multiple exactly for a private local and an inherited member + for (nullary <- dealiased) + warnDubious(nullary, nullary :: alts) + } + } + } // Override checking ------------------------------------------------------------ @@ -207,7 +252,7 @@ abstract class RefChecks extends Transform { val params = bridge.paramss.head val elemtp = params.last.tpe.typeArgs.head val idents = params map Ident - val lastarg = gen.wildcardStar(gen.mkWrapArray(idents.last, elemtp)) + val lastarg = gen.wildcardStar(gen.mkWrapVarargsArray(idents.last, elemtp)) val body = Apply(Select(This(clazz), member), idents.init :+ lastarg) localTyper typed DefDef(bridge, body) @@ -254,7 +299,6 @@ abstract class RefChecks extends Transform { * 1.2. O must not be final. * 1.3. O is deferred, or M has `override` modifier. * 1.4. If O is stable, then so is M. - * // @M: LIFTED 1.5. Neither M nor O are a parameterized type alias * 1.6. If O is a type alias, then M is an alias of O. * 1.7. If O is an abstract type then * 1.7.1 either M is an abstract type, and M's bounds are sharper than O's bounds. @@ -272,277 +316,310 @@ abstract class RefChecks extends Transform { * that are not implemented in a subclass. * 4. Check that every member with an `override` modifier * overrides some other member. + * 5. Check that the nested class do not shadow other nested classes from outer class's parent. 
*/ - private def checkAllOverrides(clazz: Symbol, typesOnly: Boolean = false) { + private def checkAllOverrides(clazz: Symbol, typesOnly: Boolean = false): Unit = { val self = clazz.thisType - case class MixinOverrideError(member: Symbol, msg: String) + case class MixinOverrideError(member: Symbol, msg: String, actions: List[CodeAction], s3Migration: Boolean) val mixinOverrideErrors = new ListBuffer[MixinOverrideError]() - def printMixinOverrideErrors() { + def issue(pos: Position, msg: String, actions: List[CodeAction], s3Migration: Boolean) = + if (s3Migration) runReporting.warning(pos, msg, WarningCategory.Scala3Migration, currentOwner, actions) + else runReporting.error(pos, msg, actions) + + def printMixinOverrideErrors(): Unit = { mixinOverrideErrors.toList match { case List() => - case List(MixinOverrideError(_, msg)) => - reporter.error(clazz.pos, msg) - case MixinOverrideError(member, msg) :: others => + case List(MixinOverrideError(_, msg, actions, s3Migration)) => + issue(clazz.pos, msg, actions, s3Migration) + case MixinOverrideError(member, msg, actions, s3Migration) :: others => val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).distinct - reporter.error( + issue( clazz.pos, - msg+(if (others1.isEmpty) "" - else ";\n other members with override errors are: "+(others1 mkString ", "))) + if (others1.isEmpty) msg + else s"$msg;\n other members with override errors are: ${others1.mkString(", ")}", + actions, + s3Migration) } } def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz) - def infoStringWithLocation(sym: Symbol) = infoString0(sym, true) + def infoStringWithLocation(sym: Symbol) = infoString0(sym, showLocation = true) def infoString0(member: Symbol, showLocation: Boolean) = { - val underlying = // not using analyzer.underlyingSymbol(member) because we should get rid of it - if (!(member hasFlag ACCESSOR)) member - else member.accessed match { - case field if field.exists => field - case _ if member.isSetter 
=> member.getterIn(member.owner) - case _ => member - } - - def memberInfo = - self.memberInfo(underlying) match { - case getterTp if underlying.isGetter => getterTp.resultType - case tp => tp + val location = + if (!showLocation) "" + else member.ownsString match { + case "" => "" + case s => s" (defined in $s)" } + val macroStr = if (member.isTermMacro) "macro " else "" - underlying.toString() + - (if (showLocation) - underlying.locationString + - (if (underlying.isAliasType) s", which equals $memberInfo" - else if (underlying.isAbstractType) s" with bounds$memberInfo" - else if (underlying.isModule) "" - else if (underlying.isTerm) s" of type $memberInfo" - else "") - else "") + macroStr + member.defStringSeenAs(self.memberInfo(member)) + location } - /* Check that all conditions for overriding `other` by `member` - * of class `clazz` are met. + /* Check that all conditions for overriding `other` by `member` of class `clazz` are met. + * + * TODO: error messages could really be improved, including how they are composed */ - def checkOverride(pair: SymbolPair) { - import pair._ - val member = low - val other = high + def checkOverride(pair: SymbolPair): Unit = { + import pair.{highType, lowType, highInfo, rootType} -// debuglog(s"Checking validity of ${member.fullLocationString} overriding ${other.fullLocationString}") + val member = pair.low + val other = pair.high + val memberClass = member.owner + val otherClass = other.owner + + // debuglog(s"Checking validity of ${member.fullLocationString} overriding ${other.fullLocationString}") def noErrorType = !pair.isErroneous def isRootOrNone(sym: Symbol) = sym != null && sym.isRoot || sym == NoSymbol - def isNeitherInClass = member.owner != pair.base && other.owner != pair.base + val isMemberClass = memberClass == clazz + def isNeitherInClass = !isMemberClass && otherClass != clazz - def objectOverrideErrorMsg = ( - "overriding " + high.fullLocationString + " with " + low.fullLocationString + ":\n" + - "an overriding 
object must conform to the overridden object's class bound" + - analyzer.foundReqMsg(pair.lowClassBound, pair.highClassBound) - ) + /** Emit an error if member is owned by current class, using the member position. + * Otherwise, accumulate the error, to be emitted after other messages, using the class position. + */ + def emitOverrideError(fullmsg: String, actions: List[CodeAction] = Nil, s3Migration: Boolean = false): Unit = + if (isMemberClass) issue(member.pos, fullmsg, actions, s3Migration) + else mixinOverrideErrors += MixinOverrideError(member, fullmsg, actions, s3Migration) - def overrideErrorMsg(msg: String): String = { + def overriddenWithAddendum(msg: String, foundReq: Boolean = settings.isDebug): String = { val isConcreteOverAbstract = - (other.owner isSubClass member.owner) && other.isDeferred && !member.isDeferred + otherClass.isSubClass(memberClass) && other.isDeferred && !member.isDeferred val addendum = if (isConcreteOverAbstract) - ";\n (Note that %s is abstract,\n and is therefore overridden by concrete %s)".format( - infoStringWithLocation(other), - infoStringWithLocation(member) - ) - else if (settings.isDebug) - analyzer.foundReqMsg(member.tpe, other.tpe) + sm"""|; + |${indent}(note that ${infoStringWithLocation(other)} is abstract, + |${indent}and is therefore overridden by concrete ${infoStringWithLocation(member)})""" + else if (foundReq) { + def info(sym: Symbol) = self.memberInfo(sym) match { case tp if sym.isGetter || sym.isValue && !sym.isMethod => tp.resultType case tp => tp } + analyzer.foundReqMsg(info(member), info(other)) + } else "" + val msg1 = if (!msg.isEmpty) s"\n$indent$msg" else msg - s"overriding ${infoStringWithLocation(other)};\n ${infoString(member)} $msg$addendum" - } - def emitOverrideError(fullmsg: String) { - if (member.owner == clazz) reporter.error(member.pos, fullmsg) - else mixinOverrideErrors += MixinOverrideError(member, fullmsg) + s"${infoStringWithLocation(other)}${msg1}${addendum}" } - def overrideError(msg: 
String) { - if (noErrorType) - emitOverrideError(overrideErrorMsg(msg)) - } + def overrideError(msg: String): Unit = + if (noErrorType) emitOverrideError(msg) - def overrideTypeError() { - if (noErrorType) { - emitOverrideError( - if (member.isModule && other.isModule) objectOverrideErrorMsg - else overrideErrorMsg("has incompatible type") - ) - } - } + def getWithIt = if (isMemberClass) "" else s"with ${infoString(member)}" - def overrideAccessError() { - val otherAccess = accessFlagsToString(other) - overrideError("has weaker access privileges; it should be "+ (if (otherAccess == "") "public" else "at least "+otherAccess)) - } + def overrideErrorWithMemberInfo(msg: String, actions: List[CodeAction] = Nil, s3Migration: Boolean = false): Unit = + if (noErrorType) emitOverrideError(s"${msg}\n${overriddenWithAddendum(getWithIt)}", actions, s3Migration) - //Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG - - // return if we already checked this combination elsewhere - if (member.owner != clazz) { - def deferredCheck = member.isDeferred || !other.isDeferred - def subOther(s: Symbol) = s isSubClass other.owner - def subMember(s: Symbol) = s isSubClass member.owner + def overrideErrorOrNullaryWarning(msg: String, actions: List[CodeAction]): Unit = if (isMemberClass || !member.owner.isSubClass(other.owner)) + if (currentRun.isScala3) + overrideErrorWithMemberInfo(msg, actions, s3Migration = true) + else if (isMemberClass) + refchecksWarning(member.pos, msg, WarningCategory.OtherNullaryOverride, actions) + else + refchecksWarning(clazz.pos, msg, WarningCategory.OtherNullaryOverride, actions) - if (subOther(member.owner) && deferredCheck) { - //Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG - return - } - if (clazz.parentSymbolsIterator exists (p => subOther(p) && subMember(p) && deferredCheck)) { - //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " 
+ clazz);//DEBUG - return + def overrideTypeError(): Unit = + if (member.isModule && other.isModule) + overrideError(sm"""|overriding ${other.fullLocationString} with ${member.fullLocationString}: + |an overriding object must conform to the overridden object's class bound${ + analyzer.foundReqMsg(pair.lowClassBound, pair.highClassBound)}""") + else { + val needSameType = !other.isDeferred && other.isAliasType + val msg = s"${getWithIt}${if (needSameType) " (Equivalent type required when overriding a type alias.)" else ""}" + overrideError(sm"""|incompatible type in overriding + |${overriddenWithAddendum(msg, foundReq = !needSameType)}""") } - if (clazz.parentSymbolsIterator forall (p => subOther(p) == subMember(p))) { - //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG - return + + def overrideErrorConcreteMissingOverride() = + if (isNeitherInClass && !otherClass.isSubClass(memberClass)) + emitOverrideError(sm"""|$clazz inherits conflicting members: + |$indent${infoStringWithLocation(other)} and + |$indent${infoStringWithLocation(member)} + |$indent(note: this can be resolved by declaring an `override` in $clazz.)""") + else + overrideErrorWithMemberInfo("`override` modifier required to override concrete member:") + + def weakerAccessError(advice: String): Unit = + overrideError(sm"""|weaker access privileges in overriding + |${overriddenWithAddendum(advice)}""") + def overrideAccessError(): Unit = + weakerAccessError { + accessFlagsToString(other) match { + case "" => "override should be public" + case otherAccess => s"override should at least be $otherAccess" + } } - } + + //Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG /* Is the intersection between given two lists of overridden symbols empty? 
*/ - def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) = - !(syms1 exists (syms2 contains _)) + def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) = !syms1.exists(syms2.contains) - if (typesOnly) checkOverrideTypes() + if (memberClass == ObjectClass && otherClass == AnyClass) () // skip -- can we have a mode of symbolpairs where this pair doesn't even appear? + else if (typesOnly) checkOverrideTypes() else { // o: public | protected | package-protected (aka java's default access) // ^-may be overridden by member with access privileges-v // m: public | public/protected | public/protected/package-protected-in-same-package-as-o if (member.isPrivate) // (1.1) - overrideError("has weaker access privileges; it should not be private") + weakerAccessError("override should not be private") // todo: align accessibility implication checking with isAccessible in Contexts - val ob = other.accessBoundary(member.owner) - val mb = member.accessBoundary(member.owner) - def isOverrideAccessOK = member.isPublic || { // member is public, definitely same or relaxed access - (!other.isProtected || member.isProtected) && // if o is protected, so is m - ((!isRootOrNone(ob) && ob.hasTransOwner(mb)) || // m relaxes o's access boundary - other.isJavaDefined) // overriding a protected java member, see #3946 + @inline def protectedOK = !other.isProtected || member.isProtected + @inline def accessBoundaryOK = { + val ob = other.accessBoundary(memberClass) + val mb = member.accessBoundary(memberClass) + @inline def companionBoundaryOK = ob.isClass && mb.isModuleClass && mb.module == ob.companionSymbol + !isRootOrNone(ob) && (ob.hasTransOwner(mb) || companionBoundaryOK) } - if (!isOverrideAccessOK) { + @inline def otherIsJavaProtected = other.isJavaDefined && other.isProtected + val isOverrideAccessOK = + member.isPublic || // member is public, definitely same or relaxed access + protectedOK && // if o is protected, so is m + (accessBoundaryOK || // m relaxes o's 
access boundary + otherIsJavaProtected // overriding a protected java member, see #3946 #12349 + ) + if (!isOverrideAccessOK) overrideAccessError() - } else if (other.isClass) { - overrideError("cannot be used here - class definitions cannot be overridden") - } else if (!other.isDeferred && member.isClass) { - overrideError("cannot be used here - classes can only override abstract types") - } else if (other.isEffectivelyFinal) { // (1.2) - overrideError("cannot override final member") - } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*) - // (*) Synthetic exclusion for (at least) default getters, fixes scala/bug#5178. We cannot assign the OVERRIDE flag to - // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket. - if (isNeitherInClass && !(other.owner isSubClass member.owner)) - emitOverrideError( - clazz + " inherits conflicting members:\n " - + infoStringWithLocation(other) + " and\n " + infoStringWithLocation(member) - + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)" - ) - else - overrideError("needs `override' modifier") - } else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) { - overrideError("needs `abstract override' modifiers") - } - else if (member.isAnyOverride && (other hasFlag ACCESSOR) && !(other hasFlag STABLE | DEFERRED)) { - // The check above used to look at `field` == `other.accessed`, ensuring field.isVariable && !field.isLazy, - // which I think is identical to the more direct `!(other hasFlag STABLE)` (given that `other` is a method). - // Also, we're moving away from (looking at) underlying fields (vals in traits no longer have them, to begin with) - // TODO: this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here. 
- if (!settings.overrideVars) - overrideError("cannot override a mutable variable") - } - else if (member.isAnyOverride && - !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) && + else if (other.isClass) + overrideErrorWithMemberInfo("class definitions cannot be overridden:") + else if (!other.isDeferred && member.isClass) + overrideErrorWithMemberInfo("classes can only override abstract types; cannot override:") + else if (other.isEffectivelyFinal) // (1.2) + overrideErrorWithMemberInfo("cannot override final member:") + else { + // In Java, the OVERRIDE flag is implied + val memberOverrides = member.isAnyOverride || (member.isJavaDefined && !member.isDeferred) + + // Concrete `other` requires `override` for `member`. + // Synthetic exclusion for (at least) default getters, fixes scala/bug#5178. + // We cannot assign the OVERRIDE flag to the default getter: + // one default getter might sometimes override, sometimes not. Example in comment on ticket. + if (!memberOverrides && !other.isDeferred && !member.isSynthetic) + overrideErrorConcreteMissingOverride() + else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) + overrideErrorWithMemberInfo("`abstract override` modifiers required to override:") + else if (memberOverrides && other.hasFlag(ACCESSOR) && !other.hasFlag(STABLE | DEFERRED)) + // TODO: this is not covered by the spec. 
+ overrideErrorWithMemberInfo("mutable variable cannot be overridden:") + else if (memberOverrides && + !memberClass.thisType.baseClasses.exists(_.isSubClass(otherClass)) && !member.isDeferred && !other.isDeferred && - intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) { - overrideError("cannot override a concrete member without a third member that's overridden by both "+ - "(this rule is designed to prevent ``accidental overrides'')") - } else if (other.isStable && !member.isStable) { // (1.4) - overrideError("needs to be a stable, immutable value") - } else if (member.isValue && member.isLazy && - other.isValue && other.hasFlag(STABLE) && !(other.isDeferred || other.isLazy)) { - overrideError("cannot override a concrete non-lazy value") - } else if (other.isValue && other.isLazy && - member.isValue && !member.isLazy) { - overrideError("must be declared lazy to override a concrete lazy value") - } else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9) - overrideError("cannot be used here - term macros cannot override abstract methods") - } else if (other.isTermMacro && !member.isTermMacro) { // (1.10) - overrideError("cannot be used here - only term macros can override term macros") - } else { - checkOverrideTypes() - checkOverrideDeprecated() - if (settings.warnNullaryOverride) { - if (other.paramss.isEmpty && !member.paramss.isEmpty && !member.isJavaDefined) { - refchecksWarning(member.pos, "non-nullary method overrides nullary method", WarningCategory.LintNullaryOverride) + intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) + overrideErrorWithMemberInfo("cannot override a concrete member without a third member that's overridden by both " + + "(this rule is designed to prevent accidental overrides)") + else if (other.isStable && !member.isStable) // (1.4) + overrideErrorWithMemberInfo("stable, immutable value required to 
override:") + else if (member.isValue && member.isLazy && + other.isValue && other.hasStableFlag && !other.isDeferred && !other.isLazy) + overrideErrorWithMemberInfo("concrete non-lazy value cannot be overridden:") + else if (other.isValue && other.isLazy && member.isValue && !member.isLazy) + overrideErrorWithMemberInfo("value must be lazy when overriding concrete lazy value:") + else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) // (1.9) + overrideErrorWithMemberInfo("macro cannot override abstract method:") + else if (other.isTermMacro && !member.isTermMacro) // (1.10) + overrideErrorWithMemberInfo("macro can only be overridden by another macro:") + else { + checkOverrideTypes() + // Don't bother users with deprecations caused by classes they inherit. + // Only warn for the pair that has one leg in `clazz`. + if (isMemberClass) checkOverrideDeprecated() + def javaDetermined(sym: Symbol) = sym.isJavaDefined || isUniversalMember(sym) + def exempted = javaDetermined(member) || javaDetermined(other) || member.overrides.exists(javaDetermined) + // warn that nilary member matched nullary other, so either it was adapted by namer or will be silently mixed in by mixin + def warnAdaptedNullaryOverride(): Unit = { + val mbr = if (isMemberClass) "method" else s"${member.fullLocationString} defined" + val msg = s"$mbr without a parameter list overrides ${other.fullLocationString} defined with a single empty parameter list" + val namePos = member.pos + val action = + if (namePos.isDefined && namePos.source.sourceAt(namePos) == member.decodedName) + runReporting.codeAction("add empty parameter list", namePos.focusEnd, "()", msg) + else Nil + overrideErrorOrNullaryWarning(msg, action) + } + def warnExtraParens(): Unit = { + val mbr = if (isMemberClass) "method" else s"${member.fullLocationString} defined" + val msg = s"$mbr with a single empty parameter list overrides ${other.fullLocationString} defined without a parameter 
list" + val namePos = member.pos + val action = + if (namePos.isDefined && namePos.source.sourceAt(namePos) == member.decodedName) + runReporting.codeAction("remove empty parameter list", namePos.focusEnd.withEnd(namePos.end + 2), "", msg, expected = Some(("()", currentUnit))) + else Nil + overrideErrorOrNullaryWarning(msg, action) + } + if (member.hasAttachment[NullaryOverrideAdapted.type]) { + if (!exempted) + warnAdaptedNullaryOverride() + } + else if (member.paramLists.isEmpty) { + // Definitions that directly override get a parameter list and a `NullaryOverrideAdapted` attachment + // in Namers. Here we also warn when there's a mismatch between two mixed-in members. + if (!member.isStable && other.paramLists.nonEmpty && !exempted && !other.overrides.exists(javaDetermined)) + warnAdaptedNullaryOverride() + } + else if (other.paramLists.isEmpty) { + if (!exempted && !member.hasAnnotation(BeanPropertyAttr) && !member.hasAnnotation(BooleanBeanPropertyAttr)) + warnExtraParens() } } } } - //if (!member.typeParams.isEmpty) (1.5) @MAT - // overrideError("may not be parameterized"); - //if (!other.typeParams.isEmpty) (1.5) @MAT - // overrideError("may not override parameterized type"); - // @M: substSym - def checkOverrideAlias() { + def checkOverrideAlias(): Unit = { // Important: first check the pair has the same kind, since the substitution // carries high's type parameter's bounds over to low, so that // type equality doesn't consider potentially different bounds on low/high's type params. // In b781e25afe this went from using memberInfo to memberType (now lowType/highType), tested by neg/override.scala. // TODO: was that the right fix? 
it seems type alias's RHS should be checked by looking at the symbol's info - if (pair.sameKind && lowType.substSym(low.typeParams, high.typeParams) =:= highType) () + if (pair.sameKind && lowType.substSym(member.typeParams, other.typeParams) =:= highType) () else overrideTypeError() // (1.6) } - //if (!member.typeParams.isEmpty) // (1.7) @MAT - // overrideError("may not be parameterized"); - def checkOverrideAbstract() { + def checkOverrideAbstractType(): Unit = { if (!(highInfo.bounds containsType lowType)) { // (1.7.1) overrideTypeError(); // todo: do an explaintypes with bounds here explainTypes(_.bounds containsType _, highInfo, lowType) } // check overriding (abstract type --> abstract type or abstract type --> concrete type member (a type alias)) // making an abstract type member concrete is like passing a type argument - typer.infer.checkKindBounds(high :: Nil, lowType :: Nil, rootType, low.owner) match { // (1.7.2) + typer.infer.checkKindBounds(other :: Nil, lowType :: Nil, rootType, memberClass) match { // (1.7.2) case Nil => case kindErrors => reporter.error(member.pos, - "The kind of "+member.keyString+" "+member.varianceString + member.nameString+ + "The kind of " + member.keyString+" " + member.varianceString + member.nameString+ " does not conform to the expected kind of " + other.defString + other.locationString + "." 
+ kindErrors.toList.mkString("\n", ", ", "")) } // check a type alias's RHS corresponds to its declaration // this overlaps somewhat with validateVariance - if (low.isAliasType) { - typer.infer.checkKindBounds(low :: Nil, lowType.normalize :: Nil, rootType, low.owner) match { + if (member.isAliasType) { + typer.infer.checkKindBounds(member :: Nil, lowType.normalize :: Nil, rootType, memberClass) match { case Nil => case kindErrors => reporter.error(member.pos, - "The kind of the right-hand side "+lowType.normalize+" of "+low.keyString+" "+ - low.varianceString + low.nameString+ " does not conform to its expected kind."+ + "The kind of the right-hand side "+lowType.normalize+" of " + member.keyString+" "+ + member.varianceString + member.nameString+ " does not conform to its expected kind."+ kindErrors.toList.mkString("\n", ", ", "")) } } - else if (low.isAbstractType && lowType.isVolatile && !highInfo.upperBound.isVolatile) - overrideError("is a volatile type; cannot override a type with non-volatile upper bound") + else if (member.isAbstractType && lowType.isVolatile && !highInfo.upperBound.isVolatile) + overrideErrorWithMemberInfo("volatile type member cannot override type member with non-volatile upper bound:") } - def checkOverrideTerm() { + def checkOverrideTerm(): Unit = { + member.cookJavaRawInfo() // #11584, #11840 other.cookJavaRawInfo() // #2454 - if (!overridesTypeInPrefix(lowType, highType, rootType, low.isModuleOrModuleClass && high.isModuleOrModuleClass)) { // 8 + if (!overridesTypeInPrefix(lowType, highType, rootType)) { // 8 overrideTypeError() explainTypes(lowType, highType) } - if (low.isStable && !highType.isVolatile) { + if (member.isStable && !highType.isVolatile) { if (lowType.isVolatile) - overrideError("has a volatile type; cannot override a member with non-volatile type") + overrideErrorWithMemberInfo("member with volatile type cannot override member with non-volatile type:") else lowType.normalize.resultType match { - case rt: RefinedType 
if !(rt =:= highType) && !(checkedCombinations contains rt.parents) => + case rt: RefinedType if !(rt =:= highType) && notYetCheckedOrAdd(rt, pair.base) => // might mask some inconsistencies -- check overrides - checkedCombinations += rt.parents val tsym = rt.typeSymbol if (tsym.pos == NoPosition) tsym setPos member.pos checkAllOverrides(tsym, typesOnly = true) @@ -550,14 +627,14 @@ abstract class RefChecks extends Transform { } } } - def checkOverrideTypes() { - if (high.isAliasType) checkOverrideAlias() - else if (high.isAbstractType) checkOverrideAbstract() - else if (high.isTerm) checkOverrideTerm() + def checkOverrideTypes(): Unit = { + if (other.isAliasType) checkOverrideAlias() + else if (other.isAbstractType) checkOverrideAbstractType() + else if (other.isTerm) checkOverrideTerm() } - def checkOverrideDeprecated() { - if (other.hasDeprecatedOverridingAnnotation && !(member.hasDeprecatedOverridingAnnotation || member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation))) { + def checkOverrideDeprecated(): Unit = { + if (other.hasDeprecatedOverridingAnnotation && !(member.hasDeprecatedOverridingAnnotation || member.ownerChain.exists(_.isDeprecated))) { val version = other.deprecatedOverridingVersion.getOrElse("") val since = if (version.isEmpty) version else s" (since $version)" val message = other.deprecatedOverridingMessage map (msg => s": $msg") getOrElse "" @@ -567,7 +644,7 @@ abstract class RefChecks extends Transform { } } - val opc = new overridingPairs.Cursor(clazz) + val opc = new overridingPairs.PairsCursor(clazz) while (opc.hasNext) { if (!opc.high.isClass) checkOverride(opc.currentPair) @@ -578,21 +655,18 @@ abstract class RefChecks extends Transform { // Verifying a concrete class has nothing unimplemented. 
if (clazz.isConcreteClass && !typesOnly) { - val abstractErrors = new ListBuffer[String] - def abstractErrorMessage = - // a little formatting polish - if (abstractErrors.size <= 2) abstractErrors mkString " " - else abstractErrors.tail.mkString(abstractErrors.head + ":\n", "\n", "") - - def abstractClassError(mustBeMixin: Boolean, msg: String) { - def prelude = ( - if (clazz.isAnonymousClass || clazz.isModuleClass) "object creation impossible" - else if (mustBeMixin) clazz + " needs to be a mixin" - else clazz + " needs to be abstract" - ) + ", since" - - if (abstractErrors.isEmpty) abstractErrors ++= List(prelude, msg) - else abstractErrors += msg + val abstractErrors = ListBuffer.empty[String] + def abstractErrorMessage = abstractErrors.mkString("\n") + + def abstractClassError(msg: String, supplement: String = "", mustBeMixin: Boolean = false): Unit = { + def prelude = + if (clazz.isAnonymousClass || clazz.isModuleClass) "object creation impossible." + else if (mustBeMixin) s"$clazz needs to be a mixin." + else s"$clazz needs to be abstract." + + if (abstractErrors.isEmpty) abstractErrors += prelude + abstractErrors += msg + if (!supplement.isEmpty) abstractErrors += supplement } def javaErasedOverridingSym(sym: Symbol): Symbol = @@ -607,147 +681,152 @@ abstract class RefChecks extends Transform { exitingErasure(tp1 matches tp2) }) - def ignoreDeferred(member: Symbol) = ( + def ignoreDeferred(member: Symbol) = (member.isAbstractType && !member.isFBounded) || ( // the test requires exitingErasure so shouldn't be // done if the compiler has no erasure phase available member.isJavaDefined && (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol) ) - ) // 2. Check that only abstract classes have deferred members def checkNoAbstractMembers(): Unit = { - // Avoid spurious duplicates: first gather any missing members. 
- def memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE) - val (missing, rest) = memberList partition (m => m.isDeferred && !ignoreDeferred(m)) - // Group missing members by the name of the underlying symbol, - // to consolidate getters and setters. - val grouped = missing groupBy (_.name.getterName) - val missingMethods = grouped.toList flatMap { - case (name, syms) => - if (syms exists (_.isSetter)) syms filterNot (_.isGetter) - else syms - } - - def stubImplementations: List[String] = { - // Grouping missing methods by the declaring class - val regrouped = missingMethods.groupBy(_.owner).toList - def membersStrings(members: List[Symbol]) = { - members foreach fullyInitializeSymbol - members.sortBy(_.name) map (m => m.defStringSeenAs(clazz.tpe_* memberType m) + " = ???") - } - - if (regrouped.tail.isEmpty) - membersStrings(regrouped.head._2) - else (regrouped.sortBy("" + _._1.name) flatMap { - case (owner, members) => - ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ "" - }).init - } - - // If there are numerous missing methods, we presume they are aware of it and - // give them a nicely formatted set of method signatures for implementing. - if (missingMethods.size > 1) { - abstractClassError(false, "it has " + missingMethods.size + " unimplemented members.") - val preface = - """|/** As seen from %s, the missing signatures are as follows. - | * For convenience, these are usable as stub implementations. 
- | */ - |""".stripMargin.format(clazz) - abstractErrors += stubImplementations.map(" " + _ + "\n").mkString(preface, "", "") - return - } - - for (member <- missing) { - def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg) + val NoError = null.asInstanceOf[String] + val EmptyDiagnostic = "" + def diagnose(member: Symbol, accessors: List[Symbol], nonPrivateMembers: Scope, fastDiagnostics: Boolean): String = { val underlying = analyzer.underlyingSymbol(member) // TODO: don't use this method // Give a specific error message for abstract vars based on why it fails: // It could be unimplemented, have only one accessor, or be uninitialized. - val groupedAccessors = grouped.getOrElse(member.name.getterName, Nil) - val isMultiple = groupedAccessors.size > 1 - - if (groupedAccessors.exists(_.isSetter) || (member.isGetter && !isMultiple && member.setterIn(member.owner).exists)) { - // If both getter and setter are missing, squelch the setter error. - if (member.isSetter && isMultiple) () - else undefined( - if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" - else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" - else "\n(Note that variables need to be initialized to be defined)" - ) + val isMultiple = accessors.size > 1 + + if (accessors.exists(_.isSetter) || (member.isGetter && !isMultiple && member.setterIn(member.owner).exists)) { + if (member.isSetter && isMultiple) NoError // If both getter and setter are missing, squelch the setter error. 
+ else if (member.isSetter) "an abstract var requires a setter in addition to the getter" + else if (member.isGetter && !isMultiple) "an abstract var requires a getter in addition to the setter" + else "variables need to be initialized to be defined" } - else if (underlying.isMethod) { - // If there is a concrete method whose name matches the unimplemented - // abstract method, and a cursory examination of the difference reveals - // something obvious to us, let's make it more obvious to them. + else if (!fastDiagnostics && underlying.isMethod) { + // Highlight any member that nearly matches: same name and arity, + // but differs in one param or param list. val abstractParamLists = underlying.paramLists - val matchingName = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE) - val matchingArity = matchingName filter { m => + val matchingArity = nonPrivateMembers.reverseIterator.filter { m => !m.isDeferred && - (m.name == underlying.name) && + m.name == underlying.name && sameLength(m.paramLists, abstractParamLists) && sumSize(m.paramLists, 0) == sumSize(abstractParamLists, 0) && - sameLength(m.tpe.typeParams, underlying.tpe.typeParams) - } - + sameLength(m.tpe.typeParams, underlying.tpe.typeParams) && + !(m.isJavaDefined && m.hasFlag(JAVA_DEFAULTMETHOD)) + }.toList matchingArity match { // So far so good: only one candidate method - case Scope(concrete) => - val aplIter = abstractParamLists .iterator.flatten - val cplIter = concrete.paramLists.iterator.flatten + case concrete :: Nil => + val concreteParamLists = concrete.paramLists + val aplIter = abstractParamLists.iterator.flatten + val cplIter = concreteParamLists.iterator.flatten def mismatch(apl: Symbol, cpl: Symbol): Option[(Type, Type)] = - if (apl.tpe =:= cpl.tpe) None else Some(apl.tpe -> cpl.tpe) - - mapFilter2(aplIter, cplIter)(mismatch).take(2).toList match { + if (apl.tpe.asSeenFrom(clazz.tpe, underlying.owner) =:= cpl.tpe) None else Some(apl.tpe -> cpl.tpe) + def missingImplicit = 
abstractParamLists.zip(concreteParamLists).exists { + case (abss, konkrete) => abss.headOption.exists(_.isImplicit) && !konkrete.headOption.exists(_.isImplicit) + } + val mismatches = mapFilter2(aplIter, cplIter)(mismatch).take(2).toList + mismatches match { // Only one mismatched parameter: say something useful. case (pa, pc) :: Nil => val abstractSym = pa.typeSymbol val concreteSym = pc.typeSymbol - def subclassMsg(c1: Symbol, c2: Symbol) = ( - ": %s is a subclass of %s, but method parameter types must match exactly.".format( - c1.fullLocationString, c2.fullLocationString) - ) - val addendum = ( + def subclassMsg(c1: Symbol, c2: Symbol) = + s": ${c1.fullLocationString} is a subclass of ${c2.fullLocationString}, but method parameter types must match exactly." + def wrongSig = { + val m = concrete + fullyInitializeSymbol(m) + m.defStringSeenAs(clazz.tpe_*.memberType(m)) + } + val addendum = if (abstractSym == concreteSym) { - // TODO: what is the optimal way to test for a raw type at this point? - // Compilation has already failed so we shouldn't have to worry overmuch - // about forcing types. if (underlying.isJavaDefined && pa.typeArgs.isEmpty && abstractSym.typeParams.nonEmpty) s". To implement this raw type, use ${rawToExistential(pa)}" else if (pa.prefix =:= pc.prefix) ": their type parameters differ" else - ": their prefixes (i.e. 
enclosing instances) differ" + ": their prefixes (i.e., enclosing instances) differ" } - else if (abstractSym isSubClass concreteSym) - subclassMsg(abstractSym, concreteSym) - else if (concreteSym isSubClass abstractSym) - subclassMsg(concreteSym, abstractSym) - else "" - ) - - undefined("\n(Note that %s does not match %s%s)".format(pa, pc, addendum)) - case _ => - undefined("") + else if (abstractSym.isSubClass(concreteSym)) subclassMsg(abstractSym, concreteSym) + else if (concreteSym.isSubClass(abstractSym)) subclassMsg(concreteSym, abstractSym) + else s" in `$wrongSig`" + s"$pa does not match $pc$addendum" + case Nil if missingImplicit => "overriding member must declare implicit parameter list" // other overriding gotchas + case _ => EmptyDiagnostic } - case _ => - undefined("") + case _ => EmptyDiagnostic } } - else undefined("") + else EmptyDiagnostic } - + def emitErrors(missing: List[Symbol], nonPrivateMembers: Scope): Unit = { + val fastDiagnostics = missing.lengthCompare(100) > 0 + // Group missing members by the name of the underlying symbol, to consolidate getters and setters. + val byName = missing.groupBy(_.name.getterName) + // There may be 1 or more missing members declared in 1 or more parents. + // If a single parent, the message names it. Otherwise, missing members are grouped by declaring class. 
+ val byOwner = missing.groupBy(_.owner).toList + val announceOwner = byOwner.size > 1 + def membersStrings(members: List[Symbol]) = { + members.sortBy(_.name).flatMap { m => + val accessors = byName.getOrElse(m.name.getterName, Nil) + val diagnostic = diagnose(m, accessors, nonPrivateMembers, fastDiagnostics) + if (diagnostic == NoError) Nil + else { + val s0a = infoString0(m, showLocation = false) + fullyInitializeSymbol(m) + val s0b = m.defString + val s1 = m.defStringSeenAs(clazz.tpe_*.memberType(m)) + val implMsg = if (s1 != s0a) s"implements `$s0a`" else if (s1 != s0b) s"implements `$s0b`" else "" + val spacer = if (diagnostic.nonEmpty && implMsg.nonEmpty) "; " else "" + val comment = if (diagnostic.nonEmpty || implMsg.nonEmpty) s" // $implMsg$spacer$diagnostic" else "" + s"$s1 = ???$comment" :: Nil + } + } + } + var count = 0 + def isMulti = count > 1 + def helpfulListing = + byOwner.sortBy(_._1.name.toString).flatMap { + case (owner, members) => + val ms = membersStrings(members) :+ "" + count += ms.size - 1 + if (announceOwner) s"// Members declared in ${owner.fullName}" :: ms else ms + }.init.map(s => s" $s\n").mkString + val stubs = helpfulListing + def singleParent = if (byOwner.size == 1 && byOwner.head._1 != clazz) s" member${if (isMulti) "s" else ""} of ${byOwner.head._1}" else "" + val line0 = + if (isMulti) s"Missing implementations for ${count}${val p = singleParent ; if (p.isEmpty) " members" else p}." + else s"Missing implementation${val p = singleParent ; if (p.isEmpty) p else s" for$p"}:" + abstractClassError(line0, supplement = stubs) + } + def filtered[A](it: Iterator[A])(p: A => Boolean)(q: A => Boolean): (List[A], List[A]) = { + var ps, qs: List[A] = Nil + while (it.hasNext) { + val a = it.next() + if (p(a)) ps ::= a + else if (q(a)) qs ::= a + } + (ps, qs) + } + val nonPrivateMembers = clazz.info.nonPrivateMembersAdmitting(VBRIDGE) + // Avoid extra allocations with reverseIterator. 
Filter for abstract members of interest, and bad abstract override. + val (missing, abstractIncomplete): (List[Symbol], List[Symbol]) = + filtered(nonPrivateMembers.reverseIterator)(m => m.isDeferred & !ignoreDeferred(m))(m => m.isAbstractOverride && m.isIncompleteIn(clazz)) + if (missing.nonEmpty) emitErrors(missing, nonPrivateMembers) // Check the remainder for invalid absoverride. - for (member <- rest ; if (member.isAbstractOverride && member.isIncompleteIn(clazz))) { - val other = member.superSymbolIn(clazz) - val explanation = - if (other != NoSymbol) " and overrides incomplete superclass member " + infoString(other) - else ", but no concrete implementation could be found in a base class" - - abstractClassError(true, infoString(member) + " is marked `abstract' and `override'" + explanation) + for (member <- abstractIncomplete) { + val explanation = member.superSymbolIn(clazz) match { + case NoSymbol => ", but no concrete implementation could be found in a base class" + case other => " and overrides incomplete superclass member\n" + infoString(other) + } + abstractClassError(s"${infoString(member)} is marked `abstract` and `override`$explanation", mustBeMixin = true) } - } + } // end checkNoAbstractMembers // 3. Check that concrete classes do not have deferred definitions // that are not implemented in a subclass. @@ -757,14 +836,13 @@ abstract class RefChecks extends Transform { // class D extends C { def m: Int } // // (3) is violated but not (2). 
- def checkNoAbstractDecls(bc: Symbol) { + def checkNoAbstractDecls(bc: Symbol): Unit = { for (decl <- bc.info.decls) { if (decl.isDeferred && !ignoreDeferred(decl)) { val impl = decl.matchingSymbol(clazz.thisType, admit = VBRIDGE) - if (impl == NoSymbol || (decl.owner isSubClass impl.owner)) { - abstractClassError(false, "there is a deferred declaration of "+infoString(decl)+ - " which is not implemented in a subclass"+analyzer.abstractVarMessage(decl)) - } + if (impl == NoSymbol || decl.owner.isSubClass(impl.owner)) + abstractClassError(s"No implementation found in a subclass for deferred declaration\n" + + s"${infoString(decl)}${analyzer.abstractVarMessage(decl)}") } } if (bc.superClass hasFlag ABSTRACT) @@ -778,7 +856,7 @@ abstract class RefChecks extends Transform { if (abstractErrors.nonEmpty) reporter.error(clazz.pos, abstractErrorMessage) } - else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) { + else if (clazz.isTrait && !clazz.isSubClass(AnyValClass)) { // For non-AnyVal classes, prevent abstract methods in interfaces that override // final members in Object; see #4431 for (decl <- clazz.info.decls) { @@ -800,10 +878,8 @@ abstract class RefChecks extends Transform { lazy val varargsType = toJavaRepeatedParam(member.tpe) def isSignatureMatch(sym: Symbol) = !sym.isTerm || { - val symtpe = clazz.thisType memberType sym - def matches(tp: Type) = tp matches symtpe - - matches(member.tpe) || (isVarargs && matches(varargsType)) + val symtpe = clazz.thisType.memberType(sym) + member.tpe.matches(symtpe) || (isVarargs && varargsType.matches(symtpe)) } /* The rules for accessing members which have an access boundary are more * restrictive in java than scala. 
Since java has no concept of package nesting, @@ -828,15 +904,16 @@ abstract class RefChecks extends Transform { || sym.isProtected // marked protected in java, thus accessible to subclasses || sym.privateWithin == member.enclosingPackageClass // exact package match ) - def classDecls = inclazz.info.nonPrivateDecl(member.name) - def matchingSyms = classDecls filter (sym => isSignatureMatch(sym) && javaAccessCheck(sym)) + def classDecl = inclazz.info.nonPrivateDecl(member.name) + .orElse(inclazz.info.nonPrivateDecl(member.unexpandedName)) + def matchingSyms = classDecl.filter(sym => isSignatureMatch(sym) && javaAccessCheck(sym)) (inclazz != clazz) && (matchingSyms != NoSymbol) } // 4. Check that every defined member with an `override` modifier overrides some other member. for (member <- clazz.info.decls) - if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) { + if (member.isAnyOverride && !clazz.thisType.baseClasses.exists(hasMatchingSym(_, member))) { // for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal) @@ -850,7 +927,21 @@ abstract class RefChecks extends Transform { } member resetFlag (OVERRIDE | ABSOVERRIDE) // Any Override } - } + + // 5. 
Check that the nested class do not shadow other nested classes from outer class's parent + def checkNestedClassShadow(): Unit = + if (clazz.isNestedClass && !clazz.isModuleClass) { + val overridden = clazz.owner.ancestors + .map(a => clazz.matchingSymbol(a, clazz.owner.thisType)) + .filter(c => c.exists && c.isClass) + overridden foreach { sym2 => + def msg(what: String) = s"shadowing a nested class of a parent is $what but $clazz shadows $sym2 defined in ${sym2.owner}; rename the class to something else" + if (currentRun.isScala3) runReporting.warning(clazz.pos, msg("deprecated"), WarningCategory.Scala3Migration, clazz) + else runReporting.deprecationWarning(clazz.pos, clazz, currentOwner, msg("deprecated"), "2.13.2") + } + } + checkNestedClassShadow() + } // end checkAllOverrides // Basetype Checking -------------------------------------------------------- @@ -861,30 +952,28 @@ abstract class RefChecks extends Transform { * * */ - private def validateBaseTypes(clazz: Symbol) { + private def validateBaseTypes(clazz: Symbol): Unit = { val seenParents = mutable.HashSet[Type]() - val seenTypes = new Array[List[Type]](clazz.info.baseTypeSeq.length) - for (i <- 0 until seenTypes.length) - seenTypes(i) = Nil + val seenTypes = Array.fill[List[Type]](clazz.info.baseTypeSeq.length)(Nil) + val warnCloneable = settings.warnCloneableObject && clazz.isModuleClass /* validate all base types of a class in reverse linear order. 
*/ def register(tp: Type): Unit = { -// if (clazz.fullName.endsWith("Collection.Projection")) -// println("validate base type "+tp) val baseClass = tp.typeSymbol if (baseClass.isClass) { if (!baseClass.isTrait && !baseClass.isJavaDefined && !currentRun.compiles(baseClass) && !separatelyCompiledScalaSuperclass.contains(baseClass)) separatelyCompiledScalaSuperclass.update(baseClass, ()) val index = clazz.info.baseTypeIndex(baseClass) if (index >= 0) { - if (seenTypes(index) forall (tp1 => !(tp1 <:< tp))) - seenTypes(index) = - tp :: (seenTypes(index) filter (tp1 => !(tp <:< tp1))) + if (!seenTypes(index).exists(_ <:< tp)) + seenTypes(index) = tp :: seenTypes(index).filterNot(tp <:< _) } } - val remaining = tp.parents filterNot seenParents + if (warnCloneable && baseClass.eq(JavaCloneableClass)) + refchecksWarning(clazz.pos, s"$clazz should not extend Cloneable.", WarningCategory.LintCloneable) + val remaining = tp.parents.filterNot(seenParents) seenParents ++= remaining - remaining foreach register + remaining.foreach(register) } register(clazz.tpe) for (i <- 0 until seenTypes.length) { @@ -892,12 +981,12 @@ abstract class RefChecks extends Transform { seenTypes(i) match { case Nil => devWarning(s"base $baseClass not found in basetypes of $clazz. 
This might indicate incorrect caching of TypeRef#parents.") - case _ :: Nil => - ;// OK + case _ :: Nil => // OK case tp1 :: tp2 :: _ => - reporter.error(clazz.pos, "illegal inheritance;\n " + clazz + - " inherits different type instances of " + baseClass + - ":\n" + tp1 + " and " + tp2) + reporter.error(clazz.pos, + sm"""|illegal inheritance; + | $clazz inherits different type instances of $baseClass: + |$tp1 and $tp2""") explainTypes(tp1, tp2) explainTypes(tp2, tp1) } @@ -911,9 +1000,9 @@ abstract class RefChecks extends Transform { case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner) case _ => "type "+tp } - override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) { + override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance, tpe: Type): Unit = { reporter.error(base.pos, - s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base") + s"${sym.variance} $sym occurs in $required position in ${tpString(tpe)} of $base") } } @@ -929,15 +1018,15 @@ abstract class RefChecks extends Transform { private var currentLevel: LevelInfo = null private val symIndex = perRunCaches.newMap[Symbol, Int]() - private def pushLevel() { + private def pushLevel(): Unit = { currentLevel = new LevelInfo(currentLevel) } - private def popLevel() { + private def popLevel(): Unit = { currentLevel = currentLevel.outer } - private def enterSyms(stats: List[Tree]) { + private def enterSyms(stats: List[Tree]): Unit = { var index = -1 for (stat <- stats) { index = index + 1 @@ -951,7 +1040,7 @@ abstract class RefChecks extends Transform { } } - private def enterReference(pos: Position, sym: Symbol) { + private def enterReference(pos: Position, sym: Symbol): Unit = { if (sym.isLocalToBlock) { val e = currentLevel.scope.lookupEntry(sym.name) if ((e ne null) && sym == e.sym) { @@ -972,8 +1061,8 @@ abstract class RefChecks extends Transform { def apply(tp: Type) = mapOver(tp).normalize } - def 
checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.warnOptionImplicit) (fn, args) match { - case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply => + def checkImplicitViewOptionApply(pos: Position, fun: Tree, argss: List[List[Tree]]): Unit = if (settings.warnOptionImplicit) argss match { + case List(List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply => refchecksWarning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.", WarningCategory.LintOptionImplicit) // scala/bug#6567 case _ => } @@ -988,7 +1077,7 @@ abstract class RefChecks extends Transform { * * NOTE: I'm really not convinced by the logic here. I also think this would work better after erasure. */ - private def checkSensibleEquals(pos: Position, qual: Tree, name: Name, sym: Symbol, other: Tree) = { + private def checkSensibleEquals(pos: Position, qual: Tree, name: Name, sym: Symbol, other: Tree): Unit = { def isReferenceOp = sym == Object_eq || sym == Object_ne def isNew(tree: Tree) = tree match { case Function(_, _) | Apply(Select(New(_), nme.CONSTRUCTOR), _) => true @@ -1004,14 +1093,18 @@ abstract class RefChecks extends Transform { def onTrees[T](f: List[Tree] => T) = f(List(qual, other)) def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual)) + // many parts of the implementation assume that `actual` and `receiver` are one `ClassSymbol` + // to support intersection types we'd need to work with lists of class symbols + if (onSyms(_.exists(_.isRefinementClass))) return + // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol` - def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(other.tpe.widen) + def typesString = s"${normalizeAll(qual.tpe.widen)} and ${normalizeAll(other.tpe.widen)}" // TODO: this should probably be used in more type 
comparisons in checkSensibleEquals def erasedClass(tp: Type) = erasure.javaErasure(tp).typeSymbol /* Symbols which limit the warnings we can issue since they may be value types */ - val couldBeAnything = Set[Symbol](ObjectClass, ComparableClass, JavaSerializableClass) + val couldBeAnything = Set[Symbol](ObjectClass, ComparableClass, SerializableClass) def isMaybeValue(sym: Symbol): Boolean = couldBeAnything(erasedClass(sym.tpe)) // Whether def equals(other: Any) has known behavior: it is the default @@ -1020,7 +1113,7 @@ abstract class RefChecks extends Transform { // equals. def isUsingWarnableEquals = { val m = receiver.info.member(nme.equals_) - ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m)) + (m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m) } def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_)) @@ -1045,7 +1138,7 @@ abstract class RefChecks extends Transform { def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s) // used to short-circuit unrelatedTypes check if both sides are special def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s) - val nullCount = onSyms(_ filter (_ == NullClass) size) + val nullCount = onSyms(_.filter(_ == NullClass).size) def isNonsenseValueClassCompare = ( !haveSubclassRelationship && isUsingDefaultScalaOp @@ -1053,6 +1146,17 @@ abstract class RefChecks extends Transform { && !isCaseEquals ) + def isCollection(s: Symbol) = s.isNonBottomSubClass(IterableClass) + lazy val SeqClass = rootMirror.getClassIfDefined("scala.collection.Seq") + lazy val SetClass = rootMirror.getClassIfDefined("scala.collection.Set") + lazy val MapClass = rootMirror.getClassIfDefined("scala.collection.Map") + def collectionBase(s: Symbol) = { + if (s.isNonBottomSubClass(SeqClass)) SeqClass + else if (s.isNonBottomSubClass(SetClass)) SetClass + else if (s.isNonBottomSubClass(MapClass)) 
MapClass + else NoSymbol + } + def isEffectivelyFinalDeep(sym: Symbol): Boolean = ( sym.isEffectivelyFinal // If a parent of an intersection is final, the resulting type must effectively be final. @@ -1107,14 +1211,28 @@ abstract class RefChecks extends Transform { if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc" nonSensiblyNeq() } - else if (isWarnable && !isCaseEquals) { + else if (receiver == StringClass) { + if (!haveSubclassRelationship) + nonSensiblyNeq() + } + else if (isCollection(receiver)) { + val rBase = collectionBase(receiver) + val aBase = collectionBase(actual) + if (rBase != NoSymbol) { + if (aBase != NoSymbol && rBase != aBase) + nonSensiblyNeq() + else if (!isCollection(actual) && !haveSubclassRelationship) + nonSensiblyNeq() + } + } + else if (isWarnable && !isCaseEquals) { // case equals is handled below if (isNew(qual)) // new X == y nonSensiblyNew() else if (isNew(other) && (isEffectivelyFinalDeep(receiver) || isReferenceOp)) // object X ; X == new Y nonSensiblyNew() else if (isEffectivelyFinalDeep(actual) && isEffectivelyFinalDeep(receiver) && !haveSubclassRelationship) { // object X, Y; X == Y if (isEitherNullable) - nonSensible("non-null ", false) + nonSensible("non-null ", alwaysEqual = false) else nonSensiblyNeq() } @@ -1134,7 +1252,7 @@ abstract class RefChecks extends Transform { // better to have lubbed and lost // We erase the lub because the erased type is closer to what happens at run time. // Also, the lub of `S` and `String` is, weirdly, the refined type `Serializable{}` (for `class S extends Serializable`), - // which means we can't just take its type symbol and look it up in our isMaybeValue Set. Erasure restores sanity. + // which means we can't just take its type symbol and look it up in our isMaybeValue Set. 
val commonRuntimeClass = erasedClass(global.lub(List(actual.tpe, receiver.tpe))) if (commonRuntimeClass == ObjectClass) unrelatedTypes() @@ -1146,7 +1264,7 @@ abstract class RefChecks extends Transform { if (isCaseEquals) { def thisCase = receiver.info.member(nme.equals_).owner actual.info.baseClasses.find(_.isCase) match { - case Some(p) if p != thisCase => nonSensible("case class ", false) + case Some(p) if p != thisCase => nonSensible("case class ", alwaysEqual = false) case None => // stronger message on (Some(1) == None) //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq() @@ -1190,20 +1308,22 @@ abstract class RefChecks extends Transform { } /** Sensibility check examines flavors of equals. */ - def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match { - case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) && (!currentOwner.isSynthetic || currentOwner.isAnonymousFunction) => - checkSensibleEquals(pos, qual, name, fn.symbol, args.head) - case Select(qual, name@nme.equals_) if settings.isScala213 && args.length == 1 && (!currentOwner.isSynthetic || currentOwner.isAnonymousFunction) => - checkSensibleAnyEquals(pos, qual, name, fn.symbol, args.head) + def checkSensible(pos: Position, fn: Tree, argss: List[List[Tree]]) = (fn, argss) match { + case (Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)), List(List(arg))) + if isObjectOrAnyComparisonMethod(fn.symbol) && (!currentOwner.isSynthetic || currentOwner.isAnonymousFunction) => + checkSensibleEquals(pos, qual, name, fn.symbol, arg) + case (Select(qual, name @ nme.equals_), List(List(arg))) + if !currentOwner.isSynthetic || currentOwner.isAnonymousFunction => + checkSensibleAnyEquals(pos, qual, name, fn.symbol, arg) case _ => } // scala/bug#6276 warn for trivial recursion, such as `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think. 
// TODO: Move to abide rule. Also, this does not check that the def is final or not overridden, for example def checkInfiniteLoop(sym: Symbol, rhs: Tree): Unit = - if (!sym.isValueParameter && sym.paramss.isEmpty) { + if (!sym.isValueParameter && sym.paramss.forall(_.isEmpty)) { rhs match { - case t@(Ident(_) | Select(This(_), _)) if t hasSymbolWhich (_.accessedOrSelf == sym) => + case Ident(_) | Select(This(_), _) | Apply(Select(This(_), _), _) if rhs hasSymbolWhich (_.accessedOrSelf == sym) => refchecksWarning(rhs.pos, s"${sym.fullLocationString} does nothing other than call itself recursively", WarningCategory.Other) case _ => } @@ -1211,20 +1331,6 @@ abstract class RefChecks extends Transform { // Transformation ------------------------------------------------------------ - /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */ - def toConstructor(pos: Position, tpe: Type): Tree = { - val rtpe = tpe.finalResultType - assert(rtpe.typeSymbol hasFlag CASE, tpe) - val tree = localTyper.typedOperator { - atPos(pos) { - Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor) - } - } - checkUndesiredProperties(rtpe.typeSymbol, tree.pos) - checkUndesiredProperties(rtpe.typeSymbol.primaryConstructor, tree.pos) - tree - } - override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = { pushLevel() try { @@ -1238,14 +1344,18 @@ abstract class RefChecks extends Transform { finally popLevel() } + private def showCurrentRef: String = { + val refsym = currentLevel.refsym + s"$refsym defined on line ${refsym.pos.line}" + } + def transformStat(tree: Tree, index: Int): Tree = tree match { case t if treeInfo.isSelfConstrCall(t) => assert(index == 0, index) try transform(tree) finally if (currentLevel.maxindex > 0) { - // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see scala/bug#4717 - debuglog("refsym = " + currentLevel.refsym) - reporter.error(currentLevel.refpos, "forward reference 
not allowed from self constructor invocation") + // An implementation restriction to avoid VerifyErrors and lazy vals mishaps; see scala/bug#4717 + reporter.error(currentLevel.refpos, s"forward reference to $showCurrentRef not allowed from self constructor invocation") } case ValDef(_, _, _, _) => val tree1 = transform(tree) // important to do before forward reference check @@ -1253,8 +1363,7 @@ abstract class RefChecks extends Transform { else { val sym = tree.symbol if (sym.isLocalToBlock && index <= currentLevel.maxindex) { - debuglog("refsym = " + currentLevel.refsym) - reporter.error(currentLevel.refpos, "forward reference extends over definition of " + sym) + reporter.error(currentLevel.refpos, s"forward reference to $showCurrentRef extends over definition of $sym") } tree1 } @@ -1269,7 +1378,7 @@ abstract class RefChecks extends Transform { catch { case ex: TypeError => reporter.error(tree0.pos, ex.getMessage()) - if (settings.explaintypes) { + if (settings.explaintypes.value) { val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds) foreach2(argtps, bounds)((targ, bound) => explainTypes(bound.lo, targ)) foreach2(argtps, bounds)((targ, bound) => explainTypes(targ, bound.hi)) @@ -1281,7 +1390,7 @@ abstract class RefChecks extends Transform { clazz == seltpe.typeSymbol && clazz.isCaseClass && (args corresponds clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes)(isIrrefutable) - case Typed(pat, tpt) => + case Typed(_, tpt) => seltpe <:< tpt.tpe case Ident(tpnme.WILDCARD) => true @@ -1296,11 +1405,13 @@ abstract class RefChecks extends Transform { // I assume that's a consequence of some code trying to avoid noise by suppressing // warnings after the first, but I think it'd be better if we didn't have to // arbitrarily choose one as more important than the other. 
- private def checkUndesiredProperties(sym: Symbol, pos: Position) { - // If symbol is deprecated, and the point of reference is not enclosed - // in either a deprecated member or a scala bridge method, issue a warning. - // TODO: x.hasBridgeAnnotation doesn't seem to be needed here... - if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) + private def checkUndesiredProperties(sym: Symbol, pos: Position): Unit = { + // Issue a warning if symbol is deprecated, unless the point of reference is enclosed by a deprecated member, + // or has a deprecated companion. + if (sym.isDeprecated && + // synthetic calls to deprecated case class constructor + !(sym.isConstructor && sym.owner.isCaseClass && currentOwner.isSynthetic) && + !currentOwner.ownersIterator.exists(_.isDeprecated)) runReporting.deprecationWarning(pos, sym, currentOwner) // Similar to deprecation: check if the symbol is marked with @migration @@ -1317,9 +1428,14 @@ abstract class RefChecks extends Transform { if (changed) refchecksWarning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}", WarningCategory.OtherMigration) } + if (sym.isExperimental && !currentOwner.ownerChain.exists(x => x.isExperimental)) { + val msg = + s"${sym.fullLocationString} is marked @experimental and therefore its enclosing scope must be experimental." + reporter.error(pos, msg) + } // See an explanation of compileTimeOnly in its scaladoc at scala.annotation.compileTimeOnly. 
// async/await is expanded after erasure - if (sym.isCompileTimeOnly && !currentOwner.ownerChain.exists(x => x.isCompileTimeOnly)) { + if (sym.isCompileTimeOnly && !inAnnotation && !currentOwner.ownerChain.exists(x => x.isCompileTimeOnly)) { if (!async.deferCompileTimeOnlyError(sym)) { def defaultMsg = sm"""Reference to ${sym.fullLocationString} should not have survived past type checking, @@ -1346,26 +1462,21 @@ abstract class RefChecks extends Transform { && !otherSym.isProtected && !otherSym.isTypeParameterOrSkolem && !otherSym.isExistentiallyBound - && (otherSym isLessAccessibleThan memberSym) - && (otherSym isLessAccessibleThan memberSym.enclClass) + && memberSym.ownersIterator.forall(otherSym.isLessAccessibleThan(_)) ) private def lessAccessibleSymsInType(other: Type, memberSym: Symbol): List[Symbol] = { - val res: ListBuffer[Symbol] = ListBuffer.empty[Symbol] - def loop(tp: Type): Unit = { - if (lessAccessible(tp.typeSymbol, memberSym)) - res += tp.typeSymbol - tp match { + val extras = other match { + case TypeRef(pre, _, args) => // checking the prefix here gives us spurious errors on e.g. a private[process] // object which contains a type alias, which normalizes to a visible type. - case TypeRef(pre, _, args) => - args foreach { arg => if (arg ne NoPrefix) loop(arg) } - case _ => () - } + args.filterNot(_ eq NoPrefix).flatMap(lessAccessibleSymsInType(_, memberSym)) + case _ => + Nil } - loop(other) - res.toList + if (lessAccessible(other.typeSymbol, memberSym)) other.typeSymbol :: extras + else extras } - private def warnLessAccessible(otherSym: Symbol, memberSym: Symbol) { + private def warnLessAccessible(otherSym: Symbol, memberSym: Symbol): Unit = { val comparison = accessFlagsToString(memberSym) match { case "" => "" case acc => " is " + acc + " but" @@ -1384,17 +1495,17 @@ abstract class RefChecks extends Transform { /** Warn about situations where a method signature will include a type which * has more restrictive access than the method itself. 
*/ - private def checkAccessibilityOfReferencedTypes(tree: Tree) { + private def checkAccessibilityOfReferencedTypes(tree: Tree): Unit = { val member = tree.symbol - def checkAccessibilityOfType(tpe: Type) { + def checkAccessibilityOfType(tpe: Type): Unit = { val inaccessible = lessAccessibleSymsInType(tpe, member) // if the unnormalized type is accessible, that's good enough if (inaccessible.isEmpty) () // or if the normalized type is, that's good too else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) () // otherwise warn about the inaccessible syms in the unnormalized type - else inaccessible foreach (sym => warnLessAccessible(sym, member)) + else inaccessible.foreach(warnLessAccessible(_, member)) } // types of the value parameters @@ -1403,21 +1514,11 @@ abstract class RefChecks extends Transform { member.typeParams.foreach(tp => checkAccessibilityOfType(tp.info.upperBound.widen)) } - private def checkByNameRightAssociativeDef(tree: DefDef) { - tree match { - case DefDef(_, name, _, params :: _, _, _) => - if (settings.warnByNameRightAssociative && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol))) - refchecksWarning(tree.pos, - "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see scala/bug#1980.", WarningCategory.Other) - case _ => - } - } - /** Check that a deprecated val or def does not override a * concrete, non-deprecated method. If it does, then * deprecation is meaningless. 
*/ - private def checkDeprecatedOvers(tree: Tree) { + private def checkDeprecatedOvers(tree: Tree): Unit = { val symbol = tree.symbol if (symbol.isDeprecated) { val concrOvers = @@ -1431,30 +1532,63 @@ abstract class RefChecks extends Transform { s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}", "") } } - private def isRepeatedParamArg(tree: Tree) = currentApplication match { - case Apply(fn, args) => - ( args.nonEmpty - && (args.last eq tree) - && (fn.tpe.params.length == args.length) - && isRepeatedParamType(fn.tpe.params.last.tpe) - ) - case _ => - false + private def checkRepeatedParamArg(tree: Tree): Unit = { + val bailure = "such annotations are only allowed in arguments to *-parameters" + val err = currentApplication match { + case Apply(fn, args) => + val ok = ( args.nonEmpty + && (args.last eq tree) + && (fn.tpe.params.length == args.length) + && isRepeatedParamType(fn.tpe.params.last.tpe) + ) + if (ok) null + else if (!args.exists(tree.eq)) bailure + else { + val i = args.indexWhere(tree.eq) + val isLast = i == args.length - 1 + val formal = if (i >= fn.tpe.params.length - 1) fn.tpe.params.last.tpe else fn.tpe.params(i).tpe + val isRepeated = isRepeatedParamType(formal) + val lastly = if (!isLast) ";\nsequence argument must be the last argument" else "" + val solely = if (fn.tpe.params.length == 1) "single" else "corresponding" + if (isRepeated) + s"it is not the only argument to be passed to the $solely repeated parameter $formal$lastly" + else + s"the $solely parameter has type $formal which is not a repeated parameter type$lastly" + } + case _ => bailure + } + if (err != null) + reporter.error(tree.pos, s"Sequence argument type annotation `: _*` cannot be used here:\n$err") } private object RefCheckTypeMap extends TypeMap { - object ExistentialToWildcard extends TypeMap { - override def apply(tpe: Type): Type = - if (tpe.typeSymbol.isExistential) WildcardType else mapOver(tpe) + object 
UnboundExistential extends TypeMap { + private[this] val bound = mutable.Set.empty[Symbol] + + def toWildcardIn(tpe: Type): Type = + try apply(tpe) finally bound.clear() + + override def apply(tpe: Type): Type = tpe match { + case ExistentialType(quantified, _) => + bound ++= quantified + tpe.mapOver(this) + case tpe => + val sym = tpe.typeSymbol + if (sym.isExistential && !bound(sym)) WildcardType + else tpe.mapOver(this) + } } + private[this] var inPattern = false private[this] var skipBounds = false private[this] var tree: Tree = EmptyTree - def check(tpe: Type, tree: Tree): Type = { + def check(tpe: Type, tree: Tree, inPattern: Boolean = false): Type = { + this.inPattern = inPattern this.tree = tree try apply(tpe) finally { - skipBounds = false + this.inPattern = false + this.skipBounds = false this.tree = EmptyTree } } @@ -1467,40 +1601,57 @@ abstract class RefChecks extends Transform { // which might not conform to the constraints. val savedSkipBounds = skipBounds skipBounds = true - try mapOver(tpe).filterAnnotations(_.symbol != UncheckedBoundsClass) + try tpe.mapOver(this).filterAnnotations(_.symbol != UncheckedBoundsClass) finally skipBounds = savedSkipBounds case tpe: TypeRef => - checkTypeRef(ExistentialToWildcard(tpe)) - mapOver(tpe) + if (!inPattern) checkTypeRef(UnboundExistential.toWildcardIn(tpe)) + checkUndesired(tpe.sym) + tpe.mapOver(this) case tpe => - mapOver(tpe) + tpe.mapOver(this) } private def checkTypeRef(tpe: Type): Unit = tpe match { case TypeRef(pre, sym, args) => - tree match { - // scala/bug#7783 don't warn about inferred types - // FIXME: reconcile this check with one in resetAttrs - case tree: TypeTree if tree.original == null => - case tree => checkUndesiredProperties(sym, tree.pos) - } if (sym.isJavaDefined) sym.typeParams.foreach(_.cookJavaRawInfo()) if (!tpe.isHigherKinded && !skipBounds) checkBounds(tree, pre, sym.owner, sym.typeParams, args) case _ => } + + private def checkUndesired(sym: Symbol): Unit = tree match { + // 
scala/bug#7783 don't warn about inferred types + // FIXME: reconcile this check with one in resetAttrs + case tree: TypeTree if tree.original == null => + case tree => checkUndesiredProperties(sym, tree.pos) + } } private def applyRefchecksToAnnotations(tree: Tree): Unit = { + def checkVarArgs(tp: Type, tree: Tree): Unit = tp match { + case TypeRef(_, VarargsClass, _) => + tree match { + case tt: TypeTree if tt.original == null => // same exception as in checkTypeRef + case _: DefDef => + case _ => reporter.error(tree.pos, s"Only methods can be marked @varargs") + } + case _ => + } def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = if (annots.isEmpty) Nil else { annots.foreach { ann => - RefCheckTypeMap.check(ann.tpe, tree) + checkVarArgs(ann.atp, tree) + RefCheckTypeMap.check(ann.atp, tree) + if (ann.original != null && ann.original.hasExistingSymbol) + checkUndesiredProperties(ann.original.symbol, tree.pos) } - val annotsBySymbol = new mutable.LinkedHashMap[Symbol, ListBuffer[AnnotationInfo]]() - annots foreach { annot => - val transformedAnnot = annot.transformArgs(transformTrees) + val transformedAnnots = { + val saved = inAnnotation + inAnnotation = true + try annots.map(_.transformArgs(transformTrees)) finally inAnnotation = saved + } + for (transformedAnnot <- transformedAnnots) { val buffer = annotsBySymbol.getOrElseUpdate(transformedAnnot.symbol, new ListBuffer) buffer += transformedAnnot } @@ -1509,9 +1660,10 @@ abstract class RefChecks extends Transform { // assumes non-empty `anns` def groupRepeatableAnnotations(sym: Symbol, anns: List[AnnotationInfo]): List[AnnotationInfo] = - if (!(sym.isJavaDefined && sym.isSubClass(ClassfileAnnotationClass))) anns else anns match { - case single :: Nil => anns - case multiple => + if (!sym.isJavaDefined) anns + else anns match { + case single @ _ :: Nil => single + case multiple => sym.getAnnotation(AnnotationRepeatableAttr) match { case Some(repeatable) => repeatable.assocs.collectFirst { @@ 
-1526,43 +1678,59 @@ abstract class RefChecks extends Transform { devWarning(s"@Repeatable $sym had no containing class") multiple } - case None => reporter.error(tree.pos, s"$sym may not appear multiple times on ${tree.symbol}") multiple } } - def checkIsElisible(sym: Symbol) = if (sym ne null) sym.elisionLevel.foreach { level => - if (!sym.isMethod || sym.isAccessor || sym.isLazy || sym.isDeferred) - reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") + def checkIsElidable(sym: Symbol): Unit = if (sym ne null) sym.elisionLevel.foreach { _ => + if (!sym.isMethod || sym.isAccessor || sym.isLazy || sym.isDeferred) { + val rest = if (sym.isDeferred) " The annotation affects only the annotated method, not overriding methods in subclasses." else "" + reporter.error(sym.pos, s"${sym.name}: Only concrete methods can be marked @elidable.$rest") + } } - if (currentRun.isScala213) checkIsElisible(tree.symbol) + checkIsElidable(tree.symbol) - tree match { - case m: MemberDef => - val sym = m.symbol - sym.setAnnotations(applyChecks(sym.annotations)) + def checkMember(sym: Symbol): Unit = { + sym.setAnnotations(applyChecks(sym.annotations)) + // validate implicitNotFoundMessage and implicitAmbiguousMessage + if (settings.lintImplicitNotFound) { def messageWarning(name: String)(warn: String) = refchecksWarning(tree.pos, s"Invalid $name message for ${sym}${sym.locationString}:\n$warn", WarningCategory.LintImplicitNotFound) - - // validate implicitNotFoundMessage and implicitAmbiguousMessage analyzer.ImplicitNotFoundMsg.check(sym) foreach messageWarning("implicitNotFound") analyzer.ImplicitAmbiguousMsg.check(sym) foreach messageWarning("implicitAmbiguous") + } + + if (settings.warnSerialization && sym.isClass && sym.hasAnnotation(SerialVersionUIDAttr)) { + def warn(what: String) = + refchecksWarning(tree.pos, s"@SerialVersionUID has no effect on $what", WarningCategory.LintSerial) + + if (sym.isTrait) warn("traits") + else if (!sym.isSerializable) 
warn("non-serializable classes") + } + if (!sym.isMethod && !sym.isConstructor) + checkNoThrows(sym.annotations) + } + def checkNoThrows(anns: List[AnnotationInfo]): Unit = + if (anns.exists(_.symbol == ThrowsClass)) + reporter.error(tree.pos, s"`@throws` only allowed for methods and constructors") + tree match { + case m: MemberDef => + checkMember(m.symbol) case tpt@TypeTree() => - if (tpt.original != null) { - tpt.original foreach { - case dc@TypeTreeWithDeferredRefCheck() => + if (tpt.original != null) + tpt.original.foreach { + case dc: TypeTreeWithDeferredRefCheck => applyRefchecksToAnnotations(dc.check()) // #2416 case _ => } - } - if (!inPattern) - tree.setType(tree.tpe map { + tree.setType(tree.tpe.map { case AnnotatedType(anns, ul) => + checkNoThrows(anns) AnnotatedType(applyChecks(anns), ul) case tp => tp }) @@ -1570,139 +1738,122 @@ abstract class RefChecks extends Transform { } } - private def isSimpleCaseApply(tree: Tree): Boolean = { - val sym = tree.symbol - def isClassTypeAccessible(tree: Tree): Boolean = tree match { - case TypeApply(fun, targs) => - isClassTypeAccessible(fun) - case Select(module, apply) => - ( // scala/bug#4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`; - // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`. - treeInfo.isQualifierSafeToElide(module) && - // scala/bug#5626 Classes in refinement types cannot be constructed with `new`. In this case, - // the companion class is actually not a ClassSymbol, but a reference to an abstract type. - module.symbol.companionClass.isClass - ) - } + private def isSimpleCaseApply(fun: Tree): Boolean = { + val sym = fun.symbol + def isClassTypeAccessible = { + val Select(module, _) = fun: @unchecked + // scala/bug#4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`; + // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`. 
+ treeInfo.isQualifierSafeToElide(module) && + // scala/bug#5626 Classes in refinement types cannot be constructed with `new`. + !module.exists { case t @ Select(_, _) => t.symbol != null && t.symbol.isStructuralRefinementMember case _ => false } + } sym.name == nme.apply && - !(sym hasFlag STABLE) && // ??? - sym.isCase && - isClassTypeAccessible(tree) && - !tree.tpe.finalResultType.typeSymbol.primaryConstructor.isLessAccessibleThan(tree.symbol) + sym.isCase && // only synthetic case apply methods + isClassTypeAccessible && + !sym.tpe.finalResultType.typeSymbol.primaryConstructor.isLessAccessibleThan(sym) } - private def transformCaseApply(tree: Tree) = { - def loop(t: Tree): Unit = t match { - case Ident(_) => - checkUndesiredProperties(t.symbol, t.pos) - case Select(qual, _) => - checkUndesiredProperties(t.symbol, t.pos) - loop(qual) - case _ => - } - - tree foreach { - case i@Ident(_) => - enterReference(i.pos, i.symbol) // scala/bug#5390 need to `enterReference` for `a` in `a.B()` - case _ => + private def transformCaseApply(tpe: Type, pos: Position) = { + val rtpe = tpe.finalResultType + assert(rtpe.typeSymbol hasFlag CASE, tpe) + localTyper.typedOperator { + atPos(pos) { + Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor) + } } - loop(tree) - toConstructor(tree.pos, tree.tpe) } - private def transformApply(tree: Apply): Tree = tree match { - case Apply( - Select(qual, nme.withFilter), - List(Function( - List(ValDef(_, pname, tpt, _)), - Match(_, CaseDef(pat1, _, _) :: _)))) - if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) && + private def transformApplication(tree: Tree, fun: Tree, @unused targs: List[Tree], argss: List[List[Tree]]): Tree = { + (fun, argss) match { + case ( + Select(qual, nme.withFilter), + List(List(Function( + List(ValDef(_, pname, tpt, _)), + Match(_, CaseDef(pat1, _, _) :: _))))) + if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) && isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) => - - transform(qual) - 
case StringContextIntrinsic(treated, args) => - val argsIndexed = args.toVector - val concatArgs = ListBuffer[Tree]() - val numLits = treated.length - foreachWithIndex(treated.tail) { (lit, i) => - val treatedContents = lit.asInstanceOf[Literal].value.stringValue - val emptyLit = treatedContents.isEmpty - if (i < numLits - 1) { - concatArgs += argsIndexed(i) - if (!emptyLit) concatArgs += lit - } else if (!emptyLit) { - concatArgs += lit - } - } - def mkConcat(pos: Position, lhs: Tree, rhs: Tree): Tree = - atPos(pos)(gen.mkMethodCall(gen.mkAttributedSelect(lhs, definitions.String_+), rhs :: Nil)).setType(StringTpe) - - var result: Tree = treated.head - val chunkSize = 32 - if (concatArgs.lengthCompare(chunkSize) <= 0) { - concatArgs.foreach { t => - result = mkConcat(t.pos, result, t) - } - } else { - concatArgs.toList.grouped(chunkSize).foreach { - case group => - var chunkResult: Tree = Literal(Constant("")).setType(StringTpe) - group.foreach { t => - chunkResult = mkConcat(t.pos, chunkResult, t) - } - result = mkConcat(chunkResult.pos, result, chunkResult) + qual + case _ => + currentApplication = tree + // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability + // analyses in the pattern matcher + if (!inPattern) { + checkImplicitViewOptionApply(tree.pos, fun, argss) + checkSensible(tree.pos, fun, argss) // TODO: this should move to preEraseApply, as reasoning about runtime semantics makes more sense in the JVM type system + checkNamedBooleanArgs(fun, argss) } - } - - result match { - case ap: Apply => transformApply(ap) - case _ => result - } - case Apply(fn, args) => - // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability - // analyses in the pattern matcher - if (!inPattern) { - checkImplicitViewOptionApply(tree.pos, fn, args) - checkSensible(tree.pos, fn, args) // TODO: this should move to preEraseApply, as reasoning about runtime semantics makes more sense in the JVM type system - } - 
currentApplication = tree - tree + if (isSimpleCaseApply(fun)) { + transform(fun) + val callee = new treeInfo.Applied(tree).callee + val fun1 = transformCaseApply(callee.tpe, callee.pos) + def res(t: Tree): Tree = t match { + case Apply(f, args) => treeCopy.Apply(t, res(f), args) + case _ => fun1 + } + res(tree) + } else + tree + } } - private object StringContextIntrinsic { - def unapply(t: Apply): Option[(List[Tree], List[Tree])] = { - val sym = t.fun.symbol - // symbol check done first for performance - val rd = currentRun.runDefinitions - if (sym == rd.StringContext_s || sym == rd.StringContext_raw) { - t match { - case Apply(fn @ Select(Apply(qual1 @ Select(qual, _), lits), _), args) - if qual1.symbol == rd.StringContext_apply && - treeInfo.isQualifierSafeToElide(qual) && - lits.forall(lit => treeInfo.isLiteralString(lit)) && - lits.length == (args.length + 1) => - val isRaw = sym == rd.StringContext_raw - if (isRaw) Some((lits, args)) - else { - try { - val treated = lits.mapConserve { lit => - val stringVal = lit.asInstanceOf[Literal].value.stringValue - val k = Constant(StringContext.processEscapes(stringVal)) - treeCopy.Literal(lit, k).setType(ConstantType(k)) + /** Check that boolean literals are passed as named args. + * The rule is enforced when the type of the parameter is `Boolean`, + * and there is more than one parameter with an unnamed argument. + * The stricter lint warns for any unnamed argument, + * except that the rule is relaxed when the method has exactly one boolean parameter + * and it is the first parameter, such as `assert(false, msg)`. 
+ */ + private def checkNamedBooleanArgs(fn: Tree, argss: List[List[Tree]]): Unit = { + val sym = fn.symbol + if (settings.warnUnnamedBoolean.value && !sym.isJavaDefined) { + for ((params, args) <- sym.paramLists.zip(argss) if args.nonEmpty) { + val strictly = settings.warnUnnamedStrict.value // warn about any unnamed boolean arg, modulo "assert" + val numBools = params.count(_.tpe == BooleanTpe) + def onlyLeadingBool = numBools == 1 && params.head.tpe == BooleanTpe + val checkable = if (strictly) numBools > 0 && !onlyLeadingBool else numBools >= 2 + if (checkable) { + def isUnnamedArg(t: Tree) = t.hasAttachment[UnnamedArg.type] + def isNameableBoolean(param: Symbol) = param.tpe.typeSymbol == BooleanClass && !param.deprecatedParamName.contains(nme.NO_NAME) + val unnamed = args.lazyZip(params).filter { + case (arg @ Literal(Constant(_: Boolean)), param) => isNameableBoolean(param) && isUnnamedArg(arg) + case _ => false + } + def numSuspicious = unnamed.length + { + analyzer.NamedApplyBlock.namedApplyInfo(currentApplication) match { + case Some(analyzer.NamedApplyInfo(_, _, _, _, original)) => + val treeInfo.Applied(_, _, argss) = original + argss match { + case h :: _ => + val allParams = sym.paramLists.flatten + h.count { + case treeInfo.Applied(getter, _, _) if getter.symbol != null && getter.symbol.isDefaultGetter => + val (_, i) = nme.splitDefaultGetterName(getter.symbol.name) + i > 0 && isNameableBoolean(allParams(i-1)) + case _ => false + } + case _ => 0 } - Some((treated, args)) - } catch { - case _: StringContext.InvalidEscapeException => None - } + case _ => args.count(arg => arg.symbol != null && arg.symbol.isDefaultGetter) + } + } + val warn = !unnamed.isEmpty && (strictly || numSuspicious >= 2) + if (warn) + unnamed.foreach { + case (arg, param) => + val msg = s"Boolean literals should be passed using named argument syntax for parameter ${param.name}." 
+ val action = runReporting.codeAction("name boolean literal", arg.pos.focusStart, s"${param.name} = ", msg) + runReporting.warning(arg.pos, msg, WarningCategory.WFlagUnnamedBooleanLiteral, sym, action) + case _ => } - case _ => None } - } else None + } } } + private def transformSelect(tree: Select): Tree = { - val Select(qual, _) = tree + val Select(qual, name) = tree val sym = tree.symbol checkUndesiredProperties(sym, tree.pos) @@ -1710,39 +1861,35 @@ abstract class RefChecks extends Transform { if (!sym.exists) devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe) - else if (sym.isLocalToThis) - varianceValidator.checkForEscape(sym, currentClass) + + if (name == nme.synchronized_ && isBoxedValueClass(qual.tpe.typeSymbol)) + refchecksWarning(tree.pos, s"Suspicious `synchronized` call involving boxed primitive `${qual.tpe.typeSymbol.name}`", WarningCategory.LintUniversalMethods) def checkSuper(mix: Name) = // term should have been eliminated by super accessors assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY), (qual.symbol, sym, mix)) - // Rewrite eligible calls to monomorphic case companion apply methods to the equivalent constructor call. - // - // Note: for generic case classes the rewrite needs to be handled at the enclosing `TypeApply` to transform - // `TypeApply(Select(C, apply), targs)` to `Select(New(C[targs]), )`. In case such a `TypeApply` - // was deemed ineligible for transformation (e.g. the case constructor was private), the refchecks transform - // will recurse to this point with `Select(C, apply)`, which will have a type `[T](...)C[T]`. - // - // We don't need to perform the check on the Select node, and `!isHigherKinded will guard against this - // redundant (and previously buggy, scala/bug#9546) consideration. 
- if (!tree.tpe.isHigherKinded && isSimpleCaseApply(tree)) { - transformCaseApply(tree) - } else { - qual match { - case Super(_, mix) => checkSuper(mix) - case _ => - } - tree + qual match { + case Super(_, mix) => checkSuper(mix) + case _ => + } + + if (sym.name == nme.apply && sym.isCase && qual.symbol == sym.owner.module) { + val clazz = sym.tpe.finalResultType.typeSymbol + checkUndesiredProperties(clazz, tree.pos) + checkUndesiredProperties(clazz.primaryConstructor, tree.pos) } + + tree } + private def transformIf(tree: If): Tree = { val If(cond, thenpart, elsepart) = tree def unitIfEmpty(t: Tree): Tree = if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t cond.tpe match { - case ConstantType(value) => + case FoldableConstantType(value) => val res = if (value.booleanValue) thenpart else elsepart unitIfEmpty(res) case _ => tree @@ -1756,28 +1903,109 @@ abstract class RefChecks extends Transform { // on Unit, in which case we had better let it slide. val isOk = ( sym.isGetter - || (sym.name containsName nme.DEFAULT_GETTER_STRING) + || sym.isDefaultGetter || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType)) + || sym.isArtifact ) - if (!isOk) - refchecksWarning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead", WarningCategory.LintNullaryUnit) + if (!isOk) { + val msg = s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead" + val namePos = + if (sym.pos.isRange) sym.pos + else sym.pos.toRange.withEnd(sym.pos.point + sym.decodedName.length) + val action = + if (namePos.source.sourceAt(namePos) == sym.decodedName) + runReporting.codeAction("add empty parameter list", namePos.focusEnd, "()", msg) + else Nil + refchecksWarning(sym.pos, msg, WarningCategory.LintNullaryUnit, action) + } case _ => () } // Verify classes extending AnyVal meet the requirements - private def 
checkAnyValSubclass(clazz: Symbol) = { + private def checkAnyValSubclass(clazz: Symbol) = if (clazz.isDerivedValueClass) { if (clazz.isTrait) reporter.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal") else if (clazz.hasAbstractFlag) - reporter.error(clazz.pos, "`abstract' modifier cannot be used with value classes") + reporter.error(clazz.pos, "`abstract` modifier cannot be used with value classes") } - } private def checkUnexpandedMacro(t: Tree) = if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) reporter.error(t.pos, "macro has not been expanded") + // if expression in statement position (of template or block) + // looks like a useful value that should not be ignored, warn and return true + // User specifies that an expression is boring by ascribing `e: Unit`. + // The subtree `e` will bear an attachment, but may be wrapped in adaptations. + private def checkInterestingResultInStatement(t: Tree): Boolean = { + def isUninterestingSymbol(sym: Symbol): Boolean = + sym != null && ( + sym.isConstructor || + sym.hasPackageFlag || + sym.isPackageObjectOrClass || + sym == BoxedUnitClass || + sym == AnyClass || + sym == AnyRefClass || + sym == AnyValClass + ) + def isUninterestingType(tpe: Type): Boolean = + tpe != null && ( + isUnitType(tpe) || + tpe.typeSymbol.isBottomClass || + tpe =:= UnitTpe || + tpe =:= BoxedUnitTpe || + isTrivialTopType(tpe) + ) + // java lacks this.type idiom to distinguish side-effecting method, so ignore result of invoking java method. + def isJavaApplication(t: Tree): Boolean = t match { + case Apply(f, _) => f.symbol.isJavaDefined && !isUniversalMember(f.symbol) + case _ => false + } + // The quirk of typechecking if is that the LUB often results in boring types. + // Parser adds suppressing attachment on `if (b) expr` when user has `-Wnonunit-if:false`. 
+ def checkInterestingShapes(t: Tree): Boolean = + t match { + case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart) // either or + //case Block(_, Apply(label, Nil)) if label.symbol != null && nme.isLoopHeaderLabel(label.symbol.name) => false + case Block(_, res) => checkInterestingShapes(res) + case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body)) + case _ => checksForInterestingResult(t) + } + // tests for various flavors of blandness in expressions. + def checksForInterestingResult(t: Tree): Boolean = ( + !t.isDef && !treeInfo.isPureDef(t) // ignore defs + && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any + && !isUninterestingType(t.tpe) // bottom types, Unit, Any + && !treeInfo.isThisTypeResult(t) // buf += x + && !treeInfo.isSuperConstrCall(t) // just a thing + && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit + && !isJavaApplication(t) // Java methods are inherently side-effecting + ) + def checkDiscardValue(t: Tree): Boolean = + t.attachments.containsElement(DiscardedValue) && { + t.setAttachments(t.attachments.removeElement(DiscardedValue)) + val msg = s"discarded non-Unit value of type ${t.tpe}" + refchecksWarning(t.pos, msg, WarningCategory.WFlagValueDiscard) + true + } + // begin checkInterestingResultInStatement + settings.warnNonUnitStatement.value && checkInterestingShapes(t) && { + val where = t match { + case Block(_, res) => res + case If(_, thenpart, Literal(Constant(()))) => + thenpart match { + case Block(_, res) => res + case _ => thenpart + } + case _ => t + } + def msg = s"unused value of type ${where.tpe}" + refchecksWarning(where.pos, msg, WarningCategory.OtherPureStatement) + true + } || checkDiscardValue(t) + } // end checkInterestingResultInStatement + override def transform(tree: Tree): Tree = { val savedLocalTyper = localTyper val savedCurrentApplication = currentApplication @@ -1798,83 +2026,76 @@ abstract class RefChecks extends 
Transform { if (settings.warnNullaryUnit) checkNullaryMethodReturnType(sym) if (settings.warnInaccessible) { - if (!sym.isConstructor && !sym.isEffectivelyFinalOrNotOverridden && !sym.isSynthetic) + if (!sym.isConstructor && !sym.isEffectivelyFinalOrNotOverridden && !sym.owner.isSealed && !sym.isSynthetic) checkAccessibilityOfReferencedTypes(tree) } - tree match { - case dd: DefDef => - checkByNameRightAssociativeDef(dd) - - if (sym hasAnnotation NativeAttr) { - if (sym.owner.isTrait) { - reporter.error(tree.pos, "A trait cannot define a native method.") - tree - } else if (dd.rhs == EmptyTree) { - // pretend it had a stub implementation - sym resetFlag DEFERRED - deriveDefDef(dd)(_ => typed(gen.mkSysErrorCall("native method stub"))) - } else tree - } else tree - + val r = tree match { + case dd: DefDef if sym.hasAnnotation(NativeAttr) => + if (sym.owner.isTrait) { + reporter.error(tree.pos, "A trait cannot define a native method.") + tree + } else if (dd.rhs == EmptyTree) { + // pretend it had a stub implementation + sym resetFlag DEFERRED + deriveDefDef(dd)(_ => typed(gen.mkThrowNewRuntimeException("native method stub"))) + } else + tree case _ => tree } + r.transform(this) - case Template(parents, self, body) => + case Template(_, _, body) => localTyper = localTyper.atOwner(tree, currentOwner) - for (stat <- body) { - if (treeInfo.isPureExprForWarningPurposes(stat)) { + for (stat <- body) + if (!checkInterestingResultInStatement(stat) && treeInfo.isPureExprForWarningPurposes(stat)) { val msg = "a pure expression does nothing in statement position" - val clause = if (body.lengthCompare(1) > 0) "; multiline expressions may require enclosing parentheses" else "" + val clause = if (body.lengthCompare(2) > 0) "; multiline expressions may require enclosing parentheses" else "" refchecksWarning(stat.pos, s"$msg$clause", WarningCategory.OtherPureStatement) } - } - validateBaseTypes(currentOwner) checkOverloadedRestrictions(currentOwner, currentOwner) // scala/bug#7870 
default getters for constructors live in the companion module checkOverloadedRestrictions(currentOwner, currentOwner.companionModule) + checkDubiousOverloads(currentOwner) val bridges = addVarargBridges(currentOwner) // TODO: do this during uncurry? checkAllOverrides(currentOwner) checkAnyValSubclass(currentOwner) if (currentOwner.isDerivedValueClass) currentOwner.primaryConstructor makeNotPrivate NoSymbol // scala/bug#6601, must be done *after* pickler! - if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree + val res = if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree + res.transform(this) + + case _: TypeTreeWithDeferredRefCheck => + abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc") - case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc") case tpt@TypeTree() => if(tpt.original != null) { tpt.original foreach { - case dc@TypeTreeWithDeferredRefCheck() => + case dc: TypeTreeWithDeferredRefCheck => transform(dc.check()) // #2416 -- only call transform to do refchecks, but discard results // tpt has the right type if the deferred checks are ok case _ => } } - if (inPattern) tree - else tree.setType(RefCheckTypeMap.check(tree.tpe, tree)) - - case TypeApply(fn, args) => - checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe)) - if (isSimpleCaseApply(tree)) - transformCaseApply(tree) - else - tree + tree.setType(RefCheckTypeMap.check(tree.tpe, tree, inPattern)).transform(this) - case x @ Apply(_, _) => - transformApply(x) + case treeInfo.Application(fun, targs, argss) => + if (targs.nonEmpty) + checkBounds(tree, NoPrefix, NoSymbol, fun.tpe.typeParams, targs map (_.tpe)) + val res = transformApplication(tree, fun, targs, argss) + res.transform(this) case x @ If(_, _, _) => - transformIf(x) + transformIf(x).transform(this) case New(tpt) => enterReference(tree.pos, 
tpt.tpe.typeSymbol) - tree + tree.transform(this) - case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) => - reporter.error(tree.pos, "no `: _*' annotation allowed here\n"+ - "(such annotations are only allowed in arguments to *-parameters)") - tree + case treeInfo.WildcardStarArg(_) => + checkRepeatedParamArg(tree) + tree.transform(this) case Ident(name) => checkUndesiredProperties(sym, tree.pos) @@ -1882,90 +2103,165 @@ abstract class RefChecks extends Transform { assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug enterReference(tree.pos, sym) } - tree + tree.transform(this) case x @ Select(_, _) => - transformSelect(x) + transformSelect(x).transform(this) case Literal(Constant(tpe: Type)) => RefCheckTypeMap.check(tpe, tree) - tree + tree.transform(this) case UnApply(fun, args) => transform(fun) // just make sure we enterReference for unapply symbols, note that super.transform(tree) would not transform(fun) // transformTrees(args) // TODO: is this necessary? could there be forward references in the args?? 
// probably not, until we allow parameterised extractors - tree + tree.transform(this) case Block(stats, expr) => - val (count, result0, adapted) = + // diagnostic info + val (count, result0) = expr match { - case Block(expr :: Nil, Literal(Constant(()))) => (1, expr, true) - case Literal(Constant(())) => (0, EmptyTree, false) - case _ => (1, EmptyTree, false) + case Block(expr :: Nil, Literal(Constant(()))) => (1, expr) + case Literal(Constant(())) => (0, EmptyTree) + case _ => (1, EmptyTree) } - def checkPure(t: Tree, supple: Boolean): Unit = - if (treeInfo.isPureExprForWarningPurposes(t)) { - val msg = "a pure expression does nothing in statement position" - val parens = if (stats.length + count > 1) "multiline expressions might require enclosing parentheses" else "" - val discard = if (adapted) "; a value can be silently discarded when Unit is expected" else "" + val isMultiline = stats.lengthCompare(1 - count) > 0 + + def checkPure(t: Tree): Unit = + if (!treeInfo.hasExplicitUnit(t) && treeInfo.isPureExprForWarningPurposes(t)) { + val msg = + if (t.attachments.containsElement(DiscardedExpr)) { + t.setAttachments(t.attachments.removeElement(DiscardedExpr)) + "discarded pure expression does nothing" + } + else "a pure expression does nothing in statement position" val text = - if (supple) s"$parens$discard" - else if (!parens.isEmpty) s"$msg; $parens" else msg + if (!isMultiline) msg + else s"$msg; multiline expressions might require enclosing parentheses" refchecksWarning(t.pos, text, WarningCategory.OtherPureStatement) } - // sanity check block for unintended expr placement - stats.foreach(checkPure(_, supple = false)) - if (result0.nonEmpty) checkPure(result0, supple = true) - tree - case _ => tree - } + // check block for unintended "expression in statement position" + stats.foreach { t => if (!checkInterestingResultInStatement(t)) checkPure(t) } + if (result0.nonEmpty) result0.updateAttachment(DiscardedExpr) // see checkPure on recursion into result + + def 
checkImplicitlyAdaptedBlockResult(t: Tree): Unit = { + def loop(t: Tree): Unit = + t match { + case Apply(coercion, _) if t.isInstanceOf[ApplyImplicitView] => + coercion.symbol.paramLists match { + case (p :: Nil) :: _ if p.isByNameParam => refchecksWarning(t.pos, s"Block result expression was adapted via implicit conversion (${coercion.symbol}) taking a by-name parameter; only the result was passed, not the entire block.", WarningCategory.LintBynameImplicit) + case _ => + } + case TypeApply(fun, _) => loop(fun) + case Apply(fun, _) => loop(fun) + case _ => + } + loop(t) + } + if (isMultiline && settings.warnByNameImplicit) checkImplicitlyAdaptedBlockResult(expr) + + tree.transform(this) + + case Match(selector, cases) => + // only warn if it could be put in backticks in a pattern + def isWarnable(sym: Symbol): Boolean = + sym != null && sym.exists && + !sym.hasPackageFlag && sym.isStable && !isByName(sym) && + !sym.hasAttachment[PatVarDefAttachment.type] // val (_, v) = with one var is shadowed in desugaring + //!toCheck.isSynthetic // self-type symbols are synthetic: value self (), do warn + + class CheckSelector extends InternalTraverser { + var selectorSymbols: List[Symbol] = null + override def traverse(t: Tree): Unit = { + val include = t match { + case _: This => true // !t.symbol.isStable + case _: SymTree => isWarnable(t.symbol) + case _ => false + } + if (include) selectorSymbols ::= t.symbol + t.traverse(this) + } + // true if the shadowed toCheck appears in the selector expression + def implicatesSelector(toCheck: Symbol): Boolean = { + if (selectorSymbols == null) { + selectorSymbols = Nil + apply(selector) + } + selectorSymbols.exists(sym => sym.eq(toCheck) || sym.accessedOrSelf.eq(toCheck.accessedOrSelf) || + toCheck.isThisSym && toCheck.owner == sym) // self match { case self: S => }, selector C.this is class symbol + } + } + val checkSelector = new CheckSelector + // true to warn about shadowed when selSym is the scrutinee + def 
checkShadowed(shadowed: Symbol): Boolean = { + def checkShadowedSymbol(toCheck: Symbol): Boolean = + isWarnable(toCheck) && !checkSelector.implicatesSelector(toCheck) + + if (shadowed.isOverloaded) shadowed.alternatives.exists(checkShadowedSymbol) + else checkShadowedSymbol(shadowed) + } + // warn if any checkable pattern var shadows, in the context of the selector, + // or for `tree match case Apply(fun, args) =>` check whether names in args equal names of fun.params + def checkPattern(p: Tree): Unit = { + val traverser = new InternalTraverser { + // names absolved of shadowing because it is a "current" parameter (of a case class, etc) + var absolved: List[Name] = Nil + override def traverse(t: Tree): Unit = t match { + case Apply(_, args) => + treeInfo.dissectApplied(t).core.tpe match { + case MethodType(ps, _) => + foreach2(ps, args) { (p, arg) => + absolved ::= p.name + try traverse(arg) + finally absolved = absolved.tail + } + case _ => t.traverse(this) + } + case bind @ Bind(name, _) => + def richLocation(sym: Symbol): String = sym.ownsString match { + case "" => val n = sym.pos.line; if (n > 0) s"$sym at line $n" else sym.fullLocationString + case owns => s"$sym in $owns" + } + for (shade <- bind.getAndRemoveAttachment[PatShadowAttachment]) { + val shadowed = shade.shadowed + if (!absolved.contains(name) && !bind.symbol.hasTransOwner(shadowed.accessedOrSelf) && checkShadowed(shadowed)) + refchecksWarning(bind.pos, s"Name $name is already introduced in an enclosing scope as ${richLocation(shadowed)}. Did you intend to match it using backquoted `$name`?", WarningCategory.OtherShadowing) + + } + case _ => t.traverse(this) + } + } + traverser(p) + } + // check the patterns for unfriendly shadowing, patvars bearing PatShadowAttachment + if (settings.warnPatternShadow) for (cdef <- cases) checkPattern(cdef.pat) + tree.transform(this) - // skip refchecks in patterns.... - val result1 = result match { + // skip refchecks in patterns.... 
case CaseDef(pat, guard, body) => val pat1 = savingInPattern { inPattern = true transform(pat) } treeCopy.CaseDef(tree, pat1, transform(guard), transform(body)) - case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) => - savingInPattern { - inPattern = true - deriveLabelDef(result)(transform) - } - case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) => - savingInPattern { - // scala/bug#7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals - // that we are in the user-supplied code in the case body. - // - // Relies on the translation of: - // (null: Any) match { case x: List[_] => x; x.reverse; case _ => }' - // to: - // val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]); - // matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply. - inPattern = false - super.transform(result) - } - case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) => - deriveValDef(result)(transform) // scala/bug#7716 Don't refcheck the tpt of the synthetic val that holds the selector. 
+ case _ => - super.transform(result) + tree.transform(this) } - result1 match { - case ClassDef(_, _, _, _) - | TypeDef(_, _, _, _) - | ModuleDef(_, _, _) => - if (result1.symbol.isLocalToBlock || result1.symbol.isTopLevel) - varianceValidator.traverse(result1) + + result match { + case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) | ModuleDef(_, _, _) => + if (result.symbol.isLocalToBlock || result.symbol.isTopLevel) + varianceValidator.traverse(result) case tt @ TypeTree() if tt.original != null => - varianceValidator.traverse(tt.original) // See scala/bug#7872 + varianceValidator.validateVarianceOfPolyTypesIn(tt.tpe) case _ => } - checkUnexpandedMacro(result1) + checkUnexpandedMacro(result) - result1 + result } catch { case ex: TypeError => if (settings.isDebug) ex.printStackTrace() diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 2aa75040b800..69d78ad1fb5d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -32,7 +32,7 @@ trait StdAttachments { /** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment. */ - def macroExpanderAttachment(tree: Tree): MacroExpanderAttachment = + def macroExpanderAttachment(tree: Tree): MacroExpanderAttachment = tree.attachments.get[MacroExpanderAttachment] getOrElse { tree match { case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn) @@ -88,11 +88,11 @@ trait StdAttachments { /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it. 
*/ - def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment) + def suppressMacroExpansion(tree: Tree): tree.type = tree.updateAttachment(SuppressMacroExpansionAttachment) /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children. */ - def unsuppressMacroExpansion(tree: Tree): Tree = { + def unsuppressMacroExpansion(tree: Tree): tree.type = { tree.removeAttachment[SuppressMacroExpansionAttachment.type] tree match { // see the comment to `isMacroExpansionSuppressed` to learn why we need @@ -157,12 +157,13 @@ trait StdAttachments { * typechecks to be a macro application. Then we need to unmark it, expand it and try to treat * its expansion as a macro impl reference. */ - def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type] + def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type](MacroImplRefAttachmentTag) /** Determines whether a tree should or should not be adapted, * because someone has put MacroImplRefAttachment on it. 
*/ - def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type] + def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type](MacroImplRefAttachmentTag) + private[this] val MacroImplRefAttachmentTag: reflect.ClassTag[MacroImplRefAttachment.type] = reflect.classTag[MacroImplRefAttachment.type] /** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected * from typedNamedApply, the applyDynamicNamed argument rewriter, the latter @@ -175,8 +176,9 @@ trait StdAttachments { */ case object DynamicRewriteAttachment def markDynamicRewrite(tree: Tree): Tree = tree.updateAttachment(DynamicRewriteAttachment) - def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type] - def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type].isDefined + def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type](DynamicRewriteAttachmentTag) + def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type](DynamicRewriteAttachmentTag).isDefined + private[this] val DynamicRewriteAttachmentTag: reflect.ClassTag[DynamicRewriteAttachment.type] = reflect.classTag[DynamicRewriteAttachment.type] /** * Marks a tree that has been adapted by typer and sets the original tree that was in place before. @@ -194,7 +196,80 @@ trait StdAttachments { * track of other adapted trees. 
*/ case class OriginalTreeAttachment(original: Tree) +} + + +// imported from scalamacros/paradise +trait MacroAnnotationAttachments { + self: Analyzer => + + import global._ + import scala.collection.mutable + + case object WeakSymbolAttachment + def markWeak(sym: Symbol) = if (sym != null && sym != NoSymbol) sym.updateAttachment(WeakSymbolAttachment) else sym + def unmarkWeak(sym: Symbol) = if (sym != null && sym != NoSymbol) sym.removeAttachment[WeakSymbolAttachment.type] else sym + def isWeak(sym: Symbol) = sym == null || sym == NoSymbol || sym.attachments.get[WeakSymbolAttachment.type].isDefined + + case class SymbolCompleterAttachment(info: Type) + def backupCompleter(sym: Symbol): Symbol = { + if (sym != null && sym != NoSymbol) { + assert(sym.rawInfo.isInstanceOf[LazyType], s"${sym.accurateKindString} ${sym.rawname}#${sym.id} with ${sym.rawInfo.kind}") + sym.updateAttachment(SymbolCompleterAttachment(sym.rawInfo)) + } else sym + } + def restoreCompleter(sym: Symbol): Unit = { + if (sym != null && sym != NoSymbol) { + val oldCompleter = sym.attachments.get[SymbolCompleterAttachment].get.info + sym setInfo oldCompleter + sym.attachments.remove[SymbolCompleterAttachment] + } else () + } + + // here we should really store and retrieve duplicates of trees in order to avoid leakage through tree attributes + case class SymbolSourceAttachment(source: Tree) + def attachSource(sym: Symbol, tree: Tree): Symbol = if (sym != null && sym != NoSymbol) sym.updateAttachment(SymbolSourceAttachment(duplicateAndKeepPositions(tree))) else sym + def attachedSource(sym: Symbol): Tree = if (sym != null && sym != NoSymbol) sym.attachments.get[SymbolSourceAttachment].map(att => duplicateAndKeepPositions(att.source)).getOrElse(EmptyTree) else EmptyTree + + // unfortunately we cannot duplicate here, because that would dissociate the symbol from its derived symbols + // that's because attachExpansion(tree) happens prior to enterSym(tree), so if we duplicate the assigned symbol never 
makes it into the att + // in its turn, that would mean that we won't be able to handle recursive expansions in typedTemplate + // because by the time typedTemplate gets activated, everything's already expanded by templateSig + // so we need to go from original trees/symbols to recursively expanded ones and that requires links to derived symbols + // TODO: should be a better solution + case class SymbolExpansionAttachment(expansion: List[Tree]) + def hasAttachedExpansion(sym: Symbol) = sym.attachments.get[SymbolExpansionAttachment].isDefined + def attachExpansion(sym: Symbol, trees: List[Tree]): Symbol = if (sym != null && sym != NoSymbol) sym.updateAttachment(SymbolExpansionAttachment(trees/*.map(tree => duplicateAndKeepPositions(tree))*/)) else sym + def attachedExpansion(sym: Symbol): Option[List[Tree]] = if (sym != null && sym != NoSymbol) sym.attachments.get[SymbolExpansionAttachment].map(_.expansion/*.map(tree => duplicateAndKeepPositions(tree))*/) else None - /** Added to trees that appear in a method value, e.g., to `f(x)` in `f(x) _` */ - case object MethodValueAttachment + import SymbolExpansionStatus._ + private def checkExpansionStatus(sym: Symbol, p: SymbolExpansionStatus => Boolean) = sym.attachments.get[SymbolExpansionStatus].map(p).getOrElse(false) + def isMaybeExpandee(sym: Symbol): Boolean = checkExpansionStatus(sym, _.isUnknown) + def isExpanded(sym: Symbol): Boolean = checkExpansionStatus(sym, _.isExpanded) + def isNotExpandable(sym: Symbol): Boolean = checkExpansionStatus(sym, _.isNotExpandable) + def markMaybeExpandee(sym: Symbol): Symbol = if (sym != null && sym != NoSymbol) sym.updateAttachment(Unknown) else sym + def markExpanded(sym: Symbol): Symbol = if (sym != null && sym != NoSymbol) sym.updateAttachment(Expanded) else sym + def markNotExpandable(sym: Symbol): Symbol = if (sym != null && sym != NoSymbol) sym.updateAttachment(NotExpandable) else sym + def unmarkExpanded(sym: Symbol): Symbol = if (sym != null && sym != NoSymbol) 
sym.removeAttachment[SymbolExpansionStatus] else sym + + case class CacheAttachment(cache: mutable.Map[String, Any]) + implicit class RichTree(tree: Tree) { + def cached[T](key: String, op: => T): T = { + val cache = tree.attachments.get[CacheAttachment].map(_.cache).getOrElse(mutable.Map[String, Any]()) + val result = cache.getOrElseUpdate(key, op).asInstanceOf[T] + tree.updateAttachment(CacheAttachment(cache)) + result + } + } + + private final class SymbolExpansionStatus private (val value: Int) { //extends AnyVal { + def isUnknown = this == SymbolExpansionStatus.Unknown + def isExpanded = this == SymbolExpansionStatus.Expanded + def isNotExpandable = this == SymbolExpansionStatus.NotExpandable + } + private object SymbolExpansionStatus { + val Unknown = new SymbolExpansionStatus(0) + val Expanded = new SymbolExpansionStatus(1) + val NotExpandable = new SymbolExpansionStatus(2) + } } diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 56e1e6238f67..398ecfaf5639 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -19,52 +19,51 @@ package scala package tools.nsc package typechecker -import scala.collection.{immutable, mutable} -import mutable.ListBuffer +import scala.collection.{immutable, mutable}, mutable.ListBuffer import scala.tools.nsc.Reporting.WarningCategory import symtab.Flags._ -/** This phase performs the following functions, each of which could be split out in a - * mini-phase: +/** This phase performs the following functions, + * each of which could be split out into its own mini-phase: * - * (1) Adds super accessors for all super calls that either + * 1. Adds super accessors for all super calls that either * appear in a trait or have as a target a member of some outer class. * - * (2) Converts references to parameter fields that have the same name as a corresponding + * 2. Converts references to parameter fields that have the same name as a corresponding * public parameter field in a superclass to a reference to the superclass * field (corresponding = super class field is initialized with subclass field). * This info is pre-computed by the `alias` field in Typer. `dotc` follows a different * route; it computes everything in SuperAccessors and changes the subclass field * to a forwarder instead of manipulating references. This is more modular. * - * (3) Adds protected accessors if the access to the protected member happens + * 3. Adds protected accessors if the access to the protected member happens * in a class which is not a subclass of the member's owner. * - * (4) Mangles the names of class-members which are + * 4. Mangles the names of class-members which are * private up to an enclosing non-package class, in order to avoid overriding conflicts. * This is a dubious, and it would be better to deprecate class-qualified privates. * - * (5) This phase also sets SPECIALIZED flag on type parameters with + * 5. This phase also sets SPECIALIZED flag on type parameters with * `@specialized` annotation. 
We put this logic here because the * flag must be set before pickling. * * It also checks that: * - * (1) Symbols accessed from super are not abstract, or are overridden by + * 1. Symbols accessed from super are not abstract, or are overridden by * an abstract override. * - * (2) If a symbol accessed accessed from super is defined in a real class (not a trait), + * 2. If a symbol accessed accessed from super is defined in a real class (not a trait), * there are no abstract members which override this member in Java's rules * (see scala/bug#4989; such an access would lead to illegal bytecode) * - * (3) Super calls do not go to some synthetic members of Any (see isDisallowed) + * 3. Super calls do not go to some synthetic members of Any (see isDisallowed) * - * (4) Super calls do not go to synthetic field accessors + * 4. Super calls do not go to synthetic field accessors * - * (5) A class and its companion object do not both define a class or module with the + * 5. A class and its companion object do not both define a class or module with the * same name. * - * TODO: Rename phase to "Accessors" because it handles more than just super accessors + * Should've really been called "Accessors" as it handles more than just super accessors. 
*/ abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers { import global._ @@ -77,7 +76,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT /** The following flags may be set by this phase: */ override def phaseNewFlags: Long = notPRIVATE - protected def newTransformer(unit: CompilationUnit): Transformer = + protected def newTransformer(unit: CompilationUnit): AstTransformer = new SuperAccTransformer(unit) class SuperAccTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { @@ -91,7 +90,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT private val accDefs = mutable.Map[Symbol, ListBuffer[Tree]]() private def storeAccessorDefinition(clazz: Symbol, tree: Tree) = { - val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz)) + val buf = accDefs.getOrElse(clazz, abort(s"no acc def buf for $clazz")) buf += typers(clazz) typed tree } private def ensureAccessor(sel: Select, mixName: TermName = nme.EMPTY) = { @@ -117,15 +116,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT atPos(sel.pos)(Select(gen.mkAttributedThis(clazz), superAcc) setType sel.tpe) } - private def transformArgs(params: List[Symbol], args: List[Tree]) = { + private def transformArgs(params: List[Symbol], args: List[Tree]) = treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) => if (isByNameParamType(param.tpe)) withInvalidOwner(transform(arg)) else transform(arg) } - } - /** Check that a class and its companion object to not both define + /** Check that a class and its companion object do not both define * a class or module with same name */ private def checkCompanionNameClashes(sym: Symbol) = @@ -136,14 +134,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (other == NoSymbol) other = linked.info.decl(sym.name.toTermName).filter(_.isModule) if (other != NoSymbol) - 
reporter.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+ - "\nand its companion "+sym.owner.companionModule+" also defines "+ - other) + reporter.error(sym.pos, + sm"""|name clash: ${sym.owner} defines $sym + |and its companion ${sym.owner.companionModule} also defines $other""") } } private def transformSuperSelect(sel: Select): Tree = { - val Select(sup @ Super(_, mix), name) = sel + val Select(sup @ Super(_, mix), name) = sel: @unchecked val sym = sel.symbol val clazz = sup.symbol @@ -152,7 +150,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (mix != tpnme.EMPTY || member == NoSymbol || !(member.isAbstractOverride && member.isIncompleteIn(clazz))) reporter.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+ - "unless it is overridden by a member declared `abstract' and `override'") + "unless it is overridden by a member declared `abstract` and `override`") } else { val owner = sym.owner if (mix == tpnme.EMPTY && !owner.isTrait) { @@ -193,7 +191,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT } def mixIsTrait = sup.tpe match { - case SuperType(thisTpe, superTpe) => superTpe.typeSymbol.isTrait + case SuperType(_, superTpe) => superTpe.typeSymbol.isTrait + case x => throw new MatchError(x) } val needAccessor = name.isTermName && { @@ -239,13 +238,13 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val decls = sym.info.decls for (s <- decls) { val privateWithin = s.privateWithin - if (privateWithin.isClass && !s.hasFlag(EXPANDEDNAME | PROTECTED) && !privateWithin.isModuleClass && - !s.isConstructor) { + def isPrivateWithinNonCompanionModule = privateWithin.isModuleClass + if (privateWithin.isClass && !s.hasFlag(EXPANDEDNAME | PROTECTED) && !isPrivateWithinNonCompanionModule && !s.isConstructor) { val savedName = s.name decls.unlink(s) s.expandName(privateWithin) decls.enter(s) - log("Expanded '%s' to '%s' 
in %s".format(savedName, s.name, sym)) + log(s"Expanded '$savedName' to '${s.name}' in $sym") } } super.transform(tree) @@ -302,23 +301,15 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT // also exists in a superclass, because they may be surprised // to find out that a constructor parameter will shadow a // field. See scala/bug#4762. - if (settings.warnPrivateShadow) { - if (sym.isPrivateLocal && sym.paramss.isEmpty) { - qual.symbol.ancestors foreach { parent => - parent.info.decls filterNot (x => x.isPrivate || x.isLocalToThis) foreach { m2 => - if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) { - runReporting.warning(sel.pos, - sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name - + " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within " - + sym.owner + " - you may want to give them distinct names.", - WarningCategory.LintPrivateShadow, - currentOwner) - } - } - } - } - } - + if (settings.warnPrivateShadow && sym.isPrivateLocal && sym.paramss.isEmpty) + for (parent <- qual.symbol.ancestors) + for (m2 <- parent.info.decls) + if (!m2.isPrivate && !m2.isLocalToThis && sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) + runReporting.warning(sel.pos, + sq"""${sym.accessString} ${sym.fullLocationString} shadows mutable ${m2.name} inherited from ${m2.owner}. 
+ > Changes to ${m2.name} will not be visible within ${sym.owner}; you may want to give them distinct names.""", + WarningCategory.LintPrivateShadow, + currentOwner) def isAccessibleFromSuper(sym: Symbol) = { val pre = SuperType(sym.owner.tpe, qual.tpe) @@ -355,6 +346,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT && !sym.owner.isTrait && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass && qual.symbol.info.member(sym.name).exists + && !(currentClass.typeOfThis.typeSymbol.isSubClass(sym.owner)) // scala/bug#11924 && !needsProtectedAccessor(sym, tree.pos) ) if (shouldEnsureAccessor) { @@ -365,14 +357,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false) } - case Super(_, mix) => - if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) { - if (!settings.overrideVars) - reporter.error(tree.pos, "super may not be used on " + sym.accessedOrSelf) - } else if (isDisallowed(sym)) { - reporter.error(tree.pos, "super not allowed here: use this." + name.decode + " instead") - } - transformSuperSelect(sel) + case Super(_, mix) => + if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) { + val more = "super can only be used to select a member that is a method or type" + reporter.error(tree.pos, s"super may not be used on ${ if (sym.isLazy) sym else sym.accessedOrSelf }; $more") + } else if (isDisallowed(sym)) { + reporter.error(tree.pos, "super not allowed here: use this." 
+ name.decode + " instead") + } + transformSuperSelect(sel) case _ => mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true) @@ -383,7 +375,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT case DefDef(_, _, _, _, _, _) if tree.symbol.isMethodWithExtension => deriveDefDef(tree)(rhs => withInvalidOwner(transform(rhs))) - case TypeApply(sel @ Select(qual, name), args) => + case TypeApply(sel @ Select(_, _), args) => mayNeedProtectedAccessor(sel, args, goToSuper = true) case Assign(lhs @ Select(qual, name), rhs) => @@ -434,7 +426,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (owner.isClass) validCurrentOwner = true val savedLocalTyper = localTyper localTyper = localTyper.atOwner(tree, if (owner.isModuleNotMethod) owner.moduleClass else owner) - typers = typers updated (owner, localTyper) + typers = typers.updated(owner, localTyper) val result = super.atOwner(tree, owner)(trans) localTyper = savedLocalTyper validCurrentOwner = savedValid @@ -483,7 +475,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT newAcc setInfoAndEnter accType(newAcc) val code = DefDef(newAcc, { - val (receiver :: _) :: tail = newAcc.paramss + val (receiver :: _) :: tail = newAcc.paramss: @unchecked val base: Tree = Select(Ident(receiver), sym) foldLeft2(tail, sym.info.paramss)(base){ (acc, params, pps) => val y = map2(params, pps)( (param, pp) => makeArg(param, receiver, pp.tpe)) @@ -548,7 +540,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val accessorType = MethodType(params, UnitTpe) protAcc setInfoAndEnter accessorType - val obj :: value :: Nil = params + val obj :: value :: Nil = params: @unchecked storeAccessorDefinition(clazz, DefDef(protAcc, Assign(Select(Ident(obj), field.name), Ident(value)))) protAcc diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala 
b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index b2afc128846c..5eec1b3852ef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,10 +13,9 @@ package scala.tools.nsc package typechecker -import scala.language.postfixOps - import scala.collection.mutable import scala.collection.mutable.ListBuffer +import scala.runtime.Statics import scala.tools.nsc.Reporting.WarningCategory import symtab.Flags._ @@ -26,7 +25,7 @@ import symtab.Flags._ * def productArity: Int * def productElement(n: Int): Any * def productPrefix: String - * def productIterator: Iterator[Any] + * def productIterator: Iterator[Any] // required for binary compatibility of value classes * * Selectively added to case classes/objects, unless a non-default * implementation already exists: @@ -36,7 +35,7 @@ import symtab.Flags._ * def toString(): String * * Special handling: - * protected def readResolve(): AnyRef + * protected def writeReplace(): AnyRef */ trait SyntheticMethods extends ast.TreeDSL { self: Analyzer => @@ -45,7 +44,7 @@ trait SyntheticMethods extends ast.TreeDSL { import definitions._ import CODE._ - private lazy val productSymbols = List(Product_productPrefix, Product_productArity, Product_productElement, Product_iterator, Product_canEqual) + private lazy val productSymbols = List(Product_productPrefix, Product_productArity, Product_productElement) ::: Product_productElementName.toOption.toList ::: List(Product_iterator, Product_canEqual) private lazy val valueSymbols = List(Any_hashCode, Any_equals) private lazy val caseSymbols = List(Object_hashCode, Object_toString) ::: productSymbols private lazy val caseValueSymbols = Any_toString :: 
valueSymbols ::: productSymbols @@ -71,7 +70,7 @@ trait SyntheticMethods extends ast.TreeDSL { */ def addSyntheticMethods(templ: Template, clazz0: Symbol, context: Context): Template = { val syntheticsOk = (phase.id <= currentRun.typerPhase.id) && { - symbolsToSynthesize(clazz0) filter (_ matchingSymbol clazz0.info isSynthetic) match { + symbolsToSynthesize(clazz0).filter(_.matchingSymbol(clazz0.info).isSynthetic) match { case Nil => true case syms => log("Not adding synthetic methods: already has " + syms.mkString(", ")) ; false } @@ -79,7 +78,7 @@ trait SyntheticMethods extends ast.TreeDSL { if (!syntheticsOk) return templ - val typer = newTyper(if (reporter.hasErrors) context.makeSilent(false) else context) + val typer = newTyper(if (reporter.hasErrors) context.makeSilent(reportAmbiguousErrors = false) else context) val synthesizer = new ClassMethodSynthesis(clazz0, typer) import synthesizer._ @@ -92,11 +91,13 @@ trait SyntheticMethods extends ast.TreeDSL { else templ } + def Lit(c: Any) = LIT.typed(c) + def accessors = clazz.caseFieldAccessors val arity = accessors.size def forwardToRuntime(method: Symbol): Tree = - forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _) + forwardMethod(method, getMember(ScalaRunTimeModule, method.name.prepend("_")))(mkThis :: _) def callStaticsMethodName(name: TermName)(args: Tree*): Tree = { val method = RuntimeStaticsModule.info.member(name) @@ -116,17 +117,18 @@ trait SyntheticMethods extends ast.TreeDSL { (m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth)) } } - def productIteratorMethod = { + def productIteratorMethod = createMethod(nme.productIterator, iteratorOfType(AnyTpe))(_ => gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(AnyTpe), List(mkThis)) ) - } - /* Common code for productElement and (currently disabled) productElementName */ - def perElementMethod(name: Name, returnType: 
Type)(caseFn: Symbol => Tree): Tree = + def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree = createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx))) - // def productElementNameMethod = perElementMethod(nme.productElementName, StringTpe)(x => LIT(x.name.toString)) + def productElementNameMethod = { + val elementAccessors = clazz.constrParamAccessors.take(arity) + createSwitchMethod(nme.productElementName, elementAccessors.indices, StringTpe)(idx => LIT(elementAccessors(idx).name.dropLocal.decode)) + } var syntheticCanEqual = false @@ -177,16 +179,25 @@ trait SyntheticMethods extends ast.TreeDSL { * - asInstanceOf if no equality checks need made (see scala/bug#9240, scala/bug#10361) */ def equalsCore(eqmeth: Symbol, accessors: List[Symbol]) = { - def usefulEquality(acc: Symbol): Boolean = { - val rt = acc.info.resultType - rt != NothingTpe && rt != NullTpe && rt != UnitTpe - } - val otherName = freshTermName(clazz.name.toStringWithSuffix("$"))(freshNameCreatorFor(context)) val otherSym = eqmeth.newValue(otherName, eqmeth.pos, SYNTHETIC) setInfo clazz.tpe - val pairwise = accessors collect { - case acc if usefulEquality(acc) => - fn(Select(mkThis, acc), acc.tpe member nme.EQ, Select(Ident(otherSym), acc)) + val pairwise = { + //compare primitive fields first, slow equality checks of non-primitive fields can be skipped when primitives differ + val prims = ListBuffer[Tree]() + val refs = ListBuffer[Tree]() + for (acc <- accessors) { + val resultType = acc.info.resultType + val usefulEquals = resultType != NothingTpe && resultType != NullTpe && resultType != UnitTpe + if (usefulEquals) { + val thisAcc = Select(mkThis, acc) + val otherAcc = Select(Ident(otherSym), acc) + if (isPrimitiveValueType(resultType)) + prims += fn(thisAcc, acc.tpe.member(nme.EQ), otherAcc) + else + refs += fn(thisAcc, Any_==, otherAcc) + } + } + prims.prependToList(refs.toList) // (prims ++ refs).toList } val canEq = 
gen.mkMethodCall(otherSym, nme.canEqual_, Nil, List(mkThis)) val tests = if (clazz.isDerivedValueClass || clazz.isFinal && syntheticCanEqual) pairwise else pairwise :+ canEq @@ -233,14 +244,14 @@ trait SyntheticMethods extends ast.TreeDSL { /* The hashcode method for value classes * def hashCode(): Int = this.underlying.hashCode */ - def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntTpe) { m => + def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntTpe) { _ => Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_) } /* The _1, _2, etc. methods to implement ProductN, disabled * until we figure out how to introduce ProductN without cycles. */ - /**** + /* def productNMethods = { val accs = accessors.toIndexedSeq 1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num))) @@ -248,26 +259,37 @@ trait SyntheticMethods extends ast.TreeDSL { def projectionMethod(accessor: Symbol, num: Int) = { createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor)) } - ****/ + */ // methods for both classes and objects - def productMethods = { + def productMethods: List[(Symbol, () => Tree)] = { List( - Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)), - Product_productArity -> (() => constantNullary(nme.productArity, arity)), - Product_productElement -> (() => perElementMethod(nme.productElement, AnyTpe)(mkThisSelect)), - Product_iterator -> (() => productIteratorMethod), - Product_canEqual -> (() => canEqualMethod) - // This is disabled pending a reimplementation which doesn't add any - // weight to case classes (i.e. inspects the bytecode.) 
- // Product_productElementName -> (() => productElementNameMethod(accessors)), + Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)), + Product_productArity -> (() => constantNullary(nme.productArity, arity)), + Product_productElement -> (() => perElementMethod(nme.productElement, AnyTpe)(mkThisSelect)), + Product_iterator -> (() => productIteratorMethod), + Product_canEqual -> (() => canEqualMethod) ) } + def productClassMethods: List[(Symbol, () => Tree)] = { + // Classes get productElementName but case objects do not. + // For a case object the correct behaviour (i.e. to throw an IOOBE) + // is already provided by the default implementation in the Product trait. + + // Support running the compiler with an older library on the classpath + def elementName: List[(Symbol, () => Tree)] = Product_productElementName match { + case NoSymbol => Nil + case sym => (sym, () => productElementNameMethod) :: Nil + } + + productMethods ::: elementName + } + def hashcodeImplementation(sym: Symbol): Tree = { sym.tpe.finalResultType.typeSymbol match { - case UnitClass | NullClass => Literal(Constant(0)) - case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237))) + case UnitClass | NullClass => Lit(0) + case BooleanClass => If(Ident(sym), Lit(1231), Lit(1237)) case IntClass => Ident(sym) case ShortClass | ByteClass | CharClass => Select(Ident(sym), nme.toInt) case LongClass => callStaticsMethodName(nme.longHash)(Ident(sym)) @@ -280,37 +302,64 @@ trait SyntheticMethods extends ast.TreeDSL { def specializedHashcode = { createMethod(nme.hashCode_, Nil, IntTpe) { m => val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntTpe - val valdef = ValDef(accumulator, Literal(Constant(0xcafebabe))) + val valdef = ValDef(accumulator, Lit(0xcafebabe)) + val mixPrefix = + Assign( + Ident(accumulator), + callStaticsMethod("mix")(Ident(accumulator), Lit(clazz.name.decode.hashCode))) val mixes = accessors map 
(acc => Assign( Ident(accumulator), callStaticsMethod("mix")(Ident(accumulator), hashcodeImplementation(acc)) ) ) - val finish = callStaticsMethod("finalizeHash")(Ident(accumulator), Literal(Constant(arity))) + val finish = callStaticsMethod("finalizeHash")(Ident(accumulator), Lit(arity)) - Block(valdef :: mixes, finish) + Block(valdef :: mixPrefix :: mixes, finish) } } - def chooseHashcode = { + + def productHashCode: Tree = { + // case `hashCode` used to call `ScalaRunTime._hashCode`, but that implementation mixes in the result + // of `productPrefix`, which causes scala/bug#13033. + // Because case hashCode has two possible implementations (`specializedHashcode` and `productHashCode`) we + // need to fix it twice. + // 1. `specializedHashcode` above was changed to mix in the case class name statically. + // 2. we can achieve the same thing here by calling `MurmurHash3Module.productHash` with a `seed` that mixes + // in the case class name already. This is backwards and forwards compatible: + // - the new generated code works with old and new standard libraries + // - the `MurmurHash3Module.productHash` implementation returns the same result as before when called by + // previously compiled case classes + // Alternatively, we could decide to always generate the full implementation (like `specializedHashcode`) + // at the cost of bytecode size. 
+ createMethod(nme.hashCode_, Nil, IntTpe) { _ => + if (arity == 0) Lit(clazz.name.decode.hashCode) + else gen.mkMethodCall(MurmurHash3Module, TermName("productHash"), List( + mkThis, + Lit(Statics.mix(0xcafebabe, clazz.name.decode.hashCode)), + Lit(true) + )) + } + } + + def chooseHashcode = if (accessors exists (x => isPrimitiveValueType(x.tpe.finalResultType))) specializedHashcode else - forwardToRuntime(Object_hashCode) - } + productHashCode def valueClassMethods = List( Any_hashCode -> (() => hashCodeDerivedValueClassMethod), Any_equals -> (() => equalsDerivedValueClassMethod) ) - def caseClassMethods = productMethods ++ /*productNMethods ++*/ Seq( + def caseClassMethods = productClassMethods ++ /*productNMethods ++*/ Seq( Object_hashCode -> (() => chooseHashcode), Object_toString -> (() => forwardToRuntime(Object_toString)), Object_equals -> (() => equalsCaseClassMethod) ) - def valueCaseClassMethods = productMethods ++ /*productNMethods ++*/ valueClassMethods ++ Seq( + def valueCaseClassMethods = productClassMethods ++ /*productNMethods ++*/ valueClassMethods ++ Seq( Any_toString -> (() => forwardToRuntime(Object_toString)) ) @@ -321,16 +370,17 @@ trait SyntheticMethods extends ast.TreeDSL { // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam))) ) - /* If you serialize a singleton and then deserialize it twice, - * you will have two instances of your singleton unless you implement - * readResolve. Here it is implemented for all objects which have - * no implementation and which are marked serializable (which is true - * for all case objects.) + /* If you serialize a singleton you will get an additional + * instance of the singleton, unless you implement + * special serialization logic. Here we use a serialization proxy that prevents + * serialization of state and will, on deserialization by replaced by the object + * via use of readResolve. 
This is done for all top level objects which extend + * `java.io.Serializable` (such as case objects) */ - def needsReadResolve = ( + def needsModuleSerializationProxy = ( clazz.isModuleClass && clazz.isSerializable - && !hasConcreteImpl(nme.readResolve) + && !hasConcreteImpl(nme.writeReplace) && clazz.isStatic ) @@ -352,7 +402,7 @@ trait SyntheticMethods extends ast.TreeDSL { !hasOverridingImplementation(m) || { clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && { // Without a means to suppress this warning, I've thought better of it. - if (settings.warnValueOverrides) { + if (settings.warnValueOverrides.value) { (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m => typer.context.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics", WarningCategory.Other /* settings.warnValueOverrides is not exposed as compiler flag */) } @@ -364,13 +414,14 @@ trait SyntheticMethods extends ast.TreeDSL { for ((m, impl) <- methods ; if shouldGenerate(m)) yield impl() } def extras = { - if (needsReadResolve) { + if (needsModuleSerializationProxy) { // Aha, I finally decoded the original comment. // This method should be generated as private, but apparently if it is, then // it is name mangled afterward. (Wonder why that is.) So it's only protected. - // For sure special methods like "readResolve" should not be mangled. - List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { - m setFlag PRIVATE; REF(clazz.sourceModule) + // For sure special methods like "writeReplace" should not be mangled. 
+ List(createMethod(nme.writeReplace, Nil, ObjectTpe)(m => { + m setFlag PRIVATE + New(ModuleSerializationProxyClass, gen.mkClassOf(clazz.typeOfThis)) })) } else Nil @@ -397,9 +448,9 @@ trait SyntheticMethods extends ast.TreeDSL { devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}") freshTermName(original.name.toStringWithSuffix("$"))(freshNameCreatorFor(context)) } - def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName + def nameSuffixedByParamIndex = original.name.append(s"${nme.CASE_ACCESSOR}$$${i}").toTermName val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex - val newAcc = deriveMethod(ddef.symbol, name => newName) { newAcc => + val newAcc = deriveMethod(ddef.symbol, _ => newName) { newAcc => newAcc.makePublic newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE) ddef.rhs.duplicate diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index 5a3bfa198a11..f9ee5c2cc8ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 17fc1592a0dc..1a98d6c0c530 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -21,8 +21,10 @@ import scala.tools.nsc.Reporting.WarningCategory abstract class TreeCheckers extends Analyzer { import global._ - override protected def onTreeCheckerError(pos: Position, msg: String) { - if (settings.fatalWarnings) + override protected def onTreeCheckerError(pos: Position, msg: String): Unit = { + // could thread the `site` through ContextReporter for errors, like we do for warnings, but it + // looks like an overkill since it would only be used here. + if (settings.fatalWarnings.value) runReporting.warning(pos, "\n** Error during internal checking:\n" + msg, WarningCategory.OtherDebug, site = "") } @@ -52,25 +54,22 @@ abstract class TreeCheckers extends Analyzer { case _ => diffTrees(t1, t2).toString // "" } - private def clean_s(s: String) = s.replaceAllLiterally("scala.collection.", "s.c.") + private def clean_s(s: String) = s.replace("scala.collection.", "s.c.") private def typestr(x: Type) = " (tpe = " + x + ")" - private def treestr(t: Tree) = t + " [" + classString(t) + "]" + typestr(t.tpe) - private def ownerstr(s: Symbol) = "'" + s + "'" + s.locationString - private def wholetreestr(t: Tree) = nodeToString(t) + "\n" - private def truncate(str: String, len: Int): String = ( - if (str.length <= len) str - else (str takeWhile (_ != '\n') take len - 3) + "..." - ) + private def treestr(t: Tree) = s"$t [${classString(t)}]${typestr(t.tpe)}" + private def ownerstr(s: Symbol) = s"'$s'${s.locationString}" + private def wholetreestr(t: Tree) = s"${nodeToString(t)}\n" + private def truncate(str: String, len: Int): String = if (str.length <= len) str else s"${str.takeWhile(_ != '\n').take(len - 3)}..." 
private def signature(sym: Symbol) = clean_s(sym match { case null => "null" - case _: ClassSymbol => sym.name + ": " + sym.tpe_* + case _: ClassSymbol => s"${sym.name}: ${sym.tpe_*}" case _ => sym.defString }) - private def classString(x: Any) = x match { + private def classString(x: Any): String = x match { case null => "" case t: Tree => t.shortClass case s: Symbol => s.shortSymbolClass - case x: AnyRef => shortClassOfInstance(x) + case x => shortClassOfInstance(x.asInstanceOf[AnyRef]) } private def nonPackageOwners(s: Symbol) = s.ownerChain drop 1 takeWhile (!_.hasPackageFlag) private def nonPackageOwnersPlusOne(s: Symbol) = nonPackageOwners(s) ::: (s.ownerChain dropWhile (!_.hasPackageFlag) take 1) @@ -103,7 +102,7 @@ abstract class TreeCheckers extends Analyzer { val movedMsgs = mutable.ListBuffer[String]() def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString) - def record(tree: Tree) { + def record(tree: Tree): Unit = { val sym = tree.symbol if ((sym eq null) || (sym eq NoSymbol)) return @@ -157,7 +156,7 @@ abstract class TreeCheckers extends Analyzer { traverse(unit.body) reportChanges() } - override def traverse(tree: Tree) { + override def traverse(tree: Tree): Unit = { record(tree) super.traverse(tree) } @@ -178,7 +177,7 @@ abstract class TreeCheckers extends Analyzer { def errorFn(msg: Any): Unit = errorFn(NoPosition, msg) def informFn(msg: Any): Unit = { - if (settings.verbose || settings.isDebug) + if (settings.verbose.value || settings.isDebug) println("[check: %s] %s".format(phase.prev, msg)) } @@ -195,8 +194,8 @@ abstract class TreeCheckers extends Analyzer { } } - def checkTrees() { - if (settings.verbose) + def checkTrees(): Unit = { + if (settings.verbose.value) Console.println("[consistency check at the beginning of phase " + phase + "]") currentRun.units foreach (x => wrap(x)(check(x))) @@ -210,7 +209,7 @@ abstract class TreeCheckers extends Analyzer { assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit 
is " + unit) currentRun.currentUnit = unit0 } - def check(unit: CompilationUnit) { + def check(unit: CompilationUnit): Unit = { informProgress("checking "+unit) val context = rootContext(unit, checking = true) tpeOfTree.clear() @@ -291,7 +290,7 @@ abstract class TreeCheckers extends Analyzer { case _ => traverseInternal(tree) } - private def traverseInternal(tree: Tree) { + private def traverseInternal(tree: Tree): Unit = { if (!tree.canHaveAttrs) return @@ -313,10 +312,7 @@ abstract class TreeCheckers extends Analyzer { if (accessed != NoSymbol) { val agetter = accessed.getterIn(sym.owner) val asetter = accessed.setterIn(sym.owner) - - assertFn(agetter == sym || asetter == sym, - sym + " is getter or setter, but accessed sym " + accessed + " shows " + agetter + " and " + asetter - ) + assertFn(agetter == sym || asetter == sym, s"$sym is getter or setter, but accessed sym $accessed shows $agetter and $asetter") } } } @@ -328,7 +324,7 @@ abstract class TreeCheckers extends Analyzer { if (args exists (_ == EmptyTree)) errorFn(tree.pos, "Apply arguments to " + fn + " contains an empty tree: " + args) - case Select(qual, name) => + case Select(_, _) => checkSym(tree) case This(_) => checkSym(tree) @@ -349,14 +345,13 @@ abstract class TreeCheckers extends Analyzer { checkSym(tree) tree match { - case x: PackageDef => - if ((sym.ownerChain contains currentOwner) || currentOwner.isEmptyPackageClass) () - else fail(sym + " owner chain does not contain currentOwner " + currentOwner + sym.ownerChain) + case _: PackageDef if sym.ownerChain.contains(currentOwner) || currentOwner.isEmptyPackageClass => () + case _: PackageDef => fail(s"$sym owner chain does not contain currentOwner ${currentOwner}${sym.ownerChain}") case _ => def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner if (sym.owner != currentOwner) { - val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse { fail("DefTree can't find owner: ") ; NoSymbol } + val expected = 
currentOwner.ownerChain.find(cond(_)).getOrElse { fail("DefTree can't find owner: ") ; NoSymbol } if (sym.owner != expected) fail(sm"""| | currentOwner chain: ${currentOwner.ownerChain take 3 mkString " -> "} @@ -368,7 +363,7 @@ abstract class TreeCheckers extends Analyzer { super.traverse(tree) } - private def checkSymbolRefsRespectScope(enclosingMemberDefs: List[MemberDef], tree: Tree) { + private def checkSymbolRefsRespectScope(enclosingMemberDefs: List[MemberDef], tree: Tree): Unit = { def symbolOf(t: Tree): Symbol = if (t.symbol eq null) NoSymbol else t.symbol def typeOf(t: Tree): Type = if (t.tpe eq null) NoType else t.tpe def infoOf(t: Tree): Type = symbolOf(t).info @@ -411,7 +406,7 @@ abstract class TreeCheckers extends Analyzer { val fmt = "%-" + width + "s" val lines = pairs map { case (s: Symbol, msg) => fmt.format(msg) + " in " + ownersString(s) - case (x, msg) => fmt.format(msg) + case (_, msg) => fmt.format(msg) } lines.mkString("Out of scope symbol reference {\n", "\n", "\n}") } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index d4a5f1f6d59b..66ff438bb599 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,13 +13,13 @@ package scala.tools.nsc package typechecker -import scala.collection.mutable -import scala.collection.mutable.ListBuffer +import scala.annotation._ +import scala.collection.mutable, mutable.ListBuffer +import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ import scala.util.control.Exception.ultimately import symtab.Flags._ -import PartialFunction.condOpt -import scala.annotation.tailrec -import scala.tools.nsc.Reporting.WarningCategory +import PartialFunction.{cond, condOpt} /** An interface to enable higher configurability of diagnostic messages * regarding type errors. This is barely a beginning as error messages are @@ -38,10 +38,9 @@ import scala.tools.nsc.Reporting.WarningCategory * And more, and there is plenty of overlap, so it'll be a process. * * @author Paul Phillips - * @version 1.0 */ -trait TypeDiagnostics { - self: Analyzer with StdAttachments => +trait TypeDiagnostics extends splain.SplainDiagnostics { + _: Analyzer with StdAttachments => import global._ import definitions._ @@ -49,9 +48,9 @@ trait TypeDiagnostics { /** For errors which are artifacts of the implementation: such messages * indicate that the restriction may be lifted in the future. 
*/ - def restrictionWarning(pos: Position, unit: CompilationUnit, msg: String, category: WarningCategory, site: Symbol): Unit = + def restrictionWarning(pos: Position, @unused unit: CompilationUnit, msg: String, category: WarningCategory, site: Symbol): Unit = runReporting.warning(pos, "Implementation restriction: " + msg, category, site) - def restrictionError(pos: Position, unit: CompilationUnit, msg: String): Unit = + def restrictionError(pos: Position, @unused unit: CompilationUnit, msg: String): Unit = reporter.error(pos, "Implementation restriction: " + msg) /** A map of Positions to addendums - if an error involves a position in @@ -87,12 +86,6 @@ trait TypeDiagnostics { prefix + name.decode } - // Bind of pattern var was `x @ _` - private def atBounded(t: Tree) = t.hasAttachment[NoWarnAttachment.type] - - // ValDef was a PatVarDef `val P(x) = ???` - private def wasPatVarDef(t: Tree) = t.hasAttachment[PatVarDefAttachment.type] - /** Does the positioned line assigned to t1 precede that of t2? */ def posPrecedes(p1: Position, p2: Position) = p1.isDefined && p2.isDefined && p1.line < p2.line @@ -117,11 +110,9 @@ trait TypeDiagnostics { else "" private def methodTypeErrorString(tp: Type) = tp match { - case mt @ MethodType(params, resultType) => - def forString = params map (_.defString) - - forString.mkString("(", ",", ")") + resultType - case x => x.toString + case MethodType(params, resultType) => + params.map(_.defString).mkString("(", ",", s")$resultType") + case tp => tp.toString } /** @@ -145,7 +136,7 @@ trait TypeDiagnostics { case _ => Nil } def alternativesString(tree: Tree) = - alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " \n", "\n") + alternatives(tree).map(x => " " + methodTypeErrorString(x)).mkString("", " \n", "\n") /** The symbol which the given accessor represents (possibly in part). 
* This is used for error messages, where we want to speak in terms @@ -171,7 +162,9 @@ trait TypeDiagnostics { def patternMessage = "pattern " + tree.tpe.finalResultType + valueParamsString(tree.tpe) def exprMessage = "expression of type " + tree.tpe - def overloadedMessage = s"overloaded method $sym with alternatives:\n" + alternativesString(tree) + def overloadedMessage = + if (sym.isConstructor) s"multiple constructors for ${sym.owner.decodedName}${sym.idString} with alternatives:\n${alternativesString(tree)}" + else s"overloaded method ${sym.decodedName} with alternatives:\n${alternativesString(tree)}" def moduleMessage = "" + sym def defaultMessage = moduleMessage + preResultString + tree.tpe def applyMessage = defaultMessage + tree.symbol.locationString @@ -202,7 +195,7 @@ trait TypeDiagnostics { // expanded if necessary to disambiguate simple identifiers. val deepDealias = DealiasedType(tp) if (tp eq deepDealias) "" else { - // A sanity check against expansion being identical to original. + // A check against expansion being identical to original. val s = "" + deepDealias if (s == "" + tp) "" else "\n (which expands to) " + s @@ -217,6 +210,7 @@ trait TypeDiagnostics { * * TODO: handle type aliases better. */ + @nowarn("cat=lint-nonlocal-return") def explainVariance(found: Type, req: Type): String = { found.baseTypeSeq.toList foreach { tp => if (tp.typeSymbol isSubClass req.typeSymbol) { @@ -276,7 +270,7 @@ trait TypeDiagnostics { val messages = relationships.flatten // the condition verifies no type argument came back None if (messages.size == foundArgs.size) - return messages filterNot (_ == "") mkString ("\n", "\n", "") + return messages.filterNot(_ == "").mkString("\n", "\n", "") } } } @@ -285,9 +279,7 @@ trait TypeDiagnostics { // For found/required errors where AnyRef would have sufficed: // explain in greater detail. 
- def explainAnyVsAnyRef(found: Type, req: Type): String = { - if (AnyRefTpe <:< req) notAnyRefMessage(found) else "" - } + def explainAnyVsAnyRef(found: Type, req: Type): String = if (AnyRefTpe <:< req) notAnyRefMessage(found).pipe(msg => if (msg.isEmpty) "" else "\n" + msg) else "" def finalOwners(tpe: Type): Boolean = (tpe.prefix == NoPrefix) || recursivelyFinal(tpe) @@ -309,7 +301,7 @@ trait TypeDiagnostics { // when the message will never be seen. I though context.reportErrors // being false would do that, but if I return "" under // that condition, I see it. - def foundReqMsg(found: Type, req: Type): String = { + def builtinFoundReqMsg(found: Type, req: Type): String = { val foundWiden = found.widen val reqWiden = req.widen val sameNamesDifferentPrefixes = @@ -339,18 +331,23 @@ trait TypeDiagnostics { } } + def foundReqMsg(found: Type, req: Type): String = { + val errMsg = splainFoundReqMsg(found, req) + if (errMsg.isEmpty) builtinFoundReqMsg(found, req) else errMsg + } + def typePatternAdvice(sym: Symbol, ptSym: Symbol) = { val clazz = if (sym.isModuleClass) sym.companionClass else sym val caseString = if (clazz.isCaseClass && (clazz isSubClass ptSym)) ( clazz.caseFieldAccessors - map (_ => "_") // could use the actual param names here - mkString (s"`case ${clazz.name}(", ",", ")`") + .map(_ => "_") // could use the actual param names here + .mkString(s"`case ${clazz.name}(", ",", ")`") ) else "`case _: " + (clazz.typeParams match { case Nil => "" + clazz.name - case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]") + case xs => xs.map(_ => "_").mkString(s"${clazz.name}[", ",", "]") })+ "`" if (!clazz.exists) "" @@ -363,7 +360,7 @@ trait TypeDiagnostics { private val savedName = sym.name private var postQualifiedWith: List[Symbol] = Nil def restoreName() = sym.name = savedName - def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString)) + def modifyName(f: String => String): Unit = sym setName 
newTypeName(f(sym.name.toString)) // functions to manipulate the name def preQualify() = modifyName(trueOwner.fullName + "." + _) @@ -481,7 +478,7 @@ trait TypeDiagnostics { } def apply(context: Context, tree: Tree): Tree = { - if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && !context.contextMode.inAny(ContextMode.SuppressDeadArgWarning)) + if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && !context.contextMode.inAny(ContextMode.SuppressDeadArgWarning)) context.warning(tree.pos, "dead code following this construct", WarningCategory.WFlagDeadCode) tree } @@ -494,20 +491,39 @@ trait TypeDiagnostics { val ignoreNames: Set[TermName] = Set( "readResolve", "readObject", "writeObject", "writeReplace" ).map(TermName(_)) + + // Bind of pattern var was `x @ _`; also used for wildcard, e.g. `_ <- e` + private def nowarn(tree: Bind): Boolean = tree.hasAttachment[NoWarnAttachment.type] + private def nowarn(tree: ValDef): Boolean = tree.hasAttachment[NoWarnAttachment.type] + + // ValDef was a PatVarDef `val P(x) = ???` + private def wasPatVarDef(tree: ValDef): Boolean = tree.hasAttachment[PatVarDefAttachment.type] + private def wasPatVarDef(sym: Symbol): Boolean = sym.hasAttachment[PatVarDefAttachment.type] } class UnusedPrivates extends Traverser { - import UnusedPrivates.ignoreNames - def isEffectivelyPrivate(sym: Symbol): Boolean = false - val defnTrees = ListBuffer[MemberDef]() - val targets = mutable.Set[Symbol]() - val setVars = mutable.Set[Symbol]() - val treeTypes = mutable.Set[Type]() - val params = mutable.Set[Symbol]() - val patvars = mutable.Set[Symbol]() - - def defnSymbols = defnTrees.toList map (_.symbol) - def localVars = defnSymbols filter (t => t.isLocalToBlock && t.isVar) + import UnusedPrivates.{ignoreNames, nowarn, wasPatVarDef} + def isEffectivelyPrivate(sym: Symbol): Boolean = false // see REPL + val defnTrees = ListBuffer.empty[MemberDef] + val targets = mutable.Set.empty[Symbol] + val setVars = 
mutable.Set.empty[Symbol] + val treeTypes = mutable.Set.empty[Type] + val params = mutable.Set.empty[Symbol] + val patvars = ListBuffer.empty[Tree /*Bind|ValDef*/] + val ignore = mutable.Set.empty[Symbol] // nowarn + + val annots = mutable.Set.empty[AnnotationInfo] // avoid revisiting annotations of symbols and types + + def recordReference(sym: Symbol): Unit = targets.addOne(sym) + + def checkNowarn(tree: Tree): Unit = + tree match { + case tree: Bind => + if (nowarn(tree)) ignore += tree.symbol + case tree: ValDef => + if (nowarn(tree)) ignore += tree.symbol + case _ => + } def qualifiesTerm(sym: Symbol) = ( (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocalToBlock || isEffectivelyPrivate(sym)) @@ -523,42 +539,126 @@ trait TypeDiagnostics { && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym)) ) def isExisting(sym: Symbol) = sym != null && sym.exists + def addPatVar(t: Tree) = { + checkNowarn(t) + patvars += t + } + + // so trivial that it never consumes params + def isTrivial(rhs: Tree): Boolean = + rhs.symbol == Predef_??? 
|| rhs.tpe == null || rhs.tpe =:= NothingTpe || (rhs match { + case Literal(_) => true + case _ => isConstantType(rhs.tpe) || isSingleType(rhs.tpe) || rhs.isInstanceOf[This] + }) override def traverse(t: Tree): Unit = { - val sym = t.symbol t match { - case m: MemberDef if qualifies(sym) && !t.isErrorTyped => + case t: ValDef if wasPatVarDef(t) => // include field excluded by qualifies test + if (settings.warnUnusedPatVars) + addPatVar(t) + case t: MemberDef if qualifies(t.symbol) && !t.isErrorTyped => + val sym = t.symbol t match { - case ValDef(mods@_, name@_, tpt@_, rhs@_) if wasPatVarDef(t) => - if (settings.warnUnusedPatVars && !atBounded(t)) patvars += sym - case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => + case DefDef(_, _, _, vparamss, _, rhs) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => + if (isSuppressed(sym)) return // ignore params and rhs of @unused def if (sym.isPrimaryConstructor) for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa else if (sym.isSynthetic && sym.isImplicit) return - else if (!sym.isConstructor && rhs.symbol != Predef_???) 
- for (vs <- vparamss) params ++= vs.map(_.symbol) - defnTrees += m + else if (!sym.isConstructor && !sym.isVar && !isTrivial(rhs)) + for (vs <- vparamss; v <- vs) if (!isSingleType(v.symbol.tpe)) params += v.symbol + if (sym.isGetter && wasPatVarDef(sym.accessed)) { + if (settings.warnUnusedPatVars) + addPatVar(t) + } + else defnTrees += t + case TypeDef(_, _, _, _) => + if (!sym.isAbstract && !sym.isDeprecated) + defnTrees += t case _ => - defnTrees += m + defnTrees += t } - case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars && !t.isErrorTyped => + case Match(selector, cases) => + // don't warn when a patvar redefines the selector ident: x match { case x: X => } + // or extracts a single patvar named identically to the selector + def allowVariableBindings(n: Name, pat: Tree): Unit = + pat match { + case Bind(`n`, _) => pat.updateAttachment(NoWarnAttachment) + case Apply(_, _) | UnApply(_, _) => // really interested in args + pat.filter(_.isInstanceOf[Bind]) match { // never nme.WILDCARD + case (bind @ Bind(`n`, _)) :: Nil => bind.updateAttachment(NoWarnAttachment) // one only + case _ => + } + case _ => + } + def allow(n: Name): Unit = cases.foreach(k => allowVariableBindings(n, k.pat)) + def loop(selector: Tree): Unit = + selector match { + case Ident(n) => allow(n) + case Typed(expr, _) => loop(expr) + case Select(This(_), n) => allow(n) + case _ => + } + loop(selector) + case CaseDef(pat, _, _) if settings.warnUnusedPatVars && !t.isErrorTyped => + def allowVariableBindings(app: Apply, args: List[Tree]): Unit = + treeInfo.dissectApplied(app).core.tpe match { + case MethodType(ps, _) => + foreach2(ps, args) { (p, x) => + x match { + case Bind(n, _) if p.name == n => x.updateAttachment(NoWarnAttachment) + case _ => + } + } + case _ => + } pat.foreach { - case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol + case app @ Apply(_, args) => allowVariableBindings(app, args) + case b @ Bind(n, _) if n != nme.DEFAULT_CASE => 
addPatVar(b) case _ => } - case _: RefTree if isExisting(sym) => targets += sym + case t: RefTree => + val sym = t.symbol + if (isExisting(sym) && !currentOwner.hasTransOwner(sym) && !t.hasAttachment[ForAttachment.type]) + recordReference(sym) case Assign(lhs, _) if isExisting(lhs.symbol) => setVars += lhs.symbol - case Function(ps, _) if settings.warnUnusedParams && !t.isErrorTyped => params ++= - ps.filterNot(p => atBounded(p) || p.symbol.isSynthetic).map(_.symbol) - case _ => + case Function(ps, _) if !t.isErrorTyped => + for (p <- ps) { + if (wasPatVarDef(p)) { + if (settings.warnUnusedPatVars) + addPatVar(p) + } + else { + if (settings.warnUnusedParams && !p.symbol.isSynthetic) { + checkNowarn(p) + params += p.symbol + } + } + } + case treeInfo.Applied(fun, _, _) + if t.hasAttachment[ForAttachment.type] && fun.symbol != null && isTupleSymbol(fun.symbol.owner.companion) => + return // ignore tupling of assignments + case Literal(_) => + t.attachments.get[OriginalTreeAttachment].foreach(ota => traverse(ota.original)) + case tt: TypeTree => + tt.original match { + case null => + case xo if xo ne tt => traverse(xo) + case _ => + } + case _ => } - if (t.tpe ne null) { - for (tp <- t.tpe) if (!treeTypes(tp)) { + def descend(annot: AnnotationInfo): Unit = + if (!annots(annot)) { + annots.addOne(annot) + traverse(annot.original) + } + if ((t.tpe ne null) && t.tpe != NoType) { + for (tp <- t.tpe if tp != NoType) if (!treeTypes(tp)) { // Include references to private/local aliases (which might otherwise refer to an enclosing class) val isAlias = { val td = tp.typeSymbolDirect - td.isAliasType && (td.isLocal || td.isPrivate) + td.isAliasType && (td.isLocalToBlock || td.isPrivate) } // Ignore type references to an enclosing class. A reference to C must be outside C to avoid warning. 
if (isAlias || !currentOwner.hasTransOwner(tp.typeSymbol)) tp match { @@ -573,31 +673,46 @@ trait TypeDiagnostics { log(s"${if (isAlias) "alias " else ""}$tp referenced from $currentOwner") treeTypes += tp } + for (annot <- tp.annotations) + descend(annot) } // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused. - for (p <- t.tpe.prefix) condOpt(p) { - case SingleType(_, sym) => targets += sym + t.tpe.prefix foreach { + case SingleType(_, sym) => recordReference(sym) + case _ => () } } + + if (t.symbol != null && t.symbol.exists) + for (annot <- t.symbol.annotations) + descend(annot) + super.traverse(t) } + def isSuppressed(sym: Symbol): Boolean = sym.hasAnnotation(UnusedClass) def isUnusedType(m: Symbol): Boolean = ( m.isType + && !isSuppressed(m) && !m.isTypeParameterOrSkolem // would be nice to improve this && (m.isPrivate || m.isLocalToBlock || isEffectivelyPrivate(m)) - && !(treeTypes.exists(_.exists(_.typeSymbolDirect == m))) - ) - def isSyntheticWarnable(sym: Symbol) = ( - sym.isDefaultGetter + && !treeTypes.exists(_.exists(_.typeSymbolDirect == m)) ) + def isSyntheticWarnable(sym: Symbol) = { + def privateSyntheticDefault: Boolean = + cond(nme.defaultGetterToMethod(sym.name)) { + case nme.CONSTRUCTOR => sym.owner.companion.isCaseClass + case nme.copy => sym.owner.typeSignature.member(nme.copy).isSynthetic + } + sym.isParameter || sym.isParamAccessor || sym.isDefaultGetter && !privateSyntheticDefault + } def isUnusedTerm(m: Symbol): Boolean = ( m.isTerm + && !isSuppressed(m) && (!m.isSynthetic || isSyntheticWarnable(m)) && ((m.isPrivate && !(m.isConstructor && m.owner.isAbstract)) || m.isLocalToBlock || isEffectivelyPrivate(m)) && !targets(m) && !(m.name == nme.WILDCARD) // e.g. val _ = foo && (m.isValueParameter || !ignoreNames(m.name.toTermName)) // serialization/repl methods - && !isConstantType(m.info.resultType) // subject to constant inlining && !treeTypes.exists(_ contains m) // e.g. 
val a = new Foo ; new a.Bar ) def isUnusedParam(m: Symbol): Boolean = ( @@ -609,45 +724,104 @@ trait TypeDiagnostics { targets.exists(s => s.isParameter && s.name == m.name && s.owner.isConstructor && s.owner.owner == m.owner) // exclude ctor params )) + && !(m.info.typeSymbol == UnitClass) + && !(m.owner.isClass && m.owner.thisType.baseClasses.contains(AnnotationClass)) + && !ignore(m) ) - def sympos(s: Symbol): Int = - if (s.pos.isDefined) s.pos.point else if (s.isTerm) s.asTerm.referenced.pos.point else -1 - def treepos(t: Tree): Int = - if (t.pos.isDefined) t.pos.point else sympos(t.symbol) - - def unusedTypes = defnTrees.toList.filter(t => isUnusedType(t.symbol)).sortBy(treepos) + def unusedTypes = defnTrees.iterator.filter(t => isUnusedType(t.symbol)) def unusedTerms = { - val all = defnTrees.toList.filter(v => isUnusedTerm(v.symbol)) - // is this a getter-setter pair? and why is this a difficult question for traits? def sameReference(g: Symbol, s: Symbol) = if (g.accessed.exists && s.accessed.exists) g.accessed == s.accessed - else g.owner == s.owner && g.setterName == s.name //sympos(g) == sympos(s) + else g.owner == s.owner && g.setterName == s.name + val all = defnTrees.iterator.filter(v => isUnusedTerm(v.symbol)).toSet // filter out setters if already warning for getter. 
val clean = all.filterNot(v => v.symbol.isSetter && all.exists(g => g.symbol.isGetter && sameReference(g.symbol, v.symbol))) - clean.sortBy(treepos) + clean.iterator } // local vars which are never set, except those already returned in unused - def unsetVars = localVars.filter(v => !setVars(v) && !isUnusedTerm(v)).sortBy(sympos) - def unusedParams = params.toList.filter(isUnusedParam).sortBy(sympos) + def unsetVars = { + def varsWithoutSetters = defnTrees.iterator.map(_.symbol).filter(t => t.isVar && !isExisting(t.setter)) + varsWithoutSetters.filter(v => !isSuppressed(v) && !setVars(v) && !isUnusedTerm(v)) + } + def unusedParams = params.iterator.filter(isUnusedParam) def inDefinedAt(p: Symbol) = p.owner.isMethod && p.owner.name == nme.isDefinedAt && p.owner.owner.isAnonymousFunction - def unusedPatVars = patvars.toList.filter(p => isUnusedTerm(p) && !inDefinedAt(p)).sortBy(sympos) + def unusedPatVars = { + // in elaboration of for comprehensions, patterns are duplicated; + // track a patvar by its symbol position; "original" has a range pos + val all = patvars.filterInPlace(_.symbol.pos.isDefined) + val byPos = all.groupBy(_.symbol.pos.start) + def isNotPrivateOrLocal(s: Symbol) = s.hasAccessorFlag && s.hasNoFlags(PRIVATE | LOCAL) + def isUnusedPatVar(t: Tree): Boolean = + byPos(t.symbol.pos.start).forall(p => + !targets(p.symbol) + && !isNotPrivateOrLocal(p.symbol) + && !ignore(p.symbol) + ) + // the "original" tree has an opaque range; + // for multi-var patdef, tree pos is transparent but sym pos is opaque; + // use the field as the primary definition, and also remove it from targets + // if it has a getter (in which case it has the "local" name to disambiguate). + // Note that for uni-var patdef `val Some(x)`, tree pos is opaque. 
+ def isPrimaryPatVarDefinition(p: Tree): Boolean = + p.symbol.pos.isOpaqueRange && { + val primary = p.pos.isOpaqueRange || p.symbol.isPrivateLocal + if (primary && nme.isLocalName(p.symbol.name)) + targets.subtractOne(p.symbol) // field is trivially accessed by its getter if it has one + primary + } + all.iterator.filter(p => + isPrimaryPatVarDefinition(p) + && isUnusedTerm(p.symbol) + && isUnusedPatVar(p) + && !nme.isFreshTermName(p.symbol.name) + && !inDefinedAt(p.symbol) + ) + } } class checkUnused(typer: Typer) { + private def isMacroAnnotationExpansion(tree: Tree): Boolean = tree.hasSymbolField && isExpanded(tree.symbol) + + private def isMacroExpansion(tree: Tree): Boolean = hasMacroExpansionAttachment(tree) || isMacroAnnotationExpansion(tree) + object skipMacroCall extends UnusedPrivates { override def qualifiesTerm(sym: Symbol): Boolean = super.qualifiesTerm(sym) && !sym.isMacro } object skipMacroExpansion extends UnusedPrivates { - override def traverse(t: Tree): Unit = - if (!hasMacroExpansionAttachment(t)) super.traverse(t) + override def traverse(tree: Tree): Unit = if (!isMacroExpansion(tree)) super.traverse(tree) } object checkMacroExpandee extends UnusedPrivates { - override def traverse(t: Tree): Unit = - super.traverse(if (hasMacroExpansionAttachment(t)) macroExpandee(t) else t) + override def traverse(tree: Tree): Unit = + if (!isMacroAnnotationExpansion(tree)) + super.traverse(if (hasMacroExpansionAttachment(tree)) macroExpandee(tree) else tree) + } + // collect definitions and refs from expandee (and normal trees) but only refs from expanded trees + object checkMacroExpandeeAndExpandedRefs extends UnusedPrivates { + object refCollector extends Traverser { + override def traverse(tree: Tree): Unit = { + tree match { + case _: RefTree if isExisting(tree.symbol) => recordReference(tree.symbol) + case _ => + } + if (tree.tpe != null) tree.tpe.prefix.foreach { + case SingleType(_, sym) => recordReference(sym) + case _ => + } + super.traverse(tree) 
+ } + } + override def traverse(tree: Tree): Unit = + if (hasMacroExpansionAttachment(tree)) { + super.traverse(macroExpandee(tree)) + refCollector.traverse(tree) + } + else if (isMacroAnnotationExpansion(tree)) + refCollector.traverse(tree) + else super.traverse(tree) } private def warningsEnabled: Boolean = { @@ -658,113 +832,156 @@ trait TypeDiagnostics { // `checkUnused` is invoked after type checking. we have to avoid using `typer.context.warning`, which uses // `context.owner` as the `site` of the warning, but that's the root symbol at this point. - def emitUnusedWarning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = runReporting.warning(pos, msg, category, site) + private val unusedWarnings = ListBuffer.empty[(Position, String, WarningCategory, Symbol)] + private def emitUnusedWarning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = + unusedWarnings.addOne((pos, msg, category, site)) + private def reportAll(): Unit = { + implicit val ordering = new Ordering[Position] { + def posOf(p: Position): Int = if (p.isDefined) p.point else -1 + override def compare(x: Position, y: Position): Int = posOf(x) - posOf(y) + } + unusedWarnings.toArray.sortBy(_._1).foreach { case (pos, msg, category, site) => runReporting.warning(pos, msg, category, site) } + unusedWarnings.clear() + } def run(unusedPrivates: UnusedPrivates)(body: Tree): Unit = { unusedPrivates.traverse(body) if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { - def shouldWarnOn(sym: Symbol) = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals + def shouldWarnOn(sym: Symbol) = + if (sym.isPrivate) settings.warnUnusedPrivates && !sym.isTopLevel + else settings.warnUnusedLocals val valAdvice = "is never updated: consider using immutable val" + def varAdvice(v: Symbol) = if (v.accessedOrSelf.hasAttachment[MultiDefAttachment.type]) "is never updated: consider refactoring vars to a separate definition" else valAdvice 
def wcat(sym: Symbol) = if (sym.isPrivate) WarningCategory.UnusedPrivates else WarningCategory.UnusedLocals def termWarning(defn: SymTree): Unit = { val sym = defn.symbol - val pos = ( - if (defn.pos.isDefined) defn.pos - else if (sym.pos.isDefined) sym.pos - else sym match { - case sym: TermSymbol => sym.referenced.pos - case _ => NoPosition + val pos = + sym match { + case sym if sym.pos.isDefined => sym.pos + case sym: TermSymbol if sym.referenced.pos.isDefined => sym.referenced.pos + case _ if defn.pos.isDefined => defn.pos + case _ => NoPosition } - ) val why = if (sym.isPrivate) "private" else "local" var cond = "is never used" - val what = ( + def long = if (settings.uniqid.value) s" (${sym.nameString})" else "" + def getterNameString(sym: Symbol): String = sym.getterName.decoded + long + val what = if (sym.isDefaultGetter) "default argument" else if (sym.isConstructor) "constructor" - else if ( - sym.isVar - || sym.isGetter && (sym.accessed.isVar || (sym.owner.isTrait && !sym.hasFlag(STABLE))) - ) s"var ${sym.name.getterName.decoded}" - else if ( - sym.isVal - || sym.isGetter && (sym.accessed.isVal || (sym.owner.isTrait && sym.hasFlag(STABLE))) - || sym.isLazy - ) s"val ${sym.name.decoded}" - else if (sym.isSetter) { cond = valAdvice ; s"var ${sym.name.getterName.decoded}" } - else if (sym.isMethod) s"method ${sym.name.decoded}" - else if (sym.isModule) s"object ${sym.name.decoded}" + else if (sym.isSetter) { cond = varAdvice(sym); s"var ${getterNameString(sym)}" } + else if (sym.isVar || sym.isGetter && sym.accessed.isVar) s"var ${sym.nameString}" + else if (sym.isVal || sym.isGetter && sym.accessed.isVal || sym.isLazy) s"val ${sym.nameString}" + else if (sym.isMethod) s"method ${sym.nameString}" + else if (sym.isModule) s"object ${sym.nameString}" else "term" - ) + // consider using sym.owner.fullLocationString emitUnusedWarning(pos, s"$why $what in ${sym.owner} $cond", wcat(sym), sym) } def typeWarning(defn: SymTree): Unit = { - val why = if 
(defn.symbol.isPrivate) "private" else "local" - emitUnusedWarning(defn.pos, s"$why ${defn.symbol.fullLocationString} is never used", wcat(defn.symbol), defn.symbol) + val sym = defn.symbol + val why = if (sym.isPrivate) "private" else "local" + val pos = if (sym.pos.isDefined) sym.pos else defn.pos + emitUnusedWarning(pos, s"$why ${sym.fullLocationString} is never used", wcat(sym), sym) } for (defn <- unusedPrivates.unusedTerms if shouldWarnOn(defn.symbol)) { termWarning(defn) } for (defn <- unusedPrivates.unusedTypes if shouldWarnOn(defn.symbol)) { typeWarning(defn) } for (v <- unusedPrivates.unsetVars) { - emitUnusedWarning(v.pos, s"local var ${v.name} in ${v.owner} ${valAdvice}", WarningCategory.UnusedPrivates, v) + emitUnusedWarning(v.pos, s"local var ${v.nameString} in ${v.owner} ${varAdvice(v)}", WarningCategory.UnusedPrivates, v) } } - if (settings.warnUnusedPatVars) { + if (settings.warnUnusedPatVars) for (v <- unusedPrivates.unusedPatVars) - emitUnusedWarning(v.pos, s"pattern var ${v.name} in ${v.owner} is never used: use a wildcard `_` or suppress this warning with `${v.name}@_`", WarningCategory.UnusedPatVars, v) - } + emitUnusedWarning(v.symbol.pos, s"pattern var ${v.symbol.name.dropLocal} in ${v.symbol.owner} is never used", WarningCategory.UnusedPatVars, v.symbol) if (settings.warnUnusedParams) { - def isImplementation(m: Symbol): Boolean = { + // don't warn unused args of overriding methods (or methods matching in self-type) + def isImplementation(m: Symbol): Boolean = m.isMethod && { def classOf(s: Symbol): Symbol = if (s.isClass || s == NoSymbol) s else classOf(s.owner) - val opc = new overridingPairs.Cursor(classOf(m)) - opc.iterator.exists(pair => pair.low == m) - } - def isConvention(p: Symbol): Boolean = { - (p.name.decoded == "args" && p.owner.isMethod && p.owner.name.decoded == "main") || - (p.tpe =:= typeOf[scala.Predef.DummyImplicit]) + val classOfM = classOf(m) + if (classOfM.hasSelfType) { + val opc = new 
overridingPairs.PairsCursor(classOfM) { + override protected def bases: List[Symbol] = self.baseClasses + } + opc.iterator.exists(pair => pair.low == m || pair.high == m) + } else { + val opc = new overridingPairs.PairsCursor(classOfM) + opc.iterator.exists(_.low == m) + } } - def warningIsOnFor(s: Symbol) = if (s.isImplicit) settings.warnUnusedImplicits else settings.warnUnusedExplicits + def isEmptyMarker(p: Symbol): Boolean = p.info.members.reverseIterator.forall(isUniversalMember(_)) // nonTrivialMembers(p).isEmpty + def isConvention(p: Symbol): Boolean = ( + p.name.decoded == "args" && p.owner.isMethod && p.owner.name.decoded == "main" + || + p.isImplicit && cond(p.tpe.typeSymbol) { case SameTypeClass | SubTypeClass | DummyImplicitClass => true } + ) + def warningIsOnFor(s: Symbol) = + if (!s.isImplicit) settings.warnUnusedExplicits + else { + if (!s.isSynthetic) settings.warnUnusedImplicits + else settings.warnUnusedSynthetics + } && !isEmptyMarker(s) def warnable(s: Symbol) = ( warningIsOnFor(s) && !isImplementation(s.owner) && !isConvention(s) ) - for (s <- unusedPrivates.unusedParams if warnable(s)) - emitUnusedWarning(s.pos, s"parameter $s in ${if (s.owner.isAnonymousFunction) "anonymous function" else s.owner} is never used", WarningCategory.UnusedParams, s) + for (s <- unusedPrivates.unusedParams if warnable(s)) { + val what = + if (s.name.startsWith(nme.EVIDENCE_PARAM_PREFIX)) s"evidence parameter ${s.nameString} of type ${s.tpe}" + else s"parameter ${s.nameString}" + val where = + if (s.owner.isAnonymousFunction) "anonymous function" else s.owner.toString + emitUnusedWarning(s.pos, s"$what in $where is never used", WarningCategory.UnusedParams, s) + } } } def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava && !typer.context.reporter.hasErrors) { val body = unit.body - // TODO the message should distinguish whether the unusage is before or after macro expansion. 
+ // TODO the message should distinguish whether the non-usage is before or after macro expansion. settings.warnMacros.value match { + case "default"=> run(checkMacroExpandeeAndExpandedRefs)(body) case "none" => run(skipMacroExpansion)(body) case "before" => run(checkMacroExpandee)(body) case "after" => run(skipMacroCall)(body) case "both" => run(checkMacroExpandee)(body) ; run(skipMacroCall)(body) } + reportAll() } } + trait TyperDiagnostics { - self: Typer => + _: Typer => def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) = context.warning(pos, "imported `%s` is permanently hidden by definition of %s".format(hidden, defn.fullLocationString), WarningCategory.OtherShadowing) - private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded - private def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive" - /** Returns Some(msg) if the given tree is untyped apparently due * to a cyclic reference, and None otherwise. */ - def cyclicReferenceMessage(sym: Symbol, tree: Tree) = condOpt(tree) { - case ValDef(_, _, TypeTree(), _) => s"recursive $sym needs type" - case DefDef(_, _, _, _, TypeTree(), _) => s"${cyclicAdjective(sym)} $sym needs result type" - case Import(expr, selectors) => - """encountered unrecoverable cycle resolving import. - |Note: this is often due in part to a class depending on a definition nested within its companion. 
- |If applicable, you may wish to try moving some members into another object.""".stripMargin + def cyclicReferenceMessage(sym: Symbol, tree: Tree, trace: Array[Symbol], pos: Position) = { + def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded + def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive" + + val badsym = if (!sym.isSynthetic) sym else { + val organics = trace.filter(!_.isSynthetic) + if (organics.length == 0) sym + else if (organics.length == 1) organics(0) + else organics.find(_.pos.focus == pos.focus).getOrElse(organics(0)) + } + def help = if (!badsym.isSynthetic || settings.cyclic.value) "" else + s"; $badsym is synthetic; use -Vcyclic to find which definition needs an explicit type" + condOpt(tree) { + case ValDef(_, _, TypeTree(), _) => s"recursive $badsym needs type$help" + case DefDef(_, _, _, _, TypeTree(), _) => s"${cyclicAdjective(badsym)} $badsym needs result type$help" + case Import(_, _) => + sm"""encountered unrecoverable cycle resolving import. + |Note: this is often due in part to a class depending on a definition nested within its companion. 
+ |If applicable, you may wish to try moving some members into another object.""" + } } // warn about class/method/type-members' type parameters that shadow types already in scope @@ -790,7 +1007,7 @@ trait TypeDiagnostics { * @param pos The position where to report the error * @param ex The exception that caused the error */ - def reportTypeError(context0: Context, pos: Position, ex: TypeError) { + def reportTypeError(context0: Context, pos: Position, ex: TypeError): Unit = { if (ex.pos == NoPosition) ex.pos = pos // TODO: should be replaced by throwErrors // but it seems that throwErrors excludes some of the errors that should actually be @@ -799,23 +1016,79 @@ trait TypeDiagnostics { if (settings.isDebug) ex.printStackTrace() ex match { - case CyclicReference(sym, info: TypeCompleter) => - if (context0.owner.isTermMacro) { - // see comments to TypeSigError for an explanation of this special case - throw ex - } else { - val pos = info.tree match { - case Import(expr, _) => expr.pos - case _ => ex.pos - } - context0.error(pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage()) - - if (sym == ObjectClass) - throw new FatalError("cannot redefine root "+sym) + // see comments to TypeSigError for an explanation of this special case + case _: CyclicReference if context0.owner.isTermMacro => throw ex + case CyclicReference(sym, info: TypeCompleter, trace) => + val pos = info.tree match { + case Import(expr, _) => expr.pos + case _ => ex.pos } + context0.error(pos, cyclicReferenceMessage(sym, info.tree, trace, pos).getOrElse(ex.getMessage)) + + if (sym == ObjectClass) throw new FatalError(s"cannot redefine root $sym") case _ => context0.error(ex.pos, ex.msg) } } + + /** Check that type `tree` does not refer to private + * components unless itself is wrapped in something private + * (`owner` tells where the type occurs). 
+ */ + def checkNoEscapingPrivates(typer: Typer, owner: Symbol, tree: Tree): Tree = + if (owner.isJavaDefined) tree + else new CheckNoEscaping(typer, owner, tree).check(tree) + + /** Check that type of given tree does not contain local or private components. */ + private final class CheckNoEscaping(typer: Typer, owner: Symbol, tree: Tree) extends TypeMap { + private var hiddenSymbols: List[Symbol] = Nil + + def check(tree: Tree): Tree = { + import typer.TyperErrorGen._ + val tp1 = apply(tree.tpe) + if (hiddenSymbols.isEmpty) tree setType tp1 + else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree) + else if (tp1.typeSymbol.isAnonymousClass) + check(tree setType tp1.typeSymbol.classBound) + else if (owner == NoSymbol) + tree setType packSymbols(hiddenSymbols.reverse, tp1) + else if (!isPastTyper) { // privates + val badSymbol = hiddenSymbols.head + SymbolEscapesScopeError(tree, badSymbol) + } else tree + } + + def addHidden(sym: Symbol) = + if (!(hiddenSymbols contains sym)) hiddenSymbols = sym :: hiddenSymbols + + override def apply(t: Type): Type = { + def checkNoEscape(sym: Symbol): Unit = { + if (sym.isPrivate && !sym.hasFlag(SYNTHETIC_PRIVATE)) { + var o = owner + while (o != NoSymbol && o != sym.owner && o != sym.owner.linkedClassOfClass && + !o.isLocalToBlock && !o.isPrivate && + !o.privateWithin.hasTransOwner(sym.owner)) + o = o.owner + if (o == sym.owner || o == sym.owner.linkedClassOfClass) + addHidden(sym) + } + } + mapOver( + t match { + case TypeRef(_, sym, args) => + checkNoEscape(sym) + if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym && + sym.isAliasType && sameLength(sym.typeParams, args)) { + hiddenSymbols = hiddenSymbols.tail + t.dealias + } else t + case SingleType(_, sym) => + checkNoEscape(sym) + t + case _ => + t + }) + } + } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala index f0da3193040a..12f84de815b3 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,10 +13,12 @@ package scala.tools.nsc package typechecker -import java.lang.{ reflect => r } +import java.lang.{reflect => r} import r.TypeVariable + import scala.reflect.NameTransformer import NameTransformer._ +import scala.collection.immutable.ArraySeq import scala.reflect.runtime.{universe => ru} import scala.reflect.{ClassTag, classTag} @@ -205,7 +207,7 @@ trait TypeStrings { else scalaName(xs.head) } private def tparamString(clazz: JClass): String = { - brackets(clazz.getTypeParameters map tvarString: _*) + brackets(ArraySeq.unsafeWrapArray(clazz.getTypeParameters map tvarString): _*) } private def tparamString[T: ru.TypeTag] : String = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 378b32796b83..f00f25359682 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,30 +10,22 @@ * additional information regarding copyright ownership. 
*/ -// Added: Sat Oct 7 16:08:21 2006 -//todo: use inherited type info also for vars and values - -// Added: Thu Apr 12 18:23:58 2007 -//todo: disallow C#D in superclass -//todo: treat :::= correctly package scala package tools.nsc package typechecker -import scala.collection.mutable -import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics} -import scala.reflect.internal.TypesStats -import mutable.ListBuffer +import scala.annotation._ +import scala.collection.mutable, mutable.{ArrayBuffer, ListBuffer} +import scala.reflect.internal.{Chars, TypesStats} +import scala.reflect.internal.util.{CodeAction, FreshNameCreator, ListOfNil, Statistics} +import scala.tools.nsc.Reporting.{MessageFilter, Suppression, WConf, WarningCategory}, WarningCategory.Scala3Migration +import scala.util.chaining._ import symtab.Flags._ import Mode._ -import scala.tools.nsc.Reporting.{MessageFilter, Suppression, WConf, WarningCategory} -// Suggestion check whether we can do without priming scopes with symbols of outer scopes, -// like the IDE does. -/** This trait provides methods to assign types to trees. +/** A provider of methods to assign types to trees. * * @author Martin Odersky - * @version 1.0 */ trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers { self: Analyzer => @@ -47,21 +39,28 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final val shortenImports = false + // All typechecked RHS of ValDefs for right-associative operator desugaring + private val rightAssocValDefs = new mutable.HashMap[Symbol, Tree] + // Symbols of ValDefs for right-associative operator desugaring which are passed by name and have been inlined + private val inlinedRightAssocValDefs = new mutable.HashSet[Symbol] + // For each class, we collect a mapping from constructor param accessors that are aliases of their superclass // param accessors. 
At the end of the typer phase, when this information is available all the way up the superclass // chain, this is used to determine which are true aliases, ones where the field can be elided from this class. // And yes, if you were asking, this is yet another binary fragility, as we bake knowledge of the super class into // this class. - private val superConstructorCalls: mutable.AnyRefMap[Symbol, collection.Map[Symbol, Symbol]] = perRunCaches.newAnyRefMap() + private val superConstructorCalls: mutable.HashMap[Symbol, collection.Map[Symbol, Symbol]] = perRunCaches.newMap() // allows override of the behavior of the resetTyper method w.r.t comments def resetDocComments() = clearDocComments() - def resetTyper() { + def resetTyper(): Unit = { //println("resetTyper called") resetContexts() resetImplicits() resetDocComments() + rightAssocValDefs.clear() + inlinedRightAssocValDefs.clear() superConstructorCalls.clear() } @@ -80,17 +79,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper @inline final def filter(p: T => Boolean): SilentResult[T] = this match { case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p"))) case _ => this - } + } @inline final def orElse[T1 >: T](f: Seq[AbsTypeError] => T1): T1 = this match { case SilentResultValue(value) => value - case s : SilentTypeError => f(s.reportableErrors) + case s: SilentTypeError => f(s.reportableErrors) } } - class SilentTypeError private(val errors: List[AbsTypeError], val warnings: List[(Position, String, WarningCategory, Symbol)]) extends SilentResult[Nothing] { + class SilentTypeError private(val errors: List[AbsTypeError], val warnings: List[ContextWarning]) extends SilentResult[Nothing] { override def isEmpty = true def err: AbsTypeError = errors.head def reportableErrors = errors match { - case (e1: AmbiguousImplicitTypeError) +: _ => + case (e1: AmbiguousImplicitTypeError) :: _ => List(e1) // DRYer error reporting for 
neg/t6436b.scala case all => all @@ -98,7 +97,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } object SilentTypeError { def apply(errors: AbsTypeError*): SilentTypeError = apply(errors.toList, Nil) - def apply(errors: List[AbsTypeError], warnings: List[(Position, String, WarningCategory, Symbol)]): SilentTypeError = new SilentTypeError(errors, warnings) + def apply(errors: List[AbsTypeError], warnings: List[ContextWarning]): SilentTypeError = new SilentTypeError(errors, warnings) // todo: this extracts only one error, should be a separate extractor. def unapply(error: SilentTypeError): Option[AbsTypeError] = error.errors.headOption } @@ -113,88 +112,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // A transient flag to mark members of anonymous classes // that are turned private by typedBlock - private final val SYNTHETIC_PRIVATE = TRANS_FLAG + private[typechecker] final val SYNTHETIC_PRIVATE = TRANS_FLAG private final val InterpolatorCodeRegex = """\$\{\s*(.*?)\s*\}""".r - private final val InterpolatorIdentRegex = """\$[$\w]+""".r // note that \w doesn't include $ - - /** Check that type of given tree does not contain local or private - * components. - */ - object checkNoEscaping extends TypeMap { - private var owner: Symbol = _ - private var scope: Scope = _ - private var hiddenSymbols: List[Symbol] = _ - - /** Check that type `tree` does not refer to private - * components unless itself is wrapped in something private - * (`owner` tells where the type occurs). 
- */ - def privates[T <: Tree](typer: Typer, owner: Symbol, tree: T): T = - if (owner.isJavaDefined) tree else check(typer, owner, EmptyScope, WildcardType, tree) - - private def check[T <: Tree](typer: Typer, owner: Symbol, scope: Scope, pt: Type, tree: T): T = { - this.owner = owner - this.scope = scope - hiddenSymbols = List() - import typer.TyperErrorGen._ - val tp1 = apply(tree.tpe) - if (hiddenSymbols.isEmpty) tree setType tp1 - else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree) - else if (isFullyDefined(pt)) tree setType pt - else if (tp1.typeSymbol.isAnonymousClass) - check(typer, owner, scope, pt, tree setType tp1.typeSymbol.classBound) - else if (owner == NoSymbol) - tree setType packSymbols(hiddenSymbols.reverse, tp1) - else if (!isPastTyper) { // privates - val badSymbol = hiddenSymbols.head - SymbolEscapesScopeError(tree, badSymbol) - } else tree - } - - def addHidden(sym: Symbol) = - if (!(hiddenSymbols contains sym)) hiddenSymbols = sym :: hiddenSymbols - - override def apply(t: Type): Type = { - def checkNoEscape(sym: Symbol): Unit = { - if (sym.isPrivate && !sym.hasFlag(SYNTHETIC_PRIVATE)) { - var o = owner - while (o != NoSymbol && o != sym.owner && o != sym.owner.linkedClassOfClass && - !o.isLocalToBlock && !o.isPrivate && - !o.privateWithin.hasTransOwner(sym.owner)) - o = o.owner - if (o == sym.owner || o == sym.owner.linkedClassOfClass) - addHidden(sym) - } else if (sym.owner.isTerm && !sym.isTypeParameterOrSkolem) { - var e = scope.lookupEntry(sym.name) - var found = false - while (!found && (e ne null) && e.owner == scope) { - if (e.sym == sym) { - found = true - addHidden(sym) - } else { - e = scope.lookupNextEntry(e) - } - } - } - } - mapOver( - t match { - case TypeRef(_, sym, args) => - checkNoEscape(sym) - if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym && - sym.isAliasType && sameLength(sym.typeParams, args)) { - hiddenSymbols = hiddenSymbols.tail - t.dealias - } else t - case SingleType(_, sym) => - 
checkNoEscape(sym) - t - case _ => - t - }) - } - } + private final val InterpolatorIdentRegex = """\$[\w]+""".r // note that \w doesn't include $ private final val typerFreshNameCreators = perRunCaches.newAnyRefMap[Symbol, FreshNameCreator]() def freshNameCreatorFor(context: Context) = typerFreshNameCreators.getOrElseUpdate(context.outermostContextAtCurrentPos.enclClassOrMethod.owner, new FreshNameCreator) @@ -203,7 +124,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def unit = context.unit import typeDebug.ptTree import TyperErrorGen._ - implicit def fresh: FreshNameCreator = freshNameCreatorFor(context) private def transformed: mutable.Map[Tree, Tree] = unit.transformed @@ -215,9 +135,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } /** Overridden to false in scaladoc and/or interactive. */ + def isInteractive = false def canAdaptConstantTypeToLiteral = true def canTranslateEmptyListToNil = true - def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree + def missingSelectErrorTree(tree: Tree, @unused qual: Tree, @unused name: Name): Tree = tree // used to exempt synthetic accessors (i.e. 
those that are synthesized by the compiler to access a field) // from skolemization because there's a weird bug that causes spurious type mismatches @@ -232,7 +153,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // (it erases in TypeTrees, but not in, e.g., the type a Function node) def phasedAppliedType(sym: Symbol, args: List[Type]) = { val tp = appliedType(sym, args) - if (phase.erasedTypes) erasure.specialScalaErasure(tp) else tp + if (phase.erasedTypes) erasure.specialScalaErasureFor(sym)(tp) else tp } def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = @@ -290,6 +211,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper new ApplyToImplicitArgs(fun, args) setPos fun.pos case ErrorType => fun + case x => throw new MatchError(x) } def viewExists(from: Type, to: Type): Boolean = ( @@ -302,6 +224,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // to avoid unpositioned type errors. ) + // Get rid of any special ProtoTypes, so that implicit search won't have to deal with them + private def normalizeProtoForView(proto: Type): Type = proto match { + case proto: OverloadedArgProto => proto.underlying + case pt => pt + } /** Infer an implicit conversion (`view`) between two types. * @param tree The tree which needs to be converted. 
@@ -320,8 +247,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper debuglog(s"Inferring view from $from to $to for $tree (reportAmbiguous= $reportAmbiguous, saveErrors=$saveErrors)") val fromNoAnnot = from.withoutAnnotations - val result = inferImplicitView(fromNoAnnot, to, tree, context, reportAmbiguous, saveErrors) match { - case fail if fail.isFailure => inferImplicitView(byNameType(fromNoAnnot), to, tree, context, reportAmbiguous, saveErrors) + val toNorm = normalizeProtoForView(to) + val result = inferImplicitView(fromNoAnnot, toNorm, tree, context, reportAmbiguous, saveErrors) match { + case fail if fail.isFailure => inferImplicitView(byNameType(fromNoAnnot), toNorm, tree, context, reportAmbiguous, saveErrors) case ok => ok } @@ -335,7 +263,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper import infer._ private var namerCache: Namer = null - def namer = { + def namer: Namer = { if ((namerCache eq null) || namerCache.context != context) namerCache = newNamer(context) namerCache @@ -345,7 +273,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def context1 = context def dropExistential(tp: Type): Type = tp match { - case ExistentialType(tparams, tpe) => + case ExistentialType(tparams, _) => new SubstWildcardMap(tparams).apply(tp) case TypeRef(_, sym, _) if sym.isAliasType => val tp0 = tp.dealias @@ -362,10 +290,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def errorNotClass(tpt: Tree, found: Type) = { ClassTypeRequiredError(tpt, found); false } private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false } - /** Check that `tpt` refers to a non-refinement class type */ - def checkClassType(tpt: Tree): Boolean = { + /** Check that `tpt` refers to a non-refinement class or module type */ + def checkClassOrModuleType(tpt: Tree): Boolean = { val tpe = unwrapToClass(tpt.tpe) 
- isNonRefinementClassType(tpe) || errorNotClass(tpt, tpe) + def isModule = tpe match { + case SingleType(_, sym) => sym.isModule + case _ => false + } + isNonRefinementClassType(tpe) || isModule || errorNotClass(tpt, tpe) } /** Check that `tpt` refers to a class type with a stable prefix. */ @@ -390,67 +322,62 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ) } - /** Check that type `tp` is not a subtype of itself. - */ - def checkNonCyclic(pos: Position, tp: Type): Boolean = { - def checkNotLocked(sym: Symbol) = { - sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false } + class NonCyclicStack { + // for diverging types, neg/t510.scala + private val maxRecursion = 42 + + // For each abstract type symbol (type member, type parameter), keep track of seen types represented by that symbol + private lazy val map = mutable.HashMap[Symbol, ListBuffer[Type]]() + + def lockSymbol[T](sym: Symbol, tp: Type)(body: => T): T = { + val stk = map.getOrElseUpdate(sym, ListBuffer.empty) + stk.prepend(tp) + try body + finally stk.remove(0) } + + def isUnlocked(sym: Symbol, tp: Type): Boolean = + !sym.isNonClassType || !map.get(sym).exists(tps => tps.length > maxRecursion || tps.contains(tp)) + } + + /** Check that type `tp` is not a subtype of itself + */ + def checkNonCyclic(pos: Position, tp: Type, stack: NonCyclicStack = new NonCyclicStack): Boolean = { + def checkNotLocked(sym: Symbol) = + stack.isUnlocked(sym, tp) || { CyclicAliasingOrSubtypingError(pos, sym); false } + tp match { case TypeRef(pre, sym, args) => - checkNotLocked(sym) && - ((!sym.isNonClassType) || checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym)) - // @M! 
info for a type ref to a type parameter now returns a polytype - // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym) + checkNotLocked(sym) && { + !sym.isNonClassType || + stack.lockSymbol(sym, tp) { + checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), stack) + } + } - case SingleType(pre, sym) => + case SingleType(_, sym) => checkNotLocked(sym) case st: SubType => - checkNonCyclic(pos, st.supertype) + checkNonCyclic(pos, st.supertype, stack) case ct: CompoundType => - ct.parents forall (x => checkNonCyclic(pos, x)) + ct.parents forall (x => checkNonCyclic(pos, x, stack)) case _ => true } } - def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try { - if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false - else checkNonCyclic(pos, tp) - } finally { - lockedSym.unlock() - } - - def checkNonCyclic(sym: Symbol) { + def checkNonCyclic(sym: Symbol): Unit = { if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType) } - def checkNonCyclic(defn: Tree, tpt: Tree) { - if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) { + def checkNonCyclic(defn: ValOrDefDef, tpt: Tree): Unit = { + if (!checkNonCyclic(defn.pos, tpt.tpe)) { tpt setType ErrorType defn.symbol.setInfo(ErrorType) } } - def checkParamsConvertible(tree: Tree, tpe0: Type) { - def checkParamsConvertible0(tpe: Type) = - tpe match { - case MethodType(formals, restpe) => - /* - if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1) - error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters") - if (formals exists (isRepeatedParamType(_))) - error(pos, "methods with `*`-parameters cannot be converted to function values"); - */ - if (tpe.isDependentMethodType) - DependentMethodTpeConversionToFunctionError(tree, tpe) - checkParamsConvertible(tree, restpe) - case _ => - } - checkParamsConvertible0(tpe0) - } - - def reenterValueParams(vparamss: 
List[List[ValDef]]) { + def reenterValueParams(vparamss: List[List[ValDef]]): Unit = { for (vparams <- vparamss) for (vparam <- vparams) context.scope enter vparam.symbol @@ -466,10 +393,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * of a this or super with prefix `qual`. * packageOk is equal false when qualifying class symbol */ - def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) = + def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean, immediate: Boolean) = context.enclClass.owner.ownersIterator.find(o => qual.isEmpty || o.isClass && o.name == qual) match { case Some(c) if packageOK || !c.isPackageClass => c - case _ => QualifyingClassError(tree, qual) ; NoSymbol + case _ => + QualifyingClassError(tree, qual) + // Delay `setError` in namer, scala/bug#10748 + if (immediate) setError(tree) else unit.addPostUnitCheck(() => setError(tree)) + NoSymbol } /** The typer for an expression, depending on where we are. If we are before a superclass @@ -538,7 +469,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper || mode.inQualMode && !tree.symbol.isConstant || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.lowerBound.isStable || ptSym.isRefinementClass) ) - + def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match { + case TypeRef(_, _, _) | RefinedType(_, _) => true + case tpe => !isConstantType(tpe) && !phase.erasedTypes + } ( isNarrowable(tree.tpe) && mode.typingExprNotLhs && expectsStable @@ -555,21 +489,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper */ private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Any /*Type | (Tree, Type)*/ = if (!unit.isJava && context.isInPackageObject(sym, pre.typeSymbol)) { - if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) { - // short cut some aliases. 
It seems pattern matching needs this - // to notice exhaustiveness and to generate good code when - // List extractors are mixed with :: patterns. See Test5 in lists.scala. - // - // TODO scala/bug#6609 Eliminate this special case once the old pattern matcher is removed. - def dealias(sym: Symbol) = - (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType) - sym.name match { - case nme.List => return dealias(ListModule) - case nme.Seq => return dealias(SeqModule) - case nme.Nil => return dealias(NilModule) - case _ => - } - } val qual = typedQualifier { atPos(tree.pos.makeTransparent) { tree match { case Ident(_) => @@ -577,20 +496,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!sym.isOverloaded && sym.owner.isModuleClass) sym.owner.sourceModule // historical optimization, perhaps no longer needed else pre.typeSymbol.packageObject Ident(packageObject) - case Select(qual, _) => Select(qual, nme.PACKAGEkw) + case Select(qual, _) => Select(qual, nme.PACKAGEkw) case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw) + case x => throw new MatchError(x) } }} val tree1 = atPos(tree.pos) { tree match { - case Ident(name) => Select(qual, name) - case Select(_, name) => Select(qual, name) + case Ident(name) => Select(qual, name) + case Select(_, name) => Select(qual, name) case SelectFromTypeTree(_, name) => SelectFromTypeTree(qual, name) + case x => throw new MatchError(x) } } - (checkAccessible(tree1, sym, qual.tpe, qual), qual.tpe) + (checkAccessible(tree1, sym, qual.tpe, qual, unit.isJava), qual.tpe) } else { - checkAccessible(tree, sym, pre, site) + checkAccessible(tree, sym, pre, site, unit.isJava) } /** Post-process an identifier or selection node, performing the following: @@ -602,15 +523,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper */ protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = { - // Side effect 
time! Don't be an idiot like me and think you - // can move "val sym = tree.symbol" before this line, because - // inferExprAlternative side-effects the tree's symbol. - if (tree.symbol.isOverloaded && !mode.inFunMode) - inferExprAlternative(tree, pt) - - val sym = tree.symbol + val sym = { + // inferExprAlternative side-effects the tree's symbol. + if (tree.symbol.isOverloaded && !mode.inFunMode) + inferExprAlternative(tree, pt) + tree.symbol + } val isStableIdPattern = mode.typingPatternNotConstructor && tree.isTerm - def isModuleTypedExpr = ( treeInfo.admitsTypeSelection(tree) && (isStableContext(tree, mode, pt) || sym.isModuleNotMethod) @@ -627,33 +546,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // this so for now it requires the type symbol be public. def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic - def narrowIf(tree: Tree, condition: Boolean) = - if (condition) tree setType singleType(pre, sym) else tree - - def checkStable(tree: Tree): Tree = - if (treeInfo.isStableIdentifierPattern(tree)) tree - else UnstableTreeError(tree) - if (tree.isErrorTyped) tree else if (!sym.isValue && isStableValueRequired) // (2) NotAValueError(tree, sym) else if (isStableIdPattern) // (1) - // A module reference in a pattern has type Foo.type, not "object Foo" - narrowIf(checkStable(tree), sym.isModuleNotMethod) + if (!treeInfo.isStableIdentifierPattern(tree)) UnstableTreeError(tree) + else if (sym.isModuleNotMethod) tree.setType(singleType(pre, sym)) + else tree else if (isModuleTypedExpr) // (3) - narrowIf(tree, true) + tree.setType(singleType(pre, sym)) else if (isGetClassCall) // (4) - tree setType MethodType(Nil, getClassReturnType(pre)) + tree.setType(MethodType(Nil, getClassReturnType(pre))) else tree } - private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match { - case TypeRef(_, _, _) | RefinedType(_, _) => true - case _ => !phase.erasedTypes - } - def stabilizeFun(tree: Tree, mode: Mode, 
pt: Type): Tree = { val sym = tree.symbol val pre = tree match { @@ -709,10 +617,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val context1 = context.makeSilent(reportAmbiguousErrors, newtree) context1.undetparams = context.undetparams context1.savedTypeBounds = context.savedTypeBounds + context1.pendingStabilizers = context.pendingStabilizers val typer1 = newTyper(context1) val result = op(typer1) context.undetparams = context1.undetparams context.savedTypeBounds = context1.savedTypeBounds + context.pendingStabilizers = context1.pendingStabilizers // If we have a successful result, emit any warnings it created. if (!context1.reporter.hasErrors) @@ -747,55 +657,34 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * if feature check is delayed or suppressed because we are past typer: true */ def checkFeature(pos: Position, featureTrait: Symbol, construct: => String = "", immediate: Boolean = false): Boolean = - if (isPastTyper) true - else { - val nestedOwners = - featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse - val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name - def action(): Boolean = { + isPastTyper || { + val featureName = { + val nestedOwners = featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse + nestedOwners.map(s => s"${s.name}.").mkString + featureTrait.name + } + settings.language.contains(featureName) || { + def action(): Boolean = { + if (!immediate) + debuglog(s"deferred check of feature $featureTrait") def hasImport = inferImplicitByType(featureTrait.tpe, context).isSuccess - def hasOption = settings.language contains featureName - val OK = hasImport || hasOption - if (!OK) { - val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) = - featureTrait getAnnotation LanguageFeatureAnnot - context.featureWarning(pos, 
featureName, featureDesc, featureTrait, construct, required) + hasImport || { + val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) = + featureTrait.getAnnotation(LanguageFeatureAnnot): @unchecked + context.featureWarning(pos, featureName, featureDesc, featureTrait, construct, required) + false + } } - OK - } - if (immediate) { - action() - } else { - unit.toCheck += action - true + if (immediate) action() + else { unit.addPostUnitCheck(() => action()); true } } } def checkExistentialsFeature(pos: Position, tpe: Type, prefix: String) = tpe match { - case extp: ExistentialType if !extp.isRepresentableWithWildcards => + case extp: ExistentialType if !extp.isRepresentableWithWildcards && !tpe.isError => checkFeature(pos, currentRun.runDefinitions.ExistentialsFeature, prefix+" "+tpe) case _ => } - /** - * Convert a SAM type to the corresponding FunctionType, - * extrapolating BoundedWildcardTypes in the process - * (no type precision is lost by the extrapolation, - * but this facilitates dealing with the types arising from Java's use-site variance). - */ - def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = { - val samSym = sam orElse samOf(tp) - - def correspondingFunctionSymbol = { - val numVparams = samSym.info.params.length - if (numVparams > definitions.MaxFunctionArity) NoSymbol - else FunctionClass(numVparams) - } - - if (samSym.exists && tp.typeSymbol != correspondingFunctionSymbol) // don't treat Functions as SAMs - wildcardExtrapolation(normalize(tp memberInfo samSym)) - else NoType - } /** Perform the following adaptations of expression, pattern or type `tree` wrt to * given mode `mode` and given prototype `pt`: @@ -807,11 +696,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * store these instances in context.undetparams, * unless followed by explicit type application. 
* (4) Do the following to unapplied methods used as values: - * (4.1) If the method has only implicit parameters pass implicit arguments - * (4.2) otherwise, if `pt` is a function type and method is not a constructor, - * convert to function by eta-expansion, - * (4.3) otherwise, if the method is nullary with a result type compatible to `pt` - * and it is not a constructor, apply it to () + * (4.1) If the method has only implicit parameters, pass implicit arguments (see adaptToImplicitMethod) + * (4.2) otherwise, if the method is 0-ary and it can be auto-applied (see checkCanAutoApply), apply it to () + * (4.3) otherwise, if the method is not a constructor, and can be eta-expanded (see checkCanEtaExpand), eta-expand * otherwise issue an error * (5) Convert constructors in a pattern as follows: * (5.1) If constructor refers to a case class factory, set tree's type to the unique @@ -857,17 +744,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper setError(tree) else withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 => - if (original != EmptyTree && pt != WildcardType) { + if (original != EmptyTree && !pt.isWildcard) { typer1 silent { tpr => val withImplicitArgs = tpr.applyImplicitArgs(tree) if (tpr.context.reporter.hasErrors) tree // silent will wrap it in SilentTypeError anyway else tpr.typed(withImplicitArgs, mode, pt) - } orElse { _ => + } orElse { originalErrors => // Re-try typing (applying to implicit args) without expected type. Add in 53d98e7d42 to - // for better error message (scala/bug#2180, http://www.scala-lang.org/old/node/3453.html) + // for better error message (scala/bug#2180, https://www.scala-lang.org/old/node/3453.html) val resetTree = resetAttrs(original) resetTree match { - case treeInfo.Applied(fun, targs, args) => + case treeInfo.Applied(fun, _, _) => if (fun.symbol != null && fun.symbol.isError) // scala/bug#9041 Without this, we leak error symbols past the typer! 
// because the fallback typechecking notices the error-symbol, @@ -875,11 +762,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // else was responsible for issuing the related type error! fun.setSymbol(NoSymbol) } - debuglog(s"fallback on implicits: ${tree}/$resetTree") + debuglog(s"fallback on implicits: $tree/$resetTree") // scala/bug#10066 Need to patch the enclosing tree in the context to make translation of Dynamic // work during fallback typechecking below. val resetContext: Context = { - object substResetForOriginal extends Transformer { + object substResetForOriginal extends AstTransformer { override def transform(tree: Tree): Tree = { if (tree eq original) resetTree else super.transform(tree) @@ -888,11 +775,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper context.make(substResetForOriginal.transform(context.tree)) } typerWithLocalContext(resetContext) { typer1 => - val tree1 = typer1.typed(resetTree, mode) - // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that - // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin. - tree1 setType pluginsTyped(tree1.tpe, typer1, tree1, mode, pt) - if (tree1.isEmpty) tree1 else typer1.adapt(tree1, mode, pt) + typer1.silent { typer1 => + val tree1 = typer1.typed(resetTree, mode) + // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that + // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin. 
+ tree1 setType pluginsTyped(tree1.tpe, typer1, tree1, mode, pt) + if (tree1.isEmpty) tree1 else typer1.adapt(tree1, mode, pt, original = EmptyTree) + } orElse { _ => + originalErrors.foreach(context.issue) + setError(tree) + } } } } @@ -901,51 +793,122 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ) } - def instantiateToMethodType(mt: MethodType): Tree = { - val meth = tree match { - // a partial named application is a block (see comment in EtaExpansion) - case Block(_, tree1) => tree1.symbol - case _ => tree.symbol - } + def adaptMethodTypeToExpr(mt: MethodType): Tree = { + val meth = + tree match { + // a partial named application is a block (see comment in EtaExpansion) + // How about user-written blocks? Can they ever have a MethodType? + case Block(_, tree1) => tree1.symbol + case _ => tree.symbol + } - def cantAdapt = - if (context.implicitsEnabled) MissingArgsForMethodTpeError(tree, meth) - else UnstableTreeError(tree) - def emptyApplication: Tree = { - val apply = Apply(tree, Nil).setPos(tree.pos).updateAttachment(AutoApplicationAttachment) - if (tree.hasAttachment[PostfixAttachment.type]) apply.updateAttachment(InfixAttachment) - adapt(typed(apply), mode, pt, original) + val arity = mt.params.length + + def warnTree = original orElse tree + + def warnEtaZero(): true = { + if (settings.warnEtaZero) { + context.warning(tree.pos, + s"""An unapplied 0-arity method was eta-expanded (due to the expected type $pt), rather than applied to `()`. 
+ |Write ${Apply(warnTree, Nil)} to invoke method ${meth.decodedName}, or change the expected type.""".stripMargin, + WarningCategory.LintEtaZero) + } + true + } + + def warnEtaSam(): true = { + if (settings.warnEtaSam || currentRun.isScala3) { + val sam = samOf(pt) + if (sam.exists) { + val samClazz = sam.owner + val isJavaClass = samClazz.isJava && !samClazz.isInterface + if (!samClazz.hasAnnotation(definitions.FunctionalInterfaceClass)) { + val ft = samToFunctionType(pt) + val sample = Function(meth.paramss.head.map(ValDef(_)), Apply(meth, meth.paramss.head.map(p => Ident(p.name)): _*)) + val places = Apply(meth, meth.paramss.head.map(_ => Ident(nme.USCOREkw)): _*) + val advice = if (isJavaClass) "" else s"\n$samClazz should be annotated with `@FunctionalInterface` if eta-expansion is desired." + context.warning(tree.pos, + sm"""Eta-expansion to expected type $pt, which is not a function type but is SAM-convertible to $ft.$advice + |Avoid eta-expansion by writing the function literal `$sample` or `$places`. 
+ |This warning can be filtered with `-Wconf:cat=lint-eta-sam`.""", + WarningCategory.LintEtaSam) + } + } + } + true } - // constructors do not eta-expand - if (meth.isConstructor) cantAdapt - // (4.2) eta-expand method value when function or sam type is expected - else if (isFunctionType(pt) || (!mt.params.isEmpty && samOf(pt).exists)) { - // scala/bug#9536 `!mt.params.isEmpty &&`: for backwards compatibility with 2.11, - // we don't adapt a zero-arg method value to a SAM - checkParamsConvertible(tree, tree.tpe) + // note that isFunctionProto(pt) does not work properly for Function0 + lazy val ptUnderlying = + (pt match { + case oapt: OverloadedArgProto => oapt.underlying + case pt => pt + }).dealiasWiden + + // (4.3) condition for eta-expansion by arity & -Xsource level + // + // for arity == 0: + // - if Function0 is expected -- SAM types do not eta-expand because it could be an accidental SAM scala/bug#9489 + // for arity > 0: + // - 2.13: if function or sam type is expected + // - 3.0: unconditionally + // + // warnings: + // - for arity == 0: eta-expansion of zero-arg methods was deprecated (scala/bug#7187) + // - for arity > 0: expected type is a SAM that is not annotated with `@FunctionalInterface` + def checkCanEtaExpand(): Boolean = { + def expectingSamOfArity = { + val sam = samOf(ptUnderlying) + sam.exists && sam.info.params.lengthIs == arity + } - // We changed our mind on deprecating 0-arity eta expansion in https://github.com/scala/scala/pull/7660 - // For history on this, see scala/bug#7187, scala/bug#9178 - // We will deprecate insertion of `()` in 2.13 (except for java-defined methods) and remove it in 2.14 - // Once that's done, we can more aggressively eta-expand method references, even if they are 0-arity. 
+ val expectingFunctionOfArity = { + val ptSym = ptUnderlying.typeSymbolDirect + (ptSym eq FunctionClass(arity)) || (arity > 0 && (ptSym eq FunctionClass(1))) // allowing for tupling conversion + } - val tree0 = etaExpand(context.unit, tree, this) + if (arity == 0) + expectingFunctionOfArity && warnEtaZero() + else + expectingFunctionOfArity || expectingSamOfArity && warnEtaSam() || currentRun.sourceFeatures.etaExpandAlways + } - // #2624: need to infer type arguments for eta expansion of a polymorphic method - // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand) - // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null - // can't type with the expected type, as we can't recreate the setup in (3) without calling typed - // (note that (3) does not call typed to do the polymorphic type instantiation -- - // it is called after the tree has been typed with a polymorphic expected result type) - if (hasUndets) instantiate(typed(tree0, mode), mode, pt) - else typed(tree0, mode, pt) + def matchNullaryLoosely: Boolean = { + def test(sym: Symbol) = sym.isJavaDefined || sym.owner == AnyClass + test(meth) || meth.overrides.exists(test) } - // (4.3) apply to empty argument list - else if (mt.params.isEmpty) emptyApplication - else cantAdapt - } + + // (4.2) condition for auto-application + // + // Currently the condition is more involved to give slack to Scala methods overriding Java-defined ones; + // I (moors) think we should resolve that by introducing slack in overriding e.g. a Java-defined `def toString()` by a Scala-defined `def toString`. + // This also works better for dealing with accessors overriding Java-defined methods. 
The current strategy in methodSig is problematic: + // > // Add a () parameter section if this overrides some method with () parameters + // > val vparamSymssOrEmptyParamsFromOverride = + // This means an accessor that overrides a Java-defined method gets a MethodType instead of a NullaryMethodType, which breaks lots of assumptions about accessors) + def checkCanAutoApply(): Boolean = { + if (!isPastTyper && !matchNullaryLoosely) { + val msg = + s"""Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method ${meth.decodedName}, + |or remove the empty argument list from its definition (Java-defined methods are exempt). + |In Scala 3, an unapplied method like this will be eta-expanded into a function.""".stripMargin + val action = runReporting.codeAction("add `()`", tree.pos.focusEnd, "()", msg) + context.deprecationWarning(tree.pos, NoSymbol, msg, "2.13.3", action) + } + true + } + + if (!meth.isConstructor && checkCanEtaExpand()) typedEtaExpansion(tree, mode, pt) + else if (arity == 0 && checkCanAutoApply()) { + val apply = Apply(tree, Nil).setPos(tree.pos).updateAttachment(AutoApplicationAttachment) + if (tree.hasAttachment[PostfixAttachment.type]) apply.updateAttachment(InfixAttachment) + adapt(typed(apply), mode, pt, original) + } + // `context.implicitsEnabled` implies we are not in a pattern + else if (context.implicitsEnabled) MissingArgsForMethodTpeError(tree, meth) + else UnstableTreeError(tree) + } // end adaptMethodTypeToExpr def adaptType(): Tree = { // @M When not typing a type constructor (!context.inTypeConstructorAllowed) @@ -992,18 +955,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def insertApply(): Tree = { assert(!context.inTypeConstructorAllowed, mode) //@M - val adapted = adaptToName(tree, nme.apply) + val adapted = adaptToName(unmarkDynamicRewrite(tree), nme.apply) val qual = gen.stabilize(adapted) - typedPos(tree.pos, mode, pt) { - Select(qual setPos 
tree.pos.makeTransparent, nme.apply) - } + val t = atPos(tree.pos)(Select(qual setPos tree.pos.makeTransparent, nme.apply)) + wrapErrors(t, _.typed(t, mode, pt)) } def adaptConstant(value: Constant): Tree = { val sym = tree.symbol - if (sym != null && sym.isDeprecated) + if (sym != null && !context.unit.isJava && sym.isDeprecated) context.deprecationWarning(tree.pos, sym) tree match { - case Literal(`value`) => tree + case Literal(`value`) /*| Bind(_, _)*/ => tree case _ => // If the original tree is not a literal, make it available to plugins in an attachment treeCopy.Literal(tree, value).updateAttachment(OriginalTreeAttachment(tree)) @@ -1067,18 +1029,48 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val boundOrSkolems = if (canIgnoreMismatch) bound ++ pt.skolemsExceptMethodTypeParams else Nil boundOrSkolems match { - case Nil => AdaptTypeError(tree, tree.tpe, pt) ; setError(tree) - case _ => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))) - } - } + case Nil => AdaptTypeError(tree, tree.tpe, pt) + case _ => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt), original = EmptyTree)) + } + } + + // if user wrote case companion C for expected function type, use C.apply or (C.apply _).tupled + def adaptApplyInsertion(): Tree = doAdaptApplyInsertion(retry = false) + + def doAdaptApplyInsertion(retry: Boolean): Tree = + if (!isPastTyper && tree.symbol != null && tree.symbol.isModule && tree.symbol.companion.isCase && isFunctionType(pt)) + silent(_.typed(atPos(tree.pos)(Select(tree, nme.apply)), mode, if (retry) WildcardType else pt)) match { + case SilentResultValue(applicator) => + val arity = definitions.functionArityFromType(applicator.tpe) + if (arity < 0) EmptyTree + else functionOrPfOrSamArgTypes(pt) match { + case arg :: Nil if definitions.isTupleType(arg) && arg.typeArgs.lengthCompare(arity) == 0 => + val tupled = typed(atPos(tree.pos)(Select(applicator, nme.tupled)), 
mode, pt) + if (!tupled.isErroneous) { + val msg = s"The method `apply` is inserted. The auto insertion will be deprecated, please write `(${tree.symbol.name}.apply _).tupled` explicitly." + context.deprecationWarning(tree.pos, tree.symbol, msg, "2.13.13") + tupled + } + else EmptyTree + case args if args.lengthCompare(arity) == 0 => + val msg = s"The method `apply` is inserted. The auto insertion will be deprecated, please write `${tree.symbol.name}.apply` explicitly." + context.deprecationWarning(tree.pos, tree.symbol, msg, "2.13.13") + applicator + case _ => EmptyTree + } + case _ if !retry => doAdaptApplyInsertion(retry = true) + case _ => EmptyTree + } + else EmptyTree def adaptExprNotFunMode(): Tree = { def lastTry(err: AbsTypeError = null): Tree = { debuglog("error tree = " + tree) - if (settings.isDebug && settings.explaintypes) explainTypes(tree.tpe, pt) + if (settings.isDebug && settings.explaintypes.value) explainTypes(tree.tpe, pt) if (err ne null) context.issue(err) if (tree.tpe.isErroneous || pt.isErroneous) setError(tree) - else adaptMismatchedSkolems() + else + adaptApplyInsertion() orElse adaptMismatchedSkolems() } // TODO: should we even get to fallbackAfterVanillaAdapt for an ill-typed tree? 
@@ -1086,31 +1078,38 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper @inline def tpdPos(transformed: Tree) = typedPos(tree.pos, mode, pt)(transformed) @inline def tpd(transformed: Tree) = typed(transformed, mode, pt) - @inline def warnValueDiscard(): Unit = if (!isPastTyper && settings.warnValueDiscard) { - def isThisTypeResult = (tree, tree.tpe) match { - case (Apply(Select(receiver, _), _), SingleType(_, sym)) => sym == receiver.symbol - case _ => false + @inline def warnValueDiscard(): Unit = + if (!isPastTyper && settings.warnValueDiscard.value && !treeInfo.isThisTypeResult(tree) && !treeInfo.hasExplicitUnit(tree)) + tree.updateAttachment(DiscardedValue) + @inline def warnNumericWiden(tpSym: Symbol, ptSym: Symbol): Unit = if (!isPastTyper) { + val targetIsWide = ptSym == FloatClass || ptSym == DoubleClass + val isInharmonic = { + def intWidened = tpSym == IntClass && ptSym == FloatClass + def longWidened = tpSym == LongClass && targetIsWide + intWidened || longWidened } - if (!isThisTypeResult) context.warning(tree.pos, "discarded non-Unit value", WarningCategory.WFlagValueDiscard) - } - @inline def warnNumericWiden(target: Symbol): Unit = { - // not `context.deprecationWarning` because they are not buffered in silent mode - if (!isPastTyper && settings.warnNumericWiden) context.warning(tree.pos, "implicit numeric widening", WarningCategory.WFlagNumericWiden) - object warnIntDiv extends Traverser { - def isInt(t: Tree) = ScalaIntegralValueClasses(t.tpe.typeSymbol) - override def traverse(tree: Tree): Unit = tree match { - case Apply(Select(q, nme.DIV), _) if isInt(q) => - context.warning(tree.pos, s"integral division is implicitly converted (widened) to floating point. 
Add an explicit `.to${target.name}`.", WarningCategory.LintIntDivToFloat) - case Apply(Select(a1, _), List(a2)) if isInt(tree) && isInt(a1) && isInt(a2) => - traverse(a1) - traverse(a2) - case Select(q, _) if isInt(tree) && isInt(q) => - traverse(q) - case _ => + if (isInharmonic) { + // not `context.deprecationWarning` because they are not buffered in silent mode + val msg = s"Widening conversion from ${tpSym.name} to ${ptSym.name} is deprecated because it loses precision. Write `.to${ptSym.name}` instead." + val orig = tree.pos.source.sourceAt(tree.pos) + context.warning(tree.pos, msg, WarningCategory.Deprecation, + runReporting.codeAction("add conversion", tree.pos, s"${CodeAction.maybeWrapInParens(orig)}.to${ptSym.name}", msg)) + } else { + object warnIntDiv extends Traverser { + def isInt(t: Tree) = ScalaIntegralValueClasses(t.tpe.typeSymbol) + override def traverse(tree: Tree): Unit = tree match { + case Apply(Select(q, nme.DIV), _) if isInt(q) => + val msg = s"integral division is implicitly converted (widened) to floating point. Add an explicit `.to${ptSym.name}`." + context.warning(tree.pos, msg, WarningCategory.LintIntDivToFloat, + runReporting.codeAction("add conversion", tree.pos, s"(${tree.pos.source.sourceAt(tree.pos)}).to${ptSym.name}", msg)) + case Apply(Select(a1, _), List(a2)) if isInt(tree) && isInt(a1) && isInt(a2) => traverse(a1); traverse(a2) + case Select(q, _) if isInt(tree) && isInt(q) => traverse(q) + case _ => + } } + if (targetIsWide && settings.lintIntDivToFloat) warnIntDiv(tree) + if (settings.warnNumericWiden.value) context.warning(tree.pos, "implicit numeric widening", WarningCategory.WFlagNumericWiden) } - if (!isPastTyper && settings.lintIntDivToFloat && (target == FloatClass || target == DoubleClass)) - warnIntDiv(tree) } // The <: Any requirement inhibits attempts to adapt continuation types to non-continuation types. 
@@ -1120,7 +1119,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case TypeRef(_, UnitClass, _) if anyTyped => // (12) warnValueDiscard() ; tpdPos(gen.mkUnitBlock(tree)) case TypeRef(_, numValueCls, _) if anyTyped && isNumericValueClass(numValueCls) && isNumericSubType(tree.tpe, pt) => // (10) (11) - warnNumericWiden(numValueCls) ; tpdPos(Select(tree, s"to${numValueCls.name}")) + warnNumericWiden(tree.tpe.widen.typeSymbol, numValueCls) ; tpdPos(Select(tree, s"to${numValueCls.name}")) case dealiased if dealiased.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt) => // (13) tpd(adaptAnnotations(tree, this, mode, pt)) case _ => @@ -1139,7 +1138,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else EmptyTree if (coercion ne EmptyTree) { def msg = s"inferred view from ${tree.tpe} to $pt via $coercion: ${coercion.tpe}" - if (settings.logImplicitConv) context.echo(tree.pos, msg) + if (settings.logImplicitConv.value) context.echo(tree.pos, msg) else debuglog(msg) val viewApplied = new ApplyImplicitView(coercion, List(tree)) setPos tree.pos @@ -1164,12 +1163,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty) def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0) - acceptsApplyDynamic(tree.tpe) || ( - if (mode.inTappMode) - tree.tpe.typeParams.isEmpty && hasPolymorphicApply - else - hasMonomorphicApply - ) + def badDynamicApply() = { + tree match { + case Apply(fun, _) => DynamicRewriteError(tree, ApplyWithoutArgsError(tree, fun)) + case _ => () + } + false + } + if (acceptsApplyDynamic(tree.tpe)) + !isDynamicRewrite(tree) || badDynamicApply() + else if (mode.inTappMode) + tree.tpe.typeParams.isEmpty && hasPolymorphicApply + else + hasMonomorphicApply } def shouldInsertApply(tree: Tree) = mode.typingExprFun && { tree.tpe match { @@ 
-1177,24 +1183,35 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => applyPossible } } - if (tree.isType) + if (tree.isType) // (6) adaptType() - else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree) && !isMacroExpansionSuppressed(tree)) + else if (mode.typingExprNotFun && + treeInfo.isMacroApplication(tree) && + !isMacroExpansionSuppressed(tree)) macroExpand(this, tree, mode, pt) else if (mode.typingConstructorPattern) typedConstructorPattern(tree, pt) - else if (shouldInsertApply(tree)) + else if (shouldInsertApply(tree)) // (8) insertApply() - else if (hasUndetsInMonoMode) { // (9) + else if (hasUndetsInMonoMode) // (9) // This used to have // assert(!context.inTypeConstructorAllowed, context) // but that's not guaranteed to be true in the face of erroneous code; errors in typedApply might mean we // never get around to inferring them, and they leak out and wind up here. instantiatePossiblyExpectingUnit(tree, mode, pt) - } - else if (tree.tpe <:< pt) + else if (isScalaRepeatedParamType(tree.tpe) && !isScalaRepeatedParamType(pt)) + // TODO: we really shouldn't use T* as a first class types (e.g. for repeated case fields), + // but we can't allow T* to conform to other types (see isCompatible) because that breaks overload resolution + adapt(tree.setType(repeatedToSeq(tree.tpe)), mode, pt, original = EmptyTree) + else if (tree.tpe <:< pt) { + val sym = tree.symbol + if (sym != null && !isPastTyper && currentRun.isScala3 && isFunctionType(pt) && sym.isModule && sym.isSynthetic && sym.companion.isCase) { + val msg = s"Synthetic case companion used as a function. In Scala 3 (or with -Xsource-features:case-companion-function), case companions no longer extend FunctionN. Use ${sym.name}.apply instead." 
+ val action = runReporting.codeAction("add `.apply`", tree.pos.focusEnd, ".apply", msg) + context.warning(tree.pos, msg, Scala3Migration, action) + } tree - else if (mode.inPatternMode && { inferModulePattern(tree, pt); isPopulated(tree.tpe, approximateAbstracts(pt)) }) + } else if (mode.inPatternMode && { inferModulePattern(tree, pt); isPopulated(tree.tpe, approximateAbstracts(pt)) }) tree else { val constFolded = constfold(tree, pt, context.owner) @@ -1208,15 +1225,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (treeInfo.isMacroApplication(tree)) adapt(unmarkMacroImplRef(tree), mode, pt, original) else tree } else tree.tpe match { - case atp @ AnnotatedType(_, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1) + case AnnotatedType(_, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1) adaptAnnotations(tree, this, mode, pt) - case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0) + case ct @ FoldableConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0) adaptConstant(value) - case OverloadedType(pre, alts) if !mode.inFunMode => // (1) - inferExprAlternative(tree, pt) + case OverloadedType(_, _) if !mode.inFunMode => // (1) + inferExprAlternative(tree, pt): Unit adaptAfterOverloadResolution(tree, mode, pt, original) case NullaryMethodType(restpe) => // (2) - adapt(tree setType restpe, mode, pt, original) + if (hasUndets && settings.lintUniversalMethods && (isCastSymbol(tree.symbol) || isTypeTestSymbol(tree.symbol)) && context.undetparams.exists(_.owner == tree.symbol)) + context.warning(tree.pos, s"missing type argument to ${tree.symbol}", WarningCategory.LintUniversalMethods) + val resTpDeconst = // keep constant types when they are safe to fold. erasure eliminates constant types modulo some exceptions, so keep those. 
+ if (isBeforeErasure && tree.symbol.isAccessor && tree.symbol.hasFlag(STABLE) && treeInfo.isExprSafeToInline(tree)) restpe + else restpe.deconst + adapt(tree setType resTpDeconst, mode, pt, original) case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2) adapt(tree setType arg, mode, pt, original) case tp if mode.typingExprNotLhs && isExistentialType(tp) && !isSyntheticAccessor(context.owner) => @@ -1230,18 +1252,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // -- are we sure we want to expand aliases this early? // -- what caused this change in behaviour?? val tparams1 = cloneSymbols(tparams) - val tree1 = ( + val tree1 = if (tree.isType) tree - else TypeApply(tree, tparams1 map (tparam => TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos - ) + else TypeApply(tree, tparams1.map(tparam => TypeTree(tparam.tpeHK).setPos(tree.pos.focus))).setPos(tree.pos) + context.undetparams ++= tparams1 notifyUndetparamsAdded(tparams1) adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original) case mt: MethodType if mode.typingExprNotFunNotLhs && mt.isImplicit => // (4.1) adaptToImplicitMethod(mt) - case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) => - instantiateToMethodType(mt) + case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) => // (4.2) - (4.3) + adaptMethodTypeToExpr(mt) case _ => vanillaAdapt(tree) } @@ -1254,7 +1276,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = { - inferExprInstance(tree, context.extractUndetparams(), pt) + inferExprInstance(tree, context.extractUndetparams(), pt, useWeaklyCompatible = true) adapt(tree, mode, pt) } /** If the expected type is Unit: try instantiating type arguments @@ -1285,7 +1307,7 @@ trait Typers extends 
Adaptations with Tags with TypersTracking with PatternTyper && ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue) && !qtpe.isError && !qtpe.typeSymbol.isBottomClass - && qtpe != WildcardType + && !qtpe.isWildcard && !qual.isInstanceOf[ApplyImplicitView] // don't chain views && (context.implicitsEnabled || context.enrichmentEnabled) // Elaborating `context.implicitsEnabled`: @@ -1305,13 +1327,34 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => } inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match { - case EmptyTree => qual - case coercion => - if (settings.logImplicitConv) - context.echo(qual.pos, - "applied implicit conversion from %s to %s = %s".format( - qual.tpe, searchTemplate, coercion.symbol.defString)) - + case EmptyTree => qual + case coercion => + if (settings.logImplicitConv.value) + context.echo(qual.pos, s"applied implicit conversion from ${qual.tpe} to ${searchTemplate} = ${coercion.symbol.defString}") + if (currentRun.isScala3 && coercion.symbol == currentRun.runDefinitions.Predef_any2stringaddMethod) + if (!currentRun.sourceFeatures.any2StringAdd) + runReporting.warning(qual.pos, s"Converting to String for concatenation is not supported in Scala 3 (or with -Xsource-features:any2stringadd).", Scala3Migration, coercion.symbol) + if (settings.lintUniversalMethods) { + def targetsUniversalMember(target: => Type): Option[Symbol] = searchTemplate match { + case HasMethodMatching(name, argtpes, restpe) => + target.member(name) + .alternatives + .find { m => + def argsOK = m.paramLists match { + case h :: _ => argtpes.corresponds(h.map(_.info))(_ <:< _) + case nil => argtpes.isEmpty + } + isUniversalMember(m) && argsOK + } + case RefinedType(WildcardType :: Nil, decls) => + decls.find(d => d.isMethod && d.info == WildcardType && isUniversalMember(target.member(d.name))) + case _ => + None + } + for (target <- targetsUniversalMember(coercion.symbol.info.finalResultType)) 
+ context.warning(qual.pos, s"conversion ${coercion.symbol.nameString} adds universal member $target to ${qual.tpe.typeSymbol}", + WarningCategory.LintUniversalMethods) + } typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual)))) } } @@ -1329,9 +1372,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = { def doAdapt(restpe: Type) = //util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ") - adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors) + adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), normalizeProtoForView(restpe)), reportAmbiguous, saveErrors) - if (pt == WildcardType) + if (pt.isWildcard) doAdapt(pt) else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ => logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType)) @@ -1345,12 +1388,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = { def onError(reportError: => Tree): Tree = context.tree match { case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => - ( silent (_.typedArgs(args.map(_.duplicate), mode)) - filter (xs => !(xs exists (_.isErrorTyped))) - map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors)) - orElse ( _ => reportError) - ) - case _ => + silent(_.typedArgs(args.map(_.duplicate), mode)) + .filter(!_.exists(_.isErrorTyped)) + .fold(reportError)(adaptToArguments(qual, name, _, WildcardType, reportAmbiguous, saveErrors)) + case _ => reportError } @@ -1430,7 +1471,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper 
if (stat.symbol.isAuxiliaryConstructor) notAllowed("secondary constructor") else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic) - notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.") + notAllowed(s"redefinition of $name method. See SIP-15, criterion 5.") else if (stat.symbol != null && stat.symbol.isParamAccessor) notAllowed("additional parameter") checkEphemeralDeep.traverse(rhs) @@ -1453,7 +1494,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper (if (clazz.owner.isTerm) "local class" else "member of another class")) if (!clazz.isPrimitiveValueClass) { clazz.primaryConstructor.paramss match { - case List(List(param)) => + case List(List(_)) => val decls = clazz.info.decls val paramAccessor = clazz.constrParamAccessors.head if (paramAccessor.isMutable) @@ -1468,7 +1509,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper /* check all base classes, since derived value classes might lurk in refinement parents */ if (acc.tpe.typeSymbol.baseClasses exists (_.isDerivedValueClass)) context.error(acc.pos, "value class may not wrap another user-defined value class") - checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor)) + def referencesUnderlying(sym: Symbol) = sym != null && sym.accessedOrSelf == paramAccessor + checkEphemeral(clazz, body.filterNot(stat => referencesUnderlying(stat.symbol))) } case _ => context.error(clazz.pos, "value class needs to have exactly one val parameter") @@ -1480,6 +1522,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper context.error(tparam.pos, "type parameter of value class may not be specialized") } + private def warnMultiargInfix(tree: Tree): Unit = + context.warning(tree.pos, "multiarg infix syntax looks like a tuple", WarningCategory.Other) + /** Typechecks a parent type reference. 
* * This typecheck is harder than it might look, because it should honor early @@ -1538,17 +1583,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * */ private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = { - val app = treeInfo.dissectApplied(encodedtpt) - val (treeInfo.Applied(core, _, argss), decodedtpt) = ((app, app.callee)) + val app @ treeInfo.Applied(core, _, argss) = treeInfo.dissectApplied(encodedtpt) + val decodedtpt = app.callee val argssAreTrivial = argss == Nil || argss == ListOfNil // we cannot avoid cyclic references with `initialize` here, because when type macros arrive, // we'll have to check the probe for isTypeMacro anyways. // therefore I think it's reasonable to trade a more specific "inherits itself" error // for a generic, yet understandable "cyclic reference" error - var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol - if (probe == null) probe = NoSymbol - probe.initialize + val probe = { + val p = typedTypeConstructor(core.duplicate).tpe.typeSymbol + if (p == null) NoSymbol + else p.initialize + } def cookIfNeeded(tpt: Tree) = if (context.unit.isJava) tpt modifyType rawToExistential else tpt cookIfNeeded(if (probe.isTrait || inMixinPosition) { @@ -1566,7 +1613,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper typedPrimaryConstrBody(templ) { val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK))) val supercall = New(supertpe, mmap(argss)(_.duplicate)) - val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall + val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall: @unchecked ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck atPos(supertpt.pos.focus)(supercall) } match { @@ -1627,7 +1674,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } val superCall1 = 
(superCall match { case global.pendingSuperCall => actualSuperCall - case EmptyTree => EmptyTree + case EmptyTree => EmptyTree + case x => throw new MatchError(x) }) orElse cunit val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1) val clazz = context.owner @@ -1654,7 +1702,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper foreach2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe) if (superCall1 == cunit) EmptyTree - else cbody2 match { + else cbody2 match { // ??? case Block(_, expr) => expr case tree => tree } @@ -1668,7 +1716,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def normalizeFirstParent(parents: List[Tree]): List[Tree] = { @annotation.tailrec def explode0(parents: List[Tree]): List[Tree] = { - val supertpt :: rest = parents // parents is always non-empty here - it only grows + val supertpt :: rest = parents: @unchecked // parents is always non-empty here - it only grows if (supertpt.tpe.typeSymbol == AnyClass) { supertpt setType AnyRefTpe parents @@ -1700,7 +1748,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case x :: xs => val sym = x.symbol x :: fixDuplicateSyntheticParents( - if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym) + if (isPossibleSyntheticParent(sym)) xs.filter(_.symbol != sym) else xs ) } @@ -1722,7 +1770,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (treeInfo.hasUntypedPreSuperFields(templ.body)) typedPrimaryConstrBody(templ)(EmptyTree) - supertpts mapConserve (tpt => checkNoEscaping.privates(this, context.owner, tpt)) + supertpts mapConserve (tpt => checkNoEscapingPrivates(this, context.owner, tpt)) } catch { case ex: TypeError if !global.propagateCyclicReferences => @@ -1751,13 +1799,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper *
  • no two parents define same symbol.
  • * */ - def validateParentClasses(parents: List[Tree], selfType: Type) { + def validateParentClasses(parents: List[Tree], selfType: Type, clazzIsTrait: Boolean): Unit = { val pending = ListBuffer[AbsTypeError]() def validateDynamicParent(parent: Symbol, parentPos: Position) = if (parent == DynamicClass) checkFeature(parentPos, currentRun.runDefinitions.DynamicsFeature) def validateParentClass(parent: Tree, superclazz: Symbol) = - if (!parent.isErrorTyped) { + if (!parent.isErrorTyped) { // redundant val psym = parent.tpe.typeSymbol.initialize if (!context.unit.isJava) @@ -1770,6 +1818,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val ps = psym.info.parents if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol)) pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym) + if (!clazzIsTrait) { + def hasTraitParams(sym: Symbol) = + sym.isScala3Defined && sym.isTrait && sym.hasAttachment[DottyParameterisedTrait] + // TODO perhaps there can be a flag to skip this when we know there can be no Scala 3 definitions + // or otherwise use an optimised representation for trait parameters + (parent.tpe :: ps).collectFirst { + case p if hasTraitParams(p.typeSymbol) => + p.typeSymbol.attachments.get[DottyParameterisedTrait].foreach( attach => + pending += ParentIsScala3TraitError(parent, p.typeSymbol, attach.params, psym) + ) + } + } } else { pending += ParentNotATraitMixinError(parent, psym) } @@ -1781,7 +1841,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val sameSourceFile = context.unit.source.file == psym.sourceFile if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation && - !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { + !sameSourceFile && !context.owner.ownerChain.exists(_.isDeprecated)) { val version = psym.deprecatedInheritanceVersion.getOrElse("") val since = if (version.isEmpty) version else s" 
(since $version)" val message = psym.deprecatedInheritanceMessage.map(msg => s": $msg").getOrElse("") @@ -1799,7 +1859,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper !(selfType <:< parentTypeOfThis) ) { pending += ParentSelfTypeConformanceError(parent, selfType) - if (settings.explaintypes) explainTypes(selfType, parentTypeOfThis) + if (settings.explaintypes.value) explainTypes(selfType, parentTypeOfThis) } if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError)) @@ -1810,13 +1870,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!parents.isEmpty && parents.forall(!_.isErrorTyped)) { val superclazz = parents.head.tpe.typeSymbol - for (p <- parents) validateParentClass(p, superclazz) + parents.foreach(validateParentClass(_, superclazz)) } pending.foreach(ErrorUtils.issueTypeError) } - def checkFinitary(classinfo: ClassInfoType) { + def checkFinitary(classinfo: ClassInfoType): Unit = { val clazz = classinfo.typeSymbol for (tparam <- clazz.typeParams) { @@ -1847,33 +1907,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val typedMods = typedModifiers(cdef.mods) assert(clazz != NoSymbol, cdef) reenterTypeParams(cdef.tparams) - val tparams1 = cdef.tparams mapConserve (typedTypeDef) + val tparams1 = cdef.tparams.mapConserve(typedTypeDef) val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) val impl2 = finishMethodSynthesis(impl1, clazz, context) if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) checkEphemeral(clazz, impl2.body) - if (!clazz.isJavaDefined && (clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { - if (!clazz.owner.isPackageClass) - context.error(clazz.pos, "inner classes cannot be classfile annotations") - // Ignore @SerialVersionUID, because it is special-cased 
and handled completely differently. - // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement - // of constant argument values "for free". Related to scala/bug#7041. - else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, - """|subclassing ClassfileAnnotation does not - |make your annotation visible at runtime. If that is what - |you want, you must write the annotation class in Java.""".stripMargin, WarningCategory.Other, clazz) - } - warnTypeParameterShadow(tparams1, clazz) - if (!isPastTyper) { - for (ann <- clazz.getAnnotation(DeprecatedAttr)) { - val m = companionSymbolOf(clazz, context) - if (m != NoSymbol) - m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) - } - } treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) .setType(NoType) } finally { @@ -1912,7 +1953,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val impl2 = finishMethodSynthesis(impl1, clazz, context) - if (currentRun.isScala211 && mdef.symbol == PredefModule) + if (mdef.symbol == PredefModule) ensurePredefParentsAreInSameSourceFile(impl2) treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType @@ -1922,10 +1963,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } private def ensurePredefParentsAreInSameSourceFile(template: Template) = { - val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass) val PredefModuleFile = PredefModule.associatedFile - if (parentSyms exists (_.associatedFile != PredefModuleFile)) - context.error(template.pos, s"All parents of Predef must be defined in ${PredefModuleFile}.") + if (template.parents.exists(p => p.symbol != AnyRefClass && p.symbol.associatedFile != PredefModuleFile)) + context.error(template.pos, s"All parents of Predef must be defined in $PredefModuleFile.") } /** In order to override this in the TreeCheckers Typer so synthetics aren't re-added * all the 
time, it is exposed here the module/class typing methods go through it. @@ -1953,8 +1993,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (txt eq context) namer enterSym tree else newNamer(txt) enterSym tree - /** Check that inner classes do not inherit from Annotation - */ def typedTemplate(templ0: Template, parents1: List[Tree]): Template = { val templ = templ0 // please FIXME: uncommenting this line breaks everything @@ -1980,12 +2018,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper updatePolyClassInfo(clazz, ClassInfoType(parentTypes, clazz.info.decls, clazz)) } - clazz.annotations.map(_.completeInfo()) + clazz.annotations.foreach(_.completeInfo()) if (templ.symbol == NoSymbol) templ setSymbol clazz.newLocalDummy(templ.pos) val self1 = (templ.self: @unchecked) match { case vd @ ValDef(_, _, tpt, EmptyTree) => - val tpt1 = checkNoEscaping.privates( + val tpt1 = checkNoEscapingPrivates( this, clazz.thisSym, treeCopy.TypeTree(tpt).setOriginal(tpt) setType vd.symbol.tpe @@ -1999,26 +2037,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (self1.name != nme.WILDCARD) context.scope enter self1.symbol - val selfType = ( + val selfType = if (clazz.isAnonymousClass && !phase.erasedTypes) intersectionType(clazz.info.parents, clazz.owner) else clazz.typeOfThis - ) + // the following is necessary for templates generated later assert(clazz.info.decls != EmptyScope, clazz) - val body1 = pluginsEnterStats(this, templ.body) + val body1 = pluginsEnterStats(this, namer.expandMacroAnnotations(templ.body)) enterSyms(context.outer.make(templ, clazz, clazz.info.decls), body1) if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore - validateParentClasses(parents1, selfType) + validateParentClasses(parents1, selfType, clazz.isTrait) if (clazz.isCase) validateNoCaseAncestor(clazz) if (clazz.isTrait && 
hasSuperArgs(parents1.head)) ConstrArgsInParentOfTraitError(parents1.head, clazz) - if (!clazz.isJavaDefined && (clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) - context.error(clazz.pos, "inner classes cannot be classfile annotations") - if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType]) @@ -2029,7 +2064,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val argss = superArgs(parents1.head) getOrElse Nil val pos = wrappingPos(parents1.head.pos, primaryCtor :: argss.flatten).makeTransparent val superCall = atPos(pos)(PrimarySuperCall(argss)) - deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos + deriveDefDef(primaryCtor)(_ => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos case _ => primaryCtor } body1 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat } @@ -2040,10 +2075,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (clazz.info.firstParent.typeSymbol == AnyValClass) validateDerivedValueClass(clazz, body3) + if (!clazz.isTrait && clazz.isNonBottomSubClass(ConstantAnnotationClass)) { + val ctors = body3.iterator.collect { case method: DefDef if method.symbol.isConstructor => method } + val primary = ctors.next() // there is always a primary constructor + if (primary.symbol.paramss.lengthIs != 1) ConstantAnnotationNeedsSingleArgumentList(primary, clazz) + ctors.foreach(AuxConstrInConstantAnnotation(_, clazz)) + } + if (clazz.isTrait) { - for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) { - context.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.", WarningCategory.Other) - } + for (decl <- clazz.info.decls) + if (decl.isTerm && decl.isEarlyInitialized) + 
context.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.", WarningCategory.Other) } treeCopy.Template(templ, parents1, self1, body3) setType clazz.tpe_* @@ -2090,8 +2132,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper typedModifiers(vdef.mods.copy(flags = sym.flags, privateWithin = tpnme.EMPTY)) } else typedModifiers(vdef.mods) - sym.annotations.map(_.completeInfo()) - val tpt1 = checkNoEscaping.privates(this, sym, transformedOr(vdef.tpt, typedType(vdef.tpt))) + sym.annotations.foreach(_.completeInfo()) + sym.filterAnnotations(_ != UnmappableAnnotation) + + val tpt1 = checkNoEscapingPrivates(this, sym, transformedOr(vdef.tpt, typedType(vdef.tpt))) checkNonCyclic(vdef, tpt1) // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to @@ -2109,7 +2153,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // When typechecking default parameter, replace all type parameters in the expected type by Wildcard. 
// This allows defining "def foo[T](a: T = 1)" val tparams = sym.owner.skipConstructor.info.typeParams - val subst = new SubstTypeMap(tparams, tparams map (_ => WildcardType)) { + val subst = new SubstTypeMap(tparams, WildcardType.fillList(tparams.length)) { + @tailrec override def matches(sym: Symbol, sym1: Symbol) = if (sym.isSkolem) matches(sym.deSkolemize, sym1) else if (sym1.isSkolem) matches(sym, sym1.deSkolemize) @@ -2118,24 +2163,35 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // allow defaults on by-name parameters if (sym hasFlag BYNAMEPARAM) if (tpt1.tpe.typeArgs.isEmpty) WildcardType // during erasure tpt1 is Function0 - else subst(tpt1.tpe.typeArgs(0)) + else subst(tpt1.tpe.typeArgs.head) else subst(tpt1.tpe) } else tpt1.tpe transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2) } - treeCopy.ValDef(vdef, typedMods, sym.name, tpt1, checkDead(context, rhs1)) setType NoType + if (!isPastTyper && sym.hasDefault && sym.owner.isConstructor && sym.enclClass.isNonBottomSubClass(AnnotationClass)) + sym.addAnnotation(AnnotationInfo(DefaultArgAttr.tpe, List(duplicateAndResetPos.transform(rhs1)), Nil)) + val vdef1 = treeCopy.ValDef(vdef, typedMods, sym.name, tpt1, checkDead(context, rhs1)) setType NoType + if (sym.isSynthetic && sym.name.startsWith(nme.RIGHT_ASSOC_OP_PREFIX)) + rightAssocValDefs += ((sym, vdef1.rhs)) + if (vdef.hasAttachment[PatVarDefAttachment.type]) { + sym.updateAttachment(PatVarDefAttachment) + if (sym.isSynthetic && sym.owner.isClass && (tpt1.tpe eq UnitTpe)) + if (sym.isPrivateThis && vdef.mods.isPrivateLocal && !sym.enclClassChain.exists(_.isInterpreterWrapper)) + context.warning(vdef.pos, + s"Pattern definition introduces Unit-valued member of ${sym.owner.name}; consider wrapping it in `locally { ... 
}`.", + WarningCategory.OtherMatchAnalysis) + } + vdef1 } /** Analyze the super constructor call to record information used later to compute parameter aliases */ - def analyzeSuperConsructor(meth: Symbol, vparamss: List[List[ValDef]], rhs: Tree): Unit = { + def analyzeSuperConstructor(meth: Symbol, vparamss: List[List[ValDef]], rhs: Tree): Unit = if (!rhs.isErrorTyped) { val clazz = meth.owner debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs") val pending = ListBuffer[AbsTypeError]() // !!! This method is redundant with other, less buggy ones. def decompose(call: Tree): (Tree, List[Tree]) = call match { - case _ if call.isErrorTyped => // e.g. scala/bug#7636 - (call, Nil) case Apply(fn, args) => // an object cannot be allowed to pass a reference to itself to a superconstructor // because of initialization issues; scala/bug#473, scala/bug#3913, scala/bug#6928. @@ -2143,7 +2199,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tree.symbol.isModule) pending += SuperConstrReferenceError(tree) tree match { - case This(qual) => + case This(_) => pending += SuperConstrArgsThisReferenceError(tree) case _ => () } @@ -2151,7 +2207,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val (superConstr, preArgs) = decompose(fn) val params = fn.tpe.params // appending a dummy tree to represent Nil for an empty varargs (is this really necessary?) 
- val applyArgs = if (args.length < params.length) args :+ EmptyTree else args take params.length + val applyArgs = if (args.sizeCompare(params) < 0) args :+ EmptyTree else args take params.length assert(sameLength(applyArgs, params) || call.isErrorTyped, s"arity mismatch but call is not error typed: $clazz (params=$params, args=$applyArgs)") @@ -2170,7 +2226,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!superClazz.isJavaDefined) { val superParamAccessors = superClazz.constrParamAccessors if (sameLength(superParamAccessors, superArgs)) { - val accToSuperAcc = mutable.AnyRefMap[Symbol, Symbol]() + val accToSuperAcc = mutable.HashMap[Symbol, Symbol]() for ((superAcc, superArg@Ident(name)) <- superParamAccessors zip superArgs) { if (mexists(vparamss)(_.symbol == superArg.symbol)) { val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match { @@ -2195,13 +2251,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // Check for scala/bug#4842. - private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol) { + private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol): Unit = { val pending = ListBuffer[AbsTypeError]() ddef.rhs match { case Block(stats, expr) => val selfConstructorCall = stats.headOption.getOrElse(expr) foreachSubTreeBoundTo(List(selfConstructorCall), clazz) { - case tree @ This(qual) => + case tree @ This(_) => pending += SelfConstrArgsThisReferenceError(tree) case _ => () } @@ -2226,7 +2282,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper f(subTree) } - /** Check if a structurally defined method violates implementation restrictions. + /** Check if a structurally defined method violates implementation restrictions. 
* A method cannot be called if it is a non-private member of a refinement type * and if its parameter's types are any of: * - the self-type of the refinement @@ -2237,14 +2293,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper */ def checkMethodStructuralCompatible(ddef: DefDef): Unit = { val meth = ddef.symbol - def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match { + def parentString = meth.owner.parentSymbols.filter(_ != ObjectClass) match { case Nil => "" case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")") } - def fail(pos: Position, msg: String): Boolean = { - context.error(pos, msg) - false - } /* Have to examine all parameters in all lists. */ def paramssTypes(tp: Type): List[List[Type]] = tp match { @@ -2256,8 +2308,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def nthParamPos(n1: Int, n2: Int) = try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos } - def failStruct(pos: Position, what: String, where: String = "Parameter type") = - fail(pos, s"$where in structural refinement may not refer to $what") + def failStruct(pos: Position, member: String, referTo: String): Unit = + context.error(pos, s"$member in structural refinement may not refer to $referTo") + def failStructAbstractType(pos: Position, member: String): false = { + failStruct(pos, member, referTo="an abstract type defined outside that refinement") + false + } + def failStructTypeMember(pos: Position, member: String): false = { + failStruct(pos, member, referTo="a type member of that refinement") + false + } foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) => foreachWithIndex(paramList) { (paramType, paramIdx) => @@ -2272,8 +2332,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def checkAbstract(tp0: Type, what: String): Boolean = { def check(sym: Symbol): Boolean = !sym.isAbstractType || { 
log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""") - ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what)) - || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what)) + ( (!sym.hasTransOwner(meth.owner) && failStructAbstractType(paramPos, what)) + || (!sym.hasTransOwner(meth) && failStructTypeMember(paramPos, what)) || checkAbstract(sym.info.upperBound, "Type bound") ) } @@ -2282,13 +2342,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper checkAbstract(paramType, "Parameter type") if (sym.isDerivedValueClass) - failStruct(paramPos, "a user-defined value class") + failStruct(paramPos, member="Parameter type", referTo="a user-defined value class") if (paramType.isInstanceOf[ThisType] && sym == meth.owner) - failStruct(paramPos, "the type of that refinement (self type)") + failStruct(paramPos, member="Parameter type", referTo="the type of that refinement (self type)") } } if (resultType.typeSymbol.isDerivedValueClass) - failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type") + failStruct(ddef.tpt.pos, member="Result type", referTo="a user-defined value class") } def typedDefDef(ddef: DefDef): DefDef = { @@ -2303,23 +2363,62 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!isPastTyper && meth.isPrimaryConstructor) { for (vparams <- ddef.vparamss; vd <- vparams) { if (vd.mods.isParamAccessor) { - vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + val sym = vd.symbol + sym.setAnnotations(sym.annotations.filter(AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true))) } } } - val tparams1 = ddef.tparams mapConserve typedTypeDef - val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) + val tparams1 = 
ddef.tparams.mapConserve(typedTypeDef) + val vparamss1 = ddef.vparamss.mapConserve(_.mapConserve(typedValDef)) warnTypeParameterShadow(tparams1, meth) - meth.annotations.map(_.completeInfo()) + meth.annotations.foreach(_.completeInfo()) + // we only have to move annotations around for accessors -- see annotSig as used by AccessorTypeCompleter and ValTypeCompleter + if (meth.isAccessor) meth.filterAnnotations(_ != UnmappableAnnotation) + + if (meth.isPrimaryConstructor && !isPastTyper) { + // add `@superArg` / `@superFwdArg` to subclasses of concrete annotations, e.g., + // `@superArg("value", "cat=deprecation")` for `class nodep extends nowarn("cat=deprecation")` + // this is done by duplicating the untyped super arguments before type checking the super call, because the + // super call can be transformed by named/default arguments. to build the `@superArg` annotations, the super + // call is type checked using `typedAnnotation`, which uses Mode.ANNOTmode. + def superArgs(t: Tree): List[Tree] = t match { + case treeInfo.Application(fn, _, List(args)) => args.map(_.duplicate) + case Block(_ :+ superCall, _) => superArgs(superCall) + case _ => Nil + } + val cls = meth.enclClass + val supCls = cls.superClass + if (!supCls.isAbstract && supCls.isNonBottomSubClass(AnnotationClass)) { + val superAnnotArgs = superArgs(ddef.rhs) + if (superAnnotArgs.nonEmpty && supCls.primaryConstructor.paramss.size == 1) + silent(_.typedAnnotation(New(cls.info.parents.head, superAnnotArgs: _*), None)).map(i => { + if (supCls.isNonBottomSubClass(ConstantAnnotationClass)) { + i.assocs.foreach { + case (p, LiteralAnnotArg(arg)) => + cls.addAnnotation(AnnotationInfo(SuperArgAttr.tpe, List(CODE.LIT.typed(p.toString), CODE.LIT.typed(arg.value)), Nil)) + case _ => + } + } else { + val ps = vparamss1.headOption.getOrElse(Nil).map(_.symbol).toSet + i.symbol.primaryConstructor.paramss.headOption.getOrElse(Nil).zip(i.args).foreach { + case (p, arg) if ps(arg.symbol) => + 
cls.addAnnotation(AnnotationInfo(SuperFwdArgAttr.tpe, List(CODE.LIT.typed(p.name.toString), CODE.LIT.typed(arg.symbol.name.toString)), Nil)) + case (p, arg) => + cls.addAnnotation(AnnotationInfo(SuperArgAttr.tpe, List(CODE.LIT.typed(p.name.toString), arg), Nil)) + } + } + }) + } + } for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) if (isRepeatedParamType(vparam1.symbol.tpe)) StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(this, meth, transformedOr(ddef.tpt, typedType(ddef.tpt))) + val tpt1 = checkNoEscapingPrivates(this, meth, transformedOr(ddef.tpt, typedType(ddef.tpt))) checkNonCyclic(ddef, tpt1) ddef.tpt.setType(tpt1.tpe) val typedMods = typedModifiers(ddef.mods) @@ -2346,10 +2445,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { // There are no supercalls for AnyVal or constructors from Java sources, which - // would blow up in analyzeSuperConsructor; there's nothing to be computed for them - // anyway. + // would blow up in analyzeSuperConstructor; there's nothing to be computed for them anyway. 
if (meth.isPrimaryConstructor) - analyzeSuperConsructor(meth, vparamss1, rhs1) + analyzeSuperConstructor(meth, vparamss1, rhs1) else checkSelfConstructorArgs(ddef, meth.owner) } @@ -2357,28 +2455,40 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) rhs1 = checkDead(context, rhs1) - if (!isPastTyper && meth.owner.isClass && - meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe))) - StarWithDefaultError(meth) - if (!isPastTyper) { - for (pp <- meth.paramss; p <- pp) { - if (p.isImplicit && p.isByNameParam) ImplicitByNameError(p) - for (n <- p.deprecatedParamName) { - if (mexists(meth.paramss)(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.contains(n)))) - DeprecatedParamNameError(p, n) + if (meth.owner.isClass && meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe))) + StarWithDefaultError(meth) + + for (pp <- meth.paramss; p <- pp) + p.deprecatedParamName match { + case Some(nme.NO_NAME) | None => + case Some(alt) => + if (mexists(meth.paramss)(p1 => p != p1 && (p1.name == alt || p1.deprecatedParamName.contains(alt)))) + DeprecatedParamNameError(p, alt) } - } + + if (settings.multiargInfix && !meth.isConstructor && meth.owner.isClass && !meth.isDeprecated && !meth.hasAnnotation(UnusedClass) && !meth.ownerChain.exists(_.isDeprecated) && !meth.isSynthetic) + meth.paramss match { + case (h :: _ :: _) :: Nil if !h.isImplicit && Chars.isOperatorPart(meth.name.decoded.head) => + warnMultiargInfix(ddef) + case _ => + } + if (meth.isStructuralRefinementMember) checkMethodStructuralCompatible(ddef) - if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { - case List(param) :: _ if !param.isImplicit => - checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) - case _ => + if (meth.isImplicit) { + if 
(!meth.isSynthetic) meth.paramss match { + case List(param) :: _ if !param.isImplicit => + checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) + case _ => + } + if (meth.isGetter && !meth.isLocalToBlock && meth.accessed.hasAttachment[FieldTypeInferred.type]) { + meth.accessed.removeAttachment[FieldTypeInferred.type] + InferredImplicitError(ddef, meth.accessed.tpe.resultType, context) + } } } - treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType } finally { currentRun.profiler.afterTypedImplDef(meth) @@ -2396,36 +2506,33 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper reenterTypeParams(tdef.tparams) val tparams1 = tdef.tparams mapConserve typedTypeDef val typedMods = typedModifiers(tdef.mods) - tdef.symbol.annotations.map(_.completeInfo()) + tdef.symbol.annotations.foreach(_.completeInfo()) warnTypeParameterShadow(tparams1, tdef.symbol) // @specialized should not be pickled when compiling with -no-specialize - if (settings.nospecialization && currentRun.compiles(tdef.symbol)) { + if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) { tdef.symbol.removeAnnotation(definitions.SpecializedClass) tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass) } - val rhs1 = checkNoEscaping.privates(this, tdef.symbol, typedType(tdef.rhs)) + val rhs1 = checkNoEscapingPrivates(this, tdef.symbol, typedType(tdef.rhs)) checkNonCyclic(tdef.symbol) if (tdef.symbol.owner.isType) rhs1.tpe match { - case TypeBounds(lo1, hi1) if (!(lo1 <:< hi1)) => LowerBoundError(tdef, lo1, hi1) - case _ => () + case TypeBounds(lo1, hi1) if !(lo1 <:< hi1) => LowerBoundError(tdef, lo1, hi1) + case _ => () } - if (tdef.symbol.isDeferred && tdef.symbol.info.isHigherKinded) - checkFeature(tdef.pos, currentRun.runDefinitions.HigherKindsFeature) - treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType } - private def enterLabelDef(stat: Tree) 
{ + private def enterLabelDef(stat: Tree): Unit = { stat match { case ldef @ LabelDef(_, _, _) => if (ldef.symbol == NoSymbol) ldef.symbol = namer.enterInScope( - context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitTpe)) + context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(Nil, UnitTpe)) case _ => } } @@ -2447,8 +2554,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else { context.scope.unlink(ldef.symbol) val sym2 = namer.enterInScope( - context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe)) - val LabelDef(_, _, rhs1) = resetAttrs(ldef) + context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(Nil, restpe)) + val LabelDef(_, _, rhs1) = resetAttrs(ldef): @unchecked val rhs2 = typed(brutallyResetAttrs(rhs1), restpe) ldef.params foreach (param => param setType param.symbol.tpe) deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe @@ -2460,7 +2567,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val syntheticPrivates = new ListBuffer[Symbol] try { namer.enterSyms(block0.stats) - val block = treeCopy.Block(block0, pluginsEnterStats(this, block0.stats), block0.expr) + val block = treeCopy.Block(block0, pluginsEnterStats(this, namer.expandMacroAnnotations(block0.stats)), block0.expr) for (stat <- block.stats) enterLabelDef(stat) if (phaseId(currentPeriod) <= currentRun.typerPhase.id) { @@ -2485,11 +2592,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper block match { case Block(List(classDef @ ClassDef(_, _, _, _)), Apply(Select(New(_), _), _)) => val classDecls = classDef.symbol.info.decls - lazy val visibleMembers = pt match { - case WildcardType => classDecls.toList - case BoundedWildcardType(TypeBounds(lo, _)) => lo.members - case _ => pt.members - } + lazy val visibleMembers = + pt.members match { + case _: ErrorScope => classDecls.toList + case ms => ms + } + def 
matchesVisibleMember(member: Symbol) = visibleMembers exists { vis => (member.name == vis.name) && (member.tpe <:< vis.tpe.substThis(vis.owner, classDef.symbol)) @@ -2514,8 +2622,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } val statsTyped = typedStats(block.stats, context.owner) - val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt) - treeCopy.Block(block, statsTyped, expr1) + val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode | APPSELmode), pt) + + // Remove ValDef for right-associative by-value operator desugaring which has been inlined into expr1 + val statsTyped2 = statsTyped match { + case (vd: ValDef) :: Nil if inlinedRightAssocValDefs.remove(vd.symbol) => Nil + case _ => statsTyped + } + + treeCopy.Block(block, statsTyped2, expr1) .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst) } finally { // enable escaping privates checking from the outside and recycle @@ -2579,8 +2694,40 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector) val casesTyped = typedCases(cases, selectorTp, pt) - def finish(cases: List[CaseDef], matchType: Type) = - treeCopy.Match(tree, selector1, cases) setType matchType + def initChildren(sym: Symbol): Unit = + if (sym.isJava && sym.isSealed) + sym.attachments.get[PermittedSubclassSymbols] match { + case Some(PermittedSubclassSymbols(permits)) => + for (child <- permits if child.isJava) + initChildren(child.initialize) + case _ => + val seen = mutable.HashSet.empty[Symbol] + def populate(): Unit = + patmat.javaClassesByUnit.get(sym.pos.source) match { + case Some(classes) => + classes.find(!seen(_)) match { + case Some(unseen) => + seen += unseen + unseen.initialize.companionSymbol.moduleClass.initialize + if (unseen.hasAttachment[PermittedSubclassSymbols]) initChildren(unseen) + populate() + case _ => + } + case _ => 
+ } + populate() + } + initChildren(selectorTp.typeSymbol) + + def finish(cases: List[CaseDef], matchType: Type) = { + if (!isPastTyper && settings.warnPatternShadow && !context.owner.isSynthetic) + for (cdef <- cases; bind @ Bind(name, _) <- cdef.pat if !bind.hasAttachment[NoWarnAttachment.type]) + context.lookupSymbol(name, _ => true) match { + case LookupSucceeded(_, sym) => bind.updateAttachment(PatShadowAttachment(sym)) + case _ => + } + treeCopy.Match(tree, selector1, cases).setType(matchType) + } if (isFullyDefined(pt)) finish(casesTyped, pt) @@ -2588,25 +2735,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case packed if sameWeakLubAsLub(packed) => finish(casesTyped, lub(packed)) case packed => val lub = weakLub(packed) - finish(casesTyped map (adaptCase(_, mode, lub)), lub) + finish(casesTyped.map(adaptCase(_, mode, lub)), lub) } } - // match has been typed -- virtualize it during type checking so the full context is available - def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = { - import patmat.{ vpmName, PureMatchTranslator } - - // TODO: add fallback __match sentinel to predef - val matchStrategy: Tree = - if (!(settings.Yvirtpatmat && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen - else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null) - - if (matchStrategy ne null) // virtualize - typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy, match_.selector.pos.focusEnd)).translateMatch(match_), mode, pt) - else - match_ // will be translated in phase `patmat` - } - /** synthesize and type check a PartialFunction implementation based on the match in `tree` * * `param => sel match { cases }` becomes: @@ -2629,33 +2761,32 @@ trait Typers extends Adaptations with Tags with TypersTracking with 
PatternTyper * an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later) * however, note that pattern matching codegen is designed to run *before* uncurry */ - def synthesizePartialFunction(paramName: TermName, paramPos: Position, paramSynthetic: Boolean, + def synthesizePartialFunction(paramName: TermName, paramPos: Position, paramType: Type, paramSynthetic: Boolean, tree: Tree, mode: Mode, pt: Type): Tree = { assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.") - val targs = pt.dealiasWiden.typeArgs - - // if targs.head isn't fully defined, we can't translate --> error - targs match { - case argTp :: _ if isFullyDefined(argTp) => // ok - case _ => // uh-oh - MissingParameterTypeAnonMatchError(tree, pt) - return setError(tree) - } + val (argTp0, resTp) = partialFunctionArgResTypeFromProto(pt) + // if argTp isn't fully defined, we can't translate --> error // NOTE: resTp still might not be fully defined - val argTp :: resTp :: Nil = targs + if (!isFullyDefined(argTp0)) { + MissingParameterTypeAnonMatchError(tree, pt) + return setError(tree) + } + val argTp = + if (paramType.ne(NoType)) paramType + else argTp0 // targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs) - val targsValidParams = targs forall (_ <:< AnyTpe) + val targsValidParams = (argTp <:< AnyTpe) && (resTp <:< AnyTpe) - val anonClass = context.owner newAnonymousFunctionClass tree.pos addAnnotation SerialVersionUIDAnnotation + val anonClass = context.owner.newAnonymousFunctionClass(tree.pos).addAnnotation(SerialVersionUIDAnnotation) import CODE._ - val Match(sel, cases) = tree + val Match(sel, cases) = tree: @unchecked // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up - val casesTrue = cases 
map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef]) + val casesTrue = cases.map(deriveCaseDef(_)(x => atPos(x.pos.focus)(TRUE)).duplicate) // must generate a new tree every time def selector(paramSym: Symbol): Tree = gen.mkUnchecked( @@ -2688,17 +2819,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 = - // ${`$selector match { $cases; case default$ => default(x) }` + // ${`$selector match { $cases; case default$ => default(x) }`} def applyOrElseMethodDef = { val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE) // create the parameter that corresponds to the function's parameter - val A1 = methodSym newTypeParameter (newTypeName("A1")) setInfo TypeBounds.upper(argTp) + val A1 = methodSym.newTypeParameter(newTypeName("A1")).setInfo(TypeBounds.upper(argTp)) val x = mkParam(methodSym, A1.tpe) // applyOrElse's default parameter: - val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty - val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe) + val B1 = methodSym.newTypeParameter(newTypeName("B1")).setInfo(TypeBounds.empty) + val default = methodSym.newValueParameter(newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe) val paramSyms = List(x, default) methodSym setInfo genPolyType(List(A1, B1), MethodType(paramSyms, B1.tpe)) @@ -2708,8 +2839,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // First, type without the default case; only the cases provided // by the user are typed. 
The LUB of these becomes `B`, the lower - // bound of `B1`, which in turn is the result type of the default - // case + // bound of `B1`, which in turn is the result type of the default case val match0 = methodBodyTyper.typedMatch(selector(x), cases, mode, resTp) val matchResTp = match0.tpe @@ -2717,8 +2847,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // the default uses applyOrElse's first parameter since the scrut's type has been widened val match_ = { - val cdef = mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe)) - val List(defaultCase) = methodBodyTyper.typedCases(List(cdef), argTp, B1.tpe) + val cdef = mkDefaultCase(methodBodyTyper.typed1(REF(default).APPLY(REF(x)), mode, B1.tpe).setType(B1.tpe)) + val List(defaultCase) = methodBodyTyper.typedCases(List(cdef), argTp, B1.tpe): @unchecked treeCopy.Match(match0, match0.selector, match0.cases :+ defaultCase) } match_ setType B1.tpe @@ -2767,13 +2897,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def newParam(param: Symbol): ValDef = { val vd = ValDef(param, EmptyTree) - val tt @ TypeTree() = vd.tpt + val tt @ TypeTree() = vd.tpt: @unchecked tt setOriginal (originals(param) setPos param.pos.focus) vd } - val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe) - val defdef = newDefDef(methodSym, rhs)(vparamss = mapParamss(methodSym)(newParam), tpt = TypeTree(B1.tpe)) + val defdef = newDefDef(methodSym, match_)(vparamss = mapParamss(methodSym)(newParam), tpt = TypeTree(B1.tpe)) (defdef, matchResTp) } @@ -2790,7 +2919,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val defaultCase = mkDefaultCase(FALSE) val match_ = methodBodyTyper.typedMatch(selector(paramSym), casesTrue :+ defaultCase, mode, BooleanTpe) - DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe)) + DefDef(methodSym, match_) } // only used for @cps annotated 
partial functions @@ -2809,7 +2938,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val matchResTp = match_.tpe methodSym setInfo MethodType(List(paramSym), matchResTp) // patch info - (DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, matchResTp)), matchResTp) + (DefDef(methodSym, match_), matchResTp) } def parents(resTp: Type) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, List(argTp, resTp))) @@ -2835,7 +2964,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val typedBlock = typedPos(tree.pos, mode, pt) { Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)( - Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List()) + Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), Nil) )) } @@ -2852,12 +2981,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * where `S` is the expected type that defines a single abstract method (call it `apply` for the example), * that has signature `(p1: T1', ..., pN: TN'): T'`, synthesize the instantiation of the following anonymous class * - * ``` + * {{{ * new S { * def apply\$body(p1: T1, ..., pN: TN): T = body * def apply(p1: T1', ..., pN: TN'): T' = apply\$body(p1,..., pN) * } - * ``` + * }}} * * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `pt`, * If `pt` is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters. 
@@ -2880,94 +3009,71 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * function type is a built-in FunctionN or some SAM type * */ - def inferSamType(fun: Tree, pt: Type, mode: Mode): Boolean = fun match { - case fun@Function(vparams, _) if !isFunctionType(pt) => - // TODO: can we ensure there's always a SAMFunction attachment, instead of looking up the sam again??? - // seems like overloading complicates things? - val sam = samOf(pt) - - if (!samMatchesFunctionBasedOnArity(sam, vparams)) false - else { - def fullyDefinedMeetsExpectedFunTp(pt: Type): Boolean = isFullyDefined(pt) && { - val samMethType = pt memberInfo sam - fun.tpe <:< functionType(samMethType.paramTypes, samMethType.resultType) - } - - val samTp = - if (!sam.exists) NoType - else if (fullyDefinedMeetsExpectedFunTp(pt)) pt - else try { - val samClassSym = pt.typeSymbol - - // we're trying to fully define the type arguments for this type constructor - val samTyCon = samClassSym.typeConstructor - - // the unknowns - val tparams = samClassSym.typeParams - // ... 
as typevars - val tvars = tparams map freshVar - - val ptVars = appliedType(samTyCon, tvars) - - // carry over info from pt - ptVars <:< pt - - val samInfoWithTVars = ptVars.memberInfo(sam) - - // use function type subtyping, not method type subtyping (the latter is invariant in argument types) - fun.tpe <:< functionType(samInfoWithTVars.paramTypes, samInfoWithTVars.finalResultType) - - // solve constraints tracked by tvars - val targs = solvedTypes(tvars, tparams, varianceInType(sam.info), upper = false, lubDepth(sam.info :: Nil)) - - debuglog(s"sam infer: $pt --> ${appliedType(samTyCon, targs)} by ${fun.tpe} <:< $samInfoWithTVars --> $targs for $tparams") + def inferSamType(fun: Tree, pt: Type, mode: Mode): Boolean = + if (pt.isInstanceOf[OverloadedArgProto]) inferSamType(fun, pt.underlying, mode) // scala/bug#12560 + else fun match { + case fun@Function(vparams, _) if !isFunctionType(pt) => + // TODO: can we ensure there's always a SAMFunction attachment, instead of looking up the sam again??? + // seems like overloading complicates things? 
+ val sam = samOfProto(pt) + + if (!samMatchesFunctionBasedOnArity(sam, vparams)) false + else { + def fullyDefinedMeetsExpectedFunTp(pt: Type): Boolean = isFullyDefined(pt) && { + val samMethType = pt memberInfo sam + fun.tpe <:< functionType(samMethType.paramTypes, samMethType.resultType) + } - val ptFullyDefined = appliedType(samTyCon, targs) - if (ptFullyDefined <:< pt && fullyDefinedMeetsExpectedFunTp(ptFullyDefined)) { - debuglog(s"sam fully defined expected type: $ptFullyDefined from $pt for ${fun.tpe}") - ptFullyDefined - } else { - debuglog(s"Could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)") - NoType + val samTp = + if (!sam.exists) NoType + else if (fullyDefinedMeetsExpectedFunTp(pt)) pt + else try { + val ptFullyDefined = instantiateSamFromFunction(fun.tpe, pt, sam) + if (ptFullyDefined <:< pt && fullyDefinedMeetsExpectedFunTp(ptFullyDefined)) { + debuglog(s"sam fully defined expected type: $ptFullyDefined from $pt for ${fun.tpe}") + ptFullyDefined + } else { + debuglog(s"Could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)") + NoType + } + } catch { + case e@(_: NoInstance | _: TypeError) => + debuglog(s"Error during SAM synthesis: could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)\n$e") + NoType } - } catch { - case e@(_: NoInstance | _: TypeError) => - debuglog(s"Error during SAM synthesis: could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)\n$e") - NoType - } - if (samTp eq NoType) false - else { - /* Make a synthetic class symbol to represent the synthetic class that + if (samTp eq NoType) false + else { + /* Make a synthetic class symbol to represent the synthetic class that * will be spun up by LMF for this function. This is necessary because * it's possible that the SAM method might need bridges, and they have * to go somewhere. 
Erasure knows to compute bridges for these classes * just as if they were real templates extending the SAM type. */ - val synthCls = fun.symbol.owner.newClassWithInfo( - name = tpnme.ANON_CLASS_NAME, - parents = ObjectTpe :: samTp :: Nil, - scope = newScope, - pos = sam.pos, - newFlags = SYNTHETIC | ARTIFACT - ) + val synthCls = fun.symbol.owner.newClassWithInfo( + name = tpnme.ANON_CLASS_NAME, + parents = ObjectTpe :: samTp :: Nil, + scope = newScope, + pos = sam.pos, + newFlags = SYNTHETIC | ARTIFACT + ) - synthCls.info.decls.enter { - val newFlags = (sam.flags & ~DEFERRED) | SYNTHETIC - sam.cloneSymbol(synthCls, newFlags).setInfo(samTp memberInfo sam) - } + synthCls.info.decls.enter { + val newFlags = (sam.flags & ~DEFERRED) | SYNTHETIC + sam.cloneSymbol(synthCls, newFlags).setInfo(samTp memberInfo sam) + } - fun.setType(samTp) + fun.setType(samTp) - /* Arguably I should do `fun.setSymbol(samCls)` rather than leaning + /* Arguably I should do `fun.setSymbol(samCls)` rather than leaning * on an attachment, but doing that confounds lambdalift's free var * analysis in a way which does not seem to be trivially reparable. */ - fun.updateAttachment(SAMFunction(samTp, sam, synthCls)) + fun.updateAttachment(SAMFunction(samTp, sam, synthCls)) - true + true + } } - } - case _ => false - } + case _ => false + } /** * Deconstruct an expected function-ish type `pt` into `numVparams` argument prototypes and a result prototype. @@ -2975,58 +3081,62 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * If the expected type `pt` does not denote a function-ish type with arity `numVparams`, * still return the expected number of ErrorType/NoType argument protos, and WildcardType for the result. 
* - * @param pt - * @param numVparams * @return (argProtos, resProto) where argProtos.lengthCompare(numVparams) == 0 */ - private def argsResProtosFromFun(pt: Type, numVparams: Int): (List[Type], Type) = { - val FunctionSymbol = FunctionClass(numVparams) + private def argsResProtosFromFun(pt: Type, numVparams: Int): (List[Type], Type) = + pt match { + case pt: OverloadedArgProto if pt.hofParamTypes.lengthCompare(numVparams) == 0 => (pt.hofParamTypes, WildcardType) + case _ => + val FunctionSymbol = FunctionClass(numVparams) + + // In case of any non-trivial type slack between `pt` and the built-in function types, we go the SAM route, + // as a subclass could have (crazily) implemented the apply method and introduced another abstract method + // to serve as the vehicle. + val ptNorm = pt.typeSymbol match { + case NoSymbol => NoType + case FunctionSymbol | PartialFunctionClass => pt + case _ => + val sam = samOf(pt) + if (sam.exists && sam.info.params.lengthCompare(numVparams) == 0) + wildcardExtrapolation(methodToExpressionTp(pt memberInfo sam)) + else pt // allow type slack (pos/6221) + } - // In case of any non-trivial type slack between `pt` and the built-in function types, we go the SAM route, - // as a subclass could have (crazily) implemented the apply method and introduced another abstract method - // to serve as the vehicle. 
- val ptNorm = pt.typeSymbol match { - case NoSymbol => NoType - case FunctionSymbol | PartialFunctionClass => pt - case _ => - val sam = samOf(pt) - if (sam.exists && sam.info.params.lengthCompare(numVparams) == 0) - wildcardExtrapolation(normalize(pt memberInfo sam)) - else pt // allow type slack (pos/6221) + unwrapWrapperTypes(ptNorm baseType FunctionSymbol) match { + case TypeRef(_, _, args :+ res) => (args, res) // if it's a TypeRef, we know its symbol will be FunctionSymbol + case _ => + val dummyPt = if (pt == ErrorType) ErrorType else NoType + (List.fill(numVparams)(dummyPt), WildcardType) // dummyPt is in CBN position + } } - ptNorm baseType FunctionSymbol match { - case TypeRef(_, _, args :+ res) => (args, res) // if it's a TypeRef, we know its symbol will be FunctionSymbol - case _ => { - val dummyPt = if (pt == ErrorType) ErrorType else NoType - (List.fill(numVparams)(dummyPt), WildcardType) // dummyPt is in CBN position - } - } - } /** Type check a function literal. * * Based on the expected type pt, potentially synthesize an instance of * - PartialFunction, - * - a type with a Single Abstract Method (under -Xexperimental for now). + * - a type with a Single Abstract Method. */ private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = { val vparams = fun.vparams val numVparams = vparams.length - if (numVparams > definitions.MaxFunctionArity) MaxFunctionArityError(fun) + if (numVparams > definitions.MaxFunctionArity) MaxFunctionArityError(fun, s", but $numVparams given") else { val (argProtos, resProto) = argsResProtosFromFun(pt, numVparams) // After typer, no need for further checks, parameter type inference or PartialFunction synthesis. 
if (isPastTyper) doTypedFunction(fun, resProto) else { - val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22 + val paramsMissingType = ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22 // first, try to define param types from expected function's arg types if needed foreach2(vparams, argProtos) { (vparam, argpt) => + // TODO: do we need to exclude vparam.symbol.isError? (I don't think so, + // because I don't see how we could recurse after the `setError(vparam)` call below if (vparam.tpt.isEmpty) { - if (isFullyDefined(argpt)) vparam.tpt setType argpt - else paramsMissingType += vparam + if (isFullyDefined(argpt)) vparam.tpt setType argpt + else if (vparam.hasAttachment[BooleanParameterType.type]) vparam.tpt.setType(definitions.BooleanTpe) // `if (_)` + else paramsMissingType += vparam if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus } @@ -3034,34 +3144,45 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (paramsMissingType.nonEmpty && pt != ErrorType) { // If we can resolve the missing parameter type by undoing eta-expansion and recursing, do that -- otherwise, report error and bail - typedFunctionUndoingEtaExpansion(fun, mode, pt, argProtos, resProto) orElse { + typedFunctionUndoingEtaExpansion(fun, mode, resProto) orElse { // we ran out of things to try, missing parameter types are an irrevocable error var issuedMissingParameterTypeError = false paramsMissingType.foreach { vparam => - vparam.tpt setType ErrorType + setError(vparam) // see neg/t8675b.scala setting vparam.tpt to ErrorType isn't as effective MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError) issuedMissingParameterTypeError = true } - - doTypedFunction(fun, resProto) // TODO: why is it not enough to do setError(fun)? 
(for test case, see neg/t8675b.scala) + fun match { + case Function(_, Match(_, _)) => setError(fun) + case _ if !issuedMissingParameterTypeError => setError(fun) + case _ => + // Improve error reporting: propagate what we know about the function's type for better failure. + val paramTypesForErrorMessage = vparams.map { param => + if (param.tpt.isEmpty) WildcardType + else silent(_.typedType(param.tpt).tpe) + .fold(WildcardType: Type) { case ErrorType => NoType case tp => tp } + } + fun.setType(appliedType(FunctionClass(numVparams), paramTypesForErrorMessage :+ WildcardType)) + } } - } else { - fun.body match { - // translate `x => x match { }` : PartialFunction to - // `new PartialFunction { def applyOrElse(x, default) = x match { } def isDefinedAt(x) = ... }` - case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) => - // go to outer context -- must discard the context that was created for the Function since we're discarding the function - // thus, its symbol, which serves as the current context.owner, is not the right owner - // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner) - val outerTyper = newTyper(context.outer) - val p = vparams.head - if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe - - outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt) - - case _ => doTypedFunction(fun, resProto) + } else if (numVparams == 1 && pt.typeSymbol == PartialFunctionClass) { // dodge auto-tupling with the == 1 + // translate `x => x match { }` : PartialFunction to + // `new PartialFunction { def applyOrElse(x, default) = x match { } def isDefinedAt(x) = ... 
}` + val funBody = fun.body match { + case Match(sel, _) if sel ne EmptyTree => fun.body + case funBody => + atPos(funBody.pos.makeTransparent) { + Match(EmptyTree, List(CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), funBody))) + } } - } + // go to outer context -- must discard the context that was created for the Function since we're discarding the function + // thus, its symbol, which serves as the current context.owner, is not the right owner + // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner) + val outerTyper = newTyper(context.outer) + val p = vparams.head + if (p.tpt.tpe == null) p.tpt.setType(outerTyper.typedType(p.tpt).tpe) + outerTyper.synthesizePartialFunction(p.name, p.pos, p.tpt.tpe, paramSynthetic = false, funBody, mode, pt) + } else doTypedFunction(fun, resProto) } } } @@ -3084,7 +3205,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * * @return EmptyTree on failure, or a typed version of `fun` if we are successful */ - private def typedFunctionUndoingEtaExpansion(fun: Function, mode: Mode, pt: Type, argProtos: List[Type], resProto: Type) = { + private def typedFunctionUndoingEtaExpansion(fun: Function, mode: Mode, resProto: Type) = { val vparams = fun.vparams fun.body match { @@ -3100,7 +3221,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // (based on where the function's parameter is applied to `meth`) val formalsFromApply = vparams.map { vd => - if (!vd.tpt.isEmpty) Right(vd.tpt.tpe) + if (!vd.tpt.isEmpty) { + if (!vd.tpt.isTyped) { + val vd1 = typedValDef(vd) + if (vd1.isErroneous) Left(-1) + else Right(vd1.tpt.tpe) + } + else Right(vd.tpt.tpe) + } else Left(args.indexWhere { case Ident(name) => name == vd.name case _ => false // TODO: this does not catch eta-expansion of an overloaded method that involves numeric widening scala/bug#9738 (and maybe similar patterns?) 
@@ -3113,7 +3241,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else { // We're looking for a method (as indicated by FUNmode in the silent typed below), // so let's make sure our expected type is a MethodType (of the right arity, but we can't easily say more about the argument types) - val methArgs = NoSymbol.newSyntheticValueParams(args map { case _ => WildcardType }) + val methArgs = NoSymbol.newSyntheticValueParams(WildcardType.fillList(args.length)) silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, resProto))).fold(EmptyTree: Tree) { methTyped => // if context.undetparams is not empty, the method was polymorphic, @@ -3126,7 +3254,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // Be careful to reuse methTyped -- it may have changed from meth (scala/bug#9745)! // // TODO: CBN / varargs / implicits? should we use formalTypes? - normalize(methTyped.tpe) match { // we don't know how many of the vparams of our function were actually applied to the method + methodToExpressionTp(methTyped.tpe) match { // we don't know how many of the vparams of our function were actually applied to the method case TypeRef(_, _, argProtos :+ _) => val argProtosRecovered = formalsFromApply.map { @@ -3166,32 +3294,54 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val funTp = if (phase.erasedTypes) funSym.tpe else { - val resTp = packedType(bodyTyped, fun.symbol).deconst.resultType + val resTp = + packedType(bodyTyped, fun.symbol).resultType.deconst match { + case ct: ConstantType if (bodyPt eq WildcardType) || (ct.widen <:< bodyPt) => ct.widen + case tp => tp + } + appliedType(funSym, vparamSyms.map(_.tpe) :+ resTp) } treeCopy.Function(fun, vparamsTyped, bodyTyped) setType funTp } - def typedRefinement(templ: Template) { + // #2624: need to infer type arguments for eta expansion of a polymorphic method + // context.undetparams contains clones of meth.typeParams 
(fresh ones were generated in etaExpand) + // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null + // can't type with the expected type, as we can't recreate the setup in (3) without calling typed + // (note that (3) does not call typed to do the polymorphic type instantiation -- + // it is called after the tree has been typed with a polymorphic expected result type) + def typedEtaExpansion(tree: Tree, mode: Mode, pt: Type): Tree = { + debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt") + + val expansion = etaExpand(tree, context.owner) + if (context.undetparams.isEmpty) typed(expansion, mode, pt) + else instantiate(typed(expansion, mode), mode, pt) + } + + def typedRefinement(templ: Template): Unit = { val stats = templ.body - namer.enterSyms(stats) - - // need to delay rest of typedRefinement to avoid cyclic reference errors - unit.toCheck += { () => - val stats1 = typedStats(stats, NoSymbol) - // this code kicks in only after typer, so `stats` will never be filled in time - // as a result, most of compound type trees with non-empty stats will fail to reify - // todo. investigate whether something can be done about this - val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil)) - templ.removeAttachment[CompoundTypeTreeOriginalAttachment] - templ updateAttachment att.copy(stats = stats1) - for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol) - stat.symbol setFlag OVERRIDE + if (!stats.isEmpty) { + namer.enterSyms(stats) + + // need to delay rest of typedRefinement to avoid cyclic reference errors + debuglog(s"deferred typed refinement") + unit.addPostUnitCheck { () => + val stats1 = typedStats(stats, NoSymbol) + // this code kicks in only after typer, so `stats` will never be filled in time + // as a result, most of compound type trees with non-empty stats will fail to reify + // todo. 
investigate whether something can be done about this + val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil)) + templ.removeAttachment[CompoundTypeTreeOriginalAttachment] + templ updateAttachment att.copy(stats = stats1) + for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol) + stat.symbol setFlag OVERRIDE + } } } - def typedImport(imp : Import) : Import = (transformed remove imp) match { + def typedImport(imp: Import): Import = transformed.remove(imp) match { case Some(imp1: Import) => imp1 case _ => log(s"unhandled import: $imp in $unit"); imp } @@ -3221,59 +3371,78 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (treeInfo.isSelfOrSuperConstrCall(result)) { context.inConstructorSuffix = true if (treeInfo.isSelfConstrCall(result)) { - if (result.symbol == exprOwner.enclMethod) - ConstructorRecursesError(stat) - else if (result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0)) - ConstructorsOrderError(stat) + val called = result.symbol + val defined = exprOwner.enclMethod + if (called == defined) ConstructorRecursesError(stat) + else { + val calledPos = called.pos.pointOrElse(0) + val definedPos = defined.pos.pointOrElse(0) + if (calledPos > definedPos) ConstructorsOrderError(stat) + else if (calledPos == definedPos) { + // Trees generated by a macro have the same position + // Trees typechecked by a ToolBox have no position + val constructors = defined.owner.info.decl(nme.CONSTRUCTOR).alternatives + if (constructors.indexOf(called) > constructors.indexOf(defined)) ConstructorsOrderError(stat) + } + } } } result } - // TODO: adapt to new trait field encoding, figure out why this exemption is made - // 'accessor' and 'accessed' are so similar it becomes very difficult to - //follow the logic, so I renamed one to something distinct. 
- def accesses(looker: Symbol, accessed: Symbol) = accessed.isLocalToThis && ( - (accessed.isParamAccessor) - || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate) - ) - + /* From the spec (refchecks checks other conditions regarding erasing to the same type and default arguments): + * + * A block expression [... its] statement sequence may not contain two definitions or + * declarations that bind the same name --> `inBlock` + * + * It is an error if a template directly defines two matching members. + * + * A member definition $M$ _matches_ a member definition $M'$, if $M$ and $M'$ bind the same name, + * and one of following holds: + * 1. Neither $M$ nor $M'$ is a method definition. + * 2. $M$ and $M'$ define both monomorphic methods with equivalent argument types. + * 3. $M$ defines a parameterless method and $M'$ defines a method with an empty parameter list `()` or _vice versa_. + * 4. $M$ and $M'$ define both polymorphic methods with equal number of argument types $\overline T$, $\overline T'$ + * and equal numbers of type parameters $\overline t$, $\overline t'$, say, + * and $\overline T' = [\overline t'/\overline t]\overline T$. + */ def checkNoDoubleDefs(scope: Scope): Unit = { var e = scope.elems while ((e ne null) && e.owner == scope) { + val sym = e.sym var e1 = scope.lookupNextEntry(e) while ((e1 ne null) && e1.owner == scope) { - val sym = e.sym val sym1 = e1.sym - /** From the spec (refchecks checks other conditions regarding erasing to the same type and default arguments): - * - * A block expression [... its] statement sequence may not contain two definitions or - * declarations that bind the same name --> `inBlock` - * - * It is an error if a template directly defines two matching members. - * - * A member definition $M$ _matches_ a member definition $M'$, if $M$ and $M'$ bind the same name, - * and one of following holds: - * 1. Neither $M$ nor $M'$ is a method definition. - * 2. 
$M$ and $M'$ define both monomorphic methods with equivalent argument types. - * 3. $M$ defines a parameterless method and $M'$ defines a method with an empty parameter list `()` or _vice versa_. - * 4. $M$ and $M'$ define both polymorphic methods with equal number of argument types $\overline T$, $\overline T'$ - * and equal numbers of type parameters $\overline t$, $\overline t'$, say, - * and $\overline T' = [\overline t'/\overline t]\overline T$. - */ - if (!(accesses(sym, sym1) || accesses(sym1, sym)) // TODO: does this purely defer errors until later? - && (inBlock || !(sym.isMethod || sym1.isMethod) || (sym.tpe matches sym1.tpe)) - // default getters are defined twice when multiple overloads have defaults. - // The error for this is deferred until RefChecks.checkDefaultsInOverloaded - && (!sym.isErroneous && !sym1.isErroneous && !sym.hasDefault && - !sym.hasAnnotation(BridgeClass) && !sym1.hasAnnotation(BridgeClass))) { - log("Double definition detected:\n " + - ((sym.getClass, sym.info, sym.ownerChain)) + "\n " + - ((sym1.getClass, sym1.info, sym1.ownerChain))) + def allowPrivateLocalAcc: Boolean = + sym.isParamAccessor && sym.isPrivateLocal || sym1.isParamAccessor && sym1.isPrivateLocal + def nullaryNilary: Boolean = { + def nn(m: Symbol): Boolean = m.isParamAccessor || m.hasAccessorFlag || !m.isMethod || { + m.tpe match { + case MethodType(Nil, _) | NullaryMethodType(_) => true + case _ => false + } + } + nn(sym) && nn(sym1) + } + def correctly: Boolean = nullaryNilary.tap(if (_) reportCorrection()) && currentRun.sourceFeatures.doubleDefinitions + def reportCorrection(): Unit = + if (currentRun.isScala3 && !currentRun.sourceFeatures.doubleDefinitions) + context.warning(sym.pos, s"Double definition will be detected in Scala 3; the conflicting $sym1 is defined at ${sym1.pos.line}:${sym1.pos.column}", Scala3Migration) + + val conflicted = inBlock || (!sym.isMethod && !sym1.isMethod) || + sym.tpe.matches(sym1.tpe) && !allowPrivateLocalAcc || // Scala 2: allow 
`class C(x: A) { def x: B }` + correctly // Scala 3: warn / err for `class C(x: A) { def x: B }`, and with nilary `def x(): B` + + // default getters are defined twice when multiple overloads have defaults. + // The error for this is deferred until RefChecks.checkDefaultsInOverloaded + if (conflicted && !sym.isErroneous && !sym1.isErroneous && !sym.hasDefault) { + log(sm"""Double definition detected: + | ${(sym.getClass, sym.info, sym.ownerChain)} + | ${(sym1.getClass, sym1.info, sym1.ownerChain)}""") DefDefinedTwiceError(sym, sym1) - scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779 + scope.unlink(e1) // need to unlink to avoid later problems with lub; see #scala/bug#2779 } e1 = scope.lookupNextEntry(e1) } @@ -3301,14 +3470,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper (!sym.isModule || shouldAddAsModule) && (inBlock || !context.isInPackageObject(sym, context.owner)) } - for (sym <- scope) + for (sym <- scope) context.unit.synthetics.get(sym) match { // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop - for (tree <- context.unit.synthetics.get(sym) if shouldAdd(sym)) { + case Some(tree) if shouldAdd(sym) => // if the completer set the IS_ERROR flag, retract the stat (currently only used by applyUnapplyMethodCompleter) - if (!sym.initialize.hasFlag(IS_ERROR)) + if (!sym.initialize.hasFlag(IS_ERROR)) { newStats += typedStat(tree) // might add even more synthetics to the scope + tree.getAndRemoveAttachment[CaseApplyInheritAccess.type].foreach(_ => + runReporting.warning(tree.pos, "access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access)", Scala3Migration, sym)) + } context.unit.synthetics -= sym - } + case _ => () + } // the type completer of a synthetic might add more synthetics. example: if the // factory method of a case class (i.e. the constructor) has a default. 
moreToAdd = scope.elems ne initElems @@ -3360,7 +3533,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // As packages are open, it doesn't make sense to check double definitions here. Furthermore, // it is expensive if the package is large. Instead, such double definitions are checked in `Namers.enterInScope` - if (!context.owner.isPackageClass) + if (!context.owner.isPackageClass && !unit.isJava) checkNoDoubleDefs(scope) // Note that Java units don't have synthetics, but there's no point in making a special case (for performance or correctness), @@ -3384,16 +3557,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = { def isLowerBounded(tparam: Symbol) = !tparam.info.lowerBound.typeSymbol.isBottomClass - exists2(formals, args) { - case (formal, Function(vparams, _)) => - (vparams exists (_.tpt.isEmpty)) && - vparams.length <= MaxFunctionArity && - (formal baseType FunctionClass(vparams.length) match { + tparams.forall(isLowerBounded) && exists2(formals, args) { + case (formal, Function(vparams, _)) if vparams.exists(_.tpt.isEmpty) => + val arity = vparams.length + arity <= MaxFunctionArity && (formal.baseType(FunctionClass(arity)) match { case TypeRef(_, _, formalargs) => - ( exists2(formalargs, vparams)((formal, vparam) => - vparam.tpt.isEmpty && (tparams exists formal.contains)) - && (tparams forall isLowerBounded) - ) + exists2(formalargs, vparams) { (formal, vparam) => + vparam.tpt.isEmpty && tparams.exists(formal.contains) + } case _ => false }) @@ -3427,13 +3598,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // No need for phasedAppliedType, as we don't get here during erasure -- // overloading resolution happens during type checking. // During erasure, the condition above (fun.symbol.isOverloaded) is false. 
- functionType(vparams map (_ => AnyTpe), shapeType(body)) - case AssignOrNamedArg(Ident(name), rhs) => + functionType(vparams.map(_ => AnyTpe), shapeType(body)) + case Match(EmptyTree, _) => // A partial function literal + appliedType(PartialFunctionClass, AnyTpe :: NothingTpe :: Nil) + case NamedArg(Ident(name), rhs) => NamedType(name, shapeType(rhs)) case _ => NothingTpe } - val argtypes = args map shapeType + val argtypes = args.map(shapeType) val pre = fun.symbol.tpe.prefix var sym = fun.symbol filter { alt => // must use pt as expected type, not WildcardType (a tempting quick fix to #2665) @@ -3449,20 +3622,25 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt) } if (sym.isOverloaded) { - // eliminate functions that would result from tupling transforms - // keeps alternatives with repeated params - val sym1 = sym filter (alt => - isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false) - || alt.tpe.params.exists(_.hasDefault) - ) - if (sym1 != NoSymbol) sym = sym1 + // retracted synthetic apply in favor of user-defined apply + def isRetracted(alt: Symbol) = alt.isError && alt.isSynthetic + // loose arity check: based on args, prefer no tupling, assume no args: _*, + // but keep alt with repeated params or default args, this is a loose fitting + def isLooseFit(alt: Symbol) = + isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false) || alt.tpe.params.exists(_.hasDefault) + sym.filter(alt => !isRetracted(alt) && isLooseFit(alt)) match { + case _: NoSymbol => + case sym1 => sym = sym1 + } } - if (sym == NoSymbol) fun + if (sym == NoSymbol) EmptyTree else adaptAfterOverloadResolution(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode) } else fun } - val fun = preSelectOverloaded(fun0) + val preSelected = preSelectOverloaded(fun0) + val 
shapeless = preSelected.isEmpty + val fun = if (shapeless) fun0 else preSelected val argslen = args.length fun.tpe match { @@ -3470,59 +3648,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def handleOverloaded = { val undetparams = context.undetparams - def funArgTypes(tpAlts: List[(Type, Symbol)]) = tpAlts.map { case (tp, alt) => - val relTp = tp.asSeenFrom(pre, alt.owner) - val argTps = functionOrSamArgTypes(relTp) - //println(s"funArgTypes $argTps from $relTp") - argTps.map(approximateAbstracts) - } - - def functionProto(argTpWithAlt: List[(Type, Symbol)]): Type = - try functionType(funArgTypes(argTpWithAlt).transpose.map(lub), WildcardType) - catch { case _: IllegalArgumentException => WildcardType } - - // To propagate as much information as possible to typedFunction, which uses the expected type to - // infer missing parameter types for Function trees that we're typing as arguments here, - // we expand the parameter types for all alternatives to the expected argument length, - // then transpose to get a list of alternative argument types (push down the overloading to the arguments). - // Thus, for each `arg` in `args`, the corresponding `argPts` in `altArgPts` is a list of expected types - // for `arg`. Depending on which overload is picked, only one of those expected types must be met, but - // we're in the process of figuring that out, so we'll approximate below by normalizing them to function types - // and lubbing the argument types (we treat SAM and FunctionN types equally, but non-function arguments - // do not receive special treatment: they are typed under WildcardType.) 
- val altArgPts = - if (currentRun.isScala212 && args.exists(treeInfo.isFunctionMissingParamType)) - try alts.map(alt => formalTypes(alt.info.paramTypes, argslen).map(ft => (ft, alt))).transpose // do least amount of work up front - catch { case _: IllegalArgumentException => args.map(_ => Nil) } // fail safe in case formalTypes fails to align to argslen - else args.map(_ => Nil) // will type under argPt == WildcardType - val argTpes: ListBuffer[Type] = ListBuffer.empty[Type] val args1: List[Tree] = context.savingUndeterminedTypeParams() { val amode = forArgMode(fun, mode) - map2Conserve(args, altArgPts) { (arg, argPtAlts) => - def typedArg0(tree: Tree): Tree = { - // if we have an overloaded HOF such as `(f: Int => Int)Int (f: Char => Char)Char`, - // and we're typing a function like `x => x` for the argument, try to collapse - // the overloaded type into a single function type from which `typedFunction` - // can derive the argument type for `x` in the function literal above - val argPt = - if (argPtAlts.nonEmpty && treeInfo.isFunctionMissingParamType(tree)) functionProto(argPtAlts) - else WildcardType - - typedArg(tree, amode, BYVALmode, argPt) + + mapWithIndex(args) { (arg, argIdx) => + def typedArg0(tree: Tree, argIdxOrName: Either[Int, Name] = Left(argIdx)) = { + typedArg(tree, amode, BYVALmode, OverloadedArgProto(argIdxOrName, pre, alts)(undetparams)) } arg match { - // scala/bug#8197/scala/bug#4592 call for checking whether this named argument could be interpreted as an assign - // infer.checkNames must not use UnitType: it may not be a valid assignment, or the setter may return another type from Unit - // TODO: just make it an error to refer to a non-existent named arg, as it's far more likely to be - // a typo than an assignment passed as an argument - case AssignOrNamedArg(lhs@Ident(name), rhs) => + case NamedArg(lhs@Ident(name), rhs) => // named args: only type the righthand sides ("unknown identifier" errors otherwise) // the assign is untyped; that's 
ok because we call doTypedApply - val rhsTyped = typedArg0(rhs) + val rhsTyped = typedArg0(rhs, Right(name)) argTpes += NamedType(name, rhsTyped.tpe.deconst) - treeCopy.AssignOrNamedArg(arg, lhs, rhsTyped) + treeCopy.NamedArg(arg, lhs, rhsTyped) case treeInfo.WildcardStarArg(_) => val argTyped = typedArg0(arg) argTpes += RepeatedType(argTyped.tpe.deconst) @@ -3534,11 +3675,40 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } - if (context.reporter.hasErrors) + if (context.reporter.hasErrors) { + if (shapeless) { + val argsWilded = args1.zip(argTpes).map { + case (Function(vparams, _), argTpe) if argTpe.isError => + val paramTypesForErrorMessage = vparams.map { param => + if (param.tpt.isEmpty) WildcardType + else silent(_.typedType(param.tpt).tpe) + .fold(WildcardType: Type) { case ErrorType => NoType case tp => tp } + } + appliedType(FunctionClass(vparams.length), paramTypesForErrorMessage :+ WildcardType) + case (_, argTpe) => if (argTpe.isError) WildcardType else argTpe + } + InferErrorGen.NoMatchingAlternative(fun, alts, argsWilded, pt) + } setError(tree) + } else { + // warn about conversions applied to blocks (#9386) in lieu of fixing + def checkConversionsToBlockArgs(appl: Tree): Unit = + if (settings.warnByNameImplicit) { + val treeInfo.Applied(_, _, argss) = appl + val needsAdjust = + argss.find { + case (aiv: ApplyImplicitView) :: Nil => + aiv.args match { + case Block(_ :: _, _) :: Nil => true + case _ => false + } + case _ => false + } + needsAdjust.foreach(ts => context.warning(ts.head.pos, "Overloaded implicit conversions that take a by-name parameter are applied to the entire block, not just the result expression.", WarningCategory.LintBynameImplicit)) + } inferMethodAlternative(fun, undetparams, argTpes.toList, pt) - doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, mode, pt) + doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, 
mode, pt).tap(checkConversionsToBlockArgs) } } handleOverloaded @@ -3581,13 +3751,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // regardless of typer's mode val invalidAdaptation = t.symbol != null && !checkValidAdaptation(t, args) // only bail if we're typing an expression (and not inside another application) - if (invalidAdaptation && mode.typingExprNotFun) EmptyTree else t + if (invalidAdaptation && mode.typingExprNotFun) EmptyTree + else t.removeAttachment[MultiargInfixAttachment.type] // don't warn if we tupled } - def reset(errors: Seq[AbsTypeError]): Tree = { + def reset(): Tree = { context.undetparams = savedUndetparams EmptyTree } - silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)).map(validate).orElse(reset) + silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)).fold(reset())(validate) } /* Treats an application which uses named or default arguments. @@ -3609,8 +3780,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper duplErrorTree(WrongNumberOfArgsError(tree, fun)) } else if (lencmp > 0) { tryTupleApply orElse duplErrorTree { - val (namelessArgs, argPos) = removeNames(Typer.this)(args, params) - TooManyArgsNamesDefaultsError(tree, fun, formals, args, namelessArgs, argPos) + val (argsNoNames, argPos) = removeNames(Typer.this)(args, params) + argsNoNames.foreach(typed(_, mode, ErrorType)) // typecheck args + TooManyArgsNamesDefaultsError(tree, fun, formals, args, argPos) } } else if (lencmp == 0) { // we don't need defaults. 
names were used, so this application is transformed @@ -3621,7 +3793,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else if (!allArgsArePositional(argPos) && !sameLength(formals, params)) // !allArgsArePositional indicates that named arguments are used to re-order arguments duplErrorTree(MultipleVarargError(tree)) - else if (allArgsArePositional(argPos) && !NamedApplyBlock.unapply(fun).isDefined) { + else if (allArgsArePositional(argPos) && NamedApplyBlock.unapply(fun).isEmpty) { // if there's no re-ordering, and fun is not transformed, no need to transform // more than an optimization, e.g. important in "synchronized { x = update-x }" checkNotMacro() @@ -3653,14 +3825,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val symsOwnedByContextOwner = tree.collect { case t @ (_: DefTree | _: Function) if ownerOf(t.symbol) == context.owner => t.symbol } - def rollbackNamesDefaultsOwnerChanges() { + def rollbackNamesDefaultsOwnerChanges(): Unit = { symsOwnedByContextOwner foreach (_.owner = context.owner) } val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x) if (fun1.isErroneous) duplErrTree else { - val NamedApplyBlock(NamedApplyInfo(qual, targs, previousArgss, _)) = fun1 + val NamedApplyBlock(NamedApplyInfo(qual, targs, previousArgss, _, _)) = fun1: @unchecked val blockIsEmpty = fun1 match { case Block(Nil, _) => // if the block does not have any ValDef we can remove it. 
Note that the call to @@ -3669,8 +3841,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper true case _ => false } - val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context) - val funSym = fun1 match { case Block(_, expr) => expr.symbol } + val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context, mode) + val funSym = fun1 match { case Block(_, expr) => expr.symbol case x => throw new MatchError(x) } val lencmp2 = compareLengths(allArgs, formals) if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) { @@ -3681,11 +3853,28 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else if (lencmp2 == 0) { // useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]() checkNotMacro() - context.diagUsedDefaults = true - doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt) + context.set(ContextMode.DiagUsedDefaults) + def checkRecursive(res: Tree): Unit = + if (settings.warnRecurseWithDefault && !res.isErroneous && context.owner.hasTransOwner(funSym)) + context.warning(res.pos, "Recursive call used default arguments instead of passing current argument values.", WarningCategory.LintRecurseWithDefault) + + doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt).tap(checkRecursive) } else { rollbackNamesDefaultsOwnerChanges() - tryTupleApply orElse duplErrorTree(NotEnoughArgsError(tree, fun, missing)) + tryTupleApply orElse { + // If we don't have enough arguments we still try to type the arguments that we do have, in order to + // propagate known types throughout the subtree to support queries in the presentation compiler. 
+ if (isInteractive && missing.nonEmpty) { + // You would expect `missing` to be non-empty in this branch, but `addDefaults` has a corner case + // for t3649 that causes it to drop some params from `missing` (see `addDefaults` for the reasoning). + val allArgsPlusMissingErrors = allArgs ++ missing.map(s => NamedArg(Ident(s.name), gen.mkZero(NothingTpe))) + silent(_.doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgsPlusMissingErrors, mode, pt)) + } + + val (argsNoNames, _) = removeNames(Typer.this)(allArgs, params) // report bad names + argsNoNames.foreach(typed(_, mode, ErrorType)) // typecheck args + duplErrorTree(NotEnoughArgsError(tree, fun, missing)) + } } } } @@ -3709,36 +3898,54 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer) def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) - val args1 = ( + val args1 = if (noExpectedType) typedArgs(args, forArgMode(fun, mode)) else typedArgsForFormals(args, paramTypes, forArgMode(fun, mode)) - ) // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case: // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo) // precise(foo) : foo.type => foo.type val restpe = mt.resultType(mapList(args1)(arg => gen stableTypeFor arg orElse arg.tpe)) def ifPatternSkipFormals(tp: Type) = tp match { - case MethodType(_, rtp) if (mode.inPatternMode) => rtp + case MethodType(_, rtp) if mode.inPatternMode => rtp case _ => tp } - /* - * This is translating uses of List() into Nil. This is less - * than ideal from a consistency standpoint, but it shouldn't be - * altered without due caution. - * ... 
this also causes bootstrapping cycles if List_apply is - * forced during kind-arity checking, so it is guarded by additional - * tests to ensure we're sufficiently far along. - */ - if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == currentRun.runDefinitions.List_apply)) - atPos(tree.pos)(gen.mkNil setType restpe) - else - constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe), context.owner) + // Inline RHS of ValDef for right-associative by-value operator desugaring. + // Remove the ValDef also if the argument is a constant-folded reference to it. + var (args2, pos2) = (args1, tree.pos) + args1 match { + case List(lit: Literal) => + lit.attachments.get[OriginalTreeAttachment] match { + case Some(OriginalTreeAttachment(id: Ident)) if rightAssocValDefs.contains(id.symbol) => + inlinedRightAssocValDefs += id.symbol + rightAssocValDefs.subtractOne(id.symbol) + case _ => + } + + case List(id: Ident) if rightAssocValDefs.contains(id.symbol) => + mt.params match { + case List(p) if p.isByNameParam => + inlinedRightAssocValDefs += id.symbol + val rhs = rightAssocValDefs.remove(id.symbol).get + args2 = rhs.changeOwner(id.symbol -> context.owner) :: Nil + pos2 = wrappingPos(tree :: rhs :: Nil) + case _ => + } + case _ => + } + + if (!isPastTyper && args.isEmpty && canTranslateEmptyListToNil && currentRun.runDefinitions.isListApply(fun)) + atPos(tree.pos)(gen.mkNil.setType(restpe)) + else { + // annoying issue with classOf that shouldn't be deconsted after typers (during fields phase) + val resTp = ifPatternSkipFormals(if (isPastTyper) restpe else restpe.deconst) + constfold(treeCopy.Apply(tree, fun, args2) setType resTp setPos pos2, context.owner) + } } - if (settings.warnDeadCode) { + if (settings.warnDeadCode.value) { val sym = fun.symbol if (sym != null && sym.isMethod && !sym.isConstructor) { val suppress = sym == Object_synchronized || (sym.isLabel && 
treeInfo.isSynthCaseSymbol(sym)) @@ -3769,14 +3976,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!argtparams.isEmpty) { val strictPt = formal.instantiateTypeParams(tparams, strictTargs) inferArgumentInstance(arg1, argtparams, strictPt, lenientPt) - arg1 - } else arg1 + } + arg1 } val args1 = map2(args, formals)(typedArgToPoly) - if (args1 exists { _.isErrorTyped }) duplErrTree + if (args1.exists(_.isErrorTyped)) duplErrTree else { debuglog("infer method inst " + fun + ", tparams = " + tparams + ", args = " + args1.map(_.tpe) + ", pt = " + pt + ", lobounds = " + tparams.map(_.tpe.lowerBound) + ", parambounds = " + tparams.map(_.info)) //debug - // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun" + // define the undetparams which have been fixed by this param list, + // replace the corresponding symbols in "fun" // returns those undetparams which have not been instantiated. val undetparams = inferMethodInstance(fun, tparams, args1, pt) try doTypedApply(tree, fun, args1, mode, pt) @@ -3795,7 +4003,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // @H change to setError(treeCopy.Apply(tree, fun, args)) // scala/bug#7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... 
case T[X] =>` - case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm => + case HasUnapply(_) if mode.inPatternMode && fun.isTerm => doTypedUnapply(tree, fun0, fun, args, mode, pt) case _ => @@ -3813,53 +4021,54 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val pending = ListBuffer[AbsTypeError]() def ErroneousAnnotation = new ErroneousAnnotation().setOriginal(ann) + def rangeFinder(): (Int, Int) = + if (settings.Yrangepos.value && annotee.get.pos.isDefined) { + val p = annotee.get.pos + (p.start, p.end) + } else { + // compute approximate range + var s = unit.source.length + var e = 0 + object setRange extends ForeachTreeTraverser({ child => + val pos = child.pos + if (pos.isDefined) { + s = s min pos.start + e = e max pos.end + } + }) { + // in `@nowarn @ann(deprecatedMethod) def foo`, the deprecation warning should show + override def traverseModifiers(mods: Modifiers): Unit = () + } + setRange(annotee.get) + (s, e max s) + } def registerNowarn(info: AnnotationInfo): Unit = { - if (annotee.nonEmpty && NowarnClass.exists && info.matches(NowarnClass) && !runReporting.suppressionExists(info.pos)) { - val filters = (info.assocs: @unchecked) match { + if (annotee.isDefined && NowarnClass.exists && info.matches(NowarnClass) && !runReporting.suppressionExists(info.pos)) { + var verbose = false + val filters = (info.assocsForSuper(NowarnClass): @unchecked) match { case Nil => List(MessageFilter.Any) case (_, LiteralAnnotArg(s)) :: Nil => - if (s.stringValue.isEmpty) List() - else { - val (ms, fs) = separateE(s.stringValue.split('&').map(WConf.parseFilter(_, runReporting.rootDirPrefix)).toList) + val str = s.stringValue + if (str.isEmpty) Nil + else if (str == "v" || str == "verbose") { + verbose = true + List(MessageFilter.Any) + } else { + val (ms, fs) = str.split('&').map(WConf.parseFilter(_, runReporting.rootDirPrefix)).toList.partitionMap(identity) if (ms.nonEmpty) reporter.error(info.pos, s"Invalid message 
filter:\n${ms.mkString("\n")}") fs } } - val (start, end) = - if (settings.Yrangepos) { - val p = annotee.get.pos - (p.start, p.end) - } else { - // compute approximate range - var s = unit.source.length - var e = 0 - object setRange extends ForeachTreeTraverser({ child => - val pos = child.pos - if (pos.isDefined) { - s = s min pos.start - e = e max pos.end - } - }) { - // in `@nowarn @ann(deprecatedMethod) def foo`, the deprecation warning should show - override def traverseModifiers(mods: Modifiers): Unit = () - } - setRange(annotee.get) - (s, e max s) - } - runReporting.addSuppression(Suppression(info.pos, filters, start, end)) + val (start, end) = rangeFinder() + runReporting.addSuppression(Suppression(info.pos, filters, start, end, verbose = verbose)) } } - - // aka xs.partitionMap(identity) in 2.13 - def separateE[A, B](xs: List[Either[A, B]]): (List[A], List[B]) = { - import mutable.ListBuffer - val (a, b) = xs.foldLeft((new ListBuffer[A], new ListBuffer[B])) { - case ((a, b), Left(x)) => (a += x, b) - case ((a, b), Right(x)) => (a, b += x) + def registerDeprecationSuppression(info: AnnotationInfo): Unit = + if (annotee.isDefined && info.matches(DeprecatedAttr) && !runReporting.suppressionExists(info.pos)) { + val (start, end) = rangeFinder() + runReporting.addSuppression(Suppression(info.pos, List(MessageFilter.Category(WarningCategory.Deprecation)), start, end, synthetic = true)) } - (a.toList, b.toList) - } def finish(res: AnnotationInfo): AnnotationInfo = { if (hasError) { @@ -3867,6 +4076,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ErroneousAnnotation } else { registerNowarn(res) + registerDeprecationSuppression(res) res } } @@ -3877,24 +4087,50 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ErroneousAnnotation } + // begin typedAnnotation + val treeInfo.Applied(fun0, _, argss) = ann + if (fun0.isErroneous) return finish(ErroneousAnnotation) + val typedFun = 
context.withinAnnotation(typed(fun0, mode.forFunMode)) + if (typedFun.isErroneous) return finish(ErroneousAnnotation) + + val Select(New(annTpt), _) = typedFun: @unchecked + val annType = annTpt.tpe // for a polymorphic annotation class, this type will have unbound type params (see context.undetparams) + val annTypeSym = annType.typeSymbol + val isJava = annTypeSym.isJavaDefined + + val isAnnotation = annTypeSym.isJavaAnnotation || annType <:< AnnotationClass.tpe + if (!isAnnotation) { + reportAnnotationError(DoesNotExtendAnnotation(typedFun, annTypeSym)) + return finish(ErroneousAnnotation) + } + if (currentRun.isScala3 && (/*annTypeSym.eq(SpecializedClass) ||*/ annTypeSym.eq(ElidableMethodClass))) + context.warning(ann.pos, s"@${annTypeSym.fullNameString} is ignored in Scala 3", Scala3Migration) + /* Calling constfold right here is necessary because some trees (negated * floats and literals in particular) are not yet folded. */ def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = { // The typed tree may be relevantly different than the tree `tr`, // e.g. it may have encountered an implicit conversion. - val ttree = typed(constfold(tr, context.owner), pt) + val ttree = if (isJava) typed(constfold(tr, context.owner), pt) else tr val const: Constant = ttree match { case l @ Literal(c) if !l.isErroneous => c case tree => tree.tpe match { - case ConstantType(c) => c - case tpe => null + case ConstantType(c) => c + case _ => null } } + // Usually, defaults are the default expression ASTs, but only for annotations compiled with a recent compiler + // that have `annotation.meta.defaultArg` meta annotations on them. 
+ def isDefaultArg(tree: Tree) = tree match { + case treeInfo.Applied(fun, _, _) => fun.symbol != null && fun.symbol.isDefaultGetter + case _ => false + } + if (const == null) { if (unit.isJava) unmappable = true - else reportAnnotationError(AnnotationNotAConstantError(ttree)) + else if (!isDefaultArg(ttree)) reportAnnotationError(AnnotationNotAConstantError(ttree)) None } else if (const.value == null) { reportAnnotationError(AnnotationArgNullError(tr)); None @@ -3905,32 +4141,35 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper /* Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails, * an error message is reported and None is returned. */ + @tailrec def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match { case Apply(Select(New(_), nme.CONSTRUCTOR), _) if pt.typeSymbol == ArrayClass && unit.isJava => // In Java, a single value may be passed for array annotation parameters tree2ConstArg(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.apply), List(tree)), pt) - case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) => + case Apply(Select(New(_), nme.CONSTRUCTOR), _) if pt.typeSymbol == ArrayClass => reportAnnotationError(ArrayConstantsError(tree)); None - case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => - val annInfo = typedAnnotation(ann, None, mode) - val annType = annInfo.tpe + case Apply(Select(New(tpt), nme.CONSTRUCTOR), _) if isJava => + val annInfo = typedAnnotation(tree, None, mode) + val annType = annInfo.atp if (!annType.typeSymbol.isSubClass(pt.typeSymbol)) reportAnnotationError(AnnotationTypeMismatchError(tpt, pt, annType)) - else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass)) - reportAnnotationError(NestedAnnotationError(ann, annType)) + else if (!annType.typeSymbol.isJavaDefined) + reportAnnotationError(NestedAnnotationError(tree, annType)) if (annInfo.atp.isErroneous) { hasError = true; None } else 
Some(NestedAnnotArg(annInfo)) // use of Array.apply[T: ClassTag](xs: T*): Array[T] // and Array.apply(x: Int, xs: Int*): Array[Int] (and similar) - case Apply(fun, args) => - val typedFun = typed(fun, mode.forFunMode) + case treeInfo.Applied(fun, targs, args :: _) => + val typedFun = if (isJava) typed(fun, mode.forFunMode) else fun if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply) pt match { + case _ if !isJava => + trees2ConstArg(args, targs.headOption.map(_.tpe).getOrElse(WildcardType)) case TypeRef(_, ArrayClass, targ :: _) => trees2ConstArg(args, targ) case _ => @@ -3959,134 +4198,181 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper .map(args => ArrayAnnotArg(args.toArray)) } - // begin typedAnnotation - val treeInfo.Applied(fun0, targs, argss) = ann - if (fun0.isErroneous) - return finish(ErroneousAnnotation) - val typedFun0 = typed(fun0, mode.forFunMode) - val typedFunPart = ( - // If there are dummy type arguments in typeFun part, it suggests we - // must type the actual constructor call, not only the select. The value - // arguments are how the type arguments will be inferred. 
- if (targs.isEmpty && typedFun0.exists(t => t.tpe != null && isDummyAppliedType(t.tpe))) - logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _)))) - else - typedFun0 - ) - val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart - val annType = annTpt.tpe + @inline def constantly = { + // Arguments of Java annotations and ConstantAnnotations are checked to be constants and + // stored in the `assocs` field of the resulting AnnotationInfo + if (argss.lengthIs > 1) { + reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) + } else { + val annScopeJava = annType.decls.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) - finish( - if (typedFun.isErroneous || annType == null) - ErroneousAnnotation - else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) { - // annotation to be saved as java classfile annotation - val isJava = typedFun.symbol.owner.isJavaDefined - if (argss.length > 1) { - reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) + val names = mutable.Set[Symbol]() + names ++= annScopeJava.iterator + + def hasValue = names exists (_.name == nme.value) + val namedArgs = argss match { + case List(List(arg)) if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil + case List(args) => args + case x => throw new MatchError(x) + } + + val nvPairs = namedArgs map { + case arg @ NamedArg(Ident(name), rhs) => + val sym = annScopeJava.lookup(name) + if (sym == NoSymbol) { + reportAnnotationError(UnknownAnnotationNameError(arg, name)) + (nme.ERROR, None) + } else if (!names.contains(sym)) { + reportAnnotationError(DuplicateValueAnnotationError(arg, name)) + (nme.ERROR, None) + } else { + names -= sym + sym.cookJavaRawInfo() // #3429 + val annArg = tree2ConstArg(rhs, sym.tpe.resultType) + (sym.name, annArg) + } + case arg => + reportAnnotationError(ClassfileAnnotationsAsNamedArgsError(arg)) + (nme.ERROR, None) + } + for (sym <- 
names) { + // make sure the flags are up to date before erroring (jvm/t3415 fails otherwise) + sym.initialize + if (!sym.hasAnnotation(AnnotationDefaultAttr) && !sym.hasDefault) + reportAnnotationError(AnnotationMissingArgError(ann, annType, sym)) } + + if (hasError) ErroneousAnnotation + else if (unmappable) UnmappableAnnotation else { - val annScopeJava = - if (isJava) annType.decls.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) - else EmptyScope // annScopeJava is only used if isJava - - val names = mutable.Set[Symbol]() - names ++= (if (isJava) annScopeJava.iterator - else typedFun.tpe.params.iterator) - - def hasValue = names exists (_.name == nme.value) - val args = argss match { - case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil - case args :: Nil => args + if (annTypeSym == JavaDeprecatedAttr && !context.unit.isJava && settings.lintDeprecation) + context.warning(ann.pos, """Prefer the Scala annotation over Java's `@Deprecated` to provide a message and version: @deprecated("message", since = "MyLib 1.0")""", WarningCategory.LintDeprecation) + AnnotationInfo(annType, Nil, nvPairs.map(p => (p._1, p._2.get))).setOriginal(Apply(typedFun, namedArgs).setPos(ann.pos)) + } + } + } + @inline def statically = { + val typedAnn: Tree = { + // local dummy fixes scala/bug#5544 + val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos))) + localTyper.typed(ann, mode | ANNOTmode) + } + @tailrec + def annInfo(t: Tree): AnnotationInfo = t match { + case Block(Nil, expr) => annInfo(expr) + + case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => + // `tpt.tpe` is more precise than `annType`, since it incorporates the types of `args` + AnnotationInfo(tpt.tpe, args, Nil).setOriginal(typedAnn).setPos(t.pos) + + case Apply(fun, args) => + context.warning(t.pos, "Implementation limitation: multiple argument lists on annotations are\n"+ + "currently not supported; ignoring arguments "+ 
args, WarningCategory.Other) + annInfo(fun) + + case _ => + reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn)) + } + + if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation + else { + val info = annInfo(typedAnn) + // check message and since args to deprecated + // when deprecated gets more params, those other args may also be defaults. + def usesDefault = typedAnn match { + case Block(vals, _) => info.args.exists { + case Ident(n) => vals.exists { + case ValDef(_, `n`, _, rhs) => treeInfo.isDefaultGetter(rhs) + case _ => false + } + case _ => + false } + case _ => + info.args.exists(treeInfo.isDefaultGetter) + } + if (annTypeSym == DeprecatedAttr && settings.lintDeprecation && argss.head.lengthIs < 2 && usesDefault) + context.warning(ann.pos, """Specify both message and version: @deprecated("message", since = "MyLib 1.0")""", WarningCategory.LintDeprecation) + info + } + } - val nvPairs = args map { - case arg @ AssignOrNamedArg(Ident(name), rhs) => - val sym = if (isJava) annScopeJava.lookup(name) - else findSymbol(typedFun.tpe.params)(_.name == name) - if (sym == NoSymbol) { - reportAnnotationError(UnknownAnnotationNameError(arg, name)) - (nme.ERROR, None) - } else if (!names.contains(sym)) { - reportAnnotationError(DuplicateValueAnnotationError(arg, name)) - (nme.ERROR, None) - } else { - names -= sym - if (isJava) sym.cookJavaRawInfo() // #3429 - val annArg = tree2ConstArg(rhs, sym.tpe.resultType) - (sym.name, annArg) - } - case arg => - reportAnnotationError(ClassfileAnnotationsAsNamedArgsError(arg)) - (nme.ERROR, None) + finish { + if (isJava) + constantly + else { + val info = statically + if (!info.isErroneous && annTypeSym.isNonBottomSubClass(ConstantAnnotationClass)) { + var namedArgs: Map[Name, Tree] = Map.empty + val treeInfo.Applied(constr, _, _) = info.original match { + case Block(stats, call) => + // when named / default args are used + namedArgs = Map.from(stats collect { + case ValDef(_, name, _, rhs) => 
(name, rhs) + }) + call + case call => call } - for (sym <- names) { - // make sure the flags are up to date before erroring (jvm/t3415 fails otherwise) - sym.initialize - if (!sym.hasAnnotation(AnnotationDefaultAttr) && !sym.hasDefault) - reportAnnotationError(AnnotationMissingArgError(ann, annType, sym)) + val params = constr.symbol.paramss.headOption.getOrElse(Nil) + val assocs = info.args.zip(params) map { + case (arg, param) => + val origArg = arg match { + case Ident(n) => namedArgs.getOrElse(n, arg) + case _ => arg + } + (param.name, tree2ConstArg(origArg, param.tpe.resultType)) } - if (hasError) ErroneousAnnotation else if (unmappable) UnmappableAnnotation - else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos)) - } + else AnnotationInfo(info.atp, Nil, assocs.collect { case (n, Some(arg)) => (n, arg) }).setOriginal(info.original).setPos(info.pos) + } else + info } - else { - val typedAnn: Tree = { - // local dummy fixes scala/bug#5544 - val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos))) - localTyper.typed(ann, mode, annType) - } - def annInfo(t: Tree): AnnotationInfo = t match { - case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => - AnnotationInfo(annType, args, List()).setOriginal(typedAnn).setPos(t.pos) - - case Block(stats, expr) => - context.warning(t.pos, "Usage of named or default arguments transformed this annotation\n"+ - "constructor call into a block. 
The corresponding AnnotationInfo\n"+ - "will contain references to local values and default getters instead\n"+ - "of the actual argument trees", WarningCategory.Other) - annInfo(expr) - - case Apply(fun, args) => - context.warning(t.pos, "Implementation limitation: multiple argument lists on annotations are\n"+ - "currently not supported; ignoring arguments "+ args, WarningCategory.Other) - annInfo(fun) + } + } - case _ => - reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn)) + def typedMacroAnnotation(cdef: ClassDef) = { + val clazz = cdef.symbol + if (!isPastTyper) { + if (clazz != null && (clazz isNonBottomSubClass AnnotationClass)) { + val macroTransform = clazz.info.member(nme.macroTransform) + if (macroTransform != NoSymbol) { + clazz.setFlag(MACRO) + if (clazz.getAnnotation(CompileTimeOnlyAttr).isEmpty) clazz.addAnnotation(AnnotationInfo(CompileTimeOnlyAttr.tpe, List(Literal(Constant(MacroAnnotationNotExpandedMessage)) setType StringClass.tpe), Nil)) + def flavorOk = macroTransform.isMacro + def paramssOk = mmap(macroTransform.paramss)(p => (p.name, p.info)) == List(List((nme.annottees, scalaRepeatedType(AnyTpe)))) + def tparamsOk = macroTransform.typeParams.isEmpty + def everythingOk = flavorOk && paramssOk && tparamsOk + if (!everythingOk) MacroAnnotationShapeError(clazz) + if (!(clazz isNonBottomSubClass StaticAnnotationClass)) MacroAnnotationMustBeStaticError(clazz) + // TODO: revisit the decision about @Inherited + if (clazz.hasAnnotation(InheritedAttr)) MacroAnnotationCannotBeInheritedError(clazz) + if (!clazz.isStatic) MacroAnnotationCannotBeMemberError(clazz) } - if (annType.typeSymbol == DeprecatedAttr && settings.lintDeprecation && sumSize(argss, 0) < 2) - context.warning(ann.pos, """Specify both message and version: @deprecated("message", since = "1.0")""", WarningCategory.LintDeprecation) - - if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation - else annInfo(typedAnn) } - ) + } + cdef } /** Compute an 
existential type from raw hidden symbols `syms` and type `tp` */ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context.owner) - def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = ( - ctx.owner.isTerm && (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || { + def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = + ctx.owner.isTerm && ctx.scope.exists(dcl => dcl.isInitialized && dcl.info.contains(sym)) || { var ctx1 = ctx.outer while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer (ctx1 != NoContext) && isReferencedFrom(ctx1, sym) } - ) - def isCapturedExistential(sym: Symbol) = ( + def isCapturedExistential(sym: Symbol) = (sym hasAllFlags EXISTENTIAL | CAPTURED) && { val start = if (settings.areStatisticsEnabled) statistics.startTimer(isReferencedNanos) else null try !isReferencedFrom(context, sym) finally if (settings.areStatisticsEnabled) statistics.stopTimer(isReferencedNanos, start) } - ) def packCaptured(tpe: Type): Type = { val captured = mutable.Set[Symbol]() @@ -4123,8 +4409,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol)) val dealiasLocals = new TypeMap { + @tailrec def apply(tp: Type): Type = tp match { - case TypeRef(pre, sym, args) => + case TypeRef(pre, sym, _) => if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias) else { if (pre.isVolatile) pre match { @@ -4141,9 +4428,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // add all local symbols of `tp` to `localSyms` // TODO: expand higher-kinded types into individual copies for each instance. 
- def addLocals(tp: Type) { + def addLocals(tp: Type): Unit = { val remainingSyms = new ListBuffer[Symbol] - def addIfLocal(sym: Symbol, tp: Type) { + def addIfLocal(sym: Symbol, tp: Type): Unit = { if (isLocal(sym) && !localSyms(sym) && !boundSyms(sym)) { if (sym.typeParams.isEmpty) { localSyms += sym @@ -4185,7 +4472,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) = - if (!checkClassType(tpt) && noGen) tpt + if (!checkClassOrModuleType(tpt) && noGen) tpt else atPos(tree.pos)(gen.mkClassOf(tpt.tpe)) protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = { @@ -4200,10 +4487,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => { val original = tpt1 match { case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses)) - case _ => { + case _ => debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree") tree - } } TypeTree(newExistentialType(tparams, tp)) setOriginal original } @@ -4212,7 +4498,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // lifted out of typed1 because it's needed in typedImplicit0 protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match { - case OverloadedType(pre, alts) => + case OverloadedType(_, _) => inferPolyAlternatives(fun, mapList(args)(_.tpe)) // scala/bug#8267 `memberType` can introduce existentials *around* a PolyType/MethodType, see AsSeenFromMap#captureThis. 
@@ -4283,26 +4569,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // - // START: applyDynamic suport + // START: applyDynamic support // import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed} - protected def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass + private def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass - /** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not. - * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType) - * NOTE: currently either returns None or Some(NoType) (scala-virtualized extends this to Some(t) for selections on staged Structs) - */ - private def acceptsApplyDynamicWithType(qual: Tree, name: Name): Option[Type] = - // don't selectDynamic selectDynamic, do select dynamic at unknown type, - // in scala-virtualized, we may return a Some(tp) where tp ne NoType - if (!isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen)) Some(NoType) - else None + /** Returns `true` if `name` can be selected dynamically on `qual`, `false` if not. 
*/ + private def acceptsApplyDynamicWithType(qual: Tree, name: Name): Boolean = + // don't selectDynamic selectDynamic, do select dynamic at unknown type + !isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen) + // if the qualifier is a Dynamic, that's all we need to know private def isDynamicallyUpdatable(tree: Tree) = tree match { - // if the qualifier is a Dynamic, that's all we need to know - case DynamicUpdate(qual, name) => acceptsApplyDynamic(qual.tpe) - case _ => false + case DynamicUpdate(qual, _) => acceptsApplyDynamic(qual.tpe) + case _ => false } private def isApplyDynamicNamed(fun: Tree): Boolean = fun match { @@ -4317,7 +4598,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { def argToBinding(arg: Tree): Tree = arg match { - case AssignOrNamedArg(i @ Ident(name), rhs) => + case NamedArg(i @ Ident(name), rhs) => atPos(i.pos.withEnd(rhs.pos.end)) { gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs)) } @@ -4348,7 +4629,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * - simplest solution: have two method calls * */ - protected def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = { + private def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = { val cxTree = context.enclosingNonImportContext.tree // scala/bug#8364 debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)") val treeInfo.Applied(treeSelection, _, _) = tree @@ -4359,11 +4640,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => false } } - acceptsApplyDynamicWithType(qual, name) map { tp => - // If tp == NoType, pass only explicit type arguments to applyXXX. 
Not used at all - // here - it is for scala-virtualized, where tp will be passed as an argument (for - // selection on a staged Struct) - def matches(t: Tree) = isDesugaredApply || treeInfo.dissectCore(t) == treeSelection + + Option.when(acceptsApplyDynamicWithType(qual, name)) { + def matches(t: Tree) = isDesugaredApply || treeInfo.dissectCore(t) == treeSelection /* Note that the trees which arrive here are potentially some distance from * the trees of direct interest. `cxTree` is some enclosing expression which @@ -4375,7 +4654,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper */ def findSelection(t: Tree): Option[(TermName, Tree)] = t match { case Apply(fn, args) if matches(fn) => - val op = if(args.exists(_.isInstanceOf[AssignOrNamedArg])) nme.applyDynamicNamed else nme.applyDynamic + val op = if(args.exists(_.isInstanceOf[NamedArg])) nme.applyDynamicNamed else nme.applyDynamic // not supported: foo.bar(a1,..., an: _*) val fn1 = if(treeInfo.isWildcardStarArgList(args)) DynamicVarArgUnsupported(fn, op) else fn Some((op, fn1)) @@ -4383,21 +4662,30 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ if matches(t) => Some((nme.selectDynamic, t)) case _ => t.children.flatMap(findSelection).headOption } - findSelection(cxTree) map { case (opName, treeInfo.Applied(_, targs, _)) => - val fun = gen.mkTypeApply(Select(qual, opName), targs) + findSelection(cxTree).map { case (opName, treeInfo.Applied(_, targs, _)) => + val fun = atPos(wrappingPos(qual :: targs)) { + gen.mkTypeApply(Select(qual, opName) setPos qual.pos, targs) + } if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // scala/bug#7617 - val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) { - Literal(Constant(name.decode)) + val nameStringLit = { + val p = if (treeSelection.pos.isDefined) treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent else treeSelection.pos + 
atPos(p) { + Literal(Constant(name.decode)) + } + } + markDynamicRewrite { + atPos(wrappingPos(qual :: fun :: nameStringLit :: Nil)) { + Apply(fun, List(nameStringLit)) + } } - markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit)))) - } getOrElse { + }.getOrElse { // While there may be an error in the found tree itself, it should not be possible to *not find* it at all. devWarning(s"Tree $tree not found in the context $cxTree while trying to do a dynamic application") setError(tree) } } } - protected def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head)) + private def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head)) // // END: applyDynamic support // @@ -4411,14 +4699,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name) def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name - def lookupInQualifier(qual: Tree, name: Name): Symbol = ( + def lookupInQualifier(qual: Tree, name: Name): Symbol = if (name == nme.ERROR || qual.tpe.widen.isErroneous) NoSymbol else lookupInOwner(qual.tpe.typeSymbol, name) orElse { - NotAMemberError(tree, qual, name) + NotAMemberError(tree, qual, name, startingIdentContext) NoSymbol } - ) + + def startingIdentContext = + // ignore current variable scope in patterns to enforce linearity + if (mode.inNone(PATTERNmode | TYPEPATmode)) context + else context.outer def typedAnnotated(atd: Annotated): Tree = { val ann = atd.annot @@ -4464,6 +4756,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val atype = ann.tpe // For `f(): @inline/noinline` callsites, add the InlineAnnotatedAttachment. TypeApplys // are eliminated by erasure, so add it to the underlying function in this case. 
+ @tailrec def setInlineAttachment(t: Tree, att: InlineAnnotatedAttachment): Unit = t match { case TypeApply(fun, _) => setInlineAttachment(fun, att) case _ => t.updateAttachment(att) @@ -4477,7 +4770,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedBind(tree: Bind) = tree match { case Bind(name: TypeName, body) => - assert(body == EmptyTree, s"${context.unit} typedBind: ${name.debugString} ${body} ${body.getClass}") + assert(body.isEmpty, s"${context.unit} typedBind: ${name.debugString} ${body} ${body.getClass}") val sym = if (tree.symbol != NoSymbol) tree.symbol else { @@ -4505,7 +4798,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } val body1 = typed(body, mode, pt) - val impliedType = patmat.binderTypeImpliedByPattern(body1, pt, sym) // scala/bug#1503, scala/bug#5204 + val impliedType = patmat.binderTypeImpliedByPattern(body1, pt) // scala/bug#1503, scala/bug#5204 val symTp = if (treeInfo.isSequenceValued(body)) seqType(impliedType) else impliedType @@ -4520,7 +4813,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // since body1 is not necessarily equal to body, we must return a copied tree, // but we must still mutate the original bind tree setSymbol sym - treeCopy.Bind(tree, name, body1) setSymbol sym setType body1.tpe + treeCopy.Bind(tree, name, body1) setSymbol sym setType impliedType } def typedArrayValue(tree: ArrayValue) = { @@ -4546,35 +4839,44 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (varsym == null) return fail() - if (treeInfo.mayBeVarGetter(varsym)) { + def shadowsSetter = + lhs1 match { + case treeInfo.Applied(Select(qual, _), _, _) if qual.isTyped => + qual.tpe.member(varsym.name.setterName).exists + case _ => false + } + + def setterRewrite = lhs1 match { case treeInfo.Applied(Select(qual, _), _, _) => val sel = Select(qual, varsym.name.setterName) setPos lhs.pos val app = 
Apply(sel, List(rhs)) setPos tree.pos - return typed(app, mode, pt) - - case _ => + typed(app, mode, pt) + case _ => EmptyTree } - } -// if (varsym.isVariable || -// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?! -// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) { - if (varsym.isVariable || varsym.isValue && phase.assignsFields) { + + val rewritten = + if (treeInfo.mayBeVarGetter(varsym)) setterRewrite + else EmptyTree + + if (!rewritten.isEmpty) rewritten + else if (varsym.isVariable || varsym.isValue && phase.assignsFields) { val rhs1 = typedByValueExpr(rhs, lhs1.tpe) treeCopy.Assign(tree, lhs1, checkDead(context, rhs1)) setType UnitTpe } - else if(isDynamicallyUpdatable(lhs1)) { + else if (isDynamicallyUpdatable(lhs1)) { val t = atPos(lhs1.pos.withEnd(rhs.pos.end)) { Apply(lhs1, List(rhs)) } wrapErrors(t, _.typed1(t, mode, pt)) } + else if (shadowsSetter) setterRewrite orElse fail() else fail() } def typedIf(tree: If): If = { val cond1 = checkDead(context, typedByValueExpr(tree.cond, BooleanTpe)) - // One-legged ifs don't need a lot of analysis + // Unibranch if normally has unit value else, but synthetic code may emit empty else. 
if (tree.elsep.isEmpty) return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe @@ -4583,8 +4885,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway // in the special (though common) case where the types are equal, it pays to pack before comparing - // especially virtpatmat needs more aggressive unification of skolemized types - // this breaks src/library/scala/collection/immutable/TrieIterator.scala + // especially virtpatmat needed more aggressive unification of skolemized types + // this breaks src/library/scala/collection/immutable/TrieIterator.scala (which as of 2.13 doesn't actually exist anymore) // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this) def samePackedTypes = ( !isPastTyper @@ -4608,15 +4910,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - // When there's a suitable __match in scope, virtualize the pattern match - // otherwise, type the Match and leave it until phase `patmat` (immediately after typer) + // Type the Match and leave it until phase `patmat` // empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it def typedVirtualizedMatch(tree: Match): Tree = { val selector = tree.selector val cases = tree.cases if (selector == EmptyTree) { if (pt.typeSymbol == PartialFunctionClass) - synthesizePartialFunction(newTermName(fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt) + synthesizePartialFunction(newTermName(fresh.newName("x")), tree.pos, paramType = NoType, paramSynthetic = true, tree, mode, pt) else { val arity = functionArityFromType(pt) match { case -1 => 1 case arity => arity } // scala/bug#8429: consider sam and function type equally in determining function arity @@ -4635,7 
+4936,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper typed1(atPos(tree.pos) { Function(params, body) }, mode, pt) } } else - virtualizedMatch(typedMatch(selector, cases, mode, pt, tree), mode, pt) + typedMatch(selector, cases, mode, pt, tree) } def typedReturn(tree: Return) = { @@ -4647,7 +4948,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ) { ReturnOutsideOfDefError(tree) } else { - val DefDef(_, name, _, _, restpt, _) = enclMethod.tree + val DefDef(_, name, _, _, restpt, _) = enclMethod.tree: @unchecked if (restpt.tpe eq null) { ReturnWithoutTypeError(tree, enclMethod.owner) } @@ -4660,7 +4961,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // unless the warning is legitimate. val typedExpr = typed(expr) if (!isPastTyper && typedExpr.tpe.typeSymbol != UnitClass) - context.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded", WarningCategory.Other) + context.warning(tree.pos, "enclosing method " + name + s" has result type Unit: return value of type ${typedExpr.tpe} discarded", WarningCategory.Other) } val res = treeCopy.Return(tree, checkDead(context, expr1)).setSymbol(enclMethod.owner) val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe) @@ -4679,13 +4980,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // typedTypeConstructor dealiases nothing now, but it makes sense for a "new" to always be // given a dealiased type. val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias) - if (checkStablePrefixClassType(tpt0)) - if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) { - context.undetparams = cloneSymbols(tpt0.symbol.typeParams) - notifyUndetparamsAdded(context.undetparams) - TypeTree().setOriginal(tpt0) - .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347. 
- } else tpt0 + + if (checkStablePrefixClassType(tpt0)) { + tpt0.tpe.normalize match { // eta-expand + case PolyType(undet, appliedToUndet) => + context.undetparams = undet // can reuse these type params, they're fresh + notifyUndetparamsAdded(undet) + TypeTree().setOriginal(tpt0).setType(appliedToUndet) + case _ => tpt0 + } + } else tpt0 } @@ -4705,10 +5009,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val tp = tpt1.tpe val sym = tp.typeSymbol.initialize - if (sym.isAbstractType || sym.hasAbstractFlag) + + if ((sym.isAbstractType || sym.hasAbstractFlag) + && !(sym.isJavaAnnotation && context.inAnnotation)) IsAbstractError(tree, sym) else if (isPrimitiveValueClass(sym)) { - NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR) + NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR, startingIdentContext) setError(tpt) } else if (!( tp == sym.typeOfThis // when there's no explicit self type -- with (#3612) or without self variable @@ -4721,29 +5027,30 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper treeCopy.New(tree, tpt1).setType(tp) } - def functionTypeWildcard(arity: Int): Type = - functionType(List.fill(arity)(WildcardType), WildcardType) - - def checkArity(tree: Tree)(tp: Type): tp.type = tp match { - case NoType => MaxFunctionArityError(tree); tp - case _ => tp - } - - /** Eta expand an expression like `m _`, where `m` denotes a method or a by-name argument - * - * The spec says: - * The expression `$e$ _` is well-formed if $e$ is of method type or if $e$ is a call-by-name parameter. - * (1) If $e$ is a method with parameters, `$e$ _` represents $e$ converted to a function type - * by [eta expansion](#eta-expansion). - * (2) If $e$ is a parameterless method or call-by-name parameter of type `=>$T$`, `$e$ _` represents - * the function of type `() => $T$`, which evaluates $e$ when it is applied to the empty parameterlist `()`. 
- */ + /* Eta expand an expression like `m _`, where `m` denotes a method or a by-name argument + * + * The spec says: + * The expression `$e$ _` is well-formed if $e$ is of method type or if $e$ is a call-by-name parameter. + * (1) If $e$ is a method with parameters, `$e$ _` represents $e$ converted to a function type + * by [eta expansion](#eta-expansion). + * (2) If $e$ is a parameterless method or call-by-name parameter of type `=> $T$`, `$e$ _` represents + * the function of type `() => $T$`, which evaluates $e$ when it is applied to the empty parameter list `()`. + */ def typedEta(methodValue: Tree): Tree = methodValue.tpe match { case tp@(MethodType(_, _) | PolyType(_, MethodType(_, _))) => // (1) - val formals = tp.params - if (isFunctionType(pt) || samMatchesFunctionBasedOnArity(samOf(pt), formals)) methodValue - else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length))) + if (tp.params.lengthCompare(definitions.MaxFunctionArity) > 0) MaxFunctionArityError(methodValue, s"; method ${methodValue.symbol.name} cannot be eta-expanded because it takes ${tp.params.length} arguments") + else { + val etaPt = + pt match { + case pt: ProtoType => + pt.asFunctionType orElse functionType(WildcardType.fillList(tp.params.length), WildcardType) orElse WildcardType // arity overflow --> NoType + case _ => pt + } + + // We know syntactically methodValue can't refer to a constructor because you can't write `this _` for that (right???) 
+ typedEtaExpansion(methodValue, mode, etaPt) + } case TypeRef(_, ByNameParamClass, _) | NullaryMethodType(_) => // (2) val pos = methodValue.pos @@ -4753,14 +5060,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val result = typed(Function(Nil, methodValue) setSymbol funSym setPos pos, mode, pt) - if (currentRun.isScala3) { - UnderscoreNullaryEtaError(methodValue) - } else { - if (currentRun.isScala213) - context.deprecationWarning(pos, NoSymbol, UnderscoreNullaryEtaWarnMsg, "2.13.2") - result + val msg = "Methods without a parameter list and by-name params can no longer be converted to functions as `m _`, " + + "write a function literal `() => m` instead" + + val action = { + val etaPos = pos.withEnd(pos.end + 2) + if (pos.source.sourceAt(etaPos).endsWith(" _")) + runReporting.codeAction("replace by function literal", etaPos, s"() => ${pos.source.sourceAt(pos)}", msg) + else Nil } + if (currentRun.isScala3) + context.warning(pos, msg, Scala3Migration, action) + else + context.deprecationWarning(pos, NoSymbol, msg, "2.13.2", action) + + result + case ErrorType => methodValue @@ -4777,7 +5093,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } catch { case ex: CyclicReference => throw ex - case te: TypeError => + case _: TypeError => // @H some of typer errors can still leak, // for instance in continuations None @@ -4790,7 +5106,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { val start = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null - def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String, WarningCategory, Symbol)]): Tree = { + def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[ContextWarning]): Tree = { if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, start) // If the problem is with raw types, convert to 
existentials and try again. @@ -4818,7 +5134,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case If(_, t, e) => loop(t) || loop(e) case Try(b, catches, _) => loop(b) || catches.exists(pred) case MethodValue(r) => loop(r) - case Select(qual, name) => loop(qual) + case Select(qual, _) => loop(qual) case Apply(fun, args) => loop(fun) || args.exists(loop) case TypeApply(fun, args) => loop(fun) || args.exists(loop) case _ => false @@ -4826,13 +5142,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper loop(tree) } val retry = typeErrors.forall(_.errPos != null) && (errorInResult(fun) || errorInResult(tree) || args.exists(errorInResult)) - typingStack.printTyping({ - val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ") - if (retry) "second try: " + funStr - else "no second try: " + funStr + " because error not in result: " + typeErrors.head.errPos+"!="+tree.pos - }) + typingStack.printTyping { + val funStr = s"${ptTree(fun)} and ${args.map(ptTree).mkString(", ")}" + if (retry) s"second try: $funStr" + else s"no second try: $funStr because error not in result: ${typeErrors.head.errPos}!=${tree.pos}" + } if (retry) { - val Select(qual, name) = fun + val Select(qual, name) = fun: @unchecked tryTypedArgs(args, forArgMode(fun, mode)) match { case Some(args1) if !args1.exists(arg => arg.exists(_.isErroneous)) => val qual1 = @@ -4845,8 +5161,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => () } } - typeErrors foreach context.issue - warnings foreach { case (p, m, c, s) => context.warning(p, m, c, s) } + def adjust(err: AbsTypeError) = + if (tree.hasAttachment[InterpolatedString.type]) + tree match { + case Apply(sc @ Ident(nme.StringContextName), _) => + if (sc.isErroneous) err + else NormalTypeError(tree, s"${err.errMsg}; signature for interpolation must be `StringContext.apply(String*)`") + case Apply(Select(_, nm), badargs) => + if 
(badargs.exists(arg => arg.isErroneous || arg.pos.includes(err.errPos) && arg.pos != err.errPos)) err + else NormalTypeError(tree, s"${err.errMsg}; incompatible interpolation method $nm") + case x => throw new MatchError(x) + } + else err + typeErrors.foreach(err => context.issue(adjust(err))) + warnings.foreach { case ContextWarning(p, m, c, s, as) => context.warning(p, m, c, s, as) } setError(treeCopy.Apply(tree, fun, args)) } @@ -4867,9 +5195,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper !mode.inPatternMode && nme.isOpAssignmentName(TermName(name.decode)) && !qual.exists(_.isErroneous) def reportError(error: SilentTypeError): Tree = { - error.reportableErrors foreach context.issue - error.warnings foreach { case (p, m, c, s) => context.warning(p, m, c, s) } - args foreach (arg => typed(arg, mode, ErrorType)) + error.reportableErrors.foreach(context.issue) + error.warnings.foreach { case ContextWarning(p, m, c, s, as) => context.warning(p, m, c, s, as) } + args.map { case NamedArg(_, rhs) => rhs case arg => arg } + .foreach(typed(_, mode, ErrorType)) setError(tree) } def advice1(convo: Tree, errors: List[AbsTypeError], err: SilentTypeError): List[AbsTypeError] = @@ -4877,7 +5206,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (e.errPos samePointAs tree.pos) { val header = f"${e.errMsg}%n Expression does not convert to assignment because:%n " val expansion = f"%n expansion: ${show(convo)}" - NormalTypeError(tree, err.errors.iterator.flatMap(_.errMsg.linesIterator).mkString(header, f"%n ", expansion)) + NormalTypeError(tree, err.errors.flatMap(_.errMsg.linesIterator.toList).mkString(header, f"%n ", expansion)) } else e } def advice2(errors: List[AbsTypeError]): List[AbsTypeError] = @@ -4895,6 +5224,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous) if (erred) reportError(error) 
else { val convo = convertToAssignment(fun, qual1, name, args) + if (settings.multiargInfix && tree.hasAttachment[MultiargInfixAttachment.type] && args.lengthCompare(1) > 0) + warnMultiargInfix(tree) silent(op = _.typed1(convo, mode, pt)) match { case SilentResultValue(t) => t case err: SilentTypeError => reportError( @@ -4904,7 +5235,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } else { if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) - val Apply(Select(qual2, _), args2) = tree + val Apply(Select(qual2, _), args2) = tree: @unchecked val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous) reportError { if (erred) error else SilentTypeError(advice2(error.errors), error.warnings) @@ -4923,15 +5254,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 if (settings.areStatisticsEnabled) statistics.incCounter(typedApplyCount) - val noSecondTry = ( - isPastTyper - || context.inSecondTry - || (fun2.symbol ne null) && fun2.symbol.isConstructor - || isImplicitMethodType(fun2.tpe) - ) val isFirstTry = fun2 match { - case Select(_, _) => !noSecondTry && mode.inExprMode - case _ => false + case Select(_, _) => mode.inExprMode && { + val noSecondTry = ( + isPastTyper + || context.inSecondTry + || (fun2.symbol ne null) && fun2.symbol.isConstructor + || isImplicitMethodType(fun2.tpe) + ) + !noSecondTry + } + case _ => false } if (isFirstTry) tryTypedApply(fun2, args) @@ -4967,6 +5300,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // The enclosing context may be case c @ C(_) => or val c @ C(_) = v. 
tree1 modifyType (_.finalResultType) tree1 + case tree1 @ Apply(_, args1) if settings.multiargInfix && tree.hasAttachment[MultiargInfixAttachment.type] && args1.lengthCompare(1) > 0 => + warnMultiargInfix(tree1) + tree1 case tree1 => tree1 } } @@ -4980,16 +5316,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper Select(vble.duplicate, prefix) setPos fun.pos.focus, args) setPos tree.pos.makeTransparent ) setPos tree.pos - def mkUpdate(table: Tree, indices: List[Tree], argss: List[List[Tree]]) = + def mkUpdate(table: Tree, indices: List[Tree], args_? : Option[List[Tree]]) = gen.evalOnceAll(table :: indices, context.owner, fresh) { case tab :: is => - def mkCall(name: Name, extraArgs: Tree*) = ( + def mkCall(name: Name, extraArgs: Tree*) = Apply( Select(tab(), name) setPos table.pos, is.map(i => i()) ++ extraArgs ) setPos tree.pos - ) - def mkApplies(core: Tree) = argss.foldLeft(core)((x, args) => Apply(x, args)) + + def mkApplies(core: Tree) = args_?.fold(core) { args => + Apply(core, args) setPos wrappingPos(core :: args) + } mkCall( nme.update, Apply(Select(mkApplies(mkCall(nme.apply)), prefix) setPos fun.pos, args) setPos tree.pos @@ -5001,7 +5339,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Ident(_) => mkAssign(qual) - case Select(qualqual, vname) => + case Select(qualqual, _) => gen.evalOnce(qualqual, context.owner, fresh) { qq => val qq1 = qq() mkAssign(Select(qq1, qual.symbol) setPos qual.pos) @@ -5011,16 +5349,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper fn match { case treeInfo.Applied(Select(table, nme.apply), _, indices :: Nil) => // table(indices)(implicits) - mkUpdate(table, indices, extra :: Nil) + mkUpdate(table, indices, Some(extra)) case _ => UnexpectedTreeAssignmentConversionError(qual) } case Apply(fn, indices) => fn match { case treeInfo.Applied(Select(table, nme.apply), _, Nil) => - mkUpdate(table, indices, Nil) + 
mkUpdate(table, indices, None) case _ => UnexpectedTreeAssignmentConversionError(qual) } + + case x => throw new MatchError(x) } assignment } @@ -5055,29 +5395,68 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - val owntype = ( + val owntype = if (!mix.isEmpty) findMixinSuper(clazz.tpe) else if (context.inSuperInit) clazz.info.firstParent else intersectionType(clazz.info.parents) - ) + treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype) } def typedThis(tree: This) = - tree.symbol orElse qualifyingClass(tree, tree.qual, packageOK = false) match { + tree.symbol orElse qualifyingClass(tree, tree.qual, packageOK = false, immediate = true) match { case NoSymbol => tree case clazz => tree setSymbol clazz setType clazz.thisType.underlying if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree } - // For Java, instance and static members are in the same scope, but we put the static ones in the companion object // so, when we can't find a member in the class scope, check the companion def inCompanionForJavaStatic(cls: Symbol, name: Name): Symbol = - if (!(context.unit.isJava && cls.isClass)) NoSymbol else { - context.javaFindMember(cls.typeOfThis, name, _.isStaticMember)._2 + if (!(context.unit.isJava && cls.isClass)) NoSymbol + else context.javaFindMember(cls.typeOfThis, name, s => s.isStaticMember || s.isStaticModule)._2 + // For Java, a selection p.q requires checking if p is a type with member q; otherwise it is a package. + // If a non-package term was found, look for a class; otherwise just look for a package. 
+ def repairJavaSelection(qual: Tree, name: Name): Symbol = + if (!context.unit.isJava || !qual.hasAttachment[RootSelection.type] || qual.symbol.hasPackageFlag) NoSymbol + else qual match { + case Ident(qname) => + val found = + if (qual.symbol.isTerm) { + val lookup = context.lookupSymbol(qname.toTypeName, s => qualifies(s) && s.isClass) + if (lookup.isSuccess) inCompanionForJavaStatic(lookup.symbol, name) else NoSymbol + } + else NoSymbol + found.orElse { + context.lookupSymbol(qname, s => qualifies(s) && s.hasPackageFlag) match { + case LookupSucceeded(_, pkg) => member(pkg.info, name) + case _ => NoSymbol + } + } + case _ => NoSymbol + } + + // If they try C.tupled, make it (C.apply _).tupled + def fixUpCaseTupled(tree: Tree, qual: Tree, name: Name, mode: Mode): Tree = { + def isFixable(name: Name) = name == nme.tupled || name == nme.curried + + if (!isPastTyper && qual.symbol != null && qual.symbol.isModule && qual.symbol.companion.isCase && + context.undetparams.isEmpty && isFixable(name)) { + val t2 = { + val t = atPos(tree.pos)(Select(qual, nme.apply)) + val t1 = typedSelect(t, qual, nme.apply) + typed(atPos(tree.pos)(Select(etaExpand(t1, context.owner), name)), mode, pt) + } + if (!t2.isErroneous) { + val msg = s"The method `apply` is inserted. The auto insertion will be deprecated, please write `($qual.apply _).$name` explicitly." + context.deprecationWarning(tree.pos, qual.symbol, msg, "2.13.13") + t2 + } + else EmptyTree } + else EmptyTree + } /* Attribute a selection where `tree` is `qual.name`. * `qual` is already attributed. 
@@ -5092,18 +5471,35 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else UnstableTreeError(qual) else { def asDynamicCall = mkInvoke(context, tree, qual, name) map { t => - wrapErrors(t, (_.typed1(t, mode, pt))) + wrapErrors(t, _.typed1(t, mode, pt)) } - - val sym = tree.symbol orElse member(qual.tpe, name) orElse inCompanionForJavaStatic(qual.tpe.typeSymbol, name) + def checkDubiousUnitSelection(result: Tree): Unit = + if (!isPastTyper && isUniversalMember(result.symbol)) + context.warning(tree.pos, s"dubious usage of ${result.symbol} with unit value", WarningCategory.LintUniversalMethods) + + val sym = tree.symbol + .orElse(member(qualTp, name)) + .orElse(inCompanionForJavaStatic(qualTp.typeSymbol, name)) + .orElse(repairJavaSelection(qual, name)) if ((sym eq NoSymbol) && name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) { // symbol not found? --> try to convert implicitly to a type that does have the required // member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an // xml member to StringContext, which in turn has an unapply[Seq] method) + def checkDubiousAdaptation(sel: Tree): Unit = if (!isPastTyper && settings.lintNumericMethods) { + val dubious = ScalaIntegralValueClasses(qualTp.typeSymbol) && sel.symbol != null && ( + sel.symbol.owner.eq(BoxedFloatClass) || sel.symbol.owner.eq(RichFloatClass)) + if (dubious) + context.warning(tree.pos, s"dubious usage of ${sel.symbol} with integer value", WarningCategory.LintNumericMethods) + } val qual1 = adaptToMemberWithArgs(tree, qual, name, mode) - if ((qual1 ne qual) && !qual1.isErrorTyped) - return typed(treeCopy.Select(tree, qual1, name), mode, pt) + val fixed = + if ((qual1 ne qual) && !qual1.isErrorTyped) + typed(treeCopy.Select(tree, qual1, name), mode, pt).tap(checkDubiousAdaptation) + else + fixUpCaseTupled(tree, qual, name, mode) + if (!fixed.isEmpty) + return fixed } // This special-case complements the logic in `adaptMember` in 
erasure, it handles selections @@ -5125,7 +5521,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!reallyExists(sym)) { def handleMissing: Tree = { def errorTree = missingSelectErrorTree(tree, qual, name) - def asTypeSelection = ( + def asTypeSelection = if (context.unit.isJava && name.isTypeName) { // scala/bug#3120 Java uses the same syntax, A.B, to express selection from the // value A and from the type A. We have to try both. @@ -5133,9 +5529,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case EmptyTree => None case tree1 => Some(typed1(tree1, mode, pt)) } - } - else None - ) + } else None + debuglog(s""" |qual=$qual:${qual.tpe} |symbol=${qual.tpe.termSymbol.defString} @@ -5157,23 +5552,46 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper handleMissing } else { + if ((sym ne NoSymbol) && !qual.tpe.isStable && argsDependOnPrefix(sym)) { + // Rewrites "qual.name ..." to "{ val lhs = qual ; lhs.name ... }" in cases where + // qual is not stable and name has a method type which depends on its prefix. If + // this is the case then hoisting qual out as a stable val means that members of + // implicit scopes which are accessible via lhs can be candidates for satisfying + // implicit (conversions to) arguments of name. 
+ + val vsym = context.owner.newValue(freshTermName(nme.STABILIZER_PREFIX), qual.pos.focus, SYNTHETIC | ARTIFACT | STABLE) + vsym.setInfo(uncheckedBounds(qual.tpe)) + val vdef = atPos(vsym.pos)(ValDef(vsym, focusInPlace(qual)) setType NoType) + context.pendingStabilizers ::= vdef + qual.changeOwner(context.owner -> vsym) + val newQual = Ident(vsym) setType singleType(NoPrefix, vsym) setPos qual.pos.focus + return typedSelect(tree, newQual, name).modifyType(_.map { + // very specific fix for scala/bug#12987 (details in the ticket) + case t: AliasTypeRef if t.pre.termSymbol == vsym && context.undetparams.contains(t.normalize.typeSymbol) => + t.normalize + case t => t + }) + } + val tree1 = tree match { - case Select(_, _) => treeCopy.Select(tree, qual, name) + case Select(_, _) => treeCopy.Select(tree, qual, name) case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) + case x => throw new MatchError(x) } val pre = qual.tpe var accessibleError: AccessTypeError = null val result = silent(_.makeAccessible(tree1, sym, pre, qual)) match { - case SilentTypeError(err: AccessTypeError) => - accessibleError = err - tree1 - case SilentTypeError(err) => - SelectWithUnderlyingError(tree, err) - return tree - case SilentResultValue((qual: Tree, pre1: Type)) => - stabilize(qual, pre1, mode, pt) - case SilentResultValue(qual: Tree) => - stabilize(qual, pre, mode, pt) + case SilentTypeError(err) => err match { + case err: AccessTypeError => + accessibleError = err + tree1 + case _ => + SelectWithUnderlyingError(tree, err) + return tree + } + case SilentResultValue(result: Tree) => stabilize(result, pre, mode, pt) + case SilentResultValue((result: Tree, pre1: Type)) => stabilize(result, pre1, mode, pt) + case x => throw new MatchError(x) } result match { @@ -5190,24 +5608,30 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper setError(tree) } // could checkAccessible (called by makeAccessible) potentially have skipped checking 
a type application in qual? - case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks + case SelectFromTypeTree(qualifier@TypeTree(), name) if qualifier.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks treeCopy.SelectFromTypeTree( result, - (TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect + TypeTreeWithDeferredRefCheck(qualifier) { () => val tp = qualifier.tpe; val sym = tp.typeSymbolDirect // will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one? - checkBounds(qual, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "") - qual // you only get to see the wrapped tree after running this check :-p - }) setType qual.tpe setPos qual.pos, - name) + checkBounds(qualifier, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "") + qualifier // you only get to see the wrapped tree after running this check :-p + }.setType(qualifier.tpe).setPos(qual.pos), + name + ) case _ => - result + if (settings.lintUniversalMethods && qualTp.widen.eq(UnitTpe)) checkDubiousUnitSelection(result) + if (isConstantType(qualTp) && constfold.foldableUnaryOps(name)) constfold(result, context.owner) + else result } } } } - def typedTypeSelectionQualifier(tree: Tree, pt: Type = AnyRefTpe) = - context.withImplicitsDisabled { typed(tree, MonoQualifierModes | mode.onlyTypePat, pt) } + def typedTypeSelectionQualifier(tree: Tree, pt: Type) = + context.withImplicitsDisabled { + val mode1 = MonoQualifierModes | mode.onlyTypePat + typed(checkRootOfQualifier(tree, mode1), mode1, pt) + } def typedSelectOrSuperCall(tree: Select) = tree match { case Select(qual @ Super(_, _), nme.CONSTRUCTOR) => @@ -5259,7 +5683,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper */ def typedIdent(tree: Tree, name: Name): Tree = { // setting to enable unqualified idents in empty package (used 
by the repl) - def inEmptyPackage = if (settings.exposeEmptyPackage) lookupInEmpty(name) else NoSymbol + def inEmptyPackage = if (settings.exposeEmptyPackage.value) lookupInEmpty(name) else NoSymbol def issue(err: AbsTypeError) = { // Avoiding some spurious error messages: see scala/bug#2388. @@ -5276,62 +5700,104 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper startContext.lookupSymbol(name.toTypeName, qualifies).symbol } else NoSymbol - // in Java, only pick a package if it is rooted + // in Java, only pick a package p if it is rooted (no relative packaging) def termQualifies(sym: Symbol) = qualifies(sym) && ( !startContext.unit.isJava || !sym.hasPackageFlag || sym.owner.isEffectiveRoot || sym.owner.isRootPackage || sym.isRootPackage ) val nameLookup = tree.symbol match { case NoSymbol => startContext.lookupSymbol(name, termQualifies) - case sym => LookupSucceeded(EmptyTree, sym) + case oksymbol => LookupSucceeded(EmptyTree, oksymbol) } import InferErrorGen._ nameLookup match { - case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) - case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) - case LookupNotFound => + case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) + case LookupInaccessible(symbol, msg) => issue(AccessError(tree, symbol, context, msg)) + case LookupNotFound => asTypeName orElse inEmptyPackage orElse lookupInRoot(name) match { - case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) - case sym => typed1(tree setSymbol sym, mode, pt) + case NoSymbol => + def hasOutput = settings.outputDirs.getSingleOutput.isDefined || settings.outputDirs.outputs.nonEmpty + val hidden = !unit.isJava && hasOutput && { + startContext.lookupSymbol(name.companionName, _ => true) match { + case LookupSucceeded(qualifier, symbol) if symbol.owner.hasPackageFlag && symbol.sourceFile != null => + symbol.owner.info.decls.lookupAll(name).toList 
match { + case other :: Nil if other.sourceFile == null => + val nameString = name.toString + classPath.classes(symbol.owner.fullNameString).find(_.name == nameString).exists { repr => + settings.outputDirs.getSingleOutput match { + // is the class file not in the output directory, so it's a dependency not a stale symbol + case Some(out) => repr.binary.exists(!_.path.startsWith(out.path)) + // is the class file not in any output directory + case _ => repr.binary.exists { bin => + !settings.outputDirs.outputs.exists { case (_, out) => + bin.path.startsWith(out.path) + } + } + } + } + case _ => false + } + case _ => false + } } - case LookupSucceeded(qual, sym) => - sym.getAndRemoveAttachment[LookupAmbiguityWarning].foreach(w => - runReporting.warning(tree.pos, w.msg, WarningCategory.Other, context.owner)) - (// this -> Foo.this - if (sym.isThisSym) - typed1(This(sym.owner) setPos tree.pos, mode, pt) - else if (sym.rawname == nme.classOf && currentRun.runDefinitions.isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) { - // Inferring classOf type parameter from expected type. Otherwise an - // actual call to the stubbed classOf method is generated, returning null. 
- typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus)) + issue(SymbolNotFoundError(tree, name, context.owner, inPattern = mode.in(all = PATTERNmode, none = APPSELmode | TYPEPATmode), hidden = hidden)) + case oksymbol => typed1(tree.setSymbol(oksymbol), mode, pt) } - else { - val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe - val tree1 = if (qual == EmptyTree) tree else { - val pos = tree.pos - Select(atPos(pos.focusStart)(qual), name).setPos(pos) + case LookupSucceeded(qual, symbol) => + symbol.getAndRemoveAttachment[LookupAmbiguityWarning].foreach(w => { + val cat = if (currentRun.isScala3) Scala3Migration else WarningCategory.Other + val fix = runReporting.codeAction("make reference explicit", tree.pos.focusStart, w.fix, w.msg) + runReporting.warning(tree.pos, w.msg, cat, context.owner, fix) + }) + if (currentRun.isScala3) + tree.getAndRemoveAttachment[VirtualStringContext.type].foreach(_ => + if (symbol != definitions.StringContextModule) + runReporting.warning( + tree.pos, + s"In Scala 3 (or with -Xsource-features:string-context-scope), String interpolations always use scala.StringContext (${symbol.fullNameString} is used here)", + Scala3Migration, + context.owner) + ) + val onSuccess = + if (symbol.isThisSym) + typed1(This(symbol.owner).setPos(tree.pos), mode, pt) // this -> Foo.this + else if (symbol.rawname == nme.classOf && currentRun.runDefinitions.isPredefClassOf(symbol) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) { + // Inferring classOf type parameter from expected type. Otherwise an + // actual call to the stubbed classOf method is generated, returning null. 
+ typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus)) } - var tree2: Tree = null - var pre2: Type = pre1 - makeAccessible(tree1, sym, pre1, qual) match { - case (t: Tree, tp: Type) => - tree2 = t - pre2 = tp - case t: Tree => - tree2 = t + else { + val pre1 = if (symbol.isTopLevel) symbol.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe + if (settings.lintUniversalMethods && !pre1.isInstanceOf[ThisType] && isUniversalMember(symbol)) + context.warning(tree.pos, s"${symbol.nameString} not selected from this instance", WarningCategory.LintUniversalMethods) + val tree1 = if (qual == EmptyTree) tree else { + val pos = tree.pos + Select(atPos(pos.focusStart)(qual), name).setPos(pos) + } + var tree2: Tree = null + var pre2: Type = pre1 + makeAccessible(tree1, symbol, pre1, qual) match { + case (t: Tree, tp: Type) => + tree2 = t + pre2 = tp + case t: Tree => + tree2 = t + case x => throw new MatchError(x) + } + // scala/bug#5967 Important to replace param type A* with Seq[A] when seen from from a reference, + // to avoid inference errors in pattern matching. + stabilize(tree2, pre2, mode, pt).modifyType(dropIllegalStarTypes) } - // scala/bug#5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid - // inference errors in pattern matching. 
- stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes - }) setAttachments tree.attachments - } + onSuccess.setAttachments(tree.attachments) } + } def typedIdentOrWildcard(tree: Ident) = { val name = tree.name if (settings.areStatisticsEnabled) statistics.incCounter(typedIdentCount) - if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || - (name == tpnme.WILDCARD && mode.inTypeMode)) + if (!tree.isBackquoted && + ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || + (name == tpnme.WILDCARD && mode.inTypeMode))) tree setType makeFullyDefined(pt) else typedIdent(tree, name) @@ -5358,11 +5824,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedAppliedTypeTree(tree: AppliedTypeTree) = { - val tpt = tree.tpt - val args = tree.args - val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType) - def isPoly = tpt1.tpe.isInstanceOf[PolyType] - def isComplete = tpt1.symbol.rawInfo.isComplete + val tpt = tree.tpt + val args = tree.args + val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType) if (tpt1.isErrorTyped) { tpt1 @@ -5370,6 +5834,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper AppliedTypeNoParametersError(tree, tpt1.tpe) } else { val tparams = tpt1.symbol.typeParams + val isComplete = tpt1.symbol.rawInfo.isComplete if (sameLength(tparams, args)) { // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer) @@ -5391,20 +5856,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper foreach2(args, tparams) { (arg, tparam) => // note: can't use args1 in selector, because Binds got replaced val asym = arg.symbol + def abounds = asym.info.bounds + def tbounds = tparam.info.bounds + // TODO investigate whether this should be merged with the near duplicate in Inferencer + // and whether or not we should avoid using setInfo here as well to avoid potentially + // trampling 
on type history. def enhanceBounds(): Unit = { - val info0 = asym.info - val lo0 = info0.lowerBound - val hi0 = info0.upperBound - val tpinfo = tparam.info - val lo1 = tpinfo.lowerBound.subst(tparams, argtypes) - val hi1 = tpinfo.upperBound.subst(tparams, argtypes) + val TypeBounds(lo0, hi0) = abounds: @unchecked + val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes): @unchecked val lo = lub(List(lo0, lo1)) val hi = glb(List(hi0, hi1)) if (!(lo =:= lo0 && hi =:= hi0)) - asym setInfo logResult({ - val abounds = TypeBounds(lo0, hi0) - s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to" - })(TypeBounds(lo, hi)) + asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi)) } if (asym != null && asym.isAbstractType) { arg match { @@ -5419,14 +5882,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } val original = treeCopy.AppliedTypeTree(tree, tpt1, args1) val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original + val isPoly = tpt1.tpe.isInstanceOf[PolyType] if (isPoly) // did the type application (performed by appliedType) involve an unchecked beta-reduction? 
- TypeTreeWithDeferredRefCheck(){ () => + TypeTreeWithDeferredRefCheck(result) { () => // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap // we can't simply use original in refchecks because it does not contains types // (and the only typed trees we have been mangled so they're not quite the original tree anymore) checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "") result // you only get to see the wrapped tree after running this check :-p - } setType (result.tpe) setPos(result.pos) + }.setType(result.tpe).setPos(result.pos) else result } else if (tparams.isEmpty) { AppliedTypeNoParametersError(tree, tpt1.tpe) @@ -5438,13 +5902,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } + // pre-begin typed1 val sym: Symbol = tree.symbol if ((sym ne null) && (sym ne NoSymbol)) sym.initialize def typedPackageDef(pdef0: PackageDef) = { - val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats)) - val pid1 = context.withMode(ContextMode.InPackageClauseName)(typedQualifier(pdef.pid).asInstanceOf[RefTree]) + val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, namer.expandMacroAnnotations(pdef0.stats))) + val pid1 = context.withMode(ContextMode.InPackageClauseName)(typedPackageQualifier(pdef.pid).asInstanceOf[RefTree]) assert(sym.moduleClass ne NoSymbol, sym) + if (pid1.symbol.ne(NoSymbol) && !(pid1.symbol.hasPackageFlag || pid1.symbol.isModule)) + reporter.error(pdef.pos, s"There is name conflict between the ${pid1.symbol.fullName} and the package ${sym.fullName}.") val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls)) .typedStats(pdef.stats, NoSymbol) treeCopy.PackageDef(tree, pid1, stats1) setType NoType @@ -5471,34 +5938,47 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper treeCopy.Star(tree, 
typed(tree.elem, mode, pt)) setType makeFullyDefined(pt) } - def issueTryWarnings(tree: Try): Try = { - def checkForCatchAll(cdef: CaseDef) { - def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol - def warn(name: Name) = { - val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning." - context.warning(cdef.pat.pos, msg, WarningCategory.Other) + + def typedTry(tree: Try) = { + def warn(pos: Position, name: Name) = { + val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning." + context.warning(pos, msg, WarningCategory.Other) + } + def issueTryWarnings(tree: Try): Try = { + def checkForCatchAll(cdef: CaseDef): Unit = { + def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol + if (cdef.guard.isEmpty) cdef.pat match { + case Bind(name, i @ Ident(_)) if unbound(i) => warn(cdef.pat.pos, name) + case i @ Ident(name) if unbound(i) => warn(cdef.pat.pos, name) + case _ => + } } - if (cdef.guard.isEmpty) cdef.pat match { - case Bind(name, i @ Ident(_)) if unbound(i) => warn(name) - case i @ Ident(name) if unbound(i) => warn(name) - case _ => + if (!isPastTyper) tree match { + case Try(_, Nil, fin) => + if (fin eq EmptyTree) + context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.", WarningCategory.Other) + case Try(_, catches, _) => + catches foreach checkForCatchAll } + tree } - if (!isPastTyper) tree match { - case Try(_, Nil, fin) => - if (fin eq EmptyTree) - context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.", WarningCategory.Other) - case Try(_, catches, _) => - catches foreach checkForCatchAll - } - tree - } - def typedTry(tree: Try) = { - val Try(block, catches, fin) = tree + val Try(block, catches, finalizer) = tree val block1 = typed(block, pt) 
- val catches1 = typedCases(catches, ThrowableTpe, pt) - val fin1 = if (fin.isEmpty) fin else typed(fin, UnitTpe) + val cases = catches match { + case CaseDef(EmptyTree, EmptyTree, catchExpr) :: Nil => + val e = typed(catchExpr, functionType(List(ThrowableTpe), pt)) + val catcher = + if (isPartialFunctionType(e.tpe)) treeBuilder.makeCatchFromExpr(e) + else { + warn(e.pos, nme.WILDCARD) + treeBuilder.makeCatchFromFunc(e) + } + catcher :: Nil + case _ => catches + } + val catches1 = typedCases(cases, ThrowableTpe, pt) + val fin1 = if (finalizer.isEmpty) finalizer else typed(finalizer, UnitTpe) def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType @@ -5532,18 +6012,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // that typecheck must not trigger macro expansions, so we explicitly prohibit them // however we cannot do `context.withMacrosDisabled` // because `expr` might contain nested macro calls (see scala/bug#6673). - // Otherwise, eta-expand, passing the original tree, which is required in adapt - // for trees of the form `f() _`: if the method type takes implicits, the fallback - // strategy will use `f()`; else if not, original is used to distinguish an explicit - // method value from eta-expansion driven by an expected function type. + // Otherwise, (check for dead code, and) eta-expand. case MethodValue(expr) => - typed1(suppressMacroExpansion(expr), mode, pt) match { + // Need to type in FUNmode so that we accept a method type (which also means we can't use our pt), + // this does mean no overloading is performed. The main reason to ignore pt and move to FUNmode is that + // the `m` in `m _` could involve an implicit conversion, which will go through adapt after converting, + // which will run afoul of the restriction that a method-typed tree is only allowed when a function type is expected. 
+ // We peeled off the `_` marker for the typed1 call, so we don't know that the user has requested eta-expansion. + // See scala/bug#8299. + val funTyped = typed1(suppressMacroExpansion(expr), mode | FUNmode, WildcardType) + if (funTyped.tpe.isInstanceOf[OverloadedType]) inferExprAlternative(funTyped, pt) + funTyped match { case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef) - case methodValue => typedEta(checkDead(context, methodValue).updateAttachment(MethodValueAttachment)) + case methodValue => typedEta(checkDead(context, methodValue)) } case Typed(expr, tpt) => val tpt1 = typedType(tpt, mode) // type the ascribed type first - val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type + val exprWithAttachment = + if (definitions.isUnitType(tpt1.tpe)) expr.updateAttachment(TypedExpectingUnitAttachment) + else expr + val expr1 = typed(exprWithAttachment, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe } } @@ -5588,7 +6076,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedApplyDynamic(tree: ApplyDynamic) = { - assert(phase.erasedTypes) + assert(phase.erasedTypes, "typedApplyDynamic called before erasure") val qual1 = typed(tree.qual, AnyRefTpe) val args1 = tree.args mapConserve (arg => typed(arg, AnyRefTpe)) treeCopy.ApplyDynamic(tree, qual1, args1) setType AnyRefTpe @@ -5596,7 +6084,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedReferenceToBoxed(tree: ReferenceToBoxed) = { val id = tree.ident - val id1 = typed1(id, mode, pt) match { case id: Ident => id } + val id1 = typed1(id, mode, pt).asInstanceOf[Ident] // [Eugene] am I doing it right? 
val erasedTypes = phaseId(currentPeriod) >= currentRun.erasurePhase.id val tpe = capturedVariableType(id.symbol, erasedTypes = erasedTypes) @@ -5608,64 +6096,80 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // attempt to avoid warning about trees munged by macros def isMacroExpansion = { // context.tree is not the expandee; it is plain new SC(ps).m(args) - //context.tree exists (t => (t.pos includes lit.pos) && hasMacroExpansionAttachment(t)) + //context.tree.exists(t => t.pos.includes(lit.pos) && hasMacroExpansionAttachment(t)) // testing pos works and may suffice - //openMacros exists (_.macroApplication.pos includes lit.pos) + //openMacros.exists(_.macroApplication.pos.includes(lit.pos)) // tests whether the lit belongs to the expandee of an open macro - openMacros exists (_.macroApplication.attachments.get[MacroExpansionAttachment] match { - case Some(MacroExpansionAttachment(_, t: Tree)) => t exists (_ == lit) + openMacros.exists(_.macroApplication.attachments.get[MacroExpansionAttachment] match { + case Some(MacroExpansionAttachment(_, t: Tree)) => t.exists(_ eq lit) case _ => false }) } - // attempt to avoid warning about the special interpolated message string - // for implicitNotFound or any standard interpolation (with embedded $$). 
- def isRecognizablyNotForInterpolation = context.enclosingApply.tree match { - case Apply(Select(Apply(RefTree(_, nme.StringContext), _), _), _) => true - case Apply(Select(New(RefTree(_, tpnme.implicitNotFound)), _), _) => true - case _ => isMacroExpansion + val checkMacroExpansion = settings.warnMacros.value match { + case "both" | "after" => true + case _ => !isMacroExpansion } - def requiresNoArgs(tp: Type): Boolean = tp match { - case PolyType(_, restpe) => requiresNoArgs(restpe) - case MethodType(Nil, restpe) => requiresNoArgs(restpe) // may be a curried method - can't tell yet - case MethodType(p :: _, _) => p.isImplicit // implicit method requires no args - case _ => true // catches all others including NullaryMethodType + // An interpolation desugars to `StringContext(parts).m(args)`, so obviously not missing. + // `implicitNotFound` annotations have strings with `${A}`, so never warn for that. + // Also don't warn for macro expansion unless they ask for it. + def mightBeMissingInterpolation: Boolean = context.enclosingApply.tree match { + case Apply(Select(Apply(RefTree(_, nme.StringContextName), _), _), _) => false + case Apply(Select(New(RefTree(_, tpnme.implicitNotFound)), _), _) => false + case _ => checkMacroExpansion } - def isPlausible(m: Symbol) = !m.isPackage && m.alternatives.exists(x => requiresNoArgs(x.info)) - def maybeWarn(s: String): Unit = { - def warn(message: String) = context.warning(lit.pos, s"possible missing interpolator: $message", WarningCategory.LintMissingInterpolator) - def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol - val suspiciousExprs = InterpolatorCodeRegex findAllMatchIn s - def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(TermName(s drop 1))) - def isCheapIdent(expr: String) = (Character.isJavaIdentifierStart(expr.charAt(0)) && - expr.tail.forall(Character.isJavaIdentifierPart)) - def warnableExpr(expr: String) = !expr.isEmpty && (!isCheapIdent(expr) || 
isPlausible(suspiciousSym(TermName(expr)))) - - if (suspiciousExprs.nonEmpty) { - val exprs = (suspiciousExprs map (_ group 1)).toList + def warn(message: String) = context.warning(lit.pos, s"possible missing interpolator: $message", WarningCategory.LintMissingInterpolator) + def isPlausible(id: String): Boolean = { + def requiresNoArgs(tp: Type): Boolean = tp match { + case PolyType(_, restpe) => requiresNoArgs(restpe) + case MethodType(Nil, restpe) => requiresNoArgs(restpe) // may be a curried method - can't tell yet + case MethodType(p :: _, _) => p.isImplicit // implicit method requires no args + case _ => true // catches all others including NullaryMethodType + } + def isNullaryTerm: Boolean = { + val idName = TermName(id) + val maybe = context.lookupSymbol(idName, _ => true).symbol + maybe != NoSymbol && !maybe.hasPackageFlag && !maybe.isModule && + !context.owner.ownersIterator.exists(_.name.dropLocal == idName) && // avoid forcing an owner + maybe.alternatives.exists(x => requiresNoArgs(x.info)) + } + id == "this" || isNullaryTerm + } + val suspiciousExprs = InterpolatorCodeRegex.findAllMatchIn(s) + def suspiciousIdents = InterpolatorIdentRegex.findAllIn(s) + + if (suspiciousExprs.hasNext) { + def isCheapIdent(expr: String) = Character.isJavaIdentifierStart(expr.charAt(0)) && expr.tail.forall(Character.isJavaIdentifierPart) + def warnableExpr(expr: String) = !expr.isEmpty && (!isCheapIdent(expr) || isPlausible(expr)) + val exprs = suspiciousExprs.map(_ group 1).toList // short-circuit on leading ${} if (!exprs.head.isEmpty && exprs.exists(warnableExpr)) warn("detected an interpolated expression") // "${...}" - } else - suspiciousIdents find isPlausible foreach (sym => warn(s"detected interpolated identifier `$$${sym.name}`")) // "$id" + } else suspiciousIdents.toList match { + case Nil => + case id :: Nil => if (isPlausible(id.substring(1))) warn(s"detected interpolated identifier `$id`") + case all => if (all.forall(id => isPlausible(id.substring(1)))) 
warn(all.mkString("detected interpolated identifiers `", "`, `", "`")) + } } lit match { - case Literal(Constant(s: String)) if !isRecognizablyNotForInterpolation => maybeWarn(s) - case _ => + case Literal(Constant(s: String)) if mightBeMissingInterpolation => maybeWarn(s) + case _ => } } def typedLiteral(tree: Literal) = { if (settings.warnMissingInterpolator) warnMissingInterpolator(tree) - tree setType (if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value)) + tree.setType(if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value)) } def typedSingletonTypeTree(tree: SingletonTypeTree) = { - val refTyped = typedTypeSelectionQualifier(tree.ref) + val refTyped = typedTypeSelectionQualifier(tree.ref, WildcardType) if (refTyped.isErrorTyped) setError(tree) else { + // .resultType unwraps NullaryMethodType (accessor of a path) + // .deconst unwraps the ConstantType to a LiteralType (for literal-based singleton types) if (!treeInfo.admitsTypeSelection(refTyped)) UnstableTreeError(tree) else treeCopy.SingletonTypeTree(tree, refTyped).setType(refTyped.tpe.resultType.deconst) } @@ -5728,7 +6232,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case tree: SelectFromTypeTree => typedSelect(tree, typedType(tree.qualifier, mode), tree.name) case tree: CompoundTypeTree => typedCompoundTypeTree(tree) case tree: ExistentialTypeTree => typedExistentialTypeTree(tree) - case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure) + case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree") } @@ -5744,14 +6248,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // Trees not allowed during pattern mode. 
def typedOutsidePatternMode(tree: Tree): Tree = tree match { - case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt)) + case tree: Block => + val blockContext = context.makeNewScope(tree, context.owner) + try typerWithLocalContext(blockContext)(_.typedBlock(tree, mode, pt)) + finally context.undetparams ++= blockContext.undetparams case tree: If => typedIf(tree) case tree: TypeApply => typedTypeApply(tree) case tree: Function => typedFunction(tree) case tree: Match => typedVirtualizedMatch(tree) case tree: New => typedNew(tree) case tree: Assign => typedAssign(tree.lhs, tree.rhs) - case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck + case tree: NamedArg => typedAssign(tree.lhs, tree.rhs) case tree: Super => typedSuper(tree) case tree: Annotated => typedAnnotated(tree) case tree: Return => typedReturn(tree) @@ -5796,9 +6303,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (settings.areHotStatisticsEnabled) statistics.incCounter(visitsByType, tree.getClass) val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) val shouldPopTypingStack = shouldPrintTyping && typingStack.beforeNextTyped(tree, mode, pt, context) - try { - val ptPlugins = pluginsPt(pt, this, tree, mode) + def shouldInsertStabilizersImpl = tree match { + case _ if phase.erasedTypes || mode.in(APPSELmode) || isMacroImplRef(tree) => false + case _: Select | _: Apply | _: TypeApply => true + case _ => false + } + + val shouldInsertStabilizers = shouldInsertStabilizersImpl + val mode1: Mode = if (shouldInsertStabilizers) mode | APPSELmode else mode + val savedPendingStabilizer = context.pendingStabilizers + if (shouldInsertStabilizers) context.pendingStabilizers = Nil + + try { + val ptPlugins = pluginsPt(pt, this, tree, mode1) def retypingOk = ( context.retyping && (tree.tpe ne null) @@ -5810,11 +6328,12 @@ trait Typers 
extends Adaptations with Tags with TypersTracking with PatternTyper } val alreadyTyped = tree.tpe ne null val shouldPrint = !alreadyTyped && !phase.erasedTypes - val ptWild = if (mode.inPatternMode) + val ptWild = if (mode1.inPatternMode) ptPlugins // scala/bug#5022 don't widen pt for patterns as types flow from it to the case body. else dropExistential(ptPlugins) // FIXME: document why this is done. - val tree1: Tree = if (alreadyTyped) tree else typed1(tree, mode, ptWild) + val tree1: Tree = if (alreadyTyped) tree else typed1(tree, mode1, ptWild) + if (shouldPrint) typingStack.showTyped(tree1) @@ -5823,24 +6342,28 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tree1.tpe eq null) return setError(tree) - tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins) + tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode1, ptPlugins) - val result = + val adapted = if (tree1.isEmpty) tree1 else { - val result = adapt(tree1, mode, ptPlugins, tree) + val result = adapt(tree1, mode1, ptPlugins, tree) if (typerShouldExpandDeferredMacros) { macroExpandAll(this, result) } else result } + val result = + if (shouldInsertStabilizers) addStabilizers(context.pendingStabilizers, adapted) + else adapted + if (shouldPrint) typingStack.showAdapt(tree1, result, ptPlugins, context) if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result) - if (mode.inPatternMode && !mode.inPolyMode && result.isType) + if (mode1.inPatternMode && !mode1.inPolyMode && result.isType) PatternMustBeValue(result, pt) if (shouldPopTypingStack) typingStack.showPop(result) @@ -5852,7 +6375,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case ex: TypeError => tree.clearType() // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere. 
- typingStack.printTyping(tree, "caught %s: while typing %s".format(ex, tree)) //DEBUG + typingStack.printTyping(tree, s"caught $ex: while typing $tree") reportTypeError(context, tree.pos, ex) setError(tree) case ex: Exception => @@ -5864,6 +6387,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } finally { if (shouldPopTypingStack) typingStack.pop(tree) if (settings.areHotStatisticsEnabled) statistics.popTimer(byTypeStack, startByType) + if (shouldInsertStabilizers) context.pendingStabilizers = savedPendingStabilizer + } + } + + private def addStabilizers(newStabilizers: List[Tree], expr: Tree): Tree = { + if (newStabilizers.isEmpty) expr else { + devWarningIf(newStabilizers.forall(_.symbol.owner == context.owner))(s"${context.owner} - ${(newStabilizers.map(vd => (vd.symbol, vd.symbol.owner.fullNameString)), context.owner)}") + // Insert stabilizing ValDefs (if any) which might have been introduced during the typing of the original expression. + Block(newStabilizers.reverse, expr).setPos(expr.pos).setType(expr.tpe) } } @@ -5898,7 +6430,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * E.g. is tree occurs in a context like `tree.m`. */ @inline final def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree = - typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit + typed(checkRootOfQualifier(tree, mode), PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit /** Types qualifier `tree` of a select node. * E.g. is tree occurs in a context like `tree.m`. 
@@ -5908,6 +6440,50 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper @inline final def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType) + // if a package id is a selection from _root_ in scope, warn about semantics and set symbol for typedQualifier + @inline final def typedPackageQualifier(tree: Tree): Tree = typedQualifier(checkRootOfPackageQualifier(tree)) + + def checkRootOfPackageQualifier(q: Tree): Tree = { + q match { + case Select(id @ Ident(nme.ROOTPKG), _) if !id.hasExistingSymbol && id.hasAttachment[RootSelection.type] => + context.lookupSymbol(nme.ROOTPKG, p => p.hasPackageFlag && !p.isRootPackage) match { + case LookupSucceeded(_, sym) => + runReporting.warning( + id.pos, + s"${nme.ROOTPKG} in root position in package definition does not refer to the root package, but to ${sym.fullLocationString}, which is in scope", + WarningCategory.Other, + context.owner) + id.removeAttachment[RootSelection.type] + id.setSymbol(sym) + case _ => + } + case _ => + } + q + } + + /** If import from path starting with _root_, warn if there is a _root_ value in scope, + * and ensure _root_ can only be the root package in that position. 
+ */ + def checkRootOfQualifier(q: Tree, mode: Mode): Tree = { + q match { + case Ident(nme.ROOTPKG) if !q.hasExistingSymbol && q.hasAttachment[RootSelection.type] => + val startContext = if (mode.typingPatternOrTypePat) context.outer else context + startContext.lookupSymbol(nme.ROOTPKG, !_.isRootPackage) match { + case LookupSucceeded(_, sym) => + runReporting.warning( + q.pos, + s"${nme.ROOTPKG} in root position of qualifier refers to the root package, not ${sym.fullLocationString}, which is in scope", + WarningCategory.Other, + context.owner) + case _ => + } + q.setSymbol(rootMirror.RootPackage) + case _ => + } + q + } + /** Types function part of an application */ @inline final def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes) @@ -5950,7 +6526,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (pt != Kind.Wildcard && pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's * else context withinTypeConstructorAllowed typed(tree, NOmode, pt) - def typedHigherKindedType(tree: Tree, mode: Mode): Tree = + def typedHigherKindedType(tree: Tree, @unused mode: Mode): Tree = context withinTypeConstructorAllowed typed(tree) /** Types a type constructor tree used in a new or supertype */ @@ -5984,41 +6560,31 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tpe } - def computeMacroDefType(ddef: DefDef, pt: Type): Type = { + // called from `methodSig`. Macro defs must have an explicit type. + // The supplied expected type is ignored. 
+ def computeMacroDefType(ddef: DefDef, @unused pt: Type): Type = { assert(context.owner.isMacro, context.owner) assert(ddef.symbol.isMacro, ddef.symbol) val rhs1 = - if (transformed contains ddef.rhs) { - // macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap - // if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree` - // here we guard against this case + if (transformed contains ddef.rhs) transformed(ddef.rhs) - } else { + else { val rhs1 = typedMacroBody(this, ddef) transformed(ddef.rhs) = rhs1 rhs1 } - - val isMacroBodyOkay = !ddef.symbol.isErroneous && !(rhs1 exists (_.isErroneous)) && rhs1 != EmptyTree + val isMacroBodyOkay = !ddef.symbol.isErroneous && !rhs1.exists(_.isErroneous) && rhs1 != EmptyTree val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty if (isMacroBodyOkay && shouldInheritMacroImplReturnType) { - val commonMessage = "macro defs must have explicitly specified return types" def reportFailure() = { + val commonMessage = "macro defs must have explicitly specified return types" ddef.symbol.setFlag(IS_ERROR) context.error(ddef.pos, commonMessage) } - def reportWarning(inferredType: Type) = { - val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12" - context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)", "2.12.0") - } - computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match { - case ErrorType => ErrorType - case NothingTpe => NothingTpe - case NoType => reportFailure(); AnyTpe - case tpe => reportWarning(tpe); tpe - } - } else AnyTpe + reportFailure() + } + AnyTpe } @inline final def transformedOr(tree: Tree, op: => Tree): Tree = lookupTransformed(tree) match { @@ -6038,7 +6604,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else transformed remove tree private 
final def canSkipRhs(tree: Tree) = settings.Youtline.value && !tree.exists { - case Super(qual, mix) => + case Super(_, mix) => // conservative approximation of method bodies that may give rise to super accessors which must be // stored in pickle. context.owner.enclClass.isTrait || mix != tpnme.EMPTY @@ -6054,10 +6620,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper for (sym <- classes) { for ((ownAcc, superAcc) <- superConstructorCalls.getOrElse(sym, Nil)) { - // We have a corresponding paramter in the super class. + // We have a corresponding parameter in the super class. val superClazz = sym.superClass val alias = ( - superAcc.initialize.alias // Is the param accessor is an alias for a field further up the class heirarchy? + superAcc.initialize.alias // Is the param accessor an alias for a field further up the class hierarchy? orElse (superAcc getterIn superAcc.owner) // otherwise, lookup the accessor for the super filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias) // the accessor must be public ) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index 5e14a3ac9273..cc5e8fb89e43 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,7 @@ package scala.tools.nsc package typechecker import Mode._ +import scala.annotation._ trait TypersTracking { self: Analyzer => @@ -84,7 +85,8 @@ trait TypersTracking { def indented(s: String): String = if (s == "") "" else currentIndent + s.replaceAll("\n", "\n" + currentIndent) - @inline final def runWith[T](t: Tree)(body: => T): T = { + @annotation.unused + @inline private final def runWith[T](t: Tree)(body: => T): T = { push(t) try body finally pop(t) } @@ -98,15 +100,15 @@ trait TypersTracking { trees = trees.tail depth -= 1 } - def show(s: String) { if (s != "") out.println(s) } + def show(s: String): Unit = { if (s != "") out.println(s) } - def showPush(tree: Tree, context: Context) { + def showPush(tree: Tree, context: Context): Unit = { showPush(tree, NOmode, WildcardType, context) } - def showPush(tree: Tree, mode: Mode, pt: Type, context: Context) { + def showPush(tree: Tree, mode: Mode, pt: Type, context: Context): Unit = { def tree_s = truncAndOneLine(ptTree(tree)) def pt_s = if (pt.isWildcard || context.inTypeConstructorAllowed) "" else s": pt=$pt" - def all_s = List(tree_s, pt_s, mode, fullSiteString(context)) filterNot (_ == "") mkString " " + def all_s = List(tree_s, pt_s, mode.toString, fullSiteString(context)).filterNot(_.isEmpty).mkString(" ") atLowerIndent(show(indented("""|-- """ + all_s))) } @@ -115,7 +117,7 @@ trait TypersTracking { show(resetIfEmpty(indented("""\-> """ + s))) typedTree } - def showAdapt(original: Tree, adapted: Tree, pt: Type, context: Context) { + def showAdapt(original: Tree, adapted: Tree, pt: Type, @unused context: Context): Unit = { if (!noPrintAdapt(original, adapted)) { def tree_s1 = inLightCyan(truncAndOneLine(ptTree(original))) def pt_s = if (pt.isWildcard) "" else s" based on pt $pt" @@ -126,7 +128,7 @@ trait TypersTracking { show(indented(s"[adapt] $tree_s1 $tree_s2")) } } - def showTyped(tree: Tree) { + def showTyped(tree: Tree): Unit = { def class_s = tree match { case _: RefTree => "" case _ => 
" " + tree.shortClass @@ -135,25 +137,11 @@ trait TypersTracking { show(indented(s"[typed$class_s] " + truncAndOneLine(ptTree(tree)))) } - def nextTyped(tree: Tree, mode: Mode, pt: Type, context: Context)(body: => Tree): Tree = - nextTypedInternal(tree, showPush(tree, mode, pt, context))(body) - - def nextTypedInternal(tree: Tree, pushFn: => Unit)(body: => Tree): Tree = ( - if (noPrintTyping(tree)) - body - else - runWith(tree) { pushFn ; showPop(body) } - ) - def beforeNextTyped(tree: Tree, mode: Mode, pt: Type, context: Context): Boolean = if (noPrintTyping(tree)) false else { push(tree) showPush(tree, mode, pt, context) true } - def afterNextTyped(tree: Tree, typedTree: Tree): Unit = { - showPop(typedTree) - pop(tree) - } @inline final def printTyping(tree: Tree, s: => String) = { if (printTypings && !noPrintTyping(tree)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index 200a92bfdeaf..8123cea99abc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,20 +13,22 @@ package scala.tools.nsc package typechecker +import scala.annotation.tailrec import symtab.Flags._ import scala.reflect.internal.util.ListOfNil +import scala.tools.nsc.Reporting.WarningCategory.Scala3Migration +import scala.util.chaining._ /* * @author Martin Odersky - * @version 1.0 */ trait Unapplies extends ast.TreeDSL { self: Analyzer => import global._ import definitions._ - import CODE.{ CASE => _, _ } - import treeInfo.{ isRepeatedParamType, isByNameParamType } + import CODE.{CASE => _, _} + import treeInfo.{isByNameParamType, isRepeatedParamType} private def unapplyParamName = nme.x_0 private def caseMods = Modifiers(SYNTHETIC | CASE) @@ -42,10 +44,33 @@ trait Unapplies extends ast.TreeDSL { */ def directUnapplyMember(tp: Type): Symbol = (tp member nme.unapply) orElse (tp member nme.unapplySeq) - /** Filters out unapplies with multiple (non-implicit) parameter lists, - * as they cannot be used as extractors + /** Filters out unapplies with invalid shapes: extractor methods must have + * either one unary param list or one unary param list and an implicit param list. 
*/ - def unapplyMember(tp: Type): Symbol = directUnapplyMember(tp) filter (sym => !hasMultipleNonImplicitParamLists(sym)) + def unapplyMember(tp: Type): Symbol = { + def qualifies(sym: Symbol) = + validateUnapplyMember(sym.info) == UnapplyMemberResult.Ok + directUnapplyMember(tp) filter qualifies + } + + // this slight extravagance opens this to reuse in error message generation + object UnapplyMemberResult extends Enumeration { + val Ok, NoParams, MultiParams, MultiParamss, VarArgs, Other = Value + } + @tailrec final def validateUnapplyMember(tp: Type): UnapplyMemberResult.Value = { + import UnapplyMemberResult._ + tp match { + case PolyType(_, restpe) => validateUnapplyMember(restpe) + case MethodType(Nil, _) | NullaryMethodType(_) => NoParams + case MethodType(_ :: Nil, snd: MethodType) => + if (snd.isImplicit) Ok else MultiParamss + case MethodType(x :: Nil, _) => + if (definitions.isRepeated(x)) VarArgs + else Ok + case MethodType(_, _) => MultiParams + case _ => Other + } + } object HasUnapply { def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption @@ -60,55 +85,44 @@ trait Unapplies extends ast.TreeDSL { } private def constrParamss(cdef: ClassDef): List[List[ValDef]] = { - val prunedClassDef = deriveClassDef(cdef)(tmpl => deriveTemplate(tmpl)(stats => treeInfo.firstConstructor(stats).duplicate :: Nil)) - val ClassDef(_, _, _, Template(_, _, firstConstructor :: Nil)) = resetAttrs(prunedClassDef) - val DefDef(_, _, _, vparamss, _, _) = firstConstructor - vparamss + resetAttrs(deriveClassDef(cdef)(deriveTemplate(_)(treeInfo.firstConstructor(_).duplicate :: Nil))) match { + case ClassDef(_, _, _, Template(_, _, DefDef(_, _, _, vparamss, _, _) :: Nil)) => vparamss + case x => throw new MatchError(x) + } } private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = { - val prunedClassDef = deriveClassDef(cdef)(tmpl => Template(Nil, noSelfType, Nil)) - val ClassDef(_, _, tparams, _) = resetAttrs(prunedClassDef.duplicate) - val tparamsInvariant 
= tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT))) - tparamsInvariant - } - - /** The return value of an unapply method of a case class C[Ts] - * @param param The name of the parameter of the unapply method, assumed to be of type C[Ts] - * @param caseclazz The case class C[Ts] - */ - private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = { - def caseFieldAccessorValue(selector: ValDef): Tree = { - // Selecting by name seems to be the most straight forward way here to - // avoid forcing the symbol of the case class in order to list the accessors. - def selectByName = Ident(param) DOT caseAccessorName(caseclazz.symbol, selector.name) - // But, that gives a misleading error message in neg/t1422.scala, where a case - // class has an illegal private[this] parameter. We can detect this by checking - // the modifiers on the param accessors. - // We just generate a call to that param accessor here, which gives us an inaccessible - // symbol error, as before. 
- def localAccessor = caseclazz.impl.body find { - case t @ ValOrDefDef(mods, selector.name, _, _) => mods.isPrivateLocal - case _ => false - } - localAccessor.fold(selectByName)(Ident(param) DOT _.symbol) + resetAttrs(deriveClassDef(cdef)(_ => Template(Nil, noSelfType, Nil)).duplicate) match { + case ClassDef(_, _, tparams, _) => tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT))) + case x => throw new MatchError(x) } + } - // Working with trees, rather than symbols, to avoid cycles like scala/bug#5082 - constrParamss(caseclazz).take(1).flatten match { - case Nil => TRUE - case xs => SOME(xs map caseFieldAccessorValue: _*) - } + private object ApplyAccess { + type Flags = Int + final val Default = 0 + final val Warn = 1 + final val Inherit = 2 + def isWarn(access: Flags): Boolean = access.&(Warn) != 0 + def isInherit(access: Flags): Boolean = access.&(Inherit) != 0 + } + private def applyAccess(mods: Modifiers): ApplyAccess.Flags = { + import ApplyAccess._ + val changeModsIn3 = mods.hasFlag(PRIVATE) || (!mods.hasFlag(PROTECTED) && mods.hasAccessBoundary) + if (!changeModsIn3) Default + else if (currentRun.sourceFeatures.caseApplyCopyAccess) Inherit + else if (currentRun.isScala3) Warn + else Default } /** The module corresponding to a case class; overrides toString to show the module's name */ def caseModuleDef(cdef: ClassDef): ModuleDef = { val params = constrParamss(cdef) - def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && (params match { + def inheritFromFun = !currentRun.sourceFeatures.caseCompanionFunction && !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && (params match { case List(ps) if ps.length <= MaxFunctionArity => true case _ => false - }) + }) && !ApplyAccess.isInherit(applyAccess(constrMods(cdef))) def createFun = { def primaries = params.head map (_.tpt) gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true) @@ -146,38 +160,81 @@ trait Unapplies extends 
ast.TreeDSL { ) } + + private def constrMods(cdef: ClassDef): Modifiers = treeInfo.firstConstructorMods(cdef.impl.body) + /** The apply method corresponding to a case class */ - def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef) + def caseModuleApplyMeth(cdef: ClassDef): DefDef = { + val inheritedMods = constrMods(cdef) + val access = applyAccess(inheritedMods) + val mods = + if (ApplyAccess.isInherit(access)) (caseMods | (inheritedMods.flags & PRIVATE)).copy(privateWithin = inheritedMods.privateWithin) + else caseMods + factoryMeth(mods, nme.apply, cdef).tap(m => + if (ApplyAccess.isWarn(access)) + m.updateAttachment(CaseApplyInheritAccess) + ) + } /** The unapply method corresponding to a case class */ def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = { - val tparams = constrTparamsInvariant(cdef) - val method = constrParamss(cdef) match { + val tparams = constrTparamsInvariant(cdef) + val method = constrParamss(cdef) match { case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq case _ => nme.unapply } - val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) - val resultType = if (!currentRun.isScala212) TypeTree() else { // fix for scala/bug#6541 under -Xsource:2.12 - def repeatedToSeq(tp: Tree) = tp match { + val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) + val resultType = { // fix for scala/bug#6541 under -Xsource:2.12 + def repeatedToSeq(tp: Tree) = tp match { case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps) case _ => tp } + constrParamss(cdef) match { case Nil | Nil :: _ => gen.rootScalaDot(tpnme.Boolean) - case params :: _ => + case params :: _ => val constrParamTypes = params.map(param => repeatedToSeq(param.tpt)) AppliedTypeTree(gen.rootScalaDot(tpnme.Option), 
List(treeBuilder.makeTupleType(constrParamTypes))) } } - val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule) - val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName)) - atPos(cdef.pos.focus)( - DefDef(caseMods, method, tparams, List(cparams), resultType, body) - ) + def selectCaseFieldAccessor(constrParam: ValDef): Tree = { + val unapplyParam = Ident(unapplyParamName) + + // Selecting by name seems to be the most straight forward way here to + // avoid forcing the symbol of the case class in order to list the accessors. + // + // But, that gives a misleading error message in neg/t1422.scala, where a case + // class has an illegal private[this] parameter. We can detect this by checking + // the modifiers on the param accessors. + // We just generate a call to that param accessor here, which gives us an inaccessible + // symbol error, as before. + val accSel = + cdef.impl.body collectFirst { + case localAccessor@ValOrDefDef(mods, constrParam.name, _, _) if mods.isPrivateLocal => Select(unapplyParam, localAccessor.symbol) + } getOrElse Select(unapplyParam, caseAccessorName(cdef.symbol, constrParam.name)) + + constrParam.tpt match { + case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => Typed(accSel, AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps)) + case _ => accSel + } + } + + val body = + If(Ident(unapplyParamName) OBJ_EQ NULL, + if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule), + // Working with trees, rather than symbols, to avoid cycles like scala/bug#5082 + constrParamss(cdef).take(1).flatten match { + case Nil => TRUE + case xs => SOME(xs map selectCaseFieldAccessor: _*) + } + ) + + + atPos(cdef.pos.focus)(DefDef(caseMods, method, tparams, List(cparams), resultType, body)) } /** @@ -208,11 +265,16 @@ trait Unapplies extends ast.TreeDSL { * ClassDef of the case class. 
*/ def caseClassCopyMeth(cdef: ClassDef): Option[DefDef] = { - def isDisallowed(vd: ValDef) = isRepeatedParamType(vd.tpt) || isByNameParamType(vd.tpt) - val classParamss = constrParamss(cdef) - - if (cdef.symbol.hasAbstractFlag || mexists(classParamss)(isDisallowed)) None - else { + val classParamss = constrParamss(cdef) + def copyOK = { + def warn() = if (currentRun.isScala3) runReporting.warning(cdef.namePos, "case `copy` method is allowed to have by-name parameters under Scala 3 (or with -Xsource-features:case-copy-by-name)", Scala3Migration, cdef.symbol) + def isAllowed(vd: ValDef) = { + def checkByName = currentRun.sourceFeatures.caseCopyByName || !isByNameParamType(vd.tpt).tap(if (_) warn()) + !isRepeatedParamType(vd.tpt) && checkByName + } + !cdef.symbol.hasAbstractFlag && mforall(classParamss)(isAllowed) + } + def synthesizeCopy = { def makeCopyParam(vd: ValDef, putDefault: Boolean) = { val rhs = if (putDefault) toIdent(vd) else EmptyTree val flags = PARAM | (vd.mods.flags & IMPLICIT) | (if (putDefault) DEFAULTPARAM else 0) @@ -220,21 +282,32 @@ trait Unapplies extends ast.TreeDSL { val tpt = atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt) treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs) } - val tparams = constrTparamsInvariant(cdef) val paramss = classParamss match { case Nil => Nil case ps :: pss => ps.map(makeCopyParam(_, putDefault = true)) :: mmap(pss)(makeCopyParam(_, putDefault = false)) } - val classTpe = classType(cdef, tparams) val argss = mmap(paramss)(toIdent) val body: Tree = New(classTpe, argss) - val copyDefDef = atPos(cdef.pos.focus)( - DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, TypeTree(), body) + val synth = Modifiers(SYNTHETIC) + val copyMods = + if (currentRun.isScala3) { + val inheritedMods = constrMods(cdef) + val mods3 = Modifiers(SYNTHETIC | (inheritedMods.flags & AccessFlags), inheritedMods.privateWithin) + if (currentRun.sourceFeatures.caseApplyCopyAccess) mods3 + else { + if (mods3 != synth) + 
runReporting.warning(cdef.namePos, "access modifiers for `copy` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access)", Scala3Migration, cdef.symbol) + synth + } + } + else synth + atPos(cdef.pos.focus)( + DefDef(copyMods, nme.copy, tparams, paramss, TypeTree(), body) ) - Some(copyDefDef) } + if (copyOK) Some(synthesizeCopy) else None } } diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala new file mode 100644 index 000000000000..73a6a508caec --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala @@ -0,0 +1,99 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.annotation.tailrec +import scala.util.matching.Regex + +trait SplainData { + self: Analyzer => + + import global._ + + sealed trait ImplicitErrorSpecifics + + object ImplicitErrorSpecifics { + case class NotFound(param: Symbol) extends ImplicitErrorSpecifics + + case class NonconformantBounds( + targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError], + ) extends ImplicitErrorSpecifics + } + + object ImplicitErrors { + var stack: List[Type] = Nil + var errors: List[ImplicitError] = Nil + + def push(error: ImplicitError): Unit = errors ::= error + def nesting: Int = stack.length - 1 + def nested: Boolean = stack.nonEmpty + def removeErrorsFor(tpe: Type): Unit = errors = errors.dropWhile(_.tpe == tpe) + + def startSearch(expectedType: Type): Unit = { + if (settings.Vimplicits.value) { + if (!nested) errors = List() + stack = expectedType :: stack + } + } + + def finishSearch(success: Boolean, expectedType: Type): Unit = { + if (settings.Vimplicits.value) { + if (success) removeErrorsFor(expectedType) + stack = stack.drop(1) + } + } + } + + case class ImplicitError(tpe: Type, candidate: Tree, nesting: Int, specifics: ImplicitErrorSpecifics) { + import ImplicitError._ + + override def equals(other: Any) = other match { + case o: ImplicitError => o.tpe.toString == tpe.toString && candidateName(this) == candidateName(o) + case _ => false + } + + override def hashCode = (tpe.toString.##, ImplicitError.candidateName(this).##).## + override def toString = s"ImplicitError(${shortName(tpe.toString)}, ${shortName(candidate.toString)}), $nesting, $specifics)" + } + + object ImplicitError { + def unapplyCandidate(e: ImplicitError): Tree = unapplyRecursively(e.candidate) + + @tailrec + private def unapplyRecursively(tree: Tree): Tree = + tree match { + case TypeApply(fun, _) => unapplyRecursively(fun) + case Apply(fun, _) => unapplyRecursively(fun) + case a => a + } + + def 
cleanCandidate(e: ImplicitError): String = + unapplyCandidate(e).toString match { + case ImplicitError.candidateRegex(suf) => suf + case a => a + } + + def candidateName(e: ImplicitError): String = + unapplyCandidate(e) match { + case Select(_, name) => name.toString + case Ident(name) => name.toString + case a => a.toString + } + + val candidateRegex: Regex = """.*\.this\.(.*)""".r + + def shortName(ident: String): String = ident.substring(ident.lastIndexOf(".") + 1) + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala new file mode 100644 index 000000000000..df1844b2cc26 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala @@ -0,0 +1,26 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +trait SplainDiagnostics extends splain.SplainFormatting { + self: Analyzer => + + import global._ + + def splainFoundReqMsg(found: Type, req: Type): String = { + if (settings.VtypeDiffs.value) ";\n" + showFormattedL(formatDiff(found, req, top = true), break = true).indent.joinLines + else "" + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala new file mode 100644 index 000000000000..a9caa525c607 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala @@ -0,0 +1,64 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +trait SplainErrors { self: Analyzer with SplainFormatting => + import global._ + + def splainPushNotFound(tree: Tree, param: Symbol): Unit = + ImplicitErrors.stack.headOption.foreach { pt => + val specifics = ImplicitErrorSpecifics.NotFound(param) + ImplicitErrors.push(ImplicitError(pt, tree, ImplicitErrors.nesting, specifics)) + } + + def splainPushOrReportNotFound(tree: Tree, param: Symbol, annotationMsg: String): String = + if (settings.Vimplicits.value) + if (ImplicitErrors.nested) { + splainPushNotFound(tree, param) + "" + } + else pluginsNoImplicitFoundError(param, ImplicitErrors.errors, formatImplicitError(param, ImplicitErrors.errors, annotationMsg)) + else "" + + def splainPushNonconformantBonds( + tpe: Type, + candidate: Tree, + targs: List[Type], + tparams: List[Symbol], + originalError: Option[AbsTypeError], + ): Unit = { + if (settings.Vimplicits.value) { + val specifics = ImplicitErrorSpecifics.NonconformantBounds(targs, tparams, originalError) + ImplicitErrors.push(ImplicitError(tpe, candidate, ImplicitErrors.nesting, specifics)) + } + } + + def splainPushImplicitSearchFailure(implicitTree: Tree, expectedType: Type, originalError: AbsTypeError): Unit = { + def pushImpFailure(fun: Tree, args: List[Tree]): Unit = { + fun.tpe match { + case PolyType(tparams, _) if tparams.nonEmpty && sameLength(tparams, args) => + splainPushNonconformantBonds(expectedType, implicitTree, mapList(args)(_.tpe), tparams, Some(originalError)) + case _ => + } + } + if (settings.Vimplicits.value) { + implicitTree match { + case TypeApply(fun, args) => pushImpFailure(fun, args) + case Apply(TypeApply(fun, args), _) => pushImpFailure(fun, args) + case _ => + } + } + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala 
b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala new file mode 100644 index 000000000000..c73b9c1f7846 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala @@ -0,0 +1,92 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.annotation.tailrec + +object Formatted { + @tailrec def comparator(formatted: Formatted): String = formatted match { + case Infix(left, _, _, _) => comparator(left) + case Simple(tpe) => tpe.name + case Qualified(Nil, tpe) => tpe.name + case Qualified(path, tpe) => s"${path.mkString}${tpe.name}" + case UnitForm => "()" + case Applied(cons, _) => comparator(cons) + case TupleForm(Nil) => "()" + case TupleForm(h :: _) => comparator(h) + case FunctionForm(Nil, ret, _) => comparator(ret) + case FunctionForm(h :: _, _, _) => comparator(h) + case RefinedForm(Nil, _) => "()" + case RefinedForm(h :: _, _) => comparator(h) + case Diff(l, _) => comparator(l) + case Decl(sym, _) => comparator(sym) + case DeclDiff(sym, _, _) => comparator(sym) + case ByName(tpe) => comparator(tpe) + } + + implicit val Ord: Ordering[Formatted] = (x, y) => Ordering[String].compare(comparator(x), comparator(y)) +} + +sealed trait Formatted { + def length: Int = this match { + case Infix(infix, left, right, top) => infix.length + left.length + right.length + 2 + case Simple(tpe) => tpe.name.length + case Qualified(path, tpe) => path.map(_.length).sum + path.length + tpe.name.length + case UnitForm => 4 + case Applied(cons, args) => args.map(_.length).sum + ( args.length - 1) * 2 + cons.length + 2 + case TupleForm(elems) => elems.map(_.length).sum + (elems.length - 1) + 2 + case 
FunctionForm(args, ret, top) => args.map(_.length).sum + ( args.length - 1) + 2 + ret.length + 4 + case RefinedForm(elems, decls) => elems.map(_.length).sum + (elems.length - 1) * 6 + case Diff(lhs, rhs) => lhs.length + rhs.length + 1 + case Decl(sym, rhs) => sym.length + rhs.length + 8 + case DeclDiff(sym, lhs, rhs) => sym.length + lhs.length + rhs.length + 9 + case ByName(tpe) => tpe.length + 5 + } +} + +sealed trait FormattedName { val name: String } +case class SimpleName(name: String) extends FormattedName +case class InfixName(name: String) extends FormattedName + +case class Infix(infix: Formatted, left: Formatted, right: Formatted, top: Boolean) extends Formatted +case class Simple(tpe: FormattedName) extends Formatted +case class Qualified(path: List[String], tpe: FormattedName) extends Formatted +case object UnitForm extends Formatted +case class Applied(cons: Formatted, args: List[Formatted]) extends Formatted +case class TupleForm(elems: List[Formatted]) extends Formatted +case class FunctionForm(args: List[Formatted], ret: Formatted, top: Boolean) extends Formatted +case class RefinedForm(elems: List[Formatted], decls: List[Formatted]) extends Formatted +case class Diff(left: Formatted, right: Formatted) extends Formatted +case class Decl(sym: Formatted, rhs: Formatted) extends Formatted +case class DeclDiff(sym: Formatted, left: Formatted, right: Formatted) extends Formatted +case class ByName(tpe: Formatted) extends Formatted + +sealed trait TypeRepr { + def flat: String + def lines: List[String] + def tokenize: String = lines.mkString(" ") + def joinLines: String = lines.mkString("\n") + def indent: TypeRepr +} + +case class BrokenType(lines: List[String]) extends TypeRepr { + def flat = lines.mkString(" ") + def indent = BrokenType(lines.map(" " + _)) +} + +case class FlatType(flat: String) extends TypeRepr { + def lines = List(flat) + def indent = FlatType(s" $flat") +} diff --git 
a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala new file mode 100644 index 000000000000..25c341f328fc --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala @@ -0,0 +1,808 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.collection.immutable.{List, Nil, Seq} +import scala.collection.mutable +import scala.language.implicitConversions + +class FormatCache[K, V](cache: mutable.Map[K, V]) { + def apply(k: K, orElse: => V): V = cache.getOrElseUpdate(k, orElse) +} + +object FormatCache { + def apply[K, V]() = new FormatCache[K, V](mutable.Map()) +} + +trait SplainFormatters { + self: Analyzer => + + import global._ + import definitions._ + + implicit def asSimpleName(s: String): SimpleName = SimpleName(s) + + def formatType(tpe: Type, top: Boolean): Formatted + + object Refined { + def unapply(tpe: Type): Option[(List[Type], Scope)] = tpe match { + case RefinedType(parents, decls) => Some((parents, decls)) + case t @ SingleType(_, _) => unapply(t.underlying) + case _ => None + } + } + + trait SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] + } + + object FunctionFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted) = { + if (simple.startsWith("Function")) { + val fmtArgs = 
formattedArgs + val (params, returnt) = fmtArgs.splitAt(fmtArgs.length - 1) + Some(FunctionForm(params, returnt.headOption.getOrElse(UnitForm), top)) + } else None + } + + def diff(left: Type, right: Type, top: Boolean) = None + } + + object TupleFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean + )(rec: (A, Boolean) => Formatted) = { + if (simple.startsWith("Tuple")) Some(TupleForm(formattedArgs)) + else None + } + + def diff(left: Type, right: Type, top: Boolean) = None + } + + object RefinedFormatter extends SpecialFormatter { + + object DeclSymbol { + def unapply(sym: Symbol): Option[(Formatted, Formatted)] = + if (sym.hasRawInfo) Some((Simple(sym.simpleName.toString), formatType(sym.rawInfo, top = true))) + else None + } + + lazy val ignoredTypes: List[Type] = List(typeOf[Object], typeOf[Any], typeOf[AnyRef]) + + def sanitizeParents: List[Type] => List[Type] = { ps => + val tpes = ps.distinct + val result = tpes.filterNot(t => ignoredTypes.exists(_ =:= t)) + + if (result.isEmpty) tpes.headOption.toList + else result + } + + object Refined { + def unapply(tpe: Type): Option[(List[Type], Scope)] = + tpe match { + case TypeRef(pre, sym, List(RefinedType(parents, decls))) + if decls.isEmpty && pre.typeSymbol.fullName == "zio" && sym.fullName == "zio.Has" => + val sanitized = sanitizeParents(parents) + if (sanitized.length == 1) + Some((List(TypeRef(pre, sym, sanitized.headOption.toList)), decls)) + else + None + case RefinedType(types, scope) => + if (scope.isEmpty) { + val subtypes = types.map(_.dealias).flatMap { + case Refined(types, _) => + types + case tpe => + List(tpe) + } + Some((subtypes, scope)) + } else + Some((types, scope)) + case t@SingleType(_, _) => + unapply(t.underlying) + case _ => + None + } + } + + def formatDecl: Symbol => Formatted = { + case DeclSymbol(n, t) => Decl(n, t) + case sym => Simple(sym.toString) + } + + override def apply[A]( + tpe: 
Type, + simple: String, + args: List[A], + formattedArgs: => List[Formatted], + top: Boolean + )(rec: (A, Boolean) => Formatted): Option[Formatted] = { + tpe match { + case Refined(parents, decls) => + Some(RefinedForm(sanitizeParents(parents).map(formatType(_, top)), decls.toList.map(formatDecl))) + case _ => None + } + } + + val none: Formatted = Simple("") + + def separate[A](left: List[A], right: List[A]): (List[A], List[A], List[A]) = { + val leftS = Set(left: _*) + val rightS = Set(right: _*) + val common = leftS.intersect(rightS) + val uniqueLeft = leftS -- common + val uniqueRight = rightS -- common + (common.toList, uniqueLeft.toList, uniqueRight.toList) + } + + def matchTypes(left: List[Type], right: List[Type]): List[Formatted] = { + val (common, uniqueLeft, uniqueRight) = + separate(left.map(formatType(_, top = true)), right.map(formatType(_, top = true))) + val diffs = uniqueLeft + .zipAll(uniqueRight, none, none) + .map { + case (l, r) => + Diff(l, r) + } + common ++ diffs + } + + def filterDecls(syms: List[Symbol]): List[(Formatted, Formatted)] = + syms.collect { + case DeclSymbol(sym, rhs) => + (sym, rhs) + } + + def matchDecls(left: List[Symbol], right: List[Symbol]): List[Formatted] = { + val (common, uniqueLeft, uniqueRight) = separate(filterDecls(left), filterDecls(right)) + val diffs = uniqueLeft + .map(Some(_)) + .zipAll(uniqueRight.map(Some(_)), None, None) + .collect { + case (Some((sym, l)), Some((_, r))) => DeclDiff(sym, l, r) + case (None, Some((sym, r))) => DeclDiff(sym, none, r) + case (Some((sym, l)), None) => DeclDiff(sym, l, none) + } + common.map { + case (sym, rhs) => Decl(sym, rhs) + } ++ diffs + } + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = + (left, right) match { + case (Refined(leftParents, leftDecls), Refined(rightParents, rightDecls)) => + val parents = matchTypes(sanitizeParents(leftParents), sanitizeParents(rightParents)).sorted + val decls = matchDecls(leftDecls.toList, 
rightDecls.toList).sorted + Some(RefinedForm(parents, decls)) + case _ => None + } + } + + object ByNameFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = tpe match { + case TypeRef(_, ByNameParamClass, List(a)) => Some(ByName(formatType(a, top = true))) + case _ => None + } + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = None + } +} + +object SplainFormatting { + + import scala.reflect.internal.TypeDebugging.AnsiColor._ + + val ELLIPSIS: String = "⋮".blue +} + +trait SplainFormatting extends SplainFormatters { + self: Analyzer => + + import global._ + + import SplainFormatting._ + import scala.reflect.internal.TypeDebugging.AnsiColor._ + + case class ImplicitErrorLink( + fromTree: ImplicitError, + fromHistory: DivergentImplicitTypeError + ) { + + val sameCandidateTree: Boolean = fromTree.candidate equalsStructure fromHistory.underlyingTree + + val samePendingType: Boolean = fromTree.specifics match { + case ss: ImplicitErrorSpecifics.NotFound => + fromHistory.pt0 =:= ss.param.tpe + case _ => + false + } + + val moreSpecificPendingType: Boolean = fromTree.specifics match { + case ss: ImplicitErrorSpecifics.NotFound => + fromHistory.pt0 <:< ss.param.tpe + case _ => + false + } + + val sameStartingWith: Boolean = { + fromHistory.sym.fullLocationString == fromTree.candidate.symbol.fullLocationString + } + + lazy val divergingSearchStartingWithHere: Boolean = sameStartingWith + + lazy val divergingSearchDiscoveredHere: Boolean = sameCandidateTree && moreSpecificPendingType + } + + object ImplicitErrorLink {} + + case class ImplicitErrorTree( + error: ImplicitError, + children: Seq[ImplicitErrorTree] = Nil + ) { + + import ImplicitErrorTree._ + + def doCollectFull(alwaysDisplayRoot: Boolean = false): Seq[ErrorNode] = + if (children.isEmpty) Seq(ErrorNode(error, alwaysShow = true)) + else { + + 
Seq(ErrorNode(error, alwaysShow = alwaysDisplayRoot)) ++ { + + if (children.size >= 2) children.flatMap(_.doCollectFull(alwaysDisplayRoot = true)) + else children.flatMap(_.doCollectFull(alwaysDisplayRoot = false)) + } + } + + lazy val collectFull: Seq[ErrorNode] = doCollectFull(alwaysDisplayRoot = true) + + lazy val collectCompact: Seq[ErrorNode] = { + + val displayed = collectFull.zipWithIndex.filter { + case (v, _) => + v.alwaysShow + } + + val ellipsisIndices = displayed.map(_._2 - 1).toSet + (collectFull.size - 1) + + val withEllipsis = displayed.map { + case (v, i) => + if (!ellipsisIndices.contains(i)) v.copy(showEllipsis = true) + else v + } + + withEllipsis + } + + case class FormattedChain( + source: Seq[ErrorNode] + ) { + + val toList: List[String] = { + val collected = source.toList + val baseIndent = collected.headOption.map(_.nesting).getOrElse(0) + + val formatted = collected.map { v => + val formatted = v.formatted + if (v.showEllipsis) formatted.copy(_2 = formatted._2 :+ ELLIPSIS) + else formatted + } + + indentTree(formatted, baseIndent) + } + + override lazy val toString: String = toList.mkString("\n") + } + + object FormattedChain { + + object Full extends FormattedChain(collectFull) + + object Compact extends FormattedChain(collectCompact) + + val display: FormattedChain = if (settings.VimplicitsVerboseTree.value) Full else Compact + } + + override def toString: String = FormattedChain.Full.toString + } + + object ImplicitErrorTree { + + case class ErrorNode( + error: ImplicitError, + alwaysShow: Boolean, + showEllipsis: Boolean = false + ) { + + def nesting: RunId = error.nesting + + val formatted: (String, List[String], RunId) = + formatNestedImplicit(error) + } + + def fromNode( + Node: ImplicitError, + offsprings: List[ImplicitError] + ): ImplicitErrorTree = { + val topNesting = Node.nesting + + val children = fromChildren( + offsprings, + topNesting + ) + + ImplicitErrorTree(Node, children) + } + + def fromChildren( + offsprings: 
List[ImplicitError], + topNesting: Int + ): List[ImplicitErrorTree] = { + + if (offsprings.isEmpty) + return Nil + + val minNesting = offsprings.map(v => v.nesting).min + + if (minNesting < topNesting + 1) + throw new InternalError( + "Detail: nesting level of offsprings of an implicit search tree node should be higher" + ) + + val wII = offsprings.zipWithIndex + + val childrenII = wII + .filter { + case (sub, _) => + if (sub.nesting < minNesting) { + throw new InternalError( + s"Detail: Sub-node in implicit tree can only have nesting level larger than top node," + + s" but (${sub.nesting} < $minNesting)" + ) + } + + sub.nesting == minNesting + } + .map(_._2) + + val ranges = { + + val seqs = (childrenII ++ Seq(offsprings.size)) + .sliding(2) + .toList + + seqs.map { + case Seq(from, until) => + from -> until + case _ => + throw new InternalError("Detail: index should not be empty") + } + } + + val children = ranges.map { range => + val _top = offsprings(range._1) + + val _offsprings = offsprings.slice(range._1 + 1, range._2) + + fromNode( + _top, + _offsprings + ) + } + + mergeDuplicates(children) + // children + } + + def mergeDuplicates(children: List[ImplicitErrorTree]): List[ImplicitErrorTree] = { + val errors = children.map(_.error).distinct + + val grouped = errors.map { ee => + val group = children.filter(c => c.error == ee) + + val mostSpecificError = group.head.error + // TODO: this old design is based on a huge hypothesis, should it be improved + // val mostSpecificError = group.map(_.error).maxBy(v => v.candidate.toString.length) + + val allChildren = group.flatMap(v => v.children) + val mergedChildren = mergeDuplicates(allChildren) + + ImplicitErrorTree(mostSpecificError, mergedChildren) + } + + grouped.distinctBy(v => v.FormattedChain.Full.toString) // TODO: this may lose information + } + } + + + def formatNestedImplicit(err: ImplicitError): (String, List[String], Int) = { + + val candidate = ImplicitError.cleanCandidate(err) + val problem = 
s"${candidate.red} invalid because" + val reason = err.specifics match { + case e: ImplicitErrorSpecifics.NotFound => implicitMessage(e.param, NoImplicitFoundAnnotation(err.candidate, e.param)._2) + case e: ImplicitErrorSpecifics.NonconformantBounds => formatNonConfBounds(e) + } + val base = (problem, reason, err.nesting) + + val reasons = base._2 + + (problem, reasons, base._3) + } + + val breakInfixLength: Int = 70 + + def dealias(tpe: Type) = + if (isAux(tpe)) tpe + else (tpe match { + case ExistentialType(_, t) => t + case _ => tpe + }).dealias + + def extractArgs(tpe: Type) = tpe match { + case PolyType(params, result) => result.typeArgs.map { + case t if params.contains(t.typeSymbol) => WildcardType + case a => a + } + case t: AliasTypeRef if !isAux(tpe) => + t.betaReduce.typeArgs.map(a => if (a.typeSymbolDirect.isTypeParameter) WildcardType else a) + case _ => tpe.typeArgs + } + + def isRefined(tpe: Type) = tpe.dealias match { + case RefinedType(_, _) => true + case _ => false + } + + def isSymbolic(tpe: Type) = { + val n = tpe.typeConstructor.typeSymbol.name + !isRefined(tpe) && n.encodedName.toString != n.decodedName.toString + } + + def ctorNames(tpe: Type): List[String] = + scala.util.Try(tpe.typeConstructor.toString) + .map(_.split('.').toList) + .getOrElse(List(tpe.toString)) + + def isAux(tpe: Type) = ctorNames(tpe).lastOption.contains("Aux") + + def formatRefinement(sym: Symbol) = { + if (sym.hasRawInfo) s"$sym = ${showType(sym.rawInfo)}" + else sym.toString + } + + def formatAuxSimple(tpe: Type): (List[String], String) = { + val names = ctorNames(tpe) + (names.dropRight(2), ctorNames(tpe).takeRight(2).mkString(".")) + } + + def symbolPath(sym: Symbol): List[String] = + sym + .ownerChain + .takeWhile(sym => sym.isType && !sym.isPackageClass) + .map(_.name.decodedName.toString) + .reverse + + def sanitizePath(path: List[String]): List[String] = + path.takeWhile(_ != "type").filter(!_.contains("$")) + + def pathPrefix: List[String] => String = { + case 
Nil => "" + case List("") => "" + case a => a.mkString("", ".", ".") + } + + def qualifiedName(path: List[String], name: FormattedName): String = name match { + case SimpleName(name) => s"${pathPrefix(path)}$name" + case InfixName(name) => name + } + + def stripModules(path: List[String], name: FormattedName): String = { + val qName = qualifiedName(path, name) + if (shorthands(qName)) name.name else qName + } + + case class TypeParts(sym: Symbol, tt: Type) { + def modulePath: List[String] = (tt, sym) match { + case (TypeRef(pre, _, _), _) if !pre.toString.isEmpty => sanitizePath(pre.toString.split("\\.").toList) + case (SingleType(_, _), sym) => symbolPath(sym).dropRight(1) + case (_, _) => Nil + } + + def ownerPath: List[String] = { + val parts = sym.ownerChain.reverse.map(_.name.decodedName.toString) + parts.splitAt(Math.max(0, parts.size - 1))._1 + } + + def shortName: String = tt.safeToString.stripPrefix(tt.prefixString.split('.').dropRight(1).mkString(".") + ".") + } + + def stripType(tpe: Type): (List[String], String) = tpe match { + case tt: SingletonType => + val parts = TypeParts(tt.termSymbol, tt) + parts.modulePath -> parts.shortName + + case tt: RefinedType => + val parts = TypeParts(tt.typeSymbol, tt) + parts.modulePath -> parts.shortName + + case _ => + // TODO: should this also use TypeParts ? 
+ val sym = if (tpe.takesTypeArgs) tpe.typeSymbolDirect else tpe.typeSymbol + val symName = sym.name.decodedName.toString + val parts = TypeParts(sym, tpe) + (parts.modulePath, if (sym.isModuleClass) s"$symName.type" else symName) + } + + def formatNormalSimple(tpe: Type): (List[String], String) = tpe match { + case a @ WildcardType => (Nil, a.toString) + case a => stripType(a) + } + + def formatSimpleType(tpe: Type): (List[String], String) = + if (isAux(tpe)) formatAuxSimple(tpe) + else formatNormalSimple(tpe) + + def indentLine(line: String, n: Int = 1, prefix: String = " ") = (prefix * n) + line + def indent(lines: List[String], n: Int = 1, prefix: String = " ") = lines.map(indentLine(_, n, prefix)) + + /** If the args of an applied type constructor are multiline, + * create separate lines for the constructor name and the closing bracket; + * else return a single line. */ + def showTypeApply(cons: String, args: List[TypeRepr], break: Boolean): TypeRepr = { + val flatArgs = bracket(args.map(_.flat)) + val flat = FlatType(s"$cons$flatArgs") + def brokenArgs = args match { + case head :: tail => tail.foldLeft(head.lines)((z, a) => z ::: "," :: a.lines) + case _ => Nil + } + def broken = BrokenType(s"$cons[" :: indent(brokenArgs) ::: List("]")) + if (break) decideBreak(flat, broken) else flat + } + + def showTuple(args: List[String]) = args match { + case head :: Nil => s"Tuple1[$head]" + case _ => args.mkString("(", ",", ")") + } + + def showFuncParams(args: List[String]) = args match { + case head :: Nil => head + case _ => args.mkString("(", ",", ")") + } + + def showRefined(parents: List[String], decls: List[String]) = { + val p = parents.mkString(" with ") + val d = if (decls.isEmpty) "" else decls.mkString(" {", "; ", "}") + s"$p$d" + } + + def bracket[A](params: List[A]) = params.mkString("[", ", ", "]") + + def formatFunction(args: List[String]) = { + val (params, returnt) = args.splitAt(args.length - 1) + s"${showTuple(params)} => ${showTuple(returnt)}" + } 
+ + def decideBreak(flat: FlatType, broken: => BrokenType): TypeRepr = + if (flat.flat.length > breakInfixLength) broken + else flat + + /** Turn a nested infix type structure into a flat list + * {{{ + * ::[A, ::[B, C]]] => List(A, ::, B, ::, C) + * }}} + */ + def flattenInfix(tpe: Infix): List[Formatted] = { + def step(tpe: Formatted): List[Formatted] = tpe match { + case Infix(infix, left, right, _) => left :: infix :: step(right) + case a => List(a) + } + step(tpe) + } + + /** Break a list produced by [[flattenInfix]] into lines by taking two + * elements at a time, then appending the terminal. + * If the expression's length is smaller than the threshold specified via + * plugin parameter, return a single line. */ + def breakInfix(types: List[Formatted]): TypeRepr = { + val form = types.map(showFormattedL(_, break = true)) + def broken = form.sliding(2, 2).flatMap { + case FlatType(tpe) :: FlatType(infix) :: Nil => List(s"$tpe $infix") + case left :: right :: Nil => left.lines ++ right.lines + case last :: Nil => last.lines + case _ => Nil + }.toList + decideBreak(FlatType(form.flatMap(_.lines).mkString(" ")), BrokenType(broken)) + } + + val showFormattedLCache = FormatCache[(Formatted, Boolean), TypeRepr]() + val formatTypeCache = FormatCache[(Type, Boolean), Formatted]() + val formatDiffCache = FormatCache[(Type, Type, Boolean), Formatted]() + + val specialFormatters: List[SpecialFormatter] = + List(FunctionFormatter, TupleFormatter, RefinedFormatter, ByNameFormatter) + + def truncateDecls(decls: List[Formatted]): Boolean = settings.VimplicitsMaxRefined.value < decls.map(_.length).sum + + def showFormattedQualified(path: List[String], name: FormattedName): TypeRepr = + FlatType(stripModules(path, name)) + + def formattedDiff(left: Formatted, right: Formatted): String = (left, right) match { + case (Qualified(lpath, lname), Qualified(rpath, rname)) if lname == rname => + val prefix = lpath.reverseIterator.zip(rpath.reverseIterator).takeWhile { case (l, r) => l 
== r }.size + 1 + s"${qualifiedName(lpath.takeRight(prefix), lname).red}|${qualifiedName(rpath.takeRight(prefix), rname).green}" + case (left, right) => + val l = showFormatted(left) + val r = showFormatted(right) + s"${l.red}|${r.green}" + } + + def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = tpe match { + case Simple(name) => FlatType(name.name) + case Qualified(path, name) => showFormattedQualified(path, name) + case Applied(cons, args) => showTypeApply(showFormatted(cons), args.map(showFormattedL(_, break)), break) + case tpe @ Infix(_, _, _, top) => wrapParensRepr(if (break) breakInfix(flattenInfix(tpe)) else FlatType(flattenInfix(tpe).map(showFormatted).mkString(" ")), top) + case UnitForm => FlatType("Unit") + case FunctionForm(args, ret, top) => FlatType(wrapParens(s"${showFuncParams(args.map(showFormatted))} => ${showFormatted(ret)}", top)) + case TupleForm(elems) => FlatType(showTuple(elems.map(showFormatted))) + case RefinedForm(elems, decls) => FlatType(showRefined(elems.map(showFormatted), if (truncateDecls(decls)) List("...") else decls.map(showFormatted))) + case Diff(left, right) => FlatType(formattedDiff(left, right)) + case Decl(sym, rhs) => FlatType(s"type ${showFormatted(sym)} = ${showFormatted(rhs)}") + case DeclDiff(sym, left, right) => FlatType(s"type ${showFormatted(sym)} = ${formattedDiff(left, right)}") + case ByName(tpe) => FlatType(s"(=> ${showFormatted(tpe)})") + } + + def showFormattedL(tpe: Formatted, break: Boolean): TypeRepr = showFormattedLCache((tpe, break), showFormattedLImpl(tpe, break)) + def showFormatted(tpe: Formatted): String = showFormattedL(tpe, break = false).tokenize + def showType(tpe: Type): String = showFormattedL(formatType(tpe, top = true), break = false).joinLines + def showTypeBreakL(tpe: Type): List[String] = showFormattedL(formatType(tpe, top = true), break = true).lines + + def wrapParens(expr: String, top: Boolean): String = if (top) expr else s"($expr)" + + def wrapParensRepr(tpe: 
TypeRepr, top: Boolean): TypeRepr = tpe match { + case FlatType(tpe) => FlatType(wrapParens(tpe, top)) + case BrokenType(lines) => if (top) tpe else BrokenType("(" :: indent(lines) ::: List(")")) + } + + def formatSpecial[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = + specialFormatters.iterator.map(_.apply(tpe, simple, args, formattedArgs, top)(rec)).collectFirst { case Some(a) => a } + + def formatInfix[A]( + path: List[String], simple: String, left: A, right: A, top: Boolean, + )(rec: (A, Boolean) => Formatted): Formatted = + Infix(Qualified(path, InfixName(simple)), rec(left, false), rec(right, false), top) + + def formatWithInfix[A](tpe: Type, args: List[A], top: Boolean)(rec: (A, Boolean) => Formatted): Formatted = { + val (path, simple) = formatSimpleType(tpe) + lazy val formattedArgs = args.map(rec(_, true)) + formatSpecial(tpe, simple, args, formattedArgs, top)(rec).getOrElse { + args match { + case left :: right :: Nil if isSymbolic(tpe) => formatInfix(path, simple, left, right, top)(rec) + case _ :: _ => Applied(Qualified(path, SimpleName(simple)), formattedArgs) + case _ => Qualified(path, SimpleName(simple)) + } + } + } + + def formatTypeImpl(tpe: Type, top: Boolean): Formatted = { + val dtpe = dealias(tpe) + formatWithInfix(dtpe, extractArgs(dtpe), top)(formatType) + } + + def formatType(tpe: Type, top: Boolean): Formatted = formatTypeCache((tpe, top), formatTypeImpl(tpe, top)) + + def formatDiffInfix(left: Type, right: Type, top: Boolean): Formatted = + formatWithInfix(left, extractArgs(left).zip(extractArgs(right)), top) { case ((l, r), t) => formatDiff(l, r, t) } + + def formatDiffSpecial(left: Type, right: Type, top: Boolean): Option[Formatted] = + specialFormatters.iterator.map(_.diff(left, right, top)).collectFirst { case Some(a) => a } + + def formatDiffSimple(left: Type, right: Type): Formatted = + Diff(formatType(left, top = true), 
formatType(right, top = true)) + + def formatDiffImpl(found: Type, req: Type, top: Boolean): Formatted = { + val (left, right) = dealias(found) -> dealias(req) + + val normalized = Seq(left, right).map(_.normalize).distinct + if (normalized.size == 1) return formatType(normalized.head, top) + + if (left.typeSymbol == right.typeSymbol) formatDiffInfix(left, right, top) + else formatDiffSpecial(left, right, top).getOrElse(formatDiffSimple(left, right)) + } + + def formatDiff(left: Type, right: Type, top: Boolean): Formatted = + formatDiffCache((left, right, top), formatDiffImpl(left, right, top)) + + def formatNonConfBounds(err: ImplicitErrorSpecifics.NonconformantBounds): List[String] = { + val params = bracket(err.tparams.map(_.defString)) + val types = bracket(err.targs.map(showType)) + List("nonconformant bounds;", types.red, params.green) + } + + def hideImpError(error: ImplicitError): Boolean = error.specifics match { + case ImplicitErrorSpecifics.NonconformantBounds(_, _, _) => true + case ImplicitErrorSpecifics.NotFound(_) => false + } + + def indentTree(tree: List[(String, List[String], Int)], baseIndent: Int): List[String] = { + val nestings = tree.map(_._3).distinct.sorted + tree.flatMap { case (head, tail, nesting) => + val ind = baseIndent + nestings.indexOf(nesting).abs + indentLine(head, ind, "――") :: indent(tail, ind) + } + } + + def formatIndentTree(chain: List[ImplicitError], baseIndent: Int) = + indentTree(chain.map(formatNestedImplicit), baseIndent) + + def deepestLevel(chain: List[ImplicitError]) = + chain.foldLeft(0)((z, a) => if (a.nesting > z) a.nesting else z) + + def formatImplicitChainTreeFull(chain: List[ImplicitError]): List[String] = + formatIndentTree(chain, chain.headOption.map(_.nesting).getOrElse(0)) + + def formatImplicitChainFlat(chain: List[ImplicitError]): List[String] = + chain.map(formatNestedImplicit).flatMap { case (h, t, _) => h :: t } + + def implicitMessage(param: Symbol, annotationMsg: String): List[String] = { + val tpe 
= param.tpe + val msg = if (annotationMsg.isEmpty) Nil else annotationMsg.split("\n").toList.map(_.blue) :+ "" + val head = s"${"!".red}${"I".blue} ${param.name.toString.yellow}:" + val lines = showTypeBreakL(tpe).map(_.green) match { + case single :: Nil => List(s"$head $single") + case l => head :: indent(l) + } + lines ::: indent(msg) + } + + def splitChains(errors: List[ImplicitError]): List[List[ImplicitError]] = { + errors.foldRight(Nil: List[List[ImplicitError]]) { + case (a, chains @ ((chain @ (prev :: _)) :: tail)) => + if (a.nesting > prev.nesting) List(a) :: chains + else (a :: chain) :: tail + case (a, _) => List(List(a)) + } + } + + def formatImplicitError( + param: Symbol, + errors: List[ImplicitError], + annotationMsg: String + ): String = { + + val msg = implicitMessage(param, annotationMsg) + val errorTrees = ImplicitErrorTree.fromChildren(errors, -1) + + val errorTreesStr = errorTrees.map(_.FormattedChain.display.toString) + + val components: Seq[String] = + Seq("implicit error;") ++ + msg ++ + errorTreesStr + + val result = components.mkString("\n") + + result + } +} diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala index 762bf8df9b45..8297bd3ab9ec 100644 --- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -29,14 +29,11 @@ trait CharArrayReaderData { /** The start offset of the line before the current one */ var lastLineStartOffset: Int = 0 - protected var lastUnicodeOffset = -1 - def copyFrom(cd: CharArrayReaderData): this.type = { this.ch = cd.ch this.charOffset = cd.charOffset this.lineStartOffset = cd.lineStartOffset this.lastLineStartOffset = cd.lastLineStartOffset - this.lastUnicodeOffset = cd.lastUnicodeOffset this } } @@ -47,14 +44,6 @@ abstract class CharArrayReader extends CharArrayReaderData { self => val bidiChars: ListBuffer[(Int, Int)] = ListBuffer.empty - def decodeUni: Boolean = true - - /** An error routine to call on bad unicode escapes \\uxxxx. */ - protected def error(offset: Int, msg: String): Unit - - /** Is last character a unicode escape \\uxxxx? */ - def isUnicodeEscape = charOffset == lastUnicodeOffset - /** Advance one character; reducing CR;LF pairs to just LF */ final def nextChar(): Unit = { if (charOffset >= buf.length) { @@ -64,8 +53,7 @@ abstract class CharArrayReader extends CharArrayReaderData { self => ch = c charOffset += 1 if (isBiDiCharacter(ch)) - bidiChars.+=((ch, charOffset)) - if (c == '\\') potentialUnicode() + bidiChars.addOne((ch, charOffset)) if (ch < ' ') { skipCR() potentialLineEnd() @@ -77,7 +65,7 @@ abstract class CharArrayReader extends CharArrayReaderData { self => * This is for use in multi-line strings, so there are no * "potential line ends" here. 
*/ - final def nextRawChar() { + final def nextRawChar(): Unit = { if (charOffset >= buf.length) { ch = SU } else { @@ -85,39 +73,7 @@ abstract class CharArrayReader extends CharArrayReaderData { self => ch = c charOffset += 1 if (isBiDiCharacter(ch)) - bidiChars.+=((ch, charOffset)) - if (c == '\\') potentialUnicode() - } - } - - /** Interpret \\uxxxx escapes */ - private def potentialUnicode() = { - def evenSlashPrefix: Boolean = { - var p = charOffset - 2 - while (p >= 0 && buf(p) == '\\') p -= 1 - (charOffset - p) % 2 == 0 - } - def udigit: Int = { - if (charOffset >= buf.length) { - // Since the positioning code is very insistent about throwing exceptions, - // we have to decrement the position so our error message can be seen, since - // we are one past EOF. This happens with e.g. val x = \ u 1 - error(charOffset - 1, "incomplete unicode escape") - SU - } - else { - val d = digit2int(buf(charOffset), 16) - if (d >= 0) charOffset += 1 - else error(charOffset, "error in unicode escape") - d - } - } - if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) { - do charOffset += 1 - while (charOffset < buf.length && buf(charOffset) == 'u') - val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit - lastUnicodeOffset = charOffset - ch = code.toChar + bidiChars.addOne((ch, charOffset)) } } @@ -128,14 +84,11 @@ abstract class CharArrayReader extends CharArrayReaderData { self => case LF => charOffset += 1 ch = LF - case '\\' => - if (lookaheadReader.getu == LF) - potentialUnicode() case _ => } /** Handle line ends */ - private def potentialLineEnd() { + private def potentialLineEnd(): Unit = { if (ch == LF || ch == FF) { lastLineStartOffset = lineStartOffset lineStartOffset = charOffset @@ -149,10 +102,7 @@ abstract class CharArrayReader extends CharArrayReaderData { self => val buf = self.buf charOffset = self.charOffset ch = self.ch - override def decodeUni = self.decodeUni - def error(offset: Int, msg: String) = 
self.error(offset, msg) /** A mystery why CharArrayReader.nextChar() returns Unit */ def getc() = { nextChar() ; ch } - def getu() = { require(buf(charOffset) == '\\') ; ch = '\\' ; charOffset += 1 ; potentialUnicode() ; ch } } } diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 77ad71578a93..e61edc3a6725 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,12 +14,10 @@ package scala.tools.nsc package util import io.{AbstractFile, Directory, File, Jar} -import java.net.MalformedURLException -import java.net.URL +import java.net.{MalformedURLException, URI, URISyntaxException, URL} import java.util.regex.PatternSyntaxException import File.pathSeparator -import Jar.isJarOrZip import scala.tools.nsc.classpath.{ClassPathEntries, PackageEntry, PackageName} /** @@ -132,13 +130,13 @@ object ClassPath { /* Get all subdirectories, jars, zips out of a directory. 
*/ def lsDir(dir: Directory, filt: String => Boolean = _ => true) = - dir.list.filter(x => filt(x.name) && (x.isDirectory || isJarOrZip(x))).map(_.path).toList + dir.list.filter(x => filt(x.name) && (x.isDirectory || Jar.isJarOrZip(x))).map(_.path).toList if (pattern == "*") lsDir(Directory(".")) else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2)) else if (pattern contains '*') { try { - val regexp = ("^" + pattern.replaceAllLiterally("""\*""", """.*""") + "$").r + val regexp = s"^${pattern.replace(raw"\*", ".*")}$$".r lsDir(Directory(pattern).parent, regexp.findFirstIn(_).isDefined) } catch { case _: PatternSyntaxException => List(pattern) } @@ -159,7 +157,7 @@ object ClassPath { def map(cp: String, f: String => String): String = join(split(cp) map f: _*) /** Expand path and possibly expanding stars */ - def expandPath(path: String, expandStar: Boolean = true): List[String] = + def expandPath(path: String, expandStar: Boolean): List[String] = if (expandStar) split(path) flatMap expandS else split(path) @@ -185,11 +183,11 @@ object ClassPath { } def specToURL(spec: String): Option[URL] = - try Some(new URL(spec)) - catch { case _: MalformedURLException => None } + try Some(new URI(spec).toURL) + catch { case _: MalformedURLException | _: URISyntaxException => None } def manifests: List[java.net.URL] = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val resources = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF") resources.asScala.filter(_.getProtocol == "jar").toList } diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index a0205f50efde..0c3d18279a2d 100644 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -44,7 +44,7 @@ object DocStrings { /** Returns index of string `str` after `start` skipping longest * sequence of space and tab characters, possibly also containing * a single `*` character or the `/``**` sequence. - * @pre start == str.length || str(start) == `\n` + * @note Pre-condition: start == str.length || str(start) == `\n` */ def skipLineLead(str: String, start: Int): Int = if (start == str.length) start @@ -91,7 +91,7 @@ object DocStrings { * usecase or the end of the string, as they might include other sections * of their own */ - def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = { + def tagIndex(str: String, p: Int => Boolean = (_ => true)): List[(Int, Int)] = { var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx)) indices = mergeUsecaseSections(str, indices) indices = mergeInheritdocSections(str, indices) @@ -208,15 +208,17 @@ object DocStrings { str.substring(start, finish) } + private val sectionTags = List("@param", "@tparam", "@throws") + /** Extract the section text, except for the tag and comment newlines */ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = { val (beg, end) = section - if (str.startsWith("@param", beg) || - str.startsWith("@tparam", beg) || - str.startsWith("@throws", beg)) - (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end) - else - (skipWhitespace(str, skipTag(str, beg)), end) + val skipped = + if (sectionTags.exists(str.startsWith(_, beg))) + skipIdent(str, skipWhitespace(str, skipTag(str, beg))) + else + skipTag(str, beg) + (skipWhitespace(str, skipped), end) } /** Cleanup section text */ diff --git a/src/compiler/scala/tools/nsc/util/EditDistance.scala b/src/compiler/scala/tools/nsc/util/EditDistance.scala new file mode 100644 index 000000000000..2f3c71f85db5 --- /dev/null +++ b/src/compiler/scala/tools/nsc/util/EditDistance.scala @@ 
-0,0 +1,63 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package util + +import java.lang.Character.{ toLowerCase => lower } + +object EditDistance { + + /** + * @author Paul Phillips + * Translated from the java version at + * https://www.merriampark.com/ld.htm + * which is declared to be public domain. + */ + def levenshtein( + s: String, + t: String, + insertCost: Int = 1, + deleteCost: Int = 1, + subCost: Int = 1, + matchCost: Int = 0, + caseCost: Int = 1, + transpositions: Boolean = false + ): Int = { + val n = s.length + val m = t.length + if (n == 0) return m + if (m == 0) return n + + val d = Array.ofDim[Int](n + 1, m + 1) + 0 to n foreach (x => d(x)(0) = x) + 0 to m foreach (x => d(0)(x) = x) + + for (i <- 1 to n; s_i = s(i - 1); j <- 1 to m) { + val t_j = t(j - 1) + val cost = if (s_i == t_j) matchCost else if (lower(s_i) == lower(t_j)) caseCost else subCost + + val c1 = d(i - 1)(j) + deleteCost + val c2 = d(i)(j - 1) + insertCost + val c3 = d(i - 1)(j - 1) + cost + + d(i)(j) = c1 min c2 min c3 + + if (transpositions) { + if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1)) + d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost) + } + } + + d(n)(m) + } +} diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala index 903a92e009f9..0cb869ec2348 100644 --- a/src/compiler/scala/tools/nsc/util/Exceptional.scala +++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -10,20 +10,19 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc -package util +package scala.tools.nsc.util import java.util.concurrent.ExecutionException -import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException } +import java.lang.reflect.{InvocationTargetException, UndeclaredThrowableException} object Exceptional { - def unwrap(x: Throwable): Throwable = x match { + def rootCause(x: Throwable): Throwable = x match { case _: InvocationTargetException | _: ExceptionInInitializerError | _: UndeclaredThrowableException | _: ExecutionException if x.getCause != null => - unwrap(x.getCause) + rootCause(x.getCause) case _ => x } diff --git a/src/compiler/scala/tools/nsc/util/InterruptReq.scala b/src/compiler/scala/tools/nsc/util/InterruptReq.scala index ddb1f3353c93..c9ccc78fbb80 100644 --- a/src/compiler/scala/tools/nsc/util/InterruptReq.scala +++ b/src/compiler/scala/tools/nsc/util/InterruptReq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/nsc/util/JarFactory.scala b/src/compiler/scala/tools/nsc/util/JarFactory.scala index 4b7e2cec869a..7a662036f727 100644 --- a/src/compiler/scala/tools/nsc/util/JarFactory.scala +++ b/src/compiler/scala/tools/nsc/util/JarFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index 2a506f0e373d..cee0e67ee9de 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,12 +14,15 @@ package scala package tools.nsc package util +import scala.collection.AbstractIterator +import scala.collection.immutable.ArraySeq import scala.reflect.internal.Chars._ +import scala.util.chaining._ -class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, startcol: int, */ - decodeUni: Boolean, error: String => Unit) extends Iterator[Char] with Cloneable { +class JavaCharArrayReader(buf: ArraySeq.ofChar, start: Int, /* startline: int, startcol: int, */ + decodeUni: Boolean, error: String => Unit) extends AbstractIterator[Char] with Cloneable { - def this(buf: IndexedSeq[Char], decodeUni: Boolean, error: String => Unit) = + def this(buf: ArraySeq.ofChar, decodeUni: Boolean, error: String => Unit) = this(buf, 0, /* 1, 1, */ decodeUni, error) /** the line and column position of the current character @@ -32,7 +35,7 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, def hasNext = bp < buf.length def next(): Char = { - val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array + val buf = this.buf.unsafeArray if(!hasNext) { ch = SU return SU // there is an endless stream of SU's at the end @@ -75,4 +78,8 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, def copy: JavaCharArrayReader = new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error) + + // a copy of this reader that is primed to read starting 
at the current character. + def lookahead: JavaCharArrayReader = + new JavaCharArrayReader(buf, bp-1, /* nextcol, nextline, */ decodeUni, error).tap(_.next()) } diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index b67f2df20177..6f2c1996c096 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -33,10 +33,10 @@ object ShowPickled extends Names { } def readName = if (isName) new String(bytes, "UTF-8") - else sys.error("%s is no name" format tagName) + else throw new IllegalStateException(s"$tagName is no name") def nameIndex = if (hasName) readNat(bytes, 0) - else sys.error("%s has no name" format tagName) + else throw new IllegalStateException(s"$tagName has no name") def tagName = tag2string(tag) override def toString = "%d,%d: %s".format(num, startIndex, tagName) @@ -96,6 +96,7 @@ object ShowPickled extends Names { case LITERALnull => "LITERALnull" case LITERALclass => "LITERALclass" case LITERALenum => "LITERALenum" + case LITERALsymbol => "LITERALsymbol" case SYMANNOT => "SYMANNOT" case CHILDREN => "CHILDREN" case ANNOTATEDtpe => "ANNOTATEDtpe" @@ -123,7 +124,7 @@ object ShowPickled extends Names { result.toInt } - def printFile(buf: PickleBuffer, out: PrintStream) { + def printFile(buf: PickleBuffer, out: PrintStream): Unit = { out.println("Version " + buf.readNat() + "." 
+ buf.readNat()) val index = buf.createIndex val entryList = makeEntryList(buf, index) @@ -131,7 +132,7 @@ object ShowPickled extends Names { def p(s: String) = out print s - def printNameRef() { + def printNameRef(): Unit = { val idx = buf.readNat() val name = entryList nameAt idx val toPrint = " %s(%s)".format(idx, name) @@ -149,7 +150,7 @@ object ShowPickled extends Names { def printConstAnnotArgRef() = printNat() def printAnnotArgRef() = printNat() - def printSymInfo(end: Int) { + def printSymInfo(end: Int): Unit = { printNameRef() printSymbolRef() val pflags = buf.readLongNat() @@ -157,7 +158,7 @@ object ShowPickled extends Names { val accessBoundary = ( for (idx <- privateWithin) yield { val s = entryList nameAt idx - idx + "(" + s + ")" + "" + idx + "(" + s + ")" } ) val flagString = { @@ -187,9 +188,9 @@ object ShowPickled extends Names { * interpreted are for the most part going to tell you the wrong thing. * It's not so easy to duplicate the logic applied in the UnPickler. */ - def printEntry(i: Int) { + def printEntry(i: Int): Unit = { buf.readIndex = index(i) - p(i + "," + buf.readIndex + ": ") + p("" + i + "," + buf.readIndex + ": ") val tag = buf.readByte() out.print(tag2string(tag)) val len = buf.readNat() @@ -217,17 +218,17 @@ object ShowPickled extends Names { case CONSTANTtpe => printTypeRef(); printConstantRef() case TYPEREFtpe => - printTypeRef(); printSymbolRef(); buf.until(end, printTypeRef) + printTypeRef(); printSymbolRef(); buf.until(end, () => printTypeRef()) case TYPEBOUNDStpe => printTypeRef(); printTypeRef() case REFINEDtpe => - printSymbolRef(); buf.until(end, printTypeRef) + printSymbolRef(); buf.until(end, () => printTypeRef()) case CLASSINFOtpe => - printSymbolRef(); buf.until(end, printTypeRef) + printSymbolRef(); buf.until(end, () => printTypeRef()) case METHODtpe | IMPLICITMETHODtpe => - printTypeRef(); buf.until(end, printTypeRef) + printTypeRef(); buf.until(end, () => printTypeRef()) case POLYtpe => - printTypeRef(); 
buf.until(end, printSymbolRef) + printTypeRef(); buf.until(end, () => printSymbolRef()) case LITERALboolean => out.print(if (buf.readLong(len) == 0L) " false" else " true") case LITERALbyte => @@ -246,6 +247,8 @@ object ShowPickled extends Names { out.print(" " + longBitsToDouble(buf.readLong(len))) case LITERALstring => printNameRef() + case LITERALsymbol => + printNameRef() case LITERALenum => printSymbolRef() case LITERALnull => @@ -253,17 +256,17 @@ object ShowPickled extends Names { case LITERALclass => printTypeRef() case CHILDREN => - printSymbolRef(); buf.until(end, printSymbolRef) + printSymbolRef(); buf.until(end, () => printSymbolRef()) case SYMANNOT => - printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef) + printSymbolRef(); printTypeRef(); buf.until(end, () => printAnnotArgRef()) case ANNOTATEDtpe => - printTypeRef(); buf.until(end, printAnnotInfoRef) + printTypeRef(); buf.until(end, () => printAnnotInfoRef()) case ANNOTINFO => - printTypeRef(); buf.until(end, printAnnotArgRef) + printTypeRef(); buf.until(end, () => printAnnotArgRef()) case ANNOTARGARRAY => - buf.until(end, printConstAnnotArgRef) + buf.until(end, () => printConstAnnotArgRef()) case EXISTENTIALtpe => - printTypeRef(); buf.until(end, printSymbolRef) + printTypeRef(); buf.until(end, () => printSymbolRef()) case _ => } @@ -291,7 +294,7 @@ object ShowPickled extends Names { pickle.readIndex = saved } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { args foreach { arg => fromFile(arg) match { case Some(pb) => show(arg + ":", pb) diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index af49114e52f5..68b1a5b9ccfd 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,11 +18,11 @@ package util import java.io.PrintStream /** A simple tracer - * @param out: The print stream where trace info should be sent - * @param enabled: A condition that must be true for trace info to be produced. + * @param out The print stream where trace info should be sent + * @param enabled A condition that must be true for trace info to be produced. */ class SimpleTracer(out: PrintStream, enabled: Boolean = true) { - def apply[T](msg: => String)(value: T): T = { + def apply[T](msg: => String)(value: T): value.type = { if (enabled) out.println(msg+value) value } diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala index 8c9bc0c8c4db..65e9f81b344b 100644 --- a/src/compiler/scala/tools/nsc/util/StackTracing.scala +++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,12 +12,15 @@ package scala.tools.nsc.util +import collection.mutable, mutable.ListBuffer +import java.lang.System.lineSeparator + private[util] trait StackTracing extends Any { - /** Format a stack trace, returning the prefix consisting of frames that satisfy - * a given predicate. + /** Format a stack trace, returning the prefix consisting of frames that satisfy a given predicate. + * * The format is similar to the typical case described in the Javadoc - * for [[java.lang.Throwable#printStackTrace]]. + * for [[java.lang.Throwable#printStackTrace()*]]. * If a stack trace is truncated, it will be followed by a line of the form * `... 3 elided`, by analogy to the lines `... 3 more` which indicate * shared stack trace segments. 
@@ -25,49 +28,52 @@ private[util] trait StackTracing extends Any { * @param p the predicate to select the prefix */ def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = { - import collection.mutable.{ ArrayBuffer, ListBuffer } - import compat.Platform.EOL type TraceRelation = String val Self = new TraceRelation("") val CausedBy = new TraceRelation("Caused by: ") val Suppressed = new TraceRelation("Suppressed: ") - def clazz(e: Throwable): String = e.getClass.getName - def because(e: Throwable): String = e.getCause match { case null => null ; case c => header(c) } - def msg(e: Throwable): String = e.getMessage match { case null => because(e) ; case s => s } - def txt(e: Throwable): String = msg(e) match { case null => "" ; case s => s": $s" } - def header(e: Throwable): String = s"${clazz(e)}${txt(e)}" + def header(e: Throwable): String = { + def because = e.getCause match { case null => null ; case c => header(c) } + def msg = e.getMessage match { case null => because ; case s => s } + def txt = msg match { case null => "" ; case s => s": $s" } + s"${e.getClass.getName}$txt" + } - val seen = new ArrayBuffer[Throwable](16) + val seen = mutable.Set.empty[Throwable] def unseen(t: Throwable) = { - def inSeen = seen exists (_ eq t) - val interesting = (t != null) && !inSeen + val interesting = t != null && !seen(t) if (interesting) seen += t interesting } - val sb = ListBuffer.empty[String] + val lines = ListBuffer.empty[String] // format the stack trace, skipping the shared trace def print(e: Throwable, r: TraceRelation, share: Array[StackTraceElement], indents: Int): Unit = if (unseen(e)) { val trace = e.getStackTrace val frames = if (share.isEmpty) trace else { - val spare = share.reverseIterator - val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _) + val spare = share.reverseIterator + val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next() == _) trimmed.reverse } - val prefix = frames takeWhile p + 
val prefix = frames.takeWhile(p) val margin = " " * indents - sb append s"${margin}${r}${header(e)}" - prefix foreach (f => sb append s"${margin} at $f") - if (frames.size < trace.size) sb append s"${margin} ... ${trace.size - frames.size} more" - if (r == Self && prefix.size < frames.size) sb append s"${margin} ... ${frames.size - prefix.size} elided" + lines += s"$margin$r${header(e)}" + prefix.foreach(frame => lines += s"$margin at $frame") + + val traceFramesLenDiff = trace.length - frames.length + val framesPrefixLenDiff = frames.length - prefix.length + if (traceFramesLenDiff > 0) { + if (framesPrefixLenDiff > 0) lines += s"$margin ... $framesPrefixLenDiff elided and $traceFramesLenDiff more" + else lines += s"$margin ... $traceFramesLenDiff more" + } else if (framesPrefixLenDiff > 0) lines += s"$margin ... $framesPrefixLenDiff elided" + print(e.getCause, CausedBy, trace, indents) - e.getSuppressed foreach (t => print(t, Suppressed, frames, indents + 1)) + e.getSuppressed.foreach(print(_, Suppressed, frames, indents + 1)) } print(e, Self, share = Array.empty, indents = 0) - - sb mkString EOL + lines.mkString(lineSeparator) } } diff --git a/src/compiler/scala/tools/nsc/util/StringUtil.scala b/src/compiler/scala/tools/nsc/util/StringUtil.scala new file mode 100644 index 000000000000..062a20a58e1a --- /dev/null +++ b/src/compiler/scala/tools/nsc/util/StringUtil.scala @@ -0,0 +1,26 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package util + +import scala.collection.immutable.Seq + +object StringUtil { + def oxford(vs: Seq[String], conj: String): String = + vs match { + case Seq() => "" + case Seq(a) => a + case Seq(a, b) => s"$a $conj $b" + case xs => xs.init.mkString("", ", ", s", $conj ${xs.last}") + } +} diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala index 064d00df6282..a229b4052434 100644 --- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala +++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -43,7 +43,7 @@ class WorkScheduler { } def dequeueAllInterrupts(f: InterruptReq => Unit): Unit = synchronized { - interruptReqs.dequeueAll { iq => f(iq); true } + interruptReqs.removeAll().foreach(f) } /** Called from server: return optional exception posted by client @@ -103,6 +103,6 @@ class WorkScheduler { } class EmptyAction extends (() => Unit) { - def apply() {} + def apply(): Unit = {} } diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index d59556a067c6..ec0e2e633a9d 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,6 +15,7 @@ package tools package nsc import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter, Reader } +import scala.collection.immutable.ArraySeq package object util { // forwarder for old code that builds against 2.9 and 2.10 @@ -24,9 +25,6 @@ package object util { type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T] val HashSet = scala.reflect.internal.util.HashSet - /** Apply a function and return the passed value */ - def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } - /** Execute code and then wait for all non-daemon Threads * created and begun during its execution to complete. */ @@ -46,9 +44,16 @@ package object util { * which were created during its execution. */ def trackingThreads[T](body: => T): (T, Seq[Thread]) = { - val ts1 = sys.allThreads() + def allThreads(): IndexedSeq[Thread] = { + val tarray = new Array[Thread](Thread.activeCount()) + val got = Thread.enumerate(tarray) + + ArraySeq.unsafeWrapArray(tarray.take(got)) + } + + val ts1 = allThreads() val result = body - val ts2 = sys.allThreads() + val ts2 = allThreads() (result, ts2 filterNot (ts1 contains _)) } @@ -64,7 +69,7 @@ package object util { writer.toString() } - /** Generate a string using a routine that wants to write on a writer. */ + /** Generate a string using a routine that wants to write on a stream. */ def stringFromWriter(writer: PrintWriter => Unit): String = { val stringWriter = new StringWriter() val stream = new NewLinePrintWriter(stringWriter) @@ -72,25 +77,23 @@ package object util { stream.close() stringWriter.toString } - /** Generate a string using a routine that wants to write on a stream. 
*/ def stringFromStream(stream: OutputStream => Unit): String = { - val utf8 = java.nio.charset.StandardCharsets.UTF_8 val bs = new ByteArrayOutputStream() - val ps = new PrintStream(bs, /*autoflush=*/ false, utf8.name) // use Charset directly in jdk10 + val ps = new PrintStream(bs) stream(ps) ps.close() - bs.toString(utf8.name) + bs.toString() } - def stackTraceString(t: Throwable): String = stringFromWriter(t.printStackTrace(_)) + def stackTraceString(ex: Throwable): String = stringFromWriter(ex.printStackTrace) /** A one line string which contains the class of the exception, the * message if any, and the first non-Predef location in the stack trace * (to exclude assert, require, etc.) */ - def stackTraceHeadString(t: Throwable): String = { - val frame = t.getStackTrace.dropWhile(_.getClassName contains "Predef").take(1).mkString("") - val msg = t.getMessage match { case null | "" => "" ; case s => s"""("$s")""" } - val clazz = t.getClass.getName.split('.').last + def stackTraceHeadString(ex: Throwable): String = { + val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString "" + val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" } + val clazz = ex.getClass.getName.split('.').last s"$clazz$msg @ $frame" } @@ -99,7 +102,7 @@ package object util { /** Format the stack trace, returning the prefix consisting of frames that satisfy * a given predicate. * The format is similar to the typical case described in the Javadoc - * for [[java.lang.Throwable#printStackTrace]]. + * for [[java.lang.Throwable#printStackTrace()*]]. * If a stack trace is truncated, it will be followed by a line of the form * `... 3 elided`, by analogy to the lines `... 3 more` which indicate * shared stack trace segments. 
@@ -108,10 +111,15 @@ package object util { def stackTracePrefixString(p: StackTraceElement => Boolean): String = stackTracePrefixString(e)(p) } + implicit class `quickie stack dump`(private val sc: StringContext) extends AnyVal { + @deprecated("For debug only", since="forever") + def trace(args: Any*): Unit = new Throwable(sc.s(args: _*)).printStackTrace() + } + lazy val trace = new SimpleTracer(System.out) - // These four deprecated since 2.10.0 are still used in (at least) - // the sbt 0.12.4 compiler interface. + // These four deprecated since 2.10.0 are still used in + // the sbt 0.13 compiler interface. @deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0") type Position = scala.reflect.internal.util.Position @deprecated("Moved to scala.reflect.internal.util.NoPosition", "2.10.0") diff --git a/src/compiler/scala/tools/reflect/FastStringInterpolator.scala b/src/compiler/scala/tools/reflect/FastStringInterpolator.scala new file mode 100644 index 000000000000..7f664aa85b29 --- /dev/null +++ b/src/compiler/scala/tools/reflect/FastStringInterpolator.scala @@ -0,0 +1,139 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools +package reflect + +import scala.StringContext.{InvalidEscapeException, InvalidUnicodeEscapeException, processEscapes, processUnicode} +import scala.collection.mutable.ListBuffer +import scala.reflect.internal.util.Position +import nsc.Reporting.WarningCategory.{Scala3Migration, WFlagTostringInterpolated} + +trait FastStringInterpolator extends FormatInterpolator { + import c.universe._ + + // fast track entry for StringContext.s + def interpolateS: Tree = interpolated(c.macroApplication, isRaw = false) + // fast track entry for StringContext.raw + def interpolateRaw: Tree = interpolated(c.macroApplication, isRaw = true) + + // rewrite a tree like `scala.StringContext.apply("hello \\n ", " ", "").s("world", Test.this.foo)` + // to `"hello \n world ".+(Test.this.foo)` + private def interpolated(macroApp: Tree, isRaw: Boolean): Tree = macroApp match { + case Apply(Select(Apply(stringCtx @ Select(qualSC, _), parts), _interpol@_), args) + if stringCtx.symbol == currentRun.runDefinitions.StringContext_apply + && treeInfo.isQualifierSafeToElide(qualSC) + && parts.forall(treeInfo.isLiteralString) + && parts.length == (args.length + 1) => + + def adjustedEscPos(p: Position, delta: Int) = { + val start = p.start + delta + Position.range(p.source, start = start, point = start, end = start + 2) + } + val treated = parts.mapConserve { + case lit @ Literal(Constant(stringVal: String)) => + def asRaw = if (currentRun.sourceFeatures.unicodeEscapesRaw) stringVal else { + val processed = processUnicode(stringVal) + if (processed == stringVal) stringVal else { + val pos = { + val diffindex = processed.zip(stringVal).zipWithIndex.collectFirst { + case ((p, o), i) if p != o => i + }.getOrElse(processed.length - 1) + lit.pos.withShift(diffindex) + } + def msg(fate: String) = s"Unicode escapes in raw interpolations are $fate; use literal characters instead" + if (currentRun.isScala3) + runReporting.warning(pos, msg("ignored in Scala 3 (or with 
-Xsource-features:unicode-escapes-raw)"), Scala3Migration, c.internal.enclosingOwner) + else + runReporting.deprecationWarning(pos, msg("deprecated"), "2.13.2", "", "") + processed + } + } + val value = + try if (isRaw) asRaw else processEscapes(stringVal) + catch { + case ie: InvalidEscapeException => c.abort(adjustedEscPos(lit.pos, ie.index), ie.getMessage) + case iue: InvalidUnicodeEscapeException => c.abort(adjustedEscPos(lit.pos, iue.index), iue.getMessage) + } + val k = Constant(value) + // To avoid the backlash of backslash, taken literally by Literal, escapes are processed strictly (scala/bug#11196) + treeCopy.Literal(lit, k).setType(ConstantType(k)) + case x => throw new MatchError(x) + } + + if (args.forall(treeInfo.isLiteralString)) { + val it1 = treated.iterator + val it2 = args.iterator + val res = new StringBuilder + def add(t: Tree): Unit = res.append(t.asInstanceOf[Literal].value.value) + add(it1.next()) + while (it2.hasNext) { + add(it2.next()) + add(it1.next()) + } + val k = Constant(res.toString) + Literal(k).setType(ConstantType(k)) + } + else concatenate(treated, args) + + // Fallback -- inline the original implementation of the `s` or `raw` interpolator. 
+ case Apply(Select(someStringContext, _interpol@_), args) => + q"""{ + val sc = $someStringContext + _root_.scala.StringContext.standardInterpolator( + ${if(isRaw) q"_root_.scala.Predef.identity" else q"_root_.scala.StringContext.processEscapes"}, + $args, + sc.parts) + }""" + case x => throw new MatchError(x) + } + + def concatenate(parts: List[Tree], args: List[Tree]): Tree = { + val argsIndexed = args.toVector + val concatArgs = ListBuffer.empty[Tree] + val numLits = parts.length + foreachWithIndex(parts.tail) { (lit, i) => + val treatedContents = lit.asInstanceOf[Literal].value.stringValue + val emptyLit = treatedContents.isEmpty + if (i < numLits - 1) { + val arg = argsIndexed(i) + if (linting && !(arg.tpe =:= definitions.StringTpe)) + if (arg.tpe.typeSymbol eq definitions.UnitClass) + runReporting.warning(arg.pos, "interpolated Unit value", WFlagTostringInterpolated, c.internal.enclosingOwner) + else if (!definitions.isPrimitiveValueType(arg.tpe)) + runReporting.warning(arg.pos, "interpolation uses toString", WFlagTostringInterpolated, c.internal.enclosingOwner) + concatArgs += arg + } + if (!emptyLit) concatArgs += lit + } + def mkConcat(pos: Position, lhs: Tree, rhs: Tree): Tree = + atPos(pos)(gen.mkMethodCall(gen.mkAttributedSelect(lhs, definitions.String_+), rhs :: Nil)).setType(definitions.StringTpe) + + var result: Tree = parts.head + val chunkSize = 32 + if (concatArgs.lengthCompare(chunkSize) <= 0) + concatArgs.foreach { t => + result = mkConcat(t.pos, result, t) + } + else + concatArgs.toList.grouped(chunkSize).foreach { + case group => + var chunkResult: Tree = Literal(Constant("")).setType(definitions.StringTpe) + group.foreach { t => + chunkResult = mkConcat(t.pos, chunkResult, t) + } + result = mkConcat(chunkResult.pos, result, chunkResult) + } + + result + } +} diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala index 3bdd34be6e01..8d5b37fd1c67 100644 --- 
a/src/compiler/scala/tools/reflect/FastTrack.scala +++ b/src/compiler/scala/tools/reflect/FastTrack.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -35,8 +35,8 @@ class FastTrack[MacrosAndAnalyzer <: Macros with Analyzer](val macros: MacrosAnd private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers - private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } = - new { val c: c0.type = c0 } with FormatInterpolator + private implicit def context2macroimplementations(c0: MacroContext): FastStringInterpolator { val c: c0.type } = + new { val c: c0.type = c0 } with FastStringInterpolator private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } = new { val c: c0.type = c0 } with QuasiquoteImpls private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) = @@ -61,8 +61,10 @@ class FastTrack[MacrosAndAnalyzer <: Macros with Analyzer](val macros: MacrosAnd makeBlackbox( materializeClassTag) { case Applied(_, ttag :: Nil, _) => _.materializeClassTag(ttag.tpe) }, makeBlackbox( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) }, makeBlackbox( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) }, - makeBlackbox( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }, - makeBlackbox( StringContext_f) { case _ => _.interpolate }, + makeBlackbox( ApiUniverseReify) { case Applied(_, _ :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }, + 
makeWhitebox( StringContext_f) { case _ => _.interpolateF }, + makeWhitebox( StringContext_s) { case _ => _.interpolateS }, + makeWhitebox( StringContext_raw) { case _ => _.interpolateRaw }, makeBlackbox(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree }, makeWhitebox( QuasiquoteClass_api_apply) { case _ => _.expandQuasiquote }, makeWhitebox(QuasiquoteClass_api_unapply) { case _ => _.expandQuasiquote } diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala index ec432a75a248..d4c534fbb321 100644 --- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,36 +12,47 @@ package scala.tools.reflect -import scala.reflect.macros.runtime.Context -import scala.collection.mutable.{ ListBuffer, Stack } -import scala.reflect.internal.util.Position import scala.PartialFunction.cond +import scala.collection.mutable.ListBuffer +import scala.reflect.macros.runtime.Context +import scala.tools.nsc.Reporting.WarningCategory, WarningCategory.WFlagTostringInterpolated import scala.util.matching.Regex.Match +import scala.util.chaining._ import java.util.Formattable abstract class FormatInterpolator { + import FormatInterpolator._ + import SpecifierGroups.{Value => SpecGroup, _} + val c: Context val global: c.universe.type = c.universe - import c.universe.{ Match => _, _ } + import c.universe.{Match => _, _} import definitions._ import treeInfo.Applied - @inline private def truly(body: => Unit): Boolean = { body ; true } - @inline private def falsely(body: => Unit): Boolean = { body ; false } + protected var linting = settings.warnToString.value + + protected final def withoutLinting[A](body: 
=> A): A = { + val linted = linting + linting = false + try body finally linting = linted + } private def bail(msg: String) = global.abort(msg) - def interpolate: Tree = c.macroApplication match { + def concatenate(parts: List[Tree], args: List[Tree]): Tree + + def interpolateF: Tree = c.macroApplication match { //case q"$_(..$parts).f(..$args)" => case Applied(Select(Apply(_, parts), _), _, argss) => val args = argss.flatten - def badlyInvoked = (parts.length != args.length + 1) && truly { + def badlyInvoked = parts.lengthIs != args.length + 1 and { def because(s: String) = s"too $s arguments for interpolated string" val (p, msg) = - if (parts.length == 0) (c.prefix.tree.pos, "there are no parts") - else if (args.length + 1 < parts.length) + if (parts.isEmpty) (c.prefix.tree.pos, "there are no parts") + else if (parts.lengthIs > (args.length + 1)) (if (args.isEmpty) c.enclosingPosition else args.last.pos, because("few")) else (args(parts.length-1).pos, because("many")) c.abort(p, msg) @@ -57,145 +68,134 @@ abstract class FormatInterpolator { * is inserted. * * In any other position, the only permissible conversions are - * the literals (%% and %n) or an index reference (%1$ or %<). + * the literals (%% and %n) or an index reference (%1\$ or %<). 
* * A conversion specifier has the form: * - * [index$][flags][width][.precision]conversion + * [index\$][flags][width][.precision]conversion * - * 1) "...${smth}" => okay, equivalent to "...${smth}%s" - * 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah" - * 3) "...${smth}%" => error - * 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n" - * 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%" - * 6) "...${smth}[%legalJavaConversion]" => okay* - * 7) "...${smth}[%illegalJavaConversion]" => error - * *Legal according to [[http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html]] + * 1) "...\${smth}" => okay, equivalent to "...\${smth}%s" + * 2) "...\${smth}blahblah" => okay, equivalent to "...\${smth}%sblahblah" + * 3) "...\${smth}%" => error + * 4) "...\${smth}%n" => okay, equivalent to "...\${smth}%s%n" + * 5) "...\${smth}%%" => okay, equivalent to "...\${smth}%s%%" + * 6) "...\${smth}[%legalJavaConversion]" => okay* + * 7) "...\${smth}[%illegalJavaConversion]" => error + * *Legal according to [[java.util.Formatter]] */ - def interpolated(parts: List[Tree], args: List[Tree]) = { - val fstring = new StringBuilder - val evals = ListBuffer[ValDef]() - val ids = ListBuffer[Ident]() - val argStack = Stack(args: _*) - - // create a tmp val and add it to the ids passed to format - def defval(value: Tree, tpe: Type): Unit = { - val freshName = TermName(c.freshName("arg$")) - evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos - ids += Ident(freshName) + def interpolated(parts: List[Tree], args: List[Tree]): Tree = { + val argTypes = args.map(_.tpe) + val argc = argTypes.length + // amended parts and actual args to use, in amended.mkString.format(actuals) + val amended = ListBuffer.empty[String] + val actuals = ListBuffer.empty[Tree] + val convert = ListBuffer.empty[Conversion] + + // whether this format does more than concatenate strings + var formatting = false + + def 
argType(argi: Int, types: Type*): Type = { + val tpe = argTypes(argi) + types.find(t => t != AnyTpe && argConformsTo(argi, tpe, t)) + .orElse(types.find(t => t != AnyTpe && argConvertsTo(argi, tpe, t))) + .orElse(types.find(t => t == AnyTpe && argConformsTo(argi, tpe, t))) + .getOrElse { + val msg = "type mismatch" + { + val req = raw"required: (.*)".r.unanchored + val all = types.map(req => global.analyzer.foundReqMsg(tpe, req)) + if (all.isEmpty) "" + else if (all.length == 1) all.head + else all.head + all.tail.map { case req(what) => what case _ => "?" }.mkString(", ", ", ", "") + } + c.error(args(argi).pos, msg) + reported = true + actuals += args(argi) + types.head + } } - // Append the nth part to the string builder, possibly prepending an omitted %s first. - // Sanity-check the % fields in this part. - def copyPart(part: Tree, n: Int): Unit = { - import SpecifierGroups.{ Spec, Index } - val s0 = part match { - case Literal(Constant(x: String)) => x - case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals") + def argConformsTo(argi: Int, arg: Type, target: Type): Boolean = (arg <:< target).tap(if (_) actuals += args(argi)) + def argConvertsTo(argi: Int, arg: Type, target: Type): Boolean = + c.inferImplicitView(args(argi), arg, target) match { + case EmptyTree => false + case _ => + // let the compiler figure out how to apply the conversion + val freshName = TermName(c.freshName("arg$")) + val value = args(argi) + val ValDef(_, _, _, rhs) = c.typecheck(ValDef(Modifiers(), freshName, TypeTree(target).setPos(value.pos.focus), value).setPos(value.pos)): @unchecked + actuals += rhs + true } - def escapeHatch: PartialFunction[Throwable, String] = { - // trailing backslash, octal escape, or other - case e: StringContext.InvalidEscapeException => - def errPoint = part.pos withPoint (part.pos.point + e.index) - def octalOf(c: Char) = Character.digit(c, 8) - def alt = { - def altOf(i: Int) = i match { - case '\b' 
=> "\\b" - case '\t' => "\\t" - case '\n' => "\\n" - case '\f' => "\\f" - case '\r' => "\\r" - case '\"' => "$" /* avoid lint warn */ + - "{'\"'} or a triple-quoted literal \"\"\"with embedded \" or \\u0022\"\"\"" - case '\'' => "'" - case '\\' => """\\""" - case x => "\\u%04x" format x - } - val suggest = { - val r = "([0-7]{1,3}).*".r - (s0 drop e.index + 1) match { - case r(n) => altOf { (0 /: n) { case (a, o) => (8 * a) + (o - '0') } } - case _ => "" - } - } - val txt = - if ("" == suggest) "" - else s", use $suggest instead" - txt - } - def badOctal = { - def msg(what: String) = s"Octal escape literals are $what$alt." - if (settings.future) { - c.error(errPoint, msg("unsupported")) - s0 - } else { - currentRun.reporting.deprecationWarning(errPoint, msg("deprecated"), "2.11.0", site = "", origin = "") - try StringContext.treatEscapes(s0) catch escapeHatch - } + + // Append the nth part to the string builder, possibly prepending an omitted %s first. + // Check the % fields in this part. 
+ def loop(remaining: List[Tree], n: Int): Unit = + remaining match { + case part0 :: remaining => + val part1 = part0 match { + case Literal(Constant(x: String)) => x + case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals") } - if (e.index == s0.length - 1) { - c.error(errPoint, """Trailing '\' escapes nothing.""") - s0 - } else if (octalOf(s0(e.index + 1)) >= 0) { - badOctal - } else { - c.error(errPoint, e.getMessage) - s0 + val part = try StringContext.processEscapes(part1) catch escapeHatch(c)(part1, part0.pos) + val matches = formatPattern.findAllMatchIn(part) + + def insertStringConversion(): Unit = { + amended += "%s" + part + val cv = Conversion(part0.pos, argc) + cv.accepts(argType(n-1, AnyTpe)) + convert += cv + cv.lintToString(argTypes(n-1)) } - } - val s = try StringContext.processEscapes(s0) catch escapeHatch - val ms = fpat findAllMatchIn s - - def errorLeading(op: Conversion) = op.errorAt(Spec, s"conversions must follow a splice; ${Conversion.literalHelp}") - - def first = n == 0 - // a conversion for the arg is required - if (!first) { - val arg = argStack.pop() - def s_%() = { - fstring append "%s" - defval(arg, AnyTpe) - } - def accept(op: Conversion) = { - if (!op.isLeading) errorLeading(op) - op.accepts(arg) match { - case Some(tpe) => defval(arg, tpe) - case None => + def errorLeading(op: Conversion) = op.errorAt(Spec)(s"conversions must follow a splice; ${Conversion.literalHelp}") + def accept(op: Conversion): Unit = { + if (!op.isLeading) errorLeading(op) + op.accepts(argType(n-1, op.acceptableVariants: _*)) + amended += part + op.lintToString(argTypes(n-1)) } - } - if (ms.hasNext) { - Conversion(ms.next, part.pos, args.size) match { - case Some(op) if op.isLiteral => s_%() - case Some(op) if op.indexed => - if (op.index map (_ == n) getOrElse true) accept(op) + + if (n == 0) amended += part + else if (!matches.hasNext) insertStringConversion() + else { + val cv = 
Conversion(matches.next(), part0.pos, argc) + if (cv.kind != Kind.StringXn || cv.cc.isUpper || cv.width.nonEmpty || cv.flags.nonEmpty) + formatting = true + if (cv.isLiteral) insertStringConversion() + else if (cv.isIndexed) { + if (cv.index.getOrElse(-1) == n) accept(cv) else { - // either some other arg num, or '<' - c.warning(op.groupPos(Index), "Index is not this arg") - s_%() + // "$x$y%1" where "%1" follows a splice but does not apply to it + c.warning(cv.groupPosAt(Index, 0), "Index is not this arg") + insertStringConversion() } - case Some(op) => accept(op) - case None => + } + else if (!cv.isError) accept(cv) } - } else s_%() - } - // any remaining conversions must be either literals or indexed - while (ms.hasNext) { - Conversion(ms.next, part.pos, args.size) match { - case Some(op) if first && op.hasFlag('<') => op.badFlag('<', "No last arg") - case Some(op) if op.isLiteral || op.indexed => // OK - case Some(op) => errorLeading(op) - case None => - } + // any remaining conversions in this part must be either literals or indexed + while (matches.hasNext) { + val cv = Conversion(matches.next(), part0.pos, argc) + if (n == 0 && cv.hasFlag('<')) cv.badFlag('<', "No last arg") + else if (!cv.isLiteral && !cv.isIndexed) errorLeading(cv) + formatting = true + } + loop(remaining, n = n + 1) + case Nil => } - fstring append s - } + loop(parts, n = 0) - parts.zipWithIndex foreach { - case (part, n) => copyPart(part, n) + def constantly(s: String) = { + val k = Constant(s) + Literal(k).setType(ConstantType(k)) } //q"{..$evals; new StringOps(${fstring.toString}).format(..$ids)}" - val format = fstring.toString - if (ids.isEmpty && !format.contains("%")) Literal(Constant(format)) + val format = amended.mkString + if (actuals.isEmpty && !formatting) constantly(format) + else if (!reported && actuals.forall(treeInfo.isLiteralString)) constantly(format.format(actuals.map(_.asInstanceOf[Literal].value.value).toIndexedSeq: _*)) + else if (!formatting) { + withoutLinting { 
// already warned + concatenate(amended.map(p => constantly(p.stripPrefix("%s"))).toList, actuals.toList) + } + } else { val scalaPackage = Select(Ident(nme.ROOTPKG), TermName("scala")) val newStringOps = Select( @@ -203,198 +203,252 @@ abstract class FormatInterpolator { TermName("collection")), TermName("immutable")), TypeName("StringOps"))), termNames.CONSTRUCTOR ) - val expr = - Apply( - Select( - Apply( - newStringOps, - List(Literal(Constant(format)))), - TermName("format")), - ids.toList - ) + val expr = Apply(Select(Apply(newStringOps, List(Literal(Constant(format)))), TermName("format")), actuals.toList) val p = c.macroApplication.pos - Block(evals.toList, atPos(p.focus)(expr)) setPos p.makeTransparent + expr.setPos(p.makeTransparent) } } - val fpat = """%(?:(\d+)\$)?([-#+ 0,(\<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r - object SpecifierGroups extends Enumeration { val Spec, Index, Flags, Width, Precision, CC = Value } + val BigDecimalTpe = typeTag[BigDecimal].tpe + val BigIntTpe = typeTag[BigInt].tpe + val CalendarTpe = typeTag[java.util.Calendar].tpe + val DateTpe = typeTag[java.util.Date].tpe + val FormattableTpe = typeTag[Formattable].tpe - val stdContextTags = new { val tc: c.type = c } with StdContextTags - import stdContextTags._ - val tagOfFormattable = typeTag[Formattable] + object Kind extends Enumeration { val StringXn, HashXn, BooleanXn, CharacterXn, IntegralXn, FloatingPointXn, DateTimeXn, LiteralXn, ErrorXn = Value } + import Kind.{Value => KindOf, _} - /** A conversion specifier matched by `m` in the string part at `pos`, - * with `argc` arguments to interpolate. + /** A conversion specifier matched in the argi'th string part, with `argc` arguments to interpolate. 
*/ - sealed trait Conversion { - def m: Match - def pos: Position - def argc: Int - - import SpecifierGroups.{ Value => SpecGroup, _ } - private def maybeStr(g: SpecGroup) = Option(m group g.id) - private def maybeInt(g: SpecGroup) = maybeStr(g) map (_.toInt) - val index: Option[Int] = maybeInt(Index) - val flags: Option[String] = maybeStr(Flags) - val width: Option[Int] = maybeInt(Width) - val precision: Option[Int] = maybeStr(Precision) map (_.drop(1).toInt) - val op: String = maybeStr(CC) getOrElse "" - - def cc: Char = if ("tT" contains op(0)) op(1) else op(0) - - def indexed: Boolean = index.nonEmpty || hasFlag('<') - def isLiteral: Boolean = false - def isLeading: Boolean = m.start(0) == 0 - def verify: Boolean = goodFlags && goodIndex - def accepts(arg: Tree): Option[Type] - - val allFlags = "-#+ 0,(<" - def hasFlag(f: Char) = (flags getOrElse "") contains f - def hasAnyFlag(fs: String) = fs exists (hasFlag) + final class Conversion(val descriptor: Match, pos: Position, val kind: KindOf, argc: Int) { + // the descriptor fields + val index: Option[Int] = descriptor.intOf(Index) + val flags: String = descriptor.stringOf(Flags) + val width: Option[Int] = descriptor.intOf(Width) + val precision: Option[Int] = descriptor.group(Precision).map(_.drop(1).toInt) + val op: String = descriptor.stringOf(CC) - def badFlag(f: Char, msg: String) = { - val i = flags map (_.indexOf(f)) filter (_ >= 0) getOrElse 0 - errorAtOffset(Flags, i, msg) - } - def groupPos(g: SpecGroup) = groupPosAt(g, 0) - def groupPosAt(g: SpecGroup, i: Int) = pos withPoint (pos.point + m.start(g.id) + i) - def errorAt(g: SpecGroup, msg: String) = c.error(groupPos(g), msg) - def errorAtOffset(g: SpecGroup, i: Int, msg: String) = c.error(groupPosAt(g, i), msg) - - def noFlags = flags.isEmpty || falsely { errorAt(Flags, "flags not allowed") } - def noWidth = width.isEmpty || falsely { errorAt(Width, "width not allowed") } - def noPrecision = precision.isEmpty || falsely { errorAt(Precision, "precision 
not allowed") } - def only_-(msg: String) = { - val badFlags = (flags getOrElse "") filterNot { case '-' | '<' => true case _ => false } - badFlags.isEmpty || falsely { badFlag(badFlags(0), s"Only '-' allowed for $msg") } - } - protected def okFlags: String = allFlags - def goodFlags = { - val badFlags = flags map (_ filterNot (okFlags contains _)) - for (bf <- badFlags; f <- bf) badFlag(f, s"Illegal flag '$f'") - badFlags.getOrElse("").isEmpty + // the conversion char is the head of the op string (but see DateTimeXn) + val cc: Char = + kind match { + case ErrorXn if op.isEmpty => '?' + case ErrorXn => op(0) + case DateTimeXn if op.length > 1 => op(1) + case DateTimeXn => '?' + case StringXn if op.isEmpty => 's' // accommodate the default %s + case _ => op(0) + } + + def isIndexed: Boolean = index.nonEmpty || hasFlag('<') + def isError: Boolean = kind == ErrorXn + def isLiteral: Boolean = kind == LiteralXn + + // descriptor is at index 0 of the part string + def isLeading: Boolean = descriptor.at(Spec) == 0 + + // true if passes. 
+ def verify: Boolean = { + // various assertions + def goodies = goodFlags && goodIndex + def noFlags = flags.isEmpty or errorAt(Flags)("flags not allowed") + def noWidth = width.isEmpty or errorAt(Width)("width not allowed") + def noPrecision = precision.isEmpty or errorAt(Precision)("precision not allowed") + def only_-(msg: String) = { + val badFlags = flags.filterNot { case '-' | '<' => true case _ => false } + badFlags.isEmpty or badFlag(badFlags(0), s"Only '-' allowed for $msg") + } + def goodFlags = flags.isEmpty || { + for (dupe <- flags.diff(flags.distinct).distinct) errorAt(Flags, flags.lastIndexOf(dupe))(s"Duplicate flag '$dupe'") + val badFlags = flags.filterNot(okFlags.contains(_)) + for (f <- badFlags) badFlag(f, s"Illegal flag '$f'") + badFlags.isEmpty + } + def goodIndex = !isIndexed || { + if (index.nonEmpty && hasFlag('<')) warningAt(Index)("Argument index ignored if '<' flag is present") + val okRange = index.map(i => i > 0 && i <= argc).getOrElse(true) + okRange || hasFlag('<') or errorAt(Index)("Argument index out of range") + } + // begin verify + kind match { + case StringXn => goodies + case BooleanXn => goodies + case HashXn => goodies + case CharacterXn => goodies && noPrecision && only_-("c conversion") + case IntegralXn => + def d_# = cc == 'd' && hasFlag('#') and badFlag('#', "# not allowed for d conversion") + def x_comma = cc != 'd' && hasFlag(',') and badFlag(',', "',' only allowed for d conversion of integral types") + goodies && noPrecision && !d_# && !x_comma + case FloatingPointXn => + goodies && (cc match { + case 'a' | 'A' => + val badFlags = ",(".filter(hasFlag) + noPrecision && badFlags.isEmpty or badFlags.foreach(badf => badFlag(badf, s"'$badf' not allowed for a, A")) + case _ => true + }) + case DateTimeXn => + def hasCC = op.length == 2 or errorAt(CC)("Date/time conversion must have two characters") + def goodCC = "HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc".contains(cc) or errorAt(CC, 1)(s"'$cc' doesn't seem to be a date or time 
conversion") + goodies && hasCC && goodCC && noPrecision && only_-("date/time conversions") + case LiteralXn => + op match { + case "%" => goodies && noPrecision and width.foreach(_ => warningAt(Width)("width ignored on literal")) + case "n" => noFlags && noWidth && noPrecision + } + case ErrorXn => + errorAt(CC)(s"illegal conversion character '$cc'") + false + case _ => + errorAt(CC)(s"bad conversion '$kind' for '$cc'") + false + } } - def goodIndex = { - if (index.nonEmpty && hasFlag('<')) - c.warning(groupPos(Index), "Argument index ignored if '<' flag is present") - val okRange = index map (i => i > 0 && i <= argc) getOrElse true - okRange || hasFlag('<') || falsely { errorAt(Index, "Argument index out of range") } + + // is the specifier OK with the given arg + def accepts(arg: Type): Boolean = + kind match { + case BooleanXn => arg == BooleanTpe orElse warningAt(CC)("Boolean format is null test for non-Boolean") + case IntegralXn => + arg == BigIntTpe || !cond(cc) { + case 'o' | 'x' | 'X' if hasAnyFlag("+ (") => "+ (".filter(hasFlag).foreach(bad => badFlag(bad, s"only use '$bad' for BigInt conversions to o, x, X")) ; true + } + case _ => true + } + def lintToString(arg: Type): Unit = + if (linting && kind == StringXn && !(arg =:= StringTpe)) + if (arg.typeSymbol eq UnitClass) + warningAt(CC)("interpolated Unit value", WFlagTostringInterpolated) + else if (!definitions.isPrimitiveValueType(arg)) + warningAt(CC)("interpolation uses toString", WFlagTostringInterpolated) + + // what arg type if any does the conversion accept + def acceptableVariants: List[Type] = + kind match { + case StringXn if hasFlag('#') => FormattableTpe :: Nil + case StringXn => AnyTpe :: Nil + case BooleanXn => BooleanTpe :: NullTpe :: AnyTpe :: Nil // warn if not boolean + case HashXn => AnyTpe :: Nil + case CharacterXn => CharTpe :: ByteTpe :: ShortTpe :: IntTpe :: Nil + case IntegralXn => IntTpe :: LongTpe :: ByteTpe :: ShortTpe :: BigIntTpe :: Nil + case FloatingPointXn => DoubleTpe 
:: FloatTpe :: BigDecimalTpe :: Nil + case DateTimeXn => LongTpe :: CalendarTpe :: DateTpe :: Nil + case LiteralXn => Nil + case ErrorXn => Nil + case _ => errorAt(CC)(s"bad conversion '$kind' for '$cc'") ; Nil + } + + // what flags does the conversion accept? + private def okFlags: String = + kind match { + case StringXn => "-#<" + case BooleanXn | HashXn => "-<" + case LiteralXn => "-" + case _ => "-#+ 0,(<" + } + + def hasFlag(f: Char) = flags.contains(f) + def hasAnyFlag(fs: String) = fs.exists(hasFlag) + + def badFlag(f: Char, msg: String) = { + val i = flags.indexOf(f) match { case -1 => 0 case j => j } + errorAt(Flags, i)(msg) } - /** Pick the type of an arg to format from among the variants - * supported by a conversion. This is the type of the temporary, - * so failure results in an erroneous assignment to the first variant. - * A more complete message would be nice. - */ - def pickAcceptable(arg: Tree, variants: Type*): Option[Type] = - variants find (arg.tpe <:< _) orElse ( - variants find (c.inferImplicitView(arg, arg.tpe, _) != EmptyTree) - ) orElse Some(variants(0)) + + def groupPosAt(g: SpecGroup, i: Int) = pos.withPoint(pos.point + descriptor.offset(g, i)) + def errorAt(g: SpecGroup, i: Int = 0)(msg: String) = c.error(groupPosAt(g, i), msg).tap(_ => reported = true) + def warningAt(g: SpecGroup, i: Int = 0)(msg: String, cat: WarningCategory = WarningCategory.Other) = c.callsiteTyper.context.warning(groupPosAt(g, i), msg, cat, Nil) } + object Conversion { - import SpecifierGroups.{ Spec, CC } - def apply(m: Match, p: Position, n: Int): Option[Conversion] = { - def badCC(msg: String) = { - val dk = new ErrorXn(m, p) - val at = if (dk.op.isEmpty) Spec else CC - dk.errorAt(at, msg) - } - def cv(cc: Char) = cc match { - case 'b' | 'B' | 'h' | 'H' | 's' | 'S' => - new GeneralXn(m, p, n) - case 'c' | 'C' => - new CharacterXn(m, p, n) - case 'd' | 'o' | 'x' | 'X' => - new IntegralXn(m, p, n) - case 'e' | 'E' | 'f' | 'g' | 'G' | 'a' | 'A' => - new 
FloatingPointXn(m, p, n) - case 't' | 'T' => - new DateTimeXn(m, p, n) - case '%' | 'n' => - new LiteralXn(m, p, n) - case _ => - badCC(s"illegal conversion character '$cc'") - null + def apply(m: Match, p: Position, argc: Int): Conversion = { + def kindOf(cc: Char) = cc match { + case 's' | 'S' => StringXn + case 'h' | 'H' => HashXn + case 'b' | 'B' => BooleanXn + case 'c' | 'C' => CharacterXn + case 'd' | 'o' | + 'x' | 'X' => IntegralXn + case 'e' | 'E' | + 'f' | + 'g' | 'G' | + 'a' | 'A' => FloatingPointXn + case 't' | 'T' => DateTimeXn + case '%' | 'n' => LiteralXn + case _ => ErrorXn } - Option(m group CC.id) map (cc => cv(cc(0))) match { - case Some(x) => Option(x) filter (_.verify) - case None => - badCC(s"Missing conversion operator in '${m.matched}'; $literalHelp") - None + m.group(CC) match { + case Some(cc) => new Conversion(m, p, kindOf(cc(0)), argc).tap(_.verify) + case None => new Conversion(m, p, ErrorXn, argc).tap(_.errorAt(Spec)(s"Missing conversion operator in '${m.matched}'; $literalHelp")) } } + // construct a default %s conversion + def apply(p: Position, argc: Int): Conversion = + new Conversion(formatPattern.findAllMatchIn("%").next(), p, StringXn, argc) val literalHelp = "use %% for literal %, %n for newline" } - class GeneralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { - def accepts(arg: Tree) = cc match { - case 's' | 'S' if hasFlag('#') => pickAcceptable(arg, tagOfFormattable.tpe) - case 'b' | 'B' => if (arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe) - case _ => Some(AnyTpe) - } - override protected def okFlags = cc match { - case 's' | 'S' => "-#<" - case _ => "-<" - } + + var reported = false +} +object FormatInterpolator { + // match a conversion specifier + private val formatPattern = """%(?:(\d+)\$)?([-#+ 0,(<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r + // ordinal is the regex group index in the format pattern + private object SpecifierGroups extends Enumeration { val Spec, Index, Flags, Width, 
Precision, CC = Value } + import SpecifierGroups.{Value => SpecGroup} + private implicit class `enumlike`(val value: SpecGroup) extends AnyVal { + def ordinal = value.id } - class LiteralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { - import SpecifierGroups.Width - override val isLiteral = true - override def verify = op match { - case "%" => super.verify && noPrecision && truly(width foreach (_ => c.warning(groupPos(Width), "width ignored on literal"))) - case "n" => noFlags && noWidth && noPrecision - } - override protected val okFlags = "-" - def accepts(arg: Tree) = None + + private implicit class `boolean whimsy`(val value: Boolean) extends AnyVal { + def or(body: => Unit): Boolean = value || { body ; false } + def orElse(body: => Unit): Boolean = value || { body ; true } + def and(body: => Unit): Boolean = value && { body ; true } + def but(body: => Unit): Boolean = value && { body ; false } } - class CharacterXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { - override def verify = super.verify && noPrecision && only_-("c conversion") - def accepts(arg: Tree) = pickAcceptable(arg, CharTpe, ByteTpe, ShortTpe, IntTpe) + private implicit class `match game`(val descriptor: Match) extends AnyVal { + def at(g: SpecGroup): Int = descriptor.start(g.ordinal) + def offset(g: SpecGroup, i: Int = 0): Int = at(g) + i + def group(g: SpecGroup): Option[String] = Option(descriptor.group(g.ordinal)) + def stringOf(g: SpecGroup): String = group(g).getOrElse("") + def intOf(g: SpecGroup): Option[Int] = group(g).map(_.toInt) } - class IntegralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { - override def verify = { - def d_# = (cc == 'd' && hasFlag('#') && - truly { badFlag('#', "# not allowed for d conversion") } - ) - def x_comma = (cc != 'd' && hasFlag(',') && - truly { badFlag(',', "',' only allowed for d conversion of integral types") } - ) - super.verify && noPrecision && !d_# && !x_comma - } - 
override def accepts(arg: Tree) = { - def isBigInt = arg.tpe <:< tagOfBigInt.tpe - val maybeOK = "+ (" - def bad_+ = cond(cc) { - case 'o' | 'x' | 'X' if hasAnyFlag(maybeOK) && !isBigInt => - maybeOK filter hasFlag foreach (badf => - badFlag(badf, s"only use '$badf' for BigInt conversions to o, x, X")) - true - } - if (bad_+) None else pickAcceptable(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe) - } - } - class FloatingPointXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { - override def verify = super.verify && (cc match { - case 'a' | 'A' => - val badFlags = ",(" filter hasFlag - noPrecision && badFlags.isEmpty || falsely { - badFlags foreach (badf => badFlag(badf, s"'$badf' not allowed for a, A")) + private def escapeHatch(c: Context)(s0: String, pos: c.universe.Position): PartialFunction[Throwable, String] = { + // trailing backslash, octal escape, or other + case e: StringContext.InvalidEscapeException => + def errPoint = pos.withPoint(pos.point + e.index) + def octalOf(c: Char) = Character.digit(c, 8) + def alt = { + def altOf(i: Int) = i match { + case '\b' => "\\b" + case '\t' => "\\t" + case '\n' => "\\n" + case '\f' => "\\f" + case '\r' => "\\r" + case '\"' => "$" /* avoid lint warn */ + + "{'\"'} or a triple-quoted literal \"\"\"with embedded \" or \\u0022\"\"\"" + case '\'' => "'" + case '\\' => """\\""" + case x => "\\u%04x" format x } - case _ => true - }) - def accepts(arg: Tree) = pickAcceptable(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe) - } - class DateTimeXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { - import SpecifierGroups.CC - def hasCC = (op.length == 2 || - falsely { errorAt(CC, "Date/time conversion must have two characters") }) - def goodCC = ("HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc" contains cc) || - falsely { errorAtOffset(CC, 1, s"'$cc' doesn't seem to be a date or time conversion") } - override def verify = super.verify && hasCC && goodCC && noPrecision && only_-("date/time 
conversions") - def accepts(arg: Tree) = pickAcceptable(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe) - } - class ErrorXn(val m: Match, val pos: Position) extends Conversion { - val argc = 0 - override def verify = false - def accepts(arg: Tree) = None + val suggest = { + val r = "([0-7]{1,3}).*".r + s0.drop(e.index + 1) match { + case r(n) => altOf(n.foldLeft(0) { case (a, o) => (8 * a) + (o - '0') }) + case _ => "" + } + } + if (suggest.isEmpty) "" + else s"use $suggest instead" + } + def control(ctl: Char, i: Int, name: String) = + c.error(errPoint, s"\\$ctl is not supported, but for $name use \\u${f"$i%04x"};\n${e.getMessage}") + if (e.index == s0.length - 1) c.error(errPoint, """Trailing '\' escapes nothing.""") + else s0(e.index + 1) match { + case 'a' => control('a', 0x7, "alert or BEL") + case 'v' => control('v', 0xB, "vertical tab") + case 'e' => control('e', 0x1B, "escape") + case i if octalOf(i) >= 0 => c.error(errPoint, s"octal escape literals are unsupported: $alt") + case _ => c.error(errPoint, e.getMessage) + } + s0 } } diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala index 9027ff822dce..4bf42f38d039 100644 --- a/src/compiler/scala/tools/reflect/FrontEnd.scala +++ b/src/compiler/scala/tools/reflect/FrontEnd.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -37,7 +37,7 @@ trait FrontEnd { val infos = new scala.collection.mutable.LinkedHashSet[Info] /** Handles incoming info */ - def log(pos: Position, msg: String, severity: Severity) { + def log(pos: Position, msg: String, severity: Severity): Unit = { infos += Info(pos, msg, severity) severity.count += 1 display(infos.last) diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index 8593052d8b9b..fe0c552e08de 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,10 +14,8 @@ package scala.tools package reflect import scala.reflect.internal.util.ScalaClassLoader -import scala.tools.nsc.Global -import scala.tools.nsc.Settings +import scala.tools.nsc.{Global, Settings} import scala.tools.nsc.reporters.Reporter -import scala.tools.nsc.typechecker.Analyzer /** A version of Global that uses reflection to get class * infos, instead of reading class or source files. @@ -25,18 +23,14 @@ import scala.tools.nsc.typechecker.Analyzer class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { - override lazy val analyzer = new { - val global: ReflectGlobal.this.type = ReflectGlobal.this - } with Analyzer { - /** Obtains the classLoader used for runtime macro expansion. - * - * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]]. - * The [[rootClassLoader]] is used to obtain runtime defined macros. 
- */ - override protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs - ScalaClassLoader.fromURLs(classpath, rootClassLoader) - } + /** Obtains the classLoader used for runtime macro expansion. + * + * Macro expansion can use everything available in `global.classPath` or `rootClassLoader`. + * The `rootClassLoader` is used to obtain runtime defined macros. + */ + override def findMacroClassLoader(): ClassLoader = { + val classpath = classPath.asURLs + perRunCaches.recordClassloader(ScalaClassLoader.fromURLs(classpath, rootClassLoader)) } override def transformedType(sym: Symbol) = @@ -68,9 +62,11 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val // Mirror and RuntimeClass come from both Global and reflect.runtime.SymbolTable // so here the compiler needs an extra push to help decide between those (in favor of the latter) import scala.reflect.ClassTag - override type Mirror = JavaMirror + override type Mirror = MirrorImpl override implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[Mirror]) override type RuntimeClass = java.lang.Class[_] override implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass]) + + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = super.openPackageModule(pkgClass, force = true) } diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index a290c6bfafc8..659f2344b5f3 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/compiler/scala/tools/reflect/ReflectSetup.scala b/src/compiler/scala/tools/reflect/ReflectSetup.scala index daea54a79d73..6d2b0130f7fb 100644 --- a/src/compiler/scala/tools/reflect/ReflectSetup.scala +++ b/src/compiler/scala/tools/reflect/ReflectSetup.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,4 +19,4 @@ import scala.tools.nsc.Global * reflect specific traits are initialized */ private[reflect] trait ReflectSetup { this: Global => phase = new Run().typerPhase -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala index db4c386a9189..501e2170f262 100644 --- a/src/compiler/scala/tools/reflect/StdTags.scala +++ b/src/compiler/scala/tools/reflect/StdTags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index b7d151fed6ba..8caff3b6d5ae 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -67,7 +67,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of a typecheck error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. 
- * Such errors don't vanish and can be inspected by turning on -Ydebug. + * Such errors don't vanish and can be inspected by turning on -Vdebug. * * Typechecking can be steered with the following optional parameters: * `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false @@ -84,7 +84,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. */ def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree @@ -98,7 +98,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. */ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 89d31ec386d6..b7bc8c5597f7 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,14 +14,15 @@ package scala package tools package reflect -import scala.tools.cmd.CommandLineParser import scala.tools.nsc.reporters._ import scala.tools.nsc.CompilerCommand import scala.tools.nsc.io.{AbstractFile, VirtualDirectory} import scala.reflect.internal.util.{AbstractFileClassLoader, NoSourceFile} import scala.reflect.internal.Flags._ +import scala.sys.process.{Parser => CommandLineParser} import java.lang.{Class => jClass} -import scala.compat.Platform.EOL +import java.lang.System.{lineSeparator => EOL} + import scala.reflect.NameTransformer import scala.reflect.api.JavaUniverse import scala.reflect.io.NoAbstractFile @@ -74,7 +75,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => lastSeenContext = analyzer.NoContext } - def verify(expr: Tree): Tree = { + def verify(expr: Tree): expr.type = { // Previously toolboxes used to typecheck their inputs before compiling. 
// Actually, the initial demo by Martin first typechecked the reified tree, // then ran it, which typechecked it again, and only then launched the @@ -109,7 +110,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => if (namesakes.length > 0) name += ("$" + (namesakes.length + 1)) freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX)) }) - val expr = new Transformer { + val expr = new AstTransformer { override def transform(tree: Tree): Tree = if (tree.hasSymbolField && tree.symbol.isFreeTerm) { tree match { @@ -166,7 +167,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => } val invertedIndex = freeTerms map (_.swap) - val indexed = new Transformer { + val indexed = new AstTransformer { override def transform(tree: Tree): Tree = tree match { case Ident(name: TermName) if invertedIndex contains name => @@ -183,12 +184,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def typecheck(expr: Tree, pt: Type, mode: scala.reflect.internal.Mode, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree = transformDuringTyper(expr, mode, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)( (currentTyper, expr) => { - trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value)) + trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, printOwners = settings.Yshowsymowners.value, printKinds = settings.Yshowsymkinds.value)) currentTyper.silent(_.typed(expr, mode, pt), reportAmbiguousErrors = false) match { case analyzer.SilentResultValue(result) => - trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value)) + trace("success: ")(showAttributed(result, printKinds = 
settings.Yshowsymkinds.value)) result - case error @ analyzer.SilentTypeError(_) => + case error: analyzer.SilentTypeError => trace("failed: ")(error.err.errMsg) if (!silent) throw ToolBoxError("reflective typecheck has failed: %s".format(error.err.errMsg)) EmptyTree @@ -198,8 +199,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def inferImplicit(tree: Tree, pt: Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: Position): Tree = transformDuringTyper(tree, TERMmode, withImplicitViewsDisabled = false, withMacrosDisabled = withMacrosDisabled)( (currentTyper, tree) => { - trace("inferring implicit %s (macros = %s): ".format(if (isView) "view" else "value", !withMacrosDisabled))(showAttributed(pt, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value)) - analyzer.inferImplicit(tree, pt, isView, currentTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw ToolBoxError(msg)) + trace("inferring implicit %s (macros = %s): ".format(if (isView) "view" else "value", !withMacrosDisabled))(showAttributed(pt, printOwners = settings.Yshowsymowners.value, printKinds = settings.Yshowsymkinds.value)) + analyzer.inferImplicit(tree, pt, isView, currentTyper.context, silent, withMacrosDisabled, pos, (_, msg) => throw ToolBoxError(msg)) }) private def wrapInPackageAndCompile(packageName: TermName, tree: ImplDef): Symbol = { @@ -238,14 +239,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val (fv, name) = schema meth.newValueParameter(name, newFlags = if (fv.hasStableFlag) STABLE else 0) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType)) } - meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyTpe) + meth setInfo MethodType(freeTerms.map(makeParam _).toList, AnyTpe) minfo.decls enter meth def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match { case Some(sym) if sym != null && sym != NoSymbol => sym.owner case _ 
=> NoSymbol } trace("wrapping ")(defOwner(expr) -> meth) - val methdef = DefDef(meth, expr changeOwner (defOwner(expr), meth)) + val methdef = DefDef(meth, expr.changeOwner(defOwner(expr), meth)) val moduledef = ModuleDef( obj, @@ -256,10 +257,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => List(), List(methdef), NoPosition)) - trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value)) + trace("wrapped: ")(showAttributed(moduledef, printOwners = settings.Yshowsymowners.value, printKinds = settings.Yshowsymkinds.value)) val cleanedUp = resetAttrs(moduledef) - trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymowners.value, settings.Yshowsymkinds.value)) + trace("cleaned up: ")(showAttributed(cleanedUp, printOwners = settings.Yshowsymowners.value, printKinds = settings.Yshowsymkinds.value)) cleanedUp.asInstanceOf[ModuleDef] } @@ -382,11 +383,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def typecheck(tree: u.Tree, mode: TypecheckMode = TERMmode, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = withCompilerApi { compilerApi => import compilerApi._ - if (compiler.settings.verbose) println("importing "+tree+", expectedType = "+expectedType) + if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType) val ctree: compiler.Tree = importer.importTree(tree) val cexpectedType: compiler.Type = importer.importType(expectedType) - if (compiler.settings.verbose) println("typing "+ctree+", expectedType = "+expectedType) + if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType) val ttree: compiler.Tree = compiler.typecheck(ctree, cexpectedType, mode, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled) val 
uttree = exporter.importTree(ttree) uttree @@ -405,12 +406,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = withCompilerApi { compilerApi => import compilerApi._ - if (compiler.settings.verbose) println(s"importing pt=$pt, tree=$tree, pos=$pos") + if (compiler.settings.verbose.value) println(s"importing pt=$pt, tree=$tree, pos=$pos") val ctree: compiler.Tree = importer.importTree(tree) val cpt: compiler.Type = importer.importType(pt) val cpos: compiler.Position = importer.importPosition(pos) - if (compiler.settings.verbose) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled)) + if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled)) val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos) val uitree = exporter.importTree(itree) uitree @@ -428,7 +429,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def parse(code: String): u.Tree = withCompilerApi { compilerApi => import compilerApi._ - if (compiler.settings.verbose) println("parsing "+code) + if (compiler.settings.verbose.value) println("parsing "+code) val ctree: compiler.Tree = compiler.parse(code) val utree = exporter.importTree(ctree) utree @@ -437,20 +438,20 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def compile(tree: u.Tree): () => Any = withCompilerApi { compilerApi => import compilerApi._ - if (compiler.settings.verbose) println("importing "+tree) + if (compiler.settings.verbose.value) println("importing "+tree) val ctree: compiler.Tree = importer.importTree(tree) - if (compiler.settings.verbose) 
println("compiling "+ctree) + if (compiler.settings.verbose.value) println("compiling "+ctree) compiler.compile(ctree) } def define(tree: u.ImplDef): u.Symbol = withCompilerApi { compilerApi => import compilerApi._ - if (compiler.settings.verbose) println("importing "+tree) + if (compiler.settings.verbose.value) println("importing "+tree) val ctree: compiler.ImplDef = importer.importTree(tree).asInstanceOf[compiler.ImplDef] - if (compiler.settings.verbose) println("defining "+ctree) + if (compiler.settings.verbose.value) println("defining "+ctree) val csym: compiler.Symbol = compiler.define(ctree) val usym = exporter.importSymbol(csym) usym diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala index ae68965b4fa8..022ff46ea3e8 100644 --- a/src/compiler/scala/tools/reflect/WrappedProperties.scala +++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -27,15 +27,15 @@ trait WrappedProperties extends PropertiesTrait { protected def pickJarBasedOn = this.getClass override def propIsSet(name: String) = wrap(super.propIsSet(name)) exists (x => x) - override def propOrElse(name: String, alt: String) = wrap(super.propOrElse(name, alt)) getOrElse alt + override def propOrElse(name: String, alt: => String) = wrap(super.propOrElse(name, alt)) getOrElse alt override def setProp(name: String, value: String) = wrap(super.setProp(name, value)).orNull override def clearProp(name: String) = wrap(super.clearProp(name)).orNull - override def envOrElse(name: String, alt: String) = wrap(super.envOrElse(name, alt)) getOrElse alt + override def envOrElse(name: String, alt: => String) = wrap(super.envOrElse(name, alt)) getOrElse alt override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten - override def envOrSome(name: String, alt: Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt + override def envOrSome(name: String, alt: => Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt def systemProperties: List[(String, String)] = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ wrap { // scala/bug#7269,7775 Avoid `ConcurrentModificationException` and nulls if another thread modifies properties val props = System.getProperties @@ -47,6 +47,7 @@ trait WrappedProperties extends PropertiesTrait { object WrappedProperties { object AccessControl extends WrappedProperties { + @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrap[T](body: => T) = try Some(body) catch { case _: AccessControlException => None } } } diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala index 012e6309988e..1e85a08e19c2 100644 --- a/src/compiler/scala/tools/reflect/package.scala +++ b/src/compiler/scala/tools/reflect/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * 
- * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,8 @@ package scala.tools import scala.language.implicitConversions import scala.reflect.api.JavaUniverse -import scala.reflect.internal.util.Position +import scala.reflect.internal.Reporter +import scala.reflect.internal.util.{CodeAction, Position} import scala.tools.nsc.Settings import scala.tools.nsc.reporters.{ConsoleReporter, FilteringReporter} @@ -61,6 +62,7 @@ package object reflect { case API_INFO => reporter.echo(info.pos, info.msg) case API_WARNING => reporter.warning(info.pos, info.msg) case API_ERROR => reporter.error(info.pos, info.msg) + case x => throw new MatchError(x) } override def flush(): Unit = { @@ -77,16 +79,16 @@ package object reflect { private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): FilteringReporter = new FilteringReporter { val settings = settings0 - val API_INFO = frontEnd.INFO + val API_INFO = frontEnd.INFO val API_WARNING = frontEnd.WARNING - val API_ERROR = frontEnd.ERROR + val API_ERROR = frontEnd.ERROR - type NscSeverity = Severity - val NSC_INFO = INFO - val NSC_WARNING = WARNING - val NSC_ERROR = ERROR + type NscSeverity = Reporter.Severity + val NSC_INFO = Reporter.INFO + val NSC_WARNING = Reporter.WARNING + val NSC_ERROR = Reporter.ERROR - def doReport(pos: Position, msg: String, nscSeverity: NscSeverity): Unit = + override def doReport(pos: Position, msg: String, nscSeverity: NscSeverity, actions: List[CodeAction]): Unit = frontEnd.log(pos, msg, (nscSeverity: @unchecked) match { case NSC_INFO => API_INFO case NSC_WARNING => API_WARNING diff --git a/src/compiler/scala/tools/tasty/AttributeUnpickler.scala b/src/compiler/scala/tools/tasty/AttributeUnpickler.scala new file mode 100644 index 000000000000..265fc89f1cc3 --- /dev/null +++ b/src/compiler/scala/tools/tasty/AttributeUnpickler.scala @@ -0,0 +1,49 @@ +/* + * 
Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +import scala.collection.immutable.BitSet + +import scala.tools.tasty.{TastyFormat, TastyReader} +import TastyFormat.{isBooleanAttrTag, isStringAttrTag} + +object AttributeUnpickler { + + /** Unpickle the `Attributes` section of a TASTy file. */ + def attributes(reader: TastyReader): Attributes = { + import reader._ + + val booleanTags = BitSet.newBuilder + + var lastTag = -1 + while (!isAtEnd) { + val tag = readByte() + if (isBooleanAttrTag(tag)) + booleanTags += tag + else if (isStringAttrTag(tag)) { + // read a name ref, which is the discarded UTF8 string value of the attribute. + // in the future, if we need this value then look it up in the name table. + val _ = readNameRef() + } + else + assert(false, "unknown attribute tag: " + tag) + + assert(tag != lastTag, s"duplicate attribute tag: $tag") + assert(tag > lastTag, s"attribute tags are not ordered: $tag after $lastTag") + lastTag = tag + } + + val isJava = booleanTags.result().contains(TastyFormat.JAVAattr) + if (isJava) Attributes.javaSource else Attributes.empty + } +} diff --git a/src/compiler/scala/tools/tasty/Attributes.scala b/src/compiler/scala/tools/tasty/Attributes.scala new file mode 100644 index 000000000000..eb3c3493af49 --- /dev/null +++ b/src/compiler/scala/tools/tasty/Attributes.scala @@ -0,0 +1,28 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tasty + +/** Representation of the `Attributes` section of a TASTy file, + * with a minimal API for what is relevant for reading of signatures. + */ +sealed trait Attributes { + def isJava: Boolean +} + + +object Attributes { + private class ConcreteAttributes(val isJava: Boolean) extends Attributes + + val empty: Attributes = new ConcreteAttributes(isJava = false) + val javaSource: Attributes = new ConcreteAttributes(isJava = true) +} diff --git a/src/compiler/scala/tools/tasty/ErasedTypeRef.scala b/src/compiler/scala/tools/tasty/ErasedTypeRef.scala new file mode 100644 index 000000000000..cc8f4690b3f2 --- /dev/null +++ b/src/compiler/scala/tools/tasty/ErasedTypeRef.scala @@ -0,0 +1,65 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +import TastyName.{ObjectName, QualifiedName, SimpleName, TypeName, Empty, PathSep} + +/** Represents an erased type of a scala class/object with the number of array dimensions. + * + * @param qualifiedName the fully qualified path of the class/object, including selection from package or class, unencoded + * @param arrayDims the number of array dimensions of this type ref. 
+ * A 0-dimensional array is just qualifiedName itself + */ +case class ErasedTypeRef(qualifiedName: TypeName, arrayDims: Int) { + def signature: String = { + val qualified = qualifiedName.source + "[" * arrayDims + (if (qualifiedName.toTermName.isObjectName) s"object $qualified" else qualified) + } + def encode: ErasedTypeRef = ErasedTypeRef(TastyName.deepEncode(qualifiedName).toTypeName, arrayDims) +} + +object ErasedTypeRef { + + def apply(tname: TastyName): ErasedTypeRef = { + + def name(qual: TastyName, tname: SimpleName, isModule: Boolean) = { + val qualified = if (qual == Empty) tname else QualifiedName(qual, PathSep, tname) + if (isModule) ObjectName(qualified) else qualified + } + + def specialised(qual: TastyName, terminal: String, isModule: Boolean, arrayDims: Int = 0): ErasedTypeRef = terminal match { + case s"$inner[]" => specialised(qual, inner, isModule, arrayDims + 1) + case clazz => ErasedTypeRef(name(qual, SimpleName(clazz), isModule).toTypeName, arrayDims) + } + + var isModule = false + + val classKind = tname.toTermName match { + case ObjectName(classKind) => + isModule = true + classKind + case nonModule => nonModule + } + + classKind match { + case terminal: SimpleName => // unqualified in the package + specialised(Empty, terminal.raw, isModule) + case QualifiedName(path, PathSep, terminal) => + specialised(path, terminal.raw, isModule) + case _ => + throw new AssertionError(s"Unexpected erased type ref ${tname.debug}") + } + + } + +} diff --git a/src/compiler/scala/tools/tasty/Signature.scala b/src/compiler/scala/tools/tasty/Signature.scala new file mode 100644 index 000000000000..e7613ed5cce4 --- /dev/null +++ b/src/compiler/scala/tools/tasty/Signature.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +/** Represents the structure of an uncurried Scala method signature */ +sealed abstract class Signature[+T] { self => + import Signature._ + + final def show: String = mergeShow(new StringBuilder(30)).toString + + final def mergeShow(sb: StringBuilder): StringBuilder = self match { + case MethodSignature(params, result) => + params.map(_.merge).addString(sb, "(", ",", ")").append(result) + } + +} + +object Signature { + + /** Encodes either an `Int` which is the size of a type parameter list, or `T`, which represents a type */ + type ParamSig[T] = Either[Int, T] + + def merge[T](sb: StringBuilder, sig: Signature[T]): StringBuilder = sig.mergeShow(sb) + + def apply[T](params: List[ParamSig[T]], result: T): MethodSignature[T] = new MethodSignature(params, result) + + /** Encodes the structure of an uncurried Scala method signature, with generic type parameter lists erased to just + * their size and position. + * @param params represents types of method parameters interspersed by the lengths of generic type parameter lists + * @param result represents the type of the method result + */ + case class MethodSignature[T] private[Signature](params: List[ParamSig[T]], result: T) extends Signature[T] { + def map[U](f: T => U): MethodSignature[U] = MethodSignature(params.map(_.map(f)), f(result)) + } + +} diff --git a/src/compiler/scala/tools/tasty/TastyFlags.scala b/src/compiler/scala/tools/tasty/TastyFlags.scala new file mode 100644 index 000000000000..b51636c1b7f4 --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyFlags.scala @@ -0,0 +1,135 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +/**A static type representing a bitset of flags that are encoded in a TASTy file, along with some other flags + * inferred from context, such as `Method` and `Deferred`. + */ +object TastyFlags { + + final val EmptyTastyFlags = TastyFlagSet(0) + + final val Private = EmptyTastyFlags.next + final val Protected = Private.next + final val AbsOverride = Protected.next + final val Abstract = AbsOverride.next + final val Final = Abstract.next + final val Sealed = Final.next + final val Case = Sealed.next + final val Implicit = Case.next + final val Lazy = Implicit.next + final val Override = Lazy.next + final val Static = Override.next + final val Object = Static.next + final val Trait = Object.next + final val Local = Trait.next + final val Synthetic = Local.next + final val Artifact = Synthetic.next + final val Mutable = Artifact.next + final val FieldAccessor = Mutable.next + final val CaseAccessor = FieldAccessor.next + final val Covariant = CaseAccessor.next + final val Contravariant = Covariant.next + final val HasDefault = Contravariant.next + final val Stable = HasDefault.next + final val ParamSetter = Stable.next + final val Param = ParamSetter.next + final val Deferred = Param.next + final val Method = Deferred.next + final val Erased = Method.next + final val Inline = Erased.next + final val InlineProxy = Inline.next + final val Opaque = InlineProxy.next + final val Extension = Opaque.next + final val Given = Extension.next + final val Exported = Given.next + final val Macro = Exported.next + final val Transparent = Macro.next + final val Enum = Transparent.next + final val Open = Enum.next + final val ParamAlias = Open.next + final val Infix = ParamAlias.next + final val Invisible = Infix.next + final val Tracked = Invisible.next + + def optFlag(cond: Boolean)(flag: TastyFlagSet): TastyFlagSet = if (cond) 
flag else EmptyTastyFlags + + case class TastyFlagSet(val toLong: Long) extends AnyVal { + + private[TastyFlags] def next: TastyFlagSet = + TastyFlagSet(if (toLong == 0) 1 else toLong << 1) + + def |(other: TastyFlagSet): TastyFlagSet = TastyFlagSet(toLong | other.toLong) + def &(mask: TastyFlagSet): TastyFlagSet = TastyFlagSet(toLong & mask.toLong) + def &~(mask: TastyFlagSet): TastyFlagSet = TastyFlagSet(toLong & ~mask.toLong) + def unary_! : Boolean = this.toLong == 0 + def is(mask: TastyFlagSet): Boolean = (this & mask) == mask + def isOneOf(mask: TastyFlagSet): Boolean = (this & mask).hasFlags + def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = if (!butNot) is(mask) else is(mask) && not(butNot) + def not(mask: TastyFlagSet): Boolean = !isOneOf(mask) + def hasFlags: Boolean = this.toLong != 0 + + def debug: String = { + if (!this) { + "EmptyTastyFlags" + } + else { + val sb = collection.mutable.ArrayBuffer.empty[String] + if (is(Private)) sb += "Private" + if (is(Protected)) sb += "Protected" + if (is(AbsOverride)) sb += "AbsOverride" + if (is(Abstract)) sb += "Abstract" + if (is(Final)) sb += "Final" + if (is(Sealed)) sb += "Sealed" + if (is(Case)) sb += "Case" + if (is(Implicit)) sb += "Implicit" + if (is(Lazy)) sb += "Lazy" + if (is(Override)) sb += "Override" + if (is(Static)) sb += "Static" + if (is(Object)) sb += "Object" + if (is(Trait)) sb += "Trait" + if (is(Local)) sb += "Local" + if (is(Synthetic)) sb += "Synthetic" + if (is(Artifact)) sb += "Artifact" + if (is(Mutable)) sb += "Mutable" + if (is(FieldAccessor)) sb += "FieldAccessor" + if (is(CaseAccessor)) sb += "CaseAccessor" + if (is(Covariant)) sb += "Covariant" + if (is(Contravariant)) sb += "Contravariant" + if (is(HasDefault)) sb += "HasDefault" + if (is(Stable)) sb += "Stable" + if (is(ParamSetter)) sb += "ParamSetter" + if (is(Param)) sb += "Param" + if (is(Deferred)) sb += "Deferred" + if (is(Method)) sb += "Method" + if (is(Erased)) sb += "Erased" + if (is(Inline)) sb += "Inline" 
+ if (is(InlineProxy)) sb += "InlineProxy" + if (is(Opaque)) sb += "Opaque" + if (is(Extension)) sb += "Extension" + if (is(Given)) sb += "Given" + if (is(Exported)) sb += "Exported" + if (is(Macro)) sb += "Macro" + if (is(Transparent)) sb += "Transparent" + if (is(Enum)) sb += "Enum" + if (is(Open)) sb += "Open" + if (is(ParamAlias)) sb += "ParamAlias" + if (is(Infix)) sb += "Infix" + if (is(Invisible)) sb += "Invisible" + if (is(Tracked)) sb += "Tracked" + sb.mkString(" | ") + } + } + } + +} diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala new file mode 100644 index 000000000000..95092da86fda --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -0,0 +1,608 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +// revision: https://github.com/scala/scala3/commit/24bff2a019afcf0f45ddfd3af6b213e7e228471c +object TastyFormat { + + /** The first four bytes of a TASTy file, followed by four values: + * - `MajorVersion: Int` - see definition in `TastyFormat` + * - `MinorVersion: Int` - see definition in `TastyFormat` + * - `ExperimentalVersion: Int` - see definition in `TastyFormat` + * - `ToolingVersion: String` - arbitrary length string representing the tool that produced the TASTy. + */ + final val header: Array[Int] = Array(0x5C, 0xA1, 0xAB, 0x1F) + + /** Natural number. Each increment of the `MajorVersion` begins a + * new series of backward compatible TASTy versions. + * + * A TASTy file in either the preceeding or succeeding series is + * incompatible with the current value. + */ + final val MajorVersion: Int = 28 + + /** Natural number. 
Each increment of the `MinorVersion`, within + * a series declared by the `MajorVersion`, breaks forward + * compatibility, but remains backwards compatible, with all + * preceding `MinorVersion`. + */ + final val MinorVersion: Int = 6 + + /** Natural Number. The `ExperimentalVersion` allows for + * experimentation with changes to TASTy without committing + * to any guarantees of compatibility. + * + * A zero value indicates that the TASTy version is from a + * stable, final release. + * + * A strictly positive value indicates that the TASTy + * version is experimental. An experimental TASTy file + * can only be read by a tool with the same version. + * However, tooling with an experimental TASTy version + * is able to read final TASTy documents if the file's + * `MinorVersion` is strictly less than the current value. + */ + final val ExperimentalVersion: Int = 0 + + /**This method implements a binary relation (`<:<`) between two TASTy versions. + * + * We label the lhs `file` and rhs `compiler`. + * if `file <:< compiler` then the TASTy file is valid to be read. + * + * A TASTy version, e.g. `v := 28.0-3` is composed of three fields: + * - v.major == 28 + * - v.minor == 0 + * - v.experimental == 3 + * + * TASTy versions have a partial order, for example, + * `a <:< b` and `b <:< a` are both false if + * - `a` and `b` have different `major` fields. + * - `a` and `b` have the same `major` & `minor` fields, + * but different `experimental` fields, both non-zero. + * + * A TASTy version with a zero value for its `experimental` field + * is considered to be stable. Files with a stable TASTy version + * can be read by a compiler with an unstable TASTy version, + * (where the compiler's TASTy version has a higher `minor` field). + * + * A compiler with a stable TASTy version can never read a file + * with an unstable TASTy version. 
+ * + * We follow the given algorithm: + * + * ``` + * (fileMajor, fileMinor, fileExperimental) match + * case (`compilerMajor`, `compilerMinor`, `compilerExperimental`) => true // full equality + * case (`compilerMajor`, minor, 0) if minor < compilerMinor => true // stable backwards compatibility + * case _ => false + * ``` + * @syntax markdown + */ + def isVersionCompatible( + fileMajor: Int, + fileMinor: Int, + fileExperimental: Int, + compilerMajor: Int, + compilerMinor: Int, + compilerExperimental: Int + ): Boolean = ( + fileMajor == compilerMajor && + ( fileMinor == compilerMinor && fileExperimental == compilerExperimental // full equality + || fileMinor < compilerMinor && fileExperimental == 0 // stable backwards compatibility + ) + ) + + final val ASTsSection = "ASTs" + final val PositionsSection = "Positions" + final val CommentsSection = "Comments" + final val AttributesSection = "Attributes" + + /** Tags used to serialize names, should update [[TastyFormat$.nameTagToString]] if a new constant is added */ + class NameTags { + final val UTF8 = 1 // A simple name in UTF8 encoding. + + final val QUALIFIED = 2 // A fully qualified name `.`. + + final val EXPANDED = 3 // An expanded name `$$`, + // used by Scala-2 for private names. + + final val EXPANDPREFIX = 4 // An expansion prefix `$`, + // used by Scala-2 for private names. + + final val UNIQUE = 10 // A unique name `$` where `` + // is used only once for each ``. + + final val DEFAULTGETTER = 11 // The name `$default$` + // of a default getter that returns a default argument. + + final val SUPERACCESSOR = 20 // The name of a super accessor `super$name` created by SuperAccesors. + + final val INLINEACCESSOR = 21 // The name of an inline accessor `inline$name` + + final val BODYRETAINER = 22 // The name of a synthetic method that retains the runtime + // body of an inline method + + final val OBJECTCLASS = 23 // The name of an object class (or: module class) `$`. 
+ + final val SIGNED = 63 // A pair of a name and a signature, used to identify + // possibly overloaded methods. + + final val TARGETSIGNED = 62 // A triple of a name, a targetname and a signature, used to identify + // possibly overloaded methods that carry a @targetName annotation. + + // TODO swap SIGNED and TARGETSIGNED codes on next major version bump + } + object NameTags extends NameTags + + /**Should be kept in sync with [[NameTags]]. Converts constants to a String representing their identifier, + * or NotANameTag(tag) if unrecognised. + * + * For debugging purposes when unpickling names in a TASTy file. + */ + def nameTagToString(tag: Int) = { + import NameTags._ + tag match { + case UTF8 => "UTF8" + case QUALIFIED => "QUALIFIED" + case EXPANDED => "EXPANDED" + case EXPANDPREFIX => "EXPANDPREFIX" + case UNIQUE => "UNIQUE" + case DEFAULTGETTER => "DEFAULTGETTER" + case SUPERACCESSOR => "SUPERACCESSOR" + case INLINEACCESSOR => "INLINEACCESSOR" + case BODYRETAINER => "BODYRETAINER" + case OBJECTCLASS => "OBJECTCLASS" + case SIGNED => "SIGNED" + case TARGETSIGNED => "TARGETSIGNED" + case id => s"NotANameTag($id)" + } + } + + // Position header + + final val SOURCE = 4 + + // AST tags + + // Tree Cat. 1: tag + final val firstSimpleTreeTag = UNITconst + // final val ??? = 1 + final val UNITconst = 2 + final val FALSEconst = 3 + final val TRUEconst = 4 + final val NULLconst = 5 + final val PRIVATE = 6 + // final val ??? 
= 7 + final val PROTECTED = 8 + final val ABSTRACT = 9 + final val FINAL = 10 + final val SEALED = 11 + final val CASE = 12 + final val IMPLICIT = 13 + final val LAZY = 14 + final val OVERRIDE = 15 + final val INLINEPROXY = 16 + final val INLINE = 17 + final val STATIC = 18 + final val OBJECT = 19 + final val TRAIT = 20 + final val ENUM = 21 + final val LOCAL = 22 + final val SYNTHETIC = 23 + final val ARTIFACT = 24 + final val MUTABLE = 25 + final val FIELDaccessor = 26 + final val CASEaccessor = 27 + final val COVARIANT = 28 + final val CONTRAVARIANT = 29 + // final val ??? = 30 + final val HASDEFAULT = 31 + final val STABLE = 32 + final val MACRO = 33 + final val ERASED = 34 + final val OPAQUE = 35 + final val EXTENSION = 36 + final val GIVEN = 37 + final val PARAMsetter = 38 + final val EXPORTED = 39 + final val OPEN = 40 + final val PARAMalias = 41 + final val TRANSPARENT = 42 + final val INFIX = 43 + final val INVISIBLE = 44 + final val EMPTYCLAUSE = 45 + final val SPLITCLAUSE = 46 + final val TRACKED = 47 + + // Tree Cat. 2: tag Nat + final val firstNatTreeTag = SHAREDterm + final val SHAREDterm = 60 + final val SHAREDtype = 61 + final val TERMREFdirect = 62 + final val TYPEREFdirect = 63 + final val TERMREFpkg = 64 + final val TYPEREFpkg = 65 + final val RECthis = 66 + final val BYTEconst = 67 + final val SHORTconst = 68 + final val CHARconst = 69 + final val INTconst = 70 + final val LONGconst = 71 + final val FLOATconst = 72 + final val DOUBLEconst = 73 + final val STRINGconst = 74 + final val IMPORTED = 75 + final val RENAMED = 76 + + // Tree Cat. 
3: tag AST + final val firstASTTreeTag = THIS + final val THIS = 90 + final val QUALTHIS = 91 + final val CLASSconst = 92 + final val BYNAMEtype = 93 + final val BYNAMEtpt = 94 + final val NEW = 95 + final val THROW = 96 + final val IMPLICITarg = 97 + final val PRIVATEqualified = 98 + final val PROTECTEDqualified = 99 + final val RECtype = 100 + final val SINGLETONtpt = 101 + final val BOUNDED = 102 + final val EXPLICITtpt = 103 + final val ELIDED = 104 + + // Tree Cat. 4: tag Nat AST + final val firstNatASTTreeTag = IDENT + final val IDENT = 110 + final val IDENTtpt = 111 + final val SELECT = 112 + final val SELECTtpt = 113 + final val TERMREFsymbol = 114 + final val TERMREF = 115 + final val TYPEREFsymbol = 116 + final val TYPEREF = 117 + final val SELFDEF = 118 + final val NAMEDARG = 119 + + // Tree Cat. 5: tag Length ... + final val firstLengthTreeTag = PACKAGE + final val PACKAGE = 128 + final val VALDEF = 129 + final val DEFDEF = 130 + final val TYPEDEF = 131 + final val IMPORT = 132 + final val TYPEPARAM = 133 + final val PARAM = 134 + // final val ??? = 135 + final val APPLY = 136 + final val TYPEAPPLY = 137 + final val TYPED = 138 + final val ASSIGN = 139 + final val BLOCK = 140 + final val IF = 141 + final val LAMBDA = 142 + final val MATCH = 143 + final val RETURN = 144 + final val WHILE = 145 + final val TRY = 146 + final val INLINED = 147 + final val SELECTouter = 148 + final val REPEATED = 149 + final val BIND = 150 + final val ALTERNATIVE = 151 + final val UNAPPLY = 152 + final val ANNOTATEDtype = 153 + final val ANNOTATEDtpt = 154 + final val CASEDEF = 155 + final val TEMPLATE = 156 + final val SUPER = 157 + final val SUPERtype = 158 + final val REFINEDtype = 159 + final val REFINEDtpt = 160 + final val APPLIEDtype = 161 + final val APPLIEDtpt = 162 + final val TYPEBOUNDS = 163 + final val TYPEBOUNDStpt = 164 + final val ANDtype = 165 + // final val ??? = 166 + final val ORtype = 167 + // final val ??? 
= 168 + final val POLYtype = 169 + final val TYPELAMBDAtype = 170 + final val LAMBDAtpt = 171 + final val PARAMtype = 172 + final val ANNOTATION = 173 + final val TERMREFin = 174 + final val TYPEREFin = 175 + final val SELECTin = 176 + final val EXPORT = 177 + final val QUOTE = 178 + final val SPLICE = 179 + final val METHODtype = 180 + final val APPLYsigpoly = 181 + final val QUOTEPATTERN = 182 + final val SPLICEPATTERN = 183 + + final val MATCHtype = 190 + final val MATCHtpt = 191 + final val MATCHCASEtype = 192 + final val FLEXIBLEtype = 193 + + final val HOLE = 255 + + // Attributes tags + + // Attribute Category 1 (tags 1-32) : tag + def isBooleanAttrTag(tag: Int): Boolean = 1 <= tag && tag <= 32 + final val SCALA2STANDARDLIBRARYattr = 1 + final val EXPLICITNULLSattr = 2 + final val CAPTURECHECKEDattr = 3 + final val WITHPUREFUNSattr = 4 + final val JAVAattr = 5 + final val OUTLINEattr = 6 + + // Attribute Category 2 (tags 33-128): unassigned + + // Attribute Category 3 (tags 129-160): tag Utf8Ref + def isStringAttrTag(tag: Int): Boolean = 129 <= tag && tag <= 160 + final val SOURCEFILEattr = 129 + + // Attribute Category 4 (tags 161-255): unassigned + + // end of Attributes tags + + + /** Useful for debugging */ + def isLegalTag(tag: Int): Boolean = + firstSimpleTreeTag <= tag && tag <= SPLITCLAUSE || + firstNatTreeTag <= tag && tag <= RENAMED || + firstASTTreeTag <= tag && tag <= BOUNDED || + firstNatASTTreeTag <= tag && tag <= NAMEDARG || + firstLengthTreeTag <= tag && tag <= FLEXIBLEtype || + tag == HOLE + + def isParamTag(tag: Int): Boolean = tag == PARAM || tag == TYPEPARAM + + def isModifierTag(tag: Int): Boolean = tag match { + case PRIVATE + | PROTECTED + | ABSTRACT + | FINAL + | SEALED + | CASE + | IMPLICIT + | GIVEN + | ERASED + | LAZY + | OVERRIDE + | INLINE + | INLINEPROXY + | MACRO + | OPAQUE + | STATIC + | OBJECT + | TRAIT + | TRANSPARENT + | INFIX + | ENUM + | LOCAL + | SYNTHETIC + | ARTIFACT + | MUTABLE + | FIELDaccessor + | CASEaccessor + | 
COVARIANT + | CONTRAVARIANT + | HASDEFAULT + | STABLE + | EXTENSION + | PARAMsetter + | PARAMalias + | EXPORTED + | OPEN + | INVISIBLE + | ANNOTATION + | PRIVATEqualified + | PROTECTEDqualified + | TRACKED => true + case _ => false + } + + def isTypeTreeTag(tag: Int): Boolean = tag match { + case IDENTtpt + | SELECTtpt + | SINGLETONtpt + | REFINEDtpt + | APPLIEDtpt + | LAMBDAtpt + | TYPEBOUNDStpt + | ANNOTATEDtpt + | BYNAMEtpt + | MATCHtpt + | EXPLICITtpt + | BIND => true + case _ => false + } + + def astTagToString(tag: Int): String = tag match { + case UNITconst => "UNITconst" + case FALSEconst => "FALSEconst" + case TRUEconst => "TRUEconst" + case NULLconst => "NULLconst" + case PRIVATE => "PRIVATE" + case PROTECTED => "PROTECTED" + case ABSTRACT => "ABSTRACT" + case FINAL => "FINAL" + case SEALED => "SEALED" + case CASE => "CASE" + case IMPLICIT => "IMPLICIT" + case ERASED => "ERASED" + case LAZY => "LAZY" + case OVERRIDE => "OVERRIDE" + case INLINE => "INLINE" + case INLINEPROXY => "INLINEPROXY" + case MACRO => "MACRO" + case OPAQUE => "OPAQUE" + case STATIC => "STATIC" + case OBJECT => "OBJECT" + case TRAIT => "TRAIT" + case TRANSPARENT => "TRANSPARENT" + case INFIX => "INFIX" + case ENUM => "ENUM" + case LOCAL => "LOCAL" + case SYNTHETIC => "SYNTHETIC" + case ARTIFACT => "ARTIFACT" + case MUTABLE => "MUTABLE" + case FIELDaccessor => "FIELDaccessor" + case CASEaccessor => "CASEaccessor" + case COVARIANT => "COVARIANT" + case CONTRAVARIANT => "CONTRAVARIANT" + case HASDEFAULT => "HASDEFAULT" + case STABLE => "STABLE" + case EXTENSION => "EXTENSION" + case GIVEN => "GIVEN" + case PARAMsetter => "PARAMsetter" + case EXPORTED => "EXPORTED" + case OPEN => "OPEN" + case INVISIBLE => "INVISIBLE" + case PARAMalias => "PARAMalias" + case EMPTYCLAUSE => "EMPTYCLAUSE" + case SPLITCLAUSE => "SPLITCLAUSE" + + case SHAREDterm => "SHAREDterm" + case SHAREDtype => "SHAREDtype" + case TERMREFdirect => "TERMREFdirect" + case TYPEREFdirect => "TYPEREFdirect" + case TERMREFpkg 
=> "TERMREFpkg" + case TYPEREFpkg => "TYPEREFpkg" + case RECthis => "RECthis" + case BYTEconst => "BYTEconst" + case SHORTconst => "SHORTconst" + case CHARconst => "CHARconst" + case INTconst => "INTconst" + case LONGconst => "LONGconst" + case FLOATconst => "FLOATconst" + case DOUBLEconst => "DOUBLEconst" + case STRINGconst => "STRINGconst" + case RECtype => "RECtype" + + case IDENT => "IDENT" + case IDENTtpt => "IDENTtpt" + case SELECT => "SELECT" + case SELECTtpt => "SELECTtpt" + case TERMREFsymbol => "TERMREFsymbol" + case TERMREF => "TERMREF" + case TYPEREFsymbol => "TYPEREFsymbol" + case TYPEREF => "TYPEREF" + + case PACKAGE => "PACKAGE" + case VALDEF => "VALDEF" + case DEFDEF => "DEFDEF" + case TYPEDEF => "TYPEDEF" + case IMPORT => "IMPORT" + case EXPORT => "EXPORT" + case TYPEPARAM => "TYPEPARAM" + case PARAM => "PARAM" + case IMPORTED => "IMPORTED" + case RENAMED => "RENAMED" + case BOUNDED => "BOUNDED" + case APPLY => "APPLY" + case TYPEAPPLY => "TYPEAPPLY" + case APPLYsigpoly => "APPLYsigpoly" + case NEW => "NEW" + case THROW => "THROW" + case TYPED => "TYPED" + case NAMEDARG => "NAMEDARG" + case ASSIGN => "ASSIGN" + case BLOCK => "BLOCK" + case IF => "IF" + case LAMBDA => "LAMBDA" + case MATCH => "MATCH" + case RETURN => "RETURN" + case WHILE => "WHILE" + case INLINED => "INLINED" + case SELECTouter => "SELECTouter" + case TRY => "TRY" + case REPEATED => "REPEATED" + case BIND => "BIND" + case ALTERNATIVE => "ALTERNATIVE" + case UNAPPLY => "UNAPPLY" + case ANNOTATEDtype => "ANNOTATEDtype" + case ANNOTATEDtpt => "ANNOTATEDtpt" + case CASEDEF => "CASEDEF" + case IMPLICITarg => "IMPLICITarg" + case TEMPLATE => "TEMPLATE" + case SELFDEF => "SELFDEF" + case THIS => "THIS" + case QUALTHIS => "QUALTHIS" + case SUPER => "SUPER" + case CLASSconst => "CLASSconst" + case SINGLETONtpt => "SINGLETONtpt" + case SUPERtype => "SUPERtype" + case TERMREFin => "TERMREFin" + case TYPEREFin => "TYPEREFin" + case SELECTin => "SELECTin" + + case REFINEDtype => "REFINEDtype" + 
case REFINEDtpt => "REFINEDtpt" + case APPLIEDtype => "APPLIEDtype" + case APPLIEDtpt => "APPLIEDtpt" + case TYPEBOUNDS => "TYPEBOUNDS" + case TYPEBOUNDStpt => "TYPEBOUNDStpt" + case ANDtype => "ANDtype" + case ORtype => "ORtype" + case BYNAMEtype => "BYNAMEtype" + case BYNAMEtpt => "BYNAMEtpt" + case POLYtype => "POLYtype" + case METHODtype => "METHODtype" + case TYPELAMBDAtype => "TYPELAMBDAtype" + case LAMBDAtpt => "LAMBDAtpt" + case MATCHtype => "MATCHtype" + case MATCHCASEtype => "MATCHCASEtype" + case MATCHtpt => "MATCHtpt" + case PARAMtype => "PARAMtype" + case FLEXIBLEtype => "FLEXIBLEtype" + case ANNOTATION => "ANNOTATION" + case PRIVATEqualified => "PRIVATEqualified" + case PROTECTEDqualified => "PROTECTEDqualified" + case EXPLICITtpt => "EXPLICITtpt" + case ELIDED => "ELIDED" + case QUOTE => "QUOTE" + case SPLICE => "SPLICE" + case QUOTEPATTERN => "QUOTEPATTERN" + case SPLICEPATTERN => "SPLICEPATTERN" + case HOLE => "HOLE" + } + + def attributeTagToString(tag: Int): String = tag match { + case SCALA2STANDARDLIBRARYattr => "SCALA2STANDARDLIBRARYattr" + case EXPLICITNULLSattr => "EXPLICITNULLSattr" + case CAPTURECHECKEDattr => "CAPTURECHECKEDattr" + case WITHPUREFUNSattr => "WITHPUREFUNSattr" + case JAVAattr => "JAVAattr" + case OUTLINEattr => "OUTLINEattr" + case SOURCEFILEattr => "SOURCEFILEattr" + } + + /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry. + * If negative, minus the number of leading non-reference trees. 
+ */ + def numRefs(tag: Int): Int = tag match { + case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | RETURN | BIND | + SELFDEF | REFINEDtype | TERMREFin | TYPEREFin | SELECTin | HOLE => 1 + case RENAMED | PARAMtype => 2 + case POLYtype | TYPELAMBDAtype | METHODtype => -1 + case _ => 0 + } +} diff --git a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala new file mode 100644 index 000000000000..40b20623dc9f --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala @@ -0,0 +1,205 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +import java.util.UUID + +import TastyFormat.{MajorVersion, MinorVersion, ExperimentalVersion, header} + +// /** +// * The Tasty Header consists of four fields: +// * - uuid +// * - contains a hash of the sections of the TASTy file +// * - majorVersion +// * - matching the TASTy format version that last broke backwards compatibility +// * - minorVersion +// * - matching the TASTy format version that last broke forward compatibility +// * - experimentalVersion +// * - 0 for final compiler version +// * - positive for between minor versions and forward compatibility +// * is broken since the previous stable version. +// * - toolingVersion +// * - arbitrary string representing the tooling that produced the TASTy +// */ +// sealed abstract case class TastyHeader( +// uuid: UUID, +// majorVersion: Int, +// minorVersion: Int, +// experimentalVersion: Int, +// toolingVersion: String +// ) + +trait UnpicklerConfig { + /** The TASTy version that this reader supports */ + def toolVersion: TastyVersion + /** TASTy versions that this tool can pretend to be (e.g. 
for testing against Scala 3 RCs). + * Even though this can accept experimental versions, nsc will still forbid usage + * of experimental API (behave as a stable compiler). + */ + def toolOverrides: List[TastyVersion] + /** The description of the upgraded tool that can read the given TASTy version */ + def upgradeReaderHowTo(version: TastyVersion): String + /** The description of the upgraded tool that can produce the given TASTy version */ + def upgradedProducerTool(version: TastyVersion): String + /** Additional information to help a user fix the outdated TASTy problem */ + def recompileAdditionalInfo: String + /** Additional information to help a user fix the more recent TASTy problem */ + def upgradeAdditionalInfo(fileVersion: TastyVersion): String +} + +object UnpicklerConfig { + + /** A config where its major, minor and experimental versions are fixed to those in TastyFormat */ + trait DefaultTastyVersion extends UnpicklerConfig { + override final val toolVersion: TastyVersion = TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion) + } +} + +class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { + import TastyHeaderUnpickler._ + import reader._ + + def this(config: UnpicklerConfig, bytes: Array[Byte]) = this(config, new TastyReader(bytes)) + // def this(reader: TastyReader) = this(UnpicklerConfig.generic, reader) + // def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + + /** reads and verifies the TASTy version, extracting the UUID */ + def readHeader(): UUID = { + for (i <- 0 until header.length) + check(readByte() == header(i), "not a TASTy file") + val fileMajor = readNat() + if (fileMajor <= 27) { // old behavior before `tasty-core` 3.0.0-M4 + val fileMinor = readNat() + val fileVersion = TastyVersion(fileMajor, fileMinor, 0) + val signature = signatureString(fileVersion, config.toolVersion, what = "Backward", tool = None) + val fix = recompileFix(config.toolVersion.minStable) + throw new UnpickleException(signature + 
fix + tastyAddendum) + } + else { + val fileMinor = readNat() + val fileExperimental = readNat() + val toolingVersion = { + val length = readNat() + val start = currentAddr + val end = start + length + goto(end) + new String(bytes, start.index, length) + } + + val validVersion = TastyFormat.isVersionCompatible( + fileMajor = fileMajor, + fileMinor = fileMinor, + fileExperimental = fileExperimental, + compilerMajor = config.toolVersion.major, + compilerMinor = config.toolVersion.minor, + compilerExperimental = config.toolVersion.experimental + ) + + val possibles = config.toolOverrides + val validOverride = possibles.isEmpty || possibles.exists { overrideVersion => + TastyFormat.isVersionCompatible( + fileMajor = fileMajor, + fileMinor = fileMinor, + fileExperimental = fileExperimental, + compilerMajor = overrideVersion.major, + compilerMinor = overrideVersion.minor, + compilerExperimental = overrideVersion.experimental + ) + } + + check(validVersion || validOverride, { + // failure means that the TASTy file cannot be read, therefore it is either: + // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. 
+ val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = config.toolVersion) + + val what = if (compat < 0) "Backward" else "Forward" + val signature = signatureString(fileVersion, config.toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) config.toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else config.toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler) + } + else upgradeFix(fileVersion) + ) + signature + fix + tastyAddendum + }) + + new UUID(readUncompressedLong(), readUncompressedLong()) + } + } + + def isAtEnd: Boolean = reader.isAtEnd + + private def check(cond: Boolean, msg: => String): Unit = { + if (!cond) throw new UnpickleException(msg) + } + + private def signatureString( + fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { + val optProducedBy = tool.fold("")(t => s", produced by $t") + s"""$what incompatible TASTy file has version ${fileVersion.show}$optProducedBy, + | expected ${toolVersion.validRange}. 
+ |""".stripMargin + } + + private def recompileFix(producerVersion: TastyVersion) = { + val addendum = config.recompileAdditionalInfo + val newTool = config.upgradedProducerTool(producerVersion) + s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin + } + + private def upgradeFix(fileVersion: TastyVersion) = { + val addendum = config.upgradeAdditionalInfo(fileVersion) + val newToolHowTo = config.upgradeReaderHowTo(fileVersion) + s""" To read this ${fileVersion.kind} file, $newToolHowTo.$addendum""".stripMargin + } + + private def tastyAddendum: String = """ + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin +} + +object TastyHeaderUnpickler { + + private object Compatibility { + final val BackwardIncompatibleMajor = -3 + final val BackwardIncompatibleExperimental = -2 + final val ExperimentalRecompile = -1 + final val ExperimentalUpgrade = 1 + final val ForwardIncompatible = 2 + + /** Given that file can't be read, extract the reason */ + def failReason(file: TastyVersion, read: TastyVersion): Int = + if (file.major == read.major && file.minor == read.minor && file.isExperimental && read.isExperimental) { + if (file.experimental < read.experimental) ExperimentalRecompile // recompile library as compiler is too new + else ExperimentalUpgrade // they should upgrade compiler as library is too new + } + else if (file.major < read.major) + BackwardIncompatibleMajor // pre 3.0.0 + else if (file.isExperimental && file.major == read.major && file.minor <= read.minor) + // e.g. 
3.4.0 reading 3.4.0-RC1-NIGHTLY, or 3.3.0 reading 3.0.2-RC1-NIGHTLY + BackwardIncompatibleExperimental + else ForwardIncompatible + } +} diff --git a/src/compiler/scala/tools/tasty/TastyName.scala b/src/compiler/scala/tools/tasty/TastyName.scala new file mode 100644 index 000000000000..1b84656078ea --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyName.scala @@ -0,0 +1,226 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +import scala.reflect.NameTransformer + +object TastyName { + + // TODO [tasty]: cache chars for Names. SimpleName acts as a cursor + + final case class SimpleName(raw: String) extends TastyName + final case class ObjectName(base: TastyName) extends TastyName + final case class QualifiedName(qual: TastyName, sep: SimpleName, selector: SimpleName) extends TastyName + final case class UniqueName(qual: TastyName, sep: SimpleName, num: Int) extends TastyName + final case class DefaultName(qual: TastyName, num: Int) extends TastyName + final case class PrefixName(prefix: SimpleName, qual: TastyName) extends TastyName + final case class SuffixName(qual: TastyName, suffix: SimpleName) extends TastyName + final case class TypeName private (base: TastyName) extends TastyName + final case class SignedName( + qual: TastyName, + sig: Signature.MethodSignature[ErasedTypeRef], + target: TastyName) extends TastyName + + object TypeName { + private[TastyName] def apply(base: TastyName): TypeName = base match { + case name: TypeName => name + case name => new TypeName(name) + } + } + + // Separators + final val PathSep: SimpleName = SimpleName(".") + final val ExpandedSep: SimpleName = SimpleName("$$") + final val ExpandPrefixSep: SimpleName = SimpleName("$") + final val 
WildcardSep: SimpleName = SimpleName("_$") + final val InlinePrefix: SimpleName = SimpleName("inline$") + final val SuperPrefix: SimpleName = SimpleName("super$") + final val BodyRetainerSuffix: SimpleName = SimpleName("$retainedBody") + + // TermNames + final val Empty: SimpleName = SimpleName("") + final val Constructor: SimpleName = SimpleName("<init>") + final val MixinConstructor: SimpleName = SimpleName("$init$") + final val EmptyPkg: SimpleName = SimpleName("<empty>") + final val Root: SimpleName = SimpleName("<root>") + final val RootPkg: SimpleName = SimpleName("_root_") + + // TypeNames + final val RepeatedClass: TypeName = SimpleName("<repeated>").toTypeName + final val EmptyTpe: TypeName = Empty.toTypeName + + object WildcardName { + def unapply(name: TastyName): Boolean = name match { + case UniqueName(Empty, WildcardSep, _) => true + case _ => false + } + } + + final val DefaultGetterStr = "$default$" + final val DefaultGetterInitStr = NameTransformer.encode("<init>") + DefaultGetterStr + + trait NameEncoder[U] { + final def encode[O](name: TastyName)(init: => U, finish: U => O): O = finish(traverse(init, name)) + def traverse(u: U, name: TastyName): U + } + + trait StringBuilderEncoder extends NameEncoder[StringBuilder] { + final def encode(name: TastyName): String = name match { + case SimpleName(raw) => raw + case _ => super.encode(name)(new StringBuilder(25), _.toString) + } + } + + /** Converts a name to a representation closest to source code. 
+ */ + object SourceEncoder extends StringBuilderEncoder { + def traverse(sb: StringBuilder, name: TastyName): StringBuilder = name match { + case name: SimpleName => sb append (name.raw) + case name: ObjectName => traverse(sb, name.base) + case name: TypeName => traverse(sb, name.base) + case name: SignedName => traverse(sb, name.qual) + case name: UniqueName => traverse(sb, name.qual) append (name.sep.raw) append (name.num) + case name: QualifiedName => traverse(traverse(sb, name.qual) append (name.sep.raw), name.selector) + case name: PrefixName => traverse(sb append (name.prefix.raw), name.qual) + case name: SuffixName => traverse(sb, name.qual) append (name.suffix.raw) + + case name: DefaultName if name.qual == Constructor => + sb append DefaultGetterInitStr append (name.num + 1) + + case name: DefaultName => traverse(sb, name.qual) append DefaultGetterStr append (name.num + 1) + } + } + + /** Displays formatted information about the structure of the name + */ + object DebugEncoder extends StringBuilderEncoder { + import Signature.merge + + def traverse(sb: StringBuilder, name: TastyName): StringBuilder = name match { + + case SimpleName(raw) => sb append raw + case DefaultName(qual, num) => traverse(sb, qual) append "[Default " append (num + 1) append ']' + case PrefixName(prefix, qual) => traverse(sb, qual) append "[Prefix " append (prefix.raw) append ']' + case SuffixName(qual, suffix) => traverse(sb, qual) append "[Suffix " append (suffix.raw) append ']' + case ObjectName(name) => traverse(sb, name) append "[ModuleClass]" + case TypeName(name) => traverse(sb, name) append "[Type]" + case SignedName(name, sig, target) => merge(traverse(sb, name) append "[Signed ", sig.map(_.signature)) append " @" append target.source append ']' + + case QualifiedName(qual, sep, name) => + traverse(sb, qual) append "[Qualified " append (sep.raw) append ' ' append (name.raw) append ']' + + case UniqueName(qual, sep, num) => + traverse(sb, qual) append "[Unique " append 
(sep.raw) append ' ' append num append ']' + + } + + } + + /** Encodes names as expected by the Scala Reflect SymbolTable + */ + object ScalaNameEncoder extends NameEncoder[StringBuilder] { + + /** Escapes all symbolic characters. Special names should be handled before calling this. + */ + final def encode(name: TastyName): String = name match { + case SimpleName(raw) => NameTransformer.encode(raw) + case _ => super.encode(name)(new StringBuilder(25), _.toString) + } + + def traverse(sb: StringBuilder, name: TastyName): StringBuilder = name match { + case name: SimpleName => sb.append(NameTransformer.encode(name.raw)) + case name: ObjectName => traverse(sb, name.base) + case name: TypeName => traverse(sb, name.base) + case name: SignedName => traverse(sb, name.qual) + case name: UniqueName => traverse(sb, name.qual) append (name.sep.raw) append (name.num) + case name: QualifiedName => traverse(traverse(sb, name.qual) append (name.sep.raw), name.selector) + case name: PrefixName => traverse(sb append (name.prefix.raw), name.qual) + case name: SuffixName => traverse(sb, name.qual) append (name.suffix.raw) + + case name: DefaultName if name.qual == Constructor => sb append DefaultGetterInitStr append (name.num + 1) + + case name: DefaultName => traverse(sb, name.qual) append DefaultGetterStr append (name.num + 1) + } + + } + + def deepEncode(name: TastyName): TastyName = name match { + case SimpleName(raw) => SimpleName(NameTransformer.encode(raw)) + case QualifiedName(qual, sep, selector) => QualifiedName(deepEncode(qual), sep, deepEncode(selector).asSimpleName) + case ObjectName(base) => ObjectName(deepEncode(base)) + case UniqueName(qual, sep, num) => UniqueName(deepEncode(qual), sep, num) + case DefaultName(qual, num) => DefaultName(deepEncode(qual), num) + case PrefixName(prefix, qual) => PrefixName(prefix, deepEncode(qual)) + case SuffixName(qual, suffix) => SuffixName(deepEncode(qual), suffix) + case TypeName(base) => TypeName(deepEncode(base)) + case 
SignedName(qual, sig, target) => SignedName(deepEncode(qual), sig.map(_.encode), target) + } + +} + +/**This is a data structure representing semantic names. `TastyName` is the interface that TASTy uses to select + * members from a type, providing more information than simple strings, such as selecting types over terms, + * companion module instead of a class, or signals if a term is a default getter. + * Names can also be a `SignedName`, which is used to select an overloaded method, and pairs a name with a + * `MethodSignature` whose types are represented by `ErasedTypeRef`. + */ +sealed abstract class TastyName extends Product with Serializable { self => + import TastyName._ + + final override def toString: String = source + + final def isObjectName: Boolean = self.isInstanceOf[ObjectName] + final def isDefaultName: Boolean = self.isInstanceOf[DefaultName] + final def isTypeName: Boolean = self.isInstanceOf[TypeName] + final def isTermName: Boolean = !isTypeName + final def isEmpty: Boolean = toTermName == Empty + final def isConstructorName = self == TastyName.Constructor || self == TastyName.MixinConstructor + + final def asSimpleName: SimpleName = self match { + case self: SimpleName => self + case _ => throw new AssertionError(s"not simplename: ${self.debug}") + } + + /** The name as expected by the Scala Reflect SymbolTable + */ + final def encoded: String = ScalaNameEncoder.encode(self) + + /** The name as represented in source code + */ + final def source: String = SourceEncoder.encode(self) + + /** Debug information about the structure of the name. 
+ */ + final def debug: String = DebugEncoder.encode(self) + + final def toTermName: TastyName = self match { + case TypeName(name) => name + case name => name + } + + final def toTypeName: TypeName = TypeName(self) + + final def stripSignedPart: TastyName = self match { + case SignedName(pre, _, _) => pre + case name => name + } + + final def isSignedConstructor = self match { + case SignedName(TastyName.Constructor, sig, _) if isMethodSignature(sig) => true + case _ => false + } + + /** Guard against API change to SignedName */ + @inline private final def isMethodSignature(sig: Signature.MethodSignature[ErasedTypeRef]): true = true + +} diff --git a/src/compiler/scala/tools/tasty/TastyReader.scala b/src/compiler/scala/tools/tasty/TastyReader.scala new file mode 100644 index 000000000000..1ea12df2dbad --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyReader.scala @@ -0,0 +1,154 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tasty + +import TastyRefs._ +import collection.mutable + +/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format, + * and that supports reading and patching addresses represented as natural numbers. 
+ * + * @param bytes The array containing data + * @param start The position from which to read + * @param end The position one greater than the last byte to be read + * @param base The index referenced by the logical zero address Addr(0) + */ +class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) { + + def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length) + + private[this] var bp: Int = start + + def addr(idx: Int): Addr = Addr(idx - base) + def index(addr: Addr): Int = addr.index + base + + /** The address of the first byte to read, respectively byte that was read */ + def startAddr: Addr = addr(start) + + /** The address of the next byte to read */ + def currentAddr: Addr = addr(bp) + + /** the address one greater than the last byte to read */ + def endAddr: Addr = addr(end) + + /** Have all bytes been read? */ + def isAtEnd: Boolean = bp == end + + /** A new reader over the same array with the same address base, but with + * specified start and end positions + */ + def subReader(start: Addr, end: Addr): TastyReader = + new TastyReader(bytes, index(start), index(end), base) + + /** Read a byte of data. */ + def readByte(): Int = { + val result = bytes(bp) & 0xff + bp += 1 + result + } + + /** Returns the next byte of data as a natural number without advancing the read position */ + def nextByte: Int = bytes(bp) & 0xff + + /** Read the next `n` bytes of `data`. */ + def readBytes(n: Int): Array[Byte] = { + val result = new Array[Byte](n) + System.arraycopy(bytes, bp, result, 0, n) + bp += n + result + } + + /** Read a natural number fitting in an Int in big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def readNat(): Int = readLongNat().toInt + + /** Read an integer number in 2's complement big endian format, base 128. + * All but the last digits have bit 0x80 set. + */ + def readInt(): Int = readLongInt().toInt + + /** Read a natural number fitting in a Long in big endian format, base 128. 
+ * All but the last digits have bit 0x80 set. + */ + def readLongNat(): Long = { + var b = 0L + var x = 0L + while ({ + b = bytes(bp) + x = (x << 7) | (b & 0x7f) + bp += 1 + (b & 0x80) == 0 + }) () + x + } + + /** Read a long integer number in 2's complement big endian format, base 128. */ + def readLongInt(): Long = { + var b = bytes(bp) + var x: Long = (b << 1).toByte >> 1 // sign extend with bit 6. + bp += 1 + while ((b & 0x80) == 0) { + b = bytes(bp) + x = (x << 7) | (b & 0x7f) + bp += 1 + } + x + } + + /** Read an uncompressed Long stored in 8 bytes in big endian format */ + def readUncompressedLong(): Long = { + var x: Long = 0 + for (_ <- 0 to 7) + x = (x << 8) | (readByte() & 0xff) + x + } + + /** Read a natural number and return as a NameRef */ + def readNameRef(): NameRef = NameRef(readNat()) + + /** Read a natural number and return as an address */ + def readAddr(): Addr = Addr(readNat()) + + /** Read a length number and return the absolute end address implied by it, + * given as
    + <address following length field> + <length-value-read>. + */ + def readEnd(): Addr = addr(readNat() + bp) + + /** Set read position to the one pointed to by `addr` */ + def goto(addr: Addr): Unit = + bp = index(addr) + + /** Perform `op` until `end` address is reached and collect results in a list. */ + def until[T](end: Addr)(op: => T): List[T] = { + val buf = new mutable.ListBuffer[T] + doUntil(end)(buf += op) + buf.toList + } + + def doUntil(end: Addr)(op: => Unit): Unit = { + while (bp < index(end)) op + assert(bp == index(end)) + } + + /** If before given `end` address, the result of `op`, otherwise `default` */ + def ifBefore[T](end: Addr)(op: => T, default: T): T = + if (bp < index(end)) op else default + + /** Perform `op` while condition `cond` holds and collect results in a list. */ + def collectWhile[T](cond: => Boolean)(op: => T): List[T] = { + val buf = new mutable.ListBuffer[T] + while (cond) buf += op + buf.toList + } +} diff --git a/src/compiler/scala/tools/tasty/TastyRefs.scala b/src/compiler/scala/tools/tasty/TastyRefs.scala new file mode 100644 index 000000000000..dc515b9317f7 --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyRefs.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tasty + +object TastyRefs { + + /** An address pointing to an index in a Tasty buffer's byte array */ + case class Addr(index: Int) extends AnyVal { + def - (delta: Int): Addr = Addr(this.index - delta) + def + (delta: Int): Addr = Addr(this.index + delta) + + def relativeTo(base: Addr): Addr = this - base.index - AddrWidth + + def ==(that: Addr): Boolean = this.index == that.index + def !=(that: Addr): Boolean = this.index != that.index + } + + val NoAddr: Addr = Addr(-1) + + /** The maximal number of address bytes. + * Since addresses are written as base-128 natural numbers, + * the value of 4 gives a maximal array size of 256M. + */ + final val AddrWidth = 4 + + /** An address referring to a serialized name */ + case class NameRef(index: Int) extends AnyVal +} diff --git a/src/compiler/scala/tools/tasty/TastyVersion.scala b/src/compiler/scala/tools/tasty/TastyVersion.scala new file mode 100644 index 000000000000..65eb5e2f4a0b --- /dev/null +++ b/src/compiler/scala/tools/tasty/TastyVersion.scala @@ -0,0 +1,50 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tasty + +// sourced from: https://github.com/scala/scala3/blob/release-3.4.0/tasty/src/dotty/tools/tasty/TastyVersion.scala + +case class TastyVersion private(major: Int, minor: Int, experimental: Int) { + def isExperimental: Boolean = experimental > 0 + + def nextStable: TastyVersion = copy(experimental = 0) + + def minStable: TastyVersion = copy(minor = 0, experimental = 0) + + def show: String = { + val suffix = if (isExperimental) s"-experimental-$experimental" else "" + s"$major.$minor$suffix" + } + + def kind: String = + if (isExperimental) "experimental TASTy" else "TASTy" + + def validRange: String = { + val min = TastyVersion(major, 0, 0) + val max = if (experimental == 0) this else TastyVersion(major, minor - 1, 0) + val extra = Option.when(experimental > 0)(this) + s"stable TASTy from ${min.show} to ${max.show}${extra.fold("")(e => s", or exactly ${e.show}")}" + } +} + +object TastyVersion { + + private val cache: java.util.concurrent.ConcurrentHashMap[TastyVersion, TastyVersion] = + new java.util.concurrent.ConcurrentHashMap() + + def apply(major: Int, minor: Int, experimental: Int): TastyVersion = { + val version = new TastyVersion(major, minor, experimental) + val cachedVersion = cache.putIfAbsent(version, version) + if (cachedVersion == null) version else cachedVersion + } +} diff --git a/src/compiler/scala/tools/tasty/UnpickleException.scala b/src/compiler/scala/tools/tasty/UnpickleException.scala new file mode 100644 index 000000000000..b385c4635065 --- /dev/null +++ b/src/compiler/scala/tools/tasty/UnpickleException.scala @@ -0,0 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tasty + +class UnpickleException(msg: String) extends RuntimeException(msg) diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index c872c9448ad1..e1541a4ec44a 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,12 +16,12 @@ package util import java.net.URL +import scala.reflect.io.{Directory, File, Path} import scala.tools.reflect.WrappedProperties.AccessControl import scala.tools.nsc.{CloseableRegistry, Settings} +import scala.tools.nsc.classpath._ import scala.tools.nsc.util.ClassPath -import scala.reflect.io.{Directory, File, Path} import PartialFunction.condOpt -import scala.tools.nsc.classpath._ // Loosely based on the draft specification at: // https://wiki.scala-lang.org/display/SIW/Classpath @@ -29,15 +29,15 @@ import scala.tools.nsc.classpath._ object PathResolver { // Imports property/environment functions which suppress security exceptions. 
import AccessControl._ - import scala.compat.Platform.EOL + import java.lang.System.{lineSeparator => EOL} - implicit class MkLines(val t: TraversableOnce[_]) extends AnyVal { - def mkLines: String = t.mkString("", EOL, EOL) + implicit class MkLines(val t: IterableOnce[_]) extends AnyVal { + def mkLines: String = t.iterator.mkString("", EOL, EOL) def mkLines(header: String, indented: Boolean = false, embraced: Boolean = false): String = { val space = "\u0020" val sep = if (indented) EOL + space * 2 else EOL val (lbrace, rbrace) = if (embraced) (space + "{", EOL + "}") else ("", "") - t.mkString(header + lbrace + sep, sep, rbrace + EOL) + t.iterator.mkString(header + lbrace + sep, sep, rbrace + EOL) } } implicit class AsLines(val s: String) extends AnyVal { @@ -55,7 +55,7 @@ object PathResolver { /** Values found solely by inspecting environment or property variables. */ object Environment { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ private def searchForBootClasspath: String = { val props = System.getProperties @@ -199,6 +199,7 @@ object PathResolver { pr.result match { case cp: AggregateClassPath => println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + case x => throw new MatchError(x) } } finally { registry.close() @@ -208,27 +209,23 @@ object PathResolver { final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry) { - @deprecated("for bincompat in 2.12.x series", "2.12.9") // TODO remove from 2.13.x - def this(settings: Settings) = this(settings, new CloseableRegistry) - private val classPathFactory = new ClassPathFactory(settings, closeableRegistry) - import PathResolver.{ AsLines, Defaults, ppcp } + import PathResolver.{AsLines, Defaults, ppcp} - private def cmdLineOrElse(name: String, alt: String) = { - (commandLineFor(name) match { - case Some("") => None - case x => x - }) getOrElse alt - } + private def 
cmdLineOrElse(name: String, alt: String) = + commandLineFor(name) match { + case Some("") | None => alt + case Some(x) => x + } private def commandLineFor(s: String): Option[String] = condOpt(s) { - case "javabootclasspath" => settings.javabootclasspath.value - case "javaextdirs" => settings.javaextdirs.value - case "bootclasspath" => settings.bootclasspath.value - case "extdirs" => settings.extdirs.value - case "classpath" | "cp" => settings.classpath.value - case "sourcepath" => settings.sourcepath.value + case "javabootclasspath" => settings.javabootclasspath.value + case "javaextdirs" => settings.javaextdirs.value + case "bootclasspath" => settings.bootclasspath.value + case "extdirs" => settings.extdirs.value + case "classpath" | "cp" => settings.classpath.value + case "sourcepath" => settings.sourcepath.value } /** Calculated values based on any given command line options, falling back on @@ -249,7 +246,7 @@ final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistr * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) * [scaladoc] ^ * because the bootstrapping will look at the sourcepath and create package "reflect" in "" - * and then when typing relative names, instead of picking .scala.relect, typedIdentifier will pick up the + * and then when typing relative names, instead of picking .scala.reflect, typedIdentifier will pick up the * .reflect package created by the bootstrapping. Thus, no bootstrapping for scaladoc! * TODO: we should refactor this as a separate -bootstrap option to have a clean implementation, no? */ def sourcePath = if (!settings.isScaladoc) cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) else "" @@ -259,7 +256,7 @@ final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistr import classPathFactory._ // Assemble the elements! 
- def basis = List[Traversable[ClassPath]]( + def basis = List[Iterable[ClassPath]]( if (settings.javabootclasspath.isSetByUser) // respect explicit `-javabootclasspath rt.jar` Nil else jrt, // 0. The Java 9+ classpath (backed by the ct.sym or jrt:/ virtual system, if available) @@ -273,7 +270,7 @@ final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistr sourcesInPath(sourcePath) // 7. The Scala source path. ) - private def jrt: List[ClassPath] = JrtClassPath.apply(settings.releaseValue, settings.unsafe.valueSetByUser, closeableRegistry) + private def jrt: List[ClassPath] = JrtClassPath.apply(settings.releaseValue, settings.systemPathValue, settings.unsafe.valueSetByUser, closeableRegistry) lazy val containers = basis.flatten.distinct @@ -297,13 +294,13 @@ final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistr def result: ClassPath = { val cp = computeResult() - if (settings.Ylogcp) { - Console print f"Classpath built from ${settings.toConciseString} %n" - Console print s"Defaults: ${PathResolver.Defaults}" - Console print s"Calculated: $Calculated" + if (settings.Ylogcp.value) { + Console.print(f"Classpath built from ${settings.toConciseString} %n") + Console.print(s"Defaults: ${PathResolver.Defaults}") + Console.print(s"Calculated: $Calculated") - val xs = (Calculated.basis drop 2).flatten.distinct - Console print (xs mkLines (s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true)) + val xs = Calculated.basis.drop(2).flatten.distinct + Console.print(xs.mkLines(s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true)) } cp } diff --git a/src/compiler/scala/tools/util/SystemExit.scala b/src/compiler/scala/tools/util/SystemExit.scala index 7b96691fe434..f74d66da7c30 100644 --- a/src/compiler/scala/tools/util/SystemExit.scala +++ b/src/compiler/scala/tools/util/SystemExit.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,4 +20,4 @@ import scala.util.control.ControlThrowable * * @param code the exit code */ -final case class SystemExit(code: Int) extends Throwable(s"exit code $code") with ControlThrowable +final case class SystemExit(code: Int) extends ControlThrowable(s"exit code $code") diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala index 93f65564d2da..2eb59c24ab5d 100644 --- a/src/compiler/scala/tools/util/VerifyClass.scala +++ b/src/compiler/scala/tools/util/VerifyClass.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,7 @@ package scala.tools.util import scala.tools.nsc.io._ import java.net.URLClassLoader -import scala.collection.JavaConverters._ -import scala.language.postfixOps +import scala.jdk.CollectionConverters._ object VerifyClass { @@ -30,14 +29,14 @@ object VerifyClass { } } - def checkClassesInJar(name: String, cl: ClassLoader) = new Jar(File(name)) filter (_.getName.endsWith(".class")) map { x => + def checkClassesInJar(name: String, cl: ClassLoader) = new Jar(File(name)).filter(_.getName.endsWith(".class")).map { x => checkClass(x.getName.stripSuffix(".class").replace('/', '.'), cl) - } toMap + }.toMap def checkClassesInDir(name: String, cl: ClassLoader) = (for { file <- Path(name).walk if file.name endsWith ".class" - } yield checkClass(name, cl)) toMap + } yield checkClass(name, cl)).toMap def checkClasses(name: String, cl: ClassLoader) = if (name endsWith ".jar") checkClassesInJar(name, cl) diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/templates/tool-unix.tmpl similarity index 89% rename from 
src/compiler/scala/tools/ant/templates/tool-unix.tmpl rename to src/compiler/templates/tool-unix.tmpl index 9045e0547e0b..1a610fd1e5e8 100755 --- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl +++ b/src/compiler/templates/tool-unix.tmpl @@ -1,11 +1,15 @@ #!/usr/bin/env bash # ############################################################################## -# Copyright 2002-2013 LAMP/EPFL +# Scala (https://www.scala-lang.org) # -# This is free software; see the distribution for copying conditions. -# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. +# Copyright EPFL and Lightbend, Inc. dba Akka +# +# Licensed under Apache License 2.0 +# (http://www.apache.org/licenses/LICENSE-2.0). +# +# See the NOTICE file distributed with this work for +# additional information regarding copyright ownership. ############################################################################## findScalaHome () { @@ -102,19 +106,6 @@ if [[ -z "$TOOL_CLASSPATH" ]]; then done fi -if [[ -n "$cygwin" ]]; then - if [[ "$OS" = "Windows_NT" ]] && cygpath -m .>/dev/null 2>/dev/null ; then - format=mixed - else - format=windows - fi - SCALA_HOME="$(cygpath --$format "$SCALA_HOME")" - if [[ -n "$JAVA_HOME" ]]; then - JAVA_HOME="$(cygpath --$format "$JAVA_HOME")" - fi - TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")" -fi - if [[ -n "$cygwin$mingw$msys" ]]; then case "$TERM" in rxvt* | xterm* | cygwin*) @@ -180,6 +171,19 @@ done # reset "$@@" to the remaining args set -- "${scala_args[@@]}" +if [[ -n "$cygwin$mingw$msys" ]]; then + if [[ "$OS" = "Windows_NT" ]] && cygpath -m .>/dev/null 2>/dev/null ; then + format=mixed + else + format=windows + fi + SCALA_HOME="$(cygpath --$format "$SCALA_HOME")" + if [[ -n "$JAVA_HOME" ]]; then + JAVA_HOME="$(cygpath --$format "$JAVA_HOME")" + fi + TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")" +fi + if [[ -z "$JAVACMD" && -n "$JAVA_HOME" && -x "$JAVA_HOME/bin/java" ]]; then 
JAVACMD="$JAVA_HOME/bin/java" fi @@ -205,6 +209,14 @@ else classpath_args=(-classpath "$TOOL_CLASSPATH") fi +# Remove newline as delimiter for word splitting the java command. +# This permits the use case: +# export JAVA_OPTS=-Dline.separator=$'\r'$'\n' +# where otherwise the newline char is stripped after expansion. +# The following works with the default IFS: +# scala -J-Dline.separator=$'\r'$'\n' +IFS=" "$'\t' + # note that variables which may intentionally be empty must not # be quoted: otherwise an empty string will appear as a command line # argument, and java will think that is the program to run. @@ -219,6 +231,9 @@ execCommand \ @properties@ @class@ @toolflags@ "$@@" # record the exit status lest it be overwritten: -# then reenable echo and propagate the code. +# then restore IFS, reenable echo and propagate the code. scala_exit_status=$? + +unset IFS + onExit diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/templates/tool-windows.tmpl similarity index 87% rename from src/compiler/scala/tools/ant/templates/tool-windows.tmpl rename to src/compiler/templates/tool-windows.tmpl index 48e1c322237f..901e9a1a5c8a 100644 --- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl +++ b/src/compiler/templates/tool-windows.tmpl @@ -1,18 +1,22 @@ @@echo off rem ########################################################################## -rem # Copyright 2002-2013 LAMP/EPFL +rem # Scala (https://www.scala-lang.org) rem # -rem # This is free software; see the distribution for copying conditions. -rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A -rem # PARTICULAR PURPOSE. +rem # Copyright EPFL and Lightbend, Inc. dba Akka +rem # +rem # Licensed under Apache License 2.0 +rem # (http://www.apache.org/licenses/LICENSE-2.0). +rem # +rem # See the NOTICE file distributed with this work for +rem # additional information regarding copyright ownership. 
rem ########################################################################## setlocal enableextensions enabledelayedexpansion set _LINE_TOOLCP= -rem Use "%~1" to handle spaces in paths. See http://ss64.com/nt/syntax-args.html +rem Use "%~1" to handle spaces in paths. See https://ss64.com/nt/syntax-args.html rem scala/bug#7295 The goto here is needed to avoid problems with `scala Script.cmd "arg(with)paren"`, rem we must not evaluate %~2 eagerly, but delayed expansion doesn't seem to allow rem removal of quotation marks. @@ -136,6 +140,7 @@ if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%" set _PROPS=-Dscala.home="!_SCALA_HOME!" %_OVERRIDE_USEJAVACP% @properties@ +setlocal DisableDelayedExpansion rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %* "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %* goto end @@ -152,7 +157,7 @@ rem # subroutines goto :eof rem Variable "%~dps0" works on WinXP SP2 or newer -rem (see http://support.microsoft.com/?kbid=833431) +rem (see https://support.microsoft.com/?kbid=833431) rem set _SCALA_HOME=%~dps0.. :set_home set _BIN_DIR= @@ -163,5 +168,5 @@ goto :eof :end @@endlocal -REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu +REM exit code fix, see https://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu @@"%COMSPEC%" /C exit %errorlevel% >nul diff --git a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala deleted file mode 100644 index 89c924aa7e0a..000000000000 --- a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc - -import com.fasterxml.jackson.annotation._ -import com.fasterxml.jackson.core.util.DefaultPrettyPrinter -import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.dataformat.yaml.{YAMLFactory, YAMLGenerator} -import com.fasterxml.jackson.module.scala.DefaultScalaModule - -import scala.reflect.runtime.universe._ - -object ScalaCompilerOptionsExporter { - - case class Category(name: String, load: Int) extends Ordered[Category] { - def compare(that: Category): Int = (this.load) compare (that.load) - } - val StandardSettings = Category("Standard Settings", 0) - val JVMSettings = Category("JVM Settings", 1) - val PluginSettings = Category("Plugin Settings", 2) - val AdvancedSettings = Category("Advanced Settings", 3) - val PrivateSettings = Category("Private Settings", 4) - val WarningSettings = Category("Warning Settings", 5) - val IDESpecificSettings = Category("IDE Specific Settings", 6) - - trait JacksonWorkaround { - val category: String - } - @JsonIgnoreProperties(Array("_category")) - @JsonPropertyOrder(Array("category", "description", "options")) - case class Section(_category: Category, description: Option[String], options: List[ScalacOption]) extends JacksonWorkaround{ - val category: String = _category.name - } - case class ScalacOption( - option: String, - schema: Schema, - description: String, - abbreviations: Seq[String] = Seq.empty, - deprecated: Option[String] = None, - note: Option[String] = None - ) - case class Schema( - @JsonProperty("type") _type: String, - arg: Option[String] = None, - multiple: Option[Boolean] = None, - default: Option[Any] = None, - choices: Seq[Choice] = Seq.empty, - min: Option[Any] = None, - max: Option[Any] = None - ) - case class Choice(choice: String, 
description: Option[String] = None, deprecated: Option[String] = None) - - private val quoted = """`([^`']+)'""".r - - def markdownifyBackquote(string: String) : String = { - quoted.replaceAllIn(string, "`$1`") - } - - private val htmlTag = """<([^>]+)>""".r - def dehtmlfy(string: String) : String = { - htmlTag.replaceAllIn(string, "$1") - } - - def main(args: Array[String]): Unit = { - val writer = new java.io.StringWriter(2000) - - val runtimeMirror = scala.reflect.runtime.currentMirror - - val settings = new scala.tools.nsc.Settings(s => ()) - val instanceMirror = runtimeMirror.reflect(settings) - val sortedInOrderOfAppearance = runtimeMirror.classSymbol(settings.getClass).toType.members.sorted - val accessors = sortedInOrderOfAppearance.collect { - case m: MethodSymbol if m.isGetter && m.isPublic => m - } - - def mergeChoice(labels: Seq[String], descriptions: Seq[String]): Seq[Choice] = { - for { - (choice, d) <- (labels zipAll (descriptions, "", "")) - } yield { - Choice( - choice, - description = Option(d).map(markdownifyBackquote).map(dehtmlfy).filter(_.nonEmpty), - deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => d.toLowerCase.contains("deprecated")) - ) - } - } - - val extractedSettings : List[ScalacOption] = accessors.map(acc => instanceMirror.reflectMethod(acc).apply()).collect { - case s: settings.Setting => - val schema = s match { - case b: settings.BooleanSetting => - Schema(_type = "Boolean") - case i: settings.IntSetting => - Schema(_type="Int", default = Some(i.default), min = i.range.map(_._1), max = i.range.map(_._2)) - case c: settings.ChoiceSetting => - val choices = mergeChoice(c.choices, c.choicesHelp) - Schema(_type="Choice", arg = Some(c.helpArg).map(dehtmlfy), default = Option(c.default), choices = choices) - case mc: settings.MultiChoiceSetting[_] => - val choices = mergeChoice(mc.choices, mc.descriptions) - Schema(_type="Choice", multiple = Some(true), arg = Some(mc.helpArg).map(dehtmlfy), choices = choices) - case ps: 
settings.PhasesSetting => - Schema(_type="Phases", default = Option(ps.default)) - case px: settings.PrefixSetting => - Schema(_type="Prefix") - case sv: settings.ScalaVersionSetting => - Schema(_type="ScalaVersion", arg = Some(sv.arg).map(dehtmlfy), default = Some(sv.initial.unparse)) - case pathStr: settings.PathSetting => - Schema(_type="Path", arg = Some(pathStr.arg), default = Some(pathStr.default)) - case str: settings.StringSetting => - Schema(_type="String", arg = Some(str.arg).map(dehtmlfy), default = Some(str.default)) - case ms: settings.MultiStringSetting => - Schema(_type="String", multiple = Some(true), arg = Some(ms.arg).map(dehtmlfy)) - } - - ScalacOption( - option = s.name, - schema = schema, - description = dehtmlfy(markdownifyBackquote(s.helpDescription)), - abbreviations = s.abbreviations, - deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated")) - ) - } - - - val categoriezed = extractedSettings.groupBy { option => - val name = option.option - if (name.startsWith("-Xfatal-warnings") || name == "-Xlint" || name.startsWith("-Ywarn")) { - WarningSettings - } else if (name.startsWith("-Ypresentation")) { - IDESpecificSettings - } else if (name.startsWith("-X")) { - AdvancedSettings - } else if (name.startsWith("-Y") || name.startsWith("-opt") && name != "-optimise") { - PrivateSettings - } else if (name.startsWith("-P")) { - PluginSettings - } else if (name.startsWith("-J") || name.startsWith("-D") || name.startsWith("-nobootcp")) { - JVMSettings - } else { - StandardSettings - } - } - - val source = categoriezed.toSeq.sortBy(_._1).map { case (key, options) => - Section(key, Some("ADD_NICE_DESCRIPTION_HERE"),options = options) - } - - val yamlFactory = new YAMLFactory() - .disable(YAMLGenerator.Feature.SPLIT_LINES) - val mapper = new ObjectMapper(yamlFactory) - .registerModule(DefaultScalaModule) - .setSerializationInclusion(JsonInclude.Include.NON_EMPTY) - - mapper - .writer(new 
DefaultPrettyPrinter()) - .writeValue(writer, source) - // TODO: println can be deleted if write can write to file - println(writer.toString) - } -} diff --git a/src/eclipse/.gitignore b/src/eclipse/.gitignore deleted file mode 100644 index 8999e4d83971..000000000000 --- a/src/eclipse/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -**/.cache-* -**/.settings/ diff --git a/src/eclipse/README.md b/src/eclipse/README.md deleted file mode 100644 index c7a48273418d..000000000000 --- a/src/eclipse/README.md +++ /dev/null @@ -1,80 +0,0 @@ -Eclipse project files -===================== - -For important details on building, debugging and file encodings, please see [the excellent tutorial on scala-ide.org](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html). - -The following points describe how to build Scala using Eclipse. - -0. Download the [Scala IDE bundle](http://scala-ide.org/download/sdk.html). It comes preconfigured for optimal performance. - -0. Run `ant build` to download some necessary jars and see a successful build. - -0. You need to define a `path variable` and a `classpath variable` inside Eclipse, both pointing to the Scala checkout directory: - - (experimental): run `./update-workspace.sh scala_checkout_dir [workspace_dir]`. This should update your workspace settings - (restart Eclipse if it was running). For example: - ``` - ./update-workspace.sh $HOME/git/scala ~/Documents/workspace-scalac - ``` - - If the above didn't work, you can perform these steps manually: Define `SCALA_BASEDIR` in `Preferences/General/Workspace/Linked Resources`. The value should be the absolute -path to your Scala checkout. All paths in the project files are relative to this one, so nothing will work before you do so. -The same `SCALA_BASEDIR` variable needs to be defined **also** as a `classpath variable` in -`Java/Build Path/Classpath Variables`. - -0. Import the project (in `src/eclipse`) via `File` → `Import Existing Projects` and navigate to `scala/src/eclipse`. 
Check all projects and click ok. - - Lastly, the JRE used by Eclipse needs to know the path to the `JLine` library, which is used by the REPL. -To set the JAR file, navigate to `Java/Installed JREs`, select the default JRE, press `Edit/Add External JARs...` -and enter the path to JLine whose location is `SCALA_BASEDIR/build/deps/repl/jline-2.11.jar` (`SCALA_BASEDIR` cannot be entered, -it needs to be replaced with its absolute path). - -0. The Eclipse Java compiler does not allow certain calls to restricted APIs in the -JDK. The Scala library uses such APIs, so you'd see this error: - - Access restriction: The method compareAndSwapObject(Object, long, Object, Object) - from the type Unsafe is not accessible due to restriction on required library. - - You can *fix* it by allowing calls to restricted APIs in `Java/Compiler/Errors/Warnings/Deprecated and Restricted API` -settings. - -0. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them -from being shown as dirty in `git status`. You can still ignore them by telling Git to -consider them unchanged: - - git update-index --assume-unchanged `find src/eclipse -iname .classpath -or -iname .project` - - If you want to go back to normal (for instance, to commit your changes to project files), run: - - git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project` - -0. The 2.12, sources of Scala need to be built with a 2.12 version of the compiler. One can configure a 2.12 Scala installation -in Eclipse. In order to do this, go to `Window -> Preferences -> Scala -> Installations` and add a 2.12 installation. You can -either download a prepackaged version of 2.12 from the Scala homepage or you add the Scala installation that is part of the -`build/pack/lib` directory. The latter is required in case you absolutely need to depend on a nightly build of the compiler to -compile the compiler itself. 
Once the 2.12 Scala installation is created you need to select all Scala projects, do a right click -and select `Scala -> Set the Scala installation` where you have to choose the newly created 2.12 Scala installation. - -If it doesn’t compile -===================== - -The likely reason is that the build path of the imported projects isn’t correct. This can happen for instance -when the [versions.properties](https://github.com/scala/scala/blob/master/versions.properties) file is updated, -and Eclipse .classpath of the different projects isn’t updated accordingly. The fix is simple, manually inspect -the build path of each project and make sure the version of the declared dependencies is in sync with the version -declared in the `versions.properties` file. If it isn’t, update it manually and, when done, don’t forget to share -your changes via a pull request. -(We are aware this is cumbersome. If you feel like scripting the process, pull requests are of course welcome.) - -Launching & Debugging scalac -============================ - -Read [here](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html#Launching_and_Debugging_scalac). - -DETAILS -======= - -The compiler project depends on the library, reflect, and asm projects. The -builder will take care of the correct ordering, and changes in one project will -be picked up by the dependent projects. - -The output directory is set to be `build/quick`, so the runner scripts in quick -work as they are (they are generated after an ant build). 
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath deleted file mode 100644 index 9c02e9bb1aab..000000000000 --- a/src/eclipse/interactive/.classpath +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/src/eclipse/interactive/.gitignore b/src/eclipse/interactive/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/interactive/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/interactive/.project b/src/eclipse/interactive/.project deleted file mode 100644 index 1d30e0c001f9..000000000000 --- a/src/eclipse/interactive/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - interactive - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-interactive - 2 - SCALA_BASEDIR/build/quick/classes/interactive - - - interactive - 2 - SCALA_BASEDIR/src/interactive - - - lib - 2 - SCALA_BASEDIR/lib - - - diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath deleted file mode 100644 index 0e5ac5f29058..000000000000 --- a/src/eclipse/partest/.classpath +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - diff --git a/src/eclipse/partest/.gitignore b/src/eclipse/partest/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/partest/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/partest/.project b/src/eclipse/partest/.project deleted file mode 100644 index 5c0c851b80d3..000000000000 --- a/src/eclipse/partest/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - partest-extras - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-partest-extras - 2 - SCALA_BASEDIR/build/quick/classes/partest-extras - - - lib - 2 - 
SCALA_BASEDIR/lib - - - partest-extras - 2 - SCALA_BASEDIR/src/partest-extras - - - diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath deleted file mode 100644 index ee6bcd47da1a..000000000000 --- a/src/eclipse/reflect/.classpath +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/src/eclipse/reflect/.gitignore b/src/eclipse/reflect/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/reflect/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/reflect/.project b/src/eclipse/reflect/.project deleted file mode 100644 index 1e5cbb4ed9a3..000000000000 --- a/src/eclipse/reflect/.project +++ /dev/null @@ -1,30 +0,0 @@ - - - reflect - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-reflect - 2 - SCALA_BASEDIR/build/quick/classes/reflect - - - reflect - 2 - SCALA_BASEDIR/src/reflect - - - diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath deleted file mode 100644 index 4b5369096254..000000000000 --- a/src/eclipse/repl/.classpath +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/src/eclipse/repl/.gitignore b/src/eclipse/repl/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/repl/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/repl/.project b/src/eclipse/repl/.project deleted file mode 100644 index 69ad08ab1ad9..000000000000 --- a/src/eclipse/repl/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - repl - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-repl - 2 - SCALA_BASEDIR/build/quick/classes/repl - - - lib - 2 - SCALA_BASEDIR/lib - - - repl - 2 - SCALA_BASEDIR/src/repl - - - diff --git 
a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath deleted file mode 100644 index c4683059346b..000000000000 --- a/src/eclipse/scala-compiler/.classpath +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/src/eclipse/scala-compiler/.gitignore b/src/eclipse/scala-compiler/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/scala-compiler/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/scala-compiler/.project b/src/eclipse/scala-compiler/.project deleted file mode 100644 index cf8a68c8b68f..000000000000 --- a/src/eclipse/scala-compiler/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - scala-compiler - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-compiler - 2 - SCALA_BASEDIR/build/quick/classes/compiler - - - compiler - 2 - SCALA_BASEDIR/src/compiler - - - lib - 2 - SCALA_BASEDIR/lib - - - diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath deleted file mode 100644 index eff3c8e0b713..000000000000 --- a/src/eclipse/scala-library/.classpath +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/src/eclipse/scala-library/.gitignore b/src/eclipse/scala-library/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/scala-library/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/scala-library/.project b/src/eclipse/scala-library/.project deleted file mode 100644 index 049cf75e0b93..000000000000 --- a/src/eclipse/scala-library/.project +++ /dev/null @@ -1,30 +0,0 @@ - - - scala-library - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-lib - 2 - 
SCALA_BASEDIR/build/quick/classes/library - - - library - 2 - SCALA_BASEDIR/src/library - - - diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath deleted file mode 100644 index b84002a5f66b..000000000000 --- a/src/eclipse/scaladoc/.classpath +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/src/eclipse/scaladoc/.gitignore b/src/eclipse/scaladoc/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/scaladoc/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/scaladoc/.project b/src/eclipse/scaladoc/.project deleted file mode 100644 index bf7649039f09..000000000000 --- a/src/eclipse/scaladoc/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - scaladoc - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-scaladoc - 2 - SCALA_BASEDIR/build/quick/classes/scaladoc - - - lib - 2 - SCALA_BASEDIR/lib - - - scaladoc - 2 - SCALA_BASEDIR/src/scaladoc - - - diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath deleted file mode 100644 index 3b635cf56e29..000000000000 --- a/src/eclipse/scalap/.classpath +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - diff --git a/src/eclipse/scalap/.gitignore b/src/eclipse/scalap/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/scalap/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/scalap/.project b/src/eclipse/scalap/.project deleted file mode 100644 index 3599168e324c..000000000000 --- a/src/eclipse/scalap/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - scalap - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-quick-scalap - 2 - SCALA_BASEDIR/build/quick/classes/scalap - - - lib 
- 2 - SCALA_BASEDIR/lib - - - scalap - 2 - SCALA_BASEDIR/src/scalap - - - diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath deleted file mode 100644 index af112840b7d3..000000000000 --- a/src/eclipse/test-junit/.classpath +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/src/eclipse/test-junit/.gitignore b/src/eclipse/test-junit/.gitignore deleted file mode 100644 index fe789dd68628..000000000000 --- a/src/eclipse/test-junit/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# what appears to be a Scala IDE-generated file -.cache-main diff --git a/src/eclipse/test-junit/.project b/src/eclipse/test-junit/.project deleted file mode 100644 index 052b6c1b6fde..000000000000 --- a/src/eclipse/test-junit/.project +++ /dev/null @@ -1,35 +0,0 @@ - - - test-junit - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - build-test-junit - 2 - SCALA_BASEDIR/build/junit/classes - - - lib - 2 - SCALA_BASEDIR/lib - - - test-junit - 2 - SCALA_BASEDIR/test/junit - - - diff --git a/src/eclipse/update-workspace.sh b/src/eclipse/update-workspace.sh deleted file mode 100755 index 24382d1445ba..000000000000 --- a/src/eclipse/update-workspace.sh +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash - -function usage() { - echo "$0 scala_checkout_dir [workspace_dir]" - echo "\n Add necessary path variables to Eclipse workspace settings for Scalac to build" -} - -METADATA_DIR=`pwd`/.metadata - -if [ $# -lt 1 ]; then - echo "Need the Scala directory checkout as argument" - exit 1 -fi - -SCALA_DIR=$1 - -if [ ! -z $2 ]; then - METADATA_DIR=$2/.metadata -fi - -if [ ! -d $METADATA_DIR ]; then - echo "$METADATA_DIR is not a directory" - exit 1 -fi - -echo "Using metadata directory $METADATA_DIR and Scala checkout $SCALA_DIR" - -CORE_PREFS=$METADATA_DIR/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.core.resources.prefs -if [ ! 
-f $CORE_PREFS ]; then - echo "Couldn't find $CORE_PREFS. Is $METADATA_DIR an Eclipse workspace?" - exit 1 -fi -echo -e "Workspace preferences:\t$CORE_PREFS" - -JDT_PREFS=$METADATA_DIR/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jdt.core.prefs -if [ ! -f $JDT_PREFS ]; then - echo "Couldn't find $JDT_PREFS. Creating fresh file." - touch $JDT_PREFS -fi -echo -e "JDT preferences:\t$JDT_PREFS" - -# $1 - preference file (will be backed-up before writing) -# $2 - preference key -# $3 - preference value -function updatePref() { - mv $1 ${1}_backup - - awk -v key=$2 -v value=$3 ' - BEGIN { - FS="="; - OFS="="; - prev="" - } - { - if ($1 == key) { - prev=$2 - $2=value - } - print - } - END { - if (prev) { - printf "Updated existing value from %s to %s\n", prev, value > "/dev/stderr" - } else { - print key,value - } - } - ' ${1}_backup >$1 -} - -updatePref $CORE_PREFS "pathvariable.SCALA_BASEDIR" $SCALA_DIR -updatePref $JDT_PREFS "org.eclipse.jdt.core.classpathVariable.SCALA_BASEDIR" $SCALA_DIR diff --git a/src/intellij/README.md b/src/intellij/README.md index 7bd990288b3d..8c45f5b31d81 100644 --- a/src/intellij/README.md +++ b/src/intellij/README.md @@ -1,50 +1,57 @@ # Developing Scala in IntelliJ IDEA -Use the latest IntelliJ release and install the Scala plugin from within the IDE. +Use the latest IntelliJ release. + +Make sure the Scala plugin is installed. (It may come preinstalled; +if not, install it before proceeding.) ## Initial Setup -To create the IntelliJ project files: +Do not attempt to import our sbt build into IntelliJ; it won't work. + +Instead, create IntelliJ project files as follows. - Run `sbt intellij` - - Open `src/intellij/scala.ipr` in IntelliJ - - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry - named "1.8" containing the Java 1.8 SDK (1.6 if you're on the Scala the 2.11.x branch) The project files are created as copies of the `.SAMPLE` files, which are under version control. 
The actual IntelliJ project files are in `.gitignore` so that local changes are ignored. -## Dependencies +Then to start coding: -For every module in the IntelliJ project there is a corresponding `-deps` library, for example `compiler-deps` provides `ant.jar` for the compiler codebase. -The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again. -This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated. + - Open `src/intellij/scala.ipr` in IntelliJ + - Wait for the project to index + - On the `Build` menu, choose `Build Project` -Note that this command only patches the dependency lists, all other settings in the IntelliJ project definition are unchanged. -To overwrite the project definition files by copying the `.SAMPLE` files again run `sbt intellijFromSample`. +Everything (library, compiler etc) should build within a few minutes. -## Switching Branches +## Troubleshooting -The 2.12.x branch contains IntelliJ module files for `actors` and `forkjoin` even though these modules only exist in 2.11.x. -This allows using the same IntelliJ project files when switching to the 2.11.x branch (without causing any issues while working on 2.12.x). +Recent versions of IntelliJ are able to find a JDK on your system and select it +automatically. If that doesn't happen: -When switching between 2.11.x and 2.12.x, make sure to run `sbt intellij`. -Note that the `Project SDK` is not updated in this process. -If you want to use the Java 1.6 SDK while working on 2.11.x you need to change it manually (`File` → `Project Structure` → `Project` → `Project SDK`). + - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry + named "1.8" containing the Java 1.8 SDK -If you switch between 2.11.x and 2.12.x often, it makes sense to have a separate clone -of the repository for each branch. +Note that 8 is the safest choice. 
If you're having trouble, you might check to see +if IntelliJ selected some later version. -## Incremental Compilation +## Switching Branches -Run `Build` → `Make Project` to build all modules of the Scala repository (library, -compiler, etc). Note that compilation IntelliJ is performed in a single pass (no +If you often work on both the 2.12.x and 2.13.x branches, the safest approach is to +have a separate clone of the repository for each branch. + +(But if you find that switching works even in the same clone, consider +submitting an update to this readme with any advice you have on this.) + +## IntelliJ and sbt + +Note that compilation IntelliJ is performed in a single pass (no bootstrap), like the sbt build. Note that the output directory when compiling in IntelliJ is the same as for the -sbt and (deprecated) ant builds. This allows building incrementally in IntelliJ -and directly use the changes using the command-line scripts in `build/quick/bin/`. +sbt build. This allows building incrementally in IntelliJ +and directly using the changes using the command-line scripts in `build/quick/bin/`. ## Running JUnit Tests @@ -75,7 +82,17 @@ To run the REPL create an "Application" configuration with - Main class: `scala.tools.nsc.MainGenericRunner` - Program arguments: `-usejavacp` - Working directory: the path of your checkout - - Use classpath of module: `repl` + - Use classpath of module: `repl-frontend` + +## Dependencies + +For every module in the IntelliJ project there is a corresponding `-deps` library, for example `compiler-deps` provides JARs for the compiler codebase. +The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again. +This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated. + +Note that this command only patches the dependency lists, all other settings in the IntelliJ project definition are unchanged. 
+ +To overwrite the project definition files by copying the `.SAMPLE` files again run `sbt intellijFromSample`. ## Updating the `.SAMPLE` files diff --git a/src/intellij/benchmarks.iml.SAMPLE b/src/intellij/benchmarks.iml.SAMPLE index 5fe3bdab1cb9..bb48bcec16af 100644 --- a/src/intellij/benchmarks.iml.SAMPLE +++ b/src/intellij/benchmarks.iml.SAMPLE @@ -7,6 +7,8 @@ + + @@ -18,4 +20,4 @@ - + \ No newline at end of file diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE index 1ebf409c1b37..80cbb88b2416 100644 --- a/src/intellij/compiler.iml.SAMPLE +++ b/src/intellij/compiler.iml.SAMPLE @@ -1,6 +1,6 @@ - + diff --git a/src/intellij/compilerOptionsExporter.iml.SAMPLE b/src/intellij/compilerOptionsExporter.iml.SAMPLE deleted file mode 100644 index c1a1ee49e720..000000000000 --- a/src/intellij/compilerOptionsExporter.iml.SAMPLE +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE index 05b4e162dbbe..e2e2a84f1f7c 100644 --- a/src/intellij/interactive.iml.SAMPLE +++ b/src/intellij/interactive.iml.SAMPLE @@ -1,6 +1,6 @@ - + diff --git a/src/intellij/junit.iml.SAMPLE b/src/intellij/junit.iml.SAMPLE index 593fb3432fe1..d8f9e531e11a 100644 --- a/src/intellij/junit.iml.SAMPLE +++ b/src/intellij/junit.iml.SAMPLE @@ -13,9 +13,11 @@ + + diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE index d39c9d20322b..dcfbc83ce11b 100644 --- a/src/intellij/library.iml.SAMPLE +++ b/src/intellij/library.iml.SAMPLE @@ -1,6 +1,6 @@ - + diff --git a/src/intellij/partest-javaagent.iml.SAMPLE b/src/intellij/partest-javaagent.iml.SAMPLE index 22c2cbf1bc53..a5cfaefa5b27 100644 --- a/src/intellij/partest-javaagent.iml.SAMPLE +++ b/src/intellij/partest-javaagent.iml.SAMPLE @@ -1,6 +1,6 @@ - + @@ -9,6 +9,6 @@ - + \ No newline at end of file diff --git a/src/intellij/partest.iml.SAMPLE 
b/src/intellij/partest.iml.SAMPLE index e1e2628654aa..93322559cb89 100644 --- a/src/intellij/partest.iml.SAMPLE +++ b/src/intellij/partest.iml.SAMPLE @@ -1,6 +1,6 @@ - + @@ -15,7 +15,9 @@ - + + + - + \ No newline at end of file diff --git a/src/intellij/reflect.iml.SAMPLE b/src/intellij/reflect.iml.SAMPLE index d0aba81f0bb9..1e7b668941a7 100644 --- a/src/intellij/reflect.iml.SAMPLE +++ b/src/intellij/reflect.iml.SAMPLE @@ -1,6 +1,6 @@ - + diff --git a/src/intellij/repl-frontend.iml.SAMPLE b/src/intellij/repl-frontend.iml.SAMPLE new file mode 100644 index 000000000000..1a28deab937f --- /dev/null +++ b/src/intellij/repl-frontend.iml.SAMPLE @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij/repl-jline.iml.SAMPLE b/src/intellij/repl-jline.iml.SAMPLE deleted file mode 100644 index b765a58d96e8..000000000000 --- a/src/intellij/repl-jline.iml.SAMPLE +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE index f1b2938016de..b8410f1fc604 100644 --- a/src/intellij/scala.iml.SAMPLE +++ b/src/intellij/scala.iml.SAMPLE @@ -3,7 +3,6 @@ - diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index fdfb814745c7..fc16122f8e43 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -2,178 +2,173 @@ - - - - - - - - + + + + + + + + - + + + + - + - - + - - + + - - + + - - + + - - + + - - + + - + - - + + - + - - + + - + - - + + - + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - + + - - + + - - + + - - + + - - - + + + - - + + - - + + - + - - - - - - - - + + + + + + + + - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + - - + + - - + + - + - - - - - - - - - - - - - + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - + + - - - - + + 
- - - - - - - - - - - + + + + + + + - - - - + - - - - - - - - - - + + + + + + + - + - + - - - - + + - + - - - - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - + - + - - - - - - - + + + + + - - - - - + + + - - - - + + @@ -536,30 +465,52 @@ - - - - - - + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - + + + + + + + + + + + + + + + + + diff --git a/src/intellij/scalacheck-test.iml.SAMPLE b/src/intellij/scalacheck-test.iml.SAMPLE new file mode 100644 index 000000000000..30bd79c64569 --- /dev/null +++ b/src/intellij/scalacheck-test.iml.SAMPLE @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij/scalacheck.iml.SAMPLE b/src/intellij/scalacheck.iml.SAMPLE deleted file mode 100644 index cb7837fcd46f..000000000000 --- a/src/intellij/scalacheck.iml.SAMPLE +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE index 9ab94c1bbb9c..f4a3c8dc8a1a 100644 --- a/src/intellij/scaladoc.iml.SAMPLE +++ b/src/intellij/scaladoc.iml.SAMPLE @@ -1,6 +1,6 @@ - + diff --git a/src/intellij/scalap.iml.SAMPLE b/src/intellij/scalap.iml.SAMPLE index dfe6892bd332..74dff5962437 100644 --- a/src/intellij/scalap.iml.SAMPLE +++ b/src/intellij/scalap.iml.SAMPLE @@ -1,6 +1,6 @@ - + diff --git a/src/intellij/tastytest.iml.SAMPLE b/src/intellij/tastytest.iml.SAMPLE new file mode 100644 index 000000000000..a8d86a18b3d6 --- /dev/null +++ 
b/src/intellij/tastytest.iml.SAMPLE @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE index 5b4776186202..7073e521693a 100644 --- a/src/intellij/test.iml.SAMPLE +++ b/src/intellij/test.iml.SAMPLE @@ -3,7 +3,11 @@ + + + + @@ -16,4 +20,4 @@ - + \ No newline at end of file diff --git a/src/intellij/testkit.iml.SAMPLE b/src/intellij/testkit.iml.SAMPLE new file mode 100644 index 000000000000..65bc1449fcc2 --- /dev/null +++ b/src/intellij/testkit.iml.SAMPLE @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala index b75d61a22092..a988528b013d 100644 --- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -73,7 +73,7 @@ trait CompilerControl { self: Global => /** Removes the CompilationUnit corresponding to the given SourceFile * from consideration for recompilation. */ - def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file } + def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved.synchronized { toBeRemoved += s.file }; unitOfFile get s.file } /** Returns the top level classes and objects that were deleted * in the editor since last time recentlyDeleted() was called. 
@@ -85,7 +85,7 @@ trait CompilerControl { self: Global => } /** Locate smallest tree that encloses position - * @pre Position must be loaded + * @note Pre-condition: Position must be loaded */ def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body } @@ -131,7 +131,7 @@ trait CompilerControl { self: Global => postWorkItem(new AskTypeAtItem(pos, response)) /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`. - * @pre `source` needs to be loaded. + * @note Pre-condition: `source` needs to be loaded. * @note Deprecated because of race conditions in the typechecker when the background compiler * is interrupted while typing the same `source`. * @see scala/bug#6578 @@ -175,14 +175,14 @@ trait CompilerControl { self: Global => /** Sets sync var `response` to list of members that are visible * as members of the tree enclosing `pos`, possibly reachable by an implicit. - * @pre source is loaded + * @note Pre-condition: source is loaded */ def askTypeCompletion(pos: Position, response: Response[List[Member]]) = postWorkItem(new AskTypeCompletionItem(pos, response)) /** Sets sync var `response` to list of members that are visible * as members of the scope enclosing `pos`. - * @pre source is loaded + * @note Pre-condition: source is loaded */ def askScopeCompletion(pos: Position, response: Response[List[Member]]) = postWorkItem(new AskScopeCompletionItem(pos, response)) @@ -207,7 +207,7 @@ trait CompilerControl { self: Global => postWorkItem(new AskLoadedTypedItem(source, keepLoaded, response)) final def askLoadedTyped(source: SourceFile, response: Response[Tree]): Unit = - askLoadedTyped(source, false, response) + askLoadedTyped(source, keepLoaded = false, response) /** If source if not yet loaded, get an outline view with askParseEntered. * If source is loaded, wait for it to be typechecked. 
@@ -280,6 +280,7 @@ trait CompilerControl { self: Global => val sym: Symbol val tpe: Type val accessible: Boolean + val aliasInfo: Option[ScopeMember] def implicitlyAdded = false def symNameDropLocal: Name = if (sym.name.isTermName) sym.name.dropLocal else sym.name @@ -292,12 +293,14 @@ trait CompilerControl { self: Global => def infoString = s"$accessible_s${sym.defStringSeenAs(tpe)}" } + // Note: a `TypeMember` is a member *of* a type, not a member that *is* a type case class TypeMember( sym: Symbol, tpe: Type, accessible: Boolean, inherited: Boolean, - viaView: Symbol) extends Member { + viaView: Symbol, + aliasInfo: Option[ScopeMember] = None) extends Member { // should be a case class parameter, but added as a var instead to preserve compatibility with the IDE var prefix: Type = NoType override def implicitlyAdded = viaView != NoSymbol @@ -307,7 +310,8 @@ trait CompilerControl { self: Global => sym: Symbol, tpe: Type, accessible: Boolean, - viaImport: Tree) extends Member { + viaImport: Tree, + aliasInfo: Option[ScopeMember] = None) extends Member { // should be a case class parameter, but added as a var instead to preserve compatibility with the IDE var prefix: Type = NoType } diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala index 5da3a0f1538a..43fa50776aec 100644 --- a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala +++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -159,7 +159,7 @@ trait ContextTrees { self: Global => true } else false } - def loop(lo: Int, hi: Int) { + def loop(lo: Int, hi: Int): Unit = { if (hi - lo > 1) { val mid = (lo + hi) / 2 val midpos = contexts(mid).pos diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 9a89589f890b..1daf9b72960f 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,12 +16,11 @@ package interactive import java.io.{FileReader, FileWriter} import java.util.concurrent.ConcurrentHashMap -import scala.annotation.{elidable, tailrec} -import scala.collection.JavaConverters._ +import scala.annotation.{elidable, nowarn, tailrec} import scala.collection.mutable import scala.collection.mutable.{HashSet, LinkedHashMap} +import scala.jdk.javaapi.CollectionConverters import scala.language.implicitConversions -import scala.reflect.internal.Chars.isIdentifierStart import scala.reflect.internal.util.SourceFile import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.Reporter @@ -51,16 +50,18 @@ trait InteractiveAnalyzer extends Analyzer { override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer trait InteractiveTyper extends Typer { + override def isInteractive = true override def canAdaptConstantTypeToLiteral = false override def canTranslateEmptyListToNil = false override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match { case Select(_, _) => treeCopy.Select(tree, qual, name) case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) + case _ => tree } } trait InteractiveNamer extends Namer { 
- override def saveDefaultGetter(meth: Symbol, default: Symbol) { + override def saveDefaultGetter(meth: Symbol, default: Symbol): Unit = { // save the default getters as attachments in the method symbol. if compiling the // same local block several times (which can happen in interactive mode) we might // otherwise not find the default symbol, because the second time it the method @@ -95,9 +96,9 @@ trait InteractiveAnalyzer extends Analyzer { } super.enterExistingSym(sym, tree) } - override def enterIfNotThere(sym: Symbol) { + override def enterIfNotThere(sym: Symbol): Unit = { val scope = context.scope - @tailrec def search(e: ScopeEntry) { + @tailrec def search(e: ScopeEntry): Unit = { if ((e eq null) || (e.owner ne scope)) scope enter sym else if (e.sym ne sym) // otherwise, aborts since we found sym @@ -164,52 +165,51 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") // (the map will grow indefinitely, and the only use case is the backend) override def defineOriginalOwner(sym: Symbol, owner: Symbol): Unit = { } - override def forInteractive = true override protected def synchronizeNames = true /** A map of all loaded files to the rich compilation units that correspond to them. 
*/ - val unitOfFile = (new ConcurrentHashMap[AbstractFile, RichCompilationUnit] { - override def put(key: AbstractFile, value: RichCompilationUnit) = { - val r = super.put(key, value) - if (r == null) debugLog("added unit for "+key) - r - } - override def remove(key: Any) = { - val r = super.remove(key) - if (r != null) debugLog("removed unit for "+key) - r + val unitOfFile: mutable.Map[AbstractFile, RichCompilationUnit] = { + val m = new ConcurrentHashMap[AbstractFile, RichCompilationUnit] { + override def put(key: AbstractFile, value: RichCompilationUnit) = { + val r = super.put(key, value) + if (r == null) debugLog("added unit for "+key) + r + } + override def remove(key: Any) = { + val r = super.remove(key) + if (r != null) debugLog("removed unit for "+key) + r + } } - }).asScala + CollectionConverters.asScala(m) + } /** A set containing all those files that need to be removed * Units are removed by getUnit, typically once a unit is finished compiled. */ - protected val toBeRemoved: mutable.Set[AbstractFile] = - new HashSet[AbstractFile] with mutable.SynchronizedSet[AbstractFile] + protected val toBeRemoved: HashSet[AbstractFile] = new HashSet[AbstractFile] /** A set containing all those files that need to be removed after a full background compiler run */ - protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] = - new HashSet[AbstractFile] with mutable.SynchronizedSet[AbstractFile] + protected val toBeRemovedAfterRun: HashSet[AbstractFile] = new HashSet[AbstractFile] - class ResponseMap extends mutable.HashMap[SourceFile, Set[Response[Tree]]] { - override def default(key: SourceFile): Set[Response[Tree]] = Set() - override def += (binding: (SourceFile, Set[Response[Tree]])) = { + private def newResponseMap: ResponseMap = + mutable.HashMap.empty[SourceFile, Set[Response[Tree]]].withDefaultValue(Set.empty[Response[Tree]]) + type ResponseMap = mutable.Map[SourceFile, Set[Response[Tree]]] + /* TODO restore assert on addOne assert(interruptsEnabled, "delayed 
operation within an ask") - super.+=(binding) - } - } + */ /** A map that associates with each abstract file the set of responses that are waiting * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked. */ - protected val waitLoadedTypeResponses = new ResponseMap + protected val waitLoadedTypeResponses = newResponseMap /** A map that associates with each abstract file the set of responses that ware waiting * (via build) for the unit associated with the abstract file to be parsed and entered */ - protected var getParsedEnteredResponses = new ResponseMap + protected var getParsedEnteredResponses = newResponseMap private def cleanResponses(rmap: ResponseMap): Unit = { for ((source, rs) <- rmap.toList) { @@ -228,7 +228,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val global: Global.this.type = Global.this } with InteractiveAnalyzer - private def cleanAllResponses() { + private def cleanAllResponses(): Unit = { cleanResponses(waitLoadedTypeResponses) cleanResponses(getParsedEnteredResponses) } @@ -239,7 +239,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") r raise new MissingResponse } - def checkNoResponsesOutstanding() { + def checkNoResponsesOutstanding(): Unit = { checkNoOutstanding(waitLoadedTypeResponses) checkNoOutstanding(getParsedEnteredResponses) } @@ -278,13 +278,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") private var ignoredFiles: Set[AbstractFile] = Set() /** Flush the buffer of sources that are ignored during background compilation. */ - def clearIgnoredFiles() { + def clearIgnoredFiles(): Unit = { ignoredFiles = Set() } /** Remove a crashed file from the ignore buffer. Background compilation will take it into account * and errors will be reported against it. 
*/ - def enableIgnoredFile(file: AbstractFile) { + def enableIgnoredFile(file: AbstractFile): Unit = { ignoredFiles -= file debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles)) } @@ -320,7 +320,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Called from parser, which signals hereby that a method definition has been parsed. */ - override def signalParseProgress(pos: Position) { + override def signalParseProgress(pos: Position): Unit = { // We only want to be interruptible when running on the PC thread. if(onCompilerThread) { checkForMoreWork(pos) @@ -334,7 +334,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * @param old The original node * @param result The transformed node */ - override def signalDone(context: Context, old: Tree, result: Tree) { + override def signalDone(context: Context, old: Tree, result: Tree): Unit = { val canObserveTree = ( interruptsEnabled && lockedCount == 0 @@ -379,11 +379,11 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** The top level classes and objects no longer seen in the presentation compiler */ - val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol] + val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] /** Called from typechecker every time a top-level class or object is entered. 
*/ - override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym } + override def registerTopLevelSym(sym: Symbol): Unit = { currentTopLevelSyms += sym } protected type SymbolLoadersInInteractive = GlobalSymbolLoaders { val global: Global.this.type @@ -398,6 +398,14 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val platform: Global.this.platform.type = Global.this.platform } with BrowsingLoaders + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = { + val isPastNamer = force || currentTyperRun == null || (currentTyperRun.currentUnit match { + case unit: RichCompilationUnit => unit.isParsed + case _ => true + }) + super.openPackageModule(pkgClass, force = isPastNamer) + } + // ----------------- Polling --------------------------------------- case class WorkEvent(atNode: Int, atMillis: Long) @@ -420,12 +428,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * @param pos The position of the tree if polling while typechecking, NoPosition otherwise * */ - private[interactive] def pollForWork(pos: Position) { + private[interactive] def pollForWork(pos: Position): Unit = { var loop: Boolean = true while (loop) { breakable{ loop = false - if (!interruptsEnabled) return + // TODO refactor to eliminate breakable/break/return? 
+ (if (!interruptsEnabled) return): @nowarn("cat=lint-nonlocal-return") if (pos == NoPosition || nodesSeen % yieldPeriod == 0) Thread.`yield`() @@ -454,7 +463,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") debugLog("ask finished"+timeStep) interruptsEnabled = true } - loop = true; break + loop = true; break() case _ => } @@ -510,7 +519,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - protected def checkForMoreWork(pos: Position) { + protected def checkForMoreWork(pos: Position): Unit = { val typerRun = currentTyperRun pollForWork(pos) if (typerRun != currentTyperRun) demandNewCompilerRun() @@ -528,7 +537,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase) */ @elidable(elidable.WARNING) - override def assertCorrectThread() { + override def assertCorrectThread(): Unit = { assert(initializing || anyThread || onCompilerThread, "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) + " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets") @@ -548,7 +557,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Compile all loaded source files in the order given by `allSources`. 
*/ - private[interactive] final def backgroundCompile() { + private[interactive] final def backgroundCompile(): Unit = { informIDE("Starting new presentation compiler type checking pass") reporter.reset() @@ -576,7 +585,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) { try { if (!unit.isUpToDate) - if (unit.problems.isEmpty || !settings.YpresentationStrict) + if (unit.problems.isEmpty || !settings.YpresentationStrict.value) typeCheck(unit) else debugLog("%s has syntax errors. Skipped typechecking".format(unit)) else debugLog("already up to date: "+unit) @@ -604,7 +613,11 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } // move units removable after this run to the "to-be-removed" buffer - toBeRemoved ++= toBeRemovedAfterRun + toBeRemoved.synchronized { + toBeRemovedAfterRun.synchronized { + toBeRemoved ++= toBeRemovedAfterRun + } + } // clean out stale waiting responses cleanAllResponses() @@ -622,7 +635,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Service all pending getParsedEntered requests */ - private def serviceParsedEntered() { + private def serviceParsedEntered(): Unit = { var atOldRun = true for ((source, rs) <- getParsedEnteredResponses; r <- rs) { if (atOldRun) { newTyperRun(); atOldRun = false } @@ -633,8 +646,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Reset unit to unloaded state */ private def reset(unit: RichCompilationUnit): Unit = { - unit.depends.clear() - unit.defined.clear() + unit.depends.clear(): @nowarn("cat=deprecation") + unit.defined.clear(): @nowarn("cat=deprecation") unit.synthetics.clear() unit.toCheck.clear() unit.checkedFeatures = Set() @@ -659,7 +672,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Make sure unit is typechecked */ - private[scala] def typeCheck(unit: 
RichCompilationUnit) { + private[scala] def typeCheck(unit: RichCompilationUnit): Unit = { debugLog("type checking: "+unit) parseAndEnter(unit) unit.status = PartiallyChecked @@ -669,7 +682,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } /** Update deleted and current top-level symbols sets */ - def syncTopLevelSyms(unit: RichCompilationUnit) { + def syncTopLevelSyms(unit: RichCompilationUnit): Unit = { val deleted = currentTopLevelSyms filter { sym => /** We sync after namer phase and it resets all the top-level symbols * that survive the new parsing @@ -681,22 +694,22 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } for (d <- deleted) { d.owner.info.decls unlink d - deletedTopLevelSyms += d + deletedTopLevelSyms.synchronized { deletedTopLevelSyms += d } currentTopLevelSyms -= d } } /** Move list of files to front of allSources */ - def moveToFront(fs: List[SourceFile]) { + def moveToFront(fs: List[SourceFile]): Unit = { allSources = fs ::: (allSources diff fs) } // ----------------- Implementations of client commands ----------------------- def respond[T](result: Response[T])(op: => T): Unit = - respondGradually(result)(Stream(op)) + respondGradually(result)(LazyList(op)) - def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = { + def respondGradually[T](response: Response[T])(op: => LazyList[T]): Unit = { val prevResponse = pendingResponse try { pendingResponse = response @@ -741,17 +754,17 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - private[interactive] def reloadSource(source: SourceFile) { + private[interactive] def reloadSource(source: SourceFile): Unit = { val unit = new RichCompilationUnit(source) unitOfFile(source.file) = unit - toBeRemoved -= source.file - toBeRemovedAfterRun -= source.file + toBeRemoved.synchronized { toBeRemoved -= source.file } + toBeRemovedAfterRun.synchronized { toBeRemovedAfterRun -= source.file 
} reset(unit) //parseAndEnter(unit) } /** Make sure a set of compilation units is loaded and parsed */ - private def reloadSources(sources: List[SourceFile]) { + private def reloadSources(sources: List[SourceFile]): Unit = { newTyperRun() minRunId = currentRunId sources foreach reloadSource @@ -759,20 +772,20 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } /** Make sure a set of compilation units is loaded and parsed */ - private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) { + private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]): Unit = { informIDE("reload: " + sources) lastWasReload = true respond(response)(reloadSources(sources)) demandNewCompilerRun() } - private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) { + private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]): Unit = { informIDE("files deleted: " + sources) val deletedFiles = sources.map(_.file).toSet val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile} for (d <- deletedSyms) { d.owner.info.decls unlink d - deletedTopLevelSyms += d + deletedTopLevelSyms.synchronized { deletedTopLevelSyms += d } currentTopLevelSyms -= d } sources foreach (removeUnitOf(_)) @@ -785,8 +798,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * If we do just removeUnit, some problems with default parameters can ensue. * Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly. 
*/ - private def afterRunRemoveUnitsOf(sources: List[SourceFile]) { - toBeRemovedAfterRun ++= sources map (_.file) + private def afterRunRemoveUnitsOf(sources: List[SourceFile]): Unit = { + toBeRemovedAfterRun.synchronized { toBeRemovedAfterRun ++= sources map (_.file) } } /** A fully attributed tree located at position `pos` */ @@ -834,13 +847,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } /** Set sync var `response` to a fully attributed tree located at position `pos` */ - private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) { + private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]): Unit = { respond(response)(typedTreeAt(pos)) } /** Set sync var `response` to a fully attributed tree corresponding to the * entire compilation unit */ - private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) { + private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]): Unit = { respond(response)(typedTree(source, forceReload)) } @@ -874,10 +887,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") sym.isType || { try { val tp1 = pre.memberType(alt) onTypeError NoType - val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams) + val tp2 = adaptToNewRunMap(sym.tpe).substSym(originalTypeParams, sym.owner.typeParams) matchesType(tp1, tp2, alwaysMatchSimple = false) || { debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed") - val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams) + val tp3 = adaptToNewRunMap(sym.tpe).substSym(originalTypeParams, alt.owner.typeParams) matchesType(tp1, tp3, alwaysMatchSimple = false) || { debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed") false @@ -910,7 +923,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } 
/** Implements CompilerControl.askLinkPos */ - private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) { + private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]): Unit = { informIDE("getLinkPos "+sym+" "+source) respond(response) { if (sym.owner.isClass) { @@ -924,7 +937,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) { + private def forceDocComment(sym: Symbol, unit: RichCompilationUnit): Unit = { unit.body foreachPartial { case DocDef(comment, defn) if defn.symbol == sym => fillDocComment(defn.symbol, comment) @@ -936,7 +949,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Implements CompilerControl.askDocComment */ private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], - response: Response[(String, String, Position)]) { + response: Response[(String, String, Position)]): Unit = { informIDE(s"getDocComment $sym at $source, site $site") respond(response) { withTempUnits(fragments.unzip._2){ units => @@ -961,26 +974,26 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") singleType(NoPrefix, tree.symbol) case Select(qual, _) if treeInfo.admitsTypeSelection(tree) => singleType(qual.tpe, tree.symbol) - case Import(expr, selectors) => + case Import(_, _) => tree.symbol.info match { case ImportType(expr) => expr match { - case s@Select(qual, name) if treeInfo.admitsTypeSelection(expr) => singleType(qual.tpe, s.symbol) - case i : Ident => i.tpe + case Select(qual, _) if treeInfo.admitsTypeSelection(expr) => singleType(qual.tpe, expr.symbol) + case _: Ident => expr.tpe case _ => tree.tpe } case _ => tree.tpe } - case _ => tree.tpe } import analyzer.{ImplicitSearch, SearchResult} - private[interactive] def getScopeCompletion(pos: Position, 
response: Response[List[Member]]) { + private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]): Unit = { informIDE("getScopeCompletion" + pos) - respond(response) { scopeMembers(pos) } + respond(response) { scopeMemberFlatten(scopeMembers(pos)) } } + @nowarn("msg=inheritance from class LinkedHashMap") private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] { override def default(key: Name) = Set() @@ -993,10 +1006,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") !sym.hasFlag(ACCESSOR | PARAMACCESSOR) && (!implicitlyAdded || m.implicitlyAdded) - def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) { + def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M): Unit = { if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) { add(sym.accessed, pre, implicitlyAdded)(toMember) - } else if (!sym.name.decodedName.containsName("$") && !sym.isError && !sym.isArtifact && sym.hasRawInfo) { + } else if (!sym.isError && !sym.isArtifact && sym.hasRawInfo && !sym.isDefaultGetter && !sym.isMixinConstructor) { val symtpe = pre.memberType(sym) onTypeError ErrorType matching(sym, symtpe, this(sym.name)) match { case Some(m) => @@ -1029,9 +1042,15 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") locals.add(sym, pre, implicitlyAdded = false) { (s, st) => // imported val and var are always marked as inaccessible, but they could be accessed through their getters. 
scala/bug#7995 val member = if (s.hasGetter) - new ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport) - else - new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport) + else { + if (s.isAliasType) { + val aliasInfo = ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + ScopeMember(s.info.typeSymbol, s.info.typeSymbol.tpe, + context.isAccessible(s.info.typeSymbol, pre, superAccess = false), viaImport, + aliasInfo = Some(aliasInfo)) + } else ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + } member.prefix = pre member } @@ -1066,13 +1085,16 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") result } - private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) { + private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]): Unit = { informIDE("getTypeCompletion " + pos) respondGradually(response) { typeMembers(pos) } //if (debugIDE) typeMembers(pos) } - private def typeMembers(pos: Position): Stream[List[TypeMember]] = { + // it's expected that later items in the `LazyList` supersede earlier items. + // (once a second item becomes available, you entirely discard the first item, + // rather than combine them) + private def typeMembers(pos: Position): LazyList[List[TypeMember]] = { // Choosing which tree will tell us the type members at the given position: // If pos leads to an Import, type the expr // If pos leads to a Select, type the qualifier as long as it is not erroneous @@ -1114,7 +1136,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Create a function application of a given view function to `tree` and typechecked it. 
*/ def viewApply(view: SearchResult): Tree = { - assert(view.tree != EmptyTree) + assert(view.tree != EmptyTree, "view.tree should be non-empty") val t = analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false)) .typed(Apply(view.tree, List(tree)) setPos tree.pos) if (!t.tpe.isErroneous) t @@ -1140,7 +1162,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val applicableViews: List[SearchResult] = if (ownerTpe.isErroneous) List() else new ImplicitSearch( - tree, functionType(List(ownerTpe), AnyTpe), isView = true, + tree, functionType(List(ownerTpe), AnyTpe), isView = true, isByNamePt = false, context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits for (view <- applicableViews) { val vtree = viewApply(view) @@ -1149,8 +1171,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") addTypeMember(sym, vpre, inherited = false, view.tree.symbol) } } - //println() - Stream(members.allMembers) + LazyList(members.allMembers) } } @@ -1161,83 +1182,85 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") def name: Name /** Cursor Offset - positionDelta == position of the start of the name */ def positionDelta: Int + def forImport: Boolean def matchingResults(nameMatcher: (Name) => Name => Boolean = entered => candidate => candidate.startsWith(entered)): List[M] = { val enteredName = if (name == nme.ERROR) nme.EMPTY else name val matcher = nameMatcher(enteredName) - results filter { (member: Member) => + results.filter { (member: Member) => val symbol = member.sym def isStable = member.tpe.isStable || member.sym.isStable || member.sym.getterIn(member.sym.owner).isStable - def isJunk = symbol.name.isEmpty || !isIdentifierStart(member.sym.name.charAt(0)) // e.g. 
- !isJunk && member.accessible && !symbol.isConstructor && (name.isEmpty || matcher(member.sym.name) && (symbol.name.isTermName == name.isTermName || name.isTypeName && isStable)) + def isJunk = !symbol.exists || symbol.name.isEmpty || symbol.encodedName.charAt(0) == '<' // e.g. + def nameTypeOk: Boolean = { + forImport || // Completing an import: keep terms and types. + symbol.name.isTermName == name.isTermName || // Keep names of the same type + name.isTypeName && isStable // Completing a type: keep stable terms (paths) + } + // scala/bug#11846 aliasInfo should be matched + def aliasTypeOk: Boolean = { + matcher(member.aliasInfo.map(_.sym.name).getOrElse(NoSymbol.name)) && !forImport && symbol.name.isTermName == name.isTermName + } + + !isJunk && member.accessible && (name.isEmpty || (matcher(member.sym.name) || aliasTypeOk) + && nameTypeOk) + } } } object CompletionResult { - final case class ScopeMembers(positionDelta: Int, results: List[ScopeMember], name: Name) extends CompletionResult { + final case class ScopeMembers(positionDelta: Int, results: List[ScopeMember], name: Name, forImport: Boolean) extends CompletionResult { type M = ScopeMember } final case class TypeMembers(positionDelta: Int, qualifier: Tree, tree: Tree, results: List[TypeMember], name: Name) extends CompletionResult { + def forImport: Boolean = tree.isInstanceOf[Import] type M = TypeMember } case object NoResults extends CompletionResult { override def results = Nil override def name = nme.EMPTY override def positionDelta = 0 - - } - private val CamelRegex = "([A-Z][^A-Z]*)".r - private def camelComponents(s: String, allowSnake: Boolean): List[String] = { - if (allowSnake && s.forall(c => c.isUpper || c == '_')) s.split('_').toList.filterNot(_.isEmpty) - else CamelRegex.findAllIn("X" + s).toList match { case head :: tail => head.drop(1) :: tail; case Nil => Nil } - } - def camelMatch(entered: Name): Name => Boolean = { - val enteredS = entered.toString - val enteredLowercaseSet = 
enteredS.toLowerCase().toSet - val allowSnake = !enteredS.contains('_') - - (candidate: Name) => { - def candidateChunks = camelComponents(candidate.dropLocal.toString, allowSnake) - // Loosely based on IntelliJ's autocompletion: the user can just write everything in - // lowercase, as we'll let `isl` match `GenIndexedSeqLike` or `isLovely`. - def lenientMatch(entered: String, candidate: List[String], matchCount: Int): Boolean = { - candidate match { - case Nil => entered.isEmpty && matchCount > 0 - case head :: tail => - val enteredAlternatives = Set(entered, entered.capitalize) - val n = (head, entered).zipped.count {case (c, e) => c == e || (c.isUpper && c == e.toUpper)} - head.take(n).inits.exists(init => - enteredAlternatives.exists(entered => - lenientMatch(entered.stripPrefix(init), tail, matchCount + (if (init.isEmpty) 0 else 1)) - ) - ) - } - } - val containsAllEnteredChars = { - // Trying to rule out some candidates quickly before the more expensive `lenientMatch` - val candidateLowercaseSet = candidate.toString.toLowerCase().toSet - enteredLowercaseSet.diff(candidateLowercaseSet).isEmpty - } - containsAllEnteredChars && lenientMatch(enteredS, candidateChunks, 0) - } + override def forImport: Boolean = false } } + private def scopeMemberFlatten(members: List[ScopeMember]): List[ScopeMember] = { + val (infoWithoutAlias, infoWithAlias) = members.partition(_.aliasInfo.isEmpty) + infoWithoutAlias ++ infoWithAlias ++ infoWithAlias.flatten(_.aliasInfo) + } + final def completionsAt(pos: Position): CompletionResult = { val focus1: Tree = typedTreeAt(pos) def typeCompletions(tree: Tree, qual: Tree, nameStart: Int, name: Name): CompletionResult = { val qualPos = qual.pos - val allTypeMembers = typeMembers(qualPos).toList.flatten + val saved = tree.tpe + // Force `typeMembers` to complete via the prefix, not the type of the Select itself. 
+ tree.setType(ErrorType) + val allTypeMembers = try { + typeMembers(qualPos).last + } finally { + tree.setType(saved) + } val positionDelta: Int = pos.start - nameStart val subName: Name = name.newName(new String(pos.source.content, nameStart, pos.start - nameStart)).encodedName CompletionResult.TypeMembers(positionDelta, qual, tree, allTypeMembers, subName) } focus1 match { + case Apply(Select(qual, name), _) if qual.hasAttachment[InterpolatedString.type] => + // This special case makes CompletionTest.incompleteStringInterpolation work. + // In incomplete code, the parser treats `foo""` as a nested string interpolation, even + // though it is likely that the user wanted to complete `fooBar` before adding the closing brace. + // val fooBar = 42; s"abc ${foo" + // + // TODO: We could also complete the selection here to expand `ra"..."` to `raw"..."`. + val allMembers = scopeMembers(pos) + val positionDelta: Int = pos.start - focus1.pos.start + val subName = name.subName(0, positionDelta) + CompletionResult.ScopeMembers(positionDelta, scopeMemberFlatten(allMembers), subName, forImport = false) case imp@Import(i @ Ident(name), head :: Nil) if head.name == nme.ERROR => val allMembers = scopeMembers(pos) val nameStart = i.pos.start val positionDelta: Int = pos.start - nameStart val subName = name.subName(0, pos.start - i.pos.start) - CompletionResult.ScopeMembers(positionDelta, allMembers, subName) + CompletionResult.ScopeMembers(positionDelta, scopeMemberFlatten(allMembers), subName, forImport = true) case imp@Import(qual, selectors) => selectors.reverseIterator.find(_.namePos <= pos.start) match { case None => CompletionResult.NoResults @@ -1245,18 +1268,18 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") typeCompletions(imp, qual, selector.namePos, selector.name) } case sel@Select(qual, name) => - val qualPos = qual.pos - def fallback = qualPos.end + 2 - val source = pos.source - val nameStart: Int = (focus1.pos.end - 1 to 
qualPos.end by -1).find(p => - source.identifier(source.position(p)).exists(_.length == 0) - ).map(_ + 1).getOrElse(fallback) + val rawNameStart: Int = sel.pos.point + val hasBackTick = pos.source.content.lift(rawNameStart).contains('`') + val nameStart = if (hasBackTick) rawNameStart + 1 else rawNameStart typeCompletions(sel, qual, nameStart, name) - case Ident(name) => + case ident@Ident(name) => val allMembers = scopeMembers(pos) - val positionDelta: Int = pos.start - focus1.pos.start + val rawNameStart: Int = ident.pos.point + val hasBackTick = pos.source.content.lift(rawNameStart).contains('`') + val nameStart = if (hasBackTick) rawNameStart + 1 else rawNameStart + val positionDelta: Int = pos.start - nameStart val subName = name.subName(0, positionDelta) - CompletionResult.ScopeMembers(positionDelta, allMembers, subName) + CompletionResult.ScopeMembers(positionDelta, scopeMemberFlatten(allMembers), subName, forImport = false) case _ => CompletionResult.NoResults } @@ -1264,7 +1287,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Implements CompilerControl.askLoadedTyped */ - private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], keepLoaded: Boolean = false, onSameThread: Boolean = true) { + private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], keepLoaded: Boolean, onSameThread: Boolean): Unit = { getUnit(source) match { case Some(unit) => if (unit.isUpToDate) { @@ -1283,14 +1306,14 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") debugLog("load unit and type") try reloadSources(List(source)) finally { - waitLoadedTyped(source, response, onSameThread) + waitLoadedTyped(source, response, keepLoaded, onSameThread = true) if (!keepLoaded) removeUnitOf(source) } } } /** Implements CompilerControl.askParsedEntered */ - private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], 
onSameThread: Boolean = true) { + private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true): Unit = { getUnit(source) match { case Some(unit) => getParsedEnteredNow(source, response) @@ -1308,7 +1331,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } /** Parses and enters given source file, storing parse tree in response */ - private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) { + private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]): Unit = { respond(response) { onUnitOf(source) { unit => parseAndEnter(unit) @@ -1337,18 +1360,18 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** Apply a phase to a compilation unit * @return true iff typechecked correctly */ - private def applyPhase(phase: Phase, unit: CompilationUnit) { + private def applyPhase(phase: Phase, unit: CompilationUnit): Unit = { enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit } } } - def newTyperRun() { + def newTyperRun(): Unit = { currentTyperRun = new TyperRun } class TyperResult(val tree: Tree) extends ControlThrowable - assert(globalPhase.id == 0) + assert(globalPhase.id == 0, "phase at zero") implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x) diff --git a/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala index 7e38d2f1ec87..159fdf536fd2 100644 --- a/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala +++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,10 +14,10 @@ package scala.tools.nsc package interactive import scala.collection.mutable.ArrayBuffer -import scala.reflect.internal.util.Position +import scala.reflect.internal.util.{CodeAction, Position} import scala.tools.nsc.reporters.FilteringReporter -case class Problem(pos: Position, msg: String, severityLevel: Int) +case class Problem(pos: Position, msg: String, severityLevel: Int, actions: List[CodeAction]) abstract class InteractiveReporter extends FilteringReporter { @@ -27,7 +27,7 @@ abstract class InteractiveReporter extends FilteringReporter { val otherProblems = new ArrayBuffer[Problem] - override def doReport(pos: Position, msg: String, severity: Severity): Unit = try { + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = try { val problems = if (compiler eq null) { otherProblems @@ -44,12 +44,12 @@ abstract class InteractiveReporter extends FilteringReporter { compiler.debugLog("[no position] :" + msg) otherProblems } - problems += Problem(pos, msg, severity.id) + problems += Problem(pos, msg, severity.id, actions) } catch { case ex: UnsupportedOperationException => } - override def reset() { + override def reset(): Unit = { super.reset() otherProblems.clear() } diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala index 39ee494ce62d..f1ac87b6ce6c 100644 --- a/src/interactive/scala/tools/nsc/interactive/Lexer.scala +++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -89,7 +89,7 @@ object Lexer { (if (d < 10) d + '0' else d - 10 + 'A').toChar } - private def addToStr(buf: StringBuilder, ch: Char) { + private def addToStr(buf: StringBuilder, ch: Char): Unit = { ch match { case '"' => buf ++= "\\\"" case '\b' => buf ++= "\\b" @@ -155,7 +155,7 @@ class Lexer(rd: Reader) { private var bp = 0 /** Reads next character into `ch` */ - def nextChar() { + def nextChar(): Unit = { assert(!atEOF) if (bp == nread) { nread = rd.read(buf) @@ -170,25 +170,25 @@ class Lexer(rd: Reader) { /** If last-read character equals given character, reads next character, * otherwise raises an error * @param c the given character to compare with last-read character - * @throws MalformedInput if character does not match + * @throws Lexer.MalformedInput if character does not match */ def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected") private val sb = new StringBuilder - private def putChar() { + private def putChar(): Unit = { sb += ch; nextChar() } - private def putAcceptString(str: String) { + private def putAcceptString(str: String): Unit = { str foreach acceptChar sb ++= str } /** Skips whitespace and reads next lexeme into `token` - * @throws MalformedInput if lexeme not recognized as a valid token + * @throws Lexer.MalformedInput if lexeme not recognized as a valid token */ - def nextToken() { + def nextToken(): Unit = { sb.clear() while (!atEOF && ch <= ' ') nextChar() tokenPos = pos - 1 @@ -214,9 +214,9 @@ class Lexer(rd: Reader) { /** Reads a string literal, and forms a `StringLit` token from it. * Last-read input character `ch` must be opening `"`-quote. - * @throws MalformedInput if lexeme not recognized as a string literal. + * @throws Lexer.MalformedInput if lexeme not recognized as a string literal. 
*/ - def getString() { + def getString(): Unit = { def udigit() = { nextChar() if ('0' <= ch && ch <= '9') ch - '9' @@ -253,9 +253,9 @@ class Lexer(rd: Reader) { /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it. * Last-read input character `ch` must be either `-` or a digit. - * @throws MalformedInput if lexeme not recognized as a numeric literal. + * @throws Lexer.MalformedInput if lexeme not recognized as a numeric literal. */ - def getNumber() { + def getNumber(): Unit = { def digit() = if ('0' <= ch && ch <= '9') putChar() else error(" expected") @@ -281,27 +281,27 @@ class Lexer(rd: Reader) { /** If current token equals given token, reads next token, otherwise raises an error. * @param t the given token to compare current token with - * @throws MalformedInput if the two tokens do not match. + * @throws Lexer.MalformedInput if the two tokens do not match. */ - def accept(t: Token) { + def accept(t: Token): Unit = { if (token == t) nextToken() - else error(t+" expected, but "+token+" found") + else error(s"$t expected, but $token found") } /** The current token is a delimiter consisting of given character, reads next token, * otherwise raises an error. * @param ch the given delimiter character to compare current token with - * @throws MalformedInput if the current token `token` is not a delimiter, or + * @throws Lexer.MalformedInput if the current token `token` is not a delimiter, or * consists of a character different from `c`. */ - def accept(ch: Char) { + def accept(ch: Char): Unit = { token match { case Delim(`ch`) => nextToken() case _ => accept(Delim(ch)) } } - /** Always throws a `MalformedInput` exception with given error message. + /** Always throws a [[Lexer.MalformedInput]] exception with given error message. 
* @param msg the error message */ def error(msg: String) = throw new MalformedInput(this, msg) diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala index 333f6cc0969c..555e80a3cc18 100644 --- a/src/interactive/scala/tools/nsc/interactive/Main.scala +++ b/src/interactive/scala/tools/nsc/interactive/Main.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,14 +30,14 @@ object Main extends nsc.MainClass { val reloaded = new interactive.Response[Unit] compiler.askReload(sfs, reloaded) - reloaded.get.right.toOption match { + reloaded.get.toOption match { case Some(ex) => reporter.error(NoPosition, ex.getMessage) // Causes exit code to be non-0 - case None => reporter.reset() // Causes other compiler errors to be ignored + case None => reporter.reset() // Causes other compiler errors to be ignored } compiler.askShutdown() } super.processSettingsHook() && ( - if (this.settings.Yidedebug) { run() ; false } else true + if (this.settings.Yidedebug.value) { run() ; false } else true ) } } diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala index c125fa3fff8a..cfd6094e56a1 100644 --- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala +++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,7 @@ package scala.tools.nsc.interactive import Lexer._ import java.io.Writer +import scala.collection.AbstractIterator /** An abstract class for writing and reading Scala objects to and * from a legible representation. The representation follows the following grammar: @@ -33,8 +34,8 @@ import java.io.Writer * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by * Iulian Dragos' picklers for Scala to XML. See: * - * - * http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide + * + * https://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide * */ abstract class Pickler[T] { @@ -45,7 +46,7 @@ abstract class Pickler[T] { * @param wr the writer to which pickled form is written * @param x the value to write */ - def pickle(wr: Writer, x: T) + def pickle(wr: Writer, x: T): Unit /** Reads value from pickled form. * @@ -53,8 +54,8 @@ abstract class Pickler[T] { * @return An `UnpickleSuccess` value if the current input corresponds to the * kind of value that is unpickled by the current subclass of `Pickler`, * an `UnpickleFailure` value otherwise. - * @throws `Lexer.MalformedInput` if input is invalid, or if - * an `Unpickle + * @throws Lexer.MalformedInput if input is invalid, or if + * an `Unpickle` */ def unpickle(rd: Lexer): Unpickled[T] @@ -68,7 +69,7 @@ abstract class Pickler[T] { /** A pickler that adds a label to the current pickler, using the representation * `label ( )` * - * @label the string to be added as a label. + * @param label the string to be added as a label. */ def labelled(label: String): Pickler[T] = labelledPickler(label, this) @@ -99,38 +100,38 @@ object Pickler { * where a value of the given type `T` could not be unpickled from input. * @tparam T the type of unpickled values in case of success. 
*/ - abstract class Unpickled[+T] { + sealed abstract class Unpickled[+T] { /** Transforms success values to success values using given function, * leaves failures alone * @param f the function to apply. */ def map[U](f: T => U): Unpickled[U] = this match { - case UnpickleSuccess(x) => UnpickleSuccess(f(x)) - case f: UnpickleFailure => f + case UnpickleSuccess(x) => UnpickleSuccess(f(x)) + case fail: UnpickleFailure => fail } /** Transforms success values to successes or failures using given function, * leaves failures alone. * @param f the function to apply. */ def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match { - case UnpickleSuccess(x) => f(x) - case f: UnpickleFailure => f + case UnpickleSuccess(x) => f(x) + case fail: UnpickleFailure => fail } /** Tries alternate expression if current result is a failure * @param alt the alternate expression to be tried in case of failure */ def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match { case UnpickleSuccess(x) => this - case f: UnpickleFailure => alt + case _: UnpickleFailure => alt } /** Transforms failures into thrown `MalformedInput` exceptions. - * @throws MalformedInput if current result is a failure + * @throws Lexer.MalformedInput if current result is a failure */ def requireSuccess: UnpickleSuccess[T] = this match { - case s @ UnpickleSuccess(x) => s + case s @ UnpickleSuccess(_) => s case f: UnpickleFailure => - throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg) + throw new MalformedInput(f.rd, s"Unrecoverable unpickle failure:\n${f.errMsg}") } } @@ -145,7 +146,7 @@ object Pickler { * @param rd the lexer unpickled values were read from (can be used to get * error position, for instance). 
*/ - class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] { + final class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] { def errMsg = msg override def toString = "Failure at "+rd.tokenPos+":\n"+msg } @@ -211,7 +212,7 @@ object Pickler { /** Same as `p ~ q` */ - def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] { + def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]): Pickler[T ~ U] = new Pickler[T ~ U] { lazy val qq = q def pickle(wr: Writer, x: T ~ U) = { p.pickle(wr, x.fst) @@ -225,7 +226,7 @@ object Pickler { /** Same as `p | q` */ - def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) = + def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]): CondPickler[T] = new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) { lazy val qq = q override def tryPickle(wr: Writer, x: Any): Boolean = @@ -249,11 +250,11 @@ object Pickler { /** A pickler the handles instances of classes that have an empty constructor. * It represents than as `\$new ( )`. * When unpickling, a new instance of the class is created using the empty - * constructor of the class via `Class.forName().newInstance()`. + * constructor of the class via `Class.forName().getConstructor().newInstance()`. */ def javaInstancePickler[T <: AnyRef]: Pickler[T] = (stringPickler labelled "$new") - .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName } + .wrapped { name => Class.forName(name).getConstructor().newInstance().asInstanceOf[T] } { _.getClass.getName } /** A picklers that handles iterators. It pickles all values * returned by an iterator separated by commas. 
@@ -270,14 +271,14 @@ object Pickler { */ implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] { lazy val p = pkl[T] - def pickle(wr: Writer, xs: Iterator[T]) { + def pickle(wr: Writer, xs: Iterator[T]): Unit = { var first = true for (x <- xs) { if (first) first = false else wr.write(',') p.pickle(wr, x) } } - def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] { + def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new AbstractIterator[T] { var first = true def hasNext = { val t = rd.token @@ -323,7 +324,7 @@ object Pickler { /** A pickler for values of type `Unit`, represented by the empty character string */ implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] { - def pickle(wr: Writer, x: Unit) {} + def pickle(wr: Writer, x: Unit): Unit = () def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(()) } diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala index 4577f68dfefd..779552a760a8 100644 --- a/src/interactive/scala/tools/nsc/interactive/Picklers.scala +++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -42,9 +42,7 @@ trait Picklers { self: Global => .wrapped[AbstractFile] { new PlainFile(_) } { _.path } .asClass (classOf[PlainFile]) - private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] { - override def default(key: AbstractFile) = Array() - } + private val sourceFilesSeen = mutable.HashMap.empty[AbstractFile, Array[Char]].withDefaultValue(Array.empty[Char]) type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/) @@ -100,7 +98,7 @@ trait Picklers { self: Global => } implicit lazy val symPickler: Pickler[Symbol] = { - def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = { + def ownerNames(sym: Symbol, buf: ListBuffer[Name]): buf.type = { if (!sym.isRoot) { ownerNames(sym.owner, buf) buf += (if (sym.isModuleClass) sym.sourceModule else sym).name @@ -108,7 +106,7 @@ trait Picklers { self: Global => val sym1 = sym.owner.info.decl(sym.name) if (sym1.isOverloaded) { val index = sym1.alternatives.indexOf(sym) - assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives) + assert(index >= 0, s"$sym1 not found in alternatives ${sym1.alternatives}") buf += newTermName(index.toString) } } @@ -178,7 +176,7 @@ trait Picklers { self: Global => implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] = pkl[SourceFile] - .wrapped { source => new AskLoadedTypedItem(source, false, new Response) } { _.source } + .wrapped { source => new AskLoadedTypedItem(source, keepLoaded = false, new Response) } { _.source } .asClass (classOf[AskLoadedTypedItem]) implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] = diff --git a/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala index 32f090aa2fa4..47d42e8d79b3 100644 --- a/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala +++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala @@ -1,7 +1,7 @@ /* * 
Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,7 +21,7 @@ final class PresentationCompilerThread(var compiler: Global, name: String = "") /** The presentation compiler loop. */ - override def run() { + override def run(): Unit = { compiler.debugLog("starting new runner thread") while (compiler ne null) try { compiler.checkNoResponsesOutstanding() diff --git a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala index 5a965c2431f0..b93ad752c9e9 100644 --- a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala +++ b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,7 +17,7 @@ import java.io.Writer class PrettyWriter(wr: Writer) extends Writer { protected val indentStep = " " private var indent = 0 - private def newLine() { + private def newLine(): Unit = { wr.write('\n') wr.write(indentStep * indent) } diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala index 0782abd870dd..d1ef21c3e235 100644 --- a/src/interactive/scala/tools/nsc/interactive/REPL.scala +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -30,16 +30,16 @@ object REPL { var reporter: ConsoleReporter = _ - private def replError(msg: String) { + private def replError(msg: String): Unit = { reporter.error(/*new Position */FakePos("scalac"), msg + "\n scalac -help gives more information") } - def process(args: Array[String]) { + def process(args: Array[String]): Unit = { val settings = new Settings(replError) reporter = new ConsoleReporter(settings) val command = new CompilerCommand(args.toList, settings) - if (command.settings.version) + if (command.settings.version.value) reporter.echo(versionMsg) else { try { @@ -64,15 +64,15 @@ object REPL { } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { process(args) - sys.exit(if (reporter.hasErrors) 1 else 0) + System.exit(if (reporter.hasErrors) 1 else 0) } - def loop(action: (String) => Unit) { + def loop(action: (String) => Unit): Unit = { Console.print(prompt) try { - val line = Console.readLine() + val line = scala.io.StdIn.readLine() if (line.length() > 0) { action(line) } @@ -89,7 +89,7 @@ object REPL { * typeat file off1 off2? * complete file off1 off2? 
*/ - def run(comp: Global) { + def run(comp: Global): Unit = { val reloadResult = new Response[Unit] val typeatResult = new Response[comp.Tree] val completeResult = new Response[List[comp.Member]] @@ -101,17 +101,17 @@ object REPL { comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt) } - def doTypeAt(pos: Position) { + def doTypeAt(pos: Position): Unit = { comp.askTypeAt(pos, typeatResult) show(typeatResult) } - def doComplete(pos: Position) { + def doComplete(pos: Position): Unit = { comp.askTypeCompletion(pos, completeResult) show(completeResult) } - def doStructure(file: String) { + def doStructure(file: String): Unit = { comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult) show(structureResult) } @@ -137,7 +137,7 @@ object REPL { doComplete(makePos(file, off1, off1)) case List("quit") => comp.askShutdown() - sys.exit(1) + System.exit(1) case List("structure", file) => doStructure(file) case _ => diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala index 2686ab337947..f0052def43a8 100644 --- a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala +++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/interactive/scala/tools/nsc/interactive/Replayer.scala b/src/interactive/scala/tools/nsc/interactive/Replayer.scala index 0f7d439132ab..fcba21ca7f56 100644 --- a/src/interactive/scala/tools/nsc/interactive/Replayer.scala +++ b/src/interactive/scala/tools/nsc/interactive/Replayer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,8 +20,8 @@ import Lexer.EOF abstract class LogReplay { def logreplay(event: String, x: => Boolean): Boolean def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T] - def close() - def flush() + def close(): Unit + def flush(): Unit } class Logger(wr0: Writer) extends LogReplay { @@ -42,15 +42,15 @@ class Logger(wr0: Writer) extends LogReplay { } xx } - def close() { wr.close() } - def flush() { wr.flush() } + def close(): Unit = wr.close() + def flush(): Unit = wr.flush() } object NullLogger extends LogReplay { def logreplay(event: String, x: => Boolean) = x def logreplay[T: Pickler](event: String, x: => Option[T]) = x - def close() {} - def flush() {} + def close() = () + def flush() = () } class Replayer(raw: Reader) extends LogReplay { @@ -80,7 +80,7 @@ class Replayer(raw: Reader) extends LogReplay { } } - def close() { raw.close() } - def flush() {} + def close(): Unit = raw.close() + def flush(): Unit = () } diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala index 5df96f440ea5..fc09c87ae1d9 100644 --- a/src/interactive/scala/tools/nsc/interactive/Response.scala +++ b/src/interactive/scala/tools/nsc/interactive/Response.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala index 27361f9a367f..21a1457623e9 100644 --- a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala +++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -56,7 +56,7 @@ trait RichCompilationUnits { self: Global => */ var _targetPos: Position = NoPosition override def targetPos: Position = _targetPos - def targetPos_=(p: Position) { _targetPos = p } + def targetPos_=(p: Position): Unit = { _targetPos = p } var contexts: Contexts = new Contexts diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala index 0da94c405a3b..6d31a7c869ab 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -46,9 +46,6 @@ import scala.collection.mutable.ListBuffer * framework will automatically pick it up. * * @see Check existing tests under test/files/presentation - * - * @author Iulian Dragos - * @author Mirco Dotta */ abstract class InteractiveTest extends AskParse @@ -75,12 +72,10 @@ abstract class InteractiveTest /** Add new presentation compiler actions to test. Presentation compiler's test * need to extends trait `PresentationCompilerTestDef`. 
*/ - protected def ++(tests: PresentationCompilerTestDef*) { - testActions ++= tests - } + protected def ++(tests: PresentationCompilerTestDef*): Unit = testActions ++= tests /** Test's entry point */ - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { try execute() finally askShutdown() } @@ -91,25 +86,26 @@ abstract class InteractiveTest loadSources() runDefaultTests() } - }.linesIterator.map(normalize).foreach(println) + }.linesIterator.filterNot(filterOutLines).map(normalize).foreach(println) } + protected def filterOutLines(line: String) = false protected def normalize(s: String) = s /** Load all sources before executing the test. */ - protected def loadSources() { + protected def loadSources(): Unit = { // ask the presentation compiler to track all sources. We do // not wait for the file to be entirely typed because we do want // to exercise the presentation compiler on scoped type requests. - askReload(sourceFiles) + askReload(sourceFiles.toIndexedSeq) // make sure all sources are parsed before running the test. This // is because test may depend on the sources having been parsed at // least once - askParse(sourceFiles) + askParse(sourceFiles.toIndexedSeq) } /** Run all defined `PresentationCompilerTestDef` */ - protected def runDefaultTests() { + protected def runDefaultTests(): Unit = { //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles) testActions.foreach(_.runTest()) } diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala index d5921e4160b7..6b01262615e5 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,15 +17,11 @@ package tests import java.io.File.pathSeparatorChar import java.io.File.separatorChar import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance -import scala.tools.nsc.io.{File,Path} +import scala.tools.nsc.io.Path import core.Reporter import core.TestSettings trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance { - /** Character delimiter for comments in .opts file */ - private final val CommentStartDelimiter = "#" - - private final val TestOptionsFileExtension = "flags" /** Prepare the settings object. Load the .opts file and adjust all paths from the * Unix-like syntax to the platform specific syntax. This is necessary so that a @@ -36,8 +32,8 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst * bootclasspath takes precedence over the scala-library used to run the current * test. */ - override protected def prepareSettings(settings: Settings) { - def adjustPaths(paths: settings.PathSetting*) { + override protected def prepareSettings(settings: Settings): Unit = { + def adjustPaths(paths: settings.PathSetting*): Unit = { for (p <- paths if argsString.contains(p.name)) p.value = p.value.map { case '/' => separatorChar case ':' => pathSeparatorChar @@ -63,16 +59,9 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath) } - /** If there's a file ending in .opts, read it and parse it for cmd line arguments. 
*/ - protected val argsString = { - val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension) - val str = try File(optsFile).slurp() catch { - case e: java.io.IOException => "" - } - str.linesIterator.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ") - } + protected def argsString: String = "" - override protected def printClassPath(implicit reporter: Reporter) { + override protected def printClassPath(implicit reporter: Reporter): Unit = { reporter.println("\toutDir: %s".format(outDir.path)) reporter.println("\tbaseDir: %s".format(baseDir.path)) reporter.println("\targsString: %s".format(argsString)) diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala index a1f49648c037..3942994333b4 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -25,8 +25,8 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { val reporter = new StoreReporter(settings) val compiler = new Global(settings, reporter) - def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) { - if (settings.verbose) print(msg+" "+arg+": ") + def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit): Unit = { + if (settings.verbose.value) print(msg+" "+arg+": ") val TIMEOUT = 10 // ms val limit = System.currentTimeMillis() + randomDelayMillis val res = new Response[U] @@ -37,7 +37,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { } else res.get(TIMEOUT.toLong) match { case Some(Left(t)) => /**/ - if (settings.verbose) println(t) + if (settings.verbose.value) println(t) case Some(Right(ex)) => ex.printStackTrace() println(ex) @@ -88,7 +88,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+ (if (toLeft) "left" else "right") - def deleteOne() { + def deleteOne(): Unit = { val sf = inputs(sfidx) deleted = sf.content(pos) :: deleted val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1)) @@ -96,7 +96,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { askReload(sf1) } - def deleteAll() { + def deleteAll(): Unit = { print("/"+nchars) for (i <- 0 until nchars) { if (toLeft) { @@ -112,7 +112,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { } } - def insertAll() { + def insertAll(): Unit = { for (chr <- if (toLeft) deleted else deleted.reverse) { val sf = inputs(sfidx) val (pre, post) = sf./**/content splitAt pos @@ -127,7 +127,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { val testComment = "/**/" def testFileChanges(sfidx: Int) = { - lazy val testPositions: Seq[Int] = { + lazy val testPositions: scala.collection.Seq[Int] = { val sf = inputs(sfidx) val buf 
= new ArrayBuffer[Int] var pos = sf.content.indexOfSlice(testComment) @@ -137,7 +137,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { } buf } - def otherTest() { + def otherTest(): Unit = { if (testPositions.nonEmpty) { val pos = Position.offset(inputs(sfidx), rand.nextInt(testPositions.length)) rand.nextInt(3) match { @@ -152,7 +152,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { /**/ new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean()) } - doTest(sfidx, changes, testPositions, otherTest) match { + doTest(sfidx, changes, testPositions, () => otherTest()) match { case Some(errortrace) => println(errortrace) minimize(errortrace) @@ -161,7 +161,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { } } - def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = { + def doTest(sfidx: Int, changes: scala.collection.Seq[Change], testPositions: scala.collection.Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = { print("new round with "+changes.length+" changes:") changes foreach (_.deleteAll()) otherTest() @@ -185,11 +185,11 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { "\nContents:\n"+content.mkString } - def minimize(etrace: ErrorTrace) {} + def minimize(etrace: ErrorTrace): Unit = () /**/ - def run() { - askReload(inputs: _*) + def run(): Unit = { + askReload(inputs.toIndexedSeq: _*) for (i <- 0 until ntests) testFileChanges(randomSourceFileIdx()) } @@ -206,12 +206,12 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { * do ask-types, type-completions, or scope-completions. 
*/ object Tester { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val settings = new Settings() val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true) println("filenames = "+filenames) val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile) new Tester(args(0).toInt, files, settings).run() - sys.exit(0) + System.exit(0) } } diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala index 3ed8cd215c94..662b158aee7e 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,7 @@ package scala.tools.nsc package interactive package tests.core +import scala.annotation.unused import scala.tools.nsc.interactive.Response import scala.reflect.internal.util.Position import scala.reflect.internal.util.SourceFile @@ -53,7 +54,7 @@ trait AskParse extends AskCommand { * (else commands such as `AskTypeCompletionAt` may fail simply because * the source's AST is not yet loaded). 
*/ - def askParse(sources: Seq[SourceFile]) { + def askParse(sources: Seq[SourceFile]): Unit = { val responses = sources map (askParse(_)) responses.foreach(_.get) // force source files parsing } @@ -121,7 +122,7 @@ trait AskTypeAt extends AskCommand { trait AskLoadedTyped extends AskCommand { import compiler.Tree - protected def askLoadedTyped(source: SourceFile, keepLoaded: Boolean = false)(implicit reporter: Reporter): Response[Tree] = { + protected def askLoadedTyped(source: SourceFile, keepLoaded: Boolean = false)(implicit @unused reporter: Reporter): Response[Tree] = { ask { compiler.askLoadedTyped(source, keepLoaded, _) } diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index 630f2e3317eb..b7ae411be9be 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -28,7 +28,7 @@ private[tests] trait CoreTestDefs extends PresentationCompilerTestDef with AskTypeCompletionAt { - override def runTest() { + override def runTest(): Unit = { askAllSources(TypeCompletionMarker) { pos => askTypeCompletionAt(pos) } { (pos, members) => @@ -52,7 +52,7 @@ private[tests] trait CoreTestDefs extends PresentationCompilerTestDef with AskScopeCompletionAt { - override def runTest() { + override def runTest(): Unit = { askAllSources(ScopeCompletionMarker) { pos => askScopeCompletionAt(pos) } { (pos, members) => @@ -83,7 +83,7 @@ private[tests] trait CoreTestDefs extends PresentationCompilerTestDef with AskTypeAt { - override def runTest() { + override def runTest(): Unit = { askAllSources(TypeMarker) { pos => askTypeAt(pos) } { (pos, tree) => @@ -102,7 +102,7 @@ private[tests] trait CoreTestDefs with AskTypeAt with AskTypeCompletionAt { - override def runTest() { + override def runTest(): Unit = { askAllSources(HyperlinkMarker) { pos => askTypeAt(pos)(NullReporter) } { (pos, tree) => @@ -113,10 +113,8 @@ private[tests] trait CoreTestDefs reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name) val r = new Response[Position] val sourceFile = tree.symbol.sourceFile - // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int` - // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile! 
- val treePath = if (sourceFile ne null) sourceFile.path else null - val treeName = if (sourceFile ne null) sourceFile.name else null + val treePath = sourceFile.path + val treeName = sourceFile.name sourceFiles.find(_.path == treePath) match { case Some(source) => diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala index 7cc4b448de67..7b5041225c11 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -36,9 +36,9 @@ private[tests] trait PresentationCompilerInstance extends TestSettings { * You should provide an implementation of this method if you need * to customize the `settings` used to instantiate the presentation compiler. 
* */ - protected def prepareSettings(settings: Settings) {} + protected def prepareSettings(settings: Settings): Unit = () - protected def printClassPath(implicit reporter: Reporter) { + protected def printClassPath(implicit reporter: Reporter): Unit = { reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value)) reporter.println("\tverbose: %b".format(settings.verbose.value)) } diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala index cc24852f15df..ff21713b8122 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -27,7 +27,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources { * `marker`. For instance, askAllSources(TypeMarker)(askTypeAt)(println) would * ask the type at all positions marked with `TypeMarker.marker` and println the result. */ - private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) { + private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit): Unit = { val positions = allPositionsOf(str = marker.marker) val responses = for (pos <- positions) yield askAt(pos) @@ -37,13 +37,13 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources { /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the * response before going to the next one. 
*/ - private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) { + private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit): Unit = { val positions = allPositionsOf(str = marker.marker) for (pos <- positions) withResponse(pos, askAt(pos))(f) } /** All positions of the given string in all source files. */ - private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] = + private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles.toIndexedSeq, str: String): Seq[Position] = for (s <- srcs; p <- positionsOf(s, str)) yield p /** Return all positions of the given str in the given source file. */ @@ -57,7 +57,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources { buf.toList } - private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) { + private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit): Unit = { /** Return the filename:line:col version of this position. */ def showPos(pos: Position): String = "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column) diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala index b95b26a7d9a5..75100c838f2d 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -18,7 +18,7 @@ trait PresentationCompilerTestDef { private[tests] def runTest(): Unit - protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) { + protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter): Unit = { def printDelimiter() = reporter.println("=" * 80) printDelimiter() block diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala index d60d74031605..e0e583b597c4 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,10 +18,10 @@ private[tests] trait Reporter { /** Reporter that simply prints all messages in the standard output.*/ private[tests] object ConsoleReporter extends Reporter { - def println(msg: Any) { Console.println(msg) } + def println(msg: Any): Unit = { Console.println(msg) } } /** Reporter that swallows all passed message. */ private[tests] object NullReporter extends Reporter { - def println(msg: Any) {} -} \ No newline at end of file + def println(msg: Any): Unit = {} +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala index 6d9cb255a8a9..69aae26aecaa 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,7 +23,7 @@ private[tests] object SourcesCollector { * With the default `filter` only .scala and .java files are collected. * */ def apply(base: Path, filter: SourceFilter): Array[SourceFile] = { - assert(base.isDirectory, base + " is not a directory") + assert(base.isDirectory, s"$base is not a directory") base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name) } diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala index 2e39a68b3355..066d55951cd8 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,7 +18,7 @@ object TestMarker { import scala.collection.mutable.Map private val markers: Map[String, TestMarker] = Map.empty - private def checkForDuplicate(marker: TestMarker) { + private def checkForDuplicate(marker: TestMarker): Unit = { markers.get(marker.marker) match { case None => markers(marker.marker) = marker case Some(otherMarker) => diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala index c17cd43c9618..f82b5ea53304 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,4 +21,4 @@ private[tests] trait TestResources extends TestSettings { protected lazy val sourceFiles: Array[SourceFile] = SourcesCollector(baseDir / sourceDir, isScalaOrJavaSource) private def isScalaOrJavaSource(file: Path): Boolean = file.extension == "scala" | file.extension == "java" -} \ No newline at end of file +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala index e0ddc18535ec..480fcb6a08f2 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,7 +19,7 @@ private[tests] trait TestSettings { protected final val TIMEOUT = 30000 // timeout in milliseconds /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */ - protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse(".")) + protected val outDir = Path(System.getProperty("partest.cwd", ".")) /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */ protected val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path(".")) diff --git a/src/library-aux/README b/src/library-aux/README index e6dcd2927749..6912cd2c6836 100644 --- a/src/library-aux/README +++ b/src/library-aux/README @@ -1,3 +1,3 @@ Source files under this directory cannot be compiled by normal means. -They exist for bootstrapping and documentation purposes. \ No newline at end of file +They exist for bootstrapping and documentation purposes. 
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index f0d55190a669..bfef6f23cb4b 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,6 @@ package scala /** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala * execution environment inherits directly or indirectly from this class. * - * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''. * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization. * * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]]. @@ -31,13 +30,13 @@ package scala * w.print() * }}} * - * See the [[http://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more + * See the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more * details on the interplay of universal traits and value classes. */ abstract class Any { /** Compares the receiver object (`this`) with the argument object (`that`) for equivalence. * - * Any implementation of this method should be an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: + * Any implementation of this method should be an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: * * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`. 
* - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and @@ -108,29 +107,66 @@ abstract class Any { * * @return a hash value consistent with == */ - final def ##(): Int = sys.error("##") + final def ## : Int = sys.error("##") - /** Test whether the dynamic type of the receiver object is `T0`. - * - * Note that the result of the test is modulo Scala's erasure semantics. - * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the - * expression `List(1).isInstanceOf[List[String]]` will return `true`. - * In the latter example, because the type argument is erased as part of compilation it is - * not possible to check whether the contents of the list are of the specified type. + /** Test whether the dynamic type of the receiver object has the same erasure as `T0`. + * + * Depending on what `T0` is, the test is done in one of the below ways: + * + * - `T0` is a non-parameterized class type, e.g. `BigDecimal`: this method returns `true` if + * the value of the receiver object is a `BigDecimal` or a subtype of `BigDecimal`. + * - `T0` is a parameterized class type, e.g. `List[Int]`: this method returns `true` if + * the value of the receiver object is some `List[X]` for any `X`. + * For example, `List(1, 2, 3).isInstanceOf[List[String]]` will return true. + * - `T0` is some singleton type `x.type` or literal `x`: this method returns `this.eq(x)`. + * For example, `x.isInstanceOf[1]` is equivalent to `x.eq(1)` + * - `T0` is an intersection `X with Y` or `X & Y: this method is equivalent to `x.isInstanceOf[X] && x.isInstanceOf[Y]` + * - `T0` is a union `X | Y`: this method is equivalent to `x.isInstanceOf[X] || x.isInstanceOf[Y]` + * - `T0` is a type parameter or an abstract type member: this method is equivalent + * to `isInstanceOf[U]` where `U` is `T0`'s upper bound, `Any` if `T0` is unbounded. 
+ * For example, `x.isInstanceOf[A]` where `A` is an unbounded type parameter + * will return true for any value of `x`. + * + * This is exactly equivalent to the type pattern `_: T0` + * + * @note due to the unexpectedness of `List(1, 2, 3).isInstanceOf[List[String]]` returning true and + * `x.isInstanceOf[A]` where `A` is a type parameter or abstract member returning true, + * these forms issue a warning. * * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. */ final def isInstanceOf[T0]: Boolean = sys.error("isInstanceOf") - /** Cast the receiver object to be of type `T0`. - * - * Note that the success of a cast at runtime is modulo Scala's erasure semantics. - * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at - * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not. - * In the latter example, because the type argument is erased as part of compilation it is - * not possible to check whether the contents of the list are of the requested type. - * - * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`. + /** Forces the compiler to treat the receiver object as having type `T0`, + * even though doing so may violate type safety. + * + * This method is useful when you believe you have type information the compiler doesn't, + * and it also isn't possible to check the type at runtime. + * In such situations, skipping type safety is the only option. + * + * It is platform dependent whether `asInstanceOf` has any effect at runtime. + * It might do a runtime type test on the erasure of `T0`, + * insert a conversion (such as boxing/unboxing), fill in a default value, or do nothing at all. + * + * In particular, `asInstanceOf` is not a type test. 
It does **not** mean: + * {{{ + * this match { + * case x: T0 => x + * case _ => throw ClassCastException("...") + * }}} + * Use pattern matching or [[isInstanceOf]] for type testing instead. + * + * Situations where `asInstanceOf` is useful: + * - when flow analysis fails to deduce `T0` automatically + * - when down-casting a type parameter or an abstract type member (which cannot be checked at runtime due to type erasure) + * If there is any doubt and you are able to type test instead, you should do so. + * + * Be careful of using `asInstanceOf` when `T0` is a primitive type. + * When `T0` is primitive, `asInstanceOf` may insert a conversion instead of a type test. + * If your intent is to convert, use a `toT` method (`x.toChar`, `x.toByte`, etc.). + * + * @throws ClassCastException if the receiver is not an instance of the erasure of `T0`, + * if that can be checked on this platform * @return the receiver object. */ final def asInstanceOf[T0]: T0 = sys.error("asInstanceOf") diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala index fabb1a7f51a9..d0d87d35c40b 100644 --- a/src/library-aux/scala/AnyRef.scala +++ b/src/library-aux/scala/AnyRef.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -47,11 +47,11 @@ trait AnyRef extends Any { * @param body the code to execute * @return the result of `body` */ - def synchronized[T](body: => T): T + def synchronized[T](body: => T): T = sys.error("synchronized") /** Tests whether the argument (`that`) is a reference to the receiver object (`this`). 
* - * The `eq` method implements an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on + * The `eq` method implements an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on * non-null instances of `AnyRef`, and has three additional properties: * * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of @@ -116,12 +116,24 @@ trait AnyRef extends Any { */ final def notifyAll(): Unit - /** Causes the current Thread to wait until another Thread invokes - * the notify() or notifyAll() methods. + /** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--]]. * * @note not specified by SLS as a member of AnyRef */ final def wait (): Unit + + /** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-]] + * + * @param timeout the maximum time to wait in milliseconds. + * @param nanos additional time, in nanoseconds range 0-999999. + * @note not specified by SLS as a member of AnyRef + */ final def wait (timeout: Long, nanos: Int): Unit + + /** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-]]. + * + * @param timeout the maximum time to wait in milliseconds. + * @note not specified by SLS as a member of AnyRef + */ final def wait (timeout: Long): Unit } diff --git a/src/library-aux/scala/Nothing.scala b/src/library-aux/scala/Nothing.scala index d52353e3f314..b2f905fe0f39 100644 --- a/src/library-aux/scala/Nothing.scala +++ b/src/library-aux/scala/Nothing.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/library-aux/scala/Null.scala b/src/library-aux/scala/Null.scala index 8d40134fa554..1bbf78ebdfd7 100644 --- a/src/library-aux/scala/Null.scala +++ b/src/library-aux/scala/Null.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library-aux/scala/Singleton.scala b/src/library-aux/scala/Singleton.scala new file mode 100644 index 000000000000..5f444937b020 --- /dev/null +++ b/src/library-aux/scala/Singleton.scala @@ -0,0 +1,59 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + + +/** `Singleton` is used by the compiler as a supertype for singleton types. This includes literal types, + * as they are also singleton types. + * + * {{{ + * scala> object A { val x = 42 } + * defined object A + * + * scala> implicitly[A.type <:< Singleton] + * res12: A.type <:< Singleton = generalized constraint + * + * scala> implicitly[A.x.type <:< Singleton] + * res13: A.x.type <:< Singleton = generalized constraint + * + * scala> implicitly[42 <:< Singleton] + * res14: 42 <:< Singleton = generalized constraint + * + * scala> implicitly[Int <:< Singleton] + * ^ + * error: Cannot prove that Int <:< Singleton. + * }}} + * + * `Singleton` has a special meaning when it appears as an upper bound on a formal type + * parameter. Normally, type inference in Scala widens singleton types to the underlying + * non-singleton type. When a type parameter has an explicit upper bound of `Singleton`, + * the compiler infers a singleton type. 
+ * + * {{{ + * scala> def check42[T](x: T)(implicit ev: T =:= 42): T = x + * check42: [T](x: T)(implicit ev: T =:= 42)T + * + * scala> val x1 = check42(42) + * ^ + * error: Cannot prove that Int =:= 42. + * + * scala> def singleCheck42[T <: Singleton](x: T)(implicit ev: T =:= 42): T = x + * singleCheck42: [T <: Singleton](x: T)(implicit ev: T =:= 42)T + * + * scala> val x2 = singleCheck42(42) + * x2: Int = 42 + * }}} + * + * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. + */ +final trait Singleton extends Any diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt index e06460caecae..458605b1bab6 100644 --- a/src/library/rootdoc.txt +++ b/src/library/rootdoc.txt @@ -19,16 +19,6 @@ Notable packages include: [[scala.collection.mutable.HashMap `HashMap`]] or [[scala.collection.mutable.HashSet `HashSet`]] - [[scala.collection.concurrent `scala.collection.concurrent`]] - Mutable, concurrent data-structures such as [[scala.collection.concurrent.TrieMap `TrieMap`]] - - [[scala.collection.parallel.immutable `scala.collection.parallel.immutable`]] - Immutable, parallel - data-structures such as [[scala.collection.parallel.immutable.ParVector `ParVector`]], - [[scala.collection.parallel.immutable.ParRange `ParRange`]], - [[scala.collection.parallel.immutable.ParHashMap `ParHashMap`]] or - [[scala.collection.parallel.immutable.ParHashSet `ParHashSet`]] - - [[scala.collection.parallel.mutable `scala.collection.parallel.mutable`]] - Mutable, parallel - data-structures such as [[scala.collection.parallel.mutable.ParArray `ParArray`]], - [[scala.collection.parallel.mutable.ParHashMap `ParHashMap`]], - [[scala.collection.parallel.mutable.ParTrieMap `ParTrieMap`]] or - [[scala.collection.parallel.mutable.ParHashSet `ParHashSet`]] - [[scala.concurrent `scala.concurrent`]] - Primitives for concurrent programming such as [[scala.concurrent.Future `Futures`]] and [[scala.concurrent.Promise `Promises`]] - [[scala.io 
`scala.io`]] - Input and output operations @@ -41,10 +31,11 @@ Other packages exist. See the complete list on the right. Additional parts of the standard library are shipped as separate libraries. These include: - - [[scala.reflect `scala.reflect`]] - Scala's reflection API (scala-reflect.jar) - - [[https://github.com/scala/scala-xml `scala.xml`]] - XML parsing, manipulation, and serialization (scala-xml.jar) - - [[https://github.com/scala/scala-swing `scala.swing`]] - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar) + - [[https://www.scala-lang.org/api/current/scala-reflect/scala/reflect/index.html `scala.reflect`]] - Scala's reflection API (scala-reflect.jar) + - [[https://github.com/scala/scala-xml `scala.xml`]] - XML parsing, manipulation, and serialization (scala-xml.jar) + - [[https://github.com/scala/scala-parallel-collections `scala.collection.parallel`]] - Parallel collections (scala-parallel-collections.jar) - [[https://github.com/scala/scala-parser-combinators `scala.util.parsing`]] - Parser combinators (scala-parser-combinators.jar) + - [[https://github.com/scala/scala-swing `scala.swing`]] - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar) == Automatic imports == diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala index 79b2a51ec4f8..79be48d642be 100644 --- a/src/library/scala/AnyVal.scala +++ b/src/library/scala/AnyVal.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -29,16 +29,15 @@ package scala * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]]. * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]]. * - * Prior to Scala 2.10, `AnyVal` was a sealed trait. 
Beginning with Scala 2.10, - * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class'' - * which is treated specially by the compiler. Properly-defined user value classes provide a way + * A subclass of `AnyVal` is called a ''user-defined value class'' + * and is treated specially by the compiler. Properly-defined user value classes provide a way * to improve performance on user-defined types by avoiding object allocation at runtime, and by * replacing virtual method invocations with static method invocations. * * User-defined value classes which avoid object allocation... * * - must have a single `val` parameter that is the underlying runtime representation. - * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. + * - can define `def`s, but no `val`s, `var`s, or nested `trait`s, `class`es or `object`s. * - typically extend no other trait apart from `AnyVal`. * - cannot be used in type tests or pattern matching. * - may not override `equals` or `hashCode` methods. @@ -52,7 +51,7 @@ package scala * * It's important to note that user-defined value classes are limited, and in some circumstances, * still must allocate a value class instance at runtime. These limitations and circumstances are - * explained in greater detail in the [[http://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. + * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. */ abstract class AnyVal extends Any { def getClass(): Class[_ <: AnyVal] = null diff --git a/src/library/scala/AnyValCompanion.scala b/src/library/scala/AnyValCompanion.scala index 968422915d9b..2bb8584f8bac 100644 --- a/src/library/scala/AnyValCompanion.scala +++ b/src/library/scala/AnyValCompanion.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala index 3298cb0d12ef..896cf8d9b22e 100644 --- a/src/library/scala/App.scala +++ b/src/library/scala/App.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,9 @@ package scala -import scala.compat.Platform.currentTime +import java.lang.System.{currentTimeMillis => currentTime} + +import scala.annotation.nowarn import scala.collection.mutable.ListBuffer /** The `App` trait can be used to quickly turn objects @@ -22,7 +24,9 @@ import scala.collection.mutable.ListBuffer * Console.println("Hello World: " + (args mkString ", ")) * } * }}} - * Here, object `Main` inherits the `main` method of `App`. + * + * No explicit `main` method is needed. Instead, + * the whole class body becomes the “main method”. * * `args` returns the current command line arguments as an array. * @@ -32,29 +36,44 @@ import scala.collection.mutable.ListBuffer * functionality, which means that fields of the object will not have been initialized * before the main method has been executed.''''' * - * It should also be noted that the `main` method should not be overridden: - * the whole class body becomes the “main method”. - * * Future versions of this trait will no longer extend `DelayedInit`. - * - * @author Martin Odersky - * @since 2.1 + * + * In Scala 3, the `DelayedInit` feature was dropped. `App` exists only in a limited form + * that also does not support command line arguments and will be deprecated in the future. 
+ * + * [[https://docs.scala-lang.org/scala3/book/methods-main-methods.html @main]] methods are the + * recommended scheme to generate programs that can be invoked from the command line in Scala 3. + * + * {{{ + * @main def runMyProgram(args: String*): Unit = { + * // your program here + * } + * }}} + * + * If programs need to cross-build between Scala 2 and Scala 3, it is recommended to use an + * explicit `main` method: + * {{{ + * object Main { + * def main(args: Array[String]): Unit = { + * // your program here + * } + * } + * }}} */ +@nowarn("""cat=deprecation&origin=scala\.DelayedInit""") trait App extends DelayedInit { /** The time when the execution of this program started, in milliseconds since 1 * January 1970 UTC. */ - @deprecatedOverriding("executionStart should not be overridden", "2.11.0") - val executionStart: Long = currentTime + final val executionStart: Long = currentTime /** The command line arguments passed to the application's `main` method. */ - @deprecatedOverriding("args should not be overridden", "2.11.0") - protected def args: Array[String] = _args + protected final def args: Array[String] = _args - private var _args: Array[String] = _ + private[this] var _args: Array[String] = _ - private val initCode = new ListBuffer[() => Unit] + private[this] val initCode = new ListBuffer[() => Unit] /** The init hook. This saves all initialization code for execution within `main`. * This method is normally never called directly from user code. @@ -64,7 +83,7 @@ trait App extends DelayedInit { * @param body the initialization code to be stored for later execution */ @deprecated("the delayedInit mechanism will disappear", "2.11.0") - override def delayedInit(body: => Unit) { + override def delayedInit(body: => Unit): Unit = { initCode += (() => body) } @@ -74,8 +93,7 @@ trait App extends DelayedInit { * they were passed to `delayedInit`. 
* @param args the arguments passed to the main method */ - @deprecatedOverriding("main should not be overridden", "2.11.0") - def main(args: Array[String]) = { + final def main(args: Array[String]) = { this._args = args for (proc <- initCode) proc() if (util.Properties.propIsSet("scala.time")) { diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index b2390a41804e..02af1837e1b7 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,32 +12,16 @@ package scala -import scala.collection.generic._ -import scala.collection.{immutable, mutable} -import mutable.{ArrayBuilder, ArraySeq} +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit import scala.runtime.ScalaRunTime import scala.runtime.ScalaRunTime.{array_apply, array_update} -/** Contains a fallback builder for arrays when the element type - * does not have a class tag. In that case a generic array is built. - */ -class FallbackArrayBuilding { - - /** A builder factory that generates a generic array. - * Called instead of `Array.newBuilder` if the element type of an array - * does not have a class tag. Note that fallbackBuilder factory - * needs an implicit parameter (otherwise it would not be dominated in - * implicit search by `Array.canBuildFrom`). We make sure that - * implicit search is always successful. 
- */ - implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, ArraySeq[T]] = - new CanBuildFrom[Array[_], T, ArraySeq[T]] { - def apply(from: Array[_]) = ArraySeq.newBuilder[T] - def apply() = ArraySeq.newBuilder[T] - } -} - /** Utility methods for operating on arrays. * For example: * {{{ @@ -47,109 +31,54 @@ class FallbackArrayBuilding { * }}} * where the array objects `a`, `b` and `c` have respectively the values * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. - * - * @author Martin Odersky - * @since 1.0 */ -object Array extends FallbackArrayBuilding { - - val emptyBooleanArray = empty[Boolean] - val emptyByteArray = empty[Byte] - val emptyCharArray = empty[Char] - val emptyDoubleArray = empty[Double] - val emptyFloatArray = empty[Float] - val emptyIntArray = empty[Int] - val emptyLongArray = empty[Long] - val emptyShortArray = empty[Short] - - private[scala] //this is only private because of binary compatability - val emptyUnitArray = empty[scala.runtime.BoxedUnit].asInstanceOf[Array[Unit]] - val emptyObjectArray = empty[Object] - - implicit def canBuildFrom[T](implicit tag: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] = { - val cls = tag.runtimeClass - (if (cls.isPrimitive) { - cls match { - case java.lang.Integer.TYPE => cbfIntArray - case java.lang.Double.TYPE => cbfDoubleArray - case java.lang.Long.TYPE => cbfLongArray - case java.lang.Float.TYPE => cbfFloatArray - case java.lang.Character.TYPE => cbfCharArray - case java.lang.Byte.TYPE => cbfByteArray - case java.lang.Short.TYPE => cbfShortArray - case java.lang.Boolean.TYPE => cbfBooleanArray - case java.lang.Void.TYPE => cbfUnitArray - } - } else if (cls == ObjectClass) { - cbfObjectArray - } else { - refCBF[T with AnyRef](tag.asInstanceOf[ClassTag[T with AnyRef]]) - }).asInstanceOf[CanBuildFrom[Array[_], T, Array[T]]] +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new 
Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] } - private[this] val ObjectClass = classOf[Object] - - private[this] val cbfBooleanArray = new CanBuildFrom[Array[_], Boolean, Array[Boolean]] { - def apply(from: Array[_]) = new ArrayBuilder.ofBoolean() - def apply() = new ArrayBuilder.ofBoolean() - } - - private[this] val cbfByteArray = new CanBuildFrom[Array[_], Byte, Array[Byte]] { - def apply(from: Array[_]) = new ArrayBuilder.ofByte() - def apply() = new ArrayBuilder.ofByte() - } - - private[this] val cbfCharArray = new CanBuildFrom[Array[_], Char, Array[Char]] { - def apply(from: Array[_]) = new ArrayBuilder.ofChar() - def apply() = new ArrayBuilder.ofChar() - } - - private[this] val cbfDoubleArray = new CanBuildFrom[Array[_], Double, Array[Double]] { - def apply(from: Array[_]) = new ArrayBuilder.ofDouble() - def apply() = new ArrayBuilder.ofDouble() - } - - private[this] val cbfFloatArray = new CanBuildFrom[Array[_], Float, Array[Float]] { - def apply(from: Array[_]) = new ArrayBuilder.ofFloat() - def apply() = new ArrayBuilder.ofFloat() - } - - private[this] val cbfIntArray = new CanBuildFrom[Array[_], Int, Array[Int]] { - def apply(from: Array[_]) = new ArrayBuilder.ofInt() - def apply() = new ArrayBuilder.ofInt() - } - - private[this] val cbfLongArray = new 
CanBuildFrom[Array[_], Long, Array[Long]] { - def apply(from: Array[_]) = new ArrayBuilder.ofLong() - def apply() = new ArrayBuilder.ofLong() - } - - private[this] val cbfShortArray = new CanBuildFrom[Array[_], Short, Array[Short]] { - def apply(from: Array[_]) = new ArrayBuilder.ofShort() - def apply() = new ArrayBuilder.ofShort() - } - - private[this] val cbfUnitArray = new CanBuildFrom[Array[_], Unit, Array[Unit]] { - def apply(from: Array[_]) = new ArrayBuilder.ofUnit() - def apply() = new ArrayBuilder.ofUnit() - } - - private[this] val cbfObjectArray = refCBF[Object] - private[this] def refCBF[T <: AnyRef](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] = - new CanBuildFrom[Array[_], T, Array[T]] { - def apply(from: Array[_]) = new ArrayBuilder.ofRef[T]()(t) - def apply() = new ArrayBuilder.ofRef[T]()(t) - } /** * Returns a new [[scala.collection.mutable.ArrayBuilder]]. */ - def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(t) + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. 
+ * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } private def slowcopy(src : AnyRef, srcPos : Int, dest : AnyRef, destPos : Int, - length : Int) { + length : Int): Unit = { var i = srcPos var j = destPos val srcUntil = srcPos + length @@ -175,18 +104,79 @@ object Array extends FallbackArrayBuilding { * * @see `java.lang.System#arraycopy` */ - def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) { + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { val srcClass = src.getClass - if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + val destClass = dest.getClass + if (srcClass.isArray && ((destClass eq srcClass) || + (destClass.isArray && !srcClass.getComponentType.isPrimitive && !destClass.getComponentType.isPrimitive))) java.lang.System.arraycopy(src, srcPos, dest, destPos, length) else slowcopy(src, srcPos, dest, destPos, length) } - /** Returns an array of length 0 */ - def empty[T: ClassTag]: Array[T] = { - implicitly[ClassTag[T]].emptyArray + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case original: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case original: Array[AnyRef] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Int] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Double] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Long] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Float] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Char] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Byte] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Short] => java.util.Arrays.copyOf(original, newLength) + case original: Array[Boolean] => java.util.Arrays.copyOf(original, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + /** Creates an array with given elements. * * @param xs the elements to put in the array @@ -197,16 +187,19 @@ object Array extends FallbackArrayBuilding { def apply[T: ClassTag](xs: T*): Array[T] = { val len = xs.length xs match { - case wa: mutable.WrappedArray[_] if wa.elemTag == classTag[T] => + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => // We get here in test/files/run/sd760a.scala, `Array[T](t)` for // a specialized type parameter `T`. While we still pay for two // copies of the array it is better than before when we also boxed // each element when populating the result. 
- ScalaRunTime.array_clone(wa.array).asInstanceOf[Array[T]] + ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] case _ => val array = new Array[T](len) + val iterator = xs.iterator var i = 0 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } } @@ -216,8 +209,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { val array = new Array[Boolean](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -226,8 +222,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Byte, xs: Byte*): Array[Byte] = { val array = new Array[Byte](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -236,8 +235,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Short, xs: Short*): Array[Short] = { val array = new Array[Short](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -246,8 +248,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Char, xs: Char*): Array[Char] = { val array = new Array[Char](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -256,8 +261,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Int, xs: Int*): Array[Int] = { val array = new Array[Int](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = 
iterator.next(); i += 1 + } array } @@ -266,8 +274,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Long, xs: Long*): Array[Long] = { val array = new Array[Long](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -276,8 +287,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Float, xs: Float*): Array[Float] = { val array = new Array[Float](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -286,8 +300,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Double, xs: Double*): Array[Double] = { val array = new Array[Double](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -295,8 +312,11 @@ object Array extends FallbackArrayBuilding { def apply(x: Unit, xs: Unit*): Array[Unit] = { val array = new Array[Unit](xs.length + 1) array(0) = x + val iterator = xs.iterator var i = 1 - for (x <- xs.iterator) { array(i) = x; i += 1 } + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } array } @@ -347,14 +367,17 @@ object Array extends FallbackArrayBuilding { * `elem`. 
*/ def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { - val b = newBuilder[T] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array } - b.result() } /** Returns a two-dimensional array that contains the results of some element @@ -372,7 +395,7 @@ object Array extends FallbackArrayBuilding { * * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension + * @param n3 the number of elements in the 3rd dimension * @param elem the element computation */ def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = @@ -383,7 +406,7 @@ object Array extends FallbackArrayBuilding { * * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension + * @param n3 the number of elements in the 3rd dimension * @param n4 the number of elements in the 4th dimension * @param elem the element computation */ @@ -395,7 +418,7 @@ object Array extends FallbackArrayBuilding { * * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension + * @param n3 the number of elements in the 3rd dimension * @param n4 the number of elements in the 4th dimension * @param n5 the number of elements in the 5th dimension * @param elem the element computation @@ -408,17 +431,20 @@ object Array extends FallbackArrayBuilding { * * @param n The number of elements in the array * @param f The function computing element values - * @return A traversable consisting of elements `f(0),f(1), ..., f(n - 1)` + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` */ def tabulate[T: ClassTag](n: Int)(f: Int 
=> T): Array[T] = { - val b = newBuilder[T] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array } - b.result() } /** Returns a two-dimensional array containing values of a given function @@ -485,15 +511,16 @@ object Array extends FallbackArrayBuilding { */ def range(start: Int, end: Int, step: Int): Array[Int] = { if (step == 0) throw new IllegalArgumentException("zero step") - val b = newBuilder[Int] - b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false)) + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + var n = 0 var i = start while (if (step < 0) end < i else i < end) { - b += i + array(n) = i i += step + n += 1 } - b.result() + array } /** Returns an array containing repeated applications of a function to a start value. @@ -504,32 +531,61 @@ object Array extends FallbackArrayBuilding { * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` */ def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { - val b = newBuilder[T] - if (len > 0) { - b.sizeHint(len) + val array = new Array[T](len) var acc = start var i = 1 - b += acc + array(0) = acc while (i < len) { acc = f(acc) + array(i) = acc i += 1 - b += acc } + array + } else { + empty[T] } - b.result() } + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. 
+ * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. * * @param x the selector value - * @return sequence wrapped in a [[scala.Some]], if `x` is a Seq, otherwise `None` + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` */ - def unapplySeq[T](x: Array[T]): Option[IndexedSeq[T]] = - if (x == null) None else Some(x.toIndexedSeq) - // !!! the null check should to be necessary, but without it 2241 fails. Seems to be a bug - // in pattern matcher. @PP: I noted in #4364 I think the behavior is correct. + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } } /** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation @@ -548,31 +604,29 @@ object Array extends FallbackArrayBuilding { * `update(Int, T)`. * * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion - * to [[scala.collection.mutable.ArrayOps]] (shown on line 4 of the example above) and a conversion - * to [[scala.collection.mutable.WrappedArray]] (a subtype of [[scala.collection.Seq]]). 
+ * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). * Both types make available many of the standard operations found in the Scala collections API. * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, - * while the conversion to `WrappedArray` is permanent as all operations return a `WrappedArray`. + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. * - * The conversion to `ArrayOps` takes priority over the conversion to `WrappedArray`. For instance, + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. For instance, * consider the following code: * * {{{ * val arr = Array(1, 2, 3) * val arrReversed = arr.reverse - * val seqReversed : Seq[Int] = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse * }}} * * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed - * by converting to `WrappedArray` first and invoking the variant of `reverse` that returns another - * `WrappedArray`. + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. * - * @author Martin Odersky - * @since 1.0 - * @see [[http://www.scala-lang.org/files/archive/spec/2.12/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) - * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. 
- * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. * @hideImplicitConversion scala.Predef.booleanArrayOps * @hideImplicitConversion scala.Predef.byteArrayOps * @hideImplicitConversion scala.Predef.charArrayOps @@ -602,11 +656,6 @@ object Array extends FallbackArrayBuilding { * @define willNotTerminateInf * @define collectExample * @define undefinedorder - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is either `Array[B]` if an ClassTag is available for B or `ArraySeq[B]` otherwise. - * @define zipthatinfo $thatinfo - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current - * representation type `Repr` and the new element type `B`. */ final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { @@ -633,7 +682,7 @@ final class Array[T](_length: Int) extends java.io.Serializable with java.lang.C * @param x the value to be written at index `i` * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` */ - def update(i: Int, x: T) { throw new Error() } + def update(i: Int, x: T): Unit = { throw new Error() } /** Clone the Array. 
* diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala index a208f863346f..ea8a2e37cda9 100644 --- a/src/library/scala/Boolean.scala +++ b/src/library/scala/Boolean.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala index 25e668033061..1f32d4d0bca1 100644 --- a/src/library/scala/Byte.scala +++ b/src/library/scala/Byte.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -46,6 +46,7 @@ final abstract class Byte private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Int + @deprecated("Adding a number and a String is deprecated. Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala index ad88d1721fdc..52871422a39e 100644 --- a/src/library/scala/Char.scala +++ b/src/library/scala/Char.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -46,6 +46,7 @@ final abstract class Char private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Int + @deprecated("Adding a number and a String is deprecated. 
Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** diff --git a/src/library/scala/Cloneable.scala b/src/library/scala/Cloneable.scala deleted file mode 100644 index a1cd9d7e2788..000000000000 --- a/src/library/scala/Cloneable.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -/** - * Classes extending this trait are cloneable across platforms (Java, .NET). - */ -trait Cloneable extends java.lang.Cloneable diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala index e02a86ae2e11..82e5ac10413e 100644 --- a/src/library/scala/Console.scala +++ b/src/library/scala/Console.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,7 @@ package scala import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PrintStream, Reader } -import scala.io.{ AnsiColor, StdIn } +import scala.io.AnsiColor import scala.util.DynamicVariable /** Implements functionality for printing Scala values on the terminal. For reading values @@ -109,9 +109,6 @@ import scala.util.DynamicVariable * First primes: Vector(2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47) * * - * @author Matthias Zenger - * @since 1.0 - * * @groupname console-output Console Output * @groupprio console-output 30 * @groupdesc console-output These methods provide output via the console. @@ -126,10 +123,10 @@ import scala.util.DynamicVariable * a body of code. Threadsafe by virtue of [[scala.util.DynamicVariable]]. 
* */ -object Console extends DeprecatedConsole with AnsiColor { - private val outVar = new DynamicVariable[PrintStream](java.lang.System.out) - private val errVar = new DynamicVariable[PrintStream](java.lang.System.err) - private val inVar = new DynamicVariable[BufferedReader]( +object Console extends AnsiColor { + private[this] val outVar = new DynamicVariable[PrintStream](java.lang.System.out) + private[this] val errVar = new DynamicVariable[PrintStream](java.lang.System.err) + private[this] val inVar = new DynamicVariable[BufferedReader]( new BufferedReader(new InputStreamReader(java.lang.System.in))) protected def setOutDirect(out: PrintStream): Unit = outVar.value = out @@ -139,15 +136,15 @@ object Console extends DeprecatedConsole with AnsiColor { /** The default output, can be overridden by `withOut` * @group io-default */ - def out = outVar.value + def out: PrintStream = outVar.value /** The default error, can be overridden by `withErr` * @group io-default */ - def err = errVar.value + def err: PrintStream = errVar.value /** The default input, can be overridden by `withIn` * @group io-default */ - def in = inVar.value + def in: BufferedReader = inVar.value /** Sets the default output stream for the duration * of execution of one thunk. 
@@ -163,7 +160,7 @@ object Console extends DeprecatedConsole with AnsiColor { * @see `withOut[T](out:OutputStream)(thunk: => T)` * @group io-redefinition */ - def withOut[T](out: PrintStream)(thunk: =>T): T = + def withOut[T](out: PrintStream)(thunk: => T): T = outVar.withValue(out)(thunk) /** Sets the default output stream for the duration @@ -176,7 +173,7 @@ object Console extends DeprecatedConsole with AnsiColor { * @see `withOut[T](out:PrintStream)(thunk: => T)` * @group io-redefinition */ - def withOut[T](out: OutputStream)(thunk: =>T): T = + def withOut[T](out: OutputStream)(thunk: => T): T = withOut(new PrintStream(out))(thunk) /** Set the default error stream for the duration @@ -189,10 +186,10 @@ object Console extends DeprecatedConsole with AnsiColor { * @param thunk the code to execute with * the new error stream active * @return the results of `thunk` - * @see `withErr[T](err:OutputStream)(thunk: =>T)` + * @see `withErr[T](err:OutputStream)(thunk: => T)` * @group io-redefinition */ - def withErr[T](err: PrintStream)(thunk: =>T): T = + def withErr[T](err: PrintStream)(thunk: => T): T = errVar.withValue(err)(thunk) /** Sets the default error stream for the duration @@ -202,10 +199,10 @@ object Console extends DeprecatedConsole with AnsiColor { * @param thunk the code to execute with * the new error stream active * @return the results of `thunk` - * @see `withErr[T](err:PrintStream)(thunk: =>T)` + * @see `withErr[T](err:PrintStream)(thunk: => T)` * @group io-redefinition */ - def withErr[T](err: OutputStream)(thunk: =>T): T = + def withErr[T](err: OutputStream)(thunk: => T): T = withErr(new PrintStream(err))(thunk) /** Sets the default input stream for the duration @@ -223,10 +220,10 @@ object Console extends DeprecatedConsole with AnsiColor { * the new input stream active * * @return the results of `thunk` - * @see `withIn[T](in:InputStream)(thunk: =>T)` + * @see `withIn[T](in:InputStream)(thunk: => T)` * @group io-redefinition */ - def withIn[T](reader: 
Reader)(thunk: =>T): T = + def withIn[T](reader: Reader)(thunk: => T): T = inVar.withValue(new BufferedReader(reader))(thunk) /** Sets the default input stream for the duration @@ -236,10 +233,10 @@ object Console extends DeprecatedConsole with AnsiColor { * @param thunk the code to execute with * the new input stream active * @return the results of `thunk` - * @see `withIn[T](reader:Reader)(thunk: =>T)` + * @see `withIn[T](reader:Reader)(thunk: => T)` * @group io-redefinition */ - def withIn[T](in: InputStream)(thunk: =>T): T = + def withIn[T](in: InputStream)(thunk: => T): T = withIn(new InputStreamReader(in))(thunk) /** Prints an object to `out` using its `toString` method. @@ -247,7 +244,7 @@ object Console extends DeprecatedConsole with AnsiColor { * @param obj the object to print; may be null. * @group console-output */ - def print(obj: Any) { + def print(obj: Any): Unit = { out.print(if (null == obj) "null" else obj.toString()) } @@ -256,19 +253,19 @@ object Console extends DeprecatedConsole with AnsiColor { * to be made visible on the terminal. * @group console-output */ - def flush() { out.flush() } + def flush(): Unit = { out.flush() } /** Prints a newline character on the default output. * @group console-output */ - def println() { out.println() } + def println(): Unit = { out.println() } /** Prints out an object to the default output, followed by a newline character. * * @param x the object to print. * @group console-output */ - def println(x: Any) { out.println(x) } + def println(x: Any): Unit = { out.println(x) } /** Prints its arguments as a formatted string to the default output, * based on a string pattern (in a fashion similar to printf in C). 
@@ -280,65 +277,5 @@ object Console extends DeprecatedConsole with AnsiColor { * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments * @group console-output */ - def printf(text: String, args: Any*) { out.print(text format (args : _*)) } -} - -private[scala] abstract class DeprecatedConsole { - self: Console.type => - - /** Internal usage only. */ - protected def setOutDirect(out: PrintStream): Unit - protected def setErrDirect(err: PrintStream): Unit - protected def setInDirect(in: BufferedReader): Unit - - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean = StdIn.readBoolean() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte = StdIn.readByte() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readChar(): Char = StdIn.readChar() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double = StdIn.readDouble() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float = StdIn.readFloat() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readInt(): Int = StdIn.readInt() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLong(): Long = StdIn.readLong() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readShort(): Short = StdIn.readShort() - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any] = StdIn.readf(format) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any = StdIn.readf1(format) - @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf2(format: String): (Any, Any) = StdIn.readf2(format) - @deprecated("use 
the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any) = StdIn.readf3(format) - - /** Sets the default output stream. - * - * @param out the new output stream. - */ - @deprecated("use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out) - - /** Sets the default output stream. - * - * @param out the new output stream. - */ - @deprecated("use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out)) - - /** Sets the default error stream. - * - * @param err the new error stream. - */ - @deprecated("use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err) - - /** Sets the default error stream. - * - * @param err the new error stream. - */ - @deprecated("use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err)) - - /** Sets the default input stream. - * - * @param reader specifies the new input stream. - */ - @deprecated("use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader)) - - /** Sets the default input stream. - * - * @param in the new input stream. - */ - @deprecated("use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in))) + def printf(text: String, args: Any*): Unit = { out.print(text.format(args: _*)) } } diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala index 66cf41a0a97d..924d405d6c9d 100644 --- a/src/library/scala/DelayedInit.scala +++ b/src/library/scala/DelayedInit.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -45,7 +45,6 @@ package scala * * @see "Delayed Initialization" subsection of the Scala Language Specification (section 5.1) * - * @author Martin Odersky */ @deprecated("DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0", "2.11.0") trait DelayedInit { diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala index fb90a6e291d4..84d9f31daa90 100644 --- a/src/library/scala/Double.scala +++ b/src/library/scala/Double.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -37,6 +37,7 @@ final abstract class Double private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Double + @deprecated("Adding a number and a String is deprecated. Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** Returns `true` if this value is equal to x, `false` otherwise. */ diff --git a/src/library/scala/DummyImplicit.scala b/src/library/scala/DummyImplicit.scala new file mode 100644 index 000000000000..07e7acfc6ebb --- /dev/null +++ b/src/library/scala/DummyImplicit.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +/** A type for which there is always an implicit value. */ +final class DummyImplicit private () + +object DummyImplicit { + /** An implicit value yielding a `DummyImplicit`. 
*/ + implicit val dummyImplicit: DummyImplicit = new DummyImplicit +} diff --git a/src/library/scala/Dynamic.scala b/src/library/scala/Dynamic.scala index 1fa6403cf028..f80df3e49b7d 100644 --- a/src/library/scala/Dynamic.scala +++ b/src/library/scala/Dynamic.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,7 +30,7 @@ package scala * foo.arr(10) ~~> foo.applyDynamic("arr")(10) * }}} * - * As of Scala 2.10, defining direct or indirect subclasses of this trait + * Defining direct or indirect subclasses of this trait * is only possible if the language feature `dynamics` is enabled. */ trait Dynamic extends Any diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 04208198a3fb..bf61198f7d3b 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,10 @@ package scala -import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet } -import java.lang.reflect.{ Method => JMethod, Field => JField } +import scala.collection.{SpecificIterableFactory, StrictOptimizedIterableOps, View, immutable, mutable} +import java.lang.reflect.{Field => JField, Method => JMethod} + +import scala.annotation.{implicitNotFound, tailrec} import scala.reflect.NameTransformer._ import scala.util.matching.Regex @@ -23,7 +25,7 @@ import scala.util.matching.Regex * * Each call to a `Value` method adds a new unique value to the enumeration. * To be accessible, these values are usually defined as `val` members of - * the evaluation. + * the enumeration. 
* * All values in an enumeration share a common, unique type defined as the * `Value` type member of the enumeration (`Value` selected on the stable @@ -60,6 +62,7 @@ import scala.util.matching.Regex * def surfaceGravity: Double = Planet.G * mass / (radius * radius) * def surfaceWeight(otherMass: Double): Double = otherMass * surfaceGravity * } + * import scala.language.implicitConversions * implicit def valueToPlanetVal(x: Value): PlanetVal = x.asInstanceOf[PlanetVal] * * val G: Double = 6.67300E-11 @@ -80,7 +83,6 @@ import scala.util.matching.Regex * * @param initial The initial value from which to count the integers that * identifies values at run-time. - * @author Matthias Zenger */ @SerialVersionUID(8476000850333817230L) abstract class Enumeration (initial: Int) extends Serializable { @@ -94,7 +96,7 @@ abstract class Enumeration (initial: Int) extends Serializable { /** The name of this enumeration. */ - override def toString = + override def toString: String = ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split Regex.quote(NAME_JOIN_STRING)).last @@ -108,7 +110,7 @@ abstract class Enumeration (initial: Int) extends Serializable { /** The mapping from the integer used to identify values to their * names. */ - private val nmap: mutable.Map[Int, String] = new mutable.HashMap + private[this] val nmap: mutable.Map[Int, String] = new mutable.HashMap /** The values of this enumeration as a set. */ @@ -131,11 +133,11 @@ abstract class Enumeration (initial: Int) extends Serializable { /** The highest integer amongst those used to identify values in this * enumeration. */ - private var topId = initial + private[this] var topId = initial /** The lowest integer amongst those used to identify values in this * enumeration, but no higher than 0. 
*/ - private var bottomId = if(initial < 0) initial else 0 + private[this] var bottomId = if(initial < 0) initial else 0 /** The one higher than the highest integer amongst those used to identify * values in this enumeration. */ @@ -185,8 +187,14 @@ abstract class Enumeration (initial: Int) extends Serializable { */ protected final def Value(i: Int, name: String): Value = new Val(i, name) - private def populateNameMap() { - val fields: Array[JField] = getClass.getDeclaredFields + private def populateNameMap(): Unit = { + @tailrec def getFields(clazz: Class[_], acc: Array[JField]): Array[JField] = { + if (clazz == null) + acc + else + getFields(clazz.getSuperclass, if (clazz.getDeclaredFields.isEmpty) acc else acc ++ clazz.getDeclaredFields) + } + val fields = getFields(getClass.getSuperclass, getClass.getDeclaredFields) def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType) // The list of possible Value methods: 0-args which return a conforming type @@ -223,14 +231,14 @@ abstract class Enumeration (initial: Int) extends Serializable { if (this.id < that.id) -1 else if (this.id == that.id) 0 else 1 - override def equals(other: Any) = other match { + override def equals(other: Any): Boolean = other match { case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id) case _ => false } override def hashCode: Int = id.## /** Create a ValueSet which contains this value and another one */ - def + (v: Value) = ValueSet(this, v) + def + (v: Value): ValueSet = ValueSet(this, v) } /** A class implementing the [[scala.Enumeration.Value]] type. 
This class @@ -249,21 +257,21 @@ abstract class Enumeration (initial: Int) extends Serializable { nextId = i + 1 if (nextId > topId) topId = nextId if (i < bottomId) bottomId = i - def id = i - override def toString() = + def id: Int = i + override def toString(): String = if (name != null) name else try thisenum.nameOf(i) catch { case _: NoSuchElementException => "" } protected def readResolve(): AnyRef = { - val enum = thisenum.readResolve().asInstanceOf[Enumeration] - if (enum.vmap == null) this - else enum.vmap(i) + val enumeration = thisenum.readResolve().asInstanceOf[Enumeration] + if (enumeration.vmap == null) this + else enumeration.vmap(i) } } /** An ordering by id for values of this set */ - object ValueOrdering extends Ordering[Value] { + implicit object ValueOrdering extends Ordering[Value] { def compare(x: Value, y: Value): Int = x compare y } @@ -274,52 +282,69 @@ abstract class Enumeration (initial: Int) extends Serializable { * not fall below zero), organized as a `BitSet`. 
* @define Coll `collection.immutable.SortedSet` */ + @SerialVersionUID(7229671200427364242L) class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet) - extends AbstractSet[Value] - with immutable.SortedSet[Value] - with SortedSetLike[Value, ValueSet] - with Serializable { + extends immutable.AbstractSet[Value] + with immutable.SortedSet[Value] + with immutable.SortedSetOps[Value, immutable.SortedSet, ValueSet] + with StrictOptimizedIterableOps[Value, immutable.Set, ValueSet] + with Serializable { implicit def ordering: Ordering[Value] = ValueOrdering def rangeImpl(from: Option[Value], until: Option[Value]): ValueSet = new ValueSet(nnIds.rangeImpl(from.map(_.id - bottomId), until.map(_.id - bottomId))) - override def empty = ValueSet.empty - def contains(v: Value) = nnIds contains (v.id - bottomId) - def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId)) - def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId)) - def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id)) - override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(bottomId + id)) - override def stringPrefix = thisenum + ".ValueSet" + override def empty: ValueSet = ValueSet.empty + override def knownSize: Int = nnIds.size + override def isEmpty: Boolean = nnIds.isEmpty + def contains(v: Value): Boolean = nnIds contains (v.id - bottomId) + def incl (value: Value): ValueSet = new ValueSet(nnIds + (value.id - bottomId)) + def excl (value: Value): ValueSet = new ValueSet(nnIds - (value.id - bottomId)) + def iterator: Iterator[Value] = nnIds.iterator map (id => thisenum.apply(bottomId + id)) + override def iteratorFrom(start: Value): Iterator[Value] = nnIds iteratorFrom start.id map (id => thisenum.apply(bottomId + id)) + override def className: String = s"$thisenum.ValueSet" /** Creates a bit mask for the zero-adjusted ids in this set as a * new array of longs */ def toBitMask: Array[Long] = 
nnIds.toBitMask - private[Enumeration] lazy val byName: Map[String, Value] = iterator.map( v => v.toString -> v).toMap + + override protected def fromSpecific(coll: IterableOnce[Value]): ValueSet = ValueSet.fromSpecific(coll) + override protected def newSpecificBuilder = ValueSet.newBuilder + + def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(this, f)) + def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(this, f)) + + // necessary for disambiguation: + override def map[B](f: Value => B)(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = + super[SortedSet].map[B](f) + override def flatMap[B](f: Value => IterableOnce[B])(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = + super[SortedSet].flatMap[B](f) + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(ValueSet.zipOrdMsg) ev: Ordering[(Value, B)]): immutable.SortedSet[(Value, B)] = + super[SortedSet].zip[B](that) + override def collect[B](pf: PartialFunction[Value, B])(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = + super[SortedSet].collect[B](pf) + + @transient private[Enumeration] lazy val byName: Map[String, Value] = iterator.map( v => v.toString -> v).toMap } /** A factory object for value sets */ - object ValueSet { - import generic.CanBuildFrom + @SerialVersionUID(3L) + object ValueSet extends SpecificIterableFactory[Value, ValueSet] { + private final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Value] first by calling `unsorted`." + private final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Value, ${B})]. You may want to upcast to a Set[Value] first by calling `unsorted`." 
/** The empty value set */ - val empty = new ValueSet(immutable.BitSet.empty) - /** A value set consisting of given elements */ - def apply(elems: Value*): ValueSet = (newBuilder ++= elems).result() + val empty: ValueSet = new ValueSet(immutable.BitSet.empty) /** A value set containing all the values for the zero-adjusted ids * corresponding to the bits in an array */ def fromBitMask(elems: Array[Long]): ValueSet = new ValueSet(immutable.BitSet.fromBitMask(elems)) /** A builder object for value sets */ def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] { private[this] val b = new mutable.BitSet - def += (x: Value) = { b += (x.id - bottomId); this } + def addOne (x: Value) = { b += (x.id - bottomId); this } def clear() = b.clear() def result() = new ValueSet(b.toImmutable) } - /** The implicit builder for value sets */ - implicit def canBuildFrom: CanBuildFrom[ValueSet, Value, ValueSet] = - new CanBuildFrom[ValueSet, Value, ValueSet] { - def apply(from: ValueSet) = newBuilder - def apply() = newBuilder - } + def fromSpecific(it: IterableOnce[Value]): ValueSet = + newBuilder.addAll(it).result() } } diff --git a/src/library/scala/Equals.scala b/src/library/scala/Equals.scala index db8eb9d50bc4..0c35742a6746 100644 --- a/src/library/scala/Equals.scala +++ b/src/library/scala/Equals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,9 +16,11 @@ package scala * The only method not already present in class `AnyRef` is `canEqual`. */ trait Equals extends Any { - /** A method that should be called from every well-designed equals method + /** Checks whether this instance can possibly equal `that`. + * + * A method that should be called from every well-designed equals method * that is open to be overridden in a subclass. 
See - * [[http://www.artima.com/pins1ed/object-equality.html Programming in Scala, + * [[https://www.artima.com/pins1ed/object-equality.html Programming in Scala, * Chapter 28]] for discussion and design. * * @param that the value being probed for possible equality @@ -26,7 +28,8 @@ trait Equals extends Any { */ def canEqual(that: Any): Boolean - /** The universal equality method defined in `AnyRef`. + /** Checks whether this instance is equal to `that`. + * This universal equality method is defined in `AnyRef`. */ def equals(that: Any): Boolean } diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala index af4d3d0a7e98..c63620ed8d53 100644 --- a/src/library/scala/Float.scala +++ b/src/library/scala/Float.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -37,6 +37,7 @@ final abstract class Float private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Float + @deprecated("Adding a number and a String is deprecated. Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** Returns `true` if this value is equal to x, `false` otherwise. */ diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala index 08f38a71ee46..be612752552e 100644 --- a/src/library/scala/Function.scala +++ b/src/library/scala/Function.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,9 +13,6 @@ package scala /** A module defining utility methods for higher-order functional programming. 
- * - * @author Martin Odersky - * @since 1.0 */ object Function { /** Given a sequence of functions `f,,1,,`, ..., `f,,n,,`, return the @@ -23,7 +20,7 @@ object Function { * * @param fs The given sequence of functions */ - def chain[a](fs: Seq[a => a]): a => a = { x => (x /: fs) ((x, f) => f(x)) } + def chain[T](fs: scala.collection.Seq[T => T]): T => T = { x => fs.foldLeft(x)((x, f) => f(x)) } /** The constant function */ def const[T, U](x: T)(y: U): T = x @@ -46,25 +43,25 @@ object Function { /** Uncurrying for functions of arity 2. This transforms a unary function * returning another unary function into a function of arity 2. */ - def uncurried[a1, a2, b](f: a1 => a2 => b): (a1, a2) => b = { + def uncurried[T1, T2, R](f: T1 => T2 => R): (T1, T2) => R = { (x1, x2) => f(x1)(x2) } /** Uncurrying for functions of arity 3. */ - def uncurried[a1, a2, a3, b](f: a1 => a2 => a3 => b): (a1, a2, a3) => b = { + def uncurried[T1, T2, T3, R](f: T1 => T2 => T3 => R): (T1, T2, T3) => R = { (x1, x2, x3) => f(x1)(x2)(x3) } /** Uncurrying for functions of arity 4. */ - def uncurried[a1, a2, a3, a4, b](f: a1 => a2 => a3 => a4 => b): (a1, a2, a3, a4) => b = { + def uncurried[T1, T2, T3, T4, R](f: T1 => T2 => T3 => T4 => R): (T1, T2, T3, T4) => R = { (x1, x2, x3, x4) => f(x1)(x2)(x3)(x4) } /** Uncurrying for functions of arity 5. */ - def uncurried[a1, a2, a3, a4, a5, b](f: a1 => a2 => a3 => a4 => a5 => b): (a1, a2, a3, a4, a5) => b = { + def uncurried[T1, T2, T3, T4, T5, R](f: T1 => T2 => T3 => T4 => T5 => R): (T1, T2, T3, T4, T5) => R = { (x1, x2, x3, x4, x5) => f(x1)(x2)(x3)(x4)(x5) } @@ -75,59 +72,59 @@ object Function { * hold pending superior type inference for tupling anonymous functions. */ // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = { - case Tuple2(x1, x2) => f(x1, x2) + def tupled[T1, T2, R](f: (T1, T2) => R): ((T1, T2)) => R = { + case ((x1, x2)) => f(x1, x2) } /** Tupling for functions of arity 3. 
This transforms a function * of arity 3 into a unary function that takes a triple of arguments. */ // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, a3, b](f: (a1, a2, a3) => b): Tuple3[a1, a2, a3] => b = { - case Tuple3(x1, x2, x3) => f(x1, x2, x3) + def tupled[T1, T2, T3, R](f: (T1, T2, T3) => R): ((T1, T2, T3)) => R = { + case ((x1, x2, x3)) => f(x1, x2, x3) } /** Tupling for functions of arity 4. This transforms a function * of arity 4 into a unary function that takes a 4-tuple of arguments. */ // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): Tuple4[a1, a2, a3, a4] => b = { - case Tuple4(x1, x2, x3, x4) => f(x1, x2, x3, x4) + def tupled[T1, T2, T3, T4, R](f: (T1, T2, T3, T4) => R): ((T1, T2, T3, T4)) => R = { + case ((x1, x2, x3, x4)) => f(x1, x2, x3, x4) } /** Tupling for functions of arity 5. This transforms a function * of arity 5 into a unary function that takes a 5-tuple of arguments. */ // @deprecated("use `f.tupled` instead") - def tupled[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): Tuple5[a1, a2, a3, a4, a5] => b = { - case Tuple5(x1, x2, x3, x4, x5) => f(x1, x2, x3, x4, x5) + def tupled[T1, T2, T3, T4, T5, R](f: (T1, T2, T3, T4, T5) => R): ((T1, T2, T3, T4, T5)) => R = { + case ((x1, x2, x3, x4, x5)) => f(x1, x2, x3, x4, x5) } /** Un-tupling for functions of arity 2. This transforms a function taking * a pair of arguments into a binary function which takes each argument separately. */ - def untupled[a1, a2, b](f: Tuple2[a1, a2] => b): (a1, a2) => b = { - (x1, x2) => f(Tuple2(x1, x2)) + def untupled[T1, T2, R](f: ((T1, T2)) => R): (T1, T2) => R = { + (x1, x2) => f((x1, x2)) } /** Un-tupling for functions of arity 3. This transforms a function taking * a triple of arguments into a ternary function which takes each argument separately. 
*/ - def untupled[a1, a2, a3, b](f: Tuple3[a1, a2, a3] => b): (a1, a2, a3) => b = { - (x1, x2, x3) => f(Tuple3(x1, x2, x3)) + def untupled[T1, T2, T3, R](f: ((T1, T2, T3)) => R): (T1, T2, T3) => R = { + (x1, x2, x3) => f((x1, x2, x3)) } /** Un-tupling for functions of arity 4. This transforms a function taking * a 4-tuple of arguments into a function of arity 4 which takes each argument separately. */ - def untupled[a1, a2, a3, a4, b](f: Tuple4[a1, a2, a3, a4] => b): (a1, a2, a3, a4) => b = { - (x1, x2, x3, x4) => f(Tuple4(x1, x2, x3, x4)) + def untupled[T1, T2, T3, T4, R](f: ((T1, T2, T3, T4)) => R): (T1, T2, T3, T4) => R = { + (x1, x2, x3, x4) => f((x1, x2, x3, x4)) } /** Un-tupling for functions of arity 5. This transforms a function taking * a 5-tuple of arguments into a function of arity 5 which takes each argument separately. */ - def untupled[a1, a2, a3, a4, a5, b](f: Tuple5[a1, a2, a3, a4, a5] => b): (a1, a2, a3, a4, a5) => b = { - (x1, x2, x3, x4, x5) => f(Tuple5(x1, x2, x3, x4, x5)) + def untupled[T1, T2, T3, T4, T5, R](f: ((T1, T2, T3, T4, T5)) => R): (T1, T2, T3, T4, T5) => R = { + (x1, x2, x3, x4, x5) => f((x1, x2, x3, x4, x5)) } } diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala index 82f464564b23..0cdea05ebc05 100644 --- a/src/library/scala/Function0.scala +++ b/src/library/scala/Function0.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -11,24 +11,27 @@ */ // GENERATED CODE: DO NOT EDIT. -// genprod generated these sources at: Fri Oct 05 11:04:52 CEST 2018 +// genprod generated these sources at: 2022-01-17T20:47:12.170348200Z package scala /** A function of 0 parameters. 
* - * In the following example, the definition of javaVersion is a - * shorthand for the anonymous class definition anonfun0: + * In the following example, the definition of `greeting` is + * shorthand, conceptually, for the anonymous class definition + * `anonfun0`, although the implementation details of how the + * function value is constructed may differ: * * {{{ * object Main extends App { - * val javaVersion = () => sys.props("java.version") + * val name = "world" + * val greeting = () => s"hello, $name" * * val anonfun0 = new Function0[String] { - * def apply(): String = sys.props("java.version") + * def apply(): String = s"hello, $name" * } - * assert(javaVersion() == anonfun0()) + * assert(greeting() == anonfun0()) * } * }}} */ @@ -38,5 +41,5 @@ trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self */ def apply(): R - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala index e9b67af8502d..10d366303ab2 100644 --- a/src/library/scala/Function1.scala +++ b/src/library/scala/Function1.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,10 +15,40 @@ package scala +object Function1 { + + implicit final class UnliftOps[A, B] private[Function1](private val f: A => Option[B]) extends AnyVal { + /** Converts an optional function to a partial function. + * + * @example Unlike [[Function.unlift]], this [[UnliftOps.unlift]] method can be used in extractors. 
+ * {{{ + * val of: Int => Option[String] = { i => + * if (i == 2) { + * Some("matched by an optional function") + * } else { + * None + * } + * } + * + * util.Random.nextInt(4) match { + * case of.unlift(m) => // Convert an optional function to a pattern + * println(m) + * case _ => + * println("Not matched") + * } + * }}} + */ + def unlift: PartialFunction[A, B] = Function.unlift(f) + } + +} + /** A function of 1 parameter. * - * In the following example, the definition of succ is a - * shorthand for the anonymous class definition anonfun1: + * In the following example, the definition of `succ` is + * shorthand, conceptually, for the anonymous class definition + * `anonfun1`, although the implementation details of how the + * function value is constructed may differ: * * {{{ * object Main extends App { @@ -34,13 +64,13 @@ package scala * is that the latter can specify inputs which it will not handle. */ @annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.") -trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => +trait Function1[@specialized(Specializable.Arg) -T1, @specialized(Specializable.Return) +R] extends AnyRef { self => /** Apply the body of this function to the argument. * @return the result of function application. */ def apply(v1: T1): R - /** Composes two instances of Function1 in a new Function1, with this function applied last. + /** Composes two instances of `Function1` in a new `Function1`, with this function applied last. 
* * @tparam A the type to which function `g` can be applied * @param g a function A => T1 @@ -48,7 +78,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) - */ @annotation.unspecialized def compose[A](g: A => T1): A => R = { x => apply(g(x)) } - /** Composes two instances of Function1 in a new Function1, with this function applied first. + /** Composes two instances of `Function1` in a new `Function1`, with this function applied first. * * @tparam A the result type of function `g` * @param g a function R => A @@ -56,5 +86,5 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) - */ @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala index 2bf6ecf08065..59192bf8ee7d 100644 --- a/src/library/scala/Function10.scala +++ b/src/library/scala/Function10.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == f(Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)` */ - @annotation.unspecialized def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = { - case Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala index 1842fbca065e..10b1509bf369 100644 --- a/src/library/scala/Function11.scala +++ b/src/library/scala/Function11.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == f(Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)` */ - @annotation.unspecialized def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = { - case Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala index 080d4b4c4156..08d962583108 100644 --- a/src/library/scala/Function12.scala +++ b/src/library/scala/Function12.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == f(Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)` */ - @annotation.unspecialized def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = { - case Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala index 448b96e2fdfe..971368c1d467 100644 --- a/src/library/scala/Function13.scala +++ b/src/library/scala/Function13.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == f(Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)` */ - @annotation.unspecialized def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = { - case Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala index 2f6569a1c3a0..c0b72feef42c 100644 --- a/src/library/scala/Function14.scala +++ b/src/library/scala/Function14.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == f(Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)` */ - @annotation.unspecialized def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = { - case Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala index 0af3fb8bc378..67c7e1dc470a 100644 --- a/src/library/scala/Function15.scala +++ b/src/library/scala/Function15.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == f(Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)` */ - @annotation.unspecialized def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = { - case Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala index cf883964dcfd..8ea8dec9b117 100644 --- a/src/library/scala/Function16.scala +++ b/src/library/scala/Function16.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == f(Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)` */ - @annotation.unspecialized def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = { - case Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala index 4dc75ffe2604..bc157115963d 100644 --- a/src/library/scala/Function17.scala +++ b/src/library/scala/Function17.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == f(Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)` */ - @annotation.unspecialized def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = { - case Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala index f8705fd1c208..d8ff8db313c6 100644 --- a/src/library/scala/Function18.scala +++ b/src/library/scala/Function18.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == f(Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)` */ - @annotation.unspecialized def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = { - case Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala index b4c661886e87..9d79b5c2d7c1 100644 --- a/src/library/scala/Function19.scala +++ b/src/library/scala/Function19.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == f(Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)` */ - @annotation.unspecialized def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = { - case Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala index 0d02341c40ec..f30d57e49344 100644 --- a/src/library/scala/Function2.scala +++ b/src/library/scala/Function2.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,8 +17,10 @@ package scala /** A function of 2 parameters. 
* - * In the following example, the definition of max is a - * shorthand for the anonymous class definition anonfun2: + * In the following example, the definition of `max` is + * shorthand, conceptually, for the anonymous class definition + * `anonfun2`, although the implementation details of how the + * function value is constructed may differ: * * {{{ * object Main extends App { @@ -31,7 +33,7 @@ package scala * } * }}} */ -trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => +trait Function2[@specialized(Specializable.Args) -T1, @specialized(Specializable.Args) -T2, @specialized(Specializable.Return) +R] extends AnyRef { self => /** Apply the body of this function to the arguments. * @return the result of function application. */ @@ -49,8 +51,8 @@ trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @speciali * @return a function `f` such that `f((x1, x2)) == f(Tuple2(x1, x2)) == apply(x1, x2)` */ - @annotation.unspecialized def tupled: Tuple2[T1, T2] => R = { - case Tuple2(x1, x2) => apply(x1, x2) + @annotation.unspecialized def tupled: ((T1, T2)) => R = { + case ((x1, x2)) => apply(x1, x2) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala index b1523f689392..1ed5e55a1616 100644 --- a/src/library/scala/Function20.scala +++ b/src/library/scala/Function20.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == f(Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)` */ - @annotation.unspecialized def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = { - case Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala index c41374058d36..4c81489ec323 100644 --- a/src/library/scala/Function21.scala +++ b/src/library/scala/Function21.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == f(Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)` */ - @annotation.unspecialized def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = { - case Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala index 175cc0d30887..c3911f34c08e 100644 --- a/src/library/scala/Function22.scala +++ b/src/library/scala/Function22.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == f(Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)` */ - @annotation.unspecialized def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = { - case Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala index a9479f28e049..77c1a8f38541 100644 --- a/src/library/scala/Function3.scala +++ b/src/library/scala/Function3.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self => * @return a function `f` such that `f((x1, x2, x3)) == f(Tuple3(x1, x2, x3)) == apply(x1, x2, x3)` */ - @annotation.unspecialized def tupled: Tuple3[T1, T2, T3] => R = { - case Tuple3(x1, x2, x3) => apply(x1, x2, x3) + @annotation.unspecialized def tupled: ((T1, T2, T3)) => R = { + case ((x1, x2, x3)) => apply(x1, x2, x3) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala index ab7b89016dd4..f68164cf2727 100644 --- a/src/library/scala/Function4.scala +++ b/src/library/scala/Function4.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -36,8 +36,8 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self => * @return a function `f` such that `f((x1, x2, x3, x4)) == f(Tuple4(x1, x2, x3, x4)) == apply(x1, x2, x3, x4)` */ - @annotation.unspecialized def tupled: Tuple4[T1, T2, T3, T4] => R = { - case Tuple4(x1, x2, x3, x4) => apply(x1, x2, x3, x4) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4)) => R = { + case ((x1, x2, x3, x4)) => apply(x1, x2, x3, x4) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala index 9c822db6cba1..b5c347f5ee30 100644 --- a/src/library/scala/Function5.scala +++ b/src/library/scala/Function5.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self => * @return a function `f` such that `f((x1, x2, x3, x4, x5)) == f(Tuple5(x1, x2, x3, x4, x5)) == apply(x1, x2, x3, x4, x5)` */ - @annotation.unspecialized def tupled: Tuple5[T1, T2, T3, T4, T5] => R = { - case Tuple5(x1, x2, x3, x4, x5) => apply(x1, x2, x3, x4, x5) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5)) => R = { + case ((x1, x2, x3, x4, x5)) => apply(x1, x2, x3, x4, x5) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala index 109f53626d80..784a51f61e59 100644 --- a/src/library/scala/Function6.scala +++ b/src/library/scala/Function6.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -36,8 +36,8 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self => * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6)) == f(Tuple6(x1, x2, x3, x4, x5, x6)) == apply(x1, x2, x3, x4, x5, x6)` */ - @annotation.unspecialized def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = { - case Tuple6(x1, x2, x3, x4, x5, x6) => apply(x1, x2, x3, x4, x5, x6) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6)) => R = { + case ((x1, x2, x3, x4, x5, x6)) => apply(x1, x2, x3, x4, x5, x6) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala index bc0ae325c92f..07c90bfd91d3 100644 --- a/src/library/scala/Function7.scala +++ b/src/library/scala/Function7.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -36,8 +36,8 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self => * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7)) == f(Tuple7(x1, x2, x3, x4, x5, x6, x7)) == apply(x1, x2, x3, x4, x5, x6, x7)` */ - @annotation.unspecialized def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = { - case Tuple7(x1, x2, x3, x4, x5, x6, x7) => apply(x1, x2, x3, x4, x5, x6, x7) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7)) => apply(x1, x2, x3, x4, x5, x6, x7) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala index 5bae2f6f378c..27ee36b2de90 100644 --- a/src/library/scala/Function8.scala +++ b/src/library/scala/Function8.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8)) == f(Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)) == apply(x1, x2, x3, x4, x5, x6, x7, x8)` */ - @annotation.unspecialized def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = { - case Tuple8(x1, x2, x3, x4, x5, x6, x7, x8) => apply(x1, x2, x3, x4, x5, x6, x7, x8) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8)) => apply(x1, x2, x3, x4, x5, x6, x7, x8) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala index cc2f55c79d30..5bf1a5b16565 100644 --- a/src/library/scala/Function9.scala +++ b/src/library/scala/Function9.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -36,8 +36,8 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef * @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9)) == f(Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)` */ - @annotation.unspecialized def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = { - case Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9) + @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9)) => R = { + case ((x1, x2, x3, x4, x5, x6, x7, x8, x9)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9) } - override def toString() = "" + override def toString(): String = "" } diff --git a/src/library/scala/Immutable.scala b/src/library/scala/Immutable.scala deleted file mode 100644 index 16a04fccbf55..000000000000 --- a/src/library/scala/Immutable.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -/** A marker trait for all immutable data structures such as immutable - * collections. - * - * @since 2.8 - */ -trait Immutable diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala index 0d791adae651..003bd502a730 100644 --- a/src/library/scala/Int.scala +++ b/src/library/scala/Int.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -46,6 +46,7 @@ final abstract class Int private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Int + @deprecated("Adding a number and a String is deprecated. 
Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** @@ -477,8 +478,9 @@ object Int extends AnyValCompanion { override def toString = "object scala.Int" /** Language mandated coercions from Int to "wider" types. */ import scala.language.implicitConversions - implicit def int2long(x: Int): Long = x.toLong + @deprecated("Implicit conversion from Int to Float is dangerous because it loses precision. Write `.toFloat` instead.", "2.13.1") implicit def int2float(x: Int): Float = x.toFloat + implicit def int2long(x: Int): Long = x.toLong implicit def int2double(x: Int): Double = x.toDouble } diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala index 862a2b190f32..09bd35c64677 100644 --- a/src/library/scala/Long.scala +++ b/src/library/scala/Long.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -46,6 +46,7 @@ final abstract class Long private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Long + @deprecated("Adding a number and a String is deprecated. Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** @@ -474,7 +475,9 @@ object Long extends AnyValCompanion { override def toString = "object scala.Long" /** Language mandated coercions from Long to "wider" types. */ import scala.language.implicitConversions + @deprecated("Implicit conversion from Long to Float is dangerous because it loses precision. Write `.toFloat` instead.", "2.13.1") implicit def long2float(x: Long): Float = x.toFloat + @deprecated("Implicit conversion from Long to Double is dangerous because it loses precision. 
Write `.toDouble` instead.", "2.13.1") implicit def long2double(x: Long): Double = x.toDouble } diff --git a/src/library/scala/MatchError.scala b/src/library/scala/MatchError.scala index 0f39e5a51cc5..39fa11e817f0 100644 --- a/src/library/scala/MatchError.scala +++ b/src/library/scala/MatchError.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,23 +15,19 @@ package scala /** This class implements errors which are thrown whenever an * object doesn't match any pattern of a pattern matching * expression. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 2.0 */ final class MatchError(@transient obj: Any) extends RuntimeException { /** There's no reason we need to call toString eagerly, * so defer it until getMessage is called or object is serialized */ - private lazy val objString = { + private[this] lazy val objString = { def ofClass = "of class " + obj.getClass.getName if (obj == null) "null" - else try { - obj.toString() + " (" + ofClass + ")" - } catch { - case _: Throwable => "an instance " + ofClass - } + else + try s"$obj ($ofClass)" + catch { + case _: Throwable => "an instance " + ofClass + } } @throws[java.io.ObjectStreamException] diff --git a/src/library/scala/Mutable.scala b/src/library/scala/Mutable.scala deleted file mode 100644 index 4d5ab888882e..000000000000 --- a/src/library/scala/Mutable.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala - -/** - * A marker trait for mutable data structures such as mutable collections - * - * @since 2.8 - */ -trait Mutable diff --git a/src/library/scala/NotImplementedError.scala b/src/library/scala/NotImplementedError.scala index b4448fece11a..22361b78b85b 100644 --- a/src/library/scala/NotImplementedError.scala +++ b/src/library/scala/NotImplementedError.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala deleted file mode 100644 index 5b94c015dbf3..000000000000 --- a/src/library/scala/NotNull.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -/** - * A marker trait for things that are not allowed to be null - * @since 2.5 - */ - -@deprecated("this trait will be removed", "2.11.0") -trait NotNull extends Any {} diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 08a108b6b7b3..514bf50607ff 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,9 +16,9 @@ object Option { import scala.language.implicitConversions - /** An implicit conversion that converts an option to an iterable value - */ - implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList + /** An implicit conversion that converts an option to an iterable value */ + implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = + if (xo.isEmpty) Iterable.empty else Iterable.single(xo.get) /** An Option factory which creates Some(x) if the argument is not null, * and None if it is null. @@ -32,6 +32,19 @@ object Option { * the collections hierarchy. */ def empty[A] : Option[A] = None + + /** When a given condition is true, evaluates the `a` argument and returns + * Some(a). When the condition is false, `a` is not evaluated and None is + * returned. + */ + def when[A](cond: Boolean)(a: => A): Option[A] = + if (cond) Some(a) else None + + /** Unless a given condition is true, this will evaluate the `a` argument and + * return Some(a). Otherwise, `a` is not evaluated and None is returned. + */ + @inline def unless[A](cond: Boolean)(a: => A): Option[A] = + when(!cond)(a) } /** Represents optional values. 
Instances of `Option` @@ -42,18 +55,18 @@ object Option { * `foreach`: * * {{{ - * val name: Option[String] = request getParameter "name" - * val upper = name map { _.trim } filter { _.length != 0 } map { _.toUpperCase } - * println(upper getOrElse "") + * val name: Option[String] = request.getParameter("name") + * val upper = name.map(_.trim).filter(_.length != 0).map(_.toUpperCase) + * println(upper.getOrElse("")) * }}} * * Note that this is equivalent to {{{ * val upper = for { - * name <- request getParameter "name" + * name <- request.getParameter("name") * trimmed <- Some(name.trim) * upper <- Some(trimmed.toUpperCase) if trimmed.length != 0 * } yield upper - * println(upper getOrElse "") + * println(upper.getOrElse("")) * }}} * * Because of how for comprehension works, if $none is returned @@ -80,10 +93,13 @@ object Option { * - [[exists]] — Apply predicate on optional value, or false if empty * - [[forall]] — Apply predicate on optional value, or true if empty * - [[contains]] — Checks if value equals optional value, or false if empty + * - [[zip]] — Combine two optional values to make a paired optional value + * - [[unzip]] — Split an optional pair to two optional values + * - [[unzip3]] — Split an optional triple to three optional values * - [[toList]] — Unary list of optional value, otherwise the empty list * * A less-idiomatic way to use $option values is via pattern matching: {{{ - * val nameMaybe = request getParameter "name" + * val nameMaybe = request.getParameter("name") * nameMaybe match { * case Some(name) => * println(name.trim.toUppercase) @@ -106,13 +122,10 @@ object Option { * }}} * * @note Many of the methods in here are duplicative with those - * in the Traversable hierarchy, but they are duplicated for a reason: + * in the Iterable hierarchy, but they are duplicated for a reason: * the implicit conversion tends to leave one with an Iterable in * situations where one could have retained an Option. 
* - * @author Martin Odersky - * @author Matthias Zenger - * @since 1.1 * @define none `None` * @define some [[scala.Some]] * @define option [[scala.Option]] @@ -126,12 +139,9 @@ object Option { * @define willNotTerminateInf * @define collectExample * @define undefinedorder - * @define thatinfo the class of the returned collection. In the standard library configuration, `That` is `Iterable[B]` - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current - * representation type `Repr` and the new element type `B`. */ @SerialVersionUID(-114498752079829388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -sealed abstract class Option[+A] extends Product with Serializable { +sealed abstract class Option[+A] extends IterableOnce[A] with Product with Serializable { self => /** Returns true if the option is $none, false otherwise. @@ -144,7 +154,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * } * }}} */ - def isEmpty: Boolean + final def isEmpty: Boolean = this eq None /** Returns true if the option is an instance of $some, false otherwise. * @@ -156,7 +166,9 @@ sealed abstract class Option[+A] extends Product with Serializable { * } * }}} */ - def isDefined: Boolean = !isEmpty + final def isDefined: Boolean = !isEmpty + + override final def knownSize: Int = if (isEmpty) 0 else 1 /** Returns the option's value. * @@ -168,7 +180,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * } * }}} * @note The option must be nonempty. - * @throws java.util.NoSuchElementException if the option is empty. + * @throws NoSuchElementException if the option is empty. */ def get: A @@ -242,7 +254,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * }}} * This is also equivalent to: * {{{ - * option map f getOrElse ifEmpty + * option.map(f).getOrElse(ifEmpty) * }}} * @param ifEmpty the expression to evaluate if empty. 
* @param f the function to apply if nonempty. @@ -270,6 +282,24 @@ sealed abstract class Option[+A] extends Product with Serializable { @inline final def flatMap[B](f: A => Option[B]): Option[B] = if (isEmpty) None else f(this.get) + /** Returns the nested $option value if it is nonempty. Otherwise, + * return $none. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(Some(b)) => Some(b) + * case _ => None + * } + * }}} + * @example {{{ + * Some(Some("something")).flatten + * }}} + * + * @param ev an implicit conversion that asserts that the value is + * also an $option. + * @see flatMap + */ def flatten[B](implicit ev: A <:< Option[B]): Option[B] = if (isEmpty) None else ev(this.get) @@ -314,7 +344,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * }}} * @note Implemented here to avoid the implicit conversion to Iterable. */ - final def nonEmpty = isDefined + final def nonEmpty: Boolean = isDefined /** Necessary to keep $option from being implicitly converted to * [[scala.collection.Iterable]] in `for` comprehensions. @@ -403,7 +433,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * @see map * @see flatMap */ - @inline final def foreach[U](f: A => U) { + @inline final def foreach[U](f: A => U): Unit = { if (!isEmpty) f(this.get) } @@ -446,6 +476,84 @@ sealed abstract class Option[+A] extends Product with Serializable { @inline final def orElse[B >: A](alternative: => Option[B]): Option[B] = if (isEmpty) alternative else this + /** Returns a $some formed from this option and another option + * by combining the corresponding elements in a pair. + * If either of the two options is empty, $none is returned. + * + * This is equivalent to: + * {{{ + * (option1, option2) match { + * case (Some(x), Some(y)) => Some((x, y)) + * case _ => None + * } + * }}} + * @example {{{ + * // Returns Some(("foo", "bar")) because both options are nonempty. 
+ * Some("foo") zip Some("bar") + * + * // Returns None because `that` option is empty. + * Some("foo") zip None + * + * // Returns None because `this` option is empty. + * None zip Some("bar") + * }}} + * + * @param that the options which is going to be zipped + */ + final def zip[A1 >: A, B](that: Option[B]): Option[(A1, B)] = + if (isEmpty || that.isEmpty) None else Some((this.get, that.get)) + + /** Converts an Option of a pair into an Option of the first element and an Option of the second element. + * + * This is equivalent to: + * {{{ + * option match { + * case Some((x, y)) => (Some(x), Some(y)) + * case _ => (None, None) + * } + * }}} + * @tparam A1 the type of the first half of the element pair + * @tparam A2 the type of the second half of the element pair + * @param asPair an implicit conversion which asserts that the element type + * of this Option is a pair. + * @return a pair of Options, containing, respectively, the first and second half + * of the element pair of this Option. + */ + final def unzip[A1, A2](implicit asPair: A <:< (A1, A2)): (Option[A1], Option[A2]) = { + if (isEmpty) + (None, None) + else { + val e = asPair(this.get) + (Some(e._1), Some(e._2)) + } + } + + /** Converts an Option of a triple into three Options, one containing the element from each position of the triple. + * + * This is equivalent to: + * {{{ + * option match { + * case Some((x, y, z)) => (Some(x), Some(y), Some(z)) + * case _ => (None, None, None) + * } + * }}} + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Option is a triple. + * @return a triple of Options, containing, respectively, the first, second, and third + * elements from the element triple of this Option. 
+ */ + final def unzip3[A1, A2, A3](implicit asTriple: A <:< (A1, A2, A3)): (Option[A1], Option[A2], Option[A3]) = { + if (isEmpty) + (None, None, None) + else { + val e = asTriple(this.get) + (Some(e._1), Some(e._2), Some(e._3)) + } + } + /** Returns a singleton iterator returning the $option's value * if it is nonempty, or an empty iterator if the option is empty. */ @@ -505,26 +613,16 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Class `Some[A]` represents existing values of type * `A`. - * - * @author Martin Odersky - * @since 1.0 */ @SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -final case class Some[+A](@deprecatedName('x, "2.12.0") value: A) extends Option[A] { - def isEmpty = false - def get = value - - @deprecated("Use .value instead.", "2.12.0") def x: A = value +final case class Some[+A](value: A) extends Option[A] { + def get: A = value } /** This case object represents non-existent values. - * - * @author Martin Odersky - * @since 1.0 */ @SerialVersionUID(5066590221178148012L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 case object None extends Option[Nothing] { - def isEmpty = true - def get = throw new NoSuchElementException("None.get") + def get: Nothing = throw new NoSuchElementException("None.get") } diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index 28c48d28c426..5150f52ef7e3 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,17 +12,18 @@ package scala +import scala.annotation.nowarn /** A partial function of type `PartialFunction[A, B]` is a unary function * where the domain does not necessarily include all values of type `A`. - * The function `isDefinedAt` allows to test dynamically if a value is in + * The function [[isDefinedAt]] allows to test dynamically if a value is in * the domain of the function. * * Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may * still throw an exception, so the following code is legal: * * {{{ - * val f: PartialFunction[Int, Any] = { case _ => 1/0 } + * val f: PartialFunction[Int, Any] = { case x => x / 0 } // ArithmeticException: / by zero * }}} * * It is the responsibility of the caller to call `isDefinedAt` before @@ -30,35 +31,98 @@ package scala * `apply` will throw an exception to indicate an error condition. If an * exception is not thrown, evaluation may result in an arbitrary value. * - * The main distinction between `PartialFunction` and [[scala.Function1]] is - * that the user of a `PartialFunction` may choose to do something different - * with input that is declared to be outside its domain. For example: + * The usual way to respect this contract is to call [[applyOrElse]], + * which is expected to be more efficient than calling both `isDefinedAt` + * and `apply`. + * + * Note that `isDefinedAt` may itself throw an exception while evaluating pattern guards + * or other parts of the `PartialFunction`. The same caveat holds for `applyOrElse`. 
* * {{{ * val sample = 1 to 10 - * val isEven: PartialFunction[Int, String] = { - * case x if x % 2 == 0 => x+" is even" + * def isEven(n: Int) = n % 2 == 0 + * + * val eveningNews: PartialFunction[Int, String] = { + * case x if isEven(x) => s"\$x is even" * } * - * // the method collect can use isDefinedAt to select which members to collect - * val evenNumbers = sample collect isEven + * // The method "collect" is described as "filter + map" + * // because it uses a PartialFunction to select elements + * // to which the function is applied. + * val evenNumbers = sample.collect(eveningNews) + * + * // It's more usual to write the PartialFunction as a block of case clauses + * // called an "anonymous pattern-matching function". Since the collect method + * // expects a PartialFunction, one is synthesized from the case clauses. + * def evenly = sample.collect { case x if isEven(x) => s"\$x is even" } + * + * // A method that takes a Function will get one, using the same syntax. + * // Note that all cases are supplied since Function has no `isDefinedAt`. + * def evened = sample.map { case odd if !isEven(odd) => odd + 1 case even => even } + * }}} + * + * The main distinction between `PartialFunction` and [[scala.Function1]] is + * that the client of a `PartialFunction` can perform an alternative computation + * with input that is reported to be outside the domain of the function. + * + * For example: * - * val isOdd: PartialFunction[Int, String] = { - * case x if x % 2 == 1 => x+" is odd" + * {{{ + * val oddlyEnough: PartialFunction[Int, String] = { + * case x if !isEven(x) => s"\$x is odd" * } * - * // the method orElse allows chaining another partial function to handle - * // input outside the declared domain - * val numbers = sample map (isEven orElse isOdd) + * // The method orElse allows chaining another PartialFunction + * // to handle input outside the declared domain. 
+ * val numbers = sample.map(eveningNews.orElse(oddlyEnough)) + * + * // The same computation but with a function literal that calls applyOrElse + * // with oddlyEnough as fallback, which it can do because a PartialFunction is a Function. + * val numbers = sample.map(n => eveningNews.applyOrElse(n, oddlyEnough)) + * }}} + * + * As a convenience, function literals can also be adapted into partial functions + * when needed. If the body of the function is a match expression, then the cases + * are used to synthesize the PartialFunction as already shown. + * + * {{{ + * // The partial function isDefinedAt inputs resulting in the Success case. + * val inputs = List("1", "two", "3").collect(x => Try(x.toInt) match { case Success(i) => i }) * }}} * + * @note Optional [[Function]]s, [[PartialFunction]]s and extractor objects + * can be converted to each other as shown in the following table. + *   + * | How to convert ... | to a [[PartialFunction]] | to an optional [[Function]] | to an extractor | + * | :---: | --- | --- | --- | + * | from a [[PartialFunction]] | [[Predef.identity]] | [[lift]] | [[Predef.identity]] | + * | from optional [[Function]] | [[Function1.UnliftOps#unlift]] or [[Function.unlift]] | [[Predef.identity]] | [[Function1.UnliftOps#unlift]] | + * | from an extractor | `{ case extractor(x) => x }` | `extractor.unapply(_)` | [[Predef.identity]] | + *   * - * @author Martin Odersky, Pavel Pavlov, Adriaan Moors - * @since 1.0 + * @define applyOrElseOrElse Note that calling [[isDefinedAt]] on the resulting partial function + * may apply the first partial function and execute its side effect. + * For efficiency, it is recommended to call [[applyOrElse]] instead of [[isDefinedAt]] or [[apply]]. */ trait PartialFunction[-A, +B] extends (A => B) { self => import PartialFunction._ + /** Tries to extract a `B` from an `A` in a pattern matching expression. 
*/ + def unapply(a: A): Option[B] = lift(a) + + /** Returns an extractor object with a `unapplySeq` method, which extracts each element of a sequence data. + * + * @example {{{ + * val firstChar: String => Option[Char] = _.headOption + * + * Seq("foo", "bar", "baz") match { + * case firstChar.unlift.elementWise(c0, c1, c2) => + * println(s"\$c0, \$c1, \$c2") // Output: f, b, b + * } + * }}} + */ + def elementWise: ElementWiseExtractor[A, B] = new ElementWiseExtractor[A, B](this) + /** Checks if a value is contained in the function's domain. * * @param x the value to test @@ -82,13 +146,48 @@ trait PartialFunction[-A, +B] extends (A => B) { self => /** Composes this partial function with a transformation function that * gets applied to results of this partial function. + * + * If the runtime type of the function is a `PartialFunction` then the + * other `andThen` method is used (note its cautions). + * * @param k the transformation function * @tparam C the result type of the transformation function. - * @return a partial function with the same domain as this partial function, which maps + * @return a partial function with the domain of this partial function, + * possibly narrowed by the specified function, which maps * arguments `x` to `k(this(x))`. */ - override def andThen[C](k: B => C): PartialFunction[A, C] = - new AndThen[A, B, C] (this, k) + override def andThen[C](k: B => C): PartialFunction[A, C] = k match { + case pf: PartialFunction[B, C] => andThen(pf) + case _ => new AndThen[A, B, C](this, k) + } + + /** + * Composes this partial function with another partial function that + * gets applied to results of this partial function. + * + * $applyOrElseOrElse + * + * @param k the transformation function + * @tparam C the result type of the transformation function. + * @return a partial function with the domain of this partial function narrowed by + * other partial function, which maps arguments `x` to `k(this(x))`. 
+ */ + def andThen[C](k: PartialFunction[B, C]): PartialFunction[A, C] = + new Combined[A, B, C](this, k) + + /** + * Composes another partial function `k` with this partial function so that this + * partial function gets applied to results of `k`. + * + * $applyOrElseOrElse + * + * @param k the transformation function + * @tparam R the parameter type of the transformation function. + * @return a partial function with the domain of other partial function narrowed by + * this partial function, which maps arguments `x` to `this(k(x))`. + */ + def compose[R](k: PartialFunction[R, A]): PartialFunction[R, B] = + new Combined[R, A, B](k, this) /** Turns this partial function into a plain function returning an `Option` result. * @see Function.unlift @@ -121,7 +220,6 @@ trait PartialFunction[-A, +B] extends (A => B) { self => * @param x the function argument * @param default the fallback function * @return the result of this function or fallback function application. - * @since 2.10 */ def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = if (isDefinedAt(x)) apply(x) else default(x) @@ -139,7 +237,6 @@ trait PartialFunction[-A, +B] extends (A => B) { self => * @param action the action function * @return a function which maps arguments `x` to `isDefinedAt(x)`. The resulting function * runs `action(this(x))` where `this` is defined. 
- * @since 2.10 */ def runWith[U](action: B => U): A => Boolean = { x => val z = applyOrElse(x, checkFallback[B]) @@ -158,11 +255,19 @@ trait PartialFunction[-A, +B] extends (A => B) { self => * } * def onlyInt(v: Any): Option[Int] = condOpt(v) { case x: Int => x } * }}} - * - * @author Paul Phillips - * @since 2.8 */ object PartialFunction { + + final class ElementWiseExtractor[-A, +B] private[PartialFunction] (private val pf: PartialFunction[A, B]) extends AnyVal { + @nowarn("cat=lint-nonlocal-return") + def unapplySeq(seq: Seq[A]): Option[Seq[B]] = { + Some(seq.map { + case pf(b) => b + case _ => return None + }) + } + } + /** Composite function produced by `PartialFunction#orElse` method */ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) @@ -176,10 +281,10 @@ object PartialFunction { if (!fallbackOccurred(z)) z else f2.applyOrElse(x, default) } - override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) = + override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]): OrElse[A1, B1] = new OrElse[A1, B1] (f1, f2 orElse that) - override def andThen[C](k: B => C) = + override def andThen[C](k: B => C): OrElse[A, C] = new OrElse[A, C] (f1 andThen k, f2 andThen k) } @@ -196,6 +301,22 @@ object PartialFunction { } } + /** Composite function produced by `PartialFunction#andThen` method + */ + private class Combined[-A, B, +C] (pf: PartialFunction[A, B], k: PartialFunction[B, C]) extends PartialFunction[A, C] with Serializable { + def isDefinedAt(x: A): Boolean = { + val b: B = pf.applyOrElse(x, checkFallback[B]) + if (!fallbackOccurred(b)) k.isDefinedAt(b) else false + } + + def apply(x: A): C = k(pf(x)) + + override def applyOrElse[A1 <: A, C1 >: C](x: A1, default: A1 => C1): C1 = { + val pfv = pf.applyOrElse(x, checkFallback[B]) + if (!fallbackOccurred(pfv)) k.applyOrElse(pfv, (_: B) => default(x)) else default(x) + } + } + /** To implement patterns like {{{ if(pf isDefinedAt x) f1(pf(x)) else f2(x) }}} 
efficiently * the following trick is used: * @@ -215,11 +336,11 @@ object PartialFunction { * This correctly interacts with specialization as return type of `applyOrElse` * (which is parameterized upper bound) can never be specialized. * - * Here `fallback_pf` is used as both unique marker object and special fallback function that returns it. + * Here `fallback_fn` is used as both unique marker object and special fallback function that returns it. */ - private[this] val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf } - private def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]] - private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef]) + private[this] val fallback_fn: Any => Any = _ => fallback_fn + private def checkFallback[B] = fallback_fn.asInstanceOf[Any => B] + private def fallbackOccurred[B](x: B) = fallback_fn eq x.asInstanceOf[AnyRef] private class Lifted[-A, +B] (val pf: PartialFunction[A, B]) extends scala.runtime.AbstractFunction1[A, Option[B]] with Serializable { @@ -234,8 +355,7 @@ object PartialFunction { def isDefinedAt(x: A): Boolean = f(x).isDefined override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = { - val z = f(x) - if (!z.isEmpty) z.get else default(x) + f(x).getOrElse(default(x)) } override def lift = f @@ -246,11 +366,12 @@ object PartialFunction { case ff => new Unlifted(ff) } - /** Converts ordinary function to partial one - * @since 2.10 + /** Converts an ordinary function to a partial function. Note that calling `isDefinedAt(x)` on + * this partial function will return `true` for every `x`. + * @param f an ordinary function + * @return a partial function which delegates to the ordinary function `f` */ - @deprecated("""For converting an ordinary function f to a partial function pf, use `val pf: PartialFunction[A, B] = { case x => f(x) }`. 
For creating a new PartialFunction, use an explicit type annotation instead, like in `val pf: PartialFunction[Int, String] = { case 1 => "one" }`.""", "2.12.5") - def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) } + def fromFunction[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) } private[this] val constFalse: Any => Boolean = { _ => false} @@ -258,36 +379,36 @@ object PartialFunction { def isDefinedAt(x: Any) = false def apply(x: Any) = throw new MatchError(x) override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that - override def andThen[C](k: Nothing => C) = this - override val lift = (x: Any) => None + override def andThen[C](k: Nothing => C): PartialFunction[Any, Nothing] = this + override val lift: Any => None.type = (x: Any) => None override def runWith[U](action: Nothing => U) = constFalse } /** The partial function with empty domain. * Any attempt to invoke empty partial function leads to throwing [[scala.MatchError]] exception. - * @since 2.10 */ def empty[A, B] : PartialFunction[A, B] = empty_pf - /** Creates a Boolean test based on a value and a partial function. - * It behaves like a 'match' statement with an implied 'case _ => false' - * following the supplied cases. + /** A Boolean test that is the result of the given function where defined, + * and false otherwise. + * + * It behaves like a `case _ => false` were added to the partial function. * * @param x the value to test * @param pf the partial function * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`. */ - def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse) + def cond[A](x: A)(pf: PartialFunction[A, Boolean]): Boolean = pf.applyOrElse(x, constFalse) - /** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f` - * whose result is `Some(x)` if the argument is in `pf`'s domain and `None` - * otherwise, and applies it to the value `x`. 
In effect, it is a - * `'''match'''` statement which wraps all case results in `Some(_)` and - * adds `'''case''' _ => None` to the end. + /** Apply the function to the given value if defined, and return the result + * in a `Some`; otherwise, return `None`. * * @param x the value to test * @param pf the PartialFunction[T, U] * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise. */ - def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x) + def condOpt[A, B](x: A)(pf: PartialFunction[A, B]): Option[B] = { + val z = pf.applyOrElse(x, checkFallback[B]) + if (!fallbackOccurred(z)) Some(z) else None + } } diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index b6e548c043cc..26dbc568a9ab 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,23 +14,17 @@ package scala import scala.language.implicitConversions -import scala.collection.{ mutable, immutable, generic } -import immutable.StringOps -import mutable.ArrayOps -import generic.CanBuildFrom -import scala.annotation.{ elidable, implicitNotFound } -import scala.annotation.elidable.ASSERTION -import scala.io.StdIn +import scala.collection.{mutable, immutable, ArrayOps, StringOps}, immutable.WrappedString +import scala.annotation.{elidable, implicitNotFound}, elidable.ASSERTION +import scala.annotation.meta.{ companionClass, companionMethod } /** The `Predef` object provides definitions that are accessible in all Scala * compilation units without explicit qualification. 
* * === Commonly Used Types === * Predef provides type aliases for types which are commonly used, such as - * the immutable collection types [[scala.collection.immutable.Map]], - * [[scala.collection.immutable.Set]], and the [[scala.collection.immutable.List]] - * constructors ([[scala.collection.immutable.::]] and - * [[scala.collection.immutable.Nil]]). + * the immutable collection types [[scala.collection.immutable.Map]] and + * [[scala.collection.immutable.Set]]. * * === Console Output === * For basic console output, `Predef` provides convenience methods [[print(x:Any* print]] and [[println(x:Any* println]], @@ -81,17 +75,13 @@ import scala.io.StdIn * @groupprio console-output 30 * @groupdesc console-output These methods provide output via the console. * - * @groupname type-constraints Type Constraints - * @groupprio type-constraints 40 - * @groupdesc type-constraints These entities allows constraints between types to be stipulated. - * * @groupname aliases Aliases * @groupprio aliases 50 * @groupdesc aliases These aliases bring selected immutable types into scope without any imports. * * @groupname conversions-string String Conversions * @groupprio conversions-string 60 - * @groupdesc conversions-string Conversions to and from String and StringOps. + * @groupdesc conversions-string Conversions from String to StringOps or WrappedString. * * @groupname implicit-classes-any Implicit Classes * @groupprio implicit-classes-any 70 @@ -109,11 +99,11 @@ import scala.io.StdIn * @groupprio conversions-anyval-to-java 100 * @groupdesc conversions-anyval-to-java Implicit conversion from Scala AnyVals to Java primitive wrapper types equivalents. * - * @groupname conversions-array-to-wrapped-array Array to WrappedArray + * @groupname conversions-array-to-wrapped-array Array to ArraySeq * @groupprio conversions-array-to-wrapped-array 110 - * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to WrappedArrays. 
+ * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to ArraySeqs. */ -object Predef extends LowPriorityImplicits with DeprecatedPredef { +object Predef extends LowPriorityImplicits { /** * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to * the class literal `T.class` in Java. @@ -125,14 +115,32 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * val mapIntString = classOf[Map[Int,String]] * // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map * }}} + * + * @return The runtime [[Class]] representation of type `T`. * @group utilities */ def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler. - /** The `String` type in Scala has methods that come either from the underlying - * Java String (see the documentation corresponding to your Java version, for - * example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or - * are added implicitly through [[scala.collection.immutable.StringOps]]. + /** + * Retrieve the single value of a type with a unique inhabitant. + * + * @example {{{ + * object Foo + * val foo = valueOf[Foo.type] + * // foo is Foo.type = Foo + * + * val bar = valueOf[23] + * // bar is 23.type = 23 + * }}} + * @group utilities + */ + @inline def valueOf[T](implicit vt: ValueOf[T]): T = vt.value + + /** The `String` type in Scala has all the methods of the underlying + * [[java.lang.String]], of which it is just an alias. + * + * In addition, extension methods in [[scala.collection.StringOps]] + * are added implicitly through the conversion [[augmentString]]. 
* @group aliases */ type String = java.lang.String @@ -147,7 +155,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { type Function[-A, +B] = Function1[A, B] /** @group aliases */ - type Map[A, +B] = immutable.Map[A, B] + type Map[K, +V] = immutable.Map[K, V] /** @group aliases */ type Set[A] = immutable.Set[A] /** @group aliases */ @@ -155,19 +163,28 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { /** @group aliases */ val Set = immutable.Set + /** + * Allows destructuring tuples with the same syntax as constructing them. + * + * @example {{{ + * val tup = "foobar" -> 3 + * + * val c = tup match { + * case str -> i => str.charAt(i) + * } + * }}} + * @group aliases + */ + val -> = Tuple2 + // Manifest types, companions, and incantations for summoning - @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") - type ClassManifest[T] = scala.reflect.ClassManifest[T] // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") type OptManifest[T] = scala.reflect.OptManifest[T] - @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") + @implicitNotFound(msg = "No Manifest available for ${T}.") // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") type Manifest[T] = scala.reflect.Manifest[T] - @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") - val ClassManifest = scala.reflect.ClassManifest // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") val Manifest = scala.reflect.Manifest @@ -177,20 +194,59 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") - def manifest[T](implicit m: Manifest[T]) = m - @deprecated("use scala.reflect.classTag[T] instead", "2.10.0") - def classManifest[T](implicit m: ClassManifest[T]) = m + def manifest[T](implicit m: Manifest[T]): Manifest[T] = m // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") - def optManifest[T](implicit m: OptManifest[T]) = m + def optManifest[T](implicit m: OptManifest[T]): OptManifest[T] = m // Minor variations on identity functions - /** @group utilities */ - @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version - /** @group utilities */ - @inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` - /** @group utilities */ - @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + /** + * A method that returns its input value. + * @tparam A type of the input value x. + * @param x the value of type `A` to be returned. + * @return the value `x`. + * @group utilities */ + @inline def identity[A](x: A): A = x // see `$conforms` for the implicit version + + /** Summon an implicit value of type `T`. Usually, the argument is not passed explicitly. + * + * @tparam T the type of the value to be summoned + * @return the implicit value of type `T` + * @group utilities + */ + @inline def implicitly[T](implicit e: T): T = e // TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero` + + /** Used to mark code blocks as being expressions, instead of being taken as part of anonymous classes and the like. + * This is just a different name for [[identity]]. + * + * @example Separating code blocks from `new`: + * {{{ + * val x = new AnyRef + * { + * val y = ... + * println(y) + * } + * // the { ... 
} block is seen as the body of an anonymous class + * + * val x = new AnyRef + * + * { + * val y = ... + * println(y) + * } + * // an empty line is a brittle "fix" + * + * val x = new AnyRef + * locally { + * val y = ... + * println(y) + * } + * // locally guards the block and helps communicate intent + * }}} + * @group utilities + */ + @inline def locally[T](@deprecatedName("x") x: T): T = x // assertions --------------------------------------------------------- @@ -203,7 +259,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @group assertions */ @elidable(ASSERTION) - def assert(assertion: Boolean) { + def assert(assertion: Boolean): Unit = { if (!assertion) throw new java.lang.AssertionError("assertion failed") } @@ -218,7 +274,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @group assertions */ @elidable(ASSERTION) @inline - final def assert(assertion: Boolean, message: => Any) { + final def assert(assertion: Boolean, message: => Any): Unit = { if (!assertion) throw new java.lang.AssertionError("assertion failed: "+ message) } @@ -234,7 +290,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @group assertions */ @elidable(ASSERTION) - def assume(assumption: Boolean) { + def assume(assumption: Boolean): Unit = { if (!assumption) throw new java.lang.AssertionError("assumption failed") } @@ -251,7 +307,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @group assertions */ @elidable(ASSERTION) @inline - final def assume(assumption: Boolean, message: => Any) { + final def assume(assumption: Boolean, message: => Any): Unit = { if (!assumption) throw new java.lang.AssertionError("assumption failed: "+ message) } @@ -263,7 +319,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @param requirement the expression to test * @group assertions */ - def require(requirement: Boolean) { + def require(requirement: Boolean): Unit = { if (!requirement) throw new 
IllegalArgumentException("requirement failed") } @@ -276,41 +332,24 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @param message a String to include in the failure message * @group assertions */ - @inline final def require(requirement: Boolean, message: => Any) { + @inline final def require(requirement: Boolean, message: => Any): Unit = { if (!requirement) throw new IllegalArgumentException("requirement failed: "+ message) } /** `???` can be used for marking methods that remain to be implemented. - * @throws NotImplementedError + * @throws NotImplementedError when `???` is invoked. * @group utilities */ def ??? : Nothing = throw new NotImplementedError - // tupling ------------------------------------------------------------ - - @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") - type Pair[+A, +B] = Tuple2[A, B] - @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") - object Pair { - def apply[A, B](x: A, y: B) = Tuple2(x, y) - def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) - } - - @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") - type Triple[+A, +B, +C] = Tuple3[A, B, C] - @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") - object Triple { - def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) - def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) - } - // implicit classes ----------------------------------------------------- /** @group implicit-classes-any */ implicit final class ArrowAssoc[A](private val self: A) extends AnyVal { - @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y) - def →[B](y: B): Tuple2[A, B] = ->(y) + @inline def -> [B](y: B): (A, B) = (self, y) + @deprecated("Use `->` instead. 
If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", "2.13.0") + def →[B](y: B): (A, B) = ->(y) } /** @group implicit-classes-any */ @@ -331,57 +370,40 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { @inline def formatted(fmtstr: String): String = fmtstr format self } + /** Injects String concatenation operator `+` to any classes. + * @group implicit-classes-any + */ + @(deprecated @companionMethod)("Implicit injection of + is deprecated. Convert to String to call +", "2.13.0") + @(deprecated @companionClass)("Implicit injection of + is deprecated. Convert to String to call +", "2.13.0") // for Scaladoc // scala/bug#8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit - /** @group implicit-classes-any */ implicit final class any2stringadd[A](private val self: A) extends AnyVal { def +(other: String): String = String.valueOf(self) + other } - implicit final class RichException(private val self: Throwable) extends AnyVal { - import scala.compat.Platform.EOL - @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) - } - - // Sadly we have to do `@deprecatedName(null, "2.12.0")` because - // `@deprecatedName(since="2.12.0")` incurs a warning about - // Usage of named or default arguments transformed this annotation constructor call into a block. - // The corresponding AnnotationInfo will contain references to local values and default getters - // instead of the actual argument trees - // and `@deprecatedName(Symbol(""), "2.12.0")` crashes scalac with - // scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving object Symbol - // in run/repl-no-imports-no-predef-power.scala. 
/** @group char-sequence-wrappers */ - final class SeqCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { - def length: Int = __sequenceOfChars.length - def charAt(index: Int): Char = __sequenceOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) - override def toString = __sequenceOfChars mkString "" + final class SeqCharSequence(sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { + def length: Int = sequenceOfChars.length + def charAt(index: Int): Char = sequenceOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(sequenceOfChars.slice(start, end)) + override def toString = sequenceOfChars.mkString } /** @group char-sequence-wrappers */ def SeqCharSequence(sequenceOfChars: scala.collection.IndexedSeq[Char]): SeqCharSequence = new SeqCharSequence(sequenceOfChars) /** @group char-sequence-wrappers */ - @deprecated("use `java.nio.CharBuffer.wrap` instead", "2.12.13") - final class ArrayCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __arrayOfChars: Array[Char]) extends CharSequence { - def length: Int = __arrayOfChars.length - def charAt(index: Int): Char = __arrayOfChars(index) - def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) - override def toString = __arrayOfChars mkString "" + final class ArrayCharSequence(arrayOfChars: Array[Char]) extends CharSequence { + def length: Int = arrayOfChars.length + def charAt(index: Int): Char = arrayOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(arrayOfChars, start, end) + override def toString = arrayOfChars.mkString } /** @group char-sequence-wrappers */ def ArrayCharSequence(arrayOfChars: Array[Char]): 
ArrayCharSequence = new ArrayCharSequence(arrayOfChars) - implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { - def apply(from: String) = apply() - def apply() = mutable.StringBuilder.newBuilder - } - /** @group conversions-string */ @inline implicit def augmentString(x: String): StringOps = new StringOps(x) - /** @group conversions-string */ - @inline implicit def unaugmentString(x: StringOps): String = x.repr // printing ----------------------------------------------------------- @@ -390,19 +412,19 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * @param x the object to print; may be null. * @group console-output */ - def print(x: Any) = Console.print(x) + def print(x: Any): Unit = Console.print(x) /** Prints a newline character on the default output. * @group console-output */ - def println() = Console.println() + def println(): Unit = Console.println() /** Prints out an object to the default output, followed by a newline character. * * @param x the object to print. * @group console-output */ - def println(x: Any) = Console.println(x) + def println(x: Any): Unit = Console.println(x) /** Prints its arguments as a formatted string to the default output, * based on a string pattern (in a fashion similar to printf in C). @@ -413,43 +435,35 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * Consider using the [[scala.StringContext.f f interpolator]] as more type safe and idiomatic. * * @param text the pattern for formatting the arguments. - * @param args the arguments used to instantiating the pattern. + * @param xs the arguments used to instantiate the pattern. 
* @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments * * @see [[scala.StringContext.f StringContext.f]] * @group console-output */ - def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) + def printf(text: String, xs: Any*): Unit = Console.print(text.format(xs: _*)) // views -------------------------------------------------------------- - implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x) - implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x) - - implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { - case x: Array[AnyRef] => refArrayOps[AnyRef](x) - case x: Array[Boolean] => booleanArrayOps(x) - case x: Array[Byte] => byteArrayOps(x) - case x: Array[Char] => charArrayOps(x) - case x: Array[Double] => doubleArrayOps(x) - case x: Array[Float] => floatArrayOps(x) - case x: Array[Int] => intArrayOps(x) - case x: Array[Long] => longArrayOps(x) - case x: Array[Short] => shortArrayOps(x) - case x: Array[Unit] => unitArrayOps(x) - case null => null - }).asInstanceOf[ArrayOps[T]] - - implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps.ofBoolean = new ArrayOps.ofBoolean(xs) - implicit def byteArrayOps(xs: Array[Byte]): ArrayOps.ofByte = new ArrayOps.ofByte(xs) - implicit def charArrayOps(xs: Array[Char]): ArrayOps.ofChar = new ArrayOps.ofChar(xs) - implicit def doubleArrayOps(xs: Array[Double]): ArrayOps.ofDouble = new ArrayOps.ofDouble(xs) - implicit def floatArrayOps(xs: Array[Float]): ArrayOps.ofFloat = new ArrayOps.ofFloat(xs) - implicit def intArrayOps(xs: Array[Int]): ArrayOps.ofInt = new ArrayOps.ofInt(xs) - implicit def longArrayOps(xs: Array[Long]): ArrayOps.ofLong = new ArrayOps.ofLong(xs) - implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps.ofRef[T] = new ArrayOps.ofRef[T](xs) - implicit def shortArrayOps(xs: Array[Short]): ArrayOps.ofShort = new ArrayOps.ofShort(xs) - implicit def 
unitArrayOps(xs: Array[Unit]): ArrayOps.ofUnit = new ArrayOps.ofUnit(xs) + // these two are morally deprecated but the @deprecated annotation has been moved to the extension method themselves, + // in order to provide a more specific deprecation method. + implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)): runtime.Tuple2Zipped.Ops[T1, T2] = new runtime.Tuple2Zipped.Ops(x) + implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)): runtime.Tuple3Zipped.Ops[T1, T2, T3] = new runtime.Tuple3Zipped.Ops(x) + + // Not specialized anymore since 2.13 but we still need separate methods + // to avoid https://github.com/scala/bug/issues/10746 + // TODO: should not need @inline. add heuristic to inline factories for value classes. + @inline implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = new ArrayOps(xs) + @inline implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps(xs) + @inline implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps(xs) + @inline implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps(xs) + @inline implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps(xs) + @inline implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps(xs) + @inline implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps(xs) + @inline implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps(xs) + @inline implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps(xs) + @inline implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps(xs) + @inline implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps(xs) // "Autoboxing" and "Autounboxing" --------------------------------------------------- @@ -487,106 +501,25 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { /** @group conversions-java-to-anyval */ implicit def Boolean2boolean(x: java.lang.Boolean): 
Boolean = x.asInstanceOf[Boolean] - // Type Constraints -------------------------------------------------------------- - - /** - * An instance of `A <:< B` witnesses that `A` is a subtype of `B`. - * Requiring an implicit argument of the type `A <:< B` encodes - * the generalized constraint `A <: B`. - * - * @note we need a new type constructor `<:<` and evidence `conforms`, - * as reusing `Function1` and `identity` leads to ambiguities in - * case of type errors (`any2stringadd` is inferred) - * - * To constrain any abstract type T that's in scope in a method's - * argument list (not just the method's own type parameters) simply - * add an implicit argument of type `T <:< U`, where `U` is the required - * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the - * required lower bound. - * - * In part contributed by Jason Zaugg. - * @group type-constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") - sealed abstract class <:<[-From, +To] extends (From => To) with Serializable - private[this] final val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x } - // The dollar prefix is to dodge accidental shadowing of this method - // by a user-defined method of the same name (scala/bug#7788). - // The collections rely on this method. - /** @group type-constraints */ - implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] - - @deprecated("use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") - def conforms[A]: A <:< A = $conforms[A] - - /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. 
- * - * @see `<:<` for expressing subtyping constraints - * @group type-constraints - */ - @implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") - sealed abstract class =:=[From, To] extends (From => To) with Serializable - private[this] final val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x } - /** @group type-constraints */ - object =:= { - implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A] - } - - /** A type for which there is always an implicit value. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` + /** An implicit of type `A => A` is available for all `A` because it can always + * be implemented using the identity function. This also means that an + * implicit of type `A => B` is always available when `A <: B`, because + * `(A => A) <: (A => B)`. */ - class DummyImplicit - - object DummyImplicit { - - /** An implicit value yielding a `DummyImplicit`. - * @see [[scala.Array$]], method `fallbackCanBuildFrom` - */ - implicit def dummyImplicit: DummyImplicit = new DummyImplicit - } -} - -private[scala] trait DeprecatedPredef { - self: Predef.type => - - // Deprecated stubs for any who may have been calling these methods directly. 
- @deprecated("use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) - @deprecated("use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) - @deprecated("use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) - @deprecated("use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) - @deprecated("use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) - @deprecated("use `java.nio.CharBuffer.wrap`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) - - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) - @deprecated("use the method in 
`scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) - @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) + // $ to avoid accidental shadowing (e.g. scala/bug#7788) + implicit def $conforms[A]: A => A = <:<.refl } /** The `LowPriorityImplicits` class provides implicit values that * are valid in all Scala compilation units without explicit qualification, * but that are partially overridden by higher-priority conversions in object * `Predef`. -* -* @author Martin Odersky -* @since 2.8 */ // scala/bug#7335 Parents of Predef are defined in the same compilation unit to avoid // cyclic reference errors compiling the standard library *without* a previously // compiled copy on the classpath. -private[scala] abstract class LowPriorityImplicits { - import mutable.WrappedArray - import immutable.WrappedString +private[scala] abstract class LowPriorityImplicits extends LowPriorityImplicits2 { + import mutable.ArraySeq /** We prefer the java.lang.* boxed types to these wrappers in * any potential conflicts. Conflicts do exist because the wrappers @@ -598,57 +531,56 @@ private[scala] abstract class LowPriorityImplicits { * Even inlined, every call site does a no-op retrieval of Predef's MODULE$ * because maybe loading Predef has side effects! 
*/ - @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x) - @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x) - @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x) - @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c) - @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x) - @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x) - @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x) - @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x) + @inline implicit def byteWrapper(x: Byte): runtime.RichByte = new runtime.RichByte(x) + @inline implicit def shortWrapper(x: Short): runtime.RichShort = new runtime.RichShort(x) + @inline implicit def intWrapper(x: Int): runtime.RichInt = new runtime.RichInt(x) + @inline implicit def charWrapper(c: Char): runtime.RichChar = new runtime.RichChar(c) + @inline implicit def longWrapper(x: Long): runtime.RichLong = new runtime.RichLong(x) + @inline implicit def floatWrapper(x: Float): runtime.RichFloat = new runtime.RichFloat(x) + @inline implicit def doubleWrapper(x: Double): runtime.RichDouble = new runtime.RichDouble(x) + @inline implicit def booleanWrapper(x: Boolean): runtime.RichBoolean = new runtime.RichBoolean(x) /** @group conversions-array-to-wrapped-array */ - implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] = + implicit def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = if (xs eq null) null - else WrappedArray.make(xs) + else ArraySeq.make(xs) // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef] // is as good as another for all T <: AnyRef. Instead of creating 100,000,000 // unique ones by way of this implicit, let's share one. 
/** @group conversions-array-to-wrapped-array */ - implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = { + implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq.ofRef[T] = { if (xs eq null) null - else if (xs.length == 0) WrappedArray.empty[T] - else new WrappedArray.ofRef[T](xs) + else if (xs.length == 0) ArraySeq.empty[AnyRef].asInstanceOf[ArraySeq.ofRef[T]] + else new ArraySeq.ofRef[T](xs) } /** @group conversions-array-to-wrapped-array */ - implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null + implicit def wrapIntArray(xs: Array[Int]): ArraySeq.ofInt = if (xs ne null) new ArraySeq.ofInt(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null + implicit def wrapDoubleArray(xs: Array[Double]): ArraySeq.ofDouble = if (xs ne null) new ArraySeq.ofDouble(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null + implicit def wrapLongArray(xs: Array[Long]): ArraySeq.ofLong = if (xs ne null) new ArraySeq.ofLong(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null + implicit def wrapFloatArray(xs: Array[Float]): ArraySeq.ofFloat = if (xs ne null) new ArraySeq.ofFloat(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null + implicit def wrapCharArray(xs: Array[Char]): ArraySeq.ofChar = if (xs ne null) new ArraySeq.ofChar(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne 
null) new WrappedArray.ofByte(xs) else null + implicit def wrapByteArray(xs: Array[Byte]): ArraySeq.ofByte = if (xs ne null) new ArraySeq.ofByte(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null + implicit def wrapShortArray(xs: Array[Short]): ArraySeq.ofShort = if (xs ne null) new ArraySeq.ofShort(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null + implicit def wrapBooleanArray(xs: Array[Boolean]): ArraySeq.ofBoolean = if (xs ne null) new ArraySeq.ofBoolean(xs) else null /** @group conversions-array-to-wrapped-array */ - implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null + implicit def wrapUnitArray(xs: Array[Unit]): ArraySeq.ofUnit = if (xs ne null) new ArraySeq.ofUnit(xs) else null /** @group conversions-string */ implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null - /** @group conversions-string */ - implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null +} - implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] = - new CanBuildFrom[String, T, immutable.IndexedSeq[T]] { - def apply(from: String) = immutable.IndexedSeq.newBuilder[T] - def apply() = immutable.IndexedSeq.newBuilder[T] - } +private[scala] abstract class LowPriorityImplicits2 { + @deprecated("implicit conversions from Array to immutable.IndexedSeq are implemented by copying; use `toIndexedSeq` explicitly if you want to copy, or use the more efficient non-copying ArraySeq.unsafeWrapArray", since="2.13.0") + implicit def copyArrayToImmutableIndexedSeq[T](xs: Array[T]): IndexedSeq[T] = + if (xs eq null) null + else new 
ArrayOps(xs).toIndexedSeq } diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index 3992503f11d9..c0fa80a95ef5 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,33 +16,30 @@ package scala * least [[scala.Product1]] through [[scala.Product22]] and therefore also * their subclasses [[scala.Tuple1]] through [[scala.Tuple22]]. In addition, * all case classes implement `Product` with synthetically generated methods. - * - * @author Burak Emir - * @since 2.3 */ trait Product extends Any with Equals { + /** The size of this product. + * @return for a product `A(x,,1,,, ..., x,,k,,)`, returns `k` + */ + def productArity: Int + /** The n^th^ element of this product, 0-based. In other words, for a * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. * * @param n the index of the element to return - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= productArity). * @return the element `n` elements after the first element */ def productElement(n: Int): Any - /** The size of this product. - * @return for a product `A(x,,1,,, ..., x,,k,,)`, returns `k` - */ - def productArity: Int - /** An iterator over all the elements of this product. 
* @return in the default implementation, an `Iterator[Any]` */ def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] { - private var c: Int = 0 - private val cmax = productArity - def hasNext = c < cmax - def next() = { val result = productElement(c); c += 1; result } + private[this] var c: Int = 0 + private[this] val cmax = productArity + def hasNext: Boolean = c < cmax + def next(): Any = { val result = productElement(c); c += 1; result } } /** A string used in the `toString` methods of derived classes. @@ -51,5 +48,25 @@ trait Product extends Any with Equals { * * @return in the default implementation, the empty string */ - def productPrefix = "" + def productPrefix: String = "" + + /** The name of the n^th^ element of this product, 0-based. + * In the default implementation, an empty string. + * + * @param n the index of the element name to return + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= productArity). + * @return the name of the specified element + */ + def productElementName(n: Int): String = + if (n >= 0 && n < productArity) "" + else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${productArity-1})") + + /** An iterator over the names of all the elements of this product. + */ + def productElementNames: Iterator[String] = new scala.collection.AbstractIterator[String] { + private[this] var c: Int = 0 + private[this] val cmax = productArity + def hasNext: Boolean = c < cmax + def next(): String = { val result = productElementName(c); c += 1; result } + } } diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala index 41e97a9005b8..912f4dc8f0d8 100644 --- a/src/library/scala/Product1.scala +++ b/src/library/scala/Product1.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product1 { } /** Product1 is a Cartesian product of 1 component. - * @since 2.3 */ trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product { /** The arity of this product. * @return 1 */ - override def productArity = 1 + override def productArity: Int = 1 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,13 +33,13 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product { * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 1). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 0)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala index a170baf72dc0..8ab742e3458f 100644 --- a/src/library/scala/Product10.scala +++ b/src/library/scala/Product10.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product10 { } /** Product10 is a Cartesian product of 10 components. - * @since 2.3 */ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any with Product { /** The arity of this product. 
* @return 10 */ - override def productArity = 10 + override def productArity: Int = 10 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any w * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 10). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -49,7 +48,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any w case 7 => _8 case 8 => _9 case 9 => _10 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 9)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala index d67fcb33ac4e..c970235fc693 100644 --- a/src/library/scala/Product11.scala +++ b/src/library/scala/Product11.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product11 { } /** Product11 is a Cartesian product of 11 components. - * @since 2.3 */ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends Any with Product { /** The arity of this product. 
* @return 11 */ - override def productArity = 11 + override def productArity: Int = 11 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 11). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -50,7 +49,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends case 8 => _9 case 9 => _10 case 10 => _11 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 10)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala index f1113f34a681..2823288e430b 100644 --- a/src/library/scala/Product12.scala +++ b/src/library/scala/Product12.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product12 { } /** Product12 is a Cartesian product of 12 components. - * @since 2.3 */ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] extends Any with Product { /** The arity of this product. 
* @return 12 */ - override def productArity = 12 + override def productArity: Int = 12 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 12). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -51,7 +50,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e case 9 => _10 case 10 => _11 case 11 => _12 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 11)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala index 08b93a102f2e..25e804081407 100644 --- a/src/library/scala/Product13.scala +++ b/src/library/scala/Product13.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product13 { } /** Product13 is a Cartesian product of 13 components. - * @since 2.3 */ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13] extends Any with Product { /** The arity of this product. 
* @return 13 */ - override def productArity = 13 + override def productArity: Int = 13 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 13). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -52,7 +51,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 10 => _11 case 11 => _12 case 12 => _13 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 12)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala index 664d9cf5abed..76afd49e1856 100644 --- a/src/library/scala/Product14.scala +++ b/src/library/scala/Product14.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product14 { } /** Product14 is a Cartesian product of 14 components. - * @since 2.3 */ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14] extends Any with Product { /** The arity of this product. 
* @return 14 */ - override def productArity = 14 + override def productArity: Int = 14 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 14). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -53,7 +52,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 11 => _12 case 12 => _13 case 13 => _14 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 13)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala index c34f9c0311e2..dd6e49a33eba 100644 --- a/src/library/scala/Product15.scala +++ b/src/library/scala/Product15.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product15 { } /** Product15 is a Cartesian product of 15 components. - * @since 2.3 */ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15] extends Any with Product { /** The arity of this product. 
* @return 15 */ - override def productArity = 15 + override def productArity: Int = 15 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 15). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -54,7 +53,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 12 => _13 case 13 => _14 case 14 => _15 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 14)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala index f990d3ae7a0d..900ccdcab195 100644 --- a/src/library/scala/Product16.scala +++ b/src/library/scala/Product16.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product16 { } /** Product16 is a Cartesian product of 16 components. - * @since 2.3 */ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16] extends Any with Product { /** The arity of this product. 
* @return 16 */ - override def productArity = 16 + override def productArity: Int = 16 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 16). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -55,7 +54,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 13 => _14 case 14 => _15 case 15 => _16 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 15)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala index 858ffb6f789f..4e6636f2e5d1 100644 --- a/src/library/scala/Product17.scala +++ b/src/library/scala/Product17.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product17 { } /** Product17 is a Cartesian product of 17 components. - * @since 2.3 */ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17] extends Any with Product { /** The arity of this product. 
* @return 17 */ - override def productArity = 17 + override def productArity: Int = 17 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 17). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -56,7 +55,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 14 => _15 case 15 => _16 case 16 => _17 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 16)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala index eb76d6657104..4a68f49f5623 100644 --- a/src/library/scala/Product18.scala +++ b/src/library/scala/Product18.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product18 { } /** Product18 is a Cartesian product of 18 components. - * @since 2.3 */ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18] extends Any with Product { /** The arity of this product. 
* @return 18 */ - override def productArity = 18 + override def productArity: Int = 18 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 18). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -57,7 +56,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 15 => _16 case 16 => _17 case 17 => _18 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 17)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala index 6f2bad96293a..fdc4c232742d 100644 --- a/src/library/scala/Product19.scala +++ b/src/library/scala/Product19.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product19 { } /** Product19 is a Cartesian product of 19 components. - * @since 2.3 */ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19] extends Any with Product { /** The arity of this product. 
* @return 19 */ - override def productArity = 19 + override def productArity: Int = 19 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 19). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -58,7 +57,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 16 => _17 case 17 => _18 case 18 => _19 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 18)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala index fa3c648a207c..2498e9727b7f 100644 --- a/src/library/scala/Product2.scala +++ b/src/library/scala/Product2.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product2 { } /** Product2 is a Cartesian product of 2 components. - * @since 2.3 */ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Any with Product { /** The arity of this product. 
* @return 2 */ - override def productArity = 2 + override def productArity: Int = 2 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,14 +33,14 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 2). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 1)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala index 0893588f9669..206dc1e375cc 100644 --- a/src/library/scala/Product20.scala +++ b/src/library/scala/Product20.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product20 { } /** Product20 is a Cartesian product of 20 components. - * @since 2.3 */ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20] extends Any with Product { /** The arity of this product. 
* @return 20 */ - override def productArity = 20 + override def productArity: Int = 20 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 20). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -59,7 +58,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 17 => _18 case 18 => _19 case 19 => _20 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 19)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala index 463b022ad8fc..0cbb44068fc8 100644 --- a/src/library/scala/Product21.scala +++ b/src/library/scala/Product21.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product21 { } /** Product21 is a Cartesian product of 21 components. - * @since 2.3 */ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21] extends Any with Product { /** The arity of this product. 
* @return 21 */ - override def productArity = 21 + override def productArity: Int = 21 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 21). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -60,7 +59,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 18 => _19 case 19 => _20 case 20 => _21 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 20)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala index dd251cd60a3f..df6963c03843 100644 --- a/src/library/scala/Product22.scala +++ b/src/library/scala/Product22.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product22 { } /** Product22 is a Cartesian product of 22 components. - * @since 2.3 */ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22] extends Any with Product { /** The arity of this product. 
* @return 22 */ - override def productArity = 22 + override def productArity: Int = 22 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 22). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -61,7 +60,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + case 19 => _20 case 20 => _21 case 21 => _22 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 21)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala index ec73f4e15c22..48de4b6e7d20 100644 --- a/src/library/scala/Product3.scala +++ b/src/library/scala/Product3.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product3 { } /** Product3 is a Cartesian product of 3 components. - * @since 2.3 */ trait Product3[+T1, +T2, +T3] extends Any with Product { /** The arity of this product. 
* @return 3 */ - override def productArity = 3 + override def productArity: Int = 3 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,15 +33,15 @@ trait Product3[+T1, +T2, +T3] extends Any with Product { * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 3). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 2)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala index 1eb820b8f14d..7b34b570f1f0 100644 --- a/src/library/scala/Product4.scala +++ b/src/library/scala/Product4.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product4 { } /** Product4 is a Cartesian product of 4 components. - * @since 2.3 */ trait Product4[+T1, +T2, +T3, +T4] extends Any with Product { /** The arity of this product. * @return 4 */ - override def productArity = 4 + override def productArity: Int = 4 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,16 +33,16 @@ trait Product4[+T1, +T2, +T3, +T4] extends Any with Product { * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. 
- * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 4). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 case 3 => _4 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 3)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala index 3b3c0aca2338..769e2f0b22d3 100644 --- a/src/library/scala/Product5.scala +++ b/src/library/scala/Product5.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product5 { } /** Product5 is a Cartesian product of 5 components. - * @since 2.3 */ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Any with Product { /** The arity of this product. * @return 5 */ - override def productArity = 5 + override def productArity: Int = 5 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,17 +33,17 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Any with Product { * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 5). 
*/ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 case 3 => _4 case 4 => _5 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 4)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala index 7c77df212139..aff1fbb92e46 100644 --- a/src/library/scala/Product6.scala +++ b/src/library/scala/Product6.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product6 { } /** Product6 is a Cartesian product of 6 components. - * @since 2.3 */ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Any with Product { /** The arity of this product. * @return 6 */ - override def productArity = 6 + override def productArity: Int = 6 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,18 +33,18 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Any with Product { * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 6). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 case 3 => _4 case 4 => _5 case 5 => _6 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 5)") } /** A projection of element 1 of this Product. 
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala index 05a4be35fbe4..7aef56fc53a6 100644 --- a/src/library/scala/Product7.scala +++ b/src/library/scala/Product7.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product7 { } /** Product7 is a Cartesian product of 7 components. - * @since 2.3 */ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product { /** The arity of this product. * @return 7 */ - override def productArity = 7 + override def productArity: Int = 7 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product { * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 7). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -46,7 +45,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product { case 4 => _5 case 5 => _6 case 6 => _7 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 6)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala index 3bf83f34f301..f8604b887358 100644 --- a/src/library/scala/Product8.scala +++ b/src/library/scala/Product8.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product8 { } /** Product8 is a Cartesian product of 8 components. - * @since 2.3 */ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product { /** The arity of this product. * @return 8 */ - override def productArity = 8 + override def productArity: Int = 8 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 8). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -47,7 +46,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product case 5 => _6 case 6 => _7 case 7 => _8 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 7)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala index 895032834ef6..6731142a015b 100644 --- a/src/library/scala/Product9.scala +++ b/src/library/scala/Product9.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,12 @@ object Product9 { } /** Product9 is a Cartesian product of 9 components. 
- * @since 2.3 */ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Product { /** The arity of this product. * @return 9 */ - override def productArity = 9 + override def productArity: Int = 9 /** Returns the n-th projection of this product if 0 <= n < productArity, @@ -34,11 +33,11 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Pro * * @param n number of the projection to be returned * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`. - * @throws IndexOutOfBoundsException + * @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 9). */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int): Any = n match { case 0 => _1 case 1 => _2 case 2 => _3 @@ -48,7 +47,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Pro case 6 => _7 case 7 => _8 case 8 => _9 - case _ => throw new IndexOutOfBoundsException(n.toString()) + case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 8)") } /** A projection of element 1 of this Product. diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala index e75ec6761049..8da03133e3cd 100644 --- a/src/library/scala/Proxy.scala +++ b/src/library/scala/Proxy.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,10 +22,8 @@ package scala * }}} * '''Note:''' forwarding methods in this way will most likely create * an asymmetric equals method, which is not generally recommended. 
- * - * @author Matthias Zenger - * @since 1.0 */ +@deprecated("Explicitly override hashCode, equals and toString instead.", "2.13.0") trait Proxy extends Any { def self: Any @@ -39,9 +37,11 @@ trait Proxy extends Any { override def toString = "" + self } +@deprecated("All members of this object are deprecated.", "2.13.0") object Proxy { /** A proxy which exposes the type it is proxying for via a type parameter. */ + @deprecated("Explicitly override hashCode, equals and toString instead.", "2.13.0") trait Typed[T] extends Any with Proxy { def self: T } diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala deleted file mode 100644 index e741bcf8ed73..000000000000 --- a/src/library/scala/Responder.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -/** This object contains utility methods to build responders. - * - * @author Martin Odersky - * @author Burak Emir - * - * @see class Responder - * @since 2.1 - */ -@deprecated("this object will be removed", "2.11.0") -object Responder { - - /** Creates a responder that answer continuations with the constant `a`. - */ - def constant[A](x: A) = new Responder[A] { - def respond(k: A => Unit) = k(x) - } - - /** Executes `x` and returns `'''true'''`, useful as syntactic - * convenience in for comprehensions. - */ - def exec[A](x: => Unit): Boolean = { x; true } - - /** Runs a responder, returning an optional result. 
- */ - def run[A](r: Responder[A]): Option[A] = { - var result: Option[A] = None - r.foreach(x => result = Some(x)) - result - } - - def loop[A](r: Responder[Unit]): Responder[Nothing] = - for (_ <- r; y <- loop(r)) yield y - - def loopWhile[A](cond: => Boolean)(r: Responder[Unit]): Responder[Unit] = - if (cond) for (_ <- r; y <- loopWhile(cond)(r)) yield y - else constant(()) -} - -/** Instances of responder are the building blocks of small programs - * written in continuation passing style. By using responder classes - * in for comprehensions, one can embed domain-specific languages in - * Scala while giving the impression that programs in these DSLs are - * written in direct style. - * - * @author Martin Odersky - * @author Burak Emir - * @since 2.1 - */ -@deprecated("this class will be removed", "2.11.0") -abstract class Responder[+A] extends Serializable { - - def respond(k: A => Unit): Unit - - def foreach(k: A => Unit) { respond(k) } - - def map[B](f: A => B) = new Responder[B] { - def respond(k: B => Unit) { - Responder.this.respond(x => k(f(x))) - } - } - - def flatMap[B](f: A => Responder[B]) = new Responder[B] { - def respond(k: B => Unit) { - Responder.this.respond(x => f(x).respond(k)) - } - } - - def filter(p: A => Boolean) = new Responder[A] { - def respond(k: A => Unit) { - Responder.this.respond(x => if (p(x)) k(x) else ()) - } - } - - override def toString = "Responder" -} diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala index 05023df34f19..0c85b2591247 100644 --- a/src/library/scala/SerialVersionUID.scala +++ b/src/library/scala/SerialVersionUID.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,15 @@ package scala /** - * Annotation for specifying the `static SerialVersionUID` field - * of a serializable class. - */ -class SerialVersionUID(value: Long) extends scala.annotation.ClassfileAnnotation + * Annotation for specifying the `serialVersionUID` field of a (serializable) class. + * + * On the JVM, a class with this annotation will receive a `private`, `static`, + * and `final` field called `serialVersionUID` with the provided `value`, + * which the JVM's serialization mechanism uses to determine serialization + * compatibility between different versions of a class. + * + * @see [[java.io.Serializable]] + * @see [[Serializable]] + */ +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") +class SerialVersionUID(value: Long) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/Serializable.scala b/src/library/scala/Serializable.scala deleted file mode 100644 index 99c839329b34..000000000000 --- a/src/library/scala/Serializable.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -/** - * Classes extending this trait are serializable across platforms (Java, .NET). - */ -trait Serializable extends Any with java.io.Serializable diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala index 334e60ae893f..4a56d71d0733 100644 --- a/src/library/scala/Short.scala +++ b/src/library/scala/Short.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -46,6 +46,7 @@ final abstract class Short private extends AnyVal { /** Returns the negation of this value. */ def unary_- : Int + @deprecated("Adding a number and a String is deprecated. Use the string interpolation `s\"$num$str\"`", "2.13.0") def +(x: String): String /** diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala index f7afc104d4c1..54fb59dba83e 100644 --- a/src/library/scala/Specializable.scala +++ b/src/library/scala/Specializable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,15 +19,20 @@ trait Specializable object Specializable { // No type parameter in @specialized annotation. - trait SpecializedGroup { } + trait SpecializedGroup // Smuggle a list of types by way of a tuple upon which Group is parameterized. - class Group[T >: Null](value: T) extends SpecializedGroup { } + class Group[T >: Null](value: T) extends SpecializedGroup - final val Primitives = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)) - final val Everything = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)) - final val Bits32AndUp = new Group((Int, Long, Float, Double)) - final val Integral = new Group((Byte, Short, Int, Long, Char)) - final val AllNumeric = new Group((Byte, Short, Int, Long, Char, Float, Double)) - final val BestOfBreed = new Group((Int, Double, Boolean, Unit, AnyRef)) + final val Primitives: Group[(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)] = null + final val Everything: Group[(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)] = null + final val Bits32AndUp: Group[(Int, Long, Float, Double)] = null + final val Integral: Group[(Byte, Short, Int, Long, Char)] = null + final val AllNumeric: Group[(Byte, Short, Int, Long, Char, Float, Double)] 
= null + final val BestOfBreed: Group[(Int, Double, Boolean, Unit, AnyRef)] = null + final val Unit: Group[Tuple1[Unit]] = null + + final val Arg: Group[(Int, Long, Float, Double)] = null + final val Args: Group[(Int, Long, Double)] = null + final val Return: Group[(Int, Long, Float, Double, Boolean, Unit)] = null } diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala index 68ff6e09a886..ec5c49a2349e 100644 --- a/src/library/scala/StringContext.scala +++ b/src/library/scala/StringContext.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,7 @@ package scala import java.lang.{ StringBuilder => JLSBuilder } import scala.annotation.tailrec +import scala.collection.mutable.ArrayBuilder /** This class provides the basic mechanism to do String Interpolation. * String Interpolation allows users @@ -51,24 +52,15 @@ import scala.annotation.tailrec * Here the `JsonHelper` extension class implicitly adds the `json` method to * `StringContext` which can be used for `json` string literals. * - * @since 2.10.0 * @param parts The parts that make up the interpolated string, * without the expressions that get inserted by interpolation. */ case class StringContext(parts: String*) { - import StringContext._ - - /** Checks that the length of the given argument `args` is one less than the number - * of `parts` supplied to the enclosing `StringContext`. - * @param `args` The arguments to be checked. - * @throws IllegalArgumentException if this is not the case. 
- */ - def checkLengths(args: Seq[Any]): Unit = - if (parts.length != args.length + 1) - throw new IllegalArgumentException("wrong number of arguments ("+ args.length - +") for interpolated string with "+ parts.length +" parts") + import StringContext.{checkLengths => scCheckLengths, glob, processEscapes, standardInterpolator => scStandardInterpolator} + @deprecated("use same-named method on StringContext companion object", "2.13.0") + def checkLengths(args: scala.collection.Seq[Any]): Unit = scCheckLengths(args, parts) /** The simple string interpolator. * @@ -98,8 +90,45 @@ case class StringContext(parts: String*) { * @note The Scala compiler may replace a call to this method with an equivalent, but more efficient, * use of a StringBuilder. */ - def s(args: Any*): String = standardInterpolator(treatEscapes, args) - + def s(args: Any*): String = macro ??? // fasttracked to scala.tools.reflect.FastStringInterpolator::interpolateS + object s { + /** The simple string matcher. + * + * Attempts to match the input string to the given interpolated patterns via + * a naive globbing, that is the reverse of the simple interpolator. + * + * Here is an example usage: + * + * {{{ + * val s"Hello, \$name" = "Hello, James" + * println(name) // "James" + * }}} + * + * In this example, the string "James" ends up matching the location where the pattern + * `\$name` is positioned, and thus ends up bound to that variable. 
+ * + * Multiple matches are supported: + * + * {{{ + * val s"\$greeting, \$name" = "Hello, James" + * println(greeting) // "Hello" + * println(name) // "James" + * }}} + * + * And the `s` matcher can match an arbitrary pattern within the `\${}` block, for example: + * + * {{{ + * val TimeSplitter = "([0-9]+)[.:]([0-9]+)".r + * val s"The time is \${TimeSplitter(hours, mins)}" = "The time is 10.50" + * println(hours) // 10 + * println(mins) // 50 + * }}} + * + * Here, we use the `TimeSplitter` regex within the `s` matcher, further splitting the + * matched string "10.50" into its constituent parts + */ + def unapplySeq(s: String): Option[Seq[String]] = glob(parts.map(processEscapes), s) + } /** The raw string interpolator. * * It inserts its arguments between corresponding parts of the string context. @@ -108,7 +137,9 @@ case class StringContext(parts: String*) { * * For example, the raw processed string `raw"a\nb"` is equal to the scala string `"a\\nb"`. * - * ''Note:'' Even when using the raw interpolator, Scala will preprocess unicode escapes. + * ''Note:'' Even when using the raw interpolator, Scala will process Unicode escapes. + * Unicode processing in the raw interpolator is deprecated as of scala 2.13.2 and + * will be removed in the future * For example: * {{{ * scala> raw"\u005cu0023" @@ -122,19 +153,10 @@ case class StringContext(parts: String*) { * @note The Scala compiler may replace a call to this method with an equivalent, but more efficient, * use of a StringBuilder. */ - def raw(args: Any*): String = standardInterpolator(identity, args) + def raw(args: Any*): String = macro ??? 
// fasttracked to scala.tools.reflect.FastStringInterpolator::interpolateRaw - def standardInterpolator(process: String => String, args: Seq[Any]): String = { - checkLengths(args) - val pi = parts.iterator - val ai = args.iterator - val bldr = new JLSBuilder(process(pi.next())) - while (ai.hasNext) { - bldr append ai.next - bldr append process(pi.next()) - } - bldr.toString - } + @deprecated("Use the static method StringContext.standardInterpolator instead of the instance method", "2.13.0") + def standardInterpolator(process: String => String, args: Seq[Any]): String = scStandardInterpolator(process, args, parts) /** The formatted string interpolator. * @@ -171,53 +193,198 @@ case class StringContext(parts: String*) { * 2. Any `%` characters not in formatting positions must begin one of the conversions * `%%` (the literal percent) or `%n` (the platform-specific line separator). */ - // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f` - // Using the mechanism implemented in `scala.tools.reflect.FastTrack` - def f[A >: Any](args: A*): String = macro ??? + def f[A >: Any](args: A*): String = macro ??? // fasttracked to scala.tools.reflect.FormatInterpolator::interpolateF } object StringContext { + /** + * Linear time glob-matching implementation. 
+ * Adapted from https://research.swtch.com/glob + * + * @param patternChunks The non-wildcard portions of the input pattern, + * separated by wildcards + * @param input The input you wish to match against + * @return None if there is no match, Some containing the sequence of matched + * wildcard strings if there is a match + */ + def glob(patternChunks: Seq[String], input: String): Option[Seq[String]] = { + var patternIndex = 0 + var inputIndex = 0 + var nextPatternIndex = 0 + var nextInputIndex = 0 + + val numWildcards = patternChunks.length - 1 + val matchStarts = Array.fill(numWildcards)(-1) + val matchEnds = Array.fill(numWildcards)(-1) + + val nameLength = input.length + // The final pattern is as long as all the chunks, separated by 1-character + // glob-wildcard placeholders + val patternLength = patternChunks.iterator.map(_.length).sum + numWildcards + + // Convert the input pattern chunks into a single sequence of shorts; each + // non-negative short represents a character, while -1 represents a glob wildcard + val pattern = { + val b = new ArrayBuilder.ofShort ; b.sizeHint(patternLength) + patternChunks.head.foreach(c => b.addOne(c.toShort)) + patternChunks.tail.foreach { s => b.addOne(-1) ; s.foreach(c => b.addOne(c.toShort)) } + b.result() + } + + // Lookup table for each character in the pattern to check whether or not + // it refers to a glob wildcard; a non-negative integer indicates which + // glob wildcard it represents, while -1 means it doesn't represent any + val matchIndices = { + val arr = Array.fill(patternLength + 1)(-1) + patternChunks.init.zipWithIndex.foldLeft(0) { case (ttl, (chunk, i)) => + val sum = ttl + chunk.length + arr(sum) = i + sum + 1 + } + arr + } + + while (patternIndex < patternLength || inputIndex < nameLength) { + matchIndices(patternIndex) match { + case -1 => // do nothing + case n => + matchStarts(n) = matchStarts(n) match { + case -1 => inputIndex + case s => math.min(s, inputIndex) + } + matchEnds(n) = matchEnds(n) 
match { + case -1 => inputIndex + case s => math.max(s, inputIndex) + } + } + + val continue = if (patternIndex < patternLength) { + val c = pattern(patternIndex) + c match { + case -1 => // zero-or-more-character wildcard + // Try to match at nx. If that doesn't work out, restart at nx+1 next. + nextPatternIndex = patternIndex + nextInputIndex = inputIndex + 1 + patternIndex += 1 + true + case _ => // ordinary character + if (inputIndex < nameLength && input(inputIndex) == c) { + patternIndex += 1 + inputIndex += 1 + true + } else { + false + } + } + } else false + + // Mismatch. Maybe restart. + if (!continue) { + if (0 < nextInputIndex && nextInputIndex <= nameLength) { + patternIndex = nextPatternIndex + inputIndex = nextInputIndex + } else { + return None + } + } + } + + // Matched all of pattern to all of name. Success. + Some(collection.immutable.ArraySeq.unsafeWrapArray( + Array.tabulate(patternChunks.length - 1)(n => input.slice(matchStarts(n), matchEnds(n))) + )) + } /** An exception that is thrown if a string contains a backslash (`\`) character * that does not start a valid escape sequence. * @param str The offending string * @param index The index of the offending backslash character in `str`. */ - class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int) extends IllegalArgumentException( + class InvalidEscapeException(str: String, val index: Int) extends IllegalArgumentException( s"""invalid escape ${ require(index >= 0 && index < str.length) - val ok = """[\b, \t, \n, \f, \r, \\, \", \']""" + val ok = s"""[\\b, \\t, \\n, \\f, \\r, \\\\, \\", \\', \\uxxxx]""" if (index == str.length - 1) "at terminal" else s"'\\${str(index + 1)}' not one of $ok at" } index $index in "$str". 
Use \\\\ for literal \\.""" ) + protected[scala] class InvalidUnicodeEscapeException(str: String, val escapeStart: Int, val index: Int) extends IllegalArgumentException( + s"""invalid unicode escape at index $index of $str""" + ) + + private[this] def readUEscape(src: String, startindex: Int): (Char, Int) = { + val len = src.length() + def loop(uindex: Int): (Char, Int) = { + def loopCP(dindex: Int, codepoint: Int): (Char, Int) = { + //supports BMP + surrogate escapes + //but only in four hex-digit code units (uxxxx) + if(dindex >= 4) { + val usRead = uindex - startindex + val digitsRead = dindex + (codepoint.asInstanceOf[Char], usRead + digitsRead) + } + else if (dindex + uindex >= len) + throw new InvalidUnicodeEscapeException(src, startindex, uindex + dindex) + else { + val ch = src(dindex + uindex) + val e = ch.asDigit + if(e >= 0 && e <= 15) loopCP(dindex + 1, (codepoint << 4) + e) + else throw new InvalidUnicodeEscapeException(src, startindex, uindex + dindex) + } + } + if(uindex >= len) throw new InvalidUnicodeEscapeException(src, startindex, uindex - 1) + //allow one or more `u` characters between the + //backslash and the code unit + else if(src(uindex) == 'u') loop(uindex + 1) + else loopCP(0, 0) + } + loop(startindex) + } + /** Expands standard Scala escape sequences in a string. * Escape sequences are: * control: `\b`, `\t`, `\n`, `\f`, `\r` * escape: `\\`, `\"`, `\'` - * octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`. * * @param str A string that may contain escape sequences * @return The string with all escape sequences expanded. */ - def treatEscapes(str: String): String = treatEscapes0(str, strict = false) + @deprecated("use processEscapes", "2.13.0") + def treatEscapes(str: String): String = processEscapes(str) - /** Treats escapes, but disallows octal escape sequences. */ - def processEscapes(str: String): String = treatEscapes0(str, strict = true) + /** Expands standard Scala escape sequences in a string. 
+ * Escape sequences are: + * control: `\b`, `\t`, `\n`, `\f`, `\r` + * escape: `\\`, `\"`, `\'` + * + * @param str A string that may contain escape sequences + * @return The string with all escape sequences expanded. + */ + def processEscapes(str: String): String = + str indexOf '\\' match { + case -1 => str + case i => replace(str, i) + } - private def treatEscapes0(str: String, strict: Boolean): String = { - val len = str.length - // replace escapes with given first escape - def replace(first: Int): String = { - val b = new JLSBuilder - // append replacement starting at index `i`, with `next` backslash - @tailrec def loop(i: Int, next: Int): String = { - if (next >= 0) { - //require(str(next) == '\\') - if (next > i) b.append(str, i, next) + protected[scala] def processUnicode(str: String): String = + str indexOf "\\u" match { + case -1 => str + case i => replaceU(str, i) + } + + //replace escapes with given first escape + private[this] def replace(str: String, first: Int): String = { + val len = str.length() + val b = new JLSBuilder + // append replacement starting at index `i`, with `next` backslash + @tailrec def loop(i: Int, next: Int): String = { + if (next >= 0) { + //require(str(next) == '\\') + if (next > i) b.append(str, i, next) var idx = next + 1 if (idx >= len) throw new InvalidEscapeException(str, next) val c = str(idx) match { + case 'u' => 'u' case 'b' => '\b' case 't' => '\t' case 'n' => '\n' @@ -226,25 +393,12 @@ object StringContext { case '"' => '"' case '\'' => '\'' case '\\' => '\\' - case o if '0' <= o && o <= '7' => - if (strict) throw new InvalidEscapeException(str, next) - val leadch = str(idx) - var oct = leadch - '0' - idx += 1 - if (idx < len && '0' <= str(idx) && str(idx) <= '7') { - oct = oct * 8 + str(idx) - '0' - idx += 1 - if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') { - oct = oct * 8 + str(idx) - '0' - idx += 1 - } - } - idx -= 1 // retreat - oct.toChar case _ => throw new InvalidEscapeException(str, 
next) } - idx += 1 // advance - b append c + val (ch, advance) = if (c == 'u') readUEscape(str, idx) + else (c, 1) + idx += advance + b append ch loop(idx, str.indexOf('\\', idx)) } else { if (i < len) b.append(str, i, len) @@ -253,9 +407,70 @@ object StringContext { } loop(0, first) } - str indexOf '\\' match { - case -1 => str - case i => replace(i) + + /** replace Unicode escapes starting at index `backslash` which must be the + * index of the first index of a backslash character followed by a `u` + * character + * + * If a backslash is followed by one or more `u` characters and there is + * an odd number of backslashes immediately preceding the `u`, processing + * the escape is attempted and an invalid escape is an error. + * The odd backslashes rule is, well, odd, but is grandfathered in from + * pre-2.13.2 times, when this same rule existed in the scanner, and was also + * odd. Since escape handling here is for backwards compatibility only, that + * backwards compatibility is also retained. 
+ * Otherwise, the backslash is not taken to introduce an escape and the + * backslash is taken to be literal + */ + private[this] def replaceU(str: String, backslash: Int): String = { + val len = str.length() + val b = new JLSBuilder + + @tailrec def loop(i: Int, next: Int): String = { + if (next >= 0) { + //require(str(next) == '\\' && str(next + 1) == 'u') + def oddBackslashes(ibackslash: Int): Boolean = + if (ibackslash > 0 && str(ibackslash - 1) == '\\') oddBackslashes(ibackslash - 1) + else ((next - ibackslash) % 2) == 0 + + if(oddBackslashes(next)) { + if (next > i) b.append(str, i, next) + val idx = next + 1 + val (ch, advance) = readUEscape(str, idx) + val nextCharIndex = idx + advance + b.append(ch) + loop(nextCharIndex, str.indexOf("\\u", nextCharIndex)) + } + else loop(i, str.indexOf("\\u", next + 1)) + } + else { + if (i < len) b.append(str, i, len) + b.toString() + } + } + loop(0, backslash) + } + + def standardInterpolator(process: String => String, args: scala.collection.Seq[Any], parts: Seq[String]): String = { + StringContext.checkLengths(args, parts) + val pi = parts.iterator + val ai = args.iterator + val bldr = new JLSBuilder(process(pi.next())) + while (ai.hasNext) { + bldr append ai.next() + bldr append process(pi.next()) } + bldr.toString } + + /** Checks that the length of the given argument `args` is one less than the number + * of `parts` supplied to the `StringContext`. + * + * @throws IllegalArgumentException if this is not the case. 
+ */ + def checkLengths(args: scala.collection.Seq[Any], parts: Seq[String]): Unit = + if (parts.length != args.length + 1) + throw new IllegalArgumentException("wrong number of arguments ("+ args.length + +") for interpolated string with "+ parts.length +" parts") + } diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala index fc7b3613f906..36a99c5e4e6e 100644 --- a/src/library/scala/Symbol.scala +++ b/src/library/scala/Symbol.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,20 +14,11 @@ package scala /** This class provides a simple way to get unique objects for equal strings. * Since symbols are interned, they can be compared using reference equality. - * Instances of `Symbol` can be created easily with Scala's built-in quote - * mechanism. - * - * For instance, the Scala term `'mysym` will - * invoke the constructor of the `Symbol` class in the following way: - * `Symbol("mysym")`. - * - * @author Martin Odersky, Iulian Dragos - * @since 1.7 */ final class Symbol private (val name: String) extends Serializable { - /** Converts this symbol to a string. + /** A string representation of this symbol. */ - override def toString(): String = "'" + name + override def toString(): String = s"Symbol($name)" @throws(classOf[java.io.ObjectStreamException]) private def readResolve(): Any = Symbol.apply(name) @@ -43,16 +34,15 @@ object Symbol extends UniquenessCache[String, Symbol] { /** This is private so it won't appear in the library API, but * abstracted to offer some hope of reusability. 
*/ -private[scala] abstract class UniquenessCache[K, V >: Null] -{ +private[scala] abstract class UniquenessCache[K, V >: Null] { import java.lang.ref.WeakReference import java.util.WeakHashMap import java.util.concurrent.locks.ReentrantReadWriteLock - private val rwl = new ReentrantReadWriteLock() - private val rlock = rwl.readLock - private val wlock = rwl.writeLock - private val map = new WeakHashMap[K, WeakReference[V]] + private[this] val rwl = new ReentrantReadWriteLock() + private[this] val rlock = rwl.readLock + private[this] val wlock = rwl.writeLock + private[this] val map = new WeakHashMap[K, WeakReference[V]] protected def valueFromKey(k: K): V protected def keyFromValue(v: V): Option[K] @@ -85,10 +75,10 @@ private[scala] abstract class UniquenessCache[K, V >: Null] } finally wlock.unlock } - - val res = cached() - if (res == null) updateCache() - else res + cached() match { + case null => updateCache() + case res => res + } } def unapply(other: V): Option[K] = keyFromValue(other) } diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala index a9e6eb31d777..6af3d3582b14 100644 --- a/src/library/scala/Tuple1.scala +++ b/src/library/scala/Tuple1.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,6 +23,6 @@ package scala final case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1) extends Product1[T1] { - override def toString() = "(" + _1 + ")" + override def toString(): String = "(" + _1 + ")" } diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala index f2447ca8ab6f..63fa78016769 100644 --- a/src/library/scala/Tuple10.scala +++ b/src/library/scala/Tuple10.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -32,6 +32,6 @@ package scala final case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10) extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")" } diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala index 63f7ebe23fc6..3cdf35e84b8c 100644 --- a/src/library/scala/Tuple11.scala +++ b/src/library/scala/Tuple11.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -33,6 +33,6 @@ package scala final case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11) extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")" } diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala index 808c91c14cd2..b27538f446c2 100644 --- a/src/library/scala/Tuple12.scala +++ b/src/library/scala/Tuple12.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -34,7 +34,7 @@ package scala final case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12) extends Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")" } diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala index 7a5b9d6b5ff4..84f2f3cecc28 100644 --- a/src/library/scala/Tuple13.scala +++ b/src/library/scala/Tuple13.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -35,7 +35,7 @@ package scala final case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13) extends Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")" } diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala index 7c26a5d23048..08cd54c0cb82 100644 --- a/src/library/scala/Tuple14.scala +++ b/src/library/scala/Tuple14.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -36,7 +36,7 @@ package scala final case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14) extends Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")" } diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala index dd2de9347f61..b4a932352092 100644 --- a/src/library/scala/Tuple15.scala +++ b/src/library/scala/Tuple15.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -37,7 +37,7 @@ package scala final case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15) extends Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")" } diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala index fba6bc957ef5..417fa3aff002 100644 --- a/src/library/scala/Tuple16.scala +++ b/src/library/scala/Tuple16.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -38,7 +38,7 @@ package scala final case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16) extends Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")" } diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala index 1600d31da5bd..e7d63a81d1e9 100644 --- a/src/library/scala/Tuple17.scala +++ b/src/library/scala/Tuple17.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -39,7 +39,7 @@ package scala final case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17) extends Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")" } diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala index b7d30b5c0941..86875130951a 100644 --- a/src/library/scala/Tuple18.scala +++ b/src/library/scala/Tuple18.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -40,7 +40,7 @@ package scala final case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18) extends Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")" } diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala index b1d55c889a45..e3826ddd3073 100644 --- a/src/library/scala/Tuple19.scala +++ b/src/library/scala/Tuple19.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -41,7 +41,7 @@ package scala final case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19) extends Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")" } diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala index ec00f2254f55..3429ed7dea62 100644 --- a/src/library/scala/Tuple2.scala +++ b/src/library/scala/Tuple2.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,7 +24,7 @@ package scala final case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2) extends Product2[T1, T2] { - override def toString() = "(" + _1 + "," + _2 + ")" + override def toString(): String = "(" + _1 + "," + _2 + ")" /** Swaps the elements of this `Tuple`. 
* @return a new Tuple where the first element is the second element of this Tuple and the diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala index cc6493d54774..1d4826b94841 100644 --- a/src/library/scala/Tuple20.scala +++ b/src/library/scala/Tuple20.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -42,7 +42,7 @@ package scala final case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20) extends Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")" } diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala index 0b4be8c55ce2..01503f0e9362 100644 --- a/src/library/scala/Tuple21.scala +++ b/src/library/scala/Tuple21.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -43,7 +43,7 @@ package scala final case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21) extends Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")" } diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala index 9fab34a4e013..3f84e75a5dc6 100644 --- a/src/library/scala/Tuple22.scala +++ b/src/library/scala/Tuple22.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -44,7 +44,7 @@ package scala final case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22) extends Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")" } diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala index 368f11569ad7..b053d9c4c6b2 100644 --- a/src/library/scala/Tuple3.scala +++ b/src/library/scala/Tuple3.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -25,6 +25,6 @@ package scala final case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3) extends Product3[T1, T2, T3] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + ")" } diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala index 0abe4119fc67..29970f510398 100644 --- a/src/library/scala/Tuple4.scala +++ b/src/library/scala/Tuple4.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,6 +26,6 @@ package scala final case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4) extends Product4[T1, T2, T3, T4] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")" } diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala index aac3687b511c..b6dbd2ea6cd4 100644 --- a/src/library/scala/Tuple5.scala +++ b/src/library/scala/Tuple5.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -27,6 +27,6 @@ package scala final case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5) extends Product5[T1, T2, T3, T4, T5] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")" } diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala index 41a01471615c..834d81a43e84 100644 --- a/src/library/scala/Tuple6.scala +++ b/src/library/scala/Tuple6.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -28,6 +28,6 @@ package scala final case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6) extends Product6[T1, T2, T3, T4, T5, T6] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")" } diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala index 38f04b2c2926..d6e86752addd 100644 --- a/src/library/scala/Tuple7.scala +++ b/src/library/scala/Tuple7.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -29,6 +29,6 @@ package scala final case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7) extends Product7[T1, T2, T3, T4, T5, T6, T7] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")" } diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala index 9a94e80dbb7a..035d44e5330e 100644 --- a/src/library/scala/Tuple8.scala +++ b/src/library/scala/Tuple8.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -30,6 +30,6 @@ package scala final case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8) extends Product8[T1, T2, T3, T4, T5, T6, T7, T8] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")" } diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala index f4296d0dcd67..50869e2c9b22 100644 --- a/src/library/scala/Tuple9.scala +++ b/src/library/scala/Tuple9.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -31,6 +31,6 @@ package scala final case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9) extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9] { - override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")" + override def toString(): String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")" } diff --git a/src/library/scala/UninitializedError.scala b/src/library/scala/UninitializedError.scala index 87d9cee23d81..84332c9a9d3e 100644 --- a/src/library/scala/UninitializedError.scala +++ b/src/library/scala/UninitializedError.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,10 +13,7 @@ package scala /** This class represents uninitialized variable/value errors. 
- * - * @author Martin Odersky - * @since 2.5 */ -// TODO: remove in 2.14 +// TODO: remove @deprecated("will be removed in a future release", since = "2.12.7") final class UninitializedError extends RuntimeException("uninitialized value") diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala index 08946df41d4b..d516abb68936 100644 --- a/src/library/scala/UninitializedFieldError.scala +++ b/src/library/scala/UninitializedFieldError.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,8 +17,6 @@ package scala * * Such runtime checks are not emitted by default. * They can be enabled by the `-Xcheckinit` compiler option. - * - * @since 2.7 */ final case class UninitializedFieldError(msg: String) extends RuntimeException(msg) { def this(obj: Any) = this("" + obj) diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala index 440a131e0672..66fde8c72038 100644 --- a/src/library/scala/Unit.scala +++ b/src/library/scala/Unit.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -27,9 +27,13 @@ final abstract class Unit private extends AnyVal { override def getClass(): Class[Unit] = ??? } +@scala.annotation.compileTimeOnly("`Unit` companion object is not allowed in source; instead, use `()` for the unit value") object Unit extends AnyValCompanion { /** Transform a value type into a boxed reference type. + * + * This method is not intended for use in source code. + * The runtime representation of this value is platform specific. * * @param x the Unit to be boxed * @return a scala.runtime.BoxedUnit offering `x` as its underlying value. 
@@ -40,6 +44,9 @@ object Unit extends AnyValCompanion { * method is not typesafe: it accepts any Object, but will throw * an exception if the argument is not a scala.runtime.BoxedUnit. * + * This method is not intended for use in source code. + * The result of successfully unboxing a value is `()`. + * * @param x the scala.runtime.BoxedUnit to be unboxed. * @throws ClassCastException if the argument is not a scala.runtime.BoxedUnit * @return the Unit value () diff --git a/src/library/scala/ValueOf.scala b/src/library/scala/ValueOf.scala new file mode 100644 index 000000000000..30d5f7ff40c7 --- /dev/null +++ b/src/library/scala/ValueOf.scala @@ -0,0 +1,55 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +/** + * `ValueOf[T]` provides the unique value of the type `T` where `T` is a type which has a + * single inhabitant. Eligible types are singleton types of the form `stablePath.type`, + * Unit and singleton types corresponding to value literals. + * + * The value itself can conveniently be retrieved with [[Predef#valueOf]], which requires + * a `ValueOf` to be available in implicit scope. + * + * The compiler provides instances of `ValueOf[T]` for all eligible types. Typically + * an instance would be required where a runtime value corresponding to a type level + * computation is needed. + + * For example, we might define a type `Residue[M <: Int]` corresponding to the group of + * integers modulo `M`. 
We could then mandate that residues can be summed only when they + * are parameterized by the same modulus, + * + * {{{ + * case class Residue[M <: Int](n: Int) extends AnyVal { + * def +(rhs: Residue[M])(implicit m: ValueOf[M]): Residue[M] = + * Residue((this.n + rhs.n) % valueOf[M]) + * } + * + * val fiveModTen = Residue[10](5) + * val nineModTen = Residue[10](9) + * + * fiveModTen + nineModTen // OK == Residue[10](4) + * + * val fourModEleven = Residue[11](4) + * + * fiveModTen + fourModEleven // compiler error: type mismatch; + * // found : Residue[11] + * // required: Residue[10] + * }}} + * + * Notice that here the modulus is encoded in the type of the values and so does not + * incur any additional per-value storage cost. When a runtime value of the modulus + * is required in the implementation of `+` it is provided at the call site via the + * implicit argument `m` of type `ValueOf[M]`. + */ +@scala.annotation.implicitNotFound(msg = "No singleton value available for ${T}; eligible singleton types for `ValueOf` synthesis include literals and stable paths.") +final class ValueOf[T](val value: T) extends AnyVal diff --git a/src/library/scala/annotation/Annotation.scala b/src/library/scala/annotation/Annotation.scala index e39874f62aba..a78842cbf1c4 100644 --- a/src/library/scala/annotation/Annotation.scala +++ b/src/library/scala/annotation/Annotation.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,13 +12,15 @@ package scala.annotation -/** A base class for annotations. Annotations extending this class directly - * are not preserved for the Scala type checker and are also not stored as - * Java annotations in classfiles. 
To enable either or both of these, one - * needs to inherit from [[scala.annotation.StaticAnnotation]] or/and - * [[scala.annotation.ClassfileAnnotation]]. +/** + * A base class for annotations. * - * @author Martin Odersky - * @since 2.4 + * Annotations extending this class directly are not preserved in the classfile. To enable storing + * annotations in the classfile's Scala signature and make it available to Scala reflection and + * other tools, the annotation needs to inherit from [[scala.annotation.StaticAnnotation]]. + * + * Annotation classes defined in Scala are not stored in classfiles in a Java-compatible manner + * and therefore not visible in Java reflection. In order to achieve this, the annotation has to + * be written in Java. */ -abstract class Annotation {} +abstract class Annotation diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala index 0ad112f089d2..be3c98b6130a 100644 --- a/src/library/scala/annotation/ClassfileAnnotation.scala +++ b/src/library/scala/annotation/ClassfileAnnotation.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,10 +13,8 @@ package scala.annotation /** A base class for classfile annotations. These are stored as - * [[http://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]]] + * [[https://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]] * in classfiles. 
- * - * @author Martin Odersky - * @since 2.4 */ -trait ClassfileAnnotation extends StaticAnnotation +@deprecated("Annotation classes need to be written in Java in order to be stored in classfiles in a Java-compatible manner", "2.13.0") +trait ClassfileAnnotation extends ConstantAnnotation diff --git a/src/library/scala/annotation/ConstantAnnotation.scala b/src/library/scala/annotation/ConstantAnnotation.scala new file mode 100644 index 000000000000..b9a933371cc8 --- /dev/null +++ b/src/library/scala/annotation/ConstantAnnotation.scala @@ -0,0 +1,44 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.annotation + +/** + * Annotation classes extending this trait only accept constant values as arguments. + * + * Note that this trait extends [[StaticAnnotation]], so constant annotations are persisted in the + * classfile. + * + * The implementation requires arguments of constant annotations to be passed as named arguments, + * except if there is a single argument, which then defines the annotation's parameter named + * `value`. + * + * Constant annotations may use default arguments. Note that the internal representation of an + * annotation usage (which is visible for compiler plugins, for example) only contains arguments + * that are explicitly provided. + * + * Constant annotations are not allowed to define auxiliary constructors, and the primary + * constructor is required to have a single parameter list. 
+ * + * Example: + * + * {{{ + * class Ann(value: Int, x: Int = 0) extends scala.annotation.ConstantAnnotation + * class Test { + * def someInt = 0 + * @Ann(value = 0, x = 1) def g = 0 + * @Ann(0) def f = 0 // Internal representation contains `@Ann(value = 0)` + * @Ann(someInt) // error: argument needs to be a compile-time constant + * } + * }}} + */ +trait ConstantAnnotation extends StaticAnnotation diff --git a/src/library/scala/annotation/StaticAnnotation.scala b/src/library/scala/annotation/StaticAnnotation.scala index 6a47f28bf2b7..dc0136db70af 100644 --- a/src/library/scala/annotation/StaticAnnotation.scala +++ b/src/library/scala/annotation/StaticAnnotation.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,10 +12,12 @@ package scala.annotation -/** A base class for static annotations. These are available - * to the Scala type checker, even across different compilation units. +/** + * A base class for static annotations. These are available to the Scala type checker or Scala + * reflection, even across different compilation units. * - * @author Martin Odersky - * @since 2.4 + * Annotation classes defined in Scala are not stored in classfiles in a Java-compatible manner + * and therefore not visible in Java reflection. In order to achieve this, the annotation has to + * be written in Java. */ trait StaticAnnotation extends Annotation diff --git a/src/library/scala/annotation/TypeConstraint.scala b/src/library/scala/annotation/TypeConstraint.scala index 51d7b133594e..b9b5a62aa3c9 100644 --- a/src/library/scala/annotation/TypeConstraint.scala +++ b/src/library/scala/annotation/TypeConstraint.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,8 +22,5 @@ package scala.annotation * down is not a proper constrained type, and this marker should not be * applied. A Scala compiler will drop such annotations in cases where it * would rewrite a type constraint. - * - * @author Lex Spoon - * @since 2.6 */ trait TypeConstraint extends Annotation diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala deleted file mode 100644 index e40ce914c6ad..000000000000 --- a/src/library/scala/annotation/bridge.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.annotation - -/** If this annotation is present on a method, it will be treated as a bridge method. - */ -@deprecated("reconsider whether using this annotation will accomplish anything", "2.10.0") -private[scala] class bridge extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/annotation/compileTimeOnly.scala b/src/library/scala/annotation/compileTimeOnly.scala index a2eb330621db..e2eb7560b8bf 100644 --- a/src/library/scala/annotation/compileTimeOnly.scala +++ b/src/library/scala/annotation/compileTimeOnly.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -28,7 +28,6 @@ import scala.annotation.meta._ * * @param message the error message to print during compilation if a reference remains * after type checking - * @since 2.11.0 */ @getter @setter @beanGetter @beanSetter @companionClass @companionMethod final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala index 775e61d483bf..e15f0de8d9f1 100644 --- a/src/library/scala/annotation/elidable.scala +++ b/src/library/scala/annotation/elidable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -37,34 +37,69 @@ package scala.annotation * }}} * * Complete example: - {{{ - import scala.annotation._, elidable._ - object Test extends App { - def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 } - - @elidable(WARNING) def warning(msg: String) = println(msg) - @elidable(FINE) def debug(msg: String) = println(msg) - @elidable(FINE) def computedValue = expensiveComputation() - - warning("Warning! Danger! Warning!") - debug("Debug! Danger! Debug!") - println("I computed a value: " + computedValue) - } - % scalac example.scala && scala Test - Warning! Danger! Warning! - Debug! Danger! Debug! - I computed a value: 172 - - // INFO lies between WARNING and FINE - % scalac -Xelide-below INFO example.scala && scala Test - Warning! Danger! Warning! - I computed a value: 0 - }}} + * {{{ + * import scala.annotation._, elidable._ + * object Test extends App { + * def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 } + * + * @elidable(WARNING) def warning(msg: String) = println(msg) + * @elidable(FINE) def debug(msg: String) = println(msg) + * @elidable(FINE) def computedValue = expensiveComputation() + * + * warning("Warning! Danger! 
Warning!") + * debug("Debug! Danger! Debug!") + * println("I computed a value: " + computedValue) + * } + * % scalac example.scala && scala Test + * Warning! Danger! Warning! + * Debug! Danger! Debug! + * I computed a value: 172 * - * @author Paul Phillips - * @since 2.8 + * // INFO lies between WARNING and FINE + * % scalac -Xelide-below INFO example.scala && scala Test + * Warning! Danger! Warning! + * I computed a value: 0 + * }}} + * + * Note that only concrete methods can be marked `@elidable`. A non-annotated method + * is not elided, even if it overrides / implements a method that has the annotation. + * + * Also note that the static type determines which annotations are considered: + * + * {{{ + * import scala.annotation._, elidable._ + * class C { @elidable(0) def f(): Unit = ??? } + * object O extends C { override def f(): Unit = println("O.f") } + * object Test extends App { + * O.f() // not elided + * (O: C).f() // elided if compiled with `-Xelide-below 1` + * } + * }}} + * + * Note for Scala 3 users: + * If you're using Scala 3, the annotation exists since Scala 3 uses the Scala 2 + * standard library, but it's unsupported by the Scala 3 compiler. Instead, to + * achieve the same result you'd want to utilize the `inline if` feature to + * introduce behavior that makes a method de facto elided at compile-time. + * {{{ + * type LogLevel = Int + * + * object LogLevel: + * inline val Info = 0 + * inline val Warn = 1 + * inline val Debug = 2 + * + * inline val appLogLevel = LogLevel.Warn + * + * inline def log(msg: String, inline level: LogLevel): Unit = + * inline if (level <= appLogLevel) then println(msg) + * + * log("Warn log", LogLevel.Warn) + * + * log("Debug log", LogLevel. 
Debug) + * }}} */ -final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation +final class elidable(final val level: Int) extends scala.annotation.ConstantAnnotation /** This useless appearing code was necessary to allow people to use * named constants for the elidable annotation. This is what it takes @@ -74,8 +109,6 @@ final class elidable(final val level: Int) extends scala.annotation.StaticAnnota * (Select(Level, Select(FINEST, Apply(intValue, Nil)))) * }}} * instead of the number `300`. - * - * @since 2.8 */ object elidable { /** The levels `ALL` and `OFF` are confusing in this context because diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala index c522413a77fe..5520c945fef2 100644 --- a/src/library/scala/annotation/implicitAmbiguous.scala +++ b/src/library/scala/annotation/implicitAmbiguous.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -37,9 +37,6 @@ package scala.annotation * * implicitly[Int =!= Int] * }}} - * - * @author Brian McKenna - * @since 2.12.0 */ @meta.getter -final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation +final class implicitAmbiguous(msg: String) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala index acc2bea24cce..55a9179a394f 100644 --- a/src/library/scala/annotation/implicitNotFound.scala +++ b/src/library/scala/annotation/implicitNotFound.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,12 +14,43 @@ package scala.annotation /** * To customize the error message that's emitted when an implicit of type - * C[T1,..., TN] cannot be found, annotate the class C with @implicitNotFound. - * Assuming C has type parameters X1,..., XN, the error message will be the - * result of replacing all occurrences of ${Xi} in the string msg with the - * string representation of the corresponding type argument Ti. * + * `C[T1,..., TN]` cannot be found, annotate the class `C` with `@implicitNotFound`. + * Assuming `C` has type parameters `X1, ..., XN`, the error message will be the + * result of replacing all occurrences of `\${Xi}` in the string `msg` with the + * string representation of the corresponding type argument `Ti`. + * The annotation is effectively inherited by subtypes if they are not annotated. * - * @author Adriaan Moors - * @since 2.8.1 + * The annotation can also be attached to implicit parameters. In this case, `\${Xi}` + * can refer to type parameters in the current scope. The `@implicitNotFound` message + * on the parameter takes precedence over the one on the parameter's type. + * + * {{{ + * import scala.annotation.implicitNotFound + * + * @implicitNotFound("Could not find an implicit C[\${T}, \${U}]") + * class C[T, U] + * + * class K[A] { + * def m[B](implicit c: C[List[A], B]) = 0 + * def n[B](implicit @implicitNotFound("Specific message for C of list of \${A} and \${B}") c: C[List[A], B]) = 1 + * } + * + * object Test { + * val k = new K[Int] + * k.m[String] + * k.n[String] + * } + * }}} + * + * The compiler issues the following error messages: + * + *
    + * Test.scala:13: error: Could not find an implicit C[List[Int], String]
    + *   k.m[String]
    + *      ^
    + * Test.scala:14: error: Specific message for C of list of Int and String
    + *   k.n[String]
    + *      ^
    + * 
    */ -final class implicitNotFound(msg: String) extends scala.annotation.StaticAnnotation {} +final class implicitNotFound(msg: String) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/annotation/meta/beanGetter.scala b/src/library/scala/annotation/meta/beanGetter.scala index 3d45ade30e69..58d37bfb1cde 100644 --- a/src/library/scala/annotation/meta/beanGetter.scala +++ b/src/library/scala/annotation/meta/beanGetter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/beanSetter.scala b/src/library/scala/annotation/meta/beanSetter.scala index 04483bd1759d..670c67259fa8 100644 --- a/src/library/scala/annotation/meta/beanSetter.scala +++ b/src/library/scala/annotation/meta/beanSetter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/companionClass.scala b/src/library/scala/annotation/meta/companionClass.scala index abff9ccb5d3a..0a7b072b521d 100644 --- a/src/library/scala/annotation/meta/companionClass.scala +++ b/src/library/scala/annotation/meta/companionClass.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/library/scala/annotation/meta/companionMethod.scala b/src/library/scala/annotation/meta/companionMethod.scala index 44eecd2cf541..2e0080a1d61f 100644 --- a/src/library/scala/annotation/meta/companionMethod.scala +++ b/src/library/scala/annotation/meta/companionMethod.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/companionObject.scala b/src/library/scala/annotation/meta/companionObject.scala index d447c87389c4..dc817b138707 100644 --- a/src/library/scala/annotation/meta/companionObject.scala +++ b/src/library/scala/annotation/meta/companionObject.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/defaultArg.scala b/src/library/scala/annotation/meta/defaultArg.scala new file mode 100644 index 000000000000..4964bcb683dc --- /dev/null +++ b/src/library/scala/annotation/meta/defaultArg.scala @@ -0,0 +1,30 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.annotation +package meta + +/** + * This internal meta annotation is used by the compiler to support default annotation arguments. + * + * For an annotation definition `class ann(x: Int = defaultExpr) extends Annotation`, the compiler adds + * `@defaultArg(defaultExpr)` to the parameter `x`. 
This causes the syntax tree of `defaultExpr` to be + * stored in the classfile. + * + * When using a default annotation argument, the compiler can recover the syntax tree and insert it in the + * `AnnotationInfo`. + * + * For details, see `scala.reflect.internal.AnnotationInfos.AnnotationInfo`. + */ +@meta.param class defaultArg(arg: Any) extends StaticAnnotation { + def this() = this(null) +} diff --git a/src/library/scala/annotation/meta/field.scala b/src/library/scala/annotation/meta/field.scala index 267037e8d9f7..ccd64a0179f7 100644 --- a/src/library/scala/annotation/meta/field.scala +++ b/src/library/scala/annotation/meta/field.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/getter.scala b/src/library/scala/annotation/meta/getter.scala index 36d8a76763b5..acbc7989c901 100644 --- a/src/library/scala/annotation/meta/getter.scala +++ b/src/library/scala/annotation/meta/getter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/languageFeature.scala b/src/library/scala/annotation/meta/languageFeature.scala index 6b68f76338cb..f2d9c7890d47 100644 --- a/src/library/scala/annotation/meta/languageFeature.scala +++ b/src/library/scala/annotation/meta/languageFeature.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/library/scala/annotation/meta/package.scala b/src/library/scala/annotation/meta/package.scala index 7d09a8785517..ab315e412e07 100644 --- a/src/library/scala/annotation/meta/package.scala +++ b/src/library/scala/annotation/meta/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/param.scala b/src/library/scala/annotation/meta/param.scala index 5d4ebf5c8221..8c69a3b644b1 100644 --- a/src/library/scala/annotation/meta/param.scala +++ b/src/library/scala/annotation/meta/param.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/setter.scala b/src/library/scala/annotation/meta/setter.scala index fae59b5a48a7..b37979bec597 100644 --- a/src/library/scala/annotation/meta/setter.scala +++ b/src/library/scala/annotation/meta/setter.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/annotation/meta/superArg.scala b/src/library/scala/annotation/meta/superArg.scala new file mode 100644 index 000000000000..181db2651f4e --- /dev/null +++ b/src/library/scala/annotation/meta/superArg.scala @@ -0,0 +1,34 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.annotation +package meta + +/** + * This internal annotation encodes arguments passed to annotation superclasses. Example: + * + * {{{ + * class a(x: Int) extends Annotation + * class b extends a(42) // the compiler adds `@superArg("x", 42)` to class b + * }}} + */ +class superArg(p: String, v: Any) extends StaticAnnotation + +/** + * This internal annotation encodes arguments passed to annotation superclasses. Example: + * + * {{{ + * class a(x: Int) extends Annotation + * class b(y: Int) extends a(y) // the compiler adds `@superFwdArg("x", "y")` to class b + * }}} + */ +class superFwdArg(p: String, n: String) extends StaticAnnotation diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala index 03e61f36aebf..81ef78dbd367 100644 --- a/src/library/scala/annotation/migration.scala +++ b/src/library/scala/annotation/migration.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,7 +26,5 @@ package scala.annotation * * @param changedIn The version, in which the behaviour change was * introduced. - * - * @since 2.8 */ - private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation +private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/annotation/nowarn.scala b/src/library/scala/annotation/nowarn.scala index 889b81f8583f..a083af4544ed 100644 --- a/src/library/scala/annotation/nowarn.scala +++ b/src/library/scala/annotation/nowarn.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,11 @@ package scala.annotation /** An annotation for local warning suppression. * - * The optional `value` parameter allows selectively silencing messages, see `scalac -Wconf:help` - * for help. Examples: + * The optional `value` parameter allows selectively silencing messages. See `-Wconf:help` for help + * writing a message filter expression, or use `@nowarn("verbose")` / `@nowarn("v")` to display message + * filters applicable to a specific warning. + * + * Examples: * * {{{ * def f = { @@ -23,6 +26,9 @@ package scala.annotation * 2 * } * + * // show the warning, plus the applicable @nowarn / Wconf filters ("cat=other-pure-statement", ...) + * @nowarn("v") def f = { 1; 2 } + * * @nowarn def f = { 1; deprecated() } // don't warn * * @nowarn("msg=pure expression does nothing") @@ -30,6 +36,7 @@ package scala.annotation * }}} * * To ensure that a `@nowarn` annotation actually suppresses a warning, enable `-Xlint:unused` or `-Wunused:nowarn`. + * The unused annotation warning is emitted in category `unused-nowarn` and can be selectively managed + * using `-Wconf:cat=unused-nowarn:s`. */ -@nowarn("msg=subclassing ClassfileAnnotation does not\nmake your annotation visible at runtime") -class nowarn(value: String = "") extends ClassfileAnnotation +class nowarn(value: String = "") extends ConstantAnnotation diff --git a/src/library/scala/annotation/showAsInfix.scala b/src/library/scala/annotation/showAsInfix.scala index b5bf349848e7..9c4ecbe2eea1 100644 --- a/src/library/scala/annotation/showAsInfix.scala +++ b/src/library/scala/annotation/showAsInfix.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -34,6 +34,6 @@ package scala.annotation }}} * * @param enabled whether to show this type as an infix type operator. - * @since 2.12.2 */ -class showAsInfix(enabled: Boolean = true) extends annotation.StaticAnnotation \ No newline at end of file +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") +class showAsInfix(enabled: Boolean = true) extends annotation.StaticAnnotation diff --git a/src/library/scala/annotation/strictfp.scala b/src/library/scala/annotation/strictfp.scala index fde18cbdb780..40e297fc4d97 100644 --- a/src/library/scala/annotation/strictfp.scala +++ b/src/library/scala/annotation/strictfp.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,6 @@ package scala.annotation /** If this annotation is present on a method or its enclosing class, * the strictfp flag will be emitted. - * - * @author Paul Phillips - * @since 2.9 */ +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class strictfp extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala index 94df3bfcc132..f4af17741fe6 100644 --- a/src/library/scala/annotation/switch.scala +++ b/src/library/scala/annotation/switch.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,8 @@ package scala.annotation /** An annotation to be applied to a match expression. 
If present, * the compiler will verify that the match has been compiled to a - * [[http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-3.html#jvms-3.10 tableswitch or lookupswitch]] - * and issue an error if it instead compiles into a series of conditional expressions. + * [[https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-3.html#jvms-3.10 tableswitch or lookupswitch]] + * and issue a warning if it instead compiles into a series of conditional expressions. * Example usage: {{{ val Constant = 'Q' @@ -29,8 +29,5 @@ package scala.annotation * * Note: for pattern matches with one or two cases, the compiler generates jump instructions. * Annotating such a match with `@switch` does not issue any warning. - * - * @author Paul Phillips - * @since 2.8 */ final class switch extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/annotation/tailrec.scala b/src/library/scala/annotation/tailrec.scala index 96c1273c07b0..7b3f80dbf66f 100644 --- a/src/library/scala/annotation/tailrec.scala +++ b/src/library/scala/annotation/tailrec.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,7 +17,5 @@ package scala.annotation * * If it is present, the compiler will issue an error if the method cannot * be optimized into a loop. - * - * @since 2.8 */ final class tailrec extends StaticAnnotation diff --git a/src/library/scala/annotation/unchecked/uncheckedStable.scala b/src/library/scala/annotation/unchecked/uncheckedStable.scala index 20ebe8ebbb20..12a18d635fe4 100644 --- a/src/library/scala/annotation/unchecked/uncheckedStable.scala +++ b/src/library/scala/annotation/unchecked/uncheckedStable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,8 +16,6 @@ import scala.annotation.meta.{field, getter} /** An annotation for values that are assumed to be stable even though their * types are volatile. - * - * @since 2.7 */ @getter @field final class uncheckedStable extends scala.annotation.StaticAnnotation {} diff --git a/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/src/library/scala/annotation/unchecked/uncheckedVariance.scala index 83ff3bb977ea..60f06ad95f7d 100644 --- a/src/library/scala/annotation/unchecked/uncheckedVariance.scala +++ b/src/library/scala/annotation/unchecked/uncheckedVariance.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,9 +12,6 @@ package scala.annotation.unchecked -/** An annotation for type arguments for which one wants to suppress variance checking - * types are volatile. - * - * @since 2.7 +/** An annotation for type arguments for which one wants to suppress variance checking. */ final class uncheckedVariance extends scala.annotation.StaticAnnotation {} diff --git a/src/library/scala/annotation/unspecialized.scala b/src/library/scala/annotation/unspecialized.scala index 83c5ccc88f20..c0f668a75298 100644 --- a/src/library/scala/annotation/unspecialized.scala +++ b/src/library/scala/annotation/unspecialized.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,6 @@ package scala.annotation /** A method annotation which suppresses the creation of * additional specialized forms based on enclosing specialized * type parameters. 
- * - * @since 2.10 */ +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class unspecialized extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/annotation/unused.scala b/src/library/scala/annotation/unused.scala new file mode 100644 index 000000000000..270286864822 --- /dev/null +++ b/src/library/scala/annotation/unused.scala @@ -0,0 +1,26 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.annotation + +/** Mark an element unused for a given context. + * + * Unused warnings are suppressed for elements known to be unused. + * + * For example, a method parameter may be marked `@unused` + * because the method is designed to be overridden by + * an implementation that does use the parameter. + */ +@meta.getter @meta.setter +class unused(message: String) extends StaticAnnotation { + def this() = this("") +} diff --git a/src/library/scala/annotation/varargs.scala b/src/library/scala/annotation/varargs.scala index 255f35cb6663..68d1080a53f8 100644 --- a/src/library/scala/annotation/varargs.scala +++ b/src/library/scala/annotation/varargs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,5 @@ package scala.annotation /** A method annotation which instructs the compiler to generate a * Java varargs-style forwarder method for interop. This annotation can * only be applied to methods with repeated parameters. 
- * - * @since 2.9 */ final class varargs extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/beans/BeanDescription.scala b/src/library/scala/beans/BeanDescription.scala deleted file mode 100644 index 01fbfaed7d00..000000000000 --- a/src/library/scala/beans/BeanDescription.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.beans - -/** Provides a short description that will be included when generating - * bean information. This annotation can be attached to the bean itself, - * or to any member. - * - * @author Ross Judson (rjudson@managedobjects.com) - */ -@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.5") -class BeanDescription(val description: String) extends scala.annotation.Annotation - diff --git a/src/library/scala/beans/BeanDisplayName.scala b/src/library/scala/beans/BeanDisplayName.scala deleted file mode 100644 index 49c139414319..000000000000 --- a/src/library/scala/beans/BeanDisplayName.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.beans - -/** Provides a display name when generating bean information. This - * annotation can be attached to the bean itself, or to any member. 
- * - * @author Ross Judson (rjudson@managedobjects.com) - */ -@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.5") -class BeanDisplayName(val name: String) extends scala.annotation.Annotation - diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala deleted file mode 100644 index cf7ba97c9ec6..000000000000 --- a/src/library/scala/beans/BeanInfo.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.beans - -/** This annotation indicates that a JavaBean-compliant `BeanInfo` class - * should be generated for this annotated Scala class. - * - * - A `'''val'''` becomes a read-only property. - * - A `'''var'''` becomes a read-write property. - * - A `'''def'''` becomes a method. - * - * @author Ross Judson (rjudson@managedobjects.com) - */ -@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.0") -class BeanInfo extends scala.annotation.Annotation diff --git a/src/library/scala/beans/BeanInfoSkip.scala b/src/library/scala/beans/BeanInfoSkip.scala deleted file mode 100644 index d23a2960645c..000000000000 --- a/src/library/scala/beans/BeanInfoSkip.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.beans - -/** This annotation indicates that bean information should - * not be generated for the val, var, or def that it is - * attached to. 
- * - * @author Ross Judson (rjudson@managedobjects.com) - */ -@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.5") -class BeanInfoSkip extends scala.annotation.Annotation diff --git a/src/library/scala/beans/BeanProperty.scala b/src/library/scala/beans/BeanProperty.scala index b05326f4a0cf..7b2ef0c9d516 100644 --- a/src/library/scala/beans/BeanProperty.scala +++ b/src/library/scala/beans/BeanProperty.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,6 +12,8 @@ package scala.beans +import scala.annotation.meta.{beanGetter, beanSetter, field} + /** When attached to a field, this annotation adds a setter and a getter * method following the Java Bean convention. For example: * {{{ @@ -20,11 +22,17 @@ package scala.beans * }}} * adds the following methods to the class: * {{{ - * def setStatus(s: String) { this.status = s } - * def getStatus: String = this.status + * def setStatus(s: String): Unit = { this.status = s } + * def getStatus(): String = this.status * }}} * For fields of type `Boolean`, if you need a getter named `isStatus`, * use the `scala.beans.BooleanBeanProperty` annotation instead. + * + * In Scala 2, the added methods are visible from both Scala and Java. + * + * In Scala 3, that has changed. The added methods are only visible from + * Java (including via Java reflection). 
*/ -@scala.annotation.meta.field +@field @beanGetter @beanSetter +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class BeanProperty extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/beans/BooleanBeanProperty.scala b/src/library/scala/beans/BooleanBeanProperty.scala index da865a0fd4a8..72d253df2f8b 100644 --- a/src/library/scala/beans/BooleanBeanProperty.scala +++ b/src/library/scala/beans/BooleanBeanProperty.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,9 +12,12 @@ package scala.beans +import scala.annotation.meta.{beanGetter, beanSetter, field} + /** This annotation has the same functionality as * `scala.beans.BeanProperty`, but the generated Bean getter will be * named `isFieldName` instead of `getFieldName`. */ -@scala.annotation.meta.field +@field @beanGetter @beanSetter +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class BooleanBeanProperty extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala deleted file mode 100644 index e08761027b4b..000000000000 --- a/src/library/scala/beans/ScalaBeanInfo.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.beans - -/** Provides some simple runtime processing necessary to create - * JavaBean descriptors for Scala entities. The compiler creates - * subclasses of this class automatically when the BeanInfo annotation is - * attached to a class. 
- * - * @author Ross Judson (rjudson@managedobjects.com) - */ -@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.5") -abstract class ScalaBeanInfo(clazz: java.lang.Class[_], - props: Array[String], - methods: Array[String]) extends java.beans.SimpleBeanInfo { - - import java.beans._ - - private val pd = new Array[PropertyDescriptor](props.length / 3) - private val md = - for (m <- clazz.getMethods if methods.exists(_ == m.getName)) - yield new MethodDescriptor(m) - - init() - - override def getPropertyDescriptors() = pd - override def getMethodDescriptors() = md - - // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass) - - private def init() { - var i = 0 - while (i < props.length) { - pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2)) - i = i + 3 - } - } - -} - diff --git a/src/library/scala/collection/ArrayOps.scala b/src/library/scala/collection/ArrayOps.scala new file mode 100644 index 000000000000..08758e2ab46a --- /dev/null +++ b/src/library/scala/collection/ArrayOps.scala @@ -0,0 +1,1666 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.CommonErrors +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. 
+ * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. 
Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. + * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. 
+   *
+   * @return The last element of this array.
+   * @throws NoSuchElementException If the array is empty.
+   */
+  def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array")
+
+  /** Optionally selects the first element.
+   *
+   * @return the first element of this array if it is nonempty,
+   *         `None` if it is empty.
+   */
+  def headOption: Option[A] = if(isEmpty) None else Some(head)
+
+  /** Optionally selects the last element.
+   *
+   * @return the last element of this array if it is nonempty,
+   *         `None` if it is empty.
+   */
+  def lastOption: Option[A] = if(isEmpty) None else Some(last)
+
+  /** Compares the size of this array to a test value.
+   *
+   * @param otherSize the test value that gets compared with the size.
+   * @return A value `x` where
+   *         {{{
+   *             x < 0 if this.size < otherSize
+   *             x == 0 if this.size == otherSize
+   *             x > 0 if this.size > otherSize
+   *         }}}
+   */
+  def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize)
+
+  /** Compares the length of this array to a test value.
+   *
+   * @param len the test value that gets compared with the length.
+   * @return A value `x` where
+   *         {{{
+   *             x < 0 if this.length < len
+   *             x == 0 if this.length == len
+   *             x > 0 if this.length > len
+   *         }}}
+   */
+  def lengthCompare(len: Int): Int = Integer.compare(xs.length, len)
+
+  /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int`
+   * because `size` is known and comparison is constant-time.
+ * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array. + * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. 
+ */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. 
+ private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
+ */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) + } 
+ s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. 
+ * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. 
+   *
+   * @return an iterator yielding the elements of this array in reversed order
+   */
+  def reverseIterator: Iterator[A] =
+    ((xs: Any @unchecked) match {
+      case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Int] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Double] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Long] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Float] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Char] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Short] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs)
+      case null => throw new NullPointerException
+    }).asInstanceOf[Iterator[A]]
+
+  /** Selects all elements of this array which satisfy a predicate.
+   *
+   * @param p the predicate used to test elements.
+   * @return a new array consisting of all elements of this array that satisfy the given predicate `p`.
+   */
+  def filter(p: A => Boolean): Array[A] = {
+    val res = ArrayBuilder.make[A]
+    var i = 0
+    while(i < xs.length) {
+      val x = xs(i)
+      if(p(x)) res += x
+      i += 1
+    }
+    res.result()
+  }
+
+  /** Selects all elements of this array which do not satisfy a predicate.
+   *
+   * @param p the predicate used to test elements.
+   * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`.
+   */
+  def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x))
+
+  /** Sorts this array according to an Ordering.
+   *
+   * The sort is stable. That is, elements that are equal (as determined by
+   * `ord.compare`) appear in the same order in the sorted sequence as in the original.
+   *
+   * @see [[scala.math.Ordering]]
+   *
+   * @param ord the ordering to be used to compare elements.
+   * @return an array consisting of the elements of this array
+   *         sorted according to the ordering `ord`.
+ */ + def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { + val len = xs.length + def boxed = if(len < ArrayOps.MaxStableSortLength) { + val a = xs.clone() + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) + a + } else { + val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + Array.copyAs[A](a, len) + } + if(len <= 1) xs.clone() + else ((xs: Array[_]) match { + case xs: Array[AnyRef] => + val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a + case xs: Array[Int] => + if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Long] => + if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Char] => + if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Byte] => + if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Short] => + if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Boolean] => + if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } + else boxed + case xs => boxed + }).asInstanceOf[Array[A]] + } + + /** Sorts this array according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return an array consisting of the elements of this array + * sorted according to the comparison function `lt`. 
+ */ + def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this array according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return an array consisting of the elements of this array + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) + + /** Creates a non-strict filter of this array. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new array, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `ArrayOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this array + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. 
+   *
+   * @param p the predicate used to test elements.
+   * @param from the start index
+   * @return the index `>= from` of the first element of this array that satisfies the predicate `p`,
+   *         or `-1`, if none exists.
+   */
+  def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = {
+    var i = from
+    while(i < xs.length) {
+      if(p(xs(i))) return i
+      i += 1
+    }
+    -1
+  }
+
+  /** Finds index of last occurrence of some value in this array before or at a given end index.
+   *
+   * @param elem the element value to search for.
+   * @param end the end index.
+   * @return the index `<= end` of the last element of this array that is equal (as determined by `==`)
+   *         to `elem`, or `-1`, if none exists.
+   */
+  def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = {
+    var i = min(end, xs.length-1)
+    while(i >= 0) {
+      if(elem == xs(i)) return i
+      i -= 1
+    }
+    -1
+  }
+
+  /** Finds index of last element satisfying some predicate before or at given end index.
+   *
+   * @param p the predicate used to test elements.
+   * @param end the end index.
+   * @return the index `<= end` of the last element of this array that satisfies the predicate `p`,
+   *         or `-1`, if none exists.
+   */
+  def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = {
+    var i = min(end, xs.length-1)
+    while(i >= 0) {
+      if(p(xs(i))) return i
+      i -= 1
+    }
+    -1
+  }
+
+  /** Finds the first element of the array satisfying a predicate, if any.
+   *
+   * @param p the predicate used to test elements.
+   * @return an option value containing the first element in the array
+   *         that satisfies `p`, or `None` if none exists.
+   */
+  def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = {
+    val idx = indexWhere(p)
+    if(idx == -1) None else Some(xs(idx))
+  }
+
+  /** Tests whether a predicate holds for at least one element of this array.
+   *
+   * @param p the predicate used to test elements.
+ * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies the given binary operator `op` to the given initial value `z` and + * all elements of this array, going left to right. Returns the initial value + * if this array is empty. + * + * If `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this array, the + * result is `op( op( ... op( op(z, x,,1,,), x,,2,,) ... ), x,,n,,)`. + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to `z` and all elements of this array, + * going left to right. Returns `z` if this array is empty. 
+ */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. 
+ * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies the given binary operator `op` to all elements of this array and + * the given initial value `z`, going right to left. Returns the initial + * value if this array is empty. + * + * If `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this array, the + * result is `op(x,,1,,, op(x,,2,,, op( ... op(x,,n,,, z) ... )))`. + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to all elements of this array + * and `z`, going right to left. Returns `z` if this array + * is empty. 
+ */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Alias for [[foldLeft]]. + * + * The type parameter is more restrictive than for `foldLeft` to be + * consistent with [[IterableOnceOps.fold]]. + * + * @tparam A1 The type parameter for the binary operator, a supertype of `A`. + * @param z An initial value. + * @param op A binary operator. + * @return The result of applying `op` to `z` and all elements of this array, + * going left to right. Returns `z` if this string is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. 
+ */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. 
+ * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. 
*/ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. 
+ * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. + * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
+ */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. 
*/ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + b.sizeHint(suffix, delta = xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. */ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. + * @return `true` if this array has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: A): Boolean = exists (_ == elem) + + /** Returns a copy of this array with patched values. + * Patching at negative indices is the same as patching starting at 0. 
+ * Patching at indices at or larger than the length of the original array appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param other The patch values + * @param replaced The number of values in the original array that are replaced by the patch. + */ + def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + val b = ArrayBuilder.make[B] + val k = other.knownSize + val r = if(replaced < 0) 0 else replaced + if(k >= 0) b.sizeHint(xs.length + k - r) + val chunk1 = if(from > 0) min(from, xs.length) else 0 + if(chunk1 > 0) b.addAll(xs, 0, chunk1) + b ++= other + val remaining = xs.length - chunk1 - r + if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) + b.result() + } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @param ct1 a class tag for `A1` type parameter that is required to create an instance + * of `Array[A1]` + * @param ct2 a class tag for `A2` type parameter that is required to create an instance + * of `Array[A2]` + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + var i = 0 + while (i < xs.length) { + val e = asPair(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. 
+ * + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. + * @param ct1 a class tag for T1 type parameter that is required to create an instance + * of Array[T1] + * @param ct2 a class tag for T2 type parameter that is required to create an instance + * of Array[T2] + * @param ct3 a class tag for T3 type parameter that is required to create an instance + * of Array[T3] + * @return a triple of Arrays, containing, respectively, the first, second, and third + * elements from each element triple of this Array. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + val a3 = new Array[A3](xs.length) + var i = 0 + while (i < xs.length) { + val e = asTriple(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + a3(i) = e._3 + i += 1 + } + (a1, a2, a3) + } + + /** Transposes a two dimensional array. + * + * @tparam B Type of row elements. + * @param asArray A function that converts elements of this array to rows - arrays of type `B`. + * @return An array obtained by replacing elements of this arrays with rows the represent. 
+ */ + def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- this) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + + /** Selects all the elements of this array ignoring the duplicates. + * + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinct: Array[A] = distinctBy(identity) + + /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. 
+ * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinctBy[B](f: A => B): Array[A] = + ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() + + /** A copy of this array with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned array. + * @return a new array consisting of + * all elements of this array followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + var i = xs.length + val newlen = max(i, len) + val dest = Array.copyAs[B](xs, newlen) + while(i < newlen) { + dest(i) = elem + i += 1 + } + dest + } + + /** Produces the range of all indices of this sequence. + * + * @return a `Range` value from `0` to one less than the length of this array. + */ + def indices: Range = Range(0, xs.length) + + /** Partitions this array into a map of arrays according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. 
+ */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. 
+ * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. */ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + @annotation.unused val copied = copyToArray(destination, 0) + //assert(copied == xs.length) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. 
+ * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) + throw CommonErrors.indexOutOfBounds(index = index, max = xs.length-1) + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. 
We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. 
+ */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. 
+ * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. 
+ */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala index 280242cbcd24..a2da58ea3b9b 100644 --- a/src/library/scala/collection/BitSet.scala +++ b/src/library/scala/collection/BitSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,25 +13,336 @@ package scala package collection -import generic._ +import java.io.{ObjectInputStream, ObjectOutputStream} -/** A common base class for mutable and immutable bitsets. - * $bitsetinfo - */ -trait BitSet extends SortedSet[Int] - with BitSetLike[BitSet] { - override def empty: BitSet = BitSet.empty +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.Builder + + +/** Base type of bitsets. + * + * This trait provides most of the operations of a `BitSet` independently of its representation. + * It is inherited by all concrete implementations of bitsets. + * + * @define bitsetinfo + * Bitsets are sets of non-negative integers which are represented as + * variable-size arrays of bits packed into 64-bit words. The lower bound of memory footprint of a bitset is + * determined by the largest number stored in it. 
+ * @define coll bitset + * @define Coll `BitSet` + */ +trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "BitSet" + override def unsorted: Set[Int] = this } -/** $factoryInfo - * @define coll bitset - * @define Coll `BitSet` - */ -object BitSet extends BitSetFactory[BitSet] { - val empty: BitSet = immutable.BitSet.empty - def newBuilder = immutable.BitSet.newBuilder +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`." 
+ + def empty: BitSet = immutable.BitSet.empty + def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder + def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it) + + @SerialVersionUID(3L) + private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { + + @transient protected var elems: Array[Long] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val nwords = coll.nwords + out.writeInt(nwords) + var i = 0 + while(i < nwords) { + out.writeLong(coll.word(i)) + i += 1 + } + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val nwords = in.readInt() + elems = new Array[Long](nwords) + var i = 0 + while(i < nwords) { + elems(i) = in.readLong() + i += 1 + } + } + + protected[this] def readResolve(): Any + } +} + +/** Base implementation type of bitsets */ +trait BitSetOps[+C <: BitSet with BitSetOps[C]] + extends SortedSetOps[Int, SortedSet, C] { self => + import BitSetOps._ + + def bitSetFactory: SpecificIterableFactory[Int, C] + + def unsorted: Set[Int] + + final def ordering: Ordering[Int] = Ordering.Int + + /** The number of words (each with 64 bits) making up the set */ + protected[collection] def nwords: Int + + /** The words at index `idx`, or 0L if outside the range of the set + * '''Note:''' requires `idx >= 0` + */ + protected[collection] def word(idx: Int): Long + + /** Creates a new set of this kind from an array of longs + */ + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C + + def contains(elem: Int): Boolean = + 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L + + def iterator: Iterator[Int] = iteratorFrom(0) + + def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] { + private[this] var currentPos = if (start > 0) start >> LogWL else 0 + private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & 
(WordLength - 1))) else word(0) + final override def hasNext: Boolean = { + while (currentWord == 0) { + if (currentPos + 1 >= nwords) return false + currentPos += 1 + currentWord = word(currentPos) + } + true + } + final override def next(): Int = { + if (hasNext) { + val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord) + currentWord &= currentWord - 1 + (currentPos << LogWL) + bitPos + } else Iterator.empty.next() + } + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = scala.collection.convert.impl.BitSetStepper.from(this) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } - /** $canBuildFromInfo */ - implicit val canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom + override def size: Int = { + var s = 0 + var i = nwords + while (i > 0) { + i -= 1 + s += java.lang.Long.bitCount(word(i)) + } + s + } + + override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0) + + @inline private[this] def smallestInt: Int = { + val thisnwords = nwords + var i = 0 + while(i < thisnwords) { + val currentWord = word(i) + if (currentWord != 0L) { + return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength) + } + i += 1 + } + throw new UnsupportedOperationException("empty.smallestInt") + } + + @inline private[this] def largestInt: Int = { + var i = nwords - 1 + while(i >= 0) { + val currentWord = word(i) + if (currentWord != 0L) { + return ((i + 1) * WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 + } + i -= 1 + } + throw new UnsupportedOperationException("empty.largestInt") + } + + override def max[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) largestInt + else if (Ordering.Int isReverseOf ord) smallestInt + else super.max(ord) + 
+ + override def min[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) smallestInt + else if (Ordering.Int isReverseOf ord) largestInt + else super.min(ord) + + override def foreach[U](f: Int => U): Unit = { + /* NOTE: while loops are significantly faster as of 2.11 and + one major use case of bitsets is performance. Also, there + is nothing to do when all bits are clear, so use that as + the inner loop condition. */ + var i = 0 + while (i < nwords) { + var w = word(i) + var j = i * WordLength + while (w != 0L) { + if ((w&1L) == 1L) f(j) + w = w >>> 1 + j += 1 + } + i += 1 + } + } + + /** Creates a bit mask for this set as a new array of longs + */ + def toBitMask: Array[Long] = { + val a = new Array[Long](nwords) + var i = a.length + while(i > 0) { + i -= 1 + a(i) = word(i) + } + a + } + + def rangeImpl(from: Option[Int], until: Option[Int]): C = { + val a = coll.toBitMask + val len = a.length + if (from.isDefined) { + val f = from.get + val w = f >> LogWL + val b = f & (WordLength - 1) + if (w >= 0) { + java.util.Arrays.fill(a, 0, math.min(w, len), 0) + if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) + } + } + if (until.isDefined) { + val u = until.get + val w = u >> LogWL + val b = u & (WordLength - 1) + if (w < len) { + java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) + if (w >= 0) a(w) &= (1L << b) - 1 + } + } + coll.fromBitMaskNoCopy(a) + } + + override def concat(other: collection.IterableOnce[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords max otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) | otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.concat(other) + } + + override def intersect(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords min otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & otherBitset.word(idx) + 
fromBitMaskNoCopy(words) + case _ => super.intersect(other) + } + + abstract override def diff(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & ~otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.diff(other) + } + + /** Computes the symmetric difference of this bitset and another bitset by performing + * a bitwise "exclusive-or". + * + * @param other the other bitset to take part in the symmetric difference. + * @return a bitset containing those bits of this + * bitset or the other bitset that are not contained in both bitsets. + */ + def xor(other: BitSet): C = { + val len = coll.nwords max other.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = coll.word(idx) ^ other.word(idx) + coll.fromBitMaskNoCopy(words) + } + + @`inline` final def ^ (other: BitSet): C = xor(other) + + /** + * Builds a new bitset by applying a function to all elements of this bitset + * @param f the function to apply to each element. 
+ * @return a new bitset resulting from applying the given function ''f'' to + * each element of this bitset and collecting the results + */ + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) + + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) + + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) + + override def partition(p: Int => Boolean): (C, C) = { + val left = filter(p) + (left, this &~ left) + } } +object BitSetOps { + + /* Final vals can sometimes be inlined as constants (faster) */ + private[collection] final val LogWL = 6 + private[collection] final val WordLength = 64 + private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 + + private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { + var len = elems.length + while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 + var newlen = len + if (idx >= newlen && w != 0L) newlen = idx + 1 + val newelems = new Array[Long](newlen) + Array.copy(elems, 0, newelems, 0, len) + if (idx < newlen) newelems(idx) = w + else assert(w == 0L) + newelems + } + + private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = + if (oldWord == 0L) 0L else { + var w = oldWord + val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) + var jmask = 1L << trailingZeroes + var j = wordIndex * BitSetOps.WordLength + trailingZeroes + val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) + while (j != maxJ) { + if ((w & jmask) != 0L) { + if (pred(j) == isFlipped) { + // j did not pass the filter here + w = w & ~jmask + } + } + jmask = jmask << 1 + j += 1 + } + w + } +} diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala deleted file mode 100644 index 6d0fad27b67a..000000000000 --- 
a/src/library/scala/collection/BitSetLike.scala +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import BitSetLike._ -import mutable.StringBuilder - -/** A template trait for bitsets. - * $bitsetinfo - * - * This trait provides most of the operations of a `BitSet` independently of its representation. - * It is inherited by all concrete implementations of bitsets. - * - * @tparam This the type of the bitset itself. - * - * @define bitsetinfo - * Bitsets are sets of non-negative integers which are represented as - * variable-size arrays of bits packed into 64-bit words. The memory footprint of a bitset is - * determined by the largest number stored in it. - * @author Martin Odersky - * @since 2.8 - * @define coll bitset - * @define Coll `BitSet` - */ -trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSetLike[Int, This] { self => - - def empty: This - - /** The number of words (each with 64 bits) making up the set */ - protected def nwords: Int - - /** The words at index `idx`, or 0L if outside the range of the set - * '''Note:''' requires `idx >= 0` - */ - protected def word(idx: Int): Long - - /** Creates a new set of this kind from an array of longs - */ - protected def fromBitMaskNoCopy(elems: Array[Long]): This - - /** Creates a bit mask for this set as a new array of longs - */ - def toBitMask: Array[Long] = { - val a = new Array[Long](nwords) - var i = a.length - while(i > 0) { - i -= 1 - a(i) = word(i) - } - a - } - - override def size: Int = { - var s = 0 - var i = nwords - while (i > 0) { - i -= 1 - s += java.lang.Long.bitCount(word(i)) - } - s - } - - override def isEmpty: Boolean = 0 until nwords forall (i => 
word(i) == 0) - - implicit def ordering: Ordering[Int] = Ordering.Int - - def rangeImpl(from: Option[Int], until: Option[Int]): This = { - val a = toBitMask - val len = a.length - if (from.isDefined) { - var f = from.get - var pos = 0 - while (f >= 64 && pos < len) { - f -= 64 - a(pos) = 0 - pos += 1 - } - if (f > 0 && pos < len) a(pos) &= ~((1L << f)-1) - } - if (until.isDefined) { - val u = until.get - val w = u / 64 - val b = u % 64 - var clearw = w+1 - while (clearw < len) { - a(clearw) = 0 - clearw += 1 - } - if (w < len) a(w) &= (1L << b)-1 - } - fromBitMaskNoCopy(a) - } - - def iterator: Iterator[Int] = iteratorFrom(0) - - override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] { - private var current = start - private val end = nwords * WordLength - def hasNext: Boolean = { - while (current != end && !self.contains(current)) current += 1 - current != end - } - def next(): Int = - if (hasNext) { val r = current; current += 1; r } - else Iterator.empty.next() - } - - override def foreach[U](f: Int => U) { - /* NOTE: while loops are significantly faster as of 2.11 and - one major use case of bitsets is performance. Also, there - is nothing to do when all bits are clear, so use that as - the inner loop condition. */ - var i = 0 - while (i < nwords) { - var w = word(i) - var j = i * WordLength - while (w != 0L) { - if ((w&1L) == 1L) f(j) - w = w >>> 1 - j += 1 - } - i += 1 - } - } - - /** Computes the union between this bitset and another bitset by performing - * a bitwise "or". - * - * @param other the bitset to form the union with. - * @return a new bitset consisting of all bits that are in this - * bitset or in the given bitset `other`. 
- */ - def | (other: BitSet): This = { - val len = this.nwords max other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) | other.word(idx) - fromBitMaskNoCopy(words) - } - - /** Computes the intersection between this bitset and another bitset by performing - * a bitwise "and". - * @param other the bitset to intersect with. - * @return a new bitset consisting of all elements that are both in this - * bitset and in the given bitset `other`. - */ - def & (other: BitSet): This = { - val len = this.nwords min other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & other.word(idx) - fromBitMaskNoCopy(words) - } - - /** Computes the difference of this bitset and another bitset by performing - * a bitwise "and-not". - * - * @param other the set of bits to exclude. - * @return a bitset containing those bits of this - * bitset that are not also contained in the given bitset `other`. - */ - def &~ (other: BitSet): This = { - val len = this.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & ~other.word(idx) - fromBitMaskNoCopy(words) - } - - /** Computes the symmetric difference of this bitset and another bitset by performing - * a bitwise "exclusive-or". - * - * @param other the other bitset to take part in the symmetric difference. - * @return a bitset containing those bits of this - * bitset or the other bitset that are not contained in both bitsets. - */ - def ^ (other: BitSet): This = { - val len = this.nwords max other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) ^ other.word(idx) - fromBitMaskNoCopy(words) - } - - def contains(elem: Int): Boolean = - 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L - - /** Tests whether this bitset is a subset of another bitset. - * - * @param other the bitset to test. 
- * @return `true` if this bitset is a subset of `other`, i.e. if - * every bit of this set is also an element in `other`. - */ - def subsetOf(other: BitSet): Boolean = - (0 until nwords) forall (idx => (this.word(idx) & ~ other.word(idx)) == 0L) - - override def head: Int = { - val n = nwords - var i = 0 - while (i < n) { - val wi = word(i) - if (wi != 0L) return WordLength*i + java.lang.Long.numberOfTrailingZeros(wi) - i += 1 - } - throw new NoSuchElementException("Empty BitSet") - } - - override def last: Int = { - var i = nwords - 1 - while (i >= 0) { - val wi = word(i) - if (wi != 0L) return WordLength*i + 63 - java.lang.Long.numberOfLeadingZeros(wi) - i -= 1 - } - throw new NoSuchElementException("Empty BitSet") - } - - override def addString(sb: StringBuilder, start: String, sep: String, end: String) = { - sb append start - var pre = "" - val max = nwords * WordLength - var i = 0 - while (i != max) { - if (contains(i)) { - sb append pre append i - pre = sep - } - i += 1 - } - sb append end - } - - override def stringPrefix = "BitSet" -} - -/** Companion object for BitSets. 
Contains private data only */ -object BitSetLike { - /* Final vals can sometimes be inlined as constants (faster) */ - private[collection] final val LogWL = 6 - private final val WordLength = 64 - private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 - - private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { - var len = elems.length - while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 - var newlen = len - if (idx >= newlen && w != 0L) newlen = idx + 1 - val newelems = new Array[Long](newlen) - Array.copy(elems, 0, newelems, 0, len) - if (idx < newlen) newelems(idx) = w - else assert(w == 0L) - newelems - } -} diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala index 2e058819caec..b5a7f9658422 100644 --- a/src/library/scala/collection/BufferedIterator.scala +++ b/src/library/scala/collection/BufferedIterator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,14 +10,11 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection + /** Buffered iterators are iterators which provide a method `head` * that inspects the next element without discarding it. - * - * @author Martin Odersky - * @since 2.8 */ trait BufferedIterator[+A] extends Iterator[A] { diff --git a/src/library/scala/collection/BuildFrom.scala b/src/library/scala/collection/BuildFrom.scala new file mode 100644 index 000000000000..0530e4445bd5 --- /dev/null +++ b/src/library/scala/collection/BuildFrom.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound +import scala.collection.mutable.Builder +import scala.collection.immutable.WrappedString +import scala.reflect.ClassTag + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") +trait BuildFrom[-From, -A, +C] extends Any { self => + def fromSpecific(from: From)(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ + def newBuilder(from: From): Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) + + /** Partially apply a BuildFrom to a Factory */ + def toFactory(from: From): Factory[A, C] = new Factory[A, C] { + def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def newBuilder: Builder[A, C] = self.newBuilder(from) + } +} + +object BuildFrom extends BuildFromLowPriority1 { + + /** Build the source collection type from a MapOps */ + implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + } + + /** Build the source collection type from a SortedMapOps */ + implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + } + + implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = + new BuildFrom[C, Int, C] { + def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder + } + + implicit val buildFromString: BuildFrom[String, Char, String] = + 
new BuildFrom[String, Char, String] { + def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder + } + + implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = + new BuildFrom[WrappedString, Char, WrappedString] { + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder + } + + implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = + new BuildFrom[Array[_], A, Array[A]] { + def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = Factory.arrayFactory[A].fromSpecific(it) + def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder + } + + implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] = + new BuildFrom[View[A], B, View[B]] { + def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it) + def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder + } + +} + +trait BuildFromLowPriority1 extends BuildFromLowPriority2 { + + /** Build the source collection type from an Iterable with SortedOps */ + // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the + // implicit search space for faster compilation and reduced change of divergence. 
See the compilation + // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 + implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { + def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] + def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) + } + + implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = + new BuildFrom[String, A, immutable.IndexedSeq[A]] { + def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) + def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] + } +} + +trait BuildFromLowPriority2 { + /** Build the source collection type from an IterableOps */ + implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] + def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it) + } + + implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { + def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder + def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it) + } +} diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala deleted file mode 100644 index 54d57603215f..000000000000 
--- a/src/library/scala/collection/CustomParallelizable.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import parallel.Combiner - -trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Any with Parallelizable[A, ParRepr] { - override def par: ParRepr - override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("") -} - diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala index c1b3185c9fb6..ca7d2a67f757 100644 --- a/src/library/scala/collection/DefaultMap.scala +++ b/src/library/scala/collection/DefaultMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,36 +13,9 @@ package scala package collection -/** A default map which implements the `+` and `-` methods of maps. - * - * Instances that inherit from `DefaultMap[A, B]` still have to define: - * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * }}} - * It refers back to the original map. - * - * It might also be advisable to override `foreach` or `size` if efficient - * implementations can be found. - * - * @since 2.8 - */ -trait DefaultMap[A, +B] extends Map[A, B] { self => - - /** A default implementation which creates a new immutable map. - */ - override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { - val b = Map.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } - /** A default implementation which creates a new immutable map. 
- */ - override def - (key: A): Map[A, B] = { - val b = newBuilder - b ++= this filter (key != _._1) - b.result() - } -} +/** A default map which builds a default `immutable.Map` implementation for all + * transformations. + */ +@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") +trait DefaultMap[K, +V] extends Map[K, V] diff --git a/src/library/scala/collection/Factory.scala b/src/library/scala/collection/Factory.scala new file mode 100644 index 000000000000..4a05e6ce23bd --- /dev/null +++ b/src/library/scala/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. 
+ * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder: Builder[A, C] +} + +object Factory { + + implicit val stringFactory: Factory[Char, String] = new StringFactory + @SerialVersionUID(3L) + private class StringFactory extends Factory[Char, String] with Serializable { + def fromSpecific(it: IterableOnce[Char]): String = { + val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[Char, String] = new mutable.StringBuilder() + } + + implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + @SerialVersionUID(3L) + private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = { + val b = newBuilder + b.sizeHint(it, delta = 0) + b ++= it + b.result() + } + def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] + } + +} + +/** Base trait for companion objects of unconstrained collection types that may require + * multiple traversals of a source collection to build a target collection `CC`. + * + * @tparam CC Collection type constructor (e.g. `List`) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait IterableFactory[+CC[_]] extends Serializable { + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + def from[A](source: IterableOnce[A]): CC[A] + + /** An empty $coll + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] + + /** Creates a $coll with the specified elements. 
+ * @tparam A the type of the ${coll}'s elements + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A](elems: A*): CC[A] = from(elems) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. 
+ * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. 
+ */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. 
+ * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. 
+ * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + def newBuilder[A : Ev]: Builder[A, CC[A]] + + implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) +} + +object EvidenceIterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`) + * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) + private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] + } + + @SerialVersionUID(3L) + class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { + override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) + def empty[A : Ev]: CC[A] = delegate.empty + def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** Base trait for companion objects of collections that require an implicit `Ordering`. + * @tparam CC Collection type constructor (e.g. 
`SortedSet`) + */ +trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] + +object SortedIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) + extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] +} + +/** Base trait for companion objects of collections that require an implicit `ClassTag`. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { + + @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = + ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays + + /** Produces a $coll containing a sequence of increasing integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. 
*/ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. 
+ * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala deleted file mode 100644 index a416d7b53b84..000000000000 --- a/src/library/scala/collection/GenIterable.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and 
Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - - -import generic._ - - -/** A trait for all iterable collections which may possibly - * have their operations implemented in parallel. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenIterable[+A] -extends GenIterableLike[A, GenIterable[A]] - with GenTraversable[A] - with GenericTraversableTemplate[A, GenIterable] -{ - def seq: Iterable[A] - override def companion: GenericCompanion[GenIterable] = GenIterable -} - - -object GenIterable extends GenTraversableFactory[GenIterable] { - implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Iterable.newBuilder -} - diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala deleted file mode 100644 index ab63ebee5a0d..000000000000 --- a/src/library/scala/collection/GenIterableLike.scala +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic.{ CanBuildFrom => CBF } - -/** A template trait for all iterable collections which may possibly - * have their operations implemented in parallel. - * - * This trait contains abstract methods and methods that can be implemented - * directly in terms of other methods. 
- * - * @define Coll `GenIterable` - * @define coll general iterable collection - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define zipthatinfo the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `(A1, B)` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, B), That]`. - * is found. - * @define zipbfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `(A1, B)`. - * @define iterableInfo - * This is a base trait for all Scala collections that define an `iterator` - * method to step through one-by-one the collection's elements. - */ -trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] { - - def iterator: Iterator[A] - - /** Checks if the other iterable collection contains the same elements in the same order as this $coll. - * - * @param that the collection to compare with. - * @tparam A1 the type of the elements of collection `that`. - * @return `true`, if both collections contain the same elements in the same order, `false` otherwise. - * - * @usecase def sameElements(that: GenIterable[A]): Boolean - * @inheritdoc - * - * $orderDependent - * $willNotTerminateInf - * - * @param that the collection to compare with. - * @return `true`, if both collections contain the same elements in the same order, `false` otherwise. - */ - def sameElements[A1 >: A](that: GenIterable[A1]): Boolean - - /** Returns a $coll formed from this $coll and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is longer than the other, its remaining elements are ignored. 
- * - * @param that The iterable providing the second half of each result pair - * @tparam A1 the type of the first half of the returned pairs (this is always a supertype - * of the collection's element type `A`). - * @tparam B the type of the second half of the returned pairs - * @tparam That $zipthatinfo - * @param bf $zipbfinfo - * @return a new collection of type `That` containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the minimum of the lengths of this $coll and `that`. - * - * @usecase def zip[B](that: GenIterable[B]): $Coll[(A, B)] - * @inheritdoc - * - * $orderDependent - * - * @param that The iterable providing the second half of each result pair - * @tparam B the type of the second half of the returned pairs - * @return a new $coll containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the minimum of the lengths of this $coll and `that`. - */ - def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CBF[Repr, (A1, B), That]): That - - /** Zips this $coll with its indices. - * - * @tparam A1 the type of the first half of the returned pairs (this is always a supertype - * of the collection's element type `A`). - * @tparam That the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `(A1, Int)` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`. - * is found. - * @param bf an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `(A1, Int)`. - * @return A new collection of type `That` containing pairs consisting of all elements of this - * $coll paired with their index. Indices start at `0`. 
- * - * @usecase def zipWithIndex: $Coll[(A, Int)] - * @inheritdoc - * - * $orderDependent - * - * @return A new $coll containing pairs consisting of all elements of this - * $coll paired with their index. Indices start at `0`. - * @example - * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))` - * - */ - def zipWithIndex[A1 >: A, That](implicit bf: CBF[Repr, (A1, Int), That]): That - - /** Returns a $coll formed from this $coll and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is shorter than the other, - * placeholder elements are used to extend the shorter collection to the length of the longer. - * - * @param that the iterable providing the second half of each result pair - * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. - * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. - * @return a new collection of type `That` containing pairs consisting of - * corresponding elements of this $coll and `that`. The length - * of the returned collection is the maximum of the lengths of this $coll and `that`. - * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. - * - * @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)] - * @inheritdoc - * - * $orderDependent - * - * @param that The iterable providing the second half of each result pair - * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. - * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. - * @tparam B the type of the second half of the returned pairs - * @return a new $coll containing pairs consisting of - * corresponding elements of this $coll and `that`. 
The length - * of the returned collection is the maximum of the lengths of this $coll and `that`. - * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. - */ - def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That - -} diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala deleted file mode 100644 index fadbd494b599..000000000000 --- a/src/library/scala/collection/GenMap.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import scala.runtime.AbstractFunction1 - -/** A trait for all traversable collections which may possibly - * have their operations implemented in parallel. 
- * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenMap[K, +V] -extends GenMapLike[K, V, GenMap[K, V]] - with GenIterable[(K, V)] -{ - def seq: Map[K, V] - - def updated [V1 >: V](key: K, value: V1): GenMap[K, V1] -} - -object GenMap extends GenMapFactory[GenMap] { - def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty - - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), GenMap[K, V]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (K, V), GenMap[K, V]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] - - private[collection] def mapEquals[K1, V, K2](thisMap: GenMapLike[K1, V, _], thatMap: GenMap[K2, _]): Boolean = { - (thisMap eq thatMap) || - (thatMap canEqual thisMap) && - (thisMap.size == thatMap.size) && { - try { - val checker = new AbstractFunction1[(K1, V),Boolean] with Function0[V]{ - override def apply(kv: (K1,V)): Boolean = { - // Note: uncurry optimizes this to `get.getOrElse(..., this: Function0)`; there is no extra lambda allocated. - val v2 = thatMap.getOrElse(kv._1.asInstanceOf[K2], this.apply()) - // A mis-behaving user-defined equals method might not expect the sentinel value, and we should try to limit - // the chance of it escaping. Its also probably quicker to avoid the virtual call to equals. - (v2.asInstanceOf[AnyRef] ne this) && v2 == kv._2 - } - override def apply(): V = this.asInstanceOf[V] - } - thisMap forall checker - } catch { - case ex: ClassCastException => false - }} - } -} diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala deleted file mode 100644 index 675ab68c782c..000000000000 --- a/src/library/scala/collection/GenMapLike.scala +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** A trait for all maps upon which operations may be - * implemented in parallel. - * - * @define Coll `GenMap` - * @define coll general map - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define mapNote - * - * A map is a collection of bindings from keys to values, where there are - * no duplicate keys. - */ -trait GenMapLike[K, +V, +Repr] extends GenIterableLike[(K, V), Repr] with Equals with Parallelizable[(K, V), parallel.ParMap[K, V]] { - def default(key: K): V - def get(key: K): Option[V] - def apply(key: K): V - def seq: Map[K, V] - def +[V1 >: V](kv: (K, V1)): GenMap[K, V1] - def - (key: K): Repr - - // This hash code must be symmetric in the contents but ought not - // collide trivially. - override def hashCode()= scala.util.hashing.MurmurHash3.mapHash(seq) - - /** Returns the value associated with a key, or a default value if the key is not contained in the map. - * @param key the key. - * @param default a computation that yields a default value in case no binding for `key` is - * found in the map. - * @tparam B1 the result type of the default computation. - * @return the value associated with `key` if it exists, - * otherwise the result of the `default` computation. - * @usecase def getOrElse(key: K, default: => V): V - * @inheritdoc - */ - def getOrElse[V1 >: V](key: K, default: => V1): V1 - - /** Tests whether this map contains a binding for a key. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def contains(key: K): Boolean - - /** Tests whether this map contains a binding for a key. This method, - * which implements an abstract method of trait `PartialFunction`, - * is equivalent to `contains`. 
- * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def isDefinedAt(key: K): Boolean - - def keySet: GenSet[K] - - /** Collects all keys of this map in an iterable collection. - * - * @return the keys of this map as an iterable. - */ - def keys: GenIterable[K] - - /** Collects all values of this map in an iterable collection. - * - * @return the values of this map as an iterable. - */ - def values: GenIterable[V] - - /** Creates an iterator for all keys. - * - * @return an iterator over all keys. - */ - def keysIterator: Iterator[K] - - /** Creates an iterator for all values in this map. - * - * @return an iterator over all values that are associated with some key in this map. - */ - def valuesIterator: Iterator[V] - - /** Filters this map by retaining only keys satisfying a predicate. - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - def filterKeys(p: K => Boolean): GenMap[K, V] - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - def mapValues[W](f: V => W): GenMap[K, W] - - /** Compares two maps structurally; i.e., checks if all mappings - * contained in this map are also contained in the other map, - * and vice versa. - * - * @param that the other map - * @return `true` if both maps contain exactly the - * same mappings, `false` otherwise. - */ - override def equals(that: Any): Boolean = that match { - // copy/pasted to immutable.SortedMap.equals for binary compat reasons! 
- case that: GenMap[b, _] => - GenMap.mapEquals(this, that) - case _ => - false - } -} diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala deleted file mode 100644 index 8978982417e7..000000000000 --- a/src/library/scala/collection/GenSeq.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - - -import generic._ - - -/** A trait for all sequences which may possibly - * have their operations implemented in parallel. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenSeq[+A] -extends GenSeqLike[A, GenSeq[A]] - with GenIterable[A] - with Equals - with GenericTraversableTemplate[A, GenSeq] -{ - def seq: Seq[A] - override def companion: GenericCompanion[GenSeq] = GenSeq -} - - -object GenSeq extends GenTraversableFactory[GenSeq] { - implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Seq.newBuilder -} diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala deleted file mode 100644 index ab63a153c21d..000000000000 --- a/src/library/scala/collection/GenSeqLike.scala +++ /dev/null @@ -1,485 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ - -/** A template trait for all sequences which may be traversed - * in parallel. 
- * - * @define Coll GenSeq - * @define coll general sequence - * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define seqInfo - * Sequences are special cases of iterable collections of class `Iterable`. - * Unlike iterables, sequences always have a defined order of elements. - */ -trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] { - def seq: Seq[A] - - /** Selects an element by its index in the $coll. - * - * Example: - * - * {{{ - * scala> val x = List(1, 2, 3, 4, 5) - * x: List[Int] = List(1, 2, 3, 4, 5) - * - * scala> x(3) - * res1: Int = 4 - * }}} - * - * @param idx The index to select. - * @return the element of this $coll at index `idx`, where `0` indicates the first element. - * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. - */ - def apply(idx: Int): A - - /** The length of the $coll. - * - * $willNotTerminateInf - * - * Note: `xs.length` and `xs.size` yield the same result. - * - * @return the number of elements in this $coll. - * @throws IllegalArgumentException if the length of the sequence cannot be represented in an `Int`, for example, `(-1 to Int.MaxValue).length`. - */ - def length: Int - - /** Tests whether this $coll contains given index. - * - * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into - * a `PartialFunction[Int, A]`. - * - * @param idx the index to test - * @return `true` if this $coll contains an element at position `idx`, `false` otherwise. - */ - def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length) - - /** Computes length of longest segment whose elements all satisfy some predicate. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. 
- * @param from the index where the search starts. - * @return the length of the longest segment of this $coll starting from index `from` - * such that every element of the segment satisfies the predicate `p`. - */ - def segmentLength(p: A => Boolean, from: Int): Int - - /** Returns the length of the longest prefix whose elements all satisfy some predicate. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return the length of the longest prefix of this $coll - * such that every element of the segment satisfies the predicate `p`. - */ - def prefixLength(p: A => Boolean): Int = segmentLength(p, 0) - - /** Finds index of the first element satisfying some predicate after or at some start index. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @param from the start index - * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def indexWhere(p: A => Boolean, from: Int): Int - - /** Finds index of first element satisfying some predicate. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return the index of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) - - /** Finds index of first occurrence of some value in this $coll. - * - * @param elem the element value to search for. - * @tparam B the type of the element `elem`. - * @return the index of the first element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def indexOf(elem: A): Int - * @inheritdoc - * - * $mayNotTerminateInf - * - */ - def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) - - /** Finds index of first occurrence of some value in this $coll after or at some start index. - * - * @param elem the element value to search for. 
- * @tparam B the type of the element `elem`. - * @param from the start index - * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def indexOf(elem: A, from: Int): Int - * @inheritdoc - * - * $mayNotTerminateInf - * - */ - def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from) - - /** Finds index of last occurrence of some value in this $coll. - * - * @param elem the element value to search for. - * @tparam B the type of the element `elem`. - * @return the index of the last element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def lastIndexOf(elem: A): Int - * @inheritdoc - * - * $willNotTerminateInf - * - */ - def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem == _) - - /** Finds index of last occurrence of some value in this $coll before or at a given end index. - * - * @param elem the element value to search for. - * @param end the end index. - * @tparam B the type of the element `elem`. - * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - * - * @usecase def lastIndexOf(elem: A, end: Int): Int - * @inheritdoc - */ - def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem == _, end) - - /** Finds index of last element satisfying some predicate. - * - * $willNotTerminateInf - * - * @param p the predicate used to test elements. - * @return the index of the last element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, length - 1) - - /** Finds index of last element satisfying some predicate before or at given end index. - * - * @param p the predicate used to test elements. 
- * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def lastIndexWhere(p: A => Boolean, end: Int): Int - - /** Returns new $coll with elements in reversed order. - * - * $willNotTerminateInf - * - * @return A new $coll with all elements of this $coll in reversed order. - */ - def reverse: Repr - - /** - * Builds a new collection by applying a function to all elements of this $coll and - * collecting the results in reversed order. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the given function - * `f` to each element of this $coll and collecting the results in reversed order. - * - * @usecase def reverseMap[B](f: A => B): $Coll[B] - * @inheritdoc - * - * $willNotTerminateInf - * - * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient. - * - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results in reversed order. - */ - def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Tests whether this $coll starts with the given sequence. - * - * @param that the sequence to test - * @return `true` if this collection has `that` as a prefix, `false` otherwise. - */ - def startsWith[B](that: GenSeq[B]): Boolean = startsWith(that, 0) - - /** Tests whether this $coll contains the given sequence at a given index. - * - * '''Note''': If the both the receiver object `this` and the argument - * `that` are infinite sequences this method may not terminate. - * - * @param that the sequence to test - * @param offset the index where the sequence is searched. - * @return `true` if the sequence `that` is contained in this $coll at - * index `offset`, otherwise `false`. 
- */ - def startsWith[B](that: GenSeq[B], offset: Int): Boolean - - /** Tests whether this $coll ends with the given sequence. - * $willNotTerminateInf - * @param that the sequence to test - * @return `true` if this $coll has `that` as a suffix, `false` otherwise. - */ - def endsWith[B](that: GenSeq[B]): Boolean - - /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence. - * - * @param from the index of the first replaced element - * @param patch the replacement sequence - * @param replaced the number of elements to drop in the original $coll - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new $coll consisting of all elements of this $coll - * except that `replaced` elements starting from `from` are replaced - * by `patch`. - * - * @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A] - * @inheritdoc - * - * @return a new $coll consisting of all elements of this $coll - * except that `replaced` elements starting from `from` are replaced - * by `patch`. - */ - def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of this $coll with one single replaced element. - * @param index the position of the replacement - * @param elem the replacing element - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`. - * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. - * - * @usecase def updated(index: Int, elem: A): $Coll[A] - * @inheritdoc - * - * @return a copy of this $coll with the element at position `index` replaced by `elem`. - */ - def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of the $coll with an element prepended. 
- * - * @param elem the prepended element - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` consisting of `elem` followed - * by all elements of this $coll. - * - * @usecase def +:(elem: A): $Coll[A] - * @inheritdoc - * - * Note that :-ending operators are right associative (see example). - * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side. - * - * Also, the original $coll is not modified, so you will want to capture the result. - * - * Example: - * {{{ - * scala> val x = List(1) - * x: List[Int] = List(1) - * - * scala> val y = 2 +: x - * y: List[Int] = List(2, 1) - * - * scala> println(x) - * List(1) - * }}} - * - * @return a new $coll consisting of `elem` followed - * by all elements of this $coll. - */ - def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of this $coll with an element appended. - * - * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side. - * - * @param elem the appended element - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` consisting of - * all elements of this $coll followed by `elem`. - * - * @usecase def :+(elem: A): $Coll[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * Example: - * {{{ - * scala> val a = List(1) - * a: List[Int] = List(1) - * - * scala> val b = a :+ 2 - * b: List[Int] = List(1, 2) - * - * scala> println(a) - * List(1) - * }}} - * - * @return a new $coll consisting of - * all elements of this $coll followed by `elem`. - */ - def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** A copy of this $coll with an element value appended until a given target length is reached. - * - * @param len the target length - * @param elem the padding value - * @tparam B the element type of the returned $coll. 
- * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` consisting of - * all elements of this $coll followed by the minimal number of occurrences of `elem` so - * that the resulting collection has a length of at least `len`. - * @usecase def padTo(len: Int, elem: A): $Coll[A] - * @inheritdoc - * - * @return a new $coll consisting of - * all elements of this $coll followed by the minimal number of occurrences of `elem` so - * that the resulting $coll has a length of at least `len`. - */ - def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Tests whether every element of this $coll relates to the - * corresponding element of another sequence by satisfying a test predicate. - * - * @param that the other sequence - * @param p the test predicate, which relates elements from both sequences - * @tparam B the type of the elements of `that` - * @return `true` if both sequences have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this $coll - * and `y` of `that`, otherwise `false`. - */ - def corresponds[B](that: GenSeq[B])(p: (A, B) => Boolean): Boolean - - def toSeq: GenSeq[A] - - /** Produces a new sequence which contains all elements of this $coll and also all elements of - * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. - * - * @param that the sequence to add. - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements of this $coll - * followed by all elements of `that`. - * - * @usecase def union(that: GenSeq[A]): $Coll[A] - * @inheritdoc - * - * Another way to express this - * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`. - * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets. 
- * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * followed by all elements of `that`. - */ - def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that - - /** Computes the multiset difference between this $coll and another sequence. - * - * @param that the sequence of elements to remove - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - * - * @usecase def diff(that: GenSeq[A]): $Coll[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - */ - def diff[B >: A](that: GenSeq[B]): Repr - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * - * @usecase def intersect(that: GenSeq[A]): $Coll[A] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. 
- * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[B >: A](that: GenSeq[B]): Repr - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. - */ - def distinct: Repr - - /** Hashcodes for $Coll produce a value from the hashcodes of all the - * elements of the $coll. - */ - override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) - - /** The equals method for arbitrary sequences. Compares this sequence to - * some other object. - * @param that The object to compare the sequence to - * @return `true` if `that` is a sequence that has the same elements as - * this sequence in the same order, `false` otherwise - */ - override def equals(that: Any): Boolean = that match { - case that: GenSeq[_] => (that eq this.asInstanceOf[AnyRef]) || (that canEqual this) && (this sameElements that) - case _ => false - } - -} diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala deleted file mode 100644 index ffe9b8644804..000000000000 --- a/src/library/scala/collection/GenSet.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - - -import generic._ - - -/** A trait for sets which may possibly - * have their operations implemented in parallel. 
- * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenSet[A] -extends GenSetLike[A, GenSet[A]] - with GenIterable[A] - with GenericSetTemplate[A, GenSet] -{ - override def companion: GenericCompanion[GenSet] = GenSet - def seq: Set[A] -} - - -object GenSet extends GenTraversableFactory[GenSet] { - implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Set.newBuilder - private[collection] def setEquals[A1, A2](thisSet: GenSetLike[A1, _], thatSet: GenSet[A2]): Boolean = { - (thisSet eq thatSet) || - (thatSet canEqual thisSet) && - (thisSet.size == thatSet.size) && - (try thisSet subsetOf thatSet.asInstanceOf[GenSet[A1]] - catch { case ex: ClassCastException => false }) - } -} - diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala deleted file mode 100644 index 1b2fa145a11b..000000000000 --- a/src/library/scala/collection/GenSetLike.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - - -/** A template trait for sets which may possibly - * have their operations implemented in parallel. - * - * @define Coll GenSet - * @define coll general set - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - * @define setNote - * - * A set is a collection that contains no duplicate elements. 
- */ -trait GenSetLike[A, +Repr] -extends GenIterableLike[A, Repr] - with (A => Boolean) - with Equals - with Parallelizable[A, parallel.ParSet[A]] { - - def iterator: Iterator[A] - def contains(elem: A): Boolean - def +(elem: A): Repr - def -(elem: A): Repr - - def seq: Set[A] - - /** Tests if some element is contained in this set. - * - * This method is equivalent to `contains`. It allows sets to be interpreted as predicates. - * @param elem the element to test for membership. - * @return `true` if `elem` is contained in this set, `false` otherwise. - */ - def apply(elem: A): Boolean = this contains elem - - /** Computes the intersection between this set and another set. - * - * @param that the set to intersect with. - * @return a new set consisting of all elements that are both in this - * set and in the given set `that`. - */ - def intersect(that: GenSet[A]): Repr = this filter that - - /** Computes the intersection between this set and another set. - * - * '''Note:''' Same as `intersect`. - * @param that the set to intersect with. - * @return a new set consisting of all elements that are both in this - * set and in the given set `that`. - */ - def &(that: GenSet[A]): Repr = this intersect that - - /** Computes the union between of set and another set. - * - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - def union(that: GenSet[A]): Repr - - /** Computes the union between this set and another set. - * - * '''Note:''' Same as `union`. - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - def | (that: GenSet[A]): Repr = this union that - - /** Computes the difference of this set and another set. - * - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. 
- */ - def diff(that: GenSet[A]): Repr - - /** The difference of this set and another set. - * - * '''Note:''' Same as `diff`. - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. - */ - def &~(that: GenSet[A]): Repr = this diff that - - /** Tests whether this set is a subset of another set. - * - * @param that the set to test. - * @return `true` if this set is a subset of `that`, i.e. if - * every element of this set is also an element of `that`. - */ - def subsetOf(that: GenSet[A]): Boolean = this forall that - - /** Compares this set with another object for equality. - * - * '''Note:''' This operation contains an unchecked cast: if `that` - * is a set, it will assume with an unchecked cast - * that it has the same element type as this set. - * Any subsequent ClassCastException is treated as a `false` result. - * @param that the other object - * @return `true` if `that` is a set which contains the same elements - * as this set. - */ - override def equals(that: Any): Boolean = that match { - // copy/pasted to immutable.SortedSet.equals for binary compat reasons! - case that: GenSet[_] => - GenSet.setEquals(this, that) - case _ => - false - } - - // Careful! Don't write a Set's hashCode like: - // override def hashCode() = this map (_.hashCode) sum - // Calling map on a set drops duplicates: any hashcode collisions would - // then be dropped before they can be added. - // Hash should be symmetric in set entries, but without trivial collisions. - override def hashCode()= scala.util.hashing.MurmurHash3.setHash(seq) -} diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala deleted file mode 100644 index b26b491dc887..000000000000 --- a/src/library/scala/collection/GenTraversable.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ - -/** A trait for all traversable collections which may possibly - * have their operations implemented in parallel. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenTraversable[+A] -extends GenTraversableLike[A, GenTraversable[A]] - with GenTraversableOnce[A] - with GenericTraversableTemplate[A, GenTraversable] -{ - def seq: Traversable[A] - def companion: GenericCompanion[GenTraversable] = GenTraversable -} - -object GenTraversable extends GenTraversableFactory[GenTraversable] { - implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A] = Traversable.newBuilder -} diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala deleted file mode 100644 index fefdb7f06d25..000000000000 --- a/src/library/scala/collection/GenTraversableLike.scala +++ /dev/null @@ -1,384 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - - -import generic._ -import scala.annotation.migration - - -/** A template trait for all traversable collections upon which operations - * may be implemented in parallel. - * - * @define thatinfo the class of the returned collection. 
Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `B` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]` - * is found. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines - * the result class `That` from the current representation type `Repr` and - * the new element type `B`. - * @define orderDependent - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered. - * @define orderDependentFold - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered. - * or the operator is associative and commutative. - * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - * - * @define traversableInfo - * This is a base trait of all kinds of Scala collections. - * - * @define Coll `GenTraversable` - * @define coll general collection - * @define collectExample - * @tparam A the collection element type. - * @tparam Repr the actual type of the element container. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] { - - def repr: Repr - - def size: Int - - /** Selects the first element of this $coll. - * $orderDependent - * @return the first element of this $coll. - * @throws NoSuchElementException if the $coll is empty. - */ - def head: A - - /** Optionally selects the first element. - * $orderDependent - * @return the first element of this $coll if it is nonempty, - * `None` if it is empty. - */ - def headOption: Option[A] - - /** Tests whether this $coll can be repeatedly traversed. 
- * @return `true` - */ - def isTraversableAgain: Boolean - - /** Selects all elements except the first. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the first one. - * @throws UnsupportedOperationException if the $coll is empty. - */ - def tail: Repr - - /** Selects the last element. - * $orderDependent - * @return The last element of this $coll. - * @throws NoSuchElementException If the $coll is empty. - */ - def last: A - - /** Optionally selects the last element. - * $orderDependent - * @return the last element of this $coll$ if it is nonempty, - * `None` if it is empty. - */ - def lastOption: Option[A] - - /** Selects all elements except the last. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the last one. - * @throws UnsupportedOperationException if the $coll is empty. - */ - def init: Repr - - /** Computes a prefix scan of the elements of the collection. - * - * Note: The neutral element `z` may be applied more than once. - * - * @tparam B element type of the resulting collection - * @tparam That type of the resulting collection - * @param z neutral element for the operator `op` - * @param op the associative operator for the scan - * @param cbf combiner factory which provides a combiner - * - * @return a new $coll containing the prefix scan of the elements in this $coll - */ - def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That - - /** Produces a collection containing cumulative results of applying the - * operator going left to right. 
- * - * $willNotTerminateInf - * $orderDependent - * - * @tparam B the type of the elements in the resulting collection - * @tparam That the actual type of the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @param bf $bfinfo - * @return collection with intermediate results - */ - def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Produces a collection containing cumulative results of applying the operator going right to left. - * The head of the collection is the last cumulative result. - * $willNotTerminateInf - * $orderDependent - * - * Example: - * {{{ - * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) - * }}} - * - * @tparam B the type of the elements in the resulting collection - * @tparam That the actual type of the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @param bf $bfinfo - * @return collection with intermediate results - */ - @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") - def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - def foreach[U](f: A => U): Unit - - /** Builds a new collection by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the given function - * `f` to each element of this $coll and collecting the results. - * - * @usecase def map[B](f: A => B): $Coll[B] - * @inheritdoc - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. 
- */ - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Builds a new collection by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - * - * @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B] - * @inheritdoc - * - * $collectExample - * - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Builds a new collection by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - * - * @usecase def flatMap[B](f: A => GenTraversableOnce[B]): $Coll[B] - * @inheritdoc - * - * For example: - * - * {{{ - * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+") - * }}} - * - * The type of the resulting collection is guided by the static type of $coll. This might - * cause unexpected results sometimes. 
For example: - * - * {{{ - * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set - * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet) - * - * // lettersOf will return a Set[Char], not a Seq - * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq) - * - * // xs will be an Iterable[Int] - * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2) - * - * // ys will be a Map[Int, Int] - * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2) - * }}} - * - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the - * right hand operand. The element type of the $coll is the most specific superclass encompassing - * the element types of the two operands. - * - * @param that the traversable to append. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements - * of this $coll followed by all elements of `that`. - */ - def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That - - /** Selects all elements of this $coll which satisfy a predicate. - * - * @param pred the predicate used to test elements. - * @return a new $coll consisting of all elements of this $coll that satisfy the given - * predicate `p`. Their order may not be preserved. - */ - def filter(pred: A => Boolean): Repr - - /** Selects all elements of this $coll which do not satisfy a predicate. - * - * @param pred the predicate used to test elements. 
- * @return a new $coll consisting of all elements of this $coll that do not satisfy the given - * predicate `p`. Their order may not be preserved. - */ - def filterNot(pred: A => Boolean): Repr - - /** Partitions this $coll in two ${coll}s according to a predicate. - * - * @param pred the predicate on which to partition. - * @return a pair of ${coll}s: the first $coll consists of all elements that - * satisfy the predicate `p` and the second $coll consists of all elements - * that don't. The relative order of the elements in the resulting ${coll}s - * may not be preserved. - */ - def partition(pred: A => Boolean): (Repr, Repr) - - /** Partitions this $coll into a map of ${coll}s according to some discriminator function. - * - * Note: this method is not re-implemented by views. This means - * when applied to a view it will always force the view and - * return a new $coll. - * - * @param f the discriminator function. - * @tparam K the type of keys returned by the discriminator function. - * @return A map from keys to ${coll}s such that the following invariant holds: - * {{{ - * (xs groupBy f)(k) = xs filter (x => f(x) == k) - * }}} - * That is, every key `k` is bound to a $coll of those elements `x` - * for which `f(x)` equals `k`. - * - */ - def groupBy[K](f: A => K): GenMap[K, Repr] - - /** Selects first ''n'' elements. - * $orderDependent - * @param n the number of elements to take from this $coll. - * @return a $coll consisting only of the first `n` elements of this $coll, - * or else the whole $coll, if it has less than `n` elements. - * If `n` is negative, returns an empty $coll. - */ - def take(n: Int): Repr - - /** Selects all elements except first ''n'' ones. - * $orderDependent - * @param n the number of elements to drop from this $coll. - * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the - * empty $coll, if this $coll has less than `n` elements. - * If `n` is negative, don't drop any elements. 
- */ - def drop(n: Int): Repr - - /** Selects an interval of elements. The returned collection is made up - * of all elements `x` which satisfy the invariant: - * {{{ - * from <= indexOf(x) < until - * }}} - * $orderDependent - * - * @param unc_from the lowest index to include from this $coll. - * @param unc_until the lowest index to EXCLUDE from this $coll. - * @return a $coll containing the elements greater than or equal to - * index `from` extending up to (but not including) index `until` - * of this $coll. - */ - def slice(unc_from: Int, unc_until: Int): Repr - - /** Splits this $coll into two at a given position. - * Note: `c splitAt n` is equivalent to (but possibly more efficient than) - * `(c take n, c drop n)`. - * $orderDependent - * - * @param n the position at which to split. - * @return a pair of ${coll}s consisting of the first `n` - * elements of this $coll, and the other elements. - */ - def splitAt(n: Int): (Repr, Repr) - - /** Takes longest prefix of elements that satisfy a predicate. - * $orderDependent - * @param pred The predicate used to test elements. - * @return the longest prefix of this $coll whose elements all satisfy - * the predicate `p`. - */ - def takeWhile(pred: A => Boolean): Repr - - /** Splits this $coll into a prefix/suffix pair according to a predicate. - * - * Note: `c span p` is equivalent to (but possibly more efficient than) - * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the - * predicate `p` does not cause any side-effects. - * $orderDependent - * - * @param pred the test predicate - * @return a pair consisting of the longest prefix of this $coll whose - * elements all satisfy `p`, and the rest of this $coll. - */ - def span(pred: A => Boolean): (Repr, Repr) - - /** Drops longest prefix of elements that satisfy a predicate. - * $orderDependent - * @param pred The predicate used to test elements. - * @return the longest suffix of this $coll whose first element - * does not satisfy the predicate `p`. 
- */ - def dropWhile(pred: A => Boolean): Repr - - /** Defines the prefix of this object's `toString` representation. - * - * @return a string representation which starts the result of `toString` - * applied to this $coll. By default the string prefix is the - * simple name of the collection class $coll. - */ - def stringPrefix: String - -} diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala deleted file mode 100644 index 3e3bcb998a3d..000000000000 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ /dev/null @@ -1,675 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.reflect.ClassTag -import scala.collection.generic.CanBuildFrom -import scala.annotation.unchecked.{ uncheckedVariance => uV } -import scala.language.higherKinds - -/** A template trait for all traversable-once objects which may be - * traversed in parallel. - * - * Methods in this trait are either abstract or can be implemented in terms - * of other methods. - * - * @define Coll `GenTraversableOnce` - * @define coll collection or iterator - * @define possiblyparinfo - * This trait may possibly have operations implemented in parallel. - * @define undefinedorder - * The order in which operations are performed on elements is unspecified - * and may be nondeterministic. - * @define orderDependent - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered. - * @define orderDependentFold - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered or the operator is associative - * and commutative. 
- * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - * - * @author Martin Odersky - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait GenTraversableOnce[+A] extends Any { - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - * - * Note: this method underlies the implementation of most other bulk operations. - * It's important to implement this method in an efficient way. - * - */ - def foreach[U](f: A => U): Unit - - /** Tests whether this $coll is known to have a finite size. - * All strict collections are known to have finite size. For a non-strict - * collection such as `Stream`, the predicate returns `'''true'''` if all - * elements have been computed. It returns `'''false'''` if the stream is - * not yet evaluated to the end. Non-empty Iterators usually return - * `'''false'''` even if they were created from a collection with a known - * finite size. - * - * Note: many collection methods will not work on collections of infinite sizes. - * The typical failure mode is an infinite loop. These methods always attempt a - * traversal without checking first that `hasDefiniteSize` returns `'''true'''`. - * However, checking `hasDefiniteSize` can provide an assurance that size is - * well-defined and non-termination is not a concern. - * - * @return `'''true'''` if this collection is known to have finite size, - * `'''false'''` otherwise. - */ - def hasDefiniteSize: Boolean - - def seq: TraversableOnce[A] - - /** The size of this $coll. 
- * - * $willNotTerminateInf - * - * @return the number of elements in this $coll. - */ - def size: Int - - /** The size of this $coll, if it can be cheaply computed - * - * @return the number of elements in this $coll, or -1 if the size cannot be determined cheaply - */ - protected[collection] def sizeHintIfCheap: Int = -1 - - /** Tests whether the $coll is empty. - * - * Note: Implementations in subclasses that are not repeatedly traversable must take - * care not to consume any elements when `isEmpty` is called. - * - * @return `true` if the $coll contains no elements, `false` otherwise. - */ - def isEmpty: Boolean - - /** Tests whether the $coll is not empty. - * - * @return `true` if the $coll contains at least one element, `false` otherwise. - */ - def nonEmpty: Boolean - - /** Tests whether this $coll can be repeatedly traversed. Always - * true for Traversables and false for Iterators unless overridden. - * - * @return `true` if it is repeatedly traversable, `false` otherwise. - */ - def isTraversableAgain: Boolean - - /** Reduces the elements of this $coll using the specified associative binary operator. - * - * $undefinedorder - * - * @tparam A1 A type parameter for the binary operator, a supertype of `A`. - * @param op A binary operator that must be associative. - * @return The result of applying reduce operator `op` between all the elements if the $coll is nonempty. - * @throws UnsupportedOperationException if this $coll is empty. - */ - def reduce[A1 >: A](op: (A1, A1) => A1): A1 - - /** Reduces the elements of this $coll, if any, using the specified - * associative binary operator. - * - * $undefinedorder - * - * @tparam A1 A type parameter for the binary operator, a supertype of `A`. - * @param op A binary operator that must be associative. - * @return An option value containing result of applying reduce operator `op` between all - * the elements if the collection is nonempty, and `None` otherwise. 
- */ - def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1] - - /** Folds the elements of this $coll using the specified associative - * binary operator. - * - * $undefinedorder - * $willNotTerminateInf - * - * @tparam A1 a type parameter for the binary operator, a supertype of `A`. - * @param z a neutral element for the fold operation; may be added to the result - * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication). - * @param op a binary operator that must be associative. - * @return the result of applying the fold operator `op` between all the elements and `z`, or `z` if this $coll is empty. - */ - def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 - - /** Applies a binary operator to a start value and all elements of this $coll, - * going left to right. - * - * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as - * `xs foldLeft z`. - * - * Examples: - * - * Note that the folding function used to compute b is equivalent to that used to compute c. - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = (5 /: a)(_+_) - * b: Int = 15 - * - * scala> val c = (5 /: a)((x,y) => x + y) - * c: Int = 15 - * }}} - - * $willNotTerminateInf - * $orderDependentFold - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going left to right with the start value `z` on the left: - * {{{ - * op(...op(op(z, x_1), x_2), ..., x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - */ - @deprecated("Use foldLeft instead of /:", "2.12.10") - def /:[B](z: B)(op: (B, A) => B): B - - /** Applies a binary operator to all elements of this $coll and a start value, - * going right to left. 
- * - * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as - * `xs foldRight z`. - * $willNotTerminateInf - * $orderDependentFold - * - * Examples: - * - * Note that the folding function used to compute b is equivalent to that used to compute c. - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = (a :\ 5)(_+_) - * b: Int = 15 - * - * scala> val c = (a :\ 5)((x,y) => x + y) - * c: Int = 15 - * - * }}} - * - * @param z the start value - * @param op the binary operator - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going right to left with the start value `z` on the right: - * {{{ - * op(x_1, op(x_2, ... op(x_n, z)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - */ - @deprecated("Use foldRight instead of :\\", "2.12.10") - def :\[B](z: B)(op: (A, B) => B): B - - /** Applies a binary operator to a start value and all elements of this $coll, - * going left to right. - * - * $willNotTerminateInf - * $orderDependentFold - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going left to right with the start value `z` on the left: - * {{{ - * op(...op(z, x_1), x_2, ..., x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * Returns `z` if this $coll is empty. - */ - def foldLeft[B](z: B)(op: (B, A) => B): B - - /** Applies a binary operator to all elements of this $coll and a start value, - * going right to left. - * - * $willNotTerminateInf - * $orderDependentFold - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. 
- * @return the result of inserting `op` between consecutive elements of this $coll, - * going right to left with the start value `z` on the right: - * {{{ - * op(x_1, op(x_2, ... op(x_n, z)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * Returns `z` if this $coll is empty. - */ - def foldRight[B](z: B)(op: (A, B) => B): B - - /** Aggregates the results of applying an operator to subsequent elements. - * - * This is a more general form of `fold` and `reduce`. It is similar to - * `foldLeft` in that it doesn't require the result to be a supertype of the - * element type. In addition, it allows parallel collections to be processed - * in chunks, and then combines the intermediate results. - * - * `aggregate` splits the $coll into partitions and processes each - * partition by sequentially applying `seqop`, starting with `z` (like - * `foldLeft`). Those intermediate results are then combined by using - * `combop` (like `fold`). The implementation of this operation may operate - * on an arbitrary number of collection partitions (even 1), so `combop` may - * be invoked an arbitrary number of times (even 0). - * - * As an example, consider summing up the integer values of a list of chars. - * The initial value for the sum is 0. First, `seqop` transforms each input - * character to an Int and adds it to the sum (of the partition). Then, - * `combop` just needs to sum up the intermediate results of the partitions: - * {{{ - * List('a', 'b', 'c').aggregate(0)({ (sum, ch) => sum + ch.toInt }, { (p1, p2) => p1 + p2 }) - * }}} - * - * @tparam B the type of accumulated results - * @param z the initial value for the accumulated result of the partition - this - * will typically be the neutral element for the `seqop` operator (e.g. 
- * `Nil` for list concatenation or `0` for summation) and may be evaluated - * more than once - * @param seqop an operator used to accumulate results within a partition - * @param combop an associative operator used to combine results from different partitions - */ - def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B - - /** Applies a binary operator to all elements of this $coll, going right to left. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going right to left: - * {{{ - * op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * @throws UnsupportedOperationException if this $coll is empty. - */ - def reduceRight[B >: A](op: (A, B) => B): B - - /** Optionally applies a binary operator to all elements of this $coll, going left to right. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty, - * `None` otherwise. - */ - def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] - - /** Optionally applies a binary operator to all elements of this $coll, going - * right to left. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty, - * `None` otherwise. - */ - def reduceRightOption[B >: A](op: (A, B) => B): Option[B] - - /** Counts the number of elements in the $coll which satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return the number of elements satisfying the predicate `p`. 
- */ - def count(p: A => Boolean): Int - - /** Sums up the elements of this collection. - * - * @param num an implicit parameter defining a set of numeric operations - * which includes the `+` operator to be used in forming the sum. - * @tparam A1 the result type of the `+` operator. - * @return the sum of all elements of this $coll with respect to the `+` operator in `num`. - * - * @usecase def sum: A - * @inheritdoc - * - * @return the sum of all elements in this $coll of numbers of type `Int`. - * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation - * can be used as element type of the $coll and as result type of `sum`. - * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`. - * - */ - def sum[A1 >: A](implicit num: Numeric[A1]): A1 - - /** Multiplies up the elements of this collection. - * - * @param num an implicit parameter defining a set of numeric operations - * which includes the `*` operator to be used in forming the product. - * @tparam A1 the result type of the `*` operator. - * @return the product of all elements of this $coll with respect to the `*` operator in `num`. - * - * @usecase def product: A - * @inheritdoc - * - * @return the product of all elements in this $coll of numbers of type `Int`. - * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation - * can be used as element type of the $coll and as result type of `product`. - * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`. - */ - def product[A1 >: A](implicit num: Numeric[A1]): A1 - - /** Finds the smallest element. - * - * @param ord An ordering to be used for comparing elements. - * @tparam A1 The type over which the ordering is defined. - * @return the smallest element of this $coll with respect to the ordering `ord`. - * - * @usecase def min: A - * @inheritdoc - * - * @return the smallest element of this $coll - * @throws UnsupportedOperationException if this $coll is empty. 
- */ - def min[A1 >: A](implicit ord: Ordering[A1]): A - - /** Finds the largest element. - * - * @param ord An ordering to be used for comparing elements. - * @tparam A1 The type over which the ordering is defined. - * @return the largest element of this $coll with respect to the ordering `ord`. - * - * @usecase def max: A - * @inheritdoc - * - * @return the largest element of this $coll. - * @throws UnsupportedOperationException if this $coll is empty. - */ - def max[A1 >: A](implicit ord: Ordering[A1]): A - - /** Finds the first element which yields the largest value measured by function f. - * - * @param cmp An ordering to be used for comparing elements. - * @tparam B The result type of the function f. - * @param f The measuring function. - * @return the first element of this $coll with the largest value measured by function f - * with respect to the ordering `cmp`. - * - * @usecase def maxBy[B](f: A => B): A - * @inheritdoc - * - * @return the first element of this $coll with the largest value measured by function f. - * @throws UnsupportedOperationException if this $coll is empty. - */ - def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A - - /** Finds the first element which yields the smallest value measured by function f. - * - * @param cmp An ordering to be used for comparing elements. - * @tparam B The result type of the function f. - * @param f The measuring function. - * @return the first element of this $coll with the smallest value measured by function f - * with respect to the ordering `cmp`. - * - * @usecase def minBy[B](f: A => B): A - * @inheritdoc - * - * @return the first element of this $coll with the smallest value measured by function f. - * @throws UnsupportedOperationException if this $coll is empty. - */ - def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A - - /** Tests whether a predicate holds for all elements of this $coll. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. 
- * @return `true` if this $coll is empty or the given predicate `p` - * holds for all elements of this $coll, otherwise `false`. - */ - def forall(@deprecatedName('pred) p: A => Boolean): Boolean - - /** Tests whether a predicate holds for at least one element of this $coll. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` is satisfied by at least one element of this $coll, otherwise `false` - */ - def exists(@deprecatedName('pred) p: A => Boolean): Boolean - - /** Finds the first element of the $coll satisfying a predicate, if any. - * - * $mayNotTerminateInf - * $orderDependent - * - * @param p the predicate used to test elements. - * @return an option value containing the first element in the $coll - * that satisfies `p`, or `None` if none exists. - */ - def find(@deprecatedName('pred) p: A => Boolean): Option[A] - - /** Copies the elements of this $coll to an array. - * Fills the given array `xs` with values of this $coll. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the target array is reached. - * - * @param xs the array to fill. - * @tparam B the type of the elements of the target array. - * - * @usecase def copyToArray(xs: Array[A]): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B]): Unit - - /** Copies the elements of this $coll to an array. - * Fills the given array `xs` with values of this $coll, beginning at index `start`. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the target array is reached. - * - * @param xs the array to fill. - * @param start the starting index. - * @tparam B the type of the elements of the target array. 
- * - * @usecase def copyToArray(xs: Array[A], start: Int): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B], start: Int): Unit - - /** Copies the elements of this $coll to an array. - * Fills the given array `xs` with at most `len` elements of - * this $coll, starting at position `start`. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the target array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index. - * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the target array. - * - * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit - - /** Displays all elements of this $coll in a string using start, end, and - * separator strings. - * - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return a string representation of this $coll. The resulting string - * begins with the string `start` and ends with the string - * `end`. Inside, the string representations (w.r.t. the method - * `toString`) of all elements of this $coll are separated by - * the string `sep`. - * - * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"` - */ - def mkString(start: String, sep: String, end: String): String - - /** Displays all elements of this $coll in a string using a separator string. - * - * @param sep the separator string. - * @return a string representation of this $coll. In the resulting string - * the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * @example `List(1, 2, 3).mkString("|") = "1|2|3"` - */ - def mkString(sep: String): String - - /** Displays all elements of this $coll in a string. 
- * - * @return a string representation of this $coll. In the resulting string - * the string representations (w.r.t. the method `toString`) - * of all elements of this $coll follow each other without any - * separator string. - */ - def mkString: String - - /** Converts this $coll to an array. - * - * @tparam A1 the type of the elements of the array. An `ClassTag` for - * this type must be available. - * @return an array containing all elements of this $coll. - * - * @usecase def toArray: Array[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * @return an array containing all elements of this $coll. - * An `ClassTag` must be available for the element type of this $coll. - */ - def toArray[A1 >: A: ClassTag]: Array[A1] - - /** Converts this $coll to a list. - * $willNotTerminateInf - * @return a list containing all elements of this $coll. - */ - def toList: List[A] - - /** Converts this $coll to an indexed sequence. - * $willNotTerminateInf - * @return an indexed sequence containing all elements of this $coll. - */ - def toIndexedSeq: immutable.IndexedSeq[A] - - /** Converts this $coll to a stream. - * @return a stream containing all elements of this $coll. - */ - def toStream: Stream[A] - - /** Returns an Iterator over the elements in this $coll. Will return - * the same Iterator if this instance is already an Iterator. - * $willNotTerminateInf - * @return an Iterator containing all elements of this $coll. - */ - def toIterator: Iterator[A] - - /** Uses the contents of this $coll to create a new mutable buffer. - * $willNotTerminateInf - * @return a buffer containing all elements of this $coll. - */ - def toBuffer[A1 >: A]: scala.collection.mutable.Buffer[A1] - - /** Converts this $coll to an unspecified Traversable. Will return - * the same collection if this instance is already Traversable. - * $willNotTerminateInf - * @return a Traversable containing all elements of this $coll. 
- */ - def toTraversable: GenTraversable[A] - - /** Converts this $coll to an iterable collection. Note that - * the choice of target `Iterable` is lazy in this default implementation - * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may - * be an iterator which is only traversable once). - * - * $willNotTerminateInf - * @return an `Iterable` containing all elements of this $coll. - */ - def toIterable: GenIterable[A] - - /** Converts this $coll to a sequence. As with `toIterable`, it's lazy - * in this default implementation, as this `TraversableOnce` may be - * lazy and unevaluated. - * - * $willNotTerminateInf - * @return a sequence containing all elements of this $coll. - */ - def toSeq: GenSeq[A] - - /** Converts this $coll to a set. - * $willNotTerminateInf - * @return a set containing all elements of this $coll. - */ - def toSet[A1 >: A]: GenSet[A1] - - /** Converts this $coll to a map. This method is unavailable unless - * the elements are members of Tuple2, each ((T, U)) becoming a key-value - * pair in the map. Duplicate keys will be overwritten by later keys: - * if this is an unordered collection, which key is in the resulting map - * is undefined. - * @return a map containing all elements of this $coll. - * - * @usecase def toMap[T, U]: Map[T, U] - * @inheritdoc - * $willNotTerminateInf - * @return a map of type `immutable.Map[T, U]` - * containing all key/value pairs of type `(T, U)` of this $coll. - */ - def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V] - - /** Converts this $coll to a Vector. - * $willNotTerminateInf - * @return a vector containing all elements of this $coll. - */ - def toVector: Vector[A] - - /** Converts this $coll into another by copying all elements. - * @tparam Col The collection type to build. - * @return a new collection containing all elements of this $coll. 
- * - * @usecase def to[Col[_]]: Col[A] - * @inheritdoc - * $willNotTerminateInf - * @return a new collection containing all elements of this $coll. - */ - def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] -} diff --git a/src/library/scala/collection/Hashing.scala b/src/library/scala/collection/Hashing.scala new file mode 100644 index 000000000000..7d3702d26e43 --- /dev/null +++ b/src/library/scala/collection/Hashing.scala @@ -0,0 +1,62 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + + +protected[collection] object Hashing { + + def elemHashCode(key: Any): Int = key.## + + def improve(hcode: Int): Int = { + var h: Int = hcode + ~(hcode << 9) + h = h ^ (h >>> 14) + h = h + (h << 4) + h ^ (h >>> 10) + } + + def computeHash(key: Any): Int = + improve(elemHashCode(key)) + + /** + * Utility method to keep a subset of all bits in a given bitmap + * + * Example + * bitmap (binary): 00000001000000010000000100000001 + * keep (binary): 1010 + * result (binary): 00000001000000000000000100000000 + * + * @param bitmap the bitmap + * @param keep a bitmask containing which bits to keep + * @return the original bitmap with all bits where keep is not 1 set to 0 + */ + def keepBits(bitmap: Int, keep: Int): Int = { + var result = 0 + var current = bitmap + var kept = keep + while (kept != 0) { + // lowest remaining bit in current + val lsb = current ^ (current & (current - 1)) + if ((kept & 1) != 0) { + // mark bit in result bitmap + result |= lsb + } + // clear lowest remaining one bit in abm + current &= ~lsb + // look at the next kept bit + kept >>>= 1 + } + result + } + +} diff --git a/src/library/scala/collection/IndexedSeq.scala 
b/src/library/scala/collection/IndexedSeq.scala index 9ca84b8a9662..3735755041a3 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,27 +13,163 @@ package scala package collection -import generic._ -import mutable.Builder +import scala.annotation.{nowarn, tailrec} +import scala.collection.Searching.{Found, InsertionPoint, SearchResult} +import scala.collection.Stepper.EfficientSplit +import scala.math.Ordering -/** A base trait for indexed sequences. - * $indexedSeqInfo - */ +/** Base trait for indexed sequences that have efficient `apply` and `length` */ trait IndexedSeq[+A] extends Seq[A] - with GenericTraversableTemplate[A, IndexedSeq] - with IndexedSeqLike[A, IndexedSeq[A]] { - override def companion: GenericCompanion[IndexedSeq] = IndexedSeq - override def seq: IndexedSeq[A] = this + with IndexedSeqOps[A, IndexedSeq, IndexedSeq[A]] + with IterableFactoryDefaults[A, IndexedSeq] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeq" + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq } -/** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. 
- * @define coll indexed sequence - * @define Coll `IndexedSeq` - */ -object IndexedSeq extends IndexedSeqFactory[IndexedSeq] { +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) + +/** Base trait for indexed Seq operations */ +trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => + + def iterator: Iterator[A] = view.iterator + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIndexedSeqStepper (this.asInstanceOf[IndexedSeqOps[Int, AnyConstr, _]], 0, length) + case StepperShape.LongShape => new LongIndexedSeqStepper (this.asInstanceOf[IndexedSeqOps[Long, AnyConstr, _]], 0, length) + case StepperShape.DoubleShape => new DoubleIndexedSeqStepper(this.asInstanceOf[IndexedSeqOps[Double, AnyConstr, _]], 0, length) + case _ => shape.parUnbox(new AnyIndexedSeqStepper[A](this, 0, length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def reverseIterator: Iterator[A] = view.reverseIterator + + /* TODO 2.14+ uncomment and delete related code in IterableOnce + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, apply(start)), op) + */ + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(apply(end - 1), z), op) + + //override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + //override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, apply(0), op) else super.reduceLeft(op) + + //override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, apply(length - 1), op) else super.reduceRight(op) + + override def view: 
IndexedSeqView[A] = new IndexedSeqView.Id[A](this) + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) + + override protected def reversed: Iterable[A] = new IndexedSeqView.Reverse(this) + + // Override transformation operations to use more efficient views than the default ones + override def prepended[B >: A](elem: B): CC[B] = iterableFactory.from(new IndexedSeqView.Prepended(elem, this)) + + override def take(n: Int): C = fromSpecific(new IndexedSeqView.Take(this, n)) + + override def takeRight(n: Int): C = fromSpecific(new IndexedSeqView.TakeRight(this, n)) + + override def drop(n: Int): C = fromSpecific(new IndexedSeqView.Drop(this, n)) + + override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) + + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)) + + override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) + + override def slice(from: Int, until: Int): C = fromSpecific(new IndexedSeqView.Slice(this, from, until)) + + override def sliding(size: Int, step: Int): Iterator[C] = { + require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") + new IndexedSeqSlidingIterator[A, CC, C](this, size, step) + } + + override def head: A = + if (!isEmpty) apply(0) + else throw new NoSuchElementException(s"head of empty ${ + self match { + case self: IndexedSeq[_] => self.collectionClassName + case _ => toString + } + }") + + override def headOption: Option[A] = if (isEmpty) None else Some(head) + + override def last: A = + if (!isEmpty) apply(length - 1) + else throw new NoSuchElementException(s"last of empty ${ + self match { + case self: IndexedSeq[_] => self.collectionClassName + case _ => toString + } + }") + + // We already inherit an efficient `lastOption = if (isEmpty) None else Some(last)` + + override final def 
lengthCompare(len: Int): Int = Integer.compare(length, len) + + override def knownSize: Int = length + + override final def lengthCompare(that: Iterable[_]): Int = { + val res = that.sizeCompare(length) + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + } + + override def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = + binarySearch(elem, 0, length)(ord) + + override def search[B >: A](elem: B, from: Int, to: Int)(implicit ord: Ordering[B]): SearchResult = + binarySearch(elem, from, to)(ord) + + @tailrec + private[this] def binarySearch[B >: A](elem: B, from: Int, to: Int) + (implicit ord: Ordering[B]): SearchResult = { + if (from < 0) binarySearch(elem, 0, to) + else if (to > length) binarySearch(elem, from, length) + else if (to <= from) InsertionPoint(from) + else { + val idx = from + (to - from - 1) / 2 + math.signum(ord.compare(elem, apply(idx))) match { + case -1 => binarySearch(elem, from, idx)(ord) + case 1 => binarySearch(elem, idx + 1, to)(ord) + case _ => Found(idx) + } + } + } +} + +/** A fast sliding iterator for IndexedSeqs which uses the underlying `slice` operation. 
*/ +private final class IndexedSeqSlidingIterator[A, CC[_], C](s: IndexedSeqOps[A, CC, C], size: Int, step: Int) + extends AbstractIterator[C] { + + private[this] val len = s.length + private[this] var pos = 0 + private def chklen: Boolean = len == s.length || { + throw new java.util.ConcurrentModificationException("collection size changed during iteration") + false + } + + def hasNext: Boolean = chklen && pos < len - def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + def next(): C = if (!chklen || !hasNext) Iterator.empty.next() else { + val end = { val x = pos + size; if (x < 0 || x > len) len else x } // (pos.toLong + size).min(len).toInt + val slice = s.slice(pos, end) + pos = + if (end >= len) len + else { val x = pos + step; if (x < 0 || x > len) len else x } // (pos.toLong + step).min(len).toInt + slice + } } diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala deleted file mode 100644 index 73e23ac8e111..000000000000 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** A template trait for indexed sequences of type `IndexedSeq[A]`. - * - * $indexedSeqInfo - * - * This trait just implements `iterator` in terms of `apply` and `length`. - * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations - * to make them run faster under the assumption of fast random access with `apply`. 
- * - * @define Coll IndexedSeq - * @define indexedSeqInfo - * Indexed sequences support constant-time or near constant-time element - * access and length computation. They are defined in terms of abstract methods - * `apply` for indexing and `length`. - * - * Indexed sequences do not add any new methods to `Seq`, but promise - * efficient implementations of random access patterns. - * - * @tparam A the element type of the $coll - * @tparam Repr the type of the actual $coll containing the elements. - * @author Martin Odersky - * @since 2.8 - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { - self => - - def seq: IndexedSeq[A] - override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ? - - override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]] - override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]] - - /** The class of the iterator returned by the `iterator` method. - * multiple `take`, `drop`, and `slice` operations on this iterator are bunched - * together for better efficiency. 
- */ - // pre: start >= 0, end <= self.length - @SerialVersionUID(1756321872811029277L) - protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable { - private var index = start - private def available = (end - index) max 0 - - def hasNext: Boolean = index < end - - def next(): A = { - if (index >= end) - Iterator.empty.next() - - val x = self(index) - index += 1 - x - } - - def head = { - if (index >= end) - Iterator.empty.next() - - self(index) - } - - override def drop(n: Int): Iterator[A] = - if (n <= 0) new Elements(index, end) - else if (index + n >= end) new Elements(end, end) - else new Elements(index + n, end) - override def take(n: Int): Iterator[A] = - if (n <= 0) Iterator.empty - else if (n <= available) new Elements(index, index + n) - else new Elements(index, end) - override def slice(from: Int, until: Int): Iterator[A] = - this take until drop from - } - - override /*IterableLike*/ - def iterator: Iterator[A] = { - val len = length - if (len == 0) Iterator.empty - else new Elements(0, length) - } - - /* Overridden for efficiency */ - override def toBuffer[A1 >: A]: mutable.Buffer[A1] = { - val result = new mutable.ArrayBuffer[A1](size) - copyToBuffer(result) - result - } - - override protected[collection] def sizeHintIfCheap: Int = size -} diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala deleted file mode 100644 index d89f826386b3..000000000000 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import generic._ -import scala.annotation.tailrec - -/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes - * the implementation of several methods under the assumption of fast random access. - * - * $indexedSeqInfo - * - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { self => - - override /*IterableLike*/ - def isEmpty: Boolean = { length == 0 } - - override /*IterableLike*/ - def foreach[U](f: A => U): Unit = { - var i = 0 - val len = length - while (i < len) { f(this(i)); i += 1 } - } - - private def prefixLengthImpl(p: A => Boolean, expectTrue: Boolean): Int = { - var i = 0 - while (i < length && p(apply(i)) == expectTrue) i += 1 - i - } - - override /*IterableLike*/ - def forall(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = true) == length - - override /*IterableLike*/ - def exists(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = false) != length - - override /*IterableLike*/ - def find(p: A => Boolean): Option[A] = { - val i = prefixLength(!p(_)) - if (i < length) Some(this(i)) else None - } - - @tailrec - private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = - if (start == end) z - else foldl(start + 1, end, op(z, this(start)), op) - - @tailrec - private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = - if (start == end) z - else foldr(start, end - 1, op(this(end - 1), z), op) - - override /*TraversableLike*/ - def foldLeft[B](z: B)(op: (B, A) => B): B = - foldl(0, length, z, op) - - override /*IterableLike*/ - def foldRight[B](z: B)(op: (A, B) => B): B = - foldr(0, length, z, op) - - override /*TraversableLike*/ - def reduceLeft[B >: A](op: (B, A) => B): B = - if (length > 0) foldl(1, length, this(0), op) else super.reduceLeft(op) - - override /*IterableLike*/ - def reduceRight[B >: A](op: (A, B) => B): B = - if (length > 0) 
foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op) - - override /*IterableLike*/ - def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match { - case that: IndexedSeq[_] => - val b = bf(repr) - var i = 0 - val len = this.length min that.length - b.sizeHint(len) - while (i < len) { - b += ((this(i), that(i).asInstanceOf[B])) - i += 1 - } - b.result() - case _ => - super.zip[A1, B, That](that)(bf) - } - - override /*IterableLike*/ - def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = { - val b = bf(repr) - val len = length - b.sizeHint(len) - var i = 0 - while (i < len) { - b += ((this(i), i)) - i += 1 - } - b.result() - } - - override /*IterableLike*/ - def slice(from: Int, until: Int): Repr = { - val lo = math.max(from, 0) - val hi = math.min(math.max(until, 0), length) - val elems = math.max(hi - lo, 0) - val b = newBuilder - b.sizeHint(elems) - - var i = lo - while (i < hi) { - b += self(i) - i += 1 - } - b.result() - } - - override /*IterableLike*/ - def head: A = if (isEmpty) super.head else this(0) - - override /*TraversableLike*/ - def tail: Repr = if (isEmpty) super.tail else slice(1, length) - - override /*TraversableLike*/ - def last: A = if (length > 0) this(length - 1) else super.last - - override /*IterableLike*/ - def init: Repr = if (length > 0) slice(0, length - 1) else super.init - - override /*TraversableLike*/ - def take(n: Int): Repr = slice(0, n) - - override /*TraversableLike*/ - def drop(n: Int): Repr = slice(n, length) - - override /*IterableLike*/ - def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length) - - override /*IterableLike*/ - def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0)) - - override /*TraversableLike*/ - def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n)) - - override /*IterableLike*/ - def takeWhile(p: A => Boolean): Repr = take(prefixLength(p)) - - override /*TraversableLike*/ - def 
dropWhile(p: A => Boolean): Repr = drop(prefixLength(p)) - - override /*TraversableLike*/ - def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p)) - - override /*IterableLike*/ - def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { - case that: IndexedSeq[_] => - val len = length - len == that.length && { - var i = 0 - while (i < len && this(i) == that(i)) i += 1 - i == len - } - case _ => - super.sameElements(that) - } - - override /*IterableLike*/ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - var i = 0 - var j = start - val end = length min len min (xs.length - start) - while (i < end) { - xs(j) = this(i) - i += 1 - j += 1 - } - } - - // Overridden methods from Seq - - override /*SeqLike*/ - def lengthCompare(len: Int): Int = length - len - - override /*SeqLike*/ - def segmentLength(p: A => Boolean, from: Int): Int = { - val len = length - var i = from - while (i < len && p(this(i))) i += 1 - i - from - } - - private def negLength(n: Int) = if (n >= length) -1 else n - - override /*SeqLike*/ - def indexWhere(p: A => Boolean, from: Int): Int = { - val start = math.max(from, 0) - negLength(start + segmentLength(!p(_), start)) - } - - override /*SeqLike*/ - def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = math.min(end, length - 1) - while (i >= 0 && !p(this(i))) i -= 1 - i - } - - override /*SeqLike*/ - def reverse: Repr = { - val b = newBuilder - b.sizeHint(length) - var i = length - while (0 < i) { - i -= 1 - b += this(i) - } - b.result() - } - - override /*SeqLike*/ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private var i = self.length - def hasNext: Boolean = 0 < i - def next(): A = - if (0 < i) { - i -= 1 - self(i) - } else Iterator.empty.next() - } - - override /*SeqLike*/ - def startsWith[B](that: GenSeq[B], offset: Int): Boolean = that match { - case that: IndexedSeq[_] => - var i = offset - var j = 0 - val thisLen = length - val thatLen = that.length - while (i < thisLen 
&& j < thatLen && this(i) == that(j)) { - i += 1 - j += 1 - } - j == thatLen - case _ => - var i = offset - val thisLen = length - val thatElems = that.iterator - while (i < thisLen && thatElems.hasNext) { - if (this(i) != thatElems.next()) - return false - - i += 1 - } - !thatElems.hasNext - } - - override /*SeqLike*/ - def endsWith[B](that: GenSeq[B]): Boolean = that match { - case that: IndexedSeq[_] => - var i = length - 1 - var j = that.length - 1 - - (j <= i) && { - while (j >= 0) { - if (this(i) != that(j)) - return false - i -= 1 - j -= 1 - } - true - } - case _ => - super.endsWith(that) - } - - override def toList: List[A] = { - var i = length - 1 - var result: List[A] = Nil - while (i >= 0) { - result ::= apply(i) - i -= 1 - } - result - } -} - diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala new file mode 100644 index 000000000000..4fd99c1080af --- /dev/null +++ b/src/library/scala/collection/IndexedSeqView.scala @@ -0,0 +1,180 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.nowarn + + +/** View defined in terms of indexing a range */ +trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] { self => + + override def view: IndexedSeqView[A] = this + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) + + override def iterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A] = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A] = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B] = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A] = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(prefix, this) + + 
@nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeqView" +} + +object IndexedSeqView { + + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var current = 0 + private[this] var remainder = self.length + override def knownSize: Int = remainder + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + + def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value + + val formatFrom = formatRange(from) + val formatUntil = formatRange(until) + remainder = Math.max(0, formatUntil - formatFrom) + current = current + formatFrom + this + } + } + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var remainder = self.length + private[this] var pos = remainder - 1 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(pos) + pos -= 1 + remainder -= 1 + r + } else Iterator.empty.next() + + // from < 0 means don't move pos, until < 0 means don't limit remainder + // + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + if (_hasNext) { + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by 
until + } + else { + pos -= from // skip ahead + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } + else remainder -= from // ...otherwise just less the skip + } + } + this + } + } + + /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqOps[A] = IndexedSeqOps[A, AnyConstr, _] + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A]) + extends SeqView.Id(underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A) + extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]) + extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A]) + extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.Take(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B) + extends SeqView.Map(underlying, f) with IndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A]) extends SeqView.Reverse[A](underlying) 
with IndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int) extends AbstractIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} + +/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index 9f9474e31d49..304a87402f79 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,44 +13,1034 @@ package scala package collection -import generic._ -import mutable.Builder +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} -/** A base trait for iterable collections. 
- * $iterableInfo - */ -trait Iterable[+A] extends Traversable[A] - with GenIterable[A] - with GenericTraversableTemplate[A, Iterable] - with IterableLike[A, Iterable[A]] { - override def companion: GenericCompanion[Iterable] = Iterable - - override def seq = this - - /* The following methods are inherited from trait IterableLike - * - override def iterator: Iterator[A] - override def takeRight(n: Int): Iterable[A] - override def dropRight(n: Int): Iterable[A] - override def sameElements[B >: A](that: GenIterable[B]): Boolean - override def view - override def view(from: Int, until: Int) +/** Base trait for generic collections. + * + * @tparam A the element type of the collection + * + * @define Coll `Iterable` + * @define coll iterable collection */ +trait Iterable[+A] extends IterableOnce[A] + with IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + + // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + final def toIterable: this.type = this + + final protected def coll: this.type = this + + def iterableFactory: IterableFactory[Iterable] = Iterable + + @deprecated("Iterable.seq always returns the iterable itself", "2.13.0") + def seq: this.type = this + + /** Defines the prefix of this object's `toString` representation. + * + * It is recommended to return the name of the concrete collection type, but + * not implementation subclasses. For example, for `ListMap` this method should + * return `"ListMap"`, not `"Map"` (the supertype) or `"Node"` (an implementation + * subclass). + * + * The default implementation returns "Iterable". It is overridden for the basic + * collection kinds "Seq", "IndexedSeq", "LinearSeq", "Buffer", "Set", "Map", + * "SortedSet", "SortedMap" and "View". + * + * @return a string representation which starts the result of `toString` + * applied to this $coll. 
By default the string prefix is the + * simple name of the collection class $coll. + */ + protected[this] def className: String = stringPrefix + + /** Forwarder to `className` for use in `scala.runtime.ScalaRunTime`. + * + * This allows the proper visibility for `className` to be + * published, but provides the exclusive access needed by + * `scala.runtime.ScalaRunTime.stringOf` (and a few tests in + * the test suite). + */ + private[scala] final def collectionClassName: String = className + + @deprecatedOverriding("Override className instead", "2.13.0") + protected[this] def stringPrefix: String = "Iterable" + /** Converts this $coll to a string. + * + * @return a string representation of this collection. By default this + * string consists of the `className` of this $coll, followed + * by all elements separated by commas and enclosed in parentheses. + */ + override def toString = mkString(className + "(", ", ", ")") + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) } -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. 
- * @define coll iterable collection - * @define Coll `Iterable` - */ -object Iterable extends TraversableFactory[Iterable] { +/** Base trait for Iterable operations + * + * =VarianceNote= + * + * We require that for all child classes of Iterable the variance of + * the child class and the variance of the `C` parameter passed to `IterableOps` + * are the same. We cannot express this since we lack variance polymorphism. That's + * why we have to resort at some places to write `C[A @uncheckedVariance]`. + * + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * + * @define Coll Iterable + * @define coll iterable collection + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentFold + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define undefinedorder + * The order in which operations are performed on elements is unspecified + * and may be nondeterministic. + */ +trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + /** + * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. 
+ */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + def toIterable: Iterable[A] + + /** Converts this $coll to an unspecified Iterable. Will return + * the same collection if this instance is already Iterable. + * @return An Iterable containing all elements of this $coll. + */ + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") + final def toTraversable: Traversable[A] = toIterable + + override def isTraversableAgain: Boolean = true + + /** + * @return This collection as a `C`. + */ + protected def coll: C + + @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") + final def repr: C = coll + + /** + * Defines how to turn a given `Iterable[A]` into a collection of type `C`. + * + * This process can be done in a strict way or a non-strict way (ie. without evaluating + * the elements of the resulting collections). In other words, this methods defines + * the evaluation model of the collection. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method + * might be unsound. However, as long as it is called with an + * `Iterable[A]` obtained from `this` collection (as it is the case in the + * implementations of operations where we use a `View[A]`), it is safe. 
+ */ + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C + + /** The companion object of this ${coll}, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def iterableFactory: IterableFactory[CC] + + @deprecated("Use iterableFactory instead", "2.13.0") + @deprecatedOverriding("Use iterableFactory instead", "2.13.0") + @`inline` def companion: IterableFactory[CC] = iterableFactory + + /** + * @return a strict builder for the same collection type. + * + * Note that in the case of lazy collections (e.g. [[scala.collection.View]] or [[scala.collection.immutable.LazyList]]), + * it is possible to implement this method but the resulting `Builder` will break laziness. + * As a consequence, operations should preferably be implemented with `fromSpecific` + * instead of this method. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method might + * be unsound. However, as long as the returned builder is only fed + * with `A` values taken from `this` instance, it is safe. + */ + protected def newSpecificBuilder: Builder[A @uncheckedVariance, C] + + /** The empty $coll. + * + * @return an empty iterable of type $Coll. + */ + def empty: C = fromSpecific(Nil) + + /** Selects the first element of this $coll. + * $orderDependent + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. 
+ */ + def head: A = iterator.next() + + /** Optionally selects the first element. + * $orderDependent + * @return the first element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = { + val it = iterator + if (it.hasNext) Some(it.next()) else None + } + + /** Selects the last element. + * $orderDependent + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. + */ + def last: A = { + val it = iterator + var lst = it.next() + while (it.hasNext) lst = it.next() + lst + } + + /** Optionally selects the last element. + * $orderDependent + * @return the last element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if (isEmpty) None else Some(last) + + /** A view over the elements of this collection. */ + def view: View[A] = View.fromIteratorProvider(() => iterator) + + /** Compares the size of this $coll to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(size min otherSize)` instead of `O(size)`. The method should be overridden + * if computing `size` is cheap and `knownSize` returns `-1`. + * + * @see [[sizeIs]] + */ + def sizeCompare(otherSize: Int): Int = + if (otherSize < 0) 1 + else { + val known = knownSize + if (known >= 0) Integer.compare(known, otherSize) + else { + var i = 0 + val it = iterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + + /** Returns a value class containing operations for comparing the size of this $coll to a test value. 
+ * + * These operations are implemented in terms of [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + + /** Compares the size of this $coll to the size of another `Iterable`. + * + * @param that the `Iterable` whose size is compared with this $coll's size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < that.size + * x == 0 if this.size == that.size + * x > 0 if this.size > that.size + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. + * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. + */ + def sizeCompare(that: Iterable[_]): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this sizeCompare thatKnownSize + else { + val thisKnownSize = this.knownSize + + if (thisKnownSize >= 0) { + val res = that sizeCompare thisKnownSize + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + } else { + val thisIt = this.iterator + val thatIt = that.iterator + while (thisIt.hasNext && thatIt.hasNext) { + thisIt.next() + thatIt.next() + } + java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) + } + } + } + + /** A view over a slice of the elements of this collection. 
*/ + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + def view(from: Int, until: Int): View[A] = view.slice(from, until) + + /** Transposes this $coll of iterable collections into + * a $coll of ${coll}s. + * + * The resulting collection's type will be guided by the + * static type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(4, 5, 6)).transpose + * // xs == List( + * // List(1, 4), + * // List(2, 5), + * // List(3, 6)) + * + * val ys = Vector( + * List(1, 2, 3), + * List(4, 5, 6)).transpose + * // ys == Vector( + * // Vector(1, 4), + * // Vector(2, 5), + * // Vector(3, 6)) + * }}} + * + * $willForceEvaluation + * + * @tparam B the type of the elements of each iterable collection. + * @param asIterable an implicit conversion which asserts that the + * element type of this $coll is an `Iterable`. + * @return a two-dimensional $coll of ${coll}s which has as ''n''th row + * the ''n''th column of this $coll. + * @throws IllegalArgumentException if all collections in this $coll + * are not of the same size. + */ + def transpose[B](implicit asIterable: A => /*<:<!!!*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = { + if (isEmpty) + return iterableFactory.empty[CC[B]] + + def fail = throw new IllegalArgumentException("transpose requires all collections have the same size") + + val headSize = asIterable(head).size + val bs: immutable.IndexedSeq[Builder[B, CC[B]]] = immutable.IndexedSeq.fill(headSize)(iterableFactory.newBuilder[B]) + for (xs <- this) { + var i = 0 + for (x <- asIterable(xs)) { + if (i >= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + iterableFactory.from(bs.map(_.result())) + } + + def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + + def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + + /** Creates a non-strict filter of this $coll. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new collection, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * $orderDependent + * + * @param p the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. 
+ * All these operations apply to those elements of this $coll + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, + * all elements that do not. + * + * The two $coll correspond to the result of [[filter]] and [[filterNot]], respectively. + * + * The default implementation provided here needs to traverse the collection twice. + * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, + * which requires only a single traversal. + */ + def partition(p: A => Boolean): (C, C) = { + val first = new View.Filter(this, p, isFlipped = false) + val second = new View.Filter(this, p, isFlipped = true) + (fromSpecific(first), fromSpecific(second)) + } + + override def splitAt(n: Int): (C, C) = (take(n), drop(n)) + + def take(n: Int): C = fromSpecific(new View.Take(this, n)) + + /** Selects the last ''n'' elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the last `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) + + /** Takes longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) + + def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) + + def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) + + /** Selects all elements except last ''n'' ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. 
+ * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) + + def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) + + /** Partitions elements in fixed size ${coll}s. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[C] = + iterator.grouped(size).map(fromSpecific) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in `grouped`). + * + * An empty collection returns an empty iterator, and a non-empty + * collection containing fewer elements than the window size returns + * an iterator that will produce the original collection as its only + * element. + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except for a + * non-empty collection with less than `size` elements, which + * returns an iterator that produces the source collection itself + * as its only element. + * @example `List().sliding(2) = empty iterator` + * @example `List(1).sliding(2) = Iterator(List(1))` + * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` + * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` + */ + def sliding(size: Int): Iterator[C] = sliding(size, 1) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in `grouped`). 
+ * + * The returned iterator will be empty when called on an empty collection. + * The last element the iterator produces may be smaller than the window + * size when the original collection isn't exhausted by the window before + * it and its last element isn't skipped by the step before it. + * + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive + * groups + * @return An iterator producing ${coll}s of size `size`, except the last + * element (which may be the only element) will be smaller + * if there are fewer than `size` elements remaining to be grouped. + * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + */ + def sliding(size: Int, step: Int): Iterator[C] = + iterator.sliding(size, step).map(fromSpecific) - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + /** The rest of the collection without its first element. */ + def tail: C = { + if (isEmpty) throw new UnsupportedOperationException + drop(1) + } - def newBuilder[A]: Builder[A, Iterable[A]] = immutable.Iterable.newBuilder[A] + /** The initial part of the collection without its last element. + * $willForceEvaluation + */ + def init: C = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } + + def slice(from: Int, until: Int): C = + fromSpecific(new View.Drop(new View.Take(this, until), from)) + + /** Partitions this $coll into a map of ${coll}s according to some discriminator function. + * + * $willForceEvaluation + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. 
+ * @return A map from keys to ${coll}s such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a $coll of those elements `x` + * for which `f(x)` equals `k`. + * + */ + def groupBy[K](f: A => K): immutable.Map[K, C] = { + val m = mutable.Map.empty[K, Builder[A, C]] + val it = iterator + while (it.hasNext) { + val elem = it.next() + val key = f(elem) + val bldr = m.getOrElseUpdate(key, newSpecificBuilder) + bldr += elem + } + var result = immutable.HashMap.empty[K, C] + val mapIt = m.iterator + while (mapIt.hasNext) { + val (k, v) = mapIt.next() + result = result.updated(k, v.result()) + } + result + } + + /** + * Partitions this $coll into a map of ${coll}s according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Seq[User]): Map[Int, Seq[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * $willForceEvaluation + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B](key: A => K)(f: A => B): immutable.Map[K, CC[B]] = { + val m = mutable.Map.empty[K, Builder[B, CC[B]]] + for (elem <- this) { + val k = key(elem) + val bldr = m.getOrElseUpdate(k, iterableFactory.newBuilder[B]) + bldr += f(elem) + } + class Result extends runtime.AbstractFunction1[(K, Builder[B, CC[B]]), Unit] { + var built = immutable.Map.empty[K, CC[B]] + def apply(kv: (K, Builder[B, CC[B]])) = + built = built.updated(kv._1, kv._2.result()) + } + val result = new Result + m.foreach(result) + result.built + } + + /** + * Partitions this $coll into a 
map according to a discriminator function `key`. All the values that + * have the same discriminator are then transformed by the `f` function and then reduced into a + * single value with the `reduce` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f).reduce(reduce))`, but more efficient. + * + * {{{ + * def occurrences[A](as: Seq[A]): Map[A, Int] = + * as.groupMapReduce(identity)(_ => 1)(_ + _) + * }}} + * + * $willForceEvaluation + */ + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): immutable.Map[K, B] = { + val m = mutable.Map.empty[K, B] + for (elem <- this) { + val k = key(elem) + val v = + m.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + m.put(k, v) + } + m.to(immutable.Map) + } + + /** Computes a prefix scan of the elements of the collection. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting collection + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new $coll containing the prefix scan of the elements in this $coll + */ + def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) + + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) + + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. 
+ * $willNotTerminateInf + * $orderDependent + * $willForceEvaluation + * + * Example: + * {{{ + * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) + * }}} + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { + class Scanner extends runtime.AbstractFunction1[A, Unit] { + var acc = z + var scanned = acc :: immutable.Nil + def apply(x: A) = { + acc = op(x, acc) + scanned ::= acc + } + } + val scanner = new Scanner + reversed.foreach(scanner) + iterableFactory.from(scanner.scanned) + } + + def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) + + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) + + def collect[B](pf: PartialFunction[A, B]): CC[B] = + iterableFactory.from(new View.Collect(this, pf)) + + /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. 
+ * + * Example: + * {{{ + * val xs = $Coll(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] + * + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val left: View[A1] = new LeftPartitionMapped(this, f) + val right: View[A2] = new RightPartitionMapped(this, f) + (iterableFactory.from(left), iterableFactory.from(right)) + } + + /** Returns a new $ccoll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $ccoll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @tparam B the element type of the returned collection. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from { + suffix match { + case suffix: Iterable[B] => new View.Concat(this, suffix) + case suffix => iterator ++ suffix.iterator + } + } + + /** Alias for `concat` */ + @inline final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) + + /** Returns a $ccoll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. 
+ * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $ccoll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. + * @return a new $coll containing pairs consisting of + * corresponding elements of this $coll and `that`. The length + * of the returned collection is the maximum of the lengths of this $coll and `that`. + * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. + */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. 
+ * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) + (iterableFactory.from(first), iterableFactory.from(second)) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) + (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) + } + + /** Iterates over the tails of this $coll. 
The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this $coll + * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` + */ + def tails: Iterator[C] = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `init`. + * + * $willForceEvaluation + * + * @return an iterator over all the inits of this $coll + * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` + */ + def inits: Iterator[C] = iterateUntilEmpty(_.init) + + override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) + (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { + case xs: Iterable[B] => new View.Concat(xs, this) + case _ => that.iterator ++ iterator + }) +} + +object IterableOps { + + /** Operations for comparing the size of a collection to a test value. 
+ * + * These operations are implemented in terms of + * [[scala.collection.IterableOps!.sizeCompare(Int):Int* `sizeCompare(Int)`]] + */ + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = it.sizeCompare(size) <= 0 + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = it.sizeCompare(size) == 0 + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = it.sizeCompare(size) != 0 + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = it.sizeCompare(size) >= 0 + /** Tests if the size of the collection is greater than some value. */ + @inline def >(size: Int): Boolean = it.sizeCompare(size) > 0 + } + + /** A trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. 
`List`) + * + * @define coll collection + */ + @SerialVersionUID(3L) + class WithFilter[+A, +CC[_]]( + self: IterableOps[A, CC, _], + p: A => Boolean + ) extends collection.WithFilter[A, CC] with Serializable { + + protected def filtered: Iterable[A] = + new View.Filter(self, p, isFlipped = false) + + def map[B](f: A => B): CC[B] = + self.iterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = + self.iterableFactory.from(new View.FlatMap(filtered, f)) + + def foreach[U](f: A => U): Unit = filtered.foreach(f) + + def withFilter(q: A => Boolean): WithFilter[A, CC] = + new WithFilter(self, (a: A) => p(a) && q(a)) + + } + +} + +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](immutable.Iterable) { + + def single[A](a: A): Iterable[A] = new AbstractIterable[A] { + override def iterator = Iterator.single(a) + override def knownSize = 1 + override def head = a + override def headOption: Some[A] = Some(a) + override def last = a + override def lastOption: Some[A] = Some(a) + override def view: View.Single[A] = new View.Single(a) + override def take(n: Int) = if (n > 0) this else Iterable.empty + override def takeRight(n: Int) = if (n > 0) this else Iterable.empty + override def drop(n: Int) = if (n > 0) Iterable.empty else this + override def dropRight(n: Int) = if (n > 0) Iterable.empty else this + override def tail: Iterable[Nothing] = Iterable.empty + override def init: Iterable[Nothing] = Iterable.empty + } } /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ -abstract class AbstractIterable[+A] extends AbstractTraversable[A] with Iterable[A] +abstract class AbstractIterable[+A] extends Iterable[A] + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. 
+ * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] + + // overridden for efficiency, since we know CC[A] =:= C + override def empty: CC[A @uncheckedVariance] = iterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for collections that have an additional constraint, + * expressed by the `evidenceIterableFactory` method. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] + implicit protected def iterableEvidence: Ev[A @uncheckedVariance] + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) + override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] + override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. 
It is used for sorted sets. + * + * Note that in sorted sets, the `CC` type of the set is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Set` in [[SortedSetOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedSetFactoryDefaults[+A, + +CC[X] <: SortedSet[X] with SortedSetOps[X, CC, CC[X]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { + self: IterableOps[A, WithFilterCC, _] => + + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) + + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = + new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) +} + + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for maps. + * + * Note that in maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Map` in [[MapOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. 
+ * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait MapFactoryDefaults[K, +V, + +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] + override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { + // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case _ => mapFactory.empty + } + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = + new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted maps. + * + * Note that in sorted maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying map (which is fixed to `Map` in [[SortedMapOps]]). This trait has therefore + * three type parameters `CC`, `WithFilterCC` and `UnsortedCC`. The `withFilter` method inherited + * from `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
+ */ +trait SortedMapFactoryDefaults[K, +V, + +CC[x, y] <: Map[x, y] with SortedMapOps[x, y, CC, CC[x, y]] with UnsortedCC[x, y], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x], + +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { + self: IterableOps[(K, V), WithFilterCC, _] => + + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) + + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = + new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) +} diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala deleted file mode 100644 index adf7f7ae33c1..000000000000 --- a/src/library/scala/collection/IterableLike.scala +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import immutable.Stream - -/** A template trait for iterable collections of type `Iterable[A]`. - * $iterableInfo - * @define iterableInfo - * This is a base trait for all $mutability Scala collections that define an `iterator` - * method to step through one-by-one the collection's elements. 
- * Implementations of this trait need to provide a concrete method with - * signature: - * {{{ - * def iterator: Iterator[A] - * }}} - * They also need to provide a method `newBuilder` - * which creates a builder for collections of the same kind. - * - * This trait implements `Iterable`'s `foreach` - * method by stepping through all elements using `iterator`. - * Subclasses should re-implement `foreach` with something more efficient, - * if possible. - - * This trait adds methods `iterator`, `sameElements`, - * `takeRight`, `dropRight` to the methods inherited - * from trait - * `Traversable`. - - * Note: This trait replaces every method that uses `break` in - * `TraversableLike` by an iterator version. - * - * @author Martin Odersky - * @since 2.8 - * @tparam A the element type of the collection - * @tparam Repr the type of the actual collection containing the elements. - * - * @define Coll Iterable - * @define coll iterable collection - */ -trait IterableLike[+A, +Repr] extends Any with Equals with TraversableLike[A, Repr] with GenIterableLike[A, Repr] { -self => - - override protected[this] def thisCollection: Iterable[A] = this.asInstanceOf[Iterable[A]] - override protected[this] def toCollection(repr: Repr): Iterable[A] = repr.asInstanceOf[Iterable[A]] - - /** Creates a new iterator over all elements contained in this iterable object. - * - * @return the new iterator - */ - def iterator: Iterator[A] - - /** Applies a function `f` to all elements of this $coll. - * - * Note: this method underlies the implementation of most other bulk operations. - * Subclasses should re-implement this method if a more efficient implementation exists. 
- * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ - def foreach[U](f: A => U): Unit = - iterator.foreach(f) - - override /*TraversableLike*/ def forall(p: A => Boolean): Boolean = - iterator.forall(p) - override /*TraversableLike*/ def exists(p: A => Boolean): Boolean = - iterator.exists(p) - override /*TraversableLike*/ def find(p: A => Boolean): Option[A] = - iterator.find(p) - override /*TraversableLike*/ def isEmpty: Boolean = - !iterator.hasNext - override /*TraversableLike*/ def foldRight[B](z: B)(op: (A, B) => B): B = - iterator.foldRight(z)(op) - override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B = - iterator.reduceRight(op) - - - /** Returns this $coll as an iterable collection. - * - * A new collection will not be built; lazy collections will stay lazy. - * - * $willNotTerminateInf - * @return an `Iterable` containing all elements of this $coll. - */ - override /*TraversableLike*/ def toIterable: Iterable[A] = - thisCollection - - /** Returns an Iterator over the elements in this $coll. Produces the same - * result as `iterator`. - * $willNotTerminateInf - * @return an Iterator containing all elements of this $coll. 
- */ - @deprecatedOverriding("toIterator should stay consistent with iterator for all Iterables: override iterator instead.", "2.11.0") - override def toIterator: Iterator[A] = iterator - - override /*TraversableLike*/ def head: A = - iterator.next() - - override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = { - val lo = math.max(from, 0) - val elems = until - lo - val b = newBuilder - if (elems <= 0) b.result() - else { - b.sizeHintBounded(elems, this) - var i = 0 - val it = iterator drop lo - while (i < elems && it.hasNext) { - b += it.next - i += 1 - } - b.result() - } - } - - override /*TraversableLike*/ def take(n: Int): Repr = { - val b = newBuilder - - if (n <= 0) b.result() - else { - b.sizeHintBounded(n, this) - var i = 0 - val it = iterator - while (i < n && it.hasNext) { - b += it.next - i += 1 - } - b.result() - } - } - - override /*TraversableLike*/ def drop(n: Int): Repr = { - val b = newBuilder - val lo = math.max(0, n) - b.sizeHint(this, -lo) - var i = 0 - val it = iterator - while (i < n && it.hasNext) { - it.next() - i += 1 - } - (b ++= it).result() - } - - override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = { - val b = newBuilder - val it = iterator - while (it.hasNext) { - val x = it.next() - if (!p(x)) return b.result() - b += x - } - b.result() - } - - /** Partitions elements in fixed size ${coll}s. - * @see [[scala.collection.Iterator]], method `grouped` - * - * @param size the number of elements per group - * @return An iterator producing ${coll}s of size `size`, except the - * last will be less than size `size` if the elements don't divide evenly. - */ - def grouped(size: Int): Iterator[Repr] = - for (xs <- iterator grouped size) yield { - val b = newBuilder - b ++= xs - b.result() - } - - /** Groups elements in fixed size blocks by passing a "sliding window" - * over them (as opposed to partitioning them, as is done in `grouped`.) - * The "sliding window" step is set to one. 
- * @see [[scala.collection.Iterator]], method `sliding` - * - * @param size the number of elements per group - * @return An iterator producing ${coll}s of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - */ - def sliding(size: Int): Iterator[Repr] = sliding(size, 1) - - /** Groups elements in fixed size blocks by passing a "sliding window" - * over them (as opposed to partitioning them, as is done in grouped.) - * @see [[scala.collection.Iterator]], method `sliding` - * - * @param size the number of elements per group - * @param step the distance between the first elements of successive - * groups - * @return An iterator producing ${coll}s of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - */ - def sliding(size: Int, step: Int): Iterator[Repr] = - for (xs <- iterator.sliding(size, step)) yield { - val b = newBuilder - b ++= xs - b.result() - } - - /** Selects last ''n'' elements. - * $orderDependent - * - * @param n the number of elements to take - * @return a $coll consisting only of the last `n` elements of this $coll, or else the - * whole $coll, if it has less than `n` elements. - */ - def takeRight(n: Int): Repr = { - val b = newBuilder - b.sizeHintBounded(n, this) - val lead = this.iterator drop n - val it = this.iterator - while (lead.hasNext) { - lead.next() - it.next() - } - while (it.hasNext) b += it.next() - b.result() - } - - /** Selects all elements except last ''n'' ones. - * $orderDependent - * - * @param n The number of elements to take - * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the - * empty $coll, if this $coll has less than `n` elements. 
- */ - def dropRight(n: Int): Repr = { - val b = newBuilder - if (n >= 0) b.sizeHint(this, -n) - val lead = iterator drop n - val it = iterator - while (lead.hasNext) { - b += it.next - lead.next() - } - b.result() - } - - override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - var i = start - val end = (start + len) min xs.length - val it = iterator - while (i < end && it.hasNext) { - xs(i) = it.next() - i += 1 - } - } - - def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = { - val b = bf(repr) - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - b += ((these.next(), those.next())) - b.result() - } - - def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = { - val b = bf(repr) - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - b += ((these.next(), those.next())) - while (these.hasNext) - b += ((these.next(), thatElem)) - while (those.hasNext) - b += ((thisElem, those.next())) - b.result() - } - - def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = { - val b = bf(repr) - val these = this.iterator - var i = 0 - while (these.hasNext) { - b += ((these.next(), i)) - i += 1 - } - b.result() - } - - def sameElements[B >: A](that: GenIterable[B]): Boolean = (this.asInstanceOf[AnyRef] eq that.asInstanceOf[AnyRef]) || { - that match { - case thatVector: Vector[_] if this.isInstanceOf[Vector[_]] => - val thisVector = this.asInstanceOf[Vector[_]] - (thisVector eq thatVector) || { - var equal = thisVector.length == thatVector.length - if (equal) { - val length = thatVector.length - var index = 0 - while (index < length && equal) { - equal = thisVector(index) == thatVector(index) - index += 1 - } - } - equal - } - case thatSet: GenSet[A] if this.isInstanceOf[GenSetLike[A,_]]=> - val 
thisSet = this.asInstanceOf[GenSetLike[A,_]] - thisSet.size == thatSet.size && thisSet.subsetOf(thatSet) - - case _ => - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - if (these.next != those.next) - return false - - !these.hasNext && !those.hasNext - } - } - - override /*TraversableLike*/ def toStream: Stream[A] = iterator.toStream - - /** Method called from equality methods, so that user-defined subclasses can - * refuse to be equal to other collections of the same kind. - * @param that The object with which this $coll should be compared - * @return `true`, if this $coll can possibly equal `that`, `false` otherwise. The test - * takes into consideration only the run-time types of objects but ignores their elements. - */ - override /*TraversableLike*/ def canEqual(that: Any) = true - - override /*TraversableLike*/ def view = new IterableView[A, Repr] { - protected lazy val underlying = self.repr - override def iterator = self.iterator - } - - override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until) -} diff --git a/src/library/scala/collection/IterableOnce.scala b/src/library/scala/collection/IterableOnce.scala new file mode 100644 index 000000000000..71bac9ca0052 --- /dev/null +++ b/src/library/scala/collection/IterableOnce.scala @@ -0,0 +1,1514 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.StringBuilder +import scala.language.implicitConversions +import scala.math.{Numeric, Ordering} +import scala.reflect.ClassTag +import scala.runtime.{AbstractFunction1, AbstractFunction2} + +/** + * A template trait for collections which can be traversed either once only + * or one or more times. + * + * Note: `IterableOnce` does not extend [[IterableOnceOps]]. This is different than the general + * design of the collections library, which uses the following pattern: + * {{{ + * trait Seq extends Iterable with SeqOps + * trait SeqOps extends IterableOps + * + * trait IndexedSeq extends Seq with IndexedSeqOps + * trait IndexedSeqOps extends SeqOps + * }}} + * + * The goal is to provide a minimal interface without any sequential operations. This allows + * third-party extension like Scala parallel collections to integrate at the level of IterableOnce + * without inheriting unwanted implementations. + * + * @define coll collection + * @define ccoll $coll + */ +trait IterableOnce[+A] extends Any { + + /** An [[scala.collection.Iterator]] over the elements of this $coll. + * + * If an `IterableOnce` object is in fact an [[scala.collection.Iterator]], this method always returns itself, + * in its current state, but if it is an [[scala.collection.Iterable]], this method always returns a new + * [[scala.collection.Iterator]]. + */ + def iterator: Iterator[A] + + /** Returns a [[scala.collection.Stepper]] for the elements of this collection. + * + * The Stepper enables creating a Java stream to operate on the collection, see + * [[scala.jdk.StreamConverters]]. For collections holding primitive values, the Stepper can be + * used as an iterator which doesn't box the elements. 
+ * + * The implicit [[scala.collection.StepperShape]] parameter defines the resulting Stepper type according to the + * element type of this collection. + * + * - For collections of `Int`, `Short`, `Byte` or `Char`, an [[scala.collection.IntStepper]] is returned + * - For collections of `Double` or `Float`, a [[scala.collection.DoubleStepper]] is returned + * - For collections of `Long` a [[scala.collection.LongStepper]] is returned + * - For any other element type, an [[scala.collection.AnyStepper]] is returned + * + * Note that this method is overridden in subclasses and the return type is refined to + * `S with EfficientSplit`, for example [[scala.collection.IndexedSeqOps.stepper]]. For Steppers marked with + * [[scala.collection.Stepper.EfficientSplit]], the converters in [[scala.jdk.StreamConverters]] + * allow creating parallel streams, whereas bare Steppers can be converted only to sequential + * streams. + */ + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper[A](iterator)) + } + s.asInstanceOf[S] + } + + /** The number of elements in this $coll, if it can be cheaply computed, + * -1 otherwise. Cheaply usually means: Not requiring a collection traversal. + */ + def knownSize: Int = -1 +} + +final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { + @deprecated("Use .iterator.withFilter(...) instead", "2.13.0") + def withFilter(f: A => Boolean): Iterator[A] = it.iterator.withFilter(f) + + @deprecated("Use .iterator.reduceLeftOption(...) 
instead", "2.13.0") + def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) + + @deprecated("Use .iterator.min instead", "2.13.0") + def min(implicit ord: Ordering[A]): A = it.iterator.min + + @deprecated("Use .iterator.nonEmpty instead", "2.13.0") + def nonEmpty: Boolean = it.iterator.nonEmpty + + @deprecated("Use .iterator.max instead", "2.13.0") + def max(implicit ord: Ordering[A]): A = it.iterator.max + + @deprecated("Use .iterator.reduceRight(...) instead", "2.13.0") + def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) + + @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") + def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + + @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") + def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) + + @deprecated("Use .iterator.sum instead", "2.13.0") + def sum(implicit num: Numeric[A]): A = it.iterator.sum + + @deprecated("Use .iterator.product instead", "2.13.0") + def product(implicit num: Numeric[A]): A = it.iterator.product + + @deprecated("Use .iterator.count(...) instead", "2.13.0") + def count(f: A => Boolean): Int = it.iterator.count(f) + + @deprecated("Use .iterator.reduceOption(...) instead", "2.13.0") + def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) + + @deprecated("Use .iterator.minBy(...) instead", "2.13.0") + def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + + @deprecated("Use .iterator.size instead", "2.13.0") + def size: Int = it.iterator.size + + @deprecated("Use .iterator.forall(...) instead", "2.13.0") + def forall(f: A => Boolean): Boolean = it.iterator.forall(f) + + @deprecated("Use .iterator.collectFirst(...) instead", "2.13.0") + def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) + + @deprecated("Use .iterator.filter(...) 
instead", "2.13.0") + def filter(f: A => Boolean): Iterator[A] = it.iterator.filter(f) + + @deprecated("Use .iterator.exists(...) instead", "2.13.0") + def exists(f: A => Boolean): Boolean = it.iterator.exists(f) + + @deprecated("Use .iterator.copyToBuffer(...) instead", "2.13.0") + def copyToBuffer(dest: mutable.Buffer[A]): Unit = it.iterator.copyToBuffer(dest) + + @deprecated("Use .iterator.reduce(...) instead", "2.13.0") + def reduce(f: (A, A) => A): A = it.iterator.reduce(f) + + @deprecated("Use .iterator.reduceRightOption(...) instead", "2.13.0") + def reduceRightOption(f: (A, A) => A): Option[A] = it.iterator.reduceRightOption(f) + + @deprecated("Use .iterator.toIndexedSeq instead", "2.13.0") + def toIndexedSeq: IndexedSeq[A] = it.iterator.toIndexedSeq + + @deprecated("Use .iterator.foreach(...) instead", "2.13.0") + @`inline` def foreach[U](f: A => U): Unit = it match { + case it: Iterable[A] => it.foreach(f) + case _ => it.iterator.foreach(f) + } + + @deprecated("Use .iterator.to(factory) instead", "2.13.0") + def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) + + @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") + def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + + @deprecated("Use .iterator.toArray", "2.13.0") + def toArray[B >: A: ClassTag]: Array[B] = it match { + case it: Iterable[B] => it.toArray[B] + case _ => it.iterator.toArray[B] + } + + @deprecated("Use .iterator.to(List) instead", "2.13.0") + def toList: immutable.List[A] = immutable.List.from(it) + + @deprecated("Use .iterator.to(Set) instead", "2.13.0") + @`inline` def toSet[B >: A]: immutable.Set[B] = immutable.Set.from(it) + + @deprecated("Use .iterator.to(Iterable) instead", "2.13.0") + @`inline` final def toTraversable: Traversable[A] = toIterable + + @deprecated("Use .iterator.to(Iterable) instead", "2.13.0") + @`inline` final def toIterable: Iterable[A] = Iterable.from(it) + + @deprecated("Use .iterator.to(Seq) instead", "2.13.0") + 
@`inline` def toSeq: immutable.Seq[A] = immutable.Seq.from(it) + + @deprecated("Use .iterator.to(LazyList) instead", "2.13.0") + @`inline` def toStream: immutable.Stream[A] = immutable.Stream.from(it) + + @deprecated("Use .iterator.to(Vector) instead", "2.13.0") + @`inline` def toVector: immutable.Vector[A] = immutable.Vector.from(it) + + @deprecated("Use .iterator.to(Map) instead", "2.13.0") + def toMap[K, V](implicit ev: A <:< (K, V)): immutable.Map[K, V] = + immutable.Map.from(it.asInstanceOf[IterableOnce[(K, V)]]) + + @deprecated("Use .iterator instead", "2.13.0") + @`inline` def toIterator: Iterator[A] = it.iterator + + @deprecated("Use .iterator.isEmpty instead", "2.13.0") + def isEmpty: Boolean = it match { + case it: Iterable[A] => it.isEmpty + case _ => it.iterator.isEmpty + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString(start: String, sep: String, end: String): String = it match { + case it: Iterable[A] => it.mkString(start, sep, end) + case _ => it.iterator.mkString(start, sep, end) + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString(sep: String): String = it match { + case it: Iterable[A] => it.mkString(sep) + case _ => it.iterator.mkString(sep) + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString: String = it match { + case it: Iterable[A] => it.mkString + case _ => it.iterator.mkString + } + + @deprecated("Use .iterator.find instead", "2.13.0") + def find(p: A => Boolean): Option[A] = it.iterator.find(p) + + @deprecated("Use .iterator.foldLeft instead", "2.13.0") + @`inline` def foldLeft[B](z: B)(op: (B, A) => B): B = it.iterator.foldLeft(z)(op) + + @deprecated("Use .iterator.foldRight instead", "2.13.0") + @`inline` def foldRight[B](z: B)(op: (A, B) => B): B = it.iterator.foldRight(z)(op) + + @deprecated("Use .iterator.fold instead", "2.13.0") + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = it.iterator.fold(z)(op) + + @deprecated("Use .iterator.foldLeft instead", 
"2.13.0") + @`inline` def /: [B](z: B)(op: (B, A) => B): B = foldLeft[B](z)(op) + + @deprecated("Use .iterator.foldRight instead", "2.13.0") + @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) + + @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") + def map[B](f: A => B): IterableOnce[B] = it match { + case it: Iterable[A] => it.map(f) + case _ => it.iterator.map(f) + } + + @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") + def flatMap[B](f: A => IterableOnce[B]): IterableOnce[B] = it match { + case it: Iterable[A] => it.flatMap(f) + case _ => it.iterator.flatMap(f) + } + + @deprecated("Use .iterator.sameElements instead", "2.13.0") + def sameElements[B >: A](that: IterableOnce[B]): Boolean = it.iterator.sameElements(that) +} + +object IterableOnce { + @inline implicit def iterableOnceExtensionMethods[A](it: IterableOnce[A]): IterableOnceExtensionMethods[A] = + new IterableOnceExtensionMethods[A](it) + + /** Computes the number of elements to copy to an array from a source IterableOnce + * + * @param srcLen the length of the source collection + * @param destLen the length of the destination array + * @param start the index in the destination array at which to start copying elements to + * @param len the requested number of elements to copy (we may only be able to copy less than this) + * @return the number of elements that will be copied to the destination array + */ + @inline private[collection] def elemsToCopyToArray(srcLen: Int, destLen: Int, start: Int, len: Int): Int = + math.max(math.min(math.min(len, srcLen), destLen - start), 0) + + /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. 
*/ + @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A], + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = + elems match { + case src: Iterable[A] => src.copyToArray[B](xs, start, len) + case src => src.iterator.copyToArray[B](xs, start, len) + } +} + +/** This implementation trait can be mixed into an `IterableOnce` to get the basic methods that are shared between + * `Iterator` and `Iterable`. The `IterableOnce` must support multiple calls to `iterator` but may or may not + * return the same `Iterator` every time. + * + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentReduce + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define orderIndependentReduce + * + * Note: might return different results for different runs, unless either + * of the following conditions is met: (1) the operator is associative, + * and the underlying collection type is ordered; or (2) the operator is + * associative and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define willForceEvaluation + * Note: Even when applied to a view or a lazy collection it will always force the elements. + * @define consumesIterator + * After calling this method, one should discard the iterator it was called + * on. Using it is undefined and subject to change. + * @define undefinedOrder + * The order of applications of the operator is unspecified and may be nondeterministic. + * @define exactlyOnce + * Each element appears exactly once in the computation. 
+ */ +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => + /////////////////////////////////////////////////////////////// Abstract methods that must be implemented + + /** Produces a $coll containing cumulative results of applying the + * operator going left to right, including the initial value. + * + * $willNotTerminateInf + * $orderDependent + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] + + /** Selects all elements of this $coll which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new $coll consisting of all elements of this $coll that satisfy the given + * predicate `p`. The order of the elements is preserved. + */ + def filter(p: A => Boolean): C + + /** Selects all elements of this $coll which do not satisfy a predicate. + * + * @param pred the predicate used to test elements. + * @return a new $coll consisting of all elements of this $coll that do not satisfy the given + * predicate `pred`. Their order may not be preserved. + */ + def filterNot(pred: A => Boolean): C + + /** Selects the first `n` elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the first `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def take(n: Int): C + + /** Selects the longest prefix of elements that satisfy a predicate. + * + * The matching prefix starts with the first element of this $coll, + * and the element following the prefix is the first element that + * does not satisfy the predicate. The matching prefix may be empty, + * so that this method returns an empty $coll. 
+ * + * Example: + * + * {{{ + * scala> List(1, 2, 3, 100, 4).takeWhile(n => n < 10) + * val res0: List[Int] = List(1, 2, 3) + * + * scala> List(1, 2, 3, 100, 4).takeWhile(n => n == 0) + * val res1: List[Int] = List() + * }}} + * + * Use [[span]] to obtain both the prefix and suffix. + * Use [[filter]] to retain only those elements from the entire $coll that satisfy the predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C + + /** Selects all elements except the first `n` ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. + * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def drop(n: Int): C + + /** Selects all elements except the longest prefix that satisfies a predicate. + * + * The matching prefix starts with the first element of this $coll, + * and the element following the prefix is the first element that + * does not satisfy the predicate. The matching prefix may be empty, + * so that this method returns the entire $coll. + * + * Example: + * + * {{{ + * scala> List(1, 2, 3, 100, 4).dropWhile(n => n < 10) + * val res0: List[Int] = List(100, 4) + * + * scala> List(1, 2, 3, 100, 4).dropWhile(n => n == 0) + * val res1: List[Int] = List(1, 2, 3, 100, 4) + * }}} + * + * Use [[span]] to obtain both the prefix and suffix. + * Use [[filterNot]] to drop all elements that satisfy the predicate. + * + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest suffix of this $coll whose first element + * does not satisfy the predicate `p`. + */ + def dropWhile(p: A => Boolean): C + + /** Selects an interval of elements. 
The returned $coll is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * $orderDependent + * + * @param from the lowest index to include from this $coll. + * @param until the lowest index to EXCLUDE from this $coll. + * @return a $coll containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this $coll. + * @example + * `List('a', 'b', 'c', 'd', 'e').slice(1, 3) == List('b', 'c')` + */ + def slice(from: Int, until: Int): C + + /** Builds a new $ccoll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned $ccoll. + * @return a new $ccoll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[B](f: A => B): CC[B] + + /** Builds a new $ccoll by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * For example: + * + * {{{ + * def getWords(lines: Seq[String]): Seq[String] = lines.flatMap(line => line.split("\\W+")) + * }}} + * + * The type of the resulting collection is guided by the static type of this $coll. This might + * cause unexpected results sometimes. For example: + * + * {{{ + * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set + * def lettersOf(words: Seq[String]) = words.flatMap(word => word.toSet) + * + * // lettersOf will return a Set[Char], not a Seq + * def lettersOf(words: Seq[String]) = words.toSet.flatMap(word => word.toSeq) + * + * // xs will be an Iterable[Int] + * val xs = Map("a" -> List(11, 111), "b" -> List(22, 222)).flatMap(_._2) + * + * // ys will be a Map[Int, Int] + * val ys = Map("a" -> List(1 -> 11, 1 -> 111), "b" -> List(2 -> 22, 2 -> 222)).flatMap(_._2) + * }}} + * + * @param f the function to apply to each element. 
+ * @tparam B the element type of the returned collection. + * @return a new $ccoll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B]): CC[B] + + /** Given that the elements of this collection are themselves iterable collections, + * converts this $coll into a $ccoll comprising the elements of these iterable collections. + * + * The resulting collection's type will be guided by the + * type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + * + * @tparam B the type of the elements of each iterable collection. + * @param asIterable an implicit conversion which asserts that the element + * type of this $coll is an `Iterable`. + * @return a new $ccoll resulting from concatenating all element collections. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] + + /** Builds a new $ccoll by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned $coll. + * @return a new $ccoll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: PartialFunction[A, B]): CC[B] + + /** Zips this $coll with its indices. + * + * @return A new $ccoll containing pairs consisting of all elements of this $coll paired with their index. + * Indices start at `0`. 
+ * @example + * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` + */ + def zipWithIndex: CC[(A @uncheckedVariance, Int)] + + /** Splits this $coll into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but possibly more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side effects. + * $orderDependent + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this $coll whose + * elements all satisfy `p`, and the rest of this $coll. + */ + def span(p: A => Boolean): (C, C) + + /** Splits this $coll into a prefix/suffix pair at a given position. + * + * Note: `c splitAt n` is equivalent to (but possibly more efficient than) + * `(c take n, c drop n)`. + * $orderDependent + * + * @param n the position at which to split. + * @return a pair of ${coll}s consisting of the first `n` + * elements of this $coll, and the other elements. + */ + def splitAt(n: Int): (C, C) = { + class Spanner extends runtime.AbstractFunction1[A, Boolean] { + var i = 0 + def apply(a: A) = i < n && { i += 1 ; true } + } + val spanner = new Spanner + span(spanner) + } + + /** Applies a side-effecting function to each element in this collection. + * Strict collections will apply `f` to their elements immediately, while lazy collections + * like Views and LazyLists will only apply `f` on each element if and when that element + * is evaluated, and each time that element is evaluated. + * + * @param f a function to apply to each element in this $coll + * @tparam U the return type of f + * @return The same logical collection as this + */ + def tapEach[U](f: A => U): C + + /////////////////////////////////////////////////////////////// Concrete methods based on iterator + + /** Tests whether this $coll is known to have a finite size. + * All strict collections are known to have finite size. 
For a non-strict + * collection such as `Stream`, the predicate returns `'''true'''` if all + * elements have been computed. It returns `'''false'''` if the stream is + * not yet evaluated to the end. Non-empty Iterators usually return + * `'''false'''` even if they were created from a collection with a known + * finite size. + * + * Note: many collection methods will not work on collections of infinite sizes. + * The typical failure mode is an infinite loop. These methods always attempt a + * traversal without checking first that `hasDefiniteSize` returns `'''true'''`. + * However, checking `hasDefiniteSize` can provide an assurance that size is + * well-defined and non-termination is not a concern. + * + * @deprecated This method is deprecated in 2.13 because it does not provide any + * actionable information. As noted above, even the collection library itself + * does not use it. When there is no guarantee that a collection is finite, it + * is generally best to attempt a computation anyway and document that it will + * not terminate for infinite collections rather than backing out because this + * would prevent performing the computation on collections that are in fact + * finite even though `hasDefiniteSize` returns `false`. + * + * @see method `knownSize` for a more useful alternative + * + * @return `'''true'''` if this collection is known to have finite size, + * `'''false'''` otherwise. + */ + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + def hasDefiniteSize: Boolean = true + + /** Tests whether this $coll can be repeatedly traversed. Always + * true for Iterables and false for Iterators unless overridden. + * + * @return `true` if it is repeatedly traversable, `false` otherwise. + */ + def isTraversableAgain: Boolean = false + + /** Applies `f` to each element for its side effects. + * Note: `U` parameter needed to help scalac's type inference. 
+ */ + def foreach[U](f: A => U): Unit = { + val it = iterator + while(it.hasNext) f(it.next()) + } + + /** Tests whether a predicate holds for all elements of this $coll. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return `true` if this $coll is empty or the given predicate `p` + * holds for all elements of this $coll, otherwise `false`. + */ + def forall(p: A => Boolean): Boolean = { + var res = true + val it = iterator + while (res && it.hasNext) res = p(it.next()) + res + } + + /** Tests whether a predicate holds for at least one element of this $coll. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this $coll, otherwise `false` + */ + def exists(p: A => Boolean): Boolean = { + var res = false + val it = iterator + while (!res && it.hasNext) res = p(it.next()) + res + } + + /** Counts the number of elements in the $coll which satisfy a predicate. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the number of elements satisfying the predicate `p`. + */ + def count(p: A => Boolean): Int = { + var res = 0 + val it = iterator + while (it.hasNext) if (p(it.next())) res += 1 + res + } + + /** Finds the first element of the $coll satisfying a predicate, if any. + * + * $mayNotTerminateInf + * $orderDependent + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the $coll + * that satisfies `p`, or `None` if none exists. 
+ */ + def find(p: A => Boolean): Option[A] = { + val it = iterator + while (it.hasNext) { + val a = it.next() + if (p(a)) return Some(a) + } + None + } + + // in future, move to IndexedSeqOps + private def foldl[X >: A, B](seq: IndexedSeq[X], start: Int, z: B, op: (B, X) => B): B = { + @tailrec def loop(at: Int, end: Int, acc: B): B = + if (at == end) acc + else loop(at + 1, end, op(acc, seq(at))) + loop(start, seq.length, z) + } + + private def foldr[X >: A, B >: X](seq: IndexedSeq[X], op: (X, B) => B): B = { + @tailrec def loop(at: Int, acc: B): B = + if (at == 0) acc + else loop(at - 1, op(seq(at - 1), acc)) + loop(seq.length - 1, seq(seq.length - 1)) + } + + /** Applies the given binary operator `op` to the given initial value `z` and all + * elements of this $coll, going left to right. Returns the initial value if this $coll + * is empty. + * + * "Going left to right" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op( op( ... op( op(z, x,,1,,), x,,2,,) ... ), x,,n,,)`. + * + * If this collection is not ordered, then for each application of the operator, each + * right operand is an element. In addition, the leftmost operand is the initial + * value, and each other left operand is itself an application of the operator. The + * elements of this $coll and the initial value all appear exactly once in the + * computation. + * + * $orderDependent + * $willNotTerminateInf + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to `z` and all elements of this $coll, + * going left to right. Returns `z` if this $coll is empty. 
+ */ + def foldLeft[B](z: B)(op: (B, A) => B): B = this match { + case seq: IndexedSeq[A @unchecked] => foldl[A, B](seq, 0, z, op) + case _ => + var result = z + val it = iterator + while (it.hasNext) { + result = op(result, it.next()) + } + result + } + + /** Applies the given binary operator `op` to all elements of this $coll and the given + * initial value `z`, going right to left. Returns the initial value if this $coll is + * empty. + * + * "Going right to left" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op(x,,1,,, op(x,,2,,, op( ... op(x,,n,,, z) ... )))`. + * + * If this collection is not ordered, then for each application of the operator, each + * left operand is an element. In addition, the rightmost operand is the initial + * value, and each other right operand is itself an application of the operator. The + * elements of this $coll and the initial value all appear exactly once in the + * computation. + * + * $orderDependent + * $willNotTerminateInf + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to all elements of this $coll and `z`, + * going right to left. Returns `z` if this $coll is empty. + */ + def foldRight[B](z: B)(op: (A, B) => B): B = reversed.foldLeft(z)((b, a) => op(a, b)) + + @deprecated("Use foldLeft instead of /:", "2.13.0") + @`inline` final def /: [B](z: B)(op: (B, A) => B): B = foldLeft[B](z)(op) + + @deprecated("Use foldRight instead of :\\", "2.13.0") + @`inline` final def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) + + /** Applies the given binary operator `op` to the given initial value `z` and all + * elements of this $coll. + * + * For each application of the operator, each operand is either an element of this + * $coll, the initial value, or another such application of the operator. 
+ * $undefinedOrder $exactlyOnce The initial value may be used an arbitrary number of + * times, but at least once. + * + * If this collection is ordered, then for any application of the operator, the + * element(s) appearing in the left operand will precede those in the right. + * + * $orderIndependentReduce In either case, it is also necessary that the initial value + * be a neutral value for the operator, e.g. `Nil` for `List` concatenation or `1` for + * multiplication. + * + * The default implementation in `IterableOnce` is equivalent to `foldLeft` but may be + * overridden for more efficient traversal orders. + * + * $willNotTerminateInf + * + * @tparam A1 The type parameter for the binary operator, a supertype of `A`. + * @param z An initial value; may be used an arbitrary number of times in the + * computation of the result; must be a neutral value for `op` for the + * result to always be the same across runs. + * @param op A binary operator; must be associative for the result to always be the + * same across runs. + * @return The result of applying `op` between all the elements and `z`, or `z` + * if this $coll is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Applies the given binary operator `op` to all elements of this $coll. + * + * For each application of the operator, each operand is either an element of this + * $coll or another such application of the operator. $undefinedOrder $exactlyOnce + * + * If this collection is ordered, then for any application of the operator, the + * element(s) appearing in the left operand will precede those in the right. + * + * $orderIndependentReduce + * $willNotTerminateInf + * + * @tparam B The type parameter for the binary operator, a supertype of `A`. + * @param op A binary operator; must be associative for the result to always be the + * same across runs. + * @return The result of applying `op` between all the elements if the $coll is + * nonempty. 
+ * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduce[B >: A](op: (B, B) => B): B = reduceLeft(op) + + /** If this $coll is nonempty, reduces it with the given binary operator `op`. + * + * The behavior is the same as [[reduce]] except that the value is `None` if the $coll + * is empty. $undefinedOrder $exactlyOnce + * + * $orderIndependentReduce + * $willNotTerminateInf + * + * @tparam B A type parameter for the binary operator, a supertype of `A`. + * @param op A binary operator; must be associative for the result to always be the + * same across runs. + * @return The result of reducing this $coll with `op` if the $coll is nonempty, + * inside a `Some`, and `None` otherwise. + */ + def reduceOption[B >: A](op: (B, B) => B): Option[B] = reduceLeftOption(op) + + /** Applies the given binary operator `op` to all elements of this $coll, going left to + * right. + * + * "Going left to right" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op( op( op( ... op(x,,1,,, x,,2,,) ... ), x,,n-1,,), x,,n,,)`. + * + * If this collection is not ordered, then for each application of the operator, each + * right operand is an element. In addition, the leftmost operand is the first element + * of this $coll and each other left operand is itself an application of the + * operator. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of applying `op` to all elements of this $coll, going + * left to right. + * @throws UnsupportedOperationException if this $coll is empty. 
+ */ + def reduceLeft[B >: A](op: (B, A) => B): B = this match { + case seq: IndexedSeq[A @unchecked] if seq.length > 0 => foldl(seq, 1, seq(0), op) + case _ if knownSize == 0 => throw new UnsupportedOperationException("empty.reduceLeft") + case _ => reduceLeftIterator[B](throw new UnsupportedOperationException("empty.reduceLeft"))(op) + } + private final def reduceLeftIterator[B >: A](onEmpty: => B)(op: (B, A) => B): B = { + val it = iterator + if (it.hasNext) { + var acc: B = it.next() + while (it.hasNext) + acc = op(acc, it.next()) + acc + } + else onEmpty + } + + /** Applies the given binary operator `op` to all elements of this $coll, going right to + * left. + * + * "Going right to left" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op(x,,1,,, op(x,,2,,, op( ... op(x,,n-1,,, x,,n,,) ... )))`. + * + * If this collection is not ordered, then for each application of the operator, each + * left operand is an element. In addition, the rightmost operand is the last element + * of this $coll and each other right operand is itself an application of the + * operator. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of applying `op` to all elements of this $coll, going + * right to left. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduceRight[B >: A](op: (A, B) => B): B = this match { + case seq: IndexedSeq[A @unchecked] if seq.length > 0 => foldr[A, B](seq, op) + case _ if knownSize == 0 => throw new UnsupportedOperationException("empty.reduceRight") + case _ => reversed.reduceLeft[B]((x, y) => op(y, x)) // reduceLeftIterator + } + + /** If this $coll is nonempty, reduces it with the given binary operator `op`, going + * left to right. 
+ * + * The behavior is the same as [[reduceLeft]] except that the value is `None` if the + * $coll is empty. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of reducing this $coll with `op` going left to right if + * the $coll is nonempty, inside a `Some`, and `None` otherwise. + */ + def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = + knownSize match { + case -1 => reduceLeftOptionIterator[B](op) + case 0 => None + case _ => Some(reduceLeft(op)) + } + private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X])(op: (B, X) => B): Option[B] = { + if (it.hasNext) { + var acc: B = it.next() + while (it.hasNext) + acc = op(acc, it.next()) + Some(acc) + } + else None + } + + /** If this $coll is nonempty, reduces it with the given binary operator `op`, going + * right to left. + * + * The behavior is the same as [[reduceRight]] except that the value is `None` if the + * $coll is empty. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of reducing this $coll with `op` going right to left if + * the $coll is nonempty, inside a `Some`, and `None` otherwise. + */ + def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = + knownSize match { + case -1 => reduceOptionIterator[A, B](reversed.iterator)((x, y) => op(y, x)) + case 0 => None + case _ => Some(reduceRight(op)) + } + + /** Tests whether the $coll is empty. + * + * Note: The default implementation creates and discards an iterator. 
+ * + * Note: Implementations in subclasses that are not repeatedly iterable must take + * care not to consume any elements when `isEmpty` is called. + * + * @return `true` if the $coll contains no elements, `false` otherwise. + */ + def isEmpty: Boolean = + knownSize match { + case -1 => !iterator.hasNext + case 0 => true + case _ => false + } + + /** Tests whether the $coll is not empty. + * + * @return `true` if the $coll contains at least one element, `false` otherwise. + */ + @deprecatedOverriding("nonEmpty is defined as !isEmpty; override isEmpty instead", "2.13.0") + def nonEmpty: Boolean = !isEmpty + + /** The size of this $coll. + * + * $willNotTerminateInf + * + * @return the number of elements in this $coll. + */ + def size: Int = + if (knownSize >= 0) knownSize + else { + val it = iterator + var len = 0 + while (it.hasNext) { len += 1; it.next() } + len + } + + @deprecated("Use `dest ++= coll` instead", "2.13.0") + @inline final def copyToBuffer[B >: A](dest: mutable.Buffer[B]): Unit = dest ++= this + + /** Copies elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with values of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + + /** Copies elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with values of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached. 
+ * + * @param xs the array to fill. + * @param start the starting index of xs. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with at most `len` elements of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index of xs. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val it = iterator + var i = start + val end = start + math.min(len, xs.length - start) + while (i < end && it.hasNext) { + xs(i) = it.next() + i += 1 + } + i - start + } + + /** Sums the elements of this collection. + * + * The default implementation uses `reduce` for a known non-empty collection, `foldLeft` otherwise. + * + * $willNotTerminateInf + * + * @param num an implicit parameter defining a set of numeric operations + * which includes the `+` operator to be used in forming the sum. + * @tparam B the result type of the `+` operator. + * @return the sum of all elements of this $coll with respect to the `+` operator in `num`. 
+ */ + def sum[B >: A](implicit num: Numeric[B]): B = + knownSize match { + case -1 => foldLeft(num.zero)(num.plus) + case 0 => num.zero + case _ => reduce(num.plus) + } + + /** Multiplies together the elements of this collection. + * + * The default implementation uses `reduce` for a known non-empty collection, `foldLeft` otherwise. + * + * $willNotTerminateInf + * + * @param num an implicit parameter defining a set of numeric operations + * which includes the `*` operator to be used in forming the product. + * @tparam B the result type of the `*` operator. + * @return the product of all elements of this $coll with respect to the `*` operator in `num`. + */ + def product[B >: A](implicit num: Numeric[B]): B = + knownSize match { + case -1 => foldLeft(num.one)(num.times) + case 0 => num.one + case _ => reduce(num.times) + } + + /** Finds the smallest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the smallest element of this $coll with respect to the ordering `ord`. + * + */ + def min[B >: A](implicit ord: Ordering[B]): A = + knownSize match { + case -1 => reduceLeftIterator[A](throw new UnsupportedOperationException("empty.min"))(ord.min) + case 0 => throw new UnsupportedOperationException("empty.min") + case _ => reduceLeft(ord.min) + } + + /** Finds the smallest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @return an option value containing the smallest element of this $coll + * with respect to the ordering `ord`. + */ + def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = + knownSize match { + case -1 => reduceLeftOptionIterator[A](ord.min) + case 0 => None + case _ => Some(reduceLeft(ord.min)) + } + + /** Finds the largest element. 
+ * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the largest element of this $coll with respect to the ordering `ord`. + */ + def max[B >: A](implicit ord: Ordering[B]): A = + knownSize match { + case -1 => reduceLeftIterator[A](throw new UnsupportedOperationException("empty.max"))(ord.max) + case 0 => throw new UnsupportedOperationException("empty.max") + case _ => reduceLeft(ord.max) + } + + /** Finds the largest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @return an option value containing the largest element of this $coll with + * respect to the ordering `ord`. + */ + def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = + knownSize match { + case -1 => reduceLeftOptionIterator[A](ord.max) + case 0 => None + case _ => Some(reduceLeft(ord.max)) + } + + /** Finds the first element which yields the largest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the first element of this $coll with the largest value measured by function `f` + * with respect to the ordering `cmp`. 
+ */ + def maxBy[B](f: A => B)(implicit ord: Ordering[B]): A = + knownSize match { + case 0 => throw new UnsupportedOperationException("empty.maxBy") + case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result + } + + private class Maximized[X, B](descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + var maxElem: X = null.asInstanceOf[X] + var maxF: B = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } + + /** Finds the first element which yields the largest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @return an option value containing the first element of this $coll with the + * largest value measured by function `f` with respect to the ordering `cmp`. + */ + def maxByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + knownSize match { + case 0 => None + case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption + } + + /** Finds the first element which yields the smallest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @throws UnsupportedOperationException if this $coll is empty. 
+ * @return the first element of this $coll with the smallest value measured by function `f` + * with respect to the ordering `cmp`. + */ + def minBy[B](f: A => B)(implicit ord: Ordering[B]): A = + knownSize match { + case 0 => throw new UnsupportedOperationException("empty.minBy") + case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result + } + + /** Finds the first element which yields the smallest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @return an option value containing the first element of this $coll + * with the smallest value measured by function `f` + * with respect to the ordering `cmp`. + */ + def minByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + knownSize match { + case 0 => None + case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption + } + + /** Finds the first element of the $coll for which the given partial + * function is defined, and applies the partial function to it. + * + * $mayNotTerminateInf + * $orderDependent + * + * @param pf the partial function + * @return an option value containing pf applied to the first + * value for which it is defined, or `None` if none exists. + * @example `Seq("a", 1, 5L).collectFirst { case x: Int => x*10 } = Some(10)` + */ + def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = { + // Presumably the fastest way to get in and out of a partial function is for a sentinel function to return itself + // (Tested to be lower-overhead than runWith. 
Would be better yet to not need to (formally) allocate it) + val sentinel: scala.Function1[A, Any] = new AbstractFunction1[A, Any] { + def apply(a: A): AbstractFunction1[A, Any] = this + } + val it = iterator + while (it.hasNext) { + val x = pf.applyOrElse(it.next(), sentinel) + if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B]) + } + None + } + + /** Aggregates the results of applying an operator to subsequent elements. + * + * Since this method degenerates to `foldLeft` for sequential (non-parallel) collections, + * where the combining operation is ignored, it is advisable to prefer `foldLeft` for that case. + * + * For [[https://github.com/scala/scala-parallel-collections parallel collections]], + * use the `aggregate` method specified by `scala.collection.parallel.ParIterableLike`. + * + * @param z the start value, a neutral element for `seqop`. + * @param seqop the binary operator used to accumulate the result. + * @param combop an associative operator for combining sequential results, unused for sequential collections. + * @tparam B the result type, produced by `seqop`, `combop`, and by this function as a final result. + */ + @deprecated("For sequential collections, prefer `foldLeft(z)(seqop)`. For parallel collections, use `ParIterableLike#aggregate`.", "2.13.0") + def aggregate[B](z: => B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) + + /** Tests whether every element of this collection's iterator relates to the + * corresponding element of another collection by satisfying a test predicate. 
+ * + * $willNotTerminateInf + * + * @param that the other collection + * @param p the test predicate, which relates elements from both collections + * @tparam B the type of the elements of `that` + * @return `true` if both collections have the same length and + * `p(x, y)` is `true` for all corresponding elements `x` of this iterator + * and `y` of `that`, otherwise `false` + */ + def corresponds[B](that: IterableOnce[B])(p: (A, B) => Boolean): Boolean = { + val a = iterator + val b = that.iterator + + while (a.hasNext && b.hasNext) { + if (!p(a.next(), b.next())) return false + } + + a.hasNext == b.hasNext + } + + /** Displays all elements of this $coll in a string using start, end, and separator strings. + * + * Delegates to addString, which can be overridden. + * + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return a string representation of this $coll. The resulting string + * begins with the string `start` and ends with the string + * `end`. Inside, the string representations (w.r.t. the method + * `toString`) of all elements of this $coll are separated by + * the string `sep`. + * + * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"` + */ + final def mkString(start: String, sep: String, end: String): String = + if (knownSize == 0) start + end + else addString(new StringBuilder(), start, sep, end).result() + + /** Displays all elements of this $coll in a string using a separator string. + * + * Delegates to addString, which can be overridden. + * + * @param sep the separator string. + * @return a string representation of this $coll. In the resulting string + * the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * @example `List(1, 2, 3).mkString("|") = "1|2|3"` + */ + @inline final def mkString(sep: String): String = mkString("", sep, "") + + /** Displays all elements of this $coll in a string. 
+ * + * Delegates to addString, which can be overridden. + * + * @return a string representation of this $coll. In the resulting string + * the string representations (w.r.t. the method `toString`) + * of all elements of this $coll follow each other without any + * separator string. + */ + @inline final def mkString: String = mkString("") + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> a.addString(b , "List(" , ", " , ")") + * res5: StringBuilder = List(1, 2, 3, 4) + * }}} + * + * @param b the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + def addString(b: StringBuilder, start: String, sep: String, end: String): b.type = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val it = iterator + if (it.hasNext) { + jsb.append(it.next()) + while (it.hasNext) { + if (sep.length != 0) jsb.append(sep) + jsb.append(it.next()) + } + } + if (end.length != 0) jsb.append(end) + b + } + + /** Appends all elements of this $coll to a string builder using a separator string. + * The written text consists of the string representations (w.r.t. the method `toString`) + * of all elements of this $coll, separated by the string `sep`. 
+ * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> a.addString(b, ", ") + * res0: StringBuilder = 1, 2, 3, 4 + * }}} + * + * @param b the string builder to which elements are appended. + * @param sep the separator string. + * @return the string builder `b` to which elements were appended. + */ + @inline final def addString(b: StringBuilder, sep: String): b.type = addString(b, "", sep, "") + + /** Appends all elements of this $coll to a string builder. + * The written text consists of the string representations (w.r.t. the method + * `toString`) of all elements of this $coll without any separator string. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> val h = a.addString(b) + * h: StringBuilder = 1234 + * }}} + * + * @param b the string builder to which elements are appended. + * @return the string builder `b` to which elements were appended. + */ + @inline final def addString(b: StringBuilder): b.type = addString(b, "") + + /** Given a collection factory `factory`, converts this $coll to the appropriate + * representation for the current element type `A`. Example uses: + * + * {{{ + * xs.to(List) + * xs.to(ArrayBuffer) + * xs.to(BitSet) // for xs: Iterable[Int] + * }}} + */ + def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) + + @deprecated("Use .iterator instead of .toIterator", "2.13.0") + @`inline` final def toIterator: Iterator[A] = iterator + + /** Converts this $coll to a `List`. + * + * @return This $coll as a `List[A]`. + */ + def toList: immutable.List[A] = immutable.List.from(this) + + /** Converts this $coll to a `Vector`. + * + * @return This $coll as a `Vector[A]`. 
+ */ + def toVector: immutable.Vector[A] = immutable.Vector.from(this) + + /** Converts this $coll to a `Map`, given an implicit coercion from the $coll's type to a key-value tuple. + * + * @tparam K The key type for the resulting map. + * @tparam V The value type for the resulting map. + * @param ev An implicit coercion from `A` to `[K, V]`. + * @return This $coll as a `Map[K, V]`. + */ + def toMap[K, V](implicit ev: A <:< (K, V)): immutable.Map[K, V] = + immutable.Map.from(this.asInstanceOf[IterableOnce[(K, V)]]) + + /** Converts this $coll to a `Set`. + * + * @tparam B The type of elements of the result, a supertype of `A`. + * @return This $coll as a `Set[B]`. + */ + def toSet[B >: A]: immutable.Set[B] = immutable.Set.from(this) + + /** @return This $coll as a `Seq[A]`. This is equivalent to `to(Seq)` but might be faster. + */ + def toSeq: immutable.Seq[A] = immutable.Seq.from(this) + + /** Converts this $coll to an `IndexedSeq`. + * + * @return This $coll as an `IndexedSeq[A]`. + */ + def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq.from(this) + + @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") + @inline final def toStream: immutable.Stream[A] = to(immutable.Stream) + + /** Converts this $coll to a `Buffer`. + * + * @tparam B The type of elements of the result, a supertype of `A`. + * @return This $coll as a `Buffer[B]`. + */ + @inline final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + + /** Converts this $coll to an `Array`. + * + * Implementation note: DO NOT call [[Array.from]] from this method. + * + * @tparam B The type of elements of the result, a supertype of `A`. + * @return This $coll as an `Array[B]`. 
+ */ + def toArray[B >: A: ClassTag]: Array[B] = + if (knownSize >= 0) { + val destination = new Array[B](knownSize) + @annotation.unused val copied = copyToArray(destination, 0) + //assert(copied == destination.length) + destination + } + else mutable.ArrayBuilder.make[B].addAll(this).result() + + // For internal use + protected def reversed: Iterable[A] = { + var xs: immutable.List[A] = immutable.Nil + val it = iterator + while (it.hasNext) xs = it.next() :: xs + xs + } +} diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala deleted file mode 100644 index 4fab88fee13c..000000000000 --- a/src/library/scala/collection/IterableProxy.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** This trait implements a proxy for iterable objects. It forwards all calls - * to a different iterable object. - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait IterableProxy[+A] extends Iterable[A] with IterableProxyLike[A, Iterable[A]] diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala deleted file mode 100644 index 7847455af9c1..000000000000 --- a/src/library/scala/collection/IterableProxyLike.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import generic._ - -// Methods could be printed by cat IterableLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for Iterable objects. It forwards - * all calls to a different Iterable object. - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]] - extends IterableLike[A, Repr] - with TraversableProxyLike[A, Repr] { - override def iterator: Iterator[A] = self.iterator - override def grouped(size: Int): Iterator[Repr] = self.grouped(size) - override def sliding(size: Int): Iterator[Repr] = self.sliding(size) - override def sliding(size: Int, step: Int): Iterator[Repr] = self.sliding(size, step) - override def takeRight(n: Int): Repr = self.takeRight(n) - override def dropRight(n: Int): Repr = self.dropRight(n) - override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf) - override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf) - override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = self.zipWithIndex(bf) - override def sameElements[B >: A](that: GenIterable[B]): Boolean = self.sameElements(that) - override def view = self.view - override def view(from: Int, until: Int) = self.view(from, until) -} diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala deleted file mode 100644 index 0bae07f3da00..000000000000 --- a/src/library/scala/collection/IterableView.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import TraversableView.NoBuilder - -/** A base trait for non-strict views of `Iterable`s. - * $iterableViewInfo - */ -trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]] - -/** An object containing the necessary implicit definitions to make - * `IterableView`s work. Its definitions are generally not accessed directly by clients. - */ -object IterableView { - type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]} - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IterableView[A, Iterable[_]]] = - new CanBuildFrom[Coll, A, IterableView[A, Iterable[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } -} diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala deleted file mode 100644 index a60ab4cf4903..000000000000 --- a/src/library/scala/collection/IterableViewLike.scala +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import immutable.Stream -import scala.language.implicitConversions - -/** A template trait for non-strict views of iterable collections. - * $iterableViewInfo - * - * @define iterableViewInfo - * $viewInfo - * All views for iterable collections are defined by re-interpreting the `iterator` method. 
- * - * @author Martin Odersky - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - * @tparam This the type of the view itself - */ -trait IterableViewLike[+A, - +Coll, - +This <: IterableView[A, Coll] with IterableViewLike[A, Coll, This]] - extends Iterable[A] - with IterableLike[A, This] - with TraversableView[A, Coll] - with TraversableViewLike[A, Coll, This] -{ self => - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */ - private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B] - - trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B] { - def iterator: Iterator[B] - override def foreach[U](f: B => U): Unit = iterator foreach f - override def toString = viewToString - override def isEmpty = !iterator.hasNext - } - - trait EmptyView extends Transformed[Nothing] with super.EmptyView { - final def iterator: Iterator[Nothing] = Iterator.empty - } - - trait Forced[B] extends super.Forced[B] with Transformed[B] { - def iterator = forced.iterator - } - - trait Sliced extends super.Sliced with Transformed[A] { - def iterator: Iterator[A] = self.iterator.slice(from, until) - } - - trait Mapped[B] extends super.Mapped[B] with Transformed[B] { - def iterator = self.iterator map mapping - } - - trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] { - def iterator: Iterator[B] = self.iterator flatMap mapping - } - - trait Appended[B >: A] extends super.Appended[B] with Transformed[B] { - def iterator = self.iterator ++ rest - } - - trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] { - def iterator = fst.toIterator ++ self - } - - trait Filtered extends super.Filtered with Transformed[A] { - def iterator = self.iterator filter pred - } - - trait TakenWhile extends super.TakenWhile with 
Transformed[A] { - def iterator = self.iterator takeWhile pred - } - - trait DroppedWhile extends super.DroppedWhile with Transformed[A] { - def iterator = self.iterator dropWhile pred - } - - trait Zipped[B] extends Transformed[(A, B)] { - protected[this] val other: GenIterable[B] - def iterator: Iterator[(A, B)] = self.iterator zip other.iterator - final override protected[this] def viewIdentifier = "Z" - } - - trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] { - protected[this] val other: GenIterable[B] - protected[this] val thisElem: A1 - protected[this] val thatElem: B - final override protected[this] def viewIdentifier = "Z" - def iterator: Iterator[(A1, B)] = - self.iterator.zipAll(other.iterator, thisElem, thatElem) - } - - private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This] - - /** Boilerplate method, to override in each subclass - * This method could be eliminated if Scala had virtual classes - */ - protected def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B] - protected def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new { - val other: GenIterable[B] = that - val thisElem = _thisElem - val thatElem = _thatElem - } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B] - protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] - protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] - protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B] - protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] - protected override def 
newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] - protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered - protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced - protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile - protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile - - // After adding take and drop overrides to IterableLike, these overrides (which do nothing - // but duplicate the implementation in TraversableViewLike) had to be added to prevent the - // overrides in IterableLike from besting the overrides in TraversableViewLike when mixed - // together in e.g. SeqViewLike. This is a suboptimal situation. Examples of failing tests - // are run/bug2876 and run/viewtest. 
- protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n)) - protected override def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue)) - override def drop(n: Int): This = newDropped(n) - override def take(n: Int): This = newTaken(n) - - override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = { - newZipped(that).asInstanceOf[That] -// was: val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newZipped(that).asInstanceOf[That] -// else super.zip[A1, B, That](that)(bf) - } - - override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That = - zip[A1, Int, That](Stream from 0)(bf) - - override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That = - newZippedAll(that, thisElem, thatElem).asInstanceOf[That] - - override def grouped(size: Int): Iterator[This] = - self.iterator grouped size map (x => newForced(x).asInstanceOf[This]) - - override def sliding(size: Int, step: Int): Iterator[This] = - self.iterator.sliding(size, step) map (x => newForced(x).asInstanceOf[This]) - - override def sliding(size: Int): Iterator[This] = - sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented. - - override def dropRight(n: Int): This = - take(thisSeq.length - math.max(n, 0)) - - override def takeRight(n: Int): This = - drop(thisSeq.length - math.max(n, 0)) - - override def stringPrefix = "IterableView" -} diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 6c6c4d0a2578..7c288bf58e9f 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,569 +10,514 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection -import mutable.ArrayBuffer -import scala.annotation.{tailrec, migration} -import scala.annotation.unchecked.{uncheckedVariance => uV} -import immutable.Stream +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.runtime.Statics -/** The `Iterator` object provides various functions for creating specialized iterators. - * - * @author Martin Odersky - * @author Matthias Zenger - * @since 2.8 - */ -object Iterator { +/** Iterators are data structures that allow to iterate over a sequence + * of elements. They have a `hasNext` method for checking + * if there is a next element available, and a `next` method + * which returns the next element and advances the iterator. + * + * An iterator is mutable: most operations on it change its state. While it is often used + * to iterate through the elements of a collection, it can also be used without + * being backed by any collection (see constructors on the companion object). + * + * It is of particular importance to note that, unless stated otherwise, ''one should never + * use an iterator after calling a method on it''. The two most important exceptions + * are also the sole abstract methods: `next` and `hasNext`. + * + * Both these methods can be called any number of times without having to discard the + * iterator. Note that even `hasNext` may cause mutation -- such as when iterating + * from an input stream, where it will block until the stream is closed or some + * input becomes available. 
+ * + * Consider this example for safe and unsafe use: + * + * {{{ + * def f[A](it: Iterator[A]) = { + * if (it.hasNext) { // Safe to reuse "it" after "hasNext" + * it.next() // Safe to reuse "it" after "next" + * val remainder = it.drop(2) // it is *not* safe to use "it" again after this line! + * remainder.take(2) // it is *not* safe to use "remainder" after this line! + * } else it + * } + * }}} + * + * @define mayNotTerminateInf + * Note: may not terminate for infinite iterators. + * @define preservesIterator + * The iterator remains valid for further use whatever result is returned. + * @define consumesIterator + * After calling this method, one should discard the iterator it was called + * on. Using it is undefined and subject to change. + * @define consumesAndProducesIterator + * After calling this method, one should discard the iterator it was called + * on, and use only the iterator that was returned. Using the old iterator + * is undefined, subject to change, and may result in changes to the new + * iterator as well. + * @define consumesTwoAndProducesOneIterator + * After calling this method, one should discard the iterator it was called + * on, as well as the one passed as a parameter, and use only the iterator + * that was returned. Using the old iterators is undefined, subject to change, + * and may result in changes to the new iterator as well. + * @define consumesOneAndProducesTwoIterators + * After calling this method, one should discard the iterator it was called + * on, and use only the iterators that were returned. Using the old iterator + * is undefined, subject to change, and may result in changes to the new + * iterators as well. 
+ * @define coll iterator + */ +trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { self => - /** With the advent of `TraversableOnce` and `Iterator`, it can be useful to have a builder which - * operates on `Iterator`s so they can be treated uniformly along with the collections. - * See `scala.util.Random.shuffle` for an example. - */ - implicit def IteratorCanBuildFrom[A] = new TraversableOnce.BufferedCanBuildFrom[A, Iterator] { - def bufferToColl[B](coll: ArrayBuffer[B]) = coll.iterator - def traversableToColl[B](t: GenTraversable[B]) = t.toIterator - } + /** Check if there is a next element available. + * + * @return `true` if there is a next element, `false` otherwise + * @note Reuse: $preservesIterator + */ + def hasNext: Boolean - /** The iterator which produces no values. */ - val empty: Iterator[Nothing] = new AbstractIterator[Nothing] { - def hasNext: Boolean = false - def next(): Nothing = throw new NoSuchElementException("next on empty iterator") - } + @deprecated("hasDefiniteSize on Iterator is the same as isEmpty", "2.13.0") + @`inline` override final def hasDefiniteSize = isEmpty + + /** Return the next element and advance the iterator. + * + * @throws NoSuchElementException if there is no next element. + * @return the next element. + * @note Reuse: Advances the iterator, which may exhaust the elements. It is valid to + * make additional calls on the iterator. + */ + @throws[NoSuchElementException] + def next(): A - /** Creates an iterator which produces a single element. - * '''Note:''' Equivalent, but more efficient than Iterator(elem) - * - * @param elem the element - * @return An iterator which produces `elem` on the first call to `next`, - * and which has no further elements. 
- */ - def single[A](elem: A): Iterator[A] = new AbstractIterator[A] { - private var hasnext = true - def hasNext: Boolean = hasnext - def next(): A = - if (hasnext) { hasnext = false; elem } - else empty.next() - } + @inline final def iterator = this - /** Creates an iterator with given elements. - * - * @param elems The elements returned one-by-one from the iterator - * @return An iterator which produces the given elements on the - * first calls to `next`, and which has no further elements. - */ - def apply[A](elems: A*): Iterator[A] = elems.iterator + /** Wraps the value of `next()` in an option. + * + * @return `Some(next)` if a next element exists, `None` otherwise. + */ + def nextOption(): Option[A] = if (hasNext) Some(next()) else None - /** Creates iterator that produces the results of some element computation a number of times. - * - * @param len the number of elements returned by the iterator. - * @param elem the element computation - * @return An iterator that produces the results of `n` evaluations of `elem`. - */ - def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { - private var i = 0 - def hasNext: Boolean = i < len - def next(): A = - if (hasNext) { i += 1; elem } - else empty.next() - } - - /** Creates an iterator producing the values of a given function over a range of integer values starting from 0. - * - * @param end The number of elements returned by the iterator - * @param f The function computing element values - * @return An iterator that produces the values `f(0), ..., f(n -1)`. - */ - def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { - private var i = 0 - def hasNext: Boolean = i < end - def next(): A = - if (hasNext) { val result = f(i); i += 1; result } - else empty.next() - } + /** Tests whether this iterator contains a given value as an element. + * $mayNotTerminateInf + * + * @param elem the element to test. 
+ * @return `true` if this iterator produces some value that is + * is equal (as determined by `==`) to `elem`, `false` otherwise. + * @note Reuse: $consumesIterator + */ + def contains(elem: Any): Boolean = exists(_ == elem) // Note--this seems faster than manual inlining! - /** Creates nn iterator returning successive values in some integer interval. - * - * @param start the start value of the iterator - * @param end the end value of the iterator (the first value NOT returned) - * @return the iterator producing values `start, start + 1, ..., end - 1` - */ - def range(start: Int, end: Int): Iterator[Int] = range(start, end, 1) + /** Creates a buffered iterator from this iterator. + * + * @see [[scala.collection.BufferedIterator]] + * @return a buffered iterator producing the same values as this iterator. + * @note Reuse: $consumesAndProducesIterator + */ + def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { + private[this] var hd: A = _ + private[this] var hdDefined: Boolean = false - /** An iterator producing equally spaced values in some integer interval. 
- * - * @param start the start value of the iterator - * @param end the end value of the iterator (the first value NOT returned) - * @param step the increment value of the iterator (must be positive or negative) - * @return the iterator producing values `start, start + step, ...` up to, but excluding `end` - */ - def range(start: Int, end: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { - if (step == 0) throw new IllegalArgumentException("zero step") - private var i = start - private var hasOverflowed = false - def hasNext: Boolean = { - (step <= 0 || i < end) && (step >= 0 || i > end) && !hasOverflowed - } - def next(): Int = - if (hasNext) { - val result = i - val nextValue = i + step - hasOverflowed = (step > 0) == nextValue < i - i = nextValue - result + def head: A = { + if (!hdDefined) { + hd = next() + hdDefined = true } - else empty.next() - } - - /** Creates an infinite iterator that repeatedly applies a given function to the previous result. - * - * @param start the start value of the iterator - * @param f the function that's repeatedly applied - * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { - private[this] var first = true - private[this] var acc = start - def hasNext: Boolean = true - def next(): T = { - if (first) first = false - else acc = f(acc) - - acc + hd } - } - /** Creates an infinite-length iterator which returns successive values from some start value. 
+ override def knownSize = { + val thisSize = self.knownSize + if (thisSize >= 0 && hdDefined) thisSize + 1 + else thisSize + } - * @param start the start value of the iterator - * @return the iterator producing the infinite sequence of values `start, start + 1, start + 2, ...` - */ - def from(start: Int): Iterator[Int] = from(start, 1) + def hasNext = + hdDefined || self.hasNext - /** Creates an infinite-length iterator returning values equally spaced apart. - * - * @param start the start value of the iterator - * @param step the increment between successive values - * @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...` - */ - def from(start: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { - private var i = start - def hasNext: Boolean = true - def next(): Int = { val result = i; i += step; result } + def next() = + if (hdDefined) { + hdDefined = false + hd + } else self.next() } - /** Creates an infinite-length iterator returning the results of evaluating an expression. - * The expression is recomputed for every element. + /** A flexible iterator for transforming an `Iterator[A]` into an + * `Iterator[Seq[A]]`, with configurable sequence size, step, and + * strategy for dealing with remainder elements which don't fit evenly + * into the last group. * - * @param elem the element computation. - * @return the iterator containing an infinite number of results of evaluating `elem`. + * A `GroupedIterator` is yielded by `grouped` and by `sliding`, + * where the `step` may differ from the group `size`. */ - def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { - def hasNext = true - def next = elem - } + class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { - /** Creates an iterator to which other iterators can be appended efficiently. - * Nested ConcatIterators are merged to avoid blowing the stack. 
- */ - private final class ConcatIterator[+A](private var current: Iterator[A @uV]) extends Iterator[A] { - private var tail: ConcatIteratorCell[A @uV] = null - private var last: ConcatIteratorCell[A @uV] = null - private var currentHasNextChecked = false + require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - // Advance current to the next non-empty iterator - // current is set to null when all iterators are exhausted - @tailrec - private[this] def advance(): Boolean = { - if (tail eq null) { - current = null - last = null - false - } - else { - current = tail.headIterator - if (last eq tail) last = last.tail - tail = tail.tail - merge() - if (currentHasNextChecked) true - else if ((current ne null) && current.hasNext) { - currentHasNextChecked = true - true - } else advance() - } + private[this] var buffer: Array[B] = null // current result + private[this] var prev: Array[B] = null // if sliding, overlap from previous result + private[this] var first = true // if !first, advancing may skip ahead + private[this] var filled = false // whether the buffer is "hot" + private[this] var partial = true // whether to emit partial sequence + private[this] var padding: () => B = null // what to pad short sequences with + private[this] def pad = padding != null // irrespective of partial flag + private[this] def newBuilder = { + val b = ArrayBuilder.make[Any] + val k = self.knownSize + if (k > 0) b.sizeHint(k min size) // if k < size && !partial, buffer will grow on padding + b } - // If the current iterator is a ConcatIterator, merge it into this one - @tailrec - private[this] def merge(): Unit = - if (current.isInstanceOf[ConcatIterator[_]]) { - val c = current.asInstanceOf[ConcatIterator[A]] - current = c.current - currentHasNextChecked = c.currentHasNextChecked - if (c.tail ne null) { - if (last eq null) last = c.last - c.last.tail = tail - tail = c.tail - } - merge() - } - - def hasNext = - if (currentHasNextChecked) true - else 
if (current eq null) false - else if (current.hasNext) { - currentHasNextChecked = true - true - } else advance() - - def next() = - if (hasNext) { - currentHasNextChecked = false - current.next() - } else Iterator.empty.next() - - override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = { - val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] - if(tail eq null) { - tail = c - last = c - } else { - last.tail = c - last = c - } - if(current eq null) current = Iterator.empty + /** Specifies a fill element used to pad a partial segment + * so that all segments have the same size. + * + * Any previous setting of `withPartial` is ignored, + * as the last group will always be padded to `size` elements. + * + * The by-name argument is evaluated for each fill element. + * + * @param x The element that will be appended to the last segment, if necessary. + * @return The same iterator, and ''not'' a new iterator. + * @note This method mutates the iterator it is called on, which can be safely used afterwards. + * @note This method is mutually exclusive with `withPartial`. + * @group Configuration + */ + def withPadding(x: => B): this.type = { + padding = () => x + partial = true // redundant, as padding always results in complete segment + this + } + /** Specify whether to drop the last segment if it has less than `size` elements. + * + * If this flag is `false`, elements of a partial segment at the end of the iterator + * are not returned. + * + * The flag defaults to `true`. + * + * Any previous setting of `withPadding` is ignored, + * as the last group will never be padded. + * A partial segment is either retained or dropped, per the flag. + * + * @param x `true` if partial segments may be returned, `false` otherwise. + * @return The same iterator, and ''not'' a new iterator. + * @note This method mutates the iterator it is called on, which can be safely used afterwards. 
+ * @note This method is mutually exclusive with `withPadding`. + * @group Configuration + */ + def withPartial(x: Boolean): this.type = { + partial = x + padding = null this } - } - - private[this] final class ConcatIteratorCell[A](head: => GenTraversableOnce[A], var tail: ConcatIteratorCell[A]) { - def headIterator: Iterator[A] = head.toIterator - } - /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. - * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. - */ - private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { - private var remaining = limit - private var dropping = start - @inline private def unbounded = remaining < 0 - private def skip(): Unit = - while (dropping > 0) { - if (underlying.hasNext) { - underlying.next() + /** Eagerly fetch `size` elements to buffer. + * + * If buffer is dirty and stepping, copy prefix. + * If skipping, skip ahead. + * Fetch remaining elements. + * If unable to deliver size, then pad if padding enabled, otherwise drop segment. + * Returns true if successful in delivering `count` elements, + * or padded segment, or partial segment. 
+ */ + private def fulfill(): Boolean = { + val builder = newBuilder + var done = false + // keep prefix of previous buffer if stepping + if (prev != null) builder.addAll(prev) + // skip ahead + if (!first && step > size) { + var dropping = step - size + while (dropping > 0 && self.hasNext) { + self.next(): Unit dropping -= 1 - } else - dropping = 0 + } + done = dropping > 0 // skip failed } - def hasNext = { skip(); remaining != 0 && underlying.hasNext } - def next() = { - skip() - if (remaining > 0) { - remaining -= 1 - underlying.next() + var index = builder.length + if (!done) { + // advance to rest of segment if possible + while (index < size && self.hasNext) { + builder.addOne(self.next()) + index += 1 + } + // if unable to complete segment, pad if possible + if (index < size && pad) { + builder.sizeHint(size) + while (index < size) { + builder.addOne(padding()) + index += 1 + } + } } - else if (unbounded) underlying.next() - else empty.next() + // segment must have data, and must be complete unless they allow partial + val ok = index > 0 && (partial || index == size) + if (ok) buffer = builder.result().asInstanceOf[Array[B]] + else prev = null + ok } - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { - val lo = from max 0 - def adjustedBound = - if (unbounded) -1 - else 0 max (remaining - lo) - val rest = - if (until < 0) adjustedBound // respect current bound, if any - else if (until <= lo) 0 // empty - else if (unbounded) until - lo // now finite - else adjustedBound min (until - lo) // keep lesser bound - if (rest == 0) empty + + // fill() returns false if no more sequences can be produced + private def fill(): Boolean = filled || { filled = self.hasNext && fulfill() ; filled } + + def hasNext = fill() + + @throws[NoSuchElementException] + def next(): immutable.Seq[B] = + if (!fill()) Iterator.empty.next() else { - dropping += lo - remaining = rest - this + filled = false + // if stepping, retain overlap in prev + if (step < 
size) { + if (first) prev = buffer.drop(step) + else if (buffer.length == size) Array.copy(src = buffer, srcPos = step, dest = prev, destPos = 0, length = size - step) + else prev = null + } + val res = immutable.ArraySeq.unsafeWrapArray(buffer).asInstanceOf[immutable.ArraySeq[B]] + buffer = null + first = false + res } - } } -} -/** Iterators are data structures that allow to iterate over a sequence - * of elements. They have a `hasNext` method for checking - * if there is a next element available, and a `next` method - * which returns the next element and advances the iterator. - * - * An iterator is mutable: most operations on it change its state. While it is often used - * to iterate through the elements of a collection, it can also be used without - * being backed by any collection (see constructors on the companion object). - * - * It is of particular importance to note that, unless stated otherwise, ''one should never - * use an iterator after calling a method on it''. The two most important exceptions - * are also the sole abstract methods: `next` and `hasNext`. - * - * Both these methods can be called any number of times without having to discard the - * iterator. Note that even `hasNext` may cause mutation -- such as when iterating - * from an input stream, where it will block until the stream is closed or some - * input becomes available. - * - * Consider this example for safe and unsafe use: - * - * {{{ - * def f[A](it: Iterator[A]) = { - * if (it.hasNext) { // Safe to reuse "it" after "hasNext" - * it.next // Safe to reuse "it" after "next" - * val remainder = it.drop(2) // it is *not* safe to use "it" again after this line! - * remainder.take(2) // it is *not* safe to use "remainder" after this line! - * } else it - * } - * }}} - * - * @author Martin Odersky, Matthias Zenger - * @since 1 - * @define willNotTerminateInf - * Note: will not terminate for infinite iterators. 
- * @define mayNotTerminateInf - * Note: may not terminate for infinite iterators. - * @define preservesIterator - * The iterator remains valid for further use whatever result is returned. - * @define consumesIterator - * After calling this method, one should discard the iterator it was called - * on. Using it is undefined and subject to change. - * @define consumesAndProducesIterator - * After calling this method, one should discard the iterator it was called - * on, and use only the iterator that was returned. Using the old iterator - * is undefined, subject to change, and may result in changes to the new - * iterator as well. - * @define consumesTwoAndProducesOneIterator - * After calling this method, one should discard the iterator it was called - * on, as well as the one passed as a parameter, and use only the iterator - * that was returned. Using the old iterators is undefined, subject to change, - * and may result in changes to the new iterator as well. - * @define consumesOneAndProducesTwoIterators - * After calling this method, one should discard the iterator it was called - * on, and use only the iterators that were returned. Using the old iterator - * is undefined, subject to change, and may result in changes to the new - * iterators as well. - * @define consumesTwoIterators - * After calling this method, one should discard the iterator it was called - * on, as well as the one passed as parameter. Using the old iterators is - * undefined and subject to change. - */ -trait Iterator[+A] extends TraversableOnce[A] { - self => + /** A copy of this $coll with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned $coll. + * @return a new $coll consisting of + * all elements of this $coll followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. 
+ */ + def padTo[B >: A](len: Int, elem: B): Iterator[B] = new AbstractIterator[B] { + private[this] var i = 0 - import Iterator.empty + override def knownSize: Int = { + val thisSize = self.knownSize + if (thisSize < 0) -1 + else thisSize max (len - i) + } - def seq: Iterator[A] = this + def next(): B = { + val b = + if (self.hasNext) self.next() + else if (i < len) elem + else Iterator.empty.next() + i += 1 + b + } - /** Tests whether this iterator can provide another element. - * - * @return `true` if a subsequent call to `next` will yield an element, - * `false` otherwise. - * @note Reuse: $preservesIterator - */ - def hasNext: Boolean + def hasNext: Boolean = self.hasNext || i < len + } - /** Produces the next element of this iterator. + /** Partitions this iterator in two iterators according to a predicate. * - * @return the next element of this iterator, if `hasNext` is `true`, - * undefined behavior otherwise. - * @note Reuse: $preservesIterator + * @param p the predicate on which to partition + * @return a pair of iterators: the iterator that satisfies the predicate + * `p` and the iterator that does not. + * The relative order of the elements in the resulting iterators + * is the same as in the original iterator. + * @note Reuse: $consumesOneAndProducesTwoIterators */ - def next(): A + def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { + val (a, b) = duplicate + (a filter p, b filterNot p) + } - /** Tests whether this iterator is empty. + /** Returns an iterator which groups this iterator into fixed size + * blocks. Example usages: + * {{{ + * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7))) + * (1 to 7).iterator.grouped(3).toList + * // Returns List(List(1, 2, 3), List(4, 5, 6)) + * (1 to 7).iterator.grouped(3).withPartial(false).toList + * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7, 20, 25) + * // Illustrating that withPadding's argument is by-name. 
+ * val it2 = Iterator.iterate(20)(_ + 5) + * (1 to 7).iterator.grouped(3).withPadding(it2.next).toList + * }}} * - * @return `true` if hasNext is false, `false` otherwise. - * @note Reuse: $preservesIterator + * @note Reuse: $consumesAndProducesIterator */ - def isEmpty: Boolean = !hasNext + def grouped[B >: A](size: Int): GroupedIterator[B] = + new GroupedIterator[B](self, size, size) - /** Tests whether this Iterator can be repeatedly traversed. + /** Returns an iterator which presents a "sliding window" view of + * this iterator. The first argument is the window size, and + * the second argument `step` is how far to advance the window + * on each iteration. The `step` defaults to `1`. * - * @return `false` - * @note Reuse: $preservesIterator - */ - def isTraversableAgain = false - - /** Tests whether this Iterator has a known size. + * The returned `GroupedIterator` can be configured to either + * pad a partial result to size `size` or suppress the partial + * result entirely. * - * @return `true` for empty Iterators, `false` otherwise. - * @note Reuse: $preservesIterator - */ - def hasDefiniteSize = isEmpty - - /** Selects first ''n'' values of this iterator. + * Example usages: + * {{{ + * // Returns List(ArraySeq(1, 2, 3), ArraySeq(2, 3, 4), ArraySeq(3, 4, 5)) + * (1 to 5).iterator.sliding(3).toList + * // Returns List(ArraySeq(1, 2, 3, 4), ArraySeq(4, 5)) + * (1 to 5).iterator.sliding(4, 3).toList + * // Returns List(ArraySeq(1, 2, 3, 4)) + * (1 to 5).iterator.sliding(4, 3).withPartial(false).toList + * // Returns List(ArraySeq(1, 2, 3, 4), ArraySeq(4, 5, 20, 25)) + * // Illustrating that withPadding's argument is by-name. + * val it2 = Iterator.iterate(20)(_ + 5) + * (1 to 5).iterator.sliding(4, 3).withPadding(it2.next).toList + * }}} * - * @param n the number of values to take - * @return an iterator producing only the first `n` values of this iterator, or else the - * whole iterator, if it produces fewer than `n` values. 
- * @note Reuse: $consumesAndProducesIterator - */ - def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) - - /** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller. + * @param size the number of elements per group + * @param step the distance between the first elements of successive + * groups + * @return A `GroupedIterator` producing `Seq[B]`s of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. + * This behavior can be configured. * - * @param n the number of elements to drop - * @return an iterator which produces all values of the current iterator, except - * it omits the first `n` values. - * @note Reuse: $consumesAndProducesIterator + * @note Reuse: $consumesAndProducesIterator */ - def drop(n: Int): Iterator[A] = { - var j = 0 - while (j < n && hasNext) { - next() - j += 1 + def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = + new GroupedIterator[B](self, size, step) + + def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { + // We use an intermediate iterator that iterates through the first element `z` + // and then that will be modified to iterate through the collection + private[this] var current: Iterator[B] = + new AbstractIterator[B] { + override def knownSize = { + val thisSize = self.knownSize + + if (thisSize < 0) -1 + else thisSize + 1 + } + def hasNext: Boolean = true + def next(): B = { + // Here we change our self-reference to a new iterator that iterates through `self` + current = new AbstractIterator[B] { + private[this] var acc = z + def next(): B = { + acc = op(acc, self.next()) + acc + } + def hasNext: Boolean = self.hasNext + override def knownSize = self.knownSize + } + z + } + } + override def knownSize = current.knownSize + def next(): B = current.next() + def hasNext: Boolean = current.hasNext + } + + @deprecated("Call 
scanRight on an Iterable instead.", "2.13.0") + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = ArrayBuffer.from(this).scanRight(z)(op).iterator + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + * @note Reuse: $consumesIterator + */ + def indexWhere(p: A => Boolean, from: Int = 0): Int = { + var i = math.max(from, 0) + val dropped = drop(from) + while (dropped.hasNext) { + if (p(dropped.next())) return i + i += 1 } - this + -1 } - /** Creates an iterator returning an interval of the values produced by this iterator. - * - * @param from the index of the first element in this iterator which forms part of the slice. - * If negative, the slice starts at zero. - * @param until the index of the first element following the slice. If negative, the slice is empty. - * @return an iterator which advances this iterator past the first `from` elements using `drop`, - * and then takes `until - from` elements, using `take`. - * @note Reuse: $consumesAndProducesIterator - */ - def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) + /** Returns the index of the first occurrence of the specified + * object in this iterable object. + * $mayNotTerminateInf + * + * @param elem element to search for. + * @return the index of the first occurrence of `elem` in the values produced by this iterator, + * or -1 if such an element does not exist until the end of the iterator is reached. + * @note Reuse: $consumesIterator + */ + def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) - /** Creates an optionally bounded slice, unbounded if `until` is negative. 
*/ - protected def sliceIterator(from: Int, until: Int): Iterator[A] = { - val lo = from max 0 - val rest = - if (until < 0) -1 // unbounded - else if (until <= lo) 0 // empty - else until - lo // finite + /** Returns the index of the first occurrence of the specified object in this iterable object + * after or at some start index. + * $mayNotTerminateInf + * + * @param elem element to search for. + * @param from the start index + * @return the index `>= from` of the first occurrence of `elem` in the values produced by this + * iterator, or -1 if such an element does not exist until the end of the iterator is + * reached. + * @note Reuse: $consumesIterator + */ + def indexOf[B >: A](elem: B, from: Int): Int = { + var i = 0 + while (i < from && hasNext) { + next() + i += 1 + } - if (rest == 0) empty - else new Iterator.SliceIterator(this, lo, rest) + while (hasNext) { + if (next() == elem) return i + i += 1 + } + -1 } - /** Creates a new iterator that maps all produced values of this iterator - * to new values using a transformation function. - * - * @param f the transformation function - * @return a new iterator which transforms every value produced by this - * iterator by applying the function `f` to it. - * @note Reuse: $consumesAndProducesIterator - */ - def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { - def hasNext = self.hasNext - def next() = f(self.next()) - } + @inline final def length: Int = size - /** Concatenates this iterator with another. - * - * @param that the other iterator - * @return a new iterator that first yields the values produced by this - * iterator followed by the values produced by iterator `that`. 
- * @note Reuse: $consumesTwoAndProducesOneIterator - * - * @usecase def ++(that: => Iterator[A]): Iterator[A] - * @inheritdoc - */ - def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.ConcatIterator(self) ++ that + @deprecatedOverriding("isEmpty is defined as !hasNext; override hasNext instead", "2.13.0") + override def isEmpty: Boolean = !hasNext - /** Creates a new iterator by applying a function to all values produced by this iterator - * and concatenating the results. - * - * @param f the function to apply on each element. - * @return the iterator resulting from applying the given iterator-valued function - * `f` to each value produced by this iterator and concatenating the results. - * @note Reuse: $consumesAndProducesIterator - */ - def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] { - private var cur: Iterator[B] = empty - private def nextCur(): Unit = { cur = null ; cur = f(self.next()).toIterator } - def hasNext: Boolean = { - // Equivalent to cur.hasNext || self.hasNext && { nextCur(); hasNext } - // but slightly shorter bytecode (better JVM inlining!) - while (!cur.hasNext) { - if (!self.hasNext) return false - nextCur() - } - true - } - def next(): B = (if (hasNext) cur else empty).next() - } + def filter(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = false) - /** Returns an iterator over all the elements of this iterator that satisfy the predicate `p`. - * The order of the elements is preserved. - * - * @param p the predicate used to test values. - * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. 
- * @note Reuse: $consumesAndProducesIterator - */ - def filter(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { - // TODO 2.12 - Make a full-fledged FilterImpl that will reverse sense of p - private var hd: A = _ - private var hdDefined: Boolean = false + def filterNot(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = true) + + private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A] = new AbstractIterator[A] { + private[this] var hd: A = _ + private[this] var hdDefined: Boolean = false def hasNext: Boolean = hdDefined || { - do { + if (!self.hasNext) return false + hd = self.next() + while (p(hd) == isFlipped) { if (!self.hasNext) return false hd = self.next() - } while (!p(hd)) + } hdDefined = true - true + true } - def next() = if (hasNext) { hdDefined = false; hd } else empty.next() - } - - /** Tests whether every element of this iterator relates to the - * corresponding element of another collection by satisfying a test predicate. - * - * @param that the other collection - * @param p the test predicate, which relates elements from both collections - * @tparam B the type of the elements of `that` - * @return `true` if both collections have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this iterator - * and `y` of `that`, otherwise `false` - */ - def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = { - val that0 = that.toIterator - while (hasNext && that0.hasNext) - if (!p(next(), that0.next())) return false - - hasNext == that0.hasNext + def next() = + if (hasNext) { + hdDefined = false + hd + } + else Iterator.empty.next() } /** Creates an iterator over all the elements of this iterator that - * satisfy the predicate `p`. The order of the elements - * is preserved. - * - * '''Note:''' `withFilter` is the same as `filter` on iterators. It exists so that - * for-expressions with filters work over iterators. 
- * - * @param p the predicate used to test values. - * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ + * satisfy the predicate `p`. The order of the elements + * is preserved. + * + * '''Note:''' `withFilter` is the same as `filter` on iterators. It exists so that + * for-expressions with filters work over iterators. + * + * @param p the predicate used to test values. + * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. + * @note Reuse: $consumesAndProducesIterator + */ def withFilter(p: A => Boolean): Iterator[A] = filter(p) - /** Creates an iterator over all the elements of this iterator which - * do not satisfy a predicate p. - * - * @param p the predicate used to test values. - * @return an iterator which produces those values of this iterator which do not satisfy the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ - def filterNot(p: A => Boolean): Iterator[A] = filter(!p(_)) - - /** Creates an iterator by transforming values - * produced by this iterator with a partial function, dropping those - * values for which the partial function is not defined. - * - * @param pf the partial function which filters and maps the iterator. - * @return a new iterator which yields each value `x` produced by this iterator for - * which `pf` is defined the image `pf(x)`. - * @note Reuse: $consumesAndProducesIterator - */ - @migration("`collect` has changed. 
The previous behavior can be reproduced with `toSeq`.", "2.8.0") - def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] { + def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { // Manually buffer to avoid extra layer of wrapping with buffered - private[this] var hd: A = _ + private[this] var hd: B = _ // Little state machine to keep track of where we are // Seek = 0; Found = 1; Empty = -1 @@ -580,77 +525,123 @@ trait Iterator[+A] extends TraversableOnce[A] { // BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC! private[this] var status = 0/*Seek*/ + def apply(value: A): B = Statics.pfMarker.asInstanceOf[B] + def hasNext = { + val marker = Statics.pfMarker while (status == 0/*Seek*/) { if (self.hasNext) { - hd = self.next() - if (pf.isDefinedAt(hd)) status = 1/*Found*/ + val x = self.next() + val v = pf.applyOrElse(x, this) + if (marker ne v.asInstanceOf[AnyRef]) { + hd = v + status = 1/*Found*/ + } } else status = -1/*Empty*/ } status == 1/*Found*/ } - def next() = if (hasNext) { status = 0/*Seek*/; pf(hd) } else Iterator.empty.next() - } + def next() = if (hasNext) { status = 0/*Seek*/; hd } else Iterator.empty.next() + } + + /** + * Builds a new iterator from this one without any duplicated elements on it. + * @return iterator with distinct elements + * + * @note Reuse: $consumesIterator + */ + def distinct: Iterator[A] = distinctBy(identity) + + /** + * Builds a new iterator from this one without any duplicated elements as determined by `==` after applying + * the transforming function `f`. 
+ * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return iterator with distinct elements + * + * @note Reuse: $consumesIterator + */ + def distinctBy[B](f: A => B): Iterator[A] = new AbstractIterator[A] { + + private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] var nextElementDefined: Boolean = false + private[this] var nextElement: A = _ + + def hasNext: Boolean = nextElementDefined || (self.hasNext && { + val a = self.next() + if (traversedValues.add(f(a))) { + nextElement = a + nextElementDefined = true + true + } + else hasNext + }) + + def next(): A = + if (hasNext) { + nextElementDefined = false + nextElement + } else { + Iterator.empty.next() + } + } + + def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { + override def knownSize = self.knownSize + def hasNext = self.hasNext + def next() = f(self.next()) + } + + def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new AbstractIterator[B] { + private[this] var cur: Iterator[B] = Iterator.empty + /** Trillium logic boolean: -1 = unknown, 0 = false, 1 = true */ + private[this] var _hasNext: Int = -1 + + def nextCur(): Unit = { + cur = Iterator.empty + cur = f(self.next()).iterator + _hasNext = -1 + } - /** Produces a collection containing cumulative results of applying the - * operator going left to right, including the initial value. 
- * - * $willNotTerminateInf - * $orderDependent - * - * @tparam B the type of the elements in the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @return iterator with intermediate results - * @note Reuse: $consumesAndProducesIterator - */ - def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { - private[this] var state = 0 // 1 consumed initial, 2 self.hasNext, 3 done - private[this] var accum = z - private[this] def gen() = { val res = op(accum, self.next()) ; accum = res ; res } - def hasNext = state match { - case 0 | 2 => true - case 3 => false - case _ => if (self.hasNext) { state = 2 ; true } else { state = 3 ; false } + def hasNext: Boolean = { + if (_hasNext == -1) { + while (!cur.hasNext) { + if (!self.hasNext) { + _hasNext = 0 + // since we know we are exhausted, we can release cur for gc, and as well replace with + // static Iterator.empty which will support efficient subsequent `hasNext`/`next` calls + cur = Iterator.empty + return false + } + nextCur() + } + _hasNext = 1 + true + } else _hasNext == 1 } - def next() = state match { - case 0 => state = 1 ; accum - case 1 => gen() - case 2 => state = 1 ; gen() - case 3 => Iterator.empty.next() + def next(): B = { + if (hasNext) { + _hasNext = -1 + } + cur.next() } } - /** Produces a collection containing cumulative results of applying the operator going right to left. - * The head of the collection is the last cumulative result. 
- * - * $willNotTerminateInf - * $orderDependent - * - * @tparam B the type of the elements in the resulting collection - * @param z the initial value - * @param op the binary operator applied to the intermediate result and the element - * @return iterator with intermediate results - * @example {{{ - * Iterator(1, 2, 3, 4).scanRight(0)(_ + _).toList == List(10, 9, 7, 4, 0) - * }}} - * @note Reuse: $consumesAndProducesIterator - */ - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = toBuffer.scanRight(z)(op).iterator + def flatten[B](implicit ev: A => IterableOnce[B]): Iterator[B] = + flatMap[B](ev) + + def concat[B >: A](xs: => IterableOnce[B]): Iterator[B] = new Iterator.ConcatIterator[B](self).concat(xs) + + @`inline` final def ++ [B >: A](xs: => IterableOnce[B]): Iterator[B] = concat(xs) + + def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) - /** Takes longest prefix of values produced by this iterator that satisfy a predicate. - * - * @param p The predicate used to test elements. - * @return An iterator returning the values produced by this iterator, until - * this iterator produces a value that does not satisfy - * the predicate `p`. - * @note Reuse: $consumesAndProducesIterator - */ def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { - private var hd: A = _ - private var hdDefined: Boolean = false - private var tail: Iterator[A] = self + private[this] var hd: A = _ + private[this] var hdDefined: Boolean = false + private[this] var tail: Iterator[A] = self def hasNext = hdDefined || tail.hasNext && { hd = tail.next() @@ -658,45 +649,47 @@ trait Iterator[+A] extends TraversableOnce[A] { else tail = Iterator.empty hdDefined } - def next() = if (hasNext) { hdDefined = false; hd } else empty.next() + def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - /** Partitions this iterator in two iterators according to a predicate. 
- * - * @param p the predicate on which to partition - * @return a pair of iterators: the iterator that satisfies the predicate - * `p` and the iterator that does not. - * The relative order of the elements in the resulting iterators - * is the same as in the original iterator. - * @note Reuse: $consumesOneAndProducesTwoIterators - */ - def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { - val self = buffered - class PartitionIterator(p: A => Boolean) extends AbstractIterator[A] { - var other: PartitionIterator = _ - val lookahead = new mutable.Queue[A] - def skip() = - while (self.hasNext && !p(self.head)) { - other.lookahead += self.next + def drop(n: Int): Iterator[A] = sliceIterator(n, -1) + + def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { + // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator + private[this] var status = -1 + // Local buffering to avoid double-wrap with .buffered + private[this] var fst: A = _ + def hasNext: Boolean = + if (status == 1) self.hasNext + else if (status == 0) true + else { + while (self.hasNext) { + val a = self.next() + if (!p(a)) { + fst = a + status = 0 + return true + } } - def hasNext = !lookahead.isEmpty || { skip(); self.hasNext } - def next() = if (!lookahead.isEmpty) lookahead.dequeue() - else { skip(); self.next() } - } - val l = new PartitionIterator(p) - val r = new PartitionIterator(!p(_)) - l.other = r - r.other = l - (l, r) + status = 1 + false + } + def next() = + if (hasNext) { + if (status == 1) self.next() + else { + status = 1 + fst + } + } + else Iterator.empty.next() } - /** Splits this Iterator into a prefix/suffix pair according to a predicate. - * - * @param p the test predicate - * @return a pair of Iterators consisting of the longest prefix of this - * whose elements all satisfy `p`, and the rest of the Iterator. 
- * @note Reuse: $consumesOneAndProducesTwoIterators - */ + /** + * @inheritdoc + * + * @note Reuse: $consumesOneAndProducesTwoIterators + */ def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { /* * Giving a name to following iterator (as opposed to trailing) because @@ -704,7 +697,7 @@ trait Iterator[+A] extends TraversableOnce[A] { * iterator is referring (the finish() method) and thus triggering * handling of structural calls. It's not what's intended here. */ - class Leading extends AbstractIterator[A] { + final class Leading extends AbstractIterator[A] { private[this] var lookahead: mutable.Queue[A] = null private[this] var hd: A = _ /* Status is kept with magic numbers @@ -714,7 +707,7 @@ trait Iterator[+A] extends TraversableOnce[A] { * -2 means we are done but have saved hd for the other iterator to use as its first element */ private[this] var status = 0 - private def store(a: A) { + private def store(a: A): Unit = { if (lookahead == null) lookahead = new mutable.Queue[A] lookahead += a } @@ -735,13 +728,14 @@ trait Iterator[+A] extends TraversableOnce[A] { if (status == 1) { status = 0; hd } else lookahead.dequeue() } - else empty.next() + else Iterator.empty.next() } + @tailrec def finish(): Boolean = status match { case -2 => status = -1 ; true case -1 => false case 1 => store(hd) ; status = 0 ; finish() - case 0 => + case 0 => status = -1 while (self.hasNext) { val a = self.next() @@ -788,519 +782,102 @@ trait Iterator[+A] extends TraversableOnce[A] { self.next() } } - else empty.next() + else Iterator.empty.next() } } (leading, trailing) } - /** Skips longest sequence of elements of this iterator which satisfy given - * predicate `p`, and returns an iterator of the remaining elements. - * - * @param p the predicate used to skip elements. 
- * @return an iterator consisting of the remaining elements - * @note Reuse: $consumesAndProducesIterator - */ - def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { - // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator - private[this] var status = -1 - // Local buffering to avoid double-wrap with .buffered - private[this] var fst: A = _ - def hasNext: Boolean = - if (status == 1) self.hasNext - else if (status == 0) true - else { - while (self.hasNext) { - val a = self.next() - if (!p(a)) { - fst = a - status = 0 - return true - } - } - status = 1 - false - } - def next() = - if (hasNext) { - if (status == 1) self.next() - else { - status = 1 - fst - } - } - else Iterator.empty.next() - } - - /** Creates an iterator formed from this iterator and another iterator - * by combining corresponding values in pairs. - * If one of the two iterators is longer than the other, its remaining - * elements are ignored. - * - * @param that The iterator providing the second half of each result pair - * @return a new iterator containing pairs consisting of - * corresponding elements of this iterator and `that`. The number - * of elements returned by the new iterator is the - * minimum of the number of elements returned by this - * iterator and `that`. - * @note Reuse: $consumesTwoAndProducesOneIterator - */ - def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { - def hasNext = self.hasNext && that.hasNext - def next = (self.next(), that.next()) - } - - /** Appends an element value to this iterator until a given target length is reached. - * - * @param len the target length - * @param elem the padding value - * @return a new iterator consisting of producing all values of this iterator, - * followed by the minimal number of occurrences of `elem` so - * that the number of produced values is at least `len`. 
- * @note Reuse: $consumesAndProducesIterator - * - * @usecase def padTo(len: Int, elem: A): Iterator[A] - * @inheritdoc - */ - def padTo[A1 >: A](len: Int, elem: A1): Iterator[A1] = new AbstractIterator[A1] { - private var count = 0 - def hasNext = self.hasNext || count < len - def next = { - count += 1 - if (self.hasNext) self.next() - else if (count <= len) elem - else empty.next() - } - } - - /** Creates an iterator that pairs each element produced by this iterator - * with its index, counting from 0. - * - * @return a new iterator containing pairs consisting of - * corresponding elements of this iterator and their indices. - * @note Reuse: $consumesAndProducesIterator - */ - def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { - var idx = 0 - def hasNext = self.hasNext - def next = { - val ret = (self.next(), idx) - idx += 1 - ret - } - } - - /** Creates an iterator formed from this iterator and another iterator - * by combining corresponding elements in pairs. - * If one of the two iterators is shorter than the other, - * placeholder elements are used to extend the shorter iterator to the length of the longer. - * - * @param that iterator `that` may have a different length - * as the self iterator. - * @param thisElem element `thisElem` is used to fill up the - * resulting iterator if the self iterator is shorter than - * `that` - * @param thatElem element `thatElem` is used to fill up the - * resulting iterator if `that` is shorter than - * the self iterator - * @return a new iterator containing pairs consisting of - * corresponding values of this iterator and `that`. The length - * of the returned iterator is the maximum of the lengths of this iterator and `that`. - * If this iterator is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this iterator, `thatElem` values are used to pad the result. 
- * @note Reuse: $consumesTwoAndProducesOneIterator - * - * @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)] - * @inheritdoc - */ - def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1): Iterator[(A1, B1)] = new AbstractIterator[(A1, B1)] { - def hasNext = self.hasNext || that.hasNext - def next(): (A1, B1) = - if (self.hasNext) { - if (that.hasNext) (self.next(), that.next()) - else (self.next(), thatElem) - } else { - if (that.hasNext) (thisElem, that.next()) - else empty.next() - } - } - - /** Applies a function `f` to all values produced by this iterator. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @note Reuse: $consumesIterator - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ - def foreach[U](f: A => U) { while (hasNext) f(next()) } - - /** Tests whether a predicate holds for all values produced by this iterator. - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` holds for all values - * produced by this iterator, otherwise `false`. - * @note Reuse: $consumesIterator - */ - def forall(p: A => Boolean): Boolean = { - var res = true - while (res && hasNext) res = p(next()) - res - } - - /** Tests whether a predicate holds for some of the values produced by this iterator. - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` holds for some of the values - * produced by this iterator, otherwise `false`. 
- * @note Reuse: $consumesIterator - */ - def exists(p: A => Boolean): Boolean = { - var res = false - while (!res && hasNext) res = p(next()) - res - } - - /** Tests whether this iterator contains a given value as an element. - * $mayNotTerminateInf - * - * @param elem the element to test. - * @return `true` if this iterator produces some value that is - * is equal (as determined by `==`) to `elem`, `false` otherwise. - * @note Reuse: $consumesIterator - */ - def contains(elem: Any): Boolean = exists(_ == elem) // Note--this seems faster than manual inlining! - - /** Finds the first value produced by the iterator satisfying a - * predicate, if any. - * $mayNotTerminateInf - * - * @param p the predicate used to test values. - * @return an option value containing the first value produced by the iterator that satisfies - * predicate `p`, or `None` if none exists. - * @note Reuse: $consumesIterator - */ - def find(p: A => Boolean): Option[A] = { - while (hasNext) { - val a = next() - if (p(a)) return Some(a) - } - None - } - - /** Returns the index of the first produced value satisfying a predicate, or -1. - * $mayNotTerminateInf - * - * @param p the predicate to test values - * @return the index of the first produced value satisfying `p`, - * or -1 if such an element does not exist until the end of the iterator is reached. - * @note Reuse: $consumesIterator - */ - def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) - - /** Returns the index of the first produced value satisfying a predicate, or -1, after or at - * some start index. - * $mayNotTerminateInf - * - * @param p the predicate to test values - * @param from the start index - * @return the index `>= from` of the first produced value satisfying `p`, - * or -1 if such an element does not exist until the end of the iterator is reached. 
- * @note Reuse: $consumesIterator - */ - def indexWhere(p: A => Boolean, from: Int): Int = { - var i = 0 - while (i < from && hasNext) { - next() - i += 1 - } - - while (hasNext) { - if (p(next())) return i - i += 1 - } - -1 - } - - /** Returns the index of the first occurrence of the specified - * object in this iterable object. - * $mayNotTerminateInf - * - * @param elem element to search for. - * @return the index of the first occurrence of `elem` in the values produced by this iterator, - * or -1 if such an element does not exist until the end of the iterator is reached. - * @note Reuse: $consumesIterator - */ - def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) - - /** Returns the index of the first occurrence of the specified object in this iterable object - * after or at some start index. - * $mayNotTerminateInf - * - * @param elem element to search for. - * @param from the start index - * @return the index `>= from` of the first occurrence of `elem` in the values produced by this - * iterator, or -1 if such an element does not exist until the end of the iterator is - * reached. - * @note Reuse: $consumesIterator - */ - def indexOf[B >: A](elem: B, from: Int): Int = { - var i = 0 - while (i < from && hasNext) { - next() - i += 1 - } - - while (hasNext) { - if (next() == elem) return i - i += 1 - } - -1 - } - - /** Creates a buffered iterator from this iterator. - * - * @see [[scala.collection.BufferedIterator]] - * @return a buffered iterator producing the same values as this iterator. 
- * @note Reuse: $consumesAndProducesIterator - */ - def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { - private var hd: A = _ - private var hdDefined: Boolean = false - - def head: A = { - if (!hdDefined) { - hd = next() - hdDefined = true - } - hd - } - - def hasNext = - hdDefined || self.hasNext - - def next() = - if (hdDefined) { - hdDefined = false - hd - } else self.next() - } - - /** A flexible iterator for transforming an `Iterator[A]` into an - * Iterator[Seq[A]], with configurable sequence size, step, and - * strategy for dealing with elements which don't fit evenly. - * - * Typical uses can be achieved via methods `grouped` and `sliding`. - */ - class GroupedIterator[B >: A](self: Iterator[A], size: Int, step: Int) - extends AbstractIterator[Seq[B]] - with Iterator[Seq[B]] { - - require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - - private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer - private[this] var filled = false // whether the buffer is "hot" - private[this] var _partial = true // whether we deliver short sequences - private[this] var pad: Option[() => B] = None // what to pad short sequences with - - /** Public functions which can be used to configure the iterator before use. - * - * Pads the last segment if necessary so that all segments will - * have the same size. - * - * @param x The element that will be appended to the last segment, if necessary. - * @return The same iterator, and ''not'' a new iterator. - * @note This method mutates the iterator it is called on, which can be safely used afterwards. - * @note This method is mutually exclusive with `withPartial(true)`. - */ - def withPadding(x: => B): this.type = { - pad = Some(() => x) - this - } - /** Public functions which can be used to configure the iterator before use. - * - * Select whether the last segment may be returned with less than `size` - * elements. 
If not, some elements of the original iterator may not be - * returned at all. - * - * @param x `true` if partial segments may be returned, `false` otherwise. - * @return The same iterator, and ''not'' a new iterator. - * @note This method mutates the iterator it is called on, which can be safely used afterwards. - * @note This method is mutually exclusive with `withPadding`. - */ - def withPartial(x: Boolean): this.type = { - _partial = x - if (_partial == true) // reset pad since otherwise it will take precedence - pad = None + def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) - this - } + /** Creates an optionally bounded slice, unbounded if `until` is negative. */ + protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + val lo = from max 0 + val rest = + if (until < 0) -1 // unbounded + else if (until <= lo) 0 // empty + else until - lo // finite - /** For reasons which remain to be determined, calling - * self.take(n).toSeq cause an infinite loop, so we have - * a slight variation on take for local usage. - * NB: self.take.toSeq is slice.toStream, lazily built on self, - * so a subsequent self.hasNext would not test self after the - * group was consumed. 
- */ - private def takeDestructively(size: Int): Seq[A] = { - val buf = new ArrayBuffer[A] - var i = 0 - // The order of terms in the following condition is important - // here as self.hasNext could be blocking - while (i < size && self.hasNext) { - buf += self.next - i += 1 - } - buf - } + if (rest == 0) Iterator.empty + else new Iterator.SliceIterator(this, lo, rest) + } - private def padding(x: Int) = List.fill(x)(pad.get()) - private def gap = (step - size) max 0 - - private def go(count: Int) = { - val prevSize = buffer.size - def isFirst = prevSize == 0 - // If there is padding defined we insert it immediately - // so the rest of the code can be oblivious - val xs: Seq[B] = { - val res = takeDestructively(count) - // was: extra checks so we don't calculate length unless there's reason - // but since we took the group eagerly, just use the fast length - val shortBy = count - res.length - if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res - } - lazy val len = xs.length - lazy val incomplete = len < count - - // if 0 elements are requested, or if the number of newly obtained - // elements is less than the gap between sequences, we are done. 
- def deliver(howMany: Int) = { - (howMany > 0 && (isFirst || len > gap)) && { - if (!isFirst) - buffer trimStart (step min prevSize) - - val available = - if (isFirst) len - else howMany min (len - gap) - - buffer ++= (xs takeRight available) - filled = true - true - } - } + def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { + val thatIterator = that.iterator + override def knownSize = self.knownSize min thatIterator.knownSize + def hasNext = self.hasNext && thatIterator.hasNext + def next() = (self.next(), thatIterator.next()) + } - if (xs.isEmpty) false // self ran out of elements - else if (_partial) deliver(len min size) // if _partial is true, we deliver regardless - else if (incomplete) false // !_partial && incomplete means no more seqs - else if (isFirst) deliver(len) // first element - else deliver(step min size) // the typical case + def zipAll[A1 >: A, B](that: IterableOnce[B], thisElem: A1, thatElem: B): Iterator[(A1, B)] = new AbstractIterator[(A1, B)] { + val thatIterator = that.iterator + override def knownSize = { + val thisSize = self.knownSize + val thatSize = thatIterator.knownSize + if (thisSize < 0 || thatSize < 0) -1 + else thisSize max thatSize } - - // fill() returns false if no more sequences can be produced - private def fill(): Boolean = { - if (!self.hasNext) false - // the first time we grab size, but after that we grab step - else if (buffer.isEmpty) go(size) - else go(step) + def hasNext = self.hasNext || thatIterator.hasNext + def next(): (A1, B) = { + val next1 = self.hasNext + val next2 = thatIterator.hasNext + if(!(next1 || next2)) throw new NoSuchElementException + (if(next1) self.next() else thisElem, if(next2) thatIterator.next() else thatElem) } + } - def hasNext = filled || fill() - def next = { - if (!filled) - fill() - - if (!filled) - throw new NoSuchElementException("next on empty iterator") - filled = false - buffer.toList + def zipWithIndex: Iterator[(A, Int)] = new 
AbstractIterator[(A, Int)] { + var idx = 0 + override def knownSize = self.knownSize + def hasNext = self.hasNext + def next() = { + val ret = (self.next(), idx) + idx += 1 + ret } } - /** Returns an iterator which groups this iterator into fixed size - * blocks. Example usages: - * {{{ - * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7))) - * (1 to 7).iterator grouped 3 toList - * // Returns List(List(1, 2, 3), List(4, 5, 6)) - * (1 to 7).iterator grouped 3 withPartial false toList - * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7, 20, 25) - * // Illustrating that withPadding's argument is by-name. - * val it2 = Iterator.iterate(20)(_ + 5) - * (1 to 7).iterator grouped 3 withPadding it2.next toList - * }}} - * - * @note Reuse: $consumesAndProducesIterator - */ - def grouped[B >: A](size: Int): GroupedIterator[B] = - new GroupedIterator[B](self, size, size) - - /** Returns an iterator which presents a "sliding window" view of - * this iterator. The first argument is the window size, and - * the second argument `step` is how far to advance the window - * on each iteration. The `step` defaults to `1`. - * - * The default `GroupedIterator` can be configured to either - * pad a partial result to size `size` or suppress the partial - * result entirely. - * - * Example usages: - * {{{ - * // Returns List(List(1, 2, 3), List(2, 3, 4), List(3, 4, 5)) - * (1 to 5).iterator.sliding(3).toList - * // Returns List(List(1, 2, 3, 4), List(4, 5)) - * (1 to 5).iterator.sliding(4, 3).toList - * // Returns List(List(1, 2, 3, 4)) - * (1 to 5).iterator.sliding(4, 3).withPartial(false).toList - * // Returns List(List(1, 2, 3, 4), List(4, 5, 20, 25)) - * // Illustrating that withPadding's argument is by-name. 
- * val it2 = Iterator.iterate(20)(_ + 5) - * (1 to 5).iterator.sliding(4, 3).withPadding(it2.next).toList - * }}} - * - * @return An iterator producing `Seq[B]`s of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - * This behavior can be configured. - * - * @note Reuse: $consumesAndProducesIterator - */ - def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = - new GroupedIterator[B](self, size, step) - - /** Returns the number of elements in this iterator. - * $willNotTerminateInf + /** Checks whether corresponding elements of the given iterable collection + * compare equal (with respect to `==`) to elements of this $coll. * - * @note Reuse: $consumesIterator + * @param that the collection to compare + * @tparam B the type of the elements of collection `that`. + * @return `true` if both collections contain equal elements in the same order, `false` otherwise. */ - def length: Int = this.size + def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + val those = that.iterator + while (hasNext) { + if (!those.hasNext) return false + if (next() != those.next()) return false + } + !those.hasNext + } /** Creates two new iterators that both iterate over the same elements - * as this iterator (in the same order). The duplicate iterators are - * considered equal if they are positioned at the same element. - * - * Given that most methods on iterators will make the original iterator - * unfit for further use, this methods provides a reliable way of calling - * multiple such methods on an iterator. - * - * @return a pair of iterators - * @note The implementation may allocate temporary storage for elements - * iterated by one iterator but not yet by the other. - * @note Reuse: $consumesOneAndProducesTwoIterators - */ + * as this iterator (in the same order). 
The duplicate iterators are + * considered equal if they are positioned at the same element. + * + * Given that most methods on iterators will make the original iterator + * unfit for further use, this methods provides a reliable way of calling + * multiple such methods on an iterator. + * + * @return a pair of iterators + * @note The implementation may allocate temporary storage for elements + * iterated by one iterator but not yet by the other. + * @note Reuse: $consumesOneAndProducesTwoIterators + */ def duplicate: (Iterator[A], Iterator[A]) = { val gap = new scala.collection.mutable.Queue[A] var ahead: Iterator[A] = null class Partner extends AbstractIterator[A] { + override def knownSize: Int = self.synchronized { + val thisSize = self.knownSize + + if (this eq ahead) thisSize + else if (thisSize < 0 || gap.knownSize < 0) -1 + else thisSize + gap.knownSize + } def hasNext: Boolean = self.synchronized { (this ne ahead) && !gap.isEmpty || self.hasNext } @@ -1325,106 +902,411 @@ trait Iterator[+A] extends TraversableOnce[A] { } /** Returns this iterator with patched values. - * Patching at negative indices is the same as patching starting at 0. - * Patching at indices at or larger than the length of the original iterator appends the patch to the end. - * If more values are replaced than actually exist, the excess is ignored. - * - * @param from The start index from which to patch - * @param patchElems The iterator of patch values - * @param replaced The number of values in the original iterator that are replaced by the patch. 
- * @note Reuse: $consumesTwoAndProducesOneIterator - */ - def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { - private var origElems = self - private var i = (if (from > 0) from else 0) // Counts down, switch to patch on 0, -1 means use patch first - def hasNext: Boolean = { - if (i == 0) { - origElems = origElems drop replaced - i = -1 - } - origElems.hasNext || patchElems.hasNext - } - def next(): B = { - if (i == 0) { - origElems = origElems drop replaced - i = -1 - } - if (i < 0) { - if (patchElems.hasNext) patchElems.next() - else origElems.next() + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original iterator appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param patchElems The iterator of patch values + * @param replaced The number of values in the original iterator that are replaced by the patch. 
+ * @note Reuse: $consumesTwoAndProducesOneIterator + */ + def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = + new AbstractIterator[B] { + private[this] var origElems = self + // > 0 => that many more elems from `origElems` before switching to `patchElems` + // 0 => need to drop elems from `origElems` and start using `patchElems` + // -1 => have dropped elems from `origElems`, will be using `patchElems` until it's empty + // and then using what's left of `origElems` after the drop + private[this] var state = if (from > 0) from else 0 + + // checks state and handles 0 => -1 + @inline private[this] def switchToPatchIfNeeded(): Unit = + if (state == 0) { + origElems = origElems drop replaced + state = -1 + } + + def hasNext: Boolean = { + switchToPatchIfNeeded() + origElems.hasNext || patchElems.hasNext } - else { - if (origElems.hasNext) { - i -= 1 - origElems.next() + + def next(): B = { + switchToPatchIfNeeded() + if (state < 0 /* == -1 */) { + if (patchElems.hasNext) patchElems.next() + else origElems.next() } else { - i = -1 - patchElems.next() + if (origElems.hasNext) { + state -= 1 + origElems.next() + } + else { + state = -1 + patchElems.next() + } } } } + + override def tapEach[U](f: A => U): Iterator[A] = new AbstractIterator[A] { + override def knownSize = self.knownSize + override def hasNext = self.hasNext + override def next() = { + val _next = self.next() + f(_next) + _next + } } - /** Copies selected values produced by this iterator to an array. - * Fills the given array `xs` starting at index `start` with at most - * `len` values produced by this iterator. - * Copying will stop once either the end of the current iterator is reached, - * or the end of the array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index. - * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the array. 
- * - * @note Reuse: $consumesIterator - * - * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit - * @inheritdoc + /** Converts this iterator to a string. * - * $willNotTerminateInf + * @return `""` + * @note Reuse: $preservesIterator */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = { - var i = start - val end = start + math.min(len, xs.length - start) - while (i < end && hasNext) { - xs(i) = next() - i += 1 + override def toString = "" + + @deprecated("Iterator.seq always returns the iterator itself", "2.13.0") + def seq: this.type = this +} + +@SerialVersionUID(3L) +object Iterator extends IterableFactory[Iterator] { + + private[this] val _empty: Iterator[Nothing] = new AbstractIterator[Nothing] { + def hasNext = false + def next() = throw new NoSuchElementException("next on empty iterator") + override def knownSize: Int = 0 + override protected def sliceIterator(from: Int, until: Int): AbstractIterator[Nothing] = this + } + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + override def from[A](source: IterableOnce[A]): Iterator[A] = source.iterator + + /** The iterator which produces no values. */ + @`inline` final def empty[T]: Iterator[T] = _empty + + def single[A](a: A): Iterator[A] = new AbstractIterator[A] { + private[this] var consumed: Boolean = false + def hasNext = !consumed + def next() = if (consumed) empty.next() else { consumed = true; a } + override protected def sliceIterator(from: Int, until: Int) = + if (consumed || from > 0 || until == 0) empty + else this + } + + override def apply[A](xs: A*): Iterator[A] = xs.iterator + + /** + * @return A builder for $Coll objects. 
+ * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, Iterator[A]] = + new ImmutableBuilder[A, Iterator[A]](empty[A]) { + override def addOne(elem: A): this.type = { elems = elems ++ single(elem); this } } - // TODO: return i - start so the caller knows how many values read? + + /** Creates iterator that produces the results of some element computation a number of times. + * + * @param len the number of elements returned by the iterator. + * @param elem the element computation + * @return An iterator that produces the results of `n` evaluations of `elem`. + */ + override def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { + private[this] var i = 0 + override def knownSize: Int = (len - i) max 0 + def hasNext: Boolean = i < len + def next(): A = + if (hasNext) { i += 1; elem } + else empty.next() } - /** Tests if another iterator produces the same values as this one. - * - * $willNotTerminateInf - * - * @param that the other iterator - * @return `true`, if both iterators produce the same elements in the same order, `false` otherwise. - * - * @note Reuse: $consumesTwoIterators - */ - def sameElements(that: Iterator[_]): Boolean = { - while (hasNext && that.hasNext) - if (next != that.next) - return false + /** Creates an iterator producing the values of a given function over a range of integer values starting from 0. + * + * @param end The number of elements returned by the iterator + * @param f The function computing element values + * @return An iterator that produces the values `f(0), ..., f(n -1)`. + */ + override def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { + private[this] var i = 0 + override def knownSize: Int = (end - i) max 0 + def hasNext: Boolean = i < end + def next(): A = + if (hasNext) { val result = f(i); i += 1; result } + else empty.next() + } + + /** Creates an infinite-length iterator which returns successive values from some start value. 
+ + * @param start the start value of the iterator + * @return the iterator producing the infinite sequence of values `start, start + 1, start + 2, ...` + */ + def from(start: Int): Iterator[Int] = from(start, 1) + + /** Creates an infinite-length iterator returning values equally spaced apart. + * + * @param start the start value of the iterator + * @param step the increment between successive values + * @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...` + */ + def from(start: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { + private[this] var i = start + def hasNext: Boolean = true + def next(): Int = { val result = i; i += step; result } + } + + /** Creates nn iterator returning successive values in some integer interval. + * + * @param start the start value of the iterator + * @param end the end value of the iterator (the first value NOT returned) + * @return the iterator producing values `start, start + 1, ..., end - 1` + */ + def range(start: Int, end: Int): Iterator[Int] = range(start, end, 1) - !hasNext && !that.hasNext + /** An iterator producing equally spaced values in some integer interval. 
+ * + * @param start the start value of the iterator + * @param end the end value of the iterator (the first value NOT returned) + * @param step the increment value of the iterator (must be positive or negative) + * @return the iterator producing values `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { + if (step == 0) throw new IllegalArgumentException("zero step") + private[this] var i = start + private[this] var hasOverflowed = false + override def knownSize: Int = { + val size = math.ceil((end.toLong - i.toLong) / step.toDouble) + if (size < 0) 0 + else if (size > Int.MaxValue) -1 + else size.toInt + } + def hasNext: Boolean = { + (step <= 0 || i < end) && (step >= 0 || i > end) && !hasOverflowed + } + def next(): Int = + if (hasNext) { + val result = i + val nextValue = i + step + hasOverflowed = (step > 0) == nextValue < i + i = nextValue + result + } + else empty.next() } - def toTraversable: Traversable[A] = toStream - def toIterator: Iterator[A] = self - def toStream: Stream[A] = - if (self.hasNext) Stream.cons(self.next(), self.toStream) - else Stream.empty[A] + /** Creates an infinite iterator that repeatedly applies a given function to the previous result. + * + * @param start the start value of the iterator + * @param f the function that's repeatedly applied + * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { + private[this] var first = true + private[this] var acc = start + def hasNext: Boolean = true + def next(): T = { + if (first) first = false + else acc = f(acc) + acc + } + } - /** Converts this iterator to a string. - * - * @return `""` - * whether or not the iterator is empty. 
- * @note Reuse: $preservesIterator + /** Creates an Iterator that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return an Iterator that produces elements using `f` until `f` returns `None` + */ + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A] = new UnfoldIterator(init)(f) + + /** Creates an infinite-length iterator returning the results of evaluating an expression. + * The expression is recomputed for every element. + * + * @param elem the element computation. + * @return the iterator containing an infinite number of results of evaluating `elem`. + */ + def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { + def hasNext = true + def next() = elem + } + + /** Creates an iterator to which other iterators can be appended efficiently. + * Nested ConcatIterators are merged to avoid blowing the stack. 
*/ - override def toString = "" + private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance]) extends AbstractIterator[A] { + private var tail: ConcatIteratorCell[A @uncheckedVariance] = null + private var last: ConcatIteratorCell[A @uncheckedVariance] = null + private var currentHasNextChecked = false + + def hasNext = + if (currentHasNextChecked) true + else if (current == null) false + else if (current.hasNext) { + currentHasNextChecked = true + true + } + else { + // If we advanced the current iterator to a ConcatIterator, merge it into this one + @tailrec def merge(): Unit = + if (current.isInstanceOf[ConcatIterator[_]]) { + val c = current.asInstanceOf[ConcatIterator[A]] + current = c.current + currentHasNextChecked = c.currentHasNextChecked + if (c.tail != null) { + if (last == null) last = c.last + c.last.tail = tail + tail = c.tail + } + merge() + } + + // Advance current to the next non-empty iterator + // current is set to null when all iterators are exhausted + @tailrec def advance(): Boolean = + if (tail == null) { + current = null + last = null + false + } + else { + current = tail.headIterator + if (last eq tail) last = last.tail + tail = tail.tail + merge() + if (currentHasNextChecked) true + else if (current != null && current.hasNext) { + currentHasNextChecked = true + true + } else advance() + } + + advance() + } + + def next() = + if (hasNext) { + currentHasNextChecked = false + current.next() + } else Iterator.empty.next() + + override def concat[B >: A](that: => IterableOnce[B]): Iterator[B] = { + val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] + if (tail == null) { + tail = c + last = c + } + else { + last.tail = c + last = c + } + if (current == null) current = Iterator.empty + this + } + } + + private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: ConcatIteratorCell[A]) { + def headIterator: Iterator[A] = head.iterator + } + + /** Creates a 
delegating iterator capped by a limit count. Negative limit means unbounded. + * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. + */ + private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { + private[this] var remaining = limit + private[this] var dropping = start + @inline private def unbounded = remaining < 0 + private def skip(): Unit = + while (dropping > 0) { + if (underlying.hasNext) { + underlying.next() + dropping -= 1 + } else + dropping = 0 + } + override def knownSize: Int = { + val size = underlying.knownSize + if (size < 0) -1 + else { + val dropSize = 0 max (size - dropping) + if (unbounded) dropSize + else remaining min dropSize + } + } + def hasNext = { skip(); remaining != 0 && underlying.hasNext } + def next() = { + skip() + if (remaining > 0) { + remaining -= 1 + underlying.next() + } + else if (unbounded) underlying.next() + else empty.next() + } + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + val lo = from max 0 + def adjustedBound = + if (unbounded) -1 + else 0 max (remaining - lo) + val rest = + if (until < 0) adjustedBound // respect current bound, if any + else if (until <= lo) 0 // empty + else if (unbounded) until - lo // now finite + else adjustedBound min (until - lo) // keep lesser bound + val sum = dropping + lo + if (rest == 0) empty + else if (sum < 0) { + dropping = Int.MaxValue + remaining = 0 + this.concat(new SliceIterator(underlying, start = sum - Int.MaxValue, limit = rest)) + } + else { + dropping = sum + remaining = rest + this + } + } + } + + /** Creates an iterator that uses a function `f` to produce elements of + * type `A` and update an internal state of type `S`. 
+ */ + private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)]) extends AbstractIterator[A] { + private[this] var state: S = init + private[this] var nextResult: Option[(A, S)] = null + + override def hasNext: Boolean = { + if (nextResult eq null) { + nextResult = { + val res = f(state) + if (res eq null) throw new NullPointerException("null during unfold") + res + } + state = null.asInstanceOf[S] // allow GC + } + nextResult.isDefined + } + + override def next(): A = { + if (hasNext) { + val (value, newState) = nextResult.get + state = newState + nextResult = null + value + } else Iterator.empty.next() + } + } } /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */ diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala deleted file mode 100644 index abfcafa5df13..000000000000 --- a/src/library/scala/collection/JavaConversions.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import convert._ - -/** A variety of implicit conversions supporting interoperability between - * Scala and Java collections. 
- * - * The following conversions are supported: - *{{{ - * scala.collection.Iterable <=> java.lang.Iterable - * scala.collection.Iterable <=> java.util.Collection - * scala.collection.Iterator <=> java.util.{ Iterator, Enumeration } - * scala.collection.mutable.Buffer <=> java.util.List - * scala.collection.mutable.Set <=> java.util.Set - * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary } - * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap - *}}} - * In all cases, converting from a source type to a target type and back - * again will return the original source object: - * - *{{{ - * import scala.collection.JavaConversions._ - * - * val sl = new scala.collection.mutable.ListBuffer[Int] - * val jl : java.util.List[Int] = sl - * val sl2 : scala.collection.mutable.Buffer[Int] = jl - * assert(sl eq sl2) - *}}} - * In addition, the following one way conversions are provided: - * - *{{{ - * scala.collection.Seq => java.util.List - * scala.collection.mutable.Seq => java.util.List - * scala.collection.Set => java.util.Set - * scala.collection.Map => java.util.Map - * java.util.Properties => scala.collection.mutable.Map[String, String] - *}}} - * - * The transparent conversions provided here are considered - * fragile because they can result in unexpected behavior and performance. - * - * Therefore, this API has been deprecated and `JavaConverters` should be - * used instead. `JavaConverters` provides the same conversions, but through - * extension methods. 
- * - * @author Miles Sabin - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("use JavaConverters", since="2.12.0") -object JavaConversions extends WrapAsScala with WrapAsJava diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index 073066726aef..7a803a685d3e 100644 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,10 +10,13 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection -import convert._ +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.collection.convert._ +import scala.language.implicitConversions /** A variety of decorators that enable converting between * Scala and Java collections using extension methods, `asScala` and `asJava`. 
@@ -63,7 +66,7 @@ import convert._ * vs: java.util.List[String] = [hi, bye] * * scala> val ss = asScalaIterator(vs.iterator) - * ss: Iterator[String] = non-empty iterator + * ss: Iterator[String] = * * scala> .toList * res0: List[String] = List(hi, bye) @@ -71,7 +74,262 @@ import convert._ * scala> val ss = asScalaBuffer(vs) * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye) * }}} - * - * @since 2.8.1 */ -object JavaConverters extends DecorateAsJava with DecorateAsScala +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object JavaConverters extends AsJavaConverters with AsScalaConverters { + @deprecated("Use `asJava` instead", "2.13.0") + def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i) + + @deprecated("Use `asJava` instead", "2.13.0") + def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i) + + @deprecated("Use `asJava` instead", "2.13.0") + def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b) + + @deprecated("Use `asJava` instead", "2.13.0") + def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m) + + @deprecated("Use `asJava` instead", "2.13.0") + def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m) + + @deprecated("Use `asJava` instead", "2.13.0") + def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m) + + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i) + + @deprecated("Use `asScala` 
instead", "2.13.0") + def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l) + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s) + + @deprecated("Use `asScala` instead", "2.13.0") + def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m) + + @deprecated("Use `asScala` instead", "2.13.0") + def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m) + + @deprecated("Use `asScala` instead", "2.13.0") + def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p) + + @deprecated("Use `asScala` instead", "2.13.0") + def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p) + + // Deprecated implicit conversions for code that directly imports them + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. + * @see [[asJavaIterator]] + */ + implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = + new AsJava(asJavaIterator(i)) + + /** + * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. + * @see [[asJavaEnumeration]] + */ + implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = + new AsJavaEnumeration(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. 
+ * @see [[asJavaIterable]] + */ + implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = + new AsJava(asJavaIterable(i)) + + /** + * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. + * @see [[asJavaCollection]] + */ + implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = + new AsJavaCollection(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[bufferAsJavaList]] + */ + implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = + new AsJava(bufferAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[mutableSeqAsJavaList]] + */ + implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = + new AsJava(mutableSeqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. + * @see [[seqAsJavaList]] + */ + implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = + new AsJava(seqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[mutableSetAsJavaSet]] + */ + implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = + new AsJava(mutableSetAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. + * @see [[setAsJavaSet]] + */ + implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = + new AsJava(setAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. 
+ * @see [[mutableMapAsJavaMap]] + */ + implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mutableMapAsJavaMap(m)) + + /** + * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[asJavaDictionary]] + */ + implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = + new AsJavaDictionary(m) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. + * @see [[mapAsJavaMap]] + */ + implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mapAsJavaMap(m)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * @see [[mapAsJavaConcurrentMap]]. + */ + implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = + new AsJava(mapAsJavaConcurrentMap(m)) + + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. + * @see [[asScalaIterator]] + */ + implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = + new AsScala(asScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. + * @see [[enumerationAsScalaIterator]] + */ + implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = + new AsScala(enumerationAsScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. + * @see [[iterableAsScalaIterable]] + */ + implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = + new AsScala(iterableAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. 
+ * @see [[collectionAsScalaIterable]] + */ + implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = + new AsScala(collectionAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. + * @see [[asScalaBuffer]] + */ + implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = + new AsScala(asScalaBuffer(l)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. + * @see [[asScalaSet]] + */ + implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = + new AsScala(asScalaSet(s)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. + * @see [[mapAsScalaMap]] + */ + implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(mapAsScalaMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. + * @see [[mapAsScalaConcurrentMap]] + */ + implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] = + new AsScala(mapAsScalaConcurrentMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. + * @see [[dictionaryAsScalaMap]] + */ + implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(dictionaryAsScalaMap(p)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. 
+ * @see [[propertiesAsScalaMap]] + */ + implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = + new AsScala(propertiesAsScalaMap(p)) + + + /** Generic class containing the `asJava` converter method */ + class AsJava[A](op: => A) { + /** Converts a Scala collection to the corresponding Java collection */ + def asJava: A = op + } + + /** Generic class containing the `asScala` converter method */ + class AsScala[A](op: => A) { + /** Converts a Java collection to the corresponding Scala collection */ + def asScala: A = op + } + + /** Generic class containing the `asJavaCollection` converter method */ + class AsJavaCollection[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Collection` */ + def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) + } + + /** Generic class containing the `asJavaEnumeration` converter method */ + class AsJavaEnumeration[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Enumeration` */ + def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) + } + + /** Generic class containing the `asJavaDictionary` converter method */ + class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { + /** Converts a Scala `Map` to a Java `Dictionary` */ + def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) + } +} diff --git a/src/library/scala/collection/LazyZipOps.scala b/src/library/scala/collection/LazyZipOps.scala new file mode 100644 index 000000000000..a7a72ce882a8 --- /dev/null +++ b/src/library/scala/collection/LazyZipOps.scala @@ -0,0 +1,422 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.language.implicitConversions + +/** Decorator representing lazily zipped pairs. + * + * @define coll pair + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1], coll2: Iterable[El2]) { + + /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. + * + * @param that the iterable providing the third element of each eventual triple + * @tparam B the type of the third element in each eventual triple + * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or + * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip3[El1, El2, B, C1] = new LazyZip3(src, coll1, coll2, that) + + def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator: AbstractIterator[B] = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + def hasNext = elems1.hasNext && elems2.hasNext + def next() = f(elems1.next(), elems2.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator: AbstractIterator[B] = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext) + _current = 
f(elems1.next(), elems2.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + }) + } + + def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2)] { + def iterator: AbstractIterator[(El1, El2)] = new AbstractIterator[(El1, El2)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] var _current: (El1, El2) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + if (p(e1, e2)) _current = (e1, e2) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.hasNext + }) + } + + def exists(p: (El1, El2) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next()) + + res + } + + def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2)) + + def foreach[U](f: (El1, El2) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + + while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next()) + } + + private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] { + def iterator: AbstractIterator[(El1, El2)] = new AbstractIterator[(El1, El2)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + def hasNext = elems1.hasNext && elems2.hasNext + def next() = 
(elems1.next(), elems2.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else s1 min s2 + } + } + + override def toString = s"$coll1.lazyZip($coll2)" +} + +object LazyZip2 { + implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable +} + + +/** Decorator representing lazily zipped triples. + * + * @define coll triple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, + coll1: Iterable[El1], + coll2: Iterable[El2], + coll3: Iterable[El3]) { + + /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. + * + * @param that the iterable providing the fourth element of each eventual 4-tuple + * @tparam B the type of the fourth element in each eventual 4-tuple + * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. + * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. 
+ */ + def lazyZip[B](that: Iterable[B]): LazyZip4[El1, El2, El3, B, C1] = new LazyZip4(src, coll1, coll2, coll3, that) + + def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator: AbstractIterator[B] = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator: AbstractIterator[B] = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { + def iterator: AbstractIterator[(El1, El2, El3)] = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: (El1, El2, El3) = _ + 
private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + if (p(e1, e2, e3)) _current = (e1, e2, e3) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next()) + + res + } + + def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) + + def foreach[U](f: (El1, El2, El3) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) + f(elems1.next(), elems2.next(), elems3.next()) + } + + private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { + def iterator: AbstractIterator[(El1, El2, El3)] = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if 
(s3 == 0) 0 else s1 min s2 min s3 + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3)" +} + +object LazyZip3 { + implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable +} + + + +/** Decorator representing lazily zipped 4-tuples. + * + * @define coll tuple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, + coll1: Iterable[El1], + coll2: Iterable[El2], + coll3: Iterable[El3], + coll4: Iterable[El4]) { + + def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator: AbstractIterator[B] = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator: AbstractIterator[B] = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator + _current + 
} + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] { + def iterator: AbstractIterator[(El1, El2, El3, El4)] = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: (El1, El2, El3, El4) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + val e4 = elems4.next() + if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + + res + } + + def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) + + def foreach[U](f: (El1, El2, El3, El4) => U): Unit 
= { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + + private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { + def iterator: AbstractIterator[(El1, El2, El3, El4)] = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else { + val s4 = coll4.knownSize + if (s4 == 0) 0 else s1 min s2 min s3 min s4 + } + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" +} + +object LazyZip4 { + implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = + zipped4.toIterable +} diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala index d5e43c41e7b4..965edecdadc7 100644 --- a/src/library/scala/collection/LinearSeq.scala +++ b/src/library/scala/collection/LinearSeq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,32 +13,298 @@ package scala package collection -import generic._ -import mutable.Builder +import scala.annotation.{nowarn, tailrec} -/** A base trait for linear sequences. - * - * $linearSeqInfo - * - * @define linearSeqInfo - * Linear sequences have reasonably efficient `head`, `tail`, and `isEmpty` methods. - * If these methods provide the fastest way to traverse the collection, a - * collection `Coll` that extends this trait should also extend - * `LinearSeqOptimized[A, Coll[A]]`. - */ +/** Base trait for linearly accessed sequences that have efficient `head` and + * `tail` operations. + * Known subclasses: List, LazyList + */ trait LinearSeq[+A] extends Seq[A] - with GenericTraversableTemplate[A, LinearSeq] - with LinearSeqLike[A, LinearSeq[A]] { - override def companion: GenericCompanion[LinearSeq] = LinearSeq - override def seq: LinearSeq[A] = this + with LinearSeqOps[A, LinearSeq, LinearSeq[A]] + with IterableFactoryDefaults[A, LinearSeq] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "LinearSeq" + + override def iterableFactory: SeqFactory[LinearSeq] = LinearSeq } -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define coll linear sequence - * @define Coll `LinearSeq` - */ -object LinearSeq extends SeqFactory[LinearSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, LinearSeq[A]] = immutable.LinearSeq.newBuilder[A] +@SerialVersionUID(3L) +object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) + +/** Base trait for linear Seq operations */ +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] { + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. 
The default implementation that is inherited from [[SeqOps]] + * uses `lengthCompare`, which is defined here to use `isEmpty`. + */ + override def isEmpty: Boolean + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]]. + */ + def head: A + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]]. + */ + def tail: C + + override def headOption: Option[A] = + if (isEmpty) None else Some(head) + + def iterator: Iterator[A] = + if (knownSize == 0) Iterator.empty + else new LinearSeqIterator[A](this) + + def length: Int = { + var these = coll + var len = 0 + while (these.nonEmpty) { + len += 1 + these = these.tail + } + len + } + + override def last: A = { + if (isEmpty) throw new NoSuchElementException("LinearSeq.last") + else { + var these = coll + var scout = tail + while (scout.nonEmpty) { + these = scout + scout = scout.tail + } + these.head + } + } + + override def lengthCompare(len: Int): Int = { + @tailrec def loop(i: Int, xs: LinearSeq[A]): Int = { + if (i == len) + if (xs.isEmpty) 0 else 1 + else if (xs.isEmpty) + -1 + else + loop(i + 1, xs.tail) + } + if (len < 0) 1 + else loop(0, coll) + } + + override def lengthCompare(that: Iterable[_]): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this lengthCompare thatKnownSize + else that match { + case that: LinearSeq[_] => + var thisSeq = this + var thatSeq = that + while (thisSeq.nonEmpty && thatSeq.nonEmpty) { + thisSeq = thisSeq.tail + thatSeq = thatSeq.tail + } + java.lang.Boolean.compare(thisSeq.nonEmpty, thatSeq.nonEmpty) + case _ => + var thisSeq = this + val thatIt = that.iterator + while (thisSeq.nonEmpty && thatIt.hasNext) { + thisSeq = thisSeq.tail + thatIt.next() + } + java.lang.Boolean.compare(thisSeq.nonEmpty, thatIt.hasNext) + } + } + + override def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0 + + // `apply` is 
defined in terms of `drop`, which is in turn defined in + // terms of `tail`. + @throws[IndexOutOfBoundsException] + override def apply(n: Int): A = { + if (n < 0) throw new IndexOutOfBoundsException(n.toString) + val skipped = drop(n) + if (skipped.isEmpty) throw new IndexOutOfBoundsException(n.toString) + skipped.head + } + + override def foreach[U](f: A => U): Unit = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + f(these.head) + these = these.tail + } + } + + override def forall(p: A => Boolean): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (!p(these.head)) return false + these = these.tail + } + true + } + + override def exists(p: A => Boolean): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (p(these.head)) return true + these = these.tail + } + false + } + + override def contains[A1 >: A](elem: A1): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (these.head == elem) return true + these = these.tail + } + false + } + + override def find(p: A => Boolean): Option[A] = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (p(these.head)) return Some(these.head) + these = these.tail + } + None + } + + override def foldLeft[B](z: B)(op: (B, A) => B): B = { + var acc = z + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + acc = op(acc, these.head) + these = these.tail + } + acc + } + + override def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = + (a eq b) || { + if (a.nonEmpty && b.nonEmpty && a.head == b.head) { + linearSeqEq(a.tail, b.tail) + } + else { + a.isEmpty && b.isEmpty + } + } + + that match { + case that: LinearSeq[B] => linearSeqEq(coll, that) + case _ => super.sameElements(that) + } + } + + override def segmentLength(p: A => Boolean, from: Int): Int = { + var i = 0 + var seq = drop(from) + while (seq.nonEmpty && p(seq.head)) { + i += 1 + seq = 
seq.tail + } + i + } + + override def indexWhere(p: A => Boolean, from: Int): Int = { + var i = math.max(from, 0) + var these: LinearSeq[A] = this drop from + while (these.nonEmpty) { + if (p(these.head)) + return i + + i += 1 + these = these.tail + } + -1 + } + + override def lastIndexWhere(p: A => Boolean, end: Int): Int = { + var i = 0 + var these: LinearSeq[A] = coll + var last = -1 + while (!these.isEmpty && i <= end) { + if (p(these.head)) last = i + these = these.tail + i += 1 + } + last + } + + override def findLast(p: A => Boolean): Option[A] = { + var these: LinearSeq[A] = coll + var found = false + var last: A = null.asInstanceOf[A] // don't use `Option`, to prevent excessive `Some` allocation + while (these.nonEmpty) { + val elem = these.head + if (p(elem)) { + found = true + last = elem + } + these = these.tail + } + if (found) Some(last) else None + } + + override def tails: Iterator[C] = { + val end = Iterator.single(empty) + Iterator.iterate(coll)(_.tail).takeWhile(_.nonEmpty) ++ end + } +} + +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { + // A more efficient iterator implementation than the default LinearSeqIterator + override def iterator: Iterator[A] = new AbstractIterator[A] { + private[this] var current = StrictOptimizedLinearSeqOps.this + def hasNext = !current.isEmpty + def next() = { val r = current.head; current = current.tail; r } + } + + // Optimized version of `drop` that avoids copying + override def drop(n: Int): C = { + @tailrec def loop(n: Int, s: C): C = + if (n <= 0 || s.isEmpty) s + else loop(n - 1, s.tail) + loop(n, coll) + } + + override def dropWhile(p: A => Boolean): C = { + @tailrec def loop(s: C): C = + if (s.nonEmpty && p(s.head)) loop(s.tail) + else s + loop(coll) + } +} + +/** A specialized Iterator for LinearSeqs that is lazy enough for Stream and LazyList. 
This is accomplished by not + * evaluating the tail after returning the current head. + */ +private[collection] final class LinearSeqIterator[A](coll: LinearSeqOps[A, LinearSeq, LinearSeq[A]]) extends AbstractIterator[A] { + // A call-by-need cell + private[this] final class LazyCell(st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) { lazy val v = st } + + private[this] var these: LazyCell = { + // Reassign reference to avoid creating a private class field and holding a reference to the head. + // LazyCell would otherwise close over `coll`. + val initialHead = coll + new LazyCell(initialHead) + } + + def hasNext: Boolean = these.v.nonEmpty + + def next(): A = + if (isEmpty) Iterator.empty.next() + else { + val cur = these.v + val result = cur.head + these = new LazyCell(cur.tail) + result + } } diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala deleted file mode 100644 index e7b4af4add67..000000000000 --- a/src/library/scala/collection/LinearSeqLike.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import immutable.List -import scala.annotation.tailrec - -/** A template trait for linear sequences of type `LinearSeq[A]`. - * - * This trait just implements `iterator` and `corresponds` in terms of `isEmpty, ``head`, and `tail`. - * However, see `LinearSeqOptimized` for an implementation trait that overrides many more operations - * to make them run faster under the assumption of fast linear access with `head` and `tail`. - * - * Linear sequences do not add any new methods to `Seq`, but promise efficient implementations - * of linear access patterns. 
- * @author Martin Odersky - * @since 2.8 - * - * @tparam A the element type of the $coll - * @tparam Repr the type of the actual $coll containing the elements. - */ -trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr] { - self: Repr => - - override protected[this] def thisCollection: LinearSeq[A] = this.asInstanceOf[LinearSeq[A]] - override protected[this] def toCollection(repr: Repr): LinearSeq[A] = repr.asInstanceOf[LinearSeq[A]] - - def seq: LinearSeq[A] - - override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ? - - override /*IterableLike*/ - def iterator: Iterator[A] = if (self.isEmpty) Iterator.empty else new AbstractIterator[A] { - var these = self - def hasNext: Boolean = !these.isEmpty - def next(): A = - if (hasNext) { - val result = these.head; these = these.tail; result - } else Iterator.empty.next() - - override def toList: List[A] = { - /* Have to clear `these` so the iterator is exhausted like - * it would be without the optimization. - * - * Calling "newBuilder.result()" in toList method - * prevents original seq from garbage collection, - * so we use these.take(0) here. - * - * Check scala/bug#8924 for details - */ - val xs = these.toList - these = these.take(0) - xs - } - } - - @tailrec override final def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { - if (this.isEmpty) that.isEmpty - else that.nonEmpty && p(head, that.head) && (tail corresponds that.tail)(p) - } -} diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala deleted file mode 100644 index 62064662c4dd..000000000000 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.tailrec - -/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes - * the implementation of various methods under the assumption of fast linear access. - * - * $linearSeqOptim - * - * @define linearSeqOptim - * Linear-optimized sequences implement most operations in in terms of three methods, - * which are assumed to have efficient implementations. These are: - * {{{ - * def isEmpty: Boolean - * def head: A - * def tail: Repr - * }}} - * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself. - * Note that default implementations are provided via inheritance, but these - * should be overridden for performance. - * - * - */ -trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr => - - def isEmpty: Boolean - - def head: A - - def tail: Repr - - /** The length of the $coll. - * - * $willNotTerminateInf - * - * Note: the execution of `length` may take time proportional to the length of the sequence. - */ - def length: Int = { - var these = self - var len = 0 - while (!these.isEmpty) { - len += 1 - these = these.tail - } - len - } - - /** Selects an element by its index in the $coll. - * Note: the execution of `apply` may take time proportional to the index value. - * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. 
- */ - def apply(n: Int): A = { - val rest = drop(n) - if (n < 0 || rest.isEmpty) throw new IndexOutOfBoundsException("" + n) - rest.head - } - - override /*IterableLike*/ - def foreach[U](f: A => U) { - var these = this - while (!these.isEmpty) { - f(these.head) - these = these.tail - } - } - - - override /*IterableLike*/ - def forall(p: A => Boolean): Boolean = { - var these = this - while (!these.isEmpty) { - if (!p(these.head)) return false - these = these.tail - } - true - } - - override /*IterableLike*/ - def exists(p: A => Boolean): Boolean = { - var these = this - while (!these.isEmpty) { - if (p(these.head)) return true - these = these.tail - } - false - } - - override /*SeqLike*/ - def contains[A1 >: A](elem: A1): Boolean = { - var these = this - while (!these.isEmpty) { - if (these.head == elem) return true - these = these.tail - } - false - } - - override /*IterableLike*/ - def find(p: A => Boolean): Option[A] = { - var these = this - while (!these.isEmpty) { - if (p(these.head)) return Some(these.head) - these = these.tail - } - None - } - - override /*TraversableLike*/ - def foldLeft[B](z: B)(@deprecatedName('f) op: (B, A) => B): B = { - var acc = z - var these = this - while (!these.isEmpty) { - acc = op(acc, these.head) - these = these.tail - } - acc - } - - override /*IterableLike*/ - def foldRight[B](z: B)(@deprecatedName('f) op: (A, B) => B): B = - if (this.isEmpty) z - else op(head, tail.foldRight(z)(op)) - - override /*TraversableLike*/ - def reduceLeft[B >: A](@deprecatedName('f) op: (B, A) => B): B = - if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else tail.foldLeft[B](head)(op) - - override /*IterableLike*/ - def reduceRight[B >: A](op: (A, B) => B): B = - if (isEmpty) throw new UnsupportedOperationException("Nil.reduceRight") - else if (tail.isEmpty) head - else op(head, tail.reduceRight(op)) - - override /*TraversableLike*/ - def last: A = { - if (isEmpty) throw new NoSuchElementException - var these = this - 
var nx = these.tail - while (!nx.isEmpty) { - these = nx - nx = nx.tail - } - these.head - } - - override /*IterableLike*/ - def take(n: Int): Repr = { - val b = newBuilder - var i = 0 - var these = repr - while (!these.isEmpty && i < n) { - i += 1 - b += these.head - these = these.tail - } - b.result() - } - - override /*TraversableLike*/ - def drop(n: Int): Repr = { - var these: Repr = repr - var count = n - while (!these.isEmpty && count > 0) { - these = these.tail - count -= 1 - } - // !!! This line should actually be something like: - // newBuilder ++= these result - // since we are in collection.*, not immutable.*. - // However making that change will pessimize all the - // immutable linear seqs (like list) which surely expect - // drop to share. (Or at least it would penalize List if - // it didn't override drop. It would be a lot better if - // the leaf collections didn't override so many methods.) - // - // Upshot: MutableList is broken and passes part of the - // original list as the result of drop. 
- these - } - - override /*IterableLike*/ - def dropRight(n: Int): Repr = { - val b = newBuilder - var these = this - var lead = this drop n - while (!lead.isEmpty) { - b += these.head - these = these.tail - lead = lead.tail - } - b.result() - } - - override /*IterableLike*/ - def slice(from: Int, until: Int): Repr = { - var these: Repr = repr - var count = from max 0 - if (until <= count) - return newBuilder.result() - - val b = newBuilder - var sliceElems = until - count - while (these.nonEmpty && count > 0) { - these = these.tail - count -= 1 - } - while (these.nonEmpty && sliceElems > 0) { - sliceElems -= 1 - b += these.head - these = these.tail - } - b.result() - } - - override /*IterableLike*/ - def takeWhile(p: A => Boolean): Repr = { - val b = newBuilder - var these = this - while (!these.isEmpty && p(these.head)) { - b += these.head - these = these.tail - } - b.result() - } - - override /*TraversableLike*/ - def span(p: A => Boolean): (Repr, Repr) = { - var these: Repr = repr - val b = newBuilder - while (!these.isEmpty && p(these.head)) { - b += these.head - these = these.tail - } - (b.result(), these) - } - - override /*IterableLike*/ - def sameElements[B >: A](that: GenIterable[B]): Boolean = that match { - case that1: LinearSeq[_] => - // Probably immutable, so check reference identity first (it's quick anyway) - (this eq that1) || { - var these = this - var those = that1 - while (!these.isEmpty && !those.isEmpty && these.head == those.head) { - these = these.tail - those = those.tail - } - these.isEmpty && those.isEmpty - } - case _ => - super.sameElements(that) - } - - override /*SeqLike*/ - def lengthCompare(len: Int): Int = { - @tailrec def loop(i: Int, xs: Repr): Int = { - if (i == len) - if (xs.isEmpty) 0 else 1 - else if (xs.isEmpty) - -1 - else - loop(i + 1, xs.tail) - } - if (len < 0) 1 - else loop(0, this) - } - - override /*SeqLike*/ - def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0 - - override /*SeqLike*/ - def 
segmentLength(p: A => Boolean, from: Int): Int = { - var i = 0 - var these = this drop from - while (!these.isEmpty && p(these.head)) { - i += 1 - these = these.tail - } - i - } - - override /*SeqLike*/ - def indexWhere(p: A => Boolean, from: Int): Int = { - var i = math.max(from, 0) - var these = this drop from - while (these.nonEmpty) { - if (p(these.head)) - return i - - i += 1 - these = these.tail - } - -1 - } - - override /*SeqLike*/ - def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = 0 - var these = this - var last = -1 - while (!these.isEmpty && i <= end) { - if (p(these.head)) last = i - these = these.tail - i += 1 - } - last - } - - override /*TraversableLike*/ - def tails: Iterator[Repr] = Iterator.iterate(repr)(_.tail).takeWhile(_.nonEmpty) ++ Iterator(newBuilder.result) -} diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 6201d0b5072c..1b88058d3197 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,52 +13,407 @@ package scala package collection -import generic._ +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.StringBuilder +import scala.util.hashing.MurmurHash3 -/** - * A map from keys of type `K` to values of type `V`. - * - * $mapNote - * - * '''Note:''' If you do not have specific implementations for `add` and `-` in mind, - * you might consider inheriting from `DefaultMap` instead. - * - * '''Note:''' If your additions and mutations return the same kind of map as the map - * you are defining, you should inherit from `MapLike` as well. - * - * @tparam K the type of the keys in this map. 
- * @tparam V the type of the values associated with keys. - * - * @since 1.0 - */ -trait Map[K, +V] extends Iterable[(K, V)] with GenMap[K, V] with MapLike[K, V, Map[K, V]] { - def empty: Map[K, V] = Map.empty +/** Base Map type */ +trait Map[K, +V] + extends Iterable[(K, V)] + with MapOps[K, V, Map, Map[K, V]] + with MapFactoryDefaults[K, V, Map, Iterable] + with Equals { + + def mapFactory: scala.collection.MapFactory[Map] = Map - override def seq: Map[K, V] = this + def canEqual(that: Any): Boolean = true + + /** + * Equality of maps is implemented using the lookup method [[get]]. This method returns `true` if + * - the argument `o` is a `Map`, + * - the two maps have the same [[size]], and + * - for every `(key, value)` pair in this map, `other.get(key) == Some(value)`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Map` can narrow down the equality + * to specific map types. The `Map` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two maps use the same + * key equivalence function in their lookup operation. For example, the key equivalence operation in a + * [[scala.collection.immutable.TreeMap]] is defined by its ordering. Comparing a `TreeMap` with a `HashMap` leads + * to unexpected results if `ordering.equiv(k1, k2)` (used for lookup in `TreeMap`) is different from `k1 == k2` + * (used for lookup in `HashMap`). 
+ * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeMap("A" -> 1)(ord) == HashMap("a" -> 1) + * val res0: Boolean = false + * + * scala> HashMap("a" -> 1) == TreeMap("A" -> 1)(ord) + * val res1: Boolean = true + * }}} + * + * + * @param o The map to which this map is compared + * @return `true` if the two maps are equal according to the description + */ + override def equals(o: Any): Boolean = + (this eq o.asInstanceOf[AnyRef]) || (o match { + case map: Map[K @unchecked, _] if map.canEqual(this) => + (this.size == map.size) && { + try this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } + case _ => + false + }) + + override def hashCode(): Int = MurmurHash3.mapHash(this) + + // These two methods are not in MapOps so that MapView is not forced to implement them + @deprecated("Use - or removed on an immutable Map", "2.13.0") + def - (key: K): Map[K, V] + @deprecated("Use -- or removedAll on an immutable Map", "2.13.0") + def - (key1: K, key2: K, keys: K*): Map[K, V] + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "Map" + + override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too } -/** $factoryInfo - * @define Coll `Map` - * @define coll map - */ -object Map extends MapFactory[Map] { - def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty +/** Base Map implementation type + * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC type constructor of the map (e.g. `HashMap`). Operations returning a collection + * with a different type of entries `(L, W)` (e.g. `map`) return a `CC[L, W]`. + * @tparam C type of the map (e.g. `HashMap[Int, String]`). Operations returning a collection + * with the same type of element (e.g. 
`drop`, `filter`) return a `C`. + * @define coll map + * @define Coll `Map` + */ +// Note: the upper bound constraint on CC is useful only to +// erase CC to IterableOps instead of Object +trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends IterableOps[(K, V), Iterable, C] + with PartialFunction[K, V] { - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (K, V), Map[K, V]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] + override def view: MapView[K, V] = new MapView.Id(this) + + /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ + def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (keysIterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (keysIterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(keysIterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper(keysIterator)) + } + s.asInstanceOf[S] + } + + /** Returns a [[Stepper]] for the values of this map. See method [[stepper]]. */ + def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (valuesIterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (valuesIterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(valuesIterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper(valuesIterator)) + } + s.asInstanceOf[S] + } - /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map - * because of variance issues. 
+ /** Similar to `fromIterable`, but returns a Map collection type. + * Note that the return type is now `CC[K2, V2]`. + */ + @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]): CC[K2, V2] = mapFactory.from(it) + + /** The companion object of this map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def mapFactory: MapFactory[CC] + + /** Optionally returns the value associated with a key. + * + * @param key the key value + * @return an option value containing the value associated with `key` in this map, + * or `None` if none exists. + */ + def get(key: K): Option[V] + + /** Returns the value associated with a key, or a default value if the key is not contained in the map. + * @param key the key. + * @param default a computation that yields a default value in case no binding for `key` is + * found in the map. + * @tparam V1 the result type of the default computation. + * @return the value associated with `key` if it exists, + * otherwise the result of the `default` computation. + */ + def getOrElse[V1 >: V](key: K, default: => V1): V1 = get(key) match { + case Some(v) => v + case None => default + } + + /** Retrieves the value which is associated with the given key. This + * method invokes the `default` method of the map if there is no mapping + * from the given key to a value. Unless overridden, the `default` method throws a + * `NoSuchElementException`. + * + * @param key the key + * @return the value associated with the given key, or the result of the + * map's `default` method, if none exists. 
+ */ + @throws[NoSuchElementException] + def apply(key: K): V = get(key) match { + case None => default(key) + case Some(value) => value + } + + override /*PartialFunction*/ def applyOrElse[K1 <: K, V1 >: V](x: K1, default: K1 => V1): V1 = getOrElse(x, default(x)) + + /** A set representing the keys contained by this map. + * + * For efficiency the resulting set may be a view (maintaining a reference to the map and reflecting modifications + * to the map), but it may also be a strict collection without reference to the map. + * + * - To ensure an independent strict collection, use `m.keysIterator.toSet` + * - To obtain a view on the keys, use `scala.collection.View.fromIteratorProvider(m.keysIterator)` + * + * @return a set representing the keys contained by this map */ - abstract class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends AbstractMap[K, V] with Map[K, V] with Serializable { - override def size = underlying.size - def get(key: K) = underlying.get(key) // removed in 2.9: orElse Some(default(key)) - def iterator = underlying.iterator - override def default(key: K): V = d(key) + def keySet: Set[K] = new KeySet + + /** The implementation class of the set returned by `keySet`. + */ + protected class KeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def diff(that: Set[K]): Set[K] = fromSpecific(this.view.filterNot(that)) + } + + /** A generic trait that is reused by keyset implementations */ + protected trait GenKeySet { this: Set[K] => + def iterator: Iterator[K] = MapOps.this.keysIterator + def contains(key: K): Boolean = MapOps.this.contains(key) + override def size: Int = MapOps.this.size + override def knownSize: Int = MapOps.this.knownSize + override def isEmpty: Boolean = MapOps.this.isEmpty } + /** An [[Iterable]] collection of the keys contained by this map. 
+ * + * For efficiency the resulting collection may be a view (maintaining a reference to the map and reflecting + * modifications to the map), but it may also be a strict collection without reference to the map. + * + * - To ensure an independent strict collection, use `m.keysIterator.toSet` + * - To obtain a view on the keys, use `scala.collection.View.fromIteratorProvider(m.keysIterator)` + * + * @return an [[Iterable]] collection of the keys contained by this map + */ + @deprecatedOverriding("This method should be an alias for keySet", since="2.13.13") + def keys: Iterable[K] = keySet + + /** Collects all values of this map in an iterable collection. + * + * @return the values of this map as an iterable. + */ + def values: Iterable[V] = new AbstractIterable[V] with DefaultSerializable { + override def knownSize: Int = MapOps.this.knownSize + override def iterator: Iterator[V] = valuesIterator + } + + /** An [[Iterator]] of the keys contained by this map. + * + * @return an [[Iterator]] of the keys contained by this map + */ + def keysIterator: Iterator[K] = new AbstractIterator[K] { + val iter = MapOps.this.iterator + def hasNext = iter.hasNext + def next() = iter.next()._1 + } + + /** Creates an iterator for all values in this map. + * + * @return an iterator over all values that are associated with some key in this map. + */ + def valuesIterator: Iterator[V] = new AbstractIterator[V] { + val iter = MapOps.this.iterator + def hasNext = iter.hasNext + def next() = iter.next()._2 + } + + /** Apply `f` to each key/value pair for its side effects + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreachEntry[U](f: (K, V) => U): Unit = { + val it = iterator + while (it.hasNext) { + val next = it.next() + f(next._1, next._2) + } + } + + /** Filters this map by retaining only keys satisfying a predicate. 
+ * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. The resulting map wraps the original map without copying any elements. + */ + @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") + def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. + */ + @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + + /** Defines the default value computation for the map, + * returned when a key is not found. + * + * The method implemented here throws an exception, + * but it may be overridden by subclasses. + * + * @param key the given key value for which a binding is missing. + * @throws NoSuchElementException if no default value is defined + */ + @throws[NoSuchElementException] + def default(key: K): V = + throw new NoSuchElementException("key not found: " + key) + + /** Tests whether this map contains a binding for a key. + * + * @param key the key + * @return `true` if there is a binding for `key` in this map, `false` otherwise. + */ + def contains(key: K): Boolean = get(key).isDefined + + + /** Tests whether this map contains a binding for a key. This method, + * which implements an abstract method of trait `PartialFunction`, + * is equivalent to `contains`. + * + * @param key the key + * @return `true` if there is a binding for `key` in this map, `false` otherwise. 
+ */ + def isDefinedAt(key: K): Boolean = contains(key) + + /** Builds a new map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = mapFactory.from(new View.Map(this, f)) + + /** Builds a new collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam K2 the key type of the returned $coll. + * @tparam V2 the value type of the returned $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + mapFactory.from(new View.Collect(this, pf)) + + /** Builds a new map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. 
+ */ + def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + // Not final because subclasses refine the result type, e.g. in SortedMap, the result type is + // SortedMap's CC, while Map's CC is fixed to Map + /** Alias for `concat` */ + /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + iterator.map { case (k, v) => s"$k -> $v" }.addString(sb, start, sep, end) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat.", "2.13.0") + def + [V1 >: V](kv: (K, V1)): CC[K, V1] = + mapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = + mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + @deprecated("Consider requiring an immutable Map.", "2.13.0") + @`inline` def -- (keys: IterableOnce[K]): C = { + lazy val keysSet = keys.iterator.to(immutable.Set) + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++: [V1 >: V](that: IterableOnce[(K,V1)]): CC[K,V1] = { + val thatIterable: Iterable[(K, V1)] = that match { + case that: Iterable[(K, V1)] => that + case that => View.from(that) + } + mapFactory.from(new View.Concat(thatIterable, this)) + } +} + +object MapOps { + /** Specializes `WithFilter` for Map collection types by adding overloads to transformation + * operations that can return a Map. 
+ * + * @define coll map collection + */ + @SerialVersionUID(3L) + class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]]( + self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { + + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.mapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.mapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] = + new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +/** + * $factoryInfo + * @define coll map + * @define Coll `Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](immutable.Map) { + private val DefaultSentinel: AnyRef = new AnyRef + private val DefaultSentinelFn: () => AnyRef = () => DefaultSentinel } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala deleted file mode 100644 index bd57e6f4f5a5..000000000000 --- a/src/library/scala/collection/MapLike.scala +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import mutable.{ Builder, MapBuilder } -import scala.annotation.migration -import parallel.ParMap - -/** A template trait for maps, which associate keys with values. 
- * - * $mapNote - * $mapTags - * @since 2.8 - * - * @define mapNote - * '''Implementation note:''' - * This trait provides most of the operations of a `Map` independently of its representation. - * It is typically inherited by concrete implementations of maps. - * - * To implement a concrete map, you need to provide implementations of the - * following methods: - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def + [V1 >: V](kv: (K, V1)): This - * def -(key: K): This - * }}} - * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map - * you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * - * @define mapTags - * @tparam K the type of the keys. - * @tparam V the type of associated values. - * @tparam This the type of the map itself. - * - * @author Martin Odersky - * - * @define coll map - * @define Coll Map - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]] - extends PartialFunction[K, V] - with IterableLike[(K, V), This] - with GenMapLike[K, V, This] - with Subtractable[K, This] - with Parallelizable[(K, V), ParMap[K, V]] -{ -self => - - /** The empty map of the same type as this map - * @return an empty map of type `This`. - */ - def empty: This - - /** A common implementation of `newBuilder` for all maps in terms of `empty`. - * Overridden for mutable maps in `mutable.MapLike`. - */ - override protected[this] def newBuilder: Builder[(K, V), This] = new MapBuilder[K, V, This](empty) - - /** Optionally returns the value associated with a key. - * - * @param key the key value - * @return an option value containing the value associated with `key` in this map, - * or `None` if none exists. 
- */ - def get(key: K): Option[V] - - /** Creates a new iterator over all key/value pairs of this map - * - * @return the new iterator - */ - def iterator: Iterator[(K, V)] - - /** Adds a key/value pair to this map, returning a new map. - * @param kv the key/value pair - * @tparam V1 the type of the value in the key/value pair. - * @return a new map with the new binding added to this map - * - * @usecase def + (kv: (K, V)): Map[K, V] - * @inheritdoc - */ - def + [V1 >: V] (kv: (K, V1)): Map[K, V1] - - /** Removes a key from this map, returning a new map. - * @param key the key to be removed - * @return a new map without a binding for `key` - * - * @usecase def - (key: K): Map[K, V] - * @inheritdoc - */ - def - (key: K): This - - /** Tests whether the map is empty. - * - * @return `true` if the map does not contain any key/value binding, `false` otherwise. - */ - override def isEmpty: Boolean = size == 0 - - /** Returns the value associated with a key, or a default value if the key is not contained in the map. - * @param key the key. - * @param default a computation that yields a default value in case no binding for `key` is - * found in the map. - * @tparam V1 the result type of the default computation. - * @return the value associated with `key` if it exists, - * otherwise the result of the `default` computation. - * - * @usecase def getOrElse(key: K, default: => V): V - * @inheritdoc - */ - def getOrElse[V1 >: V](key: K, default: => V1): V1 = get(key) match { - case Some(v) => v - case None => default - } - - /** Retrieves the value which is associated with the given key. This - * method invokes the `default` method of the map if there is no mapping - * from the given key to a value. Unless overridden, the `default` method throws a - * `NoSuchElementException`. - * - * @param key the key - * @return the value associated with the given key, or the result of the - * map's `default` method, if none exists. 
- */ - def apply(key: K): V = get(key) match { - case None => default(key) - case Some(value) => value - } - - /** Tests whether this map contains a binding for a key. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def contains(key: K): Boolean = get(key).isDefined - - /** Tests whether this map contains a binding for a key. This method, - * which implements an abstract method of trait `PartialFunction`, - * is equivalent to `contains`. - * - * @param key the key - * @return `true` if there is a binding for `key` in this map, `false` otherwise. - */ - def isDefinedAt(key: K) = contains(key) - - override /*PartialFunction*/ - def applyOrElse[K1 <: K, V1 >: V](x: K1, default: K1 => V1): V1 = - getOrElse(x, default(x)) - - /** Collects all keys of this map in a set. - * @return a set containing all keys of this map. - */ - def keySet: Set[K] = new DefaultKeySet - - /** The implementation class of the set returned by `keySet`. - */ - @SerialVersionUID(1589106351530299313L) - protected class DefaultKeySet extends AbstractSet[K] with Set[K] with Serializable { - def contains(key : K) = self.contains(key) - def iterator = keysIterator - def + (elem: K): Set[K] = (Set[K]() ++ this + elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem - def - (elem: K): Set[K] = (Set[K]() ++ this - elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem - override def size = self.size - override def foreach[U](f: K => U) = self.keysIterator foreach f - } - - /** Creates an iterator for all keys. - * - * @return an iterator over all keys. - */ - def keysIterator: Iterator[K] = new AbstractIterator[K] { - val iter = self.iterator - def hasNext = iter.hasNext - def next() = iter.next()._1 - } - - /** Collects all keys of this map in an iterable collection. - * - * @return the keys of this map as an iterable. 
- */ - @migration("`keys` returns `Iterable[K]` rather than `Iterator[K]`.", "2.8.0") - def keys: Iterable[K] = keySet - - /** Collects all values of this map in an iterable collection. - * - * @return the values of this map as an iterable. - */ - @migration("`values` returns `Iterable[V]` rather than `Iterator[V]`.", "2.8.0") - def values: Iterable[V] = new DefaultValuesIterable - - /** The implementation class of the iterable returned by `values`. - */ - protected class DefaultValuesIterable extends AbstractIterable[V] with Iterable[V] with Serializable { - def iterator = valuesIterator - override def size = self.size - override def foreach[U](f: V => U) = self.valuesIterator foreach f - } - - /** Creates an iterator for all values in this map. - * - * @return an iterator over all values that are associated with some key in this map. - */ - def valuesIterator: Iterator[V] = new AbstractIterator[V] { - val iter = self.iterator - def hasNext = iter.hasNext - def next() = iter.next()._2 - } - - /** Defines the default value computation for the map, - * returned when a key is not found - * The method implemented here throws an exception, - * but it might be overridden in subclasses. - * - * @param key the given key value for which a binding is missing. - * @throws NoSuchElementException - */ - def default(key: K): V = - throw new NoSuchElementException("key not found: " + key) - - protected class FilteredKeys(p: K => Boolean) extends AbstractMap[K, V] with DefaultMap[K, V] { - override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) - def iterator = self.iterator.filter(kv => p(kv._1)) - override def contains(key: K) = p(key) && self.contains(key) - def get(key: K) = if (!p(key)) None else self.get(key) - } - - /** Filters this map by retaining only keys satisfying a predicate. 
- * - * '''Note''': the predicate must accept any key of type `K`, not just those already - * present in the map, as the predicate is tested before the underlying map is queried. - * - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) - - protected class MappedValues[W](f: V => W) extends AbstractMap[K, W] with DefaultMap[K, W] { - override def foreach[U](g: ((K, W)) => U): Unit = for ((k, v) <- self) g((k, f(v))) - def iterator = for ((k, v) <- self.iterator) yield (k, f(v)) - override def size = self.size - override def contains(key: K) = self.contains(key) - def get(key: K) = self.get(key).map(f) - } - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) - - // The following 5 operations (updated, two times +, two times ++) should really be - // generic, returning This[V]. We need better covariance support to express that though. - // So right now we do the brute force approach of code duplication. - - /** Creates a new map obtained by updating this map with a given key/value pair. - * @param key the key - * @param value the value - * @tparam V1 the type of the added value - * @return A new map with the new key/value mapping added to this map. - * - * @usecase def updated(key: K, value: V): Map[K, V] - * @inheritdoc - */ - def updated [V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value)) - - /** Adds key/value pairs to this map, returning a new map. 
- * - * This method takes two or more key/value pairs. Another overloaded - * variant of this method handles the case where a single key/value pair is - * added. - * @param kv1 the first key/value pair - * @param kv2 the second key/value pair - * @param kvs the remaining key/value pairs - * @tparam V1 the type of the added values - * @return a new map with the given bindings added to this map - * - * @usecase def + (kvs: (K, V)*): Map[K, V] - * @inheritdoc - * @param kvs the key/value pairs - */ - def + [V1 >: V] (kv1: (K, V1), kv2: (K, V1), kvs: (K, V1) *): Map[K, V1] = - this + kv1 + kv2 ++ kvs - - /** Adds all key/value pairs in a traversable collection to this map, returning a new map. - * - * @param xs the collection containing the added key/value pairs - * @tparam V1 the type of the added values - * @return a new map with the given bindings added to this map - * - * @usecase def ++ (xs: Traversable[(K, V)]): Map[K, V] - * @inheritdoc - */ - def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = - ((repr: Map[K, V1]) /: xs.seq) (_ + _) - - /** Returns a new map obtained by removing all key/value pairs for which the predicate - * `p` returns `true`. - * - * '''Note:''' This method works by successively removing elements for which the - * predicate is true from this set. - * If removal is slow, or you expect that most elements of the set - * will be removed, you might consider using `filter` - * with a negated predicate instead. - * @param p A predicate over key-value pairs - * @return A new map containing elements not satisfying the predicate. - */ - override def filterNot(p: ((K, V)) => Boolean): This = { - var res: This = repr - for (kv <- this) - if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! 
concrete overrides abstract problem - res - } - - override def toSeq: Seq[(K, V)] = { - if (isEmpty) Vector.empty[(K, V)] - else { - // Default appropriate for immutable collections; mutable collections override this - val vb = Vector.newBuilder[(K, V)] - foreach(vb += _) - vb.result - } - } - - override def toBuffer[E >: (K, V)]: mutable.Buffer[E] = { - val result = new mutable.ArrayBuffer[E](size) - // Faster to let the map iterate itself than to defer through copyToBuffer - foreach(result += _) - result - } - - protected[this] override def parCombiner = ParMap.newCombiner[K, V] - - /** Appends all bindings of this map to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string - * `end`. Inside, the string representations of all bindings of this map - * in the form of `key -> value` are separated by the string `sep`. - * - * @param b the builder to which strings are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. - */ - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = - this.iterator.map { case (k, v) => k+" -> "+v }.addString(b, start, sep, end) - - /** Defines the prefix of this object's `toString` representation. - * @return a string representation which starts the result of `toString` applied to this $coll. - * Unless overridden in subclasses, the string prefix of every map is `"Map"`. 
- */ - override def stringPrefix: String = "Map" - - override /*PartialFunction*/ - def toString = super[IterableLike].toString - -} diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala deleted file mode 100644 index 43f4fa4bdf7a..000000000000 --- a/src/library/scala/collection/MapProxy.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** This is a simple wrapper class for [[scala.collection.Map]]. - * It is most useful for assembling customized map abstractions - * dynamically using object composition and forwarding. - * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala deleted file mode 100644 index 8e39c748dc82..000000000000 --- a/src/library/scala/collection/MapProxyLike.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -// Methods could be printed by cat MapLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for Map objects. It forwards - * all calls to a different Map object. 
- * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] - extends MapLike[A, B, This] - with IterableProxyLike[(A, B), This] -{ - override def get(key: A): Option[B] = self.get(key) - override def iterator: Iterator[(A, B)] = self.iterator - override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = self.+(kv) - override def - (key: A): This = self.-(key) - override def isEmpty: Boolean = self.isEmpty - override def getOrElse[B1 >: B](key: A, default: => B1): B1 = self.getOrElse(key, default) - override def apply(key: A): B = self.apply(key) - override def contains(key: A): Boolean = self.contains(key) - override def isDefinedAt(key: A) = self.isDefinedAt(key) - override def keySet: Set[A] = self.keySet - override def keysIterator: Iterator[A] = self.keysIterator - override def keys: Iterable[A] = self.keys - override def values: Iterable[B] = self.values - override def valuesIterator: Iterator[B] = self.valuesIterator - override def default(key: A): B = self.default(key) - override def filterKeys(p: A => Boolean) = self.filterKeys(p) - override def mapValues[C](f: B => C) = self.mapValues(f) - override def updated [B1 >: B](key: A, value: B1): Map[A, B1] = self.updated(key, value) - override def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] = self.+(kv1, kv2, kvs: _*) - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs) - override def filterNot(p: ((A, B)) => Boolean) = self filterNot p - - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = - self.addString(b, start, sep, end) -} diff --git a/src/library/scala/collection/MapView.scala b/src/library/scala/collection/MapView.scala new file mode 100644 index 000000000000..39742c434c41 --- /dev/null +++ b/src/library/scala/collection/MapView.scala @@ -0,0 +1,188 
@@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.nowarn +import scala.collection.MapView.SomeMapOps +import scala.collection.mutable.Builder + +trait MapView[K, +V] + extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] + with View[(K, V)] { + + override def view: MapView[K, V] = this + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all keys of this map. + * + * @return the keys of this map as a view. + */ + @nowarn("msg=overriding method keys") + override def keys: Iterable[K] = new MapView.Keys(this) + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all values of this map. + * + * @return the values of this map as a view. + */ + override def values: Iterable[V] = new MapView.Values(this) + + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. The resulting map wraps the original map without copying any elements. + */ + override def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. 
+ */ + override def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + + override def filter(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, isFlipped = false, pred) + + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, isFlipped = true, pred) + + override def partition(p: ((K, V)) => Boolean): (MapView[K, V], MapView[K, V]) = (filter(p), filterNot(p)) + + override def tapEach[U](f: ((K, V)) => U): MapView[K, V] = new MapView.TapEach(this, f) + + def mapFactory: MapViewFactory = MapView + + override def empty: MapView[K, V] = mapFactory.empty + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l] = new MapOps.WithFilter(this, p) + + override def toString: String = super[View].toString + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "MapView" +} + +object MapView extends MapViewFactory { + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] + /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ + type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] + + @SerialVersionUID(3L) + private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + override def get(key: Any): Option[Nothing] = None + override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this + override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this + override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + 
override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) + } + + @SerialVersionUID(3L) + class Id[K, +V](underlying: SomeMapOps[K, V]) extends AbstractMapView[K, V] { + def get(key: K): Option[V] = underlying.get(key) + def iterator: Iterator[(K, V)] = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Keys[K](underlying: SomeMapOps[K, _]) extends AbstractView[K] { + def iterator: Iterator[K] = underlying.keysIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Values[+V](underlying: SomeMapOps[_, V]) extends AbstractView[V] { + def iterator: Iterator[V] = underlying.valuesIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V], f: V => W) extends AbstractMapView[K, W] { + def iterator: Iterator[(K, W)] = underlying.iterator.map(kv => (kv._1, f(kv._2))) + def get(key: K): Option[W] = underlying.get(key).map(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class FilterKeys[K, +V](underlying: SomeMapOps[K, V], p: K => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)] = underlying.iterator.filter { case (k, _) => p(k) } + def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class Filter[K, +V](underlying: SomeMapOps[K, V], isFlipped: Boolean, p: ((K, V)) => 
Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)] = underlying.iterator.filterImpl(p, isFlipped) + def get(key: K): Option[V] = underlying.get(key) match { + case s @ Some(v) if p((key, v)) != isFlipped => s + case _ => None + } + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V], f: ((K, V)) => U) extends AbstractMapView[K, V] { + override def get(key: K): Option[V] = { + underlying.get(key) match { + case s @ Some(v) => + f((key, v)) + s + case None => None + } + } + override def iterator: Iterator[(K, V)] = underlying.iterator.tapEach(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + override def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + + override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] + + override def from[K, V](it: IterableOnce[(K, V)]): View[(K, V)] = View.from(it) + + override def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] = it match { + case mv: MapView[K, V] => mv + case other => new MapView.Id(other) + } + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { + + def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] + + def empty[X, Y]: MapView[X, Y] + + def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. 
*/ +@SerialVersionUID(3L) +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V] + diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala deleted file mode 100644 index cdfb5d995b97..000000000000 --- a/src/library/scala/collection/Parallel.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** A marker trait for collections which have their operations parallelised. - * - * @since 2.9 - * @author Aleksandar Prokopec - */ -trait Parallel diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala deleted file mode 100644 index 0ad8182404df..000000000000 --- a/src/library/scala/collection/Parallelizable.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import parallel.Combiner - -/** This trait describes collections which can be turned into parallel collections - * by invoking the method `par`. Parallelizable collections may be parameterized with - * a target type different than their own. - * - * @tparam A the type of the elements in the collection - * @tparam ParRepr the actual type of the collection, which has to be parallel - */ -trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { - - def seq: TraversableOnce[A] - - /** Returns a parallel implementation of this collection. 
- * - * For most collection types, this method creates a new parallel collection by copying - * all the elements. For these collection, `par` takes linear time. Mutable collections - * in this category do not produce a mutable parallel collection that has the same - * underlying dataset, so changes in one collection will not be reflected in the other one. - * - * Specific collections (e.g. `ParArray` or `mutable.ParHashMap`) override this default - * behaviour by creating a parallel collection which shares the same underlying dataset. - * For these collections, `par` takes constant or sublinear time. - * - * All parallel collections return a reference to themselves. - * - * @return a parallel implementation of this collection - */ - def par: ParRepr = { - val cb = parCombiner - for (x <- seq) cb += x - cb.result() - } - - /** The default `par` implementation uses the combiner provided by this method - * to create a new parallel collection. - * - * @return a combiner for the parallel collection of type `ParRepr` - */ - protected[this] def parCombiner: Combiner[A, ParRepr] -} - diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala index 8091f53f3778..8b8132870287 100644 --- a/src/library/scala/collection/Searching.scala +++ b/src/library/scala/collection/Searching.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,113 +10,48 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection import scala.language.implicitConversions -import scala.annotation.tailrec -import scala.collection.generic.IsSeqLike -import scala.math.Ordering +import scala.collection.generic.IsSeq -/** A collection of wrappers that provide sequence classes with search functionality. 
- * - * Example usage: - * {{{ - * import scala.collection.Searching._ - * val l = List(1, 2, 3, 4, 5) - * l.search(3) - * // == Found(2) - * }}} - */ object Searching { + + /** The result of performing a search on a sorted sequence + * + * Example usage: + * + * {{{ + * val list = List(1, 3, 4, 5) // list must be sorted before searching + * list.search(4) // Found(2) + * list.search(2) // InsertionPoint(1) + * }}} + * + * */ sealed abstract class SearchResult { + /** The index corresponding to the element searched for in the sequence, if it was found, + * or the index where the element would be inserted in the sequence, if it was not in the sequence */ def insertionPoint: Int } + /** The result of performing a search on a sorted sequence, where the element was found. + * + * @param foundIndex the index corresponding to the element searched for in the sequence + */ case class Found(foundIndex: Int) extends SearchResult { - override def insertionPoint = foundIndex + override def insertionPoint: Int = foundIndex } - case class InsertionPoint(insertionPoint: Int) extends SearchResult - - class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) { - /** Search the sorted sequence for a specific element. If the sequence is an - * `IndexedSeqLike`, a binary search is used. Otherwise, a linear search is used. - * - * The sequence should be sorted with the same `Ordering` before calling; otherwise, - * the results are undefined. - * - * @see [[scala.collection.IndexedSeqLike]] - * @see [[scala.math.Ordering]] - * @see [[scala.collection.SeqLike]], method `sorted` - * - * @param elem the element to find. - * @param ord the ordering to be used to compare elements. - * - * @return a `Found` value containing the index corresponding to the element in the - * sequence, or the `InsertionPoint` where the element would be inserted if - * the element is not in the sequence. 
- */ - final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = - coll match { - case _: IndexedSeqLike[A, Repr] => binarySearch(elem, 0, coll.length)(ord) - case _ => linearSearch(coll.view, elem, 0)(ord) - } - - /** Search within an interval in the sorted sequence for a specific element. If the - * sequence is an `IndexedSeqLike`, a binary search is used. Otherwise, a linear search - * is used. - * - * The sequence should be sorted with the same `Ordering` before calling; otherwise, - * the results are undefined. - * - * @see [[scala.collection.IndexedSeqLike]] - * @see [[scala.math.Ordering]] - * @see [[scala.collection.SeqLike]], method `sorted` - * - * @param elem the element to find. - * @param from the index where the search starts. - * @param to the index following where the search ends. - * @param ord the ordering to be used to compare elements. - * - * @return a `Found` value containing the index corresponding to the element in the - * sequence, or the `InsertionPoint` where the element would be inserted if - * the element is not in the sequence. 
- */ - final def search[B >: A](elem: B, from: Int, to: Int) - (implicit ord: Ordering[B]): SearchResult = - coll match { - case _: IndexedSeqLike[A, Repr] => binarySearch(elem, from, to)(ord) - case _ => linearSearch(coll.view(from, to), elem, from)(ord) - } - @tailrec - private def binarySearch[B >: A](elem: B, from: Int, to: Int) - (implicit ord: Ordering[B]): SearchResult = { - if (to == from) InsertionPoint(from) else { - val idx = from+(to-from-1)/2 - math.signum(ord.compare(elem, coll(idx))) match { - case -1 => binarySearch(elem, from, idx)(ord) - case 1 => binarySearch(elem, idx + 1, to)(ord) - case _ => Found(idx) - } - } - } - - private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int) - (implicit ord: Ordering[B]): SearchResult = { - var idx = offset - val it = c.iterator - while (it.hasNext) { - val cur = it.next() - if (ord.equiv(elem, cur)) return Found(idx) - else if (ord.lt(elem, cur)) return InsertionPoint(idx) - idx += 1 - } - InsertionPoint(idx) - } + /** The result of performing a search on a sorted sequence, where the element was not found + * + * @param insertionPoint the index where the element would be inserted in the sequence + */ + case class InsertionPoint(insertionPoint: Int) extends SearchResult - } + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal - implicit def search[Repr, A](coll: Repr) - (implicit fr: IsSeqLike[Repr]): SearchImpl[fr.A, Repr] = new SearchImpl(fr.conversion(coll)) + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = + new SearchImpl(fr.conversion(coll)) } diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala index 
d4dcfc168ede..753d51b6a51d 100644 --- a/src/library/scala/collection/Seq.scala +++ b/src/library/scala/collection/Seq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,35 +10,1187 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection -import generic._ -import mutable.Builder +import scala.collection.immutable.Range +import scala.util.hashing.MurmurHash3 +import Searching.{Found, InsertionPoint, SearchResult} +import scala.annotation.nowarn -/** A base trait for sequences. - * $seqInfo - */ -trait Seq[+A] extends PartialFunction[Int, A] - with Iterable[A] - with GenSeq[A] - with GenericTraversableTemplate[A, Seq] - with SeqLike[A, Seq[A]] { - override def companion: GenericCompanion[Seq] = Seq - - override def seq: Seq[A] = this +/** Base trait for sequence collections + * + * @tparam A the element type of the collection + */ +trait Seq[+A] + extends Iterable[A] + with PartialFunction[Int, A] + with SeqOps[A, Seq, Seq[A]] + with IterableFactoryDefaults[A, Seq] + with Equals { + + override def iterableFactory: SeqFactory[Seq] = Seq + + def canEqual(that: Any): Boolean = true + + override def equals(o: Any): Boolean = + (this eq o.asInstanceOf[AnyRef]) || (o match { + case seq: Seq[A @unchecked] if seq.canEqual(this) => sameElements(seq) + case _ => false + }) + + override def hashCode(): Int = MurmurHash3.seqHash(this) + + override def toString(): String = super[Iterable].toString() + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "Seq" } -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. 
- * @define coll sequence - * @define Coll `Seq` - */ -object Seq extends SeqFactory[Seq] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] +/** + * $factoryInfo + * @define coll sequence + * @define Coll `Seq` + */ +@SerialVersionUID(3L) +object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) + +/** Base trait for Seq operations + * + * @tparam A the element type of the collection + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * + * @define coll sequence + * @define Coll `Seq` + */ +trait SeqOps[+A, +CC[_], +C] extends Any + with IterableOps[A, CC, C] { self => + + override def view: SeqView[A] = new SeqView.Id[A](this) + + /** Gets the element at the specified index. This operation is provided for convenience in `Seq`. It should + * not be assumed to be efficient unless you have an `IndexedSeq`. */ + @throws[IndexOutOfBoundsException] + def apply(i: Int): A + + /** The length (number of elements) of the $coll. `size` is an alias for `length` in `Seq` collections. */ + def length: Int + + /** A copy of the $coll with an element prepended. + * + * Also, the original $coll is not modified, so you will want to capture the result. 
+ * + * Example: + * {{{ + * scala> val x = List(1) + * x: List[Int] = List(1) + * + * scala> val y = 2 +: x + * y: List[Int] = List(2, 1) + * + * scala> println(x) + * List(1) + * }}} + * + * @param elem the prepended element + * @tparam B the element type of the returned $coll. + * + * @return a new $coll consisting of `value` followed + * by all elements of this $coll. + */ + def prepended[B >: A](elem: B): CC[B] = iterableFactory.from(new View.Prepended(elem, this)) + + /** Alias for `prepended`. + * + * Note that :-ending operators are right associative (see example). + * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side. + */ + @`inline` final def +: [B >: A](elem: B): CC[B] = prepended(elem) + + /** A copy of this $coll with an element appended. + * + * $willNotTerminateInf + * + * Example: + * {{{ + * scala> val a = List(1) + * a: List[Int] = List(1) + * + * scala> val b = a :+ 2 + * b: List[Int] = List(1, 2) + * + * scala> println(a) + * List(1) + * }}} + * + * @param elem the appended element + * @tparam B the element type of the returned $coll. + * @return a new $coll consisting of + * all elements of this $coll followed by `value`. + */ + def appended[B >: A](elem: B): CC[B] = iterableFactory.from(new View.Appended(this, elem)) + + /** Alias for `appended`. + * + * Note that :-ending operators are right associative (see example). + * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side. + */ + @`inline` final def :+ [B >: A](elem: B): CC[B] = appended(elem) + + /** As with `:++`, returns a new collection containing the elements from the left operand followed by the + * elements from the right operand. + * + * It differs from `:++` in that the right operand determines the type of + * the resulting collection rather than the left one. + * Mnemonic: the COLon is on the side of the new COLlection type. + * + * @param prefix the iterable to prepend. + * @tparam B the element type of the returned collection. 
+ * @return a new $coll which contains all elements of `prefix` followed + * by all the elements of this $coll. + */ + def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = iterableFactory.from(prefix match { + case prefix: Iterable[B] => new View.Concat(prefix, this) + case _ => prefix.iterator ++ iterator + }) + + /** Alias for `prependedAll`. */ + @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]): CC[B] = prependedAll(prefix) + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @tparam B the element type of the returned collection. + * @return a new collection of type `CC[B]` which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = super.concat(suffix) + + /** Alias for `appendedAll`. */ + @inline final def :++ [B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) + + // Make `concat` an alias for `appendedAll` so that it benefits from performance + // overrides of this method + @inline final override def concat[B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) + + /** Produces a new sequence which contains all elements of this $coll and also all elements of + * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. + * + * @param that the sequence to add. + * @tparam B the element type of the returned $coll. + * @return a new collection which contains all elements of this $coll + * followed by all elements of `that`. + */ + @deprecated("Use `concat` instead", "2.13.0") + @inline final def union[B >: A](that: Seq[B]): CC[B] = concat(that) + + final override def size: Int = length + + /** Selects all the elements of this $coll ignoring the duplicates. 
+ * + * @return a new $coll consisting of all the elements of this $coll without duplicates. + */ + def distinct: C = distinctBy(identity) + + /** Selects all the elements of this $coll ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. + * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new $coll consisting of all the elements of this $coll without duplicates. + */ + def distinctBy[B](f: A => B): C = fromSpecific(new View.DistinctBy(this, f)) + + /** Returns a new $coll with the elements of this $coll in reverse order. + * + * $willNotTerminateInf + * $willForceEvaluation + * + * @return a new $coll with all elements of this $coll in reverse order. + */ + def reverse: C = fromSpecific(reversed) + + /** An iterator yielding the elements of this $coll in reverse order. + * + * $willNotTerminateInf + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but might be more efficient. + * + * @return an iterator yielding the elements of this $coll in reverse order. + */ + def reverseIterator: Iterator[A] = reversed.iterator + + /** Tests whether this $coll contains the given sequence at a given index. + * + * '''Note''': If both the receiver object `this` and the argument + * `that` are infinite sequences this method may not terminate. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this $coll at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = { + val i = iterator drop offset + val j = that.iterator + while (j.hasNext && i.hasNext) + if (i.next() != j.next()) + return false + + !j.hasNext + } + + /** Tests whether this $coll ends with the given sequence. 
+   * $willNotTerminateInf
+   * @param that    the sequence to test
+   * @return `true` if this $coll has `that` as a suffix, `false` otherwise.
+   */
+  def endsWith[B >: A](that: Iterable[B]): Boolean = {
+    if (that.isEmpty) true
+    else {
+      val i = iterator.drop(length - that.size)
+      val j = that.iterator
+      while (i.hasNext && j.hasNext)
+        if (i.next() != j.next())
+          return false
+
+      !j.hasNext
+    }
+  }
+
+  /** Tests whether this $coll contains the given index.
+   *
+   * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into
+   * a `PartialFunction[Int, A]`.
+   *
+   * @param idx the index to test
+   * @return `true` if this $coll contains an element at position `idx`, `false` otherwise.
+   */
+  def isDefinedAt(idx: Int): Boolean = idx >= 0 && lengthIs > idx
+
+  /** A copy of this $coll with an element value appended until a given target length is reached.
+   *
+   * @param  len   the target length
+   * @param  elem  the padding value
+   * @tparam B     the element type of the returned $coll.
+   * @return a new $coll consisting of
+   *         all elements of this $coll followed by the minimal number of occurrences of `elem` so
+   *         that the resulting collection has a length of at least `len`.
+   */
+  def padTo[B >: A](len: Int, elem: B): CC[B] = iterableFactory.from(new View.PadTo(this, len, elem))
+
+  /** Computes the length of the longest segment that starts from the first element
+   * and whose elements all satisfy some predicate.
+   *
+   * $mayNotTerminateInf
+   *
+   * @param p the predicate used to test elements.
+   * @return the length of the longest segment of this $coll that starts from the first element
+   *         such that every element of the segment satisfies the predicate `p`.
+   */
+  final def segmentLength(p: A => Boolean): Int = segmentLength(p, 0)
+
+  /** Computes the length of the longest segment that starts from some index
+   * and whose elements all satisfy some predicate.
+   *
+   * $mayNotTerminateInf
+   *
+   * @param p the predicate used to test elements.
+ * @param from the index where the search starts. + * @return the length of the longest segment of this $coll starting from index `from` + * such that every element of the segment satisfies the predicate `p`. + */ + def segmentLength(p: A => Boolean, from: Int): Int = { + var i = 0 + val it = iterator.drop(from) + while (it.hasNext && p(it.next())) + i += 1 + i + } + + /** Returns the length of the longest prefix whose elements all satisfy some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the length of the longest prefix of this $coll + * such that every element of the segment satisfies the predicate `p`. + */ + @deprecated("Use segmentLength instead of prefixLength", "2.13.0") + @`inline` final def prefixLength(p: A => Boolean): Int = segmentLength(p, 0) + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(p: A => Boolean, from: Int): Int = iterator.indexWhere(p, from) + + /** Finds index of the first element satisfying some predicate. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index `>= 0` of the first element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + @deprecatedOverriding("Override indexWhere(p, from) instead - indexWhere(p) calls indexWhere(p, 0)", "2.13.0") + def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) + + /** Finds index of first occurrence of some value in this $coll after or at some start index. + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. 
+ * @param from the start index + * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from) + + /** Finds index of first occurrence of some value in this $coll. + * + * @param elem the element value to search for. + * @tparam B the type of the element `elem`. + * @return the index `>= 0` of the first element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + @deprecatedOverriding("Override indexOf(elem, from) instead - indexOf(elem) calls indexOf(elem, 0)", "2.13.0") + def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) + + /** Finds index of last occurrence of some value in this $coll before or at a given end index. + * + * $willNotTerminateInf + * + * @param elem the element value to search for. + * @param end the end index. + * @tparam B the type of the element `elem`. + * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf[B >: A](elem: B, end: Int = length - 1): Int = lastIndexWhere(elem == _, end) + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int): Int = { + var i = length - 1 + val it = reverseIterator + while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1 + i + } + + /** Finds index of last element satisfying some predicate. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. 
+ * @return the index of the last element of this $coll that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + @deprecatedOverriding("Override lastIndexWhere(p, end) instead - lastIndexWhere(p) calls lastIndexWhere(p, Int.MaxValue)", "2.13.0") + def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, Int.MaxValue) + + @inline private[this] def toGenericSeq: scala.collection.Seq[A] = this match { + case s: scala.collection.Seq[A] => s + case _ => toSeq + } + + /** Finds first index after or at a start index where this $coll contains a given sequence as a slice. + * $mayNotTerminateInf + * @param that the sequence to test + * @param from the start index + * @return the first index `>= from` such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. + */ + // TODO Should be implemented in a way that preserves laziness + def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = + if (that.isEmpty && from == 0) 0 + else { + val l = knownSize + val tl = that.knownSize + if (l >= 0 && tl >= 0) { + val clippedFrom = math.max(0, from) + if (from > l) -1 + else if (tl < 1) clippedFrom + else if (l < tl) -1 + else SeqOps.kmpSearch(toGenericSeq, clippedFrom, l, that, 0, tl, forward = true) + } + else { + var i = from + var s: scala.collection.Seq[A] = toGenericSeq.drop(i) + while (!s.isEmpty) { + if (s startsWith that) + return i + + i += 1 + s = s.tail + } + -1 + } + } + + /** Finds first index where this $coll contains a given sequence as a slice. + * $mayNotTerminateInf + * @param that the sequence to test + * @return the first index `>= 0` such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. 
+ */ + @deprecatedOverriding("Override indexOfSlice(that, from) instead - indexOfSlice(that) calls indexOfSlice(that, 0)", "2.13.0") + def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that, 0) + + /** Finds last index before or at a given end index where this $coll contains a given sequence as a slice. + * + * $willNotTerminateInf + * + * @param that the sequence to test + * @param end the end index + * @return the last index `<= end` such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. + */ + def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = { + val l = length + val tl = that.length + val clippedL = math.min(l-tl, end) + + if (end < 0) -1 + else if (tl < 1) clippedL + else if (l < tl) -1 + else SeqOps.kmpSearch(toGenericSeq, 0, clippedL+tl, that, 0, tl, forward = false) + } + + /** Finds last index where this $coll contains a given sequence as a slice. + * + * $willNotTerminateInf + * + * @param that the sequence to test + * @return the last index such that the elements of this $coll starting at this index + * match the elements of sequence `that`, or `-1` if no such subsequence exists. + */ + @deprecatedOverriding("Override lastIndexOfSlice(that, end) instead - lastIndexOfSlice(that) calls lastIndexOfSlice(that, Int.MaxValue)", "2.13.0") + def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that, Int.MaxValue) + + /** Finds the last element of the $coll satisfying a predicate, if any. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return an option value containing the last element in the $coll + * that satisfies `p`, or `None` if none exists. + */ + def findLast(p: A => Boolean): Option[A] = { + val it = reverseIterator + while (it.hasNext) { + val elem = it.next() + if (p(elem)) return Some(elem) + } + None + } + + /** Tests whether this $coll contains a given sequence as a slice. 
+ * $mayNotTerminateInf + * @param that the sequence to test + * @return `true` if this $coll contains a slice with the same elements + * as `that`, otherwise `false`. + */ + def containsSlice[B >: A](that: Seq[B]): Boolean = indexOfSlice(that) != -1 + + /** Tests whether this $coll contains a given value as an element. + * $mayNotTerminateInf + * + * @param elem the element to test. + * @return `true` if this $coll has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem) + + @deprecated("Use .reverseIterator.map(f).to(...) instead of .reverseMap(f)", "2.13.0") + def reverseMap[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(View.fromIteratorProvider(() => reverseIterator), f)) + + /** Iterates over distinct permutations of elements. + * + * $willForceEvaluation + * + * @return An Iterator which traverses the distinct permutations of this $coll. + * @example {{{ + * Seq('a', 'b', 'b').permutations.foreach(println) + * // List(a, b, b) + * // List(b, a, b) + * // List(b, b, a) + * }}} + */ + def permutations: Iterator[C] = + if (isEmpty) Iterator.single(coll) + else new PermutationsItr + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. 
+ * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * $willForceEvaluation + * + * @return An Iterator which traverses the n-element combinations of this $coll. + * @example {{{ + * Seq('a', 'b', 'b', 'b', 'c').combinations(2).foreach(println) + * // List(a, b) + * // List(a, c) + * // List(b, b) + * // List(b, c) + * Seq('b', 'a', 'b').combinations(2).foreach(println) + * // List(b, b) + * // List(b, a) + * }}} + */ + def combinations(n: Int): Iterator[C] = + if (n < 0 || n > size) Iterator.empty + else new CombinationsItr(n) + + private class PermutationsItr extends AbstractIterator[C] { + private[this] val (elms, idxs) = init() + private[this] var _hasNext = true + + def hasNext = _hasNext + @throws[NoSuchElementException] + def next(): C = { + if (!hasNext) + Iterator.empty.next() + + val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms + val result = (newSpecificBuilder ++= forcedElms).result() + var i = idxs.length - 2 + while(i >= 0 && idxs(i) >= idxs(i+1)) + i -= 1 + + if (i < 0) + _hasNext = false + else { + var j = idxs.length - 1 + while(idxs(j) <= idxs(i)) j -= 1 + swap(i,j) + + val len = (idxs.length - i) / 2 + var k = 1 + while (k <= len) { + swap(i+k, idxs.length - k) + k += 1 + } + } + result + } + private def swap(i: Int, j: Int): Unit = { + val tmpI = idxs(i) + idxs(i) = idxs(j) + idxs(j) = tmpI + val tmpE = elms(i) + elms(i) = elms(j) + elms(j) = tmpE + } + + private[this] def init() = { + val m = 
mutable.HashMap[A, Int]() + val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip + + (es.to(mutable.ArrayBuffer), is.toArray) + } + } + + private class CombinationsItr(n: Int) extends AbstractIterator[C] { + // generating all nums such that: + // (1) nums(0) + .. + nums(length-1) = n + // (2) 0 <= nums(i) <= cnts(i), where 0 <= i <= cnts.length-1 + private[this] val (elms, cnts, nums) = init() + private[this] val offs = cnts.scanLeft(0)(_ + _) + private[this] var _hasNext = true + + def hasNext = _hasNext + def next(): C = { + if (!hasNext) + Iterator.empty.next() + + /* Calculate this result. */ + val buf = newSpecificBuilder + for(k <- 0 until nums.length; j <- 0 until nums(k)) + buf += elms(offs(k)+j) + val res = buf.result() + + /* Prepare for the next call to next. */ + var idx = nums.length - 1 + while (idx >= 0 && nums(idx) == cnts(idx)) + idx -= 1 + + idx = nums.lastIndexWhere(_ > 0, idx - 1) + + if (idx < 0) + _hasNext = false + else { + // OPT: hand rolled version of `sum = nums.view(idx + 1, nums.length).sum + 1` + var sum = 1 + var i = idx + 1 + while (i < nums.length) { + sum += nums(i) + i += 1 + } + nums(idx) -= 1 + for (k <- (idx+1) until nums.length) { + nums(k) = sum min cnts(k) + sum -= nums(k) + } + } + + res + } + + /** Rearrange seq to newSeq a0a0..a0a1..a1...ak..ak such that + * seq.count(_ == aj) == cnts(j) + * + * @return (newSeq,cnts,nums) + */ + private def init(): (IndexedSeq[A], Array[Int], Array[Int]) = { + val m = mutable.HashMap[A, Int]() + + // e => (e, weight(e)) + val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip + val cs = new Array[Int](m.size) + is foreach (i => cs(i) += 1) + val ns = new Array[Int](cs.length) + + var r = n + 0 until ns.length foreach { k => + ns(k) = r min cs(k) + r -= ns(k) + } + (es.to(IndexedSeq), cs, ns) + } + } + + /** Sorts this $coll according to an Ordering. + * + * The sort is stable. 
That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * $willForceEvaluation + * + * @param ord the ordering to be used to compare elements. + * @return a $coll consisting of the elements of this $coll + * sorted according to the ordering `ord`. + */ + def sorted[B >: A](implicit ord: Ordering[B]): C = { + val len = this.length + val b = newSpecificBuilder + if (len == 1) b += head + else if (len > 1) { + b.sizeHint(len) + val arr = new Array[Any](len) + @annotation.unused val copied = copyToArray(arr) + //assert(copied == len) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + var i = 0 + while (i < len) { + b += arr(i).asInstanceOf[A] + i += 1 + } + } + b.result() + } + + /** Sorts this $coll according to a comparison function. + * $willNotTerminateInf + * $willForceEvaluation + * + * The sort is stable. That is, elements that are equal + * (`lt` returns false for both directions of comparison) + * appear in the same order in the sorted sequence as in the original. + * + * @param lt a predicate that is true if + * its first argument strictly precedes its second argument in + * the desired ordering. + * @return a $coll consisting of the elements of this $coll + * sorted according to the comparison function `lt`. + * @example {{{ + * List("Steve", "Bobby", "Tom", "John", "Bob").sortWith((x, y) => x.take(3).compareTo(y.take(3)) < 0) = + * List("Bobby", "Bob", "John", "Steve", "Tom") + * }}} + */ + def sortWith(lt: (A, A) => Boolean): C = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this $coll according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * $willNotTerminateInf + * $willForceEvaluation + * + * The sort is stable. 
That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return a $coll consisting of the elements of this $coll + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + * + * @example {{{ + * val words = "The quick brown fox jumped over the lazy dog".split(' ') + * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]] + * words.sortBy(x => (x.length, x.head)) + * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped) + * }}} + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): C = sorted(ord on f) + + /** Produces the range of all indices of this sequence. + * $willForceEvaluation + * + * @return a `Range` value from `0` to one less than the length of this $coll. + */ + def indices: Range = Range(0, length) + + override final def sizeCompare(otherSize: Int): Int = lengthCompare(otherSize) + + /** Compares the length of this $coll to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + * The method as implemented here does not call `length` directly; its running time + * is `O(length min len)` instead of `O(length)`. The method should be overridden + * if computing `length` is cheap and `knownSize` returns `-1`. 
+ * + * @see [[lengthIs]] + */ + def lengthCompare(len: Int): Int = super.sizeCompare(len) + + override final def sizeCompare(that: Iterable[_]): Int = lengthCompare(that) + + /** Compares the length of this $coll to the size of another `Iterable`. + * + * @param that the `Iterable` whose size is compared with this $coll's length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < that.size + * x == 0 if this.length == that.size + * x > 0 if this.length > that.size + * }}} + * The method as implemented here does not call `length` or `size` directly; its running time + * is `O(this.length min that.size)` instead of `O(this.length + that.size)`. + * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. + */ + def lengthCompare(that: Iterable[_]): Int = super.sizeCompare(that) + + /** Returns a value class containing operations for comparing the length of this $coll to a test value. + * + * These operations are implemented in terms of [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + @inline final def lengthIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + + override def isEmpty: Boolean = lengthCompare(0) == 0 + + /** Checks whether corresponding elements of the given iterable collection + * compare equal (with respect to `==`) to elements of this $coll. + * + * @param that the collection to compare + * @tparam B the type of the elements of collection `that`. + * @return `true` if both collections contain equal elements in the same order, `false` otherwise. 
+ */ + def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + val thisKnownSize = knownSize + if (thisKnownSize != -1) { + val thatKnownSize = that.knownSize + if (thatKnownSize != -1) { + if (thisKnownSize != thatKnownSize) return false + if (thisKnownSize == 0) return true + } + } + iterator.sameElements(that) + } + + /** Tests whether every element of this $coll relates to the + * corresponding element of another sequence by satisfying a test predicate. + * + * @param that the other sequence + * @param p the test predicate, which relates elements from both sequences + * @tparam B the type of the elements of `that` + * @return `true` if both sequences have the same length and + * `p(x, y)` is `true` for all corresponding elements `x` of this $coll + * and `y` of `that`, otherwise `false`. + */ + def corresponds[B](that: Seq[B])(p: (A, B) => Boolean): Boolean = { + val i = iterator + val j = that.iterator + while (i.hasNext && j.hasNext) + if (!p(i.next(), j.next())) + return false + !i.hasNext && !j.hasNext + } + + /** Computes the multiset difference between this $coll and another sequence. + * + * @param that the sequence of elements to remove + * @return a new $coll which contains all elements of this $coll + * except some of the occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): C = { + val occ = occCounts(that) + fromSpecific(iterator.filter { x => + var include = false + occ.updateWith(x) { + case None => { + include = true + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + include + }) + } + + /** Computes the multiset intersection between this $coll and another sequence. + * + * @param that the sequence of elements to intersect with. 
+ * @return a new $coll which contains all elements of this $coll + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): C = { + val occ = occCounts(that) + fromSpecific(iterator.filter { x => + var include = true + occ.updateWith(x) { + case None => { + include = false + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + include + }) + } + + /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence. + * + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original $coll appends the patch to the end. + * If the `replaced` count would exceed the available elements, the difference in excess is ignored. + * + * @param from the index of the first replaced element + * @param other the replacement sequence + * @param replaced the number of elements to drop in the original $coll + * @tparam B the element type of the returned $coll. + * @return a new $coll consisting of all elements of this $coll + * except that `replaced` elements starting from `from` are replaced + * by all the elements of `other`. + */ + def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = + iterableFactory.from(new View.Patched(this, from, other, replaced)) + + /** A copy of this $coll with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @tparam B the element type of the returned $coll. + * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. 
In case of a + * lazy collection this exception may be thrown at a later time or not at + * all (if the end of the collection is never evaluated). + */ + def updated[B >: A](index: Int, elem: B): CC[B] = { + if(index < 0) throw new IndexOutOfBoundsException(index.toString) + val k = knownSize + if(k >= 0 && index >= k) throw new IndexOutOfBoundsException(index.toString) + iterableFactory.from(new View.Updated(this, index, elem)) + } + + protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { + val occ = new mutable.HashMap[B, Int]() + for (y <- sq) occ.updateWith(y) { + case None => Some(1) + case Some(n) => Some(n + 1) + } + occ + } + + /** Searches this sorted sequence for a specific element. If the sequence is an + * `IndexedSeq`, a binary search is used. Otherwise, a linear search is used. + * + * The sequence should be sorted with the same `Ordering` before calling; otherwise, + * the results are undefined. + * + * @see [[scala.collection.IndexedSeq]] + * @see [[scala.math.Ordering]] + * @see [[scala.collection.SeqOps]], method `sorted` + * + * @param elem the element to find. + * @param ord the ordering to be used to compare elements. + * + * @return a `Found` value containing the index corresponding to the element in the + * sequence, or the `InsertionPoint` where the element would be inserted if + * the element is not in the sequence. + */ + def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = + linearSearch(view, elem, 0)(ord) + + /** Searches within an interval in this sorted sequence for a specific element. If this + * sequence is an `IndexedSeq`, a binary search is used. Otherwise, a linear search + * is used. + * + * The sequence should be sorted with the same `Ordering` before calling; otherwise, + * the results are undefined. + * + * @see [[scala.collection.IndexedSeq]] + * @see [[scala.math.Ordering]] + * @see [[scala.collection.SeqOps]], method `sorted` + * + * @param elem the element to find. 
+ * @param from the index where the search starts. + * @param to the index following where the search ends. + * @param ord the ordering to be used to compare elements. + * + * @return a `Found` value containing the index corresponding to the element in the + * sequence, or the `InsertionPoint` where the element would be inserted if + * the element is not in the sequence. + * + * @note if `to <= from`, the search space is empty, and an `InsertionPoint` at `from` + * is returned + */ + def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = + linearSearch(view.slice(from, to), elem, math.max(0, from))(ord) + + private[this] def linearSearch[B >: A](c: View[A], elem: B, offset: Int) + (implicit ord: Ordering[B]): SearchResult = { + var idx = offset + val it = c.iterator + while (it.hasNext) { + val cur = it.next() + if (ord.equiv(elem, cur)) return Found(idx) + else if (ord.lt(elem, cur)) return InsertionPoint(idx) + idx += 1 + } + InsertionPoint(idx) + } +} + +object SeqOps { + + // KMP search utilities + + /** A KMP implementation, based on the undoubtedly reliable wikipedia entry. + * Note: I made this private to keep it from entering the API. That can be reviewed. + * + * @param S Sequence that may contain target + * @param m0 First index of S to consider + * @param m1 Last index of S to consider (exclusive) + * @param W Target sequence + * @param n0 First index of W to match + * @param n1 Last index of W to match (exclusive) + * @param forward Direction of search (from beginning==true, from end==false) + * @return Index of start of sequence if found, -1 if not (relative to beginning of S, not m0). 
+ */ + private def kmpSearch[B](S: scala.collection.Seq[B], m0: Int, m1: Int, W: scala.collection.Seq[B], n0: Int, n1: Int, forward: Boolean): Int = { + // Check for redundant case when target has single valid element + def clipR(x: Int, y: Int) = if (x < y) x else -1 + def clipL(x: Int, y: Int) = if (x > y) x else -1 + + if (n1 == n0+1) { + if (forward) + clipR(S.indexOf(W(n0), m0), m1) + else + clipL(S.lastIndexOf(W(n0), m1-1), m0-1) + } + + // Check for redundant case when both sequences are same size + else if (m1-m0 == n1-n0) { + // Accepting a little slowness for the uncommon case. + if (S.iterator.slice(m0, m1).sameElements(W.iterator.slice(n0, n1))) m0 + else -1 + } + // Now we know we actually need KMP search, so do it + else S match { + case xs: scala.collection.IndexedSeq[_] => + // We can index into S directly; it should be adequately fast + val Wopt = kmpOptimizeWord(W, n0, n1, forward) + val T = kmpJumpTable(Wopt, n1-n0) + var i, m = 0 + val zero = if (forward) m0 else m1-1 + val delta = if (forward) 1 else -1 + while (i+m < m1-m0) { + if (Wopt(i) == S(zero+delta*(i+m))) { + i += 1 + if (i == n1-n0) return (if (forward) m+m0 else m1-m-i) + } + else { + val ti = T(i) + m += i - ti + if (i > 0) i = ti + } + } + -1 + case _ => + // We had better not index into S directly! 
+ val iter = S.iterator.drop(m0) + val Wopt = kmpOptimizeWord(W, n0, n1, forward = true) + val T = kmpJumpTable(Wopt, n1-n0) + val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind + var largest = 0 + var i, m = 0 + var answer = -1 + while (m+m0+n1-n0 <= m1) { + while (i+m >= largest) { + cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef] + largest += 1 + } + if (Wopt(i) == cache((i+m)%(n1-n0)).asInstanceOf[B]) { + i += 1 + if (i == n1-n0) { + if (forward) return m+m0 + else { + i -= 1 + answer = m+m0 + val ti = T(i) + m += i - ti + if (i > 0) i = ti + } + } + } + else { + val ti = T(i) + m += i - ti + if (i > 0) i = ti + } + } + answer + } + } + + /** Make sure a target sequence has fast, correctly-ordered indexing for KMP. + * + * @param W The target sequence + * @param n0 The first element in the target sequence that we should use + * @param n1 The far end of the target sequence that we should use (exclusive) + * @return Target packed in an IndexedSeq (taken from iterator unless W already is an IndexedSeq) + */ + private def kmpOptimizeWord[B](W: scala.collection.Seq[B], n0: Int, n1: Int, forward: Boolean): IndexedSeqView[B] = W match { + case iso: IndexedSeq[B] => + // Already optimized for indexing--use original (or custom view of original) + if (forward && n0==0 && n1==W.length) iso.view + else if (forward) new AbstractIndexedSeqView[B] { + val length = n1 - n0 + def apply(x: Int) = iso(n0 + x) + } + else new AbstractIndexedSeqView[B] { + def length = n1 - n0 + def apply(x: Int) = iso(n1 - 1 - x) + } + case _ => + // W is probably bad at indexing. 
Pack in array (in correct orientation) + // Would be marginally faster to special-case each direction + new AbstractIndexedSeqView[B] { + private[this] val Warr = new Array[AnyRef](n1-n0) + private[this] val delta = if (forward) 1 else -1 + private[this] val done = if (forward) n1-n0 else -1 + val wit = W.iterator.drop(n0) + var i = if (forward) 0 else (n1-n0-1) + while (i != done) { + Warr(i) = wit.next().asInstanceOf[AnyRef] + i += delta + } + + val length = n1 - n0 + def apply(x: Int) = Warr(x).asInstanceOf[B] + } + } - def newBuilder[A]: Builder[A, Seq[A]] = immutable.Seq.newBuilder[A] + /** Make a jump table for KMP search. + * + * @param Wopt The target sequence + * @param wlen Just in case we're only IndexedSeq and not IndexedSeqOptimized + * @return KMP jump table for target sequence + */ + private def kmpJumpTable[B](Wopt: IndexedSeqView[B], wlen: Int) = { + val arr = new Array[Int](wlen) + var pos = 2 + var cnd = 0 + arr(0) = -1 + arr(1) = 0 + while (pos < wlen) { + if (Wopt(pos-1) == Wopt(cnd)) { + arr(pos) = cnd + 1 + pos += 1 + cnd += 1 + } + else if (cnd > 0) { + cnd = arr(cnd) + } + else { + arr(pos) = 0 + pos += 1 + } + } + arr + } } /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala deleted file mode 100644 index f77a6f16dc29..000000000000 --- a/src/library/scala/collection/SeqExtractors.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** An extractor used to head/tail deconstruct sequences. 
*/ -object +: { - def unapply[T,Coll <: SeqLike[T, Coll]]( - t: Coll with SeqLike[T, Coll]): Option[(T, Coll)] = - if(t.isEmpty) None - else Some(t.head -> t.tail) -} - -/** An extractor used to init/last deconstruct sequences. */ -object :+ { - /** Splits a sequence into init :+ last. - * @return Some((init, last)) if sequence is non-empty. None otherwise. - */ - def unapply[T,Coll <: SeqLike[T, Coll]]( - t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] = - if(t.isEmpty) None - else Some(t.init -> t.last) -} - -// Dummy to fool ant -private abstract class SeqExtractors diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala deleted file mode 100644 index 615c73699611..000000000000 --- a/src/library/scala/collection/SeqLike.scala +++ /dev/null @@ -1,937 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import immutable.{ List, Range } -import generic._ -import parallel.ParSeq -import scala.math.Ordering - -/** A template trait for sequences of type `Seq[A]` - * $seqInfo - * - * @define seqInfo - * Sequences are special cases of iterable collections of class `Iterable`. - * Unlike iterables, sequences always have a defined order of elements. - * Sequences provide a method `apply` for indexing. Indices range from `0` up to the `length` of - * a sequence. Sequences support a number of methods to find occurrences of elements or subsequences, including - * `segmentLength`, `prefixLength`, `indexWhere`, `indexOf`, `lastIndexWhere`, `lastIndexOf`, - * `startsWith`, `endsWith`, `indexOfSlice`. - * - * Another way to see a sequence is as a `PartialFunction` from `Int` values - * to the element type of the sequence. 
The `isDefinedAt` method of a sequence - * returns `true` for the interval from `0` until `length`. - * - * Sequences can be accessed in reverse order of their elements, using methods - * `reverse` and `reverseIterator`. - * - * Sequences have two principal subtraits, `IndexedSeq` and `LinearSeq`, which give different guarantees for performance. - * An `IndexedSeq` provides fast random-access of elements and a fast `length` operation. - * A `LinearSeq` provides fast access only to the first element via `head`, but also - * has a fast `tail` operation. - * - * @tparam A the element type of the collection - * @tparam Repr the type of the actual collection containing the elements. - * - * @author Martin Odersky - * @author Matthias Zenger - * @since 2.8 - * - * @define Coll `Seq` - * @define coll sequence - * @define thatinfo the class of the returned collection. Where possible, `That` is - * the same class as the current collection class `Repr`, but this - * depends on the element type `B` being admissible for that class, - * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]` - * is found. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. - * @define orderDependent - * @define orderDependentFold - */ -trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self => - - override protected[this] def thisCollection: Seq[A] = this.asInstanceOf[Seq[A]] - override protected[this] def toCollection(repr: Repr): Seq[A] = repr.asInstanceOf[Seq[A]] - - def length: Int - - def apply(idx: Int): A - - protected[this] override def parCombiner = ParSeq.newCombiner[A] - - /** Compares the length of this $coll to a test value. - * - * @param len the test value that gets compared with the length. 
- * @return A value `x` where - * {{{ - * x < 0 if this.length < len - * x == 0 if this.length == len - * x > 0 if this.length > len - * }}} - * The method as implemented here does not call `length` directly; its running time - * is `O(length min len)` instead of `O(length)`. The method should be overwritten - * if computing `length` is cheap. - */ - def lengthCompare(len: Int): Int = { - if (len < 0) 1 - else { - var i = 0 - val it = iterator - while (it.hasNext) { - if (i == len) return if (it.hasNext) 1 else 0 - it.next() - i += 1 - } - i - len - } - } - - override /*IterableLike*/ def isEmpty: Boolean = lengthCompare(0) == 0 - - /** The size of this $coll, equivalent to `length`. - * - * $willNotTerminateInf - */ - override def size = length - - def segmentLength(p: A => Boolean, from: Int): Int = { - var i = 0 - val it = iterator.drop(from) - while (it.hasNext && p(it.next())) - i += 1 - i - } - - def indexWhere(p: A => Boolean, from: Int): Int = { - var i = math.max(from, 0) - val it = iterator.drop(from) - while (it.hasNext) { - if (p(it.next())) return i - else i += 1 - } - -1 - } - - def lastIndexWhere(p: A => Boolean, end: Int): Int = { - var i = length - 1 - val it = reverseIterator - while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1 - i - } - - /** Iterates over distinct permutations. - * - * @return An Iterator which traverses the distinct permutations of this $coll. - * @example `"abb".permutations = Iterator(abb, bab, bba)` - */ - def permutations: Iterator[Repr] = - if (isEmpty) Iterator(repr) - else new PermutationsItr - - /** Iterates over combinations. A _combination_ of length `n` is a subsequence of - * the original sequence, with the elements taken in order. Thus, `"xy"` and `"yy"` - * are both length-2 combinations of `"xyy"`, but `"yx"` is not. If there is - * more than one way to generate the same subsequence, only one will be returned. 
- * - * For example, `"xyyy"` has three different ways to generate `"xy"` depending on - * whether the first, second, or third `"y"` is selected. However, since all are - * identical, only one will be chosen. Which of the three will be taken is an - * implementation detail that is not defined. - * - * @return An Iterator which traverses the possible n-element combinations of this $coll. - * @example `"abbbc".combinations(2) = Iterator(ab, ac, bb, bc)` - */ - def combinations(n: Int): Iterator[Repr] = - if (n < 0 || n > size) Iterator.empty - else new CombinationsItr(n) - - private class PermutationsItr extends AbstractIterator[Repr] { - private[this] val (elms, idxs) = init() - private var _hasNext = true - - def hasNext = _hasNext - def next(): Repr = { - if (!hasNext) - Iterator.empty.next() - - val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms - val result = (self.newBuilder ++= forcedElms).result() - var i = idxs.length - 2 - while(i >= 0 && idxs(i) >= idxs(i+1)) - i -= 1 - - if (i < 0) - _hasNext = false - else { - var j = idxs.length - 1 - while(idxs(j) <= idxs(i)) j -= 1 - swap(i,j) - - val len = (idxs.length - i) / 2 - var k = 1 - while (k <= len) { - swap(i+k, idxs.length - k) - k += 1 - } - } - result - } - private def swap(i: Int, j: Int) { - val tmpI = idxs(i) - idxs(i) = idxs(j) - idxs(j) = tmpI - val tmpE = elms(i) - elms(i) = elms(j) - elms(j) = tmpE - } - - private[this] def init() = { - val m = mutable.HashMap[A, Int]() - val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip - - (es.toBuffer, is.toArray) - } - } - - private class CombinationsItr(n: Int) extends AbstractIterator[Repr] { - // generating all nums such that: - // (1) nums(0) + .. 
+ nums(length-1) = n - // (2) 0 <= nums(i) <= cnts(i), where 0 <= i <= cnts.length-1 - private val (elms, cnts, nums) = init() - private val offs = cnts.scanLeft(0)(_ + _) - private var _hasNext = true - - def hasNext = _hasNext - def next(): Repr = { - if (!hasNext) - Iterator.empty.next() - - /* Calculate this result. */ - val buf = self.newBuilder - for(k <- 0 until nums.length; j <- 0 until nums(k)) - buf += elms(offs(k)+j) - val res = buf.result() - - /* Prepare for the next call to next. */ - var idx = nums.length - 1 - while (idx >= 0 && nums(idx) == cnts(idx)) - idx -= 1 - - idx = nums.lastIndexWhere(_ > 0, idx - 1) - - if (idx < 0) - _hasNext = false - else { - // OPT: hand rolled version of `sum = nums.view(idx + 1, nums.length).sum + 1` - var sum = 1 - var i = idx + 1 - while (i < nums.length) { - sum += nums(i) - i += 1 - } - nums(idx) -= 1 - for (k <- (idx+1) until nums.length) { - nums(k) = sum min cnts(k) - sum -= nums(k) - } - } - - res - } - - /** Rearrange seq to newSeq a0a0..a0a1..a1...ak..ak such that - * seq.count(_ == aj) == cnts(j) - * - * @return (newSeq,cnts,nums) - */ - private def init(): (IndexedSeq[A], Array[Int], Array[Int]) = { - val m = mutable.HashMap[A, Int]() - - // e => (e, weight(e)) - val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip - val cs = new Array[Int](m.size) - is foreach (i => cs(i) += 1) - val ns = new Array[Int](cs.length) - - var r = n - 0 until ns.length foreach { k => - ns(k) = r min cs(k) - r -= ns(k) - } - (es.toIndexedSeq, cs, ns) - } - } - - def reverse: Repr = { - var xs: List[A] = List() - for (x <- this) - xs = x :: xs - val b = newBuilder - b.sizeHint(this) - for (x <- xs) - b += x - b.result() - } - - def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - var xs: List[A] = List() - for (x <- this) - xs = x :: xs - val b = bf(repr) - for (x <- xs) - b += f(x) - - b.result() - } - - /** An iterator yielding elements in reversed 
order. - * - * $willNotTerminateInf - * - * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but might be more efficient. - * - * @return an iterator yielding the elements of this $coll in reversed order - */ - def reverseIterator: Iterator[A] = toCollection(reverse).iterator - - def startsWith[B](that: GenSeq[B], offset: Int): Boolean = { - val i = this.iterator drop offset - val j = that.iterator - while (j.hasNext && i.hasNext) - if (i.next != j.next) - return false - - !j.hasNext - } - - def endsWith[B](that: GenSeq[B]): Boolean = { - val i = this.iterator.drop(length - that.length) - val j = that.iterator - while (i.hasNext && j.hasNext) - if (i.next != j.next) - return false - - !j.hasNext - } - - /** Finds first index where this $coll contains a given sequence as a slice. - * $mayNotTerminateInf - * @param that the sequence to test - * @return the first index such that the elements of this $coll starting at this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def indexOfSlice[B >: A](that: GenSeq[B]): Int = indexOfSlice(that, 0) - - /** Finds first index after or at a start index where this $coll contains a given sequence as a slice. - * $mayNotTerminateInf - * @param that the sequence to test - * @param from the start index - * @return the first index `>= from` such that the elements of this $coll starting at this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. 
- */ - def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = - if (this.hasDefiniteSize && that.hasDefiniteSize) { - val l = length - val tl = that.length - val clippedFrom = math.max(0, from) - if (from > l) -1 - else if (tl < 1) clippedFrom - else if (l < tl) -1 - else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, forward = true) - } - else { - var i = from - var s: Seq[A] = thisCollection drop i - while (!s.isEmpty) { - if (s startsWith that) - return i - - i += 1 - s = s.tail - } - -1 - } - - /** Finds last index where this $coll contains a given sequence as a slice. - * $willNotTerminateInf - * @param that the sequence to test - * @return the last index such that the elements of this $coll starting a this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = lastIndexOfSlice(that, length) - - /** Finds last index before or at a given end index where this $coll contains a given sequence as a slice. - * @param that the sequence to test - * @param end the end index - * @return the last index `<= end` such that the elements of this $coll starting at this index - * match the elements of sequence `that`, or `-1` of no such subsequence exists. - */ - def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = { - val l = length - val tl = that.length - val clippedL = math.min(l-tl, end) - - if (end < 0) -1 - else if (tl < 1) clippedL - else if (l < tl) -1 - else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, forward = false) - } - - /** Tests whether this $coll contains a given sequence as a slice. - * $mayNotTerminateInf - * @param that the sequence to test - * @return `true` if this $coll contains a slice with the same elements - * as `that`, otherwise `false`. - */ - def containsSlice[B](that: GenSeq[B]): Boolean = indexOfSlice(that) != -1 - - /** Tests whether this $coll contains a given value as an element. 
- * $mayNotTerminateInf - * - * @param elem the element to test. - * @return `true` if this $coll has an element that is equal (as - * determined by `==`) to `elem`, `false` otherwise. - */ - def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem) - - /** Produces a new sequence which contains all elements of this $coll and also all elements of - * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. - * - * @param that the sequence to add. - * @tparam B the element type of the returned $coll. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements of this $coll - * followed by all elements of `that`. - * @usecase def union(that: Seq[A]): $Coll[A] - * @inheritdoc - * - * Another way to express this - * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`. - * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets. - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * followed by all elements of `that`. - */ - override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = - this ++ that - - /** Computes the multiset difference between this $coll and another sequence. - * - * @param that the sequence of elements to remove - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - * @usecase def diff(that: Seq[A]): $Coll[A] - * @inheritdoc - * - * $willNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * except some of occurrences of elements that also appear in `that`. 
- * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - */ - def diff[B >: A](that: GenSeq[B]): Repr = { - val occ = occCounts(that.seq) - val b = newBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) b += x - else occ(x) = ox - 1 - } - b.result() - } - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam B the element type of the returned $coll. - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * @usecase def intersect(that: Seq[A]): $Coll[A] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[B >: A](that: GenSeq[B]): Repr = { - val occ = occCounts(that.seq) - val b = newBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - b += x - occ(x) = ox - 1 - } - } - b.result() - } - - private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { - val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 } - for (y <- sq) occ(y) += 1 - occ - } - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. 
- */ - def distinct: Repr = { - val isImmutable = this.isInstanceOf[immutable.Seq[_]] - if (isImmutable && lengthCompare(1) <= 0) repr - else { - val b = newBuilder - val seen = new mutable.HashSet[A]() - var it = this.iterator - var different = false - while (it.hasNext) { - val next = it.next - if (seen.add(next)) b += next else different = true - } - if (different || !isImmutable) b.result() else repr - } - } - - def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - var i = 0 - val it = this.iterator - while (i < from && it.hasNext) { - b += it.next() - i += 1 - } - b ++= patch.seq - i = replaced - while (i > 0 && it.hasNext) { - it.next() - i -= 1 - } - while (it.hasNext) b += it.next() - b.result() - } - - def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - if (index < 0) throw new IndexOutOfBoundsException(index.toString) - val b = bf(repr) - var i = 0 - val it = this.iterator - while (i < index && it.hasNext) { - b += it.next() - i += 1 - } - if (!it.hasNext) throw new IndexOutOfBoundsException(index.toString) - b += elem - it.next() - while (it.hasNext) b += it.next() - b.result() - } - - def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - b += elem - b ++= thisCollection - b.result() - } - - def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - b ++= thisCollection - b += elem - b.result() - } - - def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - val L = length - b.sizeHint(math.max(L, len)) - var diff = len - L - b ++= thisCollection - while (diff > 0) { - b += elem - diff -= 1 - } - b.result() - } - - def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { - val i = this.iterator - val j = that.iterator - while (i.hasNext && j.hasNext) - if 
(!p(i.next(), j.next())) - return false - - !i.hasNext && !j.hasNext - } - - /** Sorts this $coll according to a comparison function. - * $willNotTerminateInf - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @param lt the comparison function which tests whether - * its first argument precedes its second argument in - * the desired ordering. - * @return a $coll consisting of the elements of this $coll - * sorted according to the comparison function `lt`. - * @example {{{ - * List("Steve", "Tom", "John", "Bob").sortWith(_.compareTo(_) < 0) = - * List("Bob", "John", "Steve", "Tom") - * }}} - */ - def sortWith(lt: (A, A) => Boolean): Repr = sorted(Ordering fromLessThan lt) - - /** Sorts this $Coll according to the Ordering which results from transforming - * an implicitly given Ordering with a transformation function. - * @see [[scala.math.Ordering]] - * $willNotTerminateInf - * @param f the transformation function mapping elements - * to some other domain `B`. - * @param ord the ordering assumed on domain `B`. - * @tparam B the target type of the transformation `f`, and the type where - * the ordering `ord` is defined. - * @return a $coll consisting of the elements of this $coll - * sorted according to the ordering where `x < y` if - * `ord.lt(f(x), f(y))`. - * - * @example {{{ - * val words = "The quick brown fox jumped over the lazy dog".split(' ') - * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]] - * words.sortBy(x => (x.length, x.head)) - * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped) - * }}} - */ - def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = sorted(ord on f) - - /** Sorts this $coll according to an Ordering. - * - * The sort is stable. 
That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @see [[scala.math.Ordering]] - * - * @param ord the ordering to be used to compare elements. - * @return a $coll consisting of the elements of this $coll - * sorted according to the ordering `ord`. - */ - def sorted[B >: A](implicit ord: Ordering[B]): Repr = { - val len = this.length - val b = newBuilder - if (len == 1) b ++= this - else if (len > 1) { - b.sizeHint(len) - val arr = new Array[AnyRef](len) // Previously used ArraySeq for more compact but slower code - var i = 0 - for (x <- this) { - arr(i) = x.asInstanceOf[AnyRef] - i += 1 - } - java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) - i = 0 - while (i < arr.length) { - b += arr(i).asInstanceOf[A] - i += 1 - } - } - b.result() - } - - /** Converts this $coll to a sequence. - * $willNotTerminateInf - * - * A new collection will not be built; in particular, lazy sequences will stay lazy. - */ - override def toSeq: Seq[A] = thisCollection - - /** Produces the range of all indices of this sequence. - * - * @return a `Range` value from `0` to one less than the length of this $coll. - */ - def indices: Range = 0 until length - - override def view = new SeqView[A, Repr] { - protected lazy val underlying = self.repr - override def iterator = self.iterator - override def length = self.length - override def apply(idx: Int) = self.apply(idx) - } - - override def view(from: Int, until: Int) = view.slice(from, until) - - /* Need to override string, so that it's not the Function1's string that gets mixed in. - */ - override def toString = super[IterableLike].toString -} - -/** The companion object for trait `SeqLike`. - */ -object SeqLike { - // KMP search utilities - - /** Make sure a target sequence has fast, correctly-ordered indexing for KMP. 
- * - * @author Rex Kerr - * @since 2.10 - * @param W The target sequence - * @param n0 The first element in the target sequence that we should use - * @param n1 The far end of the target sequence that we should use (exclusive) - * @return Target packed in an IndexedSeq (taken from iterator unless W already is an IndexedSeq) - */ - private def kmpOptimizeWord[B](W: Seq[B], n0: Int, n1: Int, forward: Boolean) = W match { - case iso: IndexedSeq[_] => - // Already optimized for indexing--use original (or custom view of original) - if (forward && n0==0 && n1==W.length) iso.asInstanceOf[IndexedSeq[B]] - else if (forward) new AbstractSeq[B] with IndexedSeq[B] { - val length = n1 - n0 - def apply(x: Int) = iso(n0 + x).asInstanceOf[B] - } - else new AbstractSeq[B] with IndexedSeq[B] { - def length = n1 - n0 - def apply(x: Int) = iso(n1 - 1 - x).asInstanceOf[B] - } - case _ => - // W is probably bad at indexing. Pack in array (in correct orientation) - // Would be marginally faster to special-case each direction - new AbstractSeq[B] with IndexedSeq[B] { - private[this] val Warr = new Array[AnyRef](n1-n0) - private[this] val delta = if (forward) 1 else -1 - private[this] val done = if (forward) n1-n0 else -1 - val wit = W.iterator.drop(n0) - var i = if (forward) 0 else (n1-n0-1) - while (i != done) { - Warr(i) = wit.next().asInstanceOf[AnyRef] - i += delta - } - - val length = n1 - n0 - def apply(x: Int) = Warr(x).asInstanceOf[B] - } - } - - /** Make a jump table for KMP search. 
- * - * @author paulp, Rex Kerr - * @since 2.10 - * @param Wopt The target sequence, as at least an IndexedSeq - * @param wlen Just in case we're only IndexedSeq and not IndexedSeqOptimized - * @return KMP jump table for target sequence - */ - private def kmpJumpTable[B](Wopt: IndexedSeq[B], wlen: Int) = { - val arr = new Array[Int](wlen) - var pos = 2 - var cnd = 0 - arr(0) = -1 - arr(1) = 0 - while (pos < wlen) { - if (Wopt(pos-1) == Wopt(cnd)) { - arr(pos) = cnd + 1 - pos += 1 - cnd += 1 - } - else if (cnd > 0) { - cnd = arr(cnd) - } - else { - arr(pos) = 0 - pos += 1 - } - } - arr - } - - /** A KMP implementation, based on the undoubtedly reliable wikipedia entry. - * Note: I made this private to keep it from entering the API. That can be reviewed. - * - * @author paulp, Rex Kerr - * @since 2.10 - * @param S Sequence that may contain target - * @param m0 First index of S to consider - * @param m1 Last index of S to consider (exclusive) - * @param W Target sequence - * @param n0 First index of W to match - * @param n1 Last index of W to match (exclusive) - * @param forward Direction of search (from beginning==true, from end==false) - * @return Index of start of sequence if found, -1 if not (relative to beginning of S, not m0). - */ - private def kmpSearch[B](S: Seq[B], m0: Int, m1: Int, W: Seq[B], n0: Int, n1: Int, forward: Boolean): Int = { - // Check for redundant case when target has single valid element - def clipR(x: Int, y: Int) = if (x < y) x else -1 - def clipL(x: Int, y: Int) = if (x > y) x else -1 - - if (n1 == n0+1) { - if (forward) - clipR(S.indexOf(W(n0), m0), m1) - else - clipL(S.lastIndexOf(W(n0), m1-1), m0-1) - } - - // Check for redundant case when both sequences are same size - else if (m1-m0 == n1-n0) { - // Accepting a little slowness for the uncommon case. 
- if (S.view.slice(m0, m1) == W.view.slice(n0, n1)) m0 - else -1 - } - // Now we know we actually need KMP search, so do it - else S match { - case xs: IndexedSeq[_] => - // We can index into S directly; it should be adequately fast - val Wopt = kmpOptimizeWord(W, n0, n1, forward) - val T = kmpJumpTable(Wopt, n1-n0) - var i, m = 0 - val zero = if (forward) m0 else m1-1 - val delta = if (forward) 1 else -1 - while (i+m < m1-m0) { - if (Wopt(i) == S(zero+delta*(i+m))) { - i += 1 - if (i == n1-n0) return (if (forward) m+m0 else m1-m-i) - } - else { - val ti = T(i) - m += i - ti - if (i > 0) i = ti - } - } - -1 - case _ => - // We had better not index into S directly! - val iter = S.iterator.drop(m0) - val Wopt = kmpOptimizeWord(W, n0, n1, forward = true) - val T = kmpJumpTable(Wopt, n1-n0) - val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind - var largest = 0 - var i, m = 0 - var answer = -1 - while (m+m0+n1-n0 <= m1) { - while (i+m >= largest) { - cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef] - largest += 1 - } - if (Wopt(i) == cache((i+m)%(n1-n0))) { - i += 1 - if (i == n1-n0) { - if (forward) return m+m0 - else { - i -= 1 - answer = m+m0 - val ti = T(i) - m += i - ti - if (i > 0) i = ti - } - } - } - else { - val ti = T(i) - m += i - ti - if (i > 0) i = ti - } - } - answer - } - } - - /** Finds a particular index at which one sequence occurs in another sequence. - * Both the source sequence and the target sequence are expressed in terms - * other sequences S' and T' with offset and length parameters. This - * function is designed to wrap the KMP machinery in a sufficiently general - * way that all library sequence searches can use it. It is unlikely you - * have cause to call it directly: prefer functions such as StringBuilder#indexOf - * and Seq#lastIndexOf. 
- * - * @param source the sequence to search in - * @param sourceOffset the starting offset in source - * @param sourceCount the length beyond sourceOffset to search - * @param target the sequence being searched for - * @param targetOffset the starting offset in target - * @param targetCount the length beyond targetOffset which makes up the target string - * @param fromIndex the smallest index at which the target sequence may start - * - * @return the applicable index in source where target exists, or -1 if not found - */ - def indexOf[B]( - source: Seq[B], sourceOffset: Int, sourceCount: Int, - target: Seq[B], targetOffset: Int, targetCount: Int, - fromIndex: Int - ): Int = { - // Fiddle with variables to match previous behavior and use kmpSearch - // Doing LOTS of max/min, both clearer and faster to use math._ - val slen = source.length - val clippedFrom = math.max(0, fromIndex) - val s0 = math.min(slen, sourceOffset + clippedFrom) - val s1 = math.min(slen, s0 + sourceCount) - val tlen = target.length - val t0 = math.min(tlen, targetOffset) - val t1 = math.min(tlen, t0 + targetCount) - - // Error checking - if (clippedFrom > slen-sourceOffset) -1 // Cannot return an index in range - else if (t1 - t0 < 1) s0 // Empty, matches first available position - else if (s1 - s0 < t1 - t0) -1 // Source is too short to find target - else { - // Nontrivial search - val ans = kmpSearch(source, s0, s1, target, t0, t1, forward = true) - if (ans < 0) ans else ans - math.min(slen, sourceOffset) - } - } - - /** Finds a particular index at which one sequence occurs in another sequence. - * Like `indexOf`, but finds the latest occurrence rather than earliest. 
- * - * @see [[scala.collection.SeqLike]], method `indexOf` - */ - def lastIndexOf[B]( - source: Seq[B], sourceOffset: Int, sourceCount: Int, - target: Seq[B], targetOffset: Int, targetCount: Int, - fromIndex: Int - ): Int = { - // Fiddle with variables to match previous behavior and use kmpSearch - // Doing LOTS of max/min, both clearer and faster to use math._ - val slen = source.length - val tlen = target.length - val s0 = math.min(slen, sourceOffset) - val s1 = math.min(slen, s0 + sourceCount) - val clippedFrom = math.min(s1 - s0, fromIndex) - val t0 = math.min(tlen, targetOffset) - val t1 = math.min(tlen, t0 + targetCount) - val fixed_s1 = math.min(s1, s0 + clippedFrom + (t1 - t0) - 1) - - // Error checking - if (clippedFrom < 0) -1 // Cannot return an index in range - else if (t1 - t0 < 1) s0+clippedFrom // Empty, matches last available position - else if (fixed_s1 - s0 < t1 - t0) -1 // Source is too short to find target - else { - // Nontrivial search - val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, forward = false) - if (ans < 0) ans else ans - s0 - } - } -} diff --git a/src/library/scala/collection/SeqMap.scala b/src/library/scala/collection/SeqMap.scala new file mode 100644 index 000000000000..f2b65dfbfb6f --- /dev/null +++ b/src/library/scala/collection/SeqMap.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.nowarn + +/** + * A generic trait for ordered maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. 
+ * @tparam V the type of the values associated with the keys in this linked map. + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] extends Map[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqMap" + + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) + diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala deleted file mode 100644 index 3ac78881d582..000000000000 --- a/src/library/scala/collection/SeqProxy.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** This trait implements a proxy for sequence objects. It forwards - * all calls to a different sequence object. - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SeqProxy[+A] extends Seq[A] with SeqProxyLike[A, Seq[A]] diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala deleted file mode 100644 index aed6ed15fd85..000000000000 --- a/src/library/scala/collection/SeqProxyLike.scala +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ - -// Methods could be printed by cat SeqLike.scala | egrep '^ (override )?def' - - -/** This trait implements a proxy for sequences. It forwards - * all calls to a different sequence. - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] { - override def size = self.size - override def toSeq: Seq[A] = self.toSeq - override def length: Int = self.length - override def apply(idx: Int): A = self.apply(idx) - override def lengthCompare(len: Int): Int = self.lengthCompare(len) - override def isDefinedAt(x: Int): Boolean = self.isDefinedAt(x) - override def segmentLength(p: A => Boolean, from: Int): Int = self.segmentLength(p, from) - override def prefixLength(p: A => Boolean) = self.prefixLength(p) - override def indexWhere(p: A => Boolean): Int = self.indexWhere(p) - override def indexWhere(p: A => Boolean, from: Int): Int = self.indexWhere(p, from) - override def indexOf[B >: A](elem: B): Int = self.indexOf(elem) - override def indexOf[B >: A](elem: B, from: Int): Int = self.indexOf(elem, from) - override def lastIndexOf[B >: A](elem: B): Int = self.lastIndexOf(elem) - override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem == _, end) - override def lastIndexWhere(p: A => Boolean): Int = self.lastIndexWhere(p, length - 1) - override def lastIndexWhere(p: A => Boolean, end: Int): Int = self.lastIndexWhere(p) - override def reverse: Repr = self.reverse - override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.reverseMap(f)(bf) - override def reverseIterator: Iterator[A] = self.reverseIterator - override def 
startsWith[B](that: GenSeq[B], offset: Int): Boolean = self.startsWith(that, offset) - override def startsWith[B](that: GenSeq[B]): Boolean = self.startsWith(that) - override def endsWith[B](that: GenSeq[B]): Boolean = self.endsWith(that) - override def indexOfSlice[B >: A](that: GenSeq[B]): Int = self.indexOfSlice(that) - override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = self.indexOfSlice(that) - override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that) - override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end) - override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1 - override def contains[A1 >: A](elem: A1): Boolean = self.contains(elem) - override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf) - override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that) - override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that) - override def distinct: Repr = self.distinct - override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf) - override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.updated(index, elem)(bf) - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.+:(elem)(bf) - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.:+(elem)(bf) - override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.padTo(len, elem)(bf) - override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p) - override def sortWith(lt: (A, A) => Boolean): Repr = self.sortWith(lt) - override def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = 
self.sortBy(f)(ord) - override def sorted[B >: A](implicit ord: Ordering[B]): Repr = self.sorted(ord) - override def indices: Range = self.indices - override def view = self.view - override def view(from: Int, until: Int) = self.view(from, until) -} - - diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala index ccf9c8cf7bda..a45797892220 100644 --- a/src/library/scala/collection/SeqView.scala +++ b/src/library/scala/collection/SeqView.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,23 +13,202 @@ package scala package collection -import generic._ -import TraversableView.NoBuilder +import scala.annotation.nowarn +import scala.collection.generic.CommonErrors -/** A base trait for non-strict views of sequences. - * $seqViewInfo - */ -trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]] -/** An object containing the necessary implicit definitions to make - * `SeqView`s work. Its definitions are generally not accessed directly by clients. 
- */ +trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { + override def view: SeqView[A] = this + + override def map[B](f: A => B): SeqView[B] = new SeqView.Map(this, f) + override def appended[B >: A](elem: B): SeqView[B] = new SeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): SeqView[B] = new SeqView.Prepended(elem, this) + override def reverse: SeqView[A] = new SeqView.Reverse(this) + override def take(n: Int): SeqView[A] = new SeqView.Take(this, n) + override def drop(n: Int): SeqView[A] = new SeqView.Drop(this, n) + override def takeRight(n: Int): SeqView[A] = new SeqView.TakeRight(this, n) + override def dropRight(n: Int): SeqView[A] = new SeqView.DropRight(this, n) + override def tapEach[U](f: A => U): SeqView[A] = new SeqView.Map(this, { (a: A) => f(a); a }) + + def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(prefix, this) + + override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A] = new SeqView.Sorted(this, ord) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqView" +} + object SeqView { - type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]} - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] = - new CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder + + /** A `SeqOps` whose collection type and collection type constructor are unknown */ + private type SomeSeqOps[+A] = SeqOps[A, AnyConstr, _] + + /** A view that doesn’t apply any transformation to an underlying sequence */ + @SerialVersionUID(3L) + class Id[+A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + def 
apply(idx: Int): A = underlying.apply(idx) + def length: Int = underlying.length + def iterator: Iterator[A] = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Map[+A, +B](underlying: SomeSeqOps[A], f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { + def apply(idx: Int): B = f(underlying(idx)) + def length: Int = underlying.length + } + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeSeqOps[A], elem: A) extends View.Appended(underlying, elem) with SeqView[A] { + def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeSeqOps[A]) extends View.Prepended(elem, underlying) with SeqView[A] { + def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeSeqOps[A], suffix: SomeSeqOps[A]) extends View.Concat[A](prefix, suffix) with SeqView[A] { + def apply(idx: Int): A = { + val l = prefix.length + if (idx < l) prefix(idx) else suffix(idx - l) + } + def length: Int = prefix.length + suffix.length + } + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + def apply(i: Int) = underlying.apply(size - 1 - i) + def length = underlying.size + def iterator: Iterator[A] = underlying.reverseIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Take[+A](underlying: SomeSeqOps[A], n: Int) extends View.Take(underlying, n) with SeqView[A] { + def apply(idx: Int): A = if (idx < n) { + underlying(idx) + } else { + throw ( + if (underlying.knownSize >= 0) CommonErrors.indexOutOfBounds(index = idx, max = knownSize - 1) + else 
CommonErrors.indexOutOfBounds(index = idx) + ) } + def length: Int = underlying.length min normN + } + + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeSeqOps[A], n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + private[this] val delta = (underlying.size - (n max 0)) max 0 + def length = underlying.size - delta + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + delta) + } + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeSeqOps[A], n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + def length = (underlying.size - normN) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + normN) + override def drop(n: Int): SeqView[A] = new Drop(underlying, this.n + n) + } + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeSeqOps[A], n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + private[this] val len = (underlying.size - (n max 0)) max 0 + def length = len + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i) + } + + @SerialVersionUID(3L) + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A], + private[this] val len: Int, + ord: Ordering[B]) + extends SeqView[A] { + outer => + + // force evaluation immediately by calling `length` so infinite collections + // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls + def this(underlying: SomeSeqOps[A], ord: Ordering[B]) = this(underlying, underlying.length, ord) + + @SerialVersionUID(3L) + private[this] class ReverseSorted extends SeqView[A] { + private[this] lazy val _reversed = new SeqView.Reverse(_sorted) + + def apply(i: Int): A = _reversed.apply(i) + def length: Int = len + def iterator: Iterator[A] = Iterator.empty ++ _reversed.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) + override def 
reverse: SeqView[A] = outer + override protected def reversed: Iterable[A] = outer + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = + if (ord1 == Sorted.this.ord) outer + else if (ord1.isReverseOf(Sorted.this.ord)) this + else new Sorted(elems, len, ord1) + } + + @volatile private[this] var evaluated = false + + private[this] lazy val _sorted: Seq[A] = { + val res = { + val len = this.len + if (len == 0) Nil + else if (len == 1) List(underlying.head) + else { + val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] + @annotation.unused val copied = underlying.copyToArray(arr) + //assert(copied == len) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it + // is safe because: + // - the ArraySeq is immutable, and items that are not of type A + // cannot be added to it + // - we know it only contains items of type A (and if this collection + // contains items of another type, we'd get a CCE anyway) + // - the cast doesn't actually do anything in the runtime because the + // type of A is not known and Array[_] is Array[AnyRef] + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + } + } + evaluated = true + underlying = null + res + } + + private[this] def elems: SomeSeqOps[A] = { + val orig = underlying + if (evaluated) _sorted else orig + } + + def apply(i: Int): A = _sorted.apply(i) + def length: Int = len + def iterator: Iterator[A] = Iterator.empty ++ _sorted.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory) + override def reverse: SeqView[A] = new ReverseSorted + // we know `_sorted` is either tiny or has efficient random access, + // so this is acceptable for `reversed` + override protected def reversed: Iterable[A] = new ReverseSorted + + override def sorted[B1 >: A](implicit 
ord1: Ordering[B1]): SeqView[A] = + if (ord1 == this.ord) this + else if (ord1.isReverseOf(this.ord)) reverse + else new Sorted(elems, len, ord1) + } } +/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A] diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala deleted file mode 100644 index 8b3e5a955c99..000000000000 --- a/src/library/scala/collection/SeqViewLike.scala +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import Seq.fill - -/** A template trait for non-strict views of sequences. - * $seqViewInfo - * - * @define seqViewInfo - * $viewInfo - * All views for sequences are defined by re-interpreting the `length` and - * `apply` methods. - * - * @author Martin Odersky - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - * @tparam This the type of the view itself - */ -trait SeqViewLike[+A, - +Coll, - +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]] - extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] -{ self => - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. 
*/ - private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B] - - trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] { - def length: Int - def apply(idx: Int): B - override def toString = viewToString - } - - trait EmptyView extends Transformed[Nothing] with super.EmptyView { - final override def length = 0 - final override def apply(n: Int) = Nil(n) - } - - trait Forced[B] extends super.Forced[B] with Transformed[B] { - def length = forced.length - def apply(idx: Int) = forced.apply(idx) - } - - trait Sliced extends super.Sliced with Transformed[A] { - def length = iterator.size - def apply(idx: Int): A = - if (idx >= 0 && idx + from < until) self.apply(idx + from) - else throw new IndexOutOfBoundsException(idx.toString) - - override def foreach[U](f: A => U) = iterator foreach f - override def iterator: Iterator[A] = self.iterator drop from take endpoints.width - } - - trait Mapped[B] extends super.Mapped[B] with Transformed[B] { - def length = self.length - def apply(idx: Int): B = mapping(self(idx)) - } - - trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] { - protected[this] lazy val index = { - val index = new Array[Int](self.length + 1) - index(0) = 0 - for (i <- 0 until self.length) // note that if the mapping returns a list, performance is bad, bad - index(i + 1) = index(i) + mapping(self(i)).seq.size - index - } - protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = { - val mid = (lo + hi) / 2 - if (idx < index(mid)) findRow(idx, lo, mid - 1) - else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi) - else mid - } - def length = index(self.length) - def apply(idx: Int) = { - if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString) - val row = findRow(idx, 0, self.length - 1) - mapping(self(row)).seq.toSeq(idx - index(row)) - } - } - - trait Appended[B >: A] extends super.Appended[B] with Transformed[B] 
{ - protected[this] lazy val restSeq = rest.toSeq - def length = self.length + restSeq.length - def apply(idx: Int) = - if (idx < self.length) self(idx) else restSeq(idx - self.length) - } - - trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] { - protected[this] lazy val fstSeq = fst.toSeq - def length: Int = fstSeq.length + self.length - def apply(idx: Int): B = - if (idx < fstSeq.length) fstSeq(idx) - else self.apply(idx - fstSeq.length) - } - - trait Filtered extends super.Filtered with Transformed[A] { - protected[this] lazy val index = { - var len = 0 - val arr = new Array[Int](self.length) - for (i <- 0 until self.length) - if (pred(self(i))) { - arr(len) = i - len += 1 - } - arr take len - } - def length = index.length - def apply(idx: Int) = self(index(idx)) - } - - trait TakenWhile extends super.TakenWhile with Transformed[A] { - protected[this] lazy val len = self prefixLength pred - def length = len - def apply(idx: Int) = - if (idx < len) self(idx) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait DroppedWhile extends super.DroppedWhile with Transformed[A] { - protected[this] lazy val start = self prefixLength pred - def length = self.length - start - def apply(idx: Int) = - if (idx >= 0) self(idx + start) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] { - protected[this] lazy val thatSeq = other.seq.toSeq - /* Have to be careful here - other may be an infinite sequence. 
*/ - def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length - def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx)) - } - - trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] { - protected[this] lazy val thatSeq = other.seq.toSeq - def length: Int = self.length max thatSeq.length - def apply(idx: Int) = - (if (idx < self.length) self.apply(idx) else thisElem, - if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem) - } - - trait Reversed extends Transformed[A] { - override def iterator: Iterator[A] = createReversedIterator - def length: Int = self.length - def apply(idx: Int): A = self.apply(length - 1 - idx) - final override protected[this] def viewIdentifier = "R" - - private def createReversedIterator = { - var lst = List[A]() - for (elem <- self) lst ::= elem - lst.iterator - } - } - - // Note--for this to work, must ensure 0 <= from and 0 <= replaced - // Must also take care to allow patching inside an infinite stream - // (patching in an infinite stream is not okay) - trait Patched[B >: A] extends Transformed[B] { - protected[this] val from: Int - protected[this] val patch: GenSeq[B] - protected[this] val replaced: Int - private lazy val plen = patch.length - override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced) - def length: Int = { - val len = self.length - val pre = math.min(from, len) - val post = math.max(0, len - pre - replaced) - pre + plen + post - } - def apply(idx: Int): B = { - val actualFrom = if (self.lengthCompare(from) < 0) self.length else from - if (idx < actualFrom) self.apply(idx) - else if (idx < actualFrom + plen) patch.apply(idx - actualFrom) - else self.apply(idx - plen + replaced) - } - final override protected[this] def viewIdentifier = "P" - } - - /** Boilerplate method, to override in each subclass - * This method could be eliminated if Scala had virtual classes - */ - protected override def newForced[B](xs: => 
GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] - protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] - protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B] - protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] - protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] - protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered - protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced - protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile - protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile - protected override def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B] - protected override def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new { - val other = that - val thisElem = _thisElem - val thatElem = _thatElem - } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B] - protected def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed - protected def newPatched[B >: A](_from: Int, _patch: GenSeq[B], _replaced: Int): Transformed[B] = new { - val from = _from - val patch = _patch - val replaced = _replaced - } with 
AbstractTransformed[B] with Patched[B] - - // see comment in IterableViewLike. - protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n)) - protected override def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue)) - - override def reverse: This = newReversed.asInstanceOf[This] - - override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = { - // Be careful to not evaluate the entire sequence! Patch should work (slowly, perhaps) on infinite streams. - val nonNegFrom = math.max(0,from) - val nonNegRep = math.max(0,replaced) - newPatched(nonNegFrom, patch, nonNegRep).asInstanceOf[That] -// was: val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That] -// else super.patch[B, That](from, patch, replaced)(bf) - } - - override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - patch(length, fill(len - length)(elem), 0) - - override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = - reverse map f - - override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = { - require(0 <= index && index < length) // !!! can't call length like this. 
- patch(index, List(elem), 1)(bf) - } - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(elem :: Nil).asInstanceOf[That] - - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = - ++(Iterator.single(elem))(bf) - - override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newForced(thisSeq union that).asInstanceOf[That] - - override def diff[B >: A](that: GenSeq[B]): This = - newForced(thisSeq diff that).asInstanceOf[This] - - override def intersect[B >: A](that: GenSeq[B]): This = - newForced(thisSeq intersect that).asInstanceOf[This] - - override def sorted[B >: A](implicit ord: Ordering[B]): This = - newForced(thisSeq sorted ord).asInstanceOf[This] - - override def sortWith(lt: (A, A) => Boolean): This = - newForced(thisSeq sortWith lt).asInstanceOf[This] - - override def sortBy[B](f: (A) => B)(implicit ord: Ordering[B]): This = - newForced(thisSeq sortBy f).asInstanceOf[This] - - override def combinations(n: Int): Iterator[This] = - (thisSeq combinations n).map(as => newForced(as).asInstanceOf[This]) - - override def permutations: Iterator[This] = - thisSeq.permutations.map(as => newForced(as).asInstanceOf[This]) - - override def distinct: This = - newForced(thisSeq.distinct).asInstanceOf[This] - - override def stringPrefix = "SeqView" -} diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index 813dc91205f8..bce5974ed5db 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,41 +13,255 @@ package scala package collection -import generic._ +import scala.util.hashing.MurmurHash3 +import java.lang.String -/** A base trait for all sets, mutable as well as immutable. - * - * $setNote - * '''Implementation note:''' If your additions and mutations return the same kind of set as the set - * you are defining, you should inherit from `SetLike` as well. - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - */ -trait Set[A] extends (A => Boolean) - with Iterable[A] - with GenSet[A] - with GenericSetTemplate[A, Set] - with SetLike[A, Set[A]] { - override def companion: GenericCompanion[Set] = Set - - override def seq: Set[A] = this +import scala.annotation.nowarn + +/** Base trait for set collections. + */ +trait Set[A] + extends Iterable[A] + with SetOps[A, Set, Set[A]] + with Equals + with IterableFactoryDefaults[A, Set] { + + def canEqual(that: Any) = true + + /** + * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if + * - the argument `that` is a `Set`, + * - the two sets have the same [[size]], and + * - for every `element` this set, `other.contains(element) == true`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality + * to specific set types. The `Set` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same + * element equivalence function in their lookup operation. For example, the element equivalence operation in a + * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads + * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2` + * (used for lookup in `HashSet`). 
+ * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeSet("A")(ord) == HashSet("a") + * val res0: Boolean = false + * + * scala> HashSet("a") == TreeSet("A")(ord) + * val res1: Boolean = true + * }}} + * + * + * @param that The set to which this set is compared + * @return `true` if the two sets are equal according to the description + */ + override def equals(that: Any): Boolean = + (this eq that.asInstanceOf[AnyRef]) || (that match { + case set: Set[A @unchecked] if set.canEqual(this) => + (this.size == set.size) && { + try this.subsetOf(set) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } + case _ => + false + }) + + override def hashCode(): Int = MurmurHash3.setHash(this) + + override def iterableFactory: IterableFactory[Set] = Set + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "Set" + + override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too } -/** $factoryInfo - * The current default implementation of a $Coll is one of `EmptySet`, `Set1`, `Set2`, `Set3`, `Set4` in - * class `immutable.Set` for sets of sizes up to 4, and a `immutable.HashSet` for sets of larger sizes. 
- * @define coll set - * @define Coll `Set` - */ -object Set extends SetFactory[Set] { - def newBuilder[A] = immutable.Set.newBuilder[A] - override def empty[A]: Set[A] = immutable.Set.empty[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Set[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] +/** Base trait for set operations + * + * @define coll set + * @define Coll `Set` + */ +trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends IterableOps[A, CC, C] + with (A => Boolean) { + + def contains(elem: A): Boolean + + /** Tests if some element is contained in this set. + * + * This method is equivalent to `contains`. It allows sets to be interpreted as predicates. + * @param elem the element to test for membership. + * @return `true` if `elem` is contained in this set, `false` otherwise. + */ + @`inline` final def apply(elem: A): Boolean = this.contains(elem) + + /** Tests whether this set is a subset of another set. + * + * @param that the set to test. + * @return `true` if this set is a subset of `that`, i.e. if + * every element of this set is also an element of `that`. + */ + def subsetOf(that: Set[A]): Boolean = this.forall(that) + + /** An iterator over all subsets of this set of the given size. + * If the requested size is impossible, an empty iterator is returned. + * + * @param len the size of the subsets. + * @return the iterator. + */ + def subsets(len: Int): Iterator[C] = { + if (len < 0 || len > size) Iterator.empty + else new SubsetsItr(this.to(IndexedSeq), len) + } + + /** An iterator over all subsets of this set. + * + * @return the iterator. 
+ */ + def subsets(): Iterator[C] = new AbstractIterator[C] { + private[this] val elms = SetOps.this.to(IndexedSeq) + private[this] var len = 0 + private[this] var itr: Iterator[C] = Iterator.empty + + def hasNext = len <= elms.size || itr.hasNext + def next() = { + if (!itr.hasNext) { + if (len > elms.size) Iterator.empty.next() + else { + itr = new SubsetsItr(elms, len) + len += 1 + } + } + + itr.next() + } + } + + /** An Iterator including all subsets containing exactly len elements. + * If the elements in 'This' type is ordered, then the subsets will also be in the same order. + * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} + * + * $willForceEvaluation + * + */ + private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] { + private[this] val idxs = Array.range(0, len+1) + private[this] var _hasNext = true + idxs(len) = elms.size + + def hasNext = _hasNext + @throws[NoSuchElementException] + def next(): C = { + if (!hasNext) Iterator.empty.next() + + val buf = newSpecificBuilder + idxs.slice(0, len) foreach (idx => buf += elms(idx)) + val result = buf.result() + + var i = len - 1 + while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 + + if (i < 0) _hasNext = false + else { + idxs(i) += 1 + for (j <- (i+1) until len) + idxs(j) = idxs(j-1) + 1 + } + + result + } + } + + /** Computes the intersection between this set and another set. + * + * @param that the set to intersect with. + * @return a new set consisting of all elements that are both in this + * set and in the given set `that`. + */ + def intersect(that: Set[A]): C = this.filter(that) + + /** Alias for `intersect` */ + @`inline` final def & (that: Set[A]): C = intersect(that) + + /** Computes the difference of this set and another set. + * + * @param that the set of elements to exclude. + * @return a set containing those elements of this + * set that are not also contained in the given set `that`. 
+ */ + def diff(that: Set[A]): C + + /** Alias for `diff` */ + @`inline` final def &~ (that: Set[A]): C = this diff that + + @deprecated("Consider requiring an immutable Set", "2.13.0") + def -- (that: IterableOnce[A]): C = { + val toRemove = that.iterator.to(immutable.Set) + fromSpecific(view.filterNot(toRemove)) + } + + @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0") + def - (elem: A): C = diff(Set(elem)) + + @deprecated("Use &- with an explicit collection argument instead of - with varargs", "2.13.0") + def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2) + + /** Creates a new $ccoll by adding all elements contained in another collection to this $coll, omitting duplicates. + * + * Example: + * {{{ + * scala> val a = Set(1, 2) concat Set(2, 3) + * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3) + * }}} + * + * @param that the collection containing the elements to add. + * @return a new $coll with the given elements added, omitting duplicates. + */ + def concat(that: collection.IterableOnce[A]): C = this match { + case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) => + // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. 
PR #10036) + var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]] + val it = that.iterator + while (it.hasNext) result = result + it.next() + result.asInstanceOf[C] + case _ => fromSpecific(that match { + case that: collection.Iterable[A] => new View.Concat(this, that) + case _ => iterator.concat(that.iterator) + }) + } + + @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") + def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + /** Alias for `concat` */ + @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that) + + /** Computes the union between of set and another set. + * + * @param that the set to form the union with. + * @return a new set consisting of all elements that are in this + * set or in the given set `that`. + */ + @`inline` final def union(that: Set[A]): C = concat(that) + + /** Alias for `union` */ + @`inline` final def | (that: Set[A]): C = concat(that) } +/** + * $factoryInfo + * @define coll set + * @define Coll `Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory.Delegate[Set](immutable.Set) + /** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A] diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala deleted file mode 100644 index 5005c9c5a7a3..000000000000 --- a/src/library/scala/collection/SetLike.scala +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import mutable.{Builder, SetBuilder} -import scala.annotation.migration -import parallel.ParSet -import scala.collection.immutable.TreeSet - -/** A template trait for sets. - * - * $setNote - * '''Implementation note:''' - * This trait provides most of the operations of a `Set` independently of its representation. - * It is typically inherited by concrete implementations of sets. - * $setTags - * @since 2.8 - * - * @define setNote - * - * A set is a collection that contains no duplicate elements. - * - * To implement a concrete set, you need to provide implementations of the - * following methods: - * {{{ - * def contains(key: A): Boolean - * def iterator: Iterator[A] - * def +(elem: A): This - * def -(elem: A): This - * }}} - * If you wish that methods like `take`, `drop`, - * `filter` return the same kind of set, you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * - * @define setTags - * @tparam A the type of the elements of the set - * @tparam This the type of the set itself. - * - * @author Martin Odersky - * - * @define coll set - * @define Coll Set - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait SetLike[A, +This <: SetLike[A, This] with Set[A]] -extends IterableLike[A, This] - with GenSetLike[A, This] - with Subtractable[A, This] - with Parallelizable[A, ParSet[A]] -{ -self => - - /** The empty set of the same type as this set - * @return an empty set of type `This`. - */ - def empty: This - - /** A common implementation of `newBuilder` for all sets in terms - * of `empty`. Overridden for mutable sets in - * - * `mutable.SetLike`. 
- */ - override protected[this] def newBuilder: Builder[A, This] = new SetBuilder[A, This](empty) - - protected[this] override def parCombiner = ParSet.newCombiner[A] - - // Default collection type appropriate for immutable collections; mutable collections override this - override def toSeq: Seq[A] = { - if (isEmpty) Vector.empty[A] - else { - val vb = Vector.newBuilder[A] - foreach(vb += _) - vb.result - } - } - - override def toBuffer[A1 >: A]: mutable.Buffer[A1] = { - val result = new mutable.ArrayBuffer[A1](size) - // Faster to let the map iterate itself than to defer through copyToBuffer - foreach(result += _) - result - } - - // note: this is only overridden here to add the migration annotation, - // which I hope to turn into an Xlint style warning as the migration aspect - // is not central to its importance. - @migration("Set.map now returns a Set, so it will discard duplicate values.", "2.8.0") - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = super.map(f)(bf) - - /** Tests if some element is contained in this set. - * - * @param elem the element to test for membership. - * @return `true` if `elem` is contained in this set, `false` otherwise. - */ - def contains(elem: A): Boolean - - /** Creates a new set with an additional element, unless the element is - * already present. - * - * @param elem the element to be added - * @return a new set that contains all elements of this set and that also - * contains `elem`. - */ - def + (elem: A): This - - /** Creates a new $coll with additional elements, omitting duplicates. - * - * This method takes two or more elements to be added. Elements that already exist in the $coll will - * not be added. Another overloaded variant of this method handles the case where a single element is added. - * - * Example: - * {{{ - * scala> val a = Set(1, 3) + 2 + 3 - * a: scala.collection.immutable.Set[Int] = Set(1, 3, 2) - * }}} - * - * @param elem1 the first element to add. 
- * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new $coll with the given elements added, omitting duplicates. - */ - def + (elem1: A, elem2: A, elems: A*): This = this + elem1 + elem2 ++ elems - - /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates. - * - * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll. - * - * Example: - * {{{ - * scala> val a = Set(1, 2) ++ Set(2, "a") - * a: scala.collection.immutable.Set[Any] = Set(1, 2, a) - * }}} - * - * @param elems the collection containing the elements to add. - * @return a new $coll with the given elements added, omitting duplicates. - */ - def ++ (elems: GenTraversableOnce[A]): This = { - import immutable.HashSet - //in 2.14 this should be moved to the appropriate place - HashSet and EmptySet. - //we can't break binary comparability before then - this match { - case _ if this eq immutable.Set.empty.asInstanceOf[AnyRef] => - import immutable.Set.{Set1, Set2, Set3, Set4} - elems match { - case hs: HashSet[A] if hs.size > 4 => hs.asInstanceOf[This] - case hs: Set1[A] => hs.asInstanceOf[This] - case hs: Set2[A] => hs.asInstanceOf[This] - case hs: Set3[A] => hs.asInstanceOf[This] - case hs: Set4[A] => hs.asInstanceOf[This] - case _ => - if (elems.isEmpty) this.asInstanceOf[This] - else (repr /: elems.seq) (_ + _) - } - case hs: immutable.HashSet[A] => - elems match { - case that: GenSet[A] => - hs.union(that).asInstanceOf[This] - case _ => - (repr /: elems.seq) (_ + _) - } - case ts1: TreeSet[A] => - elems match { - case ts2: TreeSet[A] if ts1.ordering == ts2.ordering => - ts1.addAllTreeSetImpl(ts2).asInstanceOf[This] - case _ => - (repr /: elems.seq) (_ + _) - } - case _ => - (repr /: elems.seq) (_ + _) - - } - } - - /** Creates a new set with a given element removed from this set. 
- * - * @param elem the element to be removed - * @return a new set that contains all elements of this set but that does not - * contain `elem`. - */ - def - (elem: A): This - - /** Tests if this set is empty. - * - * @return `true` if there is no element in the set, `false` otherwise. - */ - override def isEmpty: Boolean = size == 0 - - /** Computes the union between of set and another set. - * - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - def union(that: GenSet[A]): This = this ++ that - - /** Computes the difference of this set and another set. - * - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. - */ - def diff(that: GenSet[A]): This = this -- that - - /** An iterator over all subsets of this set of the given size. - * If the requested size is impossible, an empty iterator is returned. - * - * @param len the size of the subsets. - * @return the iterator. - */ - def subsets(len: Int): Iterator[This] = { - if (len < 0 || len > size) Iterator.empty - else new SubsetsItr(self.toIndexedSeq, len) - } - - /** An iterator over all subsets of this set. - * - * @return the iterator. - */ - def subsets(): Iterator[This] = new AbstractIterator[This] { - private val elms = self.toIndexedSeq - private var len = 0 - private var itr: Iterator[This] = Iterator.empty - - def hasNext = len <= elms.size || itr.hasNext - def next = { - if (!itr.hasNext) { - if (len > elms.size) Iterator.empty.next() - else { - itr = new SubsetsItr(elms, len) - len += 1 - } - } - - itr.next() - } - } - - /** An Iterator including all subsets containing exactly len elements. - * If the elements in 'This' type is ordered, then the subsets will also be in the same order. 
- * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} - * - * @author Eastsun - * @date 2010.12.6 - */ - private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[This] { - private val idxs = Array.range(0, len+1) - private var _hasNext = true - idxs(len) = elms.size - - def hasNext = _hasNext - def next(): This = { - if (!hasNext) Iterator.empty.next() - - val buf = self.newBuilder - idxs.slice(0, len) foreach (idx => buf += elms(idx)) - val result = buf.result() - - var i = len - 1 - while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 - - if (i < 0) _hasNext = false - else { - idxs(i) += 1 - for (j <- (i+1) until len) - idxs(j) = idxs(j-1) + 1 - } - - result - } - } - - /** Defines the prefix of this object's `toString` representation. - * @return a string representation which starts the result of `toString` applied to this set. - * Unless overridden this is simply `"Set"`. - */ - override def stringPrefix: String = "Set" - override def toString = super[IterableLike].toString - -} diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala deleted file mode 100644 index 8e69797d01c2..000000000000 --- a/src/library/scala/collection/SetProxy.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -/** This is a simple wrapper class for [[scala.collection.Set]]. - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. 
- * - * @author Matthias Zenger - * @author Martin Odersky - * @since 2.0 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala deleted file mode 100644 index c170afc5c179..000000000000 --- a/src/library/scala/collection/SetProxyLike.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -// Methods could be printed by cat SetLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for sets. It forwards - * all calls to a different set. - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This] { - def empty: This - override def contains(elem: A): Boolean = self.contains(elem) - override def + (elem: A) = self.+(elem) - override def - (elem: A) = self.-(elem) - override def isEmpty: Boolean = self.isEmpty - override def apply(elem: A): Boolean = self.apply(elem) - override def intersect(that: GenSet[A]) = self.intersect(that) - override def &(that: GenSet[A]): This = self.&(that) - override def union(that: GenSet[A]): This = self.union(that) - override def | (that: GenSet[A]): This = self.|(that) - override def diff(that: GenSet[A]): This = self.diff(that) - override def &~(that: GenSet[A]): This = self.&~(that) - override def subsetOf(that: GenSet[A]): Boolean = self.subsetOf(that) -} diff --git 
a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 12d22282bb96..d2ccb9e38aa9 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,44 +13,208 @@ package scala package collection -import generic._ -import mutable.Builder +import scala.annotation.{implicitNotFound, nowarn} -/** A map whose keys are sorted. - * - * @author Sean McDirmid - * @author Martin Odersky - * @since 2.4 - */ -trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B]] { - /** Needs to be overridden in subclasses. */ - override def empty: SortedMap[A, B] = SortedMap.empty[A, B] +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" - override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] = - immutable.SortedMap.newBuilder[A, B] + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + 
case _ => super.equals(that) + } } -/** - * @since 2.8 - */ -object SortedMap extends SortedMapFactory[SortedMap] { - def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord) - - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] - - private[collection] trait Default[A, +B] extends SortedMap[A, B] { - self => - override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { - val b = SortedMap.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. + */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. 
+ * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. 
+ */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet - override def - (key: A): SortedMap[A, B] = { - val b = newBuilder - for (kv <- this; if kv._1 != key) b += kv - b.result() + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet } } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. 
+ * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(using ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) } + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. 
You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala deleted file mode 100644 index 692aad7b9049..000000000000 --- a/src/library/scala/collection/SortedMapLike.scala +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ - -/** A template for maps whose keys are sorted. - * To create a concrete sorted map, you need to implement the rangeImpl method, - * in addition to those of `MapLike`. 
- * - * @author Sean McDirmid - * @author Martin Odersky - * @since 2.8 - */ -trait SortedMapLike[A, +B, +This <: SortedMapLike[A, B, This] with SortedMap[A, B]] extends Sorted[A, This] with MapLike[A, B, This] { -self => - - def firstKey : A = head._1 - def lastKey : A = last._1 - - implicit def ordering: Ordering[A] - - // XXX: implement default version - def rangeImpl(from : Option[A], until : Option[A]) : This - - override def keySet : SortedSet[A] = new DefaultKeySortedSet - - @SerialVersionUID(-38666158592954763L) - protected class DefaultKeySortedSet extends super.DefaultKeySet with SortedSet[A] { - implicit def ordering = self.ordering - override def + (elem: A): SortedSet[A] = (SortedSet[A]() ++ this + elem) - override def - (elem: A): SortedSet[A] = (SortedSet[A]() ++ this - elem) - override def rangeImpl(from : Option[A], until : Option[A]) : SortedSet[A] = { - val map = self.rangeImpl(from, until) - new map.DefaultKeySortedSet - } - override def keysIteratorFrom(start: A) = self.keysIteratorFrom(start) - } - - /** Add a key/value pair to this map. - * @param key the key - * @param value the value - * @return A new map with the new binding added to this map - */ - override def updated[B1 >: B](key: A, value: B1): SortedMap[A, B1] = this+((key, value)) - - /** Add a key/value pair to this map. - * @param kv the key/value pair - * @return A new map with the new binding added to this map - */ - def + [B1 >: B] (kv: (A, B1)): SortedMap[A, B1] - - // todo: Add generic +,-, and so on. - - /** Adds two or more elements to this collection and returns - * either the collection itself (if it is mutable), or a new collection - * with the added elements. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. 
- */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = { - var m = this + elem1 + elem2 - for (e <- elems) m = m + e - m - } - - override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) - override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} - override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p - override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v} - } - - override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) - override def iteratorFrom(start: A) = (self iteratorFrom start) map {case (k,v) => (k, f(v))} - override def keysIteratorFrom(start: A) = self keysIteratorFrom start - override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f - } - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. - */ - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = - ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) - - /** - * Creates an iterator over all the key/value pairs - * contained in this map having a key greater than or - * equal to `start` according to the ordering of - * this map. x.iteratorFrom(y) is equivalent - * to but often more efficient than x.from(y).iterator. 
- * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def iteratorFrom(start: A): Iterator[(A, B)] - /** - * Creates an iterator over all the values contained in this - * map that are associated with a key greater than or equal to `start` - * according to the ordering of this map. x.valuesIteratorFrom(y) is - * equivalent to but often more efficient than - * x.from(y).valuesIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def valuesIteratorFrom(start: A): Iterator[B] -} diff --git a/src/library/scala/collection/SortedOps.scala b/src/library/scala/collection/SortedOps.scala new file mode 100644 index 000000000000..bd034fbf14d6 --- /dev/null +++ b/src/library/scala/collection/SortedOps.scala @@ -0,0 +1,90 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +/** Base trait for sorted collections */ +trait SortedOps[A, +C] { + + def ordering: Ordering[A] + + /** Returns the first key of the collection. */ + def firstKey: A + + /** Returns the last key of the collection. */ + def lastKey: A + + /** Comparison function that orders keys. */ + @deprecated("Use ordering.compare instead", "2.13.0") + @deprecatedOverriding("Use ordering.compare instead", "2.13.0") + @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) + + /** Creates a ranged projection of this collection. Any mutations in the + * ranged projection will update this collection and vice versa. + * + * Note: keys are not guaranteed to be consistent between this collection + * and the projection. This is the case for buffers where indexing is + * relative to the projection. 
+ * + * @param from The lower-bound (inclusive) of the ranged projection. + * `None` if there is no lower bound. + * @param until The upper-bound (exclusive) of the ranged projection. + * `None` if there is no upper bound. + */ + def rangeImpl(from: Option[A], until: Option[A]): C + + /** Creates a ranged projection of this collection with both a lower-bound + * and an upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeFrom", "2.13.0") + final def from(from: A): C = rangeFrom(from) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + def rangeFrom(from: A): C = rangeImpl(Some(from), None) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + @deprecated("Use rangeUntil", "2.13.0") + final def until(until: A): C = rangeUntil(until) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def rangeUntil(until: A): C = rangeImpl(None, Some(until)) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeTo", "2.13.0") + final def to(to: A): C = rangeTo(to) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. 
+ */ + def rangeTo(to: A): C +} diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 89813171c177..37c28c260000 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,27 +10,181 @@ * additional information regarding copyright ownership. */ -package scala -package collection -import generic._ +package scala.collection + +import scala.annotation.{implicitNotFound, nowarn} +import scala.annotation.unchecked.uncheckedVariance + +/** Base type of sorted sets */ +trait SortedSet[A] extends Set[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + def unsorted: Set[A] = this + + def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedSet" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => + (ss canEqual this) && + (this.size == ss.size) && { + val i1 = this.iterator + val i2 = ss.iterator + var allEqual = true + while (allEqual && i1.hasNext) + allEqual = ordering.equiv(i1.next(), i2.next()) + allEqual + } + case _ => + super.equals(that) + } -/** A sorted set. - * - * @author Sean McDirmid - * @author Martin Odersky - * @since 2.4 - */ -trait SortedSet[A] extends Set[A] with SortedSetLike[A, SortedSet[A]] { - /** Needs to be overridden in subclasses. 
*/ - override def empty: SortedSet[A] = SortedSet.empty[A] } -/** - * @since 2.8 - */ -object SortedSet extends SortedSetFactory[SortedSet] { - def empty[A](implicit ord: Ordering[A]): immutable.SortedSet[A] = immutable.SortedSet.empty[A](ord) - def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A] - // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific - override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with SortedOps[A, C] { + + /** The companion object of this sorted set, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedIterableFactory: SortedIterableFactory[CC] + + /** Widens the type of this set to its unsorted counterpart. */ + def unsorted: Set[A] + + /** + * Creates an iterator that contains all values from this collection + * greater than or equal to `start` according to the ordering of + * this collection. x.iteratorFrom(y) is equivalent to but will usually + * be more efficient than x.from(y).iterator + * + * @param start The lower-bound (inclusive) of the iterator + */ + def iteratorFrom(start: A): Iterator[A] + + @deprecated("Use `iteratorFrom` instead.", "2.13.0") + @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) + + def firstKey: A = head + def lastKey: A = last + + /** Find the smallest element larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. 
+ */ + def minAfter(key: A): Option[A] = rangeFrom(key).headOption + + /** Find the largest element less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption + + override def min[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.min") + else if (ord == ordering) head + else if (ord isReverseOf ordering) last + else super.min[B] // need the type annotation for it to infer the correct implicit + + override def max[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.max") + else if (ord == ordering) last + else if (ord isReverseOf ordering) head + else super.max[B] // need the type annotation for it to infer the correct implicit + + def rangeTo(to: A): C = { + val i = rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + /** Builds a new sorted collection by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Map(this, f)) + + /** Builds a new sorted collection by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. 
+ */ + def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.FlatMap(this, f)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote + sortedIterableFactory.from(that match { + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + /** Builds a new sorted collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Collect(this, pf)) } + +object SortedSetOps { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." 
+ private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + + /** Specialize `WithFilter` for sorted collections + * + * @define coll sorted collection + */ + class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]]( + self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _], + p: A => Boolean + ) extends IterableOps.WithFilter[A, IterableCC](self, p) { + + def map[B : Ordering](f: A => B): CC[B] = + self.sortedIterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = + self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) + } + +} + +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet) + diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala deleted file mode 100644 index 044d881931cb..000000000000 --- a/src/library/scala/collection/SortedSetLike.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -import generic._ - -/** A template for sets which are sorted. 
- * - * @author Sean McDirmid - * @author Martin Odersky - * @since 2.8 - */ -trait SortedSetLike[A, +This <: SortedSet[A] with SortedSetLike[A, This]] extends Sorted[A, This] with SetLike[A, This] { -self => - - implicit def ordering: Ordering[A] - - override def keySet = repr - - override def firstKey: A = head - override def lastKey: A = last - - def rangeImpl(from: Option[A], until: Option[A]): This - - override def from(from: A): This = rangeImpl(Some(from), None) - override def until(until: A): This = rangeImpl(None, Some(until)) - override def range(from: A, until: A): This = rangeImpl(Some(from), Some(until)) - - override def subsetOf(that: GenSet[A]): Boolean = that match { - // TODO: It may actually be pretty rare that the guard here ever - // passes. Is this really worth keeping? If it is, we should add - // more sensible implementations of == to Ordering. - case that: SortedSet[_] if that.ordering == ordering => that.hasAll(this.iterator) - case that => super.subsetOf(that) - } - - /** - * Creates an iterator that contains all values from this collection - * greater than or equal to `start` according to the ordering of - * this collection. x.iteratorFrom(y) is equivalent to but will usually - * be more efficient than x.from(y).iterator - * - * @param start The lower-bound (inclusive) of the iterator - */ - def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start) -} diff --git a/src/library/scala/collection/Stepper.scala b/src/library/scala/collection/Stepper.scala new file mode 100644 index 000000000000..f1355e8182c3 --- /dev/null +++ b/src/library/scala/collection/Stepper.scala @@ -0,0 +1,368 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} +import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** Steppers exist to enable creating Java streams over Scala collections, see + * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections + * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements. + * + * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference + * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are + * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.). + * These enable iterating over collections holding unboxed primitives (e.g., Arrays, + * [[scala.jdk.Accumulator]]s) without boxing the elements. + * + * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized + * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.) + * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.). + * + * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive + * Steppers are converted to the corresponding primitive Java Iterators and Spliterators. + * + * @tparam A the element type of the Stepper + */ +trait Stepper[@specialized(Double, Int, Long) +A] { + /** Check if there's an element available. */ + def hasStep: Boolean + + /** Return the next element and advance the stepper */ + def nextStep(): A + + /** Split this stepper, if applicable. The elements of the current Stepper are split up between + * the resulting Stepper and the current stepper. + * + * May return `null`, in which case the current Stepper yields the same elements as before. 
+ * + * See method `trySplit` in [[java.util.Spliterator]]. + */ + def trySplit(): Stepper[A] + + /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See + * method `estimateSize` in [[java.util.Spliterator]]. + */ + def estimateSize: Long + + /** Returns a set of characteristics of this Stepper and its elements. See method + * `characteristics` in [[java.util.Spliterator]]. + */ + def characteristics: Int + + /** Returns a [[java.util.Spliterator]] corresponding to this Stepper. + * + * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning + * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]] + * (which is a `Stepper[Int]`). + */ + def spliterator[B >: A]: Spliterator[_] + + /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper. + * + * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning + * a [[java.util.PrimitiveIterator.OfInt]] (which is a `Iterator[Integer]`) in the subclass + * [[IntStepper]] (which is a `Stepper[Int]`). + */ + def javaIterator[B >: A]: JIterator[_] + + /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to + * primitive Steppers box the elements. + */ + def iterator: Iterator[A] = new AbstractIterator[A] { + def hasNext: Boolean = hasStep + def next(): A = nextStep() + } +} + +object Stepper { + /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time + * and space complexity, and that the division is likely to be reasonably even. Steppers marked + * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method + * defined in [[scala.jdk.StreamConverters]]. 
+ */ + trait EfficientSplit + + private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper") + + /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. + * This provides a basis for more efficient stream processing on unboxed values provided that the original source + * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided + * (see for example IntArrayStepper and WidenedByteArrayStepper). */ + + private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingDoubleStepper(s) + } + } + + private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingIntStepper(s) + } + } + + private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): LongStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingLongStepper(s) + } + } + + private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = 
st.trySplit() + if (s == null) null else new UnboxingByteStepper(s) + } + } + + private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingCharStepper(s) + } + } + + private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingShortStepper(s) + } + } + + private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingFloatStepper(s) + } + } +} + +/** A Stepper for arbitrary element types. See [[Stepper]]. 
*/ +trait AnyStepper[+A] extends Stepper[A] { + def trySplit(): AnyStepper[A] + + def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + + def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { + def hasNext: Boolean = hasStep + def next(): B = nextStep() + } +} + +object AnyStepper { + class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + def tryAdvance(c: Consumer[_ >: A]): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + def trySplit(): Spliterator[A] = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: A]): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + } + + def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) + def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit + + def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) + def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit + + def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) + def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit + + private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Double] = { + val s = st.trySplit() + if (s == null) null else new BoxedDoubleStepper(s) + } 
+ } + + private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Int] = { + val s = st.trySplit() + if (s == null) null else new BoxedIntStepper(s) + } + } + + private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Long] = { + val s = st.trySplit() + if (s == null) null else new BoxedLongStepper(s) + } + } +} + +/** A Stepper for Ints. See [[Stepper]]. */ +trait IntStepper extends Stepper[Int] { + def trySplit(): IntStepper + + def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + + def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { + def hasNext: Boolean = hasStep + def nextInt(): Int = nextStep() + } +} +object IntStepper { + class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + def tryAdvance(c: IntConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { + case ic: IntConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfInt = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def 
forEachRemaining(c: IntConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { + case ic: IntConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Doubles. See [[Stepper]]. */ +trait DoubleStepper extends Stepper[Double] { + def trySplit(): DoubleStepper + + def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def hasNext: Boolean = hasStep + def nextDouble(): Double = nextStep() + } +} + +object DoubleStepper { + class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + def tryAdvance(c: DoubleConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { + case ic: DoubleConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfDouble = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: DoubleConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { + case ic: DoubleConsumer => 
forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Longs. See [[Stepper]]. */ +trait LongStepper extends Stepper[Long] { + def trySplit(): LongStepper + + def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + + def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def hasNext: Boolean = hasStep + def nextLong(): Long = nextStep() + } +} + +object LongStepper { + class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + def tryAdvance(c: LongConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { + case ic: LongConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfLong = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: LongConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { + case ic: LongConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } + } + } +} diff --git a/src/library/scala/collection/StepperShape.scala b/src/library/scala/collection/StepperShape.scala new file mode 100644 index 000000000000..db8c00b47992 --- /dev/null +++ b/src/library/scala/collection/StepperShape.scala @@ 
-0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly + * specialized Stepper `S` according to the element type `T`. + */ +sealed trait StepperShape[-T, S <: Stepper[_]] { + /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ + def shape: StepperShape.Shape + + /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. */ + def seqUnbox(st: AnyStepper[T]): S + + /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. 
*/ + def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit +} + +object StepperShape extends StepperShapeLowPriority1 { + class Shape private[StepperShape] (private val s: Int) extends AnyVal + + // reference + val ReferenceShape = new Shape(0) + + // primitive + val IntShape = new Shape(1) + val LongShape = new Shape(2) + val DoubleShape = new Shape(3) + + // widening + val ByteShape = new Shape(4) + val ShortShape = new Shape(5) + val CharShape = new Shape(6) + val FloatShape = new Shape(7) + + implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { + def shape = IntShape + def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) + def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit + } + implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] + + implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { + def shape = LongShape + def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) + def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit + } + implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] + + implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { + def shape = DoubleShape + def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) + def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit + } + implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] 
= doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] + + implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { + def shape = ByteShape + def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) + def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit + } + implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] + + implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { + def shape = ShortShape + def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) + def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit + } + implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] + + implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { + def shape = CharShape + def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) + def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit + } + implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] + + implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { + def shape = FloatShape + def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) + def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit + } + 
implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] +} + +trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { + implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] +} + +trait StepperShapeLowPriority2 { + implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] + + protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { + def shape = StepperShape.ReferenceShape + def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st + def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st + } +} \ No newline at end of file diff --git a/src/library/scala/collection/StrictOptimizedIterableOps.scala b/src/library/scala/collection/StrictOptimizedIterableOps.scala new file mode 100644 index 000000000000..3260c1bc262e --- /dev/null +++ b/src/library/scala/collection/StrictOptimizedIterableOps.scala @@ -0,0 +1,284 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.runtime.Statics + +/** + * Trait that overrides iterable operations to take advantage of strict builders. 
+ * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedIterableOps[+A, +CC[_], +C] + extends Any + with IterableOps[A, CC, C] { + + // Optimized, push-based version of `partition` + override def partition(p: A => Boolean): (C, C) = { + val l, r = newSpecificBuilder + iterator.foreach(x => (if (p(x)) l else r) += x) + (l.result(), r.result()) + } + + override def span(p: A => Boolean): (C, C) = { + val first = newSpecificBuilder + val second = newSpecificBuilder + val it = iterator + var inFirst = true + while (it.hasNext && inFirst) { + val a = it.next() + if (p(a)) { + first += a + } else { + second += a + inFirst = false + } + } + while (it.hasNext) { + second += it.next() + } + (first.result(), second.result()) + } + + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first = iterableFactory.newBuilder[A1] + val second = iterableFactory.newBuilder[A2] + foreach { a => + val pair = asPair(a) + first += pair._1 + second += pair._2 + } + (first.result(), second.result()) + } + + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val b1 = iterableFactory.newBuilder[A1] + val b2 = iterableFactory.newBuilder[A2] + val b3 = iterableFactory.newBuilder[A3] + + foreach { xyz => + val triple = asTriple(xyz) + b1 += triple._1 + b2 += triple._2 + b3 += triple._3 + } + (b1.result(), b2.result(), b3.result()) + } + + // The implementations of the following operations are not fundamentally different from + // the view-based implementations, but they turn out to be slightly faster because + // a couple of indirection levels are removed + + override def map[B](f: A => B): CC[B] = + strictOptimizedMap(iterableFactory.newBuilder, f) + + /** + * @param b Builder to use to build the resulting collection + * @param f Element transformation function + * @tparam B Type of elements of the resulting collection (e.g. 
`String`) + * @tparam C2 Type of the resulting collection (e.g. `List[String]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedMap[B, C2](b: mutable.Builder[B, C2], f: A => B): C2 = { + val it = iterator + while (it.hasNext) { + b += f(it.next()) + } + b.result() + } + + override def flatMap[B](f: A => IterableOnce[B]): CC[B] = + strictOptimizedFlatMap(iterableFactory.newBuilder, f) + + /** + * @param b Builder to use to build the resulting collection + * @param f Element transformation function + * @tparam B Type of elements of the resulting collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. `List[String]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]): C2 = { + val it = iterator + while (it.hasNext) { + b ++= f(it.next()) + } + b.result() + } + + /** + * @param that Elements to concatenate to this collection + * @param b Builder to use to build the resulting collection + * @tparam B Type of elements of the resulting collections (e.g. `Int`) + * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B], b: mutable.Builder[B, C2]): C2 = { + b ++= this + b ++= that + b.result() + } + + override def collect[B](pf: PartialFunction[A, B]): CC[B] = + strictOptimizedCollect(iterableFactory.newBuilder, pf) + + /** + * @param b Builder to use to build the resulting collection + * @param pf Element transformation partial function + * @tparam B Type of elements of the resulting collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. 
`List[String]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]): C2 = { + val marker = Statics.pfMarker + val it = iterator + while (it.hasNext) { + val elem = it.next() + val v = pf.applyOrElse(elem, ((x: A) => marker).asInstanceOf[Function[A, B]]) + if (marker ne v.asInstanceOf[AnyRef]) b += v + } + b.result() + } + + override def flatten[B](implicit toIterableOnce: A => IterableOnce[B]): CC[B] = + strictOptimizedFlatten(iterableFactory.newBuilder) + + /** + * @param b Builder to use to build the resulting collection + * @param toIterableOnce Evidence that `A` can be seen as an `IterableOnce[B]` + * @tparam B Type of elements of the resulting collection (e.g. `Int`) + * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A => IterableOnce[B]): C2 = { + val it = iterator + while (it.hasNext) { + b ++= toIterableOnce(it.next()) + } + b.result() + } + + override def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, iterableFactory.newBuilder[(A, B)]) + + /** + * @param that Collection to zip with this collection + * @param b Builder to use to build the resulting collection + * @tparam B Type of elements of the second collection (e.g. `String`) + * @tparam C2 Type of the resulting collection (e.g. 
`List[(Int, String)]`) + * @return The resulting collection + */ + @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B], b: mutable.Builder[(A, B), C2]): C2 = { + val it1 = iterator + val it2 = that.iterator + while (it1.hasNext && it2.hasNext) { + b += ((it1.next(), it2.next())) + } + b.result() + } + + override def zipWithIndex: CC[(A @uncheckedVariance, Int)] = { + val b = iterableFactory.newBuilder[(A, Int)] + var i = 0 + val it = iterator + while (it.hasNext) { + b += ((it.next(), i)) + i += 1 + } + b.result() + } + + override def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = { + val b = iterableFactory.newBuilder[B] + b.sizeHint(this, delta = 0) + var acc = z + b += acc + val it = iterator + while (it.hasNext) { + acc = op(acc, it.next()) + b += acc + } + b.result() + } + + override def filter(pred: A => Boolean): C = filterImpl(pred, isFlipped = false) + + override def filterNot(pred: A => Boolean): C = filterImpl(pred, isFlipped = true) + + protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): C = { + val b = newSpecificBuilder + val it = iterator + while (it.hasNext) { + val elem = it.next() + if (pred(elem) != isFlipped) { + b += elem + } + } + b.result() + } + + // Optimized, push-based version of `partitionMap` + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val l = iterableFactory.newBuilder[A1] + val r = iterableFactory.newBuilder[A2] + foreach { x => + f(x) match { + case Left(x1) => l += x1 + case Right(x2) => r += x2 + } + } + (l.result(), r.result()) + } + + // Optimization avoids creation of second collection + override def tapEach[U](f: A => U): C = { + foreach(f) + coll + } + + /** A collection containing the last `n` elements of this collection. 
+ * $willForceEvaluation + */ + override def takeRight(n: Int): C = { + val b = newSpecificBuilder + b.sizeHintBounded(n, toIterable: @nowarn("cat=deprecation")) + val lead = iterator drop n + val it = iterator + while (lead.hasNext) { + lead.next() + it.next() + } + while (it.hasNext) b += it.next() + b.result() + } + + /** The rest of the collection without its `n` last elements. For + * linear, immutable collections this should avoid making a copy. + * $willForceEvaluation + */ + override def dropRight(n: Int): C = { + val b = newSpecificBuilder + if (n >= 0) b.sizeHint(this, delta = -n) + val lead = iterator drop n + val it = iterator + while (lead.hasNext) { + b += it.next() + lead.next() + } + b.result() + } +} diff --git a/src/library/scala/collection/StrictOptimizedMapOps.scala b/src/library/scala/collection/StrictOptimizedMapOps.scala new file mode 100644 index 000000000000..a87ba3ee9e20 --- /dev/null +++ b/src/library/scala/collection/StrictOptimizedMapOps.scala @@ -0,0 +1,48 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends MapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + strictOptimizedMap(mapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + strictOptimizedFlatMap(mapFactory.newBuilder, f) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(suffix, mapFactory.newBuilder) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + strictOptimizedCollect(mapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val b = mapFactory.newBuilder[K, V1] + b ++= this + b += elem1 + b += elem2 + if (elems.nonEmpty) b ++= elems + b.result() + } +} diff --git a/src/library/scala/collection/StrictOptimizedSeqOps.scala b/src/library/scala/collection/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..a131498d8b28 --- /dev/null +++ b/src/library/scala/collection/StrictOptimizedSeqOps.scala @@ -0,0 +1,108 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides operations on sequences in order + * to take advantage of strict builders. 
+ */ +trait StrictOptimizedSeqOps [+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A => B): C = { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B] + val it = this.iterator + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next + } + builder.result() + } + + override def prepended[B >: A](elem: B): CC[B] = { + val b = iterableFactory.newBuilder[B] + b.sizeHint(this, delta = 1) + b += elem + b ++= this + b.result() + } + + override def appended[B >: A](elem: B): CC[B] = { + val b = iterableFactory.newBuilder[B] + b.sizeHint(this, delta = 1) + b ++= this + b += elem + b.result() + } + + override def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = + strictOptimizedConcat(suffix, iterableFactory.newBuilder) + + override def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = { + val b = iterableFactory.newBuilder[B] + b ++= prefix + b ++= this + b.result() + } + + override def padTo[B >: A](len: Int, elem: B): CC[B] = { + val b = iterableFactory.newBuilder[B] + val L = size + b.sizeHint(math.max(L, len)) + var diff = len - L + b ++= this + while (diff > 0) { + b += elem + diff -= 1 + } + b.result() + } + + override def diff[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) coll + else { + val occ = occCounts(that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => { + b.addOne(x) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + } + b.result() + } + + override def intersect[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) empty + else { + val occ = occCounts(that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => None + case Some(n) => { + b.addOne(x) + if (n == 1) None else Some(n - 1) + } + } + } + b.result() + } +} diff --git a/src/library/scala/collection/StrictOptimizedSetOps.scala 
b/src/library/scala/collection/StrictOptimizedSetOps.scala new file mode 100644 index 000000000000..39e585324f45 --- /dev/null +++ b/src/library/scala/collection/StrictOptimizedSetOps.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides set operations to take advantage of strict builders. + * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: IterableOnce[A]): C = + strictOptimizedConcat(that, newSpecificBuilder) + +} diff --git a/src/library/scala/collection/StrictOptimizedSortedMapOps.scala b/src/library/scala/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..8317913c6d1b --- /dev/null +++ b/src/library/scala/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/src/library/scala/collection/StrictOptimizedSortedSetOps.scala b/src/library/scala/collection/StrictOptimizedSortedSetOps.scala new file mode 100644 index 000000000000..c01b0d8466f3 --- /dev/null +++ b/src/library/scala/collection/StrictOptimizedSortedSetOps.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.implicitNotFound +import scala.annotation.unchecked.uncheckedVariance + +/** + * Trait that overrides sorted set operations to take advantage of strict builders. + * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { + + override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedMap(sortedIterableFactory.newBuilder, f) + + override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) + + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) + + override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) + +} diff --git a/src/library/scala/collection/StringOps.scala b/src/library/scala/collection/StringOps.scala new file mode 100644 index 000000000000..f641c792156a --- /dev/null +++ b/src/library/scala/collection/StringOps.scala @@ -0,0 +1,1650 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.{StringBuilder => JStringBuilder} + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.{CharStringStepper, CodePointStringStepper} +import scala.collection.immutable.{ArraySeq, WrappedString} +import scala.collection.mutable.StringBuilder +import scala.math.{ScalaNumber, max, min} +import scala.reflect.ClassTag +import scala.util.matching.Regex + +object StringOps { + // just statics for companion class. + private final val LF = 0x0A + private final val CR = 0x0D + + private class StringIterator(private[this] val s: String) extends AbstractIterator[Char] { + private[this] var pos = 0 + def hasNext: Boolean = pos < s.length + def next(): Char = { + if (pos >= s.length) Iterator.empty.next() + val r = s.charAt(pos) + pos += 1 + r + } + } + + private class ReverseIterator(private[this] val s: String) extends AbstractIterator[Char] { + private[this] var pos = s.length-1 + def hasNext: Boolean = pos >= 0 + def next(): Char = { + if (pos < 0) Iterator.empty.next() + val r = s.charAt(pos) + pos -= 1 + r + } + } + + private class GroupedIterator(s: String, groupSize: Int) extends AbstractIterator[String] { + private[this] var pos = 0 + def hasNext: Boolean = pos < s.length + def next(): String = { + if(pos >= s.length) Iterator.empty.next() + val r = s.slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** A lazy filtered string. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter(p: Char => Boolean, s: String) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: Char => U): Unit = { + val len = s.length + var i = 0 + while(i < len) { + val x = s.charAt(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new collection by applying a function to all chars of this filtered string. 
+ * + * @param f the function to apply to each char. + * @return a new collection resulting from applying the given function + * `f` to each char of this string and collecting the results. + */ + def map[B](f: Char => B): immutable.IndexedSeq[B] = { + val len = s.length + val b = immutable.IndexedSeq.newBuilder[B] + b.sizeHint(len) + var i = 0 + while (i < len) { + val x = s.charAt(i) + if(p(x)) b.addOne(f(x)) + i += 1 + } + b.result() + } + + /** Builds a new string by applying a function to all chars of this filtered string. + * + * @param f the function to apply to each char. + * @return a new string resulting from applying the given function + * `f` to each char of this string and collecting the results. + */ + def map(f: Char => Char): String = { + val len = s.length + val sb = new JStringBuilder(len) + var i = 0 + while (i < len) { + val x = s.charAt(i) + if(p(x)) sb.append(f(x)) + i += 1 + } + sb.toString + } + + /** Builds a new collection by applying a function to all chars of this filtered string + * and using the elements of the resulting collections. + * + * @param f the function to apply to each char. + * @return a new collection resulting from applying the given collection-valued function + * `f` to each char of this string and concatenating the results. + */ + def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { + val len = s.length + val b = immutable.IndexedSeq.newBuilder[B] + var i = 0 + while (i < len) { + val x = s.charAt(i) + if(p(x)) b.addAll(f(x)) + i += 1 + } + b.result() + } + + /** Builds a new string by applying a function to all chars of this filtered string + * and using the elements of the resulting Strings. + * + * @param f the function to apply to each char. + * @return a new string resulting from applying the given string-valued function + * `f` to each char of this string and concatenating the results. 
+ */ + def flatMap(f: Char => String): String = { + val len = s.length + val sb = new JStringBuilder + var i = 0 + while (i < len) { + val x = s.charAt(i) + if(p(x)) sb.append(f(x)) + i += 1 + } + sb.toString + } + + /** Creates a new non-strict filter which combines this filter with the given predicate. */ + def withFilter(q: Char => Boolean): WithFilter = new WithFilter(a => p(a) && q(a), s) + } + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** Provides extension methods for strings. + * + * Some of these methods treat strings as a plain collection of [[Char]]s + * without any regard for Unicode handling. Unless the user takes Unicode + * handling in to account or makes sure the strings don't require such handling, + * these methods may result in unpaired or invalidly paired surrogate code + * units. + * + * @define unicodeunaware This method treats a string as a plain sequence of + * Char code units and makes no attempt to keep + * surrogate pairs or codepoint sequences together. + * The user is responsible for making sure such cases + * are handled correctly. Failing to do so may result in + * an invalid Unicode string. + */ +final class StringOps(private val s: String) extends AnyVal { + import StringOps._ + + @inline def view: StringView = new StringView(s) + + @inline def size: Int = s.length + + @inline def knownSize: Int = s.length + + /** Get the char at the specified index. */ + @inline def apply(i: Int): Char = s.charAt(i) + + def sizeCompare(otherSize: Int): Int = Integer.compare(s.length, otherSize) + + def lengthCompare(len: Int): Int = Integer.compare(s.length, len) + + def sizeIs: Int = s.length + + def lengthIs: Int = s.length + + /** Builds a new collection by applying a function to all chars of this string. + * + * @param f the function to apply to each char. 
+ * @return a new collection resulting from applying the given function + * `f` to each char of this string and collecting the results. + */ + def map[B](f: Char => B): immutable.IndexedSeq[B] = { + val len = s.length + val dst = new Array[AnyRef](len) + var i = 0 + while (i < len) { + dst(i) = f(s charAt i).asInstanceOf[AnyRef] + i += 1 + } + new ArraySeq.ofRef(dst).asInstanceOf[immutable.IndexedSeq[B]] + } + + /** Builds a new string by applying a function to all chars of this string. + * + * @param f the function to apply to each char. + * @return a new string resulting from applying the given function + * `f` to each char of this string and collecting the results. + */ + def map(f: Char => Char): String = { + val len = s.length + val dst = new Array[Char](len) + var i = 0 + while (i < len) { + dst(i) = f(s charAt i) + i += 1 + } + new String(dst) + } + + /** Builds a new collection by applying a function to all chars of this string + * and using the elements of the resulting collections. + * + * @param f the function to apply to each char. + * @return a new collection resulting from applying the given collection-valued function + * `f` to each char of this string and concatenating the results. + */ + def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { + val len = s.length + val b = immutable.IndexedSeq.newBuilder[B] + var i = 0 + while (i < len) { + b.addAll(f(s.charAt(i))) + i += 1 + } + b.result() + } + + /** Builds a new string by applying a function to all chars of this string + * and using the elements of the resulting strings. + * + * @param f the function to apply to each char. + * @return a new string resulting from applying the given string-valued function + * `f` to each char of this string and concatenating the results. 
+ */ + def flatMap(f: Char => String): String = { + val len = s.length + val sb = new JStringBuilder + var i = 0 + while (i < len) { + sb append f(s.charAt(i)) + i += 1 + } + sb.toString + } + + /** Builds a new String by applying a partial function to all chars of this String + * on which the function is defined. + * + * @param pf the partial function which filters and maps the String. + * @return a new String resulting from applying the given partial function + * `pf` to each char on which it is defined and collecting the results. + */ + def collect(pf: PartialFunction[Char, Char]): String = { + val fallback: Any => Any = StringOps.fallback + var i = 0 + val b = new StringBuilder + while (i < s.length) { + val v = pf.applyOrElse(s.charAt(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[Char]) + i += 1 + } + b.result() + } + + /** Builds a new collection by applying a partial function to all chars of this String + * on which the function is defined. + * + * @param pf the partial function which filters and maps the String. + * @tparam B the element type of the returned collection. + * @return a new collection resulting from applying the given partial function + * `pf` to each char on which it is defined and collecting the results. + */ + def collect[B](pf: PartialFunction[Char, B]): immutable.IndexedSeq[B] = { + val fallback: Any => Any = StringOps.fallback + var i = 0 + val b = immutable.IndexedSeq.newBuilder[B] + while (i < s.length) { + val v = pf.applyOrElse(s.charAt(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Returns a new collection containing the chars from this string followed by the elements from the + * right hand operand. + * + * @param suffix the collection to append. + * @return a new collection which contains all chars + * of this string followed by all elements of `suffix`. 
+ */ + def concat[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = { + val b = immutable.IndexedSeq.newBuilder[B] + val k = suffix.knownSize + b.sizeHint(s.length + (if(k >= 0) k else 16)) + b.addAll(new WrappedString(s)) + b.addAll(suffix) + b.result() + } + + /** Returns a new string containing the chars from this string followed by the chars from the + * right hand operand. + * + * @param suffix the collection to append. + * @return a new string which contains all chars + * of this string followed by all chars of `suffix`. + */ + def concat(suffix: IterableOnce[Char]): String = { + val k = suffix.knownSize + val sb = new JStringBuilder(s.length + (if(k >= 0) k else 16)) + sb.append(s) + for (ch <- suffix.iterator) sb.append(ch) + sb.toString + } + + /** Returns a new string containing the chars from this string followed by the chars from the + * right hand operand. + * + * @param suffix the string to append. + * @return a new string which contains all chars + * of this string followed by all chars of `suffix`. + */ + @inline def concat(suffix: String): String = s + suffix + + /** Alias for `concat` */ + @inline def ++[B >: Char](suffix: Iterable[B]): immutable.IndexedSeq[B] = concat(suffix) + + /** Alias for `concat` */ + @inline def ++(suffix: IterableOnce[Char]): String = concat(suffix) + + /** Alias for `concat` */ + def ++(xs: String): String = concat(xs) + + /** Returns a collection with an element appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @return a collection consisting of + * this string followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. 
+ */ + def padTo[B >: Char](len: Int, elem: B): immutable.IndexedSeq[B] = { + val sLen = s.length + if (sLen >= len) new WrappedString(s) else { + val b = immutable.IndexedSeq.newBuilder[B] + b.sizeHint(len) + b.addAll(new WrappedString(s)) + var i = sLen + while (i < len) { + b.addOne(elem) + i += 1 + } + b.result() + } + } + + /** Returns a string with a char appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @return a string consisting of + * this string followed by the minimal number of occurrences of `elem` so + * that the resulting string has a length of at least `len`. + */ + def padTo(len: Int, elem: Char): String = { + val sLen = s.length + if (sLen >= len) s else { + val sb = new JStringBuilder(len) + sb.append(s) + // With JDK 11, this can written as: + // sb.append(String.valueOf(elem).repeat(len - sLen)) + var i = sLen + while (i < len) { + sb.append(elem) + i += 1 + } + sb.toString + } + } + + /** A copy of the string with an element prepended */ + def prepended[B >: Char](elem: B): immutable.IndexedSeq[B] = { + val b = immutable.IndexedSeq.newBuilder[B] + b.sizeHint(s.length + 1) + b.addOne(elem) + b.addAll(new WrappedString(s)) + b.result() + } + + /** Alias for `prepended` */ + @inline def +: [B >: Char] (elem: B): immutable.IndexedSeq[B] = prepended(elem) + + /** A copy of the string with an char prepended */ + def prepended(c: Char): String = + new JStringBuilder(s.length + 1).append(c).append(s).toString + + /** Alias for `prepended` */ + @inline def +: (c: Char): String = prepended(c) + + /** A copy of the string with all elements from a collection prepended */ + def prependedAll[B >: Char](prefix: IterableOnce[B]): immutable.IndexedSeq[B] = { + val b = immutable.IndexedSeq.newBuilder[B] + val k = prefix.knownSize + b.sizeHint(s.length + (if(k >= 0) k else 16)) + b.addAll(prefix) + b.addAll(new WrappedString(s)) + b.result() + } + + /** Alias for `prependedAll` */ + 
  @inline def ++: [B >: Char] (prefix: IterableOnce[B]): immutable.IndexedSeq[B] = prependedAll(prefix)

  /** A copy of the string with another string prepended */
  def prependedAll(prefix: String): String = prefix + s

  /** Alias for `prependedAll` */
  @inline def ++: (prefix: String): String = prependedAll(prefix)

  /** A copy of the string with an element appended */
  def appended[B >: Char](elem: B): immutable.IndexedSeq[B] = {
    val b = immutable.IndexedSeq.newBuilder[B]
    b.sizeHint(s.length + 1)
    b.addAll(new WrappedString(s))
    b.addOne(elem)
    b.result()
  }

  /** Alias for `appended` */
  @inline def :+ [B >: Char](elem: B): immutable.IndexedSeq[B] = appended(elem)

  /** A copy of the string with a char appended */
  def appended(c: Char): String =
    new JStringBuilder(s.length + 1).append(s).append(c).toString

  /** Alias for `appended` */
  @inline def :+ (c: Char): String = appended(c)

  /** A copy of the string with all elements from a collection appended */
  @inline def appendedAll[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] =
    concat(suffix)

  /** Alias for `appendedAll` */
  @inline def :++ [B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] =
    concat(suffix)

  /** A copy of the string with another string appended */
  @inline def appendedAll(suffix: String): String = s + suffix

  /** Alias for `appendedAll` */
  @inline def :++ (suffix: String): String = s + suffix

  /** Produces a new collection where a slice of characters in this string is replaced by another collection.
   *
   *  Patching at negative indices is the same as patching starting at 0.
   *  Patching at indices at or larger than the length of the original string appends the patch to the end.
   *  If more values are replaced than actually exist, the excess is ignored.
+ * + * @param from the index of the first replaced char + * @param other the replacement collection + * @param replaced the number of chars to drop in the original string + * @return a new collection consisting of all chars of this string + * except that `replaced` chars starting from `from` are replaced + * by `other`. + */ + def patch[B >: Char](from: Int, other: IterableOnce[B], replaced: Int): immutable.IndexedSeq[B] = { + val len = s.length + @inline def slc(off: Int, length: Int): WrappedString = + new WrappedString(s.substring(off, off+length)) + val b = immutable.IndexedSeq.newBuilder[B] + val k = other.knownSize + if(k >= 0) b.sizeHint(len + k - replaced) + val chunk1 = if(from > 0) min(from, len) else 0 + if(chunk1 > 0) b.addAll(slc(0, chunk1)) + b ++= other + val remaining = len - chunk1 - replaced + if(remaining > 0) b.addAll(slc(len - remaining, remaining)) + b.result() + } + + /** Produces a new collection where a slice of characters in this string is replaced by another collection. + * + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original string appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from the index of the first replaced char + * @param other the replacement string + * @param replaced the number of chars to drop in the original string + * @return a new string consisting of all chars of this string + * except that `replaced` chars starting from `from` are replaced + * by `other`. + * @note $unicodeunaware + */ + def patch(from: Int, other: IterableOnce[Char], replaced: Int): String = + patch(from, other.iterator.mkString, replaced) + + /** Produces a new string where a slice of characters in this string is replaced by another string. + * + * Patching at negative indices is the same as patching starting at 0. 
+ * Patching at indices at or larger than the length of the original string appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from the index of the first replaced char + * @param other the replacement string + * @param replaced the number of chars to drop in the original string + * @return a new string consisting of all chars of this string + * except that `replaced` chars starting from `from` are replaced + * by `other`. + * @note $unicodeunaware + */ + def patch(from: Int, other: String, replaced: Int): String = { + val len = s.length + val sb = new JStringBuilder(len + other.size - replaced) + val chunk1 = if(from > 0) min(from, len) else 0 + if(chunk1 > 0) sb.append(s, 0, chunk1) + sb.append(other) + val remaining = len - chunk1 - replaced + if(remaining > 0) sb.append(s, len - remaining, len) + sb.toString + } + + /** A copy of this string with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new string which is a copy of this string with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + * @note $unicodeunaware + */ + def updated(index: Int, elem: Char): String = { + val sb = new JStringBuilder(s.length).append(s) + sb.setCharAt(index, elem) + sb.toString + } + + /** Tests whether this string contains the given character. + * + * @param elem the character to test. + * @return `true` if this string has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: Char): Boolean = s.indexOf(elem) >= 0 + + /** Displays all elements of this string in a string using start, end, and + * separator strings. + * + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. 
+ * @return The resulting string + * begins with the string `start` and ends with the string + * `end`. Inside, the string chars of this string are separated by + * the string `sep`. + * @note $unicodeunaware + */ + final def mkString(start: String, sep: String, end: String): String = + addString(new StringBuilder(), start, sep, end).toString + + /** Displays all elements of this string in a string using a separator string. + * + * @param sep the separator string. + * @return In the resulting string + * the chars of this string are separated by the string `sep`. + * @note $unicodeunaware + */ + @inline final def mkString(sep: String): String = + if (sep.isEmpty || s.length < 2) s + else mkString("", sep, "") + + /** Returns this string */ + @inline final def mkString: String = s + + /** Appends this string to a string builder. */ + @inline final def addString(b: StringBuilder): b.type = b.append(s) + + /** Appends this string to a string builder using a separator string. */ + @inline final def addString(b: StringBuilder, sep: String): b.type = + addString(b, "", sep, "") + + /** Appends this string to a string builder using start, end and separator strings. */ + final def addString(b: StringBuilder, start: String, sep: String, end: String): b.type = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val len = s.length + if (len != 0) { + if (sep.isEmpty) jsb.append(s) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(s.charAt(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(s.charAt(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + b + } + + /** Selects an interval of elements. The returned string is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this string. + * @param until the lowest index to EXCLUDE from this string. 
+ * @return a string containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this string. + * @note $unicodeunaware + */ + def slice(from: Int, until: Int): String = { + val start = from max 0 + val end = until min s.length + + if (start >= end) "" + else s.substring(start, end) + } + + // Note: String.repeat is added in JDK 11. + /** Return the current string concatenated `n` times. + */ + def *(n: Int): String = + if (n <= 0) { + "" + } else { + val sb = new JStringBuilder(s.length * n) + var i = 0 + while (i < n) { + sb.append(s) + i += 1 + } + sb.toString + } + + @inline private def isLineBreak(c: Char) = c == CR || c == LF + @inline private def isLineBreak2(c0: Char, c: Char) = c0 == CR && c == LF + + /** Strip the trailing line separator from this string if there is one. + * The line separator is taken as `"\n"`, `"\r"`, or `"\r\n"`. + */ + def stripLineEnd: String = + if (s.isEmpty) s + else { + var i = s.length - 1 + val last = apply(i) + if (!isLineBreak(last)) s + else { + if (i > 0 && isLineBreak2(apply(i - 1), last)) i -= 1 + s.substring(0, i) + } + } + + /** Return an iterator of all lines embedded in this string, + * including trailing line separator characters. + * + * The empty string yields an empty iterator. + */ + def linesWithSeparators: Iterator[String] = linesSeparated(stripped = false) + + /** Lines in this string, where a line is terminated by + * `"\n"`, `"\r"`, `"\r\n"`, or the end of the string. + * A line may be empty. Line terminators are removed. 
+ */ + def linesIterator: Iterator[String] = linesSeparated(stripped = true) + + // if `stripped`, exclude the line separators + private def linesSeparated(stripped: Boolean): Iterator[String] = new AbstractIterator[String] { + def hasNext: Boolean = !done + def next(): String = if (done) Iterator.empty.next() else advance() + + private[this] val len = s.length + private[this] var index = 0 + @inline private def done = index >= len + private def advance(): String = { + val start = index + while (!done && !isLineBreak(apply(index))) index += 1 + var end = index + if (!done) { + val c = apply(index) + index += 1 + if (!done && isLineBreak2(c, apply(index))) index += 1 + if (!stripped) end = index + } + s.substring(start, end) + } + } + + /** Return all lines in this string in an iterator, excluding trailing line + * end characters; i.e., apply `.stripLineEnd` to all lines + * returned by `linesWithSeparators`. + */ + @deprecated("Use `linesIterator`, because JDK 11 adds a `lines` method on String", "2.13.0") + def lines: Iterator[String] = linesIterator + + /** Returns this string with first character converted to upper case. + * If the first character of the string is capitalized, it is returned unchanged. + * This method does not convert characters outside the Basic Multilingual Plane (BMP). + */ + def capitalize: String = + if (s == null || s.length == 0 || !s.charAt(0).isLower) s + else updated(0, s.charAt(0).toUpper) + + /** Returns this string with the given `prefix` stripped. If this string does not + * start with `prefix`, it is returned unchanged. + */ + def stripPrefix(prefix: String) = + if (s startsWith prefix) s.substring(prefix.length) + else s + + /** Returns this string with the given `suffix` stripped. If this string does not + * end with `suffix`, it is returned unchanged. 
+ */ + def stripSuffix(suffix: String) = + if (s endsWith suffix) s.substring(0, s.length - suffix.length) + else s + + /** Replace all literal occurrences of `literal` with the literal string `replacement`. + * This method is equivalent to [[java.lang.String#replace(CharSequence,CharSequence)]]. + * + * @param literal the string which should be replaced everywhere it occurs + * @param replacement the replacement string + * @return the resulting string + */ + @deprecated("Use `s.replace` as an exact replacement", "2.13.2") + def replaceAllLiterally(literal: String, replacement: String): String = s.replace(literal, replacement) + + /** For every line in this string: + * + * Strip a leading prefix consisting of blanks or control characters + * followed by `marginChar` from the line. + */ + def stripMargin(marginChar: Char): String = { + val sb = new JStringBuilder(s.length) + for (line <- linesWithSeparators) { + val len = line.length + var index = 0 + while (index < len && line.charAt(index) <= ' ') index += 1 + val stripped = + if (index < len && line.charAt(index) == marginChar) line.substring(index + 1) + else line + sb.append(stripped) + } + sb.toString + } + + /** For every line in this string: + * + * Strip a leading prefix consisting of blanks or control characters + * followed by `|` from the line. + */ + def stripMargin: String = stripMargin('|') + + private[this] def escape(ch: Char): String = if ( + (ch >= 'a') && (ch <= 'z') || + (ch >= 'A') && (ch <= 'Z') || + (ch >= '0' && ch <= '9')) ch.toString + else "\\" + ch + + /** Split this string around the separator character + * + * If this string is the empty string, returns an array of strings + * that contains a single empty string. 
+ * + * If this string is not the empty string, returns an array containing + * the substrings terminated by the start of the string, the end of the + * string or the separator character, excluding empty trailing substrings + * + * If the separator character is a surrogate character, only split on + * matching surrogate characters if they are not part of a surrogate pair + * + * The behaviour follows, and is implemented in terms of String.split(re: String) + * + * + * @example {{{ + * "a.b".split('.') //returns Array("a", "b") + * + * //splitting the empty string always returns the array with a single + * //empty string + * "".split('.') //returns Array("") + * + * //only trailing empty substrings are removed + * "a.".split('.') //returns Array("a") + * ".a.".split('.') //returns Array("", "a") + * "..a..".split('.') //returns Array("", "", "a") + * + * //all parts are empty and trailing + * ".".split('.') //returns Array() + * "..".split('.') //returns Array() + * + * //surrogate pairs + * val high = 0xD852.toChar + * val low = 0xDF62.toChar + * val highstring = high.toString + * val lowstring = low.toString + * + * //well-formed surrogate pairs are not split + * val highlow = highstring + lowstring + * highlow.split(high) //returns Array(highlow) + * + * //bare surrogate characters are split + * val bare = "_" + highstring + "_" + * bare.split(high) //returns Array("_", "_") + * + * }}} + * + * @param separator the character used as a delimiter + */ + def split(separator: Char): Array[String] = s.split(escape(separator)) + + @throws(classOf[java.util.regex.PatternSyntaxException]) + def split(separators: Array[Char]): Array[String] = { + val re = separators.foldLeft("[")(_+escape(_)) + "]" + s.split(re) + } + + /** You can follow a string with `.r`, turning it into a `Regex`. E.g. + * + * `"""A\w*""".r` is the regular expression for ASCII-only identifiers starting with `A`. 
   *
   *  `"""(?<month>\d\d)-(?<day>\d\d)-(?<year>\d\d\d\d)""".r` matches dates
   *  and provides its subcomponents through groups named "month", "day" and
   *  "year".
   */
  def r: Regex = new Regex(s)

  /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`,
   *  with group names g1 through gn.
   *
   *  `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates
   *  and provides its subcomponents through groups named "month", "day" and
   *  "year".
   *
   *  @param groupNames The names of the groups in the pattern, in the order they appear.
   */
  @deprecated("use inline group names like (?X) instead", "2.13.7")
  def r(groupNames: String*): Regex = new Regex(s, groupNames: _*)

  /**
   * @throws java.lang.IllegalArgumentException If the string does not contain a parsable `Boolean`.
   */
  def toBoolean: Boolean = toBooleanImpl(s)

  /**
   * Try to parse as a `Boolean`
   * @return `Some(true)` if the string is "true" case insensitive,
   * `Some(false)` if the string is "false" case insensitive,
   * and `None` if the string is anything else
   * @throws java.lang.NullPointerException if the string is `null`
   */
  def toBooleanOption: Option[Boolean] = StringParsers.parseBool(s)

  /**
   * Parse as a `Byte` (string must contain only decimal digits and optional leading `-` or `+`).
   * @throws java.lang.NumberFormatException If the string does not contain a parsable `Byte`.
   */
  def toByte: Byte = java.lang.Byte.parseByte(s)

  /**
   * Try to parse as a `Byte`
   * @return `Some(value)` if the string contains a valid byte value, otherwise `None`
   * @throws java.lang.NullPointerException if the string is `null`
   */
  def toByteOption: Option[Byte] = StringParsers.parseByte(s)

  /**
   * Parse as a `Short` (string must contain only decimal digits and optional leading `-` or `+`).
   * @throws java.lang.NumberFormatException If the string does not contain a parsable `Short`.
+ */ + def toShort: Short = java.lang.Short.parseShort(s) + + /** + * Try to parse as a `Short` + * @return `Some(value)` if the string contains a valid short value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toShortOption: Option[Short] = StringParsers.parseShort(s) + + /** + * Parse as an `Int` (string must contain only decimal digits and optional leading `-` or `+`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Int`. + */ + def toInt: Int = java.lang.Integer.parseInt(s) + + /** + * Try to parse as an `Int` + * @return `Some(value)` if the string contains a valid Int value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toIntOption: Option[Int] = StringParsers.parseInt(s) + + /** + * Parse as a `Long` (string must contain only decimal digits and optional leading `-` or `+`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Long`. + */ + def toLong: Long = java.lang.Long.parseLong(s) + + /** + * Try to parse as a `Long` + * @return `Some(value)` if the string contains a valid long value, otherwise `None` + * @throws java.lang.NullPointerException if the string is `null` + */ + def toLongOption: Option[Long] = StringParsers.parseLong(s) + + /** + * Parse as a `Float` (surrounding whitespace is removed with a `trim`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Float`. + * @throws java.lang.NullPointerException If the string is null. + */ + def toFloat: Float = java.lang.Float.parseFloat(s) + + /** + * Try to parse as a `Float` + * @return `Some(value)` if the string is a parsable `Float`, `None` otherwise + * @throws java.lang.NullPointerException If the string is null + */ + def toFloatOption: Option[Float] = StringParsers.parseFloat(s) + + /** + * Parse as a `Double` (surrounding whitespace is removed with a `trim`). 
   * @throws java.lang.NumberFormatException If the string does not contain a parsable `Double`.
   * @throws java.lang.NullPointerException  If the string is null.
   */
  def toDouble: Double = java.lang.Double.parseDouble(s)

  /**
   * Try to parse as a `Double`
   * @return `Some(value)` if the string is a parsable `Double`, `None` otherwise
   * @throws java.lang.NullPointerException If the string is null
   */
  def toDoubleOption: Option[Double] = StringParsers.parseDouble(s)

  // Accepts exactly "true"/"false", case-insensitively; anything else
  // (including null) is rejected with an IllegalArgumentException.
  private[this] def toBooleanImpl(s: String): Boolean =
    if (s == null) throw new IllegalArgumentException("For input string: \"null\"")
    else if (s.equalsIgnoreCase("true")) true
    else if (s.equalsIgnoreCase("false")) false
    else throw new IllegalArgumentException("For input string: \""+s+"\"")

  /** Converts this string to an array with element type `B`
   *  (a direct char-array copy when `B` is `Char`). */
  def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] =
    if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]]
    else new WrappedString(s).toArray[B]

  // Unwraps Scala numeric wrappers to their underlying Java value
  // so that java.util.Formatter can format them.
  private[this] def unwrapArg(arg: Any): AnyRef = arg match {
    case x: ScalaNumber => x.underlying
    case x => x.asInstanceOf[AnyRef]
  }

  /** Uses the underlying string as a pattern (in a fashion similar to
   *  printf in C), and uses the supplied arguments to fill in the
   *  holes.
   *
   *  The interpretation of the formatting patterns is described in
   *  [[java.util.Formatter]], with the addition that
   *  classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and
   *  [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter` understands.
   *
   *  See [[scala.StringContext#f]] for a formatting interpolator that
   *  checks the format string at compilation.
   *
   *  @param args the arguments used to instantiate the pattern.
+ * @throws java.util.IllegalFormatException if the format contains syntax or conversion errors + */ + def format(args: Any*): String = + java.lang.String.format(s, args.map(unwrapArg): _*) + + /** Like `format(args*)` but takes an initial `Locale` parameter + * which influences formatting as in `java.lang.String`'s format. + * + * The interpretation of the formatting patterns is described in + * [[java.util.Formatter]], with the addition that + * classes deriving from `ScalaNumber` (such as `scala.BigInt` and + * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter` + * understands. + * + * @param l an instance of `java.util.Locale` + * @param args the arguments used to instantiating the pattern. + * @throws java.util.IllegalFormatException if the format contains syntax or conversion errors + */ + def formatLocal(l: java.util.Locale, args: Any*): String = + java.lang.String.format(l, s, args.map(unwrapArg): _*) + + def compare(that: String): Int = s.compareTo(that) + + /** Returns true if `this` is less than `that` */ + def < (that: String): Boolean = compare(that) < 0 + + /** Returns true if `this` is greater than `that`. */ + def > (that: String): Boolean = compare(that) > 0 + + /** Returns true if `this` is less than or equal to `that`. */ + def <= (that: String): Boolean = compare(that) <= 0 + + /** Returns true if `this` is greater than or equal to `that`. */ + def >= (that: String): Boolean = compare(that) >= 0 + + /** Counts the number of chars in this string which satisfy a predicate */ + def count(p: (Char) => Boolean): Int = { + var i, res = 0 + val len = s.length + while(i < len) { + if(p(s.charAt(i))) res += 1 + i += 1 + } + res + } + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. 
+ */ + def foreach[U](f: Char => U): Unit = { + val len = s.length + var i = 0 + while(i < len) { + f(s.charAt(i)) + i += 1 + } + } + + /** Tests whether a predicate holds for all chars of this string. + * + * @param p the predicate used to test elements. + * @return `true` if this string is empty or the given predicate `p` + * holds for all chars of this string, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: Char => Boolean): Boolean = { + var i = 0 + val len = s.length + while(i < len) { + if(!p(s.charAt(i))) return false + i += 1 + } + true + } + + /** Applies the given binary operator `op` to the given initial value `z` and all chars + * in this string, going left to right. Returns the initial value if this string is + * empty. + * + * If `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the chars in this string, the + * result is `op( op( ... op( op(z, x,,1,,), x,,2,,) ... ), x,,n,,)`. + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to `z` and all chars in this string, + * going left to right. Returns `z` if this string is empty. + */ + def foldLeft[B](z: B)(op: (B, Char) => B): B = { + var v = z + var i = 0 + val len = s.length + while(i < len) { + v = op(v, s.charAt(i)) + i += 1 + } + v + } + + /** Applies the given binary operator `op` to all chars in this string and the given + * initial value `z`, going right to left. Returns the initial value if this string is + * empty. + * + * If `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the chars in this string, the + * result is `op(x,,1,,, op(x,,2,,, op( ... op(x,,n,,, z) ... )))`. + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to all chars in this string + * and `z`, going right to left. Returns `z` if this string + * is empty. 
+ */ + def foldRight[B](z: B)(op: (Char, B) => B): B = { + var v = z + var i = s.length - 1 + while(i >= 0) { + v = op(s.charAt(i), v) + i -= 1 + } + v + } + + /** Alias for [[foldLeft]]. + * + * The type parameter is more restrictive than for `foldLeft` to be + * consistent with [[IterableOnceOps.fold]]. + * + * @tparam A1 The type parameter for the binary operator, a supertype of `Char`. + * @param z An initial value. + * @param op A binary operator. + * @return The result of applying `op` to `z` and all chars in this string, + * going left to right. Returns `z` if this string is empty. + */ + @inline def fold[A1 >: Char](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Selects the first char of this string. + * @return the first char of this string. + * @throws NoSuchElementException if the string is empty. + */ + def head: Char = if(s.isEmpty) throw new NoSuchElementException("head of empty String") else s.charAt(0) + + /** Optionally selects the first char. + * @return the first char of this string if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[Char] = + if(s.isEmpty) None else Some(s.charAt(0)) + + /** Selects the last char of this string. + * @return the last char of this string. + * @throws NoSuchElementException if the string is empty. + */ + def last: Char = if(s.isEmpty) throw new NoSuchElementException("last of empty String") else s.charAt(s.length-1) + + /** Optionally selects the last char. + * @return the last char of this string if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[Char] = + if(s.isEmpty) None else Some(s.charAt(s.length-1)) + + /** Produces the range of all indices of this string. + * + * @return a `Range` value from `0` to one less than the length of this string. + */ + def indices: Range = Range(0, s.length) + + /** Iterator can be used only once */ + def iterator: Iterator[Char] = new StringIterator(s) + + /** Stepper can be used with Java 8 Streams. 
This method is equivalent to a call to + * [[charStepper]]. See also [[codePointStepper]]. + */ + @inline def stepper: IntStepper with EfficientSplit = charStepper + + /** Steps over characters in this string. Values are packed in `Int` for efficiency + * and compatibility with Java 8 Streams which have an efficient specialization for `Int`. + */ + @inline def charStepper: IntStepper with EfficientSplit = new CharStringStepper(s, 0, s.length) + + /** Steps over code points in this string. + */ + @inline def codePointStepper: IntStepper with EfficientSplit = new CodePointStringStepper(s, 0, s.length) + + /** Tests whether the string is not empty. */ + @inline def nonEmpty: Boolean = !s.isEmpty + + /** Returns new sequence with elements in reversed order. + * @note $unicodeunaware + */ + def reverse: String = new JStringBuilder(s).reverse().toString + + /** An iterator yielding chars in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. + * + * @return an iterator yielding the chars of this string in reversed order + */ + def reverseIterator: Iterator[Char] = new ReverseIterator(s) + + /** Creates a non-strict filter of this string. + * + * @note the difference between `c filter p` and `c withFilter p` is that + * the former creates a new string, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `stringOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those chars of this string + * which satisfy the predicate `p`. + */ + def withFilter(p: Char => Boolean): StringOps.WithFilter = new StringOps.WithFilter(p, s) + + /** The rest of the string without its first char. + * @throws UnsupportedOperationException if the string is empty. 
+ * @note $unicodeunaware + */ + def tail: String = if(s.isEmpty) throw new UnsupportedOperationException("tail of empty String") else slice(1, s.length) + + /** The initial part of the string without its last char. + * @throws UnsupportedOperationException if the string is empty. + * @note $unicodeunaware + */ + def init: String = if(s.isEmpty) throw new UnsupportedOperationException("init of empty String") else slice(0, s.length-1) + + /** A string containing the first `n` chars of this string. + * @note $unicodeunaware + */ + def take(n: Int): String = slice(0, min(n, s.length)) + + /** The rest of the string without its `n` first chars. + * @note $unicodeunaware + */ + def drop(n: Int): String = slice(min(n, s.length), s.length) + + /** A string containing the last `n` chars of this string. + * @note $unicodeunaware + */ + def takeRight(n: Int): String = drop(s.length - max(n, 0)) + + /** The rest of the string without its `n` last chars. + * @note $unicodeunaware + */ + def dropRight(n: Int): String = take(s.length - max(n, 0)) + + /** Iterates over the tails of this string. The first value will be this + * string and the final one will be an empty string, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this string + * @note $unicodeunaware + */ + def tails: Iterator[String] = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this string. The first value will be this + * string and the final one will be an empty string, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this string + * @note $unicodeunaware + */ + def inits: Iterator[String] = iterateUntilEmpty(_.init) + + // A helper for tails and inits. 
+ private[this] def iterateUntilEmpty(f: String => String): Iterator[String] = + Iterator.iterate(s)(f).takeWhile(x => !x.isEmpty) ++ Iterator.single("") + + /** Selects all chars of this string which satisfy a predicate. */ + def filter(pred: Char => Boolean): String = { + val len = s.length + val sb = new JStringBuilder(len) + var i = 0 + while (i < len) { + val x = s.charAt(i) + if(pred(x)) sb.append(x) + i += 1 + } + if(len == sb.length()) s else sb.toString + } + + /** Selects all chars of this string which do not satisfy a predicate. */ + @inline def filterNot(pred: Char => Boolean): String = filter(c => !pred(c)) + + /** Copy chars of this string to an array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either the entire string has been copied + * or the end of the array is reached + * + * @param xs the array to fill. + */ + @inline def copyToArray(xs: Array[Char]): Int = + copyToArray(xs, 0, Int.MaxValue) + + /** Copy chars of this string to an array. + * Fills the given array `xs` starting at index `start`. + * Copying will stop once either the entire string has been copied + * or the end of the array is reached + * + * @param xs the array to fill. + * @param start the starting index. + */ + @inline def copyToArray(xs: Array[Char], start: Int): Int = + copyToArray(xs, start, Int.MaxValue) + + /** Copy chars of this string to an array. + * Fills the given array `xs` starting at index `start` with at most `len` chars. + * Copying will stop once either the entire string has been copied, + * or the end of the array is reached or `len` chars have been copied. + * + * @param xs the array to fill. + * @param start the starting index. + * @param len the maximal number of elements to copy. 
+ */ + def copyToArray(xs: Array[Char], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(s.length, xs.length, start, len) + if (copied > 0) { + s.getChars(0, copied, xs, start) + } + copied + } + + /** Finds index of the first char satisfying some predicate after or at some start index. + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this string that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(p: Char => Boolean, from: Int = 0): Int = { + val len = s.length + var i = from + while(i < len) { + if(p(s.charAt(i))) return i + i += 1 + } + -1 + } + + /** Finds index of the last char satisfying some predicate before or at some end index. + * + * @param p the predicate used to test elements. + * @param end the end index + * @return the index `<= end` of the last element of this string that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: Char => Boolean, end: Int = Int.MaxValue): Int = { + val len = s.length + var i = min(end, len-1) + while(i >= 0) { + if(p(s.charAt(i))) return i + i -= 1 + } + -1 + } + + /** Tests whether a predicate holds for at least one char of this string. */ + def exists(p: Char => Boolean): Boolean = indexWhere(p) != -1 + + /** Finds the first char of the string satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the string + * that satisfies `p`, or `None` if none exists. + */ + def find(p: Char => Boolean): Option[Char] = indexWhere(p) match { + case -1 => None + case i => Some(s.charAt(i)) + } + + /** Drops longest prefix of chars that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this string whose first element + * does not satisfy the predicate `p`. 
+ */ + def dropWhile(p: Char => Boolean): String = indexWhere(c => !p(c)) match { + case -1 => "" + case i => s.substring(i) + } + + /** Takes longest prefix of chars that satisfy a predicate. */ + def takeWhile(p: Char => Boolean): String = indexWhere(c => !p(c)) match { + case -1 => s + case i => s.substring(0, i) + } + + /** Splits this string into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of strings consisting of the first `n` + * chars of this string, and the other chars. + * @note $unicodeunaware + */ + def splitAt(n: Int): (String, String) = (take(n), drop(n)) + + /** Splits this string into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this string whose + * chars all satisfy `p`, and the rest of this string. + */ + def span(p: Char => Boolean): (String, String) = indexWhere(c => !p(c)) match { + case -1 => (s, "") + case i => (s.substring(0, i), s.substring(i)) + } + + /** Partitions elements in fixed size strings. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing strings of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + * @note $unicodeunaware + */ + def grouped(size: Int): Iterator[String] = new StringOps.GroupedIterator(s, size) + + /** A pair of, first, all chars that satisfy predicate `p` and, second, all chars that do not. 
*/ + def partition(p: Char => Boolean): (String, String) = { + val res1, res2 = new JStringBuilder + var i = 0 + val len = s.length + while(i < len) { + val x = s.charAt(i) + (if(p(x)) res1 else res2).append(x) + i += 1 + } + (res1.toString, res2.toString) + } + + /** Applies a function `f` to each character of the string and returns a pair of strings: the first one + * made of those characters returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = "1one2two3three" partitionMap { c => + * if (c > 'a') Left(c) else Right(c) + * } + * // xs == ("onetwothree", "123") + * }}} + * + * @param f the 'split function' mapping the elements of this string to an [[scala.util.Either]] + * + * @return a pair of strings: the first one made of those characters returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. + */ + def partitionMap(f: Char => Either[Char,Char]): (String, String) = { + val res1, res2 = new JStringBuilder + var i = 0 + val len = s.length + while(i < len) { + f(s.charAt(i)) match { + case Left(c) => res1.append(c) + case Right(c) => res2.append(c) + } + i += 1 + } + (res1.toString, res2.toString) + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. 
+ * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[Char, B, String] = new LazyZip2(s, new WrappedString(s), that) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to WrappedString implementations which + may not provide the best possible performance. We need them in `StringOps` because their return type + mentions `C` (which is `String` in `StringOps` and `WrappedString` in `WrappedString`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this string and another sequence. + * + * @param that the sequence of chars to remove + * @return a new string which contains all chars of this string + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + * @note $unicodeunaware + */ + def diff[B >: Char](that: Seq[B]): String = new WrappedString(s).diff(that).unwrap + + /** Computes the multiset intersection between this string and another sequence. + * + * @param that the sequence of chars to intersect with. + * @return a new string which contains all chars of this string + * which also appear in `that`. 
+ * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + * @note $unicodeunaware + */ + def intersect[B >: Char](that: Seq[B]): String = new WrappedString(s).intersect(that).unwrap + + /** Selects all distinct chars of this string ignoring the duplicates. + * + * @note $unicodeunaware + */ + def distinct: String = new WrappedString(s).distinct.unwrap + + /** Selects all distinct chars of this string ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. + * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new string consisting of all the chars of this string without duplicates. + * @note $unicodeunaware + */ + def distinctBy[B](f: Char => B): String = new WrappedString(s).distinctBy(f).unwrap + + /** Sorts the characters of this string according to an Ordering. + * + * The sort is stable. That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * @param ord the ordering to be used to compare elements. + * @return a string consisting of the chars of this string + * sorted according to the ordering `ord`. + * @note $unicodeunaware + */ + def sorted[B >: Char](implicit ord: Ordering[B]): String = new WrappedString(s).sorted(ord).unwrap + + /** Sorts this string according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. 
+ * @return a string consisting of the elements of this string + * sorted according to the comparison function `lt`. + * @note $unicodeunaware + */ + def sortWith(lt: (Char, Char) => Boolean): String = new WrappedString(s).sortWith(lt).unwrap + + /** Sorts this string according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `ord.compare`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return a string consisting of the chars of this string + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + * @note $unicodeunaware + */ + def sortBy[B](f: Char => B)(implicit ord: Ordering[B]): String = new WrappedString(s).sortBy(f)(ord).unwrap + + /** Partitions this string into a map of strings according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to strings such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a string of those elements `x` + * for which `f(x)` equals `k`. + * @note $unicodeunaware + */ + def groupBy[K](f: Char => K): immutable.Map[K, String] = new WrappedString(s).groupBy(f).view.mapValues(_.unwrap).toMap + + /** Groups chars in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) 
+ * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of chars per group + * @param step the distance between the first chars of successive groups + * @return An iterator producing strings of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` chars remaining to be grouped. + * @note $unicodeunaware + */ + def sliding(size: Int, step: Int = 1): Iterator[String] = new WrappedString(s).sliding(size, step).map(_.unwrap) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * @return An Iterator which traverses the n-element combinations of this string. 
+ * @example {{{ + * "abbbc".combinations(2).foreach(println) + * // ab + * // ac + * // bb + * // bc + * "bab".combinations(2).foreach(println) + * // bb + * // ba + * }}} + * @note $unicodeunaware + */ + def combinations(n: Int): Iterator[String] = new WrappedString(s).combinations(n).map(_.unwrap) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this string. + * @example {{{ + * "abb".permutations.foreach(println) + * // abb + * // bab + * // bba + * }}} + * @note $unicodeunaware + */ + def permutations: Iterator[String] = new WrappedString(s).permutations.map(_.unwrap) +} + +final case class StringView(s: String) extends AbstractIndexedSeqView[Char] { + def length = s.length + @throws[StringIndexOutOfBoundsException] + def apply(n: Int) = s.charAt(n) + override def toString: String = s"StringView($s)" +} diff --git a/src/library/scala/collection/StringParsers.scala b/src/library/scala/collection/StringParsers.scala new file mode 100644 index 000000000000..36108dc539da --- /dev/null +++ b/src/library/scala/collection/StringParsers.scala @@ -0,0 +1,321 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec + +/** A module containing the implementations of parsers from strings to numeric types, and boolean + */ +private[scala] object StringParsers { + + //compile-time constant helpers + + //Int.MinValue == -2147483648 + private final val intOverflowBoundary = -214748364 + private final val intOverflowDigit = 9 + //Long.MinValue == -9223372036854775808L + private final val longOverflowBoundary = -922337203685477580L + private final val longOverflowDigit = 9 + + private final val POS = true + + @inline + private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) + + @inline + private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { + @tailrec + def rec(i: Int, agg: Int): Option[Int] = + if (agg < min) None + else if (i == len) { + if (!isPositive) Some(agg) + else if (agg == min) None + else Some(-agg) + } + else { + val digit = decValue(from.charAt(i)) + if (digit == -1) None + else rec(i + 1, agg * 10 - digit) + } + rec(1, agg) + } + + @inline + private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' + + //bool + @inline + final def parseBool(from: String): Option[Boolean] = + if (from.equalsIgnoreCase("true")) Some(true) + else if (from.equalsIgnoreCase("false")) Some(false) + else None + + //integral types + final def parseByte(from: String): Option[Byte] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toByte) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, POS, Byte.MinValue).map(_.toByte) + else if (first == '+') stepToOverflow(from, len, 0, POS, Byte.MinValue).map(_.toByte) + else if (first == '-') stepToOverflow(from, len, 0, !POS, Byte.MinValue).map(_.toByte) + else None + } + } + + final def 
parseShort(from: String): Option[Short] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toShort) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, POS, Short.MinValue).map(_.toShort) + else if (first == '+') stepToOverflow(from, len, 0, POS, Short.MinValue).map(_.toShort) + else if (first == '-') stepToOverflow(from, len, 0, !POS, Short.MinValue).map(_.toShort) + else None + } + } + + final def parseInt(from: String): Option[Int] = { + val len = from.length() + + @tailrec + def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { + if (i == len) { + if (!isPositive) Some(agg) + else if (agg == Int.MinValue) None + else Some(-agg) + } + else if (agg < intOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None + else step(i + 1, (agg * 10) - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, POS) + else if (first == '+') step(1, 0, POS) + else if (first == '-') step(1, 0, !POS) + else None + } + } + + final def parseLong(from: String): Option[Long] = { + //like parseInt, but Longer + val len = from.length() + + @tailrec + def step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { + if (i == len) { + if (isPositive && agg == Long.MinValue) None + else if (isPositive) Some(-agg) + else Some(agg) + } + else if (agg < longOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None + else step(i + 1, agg * 10 - digit, isPositive) + } + } + //empty strings parse to None + if 
(len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first).toLong + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, POS) + else if (first == '+') step(1, 0, POS) + else if (first == '-') step(1, 0, !POS) + else None + } + } + + //floating point + final def checkFloatFormat(format: String): Boolean = { + //indices are tracked with a start index which points *at* the first index + //and an end index which points *after* the last index + //so that slice length === end - start + //thus start == end <=> empty slice + //and format.substring(start, end) is equivalent to the slice + + //some utilities for working with index bounds into the original string + @inline + def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { + @tailrec + def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) + rec(start) + } + + //one after last index for the predicate to hold, or `from` if none hold + //may point after the end of the string + @inline + def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { + @tailrec @inline + def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) + else i + rec(from) + } + + + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F')) + + def prefixOK(startIndex: Int, endIndex: Int): Boolean = { + val len = endIndex - startIndex + (len > 0) && { + //the prefix part is + //hexDigits + //hexDigits. + //hexDigits.hexDigits + //.hexDigits + //but not . 
+ if (format.charAt(startIndex) == '.') { + (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) + } else { + val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) + (noLeading >= endIndex) || + ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) + } + } + } + + def postfixOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + (forAllBetween(startIndex, endIndex, isDigit)) || { + val startchar = format.charAt(startIndex) + (startchar == '+' || startchar == '-') && + (endIndex - startIndex > 1) && + forAllBetween(startIndex + 1, endIndex, isDigit) + } + } + // prefix [pP] postfix + val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) + (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) + } + + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + //invariant: endIndex > startIndex + + def isExp(c: Char): Boolean = c == 'e' || c == 'E' + + def expOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + val startChar = format.charAt(startIndex) + if (startChar == '+' || startChar == '-') + (endIndex > (startIndex + 1)) && + skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex + else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex + } + + //significant can be one of + //* digits.digits + //* .digits + //* digits. + //but not just . 
+ val startChar = format.charAt(startIndex) + if (startChar == '.') { + val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) + // a digit is required followed by optional exp + (noSignificant > startIndex + 1) && (noSignificant >= endIndex || + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + ) + } + else if (isDigit(startChar)) { + // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent + val noInt = skipIndexWhile(isDigit, startIndex, endIndex) + // just the digits + (noInt == endIndex) || { + if (format.charAt(noInt) == '.') { + val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) + (noSignificant >= endIndex) || //no exponent + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + } else + isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) + } + } + else false + } + + //count 0x00 to 0x20 as "whitespace", and nothing else + val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) + val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 + + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false + else { + //all formats can have a sign + val unsigned = { + val startchar = format.charAt(unspacedStart) + if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart + } + if (unsigned >= unspacedEnd) false + //that's it for NaN and Infinity + else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" + else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" + else { + //all other formats can have a format suffix + val desuffixed = { + val endchar = format.charAt(unspacedEnd - 1) + if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 + else unspacedEnd + } + val len = desuffixed - unsigned + if (len <= 0) false + else if (len >= 2 && (format.charAt(unsigned + 
1) == 'x' || format.charAt(unsigned + 1) == 'X')) + format.charAt(unsigned) == '0' && isHexFloatLiteral(unsigned + 2, desuffixed) + else isDecFloatLiteral(unsigned, desuffixed) + } + } + } + + @inline + def parseFloat(from: String): Option[Float] = + if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) + else None + + @inline + def parseDouble(from: String): Option[Double] = + if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) + else None + +} diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala deleted file mode 100644 index 4ece859e82c7..000000000000 --- a/src/library/scala/collection/Traversable.scala +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import mutable.Builder -import scala.util.control.Breaks - -/** A trait for traversable collections. - * All operations are guaranteed to be performed in a single-threaded manner. 
- * - * $traversableInfo - */ -trait Traversable[+A] extends TraversableLike[A, Traversable[A]] - with GenTraversable[A] - with TraversableOnce[A] - with GenericTraversableTemplate[A, Traversable] { - override def companion: GenericCompanion[Traversable] = Traversable - - override def seq: Traversable[A] = this - - /* The following methods are inherited from TraversableLike - * - override def isEmpty: Boolean - override def size: Int - override def hasDefiniteSize - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Traversable[A], B, That]): That - override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That - override def filter(p: A => Boolean): Traversable[A] - override def remove(p: A => Boolean): Traversable[A] - override def partition(p: A => Boolean): (Traversable[A], Traversable[A]) - override def groupBy[K](f: A => K): Map[K, Traversable[A]] - override def foreach[U](f: A => U): Unit - override def forall(p: A => Boolean): Boolean - override def exists(p: A => Boolean): Boolean - override def count(p: A => Boolean): Int - override def find(p: A => Boolean): Option[A] - override def foldLeft[B](z: B)(op: (B, A) => B): B - override def /: [B](z: B)(op: (B, A) => B): B - override def foldRight[B](z: B)(op: (A, B) => B): B - override def :\ [B](z: B)(op: (A, B) => B): B - override def reduceLeft[B >: A](op: (B, A) => B): B - override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] - override def reduceRight[B >: A](op: (A, B) => B): B - override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] - override def head: A - override def headOption: Option[A] - override def tail: Traversable[A] - override def last: A - override def lastOption: Option[A] - override def init: Traversable[A] - override def take(n: Int): Traversable[A] - override def drop(n: Int): 
Traversable[A] - override def slice(from: Int, until: Int): Traversable[A] - override def takeWhile(p: A => Boolean): Traversable[A] - override def dropWhile(p: A => Boolean): Traversable[A] - override def span(p: A => Boolean): (Traversable[A], Traversable[A]) - override def splitAt(n: Int): (Traversable[A], Traversable[A]) - override def copyToBuffer[B >: A](dest: Buffer[B]) - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) - override def copyToArray[B >: A](xs: Array[B], start: Int) - override def toArray[B >: A : ClassTag]: Array[B] - override def toList: List[A] - override def toIterable: Iterable[A] - override def toSeq: Seq[A] - override def toStream: Stream[A] - override def sortWith(lt : (A,A) => Boolean): Traversable[A] - override def mkString(start: String, sep: String, end: String): String - override def mkString(sep: String): String - override def mkString: String - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder - override def addString(b: StringBuilder, sep: String): StringBuilder - override def addString(b: StringBuilder): StringBuilder - override def toString - override def stringPrefix : String - override def view - override def view(from: Int, until: Int): TraversableView[A, Traversable[A]] - */ -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - */ -object Traversable extends TraversableFactory[Traversable] { self => - - /** Provides break functionality separate from client code */ - private[collection] val breaks: Breaks = new Breaks - - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, Traversable[A]] = immutable.Traversable.newBuilder[A] -} - -/** Explicit instantiation of the `Traversable` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractTraversable[+A] extends Traversable[A] diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala deleted file mode 100644 index 6e1d39876eaa..000000000000 --- a/src/library/scala/collection/TraversableLike.scala +++ /dev/null @@ -1,1004 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import mutable.Builder -import scala.annotation.{migration, tailrec} -import scala.annotation.unchecked.{uncheckedVariance => uV} -import parallel.ParIterable -import scala.collection.immutable.{::, List, Nil} -import scala.language.higherKinds -import scala.runtime.AbstractFunction0 - -/** A template trait for traversable collections of type `Traversable[A]`. - * - * $traversableInfo - * @define mutability - * @define traversableInfo - * This is a base trait of all kinds of $mutability Scala collections. It - * implements the behavior common to all collections, in terms of a method - * `foreach` with signature: - * {{{ - * def foreach[U](f: Elem => U): Unit - * }}} - * Collection classes mixing in this trait provide a concrete - * `foreach` method which traverses all the - * elements contained in the collection, applying a given function to each. - * They also need to provide a method `newBuilder` - * which creates a builder for collections of the same kind. - * - * A traversable class might or might not have two properties: strictness - * and orderedness. Neither is represented as a type. - * - * The instances of a strict collection class have all their elements - * computed before they can be used as values. 
By contrast, instances of - * a non-strict collection class may defer computation of some of their - * elements until after the instance is available as a value. - * A typical example of a non-strict collection class is a - * [[scala.collection.immutable.Stream]]. - * A more general class of examples are `TraversableViews`. - * - * If a collection is an instance of an ordered collection class, traversing - * its elements with `foreach` will always visit elements in the - * same order, even for different runs of the program. If the class is not - * ordered, `foreach` can visit elements in different orders for - * different runs (but it will keep the same order in the same run).' - * - * A typical example of a collection class which is not ordered is a - * `HashMap` of objects. The traversal order for hash maps will - * depend on the hash codes of its elements, and these hash codes might - * differ from one run to the next. By contrast, a `LinkedHashMap` - * is ordered because its `foreach` method visits elements in the - * order they were inserted into the `HashMap`. - * - * @author Martin Odersky - * @since 2.8 - * @tparam A the element type of the collection - * @tparam Repr the type of the actual collection containing the elements. - * - * @define Coll Traversable - * @define coll traversable collection - */ -trait TraversableLike[+A, +Repr] extends Any - with HasNewBuilder[A, Repr] - with FilterMonadic[A, Repr] - with TraversableOnce[A] - with GenTraversableLike[A, Repr] - with Parallelizable[A, ParIterable[A]] -{ - self => - - import Traversable.breaks._ - - /** The type implementing this traversable */ - protected[this] type Self = Repr - - /** The collection of type $coll underlying this `TraversableLike` object. - * By default this is implemented as the `TraversableLike` object itself, - * but this can be overridden. 
- */ - def repr: Repr = this.asInstanceOf[Repr] - - final def isTraversableAgain: Boolean = true - - /** The underlying collection seen as an instance of `$Coll`. - * By default this is implemented as the current collection object itself, - * but this can be overridden. - */ - protected[this] def thisCollection: Traversable[A] = this.asInstanceOf[Traversable[A]] - - /** A conversion from collections of type `Repr` to `$Coll` objects. - * By default this is implemented as just a cast, but this can be overridden. - */ - protected[this] def toCollection(repr: Repr): Traversable[A] = repr.asInstanceOf[Traversable[A]] - - /** Creates a new builder for this collection type. - */ - protected[this] def newBuilder: Builder[A, Repr] - - protected[this] def parCombiner = ParIterable.newCombiner[A] - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - * - * Note: this method underlies the implementation of most other bulk operations. - * It's important to implement this method in an efficient way. - * - */ - def foreach[U](f: A => U): Unit - - /** Tests whether this $coll is empty. - * - * @return `true` if the $coll contain no elements, `false` otherwise. 
- */ - def isEmpty: Boolean = { - var result = true - breakable { - for (x <- this) { - result = false - break - } - } - result - } - - def hasDefiniteSize = true - - def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - def defaultPlusPlus: That = { - val b = bf(repr) - if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.seq.size) - b ++= thisCollection - b ++= that.seq - b.result - } - - if (bf eq immutable.Set.canBuildFrom) { - this match { - case s: immutable.Set[A] if that.isInstanceOf[GenSet[A]] => - (s union that.asInstanceOf[GenSet[A]]).asInstanceOf[That] - case _ => defaultPlusPlus - } - } else if (bf eq immutable.HashSet.canBuildFrom) { - this match { - case s: immutable.HashSet[A] if that.isInstanceOf[GenSet[A]] => - (s union that.asInstanceOf[GenSet[A]]).asInstanceOf[That] - case _ => defaultPlusPlus - } - } else { - this match { - case thisTS: collection.immutable.TreeSet[A] => - thisTS.addAllImpl[B, That](that)(bf.asInstanceOf[CanBuildFrom[immutable.TreeSet[A], B, That]]) - case _ => - defaultPlusPlus - } - } - - } - - /** As with `++`, returns a new collection containing the elements from the left operand followed by the - * elements from the right operand. - * - * It differs from `++` in that the right operand determines the type of - * the resulting collection rather than the left one. - * Mnemonic: the COLon is on the side of the new COLlection type. - * - * @param that the traversable to append. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements - * of this $coll followed by all elements of `that`. 
- * - * @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B] - * @inheritdoc - * - * Example: - * {{{ - * scala> val x = List(1) - * x: List[Int] = List(1) - * - * scala> val y = LinkedList(2) - * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2) - * - * scala> val z = x ++: y - * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * }}} - * - * @return a new $coll which contains all elements of this $coll - * followed by all elements of `that`. - */ - def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - def defaultPlusPlus: That = { - val b = bf(repr) - if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.size) - b ++= that - b ++= thisCollection - b.result - } - if (bf eq immutable.Set.canBuildFrom) { - this match { - case s: immutable.Set[A] if that.isInstanceOf[GenSet[A]] => - (s union that.asInstanceOf[GenSet[A]]).asInstanceOf[That] - case _ => defaultPlusPlus - } - } else if (bf eq immutable.HashSet.canBuildFrom) { - this match { - case s: immutable.HashSet[A] if that.isInstanceOf[GenSet[A]] => - (s union that.asInstanceOf[GenSet[A]]).asInstanceOf[That] - case _ => defaultPlusPlus - } - } else { - this match { - case thisTS: immutable.TreeSet[A] => - thisTS.addAllImpl[B, That](that)(bf.asInstanceOf[CanBuildFrom[immutable.TreeSet[A], B, That]]) - case _ => - defaultPlusPlus - } - } - - } - - /** As with `++`, returns a new collection containing the elements from the - * left operand followed by the elements from the right operand. - * - * It differs from `++` in that the right operand determines the type of - * the resulting collection rather than the left one. - * Mnemonic: the COLon is on the side of the new COLlection type. 
- * - * Example: - * {{{ - * scala> val x = List(1) - * x: List[Int] = List(1) - * - * scala> val y = LinkedList(2) - * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2) - * - * scala> val z = x ++: y - * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * }}} - * - * This overload exists because: for the implementation of `++:` we should - * reuse that of `++` because many collections override it with more - * efficient versions. - * - * Since `TraversableOnce` has no `++` method, we have to implement that - * directly, but `Traversable` and down can use the overload. - * - * @param that the traversable to append. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` which contains all elements - * of this $coll followed by all elements of `that`. - */ - def ++:[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = - (that ++ seq)(breakOut) - - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - def builder = { // extracted to keep method size under 35 bytes, so that it can be JIT-inlined - val b = bf(repr) - b.sizeHint(this) - b - } - val b = builder - for (x <- this) b += f(x) - b.result - } - - def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - def builder = bf(repr) // extracted to keep method size under 35 bytes, so that it can be JIT-inlined - val b = builder - for (x <- this) b ++= f(x).seq - b.result - } - - private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = { - this match { - case as: List[A] => - filterImplList(as, p, isFlipped).asInstanceOf[Repr] - case _ => - val b = newBuilder - for (x <- this) - if (p(x) != isFlipped) b += x - - b.result - } - } - - private[this] def filterImplList[A](self: List[A], p: A => Boolean, isFlipped: Boolean): List[A] = { - - // everything seen so far so far is not 
included - @tailrec def noneIn(l: List[A]): List[A] = { - if (l.isEmpty) - Nil - else { - val h = l.head - val t = l.tail - if (p(h) != isFlipped) - allIn(l, t) - else - noneIn(t) - } - } - - // everything from 'start' is included, if everything from this point is in we can return the origin - // start otherwise if we discover an element that is out we must create a new partial list. - @tailrec def allIn(start: List[A], remaining: List[A]): List[A] = { - if (remaining.isEmpty) - start - else { - val x = remaining.head - if (p(x) != isFlipped) - allIn(start, remaining.tail) - else - partialFill(start, remaining) - } - } - - // we have seen elements that should be included then one that should be excluded, start building - def partialFill(origStart: List[A], firstMiss: List[A]): List[A] = { - val newHead = new ::(origStart.head, Nil) - var toProcess = origStart.tail - var currentLast = newHead - - // we know that all elements are :: until at least firstMiss.tail - while (!(toProcess eq firstMiss)) { - val newElem = new ::(toProcess.head, Nil) - currentLast.tl = newElem - currentLast = newElem - toProcess = toProcess.tail - } - - // at this point newHead points to a list which is a duplicate of all the 'in' elements up to the first miss. - // currentLast is the last element in that list. - - // now we are going to try and share as much of the tail as we can, only moving elements across when we have to. - var next = firstMiss.tail - var nextToCopy = next // the next element we would need to copy to our list if we cant share. - while (!next.isEmpty) { - // generally recommended is next.isNonEmpty but this incurs an extra method call. - val head: A = next.head - if (p(head) != isFlipped) { - next = next.tail - } else { - // its not a match - do we have outstanding elements? 
- while (!(nextToCopy eq next)) { - val newElem = new ::(nextToCopy.head, Nil) - currentLast.tl = newElem - currentLast = newElem - nextToCopy = nextToCopy.tail - } - nextToCopy = next.tail - next = next.tail - } - } - - // we have remaining elements - they are unchanged attach them to the end - if (!nextToCopy.isEmpty) - currentLast.tl = nextToCopy - - newHead - } - - val result = noneIn(self) - result - } - - /** Selects all elements of this $coll which satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new $coll consisting of all elements of this $coll that satisfy the given - * predicate `p`. The order of the elements is preserved. - */ - def filter(p: A => Boolean): Repr = filterImpl(p, isFlipped = false) - - /** Selects all elements of this $coll which do not satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new $coll consisting of all elements of this $coll that do not satisfy the given - * predicate `p`. The order of the elements is preserved. - */ - def filterNot(p: A => Boolean): Repr = filterImpl(p, isFlipped = true) - - def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - foreach(pf.runWith(b += _)) - b.result - } - - /** Builds a new collection by applying an option-valued function to all - * elements of this $coll on which the function is defined. - * - * @param f the option-valued function which filters and maps the $coll. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying the option-valued function - * `f` to each element and collecting all defined results. - * The order of the elements is preserved. - * - * @usecase def filterMap[B](f: A => Option[B]): $Coll[B] - * @inheritdoc - * - * @param pf the partial function which filters and maps the $coll. 
- * @return a new $coll resulting from applying the given option-valued function - * `f` to each element and collecting all defined results. - * The order of the elements is preserved. - def filterMap[B, That](f: A => Option[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - for (x <- this) - f(x) match { - case Some(y) => b += y - case _ => - } - b.result - } - */ - - /** Partitions this $coll in two ${coll}s according to a predicate. - * - * @param p the predicate on which to partition. - * @return a pair of ${coll}s: the first $coll consists of all elements that - * satisfy the predicate `p` and the second $coll consists of all elements - * that don't. The relative order of the elements in the resulting ${coll}s - * is the same as in the original $coll. - */ - def partition(p: A => Boolean): (Repr, Repr) = { - val l, r = newBuilder - for (x <- this) (if (p(x)) l else r) += x - (l.result, r.result) - } - - def groupBy[K](f: A => K): immutable.Map[K, Repr] = { - object grouper extends AbstractFunction0[Builder[A, Repr]] with Function1[A, Unit] { - var k0, k1, k2, k3: K = null.asInstanceOf[K] - var v0, v1, v2, v3 = (null : Builder[A, Repr]) - var size = 0 - var hashMap: mutable.HashMap[K, Builder[A, Repr]] = null - override def apply(): mutable.Builder[A, Repr] = { - size += 1 - newBuilder - } - def apply(elem: A): Unit = { - val key = f(elem) - val bldr = builderFor(key) - bldr += elem - } - def builderFor(key: K): Builder[A, Repr] = - size match { - case 0 => - k0 = key - v0 = apply() - v0 - case 1 => - if (k0 == key) v0 - else {k1 = key; v1 = apply(); v1 } - case 2 => - if (k0 == key) v0 - else if (k1 == key) v1 - else {k2 = key; v2 = apply(); v2 } - case 3 => - if (k0 == key) v0 - else if (k1 == key) v1 - else if (k2 == key) v2 - else {k3 = key; v3 = apply(); v3 } - case 4 => - if (k0 == key) v0 - else if (k1 == key) v1 - else if (k2 == key) v2 - else if (k3 == key) v3 - else { - hashMap = new mutable.HashMap - hashMap(k0) = v0 - 
hashMap(k1) = v1 - hashMap(k2) = v2 - hashMap(k3) = v3 - val bldr = apply() - hashMap(key) = bldr - bldr - } - case _ => - hashMap.getOrElseUpdate(key, apply()) - } - - def result(): immutable.Map[K, Repr] = - size match { - case 0 => immutable.Map.empty - case 1 => new immutable.Map.Map1(k0, v0.result()) - case 2 => new immutable.Map.Map2(k0, v0.result(), k1, v1.result()) - case 3 => new immutable.Map.Map3(k0, v0.result(), k1, v1.result(), k2, v2.result()) - case 4 => new immutable.Map.Map4(k0, v0.result(), k1, v1.result(), k2, v2.result(), k3, v3.result()) - case _ => - val it = hashMap.entriesIterator0 - val m1 = immutable.HashMap.newBuilder[K, Repr] - while (it.hasNext) { - val entry = it.next() - m1.+=((entry.key, entry.value.result())) - } - m1.result() - } - - } - this.seq.foreach(grouper) - grouper.result() - } - - def forall(p: A => Boolean): Boolean = { - var result = true - breakable { - for (x <- this) - if (!p(x)) { result = false; break } - } - result - } - - /** Tests whether a predicate holds for at least one element of this $coll. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. 
- * @return `false` if this $coll is empty, otherwise `true` if the given predicate `p` - * holds for some of the elements of this $coll, otherwise `false` - */ - def exists(p: A => Boolean): Boolean = { - var result = false - breakable { - for (x <- this) - if (p(x)) { result = true; break } - } - result - } - - def find(p: A => Boolean): Option[A] = { - var result: Option[A] = None - breakable { - for (x <- this) - if (p(x)) { result = Some(x); break } - } - result - } - - def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That = scanLeft(z)(op) - - def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - b.sizeHint(this, 1) - var acc = z - b += acc - for (x <- this) { acc = op(acc, x); b += acc } - b.result - } - - @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") - def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - var scanned = List(z) - var acc = z - for (x <- reversed) { - acc = op(x, acc) - scanned ::= acc - } - val b = bf(repr) - for (elem <- scanned) b += elem - b.result - } - - /** Selects the first element of this $coll. - * $orderDependent - * @return the first element of this $coll. - * @throws NoSuchElementException if the $coll is empty. - */ - def head: A = { - var result: () => A = () => throw new NoSuchElementException - breakable { - for (x <- this) { - result = () => x - break - } - } - result() - } - - /** Optionally selects the first element. - * $orderDependent - * @return the first element of this $coll if it is nonempty, - * `None` if it is empty. - */ - def headOption: Option[A] = if (isEmpty) None else Some(head) - - /** Selects all elements except the first. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the first one. 
- * @throws java.lang.UnsupportedOperationException if the $coll is empty. - */ - override def tail: Repr = { - if (isEmpty) throw new UnsupportedOperationException("empty.tail") - drop(1) - } - - /** Selects the last element. - * $orderDependent - * @return The last element of this $coll. - * @throws NoSuchElementException If the $coll is empty. - */ - def last: A = { - var lst = head - for (x <- this) - lst = x - lst - } - - /** Optionally selects the last element. - * $orderDependent - * @return the last element of this $coll$ if it is nonempty, - * `None` if it is empty. - */ - def lastOption: Option[A] = if (isEmpty) None else Some(last) - - /** Selects all elements except the last. - * $orderDependent - * @return a $coll consisting of all elements of this $coll - * except the last one. - * @throws UnsupportedOperationException if the $coll is empty. - */ - def init: Repr = { - if (isEmpty) throw new UnsupportedOperationException("empty.init") - var lst = head - var follow = false - val b = newBuilder - b.sizeHint(this, -1) - for (x <- this) { - if (follow) b += lst - else follow = true - lst = x - } - b.result - } - - def take(n: Int): Repr = slice(0, n) - - def drop(n: Int): Repr = - if (n <= 0) { - val b = newBuilder - b.sizeHint(this) - (b ++= thisCollection).result - } - else sliceWithKnownDelta(n, Int.MaxValue, -n) - - def slice(from: Int, until: Int): Repr = - sliceWithKnownBound(scala.math.max(from, 0), until) - - // Precondition: from >= 0, until > 0, builder already configured for building. 
- private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = { - var i = 0 - breakable { - for (x <- this) { - if (i >= from) b += x - i += 1 - if (i >= until) break - } - } - b.result - } - // Precondition: from >= 0 - private[scala] def sliceWithKnownDelta(from: Int, until: Int, delta: Int): Repr = { - val b = newBuilder - if (until <= from) b.result - else { - b.sizeHint(this, delta) - sliceInternal(from, until, b) - } - } - // Precondition: from >= 0 - private[scala] def sliceWithKnownBound(from: Int, until: Int): Repr = { - val b = newBuilder - if (until <= from) b.result - else { - b.sizeHintBounded(until - from, this) - sliceInternal(from, until, b) - } - } - - def takeWhile(p: A => Boolean): Repr = { - val b = newBuilder - breakable { - for (x <- this) { - if (!p(x)) break - b += x - } - } - b.result - } - - def dropWhile(p: A => Boolean): Repr = { - val b = newBuilder - var go = false - for (x <- this) { - if (!go && !p(x)) go = true - if (go) b += x - } - b.result - } - - def span(p: A => Boolean): (Repr, Repr) = { - val l, r = newBuilder - var toLeft = true - for (x <- this) { - toLeft = toLeft && p(x) - (if (toLeft) l else r) += x - } - (l.result, r.result) - } - - def splitAt(n: Int): (Repr, Repr) = { - val l, r = newBuilder - l.sizeHintBounded(n, this) - if (n >= 0) r.sizeHint(this, -n) - var i = 0 - for (x <- this) { - (if (i < n) l else r) += x - i += 1 - } - (l.result, r.result) - } - - /** Iterates over the tails of this $coll. The first value will be this - * $coll and the final one will be an empty $coll, with the intervening - * values the results of successive applications of `tail`. - * - * @return an iterator over all the tails of this $coll - * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` - */ - def tails: Iterator[Repr] = iterateUntilEmpty(_.tail) - - /** Iterates over the inits of this $coll. 
The first value will be this - * $coll and the final one will be an empty $coll, with the intervening - * values the results of successive applications of `init`. - * - * @return an iterator over all the inits of this $coll - * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` - */ - def inits: Iterator[Repr] = iterateUntilEmpty(_.init) - - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - var i = start - val end = (start + len) min xs.length - breakable { - for (x <- this) { - if (i >= end) break - xs(i) = x - i += 1 - } - } - } - - @deprecatedOverriding("Enforce contract of toTraversable that if it is Traversable it returns itself.", "2.11.0") - def toTraversable: Traversable[A] = thisCollection - - def toIterator: Iterator[A] = toStream.iterator - def toStream: Stream[A] = toBuffer.toStream - // Override to provide size hint. - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { - val b = cbf() - b.sizeHint(this) - b ++= thisCollection - b.result - } - - /** Converts this $coll to a string. - * - * @return a string representation of this collection. By default this - * string consists of the `stringPrefix` of this $coll, followed - * by all elements separated by commas and enclosed in parentheses. - */ - override def toString = mkString(stringPrefix + "(", ", ", ")") - - /** Defines the prefix of this object's `toString` representation. - * - * @return a string representation which starts the result of `toString` - * applied to this $coll. By default the string prefix is the - * simple name of the collection class $coll. - */ - def stringPrefix: String = { - /* This method is written in a style that avoids calling `String.split()` - * as well as methods of java.lang.Character that require the Unicode - * database information. 
This is mostly important for Scala.js, so that - * using the collection library does automatically bring java.util.regex.* - * and the Unicode database in the generated code. - * - * This algorithm has the additional benefit that it won't allocate - * anything except the result String in the common case, where the class - * is not an inner class (i.e., when the result contains no '.'). - */ - val fqn = repr.getClass.getName - var pos: Int = fqn.length - 1 - - // Skip trailing $'s - while (pos != -1 && fqn.charAt(pos) == '$') { - pos -= 1 - } - if (pos == -1 || fqn.charAt(pos) == '.') { - return "" - } - - var result: String = "" - while (true) { - // Invariant: if we enter the loop, there is a non-empty part - - // Look for the beginning of the part, remembering where was the last non-digit - val partEnd = pos + 1 - while (pos != -1 && fqn.charAt(pos) <= '9' && fqn.charAt(pos) >= '0') { - pos -= 1 - } - val lastNonDigit = pos - while (pos != -1 && fqn.charAt(pos) != '$' && fqn.charAt(pos) != '.') { - pos -= 1 - } - val partStart = pos + 1 - - // A non-last part which contains only digits marks a method-local part -> drop the prefix - if (pos == lastNonDigit && partEnd != fqn.length) { - return result - } - - // Skip to the next part, and determine whether we are the end - while (pos != -1 && fqn.charAt(pos) == '$') { - pos -= 1 - } - val atEnd = pos == -1 || fqn.charAt(pos) == '.' - - // Handle the actual content of the part (we ignore parts that are likely synthetic) - def isPartLikelySynthetic = { - val firstChar = fqn.charAt(partStart) - (firstChar > 'Z' && firstChar < 0x7f) || (firstChar < 'A') - } - if (atEnd || !isPartLikelySynthetic) { - val part = fqn.substring(partStart, partEnd) - result = if (result.isEmpty) part else part + '.' + result - if (atEnd) - return result - } - } - - // dead code - result - } - - /** Creates a non-strict view of this $coll. - * - * @return a non-strict view of this $coll. 
- */ - def view = new TraversableView[A, Repr] { - protected lazy val underlying = self.repr - override def foreach[U](f: A => U) = self foreach f - } - - /** Creates a non-strict view of a slice of this $coll. - * - * Note: the difference between `view` and `slice` is that `view` produces - * a view of the current $coll, whereas `slice` produces a new $coll. - * - * Note: `view(from, to)` is equivalent to `view.slice(from, to)` - * $orderDependent - * - * @param from the index of the first element of the view - * @param until the index of the element following the view - * @return a non-strict view of a slice of this $coll, starting at index `from` - * and extending up to (but not including) index `until`. - */ - def view(from: Int, until: Int): TraversableView[A, Repr] = view.slice(from, until) - - /** Creates a non-strict filter of this $coll. - * - * Note: the difference between `c filter p` and `c withFilter p` is that - * the former creates a new collection, whereas the latter only - * restricts the domain of subsequent `map`, `flatMap`, `foreach`, - * and `withFilter` operations. - * $orderDependent - * - * @param p the predicate used to test elements. - * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this $coll - * which satisfy the predicate `p`. - */ - def withFilter(p: A => Boolean): FilterMonadic[A, Repr] = new WithFilter(p) - - /** A class supporting filtered operations. Instances of this class are - * returned by method `withFilter`. - */ - class WithFilter(p: A => Boolean) extends FilterMonadic[A, Repr] { - - /** Builds a new collection by applying a function to all elements of the - * outer $coll containing this `WithFilter` instance that satisfy predicate `p`. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. 
- * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying - * the given function `f` to each element of the outer $coll - * that satisfies predicate `p` and collecting the results. - * - * @usecase def map[B](f: A => B): $Coll[B] - * @inheritdoc - * - * @return a new $coll resulting from applying the given function - * `f` to each element of the outer $coll that satisfies - * predicate `p` and collecting the results. - */ - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - for (x <- self) - if (p(x)) b += f(x) - b.result - } - - /** Builds a new collection by applying a function to all elements of the - * outer $coll containing this `WithFilter` instance that satisfy - * predicate `p` and concatenating the results. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @tparam That $thatinfo - * @param bf $bfinfo - * @return a new collection of type `That` resulting from applying - * the given collection-valued function `f` to each element - * of the outer $coll that satisfies predicate `p` and - * concatenating the results. - * - * @usecase def flatMap[B](f: A => TraversableOnce[B]): $Coll[B] - * @inheritdoc - * - * The type of the resulting collection will be guided by the static type - * of the outer $coll. - * - * @return a new $coll resulting from applying the given - * collection-valued function `f` to each element of the - * outer $coll that satisfies predicate `p` and concatenating - * the results. - */ - def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { - val b = bf(repr) - for (x <- self) - if (p(x)) b ++= f(x).seq - b.result - } - - /** Applies a function `f` to all elements of the outer $coll containing - * this `WithFilter` instance that satisfy predicate `p`. 
- * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ - def foreach[U](f: A => U): Unit = - for (x <- self) - if (p(x)) f(x) - - /** Further refines the filter for this $coll. - * - * @param q the predicate used to test elements. - * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this $coll which - * satisfy the predicate `q` in addition to the predicate `p`. - */ - def withFilter(q: A => Boolean): WithFilter = - new WithFilter(x => p(x) && q(x)) - } - - // A helper for tails and inits. - private def iterateUntilEmpty(f: Traversable[A @uV] => Traversable[A @uV]): Iterator[Repr] = { - val it = Iterator.iterate(thisCollection)(f) takeWhile (x => !x.isEmpty) - it ++ Iterator(Nil) map (x => (newBuilder ++= x).result) - } -} diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala deleted file mode 100644 index 187ab7ac407d..000000000000 --- a/src/library/scala/collection/TraversableOnce.scala +++ /dev/null @@ -1,534 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import mutable.{ArrayBuffer, Buffer, Builder} -import generic.CanBuildFrom -import scala.annotation.unchecked.{uncheckedVariance => uV} -import scala.language.{higherKinds, implicitConversions} -import scala.reflect.ClassTag -import scala.runtime.AbstractFunction1 - -/** A template trait for collections which can be traversed either once only - * or one or more times. - * $traversableonceinfo - * - * @author Martin Odersky - * @author Paul Phillips - * @since 2.8 - * - * @define coll traversable or iterator - * - * @tparam A the element type of the collection - * - * @define traversableonceinfo - * This trait exists primarily to eliminate code duplication between - * `Iterator` and `Traversable`, and thus implements some of the common - * methods that can be implemented solely in terms of foreach without - * access to a `Builder`. It also includes a number of abstract methods - * whose implementations are provided by `Iterator`, `Traversable`, etc. - * It contains implementations common to `Iterators` and - * `Traversables`, such as folds, conversions, and other operations which - * traverse some or all of the elements and return a derived value. - * Directly subclassing `TraversableOnce` is not recommended - instead, - * consider declaring an `Iterator` with a `next` and `hasNext` method or - * creating an `Iterator` with one of the methods on the `Iterator` object. - * Consider declaring a subclass of `Traversable` instead if the elements - * can be traversed repeatedly. - * - * @define coll traversable or iterator - * @define orderDependent - * - * Note: might return different results for different runs, unless the underlying collection type is ordered. - * @define orderDependentFold - * - * Note: might return different results for different runs, unless the - * underlying collection type is ordered or the operator is associative - * and commutative. 
- * @define mayNotTerminateInf - * - * Note: may not terminate for infinite-sized collections. - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { - self => - - // A note on `isEmpty`: it is documented in GenTraversableOnce as required to - // not consume any elements. However, when (in scala/scala#8732) we tried - // to add some `isEmpty` checks in this file for efficiency, we found that: - // * in our own standard library, at least one subclass implemented - // `isEmpty` as `size == 0`, making it problematic to call `isEmpty` - // from `size` in this file - // * in the community build, at least one repo had a subclass where `isEmpty` - // consumed elements, making it problematic to call `isEmpty` in this file - // from other methods such as `count`, `foldLeft`, and `addString` - // Because it is now so late (2.12.11) in the 2.12.x series, we have chosen - // to avoid adding `isEmpty` calls here, figuring that the breakage isn't - // worth the presumably slight performance gain. (And note that in 2.13.x, - // `TraversableOnce` no longer even exists, and `IterableOnce#isEmpty` is - // deprecated.) - - //TODO 2.12: Remove these methods. They are already defined in GenTraversableOnce - /* Self-documenting abstract methods. */ - def foreach[U](f: A => U): Unit - def isEmpty: Boolean - def hasDefiniteSize: Boolean - - // Note: We could redefine this in TraversableLike to always return `repr` - // of type `Repr`, only if `Repr` had type bounds, which it doesn't, because - // not all `Repr` are a subtype `TraversableOnce[A]`. - // The alternative is redefining it for maps, sets and seqs. For concrete implementations - // we don't have to do this anyway, since they are leaves in the inheritance hierarchy. - // Note 2: This is implemented in all collections _not_ inheriting `Traversable[A]` - // at least indirectly. 
Currently, these are `ArrayOps` and `StringOps`. - // It is also implemented in `TraversableOnce[A]`. - /** A version of this collection with all - * of the operations implemented sequentially (i.e., in a single-threaded manner). - * - * This method returns a reference to this collection. In parallel collections, - * it is redefined to return a sequential implementation of this collection. In - * both cases, it has O(1) complexity. - * - * @return a sequential view of the collection. - */ - def seq: TraversableOnce[A] - - // Presently these are abstract because the Traversable versions use - // breakable/break, and I wasn't sure enough of how that's supposed to - // function to consolidate them with the Iterator versions. - def forall(p: A => Boolean): Boolean - def exists(p: A => Boolean): Boolean - def find(p: A => Boolean): Option[A] - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit - - // for internal use - protected[this] def reversed = { - //avoid the LazyRef as we don't have an @eager object - class reverser extends AbstractFunction1[A, Unit] { - var elems: List[A] = Nil - override def apply(v1: A): Unit = elems ::= v1 - } - val reverser = new reverser - self foreach reverser - reverser.elems - } - - def size: Int = { - //we can't guard with isEmpty as some implementation have - // def isEmpty = size == 0 - - //avoid the LazyRef as we don't have an @eager object - class counter extends AbstractFunction1[A, Unit] { - var result = 0 - override def apply(v1: A): Unit = result += 1 - } - val counter = new counter - self foreach counter - counter.result - } - - def nonEmpty: Boolean = !isEmpty - - def count(p: A => Boolean): Int = { - //avoid the LazyRef as we don't have an @eager object - class counter extends AbstractFunction1[A, Unit] { - var result = 0 - override def apply(v1: A): Unit = if (p(v1)) result += 1 - } - val counter = new counter - this foreach counter - counter.result - } - - /** Finds the first element of the $coll for which 
the given partial - * function is defined, and applies the partial function to it. - * - * $mayNotTerminateInf - * $orderDependent - * - * @param pf the partial function - * @return an option value containing pf applied to the first - * value for which it is defined, or `None` if none exists. - * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)` - */ - def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = { - // TODO 2.12 -- move out alternate implementations into child classes - val i: Iterator[A] = self match { - case it: Iterator[A] => it - case _: GenIterable[_] => self.toIterator // If it might be parallel, be sure to .seq or use iterator! - case _ => // Not parallel, not iterable--just traverse - self.foreach(pf.runWith(b => return Some(b))) - return None - } - // Presumably the fastest way to get in and out of a partial function is for a sentinel function to return itself - // (Tested to be lower-overhead than runWith. Would be better yet to not need to (formally) allocate it--change in 2.12.) 
- val sentinel: Function1[A, Any] = new scala.runtime.AbstractFunction1[A, Any]{ def apply(a: A) = this } - while (i.hasNext) { - val x = pf.applyOrElse(i.next, sentinel) - if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B]) - } - None - } - - @deprecated("Use foldLeft instead of /:", "2.12.10") - def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op) - - @deprecated("Use foldRight instead of :\\", "2.12.10") - def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op) - - def foldLeft[B](z: B)(op: (B, A) => B): B = { - //avoid the LazyRef as we don't have an @eager object - class folder extends AbstractFunction1[A, Unit] { - var result = z - override def apply(v1: A): Unit = result = op(result,v1) - } - val folder = new folder - this foreach folder - folder.result - } - - def foldRight[B](z: B)(op: (A, B) => B): B = - reversed.foldLeft(z)((x, y) => op(y, x)) - - /** Applies a binary operator to all elements of this $coll, - * going left to right. - * $willNotTerminateInf - * $orderDependentFold - * - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this $coll, - * going left to right: - * {{{ - * op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. - * @throws UnsupportedOperationException if this $coll is empty. 
*/ - def reduceLeft[B >: A](op: (B, A) => B): B = { - if (isEmpty) - throw new UnsupportedOperationException("empty.reduceLeft") - - //avoid the LazyRef as we don't have an @eager object - class reducer extends AbstractFunction1[A, Unit] { - var first = true - var acc: B = null.asInstanceOf[B] - - override def apply(x: A): Unit = - if (first) { - acc = x - first = false - } - else acc = op(acc, x) - } - val reducer = new reducer - self foreach reducer - reducer.acc - } - - def reduceRight[B >: A](op: (A, B) => B): B = { - if (isEmpty) - throw new UnsupportedOperationException("empty.reduceRight") - - reversed.reduceLeft[B]((x, y) => op(y, x)) - } - - def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = - if (isEmpty) None else Some(reduceLeft(op)) - - def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = - if (isEmpty) None else Some(reduceRight(op)) - - def reduce[A1 >: A](op: (A1, A1) => A1): A1 = reduceLeft(op) - - def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1] = reduceLeftOption(op) - - def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) - - def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) - - def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus) - - def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times) - - def min[B >: A](implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.min") - - reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y) - } - - def max[B >: A](implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.max") - - reduceLeft((x, y) => if (cmp.gteq(x, y)) x else y) - } - - def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.maxBy") - - //avoid the LazyRef as we don't have an @eager object - class maxer extends AbstractFunction1[A, Unit] { - var maxF: B = null.asInstanceOf[B] 
- var maxElem: A = null.asInstanceOf[A] - var first = true - override def apply(elem: A): Unit = { - val fx = f(elem) - if (first || cmp.gt(fx, maxF)) { - maxElem = elem - maxF = fx - first = false - } - } - - } - val maxer = new maxer - self foreach maxer - maxer.maxElem - } - def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = { - if (isEmpty) - throw new UnsupportedOperationException("empty.minBy") - - //avoid the LazyRef as we don't have an @eager object - class miner extends AbstractFunction1[A, Unit] { - var minF: B = null.asInstanceOf[B] - var minElem: A = null.asInstanceOf[A] - var first = true - override def apply(elem: A): Unit = { - val fx = f(elem) - if (first || cmp.lt(fx, minF)) { - minElem = elem - minF = fx - first = false - } - } - - } - val miner = new miner - self foreach miner - miner.minElem - } - - /** Copies all elements of this $coll to a buffer. - * $willNotTerminateInf - * @param dest The buffer to which elements are copied. - */ - def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= seq - - def copyToArray[B >: A](xs: Array[B], start: Int): Unit = - copyToArray(xs, start, xs.length - start) - - def copyToArray[B >: A](xs: Array[B]): Unit = - copyToArray(xs, 0, xs.length) - - def toArray[B >: A : ClassTag]: Array[B] = { - if (isTraversableAgain) { - val result = new Array[B](size) - copyToArray(result, 0) - result - } - else toBuffer.toArray - } - - def toTraversable: Traversable[A] - - def toList: List[A] = to[List] - - def toIterable: Iterable[A] = toStream - - def toSeq: Seq[A] = toStream - - def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq] - - def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]] - - def toSet[B >: A]: immutable.Set[B] = to[immutable.Set].asInstanceOf[immutable.Set[B]] - - def toVector: Vector[A] = to[Vector] - - def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { - val b = cbf() - b ++= seq - b.result() - } - - def 
toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = { - val b = immutable.Map.newBuilder[T, U] - b ++= seq.asInstanceOf[TraversableOnce[(T, U)]] - b.result() - } - - def mkString(start: String, sep: String, end: String): String = - addString(new StringBuilder(), start, sep, end).toString - - def mkString(sep: String): String = mkString("", sep, "") - - def mkString: String = mkString("") - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * Example: - * - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> a.addString(b , "List(" , ", " , ")") - * res5: StringBuilder = List(1, 2, 3, 4) - * }}} - * - * @param b the string builder to which elements are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. - */ - def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - b append start - - class appender extends AbstractFunction1[A, Unit] { - var first = true - override def apply(x: A): Unit = { - if (first) { - b append x - first = false - } - else { - b append sep - b append x - } - } - } - val appender = new appender - self foreach appender - b append end - b - } - - /** Appends all elements of this $coll to a string builder using a separator string. - * The written text consists of the string representations (w.r.t. the method `toString`) - * of all elements of this $coll, separated by the string `sep`. 
- * - * Example: - * - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> a.addString(b, ", ") - * res0: StringBuilder = 1, 2, 3, 4 - * }}} - * - * @param b the string builder to which elements are appended. - * @param sep the separator string. - * @return the string builder `b` to which elements were appended. - */ - def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "") - - /** Appends all elements of this $coll to a string builder. - * The written text consists of the string representations (w.r.t. the method - * `toString`) of all elements of this $coll without any separator string. - * - * Example: - * - * {{{ - * scala> val a = List(1,2,3,4) - * a: List[Int] = List(1, 2, 3, 4) - * - * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> val h = a.addString(b) - * h: StringBuilder = 1234 - * }}} - - * @param b the string builder to which elements are appended. - * @return the string builder `b` to which elements were appended. - */ - def addString(b: StringBuilder): StringBuilder = addString(b, "") -} - - -object TraversableOnce { - implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity - implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) = - new FlattenOps[A](travs map ev) - - /* Functionality reused in Iterator.CanBuildFrom */ - private[collection] abstract class BufferedCanBuildFrom[A, CC[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[CC[_], A, CC[A]] { - def bufferToColl[B](buff: ArrayBuffer[B]): CC[B] - def traversableToColl[B](t: GenTraversable[B]): CC[B] - - def newIterator: Builder[A, CC[A]] = new ArrayBuffer[A] mapResult bufferToColl - - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. 
- * @return the result of invoking the `genericBuilder` method on `from`. - */ - def apply(from: CC[_]): Builder[A, CC[A]] = from match { - case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult { - case res => traversableToColl(res.asInstanceOf[GenTraversable[A]]) - } - case _ => newIterator - } - - /** Creates a new builder from scratch - * @return the result of invoking the `newBuilder` method of this factory. - */ - def apply() = newIterator - } - - /** With the advent of `TraversableOnce`, it can be useful to have a builder which - * operates on `Iterator`s so they can be treated uniformly along with the collections. - * See `scala.util.Random.shuffle` or `scala.concurrent.Future.sequence` for an example. - */ - class OnceCanBuildFrom[A] extends BufferedCanBuildFrom[A, TraversableOnce] { - def bufferToColl[B](buff: ArrayBuffer[B]) = buff.iterator - def traversableToColl[B](t: GenTraversable[B]) = t.seq - } - - /** Evidence for building collections from `TraversableOnce` collections */ - implicit def OnceCanBuildFrom[A] = new OnceCanBuildFrom[A] - - class FlattenOps[A](travs: TraversableOnce[TraversableOnce[A]]) { - def flatten: Iterator[A] = new AbstractIterator[A] { - val its = travs.toIterator - private var it: Iterator[A] = Iterator.empty - def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext } - def next(): A = if (hasNext) it.next() else Iterator.empty.next() - } - } - - class ForceImplicitAmbiguity - - implicit class MonadOps[+A](trav: TraversableOnce[A]) { - def map[B](f: A => B): TraversableOnce[B] = trav.toIterator map f - def flatMap[B](f: A => GenTraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f - def withFilter(p: A => Boolean) = trav.toIterator filter p - def filter(p: A => Boolean): TraversableOnce[A] = withFilter(p) - } -} diff --git a/src/library/scala/collection/TraversableProxy.scala 
b/src/library/scala/collection/TraversableProxy.scala deleted file mode 100644 index 867dd43d9c08..000000000000 --- a/src/library/scala/collection/TraversableProxy.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def' - - -/** This trait implements a proxy for traversable objects. It forwards - * all calls to a different traversable object - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") -trait TraversableProxy[+A] extends Traversable[A] with TraversableProxyLike[A, Traversable[A]] diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala deleted file mode 100644 index bcf6eeaddb16..000000000000 --- a/src/library/scala/collection/TraversableProxyLike.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import mutable.{Buffer, StringBuilder} -import scala.reflect.ClassTag - -// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def' - -/** This trait implements a proxy for Traversable objects. It forwards - * all calls to a different Traversable object. 
- * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy { - def self: Repr - - override def foreach[U](f: A => U): Unit = self.foreach(f) - override def isEmpty: Boolean = self.isEmpty - override def nonEmpty: Boolean = self.nonEmpty - override def size: Int = self.size - override def hasDefiniteSize = self.hasDefiniteSize - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf) - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.map(f)(bf) - override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf) - override def filter(p: A => Boolean): Repr = self.filter(p) - override def filterNot(p: A => Boolean): Repr = self.filterNot(p) - override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf) - override def partition(p: A => Boolean): (Repr, Repr) = self.partition(p) - override def groupBy[K](f: A => K): immutable.Map[K, Repr] = self.groupBy(f) - override def forall(p: A => Boolean): Boolean = self.forall(p) - override def exists(p: A => Boolean): Boolean = self.exists(p) - override def count(p: A => Boolean): Int = self.count(p) - override def find(p: A => Boolean): Option[A] = self.find(p) - override def foldLeft[B](z: B)(op: (B, A) => B): B = self.foldLeft(z)(op) - override def /: [B](z: B)(op: (B, A) => B): B = self./:(z)(op) - override def foldRight[B](z: B)(op: (A, B) => B): B = self.foldRight(z)(op) - override def :\ [B](z: B)(op: (A, B) => B): B = self.:\(z)(op) - override def reduceLeft[B >: A](op: (B, A) => B): B = self.reduceLeft(op) - override def reduceLeftOption[B >: A](op: (B, A) => B): 
Option[B] = self.reduceLeftOption(op) - override def reduceRight[B >: A](op: (A, B) => B): B = self.reduceRight(op) - override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = self.reduceRightOption(op) - override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanLeft(z)(op)(bf) - override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanRight(z)(op)(bf) - override def sum[B >: A](implicit num: Numeric[B]): B = self.sum(num) - override def product[B >: A](implicit num: Numeric[B]): B = self.product(num) - override def min[B >: A](implicit cmp: Ordering[B]): A = self.min(cmp) - override def max[B >: A](implicit cmp: Ordering[B]): A = self.max(cmp) - override def head: A = self.head - override def headOption: Option[A] = self.headOption - override def tail: Repr = self.tail - override def last: A = self.last - override def lastOption: Option[A] = self.lastOption - override def init: Repr = self.init - override def take(n: Int): Repr = self.take(n) - override def drop(n: Int): Repr = self.drop(n) - override def slice(from: Int, until: Int): Repr = self.slice(from, until) - override def takeWhile(p: A => Boolean): Repr = self.takeWhile(p) - override def dropWhile(p: A => Boolean): Repr = self.dropWhile(p) - override def span(p: A => Boolean): (Repr, Repr) = self.span(p) - override def splitAt(n: Int): (Repr, Repr) = self.splitAt(n) - override def copyToBuffer[B >: A](dest: Buffer[B]) = self.copyToBuffer(dest) - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = self.copyToArray(xs, start, len) - override def copyToArray[B >: A](xs: Array[B], start: Int) = self.copyToArray(xs, start) - override def copyToArray[B >: A](xs: Array[B]) = self.copyToArray(xs) - override def toArray[B >: A: ClassTag]: Array[B] = self.toArray - override def toList: List[A] = self.toList - override def toIterable: Iterable[A] = self.toIterable - override 
def toSeq: Seq[A] = self.toSeq - override def toIndexedSeq: immutable.IndexedSeq[A] = self.toIndexedSeq - override def toBuffer[B >: A] = self.toBuffer - override def toStream: Stream[A] = self.toStream - override def toSet[B >: A]: immutable.Set[B] = self.toSet - override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = self.toMap(ev) - override def toTraversable: Traversable[A] = self.toTraversable - override def toIterator: Iterator[A] = self.toIterator - override def mkString(start: String, sep: String, end: String): String = self.mkString(start, sep, end) - override def mkString(sep: String): String = self.mkString(sep) - override def mkString: String = self.mkString - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = self.addString(b, start, sep, end) - override def addString(b: StringBuilder, sep: String): StringBuilder = self.addString(b, sep) - override def addString(b: StringBuilder): StringBuilder = self.addString(b) - override def stringPrefix : String = self.stringPrefix - override def view = self.view - override def view(from: Int, until: Int): TraversableView[A, Repr] = self.view(from, until) - // This appears difficult to override due to the type of WithFilter. - // override def withFilter(p: A => Boolean): WithFilter = self.withFilter(p) -} diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala deleted file mode 100644 index a2c881365505..000000000000 --- a/src/library/scala/collection/TraversableView.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import generic._ -import mutable.Builder - -/** A base trait for non-strict views of traversable collections. - * $traversableViewInfo - */ -trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] { } - -/** An object containing the necessary implicit definitions to make - * `TraversableView`s work. Its definitions are generally not accessed directly by clients. - */ -object TraversableView { - class NoBuilder[A] extends Builder[A, Nothing] { - def +=(elem: A): this.type = this - def iterator: Iterator[A] = Iterator.empty - def result() = throw new UnsupportedOperationException("TraversableView.Builder.result") - def clear() {} - } - type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]} - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, TraversableView[A, Traversable[_]]] = - new CanBuildFrom[Coll, A, TraversableView[A, Traversable[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } -} diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala deleted file mode 100644 index 9b146a0ecc1b..000000000000 --- a/src/library/scala/collection/TraversableViewLike.scala +++ /dev/null @@ -1,337 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import generic._ -import mutable.{ Builder, ArrayBuffer } -import scala.annotation.migration -import scala.language.implicitConversions - -trait ViewMkString[+A] { - self: Traversable[A] => - - // It is necessary to use thisSeq rather than toSeq to avoid cycles in the - // eager evaluation of vals in transformed view subclasses, see #4558. 
- protected[this] def thisSeq: Seq[A] = (new ArrayBuffer[A] ++= self).result - - // Have to overload all three to work around #4299. The overload - // is because mkString should force a view but toString should not. - override def mkString: String = mkString("") - override def mkString(sep: String): String = mkString("", sep, "") - override def mkString(start: String, sep: String, end: String): String = { - thisSeq.addString(new StringBuilder(), start, sep, end).toString - } - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - var first = true - b append start - for (x <- self) { - if (first) first = false else b append sep - b append x - } - b append end - b - } -} - -/** A template trait for non-strict views of traversable collections. - * $traversableViewInfo - * - * Implementation note: Methods such as `map` or `flatMap` on this view will not invoke the implicitly passed - * `Builder` factory, but will return a new view directly, to preserve by-name behavior. - * The new view is then cast to the factory's result type. This means that every `CanBuildFrom` - * that takes a `View` as its `From` type parameter must yield the same view (or a generic - * superclass of it) as its result parameter. If that assumption is broken, cast errors might result. - * - * @define viewInfo - * A view is a lazy version of some collection. Collection transformers such as - * `map` or `filter` or `++` do not traverse any elements when applied on a view. - * Instead they create a new view which simply records that fact that the operation - * needs to be applied. The collection elements are accessed, and the view operations are applied, - * when a non-view result is needed, or when the `force` method is called on a view. - * @define traversableViewInfo - * $viewInfo - * - * All views for traversable collections are defined by creating a new `foreach` method. 
- * - * @author Martin Odersky - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - * @tparam This the type of the view itself - */ -trait TraversableViewLike[+A, - +Coll, - +This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]] - extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A] -{ - self => - - protected def underlying: Coll - protected[this] def viewIdentifier: String = "" - protected[this] def viewIdString: String = "" - def viewToString = stringPrefix + viewIdString + "(...)" - override def stringPrefix = "TraversableView" - - override protected[this] def newBuilder: Builder[A, This] = - throw new UnsupportedOperationException(this+".newBuilder") - - def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = { - val b = bf(underlying) - b ++= this - b.result() - } - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */ - private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B] - - - /** The implementation base trait of this view. - * This trait and all its subtraits has to be re-implemented for each - * ViewLike class. - */ - trait Transformed[+B] extends TraversableView[B, Coll] { - def foreach[U](f: B => U): Unit - - lazy val underlying = self.underlying - final override protected[this] def viewIdString = self.viewIdString + viewIdentifier - - // Methods whose standard implementations use "isEmpty" need to be rewritten - // for views, else they will end up traversing twice in a situation like: - // xs.view.flatMap(f).headOption - override def headOption: Option[B] = { - for (x <- this) - return Some(x) - - None - } - - override def last: B = { - // (Should be) better than allocating a Some for every element. 
- var empty = true - var result: B = null.asInstanceOf[B] - for (x <- this) { - empty = false - result = x - } - if (empty) throw new NoSuchElementException("last of empty traversable") else result - } - - override def lastOption: Option[B] = { - // (Should be) better than allocating a Some for every element. - var empty = true - var result: B = null.asInstanceOf[B] - for (x <- this) { - empty = false - result = x - } - if (empty) None else Some(result) - } - - // XXX: As yet not dealt with, tail and init both call isEmpty. - override def stringPrefix = self.stringPrefix - override def toString = viewToString - } - - trait EmptyView extends Transformed[Nothing] { - final override def isEmpty = true - final override def foreach[U](f: Nothing => U): Unit = () - } - - /** A fall back which forces everything into a vector and then applies an operation - * on it. Used for those operations which do not naturally lend themselves to a view - */ - trait Forced[B] extends Transformed[B] { - protected[this] val forced: GenSeq[B] - def foreach[U](f: B => U) = forced foreach f - final override protected[this] def viewIdentifier = "C" - } - - trait Sliced extends Transformed[A] { - protected[this] val endpoints: SliceInterval - protected[this] def from = endpoints.from - protected[this] def until = endpoints.until - // protected def newSliced(_endpoints: SliceInterval): Transformed[A] = - // self.newSliced(endpoints.recalculate(_endpoints)) - - def foreach[U](f: A => U) { - var index = 0 - for (x <- self) { - if (from <= index) { - if (until <= index) return - f(x) - } - index += 1 - } - } - final override protected[this] def viewIdentifier = "S" - } - - trait Mapped[B] extends Transformed[B] { - protected[this] val mapping: A => B - def foreach[U](f: B => U) { - for (x <- self) - f(mapping(x)) - } - final override protected[this] def viewIdentifier = "M" - } - - trait FlatMapped[B] extends Transformed[B] { - protected[this] val mapping: A => GenTraversableOnce[B] - def 
foreach[U](f: B => U) { - for (x <- self) - for (y <- mapping(x).seq) - f(y) - } - final override protected[this] def viewIdentifier = "N" - } - - trait Appended[B >: A] extends Transformed[B] { - protected[this] val rest: GenTraversable[B] - def foreach[U](f: B => U) { - self foreach f - rest foreach f - } - final override protected[this] def viewIdentifier = "A" - } - - trait Prepended[B >: A] extends Transformed[B] { - protected[this] val fst: GenTraversable[B] - def foreach[U](f: B => U) { - fst foreach f - self foreach f - } - final override protected[this] def viewIdentifier = "A" - } - - trait Filtered extends Transformed[A] { - protected[this] val pred: A => Boolean - def foreach[U](f: A => U) { - for (x <- self) - if (pred(x)) f(x) - } - final override protected[this] def viewIdentifier = "F" - } - - trait TakenWhile extends Transformed[A] { - protected[this] val pred: A => Boolean - def foreach[U](f: A => U) { - for (x <- self) { - if (!pred(x)) return - f(x) - } - } - final override protected[this] def viewIdentifier = "T" - } - - trait DroppedWhile extends Transformed[A] { - protected[this] val pred: A => Boolean - def foreach[U](f: A => U) { - var go = false - for (x <- self) { - if (!go && !pred(x)) go = true - if (go) f(x) - } - } - final override protected[this] def viewIdentifier = "D" - } - - override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newAppended(xs.seq.toTraversable).asInstanceOf[That] - - override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(xs.seq.toTraversable).asInstanceOf[That] - - // Need second one because of optimization in TraversableLike - override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = - newPrepended(xs).asInstanceOf[That] - - override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = { - newMapped(f).asInstanceOf[That] -// 
val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newMapped(f).asInstanceOf[That] -// else super.map[B, That](f)(bf) - } - - override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That = - filter(pf.isDefinedAt).map(pf)(bf) - - override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = { - newFlatMapped(f).asInstanceOf[That] -// was: val b = bf(repr) -// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That] -// else super.flatMap[B, That](f)(bf) - } - override def flatten[B](implicit asTraversable: A => /*<: GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] - protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] - protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B] - protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] - protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] - protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered - protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced - protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile - protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile - - protected def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n)) - protected def newDropped(n: Int): Transformed[A] = 
newSliced(SliceInterval(n, Int.MaxValue)) - - override def filter(p: A => Boolean): This = newFiltered(p) - override def withFilter(p: A => Boolean): This = newFiltered(p) - override def partition(p: A => Boolean): (This, This) = (newFiltered(p), newFiltered(!p(_))) - override def init: This = newSliced(SliceInterval(0, size - 1)) // !!! can't call size here. - override def drop(n: Int): This = newDropped(n) - override def take(n: Int): This = newTaken(n) - override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until)) - override def dropWhile(p: A => Boolean): This = newDroppedWhile(p) - override def takeWhile(p: A => Boolean): This = newTakenWhile(p) - override def span(p: A => Boolean): (This, This) = (newTakenWhile(p), newDroppedWhile(p)) - override def splitAt(n: Int): (This, This) = (newTaken(n), newDropped(n)) - - override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That = - newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That] - - @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") - override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That = - newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That] - - override def groupBy[K](f: A => K): immutable.Map[K, This] = - thisSeq groupBy f mapValues (xs => newForced(xs)) - - override def unzip[A1, A2](implicit asPair: A => (A1, A2)) = - (newMapped(x => asPair(x)._1), newMapped(x => asPair(x)._2)) // TODO - Performance improvements. - - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)) = - (newMapped(x => asTriple(x)._1), newMapped(x => asTriple(x)._2), newMapped(x => asTriple(x)._3)) // TODO - Performance improvements. 
- - override def filterNot(p: (A) => Boolean): This = - newFiltered(a => !(p(a))) - - override def inits: Iterator[This] = - thisSeq.inits.map(as => newForced(as).asInstanceOf[This]) - - override def tails: Iterator[This] = - thisSeq.tails.map(as => newForced(as).asInstanceOf[This]) - - override def tail: This = - // super.tail would also work as it is currently implemented in terms of drop(Int). - if (isEmpty) super.tail else newDropped(1) - - override def toString = viewToString -} diff --git a/src/library/scala/collection/View.scala b/src/library/scala/collection/View.scala new file mode 100644 index 000000000000..f304b8931f14 --- /dev/null +++ b/src/library/scala/collection/View.scala @@ -0,0 +1,535 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.{nowarn, tailrec} +import scala.collection.mutable.{ArrayBuffer, Builder} +import scala.collection.immutable.LazyList + +/** Views are collections whose transformation operations are non strict: the resulting elements + * are evaluated only when the view is effectively traversed (e.g. using `foreach` or `foldLeft`), + * or when the view is converted to a strict collection type (using the `to` operation). 
+ * @define coll view + * @define Coll `View` + */ +trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { + + override def view: View[A] = this + + override def iterableFactory: IterableFactory[View] = View + + override def empty: scala.collection.View[A] = iterableFactory.empty + + override def toString: String = className + "()" + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "View" + + @deprecated("Views no longer know about their underlying collection type; .force always returns an IndexedSeq", "2.13.0") + @`inline` def force: IndexedSeq[A] = toIndexedSeq +} + +/** This object reifies operations on views as case classes + * + * @define Coll View + * @define coll view + */ +@SerialVersionUID(3L) +object View extends IterableFactory[View] { + + /** + * @return A `View[A]` whose underlying iterator is provided by the `it` parameter-less function. + * + * @param it Function creating the iterator to be used by the view. This function must always return + * a fresh `Iterator`, otherwise the resulting view will be effectively iterable only once. + * + * @tparam A View element type + */ + def fromIteratorProvider[A](it: () => Iterator[A]): View[A] = new AbstractView[A] { + def iterator = it() + } + + /** + * @return A view iterating over the given `Iterable` + * + * @param it The `IterableOnce` to view. A proper `Iterable` is used directly. If it is really only + * `IterableOnce` it gets memoized on the first traversal. 
+ * + * @tparam E View element type + */ + def from[E](it: IterableOnce[E]): View[E] = it match { + case it: View[E] => it + case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) + case _ => LazyList.from(it).view + } + + def empty[A]: View[A] = Empty + + def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + + override def apply[A](xs: A*): View[A] = new Elems(xs: _*) + + /** The empty view */ + @SerialVersionUID(3L) + case object Empty extends AbstractView[Nothing] { + def iterator = Iterator.empty + override def knownSize = 0 + override def isEmpty: Boolean = true + } + + /** A view with exactly one element */ + @SerialVersionUID(3L) + class Single[A](a: A) extends AbstractView[A] { + def iterator: Iterator[A] = Iterator.single(a) + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + } + + /** A view with given elements */ + @SerialVersionUID(3L) + class Elems[A](xs: A*) extends AbstractView[A] { + def iterator = xs.iterator + override def knownSize = xs.knownSize + override def isEmpty: Boolean = xs.isEmpty + } + + /** A view containing the results of some element computation a number of times. */ + @SerialVersionUID(3L) + class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { + def iterator = Iterator.fill(n)(elem) + override def knownSize: Int = 0 max n + override def isEmpty: Boolean = n <= 0 + } + + /** A view containing values of a given function over a range of integer values starting from 0. 
*/ + @SerialVersionUID(3L) + class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { + def iterator: Iterator[A] = Iterator.tabulate(n)(f) + override def knownSize: Int = 0 max n + override def isEmpty: Boolean = n <= 0 + } + + /** A view containing repeated applications of a function to a start value */ + @SerialVersionUID(3L) + class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { + def iterator: Iterator[A] = Iterator.iterate(start)(f).take(len) + override def knownSize: Int = 0 max len + override def isEmpty: Boolean = len <= 0 + } + + /** A view that uses a function `f` to produce elements of type `A` and update + * an internal state `S`. + */ + @SerialVersionUID(3L) + class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { + def iterator: Iterator[A] = Iterator.unfold(initial)(f) + } + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableOps[A] = IterableOps[A, AnyConstr, _] + + /** A view that filters an underlying collection. 
*/ + @SerialVersionUID(3L) + class Filter[A](val underlying: SomeIterableOps[A], val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { + def iterator = underlying.iterator.filterImpl(p, isFlipped) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + object Filter { + def apply[A](underlying: Iterable[A], p: A => Boolean, isFlipped: Boolean): Filter[A] = + underlying match { + case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) + case _ => new Filter(underlying, p, isFlipped) + } + } + + /** A view that removes the duplicated elements as determined by the transformation function `f` */ + @SerialVersionUID(3L) + class DistinctBy[A, B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.distinctBy(f) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A1] { + def iterator: AbstractIterator[A1] = new AbstractIterator[A1] { + private[this] val self = underlying.iterator + private[this] var hd: A1 = _ + private[this] var hdDefined: Boolean = false + def hasNext = hdDefined || { + @tailrec + def findNext(): Boolean = + if (self.hasNext) { + f(self.next()) match { + case Left(a1) => hd = a1; hdDefined = true; true + case Right(_) => findNext() + } + } else false + findNext() + } + def next() = + if (hasNext) { + hdDefined = false + hd + } else Iterator.empty.next() + } + } + + @SerialVersionUID(3L) + class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A2] { + def iterator: AbstractIterator[A2] = new AbstractIterator[A2] { + private[this] 
val self = underlying.iterator + private[this] var hd: A2 = _ + private[this] var hdDefined: Boolean = false + def hasNext = hdDefined || { + @tailrec + def findNext(): Boolean = + if (self.hasNext) { + f(self.next()) match { + case Left(_) => findNext() + case Right(a2) => hd = a2; hdDefined = true; true + } + } else false + findNext() + } + def next() = + if (hasNext) { + hdDefined = false + hd + } else Iterator.empty.next() + } + } + + /** A view that drops leading elements of the underlying collection. */ + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = underlying.iterator.drop(n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) (size - normN) max 0 else -1 + } + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that drops trailing elements of the underlying collection. */ + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = dropRightIterator(underlying.iterator, n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) (size - normN) max 0 else -1 + } + override def isEmpty: Boolean = + if(knownSize >= 0) knownSize == 0 + else iterator.isEmpty + } + + @SerialVersionUID(3L) + class DropWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { + def iterator = underlying.iterator.dropWhile(p) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that takes leading elements of the underlying collection. 
*/ + @SerialVersionUID(3L) + class Take[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = underlying.iterator.take(n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) size min normN else -1 + } + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that takes trailing elements of the underlying collection. */ + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = takeRightIterator(underlying.iterator, n) + protected val normN = n max 0 + override def knownSize = { + val size = underlying.knownSize + if (size >= 0) size min normN else -1 + } + override def isEmpty: Boolean = + if(knownSize >= 0) knownSize == 0 + else iterator.isEmpty + } + + @SerialVersionUID(3L) + class TakeWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.takeWhile(p) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class ScanLeft[+A, +B](underlying: SomeIterableOps[A], z: B, op: (B, A) => B) extends AbstractView[B] { + def iterator: Iterator[B] = underlying.iterator.scanLeft(z)(op) + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size + 1 else -1 + } + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that maps elements of the underlying collection. */ + @SerialVersionUID(3L) + class Map[+A, +B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[B] { + def iterator = underlying.iterator.map(f) + override def knownSize = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + /** A view that flatmaps elements of the underlying collection. 
*/ + @SerialVersionUID(3L) + class FlatMap[A, B](underlying: SomeIterableOps[A], f: A => IterableOnce[B]) extends AbstractView[B] { + def iterator = underlying.iterator.flatMap(f) + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + /** A view that collects elements of the underlying collection. */ + @SerialVersionUID(3L) + class Collect[+A, B](underlying: SomeIterableOps[A], pf: PartialFunction[A, B]) extends AbstractView[B] { + def iterator = underlying.iterator.collect(pf) + } + + /** A view that concatenates elements of the prefix collection or iterator with the elements + * of the suffix collection or iterator. + */ + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIterableOps[A], suffix: SomeIterableOps[A]) extends AbstractView[A] { + def iterator = prefix.iterator ++ suffix.iterator + override def knownSize = { + val prefixSize = prefix.knownSize + if (prefixSize >= 0) { + val suffixSize = suffix.knownSize + if (suffixSize >= 0) prefixSize + suffixSize + else -1 + } + else -1 + } + override def isEmpty: Boolean = prefix.isEmpty && suffix.isEmpty + } + + /** A view that zips elements of the underlying collection with the elements + * of another collection. + */ + @SerialVersionUID(3L) + class Zip[A, B](underlying: SomeIterableOps[A], other: Iterable[B]) extends AbstractView[(A, B)] { + def iterator = underlying.iterator.zip(other) + override def knownSize = { + val s1 = underlying.knownSize + if (s1 == 0) 0 else { + val s2 = other.knownSize + if (s2 == 0) 0 else s1 min s2 + } + } + override def isEmpty: Boolean = underlying.isEmpty || other.isEmpty + } + + /** A view that zips elements of the underlying collection with the elements + * of another collection. If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. 
+ */ + @SerialVersionUID(3L) + class ZipAll[A, B](underlying: SomeIterableOps[A], other: Iterable[B], thisElem: A, thatElem: B) extends AbstractView[(A, B)] { + def iterator = underlying.iterator.zipAll(other, thisElem, thatElem) + override def knownSize = { + val s1 = underlying.knownSize + if(s1 == -1) -1 else { + val s2 = other.knownSize + if(s2 == -1) -1 else s1 max s2 + } + } + override def isEmpty: Boolean = underlying.isEmpty && other.isEmpty + } + + /** A view that appends an element to its elements */ + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIterableOps[A], elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = new Concat(underlying, new View.Single(elem)).iterator + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size + 1 else -1 + } + override def isEmpty: Boolean = false + } + + /** A view that prepends an element to its elements */ + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIterableOps[A]) extends AbstractView[A] { + def iterator: Iterator[A] = new Concat(new View.Single(elem), underlying).iterator + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size + 1 else -1 + } + override def isEmpty: Boolean = false + } + + @SerialVersionUID(3L) + class Updated[A](underlying: SomeIterableOps[A], index: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = new AbstractIterator[A] { + private[this] val it = underlying.iterator + private[this] var i = 0 + def next(): A = { + val value = if (i == index) { it.next(); elem } else it.next() + i += 1 + value + } + def hasNext: Boolean = + if(it.hasNext) true + else if(index >= i) throw new IndexOutOfBoundsException(index.toString) + else false + } + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: 
IterableOnce[A], replaced: Int) extends AbstractView[A] { + // we may be unable to traverse `other` more than once, so we need to cache it if that's the case + private val _other: Iterable[A] = other match { + case other: Iterable[A] => other + case other => LazyList.from(other) + } + + def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) + override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty + } + + @SerialVersionUID(3L) + class ZipWithIndex[A](underlying: SomeIterableOps[A]) extends AbstractView[(A, Int)] { + def iterator: Iterator[(A, Int)] = underlying.iterator.zipWithIndex + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class PadTo[A](underlying: SomeIterableOps[A], len: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.padTo(len, elem) + + override def knownSize: Int = { + val size = underlying.knownSize + if (size >= 0) size max len else -1 + } + override def isEmpty: Boolean = underlying.isEmpty && len <= 0 + } + + private[collection] def takeRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { + val k = it.knownSize + if(k == 0 || n <= 0) Iterator.empty + else if(n == Int.MaxValue) it + else if(k > 0) it.drop((k-n) max 0) + else new TakeRightIterator[A](it, n) + } + + private final class TakeRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private[this] var len: Int = -1 + private[this] var pos: Int = 0 + private[this] var buf: ArrayBuffer[AnyRef] = _ + def init(): Unit = if(buf eq null) { + buf = new ArrayBuffer[AnyRef](maxlen min 256) + len = 0 + while(underlying.hasNext) { + val n = underlying.next().asInstanceOf[AnyRef] + if(pos >= buf.length) buf.addOne(n) + else buf(pos) = n + pos += 1 + if(pos 
== maxlen) pos = 0 + len += 1 + } + underlying = null + if(len > maxlen) len = maxlen + pos = pos - len + if(pos < 0) pos += maxlen + } + override def knownSize = len + def hasNext: Boolean = { + init() + len > 0 + } + def next(): A = { + init() + if(len == 0) Iterator.empty.next() + else { + val x = buf(pos).asInstanceOf[A] + pos += 1 + if(pos == maxlen) pos = 0 + len -= 1 + x + } + } + override def drop(n: Int): Iterator[A] = { + init() + if (n > 0) { + len = (len - n) max 0 + pos = (pos + n) % maxlen + } + this + } + } + + private[collection] def dropRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { + if(n <= 0) it + else { + val k = it.knownSize + if(k >= 0) it.take(k - n) + else new DropRightIterator[A](it, n) + } + } + + private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet + private[this] var pos: Int = 0 + private[this] var buf: ArrayBuffer[AnyRef] = _ + def init(): Unit = if(buf eq null) { + buf = new ArrayBuffer[AnyRef](maxlen min 256) + while(pos < maxlen && underlying.hasNext) { + buf.addOne(underlying.next().asInstanceOf[AnyRef]) + pos += 1 + } + if(!underlying.hasNext) len = 0 + pos = 0 + } + override def knownSize = len + def hasNext: Boolean = { + init() + len != 0 + } + def next(): A = { + if(!hasNext) Iterator.empty.next() + else { + val x = buf(pos).asInstanceOf[A] + if(len == -1) { + buf(pos) = underlying.next().asInstanceOf[AnyRef] + if(!underlying.hasNext) len = 0 + } else len -= 1 + pos += 1 + if(pos == maxlen) pos = 0 + x + } + } + } +} + +/** Explicit instantiation of the `View` trait to reduce class file size in subclasses. 
*/ +@SerialVersionUID(3L) +abstract class AbstractView[+A] extends scala.collection.AbstractIterable[A] with View[A] diff --git a/src/library/scala/collection/WithFilter.scala b/src/library/scala/collection/WithFilter.scala new file mode 100644 index 000000000000..7a68275336ff --- /dev/null +++ b/src/library/scala/collection/WithFilter.scala @@ -0,0 +1,70 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. `List`) + * + * @define coll collection + */ +@SerialVersionUID(3L) +abstract class WithFilter[+A, +CC[_]] extends Serializable { + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given function `f` to each element of the filtered outer $coll + * and collecting the results. + */ + def map[B](f: A => B): CC[B] + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll containing this `WithFilter` instance that satisfy + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given collection-valued function `f` to each element + * of the filtered outer $coll and + * concatenating the results. 
+ */ + def flatMap[B](f: A => IterableOnce[B]): CC[B] + + /** Applies a function `f` to all elements of the `filtered` outer $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + */ + def foreach[U](f: A => U): Unit + + /** Further refines the filter for this `filtered` $coll. + * + * @param q the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this $coll which + * also satisfy both `p` and `q` predicates. + */ + def withFilter(q: A => Boolean): WithFilter[A, CC] + +} diff --git a/src/library/scala/collection/concurrent/BasicNode.java b/src/library/scala/collection/concurrent/BasicNode.java index c6ec91e4fde8..b6a628d1295e 100644 --- a/src/library/scala/collection/concurrent/BasicNode.java +++ b/src/library/scala/collection/concurrent/BasicNode.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/collection/concurrent/CNodeBase.java b/src/library/scala/collection/concurrent/CNodeBase.java index 9d7aced75e2b..4033c12af449 100644 --- a/src/library/scala/collection/concurrent/CNodeBase.java +++ b/src/library/scala/collection/concurrent/CNodeBase.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,22 +16,22 @@ abstract class CNodeBase extends MainNode { - @SuppressWarnings("rawtypes") - public static final AtomicIntegerFieldUpdater updater = - AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize"); + @SuppressWarnings("unchecked") + public static final AtomicIntegerFieldUpdater> updater = + AtomicIntegerFieldUpdater.newUpdater((Class>) (Class) CNodeBase.class, "csize"); public volatile int csize = -1; public boolean CAS_SIZE(int oldval, int nval) { - return updater.compareAndSet(this, oldval, nval); + return updater.compareAndSet(this, oldval, nval); } public void WRITE_SIZE(int nval) { - updater.set(this, nval); + updater.set(this, nval); } public int READ_SIZE() { - return updater.get(this); + return updater.get(this); } -} \ No newline at end of file +} diff --git a/src/library/scala/collection/concurrent/Gen.java b/src/library/scala/collection/concurrent/Gen.java index 07af2983f32d..548c1892321f 100644 --- a/src/library/scala/collection/concurrent/Gen.java +++ b/src/library/scala/collection/concurrent/Gen.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/collection/concurrent/INodeBase.java b/src/library/scala/collection/concurrent/INodeBase.java index 30fa26973d8b..b16265c68ea3 100644 --- a/src/library/scala/collection/concurrent/INodeBase.java +++ b/src/library/scala/collection/concurrent/INodeBase.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,22 +16,24 @@ abstract class INodeBase extends BasicNode { - @SuppressWarnings("rawtypes") - public static final AtomicReferenceFieldUpdater updater = - AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode"); + @SuppressWarnings("unchecked") + public static final AtomicReferenceFieldUpdater, MainNode> updater = + AtomicReferenceFieldUpdater.newUpdater((Class>) (Class) INodeBase.class, (Class>) (Class) MainNode.class, "mainnode"); - public static final Object RESTART = new Object(); + static final Object RESTART = new Object(); + + static final Object NO_SUCH_ELEMENT_SENTINEL = new Object(); public volatile MainNode mainnode = null; public final Gen gen; public INodeBase(Gen generation) { - gen = generation; + gen = generation; } public BasicNode prev() { - return null; + return null; } -} \ No newline at end of file +} diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java index c830a19aefcc..1bfc11594ec9 100644 --- a/src/library/scala/collection/concurrent/MainNode.java +++ b/src/library/scala/collection/concurrent/MainNode.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,28 +16,31 @@ abstract class MainNode extends BasicNode { - @SuppressWarnings("rawtypes") - public static final AtomicReferenceFieldUpdater updater = - AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev"); + @SuppressWarnings("unchecked") + public static final AtomicReferenceFieldUpdater, MainNode> updater = + AtomicReferenceFieldUpdater.newUpdater((Class>) (Class) MainNode.class, (Class>) (Class) MainNode.class, "prev"); public volatile MainNode prev = null; public abstract int cachedSize(Object ct); + // standard contract + public abstract int knownSize(); + public boolean CAS_PREV(MainNode oldval, MainNode nval) { - return updater.compareAndSet(this, oldval, nval); + return updater.compareAndSet(this, oldval, nval); } public void WRITE_PREV(MainNode nval) { - updater.set(this, nval); + updater.set(this, nval); } // do we need this? unclear in the javadocs... // apparently not - volatile reads are supposed to be safe - // irregardless of whether there are concurrent ARFU updates + // regardless of whether there are concurrent ARFU updates @Deprecated @SuppressWarnings("unchecked") public MainNode READ_PREV() { - return updater.get(this); + return (MainNode) updater.get(this); } -} \ No newline at end of file +} diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index d475703d88ba..291f85513b58 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,48 +13,100 @@ package scala package collection.concurrent +import scala.annotation.tailrec + /** A template trait for mutable maps that allow concurrent access. 
- * - * $concurrentmapinfo - * - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] - * section on `Concurrent Maps` for more information. - * - * @tparam A the key type of the map - * @tparam B the value type of the map - * - * @define Coll `concurrent.Map` - * @define coll concurrent map - * @define concurrentmapinfo - * This is a base trait for all Scala concurrent map implementations. It - * provides all of the methods a `Map` does, with the difference that all the - * changes are atomic. It also describes methods specific to concurrent maps. - * - * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. - * - * @define atomicop - * This is an atomic operation. - */ -trait Map[A, B] extends scala.collection.mutable.Map[A, B] { + * + * $concurrentmapinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] + * section on `Concurrent Maps` for more information. + * + * @tparam K the key type of the map + * @tparam V the value type of the map + * + * @define Coll `concurrent.Map` + * @define coll concurrent map + * @define concurrentmapinfo + * This is a base trait for all Scala concurrent map implementations. It + * provides all of the methods a `Map` does, with the difference that all the + * changes are atomic. It also describes methods specific to concurrent maps. + * + * @define atomicop + * This is an atomic operation. + */ +trait Map[K, V] extends scala.collection.mutable.Map[K, V] { /** - * Associates the given key with a given value, unless the key was already - * associated with some other value. 
- * - * $atomicop - * - * @param k key with which the specified value is to be associated with - * @param v value to be associated with the specified key - * @return `Some(oldvalue)` if there was a value `oldvalue` previously - * associated with the specified key, or `None` if there was no - * mapping for the specified key - */ - def putIfAbsent(k: A, v: B): Option[B] + * Associates the given key with a given value, unless the key was already + * associated with some other value. + * + * $atomicop + * + * @param k key with which the specified value is to be associated with + * @param v value to be associated with the specified key + * @return `Some(oldvalue)` if there was a value `oldvalue` previously + * associated with the specified key, or `None` if there was no + * mapping for the specified key + */ + def putIfAbsent(k: K, v: V): Option[V] + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + def remove(k: K, v: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldvalue value expected to be associated with the specified key + * if replacing is to happen + * @param newvalue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + def replace(k: K, oldvalue: V, newvalue: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped + * to some value. 
+ * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param v value to be associated with the specified key + * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise + */ + def replace(k: K, v: V): Option[V] + + override def getOrElseUpdate(key: K, @deprecatedName("op", since="2.13.13") defaultValue: => V): V = get(key) match { + case Some(v) => v + case None => + val v = defaultValue + putIfAbsent(key, v) match { + case Some(ov) => ov + case None => v + } + } /** * Removes the entry for the specified key if it's currently mapped to the - * specified value. + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. * * $atomicop * @@ -63,42 +115,76 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] { * the removal is to take place * @return `true` if the removal took place, `false` otherwise */ - def remove(k: A, v: B): Boolean + // TODO: make part of the API in a future version + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) /** * Replaces the entry for the given key only if it was previously mapped to - * a given value. + * a given value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead. 
* * $atomicop * * @param k key for which the entry should be replaced - * @param oldvalue value expected to be associated with the specified key + * @param oldValue value expected to be associated with the specified key * if replacing is to happen - * @param newvalue value to be associated with the specified key + * @param newValue value to be associated with the specified key * @return `true` if the entry was replaced, `false` otherwise */ - def replace(k: A, oldvalue: B, newvalue: B): Boolean + // TODO: make part of the API in a future version + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) /** - * Replaces the entry for the given key only if it was previously mapped - * to some value. + * Update a mapping for the specified key and its current optionally mapped value + * (`Some` if there is current mapping, `None` if not). * - * $atomicop + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. * - * @param k key for which the entry should be replaced - * @param v value to be associated with the specified key - * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise + * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. 
+ * + * @param key the key value + * @param remappingFunction a function that receives current optionally mapped value and return a new mapping + * @return the new value associated with the specified key */ - def replace(k: A, v: B): Option[B] + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) - override def getOrElseUpdate(key: A, op: =>B): B = get(key) match { - case Some(v) => v - case None => - val v = op - putIfAbsent(key, v) match { - case Some(nv) => nv - case None => v + @tailrec + private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = get(key) + val nextValue = remappingFunction(previousValue) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } + } + updateWithAux(key)(remappingFunction) + } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (!p(k, v)) removeRefEq(k, v) + } + this } + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replaceRefEq(k, v, f(k, v)) + } + this + } } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 4e449514d2c8..ddc5379f1f25 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,18 +15,21 @@ package collection package concurrent import java.util.concurrent.atomic._ -import scala.collection.parallel.mutable.ParTrieMap -import scala.util.hashing.Hashing -import scala.util.control.ControlThrowable -import generic._ +import scala.{unchecked => uc} import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{List, Nil} +import scala.collection.mutable.GrowableBuilder +import scala.util.Try +import scala.util.hashing.Hashing -private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) { +private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { import INodeBase._ WRITE(bn) - def this(g: Gen) = this(null, g) + def this(g: Gen, equiv: Equiv[K]) = this(null, g, equiv) def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) @@ -84,22 +87,22 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2) private def inode(cn: MainNode[K, V]) = { - val nin = new INode[K, V](gen) + val nin = new INode[K, V](gen, equiv) nin.WRITE(cn) nin } def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = { - val nin = new INode[K, V](ngen) + val nin = new INode[K, V](ngen, equiv) val main = GCAS_READ(ct) nin.WRITE(main) nin } /** Inserts a key value pair, overwriting the old pair if the keys match. - * - * @return true if successful, false otherwise - */ + * + * @return true if successful, false otherwise + */ @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = { val m = GCAS_READ(ct) // use -Yinline! 
@@ -113,23 +116,24 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends if ((bmp & flag) != 0) { // 1a) insert below cn.array(pos) match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) else false } - case sn: SNode[K, V] => - if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct) + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) else { val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) GCAS(cn, nn, ct) } + case basicNode => throw new MatchError(basicNode) } } else { val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) GCAS(cn, ncnode, ct) } case tn: TNode[K, V] => @@ -138,15 +142,24 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends case ln: LNode[K, V] => // 3) an l-node val nn = ln.inserted(k, v) GCAS(ln, nn, ct) + case mainNode => throw new MatchError(mainNode) } } + + /** Inserts a new key value pair, given that a specific condition is met. 
- * - * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v` - * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) - */ - @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + * + * @param cond KEY_PRESENT_OR_ABSENT - don't care if the key was there, insert or overwrite + * KEY_ABSENT - key wasn't there, insert only, do not overwrite + * KEY_PRESENT - key was there, overwrite only, do not insert + * other value `v` - only overwrite if the current value is this + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value + * @param hc the hashcode of `k` + * + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { val m = GCAS_READ(ct) // use -Yinline! 
m match { @@ -159,19 +172,19 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends if ((bmp & flag) != 0) { // 1a) insert below cn.array(pos) match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) else null } - case sn: SNode[K, V] => cond match { - case null => + case sn: SNode[K, V] @uc => cond match { + case INode.KEY_PRESENT_OR_ABSENT => if (sn.hc == hc && equal(sn.k, k, ct)) { - if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null } else { val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) if (GCAS(cn, nn, ct)) None else null } @@ -179,7 +192,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v) else { val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) if (GCAS(cn, nn, ct)) None else null } @@ -188,15 +201,16 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null } else None case otherv 
=> - if (sn.hc == hc && equal(sn.k, k, ct) && sn.v == otherv) { + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null } else None } + case basicNode => throw new MatchError(basicNode) } } else cond match { - case null | INode.KEY_ABSENT => + case INode.KEY_PRESENT_OR_ABSENT | INode.KEY_ABSENT => val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) - val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) if (GCAS(cn, ncnode, ct)) None else null case INode.KEY_PRESENT => None case otherv => None @@ -210,7 +224,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends GCAS(ln, nn, ct) } cond match { - case null => + case INode.KEY_PRESENT_OR_ABSENT => val optv = ln.get(k) if (insertln()) optv else null case INode.KEY_ABSENT => @@ -225,17 +239,22 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends } case otherv => ln.get(k) match { - case Some(v0) if v0 == otherv => if (insertln()) Some(otherv.asInstanceOf[V]) else null + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null case _ => None } } + case mainNode => throw new MatchError(mainNode) } } /** Looks up the value associated with the key. - * - * @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise - */ + * + * @param hc the hashcode of `k` + * + * @return NO_SUCH_ELEMENT_SENTINEL if no value has been found, RESTART if the operation wasn't successful, + * or any other value otherwise + */ @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = { val m = GCAS_READ(ct) // use -Yinline! 
@@ -244,45 +263,58 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends val idx = (hc >>> lev) & 0x1f val flag = 1 << idx val bmp = cn.bitmap - if ((bmp & flag) == 0) null // 1a) bitmap shows no binding + if ((bmp & flag) == 0) NO_SUCH_ELEMENT_SENTINEL // 1a) bitmap shows no binding else { // 1b) bitmap contains a value - descend val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) - else RESTART // used to be throw RestartException + else RESTART } - case sn: SNode[K, V] => // 2) singleton node + case sn: SNode[K, V] @uc => // 2) singleton node if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] - else null + else NO_SUCH_ELEMENT_SENTINEL + case basicNode => throw new MatchError(basicNode) } } - case tn: TNode[K, V] => // 3) non-live node + case tn: TNode[_, _] => // 3) non-live node def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { clean(parent, ct, lev - 5) - RESTART // used to be throw RestartException + RESTART } else { if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] - else null + else NO_SUCH_ELEMENT_SENTINEL } cleanReadOnly(tn) case ln: LNode[K, V] => // 5) an l-node - ln.get(k).asInstanceOf[Option[AnyRef]].orNull + ln.get(k).asInstanceOf[Option[AnyRef]].getOrElse(NO_SUCH_ELEMENT_SENTINEL) + case mainNode => throw new MatchError(mainNode) } } /** Removes the key associated with the given value. 
- * - * @param v if null, will remove the key irregardless of the value; otherwise removes only if binding contains that exact key and value - * @return null if not successful, an Option[V] indicating the previous value otherwise - */ - def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { - val m = GCAS_READ(ct) // use -Yinline! - - m match { + * + * @param hc the hashcode of `k` + * + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + def rec_remove( + k: K, + v: V, + removalPolicy: Int, + hc: Int, + lev: Int, + parent: INode[K, V], + startgen: Gen, + ct: TrieMap[K, V]): Option[V] = { + + GCAS_READ(ct) match { case cn: CNode[K, V] => val idx = (hc >>> lev) & 0x1f val bmp = cn.bitmap @@ -292,24 +324,25 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends val pos = Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) val res = sub match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct) + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct) + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) else null } - case sn: SNode[K, V] => - if (sn.hc == hc && equal(sn.k, k, ct) && (v == null || sn.v == v)) { + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) if (GCAS(cn, ncn, ct)) Some(sn.v) else null } else None + case basicNode => throw new MatchError(basicNode) } if 
(res == None || (res eq null)) res else { - @tailrec def cleanParent(nonlive: AnyRef) { - val pm = parent.GCAS_READ(ct) - pm match { + @tailrec def cleanParent(nonlive: AnyRef): Unit = { + val cn = parent.GCAS_READ(ct) + cn match { case cn: CNode[K, V] => val idx = (hc >>> (lev - 5)) & 0x1f val bmp = cn.bitmap @@ -318,8 +351,8 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends else { val pos = Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) - if (sub eq this) nonlive match { - case tn: TNode[K, V] => + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) if (!parent.GCAS(cn, ncn, ct)) if (ct.readRoot().gen == startgen) cleanParent(nonlive) @@ -342,20 +375,21 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends clean(parent, ct, lev - 5) null case ln: LNode[K, V] => - if (v == null) { + if (removalPolicy == RemovalPolicy.Always) { val optv = ln.get(k) val nn = ln.removed(k, ct) if (GCAS(ln, nn, ct)) optv else null } else ln.get(k) match { - case optv @ Some(v0) if v0 == v => + case optv @ Some(v0) if RemovalPolicy.shouldRemove(removalPolicy)(v, v0) => val nn = ln.removed(k, ct) if (GCAS(ln, nn, ct)) optv else null case _ => None } + case mainNode => throw new MatchError(mainNode) } } - private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int) { + private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int): Unit = { val m = nd.GCAS_READ(ct) m match { case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) @@ -365,10 +399,11 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null - def cachedSize(ct: TrieMap[K, V]): Int = { - val m = GCAS_READ(ct) - m.cachedSize(ct) - } + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + 
GCAS_READ(ct).knownSize() /* this is a quiescent method! */ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { @@ -383,13 +418,17 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends private[concurrent] object INode { - val KEY_PRESENT = new AnyRef - val KEY_ABSENT = new AnyRef - - def newRootNode[K, V] = { + //////////////////////////////////////////////////////////////////////////////////////////////////// + // Arguments for `cond` argument in TrieMap#rec_insertif + //////////////////////////////////////////////////////////////////////////////////////////////////// + final val KEY_PRESENT = new AnyRef + final val KEY_ABSENT = new AnyRef + final val KEY_PRESENT_OR_ABSENT = new AnyRef + + def newRootNode[K, V](equiv: Equiv[K]) = { val gen = new Gen val cn = new CNode[K, V](0, new Array(0), gen) - new INode[K, V](cn, gen) + new INode[K, V](cn, gen, equiv) } } @@ -397,10 +436,12 @@ private[concurrent] object INode { private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { WRITE_PREV(p) - def string(lev: Int) = throw new UnsupportedOperationException + def string(lev: Int): Nothing = throw new UnsupportedOperationException def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + def knownSize: Int = throw new UnsupportedOperationException + override def toString = "FailedNode(%s)".format(p) } @@ -411,48 +452,72 @@ private[concurrent] trait KVNode[K, V] { private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) -extends BasicNode with KVNode[K, V] { - final def copy = new SNode(k, v, hc) - final def copyTombed = new TNode(k, v, hc) - final def copyUntombed = new SNode(k, v, hc) - final def kvPair = (k, v) - final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) + extends BasicNode with KVNode[K, V] { + def copy = new SNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new 
SNode(k, v, hc) + def kvPair = (k, v) + def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) } - +// Tomb Node, used to ensure proper ordering during removals private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) -extends MainNode[K, V] with KVNode[K, V] { - final def copy = new TNode(k, v, hc) - final def copyTombed = new TNode(k, v, hc) - final def copyUntombed = new SNode(k, v, hc) - final def kvPair = (k, v) - final def cachedSize(ct: AnyRef): Int = 1 - final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) + extends MainNode[K, V] with KVNode[K, V] { + def copy = new TNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def cachedSize(ct: AnyRef): Int = 1 + def knownSize: Int = 1 + def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) } +// List Node, leaf node that handles hash collisions +private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) + extends MainNode[K, V] { + + def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv) + + def this(k1: K, v1: V, k2: K, v2: V, equiv: Equiv[K]) = + this(if (equiv.equiv(k1, k2)) (k2 -> v2) :: Nil else (k1 -> v1) :: (k2 -> v2) :: Nil, equiv) + + def inserted(k: K, v: V) = { + var k0: K = k + @tailrec + def remove(elems: List[(K, V)], acc: List[(K, V)]): List[(K, V)] = { + if (elems.isEmpty) acc + else if (equiv.equiv(elems.head._1, k)) { + k0 = elems.head._1 + acc ::: elems.tail + } else remove(elems.tail, elems.head :: acc) + } + val e = remove(entries, Nil) + new LNode((k0 -> v) :: e, equiv) + } -private[collection] final class LNode[K, V](final val listmap: immutable.ListMap[K, V]) -extends MainNode[K, V] { - def this(k: K, v: V) = this(immutable.ListMap(k -> v)) - def this(k1: K, v1: V, k2: K, v2: V) = this(immutable.ListMap(k1 -> v1, k2 -> v2)) - def inserted(k: K, v: V) = new LNode(listmap + 
((k, v))) def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { - val updmap = listmap - k - if (updmap.size > 1) new LNode(updmap) + val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k)) + if (updmap.sizeIs > 1) new LNode(updmap, equiv) else { val (k, v) = updmap.iterator.next() new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses } } - def get(k: K) = listmap.get(k) - def cachedSize(ct: AnyRef): Int = listmap.size - def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", ")) -} + def get(k: K): Option[V] = entries.find(entry => equiv.equiv(entry._1, k)).map(_._2) + def cachedSize(ct: AnyRef): Int = entries.size + + def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known + + def string(lev: Int) = (" " * lev) + "LNode(%s)".format(entries.mkString(", ")) + +} + +// Ctrie Node, contains bitmap and array of references to branch nodes private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { // this should only be called from within read-only snapshots - def cachedSize(ct: AnyRef) = { + def cachedSize(ct: AnyRef): Int = { val currsz = READ_SIZE() if (currsz != -1) currsz else { @@ -462,6 +527,8 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } } + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + // lends itself towards being parallelizable by choosing // a random starting offset in the array // => if there are concurrent size computations, they start @@ -472,14 +539,15 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba var sz = 0 val offset = if (array.length > 0) - //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ + //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ 
java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length) else 0 while (i < array.length) { val pos = (i + offset) % array.length array(pos) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ct) + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) } i += 1 } @@ -487,7 +555,9 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { - val narr = java.util.Arrays.copyOf(array, array.length) + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) narr(pos) = nn new CNode[K, V](bitmap, narr, gen) } @@ -501,19 +571,19 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba new CNode[K, V](bitmap ^ flag, narr, gen) } - def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = { + def insertedAt(pos: Int, flag: Int, k: K, v: V, hc: Int, gen: Gen) = { val len = array.length val bmp = bitmap val narr = new Array[BasicNode](len + 1) Array.copy(array, 0, narr, 0, pos) - narr(pos) = nn + narr(pos) = new SNode(k, v, hc) Array.copy(array, pos, narr, pos + 1, len - pos) new CNode[K, V](bmp | flag, narr, gen) } /** Returns a copy of this cnode such that all the i-nodes below it are copied - * to the specified generation `ngen`. - */ + * to the specified generation `ngen`. 
+ */ def renewed(ngen: Gen, ct: TrieMap[K, V]) = { var i = 0 val arr = array @@ -521,8 +591,8 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba val narr = new Array[BasicNode](len) while (i < len) { arr(i) match { - case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct) - case bn: BasicNode => narr(i) = bn + case in: INode[K, V] @uc => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn } i += 1 } @@ -535,7 +605,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { - case sn: SNode[K, V] => sn.copyTombed + case sn: SNode[K, V] @uc => sn.copyTombed case _ => this } else this @@ -553,12 +623,13 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba while (i < arr.length) { // construct new bitmap val sub = arr(i) sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => val inodemain = in.gcasRead(ct) assert(inodemain ne null) tmparray(i) = resurrect(in, inodemain) - case sn: SNode[K, V] => + case sn: SNode[K, V] @uc => tmparray(i) = sn + case basicNode => throw new MatchError(basicNode) } i += 1 } @@ -566,36 +637,34 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba new CNode[K, V](bmp, tmparray, gen).toContracted(lev) } - private[concurrent] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) - - private def collectLocalElems: Seq[String] = array flatMap { - case sn: SNode[K, V] => Some(sn.kvPair._2.toString) - case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")") - } + def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) override def toString = { - val elems = collectLocalElems - "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", ")) + def elems: Seq[String] = array.flatMap { + 
case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" } } - private[concurrent] object CNode { - def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) { + def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { val xidx = (xhc >>> lev) & 0x1f val yidx = (yhc >>> lev) & 0x1f val bmp = (1 << xidx) | (1 << yidx) if (xidx == yidx) { - val subinode = new INode[K, V](gen)//(TrieMap.inodeupdater) - subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen) + val subinode = new INode[K, V](gen, equiv)//(TrieMap.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen, equiv) new CNode(bmp, Array(subinode), gen) } else { if (xidx < yidx) new CNode(bmp, Array(x, y), gen) else new CNode(bmp, Array(y, x), gen) } } else { - new LNode(x.k, x.v, y.k, y.v) + new LNode(x.k, x.v, y.k, y.v, equiv) } } @@ -607,35 +676,33 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected /** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free - * implementation of a hash array mapped trie. It is used to implement the - * concurrent map abstraction. It has particularly scalable concurrent insert - * and remove operations and is memory-efficient. It supports O(1), atomic, - * lock-free snapshots which are used to implement linearizable lock-free size, - * iterator and clear operations. The cost of evaluating the (lazy) snapshot is - * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. 
- * - * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] - * - * @author Aleksandar Prokopec - * @since 2.10 - */ -@SerialVersionUID(0L - 6402774413839597105L) + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. + * + * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] + */ +@SerialVersionUID(-5212455458703321708L) final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) -extends scala.collection.concurrent.Map[K, V] - with scala.collection.mutable.MapLike[K, V, TrieMap[K, V]] - with CustomParallelizable[(K, V), ParTrieMap[K, V]] - with Serializable -{ - private var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf - private var equalityobj = ef - private var rootupdater = rtupd + extends scala.collection.mutable.AbstractMap[K, V] + with scala.collection.concurrent.Map[K, V] + with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]] + with scala.collection.MapFactoryDefaults[K, V, TrieMap, mutable.Iterable] + with DefaultSerializable { + + private[this] var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf + private[this] var equalityobj = ef + @transient + private[this] var rootupdater = rtupd def hashing = hashingobj def equality = equalityobj - @deprecated("this field will be made private", "2.12.0") - @volatile /*private*/ var root = r + @volatile private var root = r def this(hashf: Hashing[K], 
ef: Equiv[K]) = this( - INode.newRootNode, + INode.newRootNode(ef), AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"), hashf, ef @@ -643,9 +710,11 @@ extends scala.collection.concurrent.Map[K, V] def this() = this(Hashing.default, Equiv.universal) + override def mapFactory: MapFactory[TrieMap] = TrieMap + /* internal methods */ - private def writeObject(out: java.io.ObjectOutputStream) { + private def writeObject(out: java.io.ObjectOutputStream): Unit = { out.writeObject(hashingobj) out.writeObject(equalityobj) @@ -658,44 +727,43 @@ extends scala.collection.concurrent.Map[K, V] out.writeObject(TrieMapSerializationEnd) } - private def readObject(in: java.io.ObjectInputStream) { - root = INode.newRootNode + private def readObject(in: java.io.ObjectInputStream): Unit = { + root = INode.newRootNode(equality) rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root") hashingobj = in.readObject().asInstanceOf[Hashing[K]] equalityobj = in.readObject().asInstanceOf[Equiv[K]] - var obj: AnyRef = null - do { + var obj: AnyRef = in.readObject() + + while (obj != TrieMapSerializationEnd) { obj = in.readObject() if (obj != TrieMapSerializationEnd) { val k = obj.asInstanceOf[K] val v = in.readObject().asInstanceOf[V] update(k, v) } - } while (obj != TrieMapSerializationEnd) + } } - @deprecated("this method will be made private", "2.12.0") - /*private*/ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + private def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) - @deprecated("this method will be made private", "2.12.0") - /*private[collection]*/ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) + private[collection] def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) - @deprecated("this method will be made private", "2.12.0") - /*private[concurrent]*/ def RDCSS_READ_ROOT(abort: Boolean = false): 
INode[K, V] = { + private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { val r = /*READ*/root r match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort) + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => RDCSS_Complete(abort) + case x => throw new MatchError(x) } } @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { val v = /*READ*/root v match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => val RDCSS_Descriptor(ov, exp, nv) = desc if (abort) { if (CAS_ROOT(desc, ov)) ov @@ -712,6 +780,7 @@ extends scala.collection.concurrent.Map[K, V] else RDCSS_Complete(abort) } } + case x => throw new MatchError(x) } } @@ -723,19 +792,26 @@ extends scala.collection.concurrent.Map[K, V] } else false } - @tailrec private def inserthc(k: K, hc: Int, v: V) { + @tailrec private def inserthc(k: K, hc: Int, v: V): Unit = { val r = RDCSS_READ_ROOT() if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) } - @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = { + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef, fullEquals: Boolean): Option[V] = { val r = RDCSS_READ_ROOT() - val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this) - if (ret eq null) insertifhc(k, hc, v, cond) + val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond, fullEquals) else ret } + /** Finds the value associated with this key + * + * @param k the key to look up + * @param hc the hashcode of `k` + * + * @return the value: V associated with `k`, if it exists. 
Otherwise, INodeBase.NO_SUCH_ELEMENT_SENTINEL + */ @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { val r = RDCSS_READ_ROOT() val res = r.rec_lookup(k, hc, 0, null, r.gen, this) @@ -743,49 +819,39 @@ extends scala.collection.concurrent.Map[K, V] else res } - /* slower: - //@tailrec - private def lookuphc(k: K, hc: Int): AnyRef = { + /** Removes a key-value pair from the map + * + * @param k the key to remove + * @param v the value compare with the value found associated with the key + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * @return an Option[V] indicating the previous value + */ + @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = { val r = RDCSS_READ_ROOT() - try { - r.rec_lookup(k, hc, 0, null, r.gen, this) - } catch { - case RestartException => - lookuphc(k, hc) - } - } - */ - - @tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = { - val r = RDCSS_READ_ROOT() - val res = r.rec_remove(k, v, hc, 0, null, r.gen, this) + val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this) if (res ne null) res - else removehc(k, v, hc) + else removehc(k, v, removalPolicy, hc) } + def string = RDCSS_READ_ROOT().string(0) /* public methods */ - override def seq = this - - override def par = new ParTrieMap(this) - - override def empty: TrieMap[K, V] = new TrieMap[K, V] - def isReadOnly = rootupdater eq null def nonReadOnly = rootupdater ne null /** Returns a snapshot of this TrieMap. - * This operation is lock-free and linearizable. - * - * The snapshot is lazily updated - the first time some branch - * in the snapshot or this TrieMap are accessed, they are rewritten. - * This means that the work of rebuilding both the snapshot and this - * TrieMap is distributed across all the threads doing updates or accesses - * subsequent to the snapshot creation. 
- */ + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * in the snapshot or this TrieMap are accessed, they are rewritten. + * This means that the work of rebuilding both the snapshot and this + * TrieMap is distributed across all the threads doing updates or accesses + * subsequent to the snapshot creation. + */ @tailrec def snapshot(): TrieMap[K, V] = { val r = RDCSS_READ_ROOT() val expmain = r.gcasRead(this) @@ -794,17 +860,17 @@ extends scala.collection.concurrent.Map[K, V] } /** Returns a read-only snapshot of this TrieMap. - * This operation is lock-free and linearizable. - * - * The snapshot is lazily updated - the first time some branch - * of this TrieMap are accessed, it is rewritten. The work of creating - * the snapshot is thus distributed across subsequent updates - * and accesses on this TrieMap by all threads. - * Note that the snapshot itself is never rewritten unlike when calling - * the `snapshot` method, but the obtained snapshot cannot be modified. - * - * This method is used by other methods such as `size` and `iterator`. - */ + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * of this TrieMap are accessed, it is rewritten. The work of creating + * the snapshot is thus distributed across subsequent updates + * and accesses on this TrieMap by all threads. + * Note that the snapshot itself is never rewritten unlike when calling + * the `snapshot` method, but the obtained snapshot cannot be modified. + * + * This method is used by other methods such as `size` and `iterator`. 
+ */ @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = { val r = RDCSS_READ_ROOT() val expmain = r.gcasRead(this) @@ -812,117 +878,123 @@ extends scala.collection.concurrent.Map[K, V] else readOnlySnapshot() } - @tailrec override def clear() { + @tailrec override def clear(): Unit = { val r = RDCSS_READ_ROOT() - if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear() + if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V](equality))) clear() } - def computeHash(k: K) = hashingobj.hash(k) + @deprecated("Use getOrElse(k, null) instead.", "2.13.0") def lookup(k: K): V = { val hc = computeHash(k) - lookuphc(k, hc).asInstanceOf[V] + val lookupRes = lookuphc(k, hc) + val res = if (lookupRes == INodeBase.NO_SUCH_ELEMENT_SENTINEL) null else lookupRes + res.asInstanceOf[V] } override def apply(k: K): V = { val hc = computeHash(k) val res = lookuphc(k, hc) - if (res eq null) throw new NoSuchElementException + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) throw new NoSuchElementException else res.asInstanceOf[V] } def get(k: K): Option[V] = { val hc = computeHash(k) - Option(lookuphc(k, hc)).asInstanceOf[Option[V]] + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) None else Some(res).asInstanceOf[Option[V]] } override def put(key: K, value: V): Option[V] = { val hc = computeHash(key) - insertifhc(key, hc, value, null) + insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */) } - override def update(k: K, v: V) { + override def update(k: K, v: V): Unit = { val hc = computeHash(k) inserthc(k, hc, v) } - def +=(kv: (K, V)) = { + def addOne(kv: (K, V)) = { update(kv._1, kv._2) this } override def remove(k: K): Option[V] = { val hc = computeHash(k) - removehc(k, null.asInstanceOf[V], hc) + removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc) } - def -=(k: K) = { + def subtractOne(k: K) = { remove(k) this } def putIfAbsent(k: K, v: V): Option[V] = { val hc = 
computeHash(k) - insertifhc(k, hc, v, INode.KEY_ABSENT) + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) } // TODO once computeIfAbsent is added to concurrent.Map, // move the comment there and tweak the 'at most once' part /** If the specified key is not already in the map, computes its value using - * the given thunk `op` and enters it into the map. - * - * Since concurrent maps cannot contain `null` for keys or values, - * a `NullPointerException` is thrown if the thunk `op` - * returns `null`. + * the given thunk `defaultValue` and enters it into the map. * * If the specified mapping function throws an exception, * that exception is rethrown. * - * Note: This method will invoke op at most once. - * However, `op` may be invoked without the result being added to the map if + * Note: This method will invoke `defaultValue` at most once. + * However, `defaultValue` may be invoked without the result being added to the map if * a concurrent process is also trying to add a value corresponding to the * same key `k`. 
* * @param k the key to modify - * @param op the expression that computes the value + * @param defaultValue the expression that computes the value * @return the newly added value */ - override def getOrElseUpdate(k: K, op: =>V): V = { - val oldv = lookup(k) - if (oldv != null) oldv.asInstanceOf[V] - else { - val v = op - if (v == null) { - throw new NullPointerException("Concurrent TrieMap values cannot be null.") - } else { - val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_ABSENT) match { - case Some(oldv) => oldv + override def getOrElseUpdate(k: K, @deprecatedName("op", since="2.13.13") defaultValue: => V): V = { + val hc = computeHash(k) + lookuphc(k, hc) match { + case INodeBase.NO_SUCH_ELEMENT_SENTINEL => + val v = defaultValue + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match { + case Some(oldValue) => oldValue case None => v } - } + case oldValue => oldValue.asInstanceOf[V] } } def remove(k: K, v: V): Boolean = { val hc = computeHash(k) - removehc(k, v, hc).nonEmpty + removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty + } + + override private[collection] def removeRefEq(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty } def replace(k: K, oldvalue: V, newvalue: V): Boolean = { val hc = computeHash(k) - insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty + } + + override private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = false).nonEmpty } def replace(k: K, v: V): Option[V] = { val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_PRESENT) + insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */) } - def iterator: Iterator[(K, V)] = + def iterator: Iterator[(K, V)] = { if (nonReadOnly) 
readOnlySnapshot().iterator else new TrieMapIterator(0, this) + } //////////////////////////////////////////////////////////////////////////// // @@ -939,49 +1011,63 @@ extends scala.collection.concurrent.Map[K, V] if (nonReadOnly) readOnlySnapshot().keySet else super.keySet } - override def filterKeys(p: K => Boolean): collection.Map[K, V] = { - if (nonReadOnly) readOnlySnapshot().filterKeys(p) - else super.filterKeys(p) - } - override def mapValues[W](f: V => W): collection.Map[K, W] = { - if (nonReadOnly) readOnlySnapshot().mapValues(f) - else super.mapValues(f) - } - // END extra overrides - /////////////////////////////////////////////////////////////////// + override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view - private def cachedSize() = { - val r = RDCSS_READ_ROOT() - r.cachedSize(this) - } + @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") + override def filterKeys(p: K => Boolean): collection.MapView[K, V] = view.filterKeys(p) + + @deprecated("Use .view.mapValues(f). 
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + override def mapValues[W](f: V => W): collection.MapView[K, W] = view.mapValues(f) + // END extra overrides + /////////////////////////////////////////////////////////////////// override def size: Int = if (nonReadOnly) readOnlySnapshot().size - else cachedSize() + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize + override protected[this] def className = "TrieMap" + + override def lastOption: Option[(K, V)] = if (isEmpty) None else Try(last).toOption +} - override def stringPrefix = "TrieMap" -} +@SerialVersionUID(3L) +object TrieMap extends MapFactory[TrieMap] { + def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] -object TrieMap extends MutableMapFactory[TrieMap] { - val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + def from[K, V](it: IterableOnce[(K, V)]): TrieMap[K, V] = new TrieMap[K, V]() ++= it - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (K, V), TrieMap[K, V]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] + def newBuilder[K, V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) - def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] + @transient + val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") class MangledHashing[K] extends Hashing[K] { - def hash(k: K)= scala.util.hashing.byteswap32(k.##) + def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) } -} + private[concurrent] object RemovalPolicy { + 
final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } +} -private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { +// non-final as an extension point for parallel collections +private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { private val stack = new Array[Array[BasicNode]](7) private val stackpos = new Array[Int](7) private var depth = -1 @@ -1013,10 +1099,11 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: case tn: TNode[K, V] => current = tn case ln: LNode[K, V] => - subiter = ln.listmap.iterator + subiter = ln.entries.iterator checkSubiter() case null => current = null + case mainNode => throw new MatchError(mainNode) } private def checkSubiter() = if (!subiter.hasNext) { @@ -1024,22 +1111,22 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: advance() } - private def initialize() { + private def initialize(): Unit = { assert(ct.isReadOnly) val r = ct.RDCSS_READ_ROOT() readin(r) } - def advance(): Unit = if (depth >= 0) { + @tailrec + final def advance(): Unit = if (depth >= 0) { val npos = stackpos(depth) + 1 if (npos < stack(depth).length) { stackpos(depth) = npos stack(depth)(npos) match { - case sn: SNode[K, V] => - current = sn - case in: INode[K, V] => - readin(in) + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) } } else { depth -= 1 @@ -1047,9 +1134,9 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: } } else current = null - protected def 
newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new TrieMapIterator[K, V](_lev, _ct, _mustInit) + protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): TrieMapIterator[K, V] = new TrieMapIterator[K, V](_lev, _ct, _mustInit) - protected def dupTo(it: TrieMapIterator[K, V]) = { + protected def dupTo(it: TrieMapIterator[K, V]): Unit = { it.level = this.level it.ct = this.ct it.depth = this.depth @@ -1062,15 +1149,15 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: // this one needs to be evaluated if (this.subiter == null) it.subiter = null else { - val lst = this.subiter.toList + val lst = this.subiter.to(immutable.List) this.subiter = lst.iterator it.subiter = lst.iterator } } /** Returns a sequence of iterators over subsets of this iterator. - * It's used to ease the implementation of splitters for a parallel version of the TrieMap. - */ + * It's used to ease the implementation of splitters for a parallel version of the TrieMap. + */ protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { // the case where an LNode is being iterated val it = newIterator(level + 1, ct, _mustInit = false) @@ -1106,40 +1193,8 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: Seq(this) } - @deprecated("this method will be removed", "2.12.0") - def printDebug() { - println("ctrie iterator") - println(stackpos.mkString(",")) - println("depth: " + depth) - println("curr.: " + current) - println(stack.mkString("\n")) - } - } - -private[concurrent] object RestartException extends ControlThrowable - - /** Only used for ctrie serialization. 
*/ -@SerialVersionUID(0L - 7237891413820527142L) +@SerialVersionUID(3L) private[concurrent] case object TrieMapSerializationEnd - - -private[concurrent] object Debug { - import JavaConverters._ - - lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef] - - def log(s: AnyRef) = logbuffer.add(s) - - def flush() { - for (s <- logbuffer.iterator().asScala) Console.out.println(s.toString) - logbuffer.clear() - } - - def clear() { - logbuffer.clear() - } - -} diff --git a/src/library/scala/collection/convert/AsJavaConverters.scala b/src/library/scala/collection/convert/AsJavaConverters.scala index 632361f7a105..2fc73da64fe7 100644 --- a/src/library/scala/collection/convert/AsJavaConverters.scala +++ b/src/library/scala/collection/convert/AsJavaConverters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,11 +14,16 @@ package scala package collection package convert -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} -/** Defines converter methods from Scala to Java collections. */ +import scala.{unchecked => uc} + +/** Defines converter methods from Scala to Java collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ trait AsJavaConverters { - import Wrappers._ + import JavaCollectionWrappers._ /** * Converts a Scala `Iterator` to a Java `Iterator`. @@ -27,16 +32,15 @@ trait AsJavaConverters { * using it via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Iterator` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaIterator]](java.util.Iterator)` then the original Java `Iterator` will - * be returned. 
+ * `asScala` then the original Java `Iterator` will be returned. * * @param i The Scala `Iterator` to be converted. * @return A Java `Iterator` view of the argument. */ - def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = i match { - case null => null - case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] - case _ => IteratorWrapper(i) + def asJava[A](i: Iterator[A]): ju.Iterator[A] = i match { + case null => null + case wrapper: JIteratorWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) } /** @@ -46,16 +50,15 @@ trait AsJavaConverters { * of using it via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Iterator` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.enumerationAsScalaIterator]](java.util.Enumeration)` then the original Java - * `Enumeration` will be returned. + * `asScala` then the original Java `Enumeration` will be returned. * * @param i The Scala `Iterator` to be converted. * @return A Java `Enumeration` view of the argument. */ def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match { - case null => null - case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]] - case _ => IteratorWrapper(i) + case null => null + case wrapper: JEnumerationWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) } /** @@ -65,32 +68,30 @@ trait AsJavaConverters { * using it via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Iterable` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.iterableAsScalaIterable]](java.lang.Iterable)` then the original Java - * `Iterable` will be returned. + * `asScala` then the original Java `Iterable` will be returned. * * @param i The Scala `Iterable` to be converted. * @return A Java `Iterable` view of the argument. 
*/ - def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match { - case null => null - case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]] - case _ => IterableWrapper(i) + def asJava[A](i: Iterable[A]): jl.Iterable[A] = i match { + case null => null + case wrapper: JIterableWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) } /** * Converts a Scala `Iterable` to an immutable Java `Collection`. * * If the Scala `Iterable` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.collectionAsScalaIterable]](java.util.Collection)` then the original Java - * `Collection` will be returned. + * `asScala` then the original Java `Collection` will be returned. * * @param i The Scala `Iterable` to be converted. * @return A Java `Collection` view of the argument. */ def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match { - case null => null - case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]] - case _ => new IterableWrapper(i) + case null => null + case wrapper: JCollectionWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) } /** @@ -100,16 +101,15 @@ trait AsJavaConverters { * it via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Buffer` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be - * returned. + * `asScala` then the original Java `List` will be returned. * * @param b The Scala `Buffer` to be converted. * @return A Java `List` view of the argument. 
*/ - def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableBufferWrapper(b) + def asJava[A](b: mutable.Buffer[A]): ju.List[A] = b match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableBufferWrapper(b) } /** @@ -119,16 +119,15 @@ trait AsJavaConverters { * via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Seq` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be - * returned. + * `asScala` then the original Java `List` will be returned. * * @param s The Scala `Seq` to be converted. * @return A Java `List` view of the argument. */ - def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = s match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableSeqWrapper(s) + def asJava[A](s: mutable.Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableSeqWrapper(s) } /** @@ -138,16 +137,15 @@ trait AsJavaConverters { * via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Seq` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be - * returned. + * `asScala` then the original Java `List` will be returned. * * @param s The Scala `Seq` to be converted. * @return A Java `List` view of the argument. 
*/ - def seqAsJavaList[A](s: Seq[A]): ju.List[A] = s match { - case null => null - case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]] - case _ => new SeqWrapper(s) + def asJava[A](s: Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new SeqWrapper(s) } /** @@ -157,33 +155,33 @@ trait AsJavaConverters { * via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Set` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned. + * `asScala` then the original Java `Set` will be returned. * * @param s The Scala mutable `Set` to be converted. * @return A Java `Set` view of the argument. */ - def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new MutableSetWrapper(s) + def asJava[A](s: mutable.Set[A]): ju.Set[A] = s match { + case null => null + case wrapper: JSetWrapper[A @uc] => wrapper.underlying + case _ => new MutableSetWrapper(s) } - /** + /** * Converts a Scala `Set` to a Java `Set`. * * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it * via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Set` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned. + * `asScala` then the original Java `Set` will be returned. * * @param s The Scala `Set` to be converted. * @return A Java `Set` view of the argument. 
*/ - def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new SetWrapper(s) + def asJava[A](s: Set[A]): ju.Set[A] = s match { + case null => null + case wrapper: JSetWrapper[A @uc] => wrapper.underlying + case _ => new SetWrapper(s) } /** @@ -193,16 +191,15 @@ trait AsJavaConverters { * via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be - * returned. + * `asScala` then the original Java `Map` will be returned. * * @param m The Scala mutable `Map` to be converted. * @return A Java `Map` view of the argument. */ - def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped - case _ => new MutableMapWrapper(m) + def asJava[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MutableMapWrapper(m) } /** @@ -212,17 +209,16 @@ trait AsJavaConverters { * side-effects of using it via the Java interface will be visible via the Scala interface and * vice versa. * - * If the Scala `Dictionary` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.dictionaryAsScalaMap]](java.util.Dictionary)` then the original Java - * `Dictionary` will be returned. + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Dictionary` will be returned. * * @param m The Scala `Map` to be converted. * @return A Java `Dictionary` view of the argument. 
*/ - def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match { - case null => null - case JDictionaryWrapper(wrapped) => wrapped - case _ => new DictionaryWrapper(m) + def asJavaDictionary[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = m match { + case null => null + case wrapper: JDictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new DictionaryWrapper(m) } /** @@ -232,16 +228,15 @@ trait AsJavaConverters { * via the Java interface will be visible via the Scala interface and vice versa. * * If the Scala `Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be - * returned. + * `asScala` then the original Java `Map` will be returned. * * @param m The Scala `Map` to be converted. * @return A Java `Map` view of the argument. */ - def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]] - case _ => new MapWrapper(m) + def asJava[K, V](m: Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MapWrapper(m) } /** @@ -252,15 +247,14 @@ trait AsJavaConverters { * vice versa. * * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsScalaConcurrentMap]](java.util.concurrent.ConcurrentMap)` then the - * original Java `ConcurrentMap` will be returned. + * `asScala` then the original Java `ConcurrentMap` will be returned. * * @param m The Scala `concurrent.Map` to be converted. * @return A Java `ConcurrentMap` view of the argument. 
*/ - def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match { - case null => null - case JConcurrentMapWrapper(wrapped) => wrapped - case _ => new ConcurrentMapWrapper(m) + def asJava[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = m match { + case null => null + case wrapper: JConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new ConcurrentMapWrapper(m) } } diff --git a/src/library/scala/collection/convert/AsJavaExtensions.scala b/src/library/scala/collection/convert/AsJavaExtensions.scala new file mode 100644 index 000000000000..d356a419325d --- /dev/null +++ b/src/library/scala/collection/convert/AsJavaExtensions.scala @@ -0,0 +1,108 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +/** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsJavaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsJava[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Iterator`, see + * [[AsJavaConverters.asJava[A](i:Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Iterator[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Enumeration`, see + * [[AsJavaConverters.asJavaEnumeration `scala.jdk.javaapi.CollectionConverters.asJavaEnumeration`]]. 
+ */ + def asJavaEnumeration: ju.Enumeration[A] = conv.asJavaEnumeration(i) + } + + implicit class IterableHasAsJava[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Iterable`, see + * [[AsJavaConverters.asJava[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: jl.Iterable[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Collection`, see + * [[AsJavaConverters.asJavaCollection `scala.jdk.javaapi.CollectionConverters.asJavaCollection`]]. + */ + def asJavaCollection: ju.Collection[A] = conv.asJavaCollection(i) + } + + implicit class BufferHasAsJava[A](b: mutable.Buffer[A]) { + /** Converts a Scala `Buffer` to a Java `List`, see + * [[AsJavaConverters.asJava[A](b:scala\.collection\.mutable\.Buffer[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(b) + } + + implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A]) { + /** Converts a Scala `Seq` to a Java `List`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(s) + } + + implicit class SeqHasAsJava[A](s: Seq[A]) { + /** Converts a Scala `Seq` to a Java `List`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(s) + } + + implicit class MutableSetHasAsJava[A](s: mutable.Set[A]) { + /** Converts a Scala `mutable.Set` to a Java `Set`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Set[A] = conv.asJava(s) + } + + implicit class SetHasAsJava[A](s: Set[A]) { + /** Converts a Scala `Set` to a Java `Set`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
+ */ + def asJava: ju.Set[A] = conv.asJava(s) + } + + implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V]) { + /** Converts a Scala `mutable.Map` to a Java `Map`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.mutable\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Map[K, V] = conv.asJava(m) + + /** Converts a Scala `mutable.Map` to a Java `Map`, see + * [[AsJavaConverters.asJavaDictionary `scala.jdk.javaapi.CollectionConverters.asJavaDictionary`]]. + */ + def asJavaDictionary: ju.Dictionary[K, V] = conv.asJavaDictionary(m) + } + + implicit class MapHasAsJava[K, V](m: Map[K, V]) { + /** Converts a Scala `Map` to a Java `Map`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Map[K, V] = conv.asJava(m) + } + + implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V]) { + /** Converts a Scala `concurrent.Map` to a Java `ConcurrentMap`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.concurrent\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: juc.ConcurrentMap[K, V] = conv.asJava(m) + } +} diff --git a/src/library/scala/collection/convert/AsScalaConverters.scala b/src/library/scala/collection/convert/AsScalaConverters.scala index 8733338ca784..e1055c60b36e 100644 --- a/src/library/scala/collection/convert/AsScalaConverters.scala +++ b/src/library/scala/collection/convert/AsScalaConverters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,11 +14,16 @@ package scala package collection package convert -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} -/** Defines converter methods from Java to Scala collections. */ +import scala.{unchecked => uc} + +/** Defines converter methods from Java to Scala collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ trait AsScalaConverters { - import Wrappers._ + import JavaCollectionWrappers._ /** * Converts a Java `Iterator` to a Scala `Iterator`. @@ -27,16 +32,15 @@ trait AsScalaConverters { * using it via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `Iterator` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaIterator]](scala.collection.Iterator)` then the original Scala - * `Iterator` will be returned. + * `asJava` then the original Scala `Iterator` will be returned. * * @param i The Java `Iterator` to be converted. * @return A Scala `Iterator` view of the argument. */ - def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = i match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JIteratorWrapper(i) + def asScala[A](i: ju.Iterator[A]): Iterator[A] = i match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JIteratorWrapper(i) } /** @@ -46,16 +50,15 @@ trait AsScalaConverters { * of using it via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `Enumeration` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaEnumeration]](scala.collection.Iterator)` then the original Scala - * `Iterator` will be returned. + * `asJavaEnumeration` then the original Scala `Iterator` will be returned. * - * @param i The Java `Enumeration` to be converted. 
+ * @param e The Java `Enumeration` to be converted. * @return A Scala `Iterator` view of the argument. */ - def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JEnumerationWrapper(i) + def asScala[A](e: ju.Enumeration[A]): Iterator[A] = e match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JEnumerationWrapper(e) } /** @@ -65,32 +68,30 @@ trait AsScalaConverters { * using it via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `Iterable` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaIterable]](scala.collection.Iterable) then the original Scala - * `Iterable` will be returned. + * `asJava` then the original Scala `Iterable` will be returned. * * @param i The Java `Iterable` to be converted. * @return A Scala `Iterable` view of the argument. */ - def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JIterableWrapper(i) + def asScala[A](i: jl.Iterable[A]): Iterable[A] = i match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JIterableWrapper(i) } /** - * Converts a Java `Collection` to an Scala `Iterable`. + * Converts a Java `Collection` to a Scala `Iterable`. * * If the Java `Collection` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaCollection]](scala.collection.Iterable)` then the original Scala - * `Iterable` will be returned. + * `asJavaCollection` then the original Scala `Iterable` will be returned. * - * @param i The Java `Collection` to be converted. + * @param c The Java `Collection` to be converted. * @return A Scala `Iterable` view of the argument. 
*/ - def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JCollectionWrapper(i) + def asScala[A](c: ju.Collection[A]): Iterable[A] = c match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JCollectionWrapper(c) } /** @@ -100,16 +101,15 @@ trait AsScalaConverters { * it via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `List` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.bufferAsJavaList]](scala.collection.mutable.Buffer)` then the original Scala - * `Buffer` will be returned. + * `asJava` then the original Scala `Buffer` will be returned. * * @param l The Java `List` to be converted. * @return A Scala mutable `Buffer` view of the argument. */ - def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match { - case null => null - case MutableBufferWrapper(wrapped) => wrapped - case _ => new JListWrapper(l) + def asScala[A](l: ju.List[A]): mutable.Buffer[A] = l match { + case null => null + case wrapper: MutableBufferWrapper[A @uc] => wrapper.underlying + case _ => new JListWrapper(l) } /** @@ -119,16 +119,15 @@ trait AsScalaConverters { * via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `Set` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mutableSetAsJavaSet]](scala.collection.mutable.Set)` then the original Scala - * `Set` will be returned. + * `asJava` then the original Scala `Set` will be returned. * * @param s The Java `Set` to be converted. * @return A Scala mutable `Set` view of the argument. 
*/ - def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match { - case null => null - case MutableSetWrapper(wrapped) => wrapped - case _ => new JSetWrapper(s) + def asScala[A](s: ju.Set[A]): mutable.Set[A] = s match { + case null => null + case wrapper: MutableSetWrapper[A @uc] => wrapper.underlying + case _ => new JSetWrapper(s) } /** @@ -138,8 +137,7 @@ trait AsScalaConverters { * via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `Map` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mutableMapAsJavaMap]](scala.collection.mutable.Map)` then the original Scala - * `Map` will be returned. + * `asJava` then the original Scala `Map` will be returned. * * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), it is * your responsibility to wrap all non-atomic operations with `underlying.synchronized`. @@ -149,10 +147,10 @@ trait AsScalaConverters { * @param m The Java `Map` to be converted. * @return A Scala mutable `Map` view of the argument. */ - def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match { - case null => null - case MutableMapWrapper(wrapped) => wrapped - case _ => new JMapWrapper(m) + def asScala[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = m match { + case null => null + case wrapper: MutableMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JMapWrapper(m) } /** @@ -163,35 +161,33 @@ trait AsScalaConverters { * vice versa. * * If the Java `ConcurrentMap` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.mapAsJavaConcurrentMap]](scala.collection.mutable.ConcurrentMap)` - * then the original Scala `ConcurrentMap` will be returned. + * `asJava` then the original Scala `ConcurrentMap` will be returned. * * @param m The Java `ConcurrentMap` to be converted. * @return A Scala mutable `ConcurrentMap` view of the argument. 
*/ - def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match { - case null => null - case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying - case _ => new JConcurrentMapWrapper(m) + def asScala[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = m match { + case null => null + case wrapper: ConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlyingConcurrentMap + case _ => new JConcurrentMapWrapper(m) } - /** + /** * Converts a Java `Dictionary` to a Scala mutable `Map`. * * The returned Scala `Map` is backed by the provided Java `Dictionary` and any side-effects of * using it via the Scala interface will be visible via the Java interface and vice versa. * * If the Java `Dictionary` was previously obtained from an implicit or explicit call of - * `[[JavaConverters.asJavaDictionary]](scala.collection.mutable.Map)` then the original - * Scala `Map` will be returned. + * `asJavaDictionary` then the original Scala `Map` will be returned. * - * @param p The Java `Dictionary` to be converted. + * @param d The Java `Dictionary` to be converted. * @return A Scala mutable `Map` view of the argument. */ - def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match { - case null => null - case DictionaryWrapper(wrapped) => wrapped - case _ => new JDictionaryWrapper(p) + def asScala[K, V](d: ju.Dictionary[K, V]): mutable.Map[K, V] = d match { + case null => null + case wrapper: DictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JDictionaryWrapper(d) } /** @@ -204,7 +200,7 @@ trait AsScalaConverters { * @param p The Java `Properties` to be converted. * @return A Scala mutable `Map[String, String]` view of the argument. 
*/ - def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match { + def asScala(p: ju.Properties): mutable.Map[String, String] = p match { case null => null case _ => new JPropertiesWrapper(p) } diff --git a/src/library/scala/collection/convert/AsScalaExtensions.scala b/src/library/scala/collection/convert/AsScalaExtensions.scala new file mode 100644 index 000000000000..ef08f4505fe1 --- /dev/null +++ b/src/library/scala/collection/convert/AsScalaExtensions.scala @@ -0,0 +1,93 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +/** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsScalaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsScala[A](i: ju.Iterator[A]) { + /** Converts a Java `Iterator` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](i:java\.util\.Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(i) + } + + implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A]) { + /** Converts a Java `Enumeration` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](e:java\.util\.Enumeration[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(e) + } + + implicit class IterableHasAsScala[A](i: jl.Iterable[A]) { + /** Converts a Java `Iterable` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. 
+ */ + def asScala: Iterable[A] = conv.asScala(i) + } + + implicit class CollectionHasAsScala[A](c: ju.Collection[A]) { + /** Converts a Java `Collection` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](c:java\.util\.Collection[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(c) + } + + implicit class ListHasAsScala[A](l: ju.List[A]) { + /** Converts a Java `List` to a Scala `Buffer`, see + * [[AsScalaConverters.asScala[A](l:java\.util\.List[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Buffer[A] = conv.asScala(l) + } + + implicit class SetHasAsScala[A](s: ju.Set[A]) { + /** Converts a Java `Set` to a Scala `Set`, see + * [[AsScalaConverters.asScala[A](s:java\.util\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Set[A] = conv.asScala(s) + } + + implicit class MapHasAsScala[K, V](m: ju.Map[K, V]) { + /** Converts a Java `Map` to a Scala `Map`, see + * [[AsScalaConverters.asScala[A,B](m:java\.util\.Map[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Map[K, V] = conv.asScala(m) + } + + implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V]) { + /** Converts a Java `ConcurrentMap` to a Scala `concurrent.Map`, see + * [[AsScalaConverters.asScala[A,B](m:java\.util\.concurrent\.ConcurrentMap[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: concurrent.Map[K, V] = conv.asScala(m) + } + + implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V]) { + /** Converts a Java `Dictionary` to a Scala `Map`, see + * [[AsScalaConverters.asScala[A,B](d:java\.util\.Dictionary[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. 
+ */ + def asScala: mutable.Map[K, V] = conv.asScala(d) + } + + implicit class PropertiesHasAsScala(i: ju.Properties) { + /** Converts a Java `Properties` to a Scala `Map`, see + * [[AsScalaConverters.asScala(p:java\.util\.Properties)* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Map[String, String] = conv.asScala(i) + } +} diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala deleted file mode 100644 index c2b26670be7d..000000000000 --- a/src/library/scala/collection/convert/DecorateAsJava.scala +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import Decorators._ -import scala.language.implicitConversions - -/** Defines `asJava` extension methods for [[JavaConverters]]. */ -trait DecorateAsJava extends AsJavaConverters { - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. - * @see [[asJavaIterator]] - */ - implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = - new AsJava(asJavaIterator(i)) - - /** - * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. - * @see [[asJavaEnumeration]] - */ - implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = - new AsJavaEnumeration(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. 
- * @see [[asJavaIterable]] - */ - implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = - new AsJava(asJavaIterable(i)) - - /** - * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. - * @see [[asJavaCollection]] - */ - implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = - new AsJavaCollection(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. - * @see [[bufferAsJavaList]] - */ - implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = - new AsJava(bufferAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. - * @see [[mutableSeqAsJavaList]] - */ - implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = - new AsJava(mutableSeqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. - * @see [[seqAsJavaList]] - */ - implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = - new AsJava(seqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. - * @see [[mutableSetAsJavaSet]] - */ - implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = - new AsJava(mutableSetAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. - * @see [[setAsJavaSet]] - */ - implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = - new AsJava(setAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. 
- * @see [[mutableMapAsJavaMap]] - */ - implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = - new AsJava(mutableMapAsJavaMap(m)) - - /** - * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * @see [[asJavaDictionary]] - */ - implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] = - new AsJavaDictionary(m) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. - * @see [[mapAsJavaMap]] - */ - implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = - new AsJava(mapAsJavaMap(m)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. - * @see [[mapAsJavaConcurrentMap]]. - */ - implicit def mapAsJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] = - new AsJava(mapAsJavaConcurrentMap(m)) -} diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala deleted file mode 100644 index 715c925d4344..000000000000 --- a/src/library/scala/collection/convert/DecorateAsScala.scala +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import Decorators._ -import scala.language.implicitConversions - -/** Defines `asScala` extension methods for [[JavaConverters]]. 
*/ -trait DecorateAsScala extends AsScalaConverters { - /** - * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. - * @see [[asScalaIterator]] - */ - implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = - new AsScala(asScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. - * @see [[enumerationAsScalaIterator]] - */ - implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = - new AsScala(enumerationAsScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. - * @see [[iterableAsScalaIterable]] - */ - implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = - new AsScala(iterableAsScalaIterable(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. - * @see [[collectionAsScalaIterable]] - */ - implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = - new AsScala(collectionAsScalaIterable(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. - * @see [[asScalaBuffer]] - */ - implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = - new AsScala(asScalaBuffer(l)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. - * @see [[asScalaSet]] - */ - implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = - new AsScala(asScalaSet(s)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. 
- * @see [[mapAsScalaMap]] - */ - implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = - new AsScala(mapAsScalaMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. - * @see [[mapAsScalaConcurrentMap]] - */ - implicit def mapAsScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[concurrent.Map[A, B]] = - new AsScala(mapAsScalaConcurrentMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. - * @see [[dictionaryAsScalaMap]] - */ - implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] = - new AsScala(dictionaryAsScalaMap(p)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. - * @see [[propertiesAsScalaMap]] - */ - implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = - new AsScala(propertiesAsScalaMap(p)) -} diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala deleted file mode 100644 index 03502ea598a3..000000000000 --- a/src/library/scala/collection/convert/Decorators.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package convert - -import java.{ util => ju } - -private[collection] object Decorators { - /** Generic class containing the `asJava` converter method */ - class AsJava[A](op: => A) { - /** Converts a Scala collection to the corresponding Java collection */ - def asJava: A = op - } - - /** Generic class containing the `asScala` converter method */ - class AsScala[A](op: => A) { - /** Converts a Java collection to the corresponding Scala collection */ - def asScala: A = op - } - - /** Generic class containing the `asJavaCollection` converter method */ - class AsJavaCollection[A](i: Iterable[A]) { - /** Converts a Scala `Iterable` to a Java `Collection` */ - def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) - } - - /** Generic class containing the `asJavaEnumeration` converter method */ - class AsJavaEnumeration[A](i: Iterator[A]) { - /** Converts a Scala `Iterator` to a Java `Enumeration` */ - def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) - } - - /** Generic class containing the `asJavaDictionary` converter method */ - class AsJavaDictionary[A, B](m : mutable.Map[A, B]) { - /** Converts a Scala `Map` to a Java `Dictionary` */ - def asJavaDictionary: ju.Dictionary[A, B] = JavaConverters.asJavaDictionary(m) - } -} diff --git a/src/library/scala/collection/convert/ImplicitConversions.scala b/src/library/scala/collection/convert/ImplicitConversions.scala index e4068fa4da62..6492c60d6d9e 100644 --- a/src/library/scala/collection/convert/ImplicitConversions.scala +++ b/src/library/scala/collection/convert/ImplicitConversions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,130 +14,133 @@ package scala package collection package convert -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import scala.language.implicitConversions +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} -import JavaConverters._ +import scala.collection.JavaConverters._ +import scala.language.implicitConversions /** Defines implicit converter methods from Java to Scala collections. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") trait ToScalaImplicits { /** Implicitly converts a Java `Iterator` to a Scala `Iterator`. - * @see [[AsScalaConverters.asScalaIterator]] + * @see [[JavaConverters.asScalaIterator]] */ implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it) /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`. - * @see [[AsScalaConverters.enumerationAsScalaIterator]] + * @see [[JavaConverters.enumerationAsScalaIterator]] */ implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i) /** Implicitly converts a Java `Iterable` to a Scala `Iterable`. - * @see [[AsScalaConverters.iterableAsScalaIterable]] + * @see [[JavaConverters.iterableAsScalaIterable]] */ implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i) /** Implicitly converts a Java `Collection` to an Scala `Iterable`. - * @see [[AsScalaConverters.collectionAsScalaIterable]] + * @see [[JavaConverters.collectionAsScalaIterable]] */ implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i) /** Implicitly converts a Java `List` to a Scala mutable `Buffer`. - * @see [[AsScalaConverters.asScalaBuffer]] + * @see [[JavaConverters.asScalaBuffer]] */ implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l) /** Implicitly converts a Java `Set` to a Scala mutable `Set`. 
- * @see [[AsScalaConverters.asScalaSet]] + * @see [[JavaConverters.asScalaSet]] */ implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s) /** Implicitly converts a Java `Map` to a Scala mutable `Map`. - * @see [[AsScalaConverters.mapAsScalaMap]] + * @see [[JavaConverters.mapAsScalaMap]] */ - implicit def `map AsScala`[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap(m) + implicit def `map AsScala`[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = mapAsScalaMap(m) /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. - * @see [[AsScalaConverters.mapAsScalaConcurrentMap]] + * @see [[JavaConverters.mapAsScalaConcurrentMap]] */ - implicit def `map AsScalaConcurrentMap`[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = mapAsScalaConcurrentMap(m) + implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = mapAsScalaConcurrentMap(m) /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`. - * @see [[AsScalaConverters.dictionaryAsScalaMap]] + * @see [[JavaConverters.dictionaryAsScalaMap]] */ - implicit def `dictionary AsScalaMap`[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap(p) + implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V]): mutable.Map[K, V] = dictionaryAsScalaMap(p) /** Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. - * @see [[AsScalaConverters.propertiesAsScalaMap]] + * @see [[JavaConverters.propertiesAsScalaMap]] */ implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) } /** Defines implicit conversions from Scala to Java collections. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") trait ToJavaImplicits { /** Implicitly converts a Scala `Iterator` to a Java `Iterator`. 
- * @see [[AsJavaConverters.asJavaIterator]] + * @see [[JavaConverters.asJavaIterator]] */ implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`. - * @see [[AsJavaConverters.asJavaEnumeration]] + * @see [[JavaConverters.asJavaEnumeration]] */ implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) /** Implicitly converts a Scala `Iterable` to a Java `Iterable`. - * @see [[AsJavaConverters.asJavaIterable]] + * @see [[JavaConverters.asJavaIterable]] */ implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`. - * @see [[AsJavaConverters.asJavaCollection]] + * @see [[JavaConverters.asJavaCollection]] */ implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) /** Implicitly converts a Scala mutable `Buffer` to a Java `List`. - * @see [[AsJavaConverters.bufferAsJavaList]] + * @see [[JavaConverters.bufferAsJavaList]] */ implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) /** Implicitly converts a Scala mutable `Seq` to a Java `List`. - * @see [[AsJavaConverters.mutableSeqAsJavaList]] + * @see [[JavaConverters.mutableSeqAsJavaList]] */ implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) /** Implicitly converts a Scala `Seq` to a Java `List`. - * @see [[AsJavaConverters.seqAsJavaList]] + * @see [[JavaConverters.seqAsJavaList]] */ implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) /** Implicitly converts a Scala mutable `Set` to a Java `Set`. 
- * @see [[AsJavaConverters.mutableSetAsJavaSet]] + * @see [[JavaConverters.mutableSetAsJavaSet]] */ implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) /** Implicitly converts a Scala `Set` to a Java `Set`. - * @see [[AsJavaConverters.setAsJavaSet]] + * @see [[JavaConverters.setAsJavaSet]] */ implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) /** Implicitly converts a Scala mutable `Map` to a Java `Map`. - * @see [[AsJavaConverters.mutableMapAsJavaMap]] + * @see [[JavaConverters.mutableMapAsJavaMap]] */ - implicit def `mutableMap AsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m) + implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = mutableMapAsJavaMap(m) /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * @see [[AsJavaConverters.asJavaDictionary]] + * @see [[JavaConverters.asJavaDictionary]] */ - implicit def `dictionary asJava`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m) + implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = asJavaDictionary(m) /** Implicitly converts a Scala `Map` to a Java `Map`. - * @see [[AsJavaConverters.mapAsJavaMap]] + * @see [[JavaConverters.mapAsJavaMap]] */ - implicit def `map AsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m) + implicit def `map AsJavaMap`[K, V](m: Map[K, V]): ju.Map[K, V] = mapAsJavaMap(m) /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. 
- * @see [[AsJavaConverters.mapAsJavaConcurrentMap]] + * @see [[JavaConverters.mapAsJavaConcurrentMap]] */ - implicit def `map AsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m) + implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = mapAsJavaConcurrentMap(m) } /** @@ -146,6 +149,7 @@ trait ToJavaImplicits { * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") object ImplicitConversionsToJava extends ToJavaImplicits /** @@ -154,6 +158,7 @@ object ImplicitConversionsToJava extends ToJavaImplicits * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") object ImplicitConversionsToScala extends ToScalaImplicits /** @@ -172,4 +177,5 @@ object ImplicitConversionsToScala extends ToScalaImplicits * The above example returns `null` instead of producing a type error at compile-time. The map is * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits diff --git a/src/library/scala/collection/convert/JavaCollectionWrappers.scala b/src/library/scala/collection/convert/JavaCollectionWrappers.scala new file mode 100644 index 000000000000..f79adff98e23 --- /dev/null +++ b/src/library/scala/collection/convert/JavaCollectionWrappers.scala @@ -0,0 +1,635 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.util.{NavigableMap} +import java.{lang => jl, util => ju} + +import scala.jdk.CollectionConverters._ +import scala.util.Try +import scala.util.chaining._ +import scala.util.control.ControlThrowable + +/** Wrappers for exposing Scala collections as Java collections and vice-versa */ +@SerialVersionUID(3L) +// not private[convert] because `WeakHashMap` uses JMapWrapper +private[collection] object JavaCollectionWrappers extends Serializable { + @SerialVersionUID(3L) + class IteratorWrapper[A](val underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next() + def hasMoreElements = underlying.hasNext + def nextElement() = underlying.next() + override def remove(): Nothing = throw new UnsupportedOperationException + override def equals(other: Any): Boolean = other match { + case that: IteratorWrapper[_] => this.underlying == that.underlying + case _ => false + } + override def hashCode: Int = underlying.hashCode() + } + + @SerialVersionUID(3L) + class JIteratorWrapper[A](val underlying: ju.Iterator[A]) extends AbstractIterator[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next + override def equals(other: Any): Boolean = other match { + case that: JIteratorWrapper[_] => this.underlying == that.underlying + case _ => false + } + override def hashCode: Int = underlying.hashCode() + } + + @SerialVersionUID(3L) + class JEnumerationWrapper[A](val underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Serializable { + def hasNext = underlying.hasMoreElements + def next() = underlying.nextElement + override def 
equals(other: Any): Boolean = other match { + case that: JEnumerationWrapper[_] => this.underlying == that.underlying + case _ => false + } + override def hashCode: Int = underlying.hashCode() + } + + trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { + val underlying: Iterable[A] + def size = underlying.size + override def iterator: IteratorWrapper[A] = new IteratorWrapper(underlying.iterator) + override def isEmpty = underlying.isEmpty + } + + @SerialVersionUID(3L) + class IterableWrapper[A](val underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] with Serializable { + override def equals(other: Any): Boolean = other match { + case that: IterableWrapper[_] => this.underlying == that.underlying + case _ => false + } + override def hashCode: Int = underlying.hashCode() + } + + @SerialVersionUID(3L) + class JIterableWrapper[A](val underlying: jl.Iterable[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer + override def isEmpty: Boolean = !underlying.iterator().hasNext + override def equals(other: Any): Boolean = other match { + case that: JIterableWrapper[_] => this.underlying == that.underlying + case _ => false + } + override def hashCode: Int = underlying.hashCode() + } + + @SerialVersionUID(3L) + class JCollectionWrapper[A](val underlying: ju.Collection[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def size = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer + override def equals(other: Any): Boolean = other match { + case that: 
JCollectionWrapper[_] => this.underlying == that.underlying + case _ => false + } + override def hashCode: Int = underlying.hashCode() + } + + @SerialVersionUID(3L) + class SeqWrapper[A](val underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + } + + @SerialVersionUID(3L) + class MutableSeqWrapper[A](val underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { + val p = underlying(i) + underlying(i) = elem + p + } + } + + @SerialVersionUID(3L) + class MutableBufferWrapper[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } + override def add(elem: A) = { underlying += elem; true } + override def remove(i: Int) = underlying remove i + } + + @SerialVersionUID(3L) + class JListWrapper[A](val underlying: ju.List[A]) + extends mutable.AbstractBuffer[A] + with SeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with StrictOptimizedSeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with IterableFactoryDefaults[A, mutable.Buffer] + with Serializable { + def length = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterator: Iterator[A] = underlying.iterator.asScala + def apply(i: Int) = underlying.get(i) + def update(i: Int, elem: A) = underlying.set(i, elem) + def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } + def addOne(elem: A): this.type = { underlying add elem; this } + def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) + def insertAll(i: Int, elems: IterableOnce[A]) = { + val ins = underlying.subList(0, i) + elems.iterator.foreach(ins.add(_)) + } + def 
remove(i: Int) = underlying.remove(i) + def clear() = underlying.clear() + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. + override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + def remove(from: Int, n: Int): Unit = underlying.subList(from, from+n).clear() + override def iterableFactory: mutable.ArrayBuffer.type = mutable.ArrayBuffer + override def subtractOne(elem: A): this.type = { underlying.remove(elem.asInstanceOf[AnyRef]); this } + } + + @SerialVersionUID(3L) + class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => + // Note various overrides to avoid performance gotchas. + override def contains(o: Object): Boolean = { + try { underlying.contains(o.asInstanceOf[A]) } + catch { case cce: ClassCastException => false } + } + override def isEmpty = underlying.isEmpty + def size = underlying.size + def iterator: ju.Iterator[A] = new ju.Iterator[A] { + val ui = underlying.iterator + var prev: Option[A] = None + def hasNext = ui.hasNext + def next = { val e = ui.next(); prev = Some(e); e } + override def remove() = prev match { + case Some(e) => + underlying match { + case ms: mutable.Set[a] => + ms remove e + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + + @SerialVersionUID(3L) + class MutableSetWrapper[A](val underlying: mutable.Set[A]) extends SetWrapper[A](underlying) with Serializable { + override def add(elem: A) = { + val sz = underlying.size + underlying += elem + sz < underlying.size + } + override def remove(elem: AnyRef) = + try underlying.remove(elem.asInstanceOf[A]) + catch 
{ case ex: ClassCastException => false } + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + class JSetWrapper[A](val underlying: ju.Set[A]) + extends mutable.AbstractSet[A] + with mutable.SetOps[A, mutable.Set, mutable.Set[A]] + with StrictOptimizedSetOps[A, mutable.Set, mutable.Set[A]] + with Serializable { + + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + def iterator: Iterator[A] = underlying.iterator.asScala + + def contains(elem: A): Boolean = underlying.contains(elem) + + def addOne(elem: A): this.type = { underlying add elem; this } + def subtractOne(elem: A): this.type = { underlying remove elem; this } + + override def remove(elem: A): Boolean = underlying remove elem + + override def clear(): Unit = { + underlying.clear() + } + + override def empty: mutable.Set[A] = new JSetWrapper(new ju.HashSet[A]) + + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. 
+ override def clone(): mutable.Set[A] = new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) + + override def iterableFactory: IterableFactory[mutable.Set] = mutable.HashSet + + override def filterInPlace(p: A => Boolean): this.type = { + if (underlying.size() > 0) underlying.removeIf(!p(_)) + this + } + } + + @SerialVersionUID(3L) + class MapWrapper[K, V](underlying: Map[K, V]) extends ju.AbstractMap[K, V] with Serializable { self => + override def size = underlying.size + + override def get(key: AnyRef): V = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def entrySet: ju.Set[ju.Map.Entry[K, V]] = new ju.AbstractSet[ju.Map.Entry[K, V]] { + def size = self.size + + def iterator: ju.Iterator[ju.Map.Entry[K, V]] = new ju.Iterator[ju.Map.Entry[K, V]] { + val ui = underlying.iterator + var prev : Option[K] = None + + def hasNext = ui.hasNext + + def next(): ju.Map.Entry[K, V] = { + val (k, v) = ui.next() + prev = Some(k) + new ju.Map.Entry[K, V] { + def getKey = k + def getValue = v + def setValue(v1 : V) = self.put(k, v1) + + // It's important that this implementation conform to the contract + // specified in the javadocs of java.util.Map.Entry.hashCode + // + // See https://github.com/scala/bug/issues/10663 + override def hashCode = { + (if (k == null) 0 else k.hashCode()) ^ + (if (v == null) 0 else v.hashCode()) + } + + override def equals(other: Any) = other match { + case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue + case _ => false + } + } + } + + override def remove(): Unit = { + prev match { + case Some(k) => + underlying match { + case mm: mutable.Map[a, _] => + mm -= k + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + } + + override def containsKey(key: 
AnyRef): Boolean = try { + // Note: Subclass of collection.Map with specific key type may redirect generic + // contains to specific contains, which will throw a ClassCastException if the + // wrong type is passed. This is why we need a type cast to A inside a try/catch. + underlying.contains(key.asInstanceOf[K]) + } catch { + case ex: ClassCastException => false + } + } + + @SerialVersionUID(3L) + class MutableMapWrapper[K, V](val underlying: mutable.Map[K, V]) extends MapWrapper[K, V](underlying) { + override def put(k: K, v: V) = underlying.put(k, v) match { + case Some(v1) => v1 + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef): V = try { + underlying remove k.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + abstract class AbstractJMapWrapper[K, V] + extends mutable.AbstractMap[K, V] + with JMapWrapperLike[K, V, mutable.Map, mutable.Map[K, V]] with Serializable + + trait JMapWrapperLike[K, V, +CC[X, Y] <: mutable.MapOps[X, Y, CC, _], +C <: mutable.MapOps[K, V, CC, C]] + extends mutable.MapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), mutable.Iterable, C] { + + def underlying: ju.Map[K, V] + + override def size = underlying.size + + // support Some(null) if currently bound to null + def get(k: K) = { + val v = underlying.get(k) + if (v != null) + Some(v) + else if (underlying.containsKey(k)) + Some(null.asInstanceOf[V]) + else + None + } + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => update(key, null.asInstanceOf[V]); null.asInstanceOf[V] + case v => v + } + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + // support Some(null) 
if currently bound to null + override def put(k: K, v: V): Option[V] = + if (v == null) { + val present = underlying.containsKey(k) + val result = underlying.put(k, v) + if (present) Some(result) else None + } else { + var result: Option[V] = None + def recompute(k0: K, v0: V): V = v.tap(_ => + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + ) + underlying.compute(k, recompute) + result + } + + override def update(k: K, v: V): Unit = underlying.put(k, v) + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => update(key, null.asInstanceOf[V]); Some(null.asInstanceOf[V]) + } + } + + // support Some(null) if currently bound to null + override def remove(k: K): Option[V] = { + var result: Option[V] = None + def recompute(k0: K, v0: V): V = { + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + null.asInstanceOf[V] + } + underlying.compute(k, recompute) + result + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { val e = ui.next(); (e.getKey, e.getValue) } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val i = underlying.entrySet().iterator() + while (i.hasNext) { + val entry = i.next() + f(entry.getKey, entry.getValue) + } + } + + override def clear() = underlying.clear() + + } + + /** Wraps a Java map as a Scala one. If the map is to support concurrent access, + * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized + * (e.g. 
from `java.util.Collections.synchronizedMap`), it is your responsibility + * to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. + */ + @SerialVersionUID(3L) + class JMapWrapper[K, V](val underlying : ju.Map[K, V]) + extends AbstractJMapWrapper[K, V] with Serializable { + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty: JMapWrapper[K, V] = new JMapWrapper(new ju.HashMap[K, V]) + } + + @SerialVersionUID(3L) + class ConcurrentMapWrapper[K, V](underlying: concurrent.Map[K, V]) extends MutableMapWrapper[K, V](underlying) with juc.ConcurrentMap[K, V] { + + def underlyingConcurrentMap: concurrent.Map[K, V] = underlying + + override def putIfAbsent(k: K, v: V) = underlying.putIfAbsent(k, v).getOrElse(null.asInstanceOf[V]) + + override def remove(k: AnyRef, v: AnyRef) = + try underlying.remove(k.asInstanceOf[K], v.asInstanceOf[V]) + catch { case ex: ClassCastException => false } + + override def replace(k: K, v: V): V = underlying.replace(k, v).getOrElse(null.asInstanceOf[V]) + + override def replace(k: K, oldval: V, newval: V) = underlying.replace(k, oldval, newval) + } + + /** Wraps a concurrent Java map as a Scala one. Single-element concurrent + * access is supported; multi-element operations such as maps and filters + * are not guaranteed to be atomic. 
+ */ + @SerialVersionUID(3L) + class JConcurrentMapWrapper[K, V](val underlying: juc.ConcurrentMap[K, V]) + extends AbstractJMapWrapper[K, V] + with concurrent.Map[K, V] { + + override def get(k: K) = Option(underlying get k) + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => super/*[concurrent.Map]*/.getOrElseUpdate(key, op) + case v => v + } + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty: JConcurrentMapWrapper[K, V] = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[K, V]) + + def putIfAbsent(k: K, v: V): Option[V] = Option(underlying.putIfAbsent(k, v)) + + def remove(k: K, v: V): Boolean = underlying.remove(k, v) + + def replace(k: K, v: V): Option[V] = Option(underlying.replace(k, v)) + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = underlying.replace(k, oldvalue, newvalue) + + override def lastOption: Option[(K, V)] = + underlying match { + case nav: NavigableMap[K @unchecked, V @unchecked] => Option(nav.lastEntry).map(e => (e.getKey, e.getValue)) + case _ if isEmpty => None + case _ => Try(last).toOption + } + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull // see scala/scala#10129 + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => super/*[concurrent.Map]*/.updateWith(key)(remappingFunction) + } + } + } + + @SerialVersionUID(3L) + class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable { + def size: Int = underlying.size + def isEmpty: Boolean = underlying.isEmpty + def keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration + def elements: ju.Enumeration[V] = 
underlying.valuesIterator.asJavaEnumeration + def get(key: AnyRef) = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + def put(key: K, value: V): V = underlying.put(key, value) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + override def remove(key: AnyRef) = try { + underlying remove key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def equals(other: Any): Boolean = other match { + case that: DictionaryWrapper[_, _] => this.underlying == that.underlying + case _ => false + } + + override def hashCode: Int = underlying.hashCode() + } + + @SerialVersionUID(3L) + class JDictionaryWrapper[K, V](val underlying: ju.Dictionary[K, V]) extends mutable.AbstractMap[K, V] with Serializable { + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + + def get(k: K) = Option(underlying get k) + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + override def put(k: K, v: V): Option[V] = Option(underlying.put(k, v)) + + override def update(k: K, v: V): Unit = { underlying.put(k, v) } + + override def remove(k: K): Option[V] = Option(underlying remove k) + def iterator = underlying.keys.asScala map (k => (k, underlying get k)) + + override def clear() = iterator.foreach(entry => underlying.remove(entry._1)) + + override def mapFactory: mutable.HashMap.type = mutable.HashMap + } + + @SerialVersionUID(3L) + class JPropertiesWrapper(underlying: ju.Properties) + extends mutable.AbstractMap[String, String] + with mutable.MapOps[String, String, mutable.Map, mutable.Map[String, String]] + with 
StrictOptimizedMapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedIterableOps[(String, String), mutable.Iterable, mutable.Map[String, String]] + with Serializable { + + override def size = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = size + def get(k: String) = { + val v = underlying get k + if (v != null) Some(v.asInstanceOf[String]) else None + } + + def addOne(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: String): this.type = { underlying remove key; this } + + override def put(k: String, v: String): Option[String] = { + val r = underlying.put(k, v) + if (r != null) Some(r.asInstanceOf[String]) else None + } + + override def update(k: String, v: String): Unit = { underlying.put(k, v) } + + override def remove(k: String): Option[String] = { + val r = underlying remove k + if (r != null) Some(r.asInstanceOf[String]) else None + } + + def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { + val e = ui.next() + (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) + } + } + + override def clear() = underlying.clear() + + override def empty: JPropertiesWrapper = new JPropertiesWrapper(new ju.Properties) + + def getProperty(key: String) = underlying.getProperty(key) + + def getProperty(key: String, defaultValue: String) = + underlying.getProperty(key, defaultValue) + + def setProperty(key: String, value: String) = + underlying.setProperty(key, value) + + override def mapFactory: mutable.HashMap.type = mutable.HashMap + } + + /** Thrown when certain Map operations attempt to put a null value. 
*/ + private val PutNull = new ControlThrowable {} +} diff --git a/src/library/scala/collection/convert/StreamExtensions.scala b/src/library/scala/collection/convert/StreamExtensions.scala new file mode 100644 index 000000000000..90b8bcb9031d --- /dev/null +++ b/src/library/scala/collection/convert/StreamExtensions.scala @@ -0,0 +1,480 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert + +import java.util.Spliterator +import java.util.stream._ +import java.{lang => jl} + +import scala.annotation.implicitNotFound +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} +import scala.jdk.CollectionConverters._ +import scala.jdk._ + +/** Defines extension methods to create Java Streams for Scala collections, available through + * [[scala.jdk.javaapi.StreamConverters]]. + */ +trait StreamExtensions { + // collections + + implicit class IterableHasSeqStream[A](cc: IterableOnce[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = + s.fromStepper(cc.stepper, par = false) + } + + // Not `CC[X] <: IterableOnce[X]`, but `C` with an extra constraint, to support non-parametric classes like IntAccumulator + implicit class IterableNonGenericHasParStream[A, C <: IterableOnce[_]](c: C)(implicit ev: C <:< IterableOnce[A]) { + private type IterableOnceWithEfficientStepper = IterableOnce[A] { + def stepper[S <: Stepper[_]](implicit shape : StepperShape[A, S]) : S with EfficientSplit + } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[A, S, St], + st: StepperShape[A, St], + @implicitNotFound("`parStream` can only be called on collections where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: C <:< IterableOnceWithEfficientStepper): S = + s.fromStepper(ev(c).stepper, par = true) + } + + // maps + + implicit class MapHasSeqKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[K, S, St], st: StepperShape[K, St]): S = + s.fromStepper(cc.keyStepper, par = false) + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaSeqValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[V, S, St], st: StepperShape[V, St]): S = + s.fromStepper(cc.valueStepper, par = false) + + // The asJavaSeqStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[(K, V), S, St], st: StepperShape[(K, V), St]): S = + s.fromStepper(cc.stepper, par = false) + } + + + implicit class MapHasParKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + private type MapOpsWithEfficientKeyStepper = collection.MapOps[K, V, collection.Map, _] { def keyStepper[S <: Stepper[_]](implicit shape : StepperShape[K, S]) : S with EfficientSplit } + private type MapOpsWithEfficientValueStepper = collection.MapOps[K, V, collection.Map, _] { def valueStepper[S <: Stepper[_]](implicit shape : StepperShape[V, S]) : S with EfficientSplit } + private type MapOpsWithEfficientStepper = collection.MapOps[K, V, collection.Map, _] { def stepper[S <: Stepper[_]](implicit shape : StepperShape[(K, V), S]) : S with EfficientSplit } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[K, S, St], + st: StepperShape[K, St], + @implicitNotFound("parKeyStream can only be called on maps where `keyStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientKeyStepper): S = + s.fromStepper(cc.keyStepper, par = true) + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[V, S, St], + st: StepperShape[V, St], + @implicitNotFound("parValueStream can only be called on maps where `valueStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientValueStepper): S = + s.fromStepper(cc.valueStepper, par = true) + + // The asJavaParStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[(K, V), S, St], + st: StepperShape[(K, V), St], + @implicitNotFound("parStream can only be called on maps where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientStepper): S = + s.fromStepper(cc.stepper, par = true) + } + + // steppers + + implicit class StepperHasSeqStream[A](stepper: Stepper[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] => st.seqUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = false) + } + } + + implicit class StepperHasParStream[A](stepper: Stepper[A] with EfficientSplit) { + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] with EfficientSplit => st.parUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = true) + } + } + + // arrays + // uses the JDK array spliterators (`DoubleArraySpliterator`). users can also call + // `array.stepper.seqStream`, which then uses the Scala steppers (`DoubleArrayStepper`). the + // steppers are also available on byte/short/char/float arrays (`WidenedByteArrayStepper`), + // JDK spliterators only for double/int/long/reference. + + implicit class DoubleArrayHasSeqParStream(a: Array[Double]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = asJavaSeqStream.parallel + } + + implicit class IntArrayHasSeqParStream(a: Array[Int]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. 
*/ + def asJavaParStream: IntStream = asJavaSeqStream.parallel + } + + implicit class LongArrayHasSeqParStream(a: Array[Long]) { + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaSeqStream: LongStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaParStream: LongStream = asJavaSeqStream.parallel + } + + implicit class AnyArrayHasSeqParStream[A <: AnyRef](a: Array[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaSeqStream: Stream[A] = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaParStream: Stream[A] = asJavaSeqStream.parallel + } + + implicit class ByteArrayHasSeqParStream(a: Array[Byte]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class ShortArrayHasSeqParStream(a: Array[Short]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class CharArrayHasSeqParStream(a: Array[Char]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. 
*/ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class FloatArrayHasSeqParStream(a: Array[Float]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = a.stepper.asJavaParStream + } + + + + // strings + + implicit class StringHasSeqParStream(s: String) { + /** + * A sequential stream on the characters of a string, same as [[asJavaSeqCharStream]]. See also + * [[asJavaSeqCodePointStream]]. + */ + def asJavaSeqStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ false) + /** + * A parallel stream on the characters of a string, same as [[asJavaParCharStream]]. See also + * [[asJavaParCodePointStream]]. + */ + def asJavaParStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ true) + + /** A sequential stream on the characters of a string. See also [[asJavaSeqCodePointStream]]. */ + def asJavaSeqCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ false) + /** A parallel stream on the characters of a string. See also [[asJavaParCodePointStream]]. */ + def asJavaParCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ true) + + /** A sequential stream on the code points of a string. See also [[asJavaSeqCharStream]]. */ + def asJavaSeqCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ false) + /** A parallel stream on the code points of a string. See also [[asJavaParCharStream]]. 
 */ + def asJavaParCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ true) + } + + // toScala for streams + + implicit class StreamHasToScala[A](stream: Stream[A]) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts streams of boxed integers, longs or + * doubles to the primitive accumulators ([[scala.jdk.IntAccumulator]], etc.). + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
 + */ + def toScala[C1](factory: collection.Factory[A, C1])(implicit info: AccumulatorFactoryInfo[A, C1]): C1 = { + + def anyAcc = stream.collect(AnyAccumulator.supplier[A], AnyAccumulator.adder[A], AnyAccumulator.merger[A]) + if (info.companion == AnyAccumulator) anyAcc.asInstanceOf[C1] + else if (info.companion == IntAccumulator) stream.asInstanceOf[Stream[Int]].collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger).asInstanceOf[C1] + else if (info.companion == LongAccumulator) stream.asInstanceOf[Stream[Long]].collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) stream.asInstanceOf[Stream[Double]].collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger).asInstanceOf[C1] + else if (stream.isParallel) anyAcc.to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** Convert a generic Java Stream wrapping a primitive type to a corresponding primitive + * Stream. + */ + def asJavaPrimitiveStream[S](implicit unboxer: StreamUnboxer[A, S]): S = unboxer(stream) + } + + implicit class IntStreamHasToScala(stream: IntStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `IntStream` to a primitive + * [[scala.jdk.IntAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. 
If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Int, C1])(implicit info: AccumulatorFactoryInfo[Int, C1]): C1 = { + def intAcc = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C1] + else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C1] + else if (stream.isParallel) intAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + } + + implicit class LongStreamHasToScala(stream: LongStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `LongStream` to a primitive + * [[scala.jdk.LongAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
 + */ + def toScala[C1](factory: collection.Factory[Long, C1])(implicit info: AccumulatorFactoryInfo[Long, C1]): C1 = { + def longAcc = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C1] + else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C1] + else if (stream.isParallel) longAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + } + + implicit class DoubleStreamHasToScala(stream: DoubleStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `DoubleStream` to a primitive + * [[scala.jdk.DoubleAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + def toScala[C1](factory: collection.Factory[Double, C1])(implicit info: AccumulatorFactoryInfo[Double, C1]): C1 = { + def doubleAcc = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C1] + else if (stream.isParallel) doubleAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + } +} + +object StreamExtensions { + /** An implicit StreamShape instance connects element types with the corresponding specialized + * Stream and Stepper types. This is used in `asJavaStream` extension methods to create + * generic or primitive streams according to the element type. + */ + sealed trait StreamShape[T, S <: BaseStream[_, _], St <: Stepper[_]] { + final def fromStepper(st: St, par: Boolean): S = mkStream(st, par) + protected def mkStream(st: St, par: Boolean): S + } + + object StreamShape extends StreamShapeLowPriority1 { + // primitive + implicit val intStreamShape : StreamShape[Int , IntStream , IntStepper] = mkIntStreamShape[Int] + implicit val longStreamShape : StreamShape[Long , LongStream , LongStepper] = mkLongStreamShape[Long] + implicit val doubleStreamShape: StreamShape[Double, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Double] + + // widening + implicit val byteStreamShape : StreamShape[Byte , IntStream , IntStepper] = mkIntStreamShape[Byte] + implicit val shortStreamShape: StreamShape[Short, IntStream , IntStepper] = mkIntStreamShape[Short] + implicit val charStreamShape : StreamShape[Char , IntStream , IntStepper] = mkIntStreamShape[Char] + implicit val floatStreamShape: StreamShape[Float, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Float] + + // boxed java primitives + + implicit val 
jIntegerStreamShape : StreamShape[jl.Integer , IntStream , IntStepper ] = mkIntStreamShape[jl.Integer] + implicit val jLongStreamShape : StreamShape[jl.Long , LongStream , LongStepper ] = mkLongStreamShape[jl.Long] + implicit val jDoubleStreamShape : StreamShape[jl.Double , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Double] + implicit val jByteStreamShape : StreamShape[jl.Byte , IntStream , IntStepper ] = mkIntStreamShape[jl.Byte] + implicit val jShortStreamShape : StreamShape[jl.Short , IntStream , IntStepper ] = mkIntStreamShape[jl.Short] + implicit val jCharacterStreamShape : StreamShape[jl.Character, IntStream , IntStepper ] = mkIntStreamShape[jl.Character] + implicit val jFloatStreamShape : StreamShape[jl.Float , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Float] + + private def mkIntStreamShape[T]: StreamShape[T, IntStream, IntStepper] = new StreamShape[T, IntStream, IntStepper] { + protected def mkStream(st: IntStepper, par: Boolean): IntStream = StreamSupport.intStream(st.spliterator, par) + } + + private def mkLongStreamShape[T]: StreamShape[T, LongStream, LongStepper] = new StreamShape[T, LongStream, LongStepper] { + protected def mkStream(st: LongStepper, par: Boolean): LongStream = StreamSupport.longStream(st.spliterator, par) + } + + private def mkDoubleStreamShape[T]: StreamShape[T, DoubleStream, DoubleStepper] = new StreamShape[T, DoubleStream, DoubleStepper] { + protected def mkStream(st: DoubleStepper, par: Boolean): DoubleStream = StreamSupport.doubleStream(st.spliterator, par) + } + } + + trait StreamShapeLowPriority1 { + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T], Stepper[T]] = anyStreamShapePrototype.asInstanceOf[StreamShape[T, Stream[T], Stepper[T]]] + + private[this] val anyStreamShapePrototype: StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] = new StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] { + def mkStream(s: Stepper[AnyRef], par: Boolean): Stream[AnyRef] = 
StreamSupport.stream(s.spliterator.asInstanceOf[Spliterator[AnyRef]], par) + } + } + + /** Connects a stream element type `A` to the corresponding, potentially specialized, Stream type. + * Used in the `stream.asJavaPrimitiveStream` extension method. + */ + sealed trait StreamUnboxer[A, S] { + def apply(s: Stream[A]): S + } + object StreamUnboxer { + implicit val intStreamUnboxer: StreamUnboxer[Int, IntStream] = new StreamUnboxer[Int, IntStream] { + def apply(s: Stream[Int]): IntStream = s.mapToInt(x => x) + } + implicit val javaIntegerStreamUnboxer: StreamUnboxer[jl.Integer, IntStream] = intStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Integer, IntStream]] + + implicit val longStreamUnboxer: StreamUnboxer[Long, LongStream] = new StreamUnboxer[Long, LongStream] { + def apply(s: Stream[Long]): LongStream = s.mapToLong(x => x) + } + implicit val javaLongStreamUnboxer: StreamUnboxer[jl.Long, LongStream] = longStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Long, LongStream]] + + implicit val doubleStreamUnboxer: StreamUnboxer[Double, DoubleStream] = new StreamUnboxer[Double, DoubleStream] { + def apply(s: Stream[Double]): DoubleStream = s.mapToDouble(x => x) + } + implicit val javaDoubleStreamUnboxer: StreamUnboxer[jl.Double, DoubleStream] = doubleStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Double, DoubleStream]] + } + + + + /** An implicit `AccumulatorFactoryInfo` connects primitive element types to the corresponding + * specialized [[scala.jdk.Accumulator]] factory. This is used in the `stream.toScala` extension methods + * to ensure collecting a primitive stream into a primitive accumulator does not box. + * + * When converting to a collection other than `Accumulator`, the generic + * `noAccumulatorFactoryInfo` is passed. 
+ */ + trait AccumulatorFactoryInfo[A, C] { + val companion: AnyRef + } + trait LowPriorityAccumulatorFactoryInfo { + implicit def noAccumulatorFactoryInfo[A, C]: AccumulatorFactoryInfo[A, C] = noAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, C]] + private val noAccumulatorFactoryInfoPrototype: AccumulatorFactoryInfo[AnyRef, AnyRef] = new AccumulatorFactoryInfo[AnyRef, AnyRef] { + val companion: AnyRef = null + } + } + object AccumulatorFactoryInfo extends LowPriorityAccumulatorFactoryInfo { + implicit def anyAccumulatorFactoryInfo[A]: AccumulatorFactoryInfo[A, AnyAccumulator[A]] = anyAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, AnyAccumulator[A]]] + + private object anyAccumulatorFactoryInfoPrototype extends AccumulatorFactoryInfo[AnyRef, AnyAccumulator[AnyRef]] { + val companion: AnyRef = AnyAccumulator + } + + implicit val intAccumulatorFactoryInfo: AccumulatorFactoryInfo[Int, IntAccumulator] = new AccumulatorFactoryInfo[Int, IntAccumulator] { + val companion: AnyRef = IntAccumulator + } + + implicit val longAccumulatorFactoryInfo: AccumulatorFactoryInfo[Long, LongAccumulator] = new AccumulatorFactoryInfo[Long, LongAccumulator] { + val companion: AnyRef = LongAccumulator + } + + implicit val doubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[Double, DoubleAccumulator] = new AccumulatorFactoryInfo[Double, DoubleAccumulator] { + val companion: AnyRef = DoubleAccumulator + } + + implicit val jIntegerAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Integer, IntAccumulator] = intAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Integer, IntAccumulator]] + implicit val jLongAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Long, IntAccumulator] = longAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Long, IntAccumulator]] + implicit val jDoubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Double, IntAccumulator] = doubleAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Double, 
IntAccumulator]] + } +} diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala deleted file mode 100644 index 7c51d8aa83e7..000000000000 --- a/src/library/scala/collection/convert/WrapAsJava.scala +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import scala.language.implicitConversions - -@deprecated("use JavaConverters or consider ToJavaImplicits", since="2.12.0") -trait WrapAsJava extends LowPriorityWrapAsJava { - // provide higher-priority implicits with names that don't exist in JavaConverters for the case - // when importing both JavaConverters._ and JavaConversions._. 
otherwise implicit conversions - // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789 - implicit def `deprecated asJavaIterator`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) - implicit def `deprecated asJavaEnumeration`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) - implicit def `deprecated asJavaIterable`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) - implicit def `deprecated asJavaCollection`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) - implicit def `deprecated bufferAsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) - implicit def `deprecated mutableSeqAsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) - implicit def `deprecated seqAsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) - implicit def `deprecated mutableSetAsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) - implicit def `deprecated setAsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) - implicit def `deprecated mutableMapAsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m) - implicit def `deprecated asJavaDictionary`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m) - implicit def `deprecated mapAsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m) - implicit def `deprecated mapAsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m) -} - -private[convert] trait LowPriorityWrapAsJava { - import Wrappers._ - - /** - * Implicitly converts a Scala Iterator to a Java Iterator. - * The returned Java Iterator is backed by the provided Scala - * Iterator and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. 
- * - * If the Scala Iterator was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Iterator)` then the original - * Java Iterator will be returned. - * - * @param it The Iterator to be converted. - * @return A Java Iterator view of the argument. - */ - implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match { - case null => null - case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] - case _ => IteratorWrapper(it) - } - - /** - * Implicitly converts a Scala Iterator to a Java Enumeration. - * The returned Java Enumeration is backed by the provided Scala - * Iterator and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterator was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Enumeration)` then the - * original Java Enumeration will be returned. - * - * @param it The Iterator to be converted. - * @return A Java Enumeration view of the argument. - */ - implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match { - case null => null - case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]] - case _ => IteratorWrapper(it) - } - - /** - * Implicitly converts a Scala Iterable to a Java Iterable. - * The returned Java Iterable is backed by the provided Scala - * Iterable and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterable was previously obtained from an implicit or - * explicit call of `asIterable(java.lang.Iterable)` then the original - * Java Iterable will be returned. - * - * @param i The Iterable to be converted. - * @return A Java Iterable view of the argument. 
- */ - implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match { - case null => null - case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]] - case _ => IterableWrapper(i) - } - - /** - * Implicitly converts a Scala Iterable to an immutable Java - * Collection. - * - * If the Scala Iterable was previously obtained from an implicit or - * explicit call of `asSizedIterable(java.util.Collection)` then the original - * Java Collection will be returned. - * - * @param it The SizedIterable to be converted. - * @return A Java Collection view of the argument. - */ - implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match { - case null => null - case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]] - case _ => new IterableWrapper(it) - } - - /** - * Implicitly converts a Scala mutable Buffer to a Java List. - * The returned Java List is backed by the provided Scala - * Buffer and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Buffer was previously obtained from an implicit or - * explicit call of `asBuffer(java.util.List)` then the original - * Java List will be returned. - * - * @param b The Buffer to be converted. - * @return A Java List view of the argument. - */ - implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableBufferWrapper(b) - } - - /** - * Implicitly converts a Scala mutable Seq to a Java List. - * The returned Java List is backed by the provided Scala - * Seq and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Seq was previously obtained from an implicit or - * explicit call of `asSeq(java.util.List)` then the original - * Java List will be returned. - * - * @param seq The Seq to be converted. 
- * @return A Java List view of the argument. - */ - implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match { - case null => null - case JListWrapper(wrapped) => wrapped - case _ => new MutableSeqWrapper(seq) - } - - /** - * Implicitly converts a Scala Seq to a Java List. - * The returned Java List is backed by the provided Scala - * Seq and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Seq was previously obtained from an implicit or - * explicit call of `asSeq(java.util.List)` then the original - * Java List will be returned. - * - * @param seq The Seq to be converted. - * @return A Java List view of the argument. - */ - implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match { - case null => null - case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]] - case _ => new SeqWrapper(seq) - } - - /** - * Implicitly converts a Scala mutable Set to a Java Set. - * The returned Java Set is backed by the provided Scala - * Set and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Set was previously obtained from an implicit or - * explicit call of `asSet(java.util.Set)` then the original - * Java Set will be returned. - * - * @param s The Set to be converted. - * @return A Java Set view of the argument. - */ - implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new MutableSetWrapper(s) - } - - /** - * Implicitly converts a Scala Set to a Java Set. - * The returned Java Set is backed by the provided Scala - * Set and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. 
- * - * If the Scala Set was previously obtained from an implicit or - * explicit call of asSet(java.util.Set) then the original - * Java Set will be returned. - * - * @param s The Set to be converted. - * @return A Java Set view of the argument. - */ - implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match { - case null => null - case JSetWrapper(wrapped) => wrapped - case _ => new SetWrapper(s) - } - - /** - * Implicitly converts a Scala mutable Map to a Java Map. - * The returned Java Map is backed by the provided Scala - * Map and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Map was previously obtained from an implicit or - * explicit call of `asMap(java.util.Map)` then the original - * Java Map will be returned. - * - * @param m The Map to be converted. - * @return A Java Map view of the argument. - */ - implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped - case _ => new MutableMapWrapper(m) - } - - /** - * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * - * The returned Java `Dictionary` is backed by the provided Scala - * `Dictionary` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `Dictionary` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Dictionary)` then the original - * Java Dictionary will be returned. - * - * @param m The `Map` to be converted. - * @return A Java `Dictionary` view of the argument. - */ - implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match { - case null => null - case JDictionaryWrapper(wrapped) => wrapped - case _ => new DictionaryWrapper(m) - } - - /** - * Implicitly converts a Scala `Map` to a Java `Map`. 
- * - * The returned Java `Map` is backed by the provided Scala `Map` and - * any side-effects of using it via the Java interface will be visible - * via the Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Map)` then the original - * Java `Map` will be returned. - * - * @param m The `Map` to be converted. - * @return A Java `Map` view of the argument. - */ - implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match { - case null => null - case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]] - case _ => new MapWrapper(m) - } - - /** - * Implicitly converts a Scala mutable `concurrent.Map` to a Java - * `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala - * `concurrent.Map` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `concurrent.Map` was previously obtained from an implicit or - * explicit call of `mapAsScalaConcurrentMap(java.util.concurrent.ConcurrentMap)` - * then the original Java ConcurrentMap will be returned. - * - * @param m The Scala `concurrent.Map` to be converted. - * @return A Java `ConcurrentMap` view of the argument. 
- */ - implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match { - case null => null - case JConcurrentMapWrapper(wrapped) => wrapped - case _ => new ConcurrentMapWrapper(m) - } -} - -@deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0") -object WrapAsJava extends WrapAsJava diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala deleted file mode 100644 index c1756364816d..000000000000 --- a/src/library/scala/collection/convert/WrapAsScala.scala +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import scala.language.implicitConversions - -@deprecated("use JavaConverters or consider ToScalaImplicits", since="2.12.0") -trait WrapAsScala extends LowPriorityWrapAsScala { - // provide higher-priority implicits with names that don't exist in JavaConverters for the case - // when importing both JavaConverters._ and JavaConversions._. 
otherwise implicit conversions - // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789 - implicit def `deprecated asScalaIterator`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it) - implicit def `deprecated enumerationAsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i) - implicit def `deprecated iterableAsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i) - implicit def `deprecated collectionAsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i) - implicit def `deprecated asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l) - implicit def `deprecated asScalaSet`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s) - implicit def `deprecated mapAsScalaMap`[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap(m) - implicit def `deprecated mapAsScalaConcurrentMap`[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = mapAsScalaConcurrentMap(m) - implicit def `deprecated dictionaryAsScalaMap`[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap(p) - implicit def `deprecated propertiesAsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) -} - -private[convert] trait LowPriorityWrapAsScala { - import Wrappers._ - - /** - * Implicitly converts a Java `Iterator` to a Scala `Iterator`. - * - * The returned Scala `Iterator` is backed by the provided Java `Iterator` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterator` was previously obtained from an implicit or - * explicit call of `asIterator(scala.collection.Iterator)` then the - * original Scala `Iterator` will be returned. - * - * @param it The `Iterator` to be converted. - * @return A Scala `Iterator` view of the argument. 
- */ - implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JIteratorWrapper(it) - } - - /** - * Implicitly converts a Java Enumeration to a Scala Iterator. - * The returned Scala Iterator is backed by the provided Java - * Enumeration and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java Enumeration was previously obtained from an implicit or - * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)` - * then the original Scala Iterator will be returned. - * - * @param i The Enumeration to be converted. - * @return A Scala Iterator view of the argument. - */ - implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match { - case null => null - case IteratorWrapper(wrapped) => wrapped - case _ => JEnumerationWrapper(i) - } - - /** - * Implicitly converts a Java `Iterable` to a Scala `Iterable`. - * - * The returned Scala `Iterable` is backed by the provided Java `Iterable` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterable` was previously obtained from an implicit or - * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)` - * then the original Scala Iterable will be returned. - * - * @param i The Iterable to be converted. - * @return A Scala Iterable view of the argument. - */ - implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JIterableWrapper(i) - } - - /** - * Implicitly converts a Java `Collection` to an Scala `Iterable`. 
- * - * If the Java `Collection` was previously obtained from an implicit or - * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)` - * then the original Scala `Iterable` will be returned. - * - * @param i The Collection to be converted. - * @return A Scala Iterable view of the argument. - */ - implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match { - case null => null - case IterableWrapper(wrapped) => wrapped - case _ => JCollectionWrapper(i) - } - - /** - * Implicitly converts a Java `List` to a Scala mutable `Buffer`. - * - * The returned Scala `Buffer` is backed by the provided Java `List` - * and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java `List` was previously obtained from an implicit or - * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)` - * then the original Scala `Buffer` will be returned. - * - * @param l The `List` to be converted. - * @return A Scala mutable `Buffer` view of the argument. - */ - implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match { - case null => null - case MutableBufferWrapper(wrapped) => wrapped - case _ => new JListWrapper(l) - } - - /** - * Implicitly converts a Java Set to a Scala mutable Set. - * The returned Scala Set is backed by the provided Java - * Set and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java Set was previously obtained from an implicit or - * explicit call of `asScalaSet(scala.collection.mutable.Set)` then - * the original Scala Set will be returned. - * - * @param s The Set to be converted. - * @return A Scala mutable Set view of the argument. 
- */ - implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match { - case null => null - case MutableSetWrapper(wrapped) => wrapped - case _ => new JSetWrapper(s) - } - - /** - * Implicitly converts a Java `Map` to a Scala mutable `Map`. - * - * The returned Scala `Map` is backed by the provided Java `Map` and any - * side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * If the Java `Map` was previously obtained from an implicit or - * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then - * the original Scala Map will be returned. - * - * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), - * it is your responsibility to wrap all - * non-atomic operations with `underlying.synchronized`. - * This includes `get`, as `java.util.Map`'s API does not allow for an - * atomic `get` when `null` values may be present. - * - * @param m The Map to be converted. - * @return A Scala mutable Map view of the argument. - */ - implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match { - case null => null - case MutableMapWrapper(wrapped) => wrapped - case _ => new JMapWrapper(m) - } - - /** - * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. - * The returned Scala ConcurrentMap is backed by the provided Java - * ConcurrentMap and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java ConcurrentMap was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala ConcurrentMap will be returned. - * - * @param m The ConcurrentMap to be converted. - * @return A Scala mutable ConcurrentMap view of the argument. 
- */ - implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match { - case null => null - case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying - case _ => new JConcurrentMapWrapper(m) - } - - /** - * Implicitly converts a Java `Dictionary` to a Scala mutable - * `Map`. - * - * The returned Scala `Map` is backed by the provided Java - * `Dictionary` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * @param p The Dictionary to be converted. - * @return A Scala mutable Map view of the argument. - */ - implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match { - case null => null - case DictionaryWrapper(wrapped) => wrapped - case _ => new JDictionaryWrapper(p) - } - - /** - * Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. - * - * The returned Scala `Map[String, String]` is backed by the provided Java - * `Properties` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * @param p The Properties to be converted. - * @return A Scala mutable Map[String, String] view of the argument. - */ - implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match { - case null => null - case _ => new JPropertiesWrapper(p) - } -} - -@deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0") -object WrapAsScala extends WrapAsScala diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala deleted file mode 100644 index 74322ed2a3fd..000000000000 --- a/src/library/scala/collection/convert/Wrappers.scala +++ /dev/null @@ -1,463 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package convert - -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } -import WrapAsScala._ -import WrapAsJava._ - -/** Adapters for Java/Scala collections API. */ -private[collection] trait Wrappers { - trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { - val underlying: Iterable[A] - def size = underlying.size - override def iterator = IteratorWrapper(underlying.iterator) - override def isEmpty = underlying.isEmpty - } - - @SerialVersionUID(7914730360012802566L) - case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] { - def hasNext = underlying.hasNext - def next() = underlying.next() - def hasMoreElements = underlying.hasNext - def nextElement() = underlying.next() - override def remove() = throw new UnsupportedOperationException - } - - class ToIteratorWrapper[A](underlying : Iterator[A]) { - def asJava = new IteratorWrapper(underlying) - } - - @SerialVersionUID(-2624079708378729299L) - case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] { - def hasNext = underlying.hasNext - def next() = underlying.next - } - - @SerialVersionUID(1480199642890917878L) - case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] { - def hasNext = underlying.hasMoreElements - def next() = underlying.nextElement - } - - @SerialVersionUID(8702516763061989735L) - case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { } - - @SerialVersionUID(4914368587801013118L) - case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] { - def iterator = underlying.iterator - def newBuilder[B] = new mutable.ArrayBuffer[B] - } - - 
@SerialVersionUID(-9156669203906593803L) - case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] { - def iterator = underlying.iterator - override def size = underlying.size - override def isEmpty = underlying.isEmpty - def newBuilder[B] = new mutable.ArrayBuffer[B] - } - - @SerialVersionUID(-2066086677605085135L) - case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - } - - @SerialVersionUID(-3277343097189933650L) - case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { - val p = underlying(i) - underlying(i) = elem - p - } - } - - @SerialVersionUID(2065310383330290590L) - case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } - override def add(elem: A) = { underlying append elem; true } - override def remove(i: Int) = underlying remove i - } - - @SerialVersionUID(-7340917072424655477L) - case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { - def length = underlying.size - override def isEmpty = underlying.isEmpty - override def iterator: Iterator[A] = underlying.iterator - def apply(i: Int) = underlying.get(i) - def update(i: Int, elem: A) = underlying.set(i, elem) - def +=:(elem: A) = { underlying.subList(0, 0) add elem; this } - def +=(elem: A): this.type = { underlying add elem; this } - def insertAll(i: Int, elems: Traversable[A]) = { - val ins = underlying.subList(0, i) - elems.seq.foreach(ins.add(_)) - } - def remove(i: Int) = underlying.remove(i) - def clear() = underlying.clear() - def result = this - // Note: Clone cannot just call 
underlying.clone because in Java, only specific collections - // expose clone methods. Generically, they're protected. - override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying)) - } - - @SerialVersionUID(1L) - class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => - // Note various overrides to avoid performance gotchas. - override def contains(o: Object): Boolean = { - try { underlying.contains(o.asInstanceOf[A]) } - catch { case cce: ClassCastException => false } - } - override def isEmpty = underlying.isEmpty - def size = underlying.size - def iterator = new ju.Iterator[A] { - val ui = underlying.iterator - var prev: Option[A] = None - def hasNext = ui.hasNext - def next = { val e = ui.next(); prev = Some(e); e } - override def remove() = prev match { - case Some(e) => - underlying match { - case ms: mutable.Set[a] => - ms remove e - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - - @SerialVersionUID(-4801553198679985982L) - case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) { - override def add(elem: A) = { - val sz = underlying.size - underlying += elem - sz < underlying.size - } - override def remove(elem: AnyRef) = - try underlying remove elem.asInstanceOf[A] - catch { case ex: ClassCastException => false } - override def clear() = underlying.clear() - } - - @SerialVersionUID(-8813164664953372494L) - case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] { - - override def size = underlying.size - - def iterator = underlying.iterator - - def contains(elem: A): Boolean = underlying.contains(elem) - - def +=(elem: A): this.type = { underlying add elem; this } - def -=(elem: A): this.type = { underlying remove elem; this } - - 
override def add(elem: A): Boolean = underlying add elem - override def remove(elem: A): Boolean = underlying remove elem - override def clear() = underlying.clear() - - override def empty = JSetWrapper(new ju.HashSet[A]) - // Note: Clone cannot just call underlying.clone because in Java, only specific collections - // expose clone methods. Generically, they're protected. - override def clone() = - new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) - } - - @SerialVersionUID(1L) - class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] with Serializable { self => - override def size = underlying.size - - override def get(key: AnyRef): B = try { - underlying get key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - - override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] { - def size = self.size - - def iterator = new ju.Iterator[ju.Map.Entry[A, B]] { - val ui = underlying.iterator - var prev : Option[A] = None - - def hasNext = ui.hasNext - - def next() = { - val (k, v) = ui.next() - prev = Some(k) - new ju.Map.Entry[A, B] { - def getKey = k - def getValue = v - def setValue(v1 : B) = self.put(k, v1) - - - // It's important that this implementation conform to the contract - // specified in the javadocs of java.util.Map.Entry.hashCode - // - // See https://github.com/scala/bug/issues/10663 - override def hashCode = { - (if (k == null) 0 else k.hashCode()) ^ - (if (v == null) 0 else v.hashCode()) - } - - override def equals(other: Any) = other match { - case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue - case _ => false - } - } - } - - override def remove() { - prev match { - case Some(k) => - underlying match { - case mm: mutable.Map[a, _] => - mm remove k - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next 
must be called at least once before remove") - } - } - } - } - - override def containsKey(key: AnyRef): Boolean = try { - // Note: Subclass of collection.Map with specific key type may redirect generic - // contains to specific contains, which will throw a ClassCastException if the - // wrong type is passed. This is why we need a type cast to A inside a try/catch. - underlying.contains(key.asInstanceOf[A]) - } catch { - case ex: ClassCastException => false - } - } - - @SerialVersionUID(8668425014051911127L) - case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) { - override def put(k: A, v: B) = underlying.put(k, v) match { - case Some(v1) => v1 - case None => null.asInstanceOf[B] - } - - override def remove(k: AnyRef): B = try { - underlying remove k.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - - override def clear() = underlying.clear() - } - - trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] { - def underlying: ju.Map[A, B] - - override def size = underlying.size - - def get(k: A) = { - val v = underlying get k - if (v != null) - Some(v) - else if (underlying containsKey k) - Some(null.asInstanceOf[B]) - else - None - } - - def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: A): this.type = { underlying remove key; this } - - override def put(k: A, v: B): Option[B] = Option(underlying.put(k, v)) - - override def update(k: A, v: B) { underlying.put(k, v) } - - override def remove(k: A): Option[B] = Option(underlying remove k) - - def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { val e = ui.next(); (e.getKey, e.getValue) } - } - - override def clear() = underlying.clear() - - 
override def empty: Repr = null.asInstanceOf[Repr] - } - - /** Wraps a Java map as a Scala one. If the map is to support concurrent access, - * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized - * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility - * to wrap all non-atomic operations with `underlying.synchronized`. - * This includes `get`, as `java.util.Map`'s API does not allow for an - * atomic `get` when `null` values may be present. - */ - @SerialVersionUID(5258955232187049103L) - case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { - override def empty = JMapWrapper(new ju.HashMap[A, B]) - } - - @SerialVersionUID(3929791676502269860L) - class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { - - override def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - override def remove(k: AnyRef, v: AnyRef) = try { - underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B]) - } catch { - case ex: ClassCastException => - false - } - - override def replace(k: A, v: B): B = underlying.replace(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - override def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) - } - - /** Wraps a concurrent Java map as a Scala one. Single-element concurrent - * access is supported; multi-element operations such as maps and filters - * are not guaranteed to be atomic. 
- */ - @SerialVersionUID(-8245743033724996882L) - case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { - override def get(k: A) = Option(underlying get k) - - override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B]) - - def putIfAbsent(k: A, v: B): Option[B] = Option(underlying.putIfAbsent(k, v)) - - def remove(k: A, v: B): Boolean = underlying.remove(k, v) - - def replace(k: A, v: B): Option[B] = Option(underlying.replace(k, v)) - - def replace(k: A, oldvalue: B, newvalue: B): Boolean = - underlying.replace(k, oldvalue, newvalue) - } - - @SerialVersionUID(942915481780293390L) - case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] { - def size: Int = underlying.size - def isEmpty: Boolean = underlying.isEmpty - def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator) - def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator) - def get(key: AnyRef) = try { - underlying get key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - def put(key: A, value: B): B = underlying.put(key, value) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - override def remove(key: AnyRef) = try { - underlying remove key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - } - - @SerialVersionUID(-5214182838863307389L) - case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] { - override def size: Int = underlying.size - - def get(k: A) = Option(underlying get k) - - def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: A): 
this.type = { underlying remove key; this } - - override def put(k: A, v: B): Option[B] = Option(underlying.put(k, v)) - - override def update(k: A, v: B) { underlying.put(k, v) } - - override def remove(k: A): Option[B] = Option(underlying remove k) - - def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k)) - - override def clear() = underlying.clear() - } - - @SerialVersionUID(1265445269473530406L) - case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String] - with mutable.Map[String, String] - with mutable.MapLike[String, String, JPropertiesWrapper] { - - override def size = underlying.size - - def get(k: String) = { - val v = underlying get k - if (v != null) Some(v.asInstanceOf[String]) else None - } - - def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: String): this.type = { underlying remove key; this } - - override def put(k: String, v: String): Option[String] = { - val r = underlying.put(k, v) - if (r != null) Some(r.asInstanceOf[String]) else None - } - - override def update(k: String, v: String) { underlying.put(k, v) } - - override def remove(k: String): Option[String] = { - val r = underlying remove k - if (r != null) Some(r.asInstanceOf[String]) else None - } - - def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { - val e = ui.next() - (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) - } - } - - override def clear() = underlying.clear() - - override def empty = JPropertiesWrapper(new ju.Properties) - - def getProperty(key: String) = underlying.getProperty(key) - - def getProperty(key: String, defaultValue: String) = - underlying.getProperty(key, defaultValue) - - def setProperty(key: String, value: String) = - underlying.setProperty(key, value) - } -} - -@SerialVersionUID(0 - 5857859809262781311L) 
-object Wrappers extends Wrappers with Serializable diff --git a/src/library/scala/collection/convert/impl/ArrayStepper.scala b/src/library/scala/collection/convert/impl/ArrayStepper.scala new file mode 100644 index 000000000000..4e8408bca99a --- /dev/null +++ b/src/library/scala/collection/convert/impl/ArrayStepper.scala @@ -0,0 +1,79 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): ObjectArrayStepper[A] = new ObjectArrayStepper[A](underlying, i0, half) +} + +private[collection] class BoxedBooleanArrayStepper(underlying: Array[Boolean], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[Boolean], BoxedBooleanArrayStepper](_i0, _iN) + with AnyStepper[Boolean] { + def nextStep(): Boolean = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): BoxedBooleanArrayStepper = new BoxedBooleanArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedByteArrayStepper(underlying: Array[Byte], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedByteArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedByteArrayStepper = new WidenedByteArrayStepper(underlying, i0, half) +} + 
+private[collection] class WidenedCharArrayStepper(underlying: Array[Char], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedCharArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedCharArrayStepper = new WidenedCharArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedShortArrayStepper(underlying: Array[Short], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedShortArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedShortArrayStepper = new WidenedShortArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedFloatArrayStepper(underlying: Array[Float], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, WidenedFloatArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedFloatArrayStepper = new WidenedFloatArrayStepper(underlying, i0, half) +} + +private[collection] class DoubleArrayStepper(underlying: Array[Double], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleArrayStepper = new DoubleArrayStepper(underlying, i0, half) +} + +private[collection] class IntArrayStepper(underlying: Array[Int], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntArrayStepper = new 
IntArrayStepper(underlying, i0, half) +} + +private[collection] class LongArrayStepper(underlying: Array[Long], _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongArrayStepper](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongArrayStepper = new LongArrayStepper(underlying, i0, half) +} diff --git a/src/library/scala/collection/convert/impl/BinaryTreeStepper.scala b/src/library/scala/collection/convert/impl/BinaryTreeStepper.scala new file mode 100644 index 000000000000..d15977eced17 --- /dev/null +++ b/src/library/scala/collection/convert/impl/BinaryTreeStepper.scala @@ -0,0 +1,248 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection._ + + +private[collection] object BinaryTreeStepper { + val emptyStack = new Array[AnyRef](0) +} + + +/** A generic stepper that can traverse ordered binary trees. + * The tree is assumed to have all the stuff on the left first, then the root, then everything on the right. + * + * Splits occur at the root of whatever has not yet been traversed (the substepper steps up to but + * does not include the root). + * + * The stepper maintains an internal stack, not relying on the tree traversal to be reversible. Trees with + * nodes that maintain a parent pointer may be traversed slightly faster without a stack, but splitting is + * more awkward. 
+ * + * Algorithmically, this class implements a simple state machine that unrolls the left-leaning links in + * a binary tree onto a stack. At all times, the machine should be in one of these states: + * 1. Empty: `myCurrent` is `null` and `index` is `-1`. `stack` should also be `Array.empty` then. + * 2. Ready: `myCurrent` is not `null` and contains the next `A` to be extracted + * 3. Pending: `myCurrent` is `null` and `stack(index)` contains the next node to visit + * + * Subclasses should allow this class to do all the work of maintaining state; `next` should simply + * reduce `maxLength` by one, and consume `myCurrent` and set it to `null` if `hasNext` is true. + */ +private[collection] abstract class BinaryTreeStepperBase[A, T >: Null <: AnyRef, Sub >: Null, Semi <: Sub with BinaryTreeStepperBase[A, T, _, _]]( + protected var maxLength: Int, protected var myCurrent: T, protected var stack: Array[AnyRef], protected var index: Int, + protected val left: T => T, protected val right: T => T +) +extends EfficientSplit { + /** Unrolls a subtree onto the stack starting from a particular node, returning + * the last node found. This final node is _not_ placed on the stack, and + * may have things to its right. + */ + @tailrec protected final def unroll(from: T): T = { + val l = left(from) + if (l eq null) from + else { + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = from + unroll(l) + } + } + + /** Takes a subtree whose left side, if any, has already been visited, and unrolls + * the right side of the tree onto the stack, thereby detaching that node of + * the subtree from the stack entirely (so it is ready to use). It returns + * the node that is being detached. Note that the node must _not_ already be + * on the stack. 
 */
  protected final def detach(node: T): node.type = {
    // If the node has a right subtree, unroll it onto the stack so the
    // in-order successor chain is preserved; grow the stack if needed.
    val r = right(node)
    if (r ne null) {
      val last = unroll(r)
      if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2)
      index += 1
      stack(index) = last
    }
    node
  }

  /** Given an empty state and the root of a new tree, initialize the tree properly
   * to be in an (appropriate) ready state.  Will do all sorts of wrong stuff if the
   * tree is not already empty.
   *
   * Right now overwrites everything so could allow reuse, but isn't used for it.
   */
  private[impl] final def initialize(root: T, size: Int): Unit =
    if (root eq null) {
      maxLength = 0
      myCurrent = null
      stack = BinaryTreeStepper.emptyStack
      index = -1
    }
    else {
      maxLength = size
      index = -1
      myCurrent = detach(unroll(root))
    }

  /** Creates a stepper of the concrete subtype from explicit state components. */
  protected def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): Semi

  def characteristics: Int = Spliterator.ORDERED

  def estimateSize: Long = if (hasStep) maxLength else 0

  // True if an element is ready in `myCurrent`, or one can be popped from the
  // stack.  Exhaustion resets state so the empty stack can be garbage-shared.
  def hasStep: Boolean = (myCurrent ne null) || (maxLength > 0 && {
    if (index < 0) { maxLength = 0; stack = BinaryTreeStepper.emptyStack; false }
    else {
      val ans = stack(index).asInstanceOf[T]
      index -= 1
      myCurrent = detach(ans)
      true
    }
  })

  /** Splits the tree at the root by giving everything unrolled on the stack to a new stepper,
   * detaching the root, and leaving the right-hand side of the root unrolled.
   *
   * If the tree is empty or only has one element left, it returns `null` instead of splitting.
   */
  def trySplit(): Sub =
    if (!hasStep || index < 0) null
    else {
      val root = stack(0).asInstanceOf[T]
      val leftStack =
        if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1)
        else BinaryTreeStepper.emptyStack
      val leftIndex = index - 1
      val leftCurrent = myCurrent
      var leftMax = maxLength
      index = -1
      detach(root)
      myCurrent = root
      // NOTE(review): after detach(root), `index` reflects what was pushed from the
      // root's right subtree; the `2 + ...` terms account for the element held in
      // `myCurrent`/`leftCurrent` plus the detached root — confirm against the
      // stack invariants established by unroll/detach.
      leftMax -= 2+index
      maxLength -= 2+leftIndex
      semiclone(leftMax, leftCurrent, leftStack, leftIndex)
    }
}


/** In-order stepper over a binary tree yielding reference values extracted from each node. */
private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef](
  _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => A
)
extends BinaryTreeStepperBase[A, T, AnyStepper[A], AnyBinaryTreeStepper[A, T]](_maxLength, _myCurrent, _stack, _index, _left, _right)
with AnyStepper[A] {
  def nextStep(): A =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = null
      maxLength -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): AnyBinaryTreeStepper[A, T] =
    new AnyBinaryTreeStepper[A, T](maxL, myC, stk, ix, left, right, extract)
}
private[collection] object AnyBinaryTreeStepper {
  /** Builds a ready-to-use stepper from a tree root and navigation/extraction functions. */
  def from[A, T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = {
    val ans = new AnyBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract)
    ans.initialize(root, maxLength)
    ans
  }
}


/** In-order stepper over a binary tree yielding unboxed `Double` values. */
private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef](
  _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Double
)
extends BinaryTreeStepperBase[Double, T, DoubleStepper, DoubleBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right)
with DoubleStepper {
  def nextStep(): Double =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = null
      maxLength -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): DoubleBinaryTreeStepper[T] =
    new DoubleBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract)
}
private [collection] object DoubleBinaryTreeStepper {
  /** Builds a ready-to-use stepper from a tree root and navigation/extraction functions. */
  def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = {
    val ans = new DoubleBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract)
    ans.initialize(root, maxLength)
    ans
  }
}


/** In-order stepper over a binary tree yielding unboxed `Int` values. */
private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef](
  _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Int
)
extends BinaryTreeStepperBase[Int, T, IntStepper, IntBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right)
with IntStepper {
  def nextStep(): Int =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = null
      maxLength -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): IntBinaryTreeStepper[T] =
    new IntBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract)
}
private [collection] object IntBinaryTreeStepper {
  /** Builds a ready-to-use stepper from a tree root and navigation/extraction functions. */
  def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = {
    val ans = new IntBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract)
    ans.initialize(root, maxLength)
    ans
  }
}



/** In-order stepper over a binary tree yielding unboxed `Long` values. */
private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef](
  _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Long
)
extends BinaryTreeStepperBase[Long, T, LongStepper, LongBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right)
with LongStepper {
  def nextStep(): Long =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = null
      maxLength -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): LongBinaryTreeStepper[T] =
    new LongBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract)
}
private [collection] object LongBinaryTreeStepper {
  /** Builds a ready-to-use stepper from a tree root and navigation/extraction functions. */
  def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = {
    val ans = new LongBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract)
    ans.initialize(root, maxLength)
    ans
  }
}


diff --git a/src/library/scala/collection/convert/impl/BitSetStepper.scala b/src/library/scala/collection/convert/impl/BitSetStepper.scala
new file mode 100644
index 000000000000..905afaaf4a0d
--- /dev/null
+++ b/src/library/scala/collection/convert/impl/BitSetStepper.scala
@@ -0,0 +1,118 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection.convert
package impl

import scala.collection.Stepper.EfficientSplit
import scala.collection.{BitSetOps, IntStepper, Stepper}


/** Steps over the set bits of a bit set, caching up to two words locally so that
 *  small sets (`nwords <= 2`) can drop the reference to the underlying collection.
 */
private[collection] final class BitSetStepper(
  private var underlying: BitSetOps[_],
  private var cache0: Long, private var cache1: Long,
  _i0: Int, _iN: Int,
  private var cacheIndex: Int
)
extends InOrderStepperBase[IntStepper, BitSetStepper](_i0, _iN)
with IntStepper {
  import BitSetOps.{WordLength, LogWL}

  // When `found` is set, `i0` is an element that exists
  protected var found: Boolean = false

  @annotation.tailrec
  protected def findNext(): Boolean =
    if (i0 >= iN) false
    else {
      val ix = i0 >> LogWL
      if (ix == cacheIndex || ix == cacheIndex+1) {
        // The word containing i0 is cached: scan it for the next set bit.
        val i = scanLong(if (ix == cacheIndex) cache0 else cache1, i0 & (WordLength - 1))
        if (i >= 0) {
          i0 = (i0 & ~(WordLength - 1)) | i
          found = (i0 < iN)
          found
        }
        else {
          // No bit set in this word; advance to the start of the next word.
          i0 = (i0 & ~(WordLength - 1)) + WordLength
          findNext()
        }
      }
      else if (underlying eq null) {
        // No backing collection and the cache is exhausted: nothing left.
        i0 = iN
        found = false
        found
      }
      else {
        // Refill the two-word cache from the backing collection.
        cacheIndex = ix
        cache0 = underlying.word(cacheIndex)
        cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.word(cacheIndex+1)
        findNext()
      }
    }

  def semiclone(half: Int): BitSetStepper =
    if (underlying == null) {
      val ans = new BitSetStepper(null, cache0, cache1, i0, half, cacheIndex)
      ans.found = found
      i0 = half
      found = false
      ans
    }
    else {
      // Set up new stepper
      val ixNewN = (half - 1) >> LogWL
      val ans =
        new BitSetStepper(if (ixNewN <= cacheIndex + 1) null else underlying, cache0, cache1, i0, half, cacheIndex)
      if (found) ans.found = true

      // Advance old stepper to breakpoint
      val ixOld0 = half >> LogWL
      if (ixOld0 > cacheIndex + 1) {
        cache0 = underlying.word(ixOld0)
        cache1 = if (((iN - 1) >> LogWL) == ixOld0) -1L else underlying.word(ixOld0+1)
        cacheIndex = ixOld0
        i0 = half
        found = false
      }

      // Return new stepper
      ans
    }

  // Index of the lowest set bit at or above `from`, or -1 if none in this word.
  @annotation.tailrec
  private[this] def scanLong(bits: Long, from: Int): Int =
    if (from >= WordLength) -1
    else if ((bits & (1L << from)) != 0) from
    else scanLong(bits, from + 1)

  def nextStep(): Int =
    if (found || findNext()) {
      found = false
      val ans = i0
      i0 += 1
      ans
    }
    else Stepper.throwNSEE()
}

private[collection] object BitSetStepper {
  def from(bs: scala.collection.BitSetOps[_]): IntStepper with EfficientSplit =
    new BitSetStepper(
      if (bs.nwords <= 2) null else bs,
      if (bs.nwords <= 0) -1L else bs.word(0),
      if (bs.nwords <= 1) -1L else bs.word(1),
      0,
      bs.nwords * BitSetOps.WordLength,
      0
    )
}
diff --git a/src/library/scala/collection/convert/impl/ChampStepper.scala b/src/library/scala/collection/convert/impl/ChampStepper.scala
new file mode 100644
index 000000000000..ddf7c34dc65a
--- /dev/null
+++ b/src/library/scala/collection/convert/impl/ChampStepper.scala
@@ -0,0 +1,245 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection.convert
package impl

import scala.collection.Stepper.EfficientSplit
import scala.collection._
import scala.collection.immutable.Node

/** A stepper that is a slightly elaborated version of the ChampBaseIterator;
  * the main difference is that it knows when it should stop instead of running
  * to the end of all trees.
  */
private[collection] abstract class ChampStepperBase[
  A, T <: Node[T], Sub >: Null, Semi <: Sub with ChampStepperBase[A, T, _, _]
](protected var maxSize: Int)
extends EfficientSplit {
  import Node.MaxDepth

  // Much of this code is identical to ChampBaseIterator. If you change that, look here too!

  protected var currentValueCursor: Int = 0
  protected var currentValueLength: Int = 0
  protected var currentValueNode: T = _

  private var currentStackLevel: Int = -1
  private var nodeCursorsAndLengths: Array[Int] = _
  private var nodes: Array[T] = _

  // Lazily allocate the traversal stack (cursor/length pairs, one per depth level).
  private def initNodes(): Unit = {
    if (nodeCursorsAndLengths eq null) {
      nodeCursorsAndLengths = new Array[Int](MaxDepth * 2)
      nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]]
    }
  }
  def initRoot(rootNode: T): Unit = {
    if (rootNode.hasNodes) pushNode(rootNode)
    if (rootNode.hasPayload) setupPayloadNode(rootNode)
  }

  // Point the value cursor at this node's payload entries.
  private final def setupPayloadNode(node: T): Unit = {
    currentValueNode = node
    currentValueCursor = 0
    currentValueLength = node.payloadArity
  }

  private final def pushNode(node: T): Unit = {
    initNodes()
    currentStackLevel = currentStackLevel + 1

    val cursorIndex = currentStackLevel * 2
    val lengthIndex = currentStackLevel * 2 + 1

    nodes(currentStackLevel) = node
    nodeCursorsAndLengths(cursorIndex) = 0
    nodeCursorsAndLengths(lengthIndex) = node.nodeArity
  }

  private final def popNode(): Unit = {
    currentStackLevel = currentStackLevel - 1
  }

  /**
    * Searches for next node that contains payload values,
    * and pushes encountered sub-nodes on a stack for depth-first traversal.
    */
  private final def searchNextValueNode(): Boolean = {
    while (currentStackLevel >= 0) {
      val cursorIndex = currentStackLevel * 2
      val lengthIndex = currentStackLevel * 2 + 1

      val nodeCursor = nodeCursorsAndLengths(cursorIndex)
      val nodeLength = nodeCursorsAndLengths(lengthIndex)

      if (nodeCursor < nodeLength) {
        nodeCursorsAndLengths(cursorIndex) += 1

        val nextNode = nodes(currentStackLevel).getNode(nodeCursor)

        if (nextNode.hasNodes) { pushNode(nextNode) }
        if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true }
      } else {
        popNode()
      }
    }
    false
  }

  def characteristics: Int = 0

  def estimateSize: Long = if (hasStep) maxSize else 0L

  def semiclone(): Semi

  final def hasStep: Boolean = maxSize > 0 && {
    val ans = (currentValueCursor < currentValueLength) || searchNextValueNode()
    if (!ans) maxSize = 0
    ans
  }

  final def trySplit(): Sub =
    if (!hasStep) null
    else {
      // Find the shallowest stack level that still has unvisited child nodes.
      var fork = 0
      while (fork <= currentStackLevel && nodeCursorsAndLengths(2*fork) >= nodeCursorsAndLengths(2*fork + 1)) fork += 1
      // Nothing on the stack and at most one payload value left: too small to split.
      if (fork > currentStackLevel && currentValueCursor > currentValueLength -2) null
      else {
        val semi = semiclone()
        semi.maxSize = maxSize
        semi.currentValueCursor = currentValueCursor
        semi.currentValueNode = currentValueNode
        if (fork > currentStackLevel) {
          // Just need to finish the current node
          semi.currentStackLevel = -1
          val i = (currentValueCursor + currentValueLength) >>> 1
          semi.currentValueLength = i
          currentValueCursor = i
        }
        else {
          // Need (at least some of) the full stack, so make an identical copy
          semi.nodeCursorsAndLengths = java.util.Arrays.copyOf(nodeCursorsAndLengths, nodeCursorsAndLengths.length)
          semi.nodes = java.util.Arrays.copyOf(nodes.asInstanceOf[Array[Node[T]]], nodes.length).asInstanceOf[Array[T]]
          semi.currentStackLevel = currentStackLevel
          semi.currentValueLength = currentValueLength

          // Split the top level of the stack where there's still something to split
          // Could make this more efficient by duplicating code from searchNextValueNode
          // instead of setting up for it to run normally.  But splits tend to be rare,
          // so it's not critically important.
          //
          // Note that this split can be kind of uneven; if we knew how many child nodes there
          // were we could do better.
          val i = (nodeCursorsAndLengths(2*fork) + nodeCursorsAndLengths(2*fork + 1)) >>> 1
          semi.nodeCursorsAndLengths(2*fork + 1) = i
          var j = currentStackLevel
          while (j > fork) {
            nodeCursorsAndLengths(2*j) = nodeCursorsAndLengths(2*j + 1)
            j -= 1
          }
          nodeCursorsAndLengths(2*fork) = i
          searchNextValueNode()
        }
        semi
      }
    }
}


/** CHAMP-trie stepper yielding reference values extracted from payload entries. */
private[collection] final class AnyChampStepper[A, T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => A)
extends ChampStepperBase[A, T, AnyStepper[A], AnyChampStepper[A, T]](_maxSize)
with AnyStepper[A] {
  def nextStep(): A =
    if (hasStep) {
      val ans = extract(currentValueNode, currentValueCursor)
      currentValueCursor += 1
      maxSize -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(): AnyChampStepper[A, T] = new AnyChampStepper[A, T](0, extract)
}
private[collection] object AnyChampStepper {
  def from[A, T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => A): AnyChampStepper[A, T] = {
    val ans = new AnyChampStepper[A, T](maxSize, extract)
    ans.initRoot(root)
    ans
  }
}

/** CHAMP-trie stepper yielding unboxed `Double` values. */
private[collection] final class DoubleChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Double)
extends ChampStepperBase[Double, T, DoubleStepper, DoubleChampStepper[T]](_maxSize)
with DoubleStepper {
  def nextStep(): Double =
    if (hasStep) {
      val ans = extract(currentValueNode, currentValueCursor)
      currentValueCursor += 1
      maxSize -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(): DoubleChampStepper[T] = new DoubleChampStepper[T](0, extract)
}
private[collection] object DoubleChampStepper {
  def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Double): DoubleChampStepper[T] = {
    val ans = new DoubleChampStepper[T](maxSize, extract)
    ans.initRoot(root)
    ans
  }
}

/** CHAMP-trie stepper yielding unboxed `Int` values. */
private[collection] final class IntChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Int)
extends ChampStepperBase[Int, T, IntStepper, IntChampStepper[T]](_maxSize)
with IntStepper {
  def nextStep(): Int =
    if (hasStep) {
      val ans = extract(currentValueNode, currentValueCursor)
      currentValueCursor += 1
      maxSize -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(): IntChampStepper[T] = new IntChampStepper[T](0, extract)
}
private[collection] object IntChampStepper {
  def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Int): IntChampStepper[T] = {
    val ans = new IntChampStepper[T](maxSize, extract)
    ans.initRoot(root)
    ans
  }
}

/** CHAMP-trie stepper yielding unboxed `Long` values. */
private[collection] final class LongChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Long)
extends ChampStepperBase[Long, T, LongStepper, LongChampStepper[T]](_maxSize)
with LongStepper {
  def nextStep(): Long =
    if (hasStep) {
      val ans = extract(currentValueNode, currentValueCursor)
      currentValueCursor += 1
      maxSize -= 1
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(): LongChampStepper[T] = new LongChampStepper[T](0, extract)
}
private[collection] object LongChampStepper {
  def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Long): LongChampStepper[T] = {
    val ans = new LongChampStepper[T](maxSize, extract)
    ans.initRoot(root)
    ans
  }
}
diff --git a/src/library/scala/collection/convert/impl/InOrderStepperBase.scala b/src/library/scala/collection/convert/impl/InOrderStepperBase.scala
new file mode 100644
index 000000000000..476b5c882177
--- /dev/null
+++ b/src/library/scala/collection/convert/impl/InOrderStepperBase.scala
@@ -0,0 +1,53 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection.convert
package impl

import java.util.Spliterator

import scala.collection.Stepper.EfficientSplit

/** Shared machinery for stepping, in order, over a collection whose elements are
 *  addressed by an integer index but where some indices may be unoccupied ("gaps").
 *
 *  Collections that are guaranteed to have no gaps should use `IndexedStepperBase` instead.
 */
private[convert] abstract class InOrderStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int)
extends EfficientSplit {
  /** `true` when the element at `i0` is known to be present; `false` when not known or a gap. */
  protected def found: Boolean

  /** Moves `i0` past any gaps, keeping internal state consistent so that `found` is
   *  accurate at the new position; reports the updated value of `found`.
   */
  protected def findNext(): Boolean

  /** Creates the stepper that takes over the index range up to `half` after a split. */
  protected def semiclone(half: Int): Semi

  final def hasStep: Boolean = found || findNext()

  def characteristics: Int = Spliterator.ORDERED

  def estimateSize: Long = iN - i0

  def trySplit(): Sub =
    if (iN - 1 <= i0) null  // zero or one candidate index left: not worth splitting
    else {
      val mid = (i0 + iN) >>> 1
      val prefix = semiclone(mid)
      i0 = mid
      prefix
    }
}
diff --git a/src/library/scala/collection/convert/impl/IndexedSeqStepper.scala b/src/library/scala/collection/convert/impl/IndexedSeqStepper.scala
new file mode 100644
index 000000000000..aa8fbe307278
--- /dev/null
+++ b/src/library/scala/collection/convert/impl/IndexedSeqStepper.scala
@@ -0,0 +1,44 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
+ */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): AnyIndexedSeqStepper[A] = new AnyIndexedSeqStepper[A](underlying, i0, half) +} + +private[collection] class DoubleIndexedSeqStepper[CC <: collection.IndexedSeqOps[Double, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleIndexedSeqStepper[CC]](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleIndexedSeqStepper[CC] = new DoubleIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class IntIndexedSeqStepper[CC <: collection.IndexedSeqOps[Int, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntIndexedSeqStepper[CC]](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntIndexedSeqStepper[CC] = new IntIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class LongIndexedSeqStepper[CC <: collection.IndexedSeqOps[Long, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongIndexedSeqStepper[CC]](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongIndexedSeqStepper[CC] = new LongIndexedSeqStepper[CC](underlying, i0, half) +} diff --git 
a/src/library/scala/collection/convert/impl/IndexedStepperBase.scala b/src/library/scala/collection/convert/impl/IndexedStepperBase.scala
new file mode 100644
index 000000000000..d2094dd30da6
--- /dev/null
+++ b/src/library/scala/collection/convert/impl/IndexedStepperBase.scala
@@ -0,0 +1,40 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection.convert
package impl

import java.util.Spliterator

import scala.collection.Stepper.EfficientSplit

/** Shared machinery for stepping over a gap-free collection addressed by an integer index. */
private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int)
  extends EfficientSplit {
  /** Creates the stepper that takes over the index range up to `half` after a split. */
  protected def semiclone(half: Int): Semi

  def hasStep: Boolean = i0 < iN

  def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED

  def estimateSize: Long = iN - i0

  def trySplit(): Sub =
    if (iN - 1 <= i0) null  // zero or one element left: not worth splitting
    else {
      val mid = (i0 + iN) >>> 1
      val prefix = semiclone(mid)
      i0 = mid
      prefix
    }
}
diff --git a/src/library/scala/collection/convert/impl/IteratorStepper.scala b/src/library/scala/collection/convert/impl/IteratorStepper.scala
new file mode 100644
index 000000000000..8fac29cf96ae
--- /dev/null
+++ b/src/library/scala/collection/convert/impl/IteratorStepper.scala
@@ -0,0 +1,129 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
+ */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} +import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} + +private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) + extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) + with AnyStepper[A] { + protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper(null) + + def nextStep(): A = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): AnyStepper[A] = if (proxied ne null) proxied.trySplit() else { + val acc = new AnyAccumulator[A] + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) + extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying) + with DoubleStepper { + protected def semiclone(): DoubleIteratorStepper = new DoubleIteratorStepper(null) + + def nextStep(): Double = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): DoubleStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new DoubleAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } 
+ } +} + +private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) + extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying) + with IntStepper { + protected def semiclone(): IntIteratorStepper = new IntIteratorStepper(null) + + def nextStep(): Int = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): IntStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new IntAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class LongIteratorStepper(_underlying: Iterator[Long]) + extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying) + with LongStepper { + protected def semiclone(): LongIteratorStepper = new LongIteratorStepper(null) + + def nextStep(): Long = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): LongStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new LongAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Common functionality for Steppers that step through an Iterator, caching the results as needed when a split is requested. 
*/ +private[convert] abstract class IteratorStepperBase[A, SP >: Null <: Stepper[A], Semi <: SP](final protected val underlying: Iterator[A]) { + final protected var nextChunkSize = 16 + final protected var proxied: SP = null + protected def semiclone(): Semi // Must initialize with null iterator! + def characteristics: Int = if (proxied ne null) Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED else Spliterator.ORDERED + def estimateSize: Long = if (proxied ne null) proxied.estimateSize else Long.MaxValue + def hasStep: Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext +} diff --git a/src/library/scala/collection/convert/impl/NumericRangeStepper.scala b/src/library/scala/collection/convert/impl/NumericRangeStepper.scala new file mode 100644 index 000000000000..3a03f8fabf63 --- /dev/null +++ b/src/library/scala/collection/convert/impl/NumericRangeStepper.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper} +import scala.collection.immutable.NumericRange + +private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int) +extends IndexedStepperBase[AnyStepper[A], AnyNumericRangeStepper[A]](_i0, _iN) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new AnyNumericRangeStepper[A](underlying, i0, half) +} + +private[collection] class IntNumericRangeStepper(underlying: NumericRange[Int], _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, IntNumericRangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new IntNumericRangeStepper(underlying, i0, half) +} + +private[collection] class LongNumericRangeStepper(underlying: NumericRange[Long], _i0: Int, _iN: Int) +extends IndexedStepperBase[LongStepper, LongNumericRangeStepper](_i0, _iN) +with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new LongNumericRangeStepper(underlying, i0, half) +} diff --git a/src/library/scala/collection/convert/impl/RangeStepper.scala b/src/library/scala/collection/convert/impl/RangeStepper.scala new file mode 100644 index 000000000000..46f803151704 --- /dev/null +++ b/src/library/scala/collection/convert/impl/RangeStepper.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.{IntStepper, Stepper} + +/** Implements Stepper on an integer Range. You don't actually need the Range to do this, + * so only the relevant parts are included. Because the arguments are protected, they are + * not error-checked; `Range` is required to provide valid arguments. + */ +private[collection] final class RangeStepper(protected var myNext: Int, myStep: Int, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, RangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = myNext + myNext += myStep + i0 += 1 + ans + } + else Stepper.throwNSEE() + protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half) + override def trySplit(): IntStepper = { + val old_i0 = i0 + val ans = super.trySplit() + myNext += (i0 - old_i0) * myStep + ans + } +} diff --git a/src/library/scala/collection/convert/impl/StringStepper.scala b/src/library/scala/collection/convert/impl/StringStepper.scala new file mode 100644 index 000000000000..e8c4d7073c43 --- /dev/null +++ b/src/library/scala/collection/convert/impl/StringStepper.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.lang.Character.{charCount, isLowSurrogate} +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{IntStepper, Stepper} + +/** Implements `Stepper` on a `String` where you step through chars packed into `Int`. 
+ */ +private[collection] final class CharStringStepper(underlying: String, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, CharStringStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { val j = i0; i0 += 1; underlying.charAt(j) } + else Stepper.throwNSEE() + + def semiclone(half: Int): CharStringStepper = new CharStringStepper(underlying, i0, half) +} + +/** Implements `Stepper` on a `String` where you step through code points. + */ +private[collection] final class CodePointStringStepper(underlying: String, private var i0: Int, private var iN: Int) +extends IntStepper with EfficientSplit { + def characteristics: Int = Spliterator.IMMUTABLE | Spliterator.NONNULL | Spliterator.ORDERED + def estimateSize: Long = iN - i0 + def hasStep: Boolean = i0 < iN + def nextStep(): Int = { + if (hasStep) { + val cp = underlying.codePointAt(i0) + i0 += charCount(cp) + cp + } + else Stepper.throwNSEE() + } + def trySplit(): CodePointStringStepper = + if (iN - 3 > i0) { + var half = (i0 + iN) >>> 1 + if (isLowSurrogate(underlying.charAt(half))) half -= 1 + val ans = new CodePointStringStepper(underlying, i0, half) + i0 = half + ans + } + else null +} diff --git a/src/library/scala/collection/convert/impl/TableStepper.scala b/src/library/scala/collection/convert/impl/TableStepper.scala new file mode 100644 index 000000000000..2c144e4fae8f --- /dev/null +++ b/src/library/scala/collection/convert/impl/TableStepper.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
 */

package scala.collection.convert
package impl

import scala.collection.Stepper.EfficientSplit
import scala.collection._

/** Steps over the entry chains of a hash table stored as an array of buckets,
 *  where `null` marks an empty bucket.
 */
private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]](
  protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int
)
extends EfficientSplit {
  // Always holds table(i0); if `null` it is time to switch to the next element
  protected var myCurrent: I = if (i0 < iN) table(i0) else null

  // Only call this when `myCurrent` is null (meaning we need to advance)
  @annotation.tailrec
  protected final def findNextCurrent(): Boolean =
    if (i0 < iN) {
      i0 += 1
      if (i0 >= iN) false
      else {
        myCurrent = table(i0)
        if (myCurrent eq null) findNextCurrent()
        else true
      }
    }
    else false

  /** Creates the stepper that takes over the bucket range up to `half` after a split. */
  protected def semiclone(half: Int): Semi

  def characteristics: Int = 0

  // Side effect: exhaustion zeroes maxLength so estimates stay consistent.
  def estimateSize: Long = if (!hasStep) { maxLength = 0; 0 } else maxLength

  def hasStep: Boolean = (myCurrent ne null) || findNextCurrent()

  def trySplit(): Sub = {
    if (iN-1 > i0 && maxLength > 0) {
      val half = (i0 + iN) >>> 1
      val ans = semiclone(half)
      ans.myCurrent = myCurrent
      myCurrent = table(half)
      // Count elements on each side (cheaply, only for small ranges) so the
      // size estimates of the two halves roughly add up.
      var inLeft = if (ans.myCurrent ne null) 1 else 0
      var inRight = if (myCurrent ne null) 1 else 0
      if (iN - i0 < 32) {
        var i = i0+1
        while (i < half && (table(i) ne null)) { i += 1; inLeft += 1 }
        i = half+1
        while (i < iN && (table(i) ne null)) { i += 1; inRight += 1 }
      }
      maxLength -= inLeft
      ans.maxLength -= inRight
      i0 = half
      ans
    }
    else null
  }
}


/** Hash-table stepper yielding reference values extracted from each chain entry. */
private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef](
  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => A, _i0: Int, _iN: Int
)
extends TableStepperBase[A, I, AnyStepper[A], AnyTableStepper[A, I]](_maxLength, _table, _i0, _iN)
with AnyStepper[A] {
  def nextStep(): A =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = iterate(myCurrent)
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(half: Int): AnyTableStepper[A, I] = new AnyTableStepper[A, I](maxLength, table, iterate, extract, i0, half)
}


/** Hash-table stepper yielding unboxed `Double` values. */
private[collection] final class DoubleTableStepper[I >: Null <: AnyRef](
  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Double, _i0: Int, _iN: Int
)
extends TableStepperBase[Double, I, DoubleStepper, DoubleTableStepper[I]](_maxLength, _table, _i0, _iN)
with DoubleStepper {
  def nextStep(): Double =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = iterate(myCurrent)
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(half: Int): DoubleTableStepper[I] = new DoubleTableStepper[I](maxLength, table, iterate, extract, i0, half)
}


/** Hash-table stepper yielding unboxed `Int` values. */
private[collection] final class IntTableStepper[I >: Null <: AnyRef](
  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Int, _i0: Int, _iN: Int
)
extends TableStepperBase[Int, I, IntStepper, IntTableStepper[I]](_maxLength, _table, _i0, _iN)
with IntStepper {
  def nextStep(): Int =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = iterate(myCurrent)
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(half: Int): IntTableStepper[I] = new IntTableStepper[I](maxLength, table, iterate, extract, i0, half)
}


/** Hash-table stepper yielding unboxed `Long` values. */
private[collection] final class LongTableStepper[I >: Null <: AnyRef](
  _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Long, _i0: Int, _iN: Int
)
extends TableStepperBase[Long, I, LongStepper, LongTableStepper[I]](_maxLength, _table, _i0, _iN)
with LongStepper {
  def nextStep(): Long =
    if (hasStep) {
      val ans = extract(myCurrent)
      myCurrent = iterate(myCurrent)
      ans
    }
    else Stepper.throwNSEE()

  def semiclone(half: Int): LongTableStepper[I] = new LongTableStepper[I](maxLength, table, iterate, extract, i0, half)
}

diff --git a/src/library/scala/collection/convert/impl/VectorStepper.scala
b/src/library/scala/collection/convert/impl/VectorStepper.scala new file mode 100644 index 000000000000..ca0d45330a70 --- /dev/null +++ b/src/library/scala/collection/convert/impl/VectorStepper.scala @@ -0,0 +1,131 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( + _i0: Int, + _iN: Int, + protected val displayN: Int, + protected val trunk: Array[AnyRef] +) +extends IndexedStepperBase[Sub, Semi](_i0, _iN) { + protected var index: Int = 32 // Force an advanceData on the first element + protected var leaves: Array[AnyRef] = null + protected var index1: Int = 32 // Force advanceData to defer to initTo on the first element + protected var twigs: Array[AnyRef] = null + + protected final def advanceData(iX: Int): Unit = { + index1 += 1 + if (index1 >= 32) initTo(iX) + else { + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = 0 + } + } + protected final def initTo(iX: Int): Unit = displayN match { + case 0 => + leaves = trunk + index = iX + case 1 => + twigs = trunk + index1 = iX >>> 5 + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + case _ => + var n = displayN + var dataN = trunk + while (n > 2) { + dataN = dataN((iX >> (5*n)) & 0x1F).asInstanceOf[Array[AnyRef]] + n -= 1 + } + twigs = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] + index1 = (iX >> 5) & 0x1F + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + } +} + +private[collection] class AnyVectorStepper[A](_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[AnyStepper[A], AnyVectorStepper[A]](_i0, 
_iN, _displayN, _trunk) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[A] + } else Stepper.throwNSEE() + def semiclone(half: Int): AnyVectorStepper[A] = { + val ans = new AnyVectorStepper[A](i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class DoubleVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[DoubleStepper, DoubleVectorStepper](_i0, _iN, _displayN, _trunk) +with DoubleStepper { + def nextStep(): Double = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Double] + } else Stepper.throwNSEE() + def semiclone(half: Int): DoubleVectorStepper = { + val ans = new DoubleVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[IntStepper, IntVectorStepper](_i0, _iN, _displayN, _trunk) +with IntStepper { + def nextStep(): Int = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Int] + } else Stepper.throwNSEE() + def semiclone(half: Int): IntVectorStepper = { + val ans = new IntVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[LongStepper, LongVectorStepper](_i0, _iN, _displayN, _trunk) +with LongStepper { + def nextStep(): Long = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Long] + } else Stepper.throwNSEE() + def semiclone(half: Int): LongVectorStepper = { + val ans = new LongVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} diff 
--git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala deleted file mode 100644 index 9a2c4c995663..000000000000 --- a/src/library/scala/collection/convert/package.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -package object convert { - @deprecated("use JavaConverters", since="2.12.0") - val decorateAsJava = new DecorateAsJava { } - @deprecated("use JavaConverters", since="2.12.0") - val decorateAsScala = new DecorateAsScala { } - @deprecated("use JavaConverters", since="2.12.0") - val decorateAll = JavaConverters - - @deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0") - val wrapAsJava = new WrapAsJava { } - @deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0") - val wrapAsScala = new WrapAsScala { } - @deprecated("use JavaConverters or consider ImplicitConversions", since="2.12.0") - val wrapAll = new WrapAsJava with WrapAsScala { } -} diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala index f796ddbbfa83..4464b4935d07 100644 --- a/src/library/scala/collection/generic/BitOperations.scala +++ b/src/library/scala/collection/generic/BitOperations.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,15 +10,15 @@ * additional information regarding copyright ownership. 
*/ -package scala -package collection +package scala.collection package generic + /** Some bit operations. - * - * See [[http://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for - * an explanation of unsignedCompare. - */ + * + * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for + * an explanation of unsignedCompare. + */ private[collection] object BitOperations { trait Int { type Int = scala.Int diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala deleted file mode 100644 index b41dc86b7bcf..000000000000 --- a/src/library/scala/collection/generic/BitSetFactory.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection._ -import mutable.Builder - -/** @define coll collection - * @define Coll `Traversable` - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define bitsetCanBuildFrom - * The standard `CanBuildFrom` instance for bitsets. 
- */ -trait BitSetFactory[Coll <: BitSet with BitSetLike[Coll]] { - def empty: Coll - def newBuilder: Builder[Int, Coll] - def apply(elems: Int*): Coll = (empty /: elems) (_ + _) - def bitsetCanBuildFrom = new CanBuildFrom[Coll, Int, Coll] { - def apply(from: Coll) = newBuilder - def apply() = newBuilder - } -} - diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala deleted file mode 100644 index a1803134f51c..000000000000 --- a/src/library/scala/collection/generic/CanBuildFrom.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.implicitNotFound - -/** A base trait for builder factories. - * - * @tparam From the type of the underlying collection that requests - * a builder to be created. - * @tparam Elem the element type of the collection to be created. - * @tparam To the type of the collection to be created. - * - * @see [[scala.collection.mutable.Builder]] - * @author Martin Odersky - * @author Adriaan Moors - * @since 2.8 - */ -@implicitNotFound(msg = "Cannot construct a collection of type ${To} with elements of type ${Elem} based on a collection of type ${From}.") -trait CanBuildFrom[-From, -Elem, +To] { - - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return a builder for collections of type `To` with element type `Elem`. - * The collections framework usually arranges things so - * that the created builder will build the same kind of collection - * as `from`. 
- */ - def apply(from: From): Builder[Elem, To] - - /** Creates a new builder from scratch. - * - * @return a builder for collections of type `To` with element type `Elem`. - * @see scala.collection.breakOut - */ - def apply(): Builder[Elem, To] -} diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala deleted file mode 100644 index ead36ffe7709..000000000000 --- a/src/library/scala/collection/generic/CanCombineFrom.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.parallel._ - -/** A base trait for parallel builder factories. - * - * @tparam From the type of the underlying collection that requests a - * builder to be created. - * @tparam Elem the element type of the collection to be created. - * @tparam To the type of the collection to be created. - * @since 2.8 - */ -trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel { - def apply(from: From): Combiner[Elem, To] - def apply(): Combiner[Elem, To] -} - diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala deleted file mode 100644 index 37f9ee8ee682..000000000000 --- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds -import scala.reflect.ClassTag - -/** A template for companion objects of `ClassTagTraversable` and - * subclasses thereof. - * - * @define coll collection - * @define Coll `Traversable` - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ClassTagTraversableFactory[CC[X] <: Traversable[X] with GenericClassTagTraversableTemplate[X, CC]] - extends GenericClassTagCompanion[CC] { - - class GenericCanBuildFrom[A](implicit tag: ClassTag[A]) extends CanBuildFrom[CC[_], A, CC[A]] { - def apply(from: CC[_]) = from.genericClassTagBuilder[A] - def apply = newBuilder[A] - } -} diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala deleted file mode 100644 index cc655d83e0fd..000000000000 --- a/src/library/scala/collection/generic/Clearable.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** This trait forms part of collections that can be cleared - * with a clear() call. - * - * @author Paul Phillips - * @since 2.10 - * @define coll clearable collection - * @define Coll `Clearable` - */ -trait Clearable { - /** Clears the $coll's contents. After this operation, the - * $coll is empty. 
- */ - def clear(): Unit -} diff --git a/src/library/scala/collection/generic/CommonErrors.scala b/src/library/scala/collection/generic/CommonErrors.scala new file mode 100644 index 000000000000..e9f863643d27 --- /dev/null +++ b/src/library/scala/collection/generic/CommonErrors.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + + +/** Some precomputed common errors to reduce the generated code size. + */ +private[collection] object CommonErrors { + /** IndexOutOfBounds exception with a known max index */ + @noinline + def indexOutOfBounds(index: Int, max: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${max})") + + /** IndexOutOfBounds exception with an unknown max index. */ + @noinline + def indexOutOfBounds(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max unknown)") +} diff --git a/src/library/scala/collection/generic/DefaultSerializationProxy.scala b/src/library/scala/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..2e584eaa427d --- /dev/null +++ b/src/library/scala/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. + */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} + +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. 
Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. + */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala deleted file mode 100644 index 6ec66fb72190..000000000000 --- a/src/library/scala/collection/generic/FilterMonadic.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods - * of trait `TraversableLike`. 
- */ -trait FilterMonadic[+A, +Repr] extends Any { - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That - def foreach[U](f: A => U): Unit - def withFilter(p: A => Boolean): FilterMonadic[A, Repr] -} diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala deleted file mode 100644 index 0889436e056f..000000000000 --- a/src/library/scala/collection/generic/GenMapFactory.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.{Builder, MapBuilder} -import scala.language.higherKinds - -/** A template for companion objects of `Map` and subclasses thereof. - * - * @define coll map - * @define Coll `Map` - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. - * @author Martin Odersky - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A, B]]] { - - /** The type constructor of the collection that can be built by this factory */ - type Coll = CC[_, _] - - /** An empty $Coll */ - def empty[A, B]: CC[A, B] - - /** A collection of type $Coll that contains given key/value bindings. 
- * @param elems the key/value pairs that make up the $coll - * @tparam A the type of the keys - * @tparam B the type of the associated values - * @return a new $coll consisting key/value pairs given by `elems`. - */ - def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result() - - /** The default builder for $Coll objects. - * @tparam A the type of the keys - * @tparam B the type of the associated values - */ - def newBuilder[A, B]: Builder[(A, B), CC[A, B]] = new MapBuilder[A, B, CC[A, B]](empty[A, B]) - - /** The standard `CanBuildFrom` class for maps. - */ - class MapCanBuildFrom[A, B] extends CanBuildFrom[Coll, (A, B), CC[A, B]] { - def apply(from: Coll) = newBuilder[A, B] - def apply() = newBuilder - } -} diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala deleted file mode 100644 index 37506756e185..000000000000 --- a/src/library/scala/collection/generic/GenSeqFactory.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of Seq and subclasses thereof. 
- * - * @since 2.8 - */ -abstract class GenSeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]] -extends GenTraversableFactory[CC] diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala deleted file mode 100644 index 89a6efbb09e3..000000000000 --- a/src/library/scala/collection/generic/GenSetFactory.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** A template for companion objects of `Set` and subclasses thereof. - * - * @define coll set - * @define Coll `Set` - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. - * @author Martin Odersky - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @define setCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. 
- * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenSetFactory[CC[X] <: GenSet[X] with GenSetLike[X, CC[X]]] - extends GenericCompanion[CC] { - - def newBuilder[A]: Builder[A, CC[A]] - - /** $setCanBuildFromInfo - */ - def setCanBuildFrom[A] = new CanBuildFrom[CC[_], A, CC[A]] { - def apply(from: CC[_]) = from match { - // When building from an existing Set, try to preserve its type: - case from: Set[_] => from.genericBuilder.asInstanceOf[Builder[A, CC[A]]] - case _ => newBuilder[A] - } - def apply() = newBuilder[A] - } -} diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala deleted file mode 100644 index a3288ba27d40..000000000000 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Traversable` and subclasses thereof. - * This class provides a set of operations to create `$Coll` objects. - * It is typically inherited by companion objects of subclasses of `Traversable`. - * - * @since 2.8 - * - * @define coll collection - * @define Coll `Traversable` - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. 
- * The created value is an instance of class `GenericCanBuildFrom`, - * which forwards calls to create a new builder to the - * `genericBuilder` method of the requesting collection. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]] -extends GenericCompanion[CC] { - - private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { - override def apply() = newBuilder[Nothing] - } - def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance - - /** A generic implementation of the `CanBuildFrom` trait, which forwards - * all calls to `apply(from)` to the `genericBuilder` method of - * $coll `from`, and which forwards all calls of `apply()` to the - * `newBuilder` method of this factory. - */ - class GenericCanBuildFrom[A] extends CanBuildFrom[CC[_], A, CC[A]] { - /** Creates a new builder on request of a collection. - * @param from the collection requesting the builder to be created. - * @return the result of invoking the `genericBuilder` method on `from`. - */ - def apply(from: Coll) = from.genericBuilder[A] - - /** Creates a new builder from scratch - * @return the result of invoking the `newBuilder` method of this factory. - */ - def apply() = newBuilder[A] - } - - /** Concatenates all argument collections into a single $coll. - * - * @param xss the collections that are to be concatenated. - * @return the concatenation of all the collections. - */ - def concat[A](xss: Traversable[A]*): CC[A] = { - val b = newBuilder[A] - // At present we're using IndexedSeq as a proxy for "has a cheap size method". - if (xss forall (_.isInstanceOf[IndexedSeq[_]])) - b.sizeHint(xss.map(_.size).sum) - - for (xs <- xss.seq) b ++= xs - b.result() - } - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. 
- * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = - tabulate(n1)(_ => fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = - tabulate(n1)(_ => fill(n2, n3)(elem)) - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(_ => fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. 
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. 
- */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3nd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Produces a $coll containing a sequence of increasing of integers. 
- * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[T: Integral](start: T, end: T, step: T): CC[T] = { - val num = implicitly[Integral[T]] - import num._ - - if (step == zero) throw new IllegalArgumentException("zero step") - val b = newBuilder[T] - b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) - var i = start - while (if (step < zero) end < i else i < end) { - b += i - i += step - } - b.result() - } - - /** Produces a $coll containing repeated applications of a function to a start value. 
- * - * @param start the start value of the $coll - * @param len the number of elements contained in the $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = { - val b = newBuilder[A] - if (len > 0) { - b.sizeHint(len) - var acc = start - var i = 1 - b += acc - - while (i < len) { - acc = f(acc) - i += 1 - b += acc - } - } - b.result() - } -} diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala deleted file mode 100644 index 2aba79a75a00..000000000000 --- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds -import scala.reflect.ClassTag - -/** This class represents companions of classes which require ClassTags - * for their element types. 
- * - * @author Aleksandar Prokopec - */ -abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] { - protected[this] type Coll = CC[_] - - def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]] - - def empty[A: ClassTag]: CC[A] = newBuilder[A].result() - - def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = { - val b = newBuilder[A] - b ++= elems - b.result() - } -} diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala deleted file mode 100644 index 3627fb247184..000000000000 --- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds -import scala.reflect.ClassTag - -/** This trait represents collections classes which require class - * tags for their element types. 
- * - * @author Aleksandar Prokopec - * @since 2.8 - */ -trait GenericClassTagTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - implicit protected[this] val tag: ClassTag[A] - def classTagCompanion: GenericClassTagCompanion[CC] - def genericClassTagBuilder[B](implicit tag: ClassTag[B]): Builder[B, CC[B]] = classTagCompanion.newBuilder[B] - @deprecated("use classTagCompanion instead", "2.10.0") - def classManifestCompanion: GenericClassManifestCompanion[CC] = classTagCompanion - @deprecated("use genericClassTagBuilder instead", "2.10.0") - def genericClassManifestBuilder[B](implicit manifest: ClassManifest[B]): Builder[B, CC[B]] = genericClassTagBuilder[B](manifest) -} diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala deleted file mode 100644 index 2df924fccdb0..000000000000 --- a/src/library/scala/collection/generic/GenericCompanion.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** A template class for companion objects of "regular" collection classes - * represent an unconstrained higher-kinded type. Typically - * such classes inherit from trait `GenericTraversableTemplate`. - * @tparam CC The type constructor representing the collection class. 
- * @see [[scala.collection.generic.GenericTraversableTemplate]] - * @author Martin Odersky - * @since 2.8 - * @define coll collection - * @define Coll `CC` - */ -abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] { - /** The underlying collection type with unknown element type */ - protected[this] type Coll = CC[_] - - /** The default builder for `$Coll` objects. - * @tparam A the type of the ${coll}'s elements - */ - def newBuilder[A]: Builder[A, CC[A]] - - /** An empty collection of type `$Coll[A]` - * @tparam A the type of the ${coll}'s elements - */ - def empty[A]: CC[A] = { - if ((this eq immutable.Seq) || (this eq collection.Seq)) Nil.asInstanceOf[CC[A]] - else newBuilder[A].result() - } - - /** Creates a $coll with the specified elements. - * @tparam A the type of the ${coll}'s elements - * @param elems the elements of the created $coll - * @return a new $coll with elements `elems` - */ - def apply[A](elems: A*): CC[A] = { - if (elems.isEmpty) empty[A] - else { - val b = newBuilder[A] - b ++= elems - b.result() - } - } -} diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala deleted file mode 100644 index 312fffebb655..000000000000 --- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** This class represents companions of classes which require the ordered trait - * for their element types. 
- * - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] { - protected[this] type Coll = CC[_] - - def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] - - def empty[A: Ordering]: CC[A] = newBuilder[A].result() - - def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = { - val b = newBuilder[A] - b ++= elems - b.result() - } -} - diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala deleted file mode 100644 index da2a9d7817b1..000000000000 --- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** This trait represents collections classes which require - * ordered element types. 
- * - * @author Aleksandar Prokopec - */ -trait GenericOrderedTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - implicit protected[this] val ord: Ordering[A] - def orderedCompanion: GenericOrderedCompanion[CC] - def genericOrderedBuilder[B](implicit ord: Ordering[B]): Builder[B, CC[B]] = orderedCompanion.newBuilder[B] -} - diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala deleted file mode 100644 index 21c69465986b..000000000000 --- a/src/library/scala/collection/generic/GenericParCompanion.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParIterable -import scala.collection.parallel.ParMap -import scala.language.higherKinds - -/** A template class for companion objects of parallel collection classes. - * They should be mixed in together with `GenericCompanion` type. - * - * @define Coll `ParIterable` - * @tparam CC the type constructor representing the collection class - * @since 2.8 - */ -trait GenericParCompanion[+CC[X] <: ParIterable[X]] { - /** The default builder for $Coll objects. - */ - def newBuilder[A]: Combiner[A, CC[A]] - - /** The parallel builder for $Coll objects. 
- */ - def newCombiner[A]: Combiner[A, CC[A]] -} - -trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] { - def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] -} - diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala deleted file mode 100644 index c53556108e94..000000000000 --- a/src/library/scala/collection/generic/GenericParTemplate.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParIterable -import scala.collection.parallel.ParMap - -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** A template trait for collections having a companion. 
- * - * @tparam A the element type of the collection - * @tparam CC the type constructor representing the collection class - * @author Aleksandar Prokopec - * @since 2.8 - */ -trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]] -extends GenericTraversableTemplate[A, CC] - with HasNewCombiner[A, CC[A] @uncheckedVariance] -{ - def companion: GenericCompanion[CC] with GenericParCompanion[CC] - - protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner - - protected[this] override def newCombiner: Combiner[A, CC[A]] = { - val cb = companion.newCombiner[A] - cb - } - - override def genericBuilder[B]: Combiner[B, CC[B]] = genericCombiner[B] - - def genericCombiner[B]: Combiner[B, CC[B]] = { - val cb = companion.newCombiner[B] - cb - } - -} - - -trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable] -{ - protected[this] override def newCombiner: Combiner[(K, V), CC[K, V]] = { - val cb = mapCompanion.newCombiner[K, V] - cb - } - - def mapCompanion: GenericParMapCompanion[CC] - - def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = { - val cb = mapCompanion.newCombiner[P, Q] - cb - } -} - diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala deleted file mode 100644 index 46050229cce8..000000000000 --- a/src/library/scala/collection/generic/GenericSeqCompanion.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -trait GenericSeqCompanion[CC[X] <: Traversable[X]] - extends GenericCompanion[CC] diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala deleted file mode 100644 index 106a19673c15..000000000000 --- a/src/library/scala/collection/generic/GenericSetTemplate.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic -import scala.language.higherKinds -/** - * @since 2.8 - */ -trait GenericSetTemplate[A, +CC[X] <: GenSet[X]] extends GenericTraversableTemplate[A, CC] { - def empty: CC[A] = companion.empty[A] -} - diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala deleted file mode 100644 index 283fde39d393..000000000000 --- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala +++ /dev/null @@ -1,234 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.annotation.migration -import scala.annotation.unchecked.uncheckedVariance -import scala.language.higherKinds - -/** A template class for companion objects of ``regular`` collection classes - * that represent an unconstrained higher-kinded type. 
- * - * @tparam A The type of the collection elements. - * @tparam CC The type constructor representing the collection class. - * @author Martin Odersky - * @since 2.8 - * @define coll collection - * @define Coll Traversable - */ -trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] { - - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - */ - def foreach[U](f: A => U): Unit - - /** Selects the first element of this $coll. - * - * @return the first element of this $coll. - * @throws NoSuchElementException if the $coll is empty. - */ - def head: A - - /** Tests whether this $coll is empty. - * - * @return `true` if the $coll contain no elements, `false` otherwise. - */ - def isEmpty: Boolean - - /** The factory companion object that builds instances of class $Coll. - * (or its `Iterable` superclass where class $Coll is not a `Seq`.) - */ - def companion: GenericCompanion[CC] - - /** The builder that builds instances of type $Coll[A] - */ - protected[this] def newBuilder: Builder[A, CC[A]] = companion.newBuilder[A] - - /** The generic builder that builds instances of $Coll - * at arbitrary element types. - */ - def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B] - - private def sequential: TraversableOnce[A] = this.asInstanceOf[GenTraversableOnce[A]].seq - - /** Converts this $coll of pairs into two collections of the first and second - * half of each pair. 
- * - * {{{ - * val xs = $Coll( - * (1, "one"), - * (2, "two"), - * (3, "three")).unzip - * // xs == ($Coll(1, 2, 3), - * // $Coll(one, two, three)) - * }}} - * - * @tparam A1 the type of the first half of the element pairs - * @tparam A2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this $coll is a pair. - * @return a pair of ${coll}s, containing the first, respectively second - * half of each element pair of this $coll. - */ - def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { - val b1 = genericBuilder[A1] - val b2 = genericBuilder[A2] - for (xy <- sequential) { - val (x, y) = asPair(xy) - b1 += x - b2 += y - } - (b1.result(), b2.result()) - } - - /** Converts this $coll of triples into three collections of the first, second, - * and third element of each triple. - * - * {{{ - * val xs = $Coll( - * (1, "one", '1'), - * (2, "two", '2'), - * (3, "three", '3')).unzip3 - * // xs == ($Coll(1, 2, 3), - * // $Coll(one, two, three), - * // $Coll(1, 2, 3)) - * }}} - * - * @tparam A1 the type of the first member of the element triples - * @tparam A2 the type of the second member of the element triples - * @tparam A3 the type of the third member of the element triples - * @param asTriple an implicit conversion which asserts that the element type - * of this $coll is a triple. - * @return a triple of ${coll}s, containing the first, second, respectively - * third member of each element triple of this $coll. - */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { - val b1 = genericBuilder[A1] - val b2 = genericBuilder[A2] - val b3 = genericBuilder[A3] - - for (xyz <- sequential) { - val (x, y, z) = asTriple(xyz) - b1 += x - b2 += y - b3 += z - } - (b1.result(), b2.result(), b3.result()) - } - - /** Converts this $coll of traversable collections into - * a $coll formed by the elements of these traversable - * collections. 
- * - * @tparam B the type of the elements of each traversable collection. - * @param asTraversable an implicit conversion which asserts that the element - * type of this $coll is a `GenTraversable`. - * @return a new $coll resulting from concatenating all element ${coll}s. - * - * @usecase def flatten[B]: $Coll[B] - * - * @inheritdoc - * - * The resulting collection's type will be guided by the - * static type of $coll. For example: - * - * {{{ - * val xs = List( - * Set(1, 2, 3), - * Set(1, 2, 3) - * ).flatten - * // xs == List(1, 2, 3, 1, 2, 3) - * - * val ys = Set( - * List(1, 2, 3), - * List(3, 2, 1) - * ).flatten - * // ys == Set(1, 2, 3) - * }}} - */ - def flatten[B](implicit asTraversable: A => /*<: /*<:= headSize) fail - bs(i) += x - i += 1 - } - if (i != headSize) - fail - } - val bb = genericBuilder[CC[B]] - for (b <- bs) bb += b.result - bb.result() - } -} - diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala deleted file mode 100644 index affe3ace5ecd..000000000000 --- a/src/library/scala/collection/generic/Growable.scala +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.annotation.tailrec - -/** This trait forms part of collections that can be augmented - * using a `+=` operator and that can be cleared of all elements using - * a `clear` method. - * - * @author Martin Odersky - * @since 2.8 - * @define coll growable collection - * @define Coll `Growable` - * @define add add - * @define Add add - */ -trait Growable[-A] extends Clearable { - - /** ${Add}s a single element to this $coll. - * - * @param elem the element to $add. 
- * @return the $coll itself - */ - def +=(elem: A): this.type - - /** ${Add}s two or more elements to this $coll. - * - * @param elem1 the first element to $add. - * @param elem2 the second element to $add. - * @param elems the remaining elements to $add. - * @return the $coll itself - */ - def +=(elem1: A, elem2: A, elems: A*): this.type = this += elem1 += elem2 ++= elems - - /** ${Add}s all elements produced by a TraversableOnce to this $coll. - * - * @param xs the TraversableOnce producing the elements to $add. - * @return the $coll itself. - */ - def ++=(xs: TraversableOnce[A]): this.type = { - @tailrec def loop(xs: scala.collection.LinearSeq[A]) { - if (xs.nonEmpty) { - this += xs.head - loop(xs.tail) - } - } - xs match { - case xs: scala.collection.LinearSeq[_] => loop(xs) - case xs => xs foreach += - } - this - } - - /** Clears the $coll's contents. After this operation, the - * $coll is empty. - */ - def clear(): Unit -} diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala deleted file mode 100644 index 5d788f272d5c..000000000000 --- a/src/library/scala/collection/generic/HasNewBuilder.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package generic - -import mutable.Builder - -trait HasNewBuilder[+A, +Repr] extends Any { - /** The builder that builds instances of Repr */ - protected[this] def newBuilder: Builder[A, Repr] -} diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala deleted file mode 100644 index e5a8c3de1ff4..000000000000 --- a/src/library/scala/collection/generic/HasNewCombiner.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner - -/** - * @since 2.8 - */ -trait HasNewCombiner[+T, +Repr] { - protected[this] def newCombiner: Combiner[T, Repr] -} - diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala deleted file mode 100644 index 8d414802bd1f..000000000000 --- a/src/library/scala/collection/generic/ImmutableMapFactory.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `immutable.Map` and subclasses thereof. 
- * @author Martin Odersky - * @since 2.8 - */ -abstract class ImmutableMapFactory[CC[A, +B] <: immutable.Map[A, B] with immutable.MapLike[A, B, CC[A, B]]] extends MapFactory[CC] diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala deleted file mode 100644 index ce3e8e192f7a..000000000000 --- a/src/library/scala/collection/generic/ImmutableSetFactory.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.{ Builder, SetBuilder } -import scala.language.higherKinds - -abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]] - extends SetFactory[CC] { - private[collection] def emptyInstance: CC[Any] - override def empty[A] = emptyInstance.asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty[A]) -} diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala deleted file mode 100644 index 06fa481859b4..000000000000 --- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `SortedMap` and subclasses thereof. 
- * - * @since 2.8 - * @define Coll `SortedMap` - * @define coll sorted map - * @define factoryInfo - * This object provides a set of operations needed to create sorted maps of type `$Coll`. - * @author Martin Odersky - * @define sortedMapCanBuildFromInfo - * The standard `CanBuildFrom` instance for sorted maps - */ -abstract class ImmutableSortedMapFactory[CC[A, B] <: immutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] extends SortedMapFactory[CC] diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala deleted file mode 100644 index 30fa8215af61..000000000000 --- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `SortedSet` and subclasses thereof. - * - * @since 2.8 - * @define Coll `immutable.SortedSet` - * @define coll immutable sorted set - * @define factoryInfo - * This object provides a set of operations needed to create sorted sets of type `$Coll`. 
- * @author Martin Odersky - * @define sortedSetCanBuildFromInfo - * The standard `CanBuildFrom` instance for sorted sets - */ -abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC] diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala deleted file mode 100644 index 336c1bea26c8..000000000000 --- a/src/library/scala/collection/generic/IndexedSeqFactory.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import language.higherKinds - -/** A template for companion objects of IndexedSeq and subclasses thereof. - * - * @since 2.11 - */ -abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] diff --git a/src/library/scala/collection/generic/IsIterable.scala b/src/library/scala/collection/generic/IsIterable.scala new file mode 100644 index 000000000000..2260f0f2aacb --- /dev/null +++ b/src/library/scala/collection/generic/IsIterable.scala @@ -0,0 +1,164 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package generic + +/** A trait which can be used to avoid code duplication when defining extension + * methods that should be applicable both to existing Scala collections (i.e., + * types extending `Iterable`) as well as other (potentially user-defined) + * types that could be converted to a Scala collection type. This trait + * makes it possible to treat Scala collections and types that can be implicitly + * converted to a collection type uniformly. For example, one can provide + * extension methods that work both on collection types and on `String`s (`String`s + * do not extend `Iterable`, but can be converted to `Iterable`) + * + * `IsIterable` provides three members: + * + * 1. type member `A`, which represents the element type of the target `Iterable[A]` + * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s elements type + * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`. + * + * ===Usage=== + * + * One must provide `IsIterable` as an implicit parameter type of an implicit + * conversion. Its usage is shown below. Our objective in the following example + * is to provide a generic extension method `mapReduce` to any type that extends + * or can be converted to `Iterable`. In our example, this includes + * `String`. 
+ * + * {{{ + * import scala.collection.{Iterable, IterableOps} + * import scala.collection.generic.IsIterable + * + * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) { + * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = { + * val iter = it(coll).iterator + * var res = mapper(iter.next()) + * while (iter.hasNext) + * res = reducer(res, mapper(iter.next())) + * res + * } + * } + * + * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] = + * new ExtensionMethods(coll, it) + * + * // See it in action! + * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 + * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 + *}}} + * + * Here, we begin by creating a class `ExtensionMethods` which contains our + * `mapReduce` extension method. + * + * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where + * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`. + * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to + * call the `iterator` method on it. + * The remaining of the implementation is straightforward. + * + * The `withExtensions` implicit conversion makes the `mapReduce` operation available + * on any type `Repr` for which it exists an implicit `IsIterable[Repr]` instance. + * Note how we keep track of the precise type of the implicit `it` argument by using the + * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that + * so that the information carried by the type members `A` and `C` of the `it` argument + * is not lost. + * + * When the `mapReduce` method is called on some type of which it is not + * a member, implicit search is triggered. 
Because implicit conversion + * `withExtensions` is generic, it will be applied as long as an implicit + * value of type `IsIterable[Repr]` can be found. Given that the + * `IsIterable` companion object contains implicit members that return values of type + * `IsIterable`, this requirement is typically satisfied, and the chain + * of interactions described in the previous paragraph is set into action. + * (See the `IsIterable` companion object, which contains a precise + * specification of the available implicits.) + * + * ''Note'': Currently, it's not possible to combine the implicit conversion and + * the class with the extension methods into an implicit class due to + * limitations of type inference. + * + * ===Implementing `IsIterable` for New Types=== + * + * One must simply provide an implicit value of type `IsIterable` + * specific to the new type, or an implicit conversion which returns an + * instance of `IsIterable` specific to the new type. + * + * Below is an example of an implementation of the `IsIterable` trait + * where the `Repr` type is `Range`. + * + *{{{ + * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } = + * new IsIterable[Range] { + * type A = Int + * type C = IndexedSeq[Int] + * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll + * } + *}}} + * + * (Note that in practice the `IsIterable[Range]` instance is already provided by + * the standard library, and it is defined as an `IsSeq[Range]` instance) + */ +trait IsIterable[Repr] extends IsIterableOnce[Repr] { + + /** The type returned by transformation operations that preserve the same elements + * type (e.g. `filter`, `take`). + * + * In practice, this type is often `Repr` itself, excepted in the case + * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`. 
+ */ + type C + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ + def apply(coll: Repr): IterableOps[A, Iterable, C] + +} + +object IsIterable extends IsIterableLowPriority { + + // Straightforward case: IterableOps subclasses + implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = + new IsIterable[CC0[A0]] { + type A = A0 + type C = CC0[A0] + def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll + } + + // The `BitSet` type can not be unified with the `CC0` parameter of + // the above definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = + new IsIterable[C0] { + type A = Int + type C = C0 + def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll + } + +} + +trait IsIterableLowPriority { + + // Makes `IsSeq` instances visible in `IsIterable` companion + implicit def isSeqLikeIsIterable[Repr](implicit + isSeqLike: IsSeq[Repr] + ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike + + // Makes `IsMap` instances visible in `IsIterable` companion + implicit def isMapLikeIsIterable[Repr](implicit + isMapLike: IsMap[Repr] + ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike + +} diff --git a/src/library/scala/collection/generic/IsIterableOnce.scala b/src/library/scala/collection/generic/IsIterableOnce.scala new file mode 100644 index 000000000000..82f0ec8b7332 --- /dev/null +++ b/src/library/scala/collection/generic/IsIterableOnce.scala @@ -0,0 +1,71 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package generic + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `IterableOnce[A]`. + * + * This type enables simple enrichment of `IterableOnce`s with extension + * methods which can make full use of the mechanics of the Scala collections + * framework in their implementation. + * + * Example usage, + * {{{ + * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { + * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { + * val b = bf.newBuilder(coll) + * for(e <- it(coll).iterator) f(e) foreach (b +=) + * b.result() + * } + * } + * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = + * new FilterMapImpl(coll, it) + * + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + */ +trait IsIterableOnce[Repr] { + + /** The type of elements we can traverse over (e.g. `Int`). */ + type A + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + val conversion: Repr => IterableOnce[A] = apply(_) + + /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. 
*/ + def apply(coll: Repr): IterableOnce[A] + +} + +object IsIterableOnce extends IsIterableOnceLowPriority { + + // Straightforward case: IterableOnce subclasses + implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = + new IsIterableOnce[CC0[A0]] { + type A = A0 + def apply(coll: CC0[A0]): IterableOnce[A0] = coll + } + +} + +trait IsIterableOnceLowPriority { + + // Makes `IsIterable` instance visible in `IsIterableOnce` companion + implicit def isIterableLikeIsIterableOnce[Repr](implicit + isIterableLike: IsIterable[Repr] + ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike + +} diff --git a/src/library/scala/collection/generic/IsMap.scala b/src/library/scala/collection/generic/IsMap.scala new file mode 100644 index 000000000000..6178b2f2b7ca --- /dev/null +++ b/src/library/scala/collection/generic/IsMap.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import IsMap.Tupled +import scala.collection.immutable.{IntMap, LongMap} + +/** + * Type class witnessing that a collection type `Repr` + * has keys of type `K`, values of type `V` and has a conversion to + * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. + * + * This type enables simple enrichment of `Map`s with extension methods. + * + * @see [[scala.collection.generic.IsIterable]] + * @tparam Repr Collection type (e.g. 
`Map[Int, String]`) + */ +trait IsMap[Repr] extends IsIterable[Repr] { + + /** The type of keys */ + type K + + /** The type of values */ + type V + + type A = (K, V) + + /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` + * + * @note The third type parameter of the returned `MapOps` value is + * still `Iterable` (and not `Map`) because `MapView[K, V]` only + * extends `MapOps[K, V, View, View[A]]`. + */ + override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] + +} + +object IsMap { + + /** Convenient type level function that takes a unary type constructor `F[_]` + * and returns a binary type constructor that tuples its parameters and passes + * them to `F`. + * + * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. + */ + type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } + + // Map collections + implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = CC0[K0, V0] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c + } + + // MapView + implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = View[(K, V)] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c + } + + // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition + @deprecated("AnyRefMap is deprecated", "2.13.16") + implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = + new IsMap[mutable.AnyRefMap[K0, V0]] { + type K = K0 + type V = V0 + type C = mutable.AnyRefMap[K0, V0] + def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = 
c + } + + // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters + implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = + new IsMap[IntMap[V0]] { + type K = Int + type V = V0 + type C = IntMap[V0] + def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c + } + + // LongMap is in a similar situation as IntMap + implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = + new IsMap[LongMap[V0]] { + type K = Long + type V = V0 + type C = LongMap[V0] + def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c + } + + // mutable.LongMap is in a similar situation as LongMap and IntMap + implicit def mutableLongMapIsMap[V0]: IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = + new IsMap[mutable.LongMap[V0]] { + type K = Long + type V = V0 + type C = mutable.LongMap[V0] + def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c + } + + +} diff --git a/src/library/scala/collection/generic/IsSeq.scala b/src/library/scala/collection/generic/IsSeq.scala new file mode 100644 index 000000000000..73d0cc9762d6 --- /dev/null +++ b/src/library/scala/collection/generic/IsSeq.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import scala.reflect.ClassTag + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for + * some types `A` and `C`. 
+ * + * This type enables simple enrichment of `Seq`s with extension methods which + * can make full use of the mechanics of the Scala collections framework in + * their implementation. + * + * @see [[scala.collection.generic.IsIterable]] + */ +trait IsSeq[Repr] extends IsIterable[Repr] { + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` + * + * @note The second type parameter of the returned `SeqOps` value is + * still `Iterable` (and not `Seq`) because `SeqView[A]` only + * extends `SeqOps[A, View, View[A]]`. + */ + def apply(coll: Repr): SeqOps[A, Iterable, C] +} + +object IsSeq { + + private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = + new IsSeq[Seq[Any]] { + type A = Any + type C = Any + def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll + } + + implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = + seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] + + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsSeq[CC0[A0]] { + type A = A0 + type C = View[A] + def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + } + + implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = + new IsSeq[String] { + type A = Char + type C = String + def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = + new SeqOps[Char, immutable.ArraySeq, String] { + def length: Int = s.length + def apply(i: Int): Char = s.charAt(i) + def toIterable: Iterable[Char] = new immutable.WrappedString(s) + protected[this] def coll: String = s + protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString + def iterableFactory: IterableFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + override def empty: String 
= "" + protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder + def iterator: Iterator[Char] = s.iterator + } + } + + implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = + new IsSeq[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll + } + + implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + new IsSeq[Array[A0]] { + type A = A0 + type C = Array[A0] + def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = + new SeqOps[A, mutable.ArraySeq, Array[A]] { + def apply(i: Int): A = a(i) + def length: Int = a.length + def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + protected def coll: Array[A] = a + protected def fromSpecific(coll: IterableOnce[A]): Array[A] = Array.from(coll) + def iterableFactory: IterableFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + override def empty: Array[A] = Array.empty[A] + protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder + def iterator: Iterator[A] = a.iterator + } + } + + // `Range` can not be unified with the `CC0` parameter of the + // `seqOpsIsSeq` definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } = + new IsSeq[C0] { + type A = Int + type C = immutable.IndexedSeq[Int] + def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll + } + +} diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala deleted file mode 100644 index 917e15e29d3a..000000000000 --- a/src/library/scala/collection/generic/IsSeqLike.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `SeqLike[A, Repr]`. - * - * This type enables simple enrichment of `Seq`s with extension methods which - * can make full use of the mechanics of the Scala collections framework in - * their implementation. - * - * Example usage: - * {{{ - * class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) { - * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = - * r.flatMap(f(_)) - * } - * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A,Repr] = - * new FilterMapImpl(fr.conversion(r)) - * - * val l = List(1, 2, 3, 4, 5) - * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) - * // == List(2, 4) - * }}} - * - * @see [[scala.collection.Seq]] - * @see [[scala.collection.generic.IsTraversableLike]] - */ -trait IsSeqLike[Repr] { - /** The type of elements we can traverse over. */ - type A - /** A conversion from the representation type `Repr` to a `SeqLike[A,Repr]`. 
*/ - val conversion: Repr => SeqLike[A, Repr] -} - -object IsSeqLike { - import scala.language.higherKinds - - implicit val stringRepr: IsSeqLike[String] { type A = Char } = - new IsSeqLike[String] { - type A = Char - val conversion = implicitly[String => SeqLike[Char, String]] - } - - implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } = - new IsSeqLike[C[A0]] { - type A = A0 - val conversion = conv - } -} diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala deleted file mode 100644 index 3a50bb3582ac..000000000000 --- a/src/library/scala/collection/generic/IsTraversableLike.scala +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** A trait which can be used to avoid code duplication when defining extension - * methods that should be applicable both to existing Scala collections (i.e., - * types extending `GenTraversableLike`) as well as other (potentially user-defined) - * types that could be converted to a Scala collection type. This trait - * makes it possible to treat Scala collections and types that can be implicitly - * converted to a collection type uniformly. For example, one can provide - * extension methods that work both on collection types and on `String`s (`String`s - * do not extend `GenTraversableLike`, but can be converted to `GenTraversableLike`) - * - * `IsTraversable` provides two members: - * - * 1. type member `A`, which represents the element type of the target `GenTraversableLike[A, Repr]` - * 1. 
value member `conversion`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `GenTraversableLike[A, Repr]`. - * - * ===Usage=== - * - * One must provide `IsTraversableLike` as an implicit parameter type of an implicit - * conversion. Its usage is shown below. Our objective in the following example - * is to provide a generic extension method `mapReduce` to any type that extends - * or can be converted to `GenTraversableLike`. In our example, this includes - * `String`. - * - * {{{ - * import scala.collection.GenTraversableLike - * import scala.collection.generic.IsTraversableLike - * - * class ExtensionMethods[A, Repr](coll: GenTraversableLike[A, Repr]) { - * def mapReduce[B](mapper: A => B)(reducer: (B, B) => B): B = { - * val iter = coll.toIterator - * var res = mapper(iter.next()) - * while (iter.hasNext) - * res = reducer(res, mapper(iter.next())) - * res - * } - * } - * - * implicit def withExtensions[Repr](coll: Repr)(implicit traversable: IsTraversableLike[Repr]) = - * new ExtensionMethods(traversable.conversion(coll)) - * - * // See it in action! - * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 - * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 - *}}} - * - * Here, we begin by creating a class `ExtensionMethods` which contains our - * `mapReduce` extension method. Note that `ExtensionMethods` takes a constructor - * argument `coll` of type `GenTraversableLike[A, Repr]`, where `A` represents the - * element type and `Repr` represents (typically) the collection type. The - * implementation of `mapReduce` itself is straightforward. - * - * The interesting bit is the implicit conversion `withExtensions`, which - * returns an instance of `ExtensionMethods`. This implicit conversion can - * only be applied if there is an implicit value `traversable` of type - * `IsTraversableLike[Repr]` in scope. 
Since `IsTraversableLike` provides - * value member `conversion`, which gives us a way to convert between whatever - * type we wish to add an extension method to (in this case, `Repr`) and - * `GenTraversableLike[A, Repr]`, we can now convert `coll` from type `Repr` - * to `GenTraversableLike[A, Repr]`. This allows us to create an instance of - * the `ExtensionMethods` class, which we pass our new - * `GenTraversableLike[A, Repr]` to. - * - * When the `mapReduce` method is called on some type of which it is not - * a member, implicit search is triggered. Because implicit conversion - * `withExtensions` is generic, it will be applied as long as an implicit - * value of type `IsTraversableLike[Repr]` can be found. Given that - * `IsTraversableLike` contains implicit members that return values of type - * `IsTraversableLike`, this requirement is typically satisfied, and the chain - * of interactions described in the previous paragraph is set into action. - * (See the `IsTraversableLike` companion object, which contains a precise - * specification of the available implicits.) - * - * ''Note'': Currently, it's not possible to combine the implicit conversion and - * the class with the extension methods into an implicit class due to - * limitations of type inference. - * - * ===Implementing `IsTraversableLike` for New Types=== - * - * One must simply provide an implicit value of type `IsTraversableLike` - * specific to the new type, or an implicit conversion which returns an - * instance of `IsTraversableLike` specific to the new type. - * - * Below is an example of an implementation of the `IsTraversableLike` trait - * where the `Repr` type is `String`. - * - *{{{ - * implicit val stringRepr: IsTraversableLike[String] { type A = Char } = - * new IsTraversableLike[String] { - * type A = Char - * val conversion = implicitly[String => GenTraversableLike[Char, String]] - * } - *}}} - * - * @author Miles Sabin - * @author J. 
Suereth - * @since 2.10 - */ -trait IsTraversableLike[Repr] { - /** The type of elements we can traverse over. */ - type A - /** A conversion from the representation type `Repr` to a `GenTraversableLike[A,Repr]`. */ - val conversion: Repr => GenTraversableLike[A, Repr] -} - -object IsTraversableLike { - import scala.language.higherKinds - - implicit val stringRepr: IsTraversableLike[String] { type A = Char } = - new IsTraversableLike[String] { - type A = Char - val conversion = implicitly[String => GenTraversableLike[Char, String]] - } - - implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } = - new IsTraversableLike[C[A0]] { - type A = A0 - val conversion = conv - } -} diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala deleted file mode 100644 index 01c45ceb7798..000000000000 --- a/src/library/scala/collection/generic/IsTraversableOnce.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `GenTraversableOnce[A]`. - * - * This type enables simple enrichment of `GenTraversableOnce`s with extension - * methods which can make full use of the mechanics of the Scala collections - * framework in their implementation. 
- * - * Example usage, - * {{{ - * class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) { - * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = { - * val b = cbf() - * for(e <- r.seq) f(e) foreach (b +=) - * b.result - * } - * } - * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] = - * new FilterMapImpl[fr.A, Repr](fr.conversion(r)) - * - * val l = List(1, 2, 3, 4, 5) - * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) - * // == List(2, 4) - * }}} - * - * @author Miles Sabin - * @author J. Suereth - * @since 2.10 - */ -trait IsTraversableOnce[Repr] { - /** The type of elements we can traverse over. */ - type A - /** A conversion from the representation type `Repr` to a `GenTraversableOnce[A]`. */ - val conversion: Repr => GenTraversableOnce[A] -} - -object IsTraversableOnce { - import scala.language.higherKinds - - implicit val stringRepr: IsTraversableOnce[String] { type A = Char } = - new IsTraversableOnce[String] { - type A = Char - val conversion = implicitly[String => GenTraversableOnce[Char]] - } - - implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } = - new IsTraversableOnce[C[A0]] { - type A = A0 - val conversion = conv - } -} - diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala deleted file mode 100644 index 7905ff054549..000000000000 --- a/src/library/scala/collection/generic/IterableForwarder.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package generic - -import scala.collection._ - -/** This trait implements a forwarder for iterable objects. It forwards - * all calls to a different iterable object, except for - * - * - `toString`, `hashCode`, `equals`, `stringPrefix` - * - `newBuilder`, `view` - * - all calls creating a new iterable object of the same kind - * - * The above methods are forwarded by subclass `IterableProxy`. - * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("forwarding is inherently unreliable since it is not automated and methods can be forgotten", "2.11.0") -trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] { - - /** The iterable object to which calls are forwarded */ - protected def underlying: Iterable[A] - - // Iterable delegates - // Iterable methods could be printed by cat IterableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def' - - override def iterator: Iterator[A] = underlying.iterator - override def sameElements[B >: A](that: GenIterable[B]): Boolean = underlying.sameElements(that) -} diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala deleted file mode 100644 index ded046302eb2..000000000000 --- a/src/library/scala/collection/generic/MapFactory.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** A template for companion objects of `Map` and subclasses thereof. - * - * @define coll map - * @define Coll Map - * @define factoryInfo - * This object provides a set of operations needed to create `$Coll` values. 
- * @author Martin Odersky - * @since 2.8 - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * @see CanBuildFrom - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] extends GenMapFactory[CC] { - - def empty[A, B]: CC[A, B] - -} diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala deleted file mode 100644 index e9648f261c0d..000000000000 --- a/src/library/scala/collection/generic/MutableMapFactory.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.Builder -import scala.language.higherKinds - -/** A template for companion objects of `mutable.Map` and subclasses thereof. - * @author Martin Odersky - * @since 2.8 - */ -abstract class MutableMapFactory[CC[A, B] <: mutable.Map[A, B] with mutable.MapLike[A, B, CC[A, B]]] - extends MapFactory[CC] { - - /** The default builder for $Coll objects. 
- * @tparam A the type of the keys - * @tparam B the type of the associated values - */ - override def newBuilder[A, B]: Builder[(A, B), CC[A, B]] = empty[A, B] -} diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala deleted file mode 100644 index 001b1c387545..000000000000 --- a/src/library/scala/collection/generic/MutableSetFactory.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.{ Builder, GrowingBuilder } -import scala.language.higherKinds - -abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]] - extends SetFactory[CC] { - - def newBuilder[A]: Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty[A]) -} diff --git a/src/library/scala/collection/generic/MutableSortedMapFactory.scala b/src/library/scala/collection/generic/MutableSortedMapFactory.scala deleted file mode 100644 index bd1454d7c136..000000000000 --- a/src/library/scala/collection/generic/MutableSortedMapFactory.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -/** - * A template for companion objects of `SortedMap` and subclasses thereof. - * - * @tparam CC the type of the collection. 
- * - * @author Rui Gonçalves - * @since 2.12 - * @version 2.12 - * - * @define Coll `mutable.SortedMap` - * @define coll mutable sorted map - * @define factoryInfo - * This object provides a set of operations needed to create sorted maps of type `$Coll`. - * @define sortedMapCanBuildFromInfo - * The standard `CanBuildFrom` instance for sorted maps - */ -abstract class MutableSortedMapFactory[CC[A, B] <: mutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] - extends SortedMapFactory[CC] diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala deleted file mode 100644 index ae7fa89fa649..000000000000 --- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.mutable.{ Builder, GrowingBuilder } -import scala.language.higherKinds - -/** - * @define Coll `mutable.SortedSet` - * @define coll mutable sorted set - * - * @author Lucien Pereira - * - */ -abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] { - - /** - * mutable.SetBuilder uses '+' which is not a primitive for anything extending mutable.SetLike, - * this causes serious performance issues since each time 'elems = elems + x' - * is evaluated elems is cloned (which is O(n)). - * - * Fortunately GrowingBuilder comes to rescue. 
- * - */ - override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty) - -} diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala deleted file mode 100644 index 7ffc3e0529cf..000000000000 --- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -abstract class OrderedTraversableFactory[CC[X] <: Traversable[X] with GenericOrderedTraversableTemplate[X, CC]] -extends GenericOrderedCompanion[CC] { - - class GenericCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[CC[_], A, CC[A]] { - def apply(from: CC[_]) = from.genericOrderedBuilder[A] - def apply = newBuilder[A] - } - -} diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala deleted file mode 100644 index 702349388c92..000000000000 --- a/src/library/scala/collection/generic/ParFactory.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.parallel.ParIterable -import scala.language.higherKinds - -/** A template class for companion objects of `ParIterable` and subclasses - * thereof. 
This class extends `TraversableFactory` and provides a set of - * operations to create `$Coll` objects. - * - * @define coll parallel collection - * @define Coll `ParIterable` - * @since 2.8 - */ -abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]] -extends GenTraversableFactory[CC] - with GenericParCompanion[CC] { - - //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C] - - /** A generic implementation of the `CanCombineFrom` trait, which forwards - * all calls to `apply(from)` to the `genericParBuilder` method of the $coll - * `from`, and calls to `apply()` to this factory. - */ - class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner - override def apply() = newBuilder[A] - } -} diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala deleted file mode 100644 index d7b5368cd4bb..000000000000 --- a/src/library/scala/collection/generic/ParMapFactory.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection.parallel.ParMap -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner -import scala.collection.mutable.Builder -import scala.language.higherKinds - -/** A template class for companion objects of `ParMap` and subclasses thereof. - * This class extends `TraversableFactory` and provides a set of operations - * to create `$Coll` objects. 
- * - * @define coll parallel map - * @define Coll `ParMap` - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC[X, Y], _]] -extends GenMapFactory[CC] - with GenericParMapCompanion[CC] { - - type MapColl = CC[_, _] - - /** The default builder for $Coll objects. - * @tparam K the type of the keys - * @tparam V the type of the associated values - */ - override def newBuilder[K, V]: Builder[(K, V), CC[K, V]] = newCombiner[K, V] - - /** The default combiner for $Coll objects. - * @tparam K the type of the keys - * @tparam V the type of the associated values - */ - def newCombiner[K, V]: Combiner[(K, V), CC[K, V]] - - class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] { - def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]] - def apply() = newCombiner[K, V] - } - -} - diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala deleted file mode 100644 index b23a132bb4a8..000000000000 --- a/src/library/scala/collection/generic/ParSetFactory.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package generic - -import scala.collection.parallel.Combiner -import scala.collection.parallel.ParSet -import scala.collection.parallel.ParSetLike -import scala.language.higherKinds - -/** - * @author Aleksandar Prokopec - * @since 2.8 - */ -abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] with GenericParTemplate[X, CC]] - extends GenSetFactory[CC] - with GenericParCompanion[CC] -{ - def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A] - - def newCombiner[A]: Combiner[A, CC[A]] - - class GenericCanCombineFrom[A] extends CanCombineFrom[CC[_], A, CC[A]] { - override def apply(from: Coll) = from.genericCombiner[A] - override def apply() = newCombiner[A] - } -} - diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala deleted file mode 100644 index 918d2308823b..000000000000 --- a/src/library/scala/collection/generic/SeqFactory.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic -import scala.language.higherKinds - -/** A template for companion objects of Seq and subclasses thereof. - * - * @since 2.8 - */ -abstract class SeqFactory[CC[X] <: Seq[X] with GenericTraversableTemplate[X, CC]] -extends GenSeqFactory[CC] with TraversableFactory[CC] { - - /** This method is called in a pattern match { case Seq(...) => }. 
- * - * @param x the selector value - * @return sequence wrapped in an option, if this is a Seq, otherwise none - */ - def unapplySeq[A](x: CC[A]): Some[CC[A]] = Some(x) - -} - diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala deleted file mode 100644 index d1511e2fb9f8..000000000000 --- a/src/library/scala/collection/generic/SeqForwarder.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic -import scala.collection._ -import scala.collection.immutable.Range - -/** This class implements a forwarder for sequences. It forwards - * all calls to a different sequence object except for - * - * - `toString`, `hashCode`, `equals`, `stringPrefix` - * - `newBuilder`, `view`, `toSeq` - * - all calls creating a new sequence of the same kind - * - * The above methods are forwarded by subclass `SeqProxy`. 
- * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") -trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] { - - protected override def underlying: Seq[A] - - override def length: Int = underlying.length - override def apply(idx: Int): A = underlying.apply(idx) - override def lengthCompare(len: Int): Int = underlying lengthCompare len - override def isDefinedAt(x: Int): Boolean = underlying isDefinedAt x - override def segmentLength(p: A => Boolean, from: Int): Int = underlying.segmentLength(p, from) - override def prefixLength(p: A => Boolean) = underlying prefixLength p - override def indexWhere(p: A => Boolean): Int = underlying indexWhere p - override def indexWhere(p: A => Boolean, from: Int): Int = underlying.indexWhere(p, from) - override def indexOf[B >: A](elem: B): Int = underlying indexOf elem - override def indexOf[B >: A](elem: B, from: Int): Int = underlying.indexOf(elem, from) - override def lastIndexOf[B >: A](elem: B): Int = underlying lastIndexOf elem - override def lastIndexOf[B >: A](elem: B, end: Int): Int = underlying.lastIndexOf(elem, end) - override def lastIndexWhere(p: A => Boolean): Int = underlying lastIndexWhere p - override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end) - override def reverseIterator: Iterator[A] = underlying.reverseIterator - override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = underlying.startsWith(that, offset) - override def startsWith[B](that: GenSeq[B]): Boolean = underlying startsWith that - override def endsWith[B](that: GenSeq[B]): Boolean = underlying endsWith that - override def indexOfSlice[B >: A](that: GenSeq[B]): Int = underlying indexOfSlice that - override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = underlying.indexOfSlice(that, from) - override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = 
underlying lastIndexOfSlice that - override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end) - override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that - override def contains[A1 >: A](elem: A1): Boolean = underlying contains elem - override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p) - override def indices: Range = underlying.indices -} diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala deleted file mode 100644 index 8b21cf1de3bd..000000000000 --- a/src/library/scala/collection/generic/SetFactory.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.language.higherKinds - -abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]] - extends GenSetFactory[CC] with GenericSeqCompanion[CC] diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala deleted file mode 100644 index c9083a47c91b..000000000000 --- a/src/library/scala/collection/generic/Shrinkable.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** This trait forms part of collections that can be reduced - * using a `-=` operator. 
- * - * @author Martin Odersky - * @since 2.8 - * @define coll shrinkable collection - * @define Coll `Shrinkable` - */ -trait Shrinkable[-A] { - - /** Removes a single element from this $coll. - * - * @param elem the element to remove. - * @return the $coll itself - */ - def -=(elem: A): this.type - - /** Removes two or more elements from this $coll. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return the $coll itself - */ - def -=(elem1: A, elem2: A, elems: A*): this.type = { - this -= elem1 - this -= elem2 - this --= elems - } - - /** Removes all elements produced by an iterator from this $coll. - * - * @param xs the iterator producing the elements to remove. - * @return the $coll itself - */ - def --=(xs: TraversableOnce[A]): this.type = { xs foreach -= ; this } -} diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala deleted file mode 100644 index adda134d2a2c..000000000000 --- a/src/library/scala/collection/generic/Signalling.scala +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import java.util.concurrent.atomic.AtomicInteger - -/** - * A message interface serves as a unique interface to the - * part of the collection capable of receiving messages from - * a different task. - * - * One example of use of this is the `find` method, which can use the - * signalling interface to inform worker threads that an element has - * been found and no further search is necessary. 
- * - * @author prokopec - * - * @define abortflag - * Abort flag being true means that a worker can abort and produce whatever result, - * since its result will not affect the final result of computation. An example - * of operations using this are `find`, `forall` and `exists` methods. - * - * @define indexflag - * The index flag holds an integer which carries some operation-specific meaning. For - * instance, `takeWhile` operation sets the index flag to the position of the element - * where the predicate fails. Other workers may check this index against the indices - * they are working on and return if this index is smaller than their index. Examples - * of operations using this are `takeWhile`, `dropWhile`, `span` and `indexOf`. - */ -trait Signalling { - /** - * Checks whether an abort signal has been issued. - * - * $abortflag - * @return the state of the abort - */ - def isAborted: Boolean - - /** - * Sends an abort signal to other workers. - * - * $abortflag - */ - def abort(): Unit - - /** - * Returns the value of the index flag. - * - * $indexflag - * @return the value of the index flag - */ - def indexFlag: Int - - /** - * Sets the value of the index flag. - * - * $indexflag - * @param f the value to which the index flag is set. - */ - def setIndexFlag(f: Int) - - /** - * Sets the value of the index flag if argument is greater than current value. - * This method does this atomically. - * - * $indexflag - * @param f the value to which the index flag is set - */ - def setIndexFlagIfGreater(f: Int) - - /** - * Sets the value of the index flag if argument is lesser than current value. - * This method does this atomically. - * - * $indexflag - * @param f the value to which the index flag is set - */ - def setIndexFlagIfLesser(f: Int) - - /** - * A read only tag specific to the signalling object. It is used to give - * specific workers information on the part of the collection being operated on. 
- */ - def tag: Int -} - -/** - * This signalling implementation returns default values and ignores received signals. - */ -class DefaultSignalling extends Signalling with VolatileAbort { - def indexFlag = -1 - def setIndexFlag(f: Int) {} - def setIndexFlagIfGreater(f: Int) {} - def setIndexFlagIfLesser(f: Int) {} - - def tag = -1 -} - -/** - * An object that returns default values and ignores received signals. - */ -object IdleSignalling extends DefaultSignalling - -/** - * A mixin trait that implements abort flag behaviour using volatile variables. - */ -trait VolatileAbort extends Signalling { - @volatile private var abortflag = false - override def isAborted = abortflag - override def abort() = abortflag = true -} - -/** - * A mixin trait that implements index flag behaviour using atomic integers. - * The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater` - * and `setIndexIfLesser` are lock-free and support only monotonic changes. - */ -trait AtomicIndexFlag extends Signalling { - private val intflag: AtomicInteger = new AtomicInteger(-1) - abstract override def indexFlag = intflag.get - abstract override def setIndexFlag(f: Int) = intflag.set(f) - abstract override def setIndexFlagIfGreater(f: Int) = { - var loop = true - do { - val old = intflag.get - if (f <= old) loop = false - else if (intflag.compareAndSet(old, f)) loop = false - } while (loop) - } - abstract override def setIndexFlagIfLesser(f: Int) = { - var loop = true - do { - val old = intflag.get - if (f >= old) loop = false - else if (intflag.compareAndSet(old, f)) loop = false - } while (loop) - } -} - -/** - * An implementation of the signalling interface using delegates. - */ -trait DelegatedSignalling extends Signalling { - /** - * A delegate that method calls are redirected to. 
- */ - var signalDelegate: Signalling - - def isAborted = signalDelegate.isAborted - def abort() = signalDelegate.abort() - - def indexFlag = signalDelegate.indexFlag - def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f) - def setIndexFlagIfGreater(f: Int) = signalDelegate.setIndexFlagIfGreater(f) - def setIndexFlagIfLesser(f: Int) = signalDelegate.setIndexFlagIfLesser(f) - - def tag = signalDelegate.tag -} - -/** - * Class implementing delegated signalling. - */ -class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling - -/** - * Class implementing delegated signalling, but having its own distinct `tag`. - */ -class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg) diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala deleted file mode 100644 index 43be8cb83ba5..000000000000 --- a/src/library/scala/collection/generic/Sizing.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** A trait for objects which have a size. - */ -trait Sizing { - def size: Int -} diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala deleted file mode 100644 index 2dd6409b540b..000000000000 --- a/src/library/scala/collection/generic/SliceInterval.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** A container for the endpoints of a collection slice. - * The constructor is private to enforce the invariants: - * from >= 0, until >= 0, from <= until. - */ -private[collection] class SliceInterval private (val from: Int, val until: Int) { - // The width of this slice from end to end. This is the - // maximum size of the collection slice, but the collection - // need not have this many (or any) elements. Since - // from <= until is a constructor invariant, we don't have to - // check for negative values. - def width = until - from - - /** Returns a new SliceInterval with endpoints calculated in - * terms of the original collection. - * Example: - * {{{ - * val coll = (1 to 100).view.slice(10, 30).slice(1, 3) - * // the second call to slice causes the interval to - * // be recalculated: the result is SliceInterval(11, 13). - * }}} - */ - def recalculate(_from: Int, _until: Int): SliceInterval = { - val lo = _from max 0 - val elems = scala.math.min(_until - lo, width) - val start = from + lo - - if (elems <= 0) new SliceInterval(from, from) - else new SliceInterval(start, start + elems) - } - def recalculate(interval: SliceInterval): SliceInterval = - recalculate(interval.from, interval.until) -} - -object SliceInterval { - def apply(from: Int, until: Int) = { - val lo = from max 0 - val hi = until max 0 - - if (hi <= lo) new SliceInterval(lo, lo) - else new SliceInterval(lo, hi) - } -} diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala deleted file mode 100644 index fb428397a68e..000000000000 --- a/src/library/scala/collection/generic/Sorted.scala +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -/** Any collection (including maps) whose keys (or elements) are ordered. - * - * @author Sean McDirmid - * @since 2.8 - */ -trait Sorted[K, +This <: Sorted[K, This]] { - def ordering : Ordering[K] - - /** The current collection */ - protected def repr: This - - /** return as a projection the set of keys in this collection */ - def keySet: SortedSet[K] - - /** Returns the first key of the collection. */ - def firstKey: K - - /** Returns the last key of the collection. */ - def lastKey: K - - /** Comparison function that orders keys. */ - def compare(k0: K, k1: K): Int = ordering.compare(k0, k1) - - /** Creates a ranged projection of this collection. Any mutations in the - * ranged projection will update this collection and vice versa. - * - * Note: keys are not guaranteed to be consistent between this collection - * and the projection. This is the case for buffers where indexing is - * relative to the projection. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * `None` if there is no lower bound. - * @param until The upper-bound (exclusive) of the ranged projection. - * `None` if there is no upper bound. - */ - def rangeImpl(from: Option[K], until: Option[K]): This - - /** Creates a ranged projection of this collection with no upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - */ - def from(from: K): This = rangeImpl(Some(from), None) - - /** Creates a ranged projection of this collection with no lower-bound. - * - * @param until The upper-bound (exclusive) of the ranged projection. 
- */ - def until(until: K): This = rangeImpl(None, Some(until)) - - /** Creates a ranged projection of this collection with both a lower-bound - * and an upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * @param until The upper-bound (exclusive) of the ranged projection. - */ - def range(from: K, until: K): This = rangeImpl(Some(from), Some(until)) - - /** Create a range projection of this collection with no lower-bound. - * @param to The upper-bound (inclusive) of the ranged projection. - */ - def to(to: K): This = { - val i = keySet.from(to).iterator - if (i.isEmpty) return repr - val next = i.next() - if (compare(next, to) == 0) - if (i.isEmpty) repr - else until(i.next()) - else - until(next) - } - - /** - * Creates an iterator over all the keys(or elements) contained in this - * collection greater than or equal to `start` - * according to the ordering of this collection. x.keysIteratorFrom(y) - * is equivalent to but often more efficient than - * x.from(y).keysIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def keysIteratorFrom(start: K): Iterator[K] - - protected def hasAll(j: Iterator[K]): Boolean = { - val i = keySet.iterator - if (i.isEmpty) return j.isEmpty - - var in = i.next() - while (j.hasNext) { - val jn = j.next() - while ({ - val n = compare(jn, in) - if (n == 0) false - else if (n < 0) return false - else if (!i.hasNext) return false - else true - }) in = i.next() - } - true - } -} diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala deleted file mode 100644 index 1a58d879e728..000000000000 --- a/src/library/scala/collection/generic/SortedMapFactory.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.{Builder, MapBuilder} -import scala.language.higherKinds - -/** A template for companion objects of mutable.Map and subclasses thereof. - * - * @since 2.8 - */ -abstract class SortedMapFactory[CC[A, B] <: SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] { - - type Coll = CC[_, _] - - def empty[A, B](implicit ord: Ordering[A]): CC[A, B] - - def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = { - if (elems.isEmpty) empty - else (newBuilder[A, B](ord) ++= elems).result() - } - - def newBuilder[A, B](implicit ord: Ordering[A]): Builder[(A, B), CC[A, B]] = - new MapBuilder[A, B, CC[A, B]](empty(ord)) - - class SortedMapCanBuildFrom[A, B](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, (A, B), CC[A, B]] { - private[collection] def factory = SortedMapFactory.this - private[collection] def ordering = ord - def apply(from: Coll) = newBuilder[A, B](ord) - def apply() = newBuilder[A, B] - } -} diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala deleted file mode 100644 index 6081d23eb6eb..000000000000 --- a/src/library/scala/collection/generic/SortedSetFactory.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import mutable.{Builder, SetBuilder} -import scala.language.higherKinds - -/** A template for companion objects of Set and subclasses thereof. 
- * - * @since 2.8 - */ -abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A]]] { - type Coll = CC[_] - - def empty[A](implicit ord: Ordering[A]): CC[A] - - def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = { - if (elems isEmpty) empty - else (newBuilder[A](ord) ++= elems).result() - } - - def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty) - - implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord) - - class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] { - private[collection] def factory = SortedSetFactory.this - private[collection] def ordering = ord - def apply(from: Coll) = newBuilder[A](ord) - def apply() = newBuilder[A](ord) - } -} diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala index 4e62ca8287ce..f8af03581aad 100644 --- a/src/library/scala/collection/generic/Subtractable.scala +++ b/src/library/scala/collection/generic/Subtractable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,61 +14,49 @@ package scala package collection package generic -import scala.collection.immutable.{HashSet, TreeMap, TreeSet} - - /** This trait represents collection-like objects that can be reduced - * using a '+' operator. It defines variants of `-` and `--` - * as convenience methods in terms of single-element removal `-`. - * @tparam A the type of the elements of the $coll. - * @tparam Repr the type of the $coll itself - * @author Martin Odersky - * @since 2.8 - * @define coll collection - * @define Coll Subtractable - */ + * using a '+' operator. 
/** This trait represents collection-like objects that can be reduced
 *  using a '+' operator. It defines variants of `-` and `--`
 *  as convenience methods in terms of single-element removal `-`.
 *
 *  @tparam A    the type of the elements of the $coll.
 *  @tparam Repr the type of the $coll itself
 *  @define coll collection
 *  @define Coll Subtractable
 */
@deprecated("Subtractable is deprecated. This is now implemented as part of SetOps, MapOps, etc.", "2.13.0")
trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>

  /** The representation object of type `Repr` which contains the collection's elements. */
  protected def repr: Repr

  /** Creates a new $coll from this $coll with an element removed.
   *
   *  @param elem the element to remove
   *  @return a new collection that contains all elements of the current $coll
   *          except one less occurrence of `elem`.
   */
  def -(elem: A): Repr

  /** Creates a new $coll from this $coll with some elements removed.
   *
   *  This method takes two or more elements to be removed. Another overloaded
   *  variant of this method handles the case where a single element is
   *  removed.
   *
   *  @param elem1 the first element to remove.
   *  @param elem2 the second element to remove.
   *  @param elems the remaining elements to remove.
   *  @return a new $coll that contains all elements of the current $coll
   *          except one less occurrence of each of the given elements.
   */
  def -(elem1: A, elem2: A, elems: A*): Repr =
    this - elem1 - elem2 -- elems

  /** Creates a new $coll from this $coll by removing all elements of another
   *  collection.
   *
   *  @param xs the collection containing the removed elements.
   *  @return a new $coll that contains all elements of the current $coll
   *          except one less occurrence of each of the elements of `xs`.
   */
  def --(xs: IterableOnce[A]): Repr =
    // `foldLeft` is the non-deprecated spelling of the `/:` operator (deprecated since 2.13.0).
    xs.iterator.foldLeft(repr)(_ - _)
}
- * - * @since 2.8 - * - * @define coll collection - * @define Coll Traversable - * @define factoryInfo - * This object provides a set of operations to create `$Coll` values. - * @author Martin Odersky - * @define canBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * @see CanBuildFrom - * @define genericCanBuildFromInfo - * The standard `CanBuildFrom` instance for $Coll objects. - * The created value is an instance of class `GenericCanBuildFrom`, - * which forwards calls to create a new builder to the - * `genericBuilder` method of the requesting collection. - * @see CanBuildFrom - * @see GenericCanBuildFrom - */ -trait TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]] - extends GenTraversableFactory[CC] with GenericSeqCompanion[CC] - diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala deleted file mode 100644 index 311406a45100..000000000000 --- a/src/library/scala/collection/generic/TraversableForwarder.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic - -import scala.collection._ -import mutable.{ Buffer, StringBuilder } -import immutable.{ List, Stream } -import scala.reflect.ClassTag - -/** This trait implements a forwarder for traversable objects. It forwards - * all calls to a different traversable, except for: - * - * - `toString`, `hashCode`, `equals`, `stringPrefix` - * - `newBuilder`, `view` - * - * All calls creating a new traversable of the same kind. 
- * - * @author Martin Odersky - * @since 2.8 - */ -@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") -trait TraversableForwarder[+A] extends Traversable[A] { - /** The traversable object to which calls are forwarded. */ - protected def underlying: Traversable[A] - - override def foreach[U](f: A => U): Unit = underlying foreach f - override def isEmpty: Boolean = underlying.isEmpty - override def nonEmpty: Boolean = underlying.nonEmpty - override def size: Int = underlying.size - override def hasDefiniteSize = underlying.hasDefiniteSize - override def forall(p: A => Boolean): Boolean = underlying forall p - override def exists(p: A => Boolean): Boolean = underlying exists p - override def count(p: A => Boolean): Int = underlying count p - override def find(p: A => Boolean): Option[A] = underlying find p - override def foldLeft[B](z: B)(op: (B, A) => B): B = underlying.foldLeft(z)(op) - override def /: [B](z: B)(op: (B, A) => B): B = underlying./:(z)(op) - override def foldRight[B](z: B)(op: (A, B) => B): B = underlying.foldRight(z)(op) - override def :\ [B](z: B)(op: (A, B) => B): B = underlying.:\(z)(op) - override def reduceLeft[B >: A](op: (B, A) => B): B = underlying.reduceLeft(op) - override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = underlying.reduceLeftOption(op) - override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op) - override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = underlying.reduceRightOption(op) - override def sum[B >: A](implicit num: Numeric[B]): B = underlying sum num - override def product[B >: A](implicit num: Numeric[B]): B = underlying product num - override def min[B >: A](implicit cmp: Ordering[B]): A = underlying min cmp - override def max[B >: A](implicit cmp: Ordering[B]): A = underlying max cmp - override def head: A = underlying.head - override def headOption: Option[A] = underlying.headOption - override def 
last: A = underlying.last - override def lastOption: Option[A] = underlying.lastOption - override def copyToBuffer[B >: A](dest: Buffer[B]) = underlying.copyToBuffer(dest) - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = underlying.copyToArray(xs, start, len) - override def copyToArray[B >: A](xs: Array[B], start: Int) = underlying.copyToArray(xs, start) - override def copyToArray[B >: A](xs: Array[B]) = underlying.copyToArray(xs) - override def toArray[B >: A: ClassTag]: Array[B] = underlying.toArray - override def toList: List[A] = underlying.toList - override def toIterable: Iterable[A] = underlying.toIterable - override def toSeq: Seq[A] = underlying.toSeq - override def toIndexedSeq = underlying.toIndexedSeq - override def toBuffer[B >: A] = underlying.toBuffer - override def toStream: Stream[A] = underlying.toStream - override def toSet[B >: A]: immutable.Set[B] = underlying.toSet - override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = underlying.toMap(ev) - override def mkString(start: String, sep: String, end: String): String = underlying.mkString(start, sep, end) - override def mkString(sep: String): String = underlying.mkString(sep) - override def mkString: String = underlying.mkString - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = underlying.addString(b, start, sep, end) - override def addString(b: StringBuilder, sep: String): StringBuilder = underlying.addString(b, sep) - override def addString(b: StringBuilder): StringBuilder = underlying.addString(b) -} diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala index 0625db6fed04..5aaf90547384 100644 --- a/src/library/scala/collection/generic/package.scala +++ b/src/library/scala/collection/generic/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
package scala.collection

/** Deprecated type aliases kept so that pre-2.13 code referring to types that
 *  used to live in `scala.collection.generic` keeps compiling (with deprecation
 *  warnings) against the redesigned 2.13 collections.
 */
package object generic {
  @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0")
  type Clearable = scala.collection.mutable.Clearable

  @deprecated("Use scala.collection.BuildFrom instead", "2.13.0")
  type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C]

  @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0")
  type Growable[-A] = scala.collection.mutable.Growable[A]

  @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0")
  type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A]

  @deprecated("Use IsIterable instead", "2.13.0")
  type IsTraversableLike[Repr] = IsIterable[Repr]

  @deprecated("Use IsIterableOnce instead", "2.13.0")
  type IsTraversableOnce[Repr] = IsIterableOnce[Repr]
}
+1,694 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.runtime.ScalaRunTime +import scala.util.Sorting +import scala.util.hashing.MurmurHash3 + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define coll immutable array + * @define Coll `ArraySeq` + */ +sealed abstract class ArraySeq[+A] + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] + with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] + with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] + with Serializable { + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + protected def elemTag: ClassTag[_] + + override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged + + /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break + * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. + * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an + * array of a supertype or subtype of the element type. 
   */
  def unsafeArray: Array[_]

  protected def evidenceIterableFactory: ArraySeq.type = ArraySeq
  protected def iterableEvidence: ClassTag[A @uncheckedVariance] = elemTag.asInstanceOf[ClassTag[A]]

  /** A stepper over the elements; concrete subclasses return one specialized for their element type. */
  def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit

  @throws[ArrayIndexOutOfBoundsException]
  def apply(i: Int): A

  /** A copy of this sequence with the element at `index` replaced by `elem`.
   *  Copies into an `Array[Any]`, so a primitive-specialized receiver produces a boxed result.
   */
  override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = {
    val dest = new Array[Any](length)
    Array.copy(unsafeArray, 0, dest, 0, length)
    // Throws ArrayIndexOutOfBoundsException for an out-of-range index (after the copy).
    dest(index) = elem
    ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]]
  }

  /** Maps `f` over the elements into a boxed `Array[Any]` backing the result. */
  override def map[B](f: A => B): ArraySeq[B] = {
    val a = new Array[Any](size)
    var i = 0
    while (i < a.length) {
      a(i) = f(apply(i))
      i += 1
    }
    ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
  }

  // Prepend/append go through Array[Any], so the result is always object-backed.
  override def prepended[B >: A](elem: B): ArraySeq[B] =
    ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]]

  override def appended[B >: A](elem: B): ArraySeq[B] =
    ArraySeq.unsafeWrapArray(unsafeArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]]

  /** Fast concatenation of two [[ArraySeq]]s.
   *
   * @return null if optimisation not possible.
   */
  private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = {
    // Optimise concatenation of two ArraySeqs
    // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast
    if (isEmpty)
      that
    else if (that.isEmpty)
      this
    else {
      val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]]
      val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]]
      val mismatch = thisIsObj != thatIsObj
      if (mismatch)
        // Combining primitives and objects: abort (null is the "not optimisable" sentinel)
        null
      else if (thisIsObj) {
        // A and B are objects
        val ax = this.unsafeArray.asInstanceOf[Array[A]]
        val ay = that.unsafeArray.asInstanceOf[Array[B]]
        val len = ax.length + ay.length
        val a = new Array[AnyRef](len)
        System.arraycopy(ax, 0, a, 0, ax.length)
        System.arraycopy(ay, 0, a, ax.length, ay.length)
        ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
      } else {
        // A is a primitive and B = A. Use this instance's protected ClassTag.
        val ax = this.unsafeArray.asInstanceOf[Array[A]]
        val ay = that.unsafeArray.asInstanceOf[Array[A]]
        val len = ax.length + ay.length
        val a = iterableEvidence.newArray(len)
        System.arraycopy(ax, 0, a, 0, ax.length)
        System.arraycopy(ay, 0, a, ax.length, ay.length)
        ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
      }
    }
  }

  override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): ArraySeq[B] = {
    // Fallback used when the fast array-to-array path declines (returns null).
    def genericResult = {
      val k = suffix.knownSize
      if (k == 0) this
      else {
        val b = ArrayBuilder.make[Any]
        if(k >= 0) b.sizeHint(k + unsafeArray.length)
        b.addAll(unsafeArray)
        b.addAll(suffix)
        ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]]
      }
    }

    suffix match {
      case that: ArraySeq[_] =>
        val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]])
        if (result == null) genericResult
        else result
      case _ =>
        genericResult
    }
  }

  override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): ArraySeq[B] = {
    // Fallback used when the fast array-to-array path declines (returns null).
    def genericResult = {
      val k = prefix.knownSize
      if (k == 0) this
      else {
        val b = ArrayBuilder.make[Any]
        if(k >= 0) b.sizeHint(k + unsafeArray.length)
        b.addAll(prefix)
        // With unknown prefix size, hint again now that the prefix length is known.
        if(k < 0) b.sizeHint(b.length + unsafeArray.length)
        b.addAll(unsafeArray)
        ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]]
      }
    }

    prefix match {
      case that: ArraySeq[_] =>
        // Reuse the append fast path with the operands swapped.
        val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this)
        if (result == null) genericResult
        else result
      case _ =>
        genericResult
    }
  }

  override def zip[B](that: collection.IterableOnce[B]): ArraySeq[(A, B)] =
    that match {
      case bs: ArraySeq[B] =>
        // Both sides are indexed: build pairs directly, truncating to the shorter length.
        ArraySeq.tabulate(length min bs.length) { i =>
          (apply(i), bs(i))
        }
      case _ =>
        strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder)
    }

  // take/drop/slice return `this` unchanged when the request covers the whole sequence.
  override def take(n: Int): ArraySeq[A] =
    if (unsafeArray.length <= n)
      this
    else
      ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).take(n)).asInstanceOf[ArraySeq[A]]

  override def takeRight(n: Int): ArraySeq[A] =
    if (unsafeArray.length <= n)
      this
    else
      ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).takeRight(n)).asInstanceOf[ArraySeq[A]]

  override def drop(n: Int): ArraySeq[A] =
    if (n <= 0)
      this
    else
      ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).drop(n)).asInstanceOf[ArraySeq[A]]

  override def dropRight(n: Int): ArraySeq[A] =
    if (n <= 0)
      this
    else
      ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).dropRight(n)).asInstanceOf[ArraySeq[A]]

  override def slice(from: Int, until: Int): ArraySeq[A] =
    if (from <= 0 && unsafeArray.length <= until)
      this
    else
      ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).slice(from, until)).asInstanceOf[ArraySeq[A]]

  override def foldLeft[B](z: B)(f: (B, A) => B): B = {
    // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast
    // as the same while-loop over this instead of unsafeArray.
+ val array = unsafeArray + var b = z + var i = 0 + while (i < array.length) { + val a = array(i).asInstanceOf[A] + b = f(b, a) + i += 1 + } + b + } + + override def foldRight[B](z: B)(f: (A, B) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast + // as the same while-loop over this instead of unsafeArray. + val array = unsafeArray + var b = z + var i = array.length + while (i > 0) { + i -= 1 + val a = array(i).asInstanceOf[A] + b = f(a, b) + } + b + } + + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).tail).asInstanceOf[ArraySeq[A]] + + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).reverse).asInstanceOf[ArraySeq[A]] + + override protected[this] def className = "ArraySeq" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(unsafeArray, 0, xs, start, copied) + } + copied + } + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = + if(unsafeArray.length <= 1) this + else { + val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] + } +} + +/** + * $factoryInfo + * @define coll immutable array + * @define Coll `ArraySeq` + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) + + def empty[A : ClassTag]: ArraySeq[A] = emptyImpl + + def from[A](it: scala.collection.IterableOnce[A])(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + case as: ArraySeq[A] => as + case _ => 
      unsafeWrapArray(Array.from[A](it))
  }

  def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] =
    // Accumulate in an ArrayBuffer, then copy once into a correctly-typed array.
    ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray))

  override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem)

  override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = {
    // Negative `n` yields an empty result rather than an error.
    val elements = Array.ofDim[A](scala.math.max(n, 0))
    var i = 0
    while (i < n) {
      ScalaRunTime.array_update(elements, i, f(i))
      i = i + 1
    }
    ArraySeq.unsafeWrapArray(elements)
  }

  /**
   * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type
   * without copying. Any changes to wrapped array will break the expected immutability.
   *
   * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without
   * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime,
   * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast:
   * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still
   * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing
   * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a
   * `ClassCastException` at runtime.
+ */ + def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { + def elemTag: ClassTag[T] = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): T = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any): Boolean = that match { + case that: ofRef[_] => + Array.equals( + this.unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { + if(unsafeArray.length <= 1) this + else { + val a = unsafeArray.clone() + Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) + new ArraySeq.ofRef(a) + } + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) + else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { + // Type erases to 
`ManifestFactory.ByteManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Byte.type = ClassTag.Byte + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Byte = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = + if(length <= 1) this + else if(ord eq Ordering.Byte) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofByte(a) + } else super.sorted[B] + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { + // Type erases to `ManifestFactory.ShortManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Short.type = ClassTag.Short + def 
length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Short = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = + if(length <= 1) this + else if(ord eq Ordering.Short) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofShort(a) + } else super.sorted[B] + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { + // Type erases to `ManifestFactory.CharManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Char.type = ClassTag.Char + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Char = unsafeArray(i) + override def hashCode 
= MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = + if(length <= 1) this + else if(ord eq Ordering.Char) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofChar(a) + } else super.sorted[B] + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { + // Type erases to `ManifestFactory.IntManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Int.type = ClassTag.Int + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Int = unsafeArray(i) + override def 
hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = + if(length <= 1) this + else if(ord eq Ordering.Int) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofInt(a) + } else super.sorted[B] + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { + // Type erases to `ManifestFactory.LongManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Long.type = ClassTag.Long + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Long = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) 
+ } + override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = + if(length <= 1) this + else if(ord eq Ordering.Long) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofLong(a) + } else super.sorted[B] + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new LongArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] { + // Type erases to `ManifestFactory.FloatManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Float.type = ClassTag.Float + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Float = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: 
StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { + // Type erases to `ManifestFactory.DoubleManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Double.type = ClassTag.Double + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Double = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Double](index: Int, elem: B): 
ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { + // Type erases to `ManifestFactory.BooleanManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Boolean.type = ClassTag.Boolean + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Boolean = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = + if(length <= 1) this + else if(ord eq Ordering.Boolean) { + val a = unsafeArray.clone() + Sorting.stableSort(a) + new ArraySeq.ofBoolean(a) + } else super.sorted[B] + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => 
new ArraySeq.ofBoolean(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { + // Type erases to `ManifestFactory.UnitManifest`, but can't annotate that because it's not accessible + protected def elemTag: ClassTag.Unit.type = ClassTag.Unit + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Unit = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala index e3bd0cbfcffd..a9b5837ff566 100644 --- a/src/library/scala/collection/immutable/BitSet.scala +++ b/src/library/scala/collection/immutable/BitSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,35 +14,38 @@ package scala package collection package immutable -import generic._ -import BitSetLike.{LogWL, updateArray} +import BitSetOps.{LogWL, updateArray} import mutable.Builder +import scala.annotation.{implicitNotFound, nowarn} /** A class for immutable bitsets. 
- * $bitsetinfo - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-bitsets"Scala's Collection Library overview"]] - * section on `Immutable BitSets` for more information. - * - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -@SerialVersionUID(1611436763290191562L) -abstract class BitSet extends scala.collection.AbstractSet[Int] - with SortedSet[Int] - with scala.collection.BitSet - with BitSetLike[BitSet] - with Serializable { - override def empty = BitSet.empty + * $bitsetinfo + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] + * section on `Immutable BitSets` for more information. + * + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +sealed abstract class BitSet + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { - protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) + override def unsorted: Set[Int] = this - /** Update word at index `idx`; enlarge set if `idx` outside range of set. - */ - protected def updateWord(idx: Int, w: Long): BitSet + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory: BitSet.type = BitSet + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) - /** Adds element to bitset, returning a new set. 
- */ - def + (elem: Int): BitSet = { + def incl(elem: Int): BitSet = { require(elem >= 0, "bitset element must be >= 0") if (contains(elem)) this else { @@ -51,37 +54,58 @@ abstract class BitSet extends scala.collection.AbstractSet[Int] } } - /** Removes element from bitset, returning a new set - */ - def - (elem: Int): BitSet = { + def excl(elem: Int): BitSet = { require(elem >= 0, "bitset element must be >= 0") if (contains(elem)) { val idx = elem >> LogWL updateWord(idx, word(idx) & ~(1L << elem)) } else this } + + /** Update word at index `idx`; enlarge set if `idx` outside range of set. + */ + protected def updateWord(idx: Int, w: Long): BitSet + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) } -/** $factoryInfo - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -object BitSet extends BitSetFactory[BitSet] { - /** The empty bitset */ - val empty: BitSet = 
new BitSet1(0L) +/** + * $factoryInfo + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { - private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + it match { + case bs: BitSet => bs + case _ => (newBuilder ++= it).result() + } - /** A builder that takes advantage of mutable BitSets. */ - def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] { - private[this] val b = new mutable.BitSet - def += (x: Int) = { b += x; this } - def clear() = b.clear() - def result() = b.toImmutable - } + final val empty: BitSet = new BitSet1(0L) + + def newBuilder: Builder[Int, BitSet] = + mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) - /** $bitsetCanBuildFrom */ - implicit val canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom + private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) /** A bitset containing all the bits in an array */ def fromBitMask(elems: Array[Long]): BitSet = { @@ -90,15 +114,14 @@ object BitSet extends BitSetFactory[BitSet] { else if (len == 1) new BitSet1(elems(0)) else if (len == 2) createSmall(elems(0), elems(1)) else { - val a = new Array[Long](len) - Array.copy(elems, 0, a, 0, len) + val a = java.util.Arrays.copyOf(elems, len) new BitSetN(a) } } /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ + * array without copying. 
+ */ def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { val len = elems.length if (len == 0) empty @@ -107,64 +130,246 @@ object BitSet extends BitSetFactory[BitSet] { else new BitSetN(elems) } - @SerialVersionUID(2260107458435649300L) + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") class BitSet1(val elems: Long) extends BitSet { - protected def nwords = 1 - protected def word(idx: Int) = if (idx == 0) elems else 0L - protected def updateWord(idx: Int, w: Long): BitSet = + protected[collection] def nwords = 1 + protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = if (idx == 0) new BitSet1(w) else if (idx == 1) createSmall(elems, w) - else fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) - override def head: Int = - if (elems == 0L) throw new NoSuchElementException("Empty BitSet") - else java.lang.Long.numberOfTrailingZeros(elems) - override def tail: BitSet = - if (elems == 0L) throw new NoSuchElementException("Empty BitSet") - else new BitSet1(elems - java.lang.Long.lowestOneBit(elems)) + else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case _ => + val newElems = elems & ~bs.word(0) + if (newElems == 0L) this.empty else new BitSet1(newElems) + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) + if (_elems == 0L) this.empty else new BitSet1(_elems) + } } - @SerialVersionUID(-860417644893387539L) - class BitSet2(val elems0: Long, elems1: Long) extends BitSet { - protected def nwords = 2 - protected def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L - protected def updateWord(idx: Int, w: Long): BitSet = + 
@deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { + protected[collection] def nwords = 2 + protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = if (idx == 0) new BitSet2(w, elems1) else if (idx == 1) createSmall(elems0, w) - else fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) - override def head: Int = - if (elems0 == 0L) { - if (elems1 == 0) throw new NoSuchElementException("Empty BitSet") - 64 + java.lang.Long.numberOfTrailingZeros(elems1) + else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case 1 => + new BitSet2(elems0 & ~bs.word(0), elems1) + case _ => + val _elems0 = elems0 & ~bs.word(0) + val _elems1 = elems1 & ~bs.word(1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } else { + new BitSet1(_elems0) + } + } else { + new BitSet2(_elems0, _elems1) + } } - else java.lang.Long.numberOfTrailingZeros(elems0) - override def tail: BitSet = - if (elems0 == 0L) { - if (elems1 == 0L) throw new NoSuchElementException("Empty BitSet") - createSmall(elems0, elems1 - java.lang.Long.lowestOneBit(elems1)) + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) + val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } + else new BitSet1(_elems0) } - else new BitSet2(elems0 - java.lang.Long.lowestOneBit(elems0), elems1) + else new BitSet2(_elems0, _elems1) + } } - /** The implementing class for bit sets with elements >= 128 (exceeding - * the capacity 
of two long values). The constructor wraps an existing - * bit mask without copying, thus exposing a mutable part of the internal - * implementation. Care needs to be taken not to modify the exposed - * array. - */ - @SerialVersionUID(807040099560956194L) + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") class BitSetN(val elems: Array[Long]) extends BitSet { - protected def nwords = elems.length - protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L - protected def updateWord(idx: Int, w: Long): BitSet = fromBitMaskNoCopy(updateArray(elems, idx, w)) - override def tail: BitSet = { - val n = nwords - var i = 0 - while (i < n) { - val wi = word(i) - if (wi != 0L) return fromBitMaskNoCopy(updateArray(elems, i, wi - java.lang.Long.lowestOneBit(wi))) - i += 1 + protected[collection] def nwords = elems.length + + protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L + + protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. Two extra concerns for optimization are described below. + * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + * + * Tracking Changes: + * If the two sets are disjoint, then we can return `this`. Therefor, until at least one change is detected, + * we check each word for if it has changed from its corresponding word in `this`. 
Once a single change is + * detected, we stop checking because the cost of the new Array must be paid anyways. + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = oldFirstWord & ~bs.word(0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } else { + var i = bsnwords - 1 + var anyChanges = false + var currentWord = 0L + while (i >= 0 && !anyChanges) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newElems = elems.clone() + newElems(i + 1) = currentWord + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + this.fromBitMaskNoCopy(newElems) + } else { + this + } + } + case _ => super.diff(that) + } + + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): 
BitSet = { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = nwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } } - throw new NoSuchElementException("Empty BitSet") } + + override def toBitMask: Array[Long] = elems.clone() + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) } } diff --git a/src/library/scala/collection/immutable/ChampCommon.scala b/src/library/scala/collection/immutable/ChampCommon.scala new file mode 100644 index 000000000000..7b3e3949a126 --- /dev/null +++ b/src/library/scala/collection/immutable/ChampCommon.scala @@ -0,0 
+1,252 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import scala.collection.AbstractIterator +import java.lang.Integer.bitCount +import java.lang.Math.ceil +import java.lang.System.arraycopy + +private[collection] object Node { + final val HashCodeLength = 32 + + final val BitPartitionSize = 5 + + final val BitPartitionMask = (1 << BitPartitionSize) - 1 + + final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt + + final val BranchingFactor = 1 << BitPartitionSize + + final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask + + final def bitposFrom(mask: Int): Int = 1 << mask + + final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1)) + + final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos) + +} + +private[collection] abstract class Node[T <: Node[T]] { + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): T + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): Any + + def getHash(index: Int): Int + + def cachedJavaKeySetHashCode: Int + + private final def arrayIndexOutOfBounds(as: Array[_], ix:Int): ArrayIndexOutOfBoundsException = + new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1}") + + protected final def removeElement(as: Array[Int], ix: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected 
final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a + * depth-first pre-order traversal, which yields first all payload elements of the current + * node before traversing sub-nodes (left to right). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseIterator[A, T <: Node[T]] extends AbstractIterator[A] { + + import Node.MaxDepth + + // Note--this code is duplicated to a large extent both in + // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. + // If you change this code, check those also in case they also + // need to be modified. 
+ + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] var nodeCursorsAndLengths: Array[Int] = _ + private[this] var nodes: Array[T] = _ + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + + def this(rootNode: T) = { + this() + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + + return false + } + + final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() + +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base + * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseReverseIterator[A, T <: Node[T]] extends AbstractIterator[A] { + + import Node.MaxDepth + + protected var currentValueCursor: Int = -1 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) + private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] + + def this(rootNode: T) = { + this() + pushNode(rootNode) + searchNextValueNode() + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = node.payloadArity - 1 + } + + private final def pushNode(node: T): Unit = { + currentStackLevel = currentStackLevel + 1 + + nodeStack(currentStackLevel) = node + nodeIndex(currentStackLevel) = node.nodeArity - 1 + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for rightmost node that contains payload values, + * and pushes 
encountered sub-nodes on a stack for depth-first traversal. + */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1 + + if (nodeCursor >= 0) { + val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor) + pushNode(nextNode) + } else { + val currNode = nodeStack(currentStackLevel) + popNode() + + if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true } + } + } + + return false + } + + final def hasNext = (currentValueCursor >= 0) || searchNextValueNode() + +} diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala deleted file mode 100644 index 65d096e03ff0..000000000000 --- a/src/library/scala/collection/immutable/DefaultMap.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -/** A default map which implements the `+` and `-` - * methods of maps. It does so using the default builder for - * maps defined in the `Map` object. - * Instances that inherit from `DefaultMap[A, B]` still have to - * define: - * - * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * }}} - * - * It refers back to the original map. - * - * It might also be advisable to override `foreach` or - * `size` if efficient implementations can be found. - * - * @tparam A the type of the keys contained in this map. - * @tparam B the type of the values associated with the keys. - * - * @since 2.8 - */ -trait DefaultMap[A, +B] extends Map[A, B] { self => - - /** A default implementation which creates a new immutable map. 
- */ - override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { - val b = Map.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } - - /** A default implementation which creates a new immutable map. - */ - override def - (key: A): Map[A, B] = { - val b = newBuilder - for (kv <- this ; if kv._1 != key) b += kv - b.result() - } -} diff --git a/src/library/scala/collection/immutable/HasForeachEntry.scala b/src/library/scala/collection/immutable/HasForeachEntry.scala deleted file mode 100644 index 9863620d799c..000000000000 --- a/src/library/scala/collection/immutable/HasForeachEntry.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.immutable - -private[immutable] trait HasForeachEntry[A, +B] { - private[immutable] def foreachEntry[U](f: (A, B) => U): Unit -} diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 87253ec6eaf7..e9257f1948fc 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -11,1372 +11,2412 @@ */ package scala -package collection -package immutable +package collection.immutable -import java.{util => ju} +import java.lang.Integer.bitCount +import java.lang.System.arraycopy -import generic._ import scala.annotation.unchecked.{uncheckedVariance => uV} -import parallel.immutable.ParHashMap -import scala.runtime.{AbstractFunction1, AbstractFunction2} +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable, mutable.ReusableBuilder +import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} +import scala.runtime.AbstractFunction2 +import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 -/** This class implements immutable maps using a hash trie. - * - * '''Note:''' The builder of this hash map may return specialized representations for small maps. - * - * @tparam A the type of the keys contained in this hash map. - * @tparam B the type of the values associated with the keys. - * - * @author Martin Odersky - * @author Tiark Rompf - * @since 2.3 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash-tries "Scala's Collection Library overview"]] - * section on `Hash Tries` for more information. - * @define Coll `immutable.HashMap` - * @define coll immutable hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(2L) -sealed class HashMap[A, +B] extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, HashMap[A, B]] - with Serializable - with CustomParallelizable[(A, B), ParHashMap[A, B]] - with HasForeachEntry[A, B] -{ - import HashMap.{bufferSize, concatMerger, nullToEmpty} +/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. 
+ * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values associated with the keys in this hash map. + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ - override def size: Int = 0 +final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V]) + extends AbstractMap[K, V] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with DefaultSerializable { - override def empty = HashMap.empty[A, B] + def this() = this(MapNode.empty) - def iterator: Iterator[(A,B)] = Iterator.empty + // This release fence is present because rootNode may have previously been mutated during construction. + releaseFence() - override def foreach[U](f: ((A, B)) => U): Unit = () - private[immutable] def foreachEntry[U](f: (A, B) => U): Unit = () - override def hashCode(): Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - val hasher = new Map.HashCodeAccumulator() - foreachEntry(hasher) - hasher.finalizeHash - } - } + override def mapFactory: MapFactory[HashMap] = HashMap + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 - def get(key: A): Option[B] = - get0(key, computeHash(key), 0) + override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet - override def getOrElse[V1 >: B](key: A, default: => V1): V1 = - getOrElse0(key, computeHash(key), 0, default) + private[immutable] final class HashKeySet extends ImmutableKeySet { - override final def contains(key: A): Boolean = - contains0(key, computeHash(key), 0) + private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = + if (newHashMap eq HashMap.this) this else newHashMap.keySet + private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = + if (newRootNode eq rootNode) this else new
HashMap(newRootNode).keySet - override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] = - updated0(key, computeHash(key), 0, value, null, null) + override def incl(elem: K): Set[K] = { + val originalHash = elem.## + val improvedHash = improve(originalHash) + val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) + newKeySetOrThis(newNode) + } + override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) + override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) + override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) + } - override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] = - updated0(kv._1, computeHash(kv._1), 0, kv._2, kv, null) + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleIterator[K, V](rootNode) + } - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] = - this + elem1 + elem2 ++ elems + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapKeyIterator[K, V](rootNode) + } + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else new MapValueIterator[K, V](rootNode) + } - def - (key: A): HashMap[A, B] = - removed0(key, computeHash(key), 0) + protected[immutable] def reverseIterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleReverseIterator[K, V](rootNode) + } - override def tail: HashMap[A, B] = this - head._1 + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. 
+ parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i))) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import collection.convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i))) + } + s.asInstanceOf[S with EfficientSplit] + } - override def filter(p: ((A, B)) => Boolean) = { - val buffer = new Array[HashMap[A, B]](bufferSize(size)) - nullToEmpty(filter0(p, false, 0, buffer, 0)) + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import collection.convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i))) + } + s.asInstanceOf[S with EfficientSplit] } - override def filterNot(p: ((A, B)) => Boolean) = { - val buffer = new Array[HashMap[A, B]](bufferSize(size)) - nullToEmpty(filter0(p, true, 0, buffer, 0)) + override final def contains(key: K): Boolean = { + val 
keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0) } - protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = null + override def apply(key: K): V = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.apply(key, keyUnimprovedHash, keyHash, 0) + } - protected def elemHashCode(key: A) = key.## + def get(key: K): Option[V] = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.get(key, keyUnimprovedHash, keyHash, 0) + } - protected final def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default) } - private[collection] def computeHash(key: A) = improve(elemHashCode(key)) + @inline private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode) - import HashMap.{Merger, MergeFunction, liftMerger} + def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = { + val keyUnimprovedHash = key.## + newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true)) + } - private[collection] def get0(key: A, hash: Int, level: Int): Option[B] = None - private[collection] def getOrElse0[V1 >: B](key: A, hash: Int, level: Int, f: => V1): V1 = f - protected def contains0(key: A, hash: Int, level: Int): Boolean = false - private[collection] def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = - new HashMap.HashMap1(key, hash, value, kv) + // preemptively overridden in 
anticipation of performance optimizations + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] = + super.updatedWith[V1](key)(remappingFunction) - protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this + def removed(key: K): HashMap[K, V] = { + val keyUnimprovedHash = key.## + newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0)) + } - protected def writeReplace(): AnyRef = new HashMap.SerializationProxy(this) + override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]): HashMap[K, V1] = that match { + case hm: HashMap[K, V1] => + if (isEmpty) hm + else { + val newNode = rootNode.concat(hm.rootNode, 0) + if (newNode eq hm.rootNode) hm + else newHashMapOrThis(newNode) + } + case hm: mutable.HashMap[K @unchecked, V @unchecked] => + val iter = hm.nodeIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => + val iter = lhm.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = 
Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case _ => + class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { + var changed = false + var shallowlyMutableNodeMap: Int = 0 + var current: BitmapIndexedMapNode[K, V1] = rootNode + def apply(kv: (K, V1)) = apply(kv._1, kv._2) + def apply(key: K, value: V1): Unit = { + val originalHash = key.## + val improved = improve(originalHash) + if (!changed) { + current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
+ changed = true + shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + } + } else { + shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap) + } + } + } + that match { + case thatMap: Map[K, V1] => + if (thatMap.isEmpty) this + else { + val accum = new accum + thatMap.foreachEntry(accum) + newHashMapOrThis(accum.current) + } + case _ => + val it = that.iterator + if (it.isEmpty) this + else { + val accum = new accum + it.foreach(accum) + newHashMapOrThis(accum.current) + } + } + } - def split: Seq[HashMap[A, B]] = Seq(this) + override def tail: HashMap[K, V] = this - head._1 - /** Creates a new map which is the merge of this and the argument hash map. - * - * Uses the specified collision resolution function if two keys are the same. - * The collision resolution function will always take the first argument from - * `this` hash map and the second from `that`. - * - * The `merged` method is on average more performant than doing a traversal and reconstructing a - * new immutable hash map from scratch. 
- * - * @tparam B1 the value type of the other hash map - * @param that the other hash map - * @param mergef the merge function or null if the first key-value pair is to be picked - */ - def merged[B1 >: B](that: HashMap[A, B1])(mergef: MergeFunction[A, B1]): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef)) + override def init: HashMap[K, V] = this - last._1 - protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that + override def head: (K, V) = iterator.next() - override def par = ParHashMap.fromTrie(this) + override def last: (K, V) = reverseIterator.next() - /* Override to avoid tuple allocation in foreach */ - private[collection] class HashMapKeys extends ImmutableDefaultKeySet { - override def foreach[U](f: A => U) = foreachEntry((key, _) => f(key)) - override lazy val hashCode = super.hashCode() - } - override def keySet: immutable.Set[A] = new HashMapKeys + override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f) - /** The implementation class of the iterable returned by `values`. 
- */ - private[collection] class HashMapValues extends DefaultValuesIterable { - override def foreach[U](f: B => U) = foreachEntry((_, value) => f(value)) - } - override def values: scala.collection.Iterable[B] = new HashMapValues + override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f) - override final def transform[W, That](f: (A, B) => W)(implicit bf: CanBuildFrom[HashMap[A, B], (A, W), That]): That = - if ((bf eq Map.canBuildFrom) || (bf eq HashMap.canBuildFrom)) castToThat(transformImpl(f)) - else super.transform(f)(bf) + /** Applies a function to each key, value, and **original** hash value in this Map */ + @inline private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f) - /* `transform` specialized to return a HashMap */ - protected def transformImpl[W](f: (A, B) => W): HashMap[A, W] = HashMap.empty + override def equals(that: Any): Boolean = + that match { + case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) + case _ => super.equals(that) + } - private def isCompatibleCBF(cbf: CanBuildFrom[_,_,_]): Boolean = { - cbf match { - case w: WrappedCanBuildFrom[_,_,_] => - isCompatibleCBF(w.wrapped) - case _ => - (cbf eq HashMap.canBuildFrom) || (cbf eq Map.canBuildFrom) + override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be + // immutable. 
+ val hashIterator = new MapKeyValueTupleHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed) + // assert(hash == super.hashCode()) + hash } } - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = ++[(A, B1), Map[A, B1]](xs)(HashMap.canBuildFrom[A, B1]) - - override def ++[C >: (A, B), That](that: GenTraversableOnce[C])(implicit bf: CanBuildFrom[HashMap[A, B], C, That]): That = { - if (isCompatibleCBF(bf)) { - //here we know that That =:= HashMap[_, _], or compatible with it - if (this eq that.asInstanceOf[AnyRef]) castToThat(that) - else if (that.isEmpty) castToThat(this) - else { - val result: HashMap[A, B] = that match { - case thatHash: HashMap[A, B] => - this.merge0(thatHash, 0, concatMerger[A, B]) - case that => - var result: HashMap[A, B] = this - that.asInstanceOf[GenTraversableOnce[(A, B)]].foreach(result += _) - result + override protected[this] def className = "HashMap" + + /** Merges this HashMap with an other HashMap by combining all key-value pairs of both maps, and delegating to a merge + * function to resolve any key collisions between the two HashMaps. + * + * @example {{{ + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(2 -> 2, 3 -> 2) + * + * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) } + * // HashMap(1 -> 1, 3 -> 2, 4 -> 3) + * + * }}} + * + * @param that the HashMap to merge this HashMap with + * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then + * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to + * `that.concat(this)` + * + * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `merge`, or + * found in `this` or `that`, it is not defined which value will be chosen. 
For example: + * + * Colliding multiple results of merging: + * {{{ + * // key `3` collides between a result of merging keys `1` and `2` + * val left = HashMap(1 -> 1, 2 -> 2) + * val right = HashMap(1 -> 1, 2 -> 2) + * + * val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 } + * // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1) + * }}} + * Colliding results of merging with other keys: + * {{{ + * // key `2` collides between a result of merging `1`, and existing key `2` + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(1 -> 2) + * + * val merged = left.merged(right)((_,_) => 2 -> 3) + * // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3) + * }}} + * + */ + def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] = + if (mergef == null) { + that ++ this + } else { + if (isEmpty) that + else if (that.isEmpty) this + else if (size == 1) { + val payload@(k, v) = rootNode.getPayload(0) + val originalHash = rootNode.getHash(0) + val improved = improve(originalHash) + + if (that.rootNode.containsKey(k, originalHash, improved, 0)) { + val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0) + val (mergedK, mergedV) = mergef(payload, thatPayload) + val mergedOriginalHash = mergedK.## + val mergedImprovedHash = improve(mergedOriginalHash) + new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) + } else { + new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true)) + } + } else if (that.size == 1) { + val thatPayload@(k, v) = that.rootNode.getPayload(0) + val thatOriginalHash = that.rootNode.getHash(0) + val thatImproved = improve(thatOriginalHash) + + if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) { + val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0) + val (mergedK, mergedV)
= mergef(payload, thatPayload) + val mergedOriginalHash = mergedK.## + val mergedImprovedHash = improve(mergedOriginalHash) + new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) + } else { + new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true)) } - castToThat(result) + } else { + val builder = new HashMapBuilder[K, V1] + rootNode.mergeInto(that.rootNode, builder, 0)(mergef) + builder.result() } - } else super.++(that)(bf) - } + } - override def ++:[C >: (A, B), That](that: TraversableOnce[C])(implicit bf: CanBuildFrom[HashMap[A, B], C, That]): That = { - if (isCompatibleCBF(bf)) addSimple(that) - else super.++:(that) - } + override def transform[W](f: (K, V) => W): HashMap[K, W] = + newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]] - override def ++:[C >: (A, B), That](that: scala.Traversable[C])(implicit bf: CanBuildFrom[HashMap[A, B], C, That]): That = { - if (isCompatibleCBF(bf)) addSimple(that) - else super.++:(that) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashMap.empty + else new HashMap(newRootNode) } - private def addSimple[C >: (A, B), That](that: TraversableOnce[C])(implicit bf: CanBuildFrom[HashMap[A, B], C, That]): That = { - //here we know that That =:= HashMap[_, _], or compatible with it - if (this eq that.asInstanceOf[AnyRef]) castToThat(that) - else if (that.isEmpty) castToThat(this) - else { - val merger = HashMap.concatMerger[A, B].invert - val result: HashMap[A, B] = that match { - case thatHash: HashMap[A, B] => - this.merge0(thatHash, 0, merger) - - case that: HasForeachEntry[A, B] => - //avoid the LazyRef as we don't have an @eager object - class adder extends AbstractFunction2[A, B, Unit] { - var result: HashMap[A, B] = 
HashMap.this - override def apply(key: A, value: B): Unit = { - result = result.updated0(key, computeHash(key), 0, value, null, merger) + + override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = { + if (isEmpty) { + this + } else { + keys match { + case hashSet: HashSet[K] => + if (hashSet.isEmpty) { + this + } else { + // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree + // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])` + val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode) + if (newRootNode eq rootNode) this + else if (newRootNode.size <= 0) HashMap.empty + else new HashMap(newRootNode) + } + case hashSet: collection.mutable.HashSet[K] => + if (hashSet.isEmpty) { + this + } else { + val iter = hashSet.nodeIterator + var curr = rootNode + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + case lhashSet: collection.mutable.LinkedHashSet[K] => + if (lhashSet.isEmpty) { + this + } else { + val iter = lhashSet.entryIterator + var curr = rootNode + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } } + newHashMapOrThis(curr) } - val adder = new adder - that foreachEntry adder - adder.result - case that => - //avoid the LazyRef as we don't have an @eager object - class adder extends AbstractFunction1[(A,B), Unit] { - var result: HashMap[A, B] = HashMap.this - override def apply(kv: (A, B)): Unit = { - val key = kv._1 - result = result.updated0(key, computeHash(key), 0, kv._2, kv, merger) + case _ => + val 
iter = keys.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty } } - val adder = new adder - that.asInstanceOf[GenTraversableOnce[(A,B)]] foreach adder - adder.result + newHashMapOrThis(curr) } - castToThat(result) } } - // These methods exist to encapsulate the `.asInstanceOf[That]` in a slightly safer way -- only suitable values can - // be cast and the type of the `CanBuildFrom` guides type inference. - private[this] def castToThat[C, That](m: HashMap[A, B])(implicit bf: CanBuildFrom[HashMap[A, B], C, That]): That = { - m.asInstanceOf[That] + override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two, + // based on the result of applying `p` to its elements and subnodes. + super.partition(p) } - private[this] def castToThat[C, That](m: GenTraversableOnce[C])(implicit bf: CanBuildFrom[HashMap[A, B], C, That]): That = { - m.asInstanceOf[That] - } -} - -/** $factoryInfo - * @define Coll `immutable.HashMap` - * @define coll immutable hash map - * - * @author Tiark Rompf - * @since 2.3 - */ -object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { - override def newBuilder[A, B]: mutable.Builder[(A, B), HashMap[A, B]] = new HashMapBuilder[A, B] + override def take(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ // + // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including + // those nodes in the resulting trie, until `n` total elements have been included. + super.take(n) + } - private[collection] abstract class Merger[A, B] { - def apply(kv1: (A, B), kv2: (A, B)): (A, B) - def invert: Merger[A, B] - def retainIdentical = false + override def takeRight(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `takeRight` could be optimized to construct a new trie structure by visiting each node in reverse, and + // including those nodes in the resulting trie, until `n` total elements have been included. + super.takeRight(n) + } - private type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1) + override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and + // including those nodes in the resulting trie, until `p` returns `false` + super.takeWhile(p) + } - private def liftMerger[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = - if (mergef == null) defaultMerger[A1, B1] else liftMerger0(mergef) + override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility.
+ // + // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and + // dropping those nodes in the resulting trie, until `p` returns `true` + super.dropWhile(p) + } - private def defaultMerger[A, B]: Merger[A, B] = _defaultMerger.asInstanceOf[Merger[A, B]] - private[this] val _defaultMerger : Merger[Any, Any] = new Merger[Any, Any] { - override def apply(a: (Any, Any), b: (Any, Any)): (Any, Any) = a - override def retainIdentical: Boolean = true - override val invert: Merger[Any, Any] = new Merger[Any, Any] { - override def apply(a: (Any, Any), b: (Any, Any)): (Any, Any) = b - override def retainIdentical: Boolean = true - override def invert = defaultMerger - } + override def dropRight(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse + // order, and dropping all nodes until `n` elements have been dropped + super.dropRight(n) } - private def concatMerger[A, B]: Merger[A, B] = _concatMerger.asInstanceOf[Merger[A, B]] - private[this] val _concatMerger : Merger[Any, Any] = new Merger[Any, Any] { - override def apply(a: (Any, Any), b: (Any, Any)): (Any, Any) = { - if (a._1.asInstanceOf[AnyRef] eq b._1.asInstanceOf[AnyRef]) b - else (a._1, b._2) - } - override def retainIdentical: Boolean = true - override val invert: Merger[Any, Any] = new Merger[Any, Any] { - override def apply(a: (Any, Any), b: (Any, Any)): (Any, Any) = { - if (b._1.asInstanceOf[AnyRef] eq a._1.asInstanceOf[AnyRef]) a - else (b._1, a._2) - } - override def retainIdentical: Boolean = true - override def invert = concatMerger - } + override def drop(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing 
implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, and + // dropping all nodes until `n` elements have been dropped + super.drop(n) } - private[this] def liftMerger0[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = new Merger[A1, B1] { - self => - def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv1, kv2) - val invert: Merger[A1, B1] = new Merger[A1, B1] { - def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv2, kv1) - def invert: Merger[A1, B1] = self - } + override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `scan` could be optimized to construct a new trie structure by visiting each node, and + // keeping each node and element until `p` returns false, then including the remaining nodes in the second result. + // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality + // checks. 
+ super.span(p) } - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (A, B), HashMap[A, B]]] - private val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] - def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]] +} - private object EmptyHashMap extends HashMap[Any, Nothing] { - override def head: (Any, Nothing) = throw new NoSuchElementException("Empty Map") - override def tail: HashMap[Any, Nothing] = throw new NoSuchElementException("Empty Map") - } +private[immutable] object MapNode { - // utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash code) - private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = { - val index0 = (hash0 >>> level) & 0x1f - val index1 = (hash1 >>> level) & 0x1f - if(index0 != index1) { - val bitmap = (1 << index0) | (1 << index1) - val elems = new Array[HashMap[A,B]](2) - if(index0 < index1) { - elems(0) = elem0 - elems(1) = elem1 - } else { - elems(0) = elem1 - elems(1) = elem0 - } - new HashTrieMap[A, B](bitmap, elems, size) - } else { - val elems = new Array[HashMap[A,B]](1) - val bitmap = (1 << index0) - elems(0) = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size) - new HashTrieMap[A, B](bitmap, elems, size) - } - } + private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0) - @deprecatedInheritance("This class will be made final in a future release.", "2.12.2") - @SerialVersionUID(4549809275616486327L) - class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[this] var kvOrNull: (A,B @uV)) extends HashMap[A,B] { - override def size = 1 - - private[collection] def getKey = key - private[collection] def getHash = hash - 
private[collection] def computeHashFor(k: A) = computeHash(k) - - override def get0(key: A, hash: Int, level: Int): Option[B] = - if (hash == this.hash && key == this.key) Some(value) else None - override private[collection] def getOrElse0[V1 >: B](key: A, hash: Int, level: Int, f: => V1): V1 = - if (hash == this.hash && key == this.key) value else f - - override protected def contains0(key: A, hash: Int, level: Int): Boolean = - hash == this.hash && key == this.key - private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = - if (hash == this.hash) { - if (key == this.key) { - if (merger eq null) { - if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this - else new HashMap1(this.key, hash, value, if (this.key.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) kv else null) - } else if ( - (key.asInstanceOf[AnyRef] eq this.key.asInstanceOf[AnyRef]) && - (value.asInstanceOf[AnyRef] eq this.value.asInstanceOf[AnyRef]) && - merger.retainIdentical) { - this - } else { - val current = this.ensurePair - val nkv = merger(current, if (kv != null) kv else (key, value)) - if ((current eq nkv) || ( - (this.key.asInstanceOf[AnyRef] eq nkv._1.asInstanceOf[AnyRef]) && - (this.value.asInstanceOf[AnyRef] eq nkv._2.asInstanceOf[AnyRef]))) this - else new HashMap1(nkv._1, hash, nkv._2, nkv) - } - } else - // 32-bit hash collision (rare, but not impossible) - new HashMapCollision1(hash, ListMap.empty.updated(this.key, this.value).updated(key, value)) - } else { - // they have different hashes, but may collide at this level - find a level at which they don't - val that = new HashMap1[A, B1](key, hash, value, kv) - makeHashTrieMap[A, B1](this.hash, this, hash, that, level, 2) - } + def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]] - override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = - if (hash == this.hash && key 
== this.key) HashMap.empty[A, B] else this + final val TupleLength = 2 - override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B@uV]], offset0: Int): HashMap[A, B] = - if (negate ^ p(ensurePair)) this else null +} - override def iterator: Iterator[(A, B)] = Iterator(ensurePair) - override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair) - override private[immutable] def foreachEntry[U](f: (A, B) => U): Unit = f(key, value) - // this method may be called multiple times in a multi-threaded environment, but that's ok - private[HashMap] def ensurePair: (A, B) = if (kvOrNull ne null) kvOrNull else { - kvOrNull = (key, value); kvOrNull - } - protected[HashMap] override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = { - that match { - case hm1: HashMap1[A, B1] => - if ((this eq hm1) && merger.retainIdentical) this - else if (this.hash == hm1.hash && this.key == hm1.key) - if (merger eq HashMap.defaultMerger) this - else if (merger eq HashMap.defaultMerger.invert) hm1 - else this.updated0(hm1.key, hm1.hash, level, hm1.value, hm1.ensurePair, merger) - else this.updated0(hm1.key, hm1.hash, level, hm1.value, hm1.ensurePair, merger) - case _ => - that.updated0(key, hash, level, value, ensurePair, merger.invert) - } - } +private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] { + def apply(key: K, originalHash: Int, hash: Int, shift: Int): V - override def equals(that: Any): Boolean = { - that match { - case hm: HashMap1[_, _] => - (this eq hm) || - (hm.hash == hash && hm.key == key && hm.value == value) - case _: HashMap[_, _] => - false - case _ => - super.equals(that) - } - } + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] - protected override def transformImpl[W](f: (A, B) => W): HashMap[A, W] = { - val value1 = f(key, value) - if (value1.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) 
this.asInstanceOf[HashMap1[A, W]] - else new HashMap1(key, hash, value1, null) - } - } + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 - @SerialVersionUID(-1917647429457579983L) - private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV]) - extends HashMap[A, B @uV] { - // assert(kvs.size > 1) + def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean - override def size = kvs.size + /** Returns a MapNode with the passed key-value assignment added + * + * @param key the key to add to the MapNode + * @param value the value to associate with `key` + * @param originalHash the original hash of `key` + * @param hash the improved hash of `key` + * @param shift the shift of the node (distanceFromRoot * BitPartitionSize) + * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value + * argument. + * if false, then the key will be inserted if not already present, however if the key is present + * then the passed value will not replace the current value. That is, if `false`, then this + * method has `update if not exists` semantics. 
+ */ + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] - override def get0(key: A, hash: Int, level: Int): Option[B] = - if (hash == this.hash) kvs.get(key) else None - override private[collection] def getOrElse0[V1 >: B](key: A, hash: Int, level: Int, f: => V1): V1 = - if (hash == this.hash) kvs.getOrElse(key, f) else f + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] - override protected def contains0(key: A, hash: Int, level: Int): Boolean = - hash == this.hash && kvs.contains(key) + def hasNodes: Boolean - private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = - if (hash == this.hash) { - if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value)) - else new HashMapCollision1(hash, kvs + merger((key, kvs(key)), kv)) - } else { - val that = new HashMap1(key, hash, value, kv) - makeHashTrieMap(this.hash, this, hash, that, level, size + 1) - } + def nodeArity: Int - override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = - if (hash == this.hash) { - val kvs1 = kvs - key - kvs1.size match { - case 0 => - HashMap.empty[A,B] - case 1 => - val kv = kvs1.head - new HashMap1(kv._1,hash,kv._2,kv) - case x if x == kvs.size => - this - case _ => - new HashMapCollision1(hash, kvs1) - } - } else this + def getNode(index: Int): MapNode[K, V] - override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = { - val kvs1 = if(negate) kvs.filterNot(p) else kvs.filter(p) - kvs1.size match { - case 0 => - null - case 1 => - val kv@(k,v) = kvs1.head - new HashMap1(k, hash, v, kv) - case x if x == kvs.size => - this - case _ => - new HashMapCollision1(hash, kvs1) - } - } + def hasPayload: Boolean - override def iterator: Iterator[(A,B)] = 
kvs.iterator - override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f) - override private[immutable] def foreachEntry[U](f: (A, B) => U): Unit = kvs.foreachEntry(f) - override def split: Seq[HashMap[A, B]] = { - val (x, y) = kvs.splitAt(kvs.size / 2) - def newhm(lm: ListMap[A, B @uV]) = { - if (lm.size > 1) new HashMapCollision1(hash, lm) - else new HashMap1(lm.head._1, hash, lm.head._2, lm.head) - } - List(newhm(x), newhm(y)) - } - protected[HashMap] override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = { - that match { - case hm: HashTrieMap[A, B1] => - //we ill get better performance and structural sharing by merging out one hashcode - //into something that has by definition more that one hashcode - hm.merge0(this, level, merger.invert) - case h1: HashMap1[A, B1] => - if (h1.hash != hash) makeHashTrieMap(hash, this, h1.hash, h1, level, size + 1) - else updated0(h1.key, h1.hash, level, h1.value, h1.ensurePair, merger) - case c: HashMapCollision1[A, B1] => - if (c.hash != hash) makeHashTrieMap(hash, this, c.hash, c, level, c.size + size) - else if (merger.retainIdentical && (c eq this)) this - else if ((merger eq defaultMerger) || (merger eq defaultMerger.invert)) { - val newkvs = if (merger eq defaultMerger) c.kvs ++ this.kvs else this.kvs ++ c.kvs - if (newkvs eq kvs) this - else if (newkvs eq c.kvs) c - else new HashMapCollision1(hash, newkvs) - } else { - var result: HashMap[A, B1] = null - if (size >= c.size) { - result = this - for (p <- c.kvs) result = result.updated0(p._1, hash, level, p._2, p, merger) - } else { - result = c - for (p <- kvs) result = result.updated0(p._1, hash, level, p._2, p, merger.invert) - } - result - } - case _ if that eq EmptyHashMap => this - } - } + def payloadArity: Int - override def equals(that: Any): Boolean = { - that match { - case hm: HashMapCollision1[_,_] => - (this eq hm) || - (hm.hash == hash && hm.kvs == kvs) - case _: HashMap[_, _] => - false - case _ => - 
super.equals(that) - } - } + def getKey(index: Int): K - protected override def transformImpl[W](f: (A, B) => W): HashMap[A, W] = { - new HashMapCollision1[A, W](hash, kvs transform f) - } - } + def getValue(index: Int): V - @deprecatedInheritance("This class will be made final in a future release.", "2.12.2") - @SerialVersionUID(834418348325321784L) - class HashTrieMap[A, +B]( - private[HashMap] var bitmap0: Int, - private[HashMap] var elems0: Array[HashMap[A, B @uV]], - private[HashMap] var size0: Int - ) extends HashMap[A, B @uV] { - @inline private[collection] final def bitmap: Int = bitmap0 - @inline private[collection] final def elems: Array[HashMap[A, B @uV]] = elems0 + def getPayload(index: Int): (K, V) - // assert(Integer.bitCount(bitmap) == elems.length) - // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]])) + def size: Int - @inline override final def size = size0 + def foreach[U](f: ((K, V)) => U): Unit - override def get0(key: A, hash: Int, level: Int): Option[B] = { - // Note: this code is duplicated in contains0/getOrElse0/get0 - val index = (hash >>> level) & 0x1f - if (bitmap == - 1) { - elems(index).get0(key, hash, level + 5) - } else { - val mask = (1 << index) - if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask - 1)) - elems(offset).get0(key, hash, level + 5) - } else { - None - } - } - } - override private[collection] def getOrElse0[V1 >: B](key: A, hash: Int, level: Int, f: => V1): V1 = { - // Note: this code is duplicated in contains0/getOrElse0/get0 - val index = (hash >>> level) & 0x1f - if (bitmap == - 1) { - elems(index).getOrElse0(key, hash, level + 5, f) - } else { - val mask = (1 << index) - if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask - 1)) - elems(offset).getOrElse0(key, hash, level + 5, f) - } else { - f - } - } - } + def foreachEntry[U](f: (K, V) => U): Unit - override protected def contains0(key: A, hash: Int, level: Int): Boolean = { - 
// Note: this code is duplicated in contains0/getOrElse0/get0 - val index = (hash >>> level) & 0x1f - if (bitmap == - 1) { - elems(index).contains0(key, hash, level + 5) - } else { - val mask = (1 << index) - if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask - 1)) - elems(offset).contains0(key, hash, level + 5) - } else { - false - } - } - } + def foreachWithHash(f: (K, V, Int) => Unit): Unit - private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.updated0(key, hash, level + 5, value, kv, merger) - if(subNew eq sub) this else { - val elemsNew = elems.clone().asInstanceOf[Array[HashMap[A, B1]]] - elemsNew(offset) = subNew - new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) - } - } else { - val elemsNew = new Array[HashMap[A,B1]](elems.length + 1) - System.arraycopy(elems, 0, elemsNew, 0, offset) - elemsNew(offset) = new HashMap1(key, hash, value, kv) - System.arraycopy(elems, offset, elemsNew, offset + 1, elems.length - offset) - new HashTrieMap(bitmap | mask, elemsNew, size + 1) - } - } + def transform[W](f: (K, V) => W): MapNode[K, W] - override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.removed0(key, hash, level + 5) - if (subNew eq sub) this - else if (subNew.isEmpty) { - val bitmapNew = bitmap ^ mask - if (bitmapNew != 0) { - val elemsNew = new Array[HashMap[A,B]](elems.length - 1) - System.arraycopy(elems, 0, elemsNew, 0, offset) - System.arraycopy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1) - val sizeNew = 
size - sub.size - // if we have only one child, which is not a HashTrieSet but a self-contained set like - // HashSet1 or HashSetCollision1, return the child instead - if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]]) - elemsNew(0) - else - new HashTrieMap(bitmapNew, elemsNew, sizeNew) - } else - HashMap.empty[A,B] - } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) { - subNew - } else { - val elemsNew = new Array[HashMap[A,B]](elems.length) - System.arraycopy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - val sizeNew = size + (subNew.size - sub.size) - new HashTrieMap(bitmap, elemsNew, sizeNew) - } - } else { - this - } - } + def copy(): MapNode[K, V] - override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = { - // current offset - var offset = offset0 - // result size - var rs = 0 - // bitmap for kept elems - var kept = 0 - // loop over all elements - var i = 0 - while (i < elems.length) { - val result = elems(i).filter0(p, negate, level + 5, buffer, offset) - if (result ne null) { - buffer(offset) = result - offset += 1 - // add the result size - rs += result.size - // mark the bit i as kept - kept |= (1 << i) - } - i += 1 - } - if (offset == offset0) { - // empty - null - } else if (rs == size) { - // unchanged - this - } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieMap[A, B]]) { - // leaf - buffer(offset0) - } else { - // we have to return a HashTrieMap - val length = offset - offset0 - val elems1 = new Array[HashMap[A, B]](length) - System.arraycopy(buffer, offset0, elems1, 0, length) - val bitmap1 = if (length == elems.length) { - // we can reuse the original bitmap - bitmap - } else { - // calculate new bitmap by keeping just bits in the kept bitmask - keepBits(bitmap, kept) - } - new HashTrieMap(bitmap1, elems1, rs) - } - } + def concat[V1 >: V](that: MapNode[K, V1], shift: 
Int): MapNode[K, V1] - override def iterator: Iterator[(A, B)] = new TrieIterator[(A, B)](elems.asInstanceOf[Array[Iterable[(A, B)]]]) { - final override def getElem(cc: AnyRef): (A, B) = cc.asInstanceOf[HashMap1[A, B]].ensurePair - } + def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] - override def foreach[U](f: ((A, B)) => U): Unit = { - var i = 0 - while (i < elems.length) { - elems(i).foreach(f) - i += 1 - } - } - override private[immutable] def foreachEntry[U](f: (A, B) => U): Unit = { - var i = 0 - while (i < elems.length) { - elems(i).foreachEntry(f) - i += 1 - } - } + /** Merges this node with that node, adding each resulting tuple to `builder` + * + * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` + * + * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, + * as `this` is, within the left tree + */ + def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit - private def posOf(n: Int, bm: Int) = { - var left = n - var i = -1 - var b = bm - while (left >= 0) { - i += 1 - if ((b & 1) != 0) left -= 1 - b = b >>> 1 - } - i - } + /** Returns the exact (equal by reference) key, and value, associated to a given key. 
+ * If the key is not bound to a value, then an exception is thrown + */ + def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) - override def split: Seq[HashMap[A, B]] = if (size == 1) Seq(this) else { - val nodesize = Integer.bitCount(bitmap) - if (nodesize > 1) { - val splitpoint = nodesize / 2 - val bitsplitpoint = posOf(nodesize / 2, bitmap) - val bm1 = bitmap & (-1 << bitsplitpoint) - val bm2 = bitmap & (-1 >>> (32 - bitsplitpoint)) + /** Adds all key-value pairs to a builder */ + def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit +} - val (e1, e2) = elems.splitAt(splitpoint) - val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size)) - val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size)) +private final class BitmapIndexedMapNode[K, +V]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { - List(hm1, hm2) - } else elems(0).split - } + releaseFence() - protected[HashMap] override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that match { - case hm: HashMap1[A, B1] => - this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[B1], hm.ensurePair, merger) - case that: HashTrieMap[A, B1] => - def mergeMaybeSubset(larger: HashTrieMap[A, B1], smaller: HashTrieMap[A, B1], merger: Merger[A, B1]):HashTrieMap[A, B1] = { - var resultElems: Array[HashMap[A, B1]] = null - var ai = 0 - var bi = 0 - var abm = larger.bitmap - var bbm = smaller.bitmap - val a = larger.elems - val b = smaller.elems - - //larger has all the bits or smaller, and if they have the same bits, is at least the bigger - //so we try to merge `smaller`into `larger`and hope that `larger is a superset - - //the additional size in the results, so the eventual size of the result is larger.size + additionalSize - var additionalSize = 0 - - // could be lsb = Integer.lowestOneBit(abm) - //but is 
this faster!! - // keep fastest in step with adjustments in the loop - //we know abm contains all of the bits in bbm, we only loop through bbm - //bsb is the next lowest bit in smaller - var bsb = bbm ^ (bbm & (bbm - 1)) - while (bsb != 0) { - val skippedBitsInA = abm & (bsb - 1) - ai += Integer.bitCount(skippedBitsInA) - abm ^= skippedBitsInA - val aai = a(ai) - val bbi = b(bi) - - val result = if ((aai eq bbi) && merger.retainIdentical) aai - else aai.merge0(bbi, level + 5, merger) - if (result ne aai) { - if (resultElems eq null) - resultElems = a.clone() - additionalSize += result.size - aai.size - //assert (result.size > aai.size) - resultElems(ai) = result - } - abm ^= bsb - bbm ^= bsb - bsb = bbm ^ (bbm & (bbm - 1)) + import MapNode._ + import Node._ - ai += 1 - bi += 1 - } - // we don't have to check whether the result is a leaf, since union will only make the set larger - // and this is not a leaf to begin with. - if (resultElems eq null) larger // happy days - no change - else new HashTrieMap(larger.bitmap, resultElems, larger.size + additionalSize) - } - def mergeDistinct() : HashMap[A,B1] = { - // the maps are distinct, so its a bit simpler to combine - // and we can avoid all of the quite expensive size calls on the children - - var ai = 0 - var bi = 0 - var offset = 0 - val abm = this.bitmap - val bbm = that.bitmap - val a = this.elems - val b = that.elems - var allBits = abm | bbm - - val resultElems = new Array[HashMap[A, B1]](Integer.bitCount(allBits)) - // could be lsb = Integer.lowestOneBit(abm) - //but is this faster!! 
- // keep fastest in step with adjustments in the loop - // lowest remaining bit - var lsb = allBits ^ (allBits & (allBits - 1)) - - while (lsb != 0) { - if ((lsb & abm) != 0) { - resultElems(offset) = a(ai) - ai += 1 - } else { - resultElems(offset) = b(bi) - bi += 1 - } - offset += 1 - allBits ^= lsb - lsb = allBits ^ (allBits & (allBits - 1)) - } - // we don't have to check whether the result is a leaf, since merge will only make the maps larger - // and this is not a leaf to begin with. - new HashTrieMap[A, B1](abm | bbm, resultElems, this.size + that.size) - } - def mergeCommon(): HashTrieMap[A, B1] = { - var ai = 0 - var bi = 0 - val abm = this.bitmap - val bbm = that.bitmap - val a = this.elems - val b = that.elems - var allBits = abm | bbm - val resultElems = new Array[HashMap[A, B1]](Integer.bitCount(allBits)) - - //output index - var offset = 0 - - // the size of the results so far - var rs = 0 - - // could be alsb = Integer.lowestOneBit(abm) - //but is this faster!! - // keep fastest in step with adjustments in the loop - // lowest remaining bit - var lsb = allBits ^ (allBits & (allBits - 1)) - - var result: HashMap[A, B1] = null - // loop as long as there are bits left in either abm or bbm - while (lsb != 0) { - if ((lsb & abm) != 0) { - if ((lsb & bbm) != 0) { - // lsb is in a and b, so combine - val aai = a(ai) - val bbi = b(bi) - - result = if ((aai eq bbi) && merger.retainIdentical) aai - else aai.merge0(bbi, level + 5, merger) - ai += 1 - bi += 1 - } else { - // lsb is in a - result = a(ai) - ai += 1 - } - } else { - // lsb is in b - result = b(bi) - bi += 1 - } - // update lsb - allBits ^= lsb - lsb = allBits ^ (allBits & (allBits - 1)) + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) - resultElems(offset) = result - rs += result.size - offset += 1 - } - // we don't have to check whether the result is a leaf, since union will only make the set larger - // and this is not a leaf to begin with. 
- new HashTrieMap(abm | bbm, resultElems, rs) + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity - } + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length - // if we have a subset/superset relationship, then we can merge and not allocate if thats a real subset - // we check on that relationship based on the bitssets, and if the bitsets are the same than we look at the size - // to work out the subset vs the superset - // a superset here is a trie that has all the bits of the other and is possible to be a superset - // - // if the bits are distinct we can skip some processing so we have a path for that - // otherwise the general case - - val abm = this.bitmap - val bbm = that.bitmap - val allBits = abm | bbm - - if ((this eq that) && merger.retainIdentical) this - else if (allBits == abm && (allBits != bbm || this.size >= that.size)) mergeMaybeSubset(this, that, merger) - else if (allBits == bbm) mergeMaybeSubset(that, this, merger.invert) - else if ((abm & bbm) == 0) mergeDistinct() - else mergeCommon() - - case hm: HashMapCollision1[_, _] => - val index = (hm.hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.merge0(hm, level + 5, merger) - if(subNew eq sub) this else { - val elemsNew = elems.clone().asInstanceOf[Array[HashMap[A,B1]]] - // its just a little faster than new Array[HashMap[A,B1]](elems.length); System.arraycopy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) - } - } else { - val elemsNew = new Array[HashMap[A,B1]](elems.length + 1) - System.arraycopy(elems, 0, elemsNew, 0, offset) - elemsNew(offset) = hm - System.arraycopy(elems, offset, elemsNew, offset + 1, 
elems.length - offset) - new HashTrieMap(bitmap | mask, elemsNew, size + hm.size) - } - case _ if that eq EmptyHashMap => this - case _ => sys.error("section supposed to be unreachable.") - } + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) - override def equals(that: Any): Boolean = { - that match { - case hm: HashTrieMap[_, _] => - (this eq hm) || { - this.bitmap == hm.bitmap && - this.size == hm.size && - ju.Arrays.equals(this.elems.asInstanceOf[Array[AnyRef]], hm.elems.asInstanceOf[Array[AnyRef]]) - } - case _: HashMap[_, _] => - false - case _ => - super.equals(that) - } - } + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) - protected override def transformImpl[W](f: (A, B) => W): HashMap[A, W] = { - val elems1 = new Array[HashMap[A, W]](elems.length) - var i = 0 - while (i < elems.length) { - val elem = elems(i) - if (elem ne null) { - val elem1 = elem.transformImpl(f) - elems1(i) = elem1 - } - i += 1 - } - new HashTrieMap[A, W](bitmap, elems1, size) - } + predicate1 && predicate2 && predicate3 } + */ - /** - * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection - * @param size the maximum size of the collection to be generated - * @return the maximum buffer size - */ - @inline private def bufferSize(size: Int): Int = (size + 6) min (32 * 7) - - /** - * In many internal operations the empty map is represented as null for performance reasons. 
This method converts - * null to the empty map for use in public methods - */ - @inline private def nullToEmpty[A, B](m: HashMap[A, B]): HashMap[A, B] = if (m eq null) empty[A, B] else m - - /** - * Utility method to keep a subset of all bits in a given bitmap - * - * Example - * bitmap (binary): 00000001000000010000000100000001 - * keep (binary): 1010 - * result (binary): 00000001000000000000000100000000 - * - * @param bitmap the bitmap - * @param keep a bitmask containing which bits to keep - * @return the original bitmap with all bits where keep is not 1 set to 0 - */ - private def keepBits(bitmap: Int, keep: Int): Int = { - var result = 0 - var current = bitmap - var kept = keep - while (kept != 0) { - // lowest remaining bit in current - val lsb = current ^ (current & (current - 1)) - if ((kept & 1) != 0) { - // mark bit in result bitmap - result |= lsb - } - // clear lowest remaining one bit in abm - current &= ~lsb - // look at the next kept bit - kept >>>= 1 + def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] + def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] + + def getPayload(index: Int) = Tuple2( + content(TupleLength * index).asInstanceOf[K], + content(TupleLength * index + 1).asInstanceOf[V]) + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): MapNode[K, V] = + content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] + + def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException(s"key not found: 
$key") } - result } - @SerialVersionUID(2L) - private class SerializationProxy[A,B](@transient private var orig: HashMap[A, B]) extends Serializable { - private def writeObject(out: java.io.ObjectOutputStream) { - val s = orig.size - out.writeInt(s) - for ((k,v) <- orig) { - out.writeObject(k) - out.writeObject(v) - } + def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) Some(this.getValue(index)) else None + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + None } + } - private def readObject(in: java.io.ObjectInputStream) { - orig = empty - val s = in.readInt() - for (i <- 0 until s) { - val key = in.readObject().asInstanceOf[A] - val value = in.readObject().asInstanceOf[B] - orig = orig.updated(key, value) - } + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val mask = maskFrom(hash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val payload = getPayload(index) + if (key == payload._1) payload else Iterator.empty.next() + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) + } else { + Iterator.empty.next() } - - private def readResolve(): AnyRef = orig } - //TODO share these with HashSet - they are the same - private def elemHashCode(key: Any) = key.## - - private final def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) + def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { + val 
mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) getValue(index) else f + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) + } else { + f + } } - private def computeHashImpl(key: Any) = improve(elemHashCode(key)) + override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) - /** Builder for HashMap. - */ - private[collection] final class HashMapBuilder[A, B] extends mutable.ReusableBuilder[(A, B), HashMap[A, B]] { - import java.util + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) + (originalHashes(index) == originalHash) && key == getKey(index) + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + false + } + } - /* Nodes in the tree are either regular HashMap1, HashTrieMap, HashMapCollision1, or a mutable HashTrieMap - mutable HashTrieMap nodes are designated by having size == -1 - mutable HashTrieMap nodes can have child nodes that are mutable, or immutable - immutable HashTrieMap child nodes can only be immutable + def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) - mutable HashTrieMap elems are always a Array of size 32,size -1, bitmap -1 - */ + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = 
getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + if (replaceValue) { + val value0 = this.getValue(index) + if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) + this + else copyAndSetValue(bitpos, key, value) + } else this + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) - /** The root node of the partially build hashmap */ - private var rootNode: HashMap[A, B] = HashMap.empty + copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue) - private def isMutable(hs: HashMap[A, B]) = { - hs.isInstanceOf[HashTrieMap[A, B]] && hs.size == -1 - } + if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) + } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) + } - private def makeMutable(hs: HashTrieMap[A, B]): HashTrieMap[A, B] = { - if (isMutable(hs)) hs - else { - val elems = new Array[HashMap[A, B]](32) - var bit = 0 - var iBit = 0 - while (bit < 32) { - if ((hs.bitmap & (1 << bit)) != 0) { - elems(bit) = hs.elems(iBit) - iBit += 1 - } - bit += 1 + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will + * be shallowly mutated (its children will not be mutated). 
+ * + * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param value the value to set `key` to + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. + */ + def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + val value0 = this.getValue(index) + if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + content(idx + 1) = value } - new HashTrieMap[A, B](-1, elems, -1) + shallowlyMutableNodeMap + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos } - } - private def isLeaf(hm: HashMap[A, B]) = { - hm.isInstanceOf[HashMap1[A, B]] || hm.isInstanceOf[HashMapCollision1[A, B]] - } - @inline private def computeHash(key: A) = computeHashImpl(key) - - private def 
makeImmutable(hs: HashMap[A, B]): HashMap[A, B] = { - hs match { - case trie: HashTrieMap[A, B] if isMutable(trie) => - var bit = 0 - var bitmap = 0 - var size = 0 - while (bit < 32) { - if (trie.elems(bit) ne null) - trie.elems(bit) = makeImmutable(trie.elems(bit)) - if (trie.elems(bit) ne null) { - bitmap |= 1 << bit - size += trie.elems(bit).size - } - bit += 1 - } - Integer.bitCount(bitmap) match { - case 0 => null - case 1 - if isLeaf(trie.elems(Integer.numberOfTrailingZeros(bitmap))) => - trie.elems(Integer.numberOfTrailingZeros(bitmap)) - - case bc => - val elems = if (bc == 32) trie.elems else { - val elems = new Array[HashMap[A, B]](bc) - var oBit = 0 - bit = 0 - while (bit < 32) { - if (trie.elems(bit) ne null) { - elems(oBit) = trie.elems(bit) - oBit += 1 - } - bit += 1 - } - assert(oBit == bc) - elems - } - trie.size0 = size - trie.elems0 = elems - trie.bitmap0 = bitmap - trie + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeHashCode = subNode.cachedJavaKeySetHashCode + + var returnMutableNodeMap = shallowlyMutableNodeMap + + val subNodeNew: MapNode[K, V1] = subNode match { + case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) + if (result ne subNode) { + returnMutableNodeMap |= bitpos } - case _ => hs + result } - } - override def clear(): Unit = { - rootNode match { - case trie: HashTrieMap[A, B] if isMutable(trie) => - util.Arrays.fill(trie.elems.asInstanceOf[Array[AnyRef]], null) - case _ => rootNode = HashMap.empty[A, B] + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + 
this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode + returnMutableNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = insertElement(originalHashes, dataIx, originalHash) + this.size += 1 + this.cachedJavaKeySetHashCode += keyHash + shallowlyMutableNodeMap + } + } + + def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + + if (key0 == key) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + /* + * Create new node with remaining pair. The new node will a) either become the new root + * returned, or b) unwrapped and inlined during returning. 
+ */ + val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0)) + if (index == 0) + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1))) + else + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0))) + } else copyAndRemoveValue(bitpos, keyHash) + } else this + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize) + // assert(subNodeNew.size != 0, "Sub-node must have at least one element.") + + if (subNodeNew eq subNode) return this + + // cache just in case subNodeNew is a hashCollision node, in which in which case a little arithmetic is avoided + // in Vector#length + val subNodeNewSize = subNodeNew.size + + if (subNodeNewSize == 1) { + if (this.size == subNode.size) { + // subNode is the only child (no other data or node children of `this` exist) + // escalate (singleton or empty) result + subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]] + } else { + // inline value (move to front) + copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew) + } + } else if (subNodeNewSize > 1) { + // modify current node (set replacement node) + copyAndSetNode(bitpos, subNode, subNodeNew) + } else this + } else this + } + + def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1))) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + val newCachedHash = keyHash0 + keyHash1 + + if (mask0 != mask1) { + // unique prefixes, payload 
fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + + if (mask0 < mask1) { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash) + } else { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize) + new BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) } } + } + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + //dst(idx) = newKey + dst(idx + 1) = newValue + new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedMapNode[K, V1]( + dataMap, + nodeMap, + 
dst, + originalHashes, + size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) + } + + def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + // copy 'src' and remove 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) + } - override def result(): HashMap[A, B] = { - rootNode = nullToEmpty(makeImmutable(rootNode)) - VM.releaseFence() - rootNode + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
+ * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the key currently at `bitpos` + * @param node the node to place at `bitpos` beneath `this` + */ + def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = dataMap ^ bitpos + this.nodeMap = nodeMap | bitpos + this.content = dst + this.originalHashes = dstHashes + this.size = size - 1 + node.size + this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + 
+ new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, + originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + ) + } + + def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val key = node.getKey(0) + val value = node.getValue(0) + val src = this.content + val dst = new Array[Any](src.length - 1 + TupleLength) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 2 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = key + dst(idxNew + 1) = value + arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + override def foreach[U](f: ((K, V)) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i)) + i += 1 } - override def +=(elem1: (A, B), elem2: (A, B), elems: (A, B)*): this.type = { - this += elem1 - this += elem2 - this ++= elems + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreach(f) + j += 1 } + } - override def +=(elem: (A, B)): this.type = { - val hash = 
computeHash(elem._1) - rootNode = addOne(rootNode, elem, hash, 0) - this + override def foreachEntry[U](f: (K, V) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getKey(i), getValue(i)) + i += 1 } - override def ++=(xs: TraversableOnce[(A, B)]): this.type = xs match { - case hm: HashMap[A, B] => - if (rootNode eq EmptyHashMap) { - if (!hm.isEmpty) - rootNode = hm - } - else - rootNode = addHashMap(rootNode, hm, 0) - this - case hm: mutable.HashMap[A, B] => - //TODO - super.++=(xs) - case _ => - super.++=(xs) + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachEntry(f) + j += 1 } + } - /** return the bit index of the rawIndex in the bitmap of the trie, or -1 if the bit is not in the bitmap */ - private def compressedIndex(trie: HashTrieMap[A, B], rawIndex: Int): Int = { - if (trie.bitmap == -1) rawIndex - else if ((trie.bitmap & (1 << rawIndex)) == 0) { - //the value is not in this index - -1 - } else { - Integer.bitCount(((1 << rawIndex) - 1) & trie.bitmap) - } + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + var i = 0 + val iN = payloadArity // arity doesn't change during this operation + while (i < iN) { + f(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 } - /** return the array index for the rawIndex, in the trie elem array - * The trie may be mutable, or immutable - * returns -1 if the trie is compressed and the index in not in the array */ - private def trieIndex(trie: HashTrieMap[A, B], rawIndex: Int): Int = { - if (isMutable(trie) || trie.bitmap == -1) rawIndex - else compressedIndex(trie, rawIndex) + } + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + var i = 0 + val iN = payloadArity + val jN = nodeArity + while (i < iN) { + 
builder.addOne(getKey(i), getValue(i), getHash(i)) + i += 1 } - def leafHash(leaf: HashMap[A, B]) = leaf match { - case m: HashMap1[A, B] => m.hash - case m: HashMapCollision1[A, B] => m.hash - case _ => throw new IllegalArgumentException(leaf.getClass.toString) + var j = 0 + while (j < jN) { + getNode(j).buildTo(builder) + j += 1 } + } - def makeMutableTrie(aLeaf: HashMap[A, B], bLeaf: HashMap[A, B], level: Int): HashTrieMap[A, B] = { - val elems = new Array[HashMap[A, B]](32) - val aRawIndex = (leafHash(aLeaf) >>> level) & 0x1f - val bRawIndex = (leafHash(bLeaf) >>> level) & 0x1f - if (aRawIndex == bRawIndex) { - elems(aRawIndex) = makeMutableTrie(aLeaf, bLeaf, level + 5) + override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { + var newContent: Array[Any] = null + val iN = payloadArity // arity doesn't change during this operation + val jN = nodeArity // arity doesn't change during this operation + val newContentLength = content.length + var i = 0 + while (i < iN) { + val key = getKey(i) + val value = getValue(i) + val newValue = f(key, value) + if (newContent eq null) { + if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { + newContent = content.clone() + newContent(TupleLength * i + 1) = newValue + } } else { - elems(aRawIndex) = aLeaf - elems(bRawIndex) = bLeaf + newContent(TupleLength * i + 1) = newValue } - new HashTrieMap[A, B](-1, elems, -1) + i += 1 } - private def addOne(toNode: HashMap[A, B], kv: (A, B), improvedHash: Int, level: Int): HashMap[A, B] = { - toNode match { - case leaf: HashMap1[A, B] => - if (leaf.hash == improvedHash) - leaf.updated0(kv._1, improvedHash, level, kv._2, kv, null) - else makeMutableTrie(leaf, new HashMap1(kv._1, improvedHash, kv._2, kv), level) - case leaf: HashMapCollision1[A, B] => - if (leaf.hash == improvedHash) - leaf.updated0(kv._1, improvedHash, level, kv._2, kv, null) - else makeMutableTrie(leaf, new HashMap1(kv._1, improvedHash, kv._2, kv), level) - - case trie: HashTrieMap[A, B] 
if isMutable((trie)) => - val arrayIndex = (improvedHash >>> level) & 0x1f - val old = trie.elems(arrayIndex) - trie.elems(arrayIndex) = if (old eq null) new HashMap1(kv._1, improvedHash, kv._2, kv) - else addOne(old, kv, improvedHash, level + 5) - trie - case trie: HashTrieMap[A, B] => - val rawIndex = (improvedHash >>> level) & 0x1f - val arrayIndex = compressedIndex(trie, rawIndex) - if (arrayIndex == -1) - addOne(makeMutable(trie), kv, improvedHash, level) - else { - val old = trie.elems(arrayIndex) - val merged = if (old eq null) new HashMap1(kv._1, improvedHash, kv._2, kv) - else addOne(old, kv, improvedHash, level + 5) - - if (merged eq old) trie - else { - val newMutableTrie = makeMutable(trie) - newMutableTrie.elems(rawIndex) = merged - newMutableTrie + var j = 0 + while (j < jN) { + val node = getNode(j) + val newNode = node.transform(f) + if (newContent eq null) { + if (newNode ne node) { + newContent = content.clone() + newContent(newContentLength - j - 1) = newNode + } + } else + newContent(newContentLength - j - 1) = newNode + j += 1 + } + if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] + else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) + } + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) { + that.buildTo(builder) + return + } else if (bm.size == 0) { + buildTo(builder) + return + } + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + val minIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + { + var index = minIndex + var leftIdx = 0 + var rightIdx = 0 + + while (index < maxIndex) { + val bitpos = bitposFrom(index) + + if ((bitpos & dataMap) != 0) { + val leftKey = getKey(leftIdx) + val 
leftValue = getValue(leftIdx) + val leftOriginalHash = getHash(leftIdx) + if ((bitpos & bm.dataMap) != 0) { + // left data and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { + builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) + } else { + builder.addOne(leftKey, leftValue, leftOriginalHash) + builder.addOne(rightKey, rightValue, rightOriginalHash) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + // left data and right node + val subNode = bm.getNode(bm.nodeIndex(bitpos)) + val leftImprovedHash = improve(leftOriginalHash) + val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftData and rightNode, just build both children to builder + subNode.buildTo(builder) + builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) + } + } else { + // left data and nothing on right + builder.addOne(leftKey, leftValue, leftOriginalHash) + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + // left node and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + val rightImprovedHash = improve(rightOriginalHash) + + val subNode = getNode(nodeIndex(bitpos)) + val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftNode and rightData, just build both children to builder + subNode.buildTo(builder) + builder.addOne(rightKey, rightValue, 
rightOriginalHash, rightImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) + } + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // left node and right node + getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) + } else { + // left node and nothing on right + getNode(nodeIndex(bitpos)).buildTo(builder) } + } else if ((bitpos & bm.dataMap) != 0) { + // nothing on left, right data + val dataIndex = bm.dataIndex(bitpos) + builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex)) + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // nothing on left, right node + bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) } - case empty if empty eq EmptyHashMap => toNode.updated0(kv._1, improvedHash, level, kv._2, kv, null) + + index += 1 + } } + case _: HashCollisionMapNode[_, _] => + throw new RuntimeException("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") + } + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedMapNode[_, _] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false } - private def addHashMap(toNode: HashMap[A, B], toBeAdded: HashMap[A, B], level: Int): HashMap[A, B] = { - toNode match { - case aLeaf: HashMap1[A, B] => addToLeafHashMap(aLeaf, aLeaf.hash, toBeAdded, level) - case aLeaf: HashMapCollision1[A, B] => addToLeafHashMap(aLeaf, aLeaf.hash, toBeAdded, level) - case trie: HashTrieMap[A, B] => 
addToTrieHashMap(trie, toBeAdded, level) - case empty if empty eq EmptyHashMap => toNode + + @inline private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 } + + isEqual } - private def addToLeafHashMap(toNode: HashMap[A, B], toNodeHash: Int, toBeAdded: HashMap[A, B], level: Int): HashMap[A, B] = { - assert (isLeaf(toNode)) - if (toNode eq toBeAdded) toNode - else toBeAdded match { - case bLeaf: HashMap1[A, B] => - if (toNodeHash == bLeaf.hash) toNode.merge0(bLeaf, level, concatMerger[A, B]) - else makeMutableTrie(toNode, bLeaf, level) - - case bLeaf: HashMapCollision1[A, B] => - if (toNodeHash == bLeaf.hash) toNode.merge0(bLeaf, level, concatMerger[A, B]) - else makeMutableTrie(toNode, bLeaf, level) - - case bTrie: HashTrieMap[A, B] => - val rawIndex = (toNodeHash >>> level) & 0x1f - val arrayIndex = compressedIndex(bTrie, rawIndex) - if (arrayIndex == -1) { - val result = makeMutable(bTrie) - result.elems(rawIndex) = toNode - result + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) + } + // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `bm` + var anyChangesMadeSoFar = false + + val allMap = dataMap | 
bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataRightOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + val leftOriginalHash = getHash(leftIdx) + if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { + leftDataRightDataRightOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true } else { - val newEle = 
addToLeafHashMap(toNode, toNodeHash, bTrie.elems(arrayIndex), level + 5) - if (newEle eq toBeAdded) - toBeAdded - else { - val result = makeMutable(bTrie) - result.elems(rawIndex) = newEle - result + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { + // nothing from `this` will make it into the result -- return early + return bm + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val rightNode = bm.getNode(rightNodeIdx) + val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) + if (rightNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftKey = getKey(leftDataIdx) + val leftValue = getValue(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + + val updated = n.updated(leftKey, leftValue, leftOriginalHash, 
leftImproved, nextShift, replaceValue = false) + + if (updated ne n) { + anyChangesMadeSoFar = true + } + + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + getNode(leftNodeIdx).updated( + key = bm.getKey(rightDataIdx), + value = bm.getValue(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift, + replaceValue = true + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = originalHashes(leftDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = getNode(leftNodeIdx) + 
newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getKey(rightDataIdx), bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 } - case empty if empty isEmpty => - toNode + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } } + + if (anyChangesMadeSoFar) + new BitmapIndexedMapNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode 
+ ) + else bm + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + override def copy(): BitmapIndexedMapNode[K, V] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) * TupleLength + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy() + i += 1 } + new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } - private def addToTrieHashMap(toNode: HashTrieMap[A, B], toBeAdded: HashMap[A, B], level: Int): HashMap[A, B] = { - def addFromLeaf(hash: Int): HashMap[A, B] = { - assert(isLeaf(toBeAdded)) - val rawIndex = (hash >>> level) & 0x1f - val arrayIndex = trieIndex(toNode, rawIndex) - if (arrayIndex == -1) { - val newToNode = makeMutable(toNode) - newToNode.elems(rawIndex) = toBeAdded - newToNode - } else { - val old = toNode.elems(arrayIndex) - if (old eq toBeAdded) toNode - else if (old eq null) { - assert(isMutable(toNode)) - toNode.elems(arrayIndex) = toBeAdded - toNode - } else { - val result = addHashMap(old, toBeAdded, level + 5) - if (result eq old) toNode - else { - val newToNode = makeMutable(toNode) - newToNode.elems(rawIndex) = result - newToNode - } + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = { + if (size == 0) this + else if (size == 1) { + if (pred(getPayload(0)) != flipped) this else MapNode.empty + } else if (nodeMap == 0) { + // Performance optimization for nodes of depth 1: + // + // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler + // approach: + // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter + // * (bitCount(newDataMap) 
* TupleLength) tells us the new content array and originalHashes array size, so now perform allocations + // * traverse the content array once more, placing each passing element (according to `newDatamap`) in the new content and originalHashes arrays + // + // note: + // * this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while(i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + newCachedHashCode += improve(getHash(dataIndex)) } + + dataIndex += 1 } + + i += 1 } - if (toNode eq toBeAdded) toNode - else toBeAdded match { - case bLeaf: HashMap1[A, B] => - addFromLeaf(bLeaf.hash) - case bLeaf: HashMapCollision1[A, B] => - addFromLeaf(bLeaf.hash) - case bTrie: HashTrieMap[A, B] => - var result = toNode - var bBitSet = bTrie.bitmap - var bArrayIndex = 0 - while (bBitSet != 0) { - val rawIndex = Integer.numberOfTrailingZeros(bBitSet) - val arrayIndex = trieIndex(result, rawIndex) - val bValue = bTrie.elems(bArrayIndex) - if (arrayIndex == -1) { - result = makeMutable(result) - result.elems(rawIndex) = bValue + if (newDataMap == 0) { + MapNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize * TupleLength) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = 
Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) + newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + + + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null + + // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[MapNode[K, V]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos } else { - val aValue = result.elems(arrayIndex) - if (aValue ne bValue) { - if (aValue eq null) { - assert(isMutable(result)) - result.elems(rawIndex) = bValue - } else { - val resultAtIndex = addHashMap(aValue, bValue, level + 5) - if (resultAtIndex ne aValue) { - result = makeMutable(result) - result.elems(rawIndex) = resultAtIndex - } + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + 
nodesToMigrateToData = mutable.Queue() + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + if (newSize == 0) { + MapNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex * TupleLength) = getKey(oldDataIndex) + newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. 
If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(TupleLength * newDataIndex) = node.getKey(0) + newContent(TupleLength * newDataIndex + 1) = node.getValue(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + } +} + +private final class HashCollisionMapNode[K, +V ]( + val originalHash: Int, + val hash: Int, + var content: Vector[(K, V @uV)] + ) extends MapNode[K, V] { + + import Node._ + + require(content.length >= 2) + + releaseFence() + + private[immutable] def indexOf(key: Any): Int = { + val iter = content.iterator + var i = 0 + while (iter.hasNext) { + if (iter.next()._1 == key) return i + i += 1 + } + -1 + } + + def size: Int = content.length + + def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(Iterator.empty.next()) + + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] = + if (this.hash == hash) { + val index = indexOf(key) + if (index >= 0) Some(content(index)._2) else None + } else None + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val index = indexOf(key) + if (index >= 0) content(index) else Iterator.empty.next() + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = { + if (this.hash == hash) { + indexOf(key) match { + case -1 => f + case other => content(other)._2 + } + } else f + } + + override def containsKey(key: K, originalHash: Int, hash: Int, 
shift: Int): Boolean = + this.hash == hash && indexOf(key) >= 0 + + def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean = + this.hash == hash && { + val index = indexOf(key) + index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) + } + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { + val index = indexOf(key) + if (index >= 0) { + if (replaceValue) { + if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { + this + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) + } + } else { + this + } + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) + } + } + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { + if (!this.containsKey(key, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => + val (k, v) = updatedContent(0) + new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) + } + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): MapNode[K, V] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getKey(index: Int): K = getPayload(index)._1 + def getValue(index: Int): V = getPayload(index)._2 + + def getPayload(index: Int): (K, V) = content(index) + + override def getHash(index: Int): Int = originalHash + + def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) + + def foreachEntry[U](f: (K, V) 
=> U): Unit = content.foreach { case (k, v) => f(k, v)} + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next._1, next._2, originalHash) + } + } + + override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { + val newContent = Vector.newBuilder[(K, W)] + val contentIter = content.iterator + // true if any values have been transformed to a different value via `f` + var anyChanges = false + while(contentIter.hasNext) { + val (k, v) = contentIter.next() + val newValue = f(k, v) + newContent.addOne((k, newValue)) + anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) + } + if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) + else this.asInstanceOf[HashCollisionMapNode[K, W]] + } + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionMapNode[_, _] => + (this eq node) || + (this.hash == node.hash) && + (this.content.length == node.content.length) && { + val iter = content.iterator + while (iter.hasNext) { + val (key, value) = iter.next() + val index = node.indexOf(key) + if (index < 0 || value != node.content(index)._2) { + return false } } + true + } + case _ => false + } + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { + case hc: HashCollisionMapNode[K, V1] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[(K, V1)] = null + val iter = content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (hc.indexOf(nextPayload._1) < 0) { + if (newContent eq null) { + newContent = new VectorBuilder[(K, V1)]() + newContent.addAll(hc.content) } - bBitSet ^= 1 << rawIndex - bArrayIndex += 1 + newContent.addOne(nextPayload) } - result + } + if (newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedMapNode[K, V1] => + // 
should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case hc: HashCollisionMapNode[K, V1] => + val iter = content.iterator + val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] - case empty if empty isEmpty => toNode + def rightIndexOf(key: K): Int = { + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i + i += 1 + } + -1 } + + while (iter.hasNext) { + val nextPayload = iter.next() + val index = rightIndexOf(nextPayload._1) + + if (index == -1) { + builder.addOne(nextPayload) + } else { + val rightPayload = rightArray(index).asInstanceOf[(K, V1)] + rightArray(index) = null + + builder.addOne(mergef(nextPayload, rightPayload)) + } + } + + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) + i += 1 + } + case _: BitmapIndexedMapNode[K, V1] => + throw new RuntimeException("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") + + } + + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + builder.addOne(k, v, originalHash, hash) } } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + MapNode.empty + } else if (newContentLength == 1) { + val (k, v) = newContent.head + new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + } else if 
(newContentLength == content.length) this + else new HashCollisionMapNode(originalHash, hash, newContent) + } + + override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def cachedJavaKeySetHashCode: Int = size * hash + +} + +private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[K, MapNode[K, V]](rootNode) { + + def next() = { + if (!hasNext) Iterator.empty.next() + + val key = currentValueNode.getKey(currentValueCursor) + currentValueCursor += 1 + + key + } + +} + +private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[V, MapNode[K, V]](rootNode) { + + def next() = { + if (!hasNext) Iterator.empty.next() + + val value = currentValueNode.getValue(currentValueCursor) + currentValueCursor += 1 + + value + } +} + +private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[(K, V), MapNode[K, V]](rootNode) { + + def next() = { + if (!hasNext) Iterator.empty.next() + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[(K, V), MapNode[K, V]](rootNode) { + + def next() = { + if (!hasNext) Iterator.empty.next() + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } +} + +private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[Any, MapNode[K, V]](rootNode) { + private[this] var hash = 0 + private[this] var value: V = _ + override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) + def next(): MapKeyValueTupleHashIterator[K, V] = { + if (!hasNext) 
Iterator.empty.next() + + hash = currentValueNode.getHash(currentValueCursor) + value = currentValueNode.getValue(currentValueCursor) + currentValueCursor -= 1 + this + } +} + +/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ +private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator[K, SetNode[K]](rootSetNode) { + /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ + def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { + var curr = rootMapNode + while (curr.size > 0 && hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + curr = curr.removed( + key = currentValueNode.getPayload(currentValueCursor), + keyHash = improve(originalHash), + originalHash = originalHash, + shift = 0 + ) + currentValueCursor += 1 + } + curr + } + + override def next(): K = Iterator.empty.next() +} + +/** + * $factoryInfo + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + @transient + private final val EmptyMap = new HashMap(MapNode.empty) + + def empty[K, V]: HashMap[K, V] = + EmptyMap.asInstanceOf[HashMap[K, V]] + + def from[K, V](source: collection.IterableOnce[(K, V)]): HashMap[K, V] = + source match { + case hs: HashMap[K, V] => hs + case _ => (newBuilder[K, V] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] +} + + +/** A Builder for a HashMap. 
+ * $multipleResults + */ +private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { + import MapNode._ + import Node._ + + private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashMap as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. */ + private var aliased: HashMap[K, V] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially built hashmap. */ + private var rootNode: BitmapIndexedMapNode[K, V] = newEmptyRootNode + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (rootNode.size == 0) value + else { + val originalHash = key.## + rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value) + } + + /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ + private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts key-value into the bitmapIndexMapNode. 
Requires that this is a new key-value pair */ + private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V],bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap |= bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Upserts a key/value pair into mapNode, mutably */ + private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = { + mapNode match { + case bm: BitmapIndexedMapNode[K, V] => + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val key0 = bm.getKey(index) + val key0UnimprovedHash = bm.getHash(index) + + if (key0UnimprovedHash == originalHash && key0 == key) { + bm.content(TupleLength * index + 1) = value + } else { + val value0 = bm.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew: MapNode[K, V] = + bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + } + + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHash = subNode.cachedJavaKeySetHashCode + update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + 
bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash + } else { + insertValue(bm, bitpos, key, originalHash, keyHash, value) + } + case hc: HashCollisionMapNode[K, V] => + val index = hc.indexOf(key) + if (index < 0) { + hc.content = hc.content.appended((key, value)) + } else { + hc.content = hc.content.updated(index, (key, value)) + } + } + } + + /** If currently referencing aliased structure, copy elements to new mutable structure */ + private[this] def ensureUnaliased() = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to new mutable structure */ + private[this] def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashMap[K, V] = + if (rootNode.size == 0) { + HashMap.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashMap(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: (K, V)): this.type = { + ensureUnaliased() + val h = elem._1.## + val im = improve(h) + update(rootNode, elem._1, elem._2, h, im, 0) + this + } + + def addOne(key: K, value: V): this.type = { + ensureUnaliased() + val originalHash = key.## + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, hash, 0) + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + ensureUnaliased() + xs match { + case hm: HashMap[K, V] => + new ChampBaseIterator[(K, V), MapNode[K, V]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + mapNode = rootNode, + key = currentValueNode.getKey(currentValueCursor), + value = currentValueNode.getValue(currentValueCursor), + originalHash = 
originalHash, + keyHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + + override def next() = Iterator.empty.next() + } + case hm: collection.mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case lhm: collection.mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case thatMap: Map[K, V] => + thatMap.foreachEntry((key, value) => addOne(key, value)) + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size } diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 808e56e2e7aa..3c72236a5395 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,1477 +14,2103 @@ package scala package collection package immutable -import java.util +import java.lang.Integer.{bitCount, numberOfTrailingZeros} +import java.lang.System.arraycopy -import generic._ -import scala.collection.parallel.immutable.ParHashSet -import scala.annotation.tailrec -import scala.runtime.AbstractFunction1 +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 -/** This class implements immutable sets using a hash trie. - * - * '''Note:''' The builder of this hash set may return specialized representations for small sets. - * - * @tparam A the type of the elements contained in this hash set. - * - * @author Martin Odersky - * @author Tiark Rompf - * @since 2.3 - * @define Coll `immutable.HashSet` - * @define coll immutable hash set - */ -@SerialVersionUID(2L) -sealed class HashSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, HashSet] - with SetLike[A, HashSet[A]] - with CustomParallelizable[A, ParHashSet[A]] - with Serializable -{ - import HashSet.{nullToEmpty, bufferSize} +/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam A the type of the elements contained in this hash set. 
+ * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) + extends AbstractSet[A] + with StrictOptimizedSetOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with DefaultSerializable { - override def companion: GenericCompanion[HashSet] = HashSet + def this() = this(SetNode.empty) - //class HashSet[A] extends Set[A] with SetLike[A, HashSet[A]] { + // This release fence is present because rootNode may have previously been mutated during construction. + releaseFence() - override def par = ParHashSet.fromTrie(this) + private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = + if (rootNode eq newRootNode) this else new HashSet(newRootNode) - override def size: Int = 0 + override def iterableFactory: IterableFactory[HashSet] = HashSet - override def empty = HashSet.empty[A] + override def knownSize: Int = rootNode.size - def iterator: Iterator[A] = Iterator.empty + override def size: Int = rootNode.size - override def foreach[U](f: A => U): Unit = () + override def isEmpty: Boolean = rootNode.size == 0 - def contains(e: A): Boolean = get0(e, computeHash(e), 0) + def iterator: Iterator[A] = { + if (isEmpty) Iterator.empty + else new SetIterator[A](rootNode) + } - override def subsetOf(that: GenSet[A]) = that match { - case that:HashSet[A] => - // call the specialized implementation with a level of 0 since both this and that are top-level hash sets - subsetOf0(that, 0) - case _ => - // call the generic implementation - super.subsetOf(that) + protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => 
node.getPayload(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) + } + s.asInstanceOf[S with EfficientSplit] } - /** - * A specialized implementation of subsetOf for when both this and that are HashSet[A] and we can take advantage - * of the tree structure of both operands and the precalculated hashcodes of the HashSet1 instances. - * @param that the other set - * @param level the level of this and that hashset - * The purpose of level is to keep track of how deep we are in the tree. - * We need this information for when we arrive at a leaf and have to call get0 on that - * The value of level is 0 for a top-level HashSet and grows in increments of 5 - * @return true if all elements of this set are contained in that set - */ - protected def subsetOf0(that: HashSet[A], level: Int) = { - // The default implementation is for the empty set and returns true because the empty set is a subset of all sets - true + def contains(element: A): Boolean = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + rootNode.contains(element, elementUnimprovedHash, elementHash, 0) } - override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0) + def incl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } - override def + (elem1: A, elem2: A, elems: A*): HashSet[A] = - this + elem1 + elem2 ++ elems + def excl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = 
improve(elementUnimprovedHash) + val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } - override def union(that: GenSet[A]): HashSet[A] = that match { - case that: HashSet[A] => - if (this eq that) this - else nullToEmpty(union0(that, 0)) - case _ => - if (that.isEmpty) this else { - //avoid the LazyRef as we don't have an @eager object - class acc extends AbstractFunction1[A, Unit] { - var res = HashSet.this - override def apply(v1: A): Unit = res += v1 + override def concat(that: IterableOnce[A]): HashSet[A] = + that match { + case hs: HashSet[A] => + if (isEmpty) hs + else { + val newNode = rootNode.concat(hs.rootNode, 0) + if (newNode eq hs.rootNode) hs + else newHashSetOrThis(newNode) + } + case hs: collection.mutable.HashSet[A] => + val iter = hs.nodeIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case lhs: collection.mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = 
lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } } - val acc = new acc - if (that.isInstanceOf[Set[A]]) - that foreach acc - else - that.iterator foreach acc - acc.res + this + case _ => + val iter = that.iterator + var current = rootNode + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + current = current.updated(element, originalHash, improved, 0) + + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + } + + override def tail: HashSet[A] = this - head + + override def init: HashSet[A] = this - last + + override def head: A = iterator.next() + + override def last: A = reverseIterator.next() + + override def foreach[U](f: A => U): Unit = rootNode.foreach(f) + + /** Applies a function f to each element, and its corresponding **original** hash, in this Set */ + @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f) + + /** Applies a function f to each element, and its corresponding **original** hash, in this Set + * Stops iterating the first time that f returns `false`.*/ + @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f) + + // For binary compatibility, the method used to have this signature by mistake. + // protected is public in bytecode. 
+ protected def subsetOf(that: Set[A]): Boolean = subsetOf(that: collection.Set[A]) + + override def subsetOf(that: collection.Set[A]): Boolean = isEmpty || !that.isEmpty && (that match { + case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0) + case _ => super.subsetOf(that) + }) + + override def equals(that: Any): Boolean = + that match { + case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) + case _ => super.equals(that) + } + + override protected[this] def className = "HashSet" + + override def hashCode(): Int = { + val it = new SetHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed) + //assert(hash == super.hashCode()) + hash + } + + override def diff(that: collection.Set[A]): HashSet[A] = { + if (isEmpty) { + this + } else { + that match { + case hashSet: HashSet[A] => + if (hashSet.isEmpty) this else { + val newRootNode = rootNode.diff(hashSet.rootNode, 0) + if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(rootNode.diff(hashSet.rootNode, 0)) + } + case hashSet: collection.mutable.HashSet[A] => + val iter = hashSet.nodeIterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next.key, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + + case other => + val thatKnownSize = other.knownSize + + if (thatKnownSize == 0) { + this + } else if (thatKnownSize <= size) { + /* this branch intentionally includes the case of thatKnownSize == -1. 
We know that HashSets are quite fast at look-up, so + we're likely to be the faster of the two at that. */ + removedAllWithShallowMutations(other) + } else { + // TODO: Develop more sophisticated heuristic for which branch to take + filterNot(other.contains) + } } + + } } - override def intersect(that: GenSet[A]): HashSet[A] = that match { - case that: HashSet[A] => - val buffer = new Array[HashSet[A]](bufferSize(this.size min that.size)) - nullToEmpty(intersect0(that, 0, buffer, 0)) - case _ => super.intersect(that) + /** Immutably removes all elements of `that` from this HashSet + * + * Mutation is used internally, but only on root SetNodes which this method itself creates. + * + * That is, this method is safe to call on published sets because it does not mutate `this` + */ + private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { + val iter = that.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this } - override def diff(that: GenSet[A]): HashSet[A] = that match { - case that: HashSet[A] => - val buffer = new Array[HashSet[A]](bufferSize(this.size)) - nullToEmpty(diff0(that, 0, buffer, 0)) - case _ => super.diff(that) + override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { + case set: scala.collection.Set[A] => diff(set) + case range: Range if range.length > size => + filter { + case i: Int => !range.contains(i) + case _ => true + } + + case _ => + removedAllWithShallowMutations(that) } - /** - * Union with a 
HashSet at a given level - * @param that a HashSet - * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree - * @return The union of this and that at the given level. Unless level is zero, the result is not a self-contained - * HashSet but needs to be stored at the correct depth - */ - private[immutable] def union0(that: HashSet[A], level: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return that - that + override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.partition(p) } - /** - * Intersection with another hash set at a given level - * @param level the depth in the tree. We need to keep track of the level to know how deep we are in the tree - * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes - * @param offset0 the first offset into the buffer in which we are allowed to write - * @return The intersection of this and that at the given level. Unless level is zero, the result is not a - * self-contained HashSet but needs to be stored at the correct depth - */ - private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return the empty set - null + override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.span(p) } - /** - * Diff with another hash set at a given level - * @param level the depth in the tree. 
We need to keep track of the level to know how deep we are in the tree - * @param buffer a temporary buffer that is used for temporarily storing elements when creating new branch nodes - * @param offset0 the first offset into the buffer in which we are allowed to write - * @return The diff of this and that at the given level. Unless level is zero, the result is not a - * self-contained HashSet but needs to be stored at the correct depth - */ - private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - // the default implementation is for the empty set, so we just return the empty set - null + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashSet.empty + else new HashSet(newRootNode) } - def - (e: A): HashSet[A] = - nullToEmpty(removed0(e, computeHash(e), 0)) + override def intersect(that: collection.Set[A]): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.intersect(that) + } - override def tail: HashSet[A] = this - head + override def take(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.take(n) + } - override def filter(p: A => Boolean) = p match { - case hs: HashSet[A] => - intersect(hs) - case _ => - val buffer = new Array[HashSet[A]](bufferSize(size)) - nullToEmpty(filter0(p, false, 0, buffer, 0)) + override def takeRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeRight(n) } - override def filterNot(p: A => Boolean) = p match { - case hs: HashSet[A] => - diff(hs) - case _ => - val buffer = new Array[HashSet[A]](bufferSize(size)) - nullToEmpty(filter0(p, true, 0, buffer, 0)) + override def takeWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeWhile(p) } - protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = null + override def drop(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.drop(n) + } - protected def elemHashCode(key: A) = key.## + override def dropRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.dropRight(n) + } - protected final def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) + override def dropWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.dropWhile(p) } +} + +private[immutable] object SetNode { + + private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] + + final val TupleLength = 1 - private[collection] def computeHash(key: A) = improve(elemHashCode(key)) +} + +private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): SetNode[A] + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): A + + def size: Int + + def foreach[U](f: A => U): Unit + + def subsetOf(that: SetNode[A], shift: Int): Boolean + + def copy(): SetNode[A] - protected def get0(key: A, hash: Int, level: Int): Boolean = false + def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] - private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = - new HashSet.HashSet1(key, hash) + def diff(that: SetNode[A], shift: Int): SetNode[A] - protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this + def concat(that: SetNode[A], shift: Int): SetNode[A] - protected def writeReplace(): AnyRef = new HashSet.SerializationProxy(this) + def foreachWithHash(f: (A, Int) => Unit): Unit - override def toSet[B >: 
A]: Set[B] = this.asInstanceOf[HashSet[B]] + def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean } -/** $factoryInfo - * @define Coll `immutable.HashSet` - * @define coll immutable hash set - * - * @author Tiark Rompf - * @since 2.3 - * @define Coll `immutable.HashSet` - * @define coll immutable hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -object HashSet extends ImmutableSetFactory[HashSet] { - override def newBuilder[A]: mutable.Builder[A, HashSet[A]] = new HashSetBuilder[A] +private final class BitmapIndexedSetNode[A]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends SetNode[A] { - /** $setCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, HashSet[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] + import Node._ + import SetNode._ - private object EmptyHashSet extends HashSet[Any] { - override def head: Any = throw new NoSuchElementException("Empty Set") - override def tail: HashSet[Any] = throw new NoSuchElementException("Empty Set") - } - private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet - - // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code) - private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int, newSize: Int) : HashTrieSet[A] = { - // assert elem0.size + elem1.size == newSize - val index0 = (hash0 >>> level) & 0x1f - val index1 = (hash1 >>> level) & 0x1f - if(index0 != index1) { - val bitmap = (1 << index0) | (1 << index1) - val elems = new Array[HashSet[A]](2) - if(index0 < index1) { - elems(0) = elem0 - elems(1) = elem1 - } else { - elems(0) = elem1 - elems(1) = elem0 - } - new HashTrieSet[A](bitmap, elems, newSize) - } else { - val bitmap = (1 << index0) - val child = 
makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5, newSize) - val elems = new Array[HashSet[A]](1) - elems(0) = child - new HashTrieSet[A](bitmap, elems, newSize) - } - } + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) - /** - * Common superclass of HashSet1 and HashSetCollision1, which are the two possible leaves of the Trie - */ - @SerialVersionUID(-8788235040812980474L) - private[HashSet] sealed abstract class LeafHashSet[A](private[HashSet] final val hash: Int) extends HashSet[A] - - @SerialVersionUID(7828248784025959392L) - class HashSet1[A](private[HashSet] val key: A, hash: Int) extends LeafHashSet[A](hash) { - override def size = 1 - - override protected def get0(key: A, hash: Int, level: Int): Boolean = - (hash == this.hash && key == this.key) - - override def equals(other: Any): Boolean = { - other match { - case that: HashSet1[A] => - (this eq that) || (this.hash == that.hash && this.key == that.key) - case _ : HashSet[_] => false - case _ => super.equals(other) - } - } + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new SetIterator[A](this).size - payloadArity >= 2 * nodeArity - override protected def subsetOf0(that: HashSet[A], level: Int) = { - // check if that contains this.key - // we use get0 with our key and hash at the correct level instead of calling contains, - // which would not work since that might not be a top-level HashSet - // and in any case would be inefficient because it would require recalculating the hash code - (this eq that) || that.get0(key, hash, level) - } + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length - override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash) - if (key == this.key) this - else - // 32-bit hash collision (rare, but not impossible) - new HashSetCollision1(hash, ListSet.empty + this.key + 
key, 2) - else - //size known to be 2 because this is a HashSet1, and so is the created set - makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level, 2) - - override private[immutable] def union0(that: HashSet[A], level: Int) = - that match { - case that: HashSet1[A] => - if (this.hash == that.hash) - if (this.key == that.key) this - else { - // 32-bit hash collision (rare, but not impossible) - new HashSetCollision1[A](hash, ListSet.empty + this.key + that.key, 2) - } - else { - // different hash code, so just create a branch node containing the two. - // size known to be 2 because this is a HashSet1, and so is the created set - makeHashTrieSet(this.hash, this, that.hash, that, level, 2) - } + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[SetNode[_]] == false) - case _ => //Trie, collision or empty - // we can exchange the arguments because union is symmetrical - // generally we prefer to return this where we can, - // but the result cannot be this for trie and collision (as they have size > 1) - // and is this for empty - that.union0(this, level) - } + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[SetNode[_]] == true) + + predicate1 && predicate2 && predicate3 + } + */ - override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (that.get0(key, hash, level)) this else null + def getPayload(index: Int): A = content(index).asInstanceOf[A] - override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (that.get0(key, hash, level)) null else this + override def getHash(index: Int): Int = originalHashes(index) - override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash && key == this.key) null else this + def getNode(index: Int): SetNode[A] = content(content.length - 
1 - index).asInstanceOf[SetNode[A]] - override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (negate ^ p(key)) this else null + def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + return originalHashes(index) == originalHash && element == this.getPayload(index) + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) + } - override def iterator: Iterator[A] = Iterator(key) - override def foreach[U](f: A => U): Unit = f(key) + false } - @SerialVersionUID(-4499898620567995040L) - private[immutable] class HashSetCollision1[A](hash: Int, val ks: ListSet[A], override val size: Int) extends LeafHashSet[A](hash) { + def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) - override protected def get0(key: A, hash: Int, level: Int): Boolean = - if (hash == this.hash) ks.contains(key) else false + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) - override def equals(other: Any): Boolean = { - other match { - case that: HashSetCollision1[A] => - (this eq that) || (this.hash == that.hash && this.ks == that.ks) - case miss : HashSet[_] => false - case _ => super.equals(other) + if (element0.asInstanceOf[AnyRef] eq element.asInstanceOf[AnyRef]) { + return this + } else { + val element0UnimprovedHash = getHash(index) + val element0Hash = improve(element0UnimprovedHash) + if (originalHash == element0UnimprovedHash && element0 == element) { + return this + } else { + val subNodeNew = 
mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + return copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew) + } } } + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) - override protected def subsetOf0(that: HashSet[A], level: Int) = { - // we have to check each element - // we use get0 with our hash at the correct level instead of calling contains, - // which would not work since that might not be a top-level HashSet - // and in any case would be inefficient because it would require recalculating the hash code - (this eq that) || ks.forall(key => that.get0(key, hash, level)) + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNode eq subNodeNew) { + return this + } else { + return copyAndSetNode(bitpos, subNode, subNodeNew) + } } - override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash) { - val ks1 = ks + key - // ListSet is guaranteed to return itself if key was already present - if (ks1 eq ks) - this - else - // create a new HashSetCollision with the existing hash - // we don't have to check for size=1 because union is never going to remove elements - new HashSetCollision1[A](hash, ks1, size + 1) + copyAndInsertValue(bitpos, element, originalHash, elementHash) + } + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated value is located in that child node, it will + * be shallowly mutated (its children will not be mutated). 
+ * + * If instead this method may not mutate the child node in which the to-be-updated value is located, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. + */ + def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = getPayload(index) + val element0UnimprovedHash = getHash(index) + if (element0UnimprovedHash == originalHash && element0 == element) { + shallowlyMutableNodeMap + } else { + val element0Hash = improve(element0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos } - else { - // size known to be one larger then my size - makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level, size + 1) + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode + + var returnNodeMap = shallowlyMutableNodeMap + + val subNodeNew: SetNode[A] = subNode match { + case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 => + 
subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNodeNew ne subNode) { + returnNodeMap |= bitpos + } + subNodeNew } - override private[immutable] def union0(that: HashSet[A], level: Int): HashSet[A] = that match { - case that: HashSet1[A] => - if (that.hash != this.hash) - // Just create a branch node containing the two. - // size known to be one larger then my size - makeHashTrieSet(this.hash, this, that.hash, that, level, size + 1) - else { - val ks1 = ks + that.key - // ListSet is guaranteed to return itself if key was already present - if (ks1 eq ks) - this - else - // create a new HashSetCollision with the existing hash - // we don't have to check for size=1 because union is never going to remove elements - new HashSetCollision1[A](hash, ks1, size + 1) - } - case that: HashSetCollision1[A] => - if (that.hash != this.hash) - // Just create a branch node containing the two. 
- // size unknown - just the sum of the sizes - makeHashTrieSet(this.hash, this, that.hash, that, level, size + that.size) - else if (this eq that) this - else { - val ks1 = this.ks ++ that.ks - // ListSet is guaranteed to return itself when all elements are already in the set, - if (ks1 eq ks) this - else { - val newSize = ks1.size - if (newSize == that.size) - // we have to check this as well, since we don't want to create a new instance if this is a subset of that - // we dont care about the ordering on the ListSet - that - else - // create a new HashSetCollision with the existing hash - // we don't have to check for size=1 because union is never going to remove elements - new HashSetCollision1[A](hash, ks1, newSize) - } - } - case _ => - // we can swap this and that because union is symmetrical and that is either a - // HashTrieSet - in which case the result could not be this - // EmptyHashSet - in which case the result will be this - that.union0(this, level) + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + returnNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = element + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = dstHashes + this.size += 1 + this.cachedJavaKeySetHashCode += elementHash + shallowlyMutableNodeMap } + } - override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (this eq that) this else { - 
// filter the keys, taking advantage of the fact that we know their hash code - val ks1 = ks.filter(that.get0(_, hash, level)) - ks1.size match { - case 0 => - // the empty set - null - case size if size == this.size => - // unchanged - // We do this check first since even if the result is of size 1 since - // it is preferable to return the existing set for better structural sharing - this - case size if size == that.size => - // the other set - // We do this check first since even if the result is of size 1 since - // it is preferable to return the existing set for better structural sharing - that - case 1 => - // create a new HashSet1 with the hash we already know - new HashSet1(ks1.head, hash) - case newSize => - // create a new HashSetCollision with the hash we already know and the new keys - new HashSetCollision1(hash, ks1, newSize) + + def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0 == element) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + // Create new node with remaining pair. The new node will a) either become the new root + // returned, or b) unwrapped and inlined during returning. 
+ val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0)) + if (index == 0) new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1))) + else new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0))) + } + else copyAndRemoveValue(bitpos, elementHash) } + else this } - - override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = - if (this eq that) null else { - val ks1 = ks.filterNot(that.get0(_, hash, level)) - ks1.size match { - case 0 => - // the empty set - null - case size if size == this.size => - // unchanged - // We do this check first since even if the result is of size 1 since - // it is preferable to return the existing set for better structural sharing - this + else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize) + + if (subNodeNew eq subNode) this + // if subNodeNew is a hashCollision node, size has cost in Vector#length + else subNodeNew.size match { case 1 => - // create a new HashSet1 with the hash we already know - new HashSet1(ks1.head, hash) - case newSize => - // create a new HashSetCollision with the hash we already know and the new keys - new HashSetCollision1(hash, ks1, newSize) + // subNode is the only child (no other data or node children of `this` exist) + // escalate (singleton or empty) result + if (this.size == subNode.size) subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]] + // inline value (move to front) + else copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew) + case subNodeNewSize if subNodeNewSize > 1 => + // modify current node (set replacement node) + copyAndSetNode(bitpos, subNode, subNodeNew) + case _ => 
this } } + else this + } + /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new + * node + * + * Should only be called on root nodes, because shift is assumed to be 0 + * + * @param element the element to remove + * @param originalHash the original hash of `element` + * @param elementHash the improved hash of `element` + */ + def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = { + val mask = maskFrom(elementHash, 0) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0 == element) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + val newDataMap = dataMap ^ bitpos + if (index == 0) { + val newContent = Array[Any](getPayload(1)) + val newOriginalHashes = Array(originalHashes(1)) + val newCachedJavaKeySetHashCode = improve(getHash(1)) + this.content = newContent + this.originalHashes = newOriginalHashes + this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode + } else { + val newContent = Array[Any](getPayload(0)) + val newOriginalHashes = Array(originalHashes(0)) + val newCachedJavaKeySetHashCode = improve(getHash(0)) + this.content = newContent + this.originalHashes = newOriginalHashes + this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode + } + this.dataMap = newDataMap + this.nodeMap = 0 + this.size = 1 + this + } + else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx - override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = - if (hash == this.hash) { - val ks1 = ks - key - // ListSet guarantees to return itself if `key` is not present - if (ks1 eq ks) this - else if (size == 2) { - // our size was 2, its changed via removing one element, so it must be 1 now - // create a new HashSet1 with the hash we already know - new HashSet1(ks1.head, hash) - } else { - // we know our size, and we 
only removed one element, so it must be size -1 now - // create a new HashSetCollision with the hash we already know and the new keys - new HashSetCollision1(hash, ks1, size -1) + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = this.dataMap ^ bitpos + this.content = dst + this.originalHashes = dstHashes + this.size -= 1 + this.cachedJavaKeySetHashCode -= elementHash + this } } else this - - override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = { - val ks1 = if(negate) ks.filterNot(p) else ks.filter(p) - if (ks1 eq ks) this - else ks1.size match { - case 0 => - null - case 1 => - new HashSet1(ks1.head, hash) - case x if x == size => + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] + + if (subNodeNew eq subNode) return this + + if (subNodeNew.size == 1) { + if (this.payloadArity == 0 && this.nodeArity == 1) { + this.dataMap = subNodeNew.dataMap + this.nodeMap = subNodeNew.nodeMap + this.content = subNodeNew.content + this.originalHashes = subNodeNew.originalHashes + this.size = subNodeNew.size + this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode this - case newSize => - new HashSetCollision1(hash, ks1, newSize) + } else { + migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) + this + } + } else { + // size must be > 1 + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size -= 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + 
subNodeNew.cachedJavaKeySetHashCode + this } - } + } else this + } - override def iterator: Iterator[A] = ks.iterator - override def foreach[U](f: A => U): Unit = ks.foreach(f) + def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { + // assert(key0 != key1) - } + if (shift >= HashCodeLength) { + new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) - /** - * A branch node of the HashTrieSet with at least one and up to 32 children. - * - * @param bitmap encodes which element corresponds to which child - * @param elems the up to 32 children of this node. - * the number of children must be identical to the number of 1 bits in bitmap - * @param size the total number of elements. This is stored just for performance reasons. - * @tparam A the type of the elements contained in this hash set. - * - * How levels work: - * - * When looking up or adding elements, the part of the hashcode that is used to address the children array depends - * on how deep we are in the tree. This is accomplished by having a level parameter in all internal methods - * that starts at 0 and increases by 5 (32 = 2^5) every time we go deeper into the tree. - * - * hashcode (binary): 00000000000000000000000000000000 - * level=0 (depth=0) ^^^^^ - * level=5 (depth=1) ^^^^^ - * level=10 (depth=2) ^^^^^ - * ... - * - * Be careful: a non-toplevel HashTrieSet is not a self-contained set, so e.g. calling contains on it will not work! - * It relies on its depth in the Trie for which part of a hash to use to address the children, but this information - * (the level) is not stored due to storage efficiency reasons but has to be passed explicitly! 
- * - * How bitmap and elems correspond: - * - * A naive implementation of a HashTrieSet would always have an array of size 32 for children and leave the unused - * children empty (null). But that would be very wasteful regarding memory. Instead, only non-empty children are - * stored in elems, and the bitmap is used to encode which elem corresponds to which child bucket. The lowest 1 bit - * corresponds to the first element, the second-lowest to the second, etc. - * - * bitmap (binary): 00010000000000000000100000000000 - * elems: [a,b] - * children: ---b----------------a----------- - */ - @SerialVersionUID(-1260675327783828535L) - class HashTrieSet[A](private[HashSet] var bitmap: Int, private[collection] var elems: Array[HashSet[A]], private[HashSet] var size0: Int) - extends HashSet[A] { - @inline override final def size = size0 - // assert(Integer.bitCount(bitmap) == elems.length) - // assertion has to remain disabled until scala/bug#6197 is solved - // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]])) - - override protected def get0(key: A, hash: Int, level: Int): Boolean = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - if (bitmap == - 1) { - elems(index & 0x1f).get0(key, hash, level + 5) - } else if ((bitmap & mask) != 0) { - val offset = Integer.bitCount(bitmap & (mask-1)) - elems(offset).get0(key, hash, level + 5) - } else - false - } + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + val newCachedHashCode = keyHash0 + keyHash1 - override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask-1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.updated0(key, hash, level + 5) - if (sub eq subNew) this - else { - val elemsNew = new Array[HashSet[A]](elems.length) - 
System.arraycopy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - // assert (subNew.size - sub.size == 1) - new HashTrieSet(bitmap, elemsNew, size + 1) + if (mask0 < mask1) { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) + } else { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) } } else { - val elemsNew = new Array[HashSet[A]](elems.length + 1) - System.arraycopy(elems, 0, elemsNew, 0, offset) - elemsNew(offset) = new HashSet1(key, hash) - System.arraycopy(elems, offset, elemsNew, offset + 1, elems.length - offset) - val bitmapNew = bitmap | mask - new HashTrieSet(bitmapNew, elemsNew, size + 1) + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) + + new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) } } + } + def hasPayload: Boolean = dataMap != 0 - override private[immutable] def union0(that: HashSet[A], level: Int) = that match { - case that if that eq this => - // shortcut for when that is this - // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" - // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B - // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking - // at these nodes. 
- this - case that: LeafHashSet[A] => - val index = (that.hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask - 1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - if (sub eq that) this - else { - val sub1 = sub.union0(that, level + 5) - if (sub eq sub1) this - else { - val elems1 = elems.clone() - // its just a little faster than new Array[HashSet[A]](elems.length); System.arraycopy(elems, 0, elems1, 0, elems.length) - elems1(offset) = sub1 - new HashTrieSet(bitmap, elems1, size + (sub1.size - sub.size)) - } - } - } else { - val elems1 = new Array[HashSet[A]](elems.length + 1) - System.arraycopy(elems, 0, elems1, 0, offset) - elems1(offset) = that - System.arraycopy(elems, offset, elems1, offset + 1, elems.length - offset) - val bitmap1 = bitmap | mask - new HashTrieSet(bitmap1, elems1, size + that.size) - } - case that: HashTrieSet[A] => - def addMaybeSubset(larger: HashTrieSet[A], smaller: HashTrieSet[A]): HashTrieSet[A] = { - var resultElems: Array[HashSet[A]] = null - var ai = 0 - var bi = 0 - var abm = larger.bitmap - var bbm = smaller.bitmap - val a = larger.elems - val b = smaller.elems - - //larger has all the bits or smaller, and if they have the same bits, is at least the bigger - //so we try to merge `smaller`into `larger`and hope that `larger is a superset - - //the additional size in the results, so the eventual size of the result is larger.size + additionalSize - var additionalSize = 0 - - // could be lsb = Integer.lowestOneBit(abm) - //but is this faster!! 
- // keep fastest in step with adjustments in the loop - //we know abm contains all of the bits in bbm, we only loop through bbm - //bsb is the next lowest bit in smaller - var bsb = bbm ^ (bbm & (bbm - 1)) - while (bsb != 0) { - val skippedBitsInA = abm & (bsb - 1) - ai += Integer.bitCount(skippedBitsInA) - abm ^= skippedBitsInA - val aai = a(ai) - val bbi = b(bi) - - val result = if (aai eq bbi) aai - else aai.union0(bbi, level + 5) - if (result ne aai) { - if (resultElems eq null) - resultElems = a.clone() - additionalSize += result.size - aai.size - //assert (result.size > aai.size) - resultElems(ai) = result - } - abm ^= bsb - bbm ^= bsb - bsb = bbm ^ (bbm & (bbm - 1)) + def payloadArity: Int = bitCount(dataMap) - ai += 1 - bi += 1 - } - // we don't have to check whether the result is a leaf, since union will only make the set larger - // and this is not a leaf to begin with. - if (resultElems eq null) larger // happy days - no change - else new HashTrieSet(larger.bitmap, resultElems, larger.size + additionalSize) - } + def hasNodes: Boolean = nodeMap != 0 - def addDistinct(that: HashTrieSet[A]): HashTrieSet[A] = { - - // the sets are distinct, so its a bit simpler to combine - // and we can avoid all of the quite expensive size calls on the children - - var ai = 0 - var bi = 0 - var offset = 0 - val abm = this.bitmap - val bbm = that.bitmap - val a = this.elems - val b = that.elems - var allBits = abm | bbm - - val resultElems = new Array[HashSet[A]](Integer.bitCount(allBits)) - // could be lsb = Integer.lowestOneBit(abm) - //but is this faster!! 
- // keep fastest in step with adjustments in the loop - // lowest remaining bit - var lsb = allBits ^ (allBits & (allBits - 1)) - - while (lsb != 0) { - if ((lsb & abm) != 0) { - resultElems(offset) = a(ai) - ai += 1 - } else { - resultElems(offset) = b(bi) - bi += 1 - } - offset += 1 - allBits ^= lsb - lsb = allBits ^ (allBits & (allBits - 1)) - } - // we don't have to check whether the result is a leaf, since union will only make the set larger - // and this is not a leaf to begin with. - new HashTrieSet(abm | bbm, resultElems, this.size + that.size) - } + def nodeArity: Int = bitCount(nodeMap) - def addCommon(that: HashTrieSet[A]): HashTrieSet[A] = { - var ai = 0 - var bi = 0 - val abm = this.bitmap - val bbm = that.bitmap - val a = this.elems - val b = that.elems - var allBits = abm | bbm - val resultElems = new Array[HashSet[A]](Integer.bitCount(allBits)) - - //output index - var offset = 0 - - // the size of the results so far - var rs = 0 - - // could be alsb = Integer.lowestOneBit(abm) - //but is this faster!! - // keep fastest in step with adjustments in the loop - // lowest remaining bit - var lsb = allBits ^ (allBits & (allBits - 1)) - - var result: HashSet[A] = null - // loop as long as there are bits left in either abm or bbm - while (lsb != 0) { - if ((lsb & abm) != 0) { - if ((lsb & bbm) != 0) { - // lsb is in a and b, so combine - val aai = a(ai) - val bbi = b(bi) - - result = if (aai eq bbi) aai - else aai.union0(bbi, level + 5) - ai += 1 - bi += 1 - } else { - // lsb is in a - result = a(ai) - ai += 1 - } - } else { - // lsb is in b - result = b(bi) - bi += 1 - } - // update lsb - allBits ^= lsb - lsb = allBits ^ (allBits & (allBits - 1)) + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) - resultElems(offset) = result - rs += result.size - offset += 1 - } - // we don't have to check whether the result is a leaf, since union will only make the set larger - // and this is not a leaf to begin with. 
- new HashTrieSet(abm | bbm, resultElems, rs) + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) - } + def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedSetNode[A]( + dataMap = dataMap, + nodeMap = nodeMap, + content = dst, + originalHashes = originalHashes, + size = size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } - // if we have a subset/superset relationship, then we can merge and not allocate if thats a real subset - // we check on that relationship based on the bitssets, and if he bitsets are the same than we look at the size - // to work out the subset vs the superset - // a superset here is a trie that has all the bits of the other and is possible to be a superset - // - // if the bits are distinct we can skip some processing so we have a path for that - // otherwise the general case + def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx - val abm = this.bitmap - val bbm = that.bitmap - val allBits = abm | bbm + val src = this.content + val dst = new Array[Any](src.length + 1) - if (allBits == abm && (allBits != bbm || this.size >= that.size)) addMaybeSubset(this, that) - else if (allBits == bbm) addMaybeSubset(that, this) - else if ((abm & bbm) == 0) addDistinct(that) - else addCommon(that) + // copy 'src' and insert 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + 1, src.length - idx) + val dstHashes = insertElement(originalHashes, dataIx, originalHash) - case _ => this + new 
BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash) + } + + def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = key + + new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndRemoveValue(bitpos: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - 1) + + // copy 'src' and remove 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + 1, dst, idx, src.length - idx - 1) + val dstHashes = removeElement(originalHashes, dataIx) + new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash) + } + + def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - 1 + 1) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1) + val dstHashes = removeElement(originalHashes, dataIx) + new BitmapIndexedSetNode[A]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = 
cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode + ) + } + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. + * + * Note: This method will mutate `this`, and will mutate `this.content` + * + * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, + * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place, + * we reuse this.content by shifting data/nodes around, rather than allocating a new array. + * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the element currently at `bitpos` + * @param node the node to place at `bitpos` + */ + def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld) + content(idxNew) = node + + this.dataMap = this.dataMap ^ bitpos + this.nodeMap = this.nodeMap | bitpos + this.originalHashes = removeElement(originalHashes, dataIx) + this.size = this.size - 1 + node.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val src = this.content + val dst = new Array[Any](src.length - 1 + 1) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = node.getPayload(0) + arraycopy(src, idxNew, dst, idxNew + 1, 
idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedSetNode[A]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node. + * + * Note: This method will mutate `this`, and will mutate `this.content` + * + * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, + * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place, + * we reuse this.content by shifting data/nodes around, rather than allocating a new array. + * + * @param bitpos the bit position of the node to migrate inline + * @param oldNode the node currently stored at position `bitpos` + * @param node the node containing the single element to migrate inline + */ + def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val element = node.getPayload(0) + arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew) + content(dataIxNew) = element + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + + this.dataMap = this.dataMap | bitpos + this.nodeMap = this.nodeMap ^ bitpos + this.originalHashes = dstHashes + this.size = this.size - oldNode.size + 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + } + + def foreach[U](f: A => U): 
Unit = { + val thisPayloadArity = payloadArity + var i = 0 + while (i < thisPayloadArity) { + f(getPayload(i)) + i += 1 } - override private[immutable] def intersect0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { - case that if that eq this => - // shortcut for when that is this - // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" - // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B - // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking - // at these nodes! - this - case that: LeafHashSet[A] => - // when that is a leaf, we can switch to the simpler Tree/Leaf implementation - // it is OK to swap the arguments because intersect is symmetric - // (we can't do this in case of diff, which is not symmetric) - that.intersect0(this, level, buffer, offset0) - case that: HashTrieSet[A] => - val a = this.elems - var abm = this.bitmap - var ai = 0 - - val b = that.elems - var bbm = that.bitmap - var bi = 0 - - // if the bitmasks do not overlap, the result is definitely empty so we can abort here - if ((abm & bbm) == 0) - return null - - // fetch a new temporary array that is guaranteed to be big enough (32 elements) - var offset = offset0 - var rs = 0 - var rbm = 0 - - // loop as long as there are bits left that are set in both abm and bbm - while ((abm & bbm) != 0) { - // highest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // highest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - if (alsb == blsb) { - val sub1 = a(ai).intersect0(b(bi), level + 5, buffer, offset) - if (sub1 ne null) { - rs += sub1.size - rbm |= alsb - buffer(offset) = sub1 - offset += 1 + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity) { + getNode(j).foreach(f) + j += 1 + } + } + + def subsetOf(that: SetNode[A], shift: Int): Boolean = if 
(this eq that) true else that match { + case _: HashCollisionSetNode[A] => false + case node: BitmapIndexedSetNode[A] => + val thisBitmap = this.dataMap | this.nodeMap + val nodeBitmap = node.dataMap | node.nodeMap + + if ((thisBitmap | nodeBitmap) != nodeBitmap) + return false + + var bitmap = thisBitmap & nodeBitmap + var bitsToSkip = numberOfTrailingZeros(bitmap) + + var isValidSubset = true + while (isValidSubset && bitsToSkip < HashCodeLength) { + val bitpos = bitposFrom(bitsToSkip) + + isValidSubset = + if ((this.dataMap & bitpos) != 0) { + if ((node.dataMap & bitpos) != 0) { + // Data x Data + val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos)) + val payload1 = node.getPayload(indexFrom(node.dataMap, bitpos)) + payload0 == payload1 + } else { + // Data x Node + val thisDataIndex = indexFrom(this.dataMap, bitpos) + val payload = this.getPayload(thisDataIndex) + val subNode = that.getNode(indexFrom(node.nodeMap, bitpos)) + val elementUnimprovedHash = getHash(thisDataIndex) + val elementHash = improve(elementUnimprovedHash) + subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize) } - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb - ai += 1 - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb - bi += 1 - } else if (unsignedCompare(alsb - 1, blsb - 1)) { - // alsb is smaller than blsb, or alsb is set and blsb is 0 - // in any case, alsb is guaranteed to be set here! - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb - ai += 1 - } else { - // blsb is smaller than alsb, or blsb is set and alsb is 0 - // in any case, blsb is guaranteed to be set here! 
- // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb - bi += 1 + } else ((node.dataMap & bitpos) == 0) && { + // Node x Node + val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos)) + val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos)) + subNode0.subsetOf(subNode1, shift + BitPartitionSize) } - } - if (rbm == 0) { - // if the result bitmap is empty, the result is the empty set - null - } else if (rs == size) { - // if the result has the same number of elements as this, it must be identical to this, - // so we might as well return this - this - } else if (rs == that.size) { - // if the result has the same number of elements as that, it must be identical to that, - // so we might as well return that - that - } else { - val length = offset - offset0 - if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) - buffer(offset0) - else { - val elems = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems, 0, length) - new HashTrieSet[A](rbm, elems, rs) + val newBitmap = bitmap ^ bitpos + bitmap = newBitmap + bitsToSkip = numberOfTrailingZeros(newBitmap) + } + isValidSubset + } + + override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = { + if (size == 0) this + else if (size == 1) { + if (pred(getPayload(0)) != flipped) this else SetNode.empty + } else if (nodeMap == 0) { + // Performance optimization for nodes of depth 1: + // + // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler + // approach: + // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter + // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations + // * traverse the content array once more, placing each passing element (according to `newDatamap`) in the new content and originalHashes arrays + // + // note: + // * 
this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while(i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + newCachedHashCode += improve(getHash(dataIndex)) } + + dataIndex += 1 } - case _ => null - } - override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match { - case that if that eq this => - // shortcut for when that is this - // this happens often for nodes deeper in the tree, especially when that and this share a common "heritage" - // e.g. you have a large set A and do some small operations (adding and removing elements) to it to create B - // then A and B will have the vast majority of nodes in common, and this eq check will allow not even looking - // at these nodes! 
- null - case that: HashSet1[A] => - removed0(that.key, that.hash, level) - case that: HashTrieSet[A] => - val a = this.elems - var abm = this.bitmap - var ai = 0 - - val b = that.elems - var bbm = that.bitmap - var bi = 0 - - // fetch a new temporary array that is guaranteed to be big enough (32 elements) - var offset = offset0 - var rs = 0 - var rbm = 0 - - // loop until there are no more bits in abm - while(abm!=0) { - // highest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // highest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - if (alsb == blsb) { - val sub1 = a(ai).diff0(b(bi), level + 5, buffer, offset) - if (sub1 ne null) { - rs += sub1.size - rbm |= alsb - buffer(offset) = sub1 - offset += 1 - } - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb; ai += 1 - // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb; bi += 1 - } else if (unsignedCompare(alsb - 1, blsb - 1)) { - // alsb is smaller than blsb, or alsb is set and blsb is 0 - // in any case, alsb is guaranteed to be set here! - val sub1 = a(ai) - rs += sub1.size - rbm |= alsb - buffer(offset) = sub1; offset += 1 - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb; ai += 1 - } else { - // blsb is smaller than alsb, or blsb is set and alsb is 0 - // in any case, blsb is guaranteed to be set here! 
- // clear lowest remaining one bit in bbm and increase the b index - bbm &= ~blsb; bi += 1 + i += 1 + } + + if (newDataMap == 0) { + SetNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex) = content(oldIndex) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 } + j += 1 } - if (rbm == 0) { - null - } else if (rs == this.size) { - // if the result has the same number of elements as this, it must be identical to this, - // so we might as well return this - this - } else { - val length = offset - offset0 - if (length == 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) - buffer(offset0) - else { - val elems = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems, 0, length) - new HashTrieSet[A](rbm, elems, rs) + + new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. 
These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + + // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned, + // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in + // the parent anyways). This would probably involve changing the return type of filterImpl to `AnyRef` which may + // return at runtime a SetNode[A], or a tuple of (A, Int, Int) + + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) } - } - case that: HashSetCollision1[A] => - // we remove the elements using removed0 so we can use the fact that we know the hash of all elements - // to be removed - @tailrec def removeAll(s:HashSet[A], r:ListSet[A]) : HashSet[A] = - if(r.isEmpty || (s eq null)) s - else removeAll(s.removed0(r.head, that.hash, level), r.tail) - removeAll(this, that.ks) - case _ => 
this - } - override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = { - val index = (hash >>> level) & 0x1f - val mask = (1 << index) - val offset = Integer.bitCount(bitmap & (mask-1)) - if ((bitmap & mask) != 0) { - val sub = elems(offset) - val subNew = sub.removed0(key, hash, level + 5) - if (sub eq subNew) this - else if (subNew eq null) { - val bitmapNew = bitmap ^ mask - if (bitmapNew != 0) { - if (elems.length == 2 && !elems(offset ^ 1).isInstanceOf[HashTrieSet[_]] ) { - // if we have only one child, which is not a HashTrieSet but a self-contained set like - // HashSet1 or HashSetCollision1, return the child instead - elems(offset ^ 1) + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos } else { - val elemsNew = new Array[HashSet[A]](elems.length - 1) - System.arraycopy(elems, 0, elemsNew, 0, offset) - System.arraycopy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1) - //assert (sub.size == 1) - val sizeNew = size - 1 - new HashTrieSet(bitmapNew, elemsNew, sizeNew) + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode } - } else - null - } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieSet[_]]) { - subNew - } else { - val elemsNew = new Array[HashSet[A]](elems.length) - System.arraycopy(elems, 0, elemsNew, 0, elems.length) - elemsNew(offset) = subNew - //assert (subNew.size - sub.size == -1) - val sizeNew = size -1 - new HashTrieSet(bitmap, elemsNew, sizeNew) + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + 
nodesToMigrateToData = mutable.Queue.empty + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 } - } else { - this - } - } - override def equals(other: Any): Boolean = { - other match { - case that: HashTrieSet[A] => - (this eq that) || (this.bitmap == that.bitmap && this.size == that.size && - util.Arrays.equals(this.elems.asInstanceOf[Array[AnyRef]], that.elems.asInstanceOf[Array[AnyRef]])) - case _ : HashSet[_] => false - case _ => super.equals(other) + i += 1 } + + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) } + } - override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = (that eq this) || (that match { - case that: HashTrieSet[A] - if (this.bitmap & ~that.bitmap) == 0 - && this.size <= that.size => - // create local mutable copies of members - var abm = this.bitmap - val a = this.elems - var ai = 0 - val b = that.elems - var bbm = that.bitmap - var bi = 0 - if ((abm & bbm) == abm) { - // I tried rewriting this using tail recursion, but the generated java byte code was less than optimal - while(abm!=0) { - // highest remaining bit in abm - val alsb = abm ^ (abm & (abm - 1)) - // highest remaining bit in bbm - val blsb = bbm ^ (bbm & (bbm - 1)) - // if both trees have a bit set at the same position, we need to check the subtrees - if (alsb == blsb) { - // we are doing a comparison of a child of this with a child of that, - // so we have to increase the level by 5 to keep track of how deep we are in the tree - if (!a(ai).subsetOf0(b(bi), level + 5)) - return false - // clear lowest remaining one bit in abm and increase the a index - abm &= ~alsb; ai += 1 + 
override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) this + else if (size == 1) { + val h = getHash(0) + if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val originalHash = getHash(dataIndex) + val hash = improve(originalHash) + + if (!bm.contains(payload, originalHash, hash, shift)) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += hash } - // clear lowermost remaining one bit in bbm and increase the b index - // we must do this in any case - bbm &= ~blsb; bi += 1 + + dataIndex += 1 + } 
else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + + val newSubNode: SetNode[A] = + if ((bitpos & bm.dataMap) != 0) { + val thatDataIndex = indexFrom(bm.dataMap, bitpos) + val thatPayload = bm.getPayload(thatDataIndex) + val thatOriginalHash = bm.getHash(thatDataIndex) + val thatHash = improve(thatOriginalHash) + oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) + } else if ((bitpos & bm.nodeMap) != 0) { + oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) + } else { + oldSubNode + } + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 } - true - } else { - // the bitmap of this contains more one bits than the bitmap of that, - // so this can not possibly be a subset of that - false - } - case _ => - // if the other set is a HashTrieSet but has less elements than this, it can not be a subset - // if the other set is a HashSet1, we can not be a subset of it because we are a HashTrieSet with at least two children (see assertion) - // if the other set is a HashSetCollision1, we can not be a subset of it because we are a HashTrieSet with at least two different hash codes - // if the other set is the empty set, we are not a subset of it because we are not empty - false - }) - - override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): 
HashSet[A] = { - // current offset - var offset = offset0 - // result size - var rs = 0 - // bitmap for kept elems - var kept = 0 - // loop over all elements - var i = 0 - while (i < elems.length) { - val result = elems(i).filter0(p, negate, level + 5, buffer, offset) - if (result ne null) { - buffer(offset) = result - offset += 1 - // add the result size - rs += result.size - // mark the bit i as kept - kept |= (1 << i) + + i += 1 } - i += 1 + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) } - if (offset == offset0) { - // empty - null - } else if (rs == size) { - // unchanged - this - } else if (offset == offset0 + 1 && !buffer(offset0).isInstanceOf[HashTrieSet[A]]) { - // leaf - buffer(offset0) - } else { - // we have to return a HashTrieSet - val length = offset - offset0 - val elems1 = new Array[HashSet[A]](length) - System.arraycopy(buffer, offset0, elems1, 0, length) - val bitmap1 = if (length == elems.length) { - // we can reuse the original bitmap - bitmap - } else { - // calculate new bitmap by keeping just bits in the kept bitmask - keepBits(bitmap, kept) + case _: HashCollisionSetNode[A] => + // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the + // same depth + throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") + } + + /** Utility method only for use in `diff` and `filterImpl` + * + * @param newSize the size of the new SetNode + * @param newDataMap the dataMap of the new SetNode + * @param newNodeMap the nodeMap of the new SetNode + * @param minimumIndex the minimum index (in range of [0, 31]) for 
which there are sub-nodes or data beneath the new + * SetNode + * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new + * SetNode + * @param nodesToPassThroughMap bitmap representing all nodes that are just passed from `this` to the new SetNode + * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode, + * but which were nodes in `this` + * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated + * to data, in positions in the `nodeMigrateToDataTargetMap` + * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode + * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode + * @param newCachedHashCode the cached java keyset hashcode of the new SetNode + */ + private[this] def newNodeFrom( + newSize: Int, + newDataMap: Int, + newNodeMap: Int, + minimumIndex: Int, + oldDataPassThrough: Int, + nodesToPassThroughMap: Int, + nodeMigrateToDataTargetMap: Int, + nodesToMigrateToData: mutable.Queue[SetNode[A]], + mapOfNewNodes: Int, + newNodes: mutable.Queue[SetNode[A]], + newCachedHashCode: Int): BitmapIndexedSetNode[A] = { + if (newSize == 0) { + SetNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. 
Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex) = getPayload(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(newDataIndex) = node.getPayload(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + // we need not check for null here. 
If mapOfNewNodes != 0, then newNodes must not be null + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 } - new HashTrieSet(bitmap1, elems1, rs) + + i += 1 } + + new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) } + } - override def iterator = new TrieIterator[A](elems.asInstanceOf[Array[Iterable[A]]]) { - final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedSetNode[_] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false } - override def foreach[U](f: A => U): Unit = { + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true var i = 0 - while (i < elems.length) { - elems(i).foreach(f) + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) i += 1 } - } - } - protected def elemHashCode(key: Any) = key.## - protected final def improve(hcode: Int) = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) + isEqual + } } - private[collection] def computeHash(key: Any) = improve(elemHashCode(key)) + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") - /** - * Calculates the maximum buffer size given the maximum possible total size of the trie-based collection - * @param size the maximum size of the collection to be generated - * 
@return the maximum buffer size - */ - @inline private def bufferSize(size: Int): Int = (size + 6) min (32 * 7) + override def toString: String = f"BitmapIndexedSetNode(size=$size, dataMap=$dataMap%x, nodeMap=$nodeMap%x)" // content=${scala.runtime.ScalaRunTime.stringOf(content)} - /** - * In many internal operations the empty set is represented as null for performance reasons. This method converts - * null to the empty set for use in public methods - */ - @inline private def nullToEmpty[A](s: HashSet[A]): HashSet[A] = if (s eq null) empty[A] else s - - /** - * Utility method to keep a subset of all bits in a given bitmap - * - * Example - * bitmap (binary): 00000001000000010000000100000001 - * keep (binary): 1010 - * result (binary): 00000001000000000000000100000000 - * - * @param bitmap the bitmap - * @param keep a bitmask containing which bits to keep - * @return the original bitmap with all bits where keep is not 1 set to 0 - */ - private def keepBits(bitmap: Int, keep: Int): Int = { - var result = 0 - var current = bitmap - var kept = keep - while (kept != 0) { - // lowest remaining bit in current - val lsb = current ^ (current & (current - 1)) - if ((kept & 1) != 0) { - // mark bit in result bitmap - result |= lsb - } - // clear lowest remaining one bit in abm - current &= ~lsb - // look at the next kept bit - kept >>>= 1 + override def copy(): BitmapIndexedSetNode[A] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() + i += 1 } - result + new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) } - // unsigned comparison - @inline private[this] def unsignedCompare(i: Int, j: Int) = - (i < j) ^ (i < 0) ^ (j < 0) - - @SerialVersionUID(2L) private class SerializationProxy[A,B](@transient private var orig: HashSet[A]) extends Serializable { - 
private def writeObject(out: java.io.ObjectOutputStream) { - val s = orig.size - out.writeInt(s) - for (e <- orig) { - out.writeObject(e) + override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) } - } - private def readObject(in: java.io.ObjectInputStream) { - orig = empty - val s = in.readInt() - for (i <- 0 until s) { - val e = in.readObject().asInstanceOf[A] - orig = orig + e + // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `this` + var anyChangesMadeSoFar = false + + // bitmap containing `1` in any position that has any descendant in either left or right, either data or node + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataLeftOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var 
bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + if (getHash(leftIdx) == bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { + leftDataRightDataLeftOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } } - } - private def readResolve(): AnyRef = orig - } + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets - /** Builder for HashSet. 
- */ - private[collection] final class HashSetBuilder[A] extends mutable.ReusableBuilder[A, HashSet[A]] { + if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { + // nothing from `bm` will make it into the result -- return early + return this + } - /* Nodes in the tree are either regular HashSet1, HashTrieSet, HashSetCollision1, or a mutable HashTrieSet - mutable HashTrieSet nodes are designated by having size == -1 + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) - mutable HashTrieSet nodes can have child nodes that are mutable, or immutable - immutable HashTrieSet child nodes can only be immutable + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 - mutable HashTrieSet elems are always a Array of size 32,size -1, bitmap -1 - */ + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 - /** The root node of the partially build hashmap */ - private var rootNode: HashSet[A] = HashSet.empty + val nextShift = shift + Node.BitPartitionSize - private def isMutable(hs: HashSet[A]) = { - hs.isInstanceOf[HashTrieSet[A]] && hs.size == -1 - } + var compressedDataIdx = 0 + var compressedNodeIdx = 0 - private def makeMutable(hs: HashTrieSet[A]): HashTrieSet[A] = { - if (isMutable(hs)) hs - else { - val elems = new Array[HashSet[A]](32) - var bit = 0 - var iBit = 0 - while (bit < 32) { - if ((hs.bitmap & (1 << bit)) != 0) { - elems(bit) = hs.elems(iBit) - iBit += 1 - } - bit += 1 - } - new HashTrieSet[A](-1, elems, -1) - } - } + var bitpos = minimumBitPos + var finished = false - private def makeImmutable(hs: HashSet[A]): HashSet[A] = { - hs match { - case trie: HashTrieSet[A] if isMutable(trie) => - var bit = 0 - var bitmap = 0 - var size = 0 - while (bit < 32) { - if (trie.elems(bit) ne null) - trie.elems(bit) = makeImmutable(trie.elems(bit)) 
- if (trie.elems(bit) ne null) { - bitmap |= 1 << bit - size += trie.elems(bit).size + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val leftNode = getNode(leftNodeIdx) + val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) + if (leftNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftPayload = getPayload(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) } - bit += 1 + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode } - Integer.bitCount(bitmap) match { - case 0 => null - case 1 - if trie.elems(Integer.numberOfTrailingZeros(bitmap)).isInstanceOf[LeafHashSet[A]] => - trie.elems(Integer.numberOfTrailingZeros(bitmap)) - - case bc => - val elems = if (bc == 32) trie.elems else { - val elems = new Array[HashSet[A]](bc) - var oBit = 0 - bit = 0 - while (bit < 32) { - if (trie.elems(bit) ne null) { - elems(oBit) = trie.elems(bit) - oBit += 1 - } - bit += 1 - } - assert(oBit == bc) - elems + else if ((bitpos & leftNodeRightData) != 0) { + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + val leftNode = getNode(leftNodeIdx) + val updated = leftNode.updated( + element = bm.getPayload(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift + ) + if (updated ne leftNode) { + anyChangesMadeSoFar = true } - trie.size0 = size - trie.elems = 
elems - trie.bitmap = bitmap - trie + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + val originalHash = originalHashes(leftDataIdx) + newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx 
- 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 } - case _ => hs - } - } - override def clear(): Unit = { - rootNode match { - case trie: HashTrieSet[A] if isMutable(trie) => - util.Arrays.fill(trie.elems.asInstanceOf[Array[AnyRef]], null) - case _ => rootNode = HashSet.empty + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } } + + if (anyChangesMadeSoFar) + new BitmapIndexedSetNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode + ) + else this + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i), getHash(i)) + i += 1 } - override def result(): HashSet[A] = { - rootNode = nullToEmpty(makeImmutable(rootNode)) - VM.releaseFence() - rootNode + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 } + } + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + val thisPayloadArity = payloadArity + var pass = true + var i = 0 + while (i < thisPayloadArity && pass) { + 
pass &&= f(getPayload(i), getHash(i)) + i += 1 + } - override def +=(elem1: A, elem2: A, elems: A*): HashSetBuilder.this.type = { - this += elem1 - this += elem2 - this ++= elems + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity && pass) { + pass &&= getNode(j).foreachWithHashWhile(f) + j += 1 } + pass + } +} + +private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A]) extends SetNode[A] { - override def +=(elem: A): HashSetBuilder.this.type = { - val hash = computeHash(elem) - rootNode = addOne(rootNode, elem, hash, 0) + import Node._ + + require(content.length >= 2) + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean = + this.hash == hash && content.contains(element) + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = + if (this.contains(element, originalHash, hash, shift)) { this + } else { + new HashCollisionSetNode[A](originalHash, hash, content.appended(element)) } - override def ++=(xs: TraversableOnce[A]): HashSetBuilder.this.type = xs match { - case hs: HashSet[A] => - if (rootNode.isEmpty) { - if (!hs.isEmpty) - rootNode = hs - } else - rootNode = addHashSet(rootNode, hs, 0) - this - // case hs: HashMap.HashMapKeys => - // //TODO - case hs: mutable.HashSet[A] => - //TODO - super.++=(xs) - case _ => - super.++=(xs) - } + /** + * Remove an element from the hash collision node. + * + * When after deletion only one element remains, we return a bit-mapped indexed node with a + * singleton element and a hash-prefix for trie level 0. This node will be then a) either become + * the new root, or b) unwrapped and inlined deeper in the trie. 
+ */ + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = + if (!this.contains(element, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(element0 => element0 == element) + // assert(updatedContent.size == content.size - 1) - def makeMutableTrie(aLeaf: LeafHashSet[A], bLeaf: LeafHashSet[A], level: Int): HashTrieSet[A] = { - val elems = new Array[HashSet[A]](32) - val aRawIndex = (aLeaf.hash >>> level) & 0x1f - val bRawIndex = (bLeaf.hash >>> level) & 0x1f - if (aRawIndex == bRawIndex) { - elems(aRawIndex) = makeMutableTrie(aLeaf, bLeaf, level + 5) - } else { - elems(aRawIndex) = aLeaf - elems(bRawIndex) = bLeaf + updatedContent.size match { + case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash) + case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent) } - new HashTrieSet[A](-1, elems, -1) } - def makeMutableTrie(leaf: LeafHashSet[A], elem: A, elemImprovedHash: Int, level: Int): HashTrieSet[A] = { - makeMutableTrie(leaf, new HashSet1(elem, elemImprovedHash), level) + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): SetNode[A] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getPayload(index: Int): A = content(index) + + override def getHash(index: Int): Int = originalHash + + def size: Int = content.length + + def foreach[U](f: A => U): Unit = { + val iter = content.iterator + while (iter.hasNext) { + f(iter.next()) } + } - private def addOne(toNode: HashSet[A], elem: A, improvedHash: Int, level: Int): HashSet[A] = { - toNode match { - case leaf: LeafHashSet[A] => - if (leaf.hash == improvedHash) - leaf.updated0(elem, improvedHash, level) - else makeMutableTrie(leaf, elem, improvedHash, level) - - case trie: HashTrieSet[A] if isMutable((trie)) 
=> - val arrayIndex = (improvedHash >>> level) & 0x1f - val old = trie.elems(arrayIndex) - trie.elems(arrayIndex) = if (old eq null) new HashSet1(elem, improvedHash) - else addOne(old, elem, improvedHash, level + 5) - trie - - case trie: HashTrieSet[A] => - val rawIndex = (improvedHash >>> level) & 0x1f - val arrayIndex = compressedIndex(trie, rawIndex) - if (arrayIndex == -1) - addOne(makeMutable(trie), elem, improvedHash, level) - else { - val old = trie.elems(arrayIndex) - val merged = if (old eq null) new HashSet1(elem, improvedHash) - else addOne(old, elem, improvedHash, level + 5) - - if (merged eq old) trie - else { - val newMutableTrie = makeMutable(trie) - newMutableTrie.elems(rawIndex) = merged - newMutableTrie - } - } - case empty if empty.isEmpty => toNode.updated0(elem, improvedHash, level) - } + + override def cachedJavaKeySetHashCode: Int = size * hash + + def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { + case node: HashCollisionSetNode[A] => + this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains) + case _ => + false + } + + override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + SetNode.empty + } else if (newContentLength == 1) { + new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash) + } else if (newContent.length == content.length) this + else new HashCollisionSetNode(originalHash, hash, newContent) + } + + override def diff(that: SetNode[A], shift: Int): SetNode[A] = + filterImpl(that.contains(_, originalHash, hash, shift), flipped = true) + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionSetNode[_] => + (this eq node) || + (this.hash == node.hash) && + (this.content.size == node.content.size) && + 
this.content.forall(node.content.contains) + case _ => false } - /** return the bit index of the rawIndex in the bitmap of the trie, or -1 if the bit is not in the bitmap */ - private def compressedIndex(trie: HashTrieSet[A], rawIndex: Int): Int = { - if (trie.bitmap == -1) rawIndex - else if ((trie.bitmap & (1 << rawIndex)) == 0) { - //the value is not in this index - -1 + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy(): HashCollisionSetNode[A] = new HashCollisionSetNode[A](originalHash, hash, content) + + override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match { + case hc: HashCollisionSetNode[A] => + if (hc eq this) { + this } else { - Integer.bitCount(((1 << rawIndex) - 1) & trie.bitmap) + var newContent: VectorBuilder[A] = null + val iter = hc.content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (!content.contains(nextPayload)) { + if (newContent eq null) { + newContent = new VectorBuilder() + newContent.addAll(this.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) } + case _: BitmapIndexedSetNode[A] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next.asInstanceOf[A], originalHash) } - /** return the array index for the rawIndex, in the trie elem array - * The trei may be mutable, or immutable - * returns -1 if the trie is compressed and the index in not in the array */ - private def trieIndex(trie: HashTrieSet[A], rawIndex: Int): Int = { - if (isMutable(trie) || trie.bitmap == -1) rawIndex - else 
compressedIndex(trie, rawIndex) + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + var stillGoing = true + val iter = content.iterator + while (iter.hasNext && stillGoing) { + val next = iter.next() + stillGoing &&= f(next.asInstanceOf[A], originalHash) } + stillGoing + } +} - private def addHashSet(toNode: HashSet[A], toBeAdded: HashSet[A], level: Int): HashSet[A] = { - toNode match { - case aLeaf: LeafHashSet[A] => addToLeafHashSet(aLeaf, toBeAdded, level) - case trie: HashTrieSet[A] => addToTrieHashSet(trie, toBeAdded, level) - case empty if empty.isEmpty => toNode - } +private final class SetIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[A, SetNode[A]](rootNode) { + + def next() = { + if (!hasNext) Iterator.empty.next() + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class SetReverseIterator[A](rootNode: SetNode[A]) + extends ChampBaseReverseIterator[A, SetNode[A]](rootNode) { + + def next(): A = { + if (!hasNext) Iterator.empty.next() + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } + +} + +private final class SetHashIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[AnyRef, SetNode[A]](rootNode) { + private[this] var hash = 0 + override def hashCode(): Int = hash + + def next(): AnyRef = { + if (!hasNext) Iterator.empty.next() + + hash = currentValueNode.getHash(currentValueCursor) + currentValueCursor += 1 + this + } + +} + + +/** + * $factoryInfo + * + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + @transient + private final val EmptySet = new HashSet(SetNode.empty) + + def empty[A]: HashSet[A] = + EmptySet.asInstanceOf[HashSet[A]] + + def from[A](source: collection.IterableOnce[A]): HashSet[A] = + source match { + case hs: HashSet[A] => hs + case _ if 
source.knownSize == 0 => empty[A] + case _ => (newBuilder[A] ++= source).result() } - private def addToTrieHashSet(toNode: HashTrieSet[A], toBeAdded: HashSet[A], level: Int): HashSet[A] = { - if (toNode eq toBeAdded) toNode - else toBeAdded match { - case bLeaf: LeafHashSet[A] => - val rawIndex = (bLeaf.hash >>> level) & 0x1f - val arrayIndex = trieIndex(toNode, rawIndex) - if (arrayIndex == -1) { - val newToNode = makeMutable(toNode) - newToNode.elems(rawIndex) = toBeAdded - newToNode + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder +} + +/** Builder for HashSet. + * $multipleResults + */ +private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { + import Node._ + import SetNode._ + + private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashSet as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. */ + private var aliased: HashSet[A] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially built hashmap. 
*/ + private var rootNode: BitmapIndexedSetNode[A] = newEmptyRootNode + + /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ + private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts key-value into the bitmapIndexMapNode. Requires that this is a new key-value pair */ + private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap = bm.dataMap | bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Mutates `bm` to replace inline data at bit position `bitpos` with updated key/value */ + private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + bm.content(idx) = elem + } + + def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit = + setNode match { + case bm: BitmapIndexedSetNode[A] => + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val element0 = bm.getPayload(index) + val element0UnimprovedHash = bm.getHash(index) 
+ + if (element0UnimprovedHash == originalHash && element0 == element) { + setValue(bm, bitpos, element0) } else { - val old = toNode.elems(arrayIndex) - if (old eq toBeAdded) toNode - else if (old eq null) { - assert(isMutable(toNode)) - toNode.elems(arrayIndex) = toBeAdded - toNode - } else { - val result = addHashSet(old, toBeAdded, level + 5) - if (result eq old) toNode - else { - val newToNode = makeMutable(toNode) - newToNode.elems(rawIndex) = result - newToNode - } - } - } - case bTrie: HashTrieSet[A] => - var result = toNode - var bBitSet = bTrie.bitmap - var bArrayIndex = 0 - while (bBitSet != 0) { - val bValue = bTrie.elems(bArrayIndex) - val rawIndex = Integer.numberOfTrailingZeros(bBitSet) - val aArrayIndex = trieIndex(result, rawIndex) - if (aArrayIndex == -1) { - result = makeMutable(result) - result.elems(rawIndex) = bValue - } else { - val aValue = result.elems(aArrayIndex) - if (aValue ne bValue) { - if (aValue eq null) { - assert(isMutable(result)) - result.elems(rawIndex) = bValue - } else { - val resultAtIndex = addHashSet(aValue, bValue, level + 5) - if (resultAtIndex ne aValue) { - result = makeMutable(result) - result.elems(rawIndex) = resultAtIndex - } - } - } - } - bBitSet ^= 1 << rawIndex - bArrayIndex += 1 + val element0Hash = improve(element0UnimprovedHash) + val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) } - result - case empty if empty.isEmpty => toNode - } + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHashCode = subNode.cachedJavaKeySetHashCode + update(subNode, element, originalHash, elementHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode 
+ } else { + insertValue(bm, bitpos, element, originalHash, elementHash) + } + case hc: HashCollisionSetNode[A] => + val index = hc.content.indexOf(element) + if (index < 0) { + hc.content = hc.content.appended(element) + } else { + hc.content = hc.content.updated(index, element) + } } - private def addToLeafHashSet(toNode: LeafHashSet[A], toBeAdded: HashSet[A], level: Int): HashSet[A] = { - if (toNode eq toBeAdded) toNode - else toBeAdded match { - case bLeaf: LeafHashSet[A] => - if (toNode.hash == bLeaf.hash) toNode.union0(bLeaf, level) - else makeMutableTrie(toNode, bLeaf, level) - case bTrie: HashTrieSet[A] => - val rawIndex = (toNode.hash >>> level) & 0x1f - val arrayIndex = compressedIndex(bTrie, rawIndex) - if (arrayIndex == -1) { - val result = makeMutable(bTrie) - result.elems(rawIndex) = toNode - result - } else { - val newEle = addToLeafHashSet(toNode, bTrie.elems(arrayIndex), level + 5) - if (newEle eq toBeAdded) - toBeAdded - else { - val result = makeMutable(bTrie) - result.elems(rawIndex) = newEle - result - } + + /** If currently referencing aliased structure, copy elements to new mutable structure */ + private def ensureUnaliased():Unit = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to new mutable structure */ + private def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashSet[A] = + if (rootNode.size == 0) { + HashSet.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashSet(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: A): this.type = { + ensureUnaliased() + val h = elem.## + val im = improve(h) + update(rootNode, elem, originalHash = h, elementHash = im, shift = 0) + this + } + + override def addAll(xs: IterableOnce[A]) = { + ensureUnaliased() + xs match { + case hm: HashSet[A] => + new ChampBaseIterator[A, SetNode[A]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + 
setNode = rootNode, + element = currentValueNode.getPayload(currentValueCursor), + originalHash = originalHash, + elementHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 } - case empty if empty.isEmpty => - toNode - } + override def next() = Iterator.empty.next() + } + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) } + + this } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size } diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala deleted file mode 100644 index c588f1abd30f..000000000000 --- a/src/library/scala/collection/immutable/IndexedSeq.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import generic._ -import mutable.{ArrayBuffer, Builder} - -/** A subtrait of `collection.IndexedSeq` which represents indexed sequences - * that are guaranteed immutable. - * $indexedSeqInfo - */ -trait IndexedSeq[+A] extends Seq[A] - with scala.collection.IndexedSeq[A] - with GenericTraversableTemplate[A, IndexedSeq] - with IndexedSeqLike[A, IndexedSeq[A]] { - override def companion: GenericCompanion[IndexedSeq] = IndexedSeq - - /** Returns this $coll as an indexed sequence. - * - * A new indexed sequence will not be built; lazy collections will stay lazy. 
- */ - @deprecatedOverriding("Immutable indexed sequences should do nothing on toIndexedSeq except cast themselves as an indexed sequence.", "2.11.0") - override def toIndexedSeq: IndexedSeq[A] = this - override def seq: IndexedSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `Vector`. - * @define coll indexed sequence - * @define Coll `IndexedSeq` - */ -object IndexedSeq extends IndexedSeqFactory[IndexedSeq] { - class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable { - def length = buf.length - def apply(idx: Int) = buf.apply(idx) - } - def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -} diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index 0cc63108e9f5..1aa1a6108d0c 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,17 +10,17 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package immutable -import scala.collection.generic.{BitOperations, CanBuildFrom} -import scala.collection.mutable.{Builder, MapBuilder} +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder} import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions /** Utility class for integer maps. 
- * @author David MacIver - */ + */ private[immutable] object IntMapUtils extends BitOperations.Int { def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) @@ -38,25 +38,13 @@ private[immutable] object IntMapUtils extends BitOperations.Int { } } -import IntMapUtils._ +import IntMapUtils.{Int => _, _} /** A companion object for integer maps. - * - * @define Coll `IntMap` - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. - * @since 2.7 - */ + * + * @define Coll `IntMap` + */ object IntMap { - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[IntMap[A], (Int, B), IntMap[B]] = - ReusableCBF.asInstanceOf[CanBuildFrom[IntMap[A], (Int, B), IntMap[B]]] - private val ReusableCBF = new CanBuildFrom[IntMap[Any], (Int, Any), IntMap[Any]] { - def apply(from: IntMap[Any]): Builder[(Int, Any), IntMap[Any]] = apply() - def apply(): Builder[(Int, Any), IntMap[Any]] = new MapBuilder[Int, Any, IntMap[Any]](empty[Any]) - } - def empty[T] : IntMap[T] = IntMap.Nil def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) @@ -64,7 +52,9 @@ object IntMap { def apply[T](elems: (Int, T)*): IntMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - @SerialVersionUID(-9137650114085457282L) + def from[V](coll: IterableOnce[(Int, V)]): IntMap[V] = + newBuilder[V].addAll(coll).result() + private[immutable] case object Nil extends IntMap[Nothing] { // Important! 
Without this equals method in place, an infinite // loop from Map.equals => size => pattern-match-on-Nil => equals @@ -77,13 +67,12 @@ object IntMap { } } - @SerialVersionUID(3302720273753906158L) private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ def withValue[S](s: S) = if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] else IntMap.Tip(key, s) } - @SerialVersionUID(-523093388545197183L) + private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] @@ -91,9 +80,28 @@ object IntMap { } } -} + def newBuilder[V]: Builder[(Int, V), IntMap[V]] = + new ImmutableBuilder[(Int, V), IntMap[V]](empty) { + def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this } + } -import IntMap._ + implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Int, AnyRef)]): IntMap[AnyRef] = IntMap.from[AnyRef](it) + def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]) = IntMap.from(it) + def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this) + implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this) +} // Iterator over a 
non-empty IntMap. private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { @@ -110,19 +118,20 @@ private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends Ab buffer(index).asInstanceOf[IntMap[V]] } - def push(x: IntMap[V]) { + def push(x: IntMap[V]): Unit = { buffer(index) = x.asInstanceOf[AnyRef] index += 1 } push(it) /** - * What value do we assign to a tip? - */ + * What value do we assign to a tip? + */ def valueOf(tip: IntMap.Tip[V]): T def hasNext = index != 0 - final def next: T = + @tailrec + final def next(): T = pop match { case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => { push(right) @@ -131,12 +140,12 @@ private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends Ab case IntMap.Bin(_, _, left, right) => { push(right) push(left) - next + next() } case t@IntMap.Tip(_, _) => valueOf(t) // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap // and don't return an IntMapIterator for IntMap.Nil. - case IntMap.Nil => sys.error("Empty maps not allowed as subtrees") + case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") } } @@ -155,22 +164,34 @@ private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapItera import IntMap._ /** Specialised immutable map structure for integer keys, based on - * [[http://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. - * - * '''Note:''' This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with integer keys. - * - * @since 2.7 - * @define Coll `immutable.IntMap` - * @define coll immutable integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ + * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] + * by Okasaki and Gill. 
Essentially a trie based on binary digits of the integers. + * + * '''Note:''' This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with integer keys. + * + * @define Coll `immutable.IntMap` + * @define coll immutable integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] - with Map[Int, T] - with MapLike[Int, T, IntMap[T]] { + with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]): IntMap[T] = + intMapFrom[T](coll) + protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]): IntMap[V2] = { + val b = IntMap.newBuilder[V2] + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Int, T), IntMap[T]](empty) { + def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this } + } override def empty: IntMap[T] = IntMap.Nil @@ -181,36 +202,42 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] } /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of integer keys and corresponding values. - */ + * Iterator over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of integer keys and corresponding values. + */ def iterator: Iterator[(Int, T)] = this match { case IntMap.Nil => Iterator.empty case _ => new IntMapEntryIterator(this) } /** - * Loops over the key, value pairs of the map in unsigned order of the keys. - */ + * Loops over the key, value pairs of the map in unsigned order of the keys. 
+ */ override final def foreach[U](f: ((Int, T)) => U): Unit = this match { case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } case IntMap.Tip(key, value) => f((key, value)) case IntMap.Nil => } + override def foreachEntry[U](f: (Int, T) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case IntMap.Tip(key, value) => f(key, value) + case IntMap.Nil => + } + override def keysIterator: Iterator[Int] = this match { case IntMap.Nil => Iterator.empty case _ => new IntMapKeyIterator(this) } /** - * Loop over the keys of the map. The same as `keys.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey(f: Int => Unit): Unit = this match { + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Int => U): Unit = this match { case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } case IntMap.Tip(key, _) => f(key) case IntMap.Nil => @@ -222,21 +249,21 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] } /** - * Loop over the values of the map. The same as `values.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachValue(f: T => Unit): Unit = this match { + * Loop over the values of the map. The same as `values.foreach(f)`, but may + * be more efficient. 
+ * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } case IntMap.Tip(_, value) => f(value) case IntMap.Nil => } - override def stringPrefix = "IntMap" - - override def isEmpty = this == IntMap.Nil + override protected[this] def className = "IntMap" + override def isEmpty = this eq IntMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { case IntMap.Bin(prefix, mask, left, right) => { val (newleft, newright) = (left.filter(f), right.filter(f)) @@ -249,7 +276,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] case IntMap.Nil => IntMap.Nil } - def transform[S](f: (Int, T) => S): IntMap[S] = this match { + override def transform[S](f: (Int, T) => S): IntMap[S] = this match { case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) case IntMap.Nil => IntMap.Nil @@ -261,12 +288,14 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] case IntMap.Bin(_, _, left, right) => left.size + right.size } + @tailrec final def get(key: Int): Option[T] = this match { case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None case IntMap.Nil => None } + @tailrec final override def getOrElse[S >: T](key: Int, default: => S): S = this match { case IntMap.Nil => default case IntMap.Tip(key2, value) => if (key == key2) value else default @@ -274,13 +303,14 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) } + @tailrec final override def apply(key: Int): T = this match { case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) 
left(key) else right(key) - case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found") - case IntMap.Nil => sys.error("key not found") + case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case IntMap.Nil => throw new IllegalArgumentException("key not found") } - def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) + override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { case IntMap.Bin(prefix, mask, left, right) => @@ -293,23 +323,35 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] case IntMap.Nil => IntMap.Tip(key, value) } + def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) + + def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) + + override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = + super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = + strictOptimizedCollect(IntMap.newBuilder[V2], pf) + /** - * Updates the map, using the provided function to resolve conflicts if the key is already present. - * - * Equivalent to: - * {{{ - * this.get(key) match { - * case None => this.update(key, value) - * case Some(oldvalue) => this.update(key, f(oldvalue, value) - * } - * }}} - * - * @tparam S The supertype of values in this `LongMap`. - * @param key The key to update - * @param value The value to use if there is no conflict - * @param f The function used to resolve conflicts. - * @return The updated map. - */ + * Updates the map, using the provided function to resolve conflicts if the key is already present. 
+ * + * Equivalent to: + * {{{ + * this.get(key) match { + * case None => this.update(key, value) + * case Some(oldvalue) => this.update(key, f(oldvalue, value) + * } + * }}} + * + * @tparam S The supertype of values in this `LongMap`. + * @param key The key to update + * @param value The value to use if there is no conflict + * @param f The function used to resolve conflicts. + * @return The updated map. + */ def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match { case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) @@ -321,7 +363,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] case IntMap.Nil => IntMap.Tip(key, value) } - def - (key: Int): IntMap[T] = this match { + def removed (key: Int): IntMap[T] = this match { case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) this else if (zero(key, mask)) bin(prefix, mask, left - key, right) @@ -333,14 +375,14 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] } /** - * A combined transform and filter function. Returns an `IntMap` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. - */ + * A combined transform and filter function. Returns an `IntMap` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
+ */ def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match { case IntMap.Bin(prefix, mask, left, right) => val newleft = left.modifyOrRemove(f) @@ -354,33 +396,33 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] //hack to preserve sharing if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]] else IntMap.Tip(key, value2) - } + } case IntMap.Nil => IntMap.Nil } /** - * Forms a union map with that map, using the combining function to resolve conflicts. - * - * @tparam S The type of values in `that`, a supertype of values in `this`. - * @param that The map to form a union with. - * @param f The function used to resolve conflicts between two mappings. - * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. - */ + * Forms a union map with that map, using the combining function to resolve conflicts. + * + * @tparam S The type of values in `that`, a supertype of values in `this`. + * @param that The map to form a union with. + * @param f The function used to resolve conflicts between two mappings. + * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
+ */ def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{ case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) => if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1) else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)) } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2) else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)) } else { if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) - else join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed + else join(p1, this, p2, that) } case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) @@ -389,16 +431,16 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] } /** - * Forms the intersection of these two maps with a combining function. The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. - */ + * Forms the intersection of these two maps with a combining function. The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. 
+ * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. + */ def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match { case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) => if (shorter(m1, m2)) { @@ -406,7 +448,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] else if (zero(p2, m1)) l1.intersectionWith(that, f) else r1.intersectionWith(that, f) } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { + else { if (!hasMatch(p1, p2, m2)) IntMap.Nil else if (zero(p1, m2)) this.intersectionWith(l2, f) else this.intersectionWith(r2, f) @@ -423,13 +465,13 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] } /** - * Left biased intersection. Returns the map that has all the same mappings - * as this but only for keys which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. - */ + * Left biased intersection. Returns the map that has all the same mappings + * as this but only for keys which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. + */ def intersection[R](that: IntMap[R]): IntMap[T] = this.intersectionWith(that, (key: Int, value: T, value2: R) => value) @@ -437,22 +479,24 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] this.unionWith[S](that, (key, x, y) => y) /** - * The entry with the lowest key value considered in unsigned order. 
- */ + * The entry with the lowest key value considered in unsigned order. + */ @tailrec final def firstKey: Int = this match { case Bin(_, _, l, r) => l.firstKey case Tip(k, v) => k - case IntMap.Nil => sys.error("Empty set") + case IntMap.Nil => throw new IllegalStateException("Empty set") } /** - * The entry with the highest key value considered in unsigned order. - */ + * The entry with the highest key value considered in unsigned order. + */ @tailrec final def lastKey: Int = this match { case Bin(_, _, l, r) => r.lastKey case Tip(k, v) => k - case IntMap.Nil => sys.error("Empty set") + case IntMap.Nil => throw new IllegalStateException("Empty set") } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this) } diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala index ac0dc50dfc4e..a38c60ed8cc2 100644 --- a/src/library/scala/collection/immutable/Iterable.scala +++ b/src/library/scala/collection/immutable/Iterable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,38 +10,28 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package immutable +package scala.collection.immutable -import generic._ -import mutable.Builder -import parallel.immutable.ParIterable +import scala.collection.{IterableFactory, IterableFactoryDefaults} -/** A base trait for iterable collections that are guaranteed immutable. 
- * $iterableInfo - * - * @define Coll `immutable.Iterable` - * @define coll immutable iterable collection - */ -trait Iterable[+A] extends Traversable[A] -// with GenIterable[A] - with scala.collection.Iterable[A] - with GenericTraversableTemplate[A, Iterable] - with IterableLike[A, Iterable[A]] - with Parallelizable[A, ParIterable[A]] -{ - override def companion: GenericCompanion[Iterable] = Iterable - protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `immutable.IterableLike` gets introduced, please move this there! - override def seq: Iterable[A] = this +/** A trait for collections that are guaranteed immutable. + * + * @tparam A the element type of the collection + * + * @define coll immutable collection + * @define Coll `immutable.Iterable` + */ +trait Iterable[+A] extends collection.Iterable[A] + with collection.IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + + override def iterableFactory: IterableFactory[Iterable] = Iterable } -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. 
- * @define Coll `immutable.Iterable` - * @define coll immutable iterable collection - */ -object Iterable extends TraversableFactory[Iterable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Iterable[A]] = new mutable.ListBuffer +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](List) { + override def from[E](it: IterableOnce[E]): Iterable[E] = it match { + case iterable: Iterable[E] => iterable + case _ => super.from(it) + } } diff --git a/src/library/scala/collection/immutable/LazyList.scala b/src/library/scala/collection/immutable/LazyList.scala new file mode 100644 index 000000000000..72425cf7045a --- /dev/null +++ b/src/library/scala/collection/immutable/LazyList.scala @@ -0,0 +1,1447 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} +import scala.language.implicitConversions +import scala.runtime.Statics + +/** This class implements an immutable linked list. We call it "lazy" + * because it computes its elements only when they are needed. + * + * Elements are memoized; that is, the value of each element is computed at most once. + * + * Elements are computed in order and are never skipped. + * As a consequence, accessing the tail causes the head to be computed first. + * + * How lazy is a `LazyList`? 
When you have a value of type `LazyList`, you + * don't know yet whether the list is empty. + * We say that it is lazy in its head. + * If you have tested that it is non-empty, + * then you also know that the head has been computed. + * + * It is also lazy in its tail, which is also a `LazyList`. + * You don't know whether the tail is empty until it is "forced", which is to say, + * until an element of the tail is computed. + * + * These important properties of `LazyList` depend on its construction using `#::` (or `#:::`). + * That operator is analogous to the "cons" of a strict `List`, `::`. + * It is "right-associative", so that the collection goes on the "right", + * and the element on the left of the operator is prepended to the collection. + * However, unlike the cons of a strict `List`, `#::` is lazy in its parameter, + * which is the element prepended to the left, and also lazy in its right-hand side, + * which is the `LazyList` being prepended to. + * (That is accomplished by implicitly wrapping the `LazyList`, as shown in the Scaladoc.) + * + * Other combinators from the collections API do not preserve this laziness. + * In particular, `++`, or `concat`, is "eager" or "strict" in its parameter + * and should not be used to compose `LazyList`s. + * + * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * all of the natural numbers `0`, `1`, `2`, ... For infinite sequences, + * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. 
+ * + * Here is an example showing the Fibonacci sequence, + * which may be evaluated to an arbitrary number of elements: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map(n => n._1 + n._2) + * println { + * fibs.take(5).mkString(", ") + * } + * } + * // prints: 0, 1, 1, 2, 3 + * }}} + * + * To illustrate, let's add some output to the definition `fibs`, so we + * see what's going on. + * + * {{{ + * import scala.math.BigInt + * import scala.util.chaining._ + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: + * fibs.zip(fibs.tail).map(n => (n._1 + n._2) + * .tap(sum => println(s"Adding ${n._1} and ${n._2} => $sum"))) + * fibs.take(5).foreach(println) + * fibs.take(6).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 => 1 + * // 1 + * // Adding 1 and 1 => 2 + * // 2 + * // Adding 1 and 2 => 3 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 => 5 + * // 5 + * }}} + * + * Note that the definition of `fibs` uses `val` not `def`. + * Memoization of the `LazyList` requires us to retain a reference to the computed values. + * + * `LazyList` is considered an immutable data structure, even though its elements are computed on demand. + * Once the values are memoized they do not change. + * Moreover, the `LazyList` itself is defined once and references to it are interchangeable. + * Values that have yet to be memoized still "exist"; they simply haven't been computed yet. + * + * Memoization can be a source of memory leaks and must be used with caution. + * It avoids recomputing elements of the list, but if a reference to the head + * is retained unintentionally, then all elements will be retained. 
+ * + * The caveat that all elements are computed in order means + * that some operations, such as [[drop]], [[dropWhile]], [[flatMap]] or [[collect]], + * may process a large number of intermediate elements before returning. + * + * Here's an example that illustrates these behaviors. + * Let's begin with an iteration of the natural numbers. + * + * {{{ + * // We'll start with a silly iteration + * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { + * // Stop after 200,000 + * if (i < 200001) { + * if (i % 50000 == 0) println(s + i) + * loop(s, iter.next(), iter) + * } + * } + * + * // Our first LazyList definition will be a val definition + * val lazylist1: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * + * // Because lazylist1 is a val, everything that the iterator produces is held + * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * val it1 = lazylist1.iterator + * loop("Iterator1: ", it1.next(), it1) + * + * // We can redefine this LazyList such that we retain only a reference to its Iterator. + * // That allows the LazyList to be garbage collected. + * // Using `def` to produce the LazyList in a method ensures + * // that no val is holding onto the head, as with lazylist1. + * def lazylist2: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * val it2 = lazylist2.iterator + * loop("Iterator2: ", it2.next(), it2) + * + * // And, of course, we don't actually need a LazyList at all for such a simple + * // problem. There's no reason to use a LazyList if you don't actually need + * // one. + * val it3 = new Iterator[Int] { + * var i = -1 + * def hasNext = true + * def next(): Int = { i += 1; i } + * } + * loop("Iterator3: ", it3.next(), it3) + * }}} + * + * In the `fibs` example earlier, the fact that `tail` works at all is of interest. + * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. 
+ * If we defined `fibs` such that only `0` were concretely known, then the act + * of determining `tail` would require the evaluation of `tail`, so the + * computation would be unable to progress, as in this code: + * {{{ + * // The first time we try to access the tail we're going to need more + * // information which will require us to recurse, which will require us to + * // recurse, which... + * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * }}} + * + * The definition of `fibs` above creates a larger number of objects than + * necessary depending on how you might want to implement it. The following + * implementation provides a more "cost effective" implementation due to the + * fact that it has a more direct route to the numbers themselves: + * + * {{{ + * lazy val fib: LazyList[Int] = { + * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * loop(1, 1) + * } + * }}} + * + * The head, the tail and whether the list is empty is initially unknown. + * Once any of those are evaluated, they are all known, though if the tail is + * built with `#::` or `#:::`, its content still isn't evaluated. Instead, evaluating + * the tail's content is deferred until the tail's empty status, head or tail is + * evaluated. + * + * Delaying the evaluation of whether a LazyList is empty until it's needed + * allows LazyList to not eagerly evaluate any elements on a call to `filter`. + * + * Only when it's further evaluated (which may be never!) do any of the elements get forced. 
+ * + * For example: + * + * {{{ + * def tailWithSideEffect: LazyList[Nothing] = { + * println("getting empty LazyList") + * LazyList.empty + * } + * + * val emptyTail = tailWithSideEffect // prints "getting empty LazyList" + * + * val suspended = 1 #:: tailWithSideEffect // doesn't print anything + * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed + * val filtered = tail.filter(_ => false) // still nothing is printed + * filtered.isEmpty // prints "getting empty LazyList" + * }}} + * + * ---- + * + * You may sometimes encounter an exception like the following: + * + * {{{ + * java.lang.RuntimeException: "LazyList evaluation depends on its own result (self-reference); see docs for more info + * }}} + * + * This exception occurs when a `LazyList` is attempting to derive its next element + * from itself, and is attempting to read the element currently being evaluated. + * As a trivial example: + * + * {{{ + * lazy val a: LazyList[Int] = 1 #:: 2 #:: a.filter(_ > 2) + * }}} + * + * When attempting to evaluate the third element of `a`, it will skip the first two + * elements and read the third, but that element is already being evaluated. This is + * often caused by a subtle logic error; in this case, using `>=` in the `filter` + * would fix the error. + * + * @tparam A the type of the elements contained in this lazy list. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] + * section on `LazyLists` for a summary. + * @define Coll `LazyList` + * @define coll lazy list + * @define orderDependent + * @define orderDependentFold + * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, + * `appendedAll`, `lazyAppendedAll`) without forcing any of the + * intermediate resulting lazy lists may overflow the stack when + * the final result is forced. 
+ * @define preservesLaziness This method preserves laziness; elements are only evaluated + * individually as needed. + * @define initiallyLazy This method does not evaluate anything until an operation is performed + * on the result (e.g. calling `head` or `tail`, or checking if it is empty). + * @define evaluatesAllElements This method evaluates all elements of the collection. + */ +@SerialVersionUID(4L) +final class LazyList[+A] private (lazyState: AnyRef /* EmptyMarker.type | () => LazyList[A] */) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, LazyList, LazyList[A]] + with IterableFactoryDefaults[A, LazyList] + with Serializable { + import LazyList._ + + // kount() // LazyListTest.countAlloc + + private def this(head: A, tail: LazyList[A]) = { + this(LazyList.EmptyMarker) + _head = head + _tail = tail + } + + // used to synchronize lazy state evaluation + // after initialization (`_head ne Uninitialized`) + // - `null` if this is an empty lazy list + // - `head: A` otherwise (can be `null`, `_tail == null` is used to test emptiness) + @volatile private[this] var _head: Any /* Uninitialized | A */ = + if (lazyState eq EmptyMarker) null else Uninitialized + + // when `_head eq Uninitialized` + // - `lazySate: () => LazyList[A]` + // - MidEvaluation while evaluating lazyState + // when `_head ne Uninitialized` + // - `null` if this is an empty lazy list + // - `tail: LazyList[A]` otherwise + private[this] var _tail: AnyRef /* () => LazyList[A] | MidEvaluation.type | LazyList[A] */ = + if (lazyState eq EmptyMarker) null else lazyState + + private def rawHead: Any = _head + private def rawTail: AnyRef = _tail + + @inline private def isEvaluated: Boolean = _head.asInstanceOf[AnyRef] ne Uninitialized + + private def initState(): Unit = synchronized { + if (!isEvaluated) { + // if it's already mid-evaluation, we're stuck in an infinite + // self-referential loop (also it's empty) + if (_tail eq MidEvaluation) + throw new RuntimeException( + 
"LazyList evaluation depends on its own result (self-reference); see docs for more info") + + val fun = _tail.asInstanceOf[() => LazyList[A]] + _tail = MidEvaluation + val l = + // `fun` returns a LazyList that represents the state (head/tail) of `this`. We call `l.evaluated` to ensure + // `l` is initialized, to prevent races when reading `rawTail` / `rawHead` below. + // Often, lazy lists are created with `newLL(eagerCons(...))` so `l` is already initialized, but `newLL` also + // accepts non-evaluated lazy lists. + try fun().evaluated + // restore `fun` in finally so we can try again later if an exception was thrown (similar to lazy val) + finally _tail = fun + _tail = l.rawTail + _head = l.rawHead + } + } + + @tailrec private def evaluated: LazyList[A] = + if (isEvaluated) { + if (_tail == null) Empty + else this + } else { + initState() + evaluated + } + + override def iterableFactory: SeqFactory[LazyList] = LazyList + + // NOTE: `evaluated; this eq Empty` would be wrong. Deserialization of `Empty` creates a new + // instance with `null` fields, but the `evaluated` method always returns the canonical `Empty`. + @inline override def isEmpty: Boolean = evaluated eq Empty + + /** @inheritdoc + * + * $preservesLaziness + */ + override def knownSize: Int = if (knownIsEmpty) 0 else -1 + + override def head: A = + // inlined `isEmpty` to make it clear that `rawHead` below is initialized + if (evaluated eq Empty) throw new NoSuchElementException("head of empty lazy list") + else rawHead.asInstanceOf[A] + + override def tail: LazyList[A] = + // inlined `isEmpty` to make it clear that `rawTail` below is initialized + if (evaluated eq Empty) throw new UnsupportedOperationException("tail of empty lazy list") + else rawTail.asInstanceOf[LazyList[A]] + + @inline private[this] def knownIsEmpty: Boolean = isEvaluated && isEmpty + @inline private def knownNonEmpty: Boolean = isEvaluated && !isEmpty + + /** Evaluates all undefined elements of the lazy list. 
+ * + * This method detects cycles in lazy lists, and terminates after all + * elements of the cycle are evaluated. For example: + * + * {{{ + * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * ring.force + * ring.toString + * + * // prints + * // + * // LazyList(1, 2, 3, ...) + * }}} + * + * This method will *not* terminate for non-cyclic infinite-sized collections. + * + * @return this + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: LazyList[A] = this + if (!these.isEmpty) { + these = these.tail + } + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + /** @inheritdoc + * + * The iterator returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def iterator: Iterator[A] = + if (knownIsEmpty) Iterator.empty + else new LazyIterator(this) + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying LazyList as elements + * are consumed. + * @note This function will force the realization of the entire LazyList + * unless the `f` throws an exception. + */ + @tailrec + override def foreach[U](f: A => U): Unit = { + if (!isEmpty) { + f(head) + tail.foreach(f) + } + } + + /** LazyList specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `LazyList`. 
+ * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override def foldLeft[B](z: B)(op: (B, A) => B): B = + if (isEmpty) z + else tail.foldLeft(op(z, head))(op) + + // LazyList.Empty doesn't use the SerializationProxy + protected[this] def writeReplace(): AnyRef = + if (knownNonEmpty) new SerializationProxy[A](this) else this + + override protected[this] def className = "LazyList" + + /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. + * + * $preservesLaziness + * + * $appendStackSafety + * + * @param suffix The collection that gets appended to this lazy list + * @return The lazy list containing elements of this lazy list and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyList[B] => lazyList // don't recompute the LazyList + case coll if coll.knownSize == 0 => Empty + case coll => eagerHeadFromIterator(coll.iterator) + } + else eagerCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(suffix) + else lazyAppendedAll(suffix) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appended[B >: A](elem: B): LazyList[B] = + if (knownIsEmpty) eagerCons(elem, Empty) + else lazyAppendedAll(Iterator.single(elem)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = + if (knownIsEmpty) eagerCons(z, Empty) + else scanLeftImpl(z)(op) + + private def scanLeftImpl[B](z: B)(op: (B, A) => B): LazyList[B] = + eagerCons( + z, + newLL { + if (isEmpty) Empty + else tail.scanLeftImpl(op(z, head))(op) + } + ) + + /** LazyList specialization of reduceLeft which allows GC to collect + * along the way. 
+ * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `f`. + */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = head + var left: LazyList[A] = tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + val (left, right) = map(f).partition(_.isLeft) + (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) Empty + else filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) Empty + else filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. 
+ */ + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = + new LazyList.WithFilter(coll, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prepended[B >: A](elem: B): LazyList[B] = eagerCons(elem, this) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(prefix) + else if (prefix.knownSize == 0) this + else newLL(eagerHeadPrependIterator(prefix.iterator)(this)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B](f: A => B): LazyList[B] = + if (knownIsEmpty) Empty + else mapImpl(f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def tapEach[U](f: A => U): LazyList[A] = map { a => f(a); a } + + private def mapImpl[B](f: A => B): LazyList[B] = + newLL { + if (isEmpty) Empty + else eagerCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = + if (knownIsEmpty) Empty + else collectImpl(this, pf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. 
+ */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) Empty + else flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = + if (knownIsEmpty || that.knownSize == 0) Empty + else newLL(eagerHeadZipImpl(that.iterator)) + + private def eagerHeadZipImpl[B](it: Iterator[B]): LazyList[(A, B)] = + if (isEmpty || !it.hasNext) Empty + else eagerCons((head, it.next()), newLL { tail eagerHeadZipImpl it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + if (knownIsEmpty) { + if (that.knownSize == 0) Empty + else LazyList.continually(thisElem) zip that + } else { + if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + else newLL(eagerHeadZipAllImpl(that.iterator, thisElem, thatElem)) + } + } + + private def eagerHeadZipAllImpl[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + if (it.hasNext) { + if (isEmpty) eagerCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) eagerHeadZipImpl it }) + else eagerCons((head, it.next()), newLL { tail.eagerHeadZipAllImpl(it, thisElem, thatElem) }) + } else { + if (isEmpty) Empty + else 
eagerCons((head, thatElem), tail zip LazyList.continually(thatElem)) + } + } + + /** @inheritdoc + * + * This method is not particularly useful for a lazy list, as [[zip]] already preserves + * laziness. + * + * The `collection.LazyZip2` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + // just in case it can be meaningfully overridden at some point + override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, this.type] = + super.lazyZip(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + (map(asPair(_)._1), map(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. + */ + override def drop(n: Int): LazyList[A] = + if (n <= 0) this + else if (knownIsEmpty) Empty + else dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. 
+ */ + override def dropWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) Empty + else dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyList[A] = { + if (n <= 0) this + else if (knownIsEmpty) Empty + else newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + eagerHeadDropRightImpl(scout) + } + } + + private def eagerHeadDropRightImpl(scout: LazyList[_]): LazyList[A] = + if (scout.isEmpty) Empty + else eagerCons(head, newLL(tail.eagerHeadDropRightImpl(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyList[A] = + if (knownIsEmpty) Empty + else takeImpl(n) + + private def takeImpl(n: Int): LazyList[A] = { + if (n <= 0) Empty + else newLL { + if (isEmpty) Empty + else eagerCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) Empty + else takeWhileImpl(p) + + private def takeWhileImpl(p: A => Boolean): LazyList[A] = + newLL { + if (isEmpty || !p(head)) Empty + else eagerCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyList[A] = + if (n <= 0 || knownIsEmpty) Empty + else takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. 
+ */ + override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def reverse: LazyList[A] = reverseOnto(Empty) + + // need contravariant type B to make the compiler happy - still returns LazyList[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(eagerCons(head, tl))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) Empty + else super.diff(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) Empty + else super.intersect(that) + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyList[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
+ */ + override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") + slidingImpl(size = size, step = step) + } + + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + if (knownIsEmpty) Iterator.empty + else new SlidingIterator[A](this, size = size, step = step) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def padTo[B >: A](len: Int, elem: B): LazyList[B] = + if (len <= 0) this + else newLL { + if (isEmpty) LazyList.fill(len)(elem) + else eagerCons(head, tail.padTo(len - 1, elem)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + if (knownIsEmpty) LazyList from other + else patchImpl(from, other, replaced) + + private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + newLL { + if (from <= 0) eagerHeadPrependIterator(other.iterator)(dropImpl(this, replaced)) + else if (isEmpty) eagerHeadFromIterator(other.iterator) + else eagerCons(head, tail.patchImpl(from - 1, other, replaced)) + } + + /** @inheritdoc + * + * $evaluatesAllElements + */ + // overridden just in case a lazy implementation is developed at some point + override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose + + /** @inheritdoc + * + * $preservesLaziness + */ + override def updated[B >: A](index: Int, elem: B): LazyList[B] = + if (index < 0) throw new IndexOutOfBoundsException(s"$index") + else updatedImpl(index, elem, index) + + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = + newLL { + if (index <= 0) eagerCons(elem, tail) + else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) + else eagerCons(head, tail.updatedImpl(index - 1, elem, startIndex)) + } + + /** Appends all elements of this $coll to a 
+ string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. + * + * $evaluatesAllElements + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + force + addStringNoForce(sb.underlying, start, sep, end) + sb + } + + private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): b.type = { + b.append(start) + if (!isEvaluated) b.append("<not computed>") + else if (!isEmpty) { + b.append(head) + var cursor = this + // explicit param to prevent an ObjectRef for cursor + @inline def appendHead(c: LazyList[A]): Unit = b.append(sep).append(c.head) + var scout = tail + if (cursor ne scout) { + cursor = scout + if (scout.knownNonEmpty) { + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scout.knownNonEmpty) { + appendHead(cursor) + cursor = cursor.tail + scout = scout.tail + if (scout.knownNonEmpty) scout = scout.tail + } + } + } + if (!scout.knownNonEmpty) { // Not a cycle, scout hit an end (empty or non-evaluated) + while (cursor ne scout) { + appendHead(cursor) + cursor = cursor.tail + } + // if cursor (eq scout) has state defined, it is empty; else unknown state + if (!cursor.isEvaluated) b.append(sep).append("<not computed>") + } else { + // Cycle: the scout is `knownNonEmpty` and `eq cursor`.
+ // if the cycle starts at `this`, its elements were already added + if (cursor ne this) { + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. + var runner = this + while (runner ne scout) { + runner = runner.tail + scout = scout.tail + } + while({ + val ct = cursor.tail + if (ct ne scout) { + // In `lazy val xs: LazyList[Int] = 1 #:: 2 #:: xs`, method `#::` creates a LazyList instance which ends up as the 3rd element. + // That 3rd element initially has unknown head/tail. Once it completes, the tail is assigned to be `xs.tail`. + // So in memory the structure is `LLx(1, LLy(2, LLz(1, <LLy>)))`. + // In `toString` we skip the last element to maintain the illusion. + appendHead(cursor) + } + cursor = ct + cursor ne scout + }) () + } + b.append(sep).append("<cycle>") + } + } + b.append(end) + b + } + + /** $preservesLaziness + * + * @return a string representation of this collection. An undefined state is + * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * a cycle at the fourth element.
+ */ + override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString + + /** @inheritdoc + * + * $preservesLaziness + */ + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + override def hasDefiniteSize: Boolean = { + if (!isEvaluated) false + else if (isEmpty) true + else { + // Two-iterator trick (2x & 1x speed) for cycle detection. + var those = this + var these = tail + while (those ne these) { + if (!these.isEvaluated) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.isEvaluated) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyList` + */ +@SerialVersionUID(4L) +object LazyList extends SeqFactory[LazyList] { + + // LazyListTest.countAlloc + // var k = 0 + // def kount(): Unit = k += 1 + + private object Uninitialized extends Serializable + private object MidEvaluation + private object EmptyMarker + + private val Empty: LazyList[Nothing] = new LazyList(EmptyMarker) + + /** Creates a new LazyList. */ + @inline private def newLL[A](state: => LazyList[A]): LazyList[A] = new LazyList[A](() => state) + + /** Creates a new LazyList with evaluated `head` and `tail`. */ + @inline private def eagerCons[A](hd: A, tl: LazyList[A]): LazyList[A] = new LazyList[A](hd, tl) + + private val anyToMarker: Any => Any = _ => Statics.pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyList`. 
Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * can continue their execution where they left off. + */ + + private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) eagerCons(elem, filterImpl(rest, p, isFlipped)) else Empty + } + } + + private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + val marker = Statics.pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) Empty + else eagerCons(res, collectImpl(rest, pf)) + } + } + + private def flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var it: Iterator[B] = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).iterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can 
throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + eagerCons(head, newLL(eagerHeadPrependIterator(it)(flatMapImpl(rest, f)))) + } else Empty + } + } + + private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest + } + } + + private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest + } + } + + private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while(!scout.isEmpty) { + scout = scout.tail + 
scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest + } + } + + /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + */ + object cons { + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(eagerCons(hd, newLL(tl))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + } + + implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + /** Construct a LazyList consisting of a given first element followed by elements + * from another LazyList. + */ + def #:: [B >: A](elem: => B): LazyList[B] = newLL(eagerCons(elem, newLL(l()))) + /** Construct a LazyList consisting of the concatenation of the given LazyList and + * another LazyList. + */ + def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { + case lazyList: LazyList[A] => lazyList + case _ if coll.knownSize == 0 => empty[A] + case _ => newLL(eagerHeadFromIterator(coll.iterator)) + } + + def empty[A]: LazyList[A] = Empty + + /** Creates a LazyList with the elements of an iterator followed by a LazyList suffix. + * Eagerly evaluates the first element. 
+ */ + private def eagerHeadPrependIterator[A](it: Iterator[A])(suffix: => LazyList[A]): LazyList[A] = + if (it.hasNext) eagerCons(it.next(), newLL(eagerHeadPrependIterator(it)(suffix))) + else suffix + + /** Creates a LazyList from an Iterator. Eagerly evaluates the first element. */ + private def eagerHeadFromIterator[A](it: Iterator[A]): LazyList[A] = + if (it.hasNext) eagerCons(it.next(), newLL(eagerHeadFromIterator(it))) + else Empty + + override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + if (xss.knownSize == 0) empty + else newLL(eagerHeadConcatIterators(xss.iterator)) + + private def eagerHeadConcatIterators[A](it: Iterator[collection.Iterable[A]]): LazyList[A] = + if (!it.hasNext) Empty + else eagerHeadPrependIterator(it.next().iterator)(eagerHeadConcatIterators(it)) + + /** An infinite LazyList that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyList + * @param f the function that's repeatedly applied + * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyList[A] = + newLL { + val head = start + eagerCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyList starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the LazyList + * @param step the increment value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int, step: Int): LazyList[Int] = + newLL(eagerCons(start, from(start + step, step))) + + /** + * Create an infinite LazyList starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int): LazyList[Int] = from(start, 1) + + /** + * Create an infinite LazyList containing the given element expression (which + * is computed for each occurrence). 
+ * + * @param elem the element composing the resulting LazyList + * @return the LazyList containing an infinite number of elem + */ + def continually[A](elem: => A): LazyList[A] = newLL(eagerCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyList[A] = + if (n > 0) newLL(eagerCons(elem, LazyList.fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { + def at(index: Int): LazyList[A] = + if (index < n) newLL(eagerCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + newLL { + f(init) match { + case Some((elem, state)) => eagerCons(elem, unfold(state)(f)) + case None => Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. 
+ */ + def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + extends AbstractIterator[LazyList[A]] { + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyList[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) + extends collection.WithFilter[A, LazyList] { + private[this] val filtered = lazyList.filter(p) + def map[B](f: A => B): LazyList[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + } + + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyList[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyList[A] = { + next init Empty + list + } + + override def addOne(elem: A): this.type = { + val deferred = new DeferredState[A] + next init eagerCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + 
override def addAll(xs: IterableOnce[A]): this.type = { + if (xs.knownSize != 0) { + val deferred = new DeferredState[A] + next init eagerHeadPrependIterator(xs.iterator)(deferred.eval()) + next = deferred + } + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + private[this] var _tail: () => LazyList[A] = _ + + def eval(): LazyList[A] = { + val state = _tail + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => LazyList[A]): Unit = { + if (_tail != null) throw new IllegalStateException("already initialized") + _tail = () => state + } + } + } + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
+ */ + @SerialVersionUID(4L) + final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new mutable.ListBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyList[A]] + // scala/scala#10118: caution that no code path can evaluate `tail.evaluated` + // before the resulting LazyList is returned + val it = init.toList.iterator + coll = newLL(eagerHeadPrependIterator(it)(tail)) + } + + private[this] def readResolve(): Any = coll + } +} diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala deleted file mode 100644 index 954aec7eaf3b..000000000000 --- a/src/library/scala/collection/immutable/LinearSeq.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder - -/** A subtrait of `collection.LinearSeq` which represents sequences that - * are guaranteed immutable. 
- * $linearSeqInfo - */ -trait LinearSeq[+A] extends Seq[A] - with scala.collection.LinearSeq[A] - with GenericTraversableTemplate[A, LinearSeq] - with LinearSeqLike[A, LinearSeq[A]] { - override def companion: GenericCompanion[LinearSeq] = LinearSeq - override def seq: LinearSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define coll immutable linear sequence - * @define Coll `immutable.LinearSeq` - */ -object LinearSeq extends SeqFactory[LinearSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, LinearSeq[A]] = new mutable.ListBuffer -} diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 52e6ff854a7e..cee22bcc6d54 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,12 +14,11 @@ package scala package collection package immutable -import generic._ -import mutable.{Builder, ListBuffer} +import scala.annotation.unchecked.uncheckedVariance import scala.annotation.tailrec -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.runtime.AbstractFunction1 +import mutable.{Builder, ListBuffer} +import scala.collection.generic.{CommonErrors, DefaultSerializable} +import scala.runtime.Statics.releaseFence /** A class for immutable linked lists representing ordered collections * of elements of type `A`. @@ -31,8 +30,6 @@ import scala.runtime.AbstractFunction1 * This class is optimal for last-in-first-out (LIFO), stack-like access patterns. 
If you need another access * pattern, for example, random access or FIFO, consider using a collection more suited to this than `List`. * - * $usesMutableState - * * ==Performance== * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list. * This includes the index-based lookup of elements, `length`, `append` and `reverse`. @@ -51,13 +48,13 @@ import scala.runtime.AbstractFunction1 * val days = List("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday") * * // Make a list element-by-element - * val when = "AM" :: "PM" :: List() + * val when = "AM" :: "PM" :: Nil * * // Pattern match * days match { * case firstDay :: otherDays => * println("The first day of the week is: " + firstDay) - * case List() => + * case Nil => * println("There don't seem to be any week days.") * } * }}} @@ -68,80 +65,70 @@ import scala.runtime.AbstractFunction1 * objects that rely on structural sharing), will be serialized and deserialized with multiple lists, one for * each reference to it. I.e. structural sharing is lost after serialization/deserialization. * - * @author Martin Odersky and others - * @since 1.0 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lists "Scala's Collection Library overview"]] + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lists "Scala's Collection Library overview"]] * section on `Lists` for more information. * * @define coll list * @define Coll `List` - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `List[B]` because an implicit of type `CanBuildFrom[List, B, That]` - * is defined in object `List`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. 
This is usually the `canBuildFrom` value - * defined in object `List`. * @define orderDependent * @define orderDependentFold * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(-6084104484083858598L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -sealed abstract class List[+A] extends AbstractSeq[A] - with LinearSeq[A] - with Product - with GenericTraversableTemplate[A, List] - with LinearSeqOptimized[A, List[A]] - with scala.Serializable { - override def companion: GenericCompanion[List] = List - - def isEmpty: Boolean - def head: A - def tail: List[A] - - // New methods in List +@SerialVersionUID(3L) +sealed abstract class List[+A] + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, List, List[A]] + with StrictOptimizedLinearSeqOps[A, List, List[A]] + with StrictOptimizedSeqOps[A, List, List[A]] + with IterableFactoryDefaults[A, List] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[List] = List /** Adds an element at the beginning of this list. - * @param x the element to prepend. - * @return a list which contains `x` as first element and - * which continues with this list. - * - * @usecase def ::(x: A): List[A] - * @inheritdoc - * - * Example: - * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}} - */ - def ::[B >: A] (x: B): List[B] = - new scala.collection.immutable.::(x, this) + * @param elem the element to prepend. + * @return a list which contains `x` as first element and + * which continues with this list. + * Example: + * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}} + */ + def :: [B >: A](elem: B): List[B] = new ::(elem, this) /** Adds the elements of a given list in front of this list. - * @param prefix The list elements to prepend. - * @return a list resulting from the concatenation of the given - * list `prefix` and this list. 
- * - * @usecase def :::(prefix: List[A]): List[A] - * @inheritdoc - * - * Example: - * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}} - */ - def :::[B >: A](prefix: List[B]): List[B] = + * + * Example: + * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}} + * + * @param prefix The list elements to prepend. + * @return a list resulting from the concatenation of the given + * list `prefix` and this list. + */ + def ::: [B >: A](prefix: List[B]): List[B] = if (isEmpty) prefix else if (prefix.isEmpty) this - else (new ListBuffer[B] ++= prefix).prependToList(this) + else { + val result = new ::[B](prefix.head, this) + var curr = result + var that = prefix.tail + while (!that.isEmpty) { + val temp = new ::[B](that.head, this) + curr.next = temp + curr = temp + that = that.tail + } + releaseFence() + result + } /** Adds the elements of a given list in reverse order in front of this list. - * `xs reverse_::: ys` is equivalent to - * `xs.reverse ::: ys` but is more efficient. - * - * @param prefix the prefix to reverse and then prepend - * @return the concatenation of the reversed prefix and the current list. - * - * @usecase def reverse_:::(prefix: List[A]): List[A] - * @inheritdoc - */ + * `xs reverse_::: ys` is equivalent to + * `xs.reverse ::: ys` but is more efficient. + * + * @param prefix the prefix to reverse and then prepend + * @return the concatenation of the reversed prefix and the current list. + */ def reverse_:::[B >: A](prefix: List[B]): List[B] = { var these: List[B] = this var pres = prefix @@ -152,77 +139,37 @@ sealed abstract class List[+A] extends AbstractSeq[A] these } - /** Builds a new list by applying a function to all elements of this list. - * Like `xs map f`, but returns `xs` unchanged if function - * `f` maps all elements to themselves (as determined by `eq`). - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. 
- * @return a list resulting from applying the given function - * `f` to each element of this list and collecting the results. - * - * @usecase def mapConserve(f: A => A): List[A] - * @inheritdoc - */ - @inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = { - // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`. - // If any successful optimization attempts or other changes are made, please rehash them there too. - @tailrec - def loop(mappedHead: List[B] = Nil, mappedLast: ::[B], unchanged: List[A], pending: List[A]): List[B] = - if (pending.isEmpty) { - if (mappedHead eq null) unchanged - else { - mappedLast.tl = unchanged - mappedHead - } - } - else { - val head0 = pending.head - val head1 = f(head0) - - if (head1 eq head0.asInstanceOf[AnyRef]) - loop(mappedHead, mappedLast, unchanged, pending.tail) - else { - var xc = unchanged - var mappedHead1: List[B] = mappedHead - var mappedLast1: ::[B] = mappedLast - while (xc ne pending) { - val next = new ::[B](xc.head, Nil) - if (mappedHead1 eq null) mappedHead1 = next - if (mappedLast1 ne null) mappedLast1.tl = next - mappedLast1 = next - xc = xc.tail + override final def isEmpty: Boolean = this eq Nil + + override def prepended[B >: A](elem: B): List[B] = elem :: this + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): List[B] = prefix match { + case xs: List[B] => xs ::: this + case _ if prefix.knownSize == 0 => this + case b: ListBuffer[B] if this.isEmpty => b.toList + case _ => + val iter = prefix.iterator + if (iter.hasNext) { + val result = new ::[B](iter.next(), this) + var curr = result + while (iter.hasNext) { + val temp = new ::[B](iter.next(), this) + curr.next = temp + curr = temp } - val next = new ::(head1, Nil) - if (mappedHead1 eq null) mappedHead1 = next - if (mappedLast1 ne null) mappedLast1.tl = next - mappedLast1 = next - val tail0 = pending.tail - loop(mappedHead1, mappedLast1, tail0, 
tail0) - + releaseFence() + result + } else { + this } - } - loop(null, null, this, this) - } - - private def isLikeListReusableCBF( bf: CanBuildFrom[_,_,_]): Boolean = { - (bf eq List.ReusableCBF) || - (bf eq immutable.LinearSeq.ReusableCBF) || (bf eq collection.LinearSeq.ReusableCBF) || - (bf eq immutable.Seq.ReusableCBF) ||(bf eq collection.Seq.ReusableCBF) } - // Overridden methods from IterableLike and SeqLike or overloaded variants of such methods - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = - if (isLikeListReusableCBF(bf)) (this ::: that.seq.toList).asInstanceOf[That] - else super.++(that) - - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match { - case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That] - case _ => super.+:(elem)(bf) + // When calling appendAll with another list `suffix`, avoid copying `suffix` + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): List[B] = suffix match { + case xs: List[B] => this ::: xs + case _ => super.appendedAll(suffix) } - override def toList: List[A] = this - override def take(n: Int): List[A] = if (isEmpty || n <= 0) Nil else { val h = new ::(head, Nil) var t = h @@ -231,33 +178,14 @@ sealed abstract class List[+A] extends AbstractSeq[A] while ({if (rest.isEmpty) return this; i < n}) { i += 1 val nx = new ::(rest.head, Nil) - t.tl = nx + t.next = nx t = nx rest = rest.tail } + releaseFence() h } - override def drop(n: Int): List[A] = { - var these = this - var count = n - while (!these.isEmpty && count > 0) { - these = these.tail - count -= 1 - } - these - } - - /** - * @example {{{ - * // Given a list - * val letters = List('a','b','c','d','e') - * - * // `slice` returns all elements beginning at index `from` and afterwards, - * // up until index `until` (excluding index `until`.) 
- * letters.slice(1,3) // Returns List('b','c') - * }}} - */ override def slice(from: Int, until: Int): List[A] = { val lo = scala.math.max(from, 0) if (until <= lo || isEmpty) Nil @@ -287,111 +215,84 @@ sealed abstract class List[+A] extends AbstractSeq[A] (b.toList, these) } - final override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That = { - if (isLikeListReusableCBF(bf)) { - if (this eq Nil) Nil.asInstanceOf[That] else { - val h = new ::[B](f(head), Nil) - var t: ::[B] = h - var rest = tail - while (rest ne Nil) { - val nx = new ::(f(rest.head), Nil) - t.tl = nx - t = nx - rest = rest.tail - } - h.asInstanceOf[That] - } + override def updated[B >: A](index: Int, elem: B): List[B] = { + var i = 0 + var current = this + val prefix = ListBuffer.empty[B] + while (i < index && current.nonEmpty) { + i += 1 + prefix += current.head + current = current.tail + } + if (i == index && current.nonEmpty) { + prefix.prependToList(elem :: current.tail) + } else { + throw CommonErrors.indexOutOfBounds(index = index, max = length - 1) } - else super.map(f) } - final override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { - if (isLikeListReusableCBF(bf)) { - if (this eq Nil) Nil.asInstanceOf[That] else { - var rest = this - var h: ::[B] = null - // Special case for first element - do { - val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied) - if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) h = new ::(x.asInstanceOf[B], Nil) - rest = rest.tail - if (rest eq Nil) return (if (h eq null ) Nil else h).asInstanceOf[That] - } while (h eq null) - var t = h - // Remaining elements - do { - val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied) - if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) { - val nx = new ::(x.asInstanceOf[B], Nil) - t.tl = nx - t = nx - } - rest = rest.tail - } while (rest ne Nil) - h.asInstanceOf[That] + final override def map[B](f: A => B): List[B] 
= { + if (this eq Nil) Nil else { + val h = new ::[B](f(head), Nil) + var t: ::[B] = h + var rest = tail + while (rest ne Nil) { + val nx = new ::(f(rest.head), Nil) + t.next = nx + t = nx + rest = rest.tail } + releaseFence() + h } - else super.collect(pf) } - final override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { - if (isLikeListReusableCBF(bf)) { - if (this eq Nil) Nil.asInstanceOf[That] else { - var rest = this - class Appender extends AbstractFunction1[B, Unit] { - var h: ::[B] = null - var t: ::[B] = null - override def apply(b: B): Unit = { - if (h eq null) { - h = new ::(b, Nil) - t = h - } - else { - val nx = new ::(b, Nil) - t.tl = nx - t = nx - } - } - //where flatMap generates a List, we can reuse the last non-empty one - def appendLast(last: ::[B]): Unit = { - if (h eq null) h = last - else t.tl = last - } - } - // we only build an appender if we have to, to reduce allocations. It's a commom case that we have - // short lists to map over and often flatMap may provide only zero or one segment that is non empty - var appender: Appender = null - var lastList: ::[B] = null - while (rest ne Nil) { - val c = f(rest.head).seq - rest = rest.tail - if (c.asInstanceOf[AnyRef] ne Nil) { - if (lastList ne null) { - if (appender eq null) - appender = new Appender - lastList foreach appender - lastList = null - } - if (c.isInstanceOf[::[B]]) - lastList = c.asInstanceOf[::[B]] - else { - if (appender eq null) - appender = new Appender - c foreach appender - } - } + final override def collect[B](pf: PartialFunction[A, B]): List[B] = { + if (this eq Nil) Nil else { + var rest = this + var h: ::[B] = null + var x: Any = null + // Special case for first element + while (h eq null) { + x = pf.applyOrElse(rest.head, List.partialNotApplied) + if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) h = new ::(x.asInstanceOf[B], Nil) + rest = rest.tail + if (rest eq Nil) return if (h eq null) Nil else h + } + 
var t = h + // Remaining elements + while (rest ne Nil) { + x = pf.applyOrElse(rest.head, List.partialNotApplied) + if (x.asInstanceOf[AnyRef] ne List.partialNotApplied) { + val nx = new ::(x.asInstanceOf[B], Nil) + t.next = nx + t = nx } - val result = if ((appender eq null) || (appender.h eq null)) - if (lastList eq null) Nil else lastList - else { - if (lastList ne null) - appender.appendLast(lastList) - appender.h + rest = rest.tail + } + releaseFence() + h + } + } + + final override def flatMap[B](f: A => IterableOnce[B]): List[B] = { + var rest = this + var h: ::[B] = null + var t: ::[B] = null + while (rest ne Nil) { + val it = f(rest.head).iterator + while (it.hasNext) { + val nx = new ::(it.next(), Nil) + if (t eq null) { + h = nx + } else { + t.next = nx } - result.asInstanceOf[That] + t = nx } + rest = rest.tail } - else super.flatMap(f) + if (h eq null) Nil else {releaseFence(); h} } @inline final override def takeWhile(p: A => Boolean): List[A] = { @@ -404,15 +305,6 @@ sealed abstract class List[+A] extends AbstractSeq[A] b.toList } - @inline final override def dropWhile(p: A => Boolean): List[A] = { - @tailrec - def loop(xs: List[A]): List[A] = - if (xs.isEmpty || !p(xs.head)) xs - else loop(xs.tail) - - loop(this) - } - @inline final override def span(p: A => Boolean): (List[A], List[A]) = { val b = new ListBuffer[A] var these = this @@ -425,7 +317,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] // Overridden with an implementation identical to the inherited one (at this time) // solely so it can be finalized and thus inlinable. 
- @inline final override def foreach[U](f: A => U) { + @inline final override def foreach[U](f: A => U): Unit = { var these = this while (!these.isEmpty) { f(these.head) @@ -433,7 +325,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] } } - override def reverse: List[A] = { + final override def reverse: List[A] = { var result: List[A] = Nil var these = this while (!these.isEmpty) { @@ -443,101 +335,348 @@ sealed abstract class List[+A] extends AbstractSeq[A] result } - override def foldRight[B](z: B)(op: (A, B) => B): B = - reverse.foldLeft(z)((right, left) => op(left, right)) + final override def foldRight[B](z: B)(op: (A, B) => B): B = { + var acc = z + var these: List[A] = reverse + while (!these.isEmpty) { + acc = op(these.head, acc) + these = these.tail + } + acc + } - override def stringPrefix = "List" + // Copy/Paste overrides to avoid interface calls inside loops. - override def toStream : Stream[A] = - if (isEmpty) Stream.Empty - else new Stream.Cons(head, tail.toStream) + override final def length: Int = { + var these = this + var len = 0 + while (!these.isEmpty) { + len += 1 + these = these.tail + } + len + } - // Create a proxy for Java serialization that allows us to avoid mutation - // during deserialization. This is the Serialization Proxy Pattern. - protected final def writeReplace(): AnyRef = new List.SerializationProxy(this) -} + override final def lengthCompare(len: Int): Int = { + @tailrec def loop(i: Int, xs: List[A]): Int = { + if (i == len) + if (xs.isEmpty) 0 else 1 + else if (xs.isEmpty) + -1 + else + loop(i + 1, xs.tail) + } + if (len < 0) 1 + else loop(0, coll) + } -/** The empty list. 
- * - * @author Martin Odersky - * @since 2.8 - */ -@SerialVersionUID(0 - 8256821097970055419L) -case object Nil extends List[Nothing] { - override def isEmpty = true - override def head: Nothing = - throw new NoSuchElementException("head of empty list") - override def tail: List[Nothing] = - throw new UnsupportedOperationException("tail of empty list") - // Removal of equals method here might lead to an infinite recursion similar to IntMap.equals. - override def equals(that: Any) = that match { - case that1: scala.collection.GenSeq[_] => that1.isEmpty - case _ => false + override final def forall(p: A => Boolean): Boolean = { + var these: List[A] = this + while (!these.isEmpty) { + if (!p(these.head)) return false + these = these.tail + } + true } -} -/** A non empty list characterized by a head and a tail. - * @param head the first element of the list - * @param tl the list containing the remaining elements of this list after the first one. - * @tparam B the type of the list elements. 
- * @author Martin Odersky - * @since 2.8 - */ -@SerialVersionUID(509929039250432923L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] { - override def tail : List[B] = tl - override def isEmpty: Boolean = false -} + override final def exists(p: A => Boolean): Boolean = { + var these: List[A] = this + while (!these.isEmpty) { + if (p(these.head)) return true + these = these.tail + } + false + } -/** $factoryInfo - * @define coll list - * @define Coll `List` - */ -object List extends SeqFactory[List] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + override final def contains[A1 >: A](elem: A1): Boolean = { + var these: List[A] = this + while (!these.isEmpty) { + if (these.head == elem) return true + these = these.tail + } + false + } - def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A] + override final def find(p: A => Boolean): Option[A] = { + var these: List[A] = this + while (!these.isEmpty) { + if (p(these.head)) return Some(these.head) + these = these.tail + } + None + } - override def empty[A]: List[A] = Nil + override def last: A = { + if (isEmpty) throw new NoSuchElementException("List.last") + else { + var these = this + var scout = tail + while (!scout.isEmpty) { + these = scout + scout = scout.tail + } + these.head + } + } + + override def corresponds[B](that: collection.Seq[B])(p: (A, B) => Boolean): Boolean = that match { + case that: LinearSeq[B] => + var i = this + var j = that + while (!(i.isEmpty || j.isEmpty)) { + if (!p(i.head, j.head)) + return false + i = i.tail + j = j.tail + } + i.isEmpty && j.isEmpty + case _ => + super.corresponds(that)(p) + } - override def apply[A](xs: A*): List[A] = xs.toList + override protected[this] def className = "List" - private[collection] val partialNotApplied = new Function1[Any, Any] { 
def apply(x: Any): Any = this } + /** Builds a new list by applying a function to all elements of this list. + * Like `xs map f`, but returns `xs` unchanged if function + * `f` maps all elements to themselves (as determined by `eq`). + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a list resulting from applying the given function + * `f` to each element of this list and collecting the results. + */ + @`inline` final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = { + // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`. + // If any successful optimization attempts or other changes are made, please rehash them there too. + @tailrec + def loop(mappedHead: List[B], mappedLast: ::[B], unchanged: List[A], pending: List[A]): List[B] = { + if (pending.isEmpty) { + if (mappedHead eq null) unchanged + else { + mappedLast.next = (unchanged: List[B]) + mappedHead + } + } + else { + val head0 = pending.head + val head1 = f(head0) + + if (head1 eq head0.asInstanceOf[AnyRef]) + loop(mappedHead, mappedLast, unchanged, pending.tail) + else { + var xc = unchanged + var mappedHead1: List[B] = mappedHead + var mappedLast1: ::[B] = mappedLast + while (xc ne pending) { + val next = new ::[B](xc.head, Nil) + if (mappedHead1 eq null) mappedHead1 = next + if (mappedLast1 ne null) mappedLast1.next = next + mappedLast1 = next + xc = xc.tail + } + val next = new ::(head1, Nil) + if (mappedHead1 eq null) mappedHead1 = next + if (mappedLast1 ne null) mappedLast1.next = next + mappedLast1 = next + val tail0 = pending.tail + loop(mappedHead1, mappedLast1, tail0, tail0) + + } + } + } + val result = loop(null, null, this, this) + releaseFence() + result + } + + override def filter(p: A => Boolean): List[A] = filterCommon(p, isFlipped = false) + + override def filterNot(p: A => Boolean): List[A] = filterCommon(p, isFlipped = true) + + 
private[this] def filterCommon(p: A => Boolean, isFlipped: Boolean): List[A] = { + + // everything seen so far is not included + @tailrec def noneIn(l: List[A]): List[A] = { + if (l.isEmpty) + Nil + else { + val h = l.head + val t = l.tail + if (p(h) != isFlipped) + allIn(l, t) + else + noneIn(t) + } + } - @SerialVersionUID(1L) - private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable { + // everything from 'start' is included, if everything from this point is in we can return the origin + // start otherwise if we discover an element that is out we must create a new partial list. + @tailrec def allIn(start: List[A], remaining: List[A]): List[A] = { + if (remaining.isEmpty) + start + else { + val x = remaining.head + if (p(x) != isFlipped) + allIn(start, remaining.tail) + else + partialFill(start, remaining) + } + } - private def writeObject(out: ObjectOutputStream) { - out.defaultWriteObject() - var xs: List[A] = orig - while (!xs.isEmpty) { - out.writeObject(xs.head) - xs = xs.tail + // we have seen elements that should be included then one that should be excluded, start building + def partialFill(origStart: List[A], firstMiss: List[A]): List[A] = { + val newHead = new ::(origStart.head, Nil) + var toProcess = origStart.tail + var currentLast = newHead + + // we know that all elements are :: until at least firstMiss.tail + while (!(toProcess eq firstMiss)) { + val newElem = new ::(toProcess.head, Nil) + currentLast.next = newElem + currentLast = newElem + toProcess = toProcess.tail } - out.writeObject(ListSerializeEnd) + + // at this point newHead points to a list which is a duplicate of all the 'in' elements up to the first miss. + // currentLast is the last element in that list. + + // now we are going to try and share as much of the tail as we can, only moving elements across when we have to. + var next = firstMiss.tail + var nextToCopy = next // the next element we would need to copy to our list if we can't share.
+ while (!next.isEmpty) { + // generally recommended is next.nonEmpty but this incurs an extra method call. + val head: A = next.head + if (p(head) != isFlipped) { + next = next.tail + } else { + // it's not a match - do we have outstanding elements? + while (!(nextToCopy eq next)) { + val newElem = new ::(nextToCopy.head, Nil) + currentLast.next = newElem + currentLast = newElem + nextToCopy = nextToCopy.tail + } + nextToCopy = next.tail + next = next.tail + } + } + + // we have remaining elements - they are unchanged; attach them to the end + if (!nextToCopy.isEmpty) + currentLast.next = nextToCopy + + newHead + } + + val result = noneIn(this) + releaseFence() + result + } + + override def partition(p: A => Boolean): (List[A], List[A]) = { + if (isEmpty) List.TupleOfNil + else super.partition(p) match { + case (Nil, xs) => (Nil, this) + case (xs, Nil) => (this, Nil) + case pair => pair } + } + + final override def toList: List[A] = this - // Java serialization calls this before readResolve during deserialization. - // Read the whole list and store it in `orig`.
- private def readObject(in: ObjectInputStream) { - in.defaultReadObject() - val builder = List.newBuilder[A] - while (true) in.readObject match { - case ListSerializeEnd => - orig = builder.result() - return - case a => - builder += a.asInstanceOf[A] + // Override for performance + override def equals(o: scala.Any): Boolean = { + @tailrec def listEq(a: List[_], b: List[_]): Boolean = + (a eq b) || { + val aEmpty = a.isEmpty + val bEmpty = b.isEmpty + if (!(aEmpty || bEmpty) && a.head == b.head) { + listEq(a.tail, b.tail) + } + else { + aEmpty && bEmpty + } } + + o match { + case that: List[_] => listEq(this, that) + case _ => super.equals(o) } + } - // Provide the result stored in `orig` for Java serialization - private def readResolve(): AnyRef = orig + // TODO: uncomment once bincompat allows (reference: scala/scala#9365) + /* + // Override for performance: traverse only as much as needed + // and share tail when nothing needs to be filtered out anymore + override def diff[B >: A](that: collection.Seq[B]): AnyRef = { + if (that.isEmpty || this.isEmpty) this + else if (tail.isEmpty) if (that.contains(head)) Nil else this + else { + val occ = occCounts(that) + val b = new ListBuffer[A]() + @tailrec + def rec(remainder: List[A]): List[A] = { + if(occ.isEmpty) b.prependToList(remainder) + else remainder match { + case Nil => b.result() + case head :: next => { + occ.updateWith(head){ + case None => { + b.append(head) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + rec(next) + } + } + } + rec(this) + } } + */ + } -/** Only used for list serialization */ -@SerialVersionUID(0L - 8476791151975527571L) -private[scala] case object ListSerializeEnd +// Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or +// before a newly-allocated, thread-local :: instance is aliased (e.g. 
in ListBuffer.toList) +final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally + extends List[A] { + releaseFence() + override def headOption: Some[A] = Some(head) + override def tail: List[A] = next +} + +case object Nil extends List[Nothing] { + override def head: Nothing = throw new NoSuchElementException("head of empty list") + override def headOption: None.type = None + override def tail: Nothing = throw new UnsupportedOperationException("tail of empty list") + override def last: Nothing = throw new NoSuchElementException("last of empty list") + override def init: Nothing = throw new UnsupportedOperationException("init of empty list") + override def knownSize: Int = 0 + override def iterator: Iterator[Nothing] = Iterator.empty + override def unzip[A1, A2](implicit asPair: Nothing => (A1, A2)): (List[A1], List[A2]) = EmptyUnzip + + @transient + private[this] val EmptyUnzip = (Nil, Nil) +} + +/** + * $factoryInfo + * @define coll list + * @define Coll `List` + */ +@SerialVersionUID(3L) +object List extends StrictOptimizedSeqFactory[List] { + private val TupleOfNil = (Nil, Nil) + + def from[B](coll: collection.IterableOnce[B]): List[B] = Nil.prependedAll(coll) + + def newBuilder[A]: Builder[A, List[A]] = new ListBuffer() + + def empty[A]: List[A] = Nil + + @transient + private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this } +} diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 1a14b4cce3f7..74d1697cac7f 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,46 +14,15 @@ package scala package collection package immutable -import generic._ -import scala.annotation.tailrec +import scala.annotation.{nowarn, tailrec} +import scala.collection.mutable.ReusableBuilder +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list map with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]] - * section on `List Maps` for more information. - * @since 1 - * @define Coll ListMap - * @define coll list map - */ -object ListMap extends ImmutableMapFactory[ListMap] { - - /** - * $mapCanBuildFromInfo - */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (A, B), ListMap[A, B]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Any, Any] - - def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]] - - @SerialVersionUID(-8256686706655863282L) - private object EmptyListMap extends ListMap[Any, Nothing] - - @tailrec private def foldRightInternal[A, B, Z](map: ListMap[A, B], prevValue: Z, op: ((A, B), Z) => Z): Z = { - if (map.isEmpty) prevValue - else foldRightInternal(map.init, op(map.last, prevValue), op) - } -} - /** * This class implements immutable maps using a list-based data structure. List map iterators and - * traversal methods visit key-value pairs in the order whey were first inserted. + * traversal methods visit key-value pairs in the order they were first inserted. * * Entries are stored internally in reversed insertion order, which means the newest key is at the * head of the list. 
As such, methods such as `head` and `tail` are O(n), while `last` and `init` @@ -63,134 +32,341 @@ object ListMap extends ImmutableMapFactory[ListMap] { * Instances of `ListMap` represent empty maps; they can be either created by calling the * constructor directly, or by applying the function `ListMap.empty`. * - * @tparam A the type of the keys contained in this list map - * @tparam B the type of the values associated with the keys + * @tparam K the type of the keys contained in this list map + * @tparam V the type of the values associated with the keys * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 * @define Coll ListMap * @define coll list map * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(301002838095710379L) -sealed class ListMap[A, +B] extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, ListMap[A, B]] - with Serializable - with HasForeachEntry[A,B] { +sealed class ListMap[K, +V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { - override def empty = ListMap.empty + override def mapFactory: MapFactory[ListMap] = ListMap override def size: Int = 0 + override def isEmpty: Boolean = true - def get(key: A): Option[B] = None + override def knownSize: Int = 0 + def get(key: K): Option[V] = None + + def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this) - private[immutable] def foreachEntry[U](f: (A, B) => U): Unit = { - var current = this - while (!current.isEmpty) { - f(current.key, current.value) - current = current.next + def removed(key: K): ListMap[K, V] = this + + def iterator: Iterator[(K, V)] = { + var curr: ListMap[K, V] = this + var res: List[(K, V)] = Nil + while (curr.nonEmpty) { + res = (curr.key, curr.value) :: res + curr = curr.next } + res.iterator } - override def hashCode(): Int = { - if (isEmpty) { - 
MurmurHash3.emptyMapHash - } else { - val hasher = new Map.HashCodeAccumulator() - foreachEntry(hasher) - hasher.finalizeHash + @nowarn("msg=overriding method keys") + override def keys: Iterable[K] = { + var curr: ListMap[K, V] = this + var res: List[K] = Nil + while (curr.nonEmpty) { + res = curr.key :: res + curr = curr.next } + res } - override def updated[B1 >: B](key: A, value: B1): ListMap[A, B1] = new Node[B1](key, value) - - def +[B1 >: B](kv: (A, B1)): ListMap[A, B1] = new Node[B1](kv._1, kv._2) - def -(key: A): ListMap[A, B] = this - - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] = - if (xs.isEmpty) this - else ((repr: ListMap[A, B1]) /: xs) (_ + _) - - def iterator: Iterator[(A, B)] = { - def reverseList = { - var curr: ListMap[A, B] = this - var res: List[(A, B)] = Nil - while (!curr.isEmpty) { - res = (curr.key, curr.value) :: res - curr = curr.next + override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration + // order by reversing the list first. But mapHash is symmetric so the reversed order is fine here.
+ val _reversed = new immutable.AbstractMap[K, V] { + override def isEmpty: Boolean = ListMap.this.isEmpty + override def removed(key: K): Map[K, V] = ListMap.this.removed(key) + override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value) + override def get(key: K): Option[V] = ListMap.this.get(key) + override def iterator: Iterator[(K, V)] = ListMap.this.iterator + override def foreachEntry[U](f: (K, V) => U): Unit = { + var curr: ListMap[K, V] = ListMap.this + while (curr.nonEmpty) { + f(curr.key, curr.value) + curr = curr.next + } + } } - res + MurmurHash3.mapHash(_reversed) } - reverseList.iterator } - protected def key: A = throw new NoSuchElementException("key of empty map") - protected def value: B = throw new NoSuchElementException("value of empty map") - protected def next: ListMap[A, B] = throw new NoSuchElementException("next of empty map") + private[immutable] def key: K = throw new NoSuchElementException("key of empty map") + private[immutable] def value: V = throw new NoSuchElementException("value of empty map") + private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map") - override def foldRight[Z](z: Z)(op: ((A, B), Z) => Z): Z = ListMap.foldRightInternal(this, z, op) - override def stringPrefix = "ListMap" + override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op) + override protected[this] def className = "ListMap" +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list map with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]] + * section on `List Maps` for more information. 
+ * @define Coll ListMap + * @define coll list map + */ +@SerialVersionUID(3L) +object ListMap extends MapFactory[ListMap] { /** * Represents an entry in the `ListMap`. */ - @SerialVersionUID(-6453056603889598734L) - protected class Node[B1 >: B](override protected val key: A, - override protected val value: B1) extends ListMap[A, B1] with Serializable { + private[immutable] final class Node[K, V]( + override private[immutable] val key: K, + private[immutable] var _value: V, + private[immutable] var _init: ListMap[K, V] + ) extends ListMap[K, V] { + releaseFence() + + override private[immutable] def value: V = _value override def size: Int = sizeInternal(this, 0) - @tailrec private[this] def sizeInternal(cur: ListMap[A, B1], acc: Int): Int = + @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int = if (cur.isEmpty) acc else sizeInternal(cur.next, acc + 1) override def isEmpty: Boolean = false - override def apply(k: A): B1 = applyInternal(this, k) + override def knownSize: Int = -1 - @tailrec private[this] def applyInternal(cur: ListMap[A, B1], k: A): B1 = + @throws[NoSuchElementException] + override def apply(k: K): V = applyInternal(this, k) + + @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V = if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k) else if (k == cur.key) cur.value else applyInternal(cur.next, k) - override def get(k: A): Option[B1] = getInternal(this, k) + override def get(k: K): Option[V] = getInternal(this, k) - @tailrec private[this] def getInternal(cur: ListMap[A, B1], k: A): Option[B1] = + @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] = if (cur.isEmpty) None else if (k == cur.key) Some(cur.value) else getInternal(cur.next, k) - override def contains(k: A): Boolean = containsInternal(this, k) + override def contains(k: K): Boolean = containsInternal(this, k) - @tailrec private[this] def containsInternal(cur: ListMap[A, B1], k: A): Boolean = - 
if(cur.isEmpty) false + @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean = + if (cur.isEmpty) false else if (k == cur.key) true else containsInternal(cur.next, k) - override def updated[B2 >: B1](k: A, v: B2): ListMap[A, B2] = { - val m = this - k - new m.Node[B2](k, v) - } + override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = { - override def +[B2 >: B1](kv: (A, B2)): ListMap[A, B2] = { - val m = this - kv._1 - new m.Node[B2](kv._1, kv._2) - } + var index = -1 // the index (in reverse) where the key to update exists, if it is found + var found = false // true if the key is found in the map + var isDifferent = false // true if the key was found and the values are different - override def -(k: A): ListMap[A, B1] = removeInternal(k, this, Nil) + { + var curr: ListMap[K, V] = this + + while (curr.nonEmpty && !found) { + if (k == curr.key) { + found = true + isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef] + } + index += 1 + curr = curr.init + } + } + + if (found) { + if (isDifferent) { + var newHead: ListMap.Node[K, V1] = null + var prev: ListMap.Node[K, V1] = null + var curr: ListMap[K, V1] = this + var i = 0 + while (i < index) { + val temp = new ListMap.Node(curr.key, curr.value, null) + if (prev ne null) { + prev._init = temp + } + prev = temp + curr = curr.init + if (newHead eq null) { + newHead = prev + } + i += 1 + } + val newNode = new ListMap.Node(curr.key, v, curr.init) + if (prev ne null) { + prev._init = newNode + } + releaseFence() + if (newHead eq null) newNode else newHead + } else { + this + } + } else { + new ListMap.Node(k, v, this) + } + } - @tailrec private[this] def removeInternal(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] = + @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] = if (cur.isEmpty) acc.last - else if (k == cur.key) (cur.next /: acc) { case (t, h) => new t.Node(h.key, h.value) } + else if 
(k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) } else removeInternal(k, cur.next, cur :: acc) - override protected def next: ListMap[A, B1] = ListMap.this + override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil) + + override private[immutable] def next: ListMap[K, V] = _init + + override def last: (K, V) = (key, value) + override def init: ListMap[K, V] = next + + } + + def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]] + + private object EmptyListMap extends ListMap[Any, Nothing] + + def from[K, V](it: collection.IterableOnce[(K, V)]): ListMap[K, V] = + it match { + case lm: ListMap[K, V] => lm + case lhm: collection.mutable.LinkedHashMap[K, V] => + // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each + // key-value pair + var current: ListMap[K, V] = empty[K, V] + var firstEntry = lhm._firstEntry + while (firstEntry ne null) { + current = new Node(firstEntry.key, firstEntry.value, current) + firstEntry = firstEntry.later + } + current + case _: collection.Map[K, V] | _: collection.MapView[K, V] => + // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end + var current: ListMap[K, V] = empty[K, V] + val iter = it.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + current = new Node(k, v, current) + } + current + + case _ => (newBuilder[K, V] ++= it).result() + } + + /** Returns a new ListMap builder + * + * The implementation safely handles additions after `result()` without calling `clear()` + * + * @tparam K the map key type + * @tparam V the map value type + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V] - override def last: (A, B1) = (key, value) - override def init: ListMap[A, B1] = next + @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = { + if (map.isEmpty) prevValue + 
else foldRightInternal(map.init, op(map.last, prevValue), op) + } +} + +/** Builder for ListMap. + * $multipleResults + */ +private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] { + private[this] var isAliased: Boolean = false + private[this] var underlying: ListMap[K, V] = ListMap.empty + + override def clear(): Unit = { + underlying = ListMap.empty + isAliased = false + } + + override def result(): ListMap[K, V] = { + isAliased = true + releaseFence() + underlying + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + + @tailrec + private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match { + case n: ListMap.Node[K, V] => + if (n.key == key) { + n._value = value + true + } else { + insertValueAtKeyReturnFound(n.init, key, value) + } + case _ => false + } + + def addOne(key: K, value: V): this.type = { + if (isAliased) { + underlying = underlying.updated(key, value) + } else { + if (!insertValueAtKeyReturnFound(underlying, key, value)) { + underlying = new ListMap.Node(key, value, underlying) + } + } + this + } + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + if (isAliased) { + super.addAll(xs) + } else if (underlying.nonEmpty) { + xs match { + case m: collection.Map[K, V] => + // if it is a map, then its keys will not collide with themselves. + // therefore we only need to check the already-existing elements for collisions. 
+ // No need to check the entire list + + val iter = m.iterator + var newUnderlying = underlying + while (iter.hasNext) { + val next = iter.next() + if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) { + newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying) + } + } + underlying = newUnderlying + this + + case _ => + super.addAll(xs) + } + } else xs match { + case lhm: collection.mutable.LinkedHashMap[K, V] => + // special-casing LinkedHashMap avoids creating an Iterator and tuples for each key-value + var firstEntry = lhm._firstEntry + while (firstEntry ne null) { + underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying) + firstEntry = firstEntry.later + } + this + + case _: collection.Map[K, V] | _: collection.MapView[K, V] => + val iter = xs.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + underlying = new ListMap.Node(k, v, underlying) + } + + this + case _ => + super.addAll(xs) + } } } diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index 03466e11761a..2e2758cb2747 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,37 +14,13 @@ package scala package collection package immutable -import generic._ +import mutable.{Builder, ImmutableBuilder} import scala.annotation.tailrec - -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list set with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. 
- * - * @since 1 - * @define Coll ListSet - * @define coll list set - */ -object ListSet extends ImmutableSetFactory[ListSet] { - - /** - * $setCanBuildFromInfo - */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, ListSet[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] - - @SerialVersionUID(5010379588739277132L) - private object EmptyListSet extends ListSet[Any] - private[collection] def emptyInstance: ListSet[Any] = EmptyListSet -} +import scala.collection.generic.DefaultSerializable /** * This class implements immutable sets using a list-based data structure. List set iterators and - * traversal methods visit elements in the order whey were first inserted. + * traversal methods visit elements in the order they were first inserted. * * Elements are stored internally in reversed insertion order, which means the newest element is at * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and @@ -56,178 +32,107 @@ object ListSet extends ImmutableSetFactory[ListSet] { * * @tparam A the type of the elements contained in this list set * - * @author Matthias Zenger - * @since 1 * @define Coll ListSet * @define coll list set * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(-8417059026623606218L) -sealed class ListSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, ListSet] - with SetLike[A, ListSet[A]] - with Serializable { +sealed class ListSet[A] + extends AbstractSet[A] + with StrictOptimizedSetOps[A, ListSet, ListSet[A]] + with IterableFactoryDefaults[A, ListSet] + with DefaultSerializable { - override def companion: GenericCompanion[ListSet] = ListSet + override protected[this] def className: String = "ListSet" override def size: Int = 0 + override def knownSize: Int = 0 override def isEmpty: Boolean = true def contains(elem: A): Boolean = false - def +(elem: A): ListSet[A] = new Node(elem) - def 
-(elem: A): ListSet[A] = this - - override def ++(xs: GenTraversableOnce[A]): ListSet[A] = - xs match { - case _: this.type => this - case _ if xs.isEmpty => this - // we want to avoid using iterator as it causes allocations during reverseList - case ls: ListSet[A] => - if (isEmpty) ls - else { - // optimize add non-empty ListSet - @tailrec def skip(ls: ListSet[A], count: Int): ListSet[A] = - if (count == 0) ls else skip(ls.next, count - 1) - - @tailrec def containsLimited(n: ListSet[A], e: A, end: ListSet[A]): Boolean = - (n ne end) && (e == n.elem || containsLimited(n.next, e, end)) - - @tailrec def distanceTo(n: ListSet[A], end: ListSet[A], soFar: Int): Int = - if (n eq end) soFar else distanceTo(n.next, end, soFar + 1) - - // We hope to get some structural sharing so find the tail of the - // ListSet that are `eq` (or if there are not any then the ends of the lists), - // and we optimise the add to only iterate until we reach the common end - val lsSize = ls.size - val thisSize = this.size - val remaining = Math.min(thisSize, lsSize) - var thisTail = skip(this, thisSize - remaining) - var lsTail = skip(ls, lsSize - remaining) - //find out what part of the the ListSet is sharable - //as we can ignore the shared elements - while ((thisTail ne lsTail) && !lsTail.isEmpty) { - thisTail = thisTail.next - lsTail = lsTail.next - } - var toAdd = ls - var result: ListSet[A] = this - - // Its quite a common case that we are just adding a few elements, so it there are less than 5 elements we - // hold them in pending0..3 - // if there are more than these 4 we hold the rest in pending - var pending : Array[A] = null - var pending0, pending1, pending2, pending3: A = null.asInstanceOf[A] - var pendingCount = 0 - while (toAdd ne lsTail) { - val elem = toAdd.elem - if (!containsLimited(result, elem, lsTail)) { - pendingCount match { - case 0 => pending0 = elem - case 1 => pending1 = elem - case 2 => pending2 = elem - case 3 => pending3 = elem - case _ => - if (pending eq null) 
- pending = new Array[AnyRef](distanceTo(toAdd, lsTail, 0)).asInstanceOf[Array[A]] - pending(pendingCount - 4) = elem - } - pendingCount += 1 - } - toAdd = toAdd.next - } - // add the extra values. They are added in reverse order so as to ensure that the iteration order is correct - // remembering that the content is in the reverse order to the iteration order - // i.e. this.next is really the previous value - while (pendingCount > 0) { - val elem: A = pendingCount match { - case 1 => pending0 - case 2 => pending1 - case 3 => pending2 - case 4 => pending3 - case _ => pending(pendingCount - 5) - } - val r = result - result = new r.Node(elem) - pendingCount -= 1 - } - result - } - case _ => xs.foldLeft(repr)(_ + _) - } + def incl(elem: A): ListSet[A] = new Node(elem) + def excl(elem: A): ListSet[A] = this - def iterator: Iterator[A] = { - def reverseList = { - var curr: ListSet[A] = this - var res: List[A] = Nil - while (!curr.isEmpty) { - res = curr.elem :: res - curr = curr.next - } - res + def iterator: scala.collection.Iterator[A] = { + var curr: ListSet[A] = this + var res: List[A] = Nil + while (!curr.isEmpty) { + res = curr.elem :: res + curr = curr.next } - reverseList.iterator + res.iterator } protected def elem: A = throw new NoSuchElementException("elem of empty set") protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]] - - override def stringPrefix = "ListSet" + override def iterableFactory: IterableFactory[ListSet] = ListSet /** * Represents an entry in the `ListSet`. 
*/ - @SerialVersionUID(-787710309854855049L) - protected class Node(override protected val elem: A) extends ListSet[A] with Serializable { + protected class Node(override protected val elem: A) extends ListSet[A] { override def size = sizeInternal(this, 0) - + override def knownSize: Int = -1 @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = if (n.isEmpty) acc else sizeInternal(n.next, acc + 1) override def isEmpty: Boolean = false - override def contains(e: A) = containsInternal(this, e) + override def contains(e: A): Boolean = containsInternal(this, e) @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) - @tailrec private[this] def indexInternal(n: ListSet[A], e: A, i:Int): Int = - if (n.isEmpty) -1 - else if (n.elem == e) i - else indexInternal(n.next, e, i + 1) - - override def +(e: A): ListSet[A] = if (contains(e)) this else new Node(e) - - override def -(e: A): ListSet[A] = { - val index = indexInternal(this, e, 0) - if (index < 0) this - else if (index == 0) next - else { - val data = new Array[ListSet[A]](index) - @tailrec def store(i: Int, e: ListSet[A]): Unit = { - if (i < index) { - data(i) = e - store(i + 1, e.next) - } - } - @tailrec def reform(i: Int, e: ListSet[A]): ListSet[A] = { - if (i < 0) e - else reform (i -1, new e.Node(data(i).elem)) - } - store(0, this) - reform(index -1, data(index - 1).next.next) - } - } + override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) + + override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) + + @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + if (cur.isEmpty) acc.last + else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) + else removeInternal(k, cur.next, cur :: acc) override protected def next: ListSet[A] = ListSet.this override def last: A = elem + override def init: ListSet[A] = next } } + +/** 
+ * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list set with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. + * + * @define Coll ListSet + * @define coll list set + */ +@SerialVersionUID(3L) +object ListSet extends IterableFactory[ListSet] { + + def from[E](it: scala.collection.IterableOnce[E]): ListSet[E] = + it match { + case ls: ListSet[E] => ls + case _ if it.knownSize == 0 => empty[E] + case _ => (newBuilder[E] ++= it).result() + } + + private object EmptyListSet extends ListSet[Any] { + override def knownSize: Int = 0 + } + private[collection] def emptyInstance: ListSet[Any] = EmptyListSet + + def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] + + def newBuilder[A]: Builder[A, ListSet[A]] = + new ImmutableBuilder[A, ListSet[A]](empty) { + def addOne(elem: A): this.type = { elems = elems + elem; this } + } +} diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index bef668f9c3f3..8ff968f58305 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,17 +10,19 @@ * additional information regarding copyright ownership. 
*/ -package scala -package collection +package scala.collection package immutable -import scala.collection.generic.{ CanBuildFrom, BitOperations } -import scala.collection.mutable.{ Builder, MapBuilder } +import java.lang.IllegalStateException + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions /** Utility class for long maps. - * @author David MacIver - */ + */ private[immutable] object LongMapUtils extends BitOperations.Long { def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) @@ -38,53 +40,64 @@ private[immutable] object LongMapUtils extends BitOperations.Long { } } -import LongMapUtils._ +import LongMapUtils.{Long => _, _} /** A companion object for long maps. - * - * @define Coll `LongMap` - * @define mapCanBuildFromInfo - * The standard `CanBuildFrom` instance for `$Coll` objects. - * The created value is an instance of class `MapCanBuildFrom`. 
- * @since 2.7 - */ + * + * @define Coll `LongMap` + */ object LongMap { - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[LongMap[A], (Long, B), LongMap[B]] = - ReusableCBF.asInstanceOf[CanBuildFrom[LongMap[A], (Long, B), LongMap[B]]] - private[this] val ReusableCBF = new CanBuildFrom[LongMap[Any], (Long, Any), LongMap[Any]] { - def apply(from: LongMap[Any]): Builder[(Long, Any), LongMap[Any]] = apply() - def apply(): Builder[(Long, Any), LongMap[Any]] = new MapBuilder[Long, Any, LongMap[Any]](empty[Any]) - } - def empty[T]: LongMap[T] = LongMap.Nil def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) def apply[T](elems: (Long, T)*): LongMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) -@SerialVersionUID(1224320979026293120L) + def from[V](coll: IterableOnce[(Long, V)]): LongMap[V] = + newBuilder[V].addAll(coll).result() + + def newBuilder[V]: Builder[(Long, V), LongMap[V]] = + new ImmutableBuilder[(Long, V), LongMap[V]](empty) { + def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this } + } + private[immutable] case object Nil extends LongMap[Nothing] { // Important, don't remove this! See IntMap for explanation. 
override def equals(that : Any) = that match { - case (that: AnyRef) if (this eq that) => true - case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil - case that => super.equals(that) + case _: this.type => true + case _: LongMap[_] => false // The only empty LongMaps are eq Nil + case _ => super.equals(that) } } -@SerialVersionUID(4938010434684160500L) private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { def withValue[S](s: S) = if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] else LongMap.Tip(key, s) } -@SerialVersionUID(2433491195925361636L) + private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] else LongMap.Bin[S](prefix, mask, left, right) } } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = 
toBuildFrom(this) } // Iterator over a non-empty LongMap. @@ -102,19 +115,20 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends buffer(index).asInstanceOf[LongMap[V]] } - def push(x: LongMap[V]) { + def push(x: LongMap[V]): Unit = { buffer(index) = x.asInstanceOf[AnyRef] index += 1 } push(it) /** - * What value do we assign to a tip? - */ + * What value do we assign to a tip? + */ def valueOf(tip: LongMap.Tip[V]): T def hasNext = index != 0 - final def next: T = + @tailrec + final def next(): T = pop() match { case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => { push(right) @@ -123,12 +137,12 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends case LongMap.Bin(_, _, left, right) => { push(right) push(left) - next + next() } case t@LongMap.Tip(_, _) => valueOf(t) // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap // and don't return an LongMapIterator for LongMap.Nil. - case LongMap.Nil => sys.error("Empty maps not allowed as subtrees") + case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") } } @@ -145,64 +159,80 @@ private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIt } /** - * Specialised immutable map structure for long keys, based on - * [[http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. - * - * Note: This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with the long keys. 
- * - * @since 2.7 - * @define Coll `immutable.LongMap` - * @define coll immutable long integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed abstract class LongMap[+T] -extends AbstractMap[Long, T] - with Map[Long, T] - with MapLike[Long, T, LongMap[T]] { + * Specialised immutable map structure for long keys, based on + * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. + * + * Note: This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with the long keys. + * + * @define Coll `immutable.LongMap` + * @define coll immutable long integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class LongMap[+T] extends AbstractMap[Long, T] + with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T)] @uncheckedVariance): LongMap[T] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Long, T), LongMap[T]](empty) { + def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this } + } override def empty: LongMap[T] = LongMap.Nil override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Long, T)] + val buffer = new ListBuffer[(Long, T)] foreach(buffer += _) buffer.toList } /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of long keys and corresponding values. - */ + * Iterator over key, value pairs of the map in unsigned order of the keys. 
+ * + * @return an iterator over pairs of long keys and corresponding values. + */ def iterator: Iterator[(Long, T)] = this match { case LongMap.Nil => Iterator.empty case _ => new LongMapEntryIterator(this) } /** - * Loops over the key, value pairs of the map in unsigned order of the keys. - */ + * Loops over the key, value pairs of the map in unsigned order of the keys. + */ override final def foreach[U](f: ((Long, T)) => U): Unit = this match { case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } case LongMap.Tip(key, value) => f((key, value)) case LongMap.Nil => } + override final def foreachEntry[U](f: (Long, T) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case LongMap.Tip(key, value) => f(key, value) + case LongMap.Nil => + } + override def keysIterator: Iterator[Long] = this match { case LongMap.Nil => Iterator.empty case _ => new LongMapKeyIterator(this) } /** - * Loop over the keys of the map. The same as keys.foreach(f), but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey(f: Long => Unit): Unit = this match { + * Loop over the keys of the map. The same as keys.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Long => U): Unit = this match { case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } case LongMap.Tip(key, _) => f(key) case LongMap.Nil => @@ -214,21 +244,21 @@ extends AbstractMap[Long, T] } /** - * Loop over the values of the map. The same as values.foreach(f), but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachValue(f: T => Unit): Unit = this match { + * Loop over the values of the map. The same as values.foreach(f), but may + * be more efficient. 
+ * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } case LongMap.Tip(_, value) => f(value) case LongMap.Nil => } - override def stringPrefix = "LongMap" - - override def isEmpty = this == LongMap.Nil + override protected[this] def className = "LongMap" + override def isEmpty = this eq LongMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { case LongMap.Bin(prefix, mask, left, right) => { val (newleft, newright) = (left.filter(f), right.filter(f)) @@ -241,7 +271,7 @@ extends AbstractMap[Long, T] case LongMap.Nil => LongMap.Nil } - def transform[S](f: (Long, T) => S): LongMap[S] = this match { + override def transform[S](f: (Long, T) => S): LongMap[S] = this match { case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) case LongMap.Nil => LongMap.Nil @@ -253,12 +283,14 @@ extends AbstractMap[Long, T] case LongMap.Bin(_, _, left, right) => left.size + right.size } + @tailrec final def get(key: Long): Option[T] = this match { case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None case LongMap.Nil => None } + @tailrec final override def getOrElse[S >: T](key: Long, default: => S): S = this match { case LongMap.Nil => default case LongMap.Tip(key2, value) => if (key == key2) value else default @@ -266,13 +298,14 @@ extends AbstractMap[Long, T] if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) } + @tailrec final override def apply(key: Long): T = this match { case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) - case LongMap.Tip(key2, value) => if (key == 
key2) value else sys.error("Key not found") - case LongMap.Nil => sys.error("key not found") + case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case LongMap.Nil => throw new IllegalArgumentException("key not found") } - def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) + override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { case LongMap.Bin(prefix, mask, left, right) => @@ -286,22 +319,22 @@ extends AbstractMap[Long, T] } /** - * Updates the map, using the provided function to resolve conflicts if the key is already present. - * - * Equivalent to - * {{{ - * this.get(key) match { - * case None => this.update(key, value) - * case Some(oldvalue) => this.update(key, f(oldvalue, value) - * } - * }}} - * - * @tparam S The supertype of values in this `LongMap`. - * @param key The key to update. - * @param value The value to use if there is no conflict. - * @param f The function used to resolve conflicts. - * @return The updated map. - */ + * Updates the map, using the provided function to resolve conflicts if the key is already present. + * + * Equivalent to + * {{{ + * this.get(key) match { + * case None => this.update(key, value) + * case Some(oldvalue) => this.update(key, f(oldvalue, value) + * } + * }}} + * + * @tparam S The supertype of values in this `LongMap`. + * @param key The key to update. + * @param value The value to use if there is no conflict. + * @param f The function used to resolve conflicts. + * @return The updated map. 
+ */ def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match { case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) @@ -313,7 +346,7 @@ extends AbstractMap[Long, T] case LongMap.Nil => LongMap.Tip(key, value) } - def -(key: Long): LongMap[T] = this match { + def removed(key: Long): LongMap[T] = this match { case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) this else if (zero(key, mask)) bin(prefix, mask, left - key, right) @@ -325,71 +358,71 @@ extends AbstractMap[Long, T] } /** - * A combined transform and filter function. Returns an `LongMap` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. - */ + * A combined transform and filter function. Returns an `LongMap` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
+ */ def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => { - val newleft = left.modifyOrRemove(f) - val newright = right.modifyOrRemove(f) - if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] - else bin(prefix, mask, newleft, newright) - } + case LongMap.Bin(prefix, mask, left, right) => { + val newleft = left.modifyOrRemove(f) + val newright = right.modifyOrRemove(f) + if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] + else bin(prefix, mask, newleft, newright) + } case LongMap.Tip(key, value) => f(key, value) match { case None => LongMap.Nil case Some(value2) => //hack to preserve sharing if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]] else LongMap.Tip(key, value2) - } + } case LongMap.Nil => LongMap.Nil } /** - * Forms a union map with that map, using the combining function to resolve conflicts. - * - * @tparam S The type of values in `that`, a supertype of values in `this`. - * @param that The map to form a union with. - * @param f The function used to resolve conflicts between two mappings. - * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. - */ + * Forms a union map with that map, using the combining function to resolve conflicts. + * + * @tparam S The type of values in `that`, a supertype of values in `this`. + * @param that The map to form a union with. + * @param f The function used to resolve conflicts between two mappings. + * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
+ */ def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{ case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) } else { if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) - else join[S](p1, this, p2, that) // TODO: remove [S] when scala/bug#5548 is fixed + else join(p1, this, p2, that) } - case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) // TODO: remove [S] when scala/bug#5548 is fixed + case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x)) case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) case (LongMap.Nil, x) => x case (x, LongMap.Nil) => x } /** - * Forms the intersection of these two maps with a combining function. The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. - */ + * Forms the intersection of these two maps with a combining function. 
The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. + */ def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => if (shorter(m1, m2)) { @@ -397,7 +430,7 @@ extends AbstractMap[Long, T] else if (zero(p2, m1)) l1.intersectionWith(that, f) else r1.intersectionWith(that, f) } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { + else { if (!hasMatch(p1, p2, m2)) LongMap.Nil else if (zero(p1, m2)) this.intersectionWith(l2, f) else this.intersectionWith(r2, f) @@ -414,13 +447,13 @@ extends AbstractMap[Long, T] } /** - * Left biased intersection. Returns the map that has all the same mappings as this but only for keys - * which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. - */ + * Left biased intersection. Returns the map that has all the same mappings as this but only for keys + * which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. 
+ */ def intersection[R](that: LongMap[R]): LongMap[T] = this.intersectionWith(that, (key: Long, value: T, value2: R) => value) @@ -431,15 +464,27 @@ extends AbstractMap[Long, T] final def firstKey: Long = this match { case LongMap.Bin(_, _, l, r) => l.firstKey case LongMap.Tip(k, v) => k - case LongMap.Nil => sys.error("Empty set") + case LongMap.Nil => throw new IllegalStateException("Empty set") } @tailrec final def lastKey: Long = this match { case LongMap.Bin(_, _, l, r) => r.lastKey case LongMap.Tip(k , v) => k - case LongMap.Nil => sys.error("Empty set") + case LongMap.Nil => throw new IllegalStateException("Empty set") } -} + def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = + super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) +} diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index e1a5f9c31666..8f372312512e 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,198 +14,288 @@ package scala package collection package immutable -import generic._ -import scala.util.hashing.MurmurHash3 +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.Map.Map4 +import scala.collection.mutable.{Builder, ReusableBuilder} +import SeqMap.{SeqMap1, SeqMap2, SeqMap3, SeqMap4} -/** - * A generic trait for immutable maps. Concrete classes have to provide - * functionality for the abstract methods in `Map`: - * - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def + [V1 >: V](kv: (K, V1)): Map[K, V1] - * def -(key: K): Map[K, V] - * }}} - * - * @since 1 - */ -trait Map[K, +V] extends Iterable[(K, V)] -// with GenMap[K, V] - with scala.collection.Map[K, V] - with MapLike[K, V, Map[K, V]] { self => +/** Base type of immutable Maps */ +trait Map[K, +V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with MapFactoryDefaults[K, V, Map, Iterable] { - override def empty: Map[K, V] = Map.empty - - /** Returns this $coll as an immutable map. - * - * A new map will not be built; lazy collections will stay lazy. - */ - @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0") - override def toMap[T, U](implicit ev: (K, V) <:< (T, U)): immutable.Map[T, U] = - self.asInstanceOf[immutable.Map[T, U]] + override def mapFactory: scala.collection.MapFactory[Map] = Map - override def seq: Map[K, V] = this + override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = Map.from(this.asInstanceOf[Map[K2, V2]]) /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, d) + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault[V1 >: V](d: K => V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) /** The same map with a given default value. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, x => d) + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d) +} - /** Add a key/value pair to this map. 
+/** Base trait of immutable Maps implementations + * + * @define coll immutable map + * @define Coll `immutable.Map` + */ +trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] { + + protected def coll: C with CC[K, V] + + /** Removes a key from this map, returning a new map. + * + * @param key the key to be removed + * @return a new map without a binding for ''key'' + */ + def removed(key: K): C + + /** Alias for `removed` */ + @`inline` final def - (key: K): C = removed(key) + + @deprecated("Use -- with an explicit collection", "2.13.0") + def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * $willForceEvaluation + * + * @param keys the collection containing the removed elements. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the elements of `elems`. + */ + def removedAll(keys: IterableOnce[K]): C = keys.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for `removedAll` */ + @`inline` final override def -- (keys: IterableOnce[K]): C = removedAll(keys) + + /** Creates a new map obtained by updating this map with a given key/value pair. * @param key the key * @param value the value - * @return A new map with the new binding added to this map + * @tparam V1 the type of the added value + * @return A new map with the new key/value mapping added to this map. */ - override def updated [V1 >: V](key: K, value: V1): Map[K, V1] - def + [V1 >: V](kv: (K, V1)): Map[K, V1] + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + + /** + * Update a mapping for the specified key and its current optionally mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. 
+ * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a function that receives current optionally mapped value and return a new mapping + * @return A new map with the updated mapping with the key + */ + def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + + /** + * Alias for `updated` + * + * @param kv the key/value pair. + * @tparam V1 the type of the value in the key/value pair. + * @return A new map with the new binding added to this map. + */ + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + + /** This function transforms all the values of mappings contained + * in this map with function `f`. 
+ * + * @param f A function over keys and values + * @return the updated map + */ + def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } + + override def keySet: Set[K] = new ImmutableKeySet + + /** The implementation class of the set returned by `keySet` */ + protected[immutable] class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem + def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this + } + } -/** $factoryInfo - * @define Coll `immutable.Map` - * @define coll immutable map - */ -object Map extends ImmutableMapFactory[Map] { +trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends MapOps[K, V, CC, C] + with collection.StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { - override def newBuilder[A, B]: mutable.Builder[(A, B), Map[A, B]] = new MapBuilderImpl[A, B] + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]): CC[K, V1] = { + var result: CC[K, V1] = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} - /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (K, V), Map[K, V]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] - def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] +/** + * $factoryInfo + * @define coll immutable map + * @define Coll `immutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory[Map] { - @SerialVersionUID(-7464981207502461188L) - class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault[K, V](underlying, d) with Map[K, V] { - override def empty = new WithDefault(underlying.empty, d) - override def updated[V1 >: V](key: K, value: V1): 
WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) - override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, d) - override def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, x => d) + @SerialVersionUID(3L) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K => V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + def get(key: K): Option[V] = underlying.get(key) + + override def default(key: K): V = defaultValue(key) + + override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory + + def iterator: Iterator[(K, V)] = underlying.iterator + + override def isEmpty: Boolean = underlying.isEmpty + + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + new WithDefault(underlying.concat(xs), defaultValue) + + def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) } - @SerialVersionUID(-5626373049574850357L) - private object EmptyMap extends 
AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable with HasForeachEntry[Any, Nothing]{ + def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] + + def from[K, V](it: IterableOnce[(K, V)]): Map[K, V] = + it match { + case it: Iterable[_] if it.isEmpty => empty[K, V] + // Since IterableOnce[(K, V)] launders the variance of K, + // identify only our implementations which can be soundly substituted. + // For example, the ordering used by sorted maps would fail on widened key type. (scala/bug#12745) + // The following type test is not sufficient: case m: Map[K, V] => m + case m: HashMap[K, V] => m + case m: Map1[K, V] => m + case m: Map2[K, V] => m + case m: Map3[K, V] => m + case m: Map4[K, V] => m + //case m: WithDefault[K, V] => m // cf SortedMap.WithDefault + //case m: SeqMap[K, V] => SeqMap.from(it) // inlined here to avoid hard dependency + case m: ListMap[K, V] => m + case m: TreeSeqMap[K, V] => m + case m: VectorMap[K, V] => m + case m: SeqMap1[K, V] => m + case m: SeqMap2[K, V] => m + case m: SeqMap3[K, V] => m + case m: SeqMap4[K, V] => m + + // Maps with a reified key type must be rebuilt, such as `SortedMap` and `IntMap`. 
+ case _ => newBuilder[K, V].addAll(it).result() + } + + def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl + + @SerialVersionUID(3L) + private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) override def contains(key: Any) = false def get(key: Any): Option[Nothing] = None override def getOrElse [V1](key: Any, default: => V1): V1 = default - override def iterator: Iterator[(Any, Nothing)] = Iterator.empty + def iterator: Iterator[(Any, Nothing)] = Iterator.empty override def keysIterator: Iterator[Any] = Iterator.empty override def valuesIterator: Iterator[Nothing] = Iterator.empty - override def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) - def + [V1](kv: (Any, V1)): Map[Any, V1] = updated(kv._1, kv._2) - override def ++[V1 >: Nothing](xs: GenTraversableOnce[(Any, V1)]): Map[Any, V1] = ++[(Any, V1), Map[Any, V1]](xs)(Map.canBuildFrom[Any, V1]) - override def ++[B >: (Any, Nothing), That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Map[Any, Nothing], B, That]): That = { - if (isMapCBF(bf)) - that match { - case hm: HashMap[a, b] if hm.size > 4 => hm.asInstanceOf[That] - case m: AnyRef if m eq EmptyMap => this.asInstanceOf[That] - case m: Map1[_, _] => m.asInstanceOf[That] - case m: Map2[_, _] => m.asInstanceOf[That] - case m: Map3[_, _] => m.asInstanceOf[That] - case m: Map4[_, _] => m.asInstanceOf[That] - - case _ => super.++(that)(bf) - } - else if (isHashMapCBF(bf)) - that match { - case hm: HashMap[a, b] => hm.asInstanceOf[That] - - case _ => super.++(that)(bf) - } - else super.++(that)(bf) - } - def - (key: Any): Map[Any, Nothing] = this - override def hashCode: Int = MurmurHash3.emptyMapHash - override private[immutable] def foreachEntry[U](f: (Any, Nothing) => U): Unit = () - } - 
@SerialVersionUID(3L) - private abstract class MapNIterator[T]() extends AbstractIterator[T] with Serializable { - private[this] var current = 0 - def hasNext = current < size - def apply(i: Int): T - def size: Int - def next(): T = - if (hasNext) { - val r = apply(current) - current += 1 - r - } else Iterator.empty.next() - - override def drop(n: Int): Iterator[T] = { - if (n > 0) current = Math.min(current + n, size) - this + def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) + def removed(key: Any): Map[Any, Nothing] = this + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]): Map[Any, V2] = suffix match { + case m: immutable.Map[Any, V2] => m + case _ => super.concat(suffix) } } - @SerialVersionUID(-9131943191104946031L) - class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with Map[K, V] with Serializable with HasForeachEntry[K, V] { - override def size = 1 - override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = key == key1 + @SerialVersionUID(3L) + final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = key == key1 def get(key: K): Option[V] = if (key == key1) Some(value1) else None override def getOrElse [V1 >: V](key: K, default: => V1): V1 = if (key == key1) value1 else default - override def iterator = Iterator.single((key1, value1)) + def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) override def keysIterator: Iterator[K] = Iterator.single(key1) override def valuesIterator: Iterator[V] = Iterator.single(value1) - override def updated [V1 
>: V] (key: K, value: V1): Map[K, V1] = + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = if (key == key1) new Map1(key1, value) else new Map2(key1, value1, key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = ++[(K, V1), Map[K, V1]](xs)(Map.canBuildFrom[K, V1]) - override def ++[B >: (K, V), That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Map[K, V], B, That]): That = { - if (isMapCBF(bf)) that match { - case m: AnyRef if m eq EmptyMap => this.asInstanceOf[That] - case m: Map1[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map2[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map3[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map4[K, V] => m.addTo(this).asInstanceOf[That] - case _ => super.++(that)(bf) - } else super.++(that)(bf) - } - private[Map] def addTo[V1 >: V](m : Map[K,V1]): Map[K, V1] = { - m.updated(key1, value1) - } - def - (key: K): Map[K, V] = + def removed(key: K): Map[K, V] = if (key == key1) Map.empty else this override def foreach[U](f: ((K, V)) => U): Unit = { f((key1, value1)) } override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) - override private[scala] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = if (pred((key1, value1)) != isFlipped) this else Map.empty + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] + else new Map1(key1, walue1) + } override def hashCode(): Int = { import scala.util.hashing.MurmurHash3 var a, b = 0 val N = 1 var c = 1 - var h = MurmurHash3.product2Hash(key1, value1) + var h = MurmurHash3.tuple2Hash(key1, value1) a += h b ^= h - if 
(h != 0) c *= h + c *= h | 1 h = MurmurHash3.mapSeed h = MurmurHash3.mix(h, a) @@ -213,19 +303,18 @@ object Map extends ImmutableMapFactory[Map] { h = MurmurHash3.mixLast(h, c) MurmurHash3.finalizeHash(h, N) } - override private[immutable] def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - } } - @SerialVersionUID(-85684685400398742L) - class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with Map[K, V] with Serializable with HasForeachEntry[K, V] { - override def size = 2 - override def apply(key: K) = + @SerialVersionUID(3L) + final class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else if (key == key2) value2 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) + override def contains(key: K): Boolean = (key == key1) || (key == key2) def get(key: K): Option[V] = if (key == key1) Some(value1) else if (key == key2) Some(value2) @@ -234,38 +323,36 @@ object Map extends ImmutableMapFactory[Map] { if (key == key1) value1 else if (key == key2) value2 else default - //we have to insert these additional methods to avoid the compiler rewriting the field names and changing binary format - private def _getKey(i: Int) = i match { case 0 => key1 case 1 => key2} - private def _getValue(i: Int) = i match { case 0 => value1 case 1 => value2} - private abstract class Map2Iterator[T] extends MapNIterator[T]{ - final def getKey(i: Int) = _getKey(i) - final def getValue(i: Int) = _getValue(i) - override final def size = 2 - } - override def iterator: Iterator[(K,V)] = new Map2Iterator[(K,V)] {def apply(i: Int) = (getKey(i), getValue(i))} - override def keysIterator: Iterator[K] = new 
Map2Iterator[K] {def apply(i: Int) = getKey(i)} - override def valuesIterator: Iterator[V] = new Map2Iterator[V] {def apply(i: Int) = getValue(i)} - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = + def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map2Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map2Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map2Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 2 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = if (key == key1) new Map2(key1, value, key2, value2) else if (key == key2) new Map2(key1, value1, key2, value) else new Map3(key1, value1, key2, value2, key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = ++[(K, V1), Map[K, V1]](xs)(Map.canBuildFrom[K, V1]) - override def ++[B >: (K, V), That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Map[K, V], B, That]): That = { - if (isMapCBF(bf)) that match { - case m: AnyRef if m eq EmptyMap => this.asInstanceOf[That] - case m: Map1[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map2[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map3[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map4[K, V] => m.addTo(this).asInstanceOf[That] - case _ => super.++(that)(bf) - } else super.++(that)(bf) - } - private[Map] def addTo[V1 
>: V](m : Map[K,V1]): Map[K, V1] = { - m.updated(key1, value1). - updated(key2, value2) - } - def - (key: K): Map[K, V] = + def removed(key: K): Map[K, V] = if (key == key1) new Map1(key2, value2) else if (key == key2) new Map1(key1, value1) else this @@ -274,7 +361,7 @@ object Map extends ImmutableMapFactory[Map] { } override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) - override private[scala] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { var k1 = null.asInstanceOf[K] var v1 = null.asInstanceOf[V] var n = 0 @@ -287,21 +374,28 @@ object Map extends ImmutableMapFactory[Map] { case 2 => this } } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map2(key1, walue1, key2, walue2) + } override def hashCode(): Int = { import scala.util.hashing.MurmurHash3 var a, b = 0 val N = 2 var c = 1 - var h = MurmurHash3.product2Hash(key1, value1) + var h = MurmurHash3.tuple2Hash(key1, value1) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 - h = MurmurHash3.product2Hash(key2, value2) + h = MurmurHash3.tuple2Hash(key2, value2) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 h = MurmurHash3.mapSeed h = MurmurHash3.mix(h, a) @@ -309,21 +403,19 @@ object Map extends ImmutableMapFactory[Map] { h = MurmurHash3.mixLast(h, c) MurmurHash3.finalizeHash(h, N) } - override private[immutable] def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - } } - @SerialVersionUID(-6400718707310517135L) - class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, 
key3: K, value3: V) extends AbstractMap[K, V] with Map[K, V] with Serializable with HasForeachEntry[K, V] { - override def size = 3 - override def apply(key: K) = + @SerialVersionUID(3L) + class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else if (key == key2) value2 else if (key == key3) value3 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) def get(key: K): Option[V] = if (key == key1) Some(value1) else if (key == key2) Some(value2) @@ -334,40 +426,38 @@ object Map extends ImmutableMapFactory[Map] { else if (key == key2) value2 else if (key == key3) value3 else default - //we have to insert these additional methods to avoid the compiler rewriting the field names and changing binary format - private def _getKey(i: Int) = i match { case 0 => key1 case 1 => key2 case 2 => key3} - private def _getValue(i: Int) = i match { case 0 => value1 case 1 => value2 case 2 => value3} - private abstract class Map3Iterator[T] extends MapNIterator[T]{ - final def getKey(i: Int) = _getKey(i) - final def getValue(i: Int) = _getValue(i) - override final def size = 3 - } - override def iterator: Iterator[(K,V)] = new Map3Iterator[(K,V)] {def apply(i: Int) = (getKey(i), getValue(i))} - override def keysIterator: Iterator[K] = new Map3Iterator[K] {def apply(i: Int) = getKey(i)} - override def valuesIterator: Iterator[V] = new Map3Iterator[V] {def apply(i: Int) = getValue(i)} - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = + def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { + override 
protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map3Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map3Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map3Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 3 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = if (key == key1) new Map3(key1, value, key2, value2, key3, value3) else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) else new Map4(key1, value1, key2, value2, key3, value3, key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = ++[(K, V1), Map[K, V1]](xs)(Map.canBuildFrom[K, V1]) - override def ++[B >: (K, V), That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Map[K, V], B, That]): That = { - if (isMapCBF(bf)) that match { - case m: AnyRef if m eq EmptyMap => this.asInstanceOf[That] - case m: Map1[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map2[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map3[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map4[K, V] => m.addTo(this).asInstanceOf[That] - case _ => super.++(that)(bf) - } else super.++(that)(bf) - } - private[Map] def addTo[V1 >: V](m : Map[K,V1]): Map[K, V1] = { - m.updated(key1, value1). - updated(key2, value2). 
- updated(key3, value3) - } - def - (key: K): Map[K, V] = + def removed(key: K): Map[K, V] = if (key == key1) new Map2(key2, value2, key3, value3) else if (key == key2) new Map2(key1, value1, key3, value3) else if (key == key3) new Map2(key1, value1, key2, value2) @@ -377,7 +467,7 @@ object Map extends ImmutableMapFactory[Map] { } override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) - override private[scala] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { var k1, k2 = null.asInstanceOf[K] var v1, v2 = null.asInstanceOf[V] var n = 0 @@ -392,26 +482,35 @@ object Map extends ImmutableMapFactory[Map] { case 3 => this } } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map3(key1, walue1, key2, walue2, key3, walue3) + } override def hashCode(): Int = { import scala.util.hashing.MurmurHash3 var a, b = 0 val N = 3 var c = 1 - var h = MurmurHash3.product2Hash(key1, value1) + var h = MurmurHash3.tuple2Hash(key1, value1) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 - h = MurmurHash3.product2Hash(key2, value2) + h = MurmurHash3.tuple2Hash(key2, value2) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 - h = MurmurHash3.product2Hash(key3, value3) + h = MurmurHash3.tuple2Hash(key3, value3) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 h = MurmurHash3.mapSeed h = MurmurHash3.mix(h, a) @@ -419,23 +518,22 @@ object Map extends 
ImmutableMapFactory[Map] { h = MurmurHash3.mixLast(h, c) MurmurHash3.finalizeHash(h, N) } - override private[immutable] def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - f(key3, value3) - } } - @SerialVersionUID(-7992135791595275193L) - class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends AbstractMap[K, V] with Map[K, V] with Serializable with HasForeachEntry[K, V] { - override def size = 4 - override def apply(key: K) = + @SerialVersionUID(3L) + final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) + extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + + override def size: Int = 4 + override def knownSize: Int = 4 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else if (key == key2) value2 else if (key == key3) value3 else if (key == key4) value4 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) def get(key: K): Option[V] = if (key == key1) Some(value1) else if (key == key2) Some(value2) @@ -448,42 +546,40 @@ object Map extends ImmutableMapFactory[Map] { else if (key == key3) value3 else if (key == key4) value4 else default - //we have to insert these additional methods to avoid the compiler rewriting the field names and changing binary format - private def _getKey(i: Int) = i match { case 0 => key1 case 1 => key2 case 2 => key3 case 3 => key4 } - private def _getValue(i: Int) = i match { case 0 => value1 case 1 => value2 case 2 => value3 case 3 => value4} - private abstract class Map4Iterator[T] extends MapNIterator[T]{ - final def getKey(i: Int) = _getKey(i) - final def getValue(i: Int) = _getValue(i) - 
override final def size = 4 - } - override def iterator: Iterator[(K,V)] = new Map4Iterator[(K,V)] {def apply(i: Int) = (getKey(i), getValue(i))} - override def keysIterator: Iterator[K] = new Map4Iterator[K] {def apply(i: Int) = getKey(i)} - override def valuesIterator: Iterator[V] = new Map4Iterator[V] {def apply(i: Int) = getValue(i)} - override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = + def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map4Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map4Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map4Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 4 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case 3 => nextResult(key4, value4) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) - else (new HashMap).updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) - def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = ++[(K, V1), Map[K, 
V1]](xs)(Map.canBuildFrom[K, V1]) - override def ++[B >: (K, V), That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Map[K, V], B, That]): That = { - if (isMapCBF(bf)) that match { - case m: AnyRef if m eq EmptyMap => this.asInstanceOf[That] - case m: Map1[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map2[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map3[K, V] => m.addTo(this).asInstanceOf[That] - case m: Map4[K, V] => m.addTo(this).asInstanceOf[That] - case _ => super.++(that)(bf) - } else super.++(that)(bf) - } - private[Map] def addTo[V1 >: V](m : Map[K,V1]): Map[K, V1] = { - m.updated(key1, value1). - updated(key2, value2). - updated(key3, value3). - updated(key4, value4) - } - def - (key: K): Map[K, V] = + else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) + def removed(key: K): Map[K, V] = if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) @@ -494,7 +590,7 @@ object Map extends ImmutableMapFactory[Map] { } override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) - override private[scala] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { var k1, k2, k3 = null.asInstanceOf[K] var v1, v2, v3 = null.asInstanceOf[V] var n = 0 @@ -511,31 +607,44 @@ object Map extends ImmutableMapFactory[Map] { case 4 => this } } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, 
value3) + val walue4 = f(key4, value4) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && + (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) + } + private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = + builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) override def hashCode(): Int = { import scala.util.hashing.MurmurHash3 var a, b = 0 val N = 4 var c = 1 - var h = MurmurHash3.product2Hash(key1, value1) + var h = MurmurHash3.tuple2Hash(key1, value1) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 - h = MurmurHash3.product2Hash(key2, value2) + h = MurmurHash3.tuple2Hash(key2, value2) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 - h = MurmurHash3.product2Hash(key3, value3) + h = MurmurHash3.tuple2Hash(key3, value3) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 - h = MurmurHash3.product2Hash(key4, value4) + h = MurmurHash3.tuple2Hash(key4, value4) a += h b ^= h - if (h != 0) c *= h + c *= h | 1 h = MurmurHash3.mapSeed h = MurmurHash3.mix(h, a) @@ -543,115 +652,61 @@ object Map extends ImmutableMapFactory[Map] { h = MurmurHash3.mixLast(h, c) MurmurHash3.finalizeHash(h, N) } - override private[immutable] def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - f(key3, value3) - f(key4, value4) - } } - private [immutable] final class HashCodeAccumulator extends scala.runtime.AbstractFunction2[Any, Any, Unit] { - import scala.util.hashing.MurmurHash3 - private var a, b, n = 0 - private var c = 1 - def apply(key: Any, value: Any): Unit = { - val h = MurmurHash3.product2Hash(key, value) - a += h - b ^= h - if (h != 0) c *= h - n += 1 - } - - def finalizeHash: Int = { - var h = MurmurHash3.mapSeed - h = 
MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, n) - } - } - - private def isHashMapCBF(cbf: CanBuildFrom[_,_,_]) = { - cbf match { - case w: WrappedCanBuildFrom[_,_,_] => - val unwrapped = w.wrapped - unwrapped eq HashMap.canBuildFrom - case _ => - cbf eq HashMap.canBuildFrom - } - } - private def isMapCBF(cbf: CanBuildFrom[_,_,_]) = { - cbf match { - case w: WrappedCanBuildFrom[_, _, _] => - val unwrapped = w.wrapped - unwrapped eq Map.canBuildFrom - case _ => - cbf eq Map.canBuildFrom - } - } - - /** Builder for Map. - */ - private final class MapBuilderImpl[K, V] extends mutable.ReusableBuilder[(K,V), Map[K, V]] { - - private[this] var elems: Map[K, V] = Map.empty[K, V] - private[this] var switchedToHashMapBuilder: Boolean = false - private[this] var hashMapBuilder: HashMap.HashMapBuilder[K, V] = _ +} - override def clear(): Unit = { - elems =Map.empty[K, V] - if (hashMapBuilder ne null) - hashMapBuilder.clear() - switchedToHashMapBuilder = false - } +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ +abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] - override def result(): Map[K, V] = - if (switchedToHashMapBuilder) hashMapBuilder.result() else elems +private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { + private[this] var elems: Map[K, V] = Map.empty + private[this] var switchedToHashMapBuilder: Boolean = false + private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ - private def convertToHashMapBuilder(): Unit = { - switchedToHashMapBuilder = true - if (hashMapBuilder eq null) - hashMapBuilder = new HashMap.HashMapBuilder[K, V] + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) + else elems.getOrElse(key, value) - hashMapBuilder ++= elems + override def clear(): Unit = { + elems = Map.empty + if (hashMapBuilder != null) { + hashMapBuilder.clear() } + switchedToHashMapBuilder = false + } - override def +=(elem: (K, V)): MapBuilderImpl.this.type = { - if (switchedToHashMapBuilder) { - hashMapBuilder += elem - } else if (elems.size < 4) { - elems = elems + elem + override def result(): Map[K, V] = + if (switchedToHashMapBuilder) hashMapBuilder.result() else elems + + def addOne(key: K, value: V): this.type = { + if (switchedToHashMapBuilder) { + hashMapBuilder.addOne(key, value) + } else if (elems.size < 4) { + elems = elems.updated(key, value) + } else { + // assert(elems.size == 4) + if (elems.contains(key)) { + elems = elems.updated(key, value) } else { - val key = elem._1 - val newValue = elem._2 - elems.getOrElse(key, Sentinel) match { - case Sentinel => - convertToHashMapBuilder() - hashMapBuilder += elem - case existingValue => - if (existingValue.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) - elems = elems + elem + switchedToHashMapBuilder = true + if (hashMapBuilder == null) { + hashMapBuilder = new HashMapBuilder } + elems.asInstanceOf[Map4[K, 
V]].buildTo(hashMapBuilder) + hashMapBuilder.addOne(key, value) } - this } - override def ++=(xs: TraversableOnce[(K, V)]): MapBuilderImpl.this.type = { - xs match { - case _ if switchedToHashMapBuilder => - hashMapBuilder ++= xs + this + } - case map: collection.Map[K,V] if map.size > 4 => - convertToHashMapBuilder() - hashMapBuilder ++= map + def addOne(elem: (K, V)) = addOne(elem._1, elem._2) - case _ => super.++= (xs) - } + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToHashMapBuilder) { + hashMapBuilder.addAll(xs) this + } else { + super.addAll(xs) } - } - private val Sentinel = new Object } - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala deleted file mode 100644 index 52ae2acddbd1..000000000000 --- a/src/library/scala/collection/immutable/MapLike.scala +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import generic._ -import parallel.immutable.ParMap - -/** - * A generic template for immutable maps from keys of type `K` - * to values of type `V`. 
- * To implement a concrete map, you need to provide implementations of the - * following methods (where `This` is the type of the actual map implementation): - * - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def + [V1 >: V](kv: (K, V)): Map[K, V1] - * def - (key: K): This - * }}} - * - * If you wish that transformer methods like `take`, `drop`, `filter` return the - * same kind of map, you should also override: - * - * {{{ - * def empty: This - * }}} - * - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * - * @tparam K the type of the keys contained in this collection. - * @tparam V the type of the values associated with the keys. - * @tparam This The type of the actual map implementation. - * - * @author Martin Odersky - * @since 2.8 - * @define Coll immutable.Map - * @define coll immutable map - */ -trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]] - extends scala.collection.MapLike[K, V, This] - with Parallelizable[(K, V), ParMap[K, V]] -{ -self => - - protected[this] override def parCombiner = ParMap.newCombiner[K, V] - - /** A new immutable map updating this map with a given key/value mapping. - * @param key the key - * @param value the value - * @return A new map with the new key/value mapping - */ - override def updated [V1 >: V](key: K, value: V1): immutable.Map[K, V1] = this + ((key, value)) - - /** Add a key/value pair to this map, returning a new map. - * @param kv the key/value pair. - * @return A new map with the new binding added to this map. - */ - def + [V1 >: V] (kv: (K, V1)): immutable.Map[K, V1] - - /** Adds two or more elements to this collection and returns - * a new collection. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return A new map with the new bindings added to this map. 
- */ - override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): immutable.Map[K, V1] = - this + elem1 + elem2 ++ elems - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object consisting of key-value pairs. - * @return a new immutable map with the bindings of this map and those from `xs`. - */ - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): immutable.Map[K, V1] = - ((repr: immutable.Map[K, V1]) /: xs.seq) (_ + _) - - /** Filters this map by retaining only keys satisfying a predicate. - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - override def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) with DefaultMap[K, V] - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - override def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) with DefaultMap[K, W] - - /** Collects all keys of this map in a set. - * @return a set containing all keys of this map. - */ - override def keySet: immutable.Set[K] = new ImmutableDefaultKeySet - - protected class ImmutableDefaultKeySet extends super.DefaultKeySet with immutable.Set[K] { - override def + (elem: K): immutable.Set[K] = - if (this(elem)) this - else immutable.Set[K]() ++ this + elem - override def - (elem: K): immutable.Set[K] = - if (this(elem)) immutable.Set[K]() ++ this - elem - else this - - // ImmutableDefaultKeySet is only protected, so we won't warn on override. 
- // Someone could override in a way that makes widening not okay - // (e.g. by overriding +, though the version in this class is fine) - override def toSet[B >: K]: Set[B] = this.asInstanceOf[Set[B]] - } - - /** This function transforms all the values of mappings contained - * in this map with function `f`. - * - * @param f A function over keys and values - * @return the updated map - */ - def transform[W, That](f: (K, V) => W)(implicit bf: CanBuildFrom[This, (K, W), That]): That = { - val b = bf(repr) - for ((key, value) <- this) b += ((key, f(key, value))) - b.result() - } -} diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala deleted file mode 100644 index 75e5859be70c..000000000000 --- a/src/library/scala/collection/immutable/MapProxy.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -/** - * This is a simple wrapper class for `scala.collection.immutable.Map`. - * - * It is most useful for assembling customized map abstractions - * dynamically using object composition and forwarding. 
- * - * @author Matthias Zenger, Martin Odersky - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] { - override def repr = this - private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] = - new MapProxy[A, B1] { val self = newSelf } - - override def empty = newProxy(self.empty) - override def updated [B1 >: B](key: A, value: B1) = newProxy(self.updated(key, value)) - - override def -(key: A) = newProxy(self - key) - override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv) - override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*)) - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq) - - override def keySet: immutable.Set[A] = new SetProxy[A] { val self = MapProxy.this.self.keySet } - override def filterKeys(p: A => Boolean) = self.filterKeys(p) - override def mapValues[C](f: B => C) = self.mapValues(f) -} diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index c14fb5ded778..78efb2adafca 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,65 +10,107 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package immutable +package scala.collection.immutable -// TODO: Now the specialization exists there is no clear reason to have -// separate classes for Range/NumericRange. Investigate and consolidate. 
+import scala.collection.Stepper.EfficientSplit +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import scala.collection.generic.CommonErrors /** `NumericRange` is a more generic version of the - * `Range` class which works with arbitrary types. - * It must be supplied with an `Integral` implementation of the - * range type. - * - * Factories for likely types include `Range.BigInt`, `Range.Long`, - * and `Range.BigDecimal`. `Range.Int` exists for completeness, but - * the `Int`-based `scala.Range` should be more performant. - * - * {{{ - * val r1 = new Range(0, 100, 1) - * val veryBig = Int.MaxValue.toLong + 1 - * val r2 = Range.Long(veryBig, veryBig + 100, 1) - * assert(r1 sameElements r2.map(_ - veryBig)) - * }}} - * - * @author Paul Phillips - * @define Coll `NumericRange` - * @define coll numeric range - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-5580158174769432538L) -abstract class NumericRange[T] - (val start: T, val end: T, val step: T, val isInclusive: Boolean) - (implicit num: Integral[T]) -extends AbstractSeq[T] with IndexedSeq[T] with Serializable { + * `Range` class which works with arbitrary types. + * It must be supplied with an `Integral` implementation of the + * range type. + * + * Factories for likely types include `Range.BigInt`, `Range.Long`, + * and `Range.BigDecimal`. `Range.Int` exists for completeness, but + * the `Int`-based `scala.Range` should be more performant. 
+ * + * {{{ + * val r1 = Range(0, 100, 1) + * val veryBig = Int.MaxValue.toLong + 1 + * val r2 = Range.Long(veryBig, veryBig + 100, 1) + * assert(r1 sameElements r2.map(_ - veryBig)) + * }}} + * + * @define Coll `NumericRange` + * @define coll numeric range + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed class NumericRange[T]( + val start: T, + val end: T, + val step: T, + val isInclusive: Boolean +)(implicit + num: Integral[T] +) + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] + with Serializable { self => + + override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { + import scala.collection.convert._ + import impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length) + case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length) + case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Note that NumericRange must be invariant so that constructs - * such as "1L to 10 by 5" do not infer the range type as AnyVal. - */ + * such as "1L to 10 by 5" do not infer the range type as AnyVal. + */ import num._ // See comment in Range for why this must be lazy. 
- private lazy val numRangeElements: Int = - NumericRange.count(start, end, step, isInclusive) - - override def length = numRangeElements - override def isEmpty = length == 0 - override lazy val last: T = - if (length == 0) Nil.last + override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) + override lazy val isEmpty: Boolean = ( + (num.gt(start, end) && num.gt(step, num.zero)) + || (num.lt(start, end) && num.lt(step, num.zero)) + || (num.equiv(start, end) && !isInclusive) + ) + override def last: T = + if (isEmpty) Nil.head else locationAfterN(length - 1) + override def init: NumericRange[T] = + if (isEmpty) Nil.init + else new NumericRange(start, end - step, step, isInclusive) + + override def head: T = if (isEmpty) Nil.head else start + override def tail: NumericRange[T] = + if (isEmpty) Nil.tail + else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) + else new NumericRange.Exclusive(start + step, end, step) /** Create a new range with the start and end values of this range and - * a new `step`. - */ + * a new `step`. + */ def by(newStep: T): NumericRange[T] = copy(start, end, newStep) + /** Create a copy of this range. 
- */ - def copy(start: T, end: T, step: T): NumericRange[T] + */ + def copy(start: T, end: T, step: T): NumericRange[T] = + new NumericRange(start, end, step, isInclusive) + + @throws[IndexOutOfBoundsException] + def apply(idx: Int): T = { + if (idx < 0 || idx >= length) + throw CommonErrors.indexOutOfBounds(index = idx, max = length - 1) + else locationAfterN(idx) + } - override def foreach[U](f: T => U) { + override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { var count = 0 var current = start while (count < length) { @@ -78,6 +120,38 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { } } + private[this] def indexOfTyped(elem: T, from: Int): Int = + posOf(elem) match { + case pos if pos >= from => pos + case _ => -1 + } + + final override def indexOf[B >: T](elem: B, from: Int): Int = + try indexOfTyped(elem.asInstanceOf[T], from) + catch { case _: ClassCastException => super.indexOf(elem, from) } + + private[this] def lastIndexOfTyped(elem: T, end: Int): Int = + posOf(elem) match { + case pos if pos <= end => pos + case _ => -1 + } + + final override def lastIndexOf[B >: T](elem: B, end: Int = length - 1): Int = + try lastIndexOfTyped(elem.asInstanceOf[T], end) + catch { case _: ClassCastException => super.lastIndexOf(elem, end) } + + private[this] def posOf(i: T): Int = + /* + If i is in this NumericRange, its position can simply be calculated by taking the amount of values up till i. + NumericRange.count does this in an most efficient manner. + Note that the contains() method throws an exception if the range has more than Int.MaxValue elements, but so did + the original indexOf / lastIndexOf functions, so no functionality changed. */ + if (contains(i)) { + /* Because of zero indexing, the count is always one higher than the index. This can be simply solved by setting + isInclusive = false. 
*/ + NumericRange.count(this.start, i, this.step, isInclusive = false) + } else -1 + // TODO: these private methods are straight copies from Range, duplicated // to guard against any (most likely illusory) performance drop. They should // be eliminated one way or another. @@ -86,99 +160,129 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { // whether it is a member of the sequence (i.e. when step > 1.) private def isWithinBoundaries(elem: T) = !isEmpty && ( (step > zero && start <= elem && elem <= last ) || - (step < zero && last <= elem && elem <= start) - ) + (step < zero && last <= elem && elem <= start) + ) // Methods like apply throw exceptions on invalid n, but methods like take/drop // are forgiving: therefore the checks are with the methods. private def locationAfterN(n: Int): T = start + (step * fromInt(n)) + private def crossesTheEndAfterN(n: Int): Boolean = { + // if we're sure that subtraction in the context of T won't overflow, we use this function + // to calculate the length of the range + def unsafeRangeLength(r: NumericRange[T]): T = { + val diff = num.minus(r.end, r.start) + val quotient = num.quot(diff, r.step) + val remainder = num.rem(diff, r.step) + if (!r.isInclusive && num.equiv(remainder, num.zero)) + num.max(quotient, num.zero) + else + num.max(num.plus(quotient, num.one), num.zero) + } + + // detects whether value can survive a bidirectional trip to -and then from- Int. 
+ def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value) + + val stepIsInTheSameDirectionAsStartToEndVector = + (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one) + + if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1 + + val sameSign = num.equiv(num.sign(start), num.sign(end)) + + if (sameSign) { // subtraction is safe + val len = unsafeRangeLength(this) + if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len) + } else { + // split to two ranges, which subtraction is safe in both of them (around zero) + val stepsRemainderToZero = num.rem(start, step) + val walksOnZero = num.equiv(stepsRemainderToZero, num.zero) + val closestToZero = if (walksOnZero) -step else stepsRemainderToZero + + /* + When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T, + so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step). + Same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength. + After performing such operation, there are some elements remaining in between and around zero, + which their length is represented by carry. 
+ */ + val (l: NumericRange[T], r: NumericRange[T], carry: Int) = + if (num.lt(start, num.zero)) { + if (walksOnZero) { + val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) + (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2) + } else { + (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1) + } + } else { + if (walksOnZero) { + val twoStepsAfterZero = num.times(step, num.fromInt(2)) + (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2) + } else { + val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) + (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2) + } + } + + val leftLength = unsafeRangeLength(l) + val rightLength = unsafeRangeLength(r) + + // instead of `n >= rightLength + leftLength + curry` which may cause addition overflow, + // this can be used `(n - leftLength - curry) >= rightLength` (Both in Int and T, depends on whether the lengths fit in Int) + if (fitsInInteger(leftLength) && fitsInInteger(rightLength)) + n - num.toInt(leftLength) - carry >= num.toInt(rightLength) + else + num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength) + } + } + // When one drops everything. Can't ever have unchecked operations // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } // will overflow. This creates an exclusive range where start == end // based on the given value. 
private def newEmptyRange(value: T) = NumericRange(value, value, step) - final override def take(n: Int): NumericRange[T] = ( - if (n <= 0 || length == 0) newEmptyRange(start) - else if (n >= length) this + override def take(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (crossesTheEndAfterN(n)) this else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) - ) + } - final override def drop(n: Int): NumericRange[T] = ( - if (n <= 0 || length == 0) this - else if (n >= length) newEmptyRange(end) + override def drop(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) this + else if (crossesTheEndAfterN(n)) newEmptyRange(end) else copy(locationAfterN(n), end, step) - ) - - def apply(idx: Int): T = { - if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString) - else locationAfterN(idx) } + override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) + + override def reverse: NumericRange[T] = + if (isEmpty) this + else { + val newStep = -step + if (num.sign(newStep) == num.sign(step)) { + throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") + } else new NumericRange.Inclusive(last, start, newStep) + } + import NumericRange.defaultOrdering override def min[T1 >: T](implicit ord: Ordering[T1]): T = - // We can take the fast path: - // - If the Integral of this NumericRange is also the requested Ordering - // (Integral <: Ordering). This can happen for custom Integral types. - // - The Ordering is the default Ordering of a well-known Integral type. + // We can take the fast path: + // - If the Integral of this NumericRange is also the requested Ordering + // (Integral <: Ordering). This can happen for custom Integral types. + // - The Ordering is the default Ordering of a well-known Integral type. 
if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.signum(step) > 0) head + if (num.sign(step) > zero) head else last } else super.min(ord) override def max[T1 >: T](implicit ord: Ordering[T1]): T = - // See comment for fast path in min(). + // See comment for fast path in min(). if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.signum(step) > 0) last + if (num.sign(step) > zero) last else head } else super.max(ord) - // Motivated by the desire for Double ranges with BigDecimal precision, - // we need some way to map a Range and get another Range. This can't be - // done in any fully general way because Ranges are not arbitrary - // sequences but step-valued, so we have a custom method only we can call - // which we promise to use responsibly. - // - // The point of it all is that - // - // 0.0 to 1.0 by 0.1 - // - // should result in - // - // NumericRange[Double](0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0) - // - // and not - // - // NumericRange[Double](0.0, 0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9) - // - // or perhaps more importantly, - // - // (0.1 to 0.3 by 0.1 contains 0.3) == true - // - private[immutable] def mapRange[A](fm: T => A)(implicit unum: Integral[A]): NumericRange[A] = { - val self = this - - // XXX This may be incomplete. 
- new NumericRange[A](fm(start), fm(end), fm(step), isInclusive) { - def copy(start: A, end: A, step: A): NumericRange[A] = - if (isInclusive) NumericRange.inclusive(start, end, step) - else NumericRange(start, end, step) - - private lazy val underlyingRange: NumericRange[T] = self - override def foreach[U](f: A => U) { underlyingRange foreach (x => f(fm(x))) } - override def isEmpty = underlyingRange.isEmpty - override def apply(idx: Int): A = fm(underlyingRange(idx)) - override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el) - - override def toString = { - def simpleOf(x: Any): String = x.getClass.getName.split("\\.").last - val stepped = simpleOf(underlyingRange.step) - s"${super.toString} (using $underlyingRange of $stepped)" - } - } - } - // a well-typed contains method. def containsTyped(x: T): Boolean = isWithinBoundaries(x) && (((x - start) % step) == zero) @@ -187,18 +291,18 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { try containsTyped(x.asInstanceOf[T]) catch { case _: ClassCastException => false } - final override def sum[B >: T](implicit num: Numeric[B]): B = { + override def sum[B >: T](implicit num: Numeric[B]): B = { if (isEmpty) num.zero - else if (numRangeElements == 1) head + else if (size == 1) head else { // If there is no overflow, use arithmetic series formula // a + ... (n terms total) ... 
+ b = n*(a+b)/2 if ((num eq scala.math.Numeric.IntIsIntegral)|| - (num eq scala.math.Numeric.ShortIsIntegral)|| - (num eq scala.math.Numeric.ByteIsIntegral)|| - (num eq scala.math.Numeric.CharIsIntegral)) { + (num eq scala.math.Numeric.ShortIsIntegral)|| + (num eq scala.math.Numeric.ByteIsIntegral)|| + (num eq scala.math.Numeric.CharIsIntegral)) { // We can do math with no overflow in a Long--easy - val exact = (numRangeElements * ((num toLong head) + (num toInt last))) / 2 + val exact = (size * ((num toLong head) + (num toInt last))) / 2 num fromInt exact.toInt } else if (num eq scala.math.Numeric.LongIsIntegral) { @@ -207,8 +311,8 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { val a = head.toLong val b = last.toLong val ans = - if ((numRangeElements & 1) == 0) (numRangeElements / 2) * (a + b) - else numRangeElements * { + if ((size & 1) == 0) (size / 2) * (a + b) + else size * { // Sum is even, but we might overflow it, so divide in pieces and add back remainder val ha = a/2 val hb = b/2 @@ -216,25 +320,13 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { } ans.asInstanceOf[B] } - else if ((num eq scala.math.Numeric.FloatAsIfIntegral) || - (num eq scala.math.Numeric.DoubleAsIfIntegral)) { - // Try to compute sum with reasonable accuracy, avoiding over/underflow - val numAsIntegral = num.asInstanceOf[Integral[B]] - import numAsIntegral._ - val a = math.abs(head.toDouble) - val b = math.abs(last.toDouble) - val two = num fromInt 2 - val nre = num fromInt numRangeElements - if (a > 1e38 || b > 1e38) nre * ((head / two) + (last / two)) // Compute in parts to avoid Infinity if possible - else (nre / two) * (head + last) // Don't need to worry about infinity; this will be more accurate and avoid underflow - } else if ((num eq scala.math.Numeric.BigIntIsIntegral) || - (num eq scala.math.Numeric.BigDecimalIsFractional)) { + (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { // No overflow, so we can use arithmetic series formula 
directly // (not going to worry about running out of memory) val numAsIntegral = num.asInstanceOf[Integral[B]] import numAsIntegral._ - ((num fromInt numRangeElements) * (head + last)) / (num fromInt 2) + ((num fromInt size) * (head + last)) / (num fromInt 2) } else { // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. won't work on something like Z_6) @@ -254,33 +346,52 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { } } - override lazy val hashCode = super.hashCode() - override def equals(other: Any) = other match { + override lazy val hashCode: Int = super.hashCode() + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def equals(other: Any): Boolean = other match { case x: NumericRange[_] => (x canEqual this) && (length == x.length) && ( - (length == 0) || // all empty sequences are equal - (start == x.start && last == x.last) // same length and same endpoints implies equality - ) + (isEmpty) || // all empty sequences are equal + (start == x.start && last == x.last) // same length and same endpoints implies equality + ) case _ => super.equals(other) } - override def toString = { + override def toString: String = { val empty = if (isEmpty) "empty " else "" val preposition = if (isInclusive) "to" else "until" val stepped = if (step == 1) "" else s" by $step" s"${empty}NumericRange $start $preposition $end$stepped" } + + override protected[this] def className = "NumericRange" } /** A companion object for numeric ranges. 
- */ + * @define Coll `NumericRange` + * @define coll numeric range + */ object NumericRange { + private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { + def FAIL(boundary: T, step: T): Unit = { + val msg = boundary match { + case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" + case _ => "Precision" + } + throw new IllegalArgumentException( + s"$msg inadequate to represent steps of size $step near $boundary" + ) + } + if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) + if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) + } /** Calculates the number of elements in a range given start, end, step, and - * whether or not it is inclusive. Throws an exception if step == 0 or - * the number of elements exceeds the maximum Int. - */ + * whether or not it is inclusive. Throws an exception if step == 0 or + * the number of elements exceeds the maximum Int. + */ def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { val zero = num.zero val upward = num.lt(start, end) @@ -313,16 +424,19 @@ object NumericRange { } // If we reach this point, deferring to Int failed. // Numbers may be big. + if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { + bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) + } val one = num.one val limit = num.fromInt(Int.MaxValue) def check(t: T): T = if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") else t // If the range crosses zero, it might overflow when subtracted - val startside = num.signum(start) - val endside = num.signum(end) + val startside = num.sign(start) + val endside = num.sign(end) num.toInt{ - if (startside*endside >= 0) { + if (num.gteq(num.times(startside, endside), zero)) { // We're sure we can subtract these numbers. 
// Note that we do not use .rem because of different conventions for Long and BigInt val diff = num.minus(end, start) @@ -335,10 +449,18 @@ object NumericRange { // Jump in three pieces: // * start to -1 or 1, whichever is closer (waypointA) // * one step, which will take us at least to 0 (ends at waypointB) + // * (except with really small numbers) // * there to the end val negone = num.fromInt(-1) val startlim = if (posStep) negone else one - val startdiff = num.minus(startlim, start) + //Use start value if the start value is closer to zero than startlim + // * e.g. .5 is closer to zero than 1 and -.5 is closer to zero than -1 + val startdiff = { + if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) + start + else + num.minus(startlim, start) + } val startq = check(num.quot(startdiff, step)) val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) val waypointB = num.plus(waypointA, step) @@ -366,19 +488,19 @@ object NumericRange { } } - @SerialVersionUID(-5986512874781685419L) + @SerialVersionUID(3L) class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, true) { - def copy(start: T, end: T, step: T): Inclusive[T] = + extends NumericRange(start, end, step, isInclusive = true) { + override def copy(start: T, end: T, step: T): Inclusive[T] = NumericRange.inclusive(start, end, step) def exclusive: Exclusive[T] = NumericRange(start, end, step) } - @SerialVersionUID(-7058074814271573640L) + @SerialVersionUID(3L) class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, false) { - def copy(start: T, end: T, step: T): Exclusive[T] = + extends NumericRange(start, end, step, isInclusive = false) { + override def copy(start: T, end: T, step: T): Exclusive[T] = NumericRange(start, end, step) def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) @@ -396,10 +518,24 @@ object 
NumericRange { Numeric.ByteIsIntegral -> Ordering.Byte, Numeric.CharIsIntegral -> Ordering.Char, Numeric.LongIsIntegral -> Ordering.Long, - Numeric.FloatAsIfIntegral -> Ordering.Float, - Numeric.DoubleAsIfIntegral -> Ordering.Double, Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal ) + @SerialVersionUID(3L) + private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { + import num.mkNumericOps + + private[this] var _hasNext = !self.isEmpty + private[this] var _next: T = self.start + private[this] val lastElement: T = if (_hasNext) self.last else self.start + override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 + def hasNext: Boolean = _hasNext + def next(): T = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = num.plus(value, self.step) + value + } + } } - diff --git a/src/library/scala/collection/immutable/OldRedBlackTree.scala b/src/library/scala/collection/immutable/OldRedBlackTree.scala deleted file mode 100644 index 033a0165cc3d..000000000000 --- a/src/library/scala/collection/immutable/OldRedBlackTree.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection.immutable - -import java.io.IOException -import scala.annotation.meta.getter - -private[immutable] object RedBlackTree { - - // Trees for serialisation compat with 2.12 legacy format - // allow the format to be written in that manner for 2.12.11 and before - //on write this is the same format as before - //on read the `readResolve` will convert to the NewRedBlackTree format - - // the Tree children must be AnyRef as during construction then are RedBlackTree.Tree - // due to the readResolve the tree is migrated to new format and the children will be converted to - // NewRedBlackTree as they are read - - @SerialVersionUID(7757490705548110898L) - sealed abstract class Tree[A, +B]( - @(inline@getter) final val key: A, - @(inline@getter) final val value: B, - @(inline@getter) final val left: AnyRef, - @(inline@getter) final val right: AnyRef) - extends Serializable { - private def _count(tree: AnyRef) = if (tree eq null) 0 else tree.asInstanceOf[Tree[A, B]].count - @(inline @getter) final val count: Int = 1 + _count(left) + _count(right) - } - - @SerialVersionUID(6516527240275040268L) - final class RedTree[A, +B](key: A, - value: B, - left: AnyRef, - right: AnyRef) extends Tree[A, B](key, value, left, right) { - @throws[IOException] - private[this] def readResolve(): AnyRef = - NewRedBlackTree.RedTree(key, value, - this.left.asInstanceOf[NewRedBlackTree.Tree[A, B]], - this.right.asInstanceOf[NewRedBlackTree.Tree[A, B]]) - - override def toString: String = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")" - } - - @SerialVersionUID(-3666942709716265983L) - final class BlackTree[A, +B](key: A, - value: B, - left: AnyRef, - right: AnyRef) extends Tree[A, B](key, value, left, right) { - @throws[IOException] - private[this] def readResolve(): AnyRef = - NewRedBlackTree.BlackTree(key, value, - this.left.asInstanceOf[NewRedBlackTree.Tree[A, B]], - this.right.asInstanceOf[NewRedBlackTree.Tree[A, B]]) - - override def toString: 
String = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")" - } - - def from[A, B](tree: NewRedBlackTree.Tree[A, B]): Tree[A, B] = { - if (tree eq null) null - else { - val left = from(tree.left) - val right = from(tree.right) - if (NewRedBlackTree.isBlack(tree)) - new BlackTree(tree.key, tree.value, left, right) - else - new RedTree(tree.key, tree.value, left, right) - } - } - -} \ No newline at end of file diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala deleted file mode 100644 index 097337cc3534..000000000000 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import java.io.{File, FileReader, Reader} -import scala.reflect.ClassTag - -/** The `PagedSeq` object defines a lazy implementations of - * a random access sequence. - * - * Provides utility methods that return instances of `PagedSeq[Char]`. 
- * `fromIterator` and `fromIterable` provide generalised instances of `PagedSeq` - * @since 2.7 - */ -@deprecated("this object will be moved to the scala-parser-combinators module", "2.11.8") -object PagedSeq { - final val UndeterminedEnd = Int.MaxValue - - /** Constructs a paged sequence from an iterator */ - def fromIterator[T: ClassTag](source: Iterator[T]): PagedSeq[T] = - new PagedSeq[T]((data: Array[T], start: Int, len: Int) => { - var i = 0 - while (i < len && source.hasNext) { - data(start + i) = source.next() - i += 1 - } - if (i == 0) -1 else i - }) - - /** Constructs a paged sequence from an iterable */ - def fromIterable[T: ClassTag](source: Iterable[T]): PagedSeq[T] = - fromIterator(source.iterator) - - /** Constructs a paged character sequence from a string iterator */ - def fromStrings(source: Iterator[String]): PagedSeq[Char] = { - var current: String = "" - def more(data: Array[Char], start: Int, len: Int): Int = - if (current.length != 0) { - val cnt = current.length min len - current.getChars(0, cnt, data, start) - current = current.substring(cnt) - if (cnt == len) cnt - else (more(data, start + cnt, len - cnt) max 0) + cnt - } else if (source.hasNext) { - current = source.next() - more(data, start, len) - } else -1 - new PagedSeq(more(_: Array[Char], _: Int, _: Int)) - } - - /** Constructs a paged character sequence from a string iterable */ - def fromStrings(source: Iterable[String]): PagedSeq[Char] = - fromStrings(source.iterator) - - /** Constructs a paged character sequence from a line iterator - * Lines do not contain trailing `\n` characters; The method inserts - * a line separator `\n` between any two lines in the sequence. 
- */ - def fromLines(source: Iterator[String]): PagedSeq[Char] = { - var isFirst = true - fromStrings(source map { line => - if (isFirst) { - isFirst = false - line - } else "\n"+line - }) - } - - /** Constructs a paged character sequence from a line iterable - * Lines do not contain trailing `\n` characters; The method inserts - * a line separator `\n` between any two lines in the sequence. - */ - def fromLines(source: Iterable[String]): PagedSeq[Char] = - fromLines(source.iterator) - - /** Constructs a paged character sequence from an input reader - */ - def fromReader(source: Reader): PagedSeq[Char] = - new PagedSeq(source.read(_: Array[Char], _: Int, _: Int)) - - /** Constructs a paged character sequence from an input file - */ - def fromFile(source: File): PagedSeq[Char] = - fromReader(new FileReader(source)) - - /** Constructs a paged character sequence from a file with given name - */ - def fromFile(source: String): PagedSeq[Char] = - fromFile(new File(source)) - - /** Constructs a paged character sequence from a scala.io.Source value - */ - def fromSource(source: scala.io.Source) = - fromLines(source.getLines()) -} - - -import PagedSeq._ - -/** An implementation of lazily computed sequences, where elements are stored - * in "pages", i.e. arrays of fixed size. - * - * A paged sequence is constructed from a function that produces more elements when asked. - * The producer function - `more`, is similar to the read method in java.io.Reader. - * The `more` function takes three parameters: an array of elements, a start index, and an end index. - * It should try to fill the array between start and end indices (excluding end index). - * It returns the number of elements produced, or -1 if end of logical input stream was reached - * before reading any element. - * - * @tparam T the type of the elements contained in this paged sequence, with an `ClassTag` context bound. 
- * - * @author Martin Odersky - * @since 2.7 - * @define Coll `PagedSeq` - * @define coll paged sequence - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("this class will be moved to the scala-parser-combinators module", "2.11.8") -class PagedSeq[T: ClassTag] protected( - more: (Array[T], Int, Int) => Int, - first1: Page[T], - start: Int, - end: Int) -extends scala.collection.AbstractSeq[T] - with scala.collection.IndexedSeq[T] -{ - def this(more: (Array[T], Int, Int) => Int) = this(more, new Page[T](0), 0, UndeterminedEnd) - - private var current: Page[T] = first1 - - private def latest = first1.latest - - private def addMore() = latest.addMore(more) - - private def page(absindex: Int) = { - if (absindex < current.start) - current = first1 - while (absindex >= current.end && current.next != null) - current = current.next - while (absindex >= current.end && !current.isLast) { - current = addMore() - } - current - } - - /** The length of the paged sequence - * @note Calling this method will force the entire sequence to be read. - */ - def length: Int = { - while (!latest.isLast && latest.end < end) addMore() - (latest.end min end) - start - } - - /** The element at position `index`. - */ - def apply(index: Int) = - if (isDefinedAt(index)) page(index + start)(index + start) - else throw new IndexOutOfBoundsException(index.toString) - - /** Predicate method to check if an element is defined - * at position `index` of the current sequence. - * Unlike `length` this operation does not force reading - * a lazy sequence to the end. - */ - override def isDefinedAt(index: Int) = - index >= 0 && index < end - start && { - val absidx = index + start - absidx >= 0 && absidx < page(absidx).end - } - - /** The subsequence from index `start` up to `end -1` if `end` - * is lesser than the length of the current sequence and up to - * length of the sequence otherwise. 
This is limited up to the length - * of the current sequence if `end` is larger than its length. - */ - override def slice(_start: Int, _end: Int): PagedSeq[T] = { - page(start) - val s = start + _start - val e = if (_end == UndeterminedEnd) _end else start + _end - var f = first1 - while (f.end <= s && !f.isLast) { - if (f.next eq null) f = f.addMore(more) - else f = f.next - } - // Warning -- not refining `more` means that slices can freely request and obtain - // data outside of their slice. This is part of the design of PagedSeq - // (to read pages!) but can be surprising. - new PagedSeq(more, f, s, e) - } - - /** The subsequence from index `start` up to - * the length of the current sequence. - */ - def slice(start: Int): PagedSeq[T] = slice(start, UndeterminedEnd) - - /** Convert sequence to string */ - override def toString = { - val buf = new StringBuilder - for (ch <- PagedSeq.this.iterator) buf append ch - buf.toString - } -} - - -/** Page containing up to PageSize characters of the input sequence. - */ -private class Page[T: ClassTag](val num: Int) { - - private final val PageSize = 4096 - - /** The next page in the sequence */ - var next : Page[T] = null - - /** A later page in the sequence, serves a cache for pointing to last page */ - var later : Page[T] = this - - /** The number of elements read into this page */ - var filled: Int = 0 - - /** Set true if the current page is the last in the sequence or if - * the `more` function returned -1 signalling end of input. */ - var isLast: Boolean = false - - /** The element array */ - final val data = new Array[T](PageSize) - - /** The index of the first element in this page relative to the whole sequence */ - final def start = num * PageSize - - /** The index of the element following the last element in this page relative - * to the whole sequence */ - final def end = start + filled - - /** The last page as currently present in the sequence; This can change as more - * elements get appended to the sequence. 
*/ - final def latest: Page[T] = { - if (later.next != null) later = later.next.latest - later - } - - /** The element at the given sequence index. - * That index is relative to the whole sequence, not the page. */ - def apply(index: Int) = { - if (index < start || index - start >= filled) throw new IndexOutOfBoundsException(index.toString) - data(index - start) - } - - /** Produces more elements by calling `more` and adds them on the current page, - * or fills a subsequent page if current page is full. - * @note If current page is full, it is the last one in the sequence. */ - final def addMore(more: (Array[T], Int, Int) => Int): Page[T] = - if (filled == PageSize) { - next = new Page[T](num + 1) - next.addMore(more) - } else { - val count = more(data, filled, PageSize - filled) - if (count < 0) isLast = true - else filled += count - this - } -} diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 20f0ed72cc2f..89def7096aea 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,71 +10,81 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package immutable -import generic._ -import mutable.{ Builder, ListBuffer } +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{Builder, ListBuffer} /** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. 
- * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the - * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. - * - * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case - * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, - * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. - * - * @author Erik Stenman - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] - * section on `Immutable Queues` for more information. - * - * @define Coll `immutable.Queue` - * @define coll immutable queue - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ + * insert and retrieve elements in a first-in-first-out (FIFO) manner. + * + * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. + * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the + * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. + * + * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case + * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, + * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] + * section on `Immutable Queues` for more information. 
+ * + * @define Coll `immutable.Queue` + * @define coll immutable queue + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ -@SerialVersionUID(-7622936493364270175L) sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, Queue] - with LinearSeqLike[A, Queue[A]] - with Serializable { + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with DefaultSerializable { - override def companion: GenericCompanion[Queue] = Queue + override def iterableFactory: SeqFactory[Queue] = Queue /** Returns the `n`-th element of this queue. - * The first element is at position `0`. - * - * @param n index of the element to return - * @return the element at position `n` in this queue. - * @throws java.util.NoSuchElementException if the queue is too short. - */ + * The first element is at position `0`. + * + * @param n index of the element to return + * @return the element at position `n` in this queue. + * @throws NoSuchElementException if the queue is too short. 
+ */ override def apply(n: Int): A = { - val olen = out.length - if (n < olen) out.apply(n) - else { - val m = n - olen - val ilen = in.length - if (m < ilen) in.apply(ilen - m - 1) - else throw new NoSuchElementException("index out of range") + def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) + + var index = 0 + var curr = out + + while (index < n && curr.nonEmpty) { + index += 1 + curr = curr.tail + } + + if (index == n) { + if (curr.nonEmpty) curr.head + else if (in.nonEmpty) in.last + else indexOutOfRange() + } else { + val indexFromBack = n - index + val inLength = in.length + if (indexFromBack >= inLength) indexOutOfRange() + else in(inLength - indexFromBack - 1) } } /** Returns the elements in the list as an iterator - */ - override def iterator: Iterator[A] = (out ::: in.reverse).iterator + */ + override def iterator: Iterator[A] = out.iterator.concat(in.reverse) /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ + * + * @return true, iff there is no element in the queue. + */ override def isEmpty: Boolean = in.isEmpty && out.isEmpty override def head: A = @@ -87,6 +97,11 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) else throw new NoSuchElementException("tail on empty queue") + override def last: A = + if (in.nonEmpty) in.head + else if (out.nonEmpty) out.last + else throw new NoSuchElementException("last on empty queue") + /* This is made to avoid inefficient implementation of iterator. */ override def forall(p: A => Boolean): Boolean = in.forall(p) && out.forall(p) @@ -95,62 +110,63 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L override def exists(p: A => Boolean): Boolean = in.exists(p) || out.exists(p) - override def stringPrefix = "Queue" + override protected[this] def className = "Queue" - /** Returns the length of the queue. 
- */ - override def length = in.length + out.length + /** Returns the length of the queue. */ + override def length: Int = in.length + out.length - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Queue[A], B, That]): That = bf match { - case _: Queue.GenericCanBuildFrom[_] => new Queue(in, elem :: out).asInstanceOf[That] - case _ => super.+:(elem)(bf) - } + override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Queue[A], B, That]): That = bf match { - case _: Queue.GenericCanBuildFrom[_] => enqueue(elem).asInstanceOf[That] - case _ => super.:+(elem)(bf) - } + override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Queue[A], B, That]): That = { - if (bf eq Queue.ReusableCBF) { - val newIn = - if (that.isInstanceOf[Queue[_]]) { - val thatQueue: Queue[B] = that.asInstanceOf[Queue[B]] - thatQueue.in ++ (thatQueue.out reverse_::: this.in) - } else { - val lb = new ListBuffer[B] - that.seq.foreach(_ +=: lb) - lb.prependToList(this.in) + override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { + val newIn = that match { + case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) + case that: List[B] => that reverse_::: this.in + case _ => + var result: List[B] = this.in + val iter = that.iterator + while (iter.hasNext) { + result = iter.next() :: result } - new Queue[B](newIn, this.out).asInstanceOf[That] - } else { - super.++(that)(bf) + result } + if (newIn eq this.in) this else new Queue[B](newIn, this.out) } /** Creates a new queue with element added at the end - * of the old queue. - * - * @param elem the element to insert - */ - def enqueue[B >: A](elem: B) = new Queue(elem :: in, out) - - /** Returns a new queue with all elements provided by an `Iterable` object - * added at the end of the queue. 
- * - * The elements are appended in the order they are given out by the - * iterator. - * - * @param iter an iterable object - */ - def enqueue[B >: A](iter: Iterable[B]) = - new Queue(iter.toList reverse_::: in, out) + * of the old queue. + * + * @param elem the element to insert + */ + def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") + @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) /** Returns a tuple with the first element in the queue, * and a new queue with this element removed. * - * @throws java.util.NoSuchElementException * @return the first element of the queue. + * @throws NoSuchElementException if the queue is empty */ def dequeue: (A, Queue[A]) = out match { case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) @@ -159,34 +175,42 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L } /** Optionally retrieves the first element and a queue of the remaining elements. - * - * @return A tuple of the first element of the queue, and a new queue with this element removed. - * If the queue is empty, `None` is returned. - */ + * + * @return A tuple of the first element of the queue, and a new queue with this element removed. 
+ * If the queue is empty, `None` is returned. + */ def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) /** Returns the first element in the queue, or throws an error if there * is no element contained in the queue. * - * @throws java.util.NoSuchElementException + * @throws NoSuchElementException if the queue is empty * @return the first element. */ def front: A = head /** Returns a string representation of this queue. - */ - override def toString() = mkString("Queue(", ", ", ")") + */ + override def toString(): String = mkString("Queue(", ", ", ")") } /** $factoryInfo - * @define Coll `immutable.Queue` - * @define coll immutable queue - */ -object Queue extends SeqFactory[Queue] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x.toList)) - override def empty[A]: Queue[A] = EmptyQueue.asInstanceOf[Queue[A]] + * @define Coll `immutable.Queue` + * @define coll immutable queue + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + + def from[A](source: IterableOnce[A]): Queue[A] = source match { + case q: Queue[A] => q + case _ => + val list = List.from(source) + if (list.isEmpty) empty + else new Queue(Nil, list) + } + + def empty[A]: Queue[A] = EmptyQueue override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 2d777b528761..9a35153ace64 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * 
Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,11 @@ package scala package collection.immutable -import scala.collection.parallel.immutable.ParRange +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.RangeStepper +import scala.collection.generic.CommonErrors +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import scala.util.hashing.MurmurHash3 /** The `Range` class represents integer values in range * ''[start;end)'' with non-zero step value `step`. @@ -27,63 +31,72 @@ import scala.collection.parallel.immutable.ParRange * }}} * * Ranges that contain more than `Int.MaxValue` elements can be created, but - * these overfull ranges have only limited capabilities. Any method that + * these overfull ranges have only limited capabilities. Any method that * could require a collection of over `Int.MaxValue` length to be created, or * could be asked to index beyond `Int.MaxValue` elements will throw an - * exception. Overfull ranges can safely be reduced in size by changing - * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, + * exception. Overfull ranges can safely be reduced in size by changing + * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, * `equals`, and access to the ends of the range (`head`, `last`, `tail`, * `init`) are also permitted on overfull ranges. * - * @param start the start of this range. - * @param end the end of the range. For exclusive ranges, e.g. - * `Range(0,3)` or `(0 until 3)`, this is one - * step past the last one in the range. For inclusive - * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, - * it may be in the range if it is not skipped by the step size. - * To find the last element inside a non-empty range, - use `last` instead. 
- * @param step the step for the range. - * - * @author Martin Odersky - * @author Paul Phillips - * @since 2.5 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#ranges "Scala's Collection Library overview"]] - * section on `Ranges` for more information. + * @param start the start of this range. + * @param end the end of the range. For exclusive ranges, e.g. + * `Range(0,3)` or `(0 until 3)`, this is one + * step past the last one in the range. For inclusive + * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, + * it may be in the range if it is not skipped by the step size. + * To find the last element inside a non-empty range, + * use `last` instead. + * @param step the step for the range. * * @define coll range + * @define ccoll indexed sequence * @define mayNotTerminateInf * @define willNotTerminateInf * @define doesNotUseBuilders * '''Note:''' this method does not use builders to construct a new range, * and its complexity is O(1). 
*/ -@SerialVersionUID(7618862778670199309L) -sealed class Range(val start: Int, val end: Int, val step: Int) -extends scala.collection.AbstractSeq[Int] - with IndexedSeq[Int] - with scala.collection.CustomParallelizable[Int, ParRange] - with Serializable -{ - override def par = new ParRange(this) - - private def gap = end.toLong - start.toLong - private def isExact = gap % step == 0 - private def hasStub = isInclusive || !isExact - private def longLength = gap / step + ( if (hasStub) 1 else 0 ) +@SerialVersionUID(3L) +sealed abstract class Range( + val start: Int, + val end: Int, + val step: Int +) + extends AbstractSeq[Int] + with IndexedSeq[Int] + with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with IterableFactoryDefaults[Int, IndexedSeq] + with Serializable { range => + + final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) + + override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = new RangeStepper(start, step, 0, length) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } - // Check cannot be evaluated eagerly because we have a pattern where - // ranges are constructed like: "x to y by z" The "x to y" piece - // should not trigger an exception. So the calculation is delayed, - // which means it will not fail fast for those cases where failing was - // correct. 
- override final val isEmpty = ( - (start > end && step > 0) - || (start < end && step < 0) - || (start == end && !isInclusive) - ) + private[this] def gap = end.toLong - start.toLong + private[this] def isExact = gap % step == 0 + private[this] def hasStub = isInclusive || !isExact + private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) - private val numRangeElements: Int = { + def isInclusive: Boolean + + final override val isEmpty: Boolean = ( + (start > end && step > 0) + || (start < end && step < 0) + || (start == end && !isInclusive) + ) + + private[this] val numRangeElements: Int = { if (step == 0) throw new IllegalArgumentException("step cannot be 0.") else if (isEmpty) 0 else { @@ -93,8 +106,10 @@ extends scala.collection.AbstractSeq[Int] } } + final def length = if (numRangeElements < 0) fail() else numRangeElements + // This field has a sensible value only for non-empty ranges - private val lastElement = step match { + private[this] val lastElement = step match { case 1 => if (isInclusive) end else end-1 case -1 => if (isInclusive) end else end+1 case _ => @@ -105,51 +120,71 @@ extends scala.collection.AbstractSeq[Int] } /** The last element of this range. This method will return the correct value - * even if there are too many elements to iterate over. - */ - override def last = if (isEmpty) Nil.last else lastElement - override def head = if (isEmpty) Nil.head else start - - override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) head - else last - } else super.min(ord) + * even if there are too many elements to iterate over. 
+ */ + final override def last: Int = + if (isEmpty) throw Range.emptyRangeError("last") else lastElement + final override def head: Int = + if (isEmpty) throw Range.emptyRangeError("head") else start - override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) last - else head - } else super.max(ord) + /** Creates a new range containing all the elements of this range except the last one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the last one. + */ + final override def init: Range = + if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) - protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step) + /** Creates a new range containing all the elements of this range except the first one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the first one. + */ + final override def tail: Range = { + if (isEmpty) throw Range.emptyRangeError("tail") + if (numRangeElements == 1) newEmptyRange(end) + else if(isInclusive) new Range.Inclusive(start + step, end, step) + else new Range.Exclusive(start + step, end, step) + } - /** Create a new range with the `start` and `end` values of this range and - * a new `step`. - * - * @return a new range with a different step - */ - def by(step: Int): Range = copy(start, end, step) + override def map[B](f: Int => B): IndexedSeq[B] = { + validateMaxLength() + super.map(f) + } - def isInclusive = false + final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = + if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) - override def size = length - override def length = if (numRangeElements < 0) fail() else numRangeElements + /** Create a new range with the `start` and `end` values of this range and + * a new `step`. 
+ * + * @return a new range with a different step + */ + final def by(step: Int): Range = copy(start, end, step) - private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) - private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") - private def validateMaxLength() { + // Check cannot be evaluated eagerly because we have a pattern where + // ranges are constructed like: "x to y by z" The "x to y" piece + // should not trigger an exception. So the calculation is delayed, + // which means it will not fail fast for those cases where failing was + // correct. + private[this] def validateMaxLength(): Unit = { if (numRangeElements < 0) fail() } + private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) + private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") + @throws[IndexOutOfBoundsException] final def apply(idx: Int): Int = { validateMaxLength() - if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString) + if (idx < 0 || idx >= numRangeElements) + throw CommonErrors.indexOutOfBounds(index = idx, max = numRangeElements - 1) else start + (step * idx) } - @inline final override def foreach[@specialized(Unit) U](f: Int => U) { + /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { // Implementation chosen on the basis of favorable microbenchmarks // Note--initialization catches step == 0 so we don't need to here if (!isEmpty) { @@ -162,14 +197,44 @@ extends scala.collection.AbstractSeq[Int] } } + override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos >= from) pos else -1 + case _ => super.indexOf(elem, from) + } + + override final def lastIndexOf[@specialized(Int) B >: 
Int](elem: B, end: Int = length - 1): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos <= end) pos else -1 + case _ => super.lastIndexOf(elem, end) + } + + private[this] def posOf(i: Int): Int = + if (contains(i)) (i - start) / step else -1 + + override def sameElements[B >: Int](that: IterableOnce[B]): Boolean = that match { + case other: Range => + (this.length : @annotation.switch) match { + case 0 => other.isEmpty + case 1 => other.length == 1 && this.start == other.start + case n => other.length == n && ( + (this.start == other.start) + && (this.step == other.step) + ) + } + case _ => super.sameElements(that) + } + /** Creates a new range containing the first `n` elements of this range. - * - * $doesNotUseBuilders - * - * @param n the number of elements to take. - * @return a new range consisting of `n` first elements. - */ - final override def take(n: Int): Range = ( + * + * @param n the number of elements to take. + * @return a new range consisting of `n` first elements. + */ + final override def take(n: Int): Range = if (n <= 0 || isEmpty) newEmptyRange(start) else if (n >= numRangeElements && numRangeElements >= 0) this else { @@ -177,16 +242,13 @@ extends scala.collection.AbstractSeq[Int] // but the logic is the same either way: take the first n new Range.Inclusive(start, locationAfterN(n - 1), step) } - ) /** Creates a new range containing all the elements of this range except the first `n` elements. - * - * $doesNotUseBuilders - * - * @param n the number of elements to drop. - * @return a new range consisting of all the elements of this range except `n` first elements. - */ - final override def drop(n: Int): Range = ( + * + * @param n the number of elements to drop. + * @return a new range consisting of all the elements of this range except `n` first elements. 
+ */ + final override def drop(n: Int): Range = if (n <= 0 || isEmpty) this else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) else { @@ -194,53 +256,40 @@ extends scala.collection.AbstractSeq[Int] // but the logic is the same either way: go forwards n steps, keep the rest copy(locationAfterN(n), end, step) } - ) - /** Creates a new range containing the elements starting at `from` up to but not including `until`. - * - * $doesNotUseBuilders - * - * @param from the element at which to start - * @param until the element at which to end (not included in the range) - * @return a new range consisting of a contiguous interval of values in the old range - */ - override def slice(from: Int, until: Int): Range = - if (from <= 0) take(until) - else if (until >= numRangeElements && numRangeElements >= 0) drop(from) + /** Creates a new range consisting of the last `n` elements of the range. + * + * $doesNotUseBuilders + */ + final override def takeRight(n: Int): Range = { + if (n <= 0) newEmptyRange(start) + else if (numRangeElements >= 0) drop(numRangeElements - n) else { - val fromValue = locationAfterN(from) - if (from >= until) newEmptyRange(fromValue) - else new Range.Inclusive(fromValue, locationAfterN(until-1), step) + // Need to handle over-full range separately + val y = last + val x = y - step.toLong*(n-1) + if ((step > 0 && x < start) || (step < 0 && x > start)) this + else Range.inclusive(x.toInt, y, step) } - - /** Creates a new range containing all the elements of this range except the last one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the last one. - */ - final override def init: Range = { - if (isEmpty) - Nil.init - - dropRight(1) } - /** Creates a new range containing all the elements of this range except the first one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the first one. 
- */ - final override def tail: Range = { - if (isEmpty) - Nil.tail - - drop(1) + /** Creates a new range consisting of the initial `length - n` elements of the range. + * + * $doesNotUseBuilders + */ + final override def dropRight(n: Int): Range = { + if (n <= 0) this + else if (numRangeElements >= 0) take(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last - step.toInt*n + if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) + else Range.inclusive(start, y.toInt, step) + } } // Advance from the start while we meet the given test - private def argTakeWhile(p: Int => Boolean): Long = { + private[this] def argTakeWhile(p: Int => Boolean): Long = { if (isEmpty) start else { var current = start @@ -250,15 +299,6 @@ extends scala.collection.AbstractSeq[Int] else current.toLong + step } } - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private def locationAfterN(n: Int) = start + (step * n) - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. 
- private def newEmptyRange(value: Int) = new Range(value, value, step) final override def takeWhile(p: Int => Boolean): Range = { val stop = argTakeWhile(p) @@ -266,98 +306,95 @@ extends scala.collection.AbstractSeq[Int] else { val x = (stop - step).toInt if (x == last) this - else new Range.Inclusive(start, x, step) + else Range.inclusive(start, x, step) } } + final override def dropWhile(p: Int => Boolean): Range = { val stop = argTakeWhile(p) if (stop == start) this else { val x = (stop - step).toInt if (x == last) newEmptyRange(last) - else new Range.Inclusive(x + step, last, step) + else Range.inclusive(x + step, last, step) } } + final override def span(p: Int => Boolean): (Range, Range) = { val border = argTakeWhile(p) if (border == start) (newEmptyRange(start), this) else { val x = (border - step).toInt if (x == last) (this, newEmptyRange(last)) - else (new Range.Inclusive(start, x, step), new Range.Inclusive(x+step, last, step)) + else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) } } - /** Creates a pair of new ranges, first consisting of elements before `n`, and the second - * of elements after `n`. - * - * $doesNotUseBuilders - */ - final override def splitAt(n: Int) = (take(n), drop(n)) - - /** Creates a new range consisting of the last `n` elements of the range. - * - * $doesNotUseBuilders - */ - final override def takeRight(n: Int): Range = { - if (n <= 0) newEmptyRange(start) - else if (numRangeElements >= 0) drop(numRangeElements - n) + /** Creates a new range containing the elements starting at `from` up to but not including `until`. 
+ * + * $doesNotUseBuilders + * + * @param from the element at which to start + * @param until the element at which to end (not included in the range) + * @return a new range consisting of a contiguous interval of values in the old range + */ + final override def slice(from: Int, until: Int): Range = + if (from <= 0) take(until) + else if (until >= numRangeElements && numRangeElements >= 0) drop(from) else { - // Need to handle over-full range separately - val y = last - val x = y - step.toLong*(n-1) - if ((step > 0 && x < start) || (step < 0 && x > start)) this - else new Range.Inclusive(x.toInt, y, step) + val fromValue = locationAfterN(from) + if (from >= until) newEmptyRange(fromValue) + else Range.inclusive(fromValue, locationAfterN(until-1), step) } - } - /** Creates a new range consisting of the initial `length - n` elements of the range. - * - * $doesNotUseBuilders - */ - final override def dropRight(n: Int): Range = { - if (n <= 0) this - else if (numRangeElements >= 0) take(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - step.toInt*n - if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) - else new Range.Inclusive(start, y.toInt, step) - } - } + // Overridden only to refine the return type + final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) + + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private[this] def locationAfterN(n: Int) = start + (step * n) + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. + private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) /** Returns the reverse of this range. 
- * - * $doesNotUseBuilders - */ + */ final override def reverse: Range = if (isEmpty) this else new Range.Inclusive(last, start, -step) /** Make range inclusive. - */ - def inclusive = + */ + final def inclusive: Range = if (isInclusive) this else new Range.Inclusive(start, end, step) - final def contains(x: Int) = { - if (x==end && !isInclusive) false + final def contains(x: Int): Boolean = { + if (x == end && !isInclusive) false else if (step > 0) { if (x < start || x > end) false - else (step == 1) || (((x - start) % step) == 0) + else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) } else { if (x < end || x > start) false - else (step == -1) || (((x - start) % step) == 0) + else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) } } + /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ + override final def contains[B >: Int](elem: B): Boolean = elem match { + case i: Int => this.contains(i) + case _ => super.contains(elem) + } final override def sum[B >: Int](implicit num: Numeric[B]): Int = { if (num eq scala.math.Numeric.IntIsIntegral) { // this is normal integer range with usual addition. 
arithmetic series formula can be used if (isEmpty) 0 - else if (numRangeElements == 1) head - else ((numRangeElements * (head.toLong + last)) / 2).toInt + else if (size == 1) head + else ((size * (head.toLong + last)) / 2).toInt } else { // user provided custom Numeric, we cannot rely on arithmetic series formula if (isEmpty) num.toInt(num.zero) @@ -374,54 +411,135 @@ extends scala.collection.AbstractSeq[Int] } } - override def toIterable = this + final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) head + else last + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) last + else head + } else super.min(ord) - override def toSeq = this + final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) last + else head + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) head + else last + } else super.max(ord) + + override def tails: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.drop(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } - override def equals(other: Any) = other match { + override def inits: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.dropRight(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + final override def equals(other: Any): Boolean = other match { case x: Range => // Note: this must succeed for overfull ranges (length > Int.MaxValue) - (x canEqual this) && { - if (isEmpty) x.isEmpty // empty sequences are equal - else // this is non-empty... 
- x.nonEmpty && start == x.start && { // ...so other must contain something and have same start - val l0 = last - (l0 == x.last && ( // And same end - start == l0 || step == x.step // And either the same step, or not take any steps - )) - } - } + if (isEmpty) x.isEmpty // empty sequences are equal + else // this is non-empty... + x.nonEmpty && start == x.start && { // ...so other must contain something and have same start + val l0 = last + (l0 == x.last && ( // And same end + start == l0 || step == x.step // And either the same step, or not take any steps + )) + } case _ => super.equals(other) } - /* Note: hashCode can't be overridden without breaking Seq's equals contract. */ + final override def hashCode: Int = + if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) + else super.hashCode - override def toString = { + final override def toString: String = { val preposition = if (isInclusive) "to" else "until" val stepped = if (step == 1) "" else s" by $step" val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" s"${prefix}Range $start $preposition $end$stepped" } + + override protected[this] def className = "Range" + + override def distinct: Range = this + + override def grouped(size: Int): Iterator[Range] = { + require(size >= 1, f"size=$size%d, but size must be positive") + if (isEmpty) { + Iterator.empty + } else { + val s = size + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = Range.this.length > i + override def next() = + if (hasNext) { + val x = Range.this.slice(i, i + s) + i += s + x + } else { + Iterator.empty.next() + } + } + } + } + + override def sorted[B >: Int](implicit ord: Ordering[B]): IndexedSeq[Int] = + if (ord eq Ordering.Int) { + if (step > 0) { + this + } else { + reverse + } + } else { + super.sorted(ord) + } } -/** A companion object for the `Range` class. - */ +/** Companion object for ranges. */ object Range { + /** Counts the number of range elements. 
- * @pre step != 0 - * If the size of the range exceeds Int.MaxValue, the - * result will be negative. - */ + * precondition: step != 0 + * If the size of the range exceeds Int.MaxValue, the + * result will be negative. + */ def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { if (step == 0) throw new IllegalArgumentException("step cannot be 0.") - val isEmpty = ( + val isEmpty = if (start == end) !isInclusive else if (start < end) step < 0 else step > 0 - ) + if (isEmpty) 0 else { // Counts with Longs so we can recognize too-large ranges. @@ -439,40 +557,43 @@ object Range { def count(start: Int, end: Int, step: Int): Int = count(start, end, step, isInclusive = false) - @SerialVersionUID(4237131469519710909L) - final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { -// override def par = new ParRange(this) - override def isInclusive = true - override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step) - } - /** Make a range from `start` until `end` (exclusive) with given step value. - * @note step != 0 - */ - def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step) + * @note step != 0 + */ + def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) /** Make a range from `start` until `end` (exclusive) with step value 1. - */ - def apply(start: Int, end: Int): Range = new Range(start, end, 1) + */ + def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) /** Make an inclusive range from `start` to `end` with given step value. - * @note step != 0 - */ - def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step) + * @note step != 0 + */ + def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) /** Make an inclusive range from `start` to `end` with step value 1. 
- */ - def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1) + */ + def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) + + @SerialVersionUID(3L) + final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = true + } + + @SerialVersionUID(3L) + final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = false + } // BigInt and Long are straightforward generic ranges. object BigInt { - def apply(start: BigInt, end: BigInt, step: BigInt) = NumericRange(start, end, step) - def inclusive(start: BigInt, end: BigInt, step: BigInt) = NumericRange.inclusive(start, end, step) + def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) + def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) } object Long { - def apply(start: Long, end: Long, step: Long) = NumericRange(start, end, step) - def inclusive(start: Long, end: Long, step: Long) = NumericRange.inclusive(start, end, step) + def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) + def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) } // BigDecimal uses an alternative implementation of Numeric in which @@ -481,34 +602,14 @@ object Range { // imprecision or surprises might result from anything, although this may // not yet be fully implemented. 
object BigDecimal { - implicit val bigDecAsIntegral = scala.math.Numeric.BigDecimalAsIfIntegral + implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral - def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal) = + def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = NumericRange(start, end, step) - def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal) = + def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = NumericRange.inclusive(start, end, step) } - // Double works by using a BigDecimal under the hood for precise - // stepping, but mapping the sequence values back to doubles with - // .doubleValue. This constructs the BigDecimals by way of the - // String constructor (valueOf) instead of the Double one, which - // is necessary to keep 0.3d at 0.3 as opposed to - // 0.299999999999999988897769753748434595763683319091796875 or so. - object Double { - implicit val bigDecAsIntegral = scala.math.Numeric.BigDecimalAsIfIntegral - implicit val doubleAsIntegral = scala.math.Numeric.DoubleAsIfIntegral - def toBD(x: Double): BigDecimal = scala.math.BigDecimal valueOf x - - @deprecated("use Range.BigDecimal instead", "2.12.6") - def apply(start: Double, end: Double, step: Double) = - BigDecimal(toBD(start), toBD(end), toBD(step)) mapRange (_.doubleValue) - - @deprecated("use Range.BigDecimal.inclusive instead", "2.12.6") - def inclusive(start: Double, end: Double, step: Double) = - BigDecimal.inclusive(toBD(start), toBD(end), toBD(step)) mapRange (_.doubleValue) - } - // As there is no appealing default step size for not-really-integral ranges, // we offer a partially constructed object. 
class Partial[T, U](private val f: T => U) extends AnyVal { @@ -521,7 +622,50 @@ object Range { // indefinitely, for performance and because the compiler seems to bootstrap // off it and won't do so with our parameterized version without modifications. object Int { - def apply(start: Int, end: Int, step: Int) = NumericRange(start, end, step) - def inclusive(start: Int, end: Int, step: Int) = NumericRange.inclusive(start, end, step) + def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step) + def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step) + } + + private def emptyRangeError(what: String): Throwable = + new NoSuchElementException(what + " on empty Range") +} + +/** + * @param lastElement The last element included in the Range + * @param initiallyEmpty Whether the Range was initially empty or not + */ +@SerialVersionUID(3L) +private class RangeIterator( + start: Int, + step: Int, + lastElement: Int, + initiallyEmpty: Boolean +) extends AbstractIterator[Int] with Serializable { + private[this] var _hasNext: Boolean = !initiallyEmpty + private[this] var _next: Int = start + override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0 + def hasNext: Boolean = _hasNext + @throws[NoSuchElementException] + def next(): Int = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = value + step + value + } + + override def drop(n: Int): Iterator[Int] = { + if (n > 0) { + val longPos = _next.toLong + step * n + if (step > 0) { + _next = Math.min(lastElement, longPos).toInt + _hasNext = longPos <= lastElement + } + else if (step < 0) { + _next = Math.max(lastElement, longPos).toInt + _hasNext = longPos >= lastElement + } + } + this } } diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 
effbb86db29b..33f7d9ceb7e2 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,6 +16,7 @@ package immutable import scala.annotation.meta.{getter, setter} import scala.annotation.tailrec +import scala.runtime.Statics.releaseFence /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. * @@ -24,7 +25,7 @@ import scala.annotation.tailrec * easily be used. The API represented by the RedBlackTree object tries to hide these * optimizations behind a reasonably clean API. */ -private[collection] object NewRedBlackTree { +private[collection] object RedBlackTree { def validate[A](tree: Tree[A, _])(implicit ordering: Ordering[A]): tree.type = { def impl(tree: Tree[A, _], keyProp: A => Boolean): Int = { assert(keyProp(tree.key), s"key check failed: $tree") @@ -45,8 +46,8 @@ private[collection] object NewRedBlackTree { def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { - case null => None - case tree => Some(tree.value) + case null => None + case found => Some(found.value) } @tailrec @@ -61,13 +62,13 @@ private[collection] object NewRedBlackTree { if (tree eq null) tree else if (tree.isMutable) { val res = tree.mutableBlack.makeImmutable - VM.releaseFence() + releaseFence() res } else tree.black } /** Create a new balanced tree where `newLeft` replaces `tree.left`. 
* tree and newLeft are never null */ - protected[this] final def mutableBalanceLeft[A, B, B1 >: B](tree: Tree[A, B], newLeft: Tree[A, B1]): Tree[A, B1] = { + protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = { // Parameter trees // tree | newLeft // -- KV R | nl.L nl.KV nl.R @@ -110,7 +111,7 @@ private[collection] object NewRedBlackTree { } /** Create a new balanced tree where `newRight` replaces `tree.right`. * tree and newRight are never null */ - protected[this] final def mutableBalanceRight[A, B, B1 >: B](tree: Tree[A, B], newRight: Tree[A, B1]): Tree[A, B1] = { + protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { // Parameter trees // tree | newRight // L KV -- | nr.L nr.KV nr.R @@ -166,10 +167,6 @@ private[collection] object NewRedBlackTree { mutableBalanceLeft(tree, mutableUpd(tree.left, k)) else if (cmp > 0) mutableBalanceRight(tree, mutableUpd(tree.right, k)) - else if (k != tree.key) - tree.mutableWithK(k) - //Note - in 2.13 remove the above else clause - // due to the different handling of key equality else tree } } @@ -185,10 +182,6 @@ private[collection] object NewRedBlackTree { mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) else if (cmp > 0) mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) - else if (k != tree.key) - tree.mutableWithKV(k,v) - //Note - in 2.13 remove the above else clause - // due to the different handling of key equality else tree.mutableWithV(v) } } @@ -249,8 +242,8 @@ private[collection] object NewRedBlackTree { } /** - * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. - */ + * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. 
+ */ def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp == 0) tree @@ -261,8 +254,8 @@ private[collection] object NewRedBlackTree { } /** - * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. - */ + * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. + */ def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp <= 0) maxBefore(tree.left, x) @@ -439,11 +432,8 @@ private[collection] object NewRedBlackTree { balanceLeft(tree, upd(tree.left, k, v, overwrite)) else if (cmp > 0) balanceRight(tree, upd(tree.right, k, v, overwrite)) - else if (overwrite || k != tree.key) - tree.withKV(k, v) - //Note - in 2.13 this is - // else if (overwrite) tree.withV(v) - //due to the changes in handling of keys + else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) + tree.withV(v) else tree } private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { @@ -563,15 +553,15 @@ private[collection] object NewRedBlackTree { * */ private[immutable] final class Tree[A, +B]( - @(inline @getter @setter) private var _key: A, - @(inline @getter @setter) private var _value: AnyRef, - @(inline @getter @setter) private var _left: Tree[A, _], - @(inline @getter @setter) private var _right: Tree[A, _], - @(inline @getter @setter) private var _count: Int) + @(`inline` @getter @setter) private var _key: A, + @(`inline` @getter @setter) private var _value: AnyRef, + @(`inline` @getter @setter) private var _left: Tree[A, _], + @(`inline` @getter @setter) private var _right: Tree[A, _], + @(`inline` @getter @setter) private var _count: Int) { - @`inline` private[NewRedBlackTree] def isMutable: Boolean = (_count & 
colourMask) == 0 + @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0 // read only APIs - @`inline` private[NewRedBlackTree] final def count = { + @`inline` private[RedBlackTree] final def count = { //devTimeAssert((_count & 0x7FFFFFFF) != 0) _count & colourMask } @@ -579,12 +569,10 @@ private[collection] object NewRedBlackTree { @`inline` private def mutableRetainingColour = _count & colourBit //inlined here to avoid outer object null checks - @`inline` private[NewRedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count + @`inline` private[RedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count @`inline` private[immutable] final def key = _key @`inline` private[immutable] final def value = _value.asInstanceOf[B] - //Note - in 2.13 this should be private[RedBlackTree] as its only needed or the 2.12 Old RedBlackTree @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]] - //Note - in 2.13 this should be private[RedBlackTree] as its only needed or the 2.12 Old RedBlackTree @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]] //Note - only used in tests outside RedBlackTree @`inline` private[immutable] final def isBlack = _count < 0 @@ -594,8 +582,8 @@ private[collection] object NewRedBlackTree { override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)" //mutable APIs - private[NewRedBlackTree] def makeImmutable: Tree[A, B] = { - def makeImmutableImpl() = { + private[RedBlackTree] def makeImmutable: this.type = { + def makeImmutableImpl(): Unit = { if (isMutable) { var size = 1 if (_left ne null) { @@ -608,13 +596,12 @@ private[collection] object NewRedBlackTree { } _count |= size //retains colour } - this } makeImmutableImpl() this } - private[NewRedBlackTree] def mutableBlack: Tree[A, B] = { + private[RedBlackTree] def mutableBlack: Tree[A, B] = { if (isBlack) this else if (isMutable) 
{ _count = initialBlackCount @@ -622,7 +609,7 @@ private[collection] object NewRedBlackTree { } else new Tree(_key, _value, _left, _right, initialBlackCount) } -// private[NewRedBlackTree] def mutableRed: Tree[A, B] = { +// private[RedBlackTree] def mutableRed: Tree[A, B] = { // if (isRed) this // else if (mutable) { // _count = initialRedCount @@ -630,48 +617,30 @@ private[collection] object NewRedBlackTree { // } // else new Tree(_key, _value, _left, _right, initialRedCount) // } - //Note - in 2.13 remove his method - //due to the handling of keys in 2.13 we never replace a key - private[NewRedBlackTree] def mutableWithK[B1 >: B](newKey: A): Tree[A, B1] = { - if (newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) this - else if (isMutable) { - _key = newKey - this - } else new Tree(newKey, _value.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) - } - private[NewRedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { + + private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this else if (isMutable) { _value = newValue.asInstanceOf[AnyRef] this } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) } - //Note - in 2.13 remove his method - //due to the handling of keys in 2.13 we never replace a key - private[NewRedBlackTree] def mutableWithKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { - if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && - (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this - else if (isMutable) { - _key = newKey - _value = newValue.asInstanceOf[AnyRef] - this - } else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) - } - private[NewRedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + + private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { if 
(_left eq newLeft) this else if (isMutable) { _left = newLeft this } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) } - private[NewRedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { if (_right eq newRight) this else if (isMutable) { _right = newRight this } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) } - private[NewRedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { if ((_left eq newLeft) && (_right eq newRight)) this else if (isMutable) { _left = newLeft @@ -679,7 +648,7 @@ private[collection] object NewRedBlackTree { this } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) } - private[NewRedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { if ((_left eq newLeft) && isBlack) this else if (isMutable) { _count = initialBlackCount @@ -687,7 +656,7 @@ private[collection] object NewRedBlackTree { this } else new Tree(_key, _value, newLeft, _right, initialBlackCount) } - private[NewRedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { if ((_right eq newRight) && isBlack) this else if (isMutable) { _count = initialBlackCount @@ -696,29 +665,29 @@ private[collection] object NewRedBlackTree { } else new Tree(_key, _value, _left, newRight, initialBlackCount) } - private[NewRedBlackTree] def black: Tree[A, B] = { + private[RedBlackTree] def black: Tree[A, B] = { //assertNotMutable(this) if (isBlack) this else new Tree(_key, _value, _left, _right, 
_count ^ colourBit) } - private[NewRedBlackTree] def red: Tree[A, B] = { + private[RedBlackTree] def red: Tree[A, B] = { //assertNotMutable(this) if (isRed) this else new Tree(_key, _value, _left, _right, _count ^ colourBit) } - private[NewRedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { + private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { //assertNotMutable(this) if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) } - private[NewRedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { + private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { //assertNotMutable(this) if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) } - private[NewRedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newLeft) if (newLeft eq _left) this @@ -727,7 +696,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) } } - private[NewRedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newRight) if (newRight eq _right) this @@ -736,7 +705,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size) } } - private[NewRedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) 
//assertNotMutable(newLeft) if ((newLeft eq _left) && isBlack) this @@ -745,7 +714,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size) } } - private[NewRedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newLeft) if ((newLeft eq _left) && isRed) this @@ -754,7 +723,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size) } } - private[NewRedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newRight) if ((newRight eq _right) && isBlack) this @@ -763,7 +732,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size) } } - private[NewRedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newLeft) if ((newRight eq _right) && isRed) this @@ -772,7 +741,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size) } } - private[NewRedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newLeft) //assertNotMutable(newRight) @@ -782,7 +751,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size) } } - 
private[NewRedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newLeft) //assertNotMutable(newRight) @@ -792,7 +761,7 @@ private[collection] object NewRedBlackTree { new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size) } } - private[NewRedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { //assertNotMutable(this) //assertNotMutable(newLeft) //assertNotMutable(newRight) @@ -805,26 +774,23 @@ private[collection] object NewRedBlackTree { } //see #Tree docs "Colour, mutablity and size encoding" //we make these final vals because the optimiser inlines them, without reference to the enclosing module - private[NewRedBlackTree] final val colourBit = 0x80000000 - //really its ~colourBit but that doesnt get inlined - private[NewRedBlackTree] final val colourMask = colourBit - 1 - private[NewRedBlackTree] final val initialBlackCount = colourBit - private[NewRedBlackTree] final val initialRedCount = 0 + private[RedBlackTree] final val colourBit = 0x80000000 + private[RedBlackTree] final val colourMask = ~colourBit + private[RedBlackTree] final val initialBlackCount = colourBit + private[RedBlackTree] final val initialRedCount = 0 - @`inline` private[NewRedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) - @`inline` private[NewRedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) + @`inline` private[RedBlackTree] def 
mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) + @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) /** create a new immutable red tree. * left and right may be null */ - //Note - in 2.13 this should be private[RedBlackTree] as its only needed or the 2.12 Old RedBlackTree private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { //assertNotMutable(left) //assertNotMutable(right) val size = sizeOf(left) + sizeOf(right) + 1 new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) } - //Note - in 2.13 this should be private[RedBlackTree] as its only needed or the 2.12 Old RedBlackTree private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { //assertNotMutable(left) //assertNotMutable(right) @@ -841,7 +807,7 @@ private[collection] object NewRedBlackTree { // private def assertNotMutable(t:Tree[_,_]) = { // devTimeAssert ((t eq null) || t.count > 0) // } - private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends Iterator[R] { + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { protected[this] def nextResult(tree: Tree[A, B]): R override def hasNext: Boolean = lookahead ne null @@ -872,7 +838,7 @@ private[collection] object NewRedBlackTree { protected[this] val stackOfNexts = if (root eq null) null else { /* - * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * According to "Ralf Hinze. 
Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. * * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) @@ -884,14 +850,14 @@ private[collection] object NewRedBlackTree { new Array[Tree[A, B]](maximumHeight) } private[this] var index = 0 - protected var lookahead: Tree[A, B] = if (start isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + protected var lookahead: Tree[A, B] = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) /** - * Find the leftmost subtree whose key is equal to the given key, or if no such thing, - * the leftmost subtree with the key that would be "next" after it according - * to the ordering. Along the way build up the iterator's path stack so that "next" - * functionality works. - */ + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * the leftmost subtree with the key that would be "next" after it according + * to the ordering. Along the way build up the iterator's path stack so that "next" + * functionality works. + */ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { @tailrec def find(tree: Tree[A, B]): Tree[A, B] = if (tree eq null) popNext() @@ -911,7 +877,7 @@ private[collection] object NewRedBlackTree { } private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { - override def nextResult(tree: Tree[A, B]) = ??? + override def nextResult(tree: Tree[A, B]): Nothing = ??? 
def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { var equal = true @@ -970,20 +936,34 @@ private[collection] object NewRedBlackTree { override def nextResult(tree: Tree[A, B]) = tree.value } - /** Build a Tree from an ordered sequence of key and values */ - def fromOrderedEntries[A, B](keys: Iterator[A], values: Iterator[B], size: Int): Tree[A, B] = { + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, Null] = size match { + case 0 => null + case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + BlackTree(x, null, left, right) + } + f(1, size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes def f(level: Int, size: Int): Tree[A, B] = size match { case 0 => null case 1 => - val k = keys.next() - val v = values.next() + val (k, v) = xs.next() mkTree(level != maxUsedDepth || level == 1, k, v, null, null) case n => val leftSize = (size-1)/2 val left = f(level+1, leftSize) - val k = keys.next() - val v = values.next() + val (k, v) = xs.next() val right = f(level+1, size-1-leftSize) BlackTree(k, v, left, right) } @@ -1001,8 +981,8 @@ private[collection] object NewRedBlackTree { val v2 = f(k, v) val r2 = transform(r, f) if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef]) - && (l2 eq l) - && (r2 eq r)) t.asInstanceOf[Tree[A, C]] + && (l2 eq l) + && (r2 eq r)) t.asInstanceOf[Tree[A, C]] else mkTree(t.isBlack, k, v2, l2, r2) } @@ -1024,7 +1004,7 @@ 
private[collection] object NewRedBlackTree { private[this] val null2 = (null, null) - def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = { + def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = if(t eq null) (null, null) else { if (t eq null) null2 else { object partitioner { @@ -1065,7 +1045,7 @@ private[collection] object NewRedBlackTree { } // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // Constructing Red-Black Trees, Ralf Hinze: [[http://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] + // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]] */ private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { @@ -1256,7 +1236,7 @@ private[collection] object NewRedBlackTree { if((t1 eq null) || (t2 eq null)) t1 else if (t1 eq t2) null else { - val (l1, _, r1, k1) = split(t1, t2.key) + val (l1, _, r1, _) = split(t1, t2.key) val tl = _difference(l1, t2.left) val tr = _difference(r1, t2.right) join2(tl, tr) diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala index 4f68edec7529..81a40c1c375b 100644 --- a/src/library/scala/collection/immutable/Seq.scala +++ b/src/library/scala/collection/immutable/Seq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,36 +14,142 @@ package scala package collection package immutable -import generic._ -import mutable.Builder -import parallel.immutable.ParSeq - -/** A subtrait of `collection.Seq` which represents sequences - * that are guaranteed immutable. - * - * $seqInfo - * @define Coll `immutable.Seq` - * @define coll immutable sequence - */ trait Seq[+A] extends Iterable[A] -// with GenSeq[A] - with scala.collection.Seq[A] - with GenericTraversableTemplate[A, Seq] - with SeqLike[A, Seq[A]] - with Parallelizable[A, ParSeq[A]] -{ - override def companion: GenericCompanion[Seq] = Seq - override def toSeq: Seq[A] = this - override def seq: Seq[A] = this - protected[this] override def parCombiner = ParSeq.newCombiner[A] // if `immutable.SeqLike` gets introduced, please move this there! -} - -/** $factoryInfo - * @define Coll `immutable.Seq` - * @define coll immutable sequence - */ -object Seq extends SeqFactory[Seq] { - /** genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Seq[A]] = new mutable.ListBuffer + with collection.Seq[A] + with SeqOps[A, Seq, Seq[A]] + with IterableFactoryDefaults[A, Seq] { + + override final def toSeq: this.type = this + + override def iterableFactory: SeqFactory[Seq] = Seq +} + +/** + * @define coll immutable sequence + * @define Coll `immutable.Seq` + */ +trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] + +/** + * $factoryInfo + * @define coll immutable sequence + * @define Coll `immutable.Seq` + */ +@SerialVersionUID(3L) +object Seq extends SeqFactory.Delegate[Seq](List) { + override def from[E](it: IterableOnce[E]): Seq[E] = it match { + case s: Seq[E] => s + case _ => super.from(it) + } +} + +/** Base trait for immutable indexed sequences that have efficient `apply` and `length` */ +trait IndexedSeq[+A] extends Seq[A] + with collection.IndexedSeq[A] + with IndexedSeqOps[A, IndexedSeq, 
IndexedSeq[A]] + with IterableFactoryDefaults[A, IndexedSeq] { + + final override def toIndexedSeq: IndexedSeq[A] = this + + override def canEqual(that: Any): Boolean = that match { + case otherIndexedSeq: IndexedSeq[_] => length == otherIndexedSeq.length && super.canEqual(that) + case _ => super.canEqual(that) + } + + + override def sameElements[B >: A](o: IterableOnce[B]): Boolean = o match { + case that: IndexedSeq[_] => + (this eq that) || { + val length = this.length + var equal = length == that.length + if (equal) { + var index = 0 + // some IndexedSeq apply is less efficient than using Iterators + // e.g. Vector so we can compare the first few with apply and the rest with an iterator + // but if apply is more efficient than Iterators then we can use the apply for all the comparison + // we default to the minimum preferred length + val maxApplyCompare = { + val preferredLength = Math.min(applyPreferredMaxLength, that.applyPreferredMaxLength) + if (length > (preferredLength.toLong << 1)) preferredLength else length + } + while (index < maxApplyCompare && equal) { + equal = this (index) == that(index) + index += 1 + } + if ((index < length) && equal) { + val thisIt = this.iterator.drop(index) + val thatIt = that.iterator.drop(index) + while (equal && thisIt.hasNext) { + equal = thisIt.next() == thatIt.next() + } + } + } + equal + } + case _ => super.sameElements(o) + } + + /** a hint to the runtime when scanning values + * [[apply]] is preferred for scan with a max index less than this value + * [[iterator]] is preferred for scans above this range + * @return a hint about when to use [[apply]] or [[iterator]] + */ + protected def applyPreferredMaxLength: Int = IndexedSeqDefaults.defaultApplyPreferredMaxLength + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq +} + +object IndexedSeqDefaults { + val defaultApplyPreferredMaxLength: Int = + try System.getProperty( + "scala.collection.immutable.IndexedSeq.defaultApplyPreferredMaxLength", 
"64").toInt + catch { + case _: SecurityException => 64 + } +} + +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](Vector) { + override def from[E](it: IterableOnce[E]): IndexedSeq[E] = it match { + case is: IndexedSeq[E] => is + case _ => super.from(it) + } +} + +/** Base trait for immutable indexed Seq operations */ +trait IndexedSeqOps[+A, +CC[_], +C] + extends SeqOps[A, CC, C] + with collection.IndexedSeqOps[A, CC, C] { + + override def slice(from: Int, until: Int): C = { + // since we are immutable we can just share the same collection + if (from <= 0 && until >= length) coll + else super.slice(from, until) + } + +} + +/** Base trait for immutable linear sequences that have efficient `head` and `tail` */ +trait LinearSeq[+A] + extends Seq[A] + with collection.LinearSeq[A] + with LinearSeqOps[A, LinearSeq, LinearSeq[A]] + with IterableFactoryDefaults[A, LinearSeq] { + + override def iterableFactory: SeqFactory[LinearSeq] = LinearSeq } + +@SerialVersionUID(3L) +object LinearSeq extends SeqFactory.Delegate[LinearSeq](List) { + override def from[E](it: IterableOnce[E]): LinearSeq[E] = it match { + case ls: LinearSeq[E] => ls + case _ => super.from(it) + } +} + +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] + extends Any with SeqOps[A, CC, C] + with collection.LinearSeqOps[A, CC, C] + +/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ +abstract class AbstractSeq[+A] extends scala.collection.AbstractSeq[A] with Seq[A] diff --git a/src/library/scala/collection/immutable/SeqMap.scala b/src/library/scala/collection/immutable/SeqMap.scala new file mode 100644 index 000000000000..03daef1481a8 --- /dev/null +++ b/src/library/scala/collection/immutable/SeqMap.scala @@ -0,0 +1,284 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.mutable.{Builder, ReusableBuilder} + +/** A base trait for ordered, immutable maps. + * + * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs + * without regard to ordering. + * + * All behavior is defined in terms of the abstract methods in `SeqMap`. + * It is sufficient for concrete subclasses to implement those methods. + * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] + extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + + +object SeqMap extends MapFactory[SeqMap] { + def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): SeqMap[K, V] = + it match { + //case sm: SeqMap[K, V] => sm + case m: ListMap[K, V] => m + case m: TreeSeqMap[K, V] => m + case m: VectorMap[K, V] => m + case m: SeqMap1[K, V] => m + case m: SeqMap2[K, V] => m + case m: SeqMap3[K, V] => m + case m: SeqMap4[K, V] => m + case it: Iterable[_] if it.isEmpty => empty[K, V] + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl + + @SerialVersionUID(3L) + private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: 
Int = 0 + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) + def removed(key: Any): SeqMap[Any, Nothing] = this + } + + @SerialVersionUID(3L) + private[immutable] final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator = Iterator.single((key1, value1)) + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap1(key1, value) + else new SeqMap2(key1, value1, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) SeqMap.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + } + } + + @SerialVersionUID(3L) + private[immutable] final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def 
getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap2(key1, value, key2, value2) + else if (key == key2) new SeqMap2(key1, value1, key2, value) + else new SeqMap3(key1, value1, key2, value2, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap1(key2, value2) + else if (key == key2) new SeqMap1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + } + } + + @SerialVersionUID(3L) + private[immutable] class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) + else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) + else new SeqMap4(key1, value1, key2, 
value2, key3, value3, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap2(key2, value2, key3, value3) + else if (key == key2) new SeqMap2(key1, value1, key3, value3) + else if (key == key3) new SeqMap2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + } + } + + @SerialVersionUID(3L) + private[immutable] final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 4 + override def knownSize: Int = 4 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) + else { + // 
Directly create the elements for performance reasons + val fields = Vector(key1, key2, key3, key4, key) + val underlying: Map[K, (Int, V1)] = + HashMap( + (key1, (0, value1)), + (key2, (1, value2)), + (key3, (2, value3)), + (key4, (3, value4)), + (key, (4, value)) + ) + new VectorMap(fields, underlying) + } + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + f(key4, value4) + } + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { 
+ switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } + } +} diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index dd572c3d9c9e..e8509b58016e 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,89 +14,125 @@ package scala package collection package immutable -import generic._ -import parallel.immutable.ParSet +import scala.collection.immutable.Set.Set4 +import scala.collection.mutable.{Builder, ReusableBuilder} -/** A generic trait for immutable sets. - * $setNote - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - * @author Martin Odersky - * @define Coll `immutable.Set` - * @define coll immutable set - */ +/** Base trait for immutable set collections */ trait Set[A] extends Iterable[A] -// with GenSet[A] - with scala.collection.Set[A] - with GenericSetTemplate[A, Set] - with SetLike[A, Set[A]] - with Parallelizable[A, ParSet[A]] -{ - override def companion: GenericCompanion[Set] = Set - - - /** Returns this $coll as an immutable set, perhaps accepting a - * wider range of elements. Since it already is an - * immutable set, it will only be rebuilt if the underlying structure - * cannot be expanded to include arbitrary element types. - * For instance, `BitSet` and `SortedSet` will be rebuilt, as - * they require `Int` and sortable elements respectively. 
- * - * When in doubt, the set will be rebuilt. Rebuilt sets never - * need to be rebuilt again. - */ - override def toSet[B >: A]: Set[B] = { - // This way of building sets typically has the best benchmarks, surprisingly! - val sb = Set.newBuilder[B] - foreach(sb += _) - sb.result() - } + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + override def iterableFactory: IterableFactory[Set] = Set +} - override def seq: Set[A] = this - protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there! +/** Base trait for immutable set operations + * + * @define coll immutable set + * @define Coll `immutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] { + + /** Creates a new set with an additional element, unless the element is + * already present. + * + * @param elem the element to be added + * @return a new set that contains all elements of this set and that also + * contains `elem`. + */ + def incl(elem: A): C + + /** Alias for `incl` */ + override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated + + /** Creates a new set with a given element removed from this set. + * + * @param elem the element to be removed + * @return a new set that contains all elements of this set but that does not + * contain `elem`. + */ + def excl(elem: A): C + + /** Alias for `excl` */ + @`inline` final override def - (elem: A): C = excl(elem) + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param that the collection containing the elements to remove. + * @return a new $coll with the given elements removed, omitting duplicates. 
+ */ + def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for removedAll */ + override final def -- (that: IterableOnce[A]): C = removedAll(that) } -/** $factoryInfo - * @define Coll `immutable.Set` - * @define coll immutable set - */ -object Set extends ImmutableSetFactory[Set] { - override def newBuilder[A]: mutable.Builder[A, Set[A]] = new SetBuilderImpl[A] +trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with collection.StrictOptimizedSetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { - /** $setCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Set[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] + override def concat(that: collection.IterableOnce[A]): C = { + var result: C = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +/** + * $factoryInfo + * @define coll immutable set + * @define Coll `immutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory[Set] { + + def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] + + def from[E](it: collection.IterableOnce[E]): Set[E] = + it match { + case _ if it.knownSize == 0 => empty[E] + // Since IterableOnce[E] launders the variance of E, + // identify only our implementations which can be soundly substituted. + // It's not sufficient to match `SortedSet[E]` to rebuild and `Set[E]` to retain. + case s: HashSet[E] => s + case s: ListSet[E] => s + case s: Set1[E] => s + case s: Set2[E] => s + case s: Set3[E] => s + case s: Set4[E] => s + case s: HashMap[E @unchecked, _]#HashKeySet => s + case s: MapOps[E, Any, Map, Map[E, Any]]#ImmutableKeySet @unchecked => s + // We also want `SortedSet` (and subclasses, such as `BitSet`) + // to rebuild themselves, to avoid element type widening issues. 
+ case _ => newBuilder[E].addAll(it).result() + } + + def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] /** An optimized representation for immutable empty sets */ - @SerialVersionUID(-2443710944435909512L) - private object EmptySet extends AbstractSet[Any] with Set[Any] with Serializable { + @SerialVersionUID(3L) + private object EmptySet extends AbstractSet[Any] with Serializable { override def size: Int = 0 + override def isEmpty = true + override def knownSize: Int = size + override def filter(pred: Any => Boolean): Set[Any] = this + override def filterNot(pred: Any => Boolean): Set[Any] = this + override def removedAll(that: IterableOnce[Any]): Set[Any] = this + override def diff(that: collection.Set[Any]): Set[Any] = this + override def subsetOf(that: collection.Set[Any]): Boolean = true + override def intersect(that: collection.Set[Any]): Set[Any] = this + override def view: View[Any] = View.empty def contains(elem: Any): Boolean = false - def + (elem: Any): Set[Any] = new Set1(elem) - def - (elem: Any): Set[Any] = this + def incl(elem: Any): Set[Any] = new Set1(elem) + def excl(elem: Any): Set[Any] = this def iterator: Iterator[Any] = Iterator.empty override def foreach[U](f: Any => U): Unit = () - override def toSet[B >: Any]: Set[B] = this.asInstanceOf[Set[B]] - - override def ++[B >: Any, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Set[Any], B, That]): That = { - if (bf eq Set.canBuildFrom) that match { - case hs: HashSet[Any] if hs.size > 4 => hs.asInstanceOf[That] - case EmptySet => EmptySet.asInstanceOf[That] - case hs: Set1[Any] => hs.asInstanceOf[That] - case hs: Set2[Any] => hs.asInstanceOf[That] - case hs: Set3[Any] => hs.asInstanceOf[That] - case hs: Set4[Any] => hs.asInstanceOf[That] - case _ => super.++(that) - } - else if (bf eq HashSet.canBuildFrom) that match { - case hs: HashSet[Any] => hs.asInstanceOf[That] - case _ => super.++(that) - } else super.++(that) - } - } private[collection] def emptyInstance: 
Set[Any] = EmptySet @@ -104,6 +140,7 @@ object Set extends ImmutableSetFactory[Set] { private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable { private[this] var current = 0 private[this] var remainder = n + override def knownSize: Int = remainder def hasNext = remainder > 0 def apply(i: Int): A def next(): A = @@ -124,51 +161,43 @@ object Set extends ImmutableSetFactory[Set] { } /** An optimized representation for immutable sets of size 1 */ - @SerialVersionUID(1233385750652442003L) - class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with Set[A] with Serializable { + @SerialVersionUID(3L) + final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { override def size: Int = 1 - def contains(elem: A): Boolean = - elem == elem1 - def + (elem: A): Set[A] = + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 + def incl(elem: A): Set[A] = if (contains(elem)) this else new Set2(elem1, elem) - def - (elem: A): Set[A] = + def excl(elem: A): Set[A] = if (elem == elem1) Set.empty else this - def iterator: Iterator[A] = - Iterator.single(elem1) - override def foreach[U](f: A => U): Unit = { - f(elem1) - } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) - } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { - p(elem1) - } - override private[scala] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = + def iterator: Iterator[A] = Iterator.single(elem1) + override def foreach[U](f: A => U): Unit = f(elem1) + override def exists(p: A => Boolean): Boolean = p(elem1) + override def forall(p: A => Boolean): Boolean = p(elem1) + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = if (pred(elem1) != isFlipped) this else Set.empty - override def find(@deprecatedName('f) p: A 
=> Boolean): Option[A] = { + + override def find(p: A => Boolean): Option[A] = if (p(elem1)) Some(elem1) else None - } override def head: A = elem1 override def tail: Set[A] = Set.empty - // Why is Set1 non-final? Need to fix that! - @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set1[B]] } /** An optimized representation for immutable sets of size 2 */ - @SerialVersionUID(-6443011234944830092L) - class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with Set[A] with Serializable { + @SerialVersionUID(3L) + final class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { override def size: Int = 2 - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 - def + (elem: A): Set[A] = + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 || elem == elem2 + def incl(elem: A): Set[A] = if (contains(elem)) this else new Set3(elem1, elem2, elem) - def - (elem: A): Set[A] = + def excl(elem: A): Set[A] = if (elem == elem1) new Set1(elem2) else if (elem == elem2) new Set1(elem1) else this @@ -180,13 +209,13 @@ object Set extends ImmutableSetFactory[Set] { override def foreach[U](f: A => U): Unit = { f(elem1); f(elem2) } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { + override def exists(p: A => Boolean): Boolean = { p(elem1) || p(elem2) } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { + override def forall(p: A => Boolean): Boolean = { p(elem1) && p(elem2) } - override private[scala] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { var r1: A = null.asInstanceOf[A] var n = 0 if 
(pred(elem1) != isFlipped) { r1 = elem1; n += 1} @@ -198,28 +227,27 @@ object Set extends ImmutableSetFactory[Set] { case 2 => this } } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { + override def find(p: A => Boolean): Option[A] = { if (p(elem1)) Some(elem1) else if (p(elem2)) Some(elem2) else None } override def head: A = elem1 override def tail: Set[A] = new Set1(elem2) - // Why is Set2 non-final? Need to fix that! - @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set2[B]] } /** An optimized representation for immutable sets of size 3 */ - @SerialVersionUID(-3590273538119220064L) - class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with Set[A] with Serializable { + @SerialVersionUID(3L) + final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { override def size: Int = 3 + override def isEmpty = false + override def knownSize: Int = size def contains(elem: A): Boolean = elem == elem1 || elem == elem2 || elem == elem3 - def + (elem: A): Set[A] = + def incl(elem: A): Set[A] = if (contains(elem)) this else new Set4(elem1, elem2, elem3, elem) - def - (elem: A): Set[A] = + def excl(elem: A): Set[A] = if (elem == elem1) new Set2(elem2, elem3) else if (elem == elem2) new Set2(elem1, elem3) else if (elem == elem3) new Set2(elem1, elem2) @@ -232,13 +260,13 @@ object Set extends ImmutableSetFactory[Set] { override def foreach[U](f: A => U): Unit = { f(elem1); f(elem2); f(elem3) } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { + override def exists(p: A => Boolean): Boolean = { p(elem1) || p(elem2) || p(elem3) } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { + override def forall(p: A => Boolean): Boolean = { p(elem1) 
&& p(elem2) && p(elem3) } - override private[scala] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { var r1, r2: A = null.asInstanceOf[A] var n = 0 if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} @@ -252,7 +280,7 @@ object Set extends ImmutableSetFactory[Set] { case 3 => this } } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { + override def find(p: A => Boolean): Option[A] = { if (p(elem1)) Some(elem1) else if (p(elem2)) Some(elem2) else if (p(elem3)) Some(elem3) @@ -260,21 +288,20 @@ object Set extends ImmutableSetFactory[Set] { } override def head: A = elem1 override def tail: Set[A] = new Set2(elem2, elem3) - // Why is Set3 non-final? Need to fix that! - @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set3[B]] } /** An optimized representation for immutable sets of size 4 */ - @SerialVersionUID(-3622399588156184395L) - class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with Set[A] with Serializable { + @SerialVersionUID(3L) + final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { override def size: Int = 4 + override def isEmpty = false + override def knownSize: Int = size def contains(elem: A): Boolean = elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 - def + (elem: A): Set[A] = + def incl(elem: A): Set[A] = if (contains(elem)) this - else new HashSet[A] + elem1 + elem2 + elem3 + elem4 + elem - def - (elem: A): Set[A] = + else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem + def excl(elem: A): Set[A] = if (elem == elem1) new Set3(elem2, elem3, elem4) else if (elem == elem2) new Set3(elem1, 
elem3, elem4) else if (elem == elem3) new Set3(elem1, elem2, elem4) @@ -288,13 +315,13 @@ object Set extends ImmutableSetFactory[Set] { override def foreach[U](f: A => U): Unit = { f(elem1); f(elem2); f(elem3); f(elem4) } - override def exists(@deprecatedName('f) p: A => Boolean): Boolean = { + override def exists(p: A => Boolean): Boolean = { p(elem1) || p(elem2) || p(elem3) || p(elem4) } - override def forall(@deprecatedName('f) p: A => Boolean): Boolean = { + override def forall(p: A => Boolean): Boolean = { p(elem1) && p(elem2) && p(elem3) && p(elem4) } - override private[scala] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { var r1, r2, r3: A = null.asInstanceOf[A] var n = 0 if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} @@ -311,7 +338,7 @@ object Set extends ImmutableSetFactory[Set] { } } - override def find(@deprecatedName('f) p: A => Boolean): Option[A] = { + override def find(p: A => Boolean): Option[A] = { if (p(elem1)) Some(elem1) else if (p(elem2)) Some(elem2) else if (p(elem3)) Some(elem3) @@ -320,71 +347,61 @@ object Set extends ImmutableSetFactory[Set] { } override def head: A = elem1 override def tail: Set[A] = new Set3(elem2, elem3, elem4) - // Why is Set4 non-final? Need to fix that! - @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set4[B]] + + private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = + builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) } - /** Builder for Set. 
- */ - private final class SetBuilderImpl[A] extends mutable.ReusableBuilder[A, Set[A]] { - import scala.collection.immutable.HashSet.HashSetBuilder - - private[this] var elems: Set[A] = Set.empty[A] - private[this] var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A] = _ - - override def clear(): Unit = { - elems = Set.empty[A] - if (hashSetBuilder ne null) - hashSetBuilder.clear() - switchedToHashSetBuilder = false - } +} - override def result(): Set[A] = - if (switchedToHashSetBuilder) hashSetBuilder.result() else elems +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] + +/** Builder for Set. + * $multipleResults + */ +private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { + private[this] var elems: Set[A] = Set.empty + private[this] var switchedToHashSetBuilder: Boolean = false + private[this] var hashSetBuilder: HashSetBuilder[A] = _ + + override def clear(): Unit = { + elems = Set.empty + if (hashSetBuilder != null) { + hashSetBuilder.clear() + } + switchedToHashSetBuilder = false + } - override def +=(elem: A) = { - if (switchedToHashSetBuilder) { - hashSetBuilder += elem - } else if (elems.size < 4) { - elems = elems + elem + override def result(): Set[A] = + if (switchedToHashSetBuilder) hashSetBuilder.result() else elems + + def addOne(elem: A) = { + if (switchedToHashSetBuilder) { + hashSetBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem)) { + () // do nothing } else { - // assert(elems.size == 4) - if (elems.contains(elem)) { - () // do nothing - } else { - convertToHashSetBuilder() - hashSetBuilder += elem + switchedToHashSetBuilder = true + if (hashSetBuilder == null) { + hashSetBuilder = new HashSetBuilder } + elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) + 
hashSetBuilder.addOne(elem) } - - this } - private def convertToHashSetBuilder(): Unit = { - switchedToHashSetBuilder = true - if (hashSetBuilder eq null) - hashSetBuilder = new HashSetBuilder - - hashSetBuilder ++= elems - } - - override def ++=(xs: TraversableOnce[A]): this.type = { - xs match { - case _ if switchedToHashSetBuilder => - hashSetBuilder ++= xs - - case set: collection.Set[A] if set.size > 4 => - convertToHashSetBuilder() - hashSetBuilder ++= set + this + } - case _ => super.++= (xs) - } + override def addAll(xs: IterableOnce[A]): this.type = + if (switchedToHashSetBuilder) { + hashSetBuilder.addAll(xs) this + } else { + super.addAll(xs) } - - } - - } - diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala deleted file mode 100644 index c86bfe4df025..000000000000 --- a/src/library/scala/collection/immutable/SetProxy.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -/** This is a simple wrapper class for [[scala.collection.immutable.Set]]. - * - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. - * - * @tparam A type of the elements contained in this set proxy. 
- * - * @since 2.8 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") -trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] { - override def repr = this - private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] = - new AbstractSet[B] with SetProxy[B] { val self = newSelf } - - override def empty = newProxy(self.empty) - override def + (elem: A) = newProxy(self + elem) - override def - (elem: A) = newProxy(self - elem) -} diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index e7b9b5722151..120ae23ae024 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,139 +14,164 @@ package scala package collection package immutable -import generic._ -import mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder + +/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. + * + * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in + * sorted order, according to the map's [[scala.math.Ordering]]. 
+ * + * @example {{{ + * import scala.collection.immutable.SortedMap + * + * // Make a SortedMap via the companion object factory + * val weekdays = SortedMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + */ +trait SortedMap[K, +V] + extends Map[K, V] + with collection.SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault[V1 >: V](d: K => V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + + /** The same map with a given default value. 
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) +} -/** A map whose keys are sorted. - * - * @tparam A the type of the keys contained in this sorted map. - * @tparam B the type of the values associated with the keys. - * - * @author Sean McDirmid - * @author Martin Odersky - * @since 2.4 - * @define Coll immutable.SortedMap - * @define coll immutable sorted map - */ -trait SortedMap[A, +B] extends Map[A, B] - with scala.collection.SortedMap[A, B] - with MapLike[A, B, SortedMap[A, B]] - with SortedMapLike[A, B, SortedMap[A, B]] -{ -self => - - override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] = - SortedMap.newBuilder[A, B] - - override def empty: SortedMap[A, B] = SortedMap.empty - override def updated [B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value)) - override def keySet: immutable.SortedSet[A] = new DefaultKeySortedSet - - @SerialVersionUID(112809526508924148L) - protected class DefaultKeySortedSet extends super.DefaultKeySortedSet with immutable.SortedSet[A] { - override def + (elem: A): SortedSet[A] = - if (this(elem)) this - else SortedSet[A]() ++ this + elem - override def - (elem: A): SortedSet[A] = - if (this(elem)) SortedSet[A]() ++ this - elem - else this - override def rangeImpl(from : Option[A], until : Option[A]) : SortedSet[A] = { +trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => + + protected def coll: C with CC[K, V] + + def unsorted: Map[K, 
V] + + override def keySet: SortedSet[K] = new ImmutableKeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { val map = self.rangeImpl(from, until) - new map.DefaultKeySortedSet - } - override def toSet[C >: A]: Set[C] = { - // This way of building sets typically has the best benchmarks, surprisingly! - val sb = Set.newBuilder[C] - foreach(sb += _) - sb.result() + new map.ImmutableKeySortedSet } + def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) + def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) } - /** Add a key/value pair to this map. - * @param kv the key/value pair - * @return A new map with the new binding added to this map - * @note needs to be overridden in subclasses - */ - def + [B1 >: B](kv: (A, B1)): SortedMap[A, B1] = throw new AbstractMethodError("SortedMap.+") - - /** Adds two or more elements to this collection and returns - * a new collection. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = - this + elem1 + elem2 ++ elems - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. 
- */ - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = - ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) - - override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) - override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} - override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p - override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v} + // We override these methods to fix their return type (which would be `Map` otherwise) + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = { + // Implementation has been copied from `MapOps` + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } } + override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) +} - override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { - implicit def ordering: Ordering[A] = self.ordering - override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) - override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))} - override def keysIteratorFrom(start : A) = self keysIteratorFrom start - override def valuesIteratorFrom(start : A) = 
self valuesIteratorFrom start map f - } +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with collection.StrictOptimizedSortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { - override def equals(that: Any): Boolean = that match { - case _ if this eq that.asInstanceOf[AnyRef] => true - case sm: SortedMap[k, v] if sm.ordering == this.ordering => - (sm canEqual this) && - (this.size == sm.size) && { - val i1 = this.iterator - val i2 = sm.iterator - var allEqual = true - while (allEqual && i1.hasNext) - allEqual = i1.next() == i2.next() - allEqual - } - // copy/pasted from super.equals for binary compat reasons! - case that: GenMap[b, _] => - GenMap.mapEquals(this, that) - case _ => - false && super.equals(that) // generate unused super accessor for binary compatibility (scala/scala#9311) + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = { + var result: CC[K, V2] = coll + val it = xs.iterator + while (it.hasNext) result = result + it.next() + result } } -/** $factoryInfo - * @define Coll immutable.SortedMap - * @define coll immutable sorted map - */ -object SortedMap extends ImmutableSortedMapFactory[SortedMap] { - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] - def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B] - - override def newBuilder[A, B](implicit ord: Ordering[A]): mutable.Builder[(A, B), SortedMap[A, B]] = TreeMap.newBuilder[A, B] - - private[collection] trait Default[A, +B] extends SortedMap[A, B] with scala.collection.SortedMap.Default[A, B] { - self => - override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { - val b = SortedMap.newBuilder[A, B1] - b ++= this - b += ((kv._1, kv._2)) - b.result() - } +@SerialVersionUID(3L) +object 
SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - override def - (key: A): SortedMap[A, B] = { - val b = newBuilder - for (kv <- this; if kv._1 != key) b += kv - b.result() - } + override def from[K: Ordering, V](it: IterableOnce[(K, V)]): SortedMap[K, V] = it match { + case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm + case _ => super.from(it) + } + + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { + + implicit def ordering: Ordering[K] = underlying.ordering + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + + override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + new WithDefault( underlying.concat(xs) , defaultValue) + + override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def 
newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) } } diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala index 714302926247..2eda00ac6b2f 100644 --- a/src/library/scala/collection/immutable/SortedSet.scala +++ b/src/library/scala/collection/immutable/SortedSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,51 +14,44 @@ package scala package collection package immutable -import generic._ +/** Base trait for sorted sets */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { -/** A subtrait of `collection.SortedSet` which represents sorted sets - * which cannot be mutated. - * - * @author Sean McDirmid - * @author Martin Odersky - * @since 2.4 - * @define Coll `immutable.SortedSet` - * @define coll immutable sorted set - */ -trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with SortedSetLike[A, SortedSet[A]] { - /** Needs to be overridden in subclasses. */ - override def empty: SortedSet[A] = SortedSet.empty[A] + override def unsorted: Set[A] = this - override def equals(that: Any): Boolean = that match { - case _ if this eq that.asInstanceOf[AnyRef] => true - case ss: SortedSet[_] if ss.ordering == this.ordering => - (ss canEqual this) && - (this.size == ss.size) && { - val i1 = this.iterator - val i2 = ss.iterator - var allEqual = true - while (allEqual && i1.hasNext) - allEqual = i1.next() == i2.next - allEqual - } - // copy/pasted from super.equals for binary compat reasons! 
- case that: GenSet[_] => - GenSet.setEquals(this, that) - case _ => - false && super.equals(that) // generate unused super accessor for binary compatibility (scala/scala#9311) - } + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet } -/** $factoryInfo - * @define Coll `immutable.SortedSet` - * @define coll immutable sorted set - */ -object SortedSet extends ImmutableSortedSetFactory[SortedSet] { - /** $sortedSetCanBuildFromInfo */ - def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A] - def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A] - override def newBuilder[A](implicit ord: Ordering[A]): mutable.Builder[A, SortedSet[A]] = TreeSet.newBuilder[A] +/** + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { - // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific - override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom + def unsorted: Set[A] +} + +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with collection.StrictOptimizedSortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { +} + +/** + * $factoryInfo + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { + override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss + case _ => super.from(it) + } } diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala 
deleted file mode 100644 index 956a4b97c95e..000000000000 --- a/src/library/scala/collection/immutable/Stack.scala +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import generic._ -import mutable.{ ArrayBuffer, Builder } - -/** $factoryInfo - * @define Coll `immutable.Stack` - * @define coll immutable stack - */ -object Stack extends SeqFactory[Stack] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Stack[A]] = new ArrayBuffer[A] mapResult (buf => new Stack(buf.toList)) -} - -/** This class implements immutable stacks using a list-based data - * structure. - * - * '''Note:''' This class exists only for historical reason and as an - * analogue of mutable stacks. - * Instead of an immutable stack you can just use a list. - * - * @tparam A the type of the elements contained in this stack. - * - * @author Matthias Zenger - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-stacks "Scala's Collection Library overview"]] - * section on `Immutable stacks` for more information. - * - * @define Coll `immutable.Stack` - * @define coll immutable stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1976480595012942526L) -@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. 
Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0") -class Stack[+A] protected (protected val elems: List[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, Stack] - with LinearSeqOptimized[A, Stack[A]] - with Serializable { - override def companion: GenericCompanion[Stack] = Stack - - def this() = this(Nil) - - /** Checks if this stack is empty. - * - * @return true, iff there is no element on the stack. - */ - override def isEmpty: Boolean = elems.isEmpty - - override def head = elems.head - override def tail = new Stack(elems.tail) - - /** Push an element on the stack. - * - * @param elem the element to push on the stack. - * @return the stack with the new element on top. - */ - def push[B >: A](elem: B): Stack[B] = new Stack(elem :: elems) - - /** Push a sequence of elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elems the element sequence. - * @return the stack with the new elements on top. - */ - def push[B >: A](elem1: B, elem2: B, elems: B*): Stack[B] = - this.push(elem1).push(elem2).pushAll(elems) - - /** Push all elements provided by the given traversable object onto - * the stack. The last element returned by the traversable object - * will be on top of the new stack. - * - * @param xs the iterator object. - * @return the stack with the new elements on top. - */ - def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] = - ((this: Stack[B]) /: xs.toIterator)(_ push _) - - /** Returns the top element of the stack. An error is signaled if - * there is no element on the stack. - * - * @throws java.util.NoSuchElementException - * @return the top element. - */ - def top: A = - if (!isEmpty) elems.head - else throw new NoSuchElementException("top of empty stack") - - /** Removes the top element from the stack. 
- * Note: should return `(A, Stack[A])` as for queues (mics) - * - * @throws java.util.NoSuchElementException - * @return the new stack without the former top element. - */ - def pop: Stack[A] = - if (!isEmpty) new Stack(elems.tail) - else throw new NoSuchElementException("pop of empty stack") - - def pop2: (A, Stack[A]) = - if (!isEmpty) (elems.head, new Stack(elems.tail)) - else throw new NoSuchElementException("pop of empty stack") - - override def reverse: Stack[A] = new Stack(elems.reverse) - - /** Returns an iterator over all elements on the stack. The iterator - * issues elements in the reversed order they were inserted into the - * stack (LIFO order). - * - * @return an iterator over all stack elements. - */ - override def iterator: Iterator[A] = elems.iterator - - /** Returns a string representation of this stack. - */ - override def toString() = elems.mkString("Stack(", ", ", ")") -} diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 17cd904e045c..898a988735c6 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,682 +14,249 @@ package scala package collection package immutable -import generic._ -import mutable.{Builder, StringBuilder, LazyBuilder} +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + import scala.annotation.tailrec -import Stream.cons +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{ArrayBuffer, StringBuilder} import scala.language.implicitConversions +import Stream.cons -/** The class `Stream` implements lazy lists where elements - * are only evaluated when they are needed. Here is an example: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 } - * - * fibs take 5 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * }}} - * - * The `Stream` class also employs memoization such that previously computed - * values are converted from `Stream` elements to concrete values of type `A`. - * To illustrate, we will alter body of the `fibs` value above and take some - * more values: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * - * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip( - * fibs.tail).map(n => { - * println("Adding %d and %d".format(n._1, n._2)) - * n._1 + n._2 - * }) - * - * fibs take 5 foreach println - * fibs take 6 foreach println - * } - * - * // prints - * // - * // 0 - * // 1 - * // Adding 0 and 1 - * // 1 - * // Adding 1 and 1 - * // 2 - * // Adding 1 and 2 - * // 3 - * - * // And then prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * // Adding 2 and 3 - * // 5 - * }}} - * - * There are a number of subtle points to the above example. - * - * - The definition of `fibs` is a `val` not a method. 
The memoization of the - * `Stream` requires us to have somewhere to store the information and a `val` - * allows us to do that. - * - * - While the `Stream` is actually being modified during access, this does not - * change the notion of its immutability. Once the values are memoized they do - * not change and values that have yet to be memoized still "exist", they - * simply haven't been realized yet. - * - * - One must be cautious of memoization; you can very quickly eat up large - * amounts of memory if you're not careful. The reason for this is that the - * memoization of the `Stream` creates a structure much like - * [[scala.collection.immutable.List]]. So long as something is holding on to - * the head, the head holds on to the tail, and so it continues recursively. - * If, on the other hand, there is nothing holding on to the head (e.g. we used - * `def` to define the `Stream`) then once it is no longer being used directly, - * it disappears. - * - * - Note that some operations, including [[drop]], [[dropWhile]], - * [[flatMap]] or [[collect]] may process a large number of intermediate - * elements before returning. These necessarily hold onto the head, since - * they are methods on `Stream`, and a stream holds its own head. For - * computations of this sort where memoization is not desired, use - * `Iterator` when possible. - * - * {{{ - * // For example, let's build the natural numbers and do some silly iteration - * // over them. 
- * - * // We'll start with a silly iteration - * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { - * // Stop after 200,000 - * if (i < 200001) { - * if (i % 50000 == 0) println(s + i) - * loop(s, iter.next, iter) - * } - * } - * - * // Our first Stream definition will be a val definition - * val stream1: Stream[Int] = { - * def loop(v: Int): Stream[Int] = v #:: loop(v + 1) - * loop(0) - * } - * - * // Because stream1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the Stream is held in stream1 - * val it1 = stream1.iterator - * loop("Iterator1: ", it1.next, it1) - * - * // We can redefine this Stream such that all we have is the Iterator left - * // and allow the Stream to be garbage collected as required. Using a def - * // to provide the Stream ensures that no val is holding onto the head as - * // is the case with stream1 - * def stream2: Stream[Int] = { - * def loop(v: Int): Stream[Int] = v #:: loop(v + 1) - * loop(0) - * } - * val it2 = stream2.iterator - * loop("Iterator2: ", it2.next, it2) - * - * // And, of course, we don't actually need a Stream at all for such a simple - * // problem. There's no reason to use a Stream if you don't actually need - * // one. - * val it3 = new Iterator[Int] { - * var i = -1 - * def hasNext = true - * def next(): Int = { i += 1; i } - * } - * loop("Iterator3: ", it3.next, it3) - * }}} - * - * - The fact that `tail` works at all is of interest. In the definition of - * `fibs` we have an initial `(0, 1, Stream(...))` so `tail` is deterministic. - * If we defined `fibs` such that only `0` were concretely known then the act - * of determining `tail` would require the evaluation of `tail` which would - * cause an infinite recursion and stack overflow. 
If we define a definition - * where the tail is not initially computable then we're going to have an - * infinite recursion: - * {{{ - * // The first time we try to access the tail we're going to need more - * // information which will require us to recurse, which will require us to - * // recurse, which... - * lazy val sov: Stream[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * }}} - * - * The definition of `fibs` above creates a larger number of objects than - * necessary depending on how you might want to implement it. The following - * implementation provides a more "cost effective" implementation due to the - * fact that it has a more direct route to the numbers themselves: - * - * {{{ - * lazy val fib: Stream[Int] = { - * def loop(h: Int, n: Int): Stream[Int] = h #:: loop(n, h + n) - * loop(1, 1) - * } - * }}} - * - * Note that `mkString` forces evaluation of a `Stream`, but `addString` does - * not. In both cases, a `Stream` that is or ends in a cycle - * (e.g. `lazy val s: Stream[Int] = 0 #:: s`) will convert additional trips - * through the cycle to `...`. Additionally, `addString` will display an - * un-memoized tail as `?`. - * - * @tparam A the type of the elements contained in this stream. - * - * @author Martin Odersky, Matthias Zenger - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#streams "Scala's Collection Library overview"]] - * section on `Streams` for more information. - - * @define naturalsEx def naturalsFrom(i: Int): Stream[Int] = i #:: naturalsFrom(i + 1) - * @define Coll `Stream` - * @define coll stream - * @define orderDependent - * @define orderDependentFold - * @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections. 
- */ +@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") +@SerialVersionUID(3L) sealed abstract class Stream[+A] extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, Stream] - with LinearSeqOptimized[A, Stream[A]] - with Serializable { self => + with LinearSeq[A] + with LinearSeqOps[A, Stream, Stream[A]] + with IterableFactoryDefaults[A, Stream] + with Serializable { + def tail: Stream[A] - override def companion: GenericCompanion[Stream] = Stream + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type - /** Indicates whether or not the `Stream` is empty. - * - * @return `true` if the `Stream` is empty and `false` otherwise. - */ - def isEmpty: Boolean + override def iterableFactory: SeqFactory[Stream] = Stream - /** Gives constant time access to the first element of this `Stream`. Using - * the `fibs` example from earlier: - * - * {{{ - * println(fibs head) - * // prints - * // 0 - * }}} - * - * @return The first element of the `Stream`. - * @throws java.util.NoSuchElementException if the stream is empty. - */ - def head: A + override protected[this] def className: String = "Stream" - /** A stream consisting of the remaining elements of this stream after the - * first one. - * - * Note that this method does not force evaluation of the `Stream` but merely - * returns the lazy result. - * - * @return The tail of the `Stream`. - * @throws UnsupportedOperationException if the stream is empty. 
- */ - def tail: Stream[A] + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying Stream as elements + * are consumed. + * @note This function will force the realization of the entire Stream + * unless the `f` throws an exception. + */ + @tailrec + override final def foreach[U](f: A => U): Unit = { + if (!this.isEmpty) { + f(head) + tail.foreach(f) + } + } - /** Is the tail of this stream defined? */ - protected def tailDefined: Boolean + @tailrec + override final def find(p: A => Boolean): Option[A] = { + if(isEmpty) None + else if(p(head)) Some(head) + else tail.find(p) + } - // Implementation of abstract method in Traversable + override def take(n: Int): Stream[A] = { + if (n <= 0 || isEmpty) Stream.empty + else if (n == 1) new Stream.Cons(head, Stream.empty) + else new Stream.Cons(head, tail.take(n - 1)) + } - // New methods in Stream + /** Stream specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `Stream`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override final def foldLeft[B](z: B)(op: (B, A) => B): B = { + if (this.isEmpty) z + else tail.foldLeft(op(z, head))(op) + } /** The stream resulting from the concatenation of this stream with the argument stream. - * @param rest The stream that gets appended to this stream - * @return The stream containing elements of this stream and the traversable object. 
- */ - def append[B >: A](rest: => TraversableOnce[B]): Stream[B] = - if (isEmpty) rest.toStream else cons(head, tail append rest) + * @param rest The collection that gets appended to this stream + * @return The stream containing elements of this stream and the iterable object. + */ + @deprecated("The `append` operation has been renamed `lazyAppendedAll`", "2.13.0") + @inline final def append[B >: A](rest: => IterableOnce[B]): Stream[B] = lazyAppendedAll(rest) - /** Forces evaluation of the whole stream and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. - */ - def force: Stream[A] = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those = this - if (!these.isEmpty) these = these.tail - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } + protected[this] def writeReplace(): AnyRef = + if(nonEmpty && tailDefined) new Stream.SerializationProxy[A](this) else this /** Prints elements of this stream one by one, separated by commas. */ - def print() { print(", ") } + @deprecated(message = """Use print(stream.force.mkString(", ")) instead""", since = "2.13.0") + @inline def print(): Unit = Console.print(this.force.mkString(", ")) /** Prints elements of this stream one by one, separated by `sep`. - * @param sep The separator string printed between consecutive elements. 
- */ - def print(sep: String) { - def loop(these: Stream[A], start: String) { - Console.print(start) - if (these.isEmpty) Console.print("empty") - else { - Console.print(these.head) - loop(these.tail, sep) - } - } - loop(this, "") - } - - /** Returns the length of this `Stream`. - * - * @note In order to compute the length of the `Stream`, it must first be - * fully realized, which could cause the complete evaluation of an infinite - * series, assuming that's what your `Stream` represents. - * - * @return The length of this `Stream`. - */ - override def length: Int = { - var len = 0 - var left = this - while (!left.isEmpty) { - len += 1 - left = left.tail - } - len - } - - // It's an imperfect world, but at least we can bottle up the - // imperfection in a capsule. - @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] - @inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]] - @inline private def isStreamBuilder[B, That](bf: CanBuildFrom[Stream[A], B, That]) = - bf(repr).isInstanceOf[Stream.StreamBuilder[_]] + * @param sep The separator string printed between consecutive elements. + */ + @deprecated(message = "Use print(stream.force.mkString(sep)) instead", since = "2.13.0") + @inline def print(sep: String): Unit = Console.print(this.force.mkString(sep)) - // Overridden methods from Traversable + /** The stream resulting from the concatenation of this stream with the argument stream. + * + * @param suffix The collection that gets appended to this stream + * @return The stream containing elements of this stream and the iterable object. 
+ */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): Stream[B] = + if (isEmpty) iterableFactory.from(suffix) else cons[B](head, tail.lazyAppendedAll(suffix)) - override def toStream: Stream[A] = this + override def scanLeft[B](z: B)(op: (B, A) => B): Stream[B] = + if (isEmpty) z +: iterableFactory.empty + else cons(z, tail.scanLeft(op(z, head))(op)) - override def hasDefiniteSize: Boolean = isEmpty || { - if (!tailDefined) false + /** Stream specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `Stream`. + * @return The accumulated value from successive applications of `f`. + */ + override final def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") else { - // Two-iterator trick (2x & 1x speed) for cycle detection. - var those = this - var these = tail - while (those ne these) { - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (those eq these) return false - those = those.tail + var reducedRes: B = this.head + var left: Stream[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail } - false // Cycle detected + reducedRes } } - /** Create a new stream which contains all elements of this stream followed by - * all elements of Traversable `that`. - * - * @note It's subtle why this works. We know that if the target type of the - * [[scala.collection.mutable.Builder]] `That` is either a `Stream`, or one of - * its supertypes, or undefined, then `StreamBuilder` will be chosen for the - * implicit. We recognize that fact and optimize to get more laziness. 
- * - * @note This method doesn't cause the `Stream` to be fully realized but it - * should be noted that using the `++` operator from another collection type - * could cause infinite realization of a `Stream`. For example, referring to - * the definition of `fibs` in the preamble, the following would never return: - * `List(BigInt(12)) ++ fibs`. - * - * @tparam B The element type of the returned collection.'''That''' - * @param that The [[scala.collection.GenTraversableOnce]] to be concatenated - * to this `Stream`. - * @return A new collection containing the result of concatenating `this` with - * `that`. - */ - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - // we assume there is no other builder factory on streams and therefore know that That = Stream[A] - if (isStreamBuilder(bf)) asThat( - if (isEmpty) that.toStream - else cons(head, asStream[A](tail ++ that)) - ) - else super.++(that)(bf) + override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - if (isStreamBuilder(bf)) asThat(cons(elem, this)) - else super.+:(elem)(bf) + override def filter(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = false) - /** - * Create a new stream which contains all intermediate results of applying the - * operator to subsequent elements left to right. `scanLeft` is analogous to - * `foldLeft`. - * - * @note This works because the target type of the - * [[scala.collection.mutable.Builder]] `That` is a `Stream`. - * - * @param z The initial value for the scan. - * @param op A function that will apply operations to successive values in the - * `Stream` against previous accumulated results. - * @return A new collection containing the modifications from the application - * of `op`. 
- */ - override final def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - if (isStreamBuilder(bf)) asThat( - if (isEmpty) Stream(z) - else cons(z, asStream[B](tail.scanLeft(op(z, head))(op))) - ) - else super.scanLeft(z)(op)(bf) + override def filterNot(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = true) - /** Returns the stream resulting from applying the given function `f` to each - * element of this stream. This returns a lazy `Stream` such that it does not - * need to be fully realized. - * - * @example {{{ - * $naturalsEx - * naturalsFrom(1).map(_ + 10) take 5 mkString(", ") - * // produces: "11, 12, 13, 14, 15" - * }}} - * - * @tparam B The element type of the returned collection '''That'''. - * @param f function to apply to each element. - * @return `f(a,,0,,), ..., f(a,,n,,)` if this sequence is `a,,0,,, ..., a,,n,,`. - */ - override final def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { - if (isStreamBuilder(bf)) asThat( - if (isEmpty) Stream.Empty - else cons(f(head), asStream[B](tail map f)) - ) - else super.map(f)(bf) + private[immutable] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { + // optimization: drop leading prefix of elems for which f returns false + // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise + var rest: Stream[A] = coll + while (rest.nonEmpty && p(rest.head) == isFlipped) rest = rest.tail + // private utility func to avoid `this` on stack (would be needed for the lazy arg) + if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) + else iterableFactory.empty } - override final def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { - if (!isStreamBuilder(bf)) super.collect(pf)(bf) - else { - // this implementation avoids: - // 1) stackoverflows (could be achieved with tailrec, too) - // 2) out of memory errors for big streams 
(`this` reference can be eliminated from the stack) - var rest: Stream[A] = this + /** A `collection.WithFilter` which allows GC of the head of stream during processing */ + override final def withFilter(p: A => Boolean): collection.WithFilter[A, Stream] = + Stream.withFilter(coll, p) - // Avoids calling both `pf.isDefined` and `pf.apply`. - var newHead: B = null.asInstanceOf[B] - val runWith = pf.runWith((b: B) => newHead = b) + override final def prepended[B >: A](elem: B): Stream[B] = cons(elem, coll) - while (rest.nonEmpty && !runWith(rest.head)) rest = rest.tail + override final def map[B](f: A => B): Stream[B] = + if (isEmpty) iterableFactory.empty + else cons(f(head), tail.map(f)) - // without the call to the companion object, a thunk is created for the tail of the new stream, - // and the closure of the thunk will reference `this` - if (rest.isEmpty) Stream.Empty.asInstanceOf[That] - else Stream.collectedTail(newHead, rest, pf, bf).asInstanceOf[That] + @tailrec override final def collect[B](pf: PartialFunction[A, B]): Stream[B] = + if(isEmpty) Stream.empty + else { + var newHead: B = null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + if(runWith(head)) Stream.collectedTail(newHead, this, pf) + else tail.collect(pf) } - } - /** Applies the given function `f` to each element of this stream, then - * concatenates the results. As with `map` this function does not need to - * realize the entire `Stream` but continues to keep it as a lazy `Stream`. - * - * @example {{{ - * // Let's create a Stream of Vectors, each of which contains the - * // collection of Fibonacci numbers up to the current value. We - * // can then 'flatMap' that Stream. 
- * - * val fibVec: Stream[Vector[Int]] = Vector(0) #:: Vector(0, 1) #:: fibVec.zip(fibVec.tail).map(n => { - * n._2 ++ Vector(n._1.last + n._2.last) - * }) - * - * fibVec take 5 foreach println - * // prints - * // Vector(0) - * // Vector(0, 1) - * // Vector(0, 1, 1) - * // Vector(0, 1, 1, 2) - * // Vector(0, 1, 1, 2, 3) - * - * // If we now want to `flatMap` across that stream by adding 10 - * // we can see what the series turns into: - * - * fibVec.flatMap(_.map(_ + 10)) take 15 mkString(", ") - * // produces: 10, 10, 11, 10, 11, 11, 10, 11, 11, 12, 10, 11, 11, 12, 13 - * }}} - * - * ''Note:'' Currently `flatMap` will evaluate as much of the Stream as needed - * until it finds a non-empty element for the head, which is non-lazy. - * - * @tparam B The element type of the returned collection '''That'''. - * @param f the function to apply on each element. - * @return `f(a,,0,,) ::: ... ::: f(a,,n,,)` if - * this stream is `[a,,0,,, ..., a,,n,,]`. - */ - override final def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - // we assume there is no other builder factory on streams and therefore know that That = Stream[B] - // optimisations are not for speed, but for functionality - // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - if (isStreamBuilder(bf)) asThat( - if (isEmpty) Stream.Empty - else { - // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty - var nonEmptyPrefix = this - var prefix = f(nonEmptyPrefix.head).toStream - while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { - nonEmptyPrefix = nonEmptyPrefix.tail - if(!nonEmptyPrefix.isEmpty) - prefix = f(nonEmptyPrefix.head).toStream - } + @tailrec override final def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if(isEmpty) None + else { + var newHead: B = null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + if(runWith(head)) Some(newHead) + else 
tail.collectFirst(pf) + } - if (nonEmptyPrefix.isEmpty) Stream.empty - else prefix append asStream[B](nonEmptyPrefix.tail flatMap f) + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override final def flatMap[B](f: A => IterableOnce[B]): Stream[B] = + if (isEmpty) iterableFactory.empty + else { + // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty + var nonEmptyPrefix: Stream[A] = coll + var prefix = iterableFactory.from(f(nonEmptyPrefix.head)) + while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { + nonEmptyPrefix = nonEmptyPrefix.tail + if(!nonEmptyPrefix.isEmpty) + prefix = iterableFactory.from(f(nonEmptyPrefix.head)) } - ) - else super.flatMap(f)(bf) - - override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { - // optimization: drop leading prefix of elems for which f returns false - // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise - var rest = this - while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail - // private utility func to avoid `this` on stack (would be needed for the lazy arg) - if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) - else Stream.Empty - } - - /** A FilterMonadic which allows GC of the head of stream during processing */ - @noinline // Workaround scala/bug#9137, see https://github.com/scala/scala/pull/4284#issuecomment-73180791 - override final def withFilter(p: A => Boolean): FilterMonadic[A, Stream[A]] = new Stream.StreamWithFilter(this, p) - - /** A lazier Iterator than LinearSeqLike's. */ - override def iterator: Iterator[A] = new StreamIterator(self) - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. 
- * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying stream as elements - * are consumed. - * @note This function will force the realization of the entire stream - * unless the `f` throws an exception. - */ - @tailrec - override final def foreach[U](f: A => U) { - if (!this.isEmpty) { - f(head) - tail.foreach(f) + if (nonEmptyPrefix.isEmpty) iterableFactory.empty + else prefix.lazyAppendedAll(nonEmptyPrefix.tail.flatMap(f)) } - } - - /** Stream specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override final def foldLeft[B](z: B)(op: (B, A) => B): B = { - if (this.isEmpty) z - else tail.foldLeft(op(z, head))(op) - } - /** Stream specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `f`. 
- */ - override final def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + override final def zip[B](that: collection.IterableOnce[B]): Stream[(A, B)] = + if (this.isEmpty || that.isEmpty) iterableFactory.empty else { - var reducedRes: B = this.head - var left = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail + val thatIterable = that match { + case that: collection.Iterable[B] => that + case _ => LazyList.from(that) } - reducedRes + cons[(A, B)]((this.head, thatIterable.head), this.tail.zip(thatIterable.tail)) } - } - /** Returns all the elements of this stream that satisfy the predicate `p` - * returning of [[scala.Tuple2]] of `Stream`s obeying the partition predicate - * `p`. The order of the elements is preserved. - * - * @param p the predicate used to filter the stream. - * @return the elements of this stream satisfying `p`. - * - * @example {{{ - * $naturalsEx - * val parts = naturalsFrom(1) partition { _ % 2 == 0 } - * parts._1 take 10 mkString ", " - * // produces: "2, 4, 6, 8, 10, 12, 14, 16, 18, 20" - * parts._2 take 10 mkString ", " - * // produces: "1, 3, 5, 7, 9, 11, 13, 15, 17, 19" - * }}} - * - */ - override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) + override final def zipWithIndex: Stream[(A, Int)] = this.zip(LazyList.from(0)) - /** Returns a stream formed from this stream and the specified stream `that` - * by associating each element of the former with the element at the same - * position in the latter. - * - * If one of the two streams is longer than the other, its remaining elements - * are ignored. - * - * The return type of this function may not be obvious. The lazy aspect of - * the returned value is different than that of `partition`. 
In `partition` - * we get back a [[scala.Tuple2]] of two lazy `Stream`s whereas here we get - * back a single lazy `Stream` of [[scala.Tuple2]]s where the - * [[scala.Tuple2]]'s type signature is `(A1, B)`. - * - * @tparam A1 The type of the first parameter of the zipped tuple - * @tparam B The type of the second parameter of the zipped tuple - * @tparam That The type of the returned `Stream`. - * @return `Stream({a,,0,,,b,,0,,}, ..., - * {a,,min(m,n),,,b,,min(m,n),,)}` when - * `Stream(a,,0,,, ..., a,,m,,) - * zip Stream(b,,0,,, ..., b,,n,,)` is invoked. - * - * @example {{{ - * $naturalsEx - * naturalsFrom(1) zip naturalsFrom(2) take 5 foreach println - * // prints - * // (1,2) - * // (2,3) - * // (3,4) - * // (4,5) - * // (5,6) - * }}} - */ - override final def zip[A1 >: A, B, That](that: scala.collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That = - // we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)] - if (isStreamBuilder(bf)) asThat( - if (this.isEmpty || that.isEmpty) Stream.Empty - else cons((this.head, that.head), asStream[(A1, B)](this.tail zip that.tail)) - ) - else super.zip(that)(bf) + protected def tailDefined: Boolean - /** Zips this iterable with its indices. `s.zipWithIndex` is equivalent to `s - * zip s.indices`. - * - * This method is much like `zip` in that it returns a single lazy `Stream` of - * [[scala.Tuple2]]. - * - * @tparam A1 The type of the first element of the [[scala.Tuple2]] in the - * resulting stream. - * @tparam That The type of the resulting `Stream`. 
- * @return `Stream({a,,0,,,0}, ..., {a,,n,,,n)}` - * - * @example {{{ - * $naturalsEx - * (naturalsFrom(1) zipWithIndex) take 5 foreach println - * // prints - * // (1,0) - * // (2,1) - * // (3,2) - * // (4,3) - * // (5,4) - * }}} - */ - override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Stream[A], (A1, Int), That]): That = - this.zip[A1, Int, That](Stream.from(0)) + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * Undefined elements are represented with `"_"`, an undefined tail is represented with `"<not computed>"`, + * and cycles are represented with `"<cycle>"`. + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + force + addStringNoForce(sb.underlying, start, sep, end) + sb + } - /** Write all defined elements of this iterable into given string builder. - * The written text begins with the string `start` and is finished by the string - * `end`. Inside, the string representations of defined elements (w.r.t. - * the method `toString()`) are separated by the string `sep`. The method will - * not force evaluation of undefined elements. A tail of such elements will be - * represented by a `"?"` instead. A cyclic stream is represented by a `"..."` - * at the point where the cycle repeats. - * - * @param b The [[collection.mutable.StringBuilder]] factory to which we need - * to add the string elements. - * @param start The prefix of the resulting string (e.g. 
"Stream(") - * @param sep The separator between elements of the resulting string (e.g. ",") - * @param end The end of the resulting string (e.g. ")") - * @return The original [[collection.mutable.StringBuilder]] containing the - * resulting string. - */ - override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { - b append start - if (!isEmpty) { - b append head + private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): b.type = { + b.append(start) + if (nonEmpty) { + b.append(head) var cursor = this - var n = 1 - if (cursor.tailDefined) { // If tailDefined, also !isEmpty + def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + if (tailDefined) { // If tailDefined, also !isEmpty var scout = tail - if (scout.isEmpty) { - // Single element. Bail out early. - b append end - return b - } if (cursor ne scout) { cursor = scout if (scout.tailDefined) { scout = scout.tail // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings while ((cursor ne scout) && scout.tailDefined) { - b append sep append cursor.head - n += 1 + appendCursorElement() cursor = cursor.tail scout = scout.tail if (scout.tailDefined) scout = scout.tail @@ -698,12 +265,11 @@ sealed abstract class Stream[+A] extends AbstractSeq[A] } if (!scout.tailDefined) { // Not a cycle, scout hit an end while (cursor ne scout) { - b append sep append cursor.head - n += 1 + appendCursorElement() cursor = cursor.tail } if (cursor.nonEmpty) { - b append sep append cursor.head + appendCursorElement() } } else { @@ -729,443 +295,108 @@ sealed abstract class Stream[+A] extends AbstractSeq[A] // advance one first unless runner didn't go anywhere (in which case // we've already looped once). 
if ((cursor eq scout) && (k > 0)) { - b append sep append cursor.head - n += 1 + appendCursorElement() cursor = cursor.tail } while (cursor ne scout) { - b append sep append cursor.head - n += 1 + appendCursorElement() cursor = cursor.tail } - // Subtract prefix length from total length for cycle reporting. - // (Not currently used, but probably a good idea for the future.) - n -= k } } - if (!cursor.isEmpty) { + if (cursor.nonEmpty) { // Either undefined or cyclic; we can check with tailDefined - if (!cursor.tailDefined) b append sep append "?" - else b append sep append "..." + if (!cursor.tailDefined) b.append(sep).append("<not computed>") + else b.append(sep).append("<cycle>") } } - b append end + b.append(end) b } - override def mkString(sep: String): String = mkString("", sep, "") - override def mkString: String = mkString("") - override def mkString(start: String, sep: String, end: String): String = { - this.force - super.mkString(start, sep, end) - } - override def toString = super.mkString(stringPrefix + "(", ", ", ")") - - override def splitAt(n: Int): (Stream[A], Stream[A]) = (take(n), drop(n)) - - /** Returns the `n` first elements of this `Stream` as another `Stream`, or - * else the whole `Stream`, if it has less than `n` elements. - * - * The result of `take` is, again, a `Stream` meaning that it also does not - * make any needless evaluations of the `Stream` itself, delaying that until - * the usage of the resulting `Stream`. - * - * @param n the number of elements to take. - * @return the `n` first elements of this stream. - * - * @example {{{ - * $naturalsEx - * scala> naturalsFrom(5) take 5 - * res1: scala.collection.immutable.Stream[Int] = Stream(5, ?) - * - * scala> naturalsFrom(5) take 5 mkString ", " - * // produces: "5, 6, 7, 8, 9" - * }}} - */ - override def take(n: Int): Stream[A] = ( - // Note that the n == 1 condition appears redundant but is not. 
- // It prevents "tail" from being referenced (and its head being evaluated) - // when obtaining the last element of the result. Such are the challenges - // of working with a lazy-but-not-really sequence. - if (n <= 0 || isEmpty) Stream.empty - else if (n == 1) cons(head, Stream.empty) - else cons(head, tail take n-1) - ) - - @tailrec final override def drop(n: Int): Stream[A] = - if (n <= 0 || isEmpty) this - else tail drop n-1 - - /** A substream starting at index `from` and extending up to (but not including) - * index `until`. This returns a `Stream` that is lazily evaluated. - * - * @param from The index of the first element of the returned subsequence - * @param until The index of the element following the returned subsequence - * @return A new string containing the elements requested from `start` until - * `end`. - * - * @example {{{ - * naturalsFrom(0) slice(50, 60) mkString ", " - * // produces: "50, 51, 52, 53, 54, 55, 56, 57, 58, 59" - * }}} - */ - override def slice(from: Int, until: Int): Stream[A] = { - val lo = from max 0 - if (until <= lo || isEmpty) Stream.empty - else this drop lo take (until - lo) - } - - /** The stream without its last element. - * - * @return A new `Stream` containing everything but the last element. If your - * `Stream` represents an infinite series, this method will not return. - * - * @throws UnsupportedOperationException if the stream is empty. - */ - override def init: Stream[A] = - if (isEmpty) super.init - else if (tail.isEmpty) Stream.Empty - else cons(head, tail.init) - - /** Returns the rightmost `n` elements from this iterable. - * - * @note Take serious caution here. If the `Stream` represents an infinite - * series then this function ''will not return''. The right most elements of - * an infinite series takes an infinite amount of time to produce. - * - * @param n the number of elements to take - * @return The last `n` elements from this `Stream`. 
- */ - override def takeRight(n: Int): Stream[A] = { - var these: Stream[A] = this - var lead = this drop n - while (!lead.isEmpty) { - these = these.tail - lead = lead.tail - } - these - } - /** - * @inheritdoc - * $willTerminateInf - */ - override def dropRight(n: Int): Stream[A] = { - // We make dropRight work for possibly infinite streams by carrying - // a buffer of the dropped size. As long as the buffer is full and the - // rest is non-empty, we can feed elements off the buffer head. When - // the rest becomes empty, the full buffer is the dropped elements. - def advance(stub0: List[A], stub1: List[A], rest: Stream[A]): Stream[A] = { - if (rest.isEmpty) Stream.empty - else if (stub0.isEmpty) advance(stub1.reverse, Nil, rest) - else cons(stub0.head, advance(stub0.tail, rest.head :: stub1, rest.tail)) - } - if (n <= 0) this - else advance((this take n).toList, Nil, this drop n) - } - - /** Returns the longest prefix of this `Stream` whose elements satisfy the - * predicate `p`. - * - * @param p the test predicate. - * @return A new `Stream` representing the values that satisfy the predicate - * `p`. - * - * @example {{{ - + naturalsFrom(0) takeWhile { _ < 5 } mkString ", " - * produces: "0, 1, 2, 3, 4" - * }}} - */ - override def takeWhile(p: A => Boolean): Stream[A] = - if (!isEmpty && p(head)) cons(head, tail takeWhile p) - else Stream.Empty - - /** Returns the a `Stream` representing the longest suffix of this iterable - * whose first element does not satisfy the predicate `p`. - * - * @note This method realizes the entire `Stream` beyond the truth value of - * the predicate `p`. - * - * @param p the test predicate. - * @return A new `Stream` representing the results of applying `p` to the - * original `Stream`. 
- * - * @example {{{ - * // Assume we have a Stream that takes the first 20 natural numbers - * def naturalsLt50(i: Int): Stream[Int] = i #:: { if (i < 20) naturalsLt50(i * + 1) else Stream.Empty } - * naturalsLt50(0) dropWhile { _ < 10 } - * // produces: "10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20" - * }}} - */ - override def dropWhile(p: A => Boolean): Stream[A] = { - var these: Stream[A] = this - while (!these.isEmpty && p(these.head)) these = these.tail - these - } - - /** Builds a new stream from this stream in which any duplicates (as - * determined by `==`) have been removed. Among duplicate elements, only the - * first one is retained in the resulting `Stream`. - * - * @return A new `Stream` representing the result of applying distinctness to - * the original `Stream`. - * @example {{{ - * // Creates a Stream where every element is duplicated - * def naturalsFrom(i: Int): Stream[Int] = i #:: { i #:: naturalsFrom(i + 1) } - * naturalsFrom(1) take 6 mkString ", " - * // produces: "1, 1, 2, 2, 3, 3" - * (naturalsFrom(1) distinct) take 6 mkString ", " - * // produces: "1, 2, 3, 4, 5, 6" - * }}} - */ - override def distinct: Stream[A] = { - // This should use max memory proportional to N, whereas - // recursively calling distinct on the tail is N^2. - def loop(seen: Set[A], rest: Stream[A]): Stream[A] = { - if (rest.isEmpty) rest - else if (seen(rest.head)) loop(seen, rest.tail) - else cons(rest.head, loop(seen + rest.head, rest.tail)) - } - loop(Set(), this) - } - - /** Returns a new sequence of given length containing the elements of this - * sequence followed by zero or more occurrences of given elements. - * - * @tparam B The type of the value to pad with. - * @tparam That The type contained within the resulting `Stream`. - * @param len The number of elements to pad into the `Stream`. - * @param elem The value of the type `B` to use for padding. - * @return A new `Stream` representing the collection with values padding off - * to the end. 
If your `Stream` represents an infinite series, this method will - * not return. - * @example {{{ - * def naturalsFrom(i: Int): Stream[Int] = i #:: { if (i < 5) naturalsFrom(i + 1) else Stream.Empty } - * naturalsFrom(1) padTo(10, 0) foreach println - * // prints - * // 1 - * // 2 - * // 3 - * // 4 - * // 5 - * // 0 - * // 0 - * // 0 - * // 0 - * // 0 - * }}} - */ - override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { - def loop(len: Int, these: Stream[A]): Stream[B] = - if (these.isEmpty) Stream.fill(len)(elem) - else cons(these.head, loop(len - 1, these.tail)) - - if (isStreamBuilder(bf)) asThat(loop(len, this)) - else super.padTo(len, elem)(bf) - } - - /** A list consisting of all elements of this list in reverse order. - * - * @note This function must realize the entire `Stream` in order to perform - * this operation so if your `Stream` represents an infinite sequence then - * this function will never return. - * - * @return A new `Stream` containing the representing of the original `Stream` - * in reverse order. - * - * @example {{{ - * def naturalsFrom(i: Int): Stream[Int] = i #:: { if (i < 5) naturalsFrom(i + 1) else Stream.Empty } - * (naturalsFrom(1) reverse) foreach println - * // prints - * // 5 - * // 4 - * // 3 - * // 2 - * // 1 - * }}} - */ - override def reverse: Stream[A] = { - var result: Stream[A] = Stream.Empty - var these = this - while (!these.isEmpty) { - val r = Stream.consWrapper(result).#::(these.head) - r.tail // force it! - result = r - these = these.tail - } - result - } - - /** Evaluates and concatenates all elements within the `Stream` into a new - * flattened `Stream`. - * - * @tparam B The type of the elements of the resulting `Stream`. - * @return A new `Stream` of type `B` of the flattened elements of `this` - * `Stream`. 
- * @example {{{ - * val sov: Stream[Vector[Int]] = Vector(0) #:: Vector(0, 0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * sov.flatten take 10 mkString ", " - * // produces: "0, 0, 0, 0, 0, 0, 0, 0, 0, 0" - * }}} - */ - override def flatten[B](implicit asTraversable: A => /*<: Stream[A]) { - lazy val v = st - } - - private var these: LazyCell = _ + * @return a string representation of this collection. Undefined elements are + * represented with `"_"`, an undefined tail is represented with `"<not computed>"`, + * and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"Stream(_, <not computed>)"`, a non-empty stream, whose head has not been + * evaluated ; + * - `"Stream(_, 1, _, <not computed>)"`, a stream with at least three elements, + * the second one has been evaluated ; + * - `"Stream(1, 2, 3, <cycle>)"`, an infinite stream that contains + * a cycle at the fourth element. + */ + override def toString = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString - def hasNext: Boolean = these.v.nonEmpty - def next(): A = - if (isEmpty) Iterator.empty.next() + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + override def hasDefiniteSize: Boolean = isEmpty || { + if (!tailDefined) false else { - val cur = these.v - val result = cur.head - these = new LazyCell(cur.tail) - result + // Two-iterator trick (2x & 1x speed) for cycle detection. 
+ var those = this + var these = tail + while (those ne these) { + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected } - override def toStream = { - val result = these.v - these = new LazyCell(Stream.empty) - result } - override def toList = toStream.toList } -/** - * The object `Stream` provides helper functions to manipulate streams. - * - * @author Martin Odersky, Matthias Zenger - * @since 2.8 - */ +@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") +@SerialVersionUID(3L) object Stream extends SeqFactory[Stream] { - /** The factory for streams. - * @note Methods such as map/flatMap will not invoke the `Builder` factory, - * but will return a new stream directly, to preserve laziness. - * The new stream is then cast to the factory's result type. - * This means that every CanBuildFrom that takes a - * Stream as its From type parameter must yield a stream as its result parameter. - * If that assumption is broken, cast errors might result. - */ - class StreamCanBuildFrom[A] extends GenericCanBuildFrom[A] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stream[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Stream[A]]] - private[this] val ReusableCBF = new StreamCanBuildFrom[Any] - - /** Creates a new builder for a stream */ - def newBuilder[A]: Builder[A, Stream[A]] = new StreamBuilder[A] - - /** A builder for streams - * @note This builder is lazy only in the sense that it does not go downs the spine - * of traversables that are added as a whole. If more laziness can be achieved, - * this builder should be bypassed. 
- */ - class StreamBuilder[A] extends LazyBuilder[A, Stream[A]] { - def result: Stream[A] = parts.toStream flatMap (_.toStream) - } - - object Empty extends Stream[Nothing] { - override def isEmpty = true - override def head = throw new NoSuchElementException("head of empty stream") - override def tail = throw new UnsupportedOperationException("tail of empty stream") - def tailDefined = false - } - - /** The empty stream */ - override def empty[A]: Stream[A] = Empty - - /** A stream consisting of given elements */ - override def apply[A](xs: A*): Stream[A] = xs.toStream - - /** A wrapper class that adds `#::` for cons and `#:::` for concat as operations - * to streams. - */ - class ConsWrapper[A](tl: => Stream[A]) { - /** Construct a stream consisting of a given first element followed by elements - * from a lazily evaluated Stream. - */ - def #::[B >: A](hd: B): Stream[B] = cons(hd, tl) - /** Construct a stream consisting of the concatenation of the given stream and - * a lazily evaluated Stream. - */ - def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix append tl - } - - /** A wrapper method that adds `#::` for cons and `#:::` for concat as operations - * to streams. + /* !!! #11997 This `object cons` must be defined lexically *before* `class Cons` below. + * Otherwise it prevents Scala.js from building on Windows. */ - implicit def consWrapper[A](stream: => Stream[A]): ConsWrapper[A] = - new ConsWrapper[A](stream) - - /** An extractor that allows to pattern match streams with `#::`. - */ - object #:: { - def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = - if (xs.isEmpty) None - else Some((xs.head, xs.tail)) - } - /** An alternative way of building and matching Streams using Stream.cons(hd, tl). 
- */ + */ object cons { - /** A stream consisting of a given first element and remaining elements - * @param hd The first element of the result stream - * @param tl The remaining elements of the result stream - */ - def apply[A](hd: A, tl: => Stream[A]) = new Cons(hd, tl) + * @param hd The first element of the result stream + * @param tl The remaining elements of the result stream + */ + def apply[A](hd: A, tl: => Stream[A]): Stream[A] = new Cons(hd, tl) /** Maps a stream to its head and tail */ def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs) } - /** A lazy cons cell, from which streams are built. */ - @SerialVersionUID(-602202424901551803L) - final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] { - override def isEmpty = false - override def head = hd + //@SerialVersionUID(3L) //TODO Putting an annotation on Stream.empty causes a cyclic dependency in unpickling + object Empty extends Stream[Nothing] { + override def isEmpty: Boolean = true + override def head: Nothing = throw new NoSuchElementException("head of empty stream") + override def tail: Stream[Nothing] = throw new UnsupportedOperationException("tail of empty stream") + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. 
+ */ + def force: this.type = this + override def knownSize: Int = 0 + protected def tailDefined: Boolean = false + } + + @SerialVersionUID(3L) + final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] { + override def isEmpty: Boolean = false @volatile private[this] var tlVal: Stream[A] = _ - @volatile private[this] var tlGen = tl _ - def tailDefined: Boolean = tlGen eq null + @volatile private[this] var tlGen = () => tl + protected def tailDefined: Boolean = tlGen eq null override def tail: Stream[A] = { if (!tailDefined) synchronized { @@ -1174,118 +405,165 @@ object Stream extends SeqFactory[Stream] { tlGen = null } } - tlVal } - override /*LinearSeqOptimized*/ - def sameElements[B >: A](that: GenIterable[B]): Boolean = { - @tailrec def consEq(a: Cons[_], b: Cons[_]): Boolean = { - if (a.head != b.head) false - else { - a.tail match { - case at: Cons[_] => - b.tail match { - case bt: Cons[_] => (at eq bt) || consEq(at, bt) - case _ => false - } - case _ => b.tail.isEmpty - } - } - } - that match { - case that: Cons[_] => consEq(this, that) - case _ => super.sameElements(that) + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: Stream[A] = this + if (!these.isEmpty) these = these.tail + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail } + this } + } - /** An infinite stream that repeatedly applies a given function to a start value. 
- * - * @param start the start value of the stream - * @param f the function that's repeatedly applied - * @return the stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A)(f: A => A): Stream[A] = cons(start, iterate(f(start))(f)) + implicit def toDeferrer[A](l: => Stream[A]): Deferrer[A] = new Deferrer[A](() => l) - override def iterate[A](start: A, len: Int)(f: A => A): Stream[A] = - iterate(start)(f) take len + final class Deferrer[A] private[Stream] (private val l: () => Stream[A]) extends AnyVal { + /** Construct a Stream consisting of a given first element followed by elements + * from another Stream. + */ + def #:: [B >: A](elem: B): Stream[B] = new Cons(elem, l()) + /** Construct a Stream consisting of the concatenation of the given Stream and + * another Stream. + */ + def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix lazyAppendedAll l() + } - /** - * Create an infinite stream starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the stream - * @param step the increment value of the stream - * @return the stream starting at value `start`. - */ - def from(start: Int, step: Int): Stream[Int] = - cons(start, from(start+step, step)) + object #:: { + def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = + if (s.nonEmpty) Some((s.head, s.tail)) else None + } - /** - * Create an infinite stream starting at `start` and incrementing by `1`. - * - * @param start the start value of the stream - * @return the stream starting at value `start`. - */ - def from(start: Int): Stream[Int] = from(start, 1) + def from[A](coll: collection.IterableOnce[A]): Stream[A] = coll match { + case coll: Stream[A] => coll + case _ => fromIterator(coll.iterator) + } /** - * Create an infinite stream containing the given element expression (which - * is computed for each occurrence). 
- * - * @param elem the element composing the resulting stream - * @return the stream containing an infinite number of elem - */ - def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) + * @return A `Stream[A]` that gets its elements from the given `Iterator`. + * + * @param it Source iterator + * @tparam A type of elements + */ + // Note that the resulting `Stream` will be effectively iterable more than once because + // `Stream` memoizes its elements + def fromIterator[A](it: Iterator[A]): Stream[A] = + if (it.hasNext) { + new Stream.Cons(it.next(), fromIterator(it)) + } else Stream.Empty - override def fill[A](n: Int)(elem: => A): Stream[A] = - if (n <= 0) Empty else cons(elem, fill(n-1)(elem)) + def empty[A]: Stream[A] = Empty - override def tabulate[A](n: Int)(f: Int => A): Stream[A] = { - def loop(i: Int): Stream[A] = - if (i >= n) Empty else cons(f(i), loop(i+1)) - loop(0) - } + override def newBuilder[A]: mutable.Builder[A, Stream[A]] = ArrayBuffer.newBuilder[A].mapResult(array => from(array)) - override def range[T: Integral](start: T, end: T, step: T): Stream[T] = { - val num = implicitly[Integral[T]] - import num._ + private[immutable] def withFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean): collection.WithFilter[A, Stream] = + new WithFilter[A](l, p) - if (if (step < zero) start <= end else end <= start) Empty - else cons(start, range(start + step, end, step)) + private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] { + private[this] var s = l // set to null to allow GC after filtered + private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter + def map[B](f: A => B): Stream[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => 
Boolean): collection.WithFilter[A, Stream] = new WithFilter(filtered, q) } - private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = { - cons(stream.head, stream.tail.filterImpl(p, isFlipped)) + /** An infinite Stream that repeatedly applies a given function to a start value. + * + * @param start the start value of the Stream + * @param f the function that's repeatedly applied + * @return the Stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A)(f: A => A): Stream[A] = { + cons(start, iterate(f(start))(f)) } - private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = { - cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]]) - } + /** + * Create an infinite Stream starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the Stream + * @param step the increment value of the Stream + * @return the Stream starting at value `start`. + */ + def from(start: Int, step: Int): Stream[Int] = + cons(start, from(start + step, step)) - /** An implementation of `FilterMonadic` allowing GC of the filtered-out elements of - * the `Stream` as it is processed. + /** + * Create an infinite Stream starting at `start` and incrementing by `1`. * - * Because this is not an inner class of `Stream` with a reference to the original - * head, it is now possible for GC to collect any leading and filtered-out elements - * which do not satisfy the filter, while the tail is still processing (see scala/bug#8990). + * @param start the start value of the Stream + * @return the Stream starting at value `start`. 
*/ - private[immutable] final class StreamWithFilter[A](sl: => Stream[A], p: A => Boolean) extends FilterMonadic[A, Stream[A]] { - private var s = sl // set to null to allow GC after filtered - private lazy val filtered = { val f = s filter p; s = null; f } // don't set to null if throw during filter + def from(start: Int): Stream[Int] = from(start, 1) - def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - filtered map f + /** + * Create an infinite Stream containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting Stream + * @return the Stream containing an infinite number of elem + */ + def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) - def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - filtered flatMap f - def foreach[U](f: A => U): Unit = - filtered foreach f + private[Stream] def filteredTail[A](stream: Stream[A] @uncheckedVariance, p: A => Boolean, isFlipped: Boolean) = { + cons(stream.head, stream.tail.filterImpl(p, isFlipped)) + } - def withFilter(q: A => Boolean): FilterMonadic[A, Stream[A]] = - new StreamWithFilter[A](filtered, q) + private[Stream] def collectedTail[A, B](head: B, stream: Stream[A] @uncheckedVariance, pf: PartialFunction[A, B]) = { + cons(head, stream.tail.collect(pf)) } + /** This serialization proxy is used for Streams which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated streams without exhausting the stack through recursive serialization of cons cells. 
+ */ + @SerialVersionUID(3L) + class SerializationProxy[A](@transient protected var coll: Stream[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while(these.nonEmpty && these.tailDefined) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new ArrayBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[Stream[A]] + coll = (init ++: tail) + } + + protected[this] def readResolve(): Any = coll + } } diff --git a/src/library/scala/collection/immutable/StreamView.scala b/src/library/scala/collection/immutable/StreamView.scala deleted file mode 100644 index 843d7084f798..000000000000 --- a/src/library/scala/collection/immutable/StreamView.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -trait StreamView[+A, +Coll] extends StreamViewLike[A, Coll, StreamView[A, Coll]] { } diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala deleted file mode 100644 index c36035934e15..000000000000 --- a/src/library/scala/collection/immutable/StreamViewLike.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import generic._ - -trait StreamViewLike[+A, - +Coll, - +This <: StreamView[A, Coll] with StreamViewLike[A, Coll, This]] -extends SeqView[A, Coll] - with SeqViewLike[A, Coll, This] -{ self => - - override def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = { - self.iterator.toStream.asInstanceOf[That] - } - - trait Transformed[+B] extends StreamView[B, Coll] with super.Transformed[B] { - override def toString = viewToString - } - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */ - private[collection] abstract class AbstractTransformed[+B] extends super.AbstractTransformed[B] with Transformed[B] - - trait EmptyView extends Transformed[Nothing] with super.EmptyView - - trait Forced[B] extends super.Forced[B] with Transformed[B] - - trait Sliced extends super.Sliced with Transformed[A] - - trait Mapped[B] extends super.Mapped[B] with Transformed[B] - - trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] - - trait Appended[B >: A] extends super.Appended[B] with Transformed[B] - - trait Filtered extends super.Filtered with Transformed[A] - - trait TakenWhile extends super.TakenWhile with Transformed[A] - - trait DroppedWhile extends super.DroppedWhile with Transformed[A] - - trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] - - trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] - - trait Reversed extends super.Reversed with Transformed[A] - - trait Patched[B >: A] extends super.Patched[B] with Transformed[B] - - trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] - - /** boilerplate */ - protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] - 
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] - protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B] - protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] - protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] - protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered - protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced - protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile - protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile - protected override def newZipped[B](that: scala.collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B] - protected override def newZippedAll[A1 >: A, B](that: scala.collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = { - new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B] - } - protected override def newReversed: Transformed[A] = new Reversed { } - protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = { - new { val from = _from; val patch = _patch; val replaced = _replaced } with 
AbstractTransformed[B] with Patched[B] - } - - override def stringPrefix = "StreamView" -} diff --git a/src/library/scala/collection/immutable/StrictOptimizedSeqOps.scala b/src/library/scala/collection/immutable/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..90b803d54e70 --- /dev/null +++ b/src/library/scala/collection/immutable/StrictOptimizedSeqOps.scala @@ -0,0 +1,84 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.generic.CommonErrors + +/** Trait that overrides operations to take advantage of strict builders. + */ +trait StrictOptimizedSeqOps[+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with collection.StrictOptimizedSeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A => B): C = { + if (lengthCompare(1) <= 0) coll + else { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B] + val it = this.iterator + var different = false + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next else different = true + } + if (different) builder.result() else coll + } + } + + override def updated[B >: A](index: Int, elem: B): CC[B] = { + if (index < 0) + throw ( + if (knownSize >= 0) CommonErrors.indexOutOfBounds(index = index, max = knownSize) + else CommonErrors.indexOutOfBounds(index = index) + ) + val b = iterableFactory.newBuilder[B] + b.sizeHint(this) + var i = 0 + val it = iterator + while (i < index && it.hasNext) { + b += it.next() + i += 1 + } + if (!it.hasNext) + throw CommonErrors.indexOutOfBounds(index = index, max = i - 1) + b += elem + it.next() + while (it.hasNext) b += it.next() + b.result() + 
} + + override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = { + val b = iterableFactory.newBuilder[B] + var i = 0 + val it = iterator + while (i < from && it.hasNext) { + b += it.next() + i += 1 + } + b ++= other + i = replaced + while (i > 0 && it.hasNext) { + it.next() + i -= 1 + } + while (it.hasNext) b += it.next() + b.result() + } + + override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) + +} diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala deleted file mode 100644 index cd5772284a27..000000000000 --- a/src/library/scala/collection/immutable/StringLike.scala +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import mutable.Builder -import scala.util.matching.Regex -import scala.math.ScalaNumber -import scala.reflect.ClassTag -import java.lang.{StringBuilder => JStringBuilder} - -/** A companion object for the `StringLike` containing some constants. - * @since 2.8 - */ -object StringLike { - // just statics for companion class. - private final val LF = 0x0A - private final val FF = 0x0C - private final val CR = 0x0D - private final val SU = 0x1A -} - -import StringLike._ - -/** A trait describing stringlike collections. - * - * @tparam Repr The type of the actual collection inheriting `StringLike`. 
- * - * @since 2.8 - * @define Coll `String` - * @define coll string - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -trait StringLike[+Repr] extends Any with scala.collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] { -self => - - /** Creates a string builder buffer as builder for this class */ - protected[this] def newBuilder: Builder[Char, Repr] - - /** Return element at index `n` - * @throws IndexOutOfBoundsException if the index is not valid - */ - def apply(n: Int): Char = toString charAt n - - def length: Int = toString.length - - override def mkString = toString - - override def slice(from: Int, until: Int): Repr = { - val start = from max 0 - val end = until min length - - if (start >= end) newBuilder.result() - else (newBuilder ++= toString.substring(start, end)).result() - } - - /** Return the current string concatenated `n` times. - */ - def *(n: Int): String = { - val s0 = toString - var ci = 0 max n - val sb = new JStringBuilder(s0.length * ci) - while (ci > 0) { - sb.append(s0) - ci -= 1 - } - sb.toString - } - - override def compare(other: String) = toString compareTo other - - private def isLineBreak(c: Char) = c == LF || c == FF - - /** - * Strip trailing line end character from this string if it has one. - * - * A line end character is one of - * - `LF` - line feed (`0x0A` hex) - * - `FF` - form feed (`0x0C` hex) - * - * If a line feed character `LF` is preceded by a carriage return `CR` - * (`0x0D` hex), the `CR` character is also stripped (Windows convention). - */ - def stripLineEnd: String = { - val len = toString.length - if (len == 0) toString - else { - val last = apply(len - 1) - if (isLineBreak(last)) - toString.substring(0, if (last == LF && len >= 2 && apply(len - 2) == CR) len - 2 else len - 1) - else - toString - } - } - - /** Return all lines in this string in an iterator, including trailing - * line end characters. 
- * - * This method is analogous to `s.split(EOL).toIterator`, - * except that any existing line endings are preserved in the result strings, - * and the empty string yields an empty iterator. - * - * A line end character is one of - * - `LF` - line feed (`0x0A`) - * - `FF` - form feed (`0x0C`) - */ - def linesWithSeparators: Iterator[String] = new AbstractIterator[String] { - val str = self.toString - private val len = str.length - private var index = 0 - def hasNext: Boolean = index < len - def next(): String = { - if (index >= len) throw new NoSuchElementException("next on empty iterator") - val start = index - while (index < len && !isLineBreak(apply(index))) index += 1 - index += 1 - str.substring(start, index min len) - } - } - - /** Return all lines in this string in an iterator, excluding trailing line - * end characters; i.e., apply `.stripLineEnd` to all lines - * returned by `linesWithSeparators`. - */ // TODO: deprecate on 2.13 to avoid conflict on Java 11, which introduces `String::lines` (this is why `linesIterator` has been un-deprecated) - def lines: Iterator[String] = - linesWithSeparators map (line => new WrappedString(line).stripLineEnd) - - /** Return all lines in this string in an iterator, excluding trailing line - * end characters; i.e., apply `.stripLineEnd` to all lines - * returned by `linesWithSeparators`. - */ - def linesIterator: Iterator[String] = - linesWithSeparators map (line => new WrappedString(line).stripLineEnd) - - /** Returns this string with first character converted to upper case. - * If the first character of the string is capitalized, it is returned unchanged. - * This method does not convert characters outside the Basic Multilingual Plane (BMP). 
- */ - def capitalize: String = - if (toString == null) null - else if (toString.length == 0) "" - else if (toString.charAt(0).isUpper) toString - else { - val chars = toString.toCharArray - chars(0) = chars(0).toUpper - new String(chars) - } - - /** Returns this string with the given `prefix` stripped. If this string does not - * start with `prefix`, it is returned unchanged. - */ - def stripPrefix(prefix: String) = - if (toString.startsWith(prefix)) toString.substring(prefix.length) - else toString - - /** Returns this string with the given `suffix` stripped. If this string does not - * end with `suffix`, it is returned unchanged. - */ - def stripSuffix(suffix: String) = - if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length) - else toString - - /** Replace all literal occurrences of `literal` with the literal string `replacement`. - * This method is equivalent to [[java.lang.String#replace]]. - * - * @param literal the string which should be replaced everywhere it occurs - * @param replacement the replacement string - * @return the resulting string - */ - def replaceAllLiterally(literal: String, replacement: String): String = toString.replace(literal, replacement) - - /** For every line in this string: - * - * Strip a leading prefix consisting of blanks or control characters - * followed by `marginChar` from the line. - */ - def stripMargin(marginChar: Char): String = { - val buf = new StringBuilder - for (line <- linesWithSeparators) { - val len = line.length - var index = 0 - while (index < len && line.charAt(index) <= ' ') index += 1 - buf append - (if (index < len && line.charAt(index) == marginChar) line.substring(index + 1) else line) - } - buf.toString - } - - /** For every line in this string: - * - * Strip a leading prefix consisting of blanks or control characters - * followed by `|` from the line. 
- */ - def stripMargin: String = stripMargin('|') - - private def escape(ch: Char): String = if ( - (ch >= 'a') && (ch <= 'z') || - (ch >= 'A') && (ch <= 'Z') || - (ch >= '0' && ch <= '9')) ch.toString - else "\\" + ch - - /** Split this string around the separator character - * - * If this string is the empty string, returns an array of strings - * that contains a single empty string. - * - * If this string is not the empty string, returns an array containing - * the substrings terminated by the start of the string, the end of the - * string or the separator character, excluding empty trailing substrings - * - * If the separator character is a surrogate character, only split on - * matching surrogate characters if they are not part of a surrogate pair - * - * The behaviour follows, and is implemented in terms of String.split(re: String) - * - * - * @example {{{ - * "a.b".split('.') //returns Array("a", "b") - * - * //splitting the empty string always returns the array with a single - * //empty string - * "".split('.') //returns Array("") - * - * //only trailing empty substrings are removed - * "a.".split('.') //returns Array("a") - * ".a.".split('.') //returns Array("", "a") - * "..a..".split('.') //returns Array("", "", "a") - * - * //all parts are empty and trailing - * ".".split('.') //returns Array() - * "..".split('.') //returns Array() - * - * //surrogate pairs - * val high = 0xD852.toChar - * val low = 0xDF62.toChar - * val highstring = high.toString - * val lowstring = low.toString - * - * //well-formed surrogate pairs are not split - * val highlow = highstring + lowstring - * highlow.split(high) //returns Array(highlow) - * - * //bare surrogate characters are split - * val bare = "_" + highstring + "_" - * bare.split(high) //returns Array("_", "_") - * - * }}} - * - * @param separator the character used as a delimiter - */ - def split(separator: Char): Array[String] = - toString.split(escape(separator)) - - - 
@throws(classOf[java.util.regex.PatternSyntaxException]) - def split(separators: Array[Char]): Array[String] = { - val re = separators.foldLeft("[")(_+escape(_)) + "]" - toString.split(re) - } - - /** You can follow a string with `.r`, turning it into a `Regex`. E.g. - * - * `"""A\w*""".r` is the regular expression for identifiers starting with `A`. - */ - def r: Regex = r() - - /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`, - * with group names g1 through gn. - * - * `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates - * and provides its subcomponents through groups named "month", "day" and - * "year". - * - * @param groupNames The names of the groups in the pattern, in the order they appear. - */ - def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*) - - /** - * @throws java.lang.IllegalArgumentException If the string does not contain a parsable `Boolean`. - */ - def toBoolean: Boolean = parseBoolean(toString) - /** - * Parse as a `Byte` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Byte`. - */ - def toByte: Byte = java.lang.Byte.parseByte(toString) - /** - * Parse as a `Short` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Short`. - */ - def toShort: Short = java.lang.Short.parseShort(toString) - /** - * Parse as an `Int` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Int`. - */ - def toInt: Int = java.lang.Integer.parseInt(toString) - /** - * Parse as a `Long` (string must contain only decimal digits and optional leading `-`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Long`. 
- */ - def toLong: Long = java.lang.Long.parseLong(toString) - /** - * Parse as a `Float` (surrounding whitespace is removed with a `trim`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Float`. - * @throws java.lang.NullPointerException If the string is null. - */ - def toFloat: Float = java.lang.Float.parseFloat(toString) - /** - * Parse as a `Double` (surrounding whitespace is removed with a `trim`). - * @throws java.lang.NumberFormatException If the string does not contain a parsable `Double`. - * @throws java.lang.NullPointerException If the string is null. - */ - def toDouble: Double = java.lang.Double.parseDouble(toString) - - private def parseBoolean(s: String): Boolean = - if (s != null) s.toLowerCase match { - case "true" => true - case "false" => false - case _ => throw new IllegalArgumentException("For input string: \""+s+"\"") - } - else - throw new IllegalArgumentException("For input string: \"null\"") - - override def toArray[B >: Char : ClassTag]: Array[B] = - toString.toCharArray.asInstanceOf[Array[B]] - - private def unwrapArg(arg: Any): AnyRef = arg match { - case x: ScalaNumber => x.underlying - case x => x.asInstanceOf[AnyRef] - } - - /** Uses the underlying string as a pattern (in a fashion similar to - * printf in C), and uses the supplied arguments to fill in the - * holes. - * - * The interpretation of the formatting patterns is described in - * [[java.util.Formatter]], with the addition that - * classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and - * [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter` - * understands. - * - * @param args the arguments used to instantiating the pattern. - * @throws java.lang.IllegalArgumentException - */ - def format(args : Any*): String = - java.lang.String.format(toString, args map unwrapArg: _*) - - /** Like `format(args*)` but takes an initial `Locale` parameter - * which influences formatting as in `java.lang.String`'s format. 
- * - * The interpretation of the formatting patterns is described in - * [[java.util.Formatter]], with the addition that - * classes deriving from `ScalaNumber` (such as `scala.BigInt` and - * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter` - * understands. - * - * @param l an instance of `java.util.Locale` - * @param args the arguments used to instantiating the pattern. - * @throws java.lang.IllegalArgumentException - */ - def formatLocal(l: java.util.Locale, args: Any*): String = - java.lang.String.format(l, toString, args map unwrapArg: _*) -} diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala deleted file mode 100644 index 0a7feff309ee..000000000000 --- a/src/library/scala/collection/immutable/StringOps.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import mutable.StringBuilder - -/** This class serves as a wrapper providing [[scala.Predef.String]]s with all - * the operations found in indexed sequences. Where needed, `String`s are - * implicitly converted into instances of this class. - * - * The difference between this class and `WrappedString` is that calling transformer - * methods such as `filter` and `map` will yield a `String` object, whereas a - * `WrappedString` will remain a `WrappedString`. - * - * @param repr the actual representation of this string operations object. 
- * - * @since 2.8 - * @define Coll `String` - * @define coll string - */ -final class StringOps(override val repr: String) extends AnyVal with StringLike[String] { - - override protected[this] def thisCollection: WrappedString = new WrappedString(repr) - override protected[this] def toCollection(repr: String): WrappedString = new WrappedString(repr) - - /** Creates a string builder buffer as builder for this class */ - override protected[this] def newBuilder = StringBuilder.newBuilder - - override def apply(index: Int): Char = repr charAt index - override def slice(from: Int, until: Int): String = { - val start = if (from < 0) 0 else from - if (until <= start || start >= repr.length) - return "" - - val end = if (until > length) length else until - repr.substring(start, end) - } - override def toString = repr - override def length = repr.length - - def seq = new WrappedString(repr) -} diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala deleted file mode 100644 index 56a54a41e375..000000000000 --- a/src/library/scala/collection/immutable/Traversable.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import generic._ -import mutable.Builder - -/** A trait for traversable collections that are guaranteed immutable. - * $traversableInfo - * @define mutability immutable - * - * @define usesMutableState - * - * Note: Despite being an immutable collection, the implementation uses mutable state internally during - * construction. These state changes are invisible in single-threaded code but can lead to race conditions - * in some multi-threaded scenarios. 
The state of a new collection instance may not have been "published" - * (in the sense of the Java Memory Model specification), so that an unsynchronized non-volatile read from - * another thread may observe the object in an invalid state (see - * [[https://github.com/scala/bug/issues/7838 scala/bug#7838]] for details). Note that such a read is not - * guaranteed to ''ever'' see the written object at all, and should therefore not be used, regardless - * of this issue. The easiest workaround is to exchange values between threads through a volatile var. - */ -trait Traversable[+A] extends scala.collection.Traversable[A] -// with GenTraversable[A] - with GenericTraversableTemplate[A, Traversable] - with TraversableLike[A, Traversable[A]] - with Immutable { - override def companion: GenericCompanion[Traversable] = Traversable - override def seq: Traversable[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `List`. - * @define coll immutable traversable collection - * @define Coll `immutable.Traversable` - */ -object Traversable extends TraversableFactory[Traversable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Traversable[A]] = new mutable.ListBuffer -} diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 9a83a0f9936e..970e9a174440 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,291 +14,277 @@ package scala package collection package immutable -import java.io.IOException - -import generic._ -import immutable.{NewRedBlackTree => RB} -import mutable.Builder import scala.annotation.tailrec -import scala.runtime.{AbstractFunction1, AbstractFunction2} -import scala.util.hashing.MurmurHash3 +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{RedBlackTree => RB} +import scala.collection.mutable.ReusableBuilder +import scala.runtime.AbstractFunction2 + +/** An immutable SortedMap whose values are stored in a red-black tree. + * + * This class is optimal when range queries will be performed, + * or when traversal in order of an ordering is desired. + * If you only need key lookups, and don't care in which order key-values + * are traversed in, consider using * [[scala.collection.immutable.HashMap]], + * which will generally have better performance. If you need insertion order, + * consider a * [[scala.collection.immutable.SeqMap]], which does not need to + * have an ordering supplied. 
+ * + * @example {{{ + * import scala.collection.immutable.TreeMap + * + * // Make a TreeMap via the companion object factory + * val weekdays = TreeMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * @param ordering the implicit ordering used to compare objects of type `A`. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. 
+ * + * @define Coll immutable.TreeMap + * @define coll immutable tree map + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + def this()(implicit ordering: Ordering[K]) = this(null)(ordering) + private[immutable] def tree0: RB.Tree[K, V] = tree + + private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) + + override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap + + def iterator: Iterator[(K, V)] = RB.iterator(tree) + + def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) + + override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) + + def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) + + override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( + size, tree, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => 
DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } -/** $factoryInfo - * @define Coll immutable.TreeMap - * @define coll immutable tree map - */ -object TreeMap extends ImmutableSortedMapFactory[TreeMap] { - def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B] + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) + } + s.asInstanceOf[S with EfficientSplit] + } - override def newBuilder[A, B](implicit ord: Ordering[A]): mutable.Builder[(A, B), TreeMap[A, B]] = new TreeMapBuilder + def get(key: K): Option[V] = RB.get(tree, key) + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val resultOrNull = RB.lookup(tree, key) + if (resultOrNull eq null) default + else resultOrNull.value + } - private class TreeMapBuilder[A, B](implicit ordering: Ordering[A]) - extends RB.MapHelper[A, B] - with Builder[(A, B), TreeMap[A, B]] { - type Tree = RB.Tree[A, B] - private var tree:Tree = null + // override for performance -- no Some allocation + override def apply(key: K): V = { + 
val resultOrNull = RB.lookup(tree, key) + if (resultOrNull eq null) default(key) + else resultOrNull.value + } - def +=(elem: (A, B)): this.type = { - tree = mutableUpd(tree, elem._1, elem._2) - this - } - private object adder extends AbstractFunction2[A, B, Unit] { - // we cache tree to avoid the outer access to tree - // in the hot path (apply) - private[this] var accumulator :Tree = null - def addForEach(hasForEach: HasForeachEntry[A, B]): Unit = { - accumulator = tree - hasForEach.foreachEntry(this) - tree = accumulator - // be friendly to GC - accumulator = null - } + // override for performance -- no Some allocation + override def contains(key: K): Boolean = RB.contains(tree, key) - override def apply(key: A, value: B): Unit = { - accumulator = mutableUpd(accumulator, key, value) - } - } + def removed(key: K): TreeMap[K,V] = + newMapOrSelf(RB.delete(tree, key)) - override def ++=(xs: TraversableOnce[(A, B)]): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeMap[A, B] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0) - case that: HasForeachEntry[A, B] => - //add avoiding creation of tuples - adder.addForEach(that) - case _ => - super.++=(xs) - } - this - } + def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = + newMapOrSelf(RB.update(tree, key, value, overwrite = true)) - override def clear(): Unit = { - tree = null - } + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = + newMapOrSelf(that match { + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => + RB.union(tree, tm.tree) + case ls: LinearSeq[(K,V1)] => + if (ls.isEmpty) tree //to avoid the creation of the adder + else { + val adder = new Adder[V1] + 
adder.addAll(ls) + adder.finalTree + } + case _ => + val adder = new Adder[V1] + val it = that.iterator + while (it.hasNext) { + adder.apply(it.next()) + } + adder.finalTree + }) - override def result(): TreeMap[A, B] = new TreeMap[A, B](beforePublish(tree)) + override def removedAll(keys: IterableOnce[K]): TreeMap[K, V] = keys match { + case ts: TreeSet[K] if ordering == ts.ordering => + newMapOrSelf(RB.difference(tree, ts.tree)) + case _ => super.removedAll(keys) } - private val legacySerialisation = System.getProperty("scala.collection.immutable.TreeMap.newSerialisation", "false") == "false" - - @SerialVersionUID(-5672253444750945796L) - private class TreeMapProxy[A, B]( - @transient private[this] var tree: RB.Tree[A, B], - @transient private[this] var ordering: Ordering[A]) extends Serializable { - - @throws[IOException] - private[this] def writeObject(out: java.io.ObjectOutputStream) = { - out.writeInt(RB.count(tree)) - out.writeObject(ordering) - RB.foreachEntry(tree, { - (k: A, v: B) => - out.writeObject(k) - out.writeObject(v) - }) - } - @throws[IOException] - private[this] def readObject(in: java.io.ObjectInputStream) = { - val size = in.readInt() - ordering = in.readObject().asInstanceOf[Ordering[A]] - size match { - case 0 => //tree is null already - case 1 => - val key = in.readObject().asInstanceOf[A] - val value = in.readObject().asInstanceOf[B] - tree = RB.update(null, key, value, true)(ordering) - case _ => - val keys = new Array[Any](size) - val values = new Array[Any](size) - var i = 0 - while (i < size) { - keys(i) = in.readObject() - values(i) = in.readObject() - i += 1 - } - tree = RB.fromOrderedEntries( - keys.iterator.asInstanceOf[Iterator[A]], - values.iterator.asInstanceOf[Iterator[B]], - size) - } - } - @throws[IOException] - private[this] def readResolve(): AnyRef = - new TreeMap(tree)(ordering) + + /** A new TreeMap with the entry added is returned, + * assuming that key is not in the TreeMap. 
+ * + * @tparam V1 type of the values of the new bindings, a supertype of `V` + * @param key the key to be inserted + * @param value the value to be associated with `key` + * @return a new $coll with the inserted binding, if it wasn't present in the map + */ + @deprecated("Use `updated` instead", "2.13.0") + def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { + assert(!RB.contains(tree, key)) + updated(key, value) } -} + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) -/** This class implements immutable maps using a tree. - * - * @tparam A the type of the keys contained in this tree map. - * @tparam B the type of the values associated with the keys. - * @param ordering the implicit ordering used to compare objects of type `A`. - * - * @author Erik Stenman - * @author Matthias Zenger - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. - * - * @define Coll immutable.TreeMap - * @define coll immutable tree map - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(4714724050750123970L) -final class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A]) - extends SortedMap[A, B] - with SortedMapLike[A, B, TreeMap[A, B]] - with MapLike[A, B, TreeMap[A, B]] - with Serializable - with HasForeachEntry[A, B] { - // Manually use this from inner classes to avoid having scalac rename `tree` to an expanded name which is not - // serialization compatible. 
- private[immutable] def tree0: RB.Tree[A, B] = tree - - override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = - TreeMap.newBuilder[A, B] - - override def size = RB.count(tree) - - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - - override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = newMapOrSelf[B](RB.rangeImpl(tree, from, until)) - override def range(from: A, until: A): TreeMap[A, B] = newMapOrSelf[B](RB.range(tree, from, until)) - override def from(from: A): TreeMap[A, B] = newMapOrSelf[B](RB.from(tree, from)) - override def to(to: A): TreeMap[A, B] = newMapOrSelf[B](RB.to(tree, to)) - override def until(until: A): TreeMap[A, B] = newMapOrSelf[B](RB.until(tree, until)) - - override def firstKey = RB.smallest(tree).key - override def lastKey = RB.greatest(tree).key - override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) - - override def head = { + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + override def size: Int = RB.count(tree) + override def knownSize: Int = size + + override def isEmpty = size == 0 + + override def firstKey: K = RB.smallest(tree).key + + override def lastKey: K = RB.greatest(tree).key + + override def head: (K, V) = { val smallest = RB.smallest(tree) (smallest.key, smallest.value) } - override def headOption = if (RB.isEmpty(tree)) None else Some(head) - override def last = { + + override def last: (K, V) = { val greatest = RB.greatest(tree) (greatest.key, 
greatest.value) } - override def lastOption = if (RB.isEmpty(tree)) None else Some(last) - override def tail = newMapOrSelf(RB.delete(tree, firstKey)) - override def init = newMapOrSelf(RB.delete(tree, lastKey)) + override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) - override def drop(n: Int) = { + override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) + + override def drop(n: Int): TreeMap[K, V] = { if (n <= 0) this else if (n >= size) empty - else newMapOrSelf(RB.drop(tree, n)) + else new TreeMap(RB.drop(tree, n)) } - override def take(n: Int) = { + override def take(n: Int): TreeMap[K, V] = { if (n <= 0) empty else if (n >= size) this - else newMapOrSelf(RB.take(tree, n)) - } - - private def newMapOrSelf[B1 >: B](newTree: RB.Tree[A, B1]): TreeMap[A, B1] = { - if (newTree eq tree) this - else new TreeMap(newTree) + else new TreeMap(RB.take(tree, n)) } override def slice(from: Int, until: Int) = { if (until <= from) empty else if (from <= 0) take(until) else if (until >= size) drop(from) - else newMapOrSelf(RB.slice(tree, from, until)) + else new TreeMap(RB.slice(tree, from, until)) } - override def dropRight(n: Int) = take(size - math.max(n, 0)) - override def takeRight(n: Int) = drop(size - math.max(n, 0)) - override def splitAt(n: Int) = (take(n), drop(n)) + override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) - private[this] def countWhile(p: ((A, B)) => Boolean): Int = { + override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: ((K, V)) => Boolean): Int = { var result = 0 val it = iterator while (it.hasNext && p(it.next())) result += 1 result } - override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p)) - override def takeWhile(p: ((A, B)) => Boolean) = take(countWhile(p)) - override def span(p: ((A, B)) => Boolean) = splitAt(countWhile(p)) - /** A factory to create empty maps of the same type of keys. 
- */ - override def empty: TreeMap[A, B] = newMapOrSelf(null) + override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) - /** A new TreeMap with the entry added is returned, - * if key is not in the TreeMap, otherwise - * the key is updated with the new entry. - * - * @tparam B1 type of the value of the new binding which is a supertype of `B` - * @param key the key that should be updated - * @param value the value to be associated with `key` - * @return a new $coll with the updated binding - */ - override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = - newMapOrSelf(RB.update(tree, key, value, overwrite = true)) + override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) - /** Add a key/value pair to this map. - * @tparam B1 type of the value of the new binding, a supertype of `B` - * @param kv the key/value pair - * @return A new $coll with the new binding added to this map - */ - override def + [B1 >: B] (kv: (A, B1)): TreeMap[A, B1] = updated(kv._1, kv._2) + override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) - /** Adds two or more elements to this collection and returns - * either the collection itself (if it is mutable), or a new collection - * with the added elements. - * - * @tparam B1 type of the values of the new bindings, a supertype of `B` - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new $coll with the updated bindings - */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): TreeMap[A, B1] = - this + elem1 + elem2 ++ elems + override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = + newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. 
- */ - override def ++[B1 >: B] (xs: GenTraversableOnce[(A, B1)]): TreeMap[A, B1] = { - xs match { - case tm: TreeMap[A, B] if ordering == tm.ordering => - newMapOrSelf(RB.union(tree, tm.tree0)) - case ls: LinearSeq[(A,B1)] => - if (ls.isEmpty) this //to avoid the creation of the adder - else { - val adder = new Adder[B1] - adder addAll ls - newMapOrSelf(adder.finalTree) - } - case _ => - val adder = new Adder[B1] - xs foreach adder - newMapOrSelf(adder.finalTree) - } + override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { + val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) + (newMapOrSelf(l), newMapOrSelf(r)) + } + + override def transform[W](f: (K, V) => W): TreeMap[K, W] = { + val t2 = RB.transform[K, V, W](tree, f) + if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] + else new TreeMap(t2) } - private final class Adder[B1 >: B] - extends RB.MapHelper[A, B1] with Function1[(A, B1), Unit] { - private var currentMutableTree: RB.Tree[A,B1] = tree0 + + private final class Adder[B1 >: V] + extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { + private var currentMutableTree: RB.Tree[K,B1] = tree0 def finalTree = beforePublish(currentMutableTree) - override def apply(kv: (A, B1)): Unit = { + override def apply(kv: (K, B1)): Unit = { currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) } - @tailrec def addAll(ls: LinearSeq[(A, B1)]): Unit = { + @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { if (!ls.isEmpty) { val kv = ls.head currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) @@ -306,129 +292,89 @@ final class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: } } } - - - /** A new TreeMap with the entry added is returned, - * assuming that key is not in the TreeMap. 
- * - * @tparam B1 type of the values of the new bindings, a supertype of `B` - * @param key the key to be inserted - * @param value the value to be associated with `key` - * @return a new $coll with the inserted binding, if it wasn't present in the map - */ - def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { - assert(!RB.contains(tree, key)) - newMapOrSelf(RB.update(tree, key, value, overwrite = true)) + override def equals(obj: Any): Boolean = obj match { + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case _ => super.equals(obj) } - def - (key:A): TreeMap[A, B] = - newMapOrSelf(RB.delete(tree, key)) + override protected[this] def className = "TreeMap" +} - private[collection] def removeAllImpl(xs: GenTraversableOnce[A]): TreeMap[A, B] = xs match { - case ts: TreeSet[A] if ordering == ts.ordering => - newMapOrSelf(RB.difference(tree, ts.tree)) - case _ => - //TODO add an implementation of a mutable subtractor similar to ++ - //but at least this doesnt create a TreeMap for each iteration - object sub extends AbstractFunction1[A, Unit] { - var currentTree = tree0 - override def apply(k: A): Unit = { - currentTree = RB.delete(currentTree, k) +/** $factoryInfo + * @define Coll immutable.TreeMap + * @define coll immutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() + + def from[K, V](it: IterableOnce[(K, V)])(implicit ordering: Ordering[K]): TreeMap[K, V] = + it match { + case tm: TreeMap[K, V] if ordering == tm.ordering => tm + case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => + new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) + case _ => + var t: RB.Tree[K, V] = null + val i = it.iterator + while (i.hasNext) { + val (k, v) = i.next() + t = RB.update(t, k, v, overwrite = true) } - } - xs.foreach(sub) - newMapOrSelf(sub.currentTree) - } - - - /** Check if this 
map maps `key` to a value and return the - * value if it exists. - * - * @param key the key of the mapping of interest - * @return the value of the mapping, if it exists - */ - override def get(key: A): Option[B] = RB.get(tree, key) - override def getOrElse[V1 >: B](key: A, default: => V1): V1 = { - val resultOrNull = RB.lookup(tree, key) - if (resultOrNull eq null) default - else resultOrNull.value - } - - - /** Creates a new iterator over all elements contained in this - * object. - * - * @return the new iterator - */ - override def iterator: Iterator[(A, B)] = RB.iterator(tree) - override def iteratorFrom(start: A): Iterator[(A, B)] = RB.iterator(tree, Some(start)) - - override def keysIterator: Iterator[A] = RB.keysIterator(tree) - override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def valuesIterator: Iterator[B] = RB.valuesIterator(tree) - override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start)) - - override def contains(key: A): Boolean = RB.contains(tree, key) - override def isDefinedAt(key: A): Boolean = RB.contains(tree, key) - - override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f) - - override private[immutable] def foreachEntry[U](f: (A, B) => U): Unit = RB.foreachEntry(tree, f) - - override def hashCode(): Int = { - if (isEmpty) { - MurmurHash3.emptyMapHash - } else { - val hasher = new Map.HashCodeAccumulator() - RB.foreachEntry(tree, hasher) - hasher.finalizeHash + new TreeMap[K, V](t) } - } - override def keySet: SortedSet[A] = new TreeSet[A](tree)(ordering) - override def equals(obj: Any): Boolean = obj match { - case that: TreeMap[A, B] if ordering == that.ordering => RB.entriesEqual(tree, that.tree0) - case _ => super.equals(obj) - } + def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] - override def values: scala.Iterable[B] = new DefaultValuesIterable { - override def 
foreach[U](f: B => U): Unit = RB.foreachEntry(tree0, {(_: A, value: B) => f(value)}) - } - override private[scala] def filterImpl(f: ((A, B)) => Boolean, isFlipped: Boolean) = - newMapOrSelf(RB.filterEntries[A, B](tree, (k, v) => isFlipped ^ f((k, v)))) + private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) + extends RB.MapHelper[K, V] + with ReusableBuilder[(K, V), TreeMap[K, V]] { + type Tree = RB.Tree[K, V] + private var tree:Tree = null - override def partition(p: ((A, B)) => Boolean): (TreeMap[A, B], TreeMap[A, B]) = { - val (l, r) = RB.partitionEntries[A, B](tree, (k, v) => p((k, v))) - (newMapOrSelf(l), newMapOrSelf(r)) - } - private def sameCBF(bf: CanBuildFrom[_,_,_]): Boolean = { - bf match { - case cbf: TreeMap.SortedMapCanBuildFrom[_, _] => - val factory:AnyRef = cbf.factory - ((factory eq TreeMap) || (factory eq immutable.SortedMap) || (factory eq collection.SortedMap)) && - cbf.ordering == ordering - case w: WrappedCanBuildFrom[_,_,_] => sameCBF(w.wrapped) - case _ => false + def addOne(elem: (K, V)): this.type = { + tree = mutableUpd(tree, elem._1, elem._2) + this } - } + private object adder extends AbstractFunction2[K, V, Unit] { + // we cache tree to avoid the outer access to tree + // in the hot path (apply) + private[this] var accumulator :Tree = null + def addForEach(hasForEach: collection.Map[K, V]): Unit = { + accumulator = tree + hasForEach.foreachEntry(this) + tree = accumulator + // be friendly to GC + accumulator = null + } + override def apply(key: K, value: V): Unit = { + accumulator = mutableUpd(accumulator, key, value) + } + } - override def transform[W, That](f: (A, B) => W)(implicit bf: CanBuildFrom[TreeMap[A, B], (A, W), That]): That = { - if (sameCBF(bf)) - newMapOrSelf(RB.transform[A, B, W](tree, f)).asInstanceOf[That] - else super.transform(f) - } + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the 
moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeMap[K, V] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0) + case that: collection.Map[K, V] => + //add avoiding creation of tuples + adder.addForEach(that) + case _ => + super.addAll(xs) + } + this + } - @throws[IOException] - private[this] def writeReplace(): AnyRef = - if (TreeMap.legacySerialisation) this else new TreeMap.TreeMapProxy(tree, ordering) + override def clear(): Unit = { + tree = null + } - @throws[IOException] - private[this] def writeObject(out: java.io.ObjectOutputStream) = { - out.writeObject(ordering) - out.writeObject(immutable.RedBlackTree.from(tree)) + override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree)) } - - } diff --git a/src/library/scala/collection/immutable/TreeSeqMap.scala b/src/library/scala/collection/immutable/TreeSeqMap.scala new file mode 100644 index 000000000000..4eaa8487b6ff --- /dev/null +++ b/src/library/scala/collection/immutable/TreeSeqMap.scala @@ -0,0 +1,650 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec + +/** This class implements an immutable map that preserves order using + * a hash map for the key to value mapping to provide efficient lookup, + * and a tree for the ordering of the keys to provide efficient + * insertion/modification order traversal and destructuring. 
+ * + * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) + * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) + * can be used instead if so specified at creation. + * + * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method + * can be used to switch to the specified ordering for the returned map. + * + * A key can be manually refreshed (i.e. placed at the end) via the + * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in + * use). + * + * Internally, an ordinal counter is increased for each insertion/modification + * and then the current ordinal is used as key in the tree map. After 2^32^ + * insertions/modifications the entire map is copied (thus resetting the ordinal + * counter). + * + * @tparam K the type of the keys contained in this map. + * @tparam V the type of the values associated with the keys in this map. + * @define coll immutable tree seq map + * @define Coll `immutable.TreeSeqMap` + */ +final class TreeSeqMap[K, +V] private ( + private val ordering: TreeSeqMap.Ordering[K], + private val mapping: TreeSeqMap.Mapping[K, V], + private val ordinal: Int, + val orderedBy: TreeSeqMap.OrderBy) + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] + with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { + + import TreeSeqMap._ + + override protected[this] def className: String = "TreeSeqMap" + + override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap + + override val size = mapping.size + + override def knownSize: Int = size + + override def isEmpty = size == 0 + + /* + // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible + // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. 
+ override def empty = TreeSeqMap.empty[K, V](orderedBy) + */ + + def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == this.orderedBy) this + else if (isEmpty) TreeSeqMap.empty(orderBy) + else new TreeSeqMap(ordering, mapping, ordinal, orderBy) + } + + def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { + mapping.get(key) match { + case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => + // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. + TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) + case Some((o, _)) if orderedBy == OrderBy.Insertion => + new TreeSeqMap( + ordering.include(o, key), + mapping.updated[(Int, V1)](key, (o, value)), + ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. + orderedBy) + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + case None => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + } + } + + def removed(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + new TreeSeqMap( + ordering.exclude(o), + mapping.removed(key), + ordinal, + orderedBy) + case None => + this + } + } + + def refresh(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping, + o1, + orderedBy) + case None => + this + } + } + + def get(key: K): Option[V] = mapping.get(key).map(value) + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): (K, V) = binding(iter.next()) + } + + override def keysIterator: Iterator[K] = 
new AbstractIterator[K] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): K = iter.next() + } + + override def valuesIterator: Iterator[V] = new AbstractIterator[V] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): V = value(binding(iter.next())) + } + + override def contains(key: K): Boolean = mapping.contains(key) + + override def head: (K, V) = binding(ordering.head) + + override def headOption = ordering.headOption.map(binding) + + override def last: (K, V) = binding(ordering.last) + + override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) + + override def tail: TreeSeqMap[K, V] = { + val (head, tail) = ordering.headTail + new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) + } + + override def init: TreeSeqMap[K, V] = { + val (init, last) = ordering.initLast + new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) + } + + override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { + val sz = size + if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) + else { + val sz = size + val f = if (from >= 0) from else 0 + val u = if (until <= sz) until else sz + val l = u - f + if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) + else if (l > sz / 2) { + // Remove front and rear incrementally if majority of elements are to be kept + val (front, rest) = ordering.splitAt(f) + val (ong, rear) = rest.splitAt(l) + var mng = this.mapping + val frontIter = front.iterator + while (frontIter.hasNext) { + mng = mng - frontIter.next() + } + val rearIter = rear.iterator + while (rearIter.hasNext) { + mng = mng - rearIter.next() + } + new TreeSeqMap(ong, mng, ordinal, orderedBy) + } else { + // Populate with builder otherwise + val bdr = newBuilder[K, V](orderedBy) + val iter = ordering.iterator + var i = 0 + while (i < f) { + iter.next() + i += 1 + } + while (i < u) { + val k = iter.next() + 
bdr.addOne((k, mapping(k)._2)) + i += 1 + } + bdr.result() + } + } + } + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val (k2, v2) = f((k, v)) + bdr.addOne((k2, v2)) + } + bdr.result() + } + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val jter = f((k, v)).iterator + while (jter.hasNext) { + val (k2, v2) = jter.next() + bdr.addOne((k2, v2)) + } + } + bdr.result() + } + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) + } + bdr.result() + } + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): TreeSeqMap[K, V2] = { + var ong: Ordering[K] = ordering + var mng: Mapping[K, V2] = mapping + var ord = increment(ordinal) + val iter = suffix.iterator + while (iter.hasNext) { + val (k, v2) = iter.next() + mng.get(k) match { + case Some((o, v)) => + if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) + else if (orderedBy == OrderBy.Modification) { + mng = mng.updated(k, (ord, v2)) + ong = ong.exclude(o).append(ord, k) + ord = increment(ord) + } + case None => + mng = mng.updated(k, (ord, v2)) + ong = ong.append(ord, k) + ord = increment(ord) + } + } + new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) + } + + @`inline` private[this] def value(p: (_, V)) = p._2 + @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k) +} +object TreeSeqMap extends MapFactory[TreeSeqMap] { + sealed trait OrderBy + object OrderBy { 
+ case object Insertion extends OrderBy + case object Modification extends OrderBy + } + + private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) + private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) + val Empty = EmptyByInsertion + def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) + def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == OrderBy.Modification) EmptyByModification + else EmptyByInsertion + }.asInstanceOf[TreeSeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): TreeSeqMap[K, V] = + it match { + case om: TreeSeqMap[K, V] => om + case _ => (newBuilder[K, V] ++= it).result() + } + + @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 + + def newBuilder[K, V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[K, V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + + final class Builder[K, V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + private[this] val bdr = new MapBuilderImpl[K, (Int, V)] + private[this] var ong = Ordering.empty[K] + private[this] var ord = 0 + private[this] var aliased: TreeSeqMap[K, V] = _ + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + bdr.getOrElse(key, null) match { + case (o, v) => + if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value)) + else if (orderedBy == OrderBy.Modification) { + bdr.addOne(key, (ord, value)) + ong = ong.exclude(o).appendInPlace(ord, key) + ord = increment(ord) + } + case null => + bdr.addOne(key, (ord, value)) + ong = ong.appendInPlace(ord, key) + ord = increment(ord) + } + } + this + } + + override def 
clear(): Unit = { + ong = Ordering.empty + ord = 0 + bdr.clear() + aliased = null + } + + override def result(): TreeSeqMap[K, V] = { + if (aliased eq null) { + aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) + } + aliased + } + } + + private type Mapping[K, +V] = Map[K, (Int, V)] + @annotation.unused + private val Mapping = Map + + /* The ordering implementation below is an adapted version of immutable.IntMap. */ + private[immutable] object Ordering { + import scala.collection.generic.BitOperations.Int.{Int => _, _} + + @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}" + + def empty[T] : Ordering[T] = Zero + + def apply[T](elems: (Int, T)*): Ordering[T] = + elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2)) + + // Iterator over a non-empty Ordering. + final class Iterator[+V](it: Ordering[V]) { + // Basically this uses a simple stack to emulate conversion over the tree. However + // because we know that Ints are at least 32 bits we can have at most 32 Bins and + // one Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 33 + private[this] var index = 0 + private[this] val buffer = new Array[AnyRef](33) + + private[this] def pop = { + index -= 1 + buffer(index).asInstanceOf[Ordering[V]] + } + + private[this] def push[V2 >: V](x: Ordering[V2]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + + if (it != Zero) push(it) + + def hasNext = index > 0 + @tailrec + def next(): V = + if (!hasNext) scala.collection.Iterator.empty.next() + else pop match { + case Bin(_,_, Tip(_, v), right) => + push(right) + v + case Bin(_, _, left, right) => + push(right) + push(left) + next() + case Tip(_, v) => v + // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering + // and don't return an Ordering.Iterator for Ordering.Zero. 
+ case Zero => throw new IllegalStateException("empty subtree not allowed") + } + } + + object Iterator { + val Empty = new Iterator[Nothing](Ordering.empty[Nothing]) + def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]] + } + + case object Zero extends Ordering[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. + override def equals(that : Any): Boolean = that match { + case _: this.type => true + case _: Ordering[_] => false // The only empty Orderings are eq Nil + case _ => super.equals(that) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø" + } + + final case class Tip[+T](ord: Int, value: T) extends Ordering[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]] + else Tip(ord, s) + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" + } + + final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { + def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] + else Bin[S](prefix, mask, left, right) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = { + sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n" + left.format(sb, subPrefix + "├── ", subPrefix + "│ ") + right.format(sb, subPrefix + "└── ", subPrefix + " ") + } + } + + private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = { + val m = branchMask(p1, p2) + val p = 
mask(p1, m) + if (zero(p1, m)) Bin(p, m, t1, t2) + else Bin(p, m, t2, t1) + } + + private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match { + case (l, Zero) => l + case (Zero, r) => r + case (l, r) => Bin(prefix, mask, l, r) + } + } + + sealed abstract class Ordering[+T] { + import Ordering._ + import scala.annotation.tailrec + import scala.collection.generic.BitOperations.Int._ + + override final def toString: String = format + final def format: String = { + val sb = new StringBuilder + format(sb, "", "") + sb.toString() + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit + + @tailrec + final def head: T = this match { + case Zero => throw new NoSuchElementException("head of empty map") + case Tip(k, v) => v + case Bin(_, _, l, _) => l.head + } + + @tailrec + final def headOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, l, _) => l.headOption + } + + @tailrec + final def last: T = this match { + case Zero => throw new NoSuchElementException("last of empty map") + case Tip(_, v) => v + case Bin(_, _, _, r) => r.last + } + + @tailrec + final def lastOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, _, r) => r.lastOption + } + + @tailrec + final def ordinal: Int = this match { + case Zero => 0 + case Tip(o, _) => o + case Bin(_, _, _, r) => r.ordinal + } + + final def tail: Ordering[T] = this match { + case Zero => throw new NoSuchElementException("tail of empty map") + case Tip(_, _) => Zero + case Bin(p, m, l, r) => bin(p, m, l.tail, r) + } + + final def headTail: (T, Ordering[T]) = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, v) => (v, Zero) + case Bin(p, m, l, r) => + val (head, tail) = l.headTail + (head, bin(p, m, tail, r)) + } + + final def init: Ordering[T] = this match { + case Zero => throw new 
NoSuchElementException("init of empty map") + case Tip(_, _) => Zero + case Bin(p, m, l, r) => + bin(p, m, l, r.init) + } + + final def initLast: (Ordering[T], T) = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, v) => (Zero, v) + case Bin(p, m, l, r) => + val (init, last) = r.initLast + (bin(p, m, l, init), last) + } + + final def iterator: Iterator[T] = this match { + case Zero => Iterator.empty + case _ => new Iterator(this) + } + + final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r) + else Bin(p, m, l, r.include(ordinal, value)) + } + + final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else Bin(p, m, l, r.append(ordinal, value)) + } + + @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value) + private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) if o >= ordinal => + throw new IllegalArgumentException(s"Append called with ordinal out of range: $o is not greater than current max ordinal ${this.ordinal}") + case Tip(o, _) if parent == 
null => + join(ordinal, Tip(ordinal, value), o, this) + case Tip(o, _) => + parent.right = join(ordinal, Tip(ordinal, value), o, this) + parent + case b @ Bin(p, m, _, r) => + if (!hasMatch(ordinal, p, m)) { + val b2 = join(ordinal, Tip(ordinal, value), p, this) + if (parent != null) { + parent.right = b2 + parent + } else b2 + } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else { + r.appendInPlace1(b, ordinal, value) + this + } + } + + final def exclude(ordinal: Int): Ordering[T] = this match { + case Zero => + Zero + case Tip(o, _) => + if (ordinal == o) Zero + else this + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) this + else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r) + else bin(p, m, l, r.exclude(ordinal)) + } + + final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { + var rear = Ordering.empty[T] + var i = n + (modifyOrRemove { (o, v) => + i -= 1 + if (i >= 0) Some(v) + else { + rear = rear.appendInPlace(o, v) + None + } + }, rear) + } + + /** + * A combined transform and filter function. Returns an `Ordering` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value) == Some(x)` the + * map contains `(key, x)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
+ */ + final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { + case Zero => Zero + case Tip(key, value) => + f(key, value) match { + case None => Zero + case Some(value2) => + // hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] + else Tip(key, value2) + } + case Bin(prefix, mask, left, right) => + val l = left.modifyOrRemove(f) + val r = right.modifyOrRemove(f) + if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] + else bin(prefix, mask, l, r) + } + } +} diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index a9825258a705..4348f62ece74 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,143 +14,58 @@ package scala package collection package immutable -import java.io.IOException - -import generic._ -import immutable.{NewRedBlackTree => RB} -import mutable.Builder +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.collection.immutable.{RedBlackTree => RB} import scala.runtime.AbstractFunction1 -/** $factoryInfo - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - */ -object TreeSet extends ImmutableSortedSetFactory[TreeSet] { - implicit def implicitBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = newBuilder[A](ordering) - override def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = - new TreeSetBuilder - - /** The empty set of this type - */ - def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A] - private class TreeSetBuilder[A](implicit ordering: Ordering[A]) - extends RB.SetHelper[A] - with Builder[A, TreeSet[A]] { - type Tree = RB.Tree[A, Any] - private [this] var tree:RB.Tree[A, Any] = null - override def +=(elem: A): this.type = { - tree = mutableUpd(tree, elem) - this - } +/** This class implements immutable sorted sets using a tree. + * + * @tparam A the type of the elements contained in this tree set + * @param ordering the implicit ordering used to compare objects of type `A` + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. 
+ * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { - override def ++=(xs: TraversableOnce[A]): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeSet[A] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree - else tree = RB.union(beforePublish(tree), ts.tree)(ordering) - case ts: TreeMap[A, _] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) - case _ => - super.++=(xs) - } - this - } + if (ordering eq null) throw new NullPointerException("ordering must not be null") - override def clear(): Unit = { - tree = null - } + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) - } - private val legacySerialisation = System.getProperty("scala.collection.immutable.TreeSet.newSerialisation", "false") == "false" - - @SerialVersionUID(-8462554036344260506L) - private class TreeSetProxy[A]( - @transient private[this] var tree: RB.Tree[A, Any], - @transient private[this] var ordering: Ordering[A]) extends Serializable { - - @throws[IOException] - private[this] def writeObject(out: java.io.ObjectOutputStream) = { - out.writeInt(RB.count(tree)) - out.writeObject(ordering) - 
RB.foreachKey(tree, out.writeObject) - } - @throws[IOException] - private[this] def readObject(in: java.io.ObjectInputStream) = { - val size = in.readInt() - ordering = in.readObject().asInstanceOf[Ordering[A]] - size match { - case 0 => //tree is null already - case 1 => - val entry = in.readObject().asInstanceOf[A] - tree = RB.update(null, entry, (), false)(ordering) - case _ => - val entries = new Array[Any](size) - var i = 0 - while (i < size) { - entries(i) = in.readObject() - i += 1 - } - tree = RB.fromOrderedEntries( - entries.iterator.asInstanceOf[Iterator[A]], - unitsIterator, - size) - } - } - @throws[IOException] - private[this] def readResolve(): AnyRef = - new TreeSet(tree)(ordering) - } - private[this] object unitsIterator extends AbstractIterator[Unit] { - override def hasNext: Boolean = true - override def next(): Unit = () - } -} + override def sortedIterableFactory: TreeSet.type = TreeSet -/** This class implements immutable sets using a tree. - * - * @tparam A the type of the elements contained in this tree set - * @param ordering the implicit ordering used to compare objects of type `A` - * - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. 
- * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-5685982407650748405L) -final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) - extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { + private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) + + override def size: Int = RB.count(tree) - if (ordering eq null) - throw new NullPointerException("ordering must not be null") + override def isEmpty = size == 0 - override def stringPrefix = "TreeSet" + override def head: A = RB.smallest(tree).key - override def size = RB.count(tree) + override def last: A = RB.greatest(tree).key - override def head = RB.smallest(tree).key - override def headOption = if (RB.isEmpty(tree)) None else Some(head) - override def last = RB.greatest(tree).key - override def lastOption = if (RB.isEmpty(tree)) None else Some(last) + override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) - override def tail = new TreeSet(RB.delete(tree, firstKey)) - override def init = new TreeSet(RB.delete(tree, lastKey)) + override def init: TreeSet[A] = new TreeSet(RB.init(tree)) override def min[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord == ordering) && nonEmpty) { + if ((ord eq ordering) && nonEmpty) { head } else { super.min(ord) @@ -158,35 +73,35 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ } override def max[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord == ordering) && nonEmpty) { + if ((ord eq ordering) && nonEmpty) { last } else { super.max(ord) } } - override def drop(n: Int) = { + override def drop(n: Int): TreeSet[A] = { if (n <= 0) this else if (n >= size) empty - else newSetOrSelf(RB.drop(tree, n)) + else new TreeSet(RB.drop(tree, n)) } - 
override def take(n: Int) = { + override def take(n: Int): TreeSet[A] = { if (n <= 0) empty else if (n >= size) this - else newSetOrSelf(RB.take(tree, n)) + else new TreeSet(RB.take(tree, n)) } - override def slice(from: Int, until: Int) = { + override def slice(from: Int, until: Int): TreeSet[A] = { if (until <= from) empty else if (from <= 0) take(until) else if (until >= size) drop(from) - else newSetOrSelf(RB.slice(tree, from, until)) + else new TreeSet(RB.slice(tree, from, until)) } - override def dropRight(n: Int) = take(size - math.max(n, 0)) - override def takeRight(n: Int) = drop(size - math.max(n, 0)) - override def splitAt(n: Int) = (take(n), drop(n)) + override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) private[this] def countWhile(p: A => Boolean): Int = { var result = 0 @@ -194,103 +109,81 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ while (it.hasNext && p(it.next())) result += 1 result } - override def dropWhile(p: A => Boolean) = drop(countWhile(p)) - override def takeWhile(p: A => Boolean) = take(countWhile(p)) - override def span(p: A => Boolean) = splitAt(countWhile(p)) + override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) + override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) - private def newSetOrSelf(t: RB.Tree[A, Any]) = { - if (t eq this.tree) this - else new TreeSet[A](t) - } + override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) - /** A factory to create empty sets of the same type of keys. - */ - override def empty = newSetOrSelf(null) + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - /** Creates a new `TreeSet` with the entry added. - * - * @param elem a new element to add. 
- * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def + (elem: A): TreeSet[A] = newSetOrSelf(RB.update(tree, elem, (), overwrite = false)) - - /** A new `TreeSet` with the entry added is returned, - * assuming that elem is not in the TreeSet. - * - * @param elem a new element to add. - * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def insert(elem: A): TreeSet[A] = { - assert(!RB.contains(tree, elem)) - newSetOrSelf(RB.update(tree, elem, (), overwrite = false)) + override def minAfter(key: A): Option[A] = { + val v = RB.minAfter(tree, key) + if (v eq null) Option.empty else Some(v.key) } - /** Creates a new `TreeSet` with the entry removed. - * - * @param elem a new element to add. - * @return a new $coll containing all the elements of this $coll except `elem`. - */ - def - (elem:A): TreeSet[A] = - if (!RB.contains(tree, elem)) this - else newSetOrSelf(RB.delete(tree, elem)) - - /** Checks if this set contains element `elem`. - * - * @param elem the element to check for membership. - * @return true, iff `elem` is contained in this set. - */ - def contains(elem: A): Boolean = RB.contains(tree, elem) + override def maxBefore(key: A): Option[A] = { + val v = RB.maxBefore(tree, key) + if (v eq null) Option.empty else Some(v.key) + } - /** Creates a new iterator over all elements contained in this - * object. 
- * - * @return the new iterator - */ def iterator: Iterator[A] = RB.keysIterator(tree) - override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - override def foreach[U](f: A => U) = RB.foreachKey(tree, f) + def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) - override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) - override def from(from: A): TreeSet[A] = newSetOrSelf(RB.from(tree, from)) - override def to(to: A): TreeSet[A] = newSetOrSelf(RB.to(tree, to)) - override def until(until: A): TreeSet[A] = newSetOrSelf(RB.until(tree, until)) - - override def firstKey = head - override def lastKey = last - - private def sameCBF(bf: CanBuildFrom[_,_,_]): Boolean = { - bf match { - case cbf: TreeSet.SortedSetCanBuildFrom[_] => - val factory:AnyRef = cbf.factory - ((factory eq TreeSet) || (factory eq immutable.SortedSet) || (factory eq collection.SortedSet)) && - cbf.ordering == ordering - case w: WrappedCanBuildFrom[_,_,_] => sameCBF(w.wrapped) - case _ => false + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[A, Any] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key)) } + s.asInstanceOf[S with EfficientSplit] } - private [collection] def addAllTreeSetImpl(ts: TreeSet[A]): TreeSet[A] = { - assert (ordering == 
ts.ordering) - newSetOrSelf(RB.union(tree, ts.tree)) - } + /** Checks if this set contains element `elem`. + * + * @param elem the element to check for membership. + * @return true, iff `elem` is contained in this set. + */ + def contains(elem: A): Boolean = RB.contains(tree, elem) + + override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) - private[scala] def addAllImpl[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[TreeSet[A], B, That]): That = { - that match { - case ts: TreeSet[A] if sameCBF(bf) => - newSetOrSelf(RB.union(tree, ts.tree)).asInstanceOf[That] + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) + + /** Creates a new `TreeSet` with the entry added. + * + * @param elem a new element to add. + * @return a new $coll containing `elem` and all the elements of this $coll. + */ + def incl(elem: A): TreeSet[A] = + newSetOrSelf(RB.update(tree, elem, null, overwrite = false)) + + /** Creates a new `TreeSet` with the entry removed. + * + * @param elem a new element to add. + * @return a new $coll containing all the elements of this $coll except `elem`. 
+ */ + def excl(elem: A): TreeSet[A] = + newSetOrSelf(RB.delete(tree, elem)) + + override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { + val t = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + RB.union(tree, ts.tree) case _ => - val b = bf(repr) - b ++= thisCollection - b ++= that.seq - b.result + val it = that.iterator + var t = tree + while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) + t } + newSetOrSelf(t) } - private [collection] def removeAll(xs : GenTraversableOnce[A]): TreeSet[A] = xs match { + override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { case ts: TreeSet[A] if ordering == ts.ordering => newSetOrSelf(RB.difference(tree, ts.tree)) case _ => @@ -302,12 +195,25 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ currentTree = RB.delete(currentTree, k) } } - xs.foreach(sub) + that.iterator.foreach(sub) newSetOrSelf(sub.currentTree) } - override private[scala] def filterImpl(f: A => Boolean, isFlipped: Boolean) = - newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => isFlipped ^ f(k)})) + override def intersect(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.intersect(tree, ts.tree)) + case _ => + super.intersect(that) + } + + override def diff(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + super.diff(that) + } + + override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) @@ -315,17 +221,76 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ } override def equals(obj: Any): Boolean = obj match { - case that: TreeSet[A] if ordering == 
that.ordering => RB.keysEqual(tree, that.tree) + case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) case _ => super.equals(obj) } - @throws[IOException] - private[this] def writeReplace(): AnyRef = - if (TreeSet.legacySerialisation) this else new TreeSet.TreeSetProxy(tree, ordering) + override protected[this] def className = "TreeSet" +} + +/** + * $factoryInfo + * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] + + def from[E](it: scala.collection.IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => ts + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (Ordering.Int isReverseOf ordering) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) + // The cast is needed to compile with Dotty: + // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound + new TreeSet[E](tree) + case _ => + var t: RB.Tree[E, Null] = null + val i = it.iterator + while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] + private class TreeSetBuilder[A](implicit ordering: Ordering[A]) + extends RB.SetHelper[A] + with ReusableBuilder[A, TreeSet[A]] { + type Tree = RB.Tree[A, Any] + private [this] var tree:RB.Tree[A, Any] = null + + override def addOne(elem: A): this.type = { + tree = mutableUpd(tree, elem) + this + } + + override def addAll(xs: IterableOnce[A]): this.type = { + xs match { + // TODO 
consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeSet[A] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree + else tree = RB.union(beforePublish(tree), ts.tree)(ordering) + case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } - @throws[IOException] - private[this] def writeObject(out: java.io.ObjectOutputStream) = { - out.writeObject(ordering) - out.writeObject(immutable.RedBlackTree.from(tree)) + override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) } } diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala deleted file mode 100644 index d1998ca4d1a1..000000000000 --- a/src/library/scala/collection/immutable/TrieIterator.scala +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 } -import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 } -import scala.annotation.unchecked.{ uncheckedVariance => uV } -import scala.annotation.tailrec - -/** Abandons any pretense of type safety for speed. You can't say I - * didn't try: see r23934. 
- */ -private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) extends AbstractIterator[T] { - outer => - - private[immutable] def getElem(x: AnyRef): T - - def initDepth = 0 - def initArrayStack: Array[Array[Iterable[T @uV]]] = new Array[Array[Iterable[T]]](6) - def initPosStack = new Array[Int](6) - def initArrayD: Array[Iterable[T @uV]] = elems - def initPosD = 0 - def initSubIter: Iterator[T] = null // to traverse collision nodes - - private[this] var depth = initDepth - private[this] var arrayStack: Array[Array[Iterable[T @uV]]] = initArrayStack - private[this] var posStack = initPosStack - private[this] var arrayD: Array[Iterable[T @uV]] = initArrayD - private[this] var posD = initPosD - private[this] var subIter = initSubIter - - private[this] def getElems(x: Iterable[T]): Array[Iterable[T]] = (x match { - case x: HashTrieMap[_, _] => x.elems - case x: HashTrieSet[_] => x.elems - }).asInstanceOf[Array[Iterable[T]]] - - private[this] def collisionToArray(x: Iterable[T]): Array[Iterable[T]] = (x match { - case x: HashMapCollision1[_, _] => x.kvs.map(x => HashMap(x)).toArray - case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray - }).asInstanceOf[Array[Iterable[T]]] - - private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T]) - - private def isTrie(x: AnyRef) = x match { - case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true - case _ => false - } - private def isContainer(x: AnyRef) = x match { - case _: HashMap1[_, _] | _: HashSet1[_] => true - case _ => false - } - - final class DupIterator(xs: Array[Iterable[T]]) extends { - override val initDepth = outer.depth - override val initArrayStack: Array[Array[Iterable[T @uV]]] = outer.arrayStack - override val initPosStack = outer.posStack - override val initArrayD: Array[Iterable[T @uV]] = outer.arrayD - override val initPosD = outer.posD - override val initSubIter = outer.subIter - } with TrieIterator[T](xs) { - final override def getElem(x: AnyRef): T = 
outer.getElem(x) - } - - def dupIterator: TrieIterator[T] = new DupIterator(elems) - - private[this] def newIterator(xs: Array[Iterable[T]]) = new TrieIterator(xs) { - final override def getElem(x: AnyRef): T = outer.getElem(x) - } - - private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) = - (newIterator(arr), arr.map(_.size).sum) - - private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = { - val (fst, snd) = arr.splitAt(arr.length / 2) - - (iteratorWithSize(snd), newIterator(fst)) - } - private[this] def splitArray(ad: Array[Iterable[T]]): SplitIterators = - if (ad.length > 1) arrayToIterators(ad) - else ad(0) match { - case _: HashMapCollision1[_, _] | _: HashSetCollision1[_] => - arrayToIterators(collisionToArray(ad(0))) - case _ => - splitArray(getElems(ad(0))) - } - - def hasNext = (subIter ne null) || depth >= 0 - def next(): T = { - if (subIter ne null) { - val el = subIter.next() - if (!subIter.hasNext) - subIter = null - el - } else - next0(arrayD, posD) - } - - @tailrec private[this] def next0(elems: Array[Iterable[T]], i: Int): T = { - if (i == elems.length-1) { // reached end of level, pop stack - depth -= 1 - if (depth >= 0) { - arrayD = arrayStack(depth) - posD = posStack(depth) - arrayStack(depth) = null - } else { - arrayD = null - posD = 0 - } - } else - posD += 1 - - val m = elems(i) - - // Note: this block is over twice as fast written this way as it is - // as a pattern match. Haven't started looking into why that is, but - // it's pretty sad the pattern matcher is that much slower. 
- if (isContainer(m)) - getElem(m) // push current pos onto stack and descend - else if (isTrie(m)) { - if (depth >= 0) { - arrayStack(depth) = arrayD - posStack(depth) = posD - } - depth += 1 - arrayD = getElems(m) - posD = 0 - next0(getElems(m), 0) - } - else { - subIter = m.iterator - next() - } - // The much slower version: - // - // m match { - // case _: HashMap1[_, _] | _: HashSet1[_] => - // getElem(m) // push current pos onto stack and descend - // case _: HashTrieMap[_,_] | _: HashTrieSet[_] => - // if (depth >= 0) { - // arrayStack(depth) = arrayD - // posStack(depth) = posD - // } - // depth += 1 - // arrayD = getElems(m) - // posD = 0 - // next0(getElems(m), 0) - // case _ => - // subIter = m.iterator - // next - // } - } - - // assumption: contains 2 or more elements - // splits this iterator into 2 iterators - // returns the 1st iterator, its number of elements, and the second iterator - def split: SplitIterators = { - // 0) simple case: no elements have been iterated - simply divide arrayD - if (arrayD != null && depth == 0 && posD == 0) - return splitArray(arrayD) - - // otherwise, some elements have been iterated over - // 1) collision case: if we have a subIter, we return subIter and elements after it - if (subIter ne null) { - val buff = subIter.toBuffer - subIter = null - ((buff.iterator, buff.length), this) - } - else { - // otherwise find the topmost array stack element - if (depth > 0) { - // 2) topmost comes before (is not) arrayD - // steal a portion of top to create a new iterator - if (posStack(0) == arrayStack(0).length - 1) { - // 2a) only a single entry left on top - // this means we have to modify this iterator - pop topmost - val snd = Array[Iterable[T]](arrayStack(0).last) - val szsnd = snd(0).size - // modify this - pop - depth -= 1 - 1 until arrayStack.length foreach (i => arrayStack(i - 1) = arrayStack(i)) - arrayStack(arrayStack.length - 1) = Array[Iterable[T]](null) - posStack = posStack.tail ++ Array[Int](0) - // we know that 
`this` is not empty, since it had something on the arrayStack and arrayStack elements are always non-empty - ((newIterator(snd), szsnd), this) - } else { - // 2b) more than a single entry left on top - val (fst, snd) = arrayStack(0).splitAt(arrayStack(0).length - (arrayStack(0).length - posStack(0) + 1) / 2) - arrayStack(0) = fst - (iteratorWithSize(snd), this) - } - } else { - // 3) no topmost element (arrayD is at the top) - // steal a portion of it and update this iterator - if (posD == arrayD.length - 1) { - // 3a) positioned at the last element of arrayD - val m = arrayD(posD) - arrayToIterators( - if (isTrie(m)) getElems(m) - else collisionToArray(m) - ) - } - else { - // 3b) arrayD has more free elements - val (fst, snd) = arrayD.splitAt(arrayD.length - (arrayD.length - posD + 1) / 2) - arrayD = fst - (iteratorWithSize(snd), this) - } - } - } - } -} diff --git a/src/library/scala/collection/immutable/VM.java b/src/library/scala/collection/immutable/VM.java deleted file mode 100644 index e528088e7578..000000000000 --- a/src/library/scala/collection/immutable/VM.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.immutable; - - -import java.lang.invoke.MethodHandle; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; -import java.lang.reflect.Field; - -// Backport from scala.runtime moved into s.c.immutable and made package private to -// avoid the need for MiMa whitelisting. 
-/* package */ abstract class VM { - static void releaseFence() throws Throwable { - RELEASE_FENCE.invoke(); - } - - private VM() { - } - - static final MethodHandle RELEASE_FENCE; - - static { - RELEASE_FENCE = mkHandle(); - } - - private static MethodHandle mkHandle() { - MethodHandles.Lookup lookup = MethodHandles.lookup(); - try { - return lookup.findStatic(Class.forName("java.lang.invoke.VarHandle"), "releaseFence", MethodType.methodType(Void.TYPE)); - } catch (ClassNotFoundException e) { - try { - Class unsafeClass = Class.forName("sun.misc.Unsafe"); - return lookup.findVirtual(unsafeClass, "storeFence", MethodType.methodType(void.class)).bindTo(findUnsafe(unsafeClass)); - } catch (NoSuchMethodException | ClassNotFoundException | IllegalAccessException e1) { - ExceptionInInitializerError error = new ExceptionInInitializerError(e1); - error.addSuppressed(e); - throw error; - } - } catch (NoSuchMethodException | IllegalAccessException e) { - throw new ExceptionInInitializerError(e); - } - } - - private static Object findUnsafe(Class unsafeClass) throws IllegalAccessException { - Object found = null; - for (Field field : unsafeClass.getDeclaredFields()) { - if (field.getType() == unsafeClass) { - field.setAccessible(true); - found = field.get(null); - break; - } - } - if (found == null) throw new IllegalStateException("No instance of Unsafe found"); - return found; - } -} diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 17968591c553..f38cdbc77b5d 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,1118 +10,2474 @@ * additional information regarding copyright ownership. 
*/ -package scala -package collection +package scala.collection package immutable +import java.lang.Math.{abs, max => mmax, min => mmin} +import java.util.Arrays.{copyOf, copyOfRange} +import java.util.{Arrays, Spliterator} + +import scala.annotation.switch import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic._ -import scala.collection.mutable.{Builder, ReusableBuilder} -import scala.collection.parallel.immutable.ParVector +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.{CommonErrors, DefaultSerializable} +import scala.collection.immutable.VectorInline._ +import scala.collection.immutable.VectorStatics._ +import scala.collection.mutable.ReusableBuilder + + +/** $factoryInfo + * @define Coll `Vector` + * @define coll vector + */ +@SerialVersionUID(3L) +object Vector extends StrictOptimizedSeqFactory[Vector] { + + def empty[A]: Vector[A] = Vector0 + + def from[E](it: collection.IterableOnce[E]): Vector[E] = + it match { + case v: Vector[E] => v + case _ => + val knownSize = it.knownSize + if (knownSize == 0) empty[E] + else if (knownSize > 0 && knownSize <= WIDTH) { + val a1: Arr1 = it match { + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => + as.unsafeArray.asInstanceOf[Arr1] + case it: Iterable[E] => + val a1 = new Arr1(knownSize) + @annotation.unused val copied = it.copyToArray(a1.asInstanceOf[Array[Any]]) + //assert(copied == knownSize) + a1 + case _ => + val a1 = new Arr1(knownSize) + @annotation.unused val copied = it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) + //assert(copied == knownSize) + a1.asInstanceOf[Arr1] + } + new Vector1[E](a1) + } else { + (newBuilder ++= it).result() + } + } -/** Companion object to the Vector class - */ -object Vector extends IndexedSeqFactory[Vector] { - def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = - 
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - private[immutable] val NIL = new Vector[Nothing](0, 0, 0) - override def empty[A]: Vector[A] = NIL - - // Constants governing concat strategy for performance - private final val Log2ConcatFaster = 5 - private final val TinyAppendFaster = 2 - private val emptyIterator: VectorIterator[Nothing] = new VectorIterator[Nothing](0, 0) + def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] + + /** Create a Vector with the same element at each index. + * + * Unlike `fill`, which takes a by-name argument for the value and can thereby + * compute different values for each index, this method guarantees that all + * elements are identical. This allows sparse allocation in O(log n) time and space. + */ + private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { + //TODO Make public; this method is private for now because it is not forward binary compatible + if(n <= 0) Vector0 + else { + val b = new VectorBuilder[A] + b.initSparse(n, elem) + b.result() + } + } + + private val defaultApplyPreferredMaxLength: Int = + // explicit StringOps to avoid initialization cycle with Predef (scala/bug#13009) + try new StringOps(System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", + "250")).toInt + catch { + case _: SecurityException => 250 + } + + private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) } -// in principle, most members should be private. however, access privileges must -// be carefully chosen to not prevent method inlining /** Vector is a general-purpose, immutable data structure. It provides random access and updates - * in effectively constant time, as well as very fast append and prepend. Because vectors strike - * a good balance between fast random selections and fast random functional updates, they are - * currently the default implementation of immutable indexed sequences. 
It is backed by a little - * endian bit-mapped vector trie with a branching factor of 32. Locality is very good, but not - * contiguous, which is good for very large sequences. - * - * $usesMutableState - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#vectors "Scala's Collection Library overview"]] - * section on `Vectors` for more information. - * - * @tparam A the element type - * - * @define Coll `Vector` - * @define coll vector - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `Vector[B]` because an implicit of type `CanBuildFrom[Vector, B, That]` - * is defined in object `Vector`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `Vector`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-1334388273712300479L) -final class Vector[+A] private[immutable] (private[collection] val startIndex: Int, private[collection] val endIndex: Int, focus: Int) -extends AbstractSeq[A] - with IndexedSeq[A] - with GenericTraversableTemplate[A, Vector] - with IndexedSeqLike[A, Vector[A]] - with VectorPointer[A @uncheckedVariance] - with Serializable - with CustomParallelizable[A, ParVector[A]] -{ self => + * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). + * Because vectors strike a good balance between fast random selections and fast random functional updates, + * they are currently the default implementation of immutable indexed sequences. + * + * Vectors are implemented by radix-balanced finger trees of width 32. 
There is a separate subclass + * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the + * top level). + * + * Tree balancing: + * - Only the first dimension of an array may have a size < WIDTH + * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up + * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 + * - `prefix1` and `suffix1` are never empty + * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches + * the prefix). The level is increased/decreased when the affected side plus main data is already full/empty + * - All arrays are left-aligned and truncated + * + * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running + * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. + */ +sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, Vector, Vector[A]] + with StrictOptimizedSeqOps[A, Vector, Vector[A]] + with IterableFactoryDefaults[A, Vector] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Vector] = Vector + + override final def length: Int = + if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 + else prefix1.length + + override final def iterator: Iterator[A] = + if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator + else new NewVectorIterator(this, length, vectorSliceCount) + + override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { + var i = 0 + val len = prefix1.length + while (i != len) { + if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { + // each 1 bit indicates that index passes the filter. 
+ // all indices < i are also assumed to pass the filter + var bitmap = 0 + var j = i + 1 + while (j < len) { + if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { + bitmap |= (1 << j) + } + j += 1 + } + val newLen = i + java.lang.Integer.bitCount(bitmap) + + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + var k = 0 + while(k < i) { + b.addOne(prefix1(k).asInstanceOf[A]) + k += 1 + } + k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + b.addOne(prefix1(k).asInstanceOf[A]) + i += 1 + } + k += 1 + } + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + return b.result() + } else { + if (newLen == 0) return Vector0 + val newData = new Array[AnyRef](newLen) + System.arraycopy(prefix1, 0, newData, 0, i) + var k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + newData(i) = prefix1(k) + i += 1 + } + k += 1 + } + return new Vector1[A](newData) + } + } + i += 1 + } + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + b.initFrom(prefix1) + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + b.result() + } else this + } + + // Dummy overrides to refine result types for binary compatibility: + override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) + override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) + override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): Vector[B] = { + val k = prefix.knownSize + if (k == 0) this + else if (k < 0) super.prependedAll(prefix) + else prependedAll0(prefix, k) + } + + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]): Vector[B] = { + val k = suffix.knownSize + if (k == 0) this + else if (k < 0) super.appendedAll(suffix) + else appendedAll0(suffix, k) + } + + protected[this] def prependedAll0[B >: A](prefix: 
collection.IterableOnce[B], k: Int): Vector[B] = { + // k >= 0, k = prefix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { + var v: Vector[B] = this + val it = IndexedSeq.from(prefix).reverseIterator + while (it.hasNext) v = it.next() +: v + v + } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { + var v = prefix.asInstanceOf[Vector[B]] + val it = this.iterator + while (it.hasNext) v = v :+ it.next() + v + } else if (k < this.size - AlignToFaster) { + new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() + } else super.prependedAll(prefix) + } + + protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + // k >= 0, k = suffix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit) { + var v: Vector[B] = this + suffix match { + case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) + case _ => suffix.iterator.foreach(x => v = v.appended(x)) + } + v + } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { + var v = suffix.asInstanceOf[Vector[B]] + val ri = this.reverseIterator + while (ri.hasNext) v = v.prepended(ri.next()) + v + } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { + val v = suffix.asInstanceOf[Vector[B]] + new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() + } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() + } + + override def className = "Vector" - override def companion: GenericCompanion[Vector] = Vector + @inline override final def take(n: Int): Vector[A] = slice(0, n) + @inline override final def drop(n: Int): Vector[A] = slice(n, length) + @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) + @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) 
+ override def tail: Vector[A] = slice(1, length) + override def init: Vector[A] = slice(0, length-1) - private[immutable] var dirty = false + /** Like slice but parameters must be 0 <= lo < hi < length */ + protected[this] def slice0(lo: Int, hi: Int): Vector[A] - def length = endIndex - startIndex + /** Number of slices */ + protected[immutable] def vectorSliceCount: Int + /** Slice at index */ + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] + /** Length of all slices up to and including index */ + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int - override def par = new ParVector(this) + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) override def toVector: Vector[A] = this - override def lengthCompare(len: Int): Int = length - len + override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength - private[collection] final def initIterator[B >: A](s: VectorIterator[B]) { - s.initFrom(this) - if (dirty) s.stabilize(focus) - if (s.depth > 1) s.gotoPos(startIndex, startIndex ^ focus) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + val s = shape.shape match { + case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) + case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) + case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) + case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) + } + s.asInstanceOf[S with EfficientSplit] } - override def iterator: VectorIterator[A] = { - if (length == 0) Vector.emptyIterator - else { - val s = new VectorIterator[A](startIndex, endIndex) - initIterator(s) - s + protected[this] def ioob(index: Int): IndexOutOfBoundsException = + CommonErrors.indexOutOfBounds(index = 
index, max = length - 1) + + override final def head: A = + if (prefix1.length == 0) throw new NoSuchElementException("empty.head") + else prefix1(0).asInstanceOf[A] + + override final def last: A = { + if(this.isInstanceOf[BigVector[_]]) { + val suffix = this.asInstanceOf[BigVector[_]].suffix1 + if(suffix.length == 0) throw new NoSuchElementException("empty.tail") + else suffix(suffix.length-1) + } else prefix1(prefix1.length-1) + }.asInstanceOf[A] + + override final def foreach[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 0 + while (i < c) { + foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f) + i += 1 } } - override /*SeqLike*/ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private var i = self.length - def hasNext: Boolean = 0 < i - def next(): A = - if (0 < i) { - i -= 1 - self(i) - } else Iterator.empty.next() + // The following definitions are needed for binary compatibility with ParVector + private[collection] def startIndex: Int = 0 + private[collection] def endIndex: Int = length + private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit = + s.it = iterator.asInstanceOf[NewVectorIterator[B]] +} + + +/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */ +private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) { + + override final def slice(from: Int, until: Int): Vector[A] = { + val lo = mmax(from, 0) + val hi = mmin(until, length) + if (hi <= lo) Vector0 + else if (hi - lo == length) this + else slice0(lo, hi) + } +} + + +/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */ +private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) { + + protected[immutable] final def foreachRest[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 1 + while(i < c) { + 
foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f) + i += 1 + } + } +} + + +/** Empty vector */ +private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { + + def apply(index: Int): Nothing = throw ioob(index) + + override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index) + + override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def map[B](f: Nothing => B): Vector[B] = this + + override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail") + + override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init") + + protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this + + protected[immutable] def vectorSliceCount: Int = 0 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 + + override def equals(o: Any): Boolean = { + if(this eq o.asInstanceOf[AnyRef]) true + else o match { + case that: Vector[_] => false + case o => super.equals(o) + } + } + + override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + Vector.from(prefix) + + override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B], k: Int): Vector[B] = + Vector.from(suffix) + + override protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)") +} + +/** Flat ArraySeq-like structure */ +private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { + + @inline def apply(index: Int): A = { + if(index >= 0 && index < prefix1.length) + prefix1(index).asInstanceOf[A] + else throw ioob(index) } - // Ideally, clients will inline calls to map all the way down, including the iterator/builder methods. 
- // In principle, escape analysis could even remove the iterator/builder allocations and do it - // with local variables exclusively. But we're not quite there yet ... + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < prefix1.length) + new Vector1(copyUpdate(prefix1, index, elem)) + else throw ioob(index) + } - def apply(index: Int): A = { - val idx = checkRangeConvert(index) - getElem(idx, idx ^ focus) + override def appended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) + else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) } - private def checkRangeConvert(index: Int) = { - val idx = index + startIndex - if (index >= 0 && idx < endIndex) - idx - else - throw new IndexOutOfBoundsException(index.toString) + override def prepended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) + else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) } - // If we have a default builder, there are faster ways to perform some operations - @inline private[this] def isDefaultCBF[A, B, That](bf: CanBuildFrom[Vector[A], B, That]): Boolean = - (bf eq immutable.Vector.ReusableCBF) || - (bf eq immutable.IndexedSeq.ReusableCBF) || (bf eq collection.IndexedSeq.ReusableCBF) || - (bf eq immutable.Seq.ReusableCBF) || (bf eq collection.Seq.ReusableCBF) + override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = + new Vector1(copyOfRange(prefix1, lo, hi)) - // SeqLike api + override def tail: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyTail(prefix1)) - override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (isDefaultCBF[A, B, That](bf)) - updateAt(index, elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly - 
else super.updated(index, elem)(bf) + override def init: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyInit(prefix1)) - override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (isDefaultCBF[A, B, That](bf)) - appendFront(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly - else super.+:(elem)(bf) + protected[immutable] def vectorSliceCount: Int = 1 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length - override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (isDefaultCBF(bf)) - appendBack(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly - else super.:+(elem)(bf) + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case data1b => new Vector1(data1b) + } - override def take(n: Int): Vector[A] = { - if (n <= 0) - Vector.empty - else if (startIndex < endIndex - n) - dropBack0(startIndex + n) - else - this + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val data1b = append1IfSpace(prefix1, suffix) + if(data1b ne null) new Vector1(data1b) + else super.appendedAll0(suffix, k) } +} + - override def drop(n: Int): Vector[A] = { - if (n <= 0) - this - else if (startIndex < endIndex - n) - dropFront0(startIndex + n) - else - Vector.empty +/** 2-dimensional radix-balanced finger tree */ +private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val data2: Arr2, + _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + data2: Arr2 = data2, + 
suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector2(prefix1, len1, data2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1 + if(io >= 0) { + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) data2(i2)(i1) + else suffix1(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) } - override def takeRight(n: Int): Vector[A] = { - if (n <= 0) - Vector.empty - else if (endIndex - n > startIndex) - dropFront0(endIndex - n) - else - this + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1) { + val io = index - len1 + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) } - override def dropRight(n: Int): Vector[A] = { - if (n <= 0) - this - else if (endIndex - n > startIndex) - dropBack0(endIndex - n) - else - Vector.empty + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) } - override /*IterableLike*/ - def head: A = { - if (isEmpty) throw new UnsupportedOperationException("empty.head") - apply(0) + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) + else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) + else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) } - override 
/*TraversableLike*/ - def tail: Vector[A] = { - if (isEmpty) throw new UnsupportedOperationException("empty.tail") - drop(1) + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, data2) + b.consider(1, suffix1) + b.result() } - override /*TraversableLike*/ - def last: A = { - if (isEmpty) throw new UnsupportedOperationException("empty.last") - apply(length - 1) + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 3 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => data2 + case 2 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => length0 - suffix1.length + case 2 => length0 } - override /*TraversableLike*/ - def init: Vector[A] = { - if (isEmpty) throw new UnsupportedOperationException("empty.init") - dropRight(1) + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = 
length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) } +} - override /*IterableLike*/ - def slice(from: Int, until: Int): Vector[A] = - take(until).drop(from) - override /*IterableLike*/ - def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n)) +/** 3-dimensional radix-balanced finger tree */ +private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val data3: Arr3, + private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + data3: Arr3 = data3, + suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12 + if(io >= 0) { + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i3 < data3.length) data3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } - // concat (suboptimal but avoids worst performance gotchas) - override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = { - if (isDefaultCBF(bf)) { - // We are sure we will create a Vector, so let's do it efficiently - import Vector.{Log2ConcatFaster, TinyAppendFaster} - if (that.isEmpty) this.asInstanceOf[That] - else { - val again = if (!that.isTraversableAgain) that.toVector else that.seq - again.size match { - // Often it's better to append small numbers of elements (or prepend if RHS is a vector) - case n if n 
<= TinyAppendFaster || n < (this.size >>> Log2ConcatFaster) => - var v: Vector[B] = this - for (x <- again) v = v :+ x - v.asInstanceOf[That] - case n if this.size < (n >>> Log2ConcatFaster) && again.isInstanceOf[Vector[_]] => - var v = again.asInstanceOf[Vector[B]] - val ri = this.reverseIterator - while (ri.hasNext) v = ri.next +: v - v.asInstanceOf[That] - case _ => super.++(again) - } + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12) { + val io = index - len12 + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) + else if(data3.length < 
WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) + else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), + data3 = mapElems(3, data3, f), + suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, data3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 5 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => data3 + case 3 => suffix2 + case 4 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len12 + data3.length*WIDTH2 + case 3 => length0 - suffix1.length + case 4 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + length0 = length0 + diff, + ) } - else super.++(that.seq) + + override protected[this] def appendedAll0[B 
>: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) } +} - // semi-private api - private[immutable] def updateAt[B >: A](index: Int, elem: B): Vector[B] = { - val idx = checkRangeConvert(index) - val s = new Vector[B](startIndex, endIndex, idx) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, idx, focus ^ idx) // if dirty commit changes; go to new pos and prepare for writing - s.display0(idx & 31) = elem.asInstanceOf[AnyRef] - s +/** 4-dimensional radix-balanced finger tree */ +private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val data4: Arr4, + private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + data4: Arr4 = data4, + suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len123 + if(io >= 0) { + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i4 < data4.length) data4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len12) { + val io = index - len12 + prefix3(io 
>>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) } - private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { - gotoPosWritable1(oldIndex, newIndex, xor) - } else { - gotoPosWritable0(newIndex, xor) - dirty = true + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len123) { + val io = index - len123 + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector5(prefix1, len1, prefix2, 
len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) + else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) + else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, length0+1) } - private def gotoFreshPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { - gotoFreshPosWritable1(oldIndex, newIndex, xor) - } else { - gotoFreshPosWritable0(oldIndex, newIndex, xor) - dirty = true + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), + data4 = mapElems(4, data4, f), + suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, data4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() } - private[immutable] def appendFront[B >: A](value: B): Vector[B] = { - if (endIndex != startIndex) { - val blockIndex = (startIndex - 1) & ~31 - val lo = (startIndex - 1) & 31 + override def tail: Vector[A] = + if(len1 > 1) 
copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 7 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => data4 + case 4 => suffix3 + case 5 => suffix2 + case 6 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len123 + data4.length*WIDTH3 + case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 + case 5 => length0 - suffix1.length + case 6 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + length0 = length0 + diff, + ) + } - if (startIndex != blockIndex + 32) { - val s = new Vector(startIndex - 1, endIndex, blockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 5-dimensional radix-balanced finger tree */ +private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, 
private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val data5: Arr5, + private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + data5: Arr5 = data5, + suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1234 + if(io >= 0) { + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1234) { + val io = index - len1234 + val i5 
= io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector6(prefix1, len1, prefix2, len12, prefix3, 
len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) + else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) + else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), + data5 = mapElems(5, data5, f), + suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, data5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + 
b.consider(1, suffix1) + b.result() + } - val freeSpace = (1 << (5 * depth)) - endIndex // free space at the right given the current tree-structure depth - val shift = freeSpace & ~((1 << (5 * (depth - 1))) - 1) // number of elements by which we'll shift right (only move at top level) - val shiftBlocks = freeSpace >>> (5 * (depth - 1)) // number of top-level blocks - - if (shift != 0) { - // case A: we can shift right on the top level - if (depth > 1) { - val newBlockIndex = blockIndex + shift - val newFocus = focus + shift - - val s = new Vector(startIndex - 1 + shift, endIndex + shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing - s.display0(lo) = value.asInstanceOf[AnyRef] - s + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 9 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => data5 + case 5 => suffix4 + case 6 => suffix3 + case 7 => suffix2 + case 8 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len1234 + data5.length*WIDTH4 + case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 7 => length0 - suffix1.length + case 8 => length0 + } + + override protected[this] def 
prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 6-dimensional radix-balanced finger tree */ +private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val prefix5: Arr5, private[immutable] val len12345: Int, + private[immutable] val data6: Arr6, + private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + prefix5: Arr5 = prefix5, len12345: Int = len12345, + data6: Arr6 = data6, + suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, 
suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12345 + if(io >= 0) { + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) + else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1234) { + val io = index - len1234 + prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12345) { + val io = index - len12345 + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) + else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = 
copyUpdate(suffix1, i1, elem)) + } else if(index >= len1234) { + val io = index - len1234 + copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def 
prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), + data6 = mapElems(6, data6, f), + suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, prefix5) + b.consider(6, data6) + b.consider(5, suffix5) + b.consider(4, 
suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, len12345 = len12345-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 11 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => prefix5 + case 5 => data6 + case 6 => suffix5 + case 7 => suffix4 + case 8 => suffix3 + case 9 => suffix2 + case 10 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len12345 + case 5 => len12345 + data6.length*WIDTH5 + case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 9 => length0 - suffix1.length + case 10 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + len12345 = len12345 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = 
append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** Helper class for vector slicing. It is initialized with the validated start and end index, + * then the vector slices are added in succession with `consider`. No matter what the dimension + * of the originating vector is or where the cut is performed, this always results in a + * structure with the highest-dimensional data in the middle and fingers of decreasing dimension + * at both ends, which can be turned into a new vector with very little rebalancing. + */ +private final class VectorSliceBuilder(lo: Int, hi: Int) { + //println(s"***** VectorSliceBuilder($lo, $hi)") + + private[this] val slices = new Array[Array[AnyRef]](11) + private[this] var len, pos, maxDim = 0 + + @inline private[this] def prefixIdx(n: Int) = n-1 + @inline private[this] def suffixIdx(n: Int) = 11-n + + def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** consider($n, /${a.length})") + val count = a.length * (1 << (BITS*(n-1))) + val lo0 = mmax(lo-pos, 0) + val hi0 = mmin(hi-pos, count) + if(hi0 > lo0) { + addSlice(n, a, lo0, hi0) + len += (hi0 - lo0) + } + pos += count + } + + private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { + //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") + if(n == 1) { + add(1, copyOrUse(a, lo, hi)) + } else { + val bitsN = BITS * (n-1) + val widthN = 1 << bitsN + val loN = lo >>> bitsN + val hiN = hi >>> bitsN + val loRest = lo & (widthN - 1) + val hiRest = hi & (widthN - 1) + //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") + if(loRest == 0) { + if(hiRest == 0) { + add(n, copyOrUse(a, loN, hiN)) + } else { + if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } else { + if(hiN == loN) { + addSlice(n-1, 
a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) + } else { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) + if(hiRest == 0) { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) } else { - val newBlockIndex = blockIndex + 32 - val newFocus = focus - - val s = new Vector(startIndex - 1 + shift, endIndex + shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(0, shiftBlocks) // shift right by n elements - s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing - s.display0(shift - 1) = value.asInstanceOf[AnyRef] - s + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) } - } else if (blockIndex < 0) { - // case B: we need to move the whole structure - val move = (1 << (5 * (depth + 1))) - (1 << (5 * depth)) - val newBlockIndex = blockIndex + move - val newFocus = focus + move - - val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val newBlockIndex = blockIndex - val newFocus = focus - - val s = new Vector(startIndex - 1, endIndex, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s } } - } else { - // empty vector, just insert single element at the back - val elems = new Array[AnyRef](32) - elems(31) = value.asInstanceOf[AnyRef] - val s = new Vector(31, 32, 0) - s.depth = 1 - s.display0 = elems - s } } - private[immutable] def appendBack[B >: A](value: B): Vector[B] = { - if (endIndex != startIndex) { - val blockIndex = endIndex & ~31 - val lo = endIndex & 31 + private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** 
add($n, /${a.length})") + val idx = + if(n <= maxDim) suffixIdx(n) + else { maxDim = n; prefixIdx(n) } + slices(idx) = a.asInstanceOf[Array[AnyRef]] + } - if (endIndex != blockIndex) { - val s = new Vector(startIndex, endIndex + 1, blockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val shift = startIndex & ~((1 << (5 * (depth - 1))) - 1) - val shiftBlocks = startIndex >>> (5 * (depth - 1)) - - if (shift != 0) { - if (depth > 1) { - val newBlockIndex = blockIndex - shift - val newFocus = focus - shift - - val s = new Vector(startIndex - shift, endIndex + 1 - shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s - } else { - val newBlockIndex = blockIndex - 32 - val newFocus = focus - - val s = new Vector(startIndex - shift, endIndex + 1 - shift, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements - s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(32 - shift) = value.asInstanceOf[AnyRef] - s + def result[A](): Vector[A] = { + //println(s"***** result: $len, $maxDim") + if(len <= 32) { + if(len == 0) Vector0 + else { + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") + val a: Arr1 = + if(prefix1 ne null) { + if(suffix1 ne null) concatArrays(prefix1, suffix1) + else prefix1 + } else if(suffix1 ne null) suffix1 + else { + val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] + if(prefix2 ne null) prefix2(0) + else { + val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] + 
suffix2(0) + } } + new Vector1(a) + } + } else { + balancePrefix(1) + balanceSuffix(1) + var resultDim = maxDim + if(resultDim < 6) { + val pre = slices(prefixIdx(maxDim)) + val suf = slices(suffixIdx(maxDim)) + if((pre ne null) && (suf ne null)) { + // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, + // otherwise increase the dimension + if(pre.length + suf.length <= WIDTH-2) { + slices(prefixIdx(maxDim)) = concatArrays(pre, suf) + slices(suffixIdx(maxDim)) = null + } else resultDim += 1 } else { - val newBlockIndex = blockIndex - val newFocus = focus - - val s = new Vector(startIndex, endIndex + 1, newBlockIndex) - s.initFrom(this) - s.dirty = dirty - s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) - s.display0(lo) = value.asInstanceOf[AnyRef] - s + // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we + // only allow WIDTH-2 for the main data, so increase the dimension in this case + val one = if(pre ne null) pre else suf + if(one.length > WIDTH-2) resultDim += 1 } } - } else { - val elems = new Array[AnyRef](32) - elems(0) = value.asInstanceOf[AnyRef] - val s = new Vector(0, 1, 0) - s.depth = 1 - s.display0 = elems - s + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + val len1 = prefix1.length + val res = (resultDim: @switch) match { + case 2 => + val data2 = dataOr(2, empty2) + new Vector2[A](prefix1, len1, data2, suffix1, len) + case 3 => + val prefix2 = prefixOr(2, empty2) + val data3 = dataOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) + case 4 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val data4 = dataOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val 
len123 = len12 + (prefix3.length * WIDTH2) + new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) + case 5 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val data5 = dataOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) + case 6 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val prefix5 = prefixOr(5, empty5) + val data6 = dataOr(6, empty6) + val suffix5 = suffixOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + val len12345 = len1234 + (prefix5.length * WIDTH4) + new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) + } + res } } + @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] else a + } - // low-level implementation (needs cleanup, maybe move to util class) + @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } - private def shiftTopLevel(oldLeft: Int, newLeft: Int) = (depth - 1) match { - case 0 => display0 = copyRange(display0, oldLeft, newLeft) - case 1 => display1 = copyRange(display1, oldLeft, newLeft) - 
case 2 => display2 = copyRange(display2, oldLeft, newLeft) - case 3 => display3 = copyRange(display3, oldLeft, newLeft) - case 4 => display4 = copyRange(display4, oldLeft, newLeft) - case 5 => display5 = copyRange(display5, oldLeft, newLeft) + @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] + else { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } } - private def zeroLeft(array: Array[AnyRef], index: Int): Unit = { - var i = 0 - while (i < index) { - array(i) = null - i += 1 + /** Ensure prefix is not empty */ + private[this] def balancePrefix(n: Int): Unit = { + if(slices(prefixIdx(n)) eq null) { + if(n == maxDim) { + slices(prefixIdx(n)) = slices(suffixIdx(n)) + slices(suffixIdx(n)) = null + } else { + balancePrefix(n+1) + val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(preN1 ne null) + slices(prefixIdx(n)) = preN1(0) + if(preN1.length == 1) { + slices(prefixIdx(n+1)) = null + if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n + } else { + slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] + } + } } } - private def zeroRight(array: Array[AnyRef], index: Int): Unit = { - var i = index - while (i < array.length) { - array(i) = null - i += 1 + /** Ensure suffix is not empty */ + private[this] def balanceSuffix(n: Int): Unit = { + if(slices(suffixIdx(n)) eq null) { + if(n == maxDim) { + slices(suffixIdx(n)) = slices(prefixIdx(n)) + slices(prefixIdx(n)) = null + } else { + balanceSuffix(n+1) + val sufN1 = slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") + slices(suffixIdx(n)) = sufN1(sufN1.length-1) + if(sufN1.length == 1) { + slices(suffixIdx(n+1)) = null + if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n + } else { + 
slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] + } + } } } - private def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = { - val copy = new Array[AnyRef](array.length) - java.lang.System.arraycopy(array, 0, copy, 0, right) - copy + override def toString: String = + s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" + + private[immutable] def getSlices: Array[Array[AnyRef]] = slices +} + + +final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { + + private[this] var a6: Arr6 = _ + private[this] var a5: Arr5 = _ + private[this] var a4: Arr4 = _ + private[this] var a3: Arr3 = _ + private[this] var a2: Arr2 = _ + private[this] var a1: Arr1 = new Arr1(WIDTH) + private[this] var len1, lenRest, offset = 0 + private[this] var prefixIsRightAligned = false + private[this] var depth = 1 + + @inline private[this] final def setLen(i: Int): Unit = { + len1 = i & MASK + lenRest = i - len1 } - private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = { - val copy = new Array[AnyRef](array.length) - java.lang.System.arraycopy(array, left, copy, left, copy.length - left) - copy + + override def knownSize: Int = len1 + lenRest - offset + + @inline def size: Int = knownSize + @inline def isEmpty: Boolean = knownSize == 0 + @inline def nonEmpty: Boolean = knownSize != 0 + + def clear(): Unit = { + a6 = null + a5 = null + a4 = null + a3 = null + a2 = null + a1 = new Arr1(WIDTH) + len1 = 0 + lenRest = 0 + offset = 0 + prefixIsRightAligned = false + depth = 1 + } + + private[immutable] def initSparse(size: Int, elem: A): Unit = { + setLen(size) + Arrays.fill(a1, elem) + if(size > WIDTH) { + a2 = new Array(WIDTH) + Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) + if(size > WIDTH2) { + a3 = new Array(WIDTH) + Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) + if(size > WIDTH3) { + a4 = new Array(WIDTH) + Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) + if(size > WIDTH4) { + a5 = new 
Array(WIDTH) + Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) + if(size > WIDTH5) { + a6 = new Array(LASTWIDTH) + Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) + depth = 6 + } else depth = 5 + } else depth = 4 + } else depth = 3 + } else depth = 2 + } else depth = 1 + } + + private[immutable] def initFrom(prefix1: Arr1): Unit = { + depth = 1 + setLen(prefix1.length) + a1 = copyOrUse(prefix1, 0, WIDTH) + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } } - private def preClean(depth: Int) = { - this.depth = depth - (depth - 1) match { + private[immutable] def initFrom(v: Vector[_]): this.type = { + (v.vectorSliceCount: @switch) match { case 0 => - display1 = null - display2 = null - display3 = null - display4 = null - display5 = null case 1 => - display2 = null - display3 = null - display4 = null - display5 = null + val v1 = v.asInstanceOf[Vector1[_]] + depth = 1 + setLen(v1.prefix1.length) + a1 = copyOrUse(v1.prefix1, 0, WIDTH) + case 3 => + val v2 = v.asInstanceOf[Vector2[_]] + val d2 = v2.data2 + a1 = copyOrUse(v2.suffix1, 0, WIDTH) + depth = 2 + offset = WIDTH - v2.len1 + setLen(v2.length0 + offset) + a2 = new Arr2(WIDTH) + a2(0) = v2.prefix1 + System.arraycopy(d2, 0, a2, 1, d2.length) + a2(d2.length+1) = a1 + case 5 => + val v3 = v.asInstanceOf[Vector3[_]] + val d3 = v3.data3 + val s2 = v3.suffix2 + a1 = copyOrUse(v3.suffix1, 0, WIDTH) + depth = 3 + offset = WIDTH2 - v3.len12 + setLen(v3.length0 + offset) + a3 = new Arr3(WIDTH) + a3(0) = copyPrepend(v3.prefix1, v3.prefix2) + System.arraycopy(d3, 0, a3, 1, d3.length) + a2 = copyOf(s2, WIDTH) + a3(d3.length+1) = a2 + a2(s2.length) = a1 + case 7 => + val v4 = v.asInstanceOf[Vector4[_]] + val d4 = v4.data4 + val s3 = v4.suffix3 + val s2 = v4.suffix2 + a1 = copyOrUse(v4.suffix1, 0, WIDTH) + depth = 4 + offset = WIDTH3 - v4.len123 + setLen(v4.length0 + offset) + a4 = new Arr4(WIDTH) + a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3) + 
System.arraycopy(d4, 0, a4, 1, d4.length) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a4(d4.length+1) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + case 9 => + val v5 = v.asInstanceOf[Vector5[_]] + val d5 = v5.data5 + val s4 = v5.suffix4 + val s3 = v5.suffix3 + val s2 = v5.suffix2 + a1 = copyOrUse(v5.suffix1, 0, WIDTH) + depth = 5 + offset = WIDTH4 - v5.len1234 + setLen(v5.length0 + offset) + a5 = new Arr5(WIDTH) + a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4) + System.arraycopy(d5, 0, a5, 1, d5.length) + a4 = copyOf(s4, WIDTH) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a5(d5.length+1) = a4 + a4(s4.length) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + case 11 => + val v6 = v.asInstanceOf[Vector6[_]] + val d6 = v6.data6 + val s5 = v6.suffix5 + val s4 = v6.suffix4 + val s3 = v6.suffix3 + val s2 = v6.suffix2 + a1 = copyOrUse(v6.suffix1, 0, WIDTH) + depth = 6 + offset = WIDTH5 - v6.len12345 + setLen(v6.length0 + offset) + a6 = new Arr6(LASTWIDTH) + a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5) + System.arraycopy(d6, 0, a6, 1, d6.length) + a5 = copyOf(s5, WIDTH) + a4 = copyOf(s4, WIDTH) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a6(d6.length+1) = a5 + a5(s5.length) = a4 + a4(s4.length) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + } + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + this + } + + //TODO Make public; this method is only private for binary compatibility + private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = { + if (len1 != 0 || lenRest != 0) + throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. 
Please call .reset() or use a new VectorBuilder.") + val (prefixLength, maxPrefixLength) = bigVector match { + case Vector0 => (0, 1) + case v1: Vector1[_] => (0, 1) + case v2: Vector2[_] => (v2.len1, WIDTH) + case v3: Vector3[_] => (v3.len12, WIDTH2) + case v4: Vector4[_] => (v4.len123, WIDTH3) + case v5: Vector5[_] => (v5.len1234, WIDTH4) + case v6: Vector6[_] => (v6.len12345, WIDTH5) + } + if (maxPrefixLength == 1) return this // does not really make sense to align for <= 32 element-vector + val overallPrefixLength = (before + prefixLength) % maxPrefixLength + offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength + // pretend there are already `offset` elements added + advanceN(offset & ~MASK) + len1 = offset & MASK + prefixIsRightAligned = true + this + } + + /** + * Removes `offset` leading `null`s in the prefix. + * This is needed after calling `alignTo` and subsequent additions, + * directly before the result is used for creating a new Vector. + * Note that the outermost array keeps its length to keep the + * Builder re-usable. + * + * example: + * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...) + * becomes + * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?) 
+ */ + private[this] def leftAlignPrefix(): Unit = { + @inline def shrinkOffsetIfToLarge(width: Int): Unit = { + val newOffset = offset % width + lenRest -= offset - newOffset + offset = newOffset + } + var a: Array[AnyRef] = null // the array we modify + var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a + if (depth >= 6) { + a = a6.asInstanceOf[Array[AnyRef]] + val i = offset >>> BITS5 + if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) + shrinkOffsetIfToLarge(WIDTH5) + if ((lenRest >>> BITS5) == 0) depth = 5 + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 5) { + if (a == null) a = a5.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS4) & MASK + if (depth == 5) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a5 = a.asInstanceOf[Arr5] + shrinkOffsetIfToLarge(WIDTH4) + if ((lenRest >>> BITS4) == 0) depth = 4 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 4) { + if (a == null) a = a4.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS3) & MASK + if (depth == 4) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a4 = a.asInstanceOf[Arr4] + shrinkOffsetIfToLarge(WIDTH3) + if ((lenRest >>> BITS3) == 0) depth = 3 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 3) { + if (a == null) a = a3.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS2) & MASK + if (depth == 3) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a3 = a.asInstanceOf[Arr3] + shrinkOffsetIfToLarge(WIDTH2) + if ((lenRest >>> BITS2) == 0) depth = 2 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 2) { + if (a == null) a = a2.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS) & MASK + if (depth == 2) { + if (i > 0) System.arraycopy(a, 
i, a, 0, WIDTH - i) + a2 = a.asInstanceOf[Arr2] + shrinkOffsetIfToLarge(WIDTH) + if ((lenRest >>> BITS) == 0) depth = 1 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 1) { + if (a == null) a = a1.asInstanceOf[Array[AnyRef]] + val i = offset & MASK + if (depth == 1) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a1 = a.asInstanceOf[Arr1] + len1 -= offset + offset = 0 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + } + prefixIsRightAligned = false + } + + def addOne(elem: A): this.type = { + if(len1 == WIDTH) advance() + a1(len1) = elem.asInstanceOf[AnyRef] + len1 += 1 + this + } + + private[this] def addArr1(data: Arr1): Unit = { + val dl = data.length + if(dl > 0) { + if(len1 == WIDTH) advance() + val copy1 = mmin(WIDTH-len1, dl) + val copy2 = dl - copy1 + System.arraycopy(data, 0, a1, len1, copy1) + len1 += copy1 + if(copy2 > 0) { + advance() + System.arraycopy(data, copy1, a1, 0, copy2) + len1 += copy2 + } + } + } + + private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { +// assert(dim >= 2) +// assert(lenRest % WIDTH == 0) +// assert(len1 == 0 || len1 == WIDTH) + if (slice.isEmpty) return + if (len1 == WIDTH) advance() + val sl = slice.length + (dim: @switch) match { case 2 => - display3 = null - display4 = null - display5 = null + // lenRest is always a multiple of WIDTH + val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS) & MASK + System.arraycopy(slice, 0, a2, destPos, copy1) + advanceN(WIDTH * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a2, 0, copy2) + advanceN(WIDTH * copy2) + } case 3 => - display4 = null - display5 = null + if (lenRest % WIDTH2 != 0) { + // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) + return + } + 
val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS2) & MASK + System.arraycopy(slice, 0, a3, destPos, copy1) + advanceN(WIDTH2 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a3, 0, copy2) + advanceN(WIDTH2 * copy2) + } case 4 => - display5 = null + if (lenRest % WIDTH3 != 0) { + // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) + return + } + val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS3) & MASK + System.arraycopy(slice, 0, a4, destPos, copy1) + advanceN(WIDTH3 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a4, 0, copy2) + advanceN(WIDTH3 * copy2) + } case 5 => + if (lenRest % WIDTH4 != 0) { + // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) + return + } + val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS4) & MASK + System.arraycopy(slice, 0, a5, destPos, copy1) + advanceN(WIDTH4 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a5, 0, copy2) + advanceN(WIDTH4 * copy2) + } + case 6 => // note width is now LASTWIDTH + if (lenRest % WIDTH5 != 0) { + // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) + return + } + val copy1 = sl + // there is no copy2 because there can't be another a6 to copy to + val destPos = lenRest >>> BITS5 + if (destPos + copy1 > LASTWIDTH) + throw new IllegalArgumentException("exceeding 2^31 elements") + System.arraycopy(slice, 0, a6, destPos, copy1) + advanceN(WIDTH5 * copy1) } } - // requires structure is at index cutIndex and writable at level 0 - private def 
cleanLeftEdge(cutIndex: Int) = { - if (cutIndex < (1 << 5)) { - zeroLeft(display0, cutIndex) - } else if (cutIndex < (1 << 10)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, cutIndex >>> 5) - } else if (cutIndex < (1 << 15)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, cutIndex >>> 10) - } else if (cutIndex < (1 << 20)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, (cutIndex >>> 10) & 31) - display3 = copyRight(display3, cutIndex >>> 15) - } else if (cutIndex < (1 << 25)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, (cutIndex >>> 10) & 31) - display3 = copyRight(display3, (cutIndex >>> 15) & 31) - display4 = copyRight(display4, cutIndex >>> 20) - } else if (cutIndex < (1 << 30)) { - zeroLeft(display0, cutIndex & 31) - display1 = copyRight(display1, (cutIndex >>> 5) & 31) - display2 = copyRight(display2, (cutIndex >>> 10) & 31) - display3 = copyRight(display3, (cutIndex >>> 15) & 31) - display4 = copyRight(display4, (cutIndex >>> 20) & 31) - display5 = copyRight(display5, cutIndex >>> 25) - } else { - throw new IllegalArgumentException() + private[this] def addVector(xs: Vector[A]): this.type = { + val sliceCount = xs.vectorSliceCount + var sliceIdx = 0 + while(sliceIdx < sliceCount) { + val slice = xs.vectorSlice(sliceIdx) + vectorSliceDim(sliceCount, sliceIdx) match { + case 1 => addArr1(slice.asInstanceOf[Arr1]) + case n if len1 == WIDTH || len1 == 0 => + addArrN(slice.asInstanceOf[Array[AnyRef]], n) + case n => foreachRec(n-2, slice, addArr1) + } + sliceIdx += 1 + } + this + } + + override def addAll(xs: IterableOnce[A]): this.type = xs match { + case v: Vector[_] => + if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) + else addVector(v.asInstanceOf[Vector[A]]) + case _ => + 
super.addAll(xs) + } + + private[this] def advance(): Unit = { + val idx = lenRest + WIDTH + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advanceN(n: Int): Unit = if (n > 0) { + // assert(n % 32 == 0) + val idx = lenRest + n + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advance1(idx: Int, xor: Int): Unit = { + if (xor <= 0) { // level = 6 or something very unexpected happened + throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") + } else if (xor < WIDTH2) { // level = 1 + if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } + a1 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + } else if (xor < WIDTH3) { // level = 2 + if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + } else if (xor < WIDTH4) { // level = 3 + if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + } else if (xor < WIDTH5) { // level = 4 + if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + } else { // level = 5 + if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a5 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + a6(idx >>> BITS5) = a5 } 
} - // requires structure is writable and at index cutIndex - private def cleanRightEdge(cutIndex: Int) = { - // we're actually sitting one block left if cutIndex lies on a block boundary - // this means that we'll end up erasing the whole block!! - - if (cutIndex <= (1 << 5)) { - zeroRight(display0, cutIndex) - } else if (cutIndex <= (1 << 10)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, cutIndex >>> 5) - } else if (cutIndex <= (1 << 15)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, cutIndex >>> 10) - } else if (cutIndex <= (1 << 20)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) - display3 = copyLeft(display3, cutIndex >>> 15) - } else if (cutIndex <= (1 << 25)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) - display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1) - display4 = copyLeft(display4, cutIndex >>> 20) - } else if (cutIndex <= (1 << 30)) { - zeroRight(display0, ((cutIndex - 1) & 31) + 1) - display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) - display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) - display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1) - display4 = copyLeft(display4, (((cutIndex - 1) >>> 20) & 31) + 1) - display5 = copyLeft(display5, cutIndex >>> 25) + def result(): Vector[A] = { + if (prefixIsRightAligned) leftAlignPrefix() + val len = len1 + lenRest + val realLen = len - offset + if(realLen == 0) Vector.empty + else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") + else if(len <= WIDTH) { + new Vector1(copyIfDifferentSize(a1, 
realLen)) + } else if(len <= WIDTH2) { + val i1 = (len-1) & MASK + val i2 = (len-1) >>> BITS + val data = copyOfRange(a2, 1, i2) + val prefix1 = a2(0) + val suffix1 = copyIfDifferentSize(a2(i2), i1+1) + new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) + } else if(len <= WIDTH3) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) + val data = copyOfRange(a3, 1, i3) + val prefix2 = copyTail(a3(0)) + val prefix1 = a3(0)(0) + val suffix2 = copyOf(a3(i3), i2) + val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) + } else if(len <= WIDTH4) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) + val data = copyOfRange(a4, 1, i4) + val prefix3 = copyTail(a4(0)) + val prefix2 = copyTail(a4(0)(0)) + val prefix1 = a4(0)(0)(0) + val suffix3 = copyOf(a4(i4), i3) + val suffix2 = copyOf(a4(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) + } else if(len <= WIDTH5) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) + val data = copyOfRange(a5, 1, i5) + val prefix4 = copyTail(a5(0)) + val prefix3 = copyTail(a5(0)(0)) + val prefix2 = copyTail(a5(0)(0)(0)) + val prefix1 = a5(0)(0)(0)(0) + val suffix4 = copyOf(a5(i5), i4) + val suffix3 = copyOf(a5(i5)(i4), i3) + val suffix2 = copyOf(a5(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 
+ prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) } else { - throw new IllegalArgumentException() + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) & MASK + val i6 = ((len-1) >>> BITS5) + val data = copyOfRange(a6, 1, i6) + val prefix5 = copyTail(a6(0)) + val prefix4 = copyTail(a6(0)(0)) + val prefix3 = copyTail(a6(0)(0)(0)) + val prefix2 = copyTail(a6(0)(0)(0)(0)) + val prefix1 = a6(0)(0)(0)(0)(0) + val suffix5 = copyOf(a6(i6), i5) + val suffix4 = copyOf(a6(i6)(i5), i4) + val suffix3 = copyOf(a6(i6)(i5)(i4), i3) + val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + val len12345 = len1234 + prefix5.length*WIDTH4 + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) } } - private def requiredDepth(xor: Int) = { - if (xor < (1 << 5)) 1 - else if (xor < (1 << 10)) 2 - else if (xor < (1 << 15)) 3 - else if (xor < (1 << 20)) 4 - else if (xor < (1 << 25)) 5 - else if (xor < (1 << 30)) 6 - else throw new IllegalArgumentException() + override def toString: String = + s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" + + private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( + a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], + a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] + ).asInstanceOf[Array[Array[_]]] +} + + +/** Compile-time definitions for Vector. 
No references to this object should appear in bytecode. */ +private[immutable] object VectorInline { + // compile-time numeric constants + final val BITS = 5 + final val WIDTH = 1 << BITS + final val MASK = WIDTH - 1 + final val BITS2 = BITS * 2 + final val WIDTH2 = 1 << BITS2 + final val BITS3 = BITS * 3 + final val WIDTH3 = 1 << BITS3 + final val BITS4 = BITS * 4 + final val WIDTH4 = 1 << BITS4 + final val BITS5 = BITS * 5 + final val WIDTH5 = 1 << BITS5 + final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: + final val Log2ConcatFaster = 5 + final val AlignToFaster = 64 + + type Arr1 = Array[AnyRef] + type Arr2 = Array[Array[AnyRef]] + type Arr3 = Array[Array[Array[AnyRef]]] + type Arr4 = Array[Array[Array[Array[AnyRef]]]] + type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] + type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] + + /** Dimension of the slice at index */ + @inline def vectorSliceDim(count: Int, idx: Int): Int = { + val c = count/2 + c+1-abs(idx-c) + } + + @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = + if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) + + @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) + + @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) + + @inline final def copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = + if(a.length == len) a else copyOf[T](a, len) + + @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } + @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } + @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } + @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } + @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } + + @inline final 
def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { + val a1c = a1.clone() + a1c(idx1) = elem.asInstanceOf[AnyRef] + a1c } - private def dropFront0(cutIndex: Int): Vector[A] = { - val blockIndex = cutIndex & ~31 - val xor = cutIndex ^ (endIndex - 1) - val d = requiredDepth(xor) - val shift = cutIndex & ~((1 << (5 * d)) - 1) + @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { + val a2c = a2.clone() + a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) + a2c + } - // need to init with full display iff going to cutIndex requires swapping block at level >= d + @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { + val a3c = a3.clone() + a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) + a3c + } - val s = new Vector(cutIndex - shift, endIndex - shift, blockIndex - shift) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.preClean(d) - s.cleanLeftEdge(cutIndex - shift) - s + @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { + val a4c = a4.clone() + a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) + a4c } - private def dropBack0(cutIndex: Int): Vector[A] = { - val blockIndex = (cutIndex - 1) & ~31 - val xor = startIndex ^ (cutIndex - 1) - val d = requiredDepth(xor) - val shift = startIndex & ~((1 << (5 * d)) - 1) + @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { + val a5c = a5.clone() + a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) + a5c + } - val s = new Vector(startIndex - shift, cutIndex - shift, blockIndex - shift) - s.initFrom(this) - s.dirty = dirty - s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) - s.preClean(d) - s.cleanRightEdge(cutIndex - shift) - s + @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { + val a6c = 
a6.clone() + a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) + a6c + } + + @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { + val dest = copyOf[T](a, a.length+b.length) + System.arraycopy(b, 0, dest, a.length, b.length) + dest } } -class VectorIterator[+A](_startIndex: Int, endIndex: Int) -extends AbstractIterator[A] - with Iterator[A] - with VectorPointer[A @uncheckedVariance] { - private var blockIndex: Int = _startIndex & ~31 - private var lo: Int = _startIndex & 31 +/** Helper methods and constants for Vector. */ +private object VectorStatics { - private var endLo = math.min(endIndex - blockIndex, 32) + final def copyAppend1(a: Arr1, elem: Any): Arr1 = { + val alen = a.length + val ac = new Arr1(alen+1) + System.arraycopy(a, 0, ac, 0, alen) + ac(alen) = elem.asInstanceOf[AnyRef] + ac + } - def hasNext = _hasNext + final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { + val ac = copyOf(a, a.length+1) + ac(ac.length-1) = elem + ac + } - private var _hasNext = blockIndex + lo < endIndex + final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { + val ac = new Arr1(a.length+1) + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem.asInstanceOf[AnyRef] + ac + } - def next(): A = { - if (!_hasNext) throw new NoSuchElementException("reached iterator end") + final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem + ac + } + + final val empty1: Arr1 = new Array(0) + final val empty2: Arr2 = new Array(0) + final val empty3: Arr3 = new Array(0) + final val empty4: Arr4 = new Array(0) + final val empty5: Arr5 = new Array(0) + final val empty6: Arr6 = new Array(0) + + final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { + var i = 0 + val len = a.length + if(level == 0) { + while(i < len) { + 
f(a(i).asInstanceOf[A]) + i += 1 + } + } else { + val l = level-1 + while(i < len) { + foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + } + } - val res = display0(lo).asInstanceOf[A] - lo += 1 + final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { + var i = 0 + while(i < a.length) { + val v1 = a(i).asInstanceOf[AnyRef] + val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] + if(v1 ne v2) + return mapElems1Rest(a, f, i, v2) + i += 1 + } + a + } - if (lo == endLo) { - if (blockIndex + lo < endIndex) { - val newBlockIndex = blockIndex + 32 - gotoNextBlockStart(newBlockIndex, blockIndex ^ newBlockIndex) + final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { + val ac = new Arr1(a.length) + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] + i += 1 + } + ac + } - blockIndex = newBlockIndex - endLo = math.min(endIndex - blockIndex, 32) - lo = 0 - } else { - _hasNext = false + final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { + if(n == 1) + mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] + else { + var i = 0 + while(i < a.length) { + val v1 = a(i) + val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) + if(v1 ne v2) + return mapElemsRest(n, a, f, i, v2) + i += 1 } + a } + } - res + final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + ac.asInstanceOf[Array[T]] } - private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0 + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { 
+ case it: Iterable[_] => + if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) + case s => + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + @annotation.unused val copied = it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + //assert(copied == s) + prefix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-prefix1.length) { + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + @annotation.unused val copied = it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + //assert(copied == s) + prefix1b + } else null + } - /** Creates a new vector which consists of elements remaining in this iterator. - * Such a vector can then be split into several vectors using methods like `take` and `drop`. - */ - private[collection] def remainingVector: Vector[A] = { - val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo) - v.initFrom(this) - v + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + case it: Iterable[_] => + if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) + case s => + val suffix1b = copyOf(suffix1, suffix1.length + s) + @annotation.unused val copied = it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + //assert(copied == s) + suffix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-suffix1.length) { + val suffix1b = copyOf(suffix1, suffix1.length + s) + @annotation.unused val copied = it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + //assert(copied == s) + suffix1b + } else null } } -/** A class to build instances of `Vector`. This builder is reusable. 
*/ -final class VectorBuilder[A]() extends ReusableBuilder[A, Vector[A]] with VectorPointer[A @uncheckedVariance] { - // possible alternative: start with display0 = null, blockIndex = -32, lo = 32 - // to avoid allocating initial array if the result will be empty anyways +private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends AbstractIterator[A] with java.lang.Cloneable { - display0 = new Array[AnyRef](32) - depth = 1 + private[this] var a1: Arr1 = v.prefix1 + private[this] var a2: Arr2 = _ + private[this] var a3: Arr3 = _ + private[this] var a4: Arr4 = _ + private[this] var a5: Arr5 = _ + private[this] var a6: Arr6 = _ + private[this] var a1len = a1.length + private[this] var i1 = 0 // current index in a1 + private[this] var oldPos = 0 + private[this] var len1 = totalLength // remaining length relative to a1 - private var blockIndex = 0 - private var lo = 0 + private[this] var sliceIdx = 0 + private[this] var sliceDim = 1 + private[this] var sliceStart = 0 // absolute position + private[this] var sliceEnd = a1len // absolute position - def +=(elem: A): this.type = { - if (lo >= display0.length) { - val newBlockIndex = blockIndex + 32 - gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex) - blockIndex = newBlockIndex - lo = 0 - } - display0(lo) = elem.asInstanceOf[AnyRef] - lo += 1 - this - } + //override def toString: String = + // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" - override def ++=(xs: TraversableOnce[A]): this.type = super.++=(xs) + @inline override def knownSize = len1 - i1 - def result: Vector[A] = { - val size = blockIndex + lo - if (size == 0) - return Vector.empty - val s = new Vector[A](0, size, 0) // should focus front or back? - s.initFrom(this) - if (depth > 1) s.gotoPos(0, size - 1) // we're currently focused to size - 1, not size! 
- s + @inline def hasNext: Boolean = len1 > i1 + + def next(): A = { + if(i1 == a1len) advance() + val r = a1(i1) + i1 += 1 + r.asInstanceOf[A] } - def clear(): Unit = { - display5 = null - display4 = null - display3 = null - display2 = null - display1 = null - display0 = new Array[AnyRef](32) - depth = 1 - blockIndex = 0 - lo = 0 + private[this] def advanceSlice(): Unit = { + if(!hasNext) Iterator.empty.next() + sliceIdx += 1 + var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) + while(slice.length == 0) { + sliceIdx += 1 + slice = v.vectorSlice(sliceIdx) + } + sliceStart = sliceEnd + sliceDim = vectorSliceDim(sliceCount, sliceIdx) + (sliceDim: @switch) match { + case 1 => a1 = slice.asInstanceOf[Arr1] + case 2 => a2 = slice.asInstanceOf[Arr2] + case 3 => a3 = slice.asInstanceOf[Arr3] + case 4 => a4 = slice.asInstanceOf[Arr4] + case 5 => a5 = slice.asInstanceOf[Arr5] + case 6 => a6 = slice.asInstanceOf[Arr6] + } + sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) + if(sliceEnd > totalLength) sliceEnd = totalLength + if(sliceDim > 1) oldPos = (1 << (BITS*sliceDim))-1 } -} -private[immutable] trait VectorPointer[T] { - private[immutable] var depth: Int = _ - private[immutable] var display0: Array[AnyRef] = _ - private[immutable] var display1: Array[AnyRef] = _ - private[immutable] var display2: Array[AnyRef] = _ - private[immutable] var display3: Array[AnyRef] = _ - private[immutable] var display4: Array[AnyRef] = _ - private[immutable] var display5: Array[AnyRef] = _ - - // used - private[immutable] final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth) - - private[immutable] final def initFrom[U](that: VectorPointer[U], depth: Int) = { - this.depth = depth - (depth - 1) match { - case -1 => - case 0 => - display0 = that.display0 - case 1 => - display1 = that.display1 - display0 = that.display0 - case 2 => - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - case 3 => - display3 = 
that.display3 - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - case 4 => - display4 = that.display4 - display3 = that.display3 - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - case 5 => - display5 = that.display5 - display4 = that.display4 - display3 = that.display3 - display2 = that.display2 - display1 = that.display1 - display0 = that.display0 - } + private[this] def advance(): Unit = { + val pos = i1-len1+totalLength + if(pos == sliceEnd) advanceSlice() + if(sliceDim > 1) { + val io = pos - sliceStart + val xor = oldPos ^ io + advanceA(io, xor) + oldPos = io } + len1 -= i1 + a1len = mmin(a1.length, len1) + i1 = 0 + } - // requires structure is at pos oldIndex = xor ^ index - private[immutable] final def getElem(index: Int, xor: Int): T = { - if (xor < (1 << 5)) { // level = 0 - (display0 - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 10)) { // level = 1 - (display1 - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 15)) { // level = 2 - (display2 - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 20)) { // level = 3 - (display3 - ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 25)) { // level = 4 - (display4 - ((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else if (xor < (1 << 30)) { // level = 5 - (display5 - ((index >>> 25) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - ((index 
>>> 10) & 31).asInstanceOf[Array[AnyRef]] - ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - (index & 31).asInstanceOf[T]) - } else { // level = 6 - throw new IllegalArgumentException() - } + private[this] def advanceA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2(0) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3(0) + a1 = a2(0) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } else { + a5 = a6(io >>> BITS5) + a4 = a5(0) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) } + } - // go to specific position - // requires structure is at pos oldIndex = xor ^ index, - // ensures structure is at pos index - private[immutable] final def gotoPos(index: Int, xor: Int): Unit = { - if (xor < (1 << 5)) { // level = 0 - // we're already at the block start pos - } else if (xor < (1 << 10)) { // level = 1 - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 15)) { // level = 2 - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 20)) { // level = 3 - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 25)) { // level = 4 - display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 30)) { // level = 5 - display4 = display5((index >>> 25) & 31).asInstanceOf[Array[AnyRef]] - display3 = display4((index >>> 20) & 
31).asInstanceOf[Array[AnyRef]] - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else { // level = 6 - throw new IllegalArgumentException() - } + private[this] def setA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else { + a5 = a6(io >>> BITS5) + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) } + } - // USED BY ITERATOR - - // xor: oldIndex ^ index - private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos - if (xor < (1 << 10)) { // level = 1 - display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 15)) { // level = 2 - display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 20)) { // level = 3 - display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]] - display1 = display2(0).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 25)) { // level = 4 - display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]] - display2 = display3(0).asInstanceOf[Array[AnyRef]] - display1 = display2(0).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else if (xor < (1 << 30)) { // level = 5 - display4 = display5((index >>> 25) & 
31).asInstanceOf[Array[AnyRef]] - display3 = display4(0).asInstanceOf[Array[AnyRef]] - display2 = display3(0).asInstanceOf[Array[AnyRef]] - display1 = display2(0).asInstanceOf[Array[AnyRef]] - display0 = display1(0).asInstanceOf[Array[AnyRef]] - } else { // level = 6 - throw new IllegalArgumentException() + override def drop(n: Int): Iterator[A] = { + if(n > 0) { + val oldpos = i1-len1+totalLength + val newpos = mmin(oldpos + n, totalLength) + if(newpos == totalLength) { + i1 = 0 + len1 = 0 + a1len = 0 + } else { + while(newpos >= sliceEnd) advanceSlice() + val io = newpos - sliceStart + if(sliceDim > 1) { + val xor = oldPos ^ io + setA(io, xor) + oldPos = io + } + a1len = a1.length + i1 = io & MASK + len1 = i1 + (totalLength-newpos) + if(a1len > len1) a1len = len1 } } + this + } - // USED BY BUILDER - - // xor: oldIndex ^ index - private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos - if (xor < (1 << 10)) { // level = 1 - if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth += 1 } - display0 = new Array(32) - display1((index >>> 5) & 31) = display0 - } else if (xor < (1 << 15)) { // level = 2 - if (depth == 2) { display2 = new Array(32); display2(0) = display1; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - } else if (xor < (1 << 20)) { // level = 3 - if (depth == 3) { display3 = new Array(32); display3(0) = display2; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display2 = new Array(32) - display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - display3((index >>> 15) & 31) = display2 - } else if (xor < (1 << 25)) { // level = 4 - if (depth == 4) { display4 = new Array(32); display4(0) = display3; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display2 = new Array(32) - display3 = new Array(32) - 
display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - display3((index >>> 15) & 31) = display2 - display4((index >>> 20) & 31) = display3 - } else if (xor < (1 << 30)) { // level = 5 - if (depth == 5) { display5 = new Array(32); display5(0) = display4; depth += 1 } - display0 = new Array(32) - display1 = new Array(32) - display2 = new Array(32) - display3 = new Array(32) - display4 = new Array(32) - display1((index >>> 5) & 31) = display0 - display2((index >>> 10) & 31) = display1 - display3((index >>> 15) & 31) = display2 - display4((index >>> 20) & 31) = display3 - display5((index >>> 25) & 31) = display4 - } else { // level = 6 - throw new IllegalArgumentException() - } + override def take(n: Int): Iterator[A] = { + if(n < knownSize) { + val trunc = knownSize - mmax(0, n) + totalLength -= trunc + len1 -= trunc + if(len1 < a1len) a1len = len1 + if(totalLength < sliceEnd) sliceEnd = totalLength } + this + } - // STUFF BELOW USED BY APPEND / UPDATE + override def slice(from: Int, until: Int): Iterator[A] = { + val _until = mmax(until, 0) - private[immutable] final def copyOf(a: Array[AnyRef]): Array[AnyRef] = { - val copy = new Array[AnyRef](a.length) - java.lang.System.arraycopy(a, 0, copy, 0, a.length) - copy - } + val n = + if(from > 0) { + drop(from) + _until - from + } else _until + take(n) + } - private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int): Array[AnyRef] = { - val x = array(index) - array(index) = null - copyOf(x.asInstanceOf[Array[AnyRef]]) + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val xsLen = xs.length + val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) + var copied = 0 + val isBoxed = xs.isInstanceOf[Array[AnyRef]] + while(copied < total) { + if(i1 == a1len) advance() + val count = mmin(total-copied, a1.length-i1) + if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) + else Array.copy(a1, i1, xs, start+copied, count) + 
i1 += count + copied += count } + total + } - // make sure there is no aliasing - // requires structure is at pos index - // ensures structure is clean and at pos index and writable at all levels except 0 + override def toVector: Vector[A] = + v.slice(i1-len1+totalLength, totalLength) - private[immutable] final def stabilize(index: Int) = (depth - 1) match { - case 5 => - display5 = copyOf(display5) - display4 = copyOf(display4) - display3 = copyOf(display3) - display2 = copyOf(display2) - display1 = copyOf(display1) - display5((index >>> 25) & 31) = display4 - display4((index >>> 20) & 31) = display3 - display3((index >>> 15) & 31) = display2 - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 4 => - display4 = copyOf(display4) - display3 = copyOf(display3) - display2 = copyOf(display2) - display1 = copyOf(display1) - display4((index >>> 20) & 31) = display3 - display3((index >>> 15) & 31) = display2 - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 3 => - display3 = copyOf(display3) - display2 = copyOf(display2) - display1 = copyOf(display1) - display3((index >>> 15) & 31) = display2 - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 2 => - display2 = copyOf(display2) - display1 = copyOf(display1) - display2((index >>> 10) & 31) = display1 - display1((index >>> 5) & 31) = display0 - case 1 => - display1 = copyOf(display1) - display1((index >>> 5) & 31) = display0 - case 0 => - } + protected[immutable] def split(at: Int): NewVectorIterator[A] = { + val it2 = clone().asInstanceOf[NewVectorIterator[A]] + it2.take(at) + drop(at) + it2 + } +} - /// USED IN UPDATE AND APPEND BACK +private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) + extends Stepper[A] with EfficientSplit { - // prepare for writing at an existing position + protected[this] def build(it: NewVectorIterator[A]): Semi - // 
requires structure is clean and at pos oldIndex = xor ^ newIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match { - case 5 => - display5 = copyOf(display5) - display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31) - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 4 => - display4 = copyOf(display4) - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 3 => - display3 = copyOf(display3) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 2 => - display2 = copyOf(display2) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 1 => - display1 = copyOf(display1) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - case 0 => - display0 = copyOf(display0) - } + final def hasStep: Boolean = it.hasNext + final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED - // requires structure is dirty and at pos oldIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { - if (xor < (1 << 5)) { // level = 0 - display0 = copyOf(display0) - } else if (xor < (1 << 10)) { // level = 1 - display1 = copyOf(display1) - display1((oldIndex >>> 5) & 31) = display0 - 
display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 15)) { // level = 2 - display1 = copyOf(display1) - display2 = copyOf(display2) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 20)) { // level = 3 - display1 = copyOf(display1) - display2 = copyOf(display2) - display3 = copyOf(display3) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display3((oldIndex >>> 15) & 31) = display2 - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 25)) { // level = 4 - display1 = copyOf(display1) - display2 = copyOf(display2) - display3 = copyOf(display3) - display4 = copyOf(display4) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display3((oldIndex >>> 15) & 31) = display2 - display4((oldIndex >>> 20) & 31) = display3 - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else if (xor < (1 << 30)) { // level = 5 - display1 = copyOf(display1) - display2 = copyOf(display2) - display3 = copyOf(display3) - display4 = copyOf(display4) - display5 = copyOf(display5) - display1((oldIndex >>> 5) & 31) = display0 - display2((oldIndex >>> 10) & 31) = display1 - display3((oldIndex >>> 15) & 31) = display2 - display4((oldIndex >>> 20) & 31) = display3 - display5((oldIndex >>> 25) & 31) = display4 - display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31) - display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) - 
display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) - display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) - display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) - } else { // level = 6 - throw new IllegalArgumentException() - } - } + final def estimateSize: Long = it.knownSize + def trySplit(): Sub = { + val len = it.knownSize + if(len > 1) build(it.split(len >>> 1)) + else null + } - // USED IN DROP + override final def iterator: Iterator[A] = it +} - private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = { - val elems = new Array[AnyRef](32) - java.lang.System.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft, oldLeft)) - elems - } +private class AnyVectorStepper[A](it: NewVectorIterator[A]) + extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { + protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) + def nextStep(): A = it.next() +} +private class DoubleVectorStepper(it: NewVectorIterator[Double]) + extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { + protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) + def nextStep(): Double = it.next() +} - // USED IN APPEND - // create a new block at the bottom level (and possibly nodes on its path) and prepares for writing +private class IntVectorStepper(it: NewVectorIterator[Int]) + extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { + protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) + def nextStep(): Int = it.next() +} - // requires structure is clean and at pos oldIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos - if (xor < (1 << 5)) { // level = 0 - // we're already at the block start - 
} else if (xor < (1 << 10)) { // level = 1 - if (depth == 1) { - display1 = new Array(32) - display1((oldIndex >>> 5) & 31) = display0 - depth += 1 - } - display0 = new Array(32) - } else if (xor < (1 << 15)) { // level = 2 - if (depth == 2) { - display2 = new Array(32) - display2((oldIndex >>> 10) & 31) = display1 - depth += 1 - } - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else if (xor < (1 << 20)) { // level = 3 - if (depth == 3) { - display3 = new Array(32) - display3((oldIndex >>> 15) & 31) = display2 - depth += 1 - } - display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]] - if (display2 == null) display2 = new Array(32) - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else if (xor < (1 << 25)) { // level = 4 - if (depth == 4) { - display4 = new Array(32) - display4((oldIndex >>> 20) & 31) = display3 - depth += 1 - } - display3 = display4((newIndex >>> 20) & 31).asInstanceOf[Array[AnyRef]] - if (display3 == null) display3 = new Array(32) - display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]] - if (display2 == null) display2 = new Array(32) - display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else if (xor < (1 << 30)) { // level = 5 - if (depth == 5) { - display5 = new Array(32) - display5((oldIndex >>> 25) & 31) = display4 - depth += 1 - } - display4 = display5((newIndex >>> 25) & 31).asInstanceOf[Array[AnyRef]] - if (display4 == null) display4 = new Array(32) - display3 = display4((newIndex >>> 20) & 31).asInstanceOf[Array[AnyRef]] - if (display3 == null) display3 = new Array(32) - display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]] - if (display2 == null) display2 = new Array(32) - display1 = 
display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]] - if (display1 == null) display1 = new Array(32) - display0 = new Array(32) - } else { // level = 6 - throw new IllegalArgumentException() - } - } +private class LongVectorStepper(it: NewVectorIterator[Long]) + extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { + protected[this] def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) + def nextStep(): Long = it.next() +} - // requires structure is dirty and at pos oldIndex, - // ensures structure is dirty and at pos newIndex and writable at level 0 - private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { - stabilize(oldIndex) - gotoFreshPosWritable0(oldIndex, newIndex, xor) - } + +// The following definitions are needed for binary compatibility with ParVector +private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { + private[immutable] var it: NewVectorIterator[A @uncheckedVariance] = _ + def hasNext: Boolean = it.hasNext + def next(): A = it.next() + private[collection] def remainingElementCount: Int = it.size + private[collection] def remainingVector: Vector[A] = it.toVector } diff --git a/src/library/scala/collection/immutable/VectorMap.scala b/src/library/scala/collection/immutable/VectorMap.scala new file mode 100644 index 000000000000..361427a86c53 --- /dev/null +++ b/src/library/scala/collection/immutable/VectorMap.scala @@ -0,0 +1,278 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.annotation.{nowarn, tailrec} + +/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. + * + * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense + * of using extra memory and generally lower performance for other operations + * + * @tparam K the type of the keys contained in this vector map. + * @tparam V the type of the values associated with the keys in this vector map. + * + * @define coll immutable vector map + * @define Coll `immutable.VectorMap` + */ +final class VectorMap[K, +V] private ( + private[immutable] val fields: Vector[Any], // K | Tombstone | Null + private[immutable] val underlying: Map[K, (Int, V)], + dropped: Int) + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] + with MapFactoryDefaults[K, V, VectorMap, Iterable] { + + import VectorMap._ + + override protected[this] def className: String = "VectorMap" + + private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = this(fields, underlying, 0) + + override val size = underlying.size + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { + underlying.get(key) match { + case Some((slot, _)) => + new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) + case None => + new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) + } + } + + override def withDefault[V1 >: V](d: K => V1): Map[K, V1] = + new Map.WithDefault(this, d) + + override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = + new Map.WithDefault[K, V1](this, _ => d) + + def get(key: K): Option[V] = underlying.get(key) match { + case Some(v) => Some(v._2) + case None => None + } + + @tailrec + private def 
nextValidField(slot: Int): (Int, K) = { + if (slot >= fields.size) (-1, null.asInstanceOf[K]) + else fields(slot) match { + case Tombstone(distance) => nextValidField(slot + distance) + case k /*: K | Null */ => (slot, k.asInstanceOf[K]) + } + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val fieldsLength = fields.length + private[this] var slot = -1 + private[this] var key: K = null.asInstanceOf[K] + + private[this] def advance(): Unit = { + val nextSlot = slot + 1 + if (nextSlot >= fieldsLength) { + slot = fieldsLength + key = null.asInstanceOf[K] + } else { + nextValidField(nextSlot) match { + case (-1, _) => + slot = fieldsLength + key = null.asInstanceOf[K] + case (s, k) => + slot = s + key = k + } + } + } + + advance() + + override def hasNext: Boolean = slot < fieldsLength + + override def next(): (K, V) = + if (!hasNext) Iterator.empty.next() + else { + val result = (key, underlying(key)._2) + advance() + result + } + } + + // No-Op overrides to allow for more efficient steppers in a minor release. + // Refining the return type to `S with EfficientSplit` is binary compatible. 
+ + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) + + + def removed(key: K): VectorMap[K, V] = { + if (isEmpty) empty + else { + var fs = fields + val sz = fs.size + underlying.get(key) match { + case Some(_) if size == 1 => empty + case Some((slot, _)) => + val s = slot - dropped + + // Calculate next of kin + val next = + if (s < sz - 1) fs(s + 1) match { + case Tombstone(d) => s + d + 1 + case _ => s + 1 + } else s + 1 + + fs = fs.updated(s, Tombstone(next - s)) + + // Calculate first index of preceding tombstone sequence + val first = + if (s > 0) { + fs(s - 1) match { + case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 + case Tombstone(d) if d == 1 => s - 1 + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case _ => s + } + }else s + fs = fs.updated(first, Tombstone(next - first)) + + // Calculate last index of succeeding tombstone sequence + val last = next - 1 + if (last != first) { + fs = fs.updated(last, Tombstone(first - 1 - last)) + } + new VectorMap(fs, underlying - key, dropped) + case _ => + this + } + } + } + + override def mapFactory: MapFactory[VectorMap] = VectorMap + + override def contains(key: K): Boolean = underlying.contains(key) + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + val lastSlot = fields.length - 1 + val last = fields.last match { + case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] + case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => k.asInstanceOf[K] 
+ } + (last, underlying(last)._2) + } + + override def lastOption: Option[(K, V)] = { + if (isEmpty) None + else Some(last) + } + + override def tail: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.tail") + val (slot, key) = nextValidField(0) + new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) + } + + override def init: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + val lastSlot = fields.size - 1 + val (slot, key) = fields.last match { + case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) + case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => (lastSlot, k.asInstanceOf[K]) + } + new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) + } + + /** A [[Vector]] of the keys contained by this map. + * + * @return a [[Vector]] of the keys contained by this map. + */ + @nowarn("msg=overriding method keys") + override def keys: Vector[K] = keysIterator.toVector + + override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { + override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) + } +} + +object VectorMap extends MapFactory[VectorMap] { + //Class to mark deleted slots in 'fields'. + //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' + // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). + //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' + // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. + //For other deleted slots, it simply indicates that they have been deleted. 
+ private[VectorMap] final case class Tombstone(distance: Int) + + private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = + new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) + + def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): VectorMap[K, V] = + it match { + case vm: VectorMap[K, V] => vm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] +} + +private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { + private[this] val vectorBuilder = new VectorBuilder[K] + private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] + private[this] var aliased: VectorMap[K, V] = _ + + override def clear(): Unit = { + vectorBuilder.clear() + mapBuilder.clear() + aliased = null + } + + override def result(): VectorMap[K, V] = { + if (aliased eq null) { + aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) + } + aliased + } + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + mapBuilder.getOrElse(key, null) match { + case (slot, _) => + mapBuilder.addOne(key, (slot, value)) + case null => + val vectorSize = vectorBuilder.size + vectorBuilder.addOne(key) + mapBuilder.addOne(key, (vectorSize, value)) + } + } + this + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) +} diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala index 2bca5b08bb0c..a6c0256fe800 100644 --- a/src/library/scala/collection/immutable/WrappedString.scala +++ b/src/library/scala/collection/immutable/WrappedString.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,56 +10,130 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package immutable -import generic._ -import mutable.{Builder, StringBuilder} +import scala.Predef.{wrapString => _, assert} +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.CharStringStepper +import scala.collection.mutable.{Builder, StringBuilder} /** - * This class serves as a wrapper augmenting `String`s with all the operations - * found in indexed sequences. - * - * The difference between this class and `StringOps` is that calling transformer - * methods such as `filter` and `map` will yield an object of type `WrappedString` - * rather than a `String`. - * - * @param self a string contained within this wrapped string - * - * @since 2.8 - * @define Coll `WrappedString` - * @define coll wrapped string - */ -final class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] { + * This class serves as a wrapper augmenting `String`s with all the operations + * found in indexed sequences. + * + * The difference between this class and `StringOps` is that calling transformer + * methods such as `filter` and `map` will yield an object of type `WrappedString` + * rather than a `String`. 
+ * + * @param self a string contained within this wrapped string + * + * @define Coll `WrappedString` + * @define coll wrapped string + */ +@SerialVersionUID(3L) +final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, WrappedString] + with Serializable { - override protected[this] def thisCollection: WrappedString = this - override protected[this] def toCollection(repr: WrappedString): WrappedString = repr + def apply(i: Int): Char = self.charAt(i) - /** Creates a string builder buffer as builder for this class */ - override protected[this] def newBuilder = WrappedString.newBuilder + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder + override def empty: WrappedString = WrappedString.empty override def slice(from: Int, until: Int): WrappedString = { val start = if (from < 0) 0 else from - if (until <= start || start >= repr.length) - return new WrappedString("") + if (until <= start || start >= self.length) + return WrappedString.empty val end = if (until > length) length else until - new WrappedString(repr.substring(start, end)) + new WrappedString(self.substring(start, end)) } override def length = self.length override def toString = self + override def view: StringView = new StringView(self) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { + val st = new CharStringStepper(self, 0, self.length) + val r = + if (shape.shape == StepperShape.CharShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def startsWith[B >: Char](that: IterableOnce[B], offset: Int = 0): Boolean = + that match { + case s: 
WrappedString => self.startsWith(s.self, offset) + case _ => super.startsWith(that, offset) + } + + override def endsWith[B >: Char](that: collection.Iterable[B]): Boolean = + that match { + case s: WrappedString => self.endsWith(s.self) + case _ => super.endsWith(that) + } + + override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { + case c: Char => self.indexOf(c, from) + case _ => super.indexOf(elem, from) + } + + override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = + elem match { + case c: Char => self.lastIndexOf(c, end) + case _ => super.lastIndexOf(elem, end) + } + + override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int = + (xs: Any) match { + case chs: Array[Char] => + val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) + self.getChars(0, copied, chs, start) + copied + case _ => super.copyToArray(xs, start, len) + } + + override def appendedAll[B >: Char](suffix: IterableOnce[B]): IndexedSeq[B] = + suffix match { + case s: WrappedString => new WrappedString(self concat s.self) + case _ => super.appendedAll(suffix) + } + + override def sameElements[B >: Char](o: IterableOnce[B]) = o match { + case s: WrappedString => self == s.self + case _ => super.sameElements(o) + } + + override protected[this] def className = "WrappedString" + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + override def equals(other: Any): Boolean = other match { + case that: WrappedString => + this.self == that.self + case _ => + super.equals(other) + } } /** A companion object for wrapped strings. 
- * - * @since 2.8 - */ -object WrappedString { - implicit val canBuildFrom: CanBuildFrom[WrappedString, Char, WrappedString] = new CanBuildFrom[WrappedString, Char, WrappedString] { - def apply(from: WrappedString) = newBuilder - def apply() = newBuilder + */ +@SerialVersionUID(3L) +object WrappedString extends SpecificIterableFactory[Char, WrappedString] { + def fromSpecific(it: IterableOnce[Char]): WrappedString = { + val b = newBuilder + b.sizeHint(it) + b ++= it + b.result() } + val empty: WrappedString = new WrappedString("") + def newBuilder: Builder[Char, WrappedString] = + new StringBuilder().mapResult(x => new WrappedString(x)) - def newBuilder: Builder[Char, WrappedString] = StringBuilder.newBuilder mapResult (x => new WrappedString(x)) + implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { + def unwrap: String = value.self + } } diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala new file mode 100644 index 000000000000..6a92c8cef284 --- /dev/null +++ b/src/library/scala/collection/immutable/package.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + + +package object immutable { + type StringOps = scala.collection.StringOps + val StringOps = scala.collection.StringOps + type StringView = scala.collection.StringView + val StringView = scala.collection.StringView + + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + + @deprecated("Use Map instead of DefaultMap", "2.13.0") + type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] +} diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 8cef2815c93d..9ad433309b10 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,10 @@ package scala package collection package mutable -import generic.CanBuildFrom +import scala.annotation.meta.companionClass +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions /** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. * @@ -39,28 +42,28 @@ import generic.CanBuildFrom * rapidly as 2^30^ is approached. 
* */ -@SerialVersionUID(1L) -final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) -extends AbstractMap[K, V] - with Map[K, V] - with MapLike[K, V, AnyRefMap[K, V]] - with Serializable -{ +@(deprecated @companionClass)("Use `scala.collection.mutable.HashMap` instead for better performance.", since = "2.13.16") +class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[K, V] + with MapOps[K, V, Map, AnyRefMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] + with Serializable { + import AnyRefMap._ - def this() = this(AnyRefMap.exceptionDefault, 16, true) + def this() = this(AnyRefMap.exceptionDefault, 16, initBlank = true) /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: K => V) = this(defaultEntry, 16, true) + def this(defaultEntry: K => V) = this(defaultEntry, 16, initBlank = true) /** Creates a new `AnyRefMap` with an initial buffer of specified size. * * An `AnyRefMap` can typically contain half as many elements as its buffer size * before it requires resizing. */ - def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) + def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, initBlank = true) /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, initBlank = true) private[this] var mask = 0 private[this] var _size = 0 @@ -71,7 +74,7 @@ extends AbstractMap[K, V] if (initBlank) defaultInitialize(initialBufferSize) - private[this] def defaultInitialize(n: Int) { + private[this] def defaultInitialize(n: Int): Unit = { mask = if (n<0) 0x7 else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 @@ -82,11 +85,23 @@ extends AbstractMap[K, V] private[collection] def initializeTo( m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] - ) { + ): Unit = { mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz } + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): AnyRefMap[K,V] = { + var sz = coll.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + coll.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder + override def size: Int = _size + override def knownSize: Int = size + override def isEmpty: Boolean = _size == 0 override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) private def imbalanced: Boolean = @@ -118,7 +133,7 @@ extends AbstractMap[K, V] e | MissingBit } - private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { + @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { var e = h & mask var x = 0 var g = 0 @@ -148,13 +163,17 @@ extends AbstractMap[K, V] val h = hashOf(key) var i = seekEntryOrOpen(h, key) if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or even contain what we want now - // (but if it does, we'll replace it) val value = { - val oh = 
_hashes + val ohs = _hashes + val j = i & IndexMask + val oh = ohs(j) val ans = defaultValue - if (oh ne _hashes) { + // Evaluating `defaultValue` may change the map + // - repack: the array is different + // - element added at `j`: since `i < 0`, the key was missing and `oh` is either 0 or MinValue. + // If `defaultValue` added an element at `j` then `_hashes(j)` must be different now. + // (`hashOf` never returns 0 or MinValue.) + if (ohs.ne(_hashes) || oh != _hashes(j)) { i = seekEntryOrOpen(h, key) if (i >= 0) _size -= 1 } @@ -197,9 +216,9 @@ extends AbstractMap[K, V] /** Defers to defaultEntry to find a default value for the key. Throws an * exception if no other default behavior was specified. */ - override def default(key: K) = defaultEntry(key) + override def default(key: K): V = defaultEntry(key) - private def repack(newMask: Int) { + private def repack(newMask: Int): Unit = { val oh = _hashes val ok = _keys val ov = _values @@ -231,7 +250,7 @@ extends AbstractMap[K, V] * improved performance. Repacking takes time proportional to the number * of entries in the map. 
*/ - def repack() { + def repack(): Unit = { var m = mask if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask while (m > 8 && 8*_size < m) m = m >>> 1 @@ -240,12 +259,11 @@ extends AbstractMap[K, V] override def put(key: K, value: V): Option[V] = { val h = hashOf(key) - val k = key - val i = seekEntryOrOpen(h, k) + val i = seekEntryOrOpen(h, key) if (i < 0) { val j = i & IndexMask _hashes(j) = h - _keys(j) = k + _keys(j) = key _values(j) = value.asInstanceOf[AnyRef] _size += 1 if ((i & VacantBit) != 0) _vacant -= 1 @@ -255,7 +273,6 @@ extends AbstractMap[K, V] else { val ans = Some(_values(i).asInstanceOf[V]) _hashes(i) = h - _keys(i) = k _values(i) = value.asInstanceOf[AnyRef] ans } @@ -267,12 +284,11 @@ extends AbstractMap[K, V] */ override def update(key: K, value: V): Unit = { val h = hashOf(key) - val k = key - val i = seekEntryOrOpen(h, k) + val i = seekEntryOrOpen(h, key) if (i < 0) { val j = i & IndexMask _hashes(j) = h - _keys(j) = k + _keys(j) = key _values(j) = value.asInstanceOf[AnyRef] _size += 1 if ((i & VacantBit) != 0) _vacant -= 1 @@ -280,17 +296,20 @@ extends AbstractMap[K, V] } else { _hashes(i) = h - _keys(i) = k _values(i) = value.asInstanceOf[AnyRef] } } /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") def +=(key: K, value: V): this.type = { update(key, value); this } - def +=(kv: (K, V)): this.type = { update(kv._1, kv._2); this } + /** Adds a new key/value pair to this map and returns the map. 
*/ + @inline final def addOne(key: K, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this } - def -=(key: K): this.type = { + def subtractOne(key: K): this.type = { val i = seekEntry(hashOf(key), key) if (i >= 0) { _size -= 1 @@ -302,7 +321,17 @@ extends AbstractMap[K, V] this } - def iterator: Iterator[(K, V)] = new Iterator[(K, V)] { + def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] { + protected def nextResult(k: K, v: V) = (k, v) + } + override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] { + protected def nextResult(k: K, v: V) = k + } + override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] { + protected def nextResult(k: K, v: V) = v + } + + private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { private[this] val hz = _hashes private[this] val kz = _keys private[this] val vz = _values @@ -319,17 +348,20 @@ extends AbstractMap[K, V] true } - def next: (K, V) = { + def next(): A = { if (hasNext) { - val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) + val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) index += 1 ans } else throw new NoSuchElementException("next") } + + protected def nextResult(k: K, v: V): A } - override def foreach[U](f: ((K,V)) => U) { + + override def foreach[U](f: ((K,V)) => U): Unit = { var i = 0 var e = _size while (e > 0) { @@ -343,34 +375,51 @@ extends AbstractMap[K, V] } } + override def foreachEntry[U](f: (K,V) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) + i += 1 + e -= 1 + } + else return + } + } + override def clone(): AnyRefMap[K, V] = { val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) val kz = java.util.Arrays.copyOf(_keys, _keys.length) val 
vz = java.util.Arrays.copyOf(_values, _values.length) - val arm = new AnyRefMap[K, V](defaultEntry, 1, false) + val arm = new AnyRefMap[K, V](defaultEntry, 1, initBlank = false) arm.initializeTo(mask, _size, _vacant, hz, kz, vz) arm } - override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V1]] - arm += kv - arm - } + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V1]] - xs.foreach(kv => arm += kv) - arm + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) } - override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V1]] - arm += (key, value) + override def concat[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V2]] + xs.iterator.foreach(kv => arm += kv) arm } - private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) { + override def ++[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = + clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) + + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { var i,j = 0 while (i < _hashes.length & j < _size) { val h = _hashes(i) @@ -383,17 +432,17 @@ extends AbstractMap[K, V] } /** Applies a function to all keys of this map. 
*/ - def foreachKey[A](f: K => A) { foreachElement[K,A](_keys, f) } + def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A) { foreachElement[V,A](_values, f) } + def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) /** Creates a new `AnyRefMap` with different values. * Unlike `mapValues`, this method generates a new * collection immediately. */ def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { - val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) + val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, initBlank = false) val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) val kz = java.util.Arrays.copyOf(_keys, _keys.length) val vz = new Array[AnyRef](_values.length) @@ -410,10 +459,16 @@ extends AbstractMap[K, V] arm } + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + /** Applies a transformation function to all values stored in this map. * Note: the default, if any, is not transformed. */ - def transformValues(f: V => V): this.type = { + def transformValuesInPlace(f: V => V): this.type = { var i,j = 0 while (i < _hashes.length & j < _size) { val h = _hashes(i) @@ -426,6 +481,33 @@ extends AbstractMap[K, V] this } + // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override). + // Previously, in Scala 2, f took `K with AnyRef` scala/bug#11035 + /** + * An overload of `map` which produces an `AnyRefMap`. 
+ * + * @param f the mapping function must produce a key-value pair where the key is an `AnyRef` + * @param dummy an implicit placeholder for purposes of distinguishing the (erased) signature of this method + */ + def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.Map(this, f)) + /** + * An overload of `flatMap` which produces an `AnyRefMap`. + * + * @param f the mapping function must produce key-value pairs where the key is an `AnyRef` + * @param dummy an implicit placeholder for purposes of distinguishing the (erased) signature of this method + */ + def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.FlatMap(this, f)) + /** + * An overload of `collect` which produces an `AnyRefMap`. + * + * @param pf the mapping function must produce a key-value pair where the key is an `AnyRef` + * @param dummy an implicit placeholder for purposes of distinguishing the (erased) signature of this method + */ + def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) + override def clear(): Unit = { import java.util.Arrays.fill fill(_keys, null) @@ -435,46 +517,49 @@ extends AbstractMap[K, V] _vacant = 0 } + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "AnyRefMap" } +@deprecated("Use `scala.collection.mutable.HashMap` instead for better performance.", since = "2.13.16") object AnyRefMap { private final val IndexMask = 0x3FFFFFFF private final val MissingBit = 0x80000000 private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - @SerialVersionUID(1L) private class 
ExceptionDefault extends (Any => Nothing) with Serializable { def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) } private val exceptionDefault = new ExceptionDefault - implicit def canBuildFrom[K <: AnyRef, V, J <: AnyRef, U]: CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] = - ReusableCBFInstance.asInstanceOf[CanBuildFrom[AnyRefMap[K, V], (J, U), AnyRefMap[J, U]]] - private[this] val ReusableCBFInstance = new CanBuildFrom[AnyRefMap[AnyRef, Any], (AnyRef, Any), AnyRefMap[AnyRef, Any]] { - def apply(from: AnyRefMap[AnyRef, Any]): AnyRefMapBuilder[AnyRef, Any] = apply() - def apply(): AnyRefMapBuilder[AnyRef, Any] = new AnyRefMapBuilder[AnyRef, Any] - } - /** A builder for instances of `AnyRefMap`. * * This builder can be reused to create multiple instances. */ final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] - def +=(entry: (K, V)): this.type = { + def addOne(entry: (K, V)): this.type = { elems += entry this } - def clear() { elems = new AnyRefMap[K, V] } + def clear(): Unit = elems = new AnyRefMap[K, V] def result(): AnyRefMap[K, V] = elems + override def knownSize: Int = elems.knownSize } /** Creates a new `AnyRefMap` with zero or more key/value pairs. 
*/ - def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = { - val sz = if (elems.hasDefiniteSize) elems.size else 4 + def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + + def newBuilder[K <: AnyRef, V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + + private def buildFromIterableOnce[K <: AnyRef, V](elems: IterableOnce[(K, V)]): AnyRefMap[K, V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 val arm = new AnyRefMap[K, V](sz * 2) - elems.foreach{ case (k,v) => arm(k) = v } + elems.iterator.foreach{ case (k,v) => arm(k) = v } if (arm.size < (sz>>3)) arm.repack() arm } @@ -485,6 +570,19 @@ object AnyRefMap { /** Creates a new empty `AnyRefMap` with the supplied default */ def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + /** Creates a new `AnyRefMap` from an existing source collection. A source collection + * which is already an `AnyRefMap` gets cloned. + * + * @param source Source collection + * @tparam K the type of the keys + * @tparam V the type of the values + * @return a new `AnyRefMap` with the elements of `source` + */ + def from[K <: AnyRef, V](source: IterableOnce[(K, V)]): AnyRefMap[K, V] = source match { + case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] + case _ => buildFromIterableOnce(source) + } + /** Creates a new `AnyRefMap` from arrays of keys and values. * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
*/ @@ -505,8 +603,25 @@ object AnyRefMap { val arm = new AnyRefMap[K, V](sz * 2) val ki = keys.iterator val vi = values.iterator - while (ki.hasNext && vi.hasNext) arm(ki.next) = vi.next + while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() if (arm.size < (sz >> 3)) arm.repack() arm } + + implicit def toFactory[K <: AnyRef, V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def toBuildFrom[K <: AnyRef, V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from(it) + def newBuilder(from: Any): ReusableBuilder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def iterableFactory[K <: AnyRef, V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) } diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 92f157bfd74f..bc0f39af6829 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,184 +14,394 @@ package scala package collection package mutable -import generic._ -import parallel.mutable.ParArray +import java.util.Arrays +import scala.annotation.{nowarn, tailrec} +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.{CommonErrors, DefaultSerializable} +import scala.runtime.PStatics.VM_MaxArraySize /** An implementation of the `Buffer` class using an array to * represent the assembled sequence internally. Append, update and random * access take constant time (amortized time). Prepends and removes are * linear in the buffer size. * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] * section on `Array Buffers` for more information. - * * @tparam A the type of this arraybuffer's elements. * * @define Coll `mutable.ArrayBuffer` * @define coll array buffer - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]` - * is defined in object `ArrayBuffer`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ArrayBuffer`. 
* @define orderDependent * @define orderDependentFold * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(1529165946227428979L) -class ArrayBuffer[A](override protected val initialSize: Int) +@SerialVersionUID(-1582447879429021880L) +class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) extends AbstractBuffer[A] - with Buffer[A] - with GenericTraversableTemplate[A, ArrayBuffer] - with BufferLike[A, ArrayBuffer[A]] - with IndexedSeqOptimized[A, ArrayBuffer[A]] - with Builder[A, ArrayBuffer[A]] - with ResizableArray[A] - with CustomParallelizable[A, ParArray[A]] - with Serializable { + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with IterableFactoryDefaults[A, ArrayBuffer] + with DefaultSerializable { - override def companion: GenericCompanion[ArrayBuffer] = ArrayBuffer + def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) - import scala.collection.Traversable + def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) - def this() = this(16) + @transient private[this] var mutationCount: Int = 0 - def clear() { reduceToSize(0) } + // needs to be `private[collection]` or `protected[collection]` for parallel-collections + protected[collection] var array: Array[AnyRef] = initialElements + protected var size0 = initialSize - override def sizeHint(len: Int) { - if (len > size && len >= 1) { - val newarray = new Array[AnyRef](len) - java.lang.System.arraycopy(array, 0, newarray, 0, size0) - array = newarray - } + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) } - override def par = ParArray.handoff[A](array.asInstanceOf[Array[A]], size) + override def knownSize: Int = 
super[IndexedSeqOps].knownSize + + /** Ensure that the internal array has at least `n` cells. */ + protected def ensureSize(n: Int): Unit = { + array = ArrayBuffer.ensureSize(array, size0, n) + } - /** Appends a single element to this buffer and returns - * the identity of the buffer. It takes constant amortized time. + /** Uses the given size to resize internal storage, if necessary. * - * @param elem the element to append. - * @return the updated buffer. + * @param size Expected maximum number of elements. */ - def +=(elem: A): this.type = { - ensureSize(size0 + 1) - array(size0) = elem.asInstanceOf[AnyRef] - size0 += 1 - this + def sizeHint(size: Int): Unit = + if(size > length && size >= 1) ensureSize(size) + + /** Reduce length to `n`, nulling out all dropped elements */ + private def reduceToSize(n: Int): Unit = { + mutationCount += 1 + Arrays.fill(array, n, size0, null) + size0 = n } - /** Appends a number of elements provided by a traversable object. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - * @return the updated buffer. + /** Trims the ArrayBuffer to an appropriate size for the current + * number of elements (rounding up to the next natural size), + * which may replace the array by a shorter one. + * This allows releasing some unused memory. */ - override def ++=(xs: TraversableOnce[A]): this.type = xs match { - case v: scala.collection.IndexedSeqLike[_, _] => - val n = v.length - ensureSize(size0 + n) - v.copyToArray(array.asInstanceOf[scala.Array[Any]], size0, n) - size0 += n - this - case _ => - super.++=(xs) - } - - /** Prepends a single element to this buffer and returns - * the identity of the buffer. It takes time linear in - * the buffer size. - * - * @param elem the element to prepend. - * @return the updated buffer. + def trimToSize(): Unit = { + resize(length) + } + + /** Trims the `array` buffer size down to either a power of 2 + * or Int.MaxValue while keeping first `requiredLength` elements. 
*/ - def +=:(elem: A): this.type = { + private def resize(requiredLength: Int): Unit = + array = ArrayBuffer.downsize(array, requiredLength) + + @inline private def checkWithinBounds(lo: Int, hi: Int) = { + if (lo < 0) throw CommonErrors.indexOutOfBounds(index = lo, max = size0 - 1) + if (hi > size0) throw CommonErrors.indexOutOfBounds(index = hi - 1, max = size0 - 1) + } + + def apply(n: Int): A = { + checkWithinBounds(n, n + 1) + array(n).asInstanceOf[A] + } + + def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index + 1) + mutationCount += 1 + array(index) = elem.asInstanceOf[AnyRef] + } + + def length = size0 + + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) + + override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer + + /** Note: This does not actually resize the internal representation. + * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = reduceToSize(0) + + /** + * Clears this buffer and shrinks to @param size (rounding up to the next + * natural size) + * @param size + */ + def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { + clear() + resize(size) + this + } + + def addOne(elem: A): this.type = { + mutationCount += 1 + val newSize = size0 + 1 + if(array.length <= newSize - 1) ensureSize(newSize) + size0 = newSize + array(newSize - 1) = elem.asInstanceOf[AnyRef] + this + } + + // Overridden to use array copying for efficiency where possible. 
+ override def addAll(elems: IterableOnce[A]): this.type = { + elems match { + case elems: ArrayBuffer[_] => + val elemsLength = elems.size0 + if (elemsLength > 0) { + mutationCount += 1 + ensureSize(size0 + elemsLength) + Array.copy(elems.array, 0, array, length, elemsLength) + size0 = length + elemsLength + } + case _ => super.addAll(elems) + } + this + } + + def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index) + mutationCount += 1 ensureSize(size0 + 1) - copy(0, 1, size0) - array(0) = elem.asInstanceOf[AnyRef] + Array.copy(array, index, array, index + 1, size0 - index) size0 += 1 + this(index) = elem + } + + def prepend(elem: A): this.type = { + insert(0, elem) this } - /** Prepends a number of elements provided by a traversable object. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - * @return the updated buffer. - */ - override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this } + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = { + checkWithinBounds(index, index) + elems match { + case elems: collection.Iterable[A] => + val elemsLength = elems.size + if (elemsLength > 0) { + mutationCount += 1 + ensureSize(size0 + elemsLength) + val len = size0 + Array.copy(array, index, array, index + elemsLength, len - index) + // if `elems eq this`, this copy is safe because + // - `elems.array eq this.array` + // - we didn't overwrite the values being inserted after moving them in + // the previous line + // - `copyElemsToArray` will call `System.arraycopy` + // - `System.arraycopy` will effectively "read" all the values before + // overwriting any of them when two arrays are the same reference + val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) + if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") + size0 = len + 
elemsLength // update size AFTER the copy, in case we're inserting a proxy + } + case _ => insertAll(index, ArrayBuffer.from(elems)) + } + } - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a new - * one. Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param seq the traversable object providing all elements to insert. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def insertAll(n: Int, seq: Traversable[A]) { - if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString) - val len = seq.size - val newSize = size0 + len - ensureSize(newSize) - - copy(n, n + len, size0 - n) - seq.copyToArray(array.asInstanceOf[Array[Any]], n) - size0 = newSize + /** Note: This does not actually resize the internal representation. + * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int): A = { + checkWithinBounds(index, index + 1) + val res = this(index) + Array.copy(array, index + 1, array, index, size0 - (index + 1)) + reduceToSize(size0 - 1) + res + } + + /** Note: This does not actually resize the internal representation. 
+ * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = + if (count > 0) { + checkWithinBounds(index, index + count) + Array.copy(array, index + count, array, index, size0 - (index + count)) + reduceToSize(size0 - count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) + } + + @deprecated("Use 'this' instance instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def result(): this.type = this + + @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayBuffer" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. 
+ */ + override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } + this + } + + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) + + override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + override def reduceLeft[B >: A](op: (B, A) => B): B = + if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) + else super.reduceLeft(op) + + override def reduceRight[B >: A](op: (A, B) => B): B = + if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) + else super.reduceRight(op) + + override def sliding(size: Int, step: Int): Iterator[ArrayBuffer[A]] = + new MutationTracker.CheckedIterator(super.sliding(size = size, step = step), mutationCount) +} + +/** + * Factory object for the `ArrayBuffer` class. 
+ * + * $factoryInfo + * + * @define coll array buffer + * @define Coll `mutable.ArrayBuffer` + */ +@SerialVersionUID(3L) +object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { + final val DefaultInitialSize = 16 + private[this] val emptyArray = new Array[AnyRef](0) + + def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + val k = coll.knownSize + if (k >= 0) { + // Avoid reallocation of buffer if length is known + val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + new ArrayBuffer[B](array, k) + } + else new ArrayBuffer[B] ++= coll + } + + def newBuilder[A]: Builder[A, ArrayBuffer[A]] = new GrowableBuilder[A, ArrayBuffer[A]](empty[A]) { + override def sizeHint(size: Int): Unit = elems.sizeHint(size) } - /** Removes the element on a given index position. It takes time linear in - * the buffer size. + def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + + /** + * The increased size for an array-backed collection. * - * @param n the index which refers to the first element to remove. - * @param count the number of elements to remove. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length - count` (with `count > 0`). - * @throws IllegalArgumentException if `count < 0`. + * @param arrayLen the length of the backing array + * @param targetLen the minimum length to resize up to + * @return + * - `-1` if no resizing is needed, else + * - `VM_MaxArraySize` if `arrayLen` is too large to be doubled, else + * - `max(targetLen, arrayLen * 2, DefaultInitialSize)`. + * - Throws an exception if `targetLen` exceeds `VM_MaxArraySize` or is negative (overflow). 
*/ - override def remove(n: Int, count: Int) { - if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString) - else if (count == 0) return // Did nothing - if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString) - copy(n + count, n, size0 - (n + count)) - reduceToSize(size0 - count) + private[mutable] def resizeUp(arrayLen: Int, targetLen: Int): Int = + if (targetLen < 0) throw new RuntimeException(s"Overflow while resizing array of array-backed collection. Requested length: $targetLen; current length: $arrayLen; increase: ${targetLen - arrayLen}") + else if (targetLen <= arrayLen) -1 + else if (targetLen > VM_MaxArraySize) throw new RuntimeException(s"Array of array-backed collection exceeds VM length limit of $VM_MaxArraySize. Requested length: $targetLen; current length: $arrayLen") + else if (arrayLen > VM_MaxArraySize / 2) VM_MaxArraySize + else math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) + + // if necessary, copy (curSize elements of) the array to a new array of capacity n. + // Should use Array.copyOf(array, resizeEnsuring(array.length))? + private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Int): Array[AnyRef] = { + val newLen = resizeUp(array.length, targetSize) + if (newLen < 0) array + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, curSize) + res + } } - /** Removes the element at a given index position. - * - * @param n the index which refers to the element to delete. - * @return the element that was formerly at position `n`. 
+ /** + * @param arrayLen the length of the backing array + * @param targetLen the length to resize down to, if smaller than `arrayLen` + * @return -1 if no resizing is needed, or the size for the new array otherwise */ - def remove(n: Int): A = { - val result = apply(n) - remove(n, 1) - result + private def resizeDown(arrayLen: Int, targetLen: Int): Int = + if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) + private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { + val newLen = resizeDown(array.length, targetSize) + if (newLen < 0) array + else if (newLen == 0) emptyArray + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, targetSize) + res + } } +} - def result: ArrayBuffer[A] = this +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) + extends AbstractIndexedSeqView[A] { + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. + this({ + val _array = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + } + }, () => 0) + } - /** Defines the prefix of the string representation. 
- */ - override def stringPrefix: String = "ArrayBuffer" + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] -} + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length + override protected[this] def className = "ArrayBufferView" -/** Factory object for the `ArrayBuffer` class. - * - * $factoryInfo - * @define coll array buffer - * @define Coll `ArrayBuffer` - */ -object ArrayBuffer extends SeqFactory[ArrayBuffer] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ArrayBuffer[A]] = new ArrayBuffer[A] -} + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new 
CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala index bb342e1c2801..e962dd024836 100644 --- a/src/library/scala/collection/mutable/ArrayBuilder.scala +++ b/src/library/scala/collection/mutable/ArrayBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,24 +10,74 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable +import scala.collection.mutable.ArrayBuffer.resizeUp import scala.reflect.ClassTag /** A builder class for arrays. - * - * @since 2.8 * * @tparam T the type of the elements for the builder. 
*/ -@SerialVersionUID(-4721309866680431208L) -abstract class ArrayBuilder[T] extends ReusableBuilder[T, Array[T]] with Serializable +@SerialVersionUID(3L) +sealed abstract class ArrayBuilder[T] + extends ReusableBuilder[T, Array[T]] + with Serializable { + protected[this] var capacity: Int = 0 + protected[this] def elems: Array[T] // may not be allocated at size = capacity = 0 + protected var size: Int = 0 + + /** Current number of elements. */ + def length: Int = size + + /** Current number of elements. */ + override def knownSize: Int = size + + protected[this] final def ensureSize(size: Int): Unit = { + val newLen = resizeUp(capacity, size) + if (newLen > 0) resize(newLen) + } + + override final def sizeHint(size: Int): Unit = if (capacity < size) resize(size) + + def clear(): Unit = size = 0 + + protected[this] def resize(size: Int): Unit + + /** Add all elements of an array. */ + def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) + + /** Add a slice of an array. */ + def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { + val offset1 = offset.max(0) + val length1 = length.max(0) + val effectiveLength = length1.min(xs.length - offset1) + doAddAll(xs, offset1, effectiveLength) + } + + private def doAddAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { + if (length > 0) { + ensureSize(this.size + length) + Array.copy(xs, offset, elems, this.size, length) + size += length + } + this + } + + override def addAll(xs: IterableOnce[T]): this.type = { + val k = xs.knownSize + if (k > 0) { + ensureSize(this.size + k) + val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + size += k + } else if (k < 0) super.addAll(xs) + this + } +} /** A companion object for array builders. 
- * - * @since 2.8 */ object ArrayBuilder { @@ -36,23 +86,19 @@ object ArrayBuilder { * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. * @return a new empty array builder. */ - def make[T: ClassTag](): ArrayBuilder[T] = { + @inline def make[T: ClassTag]: ArrayBuilder[T] = { val tag = implicitly[ClassTag[T]] - val cls = tag.runtimeClass - if (cls.isPrimitive) { - cls match { - case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] - case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] - case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] - case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] - case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] - case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] - case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] - case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] - case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] - } - } else { - new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] + tag.runtimeClass match { + case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] + case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] + case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] + case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] + case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] + case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] + case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] + case 
java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] + case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] + case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] } } @@ -62,12 +108,10 @@ object ArrayBuilder { * * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. */ - @SerialVersionUID(-8376727444766075941L) - final class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] { + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { - private var elems: Array[T] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[T] = _ private def mkArray(size: Int): Array[T] = { if (capacity == size && capacity > 0) elems @@ -75,50 +119,33 @@ object ArrayBuilder { else java.util.Arrays.copyOf[T](elems, size) } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: T): this.type = { + def addOne(elem: T): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[T]): this.type = (xs.asInstanceOf[AnyRef]) match { - case xs: WrappedArray.ofRef[_] => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[T] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } + 
override def clear(): Unit = { + super.clear() + if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) + } + override def equals(other: Any): Boolean = other match { case x: ofRef[_] => (size == x.size) && (elems == x.elems) case _ => false @@ -128,59 +155,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `byte`s. It can be reused. */ - @SerialVersionUID(-3484148043254823366L) + @SerialVersionUID(3L) final class ofByte extends ArrayBuilder[Byte] { - private var elems: Array[Byte] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Byte] = _ private def mkArray(size: Int): Array[Byte] = { - if (size == 0) Array.emptyByteArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Byte](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Byte): this.type = { + def addOne(elem: Byte): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Byte]): this.type = xs match { - case xs: WrappedArray.ofByte => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Byte] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -194,59 +197,35 @@ object ArrayBuilder { } /** A class for array 
builders for arrays of `short`s. It can be reused. */ - @SerialVersionUID(3295904306819377609L) + @SerialVersionUID(3L) final class ofShort extends ArrayBuilder[Short] { - private var elems: Array[Short] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Short] = _ private def mkArray(size: Int): Array[Short] = { - if (size == 0) Array.emptyShortArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Short](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Short): this.type = { + def addOne(elem: Short): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Short]): this.type = xs match { - case xs: WrappedArray.ofShort => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Short] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -260,59 +239,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `char`s. It can be reused. 
*/ - @SerialVersionUID(-8284807600792805165L) + @SerialVersionUID(3L) final class ofChar extends ArrayBuilder[Char] { - private var elems: Array[Char] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Char] = _ private def mkArray(size: Int): Array[Char] = { - if (size == 0) Array.emptyCharArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Char](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Char): this.type = { + def addOne(elem: Char): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Char]): this.type = xs match { - case xs: WrappedArray.ofChar => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Char] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -326,59 +281,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `int`s. It can be reused. 
*/ - @SerialVersionUID(-3033902589330485711L) + @SerialVersionUID(3L) final class ofInt extends ArrayBuilder[Int] { - private var elems: Array[Int] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Int] = _ private def mkArray(size: Int): Array[Int] = { - if (size == 0) Array.emptyIntArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Int](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Int): this.type = { + def addOne(elem: Int): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Int]): this.type = xs match { - case xs: WrappedArray.ofInt => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Int] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -392,59 +323,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `long`s. It can be reused. 
*/ - @SerialVersionUID(-4278005356053656861L) + @SerialVersionUID(3L) final class ofLong extends ArrayBuilder[Long] { - private var elems: Array[Long] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Long] = _ private def mkArray(size: Int): Array[Long] = { - if (size == 0) Array.emptyLongArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Long](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Long): this.type = { + def addOne(elem: Long): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Long]): this.type = xs match { - case xs: WrappedArray.ofLong => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Long] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -458,59 +365,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `float`s. It can be reused. 
*/ - @SerialVersionUID(-740775369715282824L) + @SerialVersionUID(3L) final class ofFloat extends ArrayBuilder[Float] { - private var elems: Array[Float] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Float] = _ private def mkArray(size: Int): Array[Float] = { - if (size == 0) Array.emptyFloatArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Float](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Float): this.type = { + def addOne(elem: Float): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Float]): this.type = xs match { - case xs: WrappedArray.ofFloat => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Float] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -524,59 +407,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `double`s. It can be reused. 
*/ - @SerialVersionUID(2549152794429074790L) + @SerialVersionUID(3L) final class ofDouble extends ArrayBuilder[Double] { - private var elems: Array[Double] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Double] = _ private def mkArray(size: Int): Array[Double] = { - if (size == 0) Array.emptyDoubleArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Double](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Double): this.type = { + def addOne(elem: Double): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Double]): this.type = xs match { - case xs: WrappedArray.ofDouble => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Double] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -590,59 +449,35 @@ object ArrayBuilder { } /** A class for array builders for arrays of `boolean`s. It can be reused. 
*/ - @SerialVersionUID(-3574834070591819420L) + @SerialVersionUID(3L) class ofBoolean extends ArrayBuilder[Boolean] { - private var elems: Array[Boolean] = _ - private var capacity: Int = 0 - private var size: Int = 0 + protected var elems: Array[Boolean] = _ private def mkArray(size: Int): Array[Boolean] = { - if (size == 0) Array.emptyBooleanArray - else if (elems eq null) new Array(size) - else java.util.Arrays.copyOf(elems, size) + val newelems = new Array[Boolean](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems } - private def resize(size: Int) { + protected[this] def resize(size: Int): Unit = { elems = mkArray(size) capacity = size } - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: Boolean): this.type = { + def addOne(elem: Boolean): this.type = { ensureSize(size + 1) elems(size) = elem size += 1 this } - override def ++=(xs: TraversableOnce[Boolean]): this.type = xs match { - case xs: WrappedArray.ofBoolean => - ensureSize(this.size + xs.length) - Array.copy(xs.array, 0, elems, this.size, xs.length) - size += xs.length - this - case _ => - super.++=(xs) - } - - def clear() { size = 0 } - - def result() = { + def result(): Array[Boolean] = { if (capacity != 0 && capacity == size) { capacity = 0 - elems + val res = elems + elems = null + res } else mkArray(size) } @@ -656,30 +491,37 @@ object ArrayBuilder { } /** A class for array builders for arrays of `Unit` type. It can be reused. 
*/ - @SerialVersionUID(1995804197797796249L) + @SerialVersionUID(3L) final class ofUnit extends ArrayBuilder[Unit] { - private var size: Int = 0 + protected def elems: Array[Unit] = throw new UnsupportedOperationException() - def +=(elem: Unit): this.type = { - size += 1 + def addOne(elem: Unit): this.type = { + val newSize = size + 1 + ensureSize(newSize) + size = newSize this } - override def ++=(xs: TraversableOnce[Unit]): this.type = { - size += xs.size + override def addAll(xs: IterableOnce[Unit]): this.type = { + val newSize = size + xs.iterator.size + ensureSize(newSize) + size = newSize this } - def clear() { size = 0 } + override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { + val newSize = size + length + ensureSize(newSize) + size = newSize + this + } def result() = { - if (size == 0) Array.emptyUnitArray - else { - val ans = new Array[Unit](size) - java.util.Arrays.fill(ans.asInstanceOf[Array[AnyRef]], ()) - ans - } + val ans = new Array[Unit](size) + var i = 0 + while (i < size) { ans(i) = (); i += 1 } + ans } override def equals(other: Any): Boolean = other match { @@ -687,6 +529,8 @@ object ArrayBuilder { case _ => false } + protected[this] def resize(size: Int): Unit = capacity = size + override def toString = "ArrayBuilder.ofUnit" } } diff --git a/src/library/scala/collection/mutable/ArrayDeque.scala b/src/library/scala/collection/mutable/ArrayDeque.scala new file mode 100644 index 000000000000..ca70f31d1869 --- /dev/null +++ b/src/library/scala/collection/mutable/ArrayDeque.scala @@ -0,0 +1,640 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.{CommonErrors, DefaultSerializable} +import scala.reflect.ClassTag + +/** An implementation of a double-ended queue that internally uses a resizable circular buffer. + * + * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) + * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i)) + * and thus insertions and removals from end/beginning are fast. + * + * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. + * + * @tparam A the type of this ArrayDeque's elements. + * + * @define Coll `mutable.ArrayDeque` + * @define coll array deque + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class ArrayDeque[A] protected ( + protected var array: Array[AnyRef], + private[ArrayDeque] var start: Int, + private[ArrayDeque] var end: Int +) extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with IterableFactoryDefaults[A, ArrayDeque] + with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] + with Cloneable[ArrayDeque[A]] + with DefaultSerializable { + + reset(array, start, end) + + private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { + assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") + requireBounds(idx = start, until = array.length) + requireBounds(idx = end, until = array.length) + this.array = array + this.start = start + this.end = end + } + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + // No-Op 
override to allow for more efficient stepper in a minor release. + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) + + def apply(idx: Int): A = { + requireBounds(idx) + _get(idx) + } + + def update(idx: Int, elem: A): Unit = { + requireBounds(idx) + _set(idx, elem) + } + + def addOne(elem: A): this.type = { + ensureSize(length + 1) + appendAssumingCapacity(elem) + } + + def prepend(elem: A): this.type = { + ensureSize(length + 1) + prependAssumingCapacity(elem) + } + + @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { + array(end) = elem.asInstanceOf[AnyRef] + end = end_+(1) + this + } + + @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { + start = start_-(1) + array(start) = elem.asInstanceOf[AnyRef] + this + } + + override def prependAll(elems: IterableOnce[A]): this.type = { + val it = elems.iterator + if (it.nonEmpty) { + val n = length + // The following code resizes the current collection at most once and traverses elems at most twice + elems.knownSize match { + // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq + case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) + + // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront + case srcLength if mustGrow(srcLength + n) => + val finalLength = srcLength + n + val array2 = ArrayDeque.alloc(finalLength) + @annotation.unused val copied = it.copyToArray(array2.asInstanceOf[Array[A]]) + //assert(copied == srcLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + + // Just fill up from (start - srcLength) to (start - 1) and move back start + case srcLength => + // Optimized version of 
`elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` + var i = 0 + while(i < srcLength) { + _set(i - srcLength, it.next()) + i += 1 + } + start = start_-(srcLength) + } + } + this + } + + override def addAll(elems: IterableOnce[A]): this.type = { + elems.knownSize match { + case srcLength if srcLength > 0 => + ensureSize(srcLength + length) + elems.iterator.foreach(appendAssumingCapacity) + case _ => elems.iterator.foreach(+=) + } + this + } + + def insert(idx: Int, elem: A): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prepend(elem) + } else if (idx == n) { + addOne(elem) + } else { + val finalLength = n + 1 + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + array2(idx) = elem.asInstanceOf[AnyRef] + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (n <= idx * 2) { + var i = n - 1 + while(i >= idx) { + _set(i + 1, _get(i)) + i -= 1 + } + end = end_+(1) + i += 1 + _set(i, elem) + } else { + var i = 0 + while(i < idx) { + _set(i - 1, _get(i)) + i += 1 + } + start = start_-(1) + _set(i, elem) + } + } + } + + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prependAll(elems) + } else if (idx == n) { + addAll(elems) + } else { + // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) + val (it, srcLength) = { + val _srcLength = elems.knownSize + if (_srcLength >= 0) (elems.iterator, _srcLength) + else { + val indexed = IndexedSeq.from(elems) + (indexed.iterator, indexed.size) + } + } + if (it.nonEmpty) { + val finalLength = srcLength + n + // Either we resize right away or move prefix left or suffix right + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + 
copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + @annotation.unused val copied = it.copyToArray(array2.asInstanceOf[Array[A]], idx) + //assert(copied == srcLength) + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx >= n) { // Cheaper to shift the suffix right + var i = n - 1 + while(i >= idx) { + _set(i + srcLength, _get(i)) + i -= 1 + } + end = end_+(srcLength) + while(it.hasNext) { + i += 1 + _set(i, it.next()) + } + } else { // Cheaper to shift prefix left + var i = 0 + while(i < idx) { + _set(i - srcLength, _get(i)) + i += 1 + } + start = start_-(srcLength) + while(it.hasNext) { + _set(i, it.next()) + i += 1 + } + } + } + } + } + + def remove(idx: Int, count: Int): Unit = { + if (count > 0) { + requireBounds(idx) + val n = length + val removals = Math.min(n - idx, count) + val finalLength = n - removals + val suffixStart = idx + removals + // If we know we can resize after removing, do it right away using arrayCopy + // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left + if (shouldShrink(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx <= finalLength) { // Cheaper to move the prefix right + var i = suffixStart - 1 + while(i >= removals) { + _set(i, _get(i - removals)) + i -= 1 + } + while(i >= 0) { + _set(i, null.asInstanceOf[A]) + i -= 1 + } + start = start_+(removals) + } else { // Cheaper to move the suffix left + var i = idx + while(i < finalLength) { + _set(i, _get(i + removals)) + i += 1 + } + while(i < n) { + _set(i, null.asInstanceOf[A]) + i += 1 + } + end = end_-(removals) + } + } else { + 
require(count == 0, s"removing negative number of elements: $count") + } + } + + def remove(idx: Int): A = { + val elem = this(idx) + remove(idx, 1) + elem + } + + override def subtractOne(elem: A): this.type = { + val idx = indexOf(elem) + if (idx >= 0) remove(idx, 1) //TODO: SeqOps should be fluent API + this + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the first element (throws exception when empty) + * See also removeHeadOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeHead(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) + + @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + val elem = array(start) + array(start) = null + start = start_+(1) + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the last element (throws exception when empty) + * See also removeLastOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeLast(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new 
NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr) + + @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + end = end_-(1) + val elem = array(end) + array(end) = null + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * Remove all elements from this collection and return the elements while emptying this data structure + * @return + */ + def removeAll(): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + elems.sizeHint(length) + while(nonEmpty) { + elems += removeHeadAssumingNonEmpty() + } + elems.result() + } + + /** + * Remove all elements from this collection and return the elements in reverse while emptying this data structure + * @return + */ + def removeAllReverse(): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + elems.sizeHint(length) + while(nonEmpty) { + elems += removeLastAssumingNonEmpty() + } + elems.result() + } + + /** + * Returns and removes all elements from the left of this queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return + */ + def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + while(headOption.exists(f)) { + elems += removeHeadAssumingNonEmpty() + } + elems.result() + } + + /** + * Returns and removes all elements from the right of this queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return + */ + def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + while(lastOption.exists(f)) { + elems += removeLastAssumingNonEmpty() + } + elems.result() + } + + /** Returns the first element which satisfies the given predicate after or at some start index + * and 
removes this element from the collections + * + * @param p the predicate used for choosing the first element + * @param from the start index + * @return the first element of the queue for which p yields true + */ + def removeFirst(p: A => Boolean, from: Int = 0): Option[A] = { + val i = indexWhere(p, from) + if (i < 0) None else Some(remove(i)) + } + + /** Returns all elements in this collection which satisfy the given predicate + * and removes those elements from this collections. + * + * @param p the predicate used for choosing elements + * @return a sequence of all elements in the queue for which + * p yields true. + */ + def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = { + val res = scala.collection.immutable.Seq.newBuilder[A] + var i, j = 0 + while (i < size) { + if (p(this(i))) { + res += this(i) + } else { + if (i != j) { + this(j) = this(i) + } + j += 1 + } + i += 1 + } + if (i != j) takeInPlace(j) + res.result() + } + + @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint) + + def length = end_-(start) + + override def isEmpty = start == end + + override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end) + + override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque + + /** + * Note: This does not actually resize the internal representation. 
+ * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = { + while(nonEmpty) { + removeHeadAssumingNonEmpty() + } + } + + /** + * Clears this buffer and shrinks to @param size + * + * @param size + * @return + */ + def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = { + reset(array = ArrayDeque.alloc(size), start = 0, end = 0) + this + } + + protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = + new ArrayDeque[A](array, start = 0, end) + + override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) + if (copied > 0) { + copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) + } + copied + } + + override def toArray[B >: A: ClassTag]: Array[B] = + copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) + + /** + * Trims the capacity of this ArrayDeque's instance to be the current size + */ + def trimToSize(): Unit = resize(length) + + // Utils for common modular arithmetic: + @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1) + @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1) + @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1) + @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1) + + // Note: here be overflow dragons! This is used for int overflow + // assumptions in resize(). Use caution changing. + @inline private[this] def mustGrow(len: Int) = { + len >= array.length + } + + // Assumes that 0 <= len < array.length! + @inline private[this] def shouldShrink(len: Int) = { + // To avoid allocation churn, only shrink when array is large + // and less than 2/5 filled. + array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len + } + + // Assumes that 0 <= len < array.length! 
+ @inline private[this] def canShrink(len: Int) = { + array.length > ArrayDeque.DefaultInitialSize && array.length - len > len + } + + @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A] + + @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef] + + // Assumes that 0 <= len. + private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) { + val n = length + val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n) + reset(array = array2, start = 0, end = n) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayDeque" +} + +/** + * $factoryInfo + * @define coll array deque + * @define Coll `ArrayDeque` + */ +@SerialVersionUID(3L) +object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { + + def from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + val s = coll.knownSize + if (s >= 0) { + val array = alloc(s) + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != s) throw new IllegalStateException(s"Copied $actual of $s") + new ArrayDeque[B](array, start = 0, end = s) + } else new ArrayDeque[B]() ++= coll + } + + def newBuilder[A]: Builder[A, ArrayDeque[A]] = + new GrowableBuilder[A, ArrayDeque[A]](empty) { + override def sizeHint(size: Int): Unit = { + elems.ensureSize(size) + } + } + + def empty[A]: ArrayDeque[A] = new ArrayDeque[A]() + + final val DefaultInitialSize = 16 + + /** + * We try to not repeatedly resize arrays smaller than this + */ + private[ArrayDeque] final val StableSize = 128 + + /** + * Allocates an array whose size is next power of 2 > `len` + * Largest possible len is 1<<30 - 1 + * + * @param len + * @return + */ + private[mutable] def alloc(len: Int) = { + require(len >= 0, s"Non-negative array size required") + val size = (1 << 31) >>> 
java.lang.Integer.numberOfLeadingZeros(len) << 1 + require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len") + new Array[AnyRef](Math.max(size, DefaultInitialSize)) + } +} + +trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] { + protected def array: Array[AnyRef] + + final override def clone(): C = klone() + + protected def klone(): C + + protected def ofArray(array: Array[AnyRef], end: Int): C + + protected def start_+(idx: Int): Int + + @inline protected final def requireBounds(idx: Int, until: Int = length): Unit = + if (idx < 0 || idx >= until) + throw CommonErrors.indexOutOfBounds(index = idx, max = until - 1) + + /** + * This is a more general version of copyToArray - this also accepts a srcStart unlike copyToArray + * This copies maxItems elements from this collections srcStart to dest's destStart + * If we reach the end of either collections before we could copy maxItems, we simply stop copying + * + * @param dest + * @param srcStart + * @param destStart + * @param maxItems + */ + def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = { + requireBounds(destStart, dest.length+1) + val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart)) + if (toCopy > 0) { + requireBounds(srcStart) + val startIdx = start_+(srcStart) + val block1 = Math.min(toCopy, array.length - startIdx) + Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1) + val block2 = toCopy - block1 + if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2) + } + dest + } + + override def reverse: C = { + val n = length + val arr = ArrayDeque.alloc(n) + var i = 0 + while(i < n) { + arr(i) = this(n - i - 1).asInstanceOf[AnyRef] + i += 1 + } + ofArray(arr, n) + } + + override def slice(from: Int, until: Int): C = { + val n = length + val left = Math.max(0, Math.min(n, from)) + 
val right = Math.max(0, Math.min(n, until)) + val len = right - left + if (len <= 0) { + empty + } else if (len >= n) { + klone() + } else { + val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) + ofArray(array2, len) + } + } + + override def sliding(@deprecatedName("window") size: Int, step: Int): Iterator[C] = + super.sliding(size = size, step = step) + + override def grouped(n: Int): Iterator[C] = sliding(n, n) +} diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala deleted file mode 100644 index e5ec996eab0d..000000000000 --- a/src/library/scala/collection/mutable/ArrayLike.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out the - * `deep` method for arrays and wrapped arrays and serves as a marker trait - * for array wrappers. - * - * @tparam A type of the elements contained in the array like object. - * @tparam Repr the type of the actual collection containing the elements. - * - * @define Coll `ArrayLike` - * @since 2.8 - */ -trait ArrayLike[A, +Repr] extends Any with IndexedSeqOptimized[A, Repr] { self => - - /** Creates a possible nested `IndexedSeq` which consists of all the elements - * of this array. If the elements are arrays themselves, the `deep` transformation - * is applied recursively to them. The `stringPrefix` of the `IndexedSeq` is - * "Array", hence the `IndexedSeq` prints like an array with all its - * elements shown, and the same recursively for any subarrays. 
- * - * Example: - * {{{ - * Array(Array(1, 2), Array(3, 4)).deep.toString - * }}} - * prints: `Array(Array(1, 2), Array(3, 4))` - * - * @return An possibly nested indexed sequence of consisting of all the elements of the array. - */ - def deep: scala.collection.IndexedSeq[Any] = new scala.collection.AbstractSeq[Any] with scala.collection.IndexedSeq[Any] { - def length = self.length - def apply(idx: Int): Any = self.apply(idx) match { - case x: AnyRef if x.getClass.isArray => WrappedArray.make(x).deep - case x => x - } - override def stringPrefix = "Array" - } -} diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala deleted file mode 100644 index 0d67933db94f..000000000000 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ /dev/null @@ -1,316 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.reflect.ClassTag -import parallel.mutable.ParArray - -/** This class serves as a wrapper for `Array`s with all the operations found in - * indexed sequences. Where needed, instances of arrays are implicitly converted - * into this class. - * - * The difference between this class and `WrappedArray` is that calling transformer - * methods such as `filter` and `map` will yield an array, whereas a `WrappedArray` - * will remain a `WrappedArray`. - * - * @since 2.8 - * - * @tparam T type of the elements contained in this array. 
- * - * @define Coll `Array` - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] { - - private def elementClass: Class[_] = - repr.getClass.getComponentType - - override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) { - val l = len min repr.length min (xs.length - start) - if (l > 0) Array.copy(repr, 0, xs, start, l) - } - - override def slice(from: Int, until: Int): Array[T] = { - val reprVal = repr - val lo = math.max(from, 0) - val hi = math.min(math.max(until, 0), reprVal.length) - val size = math.max(hi - lo, 0) - val result = java.lang.reflect.Array.newInstance(elementClass, size) - if (size > 0) { - Array.copy(reprVal, lo, result, 0, size) - } - result.asInstanceOf[Array[T]] - } - - override def toArray[U >: T : ClassTag]: Array[U] = { - val thatElementClass = implicitly[ClassTag[U]].runtimeClass - if (elementClass eq thatElementClass) - repr.asInstanceOf[Array[U]] - else - super.toArray[U] - } - - def :+[B >: T: ClassTag](elem: B): Array[B] = { - val currentLength = repr.length - val result = new Array[B](currentLength + 1) - Array.copy(repr, 0, result, 0, currentLength) - result(currentLength) = elem - result - } - - def +:[B >: T: ClassTag](elem: B): Array[B] = { - val currentLength = repr.length - val result = new Array[B](currentLength + 1) - result(0) = elem - Array.copy(repr, 0, result, 1, currentLength) - result - } - - override def par = ParArray.handoff(repr) - - /** Flattens a two-dimensional array by concatenating all its rows - * into a single array. - * - * @tparam U Type of row elements. - * @param asTrav A function that converts elements of this array to rows - arrays of type `U`. - * @return An array obtained by concatenating rows of this array. 
- */ - def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = { - val b = Array.newBuilder[U] - b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum) - for (xs <- this) - b ++= asTrav(xs) - b.result() - } - - /** Transposes a two dimensional array. - * - * @tparam U Type of row elements. - * @param asArray A function that converts elements of this array to rows - arrays of type `U`. - * @return An array obtained by replacing elements of this arrays with rows the represent. - */ - def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = { - val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass)) - if (isEmpty) bb.result() - else { - def mkRowBuilder() = Array.newBuilder(ClassTag[U](elementClass.getComponentType)) - val bs = asArray(head) map (_ => mkRowBuilder()) - for (xs <- this) { - var i = 0 - for (x <- asArray(xs)) { - bs(i) += x - i += 1 - } - } - for (b <- bs) bb += b.result() - bb.result() - } - } - - /** Converts an array of pairs into an array of first elements and an array of second elements. - * - * @tparam T1 the type of the first half of the element pairs - * @tparam T2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this Array is a pair. - * @param ct1 a class tag for T1 type parameter that is required to create an instance - * of Array[T1] - * @param ct2 a class tag for T2 type parameter that is required to create an instance - * of Array[T2] - * @return a pair of Arrays, containing, respectively, the first and second half - * of each element pair of this Array. 
- */ - // implementation NOTE: ct1 and ct2 can't be written as context bounds because desugared - // implicits are put in front of asPair parameter that is supposed to guide type inference - def unzip[T1, T2](implicit asPair: T => (T1, T2), ct1: ClassTag[T1], ct2: ClassTag[T2]): (Array[T1], Array[T2]) = { - val a1 = new Array[T1](length) - val a2 = new Array[T2](length) - var i = 0 - while (i < length) { - val e = apply(i) - a1(i) = e._1 - a2(i) = e._2 - i += 1 - } - (a1, a2) - } - - /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. - * - * @tparam T1 the type of the first of three elements in the triple - * @tparam T2 the type of the second of three elements in the triple - * @tparam T3 the type of the third of three elements in the triple - * @param asTriple an implicit conversion which asserts that the element type - * of this Array is a triple. - * @param ct1 a class tag for T1 type parameter that is required to create an instance - * of Array[T1] - * @param ct2 a class tag for T2 type parameter that is required to create an instance - * of Array[T2] - * @param ct3 a class tag for T3 type parameter that is required to create an instance - * of Array[T3] - * @return a triple of Arrays, containing, respectively, the first, second, and third - * elements from each element triple of this Array. 
- */ - // implementation NOTE: ct1, ct2, ct3 can't be written as context bounds because desugared - // implicits are put in front of asPair parameter that is supposed to guide type inference - def unzip3[T1, T2, T3](implicit asTriple: T => (T1, T2, T3), ct1: ClassTag[T1], ct2: ClassTag[T2], - ct3: ClassTag[T3]): (Array[T1], Array[T2], Array[T3]) = { - val a1 = new Array[T1](length) - val a2 = new Array[T2](length) - val a3 = new Array[T3](length) - var i = 0 - while (i < length) { - val e = apply(i) - a1(i) = e._1 - a2(i) = e._2 - a3(i) = e._3 - i += 1 - } - (a1, a2, a3) - } - - def seq = thisCollection -} - -/** - * A companion object for `ArrayOps`. - * - * @since 2.8 - */ -object ArrayOps { - - /** A class of `ArrayOps` for arrays containing reference types. */ - final class ofRef[T <: AnyRef](override val repr: Array[T]) extends AnyVal with ArrayOps[T] with ArrayLike[T, Array[T]] { - - override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr) - override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr) - override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(ClassTag[T](repr.getClass.getComponentType)) - - def length: Int = repr.length - def apply(index: Int): T = repr(index) - def update(index: Int, elem: T) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Byte`s. 
*/ - final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] { - - override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr) - override protected[this] def toCollection(repr: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofByte - - def length: Int = repr.length - def apply(index: Int): Byte = repr(index) - def update(index: Int, elem: Byte) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Short`s. */ - final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] { - - override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr) - override protected[this] def toCollection(repr: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofShort - - def length: Int = repr.length - def apply(index: Int): Short = repr(index) - def update(index: Int, elem: Short) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Char`s. */ - final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] { - - override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr) - override protected[this] def toCollection(repr: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofChar - - def length: Int = repr.length - def apply(index: Int): Char = repr(index) - def update(index: Int, elem: Char) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Int`s. 
*/ - final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] { - - override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr) - override protected[this] def toCollection(repr: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofInt - - def length: Int = repr.length - def apply(index: Int): Int = repr(index) - def update(index: Int, elem: Int) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Long`s. */ - final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] { - - override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr) - override protected[this] def toCollection(repr: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofLong - - def length: Int = repr.length - def apply(index: Int): Long = repr(index) - def update(index: Int, elem: Long) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Float`s. */ - final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] { - - override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr) - override protected[this] def toCollection(repr: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofFloat - - def length: Int = repr.length - def apply(index: Int): Float = repr(index) - def update(index: Int, elem: Float) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Double`s. 
*/ - final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] { - - override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr) - override protected[this] def toCollection(repr: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofDouble - - def length: Int = repr.length - def apply(index: Int): Double = repr(index) - def update(index: Int, elem: Double) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays containing `Boolean`s. */ - final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] { - - override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr) - override protected[this] def toCollection(repr: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofBoolean - - def length: Int = repr.length - def apply(index: Int): Boolean = repr(index) - def update(index: Int, elem: Boolean) { repr(index) = elem } - } - - /** A subclass of `ArrayOps` for arrays of `Unit` types. 
*/ - final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] { - - override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr) - override protected[this] def toCollection(repr: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(repr) - override protected[this] def newBuilder = new ArrayBuilder.ofUnit - - def length: Int = repr.length - def apply(index: Int): Unit = repr(index) - def update(index: Int, elem: Unit) { repr(index) = elem } - } -} diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 562401a83d87..0537092d0b13 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,107 +10,344 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable +import java.util.Arrays +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 -import generic._ -import parallel.mutable.ParArray +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. 
+ * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { -/** A class for polymorphic arrays of elements that's represented - * internally by an array of objects. This means that elements of - * primitive types are boxed. - * - * @author Martin Odersky - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-sequences "Scala's Collection Library overview"]] - * section on `Array Sequences` for more information. - * - * @tparam A type of the elements contained in this array sequence. - * @param length the length of the underlying array. - * - * @define Coll `ArraySeq` - * @define coll array sequence - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ArraySeq[B]` because an implicit of type `CanBuildFrom[ArraySeq, B, ArraySeq[B]]` - * is defined in object `ArraySeq`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ArraySeq`. 
- * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1530165946227428979L) -class ArraySeq[A](override val length: Int) -extends AbstractSeq[A] - with IndexedSeq[A] - with GenericTraversableTemplate[A, ArraySeq] - with IndexedSeqOptimized[A, ArraySeq[A]] - with CustomParallelizable[A, ParArray[A]] - with Serializable -{ + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] + b.sizeHint(coll, delta = 0) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(using elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) - override def companion: GenericCompanion[ArraySeq] = ArraySeq + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] - val array: Array[AnyRef] = new Array[AnyRef](length) + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit - override def par = ParArray.handoff(array.asInstanceOf[Array[A]], length) + /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. 
*/ + def array: Array[_] - def apply(idx: Int): A = { - if (idx >= length) throw new IndexOutOfBoundsException(idx.toString) - array(idx).asInstanceOf[A] + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit + + override protected[this] def className = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + + override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied } - def update(idx: Int, elem: A) { - if (idx >= length) throw new IndexOutOfBoundsException(idx.toString) - array(idx) = elem.asInstanceOf[AnyRef] + override def equals(other: Any): Boolean = other match { + case that: ArraySeq[_] if this.array.length != that.array.length => + false + case _ => + super.equals(other) } - override def foreach[U](f: A => U) { - var i = 0 - while (i < length) { - f(array(i).asInstanceOf[A]) - i += 1 - } + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + + override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + this } +} + +/** A companion object used to create instances of `ArraySeq`. + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + // This is reused for all calls to empty. 
+ private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) - /** Fills the given array `xs` with at most `len` elements of - * this traversable starting at position `start`. - * Copying will stop once either the end of the current traversable is reached or - * `len` elements have been copied or the end of the array is reached. + /** + * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type + * without copying. * - * @param xs the array to fill. - * @param start starting index. - * @param len number of elements to copy + * Note that an array containing boxed primitives can be converted to a `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` + * at runtime. 
*/ - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - val len1 = len min (xs.length - start) min length - if (len1 > 0) Array.copy(array, 0, xs, start, len1) + def make[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag: ClassTag[T] = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => + Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] } - override def clone(): ArraySeq[A] = { - val cloned = array.clone().asInstanceOf[Array[AnyRef]] - new ArraySeq[A](length) { - override val array = cloned + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + // Type erases to 
`ManifestFactory.ByteManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Byte.type = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] } -} + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + // Type erases to `ManifestFactory.ShortManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Short.type = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } -/** $factoryInfo - * @define coll array sequence - * @define Coll 
`ArraySeq` - */ -object ArraySeq extends SeqFactory[ArraySeq] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArraySeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ArraySeq[A]] = - new ArrayBuffer[A] mapResult { buf => - val result = new ArraySeq[A](buf.length) - buf.copyToArray(result.array.asInstanceOf[Array[Any]], 0) - result + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + // Type erases to `ManifestFactory.CharManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Char.type = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) 
extends ArraySeq[Int] { + // Type erases to `ManifestFactory.IntManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Int.type = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + // Type erases to `ManifestFactory.LongManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Long.type = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) 
extends ArraySeq[Float] { + // Type erases to `ManifestFactory.FloatManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Float.type = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + // Type erases to `ManifestFactory.DoubleManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Double.type = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + 
@SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + // Type erases to `ManifestFactory.BooleanManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Boolean.type = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + // Type erases to `ManifestFactory.UnitManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Unit.type = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] + } } diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala deleted file mode 100644 index 5679d49e0ae1..000000000000 --- 
a/src/library/scala/collection/mutable/ArrayStack.scala +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ -import scala.reflect.ClassTag - -/** Factory object for the `ArrayStack` class. - * - * $factoryInfo - * @define coll array stack - * @define Coll `ArrayStack` - */ -object ArrayStack extends SeqFactory[ArrayStack] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayStack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ArrayStack[A]] = new ArrayStack[A] - def empty: ArrayStack[Nothing] = new ArrayStack() - def apply[A: ClassTag](elems: A*): ArrayStack[A] = { - val els: Array[AnyRef] = elems.reverseMap(_.asInstanceOf[AnyRef])(breakOut) - if (els.length == 0) new ArrayStack() - else new ArrayStack[A](els, els.length) - } - - private[mutable] def growArray(x: Array[AnyRef]) = { - val y = new Array[AnyRef](math.max(x.length * 2, 1)) - Array.copy(x, 0, y, 0, x.length) - y - } - - private[mutable] def clone(x: Array[AnyRef]) = { - val y = new Array[AnyRef](x.length) - Array.copy(x, 0, y, 0, x.length) - y - } -} - - -/** Simple stack class backed by an array. Should be significantly faster - * than the standard mutable stack. - * - * @author David MacIver - * @since 2.7 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-stacks "Scala's Collection Library overview"]] - * section on `Array Stacks` for more information. - * - * @tparam T type of the elements contained in this array stack. 
- * - * @define Coll `ArrayStack` - * @define coll array stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(8565219180626620510L) -class ArrayStack[T] private(private var table : Array[AnyRef], - private var index : Int) -extends AbstractSeq[T] - with IndexedSeq[T] - with IndexedSeqLike[T, ArrayStack[T]] - with GenericTraversableTemplate[T, ArrayStack] - with IndexedSeqOptimized[T, ArrayStack[T]] - with Cloneable[ArrayStack[T]] - with Builder[T, ArrayStack[T]] - with Serializable -{ - def this() = this(new Array[AnyRef](1), 0) - - /** Retrieve n'th element from stack, where top of stack has index 0. - * - * This is a constant time operation. - * - * @param n the index of the element to return - * @return the element at the specified index - * @throws IndexOutOfBoundsException if the index is out of bounds - */ - def apply(n: Int): T = - table(index - 1 - n).asInstanceOf[T] - - /** The number of elements in the stack */ - def length = index - - override def companion = ArrayStack - - /** Replace element at index `n` with the new element `newelem`. - * - * This is a constant time operation. - * - * @param n the index of the element to replace. - * @param newelem the new element. - * @throws IndexOutOfBoundsException if the index is not valid - */ - def update(n: Int, newelem: T) = - table(index - 1 - n) = newelem.asInstanceOf[AnyRef] - - /** Push an element onto the stack. - * - * @param x The element to push - */ - def push(x: T) { - if (index == table.length) table = ArrayStack.growArray(table) - table(index) = x.asInstanceOf[AnyRef] - index += 1 - } - - /** Pop the top element off the stack. - * - * @return the element on top of the stack - */ - def pop(): T = { - if (index == 0) sys.error("Stack empty") - index -= 1 - val x = table(index).asInstanceOf[T] - table(index) = null - x - } - - /** View the top element of the stack. 
- * - * Does not remove the element on the top. If the stack is empty, - * an exception is thrown. - * - * @return the element on top of the stack. - */ - def top: T = table(index - 1).asInstanceOf[T] - - /** Duplicate the top element of the stack. - * - * After calling this method, the stack will have an additional element at - * the top equal to the element that was previously at the top. - * If the stack is empty, an exception is thrown. - */ - def dup() = push(top) - - /** Empties the stack. */ - def clear() { - index = 0 - table = new Array(1) - } - - /** Empties the stack, passing all elements on it in LIFO order to the - * provided function. - * - * @param f The function to drain to. - */ - def drain(f: T => Unit) = while (!isEmpty) f(pop()) - - /** Pushes all the provided elements in the traversable object onto the stack. - * - * @param xs The source of elements to push. - * @return A reference to this stack. - */ - override def ++=(xs: TraversableOnce[T]): this.type = { xs foreach += ; this } - - /** Does the same as `push`, but returns the updated stack. - * - * @param x The element to push. - * @return A reference to this stack. - */ - def +=(x: T): this.type = { push(x); this } - - def result = { - reverseTable() - this - } - - private def reverseTable() { - var i = 0 - val until = index / 2 - while (i < until) { - val revi = index - i - 1 - val tmp = table(i) - table(i) = table(revi) - table(revi) = tmp - i += 1 - } - } - - /** Pop the top two elements off the stack, apply `f` to them and push the result - * back on to the stack. - * - * This function will throw an exception if stack contains fewer than 2 elements. - * - * @param f The function to apply to the top two elements. - */ - def combine(f: (T, T) => T): Unit = push(f(pop(), pop())) - - /** Repeatedly combine the top elements of the stack until the stack contains only - * one element. - * - * @param f The function to apply repeatedly to topmost elements. 
- */ - def reduceWith(f: (T, T) => T): Unit = while(size > 1) combine(f) - - override def size = index - - /** Evaluates the expression, preserving the contents of the stack so that - * any changes the evaluation makes to the stack contents will be undone after - * it completes. - * - * @param action The action to run. - */ - def preserving[T](action: => T) = { - val oldIndex = index - val oldTable = ArrayStack.clone(table) - - try { - action - } finally { - index = oldIndex - table = oldTable - } - } - - override def isEmpty: Boolean = index == 0 - - /** Creates and iterator over the stack in LIFO order. - * @return an iterator over the elements of the stack. - */ - override def iterator: Iterator[T] = new AbstractIterator[T] { - var currentIndex = index - def hasNext = currentIndex > 0 - def next() = { - currentIndex -= 1 - table(currentIndex).asInstanceOf[T] - } - } - - override def foreach[U](f: T => U) { - var currentIndex = index - while (currentIndex > 0) { - currentIndex -= 1 - f(table(currentIndex).asInstanceOf[T]) - } - } - - override def clone() = new ArrayStack[T](ArrayStack.clone(table), index) -} diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala index db49cbd55ba9..ba77d7161a0b 100644 --- a/src/library/scala/collection/mutable/BitSet.scala +++ b/src/library/scala/collection/mutable/BitSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,197 +14,379 @@ package scala package collection package mutable -import generic._ -import BitSetLike.{LogWL, MaxSize} +import scala.collection.immutable.Range +import BitSetOps.{LogWL, MaxSize} +import scala.annotation.implicitNotFound -/** A class for mutable bitsets. 
- * - * $bitsetinfo - * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] - * section on `Mutable Bitsets` for more information. - * - * @define Coll `BitSet` - * @define coll bitset - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `BitSet[B]` because an implicit of type `CanBuildFrom[BitSet, B, BitSet]` - * is defined in object `BitSet`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `BitSet`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(8483111450368547763L) -class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] - with SortedSet[Int] - with scala.collection.BitSet - with BitSetLike[BitSet] - with SetLike[Int, BitSet] - with Serializable { - - override def empty = BitSet.empty - - /** Creates the bitset of a certain initial size. - * - * @param initSize initial size of the bitset. - */ - def this(initSize: Int) = this(new Array[Long]((initSize + 63) >> 6 max 1)) +/** + * A class for mutable bitsets. + * + * $bitsetinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] + * section on `Mutable Bitsets` for more information. 
+ * + * @define Coll `BitSet` + * @define coll bitset + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class BitSet(protected[collection] final var elems: Array[Long]) + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedIterableOps[Int, Set, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) def this() = this(0) - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nwords = elems.length + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def word(idx: Int): Long = - if (idx < nwords) elems(idx) else 0L + def bitSetFactory: BitSet.type = BitSet - protected final def updateWord(idx: Int, w: Long) { - ensureCapacity(idx) - elems(idx) = w - } + override def unsorted: Set[Int] = this - protected final def ensureCapacity(idx: Int) { - require(idx < MaxSize) - if (idx >= nwords) { - var newlen = nwords - while (idx >= newlen) newlen = (newlen * 2) min MaxSize - elems = java.util.Arrays.copyOf(elems, newlen) - } - } + protected[collection] final def nwords: Int = elems.length - protected def fromBitMaskNoCopy(words: Array[Long]): BitSet = { - if (words.length == 0) { - empty - } else { - new BitSet(words) - } - } + protected[collection] final def word(idx: Int): Long = + if (idx < nwords) elems(idx) else 0L + + protected[collection] def fromBitMaskNoCopy(elems: 
Array[Long]): BitSet = + if (elems.length == 0) empty + else new BitSet(elems) - override def add(elem: Int): Boolean = { + def addOne(elem: Int): this.type = { require(elem >= 0) - if (contains(elem)) false - else { + if (!contains(elem)) { val idx = elem >> LogWL updateWord(idx, word(idx) | (1L << elem)) - true } + this } - override def remove(elem: Int): Boolean = { + def subtractOne(elem: Int): this.type = { require(elem >= 0) if (contains(elem)) { val idx = elem >> LogWL updateWord(idx, word(idx) & ~(1L << elem)) - true - } else false + } + this } - @deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0") - def += (elem: Int): this.type = { add(elem); this } + def clear(): Unit = { + elems = new Array[Long](elems.length) + } - @deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0") - def -= (elem: Int): this.type = { remove(elem); this } + protected final def updateWord(idx: Int, w: Long): Unit = { + ensureCapacity(idx) + elems(idx) = w + } + + protected final def ensureCapacity(idx: Int): Unit = { + require(idx < MaxSize) + if (idx >= nwords) { + var newlen = nwords + while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) + val elems1 = new Array[Long](newlen) + Array.copy(elems, 0, elems1, 0, nwords) + elems = elems1 + } + } + + def unconstrained: collection.Set[Int] = this /** Updates this bitset to the union with another bitset by performing a bitwise "or". - * - * @param other the bitset to form the union with. - * @return the bitset itself. - */ - def |= (other: BitSet): this.type = { + * + * @param other the bitset to form the union with. + * @return the bitset itself. 
+ */ + def |= (other: collection.BitSet): this.type = { ensureCapacity(other.nwords - 1) - for (i <- 0 until other.nwords) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { elems(i) = elems(i) | other.word(i) + i += 1 + } this } /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". - * - * @param other the bitset to form the intersection with. - * @return the bitset itself. - */ - def &= (other: BitSet): this.type = { + * + * @param other the bitset to form the intersection with. + * @return the bitset itself. + */ + def &= (other: collection.BitSet): this.type = { // Different from other operations: no need to ensure capacity because // anything beyond the capacity is 0. Since we use other.word which is 0 // off the end, we also don't need to make sure we stay in bounds there. - for (i <- 0 until nwords) + var i = 0 + val thisnwords = nwords + while (i < thisnwords) { elems(i) = elems(i) & other.word(i) + i += 1 + } this } /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". - * - * @param other the bitset to form the symmetric difference with. - * @return the bitset itself. - */ - def ^= (other: BitSet): this.type = { + * + * @param other the bitset to form the symmetric difference with. + * @return the bitset itself. + */ + def ^= (other: collection.BitSet): this.type = { ensureCapacity(other.nwords - 1) - for (i <- 0 until other.nwords) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + elems(i) = elems(i) ^ other.word(i) + i += 1 + } this } /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". - * - * @param other the bitset to form the difference with. - * @return the bitset itself. - */ - def &~= (other: BitSet): this.type = { - ensureCapacity(other.nwords - 1) - for (i <- 0 until other.nwords) + * + * @param other the bitset to form the difference with. 
+ * @return the bitset itself. + */ + def &~= (other: collection.BitSet): this.type = { + var i = 0 + val max = Math.min(nwords, other.nwords) + while (i < max) { elems(i) = elems(i) & ~other.word(i) + i += 1 + } this } - override def clear() { - elems = new Array[Long](elems.length) + override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) + + def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + override def addAll(xs: IterableOnce[Int]): this.type = xs match { + case bs: collection.BitSet => + this |= bs + case range: Range => + if (range.nonEmpty) { + val start = range.min + if (start >= 0) { + val end = range.max + val endIdx = end >> LogWL + ensureCapacity(endIdx) + + if (range.step == 1 || range.step == -1) { + val startIdx = start >> LogWL + val wordStart = startIdx * BitSetOps.WordLength + val wordMask = -1L << (start - wordStart) + + if (endIdx > startIdx) { + 
elems(startIdx) |= wordMask + java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) + elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) + } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) + } else super.addAll(range) + } else super.addAll(range) + } + this + + case sorted: collection.SortedSet[Int] => + // if `sorted` is using the regular Int ordering, ensure capacity for the largest + // element up front to avoid multiple resizing allocations + if (sorted.nonEmpty) { + val ord = sorted.ordering + if (ord eq Ordering.Int) { + ensureCapacity(sorted.lastKey >> LogWL) + } else if (ord eq Ordering.Int.reverse) { + ensureCapacity(sorted.firstKey >> LogWL) + } + val iter = sorted.iterator + while (iter.hasNext) { + addOne(iter.next()) + } + } + + this + + case other => + super.addAll(other) } - /** Wraps this bitset as an immutable bitset backed by the array of bits - * of this bitset. - * - * @note Subsequent changes in this bitset will be reflected in the returned immutable bitset. - * - * @return an immutable set containing all the elements of this set. - */ - @deprecated("If this BitSet contains a value that is 128 or greater, the result of this method is an 'immutable' " + - "BitSet that shares state with this mutable BitSet. Thus, if the mutable BitSet is modified, it will violate the " + - "immutability of the result.", "2.12.0") - def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems) - - override def clone(): BitSet = - new BitSet(elems.clone) + override def subsetOf(that: collection.Set[Int]): Boolean = that match { + case bs: collection.BitSet => + val thisnwords = this.nwords + val bsnwords = bs.nwords + val minWords = Math.min(thisnwords, bsnwords) + + // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. 
Start there + var i = bsnwords + while (i < thisnwords) { + if (word(i) != 0L) return false + i += 1 + } + + // the higher range of `this` is all `0`s, fall back to lower range + var j = 0 + while (j < minWords) { + if ((word(j) & ~bs.word(j)) != 0L) return false + j += 1 + } + + true + case other => + super.subsetOf(other) + } + + override def subtractAll(xs: IterableOnce[Int]): this.type = xs match { + case bs: collection.BitSet => this &~= bs + case other => super.subtractAll(other) + } + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. + * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. 
That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + i -= 1 + } + + if (i < 0) { + fromBitMaskNoCopy(Array(currentWord)) + } else { + val minimumNonZeroIndex: Int = i + 1 + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newArray) + } + } else { + // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index + val newElems = elems.clone() + var i = bsnwords - 1 + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newElems) + } + case _ => super.diff(that) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word + // index which lets us avoid: + // * over-allocating -- the resulting array will be exactly the right size + // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
+ var i = nwords - 1 + var newArray: Array[Long] = null + while (i >= 0) { + val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + if (w != 0L) { + if (newArray eq null) { + newArray = new Array(i + 1) + } + newArray(i) = w + } + i -= 1 + } + if (newArray eq null) { + empty + } else { + fromBitMaskNoCopy(newArray) + } + } + + override def filterInPlace(p: Int => Boolean): this.type = { + val thisnwords = nwords + var i = 0 + while (i < thisnwords) { + elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i) + i += 1 + } + this + } + + override def toBitMask: Array[Long] = elems.clone() } -/** $factoryInfo - * @define coll bitset - * @define Coll `BitSet` - */ -object BitSet extends BitSetFactory[BitSet] { - def empty: BitSet = new BitSet +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) - /** A growing builder for mutable Sets. */ - def newBuilder: Builder[Int, BitSet] = new GrowingBuilder[Int, BitSet](empty) + def empty: BitSet = new BitSet() - /** $bitsetCanBuildFrom */ - implicit val canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom + def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty) /** A bitset containing all the bits in an array */ def fromBitMask(elems: Array[Long]): BitSet = { val len = elems.length if (len == 0) empty - else new BitSet(elems.clone) + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSet(a) + } } /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ + * array without copying. 
+ */ def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - if (elems.length == 0) { - empty - } else { - new BitSet(elems) - } + val len = elems.length + if (len == 0) empty + else new BitSet(elems) + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) } } diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala index e9a3bfed0817..2ec13c1fdbc5 100644 --- a/src/library/scala/collection/mutable/Buffer.scala +++ b/src/library/scala/collection/mutable/Buffer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,41 +10,309 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ +import scala.annotation.nowarn -/** Buffers are used to create sequences of elements incrementally by - * appending, prepending, or inserting new elements. It is also - * possible to access and modify elements in a random access fashion - * via the index of the element in the current sequence. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * - * @tparam A type of the elements contained in this buffer. + +/** A `Buffer` is a growable and shrinkable `Seq`. 
* - * @define Coll `Buffer` * @define coll buffer + * @define Coll `Buffer` */ -trait Buffer[A] extends Seq[A] - with GenericTraversableTemplate[A, Buffer] - with BufferLike[A, Buffer[A]] - with scala.Cloneable { - override def companion: GenericCompanion[Buffer] = Buffer +trait Buffer[A] + extends Seq[A] + with SeqOps[A, Buffer, Buffer[A]] + with Growable[A] + with Shrinkable[A] + with IterableFactoryDefaults[A, Buffer] { + + override def iterableFactory: SeqFactory[Buffer] = Buffer + + override def knownSize: Int = super[Seq].knownSize + + //TODO Prepend is a logical choice for a readable name of `+=:` but it conflicts with the renaming of `append` to `add` + /** Prepends a single element at the front of this $coll. + * + * @param elem the element to $add. + * @return the $coll itself + */ + def prepend(elem: A): this.type + + /** Appends the given elements to this buffer. + * + * @param elem the element to append. + * @return this $coll + */ + @`inline` final def append(elem: A): this.type = addOne(elem) + + @deprecated("Use appendAll instead", "2.13.0") + @`inline` final def append(elems: A*): this.type = addAll(elems) + + /** Appends the elements contained in a iterable object to this buffer. + * @param elems the iterable object containing the elements to append. + * @return this $coll + */ + @`inline` final def appendAll(@deprecatedName("xs") elems: IterableOnce[A]): this.type = addAll(elems) + + /** Alias for `prepend` */ + @`inline` final def +=: (elem: A): this.type = prepend(elem) + + /** Prepends the elements contained in a iterable object to this buffer. + * @param elems the iterable object containing the elements to append. 
+ * @return this $coll + */ + def prependAll(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this } + + @deprecated("Use prependAll instead", "2.13.0") + @`inline` final def prepend(elems: A*): this.type = prependAll(elems) + + /** Alias for `prependAll` */ + @inline final def ++=:(elems: IterableOnce[A]): this.type = prependAll(elems) + + /** Inserts a new element at a given index into this buffer. + * + * @param idx the index where the new elements is inserted. + * @param elem the element to insert. + * @throws IndexOutOfBoundsException if the index `idx` is not in the valid range + * `0 <= idx <= length`. + */ + @throws[IndexOutOfBoundsException] + def insert(idx: Int, elem: A): Unit + + /** Inserts new elements at the index `idx`. Opposed to method + * `update`, this method will not replace an element with a new + * one. Instead, it will insert a new element at index `idx`. + * + * @param idx the index where a new element will be inserted. + * @param elems the iterable object providing all elements to insert. + * @throws IndexOutOfBoundsException if `idx` is out of bounds. + */ + @throws[IndexOutOfBoundsException] + def insertAll(idx: Int, elems: IterableOnce[A]): Unit + + /** Removes the element at a given index position. + * + * @param idx the index which refers to the element to delete. + * @return the element that was formerly at index `idx`. + */ + @throws[IndexOutOfBoundsException] + def remove(idx: Int): A + + /** Removes the element on a given index position. It takes time linear in + * the buffer size. + * + * @param idx the index which refers to the first element to remove. + * @param count the number of elements to remove. + * @throws IndexOutOfBoundsException if the index `idx` is not in the valid range + * `0 <= idx <= length - count` (with `count > 0`). + * @throws IllegalArgumentException if `count < 0`. 
+ */ + @throws[IndexOutOfBoundsException] + @throws[IllegalArgumentException] + def remove(idx: Int, count: Int): Unit + + /** Removes a single element from this buffer, at its first occurrence. + * If the buffer does not contain that element, it is unchanged. + * + * @param x the element to remove. + * @return the buffer itself + */ + def subtractOne (x: A): this.type = { + val i = indexOf(x) + if (i != -1) remove(i) + this + } + + /** Removes the first ''n'' elements of this buffer. + * + * @param n the number of elements to remove from the beginning + * of this buffer. + */ + @deprecated("use dropInPlace instead", since = "2.13.4") + def trimStart(n: Int): Unit = dropInPlace(n) + + /** Removes the last ''n'' elements of this buffer. + * + * @param n the number of elements to remove from the end + * of this buffer. + */ + @deprecated("use dropRightInPlace instead", since = "2.13.4") + def trimEnd(n: Int): Unit = dropRightInPlace(n) + + /** Replaces a slice of elements in this $coll by another sequence of elements. + * + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original $coll appends the patch to the end. + * If the `replaced` count would exceed the available elements, the difference in excess is ignored. 
+ * + * @param from the index of the first replaced element + * @param patch the replacement sequence + * @param replaced the number of elements to drop in the original $coll + * @return this $coll + */ + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type + + // +=, ++=, clear inherited from Growable + // Per remark of @ichoran, we should preferably not have these: + // + // def +=:(elem: A): this.type = { insert(0, elem); this } + // def +=:(elem1: A, elem2: A, elems: A*): this.type = elem1 +=: elem2 +=: elems ++=: this + // def ++=:(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this } + + /** Removes the first `n` elements from this $coll. + * + * @param n the number of elements to remove + * @return this $coll + * + */ + def dropInPlace(n: Int): this.type = { remove(0, normalized(n)); this } + + /** Removes the last `n` elements from this $coll. + * + * @param n the number of elements to remove + * @return this $coll + * + */ + def dropRightInPlace(n: Int): this.type = { + val norm = normalized(n) + remove(length - norm, norm) + this + } + + /** Retains the first `n` elements from this $coll and removes the rest. + * + * @param n the number of elements to retain + * @return this $coll + * + */ + def takeInPlace(n: Int): this.type = { + val norm = normalized(n) + remove(norm, length - norm) + this + } + + /** Retains the last `n` elements from this $coll and removes the rest. + * + * @param n the number of elements to retain + * @return this $coll + * + */ + def takeRightInPlace(n: Int): this.type = { remove(0, length - normalized(n)); this } + + /** Retains the specified slice from this $coll and removes the rest. 
+ * + * @param start the lowest index to include + * @param end the lowest index to exclude + * @return this $coll + * + */ + def sliceInPlace(start: Int, end: Int): this.type = takeInPlace(end).dropInPlace(start) + + private def normalized(n: Int): Int = math.min(math.max(n, 0), length) + + /** Drops the longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return this $coll + * @see [[dropWhile]] + */ + def dropWhileInPlace(p: A => Boolean): this.type = { + val idx = indexWhere(!p(_)) + if (idx < 0) { clear(); this } else dropInPlace(idx) + } + + /** Retains the longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return this $coll + * @see [[takeWhile]] + */ + def takeWhileInPlace(p: A => Boolean): this.type = { + val idx = indexWhere(!p(_)) + if (idx < 0) this else takeInPlace(idx) + } + + /** Append the given element to this $coll until a target length is reached. + * + * @param len the target length + * @param elem the padding value + * @return this $coll + */ + def padToInPlace(len: Int, elem: A): this.type = { + while (length < len) +=(elem) + this + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Buffer" } -/** $factoryInfo - * @define coll buffer - * @define Coll `Buffer` - */ -object Buffer extends SeqFactory[Buffer] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Buffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Buffer[A]] = new ArrayBuffer +trait IndexedBuffer[A] extends IndexedSeq[A] + with IndexedSeqOps[A, IndexedBuffer, IndexedBuffer[A]] + with Buffer[A] + with IterableFactoryDefaults[A, IndexedBuffer] { + + override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer + + /** Replace the contents of this $coll with the flatmapped result. 
+ * + * @param f the mapping function + * @return this $coll + */ + def flatMapInPlace(f: A => IterableOnce[A]): this.type = { + // There's scope for a better implementation which copies elements in place. + var i = 0 + val s = size + val newElems = new Array[IterableOnce[A]](s) + while (i < s) { newElems(i) = f(this(i)); i += 1 } + clear() + i = 0 + while (i < s) { ++=(newElems(i)); i += 1 } + this + } + + /** Replace the contents of this $coll with the filtered result. + * + * @param f the filtering function + * @return this $coll + */ + def filterInPlace(p: A => Boolean): this.type = { + var i, j = 0 + while (i < size) { + if (p(apply(i))) { + if (i != j) { + this(j) = this(i) + } + j += 1 + } + i += 1 + } + + if (i == j) this else takeInPlace(j) + } + + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + val replaced0 = math.min(math.max(replaced, 0), length) + val i = math.min(math.max(from, 0), length) + var j = 0 + val iter = patch.iterator + while (iter.hasNext && j < replaced0 && i + j < length) { + update(i + j, iter.next()) + j += 1 + } + if (iter.hasNext) insertAll(i + j, iter) + else if (j < replaced0) remove(i + j, math.min(replaced0 - j, length - i - j)) + this + } } +@SerialVersionUID(3L) +object Buffer extends SeqFactory.Delegate[Buffer](ArrayBuffer) + +@SerialVersionUID(3L) +object IndexedBuffer extends SeqFactory.Delegate[IndexedBuffer](ArrayBuffer) + /** Explicit instantiation of the `Buffer` trait to reduce class file size in subclasses. */ abstract class AbstractBuffer[A] extends AbstractSeq[A] with Buffer[A] diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala deleted file mode 100644 index 09214575b2bc..000000000000 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ -import script._ -import scala.annotation.migration - -/** A template trait for buffers of type `Buffer[A]`. - * - * Buffers are used to create sequences of elements incrementally by - * appending, prepending, or inserting new elements. It is also - * possible to access and modify elements in a random access fashion - * via the index of the element in the current sequence. - * - * @tparam A the type of the elements of the buffer - * @tparam This the type of the buffer itself. - * - * $buffernote - * - * @author Martin Odersky - * @author Matthias Zenger - * @since 2.8 - * @define buffernote @note - * This trait provides most of the operations of a `Buffer` independently of its representation. - * It is typically inherited by concrete implementations of buffers. - * - * To implement a concrete buffer, you need to provide implementations of the - * following methods: - * {{{ - * def apply(idx: Int): A - * def update(idx: Int, elem: A) - * def length: Int - * def clear() - * def +=(elem: A): this.type - * def +=:(elem: A): this.type - * def insertAll(n: Int, iter: Traversable[A]) - * def remove(n: Int): A - * }}} - * @define coll buffer - * @define Coll Buffer - * @define add append - * @define Add Append - * @define willNotTerminateInf - * @define mayNotTerminateInf - * @define compatMutate - * Note that for backward compatibility reasons, this method - * mutates the collection in place, unlike similar but - * undeprecated methods throughout the collections hierarchy. 
- */ -trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] - extends Growable[A] - with Shrinkable[A] - with Scriptable[A] - with Subtractable[A, This] - with SeqLike[A, This] - with scala.Cloneable -{ self : This => - - // Abstract methods from Seq: - - def apply(n: Int): A - def update(n: Int, newelem: A) - def length: Int - - // Abstract methods from Growable: - - def +=(elem: A): this.type - def clear() - - // Abstract methods new in this class: - - /** Prepends a single element to this buffer. - * @param elem the element to prepend. - * @return the buffer itself. - */ - def +=:(elem: A): this.type - - /** Inserts new elements at a given index into this buffer. - * - * @param n the index where new elements are inserted. - * @param elems the traversable collection containing the elements to insert. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length`. - */ - def insertAll(n: Int, elems: scala.collection.Traversable[A]) - - /** Removes the element at a given index from this buffer. - * - * @param n the index which refers to the element to delete. - * @return the previous element at index `n` - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n < length`. - */ - def remove(n: Int): A - - /** Removes a number of elements from a given index position. Subclasses of `BufferLike` - * will typically override this method to provide better performance than `count` - * successive calls to single-element `remove`. - * - * @param n the index which refers to the first element to remove. - * @param count the number of elements to remove. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length - count` (with `count > 0`). - * @throws IllegalArgumentException if `count < 0`. 
- */ - def remove(n: Int, count: Int) { - if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString) - for (i <- 0 until count) remove(n) - } - - /** Removes a single element from this buffer, at its first occurrence. - * If the buffer does not contain that element, it is unchanged. - * - * @param x the element to remove. - * @return the buffer itself - */ - def -= (x: A): this.type = { - val i = indexOf(x) - if (i != -1) remove(i) - this - } - - /** Prepends elements to this buffer. - * - * @param xs the TraversableOnce containing the elements to prepend. - * @return the buffer itself. - */ - def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this } - - /** Appends the given elements to this buffer. - * - * @param elems the elements to append. - */ - def append(elems: A*) { appendAll(elems) } - - /** Appends the elements contained in a traversable object to this buffer. - * @param xs the traversable object containing the elements to append. - */ - def appendAll(xs: TraversableOnce[A]) { this ++= xs } - - /** Prepends given elements to this buffer. - * @param elems the elements to prepend. - */ - def prepend(elems: A*) { prependAll(elems) } - - /** Prepends the elements contained in a traversable object to this buffer. - * @param xs the collection containing the elements to prepend. - */ - def prependAll(xs: TraversableOnce[A]) { xs ++=: this } - - /** Inserts new elements at a given index into this buffer. - * - * @param n the index where new elements are inserted. - * @param elems the traversable collection containing the elements to insert. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length`. - */ - def insert(n: Int, elems: A*) { insertAll(n, elems) } - - /** Removes the first ''n'' elements of this buffer. - * - * @param n the number of elements to remove from the beginning - * of this buffer. 
- */ - def trimStart(n: Int) { remove(0, n) } - - /** Removes the last ''n'' elements of this buffer. - * - * @param n the number of elements to remove from the end - * of this buffer. - */ - def trimEnd(n: Int) { remove(length - n max 0, n) } - - /** Send a message to this scriptable object. - * - * @param cmd the message to send. - */ - @deprecated("scripting is deprecated", "2.11.0") - def <<(cmd: Message[A]): Unit = cmd match { - case Include(Start, x) => prepend(x) - case Include(End, x) => append(x) - case Include(Index(n), x) => insert(n, x) - case Include(NoLo, x) => this += x - - case Update(Start, x) => update(0, x) - case Update(End, x) => update(length - 1, x) - case Update(Index(n), x) => update(n, x) - - case Remove(Start, x) => if (this(0) == x) remove(0) - case Remove(End, x) => if (this(length - 1) == x) remove(length - 1) - case Remove(Index(n), x) => if (this(n) == x) remove(n) - case Remove(NoLo, x) => this -= x - - case Reset() => clear() - case s: Script[_] => s.iterator foreach << - case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") - } - - /** Defines the prefix of this object's `toString` representation. - * @return a string representation which starts the result of `toString` applied to this set. - * Unless overridden this is simply `"Buffer"`. - */ - override def stringPrefix: String = "Buffer" - - /** Creates a new collection containing both the elements of this collection and the provided - * traversable object. - * - * @param xs the traversable object. - * @return a new collection consisting of all the elements of this collection and `xs`. - */ - @migration("`++` creates a new buffer. Use `++=` to add an element from this buffer and return that buffer itself.", "2.8.0") - def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq - - /** Creates a new collection with all the elements of this collection except `elem`. - * - * @param elem the element to remove. 
- * @return a new collection consisting of all the elements of this collection except `elem`. - */ - @migration("`-` creates a new buffer. Use `-=` to remove an element from this buffer and return that buffer itself.", "2.8.0") - override def -(elem: A): This = clone() -= elem - - /** Creates a new collection with all the elements of this collection except the two - * or more specified elements. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new collection consisting of all the elements of this collection except - * `elem1`, `elem2` and those in `elems`. - */ - @migration("`-` creates a new buffer. Use `-=` to remove an element from this buffer and return that buffer itself.", "2.8.0") - override def -(elem1: A, elem2: A, elems: A*): This = clone() -= elem1 -= elem2 --= elems - - /** Creates a new collection with all the elements of this collection except those - * provided by the specified traversable object. - * - * @param xs the traversable object. - * @return a new collection with all the elements of this collection except - * those in `xs` - */ - @migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0") - override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq - - /** Return a clone of this buffer. - * - * @return a `Buffer` with the same elements. - */ - override def clone(): This = { - val bf = newBuilder - bf ++= this - bf.result().asInstanceOf[This] - } -} diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala deleted file mode 100644 index b77e1d294003..000000000000 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import script._ - -/** This is a simple proxy class for `scala.collection.mutable.Buffer`. - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. - * - * @author Matthias Zenger - * @since 1 - * - * @tparam A type of the elements the buffer proxy contains. - * - * @define Coll `BufferProxy` - * @define coll buffer proxy - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait BufferProxy[A] extends Buffer[A] with Proxy { - - def self: Buffer[A] - - def length: Int = self.length - - override def iterator: Iterator[A] = self.iterator - - def apply(n: Int): A = self.apply(n) - - /** Append a single element to this buffer. - * - * @param elem the element to append. - */ - def +=(elem: A): this.type = { self.+=(elem); this } - - /** Appends a number of elements provided by a traversable object. - * - * @param xs the traversable object. - * @return a reference to this $coll. - */ - override def ++=(xs: TraversableOnce[A]): this.type = { self.++=(xs); this } - - /** Appends a sequence of elements to this buffer. - * - * @param elems the elements to append. - */ - override def append(elems: A*) { self.++=(elems) } - - /** Appends a number of elements provided by a traversable object. - * - * @param xs the traversable object. - */ - override def appendAll(xs: TraversableOnce[A]) { self.appendAll(xs) } - - /** Prepend a single element to this buffer and return - * the identity of the buffer. - * - * @param elem the element to append. - * @return a reference to this $coll. 
- */ - def +=:(elem: A): this.type = { self.+=:(elem); this } - - override def ++=:(xs: TraversableOnce[A]): this.type = { self.++=:(xs); this } - - /** Prepend an element to this list. - * - * @param elems the elements to prepend. - */ - override def prepend(elems: A*) { self.prependAll(elems) } - - /** Prepends a number of elements provided by a traversable object. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def prependAll(xs: TraversableOnce[A]) { self.prependAll(xs) } - - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a - * one. Instead, it will insert the new elements at index `n`. - * - * @param n the index where a new element will be inserted. - * @param elems the new elements to insert. - */ - override def insert(n: Int, elems: A*) { self.insertAll(n, elems) } - - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a - * one. Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param iter the iterable object providing all elements to insert. - */ - def insertAll(n: Int, iter: scala.collection.Iterable[A]) { - self.insertAll(n, iter) - } - - override def insertAll(n: Int, iter: scala.collection.Traversable[A]) { - self.insertAll(n, iter) - } - - /** Replace element at index `n` with the new element `newelem`. - * - * @param n the index of the element to replace. - * @param newelem the new element. - */ - def update(n: Int, newelem: A) { self.update(n, newelem) } - - /** Removes the element on a given index position. - * - * @param n the index which refers to the element to delete. - */ - def remove(n: Int): A = self.remove(n) - - /** Clears the buffer contents. - */ - def clear() { self.clear() } - - /** Send a message to this scriptable object. - * - * @param cmd the message to send. 
- */ - @deprecated("scripting is deprecated", "2.11.0") - override def <<(cmd: Message[A]) { self << cmd } - - /** Return a clone of this buffer. - * - * @return a `Buffer` with the same elements. - */ - override def clone(): Buffer[A] = new BufferProxy[A] { - def self = BufferProxy.this.self.clone() - } -} diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index 4008d6464a37..e59fc8639104 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,136 +10,94 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable -import generic._ - -/** The base trait of all builders. - * A builder lets one construct a collection incrementally, by adding - * elements to the builder with `+=` and then converting to the required - * collection type with `result`. - * - * One cannot assume that a single `Builder` can build more than one - * instance of the desired collection. Particular subclasses may allow - * such behavior. Otherwise, `result` should be treated as a terminal - * operation: after it is called, no further methods should be called on - * the builder. Extend the [[collection.mutable.ReusableBuilder]] trait - * instead of `Builder` for builders that may be reused to build multiple - * instances. - * - * @tparam Elem the type of elements that get added to the builder. - * @tparam To the type of collection that it produced. - * - * @since 2.8 - */ -trait Builder[-Elem, +To] extends Growable[Elem] { - - /** Adds a single element to the builder. - * @param elem the element to be added. - * @return the builder itself. 
- */ - def +=(elem: Elem): this.type +/** Base trait for collection builders. + * + * After calling `result()` the behavior of a Builder (which is not also a [[scala.collection.mutable.ReusableBuilder]]) + * is undefined. No further methods should be called. It is common for mutable collections to be their own non-reusable + * Builder, in which case `result()` simply returns `this`. + * + * @see [[scala.collection.mutable.ReusableBuilder]] for Builders which can be reused after calling `result()` + */ +trait Builder[-A, +To] extends Growable[A] { self => /** Clears the contents of this builder. * After execution of this method the builder will contain no elements. */ - def clear() + def clear(): Unit - /** Produces a collection from the added elements. This is a terminal operation: - * the builder's contents are undefined after this operation, and no further - * methods should be called. - * - * @return a collection containing the elements added to this builder. - */ + /** Result collection consisting of all elements appended so far. */ def result(): To - /** Gives a hint how many elements are expected to be added - * when the next `result` is called. Some builder classes - * will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. + /** Gives a hint how many elements are expected to be added in total + * by the time `result` is called. + * + * Some builder classes will optimize their representation based on the hint. + * However, builder implementations are required to work correctly even if the hint is + * wrong, e.g., a different number of elements is added, or the hint is out of range. + * + * The default implementation simply ignores the hint. * * @param size the hint how many elements will be added. 
*/ - def sizeHint(size: Int) {} + def sizeHint(size: Int): Unit = () - /** Gives a hint that one expects the `result` of this builder - * to have the same size as the given collection, plus some delta. This will - * provide a hint only if the collection is known to have a cheap - * `size` method, which is determined by calling `sizeHint`. + /** Gives a hint that the `result` of this builder is expected + * to have the same size as the given collection, plus some delta. * - * Some builder classes will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. + * This method provides a hint only if the collection has a known size, + * as specified by the following pseudocode: * - * @param coll the collection which serves as a hint for the result's size. - */ - def sizeHint(coll: TraversableLike[_, _]) { - coll.sizeHintIfCheap match { - case -1 => - case n => sizeHint(n) - } - } - - /** Gives a hint that one expects the `result` of this builder - * to have the same size as the given collection, plus some delta. This will - * provide a hint only if the collection is known to have a cheap - * `size` method. Currently this is assumed to be the case if and only if - * the collection is of type `IndexedSeqLike`. - * Some builder classes - * will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. + * {{{ + * if (coll.knownSize != -1) + * if (coll.knownSize + delta <= 0) sizeHint(0) + * else sizeHint(coll.knownSize + delta) + * }}} + * + * If the delta is negative and the result size is known to be negative, + * then the size hint is issued at zero. + * + * Some builder classes will optimize their representation based on the hint. 
+ * However, builder implementations are required to work correctly even if the hint is + * wrong, i.e., if a different number of elements is added. * * @param coll the collection which serves as a hint for the result's size. - * @param delta a correction to add to the `coll.size` to produce the size hint. + * @param delta a correction to add to the `coll.size` to produce the size hint (zero if omitted). */ - def sizeHint(coll: TraversableLike[_, _], delta: Int) { - coll.sizeHintIfCheap match { + final def sizeHint(coll: scala.collection.IterableOnce[_], delta: Int = 0): Unit = + coll.knownSize match { case -1 => - case n => sizeHint(n + delta) + case sz => sizeHint(0 max sz + delta) } - } /** Gives a hint how many elements are expected to be added - * when the next `result` is called, together with an upper bound - * given by the size of some other collection. Some builder classes - * will optimize their representation based on the hint. However, - * builder implementations are still required to work correctly even if the hint is - * wrong, i.e. a different number of elements is added. - * - * @param size the hint how many elements will be added. - * @param boundingColl the bounding collection. If it is - * an IndexedSeqLike, then sizes larger - * than collection's size are reduced. - */ - def sizeHintBounded(size: Int, boundingColl: TraversableLike[_, _]) { - boundingColl.sizeHintIfCheap match { - case -1 => - case n => sizeHint(size min n) + * when the next `result` is called, together with an upper bound + * given by the size of some other collection. Some builder classes + * will optimize their representation based on the hint. However, + * builder implementations are still required to work correctly even if the hint is + * wrong, i.e. a different number of elements is added. + * + * @param size the hint how many elements will be added. + * @param boundingColl the bounding collection. 
If it is + * an IndexedSeqLike, then sizes larger + * than collection's size are reduced. + */ + // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility + final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = { + val s = boundingColl.knownSize + if (s != -1) { + sizeHint(scala.math.min(s, size)) } } - /** Creates a new builder by applying a transformation function to - * the results of this builder. - * @param f the transformation function. - * @tparam NewTo the type of collection returned by `f`. - * @return a new builder which is the same as the current builder except - * that a transformation function is applied to this builder's result. - * - * @note The original builder should no longer be used after `mapResult` is called. - */ - def mapResult[NewTo](f: To => NewTo): Builder[Elem, NewTo] = - new Builder[Elem, NewTo] with Proxy { - val self = Builder.this - def +=(x: Elem): this.type = { self += x; this } - def clear() = self.clear() - override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this } - override def sizeHint(size: Int) = self.sizeHint(size) - override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl) - def result: NewTo = f(self.result()) - } + /** A builder resulting from this builder by mapping the result using `f`. 
*/ + def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo] = new Builder[A, NewTo] { + def addOne(x: A): this.type = { self += x; this } + def clear(): Unit = self.clear() + override def addAll(xs: IterableOnce[A]): this.type = { self ++= xs; this } + override def sizeHint(size: Int): Unit = self.sizeHint(size) + def result(): NewTo = f(self.result()) + override def knownSize: Int = self.knownSize + } } - diff --git a/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala b/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala new file mode 100644 index 000000000000..6953dd0ed660 --- /dev/null +++ b/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala @@ -0,0 +1,117 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + protected val mutationCount: () => Int + + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new 
CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: 
SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala index 2ac9081c45c2..5c11faea155e 100644 --- a/src/library/scala/collection/mutable/Cloneable.scala +++ b/src/library/scala/collection/mutable/Cloneable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,16 +10,13 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable + /** A trait for cloneable collections. - * - * @since 2.8 - * - * @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound. - */ -trait Cloneable[+A <: AnyRef] extends scala.Cloneable { - override def clone(): A = super.clone().asInstanceOf[A] + * + * @tparam C Type of the collection, covariant and with reference types as upperbound. + */ +trait Cloneable[+C <: AnyRef] extends scala.Cloneable { + override def clone(): C = super.clone().asInstanceOf[C] } diff --git a/src/library/scala/collection/mutable/CollisionProofHashMap.scala b/src/library/scala/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..f56e679df2d2 --- /dev/null +++ b/src/library/scala/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,887 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. 
Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table. */ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). 
*/ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, 
value, getOld = false) + + override def put(key: K, value: V): Option[V] = put0(key, value, getOld = true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, getOld = false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + sizeHint(xs, delta = contentSize) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def 
remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = 
(node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), 
lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 
2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, getOld = false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = 
if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root 
= transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) 
y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. + */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, red = false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * 
$factoryInfo + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) + } + + @unused @`inline` 
private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. + sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if 
(node.left eq null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala 
b/src/library/scala/collection/mutable/DefaultEntry.scala deleted file mode 100644 index 8c317e90949b..000000000000 --- a/src/library/scala/collection/mutable/DefaultEntry.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** Class used internally for default map model. - * @since 2.3 - */ -@SerialVersionUID(-3856907690109104385L) -final class DefaultEntry[A, B](val key: A, var value: B) - extends HashEntry[A, DefaultEntry[A, B]] with Serializable -{ - override def toString = chainString - - def chainString = { - "(kv: " + key + ", " + value + ")" + (if (next != null) " -> " + next.toString else "") - } -} diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala deleted file mode 100644 index e469455125cd..000000000000 --- a/src/library/scala/collection/mutable/DefaultMapModel.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** This class is used internally. It implements the mutable `Map` - * class in terms of three functions: `findEntry`, `addEntry`, and `entries`. 
- * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("this trait will be removed", "2.11.0") -trait DefaultMapModel[A, B] extends Map[A, B] { - - type Entry = DefaultEntry[A, B] - - protected def findEntry(key: A): Entry - protected def addEntry(e: Entry) - protected def entries: Iterator[Entry] - - def get(key: A): Option[B] = { - val e = findEntry(key) - if (e == null) None - else Some(e.value) - } - - override def put(key: A, value: B): Option[B] = { - val e = findEntry(key) - if (e == null) { addEntry(new Entry(key, value)); None } - else { val v = e.value; e.value = value; Some(v) } - } - - def += (kv: (A, B)): this.type = { put(kv._1, kv._2); this } - - def iterator = entries map {e => (e.key, e.value)} - -} diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala deleted file mode 100644 index 3cdb2f5c9b65..000000000000 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ - -/** This class implements double linked lists where both the head (`elem`), - * the tail (`next`) and a reference to the previous node (`prev`) are mutable. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#double-linked-lists "Scala's Collection Library overview"]] - * section on `Double Linked Lists` for more information. - - * - * @tparam A the type of the elements contained in this double linked list. 
- * - * @define Coll `DoubleLinkedList` - * @define coll double linked list - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `DoubleLinkedList[B]` because an implicit of type `CanBuildFrom[DoubleLinkedList, B, DoubleLinkedList[B]]` - * is defined in object `DoubleLinkedList`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `DoubleLinkedList`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -@SerialVersionUID(-8144992287952814767L) -class DoubleLinkedList[A]() extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, DoubleLinkedList] - with DoubleLinkedListLike[A, DoubleLinkedList[A]] - with Serializable { - next = this - - /** Creates a node for the double linked list. - * - * @param elem the element this node contains. - * @param next the next node in the double linked list. - */ - def this(elem: A, next: DoubleLinkedList[A]) { - this() - if (next != null) { - this.elem = elem - this.next = next - this.next.prev = this - } - } - - override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList - - // Accurately clone this collection. 
See scala/bug#6296 - override def clone(): DoubleLinkedList[A] = { - val builder = newBuilder - builder ++= this - builder.result() - } -} - -/** $factoryInfo - * @define coll double linked list - * @define Coll `DoubleLinkedList` - */ -@deprecated("low-level linked lists are deprecated", "2.11.0") -object DoubleLinkedList extends SeqFactory[DoubleLinkedList] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, DoubleLinkedList[A]] = - new Builder[A, DoubleLinkedList[A]] { - def emptyList() = new DoubleLinkedList[A]() - var current = emptyList() - - def +=(elem: A): this.type = { - if (current.isEmpty) - current = new DoubleLinkedList(elem, emptyList()) - else - current append new DoubleLinkedList(elem, emptyList()) - - this - } - - def clear(): Unit = current = emptyList() - def result() = current - } -} diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala deleted file mode 100644 index 6aa80f174f30..000000000000 --- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.migration - -/** This extensible class may be used as a basis for implementing double - * linked lists. Type variable `A` refers to the element type - * of the list, type variable `This` is used to model self - * types of linked lists. - * - * The invariant of this data structure is that `prev` is always a reference to - * the previous node in the list. 
If `this` is the first node of the list, `prev` - * will be `null`. - * Field `next` is set to `this` iff the list is empty. - * - * Examples (right arrow represents `next`, left arrow represents `prev`, - * `_` represents no value): - * - * {{{ - * - * Empty: - * - * null <-- [ _ ] --, - * [ ] <-` - * - * Single element: - * - * null <-- [ x ] --> [ _ ] --, - * [ ] <-- [ ] <-` - * - * More elements: - * - * null <-- [ x ] --> [ y ] --> [ z ] --> [ _ ] --, - * [ ] <-- [ ] <-- [ ] <-- [ ] <-` - * - * }}} - * - * @author Matthias Zenger - * @since 2.8 - * - * @tparam A type of the elements contained in the double linked list - * @tparam This the type of the actual linked list holding the elements - * - * @define Coll `DoubleLinkedList` - * @define coll double linked list - */ -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self => - - /** A reference to the node in the linked list preceding the current node. */ - var prev: This = _ - - // returns that list if this list is empty - // otherwise modifies this list - override def append(that: This): This = - if (isEmpty) - that - else { - if (next.isEmpty) { - next = that - if (that.nonEmpty) that.prev = repr - } else { - next.append(that) - } - repr - } - - // cannot be called on empty lists - override def insert(that: This): Unit = { - super.insert(that) - if (that.nonEmpty) that.prev = repr - } - - /** Removes the current node from the double linked list. - * If the node was chained into a double linked list, it will no longer - * be a part of it. - * If the node was the last node in the list, i.e. a sentinel, this method - * does nothing. - * - * '''Note:''' this method will not set the fields `elem`, `next` or `prev` of the - * current node, i.e. 
`this` node itself will still point "into" the list it - * was in. - */ - @migration("Double linked list now removes the current node from the list.", "2.9.0") - def remove(): Unit = if (nonEmpty) { - next.prev = prev - if (prev ne null) prev.next = next // because this could be the first node - } - - private def atLocation[T](n: Int)(f: This => T)(onOutOfBounds: => T) = if (isEmpty) onOutOfBounds else { - var loc = repr - var left = n - while (left > 0) { - loc = loc.next - left -= 1 - if (loc.isEmpty) onOutOfBounds - } - f(loc) - } - - private def outofbounds(n: Int) = throw new IndexOutOfBoundsException(n.toString) - - override def drop(n: Int): This = super[SeqLike].drop(n) - override def tail = drop(1) - override def apply(n: Int): A = atLocation(n)(_.elem)(outofbounds(n)) - override def update(n: Int, x: A): Unit = atLocation(n)(_.elem = x)(outofbounds(n)) - override def get(n: Int): Option[A] = atLocation[Option[A]](n)(x => Some(x.elem))(None) -} diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala deleted file mode 100644 index 982057aa9b70..000000000000 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ /dev/null @@ -1,441 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import java.lang.Integer.rotateRight -import scala.util.hashing.byteswap32 - -/** An implementation class backing a `HashSet`. - * - * This trait is used internally. It can be mixed in with various collections relying on - * hash table as an implementation. - * - * @define coll flat hash table - * @since 2.3 - * @tparam A the type of the elements contained in the $coll. 
- */ -trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { - import FlatHashTable._ - - private final def tableDebug = false - - @transient private[collection] var _loadFactor = defaultLoadFactor - - /** The actual hash table. - */ - @transient protected var table: Array[AnyRef] = new Array(initialCapacity) - - /** The number of mappings contained in this hash table. - */ - @transient protected var tableSize = 0 - - /** The next size value at which to resize (capacity * load factor). - */ - @transient protected var threshold: Int = newThreshold(_loadFactor, initialCapacity) - - /** The array keeping track of number of elements in 32 element blocks. - */ - @transient protected var sizemap: Array[Int] = null - - @transient protected var seedvalue: Int = tableSizeSeed - - protected def capacity(expectedSize: Int) = HashTable.nextPositivePowerOfTwo(expectedSize) - - /** The initial size of the hash table. - */ - def initialSize: Int = 32 - - private def initialCapacity = capacity(initialSize) - - protected def randomSeed = seedGenerator.get.nextInt() - - protected def tableSizeSeed = Integer.bitCount(table.length - 1) - - /** - * Initializes the collection from the input stream. `f` will be called for each element - * read from the input stream in the order determined by the stream. This is useful for - * structures where iteration order is important (e.g. LinkedHashSet). - * - * The serialization format expected is the one produced by `serializeTo`. 
- */ - private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) { - in.defaultReadObject - - _loadFactor = in.readInt() - assert(_loadFactor > 0) - - val size = in.readInt() - tableSize = 0 - assert(size >= 0) - - table = new Array(capacity(sizeForThreshold(size, _loadFactor))) - threshold = newThreshold(_loadFactor, table.length) - - seedvalue = in.readInt() - - val smDefined = in.readBoolean() - if (smDefined) sizeMapInit(table.length) else sizemap = null - - var index = 0 - while (index < size) { - val elem = entryToElem(in.readObject()) - f(elem) - addElem(elem) - index += 1 - } - } - - /** - * Serializes the collection to the output stream by saving the load factor, collection - * size and collection elements. `foreach` determines the order in which the elements are saved - * to the stream. To deserialize, `init` should be used. - */ - private[collection] def serializeTo(out: java.io.ObjectOutputStream) { - out.defaultWriteObject - out.writeInt(_loadFactor) - out.writeInt(tableSize) - out.writeInt(seedvalue) - out.writeBoolean(isSizeMapDefined) - iterator.foreach(out.writeObject) - } - - /** Finds an entry in the hash table if such an element exists. */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def findEntry(elem: A): Option[A] = - findElemImpl(elem) match { - case null => None - case entry => Some(entryToElem(entry)) - } - - - /** Checks whether an element is contained in the hash table. 
*/ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def containsElem(elem: A): Boolean = { - null != findElemImpl(elem) - } - - private def findElemImpl(elem: A): AnyRef = { - val searchEntry = elemToEntry(elem) - var h = index(searchEntry.hashCode) - var curEntry = table(h) - while (null != curEntry && curEntry != searchEntry) { - h = (h + 1) % table.length - curEntry = table(h) - } - curEntry - } - - /** Add elem if not yet in table. - * @return Returns `true` if a new elem was added, `false` otherwise. - */ - protected def addElem(elem: A) : Boolean = { - addEntry(elemToEntry(elem)) - } - - /** - * Add an entry (an elem converted to an entry via elemToEntry) if not yet in - * table. - * @return Returns `true` if a new elem was added, `false` otherwise. - */ - protected def addEntry(newEntry : AnyRef) : Boolean = { - var h = index(newEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == newEntry) return false - h = (h + 1) % table.length - curEntry = table(h) - //Statistics.collisions += 1 - } - table(h) = newEntry - tableSize = tableSize + 1 - nnSizeMapAdd(h) - if (tableSize >= threshold) growTable() - true - - } - - /** - * Removes an elem from the hash table returning true if the element was found (and thus removed) - * or false if it didn't exist. - */ - protected def removeElem(elem: A) : Boolean = { - if (tableDebug) checkConsistent() - def precedes(i: Int, j: Int) = { - val d = table.length >> 1 - if (i <= j) j - i < d - else i - j > d - } - val removalEntry = elemToEntry(elem) - var h = index(removalEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == removalEntry) { - var h0 = h - var h1 = (h0 + 1) % table.length - while (null != table(h1)) { - val h2 = index(table(h1).hashCode) - //Console.println("shift at "+h1+":"+table(h1)+" with h2 = "+h2+"? 
"+(h2 != h1)+precedes(h2, h0)+table.length) - if (h2 != h1 && precedes(h2, h0)) { - //Console.println("shift "+h1+" to "+h0+"!") - table(h0) = table(h1) - h0 = h1 - } - h1 = (h1 + 1) % table.length - } - table(h0) = null - tableSize -= 1 - nnSizeMapRemove(h0) - if (tableDebug) checkConsistent() - return true - } - h = (h + 1) % table.length - curEntry = table(h) - } - false - } - - protected def iterator: Iterator[A] = new AbstractIterator[A] { - private var i = 0 - def hasNext: Boolean = { - while (i < table.length && (null == table(i))) i += 1 - i < table.length - } - def next(): A = - if (hasNext) { i += 1; entryToElem(table(i - 1)) } - else Iterator.empty.next() - } - - private def growTable() { - val oldtable = table - table = new Array[AnyRef](table.length * 2) - tableSize = 0 - nnSizeMapReset(table.length) - seedvalue = tableSizeSeed - threshold = newThreshold(_loadFactor, table.length) - var i = 0 - while (i < oldtable.length) { - val entry = oldtable(i) - if (null != entry) addEntry(entry) - i += 1 - } - if (tableDebug) checkConsistent() - } - - private def checkConsistent() { - for (i <- 0 until table.length) - if (table(i) != null && !containsElem(entryToElem(table(i)))) - assert(assertion = false, i+" "+table(i)+" "+table.mkString) - } - - - /* Size map handling code */ - - /* - * The following three methods (nn*) modify a size map only if it has been - * initialized, that is, if it's not set to null. - * - * The size map logically divides the hash table into `sizeMapBucketSize` element buckets - * by keeping an integer entry for each such bucket. Each integer entry simply denotes - * the number of elements in the corresponding bucket. - * Best understood through an example, see: - * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) - * sizemap = [ 2 | 3 ] (2 entries) - * where sizeMapBucketSize == 4. 
- * - */ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) { - val p = h >> sizeMapBucketBitSize - sizemap(p) += 1 - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) -= 1 - } - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { - val nsize = calcSizeMapSize(tableLength) - if (sizemap.length != nsize) sizemap = new Array[Int](nsize) - else java.util.Arrays.fill(sizemap, 0) - } - - private[collection] final def totalSizeMapBuckets = (table.length - 1) / sizeMapBucketSize + 1 - - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 - - // discards the previous sizemap and only allocates a new one - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapInit(tableLength: Int) { - sizemap = new Array[Int](calcSizeMapSize(tableLength)) - } - - // discards the previous sizemap and populates the new one - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapInitAndRebuild() { - // first allocate - sizeMapInit(table.length) - - // rebuild - val totalbuckets = totalSizeMapBuckets - var bucketidx = 0 - var tableidx = 0 - val tbl = table - var tableuntil = sizeMapBucketSize min tbl.length - while (bucketidx < totalbuckets) { - var currbucketsz = 0 - while (tableidx < tableuntil) { - if (tbl(tableidx) ne null) currbucketsz += 1 - tableidx += 1 - } - 
sizemap(bucketidx) = currbucketsz - tableuntil += sizeMapBucketSize - bucketidx += 1 - } - } - - private[collection] def printSizeMap() { - println(sizemap.mkString("szmap: [", ", ", "]")) - } - - private[collection] def printContents() { - println(table.mkString("[", ", ", "]")) - } - - protected def sizeMapDisable() = sizemap = null - - protected def isSizeMapDefined = sizemap ne null - - protected def alwaysInitSizeMap = false - - /* End of size map handling code */ - - protected final def index(hcode: Int) = { - // version 1 (no longer used - did not work with parallel hash tables) - // improve(hcode) & (table.length - 1) - - // version 2 (allows for parallel hash table construction) - val improved = improve(hcode, seedvalue) - val ones = table.length - 1 - (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones - - // version 3 (solves scala/bug#5293 in most cases, but such a case would still arise for parallel hash tables) - // val hc = improve(hcode) - // val bbp = blockbitpos - // val ones = table.length - 1 - // val needed = Integer.bitCount(ones) - // val blockbits = ((hc >>> bbp) & 0x1f) << (needed - 5) - // val rest = ((hc >>> (bbp + 5)) << bbp) | (((1 << bbp) - 1) & hc) - // val restmask = (1 << (needed - 5)) - 1 - // val improved = blockbits | (rest & restmask) - // improved - } - - protected def clearTable() { - var i = table.length - 1 - while (i >= 0) { table(i) = null; i -= 1 } - tableSize = 0 - nnSizeMapReset(table.length) - } - - private[collection] def hashTableContents = new FlatHashTable.Contents[A]( - _loadFactor, - table, - tableSize, - threshold, - seedvalue, - sizemap - ) - - protected def initWithContents(c: FlatHashTable.Contents[A]) = { - if (c != null) { - _loadFactor = c.loadFactor - table = c.table - tableSize = c.tableSize - threshold = c.threshold - seedvalue = c.seedvalue - sizemap = c.sizemap - } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() - } - -} - - -private[collection] object FlatHashTable { - - 
/** Creates a specific seed to improve hashcode of a hash table instance - * and ensure that iteration order vulnerabilities are not 'felt' in other - * hash tables. - * - * See scala/bug#5293. - */ - final def seedGenerator = new ThreadLocal[scala.util.Random] { - override def initialValue = new scala.util.Random - } - - private object NullSentinel { - override def hashCode = 0 - override def toString = "NullSentinel" - } - - /** The load factor for the hash table; must be < 500 (0.5) - */ - def defaultLoadFactor: Int = 450 - final def loadFactorDenum = 1000 - - def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt) - - def newThreshold(_loadFactor: Int, size: Int) = { - val lf = _loadFactor - assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5") - (size.toLong * lf / loadFactorDenum ).toInt - } - - class Contents[A]( - val loadFactor: Int, - val table: Array[AnyRef], - val tableSize: Int, - val threshold: Int, - val seedvalue: Int, - val sizemap: Array[Int] - ) - - trait HashUtils[A] { - protected final def sizeMapBucketBitSize = 5 - // so that: - protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize - - protected final def improve(hcode: Int, seed: Int) = rotateRight(byteswap32(hcode), seed) - - /** - * Elems have type A, but we store AnyRef in the table. 
Plus we need to deal with - * null elems, which need to be stored as NullSentinel - */ - protected final def elemToEntry(elem : A) : AnyRef = - if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef] - - /** - * Does the inverse translation of elemToEntry - */ - protected final def entryToElem(entry : AnyRef) : A = - (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A] - } - -} - - diff --git a/src/library/scala/collection/mutable/Growable.scala b/src/library/scala/collection/mutable/Growable.scala new file mode 100644 index 000000000000..b2d4806089dc --- /dev/null +++ b/src/library/scala/collection/mutable/Growable.scala @@ -0,0 +1,101 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +/** This trait forms part of collections that can be augmented + * using a `+=` operator and that can be cleared of all elements using + * a `clear` method. + * + * @define coll growable collection + * @define Coll `Growable` + * @define add add + * @define Add Add + */ +trait Growable[-A] extends Clearable { + + /** ${Add}s a single element to this $coll. + * + * @param elem the element to $add. + * @return the $coll itself + */ + def addOne(elem: A): this.type + + /** Alias for `addOne` */ + @inline final def += (elem: A): this.type = addOne(elem) + + //TODO This causes a conflict in StringBuilder; looks like a compiler bug + //@deprecated("Use addOne or += instead of append", "2.13.0") + //@`inline` final def append(elem: A): Unit = addOne(elem) + + /** ${Add}s two or more elements to this $coll. + * + * @param elem1 the first element to $add. + * @param elem2 the second element to $add. + * @param elems the remaining elements to $add. 
+ * @return the $coll itself + */ + @deprecated("Use `++=` aka `addAll` instead of varargs `+=`; infix operations with an operand of multiple args will be deprecated", "2.13.0") + @inline final def += (elem1: A, elem2: A, elems: A*): this.type = this += elem1 += elem2 ++= (elems: IterableOnce[A]) + + /** ${Add}s all elements produced by an IterableOnce to this $coll. + * + * @param elems the IterableOnce producing the elements to $add. + * @return the $coll itself. + */ + def addAll(@deprecatedName("xs") elems: IterableOnce[A]): this.type = { + if (elems.asInstanceOf[AnyRef] eq this) addAll(Buffer.from(elems)) // avoid mutating under our own iterator + else { + val it = elems.iterator + while (it.hasNext) { + addOne(it.next()) + } + } + this + } + + /** Alias for `addAll` */ + @inline final def ++= (@deprecatedName("xs") elems: IterableOnce[A]): this.type = addAll(elems) + + /** The number of elements in the collection under construction, if it can be cheaply computed, -1 otherwise. + * + * @return The number of elements. The default implementation always returns -1. + */ + def knownSize: Int = -1 +} + +object Growable { + + /** + * Fills a `Growable` instance with the elements of a given iterable + * @param empty Instance to fill + * @param it Elements to add + * @tparam A Element type + * @return The filled instance + */ + def from[A](empty: Growable[A], it: collection.IterableOnce[A]): empty.type = empty ++= it + +} + +/** This trait forms part of collections that can be cleared + * with a clear() call. + * + * @define coll collection + */ +trait Clearable { + /** Clears the $coll's contents. After this operation, the + * $coll is empty. 
+ */ + def clear(): Unit +} diff --git a/src/library/scala/collection/mutable/GrowableBuilder.scala b/src/library/scala/collection/mutable/GrowableBuilder.scala new file mode 100644 index 000000000000..247bc58da150 --- /dev/null +++ b/src/library/scala/collection/mutable/GrowableBuilder.scala @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + + +/** The canonical builder for collections that are growable, i.e. that support an + * efficient `+=` method which adds an element to the collection. + * + * GrowableBuilders can produce only a single instance of the collection they are growing. + * + * @define Coll `GrowingBuilder` + * @define coll growing builder + */ +class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) + extends Builder[Elem, To] { + + def clear(): Unit = elems.clear() + + def result(): To = elems + + def addOne(elem: Elem): this.type = { elems += elem; this } + + override def addAll(xs: IterableOnce[Elem]): this.type = { elems.addAll(xs); this } + + override def knownSize: Int = elems.knownSize +} diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala deleted file mode 100644 index fcf7f03c5b45..000000000000 --- a/src/library/scala/collection/mutable/GrowingBuilder.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable - -import generic._ - -/** The canonical builder for collections that are growable, i.e. that support an - * efficient `+=` method which adds an element to the collection. - * - * GrowableBuilders can produce only a single instance of the collection they are growing. - * - * @author Paul Phillips - * @since 2.8 - * - * @define Coll `GrowingBuilder` - * @define coll growing builder - */ -class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] { - protected var elems: To = empty - def +=(x: Elem): this.type = { elems += x; this } - def clear() { empty.clear } - def result: To = elems -} diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala deleted file mode 100644 index 65e7958ab7d5..000000000000 --- a/src/library/scala/collection/mutable/HashEntry.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** Class used internally. - * @since 2.8 - */ -trait HashEntry [A, E] { - val key: A - var next: E = _ -} diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index aed32dad9ab8..86aa9541c4e3 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,201 +10,645 @@ * additional information regarding copyright ownership. 
*/ -package scala -package collection +package scala.collection package mutable -import generic._ -import scala.collection.parallel.mutable.ParHashMap +import scala.annotation.{nowarn, tailrec} +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 /** This class implements mutable maps using a hashtable. - * - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @tparam A the type of the keys contained in this hash map. - * @tparam B the type of the values assigned to keys in this hash map. - * - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `HashMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[HashMap, (A, B), HashMap[A, B]]` - * is defined in object `HashMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `HashMap`. 
- * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1L) -class HashMap[A, B] private[collection] (contents: HashTable.Contents[A, DefaultEntry[A, B]]) -extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, HashMap[A, B]] - with HashTable[A, DefaultEntry[A, B]] - with CustomParallelizable[(A, B), ParHashMap[A, B]] - with Serializable -{ - initWithContents(contents) + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. + * + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") +class HashMap[K, V](initialCapacity: Int, loadFactor: Double) + extends AbstractMap[K, V] + with MapOps[K, V, HashMap, HashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with Serializable { + + /* The HashMap class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor) + + import HashMap.Node - type Entry = DefaultEntry[A, B] + /** The actual hash table. 
*/ + private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity)) - override def empty: HashMap[A, B] = HashMap.empty[A, B] - override def clear() { clearTable() } - override def size: Int = tableSize + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) - def this() = this(null) + private[this] var contentSize = 0 - override def par = new ParHashMap[A, B](hashTableContents) + override def size: Int = contentSize - // contains and apply overridden to avoid option allocations. - override def contains(key: A): Boolean = findEntry(key) != null + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - override def apply(key: A): B = { - val result = findEntry(key) - if (result eq null) default(key) - else result.value + /** Computes the improved hash of an original (`any.##`) hash. */ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + // + // This function is also its own inverse. 
That is, for all ints i, improveHash(improveHash(i)) = i + // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap + // and that is why unimproveHash simply forwards to this method + originalHash ^ (originalHash >>> 16) } - def get(key: A): Option[B] = { - val e = findEntry(key) - if (e eq null) None - else Some(e.value) + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(key: K): Boolean = findNode(key) ne null + + @`inline` private[this] def findNode(key: K): Node[K, V] = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findNode(key, hash) + } } - override def getOrElseUpdate(key: A, defaultValue: => B): B = { - val hash = elemHashCode(key) - val i = index(hash) - val firstEntry = findEntry(key, i) - if (firstEntry != null) firstEntry.value - else { - val table0 = table - val default = defaultValue - // Avoid recomputing index if the `defaultValue()` hasn't triggered - // a table resize. 
- val newEntryIndex = if (table0 eq table) i else index(hash) - val e = createNewEntry(key, default) - // Repeat search - // because evaluation of `default` can bring entry with `key` - val secondEntry = findEntry(key, newEntryIndex) - if (secondEntry == null) addEntry(e, newEntryIndex) - else secondEntry.value = default - default + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + sizeHint(xs) + + xs match { + case hm: immutable.HashMap[K, V] => + hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) + this + case hm: mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + put0(next.key, next.value, next.hash, getOld = false) + } + this + case lhm: mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val entry = iter.next() + put0(entry.key, entry.value, entry.hash, getOld = false) + } + this + case thatMap: Map[K, V] => + thatMap.foreachEntry { (key: K, value: V) => + put0(key, value, improveHash(key.##), getOld = false) + } + this + case _ => + super.addAll(xs) + } + } + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... 
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundNode: Node[K, V] = null + var previousNode: Node[K, V] = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousNode = prev + foundNode = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findNode(nd, nd.next, k, h) + } + + findNode(null, nd, key, hash) + } + + val previousValue = foundNode match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousNode != null) previousNode.next = foundNode.next + else table(indexedHash) = foundNode.next + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, getOld = false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundNode.value = newValue + } + nextValue } } + override def subtractAll(xs: IterableOnce[K]): this.type = { + if (size == 0) { + return this + } - /* inlined HashTable.findEntry0 to preserve its visibility */ - private[this] def findEntry(key: A, h: Int): Entry = { - var e = table(h).asInstanceOf[Entry] - while (notFound(key, e)) - e = e.next - e + xs match { + case hs: immutable.HashSet[K] => + hs.foreachWithHashWhile { (k, h) => + remove0(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[K] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[K] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if 
(size == 0) return this + } + this + case _ => super.subtractAll(xs) + } } - private[this] def notFound(key: A, e: Entry): Boolean = (e != null) && !elemEquals(e.key, key) - /* inlined HashTable.addEntry0 to preserve its visibility */ - private[this] def addEntry(e: Entry, h: Int): B = { - if (tableSize >= threshold) addEntry(e) - else addEntry0(e, h) - e.value + /** Adds a key-value pair to this map + * + * @param key the key to add + * @param value the value to add + * @param hash the **improved** hashcode of `key` (see computeHash) + * @param getOld if true, then the previous value for `key` will be returned, otherwise, false + */ + private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val idx = index(hash) + put0(key, value, getOld, hash, idx) } - /* extracted to make addEntry inlinable */ - private[this] def addEntry0(e: Entry, h: Int) { - e.next = table(h).asInstanceOf[Entry] - table(h) = e - tableSize += 1 - nnSizeMapAdd(h) + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) } - override def put(key: A, value: B): Option[B] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = new Node[K, V](key, hash, value, null) + case old => + var prev: Node[K, V] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if(getOld) Some(old) else null + } + prev = n + n = n.next + } + if(prev eq null) table(idx) = new Node(key, hash, value, old) + else prev.next = new Node(key, hash, value, prev.next) + } + 
contentSize += 1 + null } - override def update(key: A, value: B): Unit = put(key, value) + private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def remove0(elem: K, hash: Int) : Node[K, V] = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + private[this] var node: Node[K, V] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[K, V]): A + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } - override def remove(key: A): Option[B] = { - val e = removeEntry(key) - if (e ne null) Some(e.value) - else None + def next(): A = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } } - def += (kv: (A, B)): this.type = { - val e = findOrAddEntry(kv._1, kv._2) - if (e ne null) e.value = kv._2 - this + override def iterator: Iterator[(K, V)] = + if(size == 0) Iterator.empty + else new HashMapIterator[(K, V)] { + protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) + } + + override def keysIterator: Iterator[K] = + if(size == 0) Iterator.empty + else new 
HashMapIterator[K] { + protected[this] def extract(nd: Node[K, V]) = nd.key + } + + override def valuesIterator: Iterator[V] = + if(size == 0) Iterator.empty + else new HashMapIterator[V] { + protected[this] def extract(nd: Node[K, V]) = nd.value + } + + + /** Returns an iterator over the nodes stored in this HashMap */ + private[collection] def nodeIterator: Iterator[Node[K, V]] = + if(size == 0) Iterator.empty + else new HashMapIterator[Node[K, V]] { + protected[this] def extract(nd: Node[K, V]) = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. + parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). + asInstanceOf[S with EfficientSplit] + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new 
DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + if (newlen < 0) + throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[K, V] = preLow + var lastHigh: Node[K, V] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } } - def -=(key: A): this.type = { removeEntry(key); this } + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - def iterator = entriesIterator map (e => ((e.key, e.value))) + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - override def foreach[U](f: ((A, B)) => U): Unit = foreachEntry(e => f((e.key, e.value))) + override def clear(): Unit = { + 
java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } - /* Override to avoid tuple allocation in foreach */ - override def keySet: scala.collection.Set[A] = new DefaultKeySet { - override def foreach[U](f: A => U) = foreachEntry(e => f(e.key)) + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd.value) } - /* Override to avoid tuple allocation in foreach */ - override def values: scala.collection.Iterable[B] = new DefaultValuesIterable { - override def foreach[U](f: B => U) = foreachEntry(e => f(e.value)) + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd.value } - /* Override to avoid tuple allocation */ - override def keysIterator: Iterator[A] = new AbstractIterator[A] { - val iter = entriesIterator - def hasNext = iter.hasNext - def next() = iter.next().key + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. but in the common case, we can avoid the Option boxing. + val nd = findNode(key) + if (nd eq null) default else nd.value + } } - /* Override to avoid tuple allocation */ - override def valuesIterator: Iterator[B] = new AbstractIterator[B] { - val iter = entriesIterator - def hasNext = iter.hasNext - def next() = iter.next().value + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... 
+ super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findNode(key, hash) + } + if(nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, getOld = false, hash, newIdx) + default + } + } } - private[collection] def entriesIterator0: Iterator[DefaultEntry[A, B]] = entriesIterator + override def put(key: K, value: V): Option[V] = put0(key, value, getOld = true) match { + case null => None + case sm => sm + } - /** Toggles whether a size map is used to track hash map statistics. - */ - def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild() - } else sizeMapDisable() + override def remove(key: K): Option[V] = remove0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def update(key: K, value: V): Unit = put0(key, value, getOld = false) + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, getOld = false); this } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size - protected def createNewEntry[B1](key: A, value: B1): Entry = { - new Entry(key, value.asInstanceOf[B]) + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreachEntry(f) + i += 1 + } } - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { entry => - 
out.writeObject(entry.key) - out.writeObject(entry.value) - }) + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) + + override def filterInPlace(p: (K, V) => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key, head.value)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key, next.value)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this } - private def readObject(in: java.io.ObjectInputStream) { - init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject())) + // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) + private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val len = table.length + var i = 0 + while (i < len) { + var n = table(i) + while (n ne null) { + n.value = f(n.key, n.value) + n = n.next + } + i += 1 + } + this } + override def mapFactory: MapFactory[HashMap] = HashMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "HashMap" + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new HashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[K, V]): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } } -/** $factoryInfo - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - */ -object HashMap extends MutableMapFactory[HashMap] { 
- implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = - ReusableCBF.asInstanceOf[MapCanBuildFrom[A, B]] - private[this] val ReusableCBF = new MapCanBuildFrom[Any, Any] +/** + * $factoryInfo + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + def empty[K, V]: HashMap[K, V] = new HashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]): HashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashMap[K, V](cap, defaultLoadFactor).addAll(it) + } + + def newBuilder[K, V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) + } - def empty[A, B]: HashMap[A, B] = new HashMap[A, B] + private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { + def key: K = _key + def hash: Int = _hash + def value: V = _value + def value_= (v: V): Unit = _value = v + def next: Node[K, V] = _next + def next_= (n: Node[K, V]): Unit = 
_next = n + + @tailrec + def findNode(k: K, h: Int): Node[K, V] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: ((K, V)) => U): Unit = { + f((_key, _value)) + if(_next ne null) _next.foreach(f) + } + + @tailrec + def foreachEntry[U](f: (K, V) => U): Unit = { + f(_key, _value) + if(_next ne null) _next.foreachEntry(f) + } + + override def toString = s"Node($key, $value, $hash) -> $next" + } } diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index ef538a55696a..9f0abbfa6cfd 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,103 +10,447 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ -import scala.collection.parallel.mutable.ParHashSet +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 /** This class implements mutable sets using a hashtable. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - * @define thatinfo the class of the returned collection. 
In the standard library configuration, - * `That` is always `HashSet[B]` because an implicit of type `CanBuildFrom[HashSet, B, HashSet[B]]` - * is defined in object `HashSet`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `HashSet`. - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(1L) -class HashSet[A] private[collection] (contents: FlatHashTable.Contents[A]) -extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, HashSet] - with SetLike[A, HashSet[A]] - with FlatHashTable[A] - with CustomParallelizable[A, ParHashSet[A]] - with Serializable -{ - initWithContents(contents) + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class HashSet[A](initialCapacity: Int, loadFactor: Double) + extends AbstractSet[A] + with SetOps[A, HashSet, HashSet[A]] + with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with Serializable { + + def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor) + + import HashSet.Node + + /* The Hashset class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + /** The actual hash table. 
*/ + private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this element */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(elem: A): Boolean = findNode(elem) ne null + + @`inline` private[this] def findNode(elem: A): Node[A] = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case nd => nd.findNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } - def this() = this(null) + override def add(elem: A) : Boolean = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + addElem(elem, computeHash(elem)) + } - override def companion: GenericCompanion[HashSet] = HashSet + override def addAll(xs: IterableOnce[A]): this.type = { + sizeHint(xs, delta = 0) + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHash((k, h) => addElem(k, 
improveHash(h))) + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case _ => super.addAll(xs) + } + } - override def size: Int = tableSize + override def subtractAll(xs: IterableOnce[A]): this.type = { + if (size == 0) { + return this + } - def contains(elem: A): Boolean = containsElem(elem) + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHashWhile { (k, h) => + remove(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } - def += (elem: A): this.type = { addElem(elem); this } + /** Adds an element to this set + * @param elem element to add + * @param hash the **improved** hash of `elem` (see computeHash) + */ + private[this] def addElem(elem: A, hash: Int) : Boolean = { + val idx = index(hash) + table(idx) match { + case null => + table(idx) = new Node(elem, hash, null) + case old => + var prev: Node[A] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + if(prev eq null) + table(idx) = new Node(elem, hash, old) + else + prev.next = new Node(elem, hash, prev.next) + } + contentSize += 1 + true + } - def -= (elem: A): this.type = { removeElem(elem); this } + private[this] def remove(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if 
nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } - override def par = new ParHashSet(hashTableContents) + override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) - override def add(elem: A): Boolean = addElem(elem) + private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { + private[this] var i = 0 + private[this] var node: Node[A] = null + private[this] val len = table.length - override def remove(elem: A): Boolean = removeElem(elem) + protected[this] def extract(nd: Node[A]): B - override def clear() { clearTable() } + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } - override def iterator: Iterator[A] = super[FlatHashTable].iterator + def next(): B = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[A] = new HashSetIterator[A] { + override protected[this] def extract(nd: Node[A]): A = nd.key + } + + /** Returns an iterator over the nodes stored in this HashSet */ + private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { + override protected[this] def extract(nd: Node[A]): Node[A] = nd + } - override def foreach[U](f: A => U) { + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new 
LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) + val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[A] = preLow + var lastHigh: Node[A] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + 
bucket += 1 + } + } + this + } + + /* + private[mutable] def checkTable(): Unit = { var i = 0 - val len = table.length - while (i < len) { - val curEntry = table(i) - if (curEntry ne null) f(entryToElem(curEntry)) + var count = 0 + var prev: Node[A] = null + while(i < table.length) { + var n = table(i) + prev = null + while(n != null) { + count += 1 + assert(index(n.hash) == i) + if(prev ne null) assert(prev.hash <= n.hash) + prev = n + n = n.next + } i += 1 } + assert(contentSize == count) } + */ + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - override def clone() = new HashSet[A] ++= this + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - private def writeObject(s: java.io.ObjectOutputStream) { - serializeTo(s) + def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 } - private def readObject(in: java.io.ObjectInputStream) { - init(in, x => ()) + override def iterableFactory: IterableFactory[HashSet] = HashSet + + @`inline` def addOne(elem: A): this.type = { add(elem); this } + + @`inline` def subtractOne(elem: A): this.type = { remove(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: A => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } } - /** Toggles whether a size map is used to track hash map statistics. 
- */ - def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild() - } else sizeMapDisable() + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) -} + override protected[this] def className = "HashSet" -/** $factoryInfo - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - */ -object HashSet extends MutableSetFactory[HashSet] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, HashSet[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] - override def empty[A]: HashSet[A] = new HashSet[A] + override def hashCode: Int = { + val setIterator = this.iterator + val hashIterator: Iterator[Any] = + if (setIterator.isEmpty) setIterator + else new HashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[A]): Any = { + hash = unimproveHash(nd.hash) + this + } + } + MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) + } } +/** + * $factoryInfo + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + def from[B](it: scala.collection.IterableOnce[B]): HashSet[B] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashSet[B](cap, defaultLoadFactor) ++= it + } + + def empty[A]: HashSet[A] = new HashSet[A] + + def newBuilder[A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = + new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: 
Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { + def key: K = _key + def hash: Int = _hash + def next: Node[K] = _next + def next_= (n: Node[K]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: K => U): Unit = { + f(_key) + if(_next ne null) _next.foreach(f) + } + + override def toString = s"Node($key, $hash) -> $next" + } +} diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index dc499561e0af..d58f6e01b7ac 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -11,12 +11,15 @@ */ package scala -package collection -package mutable +package collection.mutable + +import collection.{AbstractIterator, Iterator} import java.lang.Integer.{numberOfLeadingZeros, rotateRight} import scala.util.hashing.byteswap32 +import java.lang.Integer + /** This class can be used to construct data structures that are based * on hashtables. 
Class `HashTable[A]` implements a hashtable * that maps keys of type `A` to values of the fully abstract @@ -30,37 +33,36 @@ import scala.util.hashing.byteswap32 * its size is automatically doubled. Both parameters may be changed by * overriding the corresponding values in class `HashTable`. * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * * @tparam A type of the elements contained in this hash table. */ -trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { +// Not used in the standard library, but used in scala-parallel-collections +private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { // Replacing Entry type parameter by abstract type member here allows to not expose to public // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. // However, I'm afraid it's too late now for such breaking change. import HashTable._ - @transient protected var _loadFactor = defaultLoadFactor + protected var _loadFactor = defaultLoadFactor /** The actual hash table. */ - @transient protected var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) + protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) /** The number of mappings contained in this hash table. */ - @transient protected var tableSize: Int = 0 + protected[collection] var tableSize: Int = 0 + + final def size: Int = tableSize /** The next size value at which to resize (capacity * load factor). */ - @transient protected var threshold: Int = initialThreshold(_loadFactor) + protected[collection] var threshold: Int = initialThreshold(_loadFactor) /** The array keeping track of the number of elements in 32 element blocks. 
*/ - @transient protected var sizemap: Array[Int] = null + protected var sizemap: Array[Int] = null - @transient protected var seedvalue: Int = tableSizeSeed + protected var seedvalue: Int = tableSizeSeed protected def tableSizeSeed = Integer.bitCount(table.length - 1) @@ -86,9 +88,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU * Initializes the collection from the input stream. `readEntry` will be called for each * entry to be read from the input stream. */ - private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) { - in.defaultReadObject - + private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { _loadFactor = in.readInt() assert(_loadFactor > 0) @@ -119,8 +119,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To * deserialize, `init` should be used. */ - private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) { - out.defaultWriteObject + private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = { out.writeInt(_loadFactor) out.writeInt(tableSize) out.writeInt(seedvalue) @@ -131,11 +130,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU /** Find entry with given key in table, null if not found. 
*/ - @deprecatedOverriding("No sensible way to override findEntry as private findEntry0 is used in multiple places internally.", "2.11.0") - protected def findEntry(key: A): Entry = + final def findEntry(key: A): Entry = findEntry0(key, index(elemHashCode(key))) - private[this] def findEntry0(key: A, h: Int): Entry = { + protected[collection] final def findEntry0(key: A, h: Int): Entry = { var e = table(h).asInstanceOf[Entry] while (e != null && !elemEquals(e.key, key)) e = e.next e @@ -144,12 +142,11 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU /** Add entry to table * pre: no entry with same key exists */ - @deprecatedOverriding("No sensible way to override addEntry as private addEntry0 is used in multiple places internally.", "2.11.0") - protected def addEntry(e: Entry) { + protected[collection] final def addEntry(e: Entry): Unit = { addEntry0(e, index(elemHashCode(e.key))) } - private[this] def addEntry0(e: Entry, h: Int) { + protected[collection] final def addEntry0(e: Entry, h: Int): Unit = { e.next = table(h).asInstanceOf[Entry] table(h) = e tableSize = tableSize + 1 @@ -164,7 +161,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU * Returns entry found in table or null. * New entries are created by calling `createNewEntry` method. */ - protected def findOrAddEntry[B](key: A, value: B): Entry = { + def findOrAddEntry(key: A, value: B): Entry = { val h = index(elemHashCode(key)) val e = findEntry0(key, h) if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } @@ -174,13 +171,16 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU * This method is guaranteed to be called only once and in case that the entry * will be added. In other words, an implementation may be side-effecting. */ - protected def createNewEntry[B](key: A, value: B): Entry + def createNewEntry(key: A, value: B): Entry /** Remove entry from table if present. 
*/ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def removeEntry(key: A) : Entry = { - val h = index(elemHashCode(key)) + final def removeEntry(key: A) : Entry = { + removeEntry0(key, index(elemHashCode(key))) + } + /** Remove entry from table if present. + */ + private[collection] final def removeEntry0(key: A, h: Int) : Entry = { var e = table(h).asInstanceOf[Entry] if (e != null) { if (elemEquals(e.key, key)) { @@ -209,7 +209,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU /** An iterator returning all entries. */ - protected def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { + def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { val iterTable = table var idx = lastPopulatedIndex var es = iterTable(idx) @@ -227,7 +227,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU } /** Avoid iterator for a 2x faster traversal. */ - protected def foreachEntry[U](f: Entry => U) { + def foreachEntry[U](f: Entry => U): Unit = { val iterTable = table var idx = lastPopulatedIndex var es = iterTable(idx) @@ -246,14 +246,14 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU /** Remove all entries from table */ - protected def clearTable() { + def clearTable(): Unit = { var i = table.length - 1 while (i >= 0) { table(i) = null; i = i - 1 } tableSize = 0 nnSizeMapReset(0) } - private def resize(newSize: Int) { + private def resize(newSize: Int): Unit = { val oldTable = table table = new Array(newSize) nnSizeMapReset(table.length) @@ -292,18 +292,15 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU * is converted into a parallel hash table, the size map is initialized, as it will be needed * there. 
*/ - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) { + protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { sizemap(h >> sizeMapBucketBitSize) += 1 } - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) { + protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { sizemap(h >> sizeMapBucketBitSize) -= 1 } - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { + protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { val nsize = calcSizeMapSize(tableLength) if (sizemap.length != nsize) sizemap = new Array[Int](nsize) else java.util.Arrays.fill(sizemap, 0) @@ -311,17 +308,15 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 + protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 // discards the previous sizemap and only allocates a new one - protected def sizeMapInit(tableLength: Int) { + protected def sizeMapInit(tableLength: Int): Unit = { sizemap = new Array[Int](calcSizeMapSize(tableLength)) } // discards the previous sizemap and populates the new one - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapInitAndRebuild() { + protected final def 
sizeMapInitAndRebuild() = { sizeMapInit(table.length) // go through the buckets, count elements @@ -347,15 +342,13 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU } } - private[collection] def printSizeMap() { - println(sizemap.toList) + private[collection] def printSizeMap() = { + println(sizemap.to(collection.immutable.List)) } - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def sizeMapDisable() = sizemap = null + protected final def sizeMapDisable() = sizemap = null - @deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0") - protected def isSizeMapDefined = sizemap ne null + protected final def isSizeMapDefined = sizemap ne null // override to automatically initialize the size map protected def alwaysInitSizeMap = false @@ -368,32 +361,11 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU * Note: we take the most significant bits of the hashcode, not the lower ones * this is of crucial importance when populating the table in parallel */ - protected final def index(hcode: Int): Int = { + protected[collection] final def index(hcode: Int): Int = { val ones = table.length - 1 val exponent = Integer.numberOfLeadingZeros(ones) (improve(hcode, seedvalue) >>> exponent) & ones } - - protected def initWithContents(c: HashTable.Contents[A, Entry]) = { - if (c != null) { - _loadFactor = c.loadFactor - table = c.table - tableSize = c.tableSize - threshold = c.threshold - seedvalue = c.seedvalue - sizemap = c.sizemap - } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() - } - - private[collection] def hashTableContents = new HashTable.Contents( - _loadFactor, - table, - tableSize, - threshold, - seedvalue, - sizemap - ) } private[collection] object HashTable { @@ -413,7 +385,7 @@ private[collection] object HashTable { // so that: protected final def 
sizeMapBucketSize = 1 << sizeMapBucketBitSize - protected def elemHashCode(key: KeyType) = key.## + protected[collection] def elemHashCode(key: KeyType) = key.## /** * Defer to a high-quality hash in [[scala.util.hashing]]. @@ -426,7 +398,7 @@ private[collection] object HashTable { * h = h + (h << 4) * h ^ (h >>> 10) * }}} - * the rest of the computation is due to scala/bug#5293 + * the rest of the computation is due to SI-5293 */ protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) } @@ -435,27 +407,11 @@ private[collection] object HashTable { * Returns a power of two >= `target`. */ private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} - class Contents[A, Entry >: Null <: HashEntry[A, Entry]]( - val loadFactor: Int, - val table: Array[HashEntry[A, Entry]], - val tableSize: Int, - val threshold: Int, - val seedvalue: Int, - val sizemap: Array[Int] - ) { - import scala.collection.DebugUtils._ - private[collection] def debugInformation = buildString { - append => - append("Hash table contents") - append("-------------------") - append("Table: [" + arrayString(table, 0, table.length) + "]") - append("Table size: " + tableSize) - append("Load factor: " + loadFactor) - append("Seedvalue: " + seedvalue) - append("Threshold: " + threshold) - append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]") - } - } - +/** Class used internally. + */ +private[collection] trait HashEntry[A, E <: HashEntry[A, E]] { + val key: A + var next: E = _ } diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala deleted file mode 100644 index 76bc07898525..000000000000 --- a/src/library/scala/collection/mutable/History.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - - -/** `History[A, B]` objects may subscribe to events of - * type `A` published by an object of type `B`. - * The history subscriber object records all published events - * up to maximum number of `maxHistory` events. - * - * @author Matthias Zenger - * @since 1 - * - * @tparam Evt Type of events. - * @tparam Pub Type of publishers. - */ -@SerialVersionUID(5219213543849892588L) -class History[Evt, Pub] -extends AbstractIterable[(Pub, Evt)] - with Subscriber[Evt, Pub] - with Iterable[(Pub, Evt)] - with Serializable -{ - protected val log: Queue[(Pub, Evt)] = new Queue - val maxHistory: Int = 1000 - - /** Notifies this listener with an event by enqueuing it in the log. - * - * @param pub the publisher. - * @param event the event. - */ - def notify(pub: Pub, event: Evt) { - if (log.length >= maxHistory) - log.dequeue() - - log.enqueue((pub, event)) - } - - override def size: Int = log.length - def iterator: Iterator[(Pub, Evt)] = log.iterator - def events: Iterator[Evt] = log.iterator map (_._2) - - def clear() { log.clear() } - - /** Checks if two history objects are structurally identical. - * - * @return true, iff both history objects contain the same sequence of elements. 
- */ - override def equals(obj: Any): Boolean = obj match { - case that: History[_, _] => this.log equals that.log - case _ => false - } - override def hashCode = log.hashCode() -} diff --git a/src/library/scala/collection/mutable/ImmutableBuilder.scala b/src/library/scala/collection/mutable/ImmutableBuilder.scala new file mode 100644 index 000000000000..3907cfd55305 --- /dev/null +++ b/src/library/scala/collection/mutable/ImmutableBuilder.scala @@ -0,0 +1,31 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + + +/** + * Reusable builder for immutable collections + */ +abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) + extends ReusableBuilder[A, C] { + + protected var elems: C = empty + + def clear(): Unit = { elems = empty } + + def result(): C = elems + + override def knownSize: Int = elems.knownSize +} diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala deleted file mode 100644 index c9ee72a9e02c..000000000000 --- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.migration - -/** This class can be used as an adaptor to create mutable maps from - * immutable map implementations. 
Only method `empty` has - * to be redefined if the immutable map on which this mutable map is - * originally based is not empty. `empty` is supposed to - * return the representation of an empty map. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - */ -@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") -class ImmutableMapAdaptor[A, B](protected var imap: immutable.Map[A, B]) -extends AbstractMap[A, B] - with Map[A, B] - with Serializable -{ - - override def size: Int = imap.size - - def get(key: A): Option[B] = imap.get(key) - - override def isEmpty: Boolean = imap.isEmpty - - override def apply(key: A): B = imap.apply(key) - - override def contains(key: A): Boolean = imap.contains(key) - - override def isDefinedAt(key: A) = imap.isDefinedAt(key) - - override def keySet: scala.collection.Set[A] = imap.keySet - - override def keysIterator: Iterator[A] = imap.keysIterator - - @migration("`keys` returns Iterable[A] rather than Iterator[A].", "2.8.0") - override def keys: scala.collection.Iterable[A] = imap.keys - - override def valuesIterator: Iterator[B] = imap.valuesIterator - - @migration("`values` returns Iterable[B] rather than Iterator[B].", "2.8.0") - override def values: scala.collection.Iterable[B] = imap.values - - def iterator: Iterator[(A, B)] = imap.iterator - - override def toList: List[(A, B)] = imap.toList - - override def update(key: A, value: B): Unit = { imap = imap.updated(key, value) } - - def -= (key: A): this.type = { imap = imap - key; this } - - def += (kv: (A, B)): this.type = { imap = imap + kv; this } - - override def clear(): Unit = { imap = imap.empty } - - override def transform(f: (A, B) => B): this.type = { imap = imap.transform(f); this } - - override def retain(p: (A, B) => Boolean): this.type = { - imap = imap.filter(xy => p(xy._1, xy._2)) - this - } - - override def toString() = imap.toString() -} - diff --git 
a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala deleted file mode 100644 index c3dce8a68480..000000000000 --- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** This class can be used as an adaptor to create mutable sets from - * immutable set implementations. Only method `empty` has - * to be redefined if the immutable set on which this mutable set is - * originally based is not empty. `empty` is supposed to - * return the representation of an empty set. - * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") -class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) -extends AbstractSet[A] - with Set[A] - with Serializable { - - override def size: Int = set.size - - override def isEmpty: Boolean = set.isEmpty - - def contains(elem: A): Boolean = set.contains(elem) - - override def foreach[U](f: A => U): Unit = set.foreach(f) - - override def exists(p: A => Boolean): Boolean = set.exists(p) - - override def toList: List[A] = set.toList - - override def toString = set.toString - - def iterator: Iterator[A] = set.iterator - - def +=(elem: A): this.type = { set = set + elem; this } - - def -=(elem: A): this.type = { set = set - elem; this } - - override def clear(): Unit = { set = set.empty } -} diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala index a3fbd1bc77d9..464bc00d45db 100644 --- 
a/src/library/scala/collection/mutable/IndexedSeq.scala +++ b/src/library/scala/collection/mutable/IndexedSeq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,31 +10,74 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ +trait IndexedSeq[T] extends Seq[T] + with scala.collection.IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] { -/** A subtrait of `collection.IndexedSeq` which represents sequences - * that can be mutated. - * - * $indexedSeqInfo - */ -trait IndexedSeq[A] extends Seq[A] - with scala.collection.IndexedSeq[A] - with GenericTraversableTemplate[A, IndexedSeq] - with IndexedSeqLike[A, IndexedSeq[A]] { - override def companion: GenericCompanion[IndexedSeq] = IndexedSeq - override def seq: IndexedSeq[A] = this + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq } -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable indexed sequence - * @define Coll `mutable.IndexedSeq` - */ -object IndexedSeq extends SeqFactory[IndexedSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A] +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) + +trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] + extends scala.collection.IndexedSeqOps[A, CC, C] + with SeqOps[A, CC, C] { + + /** Modifies this $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. 
+ * @return this $coll modified by replacing all elements with the + * result of applying the given function `f` to each element + * of this $coll. + */ + def mapInPlace(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.SeqOps.sorted]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. + */ + def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + val len = this.length + if (len > 1) { + val arr = new Array[AnyRef](len) + var i = 0 + for (x <- this) { + arr(i) = x.asInstanceOf[AnyRef] + i += 1 + } + java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) + i = 0 + while (i < arr.length) { + update(i, arr(i).asInstanceOf[A]) + i += 1 + } + } + this + } + + /** Sorts this $coll in place according to a comparison function. + * + * @see [[scala.collection.SeqOps.sortWith]] + */ + def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) + + /** Sorts this $coll in place according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.collection.SeqOps.sortBy]] + */ + def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f) + } diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala deleted file mode 100644 index 4419a391e4a4..000000000000 --- a/src/library/scala/collection/mutable/IndexedSeqLike.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** A subtrait of scala.collection.IndexedSeq which represents sequences - * that can be mutated. - * - * It declares a method `update` which allows updating an element - * at a specific index in the sequence. - * - * This trait just implements `iterator` in terms of `apply` and `length`. - * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations - * to make them run faster under the assumption of fast random access with `apply`. - * - * $indexedSeqInfo - * - * @tparam A the element type of the $coll - * @tparam Repr the type of the actual $coll containing the elements. - * - * @define Coll `IndexedSeq` - * @define coll mutable indexed sequence - * @define indexedSeqInfo - * @author Martin Odersky - * @since 2.8 - * @define willNotTerminateInf - * @define mayNotTerminateInf - */ -trait IndexedSeqLike[A, +Repr] extends Any with scala.collection.IndexedSeqLike[A, Repr] { self => - - override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]] - override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]] - - /** Replaces element at given index with a new value. - * - * @param idx the index of the element to replace. - * @param elem the new value. - * @throws IndexOutOfBoundsException if the index is not valid. 
- */ - def update(idx: Int, elem: A) - - /** Creates a view of this iterable @see Iterable.View - */ - override def view = new IndexedSeqView[A, Repr] { - protected lazy val underlying = self.repr - override def iterator = self.iterator - override def length = self.length - override def apply(idx: Int) = self.apply(idx) - override def update(idx: Int, elem: A) = self.update(idx, elem) - } - - /** A sub-sequence view starting at index `from` - * and extending up to (but not including) index `until`. - * - * @param from The index of the first element of the slice - * @param until The index of the element following the slice - * @note The difference between `view` and `slice` is that `view` produces - * a view of the current sequence, whereas `slice` produces a new sequence. - * - * @note view(from, to) is equivalent to view.slice(from, to) - */ - override def view(from: Int, until: Int) = view.slice(from, until) -} diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala deleted file mode 100644 index 7924bd15d32d..000000000000 --- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** A subtrait of scala.collection.IndexedSeq which represents sequences - * that can be mutated. 
- * - * @since 2.8 - */ -trait IndexedSeqOptimized[A, +Repr] extends Any with IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr] diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala deleted file mode 100644 index 2f094680c62b..000000000000 --- a/src/library/scala/collection/mutable/IndexedSeqView.scala +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ - -import TraversableView.NoBuilder - -/** A non-strict view of a mutable `IndexedSeq`. - * $viewInfo - * Some of the operations of this class will yield again a mutable indexed sequence, - * others will just yield a plain indexed sequence of type `collection.IndexedSeq`. - * Because this is a leaf class there is no associated `Like` class. - * @author Martin Odersky - * @since 2.8 - * @tparam A the element type of the view - * @tparam Coll the type of the underlying collection containing the elements. - */ -trait IndexedSeqView[A, +Coll] extends IndexedSeq[A] - with IndexedSeqOptimized[A, IndexedSeqView[A, Coll]] - with SeqView[A, Coll] - with SeqViewLike[A, Coll, IndexedSeqView[A, Coll]] { -self => - - private[this] type This = IndexedSeqView[A, Coll] - - def update(idx: Int, elem: A): Unit - - trait Transformed[B] extends IndexedSeqView[B, Coll] with super.Transformed[B] { - def update(idx: Int, elem: B): Unit - override def toString = viewToString - } - - /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. 
*/ - private[collection] abstract class AbstractTransformed[B] extends super.AbstractTransformed[B] with Transformed[B] - - // pre: until <= self.length - trait Sliced extends super.Sliced with Transformed[A] { - override def length = endpoints.width - def update(idx: Int, elem: A) = - if (idx >= 0 && idx + from < until) self.update(idx + from, elem) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait Filtered extends super.Filtered with Transformed[A] { - def update(idx: Int, elem: A) = self.update(index(idx), elem) - } - - trait TakenWhile extends super.TakenWhile with Transformed[A] { - def update(idx: Int, elem: A) = - if (idx < len) self.update(idx, elem) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait DroppedWhile extends super.DroppedWhile with Transformed[A] { - def update(idx: Int, elem: A) = - if (idx >= 0) self.update(idx + start, elem) - else throw new IndexOutOfBoundsException(idx.toString) - } - - trait Reversed extends super.Reversed with Transformed[A] { - def update(idx: Int, elem: A) = self.update(self.length - 1 - idx, elem) - } - - /** Boilerplate method, to override in each subclass - * This method could be eliminated if Scala had virtual classes - */ - protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered - protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced - protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile - protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile - protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed - - override def filter(p: A => Boolean): This = newFiltered(p) - override def init: This = 
newSliced(SliceInterval(0, self.length - 1)) - override def drop(n: Int): This = newSliced(SliceInterval(n, self.length)) - override def take(n: Int): This = newSliced(SliceInterval(0, n min self.length)) - override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until min self.length)) - override def dropWhile(p: A => Boolean): This = newDroppedWhile(p) - override def takeWhile(p: A => Boolean): This = newTakenWhile(p) - override def span(p: A => Boolean): (This, This) = (newTakenWhile(p), newDroppedWhile(p)) - override def splitAt(n: Int): (This, This) = (take(n), drop(n)) // !!! - override def reverse: This = newReversed - override def tail: IndexedSeqView[A, Coll] = if (isEmpty) super.tail else slice(1, length) -} - -/** An object containing the necessary implicit definitions to make - * `SeqView`s work. Its definitions are generally not accessed directly by clients. - * - * Note that the `canBuildFrom` factories yield `SeqView`s, not `IndexedSeqView`s. - * This is intentional, because not all operations yield again a `mutable.IndexedSeqView`. - * For instance, `map` just gives a `SeqView`, which reflects the fact that - * `map` cannot do its work and maintain a pointer into the original indexed sequence. 
- */ -object IndexedSeqView { - type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]} - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] = - new CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] { - def apply(from: Coll) = new NoBuilder - def apply() = new NoBuilder - } - implicit def arrCanBuildFrom[A]: CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] = - new CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] { - def apply(from: TraversableView[_, Array[_]]) = new NoBuilder - def apply() = new NoBuilder - } -} diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala index ba55be2ace9c..c84d0e6ec675 100644 --- a/src/library/scala/collection/mutable/Iterable.scala +++ b/src/library/scala/collection/mutable/Iterable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,37 +10,25 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable -import generic._ -import parallel.mutable.ParIterable +import scala.collection.{IterableFactory, IterableFactoryDefaults} -/** A base trait for iterable collections that can be mutated. - * $iterableInfo - */ -trait Iterable[A] extends Traversable[A] -// with GenIterable[A] - with scala.collection.Iterable[A] - with GenericTraversableTemplate[A, Iterable] - with IterableLike[A, Iterable[A]] - with Parallelizable[A, ParIterable[A]] -{ - override def companion: GenericCompanion[Iterable] = Iterable - protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `mutable.IterableLike` gets introduced, please move this there! 
- override def seq: Iterable[A] = this -} +trait Iterable[A] + extends collection.Iterable[A] + with collection.IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable iterable collection - * @define Coll `mutable.Iterable` - */ -object Iterable extends TraversableFactory[Iterable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Iterable[A]] = new ArrayBuffer + override def iterableFactory: IterableFactory[Iterable] = Iterable } +/** + * $factoryInfo + * @define coll mutable collection + * @define Coll `mutable.Iterable` + */ +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](ArrayBuffer) + /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A] diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala deleted file mode 100644 index 409696f139ae..000000000000 --- a/src/library/scala/collection/mutable/LazyBuilder.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** A builder that constructs its result lazily. Iterators or iterables to - * be added to this builder with `++=` are not evaluated until `result` is called. - * - * This builder can be reused. - * - * @since 2.8 - * - * @tparam Elem type of the elements for this builder. 
- * @tparam To type of the collection this builder builds. - */ -abstract class LazyBuilder[Elem, +To] extends ReusableBuilder[Elem, To] { - /** The different segments of elements to be added to the builder, represented as iterators */ - protected var parts = new ListBuffer[TraversableOnce[Elem]] - def +=(x: Elem): this.type = { parts += List(x); this } - override def ++=(xs: TraversableOnce[Elem]): this.type = { parts += xs ; this } - def result(): To - def clear() { parts.clear() } -} diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala deleted file mode 100644 index 1d48258ecc07..000000000000 --- a/src/library/scala/collection/mutable/LinearSeq.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ - -/** A subtrait of `collection.LinearSeq` which represents sequences - * that can be mutated. - * $linearSeqInfo - * - * @define Coll `LinearSeq` - * @define coll linear sequence - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-lists "Scala's Collection Library overview"]] - * section on `Mutable Lists` for more information. - */ -trait LinearSeq[A] extends Seq[A] - with scala.collection.LinearSeq[A] - with GenericTraversableTemplate[A, LinearSeq] - with LinearSeqLike[A, LinearSeq[A]] { - override def companion: GenericCompanion[LinearSeq] = LinearSeq - override def seq: LinearSeq[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is a `MutableList`. 
- * @define coll mutable linear sequence - * @define Coll `mutable.LinearSeq` - */ -object LinearSeq extends SeqFactory[LinearSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, LinearSeq[A]] = new MutableList[A] -} diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala deleted file mode 100644 index 38c17806dbde..000000000000 --- a/src/library/scala/collection/mutable/LinkedEntry.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** Class for the linked hash map entry, used internally. - * @since 2.8 - */ -@SerialVersionUID(-2671939643954900582L) -final class LinkedEntry[A, B](val key: A, var value: B) - extends HashEntry[A, LinkedEntry[A, B]] with Serializable { - var earlier: LinkedEntry[A, B] = null - var later: LinkedEntry[A, B] = null -} - diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index b5daf4055e1a..d529fee42596 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,139 +14,307 @@ package scala package collection package mutable -import generic._ +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 -/** $factoryInfo - * @define Coll `LinkedHashMap` - * @define coll linked hash map - */ -object LinkedHashMap extends MutableMapFactory[LinkedHashMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), LinkedHashMap[A, B]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (A, B), LinkedHashMap[A, B]]] - private [this] val ReusableCBF = new MapCanBuildFrom[Any, Any] - def empty[A, B] = new LinkedHashMap[A, B] -} /** This class implements mutable maps using a hashtable. * The iterator and all traversal methods of this class visit elements in the order they were inserted. * - * @tparam A the type of the keys contained in this hash map. - * @tparam B the type of the values assigned to keys in this hash map. + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. * * @define Coll `LinkedHashMap` * @define coll linked hash map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `LinkedHashMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[LinkedHashMap, (A, B), LinkedHashMap[A, B]]` - * is defined in object `LinkedHashMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `LinkedHashMap`. 
* @define mayNotTerminateInf * @define willNotTerminateInf * @define orderDependent * @define orderDependentFold */ -@SerialVersionUID(1L) -class LinkedHashMap[A, B] extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, LinkedHashMap[A, B]] - with HashTable[A, LinkedEntry[A, B]] - with Serializable -{ +@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") +class LinkedHashMap[K, V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] + with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap + + // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper + // would not return the elements in insertion order + + private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] + + private[collection] def _firstEntry: Entry = firstEntry + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null - override def empty = LinkedHashMap.empty[A, B] - override def size = tableSize + /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. 
+ */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) - type Entry = LinkedEntry[A, B] + private[this] var threshold: Int = newThreshold(table.length) - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null + private[this] var contentSize = 0 - def get(key: A): Option[B] = { + override def last: (K, V) = + if (size > 0) (lastEntry.key, lastEntry.value) + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") + + override def lastOption: Option[(K, V)] = + if (size > 0) Some((lastEntry.key, lastEntry.value)) + else None + + override def head: (K, V) = + if (size > 0) (firstEntry.key, firstEntry.value) + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") + + override def headOption: Option[(K, V)] = + if (size > 0) Some((firstEntry.key, firstEntry.value)) + else None + + override def size = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def get(key: K): Option[V] = { val e = findEntry(key) if (e == null) None else Some(e.value) } + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } - override def contains(key: A): Boolean = { + override def contains(key: K): Boolean = { if (getClass eq classOf[LinkedHashMap[_, _]]) findEntry(key) != null else super.contains(key) // A subclass might override `get`, use the default implementation `contains`. 
} - override def put(key: A, value: B): Option[B] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } + override def put(key: K, value: V): Option[V] = put0(key, value, getOld = true) match { + case null => None + case sm => sm } - override def remove(key: A): Option[B] = { - val e = removeEntry(key) - if (e eq null) None - else { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - Some(e.value) + override def update(key: K, value: V): Unit = put0(key, value, getOld = false) + + override def remove(key: K): Option[V] = removeEntry0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. but in the common case, we can avoid the Option boxing. + val nd = findEntry(key) + if (nd eq null) default else nd.value } } - @deprecatedOverriding("+= should not be overridden so it stays consistent with put.", "2.11.0") - def += (kv: (A, B)): this.type = { put(kv._1, kv._2); this } + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findEntry(key, hash) + } + if (nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if (contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. 
+ val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, getOld = false, hash, newIdx) + default + } + } + } - @deprecatedOverriding("-= should not be overridden so it stays consistent with remove.", "2.11.0") - def -=(key: A): this.type = { remove(key); this } + private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) - def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } - else Iterator.empty.next() + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def removeEntry0(elem: K, hash: Int): Entry = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } } - protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) { - override def empty = LinkedHashMap.empty + /** Computes the improved hash of an original (`any.##`) hash. 
*/ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) } + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p) + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) - protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) { - override def empty = LinkedHashMap.empty - } + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f) + @`inline` private[this] def findEntry(key: K): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } - protected class DefaultKeySet extends super.DefaultKeySet { - override def empty = LinkedHashSet.empty + def addOne(kv: (K, V)): this.type = { + put(kv._1, kv._2) + this } - override def keySet: scala.collection.Set[A] = new DefaultKeySet + def subtractOne(key: K): this.type = { + remove(key) + this + } - override def keysIterator: Iterator[A] = new AbstractIterator[A] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = cur.key; cur = cur.later; res } + private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } else Iterator.empty.next() } - override def valuesIterator: Iterator[B] = new AbstractIterator[B] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = cur.value; cur = cur.later; res } - else Iterator.empty.next() + def iterator: Iterator[(K, V)] = + if (size == 0) 
Iterator.empty + else new LinkedHashMapIterator[(K, V)] { + def extract(nd: Entry): (K, V) = (nd.key, nd.value) + } + + protected class LinkedKeySet extends KeySet { + override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet + } + + override def keySet: collection.Set[K] = new LinkedKeySet + + override def keysIterator: Iterator[K] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[K] { + def extract(nd: Entry): K = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[Entry] { + def extract(nd: Entry): Entry = nd + } + + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... 
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundEntry: Entry = null + var previousEntry: Entry = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousEntry = prev + foundEntry = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findEntry(nd, nd.next, k, h) + } + + findEntry(null, nd, key, hash) + } + + val previousValue = foundEntry match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousEntry != null) previousEntry.next = foundEntry.next + else table(indexedHash) = foundEntry.next + deleteEntry(foundEntry) + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, getOld = false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundEntry.value = newValue + } + nextValue + } } - override def foreach[U](f: ((A, B)) => U) { + override def valuesIterator: Iterator[V] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[V] { + def extract(nd: Entry): V = nd.value + } + + + override def foreach[U](f: ((K, V)) => U): Unit = { var cur = firstEntry while (cur ne null) { f((cur.key, cur.value)) @@ -154,38 +322,188 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] } } - protected override def foreachEntry[U](f: Entry => U) { + override def foreachEntry[U](f: (K, V) => U): Unit = { var cur = firstEntry while (cur ne null) { - f(cur) + f(cur.key, cur.value) cur = cur.later } } - protected def createNewEntry[B1](key: A, value: B1): Entry = { - val e = new Entry(key, value.asInstanceOf[B]) + override def clear(): Unit = 
{ + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. + * */ + private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { + val e = new Entry(key, hash, value) if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } + else { + lastEntry.later = e + e.earlier = lastEntry + } lastEntry = e e } - override def clear() { - clearTable() - firstEntry = null - lastEntry = null + /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null } - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) } - private def readObject(in: java.io.ObjectInputStream) { - firstEntry = null - lastEntry = null - init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject())) + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = createNewEntry(key, hash, value) + case 
old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if (getOld) Some(old) else null + } + prev = n + n = n.next + } + val nnode = createNewEntry(key, hash, value) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + null + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new LinkedHashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashMap" +} + +/** $factoryInfo + * @define Coll `LinkedHashMap` + * @define coll linked hash map + */ +@SerialVersionUID(3L) +object LinkedHashMap extends MapFactory[LinkedHashMap] { + + def empty[K, V] = new LinkedHashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]) = { + val newlhm = empty[K, V] + newlhm.sizeHint(it, delta = 0) + newlhm.addAll(it) + newlhm } + + def newBuilder[K, V]: GrowableBuilder[(K, V), LinkedHashMap[K, V]] = new GrowableBuilder(empty[K, V]) + + /** Class for the linked hash map entry, used internally. 
+ */ + private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { + var earlier: LinkedEntry[K, V] = null + var later: LinkedEntry[K, V] = null + var next: LinkedEntry[K, V] = null + + @tailrec + final def findEntry(k: K, h: Int): LinkedEntry[K, V] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 } diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index af8f364f4a1e..1a189d607010 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,82 +14,115 @@ package scala package collection package mutable -import generic._ +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 /** This class implements mutable sets using a hashtable. * The iterator and all traversal methods of this class visit elements in the order they were inserted. * - * @author Matthias Zenger - * @author Martin Odersky - * @author Pavel Pavlov - * @since 1 - * * @tparam A the type of the elements contained in this set. * * @define Coll `LinkedHashSet` * @define coll linked hash set - * @define thatinfo the class of the returned collection. 
In the standard library configuration, - * `That` is always `LinkedHashSet[B]` because an implicit of type `CanBuildFrom[LinkedHashSet, B, LinkedHashSet[B]]` - * is defined in object `LinkedHashSet`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `LinkedHashSet`. * @define mayNotTerminateInf * @define willNotTerminateInf * @define orderDependent * @define orderDependentFold */ -@SerialVersionUID(1L) -class LinkedHashSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, LinkedHashSet] - with SetLike[A, LinkedHashSet[A]] - with HashTable[A, LinkedHashSet.Entry[A]] - with Serializable -{ - override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet +@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") +class LinkedHashSet[A] + extends AbstractSet[A] + with SetOps[A, LinkedHashSet, LinkedHashSet[A]] + with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] + with IterableFactoryDefaults[A, LinkedHashSet] + with DefaultSerializable { + + override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet + + // stepper is not overridden to use XTableStepper because that stepper would not return the + // elements in insertion order + + /*private*/ type Entry = LinkedHashSet.Entry[A] + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. 
+ */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) - type Entry = LinkedHashSet.Entry[A] + private[this] var threshold: Int = newThreshold(table.length) - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null + private[this] var contentSize = 0 - override def size: Int = tableSize + override def last: A = + if (size > 0) lastEntry.key + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") + + override def lastOption: Option[A] = + if (size > 0) Some(lastEntry.key) + else None + + override def head: A = + if (size > 0) firstEntry.key + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") + + override def headOption: Option[A] = + if (size > 0) Some(firstEntry.key) + else None + + override def size: Int = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 def contains(elem: A): Boolean = findEntry(elem) ne null - @deprecatedOverriding("+= should not be overridden so it stays consistent with add.", "2.11.0") - def += (elem: A): this.type = { add(elem); this } + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } - @deprecatedOverriding("-= should not be overridden so it stays consistent with remove.", "2.11.0") - def -= (elem: A): this.type = { remove(elem); this } + override def add(elem: A): Boolean = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(elem) + put0(elem, hash, index(hash)) + } - override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null + def addOne(elem: A): this.type = { + add(elem) + this + } - override def remove(elem: A): Boolean = { - val e = removeEntry(elem) - if (e eq null) false - else { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - 
if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - true - } + def subtractOne(elem: A): this.type = { + remove(elem) + this } - def iterator: Iterator[A] = new AbstractIterator[A] { - private var cur = firstEntry - def hasNext = cur ne null - def next = - if (hasNext) { val res = cur.key; cur = cur.later; res } + override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) + + private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } else Iterator.empty.next() } - override def foreach[U](f: A => U) { + def iterator: Iterator[A] = new LinkedHashSetIterator[A] { + override def extract(nd: Entry): A = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { + override def extract(nd: Entry): Entry = nd + } + + override def foreach[U](f: A => U): Unit = { var cur = firstEntry while (cur ne null) { f(cur.key) @@ -97,56 +130,219 @@ class LinkedHashSet[A] extends AbstractSet[A] } } - protected override def foreachEntry[U](f: Entry => U) { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt + + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** 
Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: A): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) } } - protected def createNewEntry[B](key: A, dummy: B): Entry = { - val e = new Entry(key) + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. + * */ + private[this] def createNewEntry(key: A, hash: Int): Entry = { + val e = new Entry(key, hash) if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } + else { + lastEntry.later = e + e.earlier = lastEntry + } lastEntry = e e } - override def clear() { - clearTable() - firstEntry = null - lastEntry = null + /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null } - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { e => out.writeObject(e.key) }) + private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { + table(idx) match { + case null => + table(idx) = createNewEntry(elem, hash) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + val nnode = createNewEntry(elem, hash) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + true } - private def 
readObject(in: java.io.ObjectInputStream) { - firstEntry = null - lastEntry = null - init(in, createNewEntry(in.readObject().asInstanceOf[A], null)) + private[this] def remove0(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[A], 0) + val preHigh = new Entry(null.asInstanceOf[A], 0) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + val setHashIterator = + if (isEmpty) this.iterator + else { + new LinkedHashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = unimproveHash(nd.hash) + this + } + } + } + MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashSet" } /** $factoryInfo * @define Coll `LinkedHashSet` * @define coll linked hash set */ -object LinkedHashSet extends MutableSetFactory[LinkedHashSet] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, LinkedHashSet[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] +@SerialVersionUID(3L) +object LinkedHashSet extends IterableFactory[LinkedHashSet] { + override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + def from[E](it: collection.IterableOnce[E]) = { + val newlhs = empty[E] + newlhs.sizeHint(it, delta = 0) + newlhs.addAll(it) + newlhs + } + + def newBuilder[A]: GrowableBuilder[A, LinkedHashSet[A]] = new GrowableBuilder(empty[A]) + /** Class for the linked hash set entry, used internally. 
- * @since 2.10 */ - @SerialVersionUID(6056749505994053009L) - private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable { + private[mutable] final class Entry[A](val key: A, val hash: Int) { var earlier: Entry[A] = null var later: Entry[A] = null + var next: Entry[A] = null + + @tailrec + final def findEntry(k: A, h: Int): Entry[A] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 } diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala deleted file mode 100644 index 7d051fc33946..000000000000 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ - -/** A more traditional/primitive style of linked list where the "list" is also the "head" link. Links can be manually - * created and manipulated, though the use of the API, when possible, is recommended. 
- * - * The danger of directly manipulating next: - * {{{ - * scala> val b = LinkedList(1) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1) - * - * scala> b.next = null - * - * scala> println(b) - * java.lang.NullPointerException - * }}} - * - * $singleLinkedListExample - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#linked-lists "Scala's Collection Library overview"]] - * section on `Linked Lists` for more information. - * - * @tparam A the type of the elements contained in this linked list. - * - * @constructor Creates an "empty" list, defined as a single node with no data element and next pointing to itself. - - * @define Coll `LinkedList` - * @define coll linked list - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `LinkedList[B]` because an implicit of type `CanBuildFrom[LinkedList, B, LinkedList[B]]` - * is defined in object `LinkedList`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `LinkedList`. 
- * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define collectExample Example: - * {{{ - * scala> val a = LinkedList(1, 2, 3) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3) - * - * scala> val addOne: PartialFunction[Any, Float] = {case i: Int => i + 1.0f} - * addOne: PartialFunction[Any,Float] = - * - * scala> val b = a.collect(addOne) - * b: scala.collection.mutable.LinkedList[Float] = LinkedList(2.0, 3.0, 4.0) - * - * scala> val c = LinkedList('a') - * c: scala.collection.mutable.LinkedList[Char] = LinkedList(a) - * - * scala> val d = a ++ c - * d: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, 3, a) - * - * scala> val e = d.collect(addOne) - * e: scala.collection.mutable.LinkedList[Float] = LinkedList(2.0, 3.0, 4.0) - * }}} - */ -@SerialVersionUID(-7308240733518833071L) -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -class LinkedList[A]() extends AbstractSeq[A] - with LinearSeq[A] - with GenericTraversableTemplate[A, LinkedList] - with LinkedListLike[A, LinkedList[A]] - with Serializable { - next = this - - /** Creates a new list. If the parameter next is null, the result is an empty list. Otherwise, the result is - * a list with elem at the head, followed by the contents of next. - * - * Note that next is part of the new list, as opposed to the +: operator, - * which makes a new copy of the original list. 
- * - * @example - * {{{ - * scala> val m = LinkedList(1) - * m: scala.collection.mutable.LinkedList[Int] = LinkedList(1) - * - * scala> val n = new LinkedList[Int](2, m) - * n: scala.collection.mutable.LinkedList[Int] = LinkedList(2, 1) - * }}} - */ - def this(elem: A, next: LinkedList[A]) { - this() - if (next != null) { - this.elem = elem - this.next = next - } - } - - override def companion: GenericCompanion[LinkedList] = LinkedList -} - -/** $factoryInfo - * @define Coll `LinkedList` - * @define coll linked list - */ -@deprecated("low-level linked lists are deprecated", "2.11.0") -object LinkedList extends SeqFactory[LinkedList] { - override def empty[A]: LinkedList[A] = new LinkedList[A] - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, LinkedList[A]] = - (new MutableList) mapResult ((l: MutableList[A]) => l.toLinkedList) -} diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala deleted file mode 100644 index 3653729237b4..000000000000 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.tailrec - -/** This extensible class may be used as a basis for implementing linked - * list. Type variable `A` refers to the element type of the - * list, type variable `This` is used to model self types of - * linked lists. 
- * - * $singleLinkedListExample - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 2.8 - * - * @tparam A type of the elements contained in the linked list - * @tparam This the type of the actual linked list holding the elements - * - * @define Coll `LinkedList` - * @define coll linked list - * - * @define singleLinkedListExample - * If the list is empty `next` must be set to `this`. The last node in every - * mutable linked list is empty. - * - * Examples (`_` represents no value): - * - * {{{ - * - * Empty: - * - * [ _ ] --, - * [ ] <-` - * - * Single element: - * - * [ x ] --> [ _ ] --, - * [ ] <-` - * - * More elements: - * - * [ x ] --> [ y ] --> [ z ] --> [ _ ] --, - * [ ] <-` - * - * }}} - */ -@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") -trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self => - - var elem: A = _ - var next: This = _ - - override def isEmpty = next eq this - - /** Determines the length of this $coll by traversing and counting every - * node. - */ - override def length: Int = length0(repr, 0) - - @tailrec private def length0(elem: This, acc: Int): Int = - if (elem.isEmpty) acc else length0(elem.next, acc + 1) - - override def head: A = - if (isEmpty) throw new NoSuchElementException - else elem - - override def tail: This = { - require(nonEmpty, "tail of empty list") - next - } - - /** If `this` is empty then it does nothing and returns `that`. Otherwise, appends `that` to `this`. The append - * requires a full traversal of `this`. 
- * - * Examples: - * - * {{{ - * scala> val a = LinkedList(1, 2) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> val b = LinkedList(1, 2) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> a.append(b) - * res0: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 1, 2) - * - * scala> println(a) - * LinkedList(1, 2, 1, 2) - * }}} - * - * {{{ - * scala> val a = new LinkedList[Int]() - * a: scala.collection.mutable.LinkedList[Int] = LinkedList() - * - * scala> val b = LinkedList(1, 2) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> val c = a.append(b) - * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> println(a) - * LinkedList() - * }}} - * - * @return the list after append (this is the list itself if nonempty, - * or list `that` if list this is empty. ) - */ - def append(that: This): This = { - @tailrec - def loop(x: This) { - if (x.next.isEmpty) x.next = that - else loop(x.next) - } - if (isEmpty) that - else { loop(repr); repr } - } - - /** Insert linked list `that` at current position of this linked list - * @note this linked list must not be empty - */ - def insert(that: This): Unit = { - require(nonEmpty, "insert into empty list") - if (that.nonEmpty) { - that append next - next = that - } - } - - override def drop(n: Int): This = { - var i = 0 - var these: This = repr - while (i < n && !these.isEmpty) { - these = these.next - i += 1 - } - these - } - - private def atLocation[T](n: Int)(f: This => T) = { - val loc = drop(n) - if (loc.nonEmpty) f(loc) - else throw new IndexOutOfBoundsException(n.toString) - } - - override def apply(n: Int): A = atLocation(n)(_.elem) - def update(n: Int, x: A): Unit = atLocation(n)(_.elem = x) - - def get(n: Int): Option[A] = { - val loc = drop(n) - if (loc.nonEmpty) Some(loc.elem) - else None - } - - override def iterator: Iterator[A] = new AbstractIterator[A] { - var elems = self - def hasNext 
= elems.nonEmpty - def next = { - val res = elems.elem - elems = elems.next - res - } - } - - override def foreach[U](f: A => U) { - var these = this - while (these.nonEmpty) { - f(these.elem) - these = these.next - } - } - - /** Return a clone of this list. - * - * @return a `LinkedList` with the same elements. - */ - override def clone(): This = { - val bf = newBuilder - bf ++= this - bf.result() - } -} diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index 477bc67ef979..241f1edc480b 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,474 +10,412 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ -import immutable.{List, Nil, ::} -import java.io.{ObjectOutputStream, ObjectInputStream} +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.CommonErrors +import scala.collection.immutable.{::, List, Nil} +import java.lang.{IllegalArgumentException, IndexOutOfBoundsException} + +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence /** A `Buffer` implementation backed by a list. It provides constant time - * prepend and append. Most other operations are linear. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#list-buffers "Scala's Collection Library overview"]] - * section on `List Buffers` for more information. - * - * @tparam A the type of this list buffer's elements. 
- * - * @define Coll `ListBuffer` - * @define coll list buffer - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ListBuffer[B]` because an implicit of type `CanBuildFrom[ListBuffer, B, ListBuffer[B]]` - * is defined in object `ListBuffer`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ListBuffer`. - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3419063961353022662L) -final class ListBuffer[A] - extends AbstractBuffer[A] - with Buffer[A] - with GenericTraversableTemplate[A, ListBuffer] - with BufferLike[A, ListBuffer[A]] - with ReusableBuilder[A, List[A]] - with SeqForwarder[A] - with Serializable -{ - override def companion: GenericCompanion[ListBuffer] = ListBuffer - - import scala.collection.Traversable - import scala.collection.immutable.ListSerializeEnd - - /** Expected invariants: - * If start.isEmpty, last0 == null - * If start.nonEmpty, last0 != null - * If len == 0, start.isEmpty - * If len > 0, start.nonEmpty - */ - private var start: List[A] = Nil - private var last0: ::[A] = _ - private[this] var exported: Boolean = false + * prepend and append. Most other operations are linear. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#list-buffers "Scala's Collection Library overview"]] + * section on `List Buffers` for more information. + * + * @tparam A the type of this list buffer's elements. 
+ * + * @define Coll `ListBuffer` + * @define coll list buffer + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(-8428291952499836345L) +class ListBuffer[A] + extends AbstractBuffer[A] + with SeqOps[A, ListBuffer, ListBuffer[A]] + with StrictOptimizedSeqOps[A, ListBuffer, ListBuffer[A]] + with ReusableBuilder[A, immutable.List[A]] + with IterableFactoryDefaults[A, ListBuffer] + with DefaultSerializable { + @transient private[this] var mutationCount: Int = 0 + + private var first: List[A] = Nil + private var last0: ::[A] = null // last element (`last0` just because the name `last` is already taken) + private[this] var aliased = false private[this] var len = 0 - protected def underlying: List[A] = start - - private def writeObject(out: ObjectOutputStream) { - // write start - var xs: List[A] = start - while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail } - out.writeObject(ListSerializeEnd) + private type Predecessor = ::[A] /*| Null*/ - // no need to write last0 + def iterator: Iterator[A] = new MutationTracker.CheckedIterator(first.iterator, mutationCount) - // write if exported - out.writeBoolean(exported) + override def iterableFactory: SeqFactory[ListBuffer] = ListBuffer - // write the length - out.writeInt(len) - } + @throws[IndexOutOfBoundsException] + def apply(i: Int) = first.apply(i) - private def readObject(in: ObjectInputStream) { - // read start, set last0 appropriately - var elem: A = in.readObject.asInstanceOf[A] - if (elem == ListSerializeEnd) { - start = Nil - last0 = null - } else { - var current = new ::(elem, Nil) - start = current - elem = in.readObject.asInstanceOf[A] - while (elem != ListSerializeEnd) { - val list = new ::(elem, Nil) - current.tl = list - current = list - elem = in.readObject.asInstanceOf[A] - } - last0 = current - start - } + def length = len + override def knownSize = len - // read if exported - exported = in.readBoolean() + 
override def isEmpty: Boolean = len == 0 - // read the length - len = in.readInt() + private def copyElems(): Unit = { + val buf = new ListBuffer[A].freshFrom(this) + first = buf.first + last0 = buf.last0 + aliased = false } - /** The current length of the buffer. - * - * This operation takes constant time. - */ - override def length = len - - // Don't use the inherited size, which forwards to a List and is O(n). - override def size = length - - // Override with efficient implementations using the extra size information available to ListBuffer. - override def isEmpty: Boolean = len == 0 - override def nonEmpty: Boolean = len > 0 + // we only call this before mutating things, so it's + // a good place to track mutations for the iterator + private def ensureUnaliased(): Unit = { + mutationCount += 1 + if (aliased) copyElems() + } - // Implementations of abstract methods in Buffer + // Avoids copying where possible. + override def toList: List[A] = { + aliased = nonEmpty + // We've accumulated a number of mutations to `List.tail` by this stage. + // Make sure they are visible to threads that the client of this ListBuffer might be about + // to share this List with. + releaseFence() + first + } - override def apply(n: Int): A = - if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString()) - else super.apply(n) + def result(): immutable.List[A] = toList - /** Replaces element at index `n` with the new element - * `newelem`. Takes time linear in the buffer size. (except the - * first element, which is updated in constant time). - * - * @param n the index of the element to replace. - * @param x the new element. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def update(n: Int, x: A) { - // We check the bounds early, so that we don't trigger copying. 
- if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString) - ensureUnaliased() - if (n == 0) { - val newElem = new :: (x, start.tail) - if (last0 eq start) { - last0 = newElem - } - start = newElem - } else { - var cursor = start - var i = 1 - while (i < n) { - cursor = cursor.tail - i += 1 - } - val newElem = new :: (x, cursor.tail.tail) - if (last0 eq cursor.tail) { - last0 = newElem - } - cursor.asInstanceOf[::[A]].tl = newElem + /** Prepends the elements of this buffer to a given list + * + * @param xs the list to which elements are prepended + */ + def prependToList(xs: List[A]): List[A] = { + if (isEmpty) xs + else { + ensureUnaliased() + last0.next = xs + toList } } - /** Appends a single element to this buffer. This operation takes constant time. - * - * @param x the element to append. - * @return this $coll. - */ - def += (x: A): this.type = { + def clear(): Unit = { + mutationCount += 1 + first = Nil + len = 0 + last0 = null + aliased = false + } + + final def addOne(elem: A): this.type = { ensureUnaliased() - val last1 = new ::[A](x, Nil) - if (len == 0) start = last1 else last0.tl = last1 + val last1 = new ::[A](elem, Nil) + if (len == 0) first = last1 else last0.next = last1 last0 = last1 len += 1 this } - override def ++=(xs: TraversableOnce[A]): this.type = xs match { - case x: AnyRef if x eq this => this ++= (this take size) - case _ => super.++=(xs) - - } - - override def ++=:(xs: TraversableOnce[A]): this.type = - if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs) - - /** Clears the buffer contents. 
- */ - def clear() { - start = Nil - last0 = null - exported = false - len = 0 + // MUST only be called on fresh instances + private def freshFrom(xs: IterableOnce[A]): this.type = { + val it = xs.iterator + if (it.hasNext) { + var len = 1 + var last0 = new ::[A](it.next(), Nil) + first = last0 + while (it.hasNext) { + val last1 = new ::[A](it.next(), Nil) + last0.next = last1 + last0 = last1 + len += 1 + } + // copy local vars into instance + this.len = len + this.last0 = last0 + } + this } - /** Prepends a single element to this buffer. This operation takes constant - * time. - * - * @param x the element to prepend. - * @return this $coll. - */ - def +=: (x: A): this.type = { - ensureUnaliased() - val newElem = new :: (x, start) - if (isEmpty) last0 = newElem - start = newElem - len += 1 + override final def addAll(xs: IterableOnce[A]): this.type = { + val it = xs.iterator + if (it.hasNext) { + val fresh = new ListBuffer[A].freshFrom(it) + ensureUnaliased() + if (len == 0) first = fresh.first + else last0.next = fresh.first + last0 = fresh.last0 + len += fresh.length + } this } - /** Inserts new elements at the index `n`. Opposed to method - * `update`, this method will not replace an element with a new - * one. Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param seq the iterable object providing all elements to insert. - * @throws IndexOutOfBoundsException if `n` is out of bounds. - */ - def insertAll(n: Int, seq: Traversable[A]) { - // We check the bounds early, so that we don't trigger copying. 
- if (n < 0 || n > len) throw new IndexOutOfBoundsException(n.toString) + override def subtractOne(elem: A): this.type = { ensureUnaliased() - var elems = seq.toList.reverse - len += elems.length - if (n == 0) { - while (!elems.isEmpty) { - val newElem = new :: (elems.head, start) - if (start.isEmpty) last0 = newElem - start = newElem - elems = elems.tail - } - } else { - var cursor = start - var i = 1 - while (i < n) { + if (isEmpty) {} + else if (first.head == elem) { + first = first.tail + reduceLengthBy(1) + } + else { + var cursor = first + while (!cursor.tail.isEmpty && cursor.tail.head != elem) { cursor = cursor.tail - i += 1 } - while (!elems.isEmpty) { - val newElem = new :: (elems.head, cursor.tail) - if (cursor.tail.isEmpty) last0 = newElem - cursor.asInstanceOf[::[A]].tl = newElem - elems = elems.tail + if (!cursor.tail.isEmpty) { + val z = cursor.asInstanceOf[::[A]] + if (z.next == last0) + last0 = z + z.next = cursor.tail.tail + reduceLengthBy(1) } } + this } /** Reduce the length of the buffer, and null out last0 - * if this reduces the length to 0. - */ - private def reduceLengthBy(num: Int) { + * if this reduces the length to 0. + */ + private def reduceLengthBy(num: Int): Unit = { len -= num if (len <= 0) // obviously shouldn't be < 0, but still better not to leak last0 = null } - /** Removes a given number of elements on a given index position. May take - * time linear in the buffer size. - * - * @param n the index which refers to the first element to remove. - * @param count the number of elements to remove. - * @throws IndexOutOfBoundsException if the index `n` is not in the valid range - * `0 <= n <= length - count` (with `count > 0`). - * @throws IllegalArgumentException if `count < 0`. 
- */ - override def remove(n: Int, count: Int) { - if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString) - else if (count == 0) return // Nothing to do - if (n < 0 || n > len - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString) + // returns the `::` at `i - 1` (such that its `next` at position `i` can be mutated), or `null` if `i == 0`. + private def predecessor(i: Int): Predecessor = + if (i == 0) null + else if (i == len) last0 + else { + var j = i - 1 + var p = first + while (j > 0) { + p = p.tail + j -= 1 + } + p.asInstanceOf[Predecessor] + } + + private def getNext(p: Predecessor): List[A] = + if (p == null) first else p.next + + def update(idx: Int, elem: A): Unit = { ensureUnaliased() - val n1 = n max 0 - val count1 = count min (len - n1) - if (n1 == 0) { - var c = count1 - while (c > 0) { - start = start.tail - c -= 1 + if (idx < 0 || idx >= len) throw CommonErrors.indexOutOfBounds(index = idx, max = len - 1) + if (idx == 0) { + val newElem = new :: (elem, first.tail) + if (last0 eq first) { + last0 = newElem } + first = newElem } else { - var cursor = start - var i = 1 - while (i < n1) { - cursor = cursor.tail - i += 1 - } - var c = count1 - while (c > 0) { - if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]] - cursor.asInstanceOf[::[A]].tl = cursor.tail.tail - c -= 1 + // `p` can not be `null` because the case where `idx == 0` is handled above + val p = predecessor(idx) + val newElem = new :: (elem, p.tail.tail) + if (last0 eq p.tail) { + last0 = newElem } + p.asInstanceOf[::[A]].next = newElem } - reduceLengthBy(count1) } -// Implementation of abstract method in Builder + def insert(idx: Int, elem: A): Unit = { + ensureUnaliased() + if (idx < 0 || idx > len) throw CommonErrors.indexOutOfBounds(index = idx, max = len - 1) + if (idx == len) addOne(elem) + else { + val p = predecessor(idx) + val nx = elem :: getNext(p) + if(p eq null) first = nx else 
p.next = nx + len += 1 + } + } - /** Returns the accumulated `List`. - * - * This method may be called multiple times to obtain snapshots of the list in different stages of construction. - */ - def result: List[A] = toList + def prepend(elem: A): this.type = { + insert(0, elem) + this + } - /** Converts this buffer to a list. Takes constant time. The buffer is - * copied lazily the first time it is mutated. - */ - override def toList: List[A] = { - exported = !isEmpty - start + // `fresh` must be a `ListBuffer` that only we have access to + private def insertAfter(prev: Predecessor, fresh: ListBuffer[A]): Unit = { + if (!fresh.isEmpty) { + val follow = getNext(prev) + if (prev eq null) first = fresh.first else prev.next = fresh.first + fresh.last0.next = follow + if (follow.isEmpty) last0 = fresh.last0 + len += fresh.length + } } - // scala/bug#11869 - override def toSeq: collection.Seq[A] = toList - override def toIterable: collection.Iterable[A] = toList - override def toStream: immutable.Stream[A] = toList.toStream // mind the laziness + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + if (idx < 0 || idx > len) throw CommonErrors.indexOutOfBounds(index = idx, max = len - 1) + val it = elems.iterator + if (it.hasNext) { + if (idx == len) addAll(it) + else { + val fresh = new ListBuffer[A].freshFrom(it) + ensureUnaliased() + insertAfter(predecessor(idx), fresh) + } + } + } -// New methods in ListBuffer + def remove(idx: Int): A = { + ensureUnaliased() + if (idx < 0 || idx >= len) throw CommonErrors.indexOutOfBounds(index = idx, max = len - 1) + val p = predecessor(idx) + val nx = getNext(p) + if(p eq null) { + first = nx.tail + if(first.isEmpty) last0 = null + } else { + if(last0 eq nx) last0 = p + p.next = nx.tail + } + len -= 1 + nx.head + } - /** Prepends the elements of this buffer to a given list - * - * @param xs the list to which elements are prepended - */ - def prependToList(xs: List[A]): List[A] = { - if (isEmpty) xs - else { + def remove(idx: 
Int, count: Int): Unit = + if (count > 0) { ensureUnaliased() - last0.tl = xs - toList + if (idx < 0 || idx + count > len) throw new IndexOutOfBoundsException(s"$idx to ${idx + count} is out of bounds (min 0, max ${len - 1})") + removeAfter(predecessor(idx), count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) } + + private def removeAfter(prev: Predecessor, n: Int) = { + @tailrec def ahead(p: List[A], n: Int): List[A] = + if (n == 0) p else ahead(p.tail, n - 1) + val nx = ahead(getNext(prev), n) + if(prev eq null) first = nx else prev.next = nx + if(nx.isEmpty) last0 = prev + len -= n } -// Overrides of methods in Buffer + /** Replace the contents of this $coll with the mapped result. + * + * @param f the mapping function + * @return this $coll + */ + def mapInPlace(f: A => A): this.type = { + mutationCount += 1 + val buf = new ListBuffer[A] + for (elem <- this) buf += f(elem) + first = buf.first + last0 = buf.last0 + aliased = false // we just assigned from a new instance + this + } - /** Removes the element on a given index position. May take time linear in - * the buffer size. + /** Replace the contents of this $coll with the flatmapped result. * - * @param n the index which refers to the element to delete. - * @return n the element that was formerly at position `n`. - * @note an element must exists at position `n`. - * @throws IndexOutOfBoundsException if `n` is out of bounds. 
+ * @param f the mapping function + * @return this $coll */ - def remove(n: Int): A = { - if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString()) - ensureUnaliased() - var old = start.head - if (n == 0) { - start = start.tail - } else { - var cursor = start - var i = 1 - while (i < n) { - cursor = cursor.tail - i += 1 + def flatMapInPlace(f: A => IterableOnce[A]): this.type = { + mutationCount += 1 + var src = first + var dst: List[A] = null + last0 = null + len = 0 + while(!src.isEmpty) { + val it = f(src.head).iterator + while(it.hasNext) { + val v = new ::(it.next(), Nil) + if(dst eq null) dst = v else last0.next = v + last0 = v + len += 1 } - old = cursor.tail.head - if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]] - cursor.asInstanceOf[::[A]].tl = cursor.tail.tail + src = src.tail } - reduceLengthBy(1) - old + first = if(dst eq null) Nil else dst + aliased = false // we just rebuilt a fresh, unaliased instance + this } - /** Remove a single element from this buffer. May take time linear in the - * buffer size. + /** Replace the contents of this $coll with the filtered result. * - * @param elem the element to remove. - * @return this $coll. 
+ * @param p the filtering predicate + * @return this $coll */ - override def -= (elem: A): this.type = { + def filterInPlace(p: A => Boolean): this.type = { ensureUnaliased() - if (isEmpty) {} - else if (start.head == elem) { - start = start.tail - reduceLengthBy(1) - } - else { - var cursor = start - while (!cursor.tail.isEmpty && cursor.tail.head != elem) { - cursor = cursor.tail - } - if (!cursor.tail.isEmpty) { - val z = cursor.asInstanceOf[::[A]] - if (z.tl == last0) - last0 = z - z.tl = cursor.tail.tail - reduceLengthBy(1) + var prev: Predecessor = null + var cur: List[A] = first + while (!cur.isEmpty) { + val follow = cur.tail + if (!p(cur.head)) { + if(prev eq null) first = follow + else prev.next = follow + len -= 1 + } else { + prev = cur.asInstanceOf[Predecessor] } + cur = follow } + last0 = prev this } - /** Selects the last element. + def patchInPlace(from: Int, patch: collection.IterableOnce[A], replaced: Int): this.type = { + val _len = len + val _from = math.max(from, 0) // normalized + val _replaced = math.max(replaced, 0) // normalized + val it = patch.iterator + + val nonEmptyPatch = it.hasNext + val nonEmptyReplace = (_from < _len) && (_replaced > 0) + + // don't want to add a mutation or check aliasing (potentially expensive) + // if there's no patching to do + if (nonEmptyPatch || nonEmptyReplace) { + val fresh = new ListBuffer[A].freshFrom(it) + ensureUnaliased() + val i = math.min(_from, _len) + val n = math.min(_replaced, _len) + val p = predecessor(i) + removeAfter(p, math.min(n, _len - i)) + insertAfter(p, fresh) + } + this + } + + /** + * Selects the last element. * - * Runs in constant time. + * Runs in constant time. * - * @return the last element of this buffer. - * @throws NoSuchElementException if this buffer is empty. + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. 
*/ - override def last: A = - if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer") - else last0.head + override def last: A = if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer") else last0.head - /** Optionally selects the last element. + /** + * Optionally selects the last element. * - * Runs in constant time. + * Runs in constant time. * - * @return `Some` of the last element of this buffer if the buffer is nonempty, `None` if it is empty. + * @return the last element of this $coll$ if it is nonempty, `None` if it is empty. */ override def lastOption: Option[A] = if (last0 eq null) None else Some(last0.head) - /** Returns an iterator over this `ListBuffer`. The iterator will reflect - * changes made to the underlying `ListBuffer` beyond the next element; - * the next element's value is cached so that `hasNext` and `next` are - * guaranteed to be consistent. In particular, an empty `ListBuffer` - * will give an empty iterator even if the `ListBuffer` is later filled. - */ - override def iterator: Iterator[A] = new AbstractIterator[A] { - // Have to be careful iterating over mutable structures. - // This used to have "(cursor ne last0)" as part of its hasNext - // condition, which means it can return true even when the iterator - // is exhausted. Inconsistent results are acceptable when one mutates - // a structure while iterating, but we should never return hasNext == true - // on exhausted iterators (thus creating exceptions) merely because - // values were changed in-place. 
- var cursor: List[A] = if (ListBuffer.this.isEmpty) Nil else start - - def hasNext: Boolean = cursor ne Nil - def next(): A = - if (!hasNext) throw new NoSuchElementException("next on empty Iterator") - else { - val ans = cursor.head - cursor = cursor.tail - ans - } - } - - // Private methods - private def ensureUnaliased() = { - if (exported) copy() - } + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ListBuffer" - /** Copy contents of this buffer */ - private def copy() { - if (isEmpty) return - var cursor = start - val limit = last0.tail - clear() - while (cursor ne limit) { - this += cursor.head - cursor = cursor.tail - } - } +} - override def equals(that: Any): Boolean = that match { - case that: ListBuffer[_] => this.start equals that.start - case _ => super.equals(that) - } +@SerialVersionUID(3L) +object ListBuffer extends StrictOptimizedSeqFactory[ListBuffer] { - /** Returns a clone of this buffer. - * - * @return a `ListBuffer` with the same elements. - */ - override def clone(): ListBuffer[A] = (new ListBuffer[A]) ++= this + def from[A](coll: collection.IterableOnce[A]): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) - /** Defines the prefix of the string representation. - * - * @return the string representation of this buffer. 
- */ - override def stringPrefix: String = "ListBuffer" -} + def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) -/** $factoryInfo - * @define Coll `ListBuffer` - * @define coll list buffer - */ -object ListBuffer extends SeqFactory[ListBuffer] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowingBuilder(new ListBuffer[A]) + def empty[A]: ListBuffer[A] = new ListBuffer[A] } diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala index 32473eeb2958..e1a273bfd5af 100644 --- a/src/library/scala/collection/mutable/ListMap.scala +++ b/src/library/scala/collection/mutable/ListMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,75 +10,73 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ -import annotation.tailrec +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.List /** A simple mutable map backed by a list, so it preserves insertion order. - * - * @tparam A the type of the keys contained in this list map. - * @tparam B the type of the values assigned to keys in this list map. - * - * @define Coll `mutable.ListMap` - * @define coll mutable list map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `ListMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[ListMap, (A, B), ListMap[A, B]]` - * is defined in object `ListMap`. 
Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `ListMap`. - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@SerialVersionUID(-3362098515407812442L) -class ListMap[A, B] -extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, ListMap[A, B]] - with Serializable { + * + * @tparam K the type of the keys contained in this list map. + * @tparam V the type of the values assigned to keys in this list map. + * + * @define Coll `mutable.ListMap` + * @define coll mutable list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +class ListMap[K, V] + extends AbstractMap[K, V] + with MapOps[K, V, ListMap, ListMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { - override def empty = ListMap.empty[A, B] + override def mapFactory: MapFactory[ListMap] = ListMap - private var elems: List[(A, B)] = List() - private var siz: Int = 0 + private[this] var elems: List[(K, V)] = List() + private[this] var siz: Int = 0 - def get(key: A): Option[B] = elems find (_._1 == key) map (_._2) - def iterator: Iterator[(A, B)] = elems.iterator + def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) + def iterator: Iterator[(K, V)] = elems.iterator - @deprecatedOverriding("No sensible way to override += as private remove is used in multiple places 
internally.", "2.11.0") - def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this } + final override def addOne(kv: (K, V)) = { + val (e, key0) = remove(kv._1, elems, List()) + elems = (key0, kv._2) :: e + siz += 1; this + } - @deprecatedOverriding("No sensible way to override -= as private remove is used in multiple places internally.", "2.11.0") - def -= (key: A) = { elems = remove(key, elems, List()); this } + final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } @tailrec - private def remove(key: A, elems: List[(A, B)], acc: List[(A, B)]): List[(A, B)] = { - if (elems.isEmpty) acc - else if (elems.head._1 == key) { siz -= 1; acc ::: elems.tail } + private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { + if (elems.isEmpty) (acc, key) + else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } else remove(key, elems.tail, elems.head :: acc) } + final override def clear(): Unit = { elems = List(); siz = 0 } - @deprecatedOverriding("No sensible way to override as this functionality relies upon access to private methods.", "2.11.0") - override def clear() = { elems = List(); siz = 0 } - - @deprecatedOverriding("No sensible way to override as this functionality relies upon access to private methods.", "2.11.0") - override def size: Int = siz + final override def size: Int = siz + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override protected[this] def stringPrefix = "ListMap" } /** $factoryInfo - * @define Coll `mutable.ListMap` - * @define coll mutable list map - */ -object ListMap extends MutableMapFactory[ListMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: ListMap[A, B] = new ListMap[A, B] + * @define Coll `mutable.ListMap` + * @define coll mutable list map + */ +@SerialVersionUID(3L) +@deprecated("Use an 
immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +object ListMap extends MapFactory[ListMap] { + def empty[K, V]: ListMap[K, V] = new ListMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]): ListMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) } diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala index 35eefa1669af..e36c337437e3 100644 --- a/src/library/scala/collection/mutable/LongMap.scala +++ b/src/library/scala/collection/mutable/LongMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,55 +10,63 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic.CanBuildFrom +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions /** This class implements mutable maps with `Long` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically substantially faster with `LongMap` than [[HashMap]]. Methods - * that act on the whole map, including `foreach` and `map` are not in - * general expected to be faster than with a generic map, save for those - * that take particular advantage of the internal structure of the map: - * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. - * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. 
Although `LongMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29 entries (approximately - * 500 million). The maximum capacity is 2^30, but performance will degrade - * rapidly as 2^30 is approached. - * - */ -@SerialVersionUID(3311432836435989440L) + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically substantially faster with `LongMap` than [[HashMap]]. Methods + * that act on the whole map, including `foreach` and `map` are not in + * general expected to be faster than with a generic map, save for those + * that take particular advantage of the internal structure of the map: + * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `LongMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29 entries (approximately + * 500 million). The maximum capacity is 2^30, but performance will degrade + * rapidly as 2^30 is approached. 
+ * + */ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) -extends AbstractMap[Long, V] - with Map[Long, V] - with MapLike[Long, V, LongMap[V]] - with Serializable -{ + extends AbstractMap[Long, V] + with MapOps[Long, V, Map, LongMap[V]] + with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] + with Serializable { import LongMap._ - def this() = this(LongMap.exceptionDefault, 16, true) + def this() = this(LongMap.exceptionDefault, 16, initBlank = true) + + // TODO: override clear() with an optimization more tailored for efficiency. + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]): LongMap[V] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) + def this(defaultEntry: Long => V) = this(defaultEntry, 16, initBlank = true) /** Creates a new `LongMap` with an initial buffer of specified size. - * - * A LongMap can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) + * + * A LongMap can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, initBlank = true) /** Creates a new `LongMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, initBlank = true) private[this] var mask = 0 private[this] var extraKeys: Int = 0 @@ -80,12 +88,14 @@ extends AbstractMap[Long, V] } private[collection] def initializeTo( - m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] - ) { + m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] + ): Unit = { mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz } override def size: Int = _size + (extraKeys+1)/2 + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 override def empty: LongMap[V] = new LongMap() private def imbalanced: Boolean = @@ -175,13 +185,17 @@ extends AbstractMap[Long, V] else { var i = seekEntryOrOpen(key) if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or even contain what we want now - // (but if it does, we'll replace it) val value = { - val ok = _keys + val oks = _keys + val j = i & IndexMask + val ok = oks(j) val ans = defaultValue - if (ok ne _keys) { + // Evaluating `defaultValue` may change the map + // - repack: the array is different + // - element added at `j`: since `i < 0`, the key was missing and `ok` is either 0 or MinValue. + // If `defaultValue` added an element at `j` then `_keys(j)` must be different now. + // (`_keys` never contains 0 or MinValue.) + if (oks.ne(_keys) || ok != _keys(j)) { i = seekEntryOrOpen(key) if (i >= 0) _size -= 1 } @@ -200,12 +214,12 @@ extends AbstractMap[Long, V] } /** Retrieves the value associated with a key, or the default for that type if none exists - * (null for AnyRef, 0 for floats and integers). 
- * - * Note: this is the fastest way to retrieve a value that may or - * may not exist, if the default null/zero is acceptable. For key/value - * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. - */ + * (null for AnyRef, 0 for floats and integers). + * + * Note: this is the fastest way to retrieve a value that may or + * may not exist, if the default null/zero is acceptable. For key/value + * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. + */ def getOrNull(key: Long): V = { if (key == -key) { if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V] @@ -219,9 +233,9 @@ extends AbstractMap[Long, V] } /** Retrieves the value associated with a key. - * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead. - */ + * If the key does not exist in the map, the `defaultEntry` for that key + * will be returned instead. + */ override def apply(key: Long): V = { if (key == -key) { if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key) @@ -235,11 +249,11 @@ extends AbstractMap[Long, V] } /** The user-supplied default value for the key. Throws an exception - * if no other default behavior was specified. - */ + * if no other default behavior was specified. + */ override def default(key: Long) = defaultEntry(key) - private def repack(newMask: Int) { + private def repack(newMask: Int): Unit = { val ok = _keys val ov = _values mask = newMask @@ -266,12 +280,7 @@ extends AbstractMap[Long, V] * improved performance. Repacking takes time proportional to the number * of entries in the map. 
*/ - def repack() { - var m = mask - if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask - while (m > 8 && 8*_size < m) m = m >>> 1 - repack(m) - } + def repack(): Unit = repack(repackMask(mask, _size = _size, _vacant = _vacant)) override def put(key: Long, value: V): Option[V] = { if (key == -key) { @@ -309,9 +318,9 @@ extends AbstractMap[Long, V] } /** Updates the map to include a new key-value pair. - * - * This is the fastest way to add an entry to a `LongMap`. - */ + * + * This is the fastest way to add an entry to a `LongMap`. + */ override def update(key: Long, value: V): Unit = { if (key == -key) { if (key == 0) { @@ -341,11 +350,15 @@ extends AbstractMap[Long, V] } /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") def +=(key: Long, value: V): this.type = { update(key, value); this } - def +=(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } + /** Adds a new key/value pair to this map and returns the map. 
*/ + @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } - def -=(key: Long): this.type = { + def subtractOne(key: Long): this.type = { if (key == -key) { if (key == 0L) { extraKeys &= 0x2 @@ -368,7 +381,7 @@ extends AbstractMap[Long, V] this } - def iterator: Iterator[(Long, V)] = new Iterator[(Long, V)] { + def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { private[this] val kz = _keys private[this] val vz = _values @@ -394,7 +407,7 @@ extends AbstractMap[Long, V] index += 1 true }) - def next = { + def next() = { if (nextPair == null && !hasNext) throw new NoSuchElementException("next") val ans = nextPair if (anotherPair != null) { @@ -406,7 +419,11 @@ extends AbstractMap[Long, V] } } - override def foreach[U](f: ((Long,V)) => U) { + // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. + override def keysIterator: Iterator[Long] = super.keysIterator + override def valuesIterator: Iterator[V] = super.valuesIterator + + override def foreach[U](f: ((Long,V)) => U): Unit = { if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) var i,j = 0 @@ -420,34 +437,55 @@ extends AbstractMap[Long, V] } } + override def foreachEntry[U](f: (Long,V) => U): Unit = { + if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k, _values(i).asInstanceOf[V]) + } + i += 1 + } + } + override def clone(): LongMap[V] = { val kz = java.util.Arrays.copyOf(_keys, _keys.length) val vz = java.util.Arrays.copyOf(_values, _values.length) - val lm = new LongMap[V](defaultEntry, 1, false) + val lm = new LongMap[V](defaultEntry, 1, initBlank = false) 
lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) lm } + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { val lm = clone().asInstanceOf[LongMap[V1]] lm += kv lm } - override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - xs.foreach(kv => lm += kv) - lm + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) } - override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = { + override def concat[V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = { val lm = clone().asInstanceOf[LongMap[V1]] - lm += (key, value) + xs.iterator.foreach(kv => lm += kv) lm } + override def ++ [V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = + clone().asInstanceOf[LongMap[V1]].addOne(key, value) + /** Applies a function to all keys of this map. */ - def foreachKey[A](f: Long => A) { + def foreachKey[A](f: Long => A): Unit = { if ((extraKeys & 1) == 1) f(0L) if ((extraKeys & 2) == 2) f(Long.MinValue) var i,j = 0 @@ -462,7 +500,7 @@ extends AbstractMap[Long, V] } /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A) { + def foreachValue[A](f: V => A): Unit = { if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) var i,j = 0 @@ -477,13 +515,13 @@ extends AbstractMap[Long, V] } /** Creates a new `LongMap` with different values. 
- * Unlike `mapValues`, this method generates a new - * collection immediately. - */ + * Unlike `mapValues`, this method generates a new + * collection immediately. + */ def mapValuesNow[V1](f: V => V1): LongMap[V1] = { val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) + val lm = new LongMap[V1](LongMap.exceptionDefault, 1, initBlank = false) val kz = java.util.Arrays.copyOf(_keys, _keys.length) val vz = new Array[AnyRef](_values.length) var i,j = 0 @@ -500,9 +538,15 @@ extends AbstractMap[Long, V] } /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - def transformValues(f: V => V): this.type = { + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] var i,j = 0 @@ -516,42 +560,62 @@ extends AbstractMap[Long, V] } this } + + /** An overload of `map` which produces a `LongMap`. + * + * @param f the mapping function + */ + def map[V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + /** An overload of `flatMap` which produces a `LongMap`. 
+ * + * @param f the mapping function + */ + def flatMap[V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + /** An overload of `collect` which produces a `LongMap`. + * + * @param pf the mapping function + */ + def collect[V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) + + override protected[this] def className = "LongMap" } object LongMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 + private final val IndexMask = 0x3FFF_FFFF + private final val MissingBit = 0x8000_0000 + private final val VacantBit = 0x4000_0000 + private final val MissVacant = 0xC000_0000 private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) - implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] = - ReusableCBF.asInstanceOf[CanBuildFrom[LongMap[V], (Long, U), LongMap[U]]] - private[this] val ReusableCBF = new CanBuildFrom[LongMap[Any], (Long, Any), LongMap[Any]] { - def apply(from: LongMap[Any]): LongMapBuilder[Any] = apply() - def apply(): LongMapBuilder[Any] = new LongMapBuilder[Any] - } - /** A builder for instances of `LongMap`. - * - * This builder can be reused to create multiple instances. - */ + * + * This builder can be reused to create multiple instances. 
+ */ final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { private[collection] var elems: LongMap[V] = new LongMap[V] - def +=(entry: (Long, V)): this.type = { + override def addOne(entry: (Long, V)): this.type = { elems += entry this } - def clear() { elems = new LongMap[V] } + def clear(): Unit = elems = new LongMap[V] def result(): LongMap[V] = elems + override def knownSize: Int = elems.knownSize } /** Creates a new `LongMap` with zero or more key/value pairs. */ - def apply[V](elems: (Long, V)*): LongMap[V] = { - val sz = if (elems.hasDefiniteSize) elems.size else 4 + def apply[V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + + private def buildFromIterableOnce[V](elems: IterableOnce[(Long, V)]): LongMap[V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 val lm = new LongMap[V](sz * 2) - elems.foreach{ case (k,v) => lm(k) = v } + elems.iterator.foreach{ case (k,v) => lm(k) = v } if (lm.size < (sz>>3)) lm.repack() lm } @@ -562,9 +626,23 @@ object LongMap { /** Creates a new empty `LongMap` with the supplied default */ def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) + /** Creates a new `LongMap` from an existing source collection. A source collection + * which is already a `LongMap` gets cloned. + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new `LongMap` with the elements of `source` + */ + def from[V](source: IterableOnce[(Long, V)]): LongMap[V] = source match { + case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] + case _ => buildFromIterableOnce(source) + } + + def newBuilder[V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + /** Creates a new `LongMap` from arrays of keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. - */ + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. 
+ */ def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { val sz = math.min(keys.length, values.length) val lm = new LongMap[V](sz * 2) @@ -575,15 +653,39 @@ object LongMap { } /** Creates a new `LongMap` from keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. - */ - def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = { + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { val sz = math.min(keys.size, values.size) val lm = new LongMap[V](sz * 2) val ki = keys.iterator val vi = values.iterator - while (ki.hasNext && vi.hasNext) lm(ki.next) = vi.next + while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() if (lm.size < (sz >> 3)) lm.repack() lm } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def newBuilder(from: Any): ReusableBuilder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) + + private def 
repackMask(mask: Int, _size: Int, _vacant: Int): Int = { + var m = mask + if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask + while (m > 8 && _size < (m >>> 3)) m = m >>> 1 + m /*.ensuring(_size <= _ + 1)*/ + } } diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index 69185c1f1894..8659b45e86e6 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,71 +14,254 @@ package scala package collection package mutable -import generic._ - -/** A base trait for maps that can be mutated. - * $mapNote - * $mapTags - * @since 1.0 - * @author Matthias Zenger - */ +/** Base type of mutable Maps */ trait Map[K, V] extends Iterable[(K, V)] -// with GenMap[K, V] - with scala.collection.Map[K, V] - with MapLike[K, V, Map[K, V]] { + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with Growable[(K, V)] + with Shrinkable[K] + with MapFactoryDefaults[K, V, Map, Iterable] { - override def empty: Map[K, V] = Map.empty + override def mapFactory: scala.collection.MapFactory[Map] = Map - override def seq: Map[K, V] = this + /* + //TODO consider keeping `remove` because it returns the removed entry + @deprecated("Use subtract or -= instead of remove", "2.13.0") + def remove(key: K): Option[V] = { + val old = get(key) + if(old.isDefined) subtract(key) + old + } + */ /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault(d: K => V): Map[K, V] = new Map.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) +} + +/** + * @define coll mutable map + * @define Coll `mutable.Map` + */ +trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] + with Cloneable[C] + with Builder[(K, V), C] + with Growable[(K, V)] + with Shrinkable[K] { + + def result(): C = coll + + @deprecated("Use - or remove on an immutable Map", "2.13.0") + final def - (key: K): C = clone() -= key + + @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") + final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys + + /** Adds a new key/value pair to this map and optionally returns previously bound value. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key + * before the `put` operation was executed, or `None` if `key` + * was not defined in the map before. + */ + def put(key: K, value: V): Option[V] = { + val r = get(key) + update(key, value) + r + } + + /** Adds a new key/value pair to this map. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. 
+ * + * @param key The key to update + * @param value The new value + */ + def update(key: K, value: V): Unit = { coll += ((key, value)) } + + /** + * Update a mapping for the specified key and its current optionally mapped value + * (`Some` if there is current mapping, `None` if not). * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value + * @param key the key value + * @param remappingFunction a function that receives current optionally mapped value and return a new mapping + * @return the new value associated with the specified key */ - def withDefault(d: K => V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, d) + def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = this.get(key) + val nextValue = remappingFunction(previousValue) + (previousValue, nextValue) match { + case (None, None) => // do nothing + case (Some(_), None) => this.remove(key) + case (_, Some(v)) => this.update(key,v) + } + nextValue + } - /** The same map with a given default value. + /** If given key is already in this map, returns associated value. * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * Otherwise, computes value from given expression `defaultValue`, stores with key + * in map and returns that value. 
* - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value + * Concurrent map implementations may evaluate the expression `defaultValue` + * multiple times, or may evaluate `defaultValue` without inserting the result. + * + * @param key the key to test + * @param defaultValue the computation yielding the value to associate with `key`, if + * `key` is previously unbound. + * @return the value associated with key (either previously or as a result + * of executing the method). */ - def withDefaultValue(d: V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, x => d) + def getOrElseUpdate(key: K, @deprecatedName("op", since="2.13.13") defaultValue: => V): V = + get(key) match { + case Some(v) => v + case None => val d = defaultValue; this(key) = d; d + } + + /** Removes a key from this map, returning the value associated previously + * with that key as an option. + * @param key the key to be removed + * @return an option value containing the value associated previously with `key`, + * or `None` if `key` was not defined in the map before. + */ + def remove(key: K): Option[V] = { + val r = get(key) + if (r.isDefined) this -= key + r + } + + def clear(): Unit = { keysIterator foreach -= } + + override def clone(): C = empty ++= this + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) + + /** Retains only those mappings for which the predicate + * `p` returns `true`. 
+ * + * @param p The test predicate + */ + def filterInPlace(p: (K, V) => Boolean): this.type = { + if (!isEmpty) this match { + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) + case _ => + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + if (!p(k, v)) { + this -= k + } + i += 1 + } + } + this + } + + @deprecated("Use mapValuesInPlace instead", "2.13.0") + @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f) + + /** Applies a transformation function to all values contained in this map. + * The transformation function produces new values from existing keys + * associated values. + * + * @param f the transformation to apply + * @return the map itself. + */ + def mapValuesInPlace(f: (K, V) => V): this.type = { + if (!isEmpty) this match { + case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) + case _ => + val array = this.toArray[Any] + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + update(k, f(k, v)) + i += 1 + } + } + this + } + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) + + override def knownSize: Int = super[IterableOps].knownSize } -/** $factoryInfo - * The current default implementation of a $Coll is a `HashMap`. 
- * @define coll mutable map - * @define Coll `mutable.Map` - */ -object Map extends MutableMapFactory[Map] { - /** $canBuildFromInfo */ - implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (K, V), Map[K, V]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] - - def empty[K, V]: Map[K, V] = new HashMap[K, V] - - @SerialVersionUID(3886083557164597477L) - class WithDefault[K, V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault(underlying, d) with Map[K, V] { - override def += (kv: (K, V)) = {underlying += kv; this} - def -= (key: K) = {underlying -= key; this} - override def empty = new WithDefault(underlying.empty, d) - override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) - override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - - /** If these methods aren't overridden to thread through the underlying map, - * successive calls to withDefault* have no effect. 
- */ - override def withDefault(d: K => V): mutable.Map[K, V] = new WithDefault[K, V](underlying, d) - override def withDefaultValue(d: V): mutable.Map[K, V] = new WithDefault[K, V](underlying, x => d) +/** + * $factoryInfo + * @define coll mutable map + * @define Coll `mutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](HashMap) { + + @SerialVersionUID(3L) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K => V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + override def default(key: K): V = defaultValue(key) + + def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = underlying.knownSize + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def clear(): Unit = underlying.clear() + + def get(key: K): Option[V] = underlying.get(key) + + def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): Map[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) } + } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala deleted file mode 100644 index fb289f768fef..000000000000 --- a/src/library/scala/collection/mutable/MapBuilder.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** The canonical builder for immutable maps, working with the map's `+` method - * to add new elements. - * Collections are built from their `empty` element using this + method. - * - * @tparam A Type of the keys for the map this builder creates. - * @tparam B Type of the values for the map this builder creates. - * @tparam Coll The type of the actual collection this builder builds. - * @param empty The empty element of the collection. - * - * @since 2.8 - */ -class MapBuilder[A, B, Coll <: scala.collection.GenMap[A, B] with scala.collection.GenMapLike[A, B, Coll]](empty: Coll) -extends ReusableBuilder[(A, B), Coll] { - protected var elems: Coll = empty - def +=(x: (A, B)): this.type = { - elems = (elems + x).asInstanceOf[Coll] - // the cast is necessary because right now we cannot enforce statically that - // for every map of type Coll, `+` yields again a Coll. With better support - // for hk-types we might be able to enforce this in the future, though. 
- this - } - def clear() { elems = empty } - def result: Coll = elems -} diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala deleted file mode 100644 index b24a98eb8b24..000000000000 --- a/src/library/scala/collection/mutable/MapLike.scala +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ -import scala.annotation.migration -import scala.collection.parallel.mutable.ParMap - -/** A template trait for mutable maps. - * $mapNote - * $mapTags - * @define Coll `mutable.Map` - * @define coll mutable map - * @since 2.8 - * - * @define mapNote - * '''Implementation note:''' - * This trait provides most of the operations of a mutable `Map` - * independently of its representation. It is typically inherited by - * concrete implementations of maps. - * - * To implement a concrete mutable map, you need to provide - * implementations of the following methods: - * {{{ - * def get(key: K): Option[V] - * def iterator: Iterator[(K, V)] - * def += (kv: (K, V)): This - * def -= (key: K): This - * }}} - * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map - * you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. 
- */ -trait MapLike[K, V, +This <: MapLike[K, V, This] with Map[K, V]] - extends scala.collection.MapLike[K, V, This] - with Builder[(K, V), This] - with Growable[(K, V)] - with Shrinkable[K] - with Cloneable[This] - with Parallelizable[(K, V), ParMap[K, V]] -{ self => - - /** A common implementation of `newBuilder` for all mutable maps - * in terms of `empty`. - * - * Overrides `MapLike` implementation for better efficiency. - */ - override protected[this] def newBuilder: Builder[(K, V), This] = empty - - protected[this] override def parCombiner = ParMap.newCombiner[K, V] - - /** Converts this $coll to a sequence. - * - * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. - */ - override def toSeq: collection.Seq[(K, V)] = { - // ArrayBuffer for efficiency, preallocated to the right size. - val result = new ArrayBuffer[(K, V)](size) - foreach(result += _) - result - } - - - /** Adds a new key/value pair to this map and optionally returns previously bound value. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key the key to update - * @param value the new value - * @return an option value containing the value associated with the key - * before the `put` operation was executed, or `None` if `key` - * was not defined in the map before. - */ - def put(key: K, value: V): Option[V] = { - val r = get(key) - update(key, value) - r - } - - /** Adds a new key/value pair to this map. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key The key to update - * @param value The new value - */ - def update(key: K, value: V) { this += ((key, value)) } - - /** Adds a new key/value pair to this map. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * @param kv the key/value pair. 
- * @return the map itself - */ - def += (kv: (K, V)): this.type - - /** Creates a new map consisting of all key/value pairs of the current map - * plus a new pair of a given key and value. - * - * @param key The key to add - * @param value The new value - * @return A fresh immutable map with the binding from `key` to - * `value` added to this map. - */ - override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value)) - - /** Creates a new map containing a new key/value mapping and all the key/value mappings - * of this map. - * - * Mapping `kv` will override existing mappings from this map with the same key. - * - * @param kv the key/value mapping to be added - * @return a new map containing mappings of this map and the mapping `kv`. - */ - @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0") - def + [V1 >: V] (kv: (K, V1)): Map[K, V1] = clone().asInstanceOf[Map[K, V1]] += kv - - /** Creates a new map containing two or more key/value mappings and all the key/value - * mappings of this map. - * - * Specified mappings will override existing mappings from this map with the same keys. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new map containing mappings of this map and two or more specified mappings. - */ - @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0") - override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): Map[K, V1] = - clone().asInstanceOf[Map[K, V1]] += elem1 += elem2 ++= elems - - /** Creates a new map containing the key/value mappings provided by the specified traversable object - * and all the key/value mappings of this map. - * - * Note that existing mappings from this map with the same key as those in `xs` will be overridden. - * - * @param xs the traversable object. 
- * @return a new map containing mappings of this map and those provided by `xs`. - */ - @migration("`++` creates a new map. Use `++=` to add an element to this map and return that map itself.", "2.8.0") - override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = - clone().asInstanceOf[Map[K, V1]] ++= xs.seq - - /** Removes a key from this map, returning the value associated previously - * with that key as an option. - * @param key the key to be removed - * @return an option value containing the value associated previously with `key`, - * or `None` if `key` was not defined in the map before. - */ - def remove(key: K): Option[V] = { - val r = get(key) - this -= key - r - } - - /** Removes a key from this map. - * @param key the key to be removed - * @return the map itself. - */ - def -= (key: K): this.type - - /** Creates a new map with all the key/value mappings of this map except the key/value mapping - * with the specified key. - * - * @param key the key to be removed - * @return a new map with all the mappings of this map except that with a key `key`. - */ - @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0") - override def -(key: K): This = clone() -= key - - /** Removes all bindings from the map. After this operation has completed, - * the map will be empty. - */ - def clear() { keysIterator foreach -= } - - /** If given key is already in this map, returns associated value. - * - * Otherwise, computes value from given expression `op`, stores with key - * in map and returns that value. - * - * Concurrent map implementations may evaluate the expression `op` - * multiple times, or may evaluate `op` without inserting the result. - * - * @param key the key to test - * @param op the computation yielding the value to associate with `key`, if - * `key` is previously unbound. - * @return the value associated with key (either previously or as a result - * of executing the method). 
- */ - def getOrElseUpdate(key: K, op: => V): V = - get(key) match { - case Some(v) => v - case None => val d = op; this(key) = d; d - } - - /** Applies a transformation function to all values contained in this map. - * The transformation function produces new values from existing keys - * associated values. - * - * @param f the transformation to apply - * @return the map itself. - */ - def transform(f: (K, V) => V): this.type = { - this.iterator foreach { - case (key, value) => update(key, f(key, value)) - } - this - } - - /** Retains only those mappings for which the predicate - * `p` returns `true`. - * - * @param p The test predicate - */ - def retain(p: (K, V) => Boolean): this.type = { - for ((k, v) <- this.toList) // scala/bug#7269 toList avoids ConcurrentModificationException - if (!p(k, v)) this -= k - - this - } - - override def clone(): This = empty ++= repr - - /** The result when this map is used as a builder - * @return the map representation itself. - */ - def result: This = repr - - /** Creates a new map with all the key/value mappings of this map except mappings with keys - * equal to any of the two or more specified keys. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new map containing all the mappings of this map except mappings - * with a key equal to `elem1`, `elem2` or any of `elems`. - */ - @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0") - override def -(elem1: K, elem2: K, elems: K*): This = - clone() -= elem1 -= elem2 --= elems - - /** Creates a new map with all the key/value mappings of this map except mappings with keys - * equal to any of those provided by the specified traversable object. - * - * @param xs the traversable object. - * @return a new map with all the key/value mappings of this map except mappings - * with a key equal to a key from `xs`. 
- */ - @migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0") - override def --(xs: GenTraversableOnce[K]): This = clone() --= xs.seq -} diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala deleted file mode 100644 index d88e07fa672d..000000000000 --- a/src/library/scala/collection/mutable/MapProxy.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** - * This trait implements a proxy for [[scala.collection.mutable.Map]]. - * - * It is most useful for assembling customized map abstractions - * dynamically using object composition and forwarding. 
- * - * @author Matthias Zenger, Martin Odersky - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] { - private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] = - new MapProxy[A, B1] { val self = newSelf } - - override def repr = this - override def empty: MapProxy[A, B] = new MapProxy[A, B] { val self = MapProxy.this.self.empty } - override def updated [B1 >: B](key: A, value: B1) = newProxy(self.updated(key, value)) - - override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = newProxy(self + kv) - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*)) - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq) - override def -(key: A) = newProxy(self - key) - - override def += (kv: (A, B)) = { self += kv ; this } - override def -= (key: A) = { self -= key ; this } -} diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala index c4408dad29f9..b06a99b15d51 100644 --- a/src/library/scala/collection/mutable/MultiMap.scala +++ b/src/library/scala/collection/mutable/MultiMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,71 +10,67 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable /** A trait for mutable maps with multiple values assigned to a key. - * - * This class is typically used as a mixin. It turns maps which map `A` - * to `Set[B]` objects into multimaps that map `A` to `B` objects. 
- * - * @example {{{ - * // first import all necessary types from package `collection.mutable` - * import collection.mutable.{ HashMap, MultiMap, Set } - * - * // to create a `MultiMap` the easiest way is to mixin it into a normal - * // `Map` instance - * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] - * - * // to add key-value pairs to a multimap it is important to use - * // the method `addBinding` because standard methods like `+` will - * // overwrite the complete key-value pair instead of adding the - * // value to the existing key - * mm.addBinding(1, "a") - * mm.addBinding(2, "b") - * mm.addBinding(1, "c") - * - * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` - * - * // to check if the multimap contains a value there is method - * // `entryExists`, which allows to traverse the including set - * mm.entryExists(1, _ == "a") == true - * mm.entryExists(1, _ == "b") == false - * mm.entryExists(2, _ == "b") == true - * - * // to remove a previous added value there is the method `removeBinding` - * mm.removeBinding(1, "a") - * mm.entryExists(1, _ == "a") == false - * }}} - * - * @define coll multimap - * @define Coll `MultiMap` - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - */ -trait MultiMap[A, B] extends Map[A, Set[B]] { + * + * This class is typically used as a mixin. It turns maps which map `K` + * to `Set[V]` objects into multimaps that map `K` to `V` objects. 
+ * + * @example {{{ + * // first import all necessary types from package `collection.mutable` + * import collection.mutable.{ HashMap, MultiMap, Set } + * + * // to create a `MultiMap` the easiest way is to mixin it into a normal + * // `Map` instance + * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] + * + * // to add key-value pairs to a multimap it is important to use + * // the method `addBinding` because standard methods like `+` will + * // overwrite the complete key-value pair instead of adding the + * // value to the existing key + * mm.addBinding(1, "a") + * mm.addBinding(2, "b") + * mm.addBinding(1, "c") + * + * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` + * + * // to check if the multimap contains a value there is method + * // `entryExists`, which allows to traverse the including set + * mm.entryExists(1, _ == "a") == true + * mm.entryExists(1, _ == "b") == false + * mm.entryExists(2, _ == "b") == true + * + * // to remove a previous added value there is the method `removeBinding` + * mm.removeBinding(1, "a") + * mm.entryExists(1, _ == "a") == false + * }}} + * + * @define coll multimap + * @define Coll `MultiMap` + */ +@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") +trait MultiMap[K, V] extends Map[K, Set[V]] { /** Creates a new set. - * - * Classes that use this trait as a mixin can override this method - * to have the desired implementation of sets assigned to new keys. - * By default this is `HashSet`. - * - * @return An empty set of values of type `B`. - */ - protected def makeSet: Set[B] = new HashSet[B] + * + * Classes that use this trait as a mixin can override this method + * to have the desired implementation of sets assigned to new keys. + * By default this is `HashSet`. + * + * @return An empty set of values of type `V`. + */ + protected def makeSet: Set[V] = new HashSet[V] /** Assigns the specified `value` to a specified `key`. 
If the key - * already has a binding to equal to `value`, nothing is changed; - * otherwise a new binding is added for that `key`. - * - * @param key The key to which to bind the new value. - * @param value The value to bind to the key. - * @return A reference to this multimap. - */ - def addBinding(key: A, value: B): this.type = { + * already has a binding to equal to `value`, nothing is changed; + * otherwise a new binding is added for that `key`. + * + * @param key The key to which to bind the new value. + * @param value The value to bind to the key. + * @return A reference to this multimap. + */ + def addBinding(key: K, value: V): this.type = { get(key) match { case None => val set = makeSet @@ -87,32 +83,32 @@ trait MultiMap[A, B] extends Map[A, Set[B]] { } /** Removes the binding of `value` to `key` if it exists, otherwise this - * operation doesn't have any effect. - * - * If this was the last value assigned to the specified key, the - * set assigned to that key will be removed as well. - * - * @param key The key of the binding. - * @param value The value to remove. - * @return A reference to this multimap. - */ - def removeBinding(key: A, value: B): this.type = { + * operation doesn't have any effect. + * + * If this was the last value assigned to the specified key, the + * set assigned to that key will be removed as well. + * + * @param key The key of the binding. + * @param value The value to remove. + * @return A reference to this multimap. + */ + def removeBinding(key: K, value: V): this.type = { get(key) match { case None => - case Some(set) => - set -= value - if (set.isEmpty) this -= key + case Some(set) => + set -= value + if (set.isEmpty) this -= key } this } /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`. - * - * @param key The key for which the predicate is checked. - * @param p The predicate which a value assigned to the key must satisfy. 
- * @return A boolean if such a binding exists - */ - def entryExists(key: A, p: B => Boolean): Boolean = get(key) match { + * + * @param key The key for which the predicate is checked. + * @param p The predicate which a value assigned to the key must satisfy. + * @return A boolean if such a binding exists + */ + def entryExists(key: K, p: V => Boolean): Boolean = get(key) match { case None => false case Some(set) => set exists p } diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala deleted file mode 100644 index 8749c808d060..000000000000 --- a/src/library/scala/collection/mutable/MutableList.scala +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ -import immutable.List - -/** - * This class is used internally to represent mutable lists. It is the - * basis for the implementation of the class `Queue`. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @define Coll `mutable.MutableList` - * @define coll mutable list - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-lists "Scala's Collection Library overview"]] - * section on `Mutable Lists` for more information. 
- */ -@SerialVersionUID(5938451523372603072L) -class MutableList[A] -extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOptimized[A, MutableList[A]] - with GenericTraversableTemplate[A, MutableList] - with Builder[A, MutableList[A]] - with Serializable -{ - override def companion: GenericCompanion[MutableList] = MutableList - - override protected[this] def newBuilder: Builder[A, MutableList[A]] = new MutableList[A] - - protected var first0: LinkedList[A] = new LinkedList[A] - protected var last0: LinkedList[A] = first0 - protected var len: Int = 0 - - def toQueue = new Queue(first0, last0, len) - - /** Is the list empty? - */ - override def isEmpty = len == 0 - - /** Returns the first element in this list - */ - override def head: A = if (nonEmpty) first0.head else throw new NoSuchElementException - - /** Returns the rest of this list - */ - override def tail: MutableList[A] = { - val tl = new MutableList[A] - tailImpl(tl) - tl - } - - protected final def tailImpl(tl: MutableList[A]) { - require(nonEmpty, "tail of empty list") - tl.first0 = first0.tail - tl.len = len - 1 - tl.last0 = if (tl.len == 0) tl.first0 else last0 - } - - /** Prepends a single element to this list. This operation takes constant - * time. - * @param elem the element to prepend. - * @return this $coll. - */ - def +=: (elem: A): this.type = { prependElem(elem); this } - - /** Returns the length of this list. - */ - override def length: Int = len - - /** Returns the `n`-th element of this list. - * @throws IndexOutOfBoundsException if index does not exist. - */ - override def apply(n: Int): A = first0.apply(n) - - /** Updates the `n`-th element of this list to a new value. - * @throws IndexOutOfBoundsException if index does not exist. - */ - def update(n: Int, x: A): Unit = first0.update(n, x) - - /** Returns the `n`-th element of this list or `None` - * if index does not exist. 
- */ - def get(n: Int): Option[A] = first0.get(n) - - protected def prependElem(elem: A) { - first0 = new LinkedList[A](elem, first0) - if (len == 0) last0 = first0 - len = len + 1 - } - - protected def appendElem(elem: A) { - if (len == 0) { - prependElem(elem) - } else { - last0.next = new LinkedList[A] - last0 = last0.next - last0.elem = elem - last0.next = new LinkedList[A] // for performance, use sentinel `object` instead? - len = len + 1 - } - } - - /** Returns an iterator over up to `length` elements of this list. - */ - override def iterator: Iterator[A] = if (isEmpty) Iterator.empty else - new AbstractIterator[A] { - var elems = first0 - var count = len - def hasNext = count > 0 && elems.nonEmpty - def next() = { - if (!hasNext) throw new NoSuchElementException - count = count - 1 - val e = elems.elem - elems = if (count == 0) null else elems.next - e - } - } - - override def last = { - if (isEmpty) throw new NoSuchElementException("MutableList.empty.last") - last0.elem - } - - /** Returns an instance of [[scala.List]] containing the same - * sequence of elements. - */ - override def toList: List[A] = first0.toList - - /** Returns the current list of elements as a linked List - * sequence of elements. - */ - private[mutable] def toLinkedList: LinkedList[A] = first0 - - /** Appends a single element to this buffer. This takes constant time. - * - * @param elem the element to append. 
- */ - def +=(elem: A): this.type = { appendElem(elem); this } - - def clear() { - first0 = new LinkedList[A] - last0 = first0 - len = 0 - } - - def result = this - - override def clone(): MutableList[A] = { - val bf = newBuilder - bf ++= seq - bf.result() - } -} - -object MutableList extends SeqFactory[MutableList] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, MutableList[A]] = new MutableList[A] -} diff --git a/src/library/scala/collection/mutable/MutationTracker.scala b/src/library/scala/collection/mutable/MutationTracker.scala new file mode 100644 index 000000000000..fe0314068a43 --- /dev/null +++ b/src/library/scala/collection/mutable/MutationTracker.scala @@ -0,0 +1,78 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import java.util.ConcurrentModificationException + +/** + * Utilities to check that mutations to a client that tracks + * its mutations have not occurred since a given point. + * [[Iterator `Iterator`]]s that perform this check automatically + * during iteration can be created by wrapping an `Iterator` + * in a [[MutationTracker.CheckedIterator `CheckedIterator`]], + * or by manually using the [[MutationTracker.checkMutations() `checkMutations`]] + * and [[MutationTracker.checkMutationsForIteration() `checkMutationsForIteration`]] + * methods. + */ +private object MutationTracker { + + /** + * Checks whether or not the actual mutation count differs from + * the expected one, throwing an exception, if it does. 
+ * + * @param expectedCount the expected mutation count + * @param actualCount the actual mutation count + * @param message the exception message in case of mutations + * @throws ConcurrentModificationException if the expected and actual + * mutation counts differ + */ + @throws[ConcurrentModificationException] + def checkMutations(expectedCount: Int, actualCount: Int, message: String): Unit = { + if (actualCount != expectedCount) throw new ConcurrentModificationException(message) + } + + /** + * Checks whether or not the actual mutation count differs from + * the expected one, throwing an exception, if it does. This method + * produces an exception message saying that it was called because a + * backing collection was mutated during iteration. + * + * @param expectedCount the expected mutation count + * @param actualCount the actual mutation count + * @throws ConcurrentModificationException if the expected and actual + * mutation counts differ + */ + @throws[ConcurrentModificationException] + @inline def checkMutationsForIteration(expectedCount: Int, actualCount: Int): Unit = + checkMutations(expectedCount, actualCount, "mutation occurred during iteration") + + /** + * An iterator wrapper that checks if the underlying collection has + * been mutated. 
+ * + * @param underlying the underlying iterator + * @param mutationCount a by-name provider of the current mutation count + * @tparam A the type of the iterator's elements + */ + final class CheckedIterator[A](underlying: Iterator[A], mutationCount: => Int) extends AbstractIterator[A] { + private[this] val expectedCount = mutationCount + + def hasNext: Boolean = { + checkMutationsForIteration(expectedCount, mutationCount) + underlying.hasNext + } + def next(): A = underlying.next() + } +} diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala deleted file mode 100644 index 8d9e27006407..000000000000 --- a/src/library/scala/collection/mutable/ObservableBuffer.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import script._ - -/** This class is typically used as a mixin. It adds a subscription - * mechanism to the `Buffer` class into which this abstract - * class is mixed in. Class `ObservableBuffer` publishes - * events of the type `Message`. 
- * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") -trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable] { - type Pub <: ObservableBuffer[A] - - abstract override def +=(element: A): this.type = { - super.+=(element) - publish(new Include(End, element) with Undoable { - def undo() { trimEnd(1) } - }) - this - } - - abstract override def ++=(xs: TraversableOnce[A]): this.type = { - for (x <- xs) this += x - this - } - - abstract override def +=:(element: A): this.type = { - super.+=:(element) - publish(new Include(Start, element) with Undoable { - def undo() { trimStart(1) } - }) - this - } - - abstract override def update(n: Int, newelement: A): Unit = { - val oldelement = apply(n) - super.update(n, newelement) - publish(new Update(Index(n), newelement) with Undoable { - def undo() { update(n, oldelement) } - }) - } - - abstract override def remove(n: Int): A = { - val oldelement = apply(n) - super.remove(n) - publish(new Remove(Index(n), oldelement) with Undoable { - def undo() { insert(n, oldelement) } - }) - oldelement - } - - abstract override def clear(): Unit = { - super.clear() - publish(new Reset with Undoable { - def undo() { throw new UnsupportedOperationException("cannot undo") } - }) - } - - abstract override def insertAll(n: Int, elems: scala.collection.Traversable[A]) { - super.insertAll(n, elems) - var curr = n - 1 - val msg = elems.foldLeft(new Script[A]() with Undoable { - def undo() { throw new UnsupportedOperationException("cannot undo") } - }) { - case (msg, elem) => - curr += 1 - msg += Include(Index(curr), elem) - } - publish(msg) - } - -} diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala deleted file mode 100644 index ef490f0a835e..000000000000 --- a/src/library/scala/collection/mutable/ObservableMap.scala +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Scala 
(https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import script._ - - -/** This class is typically used as a mixin. It adds a subscription - * mechanism to the `Map` class into which this abstract - * class is mixed in. Class `ObservableMap` publishes - * events of the type `Message`. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - */ -@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") -trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable] { - - type Pub <: ObservableMap[A, B] - - abstract override def += (kv: (A, B)): this.type = { - val (key, value) = kv - - get(key) match { - case None => - super.+=(kv) - publish(new Include((key, value)) with Undoable { - def undo = -=(key) - }) - case Some(old) => - super.+=(kv) - publish(new Update((key, value)) with Undoable { - def undo = +=((key, old)) - }) - } - this - } - - abstract override def -= (key: A): this.type = { - get(key) match { - case None => - case Some(old) => - super.-=(key) - publish(new Remove((key, old)) with Undoable { - def undo = update(key, old) - }) - } - this - } - - abstract override def clear(): Unit = { - super.clear() - publish(new Reset with Undoable { - def undo(): Unit = throw new UnsupportedOperationException("cannot undo") - }) - } -} diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala deleted file mode 100644 index 6852b1ee3e2c..000000000000 --- a/src/library/scala/collection/mutable/ObservableSet.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import script._ - -/** This class is typically used as a mixin. It adds a subscription - * mechanism to the `Set` class into which this abstract - * class is mixed in. Class `ObservableSet` publishes - * events of the type `Message`. - * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") -trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] { - - type Pub <: ObservableSet[A] - - abstract override def +=(elem: A): this.type = { - if (!contains(elem)) { - super.+=(elem) - publish(new Include(elem) with Undoable { def undo = -=(elem) }) - } - this - } - - abstract override def -=(elem: A): this.type = { - if (contains(elem)) { - super.-=(elem) - publish(new Remove(elem) with Undoable { def undo = +=(elem) }) - } - this - } - - abstract override def clear(): Unit = { - super.clear() - publish(new Reset with Undoable { - def undo(): Unit = throw new UnsupportedOperationException("cannot undo") - }) - } -} diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index 4d81587499dd..5840a0abc954 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,23 +10,29 @@ * additional information regarding copyright ownership. 
*/ -package scala -package collection +package scala.collection package mutable +import java.lang.Integer.numberOfLeadingZeros +import java.util.ConcurrentModificationException +import scala.collection.generic.DefaultSerializable + /** - * @define Coll `OpenHashMap` - * @define coll open hash map - * - * @since 2.7 - */ -object OpenHashMap { + * @define Coll `OpenHashMap` + * @define coll open hash map + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +@SerialVersionUID(3L) +object OpenHashMap extends MapFactory[OpenHashMap] { - def apply[K, V](elems : (K, V)*) = new OpenHashMap[K, V] ++= elems def empty[K, V] = new OpenHashMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]): OpenHashMap[K,V] = empty ++= it + + def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] = + new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) /** A hash table entry. - * + * * The entry is occupied if and only if its `value` is a `Some`; * deleted if and only if its `value` is `None`. * If its `key` is not the default value of type `Key`, the entry is occupied. @@ -35,60 +41,63 @@ object OpenHashMap { final private class OpenEntry[Key, Value](var key: Key, var hash: Int, var value: Option[Value]) + + private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) } -/** A mutable hash map based on an open hashing scheme. The precise scheme is - * undefined, but it should make a reasonable effort to ensure that an insert - * with consecutive hash codes is not unnecessarily penalised. In particular, - * mappings of consecutive integer keys should work without significant - * performance loss. - * - * @tparam Key type of the keys in this map. - * @tparam Value type of the values in this map. - * @param initialSize the initial size of the internal hash table. 
- * - * @author David MacIver - * @since 2.7 - * - * @define Coll `OpenHashMap` - * @define coll open hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ +/** A mutable hash map based on an open addressing method. The precise scheme is + * undefined, but it should make a reasonable effort to ensure that an insert + * with consecutive hash codes is not unnecessarily penalised. In particular, + * mappings of consecutive integer keys should work without significant + * performance loss. + * + * @tparam Key type of the keys in this map. + * @tparam Value type of the values in this map. + * @param initialSize the initial size of the internal hash table. + * + * @define Coll `OpenHashMap` + * @define coll open hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") class OpenHashMap[Key, Value](initialSize : Int) -extends AbstractMap[Key, Value] - with Map[Key, Value] - with MapLike[Key, Value, OpenHashMap[Key, Value]] { + extends AbstractMap[Key, Value] + with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] + with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] + with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable] + with DefaultSerializable { import OpenHashMap.OpenEntry private type Entry = OpenEntry[Key, Value] /** A default constructor creates a hashmap with initial size `8`. - */ + */ def this() = this(8) - override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value] + override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap - private[this] val actualInitialSize = HashTable.nextPositivePowerOfTwo(initialSize) + private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) - private var mask = actualInitialSize - 1 + private[this] var mask = actualInitialSize - 1 /** The hash table. 
- * + * * The table's entries are initialized to `null`, indication of an empty slot. * A slot is either deleted or occupied if and only if the entry is non-`null`. */ private[this] var table = new Array[Entry](actualInitialSize) - private var _size = 0 - private var deleted = 0 + private[this] var _size = 0 + private[this] var deleted = 0 // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. private[this] var modCount = 0 override def size = _size - private[this] def size_=(s : Int) { _size = s } - + override def knownSize: Int = size + private[this] def size_=(s : Int): Unit = _size = s + override def isEmpty: Boolean = _size == 0 /** Returns a mangled hash code of the provided key. */ protected def hashOf(key: Key) = { var h = key.## @@ -113,14 +122,14 @@ extends AbstractMap[Key, Value] /** Return the index of the first slot in the hash table (in probe order) * that is, in order of preference, either occupied by the given key, deleted, or empty. - * + * * @param hash hash value for `key` */ private[this] def findIndex(key: Key, hash: Int): Int = { var index = hash & mask var j = 0 - /** Index of the first slot containing a deleted entry, or -1 if none found yet. */ + // Index of the first slot containing a deleted entry, or -1 if none found yet var firstDeletedIndex = -1 var entry = table(index) @@ -139,15 +148,14 @@ extends AbstractMap[Key, Value] if (firstDeletedIndex == -1) index else firstDeletedIndex } - override def update(key: Key, value: Value) { - put(key, value) - } + // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing. 
+ override def update(key: Key, value: Value): Unit = put(key, value) + + @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0") + def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } - @deprecatedOverriding("+= should not be overridden in order to maintain consistency with put.", "2.11.0") - def += (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } - - @deprecatedOverriding("-= should not be overridden in order to maintain consistency with remove.", "2.11.0") - def -= (key: Key): this.type = { remove(key); this } + @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0") + def subtractOne (key: Key): this.type = { remove(key); this } override def put(key: Key, value: Value): Option[Value] = put(key, hashOf(key), value) @@ -176,7 +184,7 @@ extends AbstractMap[Key, Value] } /** Delete the hash table slot contained in the given entry. */ - @inline + @`inline` private[this] def deleteSlot(entry: Entry) = { entry.key = null.asInstanceOf[Key] entry.hash = 0 @@ -202,7 +210,7 @@ extends AbstractMap[Key, Value] var j = 0 while(entry != null){ if (entry.hash == hash && - entry.key == key){ + entry.key == key){ return entry.value } @@ -214,27 +222,39 @@ extends AbstractMap[Key, Value] } /** An iterator over the elements of this map. Use of this iterator follows - * the same contract for concurrent modification as the foreach method. - * - * @return the iterator - */ - def iterator: Iterator[(Key, Value)] = new AbstractIterator[(Key, Value)] { - var index = 0 - val initialModCount = modCount - - private[this] def advance() { - if (initialModCount != modCount) sys.error("Concurrent modification") + * the same contract for concurrent modification as the foreach method. 
+ * + * @return the iterator + */ + def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { + override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) + } + + override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { + override protected def nextResult(node: Entry): Key = node.key + } + override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { + override protected def nextResult(node: Entry): Value = node.value.get + } + + private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { + private[this] var index = 0 + private[this] val initialModCount = modCount + + private[this] def advance(): Unit = { + if (initialModCount != modCount) throw new ConcurrentModificationException while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 } def hasNext = {advance(); index <= mask } - def next = { + def next() = { advance() val result = table(index) index += 1 - (result.key, result.value.get) + nextResult(result) } + protected def nextResult(node: Entry): A } override def clone() = { @@ -244,36 +264,43 @@ extends AbstractMap[Key, Value] } /** Loop over the key, value mappings of this map. - * - * The behaviour of modifying the map during an iteration is as follows: - * - Deleting a mapping is always permitted. - * - Changing the value of mapping which is already present is permitted. - * - Anything else is not permitted. It will usually, but not always, throw an exception. - * - * @tparam U The return type of the specified function `f`, return result of which is ignored. - * @param f The function to apply to each key, value mapping. - */ - override def foreach[U](f : ((Key, Value)) => U) { + * + * The behaviour of modifying the map during an iteration is as follows: + * - Deleting a mapping is always permitted. + * - Changing the value of mapping which is already present is permitted. + * - Anything else is not permitted. 
It will usually, but not always, throw an exception. + * + * @tparam U The return type of the specified function `f`, return result of which is ignored. + * @param f The function to apply to each key, value mapping. + */ + override def foreach[U](f : ((Key, Value)) => U): Unit = { val startModCount = modCount foreachUndeletedEntry(entry => { - if (modCount != startModCount) sys.error("Concurrent Modification") + if (modCount != startModCount) throw new ConcurrentModificationException f((entry.key, entry.value.get))} ) } + override def foreachEntry[U](f : (Key, Value) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f(entry.key, entry.value.get)} + ) + } - private[this] def foreachUndeletedEntry(f : Entry => Unit){ + private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { table.foreach(entry => if (entry != null && entry.value != None) f(entry)) } - override def transform(f : (Key, Value) => Value) = { + override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) this } - override def retain(f : (Key, Value) => Boolean) = { + override def filterInPlace(f : (Key, Value) => Boolean): this.type = { foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) this } - override def stringPrefix = "OpenHashMap" + override protected[this] def stringPrefix = "OpenHashMap" } diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index 3c70e0f371ad..147cffc22a95 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,44 +10,55 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ +import scala.collection.generic.DefaultSerializationProxy +import scala.math.Ordering -/** This class implements priority queues using a heap. - * To prioritize elements of type A there must be an implicit - * Ordering[A] available at creation. - * - * If multiple elements have the same priority in the ordering of this - * PriorityQueue, no guarantees are made regarding the order in which elements +/** A heap-based priority queue. + * + * To prioritize elements of type `A` there must be an implicit + * `Ordering[A]` available at creation. Elements are retrieved + * in priority order by using [[dequeue]] or [[dequeueAll]]. + * + * If multiple elements have the same priority as determined by the ordering for this + * `PriorityQueue`, no guarantees are made regarding the order in which those elements * are returned by `dequeue` or `dequeueAll`. In particular, that means this - * class does not guarantee first in first out behaviour that may be - * incorrectly inferred from the Queue part of the name of this class. + * class does not guarantee first-in-first-out behavior, as may be + * incorrectly inferred from the fact that this data structure is + * called a "queue". * * Only the `dequeue` and `dequeueAll` methods will return elements in priority * order (while removing elements from the heap). Standard collection methods - * including `drop`, `iterator`, and `toString` will remove or traverse the heap + * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary + * iteration order: they will traverse the heap or remove elements * in whichever order seems most convenient. 
* - * Therefore, printing a `PriorityQueue` will not reveal the priority order of - * the elements, though the highest-priority element will be printed first. To - * print the elements in order, one must duplicate the `PriorityQueue` (by using - * `clone`, for instance) and then dequeue them: + * Therefore, printing a `PriorityQueue` will not show elements in priority order, + * though the highest-priority element will be printed first. + * To print the elements in order, it's necessary to `dequeue` them. + * To do this non-destructively, duplicate the `PriorityQueue` first; + * the `clone` method is a suitable way to obtain a disposable copy. + * + * Client keys are assumed to be immutable. Mutating keys may violate + * the invariant of the underlying heap-ordered tree. Note that [[clone]] + * does not rebuild the underlying tree. * - * @example {{{ - * val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) - * println(pq) // elements probably not in order - * println(pq.clone.dequeueAll) // prints Vector(7, 5, 3, 2, 1) + * {{{ + * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) + * + * scala> pq.toList // also not in order + * val res0: List[Int] = List(7, 3, 5, 1, 2) + * + * scala> pq.clone.dequeueAll + * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) * }}} * * @tparam A type of the elements in this priority queue. * @param ord implicit ordering used to compare the elements of type `A`. 
* - * @author Matthias Zenger - * @since 1 - * * @define Coll PriorityQueue * @define coll priority queue * @define orderDependent @@ -55,46 +66,73 @@ import generic._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(736425014438295802L) sealed class PriorityQueue[A](implicit val ord: Ordering[A]) - extends AbstractIterable[A] - with Iterable[A] - with GenericOrderedTraversableTemplate[A, PriorityQueue] - with IterableLike[A, PriorityQueue[A]] - with Growable[A] - with Builder[A, PriorityQueue[A]] - with Serializable - with scala.Cloneable + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, Iterable, PriorityQueue[A]] + with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] + with Builder[A, PriorityQueue[A]] + with Cloneable[PriorityQueue[A]] + with Growable[A] + with Serializable { - import ord._ - @SerialVersionUID(3491656538574147683L) - private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] with Serializable { + private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + override def mapInPlace(f: A0 => A0): this.type = { + var i = 1 // see "we do not use array(0)" comment below (???) 
+ val siz = this.size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + def p_size0 = size0 def p_size0_=(s: Int) = size0 = s def p_array = array def p_ensureSize(n: Int) = super.ensureSize(n) - def p_swap(a: Int, b: Int) = super.swap(a, b) + def p_ensureAdditionalSize(n: Int) = super.ensureSize(size0 + n) + def p_swap(a: Int, b: Int): Unit = { + val h = array(a) + array(a) = array(b) + array(b) = h + } } - protected[this] override def newBuilder = PriorityQueue.newBuilder[A] - private val resarr = new ResizableArrayAccess[A] - resarr.p_size0 += 1 // we do not use array(0) + // we do not use array(0) + // storing the root of the heap at array(1) simplifies the calculations for + // parent and child indices: for a given index k, the parent of k is k / 2, + // the left child is k * 2, and the right child is k * 2 + 1 + resarr.p_size0 += 1 + /** Alias for [[size]]. */ def length: Int = resarr.length - 1 // adjust length accordingly override def size: Int = length + override def knownSize: Int = length override def isEmpty: Boolean = resarr.p_size0 < 2 - override def repr = this - def result = this + // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder + override def empty: PriorityQueue[A] = PriorityQueue.empty + + /** Replace the contents of this $coll with the mapped result. 
+ * + * @param f the mapping function + * @return this $coll + */ + def mapInPlace(f: A => A): this.type = { + resarr.mapInPlace(f) + heapify(1) + this + } - override def orderedCompanion = PriorityQueue + def result() = this private def toA(x: AnyRef): A = x.asInstanceOf[A] protected def fixUp(as: Array[AnyRef], m: Int): Unit = { var k: Int = m - while (k > 1 && toA(as(k / 2)) < toA(as(k))) { + // use `ord` directly to avoid allocating `OrderingOps` + while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { resarr.p_swap(k, k / 2) k = k / 2 } @@ -105,9 +143,10 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) var k: Int = m while (n >= 2 * k) { var j = 2 * k - if (j < n && toA(as(j)) < toA(as(j + 1))) + // use `ord` directly to avoid allocating `OrderingOps` + if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) j += 1 - if (toA(as(k)) >= toA(as(j))) + if (ord.gteq(toA(as(k)), toA(as(j)))) return k != m else { val h = as(k) @@ -120,21 +159,21 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) } /** Inserts a single element into the priority queue. - * - * @param elem the element to insert. - * @return this $coll. - */ - def +=(elem: A): this.type = { - resarr.p_ensureSize(resarr.p_size0 + 1) + * + * @param elem the element to insert. + * @return this $coll. 
+ */ + def addOne(elem: A): this.type = { + resarr.p_ensureAdditionalSize(1) resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] fixUp(resarr.p_array, resarr.p_size0) resarr.p_size0 += 1 this } - override def ++=(xs: TraversableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]): this.type = { val from = resarr.p_size0 - for (x <- xs) unsafeAdd(x) + for (x <- xs.iterator) unsafeAdd(x) heapify(from) this } @@ -142,7 +181,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) private def unsafeAdd(elem: A): Unit = { // like += but skips fixUp, which breaks the ordering invariant // a series of unsafeAdds MUST be followed by heapify - resarr.p_ensureSize(resarr.p_size0 + 1) + resarr.p_ensureAdditionalSize(1) resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] resarr.p_size0 += 1 } @@ -192,25 +231,25 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) } } - /** Adds all elements provided by a `TraversableOnce` object - * into the priority queue. - * - * @param xs a traversable object. - * @return a new priority queue containing elements of both `xs` and `this`. - */ - def ++(xs: GenTraversableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs.seq } + /** Adds all elements provided by a `IterableOnce` object + * into the priority queue. + * + * @param xs a iterable object. + * @return a new priority queue containing elements of both `xs` and `this`. + */ + def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs } /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ + * + * @param elems the elements to add. + */ def enqueue(elems: A*): Unit = { this ++= elems } /** Returns the element with the highest priority in the queue, * and removes this element from the queue. * - * @throws java.util.NoSuchElementException - * @return the element with the highest priority. + * @return the element with the highest priority. 
+ * @throws NoSuchElementException if no element to remove from heap */ def dequeue(): A = if (resarr.p_size0 > 1) { @@ -223,56 +262,53 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) } else throw new NoSuchElementException("no element to remove from heap") - def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = { - val b = bf.apply() + /** Dequeues all elements and returns them in a sequence, in priority order. */ + def dequeueAll[A1 >: A]: immutable.Seq[A1] = { + val b = ArrayBuilder.make[Any] + b.sizeHint(size) while (nonEmpty) { b += dequeue() } - b.result() + immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]] } /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ + * or throws an error if there is no element contained in the queue. + * + * @return the element with the highest priority. + */ override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - def clear(): Unit = { resarr.p_size0 = 1 } + * the queue will be empty. + */ + def clear(): Unit = { + resarr.clear() + resarr.p_size0 = 1 + } /** Returns an iterator which yields all the elements. - * - * Note: The order of elements returned is undefined. - * If you want to traverse the elements in priority queue - * order, use `clone().dequeueAll.iterator`. - * - * @return an iterator over all the elements. - */ - override def iterator: Iterator[A] = new AbstractIterator[A] { - private var i = 1 - def hasNext: Boolean = i < resarr.p_size0 - def next(): A = { - val n = resarr.p_array(i) - i += 1 - toA(n) - } - } + * + * Note: The order of elements returned is undefined. 
+ * If you want to traverse the elements in priority queue + * order, use `clone().dequeueAll.iterator`. + * + * @return an iterator over all the elements. + */ + override def iterator: Iterator[A] = resarr.iterator.drop(1) /** Returns the reverse of this priority queue. The new priority queue has - * the same elements as the original, but the opposite ordering. - * - * For example, the element with the highest priority in `pq` has the lowest - * priority in `pq.reverse`, and vice versa. - * - * Ties are handled arbitrarily. Elements with equal priority may or - * may not be reversed with respect to each other. - * - * @return the reversed priority queue. - */ - def reverse = { + * the same elements as the original, but the opposite ordering. + * + * For example, the element with the highest priority in `pq` has the lowest + * priority in `pq.reverse`, and vice versa. + * + * Ties are handled arbitrarily. Elements with equal priority may or + * may not be reversed with respect to each other. + * + * @return the reversed priority queue. + */ + def reverse: PriorityQueue[A] = { val revq = new PriorityQueue[A]()(ord.reverse) // copy the existing data into the new array backwards // this won't put it exactly into the correct order, @@ -290,14 +326,14 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) /** Returns an iterator which yields all the elements in the reverse order - * than that returned by the method `iterator`. - * - * Note: The order of elements returned is undefined. - * - * @return an iterator over all elements sorted in descending order. - */ + * than that returned by the method `iterator`. + * + * Note: The order of elements returned is undefined. + * + * @return an iterator over all elements sorted in descending order. 
+ */ def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private var i = resarr.p_size0 - 1 + private[this] var i = resarr.p_size0 - 1 def hasNext: Boolean = i >= 1 def next(): A = { val n = resarr.p_array(i) @@ -306,38 +342,30 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) } } - /** The hashCode method always yields an error, since it is not - * safe to use mutable queues as keys in hash tables. - * - * @return never. - */ - override def hashCode(): Int = - throw new UnsupportedOperationException("unsuitable as hash key") - /** Returns a regular queue containing the same elements. - * - * Note: the order of elements is undefined. - */ + * + * Note: the order of elements is undefined. + */ def toQueue: Queue[A] = new Queue[A] ++= this.iterator /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ + * + * @return the string representation of this queue. + */ override def toString() = toList.mkString("PriorityQueue(", ", ", ")") /** Converts this $coll to a list. - * - * Note: the order of elements is undefined. - * - * @return a list containing all elements of this $coll. - */ - override def toList = this.iterator.toList + * + * Note: the order of elements is undefined. + * + * @return a list containing all elements of this $coll. + */ + override def toList: immutable.List[A] = immutable.List.from(this.iterator) /** This method clones the priority queue. - * - * @return a priority queue with the same elements. - */ + * + * @return a priority queue with the same elements. 
+ */ override def clone(): PriorityQueue[A] = { val pq = new PriorityQueue[A] val n = resarr.p_size0 @@ -346,188 +374,40 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) pq.resarr.p_size0 = n pq } -} - -object PriorityQueue extends OrderedTraversableFactory[PriorityQueue] { - def newBuilder[A](implicit ord: Ordering[A]): Builder[A, PriorityQueue[A]] = { - new Builder[A, PriorityQueue[A]] { - val pq = new PriorityQueue[A] - def +=(elem: A): this.type = { pq.unsafeAdd(elem); this } - def result(): PriorityQueue[A] = { pq.heapify(1); pq } - def clear(): Unit = pq.clear() + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if (copied > 0) { + Array.copy(resarr.p_array, 1, xs, start, copied) } + copied } - implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, PriorityQueue[A]] = new GenericCanBuildFrom[A] -} - - -/** This class servers as a proxy for priority queues. The - * elements of the queue have to be ordered in terms of the - * `Ordered[T]` class. - * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A] with Proxy { - def self: PriorityQueue[A] - - /** Creates a new iterator over all elements contained in this - * object. - * - * @return the new iterator - */ - override def iterator: Iterator[A] = self.iterator - - /** Returns the length of this priority queue. - */ - override def length: Int = self.length - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = self.isEmpty - - /** Inserts a single element into the priority queue. 
- * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = { self += elem; this } - - /** Adds all elements provided by an iterator into the priority queue. - * - * @param it an iterator - */ - override def ++=(it: TraversableOnce[A]): this.type = { - self ++= it - this - } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*): Unit = self ++= elems - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @return the element with the highest priority. - */ - override def dequeue(): A = self.dequeue() - - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - override def head: A = self.head - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = self.clear() + @deprecated("Use `PriorityQueue` instead", "2.13.0") + def orderedCompanion: PriorityQueue.type = PriorityQueue - /** Returns a regular queue containing the same elements. - */ - override def toQueue: Queue[A] = self.toQueue + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) - /** This method clones the priority queue. - * - * @return a priority queue with the same elements. - */ - override def clone(): PriorityQueue[A] = new PriorityQueueProxy[A] { - def self = PriorityQueueProxy.this.self.clone() - } + override protected[this] def className = "PriorityQueue" } -/** This class implements synchronized priority queues using a binary heap. - * The elements of the queue have to be ordered in terms of the `Ordered[T]` class. 
- * - * @tparam A type of the elements contained in this synchronized priority queue - * @param ord implicit ordering used to compared elements of type `A` - * - * @author Matthias Zenger - * @since 1 - * @define Coll `SynchronizedPriorityQueue` - * @define coll synchronized priority queue - */ -@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0") -sealed class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] { - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = synchronized { super.isEmpty } - - /** Inserts a single element into the priority queue. - * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = { - synchronized { - super.+=(elem) - } - this - } - - /** Adds all elements of a traversable object into the priority queue. - * - * @param xs a traversable object - */ - override def ++=(xs: TraversableOnce[A]): this.type = { - synchronized { - super.++=(xs) +@SerialVersionUID(3L) +object PriorityQueue extends SortedIterableFactory[PriorityQueue] { + def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + new Builder[A, PriorityQueue[A]] { + val pq = new PriorityQueue[A] + def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } + def result(): PriorityQueue[A] = { pq.heapify(1); pq } + def clear(): Unit = pq.clear() } - this } - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*): Unit = synchronized { super.++=(elems) } - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @return the element with the highest priority. 
- */ - override def dequeue(): A = synchronized { super.dequeue() } + def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - override def head: A = synchronized { super.head } - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = synchronized { super.clear() } - - /** Returns an iterator which yield all the elements of the priority - * queue in descending priority order. - * - * @return an iterator over all elements sorted in descending order. - */ - override def iterator: Iterator[A] = synchronized { super.iterator } - - /** Checks if two queues are structurally identical. - * - * @return true, iff both queues contain the same sequence of elements. - */ - override def equals(that: Any): Boolean = synchronized { super.equals(that) } - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ - override def toString(): String = synchronized { super.toString() } + def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + val b = newBuilder[E] + b ++= it + b.result() + } } diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala deleted file mode 100644 index 93a4d7b3b943..000000000000 --- a/src/library/scala/collection/mutable/Publisher.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable - - -/** `Publisher[A,This]` objects publish events of type `A` - * to all registered subscribers. When subscribing, a subscriber may specify - * a filter which can be used to constrain the number of events sent to the - * subscriber. Subscribers may suspend their subscription, or reactivate a - * suspended subscription. Class `Publisher` is typically used - * as a mixin. The abstract type `Pub` models the type of the publisher itself. - * - * @tparam Evt type of the published event. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - */ -trait Publisher[Evt] { - - type Pub <: Publisher[Evt] - type Sub = Subscriber[Evt, Pub] - type Filter = Evt => Boolean - - /** The publisher itself of type `Pub`. Implemented by a cast from `this` here. - * Needs to be overridden if the actual publisher is different from `this`. - */ - protected val self: Pub = this.asInstanceOf[Pub] - - private val filters = new HashMap[Sub, Set[Filter]] with MultiMap[Sub, Filter] - private val suspended = new HashSet[Sub] - - def subscribe(sub: Sub) { subscribe(sub, event => true) } - def subscribe(sub: Sub, filter: Filter) { filters.addBinding(sub, filter) } - def suspendSubscription(sub: Sub) { suspended += sub } - def activateSubscription(sub: Sub) { suspended -= sub } - def removeSubscription(sub: Sub) { filters -= sub } - def removeSubscriptions() { filters.clear() } - - protected def publish(event: Evt) { - filters.keys.foreach(sub => - if (!suspended.contains(sub) && - filters.entryExists(sub, p => p(event))) - sub.notify(self, event) - ) - } - - /** Checks if two publishers are structurally identical. - * - * @return true, iff both publishers contain the same sequence of elements. 
- */ - override def equals(obj: Any): Boolean = obj match { - case that: Publisher[_] => filters == that.filters && suspended == that.suspended - case _ => false - } -} diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index 40e742c2e922..cc3dad2e2495 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,190 +10,129 @@ * additional information regarding copyright ownership. */ -package scala -package collection +package scala.collection package mutable -import generic._ +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializable -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#queues "Scala's Collection Library overview"]] - * section on `Queues` for more information. 
- * - * @define Coll `mutable.Queue` - * @define coll mutable queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-5130636723247980089L) -class Queue[A] -extends MutableList[A] - with LinearSeqOptimized[A, Queue[A]] - with GenericTraversableTemplate[A, Queue] - with Cloneable[Queue[A]] - with Serializable -{ - override def companion: GenericCompanion[Queue] = Queue - - override protected[this] def newBuilder = companion.newBuilder[A] - - private[mutable] def this(fst: LinkedList[A], lst: LinkedList[A], lng: Int) { - this() - first0 = fst - last0 = lst - len = lng - } - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - def enqueue(elems: A*): Unit = this ++= elems - - /** Returns the first element in the queue, and removes this element - * from the queue. - * - * @throws java.util.NoSuchElementException - * @return the first element of the queue. - */ - def dequeue(): A = - if (isEmpty) - throw new NoSuchElementException("queue empty") - else { - val res = first0.elem - first0 = first0.next - decrementLength() - res - } +/** `Queue` objects implement data structures that allow to + * insert and retrieve elements in a first-in-first-out (FIFO) manner. 
+ * + * @define Coll `mutable.Queue` + * @define coll mutable queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) + extends ArrayDeque[A](array, start, end) + with IndexedSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with ArrayDequeOps[A, Queue, Queue[A]] + with Cloneable[Queue[A]] + with DefaultSerializable { + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = + this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def iterableFactory: SeqFactory[Queue] = Queue + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Queue" + + /** + * Add elements to the end of this queue + * + * @param elem + * @return this + */ + def enqueue(elem: A): this.type = this += elem + + /** Enqueue two or more elements at the end of the queue. The last element + * of the sequence will be on end of the queue. + * + * @param elems the element sequence. + * @return this + */ + def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems) + + /** Enqueues all elements in the given iterable object into the queue. The + * last element in the iterable object will be on front of the new queue. + * + * @param elems the iterable object. + * @return this + */ + def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems + + /** + * Removes the first element from this queue and returns it + * + * @return + * @throws NoSuchElementException when queue is empty + */ + def dequeue(): A = removeHead() /** Returns the first element in the queue which satisfies the - * given predicate, and removes this element from the queue. 
- * - * @param p the predicate used for choosing the first element - * @return the first element of the queue for which p yields true - */ + * given predicate, and removes this element from the queue. + * + * @param p the predicate used for choosing the first element + * @return the first element of the queue for which p yields true + */ def dequeueFirst(p: A => Boolean): Option[A] = - if (isEmpty) - None - else if (p(first0.elem)) { - val res: Option[A] = Some(first0.elem) - first0 = first0.next - decrementLength() - res - } else { - val optElem = removeFromList(p) - if (optElem != None) decrementLength() - optElem - } - - private def removeFromList(p: A => Boolean): Option[A] = { - var leftlst = first0 - var res: Option[A] = None - while (leftlst.next.nonEmpty && !p(leftlst.next.elem)) { - leftlst = leftlst.next - } - if (leftlst.next.nonEmpty) { - res = Some(leftlst.next.elem) - if (leftlst.next eq last0) last0 = leftlst - leftlst.next = leftlst.next.next - } - res - } + removeFirst(p) /** Returns all elements in the queue which satisfy the - * given predicate, and removes those elements from the queue. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. 
- */ - def dequeueAll(p: A => Boolean): Seq[A] = { - if (first0.isEmpty) - Seq.empty - else { - val res = new ArrayBuffer[A] - while ((first0.nonEmpty) && p(first0.elem)) { - res += first0.elem - first0 = first0.next - decrementLength() - } - if (first0.isEmpty) res - else removeAllFromList(p, res) - } - } - - private def removeAllFromList(p: A => Boolean, res: ArrayBuffer[A]): ArrayBuffer[A] = { - var leftlst = first0 - while (leftlst.next.nonEmpty) { - if (p(leftlst.next.elem)) { - res += leftlst.next.elem - if (leftlst.next eq last0) last0 = leftlst - leftlst.next = leftlst.next.next - decrementLength() - } else leftlst = leftlst.next - } - res - } - - /** Return the proper suffix of this list which starts with the first element that satisfies `p`. - * That element is unlinked from the list. If no element satisfies `p`, return None. - */ - @deprecated("extractFirst inappropriately exposes implementation details. Use dequeue or dequeueAll.", "2.11.0") - def extractFirst(start: LinkedList[A], p: A => Boolean): Option[LinkedList[A]] = { - if (isEmpty) None - else { - var cell = start - while ((cell.next.nonEmpty) && !p(cell.next.elem)) { - cell = cell.next - } - if (cell.next.isEmpty) - None - else { - val res: Option[LinkedList[A]] = Some(cell.next) - cell.next = cell.next.next - decrementLength() - res - } - } - } + * given predicate, and removes those elements from the queue. + * + * @param p the predicate used for choosing elements + * @return a sequence of all elements in the queue for which + * p yields true. 
+ */ + def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] = + removeAll(p) + + /** + * Returns and dequeues all elements from the queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return The removed elements + */ + def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. - */ - def front: A = head - - - // TODO - Don't override this just for new to create appropriate type.... - override def tail: Queue[A] = { - val tl = new Queue[A] - tailImpl(tl) - tl - } - - override def clone(): Queue[A] = { - val bf = newBuilder - bf ++= seq + * is no element contained in the queue. + * + * @return the first element. + */ + @`inline` final def front: A = head + + override protected def klone(): Queue[A] = { + val bf = newSpecificBuilder + bf ++= this bf.result() } - private[this] def decrementLength() { - len -= 1 - if (len == 0) last0 = first0 - } + override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = + new Queue(array, start = 0, end) + } +/** + * $factoryInfo + * @define coll queue + * @define Coll `Queue` + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + + def from[A](source: IterableOnce[A]): Queue[A] = empty ++= source + + def empty[A]: Queue[A] = new Queue -object Queue extends SeqFactory[Queue] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + def newBuilder[A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) - def newBuilder[A]: Builder[A, Queue[A]] = new MutableList[A] mapResult { _.toQueue } } diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala deleted file mode 100644 index 87765b0b0145..000000000000 --- 
a/src/library/scala/collection/mutable/QueueProxy.scala +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * @tparam A type of the elements in this queue proxy. - * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait QueueProxy[A] extends Queue[A] with Proxy { - - def self: Queue[A] - - /** Access element number `n`. - * - * @return the element at index `n`. - */ - override def apply(n: Int): A = self.apply(n) - - /** Returns the length of this queue. - */ - override def length: Int = self.length - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = self.isEmpty - - /** Inserts a single element at the end of the queue. - * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = { self += elem; this } - - /** Adds all elements provided by an iterator at the end of the queue. The - * elements are prepended in the order they are given out by the iterator. - * - * @param it an iterator - */ - override def ++=(it: TraversableOnce[A]): this.type = { - self ++= it - this - } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*) { self ++= elems } - - /** Returns the first element in the queue, and removes this element - * from the queue. - * - * @return the first element of the queue. 
- */ - override def dequeue(): A = self.dequeue() - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. - */ - override def front: A = self.front - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = self.clear() - - /** Returns an iterator over all elements on the queue. - * - * @return an iterator over all queue elements. - */ - override def iterator: Iterator[A] = self.iterator - - /** This method clones the queue. - * - * @return a queue with the same elements. - */ - override def clone(): Queue[A] = new QueueProxy[A] { - def self = QueueProxy.this.self.clone() - } -} diff --git a/src/library/scala/collection/mutable/RedBlackTree.scala b/src/library/scala/collection/mutable/RedBlackTree.scala index 4cf953c08a53..aca36f0271d8 100644 --- a/src/library/scala/collection/mutable/RedBlackTree.scala +++ b/src/library/scala/collection/mutable/RedBlackTree.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,18 +10,17 @@ * additional information regarding copyright ownership. */ -package scala.collection.mutable +package scala +package collection.mutable import scala.annotation.tailrec -import scala.collection.Iterator +import collection.{AbstractIterator, Iterator} +import java.lang.String /** * An object containing the red-black tree implementation used by mutable `TreeMaps`. * * The trees implemented in this object are *not* thread safe. 
- * - * @author Rui Gonçalves - * @since 2.12 */ private[collection] object RedBlackTree { @@ -32,13 +31,11 @@ private[collection] object RedBlackTree { // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) // on the size of the range. - @SerialVersionUID(21575944040195605L) - final class Tree[A, B](var root: Node[A, B], var size: Int) extends Serializable - - @SerialVersionUID(1950599696441054720L) - final class Node[A, B](var key: A, var value: B, var red: Boolean, - var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) extends Serializable { + final class Tree[A, B](var root: Node[A, B], var size: Int) { + def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) + } + final class Node[A, B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" } @@ -48,11 +45,11 @@ private[collection] object RedBlackTree { object Node { - @inline def apply[A, B](key: A, value: B, red: Boolean, + @`inline` def apply[A, B](key: A, value: B, red: Boolean, left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = new Node(key, value, red, left, right, parent) - @inline def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + @`inline` def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = new Node(key, value, red, null, null, parent) def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) @@ -86,7 +83,7 @@ private[collection] object RedBlackTree { else node } - def contains[A: Ordering](tree: Tree[A, _], key: A) = getNode(tree.root, key) ne null + def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { case null => None @@ 
-444,15 +441,28 @@ private[collection] object RedBlackTree { if (node.right ne null) foreachNodeNonNull(node.right, f) } - def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = foreachNodeKey(tree.root, f) - - private[this] def foreachNodeKey[A, U](node: Node[A, _], f: A => U): Unit = - if (node ne null) foreachNodeKeyNonNull(node, f) + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + def g(node: Node[A, _]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } - private[this] def foreachNodeKeyNonNull[A, U](node: Node[A, _], f: A => U): Unit = { - if (node.left ne null) foreachNodeKeyNonNull(node.left, f) - f(node.key) - if (node.right ne null) foreachNodeKeyNonNull(node.right, f) + def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + def g(node: Node[A, B]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key, node.value) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) } def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) @@ -476,12 +486,13 @@ private[collection] object RedBlackTree { new ValuesIterator(tree, start, end) private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) - (implicit ord: Ordering[A]) extends Iterator[R] { + (implicit ord: Ordering[A]) extends AbstractIterator[R] { - protected[this] def nextResult(node: Node[A, B]): R + protected def nextResult(node: Node[A, B]): R def hasNext: Boolean = nextNode ne null + @throws[NoSuchElementException] def next(): R = nextNode match { case null => throw new NoSuchElementException("next on empty iterator") case node => @@ -588,4 +599,54 @@ private[collection] object RedBlackTree { isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 } + + // building + + /** Build a Tree suitable for a TreeSet from an ordered 
sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, Null] = size match { + case 0 => null + case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(x, null, red = false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, B] = size match { + case 0 => null + case 1 => + val (k, v) = xs.next() + new Node(k, v, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val (k, v) = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(k, v, red = false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + def copyTree[A, B](n: Node[A, B]): Node[A, B] = + if(n eq null) null else { + val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) + if(c.left != null) c.left.parent = c + if(c.right != null) c.right.parent = c + c + } } diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala deleted file mode 100644 index 9d181531a0b9..000000000000 --- a/src/library/scala/collection/mutable/ResizableArray.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL 
and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ - -/** This class is used internally to implement data structures that - * are based on resizable arrays. - * - * @tparam A type of the elements contained in this resizable array. - * - * @author Matthias Zenger, Burak Emir - * @author Martin Odersky - * @since 1 - */ -trait ResizableArray[A] extends IndexedSeq[A] - with GenericTraversableTemplate[A, ResizableArray] - with IndexedSeqOptimized[A, ResizableArray[A]] { - - override def companion: GenericCompanion[ResizableArray] = ResizableArray - - protected def initialSize: Int = 16 - protected var array: Array[AnyRef] = new Array[AnyRef](math.max(initialSize, 1)) - protected var size0: Int = 0 - - //########################################################################## - // implement/override methods of IndexedSeq[A] - - /** Returns the length of this resizable array. - */ - def length: Int = size0 - - def apply(idx: Int) = { - if (idx >= size0) throw new IndexOutOfBoundsException(idx.toString) - array(idx).asInstanceOf[A] - } - - def update(idx: Int, elem: A) { - if (idx >= size0) throw new IndexOutOfBoundsException(idx.toString) - array(idx) = elem.asInstanceOf[AnyRef] - } - - override def foreach[U](f: A => U) { - var i = 0 - // size is cached here because profiling reports a lot of time spent calling - // it on every iteration. I think it's likely a profiler ghost but it doesn't - // hurt to lift it into a local. - val top = size - while (i < top) { - f(array(i).asInstanceOf[A]) - i += 1 - } - } - - /** Fills the given array `xs` with at most `len` elements of this - * traversable starting at position `start`. 
- * - * Copying will stop once either the end of the current traversable is - * reached or `len` elements have been copied or the end of the array - * is reached. - * - * @param xs the array to fill. - * @param start starting index. - * @param len number of elements to copy - */ - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { - val len1 = len min (xs.length - start) min length - if (len1 > 0) Array.copy(array, 0, xs, start, len1) - } - - //########################################################################## - - /** Remove elements of this array at indices after `sz`. - */ - def reduceToSize(sz: Int) { - require(sz <= size0) - while (size0 > sz) { - size0 -= 1 - array(size0) = null - } - } - - /** Ensure that the internal array has at least `n` cells. */ - protected def ensureSize(n: Int) { - // Use a Long to prevent overflows - val arrayLength: Long = array.length - if (n > arrayLength) { - var newSize: Long = arrayLength * 2 - while (n > newSize) - newSize = newSize * 2 - // Clamp newSize to Int.MaxValue - if (newSize > Int.MaxValue) newSize = Int.MaxValue - - val newArray: Array[AnyRef] = new Array(newSize.toInt) - java.lang.System.arraycopy(array, 0, newArray, 0, size0) - array = newArray - } - } - - /** Swap two elements of this array. - */ - protected def swap(a: Int, b: Int) { - val h = array(a) - array(a) = array(b) - array(b) = h - } - - /** Move parts of the array. 
- */ - protected def copy(m: Int, n: Int, len: Int) { - scala.compat.Platform.arraycopy(array, m, array, n, len) - } -} - -object ResizableArray extends SeqFactory[ResizableArray] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ResizableArray[A]] = - ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, ResizableArray[A]] = new ArrayBuffer[A] -} diff --git a/src/library/scala/collection/mutable/ReusableBuilder.scala b/src/library/scala/collection/mutable/ReusableBuilder.scala index 993e87c209cd..c8565d6953f1 100644 --- a/src/library/scala/collection/mutable/ReusableBuilder.scala +++ b/src/library/scala/collection/mutable/ReusableBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,39 +14,42 @@ package scala package collection package mutable + /** `ReusableBuilder` is a marker trait that indicates that a `Builder` - * can be reused to build more than one instance of a collection. In - * particular, calling `result` followed by `clear` will produce a - * collection and reset the builder to begin building a new collection - * of the same type. - * - * It is up to subclasses to implement this behavior, and to document any - * other behavior that varies from standard `ReusableBuilder` usage - * (e.g. operations being well-defined after a call to `result`, or allowing - * multiple calls to result to obtain different snapshots of a collection under - * construction). - * - * @tparam Elem the type of elements that get added to the builder. - * @tparam To the type of collection that it produced. - * - * @since 2.12 - */ + * can be reused to build more than one instance of a collection. 
In + * particular, calling `result()` followed by `clear()` will produce a + * collection and reset the builder to begin building a new collection + * of the same type. + * + * In general no method other than `clear()` may be called after `result()`. + * It is up to subclasses to implement and to document other allowed sequences + * of operations (e.g. calling other methods after `result()` in order to obtain + * different snapshots of a collection under construction). + * + * @tparam Elem the type of elements that get added to the builder. + * @tparam To the type of collection that it produced. + * + * @define multipleResults + * + * This Builder can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] { /** Clears the contents of this builder. - * After execution of this method, the builder will contain no elements. - * - * If executed immediately after a call to `result`, this allows a new - * instance of the same type of collection to be built. - */ + * After execution of this method, the builder will contain no elements. + * + * If executed immediately after a call to `result()`, this allows a new + * instance of the same type of collection to be built. + */ override def clear(): Unit // Note: overriding for Scaladoc only! /** Produces a collection from the added elements. - * - * After a call to `result`, the behavior of all other methods is undefined - * save for `clear`. If `clear` is called, then the builder is reset and - * may be used to build another instance. - * - * @return a collection containing the elements added to this builder. - */ + * + * After a call to `result`, the behavior of all other methods is undefined + * save for `clear()`. If `clear()` is called, then the builder is reset and + * may be used to build another instance. + * + * @return a collection containing the elements added to this builder. 
+ */ override def result(): To // Note: overriding for Scaladoc only! } diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala deleted file mode 100644 index 41106e1d4d77..000000000000 --- a/src/library/scala/collection/mutable/RevertibleHistory.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - - -/** A revertible history is a `History` object which supports - * an undo operation. Type variable `Evt` refers to the type - * of the published events, `Pub` denotes the publisher type. - * Type `Pub` is typically a subtype of `Publisher`. - * - * @tparam Evt type of the events - * @tparam Pub type of the publisher - * - * @author Matthias Zenger - * @since 2.8 - */ -class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Undoable with Serializable { - - /** Rollback the full history. - */ - def undo(): Unit = { - val old = log.toList.reverse - clear() - old.foreach { case (sub, event) => event.undo() } - } -} diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala index f50dfb62bd27..afabb834a63f 100644 --- a/src/library/scala/collection/mutable/Seq.scala +++ b/src/library/scala/collection/mutable/Seq.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,40 +10,57 @@ * additional information regarding copyright ownership. 
*/ -package scala -package collection -package mutable +package scala.collection.mutable -import generic._ +import scala.collection.{IterableFactoryDefaults, SeqFactory} +trait Seq[A] + extends Iterable[A] + with collection.Seq[A] + with SeqOps[A, Seq, Seq[A]] + with IterableFactoryDefaults[A, Seq] { -/** A subtrait of `collection.Seq` which represents sequences - * that can be mutated. - * - * $seqInfo - * - * The class adds an `update` method to `collection.Seq`. - * - * @define Coll `mutable.Seq` - * @define coll mutable sequence - */ -trait Seq[A] extends Iterable[A] -// with GenSeq[A] - with scala.collection.Seq[A] - with GenericTraversableTemplate[A, Seq] - with SeqLike[A, Seq[A]] { - override def companion: GenericCompanion[Seq] = Seq - override def seq: Seq[A] = this + override def iterableFactory: SeqFactory[Seq] = Seq } -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable sequence - * @define Coll `mutable.Seq` - */ -object Seq extends SeqFactory[Seq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Seq[A]] = new ArrayBuffer +/** + * $factoryInfo + * @define coll mutable sequence + * @define Coll `mutable.Seq` + */ +@SerialVersionUID(3L) +object Seq extends SeqFactory.Delegate[Seq](ArrayBuffer) + +/** + * @define coll mutable sequence + * @define Coll `mutable.Seq` + */ +trait SeqOps[A, +CC[_], +C <: AnyRef] + extends collection.SeqOps[A, CC, C] + with Cloneable[C] { + + override def clone(): C = { + val b = newSpecificBuilder + b ++= this + b.result() + } + + /** Replaces element at given index with a new value. + * + * @param idx the index of the element to replace. + * @param elem the new value. + * @throws IndexOutOfBoundsException if the index is not valid. 
+ */ + @throws[IndexOutOfBoundsException] + def update(idx: Int, elem: A): Unit + + @deprecated("Use `mapInPlace` on an `IndexedSeq` instead", "2.13.0") + @`inline`final def transform(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } } /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala deleted file mode 100644 index 9a161a15e042..000000000000 --- a/src/library/scala/collection/mutable/SeqLike.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import parallel.mutable.ParSeq - -/** A template trait for mutable sequences of type `mutable.Seq[A]`. - * @tparam A the type of the elements of the set - * @tparam This the type of the set itself. - * - */ -trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]] - extends scala.collection.SeqLike[A, This] - with Cloneable[This] - with Parallelizable[A, ParSeq[A]] -{ - self => - - protected[this] override def parCombiner = ParSeq.newCombiner[A] - - /** Replaces element at given index with a new value. - * - * @param idx the index of the element to replace. - * @param elem the new value. - * @throws IndexOutOfBoundsException if the index is not valid. - */ - def update(idx: Int, elem: A) - - /** Applies a transformation function to all values contained in this sequence. - * The transformation function produces new values from existing elements. - * - * @param f the transformation to apply - * @return the sequence itself. 
- */ - def transform(f: A => A): this.type = { - var i = 0 - this foreach { el => - this(i) = f(el) - i += 1 - } - this - } -} diff --git a/src/library/scala/collection/mutable/SeqMap.scala b/src/library/scala/collection/mutable/SeqMap.scala new file mode 100644 index 000000000000..dda2c47c3447 --- /dev/null +++ b/src/library/scala/collection/mutable/SeqMap.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +/** + * A generic trait for ordered mutable maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll mutable Seq map + * @define Coll `mutable.SeqMap` + */ + +trait SeqMap[K, V] extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap) diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala index 6e56daadea8b..cede5411a349 100644 --- a/src/library/scala/collection/mutable/Set.scala +++ b/src/library/scala/collection/mutable/Set.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -10,41 +10,113 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable -import generic._ +import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} -/** A generic trait for mutable sets. - * $setNote - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - * @define Coll `mutable.Set` - * @define coll mutable set - */ -trait Set[A] extends Iterable[A] -// with GenSet[A] - with scala.collection.Set[A] - with GenericSetTemplate[A, Set] - with SetLike[A, Set[A]] { - override def companion: GenericCompanion[Set] = Set - override def seq: Set[A] = this +/** Base trait for mutable sets */ +trait Set[A] + extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + + override def iterableFactory: IterableFactory[Set] = Set } -/** $factoryInfo - * The current default implementation of a $Coll is a `HashSet`. - * @define coll mutable set - * @define Coll `mutable.Set` - */ -object Set extends MutableSetFactory[Set] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Set[A]]] - private[this] val ReusableCBF = setCanBuildFrom[Any] - override def empty[A]: Set[A] = HashSet.empty[A] +/** + * @define coll mutable set + * @define Coll `mutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] + with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below + with Cloneable[C] + with Builder[A, C] + with Growable[A] + with Shrinkable[A] { + + def result(): C = coll + + /** Check whether the set contains the given element, and add it if not. + * + * @param elem the element to be added + * @return true if the element was added + */ + def add(elem: A): Boolean = + !contains(elem) && { + coll += elem; true + } + + /** Updates the presence of a single element in this set. 
+ * + * This method allows one to add or remove an element `elem` + * from this set depending on the value of parameter `included`. + * Typically, one would use the following syntax: + * {{{ + * set(elem) = true // adds element + * set(elem) = false // removes element + * }}} + * + * @param elem the element to be added or removed + * @param included a flag indicating whether element should be included or excluded. + */ + def update(elem: A, included: Boolean): Unit = { + if (included) add(elem) + else remove(elem) + } + + /** Removes an element from this set. + * + * @param elem the element to be removed + * @return true if this set contained the element before it was removed + */ + def remove(elem: A): Boolean = { + val res = contains(elem) + coll -= elem + res + } + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: A => Boolean): Unit = filterInPlace(p) + + /** Removes all elements from the set for which do not satisfy a predicate. + * @param p the predicate used to test elements. Only elements for + * which `p` returns `true` are retained in the set; all others + * are removed. + */ + def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val elem = array(i).asInstanceOf[A] + if (!p(elem)) { + this -= elem + } + i += 1 + } + } + this + } + + override def clone(): C = empty ++= this + + override def knownSize: Int = super[IterableOps].knownSize } +/** + * $factoryInfo + * @define coll mutable set + * @define Coll `mutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory.Delegate[Set](HashSet) + + /** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A] +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala deleted file mode 100644 index 51ac55871bda..000000000000 --- a/src/library/scala/collection/mutable/SetBuilder.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** The canonical builder for mutable Sets. - * - * @tparam A The type of the elements that will be contained in this set. - * @tparam Coll The type of the actual collection this set builds. - * @param empty The empty element of the collection. - * @since 2.8 - */ -class SetBuilder[A, Coll <: scala.collection.Set[A] -with scala.collection.SetLike[A, Coll]](empty: Coll) -extends ReusableBuilder[A, Coll] { - protected var elems: Coll = empty - def +=(x: A): this.type = { elems = elems + x; this } - def clear() { elems = empty } - def result: Coll = elems -} diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala deleted file mode 100644 index 7a0b2c16ac6f..000000000000 --- a/src/library/scala/collection/mutable/SetLike.scala +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable - -import generic._ -import script._ -import scala.annotation.migration -import parallel.mutable.ParSet - -/** A template trait for mutable sets of type `mutable.Set[A]`. - * - * This trait provides most of the operations of a `mutable.Set` independently of its representation. - * It is typically inherited by concrete implementations of sets. - * - * $setNote - * - * @tparam A the type of the elements of the set - * @tparam This the type of the set itself. - * - * @author Martin Odersky - * @since 2.8 - * - * @define setNote - * - * To implement a concrete mutable set, you need to provide implementations - * of the following methods: - * {{{ - * def contains(elem: A): Boolean - * def iterator: Iterator[A] - * def += (elem: A): this.type - * def -= (elem: A): this.type - * }}} - * If you wish that methods like `take`, - * `drop`, `filter` return the same kind of set, - * you should also override: - * {{{ - * def empty: This - * }}} - * It is also good idea to override methods `foreach` and - * `size` for efficiency. - * @define addDuplicates - * Note that duplicates (elements for which `equals` yields true) will be - * removed, but it is not specified whether it will be an element of this - * set or a newly added element. - * @define coll mutable set - * @define Coll mutable.Set - */ -trait SetLike[A, +This <: SetLike[A, This] with Set[A]] - extends scala.collection.SetLike[A, This] - with Scriptable[A] - with Builder[A, This] - with Growable[A] - with Shrinkable[A] - with Cloneable[mutable.Set[A]] - with Parallelizable[A, ParSet[A]] -{ self => - - /** A common implementation of `newBuilder` for all mutable sets - * in terms of `empty`. Overrides the implementation in `collection.SetLike` - * for better efficiency. - */ - override protected[this] def newBuilder: Builder[A, This] = empty - - protected[this] override def parCombiner = ParSet.newCombiner[A] - - /** Converts this $coll to a sequence. 
- * - * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. - */ - override def toSeq: collection.Seq[A] = { - // ArrayBuffer for efficiency, preallocated to the right size. - val result = new ArrayBuffer[A](size) - foreach(result += _) - result - } - - /** Adds an element to this $coll. - * - * @param elem the element to be added - * @return `true` if the element was not yet present in the set, `false` otherwise. - */ - def add(elem: A): Boolean = { - val r = contains(elem) - this += elem - !r - } - - /** Removes an element from this set. - * - * @param elem The element to be removed. - * @return `true` if the element was previously present in the set, `false` otherwise. - */ - def remove(elem: A): Boolean = { - val r = contains(elem) - this -= elem - r - } - - /** Updates the presence of a single element in this set. - * - * This method allows one to add or remove an element `elem` - * from this set depending on the value of parameter `included`. - * Typically, one would use the following syntax: - * {{{ - * set(elem) = true // adds element - * set(elem) = false // removes element - * }}} - * - * @param elem the element to be added or removed - * @param included a flag indicating whether element should be included or excluded. - */ - def update(elem: A, included: Boolean) { - if (included) this += elem else this -= elem - } - - // abstract methods from Growable/Shrinkable - - /** Adds a single element to the set. */ - def +=(elem: A): this.type - def -=(elem: A): this.type - - /** Removes all elements from the set for which do not satisfy a predicate. - * @param p the predicate used to test elements. Only elements for - * which `p` returns `true` are retained in the set; all others - * are removed. - */ - def retain(p: A => Boolean): Unit = - for (elem <- this.toList) // scala/bug#7269 toList avoids ConcurrentModificationException - if (!p(elem)) this -= elem - - /** Removes all elements from the set. 
After this operation is completed, - * the set will be empty. - */ - def clear(): Unit = - for (elem <- this.toList) - this -= elem - - override def clone(): This = empty ++= repr.seq - - /** The result when this set is used as a builder - * @return the set representation itself. - */ - def result: This = repr - - /** Creates a new set consisting of all the elements of this set and `elem`. - * - * $addDuplicates - * - * @param elem the element to add. - * @return a new set consisting of elements of this set and `elem`. - */ - @migration("`+` creates a new set. Use `+=` to add an element to this set and return that set itself.", "2.8.0") - override def + (elem: A): This = clone() += elem - - /** Creates a new set consisting of all the elements of this set and two or more - * specified elements. - * - * $addDuplicates - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - * @return a new set consisting of all the elements of this set, `elem1`, - * `elem2` and those in `elems`. - */ - @migration("`+` creates a new set. Use `+=` to add an element to this set and return that set itself.", "2.8.0") - override def + (elem1: A, elem2: A, elems: A*): This = - clone() += elem1 += elem2 ++= elems - - /** Creates a new set consisting of all the elements of this set and those - * provided by the specified traversable object. - * - * $addDuplicates - * - * @param xs the traversable object. - * @return a new set consisting of elements of this set and those in `xs`. - */ - @migration("`++` creates a new set. Use `++=` to add elements to this set and return that set itself.", "2.8.0") - override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq - - /** Creates a new set consisting of all the elements of this set except `elem`. - * - * @param elem the element to remove. - * @return a new set consisting of all the elements of this set except `elem`. 
- */ - @migration("`-` creates a new set. Use `-=` to remove an element from this set and return that set itself.", "2.8.0") - override def -(elem: A): This = clone() -= elem - - /** Creates a new set consisting of all the elements of this set except the two - * or more specified elements. - * - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new set consisting of all the elements of this set except - * `elem1`, `elem2` and `elems`. - */ - @migration("`-` creates a new set. Use `-=` to remove an element from this set and return that set itself.", "2.8.0") - override def -(elem1: A, elem2: A, elems: A*): This = - clone() -= elem1 -= elem2 --= elems - - /** Creates a new set consisting of all the elements of this set except those - * provided by the specified traversable object. - * - * @param xs the traversable object. - * @return a new set consisting of all the elements of this set except - * elements from `xs`. - */ - @migration("`--` creates a new set. Use `--=` to remove elements from this set and return that set itself.", "2.8.0") - override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq - - /** Send a message to this scriptable object. - * - * @param cmd the message to send. - * @throws UnsupportedOperationException - * if the message was not understood. 
- */ - @deprecated("scripting is deprecated", "2.11.0") - def <<(cmd: Message[A]): Unit = cmd match { - case Include(_, x) => this += x - case Remove(_, x) => this -= x - case Reset() => clear() - case s: Script[_] => s.iterator foreach << - case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") - } -} diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala deleted file mode 100644 index d7cecb1976cb..000000000000 --- a/src/library/scala/collection/mutable/SetProxy.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** This is a simple wrapper class for [[scala.collection.mutable.Set]]. - * It is most useful for assembling customized set abstractions - * dynamically using object composition and forwarding. 
- * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] { - override def repr = this - override def empty = new SetProxy[A] { val self = SetProxy.this.self.empty } - override def + (elem: A) = { self += elem ; this } - override def - (elem: A) = { self -= elem ; this } - - def +=(elem: A) = { self += elem; this } - def -=(elem: A) = { self -= elem; this } -} diff --git a/src/library/scala/collection/mutable/Shrinkable.scala b/src/library/scala/collection/mutable/Shrinkable.scala new file mode 100644 index 000000000000..acf1b4bf42ac --- /dev/null +++ b/src/library/scala/collection/mutable/Shrinkable.scala @@ -0,0 +1,79 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.annotation.tailrec + +/** This trait forms part of collections that can be reduced + * using a `-=` operator. + * + * @define coll shrinkable collection + * @define Coll `Shrinkable` + */ +trait Shrinkable[-A] { + + /** Removes a single element from this $coll. + * + * @param elem the element to remove. + * @return the $coll itself + */ + def subtractOne(elem: A): this.type + + /** Alias for `subtractOne` */ + @`inline` final def -= (elem: A): this.type = subtractOne(elem) + + /** Removes two or more elements from this $coll. + * + * @param elem1 the first element to remove. + * @param elem2 the second element to remove. + * @param elems the remaining elements to remove. 
+ * @return the $coll itself + */ + @deprecated("Use `--=` aka `subtractAll` instead of varargs `-=`; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def -= (elem1: A, elem2: A, elems: A*): this.type = { + this -= elem1 + this -= elem2 + this --= elems + } + + /** Removes all elements produced by an iterator from this $coll. + * + * @param xs the iterator producing the elements to remove. + * @return the $coll itself + */ + def subtractAll(xs: collection.IterableOnce[A]): this.type = { + @tailrec def loop(xs: collection.LinearSeq[A]): Unit = { + if (xs.nonEmpty) { + subtractOne(xs.head) + loop(xs.tail) + } + } + if (xs.asInstanceOf[AnyRef] eq this) { // avoid mutating under our own iterator + xs match { + case xs: Clearable => xs.clear() + case xs => subtractAll(Buffer.from(xs)) + } + } else { + xs match { + case xs: collection.LinearSeq[A] => loop(xs) + case xs => xs.iterator.foreach(subtractOne) + } + } + this + } + + /** Alias for `subtractAll` */ + @`inline` final def --= (xs: collection.IterableOnce[A]): this.type = subtractAll(xs) + +} diff --git a/src/library/scala/collection/mutable/SortedMap.scala b/src/library/scala/collection/mutable/SortedMap.scala index b214b0efd4ba..1884840f91e2 100644 --- a/src/library/scala/collection/mutable/SortedMap.scala +++ b/src/library/scala/collection/mutable/SortedMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -11,58 +11,93 @@ */ package scala -package collection -package mutable +package collection.mutable -import generic._ +import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} /** - * A mutable map whose keys are sorted. - * - * @tparam A the type of the keys contained in this sorted map. - * @tparam B the type of the values associated with the keys. 
- * - * @author Rui Gonçalves - * @since 2.12 - * - * @define Coll mutable.SortedMap - * @define coll mutable sorted map - */ -trait SortedMap[A, B] - extends Map[A, B] - with collection.SortedMap[A, B] - with MapLike[A, B, SortedMap[A, B]] - with SortedMapLike[A, B, SortedMap[A, B]] { + * Base type for mutable sorted map collections + */ +trait SortedMap[K, V] + extends collection.SortedMap[K, V] + with Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this - override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] = SortedMap.newBuilder[A, B] + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap - override def empty: SortedMap[A, B] = SortedMap.empty + /** The same sorted map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault(d: K => V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) - override def updated[B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value)) + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) +} - override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = clone().asInstanceOf[SortedMap[A, B1]] += kv +trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends collection.SortedMapOps[K, V, CC, C] + with MapOps[K, V, Map, C] { - override def +[B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1)*): SortedMap[A, B1] = - clone().asInstanceOf[SortedMap[A, B1]] += elem1 += elem2 ++= elems + def unsorted: Map[K, V] - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = - clone().asInstanceOf[SortedMap[A, B1]] ++= xs.seq + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) } -/** - * $factoryInfo - * - * @define Coll mutable.SortedMap - * @define coll mutable sorted map - */ -object SortedMap extends MutableSortedMapFactory[SortedMap] { +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B] + @SerialVersionUID(3L) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K => V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] + with Serializable { - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = - new SortedMapCanBuildFrom[A, B] -} + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory -/** Explicit instantiation of the `SortedMap` trait to reduce class file 
size in subclasses. */ -abstract class AbstractSortedMap[A, B] extends scala.collection.mutable.AbstractMap[A, B] with SortedMap[A, B] + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + implicit def ordering: Ordering[K] = underlying.ordering + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala index 75486e11f7fb..7faf70b87cdc 100644 --- a/src/library/scala/collection/mutable/SortedSet.scala +++ b/src/library/scala/collection/mutable/SortedSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,46 +14,32 @@ package scala package collection package mutable -import generic._ - -/** - * Base trait for mutable sorted set. - * - * @define Coll `mutable.SortedSet` - * @define coll mutable sorted set - * - * @author Lucien Pereira - * +/** Base type for mutable sorted set collections */ -trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.SortedSetLike[A,SortedSet[A]] - with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] { +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { - /** Needs to be overridden in subclasses. */ - override def empty: SortedSet[A] = SortedSet.empty[A] + override def unsorted: Set[A] = this + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet } /** - * A template for mutable sorted set companion objects. - * - * @define Coll `mutable.SortedSet` - * @define coll mutable sorted set - * @define factoryInfo - * This object provides a set of operations needed to create sorted sets of type mutable.SortedSet. - * @define sortedSetCanBuildFromInfo - * Standard `CanBuildFrom` instance for sorted sets. 
- * - * @author Lucien Pereira - * - */ -object SortedSet extends MutableSortedSetFactory[SortedSet] { - def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A] - - def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A] - - // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific - override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom + * @define coll mutable sorted set + * @define Coll `mutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] } -/** Explicit instantiation of the `SortedSet` trait to reduce class file size in subclasses. */ -abstract class AbstractSortedSet[A] extends scala.collection.mutable.AbstractSet[A] with SortedSet[A] +/** + * $factoryInfo + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 874b6960a483..01aacc22c65e 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,44 +10,21 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable -import generic._ -import scala.collection.immutable.{List, Nil} -import scala.collection.Iterator -import scala.annotation.migration - -/** Factory object for the `mutable.Stack` class. 
- * - * $factoryInfo - * @define coll mutable stack - * @define Coll `mutable.Stack` - */ -object Stack extends SeqFactory[Stack] { - class StackBuilder[A] extends Builder[A, Stack[A]] { - val lbuff = new ListBuffer[A] - def +=(elem: A) = { lbuff += elem; this } - def clear() = lbuff.clear() - def result = new Stack(lbuff.result) - } - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Stack[A]] = new StackBuilder[A] - val empty: Stack[Nothing] = new Stack(Nil) -} +import scala.annotation.{migration, nowarn} +import scala.collection.generic.DefaultSerializable +import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} /** A stack implements a data structure which allows to store and retrieve * objects in a last-in-first-out (LIFO) fashion. * + * Note that operations which consume and produce iterables preserve order, + * rather than reversing it (as would be expected from building a new stack + * by pushing an element at a time). + * * @tparam A type of the elements contained in this stack. * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stacks "Scala's Collection Library overview"]] - * section on `Stacks` for more information. * @define Coll `Stack` * @define coll stack * @define orderDependent @@ -55,125 +32,111 @@ object Stack extends SeqFactory[Stack] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. 
Use a List assigned to a var instead.", "2.12.0") -class Stack[A] private (var elems: List[A]) -extends AbstractSeq[A] - with Seq[A] - with SeqLike[A, Stack[A]] - with GenericTraversableTemplate[A, Stack] - with Cloneable[Stack[A]] - with Serializable -{ - def this() = this(Nil) - - override def companion = Stack - - /** Checks if the stack is empty. - * - * @return true, iff there is no element on the stack - */ - override def isEmpty: Boolean = elems.isEmpty +@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") +class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) + extends ArrayDeque[A](array, start, end) + with IndexedSeqOps[A, Stack, Stack[A]] + with StrictOptimizedSeqOps[A, Stack, Stack[A]] + with IterableFactoryDefaults[A, Stack] + with ArrayDequeOps[A, Stack, Stack[A]] + with Cloneable[Stack[A]] + with DefaultSerializable { - /** The number of elements in the stack */ - override def length = elems.length + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = + this(ArrayDeque.alloc(initialSize), start = 0, end = 0) - /** Retrieve `n`-th element from stack, where top of stack has index `0`. - * - * This is a linear time operation. - * - * @param index the index of the element to return - * @return the element at the specified index - * @throws IndexOutOfBoundsException if the index is out of bounds - */ - override def apply(index: Int) = elems(index) + override def iterableFactory: SeqFactory[Stack] = Stack - /** Replace element at index `n` with the new element `newelem`. - * - * This is a linear time operation. - * - * @param n the index of the element to replace. - * @param newelem the new element. 
- * @throws IndexOutOfBoundsException if the index is not valid - */ - def update(n: Int, newelem: A) = - if(n < 0 || n >= length) throw new IndexOutOfBoundsException(n.toString) - else elems = elems.take(n) ++ (newelem :: elems.drop(n+1)) + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Stack" - /** Push an element on the stack. - * - * @param elem the element to push on the stack. - * @return the stack with the new element on top. - */ - def push(elem: A): this.type = { elems = elem :: elems; this } + /** + * Add elements to the top of this stack + * + * @param elem + * @return + */ + def push(elem: A): this.type = prepend(elem) /** Push two or more elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elems the element sequence. - * @return the stack with the new elements on top. - */ - def push(elem1: A, elem2: A, elems: A*): this.type = - this.push(elem1).push(elem2).pushAll(elems) + * of the sequence will be on top of the new stack. + * + * @param elems the element sequence. + * @return the stack with the new elements on top. + */ + def push(elem1: A, elem2: A, elems: A*): this.type = { + val k = elems.knownSize + ensureSize(length + (if(k >= 0) k + 2 else 3)) + prepend(elem1).prepend(elem2).pushAll(elems) + } - /** Push all elements in the given traversable object onto the stack. The - * last element in the traversable object will be on top of the new stack. - * - * @param xs the traversable object. - * @return the stack with the new elements on top. - */ - def pushAll(xs: TraversableOnce[A]): this.type = { xs foreach push ; this } + /** Push all elements in the given iterable object onto the stack. The + * last element in the iterable object will be on top of the new stack. + * + * @param elems the iterable object. + * @return the stack with the new elements on top. 
+ */ + def pushAll(elems: scala.collection.IterableOnce[A]): this.type = + prependAll(elems match { + case it: scala.collection.Seq[A] => it.view.reverse + case it => IndexedSeq.from(it).view.reverse + }) + + /** + * Removes the top element from this stack and return it + * + * @return + * @throws NoSuchElementException when stack is empty + */ + def pop(): A = removeHead() + + /** + * Pop all elements from this stack and return it + * + * @return The removed elements + */ + def popAll(): scala.collection.Seq[A] = removeAll() + + /** + * Returns and removes all elements from the top of this stack which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return The removed elements + */ + def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) /** Returns the top element of the stack. This method will not remove * the element from the stack. An error is signaled if there is no * element on the stack. * - * @throws java.util.NoSuchElementException + * @throws NoSuchElementException if the stack is empty * @return the top element */ - def top: A = - elems.head + @`inline` final def top: A = head - /** Removes the top element from the stack. - * - * @throws java.util.NoSuchElementException - * @return the top element - */ - def pop(): A = { - val res = elems.head - elems = elems.tail - res + override protected def klone(): Stack[A] = { + val bf = newSpecificBuilder + bf ++= this + bf.result() } - /** - * Removes all elements from the stack. After this operation completed, - * the stack will be empty. - */ - def clear(): Unit = elems = Nil + override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] = + new Stack(array, start = 0, end) - /** Returns an iterator over all elements on the stack. This iterator - * is stable with respect to state changes in the stack object; i.e. - * such changes will not be reflected in the iterator. 
The iterator - * issues elements in the reversed order they were inserted into the - * stack (LIFO order). - * - * @return an iterator over all stack elements. - */ - @migration("`iterator` traverses in FIFO order.", "2.8.0") - override def iterator: Iterator[A] = elems.iterator +} - /** Creates a list of all stack elements in LIFO order. - * - * @return the created list. - */ - @migration("`toList` traverses in FIFO order.", "2.8.0") - override def toList: List[A] = elems +/** + * $factoryInfo + * @define coll stack + * @define Coll `Stack` + */ +@SerialVersionUID(3L) +object Stack extends StrictOptimizedSeqFactory[Stack] { - @migration("`foreach` traverses in FIFO order.", "2.8.0") - override def foreach[U](f: A => U): Unit = super.foreach(f) + def from[A](source: IterableOnce[A]): Stack[A] = empty ++= source + + def empty[A]: Stack[A] = new Stack + + def newBuilder[A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) - /** This method clones the stack. - * - * @return a stack with the same elements. - */ - override def clone(): Stack[A] = new Stack[A](elems) } diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala deleted file mode 100644 index 81f578eb575b..000000000000 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** A stack implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements in this stack proxy. 
- * - * @author Matthias Zenger - * @since 1 - */ -@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") -trait StackProxy[A] extends Stack[A] with Proxy { - - def self: Stack[A] - - /** Access element number `n`. - * - * @return the element at index `n`. - */ - override def apply(n: Int): A = self.apply(n) - - /** Returns the length of this stack. - */ - override def length: Int = self.length - - /** Checks if the stack is empty. - * - * @return true, iff there is no element on the stack - */ - override def isEmpty: Boolean = self.isEmpty - - /** Pushes a single element on top of the stack. - * - * @param elem the element to push onto the stack - */ - def +=(elem: A): this.type = { - self push elem - this - } - - override def pushAll(xs: TraversableOnce[A]): this.type = { self pushAll xs; this } - - override def push(elem1: A, elem2: A, elems: A*): this.type = { - self.push(elem1).push(elem2).pushAll(elems) - this - } - - override def push(elem: A): this.type = { - self.push(elem) - this - } - - /** Returns the top element of the stack. This method will not remove - * the element from the stack. An error is signaled if there is no - * element on the stack. - * - * @return the top element - */ - override def top: A = self.top - - /** Removes the top element from the stack. - */ - override def pop(): A = self.pop() - - /** - * Removes all elements from the stack. After this operation completed, - * the stack will be empty. - */ - override def clear(): Unit = self.clear() - - /** Returns an iterator over all elements on the stack. This iterator - * is stable with respect to state changes in the stack object; i.e. - * such changes will not be reflected in the iterator. The iterator - * issues elements in the order they were inserted into the stack - * (FIFO order). - * - * @return an iterator over all stack elements. 
- */ - override def iterator: Iterator[A] = self.iterator - - /** Creates a list of all stack elements in FIFO order. - * - * @return the created list. - */ - override def toList: List[A] = self.toList - - /** This method clones the stack. - * - * @return a stack with the same elements. - */ - override def clone(): Stack[A] = new StackProxy[A] { - def self = StackProxy.this.self.clone() - } -} diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala index 9e655076c8f3..ad9755389c48 100644 --- a/src/library/scala/collection/mutable/StringBuilder.scala +++ b/src/library/scala/collection/mutable/StringBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,368 +10,445 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection.mutable -import java.lang.{ StringBuilder => JavaStringBuilder } -import scala.annotation.migration -import immutable.StringLike +import scala.collection.{IterableFactoryDefaults, IterableOnce} +import scala.collection.immutable.WrappedString -/** A builder for mutable sequence of characters. This class provides an API - * mostly compatible with `java.lang.StringBuilder`, except where there are - * conflicts with the Scala collections API (such as the `reverse` method.) +import scala.Predef.{ // unimport char-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + charArrayOps => _, + genericWrapArray => _, + wrapCharArray => _, + wrapString => _, + //_ +} + +/** A builder of `String` which is also a mutable sequence of characters. 
+ * + * This class provides an API mostly compatible with `java.lang.StringBuilder`, + * except where there are conflicts with the Scala collections API, such as the `reverse` method: + * [[reverse]] produces a new `StringBuilder`, and [[reverseInPlace]] mutates this builder. + * + * Mutating operations return either `this.type`, i.e., the current builder, or `Unit`. + * + * Other methods extract data or information from the builder without mutating it. + * + * The distinction is also reflected in naming conventions used by collections, + * such as `append`, which mutates, and `appended`, which does not, or `reverse`, + * which does not mutate, and `reverseInPlace`, which does. + * + * The `String` result may be obtained using either `result()` or `toString`. + * + * $multipleResults + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#stringbuilders "Scala's Collection Library overview"]] + * section on `StringBuilders` for more information. * - * @author Stephane Micheloud - * @author Martin Odersky - * @since 2.7 * @define Coll `mutable.IndexedSeq` * @define coll string builder - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stringbuilders "Scala's Collection Library overview"]] - * section on `StringBuilders` for more information. 
*/ -@SerialVersionUID(0 - 8525408645367278351L) -final class StringBuilder(private val underlying: JavaStringBuilder) - extends AbstractSeq[Char] - with java.lang.CharSequence - with IndexedSeq[Char] - with StringLike[StringBuilder] - with ReusableBuilder[Char, String] - with Serializable { - - override protected[this] def thisCollection: StringBuilder = this - override protected[this] def toCollection(repr: StringBuilder): StringBuilder = repr +@SerialVersionUID(3L) +final class StringBuilder(val underlying: java.lang.StringBuilder) extends AbstractSeq[Char] + with ReusableBuilder[Char, String] + with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, StringBuilder] + with IterableFactoryDefaults[Char, IndexedSeq] + with java.lang.CharSequence + with Serializable { - /** Creates a string builder buffer as builder for this class */ - override protected[this] def newBuilder = new GrowingBuilder(new StringBuilder) - - /** Constructs a string builder initialized with string value `initValue` - * and with additional character capacity `initCapacity`. - */ - def this(initCapacity: Int, initValue: String) = - this(new JavaStringBuilder(initValue.length + initCapacity) append initValue) + def this() = this(new java.lang.StringBuilder) /** Constructs a string builder with no characters in it and an - * initial capacity of 16 characters. - */ - def this() = this(16, "") - - /** Constructs a string builder with no characters in it and an - * initial capacity specified by the `capacity` argument. - * - * @param capacity the initial capacity. - * @throws NegativeArraySizeException if capacity < 0. - */ - def this(capacity: Int) = this(capacity, "") + * initial capacity specified by the `capacity` argument. + * + * @param capacity the initial capacity. + * @throws java.lang.NegativeArraySizeException if capacity < 0. 
+ */ + def this(capacity: Int) = this(new java.lang.StringBuilder(capacity)) /** Constructs a string builder with initial characters - * equal to characters of `str`. - */ - def this(str: String) = this(16, str) + * equal to characters of `str`. + */ + def this(str: String) = this(new java.lang.StringBuilder(str)) - def toArray: Array[Char] = { - val arr = new Array[Char](length) - underlying.getChars(0, length, arr, 0) - arr - } + /** Constructs a string builder initialized with string value `initValue` + * and with additional character capacity `initCapacity`. + */ + def this(initCapacity: Int, initValue: String) = + this(new java.lang.StringBuilder(initValue.length + initCapacity) append initValue) - override def length: Int = underlying.length() - def length_=(n: Int) { underlying.setLength(n) } + // Methods required to make this an IndexedSeq: + def apply(i: Int): Char = underlying.charAt(i) - /** Clears the builder contents. - */ - def clear(): Unit = setLength(0) + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): StringBuilder = + new StringBuilder() appendAll coll - /** Sets the length of the character sequence. If the current sequence - * is shorter than the given length, it is padded with nulls; if it is - * longer, it is truncated. - * - * @param len the new length - * @throws IndexOutOfBoundsException if the argument is negative. - */ - def setLength(len: Int) { underlying setLength len } + override protected def newSpecificBuilder: Builder[Char, StringBuilder] = + new GrowableBuilder(new StringBuilder()) - /** Returns the current capacity, which is the size of the underlying array. - * A new array will be allocated if the current capacity is exceeded. - * - * @return the capacity - */ - def capacity: Int = underlying.capacity() + override def empty: StringBuilder = new StringBuilder() - /** Ensure that the capacity is at least the given argument. 
- * If the argument is greater than the current capacity, new - * storage will be allocated with size equal to the given - * argument or to `(2 * capacity + 2)`, whichever is larger. - * - * @param newCapacity the minimum desired capacity. - */ - def ensureCapacity(newCapacity: Int) { underlying ensureCapacity newCapacity } + @inline def length: Int = underlying.length - /** Returns the Char at the specified index, counting from 0 as in Arrays. - * - * @param index the index to look up - * @return the Char at the given index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def charAt(index: Int): Char = underlying charAt index + def length_=(n: Int): Unit = underlying.setLength(n) - /** Equivalent to charAt. - */ - override def apply(index: Int): Char = underlying charAt index + override def knownSize: Int = super[IndexedSeqOps].knownSize - /** Removes the Char at the specified index. The sequence is - * shortened by one. - * - * @param index The index to remove. - * @return This StringBuilder. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def deleteCharAt(index: Int): StringBuilder = { - underlying deleteCharAt index - this - } + def addOne(x: Char): this.type = { underlying.append(x); this } - /** Update the sequence at the given index to hold the specified Char. - * - * @param index the index to modify. - * @param ch the new Char. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def setCharAt(index: Int, ch: Char): Unit = underlying.setCharAt(index, ch) + def clear(): Unit = underlying.setLength(0) - /** Equivalent to setCharAt. - */ - def update(i: Int, c: Char): Unit = setCharAt(i, c) + /** Overloaded version of `addAll` that takes a string */ + def addAll(s: String): this.type = { underlying.append(s); this } - /** Returns a new String made up of a subsequence of this sequence, - * beginning at the given index and extending to the end of the sequence. 
- * - * target.substring(start) is equivalent to target.drop(start) - * - * @param start The starting index, inclusive. - * @return The new String. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - def substring(start: Int): String = substring(start, length) + /** Alias for `addAll` */ + def ++= (s: String): this.type = addAll(s) - /** Returns a new String made up of a subsequence of this sequence, - * beginning at the start index (inclusive) and extending to the - * end index (exclusive). - * - * target.substring(start, end) is equivalent to target.slice(start, end).mkString - * - * @param start The beginning index, inclusive. - * @param end The ending index, exclusive. - * @return The new String. - * @throws StringIndexOutOfBoundsException If either index is out of bounds, - * or if start > end. - */ - def substring(start: Int, end: Int): String = underlying.substring(start, end) + def result() = underlying.toString - /** For implementing CharSequence. - */ - def subSequence(start: Int, end: Int): java.lang.CharSequence = - substring(start, end) + override def toString: String = result() - /** Appends the given Char to the end of the sequence. - */ - def +=(x: Char): this.type = { append(x); this } + override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) = + ct.runtimeClass match { + case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]] + case _ => super.toArray + } - /** Optimization. + /** Returns the contents of this StringBuilder as an `Array[Char]`. + * + * @return An array with the characters from this builder. 
*/ - def ++=(s: String): this.type = { - underlying append s - this + def toCharArray: Array[Char] = { + val len = underlying.length + val arr = new Array[Char](len) + underlying.getChars(0, len, arr, 0) + arr } - def appendAll(xs: String): StringBuilder = { + // append* methods delegate to the underlying java.lang.StringBuilder: + + def appendAll(xs: String): this.type = { underlying append xs this } - /** !!! This should create a new sequence. - */ - def +(x: Char): this.type = { +=(x); this } - /** Appends the string representation of the given argument, - * which is converted to a String with `String.valueOf`. - * - * @param x an `Any` object. - * @return this StringBuilder. - */ - def append(x: Any): StringBuilder = { + * which is converted to a String with `String.valueOf`. + * + * @param x an `Any` object. + * @return this StringBuilder. + */ + def append(x: Any): this.type = { underlying append String.valueOf(x) this } /** Appends the given String to this sequence. - * - * @param s a String. - * @return this StringBuilder. - */ - def append(s: String): StringBuilder = { + * + * @param s a String. + * @return this StringBuilder. + */ + def append(s: String): this.type = { underlying append s this } + /** Appends the given CharSequence to this sequence. + * + * @param cs a CharSequence. + * @return this StringBuilder. + */ + def append(cs: java.lang.CharSequence): this.type = { + underlying.append(cs match { + // Both cases call into append(), but java SB + // looks up type at runtime and has fast path for SB. + case s: StringBuilder => s.underlying + case _ => cs + }) + this + } + /** Appends the specified string builder to this sequence. - * - * @param sb - * @return - */ - def append(sb: StringBuilder): StringBuilder = { - underlying append sb + * + * @param s + * @return + */ + def append(s: StringBuilder): this.type = { + underlying append s.underlying this } - /** Appends all the Chars in the given Seq[Char] to this sequence. 
- * - * @param xs the characters to be appended. - * @return this StringBuilder. - */ - def appendAll(xs: TraversableOnce[Char]): StringBuilder = appendAll(xs.toArray) + /** Appends all the Chars in the given IterableOnce[Char] to this sequence. + * + * @param xs the characters to be appended. + * @return this StringBuilder. + */ + def appendAll(xs: IterableOnce[Char]): this.type = { + xs match { + case x: WrappedString => underlying append x.unwrap + case x: ArraySeq.ofChar => underlying append x.array + case x: StringBuilder => underlying append x.underlying + case _ => + val ks = xs.knownSize + if (ks != 0) { + val b = underlying + if (ks > 0) b.ensureCapacity(b.length + ks) + val it = xs.iterator + while (it.hasNext) { b append it.next() } + } + } + this + } /** Appends all the Chars in the given Array[Char] to this sequence. - * - * @param xs the characters to be appended. - * @return a reference to this object. - */ - def appendAll(xs: Array[Char]): StringBuilder = { + * + * @param xs the characters to be appended. + * @return a reference to this object. + */ + def appendAll(xs: Array[Char]): this.type = { underlying append xs this } /** Appends a portion of the given Array[Char] to this sequence. - * - * @param xs the Array containing Chars to be appended. - * @param offset the index of the first Char to append. - * @param len the numbers of Chars to append. - * @return this StringBuilder. - */ - def appendAll(xs: Array[Char], offset: Int, len: Int): StringBuilder = { + * + * @param xs the Array containing Chars to be appended. + * @param offset the index of the first Char to append. + * @param len the numbers of Chars to append. + * @return this StringBuilder. + */ + def appendAll(xs: Array[Char], offset: Int, len: Int): this.type = { underlying.append(xs, offset, len) this } /** Append the String representation of the given primitive type - * to this sequence. The argument is converted to a String with - * String.valueOf. 
- * - * @param x a primitive value - * @return This StringBuilder. - */ - def append(x: Boolean): StringBuilder = { underlying append x ; this } - def append(x: Byte): StringBuilder = append(x.toInt) - def append(x: Short): StringBuilder = append(x.toInt) - def append(x: Int): StringBuilder = { underlying append x ; this } - def append(x: Long): StringBuilder = { underlying append x ; this } - def append(x: Float): StringBuilder = { underlying append x ; this } - def append(x: Double): StringBuilder = { underlying append x ; this } - def append(x: Char): StringBuilder = { underlying append x ; this } + * to this sequence. The argument is converted to a String with + * String.valueOf. + * + * @param x a primitive value + * @return This StringBuilder. + */ + def append(x: Boolean): this.type = { underlying append x ; this } + def append(x: Byte): this.type = append(x.toInt) + def append(x: Short): this.type = append(x.toInt) + def append(x: Int): this.type = { underlying append x ; this } + def append(x: Long): this.type = { underlying append x ; this } + def append(x: Float): this.type = { underlying append x ; this } + def append(x: Double): this.type = { underlying append x ; this } + def append(x: Char): this.type = { underlying append x ; this } /** Remove a subsequence of Chars from this sequence, starting at the - * given start index (inclusive) and extending to the end index (exclusive) - * or to the end of the String, whichever comes first. - * - * @param start The beginning index, inclusive. - * @param end The ending index, exclusive. - * @return This StringBuilder. - * @throws StringIndexOutOfBoundsException if start < 0 || start > end - */ - def delete(start: Int, end: Int): StringBuilder = { + * given start index (inclusive) and extending to the end index (exclusive) + * or to the end of the String, whichever comes first. + * + * @param start The beginning index, inclusive. + * @param end The ending index, exclusive. + * @return This StringBuilder. 
+ * @throws StringIndexOutOfBoundsException if start < 0 || start > end + */ + def delete(start: Int, end: Int): this.type = { underlying.delete(start, end) this } /** Replaces a subsequence of Chars with the given String. The semantics - * are as in delete, with the String argument then inserted at index 'start'. - * - * @param start The beginning index, inclusive. - * @param end The ending index, exclusive. - * @param str The String to be inserted at the start index. - * @return This StringBuilder. - * @throws StringIndexOutOfBoundsException if start < 0, start > length, or start > end - */ - def replace(start: Int, end: Int, str: String): StringBuilder = { + * are as in delete, with the String argument then inserted at index 'start'. + * + * @param start The beginning index, inclusive. + * @param end The ending index, exclusive. + * @param str The String to be inserted at the start index. + * @return This StringBuilder. + * @throws StringIndexOutOfBoundsException if start < 0, start > length, or start > end + */ + def replace(start: Int, end: Int, str: String): this.type = { underlying.replace(start, end, str) this } /** Inserts a subarray of the given Array[Char] at the given index - * of this sequence. - * - * @param index index at which to insert the subarray. - * @param str the Array from which Chars will be taken. - * @param offset the index of the first Char to insert. - * @param len the number of Chars from 'str' to insert. - * @return This StringBuilder. - * - * @throws StringIndexOutOfBoundsException if index < 0, index > length, - * offset < 0, len < 0, or (offset + len) > str.length. - */ - def insertAll(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder = { + * of this sequence. + * + * @param index index at which to insert the subarray. + * @param str the Array from which Chars will be taken. + * @param offset the index of the first Char to insert. + * @param len the number of Chars from 'str' to insert. 
+ * @return This StringBuilder. + * + * @throws StringIndexOutOfBoundsException if index < 0, index > length, + * offset < 0, len < 0, or (offset + len) > str.length. + */ + def insertAll(index: Int, str: Array[Char], offset: Int, len: Int): this.type = { underlying.insert(index, str, offset, len) this } /** Inserts the String representation (via String.valueOf) of the given - * argument into this sequence at the given index. + * argument into this sequence at the given index. + * + * @param index the index at which to insert. + * @param x a value. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insert(index: Int, x: Any): this.type = insert(index, String.valueOf(x)) + + /** Inserts the String into this character sequence. + * + * @param index the index at which to insert. + * @param x a String. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insert(index: Int, x: String): this.type = { + underlying.insert(index, x) + this + } + + /** Inserts the given Seq[Char] into this sequence at the given index. + * + * @param index the index at which to insert. + * @param xs the Seq[Char]. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insertAll(index: Int, xs: IterableOnce[Char]): this.type = + insertAll(index, (ArrayBuilder.make[Char] ++= xs).result()) + + /** Inserts the given Array[Char] into this sequence at the given index. + * + * @param index the index at which to insert. + * @param xs the Array[Char]. + * @return this StringBuilder. + * @throws StringIndexOutOfBoundsException if the index is out of bounds. + */ + def insertAll(index: Int, xs: Array[Char]): this.type = { + underlying.insert(index, xs) + this + } + + /** Calls String.valueOf on the given primitive value, and inserts the + * String at the given index. + * + * @param index the offset position. 
+ * @param x a primitive value. + * @return this StringBuilder. + */ + def insert(index: Int, x: Boolean): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Byte): this.type = insert(index, x.toInt) + def insert(index: Int, x: Short): this.type = insert(index, x.toInt) + def insert(index: Int, x: Int): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Long): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Float): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Double): this.type = insert(index, String.valueOf(x)) + def insert(index: Int, x: Char): this.type = insert(index, String.valueOf(x)) + + /** Sets the length of the character sequence. If the current sequence + * is shorter than the given length, it is padded with nulls; if it is + * longer, it is truncated. + * + * @param len the new length + * @throws IndexOutOfBoundsException if the argument is negative. + */ + def setLength(len: Int): Unit = underlying.setLength(len) + + def update(idx: Int, elem: Char): Unit = underlying.setCharAt(idx, elem) + + + /** Like reverse, but destructively updates the target StringBuilder. * - * @param index the index at which to insert. - * @param x a value. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. + * @return the reversed StringBuilder (same as the target StringBuilder) */ - def insert(index: Int, x: Any): StringBuilder = insert(index, String.valueOf(x)) + @deprecated("Use reverseInPlace instead", "2.13.0") + final def reverseContents(): this.type = reverseInPlace() - /** Inserts the String into this character sequence. + /** Like reverse, but destructively updates the target StringBuilder. * - * @param index the index at which to insert. - * @param x a String. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. 
+ * @return the reversed StringBuilder (same as the target StringBuilder) */ - def insert(index: Int, x: String): StringBuilder = { - underlying.insert(index, x) + def reverseInPlace(): this.type = { + underlying.reverse() this } - /** Inserts the given Seq[Char] into this sequence at the given index. + + /** Returns the current capacity, which is the size of the underlying array. + * A new array will be allocated if the current capacity is exceeded. * - * @param index the index at which to insert. - * @param xs the Seq[Char]. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. + * @return the capacity */ - def insertAll(index: Int, xs: TraversableOnce[Char]): StringBuilder = insertAll(index, xs.toArray) + def capacity: Int = underlying.capacity - /** Inserts the given Array[Char] into this sequence at the given index. + /** Ensure that the capacity is at least the given argument. + * If the argument is greater than the current capacity, new + * storage will be allocated with size equal to the given + * argument or to `(2 * capacity + 2)`, whichever is larger. * - * @param index the index at which to insert. - * @param xs the Array[Char]. - * @return this StringBuilder. - * @throws StringIndexOutOfBoundsException if the index is out of bounds. + * @param newCapacity the minimum desired capacity. */ - def insertAll(index: Int, xs: Array[Char]): StringBuilder = { - underlying.insert(index, xs) + def ensureCapacity(newCapacity: Int): Unit = { underlying.ensureCapacity(newCapacity) } + + /** Returns the Char at the specified index, counting from 0 as in Arrays. + * + * @param index the index to look up + * @return the Char at the given index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def charAt(index: Int): Char = underlying.charAt(index) + + /** Removes the Char at the specified index. The sequence is + * shortened by one. + * + * @param index The index to remove. 
+ * @return This StringBuilder. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def deleteCharAt(index: Int): this.type = { + underlying.deleteCharAt(index) this } - /** Calls String.valueOf on the given primitive value, and inserts the - * String at the given index. + /** Update the sequence at the given index to hold the specified Char. + * + * @param index the index to modify. + * @param ch the new Char. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def setCharAt(index: Int, ch: Char): this.type = { + underlying.setCharAt(index, ch) + this + } + + /** Returns a new String made up of a subsequence of this sequence, + * beginning at the given index and extending to the end of the sequence. + * + * target.substring(start) is equivalent to target.drop(start) * - * @param index the offset position. - * @param x a primitive value. - * @return this StringBuilder. + * @param start The starting index, inclusive. + * @return The new String. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + def substring(start: Int): String = underlying.substring(start, length) + + /** Returns a new String made up of a subsequence of this sequence, + * beginning at the start index (inclusive) and extending to the + * end index (exclusive). + * + * target.substring(start, end) is equivalent to target.slice(start, end).mkString + * + * @param start The beginning index, inclusive. + * @param end The ending index, exclusive. + * @return The new String. + * @throws StringIndexOutOfBoundsException If either index is out of bounds, + * or if start > end. + */ + def substring(start: Int, end: Int): String = underlying.substring(start, end) + + /** For implementing CharSequence. 
*/ - def insert(index: Int, x: Boolean): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Byte): StringBuilder = insert(index, x.toInt) - def insert(index: Int, x: Short): StringBuilder = insert(index, x.toInt) - def insert(index: Int, x: Int): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Long): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Float): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Double): StringBuilder = insert(index, String.valueOf(x)) - def insert(index: Int, x: Char): StringBuilder = insert(index, String.valueOf(x)) + def subSequence(start: Int, end: Int): java.lang.CharSequence = + underlying.substring(start, end) /** Finds the index of the first occurrence of the specified substring. * @@ -403,52 +480,6 @@ final class StringBuilder(private val underlying: JavaStringBuilder) */ def lastIndexOf(str: String, fromIndex: Int): Int = underlying.lastIndexOf(str, fromIndex) - /** Creates a new StringBuilder with the reversed contents of this one. - * If surrogate pairs are present, they are treated as indivisible units: each - * pair will appear in the same order in the updated sequence. - * - * @return the reversed StringBuilder - */ - @migration("`reverse` returns a new instance. Use `reverseContents` to update in place and return that StringBuilder itself.", "2.8.0") - override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying).reverse) - - override def clone(): StringBuilder = new StringBuilder(new JavaStringBuilder(underlying)) - - /** Like reverse, but destructively updates the target StringBuilder. - * - * @return the reversed StringBuilder (same as the target StringBuilder) - */ - def reverseContents(): StringBuilder = { - underlying.reverse() - this - } - - /** Returns a new String representing the data in this sequence. 
- * - * @note because toString is inherited from AnyRef and used for - * many purposes, it is better practice to call mkString - * to obtain a StringBuilder result. - * @return the current contents of this sequence as a String - */ - override def toString = underlying.toString - - /** Returns a new String representing the data in this sequence. - * - * @return the current contents of this sequence as a String - */ - override def mkString = toString - - /** Returns the result of this Builder (a String). - * - * If this method is called multiple times, each call will result in a snapshot of the buffer at that point in time. - * In particular, a `StringBuilder` can be used to build multiple independent strings by emptying the buffer with `clear` - * after each call to `result`. - * - * @return the string assembled by this StringBuilder - */ - def result(): String = toString - - /** Tests whether this builder is empty. * * This method is required for JDK15+ compatibility @@ -459,5 +490,6 @@ final class StringBuilder(private val underlying: JavaStringBuilder) } object StringBuilder { + @deprecated("Use `new StringBuilder()` instead of `StringBuilder.newBuilder`", "2.13.0") def newBuilder = new StringBuilder } diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala deleted file mode 100644 index 4f205b7fff25..000000000000 --- a/src/library/scala/collection/mutable/Subscriber.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -/** `Subscriber[A, B]` objects may subscribe to events of type `A` - * published by an object of type `B`. 
`B` is typically a subtype of - * [[scala.collection.mutable.Publisher]]. - * - * @author Matthias Zenger - * @author Martin Odersky - * @since 1 - */ -trait Subscriber[-Evt, -Pub] { - def notify(pub: Pub, event: Evt): Unit -} diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala deleted file mode 100644 index 165ac9e72ec9..000000000000 --- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import script._ - -/** This class should be used as a mixin. It synchronizes the `Buffer` - * methods of the class into which it is mixed in. - * - * @tparam A type of the elements contained in this buffer. - * - * @author Matthias Zenger - * @since 1 - * @define Coll `SynchronizedBuffer` - * @define coll synchronized buffer - */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") -trait SynchronizedBuffer[A] extends Buffer[A] { - - import scala.collection.Traversable - - abstract override def length: Int = synchronized { - super.length - } - - abstract override def iterator: Iterator[A] = synchronized { - super.iterator - } - - abstract override def apply(n: Int): A = synchronized { - super.apply(n) - } - - /** Append a single element to this buffer. - * - * @param elem the element to append. 
- */ - abstract override def +=(elem: A): this.type = synchronized[this.type] { - super.+=(elem) - } - - /** Appends a number of elements provided by a traversable object via - * its `foreach` method. - * The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def ++(xs: GenTraversableOnce[A]): Self = synchronized { - super.++(xs) - } - - /** Appends a number of elements provided by a traversable object - * via its `foreach` method. - * - * @param xs the iterable object. - */ - override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { - super.++=(xs) - } - - /** Appends a sequence of elements to this buffer. - * - * @param elems the elements to append. - */ - override def append(elems: A*): Unit = synchronized { - super.++=(elems) - } - - /** Appends a number of elements provided by a traversable object - * via its `foreach` method. - * - * @param xs the traversable object. - */ - override def appendAll(xs: TraversableOnce[A]): Unit = synchronized { - super.appendAll(xs) - } - - /** Prepend a single element to this buffer and return - * the identity of the buffer. - * - * @param elem the element to append. - */ - abstract override def +=:(elem: A): this.type = synchronized[this.type] { - super.+=:(elem) - } - - /** Prepends a number of elements provided by a traversable object - * via its `foreach` method. The identity of the buffer is returned. - * - * @param xs the traversable object. - */ - override def ++=:(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=:(xs) } - - /** Prepend an element to this list. - * - * @param elems the elements to prepend. - */ - override def prepend(elems: A*): Unit = prependAll(elems) - - /** Prepends a number of elements provided by a traversable object - * via its `foreach` method. The identity of the buffer is returned. - * - * @param xs the traversable object. 
- */ - override def prependAll(xs: TraversableOnce[A]): Unit = synchronized { - super.prependAll(xs) - } - - /** Inserts new elements at the index `n`. Opposed to method `update`, - * this method will not replace an element with a one. - * Instead, it will insert the new elements at index `n`. - * - * @param n the index where a new element will be inserted. - * @param elems the new elements to insert. - */ - override def insert(n: Int, elems: A*): Unit = synchronized { - super.insertAll(n, elems) - } - - /** Inserts new elements at the index `n`. Opposed to method `update`, - * this method will not replace an element with a one. - * Instead, it will insert a new element at index `n`. - * - * @param n the index where a new element will be inserted. - * @param xs the traversable object providing all elements to insert. - */ - abstract override def insertAll(n: Int, xs: Traversable[A]): Unit = synchronized { - super.insertAll(n, xs) - } - - /** Replace element at index `n` with the new element `newelem`. - * - * @param n the index of the element to replace. - * @param newelem the new element. - */ - abstract override def update(n: Int, newelem: A): Unit = synchronized { - super.update(n, newelem) - } - - /** Removes the element on a given index position. - * - * @param n the index which refers to the element to delete. - */ - abstract override def remove(n: Int): A = synchronized { - super.remove(n) - } - - /** Clears the buffer contents. - */ - abstract override def clear(): Unit = synchronized { - super.clear() - } - - @deprecated("scripting is deprecated", "2.11.0") - override def <<(cmd: Message[A]): Unit = synchronized { - super.<<(cmd) - } - - /** Return a clone of this buffer. - * - * @return an `ArrayBuffer` with the same elements. - */ - override def clone(): Self = synchronized { - super.clone() - } - - /** The `hashCode` method always yields an error, since it is not - * safe to use buffers as keys in hash tables. - * - * @return never. 
- */ - override def hashCode(): Int = synchronized { - super.hashCode() - } -} diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala deleted file mode 100644 index 7b5ebfc9652b..000000000000 --- a/src/library/scala/collection/mutable/SynchronizedMap.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.migration - -/** This class should be used as a mixin. It synchronizes the `Map` - * functions of the class into which it is mixed in. - * - * @tparam A type of the keys contained in this map. - * @tparam B type of the values associated with keys. - * - * @author Matthias Zenger, Martin Odersky - * @since 1 - * @define Coll `SynchronizedMap` - * @define coll synchronized map - */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. 
Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0") -trait SynchronizedMap[A, B] extends Map[A, B] { - - abstract override def get(key: A): Option[B] = synchronized { super.get(key) } - abstract override def iterator: Iterator[(A, B)] = synchronized { super.iterator } - abstract override def += (kv: (A, B)): this.type = synchronized[this.type] { super.+=(kv) } - abstract override def -= (key: A): this.type = synchronized[this.type] { super.-=(key) } - - override def size: Int = synchronized { super.size } - override def put(key: A, value: B): Option[B] = synchronized { super.put(key, value) } - override def update(key: A, value: B): Unit = synchronized { super.update(key, value) } - override def remove(key: A): Option[B] = synchronized { super.remove(key) } - override def clear(): Unit = synchronized { super.clear() } - override def getOrElseUpdate(key: A, default: => B): B = synchronized { super.getOrElseUpdate(key, default) } - override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) } - override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) } - @migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0") - override def values: scala.collection.Iterable[B] = synchronized { super.values } - override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator } - override def clone(): Self = synchronized { super.clone() } - override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) } - override def apply(key: A): B = synchronized { super.apply(key) } - override def keySet: scala.collection.Set[A] = synchronized { super.keySet } - @migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0") - override def keys: scala.collection.Iterable[A] = synchronized { super.keys } - override def keysIterator: Iterator[A] = synchronized { super.keysIterator } - override def isEmpty: Boolean = synchronized { 
super.isEmpty } - override def contains(key: A): Boolean = synchronized {super.contains(key) } - override def isDefinedAt(key: A) = synchronized { super.isDefinedAt(key) } - - // @deprecated("see Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) } - // can't override -, -- same type! - // @deprecated override def -(key: A): Self = synchronized { super.-(key) } - - // !!! todo: also add all other methods -} - diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala deleted file mode 100644 index af16dfa66175..000000000000 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - - -/** This is a synchronized version of the `Queue[T]` class. It - * implements a data structure that allows one to insert and retrieve - * elements in a first-in-first-out (FIFO) manner. - * - * @tparam A type of elements contained in this synchronized queue. - * - * @author Matthias Zenger - * @since 1 - * @define Coll `SynchronizedQueue` - * @define coll synchronized queue - */ -@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") -class SynchronizedQueue[A] extends Queue[A] { - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = synchronized { super.isEmpty } - - /** Inserts a single element at the end of the queue. 
- * - * @param elem the element to insert - */ - override def +=(elem: A): this.type = synchronized[this.type] { super.+=(elem) } - - /** Adds all elements provided by a `TraversableOnce` object - * at the end of the queue. The elements are prepended in the order they - * are given out by the iterator. - * - * @param xs a traversable object - */ - override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=(xs) } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - override def enqueue(elems: A*): Unit = synchronized { super.++=(elems) } - - /** Returns the first element in the queue, and removes this element - * from the queue. - * - * @return the first element of the queue. - */ - override def dequeue(): A = synchronized { super.dequeue() } - - /** Returns the first element in the queue which satisfies the - * given predicate, and removes this element from the queue. - * - * @param p the predicate used for choosing the first element - * @return the first element of the queue for which p yields true - */ - override def dequeueFirst(p: A => Boolean): Option[A] = synchronized { super.dequeueFirst(p) } - - /** Returns all elements in the queue which satisfy the - * given predicate, and removes those elements from the queue. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. - */ - override def dequeueAll(p: A => Boolean): Seq[A] = synchronized { super.dequeueAll(p) } - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. - */ - override def front: A = synchronized { super.front } - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - override def clear(): Unit = synchronized { super.clear() } - - /** Checks if two queues are structurally identical. 
- * - * @return true, iff both queues contain the same sequence of elements. - */ - override def equals(that: Any): Boolean = synchronized { super.equals(that) } - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ - override def toString() = synchronized { super.toString() } -} diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala deleted file mode 100644 index b73ea7501817..000000000000 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import script._ - -/** This class should be used as a mixin. It synchronizes the `Set` - * functions of the class into which it is mixed in. - * - * @tparam A type of the elements contained in this synchronized set. - * - * @author Matthias Zenger - * @since 1 - * @define Coll `SynchronizedSet` - * @define coll synchronized set - */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. 
Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0") -trait SynchronizedSet[A] extends Set[A] { - abstract override def size: Int = synchronized { - super.size - } - - override def isEmpty: Boolean = synchronized { - super.isEmpty - } - - abstract override def contains(elem: A) = synchronized { - super.contains(elem) - } - - abstract override def +=(elem: A): this.type = synchronized[this.type] { - super.+=(elem) - } - - override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { - super.++=(xs) - } - - abstract override def -=(elem: A): this.type = synchronized[this.type] { - super.-=(elem) - } - - override def --=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { - super.--=(xs) - } - - override def update(elem: A, included: Boolean): Unit = synchronized { - super.update(elem, included) - } - - override def add(elem: A): Boolean = synchronized { - super.add(elem) - } - - override def remove(elem: A): Boolean = synchronized { - super.remove(elem) - } - - override def intersect(that: scala.collection.GenSet[A]) = synchronized { - super.intersect(that) - } - - abstract override def clear(): Unit = synchronized { - super.clear() - } - - override def subsetOf(that: scala.collection.GenSet[A]) = synchronized { - super.subsetOf(that) - } - - override def foreach[U](f: A => U) = synchronized { - super.foreach(f) - } - - override def retain(p: A => Boolean) = synchronized { - super.retain(p) - } - - override def toList: List[A] = synchronized { - super.toList - } - - override def toString = synchronized { - super.toString - } - - @deprecated("scripting is deprecated", "2.11.0") - override def <<(cmd: Message[A]): Unit = synchronized { - super.<<(cmd) - } - - override def clone(): Self = synchronized { - super.clone() - } -} diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala deleted file mode 100644 index 
555bab709079..000000000000 --- a/src/library/scala/collection/mutable/SynchronizedStack.scala +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - - -/** This is a synchronized version of the `Stack[T]` class. It - * implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements contained in this stack. - * - * @author Matthias Zenger - * @since 1 - * @define Coll `SynchronizedStack` - * @define coll synchronized stack - */ -@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0") -class SynchronizedStack[A] extends Stack[A] { - - /** Checks if the stack is empty. - * - * @return true, iff there is no element on the stack - */ - override def isEmpty: Boolean = synchronized { super.isEmpty } - - /** Pushes a single element on top of the stack. - * - * @param elem the element to push onto the stack - */ - override def push(elem: A): this.type = synchronized[this.type] { super.push(elem) } - - /** Push two or more elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elem1 the first element to push. - * @param elem2 the second element to push. - * @param elems the element sequence that will be pushed. - * @return the stack with the new elements on top. - */ - override def push(elem1: A, elem2: A, elems: A*): this.type = synchronized[this.type] { super.push(elem1, elem2, elems: _*) } - - /** Pushes all elements provided by a traversable object - * on top of the stack. 
The elements are pushed in the order the - * traversable object is traversed. - * - * @param xs a traversable object - */ - override def pushAll(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.pushAll(elems) } - - /** Returns the top element of the stack. This method will not remove - * the element from the stack. An error is signaled if there is no - * element on the stack. - * - * @return the top element - */ - override def top: A = synchronized { super.top } - - /** Removes the top element from the stack. - */ - override def pop(): A = synchronized { super.pop() } - - /** - * Removes all elements from the stack. After this operation completed, - * the stack will be empty. - */ - override def clear(): Unit = synchronized { super.clear() } - - /** Returns an iterator over all elements on the stack. This iterator - * is stable with respect to state changes in the stack object; i.e. - * such changes will not be reflected in the iterator. The iterator - * issues elements in the order they were inserted into the stack - * (FIFO order). - * - * @return an iterator over all stack elements. - */ - override def iterator: Iterator[A] = synchronized { super.iterator } - - /** Creates a list of all stack elements in FIFO order. - * - * @return the created list. - */ - override def toList: List[A] = synchronized { super.toList } - - /** Returns a textual representation of a stack as a string. - * - * @return the string representation of this stack. - */ - override def toString = synchronized { super.toString } -} diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala deleted file mode 100644 index 511a60049dfa..000000000000 --- a/src/library/scala/collection/mutable/Traversable.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import generic._ - -/** A trait for traversable collections that can be mutated. - * $traversableInfo - * @define mutability mutable - */ -trait Traversable[A] extends scala.collection.Traversable[A] -// with GenTraversable[A] - with GenericTraversableTemplate[A, Traversable] - with TraversableLike[A, Traversable[A]] - with Mutable { - override def companion: GenericCompanion[Traversable] = Traversable - override def seq: Traversable[A] = this -} - -/** $factoryInfo - * The current default implementation of a $Coll is an `ArrayBuffer`. - * @define coll mutable traversable collection - * @define Coll `mutable.Traversable` - */ -object Traversable extends TraversableFactory[Traversable] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - def newBuilder[A]: Builder[A, Traversable[A]] = new ArrayBuffer -} - - diff --git a/src/library/scala/collection/mutable/TreeMap.scala b/src/library/scala/collection/mutable/TreeMap.scala index 5a13be23e3e9..076239278299 100644 --- a/src/library/scala/collection/mutable/TreeMap.scala +++ b/src/library/scala/collection/mutable/TreeMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,155 +14,201 @@ package scala package collection package mutable -import scala.collection.generic._ +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{RedBlackTree => RB} /** - * $factoryInfo - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -object TreeMap extends MutableSortedMapFactory[TreeMap] { + * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { - def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) + override def sortedMapFactory: TreeMap.type = TreeMap - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = - new SortedMapCanBuildFrom[A, B] -} + /** + * Creates an empty `TreeMap`. + * @param ord the implicit ordering used to compare objects of type `K`. + * @return an empty `TreeMap`. + */ + def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) -/** - * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. 
- * @tparam A the type of the keys contained in this tree map. - * @tparam B the type of the values associated with the keys. - * - * @author Rui Gonçalves - * @since 2.12 - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -@SerialVersionUID(-2558985573956740112L) -sealed class TreeMap[A, B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A]) - extends AbstractSortedMap[A, B] - with SortedMap[A, B] - with MapLike[A, B, TreeMap[A, B]] - with SortedMapLike[A, B, TreeMap[A, B]] - with Serializable { + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree) + } - /** - * Creates an empty `TreeMap`. - * @param ord the implicit ordering used to compare objects of type `A`. - * @return an empty `TreeMap`. - */ - def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, None) + } + + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, None) + } - override def empty = TreeMap.empty - override protected[this] def newBuilder = TreeMap.newBuilder[A, B] + def keysIteratorFrom(start: K): Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, Some(start)) + } + + def iteratorFrom(start: K): Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree, Some(start)) + } + + override def valuesIteratorFrom(start: K): Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, Some(start)) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( + size, tree.root, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import 
scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } + + def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } - /** - * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and - * vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. 
That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMapView(from, until) - - def -=(key: A): this.type = { RB.delete(tree, key); this } - def +=(kv: (A, B)): this.type = { RB.insert(tree, kv._1, kv._2); this } - - def get(key: A) = RB.get(tree, key) - - def iterator = RB.iterator(tree) - def iteratorFrom(start: A) = RB.iterator(tree, Some(start)) - def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start)) - def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, Some(start)) - - override def size = RB.size(tree) - override def isEmpty = RB.isEmpty(tree) - override def contains(key: A) = RB.contains(tree, key) - - override def head = RB.min(tree).get - override def headOption = RB.min(tree) - override def last = RB.max(tree).get - override def lastOption = RB.max(tree) - - override def keysIterator = RB.keysIterator(tree) - override def valuesIterator = RB.valuesIterator(tree) - - override def foreach[U](f: ((A, B)) => U): Unit = RB.foreach(tree, f) - override def transform(f: (A, B) => B) = { RB.transform(tree, f); this } override def clear(): Unit = RB.clear(tree) - override def stringPrefix = "TreeMap" + def get(key: K): Option[V] = RB.get(tree, key) /** - * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. 
- * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - @SerialVersionUID(2219159283273389116L) - private[this] final class TreeMapView(from: Option[A], until: Option[A]) extends TreeMap[A, B](tree) { + * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and + * vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. 
+ */ + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def contains(key: K): Boolean = RB.contains(tree, key) + + override def head: (K, V) = RB.min(tree).get + + override def last: (K, V) = RB.max(tree).get + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) + + override protected[this] def className: String = "TreeMap" + + + /** + * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ - private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). 
+ */ + private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) case (None, _) => newFrom case _ => from } /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ - private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) case (None, _) => newUntil case _ => until } /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ - private[this] def isInsideViewBounds(key: A): Boolean = { + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ + private[this] def isInsideViewBounds(key: K): Boolean = { val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 afterFrom && beforeUntil } - override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = - new TreeMapView(pickLowerBound(from), pickUpperBound(until)) - - override def get(key: A) = if (isInsideViewBounds(key)) RB.get(tree, key) else None + override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = + new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) - override def iterator = RB.iterator(tree, from, until) - override def iteratorFrom(start: A) = RB.iterator(tree, pickLowerBound(Some(start)), until) - override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, pickLowerBound(Some(start)), until) + override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, 
key) else None - override def size = iterator.length - override def isEmpty = !iterator.hasNext - override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) + override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) + override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) + override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) + override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) + override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) + override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext + override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) override def head = headOption.get override def headOption = { @@ -185,15 +231,27 @@ sealed class TreeMap[A, B] private (tree: RB.Tree[A, B])(implicit val ordering: // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
- override def foreach[U](f: ((A, B)) => U): Unit = iterator.foreach(f) - override def transform(f: (A, B) => B) = { - iterator.foreach { case (key, value) => update(key, f(key, value)) } - this - } - - override def valuesIterator: Iterator[B] = RB.valuesIterator(tree, from, until) - override def keysIterator: Iterator[A] = RB.keysIterator(tree, from, until) + override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) override def clone() = super.clone().rangeImpl(from, until) } + +} + +/** + * $factoryInfo + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): TreeMap[K, V] = + Growable.from(empty[K, V], it) + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() + + def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + } diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 59d7e351d359..9820af9037ca 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -11,130 +11,113 @@ */ package scala -package collection -package mutable +package collection.mutable -import generic._ +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{RedBlackTree => RB} +import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} /** - * @define Coll `mutable.TreeSet` - * @define coll mutable tree set - * @factoryInfo - * Companion object of TreeSet providing factory related utilities. - * - * @author Lucien Pereira - * - */ -object TreeSet extends MutableSortedSetFactory[TreeSet] { - /** - * The empty set of this type - */ - def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]() - - /** $sortedMapCanBuildFromInfo */ - implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, TreeSet[A]] = - new SortedSetCanBuildFrom[A] -} - -/** - * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam A the type of the keys contained in this tree set. - * - * @author Rui Gonçalves - * @since 2.10 - * - * @define Coll mutable.TreeSet - * @define coll mutable tree set - */ + * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam A the type of the keys contained in this tree set. 
+ * + * @define Coll mutable.TreeSet + * @define coll mutable tree set + */ // Original API designed in part by Lucien Pereira -@SerialVersionUID(-3642111301929493640L) -sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) - extends AbstractSortedSet[A] - with SortedSet[A] - with SetLike[A, TreeSet[A]] - with SortedSetLike[A, TreeSet[A]] - with Serializable { +sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedIterableOps[A, Set, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { if (ordering eq null) throw new NullPointerException("ordering must not be null") /** - * Creates an empty `TreeSet`. - * @param ord the implicit ordering used to compare objects of type `A`. - * @return an empty `TreeSet`. - */ + * Creates an empty `TreeSet`. + * @param ord the implicit ordering used to compare objects of type `A`. + * @return an empty `TreeSet`. + */ def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) - override def empty = TreeSet.empty - override protected[this] def newBuilder = TreeSet.newBuilder[A] + override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet - /** - * Creates a ranged projection of this set. Any mutations in the ranged projection affect will update the original set - * and vice versa. - * - * Only keys between this projection's key range will ever appear as elements of this set, independently of whether - * the elements are added through the original set or through this view. That means that if one inserts an element in - * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. - * Mutations are always reflected in the original set, though. 
- * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetView(from, until) + def iterator: collection.Iterator[A] = RB.keysIterator(tree) - def -=(key: A): this.type = { RB.delete(tree, key); this } - def +=(elem: A): this.type = { RB.insert(tree, elem, null); this } + def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) - def contains(elem: A) = RB.contains(tree, elem) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[A, Null] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } - def iterator = RB.keysIterator(tree) - def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start)) - override def iteratorFrom(start: A) = RB.keysIterator(tree, Some(start)) + def addOne(elem: A): this.type = { + RB.insert(tree, elem, null) + this + } - override def size = RB.size(tree) - override def isEmpty = RB.isEmpty(tree) + def subtractOne(elem: A): this.type = { + RB.delete(tree, elem) + this + } + + def clear(): Unit = RB.clear(tree) + + def contains(elem: A): Boolean = RB.contains(tree, elem) + + def unconstrained: collection.Set[A] = this - override def head 
= RB.minKey(tree).get - override def headOption = RB.minKey(tree) - override def last = RB.maxKey(tree).get - override def lastOption = RB.maxKey(tree) + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) + override protected[this] def className: String = "TreeSet" - override def min[B >: A](implicit cmp: Ordering[B]): A = - if ((cmp == ordering) && nonEmpty) head else super.min(cmp) + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) - override def max[B >: A](implicit cmp: Ordering[B]): A = - if ((cmp == ordering) && nonEmpty) last else super.max(cmp) + override def head: A = RB.minKey(tree).get + + override def last: A = RB.maxKey(tree).get + + override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) + + override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - override def clear(): Unit = RB.clear(tree) - override def stringPrefix = "TreeSet" /** - * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. - * - * Only keys between this projection's key range will ever appear as elements of this set, independently of whether - * the elements are added through the original set or through this view. That means that if one inserts an element in - * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. - * Mutations are always reflected in the original set, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. 
- */ - @SerialVersionUID(7087824939194006086L) - private[this] final class TreeSetView(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { + * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. + * + * Only keys between this projection's key range will ever appear as elements of this set, independently of whether + * the elements are added through the original set or through this view. That means that if one inserts an element in + * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. + * Mutations are always reflected in the original set, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). + */ private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) case (None, _) => newFrom @@ -142,8 +125,8 @@ sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: } /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). 
+ */ private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) case (None, _) => newUntil @@ -151,8 +134,8 @@ sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: } /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ private[this] def isInsideViewBounds(key: A): Boolean = { val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 @@ -160,19 +143,19 @@ sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: } override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = - new TreeSetView(pickLowerBound(from), pickUpperBound(until)) + new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) override def iterator = RB.keysIterator(tree, from, until) - override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def size = iterator.length - override def isEmpty = !iterator.hasNext + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext - override def head = headOption.get - override def headOption = { + override def head: A = headOption.get + override def headOption: Option[A] = { val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) (elem, until) match { case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None @@ -180,7 +163,7 @@ sealed class TreeSet[A] private (tree: 
RB.Tree[A, Null])(implicit val ordering: } } - override def last = lastOption.get + override def last: A = lastOption.get override def lastOption = { val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) (elem, from) match { @@ -194,6 +177,42 @@ sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. override def foreach[U](f: A => U): Unit = iterator.foreach(f) - override def clone() = super.clone().rangeImpl(from, until) + override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) + + } + +} + +/** + * $factoryInfo + * @define Coll `mutable.TreeSet` + * @define coll mutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + + def from[E](it: IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => + new TreeSet[E](ts.tree.treeCopy()) + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) + case _ => + val t: RB.Tree[E, Null] = RB.Tree.empty + val i = it.iterator + while (i.hasNext) RB.insert(t, i.next(), null) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty + def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } + def result(): TreeSet[A] = new TreeSet[A](tree) + def clear(): Unit = { tree = RB.Tree.empty } } } diff --git 
a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala deleted file mode 100644 index aa0c06dabfd2..000000000000 --- a/src/library/scala/collection/mutable/Undoable.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - - -/** Classes that mix in the `Undoable` class provide an operation - * `undo` which can be used to undo the last operation. - * - * @author Matthias Zenger - * @since 1 - */ -trait Undoable { - /** Undo the last operation. - */ - def undo(): Unit -} diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala index e8f2bd98d65e..4aecac001505 100644 --- a/src/library/scala/collection/mutable/UnrolledBuffer.scala +++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,51 +10,51 @@ * additional information regarding copyright ownership. */ -package scala -package collection.mutable +package scala.collection +package mutable -import scala.collection.AbstractIterator -import scala.collection.Iterator -import scala.collection.generic._ import scala.annotation.tailrec +import scala.collection.generic.{CommonErrors, DefaultSerializable} import scala.reflect.ClassTag +import scala.collection.immutable.Nil /** A buffer that stores elements in an unrolled linked list. - * - * Unrolled linked lists store elements in linked fixed size - * arrays. 
- * - * Unrolled buffers retain locality and low memory overhead - * properties of array buffers, but offer much more efficient - * element addition, since they never reallocate and copy the - * internal array. - * - * However, they provide `O(n/m)` complexity random access, - * where `n` is the number of elements, and `m` the size of - * internal array chunks. - * - * Ideal to use when: - * - elements are added to the buffer and then all of the - * elements are traversed sequentially - * - two unrolled buffers need to be concatenated (see `concat`) - * - * Better than singly linked lists for random access, but - * should still be avoided for such a purpose. - * - * @define coll unrolled buffer - * @define Coll `UnrolledBuffer` - * @author Aleksandar Prokopec - * - */ -@SerialVersionUID(1L) + * + * Unrolled linked lists store elements in linked fixed size + * arrays. + * + * Unrolled buffers retain locality and low memory overhead + * properties of array buffers, but offer much more efficient + * element addition, since they never reallocate and copy the + * internal array. + * + * However, they provide `O(n/m)` complexity random access, + * where `n` is the number of elements, and `m` the size of + * internal array chunks. + * + * Ideal to use when: + * - elements are added to the buffer and then all of the + * elements are traversed sequentially + * - two unrolled buffers need to be concatenated (see `concat`) + * + * Better than singly linked lists for random access, but + * should still be avoided for such a purpose. 
+ * + * @define coll unrolled buffer + * @define Coll `UnrolledBuffer` + * + */ +@SerialVersionUID(3L) sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) -extends scala.collection.mutable.AbstractBuffer[T] - with scala.collection.mutable.Buffer[T] - with scala.collection.mutable.BufferLike[T, UnrolledBuffer[T]] - with GenericClassTagTraversableTemplate[T, UnrolledBuffer] - with scala.collection.mutable.Builder[T, UnrolledBuffer[T]] - with Serializable -{ + extends AbstractBuffer[T] + with Buffer[T] + with Seq[T] + with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] + with Builder[T, UnrolledBuffer[T]] + with DefaultSerializable { + import UnrolledBuffer.Unrolled @transient private var headptr = newUnrolled @@ -67,34 +67,37 @@ extends scala.collection.mutable.AbstractBuffer[T] private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last private[collection] def size_=(s: Int) = sz = s - protected[this] override def newBuilder = new UnrolledBuffer[T] + protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer + protected def iterableEvidence: ClassTag[T] = tag + + override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged protected def newUnrolled = new Unrolled[T](this) // The below would allow more flexible behavior without requiring inheritance // that is risky because all the important internals are private. // private var myLengthPolicy: Int => Int = x => x - // + // // /** Specifies how the array lengths should vary. - // * + // * // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length // * policy can be given that changes this scheme to, for instance, an // * exponential growth. 
- // * + // * // * @param nextLength computes the length of the next array from the length of the latest one // */ // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) - def classTagCompanion = UnrolledBuffer + def classTagCompanion: UnrolledBuffer.type = UnrolledBuffer /** Concatenates the target unrolled buffer to this unrolled buffer. - * - * The specified buffer `that` is cleared after this operation. This is - * an O(1) operation. - * - * @param that the unrolled buffer whose elements are added to this buffer - */ + * + * The specified buffer `that` is cleared after this operation. This is + * an O(1) operation. + * + * @param that the unrolled buffer whose elements are added to this buffer + */ def concat(that: UnrolledBuffer[T]) = { // bind the two together if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr @@ -111,13 +114,13 @@ extends scala.collection.mutable.AbstractBuffer[T] this } - def +=(elem: T) = { + def addOne(elem: T) = { lastptr = lastptr.append(elem) sz += 1 this } - def clear() { + def clear(): Unit = { headptr = newUnrolled lastptr = headptr sz = 0 @@ -128,7 +131,7 @@ extends scala.collection.mutable.AbstractBuffer[T] var node: Unrolled[T] = headptr scan() - private def scan() { + private def scan(): Unit = { pos += 1 while (pos >= node.size) { pos = 0 @@ -137,7 +140,7 @@ extends scala.collection.mutable.AbstractBuffer[T] } } def hasNext = node ne null - def next = if (hasNext) { + def next() = if (hasNext) { val r = node.array(pos) scan() r @@ -147,43 +150,76 @@ extends scala.collection.mutable.AbstractBuffer[T] // this should be faster than the iterator override def foreach[U](f: T => U) = headptr.foreach(f) - def result = this + def result() = this def length = sz + override def knownSize: Int = sz + def apply(idx: Int) = if (idx >= 0 && idx < sz) headptr(idx) - else throw new IndexOutOfBoundsException(idx.toString) + else throw 
CommonErrors.indexOutOfBounds(index = idx, max = sz - 1) def update(idx: Int, newelem: T) = if (idx >= 0 && idx < sz) headptr(idx) = newelem - else throw new IndexOutOfBoundsException(idx.toString) + else throw CommonErrors.indexOutOfBounds(index = idx, max = sz - 1) + + /** Replace the contents of this $coll with the mapped result. + * + * @param f the mapping function + * @return this $coll + */ + def mapInPlace(f: T => T): this.type = { + headptr.mapInPlace(f) + this + } def remove(idx: Int) = if (idx >= 0 && idx < sz) { sz -= 1 headptr.remove(idx, this) - } else throw new IndexOutOfBoundsException(idx.toString) + } else throw CommonErrors.indexOutOfBounds(index = idx, max = sz - 1) + + @tailrec final def remove(idx: Int, count: Int): Unit = + if (count > 0) { + remove(idx) + remove(idx, count-1) + } - def +=:(elem: T) = { + def prepend(elem: T) = { headptr = headptr prepend elem sz += 1 this } - def insertAll(idx: Int, elems: scala.collection.Traversable[T]) = + def insert(idx: Int, elem: T): Unit = + insertAll(idx, elem :: Nil) + + def insertAll(idx: Int, elems: IterableOnce[T]): Unit = if (idx >= 0 && idx <= sz) { - headptr.insertAll(idx, elems, this) - sz += elems.size - } else throw new IndexOutOfBoundsException(idx.toString) + sz += headptr.insertAll(idx, elems, this) + } else throw CommonErrors.indexOutOfBounds(index = idx, max = sz - 1) + + override def subtractOne(elem: T): this.type = { + if (headptr.subtractOne(elem, this)) { + sz -= 1 + } + this + } + + def patchInPlace(from: Int, patch: collection.IterableOnce[T], replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } - private def writeObject(out: java.io.ObjectOutputStream) { + private def writeObject(out: java.io.ObjectOutputStream): Unit = { out.defaultWriteObject out writeInt sz for (elem <- this) out writeObject elem } - private def readObject(in: java.io.ObjectInputStream) { + private def readObject(in: java.io.ObjectInputStream): Unit = { 
in.defaultReadObject val num = in.readInt @@ -200,22 +236,32 @@ extends scala.collection.mutable.AbstractBuffer[T] override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this - override def stringPrefix = "UnrolledBuffer" + override protected[this] def className = "UnrolledBuffer" } -object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { - /** $genericCanBuildFromInfo */ - implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Coll, T, UnrolledBuffer[T]] = - new GenericCanBuildFrom[T] - def newBuilder[T](implicit t: ClassTag[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T] +@SerialVersionUID(3L) +object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => + + val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) + + def empty[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + def from[A : ClassTag](source: scala.collection.IterableOnce[A]): UnrolledBuffer[A] = newBuilder[A].addAll(source) + + def newBuilder[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + final val waterline: Int = 50 + + final def waterlineDenom: Int = 100 + + @deprecated("Use waterlineDenom instead.", "2.13.0") + final val waterlineDelim: Int = waterlineDenom - val waterline = 50 - val waterlineDelim = 100 // TODO -- fix this name! It's a denominator, not a delimiter. (But it's part of the API so we can't just change it.) private[collection] val unrolledlength = 32 /** Unrolled buffer node. 
- */ + */ class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) @@ -231,7 +277,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { next = new Unrolled[T](0, new Array[T](nextlength), null, buff) next append elem } - def foreach[U](f: T => U) { + def foreach[U](f: T => U): Unit = { var unrolled = this var i = 0 while (unrolled ne null) { @@ -246,6 +292,21 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { unrolled = unrolled.next } } + def mapInPlace(f: T => T): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + chunkarr(i) = f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } @tailrec final def apply(idx: Int): T = if (idx < size) array(idx) else next.apply(idx - size) @tailrec final def update(idx: Int, newelem: T): Unit = @@ -268,7 +329,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { newhead } // shifts right assuming enough space - private def shiftright() { + private def shiftright(): Unit = { var i = size - 1 while (i >= 0) { array(i + 1) = array(i) @@ -286,8 +347,21 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { if (tryMergeWithNext()) buffer.lastPtr = this r } else next.remove(idx - size, buffer) + + @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { + var i = 0 + while (i < size) { + if(array(i) == elem) { + remove(i, buffer) + return true + } + i += 1 + } + if(next ne null) next.subtractOne(elem, buffer) else false + } + // shifts left elements after `leftb` (overwrites `leftb`) - private def 
shiftleft(leftb: Int) { + private def shiftleft(leftb: Int): Unit = { var i = leftb while (i < (size - 1)) { array(i) = array(i + 1) @@ -295,7 +369,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { } nullout(i, i + 1) } - protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDelim)) { + protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { // copy the next array, then discard the next node Array.copy(next.array, 0, array, size, next.size) size = size + next.size @@ -303,36 +377,47 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { if (next eq null) true else false // checks if last node was thrown out } else false - @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = { + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T], buffer: UnrolledBuffer[T]): Int = { if (idx < size) { - // divide this node at the appropriate position and insert all into head - // update new next - val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) - Array.copy(array, idx, newnextnode.array, 0, size - idx) - newnextnode.size = size - idx - newnextnode.next = next - - // update this - nullout(idx, size) - size = idx - next = null - - // insert everything from iterable to this - var curr = this - for (elem <- t) curr = curr append elem - curr.next = newnextnode - - // try to merge the last node of this with the newnextnode and fix tail pointer if needed - if (curr.tryMergeWithNext()) buffer.lastPtr = curr - else if (newnextnode.next eq null) buffer.lastPtr = newnextnode + // divide this node at the appropriate position and insert all into head + // update new next + val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) + Array.copy(array, idx, newnextnode.array, 0, size - idx) + 
newnextnode.size = size - idx + newnextnode.next = next + + // update this + nullout(idx, size) + size = idx + next = null + + // insert everything from iterable to this + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + curr.next = newnextnode + + // try to merge the last node of this with the newnextnode and fix tail pointer if needed + if (curr.tryMergeWithNext()) buffer.lastPtr = curr + else if (newnextnode.next eq null) buffer.lastPtr = newnextnode + appended } else if (idx == size || (next eq null)) { - var curr = this - for (elem <- t) curr = curr append elem + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + appended } else next.insertAll(idx - size, t, buffer) } - private def nullout(from: Int, until: Int) { + + private def nullout(from: Int, until: Int): Unit = { var idx = from while (idx < until) { array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! 
@@ -349,15 +434,14 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] { tryMergeWithNext() } - override def toString = array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") + override def toString: String = + array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") } - } - // This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: // Todo -- revisit whether inheritance is the best way to achieve this functionality private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz - protected override def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) + override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) } diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala index 02ee46d5762d..ae0230c8ab83 100644 --- a/src/library/scala/collection/mutable/WeakHashMap.scala +++ b/src/library/scala/collection/mutable/WeakHashMap.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,48 +14,42 @@ package scala package collection package mutable -import generic._ -import convert.Wrappers._ +import scala.annotation.nowarn +import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} /** A hash map with references to entries which are weakly reachable. 
Entries are * removed from this map when the key is no longer (strongly) referenced. This class wraps * `java.util.WeakHashMap`. * - * @tparam A type of keys contained in this map - * @tparam B type of values associated with the keys + * @tparam K type of keys contained in this map + * @tparam V type of values associated with the keys * - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] * section on `Weak Hash Maps` for more information. * * @define Coll `WeakHashMap` * @define coll weak hash map - * @define thatinfo the class of the returned collection. In the standard library configuration, - * `That` is always `WeakHashMap[A, B]` if the elements contained in the resulting collection are - * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[WeakHashMap, (A, B), WeakHashMap[A, B]]` - * is defined in object `WeakHashMap`. Otherwise, `That` resolves to the most specific type that doesn't have - * to contain pairs of type `(A, B)`, which is `Iterable`. - * @define bfinfo an implicit value of class `CanBuildFrom` which determines the - * result class `That` from the current representation type `Repr` - * and the new element type `B`. This is usually the `canBuildFrom` value - * defined in object `WeakHashMap`. 
* @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(-853182442555455877L) -class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap) - with JMapWrapperLike[A, B, WeakHashMap[A, B]] { - override def empty = new WeakHashMap[A, B] +@SerialVersionUID(3L) +class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) + with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] + with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { + override def empty = new WeakHashMap[K, V] + override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "WeakHashMap" } /** $factoryInfo * @define Coll `WeakHashMap` * @define coll weak hash map */ -object WeakHashMap extends MutableMapFactory[WeakHashMap] { - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), WeakHashMap[A, B]] = - ReusableCBF.asInstanceOf[CanBuildFrom[Coll, (A, B), WeakHashMap[A, B]]] - private[this] val ReusableCBF = new MapCanBuildFrom[Nothing, Nothing] - def empty[A, B]: WeakHashMap[A, B] = new WeakHashMap[A, B] +@SerialVersionUID(3L) +object WeakHashMap extends MapFactory[WeakHashMap] { + def empty[K, V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[K, V](it: collection.IterableOnce[(K, V)]): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) } diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala deleted file mode 100644 index bab94f8ae323..000000000000 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ /dev/null @@ -1,314 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.reflect.ClassTag -import scala.collection.generic._ -import scala.collection.parallel.mutable.ParArray -import scala.util.hashing.MurmurHash3 - -import java.util.Arrays - -/** - * A class representing `Array[T]`. - * - * @tparam T type of the elements in this wrapped array. - * - * @author Martin Odersky, Stephane Micheloud - * @since 2.8 - * @define Coll `WrappedArray` - * @define coll wrapped array - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -abstract class WrappedArray[T] -extends AbstractSeq[T] - with IndexedSeq[T] - with ArrayLike[T, WrappedArray[T]] - with CustomParallelizable[T, ParArray[T]] -{ - - override protected[this] def thisCollection: WrappedArray[T] = this - override protected[this] def toCollection(repr: WrappedArray[T]): WrappedArray[T] = repr - - /** The tag of the element type */ - def elemTag: ClassTag[T] - - @deprecated("use elemTag instead", "2.10.0") - def elemManifest: ClassManifest[T] = ClassManifest.fromClass[T](elemTag.runtimeClass.asInstanceOf[Class[T]]) - - /** The length of the array */ - def length: Int - - /** The element at given index */ - def apply(index: Int): T - - /** Update element at given index */ - def update(index: Int, elem: T): Unit - - /** The underlying array */ - def array: Array[T] - - override def par = ParArray.handoff(array) - - private def elementClass: Class[_] = - array.getClass.getComponentType - - override def toArray[U >: T : ClassTag]: Array[U] = { - val thatElementClass = implicitly[ClassTag[U]].runtimeClass - if (elementClass eq thatElementClass) - array.asInstanceOf[Array[U]] - else - super.toArray[U] - } - - override def stringPrefix = "WrappedArray" - - /** Clones this object, including the underlying Array. 
*/ - override def clone(): WrappedArray[T] = WrappedArray make array.clone() - - /** Creates new builder for this collection ==> move to subclasses - */ - override protected[this] def newBuilder: Builder[T, WrappedArray[T]] = - new WrappedArrayBuilder[T](elemTag) - -} - -/** A companion object used to create instances of `WrappedArray`. - */ -object WrappedArray { - // This is reused for all calls to empty. - private val EmptyWrappedArray = new ofRef[AnyRef](new Array[AnyRef](0)) - def empty[T <: AnyRef]: WrappedArray[T] = EmptyWrappedArray.asInstanceOf[WrappedArray[T]] - - // If make is called explicitly we use whatever we're given, even if it's - // empty. This may be unnecessary (if WrappedArray is to honor the collections - // contract all empty ones must be equal, so discriminating based on the reference - // equality of an empty array should not come up) but we may as well be - // conservative since wrapRefArray contributes most of the unnecessary allocations. - def make[T](x: AnyRef): WrappedArray[T] = (x match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[WrappedArray[T]] - - implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] = { - val tag = implicitly[ClassTag[T]] - val cls = tag.runtimeClass - (if (cls.isPrimitive) { - tag.runtimeClass match { - case java.lang.Integer.TYPE => cbfIntArray - case java.lang.Double.TYPE => cbfDoubleArray - case java.lang.Long.TYPE => cbfLongArray - case java.lang.Float.TYPE => cbfFloatArray - case java.lang.Character.TYPE => cbfCharArray - case java.lang.Byte.TYPE => 
cbfByteArray - case java.lang.Short.TYPE => cbfShortArray - case java.lang.Boolean.TYPE => cbfBooleanArray - case java.lang.Void.TYPE => cbfUnitArray - } - } else if (cls == ObjectClass) { - cbfObjectArray - } else { - refCBF[T with AnyRef](tag.asInstanceOf[ClassTag[T with AnyRef]]) - }).asInstanceOf[CanBuildFrom[WrappedArray[_], T, WrappedArray[T]]] - } - - private[this] val ObjectClass = classOf[Object] - - private[this] val cbfBooleanArray = new CanBuildFrom[WrappedArray[_], Boolean, WrappedArray[Boolean]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofBoolean mapResult WrappedArray.make[Boolean] - def apply = new ArrayBuilder.ofBoolean mapResult WrappedArray.make[Boolean] - } - private[this] val cbfByteArray = new CanBuildFrom[WrappedArray[_], Byte, WrappedArray[Byte]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofByte mapResult WrappedArray.make[Byte] - def apply = new ArrayBuilder.ofByte mapResult WrappedArray.make[Byte] - } - private[this] val cbfCharArray = new CanBuildFrom[WrappedArray[_], Char, WrappedArray[Char]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofChar mapResult WrappedArray.make[Char] - def apply = new ArrayBuilder.ofChar mapResult WrappedArray.make[Char] - } - private[this] val cbfDoubleArray = new CanBuildFrom[WrappedArray[_], Double, WrappedArray[Double]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofDouble mapResult WrappedArray.make[Double] - def apply = new ArrayBuilder.ofDouble mapResult WrappedArray.make[Double] - } - private[this] val cbfFloatArray = new CanBuildFrom[WrappedArray[_], Float, WrappedArray[Float]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofFloat mapResult WrappedArray.make[Float] - def apply = new ArrayBuilder.ofFloat mapResult WrappedArray.make[Float] - } - private[this] val cbfIntArray = new CanBuildFrom[WrappedArray[_], Int, WrappedArray[Int]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofInt mapResult WrappedArray.make[Int] - def apply = new 
ArrayBuilder.ofInt mapResult WrappedArray.make[Int] - } - private[this] val cbfLongArray = new CanBuildFrom[WrappedArray[_], Long, WrappedArray[Long]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofLong mapResult WrappedArray.make[Long] - def apply = new ArrayBuilder.ofLong mapResult WrappedArray.make[Long] - } - private[this] val cbfShortArray = new CanBuildFrom[WrappedArray[_], Short, WrappedArray[Short]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofShort mapResult WrappedArray.make[Short] - def apply = new ArrayBuilder.ofShort mapResult WrappedArray.make[Short] - } - private[this] val cbfUnitArray = new CanBuildFrom[WrappedArray[_], Unit, WrappedArray[Unit]] { - def apply(from: WrappedArray[_]) = new ArrayBuilder.ofUnit mapResult WrappedArray.make[Unit] - def apply = new ArrayBuilder.ofUnit mapResult WrappedArray.make[Unit] - } - private[this] val cbfObjectArray = refCBF[Object] - private[this] def refCBF[T <: AnyRef](implicit m: ClassTag[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] = - new CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] { - def apply(from: WrappedArray[_]): Builder[T, WrappedArray[T]] = - ArrayBuilder.make[T]()(m) mapResult WrappedArray.make[T] - - def apply: Builder[T, WrappedArray[T]] = - new ArrayBuilder.ofRef[T]()(m) mapResult WrappedArray.make[T] - } - - def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer - - @SerialVersionUID(3456489343829468865L) - final class ofRef[T <: AnyRef](val array: Array[T]) extends WrappedArray[T] with Serializable { - def elemTag = ClassTag[T](array.getClass.getComponentType) - def length: Int = array.length - def apply(index: Int): T = array(index).asInstanceOf[T] - def update(index: Int, elem: T) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - } - - @SerialVersionUID(-4502363748086738L) - final class ofByte(val array: Array[Byte]) extends WrappedArray[Byte] with Serializable { - def elemTag = ClassTag.Byte - def length: Int 
= array.length - def apply(index: Int): Byte = array(index) - def update(index: Int, elem: Byte) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedBytesHash(array) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3569089221887297170L) - final class ofShort(val array: Array[Short]) extends WrappedArray[Short] with Serializable { - def elemTag = ClassTag.Short - def length: Int = array.length - def apply(index: Int): Short = array(index) - def update(index: Int, elem: Short) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(4353470320490138993L) - final class ofChar(val array: Array[Char]) extends WrappedArray[Char] with Serializable { - def elemTag = ClassTag.Char - def length: Int = array.length - def apply(index: Int): Char = array(index) - def update(index: Int, elem: Char) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(-3796494337148298008L) - final class ofInt(val array: Array[Int]) extends WrappedArray[Int] with Serializable { - def elemTag = ClassTag.Int - def length: Int = array.length - def apply(index: Int): Int = array(index) - def update(index: Int, elem: Int) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(7604729449860217276L) - final class ofLong(val array: Array[Long]) extends WrappedArray[Long] with Serializable { - def 
elemTag = ClassTag.Long - def length: Int = array.length - def apply(index: Int): Long = array(index) - def update(index: Int, elem: Long) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(-5070075925231686368L) - final class ofFloat(val array: Array[Float]) extends WrappedArray[Float] with Serializable { - def elemTag = ClassTag.Float - def length: Int = array.length - def apply(index: Int): Float = array(index) - def update(index: Int, elem: Float) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(6556610635003622495L) - final class ofDouble(val array: Array[Double]) extends WrappedArray[Double] with Serializable { - def elemTag = ClassTag.Double - def length: Int = array.length - def apply(index: Int): Double = array(index) - def update(index: Int, elem: Double) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(-4835600351252182105L) - final class ofBoolean(val array: Array[Boolean]) extends WrappedArray[Boolean] with Serializable { - def elemTag = ClassTag.Boolean - def length: Int = array.length - def apply(index: Int): Boolean = array(index) - def update(index: Int, elem: Boolean) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - } - - @SerialVersionUID(3443664051778905707L) - final class 
ofUnit(val array: Array[Unit]) extends WrappedArray[Unit] with Serializable { - def elemTag = ClassTag.Unit - def length: Int = array.length - def apply(index: Int): Unit = array(index) - def update(index: Int, elem: Unit) { array(index) = elem } - override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofUnit => array.length == that.array.length - case _ => super.equals(that) - } - } -} diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala deleted file mode 100644 index a1705fc499af..000000000000 --- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.reflect.ClassTag - -/** A builder class for arrays. - * - * This builder can be reused. - * - * @tparam A type of elements that can be added to this builder. - * @param tag class tag for objects of type `A`. 
- * - * @since 2.8 - */ -class WrappedArrayBuilder[A](tag: ClassTag[A]) extends ReusableBuilder[A, WrappedArray[A]] { - - @deprecated("use tag instead", "2.10.0") - val manifest: ClassTag[A] = tag - - private var elems: WrappedArray[A] = _ - private var capacity: Int = 0 - private var size: Int = 0 - - private def mkArray(size: Int): WrappedArray[A] = { - if (size == 0) tag.emptyWrappedArray - else { - import java.util.Arrays.copyOf - val runtimeClass = tag.runtimeClass - if (runtimeClass.isPrimitive) - runtimeClass match { - case java.lang.Integer.TYPE => - val array = if (elems eq null) new Array[Int](size) else copyOf(elems.array.asInstanceOf[Array[Int]], size) - new WrappedArray.ofInt(array).asInstanceOf[WrappedArray[A]] - case java.lang.Double.TYPE => - val array = if (elems eq null) new Array[Double](size) else copyOf(elems.array.asInstanceOf[Array[Double]], size) - new WrappedArray.ofDouble(array).asInstanceOf[WrappedArray[A]] - case java.lang.Long.TYPE => - val array = if (elems eq null) new Array[Long](size) else copyOf(elems.array.asInstanceOf[Array[Long]], size) - new WrappedArray.ofLong(array).asInstanceOf[WrappedArray[A]] - case java.lang.Float.TYPE => - val array = if (elems eq null) new Array[Float](size) else copyOf(elems.array.asInstanceOf[Array[Float]], size) - new WrappedArray.ofFloat(array).asInstanceOf[WrappedArray[A]] - case java.lang.Character.TYPE => - val array = if (elems eq null) new Array[Char](size) else copyOf(elems.array.asInstanceOf[Array[Char]], size) - new WrappedArray.ofChar(array).asInstanceOf[WrappedArray[A]] - case java.lang.Byte.TYPE => - val array = if (elems eq null) new Array[Byte](size) else copyOf(elems.array.asInstanceOf[Array[Byte]], size) - new WrappedArray.ofByte(array).asInstanceOf[WrappedArray[A]] - case java.lang.Short.TYPE => - val array = if (elems eq null) new Array[Short](size) else copyOf(elems.array.asInstanceOf[Array[Short]], size) - new WrappedArray.ofShort(array).asInstanceOf[WrappedArray[A]] - case 
java.lang.Boolean.TYPE => - val array = if (elems eq null) new Array[Boolean](size) else copyOf(elems.array.asInstanceOf[Array[Boolean]], size) - new WrappedArray.ofBoolean(array).asInstanceOf[WrappedArray[A]] - case java.lang.Void.TYPE => - val array = if (elems eq null) new Array[Unit](size) else copyOf(elems.array.asInstanceOf[Array[AnyRef]], size).asInstanceOf[Array[Unit]] - new WrappedArray.ofUnit(array).asInstanceOf[WrappedArray[A]] - } - else { - val array = if (elems eq null) new Array[A with AnyRef](size) else copyOf(elems.array.asInstanceOf[Array[A with AnyRef]], size) - new WrappedArray.ofRef(array).asInstanceOf[WrappedArray[A]] - } - } - } - - private def resize(size: Int) { - elems = mkArray(size) - capacity = size - } - - override def sizeHint(size: Int) { - if (capacity < size) resize(size) - } - - private def ensureSize(size: Int) { - if (capacity < size) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - def +=(elem: A): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def clear() { size = 0 } - - def result() = { - if (capacity != 0 && capacity == size) { - capacity = 0 - elems - } - else mkArray(size) - } - - // todo: add ++= -} \ No newline at end of file diff --git a/src/library/scala/collection/mutable/package.scala b/src/library/scala/collection/mutable/package.scala new file mode 100644 index 000000000000..4ad5df4813d8 --- /dev/null +++ b/src/library/scala/collection/mutable/package.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + + +package object mutable { + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + type WrappedArray[X] = ArraySeq[X] + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + val WrappedArray = ArraySeq + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + type ArrayStack[X] = Stack[X] + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + val ArrayStack = Stack + + @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0") + type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X] + + @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0") + type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To] + + @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0") + type IndexedOptimizedSeq[A] = IndexedSeq[A] + + @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0") + type IndexedOptimizedBuffer[A] = IndexedBuffer[A] +} diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala index 63c3a8504072..f6a89b5c288c 100644 --- a/src/library/scala/collection/package.scala +++ b/src/library/scala/collection/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,120 +12,69 @@ package scala -/** - * Contains the base traits and objects needed to use and extend Scala's collection library. - * - * == Guide == - * - * A detailed guide for using the collections library is available - * at [[http://docs.scala-lang.org/overviews/collections/introduction.html]]. - * Developers looking to extend the collections library can find a description - * of its architecture at - * [[http://docs.scala-lang.org/overviews/core/architecture-of-scala-collections.html]]. - * - * == Using Collections == - * - * It is convenient to treat all collections as either - * a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as - * these traits define the vast majority of operations - * on a collection. - * - * Collections can, of course, be treated as specifically as needed, and - * the library is designed to ensure that - * the methods that transform collections will return a collection of the same - * type: {{{ - * scala> val array = Array(1,2,3,4,5,6) - * array: Array[Int] = Array(1, 2, 3, 4, 5, 6) - * - * scala> array map { _.toString } - * res0: Array[String] = Array(1, 2, 3, 4, 5, 6) - * - * scala> val list = List(1,2,3,4,5,6) - * list: List[Int] = List(1, 2, 3, 4, 5, 6) - * - * scala> list map { _.toString } - * res1: List[String] = List(1, 2, 3, 4, 5, 6) - * - * }}} - * - * == Creating Collections == - * - * The most common way to create a collection is to use its companion object as - * a factory. The three most commonly used collections are - * [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and - * [[scala.collection.immutable.Map]]. - * They can be used directly as shown below since their companion objects are - * all available as type aliases in either the [[scala]] package or in - * `scala.Predef`. 
New collections are created like this: - * {{{ - * scala> val seq = Seq(1,2,3,4,1) - * seq: Seq[Int] = List(1, 2, 3, 4, 1) - * - * scala> val set = Set(1,2,3,4,1) - * set: scala.collection.immutable.Set[Int] = Set(1, 2, 3, 4) - * - * scala> val map = Map(1 -> "one", 2 -> "two", 3 -> "three", 2 -> "too") - * map: scala.collection.immutable.Map[Int,String] = Map(1 -> one, 2 -> too, 3 -> three) - * }}} - * - * It is also typical to prefer the [[scala.collection.immutable]] collections - * over those in [[scala.collection.mutable]]; the types aliased in - * the `scala.Predef` object are the immutable versions. - * - * Also note that the collections library was carefully designed to include several implementations of - * each of the three basic collection types. These implementations have specific performance - * characteristics which are described - * in [[http://docs.scala-lang.org/overviews/collections/performance-characteristics.html the guide]]. - * - * The concrete parallel collections also have specific performance characteristics which are - * described in [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#performance-characteristics the parallel collections guide]] - * - * === Converting to and from Java Collections === - * - * The [[scala.collection.JavaConverters]] object provides a collection - * of decorators that allow converting between Scala and Java collections using `asScala` - * and `asJava` methods. 
- */ package object collection { - import scala.collection.generic.CanBuildFrom + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+X] = IterableOnce[X] + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + val TraversableOnce = IterableOnce + @deprecated("Use SeqOps instead of SeqLike", "2.13.0") + type SeqLike[A, T] = SeqOps[A, Seq, T] + @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") + type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] - /** Provides a CanBuildFrom instance that builds a specific target collection (`To') - * irrespective of the original collection (`From'). - */ - def breakOut[From, T, To](implicit b: CanBuildFrom[Nothing, T, To]): CanBuildFrom[From, T, To] = - // can't just return b because the argument to apply could be cast to From in b - new WrappedCanBuildFrom[From, T, To](b) + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversableOnce[+X] = IterableOnce[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversableOnce = IterableOnce + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenIterable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenIterable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSeq[+X] = Seq[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSeq = Seq + @deprecated("Gen* collection types have been removed", 
"2.13.0") + type GenSet[X] = Set[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSet = Set + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenMap[K, +V] = Map[K, V] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenMap = Map - private[collection] final class WrappedCanBuildFrom[From, T, To]( - val wrapped: CanBuildFrom[Nothing, T, To]) - extends CanBuildFrom[From, T, To] { - def apply(from: From) = wrapped.apply() - - def apply() = wrapped.apply() - } -} - -package collection { - /** Collection internal utility functions. + /** Needed to circumvent a difficulty between dotty and scalac concerning + * the right top type for a type parameter of kind * -> *. + * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. + * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. */ - private[collection] object DebugUtils { - def unsupported(msg: String) = throw new UnsupportedOperationException(msg) - def noSuchElement(msg: String) = throw new NoSuchElementException(msg) - def indexOutOfBounds(index: Int) = throw new IndexOutOfBoundsException(index.toString) - def illegalArgument(msg: String) = throw new IllegalArgumentException(msg) - - def buildString(closure: (Any => Unit) => Unit): String = { - var output = "" - closure(output += _ + "\n") + private[scala] type AnyConstr[X] = Any - output - } + /** An extractor used to head/tail deconstruct sequences. */ + object +: { + /** Splits a sequence into head +: tail. + * @return Some((head, tail)) if sequence is non-empty. None otherwise. 
+ */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = + if(t.isEmpty) None + else Some(t.head -> t.tail) + } - def arrayString[T](array: Array[T], from: Int, until: Int): String = { - array.slice(from, until) map { - case null => "n/a" - case x => "" + x - } mkString " | " - } + /** An extractor used to init/last deconstruct sequences. */ + object :+ { + /** Splits a sequence into init :+ last. + * @return Some((init, last)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] = + if(t.isEmpty) None + else Some(t.init -> t.last) } } diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala deleted file mode 100644 index 49c188b2e632..000000000000 --- a/src/library/scala/collection/parallel/Combiner.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.Parallel -import scala.collection.mutable.Builder -import scala.collection.generic.Sizing - -/** The base trait for all combiners. - * A combiner incremental collection construction just like - * a regular builder, but also implements an efficient merge operation of two builders - * via `combine` method. Once the collection is constructed, it may be obtained by invoking - * the `result` method. - * - * The complexity of the `combine` method should be less than linear for best - * performance. The `result` method doesn't have to be a constant time operation, - * but may be performed in parallel. 
- * - * @tparam Elem the type of the elements added to the builder - * @tparam To the type of the collection the builder produces - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { - - @transient - @volatile - var _combinerTaskSupport = defaultTaskSupport - - def combinerTaskSupport = { - val cts = _combinerTaskSupport - if (cts eq null) { - _combinerTaskSupport = defaultTaskSupport - defaultTaskSupport - } else cts - } - - def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts - - /** Combines the contents of the receiver builder and the `other` builder, - * producing a new builder containing both their elements. - * - * This method may combine the two builders by copying them into a larger collection, - * by producing a lazy view that gets evaluated once `result` is invoked, or use - * a merge operation specific to the data structure in question. - * - * Note that both the receiver builder and `other` builder become invalidated - * after the invocation of this method, and should be cleared (see `clear`) - * if they are to be used again. - * - * Also, combining two combiners `c1` and `c2` for which `c1 eq c2` is `true`, that is, - * they are the same objects in memory: - * - * {{{ - * c1.combine(c2) - * }}} - * - * always does nothing and returns `c1`. - * - * @tparam N the type of elements contained by the `other` builder - * @tparam NewTo the type of collection produced by the `other` builder - * @param other the other builder - * @return the parallel builder containing both the elements of this and the `other` builder - */ - def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] - - /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared - * across several threads constructing the collection. - * - * By default, this method returns `false`. 
- */ - def canBeShared: Boolean = false - - /** Constructs the result and sets the appropriate tasksupport object to the resulting collection - * if this is applicable. - */ - def resultWithTaskSupport: To = { - val res = result() - setTaskSupport(res, combinerTaskSupport) - } -} - -/* -private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] { - abstract override def result = { - val res = super.result - res - } -} -*/ diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala deleted file mode 100644 index fd888c5e573f..000000000000 --- a/src/library/scala/collection/parallel/ParIterable.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.GenIterable -import scala.collection.generic._ -import scala.collection.parallel.mutable.ParArrayCombiner - -/** A template trait for parallel iterable collections. 
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[+T] -extends GenIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] - - def stringPrefix = "ParIterable" -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] - - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] -} - diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala deleted file mode 100644 index 496da06b3c91..000000000000 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ /dev/null @@ -1,1514 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel - -import scala.language.{ higherKinds, implicitConversions } - -import scala.collection.mutable.Builder -import scala.collection.mutable.ArrayBuffer -import scala.collection.IterableLike -import scala.collection.Parallel -import scala.collection.Parallelizable -import scala.collection.CustomParallelizable -import scala.collection.generic._ -import scala.collection.GenIterableLike -import scala.collection.GenIterable -import scala.collection.GenTraversableOnce -import scala.collection.GenTraversable -import immutable.HashMapCombiner -import scala.reflect.ClassTag - -import scala.annotation.unchecked.uncheckedVariance - -import scala.collection.parallel.ParallelCollectionImplicits._ - - -/** A template trait for parallel collections of type `ParIterable[T]`. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * @tparam Repr the type of the actual collection containing the elements - * - * @define paralleliterableinfo - * This is a base trait for Scala parallel collections. It defines behaviour - * common to all parallel collections. Concrete parallel collections should - * inherit this trait and `ParIterable` if they want to define specific combiner - * factories. - * - * Parallel operations are implemented with divide and conquer style algorithms that - * parallelize well. The basic idea is to split the collection into smaller parts until - * they are small enough to be operated on sequentially. - * - * All of the parallel operations are implemented as tasks within this trait. Tasks rely - * on the concept of splitters, which extend iterators. Every parallel collection defines: - * - * {{{ - * def splitter: IterableSplitter[T] - * }}} - * - * which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`. 
- * Splitters have a method `remaining` to check the remaining number of elements, - * and method `split` which is defined by splitters. Method `split` divides the splitters - * iterate over into disjunct subsets: - * - * {{{ - * def split: Seq[Splitter] - * }}} - * - * which splits the splitter into a sequence of disjunct subsplitters. This is typically a - * very fast operation which simply creates wrappers around the receiver collection. - * This can be repeated recursively. - * - * Tasks are scheduled for execution through a - * [[scala.collection.parallel.TaskSupport]] object, which can be changed - * through the `tasksupport` setter of the collection. - * - * Method `newCombiner` produces a new combiner. Combiners are an extension of builders. - * They provide a method `combine` which combines two combiners and returns a combiner - * containing elements of both combiners. - * This method can be implemented by aggressively copying all the elements into the new combiner - * or by lazily binding their results. It is recommended to avoid copying all of - * the elements for performance reasons, although that cost might be negligible depending on - * the use case. Standard parallel collection combiners avoid copying when merging results, - * relying either on a two-step lazy construction or specific data-structure properties. - * - * Methods: - * - * {{{ - * def seq: Sequential - * def par: Repr - * }}} - * - * produce the sequential or parallel implementation of the collection, respectively. - * Method `par` just returns a reference to this parallel collection. - * Method `seq` is efficient - it will not copy the elements. Instead, - * it will create a sequential version of the collection using the same underlying data structure. - * Note that this is not the case for sequential collections in general - they may copy the elements - * and produce a different underlying data structure. 
- * - * The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible - * way to change between different collection types. - * - * Since this trait extends the `GenIterable` trait, methods like `size` must also - * be implemented in concrete collections, while `iterator` forwards to `splitter` by - * default. - * - * Each parallel collection is bound to a specific fork/join pool, on which dormant worker - * threads are kept. The fork/join pool contains other information such as the parallelism - * level, that is, the number of processors used. When a collection is created, it is assigned the - * default fork/join pool found in the `scala.parallel` package object. - * - * Parallel collections are not necessarily ordered in terms of the `foreach` - * operation (see `Traversable`). Parallel sequences have a well defined order for iterators - creating - * an iterator and traversing the elements linearly will always yield the same order. - * However, bulk operations such as `foreach`, `map` or `filter` always occur in undefined orders for all - * parallel collections. - * - * Existing parallel collection implementations provide strict parallel iterators. Strict parallel iterators are aware - * of the number of elements they have yet to traverse. It's also possible to provide non-strict parallel iterators, - * which do not know the number of elements remaining. To do this, the new collection implementation must override - * `isStrictSplitterCollection` to `false`. This will make some operations unavailable. - * - * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `splitter`, - * `newCombiner` and `seq`. Having an implicit combiner factory requires extending this trait in addition, as - * well as providing a companion object, as with regular collections. 
- * - * Method `size` is implemented as a constant time operation for parallel collections, and parallel collection - * operations rely on this assumption. - * - * @author Aleksandar Prokopec - * @since 2.9 - * - * @define sideeffects - * The higher-order functions passed to certain operations may contain side-effects. Since implementations - * of bulk operations may not be sequential, this means that side-effects may not be predictable and may - * produce data-races, deadlocks or invalidation of state if care is not taken. It is up to the programmer - * to either avoid using side-effects or to use some form of synchronization when accessing mutable data. - * - * @define pbfinfo - * An implicit value of class `CanCombineFrom` which determines the - * result class `That` from the current representation type `Repr` and - * and the new element type `B`. This builder factory can provide a parallel - * builder for the resulting collection. - * - * @define abortsignalling - * This method will use `abort` signalling capabilities. This means - * that splitters may send and read `abort` signals. - * - * @define indexsignalling - * This method will use `indexFlag` signalling capabilities. This means - * that splitters may set and read the `indexFlag` state. - * @define Coll `ParIterable` - * @define coll parallel iterable - */ -trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]] -extends GenIterableLike[T, Repr] - with CustomParallelizable[T, Repr] - with Parallel - with HasNewCombiner[T, Repr] -{ -self: ParIterableLike[T, Repr, Sequential] => - - @transient - @volatile - private var _tasksupport = defaultTaskSupport - - protected def initTaskSupport() { - _tasksupport = defaultTaskSupport - } - - /** The task support object which is responsible for scheduling and - * load-balancing tasks to processors. 
- * - * @see [[scala.collection.parallel.TaskSupport]] - */ - def tasksupport = { - val ts = _tasksupport - if (ts eq null) { - _tasksupport = defaultTaskSupport - defaultTaskSupport - } else ts - } - - /** Changes the task support object which is responsible for scheduling and - * load-balancing tasks to processors. - * - * A task support object can be changed in a parallel collection after it - * has been created, but only during a quiescent period, i.e. while there - * are no concurrent invocations to parallel collection methods. - * - * Here is a way to change the task support of a parallel collection: - * - * {{{ - * import scala.collection.parallel._ - * val pc = mutable.ParArray(1, 2, 3) - * pc.tasksupport = new ForkJoinTaskSupport( - * new java.util.concurrent.ForkJoinPool(2)) - * }}} - * - * @see [[scala.collection.parallel.TaskSupport]] - */ - def tasksupport_=(ts: TaskSupport) = _tasksupport = ts - - def seq: Sequential - - def repr: Repr = this.asInstanceOf[Repr] - - final def isTraversableAgain = true - - def hasDefiniteSize = true - - def isEmpty = size == 0 - - def nonEmpty = size != 0 - - def head = iterator.next() - - def headOption = if (nonEmpty) Some(head) else None - - def tail = drop(1) - - def last = { - var lst = head - for (x <- this.seq) lst = x - lst - } - - def lastOption = if (nonEmpty) Some(last) else None - - def init = take(size - 1) - - /** Creates a new parallel iterator used to traverse the elements of this parallel collection. - * This iterator is more specific than the iterator of the returned by `iterator`, and augmented - * with additional accessor and transformer methods. - * - * @return a parallel iterator - */ - protected[parallel] def splitter: IterableSplitter[T] - - /** Creates a new split iterator used to traverse the elements of this collection. - * - * By default, this method is implemented in terms of the protected `splitter` method. 
- * - * @return a split iterator - */ - def iterator: Splitter[T] = splitter - - override def par: Repr = repr - - /** Denotes whether this parallel collection has strict splitters. - * - * This is true in general, and specific collection instances may choose to - * override this method. Such collections will fail to execute methods - * which rely on splitters being strict, i.e. returning a correct value - * in the `remaining` method. - * - * This method helps ensure that such failures occur on method invocations, - * rather than later on and in unpredictable ways. - */ - def isStrictSplitterCollection = true - - /** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool. - * This method forwards the call to `newCombiner`. - */ - //protected[this] def newBuilder: scala.collection.mutable.Builder[T, Repr] = newCombiner - - /** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour. - * The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there - * was no previous phase (in which case this method must return `newc`). - * - * @param oldc The combiner that is the result of the previous task, or `None` if there was no previous task. - * @param newc The new, empty combiner that can be used. - * @return Either `newc` or `oldc`. 
- */ - protected def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]): Combiner[S, That] = newc - - type SSCTask[R, Tp] = StrictSplitterCheckTask[R, Tp] - - /* helper traits - to avoid structural invocations */ - - trait TaskOps[R, Tp] { - def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] - // public method with inaccessible types in parameters - def compose[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3): SeqComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]] - def parallel[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3): ParComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]] - } - - trait BuilderOps[Elem, To] { - trait Otherwise[Cmb] { - def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]): Unit - } - - def ifIs[Cmb](isbody: Cmb => Unit): Otherwise[Cmb] - def isCombiner: Boolean - def asCombiner: Combiner[Elem, To] - } - - trait SignallingOps[PI <: DelegatedSignalling] { - def assign(cntx: Signalling): PI - } - - /* convenience task operations wrapper */ - protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]) = new TaskOps[R, Tp] { - def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) { - def map(r: R): R1 = mapping(r) - } - - def compose[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3) = new SeqComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]](tsk, t2) { - def combineResults(fr: R, sr: R2): R3 = resCombiner(fr, sr) - } - - def parallel[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3) = new ParComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]](tsk, t2) { - def combineResults(fr: R, sr: R2): R3 = resCombiner(fr, sr) - } - } - - protected def wrap[R](body: => R) = new NonDivisible[R] { - def leaf(prevr: Option[R]) = result = body - @volatile var result: R = null.asInstanceOf[R] - } - - /* convenience signalling operations wrapper */ - protected implicit def delegatedSignalling2ops[PI <: 
DelegatedSignalling](it: PI) = new SignallingOps[PI] { - def assign(cntx: Signalling): PI = { - it.signalDelegate = cntx - it - } - } - - protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] { - def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] { - def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]) { - if (cb.getClass == t.runtimeClass) isbody(cb.asInstanceOf[Cmb]) else notbody - } - } - def isCombiner = cb.isInstanceOf[Combiner[_, _]] - def asCombiner = cb.asInstanceOf[Combiner[Elem, To]] - } - - protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] { - def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass` - def apply() = bf.apply() - } - - protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr] - - def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end) - - def mkString(sep: String): String = seq.mkString("", sep, "") - - def mkString: String = seq.mkString("") - - override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - - def canEqual(other: Any) = true - - /** Reduces the elements of this sequence using the specified associative binary operator. - * - * $undefinedorder - * - * Note this method has a different signature than the `reduceLeft` - * and `reduceRight` methods of the trait `Traversable`. - * The result of reducing may only be a supertype of this parallel collection's - * type parameter `T`. - * - * @tparam U A type parameter for the binary operator, a supertype of `T`. - * @param op A binary operator that must be associative. - * @return The result of applying reduce operator `op` between all the elements if the collection is nonempty. - * @throws UnsupportedOperationException - * if this $coll is empty. 
- */ - def reduce[U >: T](op: (U, U) => U): U = { - tasksupport.executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get }) - } - - /** Optionally reduces the elements of this sequence using the specified associative binary operator. - * - * $undefinedorder - * - * Note this method has a different signature than the `reduceLeftOption` - * and `reduceRightOption` methods of the trait `Traversable`. - * The result of reducing may only be a supertype of this parallel collection's - * type parameter `T`. - * - * @tparam U A type parameter for the binary operator, a supertype of `T`. - * @param op A binary operator that must be associative. - * @return An option value containing result of applying reduce operator `op` between all - * the elements if the collection is nonempty, and `None` otherwise. - */ - def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op)) - - /** Folds the elements of this sequence using the specified associative binary operator. - * The order in which the elements are reduced is unspecified and may be nondeterministic. - * - * Note this method has a different signature than the `foldLeft` - * and `foldRight` methods of the trait `Traversable`. - * The result of folding may only be a supertype of this parallel collection's - * type parameter `T`. - * - * @tparam U a type parameter for the binary operator, a supertype of `T`. - * @param z a neutral element for the fold operation, it may be added to the result - * an arbitrary number of times, not changing the result (e.g. `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication) - * @param op a binary operator that must be associative - * @return the result of applying fold operator `op` between all the elements and `z` - */ - def fold[U >: T](z: U)(op: (U, U) => U): U = { - tasksupport.executeAndWaitResult(new Fold(z, op, splitter)) - } - - /** Aggregates the results of applying an operator to subsequent elements. 
- * - * This is a more general form of `fold` and `reduce`. It has similar semantics, but does - * not require the result to be a supertype of the element type. It traverses the elements in - * different partitions sequentially, using `seqop` to update the result, and then - * applies `combop` to results from different partitions. The implementation of this - * operation may operate on an arbitrary number of collection partitions, so `combop` - * may be invoked arbitrary number of times. - * - * For example, one might want to process some elements and then produce a `Set`. In this - * case, `seqop` would process an element and append it to the set, while `combop` - * would concatenate two sets from different partitions together. The initial value - * `z` would be an empty set. - * - * {{{ - * pc.aggregate(Set[Int]())(_ += process(_), _ ++ _) - * }}} - * - * Another example is calculating geometric mean from a collection of doubles - * (one would typically require big doubles for this). - * - * @tparam S the type of accumulated results - * @param z the initial value for the accumulated result of the partition - this - * will typically be the neutral element for the `seqop` operator (e.g. 
- * `Nil` for list concatenation or `0` for summation) and may be evaluated - * more than once - * @param seqop an operator used to accumulate results within a partition - * @param combop an associative operator used to combine results from different partitions - */ - def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = { - tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter)) - } - - def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op) - - def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op) - - def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op) - - def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op) - - def reduceLeftOption[U >: T](op: (U, T) => U): Option[U] = seq.reduceLeftOption(op) - - def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op) - - /** Applies a function `f` to all the elements of $coll in an undefined order. - * - * @tparam U the result type of the function applied to each element, which is always discarded - * @param f function applied to each element - */ - def foreach[U](f: T => U) = { - tasksupport.executeAndWaitResult(new Foreach(f, splitter)) - } - - def count(p: T => Boolean): Int = { - tasksupport.executeAndWaitResult(new Count(p, splitter)) - } - - def sum[U >: T](implicit num: Numeric[U]): U = { - tasksupport.executeAndWaitResult(new Sum[U](num, splitter)) - } - - def product[U >: T](implicit num: Numeric[U]): U = { - tasksupport.executeAndWaitResult(new Product[U](num, splitter)) - } - - def min[U >: T](implicit ord: Ordering[U]): T = { - tasksupport.executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T] - } - - def max[U >: T](implicit ord: Ordering[U]): T = { - tasksupport.executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T] - } - - def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = { - if (isEmpty) throw new 
UnsupportedOperationException("empty.maxBy") - - reduce((x, y) => if (cmp.gteq(f(x), f(y))) x else y) - } - - def minBy[S](f: T => S)(implicit cmp: Ordering[S]): T = { - if (isEmpty) throw new UnsupportedOperationException("empty.minBy") - - reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y) - } - - def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.map(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.map(f)(bf2seq(bf))*/ - - def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.collect(pf)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result }) - } otherwise seq.collect(pf)(bf2seq(bf))*/ - - def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.flatMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.flatMap(f)(bf2seq(bf))*/ - - /** Tests whether a predicate holds for all elements of this $coll. 
- * - * $abortsignalling - * - * @param p a predicate used to test elements - * @return true if `p` holds for all elements, false otherwise - */ - def forall(@deprecatedName('pred) p: T => Boolean): Boolean = { - tasksupport.executeAndWaitResult(new Forall(p, splitter assign new DefaultSignalling with VolatileAbort)) - } - - /** Tests whether a predicate holds for some element of this $coll. - * - * $abortsignalling - * - * @param p a predicate used to test elements - * @return true if `p` holds for some element, false otherwise - */ - def exists(@deprecatedName('pred) p: T => Boolean): Boolean = { - tasksupport.executeAndWaitResult(new Exists(p, splitter assign new DefaultSignalling with VolatileAbort)) - } - - /** Finds some element in the collection for which the predicate holds, if such - * an element exists. The element may not necessarily be the first such element - * in the iteration order. - * - * If there are multiple elements obeying the predicate, the choice is nondeterministic. - * - * $abortsignalling - * - * @param p predicate used to test the elements - * @return an option value with the element if such an element exists, or `None` otherwise - */ - def find(@deprecatedName('pred) p: T => Boolean): Option[T] = { - tasksupport.executeAndWaitResult(new Find(p, splitter assign new DefaultSignalling with VolatileAbort)) - } - - /** Creates a combiner factory. Each combiner factory instance is used - * once per invocation of a parallel transformer method for a single - * collection. - * - * The default combiner factory creates a new combiner every time it - * is requested, unless the combiner is thread-safe as indicated by its - * `canBeShared` method. In this case, the method returns a factory which - * returns the same combiner each time. This is typically done for - * concurrent parallel collections, the combiners of which allow - * thread safe access. 
- */ - protected[this] def combinerFactory = { - val combiner = newCombiner - combiner.combinerTaskSupport = tasksupport - if (combiner.canBeShared) new CombinerFactory[T, Repr] { - val shared = combiner - def apply() = shared - def doesShareCombiners = true - } else new CombinerFactory[T, Repr] { - def apply() = { - val r = newCombiner - r.combinerTaskSupport = tasksupport - r - } - def doesShareCombiners = false - } - } - - protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = { - val combiner = cbf() - combiner.combinerTaskSupport = tasksupport - if (combiner.canBeShared) new CombinerFactory[S, That] { - val shared = combiner - def apply() = shared - def doesShareCombiners = true - } else new CombinerFactory[S, That] { - def apply() = { - val r = cbf() - r.combinerTaskSupport = tasksupport - r - } - def doesShareCombiners = false - } - } - - def withFilter(pred: T => Boolean): Repr = filter(pred) - - def filter(pred: T => Boolean): Repr = { - tasksupport.executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - def filterNot(pred: T => Boolean): Repr = { - tasksupport.executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = { - if (that.isParallel && bf.isParallel) { - // println("case both are parallel") - val other = that.asParIterable - val pbf = bf.asParallel - val cfactory = combinerFactory(() => pbf(repr)) - val copythis = new Copy(cfactory, splitter) - val copythat = wrap { - val othtask = new other.Copy(cfactory, other.splitter) - tasksupport.executeAndWaitResult(othtask) - } - val task = (copythis parallel copythat) { _ combine _ } mapResult { - _.resultWithTaskSupport - } - tasksupport.executeAndWaitResult(task) - } else if (bf(repr).isCombiner) { - // println("case parallel builder, `that` not parallel") - val copythis = 
new Copy(combinerFactory(() => bf(repr).asCombiner), splitter) - val copythat = wrap { - val cb = bf(repr).asCombiner - for (elem <- that.seq) cb += elem - cb - } - tasksupport.executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.resultWithTaskSupport }) - } else { - // println("case not a parallel builder") - val b = bf(repr) - this.splitter.copy2builder[U, That, Builder[U, That]](b) - for (elem <- that.seq) b += elem - setTaskSupport(b.result(), tasksupport) - } - } - - def partition(pred: T => Boolean): (Repr, Repr) = { - tasksupport.executeAndWaitResult( - new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult { - p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport) - } - ) - } - - def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = { - val r = tasksupport.executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult { - rcb => rcb.groupByKey(() => combinerFactory()) - }) - setTaskSupport(r, tasksupport) - } - - def take(n: Int): Repr = { - val actualn = if (size > n) n else size - if (actualn < MIN_FOR_COPY) take_sequential(actualn) - else tasksupport.executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult { - _.resultWithTaskSupport - }) - } - - private def take_sequential(n: Int) = { - val cb = newCombiner - cb.sizeHint(n) - val it = splitter - var left = n - while (left > 0) { - cb += it.next - left -= 1 - } - cb.resultWithTaskSupport - } - - def drop(n: Int): Repr = { - val actualn = if (size > n) n else size - if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn) - else tasksupport.executeAndWaitResult(new Drop(actualn, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - private def drop_sequential(n: Int) = { - val it = splitter drop n - val cb = newCombiner - cb.sizeHint(size - n) - while (it.hasNext) cb += it.next - cb.resultWithTaskSupport - } - - override def slice(unc_from: Int, unc_until: Int): Repr = { - val from = 
unc_from min size max 0 - val until = unc_until min size max from - if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until) - else tasksupport.executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.resultWithTaskSupport }) - } - - private def slice_sequential(from: Int, until: Int): Repr = { - val cb = newCombiner - var left = until - from - val it = splitter drop from - while (left > 0) { - cb += it.next - left -= 1 - } - cb.resultWithTaskSupport - } - - def splitAt(n: Int): (Repr, Repr) = { - tasksupport.executeAndWaitResult( - new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult { - p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport) - } - ) - } - - /** Computes a prefix scan of the elements of the collection. - * - * Note: The neutral element `z` may be applied more than once. - * - * @tparam U element type of the resulting collection - * @tparam That type of the resulting collection - * @param z neutral element for the operator `op` - * @param op the associative operator for the scan - * @param bf $bfinfo - * @return a collection containing the prefix scan of the elements in the original collection - * - * @usecase def scan(z: T)(op: (T, T) => T): $Coll[T] - * @inheritdoc - * - * @return a new $coll containing the prefix scan of the elements in this $coll - */ - def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { - if (tasksupport.parallelismLevel > 1) { - if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult { - cb => cb.resultWithTaskSupport - }) - }) else setTaskSupport((bf(repr) += z).result(), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) - - def 
scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanLeft(z)(op)(bf2seq(bf)), tasksupport) - - def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanRight(z)(op)(bf2seq(bf)), tasksupport) - - /** Takes the longest prefix of elements that satisfy the predicate. - * - * $indexsignalling - * The index flag is initially set to maximum integer value. - * - * @param pred the predicate used to test the elements - * @return the longest prefix of this $coll of elements that satisfy the predicate `pred` - */ - def takeWhile(pred: T => Boolean): Repr = { - val cbf = combinerFactory - if (cbf.doesShareCombiners) { - val parseqspan = toSeq.takeWhile(pred) - tasksupport.executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult { - _.resultWithTaskSupport - }) - } else { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult { - _._1.resultWithTaskSupport - }) - } - } - - /** Splits this $coll into a prefix/suffix pair according to a predicate. - * - * $indexsignalling - * The index flag is initially set to maximum integer value. 
- * - * @param pred the predicate used to test the elements - * @return a pair consisting of the longest prefix of the collection for which all - * the elements satisfy `pred`, and the rest of the collection - */ - def span(pred: T => Boolean): (Repr, Repr) = { - val cbf = combinerFactory - if (cbf.doesShareCombiners) { - val (xs, ys) = toSeq.span(pred) - val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.resultWithTaskSupport } - val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.resultWithTaskSupport } - val copyall = (copyxs parallel copyys) { - (xr, yr) => (xr, yr) - } - tasksupport.executeAndWaitResult(copyall) - } else { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { - p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport) - }) - } - } - - /** Drops all elements in the longest prefix of elements that satisfy the predicate, - * and returns a collection composed of the remaining elements. - * - * $indexsignalling - * The index flag is initially set to maximum integer value. 
- * - * @param pred the predicate used to test the elements - * @return a collection composed of all the elements after the longest prefix of elements - * in this $coll that satisfy the predicate `pred` - */ - def dropWhile(pred: T => Boolean): Repr = { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult( - new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { - _._2.resultWithTaskSupport - } - ) - } - - def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0) - - def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start) - - def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) { - tasksupport.executeAndWaitResult(new CopyToArray(start, len, xs, splitter)) - } - - def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that) - - def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) - } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport) - - def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false) - - def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult( - new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport) - - protected def 
toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { - tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport }) - } - - protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = { - tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport }) - } - - @deprecated("use .seq.view instead", "2.11.0") - def view = seq.view - - override def toArray[U >: T: ClassTag]: Array[U] = { - val arr = new Array[U](size) - copyToArray(arr) - arr - } - - override def toList: List[T] = seq.toList - - override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq - - override def toStream: Stream[T] = seq.toStream - - override def toIterator: Iterator[T] = splitter - - // the methods below are overridden - - override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers? 
- - override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] - - override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]] - - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - - override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) - - override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) - - override def toVector: Vector[T] = to[Vector] - - override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { - toParCollection[T, Col[T]](() => cbf().asCombiner) - } else seq.to(cbf) - - /* tasks */ - - protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] { - def requiresStrictSplitters = false - if (requiresStrictSplitters && !isStrictSplitterCollection) - throw new UnsupportedOperationException("This collection does not provide strict splitters.") - } - - /** Standard accessor task that iterates over the elements of the collection. - * - * @tparam R type of the result of this method (`R` for result). - * @tparam Tp the representation type of the task at hand. 
- */ - protected trait Accessor[R, Tp] - extends StrictSplitterCheckTask[R, Tp] { - protected[this] val pit: IterableSplitter[T] - protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] - def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel) - def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure - private[parallel] override def signalAbort = pit.abort() - override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" - } - - protected[this] trait NonDivisibleTask[R, Tp] extends StrictSplitterCheckTask[R, Tp] { - def shouldSplitFurther = false - def split = throw new UnsupportedOperationException("Does not split.") - } - - protected[this] trait NonDivisible[R] extends NonDivisibleTask[R, NonDivisible[R]] - - protected[this] abstract class Composite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (val ft: First, val st: Second) - extends NonDivisibleTask[R, Composite[FR, SR, R, First, Second]] { - def combineResults(fr: FR, sr: SR): R - @volatile var result: R = null.asInstanceOf[R] - private[parallel] override def signalAbort() { - ft.signalAbort() - st.signalAbort() - } - protected def mergeSubtasks() { - ft mergeThrowables st - if (throwable eq null) result = combineResults(ft.result, st.result) - } - override def requiresStrictSplitters = ft.requiresStrictSplitters || st.requiresStrictSplitters - } - - /** Sequentially performs one task after another. 
*/ - protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) - extends Composite[FR, SR, R, First, Second](f, s) { - def leaf(prevr: Option[R]) = { - tasksupport.executeAndWaitResult(ft) : Any - tasksupport.executeAndWaitResult(st) : Any - mergeSubtasks() - } - } - - /** Performs two tasks in parallel, and waits for both to finish. */ - protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) - extends Composite[FR, SR, R, First, Second](f, s) { - def leaf(prevr: Option[R]) = { - val ftfuture: () => Any = tasksupport.execute(ft) - tasksupport.executeAndWaitResult(st) : Any - ftfuture() - mergeSubtasks() - } - } - - protected[this] abstract class ResultMapping[R, Tp, R1](val inner: StrictSplitterCheckTask[R, Tp]) - extends NonDivisibleTask[R1, ResultMapping[R, Tp, R1]] { - @volatile var result: R1 = null.asInstanceOf[R1] - def map(r: R): R1 - def leaf(prevr: Option[R1]) = { - val initialResult = tasksupport.executeAndWaitResult(inner) - result = map(initialResult) - } - private[parallel] override def signalAbort() { - inner.signalAbort() - } - override def requiresStrictSplitters = inner.requiresStrictSplitters - } - - protected trait Transformer[R, Tp] extends Accessor[R, Tp] - - protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Unit, Foreach[S]] { - @volatile var result: Unit = () - def leaf(prevr: Option[Unit]) = pit.foreach(op) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p) - } - - protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Int, Count] { - // val pittxt = pit.toString - @volatile var result: Int = 0 - def leaf(prevr: Option[Int]) = result = pit.count(pred) - protected[this] def newSubtask(p: 
IterableSplitter[T]) = new Count(pred, p) - override def merge(that: Count) = result = result + that.result - // override def toString = "CountTask(" + pittxt + ")" - } - - protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Reduce[U]] { - @volatile var result: Option[U] = None - def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op)) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p) - override def merge(that: Reduce[U]) = - if (this.result == None) result = that.result - else if (that.result != None) result = Some(op(result.get, that.result.get)) - override def requiresStrictSplitters = true - } - - protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) - extends Accessor[U, Fold[U]] { - @volatile var result: U = null.asInstanceOf[U] - def leaf(prevr: Option[U]) = result = pit.fold(z)(op) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p) - override def merge(that: Fold[U]) = result = op(result, that.result) - } - - protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T]) - extends Accessor[S, Aggregate[S]] { - @volatile var result: S = null.asInstanceOf[S] - def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p) - override def merge(that: Aggregate[S]) = result = combop(result, that.result) - } - - protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[U, Sum[U]] { - @volatile var result: U = null.asInstanceOf[U] - def leaf(prevr: Option[U]) = result = pit.sum(num) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p) - override def merge(that: Sum[U]) = result = num.plus(result, that.result) - } 
- - protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[U, Product[U]] { - @volatile var result: U = null.asInstanceOf[U] - def leaf(prevr: Option[U]) = result = pit.product(num) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p) - override def merge(that: Product[U]) = result = num.times(result, that.result) - } - - protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Min[U]] { - @volatile var result: Option[U] = None - def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord)) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p) - override def merge(that: Min[U]) = - if (this.result == None) result = that.result - else if (that.result != None) result = if (ord.lteq(result.get, that.result.get)) result else that.result - override def requiresStrictSplitters = true - } - - protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Max[U]] { - @volatile var result: Option[U] = None - def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord)) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p) - override def merge(that: Max[U]) = - if (this.result == None) result = that.result - else if (that.result != None) result = if (ord.gteq(result.get, that.result.get)) result else that.result - override def requiresStrictSplitters = true - } - - protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[S, That], Map[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p) 
- override def merge(that: Map[S, That]) = result = result combine that.result - } - - protected[this] class Collect[S, That] - (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[S, That], Collect[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Collect(pf, pbf, p) - override def merge(that: Collect[S, That]) = result = result combine that.result - } - - protected[this] class FlatMap[S, That] - (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[S, That], FlatMap[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = new FlatMap(f, pbf, p) - override def merge(that: FlatMap[S, That]) = { - //debuglog("merging " + result + " and " + that.result) - result = result combine that.result - //debuglog("merged into " + result) - } - } - - protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Boolean, Forall] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() } - protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p) - override def merge(that: Forall) = result = result && that.result - } - - protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Boolean, Exists] { - @volatile var result: Boolean = false - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() } - protected[this] def 
newSubtask(p: IterableSplitter[T]) = new Exists(pred, p) - override def merge(that: Exists) = result = result || that.result - } - - protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) - extends Accessor[Option[U], Find[U]] { - @volatile var result: Option[U] = None - def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() } - protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p) - override def merge(that: Find[U]) = if (this.result == None) result = that.result - } - - protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Filter[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = { - result = pit.filter2combiner(pred, reuse(prev, cbf())) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new Filter(pred, cbf, p) - override def merge(that: Filter[U, This]) = result = result combine that.result - } - - protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], FilterNot[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = { - result = pit.filterNot2combiner(pred, reuse(prev, cbf())) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new FilterNot(pred, cbf, p) - override def merge(that: FilterNot[U, This]) = result = result combine that.result - } - - protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, That], Copy[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, 
That, Combiner[U, That]](reuse(prev, cfactory())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Copy[U, That](cfactory, p) - override def merge(that: Copy[U, That]) = result = result combine that.result - } - - protected[this] class Partition[U >: T, This >: Repr] - (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] { - @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p) - override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) - } - - protected[this] class GroupBy[K, U >: T]( - f: U => K, - mcf: () => HashMapCombiner[K, U], - protected[this] val pit: IterableSplitter[T] - ) extends Transformer[HashMapCombiner[K, U], GroupBy[K, U]] { - @volatile var result: Result = null - final def leaf(prev: Option[Result]) = { - // note: HashMapCombiner doesn't merge same keys until evaluation - val cb = mcf() - while (pit.hasNext) { - val elem = pit.next() - cb += f(elem) -> elem - } - result = cb - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new GroupBy(f, mcf, p) - override def merge(that: GroupBy[K, U]) = { - // note: this works because we know that a HashMapCombiner doesn't merge same keys until evaluation - // --> we know we're not dropping any mappings - result = (result combine that.result).asInstanceOf[HashMapCombiner[K, U]] - } - } - - protected[this] class Take[U >: T, This >: Repr] - (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], 
Take[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = { - result = pit.take2combiner(n, reuse(prev, cbf())) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes; if untilp <= n) yield { - if (untilp + p.remaining < n) new Take(p.remaining, cbf, p) - else new Take(n - untilp, cbf, p) - } - } - override def merge(that: Take[U, This]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class Drop[U >: T, This >: Repr] - (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Drop[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield { - if (withp - p.remaining > n) new Drop(0, cbf, p) - else new Drop(n - withp + p.remaining, cbf, p) - } - } - override def merge(that: Drop[U, This]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class Slice[U >: T, This >: Repr] - (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, This], Slice[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new 
UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield { - val f = (from max untilp) - untilp - val u = (until min (untilp + p.remaining)) - untilp - new Slice(f, u, cbf, p) - } - } - override def merge(that: Slice[U, This]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class SplitAt[U >: T, This >: Repr] - (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] { - @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter())) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p) - } - override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) - override def requiresStrictSplitters = true - } - - protected[this] class TakeWhile[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] { - @volatile var result: (Combiner[U, This], Boolean) = null - def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) { - result = pit.takeWhile2combiner(pred, reuse(prev.map(_._1), cbf())) - if 
(!result._2) pit.setIndexFlagIfLesser(pos) - } else result = (reuse(prev.map(_._1), cbf()), false) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p) - } - override def merge(that: TakeWhile[U, This]) = if (result._2) { - result = (result._1 combine that.result._1, that.result._2) - } - override def requiresStrictSplitters = true - } - - protected[this] class Span[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) - extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] { - @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) { - // val lst = pit.toList - // val pa = mutable.ParArray(lst: _*) - // val str = "At leaf we will iterate: " + pa.splitter.toList - result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised - // println("\nAt leaf result is: " + result) - if (result._2.size > 0) pit.setIndexFlagIfLesser(pos) - } else { - result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter()))) - } - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p) - } - override def merge(that: Span[U, This]) = result = if (result._2.size == 0) { - (result._1 combine that.result._1, that.result._2) - } else { - (result._1, result._2 combine that.result._1 combine 
that.result._2) - } - override def requiresStrictSplitters = true - } - - protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) - extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - val sizes = pits.map(_.remaining) - val opits = othpit.psplitWithSignalling(sizes: _*) - (pits zip opits) map { p => new Zip(pbf, p._1, p._2) } - } - override def merge(that: Zip[U, S, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class ZipAll[U >: T, S, That] - (len: Int, thiselem: U, thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) - extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf()) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = if (pit.remaining <= len) { - val pits = pit.splitWithSignalling - val sizes = pits.map(_.remaining) - val opits = othpit.psplitWithSignalling(sizes: _*) - ((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) } - } else { - val opits = othpit.psplitWithSignalling(pit.remaining) - val diff = len - pit.remaining - Seq( - new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed - new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, 
diff).splitter.asInstanceOf[IterableSplitter[T]], opits(1)) - ) - } - override def merge(that: ZipAll[U, S, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], protected[this] val pit: IterableSplitter[T]) - extends Accessor[Unit, CopyToArray[U, This]] { - @volatile var result: Unit = () - def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len) - protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield { - val plen = p.remaining min (len - untilp) - new CopyToArray[U, This](from + untilp, plen, array, p) - } - } - override def requiresStrictSplitters = true - } - - protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) - extends Transformer[Combiner[U, That], ToParCollection[U, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Combiner[U, That]]) { - result = cbf() - while (pit.hasNext) result += pit.next - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParCollection[U, That](cbf, p) - override def merge(that: ToParCollection[U, That]) = result = result combine that.result - } - - protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V)) - extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Combiner[(K, V), That]]) { - result = cbf() - while (pit.hasNext) result += pit.next - } - protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParMap[K, V, That](cbf, p)(ev) - override def merge(that: ToParMap[K, V, That]) = result = result combine 
that.result - } - - protected[this] class CreateScanTree[U >: T](from: Int, len: Int, z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) - extends Transformer[ScanTree[U], CreateScanTree[U]] { - @volatile var result: ScanTree[U] = null - def leaf(prev: Option[ScanTree[U]]) = if (pit.remaining > 0) { - val trees = ArrayBuffer[ScanTree[U]]() - var i = from - val until = from + len - val blocksize = scanBlockSize - while (i < until) { - trees += scanBlock(i, scala.math.min(blocksize, pit.remaining)) - i += blocksize - } - - // merge trees - result = mergeTrees(trees, 0, trees.length) - } else result = null // no elements to scan (merge will take care of `null`s) - private def scanBlock(from: Int, len: Int): ScanTree[U] = { - val pitdup = pit.dup - new ScanLeaf(pitdup, op, from, len, None, pit.reduceLeft(len, op)) - } - private def mergeTrees(trees: ArrayBuffer[ScanTree[U]], from: Int, howmany: Int): ScanTree[U] = if (howmany > 1) { - val half = howmany / 2 - ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half)) - } else trees(from) - protected[this] def newSubtask(pit: IterableSplitter[T]) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield { - new CreateScanTree(untilp, p.remaining, z, op, p) - } - } - override def merge(that: CreateScanTree[U]) = if (this.result != null) { - if (that.result != null) result = ScanNode(result, that.result) - } else result = that.result - override def requiresStrictSplitters = true - } - - protected[this] class FromScanTree[U >: T, That] - (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That]) - extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) { - val cb = reuse(prev, cbf()) - iterate(tree, cb) - result = cb - } - private 
def iterate(tree: ScanTree[U], cb: Combiner[U, That]): Unit = tree match { - case ScanNode(left, right) => - iterate(left, cb) - iterate(right, cb) - case ScanLeaf(p, _, _, len, Some(prev), _) => - p.scanToCombiner(len, prev.acc, op, cb) - case ScanLeaf(p, _, _, len, None, _) => - cb += z - p.scanToCombiner(len, z, op, cb) - } - def split = tree match { - case ScanNode(left, right) => Seq( - new FromScanTree(left, z, op, cbf), - new FromScanTree(right, z, op, cbf) - ) - case _ => throw new UnsupportedOperationException("Cannot be split further") - } - def shouldSplitFurther = tree match { - case ScanNode(_, _) => true - case ScanLeaf(_, _, _, _, _, _) => false - } - override def merge(that: FromScanTree[U, That]) = result = result combine that.result - } - - /* scan tree */ - - protected[this] def scanBlockSize = (thresholdFromSize(size, tasksupport.parallelismLevel) / 2) max 1 - - protected[this] trait ScanTree[U >: T] { - def beginsAt: Int - def pushdown(v: U): Unit - def leftmost: ScanLeaf[U] - def rightmost: ScanLeaf[U] - def print(depth: Int = 0): Unit - } - - protected[this] case class ScanNode[U >: T](left: ScanTree[U], right: ScanTree[U]) extends ScanTree[U] { - right.pushdown(left.rightmost.acc) - right.leftmost.prev = Some(left.rightmost) - - val leftmost = left.leftmost - val rightmost = right.rightmost - - def beginsAt = left.beginsAt - def pushdown(v: U) { - left.pushdown(v) - right.pushdown(v) - } - def print(depth: Int) { - println((" " * depth) + "ScanNode, begins at " + beginsAt) - left.print(depth + 1) - right.print(depth + 1) - } - } - - protected[this] case class ScanLeaf[U >: T] - (pit: IterableSplitter[U], op: (U, U) => U, from: Int, len: Int, var prev: Option[ScanLeaf[U]], var acc: U) - extends ScanTree[U] { - def beginsAt = from - def pushdown(v: U) = { - acc = op(v, acc) - } - def leftmost = this - def rightmost = this - def print(depth: Int) = println((" " * depth) + this) - } - - /* alias methods */ - - @deprecated("Use foldLeft instead 
of /:", "2.12.10") - def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op) - - @deprecated("Use foldRight instead of :\\", "2.12.10") - def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op) - - /* debug information */ - - private[parallel] def debugInformation = "Parallel collection: " + this.getClass - - private[parallel] def brokenInvariants = Seq[String]() - - // private val dbbuff = ArrayBuffer[String]() - // def debugBuffer: ArrayBuffer[String] = dbbuff - def debugBuffer: ArrayBuffer[String] = null - - private[parallel] def debugclear() = synchronized { - debugBuffer.clear() - } - - private[parallel] def debuglog(s: String) = synchronized { - debugBuffer += s - } - - import scala.collection.DebugUtils._ - private[parallel] def printDebugBuffer() = println(buildString { - append => - for (s <- debugBuffer) { - append(s) - } - }) -} diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala deleted file mode 100644 index f0ef2022fd57..000000000000 --- a/src/library/scala/collection/parallel/ParMap.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.Map -import scala.collection.GenMap -import scala.collection.generic.ParMapFactory -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.generic.CanCombineFrom - -/** A template trait for parallel maps. 
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, +V] -extends GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] - with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], Map[K, V]] -{ -self => - - def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] - - def empty: ParMap[K, V] = new mutable.ParHashMap[K, V] - - override def stringPrefix = "ParMap" - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] -} - - - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new mutable.ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = mutable.ParHashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map - * because of variance issues. - */ - abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] { - override def size = underlying.size - def get(key: A) = underlying.get(key) - def splitter = underlying.splitter - override def default(key: A): B = d(key) - } -} diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala deleted file mode 100644 index 5d176dda4d5f..000000000000 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.MapLike -import scala.collection.GenMapLike -import scala.collection.Map - -import scala.annotation.unchecked.uncheckedVariance - -/** A template trait for mutable parallel maps. This trait is to be mixed in - * with concrete parallel maps to override the representation type. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * @define Coll `ParMap` - * @define coll parallel map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMapLike[K, - +V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: Map[K, V] with MapLike[K, V, Sequential]] -extends GenMapLike[K, V, Repr] - with ParIterableLike[(K, V), Repr, Sequential] -{ -self => - - def default(key: K): V = throw new NoSuchElementException("key not found: " + key) - - def empty: Repr - - def apply(key: K) = get(key) match { - case Some(v) => v - case None => default(key) - } - - def getOrElse[U >: V](key: K, default: => U): U = get(key) match { - case Some(v) => v - case None => default - } - - def contains(key: K): Boolean = get(key).isDefined - - def isDefinedAt(key: K): Boolean = contains(key) - - private[this] def keysIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[K] = - new IterableSplitter[K] { - i => - val iter = s - def hasNext = iter.hasNext - def next() = iter.next()._1 - def split = { - val ss = iter.split.map(keysIterator(_)) - ss.foreach { _.signalDelegate = i.signalDelegate } - ss - } - def remaining = iter.remaining - def dup = keysIterator(iter.dup) - } - - def keysIterator: IterableSplitter[K] = keysIterator(splitter) - - private[this] def valuesIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[V] = - new IterableSplitter[V] { - i => - val iter = s - def 
hasNext = iter.hasNext - def next() = iter.next()._2 - def split = { - val ss = iter.split.map(valuesIterator(_)) - ss.foreach { _.signalDelegate = i.signalDelegate } - ss - } - def remaining = iter.remaining - def dup = valuesIterator(iter.dup) - } - - def valuesIterator: IterableSplitter[V] = valuesIterator(splitter) - - protected class DefaultKeySet extends ParSet[K] { - def contains(key : K) = self.contains(key) - def splitter = keysIterator(self.splitter) - def + (elem: K): ParSet[K] = - (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem - def - (elem: K): ParSet[K] = - (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem - override def size = self.size - override def foreach[U](f: K => U) = for ((k, v) <- self) f(k) - override def seq = self.seq.keySet - } - - protected class DefaultValuesIterable extends ParIterable[V] { - def splitter = valuesIterator(self.splitter) - override def size = self.size - override def foreach[U](f: V => U) = for ((k, v) <- self) f(v) - def seq = self.seq.values - } - - def keySet: ParSet[K] = new DefaultKeySet - - def keys: ParIterable[K] = keySet - - def values: ParIterable[V] = new DefaultValuesIterable - - def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] { - lazy val filtered = self.filter(kv => p(kv._1)) - override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) - def splitter = filtered.splitter - override def contains(key: K) = self.contains(key) && p(key) - def get(key: K) = if (!p(key)) None else self.get(key) - def seq = self.seq.filterKeys(p) - def size = filtered.size - def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv - def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key - } - - def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] { - override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v))) - def splitter = 
self.splitter.map(kv => (kv._1, f(kv._2))) - override def size = self.size - override def contains(key: K) = self.contains(key) - def get(key: K) = self.get(key).map(f) - def seq = self.seq.mapValues(f) - def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv - def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key - } - - // note - should not override toMap (could be mutable) -} diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala deleted file mode 100644 index 6c0939f9fe40..000000000000 --- a/src/library/scala/collection/parallel/ParSeq.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.ParFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.GenSeq -import scala.collection.parallel.mutable.ParArrayCombiner - -/** A template trait for parallel sequences. 
- * - * $parallelseqinfo - * - * $sideeffects - * - * @tparam T the type of the elements in this parallel sequence - * - * @author Aleksandar Prokopec - */ -trait ParSeq[+T] extends GenSeq[T] - with ParIterable[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], Seq[T]] -{ - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - //protected[this] override def newBuilder = ParSeq.newBuilder[T] - - def apply(i: Int): T - - override def toString = super[ParIterable].toString - - override def stringPrefix = getClass.getSimpleName -} - -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] -} diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala deleted file mode 100644 index b6d104d402ab..000000000000 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ /dev/null @@ -1,483 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator } -import scala.collection.generic.DefaultSignalling -import scala.collection.generic.AtomicIndexFlag -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.VolatileAbort - -import scala.collection.parallel.ParallelCollectionImplicits._ - -/** A template trait for sequences of type `ParSeq[T]`, representing - * parallel sequences with element type `T`. 
- * - * $parallelseqinfo - * - * @tparam T the type of the elements contained in this collection - * @tparam Repr the type of the actual collection containing the elements - * @tparam Sequential the type of the sequential version of this parallel collection - * - * @define parallelseqinfo - * Parallel sequences inherit the `Seq` trait. Their indexing and length computations - * are defined to be efficient. Like their sequential counterparts - * they always have a defined order of elements. This means they will produce resulting - * parallel sequences in the same way sequential sequences do. However, the order - * in which they perform bulk operations on elements to produce results is not defined and is generally - * nondeterministic. If the higher-order functions given to them produce no sideeffects, - * then this won't be noticeable. - * - * This trait defines a new, more general `split` operation and reimplements the `split` - * operation of `ParallelIterable` trait using the new `split` operation. - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]] -extends scala.collection.GenSeqLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] { -self => - - protected[this] type SuperParIterator = IterableSplitter[T] - - /** A more refined version of the iterator found in the `ParallelIterable` trait, - * this iterator can be split into arbitrary subsets of iterators. 
- * - * @return an iterator that can be split into subsets of precise size - */ - protected[parallel] def splitter: SeqSplitter[T] - - override def iterator: PreciseSplitter[T] = splitter - - override def size = length - - /** Used to iterate elements using indices */ - protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { - private var i = start - - def hasNext = i < end - - def next(): T = if (i < end) { - val x = self(i) - i += 1 - x - } else Iterator.empty.next() - - def head = self(i) - - final def remaining = end - i - - def dup = new Elements(i, end) {} - - def split = psplit(remaining / 2, remaining - remaining / 2) - - def psplit(sizes: Int*) = { - val incr = sizes.scanLeft(0)(_ + _) - for ((from, until) <- incr.init zip incr.tail) yield { - new Elements(start + from, (start + until) min end) {} - } - } - - override def toString = "Elements(" + start + ", " + end + ")" - } - - /* ParallelSeq methods */ - - /** Returns the length of the longest segment of elements starting at - * a given position satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to maximum integer value. - * - * @param p the predicate used to test the elements - * @param from the starting offset for the search - * @return the length of the longest segment of elements starting at `from` and - * satisfying the predicate - */ - def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else { - val realfrom = if (from < 0) 0 else from - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 - } - - /** Finds the first element satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to maximum integer value. 
- * - * @param p the predicate used to test the elements - * @param from the starting offset for the search - * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists - */ - def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else { - val realfrom = if (from < 0) 0 else from - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MaxValue) - tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) - } - - /** Finds the last element satisfying some predicate. - * - * $indexsignalling - * - * The index flag is initially set to minimum integer value. - * - * @param p the predicate used to test the elements - * @param end the maximum offset for the search - * @return the index `<= end` of the first element of this $coll that satisfies the predicate `p`, - * or `-1`, if none exists - */ - def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else { - val until = if (end >= length) length else end + 1 - val ctx = new DefaultSignalling with AtomicIndexFlag - ctx.setIndexFlag(Int.MinValue) - tasksupport.executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx)) - } - - def reverse: Repr = { - tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport }) - } - - def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult( - new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport } - ) - } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result }) - } otherwise seq.reverseMap(f)(bf2seq(bf))*/ - - /** Tests 
whether this $coll contains the given sequence at a given index. - * - * $abortsignalling - * - * @tparam S the element type of `that` parallel sequence - * @param that the parallel sequence this sequence is being searched for - * @param offset the starting offset for the search - * @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise - */ - def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat => - if (offset < 0 || offset >= length) offset == length && pthat.length == 0 - else if (pthat.length == 0) true - else if (pthat.length > length - offset) false - else { - val ctx = new DefaultSignalling with VolatileAbort - tasksupport.executeAndWaitResult( - new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter) - ) - } - } otherwise seq.startsWith(that, offset) - - override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat => - val ctx = new DefaultSignalling with VolatileAbort - length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter)) - } otherwise seq.sameElements(that) - - /** Tests whether this $coll ends with the given parallel sequence. 
- * - * $abortsignalling - * - * @tparam S the type of the elements of `that` sequence - * @param that the sequence to test - * @return `true` if this $coll has `that` as a suffix, `false` otherwise - */ - def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat => - if (that.length == 0) true - else if (that.length > length) false - else { - val ctx = new DefaultSignalling with VolatileAbort - val tlen = that.length - tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter)) - } - } otherwise seq.endsWith(that) - - def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - val realreplaced = replaced min (length - from) - if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) { - val that = patch.asParSeq - val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced) - val cfactory = combinerFactory(() => bf(repr).asCombiner) - val copystart = new Copy[U, That](cfactory, pits(0)) - val copymiddle = wrap { - val tsk = new that.Copy[U, That](cfactory, that.splitter) - tasksupport.executeAndWaitResult(tsk) - } - val copyend = new Copy[U, That](cfactory, pits(2)) - tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { - _.resultWithTaskSupport - }) - } else patch_sequential(from, patch.seq, replaced) - } - - private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - val from = 0 max fromarg - val b = bf(repr) - val repl = (r min (length - from)) max 0 - val pits = splitter.psplitWithSignalling(from, repl, length - from - repl) - b ++= pits(0) - b ++= patch - b ++= pits(2) - setTaskSupport(b.result(), tasksupport) - } - - def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): 
That = if (bf(repr).isCombiner) { - tasksupport.executeAndWaitResult( - new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport) - /*bf ifParallel { pbf => - tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result }) - } otherwise seq.updated(index, elem)(bf2seq(bf))*/ - - def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - patch(0, mutable.ParArray(elem), 0) - } - - def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = { - patch(length, mutable.ParArray(elem), 0) - } - - def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) { - patch(length, new immutable.Repetition(elem, len - length), 0) - } else patch(length, Nil, 0) - - override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { - val thatseq = that.asParSeq - tasksupport.executeAndWaitResult( - new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { - _.resultWithTaskSupport - } - ) - } else super.zip(that)(bf) - - /** Tests whether every element of this $coll relates to the - * corresponding element of another parallel sequence by satisfying a test predicate. 
- * - * $abortsignalling - * - * @param that the other parallel sequence - * @param p the test predicate, which relates elements from both sequences - * @tparam S the type of the elements of `that` - * @return `true` if both parallel sequences have the same length and - * `p(x, y)` is `true` for all corresponding elements `x` of this $coll - * and `y` of `that`, otherwise `false` - */ - def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat => - val ctx = new DefaultSignalling with VolatileAbort - length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter)) - } otherwise seq.corresponds(that)(p) - - def diff[U >: T](that: GenSeq[U]): Repr = sequentially { - _ diff that - } - - /** Computes the multiset intersection between this $coll and another sequence. - * - * @param that the sequence of elements to intersect with. - * @tparam U the element type of `that` parallel sequence - * @return a new collection of type `That` which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - * - * @usecase def intersect(that: Seq[T]): $Coll[T] - * @inheritdoc - * - * $mayNotTerminateInf - * - * @return a new $coll which contains all elements of this $coll - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[U >: T](that: GenSeq[U]) = sequentially { - _ intersect that - } - - /** Builds a new $coll from this $coll without any duplicate elements. - * $willNotTerminateInf - * - * @return A new $coll which contains the first occurrence of every element of this $coll. 
- */ - def distinct: Repr = sequentially { - _.distinct - } - - override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - - override def toSeq = this.asInstanceOf[ParSeq[T]] - - @deprecated("use .seq.view", "2.11.0") - override def view = seq.view - - /* tasks */ - - protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]] - - protected trait Accessor[R, Tp] extends super.Accessor[R, Tp] { - protected[this] val pit: SeqSplitter[T] - } - - protected trait Transformer[R, Tp] extends Accessor[R, Tp] with super.Transformer[R, Tp] - - protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) - extends Accessor[(Int, Boolean), SegmentLength] { - @volatile var result: (Int, Boolean) = null - def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) { - val itsize = pit.remaining - val seglen = pit.prefixLength(pred) - result = (seglen, itsize == seglen) - if (!result._2) pit.setIndexFlagIfLesser(from) - } else result = (0, false) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p) - } - override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2) - override def requiresStrictSplitters = true - } - - protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T]) - extends Accessor[Int, IndexWhere] { - @volatile var result: Int = -1 - def leaf(prev: Option[Int]) = if (from < pit.indexFlag) { - val r = pit.indexWhere(pred) - if (r != -1) { - result = from + r - pit.setIndexFlagIfLesser(from) - } - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) 
<- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p) - } - override def merge(that: IndexWhere) = result = if (result == -1) that.result else { - if (that.result != -1) result min that.result else result - } - override def requiresStrictSplitters = true - } - - protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: SeqSplitter[T]) - extends Accessor[Int, LastIndexWhere] { - @volatile var result: Int = -1 - def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) { - val r = pit.lastIndexWhere(pred) - if (r != -1) { - result = pos + r - pit.setIndexFlagIfGreater(pos) - } - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p) - } - override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else { - if (that.result != -1) result max that.result else result - } - override def requiresStrictSplitters = true - } - - protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: SeqSplitter[T]) - extends Transformer[Combiner[U, This], Reverse[U, This]] { - @volatile var result: Combiner[U, This] = null - def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf())) - protected[this] def newSubtask(p: SuperParIterator) = new Reverse(cbf, down(p)) - override def merge(that: Reverse[U, This]) = result = that.result combine result - } - - protected[this] class ReverseMap[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: SeqSplitter[T]) - extends Transformer[Combiner[S, That], ReverseMap[S, That]] { - @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf()) - protected[this] def newSubtask(p: 
SuperParIterator) = new ReverseMap(f, pbf, down(p)) - override def merge(that: ReverseMap[S, That]) = result = that.result combine result - } - - protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U]) - extends Accessor[Boolean, SameElements[U]] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { - result = pit.sameElements(otherpit) - if (!result) pit.abort() - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = pit.remaining / 2 - val sp = pit.remaining - fp - for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op) - } - override def merge(that: SameElements[U]) = result = result && that.result - override def requiresStrictSplitters = true - } - - protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CombinerFactory[U, That], protected[this] val pit: SeqSplitter[T]) - extends Transformer[Combiner[U, That], Updated[U, That]] { - @volatile var result: Combiner[U, That] = null - def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf()) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val pits = pit.splitWithSignalling - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p) - } - override def merge(that: Updated[U, That]) = result = result combine that.result - override def requiresStrictSplitters = true - } - - protected[this] class Zip[U >: T, S, That](len: Int, cf: CombinerFactory[(U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) - extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] { - @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, 
That](otherpit, cf()) - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = len / 2 - val sp = len - len / 2 - val pits = pit.psplitWithSignalling(fp, sp) - val opits = otherpit.psplitWithSignalling(fp, sp) - Seq( - new Zip(fp, cf, pits(0), opits(0)), - new Zip(sp, cf, pits(1), opits(1)) - ) - } - override def merge(that: Zip[U, S, That]) = result = result combine that.result - } - - protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) - extends Accessor[Boolean, Corresponds[S]] { - @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { - result = pit.corresponds(corr)(otherpit) - if (!result) pit.abort() - } - protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException - override def split = { - val fp = pit.remaining / 2 - val sp = pit.remaining - fp - for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op) - } - override def merge(that: Corresponds[S]) = result = result && that.result - override def requiresStrictSplitters = true - } -} diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala deleted file mode 100644 index a49561cf1fd2..000000000000 --- a/src/library/scala/collection/parallel/ParSet.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel - -import scala.collection.generic._ - -/** A template trait for parallel sets. 
- * - * $sideeffects - * - * @tparam T the element type of the set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSet[T] - extends GenSet[T] - with GenericParTemplate[T, ParSet] - with ParIterable[T] - with ParSetLike[T, ParSet[T], Set[T]] -{ self => - - override def empty: ParSet[T] = mutable.ParHashSet[T]() - - //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T] - - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - - override def stringPrefix = "ParSet" -} - -object ParSet extends ParSetFactory[ParSet] { - def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] -} diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala deleted file mode 100644 index 24568bdefb7e..000000000000 --- a/src/library/scala/collection/parallel/ParSetLike.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.SetLike -import scala.collection.GenSetLike -import scala.collection.GenSet -import scala.collection.Set - -/** A template trait for parallel sets. This trait is mixed in with concrete - * parallel sets to override the representation type. 
- * - * $sideeffects - * - * @tparam T the element type of the set - * @define Coll `ParSet` - * @define coll parallel set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: Set[T] with SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with ParIterableLike[T, Repr, Sequential] -{ self => - - def empty: Repr - - // note: should not override toSet (could be mutable) - - def union(that: GenSet[T]): Repr = sequentially { - _ union that - } - - def diff(that: GenSet[T]): Repr = sequentially { - _ diff that - } -} diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala deleted file mode 100644 index b87389f239ab..000000000000 --- a/src/library/scala/collection/parallel/PreciseSplitter.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.Seq - -/** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters - * that traverse disjoint subsets of arbitrary sizes. - * - * Implementors might want to override the parameterless `split` method for efficiency. - * - * @tparam T type of the elements this splitter traverses - * - * @since 2.9 - * @author Aleksandar Prokopec - */ -trait PreciseSplitter[+T] extends Splitter[T] { - - /** Splits the splitter into disjunct views. - * - * This overloaded version of the `split` method is specific to precise splitters. - * It returns a sequence of splitters, each iterating some subset of the - * elements in this splitter. 
The sizes of the subsplitters in the partition is equal to - * the size in the corresponding argument, as long as there are enough elements in this - * splitter to split it that way. - * - * If there aren't enough elements, a zero element splitter is appended for each additional argument. - * If there are additional elements, an additional splitter is appended at the end to compensate. - * - * For example, say we have a splitter `ps` with 100 elements. Invoking: - * {{{ - * ps.split(50, 25, 25, 10, 5) - * }}} - * will return a sequence of five splitters, last two views being empty. On the other hand, calling: - * {{{ - * ps.split(50, 40) - * }}} - * will return a sequence of three splitters, last of them containing ten elements. - * - * '''Note:''' this method actually invalidates the current splitter. - * - * Unlike the case with `split` found in splitters, views returned by this method can be empty. - * - * @param sizes the sizes used to split this split iterator into iterators that traverse disjunct subsets - * @return a sequence of disjunct subsequence iterators of this parallel iterator - */ - def psplit(sizes: Int*): Seq[PreciseSplitter[T]] - - def split: Seq[PreciseSplitter[T]] -} diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala deleted file mode 100644 index d02a9a5974b9..000000000000 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ /dev/null @@ -1,681 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel - -import scala.collection.generic.Signalling -import scala.collection.generic.DelegatedSignalling -import scala.collection.generic.IdleSignalling -import scala.collection.mutable.Builder -import scala.collection.GenTraversableOnce -import scala.collection.parallel.immutable.repetition - -private[collection] trait RemainsIterator[+T] extends Iterator[T] { - /** The number of elements this iterator has yet to iterate. - * This method doesn't change the state of the iterator. - */ - def remaining: Int - - /** For most collections, this is a cheap operation. - * Exceptions can override this method. - */ - def isRemainingCheap = true -} - -/** Augments iterators with additional methods, mostly transformers, - * assuming they iterate an iterable collection. - * - * @tparam T type of the elements iterated. - */ -private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[T] { - - /* accessors */ - - override def count(p: T => Boolean): Int = { - var i = 0 - while (hasNext) if (p(next())) i += 1 - i - } - - override def reduce[U >: T](op: (U, U) => U): U = { - var r: U = next() - while (hasNext) r = op(r, next()) - r - } - - override def fold[U >: T](z: U)(op: (U, U) => U): U = { - var r = z - while (hasNext) r = op(r, next()) - r - } - - override def sum[U >: T](implicit num: Numeric[U]): U = { - var r: U = num.zero - while (hasNext) r = num.plus(r, next()) - r - } - - override def product[U >: T](implicit num: Numeric[U]): U = { - var r: U = num.one - while (hasNext) r = num.times(r, next()) - r - } - - override def min[U >: T](implicit ord: Ordering[U]): T = { - var r = next() - while (hasNext) { - val curr = next() - if (ord.lteq(curr, r)) r = curr - } - r - } - - override def max[U >: T](implicit ord: Ordering[U]): T = { - var r = next() - while (hasNext) { - val curr = next() - if (ord.gteq(curr, r)) r = curr - } - r - } - - override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) { 
- var i = from - val until = from + len - while (i < until && hasNext) { - array(i) = next() - i += 1 - } - } - - def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = { - var i = howmany - 1 - var u: U = next() - while (i > 0 && hasNext) { - u = op(u, next()) - i -= 1 - } - u - } - - /* transformers to combiners */ - - def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(repr) - if (isRemainingCheap) cb.sizeHint(remaining) - while (hasNext) cb += f(next()) - cb - } - - def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(repr) - val runWith = pf.runWith(cb += _) - while (hasNext) { - val curr = next() - runWith(curr) - } - cb - } - - def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(repr) - while (hasNext) { - val traversable = f(next()).seq - if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator - else cb ++= traversable - } - cb - } - - def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = { - if (isRemainingCheap) b.sizeHint(remaining) - while (hasNext) b += next - b - } - - def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { - while (hasNext) { - val curr = next() - if (pred(curr)) cb += curr - } - cb - } - - def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { - while (hasNext) { - val curr = next() - if (!pred(curr)) cb += curr - } - cb - } - - def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { - while (hasNext) { - val curr = next() - if (pred(curr)) btrue += curr - else bfalse += curr - } - (btrue, bfalse) - } - - def take2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = { - cb.sizeHint(n) - var left = n - while (left > 0) 
{ - cb += next - left -= 1 - } - cb - } - - def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = { - drop(n) - if (isRemainingCheap) cb.sizeHint(remaining) - while (hasNext) cb += next - cb - } - - def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = { - drop(from) - var left = scala.math.max(until - from, 0) - cb.sizeHint(left) - while (left > 0) { - cb += next - left -= 1 - } - cb - } - - def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = { - before.sizeHint(at) - if (isRemainingCheap) after.sizeHint(remaining - at) - var left = at - while (left > 0) { - before += next - left -= 1 - } - while (hasNext) after += next - (before, after) - } - - def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = { - var loop = true - while (hasNext && loop) { - val curr = next() - if (p(curr)) cb += curr - else loop = false - } - (cb, loop) - } - - def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = { - var isBefore = true - while (hasNext && isBefore) { - val curr = next() - if (p(curr)) before += curr - else { - if (isRemainingCheap) after.sizeHint(remaining + 1) - after += curr - isBefore = false - } - } - while (hasNext) after += next - (before, after) - } - - def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, array: Array[A], from: Int) { - var last = z - var i = from - while (hasNext) { - last = op(last, next()) - array(i) = last - i += 1 - } - } - - def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = { - var curr = startValue - while (hasNext) { - curr = op(curr, next()) - cb += curr - } - cb - } - - def scanToCombiner[U >: T, That](howmany: Int, startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = { - var curr = startValue - var left = howmany - while (left > 0) { - curr = op(curr, next()) - cb += curr - left -= 1 - } - cb - } 
- - def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining) - while (hasNext && otherpit.hasNext) { - cb += ((next(), otherpit.next())) - } - cb - } - - def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining) - while (this.hasNext && that.hasNext) cb += ((this.next(), that.next())) - while (this.hasNext) cb += ((this.next(), thatelem)) - while (that.hasNext) cb += ((thiselem, that.next())) - cb - } - -} - - -private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] { - - /** The exact number of elements this iterator has yet to iterate. - * This method doesn't change the state of the iterator. - */ - def remaining: Int - - /* accessors */ - - def prefixLength(pred: T => Boolean): Int = { - var total = 0 - var loop = true - while (hasNext && loop) { - if (pred(next())) total += 1 - else loop = false - } - total - } - - override def indexWhere(pred: T => Boolean): Int = { - var i = 0 - var loop = true - while (hasNext && loop) { - if (pred(next())) loop = false - else i += 1 - } - if (loop) -1 else i - } - - def lastIndexWhere(pred: T => Boolean): Int = { - var pos = -1 - var i = 0 - while (hasNext) { - if (pred(next())) pos = i - i += 1 - } - pos - } - - def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = { - while (hasNext && that.hasNext) { - if (!corr(next(), that.next())) return false - } - hasNext == that.hasNext - } - - /* transformers */ - - def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - if (isRemainingCheap) cb.sizeHint(remaining) - var lst = List[T]() - while (hasNext) lst ::= next - while (lst != Nil) { - cb += lst.head - lst = 
lst.tail - } - cb - } - - def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = cbf(repr) - if (isRemainingCheap) cb.sizeHint(remaining) - var lst = List[S]() - while (hasNext) lst ::= f(next()) - while (lst != Nil) { - cb += lst.head - lst = lst.tail - } - cb - } - - def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = { - //val cb = cbf(repr) - if (isRemainingCheap) cb.sizeHint(remaining) - var j = 0 - while (hasNext) { - if (j == index) { - cb += elem - next() - } else cb += next - j += 1 - } - cb - } - -} - - -/** Parallel iterators allow splitting and provide a `remaining` method to - * obtain the number of elements remaining in the iterator. - * - * @tparam T type of the elements iterated. - */ -trait IterableSplitter[+T] -extends AugmentedIterableIterator[T] - with Splitter[T] - with Signalling - with DelegatedSignalling -{ -self => - - var signalDelegate: Signalling = IdleSignalling - - /** Creates a copy of this iterator. */ - def dup: IterableSplitter[T] - - def split: Seq[IterableSplitter[T]] - - def splitWithSignalling: Seq[IterableSplitter[T]] = { - val pits = split - pits foreach { _.signalDelegate = signalDelegate } - pits - } - - def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel) - - /** The number of elements this iterator has yet to traverse. This method - * doesn't change the state of the iterator. - * - * This method is used to provide size hints to builders and combiners, and - * to approximate positions of iterators within a data structure. - * - * '''Note''': This method may be implemented to return an upper bound on the number of elements - * in the iterator, instead of the exact number of elements to iterate. - * Parallel collections which have such iterators are called non-strict-splitter collections. 
- * - * In that case, 2 considerations must be taken into account: - * - * 1) classes that inherit `ParIterable` must reimplement methods `take`, `drop`, `slice`, `splitAt`, `copyToArray` - * and all others using this information. - * - * 2) if an iterator provides an upper bound on the number of elements, then after splitting the sum - * of `remaining` values of split iterators must be less than or equal to this upper bound. - */ - def remaining: Int - - protected def buildString(closure: (String => Unit) => Unit): String = { - var output = "" - def appendln(s: String) = output += s + "\n" - closure(appendln) - output - } - - private[parallel] def debugInformation = { - // can be overridden in subclasses - "Parallel iterator: " + this.getClass - } - - /* iterator transformers */ - - class Taken(taken: Int) extends IterableSplitter[T] { - var remaining = taken min self.remaining - def hasNext = remaining > 0 - def next = { remaining -= 1; self.next() } - def dup: IterableSplitter[T] = self.dup.take(taken) - def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) } - protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = { - val sizes = sq.scanLeft(0)(_ + _.remaining) - val shortened = for ((it, (from, until)) <- sq zip (sizes.init zip sizes.tail)) yield - if (until < remaining) it else taker(it, remaining - from) - shortened filter { _.remaining > 0 } - } - } - /** To lower "virtual class" boilerplate tax, implement creation - * in method and override this method in the subclass. - */ - private[collection] def newTaken(until: Int): Taken = new Taken(until) - private[collection] def newSliceInternal[U <: Taken](it: U, from1: Int): U = { - var count = from1 - while (count > 0 && it.hasNext) { - it.next - count -= 1 - } - it - } - /** Drop implemented as simple eager consumption. 
*/ - override def drop(n: Int): IterableSplitter[T] = { - var i = 0 - while (i < n && hasNext) { - next() - i += 1 - } - this - } - override def take(n: Int): IterableSplitter[T] = newTaken(n) - override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1) - - class Mapped[S](f: T => S) extends IterableSplitter[S] { - signalDelegate = self.signalDelegate - def hasNext = self.hasNext - def next = f(self.next()) - def remaining = self.remaining - def dup: IterableSplitter[S] = self.dup map f - def split: Seq[IterableSplitter[S]] = self.split.map { _ map f } - } - - override def map[S](f: T => S) = new Mapped(f) - - class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] { - signalDelegate = self.signalDelegate - protected var curr: IterableSplitter[U] = self - def hasNext = if (curr.hasNext) true else if (curr eq self) { - curr = that - curr.hasNext - } else false - def next = if (curr eq self) { - hasNext - curr.next() - } else curr.next() - def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining - protected def firstNonEmpty = (curr eq self) && curr.hasNext - def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that) - def split: Seq[IterableSplitter[U]] = if (firstNonEmpty) Seq(curr, that) else curr.split - } - - def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that) - - class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] { - signalDelegate = self.signalDelegate - def hasNext = self.hasNext && that.hasNext - def next = (self.next(), that.next()) - def remaining = self.remaining min that.remaining - def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that) - def split: Seq[IterableSplitter[(T, S)]] = { - val selfs = self.split - val sizes = selfs.map(_.remaining) - val thats = that.psplit(sizes: _*) - (selfs zip thats) map { p => p._1 zipParSeq p._2 } - } - 
} - - def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that) - - class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S) - extends IterableSplitter[(U, S)] { - signalDelegate = self.signalDelegate - def hasNext = self.hasNext || that.hasNext - def next = if (self.hasNext) { - if (that.hasNext) (self.next(), that.next()) - else (self.next(), thatelem) - } else (thiselem, that.next()) - - def remaining = self.remaining max that.remaining - def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem) - def split: Seq[IterableSplitter[(U, S)]] = { - val selfrem = self.remaining - val thatrem = that.remaining - val thisit = if (selfrem < thatrem) self.appendParIterable[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self - val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that - val zipped = thisit zipParSeq thatit - zipped.split - } - } - - def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem) -} - -/** Parallel sequence iterators allow splitting into arbitrary subsets. - * - * @tparam T type of the elements iterated. - */ -trait SeqSplitter[+T] -extends IterableSplitter[T] - with AugmentedSeqIterator[T] - with PreciseSplitter[T] -{ -self => - def dup: SeqSplitter[T] - def split: Seq[SeqSplitter[T]] - def psplit(sizes: Int*): Seq[SeqSplitter[T]] - - override def splitWithSignalling: Seq[SeqSplitter[T]] = { - val pits = split - pits foreach { _.signalDelegate = signalDelegate } - pits - } - - def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = { - val pits = psplit(sizes: _*) - pits foreach { _.signalDelegate = signalDelegate } - pits - } - - /** The number of elements this iterator has yet to traverse. This method - * doesn't change the state of the iterator. 
Unlike the version of this method in the supertrait, - * method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number - * of elements remaining in the iterator. - * - * @return an exact number of elements this iterator has yet to iterate - */ - def remaining: Int - - /* iterator transformers */ - - class Taken(tk: Int) extends super.Taken(tk) with SeqSplitter[T] { - override def dup = super.dup.asInstanceOf[SeqSplitter[T]] - override def split: Seq[SeqSplitter[T]] = super.split.asInstanceOf[Seq[SeqSplitter[T]]] - def psplit(sizes: Int*): Seq[SeqSplitter[T]] = takeSeq(self.psplit(sizes: _*)) { (p, n) => p.take(n) } - } - override private[collection] def newTaken(until: Int): Taken = new Taken(until) - override def take(n: Int): SeqSplitter[T] = newTaken(n) - override def slice(from1: Int, until1: Int): SeqSplitter[T] = newSliceInternal(newTaken(until1), from1) - - class Mapped[S](f: T => S) extends super.Mapped[S](f) with SeqSplitter[S] { - override def dup = super.dup.asInstanceOf[SeqSplitter[S]] - override def split: Seq[SeqSplitter[S]] = super.split.asInstanceOf[Seq[SeqSplitter[S]]] - def psplit(sizes: Int*): Seq[SeqSplitter[S]] = self.psplit(sizes: _*).map { _ map f } - } - - override def map[S](f: T => S) = new Mapped(f) - - class Appended[U >: T, PI <: SeqSplitter[U]](it: PI) extends super.Appended[U, PI](it) with SeqSplitter[U] { - override def dup = super.dup.asInstanceOf[SeqSplitter[U]] - override def split: Seq[SeqSplitter[U]] = super.split.asInstanceOf[Seq[SeqSplitter[U]]] - def psplit(sizes: Int*): Seq[SeqSplitter[U]] = if (firstNonEmpty) { - val selfrem = self.remaining - - // split sizes - var appendMiddle = false - val szcum = sizes.scanLeft(0)(_ + _) - val splitsizes = sizes.zip(szcum.init zip szcum.tail).flatMap { t => - val (sz, (from, until)) = t - if (from < selfrem && until > selfrem) { - appendMiddle = true - Seq(selfrem - from, until - selfrem) - } else Seq(sz) - } - val (selfszfrom, thatszfrom) = 
splitsizes.zip(szcum.init).span(_._2 < selfrem) - val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 }) - - // split iterators - val selfs = self.psplit(selfsizes: _*) - val thats = that.psplit(thatsizes: _*) - - // appended last in self with first in rest if necessary - if (appendMiddle) selfs.init ++ Seq(selfs.last.appendParSeq[U, SeqSplitter[U]](thats.head)) ++ thats.tail - else selfs ++ thats - } else curr.asInstanceOf[SeqSplitter[U]].psplit(sizes: _*) - } - - def appendParSeq[U >: T, PI <: SeqSplitter[U]](that: PI) = new Appended[U, PI](that) - - class Zipped[S](ti: SeqSplitter[S]) extends super.Zipped[S](ti) with SeqSplitter[(T, S)] { - override def dup = super.dup.asInstanceOf[SeqSplitter[(T, S)]] - override def split: Seq[SeqSplitter[(T, S)]] = super.split.asInstanceOf[Seq[SeqSplitter[(T, S)]]] - def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => p._1 zipParSeq p._2 } - } - - override def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that) - - class ZippedAll[U >: T, S](ti: SeqSplitter[S], thise: U, thate: S) extends super.ZippedAll[U, S](ti, thise, thate) with SeqSplitter[(U, S)] { - override def dup = super.dup.asInstanceOf[SeqSplitter[(U, S)]] - private def patchem = { - val selfrem = self.remaining - val thatrem = that.remaining - val thisit = if (selfrem < thatrem) self.appendParSeq[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self - val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that - (thisit, thatit) - } - override def split: Seq[SeqSplitter[(U, S)]] = { - val (thisit, thatit) = patchem - val zipped = thisit zipParSeq thatit - zipped.split - } - def psplit(sizes: Int*): Seq[SeqSplitter[(U, S)]] = { - val (thisit, thatit) = patchem - val zipped = thisit zipParSeq thatit - zipped.psplit(sizes: _*) - } - } - - override def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) 
= new ZippedAll[U, R](that, thisElem, thatElem) - - def reverse: SeqSplitter[T] = { - val pa = mutable.ParArray.fromTraversables(self).reverse - new pa.ParArrayIterator { - override def reverse = self - } - } - - class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] { - signalDelegate = self.signalDelegate - private[this] val trio = { - val pits = self.psplit(from, replaced, self.remaining - from - replaced) - (pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2) - } - def hasNext = trio.hasNext - def next = trio.next - def remaining = trio.remaining - def dup = self.dup.patchParSeq(from, patch, replaced) - def split = trio.split - def psplit(sizes: Int*) = trio.psplit(sizes: _*) - } - - def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced) - -} diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala deleted file mode 100644 index 28e3e524a3aa..000000000000 --- a/src/library/scala/collection/parallel/Splitter.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import scala.collection.{ Seq, Iterator } - -/** A splitter (or a split iterator) can be split into more splitters that traverse over - * disjoint subsets of elements. - * - * @tparam T type of the elements this splitter traverses - * - * @since 2.9 - * @author Aleksandar Prokopec - */ -trait Splitter[+T] extends Iterator[T] { - - /** Splits the iterator into a sequence of disjunct views. 
- * - * Returns a sequence of split iterators, each iterating over some subset of the - * elements in the collection. These subsets are disjoint and should be approximately - * equal in size. These subsets are not empty, unless the iterator is empty in which - * case this method returns a sequence with a single empty iterator. If the splitter has - * more than two elements, this method will return two or more splitters. - * - * Implementors are advised to keep this partition relatively small - two splitters are - * already enough when partitioning the collection, although there may be a few more. - * - * '''Note:''' this method actually invalidates the current splitter. - * - * @return a sequence of disjunct iterators of the collection - */ - def split: Seq[Splitter[T]] - /* - * '''Note:''' splitters in this sequence may actually be empty and it can contain a splitter - * which iterates over the same elements as the original splitter AS LONG AS calling `split` - * a finite number of times on the resulting splitters eventually returns a nontrivial partition. - * - * Note that the docs contract above yields implementations which are a subset of implementations - * defined by this fineprint. - * - * The rationale behind this is best given by the following example: - * try splitting an iterator over a linear hash table. - */ -} - -object Splitter { - def empty[T]: Splitter[T] = new Splitter[T] { - def hasNext = false - def next = Iterator.empty.next() - def split = Seq(this) - } -} diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala deleted file mode 100644 index 90907f176d1b..000000000000 --- a/src/library/scala/collection/parallel/TaskSupport.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel - -import java.util.concurrent.ThreadPoolExecutor -import java.util.concurrent.ForkJoinPool -import scala.concurrent.ExecutionContext - -/** A trait implementing the scheduling of a parallel collection operation. - * - * Parallel collections are modular in the way operations are scheduled. Each - * parallel collection is parameterized with a task support object which is - * responsible for scheduling and load-balancing tasks to processors. - * - * A task support object can be changed in a parallel collection after it has - * been created, but only during a quiescent period, i.e. while there are no - * concurrent invocations to parallel collection methods. - * - * There are currently a few task support implementations available for - * parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]] - * uses a fork-join pool internally. - * - * The [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the - * default execution context implementation found in scala.concurrent, and it - * reuses the thread pool used in scala.concurrent. - * - * The execution context task support is set to each parallel collection by - * default, so parallel collections reuse the same fork-join pool as the - * future API. - * - * Here is a way to change the task support of a parallel collection: - * - * {{{ - * import scala.collection.parallel._ - * val pc = mutable.ParArray(1, 2, 3) - * pc.tasksupport = new ForkJoinTaskSupport( - * new java.util.concurrent.ForkJoinPool(2)) - * }}} - * - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section - * on the parallel collection's guide for more information. - */ -trait TaskSupport extends Tasks - -/** A task support that uses a fork join pool to schedule tasks. 
- * - * @see [[scala.collection.parallel.TaskSupport]] for more information. - */ -class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool) -extends TaskSupport with AdaptiveWorkStealingForkJoinTasks - -/** A task support that uses a thread pool executor to schedule tasks. - * - * @see [[scala.collection.parallel.TaskSupport]] for more information. - */ -@deprecated("use `ForkJoinTaskSupport` instead", "2.11.0") -class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool) -extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks - -/** A task support that uses an execution context to schedule tasks. - * - * It can be used with the default execution context implementation in the - * `scala.concurrent` package. It internally forwards the call to either a - * forkjoin based task support or a thread pool executor one, depending on - * what the execution context uses. - * - * By default, parallel collections are parameterized with this task support - * object, so parallel collections share the same execution context backend - * as the rest of the `scala.concurrent` package. - * - * @see [[scala.collection.parallel.TaskSupport]] for more information. - */ -class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global) -extends TaskSupport with ExecutionContextTasks diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala deleted file mode 100644 index 61482f7a8d3d..000000000000 --- a/src/library/scala/collection/parallel/Tasks.scala +++ /dev/null @@ -1,558 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel - -import java.util.concurrent.ThreadPoolExecutor -import java.util.concurrent.{ForkJoinPool, RecursiveAction, ForkJoinWorkerThread} -import scala.concurrent.ExecutionContext -import scala.util.control.Breaks._ -import scala.annotation.unchecked.uncheckedVariance - -trait Task[R, +Tp] { - type Result = R - - def repr = this.asInstanceOf[Tp] - - /** Body of the task - non-divisible unit of work done by this task. - * Optionally is provided with the result from the previous completed task - * or `None` if there was no previous task (or the previous task is uncompleted or unknown). - */ - def leaf(result: Option[R]) - - /** A result that can be accessed once the task is completed. */ - var result: R - - /** Decides whether or not this task should be split further. */ - def shouldSplitFurther: Boolean - - /** Splits this task into a list of smaller tasks. */ - private[parallel] def split: Seq[Task[R, Tp]] - - /** Read of results of `that` task and merge them into results of this one. 
*/ - private[parallel] def merge(that: Tp @uncheckedVariance) {} - - // exception handling mechanism - @volatile var throwable: Throwable = null - def forwardThrowable() = if (throwable != null) throw throwable - - // tries to do the leaf computation, storing the possible exception - private[parallel] def tryLeaf(lastres: Option[R]) { - try { - tryBreakable { - leaf(lastres) - result = result // ensure that effects of `leaf` are visible to readers of `result` - } catchBreak { - signalAbort() - } - } catch { - case thr: Throwable => - result = result // ensure that effects of `leaf` are visible - throwable = thr - signalAbort() - } - } - - private[parallel] def tryMerge(t: Tp @uncheckedVariance) { - val that = t.asInstanceOf[Task[R, Tp]] - if (this.throwable == null && that.throwable == null) merge(t) - mergeThrowables(that) - } - - private[parallel] def mergeThrowables(that: Task[_, _]) { - if (this.throwable != null && that.throwable != null) - this.throwable.addSuppressed(that.throwable) - else if (this.throwable == null && that.throwable != null) - this.throwable = that.throwable - } - - // override in concrete task implementations to signal abort to other tasks - private[parallel] def signalAbort() {} -} - - -/** A trait that declares task execution capabilities used - * by parallel collections. - */ -trait Tasks { - - private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]() - - private[parallel] def debuglog(s: String) = synchronized { - debugMessages += s - } - - trait WrappedTask[R, +Tp] { - /** the body of this task - what it executes, how it gets split and how results are merged. */ - val body: Task[R, Tp] - - def split: Seq[WrappedTask[R, Tp]] - /** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */ - def compute() - /** Start task. */ - def start() - /** Wait for task to finish. */ - def sync() - /** Try to cancel the task. 
- * @return `true` if cancellation is successful. - */ - def tryCancel(): Boolean - /** If the task has been cancelled successfully, those syncing on it may - * automatically be notified, depending on the implementation. If they - * aren't, this release method should be called after processing the - * cancelled task. - * - * This method may be overridden. - */ - def release() {} - } - - /* task control */ - - /** The type of the environment is more specific in the implementations. */ - val environment: AnyRef - - /** Executes a task and returns a future. Forwards an exception if some task threw it. */ - def execute[R, Tp](fjtask: Task[R, Tp]): () => R - - /** Executes a result task, waits for it to finish, then returns its result. Forwards an exception if some task threw it. */ - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R - - /** Retrieves the parallelism level of the task execution environment. */ - def parallelismLevel: Int - -} - - - -/** This trait implements scheduling by employing - * an adaptive work stealing technique. 
- */ -trait AdaptiveWorkStealingTasks extends Tasks { - - trait WrappedTask[R, Tp] extends super.WrappedTask[R, Tp] { - @volatile var next: WrappedTask[R, Tp] = null - @volatile var shouldWaitFor = true - - def split: Seq[WrappedTask[R, Tp]] - - def compute() = if (body.shouldSplitFurther) { - internal() - release() - } else { - body.tryLeaf(None) - release() - } - - def internal() = { - var last = spawnSubtasks() - - last.body.tryLeaf(None) - last.release() - body.result = last.body.result - body.throwable = last.body.throwable - - while (last.next != null) { - // val lastresult = Option(last.body.result) - last = last.next - if (last.tryCancel()) { - // println("Done with " + beforelast.body + ", next direct is " + last.body) - last.body.tryLeaf(Some(body.result)) - last.release() - } else { - // println("Done with " + beforelast.body + ", next sync is " + last.body) - last.sync() - } - // println("Merging " + body + " with " + last.body) - body.tryMerge(last.body.repr) - } - } - - def spawnSubtasks() = { - var last: WrappedTask[R, Tp] = null - var head: WrappedTask[R, Tp] = this - do { - val subtasks = head.split - head = subtasks.head - for (t <- subtasks.tail.reverse) { - t.next = last - last = t - t.start() - } - } while (head.body.shouldSplitFurther) - head.next = last - head - } - - def printChain() = { - var curr = this - var chain = "chain: " - while (curr != null) { - chain += curr + " ---> " - curr = curr.next - } - println(chain) - } - } - - // specialize ctor - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp] - -} - - -/** An implementation of tasks objects based on the Java thread pooling API. 
*/ -@deprecated("use `ForkJoinTasks` instead", "2.11.0") -trait ThreadPoolTasks extends Tasks { - import java.util.concurrent._ - - trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] { - // initially, this is null - // once the task is started, this future is set and used for `sync` - // utb: var future: Future[_] = null - @volatile var owned = false - @volatile var completed = false - - def start() = synchronized { - // debuglog("Starting " + body) - // utb: future = executor.submit(this) - executor.synchronized { - incrTasks() - executor.submit(this) - } - } - def sync() = synchronized { - // debuglog("Syncing on " + body) - // utb: future.get() - executor.synchronized { - val coresize = executor.getCorePoolSize - if (coresize < totaltasks) { - executor.setCorePoolSize(coresize + 1) - //assert(executor.getCorePoolSize == (coresize + 1)) - } - } - while (!completed) this.wait - } - def tryCancel() = synchronized { - // utb: future.cancel(false) - if (!owned) { - // debuglog("Cancelling " + body) - owned = true - true - } else false - } - def run() = { - // utb: compute - var isOkToRun = false - synchronized { - if (!owned) { - owned = true - isOkToRun = true - } - } - if (isOkToRun) { - // debuglog("Running body of " + body) - compute() - } else { - // just skip - // debuglog("skipping body of " + body) - } - } - override def release() = synchronized { - //println("releasing: " + this + ", body: " + this.body) - completed = true - executor.synchronized { - decrTasks() - } - this.notifyAll - } - } - - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp] - - val environment: ThreadPoolExecutor - def executor = environment.asInstanceOf[ThreadPoolExecutor] - def queue = executor.getQueue.asInstanceOf[LinkedBlockingQueue[Runnable]] - @volatile var totaltasks = 0 - - private def incrTasks() = synchronized { - totaltasks += 1 - } - - private def decrTasks() = synchronized { - totaltasks -= 1 - } - - def execute[R, Tp](task: 
Task[R, Tp]): () => R = { - val t = newWrappedTask(task) - - // debuglog("-----------> Executing without wait: " + task) - t.start() - - () => { - t.sync() - t.body.forwardThrowable() - t.body.result - } - } - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - val t = newWrappedTask(task) - - // debuglog("-----------> Executing with wait: " + task) - t.start() - - t.sync() - t.body.forwardThrowable() - t.body.result - } - - def parallelismLevel = ThreadPoolTasks.numCores - -} - -@deprecated("use `ForkJoinTasks` instead", "2.11.0") -object ThreadPoolTasks { - import java.util.concurrent._ - - val numCores = Runtime.getRuntime.availableProcessors - - val tcount = new atomic.AtomicLong(0L) - - val defaultThreadPool = new ThreadPoolExecutor( - numCores, - Int.MaxValue, - 60L, TimeUnit.MILLISECONDS, - new LinkedBlockingQueue[Runnable], - new ThreadFactory { - def newThread(r: Runnable) = { - val t = new Thread(r) - t.setName("pc-thread-" + tcount.incrementAndGet) - t.setDaemon(true) - t - } - }, - new ThreadPoolExecutor.CallerRunsPolicy - ) -} - -object FutureThreadPoolTasks { - import java.util.concurrent._ - - val numCores = Runtime.getRuntime.availableProcessors - - val tcount = new atomic.AtomicLong(0L) - - val defaultThreadPool = Executors.newCachedThreadPool() -} - - - -/** - * A trait describing objects that provide a fork/join pool. - */ -trait HavingForkJoinPool { - def forkJoinPool: ForkJoinPool -} - - -/** An implementation trait for parallel tasks based on the fork/join framework. - * - * @define fjdispatch - * If the current thread is a fork/join worker thread, the task's `fork` method will - * be invoked. Otherwise, the task will be executed on the fork/join pool. 
- */ -trait ForkJoinTasks extends Tasks with HavingForkJoinPool { - - trait WrappedTask[R, +Tp] extends RecursiveAction with super.WrappedTask[R, Tp] { - def start() = fork - def sync() = join - def tryCancel = tryUnfork - } - - // specialize ctor - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp] - - /** The fork/join pool of this collection. - */ - def forkJoinPool: ForkJoinPool = environment.asInstanceOf[ForkJoinPool] - val environment: ForkJoinPool - - /** Executes a task and does not wait for it to finish - instead returns a future. - * - * $fjdispatch - */ - def execute[R, Tp](task: Task[R, Tp]): () => R = { - val fjtask = newWrappedTask(task) - - Thread.currentThread match { - case fjw: ForkJoinWorkerThread if fjw.getPool eq forkJoinPool => fjtask.fork() - case _ => forkJoinPool.execute(fjtask) - } - () => { - fjtask.sync() - fjtask.body.forwardThrowable() - fjtask.body.result - } - } - - /** Executes a task on a fork/join pool and waits for it to finish. - * Returns its result when it does. - * - * $fjdispatch - * - * @return the result of the task - */ - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - val fjtask = newWrappedTask(task) - - Thread.currentThread match { - case fjw: ForkJoinWorkerThread if fjw.getPool eq forkJoinPool => fjtask.fork() - case _ => forkJoinPool.execute(fjtask) - } - fjtask.sync() - // if (fjtask.body.throwable != null) println("throwing: " + fjtask.body.throwable + " at " + fjtask.body) - fjtask.body.forwardThrowable() - fjtask.body.result - } - - def parallelismLevel = forkJoinPool.getParallelism -} - -object ForkJoinTasks { - lazy val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool() -} - -/* Some boilerplate due to no deep mixin composition. Not sure if it can be done differently without them. 
- */ -trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks { - - class WrappedTask[R, Tp](val body: Task[R, Tp]) - extends super[ForkJoinTasks].WrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].WrappedTask[R, Tp] { - def split = body.split.map(b => newWrappedTask(b)) - } - - def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b) -} - -@deprecated("use `AdaptiveWorkStealingForkJoinTasks` instead", "2.11.0") -trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks { - - class WrappedTask[R, Tp](val body: Task[R, Tp]) - extends super[ThreadPoolTasks].WrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].WrappedTask[R, Tp] { - def split = body.split.map(b => newWrappedTask(b)) - } - - def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b) -} - -/** An implementation of the `Tasks` that uses Scala `Future`s to compute - * the work encapsulated in each task. - */ -private[parallel] final class FutureTasks(executor: ExecutionContext) extends Tasks { - import scala.concurrent._ - import scala.util._ - - private val maxdepth = (math.log(parallelismLevel) / math.log(2) + 1).toInt - - val environment: ExecutionContext = executor - - /** Divides this task into a lot of small tasks and executes them asynchronously - * using futures. - * Folds the futures and merges them asynchronously. - */ - private def exec[R, Tp](topLevelTask: Task[R, Tp]): Future[R] = { - implicit val ec = environment - - /** Constructs a tree of futures where tasks can be reasonably split. 
- */ - def compute(task: Task[R, Tp], depth: Int): Future[Task[R, Tp]] = { - if (task.shouldSplitFurther && depth < maxdepth) { - val subtasks = task.split - val subfutures = for (subtask <- subtasks.iterator) yield compute(subtask, depth + 1) - subfutures.reduceLeft { (firstFuture, nextFuture) => - for { - firstTask <- firstFuture - nextTask <- nextFuture - } yield { - firstTask tryMerge nextTask.repr - firstTask - } - } andThen { - case Success(firstTask) => - task.throwable = firstTask.throwable - task.result = firstTask.result - case Failure(exception) => - task.throwable = exception - } - } else Future { - task.tryLeaf(None) - task - } - } - - compute(topLevelTask, 0) map { t => - t.forwardThrowable() - t.result - } - } - - def execute[R, Tp](task: Task[R, Tp]): () => R = { - val future = exec(task) - val callback = () => { - Await.result(future, scala.concurrent.duration.Duration.Inf) - } - callback - } - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - execute(task)() - } - - def parallelismLevel = Runtime.getRuntime.availableProcessors -} - -/** This tasks implementation uses execution contexts to spawn a parallel computation. - * - * As an optimization, it internally checks whether the execution context is the - * standard implementation based on fork/join pools, and if it is, creates a - * `ForkJoinTaskSupport` that shares the same pool to forward its request to it. - * - * Otherwise, it uses an execution context exclusive `Tasks` implementation to - * divide the tasks into smaller chunks and execute operations on it. - */ -trait ExecutionContextTasks extends Tasks { - def executionContext = environment - - val environment: ExecutionContext - - /** A driver serves as a target for this proxy `Tasks` object. - * - * If the execution context has the standard implementation and uses fork/join pools, - * the driver is `ForkJoinTaskSupport` with the same pool, as an optimization. 
- * Otherwise, the driver will be a Scala `Future`-based implementation. - */ - private val driver: Tasks = executionContext match { - case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match { - case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp) - case _ => new FutureTasks(environment) - } - case _ => new FutureTasks(environment) - } - - def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task - - def parallelismLevel = driver.parallelismLevel -} diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala deleted file mode 100644 index fdd096af8129..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.immutable - -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.mutable.UnrolledBuffer -import scala.collection.generic.ParMapFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.immutable.{ HashMap, TrieIterator } -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.parallel.Task - -/** Immutable parallel hash map, based on hash tries. 
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tries Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tries for more information. - * - * @define Coll `immutable.ParHashMap` - * @define coll immutable parallel hash map - */ -@SerialVersionUID(1L) -class ParHashMap[K, +V] private[immutable] (private[this] val trie: HashMap[K, V]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], HashMap[K, V]] - with Serializable -{ -self => - - def this() = this(HashMap.empty[K, V]) - - override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap - - override def empty: ParHashMap[K, V] = new ParHashMap[K, V] - - protected[this] override def newCombiner = HashMapCombiner[K, V] - - def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) - - override def seq = trie - - def -(k: K) = new ParHashMap(trie - k) - - def +[U >: V](kv: (K, U)) = new ParHashMap(trie + kv) - - def get(k: K) = trie.get(k) - - override def size = trie.size - - protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { - case Some(old) => old - case None => newc - } - - class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int) - extends IterableSplitter[(K, V)] { - var i = 0 - def dup = triter match { - case t: TrieIterator[_] => - dupFromIterator(t.dupIterator) - case _ => - val buff = triter.toBuffer - triter = buff.iterator - dupFromIterator(buff.iterator) - } - private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = { - val phit = new ParHashMapIterator(it, sz) - phit.i = i - phit - } - def split: Seq[IterableSplitter[(K, V)]] = 
if (remaining < 2) Seq(this) else triter match { - case t: TrieIterator[_] => - val previousRemaining = remaining - val ((fst, fstlength), snd) = t.split - val sndlength = previousRemaining - fstlength - Seq( - new ParHashMapIterator(fst, fstlength), - new ParHashMapIterator(snd, sndlength) - ) - case _ => - // iterator of the collision map case - val buff = triter.toBuffer - val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) } - } - def next(): (K, V) = { - i += 1 - val r = triter.next() - r - } - def hasNext: Boolean = { - i < sz - } - def remaining = sz - i - override def toString = "HashTrieIterator(" + sz + ")" - } - - /* debug */ - - private[parallel] def printDebugInfo() { - println("Parallel hash trie") - println("Top level inner trie type: " + trie.getClass) - trie match { - case hm: HashMap.HashMap1[k, v] => - println("single node type") - println("key stored: " + hm.getKey) - println("hash of key: " + hm.getHash) - println("computed hash of " + hm.getKey + ": " + hm.computeHashFor(hm.getKey)) - println("trie.get(key): " + hm.get(hm.getKey)) - case _ => - println("other kind of node") - } - } -} - -/** $factoryInfo - * @define Coll `immutable.ParHashMap` - * @define coll immutable parallel hash map - */ -object ParHashMap extends ParMapFactory[ParHashMap] { - def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = HashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = { - new CanCombineFromMap[K, V] - } - - def fromTrie[K, V](t: HashMap[K, V]) = new ParHashMap(t) - - var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0) -} - -private[parallel] abstract class HashMapCombiner[K, V] -extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) { -//self: EnvironmentPassingCombiner[(K, V), 
ParHashMap[K, V]] => - import HashMapCombiner._ - val emptyTrie = HashMap.empty[K, V] - - def +=(elem: (K, V)) = { - sz += 1 - val hc = emptyTrie.computeHash(elem._1) - val pos = hc & 0x1f - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[(K, V)] - } - // add to bucket - buckets(pos) += elem - this - } - - def result = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashMap[K, V]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashMap[K, V] - else if (sz == 1) new ParHashMap[K, V](root(0)) - else { - val trie = new HashMap.HashTrieMap(bitmap, root, sz) - new ParHashMap[K, V](trie) - } - } - - def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashMap[K, AnyRef]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashMap[K, Repr] - else if (sz == 1) new ParHashMap[K, Repr](root(0).asInstanceOf[HashMap[K, Repr]]) - else { - val trie = new HashMap.HashTrieMap(bitmap, root.asInstanceOf[Array[HashMap[K, Repr]]], sz) - new ParHashMap[K, Repr](trie) - } - } - - override def toString = { - "HashTrieCombiner(sz: " + size + ")" - //"HashTrieCombiner(buckets:\n\t" + buckets.filter(_ != null).mkString("\n\t") + ")\n" - } - - /* tasks */ - - class CreateTrie(bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, V]], offset: Int, howmany: Int) - extends Task[Unit, CreateTrie] { - @volatile var result = () - def leaf(prev: Option[Unit]) = { 
- var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createTrie(bucks(i)) - i += 1 - } - result = result - } - private def createTrie(elems: Unrolled[(K, V)]): HashMap[K, V] = { - var trie = new HashMap[K, V] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val kv = chunkarr(i) - val hc = trie.computeHash(kv._1) - trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv, null) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - trie - } - def split = { - val fp = howmany / 2 - List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } - - class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int) - extends Task[Unit, CreateGroupedTrie[Repr]] { - @volatile var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createGroupedTrie(bucks(i)).asInstanceOf[HashMap[K, AnyRef]] - i += 1 - } - result = result - } - private def createGroupedTrie(elems: Unrolled[(K, V)]): HashMap[K, Repr] = { - var trie = new HashMap[K, Combiner[V, Repr]] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val kv = chunkarr(i) - val hc = trie.computeHash(kv._1) - - // check to see if already present - val cmb: Combiner[V, Repr] = trie.get0(kv._1, hc, rootbits) match { - case Some(cmb) => cmb - case None => - val cmb: Combiner[V, Repr] = cbf() - trie = trie.updated0[Combiner[V, Repr]](kv._1, hc, rootbits, cmb, null, null) - cmb - } - cmb += kv._2 - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - 
evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]] - } - private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match { - case hm1: HashMap.HashMap1[_, _] => - val evaledvalue = hm1.value.result - new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null) - case hmc: HashMap.HashMapCollision1[_, _] => - val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) } - new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs) - case htm: HashMap.HashTrieMap[k, v] => - var i = 0 - while (i < htm.elems.length) { - htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]] - i += 1 - } - htm.asInstanceOf[HashMap[K, Repr]] - case empty => empty.asInstanceOf[HashMap[K, Repr]] - } - def split = { - val fp = howmany / 2 - List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } -} - -private[parallel] object HashMapCombiner { - def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] - - private[immutable] val rootbits = 5 - private[immutable] val rootsize = 1 << 5 -} diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala deleted file mode 100644 index d761e109cc28..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel.immutable - - - -import scala.collection.parallel.ParSetLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.mutable.UnrolledBuffer -import scala.collection.generic.ParSetFactory -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.GenericCompanion -import scala.collection.immutable.{ HashSet, TrieIterator } -import scala.collection.parallel.Task - - - -/** Immutable parallel hash set, based on hash tries. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the set - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tries Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tries for more information. 
- * - * @define Coll `immutable.ParHashSet` - * @define coll immutable parallel hash set - */ -@SerialVersionUID(1L) -class ParHashSet[T] private[immutable] (private[this] val trie: HashSet[T]) -extends ParSet[T] - with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], HashSet[T]] - with Serializable -{ -self => - - def this() = this(HashSet.empty[T]) - - override def companion: GenericCompanion[ParHashSet] with GenericParCompanion[ParHashSet] = ParHashSet - - override def empty: ParHashSet[T] = new ParHashSet[T] - - def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) - - override def seq = trie - - def -(e: T) = new ParHashSet(trie - e) - - def +(e: T) = new ParHashSet(trie + e) - - def contains(e: T): Boolean = trie.contains(e) - - override def size = trie.size - - protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match { - case Some(old) => old - case None => newc - } - - class ParHashSetIterator(var triter: Iterator[T], val sz: Int) - extends IterableSplitter[T] { - var i = 0 - def dup = triter match { - case t: TrieIterator[_] => - dupFromIterator(t.dupIterator) - case _ => - val buff = triter.toBuffer - triter = buff.iterator - dupFromIterator(buff.iterator) - } - private def dupFromIterator(it: Iterator[T]) = { - val phit = new ParHashSetIterator(it, sz) - phit.i = i - phit - } - def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match { - case t: TrieIterator[_] => - val previousRemaining = remaining - val ((fst, fstlength), snd) = t.split - val sndlength = previousRemaining - fstlength - Seq( - new ParHashSetIterator(fst, fstlength), - new ParHashSetIterator(snd, sndlength) - ) - case _ => - // iterator of the collision map case - val buff = triter.toBuffer - val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) } - } - def next(): T = { - i += 1 - triter.next() - 
} - def hasNext: Boolean = { - i < sz - } - def remaining = sz - i - } - -} - - -/** $factoryInfo - * @define Coll `immutable.ParHashSet` - * @define coll immutable parallel hash set - */ -object ParHashSet extends ParSetFactory[ParHashSet] { - def newCombiner[T]: Combiner[T, ParHashSet[T]] = HashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = - new GenericCanCombineFrom[T] - - def fromTrie[T](t: HashSet[T]) = new ParHashSet(t) -} - - -private[immutable] abstract class HashSetCombiner[T] -extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) { -//self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - import HashSetCombiner._ - import HashSet.computeHash - val emptyTrie = HashSet.empty[T] - - def +=(elem: T) = { - sz += 1 - val hc = computeHash(elem) - val pos = hc & 0x1f - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[Any] - } - // add to bucket - buckets(pos) += elem - this - } - - def result = { - val bucks = buckets.filter(_ != null).map(_.headPtr) - val root = new Array[HashSet[T]](bucks.length) - - combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length)) - - var bitmap = 0 - var i = 0 - while (i < rootsize) { - if (buckets(i) ne null) bitmap |= 1 << i - i += 1 - } - val sz = root.foldLeft(0)(_ + _.size) - - if (sz == 0) new ParHashSet[T] - else if (sz == 1) new ParHashSet[T](root(0)) - else { - val trie = new HashSet.HashTrieSet(bitmap, root, sz) - new ParHashSet[T](trie) - } - } - - /* tasks */ - - class CreateTrie(bucks: Array[Unrolled[Any]], root: Array[HashSet[T]], offset: Int, howmany: Int) - extends Task[Unit, CreateTrie] { - var result = () - def leaf(prev: Option[Unit]) = { - var i = offset - val until = offset + howmany - while (i < until) { - root(i) = createTrie(bucks(i)) - i += 1 - } - } - private def createTrie(elems: Unrolled[Any]): HashSet[T] = { - var trie = new 
HashSet[T] - - var unrolled = elems - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val v = chunkarr(i).asInstanceOf[T] - val hc = computeHash(v) - trie = trie.updated0(v, hc, rootbits) // internal API, private[collection] - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - trie - } - def split = { - val fp = howmany / 2 - List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) - } -} - - -object HashSetCombiner { - def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {} - - private[immutable] val rootbits = 5 - private[immutable] val rootsize = 1 << 5 -} diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala deleted file mode 100644 index fa1e21523069..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParIterable.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic._ -import scala.collection.parallel.ParIterableLike -import scala.collection.parallel.Combiner - -/** A template trait for immutable parallel iterable collections. 
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[+T] -extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]] - with Immutable -{ - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - // if `immutable.ParIterableLike` is introduced, please move these 4 methods there - override def toIterable: ParIterable[T] = this - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = - new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T] -} diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala deleted file mode 100644 index 2537da71bcd4..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParMap.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic.ParMapFactory -import scala.collection.generic.GenericParMapTemplate -import scala.collection.generic.GenericParMapCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.ParMapLike -import scala.collection.parallel.Combiner - -/** A template trait for immutable parallel maps. - * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, +V] -extends scala.collection/*.immutable*/.GenMap[K, V] - with GenericParMapTemplate[K, V, ParMap] - with parallel.ParMap[K, V] - with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]] -{ -self => - - override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - override def empty: ParMap[K, V] = new ParHashMap[K, V] - - override def stringPrefix = "ParMap" - - override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]] - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - def + [U >: V](kv: (K, U)): ParMap[K, U] - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d) - -} - - - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = HashMapCombiner[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V) - extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] { - override def empty = new WithDefault(underlying.empty, d) - override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) - override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d) - override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d) - override def seq = underlying.seq.withDefault(d) - } - -} diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala deleted file mode 100644 index 3bd0b4961566..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel.immutable - -import scala.collection.immutable.Range -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import scala.collection.Iterator - -/** Parallel ranges. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @param range the sequential range this parallel range was obtained from - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-range Scala's Parallel Collections Library overview]] - * section on `ParRange` for more information. - * - * @define Coll `immutable.ParRange` - * @define coll immutable parallel range - */ -@SerialVersionUID(1L) -class ParRange(val range: Range) -extends ParSeq[Int] - with Serializable -{ -self => - - override def seq = range - - @inline final def length = range.length - - @inline final def apply(idx: Int) = range.apply(idx) - - def splitter = new ParRangeIterator - - class ParRangeIterator(range: Range = self.range) - extends SeqSplitter[Int] { - override def toString = "ParRangeIterator(over: " + range + ")" - private var ind = 0 - private val len = range.length - - final def remaining = len - ind - - final def hasNext = ind < len - - final def next = if (hasNext) { - val r = range.apply(ind) - ind += 1 - r - } else Iterator.empty.next() - - private def rangeleft = range.drop(ind) - - def dup = new ParRangeIterator(rangeleft) - - def split = { - val rleft = rangeleft - val elemleft = rleft.length - if (elemleft < 2) Seq(new ParRangeIterator(rleft)) - else Seq( - new ParRangeIterator(rleft.take(elemleft / 2)), - new ParRangeIterator(rleft.drop(elemleft / 2)) - ) - } - - def psplit(sizes: Int*) = { - var rleft = rangeleft - for (sz <- sizes) yield { - val fronttaken = rleft.take(sz) - rleft = rleft.drop(sz) - new ParRangeIterator(fronttaken) - } - } - - /* accessors */ - - override def foreach[U](f: Int => U): Unit = { - 
rangeleft.foreach(f.asInstanceOf[Int => Unit]) - ind = len - } - - override def reduce[U >: Int](op: (U, U) => U): U = { - val r = rangeleft.reduceLeft(op) - ind = len - r - } - - /* transformers */ - - override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = { - while (hasNext) { - cb += f(next) - } - cb - } - } - - override def toString = s"Par$range" -} - -object ParRange { - def apply(start: Int, end: Int, step: Int, inclusive: Boolean) = new ParRange( - if (inclusive) new Range.Inclusive(start, end, step) - else new Range(start, end, step) - ) -} diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala deleted file mode 100644 index 31f33950e74d..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParSeq.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner - -/** An immutable variant of `ParSeq`. 
- * - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -trait ParSeq[+T] -extends scala.collection/*.immutable*/.GenSeq[T] - with scala.collection.parallel.ParSeq[T] - with ParIterable[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]] -{ - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - override def toSeq: ParSeq[T] = this -} - -/** $factoryInfo - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T] -} diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala deleted file mode 100644 index f509dde5192c..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParSet.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic._ -import scala.collection.parallel.ParSetLike -import scala.collection.parallel.Combiner - -/** An immutable variant of `ParSet`. 
- * - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -trait ParSet[T] -extends scala.collection/*.immutable*/.GenSet[T] - with GenericParTemplate[T, ParSet] - with parallel.ParSet[T] - with ParIterable[T] - with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]] -{ -self => - override def empty: ParSet[T] = ParHashSet[T]() - - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - - override def stringPrefix = "ParSet" - - // ok, because this could only violate `apply` and we can live with that - override def toSet[U >: T]: ParSet[U] = this.asInstanceOf[ParSet[U]] -} - -/** $factoryInfo - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -object ParSet extends ParSetFactory[ParSet] { - def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T] - - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] -} diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala deleted file mode 100644 index c81c88a624f6..000000000000 --- a/src/library/scala/collection/parallel/immutable/ParVector.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package parallel.immutable - -import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory} -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import mutable.ArrayBuffer -import immutable.Vector -import immutable.VectorBuilder -import immutable.VectorIterator - -/** Immutable parallel vectors, based on vectors. - * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the vector - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-vector Scala's Parallel Collections Library overview]] - * section on `ParVector` for more information. - * - * @define Coll `immutable.ParVector` - * @define coll immutable parallel vector - */ -class ParVector[+T](private[this] val vector: Vector[T]) -extends ParSeq[T] - with GenericParTemplate[T, ParVector] - with ParSeqLike[T, ParVector[T], Vector[T]] - with Serializable -{ - override def companion = ParVector - - def this() = this(Vector()) - - def apply(idx: Int) = vector.apply(idx) - - def length = vector.length - - def splitter: SeqSplitter[T] = { - val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) - vector.initIterator(pit) - pit - } - - override def seq: Vector[T] = vector - - override def toVector: Vector[T] = vector - - class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] { - def remaining: Int = remainingElementCount - def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter - def split: Seq[ParVectorIterator] = { - val rem = remaining - if (rem >= 2) psplit(rem / 2, rem - rem / 2) - else Seq(this) - } - def psplit(sizes: Int*): Seq[ParVectorIterator] = { - var remvector = remainingVector - val splitted = new ArrayBuffer[Vector[T]] - for (sz <- sizes) { - 
splitted += remvector.take(sz) - remvector = remvector.drop(sz) - } - splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator]) - } - } -} - -/** $factoryInfo - * @define Coll `immutable.ParVector` - * @define coll immutable parallel vector - */ -object ParVector extends ParFactory[ParVector] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] = - new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T] - - def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]] -} - -private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] { -//self: EnvironmentPassingCombiner[T, ParVector[T]] => - var sz = 0 - val vectors = new ArrayBuffer[VectorBuilder[T]] += new VectorBuilder[T] - - def size: Int = sz - - def +=(elem: T): this.type = { - vectors.last += elem - sz += 1 - this - } - - def clear() = { - vectors.clear() - vectors += new VectorBuilder[T] - sz = 0 - } - - def result: ParVector[T] = { - val rvb = new VectorBuilder[T] - for (vb <- vectors) { - rvb ++= vb.result - } - new ParVector(rvb.result) - } - - def combine[U <: T, NewTo >: ParVector[T]](other: Combiner[U, NewTo]) = if (other eq this) this else { - val that = other.asInstanceOf[LazyParVectorCombiner[T]] - sz += that.sz - vectors ++= that.vectors - this - } -} diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala deleted file mode 100644 index 3fdd77068e92..000000000000 --- a/src/library/scala/collection/parallel/immutable/package.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel - -package immutable { - /** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method. - * - * @tparam T type of the elements - * @param elem the element in the repetition - * @param length the length of the collection - */ - private[parallel] class Repetition[T](elem: T, val length: Int) extends ParSeq[T] { - self => - - def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx) - override def seq: collection.immutable.Seq[T] = new collection.AbstractSeq[T] with collection.immutable.Seq[T] { - override def length: Int = self.length - override def apply(idx: Int): T = self.apply(idx) - override def iterator: Iterator[T] = Iterator.continually(elem).take(length) - override def par: ParSeq[T] = self - } - def update(idx: Int, elem: T) = throw new UnsupportedOperationException - - class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] { - def remaining = until - i - def hasNext = i < until - def next = { i += 1; elem } - def dup = new ParIterator(i, until, elem) - def psplit(sizes: Int*) = { - val incr = sizes.scanLeft(0)(_ + _) - for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) - } - def split = psplit(remaining / 2, remaining - remaining / 2) - } - - def splitter = new ParIterator - } -} - -package object immutable { - /* package level methods */ - def repetition[T](elem: T, len: Int) = new Repetition(elem, len) -} diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala deleted file mode 100644 index c0052d54d703..000000000000 --- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic.Growable -import scala.collection.generic.Sizing -import scala.collection.mutable.ArrayBuffer -import scala.collection.parallel.Combiner - -/** Implements combining contents of two combiners - * by postponing the operation until `result` method is called. It chains - * the leaf results together instead of evaluating the actual collection. - * - * @tparam Elem the type of the elements in the combiner - * @tparam To the type of the collection the combiner produces - * @tparam Buff the type of the buffers that contain leaf results and this combiner chains together - */ -trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To] { -//self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] => - val chain: ArrayBuffer[Buff] - val lastbuff = chain.last - def +=(elem: Elem) = { lastbuff += elem; this } - def result: To = allocateAndCopy - def clear() = { chain.clear() } - def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) { - if (other.isInstanceOf[LazyCombiner[_, _, _]]) { - val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]] - newLazyCombiner(chain ++= that.chain) - } else throw new UnsupportedOperationException("Cannot combine with combiner of different type.") - } else this - def size = chain.foldLeft(0)(_ + _.size) - - /** Method that allocates the data structure and copies elements into it using - * `size` and `chain` members. 
- */ - def allocateAndCopy: To - def newLazyCombiner(buffchain: ArrayBuffer[Buff]): LazyCombiner[Elem, To, Buff] -} diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala deleted file mode 100644 index 68d2f267e3f8..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ /dev/null @@ -1,722 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - - - -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.Combiner -import scala.collection.parallel.SeqSplitter -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Task -import scala.collection.parallel.CHECK_RATE -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.Builder -import scala.collection.GenTraversableOnce -import scala.reflect.ClassTag - -/** Parallel sequence holding elements in a linear array. - * - * `ParArray` is a parallel sequence with a predefined size. The size of the array - * cannot be changed after it's been created. - * - * `ParArray` internally keeps an array containing the elements. This means that - * bulk operations based on traversal ensure fast access to elements. `ParArray` uses lazy builders that - * create the internal data array only after the size of the array is known. In the meantime, they keep - * the result set fragmented. 
The fragments - * are copied into the resulting data array in parallel using fast array copy operations once all the combiners - * are populated in parallel. - * - * @tparam T type of the elements in the array - * - * @author Aleksandar Prokopec - * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-array Scala's Parallel Collections Library overview]] - * section on `ParArray` for more information. - * - * @define Coll `ParArray` - * @define coll parallel array - * - */ -@SerialVersionUID(1L) -class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T]) -extends ParSeq[T] - with GenericParTemplate[T, ParArray] - with ParSeqLike[T, ParArray[T], ArraySeq[T]] - with Serializable -{ -self => - - @transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]] - - override def companion: GenericCompanion[ParArray] with GenericParCompanion[ParArray] = ParArray - - def this(sz: Int) = this { - require(sz >= 0) - new ArraySeq[T](sz) - } - - def apply(i: Int) = array(i).asInstanceOf[T] - - def update(i: Int, elem: T) = array(i) = elem - - def length = arrayseq.length - - override def seq = arrayseq - - protected[parallel] def splitter: ParArrayIterator = { - val pit = new ParArrayIterator - pit - } - - class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array) - extends SeqSplitter[T] { - def hasNext = i < until - - def next = { - val elem = arr(i) - i += 1 - elem.asInstanceOf[T] - } - - def remaining = until - i - - def dup = new ParArrayIterator(i, until, arr) - - def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = { - var traversed = i - val total = sizesIncomplete.reduceLeft(_ + _) - val left = remaining - val sizes = if (total >= left) sizesIncomplete else sizesIncomplete :+ (left - total) - for (sz <- sizes) yield if (traversed < until) { - val start = traversed - val end = (traversed + sz) min until - traversed = end - new 
ParArrayIterator(start, end, arr) - } else { - new ParArrayIterator(traversed, traversed, arr) - } - } - - override def split: Seq[ParArrayIterator] = { - val left = remaining - if (left >= 2) { - val splitpoint = left / 2 - val sq = Seq( - new ParArrayIterator(i, i + splitpoint, arr), - new ParArrayIterator(i + splitpoint, until, arr)) - i = until - sq - } else { - Seq(this) - } - } - - override def toString = "ParArrayIterator(" + i + ", " + until + ")" - - /* overrides for efficiency */ - - /* accessors */ - - override def foreach[U](f: T => U) = { - foreach_quick(f, arr, until, i) - i = until - } - - private def foreach_quick[U](f: T => U, a: Array[Any], ntil: Int, from: Int) = { - var j = from - while (j < ntil) { - f(a(j).asInstanceOf[T]) - j += 1 - } - } - - override def count(p: T => Boolean) = { - val c = count_quick(p, arr, until, i) - i = until - c - } - - private def count_quick(p: T => Boolean, a: Array[Any], ntil: Int, from: Int) = { - var cnt = 0 - var j = from - while (j < ntil) { - if (p(a(j).asInstanceOf[T])) cnt += 1 - j += 1 - } - cnt - } - - override def foldLeft[S](z: S)(op: (S, T) => S): S = { - val r = foldLeft_quick(arr, until, op, z) - i = until - r - } - - private def foldLeft_quick[S](a: Array[Any], ntil: Int, op: (S, T) => S, z: S): S = { - var j = i - var sum = z - while (j < ntil) { - sum = op(sum, a(j).asInstanceOf[T]) - j += 1 - } - sum - } - - override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op) - - override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) - - override def sum[U >: T](implicit num: Numeric[U]): U = { - val s = sum_quick(num, arr, until, i, num.zero) - i = until - s - } - - private def sum_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, zero: U): U = { - var j = from - var sum = zero - while (j < ntil) { - sum = num.plus(sum, a(j).asInstanceOf[T]) - j += 1 - } - sum - } - - override def product[U >: T](implicit num: Numeric[U]): U = 
{ - val p = product_quick(num, arr, until, i, num.one) - i = until - p - } - - private def product_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, one: U): U = { - var j = from - var prod = one - while (j < ntil) { - prod = num.times(prod, a(j).asInstanceOf[T]) - j += 1 - } - prod - } - - override def forall(p: T => Boolean): Boolean = { - if (isAborted) return false - - var all = true - while (i < until) { - val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE - - all = forall_quick(p, array, nextuntil, i) - if (all) i = nextuntil - else { - i = until - abort() - } - - if (isAborted) return false - } - all - } - - // it's faster to use a separate small method - private def forall_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { - var j = start - while (j < nextuntil) { - if (p(a(j).asInstanceOf[T])) j += 1 - else return false - } - true - } - - override def exists(p: T => Boolean): Boolean = { - if (isAborted) return true - - var some = false - while (i < until) { - val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE - - some = exists_quick(p, array, nextuntil, i) - if (some) { - i = until - abort() - } else i = nextuntil - - if (isAborted) return true - } - some - } - - // faster to use separate small method - private def exists_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = { - var j = start - while (j < nextuntil) { - if (p(a(j).asInstanceOf[T])) return true - else j += 1 - } - false - } - - override def find(p: T => Boolean): Option[T] = { - if (isAborted) return None - - var r: Option[T] = None - while (i < until) { - val nextuntil = if ((i + CHECK_RATE) < until) (i + CHECK_RATE) else until - - r = find_quick(p, array, nextuntil, i) - - if (r != None) { - i = until - abort() - } else i = nextuntil - - if (isAborted) return r - } - r - } - - private def find_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Option[T] = { - var j = 
start - while (j < nextuntil) { - val elem = a(j).asInstanceOf[T] - if (p(elem)) return Some(elem) - else j += 1 - } - None - } - - override def drop(n: Int): ParArrayIterator = { - i += n - this - } - - override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) { - val totallen = (self.length - i) min len min (array.length - from) - Array.copy(arr, i, array, from, totallen) - i += totallen - } - - override def prefixLength(pred: T => Boolean): Int = { - val r = prefixLength_quick(pred, arr, until, i) - i += r + 1 - r - } - - private def prefixLength_quick(pred: T => Boolean, a: Array[Any], ntil: Int, startpos: Int): Int = { - var j = startpos - var endpos = ntil - while (j < endpos) { - if (pred(a(j).asInstanceOf[T])) j += 1 - else endpos = j - } - endpos - startpos - } - - override def indexWhere(pred: T => Boolean): Int = { - val r = indexWhere_quick(pred, arr, until, i) - val ret = if (r != -1) r - i else r - i = until - ret - } - - private def indexWhere_quick(pred: T => Boolean, a: Array[Any], ntil: Int, from: Int): Int = { - var j = from - var pos = -1 - while (j < ntil) { - if (pred(a(j).asInstanceOf[T])) { - pos = j - j = ntil - } else j += 1 - } - pos - } - - override def lastIndexWhere(pred: T => Boolean): Int = { - val r = lastIndexWhere_quick(pred, arr, i, until) - val ret = if (r != -1) r - i else r - i = until - ret - } - - private def lastIndexWhere_quick(pred: T => Boolean, a: Array[Any], from: Int, ntil: Int): Int = { - var pos = -1 - var j = ntil - 1 - while (j >= from) { - if (pred(a(j).asInstanceOf[T])) { - pos = j - j = -1 - } else j -= 1 - } - pos - } - - override def sameElements(that: Iterator[_]): Boolean = { - var same = true - while (i < until && that.hasNext) { - if (arr(i) != that.next) { - i = until - same = false - } - i += 1 - } - same - } - - /* transformers */ - - override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = cbf(self.repr) - cb.sizeHint(remaining) - 
map2combiner_quick(f, arr, cb, until, i) - i = until - cb - } - - private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) { - var j = from - while (j < ntil) { - cb += f(a(j).asInstanceOf[T]) - j += 1 - } - } - - override def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(self.repr) - collect2combiner_quick(pf, arr, cb, until, i) - i = until - cb - } - - private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) { - var j = from - val runWith = pf.runWith(b => cb += b) - while (j < ntil) { - val curr = a(j).asInstanceOf[T] - runWith(curr) - j += 1 - } - } - - override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { - //val cb = pbf(self.repr) - while (i < until) { - val traversable = f(arr(i).asInstanceOf[T]) - if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator - else cb ++= traversable.seq - i += 1 - } - cb - } - - override def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { - filter2combiner_quick(pred, cb, arr, until, i) - i = until - cb - } - - private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { - var j = i - while(j < ntil) { - val curr = a(j).asInstanceOf[T] - if (pred(curr)) cb += curr - j += 1 - } - } - - override def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]) = { - filterNot2combiner_quick(pred, cb, arr, until, i) - i = until - cb - } - - private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { - var j = i - while(j < ntil) { - val curr = a(j).asInstanceOf[T] - if (!pred(curr)) cb += curr - j += 1 - } - } - - override def copy2builder[U >: T, Coll, Bld 
<: Builder[U, Coll]](cb: Bld): Bld = { - cb.sizeHint(remaining) - cb.ifIs[ResizableParArrayCombiner[T]] { - pac => - // with res. combiner: - val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] - Array.copy(arr, i, targetarr, pac.lastbuff.size, until - i) - pac.lastbuff.setInternalSize(remaining) - } otherwise { - cb.ifIs[UnrolledParArrayCombiner[T]] { - pac => - // with unr. combiner: - val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] - Array.copy(arr, i, targetarr, 0, until - i) - pac.buff.size = pac.buff.size + until - i - pac.buff.lastPtr.size = until - i - } otherwise { - copy2builder_quick(cb, arr, until, i) - i = until - } - } - cb - } - - private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int) { - var j = from - while (j < ntil) { - b += a(j).asInstanceOf[T] - j += 1 - } - } - - override def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { - partition2combiners_quick(pred, btrue, bfalse, arr, until, i) - i = until - (btrue, bfalse) - } - - private def partition2combiners_quick[U >: T, This](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { - var j = from - while (j < ntil) { - val curr = a(j).asInstanceOf[T] - if (p(curr)) btrue += curr else bfalse += curr - j += 1 - } - } - - override def take2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { - cb.sizeHint(n) - val ntil = i + n - val a = arr - while (i < ntil) { - cb += a(i).asInstanceOf[T] - i += 1 - } - cb - } - - override def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]) = { - drop(n) - cb.sizeHint(remaining) - while (i < until) { - cb += arr(i).asInstanceOf[T] - i += 1 - } - cb - } - - override def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - cb.ifIs[ResizableParArrayCombiner[T]] { - pac => - // with res. 
combiner: - val sz = remaining - pac.sizeHint(sz) - val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]] - reverse2combiner_quick(targetarr, arr, 0, i, until) - pac.lastbuff.setInternalSize(sz) - } otherwise { - cb.ifIs[UnrolledParArrayCombiner[T]] { - pac => - // with unr. combiner: - val sz = remaining - pac.sizeHint(sz) - val targetarr: Array[Any] = pac.buff.lastPtr.array.asInstanceOf[Array[Any]] - reverse2combiner_quick(targetarr, arr, 0, i, until) - pac.buff.size = pac.buff.size + sz - pac.buff.lastPtr.size = sz - } otherwise super.reverse2combiner(cb) - } - cb - } - - private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], targfrom: Int, srcfrom: Int, srcuntil: Int) { - var j = srcfrom - var k = targfrom + srcuntil - srcfrom - 1 - while (j < srcuntil) { - targ(k) = a(j) - j += 1 - k -= 1 - } - } - - override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int) { - scanToArray_quick[U](array, destarr.asInstanceOf[Array[Any]], op, z, i, until, from) - i = until - } - - protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int) { - var last = z - var j = srcfrom - var k = destfrom - while (j < srcntil) { - last = op(last, srcarr(j).asInstanceOf[U]) - destarr(k) = last - j += 1 - k += 1 - } - } - - } - - /* operations */ - - private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]] - - override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) { - // reserve an array - val targarrseq = new ArraySeq[S](length) - val targetarr = targarrseq.array.asInstanceOf[Array[Any]] - - // fill it in parallel - tasksupport.executeAndWaitResult(new Map[S](f, targetarr, 0, length)) - - // wrap it into a parallel array - (new ParArray[S](targarrseq)).asInstanceOf[That] - } else super.map(f)(bf) - - override def scan[U >: T, That](z: U)(op: 
(U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That = - if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) { - // reserve an array - val targarrseq = new ArraySeq[U](length + 1) - val targetarr = targarrseq.array.asInstanceOf[Array[Any]] - targetarr(0) = z - - // do a parallel prefix scan - if (length > 0) tasksupport.executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult { - tree => tasksupport.executeAndWaitResult(new ScanToArray(tree, z, op, targetarr)) - }) - - // wrap the array into a parallel array - (new ParArray[U](targarrseq)).asInstanceOf[That] - } else super.scan(z)(op)(cbf) - - /* tasks */ - - class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any]) - extends Task[Unit, ScanToArray[U]] { - var result = () - - def leaf(prev: Option[Unit]) = iterate(tree) - private def iterate(tree: ScanTree[U]): Unit = tree match { - case ScanNode(left, right) => - iterate(left) - iterate(right) - case ScanLeaf(_, _, from, len, Some(prev), _) => - scanLeaf(array, targetarr, from, len, prev.acc) - case ScanLeaf(_, _, from, len, None, _) => - scanLeaf(array, targetarr, from, len, z) - } - private def scanLeaf(srcarr: Array[Any], targetarr: Array[Any], from: Int, len: Int, startval: U) { - var i = from - val until = from + len - var curr = startval - val operation = op - while (i < until) { - curr = operation(curr, srcarr(i).asInstanceOf[U]) - i += 1 - targetarr(i) = curr - } - } - def split = tree match { - case ScanNode(left, right) => Seq( - new ScanToArray(left, z, op, targetarr), - new ScanToArray(right, z, op, targetarr) - ) - case _ => sys.error("Can only split scan tree internal nodes.") - } - def shouldSplitFurther = tree match { - case ScanNode(_, _) => true - case _ => false - } - } - - class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] { - var result = () - - def leaf(prev: Option[Unit]) = { - val tarr = targetarr - val sarr = 
array - var i = offset - val until = offset + howmany - while (i < until) { - tarr(i) = f(sarr(i).asInstanceOf[T]) - i += 1 - } - } - def split = { - val fp = howmany / 2 - List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel) - } - - /* serialization */ - - private def writeObject(out: java.io.ObjectOutputStream) { - out.defaultWriteObject - } - - private def readObject(in: java.io.ObjectInputStream) { - in.defaultReadObject - - // get raw array from arrayseq - array = arrayseq.array.asInstanceOf[Array[Any]] - } - -} - - -/** $factoryInfo - * @define Coll `mutable.ParArray` - * @define coll parallel array - */ -object ParArray extends ParFactory[ParArray] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParArray[T]] = new GenericCanCombineFrom[T] - def newBuilder[T]: Combiner[T, ParArray[T]] = newCombiner - def newCombiner[T]: Combiner[T, ParArray[T]] = ParArrayCombiner[T] - - /** Creates a new parallel array by wrapping the specified array. - */ - def handoff[T](arr: Array[T]): ParArray[T] = wrapOrRebuild(arr, arr.length) - - /** Creates a new parallel array by wrapping a part of the specified array. 
- */ - def handoff[T](arr: Array[T], sz: Int): ParArray[T] = wrapOrRebuild(arr, sz) - - private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match { - case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz)) - case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz)) - } - - def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = { - val newarr = new Array[T](arr.length) - Array.copy(arr, 0, newarr, 0, arr.length) - handoff(newarr) - } - - def fromTraversables[T](xss: GenTraversableOnce[T]*) = { - val cb = ParArrayCombiner[T]() - for (xs <- xss) { - cb ++= xs.seq - } - cb.result - } - -} diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala deleted file mode 100644 index b108f32eaf87..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.mutable - -import scala.collection.parallel.IterableSplitter - -/** Parallel flat hash table. - * - * @tparam T type of the elements in the $coll. 
- * @define coll table - * @define Coll `ParFlatHashTable` - * - * @author Aleksandar Prokopec - */ -trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { - - override def alwaysInitSizeMap = true - - abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int) - extends IterableSplitter[T] with SizeMapUtils { - import scala.collection.DebugUtils._ - - private[this] var traversed = 0 - private[this] val itertable = table - - if (hasNext) scan() - - private[this] def scan() { - while (itertable(idx) eq null) { - idx += 1 - } - } - - def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T] - - def remaining = totalsize - traversed - def hasNext = traversed < totalsize - def next() = if (hasNext) { - val r = entryToElem(itertable(idx)) - traversed += 1 - idx += 1 - if (hasNext) scan() - r - } else Iterator.empty.next() - def dup = newIterator(idx, until, totalsize) - def split = if (remaining > 1) { - val divpt = (until + idx) / 2 - - val fstidx = idx - val fstuntil = divpt - val fsttotal = calcNumElems(idx, divpt, itertable.length, sizeMapBucketSize) - val fstit = newIterator(fstidx, fstuntil, fsttotal) - - val sndidx = divpt - val snduntil = until - val sndtotal = remaining - fsttotal - val sndit = newIterator(sndidx, snduntil, sndtotal) - - Seq(fstit, sndit) - } else Seq(this) - - override def debugInformation = buildString { - append => - append("Parallel flat hash table iterator") - append("---------------------------------") - append("Traversed/total: " + traversed + " / " + totalsize) - append("Table idx/until: " + idx + " / " + until) - append("Table length: " + itertable.length) - append("Table: ") - append(arrayString(itertable, 0, itertable.length)) - append("Sizemap: ") - append(arrayString(sizemap, 0, sizemap.length)) - } - - protected def countElems(from: Int, until: Int) = { - var count = 0 - var i = from - while (i < until) { - if (itertable(i) ne null) count += 1 - i += 1 - } - 
count - } - - protected def countBucketSizes(frombucket: Int, untilbucket: Int) = { - var count = 0 - var i = frombucket - while (i < untilbucket) { - count += sizemap(i) - i += 1 - } - count - } - } -} diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala deleted file mode 100644 index 4e699f936f94..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel -package mutable - -import scala.collection.generic._ -import scala.collection.mutable.DefaultEntry -import scala.collection.mutable.HashEntry -import scala.collection.mutable.HashTable -import scala.collection.mutable.UnrolledBuffer -import scala.collection.parallel.Task - -/** A parallel hash map. - * - * `ParHashMap` is a parallel map which internally keeps elements within a hash table. - * It uses chaining to resolve collisions. - * - * @tparam K type of the keys in the parallel hash map - * @tparam V type of the values in the parallel hash map - * - * @define Coll `ParHashMap` - * @define coll parallel hash map - * - * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tables Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tables for more information. 
- */ -@SerialVersionUID(1L) -class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]] - with ParHashTable[K, DefaultEntry[K, V]] - with Serializable -{ -self => - initWithContents(contents) - - type Entry = scala.collection.mutable.DefaultEntry[K, V] - - def this() = this(null) - - override def mapCompanion: GenericParMapCompanion[ParHashMap] = ParHashMap - - override def empty: ParHashMap[K, V] = new ParHashMap[K, V] - - protected[this] override def newCombiner = ParHashMapCombiner[K, V] - - override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents) - - def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) - - override def size = tableSize - - override def clear() = clearTable() - - def get(key: K): Option[V] = { - val e = findEntry(key) - if (e eq null) None - else Some(e.value) - } - - def put(key: K, value: V): Option[V] = { - val e = findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } - } - - def update(key: K, value: V): Unit = put(key, value) - - def remove(key: K): Option[V] = { - val e = removeEntry(key) - if (e ne null) Some(e.value) - else None - } - - def += (kv: (K, V)): this.type = { - val e = findOrAddEntry(kv._1, kv._2) - if (e ne null) e.value = kv._2 - this - } - - def -=(key: K): this.type = { removeEntry(key); this } - - override def stringPrefix = "ParHashMap" - - class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) - extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { - def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value) - - def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = - new ParHashMapIterator(idxFrom, idxUntil, 
totalSz, es) - } - - protected def createNewEntry[V1](key: K, value: V1): Entry = { - new Entry(key, value.asInstanceOf[V]) - } - - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) - } - - private def readObject(in: java.io.ObjectInputStream) { - init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject())) - } - - private[parallel] override def brokenInvariants = { - // bucket by bucket, count elements - val buckets = for (i <- 0 until (table.length / sizeMapBucketSize)) yield checkBucket(i) - - // check if each element is in the position corresponding to its key - val elems = for (i <- 0 until table.length) yield checkEntry(i) - - buckets.flatMap(x => x) ++ elems.flatMap(x => x) - } - - private def checkBucket(i: Int) = { - def count(e: HashEntry[K, DefaultEntry[K, V]]): Int = if (e eq null) 0 else 1 + count(e.next) - val expected = sizemap(i) - val found = ((i * sizeMapBucketSize) until ((i + 1) * sizeMapBucketSize)).foldLeft(0) { - (acc, c) => acc + count(table(c)) - } - if (found != expected) List("Found " + found + " elements, while sizemap showed " + expected) - else Nil - } - - private def checkEntry(i: Int) = { - def check(e: HashEntry[K, DefaultEntry[K, V]]): List[String] = if (e eq null) Nil else - if (index(elemHashCode(e.key)) == i) check(e.next) - else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next) - check(table(i)) - } -} - -/** $factoryInfo - * @define Coll `mutable.ParHashMap` - * @define coll parallel hash map - */ -object ParHashMap extends ParMapFactory[ParHashMap] { - var iters = 0 - - def empty[K, V]: ParHashMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParHashMap[K, V]] = ParHashMapCombiner.apply[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V] -} 
- -private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int) -extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks) - with scala.collection.mutable.HashTable.HashUtils[K] -{ - private val nonmasklen = ParHashMapCombiner.nonmasklength - private val seedvalue = 27 - - def +=(elem: (K, V)) = { - sz += 1 - val hc = improve(elemHashCode(elem._1), seedvalue) - val pos = (hc >>> nonmasklen) - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[DefaultEntry[K, V]]() - } - // add to bucket - buckets(pos) += new DefaultEntry(elem._1, elem._2) - this - } - - def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024 - // construct table - val table = new AddingHashTable(size, tableLoadFactor, seedvalue) - val bucks = buckets.map(b => if (b ne null) b.headPtr else null) - val insertcount = combinerTaskSupport.executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length)) - table.setSize(insertcount) - // TODO compare insertcount and size to see if compression is needed - val c = table.hashTableContents - new ParHashMap(c) - } else { - // construct a normal table and fill it sequentially - // TODO parallelize by keeping separate sizemaps and merging them - object table extends HashTable[K, DefaultEntry[K, V]] { - type Entry = DefaultEntry[K, V] - def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) } - def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry] - sizeMapInit(table.length) - } - var i = 0 - while (i < ParHashMapCombiner.numblocks) { - if (buckets(i) ne null) { - for (elem <- buckets(i)) table.insertEntry(elem) - } - i += 1 - } - new ParHashMap(table.hashTableContents) - } - - /* classes */ - - /** A hash table which will never resize itself. 
Knowing the number of elements in advance, - * it allocates the table of the required size when created. - * - * Entries are added using the `insertEntry` method. This method checks whether the element - * exists and updates the size map. It returns false if the key was already in the table, - * and true if the key was successfully inserted. It does not update the number of elements - * in the table. - */ - private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] { - import HashTable._ - _loadFactor = lf - table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems))) - tableSize = 0 - seedvalue = _seedvalue - threshold = newThreshold(_loadFactor, table.length) - sizeMapInit(table.length) - def setSize(sz: Int) = tableSize = sz - def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { - var h = index(elemHashCode(e.key)) - val olde = table(h).asInstanceOf[DefaultEntry[K, V]] - - // check if key already exists - var ce = olde - while (ce ne null) { - if (ce.key == e.key) { - h = -1 - ce = null - } else ce = ce.next - } - - // if key does not already exist - if (h != -1) { - e.next = olde - table(h) = e - nnSizeMapAdd(h) - true - } else false - } - protected def createNewEntry[X](key: K, x: X) = ??? 
- } - - /* tasks */ - - import UnrolledBuffer.Unrolled - - class FillBlocks(buckets: Array[Unrolled[DefaultEntry[K, V]]], table: AddingHashTable, offset: Int, howmany: Int) - extends Task[Int, FillBlocks] { - var result = Int.MinValue - def leaf(prev: Option[Int]) = { - var i = offset - val until = offset + howmany - result = 0 - while (i < until) { - result += fillBlock(i, buckets(i)) - i += 1 - } - } - private def fillBlock(block: Int, elems: Unrolled[DefaultEntry[K, V]]) = { - var insertcount = 0 - var unrolled = elems - var i = 0 - val t = table - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - if (t.insertEntry(elem)) insertcount += 1 - i += 1 - } - i = 0 - unrolled = unrolled.next - } - insertcount - } - def split = { - val fp = howmany / 2 - List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) - } - override def merge(that: FillBlocks) { - this.result += that.result - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) - } -} - -private[parallel] object ParHashMapCombiner { - private[mutable] val discriminantbits = 5 - private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) - private[mutable] val nonmasklength = 32 - discriminantbits - - def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] -} diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala deleted file mode 100644 index 94c0109326be..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and 
Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - - - -import scala.collection.generic._ -import scala.collection.mutable.FlatHashTable -import scala.collection.parallel.Combiner -import scala.collection.mutable.UnrolledBuffer -import scala.collection.parallel.Task - - - -/** A parallel hash set. - * - * `ParHashSet` is a parallel set which internally keeps elements within a hash table. - * It uses linear probing to resolve collisions. - * - * @tparam T type of the elements in the $coll. - * - * @define Coll `ParHashSet` - * @define coll parallel hash set - * - * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tables Scala's Parallel Collections Library overview]] - * section on Parallel Hash Tables for more information. 
- */ -@SerialVersionUID(1L) -class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T]) -extends ParSet[T] - with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]] - with ParFlatHashTable[T] - with Serializable -{ - initWithContents(contents) - // println("----> new par hash set!") - // java.lang.Thread.dumpStack - // println(debugInformation) - - def this() = this(null) - - override def companion = ParHashSet - - override def empty = new ParHashSet - - override def iterator = splitter - - override def size = tableSize - - def clear() = clearTable() - - override def seq = new scala.collection.mutable.HashSet(hashTableContents) - - def +=(elem: T) = { - addElem(elem) - this - } - - def -=(elem: T) = { - removeElem(elem) - this - } - - override def stringPrefix = "ParHashSet" - - def contains(elem: T) = containsElem(elem) - - def splitter = new ParHashSetIterator(0, table.length, size) - - class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int) - extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) { - def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) - } - - private def writeObject(s: java.io.ObjectOutputStream) { - serializeTo(s) - } - - private def readObject(in: java.io.ObjectInputStream) { - init(in, x => ()) - } - - import scala.collection.DebugUtils._ - override def debugInformation = buildString { - append => - append("Parallel flat hash table set") - append("No. 
elems: " + tableSize) - append("Table length: " + table.length) - append("Table: ") - append(arrayString(table, 0, table.length)) - append("Sizemap: ") - append(arrayString(sizemap, 0, sizemap.length)) - } - -} - - -/** $factoryInfo - * @define Coll `mutable.ParHashSet` - * @define coll parallel hash set - */ -object ParHashSet extends ParSetFactory[ParHashSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParHashSet[T]] = new GenericCanCombineFrom[T] - - override def newBuilder[T]: Combiner[T, ParHashSet[T]] = newCombiner - - override def newCombiner[T]: Combiner[T, ParHashSet[T]] = ParHashSetCombiner.apply[T] -} - - -private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int) -extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks) -with scala.collection.mutable.FlatHashTable.HashUtils[T] { -//self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - private val nonmasklen = ParHashSetCombiner.nonmasklength - private val seedvalue = 27 - - def +=(elem: T) = { - val entry = elemToEntry(elem) - sz += 1 - val hc = improve(entry.hashCode, seedvalue) - val pos = hc >>> nonmasklen - if (buckets(pos) eq null) { - // initialize bucket - buckets(pos) = new UnrolledBuffer[AnyRef] - } - // add to bucket - buckets(pos) += entry - this - } - - def result: ParHashSet[T] = { - val contents = if (size >= ParHashSetCombiner.numblocks * sizeMapBucketSize) parPopulate else seqPopulate - new ParHashSet(contents) - } - - private def parPopulate: FlatHashTable.Contents[T] = { - // construct it in parallel - val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue) - val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length)) - var leftinserts = 0 - for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry) - table.setSize(leftinserts + inserted) - table.hashTableContents - } - - 
private def seqPopulate: FlatHashTable.Contents[T] = { - // construct it sequentially - // TODO parallelize by keeping separate size maps and merging them - val tbl = new FlatHashTable[T] { - sizeMapInit(table.length) - seedvalue = ParHashSetCombiner.this.seedvalue - for { - buffer <- buckets - if buffer ne null - entry <- buffer - } addEntry(entry) - } - tbl.hashTableContents - } - - /* classes */ - - /** A flat hash table which doesn't resize itself. It accepts the number of elements - * it has to take and allocates the underlying hash table in advance. - * Elements can only be added to it. The final size has to be adjusted manually. - * It is internal to `ParHashSet` combiners. - */ - class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] { - _loadFactor = lf - table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor))) - tableSize = 0 - threshold = FlatHashTable.newThreshold(_loadFactor, table.length) - seedvalue = inseedvalue - sizeMapInit(table.length) - - override def toString = "AFHT(%s)".format(table.length) - - def tableLength = table.length - - def setSize(sz: Int) = tableSize = sz - - /** - * The elements are added using the `insertElem` method. This method accepts three - * arguments: - * - * @param insertAt where to add the element (set to -1 to use its hashcode) - * @param comesBefore the position before which the element should be added to - * @param newEntry the element to be added - * - * If the element is to be inserted at the position corresponding to its hash code, - * the table will try to add the element in such a position if possible. Collisions are resolved - * using linear hashing, so the element may actually have to be added to a position - * that follows the specified one. 
In the case that the first unoccupied position - * comes after `comesBefore`, the element is not added and the method simply returns -1, - * indicating that it couldn't add the element in a position that comes before the - * specified one. - * If the element is already present in the hash table, it is not added, and this method - * returns 0. If the element is added, it returns 1. - */ - def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = { - var h = insertAt - if (h == -1) h = index(newEntry.hashCode) - var curEntry = table(h) - while (null != curEntry) { - if (curEntry == newEntry) return 0 - h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!! - if (h >= comesBefore) return -1 - curEntry = table(h) - } - table(h) = newEntry - - // this is incorrect since we set size afterwards anyway and a counter - // like this would not even work: - // - // tableSize = tableSize + 1 - // - // furthermore, it completely bogs down the parallel - // execution when there are multiple workers - - nnSizeMapAdd(h) - 1 - } - } - - /* tasks */ - - class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int) - extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] { - var result = (Int.MinValue, new UnrolledBuffer[AnyRef]) - - def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) { - var i = offset - var totalinserts = 0 - var leftover = new UnrolledBuffer[AnyRef]() - while (i < (offset + howmany)) { - val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover) - totalinserts += inserted - leftover = intonextblock - i += 1 - } - result = (totalinserts, leftover) - } - private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits - private def blockStart(block: Int) = block * blocksize - private def nextBlockStart(block: Int) = (block + 1) * blocksize - private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: 
UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { - val beforePos = nextBlockStart(block) - - // store the elems - val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]()) - - // store the leftovers - val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers) - - // return the no. of stored elements tupled with leftovers - (elemsIn + leftoversIn, elemsLeft concat leftoversLeft) - } - private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = { - val leftovers = new UnrolledBuffer[AnyRef] - var inserted = 0 - - var unrolled = elems.headPtr - var i = 0 - val t = table - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val entry = chunkarr(i) - val res = t.insertEntry(atPos, beforePos, entry) - if (res >= 0) inserted += res - else leftovers += entry - i += 1 - } - i = 0 - unrolled = unrolled.next - } - - // slower: - // var it = elems.iterator - // while (it.hasNext) { - // val elem = it.next - // val res = table.insertEntry(atPos, beforePos, elem.asInstanceOf[T]) - // if (res >= 0) inserted += res - // else leftovers += elem - // } - - (inserted, leftovers) - } - def split = { - val fp = howmany / 2 - List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) - } - override def merge(that: FillBlocks) { - // take the leftovers from the left task, store them into the block of the right task - val atPos = blockStart(that.offset) - val beforePos = blockStart(that.offset + that.howmany) - val (inserted, remainingLeftovers) = insertAll(atPos, beforePos, this.result._2) - - // anything left after trying the store the left leftovers is added to the right task leftovers - // and a new leftovers set is produced in this way - // the total number of successfully inserted elements is adjusted accordingly - result = 
(this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) - } - -} - - -private[parallel] object ParHashSetCombiner { - private[mutable] val discriminantbits = 5 - private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) - private[mutable] val nonmasklength = 32 - discriminantbits - - def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]] -} - diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala deleted file mode 100644 index aceb9e0217b3..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.mutable - -import scala.collection.mutable.HashEntry -import scala.collection.parallel.IterableSplitter - -/** Provides functionality for hash tables with linked list buckets, - * enriching the data structure by fulfilling certain requirements - * for their parallel construction and iteration. - */ -trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] { - - override def alwaysInitSizeMap = true - - /** A parallel iterator returning all the entries. 
- */ - abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]] - (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) - extends IterableSplitter[T] with SizeMapUtils { - private val itertable = table - private var traversed = 0 - scan() - - def entry2item(e: Entry): T - def newIterator(idxFrom: Int, idxUntil: Int, totalSize: Int, es: Entry): IterRepr - - def hasNext = { - es ne null - } - - def next(): T = { - val res = es - es = es.next - scan() - traversed += 1 - entry2item(res) - } - - def scan() { - while (es == null && idx < until) { - es = itertable(idx).asInstanceOf[Entry] - idx = idx + 1 - } - } - - def remaining = totalsize - traversed - - private[parallel] override def debugInformation = { - buildString { - append => - append("/--------------------\\") - append("Parallel hash table entry iterator") - append("total hash table elements: " + tableSize) - append("pos: " + idx) - append("until: " + until) - append("traversed: " + traversed) - append("totalsize: " + totalsize) - append("current entry: " + es) - append("underlying from " + idx + " until " + until) - append(itertable.slice(idx, until).map(x => if (x != null) x.toString else "n/a").mkString(" | ")) - append("\\--------------------/") - } - } - - def dup = newIterator(idx, until, totalsize, es) - - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { - if (until > idx) { - // there is at least one more slot for the next iterator - // divide the rest of the table - val divsz = (until - idx) / 2 - - // second iterator params - val sidx = idx + divsz + 1 // + 1 preserves iteration invariant - val suntil = until - val ses = itertable(sidx - 1).asInstanceOf[Entry] // sidx - 1 ensures counting from the right spot - val stotal = calcNumElems(sidx - 1, suntil, table.length, sizeMapBucketSize) - - // first iterator params - val fidx = idx - val funtil = idx + divsz - val fes = es - val ftotal = totalsize - stotal - - Seq( - newIterator(fidx, 
funtil, ftotal, fes), - newIterator(sidx, suntil, stotal, ses) - ) - } else { - // otherwise, this is the last entry in the table - all what remains is the chain - // so split the rest of the chain - val arr = convertToArrayBuffer(es) - val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) - arrpit.split - } - } else Seq(this.asInstanceOf[IterRepr]) - - private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { - val buff = mutable.ArrayBuffer[Entry]() - var curr = chainhead - while (curr ne null) { - buff += curr - curr = curr.next - } - // println("converted " + remaining + " element iterator into buffer: " + buff) - buff map { e => entry2item(e) } - } - - protected def countElems(from: Int, until: Int) = { - var c = 0 - var idx = from - var es: Entry = null - while (idx < until) { - es = itertable(idx).asInstanceOf[Entry] - while (es ne null) { - c += 1 - es = es.next - } - idx += 1 - } - c - } - - protected def countBucketSizes(fromBucket: Int, untilBucket: Int) = { - var c = 0 - var idx = fromBucket - while (idx < untilBucket) { - c += sizemap(idx) - idx += 1 - } - c - } - } -} diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala deleted file mode 100644 index 5cb5cf20e488..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParIterable.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.{ ParIterableLike, Combiner } - -/** A template trait for mutable parallel iterable collections. 
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @tparam T the element type of the collection - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParIterable[T] extends scala.collection.GenIterable[T] - with scala.collection.parallel.ParIterable[T] - with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], Iterable[T]] - with Mutable { - override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable - //protected[this] override def newBuilder = ParIterable.newBuilder[T] - - // if `mutable.ParIterableLike` is introduced, please move these methods there - override def toIterable: ParIterable[T] = this - - override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) - - def seq: scala.collection.mutable.Iterable[T] -} - -/** $factoryInfo - */ -object ParIterable extends ParFactory[ParIterable] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T] -} diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala deleted file mode 100644 index 27093089c16a..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParMap.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.Combiner - -/** A template trait for mutable parallel maps. 
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMap[K, V] -extends GenMap[K, V] - with parallel.ParMap[K, V] - with ParIterable[(K, V)] - with GenericParMapTemplate[K, V, ParMap] - with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]] -{ - - protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] - - override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap - - override def empty: ParMap[K, V] = new ParHashMap[K, V] - - def seq: scala.collection.mutable.Map[K, V] - - override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value)) - - /** The same map with a given default function. - * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d) -} - -object ParMap extends ParMapFactory[ParMap] { - def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] - - def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = ParHashMapCombiner.apply[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V] - - class WithDefault[K, V](underlying: ParMap[K, V], d: K => V) - extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] { - override def += (kv: (K, V)) = {underlying += kv; this} - def -= (key: K) = {underlying -= key; this} - override def empty = new WithDefault(underlying.empty, d) - override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) - override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) - override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) - override def seq = underlying.seq.withDefault(d) - def clear() = underlying.clear() - def put(key: K, value: V): Option[V] = underlying.put(key, value) - - /** If these methods aren't overridden to thread through the underlying map, - * successive calls to withDefault* have no effect. 
- */ - override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d) - override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d) - } -} diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala deleted file mode 100644 index 28f76fc54b2e..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel -package mutable - -import scala.collection.generic._ -import scala.collection.mutable.Cloneable -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable - -/** A template trait for mutable parallel maps. This trait is to be mixed in - * with concrete parallel maps to override the representation type. 
- * - * $sideeffects - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * @define Coll `ParMap` - * @define coll parallel map - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParMapLike[K, - V, - +Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V], - +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]] -extends scala.collection.GenMapLike[K, V, Repr] - with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential] - with Growable[(K, V)] - with Shrinkable[K] - with Cloneable[Repr] -{ - // note: should not override toMap - - def put(key: K, value: V): Option[V] - - def +=(kv: (K, V)): this.type - - def -=(key: K): this.type - - def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[ParMap[K, U]] += kv - - def -(key: K) = this.clone() -= key - - def clear(): Unit -} diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala deleted file mode 100644 index 29d2889bc7f4..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParSeq.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic.GenericParTemplate -import scala.collection.generic.GenericCompanion -import scala.collection.generic.GenericParCompanion -import scala.collection.generic.CanCombineFrom -import scala.collection.generic.ParFactory -import scala.collection.parallel.ParSeqLike -import scala.collection.parallel.Combiner - -/** A mutable variant of `ParSeq`. 
- * - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] - with ParIterable[T] - with scala.collection.parallel.ParSeq[T] - with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] { -self => - override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq - //protected[this] override def newBuilder = ParSeq.newBuilder[T] - - def update(i: Int, elem: T): Unit - - def seq: scala.collection.mutable.Seq[T] - - override def toSeq: ParSeq[T] = this -} - - -/** $factoryInfo - * @define Coll `mutable.ParSeq` - * @define coll mutable parallel sequence - */ -object ParSeq extends ParFactory[ParSeq] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] - - def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] -} diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala deleted file mode 100644 index bef46205961c..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParSet.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.Combiner - -/** A mutable variant of `ParSet`. 
- * - * @author Aleksandar Prokopec - */ -trait ParSet[T] -extends scala.collection/*.mutable*/.GenSet[T] - with ParIterable[T] - with scala.collection.parallel.ParSet[T] - with GenericParTemplate[T, ParSet] - with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]] -{ -self => - override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet - override def empty: ParSet[T] = ParHashSet() - def seq: scala.collection.mutable.Set[T] -} - - -/** $factoryInfo - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - */ -object ParSet extends ParSetFactory[ParSet] { - implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T] - - override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder - - override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner -} diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala deleted file mode 100644 index 9a35a522d1ee..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package parallel.mutable - -import scala.collection.mutable.Cloneable -import scala.collection.GenSetLike -import scala.collection.generic.Growable -import scala.collection.generic.Shrinkable - -/** A template trait for mutable parallel sets. This trait is mixed in with concrete - * parallel sets to override the representation type. 
- * - * $sideeffects - * - * @tparam T the element type of the set - * @define Coll `mutable.ParSet` - * @define coll mutable parallel set - * - * @author Aleksandar Prokopec - * @since 2.9 - */ -trait ParSetLike[T, - +Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T], - +Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]] -extends GenSetLike[T, Repr] - with scala.collection.parallel.ParIterableLike[T, Repr, Sequential] - with scala.collection.parallel.ParSetLike[T, Repr, Sequential] - with Growable[T] - with Shrinkable[T] - with Cloneable[Repr] -{ -self => - override def empty: Repr - - def +=(elem: T): this.type - - def -=(elem: T): this.type - - def +(elem: T) = this.clone() += elem - - def -(elem: T) = this.clone() -= elem - - // note: should not override toSet -} diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala deleted file mode 100644 index 8a15d694fa70..000000000000 --- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel.mutable - -import scala.collection.generic._ -import scala.collection.parallel.Combiner -import scala.collection.parallel.IterableSplitter -import scala.collection.parallel.Task -import scala.collection.concurrent.BasicNode -import scala.collection.concurrent.TNode -import scala.collection.concurrent.LNode -import scala.collection.concurrent.CNode -import scala.collection.concurrent.SNode -import scala.collection.concurrent.INode -import scala.collection.concurrent.TrieMap -import scala.collection.concurrent.TrieMapIterator - -/** Parallel TrieMap collection. - * - * It has its bulk operations parallelized, but uses the snapshot operation - * to create the splitter. This means that parallel bulk operations can be - * called concurrently with the modifications. - * - * @author Aleksandar Prokopec - * @since 2.10 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-concurrent-tries Scala's Parallel Collections Library overview]] - * section on `ParTrieMap` for more information. 
- */ -final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) -extends ParMap[K, V] - with GenericParMapTemplate[K, V, ParTrieMap] - with ParMapLike[K, V, ParTrieMap[K, V], TrieMap[K, V]] - with ParTrieMapCombiner[K, V] - with Serializable -{ - def this() = this(new TrieMap) - - override def mapCompanion: GenericParMapCompanion[ParTrieMap] = ParTrieMap - - override def empty: ParTrieMap[K, V] = ParTrieMap.empty - - protected[this] override def newCombiner = ParTrieMap.newCombiner - - override def seq = ctrie - - def splitter = new ParTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[TrieMap[K, V]], true) - - override def clear() = ctrie.clear() - - def result = this - - def get(key: K): Option[V] = ctrie.get(key) - - def put(key: K, value: V): Option[V] = ctrie.put(key, value) - - def update(key: K, value: V): Unit = ctrie.update(key, value) - - def remove(key: K): Option[V] = ctrie.remove(key) - - def +=(kv: (K, V)): this.type = { - ctrie.+=(kv) - this - } - - def -=(key: K): this.type = { - ctrie.-=(key) - this - } - - override def size = { - val in = ctrie.readRoot() - val r = in.gcasRead(ctrie) - r match { - case tn: TNode[_, _] => tn.cachedSize(ctrie) - case ln: LNode[_, _] => ln.cachedSize(ctrie) - case cn: CNode[_, _] => - tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array)) - cn.cachedSize(ctrie) - } - } - - override def stringPrefix = "ParTrieMap" - - /* tasks */ - - /** Computes TrieMap size in parallel. 
*/ - class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] { - var result = -1 - def leaf(prev: Option[Int]) = { - var sz = 0 - var i = offset - val until = offset + howmany - while (i < until) { - array(i) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ctrie) - } - i += 1 - } - result = sz - } - def split = { - val fp = howmany / 2 - Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array)) - } - def shouldSplitFurther = howmany > 1 - override def merge(that: Size) = result = result + that.result - } -} - -private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean) -extends TrieMapIterator[K, V](lev, ct, mustInit) - with IterableSplitter[(K, V)] -{ - // only evaluated if `remaining` is invoked (which is not used by most tasks) - lazy val totalsize = ct.par.size - var iterated = 0 - - protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit) - - override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = { - val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) - level < maxsplits - } - - def dup = { - val it = newIterator(0, ct, _mustInit = false) - dupTo(it) - it.iterated = this.iterated - it - } - - override def next() = { - iterated += 1 - super.next() - } - - def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] - - override def isRemainingCheap = false - - def remaining: Int = totalsize - iterated -} - -/** Only used within the `ParTrieMap`. 
*/ -private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] { - - def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = - if (this eq other) this - else throw new UnsupportedOperationException("This shouldn't have been called in the first place.") - - override def canBeShared = true -} - -object ParTrieMap extends ParMapFactory[ParTrieMap] { - def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V] - def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V] - - implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V] -} diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala deleted file mode 100644 index 60138d44735d..000000000000 --- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.ArrayBuffer -import scala.collection.parallel.Task - -/** An array combiner that uses a chain of arraybuffers to store elements. */ -trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] { - - override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz) - - // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden. 
- final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) - - def allocateAndCopy = if (chain.size > 1) { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] - - combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size)) - - new ParArray(arrayseq) - } else { // optimisation if there is only 1 array - new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size)) - } - - override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain - - /* tasks */ - - class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] { - var result = () - def leaf(prev: Option[Unit]) = if (howmany > 0) { - var totalleft = howmany - val (stbuff, stind) = findStart(offset) - var buffind = stbuff - var ind = stind - var arrayIndex = offset - while (totalleft > 0) { - val currbuff = chain(buffind) - val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind - val until = ind + chunksize - - copyChunk(currbuff.internalArray, ind, array, arrayIndex, until) - arrayIndex += chunksize - ind += chunksize - - totalleft -= chunksize - buffind += 1 - ind = 0 - } - } - private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) { - Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart) - } - private def findStart(pos: Int) = { - var left = pos - var buffind = 0 - while (left >= chain(buffind).size) { - left -= chain(buffind).size - buffind += 1 - } - (buffind, left) - } - def split = { - val fp = howmany / 2 - List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) - } -} - -object ResizableParArrayCombiner { - def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): 
ResizableParArrayCombiner[T] = { - new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]] - } - def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T]) -} diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala deleted file mode 100644 index 483c7343f084..000000000000 --- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.parallel.mutable - -import scala.collection.mutable.ArraySeq -import scala.collection.mutable.DoublingUnrolledBuffer -import scala.collection.mutable.UnrolledBuffer.Unrolled -import scala.collection.parallel.Combiner -import scala.collection.parallel.Task - -/** An array combiner that uses doubling unrolled buffers to store elements. */ -trait UnrolledParArrayCombiner[T] -extends Combiner[T, ParArray[T]] { -//self: EnvironmentPassingCombiner[T, ParArray[T]] => - // because size is doubling, random access is O(logn)! 
- val buff = new DoublingUnrolledBuffer[Any] - - def +=(elem: T) = { - buff += elem - this - } - - def result = { - val arrayseq = new ArraySeq[T](size) - val array = arrayseq.array.asInstanceOf[Array[Any]] - - combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size)) - - new ParArray(arrayseq) - } - - def clear() { - buff.clear() - } - - override def sizeHint(sz: Int) = { - buff.lastPtr.next = new Unrolled(0, new Array[Any](sz), null, buff) - buff.lastPtr = buff.lastPtr.next - } - - def combine[N <: T, NewTo >: ParArray[T]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = other match { - case that if that eq this => this // just return this - case that: UnrolledParArrayCombiner[t] => - buff concat that.buff - this - case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.") - } - - def size = buff.size - - /* tasks */ - - class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int) - extends Task[Unit, CopyUnrolledToArray] { - var result = () - - def leaf(prev: Option[Unit]) = if (howmany > 0) { - var totalleft = howmany - val (startnode, startpos) = findStart(offset) - var curr = startnode - var pos = startpos - var arroffset = offset - while (totalleft > 0) { - val lefthere = scala.math.min(totalleft, curr.size - pos) - Array.copy(curr.array, pos, array, arroffset, lefthere) - // println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! 
" + buff.headPtr) - totalleft -= lefthere - arroffset += lefthere - pos = 0 - curr = curr.next - } - } - private def findStart(pos: Int) = { - var left = pos - var node = buff.headPtr - while ((left - node.size) >= 0) { - left -= node.size - node = node.next - } - (node, left) - } - def split = { - val fp = howmany / 2 - List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp)) - } - def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) - override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")" - } -} - -object UnrolledParArrayCombiner { - def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]] -} - diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala deleted file mode 100644 index c594b61caf3e..000000000000 --- a/src/library/scala/collection/parallel/mutable/package.scala +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection.parallel - -import scala.collection.mutable.ArrayBuffer -import scala.collection.mutable.ArraySeq -import scala.collection.generic.Sizing - -package object mutable { - /* aliases */ - type ParArrayCombiner[T] = ResizableParArrayCombiner[T] - val ParArrayCombiner = ResizableParArrayCombiner -} - -package mutable { - /* classes and traits */ - private[mutable] trait SizeMapUtils { - - protected def calcNumElems(from: Int, until: Int, tableLength: Int, sizeMapBucketSize: Int) = { - // find the first bucket - val fbindex = from / sizeMapBucketSize - - // find the last bucket - val lbindex = until / sizeMapBucketSize - // note to self: FYI if you define lbindex as from / sizeMapBucketSize, the first branch - // below always triggers and tests pass, so you spend a great day benchmarking and profiling - - if (fbindex == lbindex) { - // if first and last are the same, just count between `from` and `until` - // return this count - countElems(from, until) - } else { - // otherwise count in first, then count in last - val fbuntil = ((fbindex + 1) * sizeMapBucketSize) min tableLength - val fbcount = countElems(from, fbuntil) - val lbstart = lbindex * sizeMapBucketSize - val lbcount = countElems(lbstart, until) - - // and finally count the elements in all the buckets between first and last using a sizemap - val inbetween = countBucketSizes(fbindex + 1, lbindex) - - // return the sum - fbcount + inbetween + lbcount - } - } - - protected def countElems(from: Int, until: Int): Int - - protected def countBucketSizes(fromBucket: Int, untilBucket: Int): Int - } - - /* hack-arounds */ - private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing { - def internalArray = array - def setInternalSize(s: Int) = size0 = s - override def sizeHint(len: Int) = { - if (len > size && len >= 1) - java.util.Arrays.copyOf(array, len) - } - } - - private[mutable] class ExposedArraySeq[T](arr: Array[AnyRef], sz: Int) extends 
ArraySeq[T](sz) { - override val array = arr - override val length = sz - override def stringPrefix = "ArraySeq" - } -} diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala deleted file mode 100644 index d276e451fb7d..000000000000 --- a/src/library/scala/collection/parallel/package.scala +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.collection.generic.CanBuildFrom -import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.mutable.ParArray -import scala.collection.mutable.UnrolledBuffer -import scala.annotation.unchecked.uncheckedVariance -import scala.language.implicitConversions - -/** Package object for parallel collections. - */ -package object parallel { - /* constants */ - val MIN_FOR_COPY = 512 - val CHECK_RATE = 512 - val SQRT2 = math.sqrt(2) - val availableProcessors = java.lang.Runtime.getRuntime.availableProcessors - - /* functions */ - - /** Computes threshold from the size of the collection and the parallelism level. - */ - def thresholdFromSize(sz: Int, parallelismLevel: Int) = { - val p = parallelismLevel - if (p > 1) 1 + sz / (8 * p) - else sz - } - - val defaultTaskSupport: TaskSupport = new ExecutionContextTaskSupport - - def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = { - c match { - case pc: ParIterableLike[_, _, _] => pc.tasksupport = t - case _ => // do nothing - } - c - } - - /** Adds toParArray method to collection classes. 
*/ - implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.GenTraversableOnce[T]) { - def toParArray = { - val t = asGto(c) - if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]] - else { - val it = t.toIterator - val cb = mutable.ParArrayCombiner[T]() - while (it.hasNext) cb += it.next - cb.result - } - } - } -} - - -package parallel { - /** Implicit conversions used in the implementation of parallel collections. */ - private[collection] object ParallelCollectionImplicits { - implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] { - def isParallel = bf.isInstanceOf[Parallel] - def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]] - def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new Otherwise[R] { - def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody - } - } - implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] { - def isParallel = t.isInstanceOf[Parallel] - def isParIterable = t.isInstanceOf[ParIterable[_]] - def asParIterable = t.asInstanceOf[ParIterable[T]] - def isParSeq = t.isInstanceOf[ParSeq[_]] - def asParSeq = t.asInstanceOf[ParSeq[T]] - def ifParSeq[R](isbody: ParSeq[T] => R) = new Otherwise[R] { - def otherwise(notbody: => R) = if (isParallel) isbody(asParSeq) else notbody - } - } - implicit def throwable2ops(self: Throwable) = new ThrowableOps { - def alongWith(that: Throwable) = (self, that) match { - case (self: CompositeThrowable, that: CompositeThrowable) => new CompositeThrowable(self.throwables ++ that.throwables) - case (self: CompositeThrowable, _) => new CompositeThrowable(self.throwables + that) - case (_, that: CompositeThrowable) => new CompositeThrowable(that.throwables + self) - case _ => new CompositeThrowable(Set(self, that)) - } - } - } - - trait FactoryOps[From, Elem, To] { - trait Otherwise[R] { - def otherwise(notbody: => R): R - } - - 
def isParallel: Boolean - def asParallel: CanCombineFrom[From, Elem, To] - def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R): Otherwise[R] - } - - trait TraversableOps[T] { - trait Otherwise[R] { - def otherwise(notbody: => R): R - } - - def isParallel: Boolean - def isParIterable: Boolean - def asParIterable: ParIterable[T] - def isParSeq: Boolean - def asParSeq: ParSeq[T] - def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R] - } - - @deprecated("this trait will be removed", "2.11.0") - trait ThrowableOps { - @deprecated("this method will be removed", "2.11.0") - def alongWith(that: Throwable): Throwable - } - - /* classes */ - - trait CombinerFactory[U, Repr] { - /** Provides a combiner used to construct a collection. */ - def apply(): Combiner[U, Repr] - /** The call to the `apply` method can create a new combiner each time. - * If it does, this method returns `false`. - * The same combiner factory may be used each time (typically, this is - * the case for concurrent collections, which are thread safe). - * If so, the method returns `true`. - */ - def doesShareCombiners: Boolean - } - - /** Composite throwable - thrown when multiple exceptions are thrown at the same time. */ - @deprecated("this class will be removed.", "2.11.0") - final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception( - "Multiple exceptions thrown during a parallel computation: " + - throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n") - ) - - - /** A helper iterator for iterating very small array buffers. - * Automatically forwards the signal delegate when splitting. 
- */ - private[parallel] class BufferSplitter[T] - (private val buffer: scala.collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: scala.collection.generic.Signalling) - extends IterableSplitter[T] { - signalDelegate = _sigdel - def hasNext = index < until - def next = { - val r = buffer(index) - index += 1 - r - } - def remaining = until - index - def dup = new BufferSplitter(buffer, index, until, signalDelegate) - def split: Seq[IterableSplitter[T]] = if (remaining > 1) { - val divsz = (until - index) / 2 - Seq( - new BufferSplitter(buffer, index, index + divsz, signalDelegate), - new BufferSplitter(buffer, index + divsz, until, signalDelegate) - ) - } else Seq(this) - private[parallel] override def debugInformation = { - buildString { - append => - append("---------------") - append("Buffer iterator") - append("buffer: " + buffer) - append("index: " + index) - append("until: " + until) - append("---------------") - } - } - } - - /** A helper combiner which contains an array of buckets. Buckets themselves - * are unrolled linked lists. Some parallel collections are constructed by - * sorting their result set according to some criteria. - * - * A reference `buckets` to buckets is maintained. Total size of all buckets - * is kept in `sz` and maintained whenever 2 bucket combiners are combined. - * - * Clients decide how to maintain these by implementing `+=` and `result`. - * Populating and using the buckets is up to the client. While populating them, - * the client should update `sz` accordingly. Note that a bucket is by default - * set to `null` to save space - the client should initialize it. - * Note that in general the type of the elements contained in the buckets `Buck` - * doesn't have to correspond to combiner element type `Elem`. - * - * This class simply gives an efficient `combine` for free - it chains - * the buckets together. 
Since the `combine` contract states that the receiver (`this`) - * becomes invalidated, `combine` reuses the receiver and returns it. - * - * Methods `beforeCombine` and `afterCombine` are called before and after - * combining the buckets, respectively, given that the argument to `combine` - * is not `this` (as required by the `combine` contract). - * They can be overridden in subclasses to provide custom behaviour by modifying - * the receiver (which will be the return value). - */ - private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]] - (private val bucketnumber: Int) - extends Combiner[Elem, To] { - //self: EnvironmentPassingCombiner[Elem, To] => - protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber) - protected var sz: Int = 0 - - def size = sz - - def clear() = { - buckets = new Array[UnrolledBuffer[Buck]](bucketnumber) - sz = 0 - } - - def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} - - def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} - - def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = { - if (this eq other) this - else other match { - case _: BucketCombiner[_, _, _, _] => - beforeCombine(other) - val that = other.asInstanceOf[BucketCombiner[Elem, To, Buck, CombinerType]] - - var i = 0 - while (i < bucketnumber) { - if (buckets(i) eq null) - buckets(i) = that.buckets(i) - else if (that.buckets(i) ne null) - buckets(i) concat that.buckets(i) - - i += 1 - } - sz = sz + that.size - afterCombine(other) - this - case _ => - sys.error("Unexpected combiner type.") - } - } - } -} diff --git a/src/library/scala/collection/readme-if-you-want-to-add-something.txt b/src/library/scala/collection/readme-if-you-want-to-add-something.txt deleted file mode 100644 index 6700cb7b6807..000000000000 --- 
a/src/library/scala/collection/readme-if-you-want-to-add-something.txt +++ /dev/null @@ -1,50 +0,0 @@ -Conventions for Collection Implementors - -Martin Odersky -19 Mar 2010 - -This note describes some conventions which must be followed to keep -the collection libraries consistent. - -We distinguish in the following between two kinds of methods - - - ``Accessors'' access some of the elements of a collection, but return a result which - is unrelated to the collection. - Example of accessors are: head, foldLeft, indexWhere, toSeq. - - - ``Transformers'' access elements of a collection and produce a new collection of related - type as a result. The relation might either be direct (same type as receiver) - or indirect, linked by a CanBuildFrom implicit. - Example of transformers are: filter, map, groupBy, zip. - -1. Proxies - -Every collection type has a Proxy class that forwards all operations to -an underlying collection. Proxy methods are all implemented in classes -with names ending in `ProxyLike'. If you add a new method to a collection -class you need to add the same method to the corresponding ProxyLike class. - -2. Forwarders - -Classes Traversable, Iterable, and Seq also have forwarders, which -forward all collection-specific accessor operations to an underlying -collection. These are defined as classes with names ending -in `Forwarder' in package collection.generic. If you add a new -accessor method to a Seq or one of its collection superclasses, you -need to add the same method to the corresponding forwarder class. - -3. Views - -Classes Traversable, Iterable, Seq, IndexedSeq, and mutable.IndexedSeq -support views. Their operations are all defined in classes with names -ending in `ViewLike'. If you add a new transformer method to one of -the above collection classes, you need to add the same method to the -corresponding view class. 
Failure to do so will cause the -corresponding method to fail at runtime with an exception like -UnsupportedOperationException("coll.newBuilder"). If there is no good -way to implement the operation in question lazily, there's a fallback -using the newForced method. See the definition of sorted in trait -SeqViewLike as an example. - - - diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala deleted file mode 100644 index afac787f0d23..000000000000 --- a/src/library/scala/collection/script/Location.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package script - -/** Class `Location` describes locations in messages implemented by - * class [[scala.collection.script.Message]]. - * - * @author Matthias Zenger - * @since 2.8 - */ - -@deprecated("scripting is deprecated", "2.11.0") -sealed abstract class Location - -@deprecated("scripting is deprecated", "2.11.0") -case object Start extends Location - -@deprecated("scripting is deprecated", "2.11.0") -case object End extends Location - -@deprecated("scripting is deprecated", "2.11.0") -case object NoLo extends Location - -@deprecated("scripting is deprecated", "2.11.0") -case class Index(n: Int) extends Location diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala deleted file mode 100644 index 61543d10a10c..000000000000 --- a/src/library/scala/collection/script/Message.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package script - -import mutable.ArrayBuffer - -/** Class `Message` represents messages that are issued by observable - * collection classes whenever a data structure is changed. Class `Message` - * has several subclasses for the various kinds of events: `Update` - * `Remove`, `Include`, `Reset`, and `Script`. - * - * @author Matthias Zenger - * @since 2.8 - */ -@deprecated("scripting is deprecated", "2.11.0") -trait Message[+A] - -/** This observable update refers to inclusion operations that add new elements - * to collection classes. - * - * @author Matthias Zenger - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Include[+A](location: Location, elem: A) extends Message[A] { - def this(elem: A) = this(NoLo, elem) -} - -/** This observable update refers to destructive modification operations - * of elements from collection classes. - * - * @author Matthias Zenger - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Update[+A](location: Location, elem: A) extends Message[A] { - def this(elem: A) = this(NoLo, elem) -} - -/** This observable update refers to removal operations of elements - * from collection classes. - * - * @author Matthias Zenger - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Remove[+A](location: Location, elem: A) extends Message[A] { - def this(elem: A) = this(NoLo, elem) -} - -/** This command refers to reset operations. - * - * @author Matthias Zenger - */ -@deprecated("scripting is deprecated", "2.11.0") -case class Reset[+A]() extends Message[A] - -/** Objects of this class represent compound messages consisting - * of a sequence of other messages. 
- * - * @author Matthias Zenger - */ -@deprecated("scripting is deprecated", "2.11.0") -class Script[A] extends ArrayBuffer[Message[A]] with Message[A] { - - override def toString(): String = { - var res = "Script(" - val it = this.iterator - var i = 1 - while (it.hasNext) { - if (i > 1) - res = res + ", " - res = res + "[" + i + "] " + it.next - i += 1 - } - res + ")" - } -} diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala deleted file mode 100644 index 6fdf954342e2..000000000000 --- a/src/library/scala/collection/script/Scriptable.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package script - -/** Classes that mix in the `Scriptable` class allow messages to be sent to - * objects of that class. - * - * @author Matthias Zenger - * @since 2.8 - */ -@deprecated("scripting is deprecated", "2.11.0") -trait Scriptable[A] { - /** Send a message to this scriptable object. - */ - def <<(cmd: Message[A]): Unit -} diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala index 74def3a5255e..b3ecb194a12c 100644 --- a/src/library/scala/compat/Platform.scala +++ b/src/library/scala/compat/Platform.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,8 +13,7 @@ package scala package compat -import java.lang.System - +@deprecated("Will be removed in the future.", since = "2.13.0") object Platform { /** Thrown when a stack overflow occurs because a method or function recurses too deeply. @@ -22,6 +21,7 @@ object Platform { * On the JVM, this is a type alias for `java.lang.StackOverflowError`, which itself extends `java.lang.Error`. * The same rules apply to catching a `java.lang.Error` as for Java, that it indicates a serious problem that a reasonable application should not try and catch. */ + @deprecated("Use `java.lang.StackOverflowError` instead.", since = "2.13.0") type StackOverflowError = java.lang.StackOverflowError /** This is a type alias for `java.util.ConcurrentModificationException`, @@ -29,6 +29,7 @@ object Platform { * For example, many common collection types do not allow modifying a collection * while it is being iterated over. */ + @deprecated("Use `java.util.ConcurrentModificationException` instead.", since = "2.13.0") type ConcurrentModificationException = java.util.ConcurrentModificationException /** Copies `length` elements of array `src` starting at position `srcPos` to the @@ -51,7 +52,8 @@ object Platform { * after `srcPos` or `destPos` in `src` and `dest` respectively. 
*/ @inline - def arraycopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) { + @deprecated("Use `java.lang.System#arraycopy` instead.", since = "2.13.0") + def arraycopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { System.arraycopy(src, srcPos, dest, destPos, length) } @@ -79,6 +81,7 @@ object Platform { * @throws java.lang.NegativeArraySizeException if the specified length is negative */ @inline + @deprecated("Use `java.lang.reflect.Array#newInstance` instead.", since = "2.13.0") def createArray(elemClass: Class[_], length: Int): AnyRef = java.lang.reflect.Array.newInstance(elemClass, length) @@ -87,7 +90,8 @@ object Platform { * @throws java.lang.NullPointerException If `arr` is `null`. */ @inline - def arrayclear(arr: Array[Int]) { java.util.Arrays.fill(arr, 0) } + @deprecated("Use `java.util.Arrays#fill` instead.", since = "2.13.0") + def arrayclear(arr: Array[Int]): Unit = { java.util.Arrays.fill(arr, 0) } /** Returns the `Class` object associated with the class or interface with the given string name using the current `ClassLoader`. * On the JVM, invoking this method is equivalent to: `java.lang.Class.forName(name)` @@ -104,6 +108,7 @@ object Platform { * }}} */ @inline + @deprecated("Use `java.lang.Class#forName` instead.", since = "2.13.0") def getClassForName(name: String): Class[_] = java.lang.Class.forName(name) /** The default line separator. @@ -111,7 +116,8 @@ object Platform { * On the JVM, this is equivalent to calling the method: * `java.lang.System.lineSeparator` */ - val EOL = scala.util.Properties.lineSeparator + @deprecated("Use `java.lang.System#lineSeparator` instead.", since = "2.13.0") + val EOL: String = System.lineSeparator /** The current time in milliseconds. The time is counted since 1 January 1970 * UTC. @@ -120,6 +126,7 @@ object Platform { * precise than a millisecond. 
*/ @inline + @deprecated("Use `java.lang.System#currentTimeMillis` instead.", since = "2.13.0") def currentTime: Long = System.currentTimeMillis() /** Runs the garbage collector. @@ -129,9 +136,11 @@ object Platform { * The underlying JVM is free to ignore this request. */ @inline + @deprecated("Use `java.lang.System#gc` instead.", since = "2.13.0") def collectGarbage(): Unit = System.gc() /** The name of the default character set encoding as a string */ @inline + @deprecated("Use `java.nio.charset.Charset.defaultCharset#name` instead.", since = "2.13.0") def defaultCharsetName: String = java.nio.charset.Charset.defaultCharset.name } diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala index d201a14570f2..1372e1614ac8 100644 --- a/src/library/scala/concurrent/Awaitable.scala +++ b/src/library/scala/concurrent/Awaitable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala index 183df3cf0bbc..ac197c89f8c1 100644 --- a/src/library/scala/concurrent/BatchingExecutor.scala +++ b/src/library/scala/concurrent/BatchingExecutor.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,31 @@ package scala.concurrent import java.util.concurrent.Executor -import scala.annotation.tailrec +import java.util.Objects +import scala.util.control.NonFatal +import scala.annotation.{switch, tailrec} + +/** + * Marker trait to indicate that a Runnable is Batchable by BatchingExecutors + */ +trait Batchable { + self: Runnable => +} + +private[concurrent] object BatchingExecutorStatics { + final val emptyBatchArray: Array[Runnable] = new Array[Runnable](0) + + // Max number of Runnables executed nested before starting to batch (to prevent stack exhaustion) + final val syncPreBatchDepth = 16 + + // Max number of Runnables processed in one go (to prevent starvation of other tasks on the pool) + final val runLimit = 1024 + + object MissingParentBlockContext extends BlockContext { + override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = + try thunk finally throw new IllegalStateException("BUG in BatchingExecutor.Batch: parentBlockContext is null") + } +} /** * Mixin trait for an Executor @@ -25,14 +49,7 @@ import scala.annotation.tailrec * thread which may improve CPU affinity. However, * if tasks passed to the Executor are blocking * or expensive, this optimization can prevent work-stealing - * and make performance worse. Also, some ExecutionContext - * may be fast enough natively that this optimization just - * adds overhead. - * The default ExecutionContext.global is already batching - * or fast enough not to benefit from it; while - * `fromExecutor` and `fromExecutorService` do NOT add - * this optimization since they don't know whether the underlying - * executor will benefit from it. + * and make performance worse. * A batching executor can create deadlocks if code does * not use `scala.concurrent.blocking` when it should, * because tasks created within other tasks will block @@ -40,82 +57,214 @@ import scala.annotation.tailrec * This executor may run tasks in any order, including LIFO order. * There are no ordering guarantees. 
* - * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable - * in the calling thread synchronously. It must enqueue/handoff the Runnable. + * WARNING: Only use *EITHER* `submitAsyncBatched` OR `submitSyncBatched`!! + * + * When you implement this trait for async executors like thread pools, + * you're going to need to implement it something like the following: + * + * {{{ + * final override def submitAsync(runnable: Runnable): Unit = + * super[SuperClass].execute(runnable) // To prevent reentrancy into `execute` + * + * final override def execute(runnable: Runnable): Unit = + * if (runnable.isInstanceOf[Batchable]) // Or other logic + * submitAsyncBatched(runnable) + * else + * submitAsync(runnable) + * + * final override def reportFailure(cause: Throwable): Unit = … + * }}} + * + * And if you want to implement if for a sync, trampolining, executor you're + * going to implement it something like this: + * + * {{{ + * final override def submitAsync(runnable: Runnable): Unit = () + * + * final override def execute(runnable: Runnable): Unit = + * submitSyncBatched(runnable) // You typically will want to batch everything + * + * final override def reportFailure(cause: Throwable): Unit = + * ExecutionContext.defaultReporter(cause) // Or choose something more fitting + * }}} + * */ private[concurrent] trait BatchingExecutor extends Executor { + private[this] final val _tasksLocal = new ThreadLocal[AnyRef]() + + /* + * Batch implements a LIFO queue (stack) and is used as a trampolining Runnable. + * In order to conserve allocations, the first element in the batch is stored "unboxed" in + * the `first` field. Subsequent Runnables are stored in the array called `other`. 
+ */ + private[this] sealed abstract class AbstractBatch protected (protected final var first: Runnable, protected final var other: Array[Runnable], protected final var size: Int) { + + private[this] final def ensureCapacity(curSize: Int): Array[Runnable] = { + val curOther = this.other + val curLen = curOther.length + if (curSize <= curLen) curOther + else { + val newLen = if (curLen == 0) 4 else curLen << 1 + + if (newLen <= curLen) throw new StackOverflowError("Space limit of asynchronous stack reached: " + curLen) + val newOther = new Array[Runnable](newLen) + System.arraycopy(curOther, 0, newOther, 0, curLen) + this.other = newOther + newOther + } + } + + final def push(r: Runnable): Unit = { + val sz = this.size + if(sz == 0) + this.first = r + else + ensureCapacity(sz)(sz - 1) = r + this.size = sz + 1 + } - // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside - private val _tasksLocal = new ThreadLocal[List[Runnable]]() - - private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext { - private var parentBlockContext: BlockContext = _ - // this method runs in the delegate ExecutionContext's thread - override def run(): Unit = { - require(_tasksLocal.get eq null) - - val prevBlockContext = BlockContext.current - BlockContext.withBlockContext(this) { - try { - parentBlockContext = prevBlockContext - - @tailrec def processBatch(batch: List[Runnable]): Unit = batch match { - case null | Nil => () - case head :: tail => - _tasksLocal set tail - try { - head.run() - } catch { - case t: Throwable => - // if one task throws, move the - // remaining tasks to another thread - // so we can throw the exception - // up to the invoking executor - val remaining = _tasksLocal.get - _tasksLocal set Nil - unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails? 
- throw t // rethrow - } - processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here + @tailrec protected final def runN(n: Int): Unit = + if (n > 0) + (this.size: @switch) match { + case 0 => + case 1 => + val next = this.first + this.first = null + this.size = 0 + next.run() + runN(n - 1) + case sz => + val o = this.other + val next = o(sz - 2) + o(sz - 2) = null + this.size = sz - 1 + next.run() + runN(n - 1) } + } + + private[this] final class AsyncBatch private(_first: Runnable, _other: Array[Runnable], _size: Int) extends AbstractBatch(_first, _other, _size) with Runnable with BlockContext with (BlockContext => Throwable) { + private[this] final var parentBlockContext: BlockContext = BatchingExecutorStatics.MissingParentBlockContext + + final def this(runnable: Runnable) = this(runnable, BatchingExecutorStatics.emptyBatchArray, 1) + + override final def run(): Unit = { + _tasksLocal.set(this) // This is later cleared in `apply` or `runWithoutResubmit` + + val f = resubmit(BlockContext.usingBlockContext(this)(this)) + + if (f != null) + throw f + } + + /* LOGIC FOR ASYNCHRONOUS BATCHES */ + override final def apply(prevBlockContext: BlockContext): Throwable = try { + parentBlockContext = prevBlockContext + runN(BatchingExecutorStatics.runLimit) + null + } catch { + case t: Throwable => t // We are handling exceptions on the outside of this method + } finally { + parentBlockContext = BatchingExecutorStatics.MissingParentBlockContext + _tasksLocal.remove() + } - processBatch(initial) - } finally { - _tasksLocal.remove() - parentBlockContext = null + /* Attempts to resubmit this Batch to the underlying ExecutionContext, + * this only happens for Batches where `resubmitOnBlock` is `true`. + * Only attempt to resubmit when there are `Runnables` left to process. + * Note that `cause` can be `null`. 
+ */ + private[this] final def resubmit(cause: Throwable): Throwable = + if (this.size > 0) { + try { submitForExecution(this); cause } catch { + case inner: Throwable => + if (NonFatal(inner)) { + val e = new ExecutionException("Non-fatal error occurred and resubmission failed, see suppressed exception.", cause) + e.addSuppressed(inner) + e + } else inner } - } + } else cause // TODO: consider if NonFatals should simply be `reportFailure`:ed rather than rethrown + + private[this] final def cloneAndClear(): AsyncBatch = { + val newBatch = new AsyncBatch(this.first, this.other, this.size) + this.first = null + this.other = BatchingExecutorStatics.emptyBatchArray + this.size = 0 + newBatch + } + + override final def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = { + // If we know there will be blocking, we don't want to keep tasks queued up because it could deadlock. + if(this.size > 0) + submitForExecution(cloneAndClear()) // If this throws then we have bigger problems + + parentBlockContext.blockOn(thunk) // Now delegate the blocking to the previous BC } + } - override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = { - // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock. - { - val tasks = _tasksLocal.get - _tasksLocal set null - if ((tasks ne null) && tasks.nonEmpty) - unbatchedExecute(new Batch(tasks)) + private[this] final class SyncBatch(runnable: Runnable) extends AbstractBatch(runnable, BatchingExecutorStatics.emptyBatchArray, 1) with Runnable { + @tailrec override final def run(): Unit = { + try runN(BatchingExecutorStatics.runLimit) catch { + case ie: InterruptedException => + reportFailure(ie) // TODO: Handle InterruptedException differently? 
+ case f if NonFatal(f) => + reportFailure(f) } - // now delegate the blocking to the previous BC - require(parentBlockContext ne null) - parentBlockContext.blockOn(thunk) + if (this.size > 0) + run() } } - protected def unbatchedExecute(r: Runnable): Unit + /** MUST throw a NullPointerException when `runnable` is null + * When implementing a sync BatchingExecutor, it is RECOMMENDED + * to implement this method as `runnable.run()` + */ + protected def submitForExecution(runnable: Runnable): Unit - override def execute(runnable: Runnable): Unit = { - if (batchable(runnable)) { // If we can batch the runnable - _tasksLocal.get match { - case null => unbatchedExecute(new Batch(runnable :: Nil)) // If we aren't in batching mode yet, enqueue batch - case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch - } - } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying + /** Reports that an asynchronous computation failed. + * See `ExecutionContext.reportFailure(throwable: Throwable)` + */ + protected def reportFailure(throwable: Throwable): Unit + + /** + * WARNING: Never use both `submitAsyncBatched` and `submitSyncBatched` in the same + * implementation of `BatchingExecutor` + */ + protected final def submitAsyncBatched(runnable: Runnable): Unit = { + val b = _tasksLocal.get + if (b.isInstanceOf[AsyncBatch]) b.asInstanceOf[AsyncBatch].push(runnable) + else submitForExecution(new AsyncBatch(runnable)) } - /** Override this to define which runnables will be batched. 
*/ - def batchable(runnable: Runnable): Boolean = runnable match { - case _: OnCompleteRunnable => true - case _ => false + /** + * WARNING: Never use both `submitAsyncBatched` and `submitSyncBatched` in the same + * implementation of `BatchingExecutor` + */ + protected final def submitSyncBatched(runnable: Runnable): Unit = { + Objects.requireNonNull(runnable, "runnable is null") + val tl = _tasksLocal + val b = tl.get + if (b.isInstanceOf[SyncBatch]) b.asInstanceOf[SyncBatch].push(runnable) + else { + val i = if (b ne null) b.asInstanceOf[java.lang.Integer].intValue else 0 + if (i < BatchingExecutorStatics.syncPreBatchDepth) { + tl.set(java.lang.Integer.valueOf(i + 1)) + try submitForExecution(runnable) // User code so needs to be try-finally guarded here + catch { + case ie: InterruptedException => + reportFailure(ie) // TODO: Handle InterruptedException differently? + case f if NonFatal(f) => + reportFailure(f) + } + finally tl.set(b) + } else { + val batch = new SyncBatch(runnable) + tl.set(batch) + submitForExecution(batch) + tl.set(b) // Batch only throws fatals so no need for try-finally here + } + } } } diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala index 6e87f9a775c4..37483c307fd0 100644 --- a/src/library/scala/concurrent/BlockContext.scala +++ b/src/library/scala/concurrent/BlockContext.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,12 +13,12 @@ package scala.concurrent /** - * A context to be notified by `scala.concurrent.blocking` when + * A context to be notified by [[scala.concurrent.blocking]] when * a thread is about to block. In effect this trait provides - * the implementation for `scala.concurrent.Await`. 
- * `scala.concurrent.Await.result()` and `scala.concurrent.Await.ready()` + * the implementation for [[scala.concurrent.Await]]. + * [[scala.concurrent.Await.result]] and [[scala.concurrent.Await.ready]] * locates an instance of `BlockContext` by first looking for one - * provided through `BlockContext.withBlockContext()` and failing that, + * provided through [[BlockContext.withBlockContext]] and failing that, * checking whether `Thread.currentThread` is an instance of `BlockContext`. * So a thread pool can have its `java.lang.Thread` instances implement * `BlockContext`. There's a default `BlockContext` used if the thread @@ -29,7 +29,7 @@ package scala.concurrent * {{{ * val oldContext = BlockContext.current * val myContext = new BlockContext { - * override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = { + * override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = { * // you'd have code here doing whatever you need to do * // when the thread is about to block. * // Then you'd chain to the previous context: @@ -45,46 +45,66 @@ package scala.concurrent trait BlockContext { /** Used internally by the framework; - * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`. - * - * Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead. - */ - def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T + * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`. + * + * Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead. + * + * In implementations of this method it is RECOMMENDED to first check if `permission` is `null` and + * if it is, throw an `IllegalArgumentException`. 
+ * + * @throws IllegalArgumentException if the `permission` is `null` + */ + def blockOn[T](thunk: => T)(implicit permission: CanAwait): T } object BlockContext { - private object DefaultBlockContext extends BlockContext { - override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk + private[this] object DefaultBlockContext extends BlockContext { + override final def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = thunk } /** - * @return the `BlockContext` that will be used if no other is found. - **/ - def defaultBlockContext: BlockContext = DefaultBlockContext + * The default block context will execute the supplied thunk immediately. + * @return the `BlockContext` that will be used if no other is found. + **/ + final def defaultBlockContext: BlockContext = DefaultBlockContext + + private[this] final val contextLocal = new ThreadLocal[BlockContext]() - private val contextLocal = new ThreadLocal[BlockContext]() + private[this] final def prefer(candidate: BlockContext): BlockContext = + if (candidate ne null) candidate + else { + val t = Thread.currentThread + if (t.isInstanceOf[BlockContext]) t.asInstanceOf[BlockContext] + else DefaultBlockContext + } + + /** + * @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point + **/ + final def current: BlockContext = prefer(contextLocal.get) /** - @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point + * Installs a current `BlockContext` around executing `body`. 
**/ - def current: BlockContext = contextLocal.get match { - case null => Thread.currentThread match { - case ctx: BlockContext => ctx - case _ => DefaultBlockContext + final def withBlockContext[T](blockContext: BlockContext)(body: => T): T = { + val old = contextLocal.get // can be null + if (old eq blockContext) body + else { + contextLocal.set(blockContext) + try body finally contextLocal.set(old) } - case some => some } /** - * Installs a current `BlockContext` around executing `body`. + * Installs the BlockContext `blockContext` around the invocation to `f` and passes in the previously installed BlockContext to `f`. + * @return the value produced by applying `f` **/ - def withBlockContext[T](blockContext: BlockContext)(body: => T): T = { + final def usingBlockContext[I, T](blockContext: BlockContext)(f: BlockContext => T): T = { val old = contextLocal.get // can be null - try { + if (old eq blockContext) f(prefer(old)) + else { contextLocal.set(blockContext) - body - } finally { - contextLocal.set(old) + try f(prefer(old)) finally contextLocal.set(old) } } } diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala index fddb77cc0dfc..a9ada60e3da0 100644 --- a/src/library/scala/concurrent/Channel.scala +++ b/src/library/scala/concurrent/Channel.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,25 +16,25 @@ package scala.concurrent * which are read by one or more reader threads. 
* * @tparam A type of data exchanged - * @author Martin Odersky */ +@deprecated("Use `java.util.concurrent.LinkedTransferQueue` instead.", since = "2.13.0") class Channel[A] { - class LinkedList[A] { + private class LinkedList { var elem: A = _ - var next: LinkedList[A] = null + var next: LinkedList = _ } - private var written = new LinkedList[A] // FIFO queue, realized through - private var lastWritten = written // aliasing of a linked list - private var nreaders = 0 + private[this] var written = new LinkedList // FIFO queue, realized through + private[this] var lastWritten = written // aliasing of a linked list + private[this] var nreaders = 0 /** Append a value to the FIFO queue to be read by `read`. * This operation is nonblocking and can be executed by any thread. * * @param x object to enqueue to this channel */ - def write(x: A) = synchronized { + def write(x: A): Unit = synchronized { lastWritten.elem = x - lastWritten.next = new LinkedList[A] + lastWritten.next = new LinkedList lastWritten = lastWritten.next if (nreaders > 0) notify() } diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala index 0ac51a1cf8db..1a450c3c0458 100644 --- a/src/library/scala/concurrent/DelayedLazyVal.scala +++ b/src/library/scala/concurrent/DelayedLazyVal.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -23,10 +23,8 @@ package scala.concurrent * * @param f the function to obtain the current value at any point in time * @param body the computation to run to completion in another thread - * - * @author Paul Phillips - * @since 2.8 */ +@deprecated("`DelayedLazyVal` Will be removed in the future.", since = "2.13.0") class DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionContext){ @volatile private[this] var _isDone = false private[this] lazy val complete = f() @@ -35,7 +33,7 @@ class DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionConte * * @return true if the computation is complete. */ - def isDone = _isDone + def isDone: Boolean = _isDone /** The current result of f(), or the final result if complete. * @@ -43,5 +41,7 @@ class DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionConte */ def apply(): T = if (isDone) complete else f() - exec.execute(new Runnable { def run = { body; _isDone = true } }) + exec.execute(() => { + body; _isDone = true + }) } diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index 7590226e9dcf..b132e2dee5b7 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -32,8 +32,8 @@ import scala.annotation.implicitNotFound * While it is possible to simply import * `scala.concurrent.ExecutionContext.Implicits.global` to obtain an * implicit `ExecutionContext`, application developers should carefully - * consider where they want to set execution policy; - * ideally, one place per application—or per logically related section of code— + * consider where they want to define the execution policy; + * ideally, one place per application — or per logically related section of code — * will make a decision about which `ExecutionContext` to use. * That is, you will mostly want to avoid hardcoding, especially via an import, * `scala.concurrent.ExecutionContext.Implicits.global`. @@ -57,12 +57,12 @@ import scala.annotation.implicitNotFound * knowing that only that library's network operations will be affected. * Application callback execution can be configured separately. */ -@implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass +@implicitNotFound("""Cannot find an implicit ExecutionContext. You might add an (implicit ec: ExecutionContext) parameter to your method. The ExecutionContext is used to configure how and on which -thread pools Futures will run, so the specific ExecutionContext -that is selected is important. +thread pools asynchronous tasks (such as Futures) will run, +so the specific ExecutionContext that is selected is important. If your application does not define an ExecutionContext elsewhere, consider using Scala's global ExecutionContext by defining @@ -81,7 +81,7 @@ trait ExecutionContext { * * @param cause the cause of the failure */ - def reportFailure(@deprecatedName('t) cause: Throwable): Unit + def reportFailure(@deprecatedName("t") cause: Throwable): Unit /** Prepares for the execution of a task. Returns the prepared * execution context. The recommended implementation of @@ -100,18 +100,19 @@ trait ExecutionContext { * preparation later. 
*/ @deprecated("preparation of ExecutionContexts will be removed", "2.12.0") + // This cannot be removed until there is a suitable replacement def prepare(): ExecutionContext = this } /** * An [[ExecutionContext]] that is also a - * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]]. + * Java [[java.util.concurrent.Executor Executor]]. */ trait ExecutionContextExecutor extends ExecutionContext with Executor /** * An [[ExecutionContext]] that is also a - * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]]. + * Java [[java.util.concurrent.ExecutorService ExecutorService]]. */ trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService @@ -120,36 +121,130 @@ trait ExecutionContextExecutorService extends ExecutionContextExecutor with Exec */ object ExecutionContext { /** - * The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global - * `ExecutionContext` explicitly. + * The global [[ExecutionContext]]. This default `ExecutionContext` implementation is backed by a work-stealing thread + * pool. It can be configured via the following system properties: * - * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. - * It can be configured via the following [[scala.sys.SystemProperties]]: - * - * `scala.concurrent.context.minThreads` = defaults to "1" - * `scala.concurrent.context.numThreads` = defaults to "x1" (i.e. the current number of available processors * 1) - * `scala.concurrent.context.maxThreads` = defaults to "x1" (i.e. the current number of available processors * 1) - * `scala.concurrent.context.maxExtraThreads` = defaults to "256" + * - `scala.concurrent.context.minThreads` = defaults to "1" + * - `scala.concurrent.context.numThreads` = defaults to "x1" (i.e. 
the current number of available processors * 1) + * - `scala.concurrent.context.maxThreads` = defaults to "x1" (i.e. the current number of available processors * 1) + * - `scala.concurrent.context.maxExtraThreads` = defaults to "256" * * The pool size of threads is then `numThreads` bounded by `minThreads` on the lower end and `maxThreads` on the high end. * * The `maxExtraThreads` is the maximum number of extra threads to have at any given time to evade deadlock, - * see [[scala.concurrent.BlockContext]]. + * see [[scala.concurrent.blocking]]. + * + * The `global` execution context can be used explicitly, by defining an + * `implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global`, or by importing + * [[ExecutionContext.Implicits.global]]. + * + * == Batching short-lived nested tasks == + * + * Asynchronous code with short-lived nested tasks is executed more efficiently when using + * `ExecutionContext.opportunistic` (continue reading to learn why it is `private[scala]` and how to access it). + * + * `ExecutionContext.opportunistic` uses the same thread pool as `ExecutionContext.global`. It attempts to batch + * nested task and execute them on the same thread as the enclosing task. This is ideally suited to execute + * short-lived tasks as it reduces the overhead of context switching. + * + * WARNING: long-running and/or blocking tasks should be demarcated within [[scala.concurrent.blocking]]-blocks + * to ensure that any pending tasks in the current batch can be executed by another thread on `global`. + * + * === How to use === + * + * This field is `private[scala]` to maintain binary compatibility. It was added in 2.13.4, code that references it + * directly fails to run with a 2.13.0-3 Scala library. * - * @return the global `ExecutionContext` + * Libraries should not reference this field directly because users of the library might be using an earlier Scala + * version. 
In order to use the batching `ExecutionContext` in a library, the code needs to fall back to `global` + * in case the `opportunistic` field is missing (example below). The resulting `ExecutionContext` has batching + * behavior in all Scala 2.13 versions (`global` is batching in 2.13.0-3). + * + * {{{ + * implicit val ec: scala.concurrent.ExecutionContext = try { + * scala.concurrent.ExecutionContext.getClass + * .getDeclaredMethod("opportunistic") + * .invoke(scala.concurrent.ExecutionContext) + * .asInstanceOf[scala.concurrent.ExecutionContext] + * } catch { + * case _: NoSuchMethodException => + * scala.concurrent.ExecutionContext.global + * } + * }}} + * + * Application authors can safely use the field because the Scala version at run time is the same as at compile time. + * Options to bypass the access restriction include: + * + * 1. Using a structural type (example below). This uses reflection at run time. + * 1. Writing a Scala `object` in the `scala` package (example below). + * 1. Writing a Java source file. This works because `private[scala]` is emitted as `public` in Java bytecode. + * + * {{{ + * // Option 1 + * implicit val ec: scala.concurrent.ExecutionContext = + * (scala.concurrent.ExecutionContext: + * {def opportunistic: scala.concurrent.ExecutionContextExecutor} + * ).opportunistic + * + * // Option 2 + * package scala { + * object OpportunisticEC { + * implicit val ec: scala.concurrent.ExecutionContext = + * scala.concurrent.ExecutionContext.opportunistic + * } + * } + * }}} + * + * @return the global [[ExecutionContext]] */ - def global: ExecutionContextExecutor = Implicits.global.asInstanceOf[ExecutionContextExecutor] + final lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor) + + /** + * WARNING: Only ever execute logic which will quickly return control to the caller. 
+ * + * This `ExecutionContext` steals execution time from other threads by having its + * `Runnable`s run on the `Thread` which calls `execute` and then yielding back control + * to the caller after *all* its `Runnable`s have been executed. + * Nested invocations of `execute` will be trampolined to prevent uncontrolled stack space growth. + * + * When using `parasitic` with abstractions such as `Future` it will in many cases be non-deterministic + * as to which `Thread` will be executing the logic, as it depends on when/if that `Future` is completed. + * + * Do *not* call any blocking code in the `Runnable`s submitted to this `ExecutionContext` + * as it will prevent progress by other enqueued `Runnable`s and the calling `Thread`. + * + * Symptoms of misuse of this `ExecutionContext` include, but are not limited to, deadlocks + * and severe performance problems. + * + * Any `NonFatal` or `InterruptedException`s will be reported to the `defaultReporter`. + */ + object parasitic extends ExecutionContextExecutor with BatchingExecutor { + override final def submitForExecution(runnable: Runnable): Unit = runnable.run() + override final def execute(runnable: Runnable): Unit = submitSyncBatched(runnable) + override final def reportFailure(t: Throwable): Unit = defaultReporter(t) + } + + /** + * See [[ExecutionContext.global]]. 
+ */ + private[scala] lazy val opportunistic: ExecutionContextExecutor = new ExecutionContextExecutor with BatchingExecutor { + final override def submitForExecution(runnable: Runnable): Unit = global.execute(runnable) + + final override def execute(runnable: Runnable): Unit = + if ((!runnable.isInstanceOf[impl.Promise.Transformation[_,_]] || runnable.asInstanceOf[impl.Promise.Transformation[_,_]].benefitsFromBatching) && runnable.isInstanceOf[Batchable]) + submitAsyncBatched(runnable) + else + submitForExecution(runnable) + + override final def reportFailure(t: Throwable): Unit = global.reportFailure(t) + } object Implicits { /** - * The implicit global `ExecutionContext`. Import `global` when you want to provide the global - * `ExecutionContext` implicitly. - * - * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default, - * the thread pool uses a target number of worker threads equal to the number of - * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]]. + * An accessor that can be used to import the global `ExecutionContext` into the implicit scope, + * see [[ExecutionContext.global]]. */ - implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor) + implicit final def global: ExecutionContext = ExecutionContext.global } /** Creates an `ExecutionContext` from the given `ExecutorService`. @@ -192,11 +287,9 @@ object ExecutionContext { */ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) - /** The default reporter simply prints the stack trace of the `Throwable` to [[http://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]]. + /** The default reporter simply prints the stack trace of the `Throwable` to [[java.lang.System#err System.err]]. 
* * @return the function for error reporting */ - def defaultReporter: Throwable => Unit = _.printStackTrace() + final val defaultReporter: Throwable => Unit = _.printStackTrace() } - - diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 4f12a8379419..4142d8400200 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,22 +12,23 @@ package scala.concurrent -import scala.language.higherKinds -import java.util.concurrent.{CountDownLatch, TimeUnit} -import java.util.concurrent.atomic.{AtomicInteger, AtomicReference} - -import scala.util.control.NonFatal +import java.util.concurrent.atomic.AtomicReference +import java.util.concurrent.locks.LockSupport +import scala.util.control.{NoStackTrace, NonFatal} import scala.util.{Failure, Success, Try} import scala.concurrent.duration._ -import scala.collection.generic.CanBuildFrom +import scala.collection.BuildFrom +import scala.collection.mutable.{ArrayBuffer, Builder} import scala.reflect.ClassTag +import scala.concurrent.ExecutionContext.parasitic +import scala.concurrent.impl.Promise.DefaultPromise - -/** A `Future` represents a value which may or may not *currently* be available, +/** A `Future` represents a value which may or may not be currently available, * but will be available at some point, or an exception if that value could not be made available. * - * Asynchronous computations that yield futures are created with the `Future.apply` call and are computed using a supplied `ExecutionContext`, - * which can be backed by a Thread pool. + * Asynchronous computations are created by calling `Future.apply`, which yields instances of `Future`. 
+ * Computations are executed using an `ExecutionContext`, which is usually supplied implicitly, + * and which is commonly backed by a thread pool. * * {{{ * import ExecutionContext.Implicits.global @@ -40,9 +41,16 @@ import scala.reflect.ClassTag * } * }}} * - * @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang + * Note that the `global` context is convenient but restricted: + * "fatal" exceptions are reported only by printing a stack trace, + * and the underlying thread pool may be shared by a mix of jobs. + * For any nontrivial application, see the caveats explained at [[ExecutionContext]] + * and also the overview linked below, which explains + * [[https://docs.scala-lang.org/overviews/core/futures.html#exceptions exception handling]] + * in depth. + * * - * @see [[http://docs.scala-lang.org/overviews/core/futures.html Futures and Promises]] + * @see [[https://docs.scala-lang.org/overviews/core/futures.html Futures and Promises]] * * @define multipleCallbacks * Multiple callbacks may be registered; there is no guarantee that they will be @@ -52,12 +60,11 @@ import scala.reflect.ClassTag * This future may contain a throwable object and this means that the future failed. * Futures obtained through combinators have the same exception as the future they were obtained from. * The following throwable objects are not contained in the future: - * - `Error` - errors are not contained within futures + * - `Error` - fatal errors are not contained within futures * - `InterruptedException` - not contained within futures * - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures * - * Instead, the future is completed with a ExecutionException with one of the exceptions above - * as the cause. + * Instead, the future is completed with an ExecutionException that has one of the exceptions above as its cause. 
* If a future is failed with a `scala.runtime.NonLocalReturnControl`, * it is completed with a value from that throwable instead. * @@ -97,57 +104,9 @@ import scala.reflect.ClassTag * Completion of the Future must *happen-before* the invocation of the callback. */ trait Future[+T] extends Awaitable[T] { - import Future.{ InternalCallbackExecutor => internalExecutor } /* Callbacks */ - /** When this future is completed successfully (i.e., with a value), - * apply the provided partial function to the value if the partial function - * is defined at that value. - * - * If the future has already been completed with a value, - * this will either be applied immediately or be scheduled asynchronously. - * - * Note that the returned value of `pf` will be discarded. - * - * $swallowsExceptions - * $multipleCallbacks - * $callbackInContext - * - * @group Callbacks - */ - @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12.0") - def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete { - case Success(v) => - pf.applyOrElse[T, Any](v, Predef.identity[T]) // Exploiting the cached function to avoid MatchError - case _ => - } - - /** When this future is completed with a failure (i.e., with a throwable), - * apply the provided callback to the throwable. - * - * $caughtThrowables - * - * If the future has already been completed with a failure, - * this will either be applied immediately or be scheduled asynchronously. - * - * Will not be called in case that the future is completed with a value. - * - * Note that the returned value of `pf` will be discarded. 
- * - * $swallowsExceptions - * $multipleCallbacks - * $callbackInContext - * - * @group Callbacks - */ - @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12.0") - def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete { - case Failure(t) => - pf.applyOrElse[Throwable, Any](t, Predef.identity[Throwable]) // Exploiting the cached function to avoid MatchError - case _ => - } - /** When this future is completed, either through an exception, or a value, * apply the provided function. * @@ -164,8 +123,7 @@ trait Future[+T] extends Awaitable[T] { * @param f the function to be executed when this `Future` completes * @group Callbacks */ - def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit - + def onComplete[U](f: Try[T] => U)(implicit executor: ExecutionContext): Unit /* Miscellaneous */ @@ -206,11 +164,7 @@ trait Future[+T] extends Awaitable[T] { * @return a failed projection of this `Future`. * @group Transformations */ - def failed: Future[Throwable] = - transform({ - case Failure(t) => Success(t) - case Success(v) => Failure(new NoSuchElementException("Future.failed not completed with a throwable.")) - })(internalExecutor) + def failed: Future[Throwable] = transform(Future.failedFun)(parasitic) /* Monadic operations */ @@ -241,8 +195,9 @@ trait Future[+T] extends Awaitable[T] { */ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = transform { - case Success(r) => Try(s(r)) - case Failure(t) => Try(throw f(t)) // will throw fatal errors! + t => + if (t.isInstanceOf[Success[T]]) t map s + else throw f(t.asInstanceOf[Failure[T]].exception) // will throw fatal errors! 
} /** Creates a new Future by applying the specified function to the result @@ -304,8 +259,9 @@ trait Future[+T] extends Awaitable[T] { * @group Transformations */ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith { - case Success(s) => f(s) - case Failure(_) => this.asInstanceOf[Future[S]] + t => + if(t.isInstanceOf[Success[T]]) f(t.asInstanceOf[Success[T]].value) + else this.asInstanceOf[Future[S]] // Safe cast } /** Creates a new future with one level of nesting flattened, this method is equivalent @@ -314,7 +270,7 @@ trait Future[+T] extends Awaitable[T] { * @tparam S the type of the returned `Future` * @group Transformations */ - def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor) + def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(parasitic) /** Creates a new future by filtering the value of the current future with a predicate. * @@ -336,8 +292,14 @@ trait Future[+T] extends Awaitable[T] { * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException` * @group Transformations */ - def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = - map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") } + def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = + transform { + t => + if (t.isInstanceOf[Success[T]]) { + if (p(t.asInstanceOf[Success[T]].value)) t + else Future.filterFailure + } else t + } /** Used by for-comprehensions. 
* @group Transformations @@ -370,8 +332,11 @@ trait Future[+T] extends Awaitable[T] { * @group Transformations */ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = - map { - r => pf.applyOrElse(r, (t: T) => throw new NoSuchElementException("Future.collect partial function is not defined at: " + t)) + transform { + t => + if (t.isInstanceOf[Success[T]]) + Success(pf.applyOrElse(t.asInstanceOf[Success[T]].value, Future.collectFailed)) + else t.asInstanceOf[Failure[S]] } /** Creates a new future that will handle any matching throwable that this @@ -414,37 +379,39 @@ trait Future[+T] extends Awaitable[T] { */ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = transformWith { - case Failure(t) => pf.applyOrElse(t, (_: Throwable) => this) - case Success(_) => this + t => + if (t.isInstanceOf[Failure[T]]) { + val result = pf.applyOrElse(t.asInstanceOf[Failure[T]].exception, Future.recoverWithFailed) + if (result ne Future.recoverWithFailedMarker) result + else this + } else this } /** Zips the values of `this` and `that` future, and creates * a new future holding the tuple of their results. * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. + * If either input future fails, the resulting future is failed with the same + * throwable, without waiting for the other input future to complete. + * + * If the application of `f` throws a non-fatal throwable, the resulting future + * is failed with that throwable. 
* * @tparam U the type of the other `Future` * @param that the other `Future` * @return a `Future` with the results of both futures or the failure of the first of them that failed * @group Transformations */ - def zip[U](that: Future[U]): Future[(T, U)] = { - implicit val ec = internalExecutor - flatMap { r1 => that.map(r2 => (r1, r2)) } - } + def zip[U](that: Future[U]): Future[(T, U)] = + zipWith(that)(Future.zipWithTuple2Fun)(parasitic) /** Zips the values of `this` and `that` future using a function `f`, * and creates a new future holding the result. * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. - * If the application of `f` throws a throwable, the resulting future - * is failed with that throwable if it is non-fatal. + * If either input future fails, the resulting future is failed with the same + * throwable, without waiting for the other input future to complete. + * + * If the application of `f` throws a non-fatal throwable, the resulting future + * is failed with that throwable. * * @tparam U the type of the other `Future` * @tparam R the type of the resulting `Future` @@ -453,8 +420,14 @@ trait Future[+T] extends Awaitable[T] { * @return a `Future` with the result of the application of `f` to the results of `this` and `that` * @group Transformations */ - def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = - flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor) + def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { + // This is typically overriden by the implementation in DefaultPromise, which provides + // symmetric fail-fast behavior regardless of which future fails first. 
+ // + // TODO: remove this implementation and make Future#zipWith abstract + // when we're next willing to make a binary incompatible change + flatMap(r1 => that.map(r2 => f(r1, r2)))(if (executor.isInstanceOf[BatchingExecutor]) executor else parasitic) + } /** Creates a new future which holds the result of this future if it was completed successfully, or, if not, * the result of the `that` future if `that` is completed successfully. @@ -464,7 +437,7 @@ trait Future[+T] extends Awaitable[T] { * * Example: * {{{ - * val f = Future { sys.error("failed") } + * val f = Future { throw new RuntimeException("failed") } * val g = Future { 5 } * val h = f fallbackTo g * h foreach println // Eventually prints 5 @@ -478,8 +451,12 @@ trait Future[+T] extends Awaitable[T] { def fallbackTo[U >: T](that: Future[U]): Future[U] = if (this eq that) this else { - implicit val ec = internalExecutor - recoverWith { case _ => that } recoverWith { case _ => this } + implicit val ec = parasitic + transformWith { + t => + if (t.isInstanceOf[Success[T]]) this + else that transform { tt => if (tt.isInstanceOf[Success[U]]) tt else t } + } } /** Creates a new `Future[S]` which is completed with this `Future`'s result if @@ -491,7 +468,7 @@ trait Future[+T] extends Awaitable[T] { * @group Transformations */ def mapTo[S](implicit tag: ClassTag[S]): Future[S] = { - implicit val ec = internalExecutor + implicit val ec = parasitic val boxedClass = { val c = tag.runtimeClass if (c.isPrimitive) Future.toBoxed(c) else c @@ -516,7 +493,7 @@ trait Future[+T] extends Awaitable[T] { * {{{ * val f = Future { 5 } * f andThen { - * case r => sys.error("runtime exception") + * case r => throw new RuntimeException("runtime exception") * } andThen { * case Failure(t) => println(t) * case Success(v) => println(v) @@ -533,9 +510,9 @@ trait Future[+T] extends Awaitable[T] { def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = transform { result => - try 
pf.applyOrElse[Try[T], Any](result, Predef.identity[Try[T]]) - catch { case NonFatal(t) => executor reportFailure t } - + try pf.applyOrElse[Try[T], Any](result, Future.id[Try[T]]) + catch { case t if NonFatal(t) => executor.reportFailure(t) } + // TODO: use `finally`? result } } @@ -549,7 +526,11 @@ trait Future[+T] extends Awaitable[T] { */ object Future { - private[concurrent] val toBoxed = Map[Class[_], Class[_]]( + /** + * Utilities, hoisted functions, etc. + */ + + private[concurrent] final val toBoxed = Map[Class[_], Class[_]]( classOf[Boolean] -> classOf[java.lang.Boolean], classOf[Byte] -> classOf[java.lang.Byte], classOf[Char] -> classOf[java.lang.Character], @@ -561,59 +542,103 @@ object Future { classOf[Unit] -> classOf[scala.runtime.BoxedUnit] ) + private[this] final val _cachedId: AnyRef => AnyRef = Predef.identity _ + + private[concurrent] final def id[T]: T => T = _cachedId.asInstanceOf[T => T] + + private[concurrent] final val collectFailed = + (t: Any) => throw new NoSuchElementException("Future.collect partial function is not defined at: " + t) with NoStackTrace + + private[concurrent] final val filterFailure = + Failure[Nothing](new NoSuchElementException("Future.filter predicate is not satisfied") with NoStackTrace) + + private[this] final val failedFailure = + Failure[Nothing](new NoSuchElementException("Future.failed not completed with a throwable.") with NoStackTrace) + + private[concurrent] final val failedFailureFuture: Future[Nothing] = + scala.concurrent.Future.fromTry(failedFailure) + + private[this] final val _failedFun: Try[Any] => Try[Throwable] = + v => if (v.isInstanceOf[Failure[Any]]) Success(v.asInstanceOf[Failure[Any]].exception) else failedFailure + + private[concurrent] final def failedFun[T]: Try[T] => Try[Throwable] = _failedFun.asInstanceOf[Try[T] => Try[Throwable]] + + private[concurrent] final val recoverWithFailedMarker: Future[Nothing] = + scala.concurrent.Future.failed(new Throwable with NoStackTrace) + + 
private[concurrent] final val recoverWithFailed = (t: Throwable) => recoverWithFailedMarker + + private[this] final val _zipWithTuple2: (Any, Any) => (Any, Any) = Tuple2.apply _ + private[concurrent] final def zipWithTuple2Fun[T,U] = _zipWithTuple2.asInstanceOf[(T,U) => (T,U)] + + private[this] final val _addToBuilderFun: (Builder[Any, Nothing], Any) => Builder[Any, Nothing] = (b: Builder[Any, Nothing], e: Any) => b += e + private[concurrent] final def addToBuilderFun[A, M] = _addToBuilderFun.asInstanceOf[Function2[Builder[A, M], A, Builder[A, M]]] + /** A Future which is never completed. */ - final object never extends Future[Nothing] { + object never extends Future[Nothing] { - @throws(classOf[TimeoutException]) - @throws(classOf[InterruptedException]) - override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { + @throws[TimeoutException] + @throws[InterruptedException] + override final def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { + import Duration.{Undefined, Inf, MinusInf} atMost match { - case e if e eq Duration.Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") - case Duration.Inf => new CountDownLatch(1).await() - case Duration.MinusInf => // Drop out - case f: FiniteDuration => - if (f > Duration.Zero) new CountDownLatch(1).await(f.toNanos, TimeUnit.NANOSECONDS) + case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") + case `Inf` => + while(!Thread.interrupted()) { + LockSupport.park(this) + } + throw new InterruptedException + case `MinusInf` => // Drop out + case f: FiniteDuration if f > Duration.Zero => + var now = System.nanoTime() + val deadline = now + f.toNanos + while((deadline - now) > 0) { + LockSupport.parkNanos(this, deadline - now) + if (Thread.interrupted()) + throw new InterruptedException + now = System.nanoTime() + } + // Done waiting, drop out + case _: FiniteDuration => // Drop out if 0 or less + case x: 
Duration.Infinite => throw new MatchError(x) } throw new TimeoutException(s"Future timed out after [$atMost]") } - @throws(classOf[TimeoutException]) - @throws(classOf[InterruptedException]) - override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = { + @throws[TimeoutException] + @throws[InterruptedException] + override final def result(atMost: Duration)(implicit permit: CanAwait): Nothing = { ready(atMost) throw new TimeoutException(s"Future timed out after [$atMost]") } - override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = () - override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = () - override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = () - override def isCompleted: Boolean = false - override def value: Option[Try[Nothing]] = None - override def failed: Future[Throwable] = this - override def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = () - override def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this - override def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this - override def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this - override def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this - override def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this - override def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this - override def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this - override def collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this - override def recover[U >: Nothing](pf: PartialFunction[Throwable, U])(implicit executor: 
ExecutionContext): Future[U] = this - override def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this - override def zip[U](that: Future[U]): Future[(Nothing, U)] = this - override def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this - override def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this - override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this - override def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this - - override def toString: String = "Future()" + override final def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = () + override final def isCompleted: Boolean = false + override final def value: Option[Try[Nothing]] = None + override final def failed: Future[Throwable] = this + override final def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = () + override final def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this + override final def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this + override final def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this + override final def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this + override final def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this + override final def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this + override final def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this + override final def collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this + override final def recover[U >: Nothing](pf: 
PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this + override final def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this + override final def zip[U](that: Future[U]): Future[(Nothing, U)] = this + override final def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this + override final def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this + override final def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this + override final def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this + override final def toString: String = "Future()" } - /** A Future which is always completed with the Unit value. + /** A Future which is completed with the Unit value. */ - val unit: Future[Unit] = successful(()) + final val unit: Future[Unit] = fromTry(Success(())) /** Creates an already completed Future with the specified exception. * @@ -621,7 +646,7 @@ object Future { * @param exception the non-null instance of `Throwable` * @return the newly created `Future` instance */ - def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future + final def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future /** Creates an already completed Future with the specified result. * @@ -629,7 +654,7 @@ object Future { * @param result the given successful value * @return the newly created `Future` instance */ - def successful[T](result: T): Future[T] = Promise.successful(result).future + final def successful[T](result: T): Future[T] = Promise.successful(result).future /** Creates an already completed Future with the specified result or exception. 
* @@ -637,7 +662,7 @@ object Future { * @param result the result of the returned `Future` instance * @return the newly created `Future` instance */ - def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future + final def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation. * @@ -646,6 +671,7 @@ object Future { * {{{ * val f1 = Future(expr) * val f2 = Future.unit.map(_ => expr) + * val f3 = Future.unit.transform(_ => Success(expr)) * }}} * * The result becomes available once the asynchronous computation is completed. @@ -655,75 +681,81 @@ object Future { * @param executor the execution context on which the future is run * @return the `Future` holding the result of the computation */ - def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = + final def apply[T](body: => T)(implicit executor: ExecutionContext): Future[T] = unit.map(_ => body) - /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]` - * into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`. + /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation once it completes. + * + * The following expressions are semantically equivalent: + * + * {{{ + * val f1 = Future(expr).flatten + * val f2 = Future.delegate(expr) + * val f3 = Future.unit.flatMap(_ => expr) + * }}} + * + * The result becomes available once the resulting Future of the asynchronous computation is completed. 
+ * + * @tparam T the type of the result + * @param body the asynchronous computation, returning a Future + * @param executor the execution context on which the `body` is evaluated in + * @return the `Future` holding the result of the computation + */ + final def delegate[T](body: => Future[T])(implicit executor: ExecutionContext): Future[T] = + unit.flatMap(_ => body) + + /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms, in essence, a `IterableOnce[Future[A]]` + * into a `Future[IterableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`. * * @tparam A the type of the value inside the Futures - * @tparam M the type of the `TraversableOnce` of Futures - * @param in the `TraversableOnce` of Futures which will be sequenced - * @return the `Future` of the `TraversableOnce` of results - */ - def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = { - in.foldLeft(successful(cbf(in))) { - (fr, fa) => fr.zipWith(fa)(_ += _) - }.map(_.result())(InternalCallbackExecutor) - } + * @tparam CC the type of the `IterableOnce` of Futures + * @tparam To the type of the resulting collection + * @param in the `IterableOnce` of Futures which will be sequenced + * @return the `Future` of the resulting collection + */ + final def sequence[A, CC[X] <: IterableOnce[X], To](in: CC[Future[A]])(implicit bf: BuildFrom[CC[Future[A]], A, To], executor: ExecutionContext): Future[To] = + in.iterator.foldLeft(successful(bf.newBuilder(in))) { + (fr, fa) => fr.zipWith(fa)(Future.addToBuilderFun) + }.map(_.result())(if (executor.isInstanceOf[BatchingExecutor]) executor else parasitic) /** Asynchronously and non-blockingly returns a new `Future` to the result of the first future * in the list that is completed. This means no matter if it is completed as a success or as a failure. 
* * @tparam T the type of the value in the future - * @param futures the `TraversableOnce` of Futures in which to find the first completed + * @param futures the `IterableOnce` of Futures in which to find the first completed * @return the `Future` holding the result of the future that is first to be completed */ - def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = { - val p = Promise[T]() - val firstCompleteHandler = new AtomicReference[Promise[T]](p) with (Try[T] => Unit) { - override def apply(v1: Try[T]): Unit = getAndSet(null) match { - case null => () - case some => some tryComplete v1 - } - } - futures foreach { _ onComplete firstCompleteHandler } - p.future - } - - /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result - * of the first `Future` with a result that matches the predicate. - * - * @tparam T the type of the value in the future - * @param futures the `TraversableOnce` of Futures to search - * @param p the predicate which indicates if it's a match - * @return the `Future` holding the optional result of the search - */ - @deprecated("use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12.0") - def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { - val futuresBuffer = futures.toBuffer - if (futuresBuffer.isEmpty) successful[Option[T]](None) + final def firstCompletedOf[T](futures: IterableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = { + val i = futures.iterator + if (!i.hasNext) Future.never else { - val result = Promise[Option[T]]() - val ref = new AtomicInteger(futuresBuffer.size) - val search: Try[T] => Unit = v => try { - v match { - case Success(r) if p(r) => result tryComplete Success(Some(r)) - case _ => + val p = Promise[T]() + val 
firstCompleteHandler = new AtomicReference(List.empty[() => Unit]) with (Try[T] => Unit) { + final def apply(res: Try[T]): Unit = { + val deregs = getAndSet(null) + if (deregs != null) { + p.tryComplete(res) // tryComplete is likely to be cheaper than complete + deregs.foreach(_.apply()) + } } - } finally { - if (ref.decrementAndGet == 0) { - result tryComplete Success(None) + } + var completed = false + while (i.hasNext && !completed) { + val deregs = firstCompleteHandler.get + if (deregs == null) completed = true + else i.next() match { + case dp: DefaultPromise[T @unchecked] => + val d = dp.onCompleteWithUnregister(firstCompleteHandler) + if (!firstCompleteHandler.compareAndSet(deregs, d :: deregs)) + d.apply() + case f => + f.onComplete(firstCompleteHandler) } } - - futuresBuffer.foreach(_ onComplete search) - - result.future + p.future } } - /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result * of the first `Future` with a result that matches the predicate, failed `Future`s will be ignored. 
* @@ -732,15 +764,14 @@ object Future { * @param p the predicate which indicates if it's a match * @return the `Future` holding the optional result of the search */ - def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { + final def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { def searchNext(i: Iterator[Future[T]]): Future[Option[T]] = - if (!i.hasNext) successful[Option[T]](None) - else { - i.next().transformWith { - case Success(r) if p(r) => successful(Some(r)) - case other => searchNext(i) - } - } + if (!i.hasNext) successful(None) + else i.next().transformWith { + case Success(r) if p(r) => successful(Some(r)) + case _ => searchNext(i) + } + searchNext(futures.iterator) } @@ -762,10 +793,10 @@ object Future { * @param op the fold operation to be applied to the zero and futures * @return the `Future` holding the result of the fold */ - def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = + final def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = foldNext(futures.iterator, zero, op) - private[this] def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = + private[this] final def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = if (!i.hasNext) successful(prevValue) else i.next().flatMap { value => foldNext(i, op(prevValue, value), op) } @@ -781,16 +812,16 @@ object Future { * * @tparam T the type of the value of the input Futures * @tparam R the type of the value of the returned `Future` - * @param futures the `TraversableOnce` of Futures to be 
folded + * @param futures the `IterableOnce` of Futures to be folded * @param zero the start value of the fold * @param op the fold operation to be applied to the zero and futures * @return the `Future` holding the result of the fold */ @deprecated("use Future.foldLeft instead", "2.12.0") - def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { + // not removed in 2.13, to facilitate 2.11/2.12/2.13 cross-building; remove further down the line (see scala/scala#6319) + def fold[T, R](futures: IterableOnce[Future[T]])(zero: R)(@deprecatedName("foldFun") op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = if (futures.isEmpty) successful(zero) - else sequence(futures).map(_.foldLeft(zero)(op)) - } + else sequence(futures)(ArrayBuffer, executor).map(_.foldLeft(zero)(op)) /** Initiates a non-blocking, asynchronous, fold over the supplied futures * where the fold-zero is the result value of the first `Future` in the collection. 
@@ -801,15 +832,15 @@ object Future { * }}} * @tparam T the type of the value of the input Futures * @tparam R the type of the value of the returned `Future` - * @param futures the `TraversableOnce` of Futures to be reduced + * @param futures the `IterableOnce` of Futures to be reduced * @param op the reduce operation which is applied to the results of the futures * @return the `Future` holding the result of the reduce */ @deprecated("use Future.reduceLeft instead", "2.12.0") - def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { + // not removed in 2.13, to facilitate 2.11/2.12/2.13 cross-building; remove further down the line (see scala/scala#6319) + final def reduce[T, R >: T](futures: IterableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection")) - else sequence(futures).map(_ reduceLeft op) - } + else sequence(futures)(ArrayBuffer, executor).map(_ reduceLeft op) /** Initiates a non-blocking, asynchronous, left reduction over the supplied futures * where the zero is the result value of the first `Future`. 
@@ -824,13 +855,13 @@ object Future { * @param op the reduce operation which is applied to the results of the futures * @return the `Future` holding the result of the reduce */ - def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { + final def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { val i = futures.iterator if (!i.hasNext) failed(new NoSuchElementException("reduceLeft attempted on empty collection")) else i.next() flatMap { v => foldNext(i, v, op) } } - /** Asynchronously and non-blockingly transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` + /** Asynchronously and non-blockingly transforms a `IterableOnce[A]` into a `Future[IterableOnce[B]]` * using the provided function `A => Future[B]`. * This is useful for performing a parallel map. For example, to apply a function to all items of a list * in parallel: @@ -838,52 +869,21 @@ object Future { * {{{ * val myFutureList = Future.traverse(myList)(x => Future(myFunc(x))) * }}} - * @tparam A the type of the value inside the Futures in the `TraversableOnce` + * @tparam A the type of the value inside the Futures in the collection * @tparam B the type of the value of the returned `Future` - * @tparam M the type of the `TraversableOnce` of Futures - * @param in the `TraversableOnce` of Futures which will be sequenced - * @param fn the function to apply to the `TraversableOnce` of Futures to produce the results - * @return the `Future` of the `TraversableOnce` of results - */ - def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] = - in.foldLeft(successful(cbf(in))) { - (fr, a) => fr.zipWith(fn(a))(_ += _) - }.map(_.result())(InternalCallbackExecutor) - - - // This is used to run callbacks which 
are internal - // to scala.concurrent; our own callbacks are only - // ever used to eventually run another callback, - // and that other callback will have its own - // executor because all callbacks come with - // an executor. Our own callbacks never block - // and have no "expected" exceptions. - // As a result, this executor can do nothing; - // some other executor will always come after - // it (and sometimes one will be before it), - // and those will be performing the "real" - // dispatch to code outside scala.concurrent. - // Because this exists, ExecutionContext.defaultExecutionContext - // isn't instantiated by Future internals, so - // if some code for some reason wants to avoid - // ever starting up the default context, it can do so - // by just not ever using it itself. scala.concurrent - // doesn't need to create defaultExecutionContext as - // a side effect. - private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor { - override protected def unbatchedExecute(r: Runnable): Unit = - r.run() - override def reportFailure(t: Throwable): Unit = - throw new IllegalStateException("problem in scala.concurrent internal callback", t) - } + * @tparam M the type of the collection of Futures + * @param in the collection to be mapped over with the provided function to produce a collection of Futures that is then sequenced into a Future collection + * @param fn the function to be mapped over the collection to produce a collection of Futures + * @return the `Future` of the collection of results + */ + final def traverse[A, B, M[X] <: IterableOnce[X]](in: M[A])(fn: A => Future[B])(implicit bf: BuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] = + in.iterator.foldLeft(successful(bf.newBuilder(in))) { + (fr, a) => fr.zipWith(fn(a))(Future.addToBuilderFun) + }.map(_.result())(if (executor.isInstanceOf[BatchingExecutor]) executor else parasitic) } -/** A marker indicating that a `java.lang.Runnable` provided to 
`scala.concurrent.ExecutionContext` - * wraps a callback provided to `Future.onComplete`. - * All callbacks provided to a `Future` end up going through `onComplete`, so this allows an - * `ExecutionContext` to special-case callbacks that were executed by `Future` if desired. - */ -trait OnCompleteRunnable { +@deprecated("Superseded by `scala.concurrent.Batchable`", "2.13.0") +trait OnCompleteRunnable extends Batchable { self: Runnable => } diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala index 13fe4303f4f3..3250e656941a 100644 --- a/src/library/scala/concurrent/JavaConversions.scala +++ b/src/library/scala/concurrent/JavaConversions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,20 +17,21 @@ import scala.language.implicitConversions /** The `JavaConversions` object provides implicit conversions supporting * interoperability between Scala and Java concurrency classes. - * - * @author Philipp Haller */ +@deprecated("Use the factory methods in `ExecutionContext` instead", "2.13.0") object JavaConversions { /** * Creates a new `ExecutionContext` which uses the provided `ExecutorService`. */ + @deprecated("Use `ExecutionContext.fromExecutorService` instead", "2.13.0") implicit def asExecutionContext(exec: ExecutorService): ExecutionContextExecutorService = ExecutionContext.fromExecutorService(exec) /** * Creates a new `ExecutionContext` which uses the provided `Executor`. 
*/ + @deprecated("Use `ExecutionContext.fromExecutor` instead", "2.13.0") implicit def asExecutionContext(exec: Executor): ExecutionContextExecutor = ExecutionContext.fromExecutor(exec) diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala deleted file mode 100644 index 89e4feddf685..000000000000 --- a/src/library/scala/concurrent/Lock.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.concurrent - -/** This class ... - * - * @author Martin Odersky - */ -@deprecated("use java.util.concurrent.locks.Lock", "2.11.2") -class Lock { - var available = true - - def acquire() = synchronized { - while (!available) wait() - available = false - } - - def release() = synchronized { - available = true - notify() - } -} diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala index a82ac719ca7c..cf3f23543c5a 100644 --- a/src/library/scala/concurrent/Promise.scala +++ b/src/library/scala/concurrent/Promise.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,6 +17,10 @@ import scala.util.{ Try, Success, Failure } /** Promise is an object which can be completed with a value or failed * with an exception. * + * A promise should always eventually be completed, whether for success or failure, + * in order to avoid unintended resource retention for any associated Futures' + * callbacks or transformations. 
+ * * @define promiseCompletion * If the promise has already been fulfilled, failed or has timed out, * calling this method will throw an IllegalStateException. @@ -64,18 +68,19 @@ trait Promise[T] { * * @return This promise */ - final def completeWith(other: Future[T]): this.type = tryCompleteWith(other) + def completeWith(other: Future[T]): this.type = { + if (other ne this.future) // this tryCompleteWith this doesn't make much sense + other.onComplete(this tryComplete _)(ExecutionContext.parasitic) + + this + } /** Attempts to complete this promise with the specified future, once that future is completed. * * @return This promise */ - final def tryCompleteWith(other: Future[T]): this.type = { - if (other ne this.future) { // this tryCompleteWith this doesn't make much sense - other.onComplete(this tryComplete _)(Future.InternalCallbackExecutor) - } - this - } + @deprecated("Since this method is semantically equivalent to `completeWith`, use that instead.", "2.13.0") + final def tryCompleteWith(other: Future[T]): this.type = completeWith(other) /** Completes the promise with a value. * @@ -83,7 +88,7 @@ trait Promise[T] { * * $promiseCompletion */ - def success(@deprecatedName('v) value: T): this.type = complete(Success(value)) + def success(value: T): this.type = complete(Success(value)) /** Tries to complete the promise with a value. * @@ -101,7 +106,7 @@ trait Promise[T] { * * $promiseCompletion */ - def failure(@deprecatedName('t) cause: Throwable): this.type = complete(Failure(cause)) + def failure(cause: Throwable): this.type = complete(Failure(cause)) /** Tries to complete the promise with an exception. * @@ -109,35 +114,35 @@ trait Promise[T] { * * @return If the promise has already been completed returns `false`, or `true` otherwise. 
*/ - def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause)) + def tryFailure(cause: Throwable): Boolean = tryComplete(Failure(cause)) } object Promise { /** Creates a promise object which can be completed with a value. * * @tparam T the type of the value in the promise - * @return the newly created `Promise` object + * @return the newly created `Promise` instance */ - def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]() + final def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]() /** Creates an already completed Promise with the specified exception. * * @tparam T the type of the value in the promise - * @return the newly created `Promise` object + * @return the newly created `Promise` instance */ - def failed[T](exception: Throwable): Promise[T] = fromTry(Failure(exception)) + final def failed[T](exception: Throwable): Promise[T] = fromTry(Failure(exception)) /** Creates an already completed Promise with the specified result. * * @tparam T the type of the value in the promise - * @return the newly created `Promise` object + * @return the newly created `Promise` instance */ - def successful[T](result: T): Promise[T] = fromTry(Success(result)) + final def successful[T](result: T): Promise[T] = fromTry(Success(result)) /** Creates an already completed Promise with the specified result or exception. 
* * @tparam T the type of the value in the promise - * @return the newly created `Promise` object + * @return the newly created `Promise` instance */ - def fromTry[T](result: Try[T]): Promise[T] = impl.Promise.KeptPromise[T](result) + final def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.DefaultPromise[T](result) } diff --git a/src/library/scala/concurrent/SyncChannel.scala b/src/library/scala/concurrent/SyncChannel.scala index 1f18dc602c68..8792524524c3 100644 --- a/src/library/scala/concurrent/SyncChannel.scala +++ b/src/library/scala/concurrent/SyncChannel.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,30 +15,30 @@ package scala.concurrent /** A `SyncChannel` allows one to exchange data synchronously between * a reader and a writer thread. The writer thread is blocked until the * data to be written has been read by a corresponding reader thread. 
- * - * @author Philipp Haller - * @since 2.0 */ +@deprecated("Use `java.util.concurrent.Exchanger` instead.", since = "2.13.0") class SyncChannel[A] { - private var pendingWrites = List[(A, SyncVar[Boolean])]() - private var pendingReads = List[SyncVar[A]]() + private final val Signal = () + private type Signal = Unit + private[this] var pendingWrites = List[(A, SyncVar[Signal])]() + private[this] var pendingReads = List[SyncVar[A]]() - def write(data: A) { + def write(data: A): Unit = { // create write request - val writeReq = new SyncVar[Boolean] + val writeReq = new SyncVar[Signal] this.synchronized { // check whether there is a reader waiting - if (!pendingReads.isEmpty) { + if (pendingReads.nonEmpty) { val readReq = pendingReads.head pendingReads = pendingReads.tail // let reader continue - readReq put data + readReq.put(data) // resolve write request - writeReq put true + writeReq.put(Signal) } else { // enqueue write request @@ -55,16 +55,16 @@ class SyncChannel[A] { this.synchronized { // check whether there is a writer waiting - if (!pendingWrites.isEmpty) { + if (pendingWrites.nonEmpty) { // read data val (data, writeReq) = pendingWrites.head pendingWrites = pendingWrites.tail // let writer continue - writeReq.put(true) + writeReq.put(Signal) // resolve read request - readReq.put (data) + readReq.put(data) } else { // enqueue read request diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 5c548e672def..66c5fd1bb81d 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,11 +18,11 @@ import java.util.concurrent.TimeUnit * All methods are synchronized. 
* * @tparam A type of the contained value - * @author Martin Odersky */ +@deprecated("Use `java.util.concurrent.LinkedBlockingQueue with capacity 1` instead.", since = "2.13.0") class SyncVar[A] { - private var isDefined: Boolean = false - private var value: A = _ + private[this] var isDefined: Boolean = false + private[this] var value: A = _ /** * Wait for this SyncVar to become defined and then get @@ -43,7 +43,7 @@ class SyncVar[A] { wait(timeout) val elapsed = System.nanoTime() - start // nanoTime should be monotonic, but it's not possible to rely on that. - // See http://bugs.java.com/view_bug.do?bug_id=6458294 + // See https://bugs.java.com/view_bug.do?bug_id=6458294 if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed) } @@ -90,14 +90,6 @@ class SyncVar[A] { finally unsetVal() } - // TODO: this method should be private - // [Heather] the reason why: it doesn't take into consideration - // whether or not the SyncVar is already defined. So, set has been - // deprecated in order to eventually be able to make "setting" private - @deprecated("use `put` to ensure a value cannot be overwritten without a corresponding `take`", "2.10.0") - // NOTE: Used by sbt 0.13.0-M2 and below - def set(x: A): Unit = setVal(x) - /** Place a value in the SyncVar. If the SyncVar already has a stored value, * wait until another thread takes it. */ def put(x: A): Unit = synchronized { @@ -110,18 +102,6 @@ class SyncVar[A] { isDefined } - // TODO: this method should be private - // [Heather] the reason why: it doesn't take into consideration - // whether or not the SyncVar is already defined. 
So, unset has been - // deprecated in order to eventually be able to make "unsetting" private - @deprecated("use `take` to ensure a value is never discarded", "2.10.0") - // NOTE: Used by sbt 0.13.0-M2 and below - def unset(): Unit = synchronized { - isDefined = false - value = null.asInstanceOf[A] - notifyAll() - } - // `setVal` exists so as to retroactively deprecate `set` without // deprecation warnings where we use `set` internally. The // implementation of `set` was moved to `setVal` to achieve this diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala index 0f7975071a79..353d0f30fff8 100644 --- a/src/library/scala/concurrent/duration/Deadline.scala +++ b/src/library/scala/concurrent/duration/Deadline.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,7 +23,7 @@ package scala.concurrent.duration * * Its main purpose is to manage repeated attempts to achieve something (like * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All - * durations are measured according to `System.nanoTime` aka wall-time; this + * durations are measured according to `System.nanoTime`; this * does not take into account changes to the system clock (such as leap * seconds). */ @@ -64,7 +64,7 @@ case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] { /** * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration. */ - def compare(other: Deadline) = time compare other.time + def compare(other: Deadline): Int = time compare other.time } object Deadline { @@ -79,7 +79,7 @@ object Deadline { * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration. 
*/ implicit object DeadlineIsOrdered extends Ordering[Deadline] { - def compare(a: Deadline, b: Deadline) = a compare b + def compare(a: Deadline, b: Deadline): Int = a compare b } } diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala index 8bd9961e6129..1312bb12d1d5 100644 --- a/src/library/scala/concurrent/duration/Duration.scala +++ b/src/library/scala/concurrent/duration/Duration.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,8 @@ package scala.concurrent.duration -import java.lang.{ Double => JDouble, Long => JLong } +import java.lang.{ Double => JDouble } +import scala.collection.StringParsers object Duration { @@ -20,7 +21,7 @@ object Duration { * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if * * - the unit is NANOSECONDS - * - and the length has an absolute value greater than 2^53 + * - and the length has an absolute value greater than `2^53` * * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively. * @@ -44,28 +45,29 @@ object Duration { def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit)) // Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53 - private[this] final val maxPreciseDouble = 9007199254740992d + // private[this] final val maxPreciseDouble = 9007199254740992d // not used after https://github.com/scala/scala/pull/9233 /** * Parse String into Duration. Format is `""`, where * whitespace is allowed before, between and after the parts. Infinities are - * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`. 
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"`, `"Duration.Inf"` and `"-Inf"`, `"MinusInf"` or `"Duration.MinusInf"`. + * Undefined is designated by `"Duration.Undefined"`. * * @throws NumberFormatException if format is not parsable */ def apply(s: String): Duration = { val s1: String = s filterNot (_.isWhitespace) s1 match { - case "Inf" | "PlusInf" | "+Inf" => Inf - case "MinusInf" | "-Inf" => MinusInf - case _ => + case "Inf" | "PlusInf" | "+Inf" | "Duration.Inf" => Inf + case "MinusInf" | "-Inf" | "Duration.MinusInf" => MinusInf + case "Duration.Undefined" => Undefined + case _ => val unitName = s1.reverse.takeWhile(_.isLetter).reverse timeUnit get unitName match { case Some(unit) => val valueStr = s1 dropRight unitName.length - val valueD = JDouble.parseDouble(valueStr) - if (valueD >= -maxPreciseDouble && valueD <= maxPreciseDouble) Duration(valueD, unit) - else Duration(JLong.parseLong(valueStr), unit) + StringParsers.parseLong(valueStr).map(Duration(_, unit)) + .getOrElse(Duration(JDouble.parseDouble(valueStr), unit)) case _ => throw new NumberFormatException("format error " + s) } } @@ -74,7 +76,7 @@ object Duration { // "ms milli millisecond" -> List("ms", "milli", "millis", "millisecond", "milliseconds") private[this] def words(s: String) = (s.trim split "\\s+").toList private[this] def expandLabels(labels: String): List[String] = { - val hd :: rest = words(labels) + val hd :: rest = words(labels): @unchecked hd :: rest.flatMap(s => List(s, s + "s")) } private[this] val timeUnitLabels = List( @@ -89,7 +91,7 @@ object Duration { // TimeUnit => standard label protected[duration] val timeUnitName: Map[TimeUnit, String] = - timeUnitLabels.toMap.mapValues(s => words(s).last).toMap + timeUnitLabels.toMap.view.mapValues(s => words(s).last).toMap // Label => TimeUnit protected[duration] val timeUnit: Map[String, TimeUnit] = @@ -106,7 +108,7 @@ object Duration { * Extract length and time unit out of a duration, if it is finite. 
*/ def unapply(d: Duration): Option[(Long, TimeUnit)] = - if (d.isFinite()) Some((d.length, d.unit)) else None + if (d.isFinite) Some((d.length, d.unit)) else None /** * Construct a possibly infinite or undefined Duration from the given number of nanoseconds. @@ -132,12 +134,12 @@ object Duration { fromNanos(nanos.round) } - private[this] final val µs_per_ns = 1000L - private[this] final val ms_per_ns = µs_per_ns * 1000 - private[this] final val s_per_ns = ms_per_ns * 1000 - private[this] final val min_per_ns = s_per_ns * 60 - private[this] final val h_per_ns = min_per_ns * 60 - private[this] final val d_per_ns = h_per_ns * 24 + private[this] final val ns_per_µs = 1000L + private[this] final val ns_per_ms = ns_per_µs * 1000 + private[this] final val ns_per_s = ns_per_ms * 1000 + private[this] final val ns_per_min = ns_per_s * 60 + private[this] final val ns_per_h = ns_per_min * 60 + private[this] final val ns_per_d = ns_per_h * 24 /** * Construct a finite duration from the given number of nanoseconds. 
The @@ -147,12 +149,12 @@ object Duration { * @throws IllegalArgumentException for `Long.MinValue` since that would lead to inconsistent behavior afterwards (cannot be negated) */ def fromNanos(nanos: Long): FiniteDuration = { - if (nanos % d_per_ns == 0) Duration(nanos / d_per_ns, DAYS) - else if (nanos % h_per_ns == 0) Duration(nanos / h_per_ns, HOURS) - else if (nanos % min_per_ns == 0) Duration(nanos / min_per_ns, MINUTES) - else if (nanos % s_per_ns == 0) Duration(nanos / s_per_ns, SECONDS) - else if (nanos % ms_per_ns == 0) Duration(nanos / ms_per_ns, MILLISECONDS) - else if (nanos % µs_per_ns == 0) Duration(nanos / µs_per_ns, MICROSECONDS) + if (nanos % ns_per_d == 0) Duration(nanos / ns_per_d , DAYS) + else if (nanos % ns_per_h == 0) Duration(nanos / ns_per_h , HOURS) + else if (nanos % ns_per_min == 0) Duration(nanos / ns_per_min, MINUTES) + else if (nanos % ns_per_s == 0) Duration(nanos / ns_per_s , SECONDS) + else if (nanos % ns_per_ms == 0) Duration(nanos / ns_per_ms , MILLISECONDS) + else if (nanos % ns_per_µs == 0) Duration(nanos / ns_per_µs , MICROSECONDS) else Duration(nanos, NANOSECONDS) } @@ -171,17 +173,17 @@ object Duration { * * The particular comparison semantics mirror those of Double.NaN. 
* - * '''''Use `eq` when checking an input of a method against this value.''''' + * '''''Use [[eq]] when checking an input of a method against this value.''''' */ val Undefined: Infinite = new Infinite { override def toString = "Duration.Undefined" - override def equals(other: Any) = false + override def equals(other: Any): Boolean = false override def +(other: Duration): Duration = this override def -(other: Duration): Duration = this override def *(factor: Double): Duration = this override def /(factor: Double): Duration = this override def /(other: Duration): Double = Double.NaN - def compare(other: Duration) = if (other eq this) 0 else 1 + def compare(other: Duration): Int = if (other eq this) 0 else 1 def unary_- : Duration = this def toUnit(unit: TimeUnit): Double = Double.NaN private def readResolve(): AnyRef = Undefined // Instructs deserialization to use this same instance @@ -212,7 +214,7 @@ object Duration { case x => Double.PositiveInfinity * (if ((this > Zero) ^ (divisor >= Zero)) -1 else 1) } - final def isFinite() = false + final def isFinite = false private[this] def fail(what: String) = throw new IllegalArgumentException(s"$what not allowed on infinite Durations") final def length: Long = fail("length") @@ -234,15 +236,15 @@ object Duration { * matching its semantics in arithmetic operations. 
*/ val Inf: Infinite = new Infinite { - override def toString = "Duration.Inf" - def compare(other: Duration) = other match { + override def toString: String = "Duration.Inf" + def compare(other: Duration): Int = other match { case x if x eq Undefined => -1 // Undefined != Undefined case x if x eq this => 0 // `case Inf` will include null checks in the byte code case _ => 1 } - def unary_- : Duration = MinusInf + def unary_- : Duration = MinusInf def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity - private def readResolve(): AnyRef = Inf // Instructs deserialization to use this same instance + private def readResolve(): AnyRef = Inf // Instructs deserialization to use this same instance } /** @@ -251,11 +253,11 @@ object Duration { * matching its semantics in arithmetic operations. */ val MinusInf: Infinite = new Infinite { - override def toString = "Duration.MinusInf" - def compare(other: Duration) = if (other eq this) 0 else -1 + override def toString: String = "Duration.MinusInf" + def compare(other: Duration): Int = if (other eq this) 0 else -1 def unary_- : Duration = Inf def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity - private def readResolve(): AnyRef = MinusInf // Instructs deserialization to use this same instance + private def readResolve(): AnyRef = MinusInf // Instructs deserialization to use this same instance } // Java Factories @@ -269,7 +271,7 @@ object Duration { * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if * * - the unit is NANOSECONDS - * - and the length has an absolute value greater than 2^53 + * - and the length has an absolute value greater than `2^53` * * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively. * @@ -297,7 +299,7 @@ object Duration { * The natural ordering of durations matches the natural ordering for Double, including non-finite values. 
*/ implicit object DurationIsOrdered extends Ordering[Duration] { - def compare(a: Duration, b: Duration) = a compare b + def compare(a: Duration, b: Duration): Int = a compare b } } @@ -329,7 +331,7 @@ object Duration { * {{{ * import scala.concurrent.duration._ * - * val duration = 100 millis + * val duration = 100.millis * }}} * * '''''The DSL provided by the implicit conversions always allows construction of finite durations, even for infinite Double inputs; use Duration.Inf instead.''''' @@ -366,7 +368,7 @@ object Duration { * * @define exc @throws IllegalArgumentException when invoked on a non-finite duration * - * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is +-(2^63-1)ns, and no conversion to infinite durations takes place. + * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is `+-(2^63-1)`ns, and no conversion to infinite durations takes place. */ sealed abstract class Duration extends Serializable with Ordered[Duration] { /** @@ -474,7 +476,7 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] { * This method returns whether this duration is finite, which is not the same as * `!isInfinite` for Double because this method also returns `false` for [[Duration.Undefined]]. */ - def isFinite(): Boolean + def isFinite: Boolean /** * Return the smaller of this and that duration as determined by the natural ordering. */ @@ -492,41 +494,41 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] { * * $ovf */ - def div(divisor: Double) = this / divisor + def div(divisor: Double): Duration = this / divisor /** * Return the quotient of this and that duration as floating-point number. The semantics are * determined by Double as if calculating the quotient of the nanosecond lengths of both factors. 
*/ - def div(other: Duration) = this / other - def gt(other: Duration) = this > other - def gteq(other: Duration) = this >= other - def lt(other: Duration) = this < other - def lteq(other: Duration) = this <= other + def div(other: Duration): Double = this / other + def gt(other: Duration): Boolean = this > other + def gteq(other: Duration): Boolean = this >= other + def lt(other: Duration): Boolean = this < other + def lteq(other: Duration): Boolean = this <= other /** * Return the difference of that duration and this. When involving non-finite summands the semantics match those * of Double. * * $ovf */ - def minus(other: Duration) = this - other + def minus(other: Duration): Duration = this - other /** * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those * of Double. * * $ovf */ - def mul(factor: Double) = this * factor + def mul(factor: Double): Duration = this * factor /** * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]]. */ - def neg() = -this + def neg(): Duration = -this /** * Return the sum of that duration and this. When involving non-finite summands the semantics match those * of Double. * * $ovf */ - def plus(other: Duration) = this + other + def plus(other: Duration): Duration = this + other /** * Return duration which is equal to this duration but with a coarsest Unit, or self in case it is already the coarsest Unit *

    @@ -544,11 +546,11 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] { object FiniteDuration { implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] { - def compare(a: FiniteDuration, b: FiniteDuration) = a compare b + def compare(a: FiniteDuration, b: FiniteDuration): Int = a compare b } - def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit) - def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit)) + def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit) + def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit)) // limit on abs. value of durations in their units private final val max_ns = Long.MaxValue @@ -562,7 +564,7 @@ object FiniteDuration { /** * This class represents a finite duration. Its addition and subtraction operators are overloaded to retain - * this guarantee statically. The range of this class is limited to +-(2^63-1)ns, which is roughly 292 years. + * this guarantee statically. The range of this class is limited to `+-(2^63-1)`ns, which is roughly 292 years. 
*/ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration { import FiniteDuration._ @@ -574,36 +576,36 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio /* * enforce the 2^63-1 ns limit, must be pos/neg symmetrical because of unary_- */ - case NANOSECONDS ⇒ bounded(max_ns) - case MICROSECONDS ⇒ bounded(max_µs) - case MILLISECONDS ⇒ bounded(max_ms) - case SECONDS ⇒ bounded(max_s) - case MINUTES ⇒ bounded(max_min) - case HOURS ⇒ bounded(max_h) - case DAYS ⇒ bounded(max_d) - case _ ⇒ + case NANOSECONDS => bounded(max_ns) + case MICROSECONDS => bounded(max_µs) + case MILLISECONDS => bounded(max_ms) + case SECONDS => bounded(max_s) + case MINUTES => bounded(max_min) + case HOURS => bounded(max_h) + case DAYS => bounded(max_d) + case _ => val v = DAYS.convert(length, unit) -max_d <= v && v <= max_d }, "Duration is limited to +-(2^63-1)ns (ca. 292 years)") - def toNanos = unit.toNanos(length) - def toMicros = unit.toMicros(length) - def toMillis = unit.toMillis(length) - def toSeconds = unit.toSeconds(length) - def toMinutes = unit.toMinutes(length) - def toHours = unit.toHours(length) - def toDays = unit.toDays(length) - def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u) + def toNanos: Long = unit.toNanos(length) + def toMicros: Long = unit.toMicros(length) + def toMillis: Long = unit.toMillis(length) + def toSeconds: Long = unit.toSeconds(length) + def toMinutes: Long = unit.toMinutes(length) + def toHours: Long = unit.toHours(length) + def toDays: Long = unit.toDays(length) + def toUnit(u: TimeUnit): Double = toNanos.toDouble / NANOSECONDS.convert(1, u) /** * Construct a [[Deadline]] from this duration by adding it to the current instant `Deadline.now`. 
*/ def fromNow: Deadline = Deadline.now + this - private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" ) - override def toString = "" + length + " " + unitString + private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" ) + override def toString: String = "" + length + " " + unitString - def compare(other: Duration) = other match { + def compare(other: Duration): Int = other match { case x: FiniteDuration => toNanos compare x.toNanos case _ => -(other compare this) } @@ -620,22 +622,22 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio new FiniteDuration(totalLength, commonUnit) } - def +(other: Duration) = other match { + def +(other: Duration): Duration = other match { case x: FiniteDuration => add(x.length, x.unit) case _ => other } - def -(other: Duration) = other match { + def -(other: Duration): Duration = other match { case x: FiniteDuration => add(-x.length, x.unit) case _ => -other } - def *(factor: Double) = + def *(factor: Double): Duration = if (!factor.isInfinite) fromNanos(toNanos * factor) else if (JDouble.isNaN(factor)) Undefined else if ((factor > 0) ^ (this < Zero)) Inf else MinusInf - def /(divisor: Double) = + def /(divisor: Double): Duration = if (!divisor.isInfinite) fromNanos(toNanos / divisor) else if (JDouble.isNaN(divisor)) Undefined else Zero @@ -643,34 +645,34 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio // if this is made a constant, then scalac will elide the conditional and always return +0.0, scala/bug#6331 private[this] def minusZero = -0d def /(divisor: Duration): Double = - if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos + if (divisor.isFinite) toNanos.toDouble / divisor.toNanos else if (divisor eq Undefined) Double.NaN else if ((length < 0) ^ (divisor > Zero)) 0d else minusZero // overloaded methods taking FiniteDurations, so that you can calculate while statically staying finite - def 
+(other: FiniteDuration) = add(other.length, other.unit) - def -(other: FiniteDuration) = add(-other.length, other.unit) - def plus(other: FiniteDuration) = this + other - def minus(other: FiniteDuration) = this - other - def min(other: FiniteDuration) = if (this < other) this else other - def max(other: FiniteDuration) = if (this > other) this else other + def +(other: FiniteDuration): FiniteDuration = add(other.length, other.unit) + def -(other: FiniteDuration): FiniteDuration = add(-other.length, other.unit) + def plus(other: FiniteDuration): FiniteDuration = this + other + def minus(other: FiniteDuration): FiniteDuration = this - other + def min(other: FiniteDuration): FiniteDuration = if (this < other) this else other + def max(other: FiniteDuration): FiniteDuration = if (this > other) this else other // overloaded methods taking Long so that you can calculate while statically staying finite /** * Return the quotient of this duration and the given integer factor. * - * @throws ArithmeticException if the factor is 0 + * @throws java.lang.ArithmeticException if the factor is 0 */ - def /(divisor: Long) = fromNanos(toNanos / divisor) + def /(divisor: Long): FiniteDuration = fromNanos(toNanos / divisor) /** * Return the product of this duration and the given integer factor. * * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration */ - def *(factor: Long) = new FiniteDuration(safeMul(length, factor), unit) + def *(factor: Long): FiniteDuration = new FiniteDuration(safeMul(length, factor), unit) /* * This method avoids the use of Long division, which saves 95% of the time spent, @@ -695,24 +697,24 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio /** * Return the quotient of this duration and the given integer factor. 
* - * @throws ArithmeticException if the factor is 0 + * @throws java.lang.ArithmeticException if the factor is 0 */ - def div(divisor: Long) = this / divisor + def div(divisor: Long): FiniteDuration = this / divisor /** * Return the product of this duration and the given integer factor. * * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration */ - def mul(factor: Long) = this * factor + def mul(factor: Long): FiniteDuration = this * factor - def unary_- = Duration(-length, unit) + def unary_- : FiniteDuration = Duration(-length, unit) - final def isFinite() = true + final def isFinite = true final override def toCoarsest: FiniteDuration = { def loop(length: Long, unit: TimeUnit): FiniteDuration = { - def coarserOrThis(coarser: TimeUnit, divider: Int) = + def coarserOrThis(coarser: TimeUnit, divider: Int): FiniteDuration = if (length % divider == 0) loop(length / divider, coarser) else if (unit == this.unit) this else FiniteDuration(length, unit) @@ -732,9 +734,9 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio else loop(length, unit) } - override def equals(other: Any) = other match { + override def equals(other: Any): Boolean = other match { case x: FiniteDuration => toNanos == x.toNanos case _ => super.equals(other) } - override def hashCode = toNanos.toInt + override def hashCode: Int = toNanos.toInt } diff --git a/src/library/scala/concurrent/duration/DurationConversions.scala b/src/library/scala/concurrent/duration/DurationConversions.scala index 789723e301ed..30036331be73 100644 --- a/src/library/scala/concurrent/duration/DurationConversions.scala +++ b/src/library/scala/concurrent/duration/DurationConversions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -19,59 +19,59 @@ import DurationConversions._ trait DurationConversions extends Any { protected def durationIn(unit: TimeUnit): FiniteDuration - def nanoseconds = durationIn(NANOSECONDS) - def nanos = nanoseconds - def nanosecond = nanoseconds - def nano = nanoseconds + def nanoseconds: FiniteDuration = durationIn(NANOSECONDS) + def nanos: FiniteDuration = nanoseconds + def nanosecond: FiniteDuration = nanoseconds + def nano: FiniteDuration = nanoseconds - def microseconds = durationIn(MICROSECONDS) - def micros = microseconds - def microsecond = microseconds - def micro = microseconds + def microseconds: FiniteDuration = durationIn(MICROSECONDS) + def micros: FiniteDuration = microseconds + def microsecond: FiniteDuration = microseconds + def micro: FiniteDuration = microseconds - def milliseconds = durationIn(MILLISECONDS) - def millis = milliseconds - def millisecond = milliseconds - def milli = milliseconds + def milliseconds: FiniteDuration = durationIn(MILLISECONDS) + def millis: FiniteDuration = milliseconds + def millisecond: FiniteDuration = milliseconds + def milli: FiniteDuration = milliseconds - def seconds = durationIn(SECONDS) - def second = seconds + def seconds: FiniteDuration = durationIn(SECONDS) + def second: FiniteDuration = seconds - def minutes = durationIn(MINUTES) - def minute = minutes + def minutes: FiniteDuration = durationIn(MINUTES) + def minute: FiniteDuration = minutes - def hours = durationIn(HOURS) - def hour = hours + def hours: FiniteDuration = durationIn(HOURS) + def hour: FiniteDuration = hours - def days = durationIn(DAYS) - def day = days + def days: FiniteDuration = durationIn(DAYS) + def day: FiniteDuration = days - def nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds) - def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) - def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) - def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) + def 
nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds) + def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) + def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) + def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c) def microseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(microseconds) - def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) - def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) - def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) + def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) + def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) + def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c) def milliseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(milliseconds) - def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) - def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) - def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) + def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) + def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) + def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c) - def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds) - def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c) + def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds) + def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c) - def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes) - def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c) + def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes) + def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c) - def hours[C](c: 
C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours) - def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c) + def hours[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours) + def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c) - def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days) - def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c) + def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days) + def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c) } /** @@ -85,12 +85,12 @@ object DurationConversions { implicit object spanConvert extends Classifier[span.type] { type R = FiniteDuration - def convert(d: FiniteDuration) = d + def convert(d: FiniteDuration): FiniteDuration = d } implicit object fromNowConvert extends Classifier[fromNow.type] { type R = Deadline - def convert(d: FiniteDuration) = Deadline.now + d + def convert(d: FiniteDuration): Deadline = Deadline.now + d } } diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala index 7373384f8d44..f81b8777f6d0 100644 --- a/src/library/scala/concurrent/duration/package.scala +++ b/src/library/scala/concurrent/duration/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -53,15 +53,15 @@ package object duration { implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit) implicit final class DurationInt(private val n: Int) extends AnyVal with DurationConversions { - override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n.toLong, unit) + override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n.toLong, unit) } implicit final class DurationLong(private val n: Long) extends AnyVal with DurationConversions { - override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit) + override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit) } implicit final class DurationDouble(private val d: Double) extends AnyVal with DurationConversions { - override protected def durationIn(unit: TimeUnit): FiniteDuration = + override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(d, unit) match { case f: FiniteDuration => f case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d) @@ -72,16 +72,16 @@ package object duration { * Avoid reflection based invocation by using non-duck type */ implicit final class IntMult(private val i: Int) extends AnyVal { - def *(d: Duration) = d * i.toDouble - def *(d: FiniteDuration) = d * i.toLong + def *(d: Duration): Duration = d * i.toDouble + def *(d: FiniteDuration): FiniteDuration = d * i.toLong } implicit final class LongMult(private val i: Long) extends AnyVal { - def *(d: Duration) = d * i.toDouble - def *(d: FiniteDuration) = d * i.toLong + def *(d: Duration): Duration = d * i.toDouble + def *(d: FiniteDuration): FiniteDuration = d * i.toLong } implicit final class DoubleMult(private val f: Double) extends AnyVal { - def *(d: Duration) = d * f.toDouble + def *(d: Duration): Duration = d * f.toDouble } } diff --git a/src/library/scala/concurrent/forkjoin/package.scala b/src/library/scala/concurrent/forkjoin/package.scala deleted file mode 100644 index 
a7fca7431825..000000000000 --- a/src/library/scala/concurrent/forkjoin/package.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.concurrent -import java.util.{concurrent => juc} -import java.util.Collection - -package object forkjoin { - @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0") - type ForkJoinPool = juc.ForkJoinPool - @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0") - object ForkJoinPool { - type ForkJoinWorkerThreadFactory = juc.ForkJoinPool.ForkJoinWorkerThreadFactory - type ManagedBlocker = juc.ForkJoinPool.ManagedBlocker - - val defaultForkJoinWorkerThreadFactory: ForkJoinWorkerThreadFactory = juc.ForkJoinPool.defaultForkJoinWorkerThreadFactory - def managedBlock(blocker: ManagedBlocker): Unit = juc.ForkJoinPool.managedBlock(blocker) - } - - @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0") - type ForkJoinTask[T] = juc.ForkJoinTask[T] - @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0") - object ForkJoinTask extends scala.Serializable { - def adapt(runnable: Runnable): ForkJoinTask[_] = juc.ForkJoinTask.adapt(runnable) - def adapt[T](callable: juc.Callable[_ <: T]): ForkJoinTask[T] = juc.ForkJoinTask.adapt(callable) - def adapt[T](runnable: Runnable, result: T): ForkJoinTask[T] = juc.ForkJoinTask.adapt(runnable, result) - def getPool(): ForkJoinPool = juc.ForkJoinTask.getPool - def getQueuedTaskCount(): Int = juc.ForkJoinTask.getQueuedTaskCount - def getSurplusQueuedTaskCount(): Int = juc.ForkJoinTask.getSurplusQueuedTaskCount - def helpQuiesce(): Unit = 
juc.ForkJoinTask.helpQuiesce - def inForkJoinPool(): Boolean = juc.ForkJoinTask.inForkJoinPool - def invokeAll[T <: ForkJoinTask[_]](tasks: Collection[T]): Collection[T] = juc.ForkJoinTask.invokeAll(tasks) - def invokeAll[T](t1: ForkJoinTask[T]): Unit = juc.ForkJoinTask.invokeAll(t1) - def invokeAll[T](tasks: ForkJoinTask[T]*): Unit = juc.ForkJoinTask.invokeAll(tasks: _*) - } - - @deprecated("use java.util.concurrent.ForkJoinWorkerThread directly, instead of this alias", "2.12.0") - type ForkJoinWorkerThread = juc.ForkJoinWorkerThread - @deprecated("use java.util.concurrent.LinkedTransferQueue directly, instead of this alias", "2.12.0") - type LinkedTransferQueue[T] = juc.LinkedTransferQueue[T] - @deprecated("use java.util.concurrent.RecursiveAction directly, instead of this alias", "2.12.0") - type RecursiveAction = juc.RecursiveAction - @deprecated("use java.util.concurrent.RecursiveTask directly, instead of this alias", "2.12.0") - type RecursiveTask[T] = juc.RecursiveTask[T] - - @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0") - type ThreadLocalRandom = juc.ThreadLocalRandom - @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0") - object ThreadLocalRandom extends scala.Serializable { - // For source compatibility, current must declare the empty argument list. - // Having no argument list makes more sense since it doesn't have any side effects, - // but existing callers will break if they invoked it as `current()`. 
- def current() = juc.ThreadLocalRandom.current - } -} diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 5ec1eac4d9e6..262a12b1b4b9 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,43 +12,30 @@ package scala.concurrent.impl -import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit } -import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.{ Semaphore, ForkJoinPool, ForkJoinWorkerThread, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit } import java.util.Collection import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService } -import scala.annotation.tailrec - -private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, val reporter: Throwable => Unit) extends ExecutionContextExecutor { +private[scala] class ExecutionContextImpl private[impl] (final val executor: Executor, final val reporter: Throwable => Unit) extends ExecutionContextExecutor { require(executor ne null, "Executor must not be null") - override def execute(runnable: Runnable) = executor execute runnable - override def reportFailure(t: Throwable) = reporter(t) + override final def execute(runnable: Runnable): Unit = executor execute runnable + override final def reportFailure(t: Throwable): Unit = reporter(t) } - private[concurrent] object ExecutionContextImpl { final class DefaultThreadFactory( - daemonic: Boolean, - maxBlockers: Int, - prefix: String, - uncaught: Thread.UncaughtExceptionHandler) 
extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { + final val daemonic: Boolean, + final val maxBlockers: Int, + final val prefix: String, + final val uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { require(prefix ne null, "DefaultThreadFactory.prefix must be non null") require(maxBlockers >= 0, "DefaultThreadFactory.maxBlockers must be greater-or-equal-to 0") - private final val currentNumberOfBlockers = new AtomicInteger(0) - - @tailrec private final def newBlocker(): Boolean = currentNumberOfBlockers.get() match { - case `maxBlockers` | Int.`MaxValue` => false - case other => currentNumberOfBlockers.compareAndSet(other, other + 1) || newBlocker() - } - - @tailrec private final def freeBlocker(): Boolean = currentNumberOfBlockers.get() match { - case 0 => false - case other => currentNumberOfBlockers.compareAndSet(other, other - 1) || freeBlocker() - } + private final val blockerPermits = new Semaphore(maxBlockers) + @annotation.nowarn("cat=deprecation") def wire[T <: Thread](thread: T): T = { thread.setDaemon(daemonic) thread.setUncaughtExceptionHandler(uncaught) @@ -60,41 +47,38 @@ private[concurrent] object ExecutionContextImpl { def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = wire(new ForkJoinWorkerThread(fjp) with BlockContext { - private[this] var isBlocked: Boolean = false // This is only ever read & written if this thread is the current thread - final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = - if ((Thread.currentThread eq this) && !isBlocked && newBlocker()) { + private[this] final var isBlocked: Boolean = false // This is only ever read & written if this thread is the current thread + final override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = + if ((Thread.currentThread eq this) && !isBlocked && blockerPermits.tryAcquire()) { try { - isBlocked = true val b: ForkJoinPool.ManagedBlocker with (() => T) = new 
ForkJoinPool.ManagedBlocker with (() => T) { - private[this] var result: T = null.asInstanceOf[T] - private[this] var done: Boolean = false + private[this] final var result: T = null.asInstanceOf[T] + private[this] final var done: Boolean = false final override def block(): Boolean = { - try { - if (!done) - result = thunk - } finally { + if (!done) { + result = thunk // If this throws then it will stop blocking. done = true } - true + isReleasable } final override def isReleasable = done - final override def apply(): T = result } + isBlocked = true ForkJoinPool.managedBlock(b) b() } finally { isBlocked = false - freeBlocker() + blockerPermits.release() } } else thunk // Unmanaged blocking }) } - def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = { + def createDefaultExecutorService(reporter: Throwable => Unit): ExecutionContextExecutorService = { def getInt(name: String, default: String) = (try System.getProperty(name, default) catch { case e: SecurityException => default }) match { @@ -102,54 +86,52 @@ private[concurrent] object ExecutionContextImpl { case other => other.toInt } - def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling) - val numThreads = getInt("scala.concurrent.context.numThreads", "x1") - // The hard limit on the number of active threads that the thread factory will produce - val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1") - - val desiredParallelism = range( - getInt("scala.concurrent.context.minThreads", "1"), - numThreads, - maxNoOfThreads) - - // The thread factory must provide additional threads to support managed blocking. 
- val maxExtraThreads = getInt("scala.concurrent.context.maxExtraThreads", "256") - - val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler { - override def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause) + val desiredParallelism = // A range between min and max given num + scala.math.min( + scala.math.max( + getInt("scala.concurrent.context.minThreads", "1"), + getInt("scala.concurrent.context.numThreads", "x1")), + getInt("scala.concurrent.context.maxThreads", "x1") + ) + + val threadFactory = new DefaultThreadFactory(daemonic = true, + maxBlockers = getInt("scala.concurrent.context.maxExtraThreads", "256"), + prefix = "scala-execution-context-global", + uncaught = (thread: Thread, cause: Throwable) => reporter(cause)) + + new ForkJoinPool(desiredParallelism, threadFactory, threadFactory.uncaught, true) with ExecutionContextExecutorService { + final override def reportFailure(cause: Throwable): Unit = + getUncaughtExceptionHandler() match { + case null => + case some => some.uncaughtException(Thread.currentThread, cause) + } } - - val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true, - maxBlockers = maxExtraThreads, - prefix = "scala-execution-context-global", - uncaught = uncaughtExceptionHandler) - - new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) } - def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = - new ExecutionContextImpl(Option(e).getOrElse(createDefaultExecutorService(reporter)), reporter) + def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextExecutor = + e match { + case null => createDefaultExecutorService(reporter) + case some => new ExecutionContextImpl(some, reporter) + } def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): - 
ExecutionContextImpl with ExecutionContextExecutorService = { - new ExecutionContextImpl(Option(es).getOrElse(createDefaultExecutorService(reporter)), reporter) - with ExecutionContextExecutorService { - final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService] - override def execute(command: Runnable) = executor.execute(command) - override def shutdown() { asExecutorService.shutdown() } - override def shutdownNow() = asExecutorService.shutdownNow() - override def isShutdown = asExecutorService.isShutdown - override def isTerminated = asExecutorService.isTerminated - override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit) - override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable) - override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t) - override def submit(runnable: Runnable) = asExecutorService.submit(runnable) - override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables) - override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit) - override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables) - override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit) - } - } + ExecutionContextExecutorService = es match { + case null => createDefaultExecutorService(reporter) + case some => + new ExecutionContextImpl(some, reporter) with ExecutionContextExecutorService { + private[this] final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService] + final override def shutdown() = asExecutorService.shutdown() + final override def shutdownNow() = asExecutorService.shutdownNow() + final override def isShutdown = asExecutorService.isShutdown + 
final override def isTerminated = asExecutorService.isTerminated + final override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit) + final override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable) + final override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t) + final override def submit(runnable: Runnable) = asExecutorService.submit(runnable) + final override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables) + final override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit) + final override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables) + final override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit) + } + } } - - diff --git a/src/library/scala/concurrent/impl/FutureConvertersImpl.scala b/src/library/scala/concurrent/impl/FutureConvertersImpl.scala new file mode 100644 index 000000000000..a9eed4cbb055 --- /dev/null +++ b/src/library/scala/concurrent/impl/FutureConvertersImpl.scala @@ -0,0 +1,101 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.concurrent.impl + +import java.util.concurrent.{CompletableFuture, CompletionStage, TimeUnit} +import java.util.function.{BiConsumer, BiFunction, Consumer, Function => JFunction} + +import scala.concurrent.Future +import scala.concurrent.impl.Promise.DefaultPromise +import scala.util.{Failure, Success, Try} + +private[scala] object FutureConvertersImpl { + final class CF[T](val wrapped: Future[T]) extends CompletableFuture[T] with (Try[T] => Unit) { + override def apply(t: Try[T]): Unit = t match { + case Success(v) => complete(v) + case Failure(e) => completeExceptionally(e) + } + + // Ensure that completions of this future cannot hold the Scala Future's completer hostage + + override def thenApply[U](fn: JFunction[_ >: T, _ <: U]): CompletableFuture[U] = thenApplyAsync(fn) + + override def thenAccept(fn: Consumer[_ >: T]): CompletableFuture[Void] = thenAcceptAsync(fn) + + override def thenRun(fn: Runnable): CompletableFuture[Void] = thenRunAsync(fn) + + override def thenCombine[U, V](cs: CompletionStage[_ <: U], fn: BiFunction[_ >: T, _ >: U, _ <: V]): CompletableFuture[V] = thenCombineAsync(cs, fn) + + override def thenAcceptBoth[U](cs: CompletionStage[_ <: U], fn: BiConsumer[_ >: T, _ >: U]): CompletableFuture[Void] = thenAcceptBothAsync(cs, fn) + + override def runAfterBoth(cs: CompletionStage[_], fn: Runnable): CompletableFuture[Void] = runAfterBothAsync(cs, fn) + + override def applyToEither[U](cs: CompletionStage[_ <: T], fn: JFunction[_ >: T, U]): CompletableFuture[U] = applyToEitherAsync(cs, fn) + + override def acceptEither(cs: CompletionStage[_ <: T], fn: Consumer[_ >: T]): CompletableFuture[Void] = acceptEitherAsync(cs, fn) + + override def runAfterEither(cs: CompletionStage[_], fn: Runnable): CompletableFuture[Void] = runAfterEitherAsync(cs, fn) + + override def thenCompose[U](fn: JFunction[_ >: T, _ <: CompletionStage[U]]): CompletableFuture[U] = thenComposeAsync(fn) + + override def whenComplete(fn: BiConsumer[_ >: T, _ >: 
Throwable]): CompletableFuture[T] = whenCompleteAsync(fn) + + override def handle[U](fn: BiFunction[_ >: T, Throwable, _ <: U]): CompletableFuture[U] = handleAsync(fn) + + override def exceptionally(fn: JFunction[Throwable, _ <: T]): CompletableFuture[T] = { + val cf = new CompletableFuture[T] + whenCompleteAsync((t, e) => { + if (e == null) cf.complete(t) + else { + val n: AnyRef = + try { + fn(e).asInstanceOf[AnyRef] + } catch { + case thr: Throwable => + cf.completeExceptionally(thr) + this + } + if (n ne this) cf.complete(n.asInstanceOf[T]) + } + } + ) + cf + } + + /** + * @inheritdoc + * + * WARNING: completing the result of this method will not complete the underlying + * Scala Future or Promise (ie, the one that was passed to `toJava`.) + */ + override def toCompletableFuture: CompletableFuture[T] = this + + override def obtrudeValue(value: T): Unit = throw new UnsupportedOperationException("obtrudeValue may not be used on the result of toJava(scalaFuture)") + + override def obtrudeException(ex: Throwable): Unit = throw new UnsupportedOperationException("obtrudeException may not be used on the result of toJava(scalaFuture)") + + override def get(): T = scala.concurrent.blocking(super.get()) + + override def get(timeout: Long, unit: TimeUnit): T = scala.concurrent.blocking(super.get(timeout, unit)) + + override def toString(): String = super[CompletableFuture].toString + } + + final class P[T](val wrapped: CompletionStage[T]) extends DefaultPromise[T] with BiFunction[T, Throwable, Unit] { + override def apply(v: T, e: Throwable): Unit = { + if (e == null) success(v) + else failure(e) + } + } +} + diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 616d0c77db2d..89f1addb8aa8 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -11,412 +11,533 @@ */ package scala.concurrent.impl - -import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException } -import scala.concurrent.Future.InternalCallbackExecutor -import scala.concurrent.duration.{ Duration, FiniteDuration } -import scala.annotation.tailrec -import scala.util.control.NonFatal -import scala.util.{ Try, Success, Failure } - +import scala.concurrent.{Batchable, CanAwait, ExecutionContext, ExecutionException, Future, TimeoutException} +import scala.concurrent.duration.Duration +import scala.annotation.{nowarn, switch, tailrec} +import scala.util.control.{ControlThrowable, NonFatal} +import scala.util.{Failure, Success, Try} +import scala.runtime.NonLocalReturnControl import java.util.concurrent.locks.AbstractQueuedSynchronizer import java.util.concurrent.atomic.AtomicReference - -private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] { - def future: this.type = this - - import scala.concurrent.Future - import scala.concurrent.impl.Promise.DefaultPromise - - private[this] final def completeWithFailure(p: Promise[_], t: Throwable): Unit = { - if (NonFatal(t)) p.complete(Failure(t)) - else if (t.isInstanceOf[InterruptedException]) { - if (p.tryComplete(Failure(new ExecutionException("Boxed InterruptedException", t)))) - Thread.currentThread.interrupt() - } else throw t +import java.util.Objects.requireNonNull +import java.io.{IOException, NotSerializableException, ObjectInputStream, ObjectOutputStream} + +/** + * Latch used to implement waiting on a DefaultPromise's result. 
+ * + * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java + * Written by Doug Lea with assistance from members of JCP JSR-166 + * Expert Group and released to the public domain, as explained at + * https://creativecommons.org/publicdomain/zero/1.0/ + */ +private[impl] final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) { + //@volatile not needed since we use acquire/release + /*@volatile*/ private[this] var _result: Try[T] = null + final def result: Try[T] = _result + override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1 + override protected def tryReleaseShared(ignore: Int): Boolean = { + setState(1) + true + } + override def apply(value: Try[T]): Unit = { + _result = value // This line MUST go before releaseShared + releaseShared(1) } +} - override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = { - val p = new DefaultPromise[S]() - onComplete { result => - try { p.complete(f(result)) } catch { case t: Throwable => completeWithFailure(p, t) } +private[concurrent] object Promise { + /** + * Link represents a completion dependency between 2 DefaultPromises. + * As the DefaultPromise referred to by a Link can itself be linked to another promise + * `relink` traverses such chains and compresses them so that the link always points + * to the root of the dependency chain. + * + * In order to conserve memory, the owner of a Link (a DefaultPromise) is not stored + * on the Link, but is instead passed in as a parameter to the operation(s). + * + * If when compressing a chain of Links it is discovered that the root has been completed, + * the `owner`'s value is completed with that value, and the Link chain is discarded. 
+ **/ + private[concurrent] final class Link[T](to: DefaultPromise[T]) extends AtomicReference[DefaultPromise[T]](to) { + /** + * Compresses this chain and returns the currently known root of this chain of Links. + **/ + final def promise(owner: DefaultPromise[T]): DefaultPromise[T] = { + val c = get() + compressed(current = c, target = c, owner = owner) } - p.future - } - // If possible, link DefaultPromises to avoid space leaks - override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = { - val p = new DefaultPromise[S]() - onComplete { - v => try f(v) match { - case fut if fut eq this => p complete v.asInstanceOf[Try[S]] - case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p) - case fut => p completeWith fut - } catch { case t: Throwable => completeWithFailure(p, t) } + /** + * The combination of traversing and possibly unlinking of a given `target` DefaultPromise. + **/ + @inline @tailrec private[this] final def compressed(current: DefaultPromise[T], target: DefaultPromise[T], owner: DefaultPromise[T]): DefaultPromise[T] = { + val value = target.get() + if (value.isInstanceOf[Callbacks[_]]) { + if (compareAndSet(current, target)) target // Link + else compressed(current = get(), target = target, owner = owner) // Retry + } else if (value.isInstanceOf[Link[_]]) compressed(current = current, target = value.asInstanceOf[Link[T]].get(), owner = owner) // Compress + else /*if (value.isInstanceOf[Try[T]])*/ { + owner.unlink(value.asInstanceOf[Try[T]]) // Discard links + owner + } } - p.future } - override def toString: String = value match { - case Some(result) => "Future("+result+")" - case None => "Future()" - } -} + /** + * The process of "resolving" a Try is to validate that it only contains + * those values which makes sense in the context of Futures. 
+ **/ + // requireNonNull is paramount to guard against null completions + private[this] final def resolve[T](value: Try[T]): Try[T] = + if (requireNonNull(value).isInstanceOf[Success[T]]) value + else { + val t = value.asInstanceOf[Failure[T]].exception + if (t.isInstanceOf[ControlThrowable] || t.isInstanceOf[InterruptedException] || t.isInstanceOf[Error]) { + if (t.isInstanceOf[NonLocalReturnControl[T @unchecked]]) + Success(t.asInstanceOf[NonLocalReturnControl[T]].value) + else + Failure(new ExecutionException("Boxed Exception", t)) + } else value + } -/* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`. - */ -private final class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable { - // must be filled in before running it - var value: Try[T] = null + // Left non-final to enable addition of extra fields by Java/Scala converters in scala-java8-compat. + class DefaultPromise[T] private[this] (initial: AnyRef) extends AtomicReference[AnyRef](initial) with scala.concurrent.Promise[T] with scala.concurrent.Future[T] with (Try[T] => Unit) { + /** + * Constructs a new, completed, Promise. + */ + final def this(result: Try[T]) = this(resolve(result): AnyRef) - override def run() = { - require(value ne null) // must set value to non-null before running! - try onComplete(value) catch { case NonFatal(e) => executor reportFailure e } - } + /** + * Constructs a new, un-completed, Promise. + */ + final def this() = this(Noop: AnyRef) - def executeWithValue(v: Try[T]): Unit = { - require(value eq null) // can't complete it twice - value = v - // Note that we cannot prepare the ExecutionContext at this point, since we might - // already be running on a different thread! 
- try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t } - } -} + /** + * WARNING: the `resolved` value needs to have been pre-resolved using `resolve()` + * INTERNAL API + */ + override final def apply(resolved: Try[T]): Unit = + tryComplete0(get(), resolved) -private[concurrent] object Promise { + /** + * Returns the associated `Future` with this `Promise` + */ + override final def future: Future[T] = this - private def resolveTry[T](source: Try[T]): Try[T] = source match { - case Failure(t) => resolver(t) - case _ => source - } + override final def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = + dispatchOrAddCallbacks(get(), new Transformation[T, S](Xform_transform, f, executor)) - private def resolver[T](throwable: Throwable): Try[T] = throwable match { - case t: scala.runtime.NonLocalReturnControl[_] => Success(t.value.asInstanceOf[T]) - case t: scala.util.control.ControlThrowable => Failure(new ExecutionException("Boxed ControlThrowable", t)) - case t: InterruptedException => Failure(new ExecutionException("Boxed InterruptedException", t)) - case e: Error => Failure(new ExecutionException("Boxed Error", e)) - case t => Failure(t) - } + override final def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = + dispatchOrAddCallbacks(get(), new Transformation[T, S](Xform_transformWith, f, executor)) + + override final def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { + val state = get() + if (state.isInstanceOf[Try[_]]) { + if (state.asInstanceOf[Try[T]].isFailure) this.asInstanceOf[Future[R]] + else { + val l = state.asInstanceOf[Success[T]].get + that.map(r => f(l, r)) + } + } else { + val buffer = new AtomicReference[Success[Any]]() + val zipped = new DefaultPromise[R]() + + val thisF: Try[T] => Unit = { + case left: Success[_] => + val right = buffer.getAndSet(left).asInstanceOf[Success[U]] + if (right ne 
null) + zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) + case f => // Can only be Failure + zipped.tryComplete(f.asInstanceOf[Failure[R]]) + } - /** - * Latch used to implement waiting on a DefaultPromise's result. - * - * Inspired by: [[http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java]] - * Written by Doug Lea with assistance from members of JCP JSR-166 - * Expert Group and released to the public domain, as explained at - * [[http://creativecommons.org/publicdomain/zero/1.0/]] - */ - private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) { - override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1 - override protected def tryReleaseShared(ignore: Int): Boolean = { - setState(1) - true + val thatF: Try[U] => Unit = { + case right: Success[_] => + val left = buffer.getAndSet(right).asInstanceOf[Success[T]] + if (left ne null) + zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) + case f => // Can only be Failure + zipped.tryComplete(f.asInstanceOf[Failure[R]]) + } + // Cheaper than this.onComplete since we already polled the state + this.dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_onComplete, thisF, executor)) + that.onComplete(thatF) + zipped.future } - override def apply(ignored: Try[T]): Unit = releaseShared(1) } + override final def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = { + val state = get() + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_foreach, f, executor)) + } - /** Default promise implementation. - * - * A DefaultPromise has three possible states. It can be: - * - * 1. Incomplete, with an associated list of callbacks waiting on completion. - * 2. Complete, with a result. - * 3. 
Linked to another DefaultPromise. - * - * If a DefaultPromise is linked to another DefaultPromise, it will - * delegate all its operations to that other promise. This means that two - * DefaultPromises that are linked will appear, to external callers, to have - * exactly the same state and behaviour. For instance, both will appear as - * incomplete, or as complete with the same result value. - * - * A DefaultPromise stores its state entirely in the AnyRef cell exposed by - * AtomicReference. The type of object stored in the cell fully describes the - * current state of the promise. - * - * 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks - * to call when it is eventually completed. - * 2. Try[T] - The promise is complete and now contains its value. - * 3. DefaultPromise[T] - The promise is linked to another promise. - * - * The ability to link DefaultPromises is needed to prevent memory leaks when - * using Future.flatMap. The previous implementation of Future.flatMap used - * onComplete handlers to propagate the ultimate value of a flatMap operation - * to its promise. Recursive calls to flatMap built a chain of onComplete - * handlers and promises. Unfortunately none of the handlers or promises in - * the chain could be collected until the handlers had been called and - * detached, which only happened when the final flatMap future was completed. - * (In some situations, such as infinite streams, this would never actually - * happen.) Because of the fact that the promise implementation internally - * created references between promises, and these references were invisible to - * user code, it was easy for user code to accidentally build large chains of - * promises and thereby leak memory. - * - * The problem of leaks is solved by automatically breaking these chains of - * promises, so that promises don't refer to each other in a long chain. This - * allows each promise to be individually collected. 
The idea is to "flatten" - * the chain of promises, so that instead of each promise pointing to its - * neighbour, they instead point directly the promise at the root of the - * chain. This means that only the root promise is referenced, and all the - * other promises are available for garbage collection as soon as they're no - * longer referenced by user code. - * - * To make the chains flattenable, the concept of linking promises together - * needed to become an explicit feature of the DefaultPromise implementation, - * so that the implementation to navigate and rewire links as needed. The idea - * of linking promises is based on the [[https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala - * Twitter promise implementation]]. - * - * In practice, flattening the chain cannot always be done perfectly. When a - * promise is added to the end of the chain, it scans the chain and links - * directly to the root promise. This prevents the chain from growing forwards - * But the root promise for a chain can change, causing the chain to grow - * backwards, and leaving all previously-linked promise pointing at a promise - * which is no longer the root promise. - * - * To mitigate the problem of the root promise changing, whenever a promise's - * methods are called, and it needs a reference to its root promise it calls - * the `compressedRoot()` method. This method re-scans the promise chain to - * get the root promise, and also compresses its links so that it links - * directly to whatever the current root promise is. This ensures that the - * chain is flattened whenever `compressedRoot()` is called. And since - * `compressedRoot()` is called at every possible opportunity (when getting a - * promise's value, when adding an onComplete handler, etc), this will happen - * frequently. Unfortunately, even this eager relinking doesn't absolutely - * guarantee that the chain will be flattened and that leaks cannot occur. 
- * However eager relinking does greatly reduce the chance that leaks will - * occur. - * - * Future.flatMap links DefaultPromises together by calling the `linkRootOf` - * method. This is the only externally visible interface to linked - * DefaultPromises, and `linkedRootOf` is currently only designed to be called - * by Future.flatMap. - */ - // Left non-final to enable addition of extra fields by Java/Scala converters - // in scala-java8-compat. - class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] { - - /** Get the root promise for this promise, compressing the link chain to that - * promise if necessary. - * - * For promises that are not linked, the result of calling - * `compressedRoot()` will the promise itself. However for linked promises, - * this method will traverse each link until it locates the root promise at - * the base of the link chain. - * - * As a side effect of calling this method, the link from this promise back - * to the root promise will be updated ("compressed") to point directly to - * the root promise. This allows intermediate promises in the link chain to - * be garbage collected. Also, subsequent calls to this method should be - * faster as the link chain will be shorter. 
- */ - private def compressedRoot(): DefaultPromise[T] = - get() match { - case linked: DefaultPromise[_] => compressedRoot(linked) - case _ => this - } + override final def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = { + val state = get() + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_flatMap, f, executor)) + else this.asInstanceOf[Future[S]] + } - @tailrec - private[this] final def compressedRoot(linked: DefaultPromise[_]): DefaultPromise[T] = { - val target = linked.asInstanceOf[DefaultPromise[T]].root - if (linked eq target) target - else if (compareAndSet(linked, target)) target - else { - get() match { - case newLinked: DefaultPromise[_] => compressedRoot(newLinked) - case _ => this - } - } + override final def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { + val state = get() + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_map, f, executor)) + else this.asInstanceOf[Future[S]] } - /** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`. - * The `compressedRoot()` method should be called instead of this method, as it is important - * to compress the link chain whenever possible. - */ - @tailrec - private def root: DefaultPromise[T] = - get() match { - case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root - case _ => this - } + override final def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = { + val state = get() + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, T](Xform_filter, p, executor)) // Short-circuit if we get a Success + else this + } - /** Try waiting for this promise to be completed. 
+ override final def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = { + val state = get() + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_collect, pf, executor)) // Short-circuit if we get a Success + else this.asInstanceOf[Future[S]] + } + + override final def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = { + val state = get() + if (!state.isInstanceOf[Success[_]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recoverWith, pf, executor)) // Short-circuit if we get a Failure + else this.asInstanceOf[Future[U]] + } + + override final def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = { + val state = get() + if (!state.isInstanceOf[Success[_]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recover, pf, executor)) // Short-circuit if we get a Failure + else this.asInstanceOf[Future[U]] + } + + override final def mapTo[S](implicit tag: scala.reflect.ClassTag[S]): Future[S] = + if (!get().isInstanceOf[Failure[_]]) super[Future].mapTo[S](tag) // Short-circuit if we get a Success + else this.asInstanceOf[Future[S]] + + + override final def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = + dispatchOrAddCallbacks(get(), new Transformation[T, Unit](Xform_onComplete, func, executor)) + + /** The same as [[onComplete]], but additionally returns a function which can be + * invoked to unregister the callback function. Removing a callback from a long-lived + * future can enable garbage collection of objects referenced by the closure. 
*/ - protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) { - import Duration.Undefined - atMost match { - case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") - case Duration.Inf => - val l = new CompletionLatch[T]() - onComplete(l)(InternalCallbackExecutor) - l.acquireSharedInterruptibly(1) - case Duration.MinusInf => // Drop out - case f: FiniteDuration => - if (f > Duration.Zero) { - val l = new CompletionLatch[T]() - onComplete(l)(InternalCallbackExecutor) - l.tryAcquireSharedNanos(1, f.toNanos) - } - } + private[concurrent] final def onCompleteWithUnregister[U](func: Try[T] => U)(implicit executor: ExecutionContext): () => Unit = { + val t = new Transformation[T, Unit](Xform_onComplete, func, executor) + dispatchOrAddCallbacks(get(), t) + () => unregisterCallback(t) + } - isCompleted - } else true // Already completed + override final def failed: Future[Throwable] = + if (!get().isInstanceOf[Success[_]]) super.failed + else Future.failedFailureFuture // Cached instance in case of already known success + + @tailrec override final def toString: String = { + val state = get() + if (state.isInstanceOf[Try[_]]) "Future("+state+")" + else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).toString + else /*if (state.isInstanceOf[Callbacks[T]]) */ "Future()" + } + + private[this] final def tryAwait0(atMost: Duration): Try[T] = + if (atMost ne Duration.Undefined) { + val v = value0 + if (v ne null) v + else { + val r = + if (atMost <= Duration.Zero) null + else { + val l = new CompletionLatch[T]() + onComplete(l)(ExecutionContext.parasitic) + + if (atMost.isFinite) + l.tryAcquireSharedNanos(1, atMost.toNanos) + else + l.acquireSharedInterruptibly(1) + + l.result + } + if (r ne null) r + else throw new TimeoutException("Future timed out after [" + atMost + "]") + } + } else throw new IllegalArgumentException("Cannot wait for Undefined duration of time") 
@throws(classOf[TimeoutException]) @throws(classOf[InterruptedException]) - final def ready(atMost: Duration)(implicit permit: CanAwait): this.type = - if (tryAwait(atMost)) this - else throw new TimeoutException("Futures timed out after [" + atMost + "]") + final def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { + tryAwait0(atMost) + this + } @throws(classOf[Exception]) final def result(atMost: Duration)(implicit permit: CanAwait): T = - ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here + tryAwait0(atMost).get // returns the value, or throws the contained exception - def value: Option[Try[T]] = value0 + override final def isCompleted: Boolean = value0 ne null - @tailrec - private def value0: Option[Try[T]] = get() match { - case c: Try[_] => Some(c.asInstanceOf[Try[T]]) - case dp: DefaultPromise[_] => compressedRoot(dp).value0 - case _ => None - } + override final def value: Option[Try[T]] = Option(value0) - override final def isCompleted: Boolean = isCompleted0 - - @tailrec - private def isCompleted0: Boolean = get() match { - case _: Try[_] => true - case dp: DefaultPromise[_] => compressedRoot(dp).isCompleted0 - case _ => false + @tailrec // returns null if not completed + private final def value0: Try[T] = { + val state = get() + if (state.isInstanceOf[Try[_]]) state.asInstanceOf[Try[T]] + else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).value0 + else /*if (state.isInstanceOf[Callbacks[T]])*/ null } - final def tryComplete(value: Try[T]): Boolean = { - val resolved = resolveTry(value) - tryCompleteAndGetListeners(resolved) match { - case null => false - case rs if rs.isEmpty => true - case rs => rs.foreach(r => r.executeWithValue(resolved)); true - } + override final def tryComplete(value: Try[T]): Boolean = { + val state = get() + if (state.isInstanceOf[Try[_]]) false + else tryComplete0(state, resolve(value)) } - /** Called by `tryComplete` to store the 
resolved value and get the list of - * listeners, or `null` if it is already completed. - */ - @tailrec - private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = { - get() match { - case raw: List[_] => - val cur = raw.asInstanceOf[List[CallbackRunnable[T]]] - if (compareAndSet(cur, v)) cur else tryCompleteAndGetListeners(v) - case dp: DefaultPromise[_] => compressedRoot(dp).tryCompleteAndGetListeners(v) - case _ => null + @tailrec // WARNING: important that the supplied Try really is resolve():d + private[Promise] final def tryComplete0(state: AnyRef, resolved: Try[T]): Boolean = + if (state.isInstanceOf[Callbacks[_]]) { + if (compareAndSet(state, resolved)) { + if (state ne Noop) submitWithValue(state.asInstanceOf[Callbacks[T]], resolved) + true + } else tryComplete0(get(), resolved) + } else if (state.isInstanceOf[Link[_]]) { + val p = state.asInstanceOf[Link[T]].promise(this) // If this returns owner/this, we are in a completed link + (p ne this) && p.tryComplete0(p.get(), resolved) // Use this to get tailcall optimization and avoid re-resolution + } else /* if(state.isInstanceOf[Try[T]]) */ false + + override final def completeWith(other: Future[T]): this.type = { + if (other ne this) { + val state = get() + if (!state.isInstanceOf[Try[_]]) { + val resolved = if (other.isInstanceOf[DefaultPromise[_]]) other.asInstanceOf[DefaultPromise[T]].value0 else other.value.orNull + if (resolved ne null) tryComplete0(state, resolved) + else other.onComplete(this)(ExecutionContext.parasitic) + } } - } - final def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = - dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func)) + this + } /** Tries to add the callback, if already completed, it dispatches the callback to be executed. * Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks * to the root promise when linking two promises together. 
*/ - @tailrec - private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = { - get() match { - case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]]) - case dp: DefaultPromise[_] => compressedRoot(dp).dispatchOrAddCallback(runnable) - case listeners: List[_] => if (compareAndSet(listeners, runnable :: listeners)) () - else dispatchOrAddCallback(runnable) + @tailrec private final def dispatchOrAddCallbacks[C <: Callbacks[T]](state: AnyRef, callbacks: C): C = + if (state.isInstanceOf[Try[_]]) { + submitWithValue(callbacks, state.asInstanceOf[Try[T]]) // invariant: callbacks should never be Noop here + callbacks + } else if (state.isInstanceOf[Callbacks[_]]) { + if(compareAndSet(state, if (state ne Noop) concatCallbacks(callbacks, state.asInstanceOf[Callbacks[T]]) else callbacks)) callbacks + else dispatchOrAddCallbacks(get(), callbacks) + } else /*if (state.isInstanceOf[Link[T]])*/ { + val p = state.asInstanceOf[Link[T]].promise(this) + p.dispatchOrAddCallbacks(p.get(), callbacks) + } + + @tailrec private def unregisterCallback(t: Transformation[_, _]): Unit = { + val state = get() + if (state eq t) { + if (!compareAndSet(state, Noop)) unregisterCallback(t) + } else if (state.isInstanceOf[ManyCallbacks[_]]) { + if (!compareAndSet(state, removeCallback(state.asInstanceOf[ManyCallbacks[T]], t))) unregisterCallback(t) } } - /** Link this promise to the root of another promise using `link()`. Should only be - * be called by transformWith. - */ - protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot()) - - /** Link this promise to another promise so that both promises share the same - * externally-visible state. Depending on the current state of this promise, this - * may involve different things. For example, any onComplete listeners will need - * to be transferred. 
- * - * If this promise is already completed, then the same effect as linking - - * sharing the same completed value - is achieved by simply sending this - * promise's result to the target promise. - */ + // IMPORTANT: Noop should never be passed in here, neither as left OR as right + @tailrec private[this] final def concatCallbacks(left: Callbacks[T], right: Callbacks[T]): Callbacks[T] = + if (left.isInstanceOf[Transformation[T,_]]) new ManyCallbacks[T](left.asInstanceOf[Transformation[T,_]], right) + else /*if (left.isInstanceOf[ManyCallbacks[T]) */ { // This should only happen when linking + val m = left.asInstanceOf[ManyCallbacks[T]] + concatCallbacks(m.rest, new ManyCallbacks(m.first, right)) + } + + @tailrec private[this] final def removeCallback(cs: Callbacks[T], t: Transformation[_, _], result: Callbacks[T] = null): AnyRef = + if (cs eq t) { + if (result == null) Noop + else result + } + else if (cs.isInstanceOf[ManyCallbacks[_]]) { + val m = cs.asInstanceOf[ManyCallbacks[T]] + if (m.first eq t) { + if (result == null) m.rest + else concatCallbacks(m.rest, result) + } + else removeCallback(m.rest, t, if (result == null) m.first else new ManyCallbacks(m.first, result)) + } else cs + + // IMPORTANT: Noop should not be passed in here, `callbacks` cannot be null @tailrec - private def link(target: DefaultPromise[T]): Unit = if (this ne target) { - get() match { - case r: Try[_] => - if (!target.tryComplete(r.asInstanceOf[Try[T]])) - throw new IllegalStateException("Cannot link completed promises together") - case dp: DefaultPromise[_] => - compressedRoot(dp).link(target) - case listeners: List[_] if compareAndSet(listeners, target) => - if (listeners.nonEmpty) - listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_)) - case _ => - link(target) + private[this] final def submitWithValue(callbacks: Callbacks[T], resolved: Try[T]): Unit = + if(callbacks.isInstanceOf[ManyCallbacks[T]]) { + val m: ManyCallbacks[T] = 
callbacks.asInstanceOf[ManyCallbacks[T]] + m.first.submitWithValue(resolved) + submitWithValue(m.rest, resolved) + } else { + callbacks.asInstanceOf[Transformation[T, _]].submitWithValue(resolved) } - } - } - /** An already completed Future is given its result at creation. - * - * Useful in Future-composition when a value to contribute is already available. - */ - object KeptPromise { - import scala.concurrent.Future - import scala.reflect.ClassTag + /** Link this promise to the root of another promise. + */ + @tailrec private[concurrent] final def linkRootOf(target: DefaultPromise[T], link: Link[T]): Unit = + if (this ne target) { + val state = get() + if (state.isInstanceOf[Try[_]]) { + if(!target.tryComplete0(target.get(), state.asInstanceOf[Try[T]])) + throw new IllegalStateException("Cannot link completed promises together") + } else if (state.isInstanceOf[Callbacks[_]]) { + val l = if (link ne null) link else new Link(target) + val p = l.promise(this) + if ((this ne p) && compareAndSet(state, l)) { + if (state ne Noop) p.dispatchOrAddCallbacks(p.get(), state.asInstanceOf[Callbacks[T]]) // Noop-check is important here + } else linkRootOf(p, l) + } else /* if (state.isInstanceOf[Link[T]]) */ + state.asInstanceOf[Link[T]].promise(this).linkRootOf(target, link) + } - private[this] sealed trait Kept[T] extends Promise[T] { - def result: Try[T] + /** + * Unlinks (removes) the link chain if the root is discovered to be already completed, + * and completes the `owner` with that result. 
+ **/ + @tailrec private[concurrent] final def unlink(resolved: Try[T]): Unit = { + val state = get() + if (state.isInstanceOf[Link[_]]) { + val next = if (compareAndSet(state, resolved)) state.asInstanceOf[Link[T]].get() else this + next.unlink(resolved) + } else tryComplete0(state, resolved) + } - override def value: Option[Try[T]] = Some(result) + @throws[IOException] + private def writeObject(out: ObjectOutputStream): Unit = + throw new NotSerializableException("Promises and Futures cannot be serialized") - override def isCompleted: Boolean = true + @throws[IOException] + @throws[ClassNotFoundException] + private def readObject(in: ObjectInputStream): Unit = + throw new NotSerializableException("Promises and Futures cannot be deserialized") + } - override def tryComplete(value: Try[T]): Boolean = false + // Constant byte tags for unpacking transformation function inputs or outputs + // These need to be Ints to get compiled into constants. + final val Xform_noop = 0 + final val Xform_map = 1 + final val Xform_flatMap = 2 + final val Xform_transform = 3 + final val Xform_transformWith = 4 + final val Xform_foreach = 5 + final val Xform_onComplete = 6 + final val Xform_recover = 7 + final val Xform_recoverWith = 8 + final val Xform_filter = 9 + final val Xform_collect = 10 + + /* Marker trait + */ + sealed trait Callbacks[-T] - override def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = - (new CallbackRunnable(executor.prepare(), func)).executeWithValue(result) + final class ManyCallbacks[-T](final val first: Transformation[T,_], final val rest: Callbacks[T]) extends Callbacks[T] { + override final def toString: String = "ManyCallbacks" + } - override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this + private[this] final val Noop = new Transformation[Nothing, Nothing](Xform_noop, null, ExecutionContext.parasitic) + + /** + * A Transformation[F, T] receives an F (it is a Callback[F]) and applies a 
transformation function to that F, + * Producing a value of type T (it is a Promise[T]). + * In order to conserve allocations, indirections, and avoid introducing bi/mega-morphicity the transformation + * function's type parameters are erased, and the _xform tag will be used to reify them. + **/ + final class Transformation[-F, T] private[this] ( + private[this] final var _fun: Any => Any, + private[this] final var _ec: ExecutionContext, + private[this] final var _arg: Try[F], + private[this] final val _xform: Int + ) extends DefaultPromise[T]() with Callbacks[F] with Runnable with Batchable { + final def this(xform: Int, f: _ => _, ec: ExecutionContext) = + this(f.asInstanceOf[Any => Any], ec.prepare(): @nowarn("cat=deprecation"), null, xform) + + final def benefitsFromBatching: Boolean = _xform != Xform_onComplete && _xform != Xform_foreach + + // Gets invoked when a value is available, schedules it to be run():ed by the ExecutionContext + // submitWithValue *happens-before* run(), through ExecutionContext.execute. + // Invariant: _arg is `null`, _ec is non-null. `this` ne Noop. 
+ // requireNonNull(resolved) will hold as guarded by `resolve` + final def submitWithValue(resolved: Try[F]): this.type = { + _arg = resolved + val e = _ec + try e.execute(this) /* Safe publication of _arg, _fun, _ec */ + catch { + case t: Throwable => + _fun = null // allow to GC + _arg = null // see above + _ec = null // see above again + handleFailure(t, e) + } - override def result(atMost: Duration)(implicit permit: CanAwait): T = result.get + this } - private[this] final class Successful[T](val result: Success[T]) extends Kept[T] { - override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = () - override def failed: Future[Throwable] = KeptPromise(Failure(new NoSuchElementException("Future.failed not completed with a throwable."))).future - override def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this - override def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this - override def fallbackTo[U >: T](that: Future[U]): Future[U] = this - } + private[this] final def handleFailure(t: Throwable, e: ExecutionContext): Unit = { + val wasInterrupted = t.isInstanceOf[InterruptedException] + if (wasInterrupted || NonFatal(t)) { + val completed = tryComplete0(get(), resolve(Failure(t))) + if (completed && wasInterrupted) Thread.currentThread.interrupt() - private[this] final class Failed[T](val result: Failure[T]) extends Kept[T] { - private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]] - - override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = () - override def failed: Future[Throwable] = KeptPromise(Success(result.exception)).future - override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = () - override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S] - override def 
flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S] - override def flatten[S](implicit ev: T <:< Future[S]): Future[S] = thisAs[S] - override def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = this - override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = thisAs[S] - override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)] - override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R] - override def fallbackTo[U >: T](that: Future[U]): Future[U] = - if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor) - override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S] + // Report or rethrow failures which are unlikely to otherwise be noticed + if (_xform == Xform_foreach || _xform == Xform_onComplete || !completed) + e.reportFailure(t) + } else throw t } - def apply[T](result: Try[T]): scala.concurrent.Promise[T] = - resolveTry(result) match { - case s @ Success(_) => new Successful(s) - case f @ Failure(_) => new Failed(f) + // Gets invoked by the ExecutionContext, when we have a value to transform. 
+ override final def run(): Unit = { + val v = _arg + val fun = _fun + val ec = _ec + _fun = null // allow to GC + _arg = null // see above + _ec = null // see above + try { + val resolvedResult: Try[_] = + (_xform: @switch) match { + case Xform_noop => + null + case Xform_map => + if (v.isInstanceOf[Success[F]]) Success(fun(v.get)) else v // Faster than `resolve(v map fun)` + case Xform_flatMap => + if (v.isInstanceOf[Success[F]]) { + val f = fun(v.get) + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + null + } else v + case Xform_transform => + resolve(fun(v).asInstanceOf[Try[T]]) + case Xform_transformWith => + val f = fun(v) + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + null + case Xform_foreach => + v.foreach(fun) + null + case Xform_onComplete => + fun(v) + null + case Xform_recover => + if (v.isInstanceOf[Failure[_]]) resolve(v.recover(fun.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T + case Xform_recoverWith => + if (v.isInstanceOf[Failure[F]]) { + val f = fun.asInstanceOf[PartialFunction[Throwable, Future[T]]].applyOrElse(v.asInstanceOf[Failure[F]].exception, Future.recoverWithFailed) + if (f ne Future.recoverWithFailedMarker) { + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + null + } else v + } else v + case Xform_filter => + if (v.isInstanceOf[Failure[F]] || fun.asInstanceOf[F => Boolean](v.get)) v else Future.filterFailure + case Xform_collect => + if (v.isInstanceOf[Success[F]]) Success(fun.asInstanceOf[PartialFunction[F, T]].applyOrElse(v.get, Future.collectFailed)) else v + case _ => + Failure(new IllegalStateException("BUG: encountered transformation promise with illegal type: " + _xform)) // Safe not to `resolve` + } + if (resolvedResult ne 
null) + tryComplete0(get(), resolvedResult.asInstanceOf[Try[T]]) // T is erased anyway so we won't have any use for it above + } catch { + case t: Throwable => handleFailure(t, ec) } + } } - } diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index edd71d0883e0..d648a1c90a15 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,7 +21,7 @@ import scala.annotation.implicitNotFound * * A more detailed guide to Futures and Promises, including discussion and examples * can be found at - * [[http://docs.scala-lang.org/overviews/core/futures.html]]. + * [[https://docs.scala-lang.org/overviews/core/futures.html]]. * * == Common Imports == * @@ -75,8 +75,8 @@ import scala.annotation.implicitNotFound * import ExecutionContext.Implicits.global // implicit execution context * * val firstZebra: Future[Int] = Future { - * val source = scala.io.Source.fromFile("/etc/dictionaries-common/words") - * source.toSeq.indexOfSlice("zebra") + * val words = Files.readAllLines("/etc/dictionaries-common/words").asScala + * words.indexOfSlice("zebra") * } * }}} * @@ -110,28 +110,6 @@ package object concurrent { type CancellationException = java.util.concurrent.CancellationException type TimeoutException = java.util.concurrent.TimeoutException - /** Starts an asynchronous computation and returns a `Future` object with the result of that computation. - * - * The result becomes available once the asynchronous computation is completed. 
- * - * @tparam T the type of the result - * @param body the asynchronous computation - * @param executor the execution context on which the future is run - * @return the `Future` holding the result of the computation - */ - @deprecated("use `Future { ... }` instead", "2.11.0") - // removal planned for 2.13.0 - def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body) - - /** Creates a promise object which can be completed with a value or an exception. - * - * @tparam T the type of the value in the promise - * @return the newly created `Promise` object - */ - @deprecated("use `Promise[T]()` instead", "2.11.0") - // removal planned for 2.13.0 - def promise[T](): Promise[T] = Promise[T]() - /** Used to designate a piece of code which potentially blocks, allowing the current [[BlockContext]] to adjust * the runtime's behavior. * Properly marking blocking code may improve performance or avoid deadlocks. @@ -143,7 +121,7 @@ package object concurrent { * @throws InterruptedException in the case that a wait within the blocking `body` was interrupted */ @throws(classOf[Exception]) - def blocking[T](body: =>T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission) + final def blocking[T](body: => T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission) } package concurrent { @@ -191,7 +169,7 @@ package concurrent { */ @throws(classOf[TimeoutException]) @throws(classOf[InterruptedException]) - def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = awaitable match { + final def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = awaitable match { case f: Future[T] if f.isCompleted => awaitable.ready(atMost)(AwaitPermission) case _ => blocking(awaitable.ready(atMost)(AwaitPermission)) } @@ -218,8 +196,8 @@ package concurrent { */ @throws(classOf[TimeoutException]) @throws(classOf[InterruptedException]) - def result[T](awaitable: Awaitable[T], 
atMost: Duration): T = awaitable match { - case f: Future[_] if f.isCompleted => awaitable.result(atMost)(AwaitPermission) + final def result[T](awaitable: Awaitable[T], atMost: Duration): T = awaitable match { + case f: Future[T] if f.isCompleted => f.result(atMost)(AwaitPermission) case _ => blocking(awaitable.result(atMost)(AwaitPermission)) } } diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala index 42dccf60cb69..bb1ded88437c 100644 --- a/src/library/scala/deprecated.scala +++ b/src/library/scala/deprecated.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -40,29 +40,25 @@ import scala.annotation.meta._ * // warning: there were three deprecation warnings in total; re-run with -deprecation for details * }}} * + * The Scala compiler also warns about using definitions annotated with [[java.lang.Deprecated]]. However it is + * recommended to use the Scala `@deprecated` annotation in Scala code because it allows providing a deprecation message. + * * '''`@deprecated` in the Scala language and its standard library'''
    * * A deprecated element of the Scala language or a definition in the Scala standard library will * be preserved at least for the current major version. * - * This means that an element deprecated in some 2.12.x release will be preserved in - * all 2.12.x releases, but may be removed in 2.13. (A deprecated element - * might be kept longer to ease migration. Developers should not rely on this.) - * - * '''Special deprecation policy for Scala 2.12'''
    - * The Scala team has decided to enact a special deprecation policy for Scala 2.12:
    - * - * As an upgrade from 2.11 to 2.12 also requires upgrading from Java 6 to Java 8, - * deprecated elements will not normally be removed in this release, to ease migration - * and cross-building. + * This means that an element deprecated in some 2.13.x release will be preserved in + * all 2.13.x releases, but may be removed in the future. (A deprecated element + * might be kept longer to ease migration, but developers should not rely on this.) * - * @see The official documentation on [[http://www.scala-lang.org/news/2.11.0/#binary-compatibility binary compatibility]]. + * @see The official documentation on [[https://www.scala-lang.org/news/2.11.0/#binary-compatibility binary compatibility]]. * @param message the message to print during compilation if the definition is accessed * @param since a string identifying the first version in which the definition was deprecated - * @since 2.3 * @see [[scala.deprecatedInheritance]] * @see [[scala.deprecatedOverriding]] * @see [[scala.deprecatedName]] */ @getter @setter @beanGetter @beanSetter @field -class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") +class deprecated(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala index 4dc2e44f4977..f95b3ef100a0 100644 --- a/src/library/scala/deprecatedInheritance.scala +++ b/src/library/scala/deprecatedInheritance.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -42,10 +42,9 @@ import scala.annotation.meta._ * * @param message the message to print during compilation if the class was sub-classed * @param since a string identifying the first version in which inheritance was deprecated - * @since 2.10 * @see [[scala.deprecated]] * @see [[scala.deprecatedOverriding]] * @see [[scala.deprecatedName]] */ @getter @setter @beanGetter @beanSetter -class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation +final class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala index 33f601890038..1a8341b8d498 100644 --- a/src/library/scala/deprecatedName.scala +++ b/src/library/scala/deprecatedName.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,32 +14,40 @@ package scala import scala.annotation.meta._ - - /** An annotation that designates that the name of a parameter is deprecated. - * - * Using this name in a named argument generates a deprecation warning. - * - * Library authors should state the library's deprecation policy in their documentation to give - * developers guidance on how long a deprecated name will be preserved. 
- * - * Library authors should prepend the name of their library to the version number to help - * developers distinguish deprecations coming from different libraries: - * - * {{{ - * def inc(x: Int, @deprecatedName('y, "FooLib 12.0") n: Int): Int = x + n - * inc(1, y = 2) - * }}} - * will produce the following warning: - * {{{ - * warning: the parameter name y is deprecated (since FooLib 12.0): use n instead - * inc(1, y = 2) - * ^ - * }}} - * - * @since 2.8.1 - * @see [[scala.deprecated]] - * @see [[scala.deprecatedInheritance]] - * @see [[scala.deprecatedOverriding]] - */ +/** An annotation that designates that the name of a parameter is deprecated. + * + * Using this name in a named argument generates a deprecation warning. + * + * If the `name` is omitted, then using the canonical name is deprecated. + * In that case, lints such as `-Xlint:named-booleans` which encourage + * the use of a name will not warn. + * + * Library authors should state the library's deprecation policy in their documentation to give + * developers guidance on how long a deprecated name will be preserved. 
+ * + * Library authors should prepend the name of their library to the version number to help + * developers distinguish deprecations coming from different libraries: + * + * {{{ + * def inc(x: Int, @deprecatedName("y", "FooLib 12.0") n: Int): Int = x + n + * inc(1, y = 2) + * }}} + * will produce the following warning: + * {{{ + * warning: the parameter name y is deprecated (since FooLib 12.0): use n instead + * inc(1, y = 2) + * ^ + * }}} + * + * @see [[scala.deprecated]] + * @see [[scala.deprecatedInheritance]] + * @see [[scala.deprecatedOverriding]] + */ @param -class deprecatedName(name: Symbol = Symbol(""), since: String = "") extends scala.annotation.StaticAnnotation +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") +class deprecatedName(name: String = "", since: String = "") extends scala.annotation.StaticAnnotation { + // at the time we remove these constructors, we should also change this from a StaticAnnotation to + // a ConstantAnnotation; for now, the presence of auxiliary constructors blocks that change + @deprecated("The parameter name should be a String, not a symbol.", "2.13.0") def this(name: Symbol, since: String) = this(name.name, since) + @deprecated("The parameter name should be a String, not a symbol.", "2.13.0") def this(name: Symbol) = this(name.name, "") +} diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala index 30a5e82dfcca..0268bee15a10 100644 --- a/src/library/scala/deprecatedOverriding.scala +++ b/src/library/scala/deprecatedOverriding.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -43,10 +43,10 @@ import scala.annotation.meta._ * * @param message the message to print during compilation if the member was overridden * @param since a string identifying the first version in which overriding was deprecated - * @since 2.10 * @see [[scala.deprecated]] * @see [[scala.deprecatedInheritance]] * @see [[scala.deprecatedName]] */ @getter @setter @beanGetter @beanSetter -class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") +class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala index 6c28c20e7daf..d7d7b55d8d3c 100644 --- a/src/library/scala/inline.scala +++ b/src/library/scala/inline.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,8 +13,18 @@ package scala /** - * An annotation on methods that requests that the compiler should try especially hard to inline the - * annotated method. The annotation can be used at definition site or at callsite. + * An annotation for methods that the optimizer should inline. + * + * Note that by default, the Scala optimizer is disabled and no callsites are inlined. See + * `-opt:help` and [[https://docs.scala-lang.org/overviews/compiler-options/optimizer.html the overview document]] + * for information on how to enable the optimizer and inliner. + * + * When inlining is enabled, the inliner will always try to inline methods or callsites annotated + * `@inline` (under the condition that inlining from the defining class is allowed). + * If inlining is not possible, for example because the method is not + * final, an optimizer warning will be issued. 
See `-Wopt:help` for details. + * + * Examples: * * {{{ * @inline final def f1(x: Int) = x @@ -23,7 +33,7 @@ package scala * * def t1 = f1(1) // inlined if possible * def t2 = f2(1) // not inlined - * def t3 = f3(1) // may be inlined (heuristics) + * def t3 = f3(1) // may be inlined (the inliner heuristics can select the callsite) * def t4 = f1(1): @noinline // not inlined (override at callsite) * def t5 = f2(1): @inline // inlined if possible (override at callsite) * def t6 = f3(1): @inline // inlined if possible @@ -37,7 +47,6 @@ package scala * def t1 = f1(1) + f1(1): @noinline // equivalent to (f1(1) + f1(1)): @noinline * def t2 = f1(1) + (f1(1): @noinline) // the second call to f1 is not inlined * }}} - * - * @author Lex Spoon */ +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class inline extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala index 906f1098d0ce..ca27dac45a6b 100644 --- a/src/library/scala/io/AnsiColor.scala +++ b/src/library/scala/io/AnsiColor.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala index 23fe9115765d..2369b528f8f7 100644 --- a/src/library/scala/io/BufferedSource.scala +++ b/src/library/scala/io/BufferedSource.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,11 +15,10 @@ package scala.io import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader } import Source.DefaultBufSize import scala.collection.{ Iterator, AbstractIterator } +import scala.collection.mutable.StringBuilder /** This object provides convenience methods to create an iterable * representation of a source file. - * - * @author Burak Emir, Paul Phillips */ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val codec: Codec) extends Source { def this(inputStream: InputStream)(implicit codec: Codec) = this(inputStream, DefaultBufSize)(codec) @@ -31,13 +30,13 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod // block of data to be read from the stream, which will then be lost // to getLines if it creates a new reader, even though next() was // never called on the original. - private var charReaderCreated = false - private lazy val charReader = { + private[this] var charReaderCreated = false + private[this] lazy val charReader = { charReaderCreated = true bufferedReader() } - override lazy val iter = ( + override val iter = ( Iterator continually (codec wrap charReader.read()) takeWhile (_ != -1) @@ -66,7 +65,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] { - private val lineReader = decachedReader + private[this] val lineReader = decachedReader var nextLine: String = null override def hasNext = { @@ -87,17 +86,29 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod override def getLines(): Iterator[String] = new BufferedLineIterator - /** Efficiently converts the entire remaining input into a string. */ - override def mkString = { - // Speed up slurping of whole data set in the simplest cases. 
- val allReader = decachedReader - val sb = new StringBuilder - val buf = new Array[Char](bufferSize) - var n = 0 - while (n != -1) { - n = allReader.read(buf) - if (n>0) sb.appendAll(buf, 0, n) - } - sb.result - } + /** Efficiently appends the entire remaining input. + * + * Note: This function may temporarily load the entire buffer into + * memory. + */ + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + if (sep.isEmpty) { + val allReader = decachedReader + val buf = new Array[Char](bufferSize) + val jsb = sb.underlying + + if (start.length != 0) jsb.append(start) + var n = allReader.read(buf) + while (n != -1) { + jsb.append(buf, 0, n) + n = allReader.read(buf) + } + if (end.length != 0) jsb.append(end) + sb + // This case is expected to be uncommon, so we're reusing code at + // the cost of temporary memory allocations. + // mkString will callback into BufferedSource.addString to read + // the Buffer into a String, and then we use StringOps.addString + // for the interspersing of sep. + } else mkString.addString(sb, start, sep, end) } diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala index 3b7b4e9e1907..a6eeab50b299 100644 --- a/src/library/scala/io/Codec.scala +++ b/src/library/scala/io/Codec.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,8 @@ package scala package io -import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action } +import java.nio.charset.{CharacterCodingException, Charset, CharsetDecoder, CharsetEncoder, CodingErrorAction => Action} +import java.nio.charset.StandardCharsets.{ISO_8859_1, UTF_8} import scala.annotation.migration import scala.language.implicitConversions @@ -81,17 +82,17 @@ trait LowPriorityCodecImplicits { } object Codec extends LowPriorityCodecImplicits { - final val ISO8859: Codec = new Codec(Charset forName "ISO-8859-1") - final val UTF8: Codec = new Codec(Charset forName "UTF-8") + final val ISO8859: Codec = Codec(ISO_8859_1) + final val UTF8: Codec = Codec(UTF_8) /** Optimistically these two possible defaults will be the same thing. * In practice this is not necessarily true, and in fact Sun classifies * the fact that you can influence anything at all via -Dfile.encoding * as an accident, with any anomalies considered "not a bug". */ - def defaultCharsetCodec = apply(Charset.defaultCharset) - def fileEncodingCodec = apply(scala.util.Properties.encodingString) - def default = defaultCharsetCodec + def defaultCharsetCodec: Codec = apply(Charset.defaultCharset) + def fileEncodingCodec: Codec = apply(scala.util.Properties.encodingString) + def default: Codec = defaultCharsetCodec def apply(encoding: String): Codec = new Codec(Charset forName encoding) def apply(charSet: Charset): Codec = new Codec(charSet) diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala index 6e9a2ce12ae6..5a0a4aecdc31 100644 --- a/src/library/scala/io/Position.scala +++ b/src/library/scala/io/Position.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,6 +13,8 @@ package scala package io +import annotation.nowarn + /** The object Position provides convenience methods to encode * line and column number in one single integer. The encoded line * (column) numbers range from 0 to `LINE_MASK` (`COLUMN_MASK`), @@ -35,7 +37,6 @@ package io * {{{ * encode(line1,column1) <= encode(line2,column2) * }}} - * @author Burak Emir (translated from work by Matthias Zenger and others) */ @deprecated("this class will be removed", "2.10.0") private[scala] abstract class Position { @@ -72,13 +73,12 @@ private[scala] abstract class Position { def toString(pos: Int): String = line(pos) + ":" + column(pos) } +@nowarn private[scala] object Position extends Position { - def checkInput(line: Int, column: Int) { + def checkInput(line: Int, column: Int): Unit = { if (line < 0) - throw new IllegalArgumentException(line + " < 0") - if ((line == 0) && (column != 0)) - throw new IllegalArgumentException(line + "," + column + " not allowed") - if (column < 0) - throw new IllegalArgumentException(line + "," + column + " not allowed") + throw new IllegalArgumentException(s"$line < 0") + if (line == 0 && column != 0 || column < 0) + throw new IllegalArgumentException(s"$line,$column not allowed") } } diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index 1cbfb1182dce..360c9fe0cf6d 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,14 +13,14 @@ package scala package io -import scala.collection.AbstractIterator -import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile, Closeable } -import java.net.{ URI, URL } +import scala.collection.{AbstractIterator, BufferedIterator} +import java.io.{Closeable, FileInputStream, FileNotFoundException, InputStream, PrintStream, File => JFile} +import java.net.{URI, URL} + +import scala.annotation.nowarn /** This object provides convenience methods to create an iterable * representation of a source file. - * - * @author Burak Emir, Paul Phillips */ object Source { val DefaultBufSize = 2048 @@ -98,7 +98,7 @@ object Source { bufferSize, () => fromFile(file, bufferSize)(codec), () => inputStream.close() - )(codec) withDescription ("file:" + file.getAbsolutePath) + )(codec) withDescription s"file:${file.getAbsolutePath}" } /** Create a `Source` from array of bytes, decoding @@ -115,8 +115,9 @@ object Source { /** Create a `Source` from array of bytes, assuming * one byte per character (ISO-8859-1 encoding.) 
*/ + @deprecated("Use `fromBytes` and specify an encoding", since="2.13.9") def fromRawBytes(bytes: Array[Byte]): Source = - fromString(new String(bytes, Codec.ISO8859.name)) + fromString(new String(bytes, Codec.ISO8859.charSet)) /** creates `Source` from file with given file: URI */ @@ -131,7 +132,7 @@ object Source { /** same as fromURL(new URL(s)) */ def fromURL(s: String)(implicit codec: Codec): BufferedSource = - fromURL(new URL(s))(codec) + fromURL(new URI(s).toURL)(codec) /** same as fromInputStream(url.openStream())(Codec(enc)) */ @@ -178,7 +179,10 @@ object Source { * @return the buffered source */ def fromResource(resource: String, classLoader: ClassLoader = Thread.currentThread().getContextClassLoader())(implicit codec: Codec): BufferedSource = - fromInputStream(classLoader.getResourceAsStream(resource)) + Option(classLoader.getResourceAsStream(resource)) match { + case Some(in) => fromInputStream(in) + case None => throw new FileNotFoundException(s"resource '$resource' was not found in the classpath from the given classloader.") + } } @@ -215,8 +219,8 @@ abstract class Source extends Iterator[Char] with Closeable { private[this] val sb = new StringBuilder lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered - def isNewline(ch: Char) = ch == '\r' || ch == '\n' - def getc() = iter.hasNext && { + def isNewline(ch: Char): Boolean = ch == '\r' || ch == '\n' + def getc(): Boolean = iter.hasNext && { val ch = iter.next() if (ch == '\n') false else if (ch == '\r') { @@ -230,8 +234,8 @@ abstract class Source extends Iterator[Char] with Closeable { true } } - def hasNext = iter.hasNext - def next = { + def hasNext: Boolean = iter.hasNext + def next(): String = { sb.clear() while (getc()) { } sb.toString @@ -246,12 +250,13 @@ abstract class Source extends Iterator[Char] with Closeable { /** Returns `'''true'''` if this source has more characters. */ - def hasNext = iter.hasNext + def hasNext: Boolean = iter.hasNext /** Returns next character. 
*/ def next(): Char = positioner.next() + @nowarn("cat=deprecation") class Positioner(encoder: Position) { def this() = this(RelaxedPosition) /** the last character returned by next. */ @@ -285,6 +290,7 @@ abstract class Source extends Iterator[Char] with Closeable { /** A Position implementation which ignores errors in * the positions. */ + @nowarn("cat=deprecation") object RelaxedPosition extends Position { def checkInput(line: Int, column: Int): Unit = () } @@ -292,8 +298,8 @@ abstract class Source extends Iterator[Char] with Closeable { object NoPositioner extends Positioner(Position) { override def next(): Char = iter.next() } - def ch = positioner.ch - def pos = positioner.pos + def ch: Char = positioner.ch + def pos: Int = positioner.pos /** Reports an error message to the output stream `out`. * @@ -304,7 +310,7 @@ abstract class Source extends Iterator[Char] with Closeable { def reportError( pos: Int, msg: String, - out: PrintStream = Console.err) + out: PrintStream = Console.err): Unit = { nerrors += 1 report(pos, msg, out) @@ -316,7 +322,7 @@ abstract class Source extends Iterator[Char] with Closeable { * @param msg the error message to report * @param out PrintStream to use */ - def report(pos: Int, msg: String, out: PrintStream) { + def report(pos: Int, msg: String, out: PrintStream): Unit = { val line = Position line pos val col = Position column pos @@ -331,7 +337,7 @@ abstract class Source extends Iterator[Char] with Closeable { def reportWarning( pos: Int, msg: String, - out: PrintStream = Console.out) + out: PrintStream = Console.out): Unit = { nwarnings += 1 report(pos, "warning! " + msg, out) @@ -364,7 +370,7 @@ abstract class Source extends Iterator[Char] with Closeable { } /** The close() method closes the underlying resource. 
*/ - def close() { + def close(): Unit = { if (closeFunction != null) closeFunction() } diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala index 6324da5c2ede..a39f99b4d689 100644 --- a/src/library/scala/io/StdIn.scala +++ b/src/library/scala/io/StdIn.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/jdk/Accumulator.scala b/src/library/scala/jdk/Accumulator.scala new file mode 100644 index 000000000000..0398a204b9a0 --- /dev/null +++ b/src/library/scala/jdk/Accumulator.scala @@ -0,0 +1,404 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{Stepper, StepperShape, mutable} +import scala.language.implicitConversions + +/** Accumulators are mutable sequences with two distinct features: + * - An accumulator can be appended efficiently to another + * - There are manually specialized Accumulators for `Int`, `Long` and `Double` that don't box + * the elements + * + * These two features make Accumulators a good candidate to collect the results of a parallel Java + * stream pipeline into a Scala collection. The + * [[scala.collection.convert.StreamExtensions.StreamHasToScala.toScala]] extension method on Java + * streams (available by importing + * [[scala.jdk.StreamConverters `scala.jdk.StreamConverters._`]]) is specialized for + * Accumulators: they are built in parallel, the parts are merged efficiently. 
+ * + * Building specialized Accumulators is handled transparently. As a user, using the + * [[Accumulator]] object as a factory automatically creates an [[IntAccumulator]], + * [[LongAccumulator]], [[DoubleAccumulator]] or [[AnyAccumulator]] depending on the element type. + * + * Note: to run the example, start the Scala REPL with `scala -Yrepl-class-based` to avoid + * deadlocks, see [[https://github.com/scala/bug/issues/9076]]. + * + * {{{ + * scala> import scala.jdk.StreamConverters._ + * import scala.jdk.StreamConverters._ + * + * scala> def isPrime(n: Int): Boolean = !(2 +: (3 to Math.sqrt(n).toInt by 2) exists (n % _ == 0)) + * isPrime: (n: Int)Boolean + * + * scala> val intAcc = (1 to 10000).asJavaParStream.filter(isPrime).toScala(scala.jdk.Accumulator) + * intAcc: scala.jdk.IntAccumulator = IntAccumulator(1, 3, 5, 7, 11, 13, 17, 19, ... + * + * scala> val stringAcc = (1 to 100).asJavaParStream.mapToObj("<>" * _).toScala(Accumulator) + * stringAcc: scala.jdk.AnyAccumulator[String] = AnyAccumulator(<>, <><>, <><><>, ... + * }}} + * + * There are two possibilities to process elements of a primitive Accumulator without boxing: + * specialized operations of the Accumulator, or the Stepper interface. The most common collection + * operations are overloaded or overridden in the primitive Accumulator classes, for example + * [[IntAccumulator.map(f:Int=>Int)* IntAccumulator.map]] or [[IntAccumulator.exists]]. + * Thanks to Scala's function specialization, + * `intAcc.exists(x => testOn(x))` does not incur boxing. + * + * The [[scala.collection.Stepper]] interface provides iterator-like `hasStep` and `nextStep` methods, and is + * specialized for `Int`, `Long` and `Double`. The `intAccumulator.stepper` method creates an + * [[scala.collection.IntStepper]] that yields the elements of the accumulator without boxing. + * + * Accumulators can hold more than `Int.MaxValue` elements. They have a [[sizeLong]] method that + * returns the size as a `Long`. 
Note that certain operations defined in [[scala.collection.Seq]] + * are implemented using [[length]], so they will not work correctly for large accumulators. + * + * The [[Accumulator]] class is a base class to share code between [[AnyAccumulator]] (for + * reference types) and the manual specializations [[IntAccumulator]], [[LongAccumulator]] and + * [[DoubleAccumulator]]. + */ +abstract class Accumulator[@specialized(Double, Int, Long) A, +CC[X] <: mutable.Seq[X], +C <: mutable.Seq[A]] + extends mutable.Seq[A] + with mutable.Builder[A, C] { + + /** + * Implementation Details + * + * Every subclass has two arrays + * - `current: Array[A]` + * - `history: Array[Array[A]]` + * + * Elements are added to `current` at [[index]] until it's full, then `current` is added to `history` at [[hIndex]]. + * [[nextBlockSize]] defines the size of the next `current`. See also [[cumulative]]. + */ + private[jdk] var index: Int = 0 + private[jdk] var hIndex: Int = 0 + private[jdk] var totalSize: Long = 0L + + /** + * The total number of elements stored in the history up to `history(i)` (where `0 <= i < hIndex`). + * This method is constant-time, the cumulative lengths are stored. + * - [[AnyAccumulator]] keeps a separate array to store the cumulative lengths. + * - [[LongAccumulator]] and [[DoubleAccumulator]] store the cumulative length at the last slot in every + * array in the history. Every array is allocated with 1 extra slot for this purpose. [[DoubleAccumulator]] + * converts the length to double for storing and back to long, which is correct for lengths that fit in the + * double's 52 fraction bits (so any collection that fits in memory). + * - [[IntAccumulator]] uses the last two slots in every array to store the cumulative length, every array is + * allocated with 1 extra slot. So `history(0)` has 17 slots of which the first 15 store elements. 
+ */ + private[jdk] def cumulative(i: Int): Long + + private[jdk] def nextBlockSize: Int = { + if (totalSize < 32) 16 + else if (totalSize <= Int.MaxValue) { + val bit = 64 - jl.Long.numberOfLeadingZeros(totalSize) + 1 << (bit - (bit >> 2)) + } + else 1 << 24 + } + + protected def efficientStepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit + + final override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = + efficientStepper(shape) + + final override def length: Int = + if (sizeLong < Int.MaxValue) sizeLong.toInt + else throw new IllegalArgumentException(s"Size too large for an Int: $sizeLong") + + final override def knownSize: Int = if (sizeLong < Int.MaxValue) size else -1 + + /** Size of the accumulated collection, as a `Long` */ + final def sizeLong: Long = totalSize + + /** Remove all accumulated elements from this accumulator. */ + def clear(): Unit = { + index = 0 + hIndex = 0 + totalSize = 0L + } + + private[jdk] def seekSlot(ix: Long): Long = { + var lo = -1 + var hi = hIndex + while (lo + 1 < hi) { + val m = (lo + hi) >>> 1 // Shift allows division-as-unsigned, prevents overflow + if (cumulative(m) > ix) hi = m + else lo = m + } + (hi.toLong << 32) | (if (hi==0) ix else ix - cumulative(hi-1)).toInt + } +} + +/** Contains factory methods to build Accumulators. + * + * Note that the `Accumulator` object itself is not a factory, but it is implicitly converted to + * a factory according to the element type, see [[Accumulator.toFactory]]. 
+ * + * This allows passing the `Accumulator` object as argument when a [[collection.Factory]] is expected, and + * the implicit [[Accumulator.AccumulatorFactoryShape]] instance is used to build a specialized + * Accumulator according to the element type: + * + * {{{ + * scala> val intAcc = Accumulator(1,2,3) + * intAcc: scala.collection.convert.IntAccumulator = IntAccumulator(1, 2, 3) + * + * scala> val anyAccc = Accumulator("K") + * anyAccc: scala.collection.convert.AnyAccumulator[String] = AnyAccumulator(K) + * + * scala> val intAcc2 = List(1,2,3).to(Accumulator) + * intAcc2: scala.jdk.IntAccumulator = IntAccumulator(1, 2, 3) + * + * scala> val anyAcc2 = List("K").to(Accumulator) + * anyAcc2: scala.jdk.AnyAccumulator[String] = AnyAccumulator(K) + * }}} + * + * @define coll Accumulator + * @define Coll `Accumulator` + */ +object Accumulator { + implicit def toFactory[A, C](sa: Accumulator.type)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): collection.Factory[A, C] = canAccumulate.factory + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the ${coll}’s elements + * @tparam C the (inferred) specific type of the $coll + * @return a new $coll with the elements of `source` + */ + def from[A, C](source: IterableOnce[A])(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + source.iterator.to(canAccumulate.factory) + + /** An empty collection + * @tparam A the type of the ${coll}'s elements + */ + def empty[A, C](implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + canAccumulate.empty + + /** Creates an $coll with the specified elements. 
+ * @tparam A the type of the ${coll}'s elements + * @tparam C the (inferred) specific type of the $coll + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A, C](elems: A*)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + canAccumulate.factory.fromSpecific(elems) + + /** Produces an $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return an $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A, C](start: A, len: Int)(f: A => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + from(new collection.View.Iterate(start, len)(f)) + + /** Produces an $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @tparam C Type (usually inferred) of the $coll + * @return an $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A, S, C](init: S)(f: S => Option[(A, S)])(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + from(new collection.View.Unfold(init)(f)) + + /** Produces an $coll containing a sequence of increasing integers. 
+ * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return an $coll with values `start, start + 1, ..., end - 1` + */ + def range[A: Integral, C](start: A, end: A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + from(collection.immutable.NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces an $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return an $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A: Integral, C](start: A, end: A, step: A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + from(collection.immutable.NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + * @tparam C the specific type of the $coll + */ + def newBuilder[A, C](implicit canAccumulate: AccumulatorFactoryShape[A, C]): collection.mutable.Builder[A, C] = + canAccumulate.factory.newBuilder + + /** Produces an $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return An $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A, C](n: Int)(elem: => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + from(new collection.View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return An $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A, C](n1: Int, n2: Int)(elem: => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[C] = + fill(n1)(fill(n2)(elem)(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[C]) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return An $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A, C](n1: Int, n2: Int, n3: Int)(elem: => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[AnyAccumulator[C]] = + fill(n1)(fill(n2, n3)(elem)(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[AnyAccumulator[C]]) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return An $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. 
+ */ + def fill[A, C](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[AnyAccumulator[AnyAccumulator[C]]] = + fill(n1)(fill(n2, n3, n4)(elem)(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[AnyAccumulator[AnyAccumulator[C]]]) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return An $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A, C](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[AnyAccumulator[AnyAccumulator[AnyAccumulator[C]]]] = + fill(n1)(fill(n2, n3, n4, n5)(elem)(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[AnyAccumulator[AnyAccumulator[AnyAccumulator[C]]]]) + + /** Produces an $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return An $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A, C](n: Int)(f: Int => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + from(new collection.View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return An $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A, C](n1: Int, n2: Int)(f: (Int, Int) => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[C] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[C]) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return An $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A, C](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[AnyAccumulator[C]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[AnyAccumulator[C]]) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return An $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A, C](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[AnyAccumulator[AnyAccumulator[C]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[AnyAccumulator[AnyAccumulator[C]]]) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return An $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A, C](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): AnyAccumulator[AnyAccumulator[AnyAccumulator[AnyAccumulator[C]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))(canAccumulate))(AccumulatorFactoryShape.anyAccumulatorFactoryShape[AnyAccumulator[AnyAccumulator[AnyAccumulator[C]]]]) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A, C](xss: Iterable[A]*)(implicit canAccumulate: AccumulatorFactoryShape[A, C]): C = + if (xss.isEmpty) canAccumulate.empty + else { + val b = canAccumulate.factory.newBuilder + xss.foreach(b ++= _) + b.result() + } + + /** An implicit `AccumulatorFactoryShape` is used in Accumulator factory method to return + * specialized variants according to the element type. 
+ */ + sealed trait AccumulatorFactoryShape[A, C] { + def factory: collection.Factory[A, C] + def empty: C + } + + object AccumulatorFactoryShape extends LowPriorityAccumulatorFactoryShape { + implicit val doubleAccumulatorFactoryShape: AccumulatorFactoryShape[Double, DoubleAccumulator] = new AccumulatorFactoryShape[Double, DoubleAccumulator] { + def factory: collection.Factory[Double, DoubleAccumulator] = DoubleAccumulator + def empty: DoubleAccumulator = DoubleAccumulator.empty + } + + implicit val intAccumulatorFactoryShape: AccumulatorFactoryShape[Int, IntAccumulator] = new AccumulatorFactoryShape[Int, IntAccumulator] { + def factory: collection.Factory[Int, IntAccumulator] = IntAccumulator + def empty: IntAccumulator = IntAccumulator.empty + } + + implicit val longAccumulatorFactoryShape: AccumulatorFactoryShape[Long, LongAccumulator] = new AccumulatorFactoryShape[Long, LongAccumulator] { + def factory: collection.Factory[Long, LongAccumulator] = LongAccumulator + def empty: LongAccumulator = LongAccumulator.empty + } + + implicit val jDoubleAccumulatorFactoryShape: AccumulatorFactoryShape[jl.Double, DoubleAccumulator] = doubleAccumulatorFactoryShape.asInstanceOf[AccumulatorFactoryShape[jl.Double, DoubleAccumulator]] + implicit val jIntegerAccumulatorFactoryShape: AccumulatorFactoryShape[jl.Integer, IntAccumulator] = intAccumulatorFactoryShape.asInstanceOf[AccumulatorFactoryShape[jl.Integer, IntAccumulator]] + implicit val jLongAccumulatorFactoryShape: AccumulatorFactoryShape[jl.Long, LongAccumulator] = longAccumulatorFactoryShape.asInstanceOf[AccumulatorFactoryShape[jl.Long, LongAccumulator]] + } + + sealed trait LowPriorityAccumulatorFactoryShape { + implicit def anyAccumulatorFactoryShape[A]: AccumulatorFactoryShape[A, AnyAccumulator[A]] = anyAccumulatorFactoryShapePrototype.asInstanceOf[AccumulatorFactoryShape[A, AnyAccumulator[A]]] + + private val anyAccumulatorFactoryShapePrototype = new AccumulatorFactoryShape[AnyRef, AnyAccumulator[AnyRef]] { + def 
factory: collection.Factory[AnyRef, AnyAccumulator[AnyRef]] = collection.IterableFactory.toFactory(AnyAccumulator) + def empty: AnyAccumulator[AnyRef] = AnyAccumulator.empty[AnyRef] + } + } +} diff --git a/src/library/scala/jdk/AnyAccumulator.scala b/src/library/scala/jdk/AnyAccumulator.scala new file mode 100644 index 000000000000..fa952105fcca --- /dev/null +++ b/src/library/scala/jdk/AnyAccumulator.scala @@ -0,0 +1,380 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.util.Spliterator +import java.util.function.Consumer + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AnyStepper, Factory, IterableFactoryDefaults, SeqFactory, Stepper, StepperShape, mutable} +import scala.reflect.ClassTag + +/** An Accumulator for arbitrary element types, see [[Accumulator]]. 
*/ +final class AnyAccumulator[A] + extends Accumulator[A, AnyAccumulator, AnyAccumulator[A]] + with mutable.SeqOps[A, AnyAccumulator, AnyAccumulator[A]] + with IterableFactoryDefaults[A, AnyAccumulator] + with Serializable { + private[jdk] var current: Array[AnyRef] = AnyAccumulator.emptyAnyRefArray + private[jdk] var history: Array[Array[AnyRef]] = AnyAccumulator.emptyAnyRefArrayArray + private[jdk] var cumul: Array[Long] = AnyAccumulator.emptyLongArray + + private[jdk] def cumulative(i: Int): Long = cumul(i) + + override protected[this] def className: String = "AnyAccumulator" + + def efficientStepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = + shape.parUnbox(new AnyAccumulatorStepper[A](this.asInstanceOf[AnyAccumulator[A]])) + + private def expand(): Unit = { + if (index > 0) { + if (hIndex >= history.length) hExpand() + history(hIndex) = current + cumul(hIndex) = (if (hIndex > 0) cumulative(hIndex-1) else 0) + index + hIndex += 1 + } + current = new Array[AnyRef](nextBlockSize) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) { + history = new Array[Array[AnyRef]](4) + cumul = new Array[Long](4) + } + else { + history = java.util.Arrays.copyOf(history, history.length << 1) + cumul = java.util.Arrays.copyOf(cumul, cumul.length << 1) + } + } + + /** Appends an element to this `AnyAccumulator`. */ + def addOne(a: A): this.type = { + totalSize += 1 + if (index >= current.length) expand() + current(index) = a.asInstanceOf[AnyRef] + index += 1 + this + } + + /** Result collection consisting of all elements appended so far. */ + override def result(): AnyAccumulator[A] = this + + /** Removes all elements from `that` and appends them to this `AnyAccumulator`. 
*/ + def drain[A1 <: A](that: AnyAccumulator[A1]): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val n = (that.cumulative(h) - prev).toInt + if (current.length - index >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = that.cumulative(h) + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index >= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - java.lang.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + cumul = java.util.Arrays.copyOf(cumul, n) + } + var pv = if (hIndex > 0) cumulative(hIndex-1) else 0L + if (index > 0) { + pv += index + cumul(hIndex) = pv + history(hIndex) = if (index < (current.length >>> 3) && current.length > 32) java.util.Arrays.copyOf(current, index) else current + hIndex += 1 + } + while (h < that.hIndex) { + pv += that.cumulative(h) - prev + prev = that.cumulative(h) + cumul(hIndex) = pv + history(hIndex) = that.history(h) + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear() + } + + override def clear(): Unit = { + super.clear() + current = AnyAccumulator.emptyAnyRefArray + history = AnyAccumulator.emptyAnyRefArrayArray + cumul = AnyAccumulator.emptyLongArray + } + + /** Retrieves the `ix`th element. */ + def apply(ix: Long): A = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt).asInstanceOf[A] + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt).asInstanceOf[A] + } + } + + /** Retrieves the `ix`th element, using an `Int` index. 
*/ + def apply(i: Int): A = apply(i.toLong) + + def update(idx: Long, elem: A): Unit = { + if (totalSize - idx <= index || hIndex == 0) current((idx - (totalSize - index)).toInt) = elem.asInstanceOf[AnyRef] + else { + val w = seekSlot(idx) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) = elem.asInstanceOf[AnyRef] + } + } + + def update(idx: Int, elem: A): Unit = update(idx.toLong, elem) + + /** Returns an `Iterator` over the contents of this `AnyAccumulator`. */ + def iterator: Iterator[A] = stepper.iterator + + def countLong(p: A => Boolean): Long = { + var r = 0L + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + /** Copy the elements in this `AnyAccumulator` into an `Array` */ + override def toArray[B >: A : ClassTag]: Array[B] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[B](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val n = cumulative(h) - pv + pv = cumulative(h) + var i = 0 + while (i < n) { + a(j) = x(i).asInstanceOf[B] + i += 1 + j += 1 + } + h += 1 + } + var i = 0 + while (i < index) { + a(j) = current(i).asInstanceOf[B] + i += 1 + j += 1 + } + a + } + + /** Copies the elements in this `AnyAccumulator` to a `List` */ + override def toList: List[A] = { + var ans: List[A] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i).asInstanceOf[A] :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i).asInstanceOf[A] :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** + * Copy the elements in this `AnyAccumulator` to a specified collection. Example use: + * `acc.to(Vector)`. 
+ */ + override def to[C1](factory: Factory[A, C1]): C1 = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + factory.fromSpecific(iterator) + } + + override def iterableFactory: SeqFactory[AnyAccumulator] = AnyAccumulator + + private def writeReplace(): AnyRef = new AnyAccumulator.SerializationProxy(this) +} + +object AnyAccumulator extends collection.SeqFactory[AnyAccumulator] { + private val emptyAnyRefArray = new Array[AnyRef](0) + private val emptyAnyRefArrayArray = new Array[Array[AnyRef]](0) + private val emptyLongArray = new Array[Long](0) + + import java.util.{function => jf} + + /** A `Supplier` of `AnyAccumulator`s, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def supplier[A]: jf.Supplier[AnyAccumulator[A]] = () => new AnyAccumulator[A] + + /** A `BiConsumer` that adds an element to an `AnyAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def adder[A]: jf.BiConsumer[AnyAccumulator[A], A] = (ac: AnyAccumulator[A], a: A) => ac addOne a + + /** A `BiConsumer` that adds an `Int` to an `AnyAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def unboxedIntAdder: jf.ObjIntConsumer[AnyAccumulator[Int]] = (ac: AnyAccumulator[Int], a: Int) => ac addOne a + + /** A `BiConsumer` that adds a `Long` to an `AnyAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def unboxedLongAdder: jf.ObjLongConsumer[AnyAccumulator[Long]] = (ac: AnyAccumulator[Long], a: Long) => ac addOne a + + /** A `BiConsumer` that adds a `Double` to an `AnyAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. 
*/ + def unboxedDoubleAdder: jf.ObjDoubleConsumer[AnyAccumulator[Double]] = (ac: AnyAccumulator[Double], a: Double) => ac addOne a + + /** A `BiConsumer` that merges `AnyAccumulator`s, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def merger[A]: jf.BiConsumer[AnyAccumulator[A], AnyAccumulator[A]] = (a1: AnyAccumulator[A], a2: AnyAccumulator[A]) => a1 drain a2 + + def from[A](source: IterableOnce[A]): AnyAccumulator[A] = source match { + case acc: AnyAccumulator[A] => acc + case _ => new AnyAccumulator[A].addAll(source) + } + + def empty[A]: AnyAccumulator[A] = new AnyAccumulator[A] + + def newBuilder[A]: mutable.Builder[A, AnyAccumulator[A]] = new AnyAccumulator[A] + + class SerializationProxy[A](@transient private val acc: AnyAccumulator[A]) extends Serializable { + @transient private var result: AnyAccumulator[AnyRef] = _ + + private def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val size = acc.sizeLong + out.writeLong(size) + val st = acc.stepper + while (st.hasStep) + out.writeObject(st.nextStep()) + } + + private def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val res = new AnyAccumulator[AnyRef]() + var elems = in.readLong() + while (elems > 0) { + res += in.readObject() + elems -= 1L + } + result = res + } + + private def readResolve(): AnyRef = result + } +} + +private[jdk] class AnyAccumulatorStepper[A](private[this] val acc: AnyAccumulator[A]) extends AnyStepper[A] with EfficientSplit { + import java.util.Spliterator._ + + private var h: Int = 0 + private var i: Int = 0 + private var a: Array[AnyRef] = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n: Long = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N: Long = acc.totalSize + + private def duplicateSelf(limit: Long): AnyAccumulatorStepper[A] = { + val ans = new AnyAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def 
loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics: Int = ORDERED | SIZED | SUBSIZED + + def estimateSize: Long = N + + def hasStep: Boolean = N > 0 + + def nextStep(): A = + if (N <= 0) throw new NoSuchElementException("Next in empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i).asInstanceOf[A] + i += 1 + N -= 1 + ans + } + + def trySplit(): AnyStepper[A] = + if (N <= 1) null + else { + val half = N >> 1 + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } + + override def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator[B](this) { + // Overridden for efficiency + override def tryAdvance(c: Consumer[_ >: B]): Boolean = { + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i).asInstanceOf[B]) + i += 1 + N -= 1 + true + } + } + + // Overridden for efficiency + override def forEachRemaining(f: java.util.function.Consumer[_ >: B]): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f.accept(a(i).asInstanceOf[B]) + i += 1 + } + N -= (n - i0) + } + } + } +} diff --git a/src/library/scala/jdk/CollectionConverters.scala b/src/library/scala/jdk/CollectionConverters.scala new file mode 100644 index 000000000000..9cbe1c5fea43 --- /dev/null +++ b/src/library/scala/jdk/CollectionConverters.scala @@ -0,0 +1,95 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import scala.collection.convert.{AsJavaExtensions, AsScalaExtensions} + +/** This object provides extension methods that convert between Scala and Java collections. + * + * When writing Java code, use the explicit conversion methods defined in + * [[javaapi.CollectionConverters]] instead. + * + * Note: to create [[java.util.stream.Stream Java Streams]] that operate on Scala collections + * (sequentially or in parallel), use [[StreamConverters]]. + * + * {{{ + * import scala.jdk.CollectionConverters._ + * val s: java.util.Set[String] = Set("one", "two").asJava + * }}} + * + * The conversions return adapters for the corresponding API, i.e., the collections are wrapped, + * not copied. Changes to the original collection are reflected in the view, and vice versa: + * + * {{{ + * scala> import scala.jdk.CollectionConverters._ + * + * scala> val s = collection.mutable.Set("one") + * s: scala.collection.mutable.Set[String] = HashSet(one) + * + * scala> val js = s.asJava + * js: java.util.Set[String] = [one] + * + * scala> js.add("two") + * + * scala> s + * res2: scala.collection.mutable.Set[String] = HashSet(two, one) + * }}} + * + * The following conversions are supported via `asScala` and `asJava`: + * + * {{{ + * scala.collection.Iterable <=> java.lang.Iterable + * scala.collection.Iterator <=> java.util.Iterator + * scala.collection.mutable.Buffer <=> java.util.List + * scala.collection.mutable.Set <=> java.util.Set + * scala.collection.mutable.Map <=> java.util.Map + * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap + * }}} + * + * The following conversions are supported via `asScala` and through + * specially-named extension methods to convert to Java collections, as shown: + * + * {{{ + * 
scala.collection.Iterable <=> java.util.Collection (via asJavaCollection) + * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration) + * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary) + * }}} + * + * In addition, the following one-way conversions are provided via `asJava`: + * + * {{{ + * scala.collection.Seq => java.util.List + * scala.collection.mutable.Seq => java.util.List + * scala.collection.Set => java.util.Set + * scala.collection.Map => java.util.Map + * }}} + * + * The following one way conversion is provided via `asScala`: + * + * {{{ + * java.util.Properties => scala.collection.mutable.Map + * }}} + * + * In all cases, converting from a source type to a target type and back + * again will return the original source object. For example: + * + * {{{ + * import scala.jdk.CollectionConverters._ + * + * val source = new scala.collection.mutable.ListBuffer[Int] + * val target: java.util.List[Int] = source.asJava + * val other: scala.collection.mutable.Buffer[Int] = target.asScala + * assert(source eq other) + * }}} + */ +object CollectionConverters extends AsJavaExtensions with AsScalaExtensions diff --git a/src/library/scala/jdk/DoubleAccumulator.scala b/src/library/scala/jdk/DoubleAccumulator.scala new file mode 100644 index 000000000000..dfdb2feba9ea --- /dev/null +++ b/src/library/scala/jdk/DoubleAccumulator.scala @@ -0,0 +1,488 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.jdk + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.util.Spliterator +import java.util.function.{Consumer, DoubleConsumer} +import java.{lang => jl} + +import scala.annotation._ +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AnyStepper, DoubleStepper, Factory, SeqFactory, Stepper, StepperShape, mutable} +import scala.language.implicitConversions + +/** A specialized Accumulator that holds `Double`s without boxing, see [[Accumulator]]. */ +final class DoubleAccumulator + extends Accumulator[Double, AnyAccumulator, DoubleAccumulator] + with mutable.SeqOps[Double, AnyAccumulator, DoubleAccumulator] + with Serializable { + private[jdk] var current: Array[Double] = DoubleAccumulator.emptyDoubleArray + private[jdk] var history: Array[Array[Double]] = DoubleAccumulator.emptyDoubleArrayArray + + private[jdk] def cumulative(i: Int) = { val x = history(i); x(x.length-1).toLong } + + override protected[this] def className: String = "DoubleAccumulator" + + def efficientStepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = { + val st = new DoubleAccumulatorStepper(this) + val r = + if (shape.shape == StepperShape.DoubleShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParDoubleStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private def expand(): Unit = { + if (index > 0) { + current(current.length-1) = (if (hIndex > 0) { val x = history(hIndex-1); x(x.length-1) } else 0) + index + if (hIndex >= history.length) hExpand() + history(hIndex) = current + hIndex += 1 + } + current = new Array[Double](nextBlockSize+1) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) history = new Array[Array[Double]](4) + else history = java.util.Arrays.copyOf(history, history.length << 1) + } + + /** Appends an element to this `DoubleAccumulator`. 
*/ + def addOne(a: Double): this.type = { + totalSize += 1 + if (index+1 >= current.length) expand() + current(index) = a + index += 1 + this + } + + /** Result collection consisting of all elements appended so far. */ + override def result(): DoubleAccumulator = this + + /** Removes all elements from `that` and appends them to this `DoubleAccumulator`. */ + def drain(that: DoubleAccumulator): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val cuml = that.cumulative(h) + val n = (cuml - prev).toInt + if (current.length - index - 1 >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = cuml + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index - 1>= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - jl.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + } + var pv = if (hIndex > 0) cumulative(hIndex-1) else 0L + if (index > 0) { + val x = + if (index < (current.length >>> 3) && current.length - 1 > 32) { + val ans = java.util.Arrays.copyOf(current, index + 1) + ans(ans.length - 1) = current(current.length - 1) + ans + } + else current + pv = pv + index + x(x.length - 1) = pv.toDouble // see comment on Accumulator.cumulative + history(hIndex) = x + hIndex += 1 + } + while (h < that.hIndex) { + val cuml = that.cumulative(h) + pv = pv + cuml - prev + prev = cuml + val x = that.history(h) + x(x.length - 1) = pv.toDouble // see comment on Accumulator.cumulative + history(hIndex) = x + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear() + } + + override def clear(): Unit = { + super.clear() + current = DoubleAccumulator.emptyDoubleArray + 
history = DoubleAccumulator.emptyDoubleArrayArray + } + + /** Retrieves the `ix`th element. */ + def apply(ix: Long): Double = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) + } + } + + /** Retrieves the `ix`th element, using an `Int` index. */ + def apply(i: Int): Double = apply(i.toLong) + + def update(idx: Long, elem: Double): Unit = { + if (totalSize - idx <= index || hIndex == 0) current((idx - (totalSize - index)).toInt) = elem + else { + val w = seekSlot(idx) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) = elem + } + } + + def update(idx: Int, elem: Double): Unit = update(idx.toLong, elem) + + /** Returns an `Iterator` over the contents of this `DoubleAccumulator`. The `Iterator` is not specialized. */ + def iterator: Iterator[Double] = stepper.iterator + + override def foreach[U](f: Double => U): Unit = { + val s = stepper + while (s.hasStep) f(s.nextStep()) + } + + def map(f: Double => Double): DoubleAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) + b.addOne(f(s.nextStep())) + b.result() + } + + def flatMap(f: Double => IterableOnce[Double]): DoubleAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) + b.addAll(f(s.nextStep())) + b.result() + } + + def collect(pf: PartialFunction[Double, Double]): DoubleAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) { + val n = s.nextStep() + pf.runWith(b.addOne)(n) + } + b.result() + } + + private def filterAccImpl(pred: Double => Boolean, not: Boolean): DoubleAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) { + val n = s.nextStep() + if (pred(n) != not) b.addOne(n) + } + b.result() + } + + override def filter(pred: Double => Boolean): DoubleAccumulator = filterAccImpl(pred, not = false) + + override def filterNot(pred: Double => Boolean): DoubleAccumulator 
= filterAccImpl(pred, not = true) + + override def forall(p: Double => Boolean): Boolean = { + val s = stepper + while (s.hasStep) + if (!p(s.nextStep())) return false + true + } + + override def exists(p: Double => Boolean): Boolean = { + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) return true + false + } + + override def count(p: Double => Boolean): Int = { + var r = 0 + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + def countLong(p: Double => Boolean): Long = { + var r = 0L + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + /** Copies the elements in this `DoubleAccumulator` into an `Array[Double]` */ + @nowarn // cat=lint-overload see toArray[B: ClassTag] + def toArray: Array[Double] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[Double](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = x(x.length-1).toLong + val n = (cuml - pv).toInt + pv = cuml + System.arraycopy(x, 0, a, j, n) + j += n + h += 1 + } + System.arraycopy(current, 0, a, j, index) + j += index + a + } + + /** Copies the elements in this `DoubleAccumulator` to a `List` */ + override def toList: List[Double] = { + var ans: List[Double] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i) :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i) :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** + * Copy the elements in this `DoubleAccumulator` to a specified collection. + * Note that the target collection is not specialized. 
+ * Usage example: `acc.to(Vector)` + */ + override def to[C1](factory: Factory[Double, C1]): C1 = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + factory.fromSpecific(iterator) + } + + override protected def fromSpecific(coll: IterableOnce[Double]): DoubleAccumulator = DoubleAccumulator.fromSpecific(coll) + override protected def newSpecificBuilder: DoubleAccumulator = DoubleAccumulator.newBuilder + override def iterableFactory: SeqFactory[AnyAccumulator] = AnyAccumulator + + override def empty: DoubleAccumulator = DoubleAccumulator.empty + + private def writeReplace(): AnyRef = new DoubleAccumulator.SerializationProxy(this) +} + +object DoubleAccumulator extends collection.SpecificIterableFactory[Double, DoubleAccumulator] { + private val emptyDoubleArray = new Array[Double](0) + private val emptyDoubleArrayArray = new Array[Array[Double]](0) + + implicit def toJavaDoubleAccumulator(ia: DoubleAccumulator.type): collection.SpecificIterableFactory[jl.Double, DoubleAccumulator] = DoubleAccumulator.asInstanceOf[collection.SpecificIterableFactory[jl.Double, DoubleAccumulator]] + + import java.util.{function => jf} + + /** A `Supplier` of `DoubleAccumulator`s, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. Suitable for `Stream[Double]` also. */ + def supplier: jf.Supplier[DoubleAccumulator] = () => new DoubleAccumulator + + /** A `BiConsumer` that adds an element to an `DoubleAccumulator`, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. */ + def adder: jf.ObjDoubleConsumer[DoubleAccumulator] = (ac: DoubleAccumulator, a: Double) => ac addOne a + + /** A `BiConsumer` that adds a boxed `Double` to an `DoubleAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. 
*/ + def boxedAdder: jf.BiConsumer[DoubleAccumulator, Double] = (ac: DoubleAccumulator, a: Double) => ac addOne a + + /** A `BiConsumer` that merges `DoubleAccumulator`s, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. Suitable for `Stream[Double]` also. */ + def merger: jf.BiConsumer[DoubleAccumulator, DoubleAccumulator] = (a1: DoubleAccumulator, a2: DoubleAccumulator) => a1 drain a2 + + private def fromArray(a: Array[Double]): DoubleAccumulator = { + val r = new DoubleAccumulator + var i = 0 + while (i < a.length) { r addOne a(i); i += 1 } + r + } + + override def fromSpecific(it: IterableOnce[Double]): DoubleAccumulator = it match { + case acc: DoubleAccumulator => acc + case as: collection.immutable.ArraySeq.ofDouble => fromArray(as.unsafeArray) + case as: collection.mutable.ArraySeq.ofDouble => fromArray(as.array) // this case ensures Array(1).to(Accumulator) doesn't box + case _ => (new DoubleAccumulator).addAll(it) + } + + override def empty: DoubleAccumulator = new DoubleAccumulator + + override def newBuilder: DoubleAccumulator = new DoubleAccumulator + + class SerializationProxy[A](@transient private val acc: DoubleAccumulator) extends Serializable { + @transient private var result: DoubleAccumulator = _ + + private def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val size = acc.sizeLong + out.writeLong(size) + val st = acc.stepper + while (st.hasStep) + out.writeDouble(st.nextStep()) + } + + private def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val res = new DoubleAccumulator() + var elems = in.readLong() + while (elems > 0) { + res += in.readDouble() + elems -= 1L + } + result = res + } + + private def readResolve(): AnyRef = result + } +} + +private[jdk] class DoubleAccumulatorStepper(private val acc: DoubleAccumulator) extends DoubleStepper with EfficientSplit { + import java.util.Spliterator._ + + private var h: Int = 0 + private var i: Int = 0 + private var a: 
Array[Double] = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n: Long = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N: Long = acc.totalSize + + private def duplicateSelf(limit: Long): DoubleAccumulatorStepper = { + val ans = new DoubleAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics: Int = ORDERED | SIZED | SUBSIZED | NONNULL + + def estimateSize: Long = N + + def hasStep: Boolean = N > 0 + + def nextStep(): Double = + if (n <= 0) throw new NoSuchElementException("next on empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i) + i += 1 + N -= 1 + ans + } + + def trySplit(): DoubleStepper = + if (N <= 1) null + else { + val half = N >> 1 + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } + + override def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) { + // Overridden for efficiency + override def tryAdvance(c: DoubleConsumer): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { + case ic: DoubleConsumer => tryAdvance(ic) + case _ => + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i)) + i += 1 + N -= 1 + true + } + } + + // Overridden for efficiency 
+ override def forEachRemaining(c: DoubleConsumer): Unit = + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + c.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + + // Overridden for efficiency + override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { + case ic: DoubleConsumer => forEachRemaining(ic) + case _ => + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + c.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + } + } +} diff --git a/src/library/scala/jdk/DurationConverters.scala b/src/library/scala/jdk/DurationConverters.scala new file mode 100644 index 000000000000..a98cd5b709d6 --- /dev/null +++ b/src/library/scala/jdk/DurationConverters.scala @@ -0,0 +1,34 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.time.{Duration => JDuration} + +import scala.concurrent.duration.FiniteDuration + +/** This object provides extension methods that convert between Scala and Java duration types. + * + * When writing Java code, use the explicit conversion methods defined in + * [[javaapi.DurationConverters]] instead. + */ +object DurationConverters { + implicit class JavaDurationOps(private val duration: JDuration) extends AnyVal { + /** Convert a Java duration to a Scala duration, see [[javaapi.DurationConverters.toScala]]. */ + def toScala: FiniteDuration = javaapi.DurationConverters.toScala(duration) + } + + implicit final class ScalaDurationOps(private val duration: FiniteDuration) extends AnyVal { + /** Convert a Scala duration to a Java duration, see [[javaapi.DurationConverters.toJava]]. 
*/ + def toJava: JDuration = javaapi.DurationConverters.toJava(duration) + } +} diff --git a/src/library/scala/jdk/FunctionConverters.scala b/src/library/scala/jdk/FunctionConverters.scala new file mode 100644 index 000000000000..3c2d42564df0 --- /dev/null +++ b/src/library/scala/jdk/FunctionConverters.scala @@ -0,0 +1,52 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +// GENERATED CODE: DO NOT EDIT. + + +package scala.jdk + +/** This object provides extension methods that convert between Scala and Java function types. + * + * When writing Java code, use the explicit conversion methods defined in + * [[javaapi.FunctionConverters]] instead. + * + * Using the `.asJava` extension method on a Scala function produces the most specific possible + * Java function type: + * + * {{{ + * scala> import scala.jdk.FunctionConverters._ + * scala> val f = (x: Int) => x + 1 + * + * scala> val jf1 = f.asJava + * jf1: java.util.function.IntUnaryOperator = ... + * }}} + * + * More generic Java function types can be created using the corresponding `asJavaXYZ` extension + * method: + * + * {{{ + * scala> val jf2 = f.asJavaFunction + * jf2: java.util.function.Function[Int,Int] = ... + * + * scala> val jf3 = f.asJavaUnaryOperator + * jf3: java.util.function.UnaryOperator[Int] = ... 
+ * }}} + * + * Converting a Java function to Scala is done using the `asScala` extension method: + * + * {{{ + * scala> List(1,2,3).map(jf2.asScala) + * res1: List[Int] = List(2, 3, 4) + * }}} + */ +object FunctionConverters extends Priority0FunctionExtensions diff --git a/src/library/scala/jdk/FunctionExtensions.scala b/src/library/scala/jdk/FunctionExtensions.scala new file mode 100644 index 000000000000..e932609e7af5 --- /dev/null +++ b/src/library/scala/jdk/FunctionExtensions.scala @@ -0,0 +1,220 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +// GENERATED CODE: DO NOT EDIT. + + +package scala.jdk + +import language.implicitConversions + +trait Priority3FunctionExtensions { + import FunctionWrappers._ + + @inline implicit def enrichAsJavaBiFunction[T, U, R](sf: scala.Function2[T, U, R]): RichFunction2AsBiFunction[T, U, R] = new RichFunction2AsBiFunction[T, U, R](sf) +} + + + +import language.implicitConversions + +trait Priority2FunctionExtensions extends Priority3FunctionExtensions { + import FunctionWrappers._ + + @inline implicit def enrichAsJavaBiConsumer[T, U](sf: scala.Function2[T, U, Unit]): RichFunction2AsBiConsumer[T, U] = new RichFunction2AsBiConsumer[T, U](sf) + + @inline implicit def enrichAsJavaBiPredicate[T, U](sf: scala.Function2[T, U, Boolean]): RichFunction2AsBiPredicate[T, U] = new RichFunction2AsBiPredicate[T, U](sf) + + @inline implicit def enrichAsJavaFunction[T, R](sf: scala.Function1[T, R]): RichFunction1AsFunction[T, R] = new RichFunction1AsFunction[T, R](sf) + + @inline implicit def enrichAsJavaToDoubleBiFunction[T, U](sf: scala.Function2[T, U, Double]): RichFunction2AsToDoubleBiFunction[T, U] = new RichFunction2AsToDoubleBiFunction[T, U](sf) + + @inline implicit def 
enrichAsJavaToIntBiFunction[T, U](sf: scala.Function2[T, U, Int]): RichFunction2AsToIntBiFunction[T, U] = new RichFunction2AsToIntBiFunction[T, U](sf) + + @inline implicit def enrichAsJavaToLongBiFunction[T, U](sf: scala.Function2[T, U, Long]): RichFunction2AsToLongBiFunction[T, U] = new RichFunction2AsToLongBiFunction[T, U](sf) +} + + + +import language.implicitConversions + +trait Priority1FunctionExtensions extends Priority2FunctionExtensions { + import FunctionWrappers._ + + @inline implicit def enrichAsJavaBinaryOperator[T, A1, A2](sf: scala.Function2[T, A1, A2])(implicit evA1: =:=[A1, T], evA2: =:=[A2, T]): RichFunction2AsBinaryOperator[T] = new RichFunction2AsBinaryOperator[T](sf.asInstanceOf[scala.Function2[T, T, T]]) + + @inline implicit def enrichAsJavaConsumer[T](sf: scala.Function1[T, Unit]): RichFunction1AsConsumer[T] = new RichFunction1AsConsumer[T](sf) + + @inline implicit def enrichAsJavaDoubleFunction[A0, R](sf: scala.Function1[A0, R])(implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleFunction[R] = new RichFunction1AsDoubleFunction[R](sf.asInstanceOf[scala.Function1[Double, R]]) + + @inline implicit def enrichAsJavaIntFunction[A0, R](sf: scala.Function1[A0, R])(implicit evA0: =:=[A0, Int]): RichFunction1AsIntFunction[R] = new RichFunction1AsIntFunction[R](sf.asInstanceOf[scala.Function1[Int, R]]) + + @inline implicit def enrichAsJavaLongFunction[A0, R](sf: scala.Function1[A0, R])(implicit evA0: =:=[A0, Long]): RichFunction1AsLongFunction[R] = new RichFunction1AsLongFunction[R](sf.asInstanceOf[scala.Function1[Long, R]]) + + @inline implicit def enrichAsJavaObjDoubleConsumer[T, A1](sf: scala.Function2[T, A1, Unit])(implicit evA1: =:=[A1, Double]): RichFunction2AsObjDoubleConsumer[T] = new RichFunction2AsObjDoubleConsumer[T](sf.asInstanceOf[scala.Function2[T, Double, Unit]]) + + @inline implicit def enrichAsJavaObjIntConsumer[T, A1](sf: scala.Function2[T, A1, Unit])(implicit evA1: =:=[A1, Int]): RichFunction2AsObjIntConsumer[T] = new 
RichFunction2AsObjIntConsumer[T](sf.asInstanceOf[scala.Function2[T, Int, Unit]]) + + @inline implicit def enrichAsJavaObjLongConsumer[T, A1](sf: scala.Function2[T, A1, Unit])(implicit evA1: =:=[A1, Long]): RichFunction2AsObjLongConsumer[T] = new RichFunction2AsObjLongConsumer[T](sf.asInstanceOf[scala.Function2[T, Long, Unit]]) + + @inline implicit def enrichAsJavaPredicate[T](sf: scala.Function1[T, Boolean]): RichFunction1AsPredicate[T] = new RichFunction1AsPredicate[T](sf) + + @inline implicit def enrichAsJavaSupplier[T](sf: scala.Function0[T]): RichFunction0AsSupplier[T] = new RichFunction0AsSupplier[T](sf) + + @inline implicit def enrichAsJavaToDoubleFunction[T](sf: scala.Function1[T, Double]): RichFunction1AsToDoubleFunction[T] = new RichFunction1AsToDoubleFunction[T](sf) + + @inline implicit def enrichAsJavaToIntFunction[T](sf: scala.Function1[T, Int]): RichFunction1AsToIntFunction[T] = new RichFunction1AsToIntFunction[T](sf) + + @inline implicit def enrichAsJavaToLongFunction[T](sf: scala.Function1[T, Long]): RichFunction1AsToLongFunction[T] = new RichFunction1AsToLongFunction[T](sf) + + @inline implicit def enrichAsJavaUnaryOperator[T, A1](sf: scala.Function1[T, A1])(implicit evA1: =:=[A1, T]): RichFunction1AsUnaryOperator[T] = new RichFunction1AsUnaryOperator[T](sf.asInstanceOf[scala.Function1[T, T]]) +} + + + +import language.implicitConversions + +trait Priority0FunctionExtensions extends Priority1FunctionExtensions { + import FunctionWrappers._ + + @inline implicit def enrichAsJavaBooleanSupplier(sf: scala.Function0[Boolean]): RichFunction0AsBooleanSupplier = new RichFunction0AsBooleanSupplier(sf) + + @inline implicit def enrichAsJavaDoubleBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Double])(implicit evA0: =:=[A0, Double], evA1: =:=[A1, Double]): RichFunction2AsDoubleBinaryOperator = new RichFunction2AsDoubleBinaryOperator(sf.asInstanceOf[scala.Function2[Double, Double, Double]]) + + @inline implicit def enrichAsJavaDoubleConsumer[A0](sf: 
scala.Function1[A0, Unit])(implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleConsumer = new RichFunction1AsDoubleConsumer(sf.asInstanceOf[scala.Function1[Double, Unit]]) + + @inline implicit def enrichAsJavaDoublePredicate[A0](sf: scala.Function1[A0, Boolean])(implicit evA0: =:=[A0, Double]): RichFunction1AsDoublePredicate = new RichFunction1AsDoublePredicate(sf.asInstanceOf[scala.Function1[Double, Boolean]]) + + @inline implicit def enrichAsJavaDoubleSupplier(sf: scala.Function0[Double]): RichFunction0AsDoubleSupplier = new RichFunction0AsDoubleSupplier(sf) + + @inline implicit def enrichAsJavaDoubleToIntFunction[A0](sf: scala.Function1[A0, Int])(implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleToIntFunction = new RichFunction1AsDoubleToIntFunction(sf.asInstanceOf[scala.Function1[Double, Int]]) + + @inline implicit def enrichAsJavaDoubleToLongFunction[A0](sf: scala.Function1[A0, Long])(implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleToLongFunction = new RichFunction1AsDoubleToLongFunction(sf.asInstanceOf[scala.Function1[Double, Long]]) + + @inline implicit def enrichAsJavaDoubleUnaryOperator[A0](sf: scala.Function1[A0, Double])(implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleUnaryOperator = new RichFunction1AsDoubleUnaryOperator(sf.asInstanceOf[scala.Function1[Double, Double]]) + + @inline implicit def enrichAsJavaIntBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Int])(implicit evA0: =:=[A0, Int], evA1: =:=[A1, Int]): RichFunction2AsIntBinaryOperator = new RichFunction2AsIntBinaryOperator(sf.asInstanceOf[scala.Function2[Int, Int, Int]]) + + @inline implicit def enrichAsJavaIntConsumer[A0](sf: scala.Function1[A0, Unit])(implicit evA0: =:=[A0, Int]): RichFunction1AsIntConsumer = new RichFunction1AsIntConsumer(sf.asInstanceOf[scala.Function1[Int, Unit]]) + + @inline implicit def enrichAsJavaIntPredicate[A0](sf: scala.Function1[A0, Boolean])(implicit evA0: =:=[A0, Int]): RichFunction1AsIntPredicate = new 
RichFunction1AsIntPredicate(sf.asInstanceOf[scala.Function1[Int, Boolean]]) + + @inline implicit def enrichAsJavaIntSupplier(sf: scala.Function0[Int]): RichFunction0AsIntSupplier = new RichFunction0AsIntSupplier(sf) + + @inline implicit def enrichAsJavaIntToDoubleFunction[A0](sf: scala.Function1[A0, Double])(implicit evA0: =:=[A0, Int]): RichFunction1AsIntToDoubleFunction = new RichFunction1AsIntToDoubleFunction(sf.asInstanceOf[scala.Function1[Int, Double]]) + + @inline implicit def enrichAsJavaIntToLongFunction[A0](sf: scala.Function1[A0, Long])(implicit evA0: =:=[A0, Int]): RichFunction1AsIntToLongFunction = new RichFunction1AsIntToLongFunction(sf.asInstanceOf[scala.Function1[Int, Long]]) + + @inline implicit def enrichAsJavaIntUnaryOperator[A0](sf: scala.Function1[A0, Int])(implicit evA0: =:=[A0, Int]): RichFunction1AsIntUnaryOperator = new RichFunction1AsIntUnaryOperator(sf.asInstanceOf[scala.Function1[Int, Int]]) + + @inline implicit def enrichAsJavaLongBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Long])(implicit evA0: =:=[A0, Long], evA1: =:=[A1, Long]): RichFunction2AsLongBinaryOperator = new RichFunction2AsLongBinaryOperator(sf.asInstanceOf[scala.Function2[Long, Long, Long]]) + + @inline implicit def enrichAsJavaLongConsumer[A0](sf: scala.Function1[A0, Unit])(implicit evA0: =:=[A0, Long]): RichFunction1AsLongConsumer = new RichFunction1AsLongConsumer(sf.asInstanceOf[scala.Function1[Long, Unit]]) + + @inline implicit def enrichAsJavaLongPredicate[A0](sf: scala.Function1[A0, Boolean])(implicit evA0: =:=[A0, Long]): RichFunction1AsLongPredicate = new RichFunction1AsLongPredicate(sf.asInstanceOf[scala.Function1[Long, Boolean]]) + + @inline implicit def enrichAsJavaLongSupplier(sf: scala.Function0[Long]): RichFunction0AsLongSupplier = new RichFunction0AsLongSupplier(sf) + + @inline implicit def enrichAsJavaLongToDoubleFunction[A0](sf: scala.Function1[A0, Double])(implicit evA0: =:=[A0, Long]): RichFunction1AsLongToDoubleFunction = new 
RichFunction1AsLongToDoubleFunction(sf.asInstanceOf[scala.Function1[Long, Double]]) + + @inline implicit def enrichAsJavaLongToIntFunction[A0](sf: scala.Function1[A0, Int])(implicit evA0: =:=[A0, Long]): RichFunction1AsLongToIntFunction = new RichFunction1AsLongToIntFunction(sf.asInstanceOf[scala.Function1[Long, Int]]) + + @inline implicit def enrichAsJavaLongUnaryOperator[A0](sf: scala.Function1[A0, Long])(implicit evA0: =:=[A0, Long]): RichFunction1AsLongUnaryOperator = new RichFunction1AsLongUnaryOperator(sf.asInstanceOf[scala.Function1[Long, Long]]) + + + + @inline implicit def enrichAsScalaFromBiConsumer[T, U](jf: java.util.function.BiConsumer[T, U]): RichBiConsumerAsFunction2[T, U] = new RichBiConsumerAsFunction2[T, U](jf) + + @inline implicit def enrichAsScalaFromBiFunction[T, U, R](jf: java.util.function.BiFunction[T, U, R]): RichBiFunctionAsFunction2[T, U, R] = new RichBiFunctionAsFunction2[T, U, R](jf) + + @inline implicit def enrichAsScalaFromBiPredicate[T, U](jf: java.util.function.BiPredicate[T, U]): RichBiPredicateAsFunction2[T, U] = new RichBiPredicateAsFunction2[T, U](jf) + + @inline implicit def enrichAsScalaFromBinaryOperator[T](jf: java.util.function.BinaryOperator[T]): RichBinaryOperatorAsFunction2[T] = new RichBinaryOperatorAsFunction2[T](jf) + + @inline implicit def enrichAsScalaFromBooleanSupplier(jf: java.util.function.BooleanSupplier): RichBooleanSupplierAsFunction0 = new RichBooleanSupplierAsFunction0(jf) + + @inline implicit def enrichAsScalaFromConsumer[T](jf: java.util.function.Consumer[T]): RichConsumerAsFunction1[T] = new RichConsumerAsFunction1[T](jf) + + @inline implicit def enrichAsScalaFromDoubleBinaryOperator(jf: java.util.function.DoubleBinaryOperator): RichDoubleBinaryOperatorAsFunction2 = new RichDoubleBinaryOperatorAsFunction2(jf) + + @inline implicit def enrichAsScalaFromDoubleConsumer(jf: java.util.function.DoubleConsumer): RichDoubleConsumerAsFunction1 = new RichDoubleConsumerAsFunction1(jf) + + @inline implicit def 
enrichAsScalaFromDoubleFunction[R](jf: java.util.function.DoubleFunction[R]): RichDoubleFunctionAsFunction1[R] = new RichDoubleFunctionAsFunction1[R](jf) + + @inline implicit def enrichAsScalaFromDoublePredicate(jf: java.util.function.DoublePredicate): RichDoublePredicateAsFunction1 = new RichDoublePredicateAsFunction1(jf) + + @inline implicit def enrichAsScalaFromDoubleSupplier(jf: java.util.function.DoubleSupplier): RichDoubleSupplierAsFunction0 = new RichDoubleSupplierAsFunction0(jf) + + @inline implicit def enrichAsScalaFromDoubleToIntFunction(jf: java.util.function.DoubleToIntFunction): RichDoubleToIntFunctionAsFunction1 = new RichDoubleToIntFunctionAsFunction1(jf) + + @inline implicit def enrichAsScalaFromDoubleToLongFunction(jf: java.util.function.DoubleToLongFunction): RichDoubleToLongFunctionAsFunction1 = new RichDoubleToLongFunctionAsFunction1(jf) + + @inline implicit def enrichAsScalaFromDoubleUnaryOperator(jf: java.util.function.DoubleUnaryOperator): RichDoubleUnaryOperatorAsFunction1 = new RichDoubleUnaryOperatorAsFunction1(jf) + + @inline implicit def enrichAsScalaFromFunction[T, R](jf: java.util.function.Function[T, R]): RichFunctionAsFunction1[T, R] = new RichFunctionAsFunction1[T, R](jf) + + @inline implicit def enrichAsScalaFromIntBinaryOperator(jf: java.util.function.IntBinaryOperator): RichIntBinaryOperatorAsFunction2 = new RichIntBinaryOperatorAsFunction2(jf) + + @inline implicit def enrichAsScalaFromIntConsumer(jf: java.util.function.IntConsumer): RichIntConsumerAsFunction1 = new RichIntConsumerAsFunction1(jf) + + @inline implicit def enrichAsScalaFromIntFunction[R](jf: java.util.function.IntFunction[R]): RichIntFunctionAsFunction1[R] = new RichIntFunctionAsFunction1[R](jf) + + @inline implicit def enrichAsScalaFromIntPredicate(jf: java.util.function.IntPredicate): RichIntPredicateAsFunction1 = new RichIntPredicateAsFunction1(jf) + + @inline implicit def enrichAsScalaFromIntSupplier(jf: java.util.function.IntSupplier): 
RichIntSupplierAsFunction0 = new RichIntSupplierAsFunction0(jf) + + @inline implicit def enrichAsScalaFromIntToDoubleFunction(jf: java.util.function.IntToDoubleFunction): RichIntToDoubleFunctionAsFunction1 = new RichIntToDoubleFunctionAsFunction1(jf) + + @inline implicit def enrichAsScalaFromIntToLongFunction(jf: java.util.function.IntToLongFunction): RichIntToLongFunctionAsFunction1 = new RichIntToLongFunctionAsFunction1(jf) + + @inline implicit def enrichAsScalaFromIntUnaryOperator(jf: java.util.function.IntUnaryOperator): RichIntUnaryOperatorAsFunction1 = new RichIntUnaryOperatorAsFunction1(jf) + + @inline implicit def enrichAsScalaFromLongBinaryOperator(jf: java.util.function.LongBinaryOperator): RichLongBinaryOperatorAsFunction2 = new RichLongBinaryOperatorAsFunction2(jf) + + @inline implicit def enrichAsScalaFromLongConsumer(jf: java.util.function.LongConsumer): RichLongConsumerAsFunction1 = new RichLongConsumerAsFunction1(jf) + + @inline implicit def enrichAsScalaFromLongFunction[R](jf: java.util.function.LongFunction[R]): RichLongFunctionAsFunction1[R] = new RichLongFunctionAsFunction1[R](jf) + + @inline implicit def enrichAsScalaFromLongPredicate(jf: java.util.function.LongPredicate): RichLongPredicateAsFunction1 = new RichLongPredicateAsFunction1(jf) + + @inline implicit def enrichAsScalaFromLongSupplier(jf: java.util.function.LongSupplier): RichLongSupplierAsFunction0 = new RichLongSupplierAsFunction0(jf) + + @inline implicit def enrichAsScalaFromLongToDoubleFunction(jf: java.util.function.LongToDoubleFunction): RichLongToDoubleFunctionAsFunction1 = new RichLongToDoubleFunctionAsFunction1(jf) + + @inline implicit def enrichAsScalaFromLongToIntFunction(jf: java.util.function.LongToIntFunction): RichLongToIntFunctionAsFunction1 = new RichLongToIntFunctionAsFunction1(jf) + + @inline implicit def enrichAsScalaFromLongUnaryOperator(jf: java.util.function.LongUnaryOperator): RichLongUnaryOperatorAsFunction1 = new RichLongUnaryOperatorAsFunction1(jf) + + 
@inline implicit def enrichAsScalaFromObjDoubleConsumer[T](jf: java.util.function.ObjDoubleConsumer[T]): RichObjDoubleConsumerAsFunction2[T] = new RichObjDoubleConsumerAsFunction2[T](jf) + + @inline implicit def enrichAsScalaFromObjIntConsumer[T](jf: java.util.function.ObjIntConsumer[T]): RichObjIntConsumerAsFunction2[T] = new RichObjIntConsumerAsFunction2[T](jf) + + @inline implicit def enrichAsScalaFromObjLongConsumer[T](jf: java.util.function.ObjLongConsumer[T]): RichObjLongConsumerAsFunction2[T] = new RichObjLongConsumerAsFunction2[T](jf) + + @inline implicit def enrichAsScalaFromPredicate[T](jf: java.util.function.Predicate[T]): RichPredicateAsFunction1[T] = new RichPredicateAsFunction1[T](jf) + + @inline implicit def enrichAsScalaFromSupplier[T](jf: java.util.function.Supplier[T]): RichSupplierAsFunction0[T] = new RichSupplierAsFunction0[T](jf) + + @inline implicit def enrichAsScalaFromToDoubleBiFunction[T, U](jf: java.util.function.ToDoubleBiFunction[T, U]): RichToDoubleBiFunctionAsFunction2[T, U] = new RichToDoubleBiFunctionAsFunction2[T, U](jf) + + @inline implicit def enrichAsScalaFromToDoubleFunction[T](jf: java.util.function.ToDoubleFunction[T]): RichToDoubleFunctionAsFunction1[T] = new RichToDoubleFunctionAsFunction1[T](jf) + + @inline implicit def enrichAsScalaFromToIntBiFunction[T, U](jf: java.util.function.ToIntBiFunction[T, U]): RichToIntBiFunctionAsFunction2[T, U] = new RichToIntBiFunctionAsFunction2[T, U](jf) + + @inline implicit def enrichAsScalaFromToIntFunction[T](jf: java.util.function.ToIntFunction[T]): RichToIntFunctionAsFunction1[T] = new RichToIntFunctionAsFunction1[T](jf) + + @inline implicit def enrichAsScalaFromToLongBiFunction[T, U](jf: java.util.function.ToLongBiFunction[T, U]): RichToLongBiFunctionAsFunction2[T, U] = new RichToLongBiFunctionAsFunction2[T, U](jf) + + @inline implicit def enrichAsScalaFromToLongFunction[T](jf: java.util.function.ToLongFunction[T]): RichToLongFunctionAsFunction1[T] = new 
RichToLongFunctionAsFunction1[T](jf) + + @inline implicit def enrichAsScalaFromUnaryOperator[T](jf: java.util.function.UnaryOperator[T]): RichUnaryOperatorAsFunction1[T] = new RichUnaryOperatorAsFunction1[T](jf) +} diff --git a/src/library/scala/jdk/FunctionWrappers.scala b/src/library/scala/jdk/FunctionWrappers.scala new file mode 100644 index 000000000000..27153ffed820 --- /dev/null +++ b/src/library/scala/jdk/FunctionWrappers.scala @@ -0,0 +1,1090 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +// GENERATED CODE: DO NOT EDIT. + + +package scala.jdk + +object FunctionWrappers { + case class FromJavaBiConsumer[T, U](jf: java.util.function.BiConsumer[T, U]) extends scala.Function2[T, U, Unit] { + def apply(x1: T, x2: U) = jf.accept(x1, x2) + } + + class RichBiConsumerAsFunction2[T, U](private val underlying: java.util.function.BiConsumer[T, U]) extends AnyVal { + @inline def asScala: scala.Function2[T, U, Unit] = underlying match { + case AsJavaBiConsumer((sf @ _)) => sf.asInstanceOf[scala.Function2[T, U, Unit]] + case _ => new FromJavaBiConsumer[T, U](underlying) + } + } + + case class AsJavaBiConsumer[T, U](sf: scala.Function2[T, U, Unit]) extends java.util.function.BiConsumer[T, U] { + def accept(x1: T, x2: U) = sf.apply(x1, x2) + } + + class RichFunction2AsBiConsumer[T, U](private val underlying: scala.Function2[T, U, Unit]) extends AnyVal { + @inline def asJava: java.util.function.BiConsumer[T, U] = underlying match { + case FromJavaBiConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.BiConsumer[T, U]] + case _ => new AsJavaBiConsumer[T, U](underlying) + }; + @inline def asJavaBiConsumer: java.util.function.BiConsumer[T, U] = underlying match { + case FromJavaBiConsumer((sf @ _)) => 
sf.asInstanceOf[java.util.function.BiConsumer[T, U]] + case _ => new AsJavaBiConsumer[T, U](underlying) + } + } + + + case class FromJavaBiFunction[T, U, R](jf: java.util.function.BiFunction[T, U, R]) extends scala.Function2[T, U, R] { + def apply(x1: T, x2: U) = jf.apply(x1, x2) + } + + class RichBiFunctionAsFunction2[T, U, R](private val underlying: java.util.function.BiFunction[T, U, R]) extends AnyVal { + @inline def asScala: scala.Function2[T, U, R] = underlying match { + case AsJavaBiFunction((sf @ _)) => sf.asInstanceOf[scala.Function2[T, U, R]] + case _ => new FromJavaBiFunction[T, U, R](underlying) + } + } + + case class AsJavaBiFunction[T, U, R](sf: scala.Function2[T, U, R]) extends java.util.function.BiFunction[T, U, R] { + def apply(x1: T, x2: U) = sf.apply(x1, x2) + } + + class RichFunction2AsBiFunction[T, U, R](private val underlying: scala.Function2[T, U, R]) extends AnyVal { + @inline def asJava: java.util.function.BiFunction[T, U, R] = underlying match { + case FromJavaBiFunction((jf @ _)) => jf.asInstanceOf[java.util.function.BiFunction[T, U, R]] + case _ => new AsJavaBiFunction[T, U, R](underlying) + }; + @inline def asJavaBiFunction: java.util.function.BiFunction[T, U, R] = underlying match { + case FromJavaBiFunction((sf @ _)) => sf.asInstanceOf[java.util.function.BiFunction[T, U, R]] + case _ => new AsJavaBiFunction[T, U, R](underlying) + } + } + + + case class FromJavaBiPredicate[T, U](jf: java.util.function.BiPredicate[T, U]) extends scala.Function2[T, U, Boolean] { + def apply(x1: T, x2: U) = jf.test(x1, x2) + } + + class RichBiPredicateAsFunction2[T, U](private val underlying: java.util.function.BiPredicate[T, U]) extends AnyVal { + @inline def asScala: scala.Function2[T, U, Boolean] = underlying match { + case AsJavaBiPredicate((sf @ _)) => sf.asInstanceOf[scala.Function2[T, U, Boolean]] + case _ => new FromJavaBiPredicate[T, U](underlying) + } + } + + case class AsJavaBiPredicate[T, U](sf: scala.Function2[T, U, Boolean]) extends 
java.util.function.BiPredicate[T, U] { + def test(x1: T, x2: U) = sf.apply(x1, x2) + } + + class RichFunction2AsBiPredicate[T, U](private val underlying: scala.Function2[T, U, Boolean]) extends AnyVal { + @inline def asJava: java.util.function.BiPredicate[T, U] = underlying match { + case FromJavaBiPredicate((jf @ _)) => jf.asInstanceOf[java.util.function.BiPredicate[T, U]] + case _ => new AsJavaBiPredicate[T, U](underlying) + }; + @inline def asJavaBiPredicate: java.util.function.BiPredicate[T, U] = underlying match { + case FromJavaBiPredicate((sf @ _)) => sf.asInstanceOf[java.util.function.BiPredicate[T, U]] + case _ => new AsJavaBiPredicate[T, U](underlying) + } + } + + + case class FromJavaBinaryOperator[T](jf: java.util.function.BinaryOperator[T]) extends scala.Function2[T, T, T] { + def apply(x1: T, x2: T) = jf.apply(x1, x2) + } + + class RichBinaryOperatorAsFunction2[T](private val underlying: java.util.function.BinaryOperator[T]) extends AnyVal { + @inline def asScala: scala.Function2[T, T, T] = underlying match { + case AsJavaBinaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function2[T, T, T]] + case _ => new FromJavaBinaryOperator[T](underlying) + } + } + + case class AsJavaBinaryOperator[T](sf: scala.Function2[T, T, T]) extends java.util.function.BinaryOperator[T] { + def apply(x1: T, x2: T) = sf.apply(x1, x2) + } + + class RichFunction2AsBinaryOperator[T](private val underlying: scala.Function2[T, T, T]) extends AnyVal { + @inline def asJava: java.util.function.BinaryOperator[T] = underlying match { + case FromJavaBinaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.BinaryOperator[T]] + case _ => new AsJavaBinaryOperator[T](underlying) + }; + @inline def asJavaBinaryOperator: java.util.function.BinaryOperator[T] = underlying match { + case FromJavaBinaryOperator((sf @ _)) => sf.asInstanceOf[java.util.function.BinaryOperator[T]] + case _ => new AsJavaBinaryOperator[T](underlying) + } + } + + + case class FromJavaBooleanSupplier(jf: 
java.util.function.BooleanSupplier) extends scala.Function0[Boolean] { + def apply() = jf.getAsBoolean() + } + + class RichBooleanSupplierAsFunction0(private val underlying: java.util.function.BooleanSupplier) extends AnyVal { + @inline def asScala: scala.Function0[Boolean] = underlying match { + case AsJavaBooleanSupplier((sf @ _)) => sf.asInstanceOf[scala.Function0[Boolean]] + case _ => new FromJavaBooleanSupplier(underlying) + } + } + + case class AsJavaBooleanSupplier(sf: scala.Function0[Boolean]) extends java.util.function.BooleanSupplier { + def getAsBoolean() = sf.apply() + } + + class RichFunction0AsBooleanSupplier(private val underlying: scala.Function0[Boolean]) extends AnyVal { + @inline def asJava: java.util.function.BooleanSupplier = underlying match { + case FromJavaBooleanSupplier((jf @ _)) => jf.asInstanceOf[java.util.function.BooleanSupplier] + case _ => new AsJavaBooleanSupplier(underlying) + } + } + + + case class FromJavaConsumer[T](jf: java.util.function.Consumer[T]) extends scala.Function1[T, Unit] { + def apply(x1: T) = jf.accept(x1) + } + + class RichConsumerAsFunction1[T](private val underlying: java.util.function.Consumer[T]) extends AnyVal { + @inline def asScala: scala.Function1[T, Unit] = underlying match { + case AsJavaConsumer((sf @ _)) => sf.asInstanceOf[scala.Function1[T, Unit]] + case _ => new FromJavaConsumer[T](underlying) + } + } + + case class AsJavaConsumer[T](sf: scala.Function1[T, Unit]) extends java.util.function.Consumer[T] { + def accept(x1: T) = sf.apply(x1) + } + + class RichFunction1AsConsumer[T](private val underlying: scala.Function1[T, Unit]) extends AnyVal { + @inline def asJava: java.util.function.Consumer[T] = underlying match { + case FromJavaConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.Consumer[T]] + case _ => new AsJavaConsumer[T](underlying) + }; + @inline def asJavaConsumer: java.util.function.Consumer[T] = underlying match { + case FromJavaConsumer((sf @ _)) => 
sf.asInstanceOf[java.util.function.Consumer[T]] + case _ => new AsJavaConsumer[T](underlying) + } + } + + + case class FromJavaDoubleBinaryOperator(jf: java.util.function.DoubleBinaryOperator) extends scala.Function2[Double, Double, Double] { + def apply(x1: scala.Double, x2: scala.Double) = jf.applyAsDouble(x1, x2) + } + + class RichDoubleBinaryOperatorAsFunction2(private val underlying: java.util.function.DoubleBinaryOperator) extends AnyVal { + @inline def asScala: scala.Function2[Double, Double, Double] = underlying match { + case AsJavaDoubleBinaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function2[Double, Double, Double]] + case _ => new FromJavaDoubleBinaryOperator(underlying) + } + } + + case class AsJavaDoubleBinaryOperator(sf: scala.Function2[Double, Double, Double]) extends java.util.function.DoubleBinaryOperator { + def applyAsDouble(x1: scala.Double, x2: scala.Double) = sf.apply(x1, x2) + } + + class RichFunction2AsDoubleBinaryOperator(private val underlying: scala.Function2[Double, Double, Double]) extends AnyVal { + @inline def asJava: java.util.function.DoubleBinaryOperator = underlying match { + case FromJavaDoubleBinaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleBinaryOperator] + case _ => new AsJavaDoubleBinaryOperator(underlying) + } + } + + + case class FromJavaDoubleConsumer(jf: java.util.function.DoubleConsumer) extends scala.Function1[Double, Unit] { + def apply(x1: scala.Double) = jf.accept(x1) + } + + class RichDoubleConsumerAsFunction1(private val underlying: java.util.function.DoubleConsumer) extends AnyVal { + @inline def asScala: scala.Function1[Double, Unit] = underlying match { + case AsJavaDoubleConsumer((sf @ _)) => sf.asInstanceOf[scala.Function1[Double, Unit]] + case _ => new FromJavaDoubleConsumer(underlying) + } + } + + case class AsJavaDoubleConsumer(sf: scala.Function1[Double, Unit]) extends java.util.function.DoubleConsumer { + def accept(x1: scala.Double) = sf.apply(x1) + } + + class 
RichFunction1AsDoubleConsumer(private val underlying: scala.Function1[Double, Unit]) extends AnyVal { + @inline def asJava: java.util.function.DoubleConsumer = underlying match { + case FromJavaDoubleConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleConsumer] + case _ => new AsJavaDoubleConsumer(underlying) + } + } + + + case class FromJavaDoubleFunction[R](jf: java.util.function.DoubleFunction[R]) extends scala.Function1[Double, R] { + def apply(x1: scala.Double) = jf.apply(x1) + } + + class RichDoubleFunctionAsFunction1[R](private val underlying: java.util.function.DoubleFunction[R]) extends AnyVal { + @inline def asScala: scala.Function1[Double, R] = underlying match { + case AsJavaDoubleFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Double, R]] + case _ => new FromJavaDoubleFunction[R](underlying) + } + } + + case class AsJavaDoubleFunction[R](sf: scala.Function1[Double, R]) extends java.util.function.DoubleFunction[R] { + def apply(x1: scala.Double) = sf.apply(x1) + } + + class RichFunction1AsDoubleFunction[R](private val underlying: scala.Function1[Double, R]) extends AnyVal { + @inline def asJava: java.util.function.DoubleFunction[R] = underlying match { + case FromJavaDoubleFunction((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleFunction[R]] + case _ => new AsJavaDoubleFunction[R](underlying) + }; + @inline def asJavaDoubleFunction: java.util.function.DoubleFunction[R] = underlying match { + case FromJavaDoubleFunction((sf @ _)) => sf.asInstanceOf[java.util.function.DoubleFunction[R]] + case _ => new AsJavaDoubleFunction[R](underlying) + } + } + + + case class FromJavaDoublePredicate(jf: java.util.function.DoublePredicate) extends scala.Function1[Double, Boolean] { + def apply(x1: scala.Double) = jf.test(x1) + } + + class RichDoublePredicateAsFunction1(private val underlying: java.util.function.DoublePredicate) extends AnyVal { + @inline def asScala: scala.Function1[Double, Boolean] = underlying match { + case 
AsJavaDoublePredicate((sf @ _)) => sf.asInstanceOf[scala.Function1[Double, Boolean]] + case _ => new FromJavaDoublePredicate(underlying) + } + } + + case class AsJavaDoublePredicate(sf: scala.Function1[Double, Boolean]) extends java.util.function.DoublePredicate { + def test(x1: scala.Double) = sf.apply(x1) + } + + class RichFunction1AsDoublePredicate(private val underlying: scala.Function1[Double, Boolean]) extends AnyVal { + @inline def asJava: java.util.function.DoublePredicate = underlying match { + case FromJavaDoublePredicate((jf @ _)) => jf.asInstanceOf[java.util.function.DoublePredicate] + case _ => new AsJavaDoublePredicate(underlying) + } + } + + + case class FromJavaDoubleSupplier(jf: java.util.function.DoubleSupplier) extends scala.Function0[Double] { + def apply() = jf.getAsDouble() + } + + class RichDoubleSupplierAsFunction0(private val underlying: java.util.function.DoubleSupplier) extends AnyVal { + @inline def asScala: scala.Function0[Double] = underlying match { + case AsJavaDoubleSupplier((sf @ _)) => sf.asInstanceOf[scala.Function0[Double]] + case _ => new FromJavaDoubleSupplier(underlying) + } + } + + case class AsJavaDoubleSupplier(sf: scala.Function0[Double]) extends java.util.function.DoubleSupplier { + def getAsDouble() = sf.apply() + } + + class RichFunction0AsDoubleSupplier(private val underlying: scala.Function0[Double]) extends AnyVal { + @inline def asJava: java.util.function.DoubleSupplier = underlying match { + case FromJavaDoubleSupplier((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleSupplier] + case _ => new AsJavaDoubleSupplier(underlying) + } + } + + + case class FromJavaDoubleToIntFunction(jf: java.util.function.DoubleToIntFunction) extends scala.Function1[Double, Int] { + def apply(x1: scala.Double) = jf.applyAsInt(x1) + } + + class RichDoubleToIntFunctionAsFunction1(private val underlying: java.util.function.DoubleToIntFunction) extends AnyVal { + @inline def asScala: scala.Function1[Double, Int] = underlying match { + 
case AsJavaDoubleToIntFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Double, Int]] + case _ => new FromJavaDoubleToIntFunction(underlying) + } + } + + case class AsJavaDoubleToIntFunction(sf: scala.Function1[Double, Int]) extends java.util.function.DoubleToIntFunction { + def applyAsInt(x1: scala.Double) = sf.apply(x1) + } + + class RichFunction1AsDoubleToIntFunction(private val underlying: scala.Function1[Double, Int]) extends AnyVal { + @inline def asJava: java.util.function.DoubleToIntFunction = underlying match { + case FromJavaDoubleToIntFunction((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleToIntFunction] + case _ => new AsJavaDoubleToIntFunction(underlying) + } + } + + + case class FromJavaDoubleToLongFunction(jf: java.util.function.DoubleToLongFunction) extends scala.Function1[Double, Long] { + def apply(x1: scala.Double) = jf.applyAsLong(x1) + } + + class RichDoubleToLongFunctionAsFunction1(private val underlying: java.util.function.DoubleToLongFunction) extends AnyVal { + @inline def asScala: scala.Function1[Double, Long] = underlying match { + case AsJavaDoubleToLongFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Double, Long]] + case _ => new FromJavaDoubleToLongFunction(underlying) + } + } + + case class AsJavaDoubleToLongFunction(sf: scala.Function1[Double, Long]) extends java.util.function.DoubleToLongFunction { + def applyAsLong(x1: scala.Double) = sf.apply(x1) + } + + class RichFunction1AsDoubleToLongFunction(private val underlying: scala.Function1[Double, Long]) extends AnyVal { + @inline def asJava: java.util.function.DoubleToLongFunction = underlying match { + case FromJavaDoubleToLongFunction((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleToLongFunction] + case _ => new AsJavaDoubleToLongFunction(underlying) + } + } + + + case class FromJavaDoubleUnaryOperator(jf: java.util.function.DoubleUnaryOperator) extends scala.Function1[Double, Double] { + def apply(x1: scala.Double) = jf.applyAsDouble(x1) + } + + class 
RichDoubleUnaryOperatorAsFunction1(private val underlying: java.util.function.DoubleUnaryOperator) extends AnyVal { + @inline def asScala: scala.Function1[Double, Double] = underlying match { + case AsJavaDoubleUnaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function1[Double, Double]] + case _ => new FromJavaDoubleUnaryOperator(underlying) + } + } + + case class AsJavaDoubleUnaryOperator(sf: scala.Function1[Double, Double]) extends java.util.function.DoubleUnaryOperator { + def applyAsDouble(x1: scala.Double) = sf.apply(x1) + } + + class RichFunction1AsDoubleUnaryOperator(private val underlying: scala.Function1[Double, Double]) extends AnyVal { + @inline def asJava: java.util.function.DoubleUnaryOperator = underlying match { + case FromJavaDoubleUnaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.DoubleUnaryOperator] + case _ => new AsJavaDoubleUnaryOperator(underlying) + } + } + + + case class FromJavaFunction[T, R](jf: java.util.function.Function[T, R]) extends scala.Function1[T, R] { + def apply(x1: T) = jf.apply(x1) + } + + class RichFunctionAsFunction1[T, R](private val underlying: java.util.function.Function[T, R]) extends AnyVal { + @inline def asScala: scala.Function1[T, R] = underlying match { + case AsJavaFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[T, R]] + case _ => new FromJavaFunction[T, R](underlying) + } + } + + case class AsJavaFunction[T, R](sf: scala.Function1[T, R]) extends java.util.function.Function[T, R] { + def apply(x1: T) = sf.apply(x1) + } + + class RichFunction1AsFunction[T, R](private val underlying: scala.Function1[T, R]) extends AnyVal { + @inline def asJava: java.util.function.Function[T, R] = underlying match { + case FromJavaFunction((jf @ _)) => jf.asInstanceOf[java.util.function.Function[T, R]] + case _ => new AsJavaFunction[T, R](underlying) + }; + @inline def asJavaFunction: java.util.function.Function[T, R] = underlying match { + case FromJavaFunction((sf @ _)) => 
sf.asInstanceOf[java.util.function.Function[T, R]] + case _ => new AsJavaFunction[T, R](underlying) + } + } + + + case class FromJavaIntBinaryOperator(jf: java.util.function.IntBinaryOperator) extends scala.Function2[Int, Int, Int] { + def apply(x1: scala.Int, x2: scala.Int) = jf.applyAsInt(x1, x2) + } + + class RichIntBinaryOperatorAsFunction2(private val underlying: java.util.function.IntBinaryOperator) extends AnyVal { + @inline def asScala: scala.Function2[Int, Int, Int] = underlying match { + case AsJavaIntBinaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function2[Int, Int, Int]] + case _ => new FromJavaIntBinaryOperator(underlying) + } + } + + case class AsJavaIntBinaryOperator(sf: scala.Function2[Int, Int, Int]) extends java.util.function.IntBinaryOperator { + def applyAsInt(x1: scala.Int, x2: scala.Int) = sf.apply(x1, x2) + } + + class RichFunction2AsIntBinaryOperator(private val underlying: scala.Function2[Int, Int, Int]) extends AnyVal { + @inline def asJava: java.util.function.IntBinaryOperator = underlying match { + case FromJavaIntBinaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.IntBinaryOperator] + case _ => new AsJavaIntBinaryOperator(underlying) + } + } + + + case class FromJavaIntConsumer(jf: java.util.function.IntConsumer) extends scala.Function1[Int, Unit] { + def apply(x1: scala.Int) = jf.accept(x1) + } + + class RichIntConsumerAsFunction1(private val underlying: java.util.function.IntConsumer) extends AnyVal { + @inline def asScala: scala.Function1[Int, Unit] = underlying match { + case AsJavaIntConsumer((sf @ _)) => sf.asInstanceOf[scala.Function1[Int, Unit]] + case _ => new FromJavaIntConsumer(underlying) + } + } + + case class AsJavaIntConsumer(sf: scala.Function1[Int, Unit]) extends java.util.function.IntConsumer { + def accept(x1: scala.Int) = sf.apply(x1) + } + + class RichFunction1AsIntConsumer(private val underlying: scala.Function1[Int, Unit]) extends AnyVal { + @inline def asJava: java.util.function.IntConsumer = 
underlying match { + case FromJavaIntConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.IntConsumer] + case _ => new AsJavaIntConsumer(underlying) + } + } + + + case class FromJavaIntFunction[R](jf: java.util.function.IntFunction[R]) extends scala.Function1[Int, R] { + def apply(x1: scala.Int) = jf.apply(x1) + } + + class RichIntFunctionAsFunction1[R](private val underlying: java.util.function.IntFunction[R]) extends AnyVal { + @inline def asScala: scala.Function1[Int, R] = underlying match { + case AsJavaIntFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Int, R]] + case _ => new FromJavaIntFunction[R](underlying) + } + } + + case class AsJavaIntFunction[R](sf: scala.Function1[Int, R]) extends java.util.function.IntFunction[R] { + def apply(x1: scala.Int) = sf.apply(x1) + } + + class RichFunction1AsIntFunction[R](private val underlying: scala.Function1[Int, R]) extends AnyVal { + @inline def asJava: java.util.function.IntFunction[R] = underlying match { + case FromJavaIntFunction((jf @ _)) => jf.asInstanceOf[java.util.function.IntFunction[R]] + case _ => new AsJavaIntFunction[R](underlying) + }; + @inline def asJavaIntFunction: java.util.function.IntFunction[R] = underlying match { + case FromJavaIntFunction((sf @ _)) => sf.asInstanceOf[java.util.function.IntFunction[R]] + case _ => new AsJavaIntFunction[R](underlying) + } + } + + + case class FromJavaIntPredicate(jf: java.util.function.IntPredicate) extends scala.Function1[Int, Boolean] { + def apply(x1: scala.Int) = jf.test(x1) + } + + class RichIntPredicateAsFunction1(private val underlying: java.util.function.IntPredicate) extends AnyVal { + @inline def asScala: scala.Function1[Int, Boolean] = underlying match { + case AsJavaIntPredicate((sf @ _)) => sf.asInstanceOf[scala.Function1[Int, Boolean]] + case _ => new FromJavaIntPredicate(underlying) + } + } + + case class AsJavaIntPredicate(sf: scala.Function1[Int, Boolean]) extends java.util.function.IntPredicate { + def test(x1: scala.Int) = 
sf.apply(x1) + } + + class RichFunction1AsIntPredicate(private val underlying: scala.Function1[Int, Boolean]) extends AnyVal { + @inline def asJava: java.util.function.IntPredicate = underlying match { + case FromJavaIntPredicate((jf @ _)) => jf.asInstanceOf[java.util.function.IntPredicate] + case _ => new AsJavaIntPredicate(underlying) + } + } + + + case class FromJavaIntSupplier(jf: java.util.function.IntSupplier) extends scala.Function0[Int] { + def apply() = jf.getAsInt() + } + + class RichIntSupplierAsFunction0(private val underlying: java.util.function.IntSupplier) extends AnyVal { + @inline def asScala: scala.Function0[Int] = underlying match { + case AsJavaIntSupplier((sf @ _)) => sf.asInstanceOf[scala.Function0[Int]] + case _ => new FromJavaIntSupplier(underlying) + } + } + + case class AsJavaIntSupplier(sf: scala.Function0[Int]) extends java.util.function.IntSupplier { + def getAsInt() = sf.apply() + } + + class RichFunction0AsIntSupplier(private val underlying: scala.Function0[Int]) extends AnyVal { + @inline def asJava: java.util.function.IntSupplier = underlying match { + case FromJavaIntSupplier((jf @ _)) => jf.asInstanceOf[java.util.function.IntSupplier] + case _ => new AsJavaIntSupplier(underlying) + } + } + + + case class FromJavaIntToDoubleFunction(jf: java.util.function.IntToDoubleFunction) extends scala.Function1[Int, Double] { + def apply(x1: scala.Int) = jf.applyAsDouble(x1) + } + + class RichIntToDoubleFunctionAsFunction1(private val underlying: java.util.function.IntToDoubleFunction) extends AnyVal { + @inline def asScala: scala.Function1[Int, Double] = underlying match { + case AsJavaIntToDoubleFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Int, Double]] + case _ => new FromJavaIntToDoubleFunction(underlying) + } + } + + case class AsJavaIntToDoubleFunction(sf: scala.Function1[Int, Double]) extends java.util.function.IntToDoubleFunction { + def applyAsDouble(x1: scala.Int) = sf.apply(x1) + } + + class 
RichFunction1AsIntToDoubleFunction(private val underlying: scala.Function1[Int, Double]) extends AnyVal { + @inline def asJava: java.util.function.IntToDoubleFunction = underlying match { + case FromJavaIntToDoubleFunction((jf @ _)) => jf.asInstanceOf[java.util.function.IntToDoubleFunction] + case _ => new AsJavaIntToDoubleFunction(underlying) + } + } + + + case class FromJavaIntToLongFunction(jf: java.util.function.IntToLongFunction) extends scala.Function1[Int, Long] { + def apply(x1: scala.Int) = jf.applyAsLong(x1) + } + + class RichIntToLongFunctionAsFunction1(private val underlying: java.util.function.IntToLongFunction) extends AnyVal { + @inline def asScala: scala.Function1[Int, Long] = underlying match { + case AsJavaIntToLongFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Int, Long]] + case _ => new FromJavaIntToLongFunction(underlying) + } + } + + case class AsJavaIntToLongFunction(sf: scala.Function1[Int, Long]) extends java.util.function.IntToLongFunction { + def applyAsLong(x1: scala.Int) = sf.apply(x1) + } + + class RichFunction1AsIntToLongFunction(private val underlying: scala.Function1[Int, Long]) extends AnyVal { + @inline def asJava: java.util.function.IntToLongFunction = underlying match { + case FromJavaIntToLongFunction((jf @ _)) => jf.asInstanceOf[java.util.function.IntToLongFunction] + case _ => new AsJavaIntToLongFunction(underlying) + } + } + + + case class FromJavaIntUnaryOperator(jf: java.util.function.IntUnaryOperator) extends scala.Function1[Int, Int] { + def apply(x1: scala.Int) = jf.applyAsInt(x1) + } + + class RichIntUnaryOperatorAsFunction1(private val underlying: java.util.function.IntUnaryOperator) extends AnyVal { + @inline def asScala: scala.Function1[Int, Int] = underlying match { + case AsJavaIntUnaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function1[Int, Int]] + case _ => new FromJavaIntUnaryOperator(underlying) + } + } + + case class AsJavaIntUnaryOperator(sf: scala.Function1[Int, Int]) extends 
java.util.function.IntUnaryOperator { + def applyAsInt(x1: scala.Int) = sf.apply(x1) + } + + class RichFunction1AsIntUnaryOperator(private val underlying: scala.Function1[Int, Int]) extends AnyVal { + @inline def asJava: java.util.function.IntUnaryOperator = underlying match { + case FromJavaIntUnaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.IntUnaryOperator] + case _ => new AsJavaIntUnaryOperator(underlying) + } + } + + + case class FromJavaLongBinaryOperator(jf: java.util.function.LongBinaryOperator) extends scala.Function2[Long, Long, Long] { + def apply(x1: scala.Long, x2: scala.Long) = jf.applyAsLong(x1, x2) + } + + class RichLongBinaryOperatorAsFunction2(private val underlying: java.util.function.LongBinaryOperator) extends AnyVal { + @inline def asScala: scala.Function2[Long, Long, Long] = underlying match { + case AsJavaLongBinaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function2[Long, Long, Long]] + case _ => new FromJavaLongBinaryOperator(underlying) + } + } + + case class AsJavaLongBinaryOperator(sf: scala.Function2[Long, Long, Long]) extends java.util.function.LongBinaryOperator { + def applyAsLong(x1: scala.Long, x2: scala.Long) = sf.apply(x1, x2) + } + + class RichFunction2AsLongBinaryOperator(private val underlying: scala.Function2[Long, Long, Long]) extends AnyVal { + @inline def asJava: java.util.function.LongBinaryOperator = underlying match { + case FromJavaLongBinaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.LongBinaryOperator] + case _ => new AsJavaLongBinaryOperator(underlying) + } + } + + + case class FromJavaLongConsumer(jf: java.util.function.LongConsumer) extends scala.Function1[Long, Unit] { + def apply(x1: scala.Long) = jf.accept(x1) + } + + class RichLongConsumerAsFunction1(private val underlying: java.util.function.LongConsumer) extends AnyVal { + @inline def asScala: scala.Function1[Long, Unit] = underlying match { + case AsJavaLongConsumer((sf @ _)) => sf.asInstanceOf[scala.Function1[Long, Unit]] + case 
_ => new FromJavaLongConsumer(underlying) + } + } + + case class AsJavaLongConsumer(sf: scala.Function1[Long, Unit]) extends java.util.function.LongConsumer { + def accept(x1: scala.Long) = sf.apply(x1) + } + + class RichFunction1AsLongConsumer(private val underlying: scala.Function1[Long, Unit]) extends AnyVal { + @inline def asJava: java.util.function.LongConsumer = underlying match { + case FromJavaLongConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.LongConsumer] + case _ => new AsJavaLongConsumer(underlying) + } + } + + + case class FromJavaLongFunction[R](jf: java.util.function.LongFunction[R]) extends scala.Function1[Long, R] { + def apply(x1: scala.Long) = jf.apply(x1) + } + + class RichLongFunctionAsFunction1[R](private val underlying: java.util.function.LongFunction[R]) extends AnyVal { + @inline def asScala: scala.Function1[Long, R] = underlying match { + case AsJavaLongFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Long, R]] + case _ => new FromJavaLongFunction[R](underlying) + } + } + + case class AsJavaLongFunction[R](sf: scala.Function1[Long, R]) extends java.util.function.LongFunction[R] { + def apply(x1: scala.Long) = sf.apply(x1) + } + + class RichFunction1AsLongFunction[R](private val underlying: scala.Function1[Long, R]) extends AnyVal { + @inline def asJava: java.util.function.LongFunction[R] = underlying match { + case FromJavaLongFunction((jf @ _)) => jf.asInstanceOf[java.util.function.LongFunction[R]] + case _ => new AsJavaLongFunction[R](underlying) + }; + @inline def asJavaLongFunction: java.util.function.LongFunction[R] = underlying match { + case FromJavaLongFunction((sf @ _)) => sf.asInstanceOf[java.util.function.LongFunction[R]] + case _ => new AsJavaLongFunction[R](underlying) + } + } + + + case class FromJavaLongPredicate(jf: java.util.function.LongPredicate) extends scala.Function1[Long, Boolean] { + def apply(x1: scala.Long) = jf.test(x1) + } + + class RichLongPredicateAsFunction1(private val underlying: 
java.util.function.LongPredicate) extends AnyVal { + @inline def asScala: scala.Function1[Long, Boolean] = underlying match { + case AsJavaLongPredicate((sf @ _)) => sf.asInstanceOf[scala.Function1[Long, Boolean]] + case _ => new FromJavaLongPredicate(underlying) + } + } + + case class AsJavaLongPredicate(sf: scala.Function1[Long, Boolean]) extends java.util.function.LongPredicate { + def test(x1: scala.Long) = sf.apply(x1) + } + + class RichFunction1AsLongPredicate(private val underlying: scala.Function1[Long, Boolean]) extends AnyVal { + @inline def asJava: java.util.function.LongPredicate = underlying match { + case FromJavaLongPredicate((jf @ _)) => jf.asInstanceOf[java.util.function.LongPredicate] + case _ => new AsJavaLongPredicate(underlying) + } + } + + + case class FromJavaLongSupplier(jf: java.util.function.LongSupplier) extends scala.Function0[Long] { + def apply() = jf.getAsLong() + } + + class RichLongSupplierAsFunction0(private val underlying: java.util.function.LongSupplier) extends AnyVal { + @inline def asScala: scala.Function0[Long] = underlying match { + case AsJavaLongSupplier((sf @ _)) => sf.asInstanceOf[scala.Function0[Long]] + case _ => new FromJavaLongSupplier(underlying) + } + } + + case class AsJavaLongSupplier(sf: scala.Function0[Long]) extends java.util.function.LongSupplier { + def getAsLong() = sf.apply() + } + + class RichFunction0AsLongSupplier(private val underlying: scala.Function0[Long]) extends AnyVal { + @inline def asJava: java.util.function.LongSupplier = underlying match { + case FromJavaLongSupplier((jf @ _)) => jf.asInstanceOf[java.util.function.LongSupplier] + case _ => new AsJavaLongSupplier(underlying) + } + } + + + case class FromJavaLongToDoubleFunction(jf: java.util.function.LongToDoubleFunction) extends scala.Function1[Long, Double] { + def apply(x1: scala.Long) = jf.applyAsDouble(x1) + } + + class RichLongToDoubleFunctionAsFunction1(private val underlying: java.util.function.LongToDoubleFunction) extends AnyVal { + 
@inline def asScala: scala.Function1[Long, Double] = underlying match { + case AsJavaLongToDoubleFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Long, Double]] + case _ => new FromJavaLongToDoubleFunction(underlying) + } + } + + case class AsJavaLongToDoubleFunction(sf: scala.Function1[Long, Double]) extends java.util.function.LongToDoubleFunction { + def applyAsDouble(x1: scala.Long) = sf.apply(x1) + } + + class RichFunction1AsLongToDoubleFunction(private val underlying: scala.Function1[Long, Double]) extends AnyVal { + @inline def asJava: java.util.function.LongToDoubleFunction = underlying match { + case FromJavaLongToDoubleFunction((jf @ _)) => jf.asInstanceOf[java.util.function.LongToDoubleFunction] + case _ => new AsJavaLongToDoubleFunction(underlying) + } + } + + + case class FromJavaLongToIntFunction(jf: java.util.function.LongToIntFunction) extends scala.Function1[Long, Int] { + def apply(x1: scala.Long) = jf.applyAsInt(x1) + } + + class RichLongToIntFunctionAsFunction1(private val underlying: java.util.function.LongToIntFunction) extends AnyVal { + @inline def asScala: scala.Function1[Long, Int] = underlying match { + case AsJavaLongToIntFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[Long, Int]] + case _ => new FromJavaLongToIntFunction(underlying) + } + } + + case class AsJavaLongToIntFunction(sf: scala.Function1[Long, Int]) extends java.util.function.LongToIntFunction { + def applyAsInt(x1: scala.Long) = sf.apply(x1) + } + + class RichFunction1AsLongToIntFunction(private val underlying: scala.Function1[Long, Int]) extends AnyVal { + @inline def asJava: java.util.function.LongToIntFunction = underlying match { + case FromJavaLongToIntFunction((jf @ _)) => jf.asInstanceOf[java.util.function.LongToIntFunction] + case _ => new AsJavaLongToIntFunction(underlying) + } + } + + + case class FromJavaLongUnaryOperator(jf: java.util.function.LongUnaryOperator) extends scala.Function1[Long, Long] { + def apply(x1: scala.Long) = jf.applyAsLong(x1) + } + 
+ class RichLongUnaryOperatorAsFunction1(private val underlying: java.util.function.LongUnaryOperator) extends AnyVal { + @inline def asScala: scala.Function1[Long, Long] = underlying match { + case AsJavaLongUnaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function1[Long, Long]] + case _ => new FromJavaLongUnaryOperator(underlying) + } + } + + case class AsJavaLongUnaryOperator(sf: scala.Function1[Long, Long]) extends java.util.function.LongUnaryOperator { + def applyAsLong(x1: scala.Long) = sf.apply(x1) + } + + class RichFunction1AsLongUnaryOperator(private val underlying: scala.Function1[Long, Long]) extends AnyVal { + @inline def asJava: java.util.function.LongUnaryOperator = underlying match { + case FromJavaLongUnaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.LongUnaryOperator] + case _ => new AsJavaLongUnaryOperator(underlying) + } + } + + + case class FromJavaObjDoubleConsumer[T](jf: java.util.function.ObjDoubleConsumer[T]) extends scala.Function2[T, Double, Unit] { + def apply(x1: T, x2: scala.Double) = jf.accept(x1, x2) + } + + class RichObjDoubleConsumerAsFunction2[T](private val underlying: java.util.function.ObjDoubleConsumer[T]) extends AnyVal { + @inline def asScala: scala.Function2[T, Double, Unit] = underlying match { + case AsJavaObjDoubleConsumer((sf @ _)) => sf.asInstanceOf[scala.Function2[T, Double, Unit]] + case _ => new FromJavaObjDoubleConsumer[T](underlying) + } + } + + case class AsJavaObjDoubleConsumer[T](sf: scala.Function2[T, Double, Unit]) extends java.util.function.ObjDoubleConsumer[T] { + def accept(x1: T, x2: scala.Double) = sf.apply(x1, x2) + } + + class RichFunction2AsObjDoubleConsumer[T](private val underlying: scala.Function2[T, Double, Unit]) extends AnyVal { + @inline def asJava: java.util.function.ObjDoubleConsumer[T] = underlying match { + case FromJavaObjDoubleConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.ObjDoubleConsumer[T]] + case _ => new AsJavaObjDoubleConsumer[T](underlying) + }; + @inline def 
asJavaObjDoubleConsumer: java.util.function.ObjDoubleConsumer[T] = underlying match { + case FromJavaObjDoubleConsumer((sf @ _)) => sf.asInstanceOf[java.util.function.ObjDoubleConsumer[T]] + case _ => new AsJavaObjDoubleConsumer[T](underlying) + } + } + + + case class FromJavaObjIntConsumer[T](jf: java.util.function.ObjIntConsumer[T]) extends scala.Function2[T, Int, Unit] { + def apply(x1: T, x2: scala.Int) = jf.accept(x1, x2) + } + + class RichObjIntConsumerAsFunction2[T](private val underlying: java.util.function.ObjIntConsumer[T]) extends AnyVal { + @inline def asScala: scala.Function2[T, Int, Unit] = underlying match { + case AsJavaObjIntConsumer((sf @ _)) => sf.asInstanceOf[scala.Function2[T, Int, Unit]] + case _ => new FromJavaObjIntConsumer[T](underlying) + } + } + + case class AsJavaObjIntConsumer[T](sf: scala.Function2[T, Int, Unit]) extends java.util.function.ObjIntConsumer[T] { + def accept(x1: T, x2: scala.Int) = sf.apply(x1, x2) + } + + class RichFunction2AsObjIntConsumer[T](private val underlying: scala.Function2[T, Int, Unit]) extends AnyVal { + @inline def asJava: java.util.function.ObjIntConsumer[T] = underlying match { + case FromJavaObjIntConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.ObjIntConsumer[T]] + case _ => new AsJavaObjIntConsumer[T](underlying) + }; + @inline def asJavaObjIntConsumer: java.util.function.ObjIntConsumer[T] = underlying match { + case FromJavaObjIntConsumer((sf @ _)) => sf.asInstanceOf[java.util.function.ObjIntConsumer[T]] + case _ => new AsJavaObjIntConsumer[T](underlying) + } + } + + + case class FromJavaObjLongConsumer[T](jf: java.util.function.ObjLongConsumer[T]) extends scala.Function2[T, Long, Unit] { + def apply(x1: T, x2: scala.Long) = jf.accept(x1, x2) + } + + class RichObjLongConsumerAsFunction2[T](private val underlying: java.util.function.ObjLongConsumer[T]) extends AnyVal { + @inline def asScala: scala.Function2[T, Long, Unit] = underlying match { + case AsJavaObjLongConsumer((sf @ _)) => 
sf.asInstanceOf[scala.Function2[T, Long, Unit]] + case _ => new FromJavaObjLongConsumer[T](underlying) + } + } + + case class AsJavaObjLongConsumer[T](sf: scala.Function2[T, Long, Unit]) extends java.util.function.ObjLongConsumer[T] { + def accept(x1: T, x2: scala.Long) = sf.apply(x1, x2) + } + + class RichFunction2AsObjLongConsumer[T](private val underlying: scala.Function2[T, Long, Unit]) extends AnyVal { + @inline def asJava: java.util.function.ObjLongConsumer[T] = underlying match { + case FromJavaObjLongConsumer((jf @ _)) => jf.asInstanceOf[java.util.function.ObjLongConsumer[T]] + case _ => new AsJavaObjLongConsumer[T](underlying) + }; + @inline def asJavaObjLongConsumer: java.util.function.ObjLongConsumer[T] = underlying match { + case FromJavaObjLongConsumer((sf @ _)) => sf.asInstanceOf[java.util.function.ObjLongConsumer[T]] + case _ => new AsJavaObjLongConsumer[T](underlying) + } + } + + + case class FromJavaPredicate[T](jf: java.util.function.Predicate[T]) extends scala.Function1[T, Boolean] { + def apply(x1: T) = jf.test(x1) + } + + class RichPredicateAsFunction1[T](private val underlying: java.util.function.Predicate[T]) extends AnyVal { + @inline def asScala: scala.Function1[T, Boolean] = underlying match { + case AsJavaPredicate((sf @ _)) => sf.asInstanceOf[scala.Function1[T, Boolean]] + case _ => new FromJavaPredicate[T](underlying) + } + } + + case class AsJavaPredicate[T](sf: scala.Function1[T, Boolean]) extends java.util.function.Predicate[T] { + def test(x1: T) = sf.apply(x1) + } + + class RichFunction1AsPredicate[T](private val underlying: scala.Function1[T, Boolean]) extends AnyVal { + @inline def asJava: java.util.function.Predicate[T] = underlying match { + case FromJavaPredicate((jf @ _)) => jf.asInstanceOf[java.util.function.Predicate[T]] + case _ => new AsJavaPredicate[T](underlying) + }; + @inline def asJavaPredicate: java.util.function.Predicate[T] = underlying match { + case FromJavaPredicate((sf @ _)) => 
sf.asInstanceOf[java.util.function.Predicate[T]] + case _ => new AsJavaPredicate[T](underlying) + } + } + + + case class FromJavaSupplier[T](jf: java.util.function.Supplier[T]) extends scala.Function0[T] { + def apply() = jf.get() + } + + class RichSupplierAsFunction0[T](private val underlying: java.util.function.Supplier[T]) extends AnyVal { + @inline def asScala: scala.Function0[T] = underlying match { + case AsJavaSupplier((sf @ _)) => sf.asInstanceOf[scala.Function0[T]] + case _ => new FromJavaSupplier[T](underlying) + } + } + + case class AsJavaSupplier[T](sf: scala.Function0[T]) extends java.util.function.Supplier[T] { + def get() = sf.apply() + } + + class RichFunction0AsSupplier[T](private val underlying: scala.Function0[T]) extends AnyVal { + @inline def asJava: java.util.function.Supplier[T] = underlying match { + case FromJavaSupplier((jf @ _)) => jf.asInstanceOf[java.util.function.Supplier[T]] + case _ => new AsJavaSupplier[T](underlying) + }; + @inline def asJavaSupplier: java.util.function.Supplier[T] = underlying match { + case FromJavaSupplier((sf @ _)) => sf.asInstanceOf[java.util.function.Supplier[T]] + case _ => new AsJavaSupplier[T](underlying) + } + } + + + case class FromJavaToDoubleBiFunction[T, U](jf: java.util.function.ToDoubleBiFunction[T, U]) extends scala.Function2[T, U, Double] { + def apply(x1: T, x2: U) = jf.applyAsDouble(x1, x2) + } + + class RichToDoubleBiFunctionAsFunction2[T, U](private val underlying: java.util.function.ToDoubleBiFunction[T, U]) extends AnyVal { + @inline def asScala: scala.Function2[T, U, Double] = underlying match { + case AsJavaToDoubleBiFunction((sf @ _)) => sf.asInstanceOf[scala.Function2[T, U, Double]] + case _ => new FromJavaToDoubleBiFunction[T, U](underlying) + } + } + + case class AsJavaToDoubleBiFunction[T, U](sf: scala.Function2[T, U, Double]) extends java.util.function.ToDoubleBiFunction[T, U] { + def applyAsDouble(x1: T, x2: U) = sf.apply(x1, x2) + } + + class RichFunction2AsToDoubleBiFunction[T, 
U](private val underlying: scala.Function2[T, U, Double]) extends AnyVal { + @inline def asJava: java.util.function.ToDoubleBiFunction[T, U] = underlying match { + case FromJavaToDoubleBiFunction((jf @ _)) => jf.asInstanceOf[java.util.function.ToDoubleBiFunction[T, U]] + case _ => new AsJavaToDoubleBiFunction[T, U](underlying) + }; + @inline def asJavaToDoubleBiFunction: java.util.function.ToDoubleBiFunction[T, U] = underlying match { + case FromJavaToDoubleBiFunction((sf @ _)) => sf.asInstanceOf[java.util.function.ToDoubleBiFunction[T, U]] + case _ => new AsJavaToDoubleBiFunction[T, U](underlying) + } + } + + + case class FromJavaToDoubleFunction[T](jf: java.util.function.ToDoubleFunction[T]) extends scala.Function1[T, Double] { + def apply(x1: T) = jf.applyAsDouble(x1) + } + + class RichToDoubleFunctionAsFunction1[T](private val underlying: java.util.function.ToDoubleFunction[T]) extends AnyVal { + @inline def asScala: scala.Function1[T, Double] = underlying match { + case AsJavaToDoubleFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[T, Double]] + case _ => new FromJavaToDoubleFunction[T](underlying) + } + } + + case class AsJavaToDoubleFunction[T](sf: scala.Function1[T, Double]) extends java.util.function.ToDoubleFunction[T] { + def applyAsDouble(x1: T) = sf.apply(x1) + } + + class RichFunction1AsToDoubleFunction[T](private val underlying: scala.Function1[T, Double]) extends AnyVal { + @inline def asJava: java.util.function.ToDoubleFunction[T] = underlying match { + case FromJavaToDoubleFunction((jf @ _)) => jf.asInstanceOf[java.util.function.ToDoubleFunction[T]] + case _ => new AsJavaToDoubleFunction[T](underlying) + }; + @inline def asJavaToDoubleFunction: java.util.function.ToDoubleFunction[T] = underlying match { + case FromJavaToDoubleFunction((sf @ _)) => sf.asInstanceOf[java.util.function.ToDoubleFunction[T]] + case _ => new AsJavaToDoubleFunction[T](underlying) + } + } + + + case class FromJavaToIntBiFunction[T, U](jf: 
java.util.function.ToIntBiFunction[T, U]) extends scala.Function2[T, U, Int] { + def apply(x1: T, x2: U) = jf.applyAsInt(x1, x2) + } + + class RichToIntBiFunctionAsFunction2[T, U](private val underlying: java.util.function.ToIntBiFunction[T, U]) extends AnyVal { + @inline def asScala: scala.Function2[T, U, Int] = underlying match { + case AsJavaToIntBiFunction((sf @ _)) => sf.asInstanceOf[scala.Function2[T, U, Int]] + case _ => new FromJavaToIntBiFunction[T, U](underlying) + } + } + + case class AsJavaToIntBiFunction[T, U](sf: scala.Function2[T, U, Int]) extends java.util.function.ToIntBiFunction[T, U] { + def applyAsInt(x1: T, x2: U) = sf.apply(x1, x2) + } + + class RichFunction2AsToIntBiFunction[T, U](private val underlying: scala.Function2[T, U, Int]) extends AnyVal { + @inline def asJava: java.util.function.ToIntBiFunction[T, U] = underlying match { + case FromJavaToIntBiFunction((jf @ _)) => jf.asInstanceOf[java.util.function.ToIntBiFunction[T, U]] + case _ => new AsJavaToIntBiFunction[T, U](underlying) + }; + @inline def asJavaToIntBiFunction: java.util.function.ToIntBiFunction[T, U] = underlying match { + case FromJavaToIntBiFunction((sf @ _)) => sf.asInstanceOf[java.util.function.ToIntBiFunction[T, U]] + case _ => new AsJavaToIntBiFunction[T, U](underlying) + } + } + + + case class FromJavaToIntFunction[T](jf: java.util.function.ToIntFunction[T]) extends scala.Function1[T, Int] { + def apply(x1: T) = jf.applyAsInt(x1) + } + + class RichToIntFunctionAsFunction1[T](private val underlying: java.util.function.ToIntFunction[T]) extends AnyVal { + @inline def asScala: scala.Function1[T, Int] = underlying match { + case AsJavaToIntFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[T, Int]] + case _ => new FromJavaToIntFunction[T](underlying) + } + } + + case class AsJavaToIntFunction[T](sf: scala.Function1[T, Int]) extends java.util.function.ToIntFunction[T] { + def applyAsInt(x1: T) = sf.apply(x1) + } + + class RichFunction1AsToIntFunction[T](private val 
underlying: scala.Function1[T, Int]) extends AnyVal { + @inline def asJava: java.util.function.ToIntFunction[T] = underlying match { + case FromJavaToIntFunction((jf @ _)) => jf.asInstanceOf[java.util.function.ToIntFunction[T]] + case _ => new AsJavaToIntFunction[T](underlying) + }; + @inline def asJavaToIntFunction: java.util.function.ToIntFunction[T] = underlying match { + case FromJavaToIntFunction((sf @ _)) => sf.asInstanceOf[java.util.function.ToIntFunction[T]] + case _ => new AsJavaToIntFunction[T](underlying) + } + } + + + case class FromJavaToLongBiFunction[T, U](jf: java.util.function.ToLongBiFunction[T, U]) extends scala.Function2[T, U, Long] { + def apply(x1: T, x2: U) = jf.applyAsLong(x1, x2) + } + + class RichToLongBiFunctionAsFunction2[T, U](private val underlying: java.util.function.ToLongBiFunction[T, U]) extends AnyVal { + @inline def asScala: scala.Function2[T, U, Long] = underlying match { + case AsJavaToLongBiFunction((sf @ _)) => sf.asInstanceOf[scala.Function2[T, U, Long]] + case _ => new FromJavaToLongBiFunction[T, U](underlying) + } + } + + case class AsJavaToLongBiFunction[T, U](sf: scala.Function2[T, U, Long]) extends java.util.function.ToLongBiFunction[T, U] { + def applyAsLong(x1: T, x2: U) = sf.apply(x1, x2) + } + + class RichFunction2AsToLongBiFunction[T, U](private val underlying: scala.Function2[T, U, Long]) extends AnyVal { + @inline def asJava: java.util.function.ToLongBiFunction[T, U] = underlying match { + case FromJavaToLongBiFunction((jf @ _)) => jf.asInstanceOf[java.util.function.ToLongBiFunction[T, U]] + case _ => new AsJavaToLongBiFunction[T, U](underlying) + }; + @inline def asJavaToLongBiFunction: java.util.function.ToLongBiFunction[T, U] = underlying match { + case FromJavaToLongBiFunction((sf @ _)) => sf.asInstanceOf[java.util.function.ToLongBiFunction[T, U]] + case _ => new AsJavaToLongBiFunction[T, U](underlying) + } + } + + + case class FromJavaToLongFunction[T](jf: java.util.function.ToLongFunction[T]) extends 
scala.Function1[T, Long] { + def apply(x1: T) = jf.applyAsLong(x1) + } + + class RichToLongFunctionAsFunction1[T](private val underlying: java.util.function.ToLongFunction[T]) extends AnyVal { + @inline def asScala: scala.Function1[T, Long] = underlying match { + case AsJavaToLongFunction((sf @ _)) => sf.asInstanceOf[scala.Function1[T, Long]] + case _ => new FromJavaToLongFunction[T](underlying) + } + } + + case class AsJavaToLongFunction[T](sf: scala.Function1[T, Long]) extends java.util.function.ToLongFunction[T] { + def applyAsLong(x1: T) = sf.apply(x1) + } + + class RichFunction1AsToLongFunction[T](private val underlying: scala.Function1[T, Long]) extends AnyVal { + @inline def asJava: java.util.function.ToLongFunction[T] = underlying match { + case FromJavaToLongFunction((jf @ _)) => jf.asInstanceOf[java.util.function.ToLongFunction[T]] + case _ => new AsJavaToLongFunction[T](underlying) + }; + @inline def asJavaToLongFunction: java.util.function.ToLongFunction[T] = underlying match { + case FromJavaToLongFunction((sf @ _)) => sf.asInstanceOf[java.util.function.ToLongFunction[T]] + case _ => new AsJavaToLongFunction[T](underlying) + } + } + + + case class FromJavaUnaryOperator[T](jf: java.util.function.UnaryOperator[T]) extends scala.Function1[T, T] { + def apply(x1: T) = jf.apply(x1) + } + + class RichUnaryOperatorAsFunction1[T](private val underlying: java.util.function.UnaryOperator[T]) extends AnyVal { + @inline def asScala: scala.Function1[T, T] = underlying match { + case AsJavaUnaryOperator((sf @ _)) => sf.asInstanceOf[scala.Function1[T, T]] + case _ => new FromJavaUnaryOperator[T](underlying) + } + } + + case class AsJavaUnaryOperator[T](sf: scala.Function1[T, T]) extends java.util.function.UnaryOperator[T] { + def apply(x1: T) = sf.apply(x1) + } + + class RichFunction1AsUnaryOperator[T](private val underlying: scala.Function1[T, T]) extends AnyVal { + @inline def asJava: java.util.function.UnaryOperator[T] = underlying match { + case 
FromJavaUnaryOperator((jf @ _)) => jf.asInstanceOf[java.util.function.UnaryOperator[T]] + case _ => new AsJavaUnaryOperator[T](underlying) + }; + @inline def asJavaUnaryOperator: java.util.function.UnaryOperator[T] = underlying match { + case FromJavaUnaryOperator((sf @ _)) => sf.asInstanceOf[java.util.function.UnaryOperator[T]] + case _ => new AsJavaUnaryOperator[T](underlying) + } + } +} diff --git a/src/library/scala/jdk/FutureConverters.scala b/src/library/scala/jdk/FutureConverters.scala new file mode 100644 index 000000000000..9b9b6ad3c8b6 --- /dev/null +++ b/src/library/scala/jdk/FutureConverters.scala @@ -0,0 +1,43 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.util.concurrent.CompletionStage + +import scala.concurrent.Future + +/** This object provides extension methods that convert between Scala [[scala.concurrent.Future]] and Java + * [[java.util.concurrent.CompletionStage]] + * + * When writing Java code, use the explicit conversion methods defined in + * [[javaapi.FutureConverters]] instead. + * + * Note that the bridge is implemented at the read-only side of asynchronous handles, namely + * [[scala.concurrent.Future]] (instead of [[scala.concurrent.Promise]]) and [[java.util.concurrent.CompletionStage]] (instead of + * [[java.util.concurrent.CompletableFuture]]). This is intentional, as the semantics of bridging + * the write-handles would be prone to race conditions; if both ends (`CompletableFuture` and + * `Promise`) are completed independently at the same time, they may contain different values + * afterwards. For this reason, `toCompletableFuture` is not supported on the created + * `CompletionStage`s. 
+ */ +object FutureConverters { + implicit class FutureOps[T](private val f: Future[T]) extends AnyVal { + /** Convert a Scala Future to a Java CompletionStage, see [[javaapi.FutureConverters.asJava]]. */ + def asJava: CompletionStage[T] = javaapi.FutureConverters.asJava(f) + } + + implicit class CompletionStageOps[T](private val cs: CompletionStage[T]) extends AnyVal { + /** Convert a Java CompletionStage to a Scala Future, see [[javaapi.FutureConverters.asScala]]. */ + def asScala: Future[T] = javaapi.FutureConverters.asScala(cs) + } +} diff --git a/src/library/scala/jdk/IntAccumulator.scala b/src/library/scala/jdk/IntAccumulator.scala new file mode 100644 index 000000000000..9b7a904b36e3 --- /dev/null +++ b/src/library/scala/jdk/IntAccumulator.scala @@ -0,0 +1,493 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.util.Spliterator +import java.util.function.{Consumer, IntConsumer} +import java.{lang => jl} + +import scala.annotation._ +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AnyStepper, Factory, IntStepper, SeqFactory, Stepper, StepperShape, mutable} +import scala.language.implicitConversions + +/** A specialized Accumulator that holds `Int`s without boxing, see [[Accumulator]]. 
*/ +final class IntAccumulator + extends Accumulator[Int, AnyAccumulator, IntAccumulator] + with mutable.SeqOps[Int, AnyAccumulator, IntAccumulator] + with Serializable { + private[jdk] var current: Array[Int] = IntAccumulator.emptyIntArray + private[jdk] var history: Array[Array[Int]] = IntAccumulator.emptyIntArrayArray + + private[jdk] def cumulative(i: Int) = { val x = history(i); x(x.length-2).toLong << 32 | (x(x.length-1)&0xFFFFFFFFL) } + + override protected[this] def className: String = "IntAccumulator" + + def efficientStepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = new IntAccumulatorStepper(this) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private def expand(): Unit = { + if (index > 0) { + val cuml = (if (hIndex > 0) cumulative(hIndex-1) else 0) + index + current(current.length-2) = (cuml >>> 32).toInt + current(current.length-1) = (cuml & 0xFFFFFFFFL).toInt + if (hIndex >= history.length) hExpand() + history(hIndex) = current + hIndex += 1 + } + current = new Array[Int](nextBlockSize+1) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) history = new Array[Array[Int]](4) + else history = java.util.Arrays.copyOf(history, history.length << 1) + } + + /** Appends an element to this `IntAccumulator`. */ + def addOne(a: Int): this.type = { + totalSize += 1 + if (index+2 >= current.length) expand() + current(index) = a + index += 1 + this + } + + /** Result collection consisting of all elements appended so far. */ + override def result(): IntAccumulator = this + + /** Removes all elements from `that` and appends them to this `IntAccumulator`. 
*/ + def drain(that: IntAccumulator): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val cuml = that.cumulative(h) + val n = (cuml - prev).toInt + if (current.length - index - 2 >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = cuml + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index - 2 >= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - jl.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + } + var pv = if (hIndex > 0) cumulative(hIndex-1) else 0L + if (index > 0) { + val x = + if (index < (current.length >>> 3) && current.length - 1 > 32) { + val ans = java.util.Arrays.copyOf(current, index + 2) + ans(ans.length - 2) = current(current.length - 2) + ans(ans.length - 1) = current(current.length - 1) + ans + } + else current + pv = pv + index + x(x.length - 2) = (pv >>> 32).toInt + x(x.length - 1) = (pv & 0xFFFFFFFFL).toInt + history(hIndex) = x + hIndex += 1 + } + while (h < that.hIndex) { + val cuml = that.cumulative(h) + pv = pv + cuml - prev + prev = cuml + val x = that.history(h) + x(x.length - 2) = (pv >>> 32).toInt + x(x.length - 1) = (pv & 0xFFFFFFFFL).toInt + history(hIndex) = x + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear() + } + + override def clear(): Unit = { + super.clear() + current = IntAccumulator.emptyIntArray + history = IntAccumulator.emptyIntArrayArray + } + + /** Retrieves the `ix`th element. 
*/ + def apply(ix: Long): Int = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) + } + } + + /** Retrieves the `ix`th element, using an `Int` index. */ + def apply(i: Int): Int = apply(i.toLong) + + def update(idx: Long, elem: Int): Unit = { + if (totalSize - idx <= index || hIndex == 0) current((idx - (totalSize - index)).toInt) = elem + else { + val w = seekSlot(idx) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) = elem + } + } + + def update(idx: Int, elem: Int): Unit = update(idx.toLong, elem) + + /** Returns an `Iterator` over the contents of this `IntAccumulator`. The `Iterator` is not specialized. */ + def iterator: Iterator[Int] = stepper.iterator + + override def foreach[U](f: Int => U): Unit = { + val s = stepper + while (s.hasStep) f(s.nextStep()) + } + + def map(f: Int => Int): IntAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) + b.addOne(f(s.nextStep())) + b.result() + } + + def flatMap(f: Int => IterableOnce[Int]): IntAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) + b.addAll(f(s.nextStep())) + b.result() + } + + def collect(pf: PartialFunction[Int, Int]): IntAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) { + val n = s.nextStep() + pf.runWith(b.addOne)(n) + } + b.result() + } + + private def filterAccImpl(pred: Int => Boolean, not: Boolean): IntAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) { + val n = s.nextStep() + if (pred(n) != not) b.addOne(n) + } + b.result() + } + + override def filter(pred: Int => Boolean): IntAccumulator = filterAccImpl(pred, not = false) + + override def filterNot(pred: Int => Boolean): IntAccumulator = filterAccImpl(pred, not = true) + + override def forall(p: Int => Boolean): Boolean = { + val s = stepper + while (s.hasStep) + if (!p(s.nextStep())) 
return false + true + } + + override def exists(p: Int => Boolean): Boolean = { + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) return true + false + } + + override def count(p: Int => Boolean): Int = { + var r = 0 + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + def countLong(p: Int => Boolean): Long = { + var r = 0L + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + /** Copies the elements in this `IntAccumulator` into an `Array[Int]` */ + @nowarn // cat=lint-overload see toArray[B: ClassTag] + def toArray: Array[Int] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[Int](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = cumulative(h) + val n = (cuml - pv).toInt + pv = cuml + System.arraycopy(x, 0, a, j, n) + j += n + h += 1 + } + System.arraycopy(current, 0, a, j, index) + j += index + a + } + + /** Copies the elements in this `IntAccumulator` to a `List` */ + override def toList: List[Int] = { + var ans: List[Int] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i) :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i) :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** + * Copy the elements in this `IntAccumulator` to a specified collection. + * Note that the target collection is not specialized. 
+ * Usage example: `acc.to(Vector)` + */ + override def to[C1](factory: Factory[Int, C1]): C1 = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + factory.fromSpecific(iterator) + } + + override protected def fromSpecific(coll: IterableOnce[Int]): IntAccumulator = IntAccumulator.fromSpecific(coll) + override protected def newSpecificBuilder: IntAccumulator = IntAccumulator.newBuilder + override def iterableFactory: SeqFactory[AnyAccumulator] = AnyAccumulator + + override def empty: IntAccumulator = IntAccumulator.empty + + private def writeReplace(): AnyRef = new IntAccumulator.SerializationProxy(this) +} + +object IntAccumulator extends collection.SpecificIterableFactory[Int, IntAccumulator] { + private val emptyIntArray = new Array[Int](0) + private val emptyIntArrayArray = new Array[Array[Int]](0) + + implicit def toJavaIntegerAccumulator(ia: IntAccumulator.type): collection.SpecificIterableFactory[jl.Integer, IntAccumulator] = IntAccumulator.asInstanceOf[collection.SpecificIterableFactory[jl.Integer, IntAccumulator]] + + import java.util.{function => jf} + + /** A `Supplier` of `IntAccumulator`s, suitable for use with `java.util.stream.IntStream`'s `collect` method. Suitable for `Stream[Int]` also. */ + def supplier: jf.Supplier[IntAccumulator] = () => new IntAccumulator + + /** A `BiConsumer` that adds an element to an `IntAccumulator`, suitable for use with `java.util.stream.IntStream`'s `collect` method. */ + def adder: jf.ObjIntConsumer[IntAccumulator] = (ac: IntAccumulator, a: Int) => ac addOne a + + /** A `BiConsumer` that adds a boxed `Int` to an `IntAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def boxedAdder: jf.BiConsumer[IntAccumulator, Int] = (ac: IntAccumulator, a: Int) => ac addOne a + + /** A `BiConsumer` that merges `IntAccumulator`s, suitable for use with `java.util.stream.IntStream`'s `collect` method. 
Suitable for `Stream[Int]` also. */ + def merger: jf.BiConsumer[IntAccumulator, IntAccumulator] = (a1: IntAccumulator, a2: IntAccumulator) => a1 drain a2 + + private def fromArray(a: Array[Int]): IntAccumulator = { + val r = new IntAccumulator + var i = 0 + while (i < a.length) { r addOne a(i); i += 1 } + r + } + + override def fromSpecific(it: IterableOnce[Int]): IntAccumulator = it match { + case acc: IntAccumulator => acc + case as: collection.immutable.ArraySeq.ofInt => fromArray(as.unsafeArray) + case as: collection.mutable.ArraySeq.ofInt => fromArray(as.array) // this case ensures Array(1).to(Accumulator) doesn't box + case _ => (new IntAccumulator).addAll(it) + } + + override def empty: IntAccumulator = new IntAccumulator + + override def newBuilder: IntAccumulator = new IntAccumulator + + class SerializationProxy[A](@transient private val acc: IntAccumulator) extends Serializable { + @transient private var result: IntAccumulator = _ + + private def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val size = acc.sizeLong + out.writeLong(size) + val st = acc.stepper + while (st.hasStep) + out.writeInt(st.nextStep()) + } + + private def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val res = new IntAccumulator() + var elems = in.readLong() + while (elems > 0) { + res += in.readInt() + elems -= 1L + } + result = res + } + + private def readResolve(): AnyRef = result + } +} + +private[jdk] class IntAccumulatorStepper(private val acc: IntAccumulator) extends IntStepper with EfficientSplit { + import java.util.Spliterator._ + + private var h: Int = 0 + private var i: Int = 0 + private var a: Array[Int] = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n: Long = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N: Long = acc.totalSize + + private def duplicateSelf(limit: Long): IntAccumulatorStepper = { + val ans = new IntAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = 
a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics: Int = ORDERED | SIZED | SUBSIZED | NONNULL + + def estimateSize: Long = N + + def hasStep: Boolean = N > 0 + + def nextStep(): Int = + if (N <= 0) throw new NoSuchElementException("next on empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i) + i += 1 + N -= 1 + ans + } + + def trySplit(): IntStepper = + if (N <= 1) null + else { + val half = N >> 1 + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } + + override def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) { + // Overridden for efficiency + override def tryAdvance(c: IntConsumer): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { + case ic: IntConsumer => tryAdvance(ic) + case _ => + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i)) + i += 1 + N -= 1 + true + } + } + + // Overridden for efficiency + override def forEachRemaining(c: IntConsumer): Unit = + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + c.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + + // Overridden for efficiency + override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { + case ic: IntConsumer 
=> forEachRemaining(ic) + case _ => + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + c.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + } + } +} diff --git a/src/library/scala/jdk/LongAccumulator.scala b/src/library/scala/jdk/LongAccumulator.scala new file mode 100644 index 000000000000..38b868ae1111 --- /dev/null +++ b/src/library/scala/jdk/LongAccumulator.scala @@ -0,0 +1,488 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.util.Spliterator +import java.util.function.{Consumer, LongConsumer} +import java.{lang => jl} + +import scala.annotation._ +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AnyStepper, Factory, LongStepper, SeqFactory, Stepper, StepperShape, mutable} +import scala.language.implicitConversions + +/** A specialized Accumulator that holds `Long`s without boxing, see [[Accumulator]]. 
*/ +final class LongAccumulator + extends Accumulator[Long, AnyAccumulator, LongAccumulator] + with mutable.SeqOps[Long, AnyAccumulator, LongAccumulator] + with Serializable { + private[jdk] var current: Array[Long] = LongAccumulator.emptyLongArray + private[jdk] var history: Array[Array[Long]] = LongAccumulator.emptyLongArrayArray + + private[jdk] def cumulative(i: Int) = { val x = history(i); x(x.length-1) } + + override protected[this] def className: String = "LongAccumulator" + + def efficientStepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = { + val st = new LongAccumulatorStepper(this) + val r = + if (shape.shape == StepperShape.LongShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParLongStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private def expand(): Unit = { + if (index > 0) { + current(current.length-1) = (if (hIndex > 0) { val x = history(hIndex-1); x(x.length-1) } else 0) + index + if (hIndex >= history.length) hExpand() + history(hIndex) = current + hIndex += 1 + } + current = new Array[Long](nextBlockSize+1) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) history = new Array[Array[Long]](4) + else history = java.util.Arrays.copyOf(history, history.length << 1) + } + + /** Appends an element to this `LongAccumulator`. */ + def addOne(a: Long): this.type = { + totalSize += 1 + if (index+1 >= current.length) expand() + current(index) = a + index += 1 + this + } + + /** Result collection consisting of all elements appended so far. */ + override def result(): LongAccumulator = this + + /** Removes all elements from `that` and appends them to this `LongAccumulator`. 
*/ + def drain(that: LongAccumulator): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val cuml = that.cumulative(h) + val n = (cuml - prev).toInt + if (current.length - index - 1 >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = cuml + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index - 1>= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - jl.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + } + var pv = if (hIndex > 0) cumulative(hIndex-1) else 0L + if (index > 0) { + val x = + if (index < (current.length >>> 3) && current.length - 1 > 32) { + val ans = java.util.Arrays.copyOf(current, index + 1) + ans(ans.length - 1) = current(current.length - 1) + ans + } + else current + pv = pv + index + x(x.length - 1) = pv + history(hIndex) = x + hIndex += 1 + } + while (h < that.hIndex) { + val cuml = that.cumulative(h) + pv = pv + cuml - prev + prev = cuml + val x = that.history(h) + x(x.length - 1) = pv + history(hIndex) = x + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear() + } + + override def clear(): Unit = { + super.clear() + current = LongAccumulator.emptyLongArray + history = LongAccumulator.emptyLongArrayArray + } + + /** Retrieves the `ix`th element. */ + def apply(ix: Long): Long = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) + } + } + + /** Retrieves the `ix`th element, using an `Int` index. 
*/ + def apply(i: Int): Long = apply(i.toLong) + + def update(idx: Long, elem: Long): Unit = { + if (totalSize - idx <= index || hIndex == 0) current((idx - (totalSize - index)).toInt) = elem + else { + val w = seekSlot(idx) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) = elem + } + } + + def update(idx: Int, elem: Long): Unit = update(idx.toLong, elem) + + /** Returns an `Iterator` over the contents of this `LongAccumulator`. The `Iterator` is not specialized. */ + def iterator: Iterator[Long] = stepper.iterator + + override def foreach[U](f: Long => U): Unit = { + val s = stepper + while (s.hasStep) f(s.nextStep()) + } + + def map(f: Long => Long): LongAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) + b.addOne(f(s.nextStep())) + b.result() + } + + def flatMap(f: Long => IterableOnce[Long]): LongAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) + b.addAll(f(s.nextStep())) + b.result() + } + + def collect(pf: PartialFunction[Long, Long]): LongAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) { + val n = s.nextStep() + pf.runWith(b.addOne)(n) + } + b.result() + } + + private def filterAccImpl(pred: Long => Boolean, not: Boolean): LongAccumulator = { + val b = newSpecificBuilder + val s = stepper + while (s.hasStep) { + val n = s.nextStep() + if (pred(n) != not) b.addOne(n) + } + b.result() + } + + override def filter(pred: Long => Boolean): LongAccumulator = filterAccImpl(pred, not = false) + + override def filterNot(pred: Long => Boolean): LongAccumulator = filterAccImpl(pred, not = true) + + override def forall(p: Long => Boolean): Boolean = { + val s = stepper + while (s.hasStep) + if (!p(s.nextStep())) return false + true + } + + override def exists(p: Long => Boolean): Boolean = { + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) return true + false + } + + override def count(p: Long => Boolean): Int = { + var r = 0 + val s = stepper + while 
(s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + def countLong(p: Long => Boolean): Long = { + var r = 0L + val s = stepper + while (s.hasStep) + if (p(s.nextStep())) r += 1 + r + } + + /** Copies the elements in this `LongAccumulator` into an `Array[Long]` */ + @nowarn // cat=lint-overload see toArray[B: ClassTag] + def toArray: Array[Long] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[Long](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = x(x.length-1) + val n = (cuml - pv).toInt + pv = cuml + System.arraycopy(x, 0, a, j, n) + j += n + h += 1 + } + System.arraycopy(current, 0, a, j, index) + j += index + a + } + + /** Copies the elements in this `LongAccumulator` to a `List` */ + override def toList: List[Long] = { + var ans: List[Long] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i) :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i) :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** + * Copy the elements in this `LongAccumulator` to a specified collection. + * Note that the target collection is not specialized. 
+ * Usage example: `acc.to(Vector)` + */ + override def to[C1](factory: Factory[Long, C1]): C1 = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + factory.fromSpecific(iterator) + } + + override protected def fromSpecific(coll: IterableOnce[Long]): LongAccumulator = LongAccumulator.fromSpecific(coll) + override protected def newSpecificBuilder: LongAccumulator = LongAccumulator.newBuilder + override def iterableFactory: SeqFactory[AnyAccumulator] = AnyAccumulator + + override def empty: LongAccumulator = LongAccumulator.empty + + private def writeReplace(): AnyRef = new LongAccumulator.SerializationProxy(this) +} + +object LongAccumulator extends collection.SpecificIterableFactory[Long, LongAccumulator] { + private val emptyLongArray = new Array[Long](0) + private val emptyLongArrayArray = new Array[Array[Long]](0) + + implicit def toJavaLongAccumulator(ia: LongAccumulator.type): collection.SpecificIterableFactory[jl.Long, LongAccumulator] = LongAccumulator.asInstanceOf[collection.SpecificIterableFactory[jl.Long, LongAccumulator]] + + import java.util.{function => jf} + + /** A `Supplier` of `LongAccumulator`s, suitable for use with `java.util.stream.LongStream`'s `collect` method. Suitable for `Stream[Long]` also. */ + def supplier: jf.Supplier[LongAccumulator] = () => new LongAccumulator + + /** A `BiConsumer` that adds an element to an `LongAccumulator`, suitable for use with `java.util.stream.LongStream`'s `collect` method. */ + def adder: jf.ObjLongConsumer[LongAccumulator] = (ac: LongAccumulator, a: Long) => ac addOne a + + /** A `BiConsumer` that adds a boxed `Long` to an `LongAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. 
*/ + def boxedAdder: jf.BiConsumer[LongAccumulator, Long] = (ac: LongAccumulator, a: Long) => ac addOne a + + /** A `BiConsumer` that merges `LongAccumulator`s, suitable for use with `java.util.stream.LongStream`'s `collect` method. Suitable for `Stream[Long]` also. */ + def merger: jf.BiConsumer[LongAccumulator, LongAccumulator] = (a1: LongAccumulator, a2: LongAccumulator) => a1 drain a2 + + private def fromArray(a: Array[Long]): LongAccumulator = { + val r = new LongAccumulator + var i = 0 + while (i < a.length) { r addOne a(i); i += 1 } + r + } + + override def fromSpecific(it: IterableOnce[Long]): LongAccumulator = it match { + case acc: LongAccumulator => acc + case as: collection.immutable.ArraySeq.ofLong => fromArray(as.unsafeArray) + case as: collection.mutable.ArraySeq.ofLong => fromArray(as.array) // this case ensures Array(1).to(Accumulator) doesn't box + case _ => (new LongAccumulator).addAll(it) + } + + override def empty: LongAccumulator = new LongAccumulator + + override def newBuilder: LongAccumulator = new LongAccumulator + + class SerializationProxy[A](@transient private val acc: LongAccumulator) extends Serializable { + @transient private var result: LongAccumulator = _ + + private def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val size = acc.sizeLong + out.writeLong(size) + val st = acc.stepper + while (st.hasStep) + out.writeLong(st.nextStep()) + } + + private def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val res = new LongAccumulator() + var elems = in.readLong() + while (elems > 0) { + res += in.readLong() + elems -= 1L + } + result = res + } + + private def readResolve(): AnyRef = result + } +} + +private[jdk] class LongAccumulatorStepper(private val acc: LongAccumulator) extends LongStepper with EfficientSplit { + import java.util.Spliterator._ + + private var h: Int = 0 + private var i: Int = 0 + private var a: Array[Long] = if (acc.hIndex > 0) acc.history(0) else acc.current + 
private var n: Long = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N: Long = acc.totalSize + + private def duplicateSelf(limit: Long): LongAccumulatorStepper = { + val ans = new LongAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics: Int = ORDERED | SIZED | SUBSIZED | NONNULL + + def estimateSize: Long = N + + def hasStep: Boolean = N > 0 + + def nextStep(): Long = + if (n <= 0) throw new NoSuchElementException("next on empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i) + i += 1 + N -= 1 + ans + } + + def trySplit(): LongStepper = + if (N <= 1) null + else { + val half = N >> 1 + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } + + override def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) { + // Overridden for efficiency + override def tryAdvance(c: LongConsumer): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { + case ic: LongConsumer => tryAdvance(ic) + case _ => + if (N <= 0) false + else { + if (i >= n) loadMore() + c.accept(a(i)) + i += 1 + N -= 1 + true + } + } + + // Overridden for efficiency + override def forEachRemaining(c: LongConsumer): Unit = + while (N > 0) { + if (i >= n) 
loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + c.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + + // Overridden for efficiency + override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { + case ic: LongConsumer => forEachRemaining(ic) + case _ => + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + c.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + } + } +} diff --git a/src/library/scala/jdk/OptionConverters.scala b/src/library/scala/jdk/OptionConverters.scala new file mode 100644 index 000000000000..5fbfef206394 --- /dev/null +++ b/src/library/scala/jdk/OptionConverters.scala @@ -0,0 +1,111 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.util.{Optional, OptionalDouble, OptionalInt, OptionalLong} + +/** This object provides extension methods that convert between Scala `Option` and Java `Optional` + * types. + * + * When writing Java code, use the explicit conversion methods defined in + * [[javaapi.OptionConverters]] instead. + * + * Scala `Option` is extended with a `toJava` method that creates a corresponding `Optional`, and + * a `toJavaPrimitive` method that creates a specialized variant (e.g., `OptionalInt`) if + * applicable. + * + * Java `Optional` is extended with a `toScala` method and a `toJavaPrimitive` method. + * + * Finally, specialized `Optional` types are extended with `toScala` and `toJavaGeneric` methods. 
+ * + * Example usage: + * + * {{{ + * import scala.jdk.OptionConverters._ + * val a = Option("example").toJava // Creates java.util.Optional[String] containing "example" + * val b = (None: Option[String]).toJava // Creates an empty java.util.Optional[String] + * val c = a.toScala // Back to Option("example") + * val d = b.toScala // Back to None typed as Option[String] + * val e = Option(2.7).toJava // java.util.Optional[Double] containing boxed 2.7 + * val f = Option(2.7).toJavaPrimitive // java.util.OptionalDouble containing 2.7 (not boxed) + * val g = f.toScala // Back to Option(2.7) + * val h = f.toJavaGeneric // Same as e + * val i = e.toJavaPrimitive // Same as f + * }}} + */ +object OptionConverters { + /** Provides conversions from Java `Optional` to Scala `Option` and specialized `Optional` types */ + implicit class RichOptional[A](private val o: java.util.Optional[A]) extends AnyVal { + /** Convert a Java `Optional` to a Scala `Option` */ + def toScala: Option[A] = if (o.isPresent) Some(o.get) else None + + /** Convert a Java `Optional` to a Scala `Option` */ + @deprecated("Use `toScala` instead", "2.13.0") + def asScala: Option[A] = if (o.isPresent) Some(o.get) else None + + /** Convert a generic Java `Optional` to a specialized variant */ + def toJavaPrimitive[O](implicit shape: OptionShape[A, O]): O = shape.fromJava(o) + } + + /** Provides conversions from Scala `Option` to Java `Optional` types */ + implicit class RichOption[A](private val o: Option[A]) extends AnyVal { + /** Convert a Scala `Option` to a generic Java `Optional` */ + def toJava: Optional[A] = o match { case Some(a) => Optional.ofNullable(a); case _ => Optional.empty[A] } + + /** Convert a Scala `Option` to a generic Java `Optional` */ + @deprecated("Use `toJava` instead", "2.13.0") + def asJava: Optional[A] = o match { case Some(a) => Optional.ofNullable(a); case _ => Optional.empty[A] } + + /** Convert a Scala `Option` to a specialized Java `Optional` */ + def 
toJavaPrimitive[O](implicit shape: OptionShape[A, O]): O = shape.fromScala(o) + } + + /** Provides conversions from `OptionalDouble` to Scala `Option` and the generic `Optional` */ + implicit class RichOptionalDouble(private val o: OptionalDouble) extends AnyVal { + /** Convert a Java `OptionalDouble` to a Scala `Option` */ + def toScala: Option[Double] = if (o.isPresent) Some(o.getAsDouble) else None + + /** Convert a Java `OptionalDouble` to a Scala `Option` */ + @deprecated("Use `toScala` instead", "2.13.0") + def asScala: Option[Double] = if (o.isPresent) Some(o.getAsDouble) else None + + /** Convert a Java `OptionalDouble` to a generic Java `Optional` */ + def toJavaGeneric: Optional[Double] = if (o.isPresent) Optional.of(o.getAsDouble) else Optional.empty[Double] + } + + /** Provides conversions from `OptionalInt` to Scala `Option` and the generic `Optional` */ + implicit class RichOptionalInt(private val o: OptionalInt) extends AnyVal { + /** Convert a Java `OptionalInt` to a Scala `Option` */ + def toScala: Option[Int] = if (o.isPresent) Some(o.getAsInt) else None + + /** Convert a Java `OptionalInt` to a Scala `Option` */ + @deprecated("Use `toScala` instead", "2.13.0") + def asScala: Option[Int] = if (o.isPresent) Some(o.getAsInt) else None + + /** Convert a Java `OptionalInt` to a generic Java `Optional` */ + def toJavaGeneric: Optional[Int] = if (o.isPresent) Optional.of(o.getAsInt) else Optional.empty[Int] + } + + /** Provides conversions from `OptionalLong` to Scala `Option` and the generic `Optional` */ + implicit class RichOptionalLong(private val o: OptionalLong) extends AnyVal { + /** Convert a Java `OptionalLong` to a Scala `Option` */ + def toScala: Option[Long] = if (o.isPresent) Some(o.getAsLong) else None + + /** Convert a Java `OptionalLong` to a Scala `Option` */ + @deprecated("Use `toScala` instead", "2.13.0") + def asScala: Option[Long] = if (o.isPresent) Some(o.getAsLong) else None + + /** Convert a Java `OptionalLong` to a generic Java 
`Optional` */ + def toJavaGeneric: Optional[Long] = if (o.isPresent) Optional.of(o.getAsLong) else Optional.empty[Long] + } +} diff --git a/src/library/scala/jdk/OptionShape.scala b/src/library/scala/jdk/OptionShape.scala new file mode 100644 index 000000000000..e56b3296e439 --- /dev/null +++ b/src/library/scala/jdk/OptionShape.scala @@ -0,0 +1,67 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import java.util.{Optional, OptionalDouble, OptionalInt, OptionalLong} +import java.{lang => jl} + +import scala.annotation.implicitNotFound + +/** A type class implementing conversions from a generic Scala `Option` or Java `Optional` to + * a specialized Java variant (for `Double`, `Int` and `Long`). + * + * @tparam A the primitive type wrapped in an option + * @tparam O the specialized Java `Optional` wrapping an element of type `A` + */ +@implicitNotFound("No specialized Optional type exists for elements of type ${A}") +sealed abstract class OptionShape[A, O] { + /** Converts from `Optional` to the specialized variant `O` */ + def fromJava(o: Optional[A]): O + /** Converts from `Option` to the specialized variant `O` */ + def fromScala(o: Option[A]): O +} + +object OptionShape { + implicit val doubleOptionShape: OptionShape[Double, OptionalDouble] = new OptionShape[Double, OptionalDouble] { + def fromJava(o: Optional[Double]): OptionalDouble = + if (o.isPresent) OptionalDouble.of(o.get) else OptionalDouble.empty + + def fromScala(o: Option[Double]): OptionalDouble = o match { + case Some(d) => OptionalDouble.of(d) + case _ => OptionalDouble.empty + } + } + implicit val jDoubleOptionShape: OptionShape[jl.Double, OptionalDouble] = doubleOptionShape.asInstanceOf[OptionShape[jl.Double, 
OptionalDouble]] + + implicit val intOptionShape: OptionShape[Int, OptionalInt] = new OptionShape[Int, OptionalInt] { + def fromJava(o: Optional[Int]): OptionalInt = + if (o.isPresent) OptionalInt.of(o.get) else OptionalInt.empty + + def fromScala(o: Option[Int]): OptionalInt = o match { + case Some(d) => OptionalInt.of(d) + case _ => OptionalInt.empty + } + } + implicit val jIntegerOptionShape: OptionShape[jl.Integer, OptionalInt] = intOptionShape.asInstanceOf[OptionShape[jl.Integer, OptionalInt]] + + implicit val longOptionShape: OptionShape[Long, OptionalLong] = new OptionShape[Long, OptionalLong] { + def fromJava(o: Optional[Long]): OptionalLong = + if (o.isPresent) OptionalLong.of(o.get) else OptionalLong.empty + + def fromScala(o: Option[Long]): OptionalLong = o match { + case Some(d) => OptionalLong.of(d) + case _ => OptionalLong.empty + } + } + implicit val jLongOptionShape: OptionShape[jl.Long, OptionalLong] = longOptionShape.asInstanceOf[OptionShape[jl.Long, OptionalLong]] +} diff --git a/src/library/scala/jdk/StreamConverters.scala b/src/library/scala/jdk/StreamConverters.scala new file mode 100644 index 000000000000..e3338bdba011 --- /dev/null +++ b/src/library/scala/jdk/StreamConverters.scala @@ -0,0 +1,92 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk + +import scala.collection.convert.StreamExtensions + +/** This object provides extension methods to create [[java.util.stream.Stream Java Streams]] that + * operate on Scala collections (sequentially or in parallel). For more information on Java + * streams, consult the documentation + * ([[https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html]]). 
+ * + * When writing Java code, use the explicit conversion methods defined in + * [[javaapi.StreamConverters]] instead. + * + * The methods `asJavaSeqStream` and `asJavaParStream` convert a collection to a Java Stream: + * + * {{{ + * scala> import scala.jdk.StreamConverters._ + * + * scala> val s = (1 to 10).toList.asJavaSeqStream + * s: java.util.stream.IntStream = java.util.stream.IntPipeline\$Head@7b1e5e55 + * + * scala> s.map(_ * 2).filter(_ > 5).toScala(List) + * res1: List[Int] = List(6, 8, 10, 12, 14, 16, 18, 20) + * }}} + * + * Note: using parallel streams in the Scala REPL causes deadlocks, see + * [[https://github.com/scala/bug/issues/9076]]. As a workaround, use `scala -Yrepl-class-based`. + * + * {{{ + * scala> def isPrime(n: Int): Boolean = !(2 +: (3 to Math.sqrt(n).toInt by 2) exists (n % _ == 0)) + * isPrime: (n: Int)Boolean + * + * scala> (10000 to 1000000).asJavaParStream.filter(isPrime).toScala(Vector) + * res6: scala.collection.immutable.Vector[Int] = Vector(10007, 10009, 10037, 10039, ... + * }}} + * + * A Java [[Stream]] provides operations on a sequence of elements. Streams are created from + * [[java.util.Spliterator Spliterators]], which are similar to Iterators with the additional + * capability to partition off some of their elements. This partitioning, if supported by the + * Spliterator, is used for parallelizing Stream operations. + * + * Scala collections have a method [[scala.collection.IterableOnce.stepper `stepper`]] that + * returns a [[scala.collection.Stepper]] for the collection, which in turn can be converted to a + * Spliterator for creating a Java Stream. + * + * The `asJavaSeqStream` extension method is available on any Scala collection. The + * `asJavaParStream` extension method can only be invoked on collections where the return type of + * the [[scala.collection.IterableOnce.stepper `stepper`]] method is marked with the + * [[scala.collection.Stepper.EfficientSplit]] marker trait. 
This trait is added to steppers that + * support partitioning, and therefore efficient parallel processing. + * + * The following extension methods are available: + * + * | Collection Type | Extension Methods | + * | --- | --- | + * | `IterableOnce` | `asJavaSeqStream` | + * | `IndexedSeq`, Arrays, `BitSet`, `Accumulator`, `HashMap`, `HashSet`, `Range`, `TreeMap`, `TreeSet`, `Vector`, Strings | `asJavaParStream` | + * | `Map` | `asJavaSeqKeyStream`, `asJavaSeqValueStream` | + * | `HashMap`, `TreeMap` | `asJavaParKeyStream`, `asJavaParValueStream` | + * | `Stepper` | `asJavaSeqStream` | + * | `Stepper with EfficientSplit` | `asJavaParStream` | + * | Strings | `asJavaSeqStream`, `asJavaParStream`, `asJavaSeqCharStream`, `asJavaParCharStream`, `asJavaSeqCodePointStream`, `asJavaParCodePointStream` | + * | Java streams | `toScala`, `asJavaPrimitiveStream` | + * + * The `asJavaPrimitiveStream` method converts a `Stream[Int]` to an `IntStream`. It is the dual + * of the `boxed` method defined on primitive streams (e.g., `IntStream.boxed` is a + * `Stream[Integer]`). + * + * The `toScala` extension methods on Java streams collects the result of a stream pipeline into a + * Scala collection, for example `stream.toScala(List)`, `stream.toScala(Vector)`. Note that + * transformation operations on streams are lazy (also called "intermediate"), terminal operations + * such as `forEach`, `count` or `toScala` trigger the evaluation. + * + * Collecting a parallel stream to a collection can be performed in parallel. This is beneficial if + * the target collection supports efficient merging of the segments that are built in parallel. + * To support this use case, the Scala standard library provides the [[Accumulator]] collection. + * This collection supports efficient parallel construction, and it has specialized subtypes for + * `Int`, `Long` and `Double` so that primitive Java streams can be collected to a Scala collection + * without boxing the elements. 
+ */ +object StreamConverters extends StreamExtensions diff --git a/src/library/scala/jdk/javaapi/CollectionConverters.scala b/src/library/scala/jdk/javaapi/CollectionConverters.scala new file mode 100644 index 000000000000..8bf1bb9e2a41 --- /dev/null +++ b/src/library/scala/jdk/javaapi/CollectionConverters.scala @@ -0,0 +1,77 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk.javaapi + +import scala.collection.convert.{AsJavaConverters, AsScalaConverters} + +/** This object contains methods that convert between Scala and Java collections. + * + * The explicit conversion methods defined here are intended to be used in Java code. For Scala + * code, it is recommended to use the extension methods defined in + * [[scala.jdk.CollectionConverters]]. + * + * Note: to create [[java.util.stream.Stream Java Streams]] that operate on Scala collections + * (sequentially or in parallel), use [[StreamConverters]]. + * + * {{{ + * // Java Code + * import scala.jdk.javaapi.CollectionConverters; + * public class A { + * public void t(scala.collection.immutable.List l) { + * java.util.List jl = CollectionConverters.asJava(l); + * } + * } + * }}} + * + * The conversions return adapters for the corresponding API, i.e., the collections are wrapped, + * not copied. Changes to the original collection are reflected in the view, and vice versa. 
+ * + * The following conversions are supported via `asScala` and `asJava`: + * + * {{{ + * scala.collection.Iterable <=> java.lang.Iterable + * scala.collection.Iterator <=> java.util.Iterator + * scala.collection.mutable.Buffer <=> java.util.List + * scala.collection.mutable.Set <=> java.util.Set + * scala.collection.mutable.Map <=> java.util.Map + * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap + * }}} + * + * The following conversions are supported via `asScala` and through + * specially-named methods to convert to Java collections, as shown: + * + * {{{ + * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection) + * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration) + * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary) + * }}} + * + * In addition, the following one-way conversions are provided via `asJava`: + * + * {{{ + * scala.collection.Seq => java.util.List + * scala.collection.mutable.Seq => java.util.List + * scala.collection.Set => java.util.Set + * scala.collection.Map => java.util.Map + * }}} + * + * The following one way conversion is provided via `asScala`: + * + * {{{ + * java.util.Properties => scala.collection.mutable.Map + * }}} + * + * In all cases, converting from a source type to a target type and back + * again will return the original source object. + */ +object CollectionConverters extends AsJavaConverters with AsScalaConverters diff --git a/src/library/scala/jdk/javaapi/DurationConverters.scala b/src/library/scala/jdk/javaapi/DurationConverters.scala new file mode 100644 index 000000000000..00285a11c41a --- /dev/null +++ b/src/library/scala/jdk/javaapi/DurationConverters.scala @@ -0,0 +1,73 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk.javaapi + +import java.time.temporal.ChronoUnit +import java.time.{Duration => JDuration} +import java.util.concurrent.TimeUnit + +import scala.concurrent.duration.{Duration, FiniteDuration} + +/** This object contains methods that convert between Scala and Java duration types. + * + * The explicit conversion methods defined here are intended to be used in Java code. For Scala + * code, it is recommended to use the extension methods defined in [[scala.jdk.DurationConverters]]. + */ +object DurationConverters { + /** Convert a Java duration to a Scala duration. If the nanosecond part of the Java duration is + * zero, the returned duration will have a time unit of seconds. If there is a nanoseconds part, + * the Scala duration will have a time unit of nanoseconds. + * + * @throws IllegalArgumentException If the given Java Duration is out of bounds of what can be + * expressed by [[scala.concurrent.duration.FiniteDuration]]. + */ + def toScala(duration: JDuration): FiniteDuration = { + val originalSeconds = duration.getSeconds + val originalNanos = duration.getNano + if (originalNanos == 0) { + if (originalSeconds == 0) Duration.Zero + else FiniteDuration(originalSeconds, TimeUnit.SECONDS) + } else if (originalSeconds == 0) { + FiniteDuration(originalNanos, TimeUnit.NANOSECONDS) + } else { + try { + val secondsAsNanos = Math.multiplyExact(originalSeconds, 1000000000) + val totalNanos = secondsAsNanos + originalNanos + if ((totalNanos < 0 && secondsAsNanos < 0) || (totalNanos > 0 && secondsAsNanos > 0)) + FiniteDuration(totalNanos, TimeUnit.NANOSECONDS) + else + throw new ArithmeticException() + } catch { + case _: ArithmeticException => + throw new IllegalArgumentException(s"Java duration $duration cannot be expressed as a Scala duration") + } + } + } + + /** Convert a Scala `FiniteDuration` to a Java duration. 
Note that the Scala duration keeps the + * time unit it was created with, while a Java duration always is a pair of seconds and nanos, + * so the unit is lost. + */ + def toJava(duration: FiniteDuration): JDuration = { + if (duration.length == 0) JDuration.ZERO + else duration.unit match { + case TimeUnit.NANOSECONDS => JDuration.ofNanos(duration.length) + case TimeUnit.MICROSECONDS => JDuration.of(duration.length, ChronoUnit.MICROS) + case TimeUnit.MILLISECONDS => JDuration.ofMillis(duration.length) + case TimeUnit.SECONDS => JDuration.ofSeconds(duration.length) + case TimeUnit.MINUTES => JDuration.ofMinutes(duration.length) + case TimeUnit.HOURS => JDuration.ofHours(duration.length) + case TimeUnit.DAYS => JDuration.ofDays(duration.length) + } + } +} diff --git a/src/library/scala/jdk/javaapi/FunctionConverters.scala b/src/library/scala/jdk/javaapi/FunctionConverters.scala new file mode 100644 index 000000000000..22c9769bbf42 --- /dev/null +++ b/src/library/scala/jdk/javaapi/FunctionConverters.scala @@ -0,0 +1,956 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +// GENERATED CODE: DO NOT EDIT. + + +package scala.jdk.javaapi + +/** This object contains methods that convert between Scala and Java function types. + * + * The explicit conversion methods defined here are intended to be used in Java code. For Scala + * code, it is recommended to use the extension methods defined in [[scala.jdk.FunctionConverters]]. + * + * For details how the function converters work, see [[scala.jdk.FunctionConverters]]. 
+ * + */ +object FunctionConverters { + import scala.jdk.FunctionWrappers._ + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asScalaFromBiConsumer[T, U](jf: java.util.function.BiConsumer[T, U]): scala.Function2[T, U, scala.runtime.BoxedUnit] = jf match { + case AsJavaBiConsumer((f @ _)) => f.asInstanceOf[scala.Function2[T, U, scala.runtime.BoxedUnit]] + case _ => new FromJavaBiConsumer[T, U](jf).asInstanceOf[scala.Function2[T, U, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asJavaBiConsumer[T, U](sf: scala.Function2[T, U, scala.runtime.BoxedUnit]): java.util.function.BiConsumer[T, U] = ((sf): AnyRef) match { + case FromJavaBiConsumer((f @ _)) => f.asInstanceOf[java.util.function.BiConsumer[T, U]] + case _ => new AsJavaBiConsumer[T, U](sf.asInstanceOf[scala.Function2[T, U, Unit]]) + } + + + @inline def asScalaFromBiFunction[T, U, R](jf: java.util.function.BiFunction[T, U, R]): scala.Function2[T, U, R] = jf match { + case AsJavaBiFunction((f @ _)) => f.asInstanceOf[scala.Function2[T, U, R]] + case _ => new FromJavaBiFunction[T, U, R](jf).asInstanceOf[scala.Function2[T, U, R]] + } + + @inline def asJavaBiFunction[T, U, R](sf: scala.Function2[T, U, R]): java.util.function.BiFunction[T, U, R] = ((sf): AnyRef) match { + case FromJavaBiFunction((f @ _)) => f.asInstanceOf[java.util.function.BiFunction[T, U, R]] + case _ => new AsJavaBiFunction[T, U, R](sf.asInstanceOf[scala.Function2[T, U, R]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromBiPredicate[T, U](jf: java.util.function.BiPredicate[T, U]): scala.Function2[T, U, java.lang.Boolean] = jf match { + case AsJavaBiPredicate((f @ _)) => f.asInstanceOf[scala.Function2[T, U, java.lang.Boolean]] + case _ => new FromJavaBiPredicate[T, U](jf).asInstanceOf[scala.Function2[T, U, java.lang.Boolean]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaBiPredicate[T, U](sf: scala.Function2[T, U, java.lang.Boolean]): java.util.function.BiPredicate[T, U] = ((sf): AnyRef) match { + case FromJavaBiPredicate((f @ _)) => f.asInstanceOf[java.util.function.BiPredicate[T, U]] + case _ => new AsJavaBiPredicate[T, U](sf.asInstanceOf[scala.Function2[T, U, Boolean]]) + } + + + @inline def asScalaFromBinaryOperator[T](jf: java.util.function.BinaryOperator[T]): scala.Function2[T, T, T] = jf match { + case AsJavaBinaryOperator((f @ _)) => f.asInstanceOf[scala.Function2[T, T, T]] + case _ => new FromJavaBinaryOperator[T](jf).asInstanceOf[scala.Function2[T, T, T]] + } + + @inline def asJavaBinaryOperator[T](sf: scala.Function2[T, T, T]): java.util.function.BinaryOperator[T] = ((sf): AnyRef) match { + case FromJavaBinaryOperator((f @ _)) => f.asInstanceOf[java.util.function.BinaryOperator[T]] + case _ => new AsJavaBinaryOperator[T](sf.asInstanceOf[scala.Function2[T, T, T]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * 
[[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asScalaFromBooleanSupplier(jf: java.util.function.BooleanSupplier): scala.Function0[java.lang.Boolean] = jf match { + case AsJavaBooleanSupplier((f @ _)) => f.asInstanceOf[scala.Function0[java.lang.Boolean]] + case _ => new FromJavaBooleanSupplier(jf).asInstanceOf[scala.Function0[java.lang.Boolean]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaBooleanSupplier(sf: scala.Function0[java.lang.Boolean]): java.util.function.BooleanSupplier = ((sf): AnyRef) match { + case FromJavaBooleanSupplier((f @ _)) => f.asInstanceOf[java.util.function.BooleanSupplier] + case _ => new AsJavaBooleanSupplier(sf.asInstanceOf[scala.Function0[Boolean]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromConsumer[T](jf: java.util.function.Consumer[T]): scala.Function1[T, scala.runtime.BoxedUnit] = jf match { + case AsJavaConsumer((f @ _)) => f.asInstanceOf[scala.Function1[T, scala.runtime.BoxedUnit]] + case _ => new FromJavaConsumer[T](jf).asInstanceOf[scala.Function1[T, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaConsumer[T](sf: scala.Function1[T, scala.runtime.BoxedUnit]): java.util.function.Consumer[T] = ((sf): AnyRef) match { + case FromJavaConsumer((f @ _)) => f.asInstanceOf[java.util.function.Consumer[T]] + case _ => new AsJavaConsumer[T](sf.asInstanceOf[scala.Function1[T, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleBinaryOperator(jf: java.util.function.DoubleBinaryOperator): scala.Function2[java.lang.Double, java.lang.Double, java.lang.Double] = jf match { + case AsJavaDoubleBinaryOperator((f @ _)) => f.asInstanceOf[scala.Function2[java.lang.Double, java.lang.Double, java.lang.Double]] + case _ => new FromJavaDoubleBinaryOperator(jf).asInstanceOf[scala.Function2[java.lang.Double, java.lang.Double, java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleBinaryOperator(sf: scala.Function2[java.lang.Double, java.lang.Double, java.lang.Double]): java.util.function.DoubleBinaryOperator = ((sf): AnyRef) match { + case FromJavaDoubleBinaryOperator((f @ _)) => f.asInstanceOf[java.util.function.DoubleBinaryOperator] + case _ => new AsJavaDoubleBinaryOperator(sf.asInstanceOf[scala.Function2[Double, Double, Double]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleConsumer(jf: java.util.function.DoubleConsumer): scala.Function1[java.lang.Double, scala.runtime.BoxedUnit] = jf match { + case AsJavaDoubleConsumer((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Double, scala.runtime.BoxedUnit]] + case _ => new FromJavaDoubleConsumer(jf).asInstanceOf[scala.Function1[java.lang.Double, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleConsumer(sf: scala.Function1[java.lang.Double, scala.runtime.BoxedUnit]): java.util.function.DoubleConsumer = ((sf): AnyRef) match { + case FromJavaDoubleConsumer((f @ _)) => f.asInstanceOf[java.util.function.DoubleConsumer] + case _ => new AsJavaDoubleConsumer(sf.asInstanceOf[scala.Function1[Double, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleFunction[R](jf: java.util.function.DoubleFunction[R]): scala.Function1[java.lang.Double, R] = jf match { + case AsJavaDoubleFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Double, R]] + case _ => new FromJavaDoubleFunction[R](jf).asInstanceOf[scala.Function1[java.lang.Double, R]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleFunction[R](sf: scala.Function1[java.lang.Double, R]): java.util.function.DoubleFunction[R] = ((sf): AnyRef) match { + case FromJavaDoubleFunction((f @ _)) => f.asInstanceOf[java.util.function.DoubleFunction[R]] + case _ => new AsJavaDoubleFunction[R](sf.asInstanceOf[scala.Function1[Double, R]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoublePredicate(jf: java.util.function.DoublePredicate): scala.Function1[java.lang.Double, java.lang.Boolean] = jf match { + case AsJavaDoublePredicate((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Double, java.lang.Boolean]] + case _ => new FromJavaDoublePredicate(jf).asInstanceOf[scala.Function1[java.lang.Double, java.lang.Boolean]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoublePredicate(sf: scala.Function1[java.lang.Double, java.lang.Boolean]): java.util.function.DoublePredicate = ((sf): AnyRef) match { + case FromJavaDoublePredicate((f @ _)) => f.asInstanceOf[java.util.function.DoublePredicate] + case _ => new AsJavaDoublePredicate(sf.asInstanceOf[scala.Function1[Double, Boolean]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleSupplier(jf: java.util.function.DoubleSupplier): scala.Function0[java.lang.Double] = jf match { + case AsJavaDoubleSupplier((f @ _)) => f.asInstanceOf[scala.Function0[java.lang.Double]] + case _ => new FromJavaDoubleSupplier(jf).asInstanceOf[scala.Function0[java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleSupplier(sf: scala.Function0[java.lang.Double]): java.util.function.DoubleSupplier = ((sf): AnyRef) match { + case FromJavaDoubleSupplier((f @ _)) => f.asInstanceOf[java.util.function.DoubleSupplier] + case _ => new AsJavaDoubleSupplier(sf.asInstanceOf[scala.Function0[Double]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleToIntFunction(jf: java.util.function.DoubleToIntFunction): scala.Function1[java.lang.Double, java.lang.Integer] = jf match { + case AsJavaDoubleToIntFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Double, java.lang.Integer]] + case _ => new FromJavaDoubleToIntFunction(jf).asInstanceOf[scala.Function1[java.lang.Double, java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleToIntFunction(sf: scala.Function1[java.lang.Double, java.lang.Integer]): java.util.function.DoubleToIntFunction = ((sf): AnyRef) match { + case FromJavaDoubleToIntFunction((f @ _)) => f.asInstanceOf[java.util.function.DoubleToIntFunction] + case _ => new AsJavaDoubleToIntFunction(sf.asInstanceOf[scala.Function1[Double, Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleToLongFunction(jf: java.util.function.DoubleToLongFunction): scala.Function1[java.lang.Double, java.lang.Long] = jf match { + case AsJavaDoubleToLongFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Double, java.lang.Long]] + case _ => new FromJavaDoubleToLongFunction(jf).asInstanceOf[scala.Function1[java.lang.Double, java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleToLongFunction(sf: scala.Function1[java.lang.Double, java.lang.Long]): java.util.function.DoubleToLongFunction = ((sf): AnyRef) match { + case FromJavaDoubleToLongFunction((f @ _)) => f.asInstanceOf[java.util.function.DoubleToLongFunction] + case _ => new AsJavaDoubleToLongFunction(sf.asInstanceOf[scala.Function1[Double, Long]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromDoubleUnaryOperator(jf: java.util.function.DoubleUnaryOperator): scala.Function1[java.lang.Double, java.lang.Double] = jf match { + case AsJavaDoubleUnaryOperator((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Double, java.lang.Double]] + case _ => new FromJavaDoubleUnaryOperator(jf).asInstanceOf[scala.Function1[java.lang.Double, java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaDoubleUnaryOperator(sf: scala.Function1[java.lang.Double, java.lang.Double]): java.util.function.DoubleUnaryOperator = ((sf): AnyRef) match { + case FromJavaDoubleUnaryOperator((f @ _)) => f.asInstanceOf[java.util.function.DoubleUnaryOperator] + case _ => new AsJavaDoubleUnaryOperator(sf.asInstanceOf[scala.Function1[Double, Double]]) + } + + + @inline def asScalaFromFunction[T, R](jf: java.util.function.Function[T, R]): scala.Function1[T, R] = jf match { + case AsJavaFunction((f @ _)) => f.asInstanceOf[scala.Function1[T, R]] + case _ => new FromJavaFunction[T, R](jf).asInstanceOf[scala.Function1[T, R]] + } + + @inline def asJavaFunction[T, R](sf: scala.Function1[T, R]): java.util.function.Function[T, R] = ((sf): AnyRef) match { + case FromJavaFunction((f @ _)) => f.asInstanceOf[java.util.function.Function[T, R]] + case _ => new AsJavaFunction[T, R](sf.asInstanceOf[scala.Function1[T, R]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * 
[[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asScalaFromIntBinaryOperator(jf: java.util.function.IntBinaryOperator): scala.Function2[java.lang.Integer, java.lang.Integer, java.lang.Integer] = jf match { + case AsJavaIntBinaryOperator((f @ _)) => f.asInstanceOf[scala.Function2[java.lang.Integer, java.lang.Integer, java.lang.Integer]] + case _ => new FromJavaIntBinaryOperator(jf).asInstanceOf[scala.Function2[java.lang.Integer, java.lang.Integer, java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntBinaryOperator(sf: scala.Function2[java.lang.Integer, java.lang.Integer, java.lang.Integer]): java.util.function.IntBinaryOperator = ((sf): AnyRef) match { + case FromJavaIntBinaryOperator((f @ _)) => f.asInstanceOf[java.util.function.IntBinaryOperator] + case _ => new AsJavaIntBinaryOperator(sf.asInstanceOf[scala.Function2[Int, Int, Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntConsumer(jf: java.util.function.IntConsumer): scala.Function1[java.lang.Integer, scala.runtime.BoxedUnit] = jf match { + case AsJavaIntConsumer((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Integer, scala.runtime.BoxedUnit]] + case _ => new FromJavaIntConsumer(jf).asInstanceOf[scala.Function1[java.lang.Integer, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntConsumer(sf: scala.Function1[java.lang.Integer, scala.runtime.BoxedUnit]): java.util.function.IntConsumer = ((sf): AnyRef) match { + case FromJavaIntConsumer((f @ _)) => f.asInstanceOf[java.util.function.IntConsumer] + case _ => new AsJavaIntConsumer(sf.asInstanceOf[scala.Function1[Int, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntFunction[R](jf: java.util.function.IntFunction[R]): scala.Function1[java.lang.Integer, R] = jf match { + case AsJavaIntFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Integer, R]] + case _ => new FromJavaIntFunction[R](jf).asInstanceOf[scala.Function1[java.lang.Integer, R]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntFunction[R](sf: scala.Function1[java.lang.Integer, R]): java.util.function.IntFunction[R] = ((sf): AnyRef) match { + case FromJavaIntFunction((f @ _)) => f.asInstanceOf[java.util.function.IntFunction[R]] + case _ => new AsJavaIntFunction[R](sf.asInstanceOf[scala.Function1[Int, R]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntPredicate(jf: java.util.function.IntPredicate): scala.Function1[java.lang.Integer, java.lang.Boolean] = jf match { + case AsJavaIntPredicate((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Boolean]] + case _ => new FromJavaIntPredicate(jf).asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Boolean]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntPredicate(sf: scala.Function1[java.lang.Integer, java.lang.Boolean]): java.util.function.IntPredicate = ((sf): AnyRef) match { + case FromJavaIntPredicate((f @ _)) => f.asInstanceOf[java.util.function.IntPredicate] + case _ => new AsJavaIntPredicate(sf.asInstanceOf[scala.Function1[Int, Boolean]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntSupplier(jf: java.util.function.IntSupplier): scala.Function0[java.lang.Integer] = jf match { + case AsJavaIntSupplier((f @ _)) => f.asInstanceOf[scala.Function0[java.lang.Integer]] + case _ => new FromJavaIntSupplier(jf).asInstanceOf[scala.Function0[java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntSupplier(sf: scala.Function0[java.lang.Integer]): java.util.function.IntSupplier = ((sf): AnyRef) match { + case FromJavaIntSupplier((f @ _)) => f.asInstanceOf[java.util.function.IntSupplier] + case _ => new AsJavaIntSupplier(sf.asInstanceOf[scala.Function0[Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntToDoubleFunction(jf: java.util.function.IntToDoubleFunction): scala.Function1[java.lang.Integer, java.lang.Double] = jf match { + case AsJavaIntToDoubleFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Double]] + case _ => new FromJavaIntToDoubleFunction(jf).asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntToDoubleFunction(sf: scala.Function1[java.lang.Integer, java.lang.Double]): java.util.function.IntToDoubleFunction = ((sf): AnyRef) match { + case FromJavaIntToDoubleFunction((f @ _)) => f.asInstanceOf[java.util.function.IntToDoubleFunction] + case _ => new AsJavaIntToDoubleFunction(sf.asInstanceOf[scala.Function1[Int, Double]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntToLongFunction(jf: java.util.function.IntToLongFunction): scala.Function1[java.lang.Integer, java.lang.Long] = jf match { + case AsJavaIntToLongFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Long]] + case _ => new FromJavaIntToLongFunction(jf).asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntToLongFunction(sf: scala.Function1[java.lang.Integer, java.lang.Long]): java.util.function.IntToLongFunction = ((sf): AnyRef) match { + case FromJavaIntToLongFunction((f @ _)) => f.asInstanceOf[java.util.function.IntToLongFunction] + case _ => new AsJavaIntToLongFunction(sf.asInstanceOf[scala.Function1[Int, Long]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromIntUnaryOperator(jf: java.util.function.IntUnaryOperator): scala.Function1[java.lang.Integer, java.lang.Integer] = jf match { + case AsJavaIntUnaryOperator((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Integer]] + case _ => new FromJavaIntUnaryOperator(jf).asInstanceOf[scala.Function1[java.lang.Integer, java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaIntUnaryOperator(sf: scala.Function1[java.lang.Integer, java.lang.Integer]): java.util.function.IntUnaryOperator = ((sf): AnyRef) match { + case FromJavaIntUnaryOperator((f @ _)) => f.asInstanceOf[java.util.function.IntUnaryOperator] + case _ => new AsJavaIntUnaryOperator(sf.asInstanceOf[scala.Function1[Int, Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongBinaryOperator(jf: java.util.function.LongBinaryOperator): scala.Function2[java.lang.Long, java.lang.Long, java.lang.Long] = jf match { + case AsJavaLongBinaryOperator((f @ _)) => f.asInstanceOf[scala.Function2[java.lang.Long, java.lang.Long, java.lang.Long]] + case _ => new FromJavaLongBinaryOperator(jf).asInstanceOf[scala.Function2[java.lang.Long, java.lang.Long, java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongBinaryOperator(sf: scala.Function2[java.lang.Long, java.lang.Long, java.lang.Long]): java.util.function.LongBinaryOperator = ((sf): AnyRef) match { + case FromJavaLongBinaryOperator((f @ _)) => f.asInstanceOf[java.util.function.LongBinaryOperator] + case _ => new AsJavaLongBinaryOperator(sf.asInstanceOf[scala.Function2[Long, Long, Long]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongConsumer(jf: java.util.function.LongConsumer): scala.Function1[java.lang.Long, scala.runtime.BoxedUnit] = jf match { + case AsJavaLongConsumer((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Long, scala.runtime.BoxedUnit]] + case _ => new FromJavaLongConsumer(jf).asInstanceOf[scala.Function1[java.lang.Long, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongConsumer(sf: scala.Function1[java.lang.Long, scala.runtime.BoxedUnit]): java.util.function.LongConsumer = ((sf): AnyRef) match { + case FromJavaLongConsumer((f @ _)) => f.asInstanceOf[java.util.function.LongConsumer] + case _ => new AsJavaLongConsumer(sf.asInstanceOf[scala.Function1[Long, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongFunction[R](jf: java.util.function.LongFunction[R]): scala.Function1[java.lang.Long, R] = jf match { + case AsJavaLongFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Long, R]] + case _ => new FromJavaLongFunction[R](jf).asInstanceOf[scala.Function1[java.lang.Long, R]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongFunction[R](sf: scala.Function1[java.lang.Long, R]): java.util.function.LongFunction[R] = ((sf): AnyRef) match { + case FromJavaLongFunction((f @ _)) => f.asInstanceOf[java.util.function.LongFunction[R]] + case _ => new AsJavaLongFunction[R](sf.asInstanceOf[scala.Function1[Long, R]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongPredicate(jf: java.util.function.LongPredicate): scala.Function1[java.lang.Long, java.lang.Boolean] = jf match { + case AsJavaLongPredicate((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Long, java.lang.Boolean]] + case _ => new FromJavaLongPredicate(jf).asInstanceOf[scala.Function1[java.lang.Long, java.lang.Boolean]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongPredicate(sf: scala.Function1[java.lang.Long, java.lang.Boolean]): java.util.function.LongPredicate = ((sf): AnyRef) match { + case FromJavaLongPredicate((f @ _)) => f.asInstanceOf[java.util.function.LongPredicate] + case _ => new AsJavaLongPredicate(sf.asInstanceOf[scala.Function1[Long, Boolean]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongSupplier(jf: java.util.function.LongSupplier): scala.Function0[java.lang.Long] = jf match { + case AsJavaLongSupplier((f @ _)) => f.asInstanceOf[scala.Function0[java.lang.Long]] + case _ => new FromJavaLongSupplier(jf).asInstanceOf[scala.Function0[java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongSupplier(sf: scala.Function0[java.lang.Long]): java.util.function.LongSupplier = ((sf): AnyRef) match { + case FromJavaLongSupplier((f @ _)) => f.asInstanceOf[java.util.function.LongSupplier] + case _ => new AsJavaLongSupplier(sf.asInstanceOf[scala.Function0[Long]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongToDoubleFunction(jf: java.util.function.LongToDoubleFunction): scala.Function1[java.lang.Long, java.lang.Double] = jf match { + case AsJavaLongToDoubleFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Long, java.lang.Double]] + case _ => new FromJavaLongToDoubleFunction(jf).asInstanceOf[scala.Function1[java.lang.Long, java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongToDoubleFunction(sf: scala.Function1[java.lang.Long, java.lang.Double]): java.util.function.LongToDoubleFunction = ((sf): AnyRef) match { + case FromJavaLongToDoubleFunction((f @ _)) => f.asInstanceOf[java.util.function.LongToDoubleFunction] + case _ => new AsJavaLongToDoubleFunction(sf.asInstanceOf[scala.Function1[Long, Double]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongToIntFunction(jf: java.util.function.LongToIntFunction): scala.Function1[java.lang.Long, java.lang.Integer] = jf match { + case AsJavaLongToIntFunction((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Long, java.lang.Integer]] + case _ => new FromJavaLongToIntFunction(jf).asInstanceOf[scala.Function1[java.lang.Long, java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongToIntFunction(sf: scala.Function1[java.lang.Long, java.lang.Integer]): java.util.function.LongToIntFunction = ((sf): AnyRef) match { + case FromJavaLongToIntFunction((f @ _)) => f.asInstanceOf[java.util.function.LongToIntFunction] + case _ => new AsJavaLongToIntFunction(sf.asInstanceOf[scala.Function1[Long, Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromLongUnaryOperator(jf: java.util.function.LongUnaryOperator): scala.Function1[java.lang.Long, java.lang.Long] = jf match { + case AsJavaLongUnaryOperator((f @ _)) => f.asInstanceOf[scala.Function1[java.lang.Long, java.lang.Long]] + case _ => new FromJavaLongUnaryOperator(jf).asInstanceOf[scala.Function1[java.lang.Long, java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaLongUnaryOperator(sf: scala.Function1[java.lang.Long, java.lang.Long]): java.util.function.LongUnaryOperator = ((sf): AnyRef) match { + case FromJavaLongUnaryOperator((f @ _)) => f.asInstanceOf[java.util.function.LongUnaryOperator] + case _ => new AsJavaLongUnaryOperator(sf.asInstanceOf[scala.Function1[Long, Long]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromObjDoubleConsumer[T](jf: java.util.function.ObjDoubleConsumer[T]): scala.Function2[T, java.lang.Double, scala.runtime.BoxedUnit] = jf match { + case AsJavaObjDoubleConsumer((f @ _)) => f.asInstanceOf[scala.Function2[T, java.lang.Double, scala.runtime.BoxedUnit]] + case _ => new FromJavaObjDoubleConsumer[T](jf).asInstanceOf[scala.Function2[T, java.lang.Double, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaObjDoubleConsumer[T](sf: scala.Function2[T, java.lang.Double, scala.runtime.BoxedUnit]): java.util.function.ObjDoubleConsumer[T] = ((sf): AnyRef) match { + case FromJavaObjDoubleConsumer((f @ _)) => f.asInstanceOf[java.util.function.ObjDoubleConsumer[T]] + case _ => new AsJavaObjDoubleConsumer[T](sf.asInstanceOf[scala.Function2[T, Double, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromObjIntConsumer[T](jf: java.util.function.ObjIntConsumer[T]): scala.Function2[T, java.lang.Integer, scala.runtime.BoxedUnit] = jf match { + case AsJavaObjIntConsumer((f @ _)) => f.asInstanceOf[scala.Function2[T, java.lang.Integer, scala.runtime.BoxedUnit]] + case _ => new FromJavaObjIntConsumer[T](jf).asInstanceOf[scala.Function2[T, java.lang.Integer, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaObjIntConsumer[T](sf: scala.Function2[T, java.lang.Integer, scala.runtime.BoxedUnit]): java.util.function.ObjIntConsumer[T] = ((sf): AnyRef) match { + case FromJavaObjIntConsumer((f @ _)) => f.asInstanceOf[java.util.function.ObjIntConsumer[T]] + case _ => new AsJavaObjIntConsumer[T](sf.asInstanceOf[scala.Function2[T, Int, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromObjLongConsumer[T](jf: java.util.function.ObjLongConsumer[T]): scala.Function2[T, java.lang.Long, scala.runtime.BoxedUnit] = jf match { + case AsJavaObjLongConsumer((f @ _)) => f.asInstanceOf[scala.Function2[T, java.lang.Long, scala.runtime.BoxedUnit]] + case _ => new FromJavaObjLongConsumer[T](jf).asInstanceOf[scala.Function2[T, java.lang.Long, scala.runtime.BoxedUnit]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaObjLongConsumer[T](sf: scala.Function2[T, java.lang.Long, scala.runtime.BoxedUnit]): java.util.function.ObjLongConsumer[T] = ((sf): AnyRef) match { + case FromJavaObjLongConsumer((f @ _)) => f.asInstanceOf[java.util.function.ObjLongConsumer[T]] + case _ => new AsJavaObjLongConsumer[T](sf.asInstanceOf[scala.Function2[T, Long, Unit]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromPredicate[T](jf: java.util.function.Predicate[T]): scala.Function1[T, java.lang.Boolean] = jf match { + case AsJavaPredicate((f @ _)) => f.asInstanceOf[scala.Function1[T, java.lang.Boolean]] + case _ => new FromJavaPredicate[T](jf).asInstanceOf[scala.Function1[T, java.lang.Boolean]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaPredicate[T](sf: scala.Function1[T, java.lang.Boolean]): java.util.function.Predicate[T] = ((sf): AnyRef) match { + case FromJavaPredicate((f @ _)) => f.asInstanceOf[java.util.function.Predicate[T]] + case _ => new AsJavaPredicate[T](sf.asInstanceOf[scala.Function1[T, Boolean]]) + } + + + @inline def asScalaFromSupplier[T](jf: java.util.function.Supplier[T]): scala.Function0[T] = jf match { + case AsJavaSupplier((f @ _)) => f.asInstanceOf[scala.Function0[T]] + case _ => new FromJavaSupplier[T](jf).asInstanceOf[scala.Function0[T]] + } + + @inline def asJavaSupplier[T](sf: scala.Function0[T]): java.util.function.Supplier[T] = ((sf): AnyRef) match { + case FromJavaSupplier((f @ _)) => f.asInstanceOf[java.util.function.Supplier[T]] + case _ => new AsJavaSupplier[T](sf.asInstanceOf[scala.Function0[T]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromToDoubleBiFunction[T, U](jf: java.util.function.ToDoubleBiFunction[T, U]): scala.Function2[T, U, java.lang.Double] = jf match { + case AsJavaToDoubleBiFunction((f @ _)) => f.asInstanceOf[scala.Function2[T, U, java.lang.Double]] + case _ => new FromJavaToDoubleBiFunction[T, U](jf).asInstanceOf[scala.Function2[T, U, java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaToDoubleBiFunction[T, U](sf: scala.Function2[T, U, java.lang.Double]): java.util.function.ToDoubleBiFunction[T, U] = ((sf): AnyRef) match { + case FromJavaToDoubleBiFunction((f @ _)) => f.asInstanceOf[java.util.function.ToDoubleBiFunction[T, U]] + case _ => new AsJavaToDoubleBiFunction[T, U](sf.asInstanceOf[scala.Function2[T, U, Double]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromToDoubleFunction[T](jf: java.util.function.ToDoubleFunction[T]): scala.Function1[T, java.lang.Double] = jf match { + case AsJavaToDoubleFunction((f @ _)) => f.asInstanceOf[scala.Function1[T, java.lang.Double]] + case _ => new FromJavaToDoubleFunction[T](jf).asInstanceOf[scala.Function1[T, java.lang.Double]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaToDoubleFunction[T](sf: scala.Function1[T, java.lang.Double]): java.util.function.ToDoubleFunction[T] = ((sf): AnyRef) match { + case FromJavaToDoubleFunction((f @ _)) => f.asInstanceOf[java.util.function.ToDoubleFunction[T]] + case _ => new AsJavaToDoubleFunction[T](sf.asInstanceOf[scala.Function1[T, Double]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromToIntBiFunction[T, U](jf: java.util.function.ToIntBiFunction[T, U]): scala.Function2[T, U, java.lang.Integer] = jf match { + case AsJavaToIntBiFunction((f @ _)) => f.asInstanceOf[scala.Function2[T, U, java.lang.Integer]] + case _ => new FromJavaToIntBiFunction[T, U](jf).asInstanceOf[scala.Function2[T, U, java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaToIntBiFunction[T, U](sf: scala.Function2[T, U, java.lang.Integer]): java.util.function.ToIntBiFunction[T, U] = ((sf): AnyRef) match { + case FromJavaToIntBiFunction((f @ _)) => f.asInstanceOf[java.util.function.ToIntBiFunction[T, U]] + case _ => new AsJavaToIntBiFunction[T, U](sf.asInstanceOf[scala.Function2[T, U, Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromToIntFunction[T](jf: java.util.function.ToIntFunction[T]): scala.Function1[T, java.lang.Integer] = jf match { + case AsJavaToIntFunction((f @ _)) => f.asInstanceOf[scala.Function1[T, java.lang.Integer]] + case _ => new FromJavaToIntFunction[T](jf).asInstanceOf[scala.Function1[T, java.lang.Integer]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaToIntFunction[T](sf: scala.Function1[T, java.lang.Integer]): java.util.function.ToIntFunction[T] = ((sf): AnyRef) match { + case FromJavaToIntFunction((f @ _)) => f.asInstanceOf[java.util.function.ToIntFunction[T]] + case _ => new AsJavaToIntFunction[T](sf.asInstanceOf[scala.Function1[T, Int]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromToLongBiFunction[T, U](jf: java.util.function.ToLongBiFunction[T, U]): scala.Function2[T, U, java.lang.Long] = jf match { + case AsJavaToLongBiFunction((f @ _)) => f.asInstanceOf[scala.Function2[T, U, java.lang.Long]] + case _ => new FromJavaToLongBiFunction[T, U](jf).asInstanceOf[scala.Function2[T, U, java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaToLongBiFunction[T, U](sf: scala.Function2[T, U, java.lang.Long]): java.util.function.ToLongBiFunction[T, U] = ((sf): AnyRef) match { + case FromJavaToLongBiFunction((f @ _)) => f.asInstanceOf[java.util.function.ToLongBiFunction[T, U]] + case _ => new AsJavaToLongBiFunction[T, U](sf.asInstanceOf[scala.Function2[T, U, Long]]) + } + + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. 
+ */ + @inline def asScalaFromToLongFunction[T](jf: java.util.function.ToLongFunction[T]): scala.Function1[T, java.lang.Long] = jf match { + case AsJavaToLongFunction((f @ _)) => f.asInstanceOf[scala.Function1[T, java.lang.Long]] + case _ => new FromJavaToLongFunction[T](jf).asInstanceOf[scala.Function1[T, java.lang.Long]] + } + + /** Note: this method uses the boxed type `java.lang.X` (or `BoxedUnit`) instead of the + * primitive type `scala.X` to improve compatibility when using it in Java code (the + * Scala compiler emits `C[Int]` as `C[Object]` in bytecode due to + * [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In Scala code, add + * `import scala.jdk.FunctionConverters._` and use the extension methods instead. + */ + @inline def asJavaToLongFunction[T](sf: scala.Function1[T, java.lang.Long]): java.util.function.ToLongFunction[T] = ((sf): AnyRef) match { + case FromJavaToLongFunction((f @ _)) => f.asInstanceOf[java.util.function.ToLongFunction[T]] + case _ => new AsJavaToLongFunction[T](sf.asInstanceOf[scala.Function1[T, Long]]) + } + + + @inline def asScalaFromUnaryOperator[T](jf: java.util.function.UnaryOperator[T]): scala.Function1[T, T] = jf match { + case AsJavaUnaryOperator((f @ _)) => f.asInstanceOf[scala.Function1[T, T]] + case _ => new FromJavaUnaryOperator[T](jf).asInstanceOf[scala.Function1[T, T]] + } + + @inline def asJavaUnaryOperator[T](sf: scala.Function1[T, T]): java.util.function.UnaryOperator[T] = ((sf): AnyRef) match { + case FromJavaUnaryOperator((f @ _)) => f.asInstanceOf[java.util.function.UnaryOperator[T]] + case _ => new AsJavaUnaryOperator[T](sf.asInstanceOf[scala.Function1[T, T]]) + } +} diff --git a/src/library/scala/jdk/javaapi/FutureConverters.scala b/src/library/scala/jdk/javaapi/FutureConverters.scala new file mode 100644 index 000000000000..d28a8da8a92e --- /dev/null +++ b/src/library/scala/jdk/javaapi/FutureConverters.scala @@ -0,0 +1,89 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and 
Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk.javaapi + +import java.util.concurrent.{CompletableFuture, CompletionStage} +import scala.concurrent.impl.FutureConvertersImpl.{CF, P} +import scala.concurrent.{ExecutionContext, Future} +import scala.util.Success + +/** This object contains methods that convert between Scala [[scala.concurrent.Future]] and Java [[java.util.concurrent.CompletionStage]]. + * + * The explicit conversion methods defined here are intended to be used in Java code. For Scala + * code, it is recommended to use the extension methods defined in [[scala.jdk.FutureConverters]]. + * + * Note that the bridge is implemented at the read-only side of asynchronous handles, namely + * [[scala.concurrent.Future]] (instead of [[scala.concurrent.Promise]]) and [[java.util.concurrent.CompletionStage]] (instead of + * [[java.util.concurrent.CompletableFuture]]). This is intentional, as the semantics of bridging + * the write-handles would be prone to race conditions; if both ends (`CompletableFuture` and + * `Promise`) are completed independently at the same time, they may contain different values + * afterwards. For this reason, `toCompletableFuture` is not supported on the created + * `CompletionStage`s. + */ +object FutureConverters { + /** Returns a [[java.util.concurrent.CompletionStage]] that will be completed with the same value or exception as the + * given Scala [[scala.concurrent.Future]] when that completes. Since the Future is a read-only representation, + * this CompletionStage does not support the `toCompletableFuture` method. 
+ * + * The semantics of Scala Future demand that all callbacks are invoked asynchronously by default, + * therefore the returned CompletionStage routes all calls to synchronous transformations to + * their asynchronous counterparts, i.e., `thenRun` will internally call `thenRunAsync`. + * + * @param f The Scala Future which may eventually supply the completion for the returned + * CompletionStage + * @return a CompletionStage that runs all callbacks asynchronously and does not support the + * CompletableFuture interface + */ + def asJava[T](f: Future[T]): CompletionStage[T] = { + f match { + case p: P[T] => p.wrapped + // in theory not safe (could be `class C extends Future[A] with CompletionStage[B]`): + case c: CompletionStage[T @unchecked] => c + case _ => + val cf = new CF[T](f) + f.onComplete(cf)(ExecutionContext.parasitic) + cf + } + } + + /** Returns a Scala [[scala.concurrent.Future]] that will be completed with the same value or exception as the + * given [[java.util.concurrent.CompletionStage]] when that completes. Transformations of the returned Future are + * executed asynchronously as specified by the ExecutionContext that is given to the combinator + * methods. 
+ * + * @param cs The CompletionStage which may eventually supply the completion for the returned + * Scala Future + * @return a Scala Future that represents the CompletionStage's completion + */ + def asScala[T](cs: CompletionStage[T]): Future[T] = { + cs match { + case cf: CF[T] => cf.wrapped + // in theory not safe (could be `class C extends Future[A] with CompletionStage[B]`): + case f: Future[T @unchecked] => f + case _ => + val p = new P[T](cs) + val completedCF = cs match { + case cf0: CompletableFuture[T @unchecked] => + // drop `MinimalStage` (scala/bug#12918) + val cf = cf0.toCompletableFuture + if (cf.isDone && !cf.isCompletedExceptionally) cf else null + case _ => null + } + if (completedCF != null) + p.tryComplete(Success(completedCF.join())) + else + cs.handle(p) + p.future + } + } +} diff --git a/src/library/scala/jdk/javaapi/OptionConverters.scala b/src/library/scala/jdk/javaapi/OptionConverters.scala new file mode 100644 index 000000000000..27ae7b4e6060 --- /dev/null +++ b/src/library/scala/jdk/javaapi/OptionConverters.scala @@ -0,0 +1,84 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk.javaapi + +import java.util.{Optional, OptionalDouble, OptionalInt, OptionalLong} +import java.{lang => jl} + +/** This object contains methods that convert between Scala `Option` and Java `Optional` types. + * + * The explicit conversion methods defined here are intended to be used in Java code. For Scala + * code, it is recommended to use the extension methods defined in [[scala.jdk.OptionConverters]]. 
+ * + * @define primitiveNote Note: this method uses the boxed type `java.lang.X` instead of the + * primitive type `scala.X` to improve compatibility when using it in + * Java code (the Scala compiler emits `C[Int]` as `C[Object]` in bytecode + * due to [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In + * Scala code, add `import scala.jdk.OptionConverters._` and use the + * extension methods instead. + */ +object OptionConverters { + /** Convert a Scala `Option` to a Java `Optional` */ + def toJava[A](o: Option[A]): Optional[A] = o match { + case Some(a) => Optional.ofNullable(a) + case _ => Optional.empty[A] + } + + /** Convert a Scala `Option[java.lang.Double]` to a Java `OptionalDouble` + * + * $primitiveNote + */ + def toJavaOptionalDouble(o: Option[jl.Double]): OptionalDouble = o match { + case Some(a) => OptionalDouble.of(a) + case _ => OptionalDouble.empty + } + + /** Convert a Scala `Option[java.lang.Integer]` to a Java `OptionalInt` + * + * $primitiveNote + */ + def toJavaOptionalInt(o: Option[jl.Integer]): OptionalInt = o match { + case Some(a) => OptionalInt.of(a) + case _ => OptionalInt.empty + } + + /** Convert a Scala `Option[java.lang.Long]` to a Java `OptionalLong` + * + * $primitiveNote + */ + def toJavaOptionalLong(o: Option[jl.Long]): OptionalLong = o match { + case Some(a) => OptionalLong.of(a) + case _ => OptionalLong.empty + } + + /** Convert a Java `Optional` to a Scala `Option` */ + def toScala[A](o: Optional[A]): Option[A] = if (o.isPresent) Some(o.get) else None + + /** Convert a Java `OptionalDouble` to a Scala `Option[java.lang.Double]` + * + * $primitiveNote + */ + def toScala(o: OptionalDouble): Option[jl.Double] = if (o.isPresent) Some(o.getAsDouble) else None + + /** Convert a Java `OptionalInt` to a Scala `Option[java.lang.Integer]` + * + * $primitiveNote + */ + def toScala(o: OptionalInt): Option[jl.Integer] = if (o.isPresent) Some(o.getAsInt) else None + + /** Convert a Java `OptionalLong` to a Scala 
`Option[java.lang.Long]` + * + * $primitiveNote + */ + def toScala(o: OptionalLong): Option[jl.Long] = if (o.isPresent) Some(o.getAsLong) else None +} diff --git a/src/library/scala/jdk/javaapi/StreamConverters.scala b/src/library/scala/jdk/javaapi/StreamConverters.scala new file mode 100644 index 000000000000..d5adeb84ab71 --- /dev/null +++ b/src/library/scala/jdk/javaapi/StreamConverters.scala @@ -0,0 +1,356 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.jdk.javaapi + +import java.util.stream.{DoubleStream, IntStream, LongStream, Stream, StreamSupport} +import java.{lang => jl} + +/** This object contains methods to create Java Streams that operate on Scala collections + * (sequentially or in parallel). For more information on Java streams, consult the documentation + * ([[https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html]]). + * + * The explicit conversion methods defined here are intended to be used in Java code. For Scala + * code, it is recommended to use the extension methods defined in [[scala.jdk.StreamConverters]]. + * + * Note: to convert between Scala collections and classic Java collections, use + * [[CollectionConverters]]. + * + * For details how the stream converters work, see [[scala.jdk.StreamConverters]]. + * + * @define parNote Note: parallel processing is only efficient for collections that have a + * [[scala.collection.Stepper]] implementation which supports efficient splitting. For collections + * where this is the case, the [[scala.collection.IterableOnce.stepper `stepper`]] + * method has a return type marked `with EfficientSplit`. 
+ * + * @define primitiveNote Note: this method uses the boxed type `java.lang.X` instead of the + * primitive type `scala.X` to improve compatibility when using it in + * Java code (the Scala compiler emits `C[Int]` as `C[Object]` in bytecode + * due to [[https://github.com/scala/bug/issues/4214 scala/bug#4214]]). In + * Scala code, add `import scala.jdk.StreamConverters._` and use the + * extension methods instead. + */ +object StreamConverters { + ///////////////////////////////////// + // sequential streams for collections + ///////////////////////////////////// + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for a Scala collection. */ + def asJavaSeqStream[A](cc: IterableOnce[A]): Stream[A] = StreamSupport.stream(cc.stepper.spliterator, false) + + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $primitiveNote + */ + def asJavaSeqIntStream (cc: IterableOnce[jl.Integer]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, false) + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $primitiveNote + */ + def asJavaSeqIntStreamFromByte (cc: IterableOnce[jl.Byte]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, false) + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $primitiveNote + */ + def asJavaSeqIntStreamFromShort(cc: IterableOnce[jl.Short]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, false) + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $primitiveNote + */ + def asJavaSeqIntStreamFromChar (cc: IterableOnce[jl.Character]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, false) + + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for a Scala collection. 
+ * + * $primitiveNote + */ + def asJavaSeqDoubleStream (cc: IterableOnce[jl.Double]): DoubleStream = StreamSupport.doubleStream(cc.stepper.spliterator, false) + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for a Scala collection. + * + * $primitiveNote + */ + def asJavaSeqDoubleStreamFromFloat(cc: IterableOnce[jl.Float]): DoubleStream = StreamSupport.doubleStream(cc.stepper.spliterator, false) + + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for a Scala collection. + * + * $primitiveNote + */ + def asJavaSeqLongStream(cc: IterableOnce[jl.Long]): LongStream = StreamSupport.longStream(cc.stepper.spliterator, false) + + // Map Key Streams + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of a Scala Map. */ + def asJavaSeqKeyStream[K, V](m: collection.Map[K, V]): Stream[K] = StreamSupport.stream(m.keyStepper.spliterator, false) + + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $primitiveNote + */ + def asJavaSeqKeyIntStream [V](m: collection.Map[jl.Integer, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, false) + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $primitiveNote + */ + def asJavaSeqKeyIntStreamFromByte [V](m: collection.Map[jl.Byte, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, false) + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $primitiveNote + */ + def asJavaSeqKeyIntStreamFromShort[V](m: collection.Map[jl.Short, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, false) + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. 
+ * + * $primitiveNote + */ + def asJavaSeqKeyIntStreamFromChar [V](m: collection.Map[jl.Character, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, false) + + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for the keys of a Scala Map. + * + * $primitiveNote + */ + def asJavaSeqKeyDoubleStream [V](m: collection.Map[jl.Double, V]): DoubleStream = StreamSupport.doubleStream(m.keyStepper.spliterator, false) + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for the keys of a Scala Map. + * + * $primitiveNote + */ + def asJavaSeqKeyDoubleStreamFromFloat[V](m: collection.Map[jl.Float, V]): DoubleStream = StreamSupport.doubleStream(m.keyStepper.spliterator, false) + + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for the keys of a Scala Map. + * + * $primitiveNote + */ + def asJavaSeqKeyLongStream[V](m: collection.Map[jl.Long, V]): LongStream = StreamSupport.longStream(m.keyStepper.spliterator, false) + + // Map Value Streams + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of a Scala Map. 
 */
+  def asJavaSeqValueStream[K, V](m: collection.Map[K, V]): Stream[V] = StreamSupport.stream(m.valueStepper.spliterator, false)
+
+  /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueIntStream [K](m: collection.Map[K, jl.Integer]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, false)
+  /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueIntStreamFromByte [K](m: collection.Map[K, jl.Byte]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, false)
+  /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueIntStreamFromShort[K](m: collection.Map[K, jl.Short]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, false)
+  /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueIntStreamFromChar [K](m: collection.Map[K, jl.Character]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, false)
+
+  /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueDoubleStream [K](m: collection.Map[K, jl.Double]): DoubleStream = StreamSupport.doubleStream(m.valueStepper.spliterator, false)
+  /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueDoubleStreamFromFloat[K](m: collection.Map[K, jl.Float]): DoubleStream = StreamSupport.doubleStream(m.valueStepper.spliterator, false)
+
+  /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for the values of a Scala Map.
+   *
+   * $primitiveNote
+   */
+  def asJavaSeqValueLongStream[K](m: collection.Map[K, jl.Long]): LongStream =
StreamSupport.longStream(m.valueStepper.spliterator, false) + + /////////////////////////////////// + // parallel streams for collections + /////////////////////////////////// + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for a Scala collection. + * + * $parNote + */ + def asJavaParStream[A](cc: IterableOnce[A]): Stream[A] = StreamSupport.stream(cc.stepper.spliterator, true) + + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParIntStream (cc: IterableOnce[jl.Integer]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParIntStreamFromByte (cc: IterableOnce[jl.Byte]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParIntStreamFromShort(cc: IterableOnce[jl.Short]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for a Scala collection. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParIntStreamFromChar (cc: IterableOnce[jl.Character]): IntStream = StreamSupport.intStream(cc.stepper.spliterator, true) + + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for a Scala collection. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParDoubleStream (cc: IterableOnce[jl.Double]): DoubleStream = StreamSupport.doubleStream(cc.stepper.spliterator, true) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for a Scala collection. 
+ * + * $parNote + * + * $primitiveNote + */ + def asJavaParDoubleStreamFromFloat(cc: IterableOnce[jl.Float]): DoubleStream = StreamSupport.doubleStream(cc.stepper.spliterator, true) + + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for a Scala collection. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParLongStream(cc: IterableOnce[jl.Long]): LongStream = StreamSupport.longStream(cc.stepper.spliterator, true) + + + // Map Key Streams + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of a Scala Map. + * + * $parNote + */ + def asJavaParKeyStream[K, V](m: collection.Map[K, V]): Stream[K] = StreamSupport.stream(m.keyStepper.spliterator, true) + + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyIntStream [V](m: collection.Map[jl.Integer, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyIntStreamFromByte [V](m: collection.Map[jl.Byte, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyIntStreamFromShort[V](m: collection.Map[jl.Short, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the keys of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyIntStreamFromChar [V](m: collection.Map[jl.Character, V]): IntStream = StreamSupport.intStream(m.keyStepper.spliterator, true) + + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for the keys of a Scala Map. 
+ * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyDoubleStream [V](m: collection.Map[jl.Double, V]): DoubleStream = StreamSupport.doubleStream(m.keyStepper.spliterator, true) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for the keys of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyDoubleStreamFromFloat[V](m: collection.Map[jl.Float, V]): DoubleStream = StreamSupport.doubleStream(m.keyStepper.spliterator, true) + + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for the keys of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParKeyLongStream[V](m: collection.Map[jl.Long, V]): LongStream = StreamSupport.longStream(m.keyStepper.spliterator, true) + + // Map Value Streams + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of a Scala Map. + * + * $parNote + */ + def asJavaParValueStream[K, V](m: collection.Map[K, V]): Stream[V] = StreamSupport.stream(m.valueStepper.spliterator, true) + + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueIntStream [K](m: collection.Map[K, jl.Integer]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueIntStreamFromByte [K](m: collection.Map[K, jl.Byte]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map. 
+ * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueIntStreamFromShort[K](m: collection.Map[K, jl.Short]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, true) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for the values of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueIntStreamFromChar [K](m: collection.Map[K, jl.Character]): IntStream = StreamSupport.intStream(m.valueStepper.spliterator, true) + + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for the values of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueDoubleStream [K](m: collection.Map[K, jl.Double]): DoubleStream = StreamSupport.doubleStream(m.valueStepper.spliterator, true) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for the values of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueDoubleStreamFromFloat[K](m: collection.Map[K, jl.Float]): DoubleStream = StreamSupport.doubleStream(m.valueStepper.spliterator, true) + + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for the values of a Scala Map. + * + * $parNote + * + * $primitiveNote + */ + def asJavaParValueLongStream[K](m: collection.Map[K, jl.Long]): LongStream = StreamSupport.longStream(m.valueStepper.spliterator, true) +} diff --git a/src/library/scala/jdk/package.scala b/src/library/scala/jdk/package.scala new file mode 100644 index 000000000000..386a6886cefd --- /dev/null +++ b/src/library/scala/jdk/package.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +/** The jdk package contains utilities to interact with JDK classes. 
+ *
+ * This package offers a number of converters that are able to wrap or copy
+ * types from the scala library to equivalent types in the JDK class library
+ * and vice versa:
+ *
+ *  - [[CollectionConverters]], converting collections like [[scala.collection.Seq]],
+ *    [[scala.collection.Map]], [[scala.collection.Set]],
+ *    [[scala.collection.mutable.Buffer]], [[scala.collection.Iterator]]
+ *    and [[scala.collection.Iterable]] to their JDK counterparts
+ *  - [[OptionConverters]], converting between [[Option]] and
+ *    [[java.util.Optional]] and primitive variations
+ *  - [[StreamConverters]], to create JDK Streams from scala collections
+ *  - [[DurationConverters]], for conversions between scala
+ *    [[scala.concurrent.duration.FiniteDuration]] and [[java.time.Duration]]
+ *  - [[FunctionConverters]], from scala Functions to java
+ *    [[java.util.function.Function]], [[java.util.function.UnaryOperator]],
+ *    [[java.util.function.Consumer]] and [[java.util.function.Predicate]], as
+ *    well as primitive variations and Bi-variations.
+ *
+ * By convention, converters that wrap an object to provide a different
+ * interface to the same underlying data structure use .asScala and .asJava
+ * extension methods, whereas converters that copy the underlying data structure
+ * use .toScala and .toJava.
+ *
+ * In the [[javaapi]] package, the same converters can be found with a
+ * java-friendly interface that doesn't rely on implicit enrichments.
+ *
+ * Additionally, this package offers [[Accumulator]]s, capable of efficiently
+ * traversing JDK Streams.
+ **/
+package object jdk
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index b81f75a0dc4e..d0359b8ac55f 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -1,7 +1,7 @@
 /*
  * Scala (https://www.scala-lang.org)
  *
- * Copyright EPFL and Lightbend, Inc.
+ * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,11 +24,11 @@ package scala * * The language features are: * - [[dynamics `dynamics`]] enables defining calls rewriting using the [[scala.Dynamic `Dynamic`]] trait - * - [[postfixOps `postfixOps`]] enables postfix operators - * - [[reflectiveCalls `reflectiveCalls`]] enables using structural types - * - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members - * - [[higherKinds `higherKinds`]] enables writing higher-kinded types * - [[existentials `existentials`]] enables writing existential types + * - [[higherKinds `higherKinds`]] enables writing higher-kinded types + * - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members + * - [[postfixOps `postfixOps`]] enables postfix operators (not recommended) + * - [[reflectiveCalls `reflectiveCalls`]] enables using structural types * - [[experimental `experimental`]] contains newer features that have not yet been tested in production * * @groupname production Language Features @@ -39,10 +39,11 @@ object language { import languageFeature._ - /** Where enabled, direct or indirect subclasses of trait scala.Dynamic can - * be defined. Unless dynamics is enabled, a definition of a class, trait, - * or object that has Dynamic as a base trait is rejected. Dynamic member - * selection of existing subclasses of trait Dynamic are unaffected; + /** Only where this feature is enabled, can direct or indirect subclasses of trait scala.Dynamic + * be defined. If `dynamics` is not enabled, a definition of a class, trait, + * or object that has `Dynamic` as a base trait is rejected by the compiler. + * + * Selections of dynamic members of existing subclasses of trait `Dynamic` are unaffected; * they can be used anywhere. 
* * '''Why introduce the feature?''' To enable flexible DSLs and convenient interfacing @@ -56,19 +57,28 @@ object language { */ implicit lazy val dynamics: dynamics = languageFeature.dynamics - /** Only where enabled, postfix operator notation `(expr op)` will be allowed. + /** Only where this feature is enabled, is postfix operator notation `(expr op)` permitted. + * If `postfixOps` is not enabled, an expression using postfix notation is rejected by the compiler. * - * '''Why keep the feature?''' Several DSLs written in Scala need the notation. + * '''Why keep the feature?''' Postfix notation is preserved for backward + * compatibility only. Historically, several DSLs written in Scala need the notation. * * '''Why control it?''' Postfix operators interact poorly with semicolon inference. - * Most programmers avoid them for this reason. + * Most programmers avoid them for this reason alone. Postfix syntax is + * associated with an abuse of infix notation, `a op1 b op2 c op3`, + * that can be harder to read than ordinary method invocation with judicious + * use of parentheses. It is recommended not to enable this feature except for + * legacy code. * * @group production */ implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps - /** Only where enabled, accesses to members of structural types that need - * reflection are supported. Reminder: A structural type is a type of the form + /** Where this feature is enabled, accesses to members of structural types that need + * reflection are supported. If `reflectiveCalls` is not enabled, an expression + * requiring reflection will trigger a warning from the compiler. + * + * A structural type is a type of the form * `Parents { Decls }` where `Decls` contains declarations of new members that do * not override any member in `Parents`. To access one of these members, a * reflective call is needed. 
@@ -85,19 +95,29 @@ object language { */ implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls - /** Only where enabled, definitions of implicit conversions are allowed. An - * implicit conversion is an implicit value of unary function type `A => B`, + /** Where this feature is enabled, definitions of implicit conversion methods are allowed. + * If `implicitConversions` is not enabled, the definition of an implicit + * conversion method will trigger a warning from the compiler. + * + * An implicit conversion is an implicit value of unary function type `A => B`, * or an implicit method that has in its first parameter section a single, * non-implicit parameter. Examples: * * {{{ - * implicit def stringToInt(s: String): Int = s.length - * implicit val conv = (s: String) => s.length - * implicit def listToX(xs: List[T])(implicit f: T => X): X = ... + * implicit def intToString(i: Int): String = s"\$i" + * implicit val conv: Int => String = i => s"\$i" + * implicit val numerals: List[String] = List("zero", "one", "two", "three") + * implicit val strlen: String => Int = _.length + * implicit def listToInt[T](xs: List[T])(implicit f: T => Int): Int = xs.map(f).sum * }}} * - * implicit values of other types are not affected, and neither are implicit - * classes. + * This language feature warns only for implicit conversions introduced by methods. + * + * Other values, including functions or data types which extend `Function1`, + * such as `Map`, `Set`, and `List`, do not warn. + * + * Implicit class definitions, which introduce a conversion to the wrapping class, + * also do not warn. * * '''Why keep the feature?''' Implicit conversions are central to many aspects * of Scala’s core libraries. @@ -112,7 +132,9 @@ object language { */ implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions - /** Only where this flag is enabled, higher-kinded types can be written. 
+ /** Where this feature is enabled, higher-kinded types can be written. + * If `higherKinds` is not enabled, a higher-kinded type such as `F[A]` + * will trigger a warning from the compiler. * * '''Why keep the feature?''' Higher-kinded types enable the definition of very general * abstractions such as functor, monad, or arrow. A significant set of advanced @@ -133,11 +155,15 @@ object language { * * @group production */ + @deprecated("higherKinds no longer needs to be imported explicitly", "2.13.1") implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds - /** Only where enabled, existential types that cannot be expressed as wildcard + /** Where this feature is enabled, existential types that cannot be expressed as wildcard * types can be written and are allowed in inferred types of values or return - * types of methods. Existential types with wildcard type syntax such as `List[_]`, + * types of methods. If `existentials` is not enabled, those cases will trigger + * a warning from the compiler. + * + * Existential types with wildcard type syntax such as `List[_]`, * or `Map[String, _]` are not affected. * * '''Why keep the feature?''' Existential types are needed to make sense of Java’s wildcard @@ -153,8 +179,8 @@ object language { */ implicit lazy val existentials: existentials = languageFeature.existentials - /** The experimental object contains features that have been recently added but have not - * been thoroughly tested in production yet. + /** The experimental object contains features that are known to have unstable API or + * behavior that may change in future releases. * * Experimental features '''may undergo API changes''' in future releases, so production * code should not rely on them. @@ -169,8 +195,11 @@ object language { import languageFeature.experimental._ - /** Where enabled, macro definitions are allowed. Macro implementations and - * macro applications are unaffected; they can be used anywhere. 
+ /** Only where this feature is enabled, are macro definitions allowed. + * If `macros` is not enabled, macro definitions are rejected by the compiler. + * + * Macro implementations and macro applications are not governed by this + * language feature; they can be used anywhere. * * '''Why introduce the feature?''' Macros promise to make the language more regular, * replacing ad-hoc language constructs with a general powerful abstraction diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala index 891f0d7d19d1..236774c990ba 100644 --- a/src/library/scala/languageFeature.scala +++ b/src/library/scala/languageFeature.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,7 +20,7 @@ object languageFeature { sealed trait dynamics object dynamics extends dynamics - @meta.languageFeature("postfix operator #", enableRequired = false) + @meta.languageFeature("postfix operator #", enableRequired = true) sealed trait postfixOps object postfixOps extends postfixOps @@ -32,11 +32,13 @@ object languageFeature { sealed trait implicitConversions object implicitConversions extends implicitConversions + @deprecated("scala.language.higherKinds no longer needs to be imported explicitly", "2.13.1") @meta.languageFeature("higher-kinded type", enableRequired = false) sealed trait higherKinds + @deprecated("scala.language.higherKinds no longer needs to be imported explicitly", "2.13.1") object higherKinds extends higherKinds - @meta.languageFeature("#, which cannot be expressed by wildcards, ", enableRequired = false) + @meta.languageFeature("#, which cannot be expressed by wildcards,", enableRequired = false) sealed trait existentials object existentials extends existentials diff --git a/src/library/scala/math/BigDecimal.scala 
b/src/library/scala/math/BigDecimal.scala index 5320f1801211..e70cdbab41e4 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,37 +15,35 @@ package math import scala.language.implicitConversions -import java.math.{ MathContext, BigDecimal => BigDec } +import java.math.{ + BigDecimal => BigDec, + MathContext, + RoundingMode => JRM, +} import scala.collection.immutable.NumericRange -/** - * @author Stephane Micheloud - * @author Rex Kerr - * @since 2.7 - */ object BigDecimal { private final val maximumHashScale = 4934 // Quit maintaining hash identity with BigInt beyond this scale private final val hashCodeNotComputed = 0x5D50690F // Magic value (happens to be "BigDecimal" old MurmurHash3 value) private final val deci2binary = 3.3219280948873626 // Ratio of log(10) to log(2) - private val minCached = -512 - private val maxCached = 512 + private[this] val minCached = -512 + private[this] val maxCached = 512 val defaultMathContext = MathContext.DECIMAL128 /** Cache only for defaultMathContext using BigDecimals in a small range. 
*/ - private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1) + private[this] lazy val cache = new Array[BigDecimal](maxCached - minCached + 1) object RoundingMode extends Enumeration { // Annoying boilerplate to ensure consistency with java.math.RoundingMode - import java.math.{RoundingMode => RM} type RoundingMode = Value - val UP = Value(RM.UP.ordinal) - val DOWN = Value(RM.DOWN.ordinal) - val CEILING = Value(RM.CEILING.ordinal) - val FLOOR = Value(RM.FLOOR.ordinal) - val HALF_UP = Value(RM.HALF_UP.ordinal) - val HALF_DOWN = Value(RM.HALF_DOWN.ordinal) - val HALF_EVEN = Value(RM.HALF_EVEN.ordinal) - val UNNECESSARY = Value(RM.UNNECESSARY.ordinal) + val UP = Value(JRM.UP.ordinal) + val DOWN = Value(JRM.DOWN.ordinal) + val CEILING = Value(JRM.CEILING.ordinal) + val FLOOR = Value(JRM.FLOOR.ordinal) + val HALF_UP = Value(JRM.HALF_UP.ordinal) + val HALF_DOWN = Value(JRM.HALF_DOWN.ordinal) + val HALF_EVEN = Value(JRM.HALF_EVEN.ordinal) + val UNNECESSARY = Value(JRM.UNNECESSARY.ordinal) } /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */ @@ -128,7 +126,7 @@ object BigDecimal { /** Constructs a `BigDecimal` that exactly represents the number * specified in base 10 in a character array. */ - def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs)) + def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs)) /** Constructs a `BigDecimal` using the java BigDecimal static @@ -139,20 +137,6 @@ object BigDecimal { */ def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d) - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor, specifying a `MathContext` that is - * used for computations but isn't used for rounding. Use - * `BigDecimal.decimal` to use `MathContext` for rounding, - * or `BigDecimal(java.math.BigDecimal.valueOf(d), mc)` for - * no rounding. 
- * - * @param d the specified double value - * @param mc the `MathContext` used for future computations - * @return the constructed `BigDecimal` - */ - @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.", "2.11.0") - def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc) - /** Constructs a `BigDecimal` using the java BigDecimal static * valueOf constructor. * @@ -161,21 +145,6 @@ object BigDecimal { */ def valueOf(x: Long): BigDecimal = apply(x) - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. This is unlikely to do what you want; - * use `valueOf(f.toDouble)` or `decimal(f)` instead. - */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0") - def valueOf(f: Float): BigDecimal = valueOf(f.toDouble) - - /** Constructs a `BigDecimal` using the java BigDecimal static - * valueOf constructor. This is unlikely to do what you want; - * use `valueOf(f.toDouble)` or `decimal(f)` instead. - */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0") - def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc) - - /** Constructs a `BigDecimal` whose value is equal to that of the * specified `Integer` value. * @@ -261,12 +230,6 @@ object BigDecimal { */ def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc) - @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0") - def apply(x: Float): BigDecimal = apply(x.toDouble) - - @deprecated("The default conversion from Float may not do what you want. 
Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0") - def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc) - /** Translates a character array representation of a `BigDecimal` * into a `BigDecimal`. */ @@ -329,10 +292,7 @@ object BigDecimal { new BigDecimal(new BigDec(unscaledVal.bigInteger, scale, mc), mc) /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */ - def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext) - - @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11.0") - def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc) + def apply(bd: BigDec): BigDecimal = new BigDecimal(bd, defaultMathContext) /** Implicit conversion from `Int` to `BigDecimal`. */ implicit def int2bigDecimal(i: Int): BigDecimal = apply(i) @@ -393,9 +353,6 @@ object BigDecimal { * and powers. The left-hand argument's `MathContext` always determines the * degree of rounding, if any, and is the one propagated through arithmetic * operations that do not apply rounding themselves. - * - * @author Stephane Micheloud - * @author Rex Kerr */ final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext) extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigDecimal] { @@ -435,7 +392,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ * with large exponents. 
*/ override def hashCode(): Int = { - if (computedHashCode == BigDecimal.hashCodeNotComputed) computeHashCode + if (computedHashCode == BigDecimal.hashCodeNotComputed) computeHashCode() computedHashCode } @@ -464,24 +421,6 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ override def isValidChar = isValidInt && toIntExact >= Char.MinValue && toIntExact <= Char.MaxValue override def isValidInt = noArithmeticException(toIntExact) def isValidLong = noArithmeticException(toLongExact) - /** Tests whether the value is a valid Float. "Valid" has several distinct meanings, however. Use - * `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat`, depending on the intended meaning. - * By default, `decimal` creation is used, so `isDecimalFloat` is probably what you want. - */ - @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11.0") - def isValidFloat = { - val f = toFloat - !f.isInfinity && bigDecimal.compareTo(new BigDec(f.toDouble)) == 0 - } - /** Tests whether the value is a valid Double. "Valid" has several distinct meanings, however. Use - * `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning. - * By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want. - */ - @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11.0") - def isValidDouble = { - val d = toDouble - !d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0 - } /** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. 
*/ def isDecimalDouble = { @@ -525,9 +464,9 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ catch { case _: ArithmeticException => false } } - def isWhole() = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0 + def isWhole = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0 - def underlying = bigDecimal + def underlying: java.math.BigDecimal = bigDecimal /** Compares this BigDecimal with the specified BigDecimal for equality. @@ -540,11 +479,11 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ /** Addition of BigDecimals */ - def + (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal add that.bigDecimal, mc) + def + (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.add(that.bigDecimal, mc), mc) /** Subtraction of BigDecimals */ - def - (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal subtract that.bigDecimal, mc) + def - (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.subtract(that.bigDecimal, mc), mc) /** Multiplication of BigDecimals */ @@ -557,15 +496,15 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ /** Division and Remainder - returns tuple containing the result of * divideToIntegralValue and the remainder. The computation is exact: no rounding is applied. */ - def /% (that: BigDecimal): (BigDecimal, BigDecimal) = - this.bigDecimal.divideAndRemainder(that.bigDecimal) match { - case Array(q, r) => (new BigDecimal(q, mc), new BigDecimal(r, mc)) - } + def /% (that: BigDecimal): (BigDecimal, BigDecimal) = { + val qr = this.bigDecimal.divideAndRemainder(that.bigDecimal, mc) + (new BigDecimal(qr(0), mc), new BigDecimal(qr(1), mc)) + } /** Divide to Integral value. 
*/ def quot (that: BigDecimal): BigDecimal = - new BigDecimal(this.bigDecimal divideToIntegralValue that.bigDecimal, mc) + new BigDecimal(this.bigDecimal.divideToIntegralValue(that.bigDecimal, mc), mc) /** Returns the minimum of this and that, or this if the two are equal */ @@ -583,11 +522,11 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ /** Remainder after dividing this by that. */ - def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal remainder that.bigDecimal, mc) + def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.remainder(that.bigDecimal, mc), mc) /** Remainder after dividing this by that. */ - def % (that: BigDecimal): BigDecimal = this remainder that + def % (that: BigDecimal): BigDecimal = this.remainder(that) /** Returns a BigDecimal whose value is this ** n. */ @@ -595,7 +534,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ /** Returns a BigDecimal whose value is the negation of this BigDecimal */ - def unary_- : BigDecimal = new BigDecimal(this.bigDecimal.negate(), mc) + def unary_- : BigDecimal = new BigDecimal(this.bigDecimal.negate(mc), mc) /** Returns the absolute value of this BigDecimal */ @@ -608,9 +547,16 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ */ def signum: Int = this.bigDecimal.signum() + /** Returns the sign of this BigDecimal; + * -1 if it is less than 0, + * +1 if it is greater than 0, + * 0 if it is equal to 0. + */ + def sign: BigDecimal = signum + /** Returns the precision of this `BigDecimal`. */ - def precision: Int = this.bigDecimal.precision() + def precision: Int = this.bigDecimal.precision /** Returns a BigDecimal rounded according to the supplied MathContext settings, but * preserving its own MathContext for future operations. 
@@ -628,7 +574,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ /** Returns the scale of this `BigDecimal`. */ - def scale: Int = this.bigDecimal.scale() + def scale: Int = this.bigDecimal.scale /** Returns the size of an ulp, a unit in the last place, of this BigDecimal. */ @@ -643,11 +589,11 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ */ def setScale(scale: Int): BigDecimal = if (this.scale == scale) this - else new BigDecimal(this.bigDecimal setScale scale, mc) + else new BigDecimal(this.bigDecimal.setScale(scale), mc) def setScale(scale: Int, mode: RoundingMode): BigDecimal = if (this.scale == scale) this - else new BigDecimal(this.bigDecimal.setScale(scale, mode.id), mc) + else new BigDecimal(this.bigDecimal.setScale(scale, JRM.valueOf(mode.id)), mc) /** Converts this BigDecimal to a Byte. * If the BigDecimal is too big to fit in a Byte, only the low-order 8 bits are returned. @@ -745,7 +691,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ new Range.Partial(until(end, _)) /** Same as the one-argument `until`, but creates the range immediately. */ - def until(end: BigDecimal, step: BigDecimal) = Range.BigDecimal(this, end, step) + def until(end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = Range.BigDecimal(this, end, step) /** Like `until`, but inclusive of the end value. */ def to(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Inclusive[BigDecimal]] = @@ -756,20 +702,20 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ /** Converts this `BigDecimal` to a scala.BigInt. */ - def toBigInt(): BigInt = new BigInt(this.bigDecimal.toBigInteger()) + def toBigInt: BigInt = new BigInt(this.bigDecimal.toBigInteger) /** Converts this `BigDecimal` to a scala.BigInt if it * can be done losslessly, returning Some(BigInt) or None. 
*/ - def toBigIntExact(): Option[BigInt] = - if (isWhole()) { - try Some(new BigInt(this.bigDecimal.toBigIntegerExact())) + def toBigIntExact: Option[BigInt] = + if (isWhole) { + try Some(new BigInt(this.bigDecimal.toBigIntegerExact)) catch { case _: ArithmeticException => None } } else None /** Returns the decimal String representation of this BigDecimal. */ - override def toString(): String = this.bigDecimal.toString() + override def toString: String = this.bigDecimal.toString } diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index fb73eb3129dd..9a17ee02a51a 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,17 +14,30 @@ package scala package math import java.math.BigInteger + +import scala.annotation.nowarn import scala.language.implicitConversions +import scala.collection.immutable.NumericRange -/** - * @author Martin Odersky - * @since 2.1 - */ object BigInt { - private val minCached = -1024 - private val maxCached = 1024 - private val cache = new Array[BigInt](maxCached - minCached + 1) + private val longMinValueBigInteger = BigInteger.valueOf(Long.MinValue) + private val longMinValue = new BigInt(longMinValueBigInteger, Long.MinValue) + + private[this] val minCached = -1024 + private[this] val maxCached = 1024 + private[this] val cache = new Array[BigInt](maxCached - minCached + 1) + + private[this] def getCached(i: Int): BigInt = { + val offset = i - minCached + var n = cache(offset) + if (n eq null) { + n = new BigInt(null, i.toLong) + cache(offset) = n + } + n + } + private val minusOne = BigInteger.valueOf(-1) /** Constructs a `BigInt` whose value is equal to that of the @@ -34,12 +47,7 @@ object BigInt { * @return the constructed `BigInt` */ 
def apply(i: Int): BigInt = - if (minCached <= i && i <= maxCached) { - val offset = i - minCached - var n = cache(offset) - if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n } - n - } else new BigInt(BigInteger.valueOf(i.toLong)) + if (minCached <= i && i <= maxCached) getCached(i) else apply(i: Long) /** Constructs a `BigInt` whose value is equal to that of the * specified long value. @@ -48,52 +56,62 @@ object BigInt { * @return the constructed `BigInt` */ def apply(l: Long): BigInt = - if (minCached <= l && l <= maxCached) apply(l.toInt) - else new BigInt(BigInteger.valueOf(l)) + if (minCached <= l && l <= maxCached) getCached(l.toInt) + else if (l == Long.MinValue) longMinValue + else new BigInt(null, l) /** Translates a byte array containing the two's-complement binary * representation of a BigInt into a BigInt. */ def apply(x: Array[Byte]): BigInt = - new BigInt(new BigInteger(x)) + apply(new BigInteger(x)) /** Translates the sign-magnitude representation of a BigInt into a BigInt. + * + * @param signum signum of the number (-1 for negative, 0 for zero, 1 + * for positive). + * @param magnitude big-endian binary representation of the magnitude of + * the number. */ def apply(signum: Int, magnitude: Array[Byte]): BigInt = - new BigInt(new BigInteger(signum, magnitude)) + apply(new BigInteger(signum, magnitude)) /** Constructs a randomly generated positive BigInt that is probably prime, * with the specified bitLength. */ def apply(bitlength: Int, certainty: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(bitlength, certainty, rnd.self)) + apply(new BigInteger(bitlength, certainty, rnd.self)) /** Constructs a randomly generated BigInt, uniformly distributed over the * range `0` to `(2 ^ numBits - 1)`, inclusive. 
*/ def apply(numbits: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(numbits, rnd.self)) + apply(new BigInteger(numbits, rnd.self)) /** Translates the decimal String representation of a BigInt into a BigInt. */ def apply(x: String): BigInt = - new BigInt(new BigInteger(x)) + apply(new BigInteger(x)) /** Translates the string representation of a `BigInt` in the * specified `radix` into a BigInt. */ def apply(x: String, radix: Int): BigInt = - new BigInt(new BigInteger(x, radix)) + apply(new BigInteger(x, radix)) /** Translates a `java.math.BigInteger` into a BigInt. */ - def apply(x: BigInteger): BigInt = - new BigInt(x) + def apply(x: BigInteger): BigInt = { + if (x.bitLength <= 63) { + val l = x.longValue + if (minCached <= l && l <= maxCached) getCached(l.toInt) else new BigInt(x, l) + } else new BigInt(x, Long.MinValue) + } /** Returns a positive BigInt that is probably prime, with the specified bitLength. */ def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt = - new BigInt(BigInteger.probablePrime(bitLength, rnd.self)) + apply(BigInteger.probablePrime(bitLength, rnd.self)) /** Implicit conversion from `Int` to `BigInt`. */ @@ -106,24 +124,110 @@ object BigInt { /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`. */ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = if (x eq null) null else apply(x) + + // this method is adapted from Google Guava's version at + // https://github.com/google/guava/blob/master/guava/src/com/google/common/math/LongMath.java + // that code carries the following notice: + // * Copyright (C) 2011 The Guava Authors + // * + // * Licensed under the Apache License, Version 2.0 (the "License") + /** + * Returns the greatest common divisor of a and b. Returns 0 if a == 0 && b == 0. + */ + private def longGcd(a: Long, b: Long): Long = { + // both a and b must be >= 0 + if (a == 0) { // 0 % b == 0, so b divides a, but the converse doesn't hold. 
+ // BigInteger.gcd is consistent with this decision. + return b + } + else if (b == 0) return a // similar logic + /* + * Uses the binary GCD algorithm; see http://en.wikipedia.org/wiki/Binary_GCD_algorithm. This is + * >60% faster than the Euclidean algorithm in benchmarks. + */ + val aTwos = java.lang.Long.numberOfTrailingZeros(a) + var a1 = a >> aTwos // divide out all 2s + + val bTwos = java.lang.Long.numberOfTrailingZeros(b) + var b1 = b >> bTwos + while (a1 != b1) { // both a, b are odd + // The key to the binary GCD algorithm is as follows: + // Both a1 and b1 are odd. Assume a1 > b1; then gcd(a1 - b1, b1) = gcd(a1, b1). + // But in gcd(a1 - b1, b1), a1 - b1 is even and b1 is odd, so we can divide out powers of two. + // We bend over backwards to avoid branching, adapting a technique from + // http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax + val delta = a1 - b1 // can't overflow, since a1 and b1 are nonnegative + val minDeltaOrZero = delta & (delta >> (java.lang.Long.SIZE - 1)) + // equivalent to Math.min(delta, 0) + a1 = delta - minDeltaOrZero - minDeltaOrZero // sets a to Math.abs(a - b) + + // a is now nonnegative and even + b1 += minDeltaOrZero // sets b to min(old a, b) + + a1 >>= java.lang.Long.numberOfTrailingZeros(a1) // divide out all 2s, since 2 doesn't divide b + + } + a1 << scala.math.min(aTwos, bTwos) + } + } -/** - * @author Martin Odersky +/** A type with efficient encoding of arbitrary integers. + * + * It wraps `java.math.BigInteger`, with optimization for small values that can be encoded in a `Long`. */ -final class BigInt(val bigInteger: BigInteger) +final class BigInt private (private var _bigInteger: BigInteger, private val _long: Long) extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigInt] { + // The class has a special encoding for integer that fit in a Long *and* are not equal to Long.MinValue. 
+ // + // The Long value Long.MinValue is a tag specifying that the integer is encoded in the BigInteger field. + // + // There are three possible states for the class fields (_bigInteger, _long) + // 1. (null, l) where l != Long.MinValue, encodes the integer "l" + // 2. (b, l) where l != Long.MinValue; then b is a BigInteger with value l, encodes "l" == "b" + // 3a. (b, Long.MinValue) where b == Long.MinValue, encodes Long.MinValue + // 3b. (b, Long.MinValue) where b does not fit in a Long, encodes "b" + // + // There is only one possible transition 1. -> 2., when the method .bigInteger is called, then the field + // _bigInteger caches the result. + // + // The case 3a. is the only one where the BigInteger could actually fit in a Long, but as its value is used as a + // tag, we'll take the slow path instead. + // + // Additionally, we know that if this.isValidLong is true, then _long is the encoded value. + + /** Public constructor present for compatibility. Use the BigInt.apply companion object method instead. */ + def this(bigInteger: BigInteger) = this( + bigInteger, // even if it is a short BigInteger, we cache the instance + if (bigInteger.bitLength <= 63) + bigInteger.longValue // if _bigInteger is actually equal to Long.MinValue, no big deal, its value acts as a tag + else Long.MinValue + ) + + /** Returns whether the integer is encoded in the Long. Returns true for all values fitting in a Long except + * Long.MinValue. */ + private def longEncoding: Boolean = _long != Long.MinValue + + def bigInteger: BigInteger = { + val read = _bigInteger + if (read ne null) read else { + val write = BigInteger.valueOf(_long) + _bigInteger = write // reference assignment is atomic; this is multi-thread safe (if possibly wasteful) + write + } + } + /** Returns the hash code for this BigInt. 
*/ override def hashCode(): Int = - if (isValidLong) unifiedPrimitiveHashcode() + if (isValidLong) unifiedPrimitiveHashcode else bigInteger.## - /** Compares this BigInt with the specified value for equality. - */ + /** Compares this BigInt with the specified value for equality. */ + @nowarn("cat=other-non-cooperative-equals") override def equals(that: Any): Boolean = that match { case that: BigInt => this equals that case that: BigDecimal => that equals this @@ -131,14 +235,16 @@ final class BigInt(val bigInteger: BigInteger) case that: Float => isValidFloat && toFloat == that case x => isValidLong && unifiedPrimitiveEquals(x) } - override def isValidByte = this >= Byte.MinValue && this <= Byte.MaxValue - override def isValidShort = this >= Short.MinValue && this <= Short.MaxValue - override def isValidChar = this >= Char.MinValue && this <= Char.MaxValue - override def isValidInt = this >= Int.MinValue && this <= Int.MaxValue - def isValidLong = this >= Long.MinValue && this <= Long.MaxValue + + override def isValidByte: Boolean = _long >= Byte.MinValue && _long <= Byte.MaxValue /* && longEncoding */ + override def isValidShort: Boolean = _long >= Short.MinValue && _long <= Short.MaxValue /* && longEncoding */ + override def isValidChar: Boolean = _long >= Char.MinValue && _long <= Char.MaxValue /* && longEncoding */ + override def isValidInt: Boolean = _long >= Int.MinValue && _long <= Int.MaxValue /* && longEncoding */ + def isValidLong: Boolean = longEncoding || _bigInteger == BigInt.longMinValueBigInteger // rhs of || tests == Long.MinValue + /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`. */ - def isValidFloat = { + def isValidFloat: Boolean = { val bitLen = bitLength (bitLen <= 24 || { @@ -151,7 +257,7 @@ final class BigInt(val bigInteger: BigInteger) } /** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`. 
*/ - def isValidDouble = { + def isValidDouble: Boolean = { val bitLen = bitLength (bitLen <= 53 || { @@ -173,148 +279,271 @@ final class BigInt(val bigInteger: BigInteger) } @deprecated("isWhole on an integer type is always true", "2.12.15") - def isWhole() = true - def underlying = bigInteger + def isWhole: Boolean = true + def underlying: BigInteger = bigInteger /** Compares this BigInt with the specified BigInt for equality. */ - def equals (that: BigInt): Boolean = compare(that) == 0 + def equals(that: BigInt): Boolean = + if (this.longEncoding) + that.longEncoding && (this._long == that._long) + else + !that.longEncoding && (this._bigInteger == that._bigInteger) /** Compares this BigInt with the specified BigInt */ - def compare (that: BigInt): Int = this.bigInteger.compareTo(that.bigInteger) + def compare(that: BigInt): Int = + if (this.longEncoding) { + if (that.longEncoding) java.lang.Long.compare(this._long, that._long) else -that._bigInteger.signum() + } else { + if (that.longEncoding) _bigInteger.signum() else this._bigInteger.compareTo(that._bigInteger) + } /** Addition of BigInts */ - def + (that: BigInt): BigInt = new BigInt(this.bigInteger.add(that.bigInteger)) + def +(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x + y + if ((~(x ^ y) & (x ^ z)) >= 0L) return BigInt(z) + } + BigInt(this.bigInteger.add(that.bigInteger)) + } /** Subtraction of BigInts */ - def - (that: BigInt): BigInt = new BigInt(this.bigInteger.subtract(that.bigInteger)) + def -(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x - y + if (((x ^ y) & (x ^ z)) >= 0L) return BigInt(z) + } + BigInt(this.bigInteger.subtract(that.bigInteger)) + } /** Multiplication of BigInts */ - def * (that: BigInt): BigInt = new BigInt(this.bigInteger.multiply(that.bigInteger)) + def *(that: BigInt): BigInt = { + if 
(this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x * y + // original code checks the y != Long.MinValue, but when longEncoding is true, that is never the case + // if (x == 0 || (y == z / x && !(x == -1 && y == Long.MinValue))) return BigInt(z) + if (x == 0 || y == z / x) return BigInt(z) + } + BigInt(this.bigInteger.multiply(that.bigInteger)) + } /** Division of BigInts */ - def / (that: BigInt): BigInt = new BigInt(this.bigInteger.divide(that.bigInteger)) + def /(that: BigInt): BigInt = + // in the fast path, note that the original code avoided storing -Long.MinValue in a long: + // if (this._long != Long.MinValue || that._long != -1) return BigInt(this._long / that._long) + // but we know this._long cannot be Long.MinValue, because Long.MinValue is the tag for bigger integers + if (this.longEncoding && that.longEncoding) BigInt(this._long / that._long) + else BigInt(this.bigInteger.divide(that.bigInteger)) /** Remainder of BigInts */ - def % (that: BigInt): BigInt = new BigInt(this.bigInteger.remainder(that.bigInteger)) + def %(that: BigInt): BigInt = + // see / for the original logic regarding Long.MinValue + if (this.longEncoding && that.longEncoding) BigInt(this._long % that._long) + else BigInt(this.bigInteger.remainder(that.bigInteger)) /** Returns a pair of two BigInts containing (this / that) and (this % that). 
*/ - def /% (that: BigInt): (BigInt, BigInt) = { - val dr = this.bigInteger.divideAndRemainder(that.bigInteger) - (new BigInt(dr(0)), new BigInt(dr(1))) - } + def /%(that: BigInt): (BigInt, BigInt) = + if (this.longEncoding && that.longEncoding) { + val x = this._long + val y = that._long + // original line: if (x != Long.MinValue || y != -1) return (BigInt(x / y), BigInt(x % y)) + (BigInt(x / y), BigInt(x % y)) + } else { + val dr = this.bigInteger.divideAndRemainder(that.bigInteger) + (BigInt(dr(0)), BigInt(dr(1))) + } /** Leftshift of BigInt */ - def << (n: Int): BigInt = new BigInt(this.bigInteger.shiftLeft(n)) + def <<(n: Int): BigInt = + if (longEncoding && n <= 0) (this >> (-n)) else BigInt(this.bigInteger.shiftLeft(n)) /** (Signed) rightshift of BigInt */ - def >> (n: Int): BigInt = new BigInt(this.bigInteger.shiftRight(n)) + def >>(n: Int): BigInt = + if (longEncoding && n >= 0) { + if (n < 64) BigInt(_long >> n) + else if (_long < 0) BigInt(-1) + else BigInt(0) // for _long >= 0 + } else BigInt(this.bigInteger.shiftRight(n)) /** Bitwise and of BigInts */ - def & (that: BigInt): BigInt = new BigInt(this.bigInteger.and(that.bigInteger)) + def &(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long & that._long) + else BigInt(this.bigInteger.and(that.bigInteger)) /** Bitwise or of BigInts */ - def | (that: BigInt): BigInt = new BigInt(this.bigInteger.or (that.bigInteger)) + def |(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long | that._long) + else BigInt(this.bigInteger.or(that.bigInteger)) /** Bitwise exclusive-or of BigInts */ - def ^ (that: BigInt): BigInt = new BigInt(this.bigInteger.xor(that.bigInteger)) + def ^(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long ^ that._long) + else BigInt(this.bigInteger.xor(that.bigInteger)) /** Bitwise and-not of BigInts. Returns a BigInt whose value is (this & ~that). 
*/ - def &~ (that: BigInt): BigInt = new BigInt(this.bigInteger.andNot(that.bigInteger)) + def &~(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long & ~that._long) + else BigInt(this.bigInteger.andNot(that.bigInteger)) /** Returns the greatest common divisor of abs(this) and abs(that) */ - def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger)) + def gcd(that: BigInt): BigInt = + if (this.longEncoding) { + if (this._long == 0) return that.abs + // if (this._long == Long.MinValue) return (-this) gcd that + // this != 0 && this != Long.MinValue + if (that.longEncoding) { + if (that._long == 0) return this.abs + // if (that._long == Long.MinValue) return this gcd (-that) + BigInt(BigInt.longGcd(this._long.abs, that._long.abs)) + } else that gcd this // force the BigInteger on the left + } else { + // this is not a valid long + if (that.longEncoding) { + if (that._long == 0) return this.abs + // if (that._long == Long.MinValue) return this gcd (-that) + val red = (this._bigInteger mod BigInteger.valueOf(that._long.abs)).longValue() + if (red == 0) return that.abs + BigInt(BigInt.longGcd(that._long.abs, red)) + } else BigInt(this.bigInteger.gcd(that.bigInteger)) + } + /** Returns a BigInt whose value is (this mod that). * This method differs from `%` in that it always returns a non-negative BigInt. 
+ * @param that A positive number */ - def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger)) + def mod(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding && that._long > 0) { + val res = this._long % that._long + if (res >= 0) BigInt(res) else BigInt(res + that._long) + } else BigInt(this.bigInteger.mod(that.bigInteger)) /** Returns the minimum of this and that */ - def min (that: BigInt): BigInt = new BigInt(this.bigInteger.min(that.bigInteger)) + def min(that: BigInt): BigInt = + if (this <= that) this else that /** Returns the maximum of this and that */ - def max (that: BigInt): BigInt = new BigInt(this.bigInteger.max(that.bigInteger)) + def max(that: BigInt): BigInt = + if (this >= that) this else that /** Returns a BigInt whose value is (this raised to the power of exp). */ - def pow (exp: Int): BigInt = new BigInt(this.bigInteger.pow(exp)) + def pow(exp: Int): BigInt = BigInt(this.bigInteger.pow(exp)) /** Returns a BigInt whose value is * (this raised to the power of exp modulo m). */ - def modPow (exp: BigInt, m: BigInt): BigInt = - new BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) + def modPow(exp: BigInt, m: BigInt): BigInt = BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) /** Returns a BigInt whose value is (the inverse of this modulo m). 
*/ - def modInverse (m: BigInt): BigInt = new BigInt(this.bigInteger.modInverse(m.bigInteger)) + def modInverse(m: BigInt): BigInt = BigInt(this.bigInteger.modInverse(m.bigInteger)) /** Returns a BigInt whose value is the negation of this BigInt */ - def unary_- : BigInt = new BigInt(this.bigInteger.negate()) + def unary_- : BigInt = if (longEncoding) BigInt(-_long) else BigInt(this.bigInteger.negate()) /** Returns the absolute value of this BigInt */ - def abs: BigInt = new BigInt(this.bigInteger.abs()) + def abs: BigInt = if (signum < 0) -this else this + + /** Returns the sign of this BigInt; + * -1 if it is less than 0, + * +1 if it is greater than 0, + * 0 if it is equal to 0. + */ + def signum: Int = if (longEncoding) java.lang.Long.signum(_long) else _bigInteger.signum() /** Returns the sign of this BigInt; * -1 if it is less than 0, * +1 if it is greater than 0, * 0 if it is equal to 0. */ - def signum: Int = this.bigInteger.signum() + def sign: BigInt = BigInt(signum) /** Returns the bitwise complement of this BigInt */ - def unary_~ : BigInt = new BigInt(this.bigInteger.not()) + def unary_~ : BigInt = + // it is equal to -(this + 1) + if (longEncoding && _long != Long.MaxValue) BigInt(-(_long + 1)) else BigInt(this.bigInteger.not()) /** Returns true if and only if the designated bit is set. */ - def testBit (n: Int): Boolean = this.bigInteger.testBit(n) + def testBit(n: Int): Boolean = + if (longEncoding && n >= 0) { + if (n <= 63) + (_long & (1L << n)) != 0 + else + _long < 0 // give the sign bit + } else this.bigInteger.testBit(n) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit set. 
*/ - def setBit (n: Int): BigInt = new BigInt(this.bigInteger.setBit(n)) + def setBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62 && n >= 0) BigInt(_long | (1L << n)) else BigInt(this.bigInteger.setBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit cleared. */ - def clearBit(n: Int): BigInt = new BigInt(this.bigInteger.clearBit(n)) + def clearBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62 && n >= 0) BigInt(_long & ~(1L << n)) else BigInt(this.bigInteger.clearBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit flipped. */ - def flipBit (n: Int): BigInt = new BigInt(this.bigInteger.flipBit(n)) + def flipBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62 && n >= 0) BigInt(_long ^ (1L << n)) else BigInt(this.bigInteger.flipBit(n)) /** Returns the index of the rightmost (lowest-order) one bit in this BigInt * (the number of zero bits to the right of the rightmost one bit). */ - def lowestSetBit: Int = this.bigInteger.getLowestSetBit() + def lowestSetBit: Int = + if (longEncoding) { + if (_long == 0) -1 else java.lang.Long.numberOfTrailingZeros(_long) + } else this.bigInteger.getLowestSetBit() /** Returns the number of bits in the minimal two's-complement representation of this BigInt, * excluding a sign bit. */ - def bitLength: Int = this.bigInteger.bitLength() + def bitLength: Int = + // bitLength is defined as ceil(log2(this < 0 ? 
-this : this + 1))) + // where ceil(log2(x)) = 64 - numberOfLeadingZeros(x - 1) + if (longEncoding) { + if (_long < 0) 64 - java.lang.Long.numberOfLeadingZeros(-(_long + 1)) // takes care of Long.MinValue + else 64 - java.lang.Long.numberOfLeadingZeros(_long) + } else _bigInteger.bitLength() /** Returns the number of bits in the two's complement representation of this BigInt * that differ from its sign bit. */ - def bitCount: Int = this.bigInteger.bitCount() + def bitCount: Int = + if (longEncoding) { + if (_long < 0) java.lang.Long.bitCount(-(_long + 1)) else java.lang.Long.bitCount(_long) + } else this.bigInteger.bitCount() /** Returns true if this BigInt is probably prime, false if it's definitely composite. * @param certainty a measure of the uncertainty that the caller is willing to tolerate: @@ -323,28 +552,28 @@ final class BigInt(val bigInteger: BigInteger) * The execution time of this method is proportional to the value of * this parameter. */ - def isProbablePrime(certainty: Int) = this.bigInteger.isProbablePrime(certainty) + def isProbablePrime(certainty: Int): Boolean = this.bigInteger.isProbablePrime(certainty) /** Converts this BigInt to a byte. * If the BigInt is too big to fit in a byte, only the low-order 8 bits are returned. * Note that this conversion can lose information about the overall magnitude of the * BigInt value as well as return a result with the opposite sign. */ - override def byteValue = intValue.toByte + override def byteValue: Byte = intValue.toByte /** Converts this BigInt to a short. * If the BigInt is too big to fit in a short, only the low-order 16 bits are returned. * Note that this conversion can lose information about the overall magnitude of the * BigInt value as well as return a result with the opposite sign. */ - override def shortValue = intValue.toShort + override def shortValue: Short = intValue.toShort /** Converts this BigInt to a char. 
* If the BigInt is too big to fit in a char, only the low-order 16 bits are returned. * Note that this conversion can lose information about the overall magnitude of the * BigInt value and that it always returns a positive result. */ - def charValue = intValue.toChar + def charValue: Char = intValue.toChar /** Converts this BigInt to an int. * If the BigInt is too big to fit in an int, only the low-order 32 bits @@ -352,7 +581,7 @@ final class BigInt(val bigInteger: BigInteger) * overall magnitude of the BigInt value as well as return a result with * the opposite sign. */ - def intValue = this.bigInteger.intValue + def intValue: Int = if (longEncoding) _long.toInt else this.bigInteger.intValue /** Converts this BigInt to a long. * If the BigInt is too big to fit in a long, only the low-order 64 bits @@ -360,21 +589,23 @@ final class BigInt(val bigInteger: BigInteger) * overall magnitude of the BigInt value as well as return a result with * the opposite sign. */ - def longValue = this.bigInteger.longValue + def longValue: Long = if (longEncoding) _long else _bigInteger.longValue /** Converts this `BigInt` to a `float`. * If this `BigInt` has too great a magnitude to represent as a float, * it will be converted to `Float.NEGATIVE_INFINITY` or * `Float.POSITIVE_INFINITY` as appropriate. */ - def floatValue = this.bigInteger.floatValue + def floatValue: Float = this.bigInteger.floatValue /** Converts this `BigInt` to a `double`. * if this `BigInt` has too great a magnitude to represent as a double, * it will be converted to `Double.NEGATIVE_INFINITY` or * `Double.POSITIVE_INFINITY` as appropriate. */ - def doubleValue = this.bigInteger.doubleValue + def doubleValue: Double = + if (isValidLong && (-(1L << 53) <= _long && _long <= (1L << 53))) _long.toDouble + else this.bigInteger.doubleValue /** Create a `NumericRange[BigInt]` in range `[start;end)` * with the specified step, where start is the target BigInt. 
@@ -383,15 +614,15 @@ final class BigInt(val bigInteger: BigInteger) * @param step the distance between elements (defaults to 1) * @return the range */ - def until(end: BigInt, step: BigInt = BigInt(1)) = Range.BigInt(this, end, step) + def until(end: BigInt, step: BigInt = BigInt(1)): NumericRange.Exclusive[BigInt] = Range.BigInt(this, end, step) /** Like until, but inclusive of the end value. */ - def to(end: BigInt, step: BigInt = BigInt(1)) = Range.BigInt.inclusive(this, end, step) + def to(end: BigInt, step: BigInt = BigInt(1)): NumericRange.Inclusive[BigInt] = Range.BigInt.inclusive(this, end, step) /** Returns the decimal String representation of this BigInt. */ - override def toString(): String = this.bigInteger.toString() + override def toString(): String = if (longEncoding) _long.toString() else _bigInteger.toString() /** Returns the String representation in the specified radix of this BigInt. */ diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala index a3aa6f984e01..b5d01b362901 100644 --- a/src/library/scala/math/Equiv.scala +++ b/src/library/scala/math/Equiv.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,13 +14,14 @@ package scala package math import java.util.Comparator +import scala.annotation.migration /** A trait for representing equivalence relations. It is important to * distinguish between a type that can be compared for equality or * equivalence and a representation of equivalence on some type. This * trait is for representing the latter. * - * An [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] + * An [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] * is a binary relation on a type. This relation is exposed as * the `equiv` method of the `Equiv` trait. 
The relation must be: * @@ -28,9 +29,6 @@ import java.util.Comparator * 1. symmetric: `equiv(x, y) == equiv(y, x)` for any `x` and `y` of type `T`. * 1. transitive: if `equiv(x, y) == true` and `equiv(y, z) == true`, then * `equiv(x, z) == true` for any `x`, `y`, and `z` of type `T`. - * - * @author Geoffrey Washburn, Paul Phillips - * @since 2.7 */ trait Equiv[T] extends Any with Serializable { @@ -42,24 +40,468 @@ trait Equiv[T] extends Any with Serializable { trait LowPriorityEquiv { self: Equiv.type => - implicit def universalEquiv[T] : Equiv[T] = universal[T] + /** + * @deprecated This implicit universal `Equiv` instance allows accidentally + * comparing instances of types for which equality isn't well-defined or implemented. + * (For example, it does not make sense to compare two `Function1` instances.) + * + * Use `Equiv.universal` explicitly instead. If you really want an implicit universal `Equiv` instance + * despite the potential problems, consider `implicit def universalEquiv[T]: Equiv[T] = universal[T]`. + */ + @deprecated("Use explicit Equiv.universal instead. 
See Scaladoc entry for more information: " + + "https://www.scala-lang.org/api/current/scala/math/Equiv$.html#universalEquiv[T]:scala.math.Equiv[T]", + since = "2.13.0") + implicit def universalEquiv[T]: Equiv[T] = universal[T] } object Equiv extends LowPriorityEquiv { - def reference[T <: AnyRef] : Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = x eq y + def reference[T <: AnyRef]: Equiv[T] = { _ eq _ } + def universal[T]: Equiv[T] = { _ == _ } + def fromComparator[T](cmp: Comparator[T]): Equiv[T] = { + (x, y) => cmp.compare(x, y) == 0 + } + def fromFunction[T](cmp: (T, T) => Boolean): Equiv[T] = { + (x, y) => cmp(x, y) + } + def by[T, S: Equiv](f: T => S): Equiv[T] = + ((x, y) => implicitly[Equiv[S]].equiv(f(x), f(y))) + + @inline def apply[T: Equiv]: Equiv[T] = implicitly[Equiv[T]] + + /* copied from Ordering */ + + private final val optionSeed = 43 + private final val iterableSeed = 47 + + private final class IterableEquiv[CC[X] <: Iterable[X], T](private val eqv: Equiv[T]) extends Equiv[CC[T]] { + def equiv(x: CC[T], y: CC[T]): Boolean = { + val xe = x.iterator + val ye = y.iterator + + while (xe.hasNext && ye.hasNext) { + if (!eqv.equiv(xe.next(), ye.next())) return false + } + + xe.hasNext == ye.hasNext + } + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: IterableEquiv[_, _] => this.eqv == that.eqv + case _ => false + } + override def hashCode(): Int = eqv.hashCode() * iterableSeed + } + + trait ExtraImplicits { + /** Not in the standard scope due to the potential for divergence: + * For instance `implicitly[Equiv[Any]]` diverges in its presence. 
+ */ + implicit def seqEquiv[CC[X] <: scala.collection.Seq[X], T](implicit eqv: Equiv[T]): Equiv[CC[T]] = + new IterableEquiv[CC, T](eqv) + + implicit def sortedSetEquiv[CC[X] <: scala.collection.SortedSet[X], T](implicit eqv: Equiv[T]): Equiv[CC[T]] = + new IterableEquiv[CC, T](eqv) } - def universal[T] : Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = x == y + + /** An object containing implicits which are not in the default scope. */ + object Implicits extends ExtraImplicits { } + + implicit object Unit extends Equiv[Unit] { + def equiv(x: Unit, y: Unit): Boolean = true } - def fromComparator[T](cmp: Comparator[T]): Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = cmp.compare(x, y) == 0 + + implicit object Boolean extends Equiv[Boolean] { + def equiv(x: Boolean, y: Boolean): Boolean = x == y } - def fromFunction[T](cmp: (T, T) => Boolean): Equiv[T] = new Equiv[T] { - def equiv(x: T, y: T) = cmp(x, y) + + implicit object Byte extends Equiv[Byte] { + def equiv(x: Byte, y: Byte): Boolean = x == y + } + + implicit object Char extends Equiv[Char] { + def equiv(x: Char, y: Char): Boolean = x == y + } + + implicit object Short extends Equiv[Short] { + def equiv(x: Short, y: Short): Boolean = x == y + } + + implicit object Int extends Equiv[Int] { + def equiv(x: Int, y: Int): Boolean = x == y + } + + implicit object Long extends Equiv[Long] { + def equiv(x: Long, y: Long): Boolean = x == y + } + + /** `Equiv`s for `Float`s. + * + * @define floatEquiv Because the behaviour of `Float`s specified by IEEE is + * not consistent with behaviors required of an equivalence + * relation for `NaN` (it is not reflexive), there are two + * equivalences defined for `Float`: `StrictEquiv`, which + * is reflexive, and `IeeeEquiv`, which is consistent + * with IEEE spec and floating point operations defined in + * [[scala.math]]. 
+ */ + object Float { + /** An equivalence for `Float`s which is reflexive (treats all `NaN`s + * as equivalent), and treats `-0.0` and `0.0` as not equivalent; it + * behaves the same as [[java.lang.Float.compare]]. + * + * $floatEquiv + * + * This equivalence may be preferable for collections. + * + * @see [[IeeeEquiv]] + */ + trait StrictEquiv extends Equiv[Float] { + def equiv(x: Float, y: Float): Boolean = java.lang.Float.compare(x, y) == 0 + } + implicit object StrictEquiv extends StrictEquiv + + /** An equivalence for `Float`s which is consistent with IEEE specifications. + * + * $floatEquiv + * + * This equivalence may be preferable for numeric contexts. + * + * @see [[StrictEquiv]] + */ + trait IeeeEquiv extends Equiv[Float] { + override def equiv(x: Float, y: Float): Boolean = x == y + } + implicit object IeeeEquiv extends IeeeEquiv + } + + @migration( + " The default implicit equivalence for floats no longer conforms to\n" + + " to IEEE 754's behavior for -0.0F and NaN.\n" + + " Import `Equiv.Float.IeeeEquiv` to recover the previous behavior.\n" + + " See also https://www.scala-lang.org/api/current/scala/math/Equiv$$Float$.html.", "2.13.2") + implicit object DeprecatedFloatEquiv extends Float.StrictEquiv + + /** `Equiv`s for `Double`s. + * + * @define doubleEquiv Because the behaviour of `Double`s specified by IEEE is + * not consistent with behaviors required of an equivalence + * relation for `NaN` (it is not reflexive), there are two + * equivalences defined for `Double`: `StrictEquiv`, which + * is reflexive, and `IeeeEquiv`, which is consistent + * with IEEE spec and floating point operations defined in + * [[scala.math]]. + */ + object Double { + /** An equivalence for `Double`s which is reflexive (treats all `NaN`s + * as equivalent), and treats `-0.0` and `0.0` as not equivalent; it + * behaves the same as [[java.lang.Double.compare]]. + * + * $doubleEquiv + * + * This equivalence may be preferable for collections. 
+ * + * @see [[IeeeEquiv]] + */ + trait StrictEquiv extends Equiv[Double] { + def equiv(x: Double, y: Double): Boolean = java.lang.Double.compare(x, y) == 0 + } + implicit object StrictEquiv extends StrictEquiv + + /** An equivalence for `Double`s which is consistent with IEEE specifications. + * + * $doubleEquiv + * + * This equivalence may be preferable for numeric contexts. + * + * @see [[StrictEquiv]] + */ + trait IeeeEquiv extends Equiv[Double] { + def equiv(x: Double, y: Double): Boolean = x == y + } + implicit object IeeeEquiv extends IeeeEquiv + } + @migration( + " The default implicit equivalence for doubles no longer conforms to\n" + + " to IEEE 754's behavior for -0.0D and NaN.\n" + + " Import `Equiv.Double.IeeeEquiv` to recover the previous behavior.\n" + + " See also https://www.scala-lang.org/api/current/scala/math/Equiv$$Double$.html.", "2.13.2") + implicit object DeprecatedDoubleEquiv extends Double.StrictEquiv + + implicit object BigInt extends Equiv[BigInt] { + def equiv(x: BigInt, y: BigInt): Boolean = x == y + } + + implicit object BigDecimal extends Equiv[BigDecimal] { + def equiv(x: BigDecimal, y: BigDecimal): Boolean = x == y + } + + implicit object String extends Equiv[String] { + def equiv(x: String, y: String): Boolean = x == y + } + + implicit object Symbol extends Equiv[Symbol] { + def equiv(x: Symbol, y: Symbol): Boolean = x == y + } + + implicit def Option[T](implicit eqv: Equiv[T]): Equiv[Option[T]] = + new OptionEquiv[T](eqv) + + private[this] final class OptionEquiv[T](private val eqv: Equiv[T]) extends Equiv[Option[T]] { + def equiv(x: Option[T], y: Option[T]): Boolean = (x, y) match { + case (None, None) => true + case (Some(x), Some(y)) => eqv.equiv(x, y) + case _ => false + } + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: OptionEquiv[_] => this.eqv == that.eqv + case _ => false + } + override def hashCode(): Int = eqv.hashCode() * optionSeed + } + + 
implicit def Tuple2[T1, T2](implicit eqv1: Equiv[T1], eqv2: Equiv[T2]): Equiv[(T1, T2)] = + new Tuple2Equiv(eqv1, eqv2) + + private[this] final class Tuple2Equiv[T1, T2](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2]) extends Equiv[(T1, T2)] { + def equiv(x: (T1, T2), y: (T1, T2)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple2Equiv[_, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2).hashCode() + } + + implicit def Tuple3[T1, T2, T3](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3]) : Equiv[(T1, T2, T3)] = + new Tuple3Equiv(eqv1, eqv2, eqv3) + + private[this] final class Tuple3Equiv[T1, T2, T3](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3]) extends Equiv[(T1, T2, T3)] { + def equiv(x: (T1, T2, T3), y: (T1, T2, T3)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple3Equiv[_, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3).hashCode() + } + + implicit def Tuple4[T1, T2, T3, T4](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4]) : Equiv[(T1, T2, T3, T4)] = + new Tuple4Equiv(eqv1, eqv2, eqv3, eqv4) + + private[this] final class Tuple4Equiv[T1, T2, T3, T4](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3], + private val eqv4: Equiv[T4]) + extends Equiv[(T1, T2, T3, T4)] { + def equiv(x: (T1, T2, T3, T4), y: (T1, T2, T3, T4)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) && + eqv4.equiv(x._4, y._4) + + 
override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple4Equiv[_, _, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 && + this.eqv4 == that.eqv4 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4).hashCode() + } + + implicit def Tuple5[T1, T2, T3, T4, T5](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5]): Equiv[(T1, T2, T3, T4, T5)] = + new Tuple5Equiv(eqv1, eqv2, eqv3, eqv4, eqv5) + + private[this] final class Tuple5Equiv[T1, T2, T3, T4, T5](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3], + private val eqv4: Equiv[T4], + private val eqv5: Equiv[T5]) + extends Equiv[(T1, T2, T3, T4, T5)] { + def equiv(x: (T1, T2, T3, T4, T5), y: (T1, T2, T3, T4, T5)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) && + eqv4.equiv(x._4, y._4) && + eqv5.equiv(x._5, y._5) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple5Equiv[_, _, _, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 && + this.eqv4 == that.eqv4 && + this.eqv5 == that.eqv5 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5).hashCode() + } + + implicit def Tuple6[T1, T2, T3, T4, T5, T6](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6]): Equiv[(T1, T2, T3, T4, T5, T6)] = + new Tuple6Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6) + + private[this] final class Tuple6Equiv[T1, T2, T3, T4, T5, T6](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3], + private val eqv4: Equiv[T4], + private val eqv5: Equiv[T5], + private val eqv6: Equiv[T6]) + extends Equiv[(T1, T2, T3, T4, T5, T6)] { + def equiv(x: (T1, T2, T3, T4, T5, T6), 
y: (T1, T2, T3, T4, T5, T6)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) && + eqv4.equiv(x._4, y._4) && + eqv5.equiv(x._5, y._5) && + eqv6.equiv(x._6, y._6) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple6Equiv[_, _, _, _, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 && + this.eqv4 == that.eqv4 && + this.eqv5 == that.eqv5 && + this.eqv6 == that.eqv6 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6).hashCode() + } + + implicit def Tuple7[T1, T2, T3, T4, T5, T6, T7](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6], eqv7: Equiv[T7]): Equiv[(T1, T2, T3, T4, T5, T6, T7)] = + new Tuple7Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7) + + private[this] final class Tuple7Equiv[T1, T2, T3, T4, T5, T6, T7](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3], + private val eqv4: Equiv[T4], + private val eqv5: Equiv[T5], + private val eqv6: Equiv[T6], + private val eqv7: Equiv[T7]) + extends Equiv[(T1, T2, T3, T4, T5, T6, T7)] { + def equiv(x: (T1, T2, T3, T4, T5, T6, T7), y: (T1, T2, T3, T4, T5, T6, T7)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) && + eqv4.equiv(x._4, y._4) && + eqv5.equiv(x._5, y._5) && + eqv6.equiv(x._6, y._6) && + eqv7.equiv(x._7, y._7) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple7Equiv[_, _, _, _, _, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 && + this.eqv4 == that.eqv4 && + this.eqv5 == that.eqv5 && + this.eqv6 == that.eqv6 && + this.eqv7 == that.eqv7 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7).hashCode() + } + + implicit 
def Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6], eqv7: Equiv[T7], eqv8: Equiv[T8]): Equiv[(T1, T2, T3, T4, T5, T6, T7, T8)] = + new Tuple8Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8) + + private[this] final class Tuple8Equiv[T1, T2, T3, T4, T5, T6, T7, T8](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3], + private val eqv4: Equiv[T4], + private val eqv5: Equiv[T5], + private val eqv6: Equiv[T6], + private val eqv7: Equiv[T7], + private val eqv8: Equiv[T8]) + extends Equiv[(T1, T2, T3, T4, T5, T6, T7, T8)] { + def equiv(x: (T1, T2, T3, T4, T5, T6, T7, T8), y: (T1, T2, T3, T4, T5, T6, T7, T8)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) && + eqv4.equiv(x._4, y._4) && + eqv5.equiv(x._5, y._5) && + eqv6.equiv(x._6, y._6) && + eqv7.equiv(x._7, y._7) && + eqv8.equiv(x._8, y._8) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple8Equiv[_, _, _, _, _, _, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 && + this.eqv4 == that.eqv4 && + this.eqv5 == that.eqv5 && + this.eqv6 == that.eqv6 && + this.eqv7 == that.eqv7 && + this.eqv8 == that.eqv8 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8).hashCode() + } + + implicit def Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6], eqv7: Equiv[T7], eqv8 : Equiv[T8], eqv9: Equiv[T9]): Equiv[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] = + new Tuple9Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8, eqv9) + + private[this] final class Tuple9Equiv[T1, T2, T3, T4, T5, T6, T7, T8, T9](private val eqv1: Equiv[T1], + private val eqv2: Equiv[T2], + private val eqv3: Equiv[T3], + 
private val eqv4: Equiv[T4], + private val eqv5: Equiv[T5], + private val eqv6: Equiv[T6], + private val eqv7: Equiv[T7], + private val eqv8: Equiv[T8], + private val eqv9: Equiv[T9]) + extends Equiv[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] { + def equiv(x: (T1, T2, T3, T4, T5, T6, T7, T8, T9), y: (T1, T2, T3, T4, T5, T6, T7, T8, T9)): Boolean = + eqv1.equiv(x._1, y._1) && + eqv2.equiv(x._2, y._2) && + eqv3.equiv(x._3, y._3) && + eqv4.equiv(x._4, y._4) && + eqv5.equiv(x._5, y._5) && + eqv6.equiv(x._6, y._6) && + eqv7.equiv(x._7, y._7) && + eqv8.equiv(x._8, y._8) && + eqv9.equiv(x._9, y._9) + + override def equals(obj: scala.Any): Boolean = obj match { + case that: AnyRef if this eq that => true + case that: Tuple9Equiv[_, _, _, _, _, _, _, _, _] => + this.eqv1 == that.eqv1 && + this.eqv2 == that.eqv2 && + this.eqv3 == that.eqv3 && + this.eqv4 == that.eqv4 && + this.eqv5 == that.eqv5 && + this.eqv6 == that.eqv6 && + this.eqv7 == that.eqv7 && + this.eqv8 == that.eqv8 && + this.eqv9 == that.eqv9 + case _ => false + } + override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8, eqv9).hashCode() } - def by[T, S: Equiv](f: T => S): Equiv[T] = - fromFunction((x, y) => implicitly[Equiv[S]].equiv(f(x), f(y))) - def apply[T: Equiv] : Equiv[T] = implicitly[Equiv[T]] } diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala index 9b57f1a06c7e..2066cc65d8ac 100644 --- a/src/library/scala/math/Fractional.scala +++ b/src/library/scala/math/Fractional.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,13 +15,10 @@ package math import scala.language.implicitConversions -/** - * @since 2.8 - */ trait Fractional[T] extends Numeric[T] { def div(x: T, y: T): T - class FractionalOps(lhs: T) extends Ops(lhs) { + class FractionalOps(lhs: T) extends NumericOps(lhs) { def /(rhs: T) = div(lhs, rhs) } override implicit def mkNumericOps(lhs: T): FractionalOps = @@ -29,6 +26,8 @@ trait Fractional[T] extends Numeric[T] { } object Fractional { + @inline def apply[T](implicit frac: Fractional[T]): Fractional[T] = frac + trait ExtraImplicits { implicit def infixFractionalOps[T](x: T)(implicit num: Fractional[T]): Fractional[T]#FractionalOps = new num.FractionalOps(x) } diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala index 3d7a8135bc4c..d5dd189d64fe 100644 --- a/src/library/scala/math/Integral.scala +++ b/src/library/scala/math/Integral.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,14 +15,11 @@ package math import scala.language.implicitConversions -/** - * @since 2.8 - */ trait Integral[T] extends Numeric[T] { def quot(x: T, y: T): T def rem(x: T, y: T): T - class IntegralOps(lhs: T) extends Ops(lhs) { + class IntegralOps(lhs: T) extends NumericOps(lhs) { def /(rhs: T) = quot(lhs, rhs) def %(rhs: T) = rem(lhs, rhs) def /%(rhs: T) = (quot(lhs, rhs), rem(lhs, rhs)) @@ -31,6 +28,8 @@ trait Integral[T] extends Numeric[T] { } object Integral { + @inline def apply[T](implicit int: Integral[T]): Integral[T] = int + trait ExtraImplicits { /** The regrettable design of Numeric/Integral/Fractional has them all * bumping into one another when searching for this implicit, so they diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala index 937dd2da24da..84028f13f833 100644 --- a/src/library/scala/math/Numeric.scala +++ b/src/library/scala/math/Numeric.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,12 +13,13 @@ package scala package math +import scala.collection.StringParsers import scala.language.implicitConversions +import scala.util.Try -/** - * @since 2.8 - */ object Numeric { + @inline def apply[T](implicit num: Numeric[T]): Numeric[T] = num + trait ExtraImplicits { /** These implicits create conversions from a value for which an implicit Numeric * exists to the inner class which creates infix operations. 
Once imported, you @@ -27,7 +28,7 @@ object Numeric { * def plus[T: Numeric](x: T, y: T) = x + y * }}} */ - implicit def infixNumericOps[T](x: T)(implicit num: Numeric[T]): Numeric[T]#Ops = new num.Ops(x) + implicit def infixNumericOps[T](x: T)(implicit num: Numeric[T]): Numeric[T]#NumericOps = new num.NumericOps(x) } object Implicits extends ExtraImplicits { } @@ -39,6 +40,7 @@ object Numeric { def rem(x: BigInt, y: BigInt): BigInt = x % y def negate(x: BigInt): BigInt = -x def fromInt(x: Int): BigInt = BigInt(x) + def parseString(str: String): Option[BigInt] = Try(BigInt(str)).toOption def toInt(x: BigInt): Int = x.intValue def toLong(x: BigInt): Long = x.longValue def toFloat(x: BigInt): Float = x.floatValue @@ -54,10 +56,13 @@ object Numeric { def rem(x: Int, y: Int): Int = x % y def negate(x: Int): Int = -x def fromInt(x: Int): Int = x + def parseString(str: String): Option[Int] = StringParsers.parseInt(str) def toInt(x: Int): Int = x def toLong(x: Int): Long = x.toLong def toFloat(x: Int): Float = x.toFloat def toDouble(x: Int): Double = x.toDouble + override def signum(x: Int): Int = math.signum(x) + override def sign(x: Int): Int = math.signum(x) } implicit object IntIsIntegral extends IntIsIntegral with Ordering.IntOrdering @@ -69,10 +74,13 @@ object Numeric { def rem(x: Short, y: Short): Short = (x % y).toShort def negate(x: Short): Short = (-x).toShort def fromInt(x: Int): Short = x.toShort + def parseString(str: String): Option[Short] = StringParsers.parseShort(str) def toInt(x: Short): Int = x.toInt def toLong(x: Short): Long = x.toLong def toFloat(x: Short): Float = x.toFloat def toDouble(x: Short): Double = x.toDouble + override def signum(x: Short): Int = math.signum(x.toInt) + override def sign(x: Short): Short = math.signum(x.toInt).toShort } implicit object ShortIsIntegral extends ShortIsIntegral with Ordering.ShortOrdering @@ -84,10 +92,13 @@ object Numeric { def rem(x: Byte, y: Byte): Byte = (x % y).toByte def negate(x: Byte): Byte = 
(-x).toByte def fromInt(x: Int): Byte = x.toByte + def parseString(str: String): Option[Byte] = StringParsers.parseByte(str) def toInt(x: Byte): Int = x.toInt def toLong(x: Byte): Long = x.toLong def toFloat(x: Byte): Float = x.toFloat def toDouble(x: Byte): Double = x.toDouble + override def signum(x: Byte): Int = math.signum(x.toInt) + override def sign(x: Byte): Byte = math.signum(x.toInt).toByte } implicit object ByteIsIntegral extends ByteIsIntegral with Ordering.ByteOrdering @@ -99,10 +110,13 @@ object Numeric { def rem(x: Char, y: Char): Char = (x % y).toChar def negate(x: Char): Char = (-x).toChar def fromInt(x: Int): Char = x.toChar + def parseString(str: String): Option[Char] = Try(str.toInt.toChar).toOption def toInt(x: Char): Int = x.toInt def toLong(x: Char): Long = x.toLong def toFloat(x: Char): Float = x.toFloat def toDouble(x: Char): Double = x.toDouble + override def signum(x: Char): Int = math.signum(x.toInt) + override def sign(x: Char): Char = math.signum(x.toInt).toChar } implicit object CharIsIntegral extends CharIsIntegral with Ordering.CharOrdering @@ -114,69 +128,81 @@ object Numeric { def rem(x: Long, y: Long): Long = x % y def negate(x: Long): Long = -x def fromInt(x: Int): Long = x.toLong + def parseString(str: String): Option[Long] = StringParsers.parseLong(str) def toInt(x: Long): Int = x.toInt def toLong(x: Long): Long = x def toFloat(x: Long): Float = x.toFloat def toDouble(x: Long): Double = x.toDouble + override def signum(x: Long): Int = math.signum(x).toInt + override def sign(x: Long): Long = math.signum(x) } implicit object LongIsIntegral extends LongIsIntegral with Ordering.LongOrdering - trait FloatIsConflicted extends Numeric[Float] { + trait FloatIsFractional extends Fractional[Float] { def plus(x: Float, y: Float): Float = x + y def minus(x: Float, y: Float): Float = x - y def times(x: Float, y: Float): Float = x * y def negate(x: Float): Float = -x def fromInt(x: Int): Float = x.toFloat + def parseString(str: String): 
Option[Float] = StringParsers.parseFloat(str) def toInt(x: Float): Int = x.toInt def toLong(x: Float): Long = x.toLong def toFloat(x: Float): Float = x def toDouble(x: Float): Double = x.toDouble + def div(x: Float, y: Float): Float = x / y // logic in Numeric base trait mishandles abs(-0.0f) override def abs(x: Float): Float = math.abs(x) + // logic in Numeric base trait mishandles sign(-0.0f) and sign(Float.NaN) + override def sign(x: Float): Float = math.signum(x) } - trait FloatIsFractional extends FloatIsConflicted with Fractional[Float] { - def div(x: Float, y: Float): Float = x / y - } - trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] { - def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue - def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue - } - implicit object FloatIsFractional extends FloatIsFractional with Ordering.FloatOrdering - object FloatAsIfIntegral extends FloatAsIfIntegral with Ordering.FloatOrdering { - } + implicit object FloatIsFractional extends FloatIsFractional with Ordering.Float.IeeeOrdering - trait DoubleIsConflicted extends Numeric[Double] { + trait DoubleIsFractional extends Fractional[Double] { def plus(x: Double, y: Double): Double = x + y def minus(x: Double, y: Double): Double = x - y def times(x: Double, y: Double): Double = x * y def negate(x: Double): Double = -x def fromInt(x: Int): Double = x.toDouble + def parseString(str: String): Option[Double] = StringParsers.parseDouble(str) def toInt(x: Double): Int = x.toInt def toLong(x: Double): Long = x.toLong def toFloat(x: Double): Float = x.toFloat def toDouble(x: Double): Double = x + def div(x: Double, y: Double): Double = x / y // logic in Numeric base trait mishandles abs(-0.0) override def abs(x: Double): Double = math.abs(x) + // logic in Numeric base trait mishandles sign(-0.0) and sign(Double.NaN) + override def sign(x: Double): Double = math.signum(x) } - trait DoubleIsFractional 
extends DoubleIsConflicted with Fractional[Double] { - def div(x: Double, y: Double): Double = x / y - } - trait DoubleAsIfIntegral extends DoubleIsConflicted with Integral[Double] { - def quot(x: Double, y: Double): Double = (BigDecimal(x) quot BigDecimal(y)).doubleValue - def rem(x: Double, y: Double): Double = (BigDecimal(x) remainder BigDecimal(y)).doubleValue - } + implicit object DoubleIsFractional extends DoubleIsFractional with Ordering.Double.IeeeOrdering trait BigDecimalIsConflicted extends Numeric[BigDecimal] { - def plus(x: BigDecimal, y: BigDecimal): BigDecimal = x + y - def minus(x: BigDecimal, y: BigDecimal): BigDecimal = x - y - def times(x: BigDecimal, y: BigDecimal): BigDecimal = x * y + // works around pollution of math context by ignoring identity element + def plus(x: BigDecimal, y: BigDecimal): BigDecimal = { + import BigDecimalIsConflicted._0 + if (x eq _0) y else x + y + } + def minus(x: BigDecimal, y: BigDecimal): BigDecimal = { + import BigDecimalIsConflicted._0 + if (x eq _0) -y else x - y + } + // works around pollution of math context by ignoring identity element + def times(x: BigDecimal, y: BigDecimal): BigDecimal = { + import BigDecimalIsConflicted._1 + if (x eq _1) y else x * y + } def negate(x: BigDecimal): BigDecimal = -x def fromInt(x: Int): BigDecimal = BigDecimal(x) + def parseString(str: String): Option[BigDecimal] = Try(BigDecimal(str)).toOption def toInt(x: BigDecimal): Int = x.intValue def toLong(x: BigDecimal): Long = x.longValue def toFloat(x: BigDecimal): Float = x.floatValue def toDouble(x: BigDecimal): Double = x.doubleValue } + private object BigDecimalIsConflicted { + private val _0 = BigDecimal(0) // cached zero is ordinarily cached for default math context + private val _1 = BigDecimal(1) // cached one is ordinarily cached for default math context + } trait BigDecimalIsFractional extends BigDecimalIsConflicted with Fractional[BigDecimal] { def div(x: BigDecimal, y: BigDecimal): BigDecimal = x / y @@ -186,13 +212,10 
@@ object Numeric { def rem(x: BigDecimal, y: BigDecimal): BigDecimal = x remainder y } - // For Double and BigDecimal we offer implicit Fractional objects, but also one + // For BigDecimal we offer an implicit Fractional object, but also one // which acts like an Integral type, which is useful in NumericRange. implicit object BigDecimalIsFractional extends BigDecimalIsFractional with Ordering.BigDecimalOrdering object BigDecimalAsIfIntegral extends BigDecimalAsIfIntegral with Ordering.BigDecimalOrdering - - implicit object DoubleIsFractional extends DoubleIsFractional with Ordering.DoubleOrdering - object DoubleAsIfIntegral extends DoubleAsIfIntegral with Ordering.DoubleOrdering } trait Numeric[T] extends Ordering[T] { @@ -201,6 +224,7 @@ trait Numeric[T] extends Ordering[T] { def times(x: T, y: T): T def negate(x: T): T def fromInt(x: Int): T + def parseString(str: String): Option[T] def toInt(x: T): Int def toLong(x: T): Long def toFloat(x: T): Float @@ -210,22 +234,28 @@ trait Numeric[T] extends Ordering[T] { def one = fromInt(1) def abs(x: T): T = if (lt(x, zero)) negate(x) else x - def signum(x: T): Int = + + @deprecated("use `sign` method instead", since = "2.13.0") def signum(x: T): Int = if (lt(x, zero)) -1 else if (gt(x, zero)) 1 else 0 + def sign(x: T): T = + if (lt(x, zero)) negate(one) + else if (gt(x, zero)) one + else zero - class Ops(lhs: T) { + class NumericOps(lhs: T) { def +(rhs: T) = plus(lhs, rhs) def -(rhs: T) = minus(lhs, rhs) def *(rhs: T) = times(lhs, rhs) - def unary_-() = negate(lhs) - def abs(): T = Numeric.this.abs(lhs) - def signum(): Int = Numeric.this.signum(lhs) - def toInt(): Int = Numeric.this.toInt(lhs) - def toLong(): Long = Numeric.this.toLong(lhs) - def toFloat(): Float = Numeric.this.toFloat(lhs) - def toDouble(): Double = Numeric.this.toDouble(lhs) + def unary_- = negate(lhs) + def abs: T = Numeric.this.abs(lhs) + @deprecated("use `sign` method instead", since = "2.13.0") def signum: Int = Numeric.this.signum(lhs) + def 
sign: T = Numeric.this.sign(lhs) + def toInt: Int = Numeric.this.toInt(lhs) + def toLong: Long = Numeric.this.toLong(lhs) + def toFloat: Float = Numeric.this.toFloat(lhs) + def toDouble: Double = Numeric.this.toDouble(lhs) } - implicit def mkNumericOps(lhs: T): Ops = new Ops(lhs) + implicit def mkNumericOps(lhs: T): NumericOps = new NumericOps(lhs) } diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala index 7e000f09de9c..fe3ed90453e0 100644 --- a/src/library/scala/math/Ordered.scala +++ b/src/library/scala/math/Ordered.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -55,7 +55,6 @@ import scala.language.implicitConversions * provide it yourself either when inheriting or instantiating. * * @see [[scala.math.Ordering]], [[scala.math.PartiallyOrdered]] - * @author Martin Odersky */ trait Ordered[A] extends Any with java.lang.Comparable[A] { diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index b3fe609ed564..5a34f1fe91a9 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,16 +14,18 @@ package scala package math import java.util.Comparator -import scala.language.{implicitConversions, higherKinds} + +import scala.language.implicitConversions +import scala.annotation.migration /** Ordering is a trait whose instances each represent a strategy for sorting * instances of a type. * * Ordering's companion object defines many implicit objects to deal with - * subtypes of AnyVal (e.g. Int, Double), String, and others. 
+ * subtypes of [[AnyVal]] (e.g. `Int`, `Double`), `String`, and others. * * To sort instances by one or more member variables, you can take advantage - * of these built-in orderings using Ordering.by and Ordering.on: + * of these built-in orderings using [[Ordering.by]] and [[Ordering.on]]: * * {{{ * import scala.util.Sorting @@ -36,9 +38,10 @@ import scala.language.{implicitConversions, higherKinds} * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1))) * }}} * - * An Ordering[T] is implemented by specifying compare(a:T, b:T), which - * decides how to order two instances a and b. Instances of Ordering[T] can be - * used by things like scala.util.Sorting to sort collections like Array[T]. + * An `Ordering[T]` is implemented by specifying the [[compare]] method, + * `compare(a: T, b: T): Int`, which decides how to order two instances + * `a` and `b`. Instances of `Ordering[T]` can be used by things like + * `scala.util.Sorting` to sort collections like `Array[T]`. * * For example: * @@ -50,32 +53,29 @@ import scala.language.{implicitConversions, higherKinds} * * // sort by age * object AgeOrdering extends Ordering[Person] { - * def compare(a:Person, b:Person) = a.age compare b.age + * def compare(a:Person, b:Person) = a.age.compare(b.age) * } * Sorting.quickSort(people)(AgeOrdering) * }}} * - * This trait and scala.math.Ordered both provide this same functionality, but - * in different ways. A type T can be given a single way to order itself by - * extending Ordered. Using Ordering, this same type may be sorted in many - * other ways. Ordered and Ordering both provide implicits allowing them to be + * This trait and [[scala.math.Ordered]] both provide this same functionality, but + * in different ways. A type `T` can be given a single way to order itself by + * extending `Ordered`. Using `Ordering`, this same type may be sorted in many + * other ways. `Ordered` and `Ordering` both provide implicits allowing them to be * used interchangeably. 
* - * You can import scala.math.Ordering.Implicits to gain access to other + * You can `import scala.math.Ordering.Implicits._` to gain access to other * implicit orderings. * - * @author Geoffrey Washburn - * @since 2.7 - * @see [[scala.math.Ordered]], [[scala.util.Sorting]] + * @see [[scala.math.Ordered]], [[scala.util.Sorting]], [[scala.math.Ordering.Implicits]] */ -@annotation.implicitNotFound(msg = "No implicit Ordering defined for ${T}.") trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable { outer => /** Returns whether a comparison between `x` and `y` is defined, and if so * the result of `compare(x, y)`. */ - def tryCompare(x: T, y: T) = Some(compare(x, y)) + def tryCompare(x: T, y: T): Some[Int] = Some(compare(x, y)) /** Returns an integer whose sign communicates how x compares to y. * @@ -103,14 +103,32 @@ trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializabl override def equiv(x: T, y: T): Boolean = compare(x, y) == 0 /** Return `x` if `x` >= `y`, otherwise `y`. */ - def max(x: T, y: T): T = if (gteq(x, y)) x else y + def max[U <: T](x: U, y: U): U = if (gteq(x, y)) x else y /** Return `x` if `x` <= `y`, otherwise `y`. */ - def min(x: T, y: T): T = if (lteq(x, y)) x else y + def min[U <: T](x: U, y: U): U = if (lteq(x, y)) x else y - /** Return the opposite ordering of this one. */ + /** Return the opposite ordering of this one. + * + * Implementations overriding this method MUST override [[isReverseOf]] + * as well if they change the behavior at all (for example, caching does + * not require overriding it). + */ override def reverse: Ordering[T] = new Ordering.Reverse[T](this) + /** Returns whether or not the other ordering is the opposite + * ordering of this one. + * + * Equivalent to `other == this.reverse`. + * + * Implementations should only override this method if they are overriding + * [[reverse]] as well. 
+ */ + def isReverseOf(other: Ordering[_]): Boolean = other match { + case that: Ordering.Reverse[_] => that.outer == this + case _ => false + } + /** Given f, a function from U into T, creates an Ordering[U] whose compare * function is equivalent to: * @@ -122,13 +140,60 @@ trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializabl def compare(x: U, y: U) = outer.compare(f(x), f(y)) } - /** This inner class defines comparison operators available for `T`. */ - class Ops(lhs: T) { - def <(rhs: T) = lt(lhs, rhs) - def <=(rhs: T) = lteq(lhs, rhs) - def >(rhs: T) = gt(lhs, rhs) - def >=(rhs: T) = gteq(lhs, rhs) - def equiv(rhs: T) = Ordering.this.equiv(lhs, rhs) + /** Creates an Ordering[T] whose compare function returns the + * result of this Ordering's compare function, if it is non-zero, + * or else the result of `other`s compare function. + * + * @example + * {{{ + * case class Pair(a: Int, b: Int) + * + * val pairOrdering = Ordering.by[Pair, Int](_.a) + * .orElse(Ordering.by[Pair, Int](_.b)) + * }}} + * + * @param other an Ordering to use if this Ordering returns zero + */ + def orElse(other: Ordering[T]): Ordering[T] = (x, y) => { + val res1 = outer.compare(x, y) + if (res1 != 0) res1 else other.compare(x, y) + } + + /** Given f, a function from T into S, creates an Ordering[T] whose compare + * function returns the result of this Ordering's compare function, + * if it is non-zero, or else a result equivalent to: + * + * {{{ + * Ordering[S].compare(f(x), f(y)) + * }}} + * + * This function is equivalent to passing the result of `Ordering.by(f)` + * to `orElse`. 
+ * + * @example + * {{{ + * case class Pair(a: Int, b: Int) + * + * val pairOrdering = Ordering.by[Pair, Int](_.a) + * .orElseBy[Int](_.b) + * }}} + */ + def orElseBy[S](f: T => S)(implicit ord: Ordering[S]): Ordering[T] = (x, y) => { + val res1 = outer.compare(x, y) + if (res1 != 0) res1 else ord.compare(f(x), f(y)) + } + + /** This inner class defines comparison operators available for `T`. + * + * It can't extend `AnyVal` because it is not a top-level class + * or a member of a statically accessible object. + */ + class OrderingOps(lhs: T) { + def <(rhs: T): Boolean = lt(lhs, rhs) + def <=(rhs: T): Boolean = lteq(lhs, rhs) + def >(rhs: T): Boolean = gt(lhs, rhs) + def >=(rhs: T): Boolean = gteq(lhs, rhs) + def equiv(rhs: T): Boolean = Ordering.this.equiv(lhs, rhs) def max(rhs: T): T = Ordering.this.max(lhs, rhs) def min(rhs: T): T = Ordering.this.min(lhs, rhs) } @@ -136,19 +201,23 @@ trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializabl /** This implicit method augments `T` with the comparison operators defined * in `scala.math.Ordering.Ops`. */ - implicit def mkOrderingOps(lhs: T): Ops = new Ops(lhs) + implicit def mkOrderingOps(lhs: T): OrderingOps = new OrderingOps(lhs) } trait LowPriorityOrderingImplicits { + + type AsComparable[A] = A => Comparable[_ >: A] + /** This would conflict with all the nice implicit Orderings * available, but thanks to the magic of prioritized implicits * via subclassing we can make `Ordered[A] => Ordering[A]` only * turn up if nothing else works. Since `Ordered[A]` extends * `Comparable[A]` anyway, we can throw in some Java interop too. 
*/ - implicit def ordered[A <% Comparable[A]]: Ordering[A] = new Ordering[A] { - def compare(x: A, y: A): Int = x compareTo y + implicit def ordered[A](implicit asComparable: AsComparable[A]): Ordering[A] = new Ordering[A] { + def compare(x: A, y: A): Int = asComparable(x).compareTo(y) } + implicit def comparatorToOrdering[A](implicit cmp: Comparator[A]): Ordering[A] = new Ordering[A] { def compare(x: A, y: A) = cmp.compare(x, y) } @@ -164,29 +233,38 @@ object Ordering extends LowPriorityOrderingImplicits { private final val optionSeed = 43 private final val iterableSeed = 47 - def apply[T](implicit ord: Ordering[T]) = ord + @inline def apply[T](implicit ord: Ordering[T]) = ord + + /** An ordering which caches the value of its reverse. */ + sealed trait CachedReverse[T] extends Ordering[T] { + private[this] val _reverse = super.reverse + override final def reverse: Ordering[T] = _reverse + override final def isReverseOf(other: Ordering[_]): Boolean = other eq _reverse + } /** A reverse ordering */ - private final class Reverse[T](private val outer: Ordering[T]) extends Ordering[T] { - override def reverse: Ordering[T] = outer + private final class Reverse[T](private[Ordering] val outer: Ordering[T]) extends Ordering[T] { + override def reverse: Ordering[T] = outer + override def isReverseOf(other: Ordering[_]): Boolean = other == outer + def compare(x: T, y: T): Int = outer.compare(y, x) override def lteq(x: T, y: T): Boolean = outer.lteq(y, x) override def gteq(x: T, y: T): Boolean = outer.gteq(y, x) override def lt(x: T, y: T): Boolean = outer.lt(y, x) override def gt(x: T, y: T): Boolean = outer.gt(y, x) override def equiv(x: T, y: T): Boolean = outer.equiv(y, x) - override def max(x: T, y: T): T = outer.min(x, y) - override def min(x: T, y: T): T = outer.max(x, y) + override def max[U <: T](x: U, y: U): U = outer.min(x, y) + override def min[U <: T](x: U, y: U): U = outer.max(x, y) override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if 
this eq that => true - case that: Reverse[T] => this.outer == that.outer + case that: Reverse[_] => this.outer == that.outer case _ => false } override def hashCode(): Int = outer.hashCode() * reverseSeed } - private final val IntReverse: Ordering[Int] = new Reverse(Ordering.Int) + @SerialVersionUID(-2996748994664583574L) private final class IterableOrdering[CC[X] <: Iterable[X], T](private val ord: Ordering[T]) extends Ordering[CC[T]] { def compare(x: CC[T], y: CC[T]): Int = { val xe = x.iterator @@ -202,7 +280,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: IterableOrdering[CC, T] => this.ord == that.ord + case that: IterableOrdering[_, _] => this.ord == that.ord case _ => false } override def hashCode(): Int = ord.hashCode() * iterableSeed @@ -212,7 +290,10 @@ object Ordering extends LowPriorityOrderingImplicits { /** Not in the standard scope due to the potential for divergence: * For instance `implicitly[Ordering[Any]]` diverges in its presence. 
*/ - implicit def seqDerivedOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] = + implicit def seqOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] = + new IterableOrdering[CC, T](ord) + + implicit def sortedSetOrdering[CC[X] <: scala.collection.SortedSet[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] = new IterableOrdering[CC, T](ord) /** This implicit creates a conversion from any value for which an @@ -223,7 +304,7 @@ object Ordering extends LowPriorityOrderingImplicits { * def lessThan[T: Ordering](x: T, y: T) = x < y * }}} */ - implicit def infixOrderingOps[T](x: T)(implicit ord: Ordering[T]): Ordering[T]#Ops = new ord.Ops(x) + implicit def infixOrderingOps[T](x: T)(implicit ord: Ordering[T]): Ordering[T]#OrderingOps = new ord.OrderingOps(x) } /** An object containing implicits which are not in the default scope. */ @@ -260,80 +341,265 @@ object Ordering extends LowPriorityOrderingImplicits { trait UnitOrdering extends Ordering[Unit] { def compare(x: Unit, y: Unit) = 0 } + @SerialVersionUID(4089257611611206746L) implicit object Unit extends UnitOrdering trait BooleanOrdering extends Ordering[Boolean] { - def compare(x: Boolean, y: Boolean) = java.lang.Boolean.compare(x, y) + def compare(x: Boolean, y: Boolean): Int = java.lang.Boolean.compare(x, y) } + @SerialVersionUID(-94703182178890445L) implicit object Boolean extends BooleanOrdering trait ByteOrdering extends Ordering[Byte] { - def compare(x: Byte, y: Byte) = java.lang.Byte.compare(x, y) + def compare(x: Byte, y: Byte): Int = java.lang.Byte.compare(x, y) } + @SerialVersionUID(-2268545360148786406L) implicit object Byte extends ByteOrdering trait CharOrdering extends Ordering[Char] { - def compare(x: Char, y: Char) = java.lang.Character.compare(x, y) + def compare(x: Char, y: Char): Int = java.lang.Character.compare(x, y) } + @SerialVersionUID(2588141633104296698L) implicit object Char extends CharOrdering trait 
ShortOrdering extends Ordering[Short] { - def compare(x: Short, y: Short) = java.lang.Short.compare(x, y) + def compare(x: Short, y: Short): Int = java.lang.Short.compare(x, y) } + @SerialVersionUID(4919657051864630912L) implicit object Short extends ShortOrdering trait IntOrdering extends Ordering[Int] { - def compare(x: Int, y: Int) = java.lang.Integer.compare(x, y) - override def reverse: Ordering[Int] = IntReverse + def compare(x: Int, y: Int): Int = java.lang.Integer.compare(x, y) } - implicit object Int extends IntOrdering + @SerialVersionUID(-8412871093094815037L) + implicit object Int extends IntOrdering with CachedReverse[Int] trait LongOrdering extends Ordering[Long] { - def compare(x: Long, y: Long) = java.lang.Long.compare(x, y) + def compare(x: Long, y: Long): Int = java.lang.Long.compare(x, y) } + @SerialVersionUID(-5231423581640563981L) implicit object Long extends LongOrdering - trait FloatOrdering extends Ordering[Float] { - def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) + /** `Ordering`s for `Float`s. + * + * The default extends `Ordering.Float.TotalOrdering`. + * + * `Ordering.Float.TotalOrdering` uses the `java.lang.Float.compare` semantics for all operations. + * Scala also provides the `Ordering.Float.IeeeOrdering` semantics. Which uses the IEEE 754 semantics + * for float ordering. + * + * Historically: `IeeeOrdering` was used in Scala from 2.10.x through 2.12.x. This changed in 2.13.0 + * to `TotalOrdering`. + * + * Prior to Scala 2.10.0, the `Ordering` instance used semantics + * consistent with `java.lang.Float.compare`. + * + * Scala 2.10.0 changed the implementation of `lt`, `equiv`, `min`, etc., to be + * IEEE 754 compliant, while keeping the `compare` method NOT compliant, + * creating an internally inconsistent instance. IEEE 754 specifies that + * `0.0F == -0.0F`. 
In addition, it requires all comparisons with `Float.NaN` return + * `false` thus `0.0F < Float.NaN`, `0.0F > Float.NaN`, and + * `Float.NaN == Float.NaN` all yield `false`, analogous `None` in `flatMap`. + * + * + * {{{ + * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).sorted // List(-Infinity, 0.0, 1.0, NaN) + * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).min // -Infinity + * implicitly[Ordering[Float]].lt(0.0F, 0.0F / 0.0F) // true + * { + * import Ordering.Float.IeeeOrdering + * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).sorted // List(-Infinity, 0.0, 1.0, NaN) + * List(0.0F, 1.0F, 0.0F / 0.0F, -1.0F / 0.0F).min // NaN + * implicitly[Ordering[Float]].lt(0.0F, 0.0F / 0.0F) // false + * } + * }}} + * + * @define floatOrdering Because the behavior of `Float`s specified by IEEE is + * not consistent with a total ordering when dealing with + * `NaN`, there are two orderings defined for `Float`: + * `TotalOrdering`, which is consistent with a total + * ordering, and `IeeeOrdering`, which is consistent + * as much as possible with IEEE spec and floating point + * operations defined in [[scala.math]]. + */ + object Float { + /** An ordering for `Float`s which is a fully consistent total ordering, + * and treats `NaN` as larger than all other `Float` values; it behaves + * the same as [[java.lang.Float.compare]]. + * + * $floatOrdering + * + * This ordering may be preferable for sorting collections. 
+ * + * @see [[IeeeOrdering]] + */ + trait TotalOrdering extends Ordering[Float] { + def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) + } + @SerialVersionUID(2951539161283192433L) + implicit object TotalOrdering extends TotalOrdering - override def lteq(x: Float, y: Float): Boolean = x <= y - override def gteq(x: Float, y: Float): Boolean = x >= y - override def lt(x: Float, y: Float): Boolean = x < y - override def gt(x: Float, y: Float): Boolean = x > y - override def equiv(x: Float, y: Float): Boolean = x == y - override def max(x: Float, y: Float): Float = math.max(x, y) - override def min(x: Float, y: Float): Float = math.min(x, y) + /** An ordering for `Float`s which is consistent with IEEE specifications + * whenever possible. + * + * - `lt`, `lteq`, `equiv`, `gteq` and `gt` are consistent with primitive + * comparison operations for `Float`s, and return `false` when called with + * `NaN`. + * - `min` and `max` are consistent with `math.min` and `math.max`, and + * return `NaN` when called with `NaN` as either argument. + * - `compare` behaves the same as [[java.lang.Float.compare]]. + * + * $floatOrdering + * + * This ordering may be preferable for numeric contexts. 
+ * + * @see [[TotalOrdering]] + */ + trait IeeeOrdering extends Ordering[Float] { + def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) + + override def lteq(x: Float, y: Float): Boolean = x <= y + override def gteq(x: Float, y: Float): Boolean = x >= y + override def lt(x: Float, y: Float): Boolean = x < y + override def gt(x: Float, y: Float): Boolean = x > y + override def equiv(x: Float, y: Float): Boolean = x == y + override def max[U <: Float](x: U, y: U): U = math.max(x, y).asInstanceOf[U] + override def min[U <: Float](x: U, y: U): U = math.min(x, y).asInstanceOf[U] + } + @SerialVersionUID(2142189527751553605L) + implicit object IeeeOrdering extends IeeeOrdering } - implicit object Float extends FloatOrdering - - trait DoubleOrdering extends Ordering[Double] { - def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) + @migration( + " The default implicit ordering for floats now maintains consistency\n" + + " between its `compare` method and its `lt`, `min`, `equiv`, etc., methods,\n" + + " which means nonconforming to IEEE 754's behavior for -0.0F and NaN.\n" + + " The sort order of floats remains the same, however, with NaN at the end.\n" + + " Import Ordering.Float.IeeeOrdering to recover the previous behavior.\n" + + " See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Float$.html.", "2.13.0") + @SerialVersionUID(-8500693657289762132L) + implicit object DeprecatedFloatOrdering extends Float.TotalOrdering + + /** `Ordering`s for `Double`s. + * + * The behavior of the comparison operations provided by the default (implicit) + * ordering on `Double` changed in 2.10.0 and 2.13.0. + * Prior to Scala 2.10.0, the `Ordering` instance used semantics + * consistent with `java.lang.Double.compare`. + * + * Scala 2.10.0 changed the implementation of `lt`, `equiv`, `min`, etc., to be + * IEEE 754 compliant, while keeping the `compare` method NOT compliant, + * creating an internally inconsistent instance. 
IEEE 754 specifies that + * `0.0 == -0.0`. In addition, it requires all comparisons with `Double.NaN` return + * `false` thus `0.0 < Double.NaN`, `0.0 > Double.NaN`, and + * `Double.NaN == Double.NaN` all yield `false`, analogous `None` in `flatMap`. + * + * Recognizing the limitation of the IEEE 754 semantics in terms of ordering, + * Scala 2.13.0 created two instances: `Ordering.Double.IeeeOrdering`, which retains + * the IEEE 754 semantics from Scala 2.12.x, and `Ordering.Double.TotalOrdering`, + * which brings back the `java.lang.Double.compare` semantics for all operations. + * The default extends `TotalOrdering`. + * + * {{{ + * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).sorted // List(-Infinity, 0.0, 1.0, NaN) + * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).min // -Infinity + * implicitly[Ordering[Double]].lt(0.0, 0.0 / 0.0) // true + * { + * import Ordering.Double.IeeeOrdering + * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).sorted // List(-Infinity, 0.0, 1.0, NaN) + * List(0.0, 1.0, 0.0 / 0.0, -1.0 / 0.0).min // NaN + * implicitly[Ordering[Double]].lt(0.0, 0.0 / 0.0) // false + * } + * }}} + * + * @define doubleOrdering Because the behavior of `Double`s specified by IEEE is + * not consistent with a total ordering when dealing with + * `NaN`, there are two orderings defined for `Double`: + * `TotalOrdering`, which is consistent with a total + * ordering, and `IeeeOrdering`, which is consistent + * as much as possible with IEEE spec and floating point + * operations defined in [[scala.math]]. + */ + object Double { + /** An ordering for `Double`s which is a fully consistent total ordering, + * and treats `NaN` as larger than all other `Double` values; it behaves + * the same as [[java.lang.Double.compare]]. + * + * $doubleOrdering + * + * This ordering may be preferable for sorting collections. 
+ * + * @see [[IeeeOrdering]] + */ + trait TotalOrdering extends Ordering[Double] { + def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) + } + @SerialVersionUID(-831119229746134011L) + implicit object TotalOrdering extends TotalOrdering - override def lteq(x: Double, y: Double): Boolean = x <= y - override def gteq(x: Double, y: Double): Boolean = x >= y - override def lt(x: Double, y: Double): Boolean = x < y - override def gt(x: Double, y: Double): Boolean = x > y - override def equiv(x: Double, y: Double): Boolean = x == y - override def max(x: Double, y: Double): Double = math.max(x, y) - override def min(x: Double, y: Double): Double = math.min(x, y) + /** An ordering for `Double`s which is consistent with IEEE specifications + * whenever possible. + * + * - `lt`, `lteq`, `equiv`, `gteq` and `gt` are consistent with primitive + * comparison operations for `Double`s, and return `false` when called with + * `NaN`. + * - `min` and `max` are consistent with `math.min` and `math.max`, and + * return `NaN` when called with `NaN` as either argument. + * - `compare` behaves the same as [[java.lang.Double.compare]]. + * + * $doubleOrdering + * + * This ordering may be preferable for numeric contexts. 
+ * + * @see [[TotalOrdering]] + */ + trait IeeeOrdering extends Ordering[Double] { + def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) + + override def lteq(x: Double, y: Double): Boolean = x <= y + override def gteq(x: Double, y: Double): Boolean = x >= y + override def lt(x: Double, y: Double): Boolean = x < y + override def gt(x: Double, y: Double): Boolean = x > y + override def equiv(x: Double, y: Double): Boolean = x == y + override def max[U <: Double](x: U, y: U): U = math.max(x, y).asInstanceOf[U] + override def min[U <: Double](x: U, y: U): U = math.min(x, y).asInstanceOf[U] + } + @SerialVersionUID(5722631152457877238L) + implicit object IeeeOrdering extends IeeeOrdering } - implicit object Double extends DoubleOrdering + @migration( + " The default implicit ordering for doubles now maintains consistency\n" + + " between its `compare` method and its `lt`, `min`, `equiv`, etc., methods,\n" + + " which means nonconforming to IEEE 754's behavior for -0.0 and NaN.\n" + + " The sort order of doubles remains the same, however, with NaN at the end.\n" + + " Import Ordering.Double.IeeeOrdering to recover the previous behavior.\n" + + " See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Double$.html.", "2.13.0") + @SerialVersionUID(-7340686892557971538L) + implicit object DeprecatedDoubleOrdering extends Double.TotalOrdering trait BigIntOrdering extends Ordering[BigInt] { def compare(x: BigInt, y: BigInt) = x.compare(y) } + @SerialVersionUID(-3075297647817530785L) implicit object BigInt extends BigIntOrdering trait BigDecimalOrdering extends Ordering[BigDecimal] { def compare(x: BigDecimal, y: BigDecimal) = x.compare(y) } + @SerialVersionUID(-833457937756812905L) implicit object BigDecimal extends BigDecimalOrdering trait StringOrdering extends Ordering[String] { def compare(x: String, y: String) = x.compareTo(y) } + @SerialVersionUID(1302240016074071079L) implicit object String extends StringOrdering + trait SymbolOrdering 
extends Ordering[Symbol] { + def compare(x: Symbol, y: Symbol): Int = x.name.compareTo(y.name) + } + @SerialVersionUID(1996702162912307637L) + implicit object Symbol extends SymbolOrdering + trait OptionOrdering[T] extends Ordering[Option[T]] { def optionOrdering: Ordering[T] def compare(x: Option[T], y: Option[T]) = (x, y) match { @@ -345,33 +611,42 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: OptionOrdering[T] => this.optionOrdering == that.optionOrdering + case that: OptionOrdering[_] => this.optionOrdering == that.optionOrdering case _ => false } override def hashCode(): Int = optionOrdering.hashCode() * optionSeed } - implicit def Option[T](implicit ord: Ordering[T]): Ordering[Option[T]] = - new OptionOrdering[T] { val optionOrdering = ord } + implicit def Option[T](implicit ord: Ordering[T]): Ordering[Option[T]] = { + @SerialVersionUID(6958068162830323876L) + class O extends OptionOrdering[T] { val optionOrdering = ord } + new O() + } + /** @deprecated Iterables are not guaranteed to have a consistent order, so the `Ordering` + * returned by this method may not be stable or meaningful. If you are using a type + * with a consistent order (such as `Seq`), use its `Ordering` (found in the + * [[Implicits]] object) instead. + */ + @deprecated("Iterables are not guaranteed to have a consistent order; if using a type with a " + + "consistent order (e.g. 
Seq), use its Ordering (found in the Ordering.Implicits object)", since = "2.13.0") implicit def Iterable[T](implicit ord: Ordering[T]): Ordering[Iterable[T]] = new IterableOrdering[Iterable, T](ord) implicit def Tuple2[T1, T2](implicit ord1: Ordering[T1], ord2: Ordering[T2]): Ordering[(T1, T2)] = new Tuple2Ordering(ord1, ord2) + @SerialVersionUID(4945084135299531202L) private[this] final class Tuple2Ordering[T1, T2](private val ord1: Ordering[T1], private val ord2: Ordering[T2]) extends Ordering[(T1, T2)] { def compare(x: (T1, T2), y: (T1, T2)): Int = { val compare1 = ord1.compare(x._1, y._1) if (compare1 != 0) return compare1 - val compare2 = ord2.compare(x._2, y._2) - if (compare2 != 0) return compare2 - 0 + ord2.compare(x._2, y._2) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple2Ordering[T1, T2] => + case that: Tuple2Ordering[_, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 case _ => false @@ -382,6 +657,7 @@ object Ordering extends LowPriorityOrderingImplicits { implicit def Tuple3[T1, T2, T3](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3]) : Ordering[(T1, T2, T3)] = new Tuple3Ordering(ord1, ord2, ord3) + @SerialVersionUID(-5367223704121832335L) private[this] final class Tuple3Ordering[T1, T2, T3](private val ord1: Ordering[T1], private val ord2: Ordering[T2], private val ord3: Ordering[T3]) extends Ordering[(T1, T2, T3)] { @@ -390,14 +666,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare1 != 0) return compare1 val compare2 = ord2.compare(x._2, y._2) if (compare2 != 0) return compare2 - val compare3 = ord3.compare(x._3, y._3) - if (compare3 != 0) return compare3 - 0 + ord3.compare(x._3, y._3) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple3Ordering[T1, T2, T3] => + case that: Tuple3Ordering[_, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 
== that.ord3 @@ -409,6 +683,7 @@ object Ordering extends LowPriorityOrderingImplicits { implicit def Tuple4[T1, T2, T3, T4](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4]) : Ordering[(T1, T2, T3, T4)] = new Tuple4Ordering(ord1, ord2, ord3, ord4) + @SerialVersionUID(-6055313861145218178L) private[this] final class Tuple4Ordering[T1, T2, T3, T4](private val ord1: Ordering[T1], private val ord2: Ordering[T2], private val ord3: Ordering[T3], @@ -421,14 +696,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare2 != 0) return compare2 val compare3 = ord3.compare(x._3, y._3) if (compare3 != 0) return compare3 - val compare4 = ord4.compare(x._4, y._4) - if (compare4 != 0) return compare4 - 0 + ord4.compare(x._4, y._4) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple4Ordering[T1, T2, T3, T4] => + case that: Tuple4Ordering[_, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -441,6 +714,7 @@ object Ordering extends LowPriorityOrderingImplicits { implicit def Tuple5[T1, T2, T3, T4, T5](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5]): Ordering[(T1, T2, T3, T4, T5)] = new Tuple5Ordering(ord1, ord2, ord3, ord4, ord5) + @SerialVersionUID(-5517329921227646061L) private[this] final class Tuple5Ordering[T1, T2, T3, T4, T5](private val ord1: Ordering[T1], private val ord2: Ordering[T2], private val ord3: Ordering[T3], @@ -456,14 +730,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare3 != 0) return compare3 val compare4 = ord4.compare(x._4, y._4) if (compare4 != 0) return compare4 - val compare5 = ord5.compare(x._5, y._5) - if (compare5 != 0) return compare5 - 0 + ord5.compare(x._5, y._5) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple5Ordering[T1, T2, T3, 
T4, T5] => + case that: Tuple5Ordering[_, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -474,6 +746,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5).hashCode() } + @SerialVersionUID(3045467524192969060L) implicit def Tuple6[T1, T2, T3, T4, T5, T6](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6]): Ordering[(T1, T2, T3, T4, T5, T6)] = new Tuple6Ordering(ord1, ord2, ord3, ord4, ord5, ord6) @@ -495,14 +768,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare4 != 0) return compare4 val compare5 = ord5.compare(x._5, y._5) if (compare5 != 0) return compare5 - val compare6 = ord6.compare(x._6, y._6) - if (compare6 != 0) return compare6 - 0 + ord6.compare(x._6, y._6) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple6Ordering[T1, T2, T3, T4, T5, T6] => + case that: Tuple6Ordering[_, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -517,6 +788,7 @@ object Ordering extends LowPriorityOrderingImplicits { implicit def Tuple7[T1, T2, T3, T4, T5, T6, T7](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7]): Ordering[(T1, T2, T3, T4, T5, T6, T7)] = new Tuple7Ordering(ord1, ord2, ord3, ord4, ord5, ord6, ord7) + @SerialVersionUID(1253188205893682451L) private[this] final class Tuple7Ordering[T1, T2, T3, T4, T5, T6, T7](private val ord1: Ordering[T1], private val ord2: Ordering[T2], private val ord3: Ordering[T3], @@ -538,14 +810,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare5 != 0) return compare5 val compare6 = ord6.compare(x._6, y._6) if (compare6 != 0) return compare6 - val compare7 = ord7.compare(x._7, y._7) - if 
(compare7 != 0) return compare7 - 0 + ord7.compare(x._7, y._7) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple7Ordering[T1, T2, T3, T4, T5, T6, T7] => + case that: Tuple7Ordering[_, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -558,6 +828,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5, ord6, ord7).hashCode() } + @SerialVersionUID(4003095353309354068L) implicit def Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7], ord8: Ordering[T8]): Ordering[(T1, T2, T3, T4, T5, T6, T7, T8)] = new Tuple8Ordering(ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8) @@ -585,14 +856,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare6 != 0) return compare6 val compare7 = ord7.compare(x._7, y._7) if (compare7 != 0) return compare7 - val compare8 = ord8.compare(x._8, y._8) - if (compare8 != 0) return compare8 - 0 + ord8.compare(x._8, y._8) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple8Ordering[T1, T2, T3, T4, T5, T6, T7, T8] => + case that: Tuple8Ordering[_, _, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -606,6 +875,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def hashCode(): Int = (ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8).hashCode() } + @SerialVersionUID(8185342054829975001L) implicit def Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit ord1: Ordering[T1], ord2: Ordering[T2], ord3: Ordering[T3], ord4: Ordering[T4], ord5: Ordering[T5], ord6: Ordering[T6], ord7: Ordering[T7], ord8 : Ordering[T8], ord9: Ordering[T9]): Ordering[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] = new 
Tuple9Ordering(ord1, ord2, ord3, ord4, ord5, ord6, ord7, ord8, ord9) @@ -636,14 +906,12 @@ object Ordering extends LowPriorityOrderingImplicits { if (compare7 != 0) return compare7 val compare8 = ord8.compare(x._8, y._8) if (compare8 != 0) return compare8 - val compare9 = ord9.compare(x._9, y._9) - if (compare9 != 0) return compare9 - 0 + ord9.compare(x._9, y._9) } override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple9Ordering[T1, T2, T3, T4, T5, T6, T7, T8, T9] => + case that: Tuple9Ordering[_, _, _, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala index d7bc97d2cc86..e8ea9d355344 100644 --- a/src/library/scala/math/PartialOrdering.scala +++ b/src/library/scala/math/PartialOrdering.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,7 +18,7 @@ package math * of partial ordering on some type. This trait is for representing the * latter. * - * A [[http://en.wikipedia.org/wiki/Partially_ordered_set partial ordering]] is a + * A [[https://en.wikipedia.org/wiki/Partially_ordered_set partial ordering]] is a * binary relation on a type `T`, exposed as the `lteq` method of this trait. * This relation must be: * @@ -31,14 +31,11 @@ package math * for any `x`, `y`, and `z` of type `T`. * * Additionally, a partial ordering induces an - * [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] + * [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] * on a type `T`: `x` and `y` of type `T` are equivalent if and only if * `lteq(x, y) && lteq(y, x) == '''true'''`. 
This equivalence relation is * exposed as the `equiv` method, inherited from the * [[scala.math.Equiv Equiv]] trait. - * - * @author Geoffrey Washburn - * @since 2.7 */ trait PartialOrdering[T] extends Equiv[T] { @@ -85,3 +82,7 @@ trait PartialOrdering[T] extends Equiv[T] { override def equiv(x: T, y: T) = outer.equiv(y, x) } } + +object PartialOrdering { + @inline def apply[T](implicit ev: PartialOrdering[T]): PartialOrdering[T] = ev +} diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala index 05d52d42a199..b955879ae0f1 100644 --- a/src/library/scala/math/PartiallyOrdered.scala +++ b/src/library/scala/math/PartiallyOrdered.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,10 +14,10 @@ package scala package math /** A class for partially ordered data. - * - * @author Martin Odersky */ -trait PartiallyOrdered[+A] { +trait PartiallyOrdered[+A] extends Any { + + type AsPartiallyOrdered[B] = B => PartiallyOrdered[B] /** Result of comparing `'''this'''` with operand `that`. * Returns `None` if operands are not comparable. 
@@ -26,24 +26,27 @@ trait PartiallyOrdered[+A] { * - `x == 0` iff `'''this''' == that` * - `x > 0` iff `'''this''' > that` */ - def tryCompareTo [B >: A <% PartiallyOrdered[B]](that: B): Option[Int] + def tryCompareTo [B >: A: AsPartiallyOrdered](that: B): Option[Int] - def < [B >: A <% PartiallyOrdered[B]](that: B): Boolean = + def < [B >: A: AsPartiallyOrdered](that: B): Boolean = (this tryCompareTo that) match { case Some(x) if x < 0 => true case _ => false } - def > [B >: A <% PartiallyOrdered[B]](that: B): Boolean = + + def > [B >: A: AsPartiallyOrdered](that: B): Boolean = (this tryCompareTo that) match { case Some(x) if x > 0 => true case _ => false } - def <= [B >: A <% PartiallyOrdered[B]](that: B): Boolean = + + def <= [B >: A: AsPartiallyOrdered](that: B): Boolean = (this tryCompareTo that) match { case Some(x) if x <= 0 => true case _ => false } - def >= [B >: A <% PartiallyOrdered[B]](that: B): Boolean = + + def >= [B >: A: AsPartiallyOrdered](that: B): Boolean = (this tryCompareTo that) match { case Some(x) if x >= 0 => true case _ => false diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java index b81745574287..5ed76ec3fb22 100644 --- a/src/library/scala/math/ScalaNumber.java +++ b/src/library/scala/math/ScalaNumber.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,9 +13,6 @@ package scala.math; /** A marker class for Number types introduced by Scala - * @author Martin Odersky, Paul Phillips - * @version 2.8 - * @since 2.8 */ public abstract class ScalaNumber extends java.lang.Number { protected abstract boolean isWhole(); diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala index 81e6b8f3d5b2..a3fa90c98c9d 100644 --- a/src/library/scala/math/ScalaNumericConversions.scala +++ b/src/library/scala/math/ScalaNumericConversions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,7 +17,7 @@ package math * extend ScalaNumber (which excludes value classes.) */ trait ScalaNumericConversions extends ScalaNumber with ScalaNumericAnyConversions { - def underlying(): Object + def underlying: Object } /** Conversions which present a consistent conversion interface @@ -25,50 +25,49 @@ trait ScalaNumericConversions extends ScalaNumber with ScalaNumericAnyConversion */ trait ScalaNumericAnyConversions extends Any { /** @return `'''true'''` if this number has no decimal component, `'''false'''` otherwise. */ - def isWhole(): Boolean - def underlying(): Any + def isWhole: Boolean - def byteValue(): Byte - def shortValue(): Short - def intValue(): Int - def longValue(): Long - def floatValue(): Float - def doubleValue(): Double + def byteValue: Byte + def shortValue: Short + def intValue: Int + def longValue: Long + def floatValue: Float + def doubleValue: Double /** Returns the value of this as a [[scala.Char]]. This may involve * rounding or truncation. */ - def toChar = intValue().toChar + def toChar = intValue.toChar /** Returns the value of this as a [[scala.Byte]]. This may involve * rounding or truncation. 
*/ - def toByte = byteValue() + def toByte = byteValue /** Returns the value of this as a [[scala.Short]]. This may involve * rounding or truncation. */ - def toShort = shortValue() + def toShort = shortValue /** Returns the value of this as an [[scala.Int]]. This may involve * rounding or truncation. */ - def toInt = intValue() + def toInt = intValue /** Returns the value of this as a [[scala.Long]]. This may involve * rounding or truncation. */ - def toLong = longValue() + def toLong = longValue /** Returns the value of this as a [[scala.Float]]. This may involve * rounding or truncation. */ - def toFloat = floatValue() + def toFloat = floatValue /** Returns the value of this as a [[scala.Double]]. This may involve * rounding or truncation. */ - def toDouble = doubleValue() + def toDouble = doubleValue /** Returns `true` iff this has a zero fractional part, and is within the * range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`. @@ -90,7 +89,7 @@ trait ScalaNumericAnyConversions extends Any { */ def isValidChar = isWhole && (toInt >= Char.MinValue && toInt <= Char.MaxValue) - protected def unifiedPrimitiveHashcode() = { + protected def unifiedPrimitiveHashcode = { val lv = toLong if (lv >= Int.MinValue && lv <= Int.MaxValue) lv.toInt else lv.## diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala index 31d196eda93c..dbfde894aa0c 100644 --- a/src/library/scala/math/package.scala +++ b/src/library/scala/math/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,49 +24,64 @@ package scala * @groupprio math-const 10 * * @groupname minmax Minimum and Maximum - * @groupdesc minmax Find the min or max of two numbers. Note: [[scala.collection.TraversableOnce]] has + * @groupdesc minmax Find the min or max of two numbers. 
Note: [[scala.collection.IterableOnceOps]] has * min and max methods which determine the min or max of a collection. * @groupprio minmax 20 * * @groupname rounding Rounding * @groupprio rounding 30 * + * @groupname scaling Scaling + * @groupdesc scaling Scaling with rounding guarantees + * @groupprio scaling 40 + * * @groupname explog Exponential and Logarithmic - * @groupprio explog 40 + * @groupprio explog 50 * * @groupname trig Trigonometric * @groupdesc trig Arguments in radians - * @groupprio trig 50 + * @groupprio trig 60 * * @groupname angle-conversion Angular Measurement Conversion - * @groupprio angle-conversion 60 + * @groupprio angle-conversion 70 * * @groupname hyperbolic Hyperbolic - * @groupprio hyperbolic 70 + * @groupprio hyperbolic 80 * * @groupname abs Absolute Values * @groupdesc abs Determine the magnitude of a value by discarding the sign. Results are >= 0. - * @groupprio abs 80 + * @groupprio abs 90 * - * @groupname signum Signs - * @groupdesc signum Extract the sign of a value. Results are -1, 0 or 1. - * Note that these are not pure forwarders to the java versions. - * In particular, the return type of java.lang.Long.signum is Int, - * but here it is widened to Long so that each overloaded variant + * @groupname signs Signs + * @groupdesc signs For `signum` extract the sign of a value. Results are -1, 0 or 1. + * Note the `signum` methods are not pure forwarders to the Java versions. + * In particular, the return type of `java.lang.Long.signum` is `Int`, + * but here it is widened to `Long` so that each overloaded variant * will return the same numeric type it is passed. 
- * @groupprio signum 90 + * @groupprio signs 100 * * @groupname root-extraction Root Extraction - * @groupprio root-extraction 100 + * @groupprio root-extraction 110 * * @groupname polar-coords Polar Coordinates - * @groupprio polar-coords 110 + * @groupprio polar-coords 120 * * @groupname ulp Unit of Least Precision - * @groupprio ulp 120 + * @groupprio ulp 130 * * @groupname randomisation Pseudo Random Number Generation - * @groupprio randomisation 130 + * @groupprio randomisation 140 + * + * @groupname exact Exact Arithmetic + * @groupdesc exact Integral addition, multiplication, stepping and conversion throwing ArithmeticException instead of underflowing or overflowing + * @groupprio exact 150 + * + * @groupname modquo Modulus and Quotient + * @groupdesc modquo Calculate quotient values by rounding to negative infinity + * @groupprio modquo 160 + * + * @groupname adjacent-float Adjacent Floats + * @groupprio adjacent-float 170 */ package object math { /** The `Double` value that is closer than any other to `e`, the base of @@ -210,19 +225,61 @@ package object math { /** @group minmax */ def min(x: Double, y: Double): Double = java.lang.Math.min(x, y) - /** @group signum + /** @group signs * @note Forwards to [[java.lang.Integer]] */ def signum(x: Int): Int = java.lang.Integer.signum(x) - /** @group signum + /** @group signs * @note Forwards to [[java.lang.Long]] */ def signum(x: Long): Long = java.lang.Long.signum(x) - /** @group signum */ + /** @group signs */ def signum(x: Float): Float = java.lang.Math.signum(x) - /** @group signum */ + /** @group signs */ def signum(x: Double): Double = java.lang.Math.signum(x) + /** @group modquo */ + def floorDiv(x: Int, y: Int): Int = java.lang.Math.floorDiv(x, y) + + /** @group modquo */ + def floorDiv(x: Long, y: Long): Long = java.lang.Math.floorDiv(x, y) + + /** @group modquo */ + def floorMod(x: Int, y: Int): Int = java.lang.Math.floorMod(x, y) + + /** @group modquo */ + def floorMod(x: Long, y: Long): Long = 
java.lang.Math.floorMod(x, y) + + /** @group signs */ + def copySign(magnitude: Double, sign: Double): Double = java.lang.Math.copySign(magnitude, sign) + + /** @group signs */ + def copySign(magnitude: Float, sign: Float): Float = java.lang.Math.copySign(magnitude, sign) + + /** @group adjacent-float */ + def nextAfter(start: Double, direction: Double): Double = java.lang.Math.nextAfter(start, direction) + + /** @group adjacent-float */ + def nextAfter(start: Float, direction: Double): Float = java.lang.Math.nextAfter(start, direction) + + /** @group adjacent-float */ + def nextUp(d: Double): Double = java.lang.Math.nextUp(d) + + /** @group adjacent-float */ + def nextUp(f: Float): Float = java.lang.Math.nextUp(f) + + /** @group adjacent-float */ + def nextDown(d: Double): Double = java.lang.Math.nextDown(d) + + /** @group adjacent-float */ + def nextDown(f: Float): Float = java.lang.Math.nextDown(f) + + /** @group scaling */ + def scalb(d: Double, scaleFactor: Int): Double = java.lang.Math.scalb(d, scaleFactor) + + /** @group scaling */ + def scalb(f: Float, scaleFactor: Int): Float = java.lang.Math.scalb(f, scaleFactor) + // ----------------------------------------------------------------------- // root functions // ----------------------------------------------------------------------- @@ -271,6 +328,12 @@ package object math { */ def expm1(x: Double): Double = java.lang.Math.expm1(x) + /** @group explog */ + def getExponent(f: Float): Int = java.lang.Math.getExponent(f) + + /** @group explog */ + def getExponent(d: Double): Int = java.lang.Math.getExponent(d) + // ----------------------------------------------------------------------- // logarithmic functions // ----------------------------------------------------------------------- @@ -326,6 +389,50 @@ package object math { */ def ulp(x: Float): Float = java.lang.Math.ulp(x) - /** @group rounding */ + /** @group exact */ def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y) + + 
// ----------------------------------------------------------------------- + // exact functions + // ----------------------------------------------------------------------- + + /** @group exact */ + def addExact(x: Int, y: Int): Int = java.lang.Math.addExact(x, y) + + /** @group exact */ + def addExact(x: Long, y: Long): Long = java.lang.Math.addExact(x, y) + + /** @group exact */ + def subtractExact(x: Int, y: Int): Int = java.lang.Math.subtractExact(x, y) + + /** @group exact */ + def subtractExact(x: Long, y: Long): Long = java.lang.Math.subtractExact(x, y) + + /** @group exact */ + def multiplyExact(x: Int, y: Int): Int = java.lang.Math.multiplyExact(x, y) + + /** @group exact */ + def multiplyExact(x: Long, y: Long): Long = java.lang.Math.multiplyExact(x, y) + + /** @group exact */ + def incrementExact(x: Int): Int = java.lang.Math.incrementExact(x) + + /** @group exact */ + def incrementExact(x: Long) = java.lang.Math.incrementExact(x) + + /** @group exact */ + def decrementExact(x: Int) = java.lang.Math.decrementExact(x) + + /** @group exact */ + def decrementExact(x: Long) = java.lang.Math.decrementExact(x) + + /** @group exact */ + def negateExact(x: Int) = java.lang.Math.negateExact(x) + + /** @group exact */ + def negateExact(x: Long) = java.lang.Math.negateExact(x) + + /** @group exact */ + def toIntExact(x: Long): Int = java.lang.Math.toIntExact(x) + } diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala index b0676d503423..6a453d1809d1 100644 --- a/src/library/scala/native.scala +++ b/src/library/scala/native.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,7 +22,6 @@ package scala * while discarding the method's body (if any). The body will be type checked if present. 
* * A method marked @native must be a member of a class, not a trait (since 2.12). - * - * @since 2.6 */ +@deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class native extends scala.annotation.StaticAnnotation {} diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala index 89192fd66f0b..eede8d5051f2 100644 --- a/src/library/scala/noinline.scala +++ b/src/library/scala/noinline.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,8 +13,15 @@ package scala /** - * An annotation on methods that forbids the compiler to inline the method, no matter how safe the - * inlining appears to be. The annotation can be used at definition site or at callsite. + * An annotation for methods that the optimizer should not inline. + * + * Note that by default, the Scala optimizer is disabled and no callsites are inlined. See + * `-opt:help` for information how to enable the optimizer and inliner. + * + * When inlining is enabled, the inliner will never inline methods or callsites annotated + * `@noinline`. 
+ * + * Examples: * * {{{ * @inline final def f1(x: Int) = x @@ -23,7 +30,7 @@ package scala * * def t1 = f1(1) // inlined if possible * def t2 = f2(1) // not inlined - * def t3 = f3(1) // may be inlined (heuristics) + * def t3 = f3(1) // may be inlined (the inliner heuristics can select the callsite) * def t4 = f1(1): @noinline // not inlined (override at callsite) * def t5 = f2(1): @inline // inlined if possible (override at callsite) * def t6 = f3(1): @inline // inlined if possible @@ -37,8 +44,5 @@ package scala * def t1 = f1(1) + f1(1): @noinline // equivalent to (f1(1) + f1(1)): @noinline * def t2 = f1(1) + (f1(1): @noinline) // the second call to f1 is not inlined * }}} - * - * @author Lex Spoon - * @since 2.5 */ -class noinline extends scala.annotation.StaticAnnotation +final class noinline extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala index 38d12a629ec7..f84dcfd85a73 100644 --- a/src/library/scala/package.scala +++ b/src/library/scala/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,11 +10,16 @@ * additional information regarding copyright ownership. */ +import scala.annotation.migration + /** * Core Scala types. They are always available without an explicit import. 
* @contentDiagram hideNodes "scala.Serializable" */ package object scala { + type Cloneable = java.lang.Cloneable + type Serializable = java.io.Serializable + type Throwable = java.lang.Throwable type Exception = java.lang.Exception type Error = java.lang.Error @@ -37,23 +42,31 @@ package object scala { override def toString = "object AnyRef" } - type TraversableOnce[+A] = scala.collection.TraversableOnce[A] + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+A] = scala.collection.IterableOnce[A] + + type IterableOnce[+A] = scala.collection.IterableOnce[A] - type Traversable[+A] = scala.collection.Traversable[A] - val Traversable = scala.collection.Traversable + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+A] = scala.collection.Iterable[A] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = scala.collection.Iterable type Iterable[+A] = scala.collection.Iterable[A] val Iterable = scala.collection.Iterable - type Seq[+A] = scala.collection.Seq[A] - val Seq = scala.collection.Seq + @migration("scala.Seq is now scala.collection.immutable.Seq instead of scala.collection.Seq", "2.13.0") + type Seq[+A] = scala.collection.immutable.Seq[A] + val Seq = scala.collection.immutable.Seq - type IndexedSeq[+A] = scala.collection.IndexedSeq[A] - val IndexedSeq = scala.collection.IndexedSeq + @migration("scala.IndexedSeq is now scala.collection.immutable.IndexedSeq instead of scala.collection.IndexedSeq", "2.13.0") + type IndexedSeq[+A] = scala.collection.immutable.IndexedSeq[A] + val IndexedSeq = scala.collection.immutable.IndexedSeq type Iterator[+A] = scala.collection.Iterator[A] val Iterator = scala.collection.Iterator + @deprecated("Use scala.collection.BufferedIterator instead of scala.BufferedIterator", "2.13.0") type BufferedIterator[+A] = scala.collection.BufferedIterator[A] type List[+A] = scala.collection.immutable.List[A] @@ -61,15 +74,28 @@ package object scala 
{ val Nil = scala.collection.immutable.Nil - type ::[A] = scala.collection.immutable.::[A] + type ::[+A] = scala.collection.immutable.::[A] val :: = scala.collection.immutable.:: val +: = scala.collection.+: val :+ = scala.collection.:+ + @deprecated("Use LazyList instead of Stream", "2.13.0") type Stream[+A] = scala.collection.immutable.Stream[A] + @deprecated("Use LazyList instead of Stream", "2.13.0") val Stream = scala.collection.immutable.Stream - val #:: = scala.collection.immutable.Stream.#:: + + type LazyList[+A] = scala.collection.immutable.LazyList[A] + val LazyList = scala.collection.immutable.LazyList + // This should be an alias to LazyList.#:: but we need to support Stream, too + //val #:: = scala.collection.immutable.LazyList.#:: + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (s.nonEmpty) Some((s.head, s.tail)) else None + @deprecated("Prefer LazyList instead", since = "2.13.0") + def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = + if (s.nonEmpty) Some((s.head, s.tail)) else None + } type Vector[+A] = scala.collection.immutable.Vector[A] val Vector = scala.collection.immutable.Vector @@ -118,19 +144,4 @@ package object scala { type Right[+A, +B] = scala.util.Right[A, B] val Right = scala.util.Right - // Annotations which we might move to annotation.* -/* - type SerialVersionUID = annotation.SerialVersionUID - type deprecated = annotation.deprecated - type deprecatedName = annotation.deprecatedName - type inline = annotation.inline - type native = annotation.native - type noinline = annotation.noinline - type remote = annotation.remote - type specialized = annotation.specialized - type transient = annotation.transient - type throws = annotation.throws - type unchecked = annotation.unchecked.unchecked - type volatile = annotation.volatile - */ } diff --git a/src/library/scala/ref/PhantomReference.scala b/src/library/scala/ref/PhantomReference.scala index 15ce0fb63896..0790f539d03d 100644 --- 
a/src/library/scala/ref/PhantomReference.scala +++ b/src/library/scala/ref/PhantomReference.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,16 +12,10 @@ package scala.ref -/** - * @author Sean McDirmid - */ class PhantomReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] { val underlying: java.lang.ref.PhantomReference[_ <: T] = new PhantomReferenceWithWrapper[T](value, queue, this) } -/** - * @author Philipp Haller - */ private class PhantomReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: PhantomReference[T]) extends java.lang.ref.PhantomReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/src/library/scala/ref/Reference.scala b/src/library/scala/ref/Reference.scala index 5da0a62f5e1b..02e673fa4184 100644 --- a/src/library/scala/ref/Reference.scala +++ b/src/library/scala/ref/Reference.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,15 +14,14 @@ package scala.ref /** * @see `java.lang.ref.Reference` - * @author Sean McDirmid */ trait Reference[+T <: AnyRef] extends Function0[T] { /** return the underlying value */ def apply(): T /** return `Some` underlying if it hasn't been collected, otherwise `None` */ def get: Option[T] - override def toString = get.map(_.toString).getOrElse("") + override def toString: String = get.map(_.toString).getOrElse("") def clear(): Unit def enqueue(): Boolean - def isEnqueued(): Boolean + def isEnqueued: Boolean } diff --git a/src/library/scala/ref/ReferenceQueue.scala b/src/library/scala/ref/ReferenceQueue.scala index e3e7befd14e5..70743708c732 100644 --- a/src/library/scala/ref/ReferenceQueue.scala +++ b/src/library/scala/ref/ReferenceQueue.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,14 +12,10 @@ package scala.ref -/** - * @author Sean McDirmid - * @author Philipp Haller - */ class ReferenceQueue[+T <: AnyRef] { private[ref] val underlying: java.lang.ref.ReferenceQueue[_ <: T] = new java.lang.ref.ReferenceQueue[T] - override def toString = underlying.toString + override def toString: String = underlying.toString protected def Wrapper(jref: java.lang.ref.Reference[_]): Option[Reference[T]] = jref match { diff --git a/src/library/scala/ref/ReferenceWrapper.scala b/src/library/scala/ref/ReferenceWrapper.scala index 54c4a43448c4..4e681ed18570 100644 --- a/src/library/scala/ref/ReferenceWrapper.scala +++ b/src/library/scala/ref/ReferenceWrapper.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,9 +12,9 @@ package scala.ref -/** - * @author Sean McDirmid - */ +import scala.annotation.nowarn + +@nowarn("cat=deprecation") trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy { val underlying: java.lang.ref.Reference[_ <: T] override def get = Option(underlying.get) @@ -23,15 +23,12 @@ trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy { if (ret eq null) throw new NoSuchElementException ret } - def clear() = underlying.clear() - def enqueue = underlying.enqueue - def isEnqueued = underlying.isEnqueued - def self = underlying + def clear(): Unit = underlying.clear() + def enqueue(): Boolean = underlying.enqueue() + def isEnqueued: Boolean = underlying.isEnqueued + def self: java.lang.ref.Reference[_ <: T] = underlying } -/** - * @author Philipp Haller - */ private trait ReferenceWithWrapper[T <: AnyRef] { val wrapper: ReferenceWrapper[T] } diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala index 32e3def47c26..859eef5e7fef 100644 --- a/src/library/scala/ref/SoftReference.scala +++ b/src/library/scala/ref/SoftReference.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,9 +12,6 @@ package scala.ref -/** - * @author Sean McDirmid - */ class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] { def this(value : T) = this(value, null) @@ -24,19 +21,15 @@ class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends /** * A companion object that implements an extractor for `SoftReference` values - * @author Rebecca Claire Murphy */ object SoftReference { /** Creates a `SoftReference` pointing to `value` */ - def apply[T <: AnyRef](value: T) = new SoftReference(value) + def apply[T <: AnyRef](value: T): SoftReference[T] = new SoftReference(value) /** Optionally returns the referenced value, or `None` if that value no longer exists */ def unapply[T <: AnyRef](sr: SoftReference[T]): Option[T] = Option(sr.underlying.get) } -/** - * @author Philipp Haller - */ private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: SoftReference[T]) extends java.lang.ref.SoftReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala index 51ed4701eb8b..5ca06063590b 100644 --- a/src/library/scala/ref/WeakReference.scala +++ b/src/library/scala/ref/WeakReference.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,7 +16,6 @@ package scala.ref * A wrapper class for java.lang.ref.WeakReference * The new functionality is (1) results are Option values, instead of using null. * (2) There is an extractor that maps the weak reference itself into an option. 
- * @author Sean McDirmid */ class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] { def this(value: T) = this(value, null) @@ -28,14 +27,11 @@ class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends Re object WeakReference { /** Creates a weak reference pointing to `value` */ - def apply[T <: AnyRef](value: T) = new WeakReference(value) + def apply[T <: AnyRef](value: T): WeakReference[T] = new WeakReference(value) /** Optionally returns the referenced value, or `None` if that value no longer exists */ def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = Option(wr.underlying.get) } -/** - * @author Philipp Haller - */ private class WeakReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: WeakReference[T]) extends java.lang.ref.WeakReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T] diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala index 94c61bbbfbb5..cc8d0a457c2a 100644 --- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala +++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,8 +13,10 @@ package scala package reflect -import scala.collection.mutable.{ WrappedArray, ArrayBuilder } -import java.lang.{ Class => jClass } +import scala.collection.mutable.{ArrayBuilder, ArraySeq} +import java.lang.{Class => jClass} + +import scala.annotation.{nowarn, tailrec} @deprecated("use scala.reflect.ClassTag instead", "2.10.0") trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { @@ -25,6 +27,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { def erasure: jClass[_] = runtimeClass private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = { + @tailrec def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = { left.nonEmpty && { val next = left.head @@ -63,8 +66,8 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { // List[String] <: AnyRef // Map[Int, Int] <: Iterable[(Int, Int)] // - // Given the manifest for Map[A, B] how do I determine that a - // supertype has single type argument (A, B) ? I don't see how we + // Given the manifest for Map[K, V] how do I determine that a + // supertype has single type argument (K, V) ? I don't see how we // can say whether X <:< Y when type arguments are involved except // when the erasure is the same, even before considering variance. 
!cannotMatch && { @@ -91,16 +94,13 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { case _ => false } - protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] = - java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]] + protected def arrayClass[A](tp: jClass[_]): jClass[Array[A]] = + java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[A]]] @deprecated("use wrap instead", "2.10.0") def arrayManifest: ClassManifest[Array[T]] = ClassManifest.classType[Array[T]](arrayClass[T](runtimeClass), this) - override def newArray(len: Int): Array[T] = - java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - @deprecated("use wrap.newArray instead", "2.10.0") def newArray2(len: Int): Array[Array[T]] = java.lang.reflect.Array.newInstance(arrayClass[T](runtimeClass), len) @@ -122,9 +122,9 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]] @deprecated("create WrappedArray directly instead", "2.10.0") - def newWrappedArray(len: Int): WrappedArray[T] = + def newWrappedArray(len: Int): ArraySeq[T] = // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests - new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]] + new ArraySeq.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[ArraySeq[T]] @deprecated("use ArrayBuilder.make(this) instead", "2.10.0") def newArrayBuilder(): ArrayBuilder[T] = @@ -152,6 +152,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { * so we need to somehow nudge them into migrating prior to removing stuff out of the blue. * Hence we've introduced this design decision as the lesser of two evils. 
*/ +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifest.*""") object ClassManifestFactory { val Byte = ManifestFactory.Byte val Short = ManifestFactory.Short @@ -204,7 +205,7 @@ object ClassManifestFactory { def classType[T](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = new ClassTypeManifest[T](Some(prefix), clazz, args.toList) - def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match { + def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = (arg: @unchecked) match { case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]] case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest } @@ -233,6 +234,7 @@ object ClassManifestFactory { /** Manifest for the class type `clazz[args]`, where `clazz` is * a top-level or static class */ +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifest""") @SerialVersionUID(1L) private class ClassTypeManifest[T]( prefix: Option[OptManifest[_]], diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 9158fbc514ed..5ebd3f1506e1 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,11 +13,10 @@ package scala package reflect -import java.lang.{ Class => jClass } +import java.lang.{Class => jClass} import java.lang.ref.{WeakReference => jWeakReference} - -import scala.collection.mutable -import scala.runtime.BoxedUnit +import scala.annotation.{implicitNotFound, nowarn} +import scala.runtime.ClassValueCompat /** * @@ -25,7 +24,7 @@ import scala.runtime.BoxedUnit * field. This is particularly useful for instantiating `Array`s whose element types are unknown * at compile time. 
* - * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags#TypeTag]]s, in that they + * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags.TypeTag]]s, in that they * wrap only the runtime class of a given type, whereas a `TypeTag` contains all static type * information. That is, `ClassTag`s are constructed from knowing only the top-level class of a * type, without necessarily knowing all of its argument types. This runtime information is enough @@ -44,21 +43,13 @@ import scala.runtime.BoxedUnit * }}} * * See [[scala.reflect.api.TypeTags]] for more examples, or the - * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] + * [[https://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] * for more details. * */ -@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}") +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifestDeprecatedApis""") +@implicitNotFound(msg = "No ClassTag available for ${T}") trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable { - - @transient private[scala] lazy val emptyArray : Array[T] = { - val componentType = - if (runtimeClass eq java.lang.Void.TYPE) classOf[BoxedUnit] else runtimeClass - java.lang.reflect.Array.newInstance(componentType, 0).asInstanceOf[Array[T]] - } - @transient private[scala] lazy val emptyWrappedArray: mutable.WrappedArray[T] = - mutable.WrappedArray.make[T](emptyArray) - // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder` // class tags, and all tags in general, should be as minimalistic as possible @@ -71,23 +62,8 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) /** Produces a new array with element type `T` and length `len` */ - override def newArray(len: Int): Array[T] = { - if 
(runtimeClass.isPrimitive) { - runtimeClass match { - case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]] - case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]] - case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]] - case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]] - case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]] - case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]] - case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]] - case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]] - case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]] - } - } else { - java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - } - } + def newArray(len: Int): Array[T] = + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] /** A ClassTag[T] can serve as an extractor that matches only objects of type T. * @@ -117,19 +93,21 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial * Class tags corresponding to primitive types and constructor/extractor for ClassTags. 
*/ object ClassTag { - private val ObjectTYPE = classOf[java.lang.Object] - private val NothingTYPE = classOf[scala.runtime.Nothing$] - private val NullTYPE = classOf[scala.runtime.Null$] - - val Byte : ClassTag[scala.Byte] = Manifest.Byte - val Short : ClassTag[scala.Short] = Manifest.Short - val Char : ClassTag[scala.Char] = Manifest.Char - val Int : ClassTag[scala.Int] = Manifest.Int - val Long : ClassTag[scala.Long] = Manifest.Long - val Float : ClassTag[scala.Float] = Manifest.Float - val Double : ClassTag[scala.Double] = Manifest.Double - val Boolean : ClassTag[scala.Boolean] = Manifest.Boolean - val Unit : ClassTag[scala.Unit] = Manifest.Unit + private[this] val ObjectTYPE = classOf[java.lang.Object] + private[this] val NothingTYPE = classOf[scala.runtime.Nothing$] + private[this] val NullTYPE = classOf[scala.runtime.Null$] + + import ManifestFactory._ + + val Byte : ByteManifest = Manifest.Byte + val Short : ShortManifest = Manifest.Short + val Char : CharManifest = Manifest.Char + val Int : IntManifest = Manifest.Int + val Long : LongManifest = Manifest.Long + val Float : FloatManifest = Manifest.Float + val Double : DoubleManifest = Manifest.Double + val Boolean : BooleanManifest = Manifest.Boolean + val Unit : UnitManifest = Manifest.Unit val Any : ClassTag[scala.Any] = Manifest.Any val Object : ClassTag[java.lang.Object] = Manifest.Object val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal @@ -138,7 +116,7 @@ object ClassTag { val Null : ClassTag[scala.Null] = Manifest.Null private val cacheDisabled = java.lang.Boolean.getBoolean("scala.reflect.classtag.cache.disable") - private[this] object cache extends ClassValue[jWeakReference[ClassTag[_]]] { + private[this] object cache extends ClassValueCompat[jWeakReference[ClassTag[_]]] { override def computeValue(runtimeClass: jClass[_]): jWeakReference[ClassTag[_]] = new jWeakReference(computeTag(runtimeClass)) @@ -151,24 +129,24 @@ object ClassTag { case _ => new GenericClassTag[AnyRef](runtimeClass) } - 
private def primitiveClassTag[T](runtimeClass: Class[_]): ClassTag[_] = runtimeClass match { - case java.lang.Byte.TYPE => ClassTag.Byte - case java.lang.Short.TYPE => ClassTag.Short - case java.lang.Character.TYPE => ClassTag.Char - case java.lang.Integer.TYPE => ClassTag.Int - case java.lang.Long.TYPE => ClassTag.Long - case java.lang.Float.TYPE => ClassTag.Float - case java.lang.Double.TYPE => ClassTag.Double - case java.lang.Boolean.TYPE => ClassTag.Boolean - case java.lang.Void.TYPE => ClassTag.Unit - } + private def primitiveClassTag[T](runtimeClass: Class[_]): ClassTag[_] = + (runtimeClass: @unchecked) match { + case java.lang.Byte.TYPE => ClassTag.Byte + case java.lang.Short.TYPE => ClassTag.Short + case java.lang.Character.TYPE => ClassTag.Char + case java.lang.Integer.TYPE => ClassTag.Int + case java.lang.Long.TYPE => ClassTag.Long + case java.lang.Float.TYPE => ClassTag.Float + case java.lang.Double.TYPE => ClassTag.Double + case java.lang.Boolean.TYPE => ClassTag.Boolean + case java.lang.Void.TYPE => ClassTag.Unit + } } @SerialVersionUID(1L) private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] { override def newArray(len: Int): Array[T] = { - if (len == 0) emptyArray - else java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] } } diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 1e9b0a2a55d3..9f382fdd800e 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,8 @@ package scala package reflect -import scala.collection.mutable.{ArrayBuilder, WrappedArray} +import scala.annotation.{implicitNotFound, nowarn} +import scala.collection.mutable.{ArrayBuilder, ArraySeq} /** A `Manifest[T]` is an opaque descriptor for type T. Its supported use * is to give access to the erasure of the type as a `Class` instance, as @@ -30,19 +31,20 @@ import scala.collection.mutable.{ArrayBuilder, WrappedArray} * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding * - * // Methods manifest, classManifest, and optManifest are in [[scala.Predef]]. + * // Methods manifest and optManifest are in [[scala.Predef]]. * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] * isApproxSubType[List[String], List[AnyRef]] // true * isApproxSubType[List[String], List[Int]] // false * - * def methods[T: ClassManifest] = classManifest[T].erasure.getMethods - * def retType[T: ClassManifest](name: String) = + * def methods[T: Manifest] = manifest[T].runtimeClass.getMethods + * def retType[T: Manifest](name: String) = * methods[T] find (_.getName == name) map (_.getGenericReturnType) * * retType[Map[_, _]]("values") // Some(scala.collection.Iterable) * }}} */ -@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifest(DeprecatedApis.*)?""") +@implicitNotFound(msg = "No Manifest available for ${T}.") // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") trait Manifest[T] extends ClassManifest[T] with Equals { @@ -65,8 +67,85 @@ trait Manifest[T] extends ClassManifest[T] with Equals { override def hashCode = this.runtimeClass.## } +/** The object `Manifest` defines factory methods for manifests. 
+ * It is intended for use by the compiler and should not be used in client code. + */ +// TODO undeprecated until Scala reflection becomes non-experimental +// @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") +object Manifest { + /* Forward all the public members of ManifestFactory, since this object used + * to be a `private val Manifest = ManifestFactory` in the package object. It + * was moved here because it needs to be in the same file as `trait Manifest` + * defined above. + */ + + def valueManifests: List[AnyValManifest[_]] = + ManifestFactory.valueManifests + + val Byte: ManifestFactory.ByteManifest = ManifestFactory.Byte + val Short: ManifestFactory.ShortManifest = ManifestFactory.Short + val Char: ManifestFactory.CharManifest = ManifestFactory.Char + val Int: ManifestFactory.IntManifest = ManifestFactory.Int + val Long: ManifestFactory.LongManifest = ManifestFactory.Long + val Float: ManifestFactory.FloatManifest = ManifestFactory.Float + val Double: ManifestFactory.DoubleManifest = ManifestFactory.Double + val Boolean: ManifestFactory.BooleanManifest = ManifestFactory.Boolean + val Unit: ManifestFactory.UnitManifest = ManifestFactory.Unit + + val Any: Manifest[scala.Any] = ManifestFactory.Any + val Object: Manifest[java.lang.Object] = ManifestFactory.Object + val AnyRef: Manifest[scala.AnyRef] = ManifestFactory.AnyRef + val AnyVal: Manifest[scala.AnyVal] = ManifestFactory.AnyVal + val Null: Manifest[scala.Null] = ManifestFactory.Null + val Nothing: Manifest[scala.Nothing] = ManifestFactory.Nothing + + /** Manifest for the singleton type `value.type`. */ + def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = + ManifestFactory.singleType[T](value) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class. 
+ * @note This no-prefix, no-arguments case is separate because + * it's called from ScalaRunTime.boxArray itself. If we + * pass varargs as arrays into this, we get an infinitely recursive call + * to boxArray. (Besides, having a separate case is more efficient) + */ + def classType[T](clazz: Predef.Class[_]): Manifest[T] = + ManifestFactory.classType[T](clazz) + + /** Manifest for the class type `clazz`, where `clazz` is + * a top-level or static class and args are its type arguments. */ + def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.classType[T](clazz, arg1, args: _*) + + /** Manifest for the class type `clazz[args]`, where `clazz` is + * a class with non-package prefix type `prefix` and type arguments `args`. + */ + def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.classType[T](prefix, clazz, args: _*) + + def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = + ManifestFactory.arrayType[T](arg) + + /** Manifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. */ + def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = + ManifestFactory.abstractType[T](prefix, name, upperBound, args: _*) + + /** Manifest for the unknown type `_ >: L <: U` in an existential. */ + def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = + ManifestFactory.wildcardType[T](lowerBound, upperBound) + + /** Manifest for the intersection type `parents_0 with ... with parents_n`. 
*/ + def intersectionType[T](parents: Manifest[_]*): Manifest[T] = + ManifestFactory.intersectionType[T](parents: _*) + +} + // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0") +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifest(DeprecatedApis.*)?""") @SerialVersionUID(1L) abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { override def <:<(that: ClassManifest[_]): Boolean = @@ -87,15 +166,16 @@ abstract class AnyValManifest[T <: AnyVal](override val toString: String) extend * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. * Why so complicated? Read up the comments for `ClassManifestFactory`. */ +@nowarn("""cat=deprecation&origin=scala\.reflect\.ClassManifest(DeprecatedApis.*)?""") object ManifestFactory { def valueManifests: List[AnyValManifest[_]] = List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit) @SerialVersionUID(1L) - private class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { - def runtimeClass = java.lang.Byte.TYPE - override def newArray(len: Int): Array[Byte] = new Array[Byte](len) - override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len)) + final private[reflect] class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { + def runtimeClass: Class[java.lang.Byte] = java.lang.Byte.TYPE + @inline override def newArray(len: Int): Array[Byte] = new Array[Byte](len) + override def newWrappedArray(len: Int): ArraySeq[Byte] = new ArraySeq.ofByte(new Array[Byte](len)) override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() override def unapply(x: Any): Option[Byte] = { x match { @@ -105,13 +185,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Byte } - val Byte: AnyValManifest[Byte] = new 
ByteManifest + val Byte: ByteManifest = new ByteManifest @SerialVersionUID(1L) - private class ShortManifest extends AnyValManifest[scala.Short]("Short") { - def runtimeClass = java.lang.Short.TYPE - override def newArray(len: Int): Array[Short] = new Array[Short](len) - override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len)) + final private[reflect] class ShortManifest extends AnyValManifest[scala.Short]("Short") { + def runtimeClass: Class[java.lang.Short] = java.lang.Short.TYPE + @inline override def newArray(len: Int): Array[Short] = new Array[Short](len) + override def newWrappedArray(len: Int): ArraySeq[Short] = new ArraySeq.ofShort(new Array[Short](len)) override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() override def unapply(x: Any): Option[Short] = { x match { @@ -121,13 +201,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Short } - val Short: AnyValManifest[Short] = new ShortManifest + val Short: ShortManifest = new ShortManifest @SerialVersionUID(1L) - private class CharManifest extends AnyValManifest[scala.Char]("Char") { - def runtimeClass = java.lang.Character.TYPE - override def newArray(len: Int): Array[Char] = new Array[Char](len) - override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len)) + final private[reflect] class CharManifest extends AnyValManifest[scala.Char]("Char") { + def runtimeClass: Class[java.lang.Character] = java.lang.Character.TYPE + @inline override def newArray(len: Int): Array[Char] = new Array[Char](len) + override def newWrappedArray(len: Int): ArraySeq[Char] = new ArraySeq.ofChar(new Array[Char](len)) override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() override def unapply(x: Any): Option[Char] = { x match { @@ -137,13 +217,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Char } - val Char: AnyValManifest[Char] = new 
CharManifest + val Char: CharManifest = new CharManifest @SerialVersionUID(1L) - private class IntManifest extends AnyValManifest[scala.Int]("Int") { - def runtimeClass = java.lang.Integer.TYPE - override def newArray(len: Int): Array[Int] = new Array[Int](len) - override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len)) + final private[reflect] class IntManifest extends AnyValManifest[scala.Int]("Int") { + def runtimeClass: Class[java.lang.Integer] = java.lang.Integer.TYPE + @inline override def newArray(len: Int): Array[Int] = new Array[Int](len) + override def newWrappedArray(len: Int): ArraySeq[Int] = new ArraySeq.ofInt(new Array[Int](len)) override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() override def unapply(x: Any): Option[Int] = { x match { @@ -153,13 +233,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Int } - val Int: AnyValManifest[Int] = new IntManifest + val Int: IntManifest = new IntManifest @SerialVersionUID(1L) - private class LongManifest extends AnyValManifest[scala.Long]("Long") { - def runtimeClass = java.lang.Long.TYPE - override def newArray(len: Int): Array[Long] = new Array[Long](len) - override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len)) + final private[reflect] class LongManifest extends AnyValManifest[scala.Long]("Long") { + def runtimeClass: Class[java.lang.Long] = java.lang.Long.TYPE + @inline override def newArray(len: Int): Array[Long] = new Array[Long](len) + override def newWrappedArray(len: Int): ArraySeq[Long] = new ArraySeq.ofLong(new Array[Long](len)) override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() override def unapply(x: Any): Option[Long] = { x match { @@ -169,13 +249,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Long } - val Long: AnyValManifest[Long] = new LongManifest + val Long: LongManifest = new LongManifest 
@SerialVersionUID(1L) - private class FloatManifest extends AnyValManifest[scala.Float]("Float") { - def runtimeClass = java.lang.Float.TYPE - override def newArray(len: Int): Array[Float] = new Array[Float](len) - override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len)) + final private[reflect] class FloatManifest extends AnyValManifest[scala.Float]("Float") { + def runtimeClass: Class[java.lang.Float] = java.lang.Float.TYPE + @inline override def newArray(len: Int): Array[Float] = new Array[Float](len) + override def newWrappedArray(len: Int): ArraySeq[Float] = new ArraySeq.ofFloat(new Array[Float](len)) override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() override def unapply(x: Any): Option[Float] = { x match { @@ -185,15 +265,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Float } - val Float: AnyValManifest[Float] = new FloatManifest + val Float: FloatManifest = new FloatManifest @SerialVersionUID(1L) - private class DoubleManifest extends AnyValManifest[scala.Double]("Double") { - def runtimeClass = java.lang.Double.TYPE - override def newArray(len: Int): Array[Double] = { - new Array[Double](len) - } - override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len)) + final private[reflect] class DoubleManifest extends AnyValManifest[scala.Double]("Double") { + def runtimeClass: Class[java.lang.Double] = java.lang.Double.TYPE + @inline override def newArray(len: Int): Array[Double] = new Array[Double](len) + override def newWrappedArray(len: Int): ArraySeq[Double] = new ArraySeq.ofDouble(new Array[Double](len)) override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() override def unapply(x: Any): Option[Double] = { @@ -204,13 +282,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Double } - val Double: AnyValManifest[Double] = new DoubleManifest + val 
Double: DoubleManifest = new DoubleManifest @SerialVersionUID(1L) - private class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { - def runtimeClass = java.lang.Boolean.TYPE - override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) - override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len)) + final private[reflect] class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { + def runtimeClass: Class[java.lang.Boolean] = java.lang.Boolean.TYPE + @inline override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) + override def newWrappedArray(len: Int): ArraySeq[Boolean] = new ArraySeq.ofBoolean(new Array[Boolean](len)) override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() override def unapply(x: Any): Option[Boolean] = { x match { @@ -220,13 +298,13 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Boolean } - val Boolean: AnyValManifest[Boolean] = new BooleanManifest + val Boolean: BooleanManifest = new BooleanManifest @SerialVersionUID(1L) - private class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { - def runtimeClass = java.lang.Void.TYPE - override def newArray(len: Int): Array[Unit] = new Array[Unit](len) - override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len)) + final private[reflect] class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { + def runtimeClass: Class[java.lang.Void] = java.lang.Void.TYPE + @inline override def newArray(len: Int): Array[Unit] = new Array[Unit](len) + override def newWrappedArray(len: Int): ArraySeq[Unit] = new ArraySeq.ofUnit(new Array[Unit](len)) override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit() override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] = if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]] 
@@ -239,14 +317,14 @@ object ManifestFactory { } private def readResolve(): Any = Manifest.Unit } - val Unit: AnyValManifest[Unit] = new UnitManifest + val Unit: UnitManifest = new UnitManifest - private val ObjectTYPE = classOf[java.lang.Object] - private val NothingTYPE = classOf[scala.runtime.Nothing$] - private val NullTYPE = classOf[scala.runtime.Null$] + private[this] val ObjectTYPE = classOf[java.lang.Object] + private[this] val NothingTYPE = classOf[scala.runtime.Nothing$] + private[this] val NullTYPE = classOf[scala.runtime.Null$] @SerialVersionUID(1L) - private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { + final private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { override def newArray(len: Int) = new Array[scala.Any](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) private def readResolve(): Any = Manifest.Any @@ -254,7 +332,7 @@ object ManifestFactory { val Any: Manifest[scala.Any] = new AnyManifest @SerialVersionUID(1L) - private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { + final private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { override def newArray(len: Int) = new Array[java.lang.Object](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) private def readResolve(): Any = Manifest.Object @@ -264,7 +342,7 @@ object ManifestFactory { val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] @SerialVersionUID(1L) - private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { + final private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { override def newArray(len: Int) = new Array[scala.AnyVal](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) private def readResolve(): Any = Manifest.AnyVal @@ -272,7 +350,7 @@ 
object ManifestFactory { val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest @SerialVersionUID(1L) - private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { + final private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { override def newArray(len: Int) = new Array[scala.Null](len) override def <:<(that: ClassManifest[_]): Boolean = (that ne null) && (that ne Nothing) && !(that <:< AnyVal) @@ -281,7 +359,7 @@ object ManifestFactory { val Null: Manifest[scala.Null] = new NullManifest @SerialVersionUID(1L) - private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { + final private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { override def newArray(len: Int) = new Array[scala.Nothing](len) override def <:<(that: ClassManifest[_]): Boolean = (that ne null) private def readResolve(): Any = Manifest.Nothing @@ -289,8 +367,8 @@ object ManifestFactory { val Nothing: Manifest[scala.Nothing] = new NothingManifest @SerialVersionUID(1L) - private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { - lazy val runtimeClass = value.getClass + final private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { + lazy val runtimeClass: Class[_ <: AnyRef] = value.getClass override lazy val toString = value.toString + ".type" } @@ -343,7 +421,7 @@ object ManifestFactory { arg.asInstanceOf[Manifest[T]].arrayManifest @SerialVersionUID(1L) - private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Seq[Manifest[_]]) extends Manifest[T] { + private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: scala.collection.Seq[Manifest[_]]) extends Manifest[T] { def runtimeClass = upperBound override val typeArguments = args.toList override def toString = prefix.toString+"#"+name+argString @@ -370,12 +448,14 
@@ object ManifestFactory { new WildcardManifest[T](lowerBound, upperBound) @SerialVersionUID(1L) - private class IntersectionTypeManifest[T](parents: Seq[Manifest[_]]) extends Manifest[T] { - def runtimeClass = parents.head.runtimeClass + private class IntersectionTypeManifest[T](parents: Array[Manifest[_]]) extends Manifest[T] { + // We use an `Array` instead of a `Seq` for `parents` to avoid cyclic dependencies during deserialization + // which can cause serialization proxies to leak and cause a ClassCastException. + def runtimeClass = parents(0).runtimeClass override def toString = parents.mkString(" with ") } /** Manifest for the intersection type `parents_0 with ... with parents_n`. */ def intersectionType[T](parents: Manifest[_]*): Manifest[T] = - new IntersectionTypeManifest[T](parents) + new IntersectionTypeManifest[T](parents.toArray) } diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala index cbe75a9c5313..4980ed5bd6b4 100644 --- a/src/library/scala/reflect/NameTransformer.scala +++ b/src/library/scala/reflect/NameTransformer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -28,13 +28,13 @@ object NameTransformer { final val SETTER_SUFFIX_STRING = "_$eq" final val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$" - private val nops = 128 - private val ncodes = 26 * 26 + private[this] val nops = 128 + private[this] val ncodes = 26 * 26 private class OpCodes(val op: Char, val code: String, val next: OpCodes) - private val op2code = new Array[String](nops) - private val code2op = new Array[OpCodes](ncodes) + private[this] val op2code = new Array[String](nops) + private[this] val code2op = new Array[OpCodes](ncodes) private def enterOp(op: Char, code: String) = { op2code(op.toInt) = code val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a' diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala index 840e39709014..819ffede46d3 100644 --- a/src/library/scala/reflect/NoManifest.scala +++ b/src/library/scala/reflect/NoManifest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,4 +19,4 @@ package reflect // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") object NoManifest extends OptManifest[Nothing] with Serializable { override def toString = "" -} \ No newline at end of file +} diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala index 09a79f725695..f3b3c3117236 100644 --- a/src/library/scala/reflect/OptManifest.scala +++ b/src/library/scala/reflect/OptManifest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,9 +16,7 @@ package reflect /** A `OptManifest[T]` is an optional [[scala.reflect.Manifest]]. * * It is either a `Manifest` or the value `NoManifest`. - * - * @author Martin Odersky */ // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") -trait OptManifest[+T] extends Serializable \ No newline at end of file +trait OptManifest[+T] extends Serializable diff --git a/src/library/scala/reflect/ScalaLongSignature.java b/src/library/scala/reflect/ScalaLongSignature.java index f749e33bf3fa..29a77dc2f352 100644 --- a/src/library/scala/reflect/ScalaLongSignature.java +++ b/src/library/scala/reflect/ScalaLongSignature.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java index 99d8c0387b62..dbd5a46bfd10 100644 --- a/src/library/scala/reflect/ScalaSignature.java +++ b/src/library/scala/reflect/ScalaSignature.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/library/scala/reflect/macros/internal/macroImpl.scala b/src/library/scala/reflect/macros/internal/macroImpl.scala index b57419b16b80..c26426a079f2 100644 --- a/src/library/scala/reflect/macros/internal/macroImpl.scala +++ b/src/library/scala/reflect/macros/internal/macroImpl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -27,4 +27,4 @@ package internal * To lessen the weirdness we define this annotation as `private[scala]`. * It will not prevent pickling, but it will prevent application developers (and scaladocs) from seeing the annotation. */ -private[scala] class macroImpl(val referenceToMacroImpl: Any) extends scala.annotation.StaticAnnotation +private[scala] final class macroImpl(val referenceToMacroImpl: Any) extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 42b5c3b3dba1..caf79866c71e 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,8 @@ package scala -import java.lang.reflect.{ AccessibleObject => jAccessibleObject } +import java.lang.reflect.{AccessibleObject => jAccessibleObject} +import scala.annotation.nowarn package object reflect { @@ -47,13 +48,6 @@ package object reflect { @deprecated("use scala.reflect.ClassTag instead", "2.10.0") val ClassManifest = ClassManifestFactory - /** The object `Manifest` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. 
- */ - // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") - val Manifest = ManifestFactory - def classTag[T](implicit ctag: ClassTag[T]) = ctag /** Make a java reflection object accessible, if it is not already @@ -61,7 +55,10 @@ package object reflect { * attempt, it is caught and discarded. */ def ensureAccessible[T <: jAccessibleObject](m: T): T = { - if (!m.isAccessible) { + // This calls `setAccessible` unnecessarily, because `isAccessible` is only `true` if `setAccessible(true)` + // was called before, not if the reflected object is inherently accessible. + // TODO: replace by `canAccess` once we're on JDK 9+ + if (!m.isAccessible: @nowarn("cat=deprecation")) { try m setAccessible true catch { case _: SecurityException => } // does nothing } diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala deleted file mode 100644 index a8921006713a..000000000000 --- a/src/library/scala/remote.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -/** - * An annotation that designates the class to which it is applied as remotable. 
- * - * For instance, the Scala code - * {{{ - * @remote trait Hello { - * def sayHello(): String - * } - * }}} - * is equivalent to the following Java code: - * {{{ - * public interface Hello extends java.rmi.Remote { - * String sayHello() throws java.rmi.RemoteException; - * } - * }}} - */ -@deprecated("extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods", "2.12.0") -class remote extends scala.annotation.StaticAnnotation {} diff --git a/src/library/scala/runtime/AbstractFunction0.scala b/src/library/scala/runtime/AbstractFunction0.scala index 1d0658dd13ac..c322efcd6281 100644 --- a/src/library/scala/runtime/AbstractFunction0.scala +++ b/src/library/scala/runtime/AbstractFunction0.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala index c4db90a4c750..49977d8a1393 100644 --- a/src/library/scala/runtime/AbstractFunction1.scala +++ b/src/library/scala/runtime/AbstractFunction1.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,6 @@ package scala.runtime -abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] { +abstract class AbstractFunction1[@specialized(Specializable.Arg) -T1, @specialized(Specializable.Return) +R] extends Function1[T1, R] { } diff --git a/src/library/scala/runtime/AbstractFunction10.scala b/src/library/scala/runtime/AbstractFunction10.scala index 0c4a08cbfac7..121c3cc0d53b 100644 --- a/src/library/scala/runtime/AbstractFunction10.scala +++ b/src/library/scala/runtime/AbstractFunction10.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction11.scala b/src/library/scala/runtime/AbstractFunction11.scala index 26dc92a984d8..c4321c10f142 100644 --- a/src/library/scala/runtime/AbstractFunction11.scala +++ b/src/library/scala/runtime/AbstractFunction11.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction12.scala b/src/library/scala/runtime/AbstractFunction12.scala index f52587200984..3f6d666f9c42 100644 --- a/src/library/scala/runtime/AbstractFunction12.scala +++ b/src/library/scala/runtime/AbstractFunction12.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/library/scala/runtime/AbstractFunction13.scala b/src/library/scala/runtime/AbstractFunction13.scala index f5280e525973..264de0f87296 100644 --- a/src/library/scala/runtime/AbstractFunction13.scala +++ b/src/library/scala/runtime/AbstractFunction13.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction14.scala b/src/library/scala/runtime/AbstractFunction14.scala index af6784cffaf2..1e92d1e04bae 100644 --- a/src/library/scala/runtime/AbstractFunction14.scala +++ b/src/library/scala/runtime/AbstractFunction14.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction15.scala b/src/library/scala/runtime/AbstractFunction15.scala index aac162fac06a..ed9b6b187e39 100644 --- a/src/library/scala/runtime/AbstractFunction15.scala +++ b/src/library/scala/runtime/AbstractFunction15.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction16.scala b/src/library/scala/runtime/AbstractFunction16.scala index 699842586255..839efed863b6 100644 --- a/src/library/scala/runtime/AbstractFunction16.scala +++ b/src/library/scala/runtime/AbstractFunction16.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction17.scala b/src/library/scala/runtime/AbstractFunction17.scala index 2531a748d885..ee91b466ea5b 100644 --- a/src/library/scala/runtime/AbstractFunction17.scala +++ b/src/library/scala/runtime/AbstractFunction17.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction18.scala b/src/library/scala/runtime/AbstractFunction18.scala index 14d0dd72046a..83aaf6b10c44 100644 --- a/src/library/scala/runtime/AbstractFunction18.scala +++ b/src/library/scala/runtime/AbstractFunction18.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction19.scala b/src/library/scala/runtime/AbstractFunction19.scala index 13d633113954..93741656a585 100644 --- a/src/library/scala/runtime/AbstractFunction19.scala +++ b/src/library/scala/runtime/AbstractFunction19.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction2.scala b/src/library/scala/runtime/AbstractFunction2.scala index b39793a9817d..7c8d1628e545 100644 --- a/src/library/scala/runtime/AbstractFunction2.scala +++ b/src/library/scala/runtime/AbstractFunction2.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,6 @@ package scala.runtime -abstract class AbstractFunction2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function2[T1, T2, R] { +abstract class AbstractFunction2[@specialized(Specializable.Args) -T1, @specialized(Specializable.Args) -T2, @specialized(Specializable.Return) +R] extends Function2[T1, T2, R] { } diff --git a/src/library/scala/runtime/AbstractFunction20.scala b/src/library/scala/runtime/AbstractFunction20.scala index 4debd7473893..b2858b27c125 100644 --- a/src/library/scala/runtime/AbstractFunction20.scala +++ b/src/library/scala/runtime/AbstractFunction20.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction21.scala b/src/library/scala/runtime/AbstractFunction21.scala index 523a42f4ebea..e36e6b043959 100644 --- a/src/library/scala/runtime/AbstractFunction21.scala +++ b/src/library/scala/runtime/AbstractFunction21.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction22.scala b/src/library/scala/runtime/AbstractFunction22.scala index 7c77f05e7f03..f9cf63a9542d 100644 --- a/src/library/scala/runtime/AbstractFunction22.scala +++ b/src/library/scala/runtime/AbstractFunction22.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction3.scala b/src/library/scala/runtime/AbstractFunction3.scala index 829125342d3c..fbeb3e7b1cf8 100644 --- a/src/library/scala/runtime/AbstractFunction3.scala +++ b/src/library/scala/runtime/AbstractFunction3.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction4.scala b/src/library/scala/runtime/AbstractFunction4.scala index 6f479f3395ae..9a91280eea52 100644 --- a/src/library/scala/runtime/AbstractFunction4.scala +++ b/src/library/scala/runtime/AbstractFunction4.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction5.scala b/src/library/scala/runtime/AbstractFunction5.scala index 50ad931bdd77..a7880cdb0a14 100644 --- a/src/library/scala/runtime/AbstractFunction5.scala +++ b/src/library/scala/runtime/AbstractFunction5.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction6.scala b/src/library/scala/runtime/AbstractFunction6.scala index e60229bb03df..0a8c4eeacc70 100644 --- a/src/library/scala/runtime/AbstractFunction6.scala +++ b/src/library/scala/runtime/AbstractFunction6.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction7.scala b/src/library/scala/runtime/AbstractFunction7.scala index 1f6eae1291fa..d0f18b0dcbd4 100644 --- a/src/library/scala/runtime/AbstractFunction7.scala +++ b/src/library/scala/runtime/AbstractFunction7.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction8.scala b/src/library/scala/runtime/AbstractFunction8.scala index 06677c3e3923..4f1a528a4ed9 100644 --- a/src/library/scala/runtime/AbstractFunction8.scala +++ b/src/library/scala/runtime/AbstractFunction8.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/AbstractFunction9.scala b/src/library/scala/runtime/AbstractFunction9.scala index 863e73f18177..f4ebb395bd5a 100644 --- a/src/library/scala/runtime/AbstractFunction9.scala +++ b/src/library/scala/runtime/AbstractFunction9.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala index 2b3dd8fb1b27..f4e8ae1b7818 100644 --- a/src/library/scala/runtime/AbstractPartialFunction.scala +++ b/src/library/scala/runtime/AbstractPartialFunction.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,11 +23,8 @@ package runtime * of partial function literals. * * This trait is used as a basis for implementation of all partial function literals. - * - * @author Pavel Pavlov - * @since 2.10 */ -abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self => +abstract class AbstractPartialFunction[@specialized(Specializable.Arg) -T1, @specialized(Specializable.Return) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self => // this method must be overridden for better performance, // for backwards compatibility, fall back to the one inherited from PartialFunction // this assumes the old-school partial functions override the apply method, though diff --git a/src/library/scala/runtime/ArrayCharSequence.scala b/src/library/scala/runtime/ArrayCharSequence.scala new file mode 100644 index 000000000000..971b0ac24c0d --- /dev/null +++ b/src/library/scala/runtime/ArrayCharSequence.scala @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package runtime + +// Still need this one since the implicit class ArrayCharSequence only converts +// a single argument. +final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence { + // yikes + // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: signature: ([C)V) + // Constructor must call super() or this() + // + // def this(xs: Array[Char]) = this(xs, 0, xs.length) + + def length: Int = math.max(0, end - start) + def charAt(index: Int): Char = { + if (0 <= index && index < length) + xs(start + index) + else throw new ArrayIndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length - 1})") + } + def subSequence(start0: Int, end0: Int): CharSequence = { + if (start0 < 0) throw new ArrayIndexOutOfBoundsException(s"$start0 is out of bounds (min 0, max ${length -1})") + else if (end0 > length) throw new ArrayIndexOutOfBoundsException(s"$end0 is out of bounds (min 0, max ${xs.length -1})") + else if (end0 <= start0) new ArrayCharSequence(xs, 0, 0) + else { + val newlen = end0 - start0 + val start1 = start + start0 + new ArrayCharSequence(xs, start1, start1 + newlen) + } + } + override def toString = { + val start = math.max(this.start, 0) + val end = math.min(xs.length, start + length) + + if (start >= end) "" else new String(xs, start, end - start) + } +} diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java index 60341a3d7e73..2c43fd719366 100644 --- a/src/library/scala/runtime/BooleanRef.java +++ b/src/library/scala/runtime/BooleanRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class BooleanRef implements java.io.Serializable { +public final class BooleanRef implements java.io.Serializable { private static final long serialVersionUID = -5730524563015615974L; public boolean elem; diff --git a/src/library/scala/runtime/BoxedUnit.java b/src/library/scala/runtime/BoxedUnit.java index c190763f4db2..aaa986f87f1a 100644 --- a/src/library/scala/runtime/BoxedUnit.java +++ b/src/library/scala/runtime/BoxedUnit.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java index 002d0f332de0..3ddc2516fbb9 100644 --- a/src/library/scala/runtime/BoxesRunTime.java +++ b/src/library/scala/runtime/BoxesRunTime.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,11 +23,7 @@ * - The generalised comparison method to be used when an object may * be a boxed value. * - Standard value operators for boxed number and quasi-number values. - * - * @author Gilles Dubochet - * @author Martin Odersky - * @contributor Stepan Koltsov - * @version 2.0 */ + */ public final class BoxesRunTime { private static final int CHAR = 0, /* BYTE = 1, SHORT = 2, */ INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7; diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java index dfc91c4d19fd..4630440fd7a7 100644 --- a/src/library/scala/runtime/ByteRef.java +++ b/src/library/scala/runtime/ByteRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class ByteRef implements java.io.Serializable { +public final class ByteRef implements java.io.Serializable { private static final long serialVersionUID = -100666928446877072L; public byte elem; diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java index a0448b0bba24..05e8fa55c982 100644 --- a/src/library/scala/runtime/CharRef.java +++ b/src/library/scala/runtime/CharRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class CharRef implements java.io.Serializable { +public final class CharRef implements java.io.Serializable { private static final long serialVersionUID = 6537214938268005702L; public char elem; diff --git a/src/library/scala/runtime/ClassValueCompat.scala b/src/library/scala/runtime/ClassValueCompat.scala new file mode 100644 index 000000000000..09a619f7a5f5 --- /dev/null +++ b/src/library/scala/runtime/ClassValueCompat.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime + + +import scala.runtime.ClassValueCompat._ + +private[scala] abstract class ClassValueCompat[T] extends ClassValueInterface[T] { self => + private val instance: ClassValueInterface[T] = + if (classValueAvailable) new JavaClassValue() + else new FallbackClassValue() + + private class JavaClassValue extends ClassValue[T] with ClassValueInterface[T] { + override def computeValue(cls: Class[_]): T = self.computeValue(cls) + } + + private class FallbackClassValue extends ClassValueInterface[T] { + override def get(cls: Class[_]): T = self.computeValue(cls) + + override def remove(cls: Class[_]): Unit = {} + } + + def get(cls: Class[_]): T = instance.get(cls) + + def remove(cls: Class[_]): Unit = instance.remove(cls) + + protected def computeValue(cls: Class[_]): T +} + +private[scala] object ClassValueCompat { + trait ClassValueInterface[T] { + def get(cls: Class[_]): T + + def remove(cls: Class[_]): Unit + } + + private val classValueAvailable: Boolean = try { + Class.forName("java.lang.ClassValue", false, classOf[Object].getClassLoader) + true + } catch { + case _: ClassNotFoundException => false + } +} diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java index 1b2d0421cf4b..52b40cde396e 100644 --- a/src/library/scala/runtime/DoubleRef.java +++ b/src/library/scala/runtime/DoubleRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class DoubleRef implements java.io.Serializable { +public final class DoubleRef implements java.io.Serializable { private static final long serialVersionUID = 8304402127373655534L; public double elem; diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java index c3d037d5651a..d28d62a0ccc2 100644 --- a/src/library/scala/runtime/FloatRef.java +++ b/src/library/scala/runtime/FloatRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class FloatRef implements java.io.Serializable { +public final class FloatRef implements java.io.Serializable { private static final long serialVersionUID = -5793980990371366933L; public float elem; diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java index 95c3cccaa774..d456c3a750b3 100644 --- a/src/library/scala/runtime/IntRef.java +++ b/src/library/scala/runtime/IntRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class IntRef implements java.io.Serializable { +public final class IntRef implements java.io.Serializable { private static final long serialVersionUID = 1488197132022872888L; public int elem; diff --git a/src/library/scala/runtime/LambdaDeserialize.java b/src/library/scala/runtime/LambdaDeserialize.java deleted file mode 100644 index f927699f7b0e..000000000000 --- a/src/library/scala/runtime/LambdaDeserialize.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.runtime; - - -import java.lang.invoke.*; -import java.util.HashMap; - -public final class LambdaDeserialize { - public static final MethodType DESERIALIZE_LAMBDA_MT = MethodType.fromMethodDescriptorString("(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", LambdaDeserialize.class.getClassLoader()); - - private MethodHandles.Lookup lookup; - private final HashMap cache = new HashMap<>(); - private final LambdaDeserializer$ l = LambdaDeserializer$.MODULE$; - private final HashMap targetMethodMap; - - private LambdaDeserialize(MethodHandles.Lookup lookup, MethodHandle[] targetMethods) { - this.lookup = lookup; - targetMethodMap = new HashMap<>(targetMethods.length); - for (MethodHandle targetMethod : targetMethods) { - MethodHandleInfo info = lookup.revealDirect(targetMethod); - String key = nameAndDescriptorKey(info.getName(), info.getMethodType().toMethodDescriptorString()); - targetMethodMap.put(key, targetMethod); - } - } - - public Object deserializeLambda(SerializedLambda serialized) { - return l.deserializeLambda(lookup, cache, targetMethodMap, serialized); - } - - public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, - MethodType invokedType, MethodHandle... 
targetMethods) throws Throwable { - MethodHandle deserializeLambda = lookup.findVirtual(LambdaDeserialize.class, "deserializeLambda", DESERIALIZE_LAMBDA_MT); - MethodHandle exact = deserializeLambda.bindTo(new LambdaDeserialize(lookup, targetMethods)).asType(invokedType); - return new ConstantCallSite(exact); - } - public static String nameAndDescriptorKey(String name, String descriptor) { - return name + descriptor; - } -} diff --git a/src/library/scala/runtime/LambdaDeserialize.scala b/src/library/scala/runtime/LambdaDeserialize.scala new file mode 100644 index 000000000000..b4270d63e643 --- /dev/null +++ b/src/library/scala/runtime/LambdaDeserialize.scala @@ -0,0 +1,44 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime + +import java.lang.invoke._ +import java.util + +import scala.annotation.varargs +import scala.collection.immutable + +final class LambdaDeserialize private (lookup: MethodHandles.Lookup, targetMethods: Array[MethodHandle]) { + private val targetMethodMap: util.HashMap[String, MethodHandle] = new util.HashMap[String, MethodHandle](targetMethods.length) + + for (targetMethod <- targetMethods) { + val info = lookup.revealDirect(targetMethod) + val key = LambdaDeserialize.nameAndDescriptorKey(info.getName, info.getMethodType.toMethodDescriptorString) + targetMethodMap.put(key, targetMethod) + } + + private val cache = new util.HashMap[String, MethodHandle] + + def deserializeLambda(serialized: SerializedLambda): AnyRef = LambdaDeserializer.deserializeLambda(lookup, cache, targetMethodMap, serialized) +} + +object LambdaDeserialize { + @varargs @throws[Throwable] + def bootstrap(lookup: MethodHandles.Lookup, invokedName: String, invokedType: MethodType, targetMethods: 
MethodHandle*): CallSite = { + val targetMethodsArray = targetMethods.asInstanceOf[immutable.ArraySeq[_]].unsafeArray.asInstanceOf[Array[MethodHandle]] + val exact = MethodHandleConstants.LAMBDA_DESERIALIZE_DESERIALIZE_LAMBDA.bindTo(new LambdaDeserialize(lookup, targetMethodsArray)).asType(invokedType) + new ConstantCallSite(exact) + } + + def nameAndDescriptorKey(name: String, descriptor: String): String = name + descriptor +} diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala index dc54c349eda6..76fc5d778bec 100644 --- a/src/library/scala/runtime/LambdaDeserializer.scala +++ b/src/library/scala/runtime/LambdaDeserializer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -31,13 +31,12 @@ object LambdaDeserializer { * concurrent deserialization of the same lambda expression may spin up more than one class. * * Assumptions: - * - No additional marker interfaces are required beyond `{java.io,scala.}Serializable`. These are + * - No additional marker interfaces are required beyond `java.io.Serializable`. These are * not stored in `SerializedLambda`, so we can't reconstitute them. * - No additional bridge methods are passed to `altMetafactory`. Again, these are not stored. * * @param lookup The factory for method handles. Must have access to the implementation method, the - * functional interface class, and `java.io.Serializable` or `scala.Serializable` as - * required. + * functional interface class, and `java.io.Serializable`. * @param cache A cache used to avoid spinning up a class for each deserialization of a given lambda. May be `null` * @param serialized The lambda to deserialize. Note that this is typically created by the `readResolve` * member of the anonymous class created by `LambdaMetaFactory`. 
@@ -45,6 +44,13 @@ object LambdaDeserializer { */ def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { + val result = deserializeLambdaOrNull(lookup, cache, targetMethodMap, serialized) + if (result == null) throw new IllegalArgumentException("Illegal lambda deserialization") + else result + } + + def deserializeLambdaOrNull(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], + targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { assert(targetMethodMap != null) def slashDot(name: String) = name.replaceAll("/", ".") val loader = lookup.lookupClass().getClassLoader @@ -86,12 +92,10 @@ object LambdaDeserializer { val implMethod: MethodHandle = if (targetMethodMap.containsKey(key)) { targetMethodMap.get(key) } else { - throw new IllegalArgumentException("Illegal lambda deserialization") + return null } - val flags: Int = LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS - val isScalaFunction = functionalInterfaceClass.getName.startsWith("scala.Function") - val markerInterface: Class[_] = loader.loadClass(if (isScalaFunction) ScalaSerializable else JavaIOSerializable) + val flags: Int = LambdaMetafactory.FLAG_SERIALIZABLE LambdaMetafactory.altMetafactory( lookup, getFunctionalInterfaceMethodName, invokedType, @@ -99,18 +103,19 @@ object LambdaDeserializer { /* samMethodType = */ funcInterfaceSignature, /* implMethod = */ implMethod, /* instantiatedMethodType = */ instantiated, - /* flags = */ flags.asInstanceOf[AnyRef], - /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef], - /* markerInterfaces[0] = */ markerInterface + /* flags = */ flags.asInstanceOf[AnyRef] ) } val factory: MethodHandle = if (cache == null) { - makeCallSite.getTarget + val callSite = makeCallSite + if (callSite == null) return null + callSite.getTarget } else cache.synchronized{ 
cache.get(key) match { case null => val callSite = makeCallSite + if (callSite == null) return null val temp = callSite.getTarget cache.put(key, temp) temp @@ -121,13 +126,4 @@ object LambdaDeserializer { val captures = Array.tabulate(serialized.getCapturedArgCount)(n => serialized.getCapturedArg(n)) factory.invokeWithArguments(captures: _*) } - - private val ScalaSerializable = "scala.Serializable" - - private val JavaIOSerializable = { - // We could actually omit this marker interface as LambdaMetaFactory will add it if - // the FLAG_SERIALIZABLE is set and of the provided markers extend it. But the code - // is cleaner if we uniformly add a single marker, so I'm leaving it in place. - "java.io.Serializable" - } } diff --git a/src/library/scala/runtime/LazyRef.scala b/src/library/scala/runtime/LazyRef.scala index 60a17b3d1c95..ee0364cfab94 100644 --- a/src/library/scala/runtime/LazyRef.scala +++ b/src/library/scala/runtime/LazyRef.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,7 @@ package scala.runtime /** Classes used as holders for lazy vals defined in methods. 
*/ -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyRef[T] extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -30,7 +30,7 @@ class LazyRef[T] extends Serializable { override def toString = s"LazyRef ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyBoolean extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -46,7 +46,7 @@ class LazyBoolean extends Serializable { override def toString = s"LazyBoolean ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyByte extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -64,7 +64,7 @@ class LazyByte extends Serializable { override def toString = s"LazyByte ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyChar extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -80,7 +80,7 @@ class LazyChar extends Serializable { override def toString = s"LazyChar ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyShort extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -96,7 +96,7 @@ class LazyShort extends Serializable { override def toString = s"LazyShort ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyInt extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -112,7 +112,7 @@ class LazyInt extends Serializable { override def toString = s"LazyInt ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyLong extends Serializable { @volatile 
private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -128,7 +128,7 @@ class LazyLong extends Serializable { override def toString = s"LazyLong ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyFloat extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -144,7 +144,7 @@ class LazyFloat extends Serializable { override def toString = s"LazyFloat ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyDouble extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -160,7 +160,7 @@ class LazyDouble extends Serializable { override def toString = s"LazyDouble ${if (_initialized) s"of: ${_value}" else "thunk"}" } -@SerialVersionUID(1l) +@SerialVersionUID(1L) class LazyUnit extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java index ef35b4dd01ed..9e189af0ef2b 100644 --- a/src/library/scala/runtime/LongRef.java +++ b/src/library/scala/runtime/LongRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class LongRef implements java.io.Serializable { +public final class LongRef implements java.io.Serializable { private static final long serialVersionUID = -3567869820105829499L; public long elem; diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala index 9406efe3c5bb..2aa41c9e352a 100644 --- a/src/library/scala/runtime/MethodCache.scala +++ b/src/library/scala/runtime/MethodCache.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/MethodHandleConstants.java b/src/library/scala/runtime/MethodHandleConstants.java new file mode 100644 index 000000000000..16773431f86a --- /dev/null +++ b/src/library/scala/runtime/MethodHandleConstants.java @@ -0,0 +1,35 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.lang.invoke.SerializedLambda; + +class MethodHandleConstants { + // static final MethodHandles are optimized by the JIT (https://stackoverflow.com/a/14146641/248998) + static final MethodHandle LAMBDA_DESERIALIZE_DESERIALIZE_LAMBDA; + + static { + LAMBDA_DESERIALIZE_DESERIALIZE_LAMBDA = lookupDeserialize(); + } + + private static MethodHandle lookupDeserialize() { + try { + return MethodHandles.lookup().findVirtual(Class.forName("scala.runtime.LambdaDeserialize"), "deserializeLambda", MethodType.methodType(Object.class, SerializedLambda.class)); + } catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) { + throw new ExceptionInInitializerError(e); + } + } +} diff --git a/src/library/scala/runtime/ModuleSerializationProxy.scala b/src/library/scala/runtime/ModuleSerializationProxy.scala new file mode 100644 index 000000000000..ad12bd17f7bc --- /dev/null +++ b/src/library/scala/runtime/ModuleSerializationProxy.scala @@ -0,0 +1,43 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime + +import java.io.Serializable +import java.security.PrivilegedActionException +import java.security.PrivilegedExceptionAction +import scala.annotation.nowarn + +private[runtime] object ModuleSerializationProxy { + private val instances: ClassValueCompat[Object] = new ClassValueCompat[Object] { + @nowarn("cat=deprecation") // AccessController is deprecated on JDK 17 + def getModule(cls: Class[_]): Object = + java.security.AccessController.doPrivileged( + (() => cls.getField("MODULE$").get(null)): PrivilegedExceptionAction[Object]) + override protected def computeValue(cls: Class[_]): Object = + try getModule(cls) + catch { + case e: PrivilegedActionException => + rethrowRuntime(e.getCause) + } + } + + private def rethrowRuntime(e: Throwable): Object = e match { + case re: RuntimeException => throw re + case _ => throw new RuntimeException(e) + } +} + +@SerialVersionUID(1L) +final class ModuleSerializationProxy(moduleClass: Class[_]) extends Serializable { + private def readResolve = ModuleSerializationProxy.instances.get(moduleClass) +} diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala index c14fe6b3fb56..8abd48501e3c 100644 --- a/src/library/scala/runtime/NonLocalReturnControl.scala +++ b/src/library/scala/runtime/NonLocalReturnControl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,11 +10,12 @@ * additional information regarding copyright ownership. 
*/ -package scala -package runtime +package scala.runtime import scala.util.control.ControlThrowable -class NonLocalReturnControl[@specialized T](val key: AnyRef, val value: T) extends ControlThrowable { +// remove Unit specialization when binary compatibility permits +@annotation.nowarn("cat=lint-unit-specialization") +class NonLocalReturnControl[@specialized(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit) T](val key: AnyRef, val value: T) extends ControlThrowable { final override def fillInStackTrace(): Throwable = this } diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala index 314ffc2e6240..cb098a5507da 100644 --- a/src/library/scala/runtime/Nothing$.scala +++ b/src/library/scala/runtime/Nothing$.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala index a56f4c2df8b2..d279f861e94a 100644 --- a/src/library/scala/runtime/Null$.scala +++ b/src/library/scala/runtime/Null$.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java index 452985490662..04545449e9a1 100644 --- a/src/library/scala/runtime/ObjectRef.java +++ b/src/library/scala/runtime/ObjectRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class ObjectRef implements java.io.Serializable { +public final class ObjectRef implements java.io.Serializable { private static final long serialVersionUID = -9055728157600312291L; public T elem; diff --git a/src/library/scala/runtime/PStatics.scala b/src/library/scala/runtime/PStatics.scala new file mode 100644 index 000000000000..9196e21e75be --- /dev/null +++ b/src/library/scala/runtime/PStatics.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime + +// things that should be in `Statics`, but can't be yet for bincompat reasons +// TODO 3.T: move to `Statics` +private[scala] object PStatics { + // `Int.MaxValue - 8` traditional soft limit to maximize compatibility with diverse JVMs + // See https://stackoverflow.com/a/8381338 for example + final val VM_MaxArraySize = 2147483639 +} diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala index ca7fd39cddae..8b1c1525cedf 100644 --- a/src/library/scala/runtime/RichBoolean.scala +++ b/src/library/scala/runtime/RichBoolean.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,5 +15,5 @@ package runtime final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] { - protected def ord = scala.math.Ordering.Boolean + protected def ord: scala.math.Ordering.Boolean.type = scala.math.Ordering.Boolean } diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala index 998b1fbef6fd..6060d9b75a79 100644 --- a/src/library/scala/runtime/RichByte.scala +++ b/src/library/scala/runtime/RichByte.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,22 +13,23 @@ package scala package runtime - final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] { - protected def num = scala.math.Numeric.ByteIsIntegral - protected def ord = scala.math.Ordering.Byte + protected def num: scala.math.Numeric.ByteIsIntegral.type = scala.math.Numeric.ByteIsIntegral + protected def ord: scala.math.Ordering.Byte.type = scala.math.Ordering.Byte - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self - override def shortValue() = self.toShort + override def doubleValue = self.toDouble + override def floatValue = self.toFloat + override def longValue = self.toLong + override def intValue = self.toInt + override def byteValue = self + override def shortValue = self.toShort override def isValidByte = true + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. + // We'd like to redefine signum and sign too but forwards binary compatibility doesn't allow us to. 
override def abs: Byte = math.abs(self).toByte override def max(that: Byte): Byte = math.max(self, that).toByte override def min(that: Byte): Byte = math.min(self, that).toByte - override def signum: Int = math.signum(self.toInt) } diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala index 72554741a7a5..2bdb80be96f4 100644 --- a/src/library/scala/runtime/RichChar.scala +++ b/src/library/scala/runtime/RichChar.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,26 +13,25 @@ package scala package runtime - -import java.lang.Character - final class RichChar(val self: Char) extends AnyVal with IntegralProxy[Char] { - protected def num = scala.math.Numeric.CharIsIntegral - protected def ord = scala.math.Ordering.Char + protected def num: scala.math.Numeric.CharIsIntegral.type = scala.math.Numeric.CharIsIntegral + protected def ord: scala.math.Ordering.Char.type = scala.math.Ordering.Char - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort + override def doubleValue = self.toDouble + override def floatValue = self.toFloat + override def longValue = self.toLong + override def intValue = self.toInt + override def byteValue = self.toByte + override def shortValue = self.toShort override def isValidChar = true + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. + // We'd like to redefine signum and sign too but forwards binary compatibility doesn't allow us to. 
override def abs: Char = self override def max(that: Char): Char = math.max(self.toInt, that.toInt).toChar override def min(that: Char): Char = math.min(self.toInt, that.toInt).toChar - override def signum: Int = math.signum(self.toInt) def asDigit: Int = Character.digit(self, Character.MAX_RADIX) diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala index 6f99e8442c5c..b453e69deee0 100644 --- a/src/library/scala/runtime/RichDouble.scala +++ b/src/library/scala/runtime/RichDouble.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,16 +14,15 @@ package scala package runtime final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] { - protected def num = scala.math.Numeric.DoubleIsFractional - protected def ord = scala.math.Ordering.Double - protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral + protected def num: Fractional[Double] = scala.math.Numeric.DoubleIsFractional + protected def ord: Ordering[Double] = scala.math.Ordering.Double.TotalOrdering - override def doubleValue() = self - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort + override def doubleValue = self + override def floatValue = self.toFloat + override def longValue = self.toLong + override def intValue = self.toInt + override def byteValue = self.toByte + override def shortValue = self.toShort override def isWhole = { val l = self.toLong @@ -39,13 +38,18 @@ final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Dou def isNaN: Boolean = java.lang.Double.isNaN(self) def isInfinity: Boolean = java.lang.Double.isInfinite(self) + def isFinite: Boolean = 
java.lang.Double.isFinite(self) def isPosInfinity: Boolean = Double.PositiveInfinity == self def isNegInfinity: Boolean = Double.NegativeInfinity == self + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. + // We'd like to redefine sign too but forwards binary compatibility doesn't allow us to. override def abs: Double = math.abs(self) override def max(that: Double): Double = math.max(self, that) override def min(that: Double): Double = math.min(self, that) - override def signum: Int = math.signum(self).toInt // !!! NaN + @deprecated("signum does not handle -0.0 or Double.NaN; use `sign` method instead", since = "2.13.0") + override def signum: Int = math.signum(self).toInt def round: Long = math.round(self) def ceil: Double = math.ceil(self) diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala deleted file mode 100644 index 0e2168ddf709..000000000000 --- a/src/library/scala/runtime/RichException.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package runtime - -import scala.compat.Platform.EOL - -@deprecated("use Throwable#getStackTrace", "2.11.0") -final class RichException(exc: Throwable) { - def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL) -} diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala index dbccd5bd0b78..c6570ab10615 100644 --- a/src/library/scala/runtime/RichFloat.scala +++ b/src/library/scala/runtime/RichFloat.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,16 +14,15 @@ package scala package runtime final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] { - protected def num = scala.math.Numeric.FloatIsFractional - protected def ord = scala.math.Ordering.Float - protected def integralNum = scala.math.Numeric.FloatAsIfIntegral + protected def num: Fractional[Float] = scala.math.Numeric.FloatIsFractional + protected def ord: Ordering[Float] = scala.math.Ordering.Float.TotalOrdering - override def doubleValue() = self.toDouble - override def floatValue() = self - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort + override def doubleValue = self.toDouble + override def floatValue = self + override def longValue = self.toLong + override def intValue = self.toInt + override def byteValue = self.toByte + override def shortValue = self.toShort override def isWhole = { val l = self.toLong @@ -39,13 +38,18 @@ final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float def isNaN: Boolean = java.lang.Float.isNaN(self) def isInfinity: Boolean = java.lang.Float.isInfinite(self) + def isFinite: Boolean = java.lang.Float.isFinite(self) def isPosInfinity: Boolean = 
Float.PositiveInfinity == self def isNegInfinity: Boolean = Float.NegativeInfinity == self + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. + // We'd like to redefine sign too but forwards binary compatibility doesn't allow us to. override def abs: Float = math.abs(self) override def max(that: Float): Float = math.max(self, that) override def min(that: Float): Float = math.min(self, that) - override def signum: Int = math.signum(self).toInt // !!! NaN + @deprecated("signum does not handle -0.0f or Float.NaN; use `sign` method instead", since = "2.13.0") + override def signum: Int = math.signum(self).toInt def round: Int = math.round(self) def ceil: Float = math.ceil(self.toDouble).toFloat diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala index b5deccaf4e84..7499e5078006 100644 --- a/src/library/scala/runtime/RichInt.scala +++ b/src/library/scala/runtime/RichInt.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,29 +18,31 @@ import scala.collection.immutable.Range // Note that this does not implement IntegralProxy[Int] so that it can return // the Int-specific Range class from until/to. 
final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] with RangedProxy[Int] { - protected def num = scala.math.Numeric.IntIsIntegral - protected def ord = scala.math.Ordering.Int + protected def num: scala.math.Numeric.IntIsIntegral.type = scala.math.Numeric.IntIsIntegral + protected def ord: scala.math.Ordering.Int.type = scala.math.Ordering.Int - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self - override def byteValue() = self.toByte - override def shortValue() = self.toShort + override def doubleValue = self.toDouble + override def floatValue = self.toFloat + override def longValue = self.toLong + override def intValue = self + override def byteValue = self.toByte + override def shortValue = self.toShort /** Returns `'''true'''` if this number has no decimal component. * Always `'''true'''` for `RichInt`. */ @deprecated("isWhole on an integer type is always true", "2.12.15") - def isWhole() = true + def isWhole = true override def isValidInt = true def isValidLong = true + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. + // We'd like to redefine signum and sign too but forwards binary compatibility doesn't allow us to. override def abs: Int = math.abs(self) override def max(that: Int): Int = math.max(self, that) override def min(that: Int): Int = math.min(self, that) - override def signum: Int = math.signum(self) /** There is no reason to round an `Int`, but this method is provided to avoid accidental loss of precision from a detour through `Float`. */ @deprecated("this is an integer type; there is no reason to round it. 
Perhaps you meant to call this on a floating-point value?", "2.11.0") diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala index 1cf00fbaf74e..1f5bc5d0da4b 100644 --- a/src/library/scala/runtime/RichLong.scala +++ b/src/library/scala/runtime/RichLong.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,15 +14,15 @@ package scala package runtime final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] { - protected def num = scala.math.Numeric.LongIsIntegral - protected def ord = scala.math.Ordering.Long + protected def num: scala.math.Numeric.LongIsIntegral.type = scala.math.Numeric.LongIsIntegral + protected def ord: scala.math.Ordering.Long.type = scala.math.Ordering.Long - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self.toShort + override def doubleValue = self.toDouble + override def floatValue = self.toFloat + override def longValue = self + override def intValue = self.toInt + override def byteValue = self.toByte + override def shortValue = self.toShort override def isValidByte = self.toByte.toLong == self override def isValidShort = self.toShort.toLong == self @@ -32,10 +32,12 @@ final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] { // override def isValidFloat = self.toFloat.toLong == self && self != Long.MaxValue // override def isValidDouble = self.toDouble.toLong == self && self != Long.MaxValue + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. 
+ // We'd like to redefine signum and sign too but forwards binary compatibility doesn't allow us to. override def abs: Long = math.abs(self) override def max(that: Long): Long = math.max(self, that) override def min(that: Long): Long = math.min(self, that) - override def signum: Int = math.signum(self).toInt /** There is no reason to round a `Long`, but this method is provided to avoid accidental conversion to `Int` through `Float`. */ @deprecated("this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0") diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala index f15698fb2949..1f4ebfaf0b1a 100644 --- a/src/library/scala/runtime/RichShort.scala +++ b/src/library/scala/runtime/RichShort.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,22 +13,23 @@ package scala package runtime - final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] { - protected def num = scala.math.Numeric.ShortIsIntegral - protected def ord = scala.math.Ordering.Short + protected def num: scala.math.Numeric.ShortIsIntegral.type = scala.math.Numeric.ShortIsIntegral + protected def ord: scala.math.Ordering.Short.type = scala.math.Ordering.Short - override def doubleValue() = self.toDouble - override def floatValue() = self.toFloat - override def longValue() = self.toLong - override def intValue() = self.toInt - override def byteValue() = self.toByte - override def shortValue() = self + override def doubleValue = self.toDouble + override def floatValue = self.toFloat + override def longValue = self.toLong + override def intValue = self.toInt + override def byteValue = self.toByte + override def shortValue = self override def isValidShort = true + // These method are all overridden and redefined to call out to scala.math to avoid 3 allocations: + // the primitive boxing, the value class boxing and instantiation of the Numeric num. + // We'd like to redefine signum and sign too but forwards binary compatibility doesn't allow us to. override def abs: Short = math.abs(self.toInt).toShort override def max(that: Short): Short = math.max(self.toInt, that.toInt).toShort override def min(that: Short): Short = math.min(self.toInt, that.toInt).toShort - override def signum: Int = math.signum(self.toInt) } diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala index f4a55d32e1bb..a39c0e7854d3 100644 --- a/src/library/scala/runtime/ScalaNumberProxy.scala +++ b/src/library/scala/runtime/ScalaNumberProxy.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -17,23 +17,21 @@ import scala.collection.immutable import scala.math.ScalaNumericAnyConversions import immutable.NumericRange import Proxy.Typed +import scala.annotation.nowarn /** Base classes for the Rich* wrappers of the primitive types. * As with all classes in scala.runtime.*, this is not a supported API. - * - * @author Paul Phillips - * @since 2.9 */ +@nowarn("cat=deprecation") trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed[T] with OrderedProxy[T] { protected implicit def num: Numeric[T] - def underlying() = self.asInstanceOf[AnyRef] - def doubleValue() = num.toDouble(self) - def floatValue() = num.toFloat(self) - def longValue() = num.toLong(self) - def intValue() = num.toInt(self) - def byteValue() = intValue().toByte - def shortValue() = intValue().toShort + def doubleValue = num.toDouble(self) + def floatValue = num.toFloat(self) + def longValue = num.toLong(self) + def intValue = num.toInt(self) + def byteValue = intValue.toByte + def shortValue = intValue.toShort /** Returns `'''this'''` if `'''this''' < that` or `that` otherwise. */ def min(that: T): T = num.min(self, that) @@ -41,12 +39,19 @@ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed def max(that: T): T = num.max(self, that) /** Returns the absolute value of `'''this'''`. */ def abs = num.abs(self) + /** + * Returns the sign of `'''this'''`. + * zero if the argument is zero, -zero if the argument is -zero, + * one if the argument is greater than zero, -one if the argument is less than zero, + * and NaN if the argument is NaN where applicable. + */ + def sign: T = num.sign(self) /** Returns the signum of `'''this'''`. 
*/ - def signum = num.signum(self) + @deprecated("use `sign` method instead", since = "2.13.0") def signum: Int = num.signum(self) } trait ScalaWholeNumberProxy[T] extends Any with ScalaNumberProxy[T] { @deprecated("isWhole on an integer type is always true", "2.12.15") - def isWhole() = true + def isWhole = true } trait IntegralProxy[T] extends Any with ScalaWholeNumberProxy[T] with RangedProxy[T] { protected implicit def num: Integral[T] @@ -57,28 +62,20 @@ trait IntegralProxy[T] extends Any with ScalaWholeNumberProxy[T] with RangedProx def to(end: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, num.one) def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step) } -trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T] { +trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] { protected implicit def num: Fractional[T] - protected implicit def integralNum: Integral[T] - /** In order to supply predictable ranges, we require an Integral[T] which provides - * us with discrete operations on the (otherwise fractional) T. See Numeric.DoubleAsIfIntegral - * for an example. 
- */ - type ResultWithoutStep = Range.Partial[T, NumericRange[T]] - - def isWhole() = false - @deprecated("use BigDecimal range instead", "2.12.6") def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _)) - @deprecated("use BigDecimal range instead", "2.12.6") def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step) - @deprecated("use BigDecimal range instead", "2.12.6") def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _)) - @deprecated("use BigDecimal range instead", "2.12.6") def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step) + def isWhole = false } +@nowarn("cat=deprecation") trait OrderedProxy[T] extends Any with Ordered[T] with Typed[T] { protected def ord: Ordering[T] def compare(y: T) = ord.compare(self, y) } + +@nowarn("cat=deprecation") trait RangedProxy[T] extends Any with Typed[T] { type ResultWithoutStep diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index ec407ea97acf..5c227b33c5ef 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,14 +13,14 @@ package scala package runtime -import scala.collection.{ TraversableView, AbstractIterator, GenIterable } -import scala.collection.mutable.WrappedArray -import scala.collection.immutable.{ StringLike, NumericRange } -import scala.collection.generic.{ Sorted, IsTraversableLike } -import scala.reflect.{ ClassTag, classTag } -import java.lang.{ Class => jClass } - -import java.lang.reflect.{ Method => JMethod } +import scala.collection.{AbstractIterator, AnyConstr, SortedOps, StrictOptimizedIterableOps, StringOps, StringView, View} +import scala.collection.generic.IsIterable +import scala.collection.immutable.{ArraySeq, NumericRange} +import scala.collection.mutable.StringBuilder +import scala.math.min +import scala.reflect.{ClassTag, classTag} +import java.lang.{Class => jClass} +import java.lang.reflect.{Method => JMethod} /** The object ScalaRunTime provides support methods required by * the scala runtime. All these methods should be considered @@ -34,8 +34,8 @@ object ScalaRunTime { clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) // A helper method to make my life in the pattern matcher a lot easier. - def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr = - traversable conversion coll drop num + def drop[Repr](coll: Repr, num: Int)(implicit iterable: IsIterable[Repr] { type C <: Repr }): Repr = + iterable(coll) drop num /** Return the class object representing an array with element class `clazz`. 
*/ @@ -54,7 +54,7 @@ object ScalaRunTime { /** Retrieve generic array element */ def array_apply(xs: AnyRef, idx: Int): Any = { - xs match { + (xs: @unchecked) match { case x: Array[AnyRef] => x(idx).asInstanceOf[Any] case x: Array[Int] => x(idx).asInstanceOf[Any] case x: Array[Double] => x(idx).asInstanceOf[Any] @@ -70,7 +70,7 @@ object ScalaRunTime { /** update generic array element */ def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { - xs match { + (xs: @unchecked) match { case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] case x: Array[Int] => x(idx) = value.asInstanceOf[Int] case x: Array[Double] => x(idx) = value.asInstanceOf[Double] @@ -85,20 +85,11 @@ object ScalaRunTime { } /** Get generic array length */ - def array_length(xs: AnyRef): Int = xs match { - case x: Array[AnyRef] => x.length - case x: Array[Int] => x.length - case x: Array[Double] => x.length - case x: Array[Long] => x.length - case x: Array[Float] => x.length - case x: Array[Char] => x.length - case x: Array[Byte] => x.length - case x: Array[Short] => x.length - case x: Array[Boolean] => x.length - case null => throw new NullPointerException - } + @inline def array_length(xs: AnyRef): Int = java.lang.reflect.Array.getLength(xs) - def array_clone(xs: AnyRef): AnyRef = xs match { + // TODO: bytecode Object.clone() will in fact work here and avoids + // the type switch. See Array_clone comment in BCodeBodyBuilder. + def array_clone(xs: AnyRef): AnyRef = (xs: @unchecked) match { case x: Array[AnyRef] => x.clone() case x: Array[Int] => x.clone() case x: Array[Double] => x.clone() @@ -129,7 +120,7 @@ object ScalaRunTime { dest } } - src match { + (src: @unchecked) match { case x: Array[AnyRef] => x case x: Array[Int] => copy(x) case x: Array[Double] => copy(x) @@ -161,16 +152,22 @@ object ScalaRunTime { // More background at ticket #2318. 
def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) + // This is called by the synthetic case class `toString` method. + // It originally had a `CaseClass` parameter type which was changed to `Product`. def _toString(x: Product): String = x.productIterator.mkString(x.productPrefix + "(", ",", ")") - def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) + // This method is called by case classes compiled by older Scala 2.13 / Scala 3 versions, so it needs to stay. + // In newer versions, the synthetic case class `hashCode` has either the calculation inlined or calls + // `MurmurHash3.productHash`. + // There used to be an `_equals` method as well which was removed in 5e7e81ab2a. + def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.caseClassHash(x) /** A helper for case classes. */ def typedProductIterator[T](x: Product): Iterator[T] = { new AbstractIterator[T] { - private var c: Int = 0 - private val cmax = x.productArity + private[this] var c: Int = 0 + private[this] val cmax = x.productArity def hasNext = c < cmax def next() = { val result = x.productElement(c) @@ -180,10 +177,6 @@ object ScalaRunTime { } } - /** Old implementation of `##`. */ - @deprecated("Use scala.runtime.Statics.anyHash instead.", "2.12.0") - def hash(x: Any): Int = Statics.anyHash(x.asInstanceOf[Object]) - /** Given any Scala value, convert it to a String. 
* * The primary motivation for this method is to provide a means for @@ -225,17 +218,17 @@ object ScalaRunTime { // Range/NumericRange have a custom toString to avoid walking a gazillion elements case _: Range | _: NumericRange[_] => true // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 - case _: Sorted[_, _] => true + case _: SortedOps[_, _] => true // StringBuilder(a, b, c) and similar not so attractive - case _: StringLike[_] => true + case _: StringView | _: StringOps | _: StringBuilder => true // Don't want to evaluate any elements in a view - case _: TraversableView[_, _] => true + case _: View[_] => true // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] // -> catch those by isXmlNode and isXmlMetaData. // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom // collections which may have useful toString methods - ticket #3710 // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. - case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) + case x: Iterable[_] => (!x.isInstanceOf[StrictOptimizedIterableOps[_, AnyConstr, _]]) || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass) // Otherwise, nothing could possibly go wrong case _ => false } @@ -249,9 +242,9 @@ object ScalaRunTime { // Special casing Unit arrays, the value class which uses a reference array type. 
def arrayToString(x: AnyRef) = { if (x.getClass.getComponentType == classOf[BoxedUnit]) - 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")") + (0 until min(array_length(x), maxElements)).map(_ => "()").mkString("Array(", ", ", ")") else - WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") + x.asInstanceOf[Array[_]].iterator.take(maxElements).map(inner).mkString("Array(", ", ", ")") } // The recursively applied attempt to prettify Array printing. @@ -264,11 +257,10 @@ object ScalaRunTime { case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x case x if useOwnToString(x) => x.toString case x: AnyRef if isArray(x) => arrayToString(x) - case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") - case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") + case x: scala.collection.Map[_, _] => x.iterator.take(maxElements).map(mapInner).mkString(x.collectionClassName + "(", ", ", ")") + case x: Iterable[_] => x.iterator.take(maxElements).map(inner).mkString(x.collectionClassName + "(", ", ", ")") case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma - case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") + case x: Product if isTuple(x) => x.productIterator.map(inner).mkString("(", ",", ")") case x => x.toString } @@ -281,10 +273,27 @@ object ScalaRunTime { } /** stringOf formatted for use in a repl result. 
*/ - def replStringOf(arg: Any, maxElements: Int): String = { - val s = stringOf(arg, maxElements) - val nl = if (s contains "\n") "\n" else "" + def replStringOf(arg: Any, maxElements: Int): String = + stringOf(arg, maxElements) match { + case null => "null toString" + case s if s.indexOf('\n') >= 0 => "\n" + s + "\n" + case s => s + "\n" + } - nl + s + "\n" - } + // Convert arrays to immutable.ArraySeq for use with Scala varargs. + // By construction, calls to these methods always receive a fresh (and non-null), non-empty array. + // In cases where an empty array would appear, the compiler uses a direct reference to Nil instead. + // Synthetic Java varargs forwarders (@annotation.varargs or varargs bridges when overriding) may pass + // `null` to these methods; but returning `null` or `ArraySeq(null)` makes little difference in practice. + def genericWrapArray[T](xs: Array[T]): ArraySeq[T] = ArraySeq.unsafeWrapArray(xs) + def wrapRefArray[T <: AnyRef](xs: Array[T]): ArraySeq[T] = new ArraySeq.ofRef[T](xs) + def wrapIntArray(xs: Array[Int]): ArraySeq[Int] = new ArraySeq.ofInt(xs) + def wrapDoubleArray(xs: Array[Double]): ArraySeq[Double] = new ArraySeq.ofDouble(xs) + def wrapLongArray(xs: Array[Long]): ArraySeq[Long] = new ArraySeq.ofLong(xs) + def wrapFloatArray(xs: Array[Float]): ArraySeq[Float] = new ArraySeq.ofFloat(xs) + def wrapCharArray(xs: Array[Char]): ArraySeq[Char] = new ArraySeq.ofChar(xs) + def wrapByteArray(xs: Array[Byte]): ArraySeq[Byte] = new ArraySeq.ofByte(xs) + def wrapShortArray(xs: Array[Short]): ArraySeq[Short] = new ArraySeq.ofShort(xs) + def wrapBooleanArray(xs: Array[Boolean]): ArraySeq[Boolean] = new ArraySeq.ofBoolean(xs) + def wrapUnitArray(xs: Array[Unit]): ArraySeq[Unit] = new ArraySeq.ofUnit(xs) } diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala deleted file mode 100644 index f14aab284ea0..000000000000 --- a/src/library/scala/runtime/SeqCharSequence.scala +++ /dev/null @@ 
-1,54 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package runtime - -@deprecated("use Predef.SeqCharSequence", "2.11.0") -final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence { - def length: Int = xs.length - def charAt(index: Int): Char = xs(index) - def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(xs.slice(start, end)) - override def toString = xs.mkString("") -} - -@deprecated("use `java.nio.CharBuffer.wrap` instead", "2.12.13") -final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence { - // yikes - // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: signature: ([C)V) - // Constructor must call super() or this() - // - // def this(xs: Array[Char]) = this(xs, 0, xs.length) - - def length: Int = math.max(0, end - start) - def charAt(index: Int): Char = { - if (0 <= index && index < length) - xs(start + index) - else throw new ArrayIndexOutOfBoundsException(index) - } - def subSequence(start0: Int, end0: Int): CharSequence = { - if (start0 < 0) throw new ArrayIndexOutOfBoundsException(start0) - else if (end0 > length) throw new ArrayIndexOutOfBoundsException(end0) - else if (end0 <= start0) new ArrayCharSequence(xs, 0, 0) - else { - val newlen = end0 - start0 - val start1 = start + start0 - new ArrayCharSequence(xs, start1, start1 + newlen) - } - } - override def toString = { - val start = math.max(this.start, 0) - val end = math.min(xs.length, start + length) - - if (start >= end) "" else new String(xs, start, end - start) - } -} diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java index 9862d03fdf36..11fd2aece720 100644 
--- a/src/library/scala/runtime/ShortRef.java +++ b/src/library/scala/runtime/ShortRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class ShortRef implements java.io.Serializable { +public final class ShortRef implements java.io.Serializable { private static final long serialVersionUID = 4218441291229072313L; public short elem; diff --git a/src/library/scala/runtime/Static.java b/src/library/scala/runtime/Static.java new file mode 100644 index 000000000000..1971fe3b463e --- /dev/null +++ b/src/library/scala/runtime/Static.java @@ -0,0 +1,25 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime; + +import java.lang.invoke.*; + +public final class Static { + private Static() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, MethodType invokedType, MethodHandle handle, Object... args) throws Throwable { + Object value = handle.invokeWithArguments(args); + return new ConstantCallSite(MethodHandles.constant(invokedType.returnType(), value)); + } +} diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java index ae62c21d2a77..34dc1818065a 100644 --- a/src/library/scala/runtime/Statics.java +++ b/src/library/scala/runtime/Statics.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,6 +12,11 @@ package scala.runtime; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.lang.reflect.Field; + /** Not for public consumption. Usage by the runtime only. */ @@ -134,4 +139,61 @@ private static int anyHashNumber(Number x) { return x.hashCode(); } + + /** Used as a marker object to return from PartialFunctions */ + public static final Object pfMarker = new Object(); + + // @ForceInline would be nice here. + public static void releaseFence() throws Throwable { + VM.RELEASE_FENCE.invoke(); + } + + final static class VM { + static final MethodHandle RELEASE_FENCE; + + static { + RELEASE_FENCE = mkHandle(); + } + + private static MethodHandle mkHandle() { + MethodHandles.Lookup lookup = MethodHandles.lookup(); + try { + return lookup.findStatic(Class.forName("java.lang.invoke.VarHandle"), "releaseFence", MethodType.methodType(Void.TYPE)); + } catch (NoSuchMethodException | ClassNotFoundException e) { + try { + Class unsafeClass = Class.forName("sun.misc.Unsafe"); + return lookup.findVirtual(unsafeClass, "storeFence", MethodType.methodType(void.class)).bindTo(findUnsafe(unsafeClass)); + } catch (NoSuchMethodException | ClassNotFoundException | IllegalAccessException e1) { + ExceptionInInitializerError error = new ExceptionInInitializerError(e1); + error.addSuppressed(e); + throw error; + } + } catch (IllegalAccessException e) { + throw new ExceptionInInitializerError(e); + } + } + + private static Object findUnsafe(Class unsafeClass) throws IllegalAccessException { + Object found = null; + for (Field field : unsafeClass.getDeclaredFields()) { + if (field.getType() == unsafeClass) { + field.setAccessible(true); + found = field.get(null); + break; + } + } + if (found == null) throw new IllegalStateException("No instance of Unsafe found"); + return found; + } + } + + /** + * Just throws an exception. 
+ * Used by the synthetic `productElement` and `productElementName` methods in case classes. + * Delegating the exception-throwing to this function reduces the bytecode size of the case class. + */ + public static final T ioobe(int n) throws IndexOutOfBoundsException { + throw new IndexOutOfBoundsException(String.valueOf(n)); + } + } diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala deleted file mode 100644 index 04881769ed63..000000000000 --- a/src/library/scala/runtime/StringAdd.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package runtime - - -/** A wrapper class that adds string concatenation `+` to any value */ -@deprecated("use Predef.StringAdd", "2.11.0") -final class StringAdd(val self: Any) extends AnyVal { - def +(other: String) = String.valueOf(self) + other -} diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala deleted file mode 100644 index 481c6c6b2962..000000000000 --- a/src/library/scala/runtime/StringFormat.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package runtime - - -/** A wrapper class that adds a `formatted` operation to any value - */ -@deprecated("use Predef.StringFormat", "2.11.0") -final class StringFormat(val self: Any) extends AnyVal { - /** Returns string formatted according to given `format` string. 
- * Format strings are as for `String.format` - * (@see java.lang.String.format). - */ - @inline def formatted(fmtstr: String): String = fmtstr format self -} diff --git a/src/library/scala/runtime/StructuralCallSite.java b/src/library/scala/runtime/StructuralCallSite.java deleted file mode 100644 index 36da98a49d72..000000000000 --- a/src/library/scala/runtime/StructuralCallSite.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.runtime; - - -import java.lang.invoke.*; -import java.lang.ref.SoftReference; -import java.lang.reflect.Method; - -public final class StructuralCallSite { - - private Class[] parameterTypes; - private SoftReference cache = new SoftReference<>(new EmptyMethodCache()); - - private StructuralCallSite(MethodType callType) { - parameterTypes = callType.parameterArray(); - } - - public MethodCache get() { - MethodCache cache = this.cache.get(); - if (cache == null) { - cache = new EmptyMethodCache(); - this.cache = new SoftReference<>(cache); - } - return cache; - } - - public Method find(Class receiver) { - return get().find(receiver); - } - - public Method add(Class receiver, Method m) { - cache = new SoftReference(get().add(receiver, m)); - return m; - } - public Class[] parameterTypes() { - return parameterTypes; - } - - public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, - MethodType invokedType, MethodType reflectiveCallType) throws Throwable { - StructuralCallSite structuralCallSite = new StructuralCallSite(reflectiveCallType); - return new ConstantCallSite(MethodHandles.constant(StructuralCallSite.class, structuralCallSite)); - } -} diff --git a/src/library/scala/runtime/StructuralCallSite.scala 
b/src/library/scala/runtime/StructuralCallSite.scala new file mode 100644 index 000000000000..8e245e6c99db --- /dev/null +++ b/src/library/scala/runtime/StructuralCallSite.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime + +import java.lang.invoke._ +import java.lang.ref.SoftReference +import java.lang.reflect.Method + +final class StructuralCallSite private (callType: MethodType) { + private var cache: SoftReference[MethodCache] = new SoftReference(new EmptyMethodCache) + + val parameterTypes: Array[Class[_]] = callType.parameterArray + + def get: MethodCache = { + var cache = this.cache.get + if (cache == null) { + cache = new EmptyMethodCache + this.cache = new SoftReference(cache) + } + cache + } + + def find(receiver: Class[_]): Method = get.find(receiver) + + def add(receiver: Class[_], m: Method): Method = { + cache = new SoftReference(get.add(receiver, m)) + m + } +} + +object StructuralCallSite { + def bootstrap(lookup: MethodHandles.Lookup, invokedName: String, invokedType: MethodType, reflectiveCallType: MethodType): CallSite = { + val structuralCallSite = new StructuralCallSite(reflectiveCallType) + new ConstantCallSite(MethodHandles.constant(classOf[StructuralCallSite], structuralCallSite)) + } +} diff --git a/src/library/scala/runtime/SymbolLiteral.java b/src/library/scala/runtime/SymbolLiteral.java index 560fef53333c..67f59b15fbe2 100644 --- a/src/library/scala/runtime/SymbolLiteral.java +++ b/src/library/scala/runtime/SymbolLiteral.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/runtime/TraitSetter.java b/src/library/scala/runtime/TraitSetter.java deleted file mode 100644 index 967b7033fa99..000000000000 --- a/src/library/scala/runtime/TraitSetter.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.runtime; - -/** A marker annotation to tag a setter of a mutable variable in a trait - */ -@Deprecated -public @interface TraitSetter { -} \ No newline at end of file diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index fee5618e5375..e49bf9e1aba6 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,130 +14,131 @@ package scala package runtime -import scala.collection.{ TraversableLike, IterableLike } -import scala.collection.generic.{ CanBuildFrom => CBF } -import scala.language.{ higherKinds, implicitConversions } +import scala.collection.{BuildFrom, IterableOps} +import scala.language.implicitConversions /** This interface is intended as a minimal interface, not complicated * by the requirement to resolve type constructors, for implicit search (which only - * needs to find an implicit conversion to Traversable for our purposes.) - * @define Coll `ZippedTraversable2` + * needs to find an implicit conversion to Iterable for our purposes.) 
+ * @define Coll `ZippedIterable2` * @define coll collection * @define collectExample * @define willNotTerminateInf */ -trait ZippedTraversable2[+El1, +El2] extends Any { - def foreach[U](f: (El1, El2) => U): Unit +@deprecated("Use scala.collection.LazyZip2.", "2.13.0") +trait ZippedIterable2[+El1, +El2] extends Any { + def iterator: Iterator[(El1, El2)] + def isEmpty: Boolean } -object ZippedTraversable2 { - implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = { - new scala.collection.AbstractTraversable[(El1, El2)] { - def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f) +@deprecated("Use scala.collection.LazyZip2.", "2.13.0") +object ZippedIterable2 { + implicit def zippedIterable2ToIterable[El1, El2](zz: ZippedIterable2[El1, El2]): Iterable[(El1, El2)] = { + new scala.collection.AbstractIterable[(El1, El2)] { + def iterator: Iterator[(El1, El2)] = zz.iterator + override def isEmpty: Boolean = zz.isEmpty } } } -final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] { +@deprecated("Use scala.collection.LazyZip2.", "2.13.0") +final class Tuple2Zipped[El1, It1 <: Iterable[El1], El2, It2 <: Iterable[El2]](private val colls: (It1, It2)) extends AnyVal with ZippedIterable2[El1, El2] { private def coll1 = colls._1 private def coll2 = colls._2 - def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) - b.sizeHint(coll1) + def map[B, To](f: (El1, El2) => B)(implicit bf: BuildFrom[It1, B, To]): To = { + val b = bf.newBuilder(coll1) + b.sizeHint(coll1, delta = 0) + val elems1 = coll1.iterator val elems2 = coll2.iterator - for (el1 <- coll1) { - if (elems2.hasNext) - b += f(el1, elems2.next()) - else - return b.result() + while (elems1.hasNext && elems2.hasNext) { + b += f(elems1.next(), elems2.next()) } b.result() } - def flatMap[B, 
To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) + def flatMap[B, To](f: (El1, El2) => IterableOnce[B])(implicit bf: BuildFrom[It1, B, To]): To = { + val b = bf.newBuilder(coll1) + val elems1 = coll1.iterator val elems2 = coll2.iterator - for (el1 <- coll1) { - if (elems2.hasNext) - b ++= f(el1, elems2.next()) - else - return b.result() + while (elems1.hasNext && elems2.hasNext) { + b ++= f(elems1.next(), elems2.next()) } b.result() } - def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = { - val b1 = cbf1(coll1.repr) - val b2 = cbf2(coll2.repr) + def filter[To1, To2](f: (El1, El2) => Boolean)(implicit bf1: BuildFrom[It1, El1, To1], bf2: BuildFrom[It2, El2, To2]): (To1, To2) = { + val b1 = bf1.newBuilder(coll1) + val b2 = bf2.newBuilder(coll2) + val elems1 = coll1.iterator val elems2 = coll2.iterator - for (el1 <- coll1) { - if (elems2.hasNext) { - val el2 = elems2.next() - if (f(el1, el2)) { - b1 += el1 - b2 += el2 - } + while (elems1.hasNext && elems2.hasNext) { + val el1 = elems1.next() + val el2 = elems2.next() + if (f(el1, el2)) { + b1 += el1 + b2 += el2 } - else return (b1.result(), b2.result()) } (b1.result(), b2.result()) } - def exists(@deprecatedName('f) p: (El1, El2) => Boolean): Boolean = { + def exists(p: (El1, El2) => Boolean): Boolean = { + val elems1 = coll1.iterator val elems2 = coll2.iterator - for (el1 <- coll1) { - if (elems2.hasNext) { - if (p(el1, elems2.next())) - return true + while (elems1.hasNext && elems2.hasNext) { + if (p(elems1.next(), elems2.next())) { + return true } - else return false } false } - def forall(@deprecatedName('f) p: (El1, El2) => Boolean): Boolean = + def forall(p: (El1, El2) => Boolean): Boolean = !exists((x, y) => !p(x, y)) + def iterator: Iterator[(El1, El2)] = coll1.iterator.zip(coll2.iterator) + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty def foreach[U](f: (El1, 
El2) => U): Unit = { + val elems1 = coll1.iterator val elems2 = coll2.iterator - for (el1 <- coll1) { - if (elems2.hasNext) - f(el1, elems2.next()) - else - return + while (elems1.hasNext && elems2.hasNext) { + f(elems1.next(), elems2.next()) } } override def toString = s"($coll1, $coll2).zipped" } +@deprecated("Use scala.collection.LazyZip2.", since = "2.13.0") object Tuple2Zipped { final class Ops[T1, T2](private val x: (T1, T2)) extends AnyVal { - def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That] - (implicit w1: T1 <:< CC1[El1], - w2: T2 <:< CC2[El2], - bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2), That] + @deprecated("Use xs.lazyZip(yz).map((_, _))", since = "2.13.0") + def invert[El1, It1[a] <: Iterable[a], El2, It2[a] <: Iterable[a], That] + (implicit w1: T1 <:< It1[El1], + w2: T2 <:< It2[El2], + bf: BuildFrom[T1, (El1, El2), That] ): That = { - val buf = bf(x._1) - val it1 = x._1.toIterator - val it2 = x._2.toIterator + val buf = bf.newBuilder(x._1) + val it1 = x._1.iterator + val it2 = x._2.iterator while (it1.hasNext && it2.hasNext) buf += ((it1.next(), it2.next())) buf.result() } - def zipped[El1, Repr1, El2, Repr2] - (implicit w1: T1 => TraversableLike[El1, Repr1], - w2: T2 => IterableLike[El2, Repr2] - ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((x._1, x._2)) + @deprecated("Use xs.lazyZip(ys)", since = "2.13.0") + def zipped[El1, It1 <: Iterable[El1], El2, It2 <: Iterable[El2]] + (implicit w1: T1 => IterableOps[El1, Iterable, It1] with It1, + w2: T2 => IterableOps[El2, Iterable, It2] with It2 + ): Tuple2Zipped[El1, It1, El2, It2] = new Tuple2Zipped((w1(x._1), w2(x._2))) } } diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 94f094b10d59..b1e8763f9891 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright 
EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,144 +14,143 @@ package scala package runtime -import scala.collection.{ TraversableLike, IterableLike } -import scala.collection.generic.{ CanBuildFrom => CBF } -import scala.language.{ higherKinds, implicitConversions } +import scala.collection.{BuildFrom, IterableOps} +import scala.language.implicitConversions -/** See comment on ZippedTraversable2 - * @define Coll `ZippedTraversable3` +/** See comment on ZippedIterable2 + * @define Coll `ZippedIterable3` * @define coll collection * @define collectExample * @define willNotTerminateInf */ -trait ZippedTraversable3[+El1, +El2, +El3] extends Any { - def foreach[U](f: (El1, El2, El3) => U): Unit +@deprecated("Use scala.collection.LazyZip3.", "2.13.0") +trait ZippedIterable3[+El1, +El2, +El3] extends Any { + def iterator: Iterator[(El1, El2, El3)] + def isEmpty: Boolean } -object ZippedTraversable3 { - implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = { - new scala.collection.AbstractTraversable[(El1, El2, El3)] { - def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f) +@deprecated("Use scala.collection.LazyZip3.", "2.13.0") +object ZippedIterable3 { + implicit def zippedIterable3ToIterable[El1, El2, El3](zz: ZippedIterable3[El1, El2, El3]): Iterable[(El1, El2, El3)] = { + new scala.collection.AbstractIterable[(El1, El2, El3)] { + def iterator: Iterator[(El1, El2, El3)] = zz.iterator + override def isEmpty: Boolean = zz.isEmpty } } } -final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3])) - extends AnyVal with ZippedTraversable3[El1, El2, El3] { +@deprecated("Use scala.collection.LazyZip3.", "2.13.0") +final class Tuple3Zipped[El1, It1 <: 
Iterable[El1], El2, It2 <: Iterable[El2], El3, It3 <: Iterable[El3]](private val colls: (It1, It2, It3)) + extends AnyVal with ZippedIterable3[El1, El2, El3] { private def coll1 = colls._1 private def coll2 = colls._2 private def coll3 = colls._3 - def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) + def map[B, To](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[It1, B, To]): To = { + val b = bf.newBuilder(coll1) + val elems1 = coll1.iterator val elems2 = coll2.iterator val elems3 = coll3.iterator - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) - b += f(el1, elems2.next(), elems3.next()) - else - return b.result() + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) { + b += f(elems1.next(), elems2.next(), elems3.next()) } b.result() } - def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(coll1.repr) + def flatMap[B, To](f: (El1, El2, El3) => IterableOnce[B])(implicit bf: BuildFrom[It1, B, To]): To = { + val b = bf.newBuilder(coll1) + val elems1 = coll1.iterator val elems2 = coll2.iterator val elems3 = coll3.iterator - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) - b ++= f(el1, elems2.next(), elems3.next()) - else - return b.result() + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) { + b ++= f(elems1.next(), elems2.next(), elems3.next()) } b.result() } def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)( - implicit cbf1: CBF[Repr1, El1, To1], - cbf2: CBF[Repr2, El2, To2], - cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = { - val b1 = cbf1(coll1.repr) - val b2 = cbf2(coll2.repr) - val b3 = cbf3(coll3.repr) + implicit bf1: BuildFrom[It1, El1, To1], + bf2: BuildFrom[It2, El2, To2], + bf3: BuildFrom[It3, El3, To3]): (To1, To2, To3) = { + val b1 = bf1.newBuilder(coll1) + val b2 = bf2.newBuilder(coll2) + val b3 = bf3.newBuilder(coll3) + val elems1 = coll1.iterator val elems2 = coll2.iterator val 
elems3 = coll3.iterator - def result = (b1.result(), b2.result(), b3.result()) - - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) { - val el2 = elems2.next() - val el3 = elems3.next() - - if (f(el1, el2, el3)) { - b1 += el1 - b2 += el2 - b3 += el3 - } + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) { + val el1 = elems1.next() + val el2 = elems2.next() + val el3 = elems3.next() + + if (f(el1, el2, el3)) { + b1 += el1 + b2 += el2 + b3 += el3 } - else return result } - - result + (b1.result(), b2.result(), b3.result()) } - def exists(@deprecatedName('f) p: (El1, El2, El3) => Boolean): Boolean = { + def exists(p: (El1, El2, El3) => Boolean): Boolean = { + val elems1 = coll1.iterator val elems2 = coll2.iterator val elems3 = coll3.iterator - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) { - if (p(el1, elems2.next(), elems3.next())) - return true + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) { + if (p(elems1.next(), elems2.next(), elems3.next())) { + return true } - else return false } false } - def forall(@deprecatedName('f) p: (El1, El2, El3) => Boolean): Boolean = + def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((x, y, z) => !p(x, y, z)) + def iterator: Iterator[(El1, El2, El3)] = coll1.iterator.zip(coll2.iterator).zip(coll3.iterator).map { case ((a, b), c) => (a, b, c)} + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty def foreach[U](f: (El1, El2, El3) => U): Unit = { + val elems1 = coll1.iterator val elems2 = coll2.iterator val elems3 = coll3.iterator - for (el1 <- coll1) { - if (elems2.hasNext && elems3.hasNext) - f(el1, elems2.next(), elems3.next()) - else - return + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) { + f(elems1.next(), elems2.next(), elems3.next()) } } override def toString = s"($coll1, $coll2, $coll3).zipped" } +@deprecated("Use scala.collection.LazyZip3.", since = "2.13.0") object Tuple3Zipped { final class Ops[T1, T2, T3](private 
val x: (T1, T2, T3)) extends AnyVal { - def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That] - (implicit w1: T1 <:< CC1[El1], - w2: T2 <:< CC2[El2], - w3: T3 <:< CC3[El3], - bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That] + @deprecated("Use xs.lazyZip(yz).lazyZip(zs).map((_, _, _))", since = "2.13.0") + def invert[El1, It1[a] <: Iterable[a], El2, It2[a] <: Iterable[a], El3, It3[a] <: Iterable[a], That] + (implicit w1: T1 <:< It1[El1], + w2: T2 <:< It2[El2], + w3: T3 <:< It3[El3], + bf: BuildFrom[T1, (El1, El2, El3), That] ): That = { - val buf = bf(x._1) - val it1 = x._1.toIterator - val it2 = x._2.toIterator - val it3 = x._3.toIterator + val buf = bf.newBuilder(x._1) + val it1 = x._1.iterator + val it2 = x._2.iterator + val it3 = x._3.iterator while (it1.hasNext && it2.hasNext && it3.hasNext) buf += ((it1.next(), it2.next(), it3.next())) buf.result() } - def zipped[El1, Repr1, El2, Repr2, El3, Repr3] - (implicit w1: T1 => TraversableLike[El1, Repr1], - w2: T2 => IterableLike[El2, Repr2], - w3: T3 => IterableLike[El3, Repr3] - ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped((x._1, x._2, x._3)) + @deprecated("Use xs.lazyZip(ys).lazyZip(zs)", since = "2.13.0") + def zipped[El1, It1 <: Iterable[El1], El2, It2 <: Iterable[El2], El3, It3 <: Iterable[El3]] + (implicit w1: T1 => IterableOps[El1, Iterable, It1] with It1, + w2: T2 => IterableOps[El2, Iterable, It2] with It2, + w3: T3 => IterableOps[El3, Iterable, It3] with It3 + ): Tuple3Zipped[El1, It1, El2, It2, El3, It3] = new Tuple3Zipped((w1(x._1), w2(x._2), w3(x._3))) } } diff --git a/src/library/scala/runtime/VolatileBooleanRef.java b/src/library/scala/runtime/VolatileBooleanRef.java index 50b49a05b3d6..0436cf5ee882 100644 --- a/src/library/scala/runtime/VolatileBooleanRef.java +++ b/src/library/scala/runtime/VolatileBooleanRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * 
Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileBooleanRef implements java.io.Serializable { +public final class VolatileBooleanRef implements java.io.Serializable { private static final long serialVersionUID = -5730524563015615974L; volatile public boolean elem; diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java index 016bc890c617..23ea7ce3d32e 100644 --- a/src/library/scala/runtime/VolatileByteRef.java +++ b/src/library/scala/runtime/VolatileByteRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileByteRef implements java.io.Serializable { +public final class VolatileByteRef implements java.io.Serializable { private static final long serialVersionUID = -100666928446877072L; volatile public byte elem; diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java index 6e39a80cdda6..b8d11584556a 100644 --- a/src/library/scala/runtime/VolatileCharRef.java +++ b/src/library/scala/runtime/VolatileCharRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileCharRef implements java.io.Serializable { +public final class VolatileCharRef implements java.io.Serializable { private static final long serialVersionUID = 6537214938268005702L; volatile public char elem; diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java index b61f0ffc198d..809a27040540 100644 --- a/src/library/scala/runtime/VolatileDoubleRef.java +++ b/src/library/scala/runtime/VolatileDoubleRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,7 @@ package scala.runtime; -public class VolatileDoubleRef implements java.io.Serializable { +public final class VolatileDoubleRef implements java.io.Serializable { private static final long serialVersionUID = 8304402127373655534L; volatile public double elem; diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java index e14ed0123cba..954c7522c407 100644 --- a/src/library/scala/runtime/VolatileFloatRef.java +++ b/src/library/scala/runtime/VolatileFloatRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileFloatRef implements java.io.Serializable { +public final class VolatileFloatRef implements java.io.Serializable { private static final long serialVersionUID = -5793980990371366933L; volatile public float elem; diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java index 2553f59a39cb..a3d2c33eab71 100644 --- a/src/library/scala/runtime/VolatileIntRef.java +++ b/src/library/scala/runtime/VolatileIntRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,7 @@ package scala.runtime; -public class VolatileIntRef implements java.io.Serializable { +public final class VolatileIntRef implements java.io.Serializable { private static final long serialVersionUID = 1488197132022872888L; volatile public int elem; diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java index 5e0ebf7f1dd1..9e93e0b49e3b 100644 --- a/src/library/scala/runtime/VolatileLongRef.java +++ b/src/library/scala/runtime/VolatileLongRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileLongRef implements java.io.Serializable { +public final class VolatileLongRef implements java.io.Serializable { private static final long serialVersionUID = -3567869820105829499L; volatile public long elem; diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java index 77b770ec1314..78aef1eaff26 100644 --- a/src/library/scala/runtime/VolatileObjectRef.java +++ b/src/library/scala/runtime/VolatileObjectRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileObjectRef implements java.io.Serializable { +public final class VolatileObjectRef implements java.io.Serializable { private static final long serialVersionUID = -9055728157600312291L; volatile public T elem; diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java index 9d84f9d1314a..87a0c12dd7ed 100644 --- a/src/library/scala/runtime/VolatileShortRef.java +++ b/src/library/scala/runtime/VolatileShortRef.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,8 +12,7 @@ package scala.runtime; - -public class VolatileShortRef implements java.io.Serializable { +public final class VolatileShortRef implements java.io.Serializable { private static final long serialVersionUID = 4218441291229072313L; volatile public short elem; diff --git a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java deleted file mode 100644 index f17aa30006f4..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcB$sp extends scala.Function0, java.io.Serializable { - byte apply$mcB$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToByte(apply$mcB$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcB$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcB$sp.scala new file mode 100644 index 000000000000..cccb1a1a9430 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcB$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcB$sp extends Function0[Any] with Serializable { + def apply$mcB$sp(): Byte + override def apply(): Any = scala.runtime.BoxesRunTime.boxToByte(apply$mcB$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java deleted file mode 100644 index 961c6123a715..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcC$sp extends scala.Function0, java.io.Serializable { - char apply$mcC$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToCharacter(apply$mcC$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcC$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcC$sp.scala new file mode 100644 index 000000000000..c4e0ed82bd2f --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcC$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcC$sp extends Function0[Any] with Serializable { + def apply$mcC$sp(): Char + override def apply(): Any = scala.runtime.BoxesRunTime.boxToCharacter(apply$mcC$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java deleted file mode 100644 index ea523556ec3a..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcD$sp extends scala.Function0, java.io.Serializable { - double apply$mcD$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcD$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcD$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcD$sp.scala new file mode 100644 index 000000000000..21b9e0152222 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcD$sp extends Function0[Any] with Serializable { + def apply$mcD$sp(): Double + override def apply(): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcD$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java deleted file mode 100644 index 232dd7c2b7d7..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcF$sp extends scala.Function0, java.io.Serializable { - float apply$mcF$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcF$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcF$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcF$sp.scala new file mode 100644 index 000000000000..b90637f54c60 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcF$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcF$sp extends Function0[Any] with Serializable { + def apply$mcF$sp(): Float + override def apply(): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcF$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java deleted file mode 100644 index ce7efc254580..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcI$sp extends scala.Function0, java.io.Serializable { - int apply$mcI$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcI$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcI$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcI$sp.scala new file mode 100644 index 000000000000..dd4e0738f985 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcI$sp extends Function0[Any] with Serializable { + def apply$mcI$sp(): Int + override def apply(): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcI$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java deleted file mode 100644 index d3407ea5b648..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcJ$sp extends scala.Function0, java.io.Serializable { - long apply$mcJ$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJ$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.scala new file mode 100644 index 000000000000..25d340fa9aae --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcJ$sp extends Function0[Any] with Serializable { + def apply$mcJ$sp(): Long + override def apply(): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJ$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java deleted file mode 100644 index 2e98a0666127..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcS$sp extends scala.Function0, java.io.Serializable { - short apply$mcS$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToShort(apply$mcS$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcS$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcS$sp.scala new file mode 100644 index 000000000000..44d2a6ae934f --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcS$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcS$sp extends Function0[Any] with Serializable { + def apply$mcS$sp(): Short + override def apply(): Any = scala.runtime.BoxesRunTime.boxToShort(apply$mcS$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java deleted file mode 100644 index b5c427ac9e5b..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcV$sp extends scala.Function0, java.io.Serializable { - void apply$mcV$sp(); - - default Object apply() { apply$mcV$sp(); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcV$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcV$sp.scala new file mode 100644 index 000000000000..867d5cfe1052 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcV$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcV$sp extends Function0[Any] with Serializable { + def apply$mcV$sp(): Unit + override def apply(): Any = { + apply$mcV$sp() + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java deleted file mode 100644 index f5c8f9162356..000000000000 --- a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0$mcZ$sp extends scala.Function0, java.io.Serializable { - boolean apply$mcZ$sp(); - - default Object apply() { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZ$sp()); } -} diff --git a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.scala b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.scala new file mode 100644 index 000000000000..9f9389aa4445 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction0$mcZ$sp extends Function0[Any] with Serializable { + def apply$mcZ$sp(): Boolean + override def apply(): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZ$sp()) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java deleted file mode 100644 index dbdc62c1797e..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDD$sp extends scala.Function1, java.io.Serializable { - double apply$mcDD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.scala new file mode 100644 index 000000000000..98491234c57c --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcDD$sp extends Function1[Any, Any] with Serializable { + def apply$mcDD$sp(v1: Double): Double + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java deleted file mode 100644 index e442cba96f5c..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDF$sp extends scala.Function1, java.io.Serializable { - double apply$mcDF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.scala new file mode 100644 index 000000000000..702e7201ac7a --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcDF$sp extends Function1[Any, Any] with Serializable { + def apply$mcDF$sp(v1: Float): Double + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java deleted file mode 100644 index a541f949695b..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDI$sp extends scala.Function1, java.io.Serializable { - double apply$mcDI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.scala new file mode 100644 index 000000000000..3d1badf17393 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcDI$sp extends Function1[Any, Any] with Serializable { + def apply$mcDI$sp(v1: Int): Double + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java deleted file mode 100644 index 938eeb8dd608..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcDJ$sp extends scala.Function1, java.io.Serializable { - double apply$mcDJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.scala new file mode 100644 index 000000000000..ce0c27f8f279 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcDJ$sp extends Function1[Any, Any] with Serializable { + def apply$mcDJ$sp(v1: Long): Double + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java deleted file mode 100644 index 3efccac1f425..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFD$sp extends scala.Function1, java.io.Serializable { - float apply$mcFD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.scala new file mode 100644 index 000000000000..eb436da193bf --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcFD$sp extends Function1[Any, Any] with Serializable { + def apply$mcFD$sp(v1: Double): Float + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java deleted file mode 100644 index cb01b24028b6..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFF$sp extends scala.Function1, java.io.Serializable { - float apply$mcFF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.scala new file mode 100644 index 000000000000..c7a253449554 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcFF$sp extends Function1[Any, Any] with Serializable { + def apply$mcFF$sp(v1: Float): Float + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java deleted file mode 100644 index 325ee40d7f5a..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFI$sp extends scala.Function1, java.io.Serializable { - float apply$mcFI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.scala new file mode 100644 index 000000000000..c77a5272ab66 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcFI$sp extends Function1[Any, Any] with Serializable { + def apply$mcFI$sp(v1: Int): Float + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java deleted file mode 100644 index b65ed4897dbc..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcFJ$sp extends scala.Function1, java.io.Serializable { - float apply$mcFJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.scala new file mode 100644 index 000000000000..c0ceaae6856a --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcFJ$sp extends Function1[Any, Any] with Serializable { + def apply$mcFJ$sp(v1: Long): Float + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java deleted file mode 100644 index ad1f94e5c4f1..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcID$sp extends scala.Function1, java.io.Serializable { - int apply$mcID$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcID$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcID$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcID$sp.scala new file mode 100644 index 000000000000..742b46d23602 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcID$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcID$sp extends Function1[Any, Any] with Serializable { + def apply$mcID$sp(v1: Double): Int + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcID$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java deleted file mode 100644 index 09fd883167fd..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcIF$sp extends scala.Function1, java.io.Serializable { - int apply$mcIF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.scala new file mode 100644 index 000000000000..d5dc57ea6d49 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcIF$sp extends Function1[Any, Any] with Serializable { + def apply$mcIF$sp(v1: Float): Int + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java deleted file mode 100644 index 226ab78904ee..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcII$sp extends scala.Function1, java.io.Serializable { - int apply$mcII$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcII$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcII$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcII$sp.scala new file mode 100644 index 000000000000..1c84c49dccd9 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcII$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcII$sp extends Function1[Any, Any] with Serializable { + def apply$mcII$sp(v1: Int): Int + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcII$sp(scala.runtime.BoxesRunTime.unboxToInt(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java deleted file mode 100644 index 3f32f6d67740..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcIJ$sp extends scala.Function1, java.io.Serializable { - int apply$mcIJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.scala new file mode 100644 index 000000000000..298afef64518 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcIJ$sp extends Function1[Any, Any] with Serializable { + def apply$mcIJ$sp(v1: Long): Int + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java deleted file mode 100644 index da09801f57a9..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJD$sp extends scala.Function1, java.io.Serializable { - long apply$mcJD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.scala new file mode 100644 index 000000000000..a315b0ca7e5c --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcJD$sp extends Function1[Any, Any] with Serializable { + def apply$mcJD$sp(v1: Double): Long + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java deleted file mode 100644 index 4f0a57dd1ded..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJF$sp extends scala.Function1, java.io.Serializable { - long apply$mcJF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.scala new file mode 100644 index 000000000000..b810c01df2be --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcJF$sp extends Function1[Any, Any] with Serializable { + def apply$mcJF$sp(v1: Float): Long + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java deleted file mode 100644 index ec2ad51ab265..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJI$sp extends scala.Function1, java.io.Serializable { - long apply$mcJI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.scala new file mode 100644 index 000000000000..11a0e2c9513b --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcJI$sp extends Function1[Any, Any] with Serializable { + def apply$mcJI$sp(v1: Int): Long + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java deleted file mode 100644 index 8c4a8b198908..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcJJ$sp extends scala.Function1, java.io.Serializable { - long apply$mcJJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.scala new file mode 100644 index 000000000000..e6388f802798 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcJJ$sp extends Function1[Any, Any] with Serializable { + def apply$mcJJ$sp(v1: Long): Long + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java deleted file mode 100644 index d423bad709a0..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVD$sp extends scala.Function1, java.io.Serializable { - void apply$mcVD$sp(double v1); - - default Object apply(Object t) { apply$mcVD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.scala new file mode 100644 index 000000000000..51b919af7bd6 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcVD$sp extends Function1[Any, Any] with Serializable { + def apply$mcVD$sp(v1: Double): Unit + override def apply(t: Any): Any = { + apply$mcVD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java deleted file mode 100644 index 878f0e84a03b..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVF$sp extends scala.Function1, java.io.Serializable { - void apply$mcVF$sp(float v1); - - default Object apply(Object t) { apply$mcVF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.scala new file mode 100644 index 000000000000..7c032068bd06 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcVF$sp extends Function1[Any, Any] with Serializable { + def apply$mcVF$sp(v1: Float): Unit + override def apply(t: Any): Any = { + apply$mcVF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java deleted file mode 100644 index e004a9820ab9..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVI$sp extends scala.Function1, java.io.Serializable { - void apply$mcVI$sp(int v1); - - default Object apply(Object t) { apply$mcVI$sp(scala.runtime.BoxesRunTime.unboxToInt(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.scala new file mode 100644 index 000000000000..06c78e4d4074 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcVI$sp extends Function1[Any, Any] with Serializable { + def apply$mcVI$sp(v1: Int): Unit + override def apply(t: Any): Any = { + apply$mcVI$sp(scala.runtime.BoxesRunTime.unboxToInt(t)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java deleted file mode 100644 index fa5eaab36029..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcVJ$sp extends scala.Function1, java.io.Serializable { - void apply$mcVJ$sp(long v1); - - default Object apply(Object t) { apply$mcVJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.scala new file mode 100644 index 000000000000..ade57aaad9ea --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcVJ$sp extends Function1[Any, Any] with Serializable { + def apply$mcVJ$sp(v1: Long): Unit + override def apply(t: Any): Any = { + apply$mcVJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java deleted file mode 100644 index aa4af07a7701..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZD$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZD$sp(double v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.scala new file mode 100644 index 000000000000..070c0fed73ee --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcZD$sp extends Function1[Any, Any] with Serializable { + def apply$mcZD$sp(v1: Double): Boolean + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java deleted file mode 100644 index 8d319747633a..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZF$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZF$sp(float v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.scala new file mode 100644 index 000000000000..af1cab6c44d4 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcZF$sp extends Function1[Any, Any] with Serializable { + def apply$mcZF$sp(v1: Float): Boolean + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java deleted file mode 100644 index 41b469ac6073..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZI$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZI$sp(int v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.scala new file mode 100644 index 000000000000..d9d5f5417b7b --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcZI$sp extends Function1[Any, Any] with Serializable { + def apply$mcZI$sp(v1: Int): Boolean + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java deleted file mode 100644 index ce8794cf80be..000000000000 --- a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1$mcZJ$sp extends scala.Function1, java.io.Serializable { - boolean apply$mcZJ$sp(long v1); - - default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.scala b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.scala new file mode 100644 index 000000000000..b1ac4f5fb0c1 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction1$mcZJ$sp extends Function1[Any, Any] with Serializable { + def apply$mcZJ$sp(v1: Long): Boolean + override def apply(t: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java deleted file mode 100644 index 4a3ae9f43101..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDDD$sp extends scala.Function2, java.io.Serializable { - double apply$mcDDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.scala new file mode 100644 index 000000000000..a7dba65aa372 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDDD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDDD$sp(v1: Double, v2: Double): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java deleted file mode 100644 index 1a55fce1ec79..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDDI$sp extends scala.Function2, java.io.Serializable { - double apply$mcDDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.scala new file mode 100644 index 000000000000..f4dd19493eaa --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDDI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDDI$sp(v1: Double, v2: Int): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java deleted file mode 100644 index 44645e1d9589..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDDJ$sp extends scala.Function2, java.io.Serializable { - double apply$mcDDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.scala new file mode 100644 index 000000000000..5ded95bd8910 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDDJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDDJ$sp(v1: Double, v2: Long): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java deleted file mode 100644 index 6ada9c9903b2..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDID$sp extends scala.Function2, java.io.Serializable { - double apply$mcDID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.scala new file mode 100644 index 000000000000..2aabd59d8c8e --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDID$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDID$sp(v1: Int, v2: Double): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java deleted file mode 100644 index 9030379ae3ec..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDII$sp extends scala.Function2, java.io.Serializable { - double apply$mcDII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.scala new file mode 100644 index 000000000000..ad4467dbe07e --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDII$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDII$sp(v1: Int, v2: Int): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java deleted file mode 100644 index d1e386d7d2c3..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDIJ$sp extends scala.Function2, java.io.Serializable { - double apply$mcDIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.scala new file mode 100644 index 000000000000..4bc84d0b9a51 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDIJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDIJ$sp(v1: Int, v2: Long): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java deleted file mode 100644 index 007fc1a5d25e..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDJD$sp extends scala.Function2, java.io.Serializable { - double apply$mcDJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.scala new file mode 100644 index 000000000000..bc8f02173ad1 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDJD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDJD$sp(v1: Long, v2: Double): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java deleted file mode 100644 index e599ea8ae6ff..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDJI$sp extends scala.Function2, java.io.Serializable { - double apply$mcDJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.scala new file mode 100644 index 000000000000..f139ad404716 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDJI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDJI$sp(v1: Long, v2: Int): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java deleted file mode 100644 index 59a5369d793d..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcDJJ$sp extends scala.Function2, java.io.Serializable { - double apply$mcDJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.scala new file mode 100644 index 000000000000..8d3b45df6e42 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcDJJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcDJJ$sp(v1: Long, v2: Long): Double + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java deleted file mode 100644 index 8037e1daf1ff..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFDD$sp extends scala.Function2, java.io.Serializable { - float apply$mcFDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.scala new file mode 100644 index 000000000000..4381735c8973 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFDD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFDD$sp(v1: Double, v2: Double): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java deleted file mode 100644 index d44f63ab127f..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFDI$sp extends scala.Function2, java.io.Serializable { - float apply$mcFDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.scala new file mode 100644 index 000000000000..e3e5b9a9026d --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFDI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFDI$sp(v1: Double, v2: Int): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java deleted file mode 100644 index f9db67bc4ca7..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFDJ$sp extends scala.Function2, java.io.Serializable { - float apply$mcFDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.scala new file mode 100644 index 000000000000..1a26782f59dc --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFDJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFDJ$sp(v1: Double, v2: Long): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java deleted file mode 100644 index b46abeb21b63..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFID$sp extends scala.Function2, java.io.Serializable { - float apply$mcFID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.scala new file mode 100644 index 000000000000..08f8ac2872d6 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFID$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFID$sp(v1: Int, v2: Double): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java deleted file mode 100644 index c2a7e363fd21..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFII$sp extends scala.Function2, java.io.Serializable { - float apply$mcFII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.scala new file mode 100644 index 000000000000..8a482dfa722f --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFII$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFII$sp(v1: Int, v2: Int): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java deleted file mode 100644 index 70333183785d..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFIJ$sp extends scala.Function2, java.io.Serializable { - float apply$mcFIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.scala new file mode 100644 index 000000000000..a3f02eb64c01 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFIJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFIJ$sp(v1: Int, v2: Long): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java deleted file mode 100644 index 97f08283b0c4..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFJD$sp extends scala.Function2, java.io.Serializable { - float apply$mcFJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.scala new file mode 100644 index 000000000000..a78fed85f23d --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFJD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFJD$sp(v1: Long, v2: Double): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java deleted file mode 100644 index c2cf343bb1cc..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFJI$sp extends scala.Function2, java.io.Serializable { - float apply$mcFJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.scala new file mode 100644 index 000000000000..52dd6c11fe40 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFJI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFJI$sp(v1: Long, v2: Int): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java deleted file mode 100644 index 5c66d8fcd3f0..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcFJJ$sp extends scala.Function2, java.io.Serializable { - float apply$mcFJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.scala new file mode 100644 index 000000000000..15e91de115c1 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcFJJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcFJJ$sp(v1: Long, v2: Long): Float + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java deleted file mode 100644 index b54e0d5dfebe..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIDD$sp extends scala.Function2, java.io.Serializable { - int apply$mcIDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.scala new file mode 100644 index 000000000000..09f7188d5447 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIDD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIDD$sp(v1: Double, v2: Double): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java deleted file mode 100644 index f618c54d6b80..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIDI$sp extends scala.Function2, java.io.Serializable { - int apply$mcIDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.scala new file mode 100644 index 000000000000..d53a99a61972 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIDI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIDI$sp(v1: Double, v2: Int): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java deleted file mode 100644 index 8022caac201b..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIDJ$sp extends scala.Function2, java.io.Serializable { - int apply$mcIDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.scala new file mode 100644 index 000000000000..eedd1db3df17 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIDJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIDJ$sp(v1: Double, v2: Long): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java deleted file mode 100644 index 345c09d78030..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIID$sp extends scala.Function2, java.io.Serializable { - int apply$mcIID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.scala new file mode 100644 index 000000000000..067044482034 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIID$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIID$sp(v1: Int, v2: Double): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java deleted file mode 100644 index a6a3fd7760cf..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIII$sp extends scala.Function2, java.io.Serializable { - int apply$mcIII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.scala new file mode 100644 index 000000000000..c868fad4e8f5 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIII$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIII$sp(v1: Int, v2: Int): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java deleted file mode 100644 index eb71410a18a5..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIIJ$sp extends scala.Function2, java.io.Serializable { - int apply$mcIIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.scala new file mode 100644 index 000000000000..c23e514092aa --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIIJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIIJ$sp(v1: Int, v2: Long): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java deleted file mode 100644 index 1e2c3e5ad7d0..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIJD$sp extends scala.Function2, java.io.Serializable { - int apply$mcIJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.scala new file mode 100644 index 000000000000..48f58e3d3859 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIJD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIJD$sp(v1: Long, v2: Double): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java deleted file mode 100644 index fe59c998c6cd..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIJI$sp extends scala.Function2, java.io.Serializable { - int apply$mcIJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.scala new file mode 100644 index 000000000000..c9ce9c1186d3 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIJI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIJI$sp(v1: Long, v2: Int): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java deleted file mode 100644 index 10099ed319b6..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcIJJ$sp extends scala.Function2, java.io.Serializable { - int apply$mcIJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.scala new file mode 100644 index 000000000000..7749b84ebefe --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcIJJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcIJJ$sp(v1: Long, v2: Long): Int + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java deleted file mode 100644 index 14921383cc07..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJDD$sp extends scala.Function2, java.io.Serializable { - long apply$mcJDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.scala new file mode 100644 index 000000000000..4e2885c0c8b1 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJDD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJDD$sp(v1: Double, v2: Double): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java deleted file mode 100644 index dc998df44210..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJDI$sp extends scala.Function2, java.io.Serializable { - long apply$mcJDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.scala new file mode 100644 index 000000000000..29e102ae3e01 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJDI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJDI$sp(v1: Double, v2: Int): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java deleted file mode 100644 index 493ada4e5b1c..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJDJ$sp extends scala.Function2, java.io.Serializable { - long apply$mcJDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.scala new file mode 100644 index 000000000000..537d83a4e9d5 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJDJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJDJ$sp(v1: Double, v2: Long): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java deleted file mode 100644 index 4f99b940eb2b..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJID$sp extends scala.Function2, java.io.Serializable { - long apply$mcJID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.scala new file mode 100644 index 000000000000..c943f1bbcd1e --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJID$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJID$sp(v1: Int, v2: Double): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java deleted file mode 100644 index dd3d5c2e98e5..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJII$sp extends scala.Function2, java.io.Serializable { - long apply$mcJII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.scala new file mode 100644 index 000000000000..387d6424c8d1 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJII$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJII$sp(v1: Int, v2: Int): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java deleted file mode 100644 index d9f4801f2ba8..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJIJ$sp extends scala.Function2, java.io.Serializable { - long apply$mcJIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.scala new file mode 100644 index 000000000000..ef33074c5b5e --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJIJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJIJ$sp(v1: Int, v2: Long): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java deleted file mode 100644 index cc2e12a96306..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJJD$sp extends scala.Function2, java.io.Serializable { - long apply$mcJJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.scala new file mode 100644 index 000000000000..314930812281 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJJD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJJD$sp(v1: Long, v2: Double): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java deleted file mode 100644 index ee5c626fedca..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJJI$sp extends scala.Function2, java.io.Serializable { - long apply$mcJJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.scala new file mode 100644 index 000000000000..87bc0de39256 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJJI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJJI$sp(v1: Long, v2: Int): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java deleted file mode 100644 index d145a115d44d..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcJJJ$sp extends scala.Function2, java.io.Serializable { - long apply$mcJJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.scala new file mode 100644 index 000000000000..391582994fc6 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcJJJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcJJJ$sp(v1: Long, v2: Long): Long + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToLong(apply$mcJJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java deleted file mode 100644 index 292e0a18211b..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVDD$sp extends scala.Function2, java.io.Serializable { - void apply$mcVDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { apply$mcVDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.scala new file mode 100644 index 000000000000..056fee1df387 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVDD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVDD$sp(v1: Double, v2: Double): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java deleted file mode 100644 index 77c331523d26..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVDI$sp extends scala.Function2, java.io.Serializable { - void apply$mcVDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { apply$mcVDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.scala new file mode 100644 index 000000000000..0683881a2923 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVDI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVDI$sp(v1: Double, v2: Int): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java deleted file mode 100644 index 0c32c921e8a4..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVDJ$sp extends scala.Function2, java.io.Serializable { - void apply$mcVDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { apply$mcVDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.scala new file mode 100644 index 000000000000..9a50555c1921 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVDJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVDJ$sp(v1: Double, v2: Long): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java deleted file mode 100644 index f0a3a7a19d35..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVID$sp extends scala.Function2, java.io.Serializable { - void apply$mcVID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { apply$mcVID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.scala new file mode 100644 index 000000000000..b0df076be14f --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVID$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVID$sp(v1: Int, v2: Double): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java deleted file mode 100644 index f9c715e9a5b4..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVII$sp extends scala.Function2, java.io.Serializable { - void apply$mcVII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { apply$mcVII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.scala new file mode 100644 index 000000000000..822fa89df106 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVII$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVII$sp(v1: Int, v2: Int): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java deleted file mode 100644 index e3ef11544333..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVIJ$sp extends scala.Function2, java.io.Serializable { - void apply$mcVIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { apply$mcVIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.scala new file mode 100644 index 000000000000..b922343162e6 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVIJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVIJ$sp(v1: Int, v2: Long): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java deleted file mode 100644 index f344e8a47e95..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVJD$sp extends scala.Function2, java.io.Serializable { - void apply$mcVJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { apply$mcVJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.scala new file mode 100644 index 000000000000..bb514d145017 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVJD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVJD$sp(v1: Long, v2: Double): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java deleted file mode 100644 index 8a5329a37249..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVJI$sp extends scala.Function2, java.io.Serializable { - void apply$mcVJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { apply$mcVJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.scala new file mode 100644 index 000000000000..079e48276a7d --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVJI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVJI$sp(v1: Long, v2: Int): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java deleted file mode 100644 index bd1e1be8dfbe..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcVJJ$sp extends scala.Function2, java.io.Serializable { - void apply$mcVJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { apply$mcVJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.scala new file mode 100644 index 000000000000..4b80f04c9dba --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcVJJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcVJJ$sp(v1: Long, v2: Long): Unit + override def apply(v1: Any, v2: Any): Any = { + apply$mcVJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)) + scala.runtime.BoxedUnit.UNIT + } +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java deleted file mode 100644 index 2d4f462c9b08..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZDD$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZDD$sp(double v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.scala new file mode 100644 index 000000000000..0918660b802d --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZDD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZDD$sp(v1: Double, v2: Double): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java deleted file mode 100644 index 7f06d9cfe768..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZDI$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZDI$sp(double v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.scala new file mode 100644 index 000000000000..4514d78b8f3a --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZDI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZDI$sp(v1: Double, v2: Int): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java deleted file mode 100644 index 9e0bc7f7fffa..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZDJ$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZDJ$sp(double v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.scala new file mode 100644 index 000000000000..daebfd82f041 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZDJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZDJ$sp(v1: Double, v2: Long): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java deleted file mode 100644 index a2433ad05c5f..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZID$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZID$sp(int v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.scala new file mode 100644 index 000000000000..c0a06d1c9373 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZID$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZID$sp(v1: Int, v2: Double): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java deleted file mode 100644 index 089c3f6292b5..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZII$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZII$sp(int v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.scala new file mode 100644 index 000000000000..c49fea6a2543 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZII$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZII$sp(v1: Int, v2: Int): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java deleted file mode 100644 index fab7a530c3ca..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZIJ$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZIJ$sp(int v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.scala new file mode 100644 index 000000000000..bbf003ebd17b --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZIJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZIJ$sp(v1: Int, v2: Long): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java deleted file mode 100644 index dca526b7fb7a..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZJD$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZJD$sp(long v1, double v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.scala new file mode 100644 index 000000000000..ab08030714e2 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZJD$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZJD$sp(v1: Long, v2: Double): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java deleted file mode 100644 index 28e0243c88de..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZJI$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZJI$sp(long v1, int v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.scala new file mode 100644 index 000000000000..5f32af16f201 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZJI$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZJI$sp(v1: Long, v2: Int): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))) +} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java deleted file mode 100644 index 864c7139dfe3..000000000000 --- a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2$mcZJJ$sp extends scala.Function2, java.io.Serializable { - boolean apply$mcZJJ$sp(long v1, long v2); - - default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.scala b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.scala new file mode 100644 index 000000000000..b588644fb0c4 --- /dev/null +++ b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.scala @@ -0,0 +1,18 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime.java8 + +@FunctionalInterface trait JFunction2$mcZJJ$sp extends Function2[Any, Any, Any] with Serializable { + def apply$mcZJJ$sp(v1: Long, v2: Long): Boolean + override def apply(v1: Any, v2: Any): Any = scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))) +} diff --git a/src/library/scala/runtime/package.scala b/src/library/scala/runtime/package.scala deleted file mode 100644 index b1b5f473f864..000000000000 --- a/src/library/scala/runtime/package.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala - -package object runtime { } diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala index af9dcd0cc91e..c099612e1370 100644 --- a/src/library/scala/specialized.scala +++ b/src/library/scala/specialized.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -25,12 +25,10 @@ import Specializable._ * {{{ * class MyList[@specialized(Int, Double, Boolean) T] .. 
* }}} - * - * @since 2.8 */ // class tspecialized[T](group: Group[T]) extends scala.annotation.StaticAnnotation { -class specialized(group: SpecializedGroup) extends scala.annotation.StaticAnnotation { +final class specialized(group: SpecializedGroup) extends scala.annotation.StaticAnnotation { def this(types: Specializable*) = this(new Group(types.toList)) def this() = this(Primitives) } diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala index 94e4c7c56fd7..70066c070c37 100644 --- a/src/library/scala/sys/BooleanProp.scala +++ b/src/library/scala/sys/BooleanProp.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -52,10 +52,14 @@ object BooleanProp { def set(newValue: String) = "" + value def setValue[T1 >: Boolean](newValue: T1): Boolean = value def get: String = "" + value - val clear, enable, disable, toggle = () def option = if (isSet) Some(value) else None //def or[T1 >: Boolean](alt: => T1): T1 = if (value) true else alt + def clear() = () + def enable() = () + def disable() = () + def toggle() = () + protected def zero = false } diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala index 35b260951cf6..7645e4ac8993 100644 --- a/src/library/scala/sys/Prop.scala +++ b/src/library/scala/sys/Prop.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,9 +18,6 @@ package sys * is not a requirement. * * See `scala.sys.SystemProperties` for an example usage. - * - * @author Paul Phillips - * @since 2.9 */ trait Prop[+T] { /** The full name of the property, e.g., "java.awt.headless". 
diff --git a/src/library/scala/sys/PropImpl.scala b/src/library/scala/sys/PropImpl.scala index 3a73d3df6aa1..390c5c9c576d 100644 --- a/src/library/scala/sys/PropImpl.scala +++ b/src/library/scala/sys/PropImpl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala index aa2d2a50d05e..d7a8ae5bb3ca 100644 --- a/src/library/scala/sys/ShutdownHookThread.scala +++ b/src/library/scala/sys/ShutdownHookThread.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,16 +15,13 @@ package sys /** A minimal Thread wrapper to enhance shutdown hooks. It knows * how to unregister itself. - * - * @author Paul Phillips - * @since 2.9 */ -class ShutdownHookThread private (name: String) extends Thread(name) { - def remove() = runtime removeShutdownHook this +class ShutdownHookThread private (runnable: Runnable, name: String) extends Thread(runnable, name) { + def remove() = Runtime.getRuntime removeShutdownHook this } object ShutdownHookThread { - private var hookNameCount: Int = 0 + private[this] var hookNameCount: Int = 0 private def hookName(): String = synchronized { hookNameCount += 1 "shutdownHook" + hookNameCount @@ -33,10 +30,8 @@ object ShutdownHookThread { * given code. 
*/ def apply(body: => Unit): ShutdownHookThread = { - val t = new ShutdownHookThread(hookName()) { - override def run() = body - } - runtime addShutdownHook t + val t = new ShutdownHookThread(() => body, hookName()) + Runtime.getRuntime addShutdownHook t t } } diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index 799921b99151..1f848a73358e 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,12 +13,11 @@ package scala package sys -import scala.collection.{ mutable, Iterator } -import scala.collection.JavaConverters._ +import scala.collection.{mutable, Iterator} +import scala.jdk.CollectionConverters._ import java.security.AccessControlException import scala.language.implicitConversions - /** A bidirectional map wrapping the java System properties. * Changes to System properties will be immediately visible in the map, * and modifications made to the map will be immediately applied to the @@ -27,15 +26,11 @@ import scala.language.implicitConversions * will be caught and discarded. 
* @define Coll `collection.mutable.Map` * @define coll mutable map - * - * @author Paul Phillips - * @since 2.9 */ class SystemProperties -extends mutable.AbstractMap[String, String] - with mutable.Map[String, String] { +extends mutable.AbstractMap[String, String] { - override def empty = mutable.Map[String, String]() + override def empty: mutable.Map[String, String] = mutable.Map[String, String]() override def default(key: String): String = null def iterator: Iterator[(String, String)] = wrapAccess { @@ -43,18 +38,21 @@ extends mutable.AbstractMap[String, String] names map (k => (k, ps getProperty k)) filter (_._2 ne null) } getOrElse Iterator.empty + override def isEmpty: Boolean = iterator.isEmpty def names: Iterator[String] = wrapAccess ( System.getProperties().stringPropertyNames().asScala.iterator ) getOrElse Iterator.empty - def get(key: String) = + def get(key: String): Option[String] = wrapAccess(Option(System.getProperty(key))) flatMap (x => x) - override def contains(key: String) = + override def contains(key: String): Boolean = wrapAccess(super.contains(key)) exists (x => x) - def -= (key: String): this.type = { wrapAccess(System.clearProperty(key)) ; this } - def += (kv: (String, String)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } + override def clear(): Unit = wrapAccess(System.getProperties().clear()) + def subtractOne (key: String): this.type = { wrapAccess(System.clearProperty(key)) ; this } + def addOne (kv: (String, String)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } + @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrapAccess[T](body: => T): Option[T] = try Some(body) catch { case _: AccessControlException => None } } @@ -69,7 +67,7 @@ object SystemProperties { /** An unenforceable, advisory only place to do some synchronization when * mutating system properties. 
*/ - def exclusively[T](body: => T) = this synchronized body + def exclusively[T](body: => T): T = this synchronized body implicit def systemPropertiesToCompanion(p: SystemProperties): SystemProperties.type = this @@ -90,7 +88,5 @@ object SystemProperties { lazy val preferIPv4Stack: BooleanProp = BooleanProp.keyExists(PreferIPv4StackKey) lazy val preferIPv6Addresses: BooleanProp = BooleanProp.keyExists(PreferIPv6AddressesKey) lazy val noTraceSuppression: BooleanProp = BooleanProp.valueIsTrue(NoTraceSuppressionKey) - @deprecated("use noTraceSuppression", "2.12.0") - def noTraceSupression = noTraceSuppression } diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala index 61453ae74f52..122f19d12c3a 100644 --- a/src/library/scala/sys/package.scala +++ b/src/library/scala/sys/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,15 +12,12 @@ package scala -import scala.collection.immutable -import scala.collection.JavaConverters._ +import scala.collection.immutable.ArraySeq +import scala.jdk.CollectionConverters._ /** The package object `scala.sys` contains methods for reading * and altering core aspects of the virtual machine as well as the * world outside of it. - * - * @author Paul Phillips - * @since 2.9 */ package object sys { /** Throw a new RuntimeException with the supplied message. @@ -57,11 +54,21 @@ package object sys { */ def props: SystemProperties = new SystemProperties + // TODO: consider whether layering a Map on top of Java's properties is really needed -- we could simply provide: + // def prop(p: String) = Option(System.getProperty(p)) + /** An immutable Map representing the current system environment. + * + * If lookup fails, use `System.getenv(_)` for case-insensitive lookup + * on a certain platform. 
If that also fails, throw `NoSuchElementException`. * * @return a Map containing the system environment variables. */ - def env: immutable.Map[String, String] = immutable.Map(System.getenv().asScala.toSeq: _*) + def env: Map[String, String] = Map.from(System.getenv().asScala).withDefault { v => + val s = System.getenv(v) + if (s == null) throw new NoSuchElementException(v) + s + } /** Register a shutdown hook to be run when the VM exits. * The hook is automatically registered: the returned value can be ignored, @@ -85,6 +92,6 @@ package object sys { val tarray = new Array[Thread](num) val got = Thread.enumerate(tarray) - tarray take got + ArraySeq.unsafeWrapArray(tarray).take(got) } } diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index 31acd4aa73dc..a242fe312bbf 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,7 +17,6 @@ package process import processInternal._ import java.io.{ BufferedReader, InputStreamReader, FilterInputStream, FilterOutputStream } import java.util.concurrent.LinkedBlockingQueue -import scala.collection.immutable.Stream import scala.annotation.tailrec /** @@ -39,16 +38,38 @@ object BasicIO { /** Used to separate lines in the `processFully` function that takes `Appendable`. 
*/ final val Newline = System.lineSeparator + private[process] final class LazilyListed[T]( + val process: T => Unit, + val done: Int => Unit, + val lazyList: LazyList[T] + ) + + private[process] object LazilyListed { + def apply[T](nonzeroException: Boolean, capacity: Integer): LazilyListed[T] = { + val queue = new LinkedBlockingQueue[Either[Int, T]](capacity) + val ll = LazyList.unfold(queue) { q => + q.take() match { + case Left(0) => None + case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else None + case Right(s) => Some((s, q)) + } + } + new LazilyListed((s: T) => queue put Right(s), code => queue put Left(code), ll) + } + } + + @deprecated("internal", since = "2.13.4") private[process] final class Streamed[T]( val process: T => Unit, val done: Int => Unit, val stream: () => Stream[T] ) + @deprecated("internal", since = "2.13.4") private[process] object Streamed { - def apply[T](nonzeroException: Boolean): Streamed[T] = { - val q = new LinkedBlockingQueue[Either[Int, T]] - def next(): Stream[T] = q.take match { + def apply[T](nonzeroException: Boolean, capacity: Integer): Streamed[T] = { + val q = new LinkedBlockingQueue[Either[Int, T]](capacity) + def next(): Stream[T] = q.take() match { case Left(0) => Stream.empty case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty case Right(s) => Stream.cons(s, next()) @@ -58,7 +79,7 @@ object BasicIO { } private[process] trait Uncloseable extends Closeable { - final override def close() { } + final override def close(): Unit = () } private[process] object Uncloseable { def apply(in: InputStream): InputStream = new FilterInputStream(in) with Uncloseable { } @@ -87,7 +108,7 @@ object BasicIO { def apply(withIn: Boolean, output: String => Unit, log: Option[ProcessLogger]) = new ProcessIO(input(withIn), processFully(output), getErr(log)) - /** Creates a `ProcessIO` that appends its output to a `StringBuffer`. 
It can + /** Creates a `ProcessIO` that appends its output to an `Appendable`. It can * attach the process input to stdin, and it will either send the error * stream to stderr, or to a `ProcessLogger`. * @@ -101,13 +122,13 @@ object BasicIO { * }}} * * @param withIn True if the process input should be attached to stdin. - * @param buffer A `StringBuffer` which will receive the process normal + * @param buffer An `Appendable` which will receive the process normal * output. * @param log An optional `ProcessLogger` to which the output should be * sent. If `None`, output will be sent to stderr. * @return A `ProcessIO` with the characteristics above. */ - def apply(withIn: Boolean, buffer: StringBuffer, log: Option[ProcessLogger]) = + def apply(withIn: Boolean, buffer: Appendable, log: Option[ProcessLogger]) = new ProcessIO(input(withIn), processFully(buffer), getErr(log)) /** Creates a `ProcessIO` from a `ProcessLogger` . It can attach the @@ -166,23 +187,24 @@ object BasicIO { */ def processFully(processLine: String => Unit): InputStream => Unit = in => { val reader = new BufferedReader(new InputStreamReader(in)) - try processLinesFully(processLine)(reader.readLine) + try processLinesFully(processLine)(() => reader.readLine()) finally reader.close() } /** Calls `processLine` with the result of `readLine` until the latter returns * `null` or the current thread is interrupted. 
*/ - def processLinesFully(processLine: String => Unit)(readLine: () => String) { - def working = (Thread.currentThread.isInterrupted == false) + def processLinesFully(processLine: String => Unit)(readLine: () => String): Unit = { + def working = !Thread.currentThread.isInterrupted def halting = { Thread.currentThread.interrupt(); null } + @tailrec def readFully(): Unit = if (working) { val line = try readLine() catch { case _: InterruptedException => halting - case e: IOException if !working => halting + case _: IOException if !working => halting } if (line != null) { processLine(line) @@ -196,13 +218,16 @@ object BasicIO { def connectToIn(o: OutputStream): Unit = transferFully(Uncloseable protect stdin, o) /** Returns a function `OutputStream => Unit` that either reads the content - * from stdin or does nothing. This function can be used by + * from stdin or does nothing but close the stream. This function can be used by * [[scala.sys.process.ProcessIO]]. */ - def input(connect: Boolean): OutputStream => Unit = { outputToProcess => - if (connect) connectToIn(outputToProcess) - outputToProcess.close() - } + def input(connect: Boolean): OutputStream => Unit = if (connect) connectToStdIn else connectNoOp + + /** A sentinel value telling ProcessBuilderImpl to redirect. */ + private[process] val connectToStdIn: OutputStream => Unit = _ => () + + /** A sentinel value telling ProcessBuilderImpl not to process. */ + private[process] val connectNoOp: OutputStream => Unit = _ => () /** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. 
*/ def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput)) @@ -232,9 +257,9 @@ object BasicIO { buffer append Newline } - private[this] def transferFullyImpl(in: InputStream, out: OutputStream) { + private[this] def transferFullyImpl(in: InputStream, out: OutputStream): Unit = { val buffer = new Array[Byte](BufferSize) - @tailrec def loop() { + @tailrec def loop(): Unit = { val byteCount = in.read(buffer) if (byteCount > 0) { out.write(buffer, 0, byteCount) diff --git a/src/library/scala/sys/process/Parser.scala b/src/library/scala/sys/process/Parser.scala new file mode 100644 index 000000000000..e9a25a0f9366 --- /dev/null +++ b/src/library/scala/sys/process/Parser.scala @@ -0,0 +1,116 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.sys.process + +import scala.annotation.tailrec + +/** A simple enough command line parser using shell quote conventions. + */ +private[scala] object Parser { + private final val DQ = '"' + private final val SQ = '\'' + private final val EOF = -1 + + /** Split the line into tokens separated by whitespace or quotes. + * + * @return either an error message or reverse list of tokens + */ + def tokenize(line: String, errorFn: String => Unit): List[String] = { + import Character.isWhitespace + import java.lang.{StringBuilder => Builder} + import collection.mutable.ArrayBuffer + + var accum: List[String] = Nil + var pos = 0 + var start = 0 + val qpos = new ArrayBuffer[Int](16) // positions of paired quotes + + def cur: Int = if (done) EOF else line.charAt(pos) + def bump() = pos += 1 + def done = pos >= line.length + + // Skip to the next quote as given. 
+ def skipToQuote(q: Int): Boolean = { + var escaped = false + def terminal: Boolean = cur match { + case _ if escaped => escaped = false ; false + case '\\' => escaped = true ; false + case `q` | EOF => true + case _ => false + } + while (!terminal) bump() + !done + } + // Skip to a word boundary, where words can be quoted and quotes can be escaped + def skipToDelim(): Boolean = { + var escaped = false + def quote() = { qpos += pos ; bump() } + @tailrec def advance(): Boolean = cur match { + case _ if escaped => escaped = false ; bump() ; advance() + case '\\' => escaped = true ; bump() ; advance() + case q @ (DQ | SQ) => { quote() ; skipToQuote(q) } && { quote() ; advance() } + case EOF => true + case c if isWhitespace(c) => true + case _ => bump(); advance() + } + advance() + } + def skipWhitespace() = while (isWhitespace(cur)) bump() + def copyText() = { + val buf = new Builder + var p = start + var i = 0 + while (p < pos) { + if (i >= qpos.size) { + buf.append(line, p, pos) + p = pos + } else if (p == qpos(i)) { + buf.append(line, qpos(i)+1, qpos(i+1)) + p = qpos(i+1)+1 + i += 2 + } else { + buf.append(line, p, qpos(i)) + p = qpos(i) + } + } + buf.toString + } + def text() = { + val res = + if (qpos.isEmpty) line.substring(start, pos) + else if (qpos(0) == start && qpos(1) == pos) line.substring(start+1, pos-1) + else copyText() + qpos.clear() + res + } + def badquote() = errorFn(s"Unmatched quote [${qpos.last}](${line.charAt(qpos.last)})") + def badescape() = errorFn("trailing backslash") + + @tailrec def loop(): List[String] = { + skipWhitespace() + start = pos + if (done) accum.reverse + else if (!skipToDelim()) { badquote(); Nil } + else if (pos > line.length) { badescape(); Nil } + else { + accum ::= text() + loop() + } + } + loop() + } + + class ParseException(msg: String) extends RuntimeException(msg) + + def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) +} diff --git 
a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala index 485ca97fa0f5..531971125e6d 100644 --- a/src/library/scala/sys/process/Process.scala +++ b/src/library/scala/sys/process/Process.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,6 +18,7 @@ import processInternal._ import ProcessBuilder._ import scala.language.implicitConversions + /** Represents a process that is running or has finished running. * It may be a compound process with several underlying native processes (such as `a #&& b`). * @@ -60,14 +61,14 @@ trait ProcessCreation { * * @example {{{ apply("cat" :: files) }}} */ - def apply(command: Seq[String]): ProcessBuilder = apply(command, None) + def apply(command: scala.collection.Seq[String]): ProcessBuilder = apply(command, None) /** Creates a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`, * and a sequence of `String` representing the arguments. * * @example {{{ apply("cat", files) }}} */ - def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command +: arguments, None) + def apply(command: String, arguments: scala.collection.Seq[String]): ProcessBuilder = apply(command +: arguments, None) /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra * environment variables. 
@@ -82,7 +83,7 @@ trait ProcessCreation { * * @example {{{ apply("java" :: javaArgs, new java.io.File("/opt/app"), "CLASSPATH" -> "library.jar") }}} */ - def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder = + def apply(command: scala.collection.Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder = apply(command, Some(cwd), extraEnv: _*) /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to @@ -90,21 +91,15 @@ trait ProcessCreation { * * @example {{{ apply("java", params.get("cwd"), "CLASSPATH" -> "library.jar") }}} */ - def apply(command: String, cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { - apply(command.split("""\s+"""), cwd, extraEnv : _*) - // not smart to use this on windows, because CommandParser uses \ to escape ". - /*CommandParser.parse(command) match { - case Left(errorMsg) => error(errorMsg) - case Right((cmd, args)) => apply(cmd :: args, cwd, extraEnv : _*) - }*/ - } + def apply(command: String, cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = + apply(Parser.tokenize(command), cwd, extraEnv: _*) /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to * `File` and extra environment variables. * * @example {{{ apply("java" :: javaArgs, params.get("cwd"), "CLASSPATH" -> "library.jar") }}} */ - def apply(command: Seq[String], cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { + def apply(command: scala.collection.Seq[String], cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { val jpb = new JProcessBuilder(command.toArray: _*) cwd foreach (jpb directory _) extraEnv foreach { case (k, v) => jpb.environment.put(k, v) } @@ -145,7 +140,7 @@ trait ProcessCreation { /** Creates a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of * something else for which there's an implicit conversion to `Source`. 
*/ - def applySeq[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = builders.map(convert) + def applySeq[T](builders: scala.collection.Seq[T])(implicit convert: T => Source): scala.collection.Seq[Source] = builders.map(convert) /** Creates a [[scala.sys.process.ProcessBuilder]] from one or more * [[scala.sys.process.ProcessBuilder.Source]], which can then be @@ -173,9 +168,9 @@ trait ProcessCreation { * * This will concatenate the output of all sources. */ - def cat(files: Seq[Source]): ProcessBuilder = { + def cat(files: scala.collection.Seq[Source]): ProcessBuilder = { require(files.nonEmpty) - files map (_.cat) reduceLeft (_ #&& _) + files.map(_.cat).reduceLeft(_ #&& _) } } @@ -190,7 +185,7 @@ trait ProcessImplicits { /** Return a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence * of values for which an implicit conversion to `Source` is available. */ - implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = applySeq(builders) + implicit def buildersToProcess[T](builders: scala.collection.Seq[T])(implicit convert: T => Source): scala.collection.Seq[Source] = applySeq(builders) /** Implicitly convert a `java.lang.ProcessBuilder` into a Scala one. */ implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder) @@ -210,7 +205,7 @@ trait ProcessImplicits { * input to a process. For example: * {{{ * import scala.sys.process._ - * Seq("xmllint", "--html", "-") #< new java.net.URL("http://www.scala-lang.org") #> new java.io.File("fixed.html") ! + * Seq("xmllint", "--html", "-") #< new java.net.URL("https://www.scala-lang.org") #> new java.io.File("fixed.html") ! * }}} */ implicit def urlToProcess(url: URL): URLBuilder = apply(url) @@ -223,5 +218,5 @@ trait ProcessImplicits { * be the command to be executed, and the remaining will be its arguments. * When using this, arguments may contain spaces. 
*/ - implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command) + implicit def stringSeqToProcess(command: scala.collection.Seq[String]): ProcessBuilder = apply(command) } diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index d598b2b8ca2c..bf1c1507707d 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,7 @@ package sys package process import processInternal._ -import ProcessBuilder._ +import ProcessBuilder.{Sink, Source} /** Represents a sequence of one or more external processes that can be * executed. A `ProcessBuilder` can be a single external process, or a @@ -33,8 +33,8 @@ import ProcessBuilder._ * // Executes "ls" and sends output to stdout * "ls".! * - * // Execute "ls" and assign a `Stream[String]` of its output to "contents". - * val contents = Process("ls").lineStream + * // Execute "ls" and assign a `LazyList[String]` of its output to "contents". + * val contents = Process("ls").lazyLines * * // Here we use a `Seq` to make the parameter whitespace-safe * def contentsOf(dir: String): String = Seq("ls", dir).!! @@ -67,7 +67,7 @@ import ProcessBuilder._ * something else such as a `java.io.File` or a `java.io.InputStream`. * For example: * {{{ - * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") ! + * new URL("https://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") ! * }}} * * ==Starting Processes== @@ -84,29 +84,29 @@ import ProcessBuilder._ * of the last one in the chain of execution. * - `!!`: blocks until all external commands exit, and returns a `String` * with the output generated. 
- * - `lineStream`: returns immediately like `run`, and the output being generated - * is provided through a `Stream[String]`. Getting the next element of that - * `Stream` may block until it becomes available. This method will throw an + * - `lazyLines`: returns immediately like `run`, and the output being generated + * is provided through a `LazyList[String]`. Getting the next element of that + * `LazyList` may block until it becomes available. This method will throw an * exception if the return code is different than zero -- if this is not - * desired, use the `lineStream_!` method. + * desired, use the `lazyLines_!` method. * * ==Handling Input and Output== * * If not specified, the input of the external commands executed with `run` or * `!` will not be tied to anything, and the output will be redirected to the - * stdout and stderr of the Scala process. For the methods `!!` and `lineStream`, no + * stdout and stderr of the Scala process. For the methods `!!` and `lazyLines`, no * input will be provided, and the output will be directed according to the * semantics of these methods. * * Some methods will cause stdin to be used as input. Output can be controlled - * with a [[scala.sys.process.ProcessLogger]] -- `!!` and `lineStream` will only + * with a [[scala.sys.process.ProcessLogger]] -- `!!` and `lazyLines` will only * redirect error output when passed a `ProcessLogger`. If one desires full * control over input and output, then a [[scala.sys.process.ProcessIO]] can be * used with `run`. * - * For example, we could silence the error output from `lineStream_!` like this: + * For example, we could silence the error output from `lazyLines_!` like this: * {{{ - * val etcFiles = "find /etc" lineStream_! ProcessLogger(line => ()) + * val etcFiles = "find /etc" lazyLines_! ProcessLogger(line => ()) * }}} * * ==Extended Example== @@ -134,13 +134,6 @@ import ProcessBuilder._ * Note: though it is not shown above, the equivalent of a shell's `;` would be * `###`. 
The reason for this name is that `;` is a reserved token in Scala. * - * Note: the `lines` method, though deprecated, may conflict with the `StringLike` - * method of the same name. To avoid this, one may wish to call the builders in - * `Process` instead of importing `scala.sys.process._`. The example above would be - * {{{ - * import scala.sys.process.Process - * Process("find src -name *.scala -exec grep null {} ;") #| Process("xargs test -z") #&& Process("echo null-free") #|| Process("echo null detected") ! - * }}} */ trait ProcessBuilder extends Source with Sink { /** Starts the process represented by this builder, blocks until it exits, and @@ -169,17 +162,102 @@ trait ProcessBuilder extends Source with Sink { */ def !!<(log: ProcessLogger): String + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. Standard error is sent to the console. If the process exits + * with a non-zero value, the LazyList will provide all lines up to termination + * and then throw an exception. + */ + def lazyLines: LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the LazyList. + * Standard error is sent to the console. If the process exits + * with a non-zero value, the LazyList will provide all lines up to termination + * and then throw an exception. + */ + def lazyLines(capacity: Integer): LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. Standard error is sent to the provided ProcessLogger.
If the + * process exits with a non-zero value, the LazyList will provide all lines up + * to termination and then throw an exception. + */ + def lazyLines(log: ProcessLogger): LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the LazyList. + * Standard error is sent to the provided ProcessLogger. If the + * process exits with a non-zero value, the LazyList will provide all lines up + * to termination and then throw an exception. + */ + def lazyLines(log: ProcessLogger, capacity: Integer): LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. Standard error is sent to the console. If the process exits + * with a non-zero value, the LazyList will provide all lines up to termination + * but will not throw an exception. + */ + def lazyLines_! : LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the stream. + * Standard error is sent to the console. If the process exits + * with a non-zero value, the LazyList will provide all lines up to termination + * but will not throw an exception. + */ + def lazyLines_!(capacity: Integer): LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. Standard error is sent to the provided ProcessLogger.
If the + * process exits with a non-zero value, the LazyList will provide all lines up + * to termination but will not throw an exception. + */ + def lazyLines_!(log: ProcessLogger): LazyList[String] + + /** Starts the process represented by this builder. The output is returned as + * a LazyList that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the stream. + * Standard error is sent to the provided ProcessLogger. If the + * process exits with a non-zero value, the LazyList will provide all lines up + * to termination but will not throw an exception. + */ + def lazyLines_!(log: ProcessLogger, capacity: Integer): LazyList[String] + /** Starts the process represented by this builder. The output is returned as * a Stream that blocks when lines are not available but the process has not * completed. Standard error is sent to the console. If the process exits * with a non-zero value, the Stream will provide all lines up to termination * and then throw an exception. */ + @deprecated("use lazyLines", since = "2.13.0") def lineStream: Stream[String] - /** Deprecated (renamed). Use `lineStream` instead. */ - @deprecated("use lineStream instead", "2.11.0") - def lines: Stream[String] = lineStream + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the stream. + * Standard error is sent to the console. If the process exits + * with a non-zero value, the Stream will provide all lines up to termination + * and then throw an exception. + */ + @deprecated("use lazyLines", since = "2.13.0") + def lineStream(capacity: Integer): Stream[String] /** Starts the process represented by this builder.
The output is returned as * a Stream that blocks when lines are not available but the process has not @@ -187,11 +265,20 @@ trait ProcessBuilder extends Source with Sink { * process exits with a non-zero value, the Stream will provide all lines up * to termination and then throw an exception. */ + @deprecated("use lazyLines", since = "2.13.0") def lineStream(log: ProcessLogger): Stream[String] - /** Deprecated (renamed). Use `lineStream(log: ProcessLogger)` instead. */ - @deprecated("use lineStream instead", "2.11.0") - def lines(log: ProcessLogger): Stream[String] = lineStream(log) + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the stream. + * Standard error is sent to the provided ProcessLogger. If the + * process exits with a non-zero value, the Stream will provide all lines up + * to termination and then throw an exception. + */ + @deprecated("use lazyLines", since = "2.13.0") + def lineStream(log: ProcessLogger, capacity: Integer): Stream[String] /** Starts the process represented by this builder. The output is returned as * a Stream that blocks when lines are not available but the process has not @@ -199,11 +286,20 @@ * with a non-zero value, the Stream will provide all lines up to termination * but will not throw an exception. */ + @deprecated("use lazyLines_!", since = "2.13.0") def lineStream_! : Stream[String] - /** Deprecated (renamed). Use `lineStream_!` instead. */ - @deprecated("use lineStream_! instead", "2.11.0") - def lines_! : Stream[String] = lineStream_! + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed.
+ * The producer process will block if the given capacity of lines is filled + * without being consumed from the stream. + * Standard error is sent to the console. If the process exits + * with a non-zero value, the Stream will provide all lines up to termination + * but will not throw an exception. + */ + @deprecated("use lazyLines_!", since = "2.13.0") + def lineStream_!(capacity: Integer): Stream[String] /** Starts the process represented by this builder. The output is returned as * a Stream that blocks when lines are not available but the process has not @@ -211,11 +307,20 @@ * process exits with a non-zero value, the Stream will provide all lines up * to termination but will not throw an exception. */ + @deprecated("use lazyLines_!", since = "2.13.0") def lineStream_!(log: ProcessLogger): Stream[String] - /** Deprecated (renamed). Use `lineStream_!(log: ProcessLogger)` instead. */ - @deprecated("use lineStream_! instead", "2.11.0") - def lines_!(log: ProcessLogger): Stream[String] = lineStream_!(log) + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. + * The producer process will block if the given capacity of lines is filled + * without being consumed from the stream. + * Standard error is sent to the provided ProcessLogger. If the + * process exits with a non-zero value, the Stream will provide all lines up + * to termination but will not throw an exception. + */ + @deprecated("use lazyLines_!", since = "2.13.0") + def lineStream_!(log: ProcessLogger, capacity: Integer): Stream[String] /** Starts the process represented by this builder, blocks until it exits, and * returns the exit code. Standard output and error are sent to the console.
@@ -343,7 +448,7 @@ object ProcessBuilder extends ProcessBuilderImpl { def #>(out: => OutputStream): ProcessBuilder = #> (new OStreamBuilder(out, "")) /** Writes the output stream of this process to a [[scala.sys.process.ProcessBuilder]]. */ - def #>(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(toSource, b, false) + def #>(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(toSource, b, toError = false) /** Returns a [[scala.sys.process.ProcessBuilder]] representing this `Source`. */ def cat = toSource @@ -369,6 +474,6 @@ object ProcessBuilder extends ProcessBuilderImpl { def #<(in: => InputStream): ProcessBuilder = #< (new IStreamBuilder(in, "")) /** Reads the output of a [[scala.sys.process.ProcessBuilder]] into the input stream of this process. */ - def #<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, toSink, false) + def #<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, toSink, toError = false) } } diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala index fdaeb2e59e1a..186ad134a218 100644 --- a/src/library/scala/sys/process/ProcessBuilderImpl.scala +++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,10 +16,14 @@ package process import processInternal._ import Process._ -import java.io.{ FileInputStream, FileOutputStream } -import BasicIO.{ Uncloseable, Streamed } +import BasicIO.{LazilyListed, Streamed, Uncloseable} import Uncloseable.protect +import java.io.{FileInputStream, FileOutputStream} +import java.util.concurrent.LinkedBlockingQueue + +import scala.util.control.NonFatal + private[process] trait ProcessBuilderImpl { self: ProcessBuilder.type => @@ -32,7 +36,7 @@ private[process] trait ProcessBuilderImpl { override def canPipeTo = true } - private[process] class URLInput(url: URL) extends IStreamBuilder(url.openStream, url.toString) + private[process] class URLInput(url: URL) extends IStreamBuilder(url.openStream(), url.toString) private[process] class FileInput(file: File) extends IStreamBuilder(new FileInputStream(file), file.getAbsolutePath) private[process] class FileOutput(file: File, append: Boolean) extends OStreamBuilder(new FileOutputStream(file, append), file.getAbsolutePath) @@ -56,7 +60,7 @@ private[process] trait ProcessBuilderImpl { ) extends AbstractBuilder { override def run(io: ProcessIO): Process = { - val success = new SyncVar[Boolean] + val success = new LinkedBlockingQueue[Boolean](1) def go(): Unit = { var ok = false try { @@ -64,7 +68,7 @@ private[process] trait ProcessBuilderImpl { ok = true } finally success.put(ok) } - val t = Spawn(go(), io.daemonizeThreads) + val t = Spawn("ThreadProcess", io.daemonizeThreads)(go()) new ThreadProcess(t, success) } } @@ -72,15 +76,22 @@ private[process] trait ProcessBuilderImpl { /** Represents a simple command without any redirection or combination. 
*/ private[process] class Simple(p: JProcessBuilder) extends AbstractBuilder { override def run(io: ProcessIO): Process = { + import java.lang.ProcessBuilder.Redirect.{INHERIT => Inherit} + import io.{daemonizeThreads, processError, processOutput, writeInput} + + val inherit = writeInput eq BasicIO.connectToStdIn + if (inherit) p.redirectInput(Inherit) + val process = p.start() // start the external process - import io._ // spawn threads that process the input, output, and error streams using the functions defined in `io` - val inThread = Spawn(writeInput(process.getOutputStream), daemon = true) - val outThread = Spawn(processOutput(process.getInputStream), daemonizeThreads) + val inThread = + if (inherit || (writeInput eq BasicIO.connectNoOp)) null + else Spawn("Simple-input", daemon = true)(writeInput(process.getOutputStream)) + val outThread = Spawn("Simple-output", daemonizeThreads)(processOutput(process.getInputStream())) val errorThread = if (p.redirectErrorStream) Nil - else List(Spawn(processError(process.getErrorStream), daemonizeThreads)) + else List(Spawn("Simple-error", daemonizeThreads)(processError(process.getErrorStream()))) new SimpleProcess(process, inThread, outThread :: errorThread) } @@ -89,12 +100,14 @@ private[process] trait ProcessBuilderImpl { } private[scala] abstract class AbstractBuilder extends ProcessBuilder with Sink with Source { - protected def toSource = this - protected def toSink = this + protected def toSource: AbstractBuilder = this + protected def toSink: AbstractBuilder = this + + private[this] val defaultStreamCapacity = 4096 def #|(other: ProcessBuilder): ProcessBuilder = { require(other.canPipeTo, "Piping to multiple processes is not supported.") - new PipedBuilder(this, other, false) + new PipedBuilder(this, other, toError = false) } def #||(other: ProcessBuilder): ProcessBuilder = new OrBuilder(this, other) def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other) @@ -110,10 +123,23 @@ private[process] 
trait ProcessBuilderImpl { def !!< = slurp(None, withIn = true) def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true) - def lineStream: Stream[String] = lineStream(withInput = false, nonZeroException = true, None) - def lineStream(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = true, Some(log)) - def lineStream_! : Stream[String] = lineStream(withInput = false, nonZeroException = false, None) - def lineStream_!(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log)) + def lazyLines: LazyList[String] = lazyLines(withInput = false, nonZeroException = true, None, defaultStreamCapacity) + def lazyLines(log: ProcessLogger): LazyList[String] = lazyLines(withInput = false, nonZeroException = true, Some(log), defaultStreamCapacity) + def lazyLines_! : LazyList[String] = lazyLines(withInput = false, nonZeroException = false, None, defaultStreamCapacity) + def lazyLines_!(log: ProcessLogger): LazyList[String] = lazyLines(withInput = false, nonZeroException = false, Some(log), defaultStreamCapacity) + def lazyLines(capacity: Integer): LazyList[String] = lazyLines(withInput = false, nonZeroException = true, None, capacity) + def lazyLines(log: ProcessLogger, capacity: Integer): LazyList[String] = lazyLines(withInput = false, nonZeroException = true, Some(log), capacity) + def lazyLines_!(capacity: Integer) : LazyList[String] = lazyLines(withInput = false, nonZeroException = false, None, capacity) + def lazyLines_!(log: ProcessLogger, capacity: Integer): LazyList[String] = lazyLines(withInput = false, nonZeroException = false, Some(log), capacity) + + @deprecated("internal", since = "2.13.4") def lineStream: Stream[String] = lineStream(withInput = false, nonZeroException = true, None, defaultStreamCapacity) + @deprecated("internal", since = "2.13.4") def lineStream(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = true, Some(log), 
defaultStreamCapacity) + @deprecated("internal", since = "2.13.4") def lineStream_! : Stream[String] = lineStream(withInput = false, nonZeroException = false, None, defaultStreamCapacity) + @deprecated("internal", since = "2.13.4") def lineStream_!(log: ProcessLogger): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log), defaultStreamCapacity) + @deprecated("internal", since = "2.13.4") def lineStream(capacity: Integer): Stream[String] = lineStream(withInput = false, nonZeroException = true, None, capacity) + @deprecated("internal", since = "2.13.4") def lineStream(log: ProcessLogger, capacity: Integer): Stream[String] = lineStream(withInput = false, nonZeroException = true, Some(log), capacity) + @deprecated("internal", since = "2.13.4") def lineStream_!(capacity: Integer) : Stream[String] = lineStream(withInput = false, nonZeroException = false, None, capacity) + @deprecated("internal", since = "2.13.4") def lineStream_!(log: ProcessLogger, capacity: Integer): Stream[String] = lineStream(withInput = false, nonZeroException = false, Some(log), capacity) def ! 
= run(connectInput = false).exitValue() def !(io: ProcessIO) = run(io).exitValue() @@ -138,15 +164,40 @@ private[process] trait ProcessBuilderImpl { else scala.sys.error("Nonzero exit value: " + code) } + private[this] def lazyLines( + withInput: Boolean, + nonZeroException: Boolean, + log: Option[ProcessLogger], + capacity: Integer + ): LazyList[String] = { + val lazilyListed = LazilyListed[String](nonZeroException, capacity) + val process = run(BasicIO(withInput, lazilyListed.process, log)) + + // extract done from lazilyListed so that the anonymous function below closes over just the done and not the whole lazilyListed (see https://github.com/scala/bug/issues/12185) + val done = lazilyListed.done + + Spawn("LazyLines") { + done { + try process.exitValue() + catch { + case NonFatal(_) => -2 + } + } + } + lazilyListed.lazyList + } + + @deprecated("internal", since = "2.13.4") private[this] def lineStream( withInput: Boolean, nonZeroException: Boolean, - log: Option[ProcessLogger] + log: Option[ProcessLogger], + capacity: Integer ): Stream[String] = { - val streamed = Streamed[String](nonZeroException) + val streamed = Streamed[String](nonZeroException, capacity) val process = run(BasicIO(withInput, streamed.process, log)) - Spawn(streamed done process.exitValue()) + Spawn("LineStream")(streamed done process.exitValue()) streamed.stream() } @@ -158,16 +209,16 @@ private[process] trait ProcessBuilderImpl { } private[process] class URLImpl(url: URL) extends URLBuilder with Source { - protected def toSource = new URLInput(url) + protected def toSource: URLInput = new URLInput(url) } private[process] class FileImpl(base: File) extends FileBuilder with Sink with Source { - protected def toSource = new FileInput(base) - protected def toSink = new FileOutput(base, false) + protected def toSource: FileInput = new FileInput(base) + protected def toSink: FileOutput = new FileOutput(base, append = false) def #<<(f: File): ProcessBuilder = #<<(new FileInput(f)) def #<<(u: 
URL): ProcessBuilder = #<<(new URLInput(u)) def #<<(s: => InputStream): ProcessBuilder = #<<(new IStreamBuilder(s, "")) - def #<<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, new FileOutput(base, true), false) + def #<<(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(b, new FileOutput(base, append = true), toError = false) } private[process] abstract class BasicBuilder extends AbstractBuilder { @@ -197,27 +248,27 @@ private[process] trait ProcessBuilderImpl { toError: Boolean ) extends SequentialBuilder(first, second, if (toError) "#|!" else "#|") { - override def createProcess(io: ProcessIO) = new PipedProcesses(first, second, io, toError) + override def createProcess(io: ProcessIO): PipedProcesses = new PipedProcesses(first, second, io, toError) } private[process] class AndBuilder( first: ProcessBuilder, second: ProcessBuilder ) extends SequentialBuilder(first, second, "#&&") { - override def createProcess(io: ProcessIO) = new AndProcess(first, second, io) + override def createProcess(io: ProcessIO): AndProcess = new AndProcess(first, second, io) } private[process] class OrBuilder( first: ProcessBuilder, second: ProcessBuilder ) extends SequentialBuilder(first, second, "#||") { - override def createProcess(io: ProcessIO) = new OrProcess(first, second, io) + override def createProcess(io: ProcessIO): OrProcess = new OrProcess(first, second, io) } private[process] class SequenceBuilder( first: ProcessBuilder, second: ProcessBuilder ) extends SequentialBuilder(first, second, "###") { - override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io) + override def createProcess(io: ProcessIO): ProcessSequence = new ProcessSequence(first, second, io) } } diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala index 154b4632839e..a4898d2c9b1e 100644 --- a/src/library/scala/sys/process/ProcessIO.scala +++ b/src/library/scala/sys/process/ProcessIO.scala @@ -1,7 +1,7 @@ /* * Scala 
(https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -56,7 +56,7 @@ final class ProcessIO( val processError: InputStream => Unit, val daemonizeThreads: Boolean ) { - def this(in: OutputStream => Unit, out: InputStream => Unit, err: InputStream => Unit) = this(in, out, err, false) + def this(in: OutputStream => Unit, out: InputStream => Unit, err: InputStream => Unit) = this(in, out, err, daemonizeThreads = false) /** Creates a new `ProcessIO` with a different handler for the process input. */ def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, daemonizeThreads) @@ -68,5 +68,5 @@ final class ProcessIO( def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, daemonizeThreads) /** Creates a new `ProcessIO`, with `daemonizeThreads` true. */ - def daemonized(): ProcessIO = new ProcessIO(writeInput, processOutput, processError, true) + def daemonized(): ProcessIO = new ProcessIO(writeInput, processOutput, processError, daemonizeThreads = true) } diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index 35b873979dba..3ed0c5766412 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,21 +10,23 @@ * additional information regarding copyright ownership. 
*/ -package scala -package sys -package process +package scala.sys.process import processInternal._ -import java.io.{ PipedInputStream, PipedOutputStream } + +import java.util.concurrent.LinkedBlockingQueue +import java.io.{PipedInputStream, PipedOutputStream} + +import scala.annotation.tailrec private[process] trait ProcessImpl { self: Process.type => /** Runs provided code in a new Thread and returns the Thread instance. */ private[process] object Spawn { - def apply(f: => Unit): Thread = apply(f, daemon = false) - def apply(f: => Unit, daemon: Boolean): Thread = { - val thread = new Thread() { override def run() = { f } } + def apply(prefix: String, daemon: Boolean = false)(f: => Unit): Thread = { + val thread = new Thread() { override def run() = f } + thread.setName(prefix + "-spawn-" + thread.getName) thread.setDaemon(daemon) thread.start() thread @@ -32,14 +34,15 @@ private[process] trait ProcessImpl { } private[process] object Future { def apply[T](f: => T): (Thread, () => T) = { - val result = new SyncVar[Either[Throwable, T]] - def run(): Unit = - try result.put(Right(f)) - catch { case e: Exception => result.put(Left(e)) } + val result = new LinkedBlockingQueue[Either[Throwable, T]](1) + def run(): Unit = { + val value = try Right(f) catch { case e: Exception => Left(e) } + result.put(value) + } - val t = Spawn(run()) + val t = Spawn("Future")(run()) - (t, () => result.get match { + (t, () => result.take() match { case Right(value) => value case Left(exception) => throw exception }) @@ -94,16 +97,24 @@ private[process] trait ProcessImpl { def start() = { futureThread ;() } protected lazy val (processThread, (futureThread, futureValue), destroyer) = { - val code = new SyncVar[Option[Int]]() - val thread = Spawn { + val code = new LinkedBlockingQueue[Option[Int]](1) + val thread = Spawn("CompoundProcess") { var value: Option[Int] = None try value = runAndExitValue() + catch { + case _: IndexOutOfBoundsException + | _: IOException + | _: 
NullPointerException + | _: SecurityException + | _: UnsupportedOperationException + => value = Some(-1) + } finally code.put(value) } ( thread, - Future(code.get), // thread.join() + Future(code.take()), // thread.join() () => thread.interrupt() ) } @@ -118,19 +129,22 @@ private[process] trait ProcessImpl { } private[process] class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess { - protected[this] override def runAndExitValue() = runAndExitValue(new PipeSource(a.toString), new PipeSink(b.toString)) + protected def newSource: PipeSource = new PipeSource(a.toString) + protected def newSink: PipeSink = new PipeSink(b.toString) + protected[this] override def runAndExitValue() = runAndExitValue(newSource, newSink) protected[this] def runAndExitValue(source: PipeSource, sink: PipeSink): Option[Int] = { source connectOut sink source.start() sink.start() - /** Release PipeSource, PipeSink and Process in the correct order. - * If once connect Process with Source or Sink, then the order of releasing them - * must be Source -> Sink -> Process, otherwise IOException will be thrown. */ - def releaseResources(so: PipeSource, sk: PipeSink, p: Process *) = { + /* Release PipeSource, PipeSink and Process in the correct order. + * If once connect Process with Source or Sink, then the order of releasing them + * must be Source -> Sink -> Process, otherwise IOException will be thrown. + */ + def releaseResources(so: PipeSource, sk: PipeSink, ps: Process*) = { so.release() sk.release() - p foreach( _.destroy() ) + ps.foreach(_.destroy()) } val firstIO = @@ -151,9 +165,11 @@ private[process] trait ProcessImpl { throw err } runInterruptible { - source.join() val exit1 = first.exitValue() + source.done() + source.join() val exit2 = second.exitValue() + sink.done() // Since file redirection (e.g. #>) is implemented as a piped process, // we ignore its exit value so cmd #> file doesn't always return 0. 
if (b.hasExitValue) exit2 else exit1 @@ -173,53 +189,54 @@ private[process] trait ProcessImpl { if (isSink) dst else src } } - private def ioHandler(e: IOException) { - println("I/O error " + e.getMessage + " for process: " + labelFn()) - e.printStackTrace() - } + private def ioHandler(e: IOException): Unit = e.printStackTrace() } - private[process] class PipeSource(label: => String) extends PipeThread(false, () => label) { + private[process] class PipeSource(label: => String) extends PipeThread(isSink = false, () => label) { + setName(s"PipeSource($label)-$getName") protected[this] val pipe = new PipedOutputStream - protected[this] val source = new LinkedBlockingQueue[Option[InputStream]] - override def run(): Unit = { - try { - source.take match { - case Some(in) => runloop(in, pipe) + protected[this] val source = new LinkedBlockingQueue[Option[InputStream]](1) + override final def run(): Unit = { + @tailrec def go(): Unit = + source.take() match { + case Some(in) => runloop(in, pipe) ; go() case None => } - } + try go() catch onInterrupt(()) finally BasicIO close pipe } - def connectIn(in: InputStream): Unit = source add Some(in) + def connectIn(in: InputStream): Unit = source.put(Some(in)) def connectOut(sink: PipeSink): Unit = sink connectIn pipe def release(): Unit = { interrupt() - source add None + done() join() } + def done() = source.put(None) } - private[process] class PipeSink(label: => String) extends PipeThread(true, () => label) { + private[process] class PipeSink(label: => String) extends PipeThread(isSink = true, () => label) { + setName(s"PipeSink($label)-$getName") protected[this] val pipe = new PipedInputStream - protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]] + protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]](1) override def run(): Unit = { - try { - sink.take match { - case Some(out) => runloop(pipe, out) + @tailrec def go(): Unit = + sink.take() match { + case Some(out) => runloop(pipe, out) ; 
go() case None => } - } + try go() catch onInterrupt(()) finally BasicIO close pipe } - def connectOut(out: OutputStream): Unit = sink add Some(out) + def connectOut(out: OutputStream): Unit = sink.put(Some(out)) def connectIn(pipeOut: PipedOutputStream): Unit = pipe connect pipeOut def release(): Unit = { interrupt() - sink add None + done() join() } + def done() = sink.put(None) } /** A thin wrapper around a java.lang.Process. `ioThreads` are the Threads created to do I/O. @@ -229,19 +246,25 @@ private[process] trait ProcessImpl { private[this] val (thread, value) = Future(action) override def isAlive() = thread.isAlive() override def exitValue() = value() - override def destroy() { } + override def destroy(): Unit = { } } - /** A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the - * output and error streams of the process. `inputThread` is the Thread created to write to the input stream of - * the process. - * The implementation of `exitValue` interrupts `inputThread` and then waits until all I/O threads die before - * returning. */ + /** A thin wrapper around a java.lang.Process. + * + * `outputThreads` are the Threads created to read from the + * output and error streams of the process. + * + * `inputThread` is the Thread created to write to the input stream of + * the process. It may be null if stdin was inherited. + * + * The implementation of `exitValue` interrupts `inputThread` + * and then waits until all I/O threads die before returning. 
+ */ private[process] class SimpleProcess(p: JProcess, inputThread: Thread, outputThreads: List[Thread]) extends Process { override def isAlive() = p.isAlive() override def exitValue() = { try p.waitFor() // wait for the process to terminate - finally inputThread.interrupt() // we interrupt the input thread to notify it that it can terminate + finally interrupt() outputThreads foreach (_.join()) // this ensures that all output is complete before returning (waitFor does not ensure this) p.exitValue() @@ -251,12 +274,14 @@ private[process] trait ProcessImpl { outputThreads foreach (_.interrupt()) // on destroy, don't bother consuming any more output p.destroy() } - finally inputThread.interrupt() + finally interrupt() } + // we interrupt the input thread to notify it that it can terminate + private[this] def interrupt(): Unit = if (inputThread != null) inputThread.interrupt() } - private[process] final class ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process { + private[process] final class ThreadProcess(thread: Thread, success: LinkedBlockingQueue[Boolean]) extends Process { override def isAlive() = thread.isAlive() - override def exitValue() = if (success.get) 0 else 1 // thread.join() + override def exitValue() = if (success.take()) 0 else 1 // thread.join() override def destroy() = thread.interrupt() } } diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala index 4666d476152c..e048ca5c9446 100644 --- a/src/library/scala/sys/process/ProcessLogger.scala +++ b/src/library/scala/sys/process/ProcessLogger.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -60,7 +60,7 @@ trait ProcessLogger { /** A [[scala.sys.process.ProcessLogger]] that writes output to a file. 
*/ class FileProcessLogger(file: File) extends ProcessLogger with Closeable with Flushable { - private val writer = ( + private[this] val writer = ( new PrintWriter( new BufferedWriter( new OutputStreamWriter( diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index 07445af4df70..11e6640d8e21 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,8 @@ // scala -J-Dscala.process.debug // for process debugging output. // -package scala.sys { +package scala.sys +package process /** This package handles the execution of external processes. The contents of * this package can be divided in three groups, according to their * responsibilities: @@ -29,7 +30,7 @@ package scala.sys { * * {{{ * import scala.sys.process._ - * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lineStream + * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lazyLines * }}} * * We describe below the general concepts and architecture of the package, @@ -88,7 +89,7 @@ package scala.sys { * * To further control what how the process will be run, such as specifying * the directory in which it will be run, see the factories on - * [[scala.sys.process.Process]]'s object companion. + * [[scala.sys.process.Process]]'s companion object. 
* * Once the desired `ProcessBuilder` is available, it can be executed in * different ways, depending on how one desires to control its I/O, and what @@ -96,7 +97,7 @@ package scala.sys { * * - Return status of the process (`!` methods) * - Output of the process as a `String` (`!!` methods) - * - Continuous output of the process as a `Stream[String]` (`lineStream` methods) + * - Continuous output of the process as a `LazyList[String]` (`lazyLines` methods) * - The `Process` representing it (`run` methods) * * Some simple examples of these methods: @@ -110,10 +111,10 @@ package scala.sys { * val dirContents = "ls".!! * * // This "fire-and-forgets" the method, which can be lazily read through - * // a Stream[String] - * def sourceFilesAt(baseDir: String): Stream[String] = { + * // a LazyList[String] + * def sourceFilesAt(baseDir: String): LazyList[String] = { * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f") - * cmd.lineStream + * cmd.lazyLines * } * }}} * @@ -144,7 +145,7 @@ package scala.sys { * * - [[scala.sys.process.ProcessIO]]: provides the low level abstraction. * - [[scala.sys.process.ProcessLogger]]: provides a higher level abstraction - * for output, and can be created through its object companion + * for output, and can be created through its companion object. * - [[scala.sys.process.BasicIO]]: a library of helper methods for the * creation of `ProcessIO`. * - This package object itself, with a few implicit conversions. 
@@ -167,12 +168,12 @@ package scala.sys { * } * * // This "fire-and-forgets" the method, which can be lazily read through - * // a Stream[String], and accumulates all errors on a StringBuffer - * def sourceFilesAt(baseDir: String): (Stream[String], StringBuffer) = { + * // a LazyList[String], and accumulates all errors on a StringBuffer + * def sourceFilesAt(baseDir: String): (LazyList[String], StringBuffer) = { * val buffer = new StringBuffer() * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f") - * val lineStream = cmd lineStream_! ProcessLogger(buffer append _) - * (lineStream, buffer) + * val lazyLines = cmd lazyLines_! ProcessLogger(buffer append _) + * (lazyLines, buffer) * } * }}} * @@ -186,7 +187,7 @@ package scala.sys { * import java.io.File * import java.net.URL * import scala.sys.process._ - * new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") ! + * new URL("https://www.scala-lang.org/") #> new File("scala-lang.html") ! * }}} * * More information about the other ways of controlling I/O can be found @@ -204,14 +205,8 @@ package scala.sys { * - `destroy()`: this will kill the external process and close the streams * associated with it. 
*/ - package object process extends ProcessImplicits { - /** The arguments passed to `java` when creating this process */ - @deprecated(message = "to adhere to the compact1 profile this method will be removed", since = "2.12.5") // https://github.com/scala/scala-dev/issues/437 - def javaVmArguments: List[String] = { - import scala.collection.JavaConverters._ - - java.lang.management.ManagementFactory.getRuntimeMXBean.getInputArguments.asScala.toList - } + @annotation.nowarn("msg=package object inheritance") + object `package` extends ProcessImplicits { /** The input stream of this process */ def stdin = java.lang.System.in /** The output stream of this process */ @@ -223,7 +218,6 @@ package scala.sys { // if (isWin) Array("cmd.exe", "/C", _) // else Array("sh", "-c", _) - package process { // These are in a nested object instead of at the package level // due to the issues described in tickets #3160 and #3836. private[process] object processInternal { @@ -238,11 +232,12 @@ package scala.sys { type InputStream = java.io.InputStream type JProcess = java.lang.Process type JProcessBuilder = java.lang.ProcessBuilder - type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T] type OutputStream = java.io.OutputStream - type SyncVar[T] = scala.concurrent.SyncVar[T] type URL = java.net.URL + @deprecated("Use `java.util.concurrent.LinkedBlockingQueue with capacity 1` instead.", since = "2.13.4") + type SyncVar[T] = scala.concurrent.SyncVar[T] + def onError[T](handler: Throwable => T): Throwable =?> T = { case e @ _ => handler(e) } @@ -263,5 +258,3 @@ package scala.sys { Console.println("[process] " + (msgs mkString " ")) } } - } -} diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala deleted file mode 100644 index 56b99e5e4d41..000000000000 --- a/src/library/scala/text/Document.scala +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.text - -import java.io.Writer - -@deprecated("this object will be removed", "2.11.0") -case object DocNil extends Document -@deprecated("this object will be removed", "2.11.0") -case object DocBreak extends Document -@deprecated("this class will be removed", "2.11.0") -case class DocText(txt: String) extends Document -@deprecated("this class will be removed", "2.11.0") -case class DocGroup(doc: Document) extends Document -@deprecated("this class will be removed", "2.11.0") -case class DocNest(indent: Int, doc: Document) extends Document -@deprecated("this class will be removed", "2.11.0") -case class DocCons(hd: Document, tl: Document) extends Document - -/** - * A basic pretty-printing library, based on Lindig's strict version - * of Wadler's adaptation of Hughes' pretty-printer. - * - * @author Michel Schinz - * @version 1.0 - */ -@deprecated("this class will be removed", "2.11.0") -abstract class Document { - def ::(hd: Document): Document = DocCons(hd, this) - def ::(hd: String): Document = DocCons(DocText(hd), this) - def :/:(hd: Document): Document = hd :: DocBreak :: this - def :/:(hd: String): Document = hd :: DocBreak :: this - - /** - * Format this document on `writer` and try to set line - * breaks so that the result fits in `width` columns. 
- */ - def format(width: Int, writer: Writer) { - type FmtState = (Int, Boolean, Document) - - def fits(w: Int, state: List[FmtState]): Boolean = state match { - case _ if w < 0 => - false - case List() => - true - case (_, _, DocNil) :: z => - fits(w, z) - case (i, b, DocCons(h, t)) :: z => - fits(w, (i,b,h) :: (i,b,t) :: z) - case (_, _, DocText(t)) :: z => - fits(w - t.length(), z) - case (i, b, DocNest(ii, d)) :: z => - fits(w, (i + ii, b, d) :: z) - case (_, false, DocBreak) :: z => - fits(w - 1, z) - case (_, true, DocBreak) :: z => - true - case (i, _, DocGroup(d)) :: z => - fits(w, (i, false, d) :: z) - } - - def spaces(n: Int) { - var rem = n - while (rem >= 16) { writer write " "; rem -= 16 } - if (rem >= 8) { writer write " "; rem -= 8 } - if (rem >= 4) { writer write " "; rem -= 4 } - if (rem >= 2) { writer write " "; rem -= 2} - if (rem == 1) { writer write " " } - } - - def fmt(k: Int, state: List[FmtState]): Unit = state match { - case List() => () - case (_, _, DocNil) :: z => - fmt(k, z) - case (i, b, DocCons(h, t)) :: z => - fmt(k, (i, b, h) :: (i, b, t) :: z) - case (i, _, DocText(t)) :: z => - writer write t - fmt(k + t.length(), z) - case (i, b, DocNest(ii, d)) :: z => - fmt(k, (i + ii, b, d) :: z) - case (i, true, DocBreak) :: z => - writer write "\n" - spaces(i) - fmt(i, z) - case (i, false, DocBreak) :: z => - writer write " " - fmt(k + 1, z) - case (i, b, DocGroup(d)) :: z => - val fitsFlat = fits(width - k, (i, false, d) :: z) - fmt(k, (i, !fitsFlat, d) :: z) - case _ => - () - } - - fmt(0, (0, false, DocGroup(this)) :: Nil) - } -} - -@deprecated("this object will be removed", "2.11.0") -object Document { - /** The empty document */ - def empty = DocNil - - /** A break, which will either be turned into a space or a line break */ - def break = DocBreak - - /** A document consisting of some text literal */ - def text(s: String): Document = DocText(s) - - /** - * A group, whose components will either be printed with all breaks - * rendered as 
spaces, or with all breaks rendered as line breaks. - */ - def group(d: Document): Document = DocGroup(d) - - /** A nested document, which will be indented as specified. */ - def nest(i: Int, d: Document): Document = DocNest(i, d) -} diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala index 484742cc3b7c..d5391ce9242c 100644 --- a/src/library/scala/throws.scala +++ b/src/library/scala/throws.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,10 +22,7 @@ package scala * def read() = in.read() * } * }}} - * - * @author Nikolay Mihaylov - * @since 2.1 */ -class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation { +final class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation { def this(clazz: Class[T]) = this("") } diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala index ed815b16031a..b40ce540547a 100644 --- a/src/library/scala/transient.scala +++ b/src/library/scala/transient.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,4 +15,4 @@ package scala import scala.annotation.meta._ @field -class transient extends scala.annotation.StaticAnnotation +final class transient extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/typeConstraints.scala b/src/library/scala/typeConstraints.scala new file mode 100644 index 000000000000..f17205e122cf --- /dev/null +++ b/src/library/scala/typeConstraints.scala @@ -0,0 +1,243 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +import scala.annotation.implicitNotFound + +/** An instance of `A <:< B` witnesses that `A` is a subtype of `B`. + * Requiring an implicit argument of the type `A <:< B` encodes + * the generalized constraint `A <: B`. + * + * To constrain any abstract type `T` that's in scope in a method's + * argument list (not just the method's own type parameters) simply + * add an implicit argument of type `T <:< U`, where `U` is the required + * upper bound; or for lower-bounds, use: `L <:< T`, where `L` is the + * required lower bound. + * + * In case of any confusion over which method goes in what direction, all the "Co" methods (including + * [[apply]]) go from left to right in the type ("with" the type), and all the "Contra" methods go + * from right to left ("against" the type). E.g., [[apply]] turns a `From` into a `To`, and + * [[substituteContra]] replaces the `To`s in a type with `From`s. + * + * In part contributed by Jason Zaugg. + * + * @tparam From a type which is proved a subtype of `To` + * @tparam To a type which is proved a supertype of `From` + * + * @example [[scala.Option#flatten]] + * {{{ + * sealed trait Option[+A] { + * // def flatten[B, A <: Option[B]]: Option[B] = ... + * // won't work, since the A in flatten shadows the class-scoped A. + * def flatten[B](implicit ev: A <:< Option[B]): Option[B] + * = if(isEmpty) None else ev(get) + * // Because (A <:< Option[B]) <: (A => Option[B]), ev can be called to turn the + * // A from get into an Option[B], and because ev is implicit, that call can be + * // left out and inserted automatically. 
+ * } + * }}} + * + * @see [[=:=]] for expressing equality constraints + * + * @define isProof This method is impossible to implement without `throw`ing or otherwise "cheating" unless + * `From <: To`, so it ensures that this really represents a subtyping relationship. + * @define contraCo contravariant in the first argument and covariant in the second + * @define contraCon a contravariant type constructor + * @define coCon a covariant type constructor + * @define sameDiff but with a (potentially) different type + * @define tp <:< + */ +// All of these methods are reimplemented unsafely in =:=.singleton to avoid any indirection. +// They are here simply for reference as the "correct", safe implementations. +@implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.") +sealed abstract class <:<[-From, +To] extends (From => To) with Serializable { + /** Substitute `To` for `From` and `From` for `To` in the type `F[To, From]`, given that `F` is $contraCo. + * Essentially swaps `To` and `From` in `ftf`'s type. + * + * Equivalent in power to each of [[substituteCo]] and [[substituteContra]]. + * + * $isProof + * + * @return `ftf`, $sameDiff + */ + def substituteBoth[F[-_, +_]](ftf: F[To, From]): F[From, To] + // = substituteCo[({type G[+T] = F[From, T]})#G](substituteContra[({type G[-T] = F[T, From})#G](ftf)) + // = substituteContra[({type G[-T] = F[T, To]})#G](substituteCo[({type G[+T] = F[From, T]})#G](ftf)) + /** Substitute the `From` in the type `F[From]`, where `F` is $coCon, for `To`. + * + * Equivalent in power to each of [[substituteBoth]] and [[substituteContra]]. + * + * $isProof + * + * @return `ff`, $sameDiff + */ + def substituteCo[F[+_]](ff: F[From]): F[To] = { + type G[-_, +T] = F[T] + substituteBoth[G](ff) + } + // = substituteContra[({type G[-T] = F[T] => F[To]})#G](identity)(ff) + /** Substitute the `To` in the type `F[To]`, where `F` is $contraCon, for `From`. + * + * Equivalent in power to each of [[substituteBoth]] and [[substituteCo]]. 
+ * + * $isProof + * + * @return `ft`, $sameDiff + */ + def substituteContra[F[-_]](ft: F[To]): F[From] = { + type G[-T, +_] = F[T] + substituteBoth[G](ft) + } + // = substituteCo[({type G[+T] = F[T] => F[From]})#G](identity)(ft) + + /** Coerce a `From` into a `To`. This is guaranteed to be the identity function. + * + * This method is often called implicitly as an implicit `A $tp B` doubles as an implicit view `A => B`. + * + * @param f some value of type `From` + * @return `f`, $sameDiff + */ + override def apply(f: From): To = { + type Id[+X] = X + substituteCo[Id](f) + } + + override def compose[C](r: C => From): C => To = { + type G[+T] = C => T + substituteCo[G](r) + } + /** If `From <: To` and `C <: From`, then `C <: To` (subtyping is transitive) */ + def compose[C](r: C <:< From): C <:< To = { + type G[+T] = C <:< T + substituteCo[G](r) + } + override def andThen[C](r: To => C): From => C = { + type G[-T] = T => C + substituteContra[G](r) + } + /** If `From <: To` and `To <: C`, then `From <: C` (subtyping is transitive) */ + def andThen[C](r: To <:< C): From <:< C = { + type G[-T] = T <:< C + substituteContra[G](r) + } + + /** Lift this evidence over $coCon `F`. */ + def liftCo[F[+_]]: F[From] <:< F[To] = { + type G[+T] = F[From] <:< F[T] + substituteCo[G](implicitly[G[From]]) + } + /** Lift this evidence over $contraCon `F`. 
*/ + def liftContra[F[-_]]: F[To] <:< F[From] = { + type G[-T] = F[To] <:< F[T] + substituteContra[G](implicitly[G[To]]) + } +} + +object <:< { + // the only instance for <:< and =:=, used to avoid overhead + private val singleton: =:=[Any, Any] = new =:=[Any,Any] { + override def substituteBoth[F[_, _]](ftf: F[Any, Any]) = ftf + override def substituteCo [F[_]](ff: F[Any]) = ff + override def substituteContra[F[_]](ff: F[Any]) = ff + override def apply(x: Any) = x + override def flip: Any =:= Any = this + override def compose[C](r: C => Any) = r + override def compose[C](r: C <:< Any) = r + override def compose[C](r: C =:= Any) = r + override def andThen[C](r: Any => C) = r + override def andThen[C](r: Any <:< C) = r + override def andThen[C](r: Any =:= C) = r + override def liftCo [F[_]] = asInstanceOf[F[Any] =:= F[Any]] + override def liftContra[F[_]] = asInstanceOf[F[Any] =:= F[Any]] + override def toString = "generalized constraint" + } + + /** `A =:= A` for all `A` (equality is reflexive). This also provides implicit views `A <:< B` + * when `A <: B`, because `(A =:= A) <: (A <:< A) <: (A <:< B)`. + */ + implicit def refl[A]: A =:= A = singleton.asInstanceOf[A =:= A] + // = new =:=[A, A] { override def substituteBoth[F[_, _]](faa: F[A, A]): F[A, A] = faa } + + /** If `A <: B` and `B <: A`, then `A = B` (subtyping is antisymmetric) */ + def antisymm[A, B](implicit l: A <:< B, r: B <:< A): A =:= B = singleton.asInstanceOf[A =:= B] + // = ??? (I don't think this is possible to implement "safely") +} + +/** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. It also acts as a `A <:< B`, + * but not a `B <:< A` (directly) due to restrictions on subclassing. + * + * In case of any confusion over which method goes in what direction, all the "Co" methods (including + * [[apply]]) go from left to right in the type ("with" the type), and all the "Contra" methods go + * from right to left ("against" the type). 
E.g., [[apply]] turns a `From` into a `To`, and + * [[substituteContra]] replaces the `To`s in a type with `From`s. + * + * @tparam From a type which is proved equal to `To` + * @tparam To a type which is proved equal to `From` + * + * @example An in-place variant of [[scala.collection.mutable.ArrayBuffer#transpose]] {{{ + * implicit class BufOps[A](private val buf: ArrayBuffer[A]) extends AnyVal { + * def inPlaceTranspose[E]()(implicit ev: A =:= ArrayBuffer[E]) = ??? + * // Because ArrayBuffer is invariant, we can't make do with just a A <:< ArrayBuffer[E] + * // Getting buffers *out* from buf would work, but adding them back *in* wouldn't. + * } + * }}} + * @see [[<:<]] for expressing subtyping constraints + * + * @define isProof This method is impossible to implement without `throw`ing or otherwise "cheating" unless + * `From = To`, so it ensures that this really represents a type equality. + * @define contraCo a type constructor of two arguments + * @define contraCon any type constructor + * @define coCon any type constructor + * @define tp =:= + */ +// Most of the notes on <:< above apply to =:= as well +@implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.") +sealed abstract class =:=[From, To] extends (From <:< To) with Serializable { + override def substituteBoth[F[_, _]](ftf: F[To, From]): F[From, To] + override def substituteCo[F[_]](ff: F[From]): F[To] = { + type G[_, T] = F[T] + substituteBoth[G](ff) + } + // = substituteContra[({type G[T] = F[T] => F[To]})#G](identity)(ff) + override def substituteContra[F[_]](ft: F[To]): F[From] = { + type G[T, _] = F[T] + substituteBoth[G](ft) + } + // = substituteCo[({type G[T] = F[T] => F[From]})#G](identity)(ft) + + /** @inheritdoc */ override def apply(f: From) = super.apply(f) + + /** If `From = To` then `To = From` (equality is symmetric) */ + def flip: To =:= From = { + type G[T, F] = F =:= T + substituteBoth[G](this) + } + + /** If `From = To` and `C = From`, then `C = To` (equality is transitive) */ 
+ def compose[C](r: C =:= From): C =:= To = { + type G[T] = C =:= T + substituteCo[G](r) + } + /** If `From = To` and `To = C`, then `From = C` (equality is transitive) */ + def andThen[C](r: To =:= C): From =:= C = { + type G[T] = T =:= C + substituteContra[G](r) + } + + override def liftCo[F[_]]: F[From] =:= F[To] = { + type G[T] = F[T] =:= F[To] + substituteContra[G](implicitly[G[To]]) + } + /** Lift this evidence over the type constructor `F`, but flipped. */ + override def liftContra[F[_]]: F[To] =:= F[From] = liftCo[F].flip +} diff --git a/src/library/scala/unchecked.scala b/src/library/scala/unchecked.scala index 00136bbb4ede..6927db7293e5 100644 --- a/src/library/scala/unchecked.scala +++ b/src/library/scala/unchecked.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,7 @@ package scala /** An annotation to designate that the annotated entity * should not be considered for additional compiler checks. * Specific applications include annotating the subject of - * a match expression to suppress exhaustiveness warnings, and + * a match expression to suppress exhaustiveness and reachability warnings, and * annotating a type argument in a match case to suppress * unchecked warnings. * @@ -34,7 +34,5 @@ package scala * def g(xs: Any) = xs match { case x: List[String @unchecked] => x.head } * } * }}} - * - * @since 2.4 */ -class unchecked extends scala.annotation.Annotation {} +final class unchecked extends scala.annotation.Annotation {} diff --git a/src/library/scala/util/ChainingOps.scala b/src/library/scala/util/ChainingOps.scala new file mode 100644 index 000000000000..4bfbdc82a833 --- /dev/null +++ b/src/library/scala/util/ChainingOps.scala @@ -0,0 +1,65 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package util + +import scala.language.implicitConversions + +trait ChainingSyntax { + @inline implicit final def scalaUtilChainingOps[A](a: A): ChainingOps[A] = new ChainingOps(a) +} + +/** Adds chaining methods `tap` and `pipe` to every type. + */ +final class ChainingOps[A](private val self: A) extends AnyVal { + /** Applies `f` to the value for its side effects, and returns the original value. + * + * {{{ + * scala> import scala.util.chaining._ + * + * scala> val xs = List(1, 2, 3).tap(ys => println("debug " + ys.toString)) + * debug List(1, 2, 3) + * xs: List[Int] = List(1, 2, 3) + * }}} + * + * @param f the function to apply to the value. + * @tparam U the result type of the function `f`. + * @return the original value `self`. + */ + def tap[U](f: A => U): A = { + f(self) + self + } + + /** Converts the value by applying the function `f`. + * + * {{{ + * scala> import scala.util.chaining._ + * + * scala> val times6 = (_: Int) * 6 + * times6: Int => Int = \$\$Lambda\$2023/975629453@17143b3b + * + * scala> val i = (1 - 2 - 3).pipe(times6).pipe(scala.math.abs) + * i: Int = 24 + * }}} + * + * Note: `(1 - 2 - 3).pipe(times6)` may have a small amount of overhead at + * runtime compared to the equivalent `{ val temp = 1 - 2 - 3; times6(temp) }`. + * + * @param f the function to apply to the value. + * @tparam B the result type of the function `f`. + * @return a new value resulting from applying the given function + * `f` to this value. 
+ */ + def pipe[B](f: A => B): B = f(self) +} diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala index ef2e47a33e10..a2568c5a9c69 100644 --- a/src/library/scala/util/DynamicVariable.scala +++ b/src/library/scala/util/DynamicVariable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -37,13 +37,10 @@ import java.lang.InheritableThreadLocal * of the stack of bindings from the parent thread, and * from then on the bindings for the new thread * are independent of those for the original thread. - * - * @author Lex Spoon - * @since 2.6 */ class DynamicVariable[T](init: T) { - private val tl = new InheritableThreadLocal[T] { - override def initialValue = init.asInstanceOf[T with AnyRef] + private[this] val tl = new InheritableThreadLocal[T] { + override def initialValue: T with AnyRef = init.asInstanceOf[T with AnyRef] } /** Retrieve the current value */ diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index d330fe52e572..0542decbcf46 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,7 +13,7 @@ package scala package util -/** Represents a value of one of two possible types (a disjoint union.) +/** Represents a value of one of two possible types (a disjoint union). * An instance of `Either` is an instance of either [[scala.util.Left]] or [[scala.util.Right]]. 
* * A common use of `Either` is as an alternative to [[scala.Option]] for dealing @@ -51,10 +51,10 @@ package util * * Since `Either` defines the methods `map` and `flatMap`, it can also be used in for comprehensions: * {{{ - * val right1 = Right(1) : Right[Double, Int] + * val right1 = Right(1) : Right[Double, Int] * val right2 = Right(2) * val right3 = Right(3) - * val left23 = Left(23.0) : Left[Double, Int] + * val left23 = Left(23.0) : Left[Double, Int] * val left42 = Left(42.0) * * for { @@ -85,6 +85,14 @@ package util * // Similarly, refutable patterns are not supported: * for (x: Int <- right1) yield x * // error: value withFilter is not a member of Right[Double,Int] + * + * // To use a filtered value, convert to an Option first, + * // which drops the Left case, as None contains no value: + * for { + * i <- right1.toOption + * if i > 0 + * } yield i + * * }}} * * Since `for` comprehensions use `map` and `flatMap`, the types @@ -116,9 +124,6 @@ package util * } yield x + y + z * // Left(42.0), but unexpectedly a `Either[Double,String]` * }}} - * - * @author Tony Morris, Workingmouse - * @since 2.7 */ sealed abstract class Either[+A, +B] extends Product with Serializable { /** Projects this `Either` as a `Left`. @@ -171,6 +176,7 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * * Because `Either` is right-biased, this method is not normally needed. */ + @deprecated("Either is now right-biased, use methods directly on Either", "2.13.0") def right = Either.RightProjection(this) /** Applies `fa` if this is a `Left` or `fb` if this is a `Right`. @@ -283,6 +289,19 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { case _ => or } + /** Returns this `Right` or the given argument if this is a `Left`. 
+ * + * {{{ + * Right(1) orElse Left(2) // Right(1) + * Left(1) orElse Left(2) // Left(2) + * Left(1) orElse Left(2) orElse Right(3) // Right(3) + * }}} + */ + def orElse[A1 >: A, B1 >: B](or: => Either[A1, B1]): Either[A1, B1] = this match { + case Right(_) => this + case _ => or + } + /** Returns `true` if this is a `Right` and its value is equal to `elem` (as determined by `==`), * returns `false` otherwise. * @@ -342,6 +361,24 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { case _ => this.asInstanceOf[Either[A1, B1]] } + + /** Returns the right value if this is right + * or this value if this is left + * + * @example {{{ + * val l: Either[String, Either[String, Int]] = Left("pancake") + * val rl: Either[String, Either[String, Int]] = Right(Left("flounder")) + * val rr: Either[String, Either[String, Int]] = Right(Right(7)) + * + * l.flatten //Either[String, Int]: Left("pancake") + * rl.flatten //Either[String, Int]: Left("flounder") + * rr.flatten //Either[String, Int]: Right(7) + * }}} + * + * Equivalent to `flatMap(id => id)` + */ + def flatten[A1 >: A, B1](implicit ev: B <:< Either[A1, B1]): Either[A1, B1] = flatMap(ev) + /** The given function is applied if this is a `Right`. * * {{{ @@ -421,25 +458,37 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { } /** The left side of the disjoint union, as opposed to the [[scala.util.Right]] side. 
- * - * @author Tony Morris, Workingmouse */ -final case class Left[+A, +B](@deprecatedName('a, "2.12.0") value: A) extends Either[A, B] { +final case class Left[+A, +B](value: A) extends Either[A, B] { def isLeft = true def isRight = false - @deprecated("Use .value instead.", "2.12.0") def a: A = value + /** + * Upcasts this `Left[A, B]` to `Either[A, B1]` + * {{{ + * Left(1) // Either[Int, Nothing] + * Left(1).withRight[String] // Either[Int, String] + * }}} + */ + def withRight[B1 >: B]: Either[A, B1] = this + } /** The right side of the disjoint union, as opposed to the [[scala.util.Left]] side. - * - * @author Tony Morris, Workingmouse */ -final case class Right[+A, +B](@deprecatedName('b, "2.12.0") value: B) extends Either[A, B] { +final case class Right[+A, +B](value: B) extends Either[A, B] { def isLeft = false def isRight = true - @deprecated("Use .value instead.", "2.12.0") def b: B = value + /** + * Upcasts this `Right[A, B]` to `Either[A1, B]` + * {{{ + * Right("x") // Either[Nothing, String] + * Right("x").withLeft[Int] // Either[Int, String] + * }}} + */ + def withLeft[A1 >: A]: Either[A1, B] = this + } object Either { @@ -477,11 +526,10 @@ object Either { /** Projects an `Either` into a `Left`. * - * @author Tony Morris, Workingmouse * @see [[scala.util.Either#left]] */ final case class LeftProjection[+A, +B](e: Either[A, B]) { - /** Returns the value from this `Left` or throws `java.util.NoSuchElementException` + /** Returns the value from this `Left` or throws `NoSuchElementException` * if this is a `Right`. 
* * {{{ @@ -489,8 +537,9 @@ object Either { * Right(12).left.get // NoSuchElementException * }}} * - * @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]] + * @throws NoSuchElementException if the projection is [[scala.util.Right]] */ + @deprecated("use `Either.swap.getOrElse` instead", "2.13.0") def get: A = e match { case Left(a) => a case _ => throw new NoSuchElementException("Either.left.get on Right") @@ -530,7 +579,7 @@ object Either { * Right(12).left.forall(_ > 10) // true * }}} */ - def forall(@deprecatedName('f) p: A => Boolean): Boolean = e match { + def forall(p: A => Boolean): Boolean = e match { case Left(a) => p(a) case _ => true } @@ -544,7 +593,7 @@ object Either { * Right(12).left.exists(_ > 10) // false * }}} */ - def exists(@deprecatedName('f) p: A => Boolean): Boolean = e match { + def exists(p: A => Boolean): Boolean = e match { case Left(a) => p(a) case _ => false } @@ -583,11 +632,26 @@ object Either { * Right(12).left.filter(_ > 10) // None * }}} */ + @deprecated("Use `filterToOption`, which more accurately reflects the return type", "2.13.0") def filter[B1](p: A => Boolean): Option[Either[A, B1]] = e match { case x @ Left(a) if p(a) => Some(x.asInstanceOf[Either[A, B1]]) case _ => None } + /** Returns `None` if this is a `Right` or if the given predicate + * `p` does not hold for the left value, otherwise, returns a `Left`. + * + * {{{ + * Left(12).left.filterToOption(_ > 10) // Some(Left(12)) + * Left(7).left.filterToOption(_ > 10) // None + * Right(12).left.filterToOption(_ > 10) // None + * }}} + */ + def filterToOption[B1](p: A => Boolean): Option[Either[A, B1]] = e match { + case x @ Left(a) if p(a) => Some(x.asInstanceOf[Either[A, B1]]) + case _ => None + } + /** Returns a `Seq` containing the `Left` value if it exists or an empty * `Seq` if this is a `Right`. * @@ -620,21 +684,21 @@ object Either { * Because `Either` is already right-biased, this class is not normally needed. 
* (It is retained in the library for now for easy cross-compilation between Scala * 2.11 and 2.12.) - * - * @author Tony Morris, Workingmouse */ + @deprecated("Either is now right-biased, calls to `right` should be removed", "2.13.0") final case class RightProjection[+A, +B](e: Either[A, B]) { /** Returns the value from this `Right` or throws - * `java.util.NoSuchElementException` if this is a `Left`. + * `NoSuchElementException` if this is a `Left`. * * {{{ * Right(12).right.get // 12 * Left(12).right.get // NoSuchElementException * }}} * - * @throws java.util.NoSuchElementException if the projection is `Left`. + * @throws NoSuchElementException if the projection is `Left`. */ + @deprecated("Use `Either.toOption.get` instead", "2.13.0") def get: B = e match { case Right(b) => b case _ => throw new NoSuchElementException("Either.right.get on Left") @@ -688,7 +752,7 @@ object Either { * Left(12).right.exists(_ > 10) // false * }}} */ - def exists(@deprecatedName('f) p: B => Boolean): Boolean = e match { + def exists(p: B => Boolean): Boolean = e match { case Right(b) => p(b) case _ => false } @@ -724,11 +788,27 @@ object Either { * Left(12).right.filter(_ > 10) // None * }}} */ + @deprecated("Use `filterToOption`, which more accurately reflects the return type", "2.13.0") def filter[A1](p: B => Boolean): Option[Either[A1, B]] = e match { case Right(b) if p(b) => Some(Right(b)) case _ => None } + /** Returns `None` if this is a `Left` or if the + * given predicate `p` does not hold for the right value, + * otherwise, returns a `Right`. 
+ * + * {{{ + * Right(12).right.filterToOption(_ > 10) // Some(Right(12)) + * Right(7).right.filterToOption(_ > 10) // None + * Left(12).right.filterToOption(_ > 10) // None + * }}} + */ + def filterToOption[A1](p: B => Boolean): Option[Either[A1, B]] = e match { + case r @ Right(b) if p(b) => Some(r.asInstanceOf[Either[A1, B]]) + case _ => None + } + /** Returns a `Seq` containing the `Right` value if * it exists or an empty `Seq` if this is a `Left`. * diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala deleted file mode 100644 index 35571ad4bc13..000000000000 --- a/src/library/scala/util/MurmurHash.scala +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package util - -/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm - * (32 bit version); reference: [[https://github.com/aappleby/smhasher]] - * - * This is the hash used by collections and case classes (including - * tuples). - * - * @author Rex Kerr - * @since 2.9 - */ - -import java.lang.Integer.{ rotateLeft => rotl } -import scala.collection.Iterator - -/** A class designed to generate well-distributed non-cryptographic - * hashes. It is designed to be passed to a collection's foreach method, - * or can take individual hash values with append. Its own hash code is - * set equal to the hash code of whatever it is hashing. 
- */ -@deprecated("use the object MurmurHash3 instead", "2.10.0") -class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) { - import MurmurHash._ - - private var h = startHash(seed) - private var c = hiddenMagicA - private var k = hiddenMagicB - private var hashed = false - private var hashvalue = h - - /** Begin a new hash using the same seed. */ - def reset() { - h = startHash(seed) - c = hiddenMagicA - k = hiddenMagicB - hashed = false - } - - /** Incorporate the hash value of one item. */ - def apply(t: T) { - h = extendHash(h,t.##,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Incorporate a known hash value. */ - def append(i: Int) { - h = extendHash(h,i,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Retrieve the hash value */ - def hash = { - if (!hashed) { - hashvalue = finalizeHash(h) - hashed = true - } - hashvalue - } - override def hashCode = hash -} - -/** An object designed to generate well-distributed non-cryptographic - * hashes. It is designed to hash a collection of integers; along with - * the integers to hash, it generates two magic streams of integers to - * increase the distribution of repetitive input sequences. Thus, - * three methods need to be called at each step (to start and to - * incorporate a new integer) to update the values. Only one method - * needs to be called to finalize the hash. - */ -@deprecated("use the object MurmurHash3 instead", "2.10.0") -// NOTE: Used by sbt 0.13.0-M2 and below -object MurmurHash { - // Magic values used for MurmurHash's 32 bit hash. - // Don't change these without consulting a hashing expert! 
- final private val visibleMagic = 0x971e137b - final private val hiddenMagicA = 0x95543787 - final private val hiddenMagicB = 0x2ad7eb25 - final private val visibleMixer = 0x52dce729 - final private val hiddenMixerA = 0x7b7d159c - final private val hiddenMixerB = 0x6bce6396 - final private val finalMixer1 = 0x85ebca6b - final private val finalMixer2 = 0xc2b2ae35 - - // Arbitrary values used for hashing certain classes - final private val seedString = 0xf7ca7fd2 - final private val seedArray = 0x3c074a61 - - /** The first 23 magic integers from the first stream are stored here */ - val storedMagicA = - Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray - - /** The first 23 magic integers from the second stream are stored here */ - val storedMagicB = - Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray - - /** Begin a new hash with a seed value. */ - def startHash(seed: Int) = seed ^ visibleMagic - - /** The initial magic integers in the first stream. */ - def startMagicA = hiddenMagicA - - /** The initial magic integer in the second stream. */ - def startMagicB = hiddenMagicB - - /** Incorporates a new value into an existing hash. 
- * - * @param hash the prior hash value - * @param value the new value to incorporate - * @param magicA a magic integer from the stream - * @param magicB a magic integer from a different stream - * @return the updated hash value - */ - def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int) = { - (hash ^ rotl(value*magicA,11)*magicB)*3 + visibleMixer - } - - /** Given a magic integer from the first stream, compute the next */ - def nextMagicA(magicA: Int) = magicA*5 + hiddenMixerA - - /** Given a magic integer from the second stream, compute the next */ - def nextMagicB(magicB: Int) = magicB*5 + hiddenMixerB - - /** Once all hashes have been incorporated, this performs a final mixing */ - def finalizeHash(hash: Int) = { - var i = (hash ^ (hash>>>16)) - i *= finalMixer1 - i ^= (i >>> 13) - i *= finalMixer2 - i ^= (i >>> 16) - i - } - - /** Compute a high-quality hash of an array */ - def arrayHash[@specialized T](a: Array[T]) = { - var h = startHash(a.length * seedArray) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j < a.length) { - h = extendHash(h, a(j).##, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 1 - } - finalizeHash(h) - } - - /** Compute a high-quality hash of a string */ - def stringHash(s: String) = { - var h = startHash(s.length * seedString) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j+1 < s.length) { - val i = (s.charAt(j)<<16) + s.charAt(j+1) - h = extendHash(h,i,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 2 - } - if (j < s.length) h = extendHash(h,s.charAt(j).toInt,c,k) - finalizeHash(h) - } - - /** Compute a hash that is symmetric in its arguments--that is, - * where the order of appearance of elements does not matter. - * This is useful for hashing sets, for example. 
- */ - def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = { - var a,b,n = 0 - var c = 1 - xs.seq.foreach(i => { - val h = i.## - a += h - b ^= h - if (h != 0) c *= h - n += 1 - }) - var h = startHash(seed * n) - h = extendHash(h, a, storedMagicA(0), storedMagicB(0)) - h = extendHash(h, b, storedMagicA(1), storedMagicB(1)) - h = extendHash(h, c, storedMagicA(2), storedMagicB(2)) - finalizeHash(h) - } -} diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index b53e67eac577..24dee49ae951 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,13 +13,14 @@ package scala package util -import java.io.{ IOException, PrintWriter } -import java.util.jar.Attributes.{ Name => AttributeName } +import java.io.{IOException, PrintWriter} +import java.util.jar.Attributes.{Name => AttributeName} +import scala.annotation.tailrec /** Loads `library.properties` from the jar. */ object Properties extends PropertiesTrait { - protected def propCategory = "library" - protected def pickJarBasedOn = classOf[Option[_]] + protected def propCategory = "library" + protected def pickJarBasedOn: Class[Option[_]] = classOf[Option[_]] /** Scala manifest attributes. 
*/ @@ -52,7 +53,7 @@ private[scala] trait PropertiesTrait { def propIsSet(name: String) = System.getProperty(name) != null def propIsSetTo(name: String, value: String) = propOrNull(name) == value - def propOrElse(name: String, alt: String) = System.getProperty(name, alt) + def propOrElse(name: String, alt: => String) = Option(System.getProperty(name)).getOrElse(alt) def propOrEmpty(name: String) = propOrElse(name, "") def propOrNull(name: String) = propOrElse(name, null) def propOrNone(name: String) = Option(propOrNull(name)) @@ -60,55 +61,38 @@ private[scala] trait PropertiesTrait { def setProp(name: String, value: String) = System.setProperty(name, value) def clearProp(name: String) = System.clearProperty(name) - def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt + def envOrElse(name: String, alt: => String) = Option(System getenv name) getOrElse alt def envOrNone(name: String) = Option(System getenv name) - def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt + def envOrSome(name: String, alt: => Option[String]) = envOrNone(name) orElse alt // for values based on propFilename, falling back to System properties - def scalaPropOrElse(name: String, alt: String): String = scalaPropOrNone(name).getOrElse(alt) + def scalaPropOrElse(name: String, alt: => String): String = scalaPropOrNone(name).getOrElse(alt) def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)).orElse(propOrNone("scala." + name)) - /** The numeric portion of the runtime Scala version, if this is a final - * release. If for instance the versionString says "version 2.9.0.final", - * this would return Some("2.9.0"). - * - * @return Some(version) if this is a final release build, None if - * it is an RC, Beta, etc. or was built from source, or if the version - * cannot be read. + /** The version of the Scala runtime, if this is not a snapshot. 
*/ - val releaseVersion = - for { - v <- scalaPropOrNone("maven.version.number") - if !(v endsWith "-SNAPSHOT") - } yield v - - /** The development Scala version, if this is not a final release. - * The precise contents are not guaranteed, but it aims to provide a - * unique repository identifier (currently the svn revision) in the - * fourth dotted segment if the running version was built from source. - * - * @return Some(version) if this is a non-final version, None if this - * is a final release or the version cannot be read. + val releaseVersion = scalaPropOrNone("maven.version.number").filterNot(_.endsWith("-SNAPSHOT")) + + /** The version of the Scala runtime, if this is a snapshot. */ - val developmentVersion = - for { - v <- scalaPropOrNone("maven.version.number") - if v endsWith "-SNAPSHOT" - ov <- scalaPropOrNone("version.number") - } yield ov - - /** Either the development or release version if known, otherwise - * the empty string. + val developmentVersion = scalaPropOrNone("maven.version.number").filter(_.endsWith("-SNAPSHOT")).flatMap(_ => scalaPropOrNone("version.number")) + + /** The version of the Scala runtime, or the empty string if unknown. + * + * Note that the version of the Scala library need not correlate with the version of the Scala compiler + * used to emit either the library or user code. + * + * For example, Scala 3.0 and 3.1 use the Scala 2.13 library, which is reflected in this version string. + * For the Dotty version, see `dotty.tools.dotc.config.Properties.versionNumberString`. */ def versionNumberString = scalaPropOrEmpty("version.number") - /** The version number of the jar this was loaded from plus "version " prefix, - * or "version (unknown)" if it cannot be determined. + /** A verbose alternative to [[versionNumberString]]. 
*/ - val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") - val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2024, LAMP/EPFL and Lightbend, Inc.") + val versionString = s"version ${scalaPropOrElse("version.number", "(unknown)")}" + val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2025, LAMP/EPFL and Lightbend, Inc. dba Akka") /** This is the encoding to use reading in source files, overridden with -encoding. * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. @@ -146,19 +130,19 @@ private[scala] trait PropertiesTrait { /* Some derived values. */ /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */ - def isWin = osName startsWith "Windows" - // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for + lazy val isWin = osName.startsWith("Windows") + // See https://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110. /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */ - def isMac = osName startsWith "Mac OS X" + lazy val isMac = osName.startsWith("Mac OS X") /** Returns `true` iff the underlying operating system is a Linux distribution. */ - def isLinux = osName startsWith "Linux" + lazy val isLinux = osName.startsWith("Linux") /* Some runtime values. */ - private[scala] def isAvian = javaVmName contains "Avian" + private[scala] lazy val isAvian = javaVmName.contains("Avian") private[scala] def coloredOutputEnabled: Boolean = propOrElse("scala.color", "auto") match { - case "auto" => !isWin && consoleIsTerminal + case "auto" => consoleIsTerminal case s => "" == s || "true".equalsIgnoreCase(s) } @@ -177,8 +161,7 @@ private[scala] trait PropertiesTrait { // and finally the system property based javaHome. 
def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) - // private[scala] for 2.12 - private[this] def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString" + private[scala] def versionFor(command: String) = s"Scala $command $versionString -- $copyrightString" def versionMsg = versionFor(propCategory) def scalaCmd = if (isWin) "scala.bat" else "scala" @@ -218,6 +201,7 @@ private[scala] trait PropertiesTrait { val n = if (depth < 2 && r.isEmpty) -2 else s.substring(0, i).toInt (n, r) } + @tailrec def compareVersions(s: String, v: String, depth: Int): Int = { if (depth >= 3) 0 else { @@ -235,8 +219,13 @@ private[scala] trait PropertiesTrait { } } + /** Compares the given specification version to the major version of the platform. + * @param version a specification major version number + */ + def isJavaAtLeast(version: Int): Boolean = isJavaAtLeast(math.max(version, 0).toString) + // provide a main method so version info can be obtained by running this - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val writer = new PrintWriter(Console.err, true) writer println versionMsg } diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala index 116391cf434a..84b44fcc91f5 100644 --- a/src/library/scala/util/Random.scala +++ b/src/library/scala/util/Random.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,15 +13,12 @@ package scala package util +import scala.annotation.{migration, tailrec} import scala.collection.mutable.ArrayBuffer -import scala.collection.generic.CanBuildFrom -import scala.collection.immutable.{ List, Stream } -import scala.language.{implicitConversions, higherKinds} +import scala.collection.BuildFrom +import scala.collection.immutable.LazyList +import scala.language.implicitConversions -/** - * @author Stephane Micheloud - * - */ class Random(val self: java.util.Random) extends AnyRef with Serializable { /** Creates a new random number generator using a single long seed. */ def this(seed: Long) = this(new java.util.Random(seed)) @@ -40,18 +37,47 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { /** Generates random bytes and places them into a user-supplied byte * array. */ - def nextBytes(bytes: Array[Byte]) { self.nextBytes(bytes) } - + def nextBytes(bytes: Array[Byte]): Unit = { self.nextBytes(bytes) } + + /** Generates `n` random bytes and returns them in a new array. */ + def nextBytes(n: Int): Array[Byte] = { + val bytes = new Array[Byte](0 max n) + self.nextBytes(bytes) + bytes + } + /** Returns the next pseudorandom, uniformly distributed double value * between 0.0 and 1.0 from this random number generator's sequence. */ def nextDouble(): Double = self.nextDouble() + /** Returns the next pseudorandom, uniformly distributed double value + * between min (inclusive) and max (exclusive) from this random number generator's sequence. + */ + def between(minInclusive: Double, maxExclusive: Double): Double = { + require(minInclusive < maxExclusive, "Invalid bounds") + + val next = nextDouble() * (maxExclusive - minInclusive) + minInclusive + if (next < maxExclusive) next + else Math.nextAfter(maxExclusive, Double.NegativeInfinity) + } + /** Returns the next pseudorandom, uniformly distributed float value * between 0.0 and 1.0 from this random number generator's sequence. 
*/ def nextFloat(): Float = self.nextFloat() + /** Returns the next pseudorandom, uniformly distributed float value + * between min (inclusive) and max (exclusive) from this random number generator's sequence. + */ + def between(minInclusive: Float, maxExclusive: Float): Float = { + require(minInclusive < maxExclusive, "Invalid bounds") + + val next = nextFloat() * (maxExclusive - minInclusive) + minInclusive + if (next < maxExclusive) next + else Math.nextAfter(maxExclusive, Float.NegativeInfinity) + } + /** Returns the next pseudorandom, Gaussian ("normally") distributed * double value with mean 0.0 and standard deviation 1.0 from this * random number generator's sequence. @@ -69,11 +95,90 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { */ def nextInt(n: Int): Int = self.nextInt(n) + /** Returns a pseudorandom, uniformly distributed int value between min + * (inclusive) and the specified value max (exclusive), drawn from this + * random number generator's sequence. + */ + def between(minInclusive: Int, maxExclusive: Int): Int = { + require(minInclusive < maxExclusive, "Invalid bounds") + + val difference = maxExclusive - minInclusive + if (difference >= 0) { + nextInt(difference) + minInclusive + } else { + /* The interval size here is greater than Int.MaxValue, + * so the loop will exit with a probability of at least 1/2. + */ + @tailrec + def loop(): Int = { + val n = nextInt() + if (n >= minInclusive && n < maxExclusive) n + else loop() + } + loop() + } + } + /** Returns the next pseudorandom, uniformly distributed long value * from this random number generator's sequence. */ def nextLong(): Long = self.nextLong() + /** Returns a pseudorandom, uniformly distributed long value between 0 + * (inclusive) and the specified value (exclusive), drawn from this + * random number generator's sequence. + */ + def nextLong(n: Long): Long = { + require(n > 0, "n must be positive") + + /* + * Divide n by two until small enough for nextInt. 
On each + * iteration (at most 31 of them but usually much less), + * randomly choose both whether to include high bit in result + * (offset) and whether to continue with the lower vs upper + * half (which makes a difference only if odd). + */ + + var offset = 0L + var _n = n + + while (_n >= Integer.MAX_VALUE) { + val bits = nextInt(2) + val halfn = _n >>> 1 + val nextn = + if ((bits & 2) == 0) halfn + else _n - halfn + if ((bits & 1) == 0) + offset += _n - nextn + _n = nextn + } + offset + nextInt(_n.toInt) + } + + /** Returns a pseudorandom, uniformly distributed long value between min + * (inclusive) and the specified value max (exclusive), drawn from this + * random number generator's sequence. + */ + def between(minInclusive: Long, maxExclusive: Long): Long = { + require(minInclusive < maxExclusive, "Invalid bounds") + + val difference = maxExclusive - minInclusive + if (difference >= 0) { + nextLong(difference) + minInclusive + } else { + /* The interval size here is greater than Long.MaxValue, + * so the loop will exit with a probability of at least 1/2. + */ + @tailrec + def loop(): Long = { + val n = nextLong() + if (n >= minInclusive && n < maxExclusive) n + else loop() + } + loop() + } + } + /** Returns a pseudorandomly generated String. 
This routine does * not take any measures to preserve the randomness of the distribution * in the face of factors like unicode's variable-length encoding, @@ -83,14 +188,23 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { * @param length the desired length of the String * @return the String */ - def nextString(length: Int) = { - def safeChar() = { + def nextString(length: Int): String = { + def safeChar(): Char = { val surrogateStart: Int = 0xD800 val res = nextInt(surrogateStart - 1) + 1 res.toChar } - - List.fill(length)(safeChar()).mkString + if (length <= 0) { + "" + } else { + val arr = new Array[Char](length) + var i = 0 + while (i < length) { + arr(i) = safeChar() + i += 1 + } + new String(arr) + } } /** Returns the next pseudorandom, uniformly distributed value @@ -102,16 +216,16 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { (self.nextInt(high - low) + low).toChar } - def setSeed(seed: Long) { self.setSeed(seed) } + def setSeed(seed: Long): Unit = { self.setSeed(seed) } /** Returns a new collection of the same type in a randomly chosen order. * * @return the shuffled collection */ - def shuffle[T, CC[X] <: TraversableOnce[X]](xs: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = { + def shuffle[T, C](xs: IterableOnce[T])(implicit bf: BuildFrom[xs.type, T, C]): C = { val buf = new ArrayBuffer[T] ++= xs - def swap(i1: Int, i2: Int) { + def swap(i1: Int, i2: Int): Unit = { val tmp = buf(i1) buf(i1) = buf(i2) buf(i2) = tmp @@ -122,29 +236,26 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { swap(n - 1, k) } - (bf(xs) ++= buf).result() + (bf.newBuilder(xs) ++= buf).result() } - /** Returns a Stream of pseudorandomly chosen alphanumeric characters, + /** Returns a LazyList of pseudorandomly chosen alphanumeric characters, * equally chosen from A-Z, a-z, and 0-9. 
- * - * @since 2.8 */ - def alphanumeric: Stream[Char] = { + @migration("`alphanumeric` returns a LazyList instead of a Stream", "2.13.0") + def alphanumeric: LazyList[Char] = { def nextAlphaNum: Char = { val chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" chars charAt (self nextInt chars.length) } - Stream continually nextAlphaNum + LazyList continually nextAlphaNum } } /** The object `Random` offers a default implementation * of scala.util.Random and random-related convenience methods. - * - * @since 2.8 */ object Random extends Random { diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index 97811a3c69fd..7e2da2434f82 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -34,10 +34,6 @@ import scala.math.Ordering * Note also that high-performance non-default sorts for numeric types * are not provided. If this is required, it is advisable to investigate * other libraries that cover this use case. - * - * @author Ross Judson - * @author Adriaan Moors - * @author Rex Kerr */ object Sorting { /** Sort an array of Doubles using `java.util.Arrays.sort`. */ @@ -70,8 +66,8 @@ object Sorting { else if (ord.compare(a(i0), a(iK)) < 0) i0 else - if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1 - else iK + if (ord.compare(a(iN - 1), a(iK)) <= 0) iK + else iN - 1 val pivot = a(pL) // pL is the start of the pivot block; move it into the middle if needed if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK } @@ -217,71 +213,90 @@ object Sorting { } // Why would you even do this? 
- private def booleanSort(a: Array[Boolean]): Unit = { - var i = 0 + private def booleanSort(a: Array[Boolean], from: Int, until: Int): Unit = { + var i = from var n = 0 - while (i < a.length) { + while (i < until) { if (!a(i)) n += 1 i += 1 } i = 0 while (i < n) { - a(i) = false + a(from + i) = false i += 1 } - while (i < a.length) { - a(i) = true + while (from + i < until) { + a(from + i) = true i += 1 } } // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible) // Maybe also rename all these methods to `sort`. - @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match { + @inline private def sort[T](a: Array[T], from: Int, until: Int, ord: Ordering[T]): Unit = (a: @unchecked) match { case _: Array[AnyRef] => // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes) if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering") - java.util.Arrays.sort(a, ord) - case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord) - case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! - case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord) - case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! 
- case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord) - case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord) - case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord) - case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord) + java.util.Arrays.sort(a, from, until, ord) + case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a, from, until) else mergeSort[Int](a, from, until, ord) + case a: Array[Double] => mergeSort[Double](a, from, until, ord) // Because not all NaNs are identical, stability is meaningful! + case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a, from, until) else mergeSort[Long](a, from, until, ord) + case a: Array[Float] => mergeSort[Float](a, from, until, ord) // Because not all NaNs are identical, stability is meaningful! + case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a, from, until) else mergeSort[Char](a, from, until, ord) + case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a, from, until) else mergeSort[Byte](a, from, until, ord) + case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a, from, until) else mergeSort[Short](a, from, until, ord) + case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a, from, until) else mergeSort[Boolean](a, from, until, ord) // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case. case null => throw new NullPointerException } - // TODO: remove unnecessary ClassTag (not binary compatible) - /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. 
*/ - def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K]) + /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. + * Uses `java.util.Arrays.sort` unless `K` is a primitive type. This is the same as `stableSort(a, 0, a.length)`. */ + @`inline` def stableSort[K: Ordering](a: Array[K]): Unit = stableSort(a, 0, a.length) + + /** Sort array `a` or a part of it using the Ordering on its elements, preserving the original ordering where possible. + * Uses `java.util.Arrays.sort` unless `K` is a primitive type. + * + * @param a The array to sort + * @param from The first index in the array to sort + * @param until The last index (exclusive) in the array to sort + */ + def stableSort[K: Ordering](a: Array[K], from: Int, until: Int): Unit = sort(a, from, until, Ordering[K]) + + /** Sort array `a` using function `f` that computes the less-than relation for each element. + * Uses `java.util.Arrays.sort` unless `K` is a primitive type. This is the same as `stableSort(a, f, 0, a.length)`. */ + @`inline` def stableSort[K](a: Array[K], f: (K, K) => Boolean): Unit = stableSort(a, f, 0, a.length) - // TODO: Remove unnecessary ClassTag (not binary compatible) // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) - /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f) + /** Sort array `a` or a part of it using function `f` that computes the less-than relation for each element. + * Uses `java.util.Arrays.sort` unless `K` is a primitive type. 
+ * + * @param a The array to sort + * @param f A function that computes the less-than relation for each element + * @param from The first index in the array to sort + * @param until The last index (exclusive) in the array to sort + */ + def stableSort[K](a: Array[K], f: (K, K) => Boolean, from: Int, until: Int): Unit = sort(a, from, until, Ordering fromLessThan f) /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = { + def stableSort[K: ClassTag: Ordering](a: scala.collection.Seq[K]): Array[K] = { val ret = a.toArray - sort(ret, Ordering[K]) + sort(ret, 0, ret.length, Ordering[K]) ret } // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ - def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = { + def stableSort[K: ClassTag](a: scala.collection.Seq[K], f: (K, K) => Boolean): Array[K] = { val ret = a.toArray - sort(ret, Ordering fromLessThan f) + sort(ret, 0, ret.length, Ordering fromLessThan f) ret } /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. 
*/ - def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = { + def stableSort[K: ClassTag, M: Ordering](a: scala.collection.Seq[K], f: K => M): Array[K] = { val ret = a.toArray - sort(ret, Ordering[M] on f) + sort(ret, 0, ret.length, Ordering[M] on f) ret } } diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index 6ae8eadacb8f..c17d457c9fe5 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,16 +13,19 @@ package scala package util +import scala.runtime.Statics import scala.util.control.NonFatal /** - * The `Try` type represents a computation that may either result in an exception, or return a - * successfully computed value. It's similar to, but semantically different from the [[scala.util.Either]] type. + * The `Try` type represents a computation that may fail during evaluation by raising an exception. + * It holds either a successfully computed value or the exception that was thrown. + * This is similar to the [[scala.util.Either]] type, but with different semantics. * - * Instances of `Try[T]`, are either an instance of [[scala.util.Success]][T] or [[scala.util.Failure]][T]. + * Instances of `Try[T]` are an instance of either [[scala.util.Success]][T] or [[scala.util.Failure]][T]. * - * For example, `Try` can be used to perform division on a user-defined input, without the need to do explicit - * exception-handling in all of the places that an exception might occur. + * For example, consider a computation that performs division on user-defined input. + * `Try` can reduce or eliminate the need for explicit exception handling in all of the places + * where an exception might be thrown. 
* * Example: * {{{ @@ -57,11 +60,6 @@ import scala.util.control.NonFatal * Serious system errors, on the other hand, will be thrown. * * ''Note:'': all Try combinators will catch exceptions and return failure unless otherwise specified in the documentation. - * - * `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack. - * - * @author based on Twitter's original implementation in com.twitter.util. - * @since 2.10 */ sealed abstract class Try[+T] extends Product with Serializable { @@ -136,8 +134,7 @@ sealed abstract class Try[+T] extends Product with Serializable { * collection" contract even though it seems unlikely to matter much in a * collection with max size 1. */ - @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12.0") - class WithFilter(p: T => Boolean) { + final class WithFilter(p: T => Boolean) { def map[U](f: T => U): Try[U] = Try.this filter p map f def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f def foreach[U](f: T => U): Unit = Try.this filter p foreach f @@ -148,13 +145,13 @@ sealed abstract class Try[+T] extends Product with Serializable { * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`. * This is like `flatMap` for the exception. */ - def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] + def recoverWith[U >: T](pf: PartialFunction[Throwable, Try[U]]): Try[U] /** * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`. * This is like map for the exception. */ - def recover[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, U]): Try[U] + def recover[U >: T](pf: PartialFunction[Throwable, U]): Try[U] /** * Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`. 
@@ -189,7 +186,7 @@ sealed abstract class Try[+T] extends Product with Serializable { * then `fa` is applied with this exception. * * @example {{{ - * val result: Try[Throwable, Int] = Try { string.toInt } + * val result: Try[Int] = Try { string.toInt } * log(result.fold( * ex => "Operation failed with " + ex, * v => "Operation produced value: " + v @@ -205,12 +202,21 @@ sealed abstract class Try[+T] extends Product with Serializable { } object Try { - /** Constructs a `Try` using the by-name parameter. This - * method will ensure any non-fatal exception is caught and a - * `Failure` object is returned. + /** Constructs a `Try` using the by-name parameter as a result value. + * + * The evaluation of `r` is attempted once. + * + * Any non-fatal exception is caught and results in a `Failure` + * that holds the exception. + * + * @param r the result value to compute + * @return the result of evaluating the value, as a `Success` or `Failure` */ def apply[T](r: => T): Try[T] = - try Success(r) catch { + try { + val r1 = r + Success(r1) + } catch { case NonFatal(e) => Failure(e) } } @@ -230,17 +236,26 @@ final case class Failure[+T](exception: Throwable) extends Try[T] { override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]] override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]] override def filter(p: T => Boolean): Try[T] = this - override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = - try { if (pf isDefinedAt exception) Success(pf(exception)) else this } catch { case NonFatal(e) => Failure(e) } - override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = - try { if (pf isDefinedAt exception) pf(exception) else this } catch { case NonFatal(e) => Failure(e) } + override def recover[U >: T](pf: PartialFunction[Throwable, U]): Try[U] = { + val marker = Statics.pfMarker + try { + val v = pf.applyOrElse(exception, (x: Throwable) => 
marker) + if (marker ne v.asInstanceOf[AnyRef]) Success(v.asInstanceOf[U]) else this + } catch { case NonFatal(e) => Failure(e) } + } + override def recoverWith[U >: T](pf: PartialFunction[Throwable, Try[U]]): Try[U] = { + val marker = Statics.pfMarker + try { + val v = pf.applyOrElse(exception, (x: Throwable) => marker) + if (marker ne v.asInstanceOf[AnyRef]) v.asInstanceOf[Try[U]] else this + } catch { case NonFatal(e) => Failure(e) } + } override def failed: Try[Throwable] = Success(exception) override def toOption: Option[T] = None override def toEither: Either[Throwable, T] = Left(exception) override def fold[U](fa: Throwable => U, fb: T => U): U = fa(exception) } - final case class Success[+T](value: T) extends Try[T] { override def isFailure: Boolean = false override def isSuccess: Boolean = true @@ -253,17 +268,20 @@ final case class Success[+T](value: T) extends Try[T] { override def foreach[U](f: T => U): Unit = f(value) override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this flatMap s override def map[U](f: T => U): Try[U] = Try[U](f(value)) - override def collect[U](pf: PartialFunction[T, U]): Try[U] = + override def collect[U](pf: PartialFunction[T, U]): Try[U] = { + val marker = Statics.pfMarker try { - if (pf isDefinedAt value) Success(pf(value)) + val v = pf.applyOrElse(value, ((x: T) => marker).asInstanceOf[Function[T, U]]) + if (marker ne v.asInstanceOf[AnyRef]) Success(v) else Failure(new NoSuchElementException("Predicate does not hold for " + value)) } catch { case NonFatal(e) => Failure(e) } + } override def filter(p: T => Boolean): Try[T] = try { if (p(value)) this else Failure(new NoSuchElementException("Predicate does not hold for " + value)) } catch { case NonFatal(e) => Failure(e) } - override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = this - override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = this + override 
def recover[U >: T](pf: PartialFunction[Throwable, U]): Try[U] = this + override def recoverWith[U >: T](pf: PartialFunction[Throwable, Try[U]]): Try[U] = this override def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed")) override def toOption: Option[T] = Some(value) override def toEither: Either[Throwable, T] = Right(value) diff --git a/test/junit/scala/tools/testing/Using.scala b/src/library/scala/util/Using.scala similarity index 87% rename from test/junit/scala/tools/testing/Using.scala rename to src/library/scala/util/Using.scala index e2413b8c0085..ebec5e7007ec 100644 --- a/test/junit/scala/tools/testing/Using.scala +++ b/src/library/scala/util/Using.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,9 +10,8 @@ * additional information regarding copyright ownership. */ -package scala.tools.testing +package scala.util -import scala.util.Try import scala.util.control.{ControlThrowable, NonFatal} /** A utility for performing automatic resource management. 
It can be used to perform an @@ -45,20 +44,54 @@ import scala.util.control.{ControlThrowable, NonFatal} * import java.io.{BufferedReader, FileReader} * import scala.util.{Try, Using} * + * val files = List("file1.txt", "file2.txt", "file3.txt", "file4.txt") * val lines: Try[Seq[String]] = Using.Manager { use => - * val r1 = use(new BufferedReader(new FileReader("file1.txt"))) - * val r2 = use(new BufferedReader(new FileReader("file2.txt"))) - * val r3 = use(new BufferedReader(new FileReader("file3.txt"))) - * val r4 = use(new BufferedReader(new FileReader("file4.txt"))) + * // acquire resources + * def mkreader(filename: String) = use(new BufferedReader(new FileReader(filename))) * * // use your resources here * def lines(reader: BufferedReader): Iterator[String] = * Iterator.continually(reader.readLine()).takeWhile(_ != null) * - * (lines(r1) ++ lines(r2) ++ lines(r3) ++ lines(r4)).toList + * files.map(mkreader).flatMap(lines) * } * }}} * + * Composed or "wrapped" resources may be acquired in order of construction, + * if "underlying" resources are not closed. Although redundant in this case, + * here is the previous example with a wrapped call to `use`: + * {{{ + * def mkreader(filename: String) = use(new BufferedReader(use(new FileReader(filename)))) + * }}} + * + * Custom resources can be registered on construction by requiring an implicit `Manager`. 
+ * This ensures they will be released even if composition fails: + * {{{ + * import scala.util.Using + * + * case class X(x: String)(implicit mgr: Using.Manager) extends AutoCloseable { + * override def close() = println(s"CLOSE $x") + * mgr.acquire(this) + * } + * case class Y(y: String)(x: String)(implicit mgr: Using.Manager) extends AutoCloseable { + * val xres = X(x) + * override def close() = println(s"CLOSE $y") + * // an error during construction releases previously acquired resources + * require(y != null, "y is null") + * mgr.acquire(this) + * } + * + * Using.Manager { implicit mgr => + * val y = Y("Y")("X") + * println(s"USE $y") + * } + * println { + * Using.Manager { implicit mgr => + * Y(null)("X") + * } + * } // Failure(java.lang.IllegalArgumentException: requirement failed: y is null) + * }}} + * * If you wish to avoid wrapping management and operations in a `Try`, you can use * [[Using.resource `Using.resource`]], which throws any exceptions that occur. * @@ -92,8 +125,8 @@ import scala.util.control.{ControlThrowable, NonFatal} * - `java.lang.LinkageError` * - `java.lang.InterruptedException` and `java.lang.ThreadDeath` * - [[scala.util.control.NonFatal fatal exceptions]], excluding `scala.util.control.ControlThrowable` + * - all other exceptions, excluding `scala.util.control.ControlThrowable` * - `scala.util.control.ControlThrowable` - * - all other exceptions * * When more than two exceptions are thrown, the first two are combined and * re-thrown as described above, and each successive exception thrown is combined @@ -149,7 +182,7 @@ object Using { * the resource is released when the manager is closed, and then * returns the (unmodified) resource. 
*/ - def apply[R: Releasable](resource: R): R = { + def apply[R: Releasable](resource: R): resource.type = { acquire(resource) resource } @@ -227,13 +260,14 @@ object Using { } private def preferentiallySuppress(primary: Throwable, secondary: Throwable): Throwable = { + @annotation.nowarn("cat=deprecation") // avoid warning on mention of ThreadDeath def score(t: Throwable): Int = t match { case _: VirtualMachineError => 4 case _: LinkageError => 3 case _: InterruptedException | _: ThreadDeath => 2 - case _: ControlThrowable => 0 + case _: ControlThrowable => -1 // below everything case e if !NonFatal(e) => 1 // in case this method gets out of sync with NonFatal - case _ => -1 + case _ => 0 } @inline def suppress(t: Throwable, suppressed: Throwable): Throwable = { t.addSuppressed(suppressed); t } @@ -387,6 +421,8 @@ object Using { } object Releasable { + // prefer explicit types 2.14 + //implicit val AutoCloseableIsReleasable: Releasable[AutoCloseable] = new Releasable[AutoCloseable] {} /** An implicit `Releasable` for [[java.lang.AutoCloseable `AutoCloseable`s]]. */ implicit object AutoCloseableIsReleasable extends Releasable[AutoCloseable] { def release(resource: AutoCloseable): Unit = resource.close() diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala index 87deedc25478..888867c0acaf 100644 --- a/src/library/scala/util/control/Breaks.scala +++ b/src/library/scala/util/control/Breaks.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,78 +10,104 @@ * additional information regarding copyright ownership. */ -package scala -package util.control +package scala.util.control -/** A class that can be instantiated for the break control abstraction. +/** Provides the `break` control abstraction. 
+ * + * The `break` method uses a `ControlThrowable` to transfer + * control up the stack to an enclosing `breakable`. + * + * It is typically used to abruptly terminate a `for` loop, + * but can be used to return from an arbitrary computation. + * + * Control resumes after the `breakable`. + * + * If there is no matching `breakable`, the `BreakControl` + * thrown by `break` is handled in the usual way: if not + * caught, it may terminate the current `Thread`. + * + * `BreakControl` carries no stack trace, so the default + * exception handler does not print useful diagnostic + * information; there is no compile-time warning if there + * is no matching `breakable`. + * + * A catch clause using `NonFatal` is safe to use with + * `break`; it will not short-circuit the transfer + * of control to the enclosing `breakable`. + * + * A `breakable` matches a call to `break` if the methods + * were invoked on the same receiver object, which may be the + * convenience value `Breaks`. + * * Example usage: * {{{ * val mybreaks = new Breaks * import mybreaks.{break, breakable} * * breakable { - * for (...) { - * if (...) break() + * for (x <- xs) { + * if (done) break() + * f(x) * } * } * }}} - * Calls to break from one instantiation of `Breaks` will never - * target breakable objects of some other instantiation. + * Calls to `break` from one instance of `Breaks` will never + * resume at the `breakable` of some other instance. + * + * Any intervening exception handlers should use `NonFatal`, + * or use `Try` for evaluation: + * {{{ + * val mybreaks = new Breaks + * import mybreaks.{break, breakable} + * + * breakable { + * for (x <- xs) Try { if (quit) break else f(x) }.foreach(println) + * } + * }}} */ class Breaks { - private val breakException = new BreakControl + private[this] val breakException = new BreakControl - /** - * A block from which one can exit with a `break`. 
The `break` may be - * executed further down in the call stack provided that it is called on the - * exact same instance of `Breaks`. + /** A block from which one can exit with a `break`. The `break` may be + * executed further down in the call stack provided that it is called on the + * exact same instance of `Breaks`. */ - def breakable(op: => Unit) { - try { - op - } catch { - case ex: BreakControl => - if (ex ne breakException) throw ex - } - } + def breakable(op: => Unit): Unit = + try op catch { case ex: BreakControl if ex eq breakException => } sealed trait TryBlock[T] { - def catchBreak(onBreak: =>T): T + def catchBreak(onBreak: => T): T } - /** - * This variant enables the execution of a code block in case of a `break()`: + /** Try a computation that produces a value, supplying a default + * to be used if the computation terminates with a `break`. + * * {{{ * tryBreakable { - * for (...) { - * if (...) break() - * } + * (1 to 3).map(i => if (math.random < .5) break else i * 2) * } catchBreak { - * doCleanup() + * Vector.empty * } * }}} */ - def tryBreakable[T](op: =>T) = new TryBlock[T] { - def catchBreak(onBreak: =>T) = try { - op - } catch { - case ex: BreakControl => - if (ex ne breakException) throw ex - onBreak + def tryBreakable[T](op: => T): TryBlock[T] = + new TryBlock[T] { + def catchBreak(onBreak: => T) = + try op catch { case ex: BreakControl if ex eq breakException => onBreak } } - } - /** - * Break from dynamically closest enclosing breakable block using this exact - * `Breaks` instance. + /** Break from the dynamically closest enclosing breakable block that also uses + * this `Breaks` instance. * - * @note This might be different than the statically closest enclosing block! + * @note This might be different from the statically closest enclosing block! + * @note Invocation without parentheses relies on the conversion to "empty application". 
*/ - def break(): Nothing = { throw breakException } + def break(): Nothing = throw breakException } /** An object that can be used for the break control abstraction. + * * Example usage: * {{{ * import Breaks.{break, breakable} diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala index c9d545c34b0f..ea5ff549e121 100644 --- a/src/library/scala/util/control/ControlThrowable.scala +++ b/src/library/scala/util/control/ControlThrowable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,30 +10,38 @@ * additional information regarding copyright ownership. */ -package scala -package util.control +package scala.util.control -/** A marker trait indicating that the `Throwable` it is mixed into is - * intended for flow control. +/** A parent class for throwable objects intended for flow control. * - * Note that `Throwable` subclasses which extend this trait may extend any - * other `Throwable` subclass (eg. `RuntimeException`) and are not required - * to extend `Throwable` directly. + * Instances of `ControlThrowable` should not normally be caught. + * + * As a convenience, `NonFatal` does not match `ControlThrowable`. * - * Instances of `Throwable` subclasses marked in this way should not normally - * be caught. Where catch-all behaviour is required `ControlThrowable` - * should be propagated, for example: * {{{ - * import scala.util.control.ControlThrowable + * import scala.util.control.{Breaks, NonFatal}, Breaks.{break, breakable} * - * try { - * // Body might throw arbitrarily - * } catch { - * case c: ControlThrowable => throw c // propagate - * case t: Exception => log(t) // log and suppress + * breakable { + * for (v <- values) { + * try { + * if (p(v)) break + * else ??? 
+ * } catch { + * case NonFatal(t) => log(t) // can't catch a break + * } + * } * } * }}} * - * @author Miles Sabin + * Suppression is disabled, because flow control should not suppress + * an exceptional condition. Stack traces are also disabled, allowing + * instances of `ControlThrowable` to be safely reused. + * + * Instances of `ControlThrowable` should not normally have a cause. + * Legacy subclasses may set a cause using `initCause`. */ -trait ControlThrowable extends Throwable with NoStackTrace +abstract class ControlThrowable(message: String) extends Throwable( + message, /*cause*/ null, /*enableSuppression=*/ false, /*writableStackTrace*/ false) { + + def this() = this(message = null) +} diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala index b63c57973042..181bb22743a7 100644 --- a/src/library/scala/util/control/Exception.scala +++ b/src/library/scala/util/control/Exception.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,8 @@ package scala package util package control -import scala.reflect.{ ClassTag, classTag } +import scala.annotation.tailrec +import scala.reflect.{ClassTag, classTag} import scala.language.implicitConversions /** Classes representing the components of exception handling. 
@@ -33,16 +34,16 @@ import scala.language.implicitConversions * import scala.util.control.Exception._ * import java.net._ * - * val s = "http://www.scala-lang.org/" + * val s = "https://www.scala-lang.org/" * - * // Some(http://www.scala-lang.org/) + * // Some(https://www.scala-lang.org/) * val x1: Option[URL] = catching(classOf[MalformedURLException]) opt new URL(s) * - * // Right(http://www.scala-lang.org/) + * // Right(https://www.scala-lang.org/) * val x2: Either[Throwable,URL] = * catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s) * - * // Success(http://www.scala-lang.org/) + * // Success(https://www.scala-lang.org/) * val x3: Try[URL] = catching(classOf[MalformedURLException], classOf[NullPointerException]) withTry new URL(s) * * val defaultUrl = new URL("http://example.com") @@ -59,7 +60,7 @@ import scala.language.implicitConversions * def printUrl(url: String) : Unit = { * val con = new URL(url) openConnection() * val source = scala.io.Source.fromInputStream(con.getInputStream()) - * source.getLines.foreach(println) + * source.getLines().foreach(println) * } * * val badUrl = "htt/xx" @@ -68,7 +69,7 @@ import scala.language.implicitConversions * // at java.net.URL.(URL.java:586) * withThrowableLogging { printUrl(badUrl) } * - * val goodUrl = "http://www.scala-lang.org/" + * val goodUrl = "https://www.scala-lang.org/" * // Prints page content, * // <!DOCTYPE html> * // <html> @@ -147,26 +148,24 @@ import scala.language.implicitConversions * @groupdesc logic-container Containers for catch and finally behavior. 
* * @define protectedExceptions `ControlThrowable` or `InterruptedException` - * - * @author Paul Phillips */ object Exception { type Catcher[+T] = PartialFunction[Throwable, T] - def mkCatcher[Ex <: Throwable: ClassTag, T](isDef: Ex => Boolean, f: Ex => T) = new Catcher[T] { + def mkCatcher[Ex <: Throwable: ClassTag, T](isDef: Ex => Boolean, f: Ex => T): PartialFunction[Throwable, T] = new Catcher[T] { private def downcast(x: Throwable): Option[Ex] = if (classTag[Ex].runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[Ex]) else None - def isDefinedAt(x: Throwable) = downcast(x) exists isDef + def isDefinedAt(x: Throwable): Boolean = downcast(x) exists isDef def apply(x: Throwable): T = f(downcast(x).get) } - def mkThrowableCatcher[T](isDef: Throwable => Boolean, f: Throwable => T) = mkCatcher(isDef, f) + def mkThrowableCatcher[T](isDef: Throwable => Boolean, f: Throwable => T): PartialFunction[Throwable, T] = mkCatcher[Throwable, T](isDef, f) - implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassTag, T](pf: PartialFunction[Ex, T]) = - mkCatcher(pf.isDefinedAt _, pf.apply _) + implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassTag, T](pf: PartialFunction[Ex, T]): Catcher[T] = + mkCatcher(pf.isDefinedAt, pf.apply) /** !!! Not at all sure of every factor which goes into this, * and/or whether we need multiple standard variations. @@ -181,13 +180,13 @@ object Exception { trait Described { protected val name: String - private var _desc: String = "" - def desc = _desc + private[this] var _desc: String = "" + def desc: String = _desc def withDesc(s: String): this.type = { _desc = s this } - override def toString() = name + "(" + desc + ")" + override def toString(): String = name + "(" + desc + ")" } /** A container class for finally code. 
@@ -197,7 +196,7 @@ object Exception { protected val name = "Finally" def and(other: => Unit): Finally = new Finally({ body ; other }) - def invoke() { body } + def invoke(): Unit = { body } } /** A container class for catch/finally logic. @@ -260,8 +259,8 @@ object Exception { * but with the supplied `apply` method replacing the current one. */ def withApply[U](f: Throwable => U): Catch[U] = { val pf2 = new Catcher[U] { - def isDefinedAt(x: Throwable) = pf isDefinedAt x - def apply(x: Throwable) = f(x) + def isDefinedAt(x: Throwable): Boolean = pf isDefinedAt x + def apply(x: Throwable): U = f(x) } new Catch(pf2, fin, rethrow) } @@ -344,10 +343,9 @@ object Exception { * }}} * @group dsl */ - // TODO: Add return type - def handling[T](exceptions: Class[_]*) = { - def fun(f: Throwable => T) = catching(exceptions: _*) withApply f - new By[Throwable => T, Catch[T]](fun _) + def handling[T](exceptions: Class[_]*): By[Throwable => T, Catch[T]] = { + def fun(f: Throwable => T): Catch[T] = catching(exceptions: _*) withApply f + new By[Throwable => T, Catch[T]](fun) } /** Returns a `Catch` object with no catch logic and the argument as the finally logic. @@ -359,6 +357,7 @@ object Exception { * @group composition-catch */ def unwrapping[T](exceptions: Class[_]*): Catch[T] = { + @tailrec def unwrap(x: Throwable): Throwable = if (wouldMatch(x, exceptions) && x.getCause != null) unwrap(x.getCause) else x diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala index b3788db4530f..f34fec38180c 100644 --- a/src/library/scala/util/control/NoStackTrace.scala +++ b/src/library/scala/util/control/NoStackTrace.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,9 +19,6 @@ package util.control * [[scala.sys.SystemProperties]]. 
* * @note Since JDK 1.7, a similar effect can be achieved with `class Ex extends Throwable(..., writableStackTrace = false)` - * - * @author Paul Phillips - * @since 2.8 */ trait NoStackTrace extends Throwable { override def fillInStackTrace(): Throwable = @@ -33,6 +30,6 @@ object NoStackTrace { final def noSuppression = _noSuppression // two-stage init to make checkinit happy, since sys.SystemProperties.noTraceSuppression.value calls back into NoStackTrace.noSuppression - final private var _noSuppression = false - _noSuppression = sys.SystemProperties.noTraceSuppression.value + final private[this] var _noSuppression = false + _noSuppression = System.getProperty("scala.control.noTraceSuppression", "").equalsIgnoreCase("true") } diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala index a499229f2bc4..80d8812a42ef 100644 --- a/src/library/scala/util/control/NonFatal.scala +++ b/src/library/scala/util/control/NonFatal.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -33,14 +33,15 @@ package util.control * }}} */ object NonFatal { - /** - * Returns true if the provided `Throwable` is to be considered non-fatal, or false if it is to be considered fatal - */ - def apply(t: Throwable): Boolean = t match { - // VirtualMachineError includes OutOfMemoryError and other fatal errors - case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false - case _ => true - } + /** + * Returns true if the provided `Throwable` is to be considered non-fatal, or false if it is to be considered fatal + */ + @annotation.nowarn("cat=deprecation") // avoid warning on mention of ThreadDeath + def apply(t: Throwable): Boolean = t match { + // VirtualMachineError includes OutOfMemoryError and other fatal errors + case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false + case _ => true + } /** * Returns Some(t) if NonFatal(t) == true, otherwise None */ diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala index bdc25170fa66..717ea9004f3f 100644 --- a/src/library/scala/util/control/TailCalls.scala +++ b/src/library/scala/util/control/TailCalls.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,16 +10,19 @@ * additional information regarding copyright ownership. */ -package scala -package util.control +package scala.util.control + +import annotation.tailrec /** Methods exported by this object implement tail calls via trampolining. - * Tail calling methods have to return their result using `done` or call the - * next method using `tailcall`. Both return a `TailRec` object. 
The result - * of evaluating a tailcalling function can be retrieved from a `Tailrec` + * + * Tail calling methods must either return their result using `done` or call the + * next method using `tailcall`. Both return an instance of `TailRec`. The result + * of evaluating a tailcalling function can be retrieved from a `TailRec` * value using method `result`. + * * Implemented as described in "Stackless Scala with Free Monads" - * [[http://blog.higher-order.com/assets/trampolines.pdf]] + * [[https://blog.higher-order.com/assets/trampolines.pdf]] * * Here's a usage example: * {{{ @@ -37,74 +40,72 @@ package util.control * if (n < 2) done(n) else for { * x <- tailcall(fib(n - 1)) * y <- tailcall(fib(n - 2)) - * } yield (x + y) + * } yield x + y * * fib(40).result * }}} */ object TailCalls { - /** This class represents a tailcalling computation + /** This class represents a tailcalling computation. */ - abstract class TailRec[+A] { + sealed abstract class TailRec[+A] { /** Continue the computation with `f`. */ - final def map[B](f: A => B): TailRec[B] = - flatMap(a => Call(() => Done(f(a)))) + final def map[B](f: A => B): TailRec[B] = flatMap(a => Call(() => Done(f(a)))) /** Continue the computation with `f` and merge the trampolining - * of this computation with that of `f`. */ - final def flatMap[B](f: A => TailRec[B]): TailRec[B] = - this match { - case Done(a) => Call(() => f(a)) - case c@Call(_) => Cont(c, f) - // Take advantage of the monad associative law to optimize the size of the required stack - case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c.f(x) flatMap f) - } + * of this computation with that of `f`. 
*/ + final def flatMap[B](f: A => TailRec[B]): TailRec[B] = this match { + case Done(a) => Call(() => f(a)) + case Call(_) => Cont(this, f) + // Take advantage of the monad associative law to optimize the size of the required stack + case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c.f(x).flatMap(f)) + } /** Returns either the next step of the tailcalling computation, * or the result if there are no more steps. */ - @annotation.tailrec final def resume: Either[() => TailRec[A], A] = this match { - case Done(a) => Right(a) - case Call(k) => Left(k) + @tailrec final def resume: Either[() => TailRec[A], A] = this match { + case Done(a) => Right(a) + case Call(k) => Left(k) case Cont(a, f) => a match { - case Done(v) => f(v).resume - case Call(k) => Left(() => k().flatMap(f)) - case Cont(b, g) => b.flatMap(x => g(x) flatMap f).resume + case Done(v) => f(v).resume + case Call(k) => Left(() => k().flatMap(f)) + case Cont(b, g) => b.flatMap(x => g(x).flatMap(f)).resume } } /** Returns the result of the tailcalling computation. */ - @annotation.tailrec final def result: A = this match { - case Done(a) => a - case Call(t) => t().result + @tailrec final def result: A = this match { + case Done(a) => a + case Call(t) => t().result case Cont(a, f) => a match { - case Done(v) => f(v).result - case Call(t) => t().flatMap(f).result - case Cont(b, g) => b.flatMap(x => g(x) flatMap f).result + case Done(v) => f(v).result + case Call(t) => t().flatMap(f).result + case Cont(b, g) => b.flatMap(x => g(x).flatMap(f)).result } } } - /** Internal class representing a tailcall */ + /** Internal class representing a tailcall. */ protected case class Call[A](rest: () => TailRec[A]) extends TailRec[A] /** Internal class representing the final result returned from a tailcalling - * computation */ + * computation. */ protected case class Done[A](value: A) extends TailRec[A] /** Internal class representing a continuation with function A => TailRec[B]. 
- * It is needed for the flatMap to be implemented. */ + * It is needed for the flatMap to be implemented. */ protected case class Cont[A, B](a: TailRec[A], f: A => TailRec[B]) extends TailRec[B] - /** Performs a tailcall + /** Perform a tailcall. * @param rest the expression to be evaluated in the tailcall * @return a `TailRec` object representing the expression `rest` */ def tailcall[A](rest: => TailRec[A]): TailRec[A] = Call(() => rest) - /** Used to return final result from tailcalling computation + /** Return the final result from a tailcalling computation. * @param `result` the result value * @return a `TailRec` object representing a computation which immediately * returns `result` diff --git a/src/library/scala/util/hashing/ByteswapHashing.scala b/src/library/scala/util/hashing/ByteswapHashing.scala index 21ff35fa5163..ca96e4d1f4a5 100644 --- a/src/library/scala/util/hashing/ByteswapHashing.scala +++ b/src/library/scala/util/hashing/ByteswapHashing.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala index d995e22d8c57..28067cce9992 100644 --- a/src/library/scala/util/hashing/Hashing.scala +++ b/src/library/scala/util/hashing/Hashing.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,8 +23,6 @@ import scala.annotation.implicitNotFound * * Note: when using a custom `Hashing`, make sure to use it with the `Equiv` * such that if any two objects are equal, then their hash codes must be equal. 
- * - * @since 2.10 */ @implicitNotFound(msg = "No implicit Hashing defined for ${T}.") trait Hashing[T] extends Serializable { @@ -36,7 +34,7 @@ object Hashing { def hash(x: T) = x.## } - implicit def default[T] = new Default[T] + implicit def default[T]: Default[T] = new Default[T] def fromFunction[T](f: T => Int) = new Hashing[T] { def hash(x: T) = f(x) diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala index 4e9248d376c2..1fa98e790445 100644 --- a/src/library/scala/util/hashing/MurmurHash3.scala +++ b/src/library/scala/util/hashing/MurmurHash3.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,9 +13,7 @@ package scala package util.hashing -import java.lang.Integer.{rotateLeft => rotl} - -import scala.runtime.AbstractFunction1 +import java.lang.Integer.{ rotateLeft => rotl } private[hashing] class MurmurHash3 { /** Mix in a block of data into an intermediate hash value. */ @@ -54,23 +52,43 @@ private[hashing] class MurmurHash3 { h } - private[scala] def product2Hash(x: Any, y: Any, seed: Int): Int = { + private[scala] def tuple2Hash(x: Int, y: Int, seed: Int): Int = { var h = seed - h = mix(h, x.##) - h = mix(h, y.##) + h = mix(h, "Tuple2".hashCode) + h = mix(h, x) + h = mix(h, y) finalizeHash(h, 2) } - /** Compute the hash of a product */ - final def productHash(x: Product, seed: Int): Int = { + + // @deprecated("use `caseClassHash` instead", "2.13.17") + // The deprecation is commented because this method is called by the synthetic case class hashCode. + // In this case, the `seed` already has the case class name mixed in and `ignorePrefix` is set to true. + // Case classes compiled before 2.13.17 call this method with `productSeed` and `ignorePrefix = false`. 
+ // See `productHashCode` in `SyntheticMethods` for details. + final def productHash(x: Product, seed: Int, ignorePrefix: Boolean = false): Int = { val arr = x.productArity - // Case objects have the hashCode inlined directly into the - // synthetic hashCode method, but this method should still give - // a correct result if passed a case object. - if (arr == 0) { - x.productPrefix.hashCode + if (arr == 0) + if (!ignorePrefix) x.productPrefix.hashCode else seed + else { + var h = seed + if (!ignorePrefix) h = mix(h, x.productPrefix.hashCode) + var i = 0 + while (i < arr) { + h = mix(h, x.productElement(i).##) + i += 1 + } + finalizeHash(h, arr) } + } + + /** See the [[MurmurHash3.caseClassHash(x:Product,caseClassName:String)]] overload */ + final def caseClassHash(x: Product, seed: Int, caseClassName: String): Int = { + val arr = x.productArity + val aye = (if (caseClassName != null) caseClassName else x.productPrefix).hashCode + if (arr == 0) aye else { var h = seed + h = mix(h, aye) var i = 0 while (i < arr) { h = mix(h, x.productElement(i).##) @@ -80,6 +98,7 @@ private[hashing] class MurmurHash3 { } } + /** Compute the hash of a string */ final def stringHash(str: String, seed: Int): Int = { var h = seed @@ -97,70 +116,108 @@ private[hashing] class MurmurHash3 { * where the order of appearance of elements does not matter. * This is useful for hashing sets, for example. 
*/ - final def unorderedHash(xs: TraversableOnce[Any], seed: Int): Int = { - if (xs.isEmpty) { - var h = seed - h = mix(h, 0) - h = mix(h, 0) - h = mixLast(h, 1) - finalizeHash(h, 0) - } - else { - //avoid the LazyRef as we don't have an @eager object - class hasher extends AbstractFunction1[Any, Unit] { - var a, b, n = 0 - var c = 1 - override def apply(x: Any): Unit = { - val h = x.## - a += h - b ^= h - if (h != 0) c *= h - n += 1 - } - } - val hasher = new hasher - xs foreach hasher - var h = seed - h = mix(h, hasher.a) - h = mix(h, hasher.b) - h = mixLast(h, hasher.c) - finalizeHash(h, hasher.n) + final def unorderedHash(xs: IterableOnce[Any], seed: Int): Int = { + var a, b, n = 0 + var c = 1 + val iterator = xs.iterator + while (iterator.hasNext) { + val x = iterator.next() + val h = x.## + a += h + b ^= h + c *= h | 1 + n += 1 } + var h = seed + h = mix(h, a) + h = mix(h, b) + h = mixLast(h, c) + finalizeHash(h, n) } - /** Compute a hash that depends on the order of its arguments. - */ - final def orderedHash(xs: TraversableOnce[Any], seed: Int): Int = { - if (xs.isEmpty) finalizeHash(seed, 0) - else { - //avoid the LazyRef as we don't have an @eager object - class hasher extends AbstractFunction1[Any, Unit] { - var n = 0 - var h = seed - override def apply(x: Any): Unit = { - h = mix(h, x.##) - n += 1 + + /** Compute a hash that depends on the order of its arguments. Potential range + * hashes are recognized to produce a hash that is compatible with rangeHash. 
+ */ + final def orderedHash(xs: IterableOnce[Any], seed: Int): Int = { + val it = xs.iterator + var h = seed + if(!it.hasNext) return finalizeHash(h, 0) + val x0 = it.next() + if(!it.hasNext) return finalizeHash(mix(h, x0.##), 1) + val x1 = it.next() + + val initial = x0.## + h = mix(h, initial) + val h0 = h + var prev = x1.## + val rangeDiff = prev - initial + var i = 2 + while (it.hasNext) { + h = mix(h, prev) + val hash = it.next().## + if(rangeDiff != hash - prev || rangeDiff == 0) { + h = mix(h, hash) + i += 1 + while (it.hasNext) { + h = mix(h, it.next().##) + i += 1 } + return finalizeHash(h, i) } - val hasher = new hasher - xs foreach hasher - finalizeHash(hasher.h, hasher.n) + prev = hash + i += 1 } + avalanche(mix(mix(h0, rangeDiff), prev)) + } - /** Compute the hash of an array. - */ + /** Compute the hash of an array. Potential range hashes are recognized to produce a + * hash that is compatible with rangeHash. + */ final def arrayHash[@specialized T](a: Array[T], seed: Int): Int = { var h = seed - var i = 0 - while (i < a.length) { - h = mix(h, a(i).##) - i += 1 + val l = a.length + l match { + case 0 => + finalizeHash(h, 0) + case 1 => + finalizeHash(mix(h, a(0).##), 1) + case _ => + val initial = a(0).## + h = mix(h, initial) + val h0 = h + var prev = a(1).## + val rangeDiff = prev - initial + var i = 2 + while (i < l) { + h = mix(h, prev) + val hash = a(i).## + if(rangeDiff != hash - prev || rangeDiff == 0) { + h = mix(h, hash) + i += 1 + while (i < l) { + h = mix(h, a(i).##) + i += 1 + } + return finalizeHash(h, l) + } + prev = hash + i += 1 + } + avalanche(mix(mix(h0, rangeDiff), prev)) } - finalizeHash(h, a.length) } + /** Compute the hash of a Range with at least 2 elements. Ranges with fewer + * elements need to use seqHash instead. The `last` parameter must be the + * actual last element produced by a Range, not the nominal `end`. 
+ */ + final def rangeHash(start: Int, step: Int, last: Int, seed: Int): Int = + avalanche(mix(mix(mix(seed, start), step), last)) + /** Compute the hash of a byte array. Faster than arrayHash, because - * it hashes 4 bytes at once. + * it hashes 4 bytes at once. Note that the result is not compatible with + * arrayHash! */ final def bytesHash(data: Array[Byte], seed: Int): Int = { var len = data.length @@ -193,18 +250,76 @@ private[hashing] class MurmurHash3 { finalizeHash(h, data.length) } + /** Compute the hash of an IndexedSeq. Potential range hashes are recognized to produce a + * hash that is compatible with rangeHash. + */ + final def indexedSeqHash(a: scala.collection.IndexedSeq[Any], seed: Int): Int = { + var h = seed + val l = a.length + l match { + case 0 => + finalizeHash(h, 0) + case 1 => + finalizeHash(mix(h, a(0).##), 1) + case _ => + val initial = a(0).## + h = mix(h, initial) + val h0 = h + var prev = a(1).## + val rangeDiff = prev - initial + var i = 2 + while (i < l) { + h = mix(h, prev) + val hash = a(i).## + if(rangeDiff != hash - prev || rangeDiff == 0) { + h = mix(h, hash) + i += 1 + while (i < l) { + h = mix(h, a(i).##) + i += 1 + } + return finalizeHash(h, l) + } + prev = hash + i += 1 + } + avalanche(mix(mix(h0, rangeDiff), prev)) + } + } + + /** Compute the hash of a List. Potential range hashes are recognized to produce a + * hash that is compatible with rangeHash. 
+ */ final def listHash(xs: scala.collection.immutable.List[_], seed: Int): Int = { var n = 0 var h = seed + var rangeState = 0 // 0 = no data, 1 = first elem read, 2 = has valid diff, 3 = invalid + var rangeDiff = 0 + var prev = 0 + var initial = 0 var elems = xs while (!elems.isEmpty) { val head = elems.head val tail = elems.tail - h = mix(h, head.##) + val hash = head.## + h = mix(h, hash) + rangeState match { + case 0 => + initial = hash + rangeState = 1 + case 1 => + rangeDiff = hash - prev + rangeState = 2 + case 2 => + if(rangeDiff != hash - prev || rangeDiff == 0) rangeState = 3 + case _ => + } + prev = hash n += 1 elems = tail } - finalizeHash(h, n) + if(rangeState == 2) rangeHash(initial, rangeDiff, prev, seed) + else finalizeHash(h, n) } } @@ -225,7 +340,9 @@ private[hashing] class MurmurHash3 { * This is based on the earlier MurmurHash3 code by Rex Kerr, but the * MurmurHash3 algorithm was since changed by its creator Austin Appleby * to remedy some weaknesses and improve performance. This represents the - * latest and supposedly final version of the algorithm (revision 136). + * latest and supposedly final version of the algorithm (revision 136). Even + * so, test the generated hashes in between Scala versions, even for point + * releases, as fast, non-cryptographic hashing algorithms evolve rapidly. 
* * @see [[https://github.com/aappleby/smhasher]] */ @@ -239,27 +356,83 @@ object MurmurHash3 extends MurmurHash3 { final val mapSeed = "Map".hashCode final val setSeed = "Set".hashCode - def arrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, arraySeed) - def bytesHash(data: Array[Byte]): Int = arrayHash(data, arraySeed) - def orderedHash(xs: TraversableOnce[Any]): Int = orderedHash(xs, symmetricSeed) - private [scala] def product2Hash(x: Any, y: Any): Int = product2Hash(x, y, productSeed) - def productHash(x: Product): Int = productHash(x, productSeed) - def stringHash(x: String): Int = stringHash(x, stringSeed) - def unorderedHash(xs: TraversableOnce[Any]): Int = unorderedHash(xs, traversableSeed) + def arrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, arraySeed) + def bytesHash(data: Array[Byte]): Int = bytesHash(data, arraySeed) + def orderedHash(xs: IterableOnce[Any]): Int = orderedHash(xs, symmetricSeed) + def stringHash(x: String): Int = stringHash(x, stringSeed) + def unorderedHash(xs: IterableOnce[Any]): Int = unorderedHash(xs, traversableSeed) + def rangeHash(start: Int, step: Int, last: Int): Int = rangeHash(start, step, last, seqSeed) - private[scala] def wrappedArrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, seqSeed) - private[scala] def wrappedBytesHash(data: Array[Byte]): Int = arrayHash(data, seqSeed) + @deprecated("use `caseClassHash` instead", "2.13.17") + def productHash(x: Product): Int = caseClassHash(x, productSeed, null) + + /** + * Compute the `hashCode` of a case class instance. This method returns the same value as `x.hashCode` + * if `x` is an instance of a case class with the default, synthetic `hashCode`. 
+ * + * This method can be used to implement case classes with a cached `hashCode`: + * {{{ + * case class C(data: Data) { + * override lazy val hashCode: Int = MurmurHash3.caseClassHash(this) + * } + * }}} + * + * '''NOTE''': For case classes (or subclasses) that override `productPrefix`, the `caseClassName` parameter + * needs to be specified in order to obtain the same result as the synthetic `hashCode`. Otherwise, the value + * is not in sync with the case class `equals` method (scala/bug#13033). + * + * {{{ + * scala> case class C(x: Int) { override def productPrefix = "Y" } + * + * scala> C(1).hashCode + * val res0: Int = -668012062 + * + * scala> MurmurHash3.caseClassHash(C(1)) + * val res1: Int = 1015658380 + * + * scala> MurmurHash3.caseClassHash(C(1), "C") + * val res2: Int = -668012062 + * }}} + */ + def caseClassHash(x: Product, caseClassName: String = null): Int = caseClassHash(x, productSeed, caseClassName) + + private[scala] def arraySeqHash[@specialized T](a: Array[T]): Int = arrayHash(a, seqSeed) + private[scala] def tuple2Hash(x: Any, y: Any): Int = tuple2Hash(x.##, y.##, productSeed) /** To offer some potential for optimization. 
*/ def seqHash(xs: scala.collection.Seq[_]): Int = xs match { + case xs: scala.collection.IndexedSeq[_] => indexedSeqHash(xs, seqSeed) case xs: List[_] => listHash(xs, seqSeed) case xs => orderedHash(xs, seqSeed) } - def mapHash(xs: scala.collection.Map[_, _]): Int = unorderedHash(xs, mapSeed) - def setHash(xs: scala.collection.Set[_]): Int = unorderedHash(xs, setSeed) + def mapHash(xs: scala.collection.Map[_, _]): Int = { + if (xs.isEmpty) emptyMapHash + else { + class accum extends Function2[Any, Any, Unit] { + var a, b, n = 0 + var c = 1 + override def apply(k: Any, v: Any): Unit = { + val h = tuple2Hash(k, v) + a += h + b ^= h + c *= h | 1 + n += 1 + } + } + val accum = new accum + var h = mapSeed + xs.foreachEntry(accum) + h = mix(h, accum.a) + h = mix(h, accum.b) + h = mixLast(h, accum.c) + finalizeHash(h, accum.n) + } + } + private[scala] val emptyMapHash = unorderedHash(Nil, mapSeed) + def setHash(xs: scala.collection.Set[_]): Int = unorderedHash(xs, setSeed) class ArrayHashing[@specialized T] extends Hashing[Array[T]] { def hash(a: Array[T]) = arrayHash(a) @@ -271,47 +444,39 @@ object MurmurHash3 extends MurmurHash3 { def hash(data: Array[Byte]) = bytesHash(data) } - def orderedHashing = new Hashing[TraversableOnce[Any]] { - def hash(xs: TraversableOnce[Any]) = orderedHash(xs) + def orderedHashing = new Hashing[IterableOnce[Any]] { + def hash(xs: IterableOnce[Any]) = orderedHash(xs) } + @deprecated("use `caseClassHashing` instead", "2.13.17") def productHashing = new Hashing[Product] { - def hash(x: Product) = productHash(x) + def hash(x: Product) = caseClassHash(x) } - def stringHashing = new Hashing[String] { - def hash(x: String) = stringHash(x) + def caseClassHashing = new Hashing[Product] { + def hash(x: Product) = caseClassHash(x) } - def unorderedHashing = new Hashing[TraversableOnce[Any]] { - def hash(xs: TraversableOnce[Any]) = unorderedHash(xs) + def stringHashing = new Hashing[String] { + def hash(x: String) = stringHash(x) } - /** All this 
trouble and foreach still appears faster. - * Leaving in place in case someone would like to investigate further. - */ - /** - def linearSeqHash(xs: scala.collection.LinearSeq[_], seed: Int): Int = { - var n = 0 - var h = seed - var elems = xs - while (elems.nonEmpty) { - h = mix(h, elems.head.##) - n += 1 - elems = elems.tail - } - finalizeHash(h, n) + def unorderedHashing = new Hashing[IterableOnce[Any]] { + def hash(xs: IterableOnce[Any]) = unorderedHash(xs) } - def indexedSeqHash(xs: scala.collection.IndexedSeq[_], seed: Int): Int = { - var n = 0 - var h = seed - val len = xs.length - while (n < len) { - h = mix(h, xs(n).##) - n += 1 - } - finalizeHash(h, n) - } - */ +// /** All this trouble and foreach still appears faster. +// * Leaving in place in case someone would like to investigate further. +// */ +// def linearSeqHash(xs: scala.collection.LinearSeq[_], seed: Int): Int = { +// var n = 0 +// var h = seed +// var elems = xs +// while (elems.nonEmpty) { +// h = mix(h, elems.head.##) +// n += 1 +// elems = elems.tail +// } +// finalizeHash(h, n) +// } } diff --git a/src/library/scala/util/hashing/package.scala b/src/library/scala/util/hashing/package.scala index f8ca83cf5339..530b729e2f81 100644 --- a/src/library/scala/util/hashing/package.scala +++ b/src/library/scala/util/hashing/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 6e256fe67771..a90171243e3a 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -33,7 +33,8 @@ import java.util.regex.{ Pattern, Matcher } * and, if it does, to extract or transform the parts that match. * * === Usage === - * This class delegates to the [[java.util.regex]] package of the Java Platform. + + * This class delegates to the [[https://docs.oracle.com/javase/8/docs/api/java/util/regex/package-summary.html java.util.regex]] package of the Java Platform. * See the documentation for [[java.util.regex.Pattern]] for details about * the regular expression syntax for pattern strings. * @@ -114,6 +115,14 @@ import java.util.regex.{ Pattern, Matcher } * val allYears = for (m <- date.findAllMatchIn(dates)) yield m.group(1) * }}} * + * To check whether input is matched by the regex: + * + * {{{ + * date.matches("2018-03-01") // true + * date.matches("Today is 2018-03-01") // false + * date.unanchored.matches("Today is 2018-03-01") // true + * }}} + * * To iterate over the matched strings, use `findAllIn`, which returns a special iterator * that can be queried for the `MatchData` of the last match: * @@ -183,10 +192,6 @@ import java.util.regex.{ Pattern, Matcher } * * @see [[java.util.regex.Pattern]] * - * @author Thibaud Hottelier - * @author Philipp Haller - * @author Martin Odersky - * * @param pattern The compiled pattern * @param groupNames A mapping from names to indices in capture groups * @@ -214,15 +219,18 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year" * }}} * - * Group names supplied to the constructor are preferred to inline group names - * when retrieving matched groups by name. Not all platforms support inline names. + * Inline group names are preferred over group names supplied to the constructor + * when retrieving matched groups by name. Group names supplied to the constructor + * should be considered deprecated. 
* * This constructor does not support options as flags, which must be - * supplied as inline flags in the pattern string: `(?idmsux-idmsux)`. + * supplied as inline flags in the pattern string: `(?idmsuxU)`. * * @param regex The regular expression to compile. * @param groupNames Names of capturing groups. */ + // we cannot add the alternative `def this(regex: String)` in a forward binary compatible way: + // @deprecated("use inline group names like (?X) instead", "2.13.7") def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*) /** Tries to match a [[java.lang.CharSequence]]. @@ -275,12 +283,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * @param s The string to match * @return The matches */ - def unapplySeq(s: CharSequence): Option[List[String]] = s match { - case null => None - case _ => - val m = pattern matcher s - if (runMatcher(m)) Regex.extractGroupsFromMatcher(m) - else None + def unapplySeq(s: CharSequence): Option[List[String]] = { + val m = pattern.matcher(s) + if (runMatcher(m)) Some(List.tabulate(m.groupCount) { i => m.group(i + 1) }) + else None } /** Tries to match the String representation of a [[scala.Char]]. @@ -332,26 +338,12 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * and the result of that match is used. */ def unapplySeq(m: Match): Option[List[String]] = - if (m == null || m.matched == null) None + if (m.matched == null) None else if (m.matcher.pattern == this.pattern) Regex.extractGroupsFromMatch(m) else unapplySeq(m.matched) - /** Tries to match target. 
- * @param target The string to match - * @return The matches - */ - @deprecated("extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0") - def unapplySeq(target: Any): Option[List[String]] = target match { - case s: CharSequence => - val m = pattern matcher s - if (runMatcher(m)) Regex.extractGroupsFromMatcher(m) - else None - case m: Match => unapplySeq(m.matched) - case _ => None - } - // @see UnanchoredRegex - protected def runMatcher(m: Matcher) = m.matches() + protected def runMatcher(m: Matcher): Boolean = m.matches() /** Return all non-overlapping matches of this `Regex` in the given character * sequence as a [[scala.util.matching.Regex.MatchIterator]], @@ -392,7 +384,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * @return A [[scala.util.matching.Regex.MatchIterator]] of matched substrings. * @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}} */ - def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames) + def findAllIn(source: CharSequence): MatchIterator = new Regex.MatchIterator(source, this, groupNames) /** Return all non-overlapping matches of this regexp in given character sequence as a * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]]. 
@@ -403,11 +395,11 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends */ def findAllMatchIn(source: CharSequence): Iterator[Match] = { val matchIterator = findAllIn(source) - new Iterator[Match] { + new AbstractIterator[Match] { def hasNext = matchIterator.hasNext - def next: Match = { + def next(): Match = { matchIterator.next() - new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force + new Match(matchIterator.source, matchIterator.matcher, matchIterator._groupNames).force } } } @@ -471,6 +463,18 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends if (m.lookingAt) Some(new Match(source, m, groupNames)) else None } + /** Returns whether this `Regex` matches the given character sequence. + * + * Like the extractor, this method takes anchoring into account. + * + * @param source The text to match against + * @return true if and only if `source` matches this `Regex`. + * @see [[Regex#unanchored]] + * @example {{{"""\d+""".r matches "123" // returns true}}} + */ + def matches(source: CharSequence): Boolean = + runMatcher(pattern.matcher(source)) + /** Replaces all matches by a string. * * $replacementString @@ -504,9 +508,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * @return The target string after replacements. */ def replaceAllIn(target: CharSequence, replacer: Match => String): String = { - val it = new Regex.MatchIterator(target, this, groupNames).replacementData - it foreach (md => it replace replacer(md)) - it.replaced + val rit = new Regex.MatchIterator(target, this, groupNames).replacementData + for (matchdata <- rit; replacement = replacer(matchdata)) + rit.replace(replacement) + rit.replaced } /** @@ -531,11 +536,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * @return The target string after replacements. 
*/ def replaceSomeIn(target: CharSequence, replacer: Match => Option[String]): String = { - val it = new Regex.MatchIterator(target, this, groupNames).replacementData - for (matchdata <- it ; replacement <- replacer(matchdata)) - it replace replacement - - it.replaced + val rit = new Regex.MatchIterator(target, this, groupNames).replacementData + for (matchdata <- rit; replacement <- replacer(matchdata)) + rit.replace(replacement) + rit.replaced } /** Replaces the first match by a string. @@ -561,7 +565,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends pattern.split(toSplit) /** Create a new Regex with the same pattern, but no requirement that - * the entire String matches in extractor patterns. + * the entire String matches in extractor patterns and [[Regex#matches]]. * * Normally, matching on `date` behaves as though the pattern were * enclosed in anchors, `"^pattern\$"`. @@ -591,7 +595,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends def regex: String = pattern.pattern /** The string defining the regular expression */ - override def toString = regex + override def toString: String = regex } /** A [[Regex]] that finds the first match when used in a pattern match. 
@@ -599,8 +603,8 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * @see [[Regex#unanchored]] */ trait UnanchoredRegex extends Regex { - override protected def runMatcher(m: Matcher) = m.find() - override def unanchored = this + override protected def runMatcher(m: Matcher): Boolean = m.find() + override def unanchored: UnanchoredRegex = this } /** This object defines inner classes that describe @@ -620,6 +624,7 @@ object Regex { val source: CharSequence /** The names of the groups, or an empty sequence if none defined */ + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") val groupNames: Seq[String] /** The number of capturing groups in the pattern. @@ -686,7 +691,11 @@ object Regex { if (end(i) >= 0) source.subSequence(end(i), source.length) else null - private lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex + @scala.annotation.nowarn("msg=deprecated") + private def groupNamesNowarn: Seq[String] = groupNames + + private[this] lazy val nameToIndex: Map[String, Int] = + Map[String, Int]() ++ ("" :: groupNamesNowarn.toList).zipWithIndex /** Returns the group with the given name. * @@ -699,7 +708,7 @@ object Regex { * @throws IllegalArgumentException if the requested group name is not defined */ def group(id: String): String = ( - if (groupNames.isEmpty) + if (groupNamesNowarn.isEmpty) matcher group id else nameToIndex.get(id) match { @@ -709,33 +718,36 @@ object Regex { ) /** The matched string; equivalent to `matched.toString`. */ - override def toString = matched + override def toString: String = matched } /** Provides information about a successful match. 
*/ class Match(val source: CharSequence, protected[matching] val matcher: Matcher, - val groupNames: Seq[String]) extends MatchData { + _groupNames: Seq[String]) extends MatchData { + + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") + val groupNames: Seq[String] = _groupNames /** The index of the first matched character. */ - val start = matcher.start + val start: Int = matcher.start /** The index following the last matched character. */ - val end = matcher.end + val end: Int = matcher.end /** The number of subgroups. */ - def groupCount = matcher.groupCount + def groupCount: Int = matcher.groupCount - private lazy val starts: Array[Int] = - ((0 to groupCount) map matcher.start).toArray - private lazy val ends: Array[Int] = - ((0 to groupCount) map matcher.end).toArray + private[this] lazy val starts: Array[Int] = + Array.tabulate(groupCount + 1) { matcher.start } + private[this] lazy val ends: Array[Int] = + Array.tabulate(groupCount + 1) { matcher.end } /** The index of the first matched character in group `i`. */ - def start(i: Int) = starts(i) + def start(i: Int): Int = starts(i) /** The index following the last matched character in group `i`. */ - def end(i: Int) = ends(i) + def end(i: Int): Int = ends(i) /** The match itself with matcher-dependent lazy vals forced, * so that match is valid even once matcher is advanced. 
@@ -775,25 +787,8 @@ object Regex { } } - private def extractGroupsFromMatch(m: Match): Option[List[String]] = { - var res = List.empty[String] - var index = m.groupCount - while (index > 0) { - res ::= m.group(index) - index -= 1 - } - Some(res) - } - - private def extractGroupsFromMatcher(m: Matcher): Option[List[String]] = { - var res = List.empty[String] - var index = m.groupCount - while (index > 0) { - res ::= m.group(index) - index -= 1 - } - Some(res) - } + @inline private def extractGroupsFromMatch(m: Match): Option[List[String]] = + Some(List.tabulate(m.groupCount) { i => m.group(i + 1) }) /** A class to step through a sequence of regex matches. * @@ -807,8 +802,11 @@ object Regex { * * @see [[java.util.regex.Matcher]] */ - class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String]) - extends AbstractIterator[String] with Iterator[String] with MatchData { self => + class MatchIterator(val source: CharSequence, val regex: Regex, private[Regex] val _groupNames: Seq[String]) + extends AbstractIterator[String] with MatchData { self => + + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") + val groupNames: Seq[String] = _groupNames protected[Regex] val matcher = regex.pattern.matcher(source) @@ -843,7 +841,7 @@ object Regex { } /** Report emptiness. */ - override def toString = super[AbstractIterator].toString + override def toString: String = super[AbstractIterator].toString // ensure we're at a match private[this] def ensure(): Unit = nextSeen match { @@ -866,38 +864,37 @@ object Regex { def end(i: Int): Int = { ensure() ; matcher.end(i) } /** The number of subgroups. */ - def groupCount = { ensure() ; matcher.groupCount } + def groupCount: Int = { ensure() ; matcher.groupCount } /** Convert to an iterator that yields MatchData elements instead of Strings. 
*/ def matchData: Iterator[Match] = new AbstractIterator[Match] { def hasNext = self.hasNext - def next = { self.next(); new Match(source, matcher, groupNames).force } + def next() = { self.next(); new Match(source, matcher, _groupNames).force } } /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support. */ private[matching] def replacementData = new AbstractIterator[Match] with Replacement { - def matcher = self.matcher + protected def matcher = self.matcher def hasNext = self.hasNext - def next = { self.next(); new Match(source, matcher, groupNames).force } + def next(): Match = { self.next(); new Match(source, matcher, _groupNames).force } } } - /** - * A trait able to build a string with replacements assuming it has a matcher. - * Meant to be mixed in with iterators. + /** Internal trait used by `replaceAllIn` and `replaceSomeIn`. */ private[matching] trait Replacement { protected def matcher: Matcher - private val sb = new java.lang.StringBuffer + private[this] val sb = new java.lang.StringBuffer // StringBuffer for JDK 8 compatibility + // Appends the remaining input and returns the result text. def replaced = { - val newsb = new java.lang.StringBuffer(sb) - matcher.appendTail(newsb) - newsb.toString + matcher.appendTail(sb) + sb.toString } - def replace(rs: String) = matcher.appendReplacement(sb, rs) + // Appends the input prefix and the replacement text. + def replace(replacement: String) = matcher.appendReplacement(sb, replacement) } /** Quotes strings to be used literally in regex patterns. diff --git a/src/library/scala/util/package.scala b/src/library/scala/util/package.scala new file mode 100644 index 000000000000..1b8b84dd2bfe --- /dev/null +++ b/src/library/scala/util/package.scala @@ -0,0 +1,20 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +package object util { + /** + * Adds chaining methods `tap` and `pipe` to every type. See [[ChainingOps]]. + */ + object chaining extends ChainingSyntax +} diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala index ac3e80ef5f03..2f4f3c37e676 100644 --- a/src/library/scala/volatile.scala +++ b/src/library/scala/volatile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,4 +15,4 @@ package scala import scala.annotation.meta._ @field -class volatile extends scala.annotation.StaticAnnotation +final class volatile extends scala.annotation.StaticAnnotation diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala index bc622393d5b3..90e7aca4bdfe 100644 --- a/src/manual/scala/man1/Command.scala +++ b/src/manual/scala/man1/Command.scala @@ -41,7 +41,7 @@ trait Command { def authors = Section("AUTHOR", "Written by Martin Odersky and other members of the " & - Link("Scala team", "http://www.scala-lang.org/news/2014/01/22/10-years-of-scala.html") & ".") + Link("Scala team", "https://www.scala-lang.org/news/2014/01/22/10-years-of-scala.html") & ".") def copyright = Section("COPYRIGHT", diff --git a/src/manual/scala/man1/fsc.scala b/src/manual/scala/man1/fsc.scala index bb16a53a87d7..efe86693d9e4 100644 --- a/src/manual/scala/man1/fsc.scala +++ b/src/manual/scala/man1/fsc.scala @@ -16,7 +16,7 @@ object fsc extends Command { val name = Section("NAME", MBold(command) & " " & NDash & " Fast offline compiler for the " & - Link("Scala 2", "http://scala-lang.org/") & " language") + Link("Scala 2", "https://scala-lang.org/") & " language") val synopsis = Section("SYNOPSIS", @@ -123,9 +123,9 @@ object 
fsc extends Command { "Specify the options to be passed to the " & MBold("java") & " command defined by " & MBold("JAVACMD") & ".", - "With Java 1.5 (or newer) one may for example configure the " & - "memory usage of the JVM as follows: " & - Mono("JAVA_OPTS=\"-Xmx512M -Xms16M -Xss16M\"") + "One might for example configure the " & + "memory usage of the JVM with: " & + Mono("JAVA_OPTS=\"-Xmx2G -Xss16M\"") )))) val exitStatus = Section("EXIT STATUS", diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala index f7a0e7f61da2..99161fe00512 100644 --- a/src/manual/scala/man1/scala.scala +++ b/src/manual/scala/man1/scala.scala @@ -16,7 +16,7 @@ object scala extends Command { val name = Section("NAME", MBold(command) & " " & NDash & " Run code in the " & - Link("Scala 2", "http://scala-lang.org/") & + Link("Scala 2", "https://scala-lang.org/") & " language") val synopsis = Section("SYNOPSIS", @@ -178,9 +178,9 @@ object scala extends Command { "Specify the options to be passed to the " & MBold("java") & " command defined by " & MBold("JAVACMD") & ".", - "With Java 1.5 (or newer) one may for example configure the " & - "memory usage of the JVM as follows: " & - Mono("JAVA_OPTS=\"-Xmx512M -Xms16M -Xss16M\"") + "One might for example configure the " & + "memory usage of the JVM with: " & + Mono("JAVA_OPTS=\"-Xmx2G -Xss16M\"") )))) val examples = Section("EXAMPLES", diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index 1a2f864077c9..1e7e04584781 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -13,7 +13,7 @@ object scalac extends Command { val name = Section("NAME", MBold(command) & " " & NDash & " Compiler for the " & - Link("Scala 2", "http://scala-lang.org/") & " language") + Link("Scala 2", "https://scala-lang.org/") & " language") val synopsis = Section("SYNOPSIS", @@ -94,7 +94,7 @@ object scalac extends Command { "Specify character encoding used by source files.", 
"The default value is platform-specific (Linux: " & Mono("\"UTF8\"") & ", Windows: " & Mono("\"Cp1252\"") & "). Executing the following " & - "code in the Scala interpreter will return the default value " & + "code in the Scala REPL will return the default value " & "on your system:", MBold(" scala> ") & Mono("new java.io.InputStreamReader(System.in).getEncoding"))), @@ -199,9 +199,6 @@ object scalac extends Command { CmdOption("Xelide-below", Argument("n")), "Calls to " & MItalic("@elidable") & " methods are omitted if method priority is lower than argument."), - Definition( - CmdOption("Xexperimental"), - "Enable experimental extensions"), Definition( CmdOption("Xfatal-warnings"), "Fail the compilation if there are any warnings."), @@ -209,10 +206,7 @@ object scalac extends Command { CmdOption("Xfull-lubs"), "Retain pre 2.10 behavior of less aggressive truncation of least upper bounds."), Definition( - CmdOption("Xfuture"), - "Turn on future language features."), - Definition( - CmdOption("Xgenerate-phase-graph", Argument("file")), + CmdOption("Vphase-graph", Argument("file")), "Generate the phase graphs (outputs .dot files) to fileX.dot."), Definition( CmdOption("Xlint"), @@ -226,9 +220,6 @@ object scalac extends Command { Definition( CmdOption("Xlog-implicit-conversions"), "Print a message whenever an implicit conversion is inserted."), - Definition( - CmdOption("Xlog-implicits"), - "Show more detail on why some implicits are not applicable."), Definition( CmdOption("Xlog-reflective-calls"), "Print a message when a reflective method call is generated."), @@ -238,9 +229,6 @@ object scalac extends Command { Definition( CmdOption("Xmain-class", Argument("path")), "Class for manifest's Main-Class entry (only useful with -d )."), - Definition( - CmdOption("Xmax-classfile-name", Argument("n")), - "Maximum filename length for generated classes."), Definition( CmdOptionBound("Xmigration:", Argument("version")), "Warn about constructs whose behavior may have changed 
since" & Argument("version") & "."), @@ -250,12 +238,9 @@ object scalac extends Command { Definition( CmdOption("Xno-patmat-analysis"), "Don't perform exhaustivity/unreachability analysis. Also, ignore " & MItalic("@switch") & " annotation."), - Definition( - CmdOption("Xno-uescape"), - "Disable handling of " & BSlash & "u unicode escapes"), Definition( CmdOption("Xnojline"), - "Do not use JLine for editing."), + "Do not use JLine at all for REPL input."), Definition( CmdOptionBound("Xplugin:", Argument("paths")), "Load a plugin from each classpath."), @@ -272,13 +257,13 @@ object scalac extends Command { CmdOption("Xpluginsdir", Argument("path")), "Path to search for plugin archives."), Definition( - CmdOptionBound("Xprint:", Argument("phases")), + CmdOptionBound("Vprint:", Argument("phases")), "Print out program after " & Argument("phases") & " (see below)."), Definition( - CmdOption("Xprint-pos"), + CmdOption("Vprint-pos"), "Print tree positions, as offsets."), Definition( - CmdOption("Xprint-types"), + CmdOption("Vprint-types"), "Print tree types (debugging option)."), Definition( CmdOption("Xprompt"), @@ -440,9 +425,9 @@ object scalac extends Command { "Specify the options to be passed to the " & MBold("java") & " command defined by " & MBold("JAVACMD") & ".", - "With Java 1.5 (or newer) one may for example configure the " & - "memory usage of the JVM as follows: " & - Mono("JAVA_OPTS=\"-Xmx512M -Xms16M -Xss16M\"") + "One might for example configure the " & + "memory usage of the JVM with: " & + Mono("JAVA_OPTS=\"-Xmx2G -Xss16M\"") )))) val examples = Section("EXAMPLES", diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala index 922b3d242aba..675bb4ec01c1 100644 --- a/src/manual/scala/man1/scaladoc.scala +++ b/src/manual/scala/man1/scaladoc.scala @@ -13,7 +13,7 @@ object scaladoc extends Command { protected def cn = new Error().getStackTrace()(0).getClassName() - val scalaLink = Link("Scala 2", "http://scala-lang.org/") + 
val scalaLink = Link("Scala 2", "https://scala-lang.org/") val name = Section("NAME", @@ -77,7 +77,11 @@ object scaladoc extends Command { "Define a URL to be concatenated with source locations for link to source files."), Definition( CmdOption("doc-external-doc", Argument("external-doc")), - "Define a comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."))), + "Define a comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."), + Definition( + CmdOption("jdk-api-doc-base", Argument("url")), + "Define a URL to be concatenated with source locations for link to Java API.")) + ), Section("Compiler Options", DefinitionList( @@ -120,7 +124,7 @@ object scaladoc extends Command { "Specify character encoding used by source files.", "The default value is platform-specific (Linux: " & Mono("\"UTF8\"") & ", Windows: " & Mono("\"Cp1252\"") & "). Executing the following " & - "code in the Scala interpreter will return the default value " & + "code in the Scala REPL will return the default value " & "on your system:", MBold(" scala> ") & Mono("new java.io.InputStreamReader(System.in).getEncoding")))))) diff --git a/src/manual/scala/man1/scalap.scala b/src/manual/scala/man1/scalap.scala index 34d60dbc8f0b..a06b5f5b88a0 100644 --- a/src/manual/scala/man1/scalap.scala +++ b/src/manual/scala/man1/scalap.scala @@ -16,7 +16,7 @@ object scalap extends Command { val name = Section("NAME", MBold(command) & " " & NDash & " Scala class file decoder for the " & - Link("Scala 2", "http://scala-lang.org/") & " language") + Link("Scala 2", "https://scala-lang.org/") & " language") val synopsis = Section("SYNOPSIS", diff --git a/src/manual/scala/tools/docutil/EmitHtml.scala b/src/manual/scala/tools/docutil/EmitHtml.scala index c703f5b7a9a2..dfafab61ba37 100644 --- a/src/manual/scala/tools/docutil/EmitHtml.scala +++ b/src/manual/scala/tools/docutil/EmitHtml.scala @@ -17,13 +17,13 @@ object EmitHtml { .replaceAll(">", 
">") /* */ - def emitSection(section: Section, depth: Int) { - def emitPara(text: AbstractText) { + def emitSection(section: Section, depth: Int): Unit = { + def emitPara(text: AbstractText): Unit = { out println "

    " emitText(text) out println "\n
    " } - def emitText(text: AbstractText) { + def emitText(text: AbstractText): Unit = { text match { case seq:SeqText => seq.components foreach emitText @@ -90,7 +90,7 @@ object EmitHtml { } } - def emitParagraph(para: Paragraph) { + def emitParagraph(para: Paragraph): Unit = { para match { case TextParagraph(text) => out println "

    " @@ -143,7 +143,7 @@ object EmitHtml { section.paragraphs foreach emitParagraph } - private def emit3columns(col1: String, col2: String, col3: String) { + private def emit3columns(col1: String, col2: String, col3: String): Unit = { out println "

    " out println col1 out println "
    " @@ -155,21 +155,21 @@ object EmitHtml { out println "" } - private def emitHeader(col1: String, col2: String, col3: String) { + private def emitHeader(col1: String, col2: String, col3: String): Unit = { out println "" out println "
    " emit3columns(col1, col2, col3) out println "
    " } - private def emitFooter(col1: String, col2: String, col3: String) { + private def emitFooter(col1: String, col2: String, col3: String): Unit = { out println "" out println "
    " emit3columns(col1, col2, col3) out println "
    " } - def emitDocument(document: Document) { + def emitDocument(document: Document): Unit = { out.println("") out.println("") out.println("\n") @@ -207,19 +207,19 @@ object EmitHtml { case _ => sys.exit(1) } - def emitHtml(classname: String, outStream: java.io.OutputStream = out.out) { - if(outStream != out.out) out setOut outStream - try { - val cl = this.getClass.getClassLoader() - val clasz = cl loadClass classname - val meth = clasz getDeclaredMethod "manpage" - val doc = meth.invoke(null).asInstanceOf[Document] - emitDocument(doc) - } catch { - case ex: Exception => - ex.printStackTrace() - System.err println "Error in EmitManPage" - sys.exit(1) + def emitHtml(classname: String, outStream: java.io.OutputStream = out.out): Unit = + Console.withOut(outStream) { + try { + val cl = this.getClass.getClassLoader() + val clasz = cl loadClass classname + val meth = clasz getDeclaredMethod "manpage" + val doc = meth.invoke(null).asInstanceOf[Document] + emitDocument(doc) + } catch { + case ex: Exception => + ex.printStackTrace() + System.err println "Error in EmitManPage" + sys.exit(1) + } } - } } diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala index 21f1bf514a54..441883e62395 100644 --- a/src/manual/scala/tools/docutil/EmitManPage.scala +++ b/src/manual/scala/tools/docutil/EmitManPage.scala @@ -18,12 +18,12 @@ object EmitManPage { def escape(text: String) = text.replaceAll("-", "\\-") - def emitSection(section: Section, depth: Int) { - def emitPara(text: AbstractText) { + def emitSection(section: Section, depth: Int): Unit = { + def emitPara(text: AbstractText): Unit = { emitText(text) out println "\n.IP" } - def emitText(text: AbstractText) { + def emitText(text: AbstractText): Unit = { text match { case seq:SeqText => seq.components foreach emitText @@ -70,9 +70,9 @@ object EmitManPage { for (d <- definitions) { out println ".TP" emitText(d.term) - out.println + out.println() 
emitText(d.description) - if (n > 1) { out.println; n -= 1 } + if (n > 1) { out.println(); n -= 1 } } case Link(label, url) => @@ -83,17 +83,17 @@ object EmitManPage { } } - def emitParagraph(para: Paragraph) { + def emitParagraph(para: Paragraph): Unit = { para match { case TextParagraph(text) => out println ".PP" emitText(text) - out.println + out.println() case BlockQuote(text) => out println ".TP" emitText(text) - out.println + out.println() case CodeSample(text) => out println "\n.nf" @@ -104,7 +104,7 @@ object EmitManPage { for (item <- lst.items) { out println ".IP" emitText(item) - out.println + out.println() } case lst:NumberedList => @@ -114,7 +114,7 @@ object EmitManPage { val item = lst.items(idx) out.println(".IP \" " + (idx+1) + ".\"") emitText(item) - out.println + out.println() } case TitledPara(title, text) => @@ -144,13 +144,13 @@ object EmitManPage { section.paragraphs foreach emitParagraph } - def emitDocument(doc: Document) { + def emitDocument(doc: Document): Unit = { out println ".\\\" ##########################################################################" out println ".\\\" # __ #" out println ".\\\" # ________ ___ / / ___ Scala 2 On-line Manual Pages #" out println ".\\\" # / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL #" out println ".\\\" # __\\ \\/ /__/ __ |/ /__/ __ | #" - out println ".\\\" # /____/\\___/_/ |_/____/_/ | | http://scala-lang.org/ #" + out println ".\\\" # /____/\\___/_/ |_/____/_/ | | https://scala-lang.org/ #" out println ".\\\" # |/ #" out println ".\\\" ##########################################################################" out println ".\\\"" @@ -169,19 +169,19 @@ object EmitManPage { case _ => sys.exit(1) } - def emitManPage(classname: String, outStream: java.io.OutputStream = out.out) { - if(outStream != out.out) out setOut outStream - try { - val cl = this.getClass.getClassLoader() - val clasz = cl loadClass classname - val meth = clasz getDeclaredMethod "manpage" - val doc = 
meth.invoke(null).asInstanceOf[Document] - emitDocument(doc) - } catch { - case ex: Exception => - ex.printStackTrace() - System.err println "Error in EmitManPage" - sys.exit(1) + def emitManPage(classname: String, outStream: java.io.OutputStream = out.out): Unit = + Console.withOut(outStream) { + try { + val cl = this.getClass.getClassLoader() + val clasz = cl loadClass classname + val meth = clasz getDeclaredMethod "manpage" + val doc = meth.invoke(null).asInstanceOf[Document] + emitDocument(doc) + } catch { + case ex: Exception => + ex.printStackTrace() + System.err println "Error in EmitManPage" + sys.exit(1) + } } - } } diff --git a/src/manual/scala/tools/docutil/ManMaker.scala b/src/manual/scala/tools/docutil/ManMaker.scala index 4b8bfc6606f4..4c19ccac27c6 100644 --- a/src/manual/scala/tools/docutil/ManMaker.scala +++ b/src/manual/scala/tools/docutil/ManMaker.scala @@ -1,16 +1,13 @@ /* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Stephane Micheloud + * Copyright 2005-2018 LAMP/EPFL * Adapted from Lex Spoon's sbaz manual */ package scala.tools.docutil -import org.apache.tools.ant.Task - import java.io.{File, FileOutputStream} -class ManMaker extends Task { +class ManMaker { /** The command for which to generate the man page */ private var command: List[String] = Nil @@ -22,22 +19,22 @@ class ManMaker extends Task { private var manout: Option[File] = None - def setCommand(input: String) { + def setCommand(input: String): Unit = { command = input.split(",").toList.flatMap { s => val st = s.trim() if (st != "") List(st) else Nil } } - def setHtmlout(input: File) { + def setHtmlout(input: File): Unit = { htmlout = Some(input) } - def setManout(input: File) { + def setManout(input: File): Unit = { manout = Some(input) } - override def execute() { + def execute(): Unit = { if (command.isEmpty) sys.error("Attribute 'command' is not set.") if (htmlout.isEmpty) sys.error("Attribute 'htmlout' is not set.") if (manout.isEmpty) 
sys.error("Attribute 'manout' is not set.") @@ -60,7 +57,7 @@ class ManMaker extends Task { /** Command line runner for ManMaker which is called from the sbt build. */ object ManMaker extends App { - val Array(commands, htmlout, manout) = args + val Array(commands, htmlout, manout) = args: @unchecked val mm = new ManMaker mm.setCommand(commands) mm.setHtmlout(new File(htmlout)) diff --git a/src/manual/scala/tools/docutil/ManPage.scala b/src/manual/scala/tools/docutil/ManPage.scala index 853c17b94c0b..cfd9844629f2 100644 --- a/src/manual/scala/tools/docutil/ManPage.scala +++ b/src/manual/scala/tools/docutil/ManPage.scala @@ -24,7 +24,7 @@ object ManPage { case class Emph(contents: AbstractText) extends AbstractText case class Mono(contents: AbstractText) extends AbstractText case class Quote(contents: AbstractText) extends AbstractText - implicit def str2text(str: String) = Text(str) + implicit def str2text(str: String): Text = Text(str) case class Definition(term: AbstractText, description: AbstractText) case class DefinitionList(definitions: Definition*) extends AbstractText @@ -37,14 +37,14 @@ object ManPage { case class CodeSample(text: String) extends Paragraph case class BlockQuote(text: AbstractText) extends Paragraph implicit def text2para(text: AbstractText): Paragraph = TextParagraph(text) - implicit def str2para(str: String) = text2para(str2text(str)) + implicit def str2para(str: String): Paragraph = text2para(str2text(str)) case class BulletList(items: AbstractText*) extends Paragraph case class NumberedList(items: AbstractText*) extends Paragraph case class TitledPara(title: String, text: AbstractText) extends Paragraph case class EmbeddedSection(section: Section) extends Paragraph - implicit def section2Para(section: Section) = EmbeddedSection(section) + implicit def section2Para(section: Section): EmbeddedSection = EmbeddedSection(section) case class Section(title: String, paragraphs: Paragraph*) diff --git 
a/src/manual/scala/tools/docutil/resources/css/style.css b/src/manual/scala/tools/docutil/resources/css/style.css index 62768298cb97..2cf3bd7c9669 100644 --- a/src/manual/scala/tools/docutil/resources/css/style.css +++ b/src/manual/scala/tools/docutil/resources/css/style.css @@ -10,7 +10,7 @@ font-size: 90%; } -/* see http://www.maxdesign.com.au/presentation/external/ */ +/* see https://www.maxdesign.com.au/presentation/external/ */ a.external span { position: absolute; left: -5000px; diff --git a/src/manual/scala/tools/docutil/resources/index.html b/src/manual/scala/tools/docutil/resources/index.html index 18e234393012..a634daa31e07 100644 --- a/src/manual/scala/tools/docutil/resources/index.html +++ b/src/manual/scala/tools/docutil/resources/index.html @@ -17,7 +17,7 @@
    @@ -87,9 +87,9 @@

    [Solaris and Linux] + href="https://java.sun.com/j2se/1.5.0/docs/tooldocs/solaris/classpath.html">Solaris and Linux] [Windows] + href="https://java.sun.com/j2se/1.5.0/docs/tooldocs/windows/classpath.html">Windows] @@ -99,7 +99,7 @@

    [Solaris, Linux and Windows] + href="https://java.sun.com/j2se/1.5.0/docs/tooldocs/findingclasses.html">Solaris, Linux and Windows] @@ -180,7 +180,7 @@


    - Copyright (c) 2002-2013 EPFL, + Copyright (c) 2002-2013 EPFL, Lausanne, unless specified otherwise.
    All rights reserved.
    diff --git a/src/partest/scala/tools/partest/AsmNode.scala b/src/partest/scala/tools/partest/AsmNode.scala index 89291ad5b106..25272feff90d 100644 --- a/src/partest/scala/tools/partest/AsmNode.scala +++ b/src/partest/scala/tools/partest/AsmNode.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,7 @@ package scala.tools.partest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.tools.asm import asm._ import asm.tree._ diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala index 2f8ead6b84aa..e27beb2769ef 100644 --- a/src/partest/scala/tools/partest/BytecodeTest.scala +++ b/src/partest/scala/tools/partest/BytecodeTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,11 +12,11 @@ package scala.tools.partest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.tools.asm.{ClassReader, ClassWriter} import scala.tools.asm.tree._ import java.io.{InputStream, File => JFile} - +import scala.tools.testkit.ASMConverters import AsmNode._ import scala.tools.nsc.CloseableRegistry @@ -82,7 +82,7 @@ abstract class BytecodeTest { println(s"Different member counts in $name1 and $name2") false } - else (ms1, ms2).zipped.forall { (m1, m2) => + else ms1.lazyZip(ms2).forall { (m1, m2) => val c1 = f(m1) val c2 = f(m2).replace(name2, name1) if (c1 == c2) @@ -97,7 +97,7 @@ abstract class BytecodeTest { /** * Compare the bytecodes of two methods. * - * For the `similar` function, you probably want to pass [[ASMConverters.equivalentBytecode]]. 
+ * For the `similar` function, you probably want to pass [[scala.tools.testkit.ASMConverters.equivalentBytecode]]. */ def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (List[Instruction], List[Instruction]) => Boolean) = { val isa = instructionsFromMethod(methA) @@ -113,8 +113,8 @@ abstract class BytecodeTest { val width = isa.map(_.toString.length).max val lineWidth = len.toString.length (1 to len) foreach { line => - val isaPadded = isa.map(_.toString) orElse Stream.continually("") - val isbPadded = isb.map(_.toString) orElse Stream.continually("") + val isaPadded = isa.map(_.toString) orElse LazyList.continually("") + val isbPadded = isb.map(_.toString) orElse LazyList.continually("") val a = isaPadded(line-1) val b = isbPadded(line-1) diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala index b896738e575a..71886ef4a7e9 100644 --- a/src/partest/scala/tools/partest/CompilerTest.scala +++ b/src/partest/scala/tools/partest/CompilerTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,10 +12,15 @@ package scala.tools.partest +import scala.language.implicitConversions import scala.reflect.runtime.{universe => ru} import scala.tools.nsc._ -/** For testing compiler internals directly. +/** A DirectTest for testing compiler internals. + * The test must implement the `check` function to check + * the result of compiling the `code`; the test may override + * `sources` instead to check multiple sources. + * * Each source code string in "sources" will be compiled, and * the check function will be called with the source code and the * resulting CompilationUnit. 
The check implementation should @@ -26,13 +31,12 @@ abstract class CompilerTest extends DirectTest { def check(source: String, unit: global.CompilationUnit): Unit lazy val global: Global = newCompiler() - lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *) + lazy val computedSources = sources + lazy val units: List[global.CompilationUnit] = compilationUnits(global)(computedSources: _ *) import global._ - import definitions.{ compilerTypeFromTag } - - override def extraSettings = "-usejavacp -d " + testOutput.path + import definitions.compilerTypeFromTag - def show() = (sources, units).zipped.foreach(check) + def show() = computedSources.lazyZip(units).foreach(check) // Override at least one of these... def code = "" @@ -44,7 +48,7 @@ abstract class CompilerTest extends DirectTest { if (sym eq NoSymbol) NoType else appliedType(sym, compilerTypeFromTag(t)) } - implicit def mkMkType(sym: Symbol) = new MkType(sym) + implicit def mkMkType(sym: Symbol): MkType = new MkType(sym) def allMembers(root: Symbol): List[Symbol] = { def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = { diff --git a/src/partest/scala/tools/partest/ConsoleLog.scala b/src/partest/scala/tools/partest/ConsoleLog.scala index 6f35fea437ae..53df36711f76 100644 --- a/src/partest/scala/tools/partest/ConsoleLog.scala +++ b/src/partest/scala/tools/partest/ConsoleLog.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -65,8 +65,9 @@ class ConsoleLog(colorEnabled: Boolean) { def echoWarning(msg: String) = echo(bold(red(msg))) def printDot(): Unit = printProgress(".") - def printS(): Unit = printProgress(_warning + "s" +_default) + def printS(): Unit = printProgress(_warning + "s" +_default) def printEx(): Unit = printProgress(_failure + "X" + _default) + def printUnknown(): Unit = printProgress(_failure + "?" + _default) // crash or uninit'd private def printProgress(icon: String): Unit = synchronized { if (dotCount >= DotWidth) { outline("\n" + icon) diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index b6538764b481..44bc08b5c05e 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,10 +13,11 @@ package scala.tools.partest import scala.reflect.internal.util.{BatchSourceFile, SourceFile} -import scala.tools.cmd.CommandLineParser +import scala.sys.process.{Parser => CommandLineParser} import scala.tools.nsc._ import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.settings.ScalaVersion +import scala.util.chaining._ /** Test with code which is embedded as a string. * @@ -30,7 +31,7 @@ import scala.tools.nsc.settings.ScalaVersion * * There are helper methods for creating settings and * invoking a (newly constructed) compiler. 
- */ + */ abstract class DirectTest { // The program being tested in some fashion def code: String @@ -41,22 +42,29 @@ abstract class DirectTest { def testPath = SFile(sys.props("partest.test-path")) def testOutput = Directory(sys.props("partest.output")) + protected def pathOf(locations: String*) = locations.mkString(sys.props("path.separator")) + + // convenient for test classes not in a subpackage of scala + final protected def tokenize(line: String): List[String] = CommandLineParser.tokenize(line) + // override to add additional settings besides -d testOutput.path - def extraSettings: String = "" + // default is -usejavacp + def extraSettings: String = "-usejavacp" // a default Settings object using only extraSettings - def settings: Settings = newSettings(CommandLineParser.tokenize(extraSettings)) + def settings: Settings = newSettings(tokenize(extraSettings)) // settings factory using given args and also debug settings - def newSettings(args: List[String]) = { - val s = new Settings - val allArgs = args ++ CommandLineParser.tokenize(debugSettings) + def newSettings(args: List[String]): Settings = newBaseSettings().tap { s => + val allArgs = debugSettings.pipe(db => if (db.isEmpty) args else args ++ tokenize(db)) log(s"newSettings: allArgs = $allArgs") val (success, residual) = s.processArguments(allArgs, processAll = false) assert(success && residual.isEmpty, s"Bad settings [${args.mkString(",")}], residual [${residual.mkString(",")}]") - s } + // scaladoc has custom settings + def newBaseSettings(): Settings = new Settings + // new compiler using given ad hoc args, -d and extraSettings def newCompiler(args: String*): Global = { - val settings = newSettings(CommandLineParser.tokenize(s"""-d "${testOutput.path}" ${extraSettings}""") ++ args.toList) + val settings = newSettings(tokenize(s"""-d "${testOutput.path}" ${extraSettings}""") ++ args.toList) newCompiler(settings) } @@ -113,7 +121,7 @@ abstract class DirectTest { catch { case t: Exception => 
println(t.getMessage) - t.printStackTrace + t.printStackTrace() sys.exit(1) } diff --git a/src/partest/scala/tools/partest/IcodeComparison.scala b/src/partest/scala/tools/partest/IcodeComparison.scala index 7c4c46628800..ae28cfef1755 100644 --- a/src/partest/scala/tools/partest/IcodeComparison.scala +++ b/src/partest/scala/tools/partest/IcodeComparison.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -49,7 +49,7 @@ abstract class IcodeComparison extends DirectTest { /** Compile the test code and return the contents of all * (sorted) .icode files, which are immediately deleted. * @param arg0 at least one arg is required - * @param args must include -Xprint-icode:phase + * @param args must include -Vprint-icode:phase */ def collectIcode(arg0: String, args: String*): List[String] = { compile("-d" :: testOutput.path :: arg0 :: args.toList : _*) @@ -65,7 +65,7 @@ abstract class IcodeComparison extends DirectTest { } /** Collect icode at the default phase, `printIcodeAfterPhase`. */ - def collectIcode(): List[String] = collectIcode(s"-Xprint-icode:$printIcodeAfterPhase") + def collectIcode(): List[String] = collectIcode(s"-Vprint-icode:$printIcodeAfterPhase") /** Default show is showComparison. May be overridden for showIcode or similar. */ def show() = showComparison() @@ -74,8 +74,8 @@ abstract class IcodeComparison extends DirectTest { * then print the diff of the icode. 
*/ def showComparison() = { - val lines1 = collectIcode(s"-Xprint-icode:$printSuboptimalIcodeAfterPhase") - val lines2 = collectIcode("-optimise", s"-Xprint-icode:$printIcodeAfterPhase") + val lines1 = collectIcode(s"-Vprint-icode:$printSuboptimalIcodeAfterPhase") + val lines2 = collectIcode("-optimise", s"-Vprint-icode:$printIcodeAfterPhase") println(compareContents(lines1, lines2)) } diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala index ec9e20a27875..688c87d708ec 100644 --- a/src/partest/scala/tools/partest/IcodeTest.scala +++ b/src/partest/scala/tools/partest/IcodeTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -35,7 +35,7 @@ abstract class IcodeTest extends DirectTest { compile("-d" :: testOutput.path :: args.toList : _*) val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode") - try icodeFiles sortBy (_.name) flatMap (f => f.lines.toList) + try icodeFiles sortBy (_.name) flatMap (f => f.lines().toList) finally icodeFiles foreach (f => f.delete()) } diff --git a/src/partest/scala/tools/partest/JUnitTest.scala b/src/partest/scala/tools/partest/JUnitTest.scala index f37708aac808..85410fe199c4 100644 --- a/src/partest/scala/tools/partest/JUnitTest.scala +++ b/src/partest/scala/tools/partest/JUnitTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -33,7 +33,7 @@ abstract class JUnitTest(classes: Class[_]*) extends App { trimStack(ex.getCause) } - val unwrapped = Exceptional.unwrap(ex) + val unwrapped = Exceptional.rootCause(ex) trimStack(unwrapped) unwrapped.printStackTrace() } diff --git a/src/partest/scala/tools/partest/JavapTest.scala b/src/partest/scala/tools/partest/JavapTest.scala index d2c126138bb1..05fb811d3166 100644 --- a/src/partest/scala/tools/partest/JavapTest.scala +++ b/src/partest/scala/tools/partest/JavapTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala index 9fb7bb05c74f..6d44a0163080 100644 --- a/src/partest/scala/tools/partest/MemoryTest.scala +++ b/src/partest/scala/tools/partest/MemoryTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -21,7 +21,7 @@ abstract class MemoryTest { val rt = Runtime.getRuntime() def memUsage() = { import java.lang.management._ - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val pools = ManagementFactory.getMemoryPoolMXBeans.asScala pools.map(_.getUsage.getUsed).sum / 1000000d } @@ -31,7 +31,7 @@ abstract class MemoryTest { var i = 0 while (i < calcsPerIter) { calc(); i += 1 } 1 to 5 foreach (_ => rt.gc()) - history += memUsage + history += memUsage() } 1 to 5 foreach (_ => stressTestIter()) diff --git a/src/partest/scala/tools/partest/ParserTest.scala b/src/partest/scala/tools/partest/ParserTest.scala deleted file mode 100644 index 2ee796305881..000000000000 --- a/src/partest/scala/tools/partest/ParserTest.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.partest - -/** A class for testing parser output. - * Just supply the `code` and update the check file. - */ -abstract class ParserTest extends DirectTest { - - override def extraSettings: String = "-usejavacp -Ystop-after:parser -Xprint:parser" - - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } -} diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala index 4dfea7bbea7e..2e9afd5c633d 100644 --- a/src/partest/scala/tools/partest/PartestDefaults.scala +++ b/src/partest/scala/tools/partest/PartestDefaults.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -40,7 +40,7 @@ object PartestDefaults { def waitTime = Duration(prop("partest.timeout") getOrElse "4 hours") def printDurationThreshold = java.lang.Integer.getInteger("partest.print.duration.threshold.ms", 5000) - final val migrateFlagsFiles = false + //def timeout = "1200000" // per-test timeout // probe for the named executable private def jdkexec(name: String): Option[String] = { diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest/scala/tools/partest/ReplTest.scala index e4f2aaabe505..af1e1cc729d3 100644 --- a/src/partest/scala/tools/partest/ReplTest.scala +++ b/src/partest/scala/tools/partest/ReplTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,10 +12,11 @@ package scala.tools.partest +import java.io.File.pathSeparator + import scala.tools.nsc.Settings -import scala.tools.nsc.interpreter.{ILoop, replProps} -import scala.util.matching.Regex -import scala.util.matching.Regex.Match +import scala.tools.nsc.interpreter.shell.{ILoop, ShellConfig} +import scala.util.matching.Regex.{quoteReplacement, Match} /** Test code or commands in a REPL. * @@ -35,42 +36,28 @@ abstract class ReplTest extends DirectTest { if (getClass.getClassLoader.getParent != null) { s.classpath.value = s.classpath.value match { case "" => testOutput.toString - case s => s + java.io.File.pathSeparator + testOutput.toString + case cp => s"$cp$pathSeparator$testOutput" } s.usejavacp.value = true } transformSettings(s) } - def normalize(s: String) = s + /** Transform a line of output, for comparison to expected output. */ + protected def normalize(s: String): String = s /** True for SessionTest to preserve session text. 
*/ - def inSession: Boolean = false - /** True to preserve welcome header, eliding version number. */ - def welcoming: Boolean = false - lazy val header = replProps.welcome - def eval() = { - val s = settings - log("eval(): settings = " + s) - val transcript = ILoop.runForTranscript(code, s, inSession = inSession) + protected def inSession: Boolean = false + /** Config for test. */ + protected def shellConfig(testSettings: Settings): ShellConfig = ILoop.TestConfig(testSettings) + /** The normalized output from evaluating the `code` script. */ + protected def eval(): Iterator[String] = { + val testSettings = settings + log(s"eval(): settings = $testSettings") + val transcript = ILoop.runForTranscript(code, testSettings, shellConfig(testSettings), inSession = inSession) log(s"transcript[[$transcript]]") - val lines = transcript.linesIterator - val clean = - if (welcoming) { - val welcome = "(Welcome to Scala).*".r - lines map { - case welcome(s) => s - case s => s - } - } else { - lines.drop(header.linesIterator.size) - } - clean.map(normalize) + transcript.linesIterator.map(normalize) } - def show() = eval() foreach println -} - -/** Retain and normalize the welcome message. */ -trait Welcoming { this: ReplTest => - override def welcoming = true + /** Print the transcript produced by `eval`. */ + override def show() = eval().foreach(println) } /** Strip Any.toString's id@abcdef16 hashCodes. These are generally at end of result lines. */ @@ -90,7 +77,7 @@ trait Lambdaless extends ReplTest { } object Lambdaless { private val lambdaless = """\$Lambda(?:\$\d+)?/(?:0x[a-f0-9]{16}|\d+)(?:@[a-fA-F0-9]+)?""".r - private def stripLambdaClassName(s: String): String = lambdaless.replaceAllIn(s, Regex.quoteReplacement("")) + private def stripLambdaClassName(s: String): String = lambdaless.replaceAllIn(s, quoteReplacement("")) } /** Normalize a REPL stack trace by stripping line numbers and count of elided frames. 
*/ @@ -100,13 +87,13 @@ trait StackCleaner extends ReplTest { } object StackCleaner { private val elidedAndMore = """(\s+\.{3} )\d+( elided and )\d+( more)""".r - private val elidedOrMore = """(\s+\.{3} )\d+( (?:elided|more))""".r + private val elidedOrMore = """(\s+\.{3} )\d+( (?:elided|more))""".r private val frame = """(\s+at [^(]+\(:)\d+(\))""".r private def stripFrameCount(line: String) = line match { - case elidedAndMore(ellipsis, infix, suffix) => s"$ellipsis???$infix???$suffix" // must be before `elided` - case elidedOrMore(ellipsis, suffix) => s"$ellipsis???$suffix" - case frame(prefix, suffix) => s"${prefix}XX${suffix}" - case s => s + case elidedAndMore(ellipsis, infix, suffix) => s"$ellipsis???$infix???$suffix" // must precede `elidedOrMore` + case elidedOrMore(ellipsis, suffix) => s"$ellipsis???$suffix" + case frame(prefix, suffix) => s"${prefix}XX${suffix}" + case _ => line } } @@ -126,7 +113,7 @@ object StackCleaner { */ abstract class SessionTest extends ReplTest { /** Session transcript. */ - def session: String = testPath.changeExtension("check").toFile.slurp + def session: String = testPath.changeExtension("check").toFile.slurp() /** Expected output, as an iterator, optionally marginally stripped. 
*/ def expected = if (stripMargins) session.stripMargin.linesIterator else session.linesIterator @@ -146,12 +133,13 @@ abstract class SessionTest extends ReplTest { override final def code = pasted.findAllMatchIn(expected.mkString("", "\n", "\n")).map { case pasted(null, null, prompted) => def continued(m: Match): Option[String] = m match { - case margin(text) => Some(Regex.quoteReplacement(text)) + case margin(text) => Some(quoteReplacement(text)) case _ => None } margin.replaceSomeIn(prompted, continued) case pasted(cmd, pasted, null) => cmd + pasted + "\u0004" + case x => throw new MatchError(x) }.mkString // Just the last line of the interactive prompt @@ -167,7 +155,7 @@ abstract class SessionTest extends ReplTest { } object SessionTest { // \R for line break since Java 8 - private def input(prompt: String) = raw"""(?m)^$prompt(:pa.*\R)// Entering paste mode.*\R\R((?:.*\R)*)\R// Exiting paste mode.*\R|^scala> (.*\R(?:\s*\| .*\R)*)""".r + private def input(prompt: String) = raw"""(?m)^$prompt(:pa.*\R)// Entering paste mode.*\R\R((?:.*\R)*)// Exiting paste mode.*\R|^scala> (.*\R(?:\s*\| .*\R)*)""".r private val margin = """(?m)^\s*\| (.*)$""".r } diff --git a/src/partest/scala/tools/partest/ScaladocJavaModelTest.scala b/src/partest/scala/tools/partest/ScaladocJavaModelTest.scala index 3f89dc71f167..c3e251ced27a 100644 --- a/src/partest/scala/tools/partest/ScaladocJavaModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocJavaModelTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index a088e5ba6e0a..31f0e692bfa2 100644 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,13 +12,13 @@ package scala.tools.partest -import scala.tools.cmd.CommandLineParser import scala.tools.nsc._ +import scala.tools.nsc.doc.{DocFactory, Universe} import scala.tools.nsc.doc.base.comment._ import scala.tools.nsc.doc.model._ import scala.tools.nsc.doc.model.diagram._ -import scala.tools.nsc.doc.{DocFactory, Universe} import scala.tools.nsc.reporters.ConsoleReporter +import scala.util.chaining._ /** A class for testing scaladoc model generation * - you need to specify the code in the `code` method @@ -72,7 +72,7 @@ abstract class ScaladocModelTest extends DirectTest { try { // 1 - compile with scaladoc and get the model out - val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}) + val universe = model.getOrElse { sys.error("Scaladoc Model Test ERROR: No universe generated!") } // 2 - check the model generated testModel(universe.rootPackage) println("Done.") @@ -85,22 +85,23 @@ abstract class ScaladocModelTest extends DirectTest { System.setErr(prevErr) } - private[this] var settings: doc.Settings = null + private[this] var docSettings: doc.Settings = null + + // custom settings, silencing "model contains X documentable templates" + override def newBaseSettings(): doc.Settings = new doc.Settings(_ => ()).tap(_.scaladocQuietRun = true) + override def newSettings(args: List[String]): doc.Settings = super.newSettings(args).asInstanceOf[doc.Settings] + override def settings: doc.Settings = 
newSettings(tokenize(s"$extraSettings $scaladocSettings")) // create a new scaladoc compiler def newDocFactory: DocFactory = { - settings = new doc.Settings(_ => ()) - settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! - val args = extraSettings + " " + scaladocSettings - new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think - val docFact = new DocFactory(new ConsoleReporter(settings), settings) - docFact + docSettings = settings + new DocFactory(new ConsoleReporter(docSettings), docSettings) } // compile with scaladoc and output the result def model: Option[Universe] = newDocFactory.makeUniverse(Right(code)) - // finally, enable easy navigation inside the entities + // enable easy navigation inside the entities object access { implicit class TemplateAccess(tpl: DocTemplateEntity) { @@ -113,6 +114,9 @@ abstract class ScaladocModelTest extends DirectTest { def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")") def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with Trait => t}) + def _annotation(name: String): DocTemplateEntity = getTheFirst(_annotations(name), tpl.qualifiedName + ".annotation(" + name + ")") + def _annotations(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with AnnotationClass => t}) + def _traitMbr(name: String): MemberTemplateEntity = getTheFirst(_traitsMbr(name), tpl.qualifiedName + ".traitMember(" + name + ")") def _traitsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: MemberTemplateEntity if t.isTrait => t}) @@ -178,20 +182,19 @@ abstract class ScaladocModelTest extends DirectTest { case _ => "" } c match { - case c: Comment => - extractText(c.body) - case b: Body => - extractText(b) + case c: Comment => 
extractText(c.body) + case b: Body => extractText(b) + case x => throw new MatchError(x) } } def countLinks(c: Comment, p: EntityLink => Boolean): Int = countLinksInBody(c.body, p) - def countLinksInBody(body: Body, p: EntityLink => Boolean): Int = { + def countLinksInBody(body: Body, linkTest: EntityLink => Boolean): Int = { def countLinks(b: Any): Int = b match { - case el: EntityLink if p(el) => 1 - case s: Seq[_] => s.toList.map(countLinks(_)).sum - case p: Product => p.productIterator.toList.map(countLinks(_)).sum + case el: EntityLink if linkTest(el) => 1 + case s: collection.Seq[_] => s.toList.map(countLinks(_)).sum + case p: Product => p.productIterator.map(countLinks(_)).sum case _ => 0 } countLinks(body) diff --git a/src/partest/scala/tools/partest/ScriptTest.scala b/src/partest/scala/tools/partest/ScriptTest.scala index 3fdc32ac6774..18775fb4a310 100644 --- a/src/partest/scala/tools/partest/ScriptTest.scala +++ b/src/partest/scala/tools/partest/ScriptTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,10 +21,10 @@ abstract class ScriptTest extends DirectTest { def testmain = "TestMain" override def extraSettings = s"-usejavacp -Xscript $testmain" def scriptPath = testPath changeExtension "script" - def code = scriptPath.toFile.slurp + def code = scriptPath.toFile.slurp() def argv = Seq.empty[String] def show() = { - compile() + assert(compile()) ScalaClassLoader(getClass.getClassLoader).run(testmain, argv) } } diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala deleted file mode 100644 index ce76d29e67fa..000000000000 --- a/src/partest/scala/tools/partest/SecurityTest.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.partest - -import java.security._ -import java.util._ - -abstract class SecurityTest extends App { - def throwIt(x: Any) = throw new AccessControlException("" + x) - def propertyCheck(p: PropertyPermission): Unit = throwIt(p) - - def check(perm: Permission): Unit = perm match { - case p: PropertyPermission => propertyCheck(p) - case _ => () - } -} diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest/scala/tools/partest/SigTest.scala index 49a756ea1945..b54487b30fe9 100644 --- a/src/partest/scala/tools/partest/SigTest.scala +++ b/src/partest/scala/tools/partest/SigTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala index 32e4ec84136f..613c2d996212 100644 --- a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala +++ b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/partest/scala/tools/partest/StubErrorMessageTest.scala b/src/partest/scala/tools/partest/StubErrorMessageTest.scala index 723d947a6f19..949850babcea 100644 --- a/src/partest/scala/tools/partest/StubErrorMessageTest.scala +++ b/src/partest/scala/tools/partest/StubErrorMessageTest.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -51,9 +51,9 @@ trait StubErrorMessageTest extends StoreReporterDirectTest { if (extraUserCode == "") compileCode(userCode) else compileCode(userCode, extraUserCode) import scala.reflect.internal.util.Position - filteredInfos.sortBy(_.pos.point).foreach { report => + filteredInfos.foreach { report => print(if (report.severity == storeReporter.ERROR) "error: " else "") - println(Position.formatMessage(report.pos, report.msg, true)) + println(Position.formatMessage(report.pos, report.msg, shortenFile = true)) } } } diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala index ff9041dd7129..9a5847a35938 100644 --- a/src/partest/scala/tools/partest/TestKinds.scala +++ b/src/partest/scala/tools/partest/TestKinds.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala index 3b6dc49444ac..056b8d73ecfd 100644 --- a/src/partest/scala/tools/partest/TestState.scala +++ b/src/partest/scala/tools/partest/TestState.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala deleted file mode 100644 index 6aa597e2bdd3..000000000000 --- a/src/partest/scala/tools/partest/TestUtil.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.partest - -import scala.reflect.{ classTag, ClassTag } - -trait TestUtil { - /** Given function and block of code, evaluates code block, - * calls function with nanoseconds elapsed, and returns block result. - */ - def timed[T](f: Long => Unit)(body: => T): T = { - val start = System.nanoTime - val result = body - val end = System.nanoTime - - f(end - start) - result - } - /** Times body and returns (nanos, result). - */ - def alsoNanos[T](body: => T): (Long, T) = { - var nanos = 0L - val result = timed(nanos = _)(body) - - (nanos, result) - } - def nanos(body: => Unit): Long = alsoNanos(body)._1 - - def intercept[T <: Exception : ClassTag](code: => Unit): Unit = - try { - code - assert(false, "did not throw " + classTag[T]) - } catch { - case ex: Exception if classTag[T].runtimeClass isInstance ex => - } -} - -// Used in tests. -object TestUtil extends TestUtil { -} diff --git a/src/partest/scala/tools/partest/Util.scala b/src/partest/scala/tools/partest/Util.scala index d3a6b3536dbe..26beb886505f 100644 --- a/src/partest/scala/tools/partest/Util.scala +++ b/src/partest/scala/tools/partest/Util.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,55 +12,7 @@ package scala.tools.partest -import scala.language.experimental.macros - object Util { - /** - * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out. - * {{{ - * trace> "".isEmpty - * res: Boolean = true - * - * }}} - * - * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding - * test code in a string. - */ - def trace[A](a: A): A = macro traceImpl[A] - - import scala.reflect.macros.blackbox.Context - def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = { - import c.universe._ - import definitions._ - - // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0, - // because this impairs reflection refactorings - // - // val exprCode = c.literal(show(a.tree)) - // val exprType = c.literal(show(a.actualType)) - // reify { - // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n") - // a.splice - // } - - c.Expr(Block( - List(Apply( - Select(Ident(PredefModule), TermName("println")), - List(Apply( - Select(Apply( - Select(Ident(ScalaPackage), TermName("StringContext")), - List( - Literal(Constant("trace> ")), - Literal(Constant("\\nres: ")), - Literal(Constant(" = ")), - Literal(Constant("\\n")))), - TermName("s")), - List( - Literal(Constant(show(a.tree))), - Literal(Constant(show(a.actualType))), - a.tree))))), - a.tree)) - } def prettyArray(a: Array[_]): collection.IndexedSeq[Any] = new collection.AbstractSeq[Any] with collection.IndexedSeq[Any] { def length = a.length @@ -70,7 +22,7 @@ object Util { case x => x } - override def stringPrefix = "Array" + override def className = "Array" } implicit class ArrayDeep(val a: Array[_]) extends AnyVal { diff --git a/src/partest/scala/tools/partest/async/CompletableFutureAwait.scala b/src/partest/scala/tools/partest/async/CompletableFutureAwait.scala index 
4327722b01e9..a209720911f7 100644 --- a/src/partest/scala/tools/partest/async/CompletableFutureAwait.scala +++ b/src/partest/scala/tools/partest/async/CompletableFutureAwait.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,6 +19,7 @@ import java.util.function.BiConsumer import scala.annotation.compileTimeOnly import scala.language.experimental.macros import scala.reflect.macros.blackbox +import scala.tools.testkit.async.AsyncStateMachine import scala.util.{Failure, Success, Try} object CompletableFutureAwait { diff --git a/src/partest/scala/tools/partest/async/OptionDsl.scala b/src/partest/scala/tools/partest/async/OptionDsl.scala index 0964c0740d55..adc6a0141b86 100644 --- a/src/partest/scala/tools/partest/async/OptionDsl.scala +++ b/src/partest/scala/tools/partest/async/OptionDsl.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,6 +16,7 @@ package async import scala.annotation.compileTimeOnly import scala.language.experimental.macros import scala.reflect.macros.blackbox +import scala.tools.testkit.async.AsyncStateMachine object OptionAwait { def optionally[T](body: T): Option[T] = macro impl diff --git a/src/partest/scala/tools/partest/async/OutputAwait.scala b/src/partest/scala/tools/partest/async/OutputAwait.scala index a026d9ba9a04..96a7bbd6c226 100644 --- a/src/partest/scala/tools/partest/async/OutputAwait.scala +++ b/src/partest/scala/tools/partest/async/OutputAwait.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,6 +17,7 @@ import scala.collection.immutable.HashMap import scala.collection.mutable import scala.language.experimental.macros import scala.reflect.macros.blackbox +import scala.tools.testkit.async.AsyncStateMachine object OutputAwait { def writing[T](body: T): Output[T] = macro impl @@ -52,7 +53,7 @@ object Output { val mutableMap = collection.mutable.HashMap[K, mutable.Builder[V, Vector[V]]]() for ((k, v) <- written) mutableMap.getOrElseUpdate(k, Vector.newBuilder[V]) += v val immutableMapBuilder = collection.immutable.HashMap.newBuilder[K, Vector[V]] - immutableMapBuilder ++= mutableMap.mapValues(_.result()) + immutableMapBuilder ++= mutableMap.view.mapValues(_.result()) immutableMapBuilder.result() } } diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala index e6598714128b..6127f82b8403 100644 --- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala +++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -12,7 +12,7 @@ package scala.tools.partest.instrumented -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class MethodCallTrace(className: String, methodName: String, methodDescriptor: String) { override def toString(): String = className + "." + methodName + methodDescriptor @@ -78,9 +78,13 @@ object Instrumentation { res } + private val ignoredClasses = Set("scala/Console$", "scala/io/AnsiColor") + + // Accommodate Console.println(stuff) but not Predef.println in instrumented code. + // That allows println(true), to avoid warning on true: Any for example. 
val standardFilter: MethodCallTrace => Boolean = t => { - // ignore all calls to Console trigger by printing - t.className != "scala/Console$" && + // ignore all calls to classes triggered by Console.println + !ignoredClasses.contains(t.className) && // console accesses DynamicVariable, let's discard it too !t.className.startsWith("scala/util/DynamicVariable") } diff --git a/src/partest/scala/tools/partest/instrumented/Profiler.java b/src/partest/scala/tools/partest/instrumented/Profiler.java index d67e7d3572ac..fdb37ad0d06f 100644 --- a/src/partest/scala/tools/partest/instrumented/Profiler.java +++ b/src/partest/scala/tools/partest/instrumented/Profiler.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/nest/AbstractRunner.scala b/src/partest/scala/tools/partest/nest/AbstractRunner.scala index 0d7349c65b1b..1118e1995bae 100644 --- a/src/partest/scala/tools/partest/nest/AbstractRunner.scala +++ b/src/partest/scala/tools/partest/nest/AbstractRunner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -18,6 +18,7 @@ import utils.Properties._ import scala.tools.nsc.Properties.{propOrFalse, setProp, versionMsg} import scala.collection.mutable import scala.reflect.internal.util.Collections.distinctBy +import scala.sys.process.Process import scala.util.{Try, Success, Failure} import java.util.concurrent.Executors import java.util.concurrent.TimeUnit @@ -33,9 +34,11 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour val debug: Boolean = config.optDebug || propOrFalse("partest.debug") val verbose: Boolean = config.optVerbose val terse: Boolean = config.optTerse + val realeasy: Boolean = config.optDev + val testBranch: Boolean = config.optBranch protected val printSummary = true - protected val partestCmd = "test/partest" + protected val partestCmd = "partest" private[this] var totalTests = 0 private[this] val passedTests = mutable.ListBuffer[TestState]() @@ -65,6 +68,12 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour private[this] val realSysErr = System.err + val gitRunner = List("/usr/local/bin/git", "/usr/bin/git").map(f => new java.io.File(f)).find(_.canRead) + def runGit[R](cmd: String)(f: LazyList[String] => R): Option[R] = + Try { + gitRunner.map(git => f(Process(s"$git $cmd").lazyLines_!)) + }.toOption.flatten + def statusLine(state: TestState, durationMs: Long) = { import state._ import TestState._ @@ -99,14 +108,10 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour diffed ::: logged } if (terse) { - if (state.isOk) { - if (state.isSkipped) printS() else printDot() - Nil - } - else { - printEx() - statusLine(state, durationMs) :: errInfo - } + if (state.isSkipped) { printS(); Nil } + else if (state.isOk) { printDot() ; Nil } + else if (state.shortStatus(0) == '?') { printUnknown() ; Nil } + else { printEx() ; statusLine(state, durationMs) :: errInfo } } else { echo(statusLine(state, durationMs)) if (!state.isOk) errInfo.foreach(echo) @@ -167,21 +172,34 @@ 
class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour } } - def files_s = failed0.map(_.testFile).mkString(""" \""" + "\n ") - echo("# Failed test paths (this command will update checkfiles)") - echo(partestCmd + " --update-check \\\n " + files_s + "\n") + if (failed0.size == 1) { + echo("# A test failed. To update the check file:") + echo(s"$partestCmd --update-check ${failed0.head.testIdent}") + } + else { + val bslash = "\\" + def files_s = failed0.map(_.testFile).mkString(s" ${bslash}\n ") + echo("# Failed test paths (this command will update checkfiles)") + echo(s"$partestCmd --update-check ${bslash}\n $files_s\n") + } } if (printSummary) { echo(message) levyJudgment() } + if (realeasy) + for (lines <- runGit("status --porcelain")(_.filter(_.endsWith(".check")).map(_.drop(3))) if lines.nonEmpty) { + echo(bold(red("# There are uncommitted check files!"))) + for (file <- lines) + echo(s"$file\n") + } } } /** Run the tests and return the success status */ def run(): Boolean = { - setUncaughtHandler + setUncaughtHandler() if (config.optVersion) echo(versionMsg) else if (config.optHelp) { @@ -190,7 +208,7 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour } else { val norm = Function.chain(Seq(testIdentToTestPath, checkFileToTestFile, testFileToTestDir, testDirToTestFile)) - val (individualTests, invalid) = config.parsed.residualArgs map (p => norm(Path(p))) partition denotesTestPath + val (individualTests, invalid) = config.parsed.residualArgs.map(p => norm(Path(p))).partition(denotesTestPath) if (invalid.nonEmpty) { if (verbose) invalid foreach (p => echoWarning(s"Discarding invalid test path " + p)) @@ -215,9 +233,71 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour paths.sortBy(_.toString) } + // tests touched on this branch + val branchedTests: List[Path] = if (!testBranch) Nil else { + import scala.util.chaining._ + //* issue/12494 8dfd7f015d [upstream/2.13.x: ahead 
1] Allow companion access boundary + //git rev-parse --abbrev-ref HEAD + //git branch -vv --list issue/1234 + //git diff --name-only upstream/2.13.x + val parseVerbose = raw"\* \S+ \S+ \[([^:]+): .*\] .*".r + def parseTracking(line: String) = line match { + case parseVerbose(tracking) => tracking.tap(ref => echo(s"Tracking $ref")) + case _ => "upstream/2.13.x".tap(default => echoWarning(s"Tracking default $default, failed to understand '$line'")) + } + case class MADFile(path: String, status: Int) + // D test/files/neg/t12590.scala + def madden(line: String): MADFile = + line.split("\\s+") match { + case Array(mad, p) => + val score = mad match { case "M" => 0 case "A" => 1 case "D" => -1 } + MADFile(p, score) + case _ => + echoWarning(s"diff --name-status, failed to understand '$line'") + MADFile("NOPATH", -1) + } + def isPresent(mad: MADFile) = mad.status >= 0 + def isTestFiles(path: String) = path.startsWith("test/files/") + val maybeFiles = + for { + current <- runGit("rev-parse --abbrev-ref HEAD")(_.head).tap(_.foreach(b => echo(s"Testing on branch $b"))) + tracking <- runGit(s"branch -vv --list $current")(lines => parseTracking(lines.head)) + files <- runGit(s"diff --name-status $tracking")(lines => lines.map(madden).filter(isPresent).map(_.path).filter(isTestFiles).toList) + } + yield files + //test/files/neg/t12349.check + //test/files/neg/t12349/t12349a.java + //test/files/neg/t12349/t12349b.scala + //test/files/neg/t12349/t12349c.scala + //test/files/neg/t12494.check + //test/files/neg/t12494.scala + maybeFiles.getOrElse(Nil).flatMap { s => + val path = Path(s) + val segs = path.segments + if (segs.length < 4 || !standardKinds.contains(segs(2))) Nil + else if (segs.length > 4) { + val prefix = Path(path.segments.take(4).mkString("/")) + List(pathSettings.testParent / prefix) + } + else { + // p.check -> p.scala or p + val norm = + if (!path.hasExtension("scala") && !path.isDirectory) { + val asDir = Path(path.path.stripSuffix(s".${path.extension}")) + if 
(asDir.exists) asDir + else asDir.addExtension("scala") + } + else path + List(pathSettings.testParent / norm) + } + } + .distinct.filter(denotesTestPath) + } + val isRerun = config.optFailed val rerunTests = if (isRerun) testKinds.failedTests else Nil - def miscTests = individualTests ++ greppedTests ++ rerunTests + val specialTests = if (realeasy) List(Path("test/files/run/t6240-universe-code-gen.scala")) else Nil + def miscTests = List(individualTests, greppedTests, branchedTests, rerunTests, specialTests).flatten val givenKinds = standardKinds filter config.parsed.isSet val kinds = ( @@ -233,15 +313,21 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour def testContributors = { List( - if (rerunTests.isEmpty) "" else "previously failed tests", - if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories", - if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'", - if (individualTests.isEmpty) "" else "specified tests" - ) filterNot (_ == "") mkString ", " + (rerunTests, "previously failed tests"), + (kindsTests, s"${kinds.size} named test categories"), + (greppedTests, s"${greppedTests.size} tests matching '$grepExpr'"), + (branchedTests, s"${branchedTests.size} tests modified on this branch"), + (individualTests, "specified tests"), + (specialTests, "other tests you might have forgotten"), + ).filterNot(_._1.isEmpty).map(_._2) match { + case Nil => "the well of despair. I see you're not in a testing mood." 
+ case one :: Nil => one + case all => all.init.mkString("", ", ", s", and ${all.last}") + } } - val allTests: Array[Path] = distinctBy(miscTests ++ kindsTests)(_.toCanonical).sortBy(_.toString).toArray - val grouped = (allTests groupBy kindOf).toArray sortBy (x => standardKinds indexOf x._1) + val allTests: Array[Path] = distinctBy(miscTests ::: kindsTests)(_.toCanonical).sortBy(_.toString).toArray + val grouped = allTests.groupBy(kindOf).toArray.sortBy(x => standardKinds.indexOf(x._1)) onlyIndividualTests = individualTests.nonEmpty && rerunTests.isEmpty && kindsTests.isEmpty && greppedTests.isEmpty totalTests = allTests.size diff --git a/src/partest/scala/tools/partest/nest/CommandLine.scala b/src/partest/scala/tools/partest/nest/CommandLine.scala new file mode 100644 index 000000000000..f3c22f107554 --- /dev/null +++ b/src/partest/scala/tools/partest/nest/CommandLine.scala @@ -0,0 +1,103 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.partest.nest + +import scala.collection.mutable.ListBuffer +import scala.sys.process.Parser.tokenize + +trait CommandLineConfig { + def enforceArity: Boolean = true + def onlyKnownOptions: Boolean = true +} + +/** An instance of a command line, parsed according to a Spec. 
+ */ +class CommandLine(val spec: Reference, val originalArgs: List[String]) extends CommandLineConfig { + def this(spec: Reference, line: String) = this(spec, tokenize(line)) + def this(spec: Reference, args: Array[String]) = this(spec, args.toList) + + import spec.{ isUnaryOption, isBinaryOption, isExpandOption } + + val Terminator = "--" + val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true + + def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption) + def errorFn(msg: String) = println(msg) + + /** argMap is option -> argument (or "true" if it is a unary argument) + * residualArgs are what is left after removing the options and their args. + */ + lazy val (argMap, residualArgs): (Map[String, String], List[String]) = { + val residualBuffer = new ListBuffer[String] + + def loop(args: List[String]): Map[String, String] = { + def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() } + + /* Returns Some(List(args)) if this option expands to an + * argument list and it's not returning only the same arg. + */ + def expand(s1: String) = { + if (isExpandOption(s1)) { + val s2 = spec expandArg s1 + if (s2 == List(s1)) None + else Some(s2) + } + else None + } + + /* Assumes known options have all been ruled out already. 
*/ + def isUnknown(opt: String) = + onlyKnownOptions && (opt startsWith "-") && { + errorFn(s"Option '$opt' not recognized.") + true + } + + args match { + case Nil => Map() + case Terminator :: xs => residual(xs) + case x :: Nil => + expand(x) match { + case Some(expanded) => loop(expanded) + case _ => + if (isBinaryOption(x) && enforceArity) + errorFn(s"Option '$x' requires argument, found EOF instead.") + + if (isUnaryOption(x)) mapForUnary(x) + else if (isUnknown(x)) Map() + else residual(args) + } + + case x1 :: (tail @ (x2 :: xs)) => + expand(x1) match { + case Some(expanded) => loop(expanded ++ tail) + case _ => + if (x2 == Terminator) mapForUnary(x1) ++ residual(xs) + else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail) + else if (isBinaryOption(x1)) Map(fromOpt(x1) -> x2) ++ loop(xs) + else if (isUnknown(x1)) loop(args.tail) + else residual(List(x1)) ++ loop(args.tail) + } + } + } + + (loop(originalArgs), residualBuffer.map(stripQuotes).toList) + } + + def apply(arg: String) = argMap(arg) + def get(arg: String) = argMap get arg + def isSet(arg: String) = argMap contains arg + + def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse + + override def toString() = argMap.toString + " " + residualArgs.toString +} diff --git a/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala b/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala index 3ba255ad4dc0..e73b20fa0af6 100644 --- a/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala +++ b/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,6 +16,7 @@ import java.io.FileDescriptor import java.net.InetAddress import java.security.Permission +@deprecated("JDK 17 deprecates SecurityManager", since="2.13.7") class DelegatingSecurityManager(delegate: SecurityManager) extends SecurityManager { override def checkExit(status: Int): Unit = if (delegate ne null) delegate.checkExit(status) override def checkPermission(perm: Permission): Unit = if (delegate ne null) delegate.checkPermission(perm) diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala index 2e6e83b7b7a1..91f7bbb61180 100644 --- a/src/partest/scala/tools/partest/nest/DirectCompiler.scala +++ b/src/partest/scala/tools/partest/nest/DirectCompiler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,29 +13,31 @@ package scala.tools.partest package nest -import java.io.{FileWriter, PrintWriter} - +import java.io.{BufferedReader, FileWriter, PrintWriter} import scala.collection.mutable.ListBuffer -import scala.reflect.internal.util.NoPosition +import scala.reflect.internal.util.{CodeAction, NoPosition, Position, ScalaClassLoader} import scala.reflect.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.{CompilerCommand, Global, Settings} +import scala.util.chaining._ import scala.sys.process._ object ExtConsoleReporter { def apply(settings: Settings, writer: PrintWriter) = { - val r = new ConsoleReporter(settings, Console.in, writer, writer) - r.shortname = true - r + val loader = new ClassLoader(getClass.getClassLoader) with ScalaClassLoader + loader.create[ConsoleReporter](settings.reporter.value, settings.errorFn)(settings, Console.in, writer, writer).tap(_.shortname = true) } } +class PlainReporter(settings: Settings, reader: 
BufferedReader, writer: PrintWriter, echo: PrintWriter) extends ConsoleReporter(settings, reader, writer, echo) { + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = writer.println(s"[$severity] [$pos]: $msg") +} class TestSettings(cp: String, error: String => Unit) extends Settings(error) { @deprecated("Use primary constructor", "1.0.12") def this(cp: String) = this(cp, _ => ()) - nowarnings.value = false - encoding.value = "UTF-8" - classpath.value = cp + nowarnings.value = false + encoding.value = "UTF-8" + classpath.value = cp //lint.add("_") } @@ -51,11 +53,11 @@ class DirectCompiler(val runner: Runner) { def newGlobal(settings: Settings, logWriter: FileWriter): Global = newGlobal(settings, ExtConsoleReporter(settings, new PrintWriter(logWriter, true))) + /** Massage args to merge plugins and fix paths. - * Plugin path can be relative to test root, or cwd is out. - * While we're at it, mix in the baseline options, too. - * That's how ant passes in the plugins dir. - */ + * Plugin path can be relative to test root, or cwd (".") means use output dir and copy scalac-plugin.xml there. + * Mix in the baseline options from the suiteRunner (scalacOpts, scalacExtraArgs). 
+ */ private def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): Seq[String] = { val dir = runner.suiteRunner.pathSettings.testRoot // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual) @@ -84,35 +86,37 @@ class DirectCompiler(val runner: Runner) { runner.suiteRunner.scalacExtraArgs ++ filteredOpts ++ others ++ Xplugin } + private def updatePluginPath(args: List[String]): Seq[String] = { + import runner.testInfo.testFile + val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile + updatePluginPath(args, AbstractFile.getDirectory(runner.outDir), AbstractFile.getDirectory(srcDir)) + } def compile(opts0: List[String], sources: List[File]): TestState = { import runner.{sources => _, _} import testInfo._ - // adding codelib.jar to the classpath - // codelib provides the possibility to override standard reify - // this shields the massive amount of reification tests from changes in the API - val codeLib = suiteRunner.pathSettings.srcCodeLib.fold[List[Path]](x => Nil, lib => List[Path](lib)) // add the instrumented library version to classpath -- must come first val specializedOverride: List[Path] = if (kind == "specialized") List(suiteRunner.pathSettings.srcSpecLib.fold(sys.error, identity)) else Nil - val classPath: List[Path] = specializedOverride ++ codeLib ++ fileManager.testClassPath ++ List[Path](outDir) + val classPath: List[Path] = specializedOverride ++ fileManager.testClassPath ++ List[Path](outDir) val parseArgErrors = ListBuffer.empty[String] val testSettings = new TestSettings(FileManager.joinPaths(classPath), s => parseArgErrors += s) val logWriter = new FileWriter(logFile) - val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile - val opts = updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir) + val opts = updatePluginPath(opts0) val command = new 
CompilerCommand(opts.toList, testSettings) val reporter = ExtConsoleReporter(testSettings, new PrintWriter(logWriter, true)) val global = newGlobal(testSettings, reporter) def errorCount = reporter.errorCount - testSettings.outputDirs setSingleOutput outDir.getPath + // usually, -d outDir, but don't override setting by the test + if (!testSettings.outdir.isSetByUser) + testSettings.outputDirs.setSingleOutput(outDir.getPath) def reportError(s: String): Unit = reporter.error(NoPosition, s) @@ -124,7 +128,7 @@ class DirectCompiler(val runner: Runner) { if (command.files.nonEmpty) reportError(command.files.mkString("flags file may only contain compiler options, found: ", space, "")) } - suiteRunner.verbose(s"% compiling ${ sources.map(_.testIdent).mkString(space) }${ if (suiteRunner.debug) " -d " + outDir else ""}") + suiteRunner.verbose(sources.map(_.testIdent).mkString("% compiling ", space, if (suiteRunner.debug) s" -d $outDir" else "")) def execCompile() = if (command.shouldStopWithInfo) { @@ -132,12 +136,14 @@ class DirectCompiler(val runner: Runner) { runner genFail "compilation stopped with info" } else { new global.Run compile sources.map(_.getPath) - if (!reporter.hasErrors) runner.genPass() - else { - reporter.finish() - reporter.close() - runner.genFail(s"compilation failed with $errorCount errors") - } + val result = + if (!reporter.hasErrors) runner.genPass() + else { + reporter.finish() + runner.genFail(s"compilation failed with $errorCount errors") + } + reporter.close() + result } def execOtherCompiler() = { @@ -151,11 +157,10 @@ class DirectCompiler(val runner: Runner) { else runner.genFail(s"compilation failed") } - try { + try if (suiteRunner.config.optCompilerPath.isEmpty) execCompile() else execOtherCompiler() - } - catch { case t: Throwable => reportError(t.getMessage) ; runner.genCrash(t) } - finally { logWriter.close() } + catch runner.crashHandler + finally logWriter.close() } } diff --git 
a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index 348487c4d1cf..6beceaef17dd 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -54,7 +54,7 @@ object FileManager { def mapFile(file: File, replace: String => String): Unit = { val f = SFile(file) - f.printlnAll(f.lines.toList map replace: _*) + f.printlnAll(f.lines().toList map replace: _*) } def jarsWithPrefix(dir: Directory, name: String): Iterator[SFile] = @@ -74,11 +74,12 @@ object FileManager { * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal */ def compareContents(original: Seq[String], revised: Seq[String], originalName: String = "a", revisedName: String = "b"): String = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ + import com.github.difflib.{DiffUtils, UnifiedDiffUtils} - val diff = difflib.DiffUtils.diff(original.asJava, revised.asJava) + val diff = DiffUtils.diff(original.asJava, revised.asJava) if (diff.getDeltas.isEmpty) "" - else difflib.DiffUtils.generateUnifiedDiff(originalName, revisedName, original.asJava, diff, 1).asScala.mkString("\n") + else UnifiedDiffUtils.generateUnifiedDiff(originalName, revisedName, original.asJava, diff, 1).asScala.mkString("\n") } def withTempFile[A](outFile: File, fileBase: String, lines: Seq[String])(body: File => A): A = { diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/partest/scala/tools/partest/nest/FromString.scala similarity index 78% rename from src/compiler/scala/tools/cmd/FromString.scala rename to src/partest/scala/tools/partest/nest/FromString.scala index 
7eb20e43bbc2..1a801f9d818f 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/partest/scala/tools/partest/nest/FromString.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,10 +10,8 @@ * additional information regarding copyright ownership. */ -package scala.tools -package cmd +package scala.tools.partest.nest -import nsc.io.Directory import scala.reflect.OptManifest /** A general mechanism for defining how a command line argument @@ -30,6 +28,8 @@ abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunctio } object FromString { + import scala.sys.process.Parser.tokenize + import scala.tools.nsc.io.Directory // We need this because we clash with the String => Path implicits. private def toDir(s: String) = new Directory(new java.io.File(s)) @@ -39,21 +39,21 @@ object FromString { override def isDefinedAt(s: String) = toDir(s).isDirectory def apply(s: String): Directory = if (isDefinedAt(s)) toDir(s) - else cmd.runAndExit(println("'%s' is not an existing directory." format s)) + else runAndExit(println("'%s' is not an existing directory." format s)) } def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] { private def resolve(s: String) = (toDir(s) toAbsoluteWithRoot root).toDirectory override def isDefinedAt(s: String) = resolve(s).isDirectory def apply(s: String): Directory = if (isDefinedAt(s)) resolve(s) - else cmd.runAndExit(println("'%s' is not an existing directory." format resolve(s))) + else runAndExit(println("'%s' is not an existing directory." format resolve(s))) } /** Argument expander, i.e. turns single argument "foo bar baz" into argument * list "foo", "bar", "baz". 
*/ val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]] { - def apply(s: String) = toArgs(s) + def apply(s: String) = tokenize(s) } /** Identity. @@ -65,8 +65,7 @@ object FromString { /** Implicit as the most likely to be useful as-is. */ implicit val IntFromString: FromString[Int] = new FromString[Int] { - override def isDefinedAt(s: String) = safeToInt(s).isDefined - def apply(s: String) = safeToInt(s).get - def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None } + override def isDefinedAt(s: String) = s.toIntOption.isDefined + def apply(s: String) = s.toIntOption.getOrElse(0) } } diff --git a/src/partest/scala/tools/partest/nest/Instance.scala b/src/partest/scala/tools/partest/nest/Instance.scala new file mode 100644 index 000000000000..dabe6d1ba988 --- /dev/null +++ b/src/partest/scala/tools/partest/nest/Instance.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.partest.nest + +import scala.language.implicitConversions + +/** The trait mixed into each instance of a specification. 
+ * + * @see Reference + */ +trait Instance extends Spec { + def parsed: CommandLine + + protected def help(str: => String): Unit = () + + def isSet(s: String) = parsed isSet toOpt(s) + def originalArgs = parsed.originalArgs // the full original list + def residualArgs = parsed.residualArgs // only args which were not options or args to options + + type OptionMagic = Opt.Instance + protected implicit def optionMagicAdditions(name: String): Opt.Instance = new Opt.Instance(programInfo, parsed, name) +} diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/partest/scala/tools/partest/nest/Interpolation.scala similarity index 87% rename from src/compiler/scala/tools/cmd/Interpolation.scala rename to src/partest/scala/tools/partest/nest/Interpolation.scala index 7d3ebd501d8c..522a108ccffe 100644 --- a/src/compiler/scala/tools/cmd/Interpolation.scala +++ b/src/partest/scala/tools/partest/nest/Interpolation.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,9 +10,7 @@ * additional information regarding copyright ownership. */ -package scala -package tools -package cmd +package scala.tools.partest.nest /** Interpolation logic for generated files. The idea is to be * able to write in terms of @@THIS@@ and @@THAT@@ and the reference @@ -40,15 +38,15 @@ trait Interpolation { object Interpolation { /** A simple template for generating bash completion functions. 
*/ - lazy val bashTemplate = """ + lazy val bashTemplate = s""" |_@@PROGRAM@@() |{ | local cur opts base | COMPREPLY=() - | cur="${COMP_WORDS[COMP_CWORD]}" + | cur="$${COMP_WORDS[COMP_CWORD]}" | opts="@@ALLOPTIONS@@" | - | COMPREPLY=($(compgen -W "${opts}" -- ${cur})) + | COMPREPLY=($$(compgen -W "$${opts}" -- $${cur})) | _filedir | return 0 |} && complete -F _@@PROGRAM@@ @@PROGRAM@@ diff --git a/src/compiler/scala/tools/cmd/Meta.scala b/src/partest/scala/tools/partest/nest/Meta.scala similarity index 93% rename from src/compiler/scala/tools/cmd/Meta.scala rename to src/partest/scala/tools/partest/nest/Meta.scala index d913de51b32b..28ffec7e89b0 100644 --- a/src/compiler/scala/tools/cmd/Meta.scala +++ b/src/partest/scala/tools/partest/nest/Meta.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,10 +10,9 @@ * additional information regarding copyright ownership. */ -package scala.tools -package cmd +package scala.tools.partest.nest -import nsc.io.File +import scala.tools.nsc.io.File import Interpolation._ /** Meta-options for command line tools. We could have all kinds @@ -30,7 +29,7 @@ object Meta { self: Spec with Interpolation => Bash.name --> runAndExit(Bash.action()) - val selfUpdateName = SelfUpdate.name --| ; + val selfUpdateName = SelfUpdate.name.--| if (selfUpdateName.isDefined) runAndExit(SelfUpdate.action()) diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/partest/scala/tools/partest/nest/Opt.scala similarity index 92% rename from src/compiler/scala/tools/cmd/Opt.scala rename to src/partest/scala/tools/partest/nest/Opt.scala index 28f1677fc978..9c64896301c9 100644 --- a/src/compiler/scala/tools/cmd/Opt.scala +++ b/src/partest/scala/tools/partest/nest/Opt.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,10 +10,10 @@ * additional information regarding copyright ownership. */ -package scala.tools -package cmd +package scala.tools.partest.nest -import nsc.Properties.envOrElse +import scala.tools.nsc.Properties.envOrElse +import scala.util.chaining._ import Spec.Info /** Machinery for what amounts to a command line specification DSL. @@ -67,7 +67,7 @@ object Opt { def choiceOf[T: FromString](choices: T*) = { addBinary(opt) ; None } def expandTo(args: String*) = { addExpand(name, args.toList) ; addHelpAlias(() => args mkString " ") } - def /(descr: String) = returning(name)(_ => addHelp(() => helpFormatStr.format(opt, descr))) + def /(descr: String) = name.tap(_ => addHelp(() => helpFormatStr.format(opt, descr))) } class Instance(val programInfo: Info, val parsed: CommandLine, val name: String) extends Implicit with Error { @@ -76,7 +76,7 @@ object Opt { def --| = parsed get opt def --^[T: FromString] = { val fs = implicitly[FromString[T]] - --| map { arg => + --|.map { arg => if (fs isDefinedAt arg) fs(arg) else failOption(arg, "not a " + fs.targetString) } diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala index 4fff5f3c3474..e9ed01018acd 100644 --- a/src/partest/scala/tools/partest/nest/PathSettings.scala +++ b/src/partest/scala/tools/partest/nest/PathSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -44,5 +44,4 @@ class PathSettings(testSourcePath: String) { val srcDir = Directory((testRoot / testSourcePath).toCanonical) def srcSpecLib = findJar("instrumented", Directory(srcDir / "speclib")) - def srcCodeLib = findJar("code", Directory(srcDir / "codelib"), Directory(testRoot / "files" / "codelib") /* work with --srcpath pending */) } diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/partest/scala/tools/partest/nest/Reference.scala similarity index 88% rename from src/compiler/scala/tools/cmd/Reference.scala rename to src/partest/scala/tools/partest/nest/Reference.scala index c837bee156ec..93c83cf6d40b 100644 --- a/src/compiler/scala/tools/cmd/Reference.scala +++ b/src/partest/scala/tools/partest/nest/Reference.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,11 +10,11 @@ * additional information regarding copyright ownership. */ -package scala.tools -package cmd +package scala.tools.partest.nest import scala.collection.mutable.ListBuffer -import nsc.Properties.envOrNone +import scala.language.implicitConversions +import scala.tools.nsc.Properties.envOrNone /** Mixes in the specification trait and uses the vals therein to * side-effect private accumulators. 
From this emerges formatted help, @@ -45,7 +45,7 @@ trait Reference extends Spec { final def apply(args: String*): ThisCommandLine = creator(propertyArgs ++ args flatMap expandArg) type OptionMagic = Opt.Reference - protected implicit def optionMagicAdditions(name: String) = new Opt.Reference(programInfo, options, name) + protected implicit def optionMagicAdditions(name: String): Opt.Reference = new Opt.Reference(programInfo, options, name) } object Reference { @@ -99,7 +99,7 @@ object Reference { lazy val binary = _binary.distinct lazy val all = unary ++ binary lazy val expansionMap = _expand - lazy val helpMsg = _help map (f => f() + "\n") mkString - lazy val longestArg = all map (_.length) max + lazy val helpMsg = _help.map(f => f() + "\n").mkString + lazy val longestArg = all.map(_.length).max } } diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala index 95f9c4558a7a..fbdd16a69c1f 100644 --- a/src/partest/scala/tools/partest/nest/Runner.scala +++ b/src/partest/scala/tools/partest/nest/Runner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,24 +16,26 @@ package nest import java.io.{Console => _, _} import java.lang.reflect.InvocationTargetException import java.nio.charset.Charset -import java.nio.file.{Files, StandardOpenOption} +import java.nio.file.{Files, Path, StandardOpenOption}, StandardOpenOption.{APPEND, CREATE} -import scala.collection.mutable.ListBuffer +import scala.collection.mutable, mutable.ListBuffer import scala.concurrent.duration.Duration import scala.reflect.internal.FatalError -import scala.reflect.internal.util.ScalaClassLoader +import scala.reflect.internal.util.ScalaClassLoader, ScalaClassLoader.URLClassLoader import scala.sys.process.{Process, ProcessLogger} -import scala.tools.nsc.Properties.{isWin, propOrEmpty} +import scala.tools.nsc.Properties.{isAvian, isWin, javaSpecVersion, propOrEmpty} import scala.tools.nsc.{CompilerCommand, Global, Settings} import scala.tools.nsc.reporters.ConsoleReporter +import scala.tools.nsc.settings.ScalaVersion import scala.tools.nsc.util.stackTraceString +import scala.util.{Failure, Success, Try, Using} +import scala.util.Properties.isJavaAtLeast +import scala.util.chaining._ +import scala.util.control.{ControlThrowable, NonFatal} +import scala.util.matching.Regex.quoteReplacement import ClassPath.join -import TestState.{Crash, Fail, Pass, Skip, Updated} import FileManager.{compareContents, joinPaths, withTempFile} -import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader -import scala.util.{Failure, Success, Try} -import scala.util.Properties.javaSpecVersion -import scala.util.control.ControlThrowable +import TestState.{Crash, Fail, Pass, Skip, Updated} /** pos/t1234.scala or pos/t1234 if dir */ case class TestInfo(testFile: File) { @@ -48,9 +50,6 @@ case class TestInfo(testFile: File) { /** pos/t1234.check */ val checkFile: File = testFile.changeExtension("check") - /** pos/t1234.flags */ - def flagsFile: File = testFile.changeExtension("flags") - // outputs /** pos/t1234-pos.log */ @@ -67,7 +66,7 @@ case class 
TestInfo(testFile: File) { } /** Run a single test. */ -class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner => +class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { private val stopwatch = new Stopwatch() import testInfo._ @@ -78,20 +77,22 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = private val _transcript = new TestTranscript + // start log event def pushTranscript(msg: String) = _transcript.add(msg) + // append to last log in transcript + def appendTranscript(log: String) = _transcript.append(log) + lazy val outDir = { outFile.mkdirs() ; outFile } - def showCrashInfo(t: Throwable): Unit = { - System.err.println(s"Crashed running test $testIdent: " + t) - if (!suiteRunner.terse) - System.err.println(stackTraceString(t)) - } - protected def crashHandler: PartialFunction[Throwable, TestState] = { - case t: InterruptedException => - genTimeout() + // if there is a checkfile, log message for diff; otherwise log stack trace for post-mortem + def crashHandler: PartialFunction[Throwable, TestState] = { + case _: InterruptedException => genTimeout() + case t: FatalError if checkFile.canRead => + logFile.appendAll(s"fatal error: ${t.getMessage}") + genCrash(t) case t: Throwable => - showCrashInfo(t) + if (!suiteRunner.terse) System.err.println(s"Crashed running test $testIdent: " + t) logFile.appendAll(stackTraceString(t)) genCrash(t) } @@ -116,7 +117,7 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = joinPaths(outDir :: testClassPath), "-J-Duser.language=en", "-J-Duser.country=US" - ) ++ (toolArgsFor(files)("javac") + ) ++ (toolArgsFor(files)(ToolName.javacOpt) ) ++ (files.map(_.getAbsolutePath) ) @@ -127,31 +128,21 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = } /** Evaluate an action body and judge whether it passed. 
*/ - def nextTestAction[T](body: => T)(eval: PartialFunction[T, TestState]): TestState = eval.applyOrElse(body, (_: T) => genPass) + def nextTestAction[T](body: => T)(eval: PartialFunction[T, TestState]): TestState = eval.applyOrElse(body, (_: T) => genPass()) /** If the action does not result in true, fail the action. */ def nextTestActionExpectTrue(reason: String, body: => Boolean): TestState = nextTestAction(body) { case false => genFail(reason) } /** Fail the action. */ - def nextTestActionFailing(reason: String): TestState = nextTestActionExpectTrue(reason, false) + def nextTestActionFailing(reason: String): TestState = nextTestActionExpectTrue(reason, body = false) - private def assembleTestCommand(outDir: File, logFile: File): List[String] = { - // check whether there is a ".javaopts" file - val argsFile = testFile changeExtension "javaopts" - val javaopts = readOptionsFile(argsFile) + private def assembleTestCommand(outDir: File, javaopts: List[String]): List[String] = { if (javaopts.nonEmpty) - suiteRunner.verbose(s"Found javaopts file '$argsFile', using options: '${javaopts.mkString(",")}'") - - // Note! As this currently functions, suiteRunner.javaOpts must precede argString - // because when an option is repeated to java only the last one wins. - // That means until now all the .javaopts files were being ignored because - // they all attempt to change options which are also defined in - // partest.java_opts, leading to debug output like: - // - // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k' - // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...] 
+ suiteRunner.verbose(s"Using java options: '${javaopts.mkString(",")}'") + val propertyOpts = propertyOptions(fork = true).map { case (k, v) => s"-D$k=$v" } val classpath = joinPaths(extraClasspath ++ testClassPath) + // `javaopts` last; for repeated arguments, the last one wins javaCmdPath +: ( (suiteRunner.javaOpts.split(' ') ++ extraJavaOptions ++ javaopts).filter(_ != "").toList ++ Seq( "-classpath", @@ -204,36 +195,36 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = protected def runCommand(args: Seq[String], outFile: File): Boolean = { val nonzero = 17 // rounding down from 17.3 //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0 - val pl = ProcessLogger(outFile) - def run: Int = { - val p = - Try(Process(args).run(pl)) match { - case Failure(e) => outFile.appendAll(stackTraceString(e)) ; return -1 - case Success(v) => v + Using.resource(ProcessLogger(outFile)) { pl => + def run: Int = { + val p = + Try(Process(args).run(pl)) match { + case Failure(e) => outFile.appendAll(stackTraceString(e)) ; return -1 + case Success(v) => v + } + try p.exitValue() + catch { + case e: InterruptedException => + suiteRunner.verbose(s"Interrupted waiting for command to finish (${args mkString " "})") + p.destroy() + nonzero + case t: Throwable => + suiteRunner.verbose(s"Exception waiting for command to finish: $t (${args mkString " "})") + p.destroy() + throw t } - try p.exitValue - catch { - case e: InterruptedException => - suiteRunner.verbose(s"Interrupted waiting for command to finish (${args mkString " "})") - p.destroy - nonzero - case t: Throwable => - suiteRunner.verbose(s"Exception waiting for command to finish: $t (${args mkString " "})") - p.destroy - throw t } + pl.buffer(run) == 0 } - try pl.buffer(run) == 0 - finally pl.close() } - private def execTest(outDir: File, logFile: File): TestState = { - val cmd = assembleTestCommand(outDir, logFile) + private def execTest(outDir: File, logFile: File, javaopts: 
List[String]): TestState = { + val cmd = assembleTestCommand(outDir, javaopts) pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName) nextTestAction(runCommand(cmd, logFile)) { case false => - _transcript append EOL + logFile.fileContents + appendTranscript(EOL + logFile.fileContents) genFail("non-zero exit code") } } @@ -245,38 +236,33 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = def run(): Unit = { StreamCapture.withExtraProperties(propertyOptions(fork = false).toMap) { try { - val out = Files.newOutputStream(log.toPath, StandardOpenOption.APPEND) + val out = Files.newOutputStream(log.toPath, CREATE, APPEND) try { val loader = new URLClassLoader(classesDir.toURI.toURL :: Nil, getClass.getClassLoader) StreamCapture.capturingOutErr(out) { val cls = loader.loadClass("Test") val main = cls.getDeclaredMethod("main", classOf[Array[String]]) - try { - main.invoke(null, Array[String]("jvm")) - } catch { - case ite: InvocationTargetException => throw ite.getCause - } + try main.invoke(null, Array[String]("jvm")) + catch { case ite: InvocationTargetException => throw ite.getCause } } - } finally { - out.close() } + finally out.close() } catch { case t: ControlThrowable => throw t - case t: Throwable => + case NonFatal(t) => // We'll let the checkfile diffing report this failure - Files.write(log.toPath, stackTraceString(t).getBytes(Charset.defaultCharset()), StandardOpenOption.APPEND) + Files.write(log.toPath, stackTraceString(t).getBytes(Charset.defaultCharset()), CREATE, APPEND) + case t: Throwable => + val data = (if (t.getMessage != null) t.getMessage else t.getClass.getName).getBytes(Charset.defaultCharset()) + Files.write(log.toPath, data, CREATE, APPEND) + throw t } } } pushTranscript(s" > ${logFile.getName}") - - TrapExit(() => run()) match { - case Left((status, throwable)) if status != 0 => - genFail("non-zero exit code") - case _ => - genPass - } + run() + genPass() } } @@ -296,8 +282,6 @@ class Runner(val 
testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = * A missing flag evaluates the same as true. */ def filteredCheck: Seq[String] = { - import scala.util.Properties.{javaSpecVersion, isAvian} - import scala.tools.nsc.settings.ScalaVersion // use lines in block with this label? def retainOn(expr0: String) = { val expr = expr0.trim @@ -313,6 +297,7 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = val current = ScalaVersion(javaSpecVersion) if (up != null) current >= required else current == required case "avian" => isAvian + case "isWin" => isWin case "true" => true case "-optimise" | "-optimize" => flagWasSet("-optimise") || flagWasSet("-optimize") @@ -342,18 +327,10 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = compareContents(original = checked, revised = logged, originalName = checkname, revisedName = logFile.getName) } - val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead) - val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options") - // --color=always --word-diff - def gitDiff(f1: File, f2: File): Option[String] = { - try gitRunner map { git => - val cmd = s"$git diff $gitDiffOptions $f1 $f2" - val diff = Process(cmd).lineStream_!.drop(4).map(_ + "\n").mkString - - "\n" + diff - } - catch { case t: Exception => None } + val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options") + // --color=always --word-diff + runGit(s"diff $gitDiffOptions $f1 $f2")(_.drop(4).map(_ + "\n").mkString).map("\n" + _) } /** Normalize the log output by applying test-specific filters @@ -369,37 +346,33 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = * any Windows backslashes with the one true file separator char. 
*/ def normalizeLog(): Unit = { - import scala.util.matching.Regex - // Apply judiciously; there are line comments in the "stub implementations" error output. - val slashes = """[/\\]+""".r + val slashes = """[/\\]+""".r def squashSlashes(s: String) = slashes.replaceAllIn(s, "/") // this string identifies a path and is also snipped from log output. - val elided = parentFile.getAbsolutePath + val elided = parentFile.getAbsolutePath // something to mark the elision in the log file (disabled) - val ellipsis = "" //".../" // using * looks like a comment + val ellipsis = "" //".../" // using * looks like a comment // no spaces in test file paths below root, because otherwise how to detect end of path string? val pathFinder = raw"""(?i)\Q${elided}${File.separator}\E([\${File.separator}\S]*)""".r - def canonicalize(s: String): String = - pathFinder.replaceAllIn(s, m => Regex.quoteReplacement(ellipsis + squashSlashes(m group 1))) + def canonicalize: String => String = + s => pathFinder.replaceAllIn(s, m => quoteReplacement(ellipsis + squashSlashes(m.group(1)))) - def masters = { + def masters = { val files = List(new File(parentFile, "filters"), new File(suiteRunner.pathSettings.srcDir.path, "filters")) - files.filter(_.exists).flatMap(_.fileLines).map(_.trim).filter(s => !(s startsWith "#")) + files.filter(_.exists).flatMap(_.fileLines).map(_.trim).filterNot(_.startsWith("#")) } - val filters = toolArgs("filter", split = false) ++ masters - val elisions = ListBuffer[String]() - //def lineFilter(s: String): Boolean = !(filters exists (s contains _)) - def lineFilter(s: String): Boolean = ( - filters map (_.r) forall { r => - val res = (r findFirstIn s).isEmpty - if (!res) elisions += s - res + val filters = toolArgs(ToolName.filter) ++ masters + lazy val elisions = ListBuffer[String]() + def lineFilter(s: String): Boolean = + filters.map(_.r).forall { r => + val unfiltered = r.findFirstIn(s).isEmpty + if (!unfiltered && suiteRunner.verbose) elisions += s + unfiltered } - ) 
logFile.mapInPlace(canonicalize)(lineFilter) if (suiteRunner.verbose && elisions.nonEmpty) { @@ -414,7 +387,7 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = normalizeLog() pushTranscript(s"diff $checkFile $logFile") currentDiff match { - case "" => genPass + case "" => genPass() case diff if config.optUpdateCheck => suiteRunner.verbose("Updating checkfile " + checkFile) checkFile.writeAll(logFile.fileContents) @@ -424,134 +397,125 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = val bestDiff = if (!checkFile.canRead) diff else - gitRunner.flatMap(_ => withTempFile(outFile, fileBase, filteredCheck)(f => + gitRunner.flatMap(_ => withTempFile(outDir, fileBase, filteredCheck)(f => gitDiff(f, logFile))).getOrElse(diff) - _transcript append bestDiff + appendTranscript(bestDiff) genFail("output differs") } } - /** 1. Creates log file and output directory. - * 2. Runs script function, providing log file and output directory as arguments. - * 2b. or, just run the script without context and return a new context - */ - def runInContext(body: => TestState): TestState = { - body - } - /** Grouped files in group order, and lex order within each group. */ - def groupedFiles(sources: List[File]): List[List[File]] = ( - if (sources.tail.nonEmpty) { - val grouped = sources groupBy (_.group) - grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName)) + def groupedFiles(sources: List[File]): List[List[File]] = + if (sources.sizeIs > 1) { + val grouped = sources.groupBy(_.group) + grouped.keys.toList.sorted.map(grouped(_).sortBy(_.getName)) } else List(sources) - ) /** Source files for the given test file. 
*/ def sources(file: File): List[File] = if (file.isDirectory) file.listFiles.toList.filter(_.isJavaOrScala) else List(file) def newCompiler = new DirectCompiler(this) - def attemptCompile(sources: List[File]): TestState = { - (testFile :: (if (testFile.isDirectory) sources else Nil)).map(_.changeExtension("flags")).find(_.exists()) match { - // TODO: Deferred until the remaining 2.12 tests work without their flags files - //case Some(flagsFile) => - //genFail(s"unexpected flags file $flagsFile (use source comment // scalac: -Xfatal-warnings)") - case _ => - val state = newCompiler.compile(flagsForCompilation(sources), sources) - if (!state.isOk) - pushTranscript(s"$EOL${logFile.fileContents}") - - state + def attemptCompile(sources: List[File], extraFlags: List[String] = Nil): TestState = + newCompiler.compile(flagsForCompilation(sources) ::: extraFlags, sources).tap { state => + if (!state.isOk) appendTranscript(EOL + logFile.fileContents) } - } - // all sources in a round may contribute flags via .flags files or // scalac: -flags + // all sources in a round may contribute flags via // scalac: -flags + // under --realeasy, if a jvm isn't specified, require the minimum viable using -release 8 + // to avoid accidentally committing a test that requires a later JVM. 
def flagsForCompilation(sources: List[File]): List[String] = { - val perTest = readOptionsFile(flagsFile) - val perGroup = if (testFile.isDirectory) { - sources.flatMap(f => readOptionsFile(f.changeExtension("flags"))) - } else Nil - val perFile = toolArgsFor(sources)("scalac") - perTest ++ perGroup ++ perFile + var perFile = toolArgsFor(sources)(ToolName.scalac) + if (parentFile.getParentFile.getName == "macro-annot") + perFile ::= "-Ymacro-annotations" + if (realeasy && isJavaAtLeast(9) && !perFile.exists(releaseFlag.matches) && toolArgsFor(sources)(ToolName.jvm).isEmpty) + perFile ::= "-release:8" + perFile } + private val releaseFlag = raw"--?release(?::\d+)?".r // inspect sources for tool args - def toolArgs(tool: String, split: Boolean = true): List[String] = - toolArgsFor(sources(testFile))(tool, split) - - // inspect given files for tool args of the form `tool: args` - // if args string ends in close comment, drop the `*` `/` - // if split, parse the args string as command line. + def toolArgs(tool: ToolName): List[String] = toolArgsFor(sources(testFile))(tool) + + // for each file, cache the args for each tool + private val fileToolArgs = new mutable.HashMap[Path, Map[ToolName, List[String]]] + //private val optionsPattern = raw"\s*//>\s*using\s+(?:([^.]+)\.)?option(s)?\s+(.*)".r + private val optionsPattern = raw"\s*//>\s*using\s+(${ToolName.alts})\s+(.*)".r + + // Inspect given files for tool args in header line comments of the form `// tool: args`. + // If the line comment starts `//>`, accept `using option` or `using options` pragmas + // to define options to`scalac`. Or take `using test.options`, where test scope is used for test options. + // (`test` scope is not used that way by scala-cli, where framework args are passed on command line.) + // (One could imagine `using test.testOpt` for framework args.) + // If `filter:`, return entire line as if quoted, else parse the args string as command line. 
+ // Currently, we look for scalac, javac, java, jvm, filter, test. // - def toolArgsFor(files: List[File])(tool: String, split: Boolean = true): List[String] = { - def argsFor(f: File): List[String] = { - import scala.tools.cmd.CommandLineParser.tokenize - val max = 10 - val tag = s"$tool:" - val endc = "*" + "/" // be forgiving of /* scalac: ... */ - def stripped(s: String) = s.substring(s.indexOf(tag) + tag.length).stripSuffix(endc) - def argsplitter(s: String): List[String] = if (split) tokenize(s) else List(s.trim()) - val src = Files.lines(f.toPath, codec.charSet) - val args: Option[String] = try { - val x: java.util.stream.Stream[String] = src.limit(max).filter(_.contains(tag)).map(stripped) - val s = x.findAny.orElse("") - if (s == "") None else Some(s) - } finally src.close() - args.map((arg: String) => argsplitter(arg)).getOrElse(Nil) + def toolArgsFor(files: List[File])(tool: ToolName): List[String] = { + def argsFor(f: File): List[String] = fileToolArgs.getOrElseUpdate(f.toPath, readToolArgs(f)).apply(tool) + def readToolArgs(f: File): Map[ToolName, List[String]] = optionsFromHeader(readHeaderFrom(f)) + def optionsFromHeader(header: List[String]) = { + import scala.sys.process.Parser.tokenize + def matchLine(line: String): List[(ToolName, List[String])] = line match { + case optionsPattern(scope, rest) => + val named = Try { + if (scope == null) ToolName.scalac + else ToolName.named(scope) + }.toOption + named match { + case None => + suiteRunner.verbose(s"ignoring pragma with unknown scope '$scope': $line") + Nil + case Some(name) => + val settings = tokenize(rest).filter(_ != ",").map(_.stripSuffix(",")) + if (settings.isEmpty) Nil + else (name, settings) :: Nil + } + case _ => Nil + } + header.flatMap(matchLine) + .groupBy(_._1) + .map { case (k, kvs) => (k, kvs.flatMap(_._2)) } + .withDefaultValue(List.empty[String]) } + def readHeaderFrom(f: File): List[String] = + Using.resource(Files.lines(f.toPath, 
codec.charSet))(_.limit(10).toArray()).toList.map(_.toString) files.flatMap(argsFor) } - abstract class CompileRound { - def fs: List[File] - def result: TestState + sealed abstract class CompileRound { + def files: List[File] def description: String + protected def computeResult: TestState + + final lazy val result: TestState = { pushTranscript(description); computeResult } - def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " " - def isOk = result.isOk - def mkScalacString(): String = s"""scalac $fsString""" - override def toString = description + ( if (result.isOk) "" else "\n" + result.status ) + final protected def fsString = files.map(_.toString.stripPrefix(s"$parentFile/")).mkString(" ") + final override def toString = description + ( if (result.isOk) "" else "\n" + result.status ) } - case class OnlyJava(fs: List[File]) extends CompileRound { + final case class OnlyJava(files: List[File]) extends CompileRound { def description = s"""javac $fsString""" - lazy val result = { pushTranscript(description) ; javac(fs) } + override protected def computeResult = javac(files) } - case class OnlyScala(fs: List[File]) extends CompileRound { - def description = mkScalacString() - lazy val result = { pushTranscript(description) ; attemptCompile(fs) } + final case class OnlyScala(files: List[File]) extends CompileRound { + def description = s"""scalac $fsString""" + override protected def computeResult = attemptCompile(files) } - case class ScalaAndJava(fs: List[File]) extends CompileRound { - def description = mkScalacString() - lazy val result = { pushTranscript(description) ; attemptCompile(fs) } + final case class ScalaAndJava(files: List[File]) extends CompileRound { + def description = s"""scalac $fsString""" + override protected def computeResult = attemptCompile(files) } - case class SkipRound(fs: List[File], state: TestState) extends CompileRound { + final case class SkipRound(files: List[File], state: TestState) extends 
CompileRound { def description: String = state.status - lazy val result = { pushTranscript(description); state } + override protected def computeResult = state } def compilationRounds(file: File): List[CompileRound] = { - val sources = runner.sources(file) - - if (PartestDefaults.migrateFlagsFiles) { - def writeFlags(f: File, flags: List[String]) = - if (flags.nonEmpty) f.writeAll((s"// scalac: ${ojoin(flags: _*)}" +: f.fileLines).map(_ + EOL): _*) - val flags = readOptionsFile(flagsFile) - sources.filter(_.isScala).foreach { - case `testFile` => - writeFlags(testFile, flags) - flagsFile.delete() - case f => - val more = f.changeExtension("flags") - writeFlags(f, flags ::: readOptionsFile(more)) - more.delete() - } - } - - val Range = """(\d+)(?:(\+)|(?: *\- *(\d+)))?""".r - val currentJavaVersion = javaSpecVersion.stripPrefix("1.").toInt - val skipStates = toolArgsFor(sources)("javaVersion", split = false).flatMap { + import scala.util.Properties.javaSpecVersion + val Range = """(\d+)(?:(\+)|(?:-(\d+)))?""".r + lazy val currentJavaVersion = javaSpecVersion.stripPrefix("1.").toInt + val allFiles = sources(file) + val skipStates = toolArgsFor(allFiles)(ToolName.jvm).flatMap { case v @ Range(from, plus, to) => val ok = if (plus == null) @@ -559,18 +523,20 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = else from.toInt <= currentJavaVersion && currentJavaVersion <= to.toInt else currentJavaVersion >= from.toInt - if (ok) None + if (ok && suiteRunner.realeasy && from.toInt > 8) Some(genSkip(s"skipped on Java $javaSpecVersion, compiling against JDK8 but must run on $v")) + else if (ok) None else Some(genSkip(s"skipped on Java $javaSpecVersion, only running on $v")) case v => - Some(genFail(s"invalid javaVersion range in test comment: $v")) + Some(genFail(s"invalid jvm range in test comment: $v")) + } + skipStates.headOption match { + case Some(state) => List(SkipRound(List(file), state)) + case _ => 
groupedFiles(allFiles).flatMap(mixedCompileGroup) } - skipStates.headOption - .map(state => List(SkipRound(List(file), state))) - .getOrElse(groupedFiles(sources).flatMap(mixedCompileGroup)) } def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = { - val (scalaFiles, javaFiles) = allFiles partition (_.isScala) + val (scalaFiles, javaFiles) = allFiles.partition(_.isScala) val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles)) val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles)) @@ -578,31 +544,41 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = } def runPosTest(): TestState = - if (checkFile.exists) genFail("unexpected check file for pos test (use -Xfatal-warnings with neg test to verify warnings)") - else runTestCommon() - - def runNegTest(): TestState = runInContext { - // test result !isOk, usually because DNC. So pass/fail depending on whether diffIsOk (matches check file). - // Skip and Crash remain fails, except a Crash due to FatalError only will defer to check file comparison. - def checked(r: CompileRound) = r.result match { + if (checkFile.exists) genFail("unexpected check file for pos test (use -Werror with neg test to verify warnings)") + else runTestCommon()() + + def runNegTest(): TestState = { + // a "crash test" passes if the error is not FatalError and there is a check file to compare. + // a neg test passes if the log compares same to check file. + // under "//> using retest.option -some-flags", also check pos compilation after adding the extra flags. 
+ def checked(r: TestState) = r match { case s: Skip => s case crash @ Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => crash - case dnc @ _ => diffIsOk + case _ => + val negRes = diffIsOk + toolArgs(ToolName.retest) match { + case extraFlags if extraFlags.nonEmpty && !negRes.isSkipped && negRes.isOk => + // transcript visible under partest --verbose or after failure + val debug = s"recompile $testIdent with extra flags ${extraFlags.mkString(" ")}" + suiteRunner.verbose(s"% $debug") + pushTranscript(debug) + attemptCompile(sources(testFile), extraFlags = extraFlags) + case _ => negRes + } } - compilationRounds(testFile) - .find(r => !r.result.isOk || r.result.isSkipped) - .map(checked) - .getOrElse(genFail("expected compilation failure")) + runTestCommon(checked, expectCompile = false)(identity) } // run compilation until failure, evaluate `andAlso` on success - def runTestCommon(andAlso: => TestState = genPass): TestState = runInContext { - // DirectCompiler already says compilation failed - compilationRounds(testFile) - .find(r => !r.result.isOk || r.result.isSkipped) - .map(_.result) - .getOrElse(genPass) - .andAlso(andAlso) + def runTestCommon(inspector: TestState => TestState = identity, expectCompile: Boolean = true)(andAlso: TestState => TestState = _ => genPass()): TestState = { + val rnds = compilationRounds(testFile) + if (rnds.isEmpty) genFail("nothing to compile") + else + rnds.find(r => !r.result.isOk || r.result.isSkipped).map(r => inspector(r.result)) match { + case Some(res) => res.andAlso(andAlso(res)) + case None if !expectCompile => genFail("expected compilation failure") + case None => andAlso(null) + } } def extraClasspath = kind match { @@ -667,7 +643,7 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = def loop(): TestState = { logWriter.print(prompt) resReader.readLine() match { - case null | "" => logWriter.close() ; genPass + case null | "" => logWriter.close() ; genPass() case line => 
resCompile(line) andAlso loop() } } @@ -693,41 +669,35 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = case k if k.endsWith("-neg") => runNegTest() case _ => runRunTest() } - (state, stopwatch.stop) + (state, stopwatch.stop()) } private def runRunTest(): TestState = { - val argsFile = testFile changeExtension "javaopts" - val javaopts = readOptionsFile(argsFile) + val javaopts = toolArgs(ToolName.javaOpt) val execInProcess = PartestDefaults.execInProcess && javaopts.isEmpty && !Set("specialized", "instrumented").contains(testFile.getParentFile.getName) - def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile) + def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile, javaopts) def noexec() = genSkip("no-exec: tests compiled but not run") - runTestCommon(if (suiteRunner.config.optNoExec) noexec() else exec().andAlso(diffIsOk)) + runTestCommon()(_ => if (suiteRunner.config.optNoExec) noexec() else exec().andAlso(diffIsOk)) } - private def decompileClass(clazz: Class[_], isPackageObject: Boolean): String = { - import scala.tools.scalap - import scalap.scalax.rules.scalasig.ByteCode - - scalap.Main.decompileScala(ByteCode.forClass(clazz).bytes, isPackageObject) - } - - def runScalapTest(): TestState = runTestCommon { - val isPackageObject = testFile.getName startsWith "package" + def runScalapTest(): TestState = runTestCommon() { _ => + import scala.tools.scalap, scalap.scalax.rules.scalasig.ByteCode, scalap.Main.decompileScala + val isPackageObject = testFile.getName.startsWith("package") val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package") val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader) - logFile writeAll decompileClass(loader loadClass className, isPackageObject) + def decompileClass(clazz: Class[_]): String = 
decompileScala(ByteCode.forClass(clazz).bytes, isPackageObject) + logFile.writeAll(decompileClass(loader.loadClass(className))) diffIsOk } def runScriptTest(): TestState = { import scala.sys.process._ - val args: String = testFile.changeExtension("args").fileContents + val args = testFile.changeExtension("args").fileContents val cmdFile = if (isWin) testFile changeExtension "bat" else testFile val succeeded = (((s"$cmdFile $args" #> logFile).!) == 0) - val result = if (succeeded) genPass else genFail(s"script $cmdFile failed to run") + val result = if (succeeded) genPass() else genFail(s"script $cmdFile failed to run") result andAlso diffIsOk } @@ -743,8 +713,8 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { runner = def pass(s: String) = bold(green("% ")) + s def fail(s: String) = bold(red("% ")) + s _transcript.toList match { - case Nil => Nil - case xs => (xs.init map pass) :+ fail(xs.last) + case init :+ last => init.map(pass) :+ fail(last) + case _ => Nil } } } @@ -811,6 +781,31 @@ final class TestTranscript { private[this] val buf = ListBuffer[String]() def add(action: String): this.type = { buf += action ; this } - def append(text: String): Unit = { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) } + def append(text: String): Unit = { val s = buf.last ; buf.dropRightInPlace(1) ; buf += (s + text) } def toList = buf.toList } + +// Tool names in test file header: scalac, javacOpt, javaOpt, jvm, filter, test, retest. 
+sealed trait ToolName +object ToolName { + case object scalac extends ToolName + case object javacOpt extends ToolName + case object javaOpt extends ToolName + case object jvm extends ToolName + case object test extends ToolName + case object retest extends ToolName + case object filter extends ToolName + val values = Array(scalac, javacOpt, javaOpt, jvm, test, retest, filter) + def named(s: String): ToolName = s match { + case "options" => scalac + case "test.options" => test + case "retest.options" => retest + case _ => values.find(_.toString == s).getOrElse(throw new IllegalArgumentException(s)) + } + def option(toolName: ToolName): String = toolName match { + case `scalac` => "options" + case `test` | `retest` => s"$toolName.options" + case _ => toolName.toString + } + val alts = values.map(option).mkString("|") +} diff --git a/src/partest/scala/tools/partest/nest/RunnerSpec.scala b/src/partest/scala/tools/partest/nest/RunnerSpec.scala index cbd420664545..04097a663c25 100644 --- a/src/partest/scala/tools/partest/nest/RunnerSpec.scala +++ b/src/partest/scala/tools/partest/nest/RunnerSpec.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,6 @@ package scala.tools.partest.nest import language.postfixOps -import scala.tools.cmd.{ CommandLine, Interpolation, Meta, Reference, Spec, Instance } - trait RunnerSpec extends Spec with Meta.StdOpts with Interpolation { def referenceSpec = RunnerSpec def programInfo = Spec.Info( @@ -26,7 +24,7 @@ trait RunnerSpec extends Spec with Meta.StdOpts with Interpolation { heading("Test categories:") val optPos = "pos" / "run compilation tests (success)" --? val optNeg = "neg" / "run compilation tests (failure)" --? - val optRun = "run" / "run interpreter and backend tests" --? + val optRun = "run" / "run REPL and backend tests" --? 
val optJvm = "jvm" / "run JVM backend tests" --? val optRes = "res" / "run resident compiler tests" --? val optScalap = "scalap" / "run scalap tests" --? @@ -57,8 +55,10 @@ trait RunnerSpec extends Spec with Meta.StdOpts with Interpolation { val optDebug = "debug" / "enable debugging output, preserve generated files" --? heading("Other options:") - val optVersion = "version" / "show Scala version and exit" --? - val optHelp = "help" / "show this page and exit" --? + val optDev = "realeasy" / "real easy way to test --release 8 and check uncommitted checks" --? + val optBranch = "branch" / "test changes on this branch" --? + val optVersion = "version" / "show Scala version and exit" --? + val optHelp = "help" / "show this page and exit" --? } @@ -68,5 +68,6 @@ object RunnerSpec extends RunnerSpec with Reference { type ThisCommandLine = CommandLine def creator(args: List[String]): ThisCommandLine = new CommandLine(RunnerSpec, args) + // TODO: restructure to avoid using early initializers def forArgs(args: Array[String]): Config = new { val parsed = creator(args.toList) } with Config } diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/partest/scala/tools/partest/nest/Spec.scala similarity index 87% rename from src/compiler/scala/tools/cmd/Spec.scala rename to src/partest/scala/tools/partest/nest/Spec.scala index 9f890f4f73c5..65a892bad758 100644 --- a/src/compiler/scala/tools/cmd/Spec.scala +++ b/src/partest/scala/tools/partest/nest/Spec.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,8 +10,10 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools -package cmd +package scala.tools.partest.nest + +import scala.language.implicitConversions +import scala.util.chaining._ /** This trait works together with others in scala.tools.cmd to allow * declaratively specifying a command line program, with many attendant @@ -42,7 +44,7 @@ object Spec { private var _buf: List[T] = Nil def convert(s: String) = implicitly[FromString[T]] apply s - def apply(s: String): T = returning(convert(s))(_buf +:= _) + def apply(s: String): T = convert(s).tap(_buf +:= _) lazy val get = _buf } diff --git a/src/partest/scala/tools/partest/nest/Stopwatch.scala b/src/partest/scala/tools/partest/nest/Stopwatch.scala index 9112cc7da9ac..0e8d656fe23d 100644 --- a/src/partest/scala/tools/partest/nest/Stopwatch.scala +++ b/src/partest/scala/tools/partest/nest/Stopwatch.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala index b016f784d143..b1ac81d19f24 100644 --- a/src/partest/scala/tools/partest/nest/StreamCapture.scala +++ b/src/partest/scala/tools/partest/nest/StreamCapture.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,6 +15,8 @@ package scala.tools.partest.nest import java.io.{Console => _, _} import java.nio.charset.Charset +import scala.util.Using + object StreamCapture { def savingSystem[A](body: => A): A = { val savedOut = System.out @@ -34,18 +36,15 @@ object StreamCapture { def capturingOutErr[A](output: OutputStream)(body: => A): A = { val charset = Charset.defaultCharset() - val printStream = new PrintStream(output, /*autoflush=*/true, charset.name()) - savingSystem { - System.setOut(printStream) - System.setErr(printStream) - try { + Using.resource(new PrintStream(output, /*autoflush=*/true, charset.name())) { printStream => + savingSystem { + System.setOut(printStream) + System.setErr(printStream) Console.withErr(printStream) { Console.withOut(printStream) { body } } - } finally { - printStream.close() } } } @@ -62,7 +61,8 @@ object StreamCapture { extra.foreach { case (k, v) => modified.setProperty(k, v) } // Trying to avoid other threads seeing the new properties object prior to the new entries // https://github.com/scala/scala/pull/6391#issuecomment-371346171 - UnsafeAccess.U.storeFence() + // (JDK 22 deprecates `storeFence`; once we drop JDK 8 we can use the VarHandles one instead) + UnsafeAccess.U.storeFence(): @annotation.nowarn("cat=deprecation") System.setProperties(modified) try { action diff --git a/src/partest/scala/tools/partest/nest/TrapExit.scala b/src/partest/scala/tools/partest/nest/TrapExit.scala deleted file mode 100644 index 8e4e1d7cb50b..000000000000 --- a/src/partest/scala/tools/partest/nest/TrapExit.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.partest.nest - -object TrapExit { - - private class TrapExitThrowable(val status: Int) extends Throwable { - override def getMessage: String = throw this - override def getCause: Throwable = throw this - } - - def apply[A](action: () => A): Either[(Int, Throwable), A] = { - val saved = System.getSecurityManager - System.setSecurityManager(new DelegatingSecurityManager(saved) { - override def checkExit(status: Int): Unit = throw new TrapExitThrowable(status) - }) - try { - Right(action()) - } catch { - case te: TrapExitThrowable => - Left((te.status, te)) - } finally { - System.setSecurityManager(saved) - } - } -} diff --git a/src/partest/scala/tools/partest/nest/UnsafeAccess.java b/src/partest/scala/tools/partest/nest/UnsafeAccess.java index b28060d4f1d3..fd6958f26816 100644 --- a/src/partest/scala/tools/partest/nest/UnsafeAccess.java +++ b/src/partest/scala/tools/partest/nest/UnsafeAccess.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/nest/package.scala b/src/partest/scala/tools/partest/nest/package.scala index 4cf38946b13b..38a87b0c9398 100644 --- a/src/partest/scala/tools/partest/nest/package.scala +++ b/src/partest/scala/tools/partest/nest/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index 2a5df1613254..8130d836c70d 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,7 +17,8 @@ import java.util.concurrent.{Callable, ExecutorService} import scala.concurrent.duration.Duration import scala.io.Codec -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ +import scala.language.implicitConversions import scala.tools.nsc.util.Exceptional package object partest { @@ -47,7 +48,7 @@ package object partest { Thread.setDefaultUncaughtExceptionHandler( new Thread.UncaughtExceptionHandler { def uncaughtException(thread: Thread, t: Throwable): Unit = { - val t1 = Exceptional.unwrap(t) + val t1 = Exceptional.rootCause(t) System.err.println(s"Uncaught exception on thread $thread: $t1") t1.printStackTrace() } @@ -57,6 +58,12 @@ package object partest { /** Sources have a numerical group, specified by name_7 and so on. */ private val GroupPattern = """.*_(\d+)""".r + private object IntOf { + def unapply(ds: String): Some[Int] = Some { + try ds.toInt + catch { case _: NumberFormatException => -1 } + } + } implicit class `special string ops`(private val s: String) extends AnyVal { def linesIfNonEmpty: Iterator[String] = if (!s.isEmpty) s.linesIterator else Iterator.empty @@ -84,11 +91,11 @@ package object partest { def hasExtension(ext: String) = sf hasExtension ext def changeExtension(ext: String): File = (sf changeExtension ext).jfile - /** The group number for this source file, or -1 for no group. */ + /** The group number for this source file, or -1 for no group or out of range. 
*/ def group: Int = sf.stripExtension match { - case GroupPattern(g) if g.toInt >= 0 => g.toInt - case _ => -1 + case GroupPattern(IntOf(g)) => g + case _ => -1 } // Files.readString on jdk 11 @@ -123,13 +130,9 @@ package object partest { implicit def temporaryPath2File(x: Path): File = x.jfile implicit def stringPathToJavaFile(path: String): File = new File(path) - implicit lazy val implicitConversions = scala.language.implicitConversions - def fileSeparator = java.io.File.separator def pathSeparator = java.io.File.pathSeparator - def words(s: String): List[String] = (s.trim split "\\s+").toList - def timed[T](body: => T): (T, Long) = { val t1 = System.currentTimeMillis val result = body @@ -142,18 +145,6 @@ package object partest { def basename(name: String): String = Path(name).stripExtension - /** In order to allow for spaces in flags/options, this - * parses .flags, .javaopts, javacopts etc files as follows: - * If it is exactly one line, it is split (naively) on spaces. - * If it contains more than one line, each line is its own - * token, spaces and all. - */ - def readOptionsFile(file: File): List[String] = - file.fileLines match { - case x :: Nil => words(x) - case xs => xs - } - def findProgram(name: String): Option[File] = { val pathDirs = sys.env("PATH") match { case null => List("/usr/local/bin", "/usr/bin", "/bin") diff --git a/src/partest/scala/tools/partest/sbt/Framework.scala b/src/partest/scala/tools/partest/sbt/Framework.scala index ac746780628c..e1f644f25b8c 100644 --- a/src/partest/scala/tools/partest/sbt/Framework.scala +++ b/src/partest/scala/tools/partest/sbt/Framework.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -52,7 +52,7 @@ case class PartestTask(taskDef: TaskDef, args: Array[String]) extends Task { if (Runtime.getRuntime().maxMemory() / (1024*1024) < 800) loggers foreach (_.warn(s"""Low heap size detected (~ ${Runtime.getRuntime().maxMemory() / (1024*1024)}M). Please add the following to your build.sbt: javaOptions in Test += "-Xmx1G"""")) - try runner.run + try runner.run() catch { case ex: ClassNotFoundException => loggers foreach { l => l.error("Please make sure partest is running in a forked VM by including the following line in build.sbt:\nfork in Test := true") } diff --git a/src/partest/scala/tools/partest/sbt/SBTRunner.scala b/src/partest/scala/tools/partest/sbt/SBTRunner.scala index 891c51acf88d..be475660cfb7 100644 --- a/src/partest/scala/tools/partest/sbt/SBTRunner.scala +++ b/src/partest/scala/tools/partest/sbt/SBTRunner.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala index 86fc6ec81963..674d427821bb 100644 --- a/src/partest/scala/tools/partest/utils/Properties.scala +++ b/src/partest/scala/tools/partest/utils/Properties.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -17,5 +17,5 @@ package utils object Properties extends scala.util.PropertiesTrait { protected def propCategory = "scala-partest" protected def pickJarBasedOn = classOf[nest.Runner] - override def isAvian = super.isAvian + override lazy val isAvian = javaVmName.contains("Avian") } diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala index fa0a77b47062..d25357b12a88 100644 --- a/src/reflect/scala/reflect/api/Annotations.scala +++ b/src/reflect/scala/reflect/api/Annotations.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -25,12 +25,11 @@ import scala.collection.immutable.ListMap * *
      *
    • ''Java annotations'': annotations on definitions produced by the Java compiler, i.e., subtypes of [[java.lang.annotation.Annotation]] - * attached to program definitions. When read by Scala reflection, the [[scala.annotation.ClassfileAnnotation]] trait - * is automatically added as a superclass to every Java annotation type.
    • + * attached to program definitions. *
    • ''Scala annotations'': annotations on definitions or types produced by the Scala compiler.
    • *
    * - * When a Scala annotation that inherits from [[scala.annotation.StaticAnnotation]] or [[scala.annotation.ClassfileAnnotation]] is compiled, + * When a Scala annotation that inherits from [[scala.annotation.StaticAnnotation]] is compiled, * it is stored as special attributes in the corresponding classfile, and not as a Java annotation. Note that subclassing * just [[scala.annotation.Annotation]] is not enough to have the corresponding metadata persisted for runtime reflection. * @@ -42,7 +41,7 @@ import scala.collection.immutable.ListMap * has to analyze trees representing annotation arguments to manually extract corresponding values. Towards that end, * arguments of an annotation can be obtained via `annotation.tree.children.tail`. * - * For more information about `Annotation`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] + * For more information about `Annotation`s, see the [[https://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] * * @contentDiagram hideNodes "*Api" * @group ReflectionAPI @@ -92,6 +91,20 @@ trait Annotations { self: Universe => @deprecated("use `tree.children.tail` instead", "2.11.0") def scalaArgs: List[Tree] + /** For arguments in [[scalaArgs]], this method returns `true` if the argument AST is a default inserted + * by the compiler, not an explicit argument passed in source code. + * + * Since Scala 2.13.17, the defaults are ASTs of the default expression in the annotation definition. + * Example: + * {{{ + * class ann(x: Int = 42) extends Annotation + * @ann class C + * }}} + * The `annotation.scalaArgs.head` is an AST `Literal(Constant(42))` for which the `argIsDefault` method + * returns `true`. + */ + def argIsDefault(tree: Tree): Boolean + /** Payload of the Java annotation: a list of name-value pairs. * Empty for Scala annotations. 
*/ @@ -114,112 +127,4 @@ trait Annotations { self: Universe => */ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") trait JavaArgumentApi - - /** A literal argument to a Java annotation as `"use X instead"` in `@Deprecated("use X instead")` - * @template - * @group Annotations - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - type LiteralArgument >: Null <: LiteralArgumentApi with JavaArgument - - /** The constructor/extractor for `LiteralArgument` instances. - * @group Extractors - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - val LiteralArgument: LiteralArgumentExtractor - - /** An extractor class to create and pattern match with syntax `LiteralArgument(value)` - * where `value` is the constant argument. - * @group Extractors - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - abstract class LiteralArgumentExtractor { - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def apply(value: Constant): LiteralArgument - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def unapply(arg: LiteralArgument): Option[Constant] - } - - /** The API of `LiteralArgument` instances. - * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page. - * @group API - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - trait LiteralArgumentApi { - /** The underlying compile-time constant value. 
*/ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def value: Constant - } - - /** An array argument to a Java annotation as in `@Target(value={TYPE,FIELD,METHOD,PARAMETER})` - * @template - * @group Annotations - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - type ArrayArgument >: Null <: ArrayArgumentApi with JavaArgument - - /** The constructor/extractor for `ArrayArgument` instances. - * @group Extractors - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - val ArrayArgument: ArrayArgumentExtractor - - /** An extractor class to create and pattern match with syntax `ArrayArgument(args)` - * where `args` is the argument array. - * @group Extractors - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - abstract class ArrayArgumentExtractor { - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def apply(args: Array[JavaArgument]): ArrayArgument - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def unapply(arg: ArrayArgument): Option[Array[JavaArgument]] - } - - /** API of `ArrayArgument` instances. - * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page. - * @group API - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - trait ArrayArgumentApi { - /** The underlying array of Java annotation arguments. */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def args: Array[JavaArgument] - } - - /** A nested annotation argument to a Java annotation as `@Nested` in `@Outer(@Nested)`. - * @template - * @group Annotations - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - type NestedArgument >: Null <: NestedArgumentApi with JavaArgument - - /** The constructor/extractor for `NestedArgument` instances. 
- * @group Extractors - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - val NestedArgument: NestedArgumentExtractor - - /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)` - * where `annotation` is the nested annotation. - * @group Extractors - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - abstract class NestedArgumentExtractor { - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def apply(annotation: Annotation): NestedArgument - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def unapply(arg: NestedArgument): Option[Annotation] - } - - /** API of `NestedArgument` instances. - * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page. - * @group API - */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - trait NestedArgumentApi { - /** The underlying nested annotation. */ - @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") - def annotation: Annotation - } } diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala index 81ddc81c15ec..d35b3a0eaeec 100644 --- a/src/reflect/scala/reflect/api/Constants.scala +++ b/src/reflect/scala/reflect/api/Constants.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,16 +19,25 @@ package api * * According to the section 6.24 "Constant Expressions" of the Scala language specification, * certain expressions (dubbed ''constant expressions'') can be evaluated by the Scala compiler at compile-time. + * Examples include "true", "0" and "classOf[List]". 
* - * [[scala.reflect.api.Constants#Constant]] instances represent certain kinds of these expressions - * (with values stored in the `value` field and its strongly-typed views named `booleanValue`, `intValue` etc.), namely: - * 1. Literals of primitive value classes (bytes, shorts, ints, longs, floats, doubles, chars, booleans and voids). - * 1. String literals. - * 1. References to classes (typically constructed with [[scala.Predef#classOf]]). - * 1. References to enumeration values. + * `Constant` instances can be matched against and can be constructed directly, as if they were case classes: + * {{{ + * assert(Constant(true).value == true) + * Constant(true) match { + * case Constant(s: String) => println("A string: " + s) + * case Constant(b: Boolean) => println("A boolean value: " + b) + * case Constant(x) => println("Something else: " + x) + * } + * }}} * - * Such constants are used to represent literals in abstract syntax trees (the [[scala.reflect.api.Trees#Literal]] node) - * and literal arguments for Java class file annotations (the [[scala.reflect.api.Annotations#LiteralArgument]] class). + * `Constant` instances can wrap the following kinds of expressions: + * 1. Literals of primitive value classes ([[scala.Byte `Byte`]], [[scala.Short `Short`]], [[scala.Int `Int`]], [[scala.Long `Long`]], [[scala.Float `Float`]], [[scala.Double `Double`]], [[scala.Char `Char`]], [[scala.Boolean `Boolean`]] and [[scala.Unit `Unit`]]) - represented directly as the corresponding type + * 1. String literals - represented as instances of `String`. + * 1. References to classes, typically constructed with [[scala.Predef#classOf]] - represented as [[scala.reflect.api.Types#Type types]]. + * 1. References to enumeration values - represented as [[scala.reflect.api.Symbols#Symbol symbols]]. + * + * Instances are used to represent literals in abstract syntax trees, inside [[scala.reflect.api.Trees#Literal]] nodes. 
* * === Example === * @@ -43,10 +52,12 @@ package api * Enumeration value references are represented as instances of [[scala.reflect.api.Symbols#Symbol]], which on JVM point to methods * that return underlying enum values. To inspect an underlying enumeration or to get runtime value of a reference to an enum, * one should use a [[scala.reflect.api.Mirrors#RuntimeMirror]] (the simplest way to get such a mirror is again [[scala.reflect.runtime.package#currentMirror]]). - + * * {{{ + * // File "JavaSimpleEnumeration.java" * enum JavaSimpleEnumeration { FOO, BAR } * + * // File "JavaSimpleAnnotation.java" * import java.lang.annotation.*; * @Retention(RetentionPolicy.RUNTIME) * @Target({ElementType.TYPE}) @@ -55,6 +66,7 @@ package api * JavaSimpleEnumeration enumRef(); * } * + * // File "JavaAnnottee.java" * @JavaSimpleAnnotation( * classRef = JavaAnnottee.class, * enumRef = JavaSimpleEnumeration.BAR @@ -62,33 +74,33 @@ package api * public class JavaAnnottee {} * }}} * {{{ - * import scala.reflect.runtime.universe._ - * import scala.reflect.runtime.{currentMirror => cm} - * - * object Test extends App { - * val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs - * def jarg(name: String) = jann(TermName(name)).asInstanceOf[LiteralArgument].value - * - * val classRef = jarg("classRef").typeValue - * println(showRaw(classRef)) // TypeRef(ThisType(), JavaAnnottee, List()) - * println(cm.runtimeClass(classRef)) // class JavaAnnottee - * - * val enumRef = jarg("enumRef").symbolValue - * println(enumRef) // value BAR - * - * val siblings = enumRef.owner.info.decls - * val enumValues = siblings.filter(sym => sym.isVal && sym.isPublic) - * println(enumValues) // Scope{ - * // final val FOO: JavaSimpleEnumeration; - * // final val BAR: JavaSimpleEnumeration - * // } - * - * // doesn't work because of https://github.com/scala/bug/issues/6459 - * // val enumValue = mirror.reflectField(enumRef.asTerm).get - * val enumClass = cm.runtimeClass(enumRef.owner.asClass) - * 
val enumValue = enumClass.getDeclaredField(enumRef.name.toString).get(null) - * println(enumValue) // BAR - * } + * val javaArgs = typeOf[JavaAnnottee].typeSymbol.annotations(0).tree.children.tail + * + * def jArg[A](lhs: String): Option[A] = javaArgs + * .map { case NamedArg(lhs, Literal(const)) => (lhs.toString, const) } + * .find(_._1 == lhs) + * .map(_._2.value.asInstanceOf[A]) + * + * // class reference, cast to Type + * val classRef = jArg[Type]("classRef").get + * println(showRaw(classRef)) // TypeRef(ThisType(), JavaAnnottee, List()) + * println(cm.runtimeClass(classRef)) // class JavaAnnottee + * // enum value reference, cast to Symbol + * val enumRef = jArg[Symbol]("enumRef").get + * println(enumRef) // value BAR + * + * val siblings = enumRef.owner.info.decls + * val enumValues = siblings.filter(_.isJavaEnum) + * println(enumValues) // Scope{ + * // final val FOO: JavaSimpleEnumeration; + * // final val BAR: JavaSimpleEnumeration + * // } + * + * // doesn't work because of https://github.com/scala/bug/issues/6459 + * // val enumValue = mirror.reflectField(enumRef.asTerm).get + * val enumClass = cm.runtimeClass(enumRef.owner.asClass) + * val enumValue = enumClass.getDeclaredField(enumRef.name.toString).get(null) + * println(enumValue) // BAR * }}} * * @contentDiagram hideNodes "*Api" @@ -97,98 +109,6 @@ package api trait Constants { self: Universe => - /** - * This "virtual" case class represents the reflection interface for literal expressions which can not be further - * broken down or evaluated, such as "true", "0", "classOf[List]". Such values become parts of the Scala abstract - * syntax tree representing the program. The constants - * correspond to section 6.24 "Constant Expressions" of the - * [[http://www.scala-lang.org/files/archive/spec/2.12/ Scala Language Specification]]. 
- * - * Such constants are used to represent literals in abstract syntax trees (the [[scala.reflect.api.Trees#Literal]] node) - * and literal arguments for Java class file annotations (the [[scala.reflect.api.Annotations#LiteralArgument]] class). - * - * Constants can be matched against and can be constructed directly, as if they were case classes: - * {{{ - * assert(Constant(true).value == true) - * Constant(true) match { - * case Constant(s: String) => println("A string: " + s) - * case Constant(b: Boolean) => println("A boolean value: " + b) - * case Constant(x) => println("Something else: " + x) - * } - * }}} - * - * `Constant` instances can wrap certain kinds of these expressions: - * 1. Literals of primitive value classes ([[scala.Byte `Byte`]], [[scala.Short `Short`]], [[scala.Int `Int`]], [[scala.Long `Long`]], [[scala.Float `Float`]], [[scala.Double `Double`]], [[scala.Char `Char`]], [[scala.Boolean `Boolean`]] and [[scala.Unit `Unit`]]) - represented directly as the corresponding type - * 1. String literals - represented as instances of the `String`. - * 1. References to classes, typically constructed with [[scala.Predef#classOf]] - represented as [[scala.reflect.api.Types#Type types]]. - * 1. References to enumeration values - represented as [[scala.reflect.api.Symbols#Symbol symbols]]. - * - * Class references are represented as instances of [[scala.reflect.api.Types#Type]] - * (because when the Scala compiler processes a class reference, the underlying runtime class might not yet have - * been compiled). To convert such a reference to a runtime class, one should use the [[scala.reflect.api.Mirrors#RuntimeMirror#runtimeClass `runtimeClass`]] method of a - * mirror such as [[scala.reflect.api.Mirrors#RuntimeMirror `RuntimeMirror`]] (the simplest way to get such a mirror is using - * [[scala.reflect.runtime#currentMirror `scala.reflect.runtime.currentMirror`]]). 
- * - * Enumeration value references are represented as instances of [[scala.reflect.api.Symbols#Symbol]], which on JVM point to methods - * that return underlying enum values. To inspect an underlying enumeration or to get runtime value of a reference to an enum, - * one should use a [[scala.reflect.api.Mirrors#RuntimeMirror]] (the simplest way to get such a mirror is again [[scala.reflect.runtime.package#currentMirror]]). - * - * Usage example: - * {{{ - * enum JavaSimpleEnumeration { FOO, BAR } - * - * import java.lang.annotation.*; - * @Retention(RetentionPolicy.RUNTIME) - * @Target({ElementType.TYPE}) - * public @interface JavaSimpleAnnotation { - * Class classRef(); - * JavaSimpleEnumeration enumRef(); - * } - * - * @JavaSimpleAnnotation( - * classRef = JavaAnnottee.class, - * enumRef = JavaSimpleEnumeration.BAR - * ) - * public class JavaAnnottee {} - * }}} - * {{{ - * import scala.reflect.runtime.universe._ - * import scala.reflect.runtime.{currentMirror => cm} - * - * object Test extends App { - * val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs - * def jarg(name: String) = jann(TermName(name)) match { - * // Constant is always wrapped into a Literal or LiteralArgument tree node - * case LiteralArgument(ct: Constant) => value - * case _ => sys.error("Not a constant") - * } - * - * val classRef = jarg("classRef").value.asInstanceOf[Type] - * // ideally one should match instead of casting - * println(showRaw(classRef)) // TypeRef(ThisType(), JavaAnnottee, List()) - * println(cm.runtimeClass(classRef)) // class JavaAnnottee - * - * val enumRef = jarg("enumRef").value.asInstanceOf[Symbol] - * // ideally one should match instead of casting - * println(enumRef) // value BAR - * - * val siblings = enumRef.owner.info.decls - * val enumValues = siblings.filter(sym => sym.isVal && sym.isPublic) - * println(enumValues) // Scope{ - * // final val FOO: JavaSimpleEnumeration; - * // final val BAR: JavaSimpleEnumeration - * // } - * - * // doesn't work 
because of https://github.com/scala/bug/issues/6459 - * // val enumValue = mirror.reflectField(enumRef.asTerm).get - * val enumClass = cm.runtimeClass(enumRef.owner.asClass) - * val enumValue = enumClass.getDeclaredField(enumRef.name.toString).get(null) - * println(enumValue) // BAR - * } - * }}} - * @template - * @group Constants - */ type Constant >: Null <: AnyRef with ConstantApi /** The constructor/extractor for `Constant` instances. diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala index bc3781412bab..7fae461b1304 100644 --- a/src/reflect/scala/reflect/api/Exprs.scala +++ b/src/reflect/scala/reflect/api/Exprs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index a8116613354b..04fe62ef9d35 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -61,7 +61,7 @@ import scala.language.implicitConversions * quite possible that in future releases of the reflection API, flag sets could be replaced with something else. 
* * For more details about `FlagSet`s and other aspects of Scala reflection, see the - * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] + * [[https://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * * @group ReflectionAPI * @@ -190,7 +190,7 @@ trait FlagSets { self: Universe => /** Flag indicating that tree represents a parameter of the primary constructor of some class * or a synthetic member underlying thereof. E.g. here's how 'class C(val x: Int)' is represented: - * + * {{{ * [[syntax trees at end of parser]]// Scala source: tmposDU52 * class C extends scala.AnyRef { * val x: Int = _; @@ -210,12 +210,13 @@ trait FlagSets { self: Universe => * Modifiers(), nme.CONSTRUCTOR, List(), * List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree))), TypeTree(), * Block(List(pendingSuperCall), Literal(Constant(()))))))))) + * }}} */ val PARAMACCESSOR: FlagSet /** Flag indicating that tree represents a parameter of the primary constructor of some case class * or a synthetic member underlying thereof. E.g. 
here's how 'case class C(val x: Int)' is represented: - * + * {{{ * [[syntax trees at end of parser]]// Scala source: tmpnHkJ3y * case class C extends scala.Product with scala.Serializable { * val x: Int = _; @@ -235,6 +236,7 @@ trait FlagSets { self: Universe => * Modifiers(), nme.CONSTRUCTOR, List(), * List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree))), TypeTree(), * Block(List(pendingSuperCall), Literal(Constant(()))))))))) + * }}} */ val CASEACCESSOR: FlagSet diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala index 073c71e3b89b..ad836bbda039 100644 --- a/src/reflect/scala/reflect/api/ImplicitTags.scala +++ b/src/reflect/scala/reflect/api/ImplicitTags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,11 @@ package scala package reflect package api +import scala.annotation.nowarn + /** Tags which preserve the identity of abstract types in the face of erasure. * Can be used for pattern matching, instance tests, serialization and the like. + * * @group Tags */ trait ImplicitTags { @@ -51,10 +54,8 @@ trait ImplicitTags { // Tags for Annotations. implicit val AnnotationTag: ClassTag[Annotation] + @nowarn("""cat=deprecation&origin=scala\.reflect\.api\.Annotations\.JavaArgument""") implicit val JavaArgumentTag: ClassTag[JavaArgument] - implicit val LiteralArgumentTag: ClassTag[LiteralArgument] - implicit val ArrayArgumentTag: ClassTag[ArrayArgument] - implicit val NestedArgumentTag: ClassTag[NestedArgument] // Tags for Symbols. 
implicit val TermSymbolTag: ClassTag[TermSymbol] @@ -75,7 +76,7 @@ trait ImplicitTags { implicit val AnnotatedTag: ClassTag[Annotated] implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree] implicit val ApplyTag: ClassTag[Apply] - implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg] + implicit val NamedArgTag: ClassTag[NamedArg] implicit val AssignTag: ClassTag[Assign] implicit val BindTag: ClassTag[Bind] implicit val BlockTag: ClassTag[Block] diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala index cfd853e70213..21681be868e5 100644 --- a/src/reflect/scala/reflect/api/Internals.scala +++ b/src/reflect/scala/reflect/api/Internals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,6 @@ package reflect package api import scala.language.implicitConversions -import scala.language.higherKinds /** * EXPERIMENTAL @@ -403,19 +402,19 @@ trait Internals { self: Universe => /** @see [[TreeDecorator]] */ class TreeDecoratorApi[T <: Tree](val tree: T) { - /** @see [[internal.freeTerms]] */ + /** @see [[InternalApi.freeTerms]] */ def freeTerms: List[FreeTermSymbol] = internal.freeTerms(tree) - /** @see [[internal.freeTypes]] */ + /** @see [[InternalApi.freeTypes]] */ def freeTypes: List[FreeTypeSymbol] = internal.freeTypes(tree) - /** @see [[internal.substituteSymbols]] */ + /** @see [[InternalApi.substituteSymbols]] */ def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree = internal.substituteSymbols(tree, from, to) - /** @see [[internal.substituteTypes]] */ + /** @see [[InternalApi.substituteTypes]] */ def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to) - /** @see [[internal.substituteThis]] */ + /** @see [[InternalApi.substituteThis]] */ def 
substituteThis(clazz: Symbol, to: => Tree): Tree = internal.substituteThis(tree, clazz, to) } @@ -427,49 +426,49 @@ trait Internals { self: Universe => /** @see [[SymbolDecorator]] */ class SymbolDecoratorApi[T <: Symbol](val symbol: T) { - /** @see [[internal.isFreeTerm]] */ + /** @see [[InternalApi.isFreeTerm]] */ def isFreeTerm: Boolean = internal.isFreeTerm(symbol) - /** @see [[internal.asFreeTerm]] */ + /** @see [[InternalApi.asFreeTerm]] */ def asFreeTerm: FreeTermSymbol = internal.asFreeTerm(symbol) - /** @see [[internal.isFreeType]] */ + /** @see [[InternalApi.isFreeType]] */ def isFreeType: Boolean = internal.isFreeType(symbol) - /** @see [[internal.asFreeType]] */ + /** @see [[InternalApi.asFreeType]] */ def asFreeType: FreeTypeSymbol = internal.asFreeType(symbol) - /** @see [[internal.newTermSymbol]] */ + /** @see [[InternalApi.newTermSymbol]] */ def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = internal.newTermSymbol(symbol, name, pos, flags) - /** @see [[internal.newModuleAndClassSymbol]] */ + /** @see [[InternalApi.newModuleAndClassSymbol]] */ def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = internal.newModuleAndClassSymbol(symbol, name, pos, flags) - /** @see [[internal.newMethodSymbol]] */ + /** @see [[InternalApi.newMethodSymbol]] */ def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = internal.newMethodSymbol(symbol, name, pos, flags) - /** @see [[internal.newTypeSymbol]] */ + /** @see [[InternalApi.newTypeSymbol]] */ def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = internal.newTypeSymbol(symbol, name, pos, flags) - /** @see [[internal.newClassSymbol]] */ + /** @see [[InternalApi.newClassSymbol]] */ def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = 
internal.newClassSymbol(symbol, name, pos, flags) - /** @see [[internal.isErroneous]] */ + /** @see [[InternalApi.isErroneous]] */ def isErroneous: Boolean = internal.isErroneous(symbol) - /** @see [[internal.isSkolem]] */ + /** @see [[InternalApi.isSkolem]] */ def isSkolem: Boolean = internal.isSkolem(symbol) - /** @see [[internal.deSkolemize]] */ + /** @see [[InternalApi.deSkolemize]] */ def deSkolemize: Symbol = internal.deSkolemize(symbol) - /** @see [[internal.initialize]] */ + /** @see [[InternalApi.initialize]] */ def initialize: T = internal.initialize(symbol) - /** @see [[internal.fullyInitialize]] */ + /** @see [[InternalApi.fullyInitialize]] */ def fullyInitialize: T = internal.fullyInitialize(symbol) - /** @see [[internal.flags]] */ + /** @see [[InternalApi.flags]] */ def flags: FlagSet = internal.flags(symbol) } @@ -481,7 +480,7 @@ trait Internals { self: Universe => /** @see [[TypeDecorator]] */ implicit class TypeDecoratorApi[T <: Type](val tp: T) { - /** @see [[internal.fullyInitialize]] */ + /** @see [[InternalApi.fullyInitialize]] */ def fullyInitialize: T = internal.fullyInitialize(tp) } } @@ -531,7 +530,7 @@ trait Internals { self: Universe => /** Set symbol's type signature to given type. * @return the symbol itself */ - def setInfo[S <: Symbol](sym: S, tpe: Type): S + def setInfo[S <: Symbol](sym: S, tpe: Type): sym.type /** Set symbol's annotations to given annotations `annots`. 
*/ @@ -759,7 +758,7 @@ trait Internals { self: Universe => trait SyntacticFilterExtractor { def apply(test: Tree): Tree - def unapply(tree: Tree): Option[(Tree)] + def unapply(tree: Tree): Option[Tree] } val SyntacticEmptyTypeTree: SyntacticEmptyTypeTreeExtractor @@ -866,12 +865,6 @@ trait Internals { self: Universe => } } - @deprecated("use `internal.reificationSupport` instead", "2.11.0") - val build: ReificationSupportApi - - @deprecated("use `internal.ReificationSupportApi` instead", "2.11.0") - type BuildApi = ReificationSupportApi - /** This trait provides support for importers, a facility to migrate reflection artifacts between universes. * ''Note: this trait should typically be used only rarely.'' * @@ -1081,11 +1074,13 @@ trait Internals { self: Universe => * a single `compat._` import will fix things for you. * @group Internal */ + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") val compat: Compat /** @see [[compat]] * @group Internal */ + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") type Compat <: CompatApi /** Presence of an implicit value of this type in scope @@ -1093,171 +1088,28 @@ trait Internals { self: Universe => * @group Internal */ @scala.annotation.implicitNotFound("This method has been removed from the public API. 
Import compat._ or migrate away.") + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") class CompatToken /** @see [[compat]] * @group Internal */ + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") trait CompatApi { /** @see [[CompatToken]] */ - implicit val token = new CompatToken - - /** @see [[InternalApi.typeTagToManifest]] */ - @deprecated("use `internal.typeTagToManifest` instead", "2.11.0") - def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] = - internal.typeTagToManifest(mirror, tag) - - /** @see [[InternalApi.manifestToTypeTag]] */ - @deprecated("use `internal.manifestToTypeTag` instead", "2.11.0") - def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] = - internal.manifestToTypeTag(mirror, manifest) - - /** @see [[InternalApi.newScopeWith]] */ - @deprecated("use `internal.newScopeWith` instead", "2.11.0") - def newScopeWith(elems: Symbol*): Scope = - internal.newScopeWith(elems: _*) + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit val token: CompatToken = new CompatToken /** Scala 2.10 compatibility enrichments for BuildApi. 
*/ - implicit class CompatibleBuildApi(api: BuildApi) { - /** @see [[BuildApi.setInfo]] */ - @deprecated("use `internal.reificationSupport.setInfo` instead", "2.11.0") - def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = internal.reificationSupport.setInfo(sym, tpe) - - /** @see [[BuildApi.FlagsRepr]] */ - @deprecated("use `internal.reificationSupport.FlagsRepr` instead", "2.11.0") - def flagsFromBits(bits: Long): FlagSet = internal.reificationSupport.FlagsRepr(bits) - - /** @see [[BuildApi.noSelfType]] */ - @deprecated("use `noSelfType` instead", "2.11.0") - def emptyValDef: ValDef = noSelfType - - /** @see [[BuildApi.mkThis]] */ - @deprecated("use `internal.reificationSupport.mkThis` instead", "2.11.0") - def This(sym: Symbol): Tree = internal.reificationSupport.mkThis(sym) - - /** @see [[BuildApi.mkSelect]] */ - @deprecated("use `internal.reificationSupport.mkSelect` instead", "2.11.0") - def Select(qualifier: Tree, sym: Symbol): Select = internal.reificationSupport.mkSelect(qualifier, sym) - - /** @see [[BuildApi.mkIdent]] */ - @deprecated("use `internal.reificationSupport.mkIdent` instead", "2.11.0") - def Ident(sym: Symbol): Ident = internal.reificationSupport.mkIdent(sym) - - /** @see [[BuildApi.mkTypeTree]] */ - @deprecated("use `internal.reificationSupport.mkTypeTree` instead", "2.11.0") - def TypeTree(tp: Type): TypeTree = internal.reificationSupport.mkTypeTree(tp) - } + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit class CompatibleBuildApi(api: ReificationSupportApi) /** Scala 2.10 compatibility enrichments for Tree. 
*/ - implicit class CompatibleTree(tree: Tree) { - /** @see [[InternalApi.freeTerms]] */ - @deprecated("use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def freeTerms: List[FreeTermSymbol] = internal.freeTerms(tree) - - /** @see [[InternalApi.freeTypes]] */ - @deprecated("use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def freeTypes: List[FreeTypeSymbol] = internal.freeTypes(tree) - - /** @see [[InternalApi.substituteSymbols]] */ - @deprecated("use `internal.substituteSymbols` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree = internal.substituteSymbols(tree, from, to) - - /** @see [[InternalApi.substituteTypes]] */ - @deprecated("use `internal.substituteTypes` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to) - - /** @see [[InternalApi.substituteThis]] */ - @deprecated("use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def substituteThis(clazz: Symbol, to: => Tree): Tree = internal.substituteThis(tree, clazz, to) - } + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit class CompatibleTree(tree: Tree) /** Scala 2.10 compatibility enrichments for Tree. */ - implicit class CompatibleSymbol(symbol: Symbol) { - @deprecated("This API is unreliable. Use `isPrivateThis` or `isProtectedThis` instead", "2.11.0") - def isLocal: Boolean = symbol.asInstanceOf[scala.reflect.internal.Symbols#Symbol].isLocal - - @deprecated("This API is unreliable. 
Use `overrides.nonEmpty` instead", "2.11.0") - def isOverride: Boolean = symbol.asInstanceOf[scala.reflect.internal.Symbols#Symbol].isOverride - - /** @see [[InternalApi.isFreeTerm]] */ - @deprecated("use `internal.isFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def isFreeTerm: Boolean = internal.isFreeTerm(symbol) - - /** @see [[InternalApi.asFreeTerm]] */ - @deprecated("use `internal.asFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def asFreeTerm: FreeTermSymbol = internal.asFreeTerm(symbol) - - /** @see [[InternalApi.isFreeType]] */ - @deprecated("use `internal.isFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def isFreeType: Boolean = internal.isFreeType(symbol) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.asFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def asFreeType: FreeTypeSymbol = internal.asFreeType(symbol) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.newTermSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = internal.newTermSymbol(symbol, name, pos, flags) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.newModuleAndClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = internal.newModuleAndClassSymbol(symbol, name, pos, flags) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.newMethodSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = internal.newMethodSymbol(symbol, name, pos, flags) - - /** @see 
[[InternalApi.asFreeType]] */ - @deprecated("use `internal.newTypeSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = internal.newTypeSymbol(symbol, name, pos, flags) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.newClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = internal.newClassSymbol(symbol, name, pos, flags) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.isErroneous` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def isErroneous: Boolean = internal.isErroneous(symbol) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.isSkolem` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def isSkolem: Boolean = internal.isSkolem(symbol) - - /** @see [[InternalApi.asFreeType]] */ - @deprecated("use `internal.deSkolemize` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def deSkolemize: Symbol = internal.deSkolemize(symbol) - } - - /** @see [[InternalApi.singleType]] */ - @deprecated("use `internal.singleType` instead", "2.11.0") - def singleType(pre: Type, sym: Symbol): Type = internal.singleType(pre, sym) - - /** @see [[InternalApi.refinedType]] */ - @deprecated("use `internal.refinedType` instead", "2.11.0") - def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = internal.refinedType(parents, owner, decls, pos) - - /** @see [[InternalApi.refinedType]] */ - @deprecated("use `internal.refinedType` instead", "2.11.0") - def refinedType(parents: List[Type], owner: Symbol): Type = internal.refinedType(parents, owner) - - /** @see [[InternalApi.typeRef]] */ - @deprecated("use `internal.typeRef` instead", "2.11.0") - def typeRef(pre: 
Type, sym: Symbol, args: List[Type]): Type = internal.typeRef(pre, sym, args) - - /** @see [[InternalApi.intersectionType]] */ - @deprecated("use `internal.intersectionType` instead", "2.11.0") - def intersectionType(tps: List[Type]): Type = internal.intersectionType(tps) - - /** @see [[InternalApi.intersectionType]] */ - @deprecated("use `internal.intersectionType` instead", "2.11.0") - def intersectionType(tps: List[Type], owner: Symbol): Type = internal.intersectionType(tps, owner) - - /** @see [[InternalApi.polyType]] */ - @deprecated("use `internal.polyType` instead", "2.11.0") - def polyType(tparams: List[Symbol], tpe: Type): Type = internal.polyType(tparams, tpe) - - /** @see [[InternalApi.existentialAbstraction]] */ - @deprecated("use `internal.existentialAbstraction` instead", "2.11.0") - def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = internal.existentialAbstraction(tparams, tpe0) + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit class CompatibleSymbol(symbol: Symbol) } } diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala index bb28b87459d0..f5c042f88786 100644 --- a/src/reflect/scala/reflect/api/JavaUniverse.scala +++ b/src/reflect/scala/reflect/api/JavaUniverse.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,7 +24,7 @@ package api * and vice versa. It can also perform reflective invocations (getting/setting field values, * calling methods, etc). * - * See the [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] for details on how to use runtime reflection. + * See the [[https://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] for details on how to use runtime reflection. 
* * @groupname JavaUniverse Java Mirrors * @group ReflectionAPI @@ -51,7 +51,7 @@ trait JavaUniverse extends Universe { self => * become capable of performing reflective invocations (getting/setting field values, calling methods, etc). * * For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the - * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]] + * [[https://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]] * * @group JavaMirrors */ @@ -63,9 +63,9 @@ trait JavaUniverse extends Universe { self => /** Creates a runtime reflection mirror from a JVM classloader. * * For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the - * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]] + * [[https://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]] * * @group JavaMirrors */ def runtimeMirror(cl: ClassLoader): Mirror -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala index 14b4acffc868..65cb99c9ff7d 100644 --- a/src/reflect/scala/reflect/api/Liftables.scala +++ b/src/reflect/scala/reflect/api/Liftables.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,7 +18,7 @@ trait Liftables { self: Universe => /** A type class that defines a representation of `T` as a `Tree`. 
* - * @see [[http://docs.scala-lang.org/overviews/quasiquotes/lifting.html]] + * @see [[https://docs.scala-lang.org/overviews/quasiquotes/lifting.html]] */ trait Liftable[T] { def apply(value: T): Tree @@ -44,7 +44,7 @@ trait Liftables { self: Universe => * lifted: universe.Tree = O * }}} * - * @see [[http://docs.scala-lang.org/overviews/quasiquotes/lifting.html]] + * @see [[https://docs.scala-lang.org/overviews/quasiquotes/lifting.html]] */ def apply[T](f: T => Tree): Liftable[T] = new Liftable[T] { def apply(value: T): Tree = f(value) } @@ -52,7 +52,7 @@ trait Liftables { self: Universe => /** A type class that defines a way to extract instance of `T` from a `Tree`. * - * @see [[http://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]] + * @see [[https://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]] */ trait Unliftable[T] { def unapply(tree: Tree): Option[T] @@ -78,7 +78,7 @@ trait Liftables { self: Universe => * scala> val q"\${_: O.type}" = q"\$Oref" * }}} * - * @see [[http://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]] + * @see [[https://docs.scala-lang.org/overviews/quasiquotes/unlifting.html]] */ def apply[T](pf: PartialFunction[Tree, T]): Unliftable[T] = new Unliftable[T] { def unapply(value: Tree): Option[T] = pf.lift(value) diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala index 98180498e429..6cd554f3d38d 100644 --- a/src/reflect/scala/reflect/api/Mirror.scala +++ b/src/reflect/scala/reflect/api/Mirror.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,7 +19,7 @@ package api * * The base class for all mirrors. 
* - * See [[scala.reflect.api.Mirrors]] or [[docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] + * See [[scala.reflect.api.Mirrors]] or [[https://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * for a complete overview of `Mirror`s. * * @tparam U the type of the universe this mirror belongs to. diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala index 379e82706bd7..ee373c5fa463 100644 --- a/src/reflect/scala/reflect/api/Mirrors.scala +++ b/src/reflect/scala/reflect/api/Mirrors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -113,7 +113,7 @@ package api * via `ModuleMirror.instance`). Entry point: `val mm = im.reflectMethod()`. * Example: * {{{ - * scala> val methodX = typeOf[C].declaration(TermName("x")).asMethod + * scala> val methodX = typeOf[C].decl(TermName("x")).asMethod * methodX: reflect.runtime.universe.MethodSymbol = method x * * scala> val mm = im.reflectMethod(methodX) @@ -138,7 +138,7 @@ package api * scala> val im = m.reflect(new C) * im: reflect.runtime.universe.InstanceMirror = instance mirror for C@5f0c8ac1 * - * scala> val fieldX = typeOf[C].declaration(TermName("x")).asTerm.accessed.asTerm + * scala> val fieldX = typeOf[C].decl(TermName("x")).asTerm.accessed.asTerm * fieldX: reflect.runtime.universe.TermSymbol = value x * scala> val fmX = im.reflectField(fieldX) * fmX: reflect.runtime.universe.FieldMirror = field mirror for C.x (bound to C@5f0c8ac1) @@ -148,7 +148,7 @@ package api * * scala> fmX.set(3) // NOTE: can set an underlying value of an immutable field! 
* - * scala> val fieldY = typeOf[C].declaration(TermName("y")).asTerm.accessed.asTerm + * scala> val fieldY = typeOf[C].decl(TermName("y")).asTerm.accessed.asTerm * fieldY: reflect.runtime.universe.TermSymbol = variable y * * scala> val fmY = im.reflectField(fieldY) @@ -181,7 +181,7 @@ package api * scala> val cm = m.reflectClass(classC) * cm: reflect.runtime.universe.ClassMirror = class mirror for C (bound to null) * - * scala> val ctorC = typeOf[C].declaration(ru.nme.CONSTRUCTOR).asMethod + * scala> val ctorC = typeOf[C].decl(ru.nme.CONSTRUCTOR).asMethod * ctorC: reflect.runtime.universe.MethodSymbol = constructor C * * scala> val ctorm = cm.reflectConstructor(ctorC) @@ -213,7 +213,7 @@ package api * }}} * * For more information about `Mirrors`s, see the - * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]] + * [[https://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]] * * @contentDiagram hideNodes "*Api" * @group ReflectionAPI @@ -506,7 +506,7 @@ trait Mirrors { self: Universe => def runtimeClass(tpe: Type): RuntimeClass /** Maps a Scala class symbol to the corresponding Java class object - * @throws ClassNotFoundException if there is no Java class + * @throws java.lang.ClassNotFoundException if there is no Java class * corresponding to the given Scala class symbol. * Note: If the Scala symbol is ArrayClass, a ClassNotFound exception is thrown * because there is no unique Java class corresponding to a Scala generic array @@ -516,7 +516,7 @@ trait Mirrors { self: Universe => /** A class symbol for the specified runtime class. * @return The class symbol for the runtime class in the current class loader. 
* @throws java.lang.ClassNotFoundException if no class with that name exists - * @throws scala.reflect.ScalaReflectionException if no corresponding symbol exists + * @throws scala.ScalaReflectionException if no corresponding symbol exists * to do: throws anything else? */ def classSymbol(rtcls: RuntimeClass): ClassSymbol @@ -524,7 +524,7 @@ trait Mirrors { self: Universe => /** A module symbol for the specified runtime class. * @return The module symbol for the runtime class in the current class loader. * @throws java.lang.ClassNotFoundException if no class with that name exists - * @throws scala.reflect.ScalaReflectionException if no corresponding symbol exists + * @throws scala.ScalaReflectionException if no corresponding symbol exists * to do: throws anything else? */ def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index 818ff985729a..bee0d6f28ffe 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,6 @@ package scala package reflect package api -import scala.language.implicitConversions - /** * EXPERIMENTAL * @@ -35,25 +33,33 @@ import scala.language.implicitConversions * * To search for a type member, one can follow the same procedure, using `TypeName` instead. 
* - * For more information about creating and using `Name`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] + * For more information about creating and using `Name`s, see the [[https://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] * * @contentDiagram hideNodes "*Api" * @group ReflectionAPI */ trait Names { - /** An implicit conversion from String to TermName. - * Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`. - * @group Names + /** A former implicit conversion from String to TermName. + * + * This used to be an implicit conversion, enabling an alternative notation + * `"map": TermName` as opposed to `TermName("map")`. It is only kept for + * binary compatibility reasons, and should not be used anymore. + * + * @group Names */ - @deprecated("use explicit `TermName(s)` instead", "2.11.0") - implicit def stringToTermName(s: String): TermName = TermName(s) - - /** An implicit conversion from String to TypeName. - * Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`. - * @group Names + @deprecated("use `TermName(s)` instead", "2.11.0") + def stringToTermName(s: String): TermName = TermName(s) + + /** A former implicit conversion from String to TypeName. + * + * This used to be an implicit conversion, enabling an alternative notation + * `"List": TypeName` as opposed to `TypeName("List")`. It is only kept for + * binary compatibility reasons, and should not be used anymore. + * + * @group Names */ - @deprecated("use explicit `TypeName(s)` instead", "2.11.0") - implicit def stringToTypeName(s: String): TypeName = TypeName(s) + @deprecated("use `TypeName(s)` instead", "2.11.0") + def stringToTypeName(s: String): TypeName = TypeName(s) /** The abstract type of names. 
* @group Names diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala index b6cc08146fcb..7f2960de1c4e 100644 --- a/src/reflect/scala/reflect/api/Position.scala +++ b/src/reflect/scala/reflect/api/Position.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -58,7 +58,7 @@ import scala.reflect.macros.Attachments * pos.focus Converts a range position to an offset position focused on the point * pos.makeTransparent Convert an opaque range into a transparent range * - * For more information about `Position`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] + * For more information about `Position`s, see the [[https://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]] * * @groupname Common Commonly used methods * @group ReflectionAPI diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala index c224f644401e..d19b12874950 100644 --- a/src/reflect/scala/reflect/api/Positions.scala +++ b/src/reflect/scala/reflect/api/Positions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 37402441248f..e0df010c7ae5 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -143,7 +143,7 @@ import java.io.{ PrintWriter, StringWriter } * }}} * * For more details about `Printer`s and other aspects of Scala reflection, see the - * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] + * [[https://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * * @group ReflectionAPI */ @@ -151,7 +151,7 @@ trait Printers { self: Universe => /** @group Printers */ protected trait TreePrinter { - def print(args: Any*) + def print(args: Any*): Unit protected var printTypes = false protected var printIds = false protected var printOwners = false @@ -188,12 +188,14 @@ trait Printers { self: Universe => val buffer = new StringWriter() val writer = new PrintWriter(buffer) val printer = mkPrinter(writer) - printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes) - printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds) - printOwners.value.map(printOwners => if (printOwners) printer.withOwners else printer.withoutOwners) - printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds) - printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors) - printPositions.value.map(printPositions => if (printPositions) printer.withPositions else printer.withoutPositions) + + printTypes.value.foreach(if (_) printer.withTypes else printer.withoutTypes) + printIds.value.foreach(if (_) printer.withIds else printer.withoutIds) + printOwners.value.foreach(if (_) printer.withOwners else printer.withoutOwners) + printKinds.value.foreach(if (_) printer.withKinds else printer.withoutKinds) + printMirrors.value.foreach(if (_) printer.withMirrors else printer.withoutMirrors) + printPositions.value.foreach(if (_) printer.withPositions else 
printer.withoutPositions) + printer.print(what) writer.flush() buffer.toString diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala index ac09b441b8f5..d5acff1fdfe8 100644 --- a/src/reflect/scala/reflect/api/Quasiquotes.scala +++ b/src/reflect/scala/reflect/api/Quasiquotes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,7 +19,7 @@ trait Quasiquotes { self: Universe => * that are also known as quasiquotes. With their help you can easily manipulate * Scala reflection ASTs. * - * @see [[http://docs.scala-lang.org/overviews/quasiquotes/intro.html]] + * @see [[https://docs.scala-lang.org/overviews/quasiquotes/intro.html]] */ implicit class Quasiquote(ctx: StringContext) { protected trait api { diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala index 2bf5b82fa7ac..e1ea67c19166 100644 --- a/src/reflect/scala/reflect/api/Scopes.scala +++ b/src/reflect/scala/reflect/api/Scopes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala index 80756be1a70c..8451dbed7015 100644 --- a/src/reflect/scala/reflect/api/StandardDefinitions.scala +++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -139,10 +139,10 @@ trait StandardDefinitions { * scala> val m = typeOf[C].member(TermName("m")).asMethod * m: reflect.runtime.universe.MethodSymbol = method m * - * scala> m.params(0)(0).info + * scala> m.paramLists(0)(0).info * res1: reflect.runtime.universe.Type = => scala.Int * - * scala> showRaw(m.params(0)(0).info) + * scala> showRaw(m.paramLists(0)(0).info) * res2: String = TypeRef( * ThisType(scala), * scala., // <-- ByNameParamClass @@ -167,10 +167,10 @@ trait StandardDefinitions { * scala> val m = typeOf[C].member(TermName("m")).asMethod * m: reflect.runtime.universe.MethodSymbol = method m * - * scala> m.params(0)(0).info + * scala> m.paramLists(0)(0).info * res1: reflect.runtime.universe.Type = [Object] * - * scala> showRaw(m.params(0)(0).info) + * scala> showRaw(m.paramLists(0)(0).info) * res2: String = TypeRef( * ThisType(scala), * scala., // <-- JavaRepeatedParamClass @@ -192,10 +192,10 @@ trait StandardDefinitions { * scala> val m = typeOf[C].member(TermName("m")).asMethod * m: reflect.runtime.universe.MethodSymbol = method m * - * scala> m.params(0)(0).info + * scala> m.paramLists(0)(0).info * res1: reflect.runtime.universe.Type = scala.Int* * - * scala> showRaw(m.params(0)(0).info) + * scala> showRaw(m.paramLists(0)(0).info) * res2: String = TypeRef( * ThisType(scala), * scala., // <-- RepeatedParamClass diff --git a/src/reflect/scala/reflect/api/StandardLiftables.scala b/src/reflect/scala/reflect/api/StandardLiftables.scala index ee06b512f805..fdc83ef0f970 100644 --- a/src/reflect/scala/reflect/api/StandardLiftables.scala +++ b/src/reflect/scala/reflect/api/StandardLiftables.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -154,72 +154,100 @@ trait StandardLiftables { self: Universe => case Apply(ScalaDot(stdnme.Symbol), List(Literal(Constant(name: String)))) => scala.Symbol(name) } - implicit def unliftName[T <: Name : ClassTag]: Unliftable[T] = Unliftable[T] { case Ident(name: T) => name; case Bind(name: T, Ident(stdnme.WILDCARD)) => name } + implicit def unliftName[T <: Name : ClassTag]: Unliftable[T] = Unliftable[T] { + case Ident(name: T) => name + case Bind(name: T, Ident(stdnme.WILDCARD)) => name + } implicit def unliftType: Unliftable[Type] = Unliftable[Type] { case tt: TypeTree if tt.tpe != null => tt.tpe } implicit def unliftConstant: Unliftable[Constant] = Unliftable[Constant] { case Literal(const) => const } - implicit def unliftTuple2[T1, T2](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2]): Unliftable[Tuple2[T1, T2]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: Nil) => Tuple2(v1, v2) - } - implicit def unliftTuple3[T1, T2, T3](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3]): Unliftable[Tuple3[T1, T2, T3]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: Nil) => Tuple3(v1, v2, v3) - } - implicit def unliftTuple4[T1, T2, T3, T4](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4]): Unliftable[Tuple4[T1, T2, T3, T4]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: Nil) => Tuple4(v1, v2, v3, v4) - } - implicit def unliftTuple5[T1, T2, T3, T4, T5](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5]): Unliftable[Tuple5[T1, T2, T3, T4, T5]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: Nil) => Tuple5(v1, v2, v3, v4, v5) - } - implicit def unliftTuple6[T1, T2, T3, T4, T5, T6](implicit UnliftT1: 
Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6]): Unliftable[Tuple6[T1, T2, T3, T4, T5, T6]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: Nil) => Tuple6(v1, v2, v3, v4, v5, v6) - } - implicit def unliftTuple7[T1, T2, T3, T4, T5, T6, T7](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7]): Unliftable[Tuple7[T1, T2, T3, T4, T5, T6, T7]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: Nil) => Tuple7(v1, v2, v3, v4, v5, v6, v7) - } - implicit def unliftTuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8]): Unliftable[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: Nil) => Tuple8(v1, v2, v3, v4, v5, v6, v7, v8) - } - implicit def unliftTuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9]): Unliftable[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: Nil) => Tuple9(v1, v2, v3, v4, v5, v6, v7, v8, v9) - 
} - implicit def unliftTuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10]): Unliftable[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: Nil) => Tuple10(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) - } - implicit def unliftTuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11]): Unliftable[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: Nil) => Tuple11(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) - } - implicit def unliftTuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12]): Unliftable[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) 
:: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: Nil) => Tuple12(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) - } - implicit def unliftTuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13]): Unliftable[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: Nil) => Tuple13(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) - } - implicit def unliftTuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14]): Unliftable[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: Nil) => Tuple14(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) - } - implicit def unliftTuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, 
T11, T12, T13, T14, T15](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15]): Unliftable[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: Nil) => Tuple15(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) - } - implicit def unliftTuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16]): Unliftable[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: Nil) => Tuple16(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) - } - implicit def unliftTuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, 
T11, T12, T13, T14, T15, T16, T17](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17]): Unliftable[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: Nil) => Tuple17(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) - } - implicit def unliftTuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18]): Unliftable[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: 
UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: Nil) => Tuple18(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) - } - implicit def unliftTuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19]): Unliftable[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: Nil) => Tuple19(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) - } - implicit def unliftTuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], 
UnliftT19: Unliftable[T19], UnliftT20: Unliftable[T20]): Unliftable[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: UnliftT20(v20) :: Nil) => Tuple20(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) - } - implicit def unliftTuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19], UnliftT20: Unliftable[T20], UnliftT21: Unliftable[T21]): Unliftable[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: UnliftT20(v20) :: UnliftT21(v21) :: Nil) => Tuple21(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) - } - 
implicit def unliftTuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19], UnliftT20: Unliftable[T20], UnliftT21: Unliftable[T21], UnliftT22: Unliftable[T22]): Unliftable[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] = Unliftable { - case SyntacticTuple(UnliftT1(v1) :: UnliftT2(v2) :: UnliftT3(v3) :: UnliftT4(v4) :: UnliftT5(v5) :: UnliftT6(v6) :: UnliftT7(v7) :: UnliftT8(v8) :: UnliftT9(v9) :: UnliftT10(v10) :: UnliftT11(v11) :: UnliftT12(v12) :: UnliftT13(v13) :: UnliftT14(v14) :: UnliftT15(v15) :: UnliftT16(v16) :: UnliftT17(v17) :: UnliftT18(v18) :: UnliftT19(v19) :: UnliftT20(v20) :: UnliftT21(v21) :: UnliftT22(v22) :: Nil) => Tuple22(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) + // GENERATED CODE: + // scala> def m(n: Int) = s" implicit def unliftTuple$n[${(1 to n).map("T" + _).mkString(", ")}](implicit ${(1 to n).map(i => "UnliftT" + i + ": Unliftable[T" + i + "]").mkString(", ")}): Unliftable[(${(1 to n).map("T" + _).mkString(", ")})] =\n new TupleUnlifter[(${(1 to n).map("T" + _).mkString(", ")})](Array(${(1 to n).map("UnliftT" + _).mkString(", ")})) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple$n(${(1 to n).map(i => "rs(" + (i - 1) + ")").mkString(", ")})}"; println((2 to 22).map(m).mkString("\n\n")) + implicit def unliftTuple2[T1, T2](implicit UnliftT1: 
Unliftable[T1], UnliftT2: Unliftable[T2]): Unliftable[(T1, T2)] = + new TupleUnlifter[(T1, T2)](Array(UnliftT1, UnliftT2)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple2(rs(0), rs(1))} + + implicit def unliftTuple3[T1, T2, T3](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3]): Unliftable[(T1, T2, T3)] = + new TupleUnlifter[(T1, T2, T3)](Array(UnliftT1, UnliftT2, UnliftT3)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple3(rs(0), rs(1), rs(2))} + + implicit def unliftTuple4[T1, T2, T3, T4](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4]): Unliftable[(T1, T2, T3, T4)] = + new TupleUnlifter[(T1, T2, T3, T4)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple4(rs(0), rs(1), rs(2), rs(3))} + + implicit def unliftTuple5[T1, T2, T3, T4, T5](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5]): Unliftable[(T1, T2, T3, T4, T5)] = + new TupleUnlifter[(T1, T2, T3, T4, T5)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple5(rs(0), rs(1), rs(2), rs(3), rs(4))} + + implicit def unliftTuple6[T1, T2, T3, T4, T5, T6](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6]): Unliftable[(T1, T2, T3, T4, T5, T6)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple6(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5))} + + implicit def unliftTuple7[T1, T2, T3, T4, T5, T6, T7](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: 
Unliftable[T6], UnliftT7: Unliftable[T7]): Unliftable[(T1, T2, T3, T4, T5, T6, T7)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple7(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6))} + + implicit def unliftTuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple8(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7))} + + implicit def unliftTuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple9(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8))} + + implicit def unliftTuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)] = + new TupleUnlifter[(T1, T2, T3, T4, 
T5, T6, T7, T8, T9, T10)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple10(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9))} + + implicit def unliftTuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple11(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10))} + + implicit def unliftTuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple12(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11))} + + implicit def unliftTuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](implicit UnliftT1: Unliftable[T1], 
UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple13(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12))} + + implicit def unliftTuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple14(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13))} + + implicit def unliftTuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: 
Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple15(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14))} + + implicit def unliftTuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple16(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15))} + + implicit def unliftTuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], 
UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16, UnliftT17)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple17(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15), rs(16))} + + implicit def unliftTuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16, UnliftT17, UnliftT18)) { override protected 
def buildTuple(rs: Array[AnyRef]) = Tuple18(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15), rs(16), rs(17))} + + implicit def unliftTuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16, UnliftT17, UnliftT18, UnliftT19)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple19(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15), rs(16), rs(17), rs(18))} + + implicit def unliftTuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: 
Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19], UnliftT20: Unliftable[T20]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16, UnliftT17, UnliftT18, UnliftT19, UnliftT20)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple20(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15), rs(16), rs(17), rs(18), rs(19))} + + implicit def unliftTuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19], UnliftT20: Unliftable[T20], UnliftT21: Unliftable[T21]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16, UnliftT17, UnliftT18, UnliftT19, UnliftT20, UnliftT21)) { override protected def buildTuple(rs: 
Array[AnyRef]) = Tuple21(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15), rs(16), rs(17), rs(18), rs(19), rs(20))} + + implicit def unliftTuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](implicit UnliftT1: Unliftable[T1], UnliftT2: Unliftable[T2], UnliftT3: Unliftable[T3], UnliftT4: Unliftable[T4], UnliftT5: Unliftable[T5], UnliftT6: Unliftable[T6], UnliftT7: Unliftable[T7], UnliftT8: Unliftable[T8], UnliftT9: Unliftable[T9], UnliftT10: Unliftable[T10], UnliftT11: Unliftable[T11], UnliftT12: Unliftable[T12], UnliftT13: Unliftable[T13], UnliftT14: Unliftable[T14], UnliftT15: Unliftable[T15], UnliftT16: Unliftable[T16], UnliftT17: Unliftable[T17], UnliftT18: Unliftable[T18], UnliftT19: Unliftable[T19], UnliftT20: Unliftable[T20], UnliftT21: Unliftable[T21], UnliftT22: Unliftable[T22]): Unliftable[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22)] = + new TupleUnlifter[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22)](Array(UnliftT1, UnliftT2, UnliftT3, UnliftT4, UnliftT5, UnliftT6, UnliftT7, UnliftT8, UnliftT9, UnliftT10, UnliftT11, UnliftT12, UnliftT13, UnliftT14, UnliftT15, UnliftT16, UnliftT17, UnliftT18, UnliftT19, UnliftT20, UnliftT21, UnliftT22)) { override protected def buildTuple(rs: Array[AnyRef]) = Tuple22(rs(0), rs(1), rs(2), rs(3), rs(4), rs(5), rs(6), rs(7), rs(8), rs(9), rs(10), rs(11), rs(12), rs(13), rs(14), rs(15), rs(16), rs(17), rs(18), rs(19), rs(20), rs(21))} + // END GENERATED CODE + + private abstract class TupleUnlifter[T](unlifts: Array[Unliftable[_]]) extends Unliftable[T] { + override def unapply(tree: Tree): Option[T] = SyntacticTuple.unapply(tree) match { + case Some(values) => + val arity = unlifts.length + val iterator = values.iterator + val rs = new Array[AnyRef](arity) + var i = 0 + while (i < arity) { + 
if (!iterator.hasNext) return None + unlifts(i).unapply(iterator.next()) match { + case Some(x) => + rs(i) = x.asInstanceOf[AnyRef] + case None => return None + } + i += 1 + } + if (iterator.hasNext) None + else Some(buildTuple(rs).asInstanceOf[T]) + case None => None + } + protected def buildTuple(rs: Array[AnyRef]): AnyRef } } diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala index 8c3a7507bc82..21931cb3c600 100644 --- a/src/reflect/scala/reflect/api/StandardNames.scala +++ b/src/reflect/scala/reflect/api/StandardNames.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index 65be68470eab..5f48540e21ce 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -55,7 +55,7 @@ package api * to obtain the symbol that represents their declaration. During the typechecking phase, the compiler looks up the * symbol based on the name and scope and sets the [[Trees.SymTreeApi.symbol `symbol` field]] of tree nodes. 
* - * For more information about `Symbol` usage and attached intricacies, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols]] + * For more information about `Symbol` usage and attached intricacies, see the [[https://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols]] * * @group ReflectionAPI * @@ -336,7 +336,7 @@ trait Symbols { self: Universe => * * This method always returns signatures in the most generic way possible, even if the underlying symbol is obtained from an * instantiation of a generic type. For example, signature - * of the method `def map[B](f: (A) ⇒ B): List[B]`, which refers to the type parameter `A` of the declaring class `List[A]`, + * of the method `def map[B](f: (A) => B): List[B]`, which refers to the type parameter `A` of the declaring class `List[A]`, * will always feature `A`, regardless of whether `map` is loaded from the `List[_]` or from `List[Int]`. To get a signature * with type parameters appropriately instantiated, one should use `infoIn`. * @@ -442,7 +442,9 @@ trait Symbols { self: Universe => def privateWithin: Symbol /** Does this symbol represent the definition of a package? - * Known issues: [[https://github.com/scala/bug/issues/6732]]. + * + * True for term symbols that are packages and for type symbols + * for which `isPackageClass` is true. * * @group Tests */ @@ -458,7 +460,7 @@ trait Symbols { self: Universe => /** Is this symbol static (i.e. with no outer instance)? * Q: When exactly is a sym marked as STATIC? * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep. 
- * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6 + * https://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6 * * @group Tests */ @@ -561,7 +563,7 @@ trait Symbols { self: Universe => * $SYMACCESSORS * @group API */ - trait TermSymbolApi extends SymbolApi { this: TermSymbol => + trait TermSymbolApi extends SymbolApi { this: TermSymbol with TermSymbolApi => /** Term symbols have their names of type `TermName`. */ final type NameType = TermName diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala index 056e1c8bcbcd..8777d8f60956 100644 --- a/src/reflect/scala/reflect/api/TreeCreator.scala +++ b/src/reflect/scala/reflect/api/TreeCreator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 0012646aa8fb..0a2ff1318e83 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,8 @@ package scala package reflect package api +import scala.annotation.{nowarn, tailrec} + /** * EXPERIMENTAL * @@ -24,7 +26,7 @@ package api * * In Scala reflection, APIs that produce or use `Tree`s are: * - * - '''Annotations''' which use trees to represent their arguments, exposed in [[scala.reflect.api.Annotations#scalaArgs Annotation.scalaArgs]]. 
+ * - '''Annotations''' which use trees to represent their arguments, exposed in [[scala.reflect.api.Annotations.AnnotationApi#scalaArgs Annotation.scalaArgs]]. * - '''[[scala.reflect.api.Universe#reify reify]]''', a special method on [[scala.reflect.api.Universe]] that takes an expression and returns an AST which represents the expression. * - '''Macros and runtime compilation with toolboxes''' which both use trees as their program representation medium. * @@ -50,7 +52,7 @@ package api * print( showRaw( reify{5}.tree ) )` // prints Literal(Constant(5)) * }}} * - * For more information about `Tree`s, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols, Trees, Types]]. + * For more information about `Tree`s, see the [[https://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols, Trees, Types]]. * * @groupname Traversal Tree Traversal and Transformation * @groupprio Traversal 1 @@ -253,6 +255,9 @@ trait Trees { self: Universe => * For example, the `List` part of `Ident(TermName("List"))`. */ def name: Name + + /** Position of the subtree bearing the name. */ + def namePos: Position } /** A tree which references a symbol-carrying entity. 
@@ -395,7 +400,7 @@ trait Trees { self: Universe => def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template): ClassDef def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)] - /** @see [[InternalApi.classDef]] */ + /** @see [[Internals.InternalApi.classDef]] */ @deprecated("use `internal.classDef` instead", "2.11.0") def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ClassDef = internal.classDef(sym, impl) } @@ -444,7 +449,7 @@ trait Trees { self: Universe => def apply(mods: Modifiers, name: TermName, impl: Template): ModuleDef def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)] - /** @see [[InternalApi.moduleDef]] */ + /** @see [[Internals.InternalApi.moduleDef]] */ @deprecated("use `internal.moduleDef` instead", "2.11.0") def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ModuleDef = internal.moduleDef(sym, impl) } @@ -524,11 +529,11 @@ trait Trees { self: Universe => def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)] - /** @see [[InternalApi.valDef]] */ + /** @see [[Internals.InternalApi.valDef]] */ @deprecated("use `internal.valDef` instead", "2.11.0") def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): ValDef = internal.valDef(sym, rhs) - /** @see [[InternalApi.valDef]] */ + /** @see [[Internals.InternalApi.valDef]] */ @deprecated("use `internal.valDef` instead", "2.11.0") def apply(sym: Symbol)(implicit token: CompatToken): ValDef = internal.valDef(sym) } @@ -575,23 +580,23 @@ trait Trees { self: Universe => def apply(mods: Modifiers, name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef def unapply(defDef: DefDef): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] - /** @see [[InternalApi.defDef]] */ + /** @see [[Internals.InternalApi.defDef]] */ @deprecated("use 
`internal.defDef` instead", "2.11.0") def apply(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, vparamss, rhs) - /** @see [[InternalApi.defDef]] */ + /** @see [[Internals.InternalApi.defDef]] */ @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, vparamss, rhs) - /** @see [[InternalApi.defDef]] */ + /** @see [[Internals.InternalApi.defDef]] */ @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, mods: Modifiers, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, rhs) - /** @see [[InternalApi.defDef]] */ + /** @see [[Internals.InternalApi.defDef]] */ @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs) - /** @see [[InternalApi.defDef]] */ + /** @see [[Internals.InternalApi.defDef]] */ @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, rhs: List[List[Symbol]] => Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs) } @@ -647,11 +652,11 @@ trait Trees { self: Universe => def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)] - /** @see [[InternalApi.typeDef]] */ + /** @see [[Internals.InternalApi.typeDef]] */ @deprecated("use `internal.typeDef` instead", "2.11.0") def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): TypeDef = internal.typeDef(sym, rhs) - /** @see [[InternalApi.typeDef]] */ + /** @see [[Internals.InternalApi.typeDef]] */ @deprecated("use `internal.typeDef` instead", "2.11.0") def apply(sym: Symbol)(implicit token: CompatToken): TypeDef = internal.typeDef(sym) } @@ -715,7 +720,7 @@ trait Trees { self: Universe => def 
apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)] - /** @see [[InternalApi.labelDef]] */ + /** @see [[Internals.InternalApi.labelDef]] */ @deprecated("use `internal.labelDef` instead", "2.11.0") def apply(sym: Symbol, params: List[Symbol], rhs: Tree)(implicit token: CompatToken): LabelDef = internal.labelDef(sym, params, rhs) } @@ -758,7 +763,7 @@ trait Trees { self: Universe => */ val ImportSelector: ImportSelectorExtractor - /** An extractor class to create and pattern match with syntax `ImportSelector(name:, namePos, rename, renamePos)`. + /** An extractor class to create and pattern match with syntax `ImportSelector(name, namePos, rename, renamePos)`. * This is not an AST node, it is used as a part of the `Import` node. * @group Extractors */ @@ -788,6 +793,18 @@ trait Trees { self: Universe => * Is equal to -1 is the position is unknown. */ def renamePos: Int + + /** Does the selector mask or hide a name? `import x.{y => _}` */ + def isMask: Boolean + + /** Does the selector introduce a specific name? `import a.b, x.{y => z}` */ + def isSpecific: Boolean + + /** Does the selector introduce a specific name by rename? `x.{y => z}` */ + def isRename: Boolean + + /** Is the selector a wildcard import that introduces all available names? `import x._` */ + def isWildcard: Boolean } /** Import clause @@ -812,11 +829,11 @@ trait Trees { self: Universe => * Selectors are a list of ImportSelectors, which conceptually are pairs of names (from, to). * The last (and maybe only name) may be a nme.WILDCARD. For instance: * - * import qual.{x, y => z, _} + * import qual.{w => _, x, y => z, _} * * Would be represented as: * - * Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null))) + * Import(qual, List(("w", WILDCARD), ("x", "x"), ("y", "z"), (WILDCARD, null))) * * The symbol of an `Import` is an import symbol @see Symbol.newImport. 
* It's used primarily as a marker to check that the import has been typechecked. @@ -1227,14 +1244,14 @@ trait Trees { self: Universe => * @group Trees * @template */ - type AssignOrNamedArg >: Null <: AssignOrNamedArgApi with TermTree + type NamedArg >: Null <: NamedArgApi with TermTree - /** The constructor/extractor for `AssignOrNamedArg` instances. + /** The constructor/extractor for `NamedArg` instances. * @group Extractors */ - val AssignOrNamedArg: AssignOrNamedArgExtractor + val NamedArg: NamedArgExtractor - /** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`. + /** An extractor class to create and pattern match with syntax `NamedArg(lhs, rhs)`. * This AST node corresponds to the following Scala code: * * {{{ @@ -1246,15 +1263,15 @@ trait Trees { self: Universe => * * @group Extractors */ - abstract class AssignOrNamedArgExtractor { - def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg - def unapply(assignOrNamedArg: AssignOrNamedArg): Option[(Tree, Tree)] + abstract class NamedArgExtractor { + def apply(lhs: Tree, rhs: Tree): NamedArg + def unapply(namedArg: NamedArg): Option[(Tree, Tree)] } /** The API that all assigns support * @group API */ - trait AssignOrNamedArgApi extends TermTreeApi { this: AssignOrNamedArg => + trait NamedArgApi extends TermTreeApi { this: NamedArg => /** The left-hand side of the expression. */ def lhs: Tree @@ -2173,7 +2190,7 @@ trait Trees { self: Universe => /** 0-1 argument list new, based on a symbol. * @group Factories */ - @deprecated("use q\"new ${sym.toType}(..$args)\" instead", "2.10.1") + @deprecated("use q\"new $"+"{sym.toType}(..$"+"args)\" instead", "2.10.1") def New(sym: Symbol, args: Tree*): Tree /** A factory method for `Apply` nodes. @@ -2339,10 +2356,10 @@ trait Trees { self: Universe => */ def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign - /** Creates a `AssignOrNamedArg` node from the given components, having a given `tree` as a prototype. 
+ /** Creates a `NamedArg` node from the given components, having a given `tree` as a prototype. * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result. */ - def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg + def NamedArg(tree: Tree, lhs: Tree, rhs: Tree): NamedArg /** Creates a `If` node from the given components, having a given `tree` as a prototype. * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result. @@ -2480,7 +2497,7 @@ trait Trees { self: Universe => def traverseModifiers(mods: Modifiers): Unit = traverseAnnotations(mods.annotations) /** Traverses a single tree. */ - def traverse(tree: Tree): Unit = itraverse(this, tree) + def traverse(tree: Tree): Unit = itraverse(this, tree): @nowarn("cat=deprecation") def traversePattern(pat: Tree): Unit = traverse(pat) def traverseGuard(guard: Tree): Unit = traverse(guard) def traverseTypeAscription(tpt: Tree): Unit = traverse(tpt) @@ -2501,7 +2518,7 @@ trait Trees { self: Universe => def traverseParamss(vparamss: List[List[Tree]]): Unit = vparamss foreach traverseParams /** Traverses a list of trees with a given owner symbol. */ - def traverseStats(stats: List[Tree], exprOwner: Symbol) { + def traverseStats(stats: List[Tree], exprOwner: Symbol): Unit = { stats foreach (stat => if (exprOwner != currentOwner) atOwner(exprOwner)(traverse(stat)) else traverse(stat) @@ -2509,7 +2526,7 @@ trait Trees { self: Universe => } /** Performs a traversal with a given owner symbol. */ - def atOwner(owner: Symbol)(traverse: => Unit) { + def atOwner(owner: Symbol)(traverse: => Unit): Unit = { val prevOwner = currentOwner currentOwner = owner traverse @@ -2525,12 +2542,22 @@ trait Trees { self: Universe => * because pattern matching on abstract types we have here degrades performance. 
* @group Traversal */ + // FIXME: `Tree`/`TreeApi` does not contain a `traverse` method, so methods + // calling this (and not its override) are unable to follow the deprecation + // message. Once this is fixed, please fix callers of this method and remove + // the `@nowarn` annotation from them + @deprecated("Use Tree#traverse instead", "2.12.3") protected def itraverse(traverser: Traverser, tree: Tree): Unit = throw new MatchError(tree) /** Provides an extension hook for the traversal strategy. * Future-proofs against new node types. * @group Traversal */ + // FIXME: `Tree`/`TreeApi` does not contain a `traverse` method, so methods + // calling this (and not its override) are unable to follow the deprecation + // message. Once this is fixed, please fix callers of this method and remove + // the `@nowarn` annotation from them + @deprecated("Use Tree#traverse instead", "2.12.3") protected def xtraverse(traverser: Traverser, tree: Tree): Unit = throw new MatchError(tree) /** A class that implement a default tree transformation strategy: breadth-first component-wise cloning. @@ -2545,14 +2572,14 @@ trait Trees { self: Universe => /** The enclosing method of the currently transformed tree. */ protected def currentMethod = { - def enclosingMethod(sym: Symbol): Symbol = + @tailrec def enclosingMethod(sym: Symbol): Symbol = if (sym.isMethod || sym == NoSymbol) sym else enclosingMethod(sym.owner) enclosingMethod(currentOwner) } /** The enclosing class of the currently transformed tree. */ protected def currentClass = { - def enclosingClass(sym: Symbol): Symbol = + @tailrec def enclosingClass(sym: Symbol): Symbol = if (sym.isClass || sym == NoSymbol) sym else enclosingClass(sym.owner) enclosingClass(currentOwner) } @@ -2560,7 +2587,7 @@ trait Trees { self: Universe => // protected def currentPackage = currentOwner.enclosingTopLevelClass.owner /** Transforms a single tree. 
*/ - def transform(tree: Tree): Tree = itransform(this, tree) + def transform(tree: Tree): Tree = itransform(this, tree): @nowarn("cat=deprecation") /** Transforms a list of trees. */ def transformTrees(trees: List[Tree]): List[Tree] = @@ -2582,9 +2609,10 @@ trait Trees { self: Universe => /** Transforms a list of lists of `ValDef` nodes. */ def transformValDefss(treess: List[List[ValDef]]): List[List[ValDef]] = treess mapConserve (transformValDefs(_)) - /** Transforms a list of `CaseDef` nodes. */ + /** Transforms a list of `MemberDef` nodes. */ def transformMemberDefs(trees: List[MemberDef]): List[MemberDef] = trees mapConserve (tree => transform(tree).asInstanceOf[MemberDef]) + /** Transforms a list of `CaseDef` nodes. */ def transformCaseDefs(trees: List[CaseDef]): List[CaseDef] = trees mapConserve (tree => transform(tree).asInstanceOf[CaseDef]) /** Transforms a list of `Ident` nodes. */ @@ -2615,6 +2643,11 @@ trait Trees { self: Universe => * because pattern matching on abstract types we have here degrades performance. * @group Traversal */ + // FIXME: `Tree`/`TreeApi` does not contain a `transform` method, so methods + // calling this (and not its override) are unable to follow the deprecation + // message. Once this is fixed, please fix callers of this method and remove + // the `@nowarn` annotation from them + @deprecated("Use Tree#transform instead", since = "2.13.4") protected def itransform(transformer: Transformer, tree: Tree): Tree = throw new MatchError(tree) /** Provides an extension hook for the transformation strategy. diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala index 8718d6a285e5..7514d879d541 100644 --- a/src/reflect/scala/reflect/api/TypeCreator.scala +++ b/src/reflect/scala/reflect/api/TypeCreator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index 6e0f5427906f..45ee8ef70715 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,7 @@ package reflect package api import java.io.ObjectStreamException +import scala.annotation.nowarn /** * A `TypeTag[T]` encapsulates the runtime type representation of some type `T`. @@ -58,7 +59,7 @@ import java.io.ObjectStreamException * Each of these methods constructs a `TypeTag[T]` or `ClassTag[T]` for the given * type argument `T`. * - * === #2 Using an implicit parameter of type `TypeTag[T]`, `ClassTag[T]`, or `WeakTypeTag[T]` + * === #2 Using an implicit parameter of type `TypeTag[T]`, `ClassTag[T]`, or `WeakTypeTag[T]` === * * For example: * {{{ @@ -150,7 +151,7 @@ import java.io.ObjectStreamException * APIs to use `Tag`s. * * For more information about `TypeTag`s, see the - * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] + * [[https://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] * * @see [[scala.reflect.ClassTag]], [[scala.reflect.api.TypeTags#TypeTag]], [[scala.reflect.api.TypeTags#WeakTypeTag]] * @group ReflectionAPI @@ -170,7 +171,7 @@ trait TypeTags { self: Universe => * [[scala.reflect.api.TypeTags#TypeTag]] instead, which statically guarantees this property. 
* * For more information about `TypeTag`s, see the - * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] + * [[https://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] * * @see [[scala.reflect.api.TypeTags]] * @group TypeTags @@ -290,7 +291,7 @@ trait TypeTags { self: Universe => def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] = { (mirror1: AnyRef) match { - case m: scala.reflect.runtime.JavaMirrors#JavaMirror + case m: scala.reflect.runtime.JavaMirrors#JavaMirror @nowarn("cat=deprecation") if cacheMaterializedTypeTags && tpec1.getClass.getName.contains("$typecreator") && tpec1.getClass.getDeclaredFields.length == 0 => // excludes type creators that splice in bound types. @@ -317,6 +318,8 @@ trait TypeTags { self: Universe => /* @group TypeTags */ // This class only exists to silence MIMA complaining about a binary incompatibility. // Only the top-level class (api.PredefTypeCreator) should be used. + @deprecated("This class only exists to silence MIMA complaining about a binary incompatibility.", since="forever") + @annotation.unused private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator { def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = { copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index 5f4fb72a0510..a01926c08d42 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -34,10 +34,24 @@ package api * In this example, a [[scala.reflect.api.Types#TypeRef]] is returned, which corresponds to the type constructor `List` * applied to the type argument `Int`. * + * In the case of a generic type, you can also combine it with other types + * using [[scala.reflect.api.Types#appliedType]]. For example: + * + * {{{ + * scala> val intType = typeOf[Int] + * intType: reflect.runtime.universe.Type = Int + * + * scala> val listType = typeOf[List[_]] + * listType: reflect.runtime.universe.Type = List[_] + * + * scala> appliedType(listType.typeConstructor, intType) + * res0: reflect.runtime.universe.Type = List[Int] + * }}} + * * ''Note:'' Method `typeOf` does not work for types with type parameters, such as `typeOf[List[A]]` where `A` is * a type parameter. In this case, use [[scala.reflect.api.TypeTags#weakTypeOf]] instead. * - * For other ways to instantiate types, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html corresponding section of the Reflection Guide]]. + * For other ways to instantiate types, see the [[https://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html corresponding section of the Reflection Guide]]. 
* * === Common Operations on Types === * @@ -51,16 +65,15 @@ package api * For example, to look up the `map` method of `List`, one can do: * * {{{ - * scala> typeOf[List[_]].member("map": TermName) + * scala> typeOf[List[_]].member(TermName("map")) * res1: reflect.runtime.universe.Symbol = method map * }}} * - * For more information about `Type`s, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols, Trees, and Types]] + * For more information about `Type`s, see the [[https://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols, Trees, and Types]] * * @groupname TypeCreators Types - Creation * @groupname TypeOps Types - Operations * @group ReflectionAPI - * * @contentDiagram hideNodes "*Api" */ trait Types { @@ -124,7 +137,7 @@ trait Types { * Unlike `members` this method doesn't returns inherited members. * * Members in the returned scope might appear in arbitrary order. - * Use `declarations.sorted` to get an ordered list of members. + * Use `decls.sorted` to get an ordered list of members. */ def decls: MemberScope @@ -137,7 +150,7 @@ trait Types { * Unlike `declarations` this method also returns inherited members. * * Members in the returned scope might appear in arbitrary order. - * Use `declarations.sorted` to get an ordered list of members. + * Use `members.sorted` to get an ordered list of members. */ def members: MemberScope @@ -294,7 +307,7 @@ trait Types { * * {{{ * scala> class C { def foo[T](x: T)(y: T) = ??? } - * defined class C + * class C * * scala> typeOf[C].member(TermName("foo")).asMethod * res0: reflect.runtime.universe.MethodSymbol = method foo @@ -324,10 +337,10 @@ trait Types { * * {{{ * scala> class C { - * | def foo[T](x: T)(y: T) = ??? - * | def bar: Int = ??? + * | def foo[T](x: T)(y: T) = ??? + * | def bar: Int = ??? 
* | } - * defined class C + * class C * * scala> typeOf[C].member(TermName("foo")).asMethod * res0: reflect.runtime.universe.MethodSymbol = method foo @@ -350,7 +363,7 @@ trait Types { * scala> typeOf[C].member(TermName("bar")).asMethod * res6: reflect.runtime.universe.MethodSymbol = method bar * - * scala> res6.info + * scala> res6.info // vanilla NullaryMethodType * res7: reflect.runtime.universe.Type = => scala.Int * * scala> res6.info.resultType @@ -387,7 +400,7 @@ trait Types { def map(f: Type => Type): Type /** Apply `f` to each part of this type, for side effects only */ - def foreach(f: Type => Unit) + def foreach(f: Type => Unit): Unit /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`, * or None if none exists. @@ -441,7 +454,7 @@ trait Types { abstract class ThisTypeExtractor { def unapply(tpe: ThisType): Option[Symbol] - /** @see [[InternalApi.thisType]] */ + /** @see [[Internals.InternalApi.thisType]] */ @deprecated("use `internal.thisType` instead", "2.11.0") def apply(sym: Symbol)(implicit token: CompatToken): Type = internal.thisType(sym) } @@ -480,7 +493,7 @@ trait Types { abstract class SingleTypeExtractor { def unapply(tpe: SingleType): Option[(Type, Symbol)] - /** @see [[InternalApi.singleType]] */ + /** @see [[Internals.InternalApi.singleType]] */ @deprecated("use `ClassSymbol.thisPrefix` or `internal.singleType` instead", "2.11.0") def apply(pre: Type, sym: Symbol)(implicit token: CompatToken): Type = internal.singleType(pre, sym) } @@ -497,7 +510,7 @@ trait Types { def sym: Symbol } /** The `SuperType` type is not directly written, but arises when `C.super` is used - * as a prefix in a `TypeRef` or `SingleType`. It's internal presentation is + * as a prefix in a `TypeRef` or `SingleType`. 
Its internal presentation is * {{{ * SuperType(thistpe, supertpe) * }}} @@ -514,13 +527,13 @@ trait Types { */ val SuperType: SuperTypeExtractor - /** An extractor class to create and pattern match with syntax `SingleType(thistpe, supertpe)` + /** An extractor class to create and pattern match with syntax `SuperType(thistpe, supertpe)` * @group Extractors */ abstract class SuperTypeExtractor { def unapply(tpe: SuperType): Option[(Type, Type)] - /** @see [[InternalApi.superType]] */ + /** @see [[Internals.InternalApi.superType]] */ @deprecated("use `ClassSymbol.superPrefix` or `internal.superType` instead", "2.11.0") def apply(thistpe: Type, supertpe: Type)(implicit token: CompatToken): Type = internal.superType(thistpe, supertpe) } @@ -540,12 +553,17 @@ trait Types { */ def supertpe: Type } - /** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant. - * The REPL expresses constant types like `Int(11)`. Here are some constants with their types: + + /** A `ConstantType` type cannot be expressed in user programs; it is inferred as the type of a constant. + * Here are some constants with their types and the internal string representation: * {{{ - * 1 ConstantType(Constant(1)) - * "abc" ConstantType(Constant("abc")) + * 1 ConstantType(Constant(1)) Int(1) + * "abc" ConstantType(Constant("abc")) String("abc") * }}} + * + * ConstantTypes denote values that may safely be constant folded during type checking. + * The `deconst` operation returns the equivalent type that will not be constant folded. 
+ * * @template * @group Types */ @@ -563,7 +581,7 @@ trait Types { abstract class ConstantTypeExtractor { def unapply(tpe: ConstantType): Option[Constant] - /** @see [[InternalApi.constantType]] */ + /** @see [[Internals.InternalApi.constantType]] */ @deprecated("use `value.tpe` or `internal.constantType` instead", "2.11.0") def apply(value: Constant)(implicit token: CompatToken): ConstantType = internal.constantType(value) } @@ -606,7 +624,7 @@ trait Types { abstract class TypeRefExtractor { def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])] - /** @see [[InternalApi.typeRef]] */ + /** @see [[Internals.InternalApi.typeRef]] */ @deprecated("use `internal.typeRef` instead", "2.11.0") def apply(pre: Type, sym: Symbol, args: List[Type])(implicit token: CompatToken): Type = internal.typeRef(pre, sym, args) } @@ -666,11 +684,11 @@ trait Types { abstract class RefinedTypeExtractor { def unapply(tpe: RefinedType): Option[(List[Type], Scope)] - /** @see [[InternalApi.refinedType]] */ + /** @see [[Internals.InternalApi.refinedType]] */ @deprecated("use `internal.refinedType` instead", "2.11.0") def apply(parents: List[Type], decls: Scope)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls) - /** @see [[InternalApi.refinedType]] */ + /** @see [[Internals.InternalApi.refinedType]] */ @deprecated("use `internal.refinedType` instead", "2.11.0") def apply(parents: List[Type], decls: Scope, clazz: Symbol)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls, clazz) } @@ -715,7 +733,7 @@ trait Types { abstract class ClassInfoTypeExtractor { def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)] - /** @see [[InternalApi.classInfoType]] */ + /** @see [[Internals.InternalApi.classInfoType]] */ @deprecated("use `internal.classInfoType` instead", "2.11.0") def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol)(implicit token: CompatToken): ClassInfoType = internal.classInfoType(parents, decls, 
typeSymbol) } @@ -764,7 +782,7 @@ trait Types { abstract class MethodTypeExtractor { def unapply(tpe: MethodType): Option[(List[Symbol], Type)] - /** @see [[InternalApi.methodType]] */ + /** @see [[Internals.InternalApi.methodType]] */ @deprecated("use `internal.methodType` instead", "2.11.0") def apply(params: List[Symbol], resultType: Type)(implicit token: CompatToken): MethodType = internal.methodType(params, resultType) } @@ -800,7 +818,7 @@ trait Types { abstract class NullaryMethodTypeExtractor { def unapply(tpe: NullaryMethodType): Option[(Type)] - /** @see [[InternalApi.nullaryMethodType]] */ + /** @see [[Internals.InternalApi.nullaryMethodType]] */ @deprecated("use `internal.nullaryMethodType` instead", "2.11.0") def apply(resultType: Type)(implicit token: CompatToken): NullaryMethodType = internal.nullaryMethodType(resultType) } @@ -834,7 +852,7 @@ trait Types { abstract class PolyTypeExtractor { def unapply(tpe: PolyType): Option[(List[Symbol], Type)] - /** @see [[InternalApi.polyType]] */ + /** @see [[Internals.InternalApi.polyType]] */ @deprecated("use `internal.polyType` instead", "2.11.0") def apply(typeParams: List[Symbol], resultType: Type)(implicit token: CompatToken): PolyType = internal.polyType(typeParams, resultType) } @@ -872,7 +890,7 @@ trait Types { abstract class ExistentialTypeExtractor { def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)] - /** @see [[InternalApi.existentialType]] */ + /** @see [[Internals.InternalApi.existentialType]] */ @deprecated("use `internal.existentialType` instead", "2.11.0") def apply(quantified: List[Symbol], underlying: Type)(implicit token: CompatToken): ExistentialType = internal.existentialType(quantified, underlying) } @@ -910,7 +928,7 @@ trait Types { abstract class AnnotatedTypeExtractor { def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type)] - /** @see [[InternalApi.annotatedType]] */ + /** @see [[Internals.InternalApi.annotatedType]] */ @deprecated("use `internal.annotatedType` 
instead", "2.11.0") def apply(annotations: List[Annotation], underlying: Type)(implicit token: CompatToken): AnnotatedType = internal.annotatedType(annotations, underlying) } @@ -954,7 +972,7 @@ trait Types { abstract class TypeBoundsExtractor { def unapply(tpe: TypeBounds): Option[(Type, Type)] - /** @see [[InternalApi.typeBounds]] */ + /** @see [[Internals.InternalApi.typeBounds]] */ @deprecated("use `internal.typeBounds` instead", "2.11.0") def apply(lo: Type, hi: Type)(implicit token: CompatToken): TypeBounds = internal.typeBounds(lo, hi) } @@ -1007,7 +1025,7 @@ trait Types { abstract class BoundedWildcardTypeExtractor { def unapply(tpe: BoundedWildcardType): Option[TypeBounds] - /** @see [[InternalApi.boundedWildcardType]] */ + /** @see [[Internals.InternalApi.boundedWildcardType]] */ @deprecated("use `internal.boundedWildcardType` instead", "2.11.0") def apply(bounds: TypeBounds)(implicit token: CompatToken): BoundedWildcardType = internal.boundedWildcardType(bounds) } @@ -1031,7 +1049,21 @@ trait Types { */ def glb(ts: List[Type]): Type - /** A creator for type applications + /** A creator for type applications. + * + * Useful to combine and create types out of generic ones. For example: + * + * {{{ + * scala> val boolType = typeOf[Boolean] + * boolType: reflect.runtime.universe.Type = Boolean + * + * scala> val optionType = typeOf[Option[_]] + * optionType: reflect.runtime.universe.Type = Option[_] + * + * scala> appliedType(optionType.typeConstructor, boolType) + * res0: reflect.runtime.universe.Type = Option[Boolean] + * }}} + * * @group TypeOps */ def appliedType(tycon: Type, args: List[Type]): Type diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala index 1ac9815cff9e..aa3ead9037e9 100644 --- a/src/reflect/scala/reflect/api/Universe.scala +++ b/src/reflect/scala/reflect/api/Universe.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -63,7 +63,7 @@ package api * } * }}} * - * For more information about `Universe`s, see the [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]] + * For more information about `Universe`s, see the [[https://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]] * * @groupprio Universe -1 * @group ReflectionAPI diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala index 9faa876411ea..e9d36dac83b7 100644 --- a/src/reflect/scala/reflect/api/package.scala +++ b/src/reflect/scala/reflect/api/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -33,7 +33,7 @@ import scala.reflect.api.{Universe => ApiUniverse} * - [[scala.reflect.api.Universe]] * * For more information about Scala Reflection, see the - * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] + * [[https://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * * @groupname ReflectionAPI Scala Reflection API * @groupprio API 9 @@ -57,4 +57,4 @@ package object api { // todo. once we have implicit macros for tag generation, we can remove these anchors private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = macro ??? private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = macro ??? 
-} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala index 1284f7f331e4..70c5eb87e336 100644 --- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -88,16 +88,16 @@ trait AnnotationCheckers { // Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary. /** The list of annotation checkers that have been registered */ - private var annotationCheckers: List[AnnotationChecker] = Nil + private[this] var annotationCheckers: List[AnnotationChecker] = Nil /** Register an annotation checker. Typically these are added by compiler plugins. */ - def addAnnotationChecker(checker: AnnotationChecker) { + def addAnnotationChecker(checker: AnnotationChecker): Unit = { if (!(annotationCheckers contains checker)) annotationCheckers = checker :: annotationCheckers } /** Remove all annotation checkers */ - def removeAllAnnotationCheckers() { + def removeAllAnnotationCheckers(): Unit = { annotationCheckers = Nil } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 3d4c0bd5f3b9..53d26444db25 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,9 +14,8 @@ package scala package reflect package internal -import scala.annotation.tailrec +import scala.annotation.{nowarn, tailrec} import scala.collection.immutable.ListMap -import scala.language.postfixOps /** AnnotationInfo and its helpers */ trait AnnotationInfos extends api.Annotations { self: SymbolTable => @@ -60,7 +59,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def removeAnnotation(cls: Symbol): Self = filterAnnotations(ann => !(ann matches cls)) - final def withAnnotation(annot: AnnotationInfo): Self = withAnnotations(List(annot)) + def withAnnotation(annot: AnnotationInfo): Self @tailrec private def dropOtherAnnotations(anns: List[AnnotationInfo], cls: Symbol): List[AnnotationInfo] = anns match { @@ -69,14 +68,18 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => } } - /** Arguments to classfile annotations (which are written to - * bytecode as java annotations) are either: - * + /** + * Arguments to constant annotations (Annotations defined in Java or extending + * ConstantAnnotation). Arguments are either: * - constants * - arrays of constants - * - or nested classfile annotations + * - or nested classfile annotations (only for Java annotation) + * + * TODO: rename to `ConstantAnnotationArg` */ + @nowarn("""cat=deprecation&origin=scala\.reflect\.api\.Annotations\.JavaArgumentApi""") sealed abstract class ClassfileAnnotArg extends Product with JavaArgumentApi + type JavaArgument = ClassfileAnnotArg implicit val JavaArgumentTag: ClassTag[ClassfileAnnotArg] = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg]) case object UnmappableAnnotArg extends ClassfileAnnotArg @@ -84,89 +87,21 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => * `Char`, `Int`, `Long`, `Float`, `Double`, `String`, `java.lang.Class` or * an instance of a Java enumeration value). 
*/ - case class LiteralAnnotArg(const: Constant) - extends ClassfileAnnotArg with LiteralArgumentApi { - def value = const + case class LiteralAnnotArg(const: Constant) extends ClassfileAnnotArg { override def toString = const.escapedStringValue } - object LiteralAnnotArg extends LiteralArgumentExtractor /** Represents an array of classfile annotation arguments */ - case class ArrayAnnotArg(args: Array[ClassfileAnnotArg]) - extends ClassfileAnnotArg with ArrayArgumentApi { + case class ArrayAnnotArg(args: Array[ClassfileAnnotArg]) extends ClassfileAnnotArg { override def toString = args.mkString("[", ", ", "]") } - object ArrayAnnotArg extends ArrayArgumentExtractor /** Represents a nested classfile annotation */ - case class NestedAnnotArg(annInfo: AnnotationInfo) - extends ClassfileAnnotArg with NestedArgumentApi { + case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg { // The nested annotation should not have any Scala annotation arguments assert(annInfo.args.isEmpty, annInfo.args) - def annotation = annInfo override def toString = annInfo.toString } - object NestedAnnotArg extends NestedArgumentExtractor - - type JavaArgument = ClassfileAnnotArg - type LiteralArgument = LiteralAnnotArg - val LiteralArgument = LiteralAnnotArg - implicit val LiteralArgumentTag: ClassTag[LiteralAnnotArg] = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg]) - type ArrayArgument = ArrayAnnotArg - val ArrayArgument = ArrayAnnotArg - implicit val ArrayArgumentTag: ClassTag[ArrayAnnotArg] = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg]) - type NestedArgument = NestedAnnotArg - val NestedArgument = NestedAnnotArg - implicit val NestedArgumentTag: ClassTag[NestedAnnotArg] = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg]) - - /** A specific annotation argument that encodes an array of bytes as an - * array of `Long`. The type of the argument declared in the annotation - * must be `String`. 
This specialised class is used to encode Scala - * signatures for reasons of efficiency, both in term of class-file size - * and in term of compiler performance. - * Details about the storage format of pickles at the bytecode level (classfile annotations) can be found in SIP-10. - */ - case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg { - override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]") - lazy val sevenBitsMayBeZero: Array[Byte] = { - mapToNextModSevenBits(scala.reflect.internal.pickling.ByteCodecs.encode8to7(bytes)) - } - - /* In order to store a byte array (the pickle) using a bytecode-level annotation, - * the most compact representation is used (which happens to be string-constant and not byte array as one would expect). - * However, a String constant in a classfile annotation is limited to a maximum of 65535 characters. - * Method `fitsInOneString` tells us whether the pickle can be held by a single classfile-annotation of string-type. - * Otherwise an array of strings will be used. - */ - def fitsInOneString: Boolean = { - // due to escaping, a zero byte in a classfile-annotation of string-type takes actually two characters. 
- var i = 0 - var numZeros = 0 - while (i < sevenBitsMayBeZero.length) { - if (sevenBitsMayBeZero(i) == 0) numZeros += 1 - i += 1 - } - - (sevenBitsMayBeZero.length + numZeros) <= 65535 - } - - def sigAnnot: Type = - if (fitsInOneString) - definitions.ScalaSignatureAnnotation.tpe - else - definitions.ScalaLongSignatureAnnotation.tpe - - private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = { - var i = 0 - val srclen = src.length - while (i < srclen) { - val in = src(i) - src(i) = (if (in == 0x7f) 0.toByte else (in + 1).toByte) - i += 1 - } - src - } - } object AnnotationInfo { def marker(atp: Type): AnnotationInfo = @@ -181,7 +116,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo = new CompleteAnnotationInfo(atp, args, assocs) - def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])] = + def unapply(info: AnnotationInfo): Some[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])] = Some((info.atp, info.args, info.assocs)) def mkFilter(category: Symbol, defaultRetention: Boolean)(ann: AnnotationInfo) = @@ -201,12 +136,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => assert(args.isEmpty || assocs.isEmpty, atp) // necessary for reification, see Reifiers.scala for more info - private var orig: Tree = EmptyTree + private[this] var orig: Tree = EmptyTree def original = orig def setOriginal(t: Tree): this.type = { orig = t - this setPos t.pos - this + setPos(t.pos) } override def toString = completeAnnotationToString(this) @@ -215,7 +149,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => private[scala] def completeAnnotationToString(annInfo: AnnotationInfo) = { import annInfo._ val s_args = if (!args.isEmpty) args.mkString("(", ", ", ")") else "" - val s_assocs = if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else 
"" + val s_assocs = if (!assocs.isEmpty) assocs.map { case (x, y) => s"$x = $y" }.mkString("(", ", ", ")") else "" s"${atp}${s_args}${s_assocs}" } @@ -223,8 +157,9 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => * definitions) have to be lazy (#1782) */ class LazyAnnotationInfo(lazyInfo: => AnnotationInfo) extends AnnotationInfo { - private var forced = false - private lazy val forcedInfo = try lazyInfo finally forced = true + private[this] var _forced = false + protected def forced = _forced + private lazy val forcedInfo = try lazyInfo finally _forced = true def atp: Type = forcedInfo.atp def args: List[Tree] = forcedInfo.args @@ -233,39 +168,155 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def setOriginal(t: Tree): this.type = { forcedInfo.setOriginal(t); this } // We should always be able to print things without forcing them. - override def toString = if (forced) forcedInfo.toString else "@" + override def toString = if (_forced) forcedInfo.toString else "@" - override def pos: Position = if (forced) forcedInfo.pos else NoPosition + override def pos: Position = if (_forced) forcedInfo.pos else NoPosition override def completeInfo(): Unit = forcedInfo } final class ExtraLazyAnnotationInfo(sym: => Symbol, lazyInfo: => AnnotationInfo) extends LazyAnnotationInfo(lazyInfo) { private[this] lazy val typeSymbol = sym - override def symbol: Symbol = typeSymbol + // If `forced` to UnmappableAnnotation, ensure to return NoSymbol, otherwise `ann.matches(annCls)` can be incorrect + override def symbol: Symbol = if (forced) super.symbol else typeSymbol } - /** Typed information about an annotation. It can be attached to either - * a symbol or an annotated type. + /** + * Typed information about an annotation. It can be attached to either a symbol or an annotated type. + * + * `atp` is the type of the annotation class, the `symbol` method returns its [[Symbol]]. 
 * - * Annotations are written to the classfile as Java annotations - * if `atp` conforms to `ClassfileAnnotation` (the classfile parser adds - * this interface to any Java annotation class). + * If `atp` conforms to `ConstantAnnotation` (which is true for annotations defined in Java), the annotation + * arguments are compile-time constants represented in `assocs`. Note that default arguments are *not* present + * in `assocs`. The `assocsWithDefaults` extends `assocs` with the default values from the annotation definition. + * Example: `class a(x: Int = 1) extends ConstantAnnotation`. For `@ann()` without arguments `assocsWithDefaults` + * contains `x -> 1`. * - * Annotations are pickled (written to scala symtab attribute in the - * classfile) if `atp` inherits form `StaticAnnotation`. + * If `atp` is not a `ConstantAnnotation`, the annotation arguments are represented as type trees in `args`. + * These trees are not transformed by any phases following the type-checker. + * Note that default arguments are inserted into the `args` list. Example: `class a(x: Int = 1) extends Annotation`. + * For `@ann()` without arguments, `args` is `List(1)`. + * The `argIsDefault` method tells if an annotation argument is explicit or a default inserted by the compiler. * - * `args` stores arguments to Scala annotations, represented as typed - * trees. Note that these trees are not transformed by any phases - * following the type-checker. + * Annotations are written to the classfile as Java annotations if `atp` conforms to `ClassfileAnnotation` + * (the classfile parser adds this interface to any Java annotation class). * - * `assocs` stores arguments to classfile annotations as name-value pairs. + * Annotations are pickled (written to scala symtab attribute in the classfile) if `atp` inherits from + * `StaticAnnotation`, such annotations are visible under separate compilation.
 */ abstract class AnnotationInfo extends AnnotationApi { def atp: Type def args: List[Tree] def assocs: List[(Name, ClassfileAnnotArg)] + /** See [[AnnotationInfo]] */ + def argIsDefault(arg: Tree): Boolean = arg match { + case NamedArg(_, a) => argIsDefault(a) + case treeInfo.Applied(fun, _, _) if fun.symbol != null && fun.symbol.isDefaultGetter => + // if the annotation class was compiled with an old compiler, parameters with defaults don't have a + // `@defaultArg` meta-annotation and the typer inserts a call to the default getter + true + case _ => + // When inserting defaults, the tpe of the argument tree is tagged with the `@defaultArg` annotation. + arg.tpe.hasAnnotation(DefaultArgAttr) + } + + /** See [[AnnotationInfo]]. Note: for Java-defined annotations, this method returns `Nil`. */ + def assocsWithDefaults: List[(Name, ClassfileAnnotArg)] = { + val explicit = assocs.toMap + // ConstantAnnotations cannot have auxiliary constructors, nor multiple parameter lists + val params = symbol.primaryConstructor.paramss.headOption.getOrElse(Nil) + params.flatMap(p => { + val arg = explicit.get(p.name).orElse( + p.getAnnotation(DefaultArgAttr).flatMap(_.args.headOption).collect { + case Literal(c) => LiteralAnnotArg(c) + }) + arg.map(p.name -> _) + }) + } + + /** + * The `assocs` of this annotation passed to the `parent` class. + * + * `parent` needs to be either the annotation class itself or its direct superclass. + * + * If `parent` is the superclass, this method returns the arguments passed at the annotation definition. + * + * Example: given `class nodep extends nowarn("cat=deprecation")`, the call `assocsForSuper(NowarnClassSymbol)` + * returns `List('value' -> "cat=deprecation")`.
+ */ + def assocsForSuper(parent: Symbol): List[(Name, ClassfileAnnotArg)] = + if (symbol == parent) assocs + else if (symbol.superClass == parent) { + val superConstArgs: Map[String, ClassfileAnnotArg] = symbol.annotations.filter(_.matches(SuperArgAttr)).flatMap(_.args match { + case List(Literal(param), Literal(value)) => Some(param.stringValue -> LiteralAnnotArg(value)) + case _ => None + }).toMap + parent.primaryConstructor.paramss.headOption.getOrElse(Nil).flatMap(p => superConstArgs.get(p.name.toString).map(p.name -> _)) + } else Nil + + + /** + * The `args` of this annotation passed to the `parent` class. + * + * `parent` needs to be either the annotation class itself or its direct superclass. + * + * If `parent` is the superclass, this method returns the arguments passed at the annotation definition. Forwarded + * arguments are supported. + * + * Example: + * + * {{{ + * class ann(x: Int = 1, y: Int = 2) extends Annotation + * class sub(z: Int) extends ann(y = z) + * @sub(3) def f = 1 + * }}} + * + * The call `argsForSuper(symbolOfAnn)` returns `List(1, 3)`. The argument `1` is the default used in the super + * call, the value `3` is a forwarded argument. 
 + */ + def argsForSuper(parent: Symbol): List[Tree] = + if (symbol == parent) args + else if (symbol.superClass == parent) { + val subArgs = symbol.primaryConstructor.paramss.headOption.getOrElse(Nil).map(_.name.toString).zip(args).toMap + val superArgs: Map[String, Tree] = symbol.annotations.filter(_.matches(SuperArgAttr)).flatMap(_.args match { + case List(Literal(param), value) => Some(param.stringValue -> value) + case _ => None + }).toMap + val superFwdArgs: Map[String, String] = symbol.annotations.filter(_.matches(SuperFwdArgAttr)).flatMap(_.args match { + case List(Literal(param), Literal(subParam)) => Some(param.stringValue -> subParam.stringValue) + case _ => None + }).toMap + val params = parent.primaryConstructor.paramss.headOption.getOrElse(Nil) + val res = params.flatMap(p => { + val n = p.name.toString + superArgs.get(n).orElse(subArgs.get(superFwdArgs.getOrElse(n, ""))) + }) + if (params.lengthCompare(res) == 0) res else Nil + } else Nil + + /** + * Obtain the constructor symbol that was used for this annotation. + * If the annotation does not have secondary constructors, use `symbol.primaryConstructor` instead. + * + * To use this method in a compiler plugin, invoke it as follows: + * `val sym = annotationInfo.constructorSymbol(tree => global.exitingTyper(global.typer.typed(tree)))` + * + * Annotation arguments can be paired with the corresponding annotation parameters: + * `sym.paramss.head.zip(annotationInfo.args): List[(Symbol, Tree)]` + * + * Background: Before type checking, `@ann(x)` is represented as a tree `Apply(Select(New(ann), <init>), x)`. + * That tree is type checked as such and the resulting typed tree is used to build the `AnnotationInfo`. + * The information which constructor symbol was used is not represented in the `AnnotationInfo`. + * Adding it would be difficult because it affects the pickle format.
+ */ + def constructorSymbol(typer: Tree => Tree): Symbol = { + typer(New(atp, args: _*)) match { + case Apply(constr @ Select(New(_), nme.CONSTRUCTOR), _) => constr.symbol + case _ => atp.typeSymbol.primaryConstructor + } + } + def tpe = atp def scalaArgs = args def javaArgs = ListMap(assocs: _*) @@ -277,7 +328,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => // see annotationArgRewriter lazy val isTrivial = atp.isTrivial && !hasArgWhich(_.isInstanceOf[This]) - private var rawpos: Position = NoPosition + private[this] var rawpos: Position = NoPosition def pos = rawpos def setPos(pos: Position): this.type = { // Syncnote: Setpos inaccessible to reflection, so no sync in rawpos necessary. rawpos = pos @@ -331,7 +382,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => /** Check whether the type or any of the arguments are erroneous */ def isErroneous = atp.isErroneous || args.exists(_.isErroneous) - def isStatic = symbol.isNonBottomSubClass(StaticAnnotationClass) && symbol != NowarnClass + final def isStatic = symbol.isStaticAnnotation /** Check whether any of the arguments mention a symbol */ def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym) @@ -339,7 +390,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue) def intArg(index: Int) = constantAtIndex(index) map (_.intValue) def booleanArg(index: Int) = constantAtIndex(index) map (_.booleanValue) - def symbolArg(index: Int) = argAtIndex(index) collect { + def symbolArg(index: Int) = argAtIndex(args, index) collect { case Apply(fun, Literal(str) :: Nil) if fun.symbol == definitions.Symbol_apply => newTermName(str.stringValue) } @@ -358,12 +409,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => case Annotated(_, t) => lit(t) case _ => None } - - argAtIndex(index).flatMap(lit) + if (args.nonEmpty) argAtIndex(args, index).flatMap(lit) + else if 
(assocs.nonEmpty) argAtIndex(assocs, index) collect { + case (_, LiteralAnnotArg(const)) => const + } else None } - def argAtIndex(index: Int): Option[Tree] = - if (index < args.size) Some(args(index)) else None + def argAtIndex[T](l: List[T], index: Int): Option[T] = + if (index < l.size) Some(l(index)) else None def transformArgs(f: List[Tree] => List[Tree]): AnnotationInfo = new CompleteAnnotationInfo(atp, f(args), assocs) @@ -379,7 +432,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => object Annotation extends AnnotationExtractor { def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, ClassfileAnnotArg]): Annotation = AnnotationInfo(tpe, scalaArgs, javaArgs.toList) - def unapply(annotation: Annotation): Option[(Type, List[Tree], ListMap[Name, ClassfileAnnotArg])] = + def unapply(annotation: Annotation): Some[(Type, List[Tree], ListMap[Name, ClassfileAnnotArg])] = Some((annotation.tpe, annotation.scalaArgs, annotation.javaArgs)) } implicit val AnnotationTag: ClassTag[AnnotationInfo] = ClassTag[AnnotationInfo](classOf[AnnotationInfo]) @@ -391,7 +444,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => val tpe = if (const.tag == UnitTag) UnitTpe else ConstantType(const) Literal(const) setType tpe case ArrayAnnotArg(jargs) => - val args = jargs map reverseEngineerArg + val args = jargs.map(reverseEngineerArg _) // TODO: I think it would be a good idea to typecheck Java annotations using a more traditional algorithm // sure, we can't typecheck them as is using the `new jann(foo = bar)` syntax (because jann is going to be an @interface) // however we can do better than `typedAnnotation` by desugaring the aforementioned expression to @@ -400,17 +453,17 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => // in that case we're going to have correctly typed Array.apply calls, however that's 2.12 territory // and for 2.11 exposing an untyped call to ArrayModule should suffice 
Apply(Ident(ArrayModule), args.toList) - case NestedAnnotArg(ann: Annotation) => - annotationToTree(ann) + case NestedAnnotArg(jarg: Annotation) => + annotationToTree(jarg) case _ => EmptyTree } def reverseEngineerArgs(jargs: List[(Name, ClassfileAnnotArg)]): List[Tree] = jargs match { - case (name, jarg) :: rest => AssignOrNamedArg(Ident(name), reverseEngineerArg(jarg)) :: reverseEngineerArgs(rest) + case (name, jarg) :: rest => NamedArg(Ident(name), reverseEngineerArg(jarg)) :: reverseEngineerArgs(rest) case Nil => Nil } - if (ann.javaArgs.isEmpty) ann.scalaArgs - else reverseEngineerArgs(ann.javaArgs.toList) + if (ann.assocs.isEmpty) ann.args + else reverseEngineerArgs(ann.assocs) } // TODO: at the moment, constructor selection is unattributed, because AnnotationInfos lack necessary information @@ -424,18 +477,19 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => def encodeJavaArg(arg: Tree): ClassfileAnnotArg = arg match { case Literal(const) => LiteralAnnotArg(const) - case Apply(ArrayModule, args) => ArrayAnnotArg(args map encodeJavaArg toArray) + case Apply(ArrayModule, args) => ArrayAnnotArg(args.map(encodeJavaArg).toArray) case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => NestedAnnotArg(treeToAnnotation(arg)) case _ => throw new Exception(s"unexpected java argument shape $arg: literals, arrays and nested annotations are supported") } + // TODO: Java annotations with a single `value` parameter can be created without a named argument. 
def encodeJavaArgs(args: List[Tree]): List[(Name, ClassfileAnnotArg)] = args match { - case AssignOrNamedArg(Ident(name), arg) :: rest => (name, encodeJavaArg(arg)) :: encodeJavaArgs(rest) - case arg :: rest => throw new Exception(s"unexpected java argument shape $arg: only AssignOrNamedArg trees are supported") + case NamedArg(Ident(name), arg) :: rest => (name, encodeJavaArg(arg)) :: encodeJavaArgs(rest) + case arg :: rest => throw new Exception(s"unexpected java argument shape $arg: only NamedArg trees are supported") case Nil => Nil } val atp = tpt.tpe if (atp != null && (atp.typeSymbol isNonBottomSubClass StaticAnnotationClass)) AnnotationInfo(atp, args, Nil) - else if (atp != null && (atp.typeSymbol isNonBottomSubClass ClassfileAnnotationClass)) AnnotationInfo(atp, Nil, encodeJavaArgs(args)) + else if (atp != null && (atp.typeSymbol.isJavaDefined || atp.typeSymbol.isNonBottomSubClass(ConstantAnnotationClass))) AnnotationInfo(atp, Nil, encodeJavaArgs(args)) else throw new Exception(s"unexpected annotation type $atp: only subclasses of StaticAnnotation and ClassfileAnnotation are supported") case _ => throw new Exception("""unexpected tree shape: only q"new $annType(..$args)" is supported""") @@ -451,19 +505,12 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => * as well as “new-style” `@throws[Exception]("cause")` annotations. 
*/ object ThrownException { - def unapply(ann: AnnotationInfo): Option[Type] = { - ann match { - case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => - None - // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) - case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) => - Some(tpe) - // new-style: @throws[Exception], @throws[Exception]("cause") - case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) => - Some(arg) - case AnnotationInfo(TypeRef(_, _, Nil), _, _) => - Some(ThrowableTpe) - } + def unapply(ann: AnnotationInfo): Option[Type] = ann match { + case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => None + case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) => Some(tpe) // old-style + case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) => Some(arg) // new-style + case AnnotationInfo(TypeRef(_, _, Nil), _, _) => Some(ThrowableTpe) + case _ => None } } } diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 8f373391bce2..d36d085830d0 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -69,7 +69,7 @@ trait BaseTypeSeqs { // (while NoType is in there to indicate a cycle in this BTS, during the execution of // the mergePrefixAndArgs below, the elems get copied without the pending map, // so that NoType's are seen instead of the original type --> spurious compile error) - private val pending = new mutable.BitSet(length) + private[this] val pending = new mutable.BitSet(length) /** The type at i'th position in this sequence; lazy types are returned evaluated. 
*/ def apply(i: Int): Type = @@ -230,23 +230,21 @@ trait BaseTypeSeqs { i += 1 } var minTypes: List[Type] = List() - def alreadyInMinTypes(tp: Type): Boolean = { - @annotation.tailrec def loop(tps: List[Type]): Boolean = tps match { - case Nil => false - case x :: xs => (tp =:= x) || loop(xs) + def updateInMinTypes(tp: Type): Unit = { + def loop(tps: List[Type]): Boolean = tps match { + case x :: tps => (tp =:= x) || loop(tps) + case _ => false } - loop(minTypes) + if (!loop(minTypes)) + minTypes ::= tp } i = 0 while (i < nparents) { if (nextTypeSymbol(i) == minSym) { nextRawElem(i) match { - case RefinedType(variants, decls) => - for (tp <- variants) - if (!alreadyInMinTypes(tp)) minTypes ::= tp - case tp => - if (!alreadyInMinTypes(tp)) minTypes ::= tp + case RefinedType(variants, _) => variants.foreach(updateInMinTypes) + case ntp => updateInMinTypes(ntp) } index(i) = index(i) + 1 } @@ -257,7 +255,8 @@ trait BaseTypeSeqs { } } val elems = new Array[Type](btsSize) - buf.copyToArray(elems, 0) + @annotation.unused val copied = buf.copyToArray(elems, 0) + //assert(copied == btsSize, "array copied") // Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG newBaseTypeSeq(parents, elems) } diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala index 42734006fa68..66869a75eec3 100644 --- a/src/reflect/scala/reflect/internal/CapturedVariables.scala +++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala index 8efe3be13b89..311b176bf04b 100644 --- a/src/reflect/scala/reflect/internal/Chars.scala +++ b/src/reflect/scala/reflect/internal/Chars.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,13 +14,11 @@ package scala package reflect package internal -import scala.language.postfixOps - import scala.annotation.switch -import java.lang.{ Character => JCharacter } /** Contains constants and classifier methods for characters */ trait Chars { + import Chars.CodePoint // Be very careful touching these. // Apparently trivial changes to the way you write these constants // will cause Scanners.scala to go from a nice efficient switch to @@ -49,8 +47,7 @@ trait Chars { /** Convert a character to a backslash-u escape */ def char2uescape(c: Char): String = { - @inline def hexChar(ch: Int): Char = - ( if (ch < 10) '0' else 'A' - 10 ) + ch toChar + @inline def hexChar(ch: Int): Char = ((if (ch < 10) '0' else 'A' - 10) + ch).toChar char2uescapeArray(2) = hexChar((c >> 12) ) char2uescapeArray(3) = hexChar((c >> 8) % 16) @@ -75,28 +72,46 @@ trait Chars { '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' /** Can character start an alphanumeric Scala identifier? */ - def isIdentifierStart(c: Char): Boolean = - (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c) + def isIdentifierStart(c: Char): Boolean = (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c) + def isIdentifierStart(c: CodePoint): Boolean = (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c) /** Can character form part of an alphanumeric Scala identifier? 
*/ - def isIdentifierPart(c: Char) = - (c == '$') || Character.isUnicodeIdentifierPart(c) + def isIdentifierPart(c: Char) = (c == '$') || Character.isUnicodeIdentifierPart(c) + + def isIdentifierPart(c: CodePoint) = (c == '$') || Character.isUnicodeIdentifierPart(c) /** Is character a math or other symbol in Unicode? */ def isSpecial(c: Char) = { val chtp = Character.getType(c) chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt } - - private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_' - private final val letterGroups = { - import JCharacter._ - Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER) + def isSpecial(codePoint: CodePoint) = { + val chtp = Character.getType(codePoint) + chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt } - def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch) + + // used for precedence + import Character.{LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER} + def isScalaLetter(c: Char): Boolean = + Character.getType(c) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } + def isScalaLetter(c: CodePoint): Boolean = + Character.getType(c) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } /** Can character form part of a Scala operator name? */ - def isOperatorPart(c : Char) : Boolean = (c: @switch) match { + def isOperatorPart(c: Char): Boolean = (c: @switch) match { + case '~' | '!' | '@' | '#' | '%' | + '^' | '*' | '+' | '-' | '<' | + '>' | '?' | ':' | '=' | '&' | + '|' | '/' | '\\' => true + case c => isSpecial(c) + } + def isOperatorPart(c: CodePoint): Boolean = (c: @switch) match { case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' 
| ':' | '=' | '&' | @@ -111,4 +126,6 @@ trait Chars { } } -object Chars extends Chars { } +object Chars extends Chars { + type CodePoint = Int +} diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index 9bfc2012f767..12709fbc34b1 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,7 +21,7 @@ object ClassfileConstants { final val JAVA_MAJOR_VERSION = 45 final val JAVA_MINOR_VERSION = 3 - /** (see http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.1) + /** (see https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.1) * * If the `ACC_INTERFACE` flag is set, the `ACC_ABSTRACT` flag must also * be set (ch. 2.13.1). @@ -352,9 +352,9 @@ object ClassfileConstants { case JAVA_ACC_FINAL => FINAL case JAVA_ACC_SYNTHETIC => SYNTHETIC | ARTIFACT // maybe should be just artifact? 
case JAVA_ACC_STATIC => STATIC - case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED - case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT - case JAVA_ACC_ENUM => JAVA_ENUM + case JAVA_ACC_ABSTRACT => if (isClass) ABSTRACT else DEFERRED + case JAVA_ACC_INTERFACE => TRAIT | INTERFACE | ABSTRACT + case JAVA_ACC_ENUM => if (isClass) JAVA_ENUM | SEALED else JAVA_ENUM case JAVA_ACC_ANNOTATION => JAVA_ANNOTATION case _ => 0L } diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala index c7883ae7fdcf..1e95a1b15f87 100644 --- a/src/reflect/scala/reflect/internal/Constants.scala +++ b/src/reflect/scala/reflect/internal/Constants.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,48 +14,48 @@ package scala package reflect package internal -import scala.annotation.switch +import scala.annotation.{nowarn, switch} trait Constants extends api.Constants { self: SymbolTable => import definitions._ - final val NoTag = 0 - final val UnitTag = 1 - final val BooleanTag = 2 - final val ByteTag = 3 - final val ShortTag = 4 - final val CharTag = 5 - final val IntTag = 6 - final val LongTag = 7 - final val FloatTag = 8 - final val DoubleTag = 9 - final val StringTag = 10 - final val NullTag = 11 - final val ClazzTag = 12 + final val NoTag = 0 + final val UnitTag = 1 + final val BooleanTag = 2 + final val ByteTag = 3 + final val ShortTag = 4 + final val CharTag = 5 + final val IntTag = 6 + final val LongTag = 7 + final val FloatTag = 8 + final val DoubleTag = 9 + final val StringTag = 10 + final val NullTag = 11 + final val ClazzTag = 12 // For supporting java enumerations inside java annotations (see ClassfileParser) - final val EnumTag = 13 + final val EnumTag = 13 case 
class Constant(value: Any) extends ConstantApi { import java.lang.Double.doubleToRawLongBits import java.lang.Float.floatToRawIntBits val tag: Int = value match { - case null => NullTag - case x: Unit => UnitTag - case x: Boolean => BooleanTag - case x: Byte => ByteTag - case x: Short => ShortTag - case x: Int => IntTag - case x: Long => LongTag - case x: Float => FloatTag - case x: Double => DoubleTag - case x: String => StringTag - case x: Char => CharTag - case x: Type => ClazzTag - case x: Symbol => EnumTag - case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass) + case null => NullTag + case x: Unit => UnitTag + case x: Boolean => BooleanTag + case x: Byte => ByteTag + case x: Short => ShortTag + case x: Int => IntTag + case x: Long => LongTag + case x: Float => FloatTag + case x: Double => DoubleTag + case x: String => StringTag + case x: Char => CharTag + case x: Type => ClazzTag + case x: Symbol => EnumTag + case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass) } def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue @@ -63,9 +63,11 @@ trait Constants extends api.Constants { def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag - def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag + def isFloatRepresentable: Boolean = ByteTag <= tag && tag <= FloatTag && (tag != IntTag || intValue == intValue.toFloat.toInt) && (tag != LongTag || longValue == longValue.toFloat.toLong) + def isDoubleRepresentable: Boolean = ByteTag <= tag && tag <= DoubleTag && (tag != LongTag || longValue == longValue.toDouble.toLong) def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag def isNonUnitAnyVal = BooleanTag <= tag && tag <= DoubleTag + def isSuitableLiteralType = BooleanTag <= tag && tag <= 
NullTag def isAnyVal = UnitTag <= tag && tag <= DoubleTag def tpe: Type = tag match { @@ -108,7 +110,8 @@ trait Constants extends api.Constants { case DoubleTag => doubleToRawLongBits(value.asInstanceOf[Double]) == doubleToRawLongBits(that.value.asInstanceOf[Double]) case _ => - this.value.equals(that.value) + // we do not want cooperative equality for determining if constants are equal + this.value.equals(that.value): @nowarn("cat=other-non-cooperative-equals") } } case _ => false @@ -217,9 +220,9 @@ trait Constants extends api.Constants { Constant(intValue) else if (target == LongClass && isLongRange) Constant(longValue) - else if (target == FloatClass && isFloatRange) + else if (target == FloatClass && isFloatRepresentable) Constant(floatValue) - else if (target == DoubleClass && isNumeric) + else if (target == DoubleClass && isDoubleRepresentable) Constant(doubleValue) else null @@ -230,23 +233,63 @@ trait Constants extends api.Constants { else if (tag == ClazzTag) signature(typeValue) else value.toString() - def escapedChar(ch: Char): String = (ch: @switch) match { - case '\b' => "\\b" - case '\t' => "\\t" - case '\n' => "\\n" - case '\f' => "\\f" - case '\r' => "\\r" - case '"' => "\\\"" - case '\'' => "\\\'" - case '\\' => "\\\\" - case _ => if (ch.isControl) "\\u%04X".format(ch.toInt) else String.valueOf(ch) - } - def escapedStringValue: String = { - def escape(text: String): String = text flatMap escapedChar + import java.lang.StringBuilder + def requiresFormat(c: Char): Boolean = + (c: @switch) match { + case '\b' | '\t' | '\n' | '\f' | '\r' | '"' | '\'' | '\\' => true + case c => c.isControl + } + def escapedChar(b: StringBuilder, c: Char): Unit = { + def quadNibble(b: StringBuilder, x: Int, i: Int): Unit = + if (i < 4) { + quadNibble(b, x >> 4, i + 1) + val n = x & 0xF + val c = if (n < 10) '0' + n else 'A' + (n - 10) + b.append(c.toChar) + } + val replace = (c: @switch) match { + case '\b' => "\\b" + case '\t' => "\\t" + case '\n' => "\\n" + case '\f' 
=> "\\f" + case '\r' => "\\r" + case '"' => "\\\"" + case '\'' => "\\\'" + case '\\' => "\\\\" + case c => + if (c.isControl) { + b.append("\\u") + quadNibble(b, c.toInt, 0) + } + else b.append(c) + return + } + b.append(replace) + } + def escape(text: String) = { + def mustBuild: Boolean = { + var i = 0 + while (i < text.length) { + if (requiresFormat(text.charAt(i))) return true + i += 1 + } + false + } + if (mustBuild) { + val b = new StringBuilder(text.length + 16).append('"') + var i = 0 + while (i < text.length) { + escapedChar(b, text.charAt(i)) + i += 1 + } + b.append('"').toString + } + else "\"" + text + "\"" + } tag match { case NullTag => "null" - case StringTag => "\"" + escape(stringValue) + "\"" + case StringTag => escape(stringValue) case ClazzTag => def show(tpe: Type) = "classOf[" + signature(tpe) + "]" typeValue match { @@ -261,10 +304,17 @@ trait Constants extends api.Constants { show(clazz.tpe_*) case _ => show(typeValue) } - case CharTag => "'" + escapedChar(charValue) + "'" - case LongTag => longValue.toString() + "L" - case EnumTag => symbolValue.name.toString() - case _ => String.valueOf(value) + case CharTag => + val c = charValue + if (requiresFormat(c)) { + val b = new StringBuilder().append('\'') + escapedChar(b, c) + b.append('\'').toString + } + else "'" + c + "'" + case LongTag => longValue.toString() + "L" + case EnumTag => symbolValue.name.toString() + case _ => String.valueOf(value) } } def typeValue: Type = value.asInstanceOf[Type] @@ -277,7 +327,7 @@ trait Constants extends api.Constants { h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. 
val valueHash = tag match { case NullTag => 0 - // We could just use value.hashCode here, at the cost of a collition between different NaNs + // We could just use value.hashCode here, at the cost of a collision between different NaNs case FloatTag => java.lang.Integer.hashCode(floatToRawIntBits(value.asInstanceOf[Float])) case DoubleTag => java.lang.Long.hashCode(doubleToRawLongBits(value.asInstanceOf[Double])) case _ => value.hashCode() diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index ae9d497bbb83..5ed8fa9b4bcc 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,17 +14,17 @@ package scala package reflect package internal -import scala.language.postfixOps - -import scala.annotation.meta +import scala.annotation.{meta, migration, nowarn, tailrec} import scala.collection.mutable import Flags._ import scala.reflect.api.{Universe => ApiUniverse} +import PartialFunction.cond +import util.StringContextStripMarginOps trait Definitions extends api.StandardDefinitions { self: SymbolTable => - import rootMirror.{getModuleByName, getPackage, getClassByName, getRequiredClass, getRequiredModule, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageIfDefined, getPackageObjectIfDefined, requiredClass, requiredModule} + import rootMirror.{getModuleByName, getPackage, getClassByName, getRequiredClass, getRequiredModule, getClassIfDefined, getModuleIfDefined, getPackageIfDefined, getPackageObjectIfDefined, requiredClass, requiredModule} object definitions extends DefinitionsClass @@ -37,16 +37,18 @@ trait Definitions extends api.StandardDefinitions { private def enterNewClass(owner: 
Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = { val clazz = owner.newClassSymbol(name, NoPosition, flags) - clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) markAllCompleted + clazz.setInfoAndEnter(ClassInfoType(parents, newScope, clazz)).markAllCompleted() } - private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = { + private def newMethod(owner: Symbol, name: TermName, formals: List[Type], mkMeth: List[TermSymbol] => Type, flags: Long): MethodSymbol = { val msym = owner.newMethod(name.encode, NoPosition, flags) val params = msym.newSyntheticValueParams(formals) - val info = if (owner.isJavaDefined) JavaMethodType(params, restpe) else MethodType(params, restpe) - msym setInfo info markAllCompleted + val info = mkMeth(params) + msym.setInfo(info).markAllCompleted() } private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = - owner.info.decls enter newMethod(owner, name, formals, restpe, flags) + owner.info.decls enter newMethod(owner, name, formals, MethodType(_, restpe), flags) + private def enterNewNullaryMethod(owner: Symbol, name: TermName, restpe: Type, flags: Long): MethodSymbol = + owner.info.decls enter newMethod(owner, name, Nil, _ => NullaryMethodType(restpe), flags) // the scala value classes trait ValueClassDefinitions { @@ -54,7 +56,7 @@ trait Definitions extends api.StandardDefinitions { import ClassfileConstants._ - private val nameToWeight = Map[Name, Int]( + private[this] val nameToWeight = Map[Name, Int]( tpnme.Byte -> 2, tpnme.Char -> 3, tpnme.Short -> 4, @@ -64,7 +66,7 @@ trait Definitions extends api.StandardDefinitions { tpnme.Double -> 96 ) - private val nameToTag = Map[Name, Char]( + private[this] val nameToTag = Map[Name, Char]( tpnme.Byte -> BYTE_TAG, tpnme.Char -> CHAR_TAG, tpnme.Short -> SHORT_TAG, @@ -102,14 +104,15 @@ trait Definitions extends 
api.StandardDefinitions { lazy val lazyHolders = symbolsMap(ScalaValueClasses, x => getClassIfDefined("scala.runtime.Lazy" + x)) lazy val LazyRefClass = getClassIfDefined("scala.runtime.LazyRef") lazy val LazyUnitClass = getClassIfDefined("scala.runtime.LazyUnit") + lazy val RichFloatClass = getClassIfDefined("scala.runtime.RichFloat") lazy val allRefClasses: Set[Symbol] = { refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass) } def isNumericSubClass(sub: Symbol, sup: Symbol) = ( - (numericWeight contains sub) - && (numericWeight contains sup) + isNumericValueClass(sub) + && isNumericValueClass(sup) && (numericWeight(sup) % numericWeight(sub) == 0) ) @@ -180,12 +183,7 @@ trait Definitions extends api.StandardDefinitions { } def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses - lazy val ScalaIntegralValueClasses: Set[Symbol] = Set( - CharClass, - ByteClass, - ShortClass, - IntClass, - LongClass) + lazy val ScalaIntegralValueClasses: Set[Symbol] = Set(CharClass, ByteClass, ShortClass, IntClass, LongClass) def underlyingOfValueClass(clazz: Symbol): Type = clazz.derivedValueClassUnbox.tpe.resultType @@ -193,7 +191,7 @@ trait Definitions extends api.StandardDefinitions { } abstract class DefinitionsClass extends DefinitionsApi with ValueClassDefinitions { - private var isInitialized = false + private[this] var isInitialized = false def isDefinitionsInitialized = isInitialized // It becomes tricky to create dedicated objects for other symbols because @@ -202,6 +200,7 @@ trait Definitions extends api.StandardDefinitions { lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass lazy val ScalaPackage = getPackage("scala") lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass + lazy val ScalaPackageObject = getPackageObjectIfDefined("scala") lazy val RuntimePackage = getPackage("scala.runtime") lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass @@ -232,7 +231,7 @@ trait 
Definitions extends api.StandardDefinitions { /** Fully initialize the symbol, type, or scope. */ - def fullyInitializeSymbol(sym: Symbol): Symbol = { + def fullyInitializeSymbol(sym: Symbol): sym.type = { sym.initialize // Watch out for those darn raw types on method parameters if (sym.owner.initialize.isJavaDefined) @@ -242,23 +241,25 @@ trait Definitions extends api.StandardDefinitions { fullyInitializeType(sym.tpe_*) sym } - def fullyInitializeType(tp: Type): Type = { + def fullyInitializeType(tp: Type): tp.type = { tp.typeParams foreach fullyInitializeSymbol mforeach(tp.paramss)(fullyInitializeSymbol) tp } - def fullyInitializeScope(scope: Scope): Scope = { + def fullyInitializeScope(scope: Scope): scope.type = { scope.sorted foreach fullyInitializeSymbol scope } /** Is this symbol a member of Object or Any? */ - def isUniversalMember(sym: Symbol) = ObjectClass isSubClass sym.owner + def isUniversalMember(sym: Symbol): Boolean = + if (sym.isOverloaded) sym.alternatives.exists(alt => ObjectClass.isSubClass(alt.owner)) + else ObjectClass.isSubClass(sym.owner) /** Is this symbol unimportable? 
Unimportable symbols include: * - constructors, because <init> is not a real name * - private[this] members, which cannot be referenced from anywhere else * - members of Any or Object, because every instance will inherit a - * definition which supersedes the imported one + * definition which supersedes the imported one, unless renamed */ def isUnimportable(sym: Symbol) = ( (sym eq NoSymbol) @@ -277,13 +278,6 @@ trait Definitions extends api.StandardDefinitions { def isUnitType(tp: Type) = tp.typeSymbol == UnitClass && tp.annotations.isEmpty - def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info) - def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match { - case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe) - case MethodType(_, MethodType(p :: _, _)) if !p.isImplicit => true - case _ => false - } - private def fixupAsAnyTrait(tpe: Type): Type = tpe match { case ClassInfoType(parents, decls, clazz) => if (parents.head.typeSymbol == AnyClass) tpe @@ -291,13 +285,13 @@ trait Definitions extends api.StandardDefinitions { assert(parents.head.typeSymbol == ObjectClass, parents) ClassInfoType(AnyTpe :: parents.tail, decls, clazz) } - case PolyType(tparams, restpe) => - PolyType(tparams, fixupAsAnyTrait(restpe)) + case PolyType(tparams, restpe) => PolyType(tparams, fixupAsAnyTrait(restpe)) + case _ => throw new MatchError(tpe) } // top types - lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) markAllCompleted - lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) markAllCompleted + lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT).markAllCompleted() + lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe).markAllCompleted() lazy val ObjectClass = getRequiredClass("java.lang.Object") // Cached types for core monomorphic classes @@ -307,7 +301,19 @@ trait Definitions extends 
api.StandardDefinitions { lazy val BoxedUnitTpe = BoxedUnitClass.tpe lazy val NothingTpe = NothingClass.tpe lazy val NullTpe = NullClass.tpe + + /** Represents `java.lang.Object` as referenced from Scala code. */ lazy val ObjectTpe = ObjectClass.tpe + + /** ObjectTpeJava is a TypeRef that's structurally equal to ObjectTpe, but with its own object identity. + * + * When referenced from Java (source or bytecode), `Object` should be considered equal to Scala's `Any`, + * as these types are both conceptually the top of the subtyping lattice of the respective languages. + * + * We use `ObjectTpeJava`'s identity to equate it, but not `ObjectTpe`, to `AnyTpe` in subtyping and type equality. + */ + lazy val ObjectTpeJava = new ObjectTpeJavaRef + lazy val SerializableTpe = SerializableClass.tpe lazy val StringTpe = StringClass.tpe lazy val ThrowableTpe = ThrowableClass.tpe @@ -320,7 +326,7 @@ trait Definitions extends api.StandardDefinitions { val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT) val av_constr = anyval.newClassConstructor(NoPosition) anyval.info.decls enter av_constr - anyval markAllCompleted + anyval.markAllCompleted() }).asInstanceOf[ClassSymbol] def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_) @@ -332,7 +338,7 @@ trait Definitions extends api.StandardDefinitions { locally { this initFlags ABSTRACT | FINAL this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this) - this markAllCompleted + this.markAllCompleted() } final override def isBottomClass = true final override def isThreadsafe(purpose: SymbolOps): Boolean = true @@ -356,6 +362,7 @@ trait Definitions extends api.StandardDefinitions { lazy val NullPointerExceptionClass = getClassByName("java.lang.NullPointerException") lazy val ThrowableClass = getClassByName("java.lang.Throwable") lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError] + lazy val RuntimeExceptionClass = requiredClass[RuntimeException] lazy val 
IllegalArgExceptionClass = requiredClass[IllegalArgumentException] lazy val UninitializedFieldConstructor = UninitializedErrorClass.primaryConstructor @@ -371,8 +378,6 @@ trait Definitions extends api.StandardDefinitions { lazy val DynamicClass = requiredClass[Dynamic] // fundamental modules - lazy val SysPackage = getPackageObject("scala.sys") - def Sys_error = getMemberMethod(SysPackage, nme.error) // Modules whose members are in the default namespace // scala/bug#5941: ScalaPackage and JavaLangPackage are never ever shared between mirrors @@ -383,39 +388,39 @@ trait Definitions extends api.StandardDefinitions { lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass) lazy val PredefModule = requiredModule[scala.Predef.type] - def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) def Predef_??? = getMemberMethod(PredefModule, nme.???) def Predef_locally = getMemberMethod(PredefModule, nme.locally) def isPredefMemberNamed(sym: Symbol, name: Name) = ( (sym.name == name) && (sym.owner == PredefModule.moduleClass) ) + def wrapVarargsArrayMethod(tp: Type) = getMemberMethod(ScalaRunTimeModule, wrapVarargsArrayMethodName(tp)) + /** Specialization. 
*/ lazy val SpecializableModule = requiredModule[Specializable] lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type] + lazy val MurmurHash3Module = requiredModule[scala.util.hashing.MurmurHash3.type] lazy val SymbolModule = requiredModule[scala.Symbol.type] def Symbol_apply = getMemberMethod(SymbolModule, nme.apply) // classes with special meanings - lazy val StringAddClass = requiredClass[scala.runtime.StringAdd] - lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber] - lazy val DelayedInitClass = requiredClass[scala.DelayedInit] - def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit) - - lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint] - lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL) markAllCompleted - lazy val SerializableClass = requiredClass[scala.Serializable] - lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait - lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait - lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable] - lazy val JavaNumberClass = requiredClass[java.lang.Number] - lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]] - lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote] - lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException] - lazy val JavaUtilMap = requiredClass[java.util.Map[_, _]] - lazy val JavaUtilHashMap = requiredClass[java.util.HashMap[_, _]] + lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber] + lazy val DelayedInitClass = requiredClass[scala.DelayedInit]: @nowarn("cat=deprecation") + def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit) + + lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint] + lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, 
ABSTRACT | TRAIT | FINAL).markAllCompleted() + lazy val ListOfSingletonClassTpe = SingletonClass.tpe :: Nil + lazy val SerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait + lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait + lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable] modifyInfo fixupAsAnyTrait + lazy val JavaNumberClass = requiredClass[java.lang.Number] + lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]] + lazy val JavaUtilMap = requiredClass[java.util.Map[_, _]] + lazy val JavaUtilHashMap = requiredClass[java.util.HashMap[_, _]] + lazy val JavaRecordClass = getClassIfDefined("java.lang.Record") lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe) lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe)) @@ -428,15 +433,16 @@ trait Definitions extends api.StandardDefinitions { def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe_*) def isByName(param: Symbol) = isByNameParamType(param.tpe_*) def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf + def isTypeTestSymbol(sym: Symbol) = sym == Any_isInstanceOf || sym == Object_isInstanceOf def isJavaVarArgsMethod(m: Symbol) = m.isMethod && (m.rawInfo match { case completer: LazyType => completer.isJavaVarargsMethod case _ => isJavaVarArgs(m.info.params) }) - def isJavaVarArgs(params: Seq[Symbol]) = !params.isEmpty && isJavaRepeatedParamType(params.last.tpe) - def isScalaVarArgs(params: Seq[Symbol]) = !params.isEmpty && isScalaRepeatedParamType(params.last.tpe) - def isVarArgsList(params: Seq[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe) - def isVarArgTypes(formals: Seq[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last) + def isJavaVarArgs(params: scala.collection.Seq[Symbol]) = !params.isEmpty && 
isJavaRepeatedParamType(params.last.tpe) + def isScalaVarArgs(params: scala.collection.Seq[Symbol]) = !params.isEmpty && isScalaRepeatedParamType(params.last.tpe) + def isVarArgsList(params: scala.collection.Seq[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe) + def isVarArgTypes(formals: scala.collection.Seq[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last) def firstParamType(tpe: Type): Type = tpe.paramTypes match { case p :: _ => p @@ -447,7 +453,8 @@ trait Definitions extends api.StandardDefinitions { case _ => false } - def hasRepeatedParam(tp: Type): Boolean = tp match { + @tailrec + final def hasRepeatedParam(tp: Type): Boolean = tp match { case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe) case PolyType(_, restpe) => hasRepeatedParam(restpe) case _ => false @@ -460,13 +467,18 @@ trait Definitions extends api.StandardDefinitions { else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp else tp ) - def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp + def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp // We don't need to deal with JavaRepeatedParamClass here, as `repeatedToSeq` is only called in the patmat translation for Scala sources. 
- def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp - def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp - def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(_ <:< AnyRefTpe) - def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem) - def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp) + def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp + def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp + def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(elemtp => elemtp <:< AnyRefTpe || (elemtp eq ObjectTpeJava)) + def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem) + def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp) + + // Classes treated specially with respect to -Ywarn-unused + lazy val SubTypeClass = requiredClass[scala.<:<[_,_]] + lazy val SameTypeClass = requiredClass[scala.=:=[_,_]] + lazy val DummyImplicitClass = requiredClass[scala.DummyImplicit] // collections classes lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]] @@ -474,17 +486,26 @@ trait Definitions extends api.StandardDefinitions { lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] def List_cons = getMemberMethod(ListClass, nme.CONS) - lazy val SeqClass = requiredClass[scala.collection.Seq[_]] + @migration("SeqClass now refers to scala.collection.immutable.Seq", "2.13.0") + lazy val SeqClass = requiredClass[scala.collection.immutable.Seq[_]] + lazy val SeqFactoryClass = requiredModule[scala.collection.SeqFactory.type] + lazy val UnapplySeqWrapperClass = getTypeMember(SeqFactoryClass, tpnme.UnapplySeqWrapper) + lazy val JavaStringBuilderClass = 
requiredClass[java.lang.StringBuilder] lazy val JavaStringBufferClass = requiredClass[java.lang.StringBuffer] lazy val JavaCharSequenceClass = requiredClass[java.lang.CharSequence] - lazy val TraversableClass = requiredClass[scala.collection.Traversable[_]] + @deprecated("Use IterableClass instead of TraversableClass", "2.13.0") + def TraversableClass = IterableClass lazy val ListModule = requiredModule[scala.collection.immutable.List.type] def List_apply = getMemberMethod(ListModule, nme.apply) + lazy val ListModuleAlias = getMemberValue(ScalaPackageClass, nme.List) lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type] - lazy val SeqModule = requiredModule[scala.collection.Seq.type] - lazy val ISeqModule = requiredModule[scala.collection.immutable.Seq.type] + lazy val NilModuleAlias = getMemberValue(ScalaPackageClass, nme.Nil) + @migration("SeqModule now refers to scala.collection.immutable.Seq", "2.13.0") + lazy val SeqModule = requiredModule[scala.collection.immutable.Seq.type] + lazy val SeqModuleAlias = getMemberValue(ScalaPackageClass, nme.Seq) + lazy val Collection_SeqModule = requiredModule[scala.collection.Seq.type] // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] @@ -555,10 +576,9 @@ trait Definitions extends api.StandardDefinitions { lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful - private def Context_210 = if (settings.isScala211) NoSymbol else getClassIfDefined("scala.reflect.macros.Context") // needed under -Xsource:2.10 - lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.blackbox.Context").orElse(Context_210) // defined in scala-reflect.jar, so we need to be careful + lazy val BlackboxContextClass = 
getClassIfDefined("scala.reflect.macros.blackbox.Context") // defined in scala-reflect.jar, so we need to be careful - lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context").orElse(Context_210) // defined in scala-reflect.jar, so we need to be careful + lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context") // defined in scala-reflect.jar, so we need to be careful def MacroContextPrefix = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.prefix)) def MacroContextPrefixType = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType)) def MacroContextUniverse = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.universe)) @@ -567,9 +587,41 @@ trait Definitions extends api.StandardDefinitions { def MacroContextTreeType = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.Tree)) lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl] + /**Implementation of a class that is identical to `scala.reflect.macros.internal.macroImpl`, + * but only exists at compile time + */ + lazy val MacroImplLocationAnnotation = { + val internalPkg = MacroImplAnnotation.owner.suchThat(_.isPackageClass) + val MacroImplLocation = internalPkg.newClassSymbol(tpnme.macroImplLocation, NoPosition) + MacroImplLocation.setPrivateWithin(ScalaPackage) + MacroImplLocation.setInfoAndEnter(ClassInfoType(AnnotationClass.tpe :: Nil, newScope, MacroImplLocation)) + // getter + MacroImplLocation.newMethod( + name = nme.unpickledMacroImpl, + newFlags = STABLE | ACCESSOR | PARAMACCESSOR + ).setInfoAndEnter(internal.nullaryMethodType(AnyTpe)).markAllCompleted() + // field + MacroImplLocation.newValue( + name = nme.unpickledMacroImpl, + newFlags = PRIVATE | LOCAL | PARAMACCESSOR + ).setInfoAndEnter(AnyTpe).markAllCompleted() + // ctor + val ctor = MacroImplLocation.newConstructor(NoPosition) + val param = ctor.newValueParameter(nme.unpickledMacroImpl).setInfo(AnyTpe) + 
ctor.setInfoAndEnter(MethodType(param :: Nil, MacroImplLocation.tpe)).markAllCompleted() + MacroImplLocation.addAnnotation( + sym = CompileTimeOnlyAttr, + arg = Literal(Constant( + s"illegal reference to $MacroImplLocation, it is an implementation detail of unpickling TASTy")) + ) + MacroImplLocation.markAllCompleted() + } + lazy val StringContextClass = requiredClass[scala.StringContext] lazy val StringContextModule = requiredModule[scala.StringContext.type] + lazy val ValueOfClass = getClassIfDefined("scala.ValueOf") + // scala/bug#8392 a reflection universe on classpath may not have // quasiquotes, if e.g. crosstyping with -Xsource on lazy val QuasiquoteClass = if (ApiUniverseClass != NoSymbol) ApiQuasiquotesClass.info.decl(tpnme.Quasiquote) else NoSymbol @@ -590,6 +642,9 @@ trait Definitions extends api.StandardDefinitions { lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type] lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type] + // Serialization + lazy val ModuleSerializationProxyClass: ClassSymbol = requiredClass[scala.runtime.ModuleSerializationProxy] + def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol @@ -599,14 +654,13 @@ trait Definitions extends api.StandardDefinitions { case _ => false }) // The given class has a main method. 
- def hasJavaMainMethod(sym: Symbol): Boolean = - (sym.tpe member nme.main).alternatives exists isJavaMainMethod + def hasJavaMainMethod(sym: Symbol): Boolean = sym.tpe.member(nme.main).alternatives.exists(isJavaMainMethod) class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends VarArityClassApi { - private val offset = countFrom - init.size + private[this] val offset = countFrom - init.size private def isDefinedAt(i: Int) = i < seq.length + offset && i >= offset - val seq: IndexedSeq[ClassSymbol] = (init ++: countFrom.to(maxArity).map { i => getRequiredClass("scala." + name + i) }).toVector - private val symSet = new SymbolSet(seq.toList) + val seq: IndexedSeq[ClassSymbol] = (init ++: countFrom.to(maxArity).map(i => getRequiredClass(s"scala.$name$i"))).toVector + private[this] val symSet = new SymbolSet(seq.toList) def contains(sym: Symbol): Boolean = symSet.contains(sym) def apply(i: Int) = if (isDefinedAt(i)) seq(i - offset) else NoSymbol def specificType(args: List[Type], others: List[Type] = Nil): Type = { @@ -633,7 +687,7 @@ trait Definitions extends api.StandardDefinitions { def functionType(formals: List[Type], restpe: Type) = FunctionClass.specificType(formals, restpe :: Nil) def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe :: Nil) - def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match { + def wrapVarargsArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match { case ByteClass => nme.wrapByteArray case ShortClass => nme.wrapShortArray case CharClass => nme.wrapCharArray @@ -650,6 +704,7 @@ trait Definitions extends api.StandardDefinitions { def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym) def isFunctionSymbol(sym: Symbol) = FunctionClass contains unspecializedSymbol(sym) + def isAbstractFunctionSymbol(sym: Symbol) = AbstractFunctionClass contains unspecializedSymbol(sym) def 
isProductNSymbol(sym: Symbol) = ProductClass contains unspecializedSymbol(sym) lazy val TryClass = requiredClass[scala.util.Try[_]] @@ -657,7 +712,9 @@ trait Definitions extends api.StandardDefinitions { lazy val SuccessClass = requiredClass[scala.util.Success[_]] lazy val FutureClass = requiredClass[scala.concurrent.Future[_]] lazy val PromiseClass = requiredClass[scala.concurrent.Promise[_]] - lazy val NonFatalClass = requiredClass[scala.util.control.NonFatal.type] + + lazy val NonFatalModule = requiredModule[scala.util.control.NonFatal.type] + lazy val NonFatal_apply = getMemberMethod(NonFatalModule, nme.apply) def unspecializedSymbol(sym: Symbol): Symbol = { if (sym hasFlag SPECIALIZED) { @@ -733,6 +790,22 @@ trait Definitions extends api.StandardDefinitions { // tends to change the course of events by forcing types. def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden) + // Are we expecting something function-ish? This considers FunctionN / SAM / ProtoType that matches functions + def isFunctionProto(pt: Type): Boolean = + (isFunctionType(pt) + || (pt match { case pt: ProtoType => pt.expectsFunctionType case _ => false }) // TODO: this does not work for Function0 + || samOf(pt).exists + ) + + // @requires pt.typeSymbol == PartialFunctionClass + def partialFunctionArgResTypeFromProto(pt: Type): (Type, Type) = + pt match { + case oap: OverloadedArgProto => (oap.hofParamTypes.head, WildcardType) + case _ => + val arg :: res :: Nil = pt.baseType(PartialFunctionClass).typeArgs: @unchecked + (arg, res) + } + // the number of arguments expected by the function described by `tp` (a FunctionN or SAM type), // or `-1` if `tp` does not represent a function type or SAM // for use during typers (after fields, samOf will be confused by abstract accessors for trait fields) @@ -745,17 +818,60 @@ trait Definitions extends api.StandardDefinitions { } } - // the argument types expected by the function described by `tp` (a FunctionN or SAM type), - // or `Nil` if 
`tp` does not represent a function type or SAM (or if it happens to be Function0...) - def functionOrSamArgTypes(tp: Type): List[Type] = { + // the argument types expected by the function described by `tp` (a FunctionN or PartialFunction or SAM type), + // or `Nil` if `tp` does not represent a function type or PartialFunction or SAM (or if it happens to be Function0...) + def functionOrPfOrSamArgTypes(tp: Type): List[Type] = { val dealiased = tp.dealiasWiden - if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.init + if (isFunctionTypeDirect(dealiased) || isPartialFunctionType(dealiased)) dealiased.typeArgs.init else samOf(tp) match { case samSym if samSym.exists => tp.memberInfo(samSym).paramTypes case _ => Nil } } + /** + * Convert a SAM type to the corresponding FunctionType, + * extrapolating BoundedWildcardTypes in the process + * (no type precision is lost by the extrapolation, + * but this facilitates dealing with the types arising from Java's use-site variance). + */ + def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = + tp match { + case pt: ProtoType => pt.asFunctionType + case _ => + val samSym = sam orElse samOf(tp) + + def correspondingFunctionSymbol = { + val numVparams = samSym.info.params.length + if (numVparams > definitions.MaxFunctionArity) NoSymbol + else FunctionClass(numVparams) + } + + if (samSym.exists && tp.typeSymbol != correspondingFunctionSymbol) // don't treat Functions as SAMs + wildcardExtrapolation(methodToExpressionTp(tp memberInfo samSym)) + else NoType + } + + /** Automatically perform the following conversions on expression types: + * A method type becomes the corresponding function type. + * A nullary method type becomes its result type. + * Implicit parameters are skipped. + * This method seems to be performance critical. 
+ */ + final def methodToExpressionTp(tp: Type): Type = tp match { + case PolyType(_, restpe) => + logResult(sm"""|Normalizing PolyType in infer: + | was: $restpe + | now""")(methodToExpressionTp(restpe)) + case mt @ MethodType(_, restpe) if mt.isImplicit => methodToExpressionTp(restpe) + case mt @ MethodType(_, restpe) => + if (phase.erasedTypes) FunctionClass(mt.params.length).tpe + else functionType(mt.paramTypes, methodToExpressionTp(restpe)) + case NullaryMethodType(restpe) => methodToExpressionTp(restpe) + case ExistentialType(tparams, qtpe) => newExistentialType(tparams, methodToExpressionTp(qtpe)) + case _ => tp // @MAT aliases already handled by subtyping + } + // the SAM's parameters and the Function's formals must have the same length // (varargs etc don't come into play, as we're comparing signatures, not checking an application) def samMatchesFunctionBasedOnArity(sam: Symbol, formals: List[Any]): Boolean = @@ -767,6 +883,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product] def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity) def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement) + def Product_productElementName = getMemberIfDefined(ProductRootClass, nme.productElementName) def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator) def Product_productPrefix = getMemberMethod(ProductRootClass, nme.productPrefix) def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_) @@ -794,7 +911,8 @@ trait Definitions extends api.StandardDefinitions { * Type helps ensure people can't come to depend on accidental * aspects of its behavior. This is all of it! 
*/ - def finalResultType(tp: Type): Type = tp match { + @tailrec + final def finalResultType(tp: Type): Type = tp match { case PolyType(_, restpe) => finalResultType(restpe) case MethodType(_, restpe) => finalResultType(restpe) case NullaryMethodType(restpe) => finalResultType(restpe) @@ -804,19 +922,19 @@ trait Definitions extends api.StandardDefinitions { * This makes it like 1000x easier to see the overall logic * of the method. */ - def isStable(tp: Type): Boolean = tp match { - case _: SingletonType => true - case NoPrefix => true + @tailrec + final def isStable(tp: Type): Boolean = tp match { + case NoPrefix | _: SingletonType => true case TypeRef(_, NothingClass | SingletonClass, _) => true - case TypeRef(_, sym, _) if sym.isAbstractType => tp.upperBound.typeSymbol isSubClass SingletonClass + case TypeRef(_, sym, _) if sym.isAbstractType => tp.upperBound.typeSymbol.isSubClass(SingletonClass) case TypeRef(pre, sym, _) if sym.isModuleClass => isStable(pre) - case TypeRef(_, _, _) => val normalize = tp.normalize; (normalize ne tp) && isStable(normalize) + case _: TypeRef => val norm = tp.normalize; (norm ne tp) && isStable(norm) case TypeVar(origin, _) => isStable(origin) - case AnnotatedType(_, atp) => isStable(atp) // Really? + case ExistentialType(qs, underlying) => isStable(deriveTypeWithWildcards(qs)(underlying)) case _: SimpleTypeProxy => isStable(tp.underlying) case _ => false } - def isVolatile(tp: Type): Boolean = { + final def isVolatile(tp: Type): Boolean = { // need to be careful not to fall into an infinite recursion here // because volatile checking is done before all cycles are detected. // the case to avoid is an abstract type directly or @@ -837,7 +955,7 @@ trait Definitions extends api.StandardDefinitions { volatileRecursions += 1 try safeIsVolatile finally volatileRecursions -= 1 } - /** A refined type P1 with ... with Pn { decls } is volatile if + /* A refined type P1 with ... 
with Pn { decls } is volatile if * one of the parent types Pi is an abstract type, and * either i > 1, or decls or a following parent Pj, j > 1, contributes * an abstract member. @@ -847,7 +965,7 @@ trait Definitions extends api.StandardDefinitions { * a member of the whole type. */ def isVolatileRefinedType: Boolean = { - val RefinedType(parents, decls) = tp + val RefinedType(parents, decls) = (tp: @unchecked) def isVisibleDeferred(m: Symbol) = m.isDeferred && ((tp nonPrivateMember m.name).alternatives contains m) def contributesAbstractMembers(p: Type) = p.deferredMembers exists isVisibleDeferred def dropConcreteParents = parents dropWhile (p => !p.typeSymbol.isAbstractType) @@ -891,54 +1009,77 @@ trait Definitions extends api.StandardDefinitions { (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass) } - private[this] val doSam = settings.isScala212 || (settings.isScala211 && settings.Xexperimental) - + private[this] val samCache = perRunCaches.newAnyRefMap[Symbol, Symbol]() /** The single abstract method declared by type `tp` (or `NoSymbol` if it cannot be found). - * - * The method must be monomorphic and have exactly one parameter list. - * The class defining the method is a supertype of `tp` that - * has a public no-arg primary constructor and it can be subclassed (not final or sealed). 
- */ - def samOf(tp: Type): Symbol = if (!doSam) NoSymbol else if (!isNonRefinementClassType(unwrapToClass(tp))) NoSymbol else { - // look at erased type because we (only) care about what ends up in bytecode - // (e.g., an alias type is fine as long as is compiles to a single-abstract-method) - val tpSym: Symbol = erasure.javaErasure(tp).typeSymbol - - if (tpSym.exists && tpSym.isClass && !(tpSym hasFlag (FINAL | SEALED)) - // if tp has a constructor (its class is not a trait), it must be public and must not take any arguments - // (implementation restriction: implicit argument lists are excluded to simplify type inference in adaptToSAM) - && { val ctor = tpSym.primaryConstructor - !ctor.exists || (!ctor.isOverloaded && ctor.isPublic && ctor.info.params.isEmpty && ctor.info.paramSectionCount <= 1)} - // we won't be able to create an instance of tp if it doesn't correspond to its self type - // (checking conformance gets complicated when tp is not fully defined, so let's just rule out self types entirely) - && !tpSym.hasSelfType - ) { - - // find the single abstract member, if there is one - // don't go out requiring DEFERRED members, as you will get them even if there's a concrete override: - // scala> abstract class X { def m: Int } - // scala> class Y extends X { def m: Int = 1} - // scala> typeOf[Y].deferredMembers - // Scopes(method m, method getClass) - // - // scala> typeOf[Y].members.filter(_.isDeferred) - // Scopes() - // must filter out "universal" members (getClass is deferred for some reason) - val deferredMembers = ( - tp.membersBasedOnFlags(excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD).toList.filter( - mem => mem.isDeferred && !isUniversalMember(mem) - ) // TODO: test - ) + * + * The method must be monomorphic and have exactly one parameter list. + * The class defining the method is a supertype of `tp` that + * has a public no-arg primary constructor and it can be subclassed (not final or sealed). 
+ * + * Note that this is also used during erasure (TODO: maybe we could simplify typedFunction for post-typer usage and avoid this?), + * and the caching means that samOf is effectively computed during typer (assuming the same inputs were presented to samOf during that phase). + * It's kind of strange that erasure sees deferredMembers that typer does not (see commented out assert below) + */ + def samOf(tp: Type): Symbol = { + @tailrec def isEligible(tp: Type): Boolean = unwrapToClass(tp) match { + case TypeRef(_, sym, _) => sym.isClass && !sym.isRefinementClass + case RefinedType(parent :: Nil, decls) => decls.forall(_.isType) && isEligible(parent) + case _ => false + } - // if there is only one, it's monomorphic and has a single argument list - if (deferredMembers.lengthCompare(1) == 0 && - deferredMembers.head.typeParams.isEmpty && - deferredMembers.head.info.paramSectionCount == 1) - deferredMembers.head - else NoSymbol + if (isEligible(tp)) { + // look at erased type because we (only) care about what ends up in bytecode + // (e.g., an alias type is fine as long as is compiles to a single-abstract-method) + val tpSym: Symbol = erasure.javaErasure(tp).typeSymbol + + def compute: Symbol = { + if (tpSym.exists && tpSym.isClass && !(tpSym hasFlag (FINAL | SEALED)) + // if tp has a constructor (its class is not a trait), it must be public and must not take any arguments + // (implementation restriction: implicit argument lists are excluded to simplify type inference in adaptToSAM) + && { + val ctor = tpSym.primaryConstructor + !ctor.exists || (!ctor.isOverloaded && ctor.isPublic && ctor.info.params.isEmpty && ctor.info.paramSectionCount <= 1) + } + // we won't be able to create an instance of tp if it doesn't correspond to its self type + // (checking conformance gets complicated when tp is not fully defined, so let's just rule out self types entirely) + && !tpSym.hasSelfType) { + // find the single abstract member, if there is one + // don't go out requiring 
DEFERRED members, as you will get them even if there's a concrete override: + // scala> abstract class X { def m: Int } + // scala> class Y extends X { def m: Int = 1} + // scala> typeOf[Y].deferredMembers + // Scopes(method m, method getClass) + // + // scala> typeOf[Y].members.filter(_.isDeferred) + // Scopes() + // must filter out "universal" members (getClass is deferred for some reason) + val deferredMembers = + tpSym.info.membersBasedOnFlags(excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD) + .toList + .filter(mem => mem.isDeferred && !isUniversalMember(mem)) + + // if there is only one, it's monomorphic and has a single argument list + if (deferredMembers.lengthCompare(1) == 0 && + deferredMembers.head.typeParams.isEmpty && + deferredMembers.head.info.paramSectionCount == 1) + deferredMembers.head + else NoSymbol + } else NoSymbol + } + + // fails in test/files/jvm/t10512b.scala + // { val res = samCache.getOrElseUpdate(tpSym, compute); assert(compute eq res, s"samOf($tp) cache discrepancy $compute <-> $res") } + + samCache.getOrElseUpdate(tpSym, compute) } else NoSymbol } + def samOfProto(pt: Type): Symbol = + pt match { + case proto: ProtoType => samOf(proto.underlying) // TODO: add more semantic accessor to ProtoType? 
+ case pt => samOf(pt) + } + def arrayType(arg: Type) = appliedType(ArrayClass, arg :: Nil) def byNameType(arg: Type) = appliedType(ByNameParamClass, arg :: Nil) def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp :: Nil) @@ -950,19 +1091,14 @@ trait Definitions extends api.StandardDefinitions { // For name-based pattern matching, derive the "element type" (type argument of Option/Seq) // from the relevant part of the signature of various members (get/head/apply/drop) def elementTypeFromGet(tp: Type) = typeArgOfBaseTypeOr(tp, OptionClass)(resultOfMatchingMethod(tp, nme.get)()) - def elementTypeFromHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)()) def elementTypeFromApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe)) - def elementTypeFromDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe)) def resultOfIsEmpty(tp: Type) = resultOfMatchingMethod(tp, nme.isEmpty)() // scala/bug#8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible) // extractor to limit exposure to regressions like the reported problem with existentials. 
// TODO fix the existential problem in the general case, see test/pending/pos/t8128.scala private def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp baseType baseClass).typeArgs match { - case x :: Nil => - val x1 = x - val x2 = repackExistential(x1) - x2 + case x :: Nil => repackExistential(x) case _ => or } @@ -1032,7 +1168,7 @@ trait Definitions extends api.StandardDefinitions { lazy val Any_equals = enterNewMethod(AnyClass, nme.equals_, AnyTpe :: Nil, BooleanTpe) lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, IntTpe) lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, StringTpe) - lazy val Any_## = enterNewMethod(AnyClass, nme.HASHHASH, Nil, IntTpe, FINAL) + lazy val Any_## = enterNewNullaryMethod(AnyClass, nme.HASHHASH, IntTpe, FINAL) // Any_getClass requires special handling. The return type is determined on // a per-call-site basis as if the function being called were actually: @@ -1125,12 +1261,12 @@ trait Definitions extends api.StandardDefinitions { normalizedParents(parents) mkString " with " def valueParamsString(tp: Type) = tp match { - case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")") + case MethodType(params, _) => params.map(_.defString).mkString("(", ", ", ")") case _ => "" } // members of class java.lang.{ Object, String } - lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, IntTpe, FINAL) + lazy val Object_## = enterNewNullaryMethod(ObjectClass, nme.HASHHASH, IntTpe, FINAL) lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL) lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL) lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, AnyRefTpe :: Nil, BooleanTpe, FINAL) @@ -1145,6 +1281,7 @@ trait Definitions extends api.StandardDefinitions { def Object_finalize = getMemberMethod(ObjectClass, nme.finalize_) def Object_notify = getMemberMethod(ObjectClass, 
nme.notify_) def Object_notifyAll = getMemberMethod(ObjectClass, nme.notifyAll_) + def Object_wait = getMemberMethod(ObjectClass, nme.wait_) def Object_equals = getMemberMethod(ObjectClass, nme.equals_) def Object_hashCode = getMemberMethod(ObjectClass, nme.hashCode_) def Object_toString = getMemberMethod(ObjectClass, nme.toString_) @@ -1172,7 +1309,7 @@ trait Definitions extends api.StandardDefinitions { // Annotation base classes lazy val AnnotationClass = requiredClass[scala.annotation.Annotation] - lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation] + lazy val ConstantAnnotationClass = getClassIfDefined("scala.annotation.ConstantAnnotation") lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation] // Java annotation annotations @@ -1181,7 +1318,6 @@ trait Definitions extends api.StandardDefinitions { lazy val AnnotationRepeatableAttr = requiredClass[java.lang.annotation.Repeatable] // Annotations - lazy val BridgeClass = requiredClass[scala.annotation.bridge] lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable] lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound] lazy val ImplicitAmbiguousClass = getClassIfDefined("scala.annotation.implicitAmbiguous") @@ -1194,25 +1330,38 @@ trait Definitions extends api.StandardDefinitions { lazy val uncheckedStableClass = requiredClass[scala.annotation.unchecked.uncheckedStable] lazy val uncheckedVarianceClass = requiredClass[scala.annotation.unchecked.uncheckedVariance] + // Tasty Unpickling Helpers - only access when Scala 3 library is expected to be available + lazy val ChildAnnotationClass = getClassIfDefined("scala.annotation.internal.Child") + lazy val RepeatedAnnotationClass = getClassIfDefined("scala.annotation.internal.Repeated") + lazy val TargetNameAnnotationClass = getClassIfDefined("scala.annotation.targetName") + lazy val StaticMethodAnnotationClass = getClassIfDefined("scala.annotation.static") + lazy 
val PolyFunctionClass = getClassIfDefined("scala.PolyFunction") + lazy val ExperimentalAnnotationClass = getClassIfDefined("scala.annotation.experimental") + lazy val AnnotationDefaultClass = getClassIfDefined("scala.annotation.internal.AnnotationDefault") + lazy val JavaAnnotationClass = requiredClass[java.lang.annotation.Annotation] + lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty] lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty] lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.annotation.compileTimeOnly") + lazy val DefaultArgAttr = getClassIfDefined("scala.annotation.meta.defaultArg") lazy val DeprecatedAttr = requiredClass[scala.deprecated] lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName] lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance] lazy val DeprecatedOverridingAttr = requiredClass[scala.deprecatedOverriding] lazy val NativeAttr = requiredClass[scala.native] - lazy val RemoteAttr = requiredClass[scala.remote] lazy val ScalaInlineClass = requiredClass[scala.inline] lazy val ScalaNoInlineClass = requiredClass[scala.noinline] lazy val SerialVersionUIDAttr = requiredClass[scala.SerialVersionUID] lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(), List(nme.value -> LiteralAnnotArg(Constant(0)))) lazy val SpecializedClass = requiredClass[scala.specialized] + lazy val SuperArgAttr = getClassIfDefined("scala.annotation.meta.superArg") + lazy val SuperFwdArgAttr = getClassIfDefined("scala.annotation.meta.superFwdArg") lazy val ThrowsClass = requiredClass[scala.throws[_]] lazy val TransientAttr = requiredClass[scala.transient] lazy val UncheckedClass = requiredClass[scala.unchecked] lazy val UncheckedBoundsClass = getClassIfDefined("scala.reflect.internal.annotations.uncheckedBounds") lazy val UnspecializedClass = requiredClass[scala.annotation.unspecialized] + lazy val UnusedClass = requiredClass[scala.annotation.unused] 
lazy val VolatileAttr = requiredClass[scala.volatile] lazy val JavaDeprecatedAttr = requiredClass[java.lang.Deprecated] lazy val FunctionalInterfaceClass = requiredClass[java.lang.FunctionalInterface] @@ -1229,16 +1378,20 @@ trait Definitions extends api.StandardDefinitions { lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject? lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature] + // Used by macro annotations + lazy val InheritedAttr = requiredClass[java.lang.annotation.Inherited] + lazy val JUnitAnnotations = List("Test", "Ignore", "Before", "After", "BeforeClass", "AfterClass").map(n => getClassIfDefined("org.junit." + n)) // Language features lazy val languageFeatureModule = getRequiredModule("scala.languageFeature") - def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || ( + @tailrec + final def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || ( // Trying to allow for deprecated locations sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol) ) - lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet + lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members.filter(_ isSubClass StaticAnnotationClass).toSet // According to the scala.annotation.meta package object: // * By default, annotations on (`val`-, `var`- or plain) constructor parameters @@ -1270,7 +1423,7 @@ trait Definitions extends api.StandardDefinitions { } private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member", addendum: String = "") = { - throw new FatalError(owner + " does not have a " + what + " " + name + addendum) + throw new FatalError(s"$owner does not have a $what ${name}${addendum}") } def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name)) @@ -1282,7 +1435,8 @@ trait 
Definitions extends api.StandardDefinitions { if (segs.isEmpty || segs.head != root.simpleName) NoSymbol else findNamedMember(segs.tail, root) } - def findNamedMember(segs: List[Name], root: Symbol): Symbol = + @tailrec + final def findNamedMember(segs: List[Name], root: Symbol): Symbol = if (segs.isEmpty) root else findNamedMember(segs.tail, root.info member segs.head) @@ -1322,9 +1476,10 @@ trait Definitions extends api.StandardDefinitions { } } def getMemberMethod(owner: Symbol, name: Name): TermSymbol = { + def miss = fatalMissingSymbol(owner, name, "method") getMember(owner, name.toTermName) match { - case x: TermSymbol => x - case _ => fatalMissingSymbol(owner, name, "method") + case x: TermSymbol => x.filter(_.isMethod).orElse(miss).asInstanceOf[TermSymbol] + case _ => miss } } def getDeclMethod(owner: Symbol, name: Name): TermSymbol = { @@ -1364,12 +1519,17 @@ trait Definitions extends api.StandardDefinitions { private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol = owner.newAliasType(name) setInfoAndEnter alias + // TODO: this is an unfortunate trade-off: on the one hand, `T*` is not a first-class type, and it shouldn't be compatible with T. + // This matters for overloading resolution, where a vararg method should be seen as less specific than a non-vararg one, + // since you can pass a T to a method that expects a T*, but you can't pass a T* to a method that takes a T + // (except if you allow converting T* to Seq[T], which should not be done through subtyping but instead using a conversion, IMO.) + // On the other hand, inside a method body, an argument of type T* can be treated as a Seq[T]. 
private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): ClassSymbol = { val clazz = enterNewClass(ScalaPackageClass, name, Nil) val tparam = clazz.newSyntheticTypeParam("T0", flags) val parents = List(AnyRefTpe, parentFn(tparam)) - clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz)) markAllCompleted + clazz.setInfo(GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))).markAllCompleted() } def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = { @@ -1379,6 +1539,7 @@ trait Definitions extends api.StandardDefinitions { case (Some(formals), restpe) => MethodType(msym.newSyntheticValueParams(formals), restpe) case (_, restpe) => NullaryMethodType(restpe) } + msym.setInfo(genPolyType(tparams, mtpe)).markAllCompleted() } def enterNewPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = { @@ -1386,7 +1547,6 @@ trait Definitions extends api.StandardDefinitions { owner.info.decls.enter(m) m } - /** T1 means one type parameter. Nullary means no param lists. */ def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = @@ -1447,7 +1607,8 @@ trait Definitions extends api.StandardDefinitions { /** Lists core classes that do have underlying bytecode, but are adjusted on-the-fly in every reflection universe */ lazy val hijackedCoreClasses = List( ComparableClass, - JavaSerializableClass + JavaCloneableClass, + SerializableClass ) /** Lists symbols that are synthesized or hijacked by the compiler. * @@ -1462,7 +1623,7 @@ trait Definitions extends api.StandardDefinitions { private lazy val boxedValueClassesSet = boxedClass.values.toSet[Symbol] + BoxedUnitClass /** Is symbol a value class? 
*/ - def isPrimitiveValueClass(sym: Symbol) = ScalaValueClassesSet contains sym + def isPrimitiveValueClass(sym: Symbol) = ScalaValueClassesSet.contains(sym) def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol) /** Is symbol a boxed value class, e.g. java.lang.Integer? */ @@ -1477,7 +1638,7 @@ trait Definitions extends api.StandardDefinitions { else boxedClass.map(kvp => (kvp._2: Symbol, kvp._1)).getOrElse(sym, NoSymbol) /** Is type's symbol a numeric value class? */ - def isNumericValueType(tp: Type): Boolean = tp match { + def isNumericValueType(tp: Type): Boolean = tp.widen match { case TypeRef(_, sym, _) => isNumericValueClass(sym) case _ => false } @@ -1486,7 +1647,7 @@ trait Definitions extends api.StandardDefinitions { // todo: reconcile with javaSignature!!! def signature(tp: Type): String = { - def erasure(tp: Type): Type = tp match { + @tailrec def erasure(tp: Type): Type = tp match { case st: SubType => erasure(st.supertype) case RefinedType(parents, _) => erasure(parents.head) case _ => tp @@ -1506,14 +1667,13 @@ trait Definitions extends api.StandardDefinitions { } // documented in JavaUniverse.init - def init() { - if (isInitialized) return + def init(): Unit = if (!isInitialized) { ObjectClass.initialize ScalaPackageClass.initialize symbolsNotPresentInBytecode NoSymbol isInitialized = true - } //init + } class UniverseDependentTypes(universe: Tree) { lazy val nameType = universeMemberType(tpnme.Name) @@ -1533,14 +1693,13 @@ trait Definitions extends api.StandardDefinitions { /** Efficient access to member symbols which must be looked up each run. 
Access via `currentRun.runDefinitions` */ final class RunDefinitions { - lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS) + // The given symbol represents String.+ + // TODO: this misses Predef.any2stringadd + def isStringAddition(sym: Symbol) = sym == String_+ + lazy val String_valueOf_Int = getMemberMethod(StringClass.companionModule, nme.valueOf).suchThat( x => x.paramss.head.length == 1 && x.firstParam.info.typeSymbol == IntClass) - // The given symbol represents either String.+ or StringAdd.+ - // TODO: this misses Predef.any2stringadd - def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+ - lazy val StringContext_f = getMemberMethod(StringContextClass, nme.f) lazy val StringContext_s = getMemberMethod(StringContextClass, nme.s) lazy val StringContext_raw = getMemberMethod(StringContextClass, nme.raw_) @@ -1574,13 +1733,45 @@ trait Definitions extends api.StandardDefinitions { lazy val Option_apply = getMemberMethod(OptionModule, nme.apply) lazy val Option_isDefined: Symbol = getMemberMethod(OptionClass, TermName("isDefined")) lazy val Option_get: Symbol = getMemberMethod(OptionClass, TermName("get")) - lazy val List_apply = DefinitionsClass.this.List_apply - - /** - * Is the given symbol `List.apply`? - * To to avoid bootstrapping cycles, this return false if the given symbol or List itself is not initialized. + private lazy val List_apply = DefinitionsClass.this.List_apply + private lazy val Seq_apply = { + val result = getMemberMethod(DefinitionsClass.this.SeqModule, nme.apply) + assert(result == getMemberMethod(DefinitionsClass.this.Collection_SeqModule, nme.apply), "Expected collection.Seq and immutable.Seq to have the same apply member") + result + } + /* This is for translating uses of List() into Nil. 
+ * + * 2.12 would see scala.collection.immutable.List.apply[Nothing] + * 2.13 sees scala.`package`.List().apply or after typer scala.`package`.List().apply(scala.collection.immutable.Nil).$asInstanceOf[List] + * + * Conservative check to avoid cycles is restored. */ - def isListApply(sym: Symbol) = sym.isInitialized && ListModule.hasCompleteInfo && sym == List_apply + final def isListApply(tree: Tree): Boolean = + tree.symbol.isInitialized && ListModule.hasCompleteInfo && (tree.symbol == List_apply || tree.symbol.name == nme.apply) && cond(tree) { + case treeInfo.Applied(Select(qual, _), _, _) => + treeInfo.isQualifierSafeToElide(qual) && (qual.symbol == ListModule || qual.symbol == ListModuleAlias /*|| isListAlias(qual.tpe)*/) + } + /* + private def isListAlias(tpe: Type): Boolean = cond(tpe) { + case SingleType(_, _) => tpe.widen.typeSymbol.companionSymbol == ListModule + } + */ + + final def isSeqApply(tree: Tree): Boolean = isListApply(tree) || { + /* + * This is now also used for converting {Seq, List}.apply(a, b, c) to `a :: b :: c :: Nil` in CleanUp. 
+ */ + def isSeqFactory(sym: Symbol) = sym == SeqModule || sym == SeqModuleAlias || sym == Collection_SeqModule + + (tree.symbol == Seq_apply) && (tree match { + case treeInfo.Applied(core @ Select(qual, _), _, _) => + treeInfo.isQualifierSafeToElide(qual) && isSeqFactory(qual.symbol) + case _ => false + }) + } + + final def isNil(sym: Symbol) = sym == NilModule || sym == NilModuleAlias + def isPredefClassOf(sym: Symbol) = if (PredefModule.hasCompleteInfo) sym == Predef_classOf else isPredefMemberNamed(sym, nme.classOf) lazy val TagMaterializers = Map[Symbol, Symbol]( @@ -1589,17 +1780,21 @@ trait Definitions extends api.StandardDefinitions { TypeTagClass -> materializeTypeTag ) lazy val TagSymbols = TagMaterializers.keySet - lazy val Predef_conforms = (getMemberIfDefined(PredefModule, nme.conforms) - orElse getMemberMethod(PredefModule, TermName("conforms"))) // TODO: predicate on -Xsource:2.10 (for now, needed for transition from M8 -> RC1) - lazy val Predef_classOf = getMemberMethod(PredefModule, nme.classOf) - - lazy val Predef_double2Double = getMemberMethod(PredefModule, nme.double2Double) - lazy val Predef_float2Float = getMemberMethod(PredefModule, nme.float2Float) - lazy val Predef_byte2Byte = getMemberMethod(PredefModule, nme.byte2Byte) - lazy val Predef_short2Short = getMemberMethod(PredefModule, nme.short2Short) - lazy val Predef_char2Character = getMemberMethod(PredefModule, nme.char2Character) - lazy val Predef_int2Integer = getMemberMethod(PredefModule, nme.int2Integer) - lazy val Predef_long2Long = getMemberMethod(PredefModule, nme.long2Long) + + // Methods treated specially by implicit search + lazy val Predef_conforms = getMemberIfDefined(PredefModule, nme.conforms) + lazy val SubTypeModule = requiredModule[scala.<:<[_,_]] + lazy val SubType_refl = getMemberMethod(SubTypeModule, nme.refl) + + lazy val Predef_classOf = getMemberMethod(PredefModule, nme.classOf) + + lazy val Predef_double2Double = getMemberMethod(PredefModule, nme.double2Double) + 
lazy val Predef_float2Float = getMemberMethod(PredefModule, nme.float2Float) + lazy val Predef_byte2Byte = getMemberMethod(PredefModule, nme.byte2Byte) + lazy val Predef_short2Short = getMemberMethod(PredefModule, nme.short2Short) + lazy val Predef_char2Character = getMemberMethod(PredefModule, nme.char2Character) + lazy val Predef_int2Integer = getMemberMethod(PredefModule, nme.int2Integer) + lazy val Predef_long2Long = getMemberMethod(PredefModule, nme.long2Long) lazy val Predef_boolean2Boolean = getMemberMethod(PredefModule, nme.boolean2Boolean) lazy val PreDef_primitives2Primitives = @@ -1607,9 +1802,8 @@ trait Definitions extends api.StandardDefinitions { Predef_char2Character, Predef_int2Integer, Predef_long2Long, Predef_boolean2Boolean) lazy val Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) - lazy val Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) - lazy val Predef_genericWrapRefArray = getMemberMethod(PredefModule, nme.genericWrapArray) lazy val Predef_??? = DefinitionsClass.this.Predef_??? 
+ lazy val Predef_any2stringaddMethod = getMemberMethod(PredefModule, nme.any2stringadd).suchThat(_.isMethod) lazy val arrayApplyMethod = getMemberMethod(ScalaRunTimeModule, nme.array_apply) lazy val arrayUpdateMethod = getMemberMethod(ScalaRunTimeModule, nme.array_update) @@ -1617,7 +1811,21 @@ trait Definitions extends api.StandardDefinitions { lazy val arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone) lazy val ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible) lazy val arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass) - lazy val traversableDropMethod = getMemberMethod(ScalaRunTimeModule, nme.drop) + lazy val wrapVarargsRefArrayMethod = getMemberMethod(ScalaRunTimeModule, nme.wrapRefArray) + lazy val genericWrapVarargsRefArrayMethod = getMemberMethod(ScalaRunTimeModule, nme.genericWrapArray) + lazy val primitiveWrapArrayMethod = Seq[Symbol]( + getMemberMethod(ScalaRunTimeModule, nme.wrapBooleanArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapByteArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapCharArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapIntArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapDoubleArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapFloatArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapLongArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapShortArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapUnitArray) + ) + + lazy val RuntimeStatics_ioobe = getMemberMethod(RuntimeStaticsModule, nme.ioobe) lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group) @@ -1637,7 +1845,10 @@ trait Definitions extends api.StandardDefinitions { lazy val PostfixOpsFeature = getLanguageFeature("postfixOps") lazy val ReflectiveCallsFeature = getLanguageFeature("reflectiveCalls") lazy val ImplicitConversionsFeature = getLanguageFeature("implicitConversions") + + @deprecated("scala.language.higherKinds no longer needs to be imported 
explicitly", "2.13.1") lazy val HigherKindsFeature = getLanguageFeature("higherKinds") + lazy val ExistentialsFeature = getLanguageFeature("existentials") lazy val ApiUniverseReify = ApiUniverseClass.map(sym => getDeclIfDefined(sym, nme.reify)) diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala index 36690cae6d43..0eb6cad3cf0d 100644 --- a/src/reflect/scala/reflect/internal/Depth.scala +++ b/src/reflect/scala/reflect/internal/Depth.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -31,6 +31,7 @@ final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] { override def toString = s"Depth($depth)" } +@FunctionalInterface trait DepthFunction[A] { def apply(a: A): Depth } object Depth { diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 5df285887582..722a08bd71b4 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,6 +14,8 @@ package scala package reflect package internal +import scala.annotation.tailrec + /** The name of this trait defines the eventual intent better than * it does the initial contents. 
@@ -33,7 +35,7 @@ trait ExistentialsAndSkolems { class Deskolemizer extends LazyType { override val typeParams = tparams val typeSkolems = typeParams map (_.newTypeSkolem setInfo this) - override def complete(sym: Symbol) { + override def complete(sym: Symbol): Unit = { // The info of a skolem is the skolemized info of the // actual type parameter of the skolem sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems) @@ -55,10 +57,10 @@ trait ExistentialsAndSkolems { * the typeSymbol is not amongst the symbols being hidden. */ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = { - def safeBound(t: Type): Type = + @tailrec def safeBound(t: Type): Type = if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.upperBound) else t - def hiBound(s: Symbol): Type = safeBound(s.existentialBound.upperBound) match { + def hiBound(s: Symbol): Type = safeBound(s.existentialBound.upperBound).resultType match { case tp @ RefinedType(parents, decls) => val parents1 = parents mapConserve safeBound if (parents eq parents1) tp @@ -69,8 +71,8 @@ trait ExistentialsAndSkolems { // Hanging onto lower bound in case anything interesting // happens with it. 
mapFrom(hidden)(s => s.existentialBound match { - case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s)) - case _ => hiBound(s) + case GenPolyType(tparams, TypeBounds(lo, _)) => genPolyType(tparams, TypeBounds(lo, hiBound(s))) + case _ => hiBound(s) }) } @@ -122,5 +124,5 @@ trait ExistentialsAndSkolems { */ final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Symbol = NoSymbol): Type = if (hidden.isEmpty) tp - else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction) + else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction(_, _)) } diff --git a/src/reflect/scala/reflect/internal/FatalError.scala b/src/reflect/scala/reflect/internal/FatalError.scala index 759acd116f37..a39b64f839ba 100644 --- a/src/reflect/scala/reflect/internal/FatalError.scala +++ b/src/reflect/scala/reflect/internal/FatalError.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala index ceb592da4cbc..c1fd55ba419f 100644 --- a/src/reflect/scala/reflect/internal/FlagSets.scala +++ b/src/reflect/scala/reflect/internal/FlagSets.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index ee64912ac093..7a88d2781de3 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -80,7 +80,7 @@ package internal // 57: notOVERRIDE // 58: notPRIVATE // 59: -// 60: +// 60: SCALA3X // 61: // 62: // 63: @@ -88,36 +88,37 @@ package internal /** Flags set on Modifiers instances in the parsing stage. */ class ModifierFlags { - final val IMPLICIT = 1 << 9 - final val FINAL = 1 << 5 // May not be overridden. Note that java final implies much more than scala final. - final val PRIVATE = 1 << 2 - final val PROTECTED = 1 << 0 - - final val SEALED = 1 << 10 - final val OVERRIDE = 1 << 1 - final val CASE = 1 << 11 - final val ABSTRACT = 1 << 3 // abstract class, or used in conjunction with abstract override. + final val IMPLICIT = 1L << 9 + final val FINAL = 1L << 5 // May not be overridden. Note that java final implies much more than scala final. + final val PRIVATE = 1L << 2 + final val PROTECTED = 1L << 0 + + final val SEALED = 1L << 10 + final val OVERRIDE = 1L << 1 + final val CASE = 1L << 11 + final val ABSTRACT = 1L << 3 // abstract class, or used in conjunction with abstract override. // Note difference to DEFERRED! - final val DEFERRED = 1 << 4 // was `abstract' for members | trait is virtual - final val INTERFACE = 1 << 7 // symbol is an interface. the flag is set for: + final val DEFERRED = 1L << 4 // was `abstract` for members | trait is virtual + final val INTERFACE = 1L << 7 // symbol is an interface. the flag is set for: // - scala-defined traits with only abstract methods or fields // - any java-defined interface (even if it has default methods) - final val MUTABLE = 1 << 12 // symbol is a mutable variable. 
- final val PARAM = 1 << 13 // symbol is a (value or type) parameter to a method - final val MACRO = 1 << 15 // symbol is a macro definition - - final val COVARIANT = 1 << 16 // symbol is a covariant type variable - final val BYNAMEPARAM = 1 << 16 // parameter is by name - final val CONTRAVARIANT = 1 << 17 // symbol is a contravariant type variable - final val ABSOVERRIDE = 1 << 18 // combination of abstract & override - final val LOCAL = 1 << 19 // symbol is local to current class (i.e. private[this] or protected[this] + final val MUTABLE = 1L << 12 // symbol is a mutable variable. + final val PARAM = 1L << 13 // symbol is a (value or type) parameter to a method + final val MACRO = 1L << 15 // symbol is a macro definition + + final val COVARIANT = 1L << 16 // symbol is a covariant type variable + final val BYNAMEPARAM = 1L << 16 // parameter is by name + final val CONTRAVARIANT = 1L << 17 // symbol is a contravariant type variable + final val ABSOVERRIDE = 1L << 18 // combination of abstract & override + final val LOCAL = 1L << 19 // symbol is local to current class (i.e. 
private[this] or protected[this] // pre: PRIVATE or PROTECTED are also set - final val JAVA = 1 << 20 // symbol was defined by a Java class - final val STATIC = 1 << 23 // static field, method or class - final val CASEACCESSOR = 1 << 24 // symbol is a case parameter (or its accessor, or a GADT skolem) - final val TRAIT = 1 << 25 // symbol is a trait - final val DEFAULTPARAM = 1 << 25 // the parameter has a default value - final val PARAMACCESSOR = 1 << 29 // for field definitions generated for primary constructor + final val JAVA = 1L << 20 // symbol was defined by a Java class + final val SCALA3X = 1L << 60 // class was defined in Scala 3 + final val STATIC = 1L << 23 // static field, method or class + final val CASEACCESSOR = 1L << 24 // symbol is a case parameter (or its accessor, or a GADT skolem) + final val TRAIT = 1L << 25 // symbol is a trait + final val DEFAULTPARAM = 1L << 25 // the parameter has a default value + final val PARAMACCESSOR = 1L << 29 // for field definitions generated for primary constructor // parameters (no matter if it's a 'val' parameter or not) // for parameters of a primary constructor ('val' or not) // for the accessor methods generated for 'val' or 'var' parameters @@ -141,22 +142,22 @@ object ModifierFlags extends ModifierFlags /** All flags and associated operations */ class Flags extends ModifierFlags { - final val METHOD = 1 << 6 // a method - final val MODULE = 1 << 8 // symbol is module or class implementing a module - final val PACKAGE = 1 << 14 // symbol is a java package - - final val CAPTURED = 1 << 16 // variable is accessed from nested function. Set by LambdaLift. - final val LABEL = 1 << 17 // method symbol is a label. Set by TailCall - final val INCONSTRUCTOR = 1 << 17 // class symbol is defined in this/superclass constructor. 
- final val SYNTHETIC = 1 << 21 // symbol is compiler-generated (compare with ARTIFACT) - final val STABLE = 1 << 22 // functions that are assumed to be stable + final val METHOD = 1L << 6 // a method + final val MODULE = 1L << 8 // symbol is module or class implementing a module + final val PACKAGE = 1L << 14 // symbol is a java package + + final val CAPTURED = 1L << 16 // variable is accessed from nested function. Set by LambdaLift. + final val LABEL = 1L << 17 // method symbol is a label. Set by TailCall + final val INCONSTRUCTOR = 1L << 17 // class symbol is defined in this/superclass constructor. + final val SYNTHETIC = 1L << 21 // symbol is compiler-generated (compare with ARTIFACT) + final val STABLE = 1L << 22 // functions that are assumed to be stable // (typically, access methods for valdefs) // or classes that do not contain abstract types. - final val BRIDGE = 1 << 26 // function is a bridge method. Set by Erasure - final val ACCESSOR = 1 << 27 // a value or variable accessor (getter or setter) + final val BRIDGE = 1L << 26 // function is a bridge method. Set by Erasure + final val ACCESSOR = 1L << 27 // a value or variable accessor (getter or setter) - final val SUPERACCESSOR = 1 << 28 // a super accessor - final val MODULEVAR = 1 << 30 // for variables: is the variable caching a module value + final val SUPERACCESSOR = 1L << 28 // a super accessor + final val MODULEVAR = 1L << 30 // for variables: is the variable caching a module value final val IS_ERROR = 1L << 32 // symbol is an error symbol final val OVERLOADED = 1L << 33 // symbol is overloaded @@ -202,7 +203,7 @@ class Flags extends ModifierFlags { // The flags (1L << 59) to (1L << 63) are currently unused. If added to the InitialFlags mask, // they could be used as normal flags. 
- final val InitialFlags = 0x0007FFFFFFFFFFFFL // normal flags, enabled from the first phase: 1L to (1L << 50) + final val InitialFlags = 0x1007FFFFFFFFFFFFL // normal flags, enabled from the first phase: 1L to (1L << 50) + (1L << 60) final val LateFlags = 0x00F8000000000000L // flags that override flags in (1L << 4) to (1L << 8): DEFERRED, FINAL, INTERFACE, METHOD, MODULE final val AntiFlags = 0x0700000000000000L // flags that cancel flags in 1L to (1L << 2): PROTECTED, OVERRIDE, PRIVATE final val LateShift = 47 @@ -212,7 +213,7 @@ class Flags extends ModifierFlags { final val PhaseIndependentFlags = 0xF807FFFFFFFFFE08L //this should be // final val PhaseIndependentFlags = (-1L & ~LateFlags & ~AntiFlags & ~(LateFlags >>> LateShift) & ~(AntiFlags >>> AntiShift))) - // but the constant folder doesnt optimise this! Good news is that is expected to be fixed soon :-) + // but the constant folder does not optimise this! Good news is that is expected to be fixed soon :-) assert (PhaseIndependentFlags == (-1L & ~LateFlags & ~AntiFlags & ~(LateFlags >>> LateShift) & ~(AntiFlags >>> AntiShift))) @@ -226,7 +227,7 @@ class Flags extends ModifierFlags { // ------- late flags (set by a transformer phase) --------------------------------- // // Summary of when these are claimed to be first used. - // You can get this output with scalac -Xshow-phases -Ydebug. + // You can get this output with scalac -Vphases -Vdebug. // // refchecks 7 [START] // specialize 13 [START] @@ -237,14 +238,14 @@ class Flags extends ModifierFlags { // notPRIVATE set in Symbols#makeNotPrivate, IExplicitOuter#transform, Inliners. // notPROTECTED set in ExplicitOuter#transform. -// final val lateDEFERRED = (DEFERRED: Long) << LateShift // unused -// final val lateFINAL = (FINAL: Long) << LateShift // only used for inliner -- could be subsumed by notPRIVATE? 
-// final val lateMETHOD = (METHOD: Long) << LateShift // unused -// final val lateMODULE = (MODULE: Long) << LateShift // unused +// final val lateDEFERRED = (0L + DEFERRED) << LateShift // unused +// final val lateFINAL = (0L + FINAL) << LateShift // only used for inliner -- could be subsumed by notPRIVATE? +// final val lateMETHOD = (0L + METHOD) << LateShift // unused +// final val lateMODULE = (0L + MODULE) << LateShift // unused -// final val notOVERRIDE = (OVERRIDE: Long) << AntiShift // unused - final val notPRIVATE = (PRIVATE: Long) << AntiShift - final val notPROTECTED = (PROTECTED: Long) << AntiShift +// final val notOVERRIDE = (0L + OVERRIDE) << AntiShift // unused + final val notPRIVATE = (0L + PRIVATE) << AntiShift + final val notPROTECTED = (0L + PROTECTED) << AntiShift // ------- masks ----------------------------------------------------------------------- @@ -320,11 +321,15 @@ class Flags extends ModifierFlags { /** These flags are not pickled */ - final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING + final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING | SCALA3X // A precaution against future additions to FlagsNotPickled turning out // to be overloaded flags thus not-pickling more than intended. - assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled)) + assert( + (OverloadedFlagsMask & FlagsNotPickled) == 0, + "overloaded flags should not overlap with FlagsNotPickled; found: " + + flagsToString(OverloadedFlagsMask & FlagsNotPickled) + ) /** These flags are pickled */ final val PickledFlags = ( @@ -355,20 +360,20 @@ class Flags extends ModifierFlags { // The flags from 0x001 to 0x800 are different in the raw flags // and in the pickled format. 
- private final val IMPLICIT_PKL = (1 << 0) - private final val FINAL_PKL = (1 << 1) - private final val PRIVATE_PKL = (1 << 2) - private final val PROTECTED_PKL = (1 << 3) - private final val SEALED_PKL = (1 << 4) - private final val OVERRIDE_PKL = (1 << 5) - private final val CASE_PKL = (1 << 6) - private final val ABSTRACT_PKL = (1 << 7) - private final val DEFERRED_PKL = (1 << 8) - private final val METHOD_PKL = (1 << 9) - private final val MODULE_PKL = (1 << 10) - private final val INTERFACE_PKL = (1 << 11) - - private final val PKL_MASK = 0x00000FFF + private final val IMPLICIT_PKL = (1L << 0) + private final val FINAL_PKL = (1L << 1) + private final val PRIVATE_PKL = (1L << 2) + private final val PROTECTED_PKL = (1L << 3) + private final val SEALED_PKL = (1L << 4) + private final val OVERRIDE_PKL = (1L << 5) + private final val CASE_PKL = (1L << 6) + private final val ABSTRACT_PKL = (1L << 7) + private final val DEFERRED_PKL = (1L << 8) + private final val METHOD_PKL = (1L << 9) + private final val MODULE_PKL = (1L << 10) + private final val INTERFACE_PKL = (1L << 11) + + //private final val PKL_MASK = 0x00000FFF /** Pickler correspondence, ordered roughly by frequency of occurrence */ private def rawPickledCorrespondence = Array[(Long, Long)]( @@ -386,11 +391,11 @@ class Flags extends ModifierFlags { (ABSTRACT, ABSTRACT_PKL) ) - private val mappedRawFlags = rawPickledCorrespondence map (_._1) - private val mappedPickledFlags = rawPickledCorrespondence map (_._2) + private[this] val mappedRawFlags = rawPickledCorrespondence map (_._1) + private[this] val mappedPickledFlags = rawPickledCorrespondence map (_._2) private class MapFlags(from: Array[Long], to: Array[Long]) extends (Long => Long) { - val fromSet = (0L /: from) (_ | _) + val fromSet = from.foldLeft(0L) (_ | _) def apply(flags: Long): Long = { var result = flags & ~fromSet @@ -432,7 +437,7 @@ class Flags extends ModifierFlags { case MACRO => "" // (1L << 15) case BYNAMEPARAM => "" // (1L << 16) case 
CONTRAVARIANT => "" // (1L << 17) - case ABSOVERRIDE => "absoverride" // (1L << 18) + case ABSOVERRIDE => "abstract override" // (1L << 18) case LOCAL => "" // (1L << 19) case JAVA => "" // (1L << 20) case SYNTHETIC => "" // (1L << 21) @@ -473,8 +478,8 @@ class Flags extends ModifierFlags { case `notPROTECTED` => "" // (1L << 56) case 0x200000000000000L => "" // (1L << 57) case `notPRIVATE` => "" // (1L << 58) - case NEEDS_TREES => "" // (1L << 59) - case 0x1000000000000000L => "" // (1L << 60) + case NEEDS_TREES => "" // (1L << 59) + case SCALA3X => "" // (1L << 60) case 0x2000000000000000L => "" // (1L << 61) case 0x4000000000000000L => "" // (1L << 62) case 0x8000000000000000L => "" // (1L << 63) @@ -493,7 +498,9 @@ class Flags extends ModifierFlags { else "private[" + privateWithin + "]" ) - @deprecated("use flagString on the flag-carrying member", "2.10.0") + // FIXME: This method is used several places internally. Remove its + // internal use and then re-deprecate it. + // @deprecated("use flagString on the flag-carrying member", "2.10.0") private[scala] def flagsToString(flags: Long, privateWithin: String): String = { val access = accessString(flags, privateWithin) val nonAccess = flagsToString(flags & ~AccessFlags) @@ -501,7 +508,9 @@ class Flags extends ModifierFlags { List(nonAccess, access) filterNot (_ == "") mkString " " } - @deprecated("use flagString on the flag-carrying member", "2.10.0") + // FIXME: This method is used several places internally. Remove its + // internal use and then re-deprecate it. 
+ // @deprecated("use flagString on the flag-carrying member", "2.10.0") private[scala] def flagsToString(flags: Long): String = { // Fast path for common case if (flags == 0L) "" else { @@ -512,9 +521,9 @@ class Flags extends ModifierFlags { if ((flags & mask) != 0L) { val s = flagToString(mask) if (s.length > 0) { - if (sb eq null) sb = new StringBuilder append s - else if (sb.length == 0) sb append s - else sb append " " append s + if (sb eq null) sb = new StringBuilder + else if (!sb.isEmpty) sb.append(" ") + sb.append(s) } } i += 1 diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala index e59c7781b8d0..948c99184056 100644 --- a/src/reflect/scala/reflect/internal/FreshNames.scala +++ b/src/reflect/scala/reflect/internal/FreshNames.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala index b298a6954af1..c9e0abb855a5 100644 --- a/src/reflect/scala/reflect/internal/HasFlags.scala +++ b/src/reflect/scala/reflect/internal/HasFlags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -121,8 +121,6 @@ trait HasFlags { def isOverride = hasFlag(OVERRIDE) def isParamAccessor = hasFlag(PARAMACCESSOR) def isPrivate = hasFlag(PRIVATE) - @deprecated ("use `hasPackageFlag` instead", "2.11.0") - def isPackage = hasFlag(PACKAGE) def isPrivateLocal = hasAllFlags(PrivateLocal) def isProtected = hasFlag(PROTECTED) def isProtectedLocal = hasAllFlags(ProtectedLocal) @@ -142,11 +140,11 @@ trait HasFlags { while (i <= MaxBitPosition) { val flag = Flags.rawFlagPickledOrder(i) if ((bits & flag) != 0L) { - val s = resolveOverloadedFlag(flag) + val s = Flags.flagToString(flag) if (s.length > 0) { - if (sb eq null) sb = new StringBuilder append s - else if (sb.length == 0) sb append s - else sb append " " append s + if (sb eq null) sb = new StringBuilder + else if (!sb.isEmpty) sb.append(" ") + sb.append(s) } } i += 1 diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 93c0093b6e8c..37c18f74cecf 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -52,7 +52,7 @@ trait Importers { to: SymbolTable => // fixups and maps prevent stackoverflows in importer var pendingSyms = 0 var pendingTpes = 0 - lazy val fixups = scala.collection.mutable.MutableList[Function0[Unit]]() + lazy val fixups = scala.collection.mutable.ListBuffer[Function0[Unit]]() def addFixup(fixup: => Unit): Unit = fixups += (() => fixup) def tryFixup(): Unit = { if (pendingSyms == 0 && pendingTpes == 0) { @@ -126,6 +126,7 @@ trait Importers { to: SymbolTable => case null => null case theirloc: from.Tree => importTree(theirloc) case theirloc: from.Symbol => importSymbol(theirloc) + case x => throw new MatchError(x) } myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags) case their: from.ModuleClassSymbol => @@ -142,8 +143,8 @@ trait Importers { to: SymbolTable => } my.associatedFile = their.associatedFile my - case their: from.TypeSymbol => - myowner.newTypeSymbol(myname.toTypeName, mypos, myflags) + case their: from.TypeSymbol => myowner.newTypeSymbol(myname.toTypeName, mypos, myflags) + case x => throw new MatchError(x) } symMap.weakUpdate(their, my) markFlagsCompleted(my)(mask = AllFlags) @@ -205,7 +206,7 @@ trait Importers { to: SymbolTable => myexisting.orElse { val my = cachedRecreateSymbol(their) if (myscope != NoType) { - assert(myscope.decls.lookup(myname) == NoSymbol, myname+" "+myscope.decl(myname)+" "+myexisting) + assert(myscope.decls.lookup(myname) == NoSymbol, s"$myname ${myscope.decl(myname)} $myexisting") myscope.decls enter my } my @@ -285,6 +286,7 @@ trait Importers { to: SymbolTable => NoPrefix case null => null + case x => throw new MatchError(x) } def importType(their: from.Type): Type = { @@ -359,8 +361,8 @@ trait Importers { to: SymbolTable => new Function(vparams map importValDef, importTree(body)) case from.Assign(lhs, rhs) => new Assign(importTree(lhs), importTree(rhs)) - case from.AssignOrNamedArg(lhs, rhs) => - new AssignOrNamedArg(importTree(lhs), importTree(rhs)) + case from.NamedArg(lhs, rhs) => + 
new NamedArg(importTree(lhs), importTree(rhs)) case from.If(cond, thenp, elsep) => new If(importTree(cond), importTree(thenp), importTree(elsep)) case from.Match(selector, cases) => @@ -396,7 +398,7 @@ trait Importers { to: SymbolTable => case from.Ident(name) => new Ident(importName(name)) case from.ReferenceToBoxed(ident) => - new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident }) + new ReferenceToBoxed(importTree(ident).asInstanceOf[Ident]) case from.Literal(constant @ from.Constant(_)) => new Literal(importConstant(constant)) case theirtt @ from.TypeTree() => @@ -421,6 +423,7 @@ trait Importers { to: SymbolTable => EmptyTree case null => null + case x => throw new MatchError(x) } def importTree(their: from.Tree): Tree = { @@ -456,8 +459,6 @@ trait Importers { to: SymbolTable => LiteralAnnotArg(importConstant(constant)) case from.ArrayAnnotArg(args) => ArrayAnnotArg(args map importAnnotArg) - case from.ScalaSigBytes(bytes) => - ScalaSigBytes(bytes) case from.NestedAnnotArg(annInfo) => NestedAnnotArg(importAnnotationInfo(annInfo)) case from.UnmappableAnnotArg => diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala index 8023f9f8fb32..85a8eea82b05 100644 --- a/src/reflect/scala/reflect/internal/InfoTransformers.scala +++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,8 @@ package scala package reflect package internal +import scala.annotation.tailrec + trait InfoTransformers { self: SymbolTable => @@ -27,7 +29,8 @@ trait InfoTransformers { val changesBaseClasses: Boolean def transform(sym: Symbol, tpe: Type): Type - def insert(that: InfoTransformer) { + @tailrec + final def insert(that: InfoTransformer): Unit = { assert(this.pid != that.pid, this.pid) if (that.pid < this.pid) { @@ -35,7 +38,7 @@ trait InfoTransformers { } else if (next.pid <= that.pid && next.pid != NoPhase.id) { next insert that } else { - log("Inserting info transformer %s following %s".format(phaseOf(that.pid), phaseOf(this.pid))) + log(s"Inserting info transformer ${phaseOf(that.pid)} following ${phaseOf(this.pid)}") that.next = next that.prev = this next.prev = that @@ -47,7 +50,8 @@ trait InfoTransformers { * If no such exists, the InfoTransformer with the next * higher pid. */ - def nextFrom(from: Phase#Id): InfoTransformer = + @tailrec + final def nextFrom(from: Phase#Id): InfoTransformer = if (from == this.pid) this else if (from < this.pid) if (prev.pid < from) this diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala index 31f97bb0df08..af9fd3468935 100644 --- a/src/reflect/scala/reflect/internal/Internals.scala +++ b/src/reflect/scala/reflect/internal/Internals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -25,7 +25,9 @@ trait Internals extends api.Internals { type Internal = MacroInternalApi lazy val internal: Internal = new SymbolTableInternal {} + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") type Compat = MacroCompatApi + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") lazy val compat: Compat = new Compat {} trait SymbolTableInternal extends MacroInternalApi { diff --git a/src/reflect/scala/reflect/internal/JDK9Reflectors.java b/src/reflect/scala/reflect/internal/JDK9Reflectors.java index e0deddf114a7..483e494af7b0 100644 --- a/src/reflect/scala/reflect/internal/JDK9Reflectors.java +++ b/src/reflect/scala/reflect/internal/JDK9Reflectors.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala index 687a6ed8cd81..79813ee0fec6 100644 --- a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala +++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,38 +18,40 @@ import scala.language.implicitConversions import java.lang.annotation.{ Annotation => jAnnotation } import java.lang.reflect.{ - Member => jMember, Constructor => jConstructor, Method => jMethod, - AnnotatedElement => jAnnotatedElement, Type => jType, - TypeVariable => jTypeVariable + Constructor => jConstructor, Method => jMethod, + Type => jType, TypeVariable => jTypeVariable } /** This class tries to abstract over some of the duplication * in java.lang.reflect.{ Method, Constructor }. 
*/ -class JMethodOrConstructor(val member: jMember with jAnnotatedElement) { - def isVarArgs: Boolean = member match { - case m: jMethod => m.isVarArgs - case m: jConstructor[_] => m.isVarArgs +sealed abstract class JMethodOrConstructor { + def isVarArgs: Boolean = this match { + case JMethod(m) => m.isVarArgs + case JConstructor(m) => m.isVarArgs } - def typeParams: Array[_ <: jTypeVariable[_]] = member match { - case m: jMethod => m.getTypeParameters - case m: jConstructor[_] => m.getTypeParameters + def typeParams: Array[_ <: jTypeVariable[_]] = this match { + case JMethod(m) => m.getTypeParameters + case JConstructor(m) => m.getTypeParameters } - def paramTypes: Array[jType] = member match { - case m: jMethod => m.getGenericParameterTypes - case m: jConstructor[_] => m.getGenericParameterTypes + def paramTypes: Array[jType] = this match { + case JMethod(m) => m.getGenericParameterTypes + case JConstructor(m) => m.getGenericParameterTypes } - def paramAnnotations: Array[Array[jAnnotation]] = member match { - case m: jMethod => m.getParameterAnnotations - case m: jConstructor[_] => m.getParameterAnnotations + def paramAnnotations: Array[Array[jAnnotation]] = this match { + case JMethod(m) => m.getParameterAnnotations + case JConstructor(m) => m.getParameterAnnotations } - def resultType: jType = member match { - case m: jMethod => m.getGenericReturnType - case m: jConstructor[_] => classOf[Unit] + def resultType: jType = this match { + case JMethod(m) => m.getGenericReturnType + case JConstructor(_) => classOf[Unit] } } object JMethodOrConstructor { - implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor = new JMethodOrConstructor(m) - implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = new JMethodOrConstructor(m) + implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor = JMethod(m) + implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = JConstructor(m) } + +final case class JMethod(m: 
jMethod) extends JMethodOrConstructor +final case class JConstructor(m: jConstructor[_]) extends JMethodOrConstructor diff --git a/src/reflect/scala/reflect/internal/JavaAccFlags.scala b/src/reflect/scala/reflect/internal/JavaAccFlags.scala index 8b07833c213c..726dd4bf329d 100644 --- a/src/reflect/scala/reflect/internal/JavaAccFlags.scala +++ b/src/reflect/scala/reflect/internal/JavaAccFlags.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 2a2b2511ba48..31e15b16321b 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,8 @@ package scala package reflect package internal -import scala.reflect.internal.util.StringOps.{ countAsString, countElementsAsString } +import scala.annotation.nowarn +import scala.reflect.internal.util.StringOps.{countAsString, countElementsAsString} trait Kinds { self: SymbolTable => @@ -54,7 +55,7 @@ trait Kinds { } } private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String = - f(a+qualify(a,p), p+qualify(p,a)) + f(a.toString+qualify(a,p), p.toString+qualify(p,a)) // Normally it's nicer to print nothing rather than '>: Nothing <: Any' all over // the place, but here we need it for the message to make sense. 
@@ -77,11 +78,11 @@ trait Kinds { private def buildMessage(xs: List[SymPair], f: (Symbol, Symbol) => String) = ( if (xs.isEmpty) "" - else xs map f.tupled mkString ("\n", ", ", "") + else xs.map(f.tupled).mkString("\n", ", ", "") ) def errorMessage(targ: Type, tparam: Symbol): String = ( - (targ+"'s type parameters do not match "+tparam+"'s expected parameters:") + (s"${targ}'s type parameters do not match ${tparam}'s expected parameters:") + buildMessage(arity, arityMessage) + buildMessage(variance, varianceMessage) + buildMessage(strictness, strictnessMessage) @@ -119,6 +120,7 @@ trait Kinds { * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since * List's type parameter is also covariant and its bounds are weaker than <: Int */ + @nowarn("cat=lint-nonlocal-return") def checkKindBounds0( tparams: List[Symbol], targs: List[Type], @@ -127,46 +129,46 @@ trait Kinds { explainErrors: Boolean ): List[(Type, Symbol, KindErrors)] = { - // instantiate type params that come from outside the abstract type we're currently checking - def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) - // check that the type parameters hkargs to a higher-kinded type conform to the // expected params hkparams + @nowarn("cat=lint-nonlocal-return") def checkKindBoundsHK( hkargs: List[Symbol], arg: Symbol, + argPre: Type, + argOwner: Symbol, param: Symbol, - paramowner: Symbol, + paramOwner: Symbol, underHKParams: List[Symbol], - withHKArgs: List[Symbol] + withHKArgs: List[Symbol], + flip: Boolean ): KindErrors = { var kindErrors: KindErrors = NoKindErrors - def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs) // @M sometimes hkargs != arg.typeParams, the symbol and the type may // have very different type parameters val hkparams = param.typeParams - def kindCheck(cond: Boolean, f: KindErrors => KindErrors) { - if (!cond) - kindErrors = f(kindErrors) - } + def kindCheck(cond: Boolean, f: KindErrors => KindErrors): Unit = + 
if (!cond) kindErrors = f(kindErrors) if (settings.isDebug) { - log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner) - log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner) + log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramOwner) + log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ argOwner) log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) } - if (!sameLength(hkargs, hkparams)) { + if (!sameLength(hkargs, hkparams)) return { // Any and Nothing are kind-overloaded if (arg == AnyClass || arg == NothingClass) NoKindErrors // shortcut: always set error, whether explainTypesOrNot - else return kindErrors.arityError(arg -> param) + else kindErrors.arityError(arg -> param) } else foreach2(hkargs, hkparams) { (hkarg, hkparam) => if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind * - kindCheck(variancesMatch(hkarg, hkparam), _ varianceError (hkarg -> hkparam)) + if (flip) kindCheck(variancesMatch(hkparam, hkarg), _.varianceError(hkparam -> hkarg)) + else kindCheck(variancesMatch(hkarg, hkparam), _.varianceError(hkarg -> hkparam)) + // instantiateTypeParams(tparams, targs) // higher-order bounds, may contain references to type arguments // substSym(hkparams, hkargs) @@ -176,11 +178,12 @@ trait Kinds { // conceptually the same. Could also replace the types by // polytypes, but can't just strip the symbols, as ordering // is lost then. 
- val declaredBounds = transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds, paramowner) - val declaredBoundsInst = transform(bindHKParams(declaredBounds), owner) - val argumentBounds = transform(hkarg.info.bounds, owner) + val declaredBounds = hkparam.info.instantiateTypeParams(tparams, targs).bounds.asSeenFrom(pre, paramOwner) + val declaredBoundsInst = declaredBounds.substSym(underHKParams, withHKArgs).asSeenFrom(pre, owner) + val argumentBounds = hkarg.info.bounds.asSeenFrom(argPre, argOwner).asSeenFrom(pre, owner) - kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam)) + if (flip) kindCheck(argumentBounds <:< declaredBoundsInst, _.strictnessError(hkparam -> hkarg)) + else kindCheck(declaredBoundsInst <:< argumentBounds, _.strictnessError(hkarg -> hkparam)) debuglog( "checkKindBoundsHK base case: " + hkparam + @@ -196,10 +199,13 @@ trait Kinds { kindErrors ++= checkKindBoundsHK( hkarg.typeParams, hkarg, + argPre, + argOwner, hkparam, - paramowner, + paramOwner, underHKParams ++ hkparam.typeParams, - withHKArgs ++ hkarg.typeParams + withHKArgs ++ hkarg.typeParams, + !flip ) } if (!explainErrors && !kindErrors.isEmpty) @@ -217,15 +223,18 @@ trait Kinds { flatMap2(tparams, targs) { (tparam, targ) => // Prevent WildcardType from causing kind errors, as typevars may be higher-order if (targ == WildcardType) Nil else { - // force symbol load for #4205 - targ.typeSymbolDirect.info + // NOTE: *not* targ.typeSymbol, which normalizes + // force initialize symbol for scala/bug#4205 + val targSym = targ.typeSymbolDirect.initialize + // NOTE: *not* targ.prefix, which normalizes + val targPre = targ.prefixDirect // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!! 
val tparamsHO = targ.typeParams if (targ.isHigherKinded || tparam.typeParams.nonEmpty) { - // NOTE: *not* targ.typeSymbol, which normalizes val kindErrors = checkKindBoundsHK( - tparamsHO, targ.typeSymbolDirect, tparam, - tparam.owner, tparam.typeParams, tparamsHO + tparamsHO, targSym, targPre, targSym.owner, + tparam, tparam.owner, tparam.typeParams, tparamsHO, + flip = false ) if (kindErrors.isEmpty) Nil else { if (explainErrors) List((targ, tparam, kindErrors)) @@ -259,7 +268,7 @@ trait Kinds { */ def scalaNotation: String - /** Kind notation used in http://adriaanm.github.com/files/higher.pdf. + /** Kind notation used in https://adriaanm.github.io/files/higher.pdf. * Proper types are expressed as *. * Type constructors are expressed * -> *(lo, hi) -(+)-> *. */ @@ -286,7 +295,7 @@ trait Kinds { case 2 => "X" case 3 => "Y" case 4 => "Z" - case n if n < 12 => ('O'.toInt - 5 + n).toChar.toString + case x if x < 12 => ('O'.toInt - 5 + x).toChar.toString case _ => "V" } } @@ -312,7 +321,7 @@ trait Kinds { case Head(o, _, _) => o case _ => 0 }).max - StringState((tokens /: (0 to maxOrder)) { (ts: Seq[ScalaNotation], o: Int) => + StringState((0 to maxOrder).foldLeft(tokens){ (ts: Seq[ScalaNotation], o: Int) => if (countByOrder(o) <= 1) ts map { case Head(`o`, _, a) => Head(o, None, a) @@ -342,7 +351,7 @@ trait Kinds { private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = { s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString)) } - def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString) + def scalaNotation: String = Kind.Head(order, None, None).toString + bounds.scalaNotation(_.toString) def starNotation: String = "*" + bounds.starNotation(_.toString) } object ProperTypeKind { @@ -402,8 +411,8 @@ trait Kinds { abstract class InferKind { protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind protected def infer(sym: Symbol, topLevel: Boolean): Kind
= infer(sym.tpeHK, sym.owner, topLevel) - def apply(sym: Symbol): Kind = infer(sym, true) - def apply(tpe: Type, owner: Symbol): Kind = infer(tpe, owner, true) + def apply(sym: Symbol): Kind = infer(sym, topLevel = true) + def apply(tpe: Type, owner: Symbol): Kind = infer(tpe, owner, topLevel = true) } def apply(pre: Type): InferKind = new InferKind { @@ -411,7 +420,7 @@ trait Kinds { val bounds = if (topLevel) TypeBounds.empty else tpe.asSeenFrom(pre, owner).bounds if(!tpe.isHigherKinded) ProperTypeKind(bounds) - else TypeConKind(bounds, tpe.typeParams map { p => Argument(p.variance, infer(p, false))(p) }) + else TypeConKind(bounds, tpe.typeParams map { p => Argument(p.variance, infer(p, topLevel = false))(p) }) } } } diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 8a4c485a7829..eceb7b9a77f2 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -45,20 +45,22 @@ trait Mirrors extends api.Mirrors { else definitions.findNamedMember(segs.tail, RootClass.info member segs.head) } + /** Todo: organize similar to mkStatic in scala.reflect.Base */ + @annotation.unused + private def getModuleOrClass(path: Name, len: Int): Symbol = + getModuleOrClass(path.toString, len, path.newName(_)) + private def getModuleOrClass(path: String, len: Int, toName: String => Name): Symbol = { - val point = path lastIndexOf ('.', len - 1) - val owner = - if (point > 0) getModuleOrClass(path, point, newTermName(_)) - else RootClass - - val name = toName(path.substring(point + 1, len)) - val sym = owner.info member name - val result = if (name.isTermName) sym.suchThat(_ hasFlag MODULE) else sym + val point = path.lastIndexOf('.', len - 1) + val owner = if (point > 0) getModuleOrClass(path, point, newTermName(_)) else RootClass + val name = toName(path.substring(point + 1, len)) + val sym = owner.info.member(name) + val result = if (name.isTermName) sym.suchThat(_.hasFlag(MODULE)) else sym if (result != NoSymbol) result else { if (settings.isDebug) { log(sym.info); log(sym.info.members) }//debug thisMirror.missingHook(owner, name) orElse { - MissingRequirementError.notFound((if (name.isTermName) "object " else "class ")+path+" in "+thisMirror) + MissingRequirementError.notFound(s"${if (name.isTermName) "object" else "class"} $path in $thisMirror") } } } @@ -66,7 +68,7 @@ trait Mirrors extends api.Mirrors { /** If you're looking for a class, pass a type name. * If a module, a term name. * - * Unlike `getModuleOrClass`, this function + * Unlike `staticModuleOrClass`, this function * loads unqualified names from the root package. 
*/ private def getModuleOrClass(path: String, toName: String => Name): Symbol = @@ -96,14 +98,13 @@ trait Mirrors extends api.Mirrors { /************************ loaders of class symbols ************************/ - private def ensureClassSymbol(fullname: String, sym: Symbol): ClassSymbol = { + private def ensureClassSymbol(fullname: String, sym: Symbol): ClassSymbol = sym match { case x: ClassSymbol => x case _ => MissingRequirementError.notFound("class " + fullname) } - } - @deprecated("Use overload that accepts a String.", "2.12.13") + @deprecated("Use overload that accepts a String.", "2.13.0") def getClassByName(fullname: Name): ClassSymbol = ensureClassSymbol(fullname.toString, getModuleOrClass(fullname.toString, fullname.length, newTypeName(_))) @@ -123,7 +124,7 @@ trait Mirrors extends api.Mirrors { def getClassIfDefined(fullname: String): Symbol = getClassIfDefined(fullname, newTypeName(_)) - @deprecated("Use overload that accepts a String.", "2.12.13") + @deprecated("Use overload that accepts a String.", "2.13.0") def getClassIfDefined(fullname: Name): Symbol = wrapMissing(getClassByName(fullname.toTypeName)) @@ -145,10 +146,10 @@ trait Mirrors extends api.Mirrors { private def ensureModuleSymbol(fullname: String, sym: Symbol, allowPackages: Boolean): ModuleSymbol = sym match { case x: ModuleSymbol if allowPackages || !x.hasPackageFlag => x - case _ => MissingRequirementError.notFound("object " + fullname) + case _ => MissingRequirementError.notFound(s"object $fullname") } - @deprecated("Use overload that accepts a String.", "2.12.13") + @deprecated("Use overload that accepts a String.", "2.13.0") def getModuleByName(fullname: Name): ModuleSymbol = getModuleByName(fullname.toString) @@ -171,7 +172,7 @@ trait Mirrors extends api.Mirrors { def getModuleIfDefined(fullname: String): Symbol = wrapMissing(getModuleByName(fullname)) - @deprecated("Use overload that accepts a String.", "2.12.13") + @deprecated("Use overload that accepts a String.", "2.13.0") def 
getModuleIfDefined(fullname: Name): Symbol = getModuleIfDefined(fullname.toString) @@ -192,7 +193,7 @@ trait Mirrors extends api.Mirrors { case _ => MissingRequirementError.notFound("package " + fullname) } - @deprecated("use overload that accepts a String.", since = "2.12.13") + @deprecated("use overload that accepts a String.", since = "2.13.0") def getPackage(fullname: TermName): ModuleSymbol = getPackage(fullname.toString) def getPackage(fullname: String): ModuleSymbol = @@ -244,8 +245,7 @@ trait Mirrors extends api.Mirrors { try body catch { case _: MissingRequirementError => NoSymbol } - def init() { - if (initialized) return + def init(): Unit = if (!initialized) { // Still fiddling with whether it's cleaner to do some of this setup here // or from constructors. The latter approach tends to invite init order issues. diff --git a/src/reflect/scala/reflect/internal/MissingRequirementError.scala b/src/reflect/scala/reflect/internal/MissingRequirementError.scala index b31cfc41eed7..91b2abcfaed0 100644 --- a/src/reflect/scala/reflect/internal/MissingRequirementError.scala +++ b/src/reflect/scala/reflect/internal/MissingRequirementError.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/Mode.scala b/src/reflect/scala/reflect/internal/Mode.scala index 50e80d488429..288a488bd99a 100644 --- a/src/reflect/scala/reflect/internal/Mode.scala +++ b/src/reflect/scala/reflect/internal/Mode.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,50 +14,45 @@ package scala package reflect package internal -import scala.language.implicitConversions - object Mode { - private implicit def liftIntBitsToMode(bits: Int): Mode = apply(bits) def apply(bits: Int): Mode = new Mode(bits) /** NOmode, EXPRmode and PATTERNmode are mutually exclusive. */ - final val NOmode: Mode = 0x000 - final val EXPRmode: Mode = 0x001 - final val PATTERNmode: Mode = 0x002 + final val NOmode: Mode = Mode(0x000) + final val EXPRmode: Mode = Mode(0x001) + final val PATTERNmode: Mode = Mode(0x002) - /** TYPEmode needs a comment. <-- XXX. - */ - final val TYPEmode: Mode = 0x004 + final val TYPEmode: Mode = Mode(0x004) /** SCCmode is orthogonal to above. When set we are * in the this or super constructor call of a constructor. */ - final val SCCmode: Mode = 0x008 + final val SCCmode: Mode = Mode(0x008) /** FUNmode is orthogonal to above. * When set we are looking for a method or constructor. */ - final val FUNmode: Mode = 0x010 + final val FUNmode: Mode = Mode(0x010) /** POLYmode is orthogonal to above. * When set expression types can be polymorphic. */ - final val POLYmode: Mode = 0x020 + final val POLYmode: Mode = Mode(0x020) /** QUALmode is orthogonal to above. When set * expressions may be packages and Java statics modules. */ - final val QUALmode: Mode = 0x040 + final val QUALmode: Mode = Mode(0x040) /** TAPPmode is set for the function/type constructor * part of a type application. When set we do not decompose PolyTypes. */ - final val TAPPmode: Mode = 0x080 + final val TAPPmode: Mode = Mode(0x080) /** LHSmode is set for the left-hand side of an assignment. */ - final val LHSmode: Mode = 0x400 + final val LHSmode: Mode = Mode(0x400) /** BYVALmode is set when we are typing an expression * that occurs in a by-value position. An expression e1 is in by-value @@ -66,39 +61,73 @@ object Mode { * arguments or the conditional of an if-then-else clause. * This mode has been added to support continuations. 
 */ - final val BYVALmode: Mode = 0x8000 + final val BYVALmode: Mode = Mode(0x8000) /** TYPEPATmode is set when we are typing a type in a pattern. */ - final val TYPEPATmode: Mode = 0x10000 + final val TYPEPATmode: Mode = Mode(0x10000) + + /** This mode is set when starting to type check a `Select`, `Apply` or `TypeApply`, e.g., `x.y` + * or `a.b.foo[T](x, y).bar(z)`. Stabilizers (a feature added in PR scala/scala#5999) created + * when typing the expression are emitted in a new enclosing block, e.g. + * { + * val $stabilizer$1 = a.b + * val $stabilizer$2 = $stabilizer$1.foo[T](x, y) + * $stabilizer$2.bar(z) + * } + * + * The flag is sticky for typing the function of an Apply (`forFunMode`) and qualifiers of + * nested selections (`MonoQualifierModes`), but cleared for argument expressions + * (`onlySticky`). So `a.b.foo(a.b.bar)` becomes + * { + * val $stabilizer$1 = a.b + * $stabilizer$1.foo({ + * val $stabilizer$2 = a.b + * $stabilizer$2.bar + * }) + * } + */ + final val APPSELmode: Mode = Mode(0x20000) + + /** + * Enabled while typing annotations. In this mode, no locals are created for named / default arguments and default + * arguments are AST copies of the default expression. Example: + * + * {{{ + * class a(x: Int = xDefault, y: Int) extends Annotation + * @a(y = yExpr) def f = 0 // annotation is typed as `new a(xDefault, yExpr)` + * new a(y = yExpr) // typed as `{ val x$1 = yExpr; val x$2 = a.init$default$1(); new a(x$2, x$1) }` + * }}} + */ + final val ANNOTmode: Mode = Mode(0x40000) private val StickyModes: Mode = EXPRmode | PATTERNmode | TYPEmode - private val StickyModesForFun: Mode = StickyModes | SCCmode - final val MonoQualifierModes: Mode = EXPRmode | QUALmode - final val PolyQualifierModes: Mode = EXPRmode | QUALmode | POLYmode - final val OperatorModes: Mode = EXPRmode | POLYmode | TAPPmode | FUNmode - - /** Translates a mask of mode flags into something readable. 
- */ - private val modeNameMap = Map[Int, String]( // TODO why duplicate the bitmasks here, rather than just referring to this.EXPRmode etc? - (1 << 0) -> "EXPRmode", - (1 << 1) -> "PATTERNmode", - (1 << 2) -> "TYPEmode", - (1 << 3) -> "SCCmode", - (1 << 4) -> "FUNmode", - (1 << 5) -> "POLYmode", - (1 << 6) -> "QUALmode", - (1 << 7) -> "TAPPmode", - (1 << 8) -> "<>", // formerly SUPERCONSTRmode - (1 << 9) -> "<>", // formerly SNDTRYmode - (1 << 10) -> "LHSmode", - (1 << 11) -> "<>", - (1 << 12) -> "<>", // formerly STARmode - (1 << 13) -> "<>", // formerly ALTmode - (1 << 14) -> "<>", // formerly HKmode - (1 << 15) -> "BYVALmode", - (1 << 16) -> "TYPEPATmode" - ).map({ case (k, v) => Mode(k) -> v }) + private val StickyModesForFun: Mode = StickyModes | SCCmode | ANNOTmode + final val MonoQualifierModes: Mode = EXPRmode | QUALmode | APPSELmode + final val PolyQualifierModes: Mode = MonoQualifierModes | POLYmode + final val OperatorModes: Mode = EXPRmode | POLYmode | TAPPmode | FUNmode + + /** Translates a mask of mode flags into something readable. 
*/ + private val modeNameMap = Map[Mode, String]( + EXPRmode -> "EXPRmode", + PATTERNmode -> "PATTERNmode", + TYPEmode -> "TYPEmode", + SCCmode -> "SCCmode", + FUNmode -> "FUNmode", + POLYmode -> "POLYmode", + QUALmode -> "QUALmode", + TAPPmode -> "TAPPmode", + LHSmode -> "LHSmode", + BYVALmode -> "BYVALmode", + TYPEPATmode -> "TYPEPATmode", + APPSELmode -> "APPSELmode", + ANNOTmode -> "ANNOTmode", + ) + + // Former modes and their values: + // SUPERCONSTRmode (0x100), SNDTRYmode (0x200), CONSTmode (0x800) + // STARmode (0x1000), ALTmode (0x2000), HKmode (0x4000) + // RETmode (0x20000) - now APPSELmode } import Mode._ @@ -109,7 +138,7 @@ final class Mode private (val bits: Int) extends AnyVal { def onlyTypePat = this & TYPEPATmode def onlySticky = this & Mode.StickyModes - def forFunMode = this & Mode.StickyModesForFun | FUNmode | POLYmode | BYVALmode + def forFunMode = this & Mode.StickyModesForFun | FUNmode | POLYmode | BYVALmode | APPSELmode def forTypeMode = if (typingPatternOrTypePat) TYPEmode | TYPEPATmode else TYPEmode def inAll(required: Mode) = (this & required) == required @@ -144,5 +173,5 @@ final class Mode private (val bits: Int) extends AnyVal { override def toString = if (this == NOmode) "NOmode" - else (modeNameMap filterKeys inAll).values.toList.sorted mkString "-" + else modeNameMap.view.filterKeys(inAll).values.toList.sorted.mkString("-") } diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 63a8fcbfdd43..2f6c8e01d166 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,8 +14,6 @@ package scala package reflect package internal -import scala.language.implicitConversions - import scala.io.Codec trait Names extends api.Names { @@ -35,7 +33,7 @@ trait Names extends api.Names { // // Discussion: https://groups.google.com/forum/#!search/biased$20scala-internals/scala-internals/0cYB7SkJ-nM/47MLhsgw8jwJ protected def synchronizeNames: Boolean = false - private val nameLock: Object = new Object + private[this] val nameLock: Object = new Object /** Memory to store all names sequentially. */ @@ -44,13 +42,14 @@ trait Names extends api.Names { def chrs: Array[Char] = _chrs @deprecated("Don't access name table contents directly.", "2.12.9") def chrs_=(cs: Array[Char]) = _chrs = cs - private var nc = 0 + private[this] var nc = 0 + final def nameTableSize: Int = nc /** Hashtable for finding term names quickly. */ - private val termHashtable = new Array[TermName](HASH_SIZE) + private[this] val termHashtable = new Array[TermName](HASH_SIZE) /** Hashtable for finding type names quickly. */ - private val typeHashtable = new Array[TypeName](HASH_SIZE) + private[this] val typeHashtable = new Array[TypeName](HASH_SIZE) final def allNames(): Iterator[TermName] = termHashtable.iterator.filter(_ ne null).flatMap(n => Iterator.iterate(n)(_.next).takeWhile(_ ne null)) @@ -76,7 +75,7 @@ trait Names extends api.Names { } /** Enter characters into chrs array. */ - private def enterChars(cs: Array[Char], offset: Int, len: Int) { + private def enterChars(cs: Array[Char], offset: Int, len: Int): Unit = { var i = 0 while (i < len) { if (nc + i == _chrs.length) { @@ -202,15 +201,11 @@ trait Names extends api.Names { type ThisNameType >: Null <: Name protected[this] def thisName: ThisNameType - // Note that "Name with ThisNameType" should be redundant - // because ThisNameType <: Name, but due to scala/bug#6161 the - // compile loses track of this fact. - /** Index into name table */ final def start: Int = index /** The next name in the same hash bucket. 
*/ - def next: Name with ThisNameType + def next: ThisNameType /** The length of this name. */ final def length: Int = len @@ -224,21 +219,20 @@ trait Names extends api.Names { def toTermName: TermName def toTypeName: TypeName def companionName: Name - @deprecated("Use either toTermName or toTypeName", "2.12.9") - def bothNames: List[Name] = List(toTermName, toTypeName) final def asTypeOf[N <: Name](other: N): N = (if (other.isTermName) toTermName else toTypeName).asInstanceOf[N] /** Return the subname with characters from from to to-1. */ - def subName(from: Int, to: Int): Name with ThisNameType + def subName(from: Int, to: Int): ThisNameType + override def subSequence(from: Int, to: Int): CharSequence = subName(from, to) /** Return a new name of the same variety. */ - def newName(str: String): Name with ThisNameType + def newName(str: String): ThisNameType /** Return a new name based on string transformation. */ - def mapName(f: String => String): Name with ThisNameType = newName(f(toString)) + def mapName(f: String => String): ThisNameType = newName(f(toString)) /** Copy bytes of this name to buffer cs, starting at position `offset`. */ final def copyChars(cs: Array[Char], offset: Int) = @@ -416,11 +410,36 @@ trait Names extends api.Names { false } + def lastIndexOf(s: String): Int = if (s.isEmpty) length else { + val slength = s.length() + val lastIndex = slength - 1 + val lastChar = s.charAt(lastIndex) + val contents = _chrs + val base = start + val min = base + lastIndex + var end = base + length - 1 + + while (end >= min) { + if (contents(end) == lastChar) { + var i = end - 1 + val i0 = i - lastIndex + var at = lastIndex - 1 + while (i > i0 && contents(i) == s.charAt(at)) { + i -= 1 + at -= 1 + } + if (i == i0) return i0 + 1 - base + } + end -= 1 + } + -1 + } + /** Some thoroughly self-explanatory convenience functions. They * assume that what they're being asked to do is known to be valid. 
*/ - final def startChar: Char = this charAt 0 - final def endChar: Char = this charAt len - 1 + final def startChar: Char = charAt(0) + final def endChar: Char = charAt(len - 1) final def startsWith(char: Char): Boolean = len > 0 && startChar == char final def startsWith(name: String): Boolean = startsWith(name, 0) final def endsWith(char: Char): Boolean = len > 0 && endChar == char @@ -437,7 +456,6 @@ trait Names extends api.Names { /** The lastPos methods already return -1 on failure. */ def lastIndexOf(ch: Char): Int = lastPos(ch) - def lastIndexOf(s: String): Int = toString lastIndexOf s /** Replace all occurrences of `from` by `to` in * name; result is always a term name. @@ -489,10 +507,35 @@ trait Names extends api.Names { def append(separator: Char, suffix: Name) = newName(toString + separator + suffix) def prepend(prefix: String) = newName("" + prefix + this) + def stripSuffix(suffix: String): ThisNameType = + if (endsWith(suffix)) dropRight(suffix.length) else thisName // OPT avoid creating a Name with `suffix` + + def stripSuffix(suffix: Name): ThisNameType = + if (endsWith(suffix)) dropRight(suffix.length) else thisName + + def take(n: Int): ThisNameType = subName(0, n) + def drop(n: Int): ThisNameType = subName(n, length) + def dropRight(n: Int): ThisNameType = subName(0, length - n) + + def dropLocal: TermName = toTermName stripSuffix NameTransformer.LOCAL_SUFFIX_STRING + def dropSetter: TermName = toTermName stripSuffix NameTransformer.SETTER_SUFFIX_STRING + def dropModule: ThisNameType = stripSuffix(NameTransformer.MODULE_SUFFIX_STRING) + def localName: TermName = getterName append NameTransformer.LOCAL_SUFFIX_STRING + def setterName: TermName = getterName append NameTransformer.SETTER_SUFFIX_STRING + def getterName: TermName = dropTraitSetterSeparator.dropSetter.dropLocal + def extensionName: TermName = append("$extension").toTermName + + private def dropTraitSetterSeparator: TermName = + indexOf(NameTransformer.TRAIT_SETTER_SEPARATOR_STRING) 
match { + case -1 => toTermName + case idx => toTermName.drop(idx + NameTransformer.TRAIT_SETTER_SEPARATOR_STRING.length) + } + def decodedName: ThisNameType = newName(decode) - def isOperatorName: Boolean = decode != toString // used by ide - def longString: String = nameKind + " " + decode - def debugString = { val s = decode ; if (isTypeName) s + "!" else s } + def isOperatorName: Boolean = decoded != toString + def longString: String = s"$nameKind $decoded" + def debugString = if (isTypeName) s"$decoded!" else decoded + final def toStringWithSuffix(suffix: String): String = { val builder = new java.lang.StringBuilder(length + suffix.length) builder.append(this: CharSequence) @@ -501,37 +544,10 @@ trait Names extends api.Names { } override final def toString: String = if (cachedString == null) new String(_chrs, index, len) else cachedString - final def appendTo(buffer: java.lang.StringBuffer, start: Int, length: Int): Unit = { + final def appendTo(buffer: java.lang.StringBuffer, start: Int, length: Int): Unit = buffer.append(_chrs, this.start + start, length) - } - } - - implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name) - implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name) - implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name) - - /** FIXME: This is a good example of something which is pure "value class" but cannot - * reap the benefits because an (unused) \$outer pointer so it is not single-field. 
- */ - final class NameOps[T <: Name](name: T) { - import NameTransformer._ - def stripSuffix(suffix: String): T = if (name endsWith suffix) dropRight(suffix.length) else name // OPT avoid creating a Name with `suffix` - def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name - def take(n: Int): T = name.subName(0, n).asInstanceOf[T] - def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T] - def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T] - def dropLocal: TermName = name.toTermName stripSuffix LOCAL_SUFFIX_STRING - def dropSetter: TermName = name.toTermName stripSuffix SETTER_SUFFIX_STRING - def dropModule: T = this stripSuffix MODULE_SUFFIX_STRING - def localName: TermName = getterName append LOCAL_SUFFIX_STRING - def setterName: TermName = getterName append SETTER_SUFFIX_STRING - def getterName: TermName = dropTraitSetterSeparator.dropSetter.dropLocal - - private def dropTraitSetterSeparator: TermName = - name indexOf TRAIT_SETTER_SEPARATOR_STRING match { - case -1 => name.toTermName - case idx => name.toTermName drop idx drop TRAIT_SETTER_SEPARATOR_STRING.length - } + final def appendTo(sb: StringBuilder, start: Int, length: Int): sb.type = + sb.appendAll(_chrs, this.start + start, length) } implicit val NameTag: ClassTag[Name] = ClassTag[Name](classOf[Name]) diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala index f6cf8dd5d938..50229277af54 100644 --- a/src/reflect/scala/reflect/internal/Phase.scala +++ b/src/reflect/scala/reflect/internal/Phase.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -22,10 +22,10 @@ abstract class Phase(val prev: Phase) extends Ordered[Phase] { val id: Id = if (prev eq null) 0 else prev.id + 1 /** New flags visible after this phase has completed */ - def nextFlags: Long = 0l + def nextFlags: Long = 0L /** New flags visible once this phase has started */ - def newFlags: Long = 0l + def newFlags: Long = 0L val fmask = ( if (prev eq null) Flags.InitialFlags @@ -48,9 +48,7 @@ abstract class Phase(val prev: Phase) extends Ordered[Phase] { def checkable: Boolean = true // NOTE: sbt injects its own phases which extend this class, and not GlobalPhase, so we must implement this logic here - private var _erasedTypes = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "erasure" || prev.erasedTypes) - protected def erasedTypes_=(value: Boolean): Unit = {_erasedTypes = value} - final def erasedTypes: Boolean = _erasedTypes // overridden in back-end + final val erasedTypes: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "erasure" || prev.erasedTypes) final val flatClasses: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "flatten" || prev.flatClasses) final val specialized: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "specialize" || prev.specialized) final val refChecked: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "refchecks" || prev.refChecked) @@ -78,10 +76,10 @@ abstract class Phase(val prev: Phase) extends Ordered[Phase] { object NoPhase extends Phase(null) { def name = "" override def keepsTypeParams = false - def run() { throw new Error("NoPhase.run") } + def run(): Unit = throw new Error("NoPhase.run") } object SomePhase extends Phase(NoPhase) { def name = "" - def run() { throw new Error("SomePhase.run") } + def run(): Unit = throw new Error("SomePhase.run") } diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index 7f2d8779dfd2..28a456207cc1 100644 --- 
a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -44,19 +44,26 @@ trait Positions extends api.Positions { self: SymbolTable => * The point of the wrapping position is the point of the default position. * If some of the trees are ranges, returns a range position enclosing all ranges * Otherwise returns default position that is either focused or not. + * If the default point falls outside the calculated range, widen the result to include it. */ def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, focus = true) - def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { - if (useOffsetPositions) default else { - val accum = new WrappingPosAccumulator() - var rest = trees - while (rest ne Nil) { - val head = rest.head - rest = rest.tail + + private def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = if (useOffsetPositions) default else { + // TODO: a tree's range position should cover the positions of all trees it "includes" + // (inclusion mostly refers to subtrees, but also other attributes reached through the tree, such as its annotations/modifiers); + // concretely, a MemberDef's position should cover its annotations (scala/bug#11060) + // Workaround, which explicitly includes annotations of traversed trees, can be removed when TODO above is resolved: + val accum = new WrappingPosAccumulator() + def loop(trees: List[Tree]): Position = trees match { + case head :: rest => accum(head) - } - accum.result(default, focus) + head match { + case md: MemberDef => loop(md.mods.annotations ::: rest) + case _ => loop(rest) + } + case _ => accum.result(default, focus) } + loop(trees) } 
private final class WrappingPosAccumulator extends (Tree => Boolean) { private[this] var min: Int = _ @@ -66,11 +73,19 @@ trait Positions extends api.Positions { self: SymbolTable => max = Int.MinValue } reset() - def result(default: Position, focus: Boolean): Position = { - if (min > max) - if (focus) default.focus else default //there are no ranges - else Position.range(default.source, min, default.point, max) - } + def result(default: Position, focus: Boolean): Position = + if (min > max) // there are no ranges + if (focus) default.focus else default + else { + val point = default.pointOrElse(min) + if (point < min || point > max) { + val start = Math.min(min, point) + val end = Math.max(max, point) + Position.range(default.source, start = start, point = point, end = end) + } + else + Position.range(default.source, start = min, point = point, end = max) + } override def apply(v1: Tree): Boolean = { val pos = v1.pos if (pos.isRange) { @@ -82,8 +97,8 @@ trait Positions extends api.Positions { self: SymbolTable => } /** A position that wraps the non-empty set of trees. - * The point of the wrapping position is the point of the first trees' position. - * If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees + * The point of the wrapping position is the point of the first tree's position. + * If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees. * Otherwise returns a synthetic offset position to point. */ def wrappingPos(trees: List[Tree]): Position = { @@ -97,9 +112,8 @@ trait Positions extends api.Positions { self: SymbolTable => * shortening the range, assigning TransparentPositions * to some of the nodes in `tree` or focusing on the position. 
*/ - def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, focus = true) } - def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) { - if (useOffsetPositions) return + def ensureNonOverlapping(tree: Tree, others: List[Tree]): Unit = ensureNonOverlapping(tree, others, focus = true) + def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean): Unit = if (!useOffsetPositions) { def isOverlapping(pos: Position) = pos.isRange && (others exists (pos overlaps _.pos)) @@ -119,53 +133,52 @@ trait Positions extends api.Positions { self: SymbolTable => if (useOffsetPositions) Position.offset(source, point) else Position.range(source, start, point, end) - abstract class ChildSolidDescendantsCollector extends Traverser { // don't traverse annotations override def traverseModifiers(mods: Modifiers): Unit = () override def traverse(tree: Tree): Unit = - if (tree ne EmptyTree) { + if (tree ne EmptyTree) if (tree.pos.isTransparent) super.traverse(tree) - else { - traverseSolidChild(tree) - } - } - def traverseSolidChild(t: Tree): Unit - def apply(t: Tree): Unit = super.traverse(t) - } + else traverseSolidChild(tree) - private[this] def reportTree(prefix: String, tree: Tree) { - val source = if (tree.pos.isDefined) tree.pos.source else "" - inform("== " + prefix + " tree [" + tree.id + "] of type " + tree.productPrefix + " at " + tree.pos.show + source) - inform("") - inform(treeStatus(tree)) - inform("") - } + def traverseSolidChild(t: Tree): Unit - private[this] def positionError(topTree: Tree, msg: String)(body: => Unit) { - inform("======= Position error\n" + msg) - body - inform("\nWhile validating #" + topTree.id) - inform(treeStatus(topTree)) - inform("\nChildren:") - topTree.children foreach (t => inform(" " + treeStatus(t, topTree))) - inform("=======") - throw new ValidateException(msg) + def apply(t: Tree): Unit = super.traverse(t) } private val posStartOrdering: Ordering[Tree] = new Ordering[Tree] { 
override def compare(x: Tree, y: Tree): Int = { - @inline def posOf(t: Tree): Int = { + def posOf(t: Tree): Int = { val pos = t.pos if (pos eq NoPosition) Int.MinValue else pos.start } Integer.compare(posOf(x), posOf(y)) } } + def validatePositions(tree: Tree): Unit = if (!useOffsetPositions) { + def reportTree(prefix: String, tree: Tree): Unit = { + val source = if (tree.pos.isDefined) tree.pos.source else "" + inform("== " + prefix + " tree [" + tree.id + "] of type " + tree.productPrefix + " at " + tree.pos.show + source) + inform("") + inform(treeStatus(tree)) + inform("") + } + + def positionError(topTree: Tree, msg: String)(body: => Unit): Unit = { + inform("======= Position error\n" + msg) + body + inform("\nWhile validating #" + topTree.id) + inform(treeStatus(topTree)) + inform("\nChildren:") + topTree.children foreach (t => inform(" " + treeStatus(t, topTree))) + inform("=======") + throw new ValidateException(msg) + } + object worker { - val trace = settings.Yposdebug && settings.verbose + val trace = settings.Yposdebug.value && settings.verbose.value val topTree = tree object solidChildrenCollector extends ChildSolidDescendantsCollector { @@ -192,7 +205,7 @@ trait Positions extends api.Positions { self: SymbolTable => } //we dont care about zeroing the array - def clear() {size = 0} + def clear(): Unit = {size = 0} def traverseSolidChild(t: Tree): Unit = { if (size == childSolidDescendants.length) { @@ -204,73 +217,70 @@ trait Positions extends api.Positions { self: SymbolTable => } } - def loop(tree: Tree, encltree: Tree) { - if (!tree.isEmpty && tree.canHaveAttrs) { - val treePos = tree.pos - if (trace) - inform("[%10s] %s".format("validate", treeStatus(tree, encltree))) - - if (!treePos.isDefined) - positionError(topTree, "Unpositioned tree #" + tree.id) { - inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) - inform("%15s %s".format("enclosing", treeStatus(encltree))) - encltree.children foreach (t => inform("%15s 
%s".format("sibling", treeStatus(t, encltree)))) - } + def loop(tree: Tree, encltree: Tree): Unit = if (!tree.isEmpty && tree.canHaveAttrs) { + val treePos = tree.pos + if (trace) + inform(f"[${"validate"}%10s] ${treeStatus(tree, encltree)}") - solidChildrenCollector(tree) - val numChildren = solidChildrenCollector.collectedSize + if (!treePos.isDefined) + positionError(topTree, s"Unpositioned tree #${tree.id}") { + inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) + inform("%15s %s".format("enclosing", treeStatus(encltree))) + encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) + } - if (treePos.isRange) { - val enclPos = encltree.pos - if (!enclPos.isRange) - positionError(topTree, "Synthetic tree [" + encltree.id + "] contains nonsynthetic tree [" + tree.id + "]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } - if (!(enclPos includes treePos)) - positionError(topTree, "Enclosing tree [" + encltree.id + "] does not include tree [" + tree.id + "]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } + solidChildrenCollector(tree) + val numChildren = solidChildrenCollector.collectedSize - if (numChildren > 1) { - val childSolidDescendants = solidChildrenCollector.sortedArray - var t1 = childSolidDescendants(0) - var t1Pos = t1.pos - var i = 1 - while (i < numChildren) { - val t2 = childSolidDescendants(i) - val t2Pos = t2.pos - if (t1Pos.overlaps(t2Pos)) { - positionError(topTree, "Overlapping trees") { - reportTree("Ancestor", tree) - reportTree("First overlapping", t1) - reportTree("Second overlapping", t2) - } - } - //why only for range - if (t2Pos.isRange) { - t1 = t2 - t1Pos = t2Pos + if (treePos.isRange) { + val enclPos = encltree.pos + if (!enclPos.isRange) + positionError(topTree, "Synthetic tree [" + encltree.id + "] contains nonsynthetic tree [" + tree.id + "]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + if 
(!enclPos.includes(treePos)) + positionError(topTree, "Enclosing tree [" + encltree.id + "] does not include tree [" + tree.id + "]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + + if (numChildren > 1) { + val childSolidDescendants = solidChildrenCollector.sortedArray + var t1 = childSolidDescendants(0) + var t1Pos = t1.pos + var i = 1 + while (i < numChildren) { + val t2 = childSolidDescendants(i) + val t2Pos = t2.pos + if (t1Pos.overlaps(t2Pos)) { + positionError(topTree, "Overlapping trees") { + reportTree("Ancestor", tree) + reportTree("First overlapping", t1) + reportTree("Second overlapping", t2) } - i += 1 } + if (t2Pos.isRange) { // only ranges overlap, so check ranges pairwise + t1 = t2 + t1Pos = t2Pos + } + i += 1 } } - if (numChildren > 0) { - if (numChildren == 1) { - val first = solidChildrenCollector.child(0) - solidChildrenCollector.clear() - loop(first, tree) - } else { - val snap = solidChildrenCollector.borrowArray - var i = 0 - while (i < numChildren) { - loop(snap(i), tree) - i += 1 - } - solidChildrenCollector.spareArray(snap) + } + if (numChildren > 0) { + if (numChildren == 1) { + val first = solidChildrenCollector.child(0) + solidChildrenCollector.clear() + loop(first, tree) + } else { + val snap = solidChildrenCollector.borrowArray + var i = 0 + while (i < numChildren) { + loop(snap(i), tree) + i += 1 } + solidChildrenCollector.spareArray(snap) } } } @@ -292,14 +302,14 @@ trait Positions extends api.Positions { self: SymbolTable => inform("error while set children pos "+pos+" of "+parent.children) throw ex } - private val setChildrenPosAccumulator = new ReusableInstance[SetChildrenPosAccumulator](() => new SetChildrenPosAccumulator, isCompilerUniverse) + private val setChildrenPosAccumulator = ReusableInstance[SetChildrenPosAccumulator](new SetChildrenPosAccumulator, enabled = isCompilerUniverse) private final class SetChildrenPosAccumulator extends (Tree => Boolean) { private[this] val wrappingPosAccumulator = 
new WrappingPosAccumulator private[this] var pos: Position = _ def set(pos: Position, parent: Tree): Unit = { wrappingPosAccumulator.reset() this.pos = pos - try parent.foreachChild(this) + try parent.foreachChild(apply) finally { this.pos = null } @@ -307,7 +317,7 @@ trait Positions extends api.Positions { self: SymbolTable => def apply(tree: Tree): Boolean = { wrappingPosAccumulator.reset() if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { - tree.foreachChild(this) + tree.foreachChild(apply) tree.foreachChild(wrappingPosAccumulator) val wrappingPos = wrappingPosAccumulator.result(pos, focus = true) tree setPos wrappingPos @@ -331,7 +341,7 @@ trait Positions extends api.Positions { self: SymbolTable => this.last } protected def isEligible(t: Tree) = !t.pos.isTransparent - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { t match { case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) => traverse(tt.original) @@ -339,7 +349,8 @@ trait Positions extends api.Positions { self: SymbolTable => if (t.pos includes pos) { if (isEligible(t)) last = t super.traverse(t) - } else t match { + } + t match { case mdef: MemberDef => val annTrees = mdef.mods.annotations match { case Nil if mdef.symbol != null => @@ -359,18 +370,18 @@ trait Positions extends api.Positions { self: SymbolTable => override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null } - trait PosAssigner extends Traverser { + trait PosAssigner extends InternalTraverser { var pos: Position } protected[this] lazy val posAssigner: PosAssigner = new DefaultPosAssigner protected class DefaultPosAssigner extends PosAssigner { var pos: Position = _ - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { if (!t.canHaveAttrs) () else if (t.pos == NoPosition) { t.setPos(pos) - super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if? + t.traverse(this) // TODO: bug? 
shouldn't the traverse be outside of the if? // @PP: it's pruning whenever it encounters a node with a // position, which I interpret to mean that (in the author's // mind at least) either the children of a positioned node will @@ -389,23 +400,20 @@ trait Positions extends api.Positions { self: SymbolTable => /** Position a tree. * This means: Set position of a node and position all its unpositioned children. */ - def atPos[T <: Tree](pos: Position)(tree: T): T = { + def atPos[T <: Tree](pos: Position)(tree: T): tree.type = { if (useOffsetPositions || !pos.isOpaqueRange) { posAssigner.pos = pos posAssigner.traverse(tree) - tree } - else { - if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { - tree.setPos(pos) - tree.onlyChild match { - case EmptyTree => - setChildrenPos(pos, tree) - case only => - atPos(pos)(only) - } + else if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { + tree.setPos(pos) + tree.onlyChild match { + case EmptyTree => + setChildrenPos(pos, tree) + case only => + atPos(pos)(only) } - tree } + tree } } diff --git a/src/reflect/scala/reflect/internal/Precedence.scala b/src/reflect/scala/reflect/internal/Precedence.scala index 8395e4f885f4..c5e8d5432a3d 100644 --- a/src/reflect/scala/reflect/internal/Precedence.scala +++ b/src/reflect/scala/reflect/internal/Precedence.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,26 +10,23 @@ * additional information regarding copyright ownership. 
*/ -package scala -package reflect -package internal +package scala.reflect.internal import scala.annotation.switch -import Chars._ +import Chars.{CodePoint, isOperatorPart, isScalaLetter} final class Precedence private (val level: Int) extends AnyVal with Ordered[Precedence] { - def compare(that: Precedence): Int = level compare that.level + def compare(that: Precedence): Int = level.compare(that.level) override def toString = s"Precedence($level)" } - object Precedence extends (Int => Precedence) { - private val ErrorName = "" + private[this] val ErrorName = "" private def isAssignmentOp(name: String) = name match { case "!=" | "<=" | ">=" | "" => false - case _ => name.last == '=' && name.head != '=' && isOperatorPart(name.head) + case _ => name.last == '=' && name.head != '=' && isOperatorPart(name.codePointAt(0)) } - private def firstChar(ch: Char): Precedence = apply((ch: @switch) match { + private def firstChar(c: CodePoint): Precedence = apply((c: @switch) match { case '|' => 2 case '^' => 3 case '&' => 4 @@ -38,13 +35,13 @@ object Precedence extends (Int => Precedence) { case ':' => 7 case '+' | '-' => 8 case '*' | '/' | '%' => 9 - case _ => if (isScalaLetter(ch)) 1 else 10 + case _ => if (isScalaLetter(c)) 1 else 10 }) def apply(level: Int): Precedence = new Precedence(level) def apply(name: String): Precedence = name match { case "" | ErrorName => this(-1) case _ if isAssignmentOp(name) => this(0) - case _ => firstChar(name charAt 0) + case _ => firstChar(name.codePointAt(0)) } } diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index fbbe192fe782..3c46bd942237 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,9 +16,12 @@ package scala package reflect package internal -import java.io.{ OutputStream, PrintWriter, Writer } +import java.io.{OutputStream, PrintWriter, Writer} + import Flags._ -import scala.compat.Platform.EOL +import java.lang.System.{lineSeparator => EOL} + +import scala.annotation.{nowarn, tailrec} trait Printers extends api.Printers { self: SymbolTable => @@ -28,7 +31,7 @@ trait Printers extends api.Printers { self: SymbolTable => def quotedName(name: Name, decode: Boolean): String = { val s = if (decode) name.decode else name.toString val term = name.toTermName - if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s + if (nme.keywords(term) && term != nme.USCOREkw) s"`$s`" else s } def quotedName(name: Name): String = quotedName(name, decode = false) @@ -65,6 +68,11 @@ trait Printers extends api.Printers { self: SymbolTable => } } + @nowarn("""cat=deprecation&origin=scala\.reflect\.internal\.Printers\.TreePrinter""") + final type InternalTreePrinter = TreePrinter + + @nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use InternalTreePrinter instead", since = "2.13.4") class TreePrinter(out: PrintWriter) extends super.TreePrinter { protected var indentMargin = 0 protected val indentStep = 2 @@ -80,6 +88,10 @@ trait Printers extends api.Printers { self: SymbolTable => def indent() = indentMargin += indentStep def undent() = indentMargin -= indentStep + protected def checkForBlank(cond: Boolean) = if (cond) " " else "" + protected def blankForOperatorName(name: Name) = checkForBlank(name.isOperatorName) + protected def blankForName(name: Name) = checkForBlank(name.isOperatorName || name.endsWith("_")) + def printPosition(tree: Tree) = if (printPositions) comment(print(tree.pos.show)) @@ -97,7 +109,8 @@ trait Printers extends api.Printers { self: SymbolTable => out.write(indentString, 0, indentMargin) } - def 
printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit): Unit = + @tailrec + final def printSeq[A](ls: List[A])(printelem: A => Unit)(printsep: => Unit): Unit = ls match { case List() => case List(x) => printelem(x) @@ -124,7 +137,7 @@ trait Printers extends api.Printers { self: SymbolTable => } else if (t.mods.hasFlag(COVARIANT)) { print("+") } - printParam(t) + printTParam(t) }{print(", ")}; print("]") } @@ -155,20 +168,20 @@ trait Printers extends api.Printers { self: SymbolTable => def printValueParams(ts: List[ValDef], inParentheses: Boolean = true): Unit = parenthesize(inParentheses){ printImplicitInParamsList(ts) - printSeq(ts){printParam}{print(", ")} + printSeq(ts){printVParam}{print(", ")} } - def printParam(tree: Tree) = - tree match { - case vd @ ValDef(mods, name, tp, rhs) => - printPosition(tree) - printAnnotations(vd) - print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs) - case TypeDef(mods, name, tparams, rhs) => - printPosition(tree) - print(symName(tree, name)) - printTypeParams(tparams); print(rhs) - } + def printVParam(vd: ValDef) = { + printPosition(vd) + printAnnotations(vd) + print(symName(vd, vd.name)); printOpt(": ", vd.tpt); printOpt(" = ", vd.rhs) + } + + def printTParam(td: TypeDef) = { + printPosition(td) + print(symName(td, td.name)) + printTypeParams(td.tparams); print(td.rhs) + } def printBlock(tree: Tree) = tree match { @@ -209,8 +222,8 @@ trait Printers extends api.Printers { self: SymbolTable => annots foreach (annot => print(s"@$annot ")) } - private var currentOwner: Symbol = NoSymbol - private var selectorType: Type = NoType + private[this] var currentOwner: Symbol = NoSymbol + private[this] var selectorType: Type = NoType protected def printPackageDef(tree: PackageDef, separator: String) = { val PackageDef(packaged, stats) = tree @@ -219,7 +232,7 @@ trait Printers extends api.Printers { self: SymbolTable => } protected def printValDef(tree: ValDef, resultName: => String)(printTypeSignature: => 
Unit)(printRhs: => Unit) = { - val ValDef(mods, name, tp, rhs) = tree + val ValDef(mods, _, _, _) = tree printAnnotations(tree) printModifiers(tree, mods) print(if (mods.isMutable) "var " else "val ", resultName) @@ -228,7 +241,7 @@ trait Printers extends api.Printers { self: SymbolTable => } protected def printDefDef(tree: DefDef, resultName: => String)(printTypeSignature: => Unit)(printRhs: => Unit) = { - val DefDef(mods, name, tparams, vparamss, tp, rhs) = tree + val DefDef(mods, _, tparams, vparamss, _, _) = tree printAnnotations(tree) printModifiers(tree, mods) print("def " + resultName) @@ -239,12 +252,12 @@ trait Printers extends api.Printers { self: SymbolTable => } protected def printTypeDef(tree: TypeDef, resultName: => String) = { - val TypeDef(mods, name, tparams, rhs) = tree + val TypeDef(mods, _, tparams, rhs) = tree if (mods hasFlag (PARAM | DEFERRED)) { printAnnotations(tree) printModifiers(tree, mods) print("type ") - printParam(tree) + printTParam(tree) } else { printAnnotations(tree) printModifiers(tree, mods) @@ -255,21 +268,19 @@ trait Printers extends api.Printers { self: SymbolTable => } protected def printImport(tree: Import, resSelect: => String) = { - val Import(expr, selectors) = tree - // Is this selector renaming a name (i.e, {name1 => name2}) - def isNotRename(s: ImportSelector): Boolean = - s.name == nme.WILDCARD || s.name == s.rename + val Import(_, selectors) = tree def selectorToString(s: ImportSelector): String = { - val from = quotedName(s.name) - if (isNotRename(s)) from - else from + "=>" + quotedName(s.rename) + def selectorName(n: Name): String = if (s.isWildcard) nme.WILDCARD.decoded else quotedName(n) + if (s.isGiven) s.rename.decoded + else if (s.isRename || s.isMask) s"${selectorName(s.name)}=>${selectorName(s.rename)}" + else selectorName(s.name) } print("import ", resSelect, ".") selectors match { case List(s) => - // If there is just one selector and it is not renaming a name, no braces are needed - if (isNotRename(s)) 
print(selectorToString(s)) + // If there is just one selector and it is not renaming or masking a name, no braces are needed + if (!s.isRename && !s.isMask) print(selectorToString(s)) else print("{", selectorToString(s), "}") // If there is more than one selector braces are always needed case many => @@ -280,17 +291,13 @@ trait Printers extends api.Printers { self: SymbolTable => protected def printCaseDef(tree: CaseDef) = { val CaseDef(pat, guard, body) = tree print("case ") - def patConstr(pat: Tree): Tree = pat match { - case Apply(fn, args) => patConstr(fn) - case _ => pat - } - - print(pat); printOpt(" if ", guard) + print(pat) + printOpt(" if ", guard) print(" => ", body) } protected def printFunction(tree: Function)(printValueParams: => Unit) = { - val Function(vparams, body) = tree + val Function(_, body) = tree print("(") printValueParams print(" => ", body, ")") @@ -306,10 +313,15 @@ trait Printers extends api.Printers { self: SymbolTable => } protected def printSuper(tree: Super, resultName: => String, checkSymbol: Boolean = true) = { - val Super(This(qual), mix) = tree + val Super(This(qual), mix) = tree: @unchecked if (qual.nonEmpty || (checkSymbol && tree.symbol != NoSymbol)) print(resultName + ".") print("super") if (mix.nonEmpty) print(s"[$mix]") + else if (settings.isDebug) tree.tpe match { + case st: SuperType => print(s"[${st.supertpe}]") + case tp: Type => print(s"[$tp]") + case _ => + } } protected def printThis(tree: This, resultName: => String) = { @@ -352,7 +364,11 @@ trait Printers extends api.Printers { self: SymbolTable => } case dd @ DefDef(mods, name, tparams, vparamss, tp, rhs) => - printDefDef(dd, symName(tree, name))(printOpt(": ", tp))(printOpt(" = ", rhs)) + printDefDef(dd, symName(tree, name)) { + // place space after symbolic def name (def !: Unit does not compile) + if (tparams.isEmpty && vparamss.isEmpty) printOpt(blankForName(name.encodedName) + ": ", tp) + else printOpt(": ", tp) + } (printOpt(" = ", rhs)) case td @ 
TypeDef(mods, name, tparams, rhs) => printTypeDef(td, symName(tree, name)) @@ -412,7 +428,7 @@ trait Printers extends api.Printers { self: SymbolTable => case Assign(lhs, rhs) => print(lhs, " = ", rhs) - case AssignOrNamedArg(lhs, rhs) => + case NamedArg(lhs, rhs) => print(lhs, " = ", rhs) case If(cond, thenp, elsep) => @@ -484,7 +500,7 @@ trait Printers extends api.Printers { self: SymbolTable => } case an @ Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) => - def printAnnot() { + def printAnnot(): Unit = { print("@", tpt) if (args.nonEmpty) printRow(args, "(", ",", ")") @@ -542,7 +558,7 @@ trait Printers extends api.Printers { self: SymbolTable => } // it's the printer for AST-based code generation - class CodePrinter(out: PrintWriter, printRootPkg: Boolean) extends TreePrinter(out) { + class CodePrinter(out: PrintWriter, printRootPkg: Boolean) extends InternalTreePrinter(out) { protected val parentsStack = scala.collection.mutable.Stack[Tree]() protected def currentTree = if (parentsStack.nonEmpty) Some(parentsStack.top) else None @@ -558,7 +574,7 @@ trait Printers extends api.Printers { self: SymbolTable => def addBackquotes(s: String) = if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch) || isDot(ch)) || - (name.isOperatorName && decName.exists(isOperatorPart) && decName.exists(isScalaLetter) && !decName.contains(bslash)))) + ((name.isOperatorName || decName.exists(isOperatorPart)) && decName.exists(isScalaLetter) && !decName.contains(bslash)))) s"`$s`" else s if (name == nme.CONSTRUCTOR) "this" @@ -589,10 +605,6 @@ trait Printers extends api.Printers { self: SymbolTable => } } - protected def checkForBlank(cond: Boolean) = if (cond) " " else "" - protected def blankForOperatorName(name: Name) = checkForBlank(name.isOperatorName) - protected def blankForName(name: Name) = checkForBlank(name.isOperatorName || name.endsWith("_")) - protected def resolveSelect(t: Tree): String = { t match { // case for: 1) (if (a) b else 
c).meth1.meth2 or 2) 1 + 5 should be represented as (1).+(5) @@ -626,14 +638,17 @@ trait Printers extends api.Printers { self: SymbolTable => val defaultClasses = List(tpnme.AnyRef, tpnme.Object) val defaultTraitsForCase = List(tpnme.Product, tpnme.Serializable) protected def removeDefaultTypesFromList(trees: List[Tree])(classesToRemove: List[Name] = defaultClasses)(traitsToRemove: List[Name]) = { + @tailrec def removeDefaultTraitsFromList(trees: List[Tree], traitsToRemove: List[Name]): List[Tree] = trees match { - case Nil => trees case init :+ last => last match { - case Select(Ident(sc), name) if traitsToRemove.contains(name) && sc == nme.scala_ => + case Select(Select(Ident(nme.scala_), nme.PACKAGE), name) if traitsToRemove.contains(name) => + removeDefaultTraitsFromList(init, traitsToRemove) + case Select(Ident(nme.scala_), name) if traitsToRemove.contains(name) => removeDefaultTraitsFromList(init, traitsToRemove) case _ => trees } + case _ => trees } removeDefaultTraitsFromList(removeDefaultClassesFromList(trees, classesToRemove), traitsToRemove) @@ -685,35 +700,32 @@ trait Printers extends api.Printers { self: SymbolTable => List(IMPLICIT, CASE, LAZY, SEALED).foreach{flag => if (mods.hasFlag(flag)) print(s"${mods.flagBitsToString(flag)} ")} } - def printParam(tree: Tree, primaryCtorParam: Boolean): Unit = - tree match { - case vd @ ValDef(mods, name, tp, rhs) => - printPosition(tree) - printAnnotations(vd) - val mutableOrOverride = mods.isOverride || mods.isMutable - val hideCtorMods = mods.isParamAccessor && mods.isPrivateLocal && !mutableOrOverride - val hideCaseCtorMods = mods.isCaseAccessor && mods.isPublic && !mutableOrOverride - - if (primaryCtorParam && !(hideCtorMods || hideCaseCtorMods)) { - printModifiers(mods, primaryCtorParam) - print(if (mods.isMutable) "var " else "val ") - } - print(printedName(name), blankForName(name)) - printOpt(": ", tp) - printOpt(" = ", rhs) - case TypeDef(_, name, tparams, rhs) => - printPosition(tree) - 
print(printedName(name)) - printTypeParams(tparams) - print(rhs) - case _ => - super.printParam(tree) + def printVParam(vd: ValDef, primaryCtorParam: Boolean): Unit = { + printPosition(vd) + printAnnotations(vd) + val mutableOrOverride = vd.mods.isOverride || vd.mods.isMutable + val hideCtorMods = vd.mods.isParamAccessor && vd.mods.isPrivateLocal && !mutableOrOverride + val hideCaseCtorMods = vd.mods.isCaseAccessor && vd.mods.isPublic && !mutableOrOverride + + if (primaryCtorParam && !(hideCtorMods || hideCaseCtorMods)) { + printModifiers(vd.mods, primaryCtorParam) + print(if (vd.mods.isMutable) "var " else "val ") } + print(printedName(vd.name), blankForName(vd.name)) + printOpt(": ", vd.tpt) + printOpt(" = ", vd.rhs) + } - override def printParam(tree: Tree): Unit = { - printParam(tree, primaryCtorParam = false) + def printTParam(td: TypeDef, primaryCtorParam: Boolean): Unit = { + printPosition(td) + print(printedName(td.name)) + printTypeParams(td.tparams) + print(td.rhs) } + override def printVParam(vd: ValDef): Unit = printVParam(vd, primaryCtorParam = false) + override def printTParam(td: TypeDef): Unit = printTParam(td, primaryCtorParam = false) + protected def printArgss(argss: List[List[Tree]]) = argss foreach {x: List[Tree] => if (!(x.isEmpty && argss.size == 1)) printRow(x, "(", ", ", ")")} @@ -727,11 +739,10 @@ trait Printers extends api.Printers { self: SymbolTable => case treeInfo.Applied(core, _, argss) => print("@") core match { - case Select(New(tree), _) => print(tree) + case Select(New(ann), _) => print(ann) case _ => } printArgss(argss) - case _ => super.printTree(tree) } } @@ -758,7 +769,7 @@ trait Printers extends api.Printers { self: SymbolTable => print("trait ", printedName(name)) printTypeParams(tparams) - val build.SyntacticTraitDef(_, _, _, _, parents, _, _) = tree + val build.SyntacticTraitDef(_, _, _, _, parents, _, _) = tree: @unchecked parents // classes } else { @@ -766,26 +777,30 @@ trait Printers extends api.Printers { self: 
SymbolTable => print("class ", printedName(name)) printTypeParams(tparams) - val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl - - // constructor's modifier - if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { - print(" ") - printModifiers(ctorMods, primaryCtorParam = false) - } + cl match { + case build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) => + // constructor's modifier + if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { + print(" ") + printModifiers(ctorMods, primaryCtorParam = false) + } - def printConstrParams(ts: List[ValDef]): Unit = { - parenthesize() { - printImplicitInParamsList(ts) - printSeq(ts)(printParam(_, primaryCtorParam = true))(print(", ")) - } - } - // constructor's params processing (don't print single empty constructor param list) - vparamss match { - case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) => - case _ => vparamss foreach printConstrParams + def printConstrParams(ts: List[ValDef]): Unit = { + parenthesize() { + printImplicitInParamsList(ts) + printSeq(ts)(printVParam(_, primaryCtorParam = true))(print(", ")) + } + } + // constructor's params processing (don't print single empty constructor param list) + vparamss match { + case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) => + case _ => vparamss foreach printConstrParams + } + parents + case _ => + // Can get here with erroneous code, like `{@deprecatedName ` + Nil } - parents } // get trees without default classes and traits (when they are last) @@ -808,7 +823,7 @@ trait Printers extends api.Printers { self: SymbolTable => case md @ ModuleDef(mods, name, impl) => printAnnotations(md) printModifiers(tree, mods) - val Template(parents, self, methods) = impl + val Template(parents, _, _) = impl val parWithoutAnyRef = removeDefaultClassesFromList(parents) print("object " + printedName(name), if (parWithoutAnyRef.nonEmpty) 
" extends " else "", impl) @@ -833,12 +848,12 @@ trait Printers extends api.Printers { self: SymbolTable => case LabelDef(name, params, rhs) => if (name.startsWith(nme.WHILE_PREFIX)) { - val If(cond, thenp, elsep) = rhs + val If(cond, thenp, _) = rhs: @unchecked print("while (", cond, ") ") - val Block(list, wh) = thenp + val Block(list, _) = thenp: @unchecked printColumn(list, "", ";", "") } else if (name.startsWith(nme.DO_WHILE_PREFIX)) { - val Block(bodyList, ifCond @ If(cond, thenp, elsep)) = rhs + val Block(bodyList, If(cond, _, _)) = rhs: @unchecked print("do ") printColumn(bodyList, "", ";", "") print(" while (", cond, ") ") @@ -855,7 +870,7 @@ trait Printers extends api.Printers { self: SymbolTable => val printedParents = currentParent map { case _: CompoundTypeTree => parents - case ClassDef(mods, name, _, _) if mods.isCase => removeDefaultTypesFromList(parents)()(List(tpnme.Product, tpnme.Serializable)) + case ClassDef(mods, name, _, _) if mods.isCase => removeDefaultTypesFromList(parents)()(defaultTraitsForCase) case _ => removeDefaultClassesFromList(parents) } getOrElse (parents) @@ -878,7 +893,7 @@ trait Printers extends api.Printers { self: SymbolTable => } if (printedParents.nonEmpty) { - val (clParent :: traits) = printedParents + val (clParent :: traits) = printedParents: @unchecked print(clParent) val constrArgss = ap match { @@ -988,7 +1003,7 @@ trait Printers extends api.Printers { self: SymbolTable => tree match { // processing methods ending on colons (x \: list) case Apply(Block(l1 @ List(sVD: ValDef), a1 @ Apply(Select(_, methodName), l2 @ List(Ident(iVDName)))), l3) - if sVD.mods.isSynthetic && treeInfo.isLeftAssoc(methodName) && sVD.name == iVDName => + if sVD.mods.isSynthetic && nme.isLeftAssoc(methodName) && sVD.name == iVDName => val printBlock = Block(l1, Apply(a1, l3)) print(printBlock) case Apply(tree1, _) if (needsParentheses(tree1)(insideAnnotated = false)) => @@ -1023,6 +1038,7 @@ trait Printers extends api.Printers { self: 
SymbolTable => print(qual) case Select(qual, name) => + @tailrec def checkRootPackage(tr: Tree): Boolean = (currentParent match { //check that Select is not for package def name case Some(_: PackageDef) => false @@ -1113,24 +1129,24 @@ trait Printers extends api.Printers { self: SymbolTable => } /** Hook for extensions */ - def xprintTree(treePrinter: TreePrinter, tree: Tree) = + def xprintTree(treePrinter: InternalTreePrinter, tree: Tree) = treePrinter.print(tree.productPrefix+tree.productIterator.mkString("(", ", ", ")")) - def newCodePrinter(writer: PrintWriter, tree: Tree, printRootPkg: Boolean): TreePrinter = + def newCodePrinter(writer: PrintWriter, tree: Tree, printRootPkg: Boolean): InternalTreePrinter = new CodePrinter(writer, printRootPkg) - def newTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) - def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream)) - def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter)) + def newTreePrinter(writer: PrintWriter): InternalTreePrinter = new InternalTreePrinter(writer) + def newTreePrinter(stream: OutputStream): InternalTreePrinter = newTreePrinter(new PrintWriter(stream)) + def newTreePrinter(): InternalTreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter)) /** A writer that writes to the current Console and * is sensitive to replacement of the Console's * output stream. 
*/ object ConsoleWriter extends Writer { - override def write(str: String) { Console.print(str) } + override def write(str: String): Unit = { Console.print(str) } - def write(cbuf: Array[Char], off: Int, len: Int) { + def write(cbuf: Array[Char], off: Int, len: Int): Unit = { write(new String(cbuf, off, len)) } @@ -1144,10 +1160,10 @@ trait Printers extends api.Printers { self: SymbolTable => private class Footnotes { import scala.collection.mutable.{Map, WeakHashMap, SortedSet} - private val index = Map[Class[_], WeakHashMap[Any, Int]]() + private[this] val index = Map[Class[_], WeakHashMap[Any, Int]]() private def classIndex[T: ClassTag] = index.getOrElseUpdate(classTag[T].runtimeClass, WeakHashMap[Any, Int]()) - private val counters = Map[Class[_], Int]() + private[this] val counters = Map[Class[_], Int]() private def nextCounter[T: ClassTag] = { val clazz = classTag[T].runtimeClass counters.getOrElseUpdate(clazz, 0) @@ -1155,7 +1171,7 @@ trait Printers extends api.Printers { self: SymbolTable => counters(clazz) } - private val footnotes = Map[Class[_], SortedSet[Int]]() + private[this] val footnotes = Map[Class[_], SortedSet[Int]]() private def classFootnotes[T: ClassTag] = footnotes.getOrElseUpdate(classTag[T].runtimeClass, SortedSet[Int]()) def put[T: ClassTag](any: T): Int = { @@ -1182,10 +1198,10 @@ trait Printers extends api.Printers { self: SymbolTable => // emits more or less verbatim representation of the provided tree class RawTreePrinter(out: PrintWriter) extends super.TreePrinter { - private var depth = 0 - private var printTypesInFootnotes = true - private var printingFootnotes = false - private val footnotes = new Footnotes() + private[this] var depth = 0 + private[this] var printTypesInFootnotes = true + private[this] var printingFootnotes = false + private[this] val footnotes = new Footnotes() def print(args: Any*): Unit = { // don't print type footnotes if the argument is a mere type diff --git 
a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala index 841baa3b1109..598c4a0c90bf 100644 --- a/src/reflect/scala/reflect/internal/PrivateWithin.scala +++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,7 +24,7 @@ trait PrivateWithin { propagatePackageBoundary(JavaAccFlags(c), syms: _*) def propagatePackageBoundary(m: jMember, syms: Symbol*): Unit = propagatePackageBoundary(JavaAccFlags(m), syms: _*) - def propagatePackageBoundary(jflags: JavaAccFlags, syms: Symbol*) { + def propagatePackageBoundary(jflags: JavaAccFlags, syms: Symbol*): Unit = { if (jflags.hasPackageAccessBoundary) syms foreach setPackageAccessBoundary } diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 9fc4338c3085..f480d229c266 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -60,7 +60,7 @@ trait ReificationSupport { self: SymbolTable => def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S = sym.setAnnotations(annots) - def setInfo[S <: Symbol](sym: S, tpe: Type): S = + def setInfo[S <: Symbol](sym: S, tpe: Type): sym.type = sym.setInfo(tpe).markAllCompleted() def mkThis(sym: Symbol): Tree = self.This(sym) @@ -114,7 +114,7 @@ trait ReificationSupport { self: SymbolTable => } def mkAnnotation(tree: Tree): Tree = tree match { - case SyntacticNew(Nil, SyntacticApplied(SyntacticAppliedType(_, _), _) :: Nil, noSelfType, Nil) => + case SyntacticNew(Nil, SyntacticApplied(SyntacticAppliedType(_, _), _) :: Nil, `noSelfType`, Nil) => tree case _ => throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation." + @@ -124,7 +124,7 @@ trait ReificationSupport { self: SymbolTable => def mkAnnotation(trees: List[Tree]): List[Tree] = trees.map(mkAnnotation) def mkParam(argss: List[List[Tree]], extraFlags: FlagSet = NoFlags, excludeFlags: FlagSet = DEFERRED): List[List[ValDef]] = - argss.map { args => args.map { mkParam(_, extraFlags, excludeFlags) } } + argss.map(_.map(mkParam(_, extraFlags, excludeFlags))) def mkParam(tree: Tree, extraFlags: FlagSet, excludeFlags: FlagSet): ValDef = tree match { case Typed(Ident(name: TermName), tpt) => @@ -295,7 +295,7 @@ trait ReificationSupport { self: SymbolTable => else { val (rawEdefs, rest) = tbody.span(treeInfo.isEarlyDef) val (gvdefs, etdefs) = rawEdefs.partition(treeInfo.isEarlyValDef) - val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest)) + val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest)): @unchecked val evdefs = gvdefs.zip(lvdefs).map { // TODO: in traits, early val defs are defdefs case (gvdef @ ValDef(_, _, tpt: TypeTree, _), ValDef(_, _, _, rhs)) => @@ -314,7 +314,7 @@ trait ReificationSupport { self: SymbolTable => case other => other } // undo 
flag modifications by merging flag info from constructor args and fieldDefs - val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods }.toMap + val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods case x => throw new MatchError(x) }.toMap def ctorArgsCorrespondToFields = vparamssRestoredImplicits.flatten.forall { vd => modsMap.contains(vd.name) } if (!ctorArgsCorrespondToFields) None else { @@ -336,16 +336,20 @@ trait ReificationSupport { self: SymbolTable => require(vd.rhs.isEmpty, "self types must have empty right hand side") copyValDef(vd)(mods = (vd.mods | PRIVATE) & (~DEFERRED)) case _ => - throw new IllegalArgumentException(s"$tree is not a valid representation of self type, " + - """consider reformatting into q"val $self: $T" shape""") + throw new IllegalArgumentException( + s"""$tree is not a valid representation of self type, consider reformatting into q"val $$self: $$T" shape""") } object SyntacticClassDef extends SyntacticClassDefExtractor { def apply(mods: Modifiers, name: TypeName, tparams: List[Tree], constrMods: Modifiers, vparamss: List[List[Tree]], earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = { - val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L) - val vparamss0 = mkParam(vparamss, extraFlags, excludeFlags = DEFERRED | PARAM) + val extraCaseFlags = if (mods.isCase) CASEACCESSOR else 0L + val excludeFlags = DEFERRED | PARAM + val vparamss0 = + if (vparamss.isEmpty) vparamss.asInstanceOf[List[List[ValDef]]] + else mkParam(vparamss.head :: Nil, PARAMACCESSOR | extraCaseFlags, excludeFlags) ++ + mkParam(vparamss.tail, PARAMACCESSOR, excludeFlags) val tparams0 = mkTparams(tparams) val parents0 = gen.mkParents(mods, if (mods.isCase) parents.filter { @@ -527,7 +531,7 @@ trait ReificationSupport { self: SymbolTable => object SyntacticFunction extends SyntacticFunctionExtractor { def apply(params: List[Tree], body: Tree): Function = { - val params0 
:: Nil = mkParam(params :: Nil, PARAM) + val params0 :: Nil = mkParam(params :: Nil, PARAM): @unchecked require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values") Function(params0, body) } @@ -561,7 +565,7 @@ trait ReificationSupport { self: SymbolTable => if (name != nme.CONSTRUCTOR) rhs else rhs match { case Block(_, _) => rhs - case _ => Block(List(rhs), gen.mkSyntheticUnit) + case _ => Block(List(rhs), gen.mkSyntheticUnit()) } } DefDef(mods, name, tparams0, vparamss0, tpt, rhs0) @@ -595,7 +599,7 @@ trait ReificationSupport { self: SymbolTable => def apply(lhs: Tree, rhs: Tree): Tree = gen.mkAssign(lhs, rhs) def unapply(tree: Tree): Option[(Tree, Tree)] = tree match { case Assign(lhs, rhs) => Some((lhs, rhs)) - case AssignOrNamedArg(lhs, rhs) => Some((lhs, rhs)) + case NamedArg(lhs, rhs) => Some((lhs, rhs)) case Apply(Select(fn, nme.update), args :+ rhs) => Some((atPos(fn.pos)(Apply(fn, args)), rhs)) case _ => None } @@ -872,10 +876,10 @@ trait ReificationSupport { self: SymbolTable => // drop potential @scala.unchecked annotation protected object MaybeUnchecked { def unapply(tree: Tree): Some[Tree] = tree match { - case Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), annottee) => + case Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, `noSelfType`, Nil), annottee) => Some(annottee) case Typed(annottee, MaybeTypeTreeOriginal( - Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, noSelfType, Nil), _))) => + Annotated(SyntacticNew(Nil, ScalaDot(tpnme.unchecked) :: Nil, `noSelfType`, Nil), _))) => Some(annottee) case annottee => Some(annottee) } @@ -900,7 +904,7 @@ trait ReificationSupport { self: SymbolTable => case Typed( Block( List(ClassDef(clsMods, tpnme.ANON_FUN_NAME, Nil, Template( - List(abspf: TypeTree, ser: TypeTree), noSelfType, List( + List(abspf: TypeTree, ser: TypeTree), `noSelfType`, List( DefDef(_, nme.CONSTRUCTOR, _, _, _, _), DefDef(_, 
nme.applyOrElse, _, _, _, Match(_, cases :+ @@ -969,11 +973,8 @@ trait ReificationSupport { self: SymbolTable => object SyntacticImport extends SyntacticImportExtractor { // construct/deconstruct {_} import selector private object WildcardSelector { - def apply(offset: Int): ImportSelector = ImportSelector(nme.WILDCARD, offset, null, -1) - def unapply(sel: ImportSelector): Option[Int] = sel match { - case ImportSelector(nme.WILDCARD, offset, null, -1) => Some(offset) - case _ => None - } + def apply(offset: Int): ImportSelector = ImportSelector.wildAt(offset) + def unapply(sel: ImportSelector): Option[Int] = if (sel.isWildcard) Some(sel.namePos) else None } // construct/deconstruct {foo} import selector @@ -991,24 +992,20 @@ trait ReificationSupport { self: SymbolTable => private object RenameSelector { def apply(name1: TermName, offset1: Int, name2: TermName, offset2: Int): ImportSelector = ImportSelector(name1, offset1, name2, offset2) - def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] = sel match { - case ImportSelector(_, _, null | nme.WILDCARD, _) => - None - case ImportSelector(name1, offset1, name2, offset2) if name1 != name2 => + def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] = + if (sel.isRename) { + val ImportSelector(name1, offset1, name2, offset2) = sel Some((name1.toTermName, offset1, name2.toTermName, offset2)) - case _ => - None - } + } else None } // construct/deconstruct {foo => _} import selector private object UnimportSelector { def apply(name: TermName, offset: Int): ImportSelector = ImportSelector(name, offset, nme.WILDCARD, -1) - def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match { - case ImportSelector(name, offset, nme.WILDCARD, _) => Some((name.toTermName, offset)) - case _ => None - } + def unapply(sel: ImportSelector): Option[(TermName, Int)] = + if (sel.isMask) Some((sel.name.toTermName, sel.namePos)) + else None } // represent {_} import selector as pq"_" @@ -1089,6 
+1086,7 @@ trait ReificationSupport { self: SymbolTable => case NameSelector(name, offset) => NameSelectorRepr(name, derivedPos(imp, offset)) case RenameSelector(name1, offset1, name2, offset2) => RenameSelectorRepr(name1, derivedPos(imp, offset1), name2, derivedPos(imp, offset2)) case UnimportSelector(name, offset) => UnimportSelectorRepr(name, derivedPos(imp, offset)) + case x => throw new MatchError(x) } Some((imp.expr, selectors)) } diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala index 5fa446d887de..1c91a1177170 100644 --- a/src/reflect/scala/reflect/internal/Reporting.scala +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,7 @@ package reflect package internal import scala.annotation.unchecked.uncheckedStable +import scala.reflect.internal.util.CodeAction import settings.MutableSettings /** Provides delegates to the reporter doing the actual work. @@ -35,7 +36,7 @@ trait Reporting { self : Positions => type PerRunReporting <: PerRunReportingBase protected def PerRunReporting: PerRunReporting abstract class PerRunReportingBase { - def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit + def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String, actions: List[CodeAction] = Nil): Unit /** Have we already supplemented the error message of a compiler crash? 
*/ private[this] var supplementedError = false @@ -55,7 +56,7 @@ trait Reporting { self : Positions => @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") def inform(msg: String): Unit = inform(NoPosition, msg) @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") - @deprecated("Use `runReporting.warning` instead") + @deprecated("Use `runReporting.warning` instead", since = "2.13.4") def warning(msg: String): Unit = warning(NoPosition, msg) // globalError(msg: String) used to abort -- not sure that was a good idea, so I made it more regular // (couldn't find any uses that relied on old behavior) @@ -72,7 +73,7 @@ trait Reporting { self : Positions => @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") def inform(pos: Position, msg: String) = reporter.echo(pos, msg) @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") - @deprecated("Use `runReporting.warning` instead") + @deprecated("Use `runReporting.warning` instead", since = "2.13.4") def warning(pos: Position, msg: String) = reporter.warning(pos, msg) @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2") def globalError(pos: Position, msg: String) = reporter.error(pos, msg) @@ -96,24 +97,33 @@ abstract class Reporter { @uncheckedStable final def WARNING: Severity = Reporter.WARNING @uncheckedStable final def ERROR: Severity = Reporter.ERROR - // TODO: rename to `doReport`, remove the `force` parameter. + // TODO: rename to `doReport`, remove the `force` parameter (but sbt compat). // Note: `force` is ignored. It used to mean: if `!force`, the reporter may skip INFO messages. // If `force`, INFO messages were always printed. Now, INFO messages are always printed. 
+ @deprecatedOverriding("extend scala.tools.nsc.reporters.FilteringReporter, and override doReport instead", "2.13.12") protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit + def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = + info0(pos, msg, severity, force = false) + /** @return Reporter.Display, or override for Count, Suppress */ def filter(pos: Position, msg: String, severity: Severity): Int = Reporter.Display - final def echo(msg: String): Unit = echo(util.NoPosition, msg) - final def echo(pos: Position, msg: String): Unit = if (filter(pos, msg, INFO) == 0) info0(pos, msg, INFO, force = true) - def warning(pos: Position, msg: String): Unit = filteredInfo(pos, msg, WARNING) - def error(pos: Position, msg: String): Unit = filteredInfo(pos, msg, ERROR) + final def echo(msg: String): Unit = echo(util.NoPosition, msg) + final def echo(pos: Position, msg: String, actions: List[CodeAction] = Nil): Unit = + if (filter(pos, msg, INFO) == 0) doReport(pos, msg, INFO, actions) + + final def warning(pos: Position, msg: String, actions: List[CodeAction] = Nil): Unit = + filteredInfo(pos, msg, WARNING, actions) + + final def error(pos: Position, msg: String, actions: List[CodeAction] = Nil): Unit = + filteredInfo(pos, msg, ERROR, actions) - private def filteredInfo(pos: Position, msg: String, severity: Severity): Unit = { + private def filteredInfo(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = { val f = filter(pos, msg, severity) - if (f <= 1) increment(severity) - if (f == 0) info0(pos, msg, severity, force = false) + if (f < Reporter.Suppress) increment(severity) + if (f == Reporter.Display) doReport(pos, msg, severity, actions) } def increment(severity: Severity): Unit = severity match { diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index f7165cd4db36..74f757393913 100644 --- 
a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,8 @@ package reflect package internal import scala.annotation.tailrec -import scala.collection.generic.Clearable +import scala.collection.{AbstractIterable, AbstractIterator} +import scala.collection.mutable.Clearable import scala.reflect.internal.util.Statistics trait Scopes extends api.Scopes { self: SymbolTable => @@ -62,11 +63,24 @@ trait Scopes extends api.Scopes { self: SymbolTable => def unapplySeq(decls: Scope): Some[Seq[Symbol]] = Some(decls.toList) } + /** A default Scope iterator, that retrieves elements in the order given by ScopeEntry. */ + private[Scopes] class ScopeIterator(owner: Scope) extends AbstractIterator[Symbol] { + private[this] var elem: ScopeEntry = owner.elems + + def hasNext: Boolean = (elem ne null) && (elem.owner == this.owner) + def next(): Symbol = + if (hasNext) { + val res = elem + elem = elem.next + res.sym + } else throw new NoSuchElementException + } + /** Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead. * This is necessary because when run from reflection every scope needs to have a * SynchronizedScope as mixin. */ - class Scope protected[Scopes]() extends ScopeApi with MemberScopeApi { + class Scope protected[Scopes]() extends AbstractIterable[Symbol] with ScopeApi with MemberScopeApi { scopeCount += 1 private[scala] var elems: ScopeEntry = _ @@ -81,9 +95,9 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** a cache for all elements, to be used by symbol iterator. 
*/ - private var elemsCache: List[Symbol] = null - private var cachedSize = -1 - private def flushElemsCache() { + private[this] var elemsCache: List[Symbol] = null + private[this] var cachedSize = -1 + private def flushElemsCache(): Unit = { elemsCache = null cachedSize = -1 } @@ -123,7 +137,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** enter a scope entry */ - protected def enterEntry(e: ScopeEntry) { + protected def enterEntry(e: ScopeEntry): Unit = { flushElemsCache() if (hashtable ne null) enterInHash(e) @@ -139,12 +153,12 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** enter a symbol */ - def enter[T <: Symbol](sym: T): T = { + def enter[T <: Symbol](sym: T): sym.type = { enterEntry(newScopeEntry(sym, this)) sym } - final def enterBefore(sym: Symbol, next: ScopeEntry): Symbol = { + final def enterBefore(sym: Symbol, next: ScopeEntry): sym.type = { assert(this != EmptyScope, sym) require(sym.name.hashCode() == next.sym.name.hashCode(), (sym, next.sym)) require(sym != next.sym, (sym, next.sym)) @@ -163,7 +177,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** enter a symbol, asserting that no symbol with same name exists in scope */ - def enterUnique(sym: Symbol) { + def enterUnique(sym: Symbol): Unit = { assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString)) enter(sym) } @@ -174,12 +188,12 @@ trait Scopes extends api.Scopes { self: SymbolTable => else existing.sym.asInstanceOf[T] } - private def createHash() { + private def createHash(): Unit = { hashtable = new Array[ScopeEntry](HASHSIZE) enterAllInHash(elems) } - private def enterAllInHash(e: ScopeEntry, n: Int = 0) { + private def enterAllInHash(e: ScopeEntry, n: Int = 0): Unit = { if (e ne null) { if (n < maxRecursions) { enterAllInHash(e.next, n + 1) @@ -196,7 +210,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => } } - def rehash(sym: Symbol, newname: Name) { + def rehash(sym: Symbol, newname: Name): Unit 
= { if (hashtable ne null) { val index = sym.name.start & HASHMASK var e1 = hashtable(index) @@ -223,7 +237,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** remove entry */ - def unlink(e: ScopeEntry) { + def unlink(e: ScopeEntry): Unit = { if (elems == e) { elems = e.next } else { @@ -245,7 +259,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** remove symbol */ - def unlink(sym: Symbol) { + def unlink(sym: Symbol): Unit = { var e = lookupEntry(sym.name) while (e ne null) { if (e.sym == sym) unlink(e) @@ -299,14 +313,14 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** Returns an iterator yielding every symbol with given name in this scope. */ - def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] { - var e = lookupEntry(name) + def lookupAll(name: Name): Iterator[Symbol] = new AbstractIterator[Symbol] { + private[this] var e = lookupEntry(name) def hasNext: Boolean = e ne null def next(): Symbol = try e.sym finally e = lookupNextEntry(e) } - def lookupAllEntries(name: Name): Iterator[ScopeEntry] = new Iterator[ScopeEntry] { - var e = lookupEntry(name) + def lookupAllEntries(name: Name): Iterator[ScopeEntry] = new AbstractIterator[ScopeEntry] { + private[this] var e = lookupEntry(name) def hasNext: Boolean = e ne null def next(): ScopeEntry = try e finally e = lookupNextEntry(e) } @@ -399,7 +413,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => } entryContainsSym(this lookupEntry sym.name) } - other.toList forall scopeContainsSym + other.reverseIterator.forall(scopeContainsSym) } /** Return all symbols as a list in the order they were entered in this scope. @@ -432,14 +446,19 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def iterator: Iterator[Symbol] = toList.iterator + /** Returns all symbols as an iterator, in an order reversed to that in which they + * were entered: symbols first in the scopes are last out of the iterator. 
+ * NOTE: when using the `reverseIterator`, it is not safe to mutate the Scope. + * So, be careful not to use this when you do need to mutate this Scope. */ + def reverseIterator: Iterator[Symbol] = new ScopeIterator(this) + override def foreach[U](p: Symbol => U): Unit = toList foreach p - // TODO in 2.13.x, s/sameLength(result, filtered)/result eq filtered/, taking advantage of - // the new conservation in List.filter/filterNot override def filterNot(p: Symbol => Boolean): Scope = { val result = toList val filtered = result.filterNot(p) - if (sameLength(result, filtered)) this else newScopeWith(filtered: _*) + if (result eq filtered) this + else newScopeWith(filtered: _*) } override def filter(p: Symbol => Boolean): Scope = { val result = toList @@ -450,8 +469,8 @@ trait Scopes extends api.Scopes { self: SymbolTable => @deprecated("use `toList.reverse` instead", "2.10.0") // Used in sbt 0.12.4 def reverse: List[Symbol] = toList.reverse - override def mkString(start: String, sep: String, end: String) = - toList.map(_.defString).mkString(start, sep, end) + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + toList.map(_.defString).addString(sb, start, sep, end) override def toString(): String = mkString("Scope{\n ", ";\n ", "\n}") } @@ -510,7 +529,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => def newScopeWith(elems: Symbol*): Scope = { val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val scope = newScope - elems foreach scope.enter + elems.foreach(scope.enter(_)) if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) scope } @@ -527,7 +546,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** The empty scope (immutable). 
*/ object EmptyScope extends Scope { - override def enterEntry(e: ScopeEntry) { + override def enterEntry(e: ScopeEntry): Unit = { abort("EmptyScope.enter") } } diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 393f586af3d1..fe9f22663010 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -64,15 +64,11 @@ trait StdAttachments { * @param samTp the expected type that triggered sam conversion (may be a subtype of the type corresponding to sam's owner) * @param sam the single abstract method implemented by the Function we're attaching this to * @param synthCls the (synthetic) class representing the eventual implementation class (spun at runtime by LMF on the JVM) - * - * @since 2.12.0-M4 */ case class SAMFunction(samTp: Type, sam: Symbol, synthCls: Symbol) extends PlainAttachment case object DelambdafyTarget extends PlainAttachment - case class JustMethodReference(lambdaTarget: Symbol) extends PlainAttachment - /** When present, indicates that the host `Ident` has been created from a backquoted identifier. */ case object BackquotedIdentifierAttachment extends PlainAttachment @@ -92,10 +88,19 @@ trait StdAttachments { */ case object NoWarnAttachment extends PlainAttachment + /** A pattern binding that shadows a symbol in scope. Removed by refchecks. + */ + case class PatShadowAttachment(shadowed: Symbol) + /** Indicates that a `ValDef` was synthesized from a pattern definition, `val P(x)`. */ case object PatVarDefAttachment extends PlainAttachment + /** Indicates that a definition was part of either a pattern or "sequence shorthand" + * that introduced multiple definitions. 
All variables must be either `val` or `var`. + */ + case object MultiDefAttachment extends PlainAttachment + /** Identifies trees are either result or intermediate value of for loop desugaring. */ case object ForAttachment extends PlainAttachment @@ -127,10 +132,58 @@ trait StdAttachments { */ case object KnownDirectSubclassesCalled extends PlainAttachment + case object DottyEnumSingleton extends PlainAttachment + + class DottyParameterisedTrait(val params: List[Symbol]) + + class DottyOpaqueTypeAlias(val tpe: Type) + class QualTypeSymAttachment(val sym: Symbol) + case object ConstructorNeedsFence extends PlainAttachment + + /** Mark the syntax for linting purposes. */ + case object MultiargInfixAttachment extends PlainAttachment + + case object NullaryOverrideAdapted extends PlainAttachment + // When typing a Def with this attachment, change the owner of its RHS from origalOwner to the symbol of the Def case class ChangeOwnerAttachment(originalOwner: Symbol) - case class LookupAmbiguityWarning(msg: String) extends PlainAttachment + case object InterpolatedString extends PlainAttachment + + case object VirtualStringContext extends PlainAttachment + + case object CaseApplyInheritAccess extends PlainAttachment + + // Use of _root_ is in correct leading position of selection + case object RootSelection extends PlainAttachment + + /** Marks a Typed tree with Unit tpt. */ + case object TypedExpectingUnitAttachment extends PlainAttachment + def explicitlyUnit(tree: Tree): Boolean = tree.hasAttachment[TypedExpectingUnitAttachment.type] + + /** For `val i = 42`, marks field as inferred so accessor (getter) can warn if implicit. */ + case object FieldTypeInferred extends PlainAttachment + + case class LookupAmbiguityWarning(msg: String, fix: String) extends PlainAttachment + + /** Java sealed classes may be qualified with a permits clause specifying allowed subclasses. 
*/ + case class PermittedSubclasses(permits: List[Tree]) extends PlainAttachment + case class PermittedSubclassSymbols(permits: List[Symbol]) extends PlainAttachment + + case class NamePos(pos: Position) extends PlainAttachment + + /** Not a named arg in an application. Used for suspicious literal booleans. */ + case object UnnamedArg extends PlainAttachment + + /** Adapted under value discard at typer. */ + case object DiscardedValue extends PlainAttachment + /** Discarded pure expression observed at refchecks. */ + case object DiscardedExpr extends PlainAttachment + /** Anonymous parameter of `if (_)` may be inferred as Boolean. */ + case object BooleanParameterType extends PlainAttachment + + /** Force desugaring Match trees, don't emit switches. Attach to DefDef trees or their symbol. */ + case object ForceMatchDesugar extends PlainAttachment } diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala index 24c50aa5f379..2ee80fd851ea 100644 --- a/src/reflect/scala/reflect/internal/StdCreators.scala +++ b/src/reflect/scala/reflect/internal/StdCreators.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -31,4 +31,4 @@ trait StdCreators { if (m eq mirror) tpe.asInstanceOf[U # Type] else throw new IllegalArgumentException(s"Type tag defined in $mirror cannot be migrated to other mirrors.") } -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index c2932408dccb..9775fa7bcdc0 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,9 +14,8 @@ package scala package reflect package internal -import scala.language.implicitConversions - import java.security.MessageDigest + import Chars.isOperatorPart import scala.annotation.switch import scala.collection.immutable @@ -38,7 +37,7 @@ trait StdNames { * CommonNames constructor out of the starting gate. This is its builder. */ private class KeywordSetBuilder { - private var kws: Set[TermName] = Set() + private[this] var kws: Set[TermName] = Set() def apply(s: String): TermName = { val result = newTermNameCached(s) kws = kws + result @@ -54,24 +53,21 @@ trait StdNames { /** * COMPACTIFY * - * The hashed name has the form (prefix + marker + md5 + marker + suffix), where - * - prefix/suffix.length = MaxNameLength / 4 - * - md5.length = 32 - * - * We obtain the formula: + * The maximum length of a filename on some platforms is 240 chars (docker). + * Therefore, compactify names that would create a filename longer than that. + * A compactified name looks like + * prefix + $$$$ + md5 + $$$$ + suffix, + * where the prefix and suffix are the first and last quarter of the name, + * respectively. * - * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + suffixLength - * - * (+suffixLength for ".class" and potential module class suffix that is added *after* this transform). - * - * MaxNameLength can therefore be computed as follows: + * So how long is too long? For a (flattened class) name, the resulting file + * will be called "name.class", or, if it's a module class, "name$.class" + * (see scala/bug#8199). Therefore the maximum suffix is 7 characters, and + * names that are over (240 - 7) characters get compactified. 
*/ - val marker = "$$$$" - val maxSuffixLength = "$.class".length + 1 // potential module class suffix and file extension - val MaxNameLength = math.min( - settings.maxClassfileName.value - maxSuffixLength, - 2 * (settings.maxClassfileName.value - maxSuffixLength - 2*marker.length - 32) - ) + final val marker = "$$$$" + final val MaxSuffixLength = 7 // "$.class".length + 1 // potential module class suffix and file extension + final val MaxNameLength = 240 - MaxSuffixLength def toMD5(s: String, edge: Int): String = { val prefix = s take edge val suffix = s takeRight edge @@ -91,14 +87,12 @@ trait StdNames { abstract class CommonNames extends NamesApi { type NameType >: Null <: Name - // Masking some implicits so as to allow our targeted => NameType. - protected val stringToTermName = null - protected val stringToTypeName = null - protected implicit def createNameType(name: String): NameType + protected def nameType(name: String): NameType def flattenedName(owner: Symbol, name: Name): NameType = { val flat = owner.name.toString + NAME_JOIN_STRING + name.toString - if (owner.isJava) flat else compactify(flat) // scala/bug#11277 + val nameString = if (owner.isJava) flat else compactify(flat) // scala/bug#11277 + nameType(nameString) } // TODO: what is the purpose of all this duplication!?!?! 
@@ -111,18 +105,18 @@ trait StdNames { final val TRAIT_SETTER_SEPARATOR_STRING = NameTransformer.TRAIT_SETTER_SEPARATOR_STRING final val SINGLETON_SUFFIX = ".type" - val ANON_CLASS_NAME: NameType = "$anon" - val DELAMBDAFY_LAMBDA_CLASS_NAME: NameType = "$lambda" - val ANON_FUN_NAME: NameType = "$anonfun" - val EMPTY: NameType = "" - val EMPTY_PACKAGE_NAME: NameType = "" - val IMPORT: NameType = "" - val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING - val MODULE_VAR_SUFFIX: NameType = MODULE_VAR_SUFFIX_STRING - val PACKAGE: NameType = "package" - val ROOT: NameType = "" - val SPECIALIZED_SUFFIX: NameType = "$sp" - val CASE_ACCESSOR: NameType = "$access" + val ANON_CLASS_NAME: NameType = nameType("$anon") + val DELAMBDAFY_LAMBDA_CLASS_NAME: NameType = nameType("$lambda") + val ANON_FUN_NAME: NameType = nameType("$anonfun") + val EMPTY: NameType = nameType("") + val EMPTY_PACKAGE_NAME: NameType = nameType("") + val IMPORT: NameType = nameType("") + val MODULE_SUFFIX_NAME: NameType = nameType(MODULE_SUFFIX_STRING) + val MODULE_VAR_SUFFIX: NameType = nameType(MODULE_VAR_SUFFIX_STRING) + val PACKAGE: NameType = nameType("package") + val ROOT: NameType = nameType("") + val SPECIALIZED_SUFFIX: NameType = nameType("$sp") + val CASE_ACCESSOR: NameType = nameType("$access") val NESTED_IN: String = "$nestedIn" val NESTED_IN_ANON_CLASS: String = NESTED_IN + ANON_CLASS_NAME.toString.replace("$", "") @@ -146,39 +140,44 @@ trait StdNames { // value types (and AnyRef) are all used as terms as well // as (at least) arguments to the @specialize annotation. 
- final val Boolean: NameType = "Boolean" - final val Byte: NameType = "Byte" - final val Char: NameType = "Char" - final val Double: NameType = "Double" - final val Float: NameType = "Float" - final val Int: NameType = "Int" - final val Long: NameType = "Long" - final val Short: NameType = "Short" - final val Unit: NameType = "Unit" + final val Boolean: NameType = nameType("Boolean") + final val Byte: NameType = nameType("Byte") + final val Char: NameType = nameType("Char") + final val Double: NameType = nameType("Double") + final val Float: NameType = nameType("Float") + final val Int: NameType = nameType("Int") + final val Long: NameType = nameType("Long") + final val Short: NameType = nameType("Short") + final val Unit: NameType = nameType("Unit") // some types whose companions we utilize - final val AnyRef: NameType = "AnyRef" - final val Array: NameType = "Array" - final val List: NameType = "List" - final val Option: NameType = "Option" - final val Seq: NameType = "Seq" - final val Symbol: NameType = "Symbol" - final val WeakTypeTag: NameType = "WeakTypeTag" - final val TypeTag : NameType = "TypeTag" - final val Expr: NameType = "Expr" - final val String: NameType = "String" - final val StringContext: NameType = "StringContext" + final val AnyRef: NameType = nameType("AnyRef") + final val Array: NameType = nameType("Array") + final val List: NameType = nameType("List") + final val Option: NameType = nameType("Option") + final val Seq: NameType = nameType("Seq") + final val Symbol: NameType = nameType("Symbol") + final val WeakTypeTag: NameType = nameType("WeakTypeTag") + final val TypeTag : NameType = nameType("TypeTag") + final val Expr: NameType = nameType("Expr") + final val String: NameType = nameType("String") + + // some names whose name we utilize + final val StringContextName: NameType = nameType("StringContext") // fictions we use as both types and terms - final val ERROR: NameType = "" - final val NO_NAME: NameType = "" // formerly NOSYMBOL - final 
val WILDCARD: NameType = "_" + final val ERROR: NameType = nameType("") + final val NO_NAME: NameType = nameType("") // formerly NOSYMBOL + final val WILDCARD: NameType = nameType("_") } + // FIXME: This class requires early initializers to work, which are deprecated + // and will not be supported in 3.0. Please change the design and remove + // the early initializer. /** This should be the first trait in the linearization. */ // abstract class Keywords extends CommonNames { abstract class Keywords extends { - private val kw = new KeywordSetBuilder + private[this] val kw = new KeywordSetBuilder final val ABSTRACTkw: TermName = kw("abstract") final val CASEkw: TermName = kw("case") @@ -209,7 +208,6 @@ trait StdNames { final val RETURNkw: TermName = kw("return") final val SEALEDkw: TermName = kw("sealed") final val SUPERkw: TermName = kw("super") - final val THENkw: TermName = kw("then") final val THISkw: TermName = kw("this") final val THROWkw: TermName = kw("throw") final val TRAITkw: TermName = kw("trait") @@ -236,88 +234,94 @@ trait StdNames { final val keywords = kw.result } with CommonNames { final val javaKeywords = new JavaKeywords() + final val javaRestrictedIdentifiers = new JavaRestrictedIdentifiers() } abstract class TypeNames extends Keywords with TypeNamesApi { override type NameType = TypeName - protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name) - - final val BYNAME_PARAM_CLASS_NAME: NameType = "" - final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "" - final val LOCAL_CHILD: NameType = "" - final val REFINE_CLASS_NAME: NameType = "" - final val REPEATED_PARAM_CLASS_NAME: NameType = "" - final val WILDCARD_STAR: NameType = "_*" - final val REIFY_TREECREATOR_PREFIX: NameType = "$treecreator" - final val REIFY_TYPECREATOR_PREFIX: NameType = "$typecreator" - final val MACRO_BUNDLE_SUFFIX: NameType = "$Bundle" - - final val Any: NameType = "Any" - final val AnyVal: NameType = "AnyVal" - final val App: NameType = 
"App" - final val FlagSet: NameType = "FlagSet" - final val Mirror: NameType = "Mirror" - final val Modifiers: NameType = "Modifiers" - final val Nothing: NameType = "Nothing" - final val Null: NameType = "Null" - final val Object: NameType = "Object" - final val PrefixType: NameType = "PrefixType" - final val Product: NameType = "Product" - final val Serializable: NameType = "Serializable" - final val Singleton: NameType = "Singleton" - final val Throwable: NameType = "Throwable" - final val unchecked: NameType = "unchecked" - - final val api: NameType = "api" - final val Annotation: NameType = "Annotation" - final val CaseDef: NameType = "CaseDef" - final val ClassfileAnnotation: NameType = "ClassfileAnnotation" - final val ClassManifest: NameType = "ClassManifest" - final val Enum: NameType = "Enum" - final val Group: NameType = "Group" - final val implicitNotFound: NameType = "implicitNotFound" - final val Liftable: NameType = "Liftable" - final val Unliftable: NameType = "Unliftable" - final val Name: NameType = "Name" - final val Tree: NameType = "Tree" - final val Text: NameType = "Text" - final val TermName: NameType = "TermName" - final val Type : NameType = "Type" - final val TypeName: NameType = "TypeName" - final val TypeDef: NameType = "TypeDef" - final val Quasiquote: NameType = "Quasiquote" + protected def nameType(name: String): TypeName = newTypeNameCached(name) + + final val BYNAME_PARAM_CLASS_NAME: NameType = nameType("") + final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = nameType("") + final val LOCAL_CHILD: NameType = nameType("") + final val REFINE_CLASS_NAME: NameType = nameType("") + final val REPEATED_PARAM_CLASS_NAME: NameType = nameType("") + final val WILDCARD_STAR: NameType = nameType("_*") + final val REIFY_TREECREATOR_PREFIX: NameType = nameType("$treecreator") + final val REIFY_TYPECREATOR_PREFIX: NameType = nameType("$typecreator") + final val MACRO_BUNDLE_SUFFIX: NameType = nameType("$Bundle") + + final val Any: NameType = 
nameType("Any") + final val AnyVal: NameType = nameType("AnyVal") + final val App: NameType = nameType("App") + final val FlagSet: NameType = nameType("FlagSet") + final val Mirror: NameType = nameType("Mirror") + final val Modifiers: NameType = nameType("Modifiers") + final val Nothing: NameType = nameType("Nothing") + final val Null: NameType = nameType("Null") + final val Object: NameType = nameType("Object") + final val PrefixType: NameType = nameType("PrefixType") + final val Product: NameType = nameType("Product") + final val Record: NameType = nameType("Record") + final val Serializable: NameType = nameType("Serializable") + final val Singleton: NameType = nameType("Singleton") + final val Throwable: NameType = nameType("Throwable") + final val unchecked: NameType = nameType("unchecked") + final val ValueOf: NameType = nameType("ValueOf") + + final val api: NameType = nameType("api") + final val Annotation: NameType = nameType("Annotation") + final val CaseDef: NameType = nameType("CaseDef") + final val ClassManifest: NameType = nameType("ClassManifest") + final val Enum: NameType = nameType("Enum") + final val Group: NameType = nameType("Group") + final val implicitNotFound: NameType = nameType("implicitNotFound") + final val Liftable: NameType = nameType("Liftable") + final val Unliftable: NameType = nameType("Unliftable") + final val Name: NameType = nameType("Name") + final val StaticAnnotation: NameType = nameType("StaticAnnotation") + final val Tree: NameType = nameType("Tree") + final val Text: NameType = nameType("Text") + final val TermName: NameType = nameType("TermName") + final val Type : NameType = nameType("Type") + final val TypeName: NameType = nameType("TypeName") + final val TypeDef: NameType = nameType("TypeDef") + final val Quasiquote: NameType = nameType("Quasiquote") + final val macroImplLocation: NameType = nameType("macroImplLocation") + final val UnapplySeqWrapper: NameType = nameType("UnapplySeqWrapper") // async - final val 
stateMachine: NameType = "stateMachine$async" + final val stateMachine: NameType = nameType("stateMachine$async") // quasiquote-specific names - final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$" - final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$" - final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$" + final val QUASIQUOTE_FUNCTION: NameType = nameType("$quasiquote$function$") + final val QUASIQUOTE_MODS: NameType = nameType("$quasiquote$mods$") + final val QUASIQUOTE_TUPLE: NameType = nameType("$quasiquote$tuple$") // Annotation simple names, used in Namer - final val BeanPropertyAnnot: NameType = "BeanProperty" - final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty" - final val bridgeAnnot: NameType = "bridge" + final val BeanPropertyAnnot: NameType = nameType("BeanProperty") + final val BooleanBeanPropertyAnnot: NameType = nameType("BooleanBeanProperty") // Classfile Attributes - final val AnnotationDefaultATTR: NameType = "AnnotationDefault" - final val BridgeATTR: NameType = "Bridge" - final val CodeATTR: NameType = "Code" - final val ConstantValueATTR: NameType = "ConstantValue" - final val DeprecatedATTR: NameType = "Deprecated" - final val ExceptionsATTR: NameType = "Exceptions" - final val InnerClassesATTR: NameType = "InnerClasses" - final val MethodParametersATTR: NameType = "MethodParameters" - final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME - final val ScalaATTR: NameType = "Scala" - final val ScalaSignatureATTR: NameType = "ScalaSig" - final val SignatureATTR: NameType = "Signature" - final val SourceFileATTR: NameType = "SourceFile" - final val SyntheticATTR: NameType = "Synthetic" - - final val scala_ : NameType = "scala" + final val AnnotationDefaultATTR: NameType = nameType("AnnotationDefault") + final val BridgeATTR: NameType = nameType("Bridge") + final val CodeATTR: NameType = nameType("Code") + final val ConstantValueATTR: NameType = 
nameType("ConstantValue") + final val DeprecatedATTR: NameType = nameType("Deprecated") + final val ExceptionsATTR: NameType = nameType("Exceptions") + final val InnerClassesATTR: NameType = nameType("InnerClasses") + final val MethodParametersATTR: NameType = nameType("MethodParameters") + final val RuntimeAnnotationATTR: NameType = nameType("RuntimeVisibleAnnotations") // RetentionPolicy.RUNTIME + final val ScalaATTR: NameType = nameType("Scala") + final val TASTYATTR: NameType = nameType("TASTY") + final val ScalaSignatureATTR: NameType = nameType("ScalaSig") + final val SignatureATTR: NameType = nameType("Signature") + final val SourceFileATTR: NameType = nameType("SourceFile") + final val SyntheticATTR: NameType = nameType("Synthetic") + final val PermittedSubclassesATTR: NameType = nameType("PermittedSubclasses") + + final val scala_ : NameType = nameType("scala") // Scala 3 special type val AND: NameType = nme.AND.toTypeName @@ -329,7 +333,7 @@ trait StdNames { abstract class TermNames extends Keywords with TermNamesApi { override type NameType = TermName - protected implicit def createNameType(name: String): TermName = newTermNameCached(name) + protected def nameType(name: String): TermName = newTermNameCached(name) /** Base strings from which synthetic names are derived. */ val BITMAP_PREFIX = "bitmap$" @@ -353,49 +357,57 @@ trait StdNames { val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names val QUAL_PREFIX = "qual$" val NAMEDARG_PREFIX = "x$" + val RIGHT_ASSOC_OP_PREFIX = "rassoc$" + val STABILIZER_PREFIX = "stabilizer$" // Compiler internal names - val ANYname: NameType = "" - val CONSTRUCTOR: NameType = "" - val DEFAULT_CASE: NameType = "defaultCase$" - val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$" - val FAKE_LOCAL_THIS: NameType = "this$" - val LAZY_SLOW_SUFFIX: NameType = "$lzycompute" - val UNIVERSE_BUILD_PREFIX: NameType = "$u.internal.reificationSupport." - val UNIVERSE_PREFIX: NameType = "$u." 
- val UNIVERSE_SHORT: NameType = "$u" - val MIRROR_PREFIX: NameType = "$m." - val MIRROR_SHORT: NameType = "$m" - val MIRROR_UNTYPED: NameType = "$m$untyped" - val REIFY_FREE_PREFIX: NameType = "free$" - val REIFY_FREE_THIS_SUFFIX: NameType = "$this" - val REIFY_FREE_VALUE_SUFFIX: NameType = "$value" - val REIFY_SYMDEF_PREFIX: NameType = "symdef$" - val QUASIQUOTE_CASE: NameType = "$quasiquote$case$" - val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$" + val ANYname: NameType = nameType("") + val CONSTRUCTOR: NameType = nameType("") + val CLASS_CONSTRUCTOR: NameType = nameType("") + val DEFAULT_CASE: NameType = nameType("defaultCase$") + val EQEQ_LOCAL_VAR: NameType = nameType("eqEqTemp$") + val FAKE_LOCAL_THIS: NameType = nameType("this$") + val LAZY_SLOW_SUFFIX: NameType = nameType("$lzycompute") + val UNIVERSE_BUILD_PREFIX: NameType = nameType("$u.internal.reificationSupport.") + val UNIVERSE_PREFIX: NameType = nameType("$u.") + val UNIVERSE_SHORT: NameType = nameType("$u") + val MIRROR_PREFIX: NameType = nameType("$m.") + val MIRROR_SHORT: NameType = nameType("$m") + val MIRROR_UNTYPED: NameType = nameType("$m$untyped") + val REIFY_FREE_PREFIX: NameType = nameType("free$") + val REIFY_FREE_THIS_SUFFIX: NameType = nameType(s"$$this") + val REIFY_FREE_VALUE_SUFFIX: NameType = nameType(s"$$value") // looks like missing interpolator due to `value` in scope + val REIFY_SYMDEF_PREFIX: NameType = nameType("symdef$") + val QUASIQUOTE_CASE: NameType = nameType("$quasiquote$case$") + val QUASIQUOTE_EARLY_DEF: NameType = nameType("$quasiquote$early$def$") val QUASIQUOTE_FILE: String = "" - val QUASIQUOTE_FOR_ENUM: NameType = "$quasiquote$for$enum$" + val QUASIQUOTE_FOR_ENUM: NameType = nameType("$quasiquote$for$enum$") val QUASIQUOTE_NAME_PREFIX: String = "nn$" - val QUASIQUOTE_PACKAGE_STAT: NameType = "$quasiquote$package$stat$" - val QUASIQUOTE_PARAM: NameType = "$quasiquote$param$" - val QUASIQUOTE_PAT_DEF: NameType = "$quasiquote$pat$def$" + val 
QUASIQUOTE_PACKAGE_STAT: NameType = nameType("$quasiquote$package$stat$") + val QUASIQUOTE_PARAM: NameType = nameType("$quasiquote$param$") + val QUASIQUOTE_PAT_DEF: NameType = nameType("$quasiquote$pat$def$") val QUASIQUOTE_PREFIX: String = "qq$" - val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$" - val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$" + val QUASIQUOTE_REFINE_STAT: NameType = nameType("$quasiquote$refine$stat$") + val QUASIQUOTE_TUPLE: NameType = nameType("$quasiquote$tuple$") val QUASIQUOTE_UNLIFT_HELPER: String = "$quasiquote$unlift$helper$" - val MIXIN_CONSTRUCTOR: NameType = "$init$" - val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$" - val OUTER: NameType = "$outer" + val MIXIN_CONSTRUCTOR: NameType = nameType("$init$") + val MODULE_INSTANCE_FIELD: NameType = nameType(NameTransformer.MODULE_INSTANCE_NAME) // "MODULE$" + val OUTER: NameType = nameType("$outer") val OUTER_LOCAL: NameType = OUTER.localName - val OUTER_ARG: NameType = "arg" + OUTER - val OUTER_SYNTH: NameType = "" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter - val ROOTPKG: NameType = "_root_" - val SELECTOR_DUMMY: NameType = "" - val SELF: NameType = "$this" - val SETTER_SUFFIX: NameType = NameTransformer.SETTER_SUFFIX_STRING - val SPECIALIZED_INSTANCE: NameType = "specInstance$" - val STAR: NameType = "*" - val THIS: NameType = "_$this" + val OUTER_ARG: NameType = nameType("arg" + OUTER) + val OUTER_SYNTH: NameType = nameType("") // emitted by pattern matcher, replaced by outer accessor in explicitouter + val ROOTPKG: NameType = nameType("_root_") + val SELECTOR_DUMMY: NameType = nameType("") + val SELF: NameType = nameType(s"$$this") + val SETTER_SUFFIX: NameType = nameType(NameTransformer.SETTER_SUFFIX_STRING) + val SPECIALIZED_INSTANCE: NameType = nameType("specInstance$") + val STAR: NameType = nameType("*") + val THIS: NameType = nameType(s"_$$this") + + + val annottees: NameType = 
nameType("annottees") // for macro annotations + val macroTransform: NameType = nameType("macroTransform") // for macro annotations + val unpickledMacroImpl: NameType = nameType("unpickledMacroImpl") // for tasty macro unpickling def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX @@ -408,11 +420,21 @@ trait StdNames { def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING) def isSingletonName(name: Name) = name endsWith SINGLETON_SUFFIX def isModuleName(name: Name) = name endsWith MODULE_SUFFIX_NAME + def isFreshTermName(name: Name) = name.startsWith(FRESH_TERM_NAME_PREFIX) /** Is name a variable name? */ def isVariableName(name: Name): Boolean = { + import Character.{isHighSurrogate, isLowSurrogate, isLetter, isLowerCase, isValidCodePoint, toCodePoint} val first = name.startChar - ( ((first.isLower && first.isLetter) || first == '_') + def isLowerLetterSupplementary: Boolean = + first == '$' && { + val decoded = name.decoded + isHighSurrogate(decoded.charAt(0)) && decoded.length > 1 && isLowSurrogate(decoded.charAt(1)) && { + val codepoint = toCodePoint(decoded.charAt(0), decoded.charAt(1)) + isValidCodePoint(codepoint) && isLetter(codepoint) && isLowerCase(codepoint) + } + } + ( ((first.isLower && first.isLetter) || first == '_' || isLowerLetterSupplementary) && (name != nme.false_) && (name != nme.true_) && (name != nme.null_) @@ -425,6 +447,9 @@ trait StdNames { name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar) } + /** Is name a left-associative operator? 
*/ + def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':') + private def expandedNameInternal(name: TermName, base: Symbol, separator: String): TermName = newTermNameCached(base.fullName('$') + separator + name) @@ -445,10 +470,10 @@ trait StdNames { * or $ followed by an operator that gets encoded, go directly to compiler * crash. Do not pass go and don't even think about collecting any $$ */ - def unexpandedName(name: Name): Name = { - if (!name.containsChar('$')) name // lastIndexOf calls Name.toString, add a fast path to avoid that. - else name lastIndexOf "$$" match { + def unexpandedName(name: Name): Name = + name.lastIndexOf("$$") match { case 0 | -1 => name + case 1 if name.charAt(0) == '_' => if (name.isTermName) nme.WILDCARD else tpnme.WILDCARD case idx0 => // Sketchville - We've found $$ but if it's part of $$$ or $$$$ // or something we need to keep the bonus dollars, so e.g. foo$$$outer @@ -456,9 +481,8 @@ trait StdNames { var idx = idx0 while (idx > 0 && name.charAt(idx - 1) == '$') idx -= 1 - name drop idx + 2 + name.drop(idx + 2) } - } @deprecated("use unexpandedName", "2.11.0") def originalName(name: Name): Name = unexpandedName(name) @deprecated("use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name = name.dropModule @@ -506,9 +530,9 @@ trait StdNames { // Nominally, name$default$N, encoded for def defaultGetterName(name: Name, pos: Int): TermName = ( if (isConstructorName(name)) - DEFAULT_GETTER_INIT_STRING + pos + nameType(DEFAULT_GETTER_INIT_STRING + pos) else - name + DEFAULT_GETTER_STRING + pos + nameType(name.toString + DEFAULT_GETTER_STRING + pos) ) // Nominally, name from name$default$N, CONSTRUCTOR for def defaultGetterToMethod(name: Name): TermName = ( @@ -520,8 +544,23 @@ trait StdNames { } ) + def splitDefaultGetterName(name: Name): (Name, Int) = { + val (n, i) = + if (name.startsWith(DEFAULT_GETTER_INIT_STRING)) (nme.CONSTRUCTOR, DEFAULT_GETTER_INIT_STRING.length) + else 
name.indexOf(DEFAULT_GETTER_STRING) match { + case -1 => (name.toTermName, -1) + case idx => (name.toTermName.take(idx), idx + DEFAULT_GETTER_STRING.length) + } + if (i < 0) (n, -1) + else { + val j = name.indexOf('$', i) // f$default$7$extension + val idx = name.subSequence(i, if (j < 0) name.length else j) + (n, idx.toString.toInt) + } + } + def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">") - def superName(name: Name, mix: Name = EMPTY): TermName = newTermName(SUPER_PREFIX_STRING + name + (if (mix.isEmpty) "" else "$" + mix)) + def superName(name: Name, mix: Name = EMPTY): TermName = newTermName(s"${SUPER_PREFIX_STRING}${name}${if (mix.isEmpty) "" else s"$$$mix"}") /** The name of an accessor for protected symbols. */ def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name) @@ -533,42 +572,42 @@ trait StdNames { private def existentialName0(i: Int) = newTypeName("_" + i) final def existentialName(i: Int): TypeName = if (i < existentialNames.length) existentialNames(i) else existentialName0(i) - final val Nil: NameType = "Nil" - final val Predef: NameType = "Predef" - - val _1 : NameType = "_1" - val _2 : NameType = "_2" - val _3 : NameType = "_3" - val _4 : NameType = "_4" - val _5 : NameType = "_5" - val _6 : NameType = "_6" - val _7 : NameType = "_7" - val _8 : NameType = "_8" - val _9 : NameType = "_9" - val _10 : NameType = "_10" - val _11 : NameType = "_11" - val _12 : NameType = "_12" - val _13 : NameType = "_13" - val _14 : NameType = "_14" - val _15 : NameType = "_15" - val _16 : NameType = "_16" - val _17 : NameType = "_17" - val _18 : NameType = "_18" - val _19 : NameType = "_19" - val _20 : NameType = "_20" - val _21 : NameType = "_21" - val _22 : NameType = "_22" - - val x_0 : NameType = "x$0" - val x_1 : NameType = "x$1" - val x_2 : NameType = "x$2" - val x_3 : NameType = "x$3" - val x_4 : NameType = "x$4" - val x_5 : NameType = "x$5" - val x_6 : NameType = "x$6" - val x_7 : NameType = 
"x$7" - val x_8 : NameType = "x$8" - val x_9 : NameType = "x$9" + final val Nil: NameType = nameType("Nil") + final val Predef: NameType = nameType("Predef") + + val _1 : NameType = nameType("_1") + val _2 : NameType = nameType("_2") + val _3 : NameType = nameType("_3") + val _4 : NameType = nameType("_4") + val _5 : NameType = nameType("_5") + val _6 : NameType = nameType("_6") + val _7 : NameType = nameType("_7") + val _8 : NameType = nameType("_8") + val _9 : NameType = nameType("_9") + val _10 : NameType = nameType("_10") + val _11 : NameType = nameType("_11") + val _12 : NameType = nameType("_12") + val _13 : NameType = nameType("_13") + val _14 : NameType = nameType("_14") + val _15 : NameType = nameType("_15") + val _16 : NameType = nameType("_16") + val _17 : NameType = nameType("_17") + val _18 : NameType = nameType("_18") + val _19 : NameType = nameType("_19") + val _20 : NameType = nameType("_20") + val _21 : NameType = nameType("_21") + val _22 : NameType = nameType("_22") + + val x_0 : NameType = nameType("x$0") + val x_1 : NameType = nameType("x$1") + val x_2 : NameType = nameType("x$2") + val x_3 : NameType = nameType("x$3") + val x_4 : NameType = nameType("x$4") + val x_5 : NameType = nameType("x$5") + val x_6 : NameType = nameType("x$6") + val x_7 : NameType = nameType("x$7") + val x_8 : NameType = nameType("x$8") + val x_9 : NameType = nameType("x$9") def syntheticParamName(i: Int): TermName = (i: @switch) match { case 0 => nme.x_0 @@ -581,7 +620,7 @@ trait StdNames { case 7 => nme.x_7 case 8 => nme.x_8 case 9 => nme.x_9 - case _ => newTermName("x$" + i) + case _ => newTermName(s"x$$$i") } def productAccessorName(j: Int): TermName = (j: @switch) match { @@ -611,354 +650,371 @@ trait StdNames { } val ??? 
= encode("???") - - val wrapRefArray: NameType = "wrapRefArray" - val wrapByteArray: NameType = "wrapByteArray" - val wrapShortArray: NameType = "wrapShortArray" - val wrapCharArray: NameType = "wrapCharArray" - val wrapIntArray: NameType = "wrapIntArray" - val wrapLongArray: NameType = "wrapLongArray" - val wrapFloatArray: NameType = "wrapFloatArray" - val wrapDoubleArray: NameType = "wrapDoubleArray" - val wrapBooleanArray: NameType = "wrapBooleanArray" - val wrapUnitArray: NameType = "wrapUnitArray" - val genericWrapArray: NameType = "genericWrapArray" - - val double2Double: NameType = "double2Double" - val float2Float: NameType = "float2Float" - val byte2Byte: NameType = "byte2Byte" - val short2Short: NameType = "short2Short" - val char2Character: NameType = "char2Character" - val int2Integer: NameType = "int2Integer" - val long2Long: NameType = "long2Long" - val boolean2Boolean: NameType = "boolean2Boolean" + val =:= = encode("=:=") + val <:< = encode("<:<") + + val DummyImplicit: NameType = nameType("DummyImplicit") + + val wrapRefArray: NameType = nameType("wrapRefArray") + val wrapByteArray: NameType = nameType("wrapByteArray") + val wrapShortArray: NameType = nameType("wrapShortArray") + val wrapCharArray: NameType = nameType("wrapCharArray") + val wrapIntArray: NameType = nameType("wrapIntArray") + val wrapLongArray: NameType = nameType("wrapLongArray") + val wrapFloatArray: NameType = nameType("wrapFloatArray") + val wrapDoubleArray: NameType = nameType("wrapDoubleArray") + val wrapBooleanArray: NameType = nameType("wrapBooleanArray") + val wrapUnitArray: NameType = nameType("wrapUnitArray") + val genericWrapArray: NameType = nameType("genericWrapArray") + + val copyArrayToImmutableIndexedSeq: NameType = nameType("copyArrayToImmutableIndexedSeq") + + val double2Double: NameType = nameType("double2Double") + val float2Float: NameType = nameType("float2Float") + val byte2Byte: NameType = nameType("byte2Byte") + val short2Short: NameType = 
nameType("short2Short") + val char2Character: NameType = nameType("char2Character") + val int2Integer: NameType = nameType("int2Integer") + val long2Long: NameType = nameType("long2Long") + val boolean2Boolean: NameType = nameType("boolean2Boolean") // Scala 3 import syntax - val as: NameType = "as" - - // Scala 3 soft keywords - val infix: NameType = "infix" - val open: NameType = "open" - val using: NameType = "using" + val as: NameType = nameType("as") // Scala 3 hard keywords - val `given`: NameType = "given" + val `enum`: NameType = nameType("enum") + val `export`: NameType = nameType("export") + val `given`: NameType = nameType("given") + val `then`: NameType = nameType("then") + + // Scala 3 soft keywords + val infix: NameType = nameType("infix") + val open: NameType = nameType("open") + val using: NameType = nameType("using") // Compiler utilized names - val AnnotatedType: NameType = "AnnotatedType" - val Annotation: NameType = "Annotation" - val Any: NameType = "Any" - val AnyVal: NameType = "AnyVal" - val Apply: NameType = "Apply" - val ArrayAnnotArg: NameType = "ArrayAnnotArg" - val CaseDef: NameType = "CaseDef" - val ClassInfoType: NameType = "ClassInfoType" - val ConstantType: NameType = "ConstantType" - val EmptyPackage: NameType = "EmptyPackage" - val EmptyPackageClass: NameType = "EmptyPackageClass" - val ExistentialType: NameType = "ExistentialType" - val Flag : NameType = "Flag" - val FlagsRepr: NameType = "FlagsRepr" - val Ident: NameType = "Ident" - val ImplicitParams: NameType = "ImplicitParams" - val Import: NameType = "Import" - val Literal: NameType = "Literal" - val LiteralAnnotArg: NameType = "LiteralAnnotArg" - val MethodType: NameType = "MethodType" - val Modifiers: NameType = "Modifiers" - val NestedAnnotArg: NameType = "NestedAnnotArg" - val New: NameType = "New" - val NoFlags: NameType = "NoFlags" - val NoSymbol: NameType = "NoSymbol" - val NoMods: NameType = "NoMods" - val Nothing: NameType = "Nothing" - val Null: NameType = "Null" - 
val NullaryMethodType: NameType = "NullaryMethodType" - val Object: NameType = "Object" - val PolyType: NameType = "PolyType" - val RefinedType: NameType = "RefinedType" - val RootPackage: NameType = "RootPackage" - val RootClass: NameType = "RootClass" - val Select: NameType = "Select" - val SelectFromTypeTree: NameType = "SelectFromTypeTree" - val SingleType: NameType = "SingleType" - val SuperType: NameType = "SuperType" - val This: NameType = "This" - val ThisType: NameType = "ThisType" - val Tuple2: NameType = "Tuple2" - val TYPE_ : NameType = "TYPE" - val TypeBounds: NameType = "TypeBounds" - val TypeRef: NameType = "TypeRef" - val TypeTree: NameType = "TypeTree" - val UNIT : NameType = "UNIT" - val accessor: NameType = "accessor" - val add_ : NameType = "add" - val annotation: NameType = "annotation" - val anyHash: NameType = "anyHash" - val anyValClass: NameType = "anyValClass" - val apply: NameType = "apply" - val applyDynamic: NameType = "applyDynamic" - val applyDynamicNamed: NameType = "applyDynamicNamed" - val applyOrElse: NameType = "applyOrElse" - val args : NameType = "args" - val arrayClass: NameType = "arrayClass" - val array_apply : NameType = "array_apply" - val array_clone : NameType = "array_clone" - val array_length : NameType = "array_length" - val array_update : NameType = "array_update" - val asModule: NameType = "asModule" - val asType: NameType = "asType" - val asInstanceOf_ : NameType = "asInstanceOf" - val asInstanceOf_Ob : NameType = "$asInstanceOf" - val async : NameType = "async" - val await : NameType = "await" - val box: NameType = "box" - val byteValue: NameType = "byteValue" - val bytes: NameType = "bytes" - val c: NameType = "c" - val canEqual_ : NameType = "canEqual" - val classOf: NameType = "classOf" - val clone_ : NameType = "clone" - val collection: NameType = "collection" - val conforms: NameType = "$conforms" // dollar prefix to avoid accidental shadowing - val copy: NameType = "copy" - val create: NameType = "create" - 
val currentMirror: NameType = "currentMirror" - val delayedInit: NameType = "delayedInit" - val delayedInitArg: NameType = "delayedInit$body" - val dollarScope: NameType = "$scope" - val doubleHash: NameType = "doubleHash" - val doubleValue: NameType = "doubleValue" - val drop: NameType = "drop" - val elem: NameType = "elem" - val noSelfType: NameType = "noSelfType" - val empty: NameType = "empty" - val ensureAccessible : NameType = "ensureAccessible" - val eq: NameType = "eq" - val equalsNumChar : NameType = "equalsNumChar" - val equalsNumNum : NameType = "equalsNumNum" - val equalsNumObject : NameType = "equalsNumObject" - val equals_ : NameType = "equals" - val error: NameType = "error" - val ex: NameType = "ex" - val experimental: NameType = "experimental" - val f: NameType = "f" - val false_ : NameType = "false" - val filter: NameType = "filter" - val finalize_ : NameType = "finalize" - val find_ : NameType = "find" - val flatMap: NameType = "flatMap" - val floatHash: NameType = "floatHash" - val floatValue: NameType = "floatValue" - val foreach: NameType = "foreach" - val freshTermName: NameType = "freshTermName" - val freshTypeName: NameType = "freshTypeName" - val get: NameType = "get" - val parameterTypes: NameType = "parameterTypes" - val hashCode_ : NameType = "hashCode" - val head : NameType = "head" - val immutable: NameType = "immutable" - val implicitly: NameType = "implicitly" - val in: NameType = "in" - val initialize : NameType = "initialize" - val initialized : NameType = "initialized" - val internal: NameType = "internal" - val intValue: NameType = "intValue" - val inlinedEquals: NameType = "inlinedEquals" - val isArray: NameType = "isArray" - val isDefinedAt: NameType = "isDefinedAt" - val isEmpty: NameType = "isEmpty" - val isInfinite: NameType = "isInfinite" - val isInstanceOf_ : NameType = "isInstanceOf" - val isInstanceOf_Ob : NameType = "$isInstanceOf" - val isNaN: NameType = "isNaN" - val java: NameType = "java" - val key: NameType = 
"key" - val lang: NameType = "lang" - val length: NameType = "length" - val lengthCompare: NameType = "lengthCompare" - val locally: NameType = "locally" - val longHash: NameType = "longHash" - val longValue: NameType = "longValue" - val macroContext : NameType = "c" - val main: NameType = "main" - val manifestToTypeTag: NameType = "manifestToTypeTag" - val map: NameType = "map" - val materializeClassTag: NameType = "materializeClassTag" - val materializeWeakTypeTag: NameType = "materializeWeakTypeTag" - val materializeTypeTag: NameType = "materializeTypeTag" - val moduleClass : NameType = "moduleClass" - val mkAnnotation: NameType = "mkAnnotation" - val mkEarlyDef: NameType = "mkEarlyDef" - val mkIdent: NameType = "mkIdent" - val mkPackageStat: NameType = "mkPackageStat" - val mkRefineStat: NameType = "mkRefineStat" - val mkRefTree: NameType = "mkRefTree" - val mkSelect: NameType = "mkSelect" - val mkThis: NameType = "mkThis" - val mkTypeTree: NameType = "mkTypeTree" - val ne: NameType = "ne" - val newArray: NameType = "newArray" - val newFreeTerm: NameType = "newFreeTerm" - val newFreeType: NameType = "newFreeType" - val newNestedSymbol: NameType = "newNestedSymbol" - val newScopeWith: NameType = "newScopeWith" - val notifyAll_ : NameType = "notifyAll" - val notify_ : NameType = "notify" - val null_ : NameType = "null" - val pendingSuperCall: NameType = "pendingSuperCall" - val prefix : NameType = "prefix" - val productArity: NameType = "productArity" - val productElement: NameType = "productElement" - val productIterator: NameType = "productIterator" - val productPrefix: NameType = "productPrefix" - val raw_ : NameType = "raw" - val readResolve: NameType = "readResolve" - val reify : NameType = "reify" - val reificationSupport : NameType = "reificationSupport" - val rootMirror : NameType = "rootMirror" - val runtime: NameType = "runtime" - val runtimeClass: NameType = "runtimeClass" - val runtimeMirror: NameType = "runtimeMirror" - val s: NameType = "s" - val 
scala_ : NameType = "scala" - val selectDynamic: NameType = "selectDynamic" - val selectOverloadedMethod: NameType = "selectOverloadedMethod" - val selectTerm: NameType = "selectTerm" - val selectType: NameType = "selectType" - val self: NameType = "self" - val setAnnotations: NameType = "setAnnotations" - val setInfo: NameType = "setInfo" - val setSymbol: NameType = "setSymbol" - val setType: NameType = "setType" - val shortValue: NameType = "shortValue" - val splice: NameType = "splice" - val staticClass : NameType = "staticClass" - val staticModule : NameType = "staticModule" - val staticPackage : NameType = "staticPackage" - val synchronized_ : NameType = "synchronized" - val ScalaDot: NameType = "ScalaDot" - val TermName: NameType = "TermName" - val this_ : NameType = "this" - val thisPrefix : NameType = "thisPrefix" - val toArray: NameType = "toArray" - val toList: NameType = "toList" - val toObjectArray : NameType = "toObjectArray" - val toStats: NameType = "toStats" - val TopScope: NameType = "TopScope" - val toString_ : NameType = "toString" - val toTypeConstructor: NameType = "toTypeConstructor" - val tpe : NameType = "tpe" - val tree : NameType = "tree" - val true_ : NameType = "true" - val typedProductIterator: NameType = "typedProductIterator" - val TypeName: NameType = "TypeName" - val typeTagToManifest: NameType = "typeTagToManifest" - val unapply: NameType = "unapply" - val unapplySeq: NameType = "unapplySeq" - val unbox: NameType = "unbox" - val unit: NameType = "unit" - val universe: NameType = "universe" - val UnliftListElementwise: NameType = "UnliftListElementwise" - val UnliftListOfListsElementwise: NameType = "UnliftListOfListsElementwise" - val update: NameType = "update" - val updateDynamic: NameType = "updateDynamic" - val value: NameType = "value" - val valueOf : NameType = "valueOf" - val values : NameType = "values" - val wait_ : NameType = "wait" - val withFilter: NameType = "withFilter" - val writeReplace: NameType = "writeReplace" - 
val xml: NameType = "xml" - val zero: NameType = "zero" + val AnnotatedType: NameType = nameType("AnnotatedType") + val Annotation: NameType = nameType("Annotation") + val Any: NameType = nameType("Any") + val AnyVal: NameType = nameType("AnyVal") + val Apply: NameType = nameType("Apply") + val ArrayAnnotArg: NameType = nameType("ArrayAnnotArg") + val CaseDef: NameType = nameType("CaseDef") + val ClassInfoType: NameType = nameType("ClassInfoType") + val ConstantType: NameType = nameType("ConstantType") + val EmptyPackage: NameType = nameType("EmptyPackage") + val EmptyPackageClass: NameType = nameType("EmptyPackageClass") + val ExistentialType: NameType = nameType("ExistentialType") + val Flag : NameType = nameType("Flag") + val FlagsRepr: NameType = nameType("FlagsRepr") + val Ident: NameType = nameType("Ident") + val ImplicitParams: NameType = nameType("ImplicitParams") + val Import: NameType = nameType("Import") + val Literal: NameType = nameType("Literal") + val LiteralAnnotArg: NameType = nameType("LiteralAnnotArg") + val MethodType: NameType = nameType("MethodType") + val Modifiers: NameType = nameType("Modifiers") + val NestedAnnotArg: NameType = nameType("NestedAnnotArg") + val New: NameType = nameType("New") + val NoFlags: NameType = nameType("NoFlags") + val NoSymbol: NameType = nameType("NoSymbol") + val NoMods: NameType = nameType("NoMods") + val Nothing: NameType = nameType("Nothing") + val Null: NameType = nameType("Null") + val NullaryMethodType: NameType = nameType("NullaryMethodType") + val Object: NameType = nameType("Object") + val PolyType: NameType = nameType("PolyType") + val RefinedType: NameType = nameType("RefinedType") + val RootPackage: NameType = nameType("RootPackage") + val RootClass: NameType = nameType("RootClass") + val Select: NameType = nameType("Select") + val SelectFromTypeTree: NameType = nameType("SelectFromTypeTree") + val SingleType: NameType = nameType("SingleType") + val SuperType: NameType = nameType("SuperType") + val 
This: NameType = nameType("This") + val ThisType: NameType = nameType("ThisType") + val Tuple2: NameType = nameType("Tuple2") + val TYPE_ : NameType = nameType("TYPE") + val TypeBounds: NameType = nameType("TypeBounds") + val TypeRef: NameType = nameType("TypeRef") + val TypeTree: NameType = nameType("TypeTree") + val UNIT : NameType = nameType("UNIT") + val accessor: NameType = nameType("accessor") + val add_ : NameType = nameType("add") + val annotation: NameType = nameType("annotation") + val any2stringadd: NameType = nameType("any2stringadd") + val anyHash: NameType = nameType("anyHash") + val anyValClass: NameType = nameType("anyValClass") + val apply: NameType = nameType("apply") + val applyDynamic: NameType = nameType("applyDynamic") + val applyDynamicNamed: NameType = nameType("applyDynamicNamed") + val applyOrElse: NameType = nameType("applyOrElse") + val args : NameType = nameType("args") + val arrayClass: NameType = nameType("arrayClass") + val array_apply : NameType = nameType("array_apply") + val array_clone : NameType = nameType("array_clone") + val array_length : NameType = nameType("array_length") + val array_update : NameType = nameType("array_update") + val asModule: NameType = nameType("asModule") + val asType: NameType = nameType("asType") + val asInstanceOf_ : NameType = nameType("asInstanceOf") + val asInstanceOf_Ob : NameType = nameType(s"$$asInstanceOf") // looks like missing interpolator due to Any member in scope + val async : NameType = nameType("async") + val await : NameType = nameType("await") + val box: NameType = nameType("box") + val byteValue: NameType = nameType("byteValue") + val bytes: NameType = nameType("bytes") + val c: NameType = nameType("c") + val canEqual_ : NameType = nameType("canEqual") + val classOf: NameType = nameType("classOf") + val clone_ : NameType = nameType("clone") + val collection: NameType = nameType("collection") + val conforms: NameType = nameType(s"$$conforms") // $ prefix to avoid shadowing 
Predef.conforms + val copy: NameType = nameType("copy") + val create: NameType = nameType("create") + val currentMirror: NameType = nameType("currentMirror") + val curried: NameType = nameType("curried") + val delayedInit: NameType = nameType("delayedInit") + val delayedInitArg: NameType = nameType("delayedInit$body") + val dollarScope: NameType = nameType("$scope") + val doubleHash: NameType = nameType("doubleHash") + val doubleValue: NameType = nameType("doubleValue") + val drop: NameType = nameType("drop") + val elem: NameType = nameType("elem") + val noSelfType: NameType = nameType("noSelfType") + val empty: NameType = nameType("empty") + val ensureAccessible : NameType = nameType("ensureAccessible") + val eq: NameType = nameType("eq") + val equalsNumChar : NameType = nameType("equalsNumChar") + val equalsNumNum : NameType = nameType("equalsNumNum") + val equalsNumObject : NameType = nameType("equalsNumObject") + val equals_ : NameType = nameType("equals") + val error: NameType = nameType("error") + val ex: NameType = nameType("ex") + val experimental: NameType = nameType("experimental") + val f: NameType = nameType("f") + val false_ : NameType = nameType("false") + val filter: NameType = nameType("filter") + val finalize_ : NameType = nameType("finalize") + val find_ : NameType = nameType("find") + val flatMap: NameType = nameType("flatMap") + val floatHash: NameType = nameType("floatHash") + val floatValue: NameType = nameType("floatValue") + val foreach: NameType = nameType("foreach") + val freshTermName: NameType = nameType("freshTermName") + val freshTypeName: NameType = nameType("freshTypeName") + val get: NameType = nameType("get") + val hashCode_ : NameType = nameType("hashCode") + val head : NameType = nameType("head") + val immutable: NameType = nameType("immutable") + val implicitly: NameType = nameType("implicitly") + val in: NameType = nameType("in") + val initialize : NameType = nameType("initialize") + val initialized : NameType = 
nameType("initialized") + val internal: NameType = nameType("internal") + val inlinedEquals: NameType = nameType("inlinedEquals") + val intValue: NameType = nameType("intValue") + val ioobe : NameType = nameType("ioobe") + val isArray: NameType = nameType("isArray") + val isDefinedAt: NameType = nameType("isDefinedAt") + val isEmpty: NameType = nameType("isEmpty") + val isInfinite: NameType = nameType("isInfinite") + val isInstanceOf_ : NameType = nameType("isInstanceOf") + val isInstanceOf_Ob : NameType = nameType(s"$$isInstanceOf") // looks like missing interpolator due to Any member in scope + val isNaN: NameType = nameType("isNaN") + val java: NameType = nameType("java") + val key: NameType = nameType("key") + val lang: NameType = nameType("lang") + val length: NameType = nameType("length") + val lengthCompare: NameType = nameType("lengthCompare") + val locally: NameType = nameType("locally") + val longHash: NameType = nameType("longHash") + val longValue: NameType = nameType("longValue") + val macroContext : NameType = nameType("c") + val main: NameType = nameType("main") + val manifestToTypeTag: NameType = nameType("manifestToTypeTag") + val map: NameType = nameType("map") + val materializeClassTag: NameType = nameType("materializeClassTag") + val materializeWeakTypeTag: NameType = nameType("materializeWeakTypeTag") + val materializeTypeTag: NameType = nameType("materializeTypeTag") + val moduleClass : NameType = nameType("moduleClass") + val mkAnnotation: NameType = nameType("mkAnnotation") + val mkEarlyDef: NameType = nameType("mkEarlyDef") + val mkIdent: NameType = nameType("mkIdent") + val mkPackageStat: NameType = nameType("mkPackageStat") + val mkRefineStat: NameType = nameType("mkRefineStat") + val mkRefTree: NameType = nameType("mkRefTree") + val mkSelect: NameType = nameType("mkSelect") + val mkThis: NameType = nameType("mkThis") + val mkTypeTree: NameType = nameType("mkTypeTree") + val ne: NameType = nameType("ne") + val newArray: NameType = 
nameType("newArray") + val newFreeTerm: NameType = nameType("newFreeTerm") + val newFreeType: NameType = nameType("newFreeType") + val newNestedSymbol: NameType = nameType("newNestedSymbol") + val newScopeWith: NameType = nameType("newScopeWith") + val notifyAll_ : NameType = nameType("notifyAll") + val notify_ : NameType = nameType("notify") + val null_ : NameType = nameType("null") + val parameterTypes: NameType = nameType("parameterTypes") + val pendingSuperCall: NameType = nameType("pendingSuperCall") + val prefix : NameType = nameType("prefix") + val productArity: NameType = nameType("productArity") + val productElement: NameType = nameType("productElement") + val productElementName: NameType = nameType("productElementName") + val productIterator: NameType = nameType("productIterator") + val productPrefix: NameType = nameType("productPrefix") + val raw_ : NameType = nameType("raw") + val readResolve: NameType = nameType("readResolve") + val releaseFence: NameType = nameType("releaseFence") + val refl: NameType = nameType("refl") + val reify : NameType = nameType("reify") + val reificationSupport : NameType = nameType("reificationSupport") + val rootMirror : NameType = nameType("rootMirror") + val runtime: NameType = nameType("runtime") + val runtimeClass: NameType = nameType("runtimeClass") + val runtimeMirror: NameType = nameType("runtimeMirror") + val s: NameType = nameType("s") + val scala_ : NameType = nameType("scala") + val selectDynamic: NameType = nameType("selectDynamic") + val selectOverloadedMethod: NameType = nameType("selectOverloadedMethod") + val selectTerm: NameType = nameType("selectTerm") + val selectType: NameType = nameType("selectType") + val self: NameType = nameType("self") + val setAnnotations: NameType = nameType("setAnnotations") + val setInfo: NameType = nameType("setInfo") + val setSymbol: NameType = nameType("setSymbol") + val setType: NameType = nameType("setType") + val shortValue: NameType = nameType("shortValue") + val splice: 
NameType = nameType("splice") + val staticClass : NameType = nameType("staticClass") + val staticModule : NameType = nameType("staticModule") + val staticPackage : NameType = nameType("staticPackage") + val synchronized_ : NameType = nameType("synchronized") + val ScalaDot: NameType = nameType("ScalaDot") + val TermName: NameType = nameType("TermName") + val this_ : NameType = nameType("this") + val thisPrefix : NameType = nameType("thisPrefix") + val toArray: NameType = nameType("toArray") + val toList: NameType = nameType("toList") + val toObjectArray : NameType = nameType("toObjectArray") + val toSeq: NameType = nameType("toSeq") + val toStats: NameType = nameType("toStats") + val TopScope: NameType = nameType("TopScope") + val toString_ : NameType = nameType("toString") + val toTypeConstructor: NameType = nameType("toTypeConstructor") + val tpe : NameType = nameType("tpe") + val tree : NameType = nameType("tree") + val true_ : NameType = nameType("true") + val tupled: NameType = nameType("tupled") + val typedProductIterator: NameType = nameType("typedProductIterator") + val TypeName: NameType = nameType("TypeName") + val typeTagToManifest: NameType = nameType("typeTagToManifest") + val unapply: NameType = nameType("unapply") + val unapplySeq: NameType = nameType("unapplySeq") + val unbox: NameType = nameType("unbox") + val unit: NameType = nameType("unit") + val universe: NameType = nameType("universe") + val UnliftListElementwise: NameType =nameType( "UnliftListElementwise") + val UnliftListOfListsElementwise: NameType = nameType("UnliftListOfListsElementwise") + val update: NameType = nameType("update") + val updateDynamic: NameType = nameType("updateDynamic") + val value: NameType = nameType("value") + val valueOf : NameType = nameType("valueOf") + val values : NameType = nameType("values") + val wait_ : NameType = nameType("wait") + val withFilter: NameType = nameType("withFilter") + val writeReplace: NameType = nameType("writeReplace") + val xml: NameType 
= nameType("xml") + val zero: NameType = nameType("zero") // async - val result : NameType = "result$async" - val awaitable : NameType = "awaitable$async" - val completed : NameType = "completed$async" - val stateMachine : NameType = "stateMachine$async" - val state : NameType = "state" - val tr : NameType = "tr$async" - val t : NameType = "throwable$async" - val trGetResult : NameType = "tryGetResult$async" + val result : NameType = nameType(s"result$$async") // avoid missing interpolator warnings + val awaitable : NameType = nameType(s"awaitable$$async") + val completed : NameType = nameType(s"completed$$async") + val stateMachine : NameType = nameType(s"stateMachine$$async") + val state : NameType = nameType("state") + val tr : NameType = nameType(s"tr$$async") + val t : NameType = nameType(s"throwable$$async") + val trGetResult : NameType = nameType(s"tryGetResult$$async") // quasiquote interpolators: - val q: NameType = "q" - val tq: NameType = "tq" - val cq: NameType = "cq" - val pq: NameType = "pq" - val fq: NameType = "fq" + val q: NameType = nameType("q") + val tq: NameType = nameType("tq") + val cq: NameType = nameType("cq") + val pq: NameType = nameType("pq") + val fq: NameType = nameType("fq") // quasiquote's syntactic combinators - val SyntacticAnnotatedType: NameType = "SyntacticAnnotatedType" - val SyntacticApplied: NameType = "SyntacticApplied" - val SyntacticAppliedType: NameType = "SyntacticAppliedType" - val SyntacticAssign: NameType = "SyntacticAssign" - val SyntacticBlock: NameType = "SyntacticBlock" - val SyntacticClassDef: NameType = "SyntacticClassDef" - val SyntacticCompoundType: NameType = "SyntacticCompoundType" - val SyntacticDefDef: NameType = "SyntacticDefDef" - val SyntacticEmptyTypeTree: NameType = "SyntacticEmptyTypeTree" - val SyntacticExistentialType: NameType = "SyntacticExistentialType" - val SyntacticFilter: NameType = "SyntacticFilter" - val SyntacticFor: NameType = "SyntacticFor" - val SyntacticForYield: NameType = 
"SyntacticForYield" - val SyntacticFunction: NameType = "SyntacticFunction" - val SyntacticFunctionType: NameType = "SyntacticFunctionType" - val SyntacticImport: NameType = "SyntacticImport" - val SyntacticMatch: NameType = "SyntacticMatch" - val SyntacticNew: NameType = "SyntacticNew" - val SyntacticObjectDef: NameType = "SyntacticObjectDef" - val SyntacticPackageObjectDef: NameType = "SyntacticPackageObjectDef" - val SyntacticPartialFunction: NameType = "SyntacticPartialFunction" - val SyntacticPatDef: NameType = "SyntacticPatDef" - val SyntacticSelectTerm: NameType = "SyntacticSelectTerm" - val SyntacticSelectType: NameType = "SyntacticSelectType" - val SyntacticSingletonType: NameType = "SyntacticSingletonType" - val SyntacticTermIdent: NameType = "SyntacticTermIdent" - val SyntacticTraitDef: NameType = "SyntacticTraitDef" - val SyntacticTry: NameType = "SyntacticTry" - val SyntacticTuple: NameType = "SyntacticTuple" - val SyntacticTupleType: NameType = "SyntacticTupleType" - val SyntacticTypeApplied: NameType = "SyntacticTypeApplied" - val SyntacticTypeIdent: NameType = "SyntacticTypeIdent" - val SyntacticTypeProjection: NameType = "SyntacticTypeProjection" - val SyntacticValDef: NameType = "SyntacticValDef" - val SyntacticValEq: NameType = "SyntacticValEq" - val SyntacticValFrom: NameType = "SyntacticValFrom" - val SyntacticVarDef: NameType = "SyntacticVarDef" + val SyntacticAnnotatedType: NameType = nameType("SyntacticAnnotatedType") + val SyntacticApplied: NameType = nameType("SyntacticApplied") + val SyntacticAppliedType: NameType = nameType("SyntacticAppliedType") + val SyntacticAssign: NameType = nameType("SyntacticAssign") + val SyntacticBlock: NameType = nameType("SyntacticBlock") + val SyntacticClassDef: NameType = nameType("SyntacticClassDef") + val SyntacticCompoundType: NameType = nameType("SyntacticCompoundType") + val SyntacticDefDef: NameType = nameType("SyntacticDefDef") + val SyntacticEmptyTypeTree: NameType = 
nameType("SyntacticEmptyTypeTree") + val SyntacticExistentialType: NameType = nameType("SyntacticExistentialType") + val SyntacticFilter: NameType = nameType("SyntacticFilter") + val SyntacticFor: NameType = nameType("SyntacticFor") + val SyntacticForYield: NameType = nameType("SyntacticForYield") + val SyntacticFunction: NameType = nameType("SyntacticFunction") + val SyntacticFunctionType: NameType = nameType("SyntacticFunctionType") + val SyntacticImport: NameType = nameType("SyntacticImport") + val SyntacticMatch: NameType = nameType("SyntacticMatch") + val SyntacticNew: NameType = nameType("SyntacticNew") + val SyntacticObjectDef: NameType = nameType("SyntacticObjectDef") + val SyntacticPackageObjectDef: NameType = nameType("SyntacticPackageObjectDef") + val SyntacticPartialFunction: NameType = nameType("SyntacticPartialFunction") + val SyntacticPatDef: NameType = nameType("SyntacticPatDef") + val SyntacticSelectTerm: NameType = nameType("SyntacticSelectTerm") + val SyntacticSelectType: NameType = nameType("SyntacticSelectType") + val SyntacticSingletonType: NameType = nameType("SyntacticSingletonType") + val SyntacticTermIdent: NameType = nameType("SyntacticTermIdent") + val SyntacticTraitDef: NameType = nameType("SyntacticTraitDef") + val SyntacticTry: NameType = nameType("SyntacticTry") + val SyntacticTuple: NameType = nameType("SyntacticTuple") + val SyntacticTupleType: NameType = nameType("SyntacticTupleType") + val SyntacticTypeApplied: NameType = nameType("SyntacticTypeApplied") + val SyntacticTypeIdent: NameType = nameType("SyntacticTypeIdent") + val SyntacticTypeProjection: NameType = nameType("SyntacticTypeProjection") + val SyntacticValDef: NameType = nameType("SyntacticValDef") + val SyntacticValEq: NameType = nameType("SyntacticValEq") + val SyntacticValFrom: NameType = nameType("SyntacticValFrom") + val SyntacticVarDef: NameType = nameType("SyntacticVarDef") // unencoded operators object raw { - final val BANG : NameType = "!" 
- final val BAR : NameType = "|" - final val DOLLAR: NameType = "$" - final val GE: NameType = ">=" - final val LE: NameType = "<=" - final val MINUS: NameType = "-" - final val NE: NameType = "!=" - final val PLUS : NameType = "+" - final val STAR : NameType = "*" - final val TILDE: NameType = "~" - final val QMARK: NameType = "?" + final val BANG : NameType = nameType("!") + final val BAR : NameType = nameType("|") + final val DOLLAR: NameType = nameType("$") + final val GE: NameType = nameType(">=") + final val LE: NameType = nameType("<=") + final val MINUS: NameType = nameType("-") + final val NE: NameType = nameType("!=") + final val PLUS : NameType = nameType("+") + final val STAR : NameType = nameType("*") + final val TILDE: NameType = nameType("~") + final val QMARK: NameType = nameType("?") final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG) } // value-conversion methods - val toByte: NameType = "toByte" - val toShort: NameType = "toShort" - val toChar: NameType = "toChar" - val toInt: NameType = "toInt" - val toLong: NameType = "toLong" - val toFloat: NameType = "toFloat" - val toDouble: NameType = "toDouble" + val toByte: NameType = nameType("toByte") + val toShort: NameType = nameType("toShort") + val toChar: NameType = nameType("toChar") + val toInt: NameType = nameType("toInt") + val toLong: NameType = nameType("toLong") + val toFloat: NameType = nameType("toFloat") + val toDouble: NameType = nameType("toDouble") // primitive operation methods for structural types mostly // overlap with the above, but not for these two. - val toCharacter: NameType = "toCharacter" - val toInteger: NameType = "toInteger" + val toCharacter: NameType = nameType("toCharacter") + val toInteger: NameType = nameType("toInteger") def newLazyValSlowComputeName(lzyValName: Name) = (lzyValName stripSuffix MODULE_VAR_SUFFIX append LAZY_SLOW_SUFFIX).toTermName @@ -997,33 +1053,35 @@ trait StdNames { val UNARY_- = encode("unary_-") val UNARY_! 
= encode("unary_!") + val isEncodedUnary = Set[Name](UNARY_~, UNARY_+, UNARY_-, UNARY_!) + // Grouped here so Cleanup knows what tests to perform. val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE) val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames - val add: NameType = "add" - val complement: NameType = "complement" - val divide: NameType = "divide" - val multiply: NameType = "multiply" - val negate: NameType = "negate" - val positive: NameType = "positive" - val shiftLogicalRight: NameType = "shiftLogicalRight" - val shiftSignedLeft: NameType = "shiftSignedLeft" - val shiftSignedRight: NameType = "shiftSignedRight" - val subtract: NameType = "subtract" - val takeAnd: NameType = "takeAnd" - val takeConditionalAnd: NameType = "takeConditionalAnd" - val takeConditionalOr: NameType = "takeConditionalOr" - val takeModulo: NameType = "takeModulo" - val takeNot: NameType = "takeNot" - val takeOr: NameType = "takeOr" - val takeXor: NameType = "takeXor" - val testEqual: NameType = "testEqual" - val testGreaterOrEqualThan: NameType = "testGreaterOrEqualThan" - val testGreaterThan: NameType = "testGreaterThan" - val testLessOrEqualThan: NameType = "testLessOrEqualThan" - val testLessThan: NameType = "testLessThan" - val testNotEqual: NameType = "testNotEqual" + val add: NameType = nameType("add") + val complement: NameType = nameType("complement") + val divide: NameType = nameType("divide") + val multiply: NameType = nameType("multiply") + val negate: NameType = nameType("negate") + val positive: NameType = nameType("positive") + val shiftLogicalRight: NameType = nameType("shiftLogicalRight") + val shiftSignedLeft: NameType = nameType("shiftSignedLeft") + val shiftSignedRight: NameType = nameType("shiftSignedRight") + val subtract: NameType = nameType("subtract") + val takeAnd: NameType = nameType("takeAnd") + val takeConditionalAnd: NameType = nameType("takeConditionalAnd") + val takeConditionalOr: NameType = nameType("takeConditionalOr") + val 
takeModulo: NameType = nameType("takeModulo") + val takeNot: NameType = nameType("takeNot") + val takeOr: NameType = nameType("takeOr") + val takeXor: NameType = nameType("takeXor") + val testEqual: NameType = nameType("testEqual") + val testGreaterOrEqualThan: NameType = nameType("testGreaterOrEqualThan") + val testGreaterThan: NameType = nameType("testGreaterThan") + val testLessOrEqualThan: NameType = nameType("testLessOrEqualThan") + val testLessThan: NameType = nameType("testLessThan") + val testNotEqual: NameType = nameType("testNotEqual") def toUnaryName(name: TermName): TermName = name match { case raw.MINUS => UNARY_- @@ -1077,7 +1135,7 @@ trait StdNames { def primitiveMethodName(name: Name): TermName = primitiveInfixMethodName(name) match { case NO_NAME => primitivePostfixMethodName(name) - case name => name + case ok_name => ok_name } /** Translate a String into a list of simple TypeNames and TermNames. @@ -1119,10 +1177,10 @@ trait StdNames { def newBitmapName(bitmapPrefix: Name, n: Int) = bitmapPrefix append ("" + n) def isTransientBitmap(name: Name) = name == nme.BITMAP_TRANSIENT || name == nme.BITMAP_CHECKINIT_TRANSIENT - val BITMAP_NORMAL: NameType = BITMAP_PREFIX + "" // initialization bitmap for public/protected lazy vals - val BITMAP_TRANSIENT: NameType = BITMAP_PREFIX + "trans$" // initialization bitmap for transient lazy vals - val BITMAP_CHECKINIT: NameType = BITMAP_PREFIX + "init$" // initialization bitmap for checkinit values - val BITMAP_CHECKINIT_TRANSIENT: NameType = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values + val BITMAP_NORMAL: NameType = nameType(BITMAP_PREFIX + "") // initialization bitmap for public/protected lazy vals + val BITMAP_TRANSIENT: NameType = nameType(BITMAP_PREFIX + "trans$") // initialization bitmap for transient lazy vals + val BITMAP_CHECKINIT: NameType = nameType(BITMAP_PREFIX + "init$") // initialization bitmap for checkinit values + val BITMAP_CHECKINIT_TRANSIENT: NameType = 
nameType(BITMAP_PREFIX + "inittrans$") // initialization bitmap for transient checkinit values } lazy val typeNames: tpnme.type = tpnme @@ -1132,8 +1190,8 @@ trait StdNames { /** For fully qualified type names. */ object fulltpnme extends TypeNames { - val RuntimeNothing: NameType = "scala.runtime.Nothing$" - val RuntimeNull: NameType = "scala.runtime.Null$" + val RuntimeNothing: NameType = nameType("scala.runtime.Nothing$") + val RuntimeNull: NameType = nameType("scala.runtime.Null$") } /** Java binary names, like scala/runtime/Nothing$. @@ -1158,18 +1216,18 @@ trait StdNames { def getMethod_ = sn.GetMethod def invoke_ = sn.Invoke - val isBoxedNumberOrBoolean: NameType = "isBoxedNumberOrBoolean" - val isBoxedNumber: NameType = "isBoxedNumber" + val isBoxedNumberOrBoolean: NameType = nameType("isBoxedNumberOrBoolean") + val isBoxedNumber: NameType = nameType("isBoxedNumber") - val reflPolyCacheName: NameType = "reflPoly$Cache" - val reflParamsCacheName: NameType = "reflParams$Cache" - val reflMethodName: NameType = "reflMethod$Method" - val argument: NameType = "" + val reflPolyCacheName: NameType = nameType("reflPoly$Cache") + val reflParamsCacheName: NameType = nameType("reflParams$Cache") + val reflMethodName: NameType = nameType("reflMethod$Method") + val argument: NameType = nameType("") } class JavaKeywords { - private val kw = new KeywordSetBuilder + private[this] val kw = new KeywordSetBuilder final val ABSTRACTkw: TermName = kw("abstract") final val ASSERTkw: TermName = kw("assert") @@ -1227,10 +1285,22 @@ trait StdNames { final val keywords = kw.result } + // The identifiers non-sealed, permits, record, sealed, var, and yield are restricted identifiers + // because they are not allowed in some contexts. + // A type identifier is an identifier that is not the character sequence permits, record, sealed, var, or yield. + // An unqualified method identifier is an identifier that is not the character sequence yield. 
(JLS 3.8) + class JavaRestrictedIdentifiers { + final val PERMITS: TermName = TermName("permits") + final val RECORD: TermName = TermName("record") + final val SEALED: TermName = TermName("sealed") + final val UNSEALED: TermName = TermName("non-sealed") + final val NON: TermName = TermName("non") + final val VAR: TermName = TermName("var") + final val YIELD: TermName = TermName("yield") + } + sealed abstract class SymbolNames { - protected val stringToTermName = null - protected val stringToTypeName = null - protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) + protected def nameType(s: String): TypeName = newTypeNameCached(s) final val BoxedBoolean: String = "java.lang.Boolean" final val BoxedByte: String = "java.lang.Byte" diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index 98c0056d3c84..f0a6ed671039 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,7 @@ package scala package reflect package internal -import scala.collection.mutable -import util.HashSet +import util.{HashSet, StringContextStripMarginOps} import scala.annotation.tailrec /** An abstraction for considering symbol pairs. @@ -37,13 +36,6 @@ abstract class SymbolPairs { val global: SymbolTable import global._ - /** Are types tp1 and tp2 equivalent seen from the perspective - * of `baseClass`? For instance List[Int] and Seq[Int] are =:= - * when viewed from IterableClass. 
- */ - def sameInBaseClass(baseClass: Symbol)(tp1: Type, tp2: Type) = - (tp1 baseType baseClass) =:= (tp2 baseType baseClass) - final case class SymbolPair(base: Symbol, low: Symbol, high: Symbol) { private[this] val self = base.thisType @@ -51,12 +43,12 @@ abstract class SymbolPairs { def rootType: Type = self def lowType: Type = self memberType low - def lowErased: Type = erasure.specialErasure(base)(low.tpe) + def lowErased: Type = erasure.specialErasure(low)(low.tpe) def lowClassBound: Type = classBoundAsSeen(low.tpe.typeSymbol) def highType: Type = self memberType high def highInfo: Type = self memberInfo high - def highErased: Type = erasure.specialErasure(base)(high.tpe) + def highErased: Type = erasure.specialErasure(high)(high.tpe) def highClassBound: Type = classBoundAsSeen(high.tpe.typeSymbol) def isErroneous = low.tpe.isErroneous || high.tpe.isErroneous @@ -98,9 +90,10 @@ abstract class SymbolPairs { abstract class Cursor(val base: Symbol) { cursor => - final val self = base.thisType // The type relative to which symbols are seen. - private val decls = newScope // all the symbols which can take part in a pair. - private val size = bases.length + final val self = base.thisType // The type relative to which symbols are seen. + private[this] val decls = newScope // all the symbols which can take part in a pair. + @annotation.unused + private[this] val size = bases.length /** A symbol for which exclude returns true will not appear as * either end of a pair. @@ -113,39 +106,21 @@ abstract class SymbolPairs { */ protected def matches(high: Symbol): Boolean - /** The parents and base classes of `base`. Can be refined in subclasses. - */ - protected def parents: List[Type] = base.info.parents - protected def bases: List[Symbol] = base.info.baseClasses - - /** An implementation of BitSets as arrays (maybe consider collection.BitSet - * for that?) The main purpose of this is to implement - * intersectionContainsElement efficiently. 
- */ - private type BitSet = Array[Int] - - /** A mapping from all base class indices to a bitset - * which indicates whether parents are subclasses. - * - * i \in subParents(j) iff - * exists p \in parents, b \in baseClasses: - * i = index(p) - * j = index(b) - * p isSubClass b - * p.baseType(b) == self.baseType(b) - */ - private val subParents = new Array[BitSet](size) + /** Even if a pair `matches`, should the cursor skip this pair? + * + * @param lowClass owner of the next low symbol + * @param highClass owner of the next hi symbol + * @return whether to skip this pair + */ + protected def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = false - /** A map from baseclasses of to ints, with smaller ints meaning lower in - * linearization order. Symbols that are not baseclasses map to -1. - */ - private val index = new mutable.HashMap[Symbol, Int] { override def default(key: Symbol) = -1 } + protected def bases: List[Symbol] = base.info.baseClasses /** The scope entries that have already been visited as highSymbol * (but may have been excluded via hasCommonParentAsSubclass.) * These will not appear as lowSymbol. */ - private val visited = HashSet[ScopeEntry]("visited", 64) + private[this] val visited = HashSet[ScopeEntry]("visited", 64) /** Initialization has to run now so decls is populated before * the declaration of curEntry. 
@@ -174,9 +149,9 @@ abstract class SymbolPairs { next() // populate the above data structures - private def init() { + private def init(): Unit = { // Fill `decls` with lower symbols shadowing higher ones - def fillDecls(bcs: List[Symbol], deferred: Boolean) { + def fillDecls(bcs: List[Symbol], deferred: Boolean): Unit = { if (!bcs.isEmpty) { fillDecls(bcs.tail, deferred) var e = bcs.head.info.decls.elems @@ -187,77 +162,31 @@ abstract class SymbolPairs { } } } - var i = 0 - for (bc <- bases) { - index(bc) = i - subParents(i) = new BitSet(size) - i += 1 - } - for (p <- parents) { - val pIndex = index(p.typeSymbol) - if (pIndex >= 0) - for (bc <- p.baseClasses ; if sameInBaseClass(bc)(p, self)) { - val bcIndex = index(bc) - if (bcIndex >= 0) - include(subParents(bcIndex), pIndex) - } - } + // first, deferred (this will need to change if we change lookup rules!) fillDecls(bases, deferred = true) // then, concrete. fillDecls(bases, deferred = false) } - private def include(bs: BitSet, n: Int) { - val nshifted = n >> 5 - val nmask = 1 << (n & 31) - bs(nshifted) |= nmask - } - - /** Implements `bs1 * bs2 * {0..n} != 0`. - * Used in hasCommonParentAsSubclass */ - private def intersectionContainsElementLeq(bs1: BitSet, bs2: BitSet, n: Int): Boolean = { - val nshifted = n >> 5 - val nmask = 1 << (n & 31) - var i = 0 - while (i < nshifted) { - if ((bs1(i) & bs2(i)) != 0) return true - i += 1 - } - (bs1(nshifted) & bs2(nshifted) & (nmask | nmask - 1)) != 0 - } + // We can only draw conclusions about linearisation from a non-trait parent; skip Object, being the top of the lattice. + protected lazy val nonTraitParent: Symbol = + base.info.firstParent.typeSymbol.filter(sym => !sym.isTrait && sym != definitions.ObjectClass) - /** Do `sym1` and `sym2` have a common subclass in `parents`? - * In that case we do not follow their pairs. 
- */ - private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = { - val index1 = index(sym1.owner) - (index1 >= 0) && { - val index2 = index(sym2.owner) - (index2 >= 0) && { - intersectionContainsElementLeq( - subParents(index1), subParents(index2), index1 min index2) - } - } - } - - @tailrec private def advanceNextEntry() { + @tailrec private def advanceNextEntry(): Unit = { if (nextEntry ne null) { nextEntry = decls lookupNextEntry nextEntry if (nextEntry ne null) { val high = nextEntry.sym val isMatch = matches(high) && { visited addEntry nextEntry ; true } // side-effect visited on all matches - // skip nextEntry if a class in `parents` is a subclass of the - // owners of both low and high. - if (isMatch && !hasCommonParentAsSubclass(lowSymbol, high)) - highSymbol = high - else - advanceNextEntry() + // Advance if no match, or if the particular cursor is not interested in this pair + if (!isMatch || skipOwnerPair(low.owner, high.owner)) advanceNextEntry() + else highSymbol = high } } } - @tailrec private def advanceCurEntry() { + @tailrec private def advanceCurEntry(): Unit = { if (curEntry ne null) { curEntry = curEntry.next if (curEntry ne null) { @@ -277,14 +206,15 @@ abstract class SymbolPairs { def hasNext = curEntry ne null def currentPair = new SymbolPair(base, low, high) - def iterator = new Iterator[SymbolPair] { + def iterator: Iterator[SymbolPair] = new collection.AbstractIterator[SymbolPair] { def hasNext = cursor.hasNext def next() = try cursor.currentPair finally cursor.next() } // Note that next is called once during object initialization to // populate the fields tracking the current symbol pair. 
- def next() { + @tailrec + final def next(): Unit = { if (curEntry ne null) { lowSymbol = curEntry.sym advanceNextEntry() // sets highSymbol diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 15d337dfdd98..765a1dff4c8e 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,7 +16,7 @@ package internal import java.net.URLClassLoader -import scala.annotation.elidable +import scala.annotation.{elidable, nowarn, tailrec} import scala.collection.mutable import util._ import java.util.concurrent.TimeUnit @@ -74,11 +74,16 @@ abstract class SymbolTable extends macros.Universe def log(msg: => AnyRef): Unit - protected def elapsedMessage(msg: String, start: Long) = - msg + " in " + (TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - start) + "ms" + protected def elapsedMessage(msg: String, startNs: Long) = + msg + " in " + (TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNs)) + "ms" - def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]") - def informTime(msg: String, start: Long) = if (settings.verbose) informProgress(elapsedMessage(msg, start)) + def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]") + def informTime(msg: String, startNs: Long) = if (settings.verbose.value) informProgress(elapsedMessage(msg, startNs)) + @inline final def informingProgress[T](msg: => String)(fn: => T) : T = { + val verbose: Boolean = settings.verbose.value + val start = if (verbose) System.nanoTime() else 0L + try fn finally if (verbose) informTime(msg, start) + } def shouldLogAtThisPhase = false def isPastTyper = false @@ -90,11 +95,7 @@ abstract 
class SymbolTable extends macros.Universe def settings: MutableSettings - @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false - @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false - - @deprecated("use devWarning if this is really a warning; otherwise use log", "2.11.0") - def debugwarn(msg: => String): Unit = devWarning(msg) + def isSymbolLockTracingEnabled: Boolean = isDeveloper /** Override with final implementation for inlining. */ def debuglog(msg: => String): Unit = if (settings.isDebug) log(msg) @@ -153,32 +154,19 @@ abstract class SymbolTable extends macros.Universe result } - // Getting in front of Predef's asserts to supplement with more info; see `supplementErrorMessage`. - // This has the happy side effect of masking the one argument forms of assert/require - // (but for now they're reproduced here, because there are a million uses internal and external to fix). @inline - final def assert(assertion: Boolean, message: => Any): Unit = { - // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument. + final def assert(assertion: Boolean, message: => Any): Unit = if (!assertion) throwAssertionError(message) - } - // Let's consider re-deprecating this in the 2.13 series, to encourage informative messages. - //@deprecated("prefer to use the two-argument form", since = "2.12.5") - final def assert(assertion: Boolean): Unit = { - assert(assertion, "") - } + @deprecated("consider supplying an explanatory message", since = "2.12.5") + final def assert(assertion: Boolean): Unit = assert(assertion, "") @inline - final def require(requirement: Boolean, message: => Any): Unit = { - // calling Predef.require would send a freshly allocated closure wrapping the one received as argument. 
+ final def require(requirement: Boolean, message: => Any): Unit = if (!requirement) throwRequirementError(message) - } - // Let's consider re-deprecating this in the 2.13 series, to encourage informative messages. - //@deprecated("prefer to use the two-argument form", since = "2.12.5") - final def require(requirement: Boolean): Unit = { - require(requirement, "") - } + @deprecated("consider supplying an explanatory message", since = "2.12.5") + final def require(requirement: Boolean): Unit = require(requirement, "") // extracted from `assert`/`require` to make them as small (and inlineable) as possible private[internal] def throwAssertionError(msg: Any): Nothing = @@ -186,8 +174,8 @@ abstract class SymbolTable extends macros.Universe private[internal] def throwRequirementError(msg: Any): Nothing = throw new java.lang.IllegalArgumentException(s"requirement failed: ${supplementErrorMessage(String valueOf msg)}") - @inline final def findSymbol(xs: TraversableOnce[Symbol])(p: Symbol => Boolean): Symbol = { - xs find p getOrElse NoSymbol + @inline final def findSymbol(xs: IterableOnce[Symbol])(p: Symbol => Boolean): Symbol = { + xs.iterator find p getOrElse NoSymbol } // For too long have we suffered in order to sort NAMES. @@ -205,7 +193,7 @@ abstract class SymbolTable extends macros.Universe /** Dump each symbol to stdout after shutdown. */ - final val traceSymbolActivity = sys.props contains "scalac.debug.syms" + final val traceSymbolActivity = System.getProperty("scalac.debug.syms") != null object traceSymbols extends { val global: SymbolTable.this.type = SymbolTable.this } with util.TraceSymbolActivity @@ -215,7 +203,7 @@ abstract class SymbolTable extends macros.Universe /** Check that the executing thread is the compiler thread. No-op here, * overridden in interactive.Global. */ @elidable(elidable.WARNING) - def assertCorrectThread() {} + def assertCorrectThread(): Unit = {} /** A last effort if symbol in a select . is not found. 
* This is overridden by the reflection compiler to make up a package @@ -237,7 +225,7 @@ abstract class SymbolTable extends macros.Universe type RunId = Int final val NoRunId = 0 - private val phStack: collection.mutable.ArrayStack[Phase] = new collection.mutable.ArrayStack() + private[this] val phStack: collection.mutable.Stack[Phase] = new collection.mutable.Stack() private[this] var ph: Phase = NoPhase private[this] var per = NoPeriod @@ -248,10 +236,10 @@ abstract class SymbolTable extends macros.Universe def atPhaseStackMessage = atPhaseStack match { case Nil => "" - case ps => ps.reverseMap("->" + _).mkString("(", " ", ")") + case ps => ps.reverseIterator.map("->" + _).mkString("(", " ", ")") } - final def phase_=(p: Phase) { + final def phase_=(p: Phase): Unit = { ph = p per = period(currentRunId, p.id) } @@ -263,7 +251,7 @@ abstract class SymbolTable extends macros.Universe } current } - final def popPhase(ph: Phase) { + final def popPhase(ph: Phase): Unit = { if (keepPhaseStack) { phStack.pop() } @@ -335,23 +323,24 @@ abstract class SymbolTable extends macros.Universe final def isValid(period: Period): Boolean = period != 0 && runId(period) == currentRunId && { val pid = phaseId(period) - if (phase.id > pid) infoTransformers.nextFrom(pid).pid >= phase.id - else infoTransformers.nextFrom(phase.id).pid >= pid + if (phase.id > pid) nextFrom(pid).pid >= phase.id + else nextFrom(phase.id).pid >= pid } final def isValidForBaseClasses(period: Period): Boolean = { + @tailrec def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = ( it.pid >= limit || !it.changesBaseClasses && noChangeInBaseClasses(it.next, limit) ) period != 0 && runId(period) == currentRunId && { val pid = phaseId(period) - if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id) - else noChangeInBaseClasses(infoTransformers.nextFrom(phase.id), pid) + if (phase.id > pid) noChangeInBaseClasses(nextFrom(pid), phase.id) + else 
noChangeInBaseClasses(nextFrom(phase.id), pid) } } - def openPackageModule(container: Symbol, dest: Symbol) { + def openPackageModule(container: Symbol, dest: Symbol): Unit = { // unlink existing symbols in the package for (member <- container.info.decls.iterator) { if (!member.isPrivate && !member.isConstructor) { @@ -391,8 +380,8 @@ abstract class SymbolTable extends macros.Universe } val newParams = method.newSyntheticValueParams(formals.init :+ definitions.javaRepeatedType(elemtp)) MethodType(newParams, rtpe) - case PolyType(tparams, rtpe) => - PolyType(tparams, arrayToRepeated(rtpe)) + case PolyType(tparams, rtpe) => PolyType(tparams, arrayToRepeated(rtpe)) + case x => throw new MatchError(x) } abstract class SymLoader extends LazyType { @@ -400,28 +389,26 @@ abstract class SymbolTable extends macros.Universe } /** if there's a `package` member object in `pkgClass`, enter its members into it. */ - def openPackageModule(pkgClass: Symbol) { - - val pkgModule = pkgClass.packageObject + def openPackageModule(pkgClass: Symbol, force: Boolean = false): Unit = { + val pkgModule = pkgClass.packageObject def fromSource = pkgModule.rawInfo match { case ltp: SymLoader => ltp.fromSource case _ => false } - if (pkgModule.isModule && !fromSource) { + if (pkgModule.isModule && !fromSource) openPackageModule(pkgModule, pkgClass) - } } object perRunCaches { - import scala.collection.generic.Clearable + import scala.collection.mutable.Clearable // Weak references so the garbage collector will take care of // letting us know when a cache is really out of commission. 
import java.lang.ref.WeakReference - private var caches = List[WeakReference[Clearable]]() - private var javaCaches = List[JavaClearable[_]]() + private[this] var caches = List[WeakReference[Clearable]]() + private[this] var javaCaches = List[JavaClearable[_]]() - def recordCache[T <: Clearable](cache: T): T = { + def recordCache[T <: Clearable](cache: T): cache.type = { cache match { case jc: JavaClearable[_] => javaCaches ::= jc @@ -432,7 +419,7 @@ abstract class SymbolTable extends macros.Universe } /** Closes the provided classloader at the conclusion of this Run */ - final def recordClassloader(loader: ClassLoader): ClassLoader = { + final def recordClassloader(loader: ClassLoader): loader.type = { def attemptClose(loader: ClassLoader): Unit = { loader match { case u: URLClassLoader => debuglog("Closing classloader " + u); u.close() @@ -460,21 +447,25 @@ abstract class SymbolTable extends macros.Universe } } - def clearAll() = { + def clearAll(): Unit = { debuglog("Clearing " + (caches.size + javaCaches.size) + " caches.") - caches foreach (ref => Option(ref.get).foreach(_.clear)) + caches foreach (ref => Option(ref.get).foreach(_.clear())) caches = caches.filterNot(_.get == null) - javaCaches foreach (_.clear) + javaCaches foreach (_.clear()) javaCaches = javaCaches.filter(_.isValid) } - def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]()) - def newMap[K, V]() = recordCache(mutable.HashMap[K, V]()) - def newSet[K]() = recordCache(mutable.HashSet[K]()) - def newWeakSet[K <: AnyRef]() = recordCache(new WeakHashSet[K]()) + def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap.empty[K, V]) + def newMap[K, V]() = recordCache(mutable.HashMap.empty[K, V]) + def newSet[K]() = recordCache(mutable.HashSet.empty[K]) + def newWeakSet[K <: AnyRef]() = recordCache(WeakHashSet.empty[K]) + + @nowarn("cat=deprecation") + def newAnyRefMap[K <: AnyRef, V]() = recordCache(mutable.AnyRefMap.empty[K, V]) + @nowarn("cat=deprecation") + def newAnyRefMap[K <: AnyRef, 
V](default: K => V) = recordCache(mutable.AnyRefMap.withDefault[K, V](default)) - def newAnyRefMap[K <: AnyRef, V]() = recordCache(mutable.AnyRefMap[K, V]()) /** * Register a cache specified by a factory function and (optionally) a cleanup function. * @@ -508,6 +499,7 @@ abstract class SymbolTable extends macros.Universe val changesBaseClasses = true def transform(sym: Symbol, tpe: Type): Type = tpe } + var nextFrom: Array[InfoTransformer] = null private final val MaxPhases = 256 /** The phase which has given index as identifier. */ @@ -520,12 +512,6 @@ abstract class SymbolTable extends macros.Universe @deprecated("use enteringPhase", "2.10.0") // Used in sbt 0.12.4 @inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op) - - /** - * Adds the `sm` String interpolator to a [[scala.StringContext]]. - */ - implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps - protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = () protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = () } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c4ec7c6fcebb..57548f7b3ed5 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,19 +12,18 @@ /* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky */ package scala package reflect package internal -import scala.collection.immutable -import scala.collection.mutable.ListBuffer -import util.{ Statistics, shortClassOfInstance } -import Flags._ import scala.annotation.tailrec +import scala.collection.mutable.{ArrayBuffer, ListBuffer} import scala.reflect.io.{AbstractFile, NoAbstractFile} + +import util.{ReusableInstance, Statistics, shortClassOfInstance} +import Flags._ import Variance._ trait Symbols extends api.Symbols { self: SymbolTable => @@ -37,17 +36,19 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected def nextId() = { ids += 1; ids } /** Used to keep track of the recursion depth on locked symbols */ - private var _recursionTable = immutable.Map.empty[Symbol, Int] + private[this] var _recursionTable = Map.empty[Symbol, Int] def recursionTable = _recursionTable - def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable = value + def recursionTable_=(value: Map[Symbol, Int]) = _recursionTable = value - private var _lockedCount = 0 + private[this] var _lockedCount = 0 def lockedCount = this._lockedCount def lockedCount_=(i: Int) = _lockedCount = i + private[this] val _lockingTrace = ArrayBuffer.empty[Symbol] + private[this] val lockTracing: Boolean = self.isSymbolLockTracingEnabled @deprecated("Global existential IDs no longer used", "2.12.1") - private var existentialIds = 0 + private[this] var existentialIds = 0 @deprecated("Global existential IDs no longer used", "2.12.1") protected def nextExistentialId() = { existentialIds += 1; existentialIds } @deprecated("Use overload that accepts an id", "2.12.1") @@ -55,7 +56,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected def freshExistentialName(suffix: String, id: Int): TypeName = newTypeName("_" + id + suffix) // Set the fields which point companions at one another. Returns the module. 
- def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = { + def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): m.type = { moduleClass.sourceModule = m m setModuleClass moduleClass m @@ -76,7 +77,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * The original owner of a symbol is needed in some places in the backend. Ideally, owners should * be versioned like the type history. */ - private val originalOwnerMap = perRunCaches.newAnyRefMap[Symbol, Symbol]() + private[this] val originalOwnerMap = perRunCaches.newAnyRefMap[Symbol, Symbol]() // TODO - don't allow the owner to be changed without checking invariants, at least // when under some flag. Define per-phase invariants for owner/owned relationships, @@ -124,6 +125,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isJavaEnum: Boolean = hasJavaEnumFlag def isJavaAnnotation: Boolean = hasJavaAnnotationFlag + def isStaticAnnotation: Boolean = + initialize.hasJavaAnnotationFlag || isNonBottomSubClass(StaticAnnotationClass) && this != NowarnClass def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match { case n: TermName => newTermSymbol(n, pos, newFlags) @@ -136,7 +139,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => enclosingPackage.info.decls.foreach { sym => if(sourceFile == sym.sourceFile) { - sym.rawInfo.forceDirectSuperclasses + sym.rawInfo.forceDirectSuperclasses() } } @@ -268,12 +271,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => traceSymbols.recordNewSymbol(this) def validTo = _validTo - def validTo_=(x: Period) { _validTo = x} + def validTo_=(x: Period): Unit = { _validTo = x} def setName(name: Name): this.type = { this.name = asNameType(name) ; this } // Update the surrounding scopes - protected[this] def changeNameInOwners(name: Name) { + protected[this] def changeNameInOwners(name: Name): Unit = { if (owner.isClass) { var ifs = owner.infos while (ifs != 
null) { @@ -303,13 +306,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => def shortSymbolClass = shortClassOfInstance(this) def symbolCreationString: String = ( "%s%25s | %-40s | %s".format( - if (settings.uniqid) "%06d | ".format(id) else "", + if (settings.uniqid.value) "%06d | ".format(id) else "", shortSymbolClass, name.decode + " in " + owner, rawFlagString ) ) - // ------ creators ------------------------------------------------------------------- final def newValue(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = @@ -399,10 +401,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * pre.memberType(m) */ - final def newOverloaded(pre: Type, alternatives: List[Symbol]): TermSymbol = ( - newTermSymbol(alternatives.head.name.toTermName, alternatives.head.pos, OVERLOADED) - setInfo OverloadedType(pre, alternatives) - ) + final def newOverloaded(pre: Type, alternatives: List[Symbol]): TermSymbol = { + val triedCookingFlag = if (alternatives.forall(_.hasFlag(TRIEDCOOKING))) TRIEDCOOKING else 0L + + newTermSymbol(alternatives.head.name.toTermName, alternatives.head.pos, OVERLOADED | triedCookingFlag) setInfo OverloadedType(pre, alternatives) + } final def newErrorValue(name: TermName): TermSymbol = newTermSymbol(name, pos, SYNTHETIC | IS_ERROR) setInfo ErrorType @@ -552,19 +555,23 @@ trait Symbols extends api.Symbols { self: SymbolTable => // True if the symbol is unlocked. // True if the symbol is locked but still below the allowed recursion depth. 
// False otherwise - private[scala] def lockOK: Boolean = { - ((_rawflags & LOCKED) == 0L) || - ((settings.Yrecursion.value != 0) && - (recursionTable get this match { - case Some(n) => (n <= settings.Yrecursion.value) - case None => true })) - } + private[scala] def lockOK: Boolean = ( + (_rawflags & LOCKED) == 0L || { + val limit = settings.Yrecursion.value + limit != 0 && ( + recursionTable.get(this) match { + case Some(n) => n <= limit + case None => true + }) + } + ) // Lock a symbol, using the handler if the recursion depth becomes too great. private[scala] def lock(handler: => Unit): Boolean = { + if (lockTracing) _lockingTrace.addOne(this) if ((_rawflags & LOCKED) != 0L) { if (settings.Yrecursion.value != 0) { - recursionTable get this match { + recursionTable.get(this) match { case Some(n) => if (n > settings.Yrecursion.value) { handler @@ -577,7 +584,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => recursionTable += (this -> 1) true } - } else { handler; false } + } else { + handler + false + } } else { _rawflags |= LOCKED true @@ -585,13 +595,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => } // Unlock a symbol - private[scala] def unlock() = { + private[scala] def unlock(): Unit = if ((_rawflags & LOCKED) != 0L) { _rawflags &= ~LOCKED + if (lockTracing && !_lockingTrace.isEmpty) + _lockingTrace.remove(index = _lockingTrace.size - 1, count = 1) // dropRightInPlace(1) if (settings.Yrecursion.value != 0) recursionTable -= this } - } // ----- tests ---------------------------------------------------------------------- @@ -665,6 +676,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isLabel = false /** Package/package object tests */ + def isPackage = false def isPackageClass = false def isPackageObject = false def isPackageObjectClass = false @@ -686,6 +698,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * to be the case if it is final, and any type parameters are invariant. 
*/ def hasOnlyBottomSubclasses = { + @tailrec def loop(tparams: List[Symbol]): Boolean = tparams match { case Nil => true case x :: xs => x.variance.isInvariant && loop(xs) @@ -736,14 +749,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Does symbol have ANY flag in `mask` set? */ final def hasFlag(mask: Long): Boolean = getFlag(mask) != 0 - def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong) - /** Does symbol have ALL the flags in `mask` set? */ final def hasAllFlags(mask: Long): Boolean = getFlag(mask) == mask def setFlag(mask: Long): this.type = { _rawflags |= mask ; this } def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this } - def resetFlags() { rawflags = 0L } + def resetFlags(): Unit = { rawflags = 0 } /** Default implementation calls the generic string function, which * will print overloaded flags as . Subclasses @@ -770,7 +781,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def flags_=(fs: Long) = _rawflags = fs - def rawflags_=(x: Long) { _rawflags = x } + def rawflags_=(x: Long): Unit = { _rawflags = x } final def hasGetter = isTerm && nme.isLocalName(name) @@ -838,10 +849,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock /** change name by appending $$ - * Do the same for any accessed symbols or setters/getters. + * Do the same for any accessed symbols to preserve serialization compatibility. * Implementation in TermSymbol. 
*/ - def expandName(base: Symbol) { } + def expandName(base: Symbol): Unit = () // In java.lang, Predef, or scala package/package object def isInDefaultNamespace = UnqualifiedOwners(effectiveOwner) @@ -855,7 +866,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => def skipPackageObject: Symbol = this /** The package object symbol corresponding to this package or package class symbol, or NoSymbol otherwise */ - def packageObject: Symbol = + @tailrec + final def packageObject: Symbol = if (isPackageClass) tpe.packageObject else if (hasPackageFlag) moduleClass.packageObject else NoSymbol @@ -896,13 +908,16 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) } - def isStrictFP: Boolean = !isDeferred && (hasAnnotation(ScalaStrictFPAttr) || originalOwner.isStrictFP) - def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) - def hasBridgeAnnotation = hasAnnotation(BridgeClass) - def isDeprecated = hasAnnotation(DeprecatedAttr) || (isJava && hasAnnotation(JavaDeprecatedAttr)) + @tailrec + final def isStrictFP: Boolean = this != NoSymbol && !isDeferred && (hasAnnotation(ScalaStrictFPAttr) || originalOwner.isStrictFP) + def isSerializable = info.baseClasses.exists(_ == SerializableClass) + def isDeprecated = hasAnnotation(DeprecatedAttr) || hasAnnotation(JavaDeprecatedAttr) def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0) - def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1) - def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0 orElse Some(nme.NO_NAME)) + def deprecationVersion = getAnnotation(DeprecatedAttr).flatMap(_.stringArg(1)) match { + case v @ Some(_) => v + case _ => getAnnotation(JavaDeprecatedAttr).flatMap(_.stringArg(0)) + } + def deprecatedParamName = getAnnotation(DeprecatedNameAttr).flatMap(ann => ann.symbolArg(0).orElse(ann.stringArg(0).map(newTermName)).orElse(Some(nme.NO_NAME))) def 
deprecatedParamVersion = getAnnotation(DeprecatedNameAttr) flatMap (_ stringArg 1) def hasDeprecatedInheritanceAnnotation = hasAnnotation(DeprecatedInheritanceAttr) @@ -927,12 +942,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) } def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) } def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) } - def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) } + def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0)) def implicitAmbiguousMsg = getAnnotation(ImplicitAmbiguousClass) flatMap { _.stringArg(0) } def isCompileTimeOnly = hasAnnotation(CompileTimeOnlyAttr) def compileTimeOnlyMessage = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0) + def isExperimental = hasAnnotation(ExperimentalAnnotationClass) + /** Is this symbol an accessor method for outer? */ final def isOuterAccessor = hasFlag(STABLE | ARTIFACT) && (unexpandedName == nme.OUTER) @@ -959,6 +976,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Is this symbol a synthetic apply or unapply method in a companion object of a case class? */ // xeno-by: why this obscure use of the CASE flag? why not simply compare name with nme.apply and nme.unapply? + // dnw: "fixed in Dotty"? In Scala 3 unapply methods don't use the CASE flag. final def isCaseApplyOrUnapply = isMethod && isCase && isSynthetic @@ -968,12 +986,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isModuleVar = hasFlag(MODULEVAR) + final def isScala3Defined = hasFlag(SCALA3X) + /** * Is this symbol static (i.e. with no outer instance)? * Q: When exactly is a sym marked as STATIC? * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or * any number of levels deep. 
- * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6 + * https://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6 * * TODO: should this only be invoked on class / module symbols? because there's also `isStaticMember`. * @@ -1005,29 +1025,30 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isStaticOwner: Boolean = isPackageClass || isModuleClass && isStatic - /** A helper function for isEffectivelyFinal. */ - private def isNotOverridden = ( - owner.isClass && ( - owner.isEffectivelyFinal - || (owner.isSealed && owner.sealedChildren.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol))) - ) - ) - /** Is this symbol effectively final? I.e, it cannot be overridden */ final def isEffectivelyFinal: Boolean = ( - (this hasFlag FINAL | PACKAGE) - || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects) - || isTerm && (isPrivate || isLocalToBlock || (hasAllFlags(notPRIVATE | METHOD) && !hasFlag(DEFERRED))) + hasFlag(FINAL | PACKAGE) && this != SingletonClass + || isModuleOrModuleClass + || isTerm && (isPrivate || isLocalToBlock || hasAllFlags(notPRIVATE | METHOD) && !hasFlag(DEFERRED)) // We track known subclasses of term-owned classes, use that to infer finality. // However, don't look at owner for refinement classes (it's basically arbitrary). 
|| isClass && !isRefinementClass && originalOwner.isTerm && children.isEmpty ) /** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */ - final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden) + final def isEffectivelyFinalOrNotOverridden: Boolean = { + def isNotOverriddenAt(c: Symbol, hasLocalOwner: Boolean): Boolean = { + def checkOverrideIn(sc: Symbol) = overridingSymbol(sc) == NoSymbol && isNotOverriddenAt(sc, hasLocalOwner || sc.originalOwner.isTerm) + c.isClass && (c.isEffectivelyFinal || { + (c.isSealed || hasLocalOwner) && c.children.forall(checkOverrideIn) + }) + } + isEffectivelyFinal || isTerm && !isDeferred && isNotOverriddenAt(owner, owner.originalOwner.isTerm) + } /** Is this symbol owned by a package? */ final def isTopLevel = owner.isPackageClass + // shim for sbt's compiler interface /** Is this symbol defined in a block? */ @deprecated("use isLocalToBlock instead", "2.11.0") final def isLocal: Boolean = owner.isTerm @@ -1080,6 +1101,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * (2) it is abstract override and its super symbol in `base` is * nonexistent or incomplete. */ + @tailrec final def isIncompleteIn(base: Symbol): Boolean = this.isDeferred || (this hasFlag ABSOVERRIDE) && { @@ -1088,21 +1110,21 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def exists: Boolean = !isTopLevel || { - val isSourceLoader = rawInfo match { - case sl: SymLoader => sl.fromSource - case _ => false - } - def warnIfSourceLoader() { + def warnIfSourceLoader(): false = { + val isSourceLoader = rawInfo match { + case sl: SymLoader => sl.fromSource + case _ => false + } + // Predef is completed early due to its autoimport; we used to get here when type checking its + // parent LowPriorityImplicits. See comment in c5441dc for more elaboration. 
+ // Since the fix for scala/bug#7335 Predef parents must be defined in Predef.scala, and we should not + // get here anymore. if (isSourceLoader) - // Predef is completed early due to its autoimport; we used to get here when type checking its - // parent LowPriorityImplicits. See comment in c5441dc for more elaboration. - // Since the fix for scala/bug#7335 Predef parents must be defined in Predef.scala, and we should not - // get here anymore. devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.") + false } - - rawInfo load this - rawInfo != NoType || { warnIfSourceLoader(); false } + rawInfo.load(this) + rawInfo != NoType || warnIfSourceLoader() } final def isInitialized: Boolean = @@ -1126,13 +1148,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Can this symbol be loaded by a reflective mirror? * - * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs. + * Scalac relies on `ScalaSignature` annotation to retain symbols across compilation runs. * Such annotations (also called "pickles") are applied on top-level classes and include information * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block) * are typically unreachable and information about them gets lost. * * This method is useful for macro writers who wish to save certain ASTs to be used at runtime. - * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment. + * With `isLocatable` it's possible to check whether a tree can be retained as is, or it needs special treatment. */ final def isLocatable: Boolean = { if (this == NoSymbol) return false @@ -1214,7 +1236,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // when under some flag. Define per-phase invariants for owner/owned relationships, // e.g. 
after flatten all classes are owned by package classes, there are lots and // lots of these to be declared (or more realistically, discovered.) - def owner_=(owner: Symbol) { + def owner_=(owner: Symbol): Unit = { saveOriginalOwner(this) assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflexive code") if (traceSymbolActivity) @@ -1227,10 +1249,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol. def enclClassChain: List[Symbol] = owner.enclClassChain - def ownersIterator: Iterator[Symbol] = new Iterator[Symbol] { - private var current = Symbol.this + def ownersIterator: Iterator[Symbol] = new collection.AbstractIterator[Symbol] { + private[this] var current = Symbol.this def hasNext = current ne NoSymbol - def next = { val r = current; current = current.owner; r } + def next() = { val r = current; current = current.owner; r } } /** Same as `ownerChain contains sym` but more efficient, and @@ -1294,13 +1316,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def fullName(separator: Char): String = fullName(separator, "") private def fullName(separator: Char, suffix: CharSequence): String = { - var b: java.lang.StringBuffer = null + var b: StringBuilder = null def loop(size: Int, sym: Symbol): Unit = { val symName = sym.name val nSize = symName.length - (if (symName.endsWith(nme.LOCAL_SUFFIX_STRING)) 1 else 0) if (sym.isRoot || sym.isRootPackage || sym == NoSymbol || sym.owner.isEffectiveRoot) { val capacity = size + nSize - b = new java.lang.StringBuffer(capacity) + b = new StringBuilder(capacity) symName.appendTo(b, 0, nSize) } else { loop(size + nSize + 1, sym.effectiveOwner.enclClass) @@ -1308,7 +1330,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => symName.appendTo(b, 0, nSize) } } - loop(suffix.length(), this) + loop(suffix.length, this) b.append(suffix) b.toString } @@ -1363,6 +1385,19 @@ trait Symbols extends 
api.Symbols { self: SymbolTable => protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = new TermSymbol(this, pos, name) initFlags newFlags + final def newExtensionMethodSymbol(companion: Symbol, pos: Position): MethodSymbol = { + val extensionMeth = ( + companion.moduleClass.newMethod(this.name.extensionName, pos, this.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL) + setAnnotations this.annotations + ) + defineOriginalOwner(extensionMeth, this.owner) + // @strictfp on class means strictfp on all methods, but `setAnnotations` won't copy it + if (this.isStrictFP && !extensionMeth.hasAnnotation(ScalaStrictFPAttr)) + extensionMeth.addAnnotation(ScalaStrictFPAttr) + this.removeAnnotation(TailrecClass) // it's on the extension method, now. + companion.info.decls.enter(extensionMeth) + } + final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = { // Package before Module, Module before Method, or we might grab the wrong guy. if ((newFlags & PACKAGE) != 0) @@ -1428,7 +1463,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def privateWithin = { _privateWithin } - def privateWithin_=(sym: Symbol) { _privateWithin = sym } + def privateWithin_=(sym: Symbol): Unit = { _privateWithin = sym } def setPrivateWithin(sym: Symbol): this.type = { privateWithin_=(sym) ; this } /** Does symbol have a private or protected qualifier set? 
*/ @@ -1529,13 +1564,20 @@ trait Symbols extends api.Symbols { self: SymbolTable => if ((_rawflags & LOCKED) != 0L) { // rolled out once for performance lock { setInfo(ErrorType) - throw CyclicReference(this, tp) + val trace = + if (lockTracing) { + val t = _lockingTrace.toArray + _lockingTrace.clear() + t + } else CyclicReference.emptyTrace + throw CyclicReference(this, tp, trace) } } else { + if (lockTracing) _lockingTrace.addOne(this) _rawflags |= LOCKED } + val current = phase if (isCompilerUniverse) { - val current = phase try { assertCorrectThread() phase = phaseOf(infos.validFrom) @@ -1544,24 +1586,24 @@ trait Symbols extends api.Symbols { self: SymbolTable => unlock() phase = current } - } else { - // In runtime reflection, there is only on phase, so don't mutate Global.phase which would lead to warnings - // of data races from when using TSAN to assess thread safety. - try { - tp.complete(this) - } finally { - unlock() - } } + else + // In runtime reflection, there is only one phase, so don't mutate Global.phase + // which would lead to warnings of data races from when using TSAN to assess thread safety. + try tp.complete(this) + finally unlock() } catch { case ex: CyclicReference => - devWarning("... hit cycle trying to complete " + this.fullLocationString) + devWarning(s"... hit cycle trying to complete $fullLocationString") throw ex } - def info_=(info: Type) { - assert(info ne null) - infos = TypeHistory(currentPeriod, info, null) + def info_=(info: Type): Unit = { + assert(info ne null, "Can't assign a null type") + if (infos ne null) { + infos.reset(currentPeriod, info) + } else + infos = TypeHistory(currentPeriod, info, null) unlock() _validTo = if (info.isComplete) currentPeriod else NoPeriod } @@ -1569,7 +1611,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Set initial info. */ def setInfo(info: Type): this.type = { info_=(info); this } /** Modifies this symbol's info in place. 
*/ - def modifyInfo(f: Type => Type): this.type = setInfo(f(info)) + def modifyInfo(f: Type => Type): this.type = { + val i = info + val r = f(i) + if (r ne i) + setInfo(r) + this + } /** Substitute second list of symbols for first in current info. * * NOTE: this discards the type history (uses setInfo) @@ -1588,7 +1636,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** Set new info valid from start of this phase. */ - def updateInfo(info: Type): Symbol = { + def updateInfo(info: Type): this.type = { val pid = phaseId(infos.validFrom) assert(pid <= phase.id, (pid, phase.id)) if (pid == phase.id) infos = infos.prev @@ -1614,7 +1662,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => val outer = Symbols.this var infos = this.infos - outer.assert(infos != null) + outer.assert(infos != null, "infos must not be null") if (_validTo != NoPeriod) { val curPeriod = outer.currentPeriod @@ -1632,7 +1680,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => // scala/bug#8871 Discard all but the first element of type history. Specialization only works in the resident // compiler / REPL if re-run its info transformer in this run to correctly populate its // per-run caches, e.g. typeEnv - infos = adaptInfo(infos.oldest) + adaptInfo(infos.oldest) + infos = this.infos } //assert(runId(validTo) == currentRunId, name) @@ -1652,7 +1701,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def transformInfos(infos0: TypeHistory, curPhase: Phase, curPeriod: Period): TypeHistory = { assertCorrectThread() var infos = infos0 - var itr = infoTransformers.nextFrom(phaseId(_validTo)) + var itr = nextFrom(phaseId(_validTo)) infoTransformers = itr; // caching optimization while (itr.pid != NoPhase.id && itr.pid < curPhase.id) { phase = phaseWithId(itr.pid) @@ -1670,9 +1719,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => } // adapt to new run in fsc. 
- private def adaptInfo(oldest: TypeHistory): TypeHistory = { - assert(isCompilerUniverse) - assert(oldest.prev == null) + private def adaptInfo(oldest: TypeHistory): Unit = { + assert(isCompilerUniverse, "Must be compiler universe") + assert(oldest.prev == null, "Previous history must be null") val pid = phaseId(oldest.validFrom) _validTo = period(currentRunId, pid) @@ -1682,16 +1731,17 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (info1 eq oldest.info) { oldest.validFrom = validTo this.infos = oldest - oldest } else { - this.infos = TypeHistory(validTo, info1, null) - this.infos + if (this.infos ne null) { + this.infos = this.infos.reset(validTo, info1) + } else + this.infos = TypeHistory(validTo, info1, null) } } /** Raises a `MissingRequirementError` if this symbol is a `StubSymbol` */ - def failIfStub() {} + def failIfStub(): Unit = {} /** Initialize the symbol */ final def initialize: this.type = { @@ -1700,12 +1750,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def maybeInitialize = { try { initialize ; true } - catch { case _: CyclicReference => debuglog("Hit cycle in maybeInitialize of $this") ; false } + catch { case _: CyclicReference => debuglog(s"Hit cycle in maybeInitialize of $this") ; false } } /** Was symbol's type updated during given phase? 
*/ final def hasTypeAt(pid: Phase#Id): Boolean = { - assert(isCompilerUniverse) + assert(isCompilerUniverse, "Must be compiler universe") var infos = this.infos while ((infos ne null) && phaseId(infos.validFrom) > pid) infos = infos.prev infos ne null @@ -1725,8 +1775,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => info // force the current info if (isJavaDefined || isType && owner.isJavaDefined) this modifyInfo rawToExistential - else if (isOverloaded) - alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential) + else if (isOverloaded) { + for (alt <- alternatives) { + alt.setFlag(TRIEDCOOKING) + if (alt.isJavaDefined) alt.modifyInfo(rawToExistential) + } + } this } @@ -1811,9 +1865,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => } val oldsyms = oldsymbuf.toList val newsyms = newsymbuf.toList - for (sym <- newsyms) { - addMember(thistp, tp, sym modifyInfo (_ substThisAndSym(this, thistp, oldsyms, newsyms))) - } + for (sym <- newsyms) + addMember(thistp, tp, sym.modifyInfo(_.substThisAndSym(this, thistp, oldsyms, newsyms))) } tp } @@ -1835,14 +1888,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** - * Adds the interface scala.Serializable to the parents of a ClassInfoType. + * Adds the interface java.io.Serializable to the parents of a ClassInfoType. * Note that the tree also has to be updated accordingly. 
*/ - def makeSerializable() { + def makeSerializable(): Unit = { info match { case ci @ ClassInfoType(_, _, _) => setInfo(ci.copy(parents = ci.parents :+ SerializableTpe)) - invalidateCaches(ci.typeSymbol.typeOfThis, ci.typeSymbol :: Nil) + invalidateCaches(ci.typeSymbol.typeOfThis, Set(ci.typeSymbol)) case i => abort("Only ClassInfoTypes can be made serializable: "+ i) } @@ -1850,9 +1903,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ----- setters implemented in selected subclasses ------------------------------------- - def typeOfThis_=(tp: Type) { throw new UnsupportedOperationException("typeOfThis_= inapplicable for " + this) } - def sourceModule_=(sym: Symbol) { throw new UnsupportedOperationException("sourceModule_= inapplicable for " + this) } - def addChild(sym: Symbol) { throw new UnsupportedOperationException("addChild inapplicable for " + this) } + def typeOfThis_=(tp: Type): Unit = { throw new UnsupportedOperationException("typeOfThis_= inapplicable for " + this) } + def sourceModule_=(sym: Symbol): Unit = { throw new UnsupportedOperationException("sourceModule_= inapplicable for " + this) } + def addChild(sym: Symbol): Unit = { throw new UnsupportedOperationException("addChild inapplicable for " + this) } // ----- annotations ------------------------------------------------------------ @@ -1876,14 +1929,18 @@ trait Symbols extends api.Symbols { self: SymbolTable => def withAnnotations(annots: List[AnnotationInfo]): this.type = setAnnotations(annots ::: annotations) + def withAnnotation(anno: AnnotationInfo): this.type = + setAnnotations(anno :: annotations) + def withoutAnnotations: this.type = setAnnotations(Nil) def filterAnnotations(p: AnnotationInfo => Boolean): this.type = setAnnotations(annotations filter p) - def addAnnotation(annot: AnnotationInfo): this.type = - setAnnotations(annot :: annotations) + override def removeAnnotation(sym: Symbol): this.type = filterAnnotations(!_.matches(sym)) + + def addAnnotation(annot: 
AnnotationInfo): this.type = setAnnotations(annotations.appended(annot)) // Convenience for the overwhelmingly common cases, and avoid varags and listbuilders final def addAnnotation(sym: Symbol): this.type = { @@ -1929,7 +1986,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * inheritance graph (i.e. subclass.isLess(superclass) always holds). * the ordering is given by: (_.isType, -_.baseTypeSeq.length) for type symbols, followed by `id`. */ - final def isLess(that: Symbol): Boolean = { + final def isLess(that: Symbol): Boolean = (this ne that) && { def baseTypeSeqLength(sym: Symbol) = if (sym.isAbstractType) 1 + sym.info.upperBound.baseTypeSeq.length else sym.info.baseTypeSeq.length @@ -1945,6 +2002,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * (this isNestedIn that) holds iff this symbol is defined within * a class or method defining that symbol */ + @tailrec final def isNestedIn(that: Symbol): Boolean = owner == that || owner != NoSymbol && (owner isNestedIn that) @@ -1987,7 +2045,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => var alts0: List[Symbol] = alternatives var alts1: List[Symbol] = Nil - while (alts0.nonEmpty) { + while (!alts0.isEmpty) { if (cond(alts0.head)) alts1 ::= alts0.head else @@ -2028,7 +2086,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => setInfo (this.info cloneInfo clone) setAnnotations this.annotations ) - this.attachments.all.foreach(clone.updateAttachment) + assert(clone.attachments.isEmpty, "cloned symbol cannot have attachments") + clone.setAttachments(this.attachments.cloneAttachments) if (clone.thisSym != clone) clone.typeOfThis = (clone.typeOfThis cloneInfo clone) @@ -2041,6 +2100,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Internal method to clone a symbol's implementation with the given flags and no info. 
*/ def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeOfClonedSymbol + def flipped: Symbol = this + // ------ access to related symbols -------------------------------------------------- /** The next enclosing class. */ @@ -2066,7 +2127,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * scala> intp("Foo2").thisSym * res1: $r.intp.global.Symbol = value self * - * Martin says: The reason `thisSym' is `this' is so that thisType can be this.thisSym.tpe. + * Martin says: The reason `thisSym` is `this` is so that thisType can be this.thisSym.tpe. * It's a trick to shave some cycles off. * * Morale: DO: if (clazz.typeOfThis.typeConstructor ne clazz.typeConstructor) ... @@ -2090,8 +2151,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * argument in the first parameter list of the primary constructor. * The empty list for all other classes. * - * This list will be sorted to correspond to the declaration order - * in the constructor parameter + * This list will be sorted to correspond to the declaration order + * in the constructor parameter */ final def caseFieldAccessors: List[Symbol] = { // We can't rely on the ordering of the case field accessors within decls -- @@ -2102,22 +2163,26 @@ trait Symbols extends api.Symbols { self: SymbolTable => // // The slightly more principled approach of using the paramss of the // primary constructor leads to cycles in, for example, pos/t5084.scala. 
- val primaryNames = constrParamAccessors map (_.name.dropLocal) - def nameStartsWithOrigDollar(name: Name, prefix: Name) = - name.startsWith(prefix) && name.length > prefix.length + 1 && name.charAt(prefix.length) == '$' + val primaryNames = constrParamAccessors.map { p => + if (p.hasFlag(EXPANDEDNAME)) p.unexpandedName.dropLocal + else p.name.dropLocal + } - def rec(remaningAccessors: List[Symbol], foundAccessors: List[(Symbol, Int)], remainingNames: List[(Name, Int)]): List[Symbol] = { - remaningAccessors match { + def loop(remainingAccessors: List[Symbol], foundAccessors: List[(Symbol, Int)], remainingNames: List[(Name, Int)]): List[Symbol] = + remainingAccessors match { case Nil => foundAccessors.sortBy(_._2).map(_._1) - case acc :: tail => { - val i = remainingNames.collectFirst { case (name, i) if acc.name == name || nameStartsWithOrigDollar(acc.name, name) => i} - rec(tail, (acc, i.get) :: foundAccessors, remainingNames.filterNot { case (_, ii) => Some(ii) == i} ) - } + case acc :: remainingAccessors => + def nameStartsWithOrigDollar(name: Name, prefix: Name) = + name.startsWith(prefix) && name.length > prefix.length + 1 && name.charAt(prefix.length) == '$' + remainingNames.collectFirst { + case (name, i) if acc.name == name || nameStartsWithOrigDollar(acc.name, name) => i + } match { + case Some(i) => loop(remainingAccessors, (acc, i) :: foundAccessors, remainingNames.filter(_._2 != i)) + case x => throw new MatchError(x) + } } - } - rec(caseFieldAccessorsUnsorted.sortBy(s => -s.name.length), Nil, primaryNames.zipWithIndex.sortBy{ case (n, _) => -n.length}) - + loop(caseFieldAccessorsUnsorted.sortBy(-_.name.length), foundAccessors = Nil, primaryNames.zipWithIndex.sortBy(-_._1.length)) } private final def caseFieldAccessorsUnsorted: List[Symbol] = info.decls.toList.filter(_.isCaseAccessorMethod) @@ -2148,6 +2213,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * This is the enclosing class, except for classes defined locally to constructors, * 
where it is the outer class of the enclosing class. */ + @tailrec final def outerClass: Symbol = if (this == NoSymbol) { // ideally we shouldn't get here, but it's better to harden against this than suffer the infinite loop in scala/bug#9133 @@ -2252,20 +2318,25 @@ trait Symbols extends api.Symbols { self: SymbolTable => * (or, for traits: `$init`) of `C`. * */ + @tailrec final def logicallyEnclosingMember: Symbol = if (isLocalDummy) enclClass.primaryConstructor else if (isMethod || isClass || this == NoSymbol) this else owner.logicallyEnclosingMember /** The top-level class containing this symbol, using the current owner chain. */ - def enclosingTopLevelClass: Symbol = - if (isTopLevel) { + @tailrec + final def enclosingTopLevelClass: Symbol = + if (this eq NoSymbol) this + else if (isTopLevel) { if (isClass) this else moduleClass } else owner.enclosingTopLevelClass /** The top-level class or local dummy symbol containing this symbol, using the original owner chain. */ - def originalEnclosingTopLevelClassOrDummy: Symbol = - if (isTopLevel) { + @tailrec + final def originalEnclosingTopLevelClassOrDummy: Symbol = + if (this eq NoSymbol) this + else if (isTopLevel) { if (isClass) this else moduleClass.orElse(this) } else originalOwner.originalEnclosingTopLevelClassOrDummy @@ -2384,11 +2455,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * @param baseClass is a base class of this symbol's owner. */ - final def overriddenSymbol(baseClass: Symbol): Symbol = ( + final def overriddenSymbol(baseClass: Symbol): Symbol = { // concrete always overrides abstract, so don't let an abstract definition // claim to be overriding an inherited concrete one. 
- matchingInheritedSymbolIn(baseClass) filter (res => res.isDeferred || !this.isDeferred) - ) + val matching = matchingInheritedSymbolIn(baseClass) + if (isDeferred) matching.filter(_.isDeferred) else matching + } private def matchingInheritedSymbolIn(baseClass: Symbol): Symbol = if (canMatchInheritedSymbols) matchingSymbol(baseClass, owner.thisType) else NoSymbol @@ -2397,12 +2469,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * @param ofclazz is a subclass of this symbol's owner */ - final def overridingSymbol(ofclazz: Symbol): Symbol = ( - if (canMatchInheritedSymbols) - matchingSymbol(ofclazz, ofclazz.thisType) - else - NoSymbol - ) + final def overridingSymbol(ofclazz: Symbol): Symbol = + if (canMatchInheritedSymbols) matchingSymbol(ofclazz, ofclazz.thisType) else NoSymbol /** If false, this symbol cannot possibly participate in an override, * either as overrider or overridee. For internal use; you should consult @@ -2548,7 +2616,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * is not a constructor nor a static module rename it by expanding its name to avoid name clashes * @param base the fully qualified name of this class will be appended if name expansion is needed */ - final def makeNotPrivate(base: Symbol) { + @tailrec + final def makeNotPrivate(base: Symbol): Unit = { if (this.isPrivate) { setFlag(notPRIVATE) // this makes it effectively final (isEffectivelyFinal) // don't set FINAL -- methods not marked final by user should not end up final in bytecode @@ -2562,7 +2631,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Remove any access boundary and clear flags PROTECTED | PRIVATE. */ - def makePublic = this setPrivateWithin NoSymbol resetFlag AccessFlags + def makePublic: this.type = this setPrivateWithin NoSymbol resetFlag AccessFlags /** The first parameter to the first argument list of this method, * or NoSymbol if inapplicable. 
@@ -2581,7 +2650,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => if ( (file eq NoAbstractFile) || { val path = file.path - path.endsWith(".class") || path.endsWith(".sig") + path.endsWith(".class") || path.endsWith(".sig") || path.endsWith(".tasty") }) null else file } @@ -2589,7 +2658,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * Never null; if there is no associated file, returns NoAbstractFile. */ def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile - def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) } + def associatedFile_=(f: AbstractFile): Unit = { abort("associatedFile_= inapplicable for " + this) } /** If this is a sealed or local class, its known direct subclasses. * Otherwise, the empty set. @@ -2602,7 +2671,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def sealedDescendants: Set[Symbol] = if (!isSealed) Set(this) else children.flatMap(_.sealedDescendants) + this @inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt - @inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this } + @inline final def andAlso(f: Symbol => Unit): this.type = { if (this ne NoSymbol) f(this) ; this } @inline final def fold[T](none: => T)(f: Symbol => T): T = if (this ne NoSymbol) f(this) else none @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this) @@ -2619,7 +2688,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * (the initial Name) before falling back on id, which varies depending * on exactly when a symbol is loaded. 
*/ - final def sealedSortName: String = initName + "#" + id + final def sealedSortName: String = initName.toString + "#" + id /** String representation of symbol's definition key word */ final def keyString: String = @@ -2665,6 +2734,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => isPrimaryConstructor)) ("primary constructor", "constructor", "PCTOR") else if (isClassConstructor) ("constructor", "constructor", "CTOR") else if (isMethod) ("method", "method", "METH") + //else if (isValueParameter) ("value parameter", "parameter", "VAL") else if (isTerm) ("value", "value", "VAL") else ("", "", "???") @@ -2759,8 +2829,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * for backward compatibility reasons. */ def locationString: String = ownsString match { - case "" => "" - case s => " in " + s + case "" => "" + case s => s" in $s" } def fullLocationString: String = toString + locationString def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>" @@ -2803,25 +2873,31 @@ trait Symbols extends api.Symbols { self: SymbolTable => } private def defStringCompose(infoString: String) = compose( - flagString, - keyString, + compose(flagString, keyString), varianceString + nameString + infoString + flagsExplanationString ) + /** String representation of symbol's definition. It uses the * symbol's raw info to avoid forcing types. */ def defString = defStringCompose(signatureString) + def defStringWithoutImplicit = compose( + keyString, + varianceString + nameString + signatureString + flagsExplanationString + ) + /** String representation of symbol's definition, using the supplied * info rather than the symbol's. */ def defStringSeenAs(info: Type) = defStringCompose(infoString(info)) - /** Concatenate strings separated by spaces */ - private def compose(ss: String*) = ss filter (_ != "") mkString " " + /** Concatenate non-empty strings separated by a space. 
*/ + private def compose(x: String, y: String): String = + if (x.isEmpty) y else if (y.isEmpty) x else s"$x $y" - def isSingletonExistential = - nme.isSingletonName(name) && (info.upperBound.typeSymbol isSubClass SingletonClass) + def isSingletonExistential: Boolean = + nme.isSingletonName(name) /** String representation of existentially bound variable */ def existentialToString = @@ -2839,7 +2915,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => type TypeOfClonedSymbol = TermSymbol - override def name_=(name: Name) { + override def name_=(name: Name): Unit = { if (name != rawname) { super.name_=(name) // logging changeNameInOwners(name) @@ -2853,6 +2929,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isValue = !(isModule && hasFlag(PACKAGE | JAVA)) override def isVariable = isMutable && !isMethod override def isTermMacro = hasFlag(MACRO) + def isAnnotationMacro = hasFlag(MACRO) && name == nme.macroTransform && owner.isClass && owner.hasFlag(MACRO) // interesting only for lambda lift. Captured variables are accessed from inner lambdas. override def isCapturedVariable = hasAllFlags(MUTABLE | CAPTURED) && !hasFlag(METHOD) @@ -2888,8 +2965,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isMixinConstructor = rawname == nme.MIXIN_CONSTRUCTOR override def isConstructor = isClassConstructor || isMixinConstructor + override def isPackage = hasFlag(PACKAGE) override def isPackageObject = isModule && (rawname == nme.PACKAGE) + override def isExistentiallyBound = this hasFlag EXISTENTIAL + // The name in comments is what it is being disambiguated from. // TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names. 
override def resolveOverloadedFlag(flag: Long) = flag match { @@ -2901,7 +2981,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def referenced: Symbol = _referenced - def referenced_=(x: Symbol) { _referenced = x } + def referenced_=(x: Symbol): Unit = { _referenced = x } def existentialBound = singletonBounds(this.tpe) @@ -2932,25 +3012,19 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (unexpandedName endsWith (nme.OUTER)) initialize.referenced else NoSymbol - def setModuleClass(clazz: Symbol): TermSymbol = { + def setModuleClass(clazz: Symbol): this.type = { assert(isModule, this) referenced = clazz this } /** change name by appending $$ - * Do the same for any accessed symbols or setters/getters + * Do the same for any accessed symbols to preserve serialization compatibility. */ - override def expandName(base: Symbol) { + override def expandName(base: Symbol): Unit = { if (!hasFlag(EXPANDEDNAME)) { setFlag(EXPANDEDNAME) - if (hasAccessorFlag && !isDeferred) { - accessed.expandName(base) - } - else if (hasGetter) { - getterIn(owner).expandName(base) - setterIn(owner).expandName(base) - } + if (hasAccessorFlag && !isDeferred) accessed.expandName(base) name = nme.expandedName(name.toTermName, base) } } @@ -2960,13 +3034,16 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** A class for module symbols */ class ModuleSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName) extends TermSymbol(initOwner, initPos, initName) with ModuleSymbolApi { - private var flatname: TermName = _ + private[this] var flatname: TermName = _ override def associatedFile = moduleClass.associatedFile - override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f } + override def associatedFile_=(f: AbstractFile): Unit = { moduleClass.associatedFile = f } override def moduleClass = referenced + final override def skipPackageObject: Symbol = + if (isPackageObject) owner else this + override def owner = 
{ // a non-static module symbol gets the METHOD flag in uncurry's info transform -- see isModuleNotMethod if (!isMethod && needsFlatClasses) rawowner.owner @@ -3000,7 +3077,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Note also that trait vals are modelled as getters, and thus that user-supplied code appears in their rhs. // Originally, it may have been an optimization to skip methods that were not user-defined (getters), // but it doesn't even exclude setters, contrary to its original comment (// exclude all accessors) - override def isSourceMethod = !(this hasFlag STABLE) + override def isSourceMethod = !hasStableFlag // unfortunately having the CASEACCESSOR flag does not actually mean you // are a case accessor (you can also be a field.) @@ -3011,7 +3088,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // any more because of t8011.scala, which demonstrates a problem with the extension methods // phase. As it moves a method body to an extension method in the companion, it substitutes // the new type parameter symbols into the method body, which mutates the base type sequence of - // a local class symbol. We can no longer assume that`mtpePre eq pre` is a sufficient condition + // a local class symbol. We can no longer assume that `mtpePre eq pre` is a sufficient condition // to use the cached result here. 
// // Elaborating: If we allow for the possibility of mutation of symbol infos, `sym.tpeHK.asSeenFrom(pre, sym.owner)` @@ -3039,16 +3116,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isVarargs: Boolean = definitions.isVarArgsList(paramss.flatten) - override def returnType: Type = { - def loop(tpe: Type): Type = - tpe match { - case NullaryMethodType(ret) => loop(ret) - case MethodType(_, ret) => loop(ret) - case PolyType(_, tpe) => loop(tpe) - case tpe => tpe - } - loop(info) - } + override def returnType: Type = definitions.finalResultType(info) override def exceptions = { rawInfo match { @@ -3064,7 +3132,15 @@ trait Symbols extends api.Symbols { self: SymbolTable => class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends TypeSymbol(initOwner, initPos, initName) { type TypeOfClonedSymbol = TypeSymbol - override def variance = if (isLocalToThis) Bivariant else info.typeSymbol.variance + override def variance = + // A non-applied parameterized type alias can appear in any variance position + if (typeParams.nonEmpty) + Invariant + else if (isLocalToThis) + Bivariant + else + info.typeSymbol.variance + override def isContravariant = variance.isContravariant override def isCovariant = variance.isCovariant final override def isAliasType = true @@ -3117,10 +3193,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => case _ => super.resolveOverloadedFlag(flag) } - private var tyconCache: Type = null - private var tyconRunId = NoRunId - private var tpeCache: Type = _ - private var tpePeriod = NoPeriod + private[this] var tyconCache: Type = null + private[this] var tyconRunId = NoRunId + private[this] var tpeCache: Type = _ + private[this] var tpePeriod = NoPeriod override def isAbstractType = this hasFlag DEFERRED override def isContravariant = this hasFlag CONTRAVARIANT @@ -3135,7 +3211,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // TODO - don't allow names to be 
renamed in this unstructured a fashion. // Rename as little as possible. Enforce invariants on all renames. - override def name_=(name: Name) { + override def name_=(name: Name): Unit = { if (name != rawname) { super.name_=(name) // logging changeNameInOwners(name) @@ -3172,13 +3248,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def tpeHK: Type = typeConstructor private def tyconCacheNeedsUpdate = (tyconCache eq null) || tyconRunId != currentRunId - private def setTyconCache(tycon: Type) { + private def setTyconCache(tycon: Type): Unit = { tyconCache = tycon tyconRunId = currentRunId assert(tyconCache ne null, this) } - private def maybeUpdateTypeCache() { + private def maybeUpdateTypeCache(): Unit = { if (tpePeriod != currentPeriod) { if (isValid(tpePeriod)) tpePeriod = currentPeriod @@ -3186,7 +3262,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => updateTypeCache() // perform the actual update } } - private def updateTypeCache() { + private def updateTypeCache(): Unit = { if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor) @@ -3205,7 +3281,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => setTyconCache(tpeCache) } - override def info_=(tp: Type) { + override def info_=(tp: Type): Unit = { tpePeriod = NoPeriod tyconCache = null super.info_=(tp) @@ -3310,6 +3386,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isCaseClass = this hasFlag CASE override def isClassLocalToConstructor = this hasFlag INCONSTRUCTOR override def isModuleClass = this hasFlag MODULE + override def isPackage = hasFlag(PACKAGE) // i.e., isPackageClass override def isPackageClass = this hasFlag PACKAGE override def isTrait = this hasFlag TRAIT @@ -3367,7 +3444,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else _associatedFile } ) - override def associatedFile_=(f: AbstractFile) { _associatedFile = f } + override def associatedFile_=(f: AbstractFile): Unit = { _associatedFile = f } 
override def reset(completer: Type): this.type = { super.reset(completer) @@ -3399,7 +3476,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def thisSym: Symbol = thissym /** Sets the self type of the class */ - override def typeOfThis_=(tp: Type) { + override def typeOfThis_=(tp: Type): Unit = { thissym = newThisSym(nme.this_, pos).setInfo(tp) } @@ -3413,6 +3490,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => clone } + override lazy val flipped: ClassSymbol = { + val clone = cloneSymbol(owner) + clone.rawInfo.typeParams.foreach { sym => + if (sym.isContravariant) sym.resetFlag(Flag.CONTRAVARIANT).setFlag(Flag.COVARIANT) + } + clone + } + override def derivedValueClassUnbox = // (info.decl(nme.unbox)) orElse uncomment once we accept unbox methods (info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse @@ -3420,7 +3505,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => private[this] var childSet: Set[Symbol] = Set() override def children = childSet - override def addChild(sym: Symbol) { + override def addChild(sym: Symbol): Unit = { if(!isPastTyper && hasAttachment[KnownDirectSubclassesCalled.type] && !childSet.contains(sym)) globalError(s"knownDirectSubclasses of ${this.name} observed before subclass ${sym.name} registered") @@ -3451,13 +3536,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName) extends ClassSymbol(owner, pos, name) { - private[this] var module: Symbol = _ + private[this] var moduleSymbol: Symbol = _ private[this] var typeOfThisCache: Type = _ private[this] var typeOfThisPeriod = NoPeriod - private var implicitMembersCacheValue: Scope = EmptyScope - private var implicitMembersCacheKey1: Type = NoType - private var implicitMembersCacheKey2: ScopeEntry = null + private[this] var implicitMembersCacheValue: Scope = EmptyScope + private[this] var implicitMembersCacheKey1: Type = NoType + 
private[this] var implicitMembersCacheKey2: ScopeEntry = null override def isModuleClass = true override def linkedClassOfClass = companionClass @@ -3484,8 +3569,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => implicitMembersCacheValue } // The null check seems to be necessary for the reifier. - override def sourceModule = if (module ne null) module else companionModule - override def sourceModule_=(module: Symbol) { this.module = module } + override def sourceModule = if (moduleSymbol ne null) moduleSymbol else companionModule + override def sourceModule_=(module: Symbol): Unit = { this.moduleSymbol = module } } class PackageObjectClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position) @@ -3507,7 +3592,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => class RefinementClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position) extends ClassSymbol(owner0, pos0, tpnme.REFINE_CLASS_NAME) { - override def name_=(name: Name) { + override def name_=(name: Name): Unit = { abort("Cannot set name of RefinementClassSymbol to " + name) } override def isRefinementClass = true @@ -3589,14 +3674,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => privateWithin = this override def info_=(info: Type) = { - infos = TypeHistory(1, NoType, null) + infos = noTypeHistory unlock() validTo = currentPeriod } override def flagMask = AllFlags override def exists = false override def isHigherOrderTypeParameter = false - override def isStrictFP = false override def companionClass = NoSymbol override def companionModule = NoSymbol override def companionSymbol = NoSymbol @@ -3606,8 +3690,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def locationString: String = "" override def enclClassChain = Nil override def enclClass: Symbol = this - override def enclosingTopLevelClass: Symbol = this - override def originalEnclosingTopLevelClassOrDummy: Symbol = this override def enclosingPackageClass: Symbol = this override def 
enclMethod: Symbol = this override def associatedFile = NoAbstractFile @@ -3642,7 +3724,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (syms.isEmpty) Nil else { val syms1 = mapList(syms)(symFn) - val map = new SubstSymMap(syms, syms1) + val map = SubstSymMap(syms, syms1) syms1.foreach(_.modifyInfo(map)) syms1 } @@ -3701,12 +3783,24 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def deriveTypeWithWildcards(syms: List[Symbol])(tpe: Type): Type = { if (syms.isEmpty) tpe - else tpe.instantiateTypeParams(syms, syms map (_ => WildcardType)) + else tpe.instantiateTypeParams(syms, WildcardType.fillList(syms.length)) } /** Convenience functions which derive symbols by cloning. */ def cloneSymbols(syms: List[Symbol]): List[Symbol] = - deriveSymbols(syms, _.cloneSymbol) + if (syms.isEmpty) Nil + else { + val syms1 = mapList(syms)(_.cloneSymbol) + cloneSymbolsSubstSymMap.using { (msm: SubstSymMap) => + msm.reset(syms, syms1) + syms1.foreach(_.modifyInfo(msm)) + } + syms1 + } + + private[this] val cloneSymbolsSubstSymMap: ReusableInstance[SubstSymMap] = + ReusableInstance[SubstSymMap](SubstSymMap(), enabled = isCompilerUniverse) + def cloneSymbolsAtOwner(syms: List[Symbol], owner: Symbol): List[Symbol] = deriveSymbols(syms, _ cloneSymbol owner) @@ -3751,32 +3845,52 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Return closest enclosing method, unless shadowed by an enclosing class. */ // TODO Move back to ExplicitOuter when the other call site is removed. // no use of closures here in the interest of speed. 
+ @tailrec final def closestEnclMethod(from: Symbol): Symbol = if (from.isSourceMethod) from else if (from.isClass) NoSymbol else closestEnclMethod(from.owner) /** An exception for cyclic references of symbol definitions */ - case class CyclicReference(sym: Symbol, info: Type) - extends TypeError("illegal cyclic reference involving " + sym) { + case class CyclicReference(sym: Symbol, info: Type, trace: Array[Symbol] = CyclicReference.emptyTrace) + extends TypeError(s"illegal cyclic reference involving $sym") { if (settings.isDebug) printStackTrace() } + object CyclicReference { + val emptyTrace: Array[Symbol] = Array.empty[Symbol] + } /** A class for type histories */ - private case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) { + private final case class TypeHistory protected (private var _validFrom: Period, private var _info: Type, private var _prev: TypeHistory) { assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this) assert(validFrom != NoPeriod, this) + def validFrom: Int = _validFrom + def validFrom_=(x: Int): Unit = {_validFrom = x } + def info: Type = _info + def prev: TypeHistory = _prev + + // OPT: mutate the current TypeHistory rather than creating a new one. TypeHistory instances should not be shared. 
+ final def reset(validFrom: Period, info: Type): TypeHistory = + if (this ne noTypeHistory) { + this._validFrom = validFrom + this._info = info + this._prev = null + this + } else + TypeHistory(validFrom, info, null) + private def phaseString = { val phase = phaseOf(validFrom) s"$phase: ${exitingPhase(phase)(info.toString)}" } - override def toString = toList reverseMap (_.phaseString) mkString ", " + override def toString = toList.reverseIterator map (_.phaseString) mkString ", " - def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList ) + private def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList ) - def oldest: TypeHistory = if (prev == null) this else prev.oldest + @tailrec def oldest: TypeHistory = if (prev == null) this else prev.oldest } + private[this] final val noTypeHistory = TypeHistory(1, NoType, null) // ----- Hoisted closures and convenience methods, for compile time reductions ------- @@ -3788,6 +3902,21 @@ trait Symbols extends api.Symbols { self: SymbolTable => case _ => true } + private[scala] final def argsDependOnPrefix(sym: Symbol): Boolean = { + val tt = sym.owner.thisType + + @tailrec + def loop(mt: Type): Boolean = { + mt match { + case MethodType(params, restpe) => params.exists(_.info.dealias.exists(_ == tt)) || loop(restpe) + case PolyType(_, restpe) => loop(restpe) + case _ => false + } + } + + tt.isInstanceOf[SingletonType] && loop(sym.info) + } + // -------------- Completion -------------------------------------------------------- // is used to differentiate levels of thread-safety in `Symbol.isThreadsafe` @@ -3795,15 +3924,22 @@ trait Symbols extends api.Symbols { self: SymbolTable => val AllOps = SymbolOps(isFlagRelated = false, mask = 0L) def FlagOps(mask: Long) = SymbolOps(isFlagRelated = true, mask = mask) - private def forEachRelevantSymbols(syms: Seq[Symbol], fn: Symbol => Unit): Unit = - syms.foreach { sym => - fn(sym) - fn(sym.moduleClass) - fn(sym.sourceModule) - } + 
private def forEachRelevantSymbol(sym: Symbol, fn: Symbol => Unit): Unit = { + fn(sym) + fn(sym.moduleClass) + fn(sym.sourceModule) + } - def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = forEachRelevantSymbols(syms, _.markFlagsCompleted(mask)) - def markAllCompleted(syms: Symbol*): Unit = forEachRelevantSymbols(syms, _.markAllCompleted) + final def markFlagsCompleted(sym: Symbol)(mask: Long): Unit = forEachRelevantSymbol(sym, _.markFlagsCompleted(mask)) + final def markFlagsCompleted(sym1: Symbol, sym2: Symbol)(mask: Long): Unit = { + markFlagsCompleted(sym1)(mask) + markFlagsCompleted(sym2)(mask) + } + final def markAllCompleted(sym: Symbol): Unit = forEachRelevantSymbol(sym, _.markAllCompleted()) + final def markAllCompleted(sym1: Symbol, sym2: Symbol): Unit = { + markAllCompleted(sym1) + markAllCompleted(sym2) + } } trait SymbolsStats { diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index a2fda093ef08..d0e46d98de99 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,8 +15,10 @@ package reflect package internal import Flags._ -import util._ +import util.{FreshNameCreator, ListOfNil} +import scala.annotation.tailrec import scala.collection.mutable.ListBuffer +import scala.util.chaining._ abstract class TreeGen { val global: SymbolTable @@ -146,10 +148,12 @@ abstract class TreeGen { // || patPre.typeSymbol.isPackageClass // || selPre =:= patPre) - def mkAttributedQualifierIfPossible(prefix: Type): Option[Tree] = prefix match { + @tailrec final def mkAttributedQualifierIfPossible(prefix: Type): Option[Tree] = prefix match { case NoType | NoPrefix | ErrorType => None case TypeRef(_, sym, _) if sym.isModule || sym.isClass || sym.isType => None - case pre => Some(mkAttributedQualifier(prefix)) + case RefinedType(parents, _) if !parents.exists(_.isStable) => None + case AnnotatedType(_, tpe) => mkAttributedQualifierIfPossible(tpe) + case prefix => Some(mkAttributedQualifier(prefix)) } @@ -180,7 +184,7 @@ abstract class TreeGen { def mkUnattributedRef(sym: Symbol): RefTree = mkUnattributedRef(sym.fullNameAsName('.')) def mkUnattributedRef(fullName: Name): RefTree = { - val hd :: tl = nme.segments(fullName.toString, assumeTerm = fullName.isTermName) + val hd :: tl = nme.segments(fullName.toString, assumeTerm = fullName.isTermName): @unchecked tl.foldLeft(Ident(hd): RefTree)(Select(_,_)) } @@ -320,7 +324,7 @@ abstract class TreeGen { /** Wrap an expression in a named argument. 
*/ def mkNamedArg(name: Name, tree: Tree): Tree = mkNamedArg(Ident(name), tree) - def mkNamedArg(lhs: Tree, rhs: Tree): Tree = atPos(rhs.pos)(AssignOrNamedArg(lhs, rhs)) + def mkNamedArg(lhs: Tree, rhs: Tree): Tree = atPos(rhs.pos)(NamedArg(lhs, rhs)) /** Builds a tuple */ def mkTuple(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match { @@ -353,7 +357,7 @@ abstract class TreeGen { Apply(Select(tree1, Boolean_or), List(tree2)) def mkRuntimeUniverseRef: Tree = { - assert(ReflectRuntimeUniverse != NoSymbol) + assert(ReflectRuntimeUniverse != NoSymbol, "Missing ReflectRuntimeUniverse") mkAttributedRef(ReflectRuntimeUniverse) setType singleType(ReflectRuntimeUniverse.owner.thisPrefix, ReflectRuntimeUniverse) } @@ -396,7 +400,7 @@ abstract class TreeGen { val (edefs, rest) = body span treeInfo.isEarlyDef val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef - val gvdefs = evdefs map { + val gvdefs = evdefs collect { case vdef @ ValDef(_, _, tpt, _) => copyValDef(vdef)( // atPos for the new tpt is necessary, since the original tpt might have no position @@ -420,11 +424,8 @@ abstract class TreeGen { if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit) vparamss1 = List() :: vparamss1 val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass - // this requires knowing which of the parents is a type macro and which is not - // and that's something that cannot be found out before typer - // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon) // this means that we don't know what will be the arguments of the super call - // therefore here we emit a dummy which gets populated when the template is named and typechecked + // here we emit a dummy which gets populated when the template is named and typechecked Some( atPos(wrappingPos(superPos, lvdefs ::: vparamss1.flatten).makeTransparent) ( DefDef(constrMods, nme.CONSTRUCTOR, 
List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), mkLiteralUnit)))) @@ -451,7 +452,7 @@ abstract class TreeGen { else parents def mkClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], templ: Template): ClassDef = { - val isInterface = mods.isTrait && (templ.body forall treeInfo.isInterfaceMember) + val isInterface = mods.isTrait && templ.body.forall(treeInfo.isInterfaceMember) val mods1 = if (isInterface) (mods | Flags.INTERFACE) else mods ClassDef(mods1, name, tparams, templ) } @@ -630,18 +631,18 @@ abstract class TreeGen { * 4. * * for (P <- G; E; ...) ... - * => + * ==> * for (P <- G.filter (P => E); ...) ... * * 5. For N < MaxTupleArity: * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * for (P_1 <- G; P_2 = E_2; P_N = E_N; ...) * ==> * for (TupleN(P_1, P_2, ... P_N) <- * for (x_1 @ P_1 <- G) yield { * val x_2 @ P_2 = E_2 * ... - * val x_N & P_N = E_N + * val x_N @ P_N = E_N * TupleN(x_1, ..., x_N) * } ...) * @@ -663,13 +664,20 @@ abstract class TreeGen { * the limits given by pat and body. 
*/ def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = { - def wrapped = wrappingPos(List(pat, body)) - def splitpos = (if (pos != NoPosition) wrapped.withPoint(pos.point) else pos).makeTransparent + val splitpos = { + val wrapped = wrappingPos(List(pat, body)) + // ignore proposed point if not in range + val res = + if (pos != NoPosition && wrapped.start <= pos.point && pos.point < wrapped.end) wrapped.withPoint(pos.point) + else pos + res.makeTransparent + } matchVarPattern(pat) match { case Some((name, tpt)) => - Function( - List(atPos(pat.pos) { ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) }), - body) setPos splitpos + val p = atPos(pat.pos) { + ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) + }.tap(propagatePatVarDefAttachments(pat, _)) + Function(List(p), body).setPos(splitpos) case None => atPos(splitpos) { mkVisitor(List(CaseDef(pat, EmptyTree, body)), checkExhaustive = false) @@ -682,22 +690,28 @@ abstract class TreeGen { def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree = // ForAttachment on the method selection is used to differentiate // result of for desugaring from a regular method call - Apply(Select(qual, meth) setPos qual.pos updateAttachment ForAttachment, - List(makeClosure(pos, pat, body))) setPos pos + Apply(Select(qual, meth).setPos(qual.pos).updateAttachment(ForAttachment), + List(makeClosure(pos, pat, body))).setPos(pos) - /* If `pat` is not yet a `Bind` wrap it in one with a fresh name */ - def makeBind(pat: Tree): Tree = pat match { - case Bind(_, _) => pat - case _ => Bind(freshTermName(), pat) setPos pat.pos + /* If `pat` is not yet a `Bind` wrap it in one with a fresh name. + * If the fresh patvar is for tupling in the desugared expression, + * it receives the transparent position of the pattern, so it is never warned about. + * Otherwise, add NoWarnAttachment. 
+ */ + def makeBind(pat: Tree): Bind = pat match { + case pat: Bind => pat + case _ => Bind(freshTermName(), pat).setPos(pat.pos) + .tap(bind => if (!bind.pos.isTransparent) bind.updateAttachment(NoWarnAttachment)) } /* A reference to the name bound in Bind `pat`. */ - def makeValue(pat: Tree): Tree = pat match { - case Bind(name, _) => Ident(name) setPos pat.pos.focus + def makeValue(pat: Bind): Ident = pat match { + case Bind(name, _) => Ident(name).setPos(pat.pos.focus) } - /* The position of the closure that starts with generator at position `genpos`. */ - def closurePos(genpos: Position) = + // The position of the closure that starts with generator at position `genpos`. + // This position is carried by ValFrom. + def closurePos(genpos: Position): Position = if (genpos == NoPosition) NoPosition else { val end = body.pos match { @@ -711,54 +725,72 @@ abstract class TreeGen { case (t @ ValFrom(pat, rhs)) :: Nil => makeCombination(closurePos(t.pos), mapName, rhs, pat, body) case (t @ ValFrom(pat, rhs)) :: (rest @ (ValFrom(_, _) :: _)) => - makeCombination(closurePos(t.pos), flatMapName, rhs, pat, - mkFor(rest, sugarBody)) + makeCombination(closurePos(t.pos), flatMapName, rhs, pat, body = mkFor(rest, sugarBody)) case (t @ ValFrom(pat, rhs)) :: Filter(test) :: rest => - mkFor(ValFrom(pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)).setPos(t.pos) :: rest, sugarBody) + mkFor(ValFrom(pat, makeCombination(rhs.pos | test.pos, nme.withFilter, rhs, pat.duplicate, test)).setPos(t.pos) :: rest, sugarBody) case (t @ ValFrom(pat, rhs)) :: rest => - val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile { ValEq.unapply(_).nonEmpty } - assert(!valeqs.isEmpty) + val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(ValEq.unapply(_).nonEmpty) + assert(!valeqs.isEmpty, "Missing ValEq") val rest1 = rest.drop(valeqs.length) - val pats = valeqs map { case ValEq(pat, _) => pat } - val rhss = valeqs map { case ValEq(_, rhs) => 
rhs } + val (pats, rhss) = valeqs.map(ValEq.unapply(_).get).unzip val defpat1 = makeBind(pat) - val defpats = pats map makeBind - val pdefs = (defpats, rhss).zipped flatMap mkPatDef - val ids = (defpat1 :: defpats) map makeValue + val defpats = pats.map(makeBind) + val pdefs = defpats.lazyZip(rhss).flatMap((p, r) => mkPatDef(Modifiers(0), p, r, r.pos, forFor = true)) + val tupled = { + val ids = (defpat1 :: defpats).map(makeValue) + atPos(wrappingPos(ids))(mkTuple(ids).updateAttachment(ForAttachment)) + } val rhs1 = mkFor( List(ValFrom(defpat1, rhs).setPos(t.pos)), - Yield(Block(pdefs, atPos(wrappingPos(ids)) { mkTuple(ids) }) setPos wrappingPos(pdefs))) - val allpats = (pat :: pats) map (_.duplicate) + Yield(Block(pdefs, tupled).setPos(wrappingPos(pdefs))) + ) + val untupled = { + val allpats = (pat :: pats).map(_.duplicate) + atPos(wrappingPos(allpats))(mkTuple(allpats).updateAttachment(ForAttachment)) + } val pos1 = if (t.pos == NoPosition) NoPosition else rangePos(t.pos.source, t.pos.start, t.pos.point, rhs1.pos.end) - val vfrom1 = ValFrom(atPos(wrappingPos(allpats)) { mkTuple(allpats) }, rhs1).setPos(pos1) + val vfrom1 = ValFrom(untupled, rhs1).setPos(pos1) mkFor(vfrom1 :: rest1, sugarBody) case _ => EmptyTree //may happen for erroneous input - } } - /** Create tree for pattern definition */ - def mkPatDef(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = - mkPatDef(Modifiers(0), pat, rhs) - + // Fresh terms are not warnable. Bindings written `x@_` may be deemed unwarnable. private def propagateNoWarnAttachment(from: Tree, to: ValDef): to.type = if (isPatVarWarnable && from.hasAttachment[NoWarnAttachment.type]) to.updateAttachment(NoWarnAttachment) else to - // Keep marker for `x@_`, add marker for `val C(x) = ???` to distinguish from ordinary `val x = ???`. + // Distinguish patvar in pattern `val C(x) = ???` from `val x = ???`. Also `for (P(x) <- G)`. 
private def propagatePatVarDefAttachments(from: Tree, to: ValDef): to.type = propagateNoWarnAttachment(from, to).updateAttachment(PatVarDefAttachment) + /** Create tree for pattern definition */ + def mkPatDef(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = + mkPatDef(Modifiers(0), pat, rhs, rhs.pos) + /** Create tree for pattern definition */ - def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = mkPatDef(mods, pat, rhs, rhs.pos)(fresh) - def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree, rhsPos: Position)(implicit fresh: FreshNameCreator): List[ValDef] = matchVarPattern(pat) match { + def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = mkPatDef(mods, pat, rhs, rhs.pos) + + def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree, rhsPos: Position)(implicit fresh: FreshNameCreator): List[ValDef] = mkPatDef(mods, pat, rhs, rhsPos, forFor = false) + + private def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree, rhsPos: Position, forFor: Boolean)(implicit fresh: FreshNameCreator): List[ValDef] = matchVarPattern(pat) match { case Some((name, tpt)) => - List(atPos(pat.pos union rhsPos) { - propagateNoWarnAttachment(pat, ValDef(mods, name.toTermName, tpt, rhs)) - }) + atPos(pat.pos | rhsPos) { + ValDef(mods, name.toTermName, tpt, rhs) + .tap { vd => + val namePos = pat match { + case id @ Ident(_) => id.pos + case Typed(id @ Ident(_), _) => id.pos + case pat => pat.pos + } + vd.updateAttachment(NamePos(namePos)) + if (forFor) propagatePatVarDefAttachments(pat, vd) + else propagateNoWarnAttachment(pat, vd) + } + } :: Nil case None => // in case there is exactly one variable x_1 in pattern @@ -770,7 +802,8 @@ abstract class TreeGen { // ... 
// val/var x_N = t$._N - val rhsUnchecked = mkUnchecked(rhs) + val linting = isVarDefWarnable + val rhsUnchecked = if (linting) rhs else mkUnchecked(rhs) // TODO: clean this up -- there is too much information packed into mkPatDef's `pat` argument // when it's a simple identifier (case Some((name, tpt)) -- above), @@ -783,37 +816,47 @@ abstract class TreeGen { case Typed(expr, tpt) if !expr.isInstanceOf[Ident] => val rhsTypedUnchecked = if (tpt.isEmpty) rhsUnchecked - else Typed(rhsUnchecked, tpt) setPos (rhsPos union tpt.pos) + else Typed(rhsUnchecked, tpt).setPos(rhsPos | tpt.pos) (expr, rhsTypedUnchecked) case ok => (ok, rhsUnchecked) } val vars = getVariables(pat1) - val matchExpr = atPos((pat1.pos union rhsPos).makeTransparent) { + val matchExpr = atPos((pat1.pos | rhsPos).makeTransparent) { Match( rhs1, List( atPos(pat1.pos) { - CaseDef(pat1, EmptyTree, mkTuple(vars map (_._1) map Ident.apply)) + val args = vars.map { + case (name, _, pos, _) => Ident(name).setPos(pos.makeTransparent) // cf makeValue + } + CaseDef(pat1, EmptyTree, mkTuple(args).updateAttachment(ForAttachment)) } )) } vars match { - case List((vname, tpt, pos, original)) => - List(atPos(pat.pos union pos union rhsPos) { - propagatePatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, matchExpr)) - }) + case (vname, tpt, pos, original) :: Nil => + atPos(pat.pos | pos | rhsPos) { + ValDef(mods, vname.toTermName, tpt, matchExpr) + .updateAttachment(NamePos(pos)) + .tap(propagatePatVarDefAttachments(original, _)) + } :: Nil case _ => val tmp = freshTermName() - val firstDef = - atPos(matchExpr.pos) { - ValDef(Modifiers(PrivateLocal | SYNTHETIC | ARTIFACT | (mods.flags & LAZY)), - tmp, TypeTree(), matchExpr) - } + val firstDef = atPos(matchExpr.pos) { + ValDef(Modifiers(PrivateLocal | SYNTHETIC | ARTIFACT | (mods.flags & LAZY)), tmp, TypeTree(), matchExpr) + .tap(vd => if (vars.isEmpty) { + vd.updateAttachment(PatVarDefAttachment) // warn later if this introduces a Unit-valued field + 
if (mods.isImplicit) + currentRun.reporting.deprecationWarning(matchExpr.pos, "Implicit pattern definition binds no variables", since="2.13", "", "") + }) + } var cnt = 0 val restDefs = for ((vname, tpt, pos, original) <- vars) yield atPos(pos) { cnt += 1 - propagatePatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), TermName("_" + cnt)))) + ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), TermName(s"_$cnt"))) + .updateAttachment(NamePos(pos)) + .tap(propagatePatVarDefAttachments(original, _)) } firstDef :: restDefs } @@ -821,43 +864,35 @@ abstract class TreeGen { /** Create tree for for-comprehension generator */ def mkGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree)(implicit fresh: FreshNameCreator): Tree = { - val pat1 = patvarTransformerForFor.transform(pat) + val pat1 = patvarTransformer.transform(pat) if (valeq) ValEq(pat1, rhs).setPos(pos) else ValFrom(pat1, mkCheckIfRefutable(pat1, rhs)).setPos(pos) } - private def unwarnable(pat: Tree): Tree = { - pat foreach { - case b @ Bind(_, _) => b updateAttachment NoWarnAttachment - case _ => - } - pat - } - def mkCheckIfRefutable(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator) = if (treeInfo.isVarPatternDeep(pat)) rhs else { val cases = List( - CaseDef(unwarnable(pat.duplicate), EmptyTree, Literal(Constant(true))), + CaseDef(pat.duplicate, EmptyTree, Literal(Constant(true))), CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) ) val visitor = mkVisitor(cases, checkExhaustive = false, nme.CHECK_IF_REFUTABLE_STRING) atPos(rhs.pos)(Apply(Select(rhs, nme.withFilter), visitor :: Nil)) } - /** If tree is a variable pattern, return Some("its name and type"). - * Otherwise return none */ + /** If tree is a variable pattern, return Some("its name and type"), otherwise None. + * A varpat is x, x @ _, x @ (_: T), x: T. + * For normal identifiers, backticks don't matter, but as a special case, + * backticked underscore is a variable and not a wildcard. 
+ */ private def matchVarPattern(tree: Tree): Option[(Name, Tree)] = { - def wildType(t: Tree): Option[Tree] = t match { - case Ident(x) if x.toTermName == nme.WILDCARD => Some(TypeTree()) - case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt) - case _ => None - } + import nme.{WILDCARD => WC} tree match { - case Ident(name) => Some((name, TypeTree())) - case Bind(name, body) => wildType(body) map (x => (name, x)) - case Typed(Ident(name), tpt) => Some((name, tpt)) - case _ => None + case id @ Ident(name) if name.toTermName != WC || id.isBackquoted => Some((name, TypeTree())) + case Bind(name, Ident(x)) if x.toTermName == WC => Some((name, TypeTree())) + case Bind(name, Typed(Ident(x), tpt)) if x.toTermName == WC => Some((name, tpt)) + case Typed(id @ Ident(name), tpt) if name.toTermName != WC || id.isBackquoted => Some((name, tpt)) + case _ => None } } @@ -874,34 +909,36 @@ abstract class TreeGen { * synthetic for all nodes that contain a variable position. */ class GetVarTraverser extends Traverser { - val buf = new ListBuffer[(Name, Tree, Position, Tree)] + val buf = ListBuffer.empty[(Name, Tree, Position, Bind)] def namePos(tree: Tree, name: Name): Position = if (!tree.pos.isRange || name.containsName(nme.raw.DOLLAR)) tree.pos.focus else { val start = tree.pos.start val end = start + name.decode.length - rangePos(tree.pos.source, start, start, end) + rangePos(tree.pos.source, start = start, point = start, end = end) // Bind should get NamePos in parser } override def traverse(tree: Tree): Unit = { - def seenName(name: Name) = buf exists (_._1 == name) - def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name), tree)) + def add(name: Name, t: Tree, b: Bind) = { + val seenName = buf.exists(_._1 == name) + if (!seenName) buf.addOne((name, t, namePos(tree, name), b)) + } val bl = buf.length tree match { - case Bind(nme.WILDCARD, _) => + case Bind(nme.WILDCARD, _) => super.traverse(tree) - case Bind(name, 
Typed(tree1, tpt)) => + case tree @ Bind(name, Typed(tree1, tpt)) => val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate - add(name, newTree) + add(name, newTree, tree) traverse(tree1) - case Bind(name, tree1) => + case tree @ Bind(name, tree1) => // can assume only name range as position, as otherwise might overlap // with binds embedded in pattern tree1 - add(name, TypeTree()) + add(name, TypeTree(), tree) traverse(tree1) case _ => @@ -925,19 +962,15 @@ abstract class TreeGen { * x becomes x @ _ * x: T becomes x @ (_: T) */ - class PatvarTransformer(forFor: Boolean) extends Transformer { + class PatvarTransformer extends Transformer { override def transform(tree: Tree): Tree = tree match { case Ident(name) if treeInfo.isVarPattern(tree) && name != nme.WILDCARD => atPos(tree.pos) { - val b = Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))) - if (forFor && isPatVarWarnable) b updateAttachment NoWarnAttachment - else b + Bind(name, atPos(tree.pos.focus) { Ident(nme.WILDCARD) }) } case Typed(id @ Ident(name), tpt) if treeInfo.isVarPattern(id) && name != nme.WILDCARD => atPos(tree.pos.withPoint(id.pos.point)) { - Bind(name, atPos(tree.pos.withStart(tree.pos.point)) { - Typed(Ident(nme.WILDCARD), tpt) - }) + Bind(name, atPos(tree.pos.withStart(tree.pos.point)) { Typed(Ident(nme.WILDCARD), tpt) }) } case Apply(fn @ Apply(_, _), args) => treeCopy.Apply(tree, transform(fn), transformTrees(args)) @@ -957,11 +990,11 @@ abstract class TreeGen { /** Can be overridden to depend on settings.warnUnusedPatvars. */ def isPatVarWarnable: Boolean = true - /** Not in for comprehensions, whether to warn unused pat vars depends on flag. */ - object patvarTransformer extends PatvarTransformer(forFor = false) + /** Can be overridden to depend on settings.lintValPatterns. */ + def isVarDefWarnable: Boolean = false - /** Tag pat vars in for comprehensions. 
*/ - object patvarTransformerForFor extends PatvarTransformer(forFor = true) + /** Not in for comprehensions, whether to warn unused pat vars depends on flag. */ + object patvarTransformer extends PatvarTransformer // annotate the expression with @unchecked def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) { diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index a93146af057c..74409540ba79 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,13 +15,13 @@ package reflect package internal import Flags._ +import scala.annotation.tailrec -/** This class ... - * - * @author Martin Odersky - * @version 1.0 - */ abstract class TreeInfo { + // FIXME: With `global` as a `val`, implementers must use early initializers, which + // are deprecated and will not be supported in 3.0. Please change the design, + // remove the early initializers from implementers, and then remove the + // `@nowarn` annotations from implementers. val global: SymbolTable import global._ @@ -53,7 +53,7 @@ abstract class TreeInfo { case EmptyTree => true case Import(_, _) => true case TypeDef(_, _, _, _) => true - case DefDef(mods, _, _, _, _, __) => mods.isDeferred + case DefDef(mods, _, _, _, _, _) => mods.isDeferred case ValDef(mods, _, _, _) => mods.isDeferred case _ => false } @@ -174,7 +174,8 @@ abstract class TreeInfo { * takes a different code path than all to follow; but they are safe to inline * because the expression result from evaluating them is always the same. 
*/ - def isExprSafeToInline(tree: Tree): Boolean = tree match { + @tailrec + final def isExprSafeToInline(tree: Tree): Boolean = tree match { case EmptyTree | This(_) | Super(_, _) @@ -215,8 +216,10 @@ abstract class TreeInfo { * don't reuse it for important matters like inlining * decisions. */ - def isPureExprForWarningPurposes(tree: Tree): Boolean = tree match { + @tailrec + final def isPureExprForWarningPurposes(tree: Tree): Boolean = tree match { case Typed(expr, _) => isPureExprForWarningPurposes(expr) + case Function(_, _) => true case EmptyTree | Literal(Constant(())) => false case _ => def isWarnableRefTree = tree match { @@ -278,6 +281,11 @@ abstract class TreeInfo { case _ => false } + def isPartialFunctionMissingParamType(tree: Tree): Boolean = tree match { + case Match(EmptyTree, _) => true + case _ => false + } + /** Is symbol potentially a getter of a variable? */ @@ -343,6 +351,42 @@ abstract class TreeInfo { case _ => false } + /** Is tree an application with result `this.type`? + * Accept `b.addOne(x)` and also `xs(i) += x` + * where the op is an assignment operator. 
+ */ + def isThisTypeResult(tree: Tree): Boolean = tree match { + case Applied(fun @ Select(receiver, op), _, argss) => + tree.tpe match { + case ThisType(sym) => + sym == receiver.symbol + case SingleType(_, sym) => + sym == receiver.symbol || argss.exists(_.exists(sym == _.symbol)) + case _ => + def checkSingle(sym: Symbol): Boolean = + (sym == receiver.symbol) || { + receiver match { + case Apply(_, _) => Precedence(op.decoded).level == 0 // xs(i) += x + case _ => receiver.symbol != null && + (receiver.symbol.isGetter || receiver.symbol.isField) // xs.addOne(x) for var xs + } + } + @tailrec def loop(mt: Type): Boolean = mt match { + case MethodType(_, restpe) => + restpe match { + case ThisType(sym) => checkSingle(sym) + case SingleType(_, sym) => checkSingle(sym) + case _ => loop(restpe) + } + case PolyType(_, restpe) => loop(restpe) + case _ => false + } + fun.symbol != null && loop(fun.symbol.info) + } + case _ => + tree.tpe.isInstanceOf[ThisType] + } + /** * Named arguments can transform a constructor call into a block, e.g. 
* (b = foo, a = bar) @@ -359,8 +403,9 @@ abstract class TreeInfo { tree } - /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ - def stripCast(tree: Tree): Tree = tree match { + /** Strips layers of `.asInstanceOf[T]` / `_.\$asInstanceOf[T]()` from an expression */ + @tailrec + final def stripCast(tree: Tree): Tree = tree match { case TypeApply(sel @ Select(inner, _), _) if isCastSymbol(sel.symbol) => stripCast(inner) case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCastSymbol(sel.symbol) => @@ -428,6 +473,7 @@ abstract class TreeInfo { * */ def isVarPatternDeep(tree: Tree): Boolean = { + @tailrec def isVarPatternDeep0(tree: Tree): Boolean = { tree match { case Bind(name, pat) => isVarPatternDeep0(pat) @@ -478,7 +524,7 @@ abstract class TreeInfo { case _ => false } case md: MemberDef => !md.mods.isSynthetic - case tree => true + case _ => true } def lazyValDefRhs(body: Tree) = @@ -489,20 +535,17 @@ abstract class TreeInfo { def recoverBody(body: List[Tree]) = body map { case vd @ ValDef(vmods, vname, _, vrhs) if nme.isLocalName(vname) => - tbody find { - case dd: DefDef => dd.name == vname.dropLocal - case _ => false - } map { dd => - val DefDef(dmods, dname, _, _, _, drhs) = dd - // get access flags from DefDef - val defDefMask = Flags.AccessFlags | OVERRIDE | IMPLICIT | DEFERRED - val vdMods = (vmods &~ defDefMask) | (dmods & defDefMask).flags - // for most cases lazy body should be taken from accessor DefDef - val vdRhs = if (vmods.isLazy) lazyValDefRhs(drhs) else vrhs - copyValDef(vd)(mods = vdMods, name = dname, rhs = vdRhs) - } getOrElse (vd) + tbody.collectFirst { + case DefDef(dmods, dname, _, _, _, drhs) if dname == vname.dropLocal => + // get access flags from DefDef + val defDefMask = Flags.AccessFlags | OVERRIDE | IMPLICIT | DEFERRED + val vdMods = (vmods &~ defDefMask) | (dmods & defDefMask).flags + // for most cases lazy body should be taken from accessor DefDef + val vdRhs = if (vmods.isLazy) 
lazyValDefRhs(drhs) else vrhs + copyValDef(vd)(mods = vdMods, name = dname, rhs = vdRhs) + }.getOrElse(vd) // for abstract and some lazy val/vars - case dd @ DefDef(mods, name, _, _, tpt, rhs) if mods.hasAccessorFlag => + case DefDef(mods, name, _, _, tpt, rhs) if mods.hasAccessorFlag => // transform getter mods to field val vdMods = (if (!mods.hasStableFlag) mods | Flags.MUTABLE else mods &~ Flags.STABLE) &~ Flags.ACCESSOR ValDef(vdMods, name, tpt, rhs) @@ -526,6 +569,12 @@ abstract class TreeInfo { case _ => Nil } + /** The modifiers of the first constructor in `stats`. */ + def firstConstructorMods(stats: List[Tree]): Modifiers = firstConstructor(stats) match { + case DefDef(mods, _, _, _, _, _) => mods + case _ => Modifiers() + } + /** The value definitions marked PRESUPER in this statement sequence */ def preSuperFields(stats: List[Tree]): List[ValDef] = stats collect { case vd: ValDef if isEarlyValDef(vd) => vd } @@ -559,22 +608,19 @@ abstract class TreeInfo { case _ => false } - /** Translates an Assign(_, _) node to AssignOrNamedArg(_, _) if + /** Translates an Assign(_, _) node to NamedArg(_, _) if * the lhs is a simple ident. Otherwise returns unchanged. */ def assignmentToMaybeNamedArg(tree: Tree) = tree match { - case t @ Assign(id: Ident, rhs) => atPos(t.pos)(AssignOrNamedArg(id, rhs)) + case t @ Assign(id: Ident, rhs) => atPos(t.pos)(NamedArg(id, rhs)) case t => t } - /** Is name a left-associative operator? 
*/ - def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':') - /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass /** can this type be a type pattern */ - def mayBeTypePat(tree: Tree): Boolean = tree match { + final def mayBeTypePat(tree: Tree): Boolean = tree match { case CompoundTypeTree(Template(tps, _, Nil)) => tps exists mayBeTypePat case Annotated(_, tp) => mayBeTypePat(tp) case AppliedTypeTree(constr, args) => mayBeTypePat(constr) || args.exists(_.isInstanceOf[Bind]) @@ -642,7 +688,7 @@ abstract class TreeInfo { def catchesThrowable(cdef: CaseDef) = ( cdef.guard.isEmpty && (unbind(cdef.pat) match { case Ident(nme.WILDCARD) => true - case i@Ident(name) => hasNoSymbol(i) + case i@Ident(_) => hasNoSymbol(i) case _ => false }) ) @@ -695,7 +741,8 @@ abstract class TreeInfo { } /** The underlying pattern ignoring any bindings */ - def unbind(x: Tree): Tree = x match { + @tailrec + final def unbind(x: Tree): Tree = x match { case Bind(_, y) => unbind(y) case y => y } @@ -735,6 +782,17 @@ abstract class TreeInfo { ((sym ne null) && sym.initialize.isTrait) } + def hasExplicitUnit(tree: Tree): Boolean = + explicitlyUnit(tree) || { + tree match { + case Apply(f, _) => hasExplicitUnit(f) + case TypeApply(f, _) => hasExplicitUnit(f) + case AppliedTypeTree(f, _) => hasExplicitUnit(f) + case Block(_, expr) => hasExplicitUnit(expr) + case _ => false + } + } + /** Applications in Scala can have one of the following shapes: * * 1) naked core: Ident(_) or Select(_, _) or basically anything else @@ -781,6 +839,7 @@ abstract class TreeInfo { * The original tree if it's not an application. 
*/ def callee: Tree = { + @tailrec def loop(tree: Tree): Tree = tree match { case Apply(fn, _) => loop(fn) case tree => tree @@ -794,7 +853,7 @@ abstract class TreeInfo { def core: Tree = callee match { case TypeApply(fn, _) => fn case AppliedTypeTree(fn, _) => fn - case tree => tree + case callee => callee } /** The type arguments of the `callee`. @@ -821,7 +880,7 @@ abstract class TreeInfo { /** Returns a wrapper that knows how to destructure and analyze applications. */ final def dissectApplied(tree: Tree) = new Applied(tree) - /** Equivalent ot disectApplied(tree).core, but more efficient */ + /** Equivalent to dissectApplied(tree).core, but more efficient */ @scala.annotation.tailrec final def dissectCore(tree: Tree): Tree = tree match { case TypeApply(fun, _) => @@ -852,10 +911,24 @@ abstract class TreeInfo { unapply(dissectApplied(tree)) } + /** + * Deconstructs an application into fun (typically a Select), targs and argss. + * Unlike `Applied`, only matches if the tree is actually an application (Apply and / or TypeApply). 
+ */ + object Application { + def unapply(tree: Tree): Option[(Tree, List[Tree], List[List[Tree]])] = { + val ap = new Applied(tree) + val core = ap.core + if (core eq tree) None + else Some((core, ap.targs, ap.argss)) + } + } + /** Does list of trees start with a definition of - * a class of module with given name (ignoring imports) + * a class or module with given name (ignoring imports) */ - def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match { + @tailrec + final def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match { case Import(_, _) :: xs => firstDefinesClassOrObject(xs, name) case Annotated(_, tree1) :: _ => firstDefinesClassOrObject(List(tree1), name) case ModuleDef(_, `name`, _) :: _ => true @@ -868,6 +941,7 @@ abstract class TreeInfo { */ object Unapplied { // Duplicated with `spliceApply` + @tailrec def unapply(tree: Tree): Option[Tree] = tree match { // scala/bug#7868 Admit Select() to account for numeric widening, e.g. .toInt case Apply(fun, (Ident(nme.SELECTOR_DUMMY)| Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil) @@ -877,25 +951,6 @@ abstract class TreeInfo { } } - /** Is this file the body of a compilation unit which should not - * have Predef imported? - */ - def noPredefImportForUnit(body: Tree) = { - // Top-level definition whose leading imports include Predef. 
- def isLeadingPredefImport(defn: Tree): Boolean = defn match { - case PackageDef(_, defs1) => defs1 exists isLeadingPredefImport - case Import(expr, _) => isReferenceToPredef(expr) - case _ => false - } - // Compilation unit is class or object 'name' in package 'scala' - def isUnitInScala(tree: Tree, name: Name) = tree match { - case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name) - case _ => false - } - - isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body) - } - def isAbsTypeDef(tree: Tree) = tree match { case TypeDef(_, _, _, TypeBoundsTree(_, _)) => true case TypeDef(_, _, _, rhs) => rhs.tpe.isInstanceOf[TypeBounds] @@ -933,12 +988,19 @@ abstract class TreeInfo { def isApplyDynamicName(name: Name) = (name == nme.updateDynamic) || (name == nme.selectDynamic) || (name == nme.applyDynamic) || (name == nme.applyDynamicNamed) + private object LiteralNameOrAdapted { + def unapply(tree: Tree) = tree match { + case Literal(Constant(name)) => Some(name) + case Apply(_, List(Literal(Constant(name)))) => Some(name) + case _ => None + } + } class DynamicApplicationExtractor(nameTest: Name => Boolean) { def unapply(tree: Tree) = tree match { - case Apply(TypeApply(Select(qual, oper), _), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name)) - case Apply(Select(qual, oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name)) - case Apply(Ident(oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((EmptyTree, name)) - case _ => None + case Apply(TypeApply(Select(qual, oper), _), List(LiteralNameOrAdapted(name))) if nameTest(oper) => Some((qual, name)) + case Apply(Select(qual, oper), List(LiteralNameOrAdapted(name))) if nameTest(oper) => Some((qual, name)) + case Apply(Ident(oper), List(LiteralNameOrAdapted(name))) if nameTest(oper) => Some((EmptyTree, name)) + case _ => None } } object DynamicUpdate extends DynamicApplicationExtractor(_ == nme.updateDynamic) @@ -946,14 +1008,15 @@ abstract 
class TreeInfo { object DynamicApplicationNamed extends DynamicApplicationExtractor(_ == nme.applyDynamicNamed) object MacroImplReference { + @tailrec private def refPart(tree: Tree): Tree = tree match { case TypeApply(fun, _) => refPart(fun) case ref: RefTree => ref case _ => EmptyTree } - def unapply(tree: Tree) = refPart(tree) match { - case ref: RefTree => { + def unapply(tree: Tree): Option[(Boolean, Boolean, Symbol, Symbol, List[Tree])] = refPart(tree) match { + case ref: RefTree => val qual = ref.qualifier val isBundle = definitions.isMacroBundleType(qual.tpe) val isBlackbox = @@ -969,15 +1032,15 @@ abstract class TreeInfo { if (qualSym.isModule) qualSym.moduleClass else qualSym } Some((isBundle, isBlackbox, owner, ref.symbol, dissectApplied(tree).targs)) - } - case _ => None + case _ => None } } - def isNullaryInvocation(tree: Tree): Boolean = + @tailrec + final def isNullaryInvocation(tree: Tree): Boolean = tree.symbol != null && tree.symbol.isMethod && (tree match { case TypeApply(fun, _) => isNullaryInvocation(fun) - case tree: RefTree => true + case _: RefTree => true case _ => false }) @@ -986,8 +1049,154 @@ abstract class TreeInfo { sym != null && sym.isTermMacro && !sym.isErroneous } - def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match { + @tailrec + final def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match { case Block(_, expr) => isMacroApplicationOrBlock(expr) case tree => isMacroApplication(tree) } } + +// imported from scalamacros/paradise +trait MacroAnnotionTreeInfo { self: TreeInfo => + import global._ + import definitions._ + import build.{SyntacticClassDef, SyntacticTraitDef} + + def primaryConstructorArity(tree: ClassDef): Int = treeInfo.firstConstructor(tree.impl.body) match { + case DefDef(_, _, _, params :: _, _, _) => params.length + case x => throw new MatchError(x) + } + + def anyConstructorHasDefault(tree: ClassDef): Boolean = tree.impl.body exists { + case DefDef(_, nme.CONSTRUCTOR, _, paramss, _, _) => 
mexists(paramss)(_.mods.hasDefault) + case _ => false + } + + def isMacroAnnotation(tree: ClassDef): Boolean = { + val clazz = tree.symbol + def isAnnotation = clazz isNonBottomSubClass AnnotationClass + def hasMacroTransformMethod = clazz.info.member(nme.macroTransform) != NoSymbol + clazz != null && isAnnotation && hasMacroTransformMethod + } + + case class AnnotationZipper(annotation: Tree, annottee: Tree, owner: Tree) + + // TODO: no immediate idea how to write this in a sane way + def getAnnotationZippers(tree: Tree): List[AnnotationZipper] = { + def loop[T <: Tree](tree: T, deep: Boolean): List[AnnotationZipper] = tree match { + case SyntacticClassDef(mods, name, tparams, constrMods, vparamss, earlyDefs, parents, selfdef, body) => + val czippers = mods.annotations.map { ann => + val mods1 = mods.mapAnnotations(_ diff List(ann)) + val annottee = PatchedSyntacticClassDef(mods1, name, tparams, constrMods, vparamss, earlyDefs, parents, selfdef, body) + AnnotationZipper(ann, annottee, annottee) + } + if (!deep) czippers + else { + val tzippers = for { + tparam <- tparams + AnnotationZipper(ann, tparam1: TypeDef, _) <- loop(tparam, deep = false) + tparams1 = tparams.updated(tparams.indexOf(tparam), tparam1) + } yield AnnotationZipper(ann, tparam1, PatchedSyntacticClassDef(mods, name, tparams1, constrMods, vparamss, earlyDefs, parents, selfdef, body)) + val vzippers = for { + vparams <- vparamss + vparam <- vparams + AnnotationZipper(ann, vparam1: ValDef, _) <- loop(vparam, deep = false) + vparams1 = vparams.updated(vparams.indexOf(vparam), vparam1) + vparamss1 = vparamss.updated(vparamss.indexOf(vparams), vparams1) + } yield AnnotationZipper(ann, vparam1, PatchedSyntacticClassDef(mods, name, tparams, constrMods, vparamss1, earlyDefs, parents, selfdef, body)) + czippers ++ tzippers ++ vzippers + } + case SyntacticTraitDef(mods, name@_, tparams, earlyDefs@_, parents@_, selfdef@_, body@_) => + val tdef = tree.asInstanceOf[ClassDef] + val czippers = 
mods.annotations.map(ann => { + val annottee = tdef.copy(mods = mods.mapAnnotations(_ diff List(ann))) + AnnotationZipper(ann, annottee, annottee) + }) + if (!deep) czippers + else { + val tzippers = for { + tparam <- tparams + AnnotationZipper(ann, tparam1: TypeDef, _) <- loop(tparam, deep = false) + tparams1 = tparams.updated(tparams.indexOf(tparam), tparam1) + } yield AnnotationZipper(ann, tparam1, tdef.copy(tparams = tparams1)) + czippers ++ tzippers + } + case mdef @ ModuleDef(mods, _, _) => + mods.annotations.map(ann => { + val annottee = mdef.copy(mods = mods.mapAnnotations(_ diff List(ann))) + AnnotationZipper(ann, annottee, annottee) + }) + case ddef @ DefDef(mods, _, tparams, vparamss, _, _) => + val dzippers = mods.annotations.map(ann => { + val annottee = ddef.copy(mods = mods.mapAnnotations(_ diff List(ann))) + AnnotationZipper(ann, annottee, annottee) + }) + if (!deep) dzippers + else { + val tzippers = for { + tparam <- tparams + AnnotationZipper(ann, tparam1: TypeDef, _) <- loop(tparam, deep = false) + tparams1 = tparams.updated(tparams.indexOf(tparam), tparam1) + } yield AnnotationZipper(ann, tparam1, ddef.copy(tparams = tparams1)) + val vzippers = for { + vparams <- vparamss + vparam <- vparams + AnnotationZipper(ann, vparam1: ValDef, _) <- loop(vparam, deep = false) + vparams1 = vparams.updated(vparams.indexOf(vparam), vparam1) + vparamss1 = vparamss.updated(vparamss.indexOf(vparams), vparams1) + } yield AnnotationZipper(ann, vparam1, ddef.copy(vparamss = vparamss1)) + dzippers ++ tzippers ++ vzippers + } + case vdef @ ValDef(mods, _, _, _) => + mods.annotations.map(ann => { + val annottee = vdef.copy(mods = mods.mapAnnotations(_ diff List(ann))) + AnnotationZipper(ann, annottee, annottee) + }) + case tdef @ TypeDef(mods, _, tparams, _) => + val tzippers = mods.annotations.map(ann => { + val annottee = tdef.copy(mods = mods.mapAnnotations(_ diff List(ann))) + AnnotationZipper(ann, annottee, annottee) + }) + if (!deep) tzippers + else { + val 
ttzippers = for { + tparam <- tparams + AnnotationZipper(ann, tparam1: TypeDef, _) <- loop(tparam, deep = false) + tparams1 = tparams.updated(tparams.indexOf(tparam), tparam1) + } yield AnnotationZipper(ann, tparam1, tdef.copy(tparams = tparams1)) + tzippers ++ ttzippers + } + case _ => + Nil + } + loop(tree, deep = true) + } + + private object PatchedSyntacticClassDef { + def apply(mods: Modifiers, name: TypeName, tparams: List[Tree], + constrMods: Modifiers, vparamss: List[List[Tree]], + earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = { + // NOTE: works around SI-8771 and hopefully fixes https://github.com/scalamacros/paradise/issues/53 for good + SyntacticClassDef(mods, name, tparams, constrMods, vparamss.map(_.map(_.duplicate)), earlyDefs, parents, selfType, body) + } + } + + // Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) + final def splitAtSuper(stats: List[Tree], classOnly: Boolean): (List[Tree], List[Tree]) = { + @tailrec + def isConstr(tree: Tree): Boolean = tree match { + case Block(_, expr) => + isConstr(expr) // scala/bug#6481 account for named argument blocks + case Apply(Select(New(_), _), _) => + false // scala/bug#11736 don't treat `new X` statements as super calls + case Apply(fun, _) => + (fun.symbol ne null) && (if (classOnly) fun.symbol.isClassConstructor else fun.symbol.isConstructor) + case _ => + false + } + val (pre, rest0) = stats span (!isConstr(_)) + val (supercalls, rest) = rest0 span (isConstr(_)) + (pre ::: supercalls, rest) + } + +} diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 4c8ef226610d..cc1f6b7eccaf 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,10 +15,11 @@ package reflect package internal import Flags._ +import scala.annotation.{nowarn, tailrec} import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.reflect.macros.Attachments -import util.{ReusableInstance, Statistics} +import util.{ReusableInstance, Statistics, StringContextStripMarginOps} trait Trees extends api.Trees { self: SymbolTable => @@ -57,7 +58,7 @@ trait Trees extends api.Trees { def defineType(tp: Type): this.type = setType(tp) def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch? - def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) } + def symbol_=(sym: Symbol): Unit = { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) } def setSymbol(sym: Symbol): this.type = { symbol = sym; this } def hasSymbolField = false @deprecated("use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField @@ -109,7 +110,7 @@ trait Trees extends api.Trees { override def orElse(alt: => Tree) = if (!isEmpty) this else alt - override def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) } + override def foreach(f: Tree => Unit): Unit = { new ForeachTreeTraverser(f).traverse(this) } override def withFilter(f: Tree => Boolean): List[Tree] = { val ft = new FilterTreeTraverser(f) @@ -234,25 +235,26 @@ trait Trees extends api.Trees { /** Sets the tree's type to the result of the given function. * If the type is null, it remains null - the function is not called. */ - def modifyType(f: Type => Type): Tree = + def modifyType(f: Type => Type): this.type = if (tpe eq null) this else this setType f(tpe) /** If `pf` is defined for a given subtree, call super.traverse(pf(tree)), * otherwise super.traverse(tree). 
*/ - def foreachPartial(pf: PartialFunction[Tree, Tree]) { + def foreachPartial(pf: PartialFunction[Tree, Tree]): Unit = { new ForeachPartialTreeTraverser(pf).traverse(this) } - def changeOwner(pairs: (Symbol, Symbol)*): Tree = { - pairs.foldLeft(this) { case (t, (oldOwner, newOwner)) => - new ChangeOwnerTraverser(oldOwner, newOwner) apply t + def changeOwner(pairs: (Symbol, Symbol)*): this.type = { + pairs.foreach { + case (oldOwner, newOwner) => changeOwner(oldOwner, newOwner) } + this } - def changeOwner(from: Symbol, to: Symbol): Tree = - new ChangeOwnerTraverser(from, to) apply this + def changeOwner(from: Symbol, to: Symbol): this.type = + new ChangeOwnerTraverser(from, to).apply(this) def shallowDuplicate: Tree = new ShallowDuplicator(this) transform this def shortClass: String = (getClass.getName split "[.$]").last @@ -273,6 +275,8 @@ trait Trees extends api.Trees { else "" ) } + def transform(transformer: Transformer): Tree = xtransform(transformer, this) + def traverse(traverser: Traverser): Unit = xtraverse(traverser, this): @nowarn("cat=deprecation") } trait TermTree extends Tree with TermTreeApi @@ -289,6 +293,7 @@ trait Trees extends api.Trees { def getterName: TermName = name.getterName def setterName: TermName = name.setterName def localName: TermName = name.localName + def namePos: Position = this.attachments.get[NamePos].map(_.pos).getOrElse(this.pos) } trait RefTree extends SymTree with NameTree with RefTreeApi { @@ -305,17 +310,19 @@ trait Trees extends api.Trees { case qual if qual.isType => assert(name.isTypeName, s"qual = $qual, name = $name") SelectFromTypeTree(qual, name.toTypeName) + case x => throw new MatchError(x) } def unapply(refTree: RefTree): Option[(Tree, Name)] = Some((refTree.qualifier, refTree.name)) } - abstract class DefTree extends SymTree with NameTree with DefTreeApi { + sealed abstract class DefTree extends SymTree with NameTree with DefTreeApi { def name: Name override def isDef = true } - abstract class MemberDef extends 
DefTree with MemberDefApi { + sealed abstract class MemberDef extends DefTree with MemberDefApi { def mods: Modifiers + def keyword: String = this match { case TypeDef(_, _, _, _) => "type" case ClassDef(mods, _, _, _) => if (mods hasFlag TRAIT) "trait" else "class" @@ -323,7 +330,6 @@ trait Trees extends api.Trees { case ModuleDef(_, _, _) => "object" case PackageDef(_, _) => "package" case ValDef(mods, _, _, _) => if (mods hasFlag MUTABLE) "var" else "val" - case _ => "" } } @@ -331,15 +337,38 @@ trait Trees extends api.Trees { extends MemberDef with PackageDefApi { def name = pid.name def mods = NoMods + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.PackageDef( + this, transformer.transform(pid).asInstanceOf[RefTree], + transformer.atOwner(mclass(this.symbol)) { + transformer.transformStats(stats, transformer.currentOwner) + } + ) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(pid) + traverser.traverseStats(stats, mclass(this.symbol)) + } } object PackageDef extends PackageDefExtractor - abstract class ImplDef extends MemberDef with ImplDefApi { + sealed abstract class ImplDef extends MemberDef with ImplDefApi { def impl: Template } case class ClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template) - extends ImplDef with ClassDefApi + extends ImplDef with ClassDefApi { + override def transform(transformer: Transformer): Tree = + transformer.atOwner(this.symbol) { + transformer.treeCopy.ClassDef(this, transformer.transformModifiers(mods), name, + transformer.transformTypeDefs(tparams), transformer.transformTemplate(impl)) + } + override def traverse(traverser: Traverser): Unit = traverser.atOwner(symbol) { + traverser.traverseModifiers(mods) + traverser.traverseName(name) + traverser.traverseParams(tparams) + traverser.traverse(impl) + } + } object ClassDef extends ClassDefExtractor { /** @param sym the class symbol * @param impl the implementation template @@ -362,7 +391,18 
@@ trait Trees extends api.Trees { } case class ModuleDef(mods: Modifiers, name: TermName, impl: Template) - extends ImplDef with ModuleDefApi + extends ImplDef with ModuleDefApi { + override def transform(transformer: Transformer): Tree = + transformer.atOwner(mclass(this.symbol)) { + transformer.treeCopy.ModuleDef(this, transformer.transformModifiers(mods), + name, transformer.transformTemplate(impl)) + } + override def traverse(traverser: Traverser): Unit = traverser.atOwner(mclass(symbol)) { + traverser.traverseModifiers(mods) + traverser.traverseName(name) + traverser.traverse(impl) + } + } object ModuleDef extends ModuleDefExtractor { /** * @param sym the class symbol @@ -374,7 +414,7 @@ trait Trees extends api.Trees { } } - abstract class ValOrDefDef extends MemberDef with ValOrDefDefApi { + sealed abstract class ValOrDefDef extends MemberDef with ValOrDefDefApi { def name: TermName def tpt: Tree def rhs: Tree @@ -388,14 +428,42 @@ trait Trees extends api.Trees { } } - case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi + case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi { + override def transform(transformer: Transformer): Tree = + transformer.atOwner(this.symbol) { + transformer.treeCopy.ValDef(this, transformer.transformModifiers(mods), + name, transformer.transform(tpt), transformer.transform(rhs)) + } + override def traverse(traverser: Traverser): Unit = traverser.atOwner(symbol) { + traverser.traverseModifiers(mods) + traverser.traverseName(name) + traverser.traverseTypeAscription(tpt) + traverser.traverse(rhs) + } + } object ValDef extends ValDefExtractor { def apply(sym: Symbol): ValDef = newValDef(sym, EmptyTree)() def apply(sym: Symbol, rhs: Tree): ValDef = newValDef(sym, rhs)() } case class DefDef(mods: Modifiers, name: TermName, tparams: List[TypeDef], - vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef with 
DefDefApi + vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef with DefDefApi { + override def transform(transformer: Transformer): Tree = + transformer.atOwner(this.symbol) { + transformer.treeCopy.DefDef(this, transformer.transformModifiers(mods), name, + transformer.transformTypeDefs(tparams), transformer.transformValDefss(vparamss), + transformer.transform(tpt), transformer.transform(rhs)) + } + override def traverse(traverser: Traverser): Unit = traverser.atOwner(symbol) { + traverser.traverseModifiers(mods) + traverser.traverseName(name) + traverser.traverseParams(tparams) + traverser.traverseParamss(vparamss) + traverser.traverseTypeAscription(tpt) + traverser.traverse(rhs) + } + + } object DefDef extends DefDefExtractor { def apply(sym: Symbol, rhs: Tree): DefDef = newDefDef(sym, rhs)() def apply(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef = newDefDef(sym, rhs)(vparamss = vparamss) @@ -405,7 +473,19 @@ trait Trees extends api.Trees { } case class TypeDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree) - extends MemberDef with TypeDefApi + extends MemberDef with TypeDefApi { + override def transform(transformer: Transformer): Tree = + transformer.atOwner(this.symbol) { + transformer.treeCopy.TypeDef(this, transformer.transformModifiers(mods), name, + transformer.transformTypeDefs(tparams), transformer.transform(rhs)) + } + override def traverse(traverser: Traverser): Unit = traverser.atOwner(symbol) { + traverser.traverseModifiers(mods) + traverser.traverseName(name) + traverser.traverseParams(tparams) + traverser.traverse(rhs) + } + } object TypeDef extends TypeDefExtractor { /** A TypeDef node which defines abstract type or type parameter for given `sym` */ def apply(sym: Symbol): TypeDef = newTypeDef(sym, TypeBoundsTree(sym))() @@ -413,7 +493,15 @@ trait Trees extends api.Trees { } case class LabelDef(name: TermName, params: List[Ident], rhs: Tree) - extends DefTree with TermTree with LabelDefApi 
+ extends DefTree with TermTree with LabelDefApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.LabelDef(this, name, transformer.transformIdents(params), transformer.transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LambdaLifter.proxy` + override def traverse(traverser: Traverser): Unit = { + traverser.traverseName(name) + traverser.traverseParams(params) + traverser.traverse(rhs) + } + } object LabelDef extends LabelDefExtractor { def apply(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef = atPos(sym.pos) { @@ -421,42 +509,136 @@ trait Trees extends api.Trees { } } - case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) extends ImportSelectorApi + /** A selector in an import clause `import x.name as rename`. + * For a normal import, name and rename are the same. + * For a rename, they are different. + * A wildcard import has name `_` and null rename. + * A "masking" import has rename `_` (where name is not `_`). + * + * The unhappy special cases are: + * - import member named `_` has rename `_` like normal. (backward compat) + * - import given members is a wildcard but rename `given`. (forward compat) + * + * Client must distinguish isWildcard and isGiven. 
+ */ + case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) extends ImportSelectorApi { + assert(isWildcard || rename != null, s"Bad import selector $name => $rename") + def isWildcard = name == nme.WILDCARD && rename == null + def isGiven = name == nme.WILDCARD && rename == nme.`given` + def isMask = name != nme.WILDCARD && rename == nme.WILDCARD + def isRename = name != rename && rename != null && rename != nme.WILDCARD && name != nme.WILDCARD + def isSpecific = if (name == nme.WILDCARD) rename == nme.WILDCARD else rename != nme.WILDCARD + private def isLiteralWildcard = name == nme.WILDCARD && rename == nme.WILDCARD + private def sameName(name: Name, other: Name) = (name eq other) || (name ne null) && name.start == other.start && name.length == other.length + def hasName(other: Name) = sameName(name, other) + def introduces(target: Name) = + if (target == nme.WILDCARD) isLiteralWildcard + else target != null && !isGiven && sameName(rename, target) + } object ImportSelector extends ImportSelectorExtractor { - val wild = ImportSelector(nme.WILDCARD, -1, null, -1) - val wildList = List(wild) // OPT This list is shared for performance. + private val wild = ImportSelector(nme.WILDCARD, -1, null, -1) + val wildList = List(wild) // OPT This list is shared for performance. Used for unpositioned synthetic only. 
+ def wildAt(pos: Int) = ImportSelector(nme.WILDCARD, pos, null, -1) + def givenAt(pos: Int) = ImportSelector(nme.WILDCARD, pos, nme.`given`, -1) + def mask(name: Name) = ImportSelector(name, -1, nme.WILDCARD, -1) } case class Import(expr: Tree, selectors: List[ImportSelector]) - extends SymTree with ImportApi + extends SymTree with ImportApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Import(this, transformer.transform(expr), selectors) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(expr) + selectors foreach traverser.traverseImportSelector + } + def posOf(sel: ImportSelector): Position = { + val pos0 = this.pos + val start = sel.namePos + if (start >= 0 && selectors.contains(sel)) { + val hasRename = sel.rename != null && sel.renamePos >= 0 // !sel.isWildcard + val end = if (hasRename) sel.renamePos + sel.rename.length else start + sel.name.length + pos0.copyRange(start, start, end) + } + else pos0 + } + } object Import extends ImportExtractor case class Template(parents: List[Tree], self: ValDef, body: List[Tree]) - extends SymTree with TemplateApi + extends SymTree with TemplateApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Template(this, transformer.transformTrees(parents), transformer.transformValDef(self), transformer.transformStats(body, this.symbol)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverseParents(parents) + traverser.traverseSelfType(self) + traverser.traverseStats(body, this.symbol) + } + } object Template extends TemplateExtractor case class Block(stats: List[Tree], expr: Tree) - extends TermTree with BlockApi + extends TermTree with BlockApi { + override def transform(transformer: Transformer): Tree = + treeCopy.Block(this, transformer.transformStats(stats, transformer.currentOwner), transformer.transform(expr)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverseTrees(stats) + 
traverser.traverse(expr) + } + } object Block extends BlockExtractor case class CaseDef(pat: Tree, guard: Tree, body: Tree) - extends Tree with CaseDefApi + extends Tree with CaseDefApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.CaseDef(this, transformer.transform(pat), transformer.transform(guard), transformer.transform(body)) + override def traverse(traverser: Traverser): Unit = { + traverser.traversePattern(pat) + traverser.traverseGuard(guard) + traverser.traverse(body) + } + } object CaseDef extends CaseDefExtractor case class Alternative(trees: List[Tree]) - extends TermTree with AlternativeApi + extends TermTree with AlternativeApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Alternative(this, transformer.transformTrees(trees)) + override def traverse(traverser: Traverser): Unit = + traverser.traverseTrees(trees) + } object Alternative extends AlternativeExtractor case class Star(elem: Tree) - extends TermTree with StarApi + extends TermTree with StarApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Star(this, transformer.transform(elem)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(elem) + } + } object Star extends StarExtractor case class Bind(name: Name, body: Tree) - extends DefTree with BindApi + extends DefTree with BindApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Bind(this, name, transformer.transform(body)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverseName(name) + traverser.traverse(body) + } + } object Bind extends BindExtractor case class UnApply(fun: Tree, args: List[Tree]) - extends TermTree with UnApplyApi + extends TermTree with UnApplyApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.UnApply(this, transformer.transform(fun), transformer.transformTrees(args)) + // bq: see 
test/.../unapplyContexts2.scala + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(fun) + traverser.traverseTrees(args) + } + } object UnApply extends UnApplyExtractor /** An array of expressions. This AST node needs to be translated in backend. @@ -475,45 +657,122 @@ trait Trees extends api.Trees { * Literal("%s%d"), * ArrayValue(, List(Ident("foo"), Literal(42)))) */ - case class ArrayValue(elemtpt: Tree, elems: List[Tree]) extends TermTree + case class ArrayValue(elemtpt: Tree, elems: List[Tree]) extends TermTree { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.ArrayValue(this, transformer.transform(elemtpt), transformer.transformTrees(elems)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(elemtpt) + traverser.traverseTrees(elems) + } + } case class Function(vparams: List[ValDef], body: Tree) - extends SymTree with TermTree with FunctionApi + extends SymTree with TermTree with FunctionApi { + override def transform(transformer: Transformer): Tree = + transformer.atOwner(this.symbol) { + transformer.treeCopy.Function(this, transformer.transformValDefs(vparams), transformer.transform(body)) + } + override def traverse(traverser: Traverser): Unit = traverser.atOwner(this.symbol) { + traverser.traverseParams(vparams) ; traverser.traverse(body) + } + } object Function extends FunctionExtractor case class Assign(lhs: Tree, rhs: Tree) - extends TermTree with AssignApi + extends TermTree with AssignApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Assign(this, transformer.transform(lhs), transformer.transform(rhs)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(lhs) + traverser.traverse(rhs) + } + } object Assign extends AssignExtractor - case class AssignOrNamedArg(lhs: Tree, rhs: Tree) - extends TermTree with AssignOrNamedArgApi - object AssignOrNamedArg extends AssignOrNamedArgExtractor + case class 
NamedArg(lhs: Tree, rhs: Tree) + extends TermTree with NamedArgApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.NamedArg(this, transformer.transform(lhs), transformer.transform(rhs)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(lhs) + traverser.traverse(rhs) + } + } + object NamedArg extends NamedArgExtractor case class If(cond: Tree, thenp: Tree, elsep: Tree) - extends TermTree with IfApi + extends TermTree with IfApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.If(this, transformer.transform(cond), transformer.transform(thenp), transformer.transform(elsep)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(cond) + traverser.traverse(thenp) + traverser.traverse(elsep) + } + } object If extends IfExtractor case class Match(selector: Tree, cases: List[CaseDef]) - extends TermTree with MatchApi + extends TermTree with MatchApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Match(this, transformer.transform(selector), transformer.transformCaseDefs(cases)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(selector) + traverser.traverseCases(cases) + } + } object Match extends MatchExtractor case class Return(expr: Tree) - extends SymTree with TermTree with ReturnApi + extends SymTree with TermTree with ReturnApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Return(this, transformer.transform(expr)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(expr) + } + } object Return extends ReturnExtractor case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree) - extends TermTree with TryApi + extends TermTree with TryApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Try(this, transformer.transform(block), transformer.transformCaseDefs(catches), 
transformer.transform(finalizer)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(block) + traverser.traverseCases(catches) + traverser.traverse(finalizer) + } + } object Try extends TryExtractor case class Throw(expr: Tree) - extends TermTree with ThrowApi + extends TermTree with ThrowApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Throw(this, transformer.transform(expr)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(expr) + } + } object Throw extends ThrowExtractor - case class New(tpt: Tree) extends TermTree with NewApi + case class New(tpt: Tree) extends TermTree with NewApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.New(this, transformer.transform(tpt)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(tpt) + } + } object New extends NewExtractor case class Typed(expr: Tree, tpt: Tree) - extends TermTree with TypedApi + extends TermTree with TypedApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Typed(this, transformer.transform(expr), transformer.transform(tpt)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(expr) + traverser.traverseTypeAscription(tpt) + } + } object Typed extends TypedExtractor // represents `expr _`, as specified in Method Values of spec/06-expressions.md @@ -536,14 +795,26 @@ trait Trees extends api.Trees { assert(fun.isTerm, fun) override def symbol: Symbol = fun.symbol - override def symbol_=(sym: Symbol) { fun.symbol = sym } + override def symbol_=(sym: Symbol): Unit = { fun.symbol = sym } + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.TypeApply(this, transformer.transform(fun), transformer.transformTrees(args)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(fun) + traverser.traverseTypeArgs(args) + } } object TypeApply extends 
TypeApplyExtractor case class Apply(fun: Tree, args: List[Tree]) extends GenericApply with ApplyApi { override def symbol: Symbol = fun.symbol - override def symbol_=(sym: Symbol) { fun.symbol = sym } + override def symbol_=(sym: Symbol): Unit = { fun.symbol = sym } + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Apply(this, transformer.transform(fun), transformer.transformTrees(args)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(fun) + traverser.traverseTrees(args) + } } object Apply extends ApplyExtractor @@ -567,16 +838,35 @@ trait Trees extends api.Trees { Apply(init, args.toList) } - case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree + case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.ApplyDynamic(this, transformer.transform(qual), transformer.transformTrees(args)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(qual) + traverser.traverseTrees(args) + } + } case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi { override def symbol: Symbol = qual.symbol - override def symbol_=(sym: Symbol) { qual.symbol = sym } + override def symbol_=(sym: Symbol): Unit = { qual.symbol = sym } + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Super(this, transformer.transform(qual), mix) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(qual) + traverser.traverseName(mix) + } } object Super extends SuperExtractor case class This(qual: TypeName) - extends SymTree with TermTree with ThisApi + extends SymTree with TermTree with ThisApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.This(this, qual) + override def traverse(traverser: Traverser): Unit = { + traverser.traverseName(qual) + } + } object This extends 
ThisExtractor case class Select(qualifier: Tree, name: Name) @@ -584,46 +874,97 @@ trait Trees extends api.Trees { // !!! assert disabled due to test case pos/annotDepMethType.scala triggering it. // assert(qualifier.isTerm, qualifier) + + override def transform(transformer: Transformer): Tree = { + transformer.treeCopy.Select(this, transformer.transform(qualifier), name) + } + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(qualifier) + traverser.traverseName(name) + } } object Select extends SelectExtractor case class Ident(name: Name) extends RefTree with IdentApi { def qualifier: Tree = EmptyTree def isBackquoted = this.hasAttachment[BackquotedIdentifierAttachment.type] + override def transform(transformer: Transformer): Tree = { + transformer.treeCopy.Ident(this, name) + } + override def traverse(traverser: Traverser): Unit = { + traverser.traverseName(name) + } } object Ident extends IdentExtractor case class ReferenceToBoxed(ident: Ident) extends TermTree with ReferenceToBoxedApi { override def symbol: Symbol = ident.symbol - override def symbol_=(sym: Symbol) { ident.symbol = sym } + override def symbol_=(sym: Symbol): Unit = { ident.symbol = sym } + override def transform(transformer: Transformer): Tree = { + transformer.treeCopy.ReferenceToBoxed(this, transformer.transform(ident) match { + case idt1: Ident => idt1 + case x => throw new MatchError(x) + }) + } + override def traverse(traverser: Traverser): Unit = traverser.traverse(ident) } object ReferenceToBoxed extends ReferenceToBoxedExtractor case class Literal(value: Constant) extends TermTree with LiteralApi { - assert(value ne null) + assert(value ne null, "null value for literal") + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Literal(this, value) + override def traverse(traverser: Traverser): Unit = { + traverser.traverseConstant(value) + } } object Literal extends LiteralExtractor // @deprecated("will be removed and then be 
re-introduced with changed semantics, use Literal(Constant(x)) instead") // def Literal(x: Any) = new Literal(Constant(x)) - case class Annotated(annot: Tree, arg: Tree) extends Tree with AnnotatedApi + case class Annotated(annot: Tree, arg: Tree) extends Tree with AnnotatedApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.Annotated(this, transformer.transform(annot), transformer.transform(arg)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(annot) + traverser.traverse(arg) + } + } object Annotated extends AnnotatedExtractor case class SingletonTypeTree(ref: Tree) - extends TypTree with SingletonTypeTreeApi + extends TypTree with SingletonTypeTreeApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.SingletonTypeTree(this, transformer.transform(ref)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(ref) + } + } object SingletonTypeTree extends SingletonTypeTreeExtractor case class SelectFromTypeTree(qualifier: Tree, name: TypeName) extends RefTree with TypTree with SelectFromTypeTreeApi { assert(qualifier.isType, qualifier) + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.SelectFromTypeTree(this, transformer.transform(qualifier), name) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(qualifier) + traverser.traverseName(name) + } } object SelectFromTypeTree extends SelectFromTypeTreeExtractor case class CompoundTypeTree(templ: Template) - extends TypTree with CompoundTypeTreeApi + extends TypTree with CompoundTypeTreeApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.CompoundTypeTree(this, transformer.transformTemplate(templ)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(templ) + } + } object CompoundTypeTree extends CompoundTypeTreeExtractor case class AppliedTypeTree(tpt: Tree, args: List[Tree]) @@ -632,16 
+973,36 @@ trait Trees extends api.Trees { assert(tpt.isType, tpt) override def symbol: Symbol = tpt.symbol - override def symbol_=(sym: Symbol) { tpt.symbol = sym } + override def symbol_=(sym: Symbol): Unit = { tpt.symbol = sym } + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.AppliedTypeTree(this, transformer.transform(tpt), transformer.transformTrees(args)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(tpt) + traverser.traverseTypeArgs(args) + } } object AppliedTypeTree extends AppliedTypeTreeExtractor case class TypeBoundsTree(lo: Tree, hi: Tree) - extends TypTree with TypeBoundsTreeApi + extends TypTree with TypeBoundsTreeApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.TypeBoundsTree(this, transformer.transform(lo), transformer.transform(hi)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(lo) + traverser.traverse(hi) + } + } object TypeBoundsTree extends TypeBoundsTreeExtractor case class ExistentialTypeTree(tpt: Tree, whereClauses: List[MemberDef]) - extends TypTree with ExistentialTypeTreeApi + extends TypTree with ExistentialTypeTreeApi { + override def transform(transformer: Transformer): Tree = + transformer.treeCopy.ExistentialTypeTree(this, transformer.transform(tpt), transformer.transformMemberDefs(whereClauses)) + override def traverse(traverser: Traverser): Unit = { + traverser.traverse(tpt) + traverser.traverseTrees(whereClauses) + } + } object ExistentialTypeTree extends ExistentialTypeTreeExtractor case class TypeTree() extends TypTree with TypeTreeApi { @@ -657,6 +1018,7 @@ trait Trees extends api.Trees { def original: Tree = orig def setOriginal(tree: Tree): this.type = { + @tailrec def followOriginal(t: Tree): Tree = t match { case tt: TypeTree => followOriginal(tt.original) case t => t @@ -683,6 +1045,11 @@ trait Trees extends api.Trees { } this } + override def transform(transformer: Transformer): Tree = + 
transformer.treeCopy.TypeTree(this) + override def traverse(traverser: Traverser): Unit = + () + } object TypeTree extends TypeTreeExtractor @@ -739,8 +1106,8 @@ trait Trees extends api.Trees { new Function(vparams, body).copyAttrs(tree) def Assign(tree: Tree, lhs: Tree, rhs: Tree) = new Assign(lhs, rhs).copyAttrs(tree) - def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = - new AssignOrNamedArg(lhs, rhs).copyAttrs(tree) + def NamedArg(tree: Tree, lhs: Tree, rhs: Tree) = + new NamedArg(lhs, rhs).copyAttrs(tree) def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) = new If(cond, thenp, elsep).copyAttrs(tree) def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) = @@ -892,10 +1259,10 @@ trait Trees extends api.Trees { if (lhs0 == lhs) && (rhs0 == rhs) => t case _ => treeCopy.Assign(tree, lhs, rhs) } - def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match { - case t @ AssignOrNamedArg(lhs0, rhs0) + def NamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match { + case t @ NamedArg(lhs0, rhs0) if (lhs0 == lhs) && (rhs0 == rhs) => t - case _ => treeCopy.AssignOrNamedArg(tree, lhs, rhs) + case _ => treeCopy.NamedArg(tree, lhs, rhs) } def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) = tree match { case t @ If(cond0, thenp0, elsep0) @@ -1023,17 +1390,6 @@ trait Trees extends api.Trees { } } - // Belongs in TreeInfo but then I can't reach it from Printers. - def isReferenceToScalaMember(t: Tree, Id: Name) = t match { - case Ident(Id) => true - case Select(Ident(nme.scala_), Id) => true - case Select(Select(Ident(nme.ROOTPKG), nme.scala_), Id) => true - case _ => false - } - /** Is the tree Predef, scala.Predef, or _root_.scala.Predef? 
- */ - def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef) - // --- modifiers implementation --------------------------------------- /** @param privateWithin the qualifier for a private (a type name) @@ -1097,7 +1453,7 @@ trait Trees extends api.Trees { else Modifiers(flags, privateWithin, newAnns) setPositions positions } - override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions) + override def toString = s"Modifiers($flagString, ${annotations.mkString(",")}, $positions)" } object Modifiers extends ModifiersExtractor @@ -1123,7 +1479,7 @@ trait Trees extends api.Trees { super.setType(NoType) override def canHaveAttrs = false - override def setPos(pos: Position) = { requireLegal(pos, NoPosition, "pos"); this } + override def setPos(pos: Position): this.type = { requireLegal(pos, NoPosition, "pos"); this } override def pos_=(pos: Position) = setPos(pos) override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this } override def tpe_=(t: Type) = setType(t) @@ -1135,21 +1491,33 @@ trait Trees extends api.Trees { override def removeAttachment[T: ClassTag]: this.type = attachmentWarning() private def attachmentWarning(): this.type = {devWarning(s"Attempt to mutate attachments on $self ignored"); this} - private def requireLegal(value: Any, allowed: Any, what: String) = ( - if (value != allowed) { + private def requireLegal(value: Any, allowed: Any, what: String): Unit = + if (value != allowed && this != pendingSuperCall) { log(s"can't set $what for $self to value other than $allowed") if (settings.isDebug && settings.isDeveloper) - (new Throwable).printStackTrace + new Throwable(s"can't set $what for $self to value other than $allowed").printStackTrace } - ) + override def traverse(traverser: Traverser): Unit = () } - case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) } + case object EmptyTree extends TermTree with 
CannotHaveAttrs { + override def isEmpty = true + val asList = List(this) + override def transform(transformer: Transformer): Tree = this + } object noSelfType extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs @deprecated("use `noSelfType` instead", "2.11.0") lazy val emptyValDef = noSelfType + class InternalTransformer extends Transformer { + override def transform(tree: Tree): Tree = tree.transform(this) + } + class InternalTraverser extends Traverser { + override def traverse(tree: Tree): Unit = tree.traverse(this) + override def apply[T <: Tree](tree: T): tree.type = super.apply(tree) + } + def newValDef(sym: Symbol, rhs: Tree)( mods: Modifiers = Modifiers(sym.flags), name: TermName = sym.name.toTermName, @@ -1167,6 +1535,14 @@ trait Trees extends api.Trees { ): DefDef = ( atPos(sym.pos)(DefDef(mods, name, tparams, vparamss, tpt, rhs)) setSymbol sym ) + def newDefDefAt(pos: Position)(sym: Symbol, rhs: Tree)( + mods: Modifiers = Modifiers(sym.flags), + name: TermName = sym.name.toTermName, + tparams: List[TypeDef] = sym.typeParams map TypeDef.apply, + vparamss: List[List[ValDef]] = mapParamss(sym)(ValDef.apply), + tpt: Tree = TypeTreeMemberType(sym) + ): DefDef = + atPos(pos)(DefDef(mods, name, tparams, vparamss, tpt, rhs)).setSymbol(sym) def newTypeDef(sym: Symbol, rhs: Tree)( mods: Modifiers = Modifiers(sym.flags), @@ -1242,16 +1618,12 @@ trait Trees extends api.Trees { /** Block factory that flattens directly nested blocks. 
*/ - def Block(stats: Tree*): Block = { - if (stats.isEmpty) Block(Nil, Literal(Constant(()))) - else stats match { - case Seq(b @ Block(_, _)) => b - case Seq(stat) => Block(stats.toList, Literal(Constant(()))) - case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last) - } + def Block(stats: Tree*): Block = stats match { + case Seq(b @ Block(_, _)) => b + case init :+ last => Block(init.toList, last) + case _ => Block(stats.toList, Literal(Constant(()))) } - /** Delegate for a TypeTree symbol. This operation is unsafe because * it may trigger type checking when forcing the type symbol of the * underlying type. @@ -1261,283 +1633,52 @@ trait Trees extends api.Trees { // --- generic traversers and transformers + @deprecated("Use Tree#traverse instead", since = "2.12.3") override protected def itraverse(traverser: Traverser, tree: Tree): Unit = { - import traverser._ - - def traverseMemberDef(md: MemberDef, owner: Symbol): Unit = atOwner(owner) { - traverseModifiers(md.mods) - traverseName(md.name) - md match { - case ClassDef(_, _, tparams, impl) => traverseParams(tparams) ; traverse(impl) - case ModuleDef(_, _, impl) => traverse(impl) - case ValDef(_, _, tpt, rhs) => traverseTypeAscription(tpt) ; traverse(rhs) - case TypeDef(_, _, tparams, rhs) => traverseParams(tparams) ; traverse(rhs) - case DefDef(_, _, tparams, vparamss, tpt, rhs) => - traverseParams(tparams) - traverseParamss(vparamss) - traverseTypeAscription(tpt) - traverse(rhs) - } - } - def traverseComponents(): Unit = tree match { - case LabelDef(name, params, rhs) => - traverseName(name) - traverseParams(params) - traverse(rhs) - case Import(expr, selectors) => - traverse(expr) - selectors foreach traverseImportSelector - case Annotated(annot, arg) => - traverse(annot) - traverse(arg) - case Template(parents, self, body) => - traverseParents(parents) - traverseSelfType(self) - traverseStats(body, tree.symbol) - case Block(stats, expr) => - traverseTrees(stats) - traverse(expr) - case CaseDef(pat, 
guard, body) => - traversePattern(pat) - traverseGuard(guard) - traverse(body) - case Alternative(trees) => - traverseTrees(trees) - case Star(elem) => - traverse(elem) - case Bind(name, body) => - traverseName(name) - traverse(body) - case UnApply(fun, args) => - traverse(fun) - traverseTrees(args) - case ArrayValue(elemtpt, trees) => - traverse(elemtpt) - traverseTrees(trees) - case Assign(lhs, rhs) => - traverse(lhs) - traverse(rhs) - case AssignOrNamedArg(lhs, rhs) => - traverse(lhs) - traverse(rhs) - case If(cond, thenp, elsep) => - traverse(cond) - traverse(thenp) - traverse(elsep) - case Match(selector, cases) => - traverse(selector) - traverseCases(cases) - case Return(expr) => - traverse(expr) - case Try(block, catches, finalizer) => - traverse(block) - traverseCases(catches) - traverse(finalizer) - case Throw(expr) => - traverse(expr) - case New(tpt) => - traverse(tpt) - case Typed(expr, tpt) => - traverse(expr) - traverseTypeAscription(tpt) - case TypeApply(fun, args) => - traverse(fun) - traverseTypeArgs(args) - case Apply(fun, args) => - traverse(fun) - traverseTrees(args) - case ApplyDynamic(qual, args) => - traverse(qual) - traverseTrees(args) - case Super(qual, mix) => - traverse(qual) - traverseName(mix) - case This(qual) => - traverseName(qual) - case Select(qualifier, selector) => - traverse(qualifier) - traverseName(selector) - case Ident(name) => - traverseName(name) - case ReferenceToBoxed(idt) => - traverse(idt) - case Literal(const) => - traverseConstant(const) - case TypeTree() => - ; - case SingletonTypeTree(ref) => - traverse(ref) - case SelectFromTypeTree(qualifier, selector) => - traverse(qualifier) - traverseName(selector) - case CompoundTypeTree(templ) => - traverse(templ) - case AppliedTypeTree(tpt, args) => - traverse(tpt) - traverseTypeArgs(args) - case TypeBoundsTree(lo, hi) => - traverse(lo) - traverse(hi) - case ExistentialTypeTree(tpt, whereClauses) => - traverse(tpt) - traverseTrees(whereClauses) - case _ => - 
xtraverse(traverser, tree) - } - - if (tree.canHaveAttrs) { - tree match { - case PackageDef(pid, stats) => traverse(pid) ; traverseStats(stats, mclass(tree.symbol)) - case md: ModuleDef => traverseMemberDef(md, mclass(tree.symbol)) - case md: MemberDef => traverseMemberDef(md, tree.symbol) - case Function(vparams, body) => atOwner(tree.symbol) { traverseParams(vparams) ; traverse(body) } - case _ => traverseComponents() - } - } + tree.traverse(traverser) } //OPT ordered according to frequency to speed it up. + @deprecated("Use Tree#transform instead", since = "2.12.3") override protected def itransform(transformer: Transformer, tree: Tree): Tree = { - import transformer._ - val treeCopy = transformer.treeCopy - - // begin itransform - tree match { - case Ident(name) => - treeCopy.Ident(tree, name) - case Select(qualifier, selector) => - treeCopy.Select(tree, transform(qualifier), selector) - case Apply(fun, args) => - treeCopy.Apply(tree, transform(fun), transformTrees(args)) - case TypeTree() => - treeCopy.TypeTree(tree) - case Literal(value) => - treeCopy.Literal(tree, value) - case This(qual) => - treeCopy.This(tree, qual) - case ValDef(mods, name, tpt, rhs) => - atOwner(tree.symbol) { - treeCopy.ValDef(tree, transformModifiers(mods), - name, transform(tpt), transform(rhs)) - } - case DefDef(mods, name, tparams, vparamss, tpt, rhs) => - atOwner(tree.symbol) { - treeCopy.DefDef(tree, transformModifiers(mods), name, - transformTypeDefs(tparams), transformValDefss(vparamss), - transform(tpt), transform(rhs)) - } - case Block(stats, expr) => - treeCopy.Block(tree, transformStats(stats, currentOwner), transform(expr)) - case If(cond, thenp, elsep) => - treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep)) - case CaseDef(pat, guard, body) => - treeCopy.CaseDef(tree, transform(pat), transform(guard), transform(body)) - case TypeApply(fun, args) => - treeCopy.TypeApply(tree, transform(fun), transformTrees(args)) - case AppliedTypeTree(tpt, args) => - 
treeCopy.AppliedTypeTree(tree, transform(tpt), transformTrees(args)) - case Bind(name, body) => - treeCopy.Bind(tree, name, transform(body)) - case Function(vparams, body) => - atOwner(tree.symbol) { - treeCopy.Function(tree, transformValDefs(vparams), transform(body)) - } - case Match(selector, cases) => - treeCopy.Match(tree, transform(selector), transformCaseDefs(cases)) - case New(tpt) => - treeCopy.New(tree, transform(tpt)) - case Assign(lhs, rhs) => - treeCopy.Assign(tree, transform(lhs), transform(rhs)) - case AssignOrNamedArg(lhs, rhs) => - treeCopy.AssignOrNamedArg(tree, transform(lhs), transform(rhs)) - case Try(block, catches, finalizer) => - treeCopy.Try(tree, transform(block), transformCaseDefs(catches), transform(finalizer)) - case EmptyTree => - tree - case Throw(expr) => - treeCopy.Throw(tree, transform(expr)) - case Super(qual, mix) => - treeCopy.Super(tree, transform(qual), mix) - case TypeBoundsTree(lo, hi) => - treeCopy.TypeBoundsTree(tree, transform(lo), transform(hi)) - case Typed(expr, tpt) => - treeCopy.Typed(tree, transform(expr), transform(tpt)) - case Import(expr, selectors) => - treeCopy.Import(tree, transform(expr), selectors) - case Template(parents, self, body) => - treeCopy.Template(tree, transformTrees(parents), transformValDef(self), transformStats(body, tree.symbol)) - case ClassDef(mods, name, tparams, impl) => - atOwner(tree.symbol) { - treeCopy.ClassDef(tree, transformModifiers(mods), name, - transformTypeDefs(tparams), transformTemplate(impl)) - } - case ModuleDef(mods, name, impl) => - atOwner(mclass(tree.symbol)) { - treeCopy.ModuleDef(tree, transformModifiers(mods), - name, transformTemplate(impl)) - } - case TypeDef(mods, name, tparams, rhs) => - atOwner(tree.symbol) { - treeCopy.TypeDef(tree, transformModifiers(mods), name, - transformTypeDefs(tparams), transform(rhs)) - } - case LabelDef(name, params, rhs) => - treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) 
works, also change `LambdaLifter.proxy' - case PackageDef(pid, stats) => - treeCopy.PackageDef( - tree, transform(pid).asInstanceOf[RefTree], - atOwner(mclass(tree.symbol)) { - transformStats(stats, currentOwner) - } - ) - case Annotated(annot, arg) => - treeCopy.Annotated(tree, transform(annot), transform(arg)) - case SingletonTypeTree(ref) => - treeCopy.SingletonTypeTree(tree, transform(ref)) - case SelectFromTypeTree(qualifier, selector) => - treeCopy.SelectFromTypeTree(tree, transform(qualifier), selector) - case CompoundTypeTree(templ) => - treeCopy.CompoundTypeTree(tree, transformTemplate(templ)) - case ExistentialTypeTree(tpt, whereClauses) => - treeCopy.ExistentialTypeTree(tree, transform(tpt), transformMemberDefs(whereClauses)) - case Return(expr) => - treeCopy.Return(tree, transform(expr)) - case Alternative(trees) => - treeCopy.Alternative(tree, transformTrees(trees)) - case Star(elem) => - treeCopy.Star(tree, transform(elem)) - case UnApply(fun, args) => - treeCopy.UnApply(tree, transform(fun), transformTrees(args)) // bq: see test/.../unapplyContexts2.scala - case ArrayValue(elemtpt, trees) => - treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees)) - case ApplyDynamic(qual, args) => - treeCopy.ApplyDynamic(tree, transform(qual), transformTrees(args)) - case ReferenceToBoxed(idt) => - treeCopy.ReferenceToBoxed(tree, transform(idt) match { case idt1: Ident => idt1 }) - case _ => - xtransform(transformer, tree) - } + tree.transform(transformer) } private def mclass(sym: Symbol) = sym map (_.asModule.moduleClass) // --- specific traversers and transformers - class ForeachPartialTreeTraverser(pf: PartialFunction[Tree, Tree]) extends Traverser { - override def traverse(tree: Tree) { + class ForeachPartialTreeTraverser(pf: PartialFunction[Tree, Tree]) extends InternalTraverser { + override def traverse(tree: Tree): Unit = { val t = if (pf isDefinedAt tree) pf(tree) else tree super.traverse(t) } } - class ChangeOwnerTraverser(val oldowner: 
Symbol, val newowner: Symbol) extends Traverser { - final def change(sym: Symbol) = { + class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends InternalTraverser { + protected val changedSymbols = mutable.Set.empty[Symbol] + protected val treeTypes = mutable.Set.empty[Type] + + def change(sym: Symbol): Unit = { if (sym != NoSymbol && sym.owner == oldowner) { sym.owner = newowner - if (sym.isModule) sym.moduleClass.owner = newowner + changedSymbols += sym + if (sym.isModule) { + sym.moduleClass.owner = newowner + changedSymbols += sym.moduleClass + } } } - override def traverse(tree: Tree) { + + override def apply[T <: Tree](tree: T): tree.type = { + traverse(tree) + if (changedSymbols.nonEmpty) + new InvalidateTypeCaches(changedSymbols).invalidate(treeTypes) + tree + } + + override def traverse(tree: Tree): Unit = { + if (tree.tpe != null) treeTypes += tree.tpe tree match { case _: Return => if (tree.symbol == oldowner) { @@ -1553,19 +1694,44 @@ trait Trees extends api.Trees { change(tree.symbol) case _ => } - super.traverse(tree) + tree.traverse(this) + } + } + + class LocalOwnersTraverser extends InternalTraverser { + val result: mutable.Set[Symbol] = mutable.Set.empty[Symbol] + + override def traverse(tree: Tree): Unit = { + tree match { + case _: DefTree | _: Function if(tree.hasExistingSymbol) => + result += tree.symbol + case _ => + } + tree.traverse(this) + } + } + + def changeNonLocalOwners(tree: Tree, newowner: Symbol): Unit = { + val localOwnersTraverser = new LocalOwnersTraverser + localOwnersTraverser(tree) + val localOwners = localOwnersTraverser.result + localOwners.foreach { sym => + if (!localOwners.contains(sym.owner)) { + sym.owner = newowner + if (sym.isModule) sym.moduleClass.owner = newowner + } } } - private class ShallowDuplicator(orig: Tree) extends Transformer { + private class ShallowDuplicator(orig: Tree) extends InternalTransformer { override val treeCopy = newStrictTreeCopier override def transform(tree: Tree) = - 
if (tree eq orig) super.transform(tree) + if (tree eq orig) tree.transform(this) else tree } /** A transformer that replaces tree `from` with tree `to` in a given tree */ - class TreeReplacer(from: Tree, to: Tree, positionAware: Boolean) extends Transformer { + class TreeReplacer(from: Tree, to: Tree, positionAware: Boolean) extends InternalTransformer { override def transform(t: Tree): Tree = { if (t == from) to else if (!positionAware || (t.pos includes from.pos) || t.pos.isTransparent) super.transform(t) @@ -1575,16 +1741,18 @@ trait Trees extends api.Trees { // Create a readable string describing a substitution. private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = { - "subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ") + val toAndFro = from.lazyZip(to).map((f, t) => s"$f -> $t").mkString("(", ", ", ")") + s"subst[$fromStr, $toStr]$toAndFro" } // NOTE: calls shallowDuplicate on trees in `to` to avoid problems when symbols in `from` // occur multiple times in the `tree` passed to `transform`, // otherwise, the resulting Tree would be a graph, not a tree... this breaks all sorts of stuff, // notably concerning the mutable aspects of Trees (such as setting their .tpe) - class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer { + class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends InternalTransformer { override def transform(tree: Tree): Tree = tree match { case Ident(_) => + @tailrec def subst(from: List[Symbol], to: List[Tree]): Tree = if (from.isEmpty) tree else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`? @@ -1598,26 +1766,26 @@ trait Trees extends api.Trees { /** Substitute clazz.this with `to`. `to` must be an attributed tree. 
*/ - class ThisSubstituter(clazz: Symbol, to: => Tree) extends Transformer { + class ThisSubstituter(clazz: Symbol, to: => Tree) extends InternalTransformer { val newtpe = to.tpe override def transform(tree: Tree) = { tree modifyType (_.substThis(clazz, newtpe)) tree match { case This(_) if tree.symbol == clazz => to - case _ => super.transform(tree) + case _ => tree.transform(this) } } } - class TypeMapTreeSubstituter(val typeMap: TypeMap) extends Traverser { - override def traverse(tree: Tree) { + class TypeMapTreeSubstituter(val typeMap: TypeMap) extends InternalTraverser { + override def traverse(tree: Tree): Unit = { tree modifyType typeMap if (tree.isDef) tree.symbol modifyInfo typeMap - super.traverse(tree) + tree.traverse(this) } - override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate) + override def apply[T <: Tree](tree: T): tree.type = super.apply(tree.duplicate) } class TreeTypeSubstituter(val from: List[Symbol], val to: List[Type]) extends TypeMapTreeSubstituter(new SubstTypeMap(from, to)) { @@ -1627,7 +1795,7 @@ trait Trees extends api.Trees { lazy val EmptyTreeTypeSubstituter = new TreeTypeSubstituter(List(), List()) - class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(new SubstSymMap(from, to)) { + class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(SubstSymMap(from, to)) { override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to) } @@ -1640,16 +1808,21 @@ trait Trees extends api.Trees { * without copying, and trees that define symbols with an `info` that refer * a symbol in `from` will have a new type assigned. 
*/ - class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer { - val symSubst = new SubstSymMap(from, to) - private var mutatedSymbols: List[Symbol] = Nil + class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends InternalTransformer { + val symSubst = SubstSymMap(from, to) + + protected val changedSymbols = mutable.Set.empty[Symbol] + protected val treeTypes = mutable.Set.empty[Type] + override def transform(tree: Tree): Tree = { - def subst(from: List[Symbol], to: List[Symbol]) { + @tailrec + def subst(from: List[Symbol], to: List[Symbol]): Unit = { if (!from.isEmpty) if (tree.symbol == from.head) tree setSymbol to.head else subst(from.tail, to.tail) } tree modifyType symSubst + if (tree.tpe != null) treeTypes += tree.tpe if (tree.hasSymbolField) { subst(from, to) @@ -1662,7 +1835,7 @@ trait Trees extends api.Trees { |TreeSymSubstituter: updated info of symbol ${sym} | Old: ${showRaw(sym.info, printTypes = true, printIds = true)} | New: ${showRaw(newInfo, printTypes = true, printIds = true)}""") - mutatedSymbols ::= sym + changedSymbols += sym sym updateInfo newInfo } } @@ -1680,58 +1853,58 @@ trait Trees extends api.Trees { case Select(qual, name0) if tree.symbol != NoSymbol => treeCopy.Select(tree, transform(qual), tree.symbol.name) case _ => - super.transform(tree) + tree.transform(this) } } else super.transform(tree) } def apply[T <: Tree](tree: T): T = { val tree1 = transform(tree) - invalidateTreeTpeCaches(tree1, mutatedSymbols) + if (changedSymbols.nonEmpty) + new InvalidateTypeCaches(changedSymbols).invalidate(treeTypes) tree1.asInstanceOf[T] } override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to) } - class ForeachTreeTraverser(f: Tree => Unit) extends Traverser { - override def traverse(t: Tree) { + class ForeachTreeTraverser(f: Tree => Unit) extends InternalTraverser { + override def traverse(t: Tree): Unit = { f(t) - super.traverse(t) + t.traverse(this) } } - class 
FilterTreeTraverser(p: Tree => Boolean) extends Traverser { + class FilterTreeTraverser(p: Tree => Boolean) extends InternalTraverser { val hits = mutable.ListBuffer[Tree]() - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { if (p(t)) hits += t - super.traverse(t) + t.traverse(this) } } - class CollectTreeTraverser[T](pf: PartialFunction[Tree, T]) extends Traverser { + class CollectTreeTraverser[T](pf: PartialFunction[Tree, T]) extends InternalTraverser { val results = mutable.ListBuffer[T]() - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { if (pf.isDefinedAt(t)) results += pf(t) - super.traverse(t) + t.traverse(this) } } - class FindTreeTraverser(p: Tree => Boolean) extends Traverser { + class FindTreeTraverser(p: Tree => Boolean) extends InternalTraverser { var result: Option[Tree] = None - override def traverse(t: Tree) { + override def traverse(t: Tree): Unit = { if (result.isEmpty) { - if (p(t)) result = Some(t) - super.traverse(t) + if (p(t)) result = Some(t) else t.traverse(this) } } } private lazy val duplicator = new Duplicator(focusPositions = true) - private class Duplicator(focusPositions: Boolean) extends Transformer { + private class Duplicator(focusPositions: Boolean) extends InternalTransformer { override val treeCopy = newStrictTreeCopier override def transform(t: Tree) = { - val t1 = super.transform(t) + val t1 = t.transform(this) if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus t1 } @@ -1744,6 +1917,16 @@ trait Trees extends api.Trees { t1 } } + + final def focusInPlace(t: Tree): t.type = + if (useOffsetPositions) t else { focuser traverse t; t } + private object focuser extends InternalTraverser { + override def traverse(t: Tree) = { + t setPos t.pos.focus + t traverse this + } + } + trait TreeStackTraverser extends Traverser { var path: List[Tree] = Nil abstract override def traverse(t: Tree) = { @@ -1753,25 +1936,6 @@ trait Trees extends api.Trees { } } - /** 
Tracks the classes currently under construction during a transform */ - trait UnderConstructionTransformer extends Transformer { - import collection.mutable - - protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz - - /** The stack of class symbols in which a call to this() or to the super - * constructor, or early definition is active */ - private val selfOrSuperCalls = mutable.Stack[Symbol]() - - abstract override def transform(tree: Tree) = { - if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree)) { - selfOrSuperCalls push currentOwner.owner - try super.transform(tree) - finally selfOrSuperCalls.pop() - } else super.transform(tree) - } - } - def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree // this is necessary to avoid crashes like https://github.com/scalamacros/paradise/issues/1 @@ -1805,7 +1969,7 @@ trait Trees extends api.Trees { if (rhs eq null) rhs0 else rhs ) case t => - sys.error("Not a DefDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a DefDef: " + t + "/" + t.getClass) } def copyValDef(tree: Tree)( mods: Modifiers = null, @@ -1821,7 +1985,7 @@ trait Trees extends api.Trees { if (rhs eq null) rhs0 else rhs ) case t => - sys.error("Not a ValDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a ValDef: " + t + "/" + t.getClass) } def copyTypeDef(tree: Tree)( mods: Modifiers = null, @@ -1837,7 +2001,7 @@ trait Trees extends api.Trees { if (rhs eq null) rhs0 else rhs ) case t => - sys.error("Not a TypeDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a TypeDef: " + t + "/" + t.getClass) } def copyClassDef(tree: Tree)( mods: Modifiers = null, @@ -1853,7 +2017,7 @@ trait Trees extends api.Trees { if (impl eq null) impl0 else impl ) case t => - sys.error("Not a ClassDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a ClassDef: " + t + "/" + t.getClass) } def copyModuleDef(tree: 
Tree)( @@ -1868,56 +2032,56 @@ trait Trees extends api.Trees { if (impl eq null) impl0 else impl ) case t => - sys.error("Not a ModuleDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a ModuleDef: " + t + "/" + t.getClass) } def deriveDefDef(ddef: Tree)(applyToRhs: Tree => Tree): DefDef = ddef match { case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) => treeCopy.DefDef(ddef, mods0, name0, tparams0, vparamss0, tpt0, applyToRhs(rhs0)) case t => - sys.error("Not a DefDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a DefDef: " + t + "/" + t.getClass) } def deriveValDef(vdef: Tree)(applyToRhs: Tree => Tree): ValDef = vdef match { case ValDef(mods0, name0, tpt0, rhs0) => treeCopy.ValDef(vdef, mods0, name0, tpt0, applyToRhs(rhs0)) case t => - sys.error("Not a ValDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a ValDef: " + t + "/" + t.getClass) } def deriveTemplate(templ: Tree)(applyToBody: List[Tree] => List[Tree]): Template = templ match { case Template(parents0, self0, body0) => treeCopy.Template(templ, parents0, self0, applyToBody(body0)) case t => - sys.error("Not a Template: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a Template: " + t + "/" + t.getClass) } def deriveClassDef(cdef: Tree)(applyToImpl: Template => Template): ClassDef = cdef match { case ClassDef(mods0, name0, tparams0, impl0) => treeCopy.ClassDef(cdef, mods0, name0, tparams0, applyToImpl(impl0)) case t => - sys.error("Not a ClassDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a ClassDef: " + t + "/" + t.getClass) } def deriveModuleDef(mdef: Tree)(applyToImpl: Template => Template): ModuleDef = mdef match { case ModuleDef(mods0, name0, impl0) => treeCopy.ModuleDef(mdef, mods0, name0, applyToImpl(impl0)) case t => - sys.error("Not a ModuleDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a ModuleDef: " + t + "/" + t.getClass) } def deriveCaseDef(cdef: Tree)(applyToBody: 
Tree => Tree): CaseDef = cdef match { case CaseDef(pat0, guard0, body0) => treeCopy.CaseDef(cdef, pat0, guard0, applyToBody(body0)) case t => - sys.error("Not a CaseDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a CaseDef: " + t + "/" + t.getClass) } def deriveLabelDef(ldef: Tree)(applyToRhs: Tree => Tree): LabelDef = ldef match { case LabelDef(name0, params0, rhs0) => treeCopy.LabelDef(ldef, name0, params0, applyToRhs(rhs0)) case t => - sys.error("Not a LabelDef: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a LabelDef: " + t + "/" + t.getClass) } def deriveFunction(func: Tree)(applyToRhs: Tree => Tree): Function = func match { case Function(params0, rhs0) => treeCopy.Function(func, params0, applyToRhs(rhs0)) case t => - sys.error("Not a Function: " + t + "/" + t.getClass) + throw new IllegalStateException("Not a Function: " + t + "/" + t.getClass) } private final class OnlyChildAccumulator extends (Tree => Boolean) { @@ -1939,63 +2103,63 @@ trait Trees extends api.Trees { } } } - private val onlyChildAccumulator = new ReusableInstance[OnlyChildAccumulator](() => new OnlyChildAccumulator, isCompilerUniverse) + private val onlyChildAccumulator = ReusableInstance[OnlyChildAccumulator](new OnlyChildAccumulator, enabled = isCompilerUniverse) // -------------- Classtags -------------------------------------------------------- - implicit val AlternativeTag = ClassTag[Alternative](classOf[Alternative]) - implicit val AnnotatedTag = ClassTag[Annotated](classOf[Annotated]) - implicit val AppliedTypeTreeTag = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree]) - implicit val ApplyTag = ClassTag[Apply](classOf[Apply]) - implicit val AssignOrNamedArgTag = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg]) - implicit val AssignTag = ClassTag[Assign](classOf[Assign]) - implicit val BindTag = ClassTag[Bind](classOf[Bind]) - implicit val BlockTag = ClassTag[Block](classOf[Block]) - implicit val CaseDefTag = 
ClassTag[CaseDef](classOf[CaseDef]) - implicit val ClassDefTag = ClassTag[ClassDef](classOf[ClassDef]) - implicit val CompoundTypeTreeTag = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree]) - implicit val DefDefTag = ClassTag[DefDef](classOf[DefDef]) - implicit val DefTreeTag = ClassTag[DefTree](classOf[DefTree]) + implicit val AlternativeTag: ClassTag[Alternative] = ClassTag[Alternative](classOf[Alternative]) + implicit val AnnotatedTag: ClassTag[Annotated] = ClassTag[Annotated](classOf[Annotated]) + implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree] = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree]) + implicit val ApplyTag: ClassTag[Apply] = ClassTag[Apply](classOf[Apply]) + implicit val NamedArgTag: ClassTag[NamedArg] = ClassTag[NamedArg](classOf[NamedArg]) + implicit val AssignTag: ClassTag[Assign] = ClassTag[Assign](classOf[Assign]) + implicit val BindTag: ClassTag[Bind] = ClassTag[Bind](classOf[Bind]) + implicit val BlockTag: ClassTag[Block] = ClassTag[Block](classOf[Block]) + implicit val CaseDefTag: ClassTag[CaseDef] = ClassTag[CaseDef](classOf[CaseDef]) + implicit val ClassDefTag: ClassTag[ClassDef] = ClassTag[ClassDef](classOf[ClassDef]) + implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree] = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree]) + implicit val DefDefTag: ClassTag[DefDef] = ClassTag[DefDef](classOf[DefDef]) + implicit val DefTreeTag: ClassTag[DefTree] = ClassTag[DefTree](classOf[DefTree]) implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree] = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree]) - implicit val FunctionTag = ClassTag[Function](classOf[Function]) - implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply]) - implicit val IdentTag = ClassTag[Ident](classOf[Ident]) - implicit val IfTag = ClassTag[If](classOf[If]) - implicit val ImplDefTag = ClassTag[ImplDef](classOf[ImplDef]) - implicit val ImportSelectorTag = ClassTag[ImportSelector](classOf[ImportSelector]) - 
implicit val ImportTag = ClassTag[Import](classOf[Import]) - implicit val LabelDefTag = ClassTag[LabelDef](classOf[LabelDef]) - implicit val LiteralTag = ClassTag[Literal](classOf[Literal]) - implicit val MatchTag = ClassTag[Match](classOf[Match]) - implicit val MemberDefTag = ClassTag[MemberDef](classOf[MemberDef]) - implicit val ModuleDefTag = ClassTag[ModuleDef](classOf[ModuleDef]) - implicit val NameTreeTag = ClassTag[NameTree](classOf[NameTree]) - implicit val NewTag = ClassTag[New](classOf[New]) - implicit val PackageDefTag = ClassTag[PackageDef](classOf[PackageDef]) - implicit val ReferenceToBoxedTag = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed]) - implicit val RefTreeTag = ClassTag[RefTree](classOf[RefTree]) - implicit val ReturnTag = ClassTag[Return](classOf[Return]) - implicit val SelectFromTypeTreeTag = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree]) - implicit val SelectTag = ClassTag[Select](classOf[Select]) - implicit val SingletonTypeTreeTag = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree]) - implicit val StarTag = ClassTag[Star](classOf[Star]) - implicit val SuperTag = ClassTag[Super](classOf[Super]) - implicit val SymTreeTag = ClassTag[SymTree](classOf[SymTree]) - implicit val TemplateTag = ClassTag[Template](classOf[Template]) - implicit val TermTreeTag = ClassTag[TermTree](classOf[TermTree]) - implicit val ThisTag = ClassTag[This](classOf[This]) - implicit val ThrowTag = ClassTag[Throw](classOf[Throw]) - implicit val TreeTag = ClassTag[Tree](classOf[Tree]) - implicit val TryTag = ClassTag[Try](classOf[Try]) - implicit val TypTreeTag = ClassTag[TypTree](classOf[TypTree]) - implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply]) - implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree]) - implicit val TypeDefTag = ClassTag[TypeDef](classOf[TypeDef]) - implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree]) - implicit val TypedTag = ClassTag[Typed](classOf[Typed]) - implicit 
val UnApplyTag = ClassTag[UnApply](classOf[UnApply]) - implicit val ValDefTag = ClassTag[ValDef](classOf[ValDef]) - implicit val ValOrDefDefTag = ClassTag[ValOrDefDef](classOf[ValOrDefDef]) + implicit val FunctionTag: ClassTag[Function] = ClassTag[Function](classOf[Function]) + implicit val GenericApplyTag: ClassTag[GenericApply] = ClassTag[GenericApply](classOf[GenericApply]) + implicit val IdentTag: ClassTag[Ident] = ClassTag[Ident](classOf[Ident]) + implicit val IfTag: ClassTag[If] = ClassTag[If](classOf[If]) + implicit val ImplDefTag: ClassTag[ImplDef] = ClassTag[ImplDef](classOf[ImplDef]) + implicit val ImportSelectorTag: ClassTag[ImportSelector] = ClassTag[ImportSelector](classOf[ImportSelector]) + implicit val ImportTag: ClassTag[Import] = ClassTag[Import](classOf[Import]) + implicit val LabelDefTag: ClassTag[LabelDef] = ClassTag[LabelDef](classOf[LabelDef]) + implicit val LiteralTag: ClassTag[Literal] = ClassTag[Literal](classOf[Literal]) + implicit val MatchTag: ClassTag[Match] = ClassTag[Match](classOf[Match]) + implicit val MemberDefTag: ClassTag[MemberDef] = ClassTag[MemberDef](classOf[MemberDef]) + implicit val ModuleDefTag: ClassTag[ModuleDef] = ClassTag[ModuleDef](classOf[ModuleDef]) + implicit val NameTreeTag: ClassTag[NameTree] = ClassTag[NameTree](classOf[NameTree]) + implicit val NewTag: ClassTag[New] = ClassTag[New](classOf[New]) + implicit val PackageDefTag: ClassTag[PackageDef] = ClassTag[PackageDef](classOf[PackageDef]) + implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed] = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed]) + implicit val RefTreeTag: ClassTag[RefTree] = ClassTag[RefTree](classOf[RefTree]) + implicit val ReturnTag: ClassTag[Return] = ClassTag[Return](classOf[Return]) + implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree] = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree]) + implicit val SelectTag: ClassTag[Select] = ClassTag[Select](classOf[Select]) + implicit val SingletonTypeTreeTag: 
ClassTag[SingletonTypeTree] = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree]) + implicit val StarTag: ClassTag[Star] = ClassTag[Star](classOf[Star]) + implicit val SuperTag: ClassTag[Super] = ClassTag[Super](classOf[Super]) + implicit val SymTreeTag: ClassTag[SymTree] = ClassTag[SymTree](classOf[SymTree]) + implicit val TemplateTag: ClassTag[Template] = ClassTag[Template](classOf[Template]) + implicit val TermTreeTag: ClassTag[TermTree] = ClassTag[TermTree](classOf[TermTree]) + implicit val ThisTag: ClassTag[This] = ClassTag[This](classOf[This]) + implicit val ThrowTag: ClassTag[Throw] = ClassTag[Throw](classOf[Throw]) + implicit val TreeTag: ClassTag[Tree] = ClassTag[Tree](classOf[Tree]) + implicit val TryTag: ClassTag[Try] = ClassTag[Try](classOf[Try]) + implicit val TypTreeTag: ClassTag[TypTree] = ClassTag[TypTree](classOf[TypTree]) + implicit val TypeApplyTag: ClassTag[TypeApply] = ClassTag[TypeApply](classOf[TypeApply]) + implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree] = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree]) + implicit val TypeDefTag: ClassTag[TypeDef] = ClassTag[TypeDef](classOf[TypeDef]) + implicit val TypeTreeTag: ClassTag[TypeTree] = ClassTag[TypeTree](classOf[TypeTree]) + implicit val TypedTag: ClassTag[Typed] = ClassTag[Typed](classOf[Typed]) + implicit val UnApplyTag: ClassTag[UnApply] = ClassTag[UnApply](classOf[UnApply]) + implicit val ValDefTag: ClassTag[ValDef] = ClassTag[ValDef](classOf[ValDef]) + implicit val ValOrDefDefTag: ClassTag[ValOrDefDef] = ClassTag[ValOrDefDef](classOf[ValOrDefDef]) } trait TreesStats { diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index da25135bb296..ed99069fda2e 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -63,29 +63,12 @@ trait TypeDebugging { /** Light color wrappers. */ - object typeDebug { - import scala.Console._ - - private val colorsOk = scala.util.Properties.coloredOutputEnabled - private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s - private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s - - def inLightRed(s: String) = inColor(s, RED) - def inLightGreen(s: String) = inColor(s, GREEN) - def inLightMagenta(s: String) = inColor(s, MAGENTA) - def inLightCyan(s: String): String = inColor(s, CYAN) - def inGreen(s: String): String = inBold(s, GREEN) - def inRed(s: String): String = inBold(s, RED) - def inBlue(s: String): String = inBold(s, BLUE) - def inCyan(s: String): String = inBold(s, CYAN) - def inMagenta(s: String) = inBold(s, MAGENTA) - def resetColor(s: String): String = if (colorsOk) s + RESET else s - + object typeDebug extends TypeDebugging.AnsiColor { private def to_s(x: Any): String = x match { // otherwise case classes are caught looking like products case _: Tree | _: Type => "" + x - case x: TraversableOnce[_] => x mkString ", " - case x: Product => x.productIterator mkString ("(", ", ", ")") + case x: IterableOnce[_] => x.iterator mkString ", " + case x: Product => x.productIterator.mkString("(", ", ", ")") case _ => "" + x } def ptBlock(label: String, pairs: (String, Any)*): String = { @@ -121,19 +104,23 @@ trait TypeDebugging { } def ptTypeParam(td: TypeDef): String = { val TypeDef(_, name, tparams, rhs) = td - name + ptTypeParams(tparams) + ptTree(rhs) + name.toString + ptTypeParams(tparams) + ptTree(rhs) } def ptTypeParams(tparams: List[TypeDef]): String = str brackets (tparams map ptTypeParam) object str { def parentheses(xs: List[_]): String = xs.mkString("(", ", ", ")") + def params(params: List[Symbol]): String = 
{ + val paramsStrPre = if (params.nonEmpty && params.head.isImplicit) "(implicit " else "(" + params.map(_.defStringWithoutImplicit).mkString(paramsStrPre, ", ", ")") + } def brackets(xs: List[_]): String = if (xs.isEmpty) "" else xs.mkString("[", ", ", "]") def tparams(tparams: List[Type]): String = brackets(tparams map debug) def parents(ps: List[Type]): String = (ps map debug).mkString(" with ") def refine(defs: Scope): String = defs.toList.mkString("{", " ;\n ", "}") def bounds(lo: Type, hi: Type): String = { val lo_s = if (lo.isNothing) "" else s" >: $lo" - val hi_s = if (hi.isAny) "" else s" <: $hi" + val hi_s = if (typeIsAnyOrJavaObject(hi)) "" else s" <: $hi" lo_s + hi_s } } @@ -152,7 +139,37 @@ trait TypeDebugging { } def debugString(tp: Type) = debug(tp) } - def paramString(tp: Type) = typeDebug.str parentheses (tp.params map (_.defString)) - def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString)) + def paramString(tp: Type) = typeDebug.str params tp.params + def typeParamsString(tp: Type) = typeDebug.str.brackets(tp.typeParams.map(_.defString)) def debugString(tp: Type) = typeDebug debugString tp } + +object TypeDebugging { + object AnsiColor extends AnsiColor { + implicit class StringColorOps(private val s: String) extends AnyVal { + def red = inLightRed(s) + def green = inLightGreen(s) + def yellow = inLightYellow(s) + def blue = inLightBlue(s) + } + } + + trait AnsiColor extends scala.io.AnsiColor { + private[this] val colorsOk = scala.util.Properties.coloredOutputEnabled + private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s + private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s + + def inLightRed(s: String) = inColor(s, RED) + def inLightBlue(s: String) = inColor(s, BLUE) + def inLightGreen(s: String) = inColor(s, GREEN) + def inLightYellow(s: String): String = inColor(s, YELLOW) + def inLightMagenta(s: String) = 
inColor(s, MAGENTA) + def inLightCyan(s: String): String = inColor(s, CYAN) + def inGreen(s: String): String = inBold(s, GREEN) + def inRed(s: String): String = inBold(s, RED) + def inBlue(s: String): String = inBold(s, BLUE) + def inCyan(s: String): String = inBold(s, CYAN) + def inMagenta(s: String) = inBold(s, MAGENTA) + def resetColor(s: String): String = if (colorsOk) s + RESET else s + } +} diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1ef1fb61c773..aac8d2f7ee63 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,18 +15,18 @@ package reflect package internal import java.util.Objects - -import scala.collection.{immutable, mutable} +import scala.collection.mutable import scala.ref.WeakReference -import mutable.{ListBuffer, LinkedHashSet} +import mutable.{LinkedHashSet, ListBuffer} import Flags._ import scala.util.control.ControlThrowable -import scala.annotation.tailrec -import util.Statistics +import scala.annotation.{tailrec, unused} +import util.{ReusableInstance, Statistics} import util.ThreeValues._ import Variance._ import Depth._ import TypeConstants._ +import scala.util.chaining._ /* A standard type pattern match: case ErrorType => @@ -97,14 +97,14 @@ trait Types import definitions._ import statistics._ - private var explainSwitch = false - private final val emptySymbolSet = immutable.Set.empty[Symbol] + private[this] var explainSwitch = false + @unused private final val emptySymbolSet = Set.empty[Symbol] + @unused private final val breakCycles = settings.breakCycles.value /** In case anyone wants to turn on type parameter bounds being used * to seed type constraints. 
*/ - private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints" - private final val sharperSkolems = sys.props contains "scalac.experimental.sharper-skolems" + private final val sharperSkolems = System.getProperty("scalac.experimental.sharper-skolems") != null /** Caching the most recent map has a 75-90% hit rate. */ private object substTypeMapCache { @@ -120,7 +120,7 @@ trait Types /** The current skolemization level, needed for the algorithms * in isSameType, isSubType that do constraint solving under a prefix. */ - private var _skolemizationLevel = 0 + private[this] var _skolemizationLevel = 0 def skolemizationLevel = _skolemizationLevel def skolemizationLevel_=(value: Int) = _skolemizationLevel = value @@ -129,7 +129,7 @@ trait Types * It makes use of the fact that these two operations depend only on the parents, * not on the refinement. */ - private val _intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]() + private[this] val _intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]() def intersectionWitness = _intersectionWitness /** A proxy for a type (identified by field `underlying`) that forwards most @@ -152,10 +152,7 @@ trait Types override def params = underlying.params override def paramTypes = underlying.paramTypes override def termSymbol = underlying.termSymbol - override def termSymbolDirect = underlying.termSymbolDirect override def typeParams = underlying.typeParams - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms = underlying.boundSyms override def typeSymbol = underlying.typeSymbol override def typeSymbolDirect = underlying.typeSymbolDirect override def widen = underlying.widen @@ -165,6 +162,7 @@ trait Types override def upperBound = underlying.upperBound override def parents = underlying.parents override def prefix = underlying.prefix + override def prefixDirect = underlying.prefixDirect 
override def decls = underlying.decls override def baseType(clazz: Symbol) = underlying.baseType(clazz) override def baseTypeSeq = underlying.baseTypeSeq @@ -177,19 +175,7 @@ trait Types * forwarded here. Some operations are rewrapped again. */ trait RewrappingTypeProxy extends SimpleTypeProxy { - protected def maybeRewrap(newtp: Type) = ( - if (newtp eq underlying) this - else { - // - BoundedWildcardTypes reach here during erroneous compilation: neg/t6258 - // - Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800 - // - Avoid reusing the existing Wrapped(RefinedType) when we've be asked to wrap an =:= RefinementTypeRef, the - // distinction is important in base type sequences. See TypesTest.testExistentialRefinement - // - Otherwise, if newtp =:= underlying, don't rewrap it. - val hasSpecialMeaningBeyond_=:= = newtp.isWildcard || newtp.isHigherKinded || newtp.isInstanceOf[RefinementTypeRef] - if (!hasSpecialMeaningBeyond_=:= && (newtp =:= underlying)) this - else rewrap(newtp) - } - ) + protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp) protected def rewrap(newtp: Type): Type // the following are all operations in class Type that are overridden in some subclass @@ -214,7 +200,7 @@ trait Types override def prefixString = underlying.prefixString override def isComplete = underlying.isComplete override def complete(sym: Symbol) = underlying.complete(sym) - override def load(sym: Symbol) { underlying.load(sym) } + override def load(sym: Symbol): Unit = { underlying.load(sym) } override def withAnnotations(annots: List[AnnotationInfo]) = maybeRewrap(underlying.withAnnotations(annots)) override def withoutAnnotations = maybeRewrap(underlying.withoutAnnotations) } @@ -241,7 +227,7 @@ trait Types } // erasure screws up all ThisTypes for modules into PackageTypeRefs // we need to unscrew them, or certain typechecks will fail mysteriously - // 
http://groups.google.com/group/scala-internals/browse_thread/thread/6d3277ae21b6d581 + // https://groups.google.com/group/scala-internals/browse_thread/thread/6d3277ae21b6d581 result = result.map(tpe => tpe match { case tpe: PackageTypeRef => ThisType(tpe.sym) case _ => tpe @@ -275,7 +261,7 @@ trait Types */ def isTrivial: Boolean = false - /** Is this type higher-kinded, i.e., is it a type constructor @M */ + /** Is this type higher-kinded, i.e., is it a type constructor \@M */ def isHigherKinded: Boolean = false def takesTypeArgs: Boolean = this.isHigherKinded @@ -314,7 +300,7 @@ trait Types * This is assessed to be the case if the class is final, * and all type parameters (if any) are invariant. */ - def isFinalType = typeSymbol.hasOnlyBottomSubclasses && prefix.isStable + def isFinalType: Boolean = typeSymbol.hasOnlyBottomSubclasses && prefix.isStable /** Is this type completed (i.e. not a lazy type)? */ def isComplete: Boolean = true @@ -323,12 +309,12 @@ trait Types def isShowAsInfixType: Boolean = false /** If this is a lazy type, assign a new type to `sym`. */ - def complete(sym: Symbol) {} + def complete(sym: Symbol): Unit = () /** If this is a lazy type corresponding to a subclass add it to its * parents children */ - def forceDirectSuperclasses: Unit = () + def forceDirectSuperclasses(): Unit = () /** The term symbol associated with the type * Note that the symbol of the normalized type is returned (@see normalize) @@ -343,10 +329,6 @@ trait Types */ def typeSymbol: Symbol = NoSymbol - /** The term symbol ''directly'' associated with the type. - */ - def termSymbolDirect: Symbol = termSymbol - /** The type symbol ''directly'' associated with the type. * In other words, no normalization is performed: if this is an alias type, * the symbol returned is that of the alias, not the underlying type. 
@@ -418,15 +400,21 @@ trait Types * The empty list for all other types */ def parents: List[Type] = List() - /** For a class with nonEmpty parents, the first parent. + /** For a class with !isEmpty parents, the first parent. * Otherwise some specific fixed top type. */ - def firstParent = if (!parents.isEmpty) parents.head else ObjectTpe + def firstParent: Type = if (!parents.isEmpty) parents.head else ObjectTpe /** For a typeref or single-type, the prefix of the normalized type (@see normalize). * NoType for all other types. */ def prefix: Type = NoType + /** The prefix ''directly'' associated with the type. + * In other words, no normalization is performed: if this is an alias type, + * the prefix returned is that of the alias, not the underlying type. + */ + def prefixDirect: Type = prefix + /** A chain of all typeref or singletype prefixes of this type, longest first. * (Only used from safeToString.) */ @@ -436,7 +424,7 @@ trait Types case _ => List() } - /** This type, without its type arguments @M */ + /** This type, without its type arguments \@M */ def typeConstructor: Type = this /** For a typeref, its arguments. The empty list for all other types */ @@ -480,14 +468,9 @@ trait Types * the empty list for all other types */ def typeParams: List[Symbol] = List() - /** For a (potentially wrapped) poly, method or existential type, its directly bound symbols, - * the empty set for all other types */ - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - def boundSyms: immutable.Set[Symbol] = emptySymbolSet - /** Replace formal type parameter symbols with actual type arguments. ErrorType on arity mismatch. * - * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M + * Amounts to substitution except for higher-kinded types. 
(See overridden method in TypeRef) -- \@M */ def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type = if (sameLength(formals, actuals)) this.subst(formals, actuals) else ErrorType @@ -544,7 +527,8 @@ trait Types * !!! - and yet it is still inadequate, because aliases and singletons * might lurk in the upper bounds of an abstract type. See scala/bug#7051. */ - def dealiasWiden: Type = { + @tailrec + final def dealiasWiden: Type = { val widened = widen if (this ne widened) widened.dealiasWiden else { @@ -753,7 +737,7 @@ trait Types */ def substSym(from: List[Symbol], to: List[Symbol]): Type = if ((from eq to) || from.isEmpty) this - else new SubstSymMap(from, to) apply this + else SubstSymMap(from, to).apply(this) /** Substitute all occurrences of `ThisType(from)` in this type by `to`. * @@ -783,8 +767,8 @@ trait Types def withFilter(p: Type => Boolean) = new FilterMapForeach(p) class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){ - def foreach[U](f: Type => U): Unit = collect(Type.this) foreach f - def map[T](f: Type => T): List[T] = collect(Type.this) map f + def foreach[U](f: Type => U): Unit = this.collect(Type.this).foreach(f) + def map[T](f: Type => T): List[T] = this.collect(Type.this).map(f) } @inline final def orElse(alt: => Type): Type = if (this ne NoType) this else alt @@ -795,14 +779,14 @@ trait Types def find(p: Type => Boolean): Option[Type] = new FindTypeCollector(p).collect(this) /** Apply `f` to each part of this type */ - def foreach(f: Type => Unit) { new ForEachTypeTraverser(f).traverse(this) } + def foreach(f: Type => Unit): Unit = { new ForEachTypeTraverser(f).traverse(this) } /** Apply `pf` to each part of this type on which the function is defined */ def collect[T](pf: PartialFunction[Type, T]): List[T] = new CollectTypeCollector(pf).collect(this) /** Apply `f` to each part of this type; children get mapped before their parents */ def map(f: Type => Type): Type = new TypeMap { - def apply(x: Type) = 
f(mapOver(x)) + def apply(x: Type) = f(x.mapOver(this)) } apply this /** Is there part of this type which satisfies predicate `p`? */ @@ -835,7 +819,7 @@ trait Types case _ => false } case TypeRef(_, sym, args) => - val that1 = existentialAbstraction(args map (_.typeSymbol), that) + val that1 = existentialAbstraction(args.map(_.typeSymbol), that) (that ne that1) && (this <:< that1) && { debuglog(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1") true @@ -897,7 +881,7 @@ trait Types * there is a Ti so that T <:< Ti <:< T'. * * This is also known as the upward closed set of the partially ordered set of - * class types under Symbol#isLess (a refinement of Symbol#isSubclass). + * class types under Symbol#isLess (a refinement of Symbol#isSubClass). * * See "Base Types and Member Definitions" in spec/03-types.md. */ @@ -990,14 +974,14 @@ trait Types } /** If this is a symbol loader type, load and assign a new type to `sym`. */ - def load(sym: Symbol) {} + def load(sym: Symbol): Unit = {} - private def findDecl(name: Name, excludedFlags: Int): Symbol = { - var alts: List[Symbol] = List() + private def findDecl(name: Name, excludedFlags: Long): Symbol = { + var alts: List[Symbol] = Nil var sym: Symbol = NoSymbol var e: ScopeEntry = decls.lookupEntry(name) while (e ne null) { - if (!e.sym.hasFlag(excludedFlags.toLong)) { + if (!e.sym.hasFlag(excludedFlags)) { if (sym == NoSymbol) sym = e.sym else { if (alts.isEmpty) alts = sym :: Nil @@ -1007,7 +991,7 @@ trait Types e = decls.lookupNextEntry(e) } if (alts.isEmpty) sym - else (baseClasses.head.newOverloaded(this, alts)) + else baseClasses.head.newOverloaded(this, alts) } /** Find all members meeting the flag requirements. 
@@ -1025,7 +1009,7 @@ trait Types def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = { def findMembersInternal = new FindMembers(this, excludedFlags, requiredFlags).apply() if (this.isGround) findMembersInternal - else suspendingTypeVars(typeVarsInType(this))(findMembersInternal) + else suspendingTypeVars(typeVarsInTypeRev(this))(findMembersInternal) } /** @@ -1044,7 +1028,7 @@ trait Types } if (this.isGround) findMemberInternal - else suspendingTypeVars(typeVarsInType(this))(findMemberInternal) + else suspendingTypeVars(typeVarsInTypeRev(this))(findMemberInternal) } /** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */ @@ -1072,9 +1056,13 @@ trait Types def filterAnnotations(p: AnnotationInfo => Boolean): Type = this def setAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this) def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this) + def withAnnotation(anno: AnnotationInfo): Type = withAnnotations(List(anno)) /** The kind of this type; used for debugging */ def kind: String = "unknown type of class "+getClass() + + def mapOver(map: TypeMap): Type = this + def foldOver(folder: TypeFolder): Unit = {} } // Subclasses ------------------------------------------------------------ @@ -1098,8 +1086,6 @@ trait Types override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq override def baseTypeSeqDepth: Depth = supertype.baseTypeSeqDepth override def baseClasses: List[Symbol] = supertype.baseClasses - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms: Set[Symbol] = emptySymbolSet } /** A base class for types that represent a single value @@ -1111,15 +1097,13 @@ trait Types // Spec: "The base types of a singleton type `$p$.type` are the base types of the type of $p$." 
// override def baseTypeSeq: BaseTypeSeq = underlying.baseTypeSeq override def isHigherKinded = false // singleton type classifies objects, thus must be kind * - override def safeToString: String = { - // Avoiding printing Predef.type and scala.package.type as "type", - // since in all other cases we omit those prefixes. - val pre = underlying.typeSymbol.skipPackageObject - if (pre.isOmittablePrefix) pre.fullName + ".type" - else prefixString + "type" - } - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms: Set[Symbol] = emptySymbolSet + // Avoid printing Predef.type and scala.package.type as "type", + // since in all other cases we omit those prefixes. Do not skipPackageObject. + override def safeToString: String = + termSymbol match { + case s if s.isOmittablePrefix => s"${if (s.isPackageObjectOrClass || s.isJavaDefined) s.fullNameString else s.nameString}.type" + case _ => s"${prefixString}type" + } } /** An object representing an erroneous type */ @@ -1144,10 +1128,14 @@ trait Types /** An object representing an unknown type, used during type inference. * If you see WildcardType outside of inference it is almost certainly a bug. */ - case object WildcardType extends Type { - override def isWildcard = true + case object WildcardType extends ProtoType { override def safeToString: String = "?" override def kind = "WildcardType" + + /** Equivalent to `List.fill(WildcardType)`, but more efficient as short lists are drawn from a cache. 
*/ + def fillList(n: Int): List[WildcardType.type] = if (n < FillListCacheLimit) FillListCache(n) else List.fill(n)(WildcardType) + private[this] final val FillListCacheLimit = 32 + private[this] lazy val FillListCache: Array[List[WildcardType.type]] = Array.iterate(List[WildcardType.type](), FillListCacheLimit)(WildcardType :: _) } /** BoundedWildcardTypes, used only during type inference, are created in * two places that I can find: @@ -1159,16 +1147,224 @@ trait Types * type is created: a MethodType with parameters typed as * BoundedWildcardTypes. */ - case class BoundedWildcardType(override val bounds: TypeBounds) extends Type with BoundedWildcardTypeApi { + case class BoundedWildcardType(override val bounds: TypeBounds) extends ProtoType with BoundedWildcardTypeApi { override def upperBound: Type = bounds.hi override def lowerBound: Type = bounds.lo - override def isWildcard = true + override def isMatchedBy(tp: Type, depth: Depth)= isSubType(tp, bounds.hi, depth) + override def canMatch(tp: Type, depth: Depth): Boolean = isSubType(bounds.lo, tp, depth) + override def registerTypeEquality(tp: Type): Boolean = bounds.containsType(tp) + override def toBounds: TypeBounds = bounds + override def members = bounds.lo.members + + override def toVariantType: Type = bounds override def safeToString: String = "?" + bounds override def kind = "BoundedWildcardType" + override def mapOver(map: TypeMap): Type = { + val bounds1 = map(bounds) + if (bounds1 eq bounds) this + else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds]) + } + override def foldOver(folder: TypeFolder): Unit = folder(bounds) } object BoundedWildcardType extends BoundedWildcardTypeExtractor + abstract class ProtoType extends Type { + def toBounds: TypeBounds = TypeBounds.empty + + override def isWildcard = true + override def members = ErrorType.decls + + // tp <:< this prototype? + def isMatchedBy(tp: Type, depth: Depth): Boolean = true + + // could this prototype <:< tp? 
+ def canMatch(tp: Type, depth: Depth): Boolean = true + + // when comparing for type equality + def registerTypeEquality(tp: Type): Boolean = true + + // Does this prototype denote that we're expecting a function? + def expectsFunctionType: Boolean = false + + def asFunctionType: Type = NoType + + // represent this type as a ground type for use in varianceInType + def toVariantType: Type = NoType + } + + /** Lazily compute expected types for arguments to overloaded methods. + * Primarily to improve parameter type inference for higher-order overloaded methods. + * + * Normally, overload resolution types the arguments to the alternatives without an expected type. + * However, typing function literals and eta-expansion are driven by the expected type: + * - function literals usually don't have parameter types, which are derived from the expected type; + * - eta-expansion right now only happens when a function/sam type is expected. + * + * Now that the collections are full of overloaded HO methods, we should try harder to type check them nicely. + * + * (This paragraph is conceptually true, but not a spec.) To avoid breaking existing code, + * we only provide an expected type (for each argument position) when: + * - there is at least one FunctionN type expected by one of the overloads: + * in this case, the expected type is a FunctionN[Ti, ?], where Ti are the argument types (they must all be =:=), + * and the expected result type is elided using a wildcard. + * This does not exclude any overloads that expect a SAM, because they conform to a function type through SAM conversion + * - OR: all overloads expect a SAM type of the same class, but with potentially varying result types (argument types must be =:=) + * - OR: all expected types collapse to the same type (by =:=, pushing down method type params to arguments types) + * + * We allow polymorphic cases, taking account any instantiation by the AntiPolyType prefix. 
+ * Constructors of polymorphic classes are not supported (type param occurrences use fresh symbols, hard to relate to class's type params). + * + * In all other cases, the old behavior is maintained: Wildcard is expected. + */ + final case class OverloadedArgProto(argIdx: Either[Int, Name], pre: Type, alternatives: List[Symbol])(origUndets: List[Symbol]) extends ProtoType with SimpleTypeProxy { + override def safeToString: String = underlying.safeToString + override def kind = "OverloadedArgProto" + + override def underlying: Type = protoTp + + // If underlying is not wildcard, we may have constrained a first-try-typing too much, + // so, when `!isWildcard` implicit search will try again with no expected type at all. + // See e.g., adaptToArguments's code paths that depend on `isWildcard` + override def isWildcard = underlying.isWildcard + + // Always match if we couldn't collapse the expected types contributed for this argument by the alternatives. + // TODO: could we just match all function-ish types as an optimization? We previously used WildcardType + override def isMatchedBy(tp: Type, depth: Depth): Boolean = + isPastTyper || underlying == WildcardType || + isSubType(tp, underlying, depth) || + // NOTE: converting tp to a function type won't work, since `tp` need not be an actual sam type, + // just some subclass of the sam expected by one of our overloads + sameTypesFoldedSam.exists { underlyingSam => isSubType(tp, underlyingSam, depth) } // overload_proto_collapse.scala:55 + + // Empty signals failure. We don't consider the 0-ary HOF case, since we are only concerned with inferring param types for these functions anyway + def hofParamTypes = functionOrPfOrSamArgTypes(underlying) + + override def expectsFunctionType: Boolean = !hofParamTypes.isEmpty + + // TODO: include result type? 
+ override def asFunctionType = + if (expectsFunctionType) functionType(hofParamTypes, WildcardType) + else NoType + + override def mapOver(map: TypeMap): Type = { + val pre1 = pre.mapOver(map) + val alts1 = map.mapOver(alternatives) + if ((pre ne pre1) || (alternatives ne alts1)) OverloadedArgProto(argIdx, pre1, alts1)(origUndets) + else this + } + + override def foldOver(folder: TypeFolder): Unit = { + pre.foldOver(folder) + folder.foldOver(alternatives) + } + + // TODO + // override def registerTypeEquality(tp: Type): Boolean = protoTp =:= tp + + + // TODO: use =:=, but `!(typeOf[String with AnyRef] =:= typeOf[String])` (https://github.com/scala/scala-dev/issues/530) + private def same(x: Type, y: Type) = (x <:< y) && (y <:< x) + + private object ParamAtIdx { + def unapply(params: List[Symbol]): Option[Type] = { + lazy val lastParamTp = params.last.tpe + + val argIdxMapped = argIdx match { + case Left(idx) => idx + case Right(name) => params.indexWhere(p => p.name == name && !p.isSynthetic) + } + + // if we're asking for the last argument, or past, and it happens to be a repeated param -- strip the vararg marker and return the type + if (!params.isEmpty && params.lengthCompare(argIdxMapped + 1) <= 0 && isRepeatedParamType(lastParamTp)) { + Some(lastParamTp.dealiasWiden.typeArgs.head) + } else if (params.isDefinedAt(argIdxMapped)) { + Some(dropByName(params(argIdxMapped).tpe)) + } else None + } + } + + // replace origUndets: in chained calls, drop undets coming from earlier parts of the chain -- see pos/t11511 + // replace tparams in top-level PolyType: we don't want bounded wildcards showing up for an f-bounded type param... 
+ private def toWild(tp: Type): Type = tp match { + case PolyType(tparams, tp) => + val undets = tparams ++ origUndets + new SubstTypeMap(undets, WildcardType.fillList(undets.length)).apply(tp) + case tp => + new SubstTypeMap(origUndets, WildcardType.fillList(origUndets.length)).apply(tp) + } + + private lazy val sameTypesFolded = { + // Collect all expected types contributed by the various alternatives for this argument (TODO: repeated params?) + // Relative to `pre` at `alt.owner`, with `alt`'s type params approximated. + def typeOfAlt(alt: Symbol): Type = + // Use memberType so that a pre: AntiPolyType can instantiate its type params + pre.memberType(alt) match { + case PolyType(tparams, MethodType(ParamAtIdx(paramTp), res)) => PolyType(tparams, paramTp.asSeenFrom(pre, alt.owner)) + case MethodType(ParamAtIdx(paramTp), res) + if !(alt.isConstructor && alt.owner.info.isInstanceOf[PolyType]) => paramTp.asSeenFrom(pre, alt.owner) // TODO: can we simplify this (Are those params in origUndets by chance?) 
+ // this is just too ugly, but the type params are out of whack and thus toWild won't catch them unless we rewrite as follows: + // if (alt.isConstructor && alt.owner.info.isInstanceOf[PolyType]) { + // PolyType(alt.owner.info.typeParams.map(_.tpe.asSeenFrom(pre, alt.owner).typeSymbol), paramTp.asSeenFrom(pre, alt.owner)) + // } else paramTp.asSeenFrom(pre, alt.owner) + case _ => NoType + } + // alternatives.map(fili).contains(NoType) implies sameTypesFolded.contains(NoType) + // so, if one alternative did not contribute an argument type, we'll not collapse this column + alternatives.foldLeft(Nil: List[Type]) { case (acc, alter) => + typeOfAlt(alter) match { + case WildcardType => acc + case tp => if (acc.exists(same(tp, _))) acc else tp :: acc + } + } + } + + private lazy val sameTypesFoldedSam = + sameTypesFolded.iterator.map(toWild).filter(tp => samOf(tp).exists).toList + + // Try to collapse all expected argument types (already distinct by =:=) into a single expected type, + // so that we can use it to as the expected type to drive parameter type inference for a function literal argument. + private lazy val protoTp = { + val ABORT = (NoType, false, false) + + // we also consider any function-ish type equal as long as the argument types are + def sameHOArgTypes(tp1: Type, tp2: Type) = tp1 == WildcardType || { + val hoArgTypes1 = functionOrPfOrSamArgTypes(tp1.resultType) + // println(s"sameHOArgTypes($tp1, $tp2) --> $hoArgTypes1 === $hoArgTypes2 : $res") + !hoArgTypes1.isEmpty && hoArgTypes1.corresponds(functionOrPfOrSamArgTypes(tp2.resultType))(same) + } + + // TODO: compute functionOrPfOrSamArgTypes during fold? 
+ val (sameHoArgTypesFolded, partialFun, regularFun) = + sameTypesFolded.foldLeft((WildcardType: Type, false, false)) { + case (ABORT, _) => ABORT + case ((acc, partialFun, regularFun), tp) if sameHOArgTypes(acc, tp) => + val wild = toWild(tp) + (tp, partialFun || isPartialFunctionType(wild), regularFun || isFunctionType(wild)) + case _ => ABORT // different HO argument types encountered + } + + if ((sameHoArgTypesFolded eq WildcardType) || (sameHoArgTypesFolded eq NoType)) WildcardType + else functionOrPfOrSamArgTypes(toWild(sameHoArgTypesFolded)) match { + case Nil => + // Ok, it's not a function proto, but we did collapse to only one type -- why not use that as our expected type? + // we exclude constructors because a polymorphic class's type params are not represented as part of the constructor method's type, and thus toWild won't work + sameTypesFolded match { + case onlyType :: Nil => + // println(s"collapsed argument types at index $argIdx to ${toWild(onlyType)} for ${alternatives map (alt => (alt, pre memberType alt))} ") + toWild(onlyType) + case _ => WildcardType + } + case hofArgs => + if (partialFun) appliedType(PartialFunctionClass, hofArgs :+ WildcardType) + else if (regularFun) functionType(hofArgs, WildcardType) + // if we saw a variety of SAMs, can't collapse them -- what if they were accidental sams and we're not going to supply a function literal? + else if (sameTypesFolded.lengthCompare(1) == 0) toWild(sameTypesFolded.head) + else WildcardType + } + } + } + /** An object representing a non-existing type */ case object NoType extends Type { override def isTrivial: Boolean = true @@ -1223,7 +1419,7 @@ trait Types * Cannot be created directly; one should always use `singleType` for creation. 
*/ abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType with SingleTypeApi { - private var trivial: ThreeValue = UNKNOWN + private[this] var trivial: ThreeValue = UNKNOWN override def isTrivial: Boolean = { if (trivial == UNKNOWN) trivial = fromBoolean(pre.isTrivial) toBoolean(trivial) @@ -1270,6 +1466,15 @@ trait Types else pre.prefixString + sym.nameString + "." ) override def kind = "SingleType" + override def mapOver(map: TypeMap): Type = { + if (sym.isPackageClass) this // short path + else { + val pre1 = map(pre) + if (pre1 eq pre) this + else singleType(pre1, sym) + } + } + override def foldOver(folder: TypeFolder): Unit = folder(pre) } final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym) @@ -1280,20 +1485,23 @@ trait Types } } - protected def defineUnderlyingOfSingleType(tpe: SingleType) = { + protected def defineUnderlyingOfSingleType(tpe: SingleType): Unit = { val period = tpe.underlyingPeriod if (period != currentPeriod) { tpe.underlyingPeriod = currentPeriod if (!isValid(period)) { // [Eugene to Paul] needs review - tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType + tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else { + val result = tpe.pre.memberType(tpe.sym).resultType + if (isScalaRepeatedParamType(result)) repeatedToSeq(result) else result + } assert(tpe.underlyingCache ne tpe, tpe) } } } abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType with SuperTypeApi { - private var trivial: ThreeValue = UNKNOWN + private[this] var trivial: ThreeValue = UNKNOWN override def isTrivial: Boolean = { if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial) toBoolean(trivial) @@ -1304,6 +1512,16 @@ trait Types override def prefixString = thistpe.prefixString.replaceAll("""\bthis\.$""", "super.") override def narrow: Type = thistpe.narrow override 
def kind = "SuperType" + override def mapOver(map: TypeMap): Type = { + val thistp1 = map(thistpe) + val supertp1 = map(supertpe) + if ((thistp1 eq thistpe) && (supertp1 eq supertpe)) this + else SuperType(thistp1, supertp1) + } + override def foldOver(folder: TypeFolder): Unit = { + folder(thistpe) + folder(supertpe) + } } final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp) @@ -1327,8 +1545,8 @@ trait Types case TypeBounds(_, _) => that <:< this case _ => lo <:< that && that <:< hi } - def emptyLowerBound = TypeBounds.isEmptyLower(lo) - def emptyUpperBound = TypeBounds.isEmptyUpper(hi) + private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard + private def emptyUpperBound = typeIsAnyOrJavaObject(hi) || hi.isWildcard def isEmptyBounds = emptyLowerBound && emptyUpperBound override def safeToString = scalaNotation(_.toString) @@ -1336,19 +1554,27 @@ trait Types /** Bounds notation used in Scala syntax. * For example +This <: scala.collection.generic.Sorted[K,This]. */ - private[internal] def scalaNotation(typeString: Type => String): String = { + private[internal] def scalaNotation(typeString: Type => String): String = (if (emptyLowerBound) "" else " >: " + typeString(lo)) + (if (emptyUpperBound) "" else " <: " + typeString(hi)) - } - /** Bounds notation used in http://adriaanm.github.com/files/higher.pdf. + /** Bounds notation used in https://adriaanm.github.com/files/higher.pdf. * For example *(scala.collection.generic.Sorted[K,This]). 
*/ - private[internal] def starNotation(typeString: Type => String): String = { + private[internal] def starNotation(typeString: Type => String): String = if (emptyLowerBound && emptyUpperBound) "" - else if (emptyLowerBound) "(" + typeString(hi) + ")" - else "(%s, %s)" format (typeString(lo), typeString(hi)) - } + else if (emptyLowerBound) s"(${typeString(hi)})" + else s"(${typeString(lo)}, ${typeString(hi)})" override def kind = "TypeBoundsType" + override def mapOver(map: TypeMap): Type = { + val lo1 = map match { + case vtm: VariancedTypeMap => vtm.flipped(vtm(lo)) + case _ => map(lo) + } + val hi1 = map(hi) + if ((lo1 eq lo) && (hi1 eq hi)) this + else TypeBounds(lo1, hi1) + } + override def foldOver(folder: TypeFolder): Unit = { folder(lo); folder(hi) } } final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) @@ -1360,8 +1586,6 @@ trait Types def apply(lo: Type, hi: Type): TypeBounds = { unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds] } - def isEmptyUpper(hi: Type): Boolean = hi.isAny || hi.isWildcard - def isEmptyLower(lo: Type): Boolean = lo.isNothing || lo.isWildcard } object CompoundType { @@ -1380,7 +1604,7 @@ trait Types private[reflect] var baseTypeSeqPeriod = NoPeriod private[reflect] var baseClassesCache: List[Symbol] = _ private[reflect] var baseClassesPeriod = NoPeriod - private[Types] def invalidatedCompoundTypeCaches() { + private[Types] def invalidatedCompoundTypeCaches(): Unit = { baseTypeSeqCache = null baseTypeSeqPeriod = NoPeriod baseClassesCache = null @@ -1501,7 +1725,7 @@ trait Types val toOrigin = appliedType(tv.origin.typeSymbol.typeConstructor, tv.typeArgs.mapConserve(this)) tvarFor(toOrigin) = tv toOrigin - case _ => mapOver(tp) + case _ => tp.mapOver(this) } } // computes tvarFor @@ -1511,7 +1735,7 @@ trait Types val paramToVarMap = tvarFor.toMap // capture the map so we can undo the rewrite when the BTS is queried later def apply(tp: Type): Type = tp match { case tr: TypeRef => 
paramToVarMap.getOrElse(tr, mapOver(tp)) - case _ => mapOver(tp) + case _ => tp.mapOver(this) } } @@ -1540,12 +1764,12 @@ trait Types } } } - //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG + //Console.println(s"baseTypeSeq(${tpe.typeSymbol}) = ${tpe.baseTypeSeqCache.toList}") //DEBUG if (tpe.baseTypeSeqCache eq undetBaseTypeSeq) - throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol) + throw new TypeError(s"illegal cyclic inheritance involving ${tpe.typeSymbol}") } - protected def defineBaseClassesOfCompoundType(tpe: CompoundType) { + protected def defineBaseClassesOfCompoundType(tpe: CompoundType): Unit = { val period = tpe.baseClassesPeriod if (period != currentPeriod) { tpe.baseClassesPeriod = currentPeriod @@ -1572,7 +1796,7 @@ trait Types case class RefinedType(override val parents: List[Type], override val decls: Scope) extends CompoundType with RefinedTypeApi { override def isHigherKinded = ( - parents.nonEmpty && + !parents.isEmpty && (parents.forall(_.isHigherKinded)) && !phase.erasedTypes ) @@ -1591,20 +1815,20 @@ trait Types normalized } - private var normalized: Type = _ + private[this] var normalized: Type = _ private def normalizeImpl = { // TODO see comments around def intersectionType and def merge // scala/bug#8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala val flattened: LinkedHashSet[Type] = LinkedHashSet.empty[Type] def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp def loop(tp: Type): Unit = dealiasRefinement(tp) match { - case RefinedType(parents, ds) if ds.isEmpty => parents.foreach(loop) - case tp => flattened.add(tp) + case RefinedType(parents, decls) if decls.isEmpty => parents.foreach(loop) + case tp1 => flattened.add(tp1) } parents foreach loop if (decls.isEmpty && flattened.size == 1) { flattened.head - } else if (!flattened.sameElements(parents)) { + } else if 
(!flattened.iterator.sameElements(parents)) { refinedType(flattened.toList, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition) } else if (isHigherKinded) { etaExpand @@ -1629,6 +1853,15 @@ trait Types } override def kind = "RefinedType" + override def mapOver(map: TypeMap): Type = { + val parents1 = parents mapConserve map + val decls1 = map.mapOver(decls) + copyRefinedType(this, parents1, decls1) + } + override def foldOver(folder: TypeFolder): Unit = { + parents.foreach(folder) + folder.foldOver(decls) + } } final class RefinedType0(parents: List[Type], decls: Scope, clazz: Symbol) extends RefinedType(parents, decls) { @@ -1641,7 +1874,7 @@ trait Types } /** Overridden in reflection compiler */ - def validateClassInfo(tp: ClassInfoType) {} + def validateClassInfo(tp: ClassInfoType): Unit = {} /** A class representing a class info */ @@ -1661,7 +1894,7 @@ trait Types private final val Initializing = 1 private final val Initialized = 2 - private type RefMap = Map[Symbol, immutable.Set[Symbol]] + private type RefMap = Map[Symbol, Set[Symbol]] /** All type parameters reachable from given type parameter * by a path which contains at least one expansive reference. @@ -1685,7 +1918,7 @@ trait Types * it is accessed only from expansiveRefs, which is called only from * Typer. */ - private var refs: Array[RefMap] = _ + private[this] var refs: Array[RefMap] = _ /** The initialization state of the class: UnInitialized --> Initializing --> Initialized * Syncnote: This var need not be protected with synchronized, because @@ -1706,7 +1939,7 @@ trait Types /** Augment existing refs map with reference
    from -> to
    * @param which <- {NonExpansive, Expansive} */ - private def addRef(which: Int, from: Symbol, to: Symbol) { + private def addRef(which: Int, from: Symbol, to: Symbol): Unit = { refs(which) = refs(which) + (from -> (getRefs(which, from) + to)) } @@ -1714,12 +1947,13 @@ trait Types * all elements
    sym
    of set `to`. * @param which <- {NonExpansive, Expansive} */ - private def addRefs(which: Int, from: Symbol, to: Set[Symbol]) { + private def addRefs(which: Int, from: Symbol, to: Set[Symbol]): Unit = { refs(which) = refs(which) + (from -> (getRefs(which, from) ++ to)) } /** The ClassInfoType which belongs to the class containing given type parameter */ + @tailrec private def classInfo(tparam: Symbol): ClassInfoType = tparam.owner.info.resultType match { case ci: ClassInfoType => ci @@ -1727,12 +1961,13 @@ trait Types // (this can happen only for erroneous programs). } + // TODO should we pull this out to reduce memory footprint of ClassInfoType? private object enterRefs extends TypeMap { - private var tparam: Symbol = _ + private[this] var tparam: Symbol = _ def apply(tp: Type): Type = { tp match { - case tr @ TypeRef(_, sym, args) if args.nonEmpty => + case tr @ TypeRef(_, sym, args) if !args.isEmpty => val tparams = tr.initializedTypeParams devWarningIf(!sameLength(tparams, args)) { s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args" @@ -1747,9 +1982,9 @@ trait Types } case _ => } - mapOver(tp) + tp.mapOver(this) } - def enter(tparam0: Symbol, parent: Type) { + def enter(tparam0: Symbol, parent: Type): Unit = { this.tparam = tparam0 this(parent) } @@ -1757,7 +1992,7 @@ trait Types /** Compute initial (one-step) references and set state to `Initializing`. */ - private def computeRefs() { + private def computeRefs(): Unit = { refs = Array(Map(), Map()) typeSymbol.typeParams foreach { tparam => parents foreach { p => @@ -1812,24 +2047,56 @@ trait Types class PackageClassInfoType(decls: Scope, clazz: Symbol) extends ClassInfoType(List(), decls, clazz) - /** A class representing a constant type. + /** A class representing a constant type. A constant type is either the inferred type of a constant + * value or an explicit or inferred literal type. 
Both may be constant folded at the type level, + * however literal types are not folded at the term level and do not elide effects. */ - abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi { - override def underlying: Type = value.tpe - assert(underlying.typeSymbol != UnitClass) + abstract class ConstantType extends SingletonType with ConstantTypeApi { + //assert(underlying.typeSymbol != UnitClass) + val value: Constant + override def isTrivial: Boolean = true - override def deconst: Type = underlying.deconst - override def safeToString: String = - underlying.toString + "(" + value.escapedStringValue + ")" override def kind = "ConstantType" } - final class UniqueConstantType(value: Constant) extends ConstantType(value) - object ConstantType extends ConstantTypeExtractor { + def apply(c: Constant): ConstantType = FoldableConstantType(c) + def unapply(tpe: ConstantType): Some[Constant] = Some(tpe.value) + } + + /** A class representing the inferred type of a constant value. Constant types and their + * corresponding terms are constant-folded during type checking. To avoid constant folding, use + * the type returned by `deconst` instead. + */ + abstract case class FoldableConstantType(value: Constant) extends ConstantType { + override def underlying: Type = + if (value.isSuitableLiteralType) LiteralType(value) else value.tpe + override def deconst: Type = underlying.deconst + override def safeToString: String = underlying.widen.toString + "(" + value.escapedStringValue + ")" + } + + final class UniqueConstantType(value: Constant) extends FoldableConstantType(value) + + object FoldableConstantType { def apply(value: Constant) = unique(new UniqueConstantType(value)) } + /** A class representing an explicit or inferred literal type. Literal types may be folded at + * the type level during type checking, however they will not be folded at the term level and + * effects will not be elided. 
+ */ + abstract case class LiteralType(value: Constant) extends ConstantType { + override def underlying: Type = value.tpe + override def deconst: Type = this + override def safeToString: String = value.escapedStringValue + } + + final class UniqueLiteralType(value: Constant) extends LiteralType(value) + + object LiteralType { + def apply(value: Constant) = unique(new UniqueLiteralType(value)) + } + class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) { require(args0 ne Nil, this) @@ -1856,14 +2123,16 @@ trait Types super.invalidateTypeRefCaches() narrowedCache = null } - override def forceDirectSuperclasses: Unit = + override def forceDirectSuperclasses() = sym0.rawInfo.decls.foreach { decl => - if(decl.isModule || !decl.isTerm) decl.rawInfo.forceDirectSuperclasses + if (decl.isModule || !decl.isTerm) decl.rawInfo.forceDirectSuperclasses() } override protected def finishPrefix(rest: String) = objectPrefix + rest override def directObjectString = super.safeToString override def toLongString = toString - override def safeToString = prefixString + "type" + override def safeToString = + if (sym.isOmittablePrefix) s"${if (sym.isPackageObjectOrClass || sym.isJavaDefined) sym.fullNameString else sym.nameString}.type" + else s"${prefixString}type" override def prefixString = if (sym.isOmittablePrefix) "" else prefix.prefixString + sym.nameString + "." } class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) { @@ -1916,9 +2185,9 @@ trait Types * several times. Hence, no need to protected with synchronized in a multi-threaded * usage scenario. 
*/ - private var relativeInfoCache: Type = _ - private var relativeInfoCacheValidForPeriod: Period = NoPeriod - private var relativeInfoCacheValidForSymInfo: Type = _ + private[this] var relativeInfoCache: Type = _ + private[this] var relativeInfoCacheValidForPeriod: Period = NoPeriod + private[this] var relativeInfoCacheValidForSymInfo: Type = _ override private[Types] def invalidateTypeRefCaches(): Unit = { super.invalidateTypeRefCaches() @@ -1960,6 +2229,10 @@ trait Types override def termSymbol = if (this ne normalize) normalize.termSymbol else super.termSymbol override def typeSymbol = if (this ne normalize) normalize.typeSymbol else sym + // Avoid calling super.isError when we're a type constructor, as that will eta-expand, which can cause spurious cycles, + // without resulting in additional information about our error state in any case + override def isError: Boolean = sym.isError || !isHigherKinded && super.isError + override protected[Types] def parentsImpl: List[Type] = normalize.parents map relativize // `baseClasses` is sensitive to type args when referencing type members @@ -1980,7 +2253,13 @@ trait Types // beta-reduce, but don't do partial application -- cycles have been checked in typeRef override protected def normalizeImpl = - if (typeParamsMatchArgs) betaReduce.normalize + if (typeParamsMatchArgs){ + val br = betaReduce + if (br ne this) + br.normalize + else + throw new MalformedType(pre, sym.nameString) + } else if (isHigherKinded) super.normalizeImpl else { // if we are overriding a type alias in an erroneous way, don't just @@ -2066,12 +2345,31 @@ trait Types /** A class for named types of the form * `.[args]` * Cannot be created directly; one should always use `typeRef` - * for creation. (@M: Otherwise hashing breaks) + * for creation. 
(\@M: Otherwise hashing breaks) * - * @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty + * \@M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty */ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends UniqueType with TypeRefApi { - private var trivial: ThreeValue = UNKNOWN + override def mapOver(map: TypeMap): Type = { + val pre1 = map(pre) + val args1 = map match { + case map: VariancedTypeMap if !args.isEmpty && ! map.variance.isInvariant => + val tparams = sym.typeParams + if (tparams.isEmpty) + args mapConserve map + else + map.mapOverArgs(args, tparams) + case _ => + args mapConserve map + } + if ((pre1 eq pre) && (args1 eq args)) this + else copyTypeRef(this, pre1, this.coevolveSym(pre1), args1) + } + override def foldOver(folder: TypeFolder): Unit = { + folder(pre) + args.foreach(folder) + } + private[this] var trivial: ThreeValue = UNKNOWN override def isTrivial: Boolean = { if (trivial == UNKNOWN) trivial = fromBoolean(!sym.isTypeParameter && pre.isTrivial && areTrivialTypes(args)) @@ -2083,7 +2381,7 @@ trait Types override def isShowAsInfixType: Boolean = hasLength(args, 2) && sym.getAnnotation(ShowAsInfixAnnotationClass) - .map(_ booleanArg 0 getOrElse true) + .map(_.booleanArg(0).getOrElse(true)) .getOrElse(!Character.isUnicodeIdentifierStart(sym.decodedName.head)) private[Types] def invalidateTypeRefCaches(): Unit = { @@ -2097,7 +2395,7 @@ trait Types private[reflect] var parentsPeriod = NoPeriod private[reflect] var baseTypeSeqCache: BaseTypeSeq = _ private[reflect] var baseTypeSeqPeriod = NoPeriod - @volatile private var normalized: Type = _ + @volatile private[this] var normalized: Type = _ //OPT specialize hashCode override final def computeHashCode = { @@ -2119,14 +2417,18 @@ trait Types if (this eq other.asInstanceOf[AnyRef]) true else other match { case otherTypeRef: TypeRef => - Objects.equals(pre, otherTypeRef.pre) && 
sym.eq(otherTypeRef.sym) && sameElementsEquals(args, otherTypeRef.args) + Objects.equals(pre, otherTypeRef.pre) && + sym.eq(otherTypeRef.sym) && + sameElementsEquals(args, otherTypeRef.args) && + // `ObjectTpeJavaRef` is not structurally equal to `ObjectTpe` -- they should not be collapsed by `unique` + !(this.isInstanceOf[ObjectTpeJavaRef] || otherTypeRef.isInstanceOf[ObjectTpeJavaRef]) case _ => false } } // interpret symbol's info in terms of the type's prefix and type args - protected def relativeInfo: Type = appliedType(sym.info.asSeenFrom(pre, sym.owner), argsOrDummies) + protected def relativeInfo: Type = appliedType(sym.info.asSeenFrom(pre, sym.owner), args) // @M: propagate actual type params (args) to `tp`, by replacing // formal type parameters with actual ones. If tp is higher kinded, @@ -2227,10 +2529,9 @@ trait Types } // TODO: test case that is compiled in a specific order and in different runs - private[Types] final def defineNormalized: Unit = { - if (normalized eq null) // In runtime reflection, this null check is part of double-checked locking - normalized = normalizeImpl - } + private[Types] final def defineNormalized() : Unit = + // In runtime reflection, this null check is part of double-checked locking + if (normalized eq null) normalized = normalizeImpl override def isGround = ( sym.isPackageClass @@ -2241,7 +2542,25 @@ trait Types // must initialise symbol, see test/files/pos/ticket0137.scala val tpars = initializedTypeParams if (tpars.isEmpty) this - else typeFunAnon(tpars, copyTypeRef(this, pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce? + else { + // It's not clear which owner we should use (we don't know the context we're in), + // but pos/t10762 shows it can't be the class (`sym`) that owns the type params, + // as that will confuse ASF during separate compilation. 
+ // + // During pickling, a pickle-local symbol (the type param) that has a non-pickle-local owner (the class), + // will get a new owner (the pickle root, a class) assigned to it by localizedOwner. + // This causes spurious recompilation, as well as confusion in ASF. + // Thus, use a pickle-local term symbol owner and avoid this whole owner-rejiggering. + val pickleLocalOwner = sym.newLocalDummy(sym.pos) + + // Since we're going to lose the information denoted by the prefix when pulling the type params + // out for use as binders in the PolyType, we must eagerly rewrite their infos using relativize + // to preserve that knowledge. + val denotedLocallyOwnedTpars = cloneSymbolsAtOwnerAndModify(tpars, pickleLocalOwner, relativize) + + // @PP: use typeConstructor! #3343, #4018, #4347. + PolyType(denotedLocallyOwnedTpars, TypeRef(pre, sym, denotedLocallyOwnedTpars map (_.typeConstructor))) + } } // only need to rebind type aliases, as typeRef already handles abstract types @@ -2254,8 +2573,8 @@ trait Types override def baseTypeSeqDepth = baseTypeSeq.maxDepth override def prefix = pre + override def prefixDirect = pre override def termSymbol = super.termSymbol - override def termSymbolDirect = super.termSymbol override def typeArgs = args override def typeOfThis = relativize(sym.typeOfThis) override def typeSymbol = sym @@ -2317,7 +2636,7 @@ trait Types private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic) private def refinementString = ( if (sym.isStructuralRefinement) - refinementDecls map (_.defString) mkString("{", "; ", "}") + refinementDecls.map(_.defString).mkString("{", "; ", "}") else "" ) protected def finishPrefix(rest: String) = ( @@ -2338,7 +2657,7 @@ trait Types * Therefore, if op is left associative, anything on its right * needs to be parenthesized if it's an infix type, and vice versa. 
*/ // we should only get here after `isShowInfixType` says we have 2 args - val l :: r :: Nil = args + val l :: r :: Nil = args: @unchecked val isRightAssoc = typeSymbol.decodedName endsWith ":" @@ -2349,10 +2668,9 @@ trait Types s"$lstr ${sym.decodedName} $rstr" } private def customToString = sym match { - case RepeatedParamClass | JavaRepeatedParamClass => args.head + "*" - case ByNameParamClass => "=> " + args.head - case _ => - if (isFunctionTypeDirect(this)) { + case RepeatedParamClass | JavaRepeatedParamClass => args.head.toString + "*" + case ByNameParamClass if !args.isEmpty => "=> " + args.head + case _ if isFunctionTypeDirect(this) => // Aesthetics: printing Function1 as T => R rather than (T) => R // ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable // from (T1, T2) => R. @@ -2370,15 +2688,11 @@ trait Types case xs => xs.init.mkString("(", ", ", ")") + " => " + xs.last } - } - else if (isShowAsInfixType) - infixTypeString - else if (isTupleTypeDirect(this)) - tupleTypeString - else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne dealias)) - "" + dealias - else - "" + case _ if isShowAsInfixType => infixTypeString + case _ if isTupleTypeDirect(this) => tupleTypeString + case _ if sym.isAliasType && (this ne dealias) && prefixChain.exists(_.termSymbol.isSynthetic) + => "" + dealias + case _ => "" } override def safeToString = { val custom = if (settings.isDebug) "" else customToString @@ -2393,12 +2707,13 @@ trait Types else if (sym.isPackageClass || sym.isPackageObjectOrClass) sym.skipPackageObject.fullName + "." else if (isStable && nme.isSingletonName(sym.name)) - tpnme.dropSingletonName(sym.name) + "." + tpnme.dropSingletonName(sym.name).toString + "." else super.prefixString ) // Suppressing case class copy method which risks subverting our single point of creation. 
- private def copy = null + @deprecated("Suppressing case class copy method", since="forever") + @unused private def copy = null override def kind = "TypeRef" } @@ -2408,7 +2723,21 @@ trait Types private final class ClassArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) private final class AliasNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AliasTypeRef private final class AbstractNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AbstractTypeRef - private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) + private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) { + override def contains(sym0: Symbol): Boolean = (sym eq sym0) || pre.contains(sym0) + } + + /** Expose ObjectTpeJavaRef so we can create a non-uniqued ObjectTpeJava + * (using a type test rather than `eq`, which causes cycles). + * + * NOTE: + * - definitions.ObjectTpe is forced first, so that it ends up in the unique cache. 
+ * - the created TypeRef is structurally equal to ObjectTpe, but with its own identity + * - we don't want the TypeRef we create here to be unique'd + */ + private[internal] final class ObjectTpeJavaRef extends NoArgsTypeRef(definitions.ObjectTpe.prefix, definitions.ObjectClass) { + override def contains(sym0: Symbol): Boolean = (sym eq sym0) || pre.contains(sym0) + } object TypeRef extends TypeRefExtractor { def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({ @@ -2429,7 +2758,7 @@ trait Types } protected def defineNormalized(tr: TypeRef): Unit = { - tr.defineNormalized + tr.defineNormalized() } protected def defineParentsOfTypeRef(tpe: TypeRef) = { @@ -2459,8 +2788,9 @@ trait Types } } } + //Console.println(s"baseTypeSeq(${tpe.typeSymbol}) = ${tpe.baseTypeSeqCache.toList}") //DEBUG if (tpe.baseTypeSeqCache == undetBaseTypeSeq) - throw new TypeError("illegal cyclic inheritance involving " + tpe.sym) + throw new TypeError(s"illegal cyclic inheritance involving ${tpe.sym}") } /** A class representing a method type with parameters. @@ -2472,7 +2802,7 @@ trait Types case class MethodType(override val params: List[Symbol], override val resultType: Type) extends Type with MethodTypeApi { - private var trivial: ThreeValue = UNKNOWN + private[this] var trivial: ThreeValue = UNKNOWN override def isTrivial: Boolean = { if (trivial == UNKNOWN) trivial = fromBoolean(isTrivialResult && areTrivialParams) toBoolean(trivial) @@ -2486,7 +2816,7 @@ trait Types val len = params.length val paramsTpes: Array[Type] = new Array[Type](len) - // returns the result of ```params.forall(_.tpe.isTrivial))``` + // returns the result of `params.forall(_.tpe.isTrivial))` // along the way, it loads each param' tpe into array def forallIsTrivial: Boolean = { var res = true @@ -2531,7 +2861,6 @@ trait Types } def isImplicit = (params ne Nil) && params.head.isImplicit - def isJava = false // can we do something like for implicits? I.e. 
do Java methods without parameters need to be recognized? override def paramSectionCount: Int = resultType.paramSectionCount + 1 @@ -2539,19 +2868,23 @@ trait Types override def paramTypes = mapList(params)(_.tpe) // OPT use mapList rather than .map - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms = resultType.boundSyms ++ params + final def resultTypeOwnParamTypes: Type = + if (isTrivial || phase.erasedTypes) resultType + else resultType0(paramTypes) override def resultType(actuals: List[Type]) = if (isTrivial || phase.erasedTypes) resultType - else if (/*isDependentMethodType &&*/ sameLength(actuals, params)) { + else resultType0(actuals) + + private def resultType0(actuals: List[Type]): Type = + if (/*isDependentMethodType &&*/ sameLength(actuals, params)) { val idm = new InstantiateDependentMap(params, actuals) - val res = idm(resultType) + val res = idm(resultType).deconst existentialAbstraction(idm.existentialsNeeded, res) } else existentialAbstraction(params, resultType) - private var isdepmeth: ThreeValue = UNKNOWN + private[this] var isdepmeth: ThreeValue = UNKNOWN override def isDependentMethodType: Boolean = { if (isdepmeth == UNKNOWN) isdepmeth = fromBoolean(IsDependentCollector.collect(resultType.dealias)) toBoolean(isdepmeth) @@ -2561,7 +2894,12 @@ trait Types //TODO this may be generalised so that the only constraint is dependencies are acyclic def approximate: MethodType = MethodType(params, resultApprox) - override def safeToString = paramString(this) + resultType + //Format (a: A)(b: B)(implicit c: C, d: D): E + override def safeToString = { + s"${paramString(this)}${ + resultType match { case _: MethodType => "" case _ => ": "} + }$resultType" + } override def cloneInfo(owner: Symbol) = { val vparams = cloneSymbolsAtOwner(params, owner) @@ -2575,16 +2913,27 @@ trait Types this override def kind = "MethodType" + override def mapOver(map: TypeMap): Type = { + val params1 = map match { + case 
vtm: VariancedTypeMap => vtm.flipped(vtm.mapOver(params)) + case _ => map.mapOver(params) + } + val result1 = map(resultType) + if ((params1 eq params) && (result1 eq resultType)) this + else copyMethodType(this, params1, result1.substSym(params, params1)) + } + override def foldOver(folder: TypeFolder): Unit = { + folder.foldOver(params) + folder(resultType) + } } object MethodType extends MethodTypeExtractor - class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt) { - override def isJava = true - } - - // TODO: rename so it's more appropriate for the type that is for a method without argument lists - // ("nullary" erroneously implies it has an argument list with zero arguments, it actually has zero argument lists) + /** A method without parameter lists. + * + * Note: a MethodType with paramss that is a ListOfNil is called "nilary", to disambiguate. + */ case class NullaryMethodType(override val resultType: Type) extends Type with NullaryMethodTypeApi { override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations) override def prefix: Type = resultType.prefix @@ -2597,10 +2946,14 @@ trait Types override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth override def baseClasses: List[Symbol] = resultType.baseClasses override def baseType(clazz: Symbol): Type = resultType.baseType(clazz) - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms = resultType.boundSyms - override def safeToString: String = "=> "+ resultType + override def safeToString: String = resultType.toString override def kind = "NullaryMethodType" + override def mapOver(map: TypeMap): Type = { + val result1 = map(resultType) + if (result1 eq resultType) this + else NullaryMethodType(result1) + } + override def foldOver(folder: TypeFolder): Unit = folder(resultType) } object NullaryMethodType extends NullaryMethodTypeExtractor @@ -2620,7 +2973,7 @@ trait Types case class PolyType(override 
val typeParams: List[Symbol], override val resultType: Type) extends Type with PolyTypeApi { //assert(!(typeParams contains NoSymbol), this) - assert(typeParams.nonEmpty, this) // used to be a marker for nullary method type, illegal now (see @NullaryMethodType) + assert(!typeParams.isEmpty, this) // used to be a marker for nullary method type, illegal now (see @NullaryMethodType) override def paramSectionCount: Int = resultType.paramSectionCount override def paramss: List[List[Symbol]] = resultType.paramss @@ -2630,8 +2983,6 @@ trait Types override def decls: Scope = resultType.decls override def termSymbol: Symbol = resultType.termSymbol override def typeSymbol: Symbol = resultType.typeSymbol - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms = immutable.Set[Symbol](typeParams ++ resultType.boundSyms: _*) override def prefix: Type = resultType.prefix override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth @@ -2665,13 +3016,28 @@ trait Types this override def kind = "PolyType" + override def mapOver(map: TypeMap): Type = { + val tparams1 = map match { + case vtm: VariancedTypeMap => vtm.flipped(vtm.mapOver(typeParams)) + case _ => map.mapOver(typeParams) + } + val result1 = map(resultType) + if ((tparams1 eq typeParams) && (result1 eq resultType)) this + else PolyType(tparams1, result1.substSym(typeParams, tparams1)) + } + override def foldOver(folder: TypeFolder): Unit = { + folder.foldOver(typeParams) + folder(resultType) + } + } object PolyType extends PolyTypeExtractor /** A creator for existential types which flattens nested existentials. 
*/ - def newExistentialType(quantified: List[Symbol], underlying: Type): Type = + @tailrec + final def newExistentialType(quantified: List[Symbol], underlying: Type): Type = if (quantified.isEmpty) underlying else underlying match { case ExistentialType(qs, restpe) => newExistentialType(quantified ::: qs, restpe) @@ -2684,12 +3050,10 @@ trait Types override protected def rewrap(newtp: Type) = existentialAbstraction(quantified, newtp) override def isTrivial = false + override def bounds = TypeBounds(lowerBound, upperBound) override def lowerBound = maybeRewrap(underlying.lowerBound) override def upperBound = maybeRewrap(underlying.upperBound) - override def parents = underlying.parents map maybeRewrap - @deprecated("No longer used in the compiler implementation", since = "2.12.3") - override def boundSyms = quantified.toSet override def prefix = maybeRewrap(underlying.prefix) override def typeArgs = underlying.typeArgs map maybeRewrap override def params = underlying.params mapConserve { param => @@ -2780,8 +3144,8 @@ trait Types } private def existentialClauses = { - val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }") - if (settings.explaintypes) "(" + str + ")" else str + val str = quantified.map(_.existentialToString).mkString(" forSome { ", "; ", " }") + if (settings.explaintypes.value) "(" + str + ")" else str } /** An existential can only be printed with wildcards if: @@ -2808,18 +3172,18 @@ trait Types !(qset contains sym) && !isQuantified(pre) case _ => false - } + } } - override def safeToString: String = { - underlying match { - case TypeRef(pre, sym, args) if !settings.isDebug && isRepresentableWithWildcards => - "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]") - case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => - "(" + underlying + ")" + existentialClauses - case _ => - "" + underlying + existentialClauses - } + override def safeToString: String = underlying 
match { + case TypeRef(pre, sym, args) if !settings.isDebug && isRepresentableWithWildcards => + val ref = typeRef(pre, sym, Nil).toString + val wildcards = wildcardArgsString(quantified.toSet, args) + if (wildcards.isEmpty) ref else ref + wildcards.mkString("[", ", ", "]") + case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => + "(" + underlying + ")" + existentialClauses + case _ => + underlying.toString + existentialClauses } override def cloneInfo(owner: Symbol) = @@ -2841,6 +3205,16 @@ trait Types isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.inst)) } } + override def mapOver(map: TypeMap): Type = { + val quantified1 = map.mapOver(quantified) + val underlying1 = map(underlying) + if ((quantified1 eq quantified) && (underlying1 eq underlying)) this + else newExistentialType(quantified1, underlying1.substSym(quantified, quantified1)) + } + override def foldOver(folder: TypeFolder): Unit = { + folder.foldOver(quantified) + folder(underlying) + } } object ExistentialType extends ExistentialTypeExtractor @@ -2853,6 +3227,13 @@ trait Types override def safeToString = (alternatives map pre.memberType).mkString("", " ", "") override def kind = "OverloadedType" + override def mapOver(map: TypeMap): Type = { + val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else map(pre) + if (pre1 eq pre) this + else OverloadedType(pre1, alternatives) + } + override def foldOver(folder: TypeFolder): Unit = + if (! pre.isInstanceOf[ClassInfoType]) folder(pre) } /** The canonical creator for OverloadedTypes. 
@@ -2877,6 +3258,16 @@ trait Types override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs) override def kind = "AntiPolyType" + override def mapOver(map: TypeMap): Type = { + val pre1 = map(pre) + val targs1 = targs mapConserve map + if ((pre1 eq pre) && (targs1 eq targs)) this + else AntiPolyType(pre1, targs1) + } + override def foldOver(folder: TypeFolder): Unit = { + folder(pre) + targs.foreach(folder) + } } object HasTypeMember { @@ -2887,9 +3278,9 @@ trait Types bound.decls enter bsym bound } - def unapply(tp: Type): Option[(TypeName, Type)] = tp match { - case RefinedType(List(WildcardType), Scope(sym)) => Some((sym.name.toTypeName, sym.info)) - case _ => None + def unapply(tp: Type): Boolean = tp match { + case RefinedType(List(WildcardType), scope) => scope.size == 1 + case _ => false } } @@ -2917,29 +3308,8 @@ trait Types value } - /** Create a new TypeConstraint based on the given symbol. - */ - private def deriveConstraint(tparam: Symbol): TypeConstraint = { - /** Must force the type parameter's info at this point - * or things don't end well for higher-order type params. - * See scala/bug#5359. - */ - val bounds = tparam.info.bounds - /* We can seed the type constraint with the type parameter - * bounds as long as the types are concrete. This should lower - * the complexity of the search even if it doesn't improve - * any results. 
- */ - if (propagateParameterBoundsToTypeVars) { - val exclude = bounds.isEmptyBounds || (bounds exists (_.typeSymbolDirect.isNonClassType)) + def precludesWidening(tp: Type) = tp.isStable || tp.typeSymbol.isSubClass(SingletonClass) - if (exclude) new TypeConstraint - else TypeVar.trace("constraint", "For " + tparam.fullLocationString)( - new TypeConstraint(bounds) - ) - } - else new TypeConstraint - } def untouchable(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = true) def apply(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = false) def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil) @@ -2969,20 +3339,27 @@ trait Types tv ) } - private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar = - createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable) + private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar = { + val constr = new TypeConstraint + if (precludesWidening(tparam.info.upperBound)) { + constr.stopWidening() + constr.addHiBound(SingletonClass.typeConstructor) // TODO: why do we need the additional hi-bound? see sip23-widen + } + + createTypeVar(tparam.typeConstructor, constr, Nil, tparam.typeParams, untouchable) + } } - /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.) + /** Precondition: !params.isEmpty. (args.nonEmpty enforced structurally.) */ class HKTypeVar( _origin: Type, _constr: TypeConstraint, override val params: List[Symbol] ) extends TypeVar(_origin, _constr) { - - require(params.nonEmpty, this) - override def isHigherKinded = true + require(!params.isEmpty, this) + override def isHigherKinded: Boolean = true + override def typeParams: List[Symbol] = params } /** Precondition: `params.length == typeArgs.length > 0` (enforced structurally). 
*/ @@ -2992,8 +3369,10 @@ trait Types override val params: List[Symbol], override val typeArgs: List[Type] ) extends TypeVar(_origin, _constr) { - require(params.nonEmpty && sameLength(params, typeArgs), this) + require(!params.isEmpty && sameLength(params, typeArgs), this) override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]") + override def setInst(tp: Type): this.type = + super.setInst(if (isSubArgs(typeArgs, tp.typeArgs, params, Depth.AnyDepth)) tp.typeConstructor else NoType) } trait UntouchableTypeVar extends TypeVar { @@ -3102,7 +3481,7 @@ trait Types this } - def addLoBound(tp: Type, isNumericBound: Boolean = false) { + def addLoBound(tp: Type, isNumericBound: Boolean = false): Unit = { assert(tp != this, tp) // implies there is a cycle somewhere (?) //println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG if (!sharesConstraints(tp)) { @@ -3111,7 +3490,7 @@ trait Types } } - def addHiBound(tp: Type, isNumericBound: Boolean = false) { + def addHiBound(tp: Type, isNumericBound: Boolean = false): Unit = { // assert(tp != this) //println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG if (!sharesConstraints(tp)) { @@ -3123,8 +3502,9 @@ trait Types // ignore subtyping&equality checks while true -- see findMember // OPT: This could be Either[TypeVar, Boolean], but this encoding was chosen instead to save allocations. - private var _suspended: Type = ConstantFalse - private[Types] def suspended: Boolean = (_suspended: @unchecked) match { + private[this] var _suspended: Type = ConstantFalse + @tailrec + private[Types] final def suspended: Boolean = (_suspended: @unchecked) match { case ConstantFalse => false case ConstantTrue => true case tv: TypeVar => tv.suspended @@ -3137,6 +3517,7 @@ trait Types */ protected final def sharesConstraints(other: Type): Boolean = other match { case other: TypeVar => constr == other.constr // scala/bug#8237 avoid cycles. 
Details in pos/t8237.scala + case PolyType(_, other: TypeVar) => constr == other.constr case _ => false } private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) ConstantTrue else ConstantFalse @@ -3154,11 +3535,10 @@ trait Types */ def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = { // println("regBound: "+(safeToString, debugString(tp), isLowerBound)) //@MDEBUG - if (isLowerBound) - assert(tp != this) + if (isLowerBound) assert(tp != this, "Lower bound of this type") // side effect: adds the type to upper or lower bounds - def addBound(tp: Type) { + def addBound(tp: Type): Unit = { if (isLowerBound) addLoBound(tp, isNumericBound) else addHiBound(tp, isNumericBound) } @@ -3185,12 +3565,14 @@ trait Types * }}} */ def unifySimple = { - val sym = tp.typeSymbol - if (sym == NothingClass || sym == AnyClass) { // kind-polymorphic - // scala/bug#7126 if we register some type alias `T=Any`, we can later end - // with malformed types like `T[T]` during type inference in - // `handlePolymorphicCall`. No such problem if we register `Any`. - addBound(sym.tpe) + // scala/bug#7126 if we register some type alias `T=Any`, we can later end + // with malformed types like `T[T]` during type inference in + // `handlePolymorphicCall`. No such problem if we register `Any`. + if (typeIsNothing(tp)) { // kind-polymorphic + addBound(NothingTpe) + true + } else if(typeIsAnyExactly(tp)) { // kind-polymorphic + addBound(AnyTpe) true } else if (params.isEmpty) { addBound(tp) @@ -3203,47 +3585,53 @@ trait Types * TC1[T1,..., TN] <: TC2[T'1,...,T'N] * }}} * Checks subtyping of higher-order type vars, and uses variances as defined in the - * type parameter we're trying to infer (the result will be sanity-checked later). + * type parameter we're trying to infer (the result will be confidence-checked later). 
*/ def unifyFull(tpe: Type): Boolean = { + def unifiableKinds(lhs: List[Symbol], rhs: List[Symbol]): Boolean = + sameLength(lhs, rhs) && !exists2(lhs, rhs)((l, r) => !unifiableKinds(l.typeParams, r.typeParams)) + def unifySpecific(tp: Type) = { val tpTypeArgs = tp.typeArgs - val arityDelta = compareLengths(typeArgs, tpTypeArgs) - if (arityDelta == 0) { - val lhs = if (isLowerBound) tpTypeArgs else typeArgs - val rhs = if (isLowerBound) typeArgs else tpTypeArgs - // This is a higher-kinded type var with same arity as tp. - // If so (see scala/bug#7517), side effect: adds the type constructor itself as a bound. - isSubArgs(lhs, rhs, params, AnyDepth) && {addBound(tp.typeConstructor); true} - } else if (settings.YpartialUnification && arityDelta < 0 && typeArgs.nonEmpty) { - // Simple algorithm as suggested by Paul Chiusano in the comments on scala/bug#2712 - // - // https://github.com/scala/bug/issues/2712#issuecomment-292374655 - // - // Treat the type constructor as curried and partially applied, we treat a prefix - // as constants and solve for the suffix. For the example in the ticket, unifying - // M[A] with Int => Int this unifies as, - // - // M[t] = [t][Int => t] --> abstract on the right to match the expected arity - // A = Int --> capture the remainder on the left - // - // A more "natural" unifier might be M[t] = [t][t => t]. There's lots of scope for - // experimenting with alternatives here. 
- val numCaptured = tpTypeArgs.length - typeArgs.length - val (captured, abstractedArgs) = tpTypeArgs.splitAt(numCaptured) - - val (lhs, rhs) = - if (isLowerBound) (abstractedArgs, typeArgs) - else (typeArgs, abstractedArgs) - - isSubArgs(lhs, rhs, params, AnyDepth) && { - val tpSym = tp.typeSymbolDirect - val abstractedTypeParams = tpSym.typeParams.drop(numCaptured).map(_.cloneSymbol(tpSym)) - - addBound(PolyType(abstractedTypeParams, appliedType(tp.typeConstructor, captured ++ abstractedTypeParams.map(_.tpeHK)))) - true - } - } else false + val numCaptured = tpTypeArgs.length - typeArgs.length + val tpSym = tp.typeSymbolDirect + val abstractedTypeParams = tpSym.typeParams.drop(numCaptured) + if(!unifiableKinds(typeSymbolDirect.typeParams, abstractedTypeParams)) false + else { + if (numCaptured == 0) { + val lhs = if (isLowerBound) tpTypeArgs else typeArgs + val rhs = if (isLowerBound) typeArgs else tpTypeArgs + // This is a higher-kinded type var with same arity as tp. + // If so (see scala/bug#7517), side effect: adds the type constructor itself as a bound. + isSubArgs(lhs, rhs, params, AnyDepth) && {addBound(tp.typeConstructor); true} + } else if (numCaptured > 0) { + // Simple algorithm as suggested by Paul Chiusano in the comments on scala/bug#2712 + // + // https://github.com/scala/bug/issues/2712#issuecomment-292374655 + // + // Treat the type constructor as curried and partially applied, we treat a prefix + // as constants and solve for the suffix. For the example in the ticket, unifying + // M[A] with Int => Int this unifies as, + // + // M[t] = [t][Int => t] --> abstract on the right to match the expected arity + // A = Int --> capture the remainder on the left + // + // A more "natural" unifier might be M[t] = [t][t => t]. There's lots of scope for + // experimenting with alternatives here. 
+ val abstractedArgs = tpTypeArgs.drop(numCaptured) + + val (lhs, rhs) = + if (isLowerBound) (abstractedArgs, typeArgs) + else (typeArgs, abstractedArgs) + + isSubArgs(lhs, rhs, params, AnyDepth) && { + val captured = tpTypeArgs.take(numCaptured) + val clonedParams = abstractedTypeParams.map(_.cloneSymbol(tpSym)) + addBound(PolyType(clonedParams, appliedType(tp.typeConstructor, captured ++ clonedParams.map(_.tpeHK)))) + true + } + } else false + } } // The type with which we can successfully unify can be hidden // behind singleton types and type aliases. @@ -3279,8 +3667,8 @@ trait Types checkSubtype(tp, origin) else if (instValid) // type var is already set checkSubtype(tp, inst) - else isRelatable(tp) && { - unifySimple || unifyFull(tp) || ( + else unrelatable(tp) match { + case Nil => unifySimple || unifyFull(tp) || ( // only look harder if our gaze is oriented toward Any isLowerBound && ( (tp.parents exists unifyFull) || ( @@ -3290,6 +3678,14 @@ trait Types ) ) ) + + case skolems => + val existential = existentialTransform(skolems, tp)(existentialAbstraction(_, _, flipVariance = !isLowerBound)) + // `isRelatable(existential)` defends against F-bounds. We've added one layer of existential abstraction to remove + // skolems that occur immediately in the underlying type `tp`. If after this transformation, the type still + // contains skolems from another level, it could be F-bounded, and we give up to avoid looping. + isRelatable(existential) && + registerBound(existential, isLowerBound = isLowerBound, isNumericBound = isNumericBound) } } @@ -3303,11 +3699,9 @@ trait Types if (suspended) tp =:= origin else if (instValid) checkIsSameType(tp) else isRelatable(tp) && { - val newInst = wildcardToTypeVarMap(tp) - (constr isWithinBounds newInst) && { - setInst(newInst) - true - } + // Calling `identityTypeMap` instantiates valid type vars (see `TypeVar.mapOver`). 
+ val newInst = identityTypeMap(tp) + constr.isWithinBounds(newInst) && setInst(newInst).instValid } } @@ -3321,9 +3715,9 @@ trait Types registerBound(HasTypeMember(sym.name.toTypeName, tp), isLowerBound = false) } - private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match { - case ts: TypeSkolem => ts.level > level - case _ => false + private def unrelatable(tp: Type): List[TypeSkolem] = { + UnrelatableCollector.barLevel = level + UnrelatableCollector.collect(tp) } @@ -3331,7 +3725,10 @@ trait Types * This is not the case if `tp` contains type skolems whose * skolemization level is higher than the level of this variable. */ - def isRelatable(tp: Type) = !(tp exists isSkolemAboveLevel) + def isRelatable(tp: Type): Boolean = { + IsRelatableCollector.barLevel = level + IsRelatableCollector.collect(tp) + } override def normalize: Type = ( if (instValid) inst @@ -3347,8 +3744,8 @@ trait Types override def typeSymbol = origin.typeSymbol private def tparamsOfSym(sym: Symbol) = sym.info match { - case PolyType(tparams, _) if tparams.nonEmpty => - tparams map (_.defString) mkString("[", ",", "]") + case PolyType(tparams, _) if !tparams.isEmpty => + tparams.map(_.defString).mkString("[", ",", "]") case _ => "" } def originName = origin.typeSymbolDirect.decodedName @@ -3365,7 +3762,7 @@ trait Types if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#" } - private def levelString = if (settings.explaintypes) level else "" + private def levelString = if (settings.explaintypes.value) level else "" override def safeToString = ( if ((constr eq null) || (inst eq null)) "TVar<" + originName + "=null>" else if (inst ne NoType) "=?" 
+ inst @@ -3382,6 +3779,22 @@ trait Types TypeVar(origin, constr.cloneInternal, typeArgs, params) ) } + + override def mapOver(map: TypeMap): Type = + if (constr.instValid) { + // ideally TypeVar.inst should handle this, + // but it would have to be disentangled from TypeVar.constr.inst + map(appliedType(constr.inst, typeArgs)) + } else map match { + case map: VariancedTypeMap => + //@M !args.isEmpty implies !typeParams.isEmpty + applyArgs(map.mapOverArgs(typeArgs, params)) + case _ => applyArgs(typeArgs mapConserve map) + } + + override def foldOver(folder: TypeFolder): Unit = + if (constr.instValid) folder(constr.inst) + else this.typeArgs.foreach(folder) } /** A type carrying some annotations. Created by the typechecker @@ -3400,7 +3813,11 @@ trait Types override def isTrivial: Boolean = underlying.isTrivial && annotations.forall(_.isTrivial) - override def safeToString = annotations.mkString(underlying + " @", " @", "") + override def safeToString = { + val wrap = isShowAsInfixType || isFunctionTypeDirect(this) + val ul = underlying.toString.pipe(s => if (wrap) s"($s)" else s) + annotations.mkString(ul + " @", " @", "") + } override def filterAnnotations(p: AnnotationInfo => Boolean): Type = { val (yes, no) = annotations partition p @@ -3417,6 +3834,9 @@ trait Types if (annots.isEmpty) this else copy(annots ::: this.annotations) + override def withAnnotation(anno: AnnotationInfo): Type = + copy(anno :: this.annotations) + /** Remove any annotations from this type. * TODO - is it allowed to nest AnnotatedTypes? If not then let's enforce * that at creation. At the moment if they do ever turn up nested this @@ -3434,6 +3854,7 @@ trait Types override def lowerBound: Type = bounds.lo override def upperBound: Type = bounds.hi + // ** Replace formal type parameter symbols with actual type arguments. 
* / override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = { val annotations1 = annotations.map(info => AnnotationInfo(info.atp.instantiateTypeParams( @@ -3454,6 +3875,17 @@ trait Types } override def kind = "AnnotatedType" + override def mapOver(map: TypeMap): Type = { + val annotations1 = map.mapOverAnnotations(annotations) + val underlying1 = map(underlying) + if ((annotations1 eq annotations) && (underlying1 eq underlying)) this + else if (annotations1.isEmpty) underlying1 + else AnnotatedType(annotations1, underlying1) + } + override def foldOver(folder: TypeFolder): Unit = { + folder.foldOverAnnotations(annotations) + folder(underlying) + } } /** Creator for AnnotatedTypes. It returns the underlying type if annotations.isEmpty @@ -3478,13 +3910,17 @@ trait Types */ case class NamedType(name: Name, tp: Type) extends Type { override def safeToString: String = name.toString +": "+ tp + // TODO is this needed? We only seem to get here in ContainsCollector in error message generation + // override def mapOver(map: TypeMap): Type = map.apply(tp) } /** As with NamedType, used only when calling isApplicable. * Records that the application has a wildcard star (aka _*) * at the end of it. */ case class RepeatedType(tp: Type) extends Type { - override def safeToString: String = tp + ": _*" + override def safeToString: String = tp.toString + ": _*" + // TODO is this needed? We only seem to get here in ContainsCollector in error message generation + // override def mapOver(map: TypeMap): Type = map.apply(tp) } /** A temporary type representing the erasure of a user-defined value type. 
@@ -3516,7 +3952,7 @@ trait Types */ abstract class LazyType extends Type { override def isComplete: Boolean = false - override def complete(sym: Symbol) + override def complete(sym: Symbol): Unit override def safeToString = "" override def kind = "LazyType" def isJavaVarargsMethod: Boolean = false @@ -3593,28 +4029,36 @@ trait Types def refinedType(parents: List[Type], owner: Symbol): Type = refinedType(parents, owner, newScope, owner.pos) - def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) = - if ((parents eq original.parents) && (decls eq original.decls)) original + private[this] val copyRefinedTypeSSM: ReusableInstance[SubstSymMap] = + ReusableInstance[SubstSymMap](SubstSymMap(), enabled = isCompilerUniverse) + + def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope, owner: Symbol = null) = + if ((parents eq original.parents) && (decls eq original.decls) && (owner eq null)) original else { - val owner = original.typeSymbol.owner + val newOwner = if (owner != null) owner else original.typeSymbol.owner val result = if (isIntersectionTypeForLazyBaseType(original)) intersectionTypeForLazyBaseType(parents) - else refinedType(parents, owner) - val syms1 = decls.toList - for (sym <- syms1) - result.decls.enter(sym.cloneSymbol(result.typeSymbol).resetFlag(OVERRIDE)) - val syms2 = result.decls.toList - val resultThis = result.typeSymbol.thisType - for (sym <- syms2) - sym modifyInfo (_ substThisAndSym(original.typeSymbol, resultThis, syms1, syms2)) - + else refinedType(parents, newOwner) + if (! 
decls.isEmpty){ + val syms1 = decls.toList + for (sym <- syms1) + result.decls.enter(sym.cloneSymbol(result.typeSymbol).resetFlag(OVERRIDE)) + val syms2 = result.decls.toList + val resultThis = result.typeSymbol.thisType + val substThisMap = new SubstThisMap(original.typeSymbol, resultThis) + copyRefinedTypeSSM.using { (msm: SubstSymMap) => + msm.reset(syms1, syms2) + syms2.foreach(_.modifyInfo(info => msm.apply(substThisMap.apply(info)))) + } + } result } /** The canonical creator for typerefs * todo: see how we can clean this up a bit */ - def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = { + @tailrec + final def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = { // type alias selections are rebound in TypeMap ("coevolved", // actually -- see #3731) e.g., when type parameters that are // referenced by the alias are instantiated in the prefix. See @@ -3650,15 +4094,8 @@ trait Types typeRef(pre, sym, args) } - /** The canonical creator for implicit method types */ - def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType = - new JavaMethodType(params, resultType) // don't unique this! - - /** Create a new MethodType of the same class as tp, i.e. keep JavaMethodType */ - def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = tp match { - case _: JavaMethodType => JavaMethodType(params, restpe) - case _ => MethodType(params, restpe) - } + /** Create a new MethodType */ + def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = MethodType(params, restpe) /** A creator for intersection type where intersections of a single type are * replaced by the type itself, and repeated parent classes are merged. 
@@ -3714,7 +4151,7 @@ trait Types if (false && isDefinitionsInitialized) { assert(isUseableAsTypeArgs(args), { val tapp_s = s"""$tycon[${args mkString ", "}]""" - val arg_s = args filterNot isUseableAsTypeArg map (t => t + "/" + t.getClass) mkString ", " + val arg_s = args filterNot isUseableAsTypeArg map (t => t.toString + "/" + t.getClass) mkString ", " s"$tapp_s includes illegal type argument $arg_s" }) } @@ -3747,24 +4184,12 @@ trait Types /** A creator and extractor for type parameterizations that strips empty type parameter lists. * Use this factory method to indicate the type has kind * (it's a polymorphic value) - * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty). - * - * PP to AM: I've co-opted this for where I know tparams may well be empty, and - * expecting to get back `tpe` in such cases. Re being "forgiving" below, - * can we instead say this is the canonical creator for polyTypes which - * may or may not be poly? (It filched the standard "canonical creator" name.) */ object GenPolyType { - def apply(tparams: List[Symbol], tpe: Type): Type = { - tpe match { - case MethodType(_, _) => - assert(tparams forall (_.isInvariant), "Trying to create a method with variant type parameters: " + ((tparams, tpe))) - case _ => - } - if (tparams.nonEmpty) typeFun(tparams, tpe) - else tpe // it's okay to be forgiving here - } - def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match { + def apply(tparams: List[Symbol], tpe: Type): Type = + if (tparams.isEmpty) tpe else PolyType(tparams, tpe) + + def unapply(tpe: Type): Some[(List[Symbol], Type)] = tpe match { case PolyType(tparams, restpe) => Some((tparams, restpe)) case _ => Some((Nil, tpe)) } @@ -3774,16 +4199,59 @@ trait Types @deprecated("use genPolyType(...) 
instead", "2.10.0") // Used in reflection API def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe) - /** A creator for anonymous type functions, where the symbol for the type function still needs to be created. + /** A creator for a type functions, assuming the type parameters tps already have the right owner. */ + def typeFun(tps: List[Symbol], body: Type): Type = PolyType(tps, body) + + /** We will need to clone the info of the original method (which obtains clones + * of the method type parameters), clone the type parameters of the value class, + * and create a new polymethod with the union of all those type parameters, with + * their infos adjusted to be consistent with their new home. Example: * - * TODO: - * type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion - * higher-order subtyping expects eta-expansion of type constructors that arise from a class; here, the type params are owned by that class, but is that the right thing to do? + * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal { + * def baz[B >: A](x: B): List[B] = x :: xs + * // baz has to be transformed into this extension method, where + * // A is cloned from class Foo and B is cloned from method baz: + * // def extension\$baz[B >: A <: Any, A >: Nothing <: AnyRef](\$this: Foo[A])(x: B): List[B] + * } + * + * TODO: factor out the logic for consolidating type parameters from a class + * and a method for re-use elsewhere, because nobody will get this right without + * some higher level facilities. 
*/ - def typeFunAnon(tps: List[Symbol], body: Type): Type = typeFun(tps, body) + def extensionMethInfo(currentOwner: Symbol, extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = { + val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth + // Start with the class type parameters - clones will be method type parameters + // so must drop their variance. + val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) + + val thisParamType = appliedType(clazz, tparamsFromClass.map(_.tpeHK)) + val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType + val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) + val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam) + + def fixres(tp: Type) = tp.substThisAndSym(clazz, selfParamType, clazz.typeParams, tparamsFromClass) + def fixtparam(tp: Type) = tp.substSym(clazz.typeParams, tparamsFromClass) + + // We can't substitute symbols on the entire polytype because we + // need to modify the bounds of the cloned type parameters, but we + // don't want to substitute for the cloned type parameters themselves. + val tparams = tparamsFromMethod ::: tparamsFromClass + tparams.foreach(_ modifyInfo fixtparam) + GenPolyType(tparams, fixres(resultType)) + + // For reference, calling fix on the GenPolyType plays out like this: + // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966] + // do not conform to method extension$baz#16148's type parameter bounds + // + // And the difference is visible here. See how B is bounded from below by A#16149 + // in both cases, but in the failing case, the other type parameter has turned into + // a different A. (What is that A? It is a clone of the original A created in + // SubstMap during the call to substSym, but I am not clear on all the particulars.) 
+ // + // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] + // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] + } - /** A creator for a type functions, assuming the type parameters tps already have the right owner. */ - def typeFun(tps: List[Symbol], body: Type): Type = PolyType(tps, body) /** A creator for existential types. This generates: * @@ -3801,30 +4269,63 @@ trait Types * indirectly referenced by type `tpe1`. If there are no remaining type * parameters, simply returns result type `tpe`. */ - def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = - if (tparams.isEmpty) tpe0 - else { - val tpe = normalizeAliases(tpe0) - val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe - var tparams0 = tparams - var tparams1 = tparams0 filter tpe1.contains - - while (tparams1 != tparams0) { - tparams0 = tparams1 - tparams1 = tparams filter { p => - tparams1 exists { p1 => p1 == p || (p1.info contains p) } + def existentialAbstraction(tparams: List[Symbol], tpe0: Type, flipVariance: Boolean = false): Type = { + + /* We want to narrow the list of type parameters tparams to only those which are either + * (a) directly contained by tpe, or + * (b) contained by the typeInfo of another parameter from tparams, known to be referred by tpe + */ + def transitiveReferredFrom(tpe: Type): List[Symbol] = tparams match { + case tparam :: Nil => + // This is for optimisation: should be equivalent to general one. + if (tpe contains tparam) tparams else Nil + case _ => + /* Algorithm to compute transitive closure, using several temporary lists (mutable ListBuffer) + * - pending: elements from tparams not yet known to be in the transitiveClosure + * - border: we know they are in closure, but we use them for search new elements + * - closed: already in closure, and we already searched for new elements. 
+ * + * Invariant: pending, closed, and border form a partition of `tparams`. + * Each element in tparams goes from pending to border, and from border to closed. + * We separate border from closed to avoid recomputing `Type.contains` for same elements. + */ + val pending = ListBuffer.empty[Symbol] + var border = ListBuffer.empty[Symbol] + partitionInto(tparams, tpe.contains, border, pending) + val closed = ListBuffer.empty[Symbol] + var nextBorder = ListBuffer.empty[Symbol] + while (!border.isEmpty) { + nextBorder.clear() + pending.filterInPlace { paramTodo => + !border.exists(_.info contains paramTodo) || { + nextBorder += paramTodo; + false + } + } + closed ++= border + val swap = border + border = nextBorder + nextBorder = swap } - } - newExistentialType(tparams1, tpe1) + if (closed.length == tparams.length) tparams else closed.toList } + if (tparams.isEmpty || (tpe0 eq NoType)) tpe0 + else { + val tpe = normalizeAliases(tpe0) + val extrapolation = new ExistentialExtrapolation(tparams) + if (flipVariance) extrapolation.variance = Contravariant + val tpe1 = extrapolation.extrapolate(tpe) + newExistentialType(transitiveReferredFrom(tpe1), tpe1) + } + } // end existentialAbstraction // Hash consing -------------------------------------------------------------- - private val initialUniquesCapacity = 4096 - private var uniques: util.WeakHashSet[Type] = _ - private var uniqueRunId = NoRunId + private[this] val initialUniquesCapacity = 4096 + private[this] var uniques: util.WeakHashSet[Type] = _ + private[this] var uniqueRunId = NoRunId final def howManyUniqueTypes: Int = if (uniques == null) 0 else uniques.size @@ -3856,9 +4357,9 @@ trait Types override def apply(tp: Type) = super.apply(tp.normalize) // normalize is required here } - object unwrapToClass extends ClassUnwrapper(existential = true) { } - object unwrapToStableClass extends ClassUnwrapper(existential = false) { } - object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { } + object 
unwrapToClass extends ClassUnwrapper(existential = true) + object unwrapToStableClass extends ClassUnwrapper(existential = false) + object unwrapWrapperTypes extends TypeUnwrapper(poly = true, existential = true, annotated = true, nullary = true) def elementExtract(container: Symbol, tp: Type): Type = { assert(!container.isAliasType, container) @@ -3867,24 +4368,21 @@ trait Types case _ => NoType } } - def elementExtractOption(container: Symbol, tp: Type): Option[Type] = { + def elementExtractOption(container: Symbol, tp: Type): Option[Type] = elementExtract(container, tp) match { case NoType => None - case tp => Some(tp) + case tp1 => Some(tp1) } - } - def elementTest(container: Symbol, tp: Type)(f: Type => Boolean): Boolean = { + def elementTest(container: Symbol, tp: Type)(f: Type => Boolean): Boolean = elementExtract(container, tp) match { case NoType => false - case tp => f(tp) + case tp1 => f(tp1) } - } - def elementTransform(container: Symbol, tp: Type)(f: Type => Type): Type = { + def elementTransform(container: Symbol, tp: Type)(f: Type => Type): Type = elementExtract(container, tp) match { case NoType => NoType - case tp => f(tp) + case tp1 => f(tp1) } - } def transparentShallowTransform(container: Symbol, tp: Type)(f: Type => Type): Type = { def loop(tp: Type): Type = tp match { @@ -3908,15 +4406,6 @@ trait Types def containsExistential(tpe: Type) = tpe.exists(_.typeSymbol.isExistentiallyBound) def existentialsInType(tpe: Type) = tpe.withFilter(_.typeSymbol.isExistentiallyBound).map(_.typeSymbol) - private def isDummyOf(tpe: Type)(targ: Type) = { - val sym = targ.typeSymbol - sym.isTypeParameter && sym.owner == tpe.typeSymbol - } - def isDummyAppliedType(tp: Type) = tp.dealias match { - case tr @ TypeRef(_, _, args) => args exists isDummyOf(tr) - case _ => false - } - def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = { val eparams = tparams map (tparam => clazz.newExistential(tparam.name.toTypeName, clazz.pos) setInfo 
tparam.info.bounds) @@ -3943,7 +4432,7 @@ trait Types && isRawIfWithoutArgs(sym) ) - def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(hi :: SingletonClass.tpe :: Nil)) + def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(hi :: ListOfSingletonClassTpe)) /** * A more persistent version of `Type#memberType` which does not require @@ -3981,7 +4470,7 @@ trait Types case _: ConstantType => tp // Java enum constants: don't widen to the enum type! case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect. } - val memType = widened asSeenFrom (pre, tp.typeSymbol.owner) + val memType = widened.asSeenFrom(pre, tp.typeSymbol.owner) if (tp eq widened) memType else memType.narrow } else loop(tp.prefix) memberType tp.typeSymbol @@ -4009,13 +4498,11 @@ trait Types * as a function over the maximum depth `td` of these types, and * the maximum depth `bd` of all types in the base type sequences of these types. */ - private def lubDepthAdjust(td: Depth, bd: Depth): Depth = ( - if (settings.XfullLubs) bd - else if (bd <= Depth(3)) bd + private def lubDepthAdjust(td: Depth, bd: Depth): Depth = + if (bd <= Depth(3)) bd else if (bd <= Depth(5)) td max bd.decr else if (bd <= Depth(7)) td max (bd decr 2) else td.decr max (bd decr 3) - ) private def infoTypeDepth(sym: Symbol): Depth = typeDepth(sym.info) private def symTypeDepth(syms: List[Symbol]): Depth = Depth.maximumBy(syms)(infoTypeDepth) @@ -4055,23 +4542,25 @@ trait Types throw new MatchError((tp1, tp2)) } - def check(tp1: Type, tp2: Type) = ( - if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL)) - tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol) + def check(tp1: Type, tp2: Type) = { + val sym1 = tp1.typeSymbol + if (sym1.isClass && sym1.hasFlag(FINAL) && sym1 != SingletonClass) + tp1 <:< tp2 || isNumericValueClass(sym1) && isNumericValueClass(tp2.typeSymbol) else tp1.baseClasses forall (bc => 
tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc))) - ) + } check(tp1, tp2) && check(tp2, tp1) } - def normalizePlus(tp: Type): Type = { + @tailrec + final def normalizePlus(tp: Type): Type = { if (isRawType(tp)) rawToExistential(tp) else tp.normalize match { // Unify the representations of module classes - case st@SingleType(_, sym) if sym.isModule => st.underlying.normalize - case st@ThisType(sym) if sym.isModuleClass => normalizePlus(st.underlying) - case _ => tp.normalize + case st @ SingleType(_, _) if st.typeSymbol.isModuleClass => st.underlying.normalize + case st @ ThisType(sym) if sym.isModuleClass => normalizePlus(st.underlying) + case tpNorm => tpNorm } } @@ -4088,13 +4577,40 @@ trait Types /** Are `tps1` and `tps2` lists of pairwise equivalent types? */ - def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _) - - private var _basetypeRecursions: Int = 0 + def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = { + // OPT: hand inlined (tps1 corresponds tps2)(_ =:= _) to avoid cost of boolean unboxing (which includes + // a null check) + var i = tps1 + var j = tps2 + while (!(i.isEmpty || j.isEmpty)) { + if (!(i.head =:= j.head)) + return false + i = i.tail + j = j.tail + } + i.isEmpty && j.isEmpty + } + + /** Are `tps1` and `tps2` lists of pairwise equivalent symbols according to `_.tpe` ? 
*/ + def isSameSymbolTypes(syms1: List[Symbol], syms2: List[Symbol]): Boolean = { + // OPT: hand inlined (syms1 corresponds syms2)((x, y) => (x.tpe =:= y.tpe)) to avoid cost of boolean unboxing (which includes + a null check) + var i = syms1 + var j = syms2 + while (!(i.isEmpty || j.isEmpty)) { + if (!(i.head.tpe =:= j.head.tpe)) + return false + i = i.tail + j = j.tail + } + i.isEmpty && j.isEmpty + } + + private[this] var _basetypeRecursions: Int = 0 def basetypeRecursions = _basetypeRecursions def basetypeRecursions_=(value: Int) = _basetypeRecursions = value - private val _pendingBaseTypes = new mutable.HashSet[Type] + private[this] val _pendingBaseTypes = new mutable.HashSet[Type] def pendingBaseTypes = _pendingBaseTypes /** Does this type have a prefix that begins with a type variable, @@ -4102,29 +4618,33 @@ trait Types * type selections with the same name of equal (as determined by `=:=`) prefixes are * considered equal in regard to `=:=`. */ - def isEligibleForPrefixUnification(tp: Type): Boolean = tp match { + @tailrec + final def isEligibleForPrefixUnification(tp: Type): Boolean = tp match { case SingleType(pre, sym) => !(sym hasFlag PACKAGE) && isEligibleForPrefixUnification(pre) case tv@TypeVar(_, constr) => !tv.instValid || isEligibleForPrefixUnification(constr.inst) case RefinedType(_, _) => true + case ThisType(sym) => sym.hasSelfType + case _ => false } def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType) /** This appears to be equivalent to tp.isInstanceof[SingletonType], - * except it excludes ConstantTypes.
*/ def isSingleType(tp: Type) = tp match { case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true + case LiteralType(_) => true case _ => false } def isConstantType(tp: Type) = tp match { - case ConstantType(_) => true - case _ => false + case FoldableConstantType(_) => true + case _ => false } - def isExistentialType(tp: Type): Boolean = tp match { + @tailrec + final def isExistentialType(tp: Type): Boolean = tp match { case _: ExistentialType => true case tp: Type if tp.dealias ne tp => isExistentialType(tp.dealias) case _ => false @@ -4174,11 +4694,10 @@ trait Types } ****/ private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match { - case WildcardType => true - case BoundedWildcardType(_) => true - case ErrorType => true - case _: TypeVar => true - case _ => false + case ErrorType => true + case _: ProtoType => true + case _: TypeVar => true + case _ => false } private def isAlwaysValueType(tp: Type) = tp match { case RefinedType(_, _) => true @@ -4197,6 +4716,7 @@ trait Types * can be given: true == value type, false == non-value type. Otherwise, * an exception is thrown. */ + @tailrec private def isValueElseNonValue(tp: Type): Boolean = tp match { case tp if isAlwaysValueType(tp) => true case tp if isAlwaysNonValueType(tp) => false @@ -4206,7 +4726,7 @@ trait Types case ThisType(sym) => !sym.isPackageClass // excludes packages case TypeRef(_, sym, _) => !sym.isPackageClass // excludes packages case PolyType(_, _) => true // poly-methods excluded earlier - case tp => sys.error("isValueElseNonValue called with third-way type " + tp) + case tp => throw new IllegalArgumentException("isValueElseNonValue called with third-way type " + tp) } /** SLS 3.2, Value Types @@ -4225,7 +4745,7 @@ trait Types * The specification-enumerated non-value types are method types, polymorphic * method types, and type constructors. 
Supplements to the specified set of * non-value types include: types which wrap non-value symbols (packages - * and statics), overloaded types. Varargs and by-name types T* and (=>T) are + * and statics), overloaded types. Varargs and by-name types T* and (=> T) are * not designated non-value types because there is code which depends on using * them as type arguments, but their precise status is unclear. */ @@ -4333,7 +4853,7 @@ trait Types case mt2 @ MethodType(params2, res2) => // sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performance-wise?) mt1.isImplicit == mt2.isImplicit && - matchingParams(params1, params2, mt1.isJava, mt2.isJava) && + matchingParams(params1, params2) && matchesQuantified(params1, params2, res1, res2) case NullaryMethodType(res2) => if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple) @@ -4428,7 +4948,7 @@ trait Types */ /** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */ - protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match { + protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol]): Boolean = syms1 match { case Nil => syms2.isEmpty case sym1 :: rest1 => @@ -4438,10 +4958,7 @@ trait Types case sym2 :: rest2 => val tp1 = sym1.tpe val tp2 = sym2.tpe - (tp1 =:= tp2 || - syms1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass || - syms2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass) && - matchingParams(rest1, rest2, syms1isJava, syms2isJava) + tp1 =:= tp2 && matchingParams(rest1, rest2) } } @@ -4469,13 +4986,16 @@ trait Types } /** A list of the typevars in a type. 
*/ - def typeVarsInType(tp: Type): List[TypeVar] = { + def typeVarsInType(tp: Type): List[TypeVar] = + typeVarsInTypeRev(tp).reverse + + private[this] def typeVarsInTypeRev(tp: Type): List[TypeVar] = { var tvs: List[TypeVar] = Nil tp foreach { case t: TypeVar => tvs ::= t case _ => } - tvs.reverse + tvs } // If this type contains type variables, put them to sleep for a while. @@ -4508,8 +5028,8 @@ trait Types } if (!needsStripping) (ts, Nil) // fast path for common case else { - val tparams = mutable.ListBuffer[Symbol]() - val stripped = mutable.ListBuffer[Type]() + val tparams = ListBuffer[Symbol]() + val stripped = ListBuffer[Type]() def stripType(tp: Type): Unit = tp match { case rt: RefinedType if isIntersectionTypeForLazyBaseType(rt) => if (expandLazyBaseType) @@ -4524,7 +5044,7 @@ trait Types case tv@TypeVar(_, constr) => if (tv.instValid) stripType(constr.inst) else if (tv.untouchable) stripped += tv - else abort("trying to do lub/glb of typevar " + tv) + else {} // ignore when this happens (neg/t10514.scala) -- don't abort("trying to do lub/glb of typevar " + tv) case tp => stripped += tp } ts foreach stripType @@ -4545,9 +5065,9 @@ trait Types case tp :: Nil => tp case TypeRef(_, sym, _) :: rest => val pres = tps map (_.prefix) // prefix normalizes automatically - val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth) + val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth) val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments - val capturedParams = new ListBuffer[Symbol] + val capturedParams = new ListBuffer[Symbol] try { if (sym == ArrayClass && phase.erasedTypes) { // special treatment for lubs of array types after erasure: @@ -4584,19 +5104,28 @@ trait Types NoType } else { - if (tparam.variance == variance) lub(as, depth.decr) - else if (tparam.variance == variance.flip) glb(as, depth.decr) + 
val hktParams = tparam.initialize.typeParams + val hktArgs = hktParams.map(_.typeConstructor) + def applyHK(tp: Type) = appliedType(tp, hktArgs) + def bindHK(tp: Type) = typeFun(hktParams, tp) + // Make `as` well-kinded by binding higher-order type params of `tparam` + // (so that the type arguments in `as` have the same kind as the type parameter `tparam`). + val asKinded = if (hktParams.isEmpty) as else as.map(a => bindHK(applyHK(a))) + + if (tparam.variance == variance) lub(asKinded, depth.decr) + else if (tparam.variance == variance.flip) glb(asKinded, depth.decr) else { - val l = lub(as, depth.decr) - val g = glb(as, depth.decr) + val l = lub(asKinded, depth.decr) + val g = glb(asKinded, depth.decr) if (l <:< g) l - else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we + else { + // @M this has issues with f-bounds, see #2251 + // Martin: Not sure there is a good way to fix it. For the moment we // just err on the conservative side, i.e. with a bound that is too high. - // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251 capturedParamIds += 1 val capturedParamId = capturedParamIds - - val qvar = commonOwner(as).freshExistential("", capturedParamId) setInfo TypeBounds(g, l) + val bounds = if (hktParams.isEmpty) TypeBounds(g, l) else bindHK(TypeBounds(applyHK(g), applyHK(l))) + val qvar = commonOwner(as).freshExistential("", capturedParamId) setInfo bounds capturedParams += qvar qvar.tpe } @@ -4625,8 +5154,8 @@ trait Types /** Make symbol `sym` a member of scope `tp.decls` * where `thistp` is the narrowed owner type of the scope. 
*/ - def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Depth) { - assert(sym != NoSymbol) + def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Depth): Unit = { + assert(sym != NoSymbol, "Adding member NoSymbol") // debuglog("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG if (!specializesSym(thistp, sym, depth)) { if (sym.isTerm) @@ -4663,8 +5192,8 @@ trait Types if (settings.isDebug) printStackTrace() } - class NoCommonType(tps: List[Type]) extends Throwable( - "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable + class NoCommonType(tps: List[Type]) extends ControlThrowable( + "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) /** A throwable signalling a malformed type */ class MalformedType(msg: String) extends TypeError(msg) { @@ -4672,7 +5201,7 @@ trait Types } /** The current indentation string for traces */ - private var _indent: String = "" + private[this] var _indent: String = "" protected def indent = _indent protected def indent_=(value: String) = _indent = value @@ -4687,13 +5216,13 @@ trait Types } /** If option `explaintypes` is set, print a subtype trace for `found <:< required`. */ - def explainTypes(found: Type, required: Type) { - if (settings.explaintypes) withTypesExplained(found <:< required) + def explainTypes(found: Type, required: Type): Unit = { + if (settings.explaintypes.value) withTypesExplained(found <:< required) } /** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */ - def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) { - if (settings.explaintypes) withTypesExplained(op(found, required)) + def explainTypes(op: (Type, Type) => Any, found: Type, required: Type): Unit = { + if (settings.explaintypes.value) withTypesExplained(op(found, required)) } /** Execute `op` while printing a trace of the operations on types executed. 
*/ @@ -4702,8 +5231,12 @@ trait Types try { explainSwitch = true; op } finally { explainSwitch = s } } + def isUnboundedGeneric(tp: Type) = tp match { + case t @ TypeRef(_, sym, _) => sym.isAbstractType && (!(t <:< AnyRefTpe) || (t.upperBound eq ObjectTpeJava)) + case _ => false + } def isBoundedGeneric(tp: Type) = tp match { - case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefTpe) + case TypeRef(_, sym, _) if sym.isAbstractType => tp <:< AnyRefTpe && !(tp.upperBound eq ObjectTpeJava) case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym) case _ => false } @@ -4728,46 +5261,111 @@ trait Types */ def importableMembers(pre: Type): Scope = pre.members filter isImportable - def objToAny(tp: Type): Type = - if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyTpe - else tp - - def invalidateTreeTpeCaches(tree: Tree, updatedSyms: List[Symbol]) = if (updatedSyms.nonEmpty) + def invalidateTreeTpeCaches(tree: Tree, updatedSyms: collection.Set[Symbol]) = if (!updatedSyms.isEmpty) { + val invldtr = new InvalidateTypeCaches(updatedSyms) for (t <- tree if t.tpe != null) - for (tp <- t.tpe) { - invalidateCaches(tp, updatedSyms) - } + invldtr.invalidate(t.tpe) + } + + def invalidateCaches(t: Type, updatedSyms: collection.Set[Symbol]): Unit = + new InvalidateTypeCaches(updatedSyms).invalidate(t) + + class InvalidateTypeCaches(changedSymbols: collection.Set[Symbol]) extends TypeFolder { + private var res = false + private val seen = new java.util.IdentityHashMap[Type, Boolean] + + def invalidate(tps: Iterable[Type]): Unit = { + res = false + seen.clear() + try tps.foreach(invalidateImpl) + finally seen.clear() + } + + def invalidate(tp: Type): Unit = invalidate(List(tp)) + + protected def invalidateImpl(tp: Type): Boolean = Option(seen.get(tp)).getOrElse { + val saved = res + try { + apply(tp) + res + } finally res = saved + } + + def apply(tp: Type): Unit = tp match { + case _ if seen.containsKey(tp) => + + case tr: TypeRef => + val preInvalid = 
invalidateImpl(tr.pre) + var argsInvalid = false + tr.args.foreach(arg => argsInvalid = invalidateImpl(arg) || argsInvalid) + if (preInvalid || argsInvalid || changedSymbols(tr.sym)) { + tr.invalidateTypeRefCaches() + res = true + } + seen.put(tp, res) + + case ct: CompoundType if ct.baseClasses.exists(changedSymbols) => + ct.invalidatedCompoundTypeCaches() + res = true + seen.put(tp, res) - def invalidateCaches(t: Type, updatedSyms: List[Symbol]): Unit = - t match { - case tr: TypeRef if updatedSyms.contains(tr.sym) => tr.invalidateTypeRefCaches() - case ct: CompoundType if ct.baseClasses.exists(updatedSyms.contains) => ct.invalidatedCompoundTypeCaches() case st: SingleType => - if (updatedSyms.contains(st.sym)) st.invalidateSingleTypeCaches() - val underlying = st.underlying - if (underlying ne st) - invalidateCaches(underlying, updatedSyms) + val preInvalid = invalidateImpl(st.pre) + if (preInvalid || changedSymbols(st.sym)) { + st.invalidateSingleTypeCaches() + res = true + } + val underInvalid = (st.underlying ne st) && invalidateImpl(st.underlying) + res ||= underInvalid + seen.put(tp, res) + case _ => + tp.foldOver(this) + seen.put(tp, res) } - + } val shorthands = Set( "scala.collection.immutable.List", "scala.collection.immutable.Nil", - "scala.collection.Seq", + "scala.collection.immutable.Seq", + "scala.collection.immutable.IndexedSeq", + "scala.collection.mutable.StringBuilder", "scala.collection.Traversable", "scala.collection.Iterable", - "scala.collection.mutable.StringBuilder", - "scala.collection.IndexedSeq", "scala.collection.Iterator") - @deprecated("Use _.tpe", "2.12.12") // used by scala-meta, leave until they remove the dependency. 
- private[scala] val treeTpe = (t: Tree) => t.tpe private[scala] val typeContainsTypeVar = { val collector = new FindTypeCollector(_.isInstanceOf[TypeVar]); (tp: Type) => collector.collect(tp).isDefined } private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe + @tailrec + private[scala] final def typeIsNothing(tp: Type): Boolean = + tp.dealias match { + case PolyType(_, resultType) => typeIsNothing(resultType) + case TypeRef(_, NothingClass, _) => true + case _ => false + } + + @tailrec + private[scala] final def typeIsAnyOrJavaObject(tp: Type): Boolean = + tp.dealias match { + case PolyType(_, resultType) => typeIsAnyOrJavaObject(resultType) + case TypeRef(_, AnyClass, _) => true + case _: ObjectTpeJavaRef => true + case _ => false + } + + private[scala] final def typeIsAnyExactly(tp: Type): Boolean = + tp.dealias match { + case PolyType(_, resultType) => typeIsAnyExactly(resultType) + case TypeRef(_, AnyClass, _) => true + case _ => false + } + + private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded + /** The maximum depth of type `tp` */ - def typeDepth(tp: Type): Depth = tp match { + final def typeDepth(tp: Type): Depth = tp match { case TypeRef(pre, sym, args) => typeDepth(pre) max maxDepth(args).incr case RefinedType(parents, decls) => maxDepth(parents) max symTypeDepth(decls.toList).incr case TypeBounds(lo, hi) => typeDepth(lo) max typeDepth(hi) diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala index be1f79ecf2d3..34b65d58091e 100644 --- a/src/reflect/scala/reflect/internal/Variance.scala +++ b/src/reflect/scala/reflect/internal/Variance.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -92,6 +92,7 @@ object Variance { val Contravariant = new Variance(-1) val Invariant = new Variance(0) + @FunctionalInterface trait Extractor[A] { def apply(x: A): Variance } trait Extractor2[A, B] { def apply(x: A, y: B): Variance } diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 9dc2f5d4f4a2..41faa069c242 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,6 @@ package reflect package internal import Variance._ -import scala.collection.mutable import scala.annotation.tailrec import scala.reflect.internal.util.ReusableInstance @@ -27,45 +26,36 @@ trait Variances { /** Used in Refchecks. * TODO - eliminate duplication with varianceInType */ - class VarianceValidator extends Traverser { - private val escapedLocals = mutable.HashSet[Symbol]() + class VarianceValidator extends InternalTraverser { // A flag for when we're in a refinement, meaning method parameter types // need to be checked. - private var inRefinement = false + private[this] var inRefinement = false @inline private def withinRefinement(body: => Type): Type = { val saved = inRefinement inRefinement = true try body finally inRefinement = saved } - /** Is every symbol in the owner chain between `site` and the owner of `sym` - * either a term symbol or private[this]? If not, add `sym` to the set of - * escaped locals. 
- * @pre sym.isLocalToThis - */ - @tailrec final def checkForEscape(sym: Symbol, site: Symbol) { - if (site == sym.owner || site == sym.owner.moduleClass || site.hasPackageFlag) () // done - else if (site.isTerm || site.isPrivateLocal) checkForEscape(sym, site.owner) // ok - recurse to owner - else escapedLocals += sym - } - - protected def issueVarianceError(base: Symbol, sym: Symbol, required: Variance): Unit = () + protected def issueVarianceError(base: Symbol, sym: Symbol, required: Variance, tpe: Type): Unit = () // Flip occurrences of type parameters and parameters, unless // - it's a constructor, or case class factory or extractor + // - it's a type parameter / parameter of a local definition // - it's a type parameter of tvar's owner. def shouldFlip(sym: Symbol, tvar: Symbol) = ( sym.isParameter + && !sym.owner.isLocalToThis && !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem && tvar.owner == sym.owner) ) + // Is `sym` is local to a term or is private[this] or protected[this]? - def isExemptFromVariance(sym: Symbol): Boolean = !sym.owner.isClass || ( - (sym.isLocalToThis || sym.isSuperAccessor) // super accessors are implicitly local #4345 - && !escapedLocals(sym) - ) + def isExemptFromVariance(sym: Symbol): Boolean = + // super accessors are implicitly local #4345 + !sym.owner.isClass || sym.isLocalToThis || sym.isSuperAccessor - private object ValidateVarianceMap extends TypeMap(trackVariance = true) { - private var base: Symbol = _ + private object ValidateVarianceMap extends VariancedTypeMap { + private[this] var base: Symbol = _ + private[this] var inLowerBoundOf: Symbol = _ /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`. * The search proceeds from `base` to the owner of `tvar`. @@ -78,31 +68,29 @@ trait Variances { * leading to unsoundness (see scala/bug#6566). 
*/ def relativeVariance(tvar: Symbol): Variance = { - def nextVariance(sym: Symbol, v: Variance): Variance = ( + def nextVariance(sym: Symbol, v: Variance): Variance = if (shouldFlip(sym, tvar)) v.flip else if (isExemptFromVariance(sym)) Bivariant - else if (sym.isAliasType) ( - // Unsound pre-2.11 behavior preserved under -Xsource:2.10 - if (settings.isScala211 || sym.isOverridingSymbol) Invariant - else { - currentRun.reporting.deprecationWarning(sym.pos, "Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond", "2.11.0", site = "", origin = "") - Bivariant - } - ) + else if (sym.isAliasType) Invariant else v - ) - def loop(sym: Symbol, v: Variance): Variance = ( - if (sym == tvar.owner || v.isBivariant) v + + @tailrec + def loop(sym: Symbol, v: Variance): Variance = + if (v.isBivariant) v + else if (sym == tvar.owner) + // We can't move this to `shouldFlip`, because it's needed only once at the end. + if (inLowerBoundOf == sym) v.flip else v else loop(sym.owner, nextVariance(sym, v)) - ) + loop(base, Covariant) } + def isUncheckedVariance(tp: Type) = tp match { case AnnotatedType(annots, _) => annots exists (_ matches definitions.uncheckedVarianceClass) case _ => false } - private def checkVarianceOfSymbol(sym: Symbol) { + private def checkVarianceOfSymbol(sym: Symbol): Unit = { val relative = relativeVariance(sym) val required = relative * variance if (!relative.isBivariant) { @@ -110,17 +98,19 @@ trait Variances { def base_s = s"$base in ${base.owner}" + (if (base.owner.isClass) "" else " in " + base.owner.enclClass) log(s"verifying $sym_s is $required at $base_s") if (sym.variance != required) - issueVarianceError(base, sym, required) + issueVarianceError(base, sym, required, base.info) } } + override def mapOver(decls: Scope): Scope = { decls foreach (sym => withVariance(if (sym.isAliasType) Invariant else variance)(this(sym.info))) decls } + private def resultTypeOnly(tp: Type) = tp match { - case mt: MethodType => 
!inRefinement - case pt: PolyType => true - case _ => false + case _: MethodType => !inRefinement + case _: PolyType => true + case _ => false } /** For PolyTypes, type parameters are skipped because they are defined @@ -131,75 +121,141 @@ trait Variances { def apply(tp: Type): Type = { tp match { case _ if isUncheckedVariance(tp) => - case _ if resultTypeOnly(tp) => this(tp.resultType) - case TypeRef(_, sym, _) if shouldDealias(sym) => this(tp.normalize) - case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp) - case RefinedType(_, _) => withinRefinement(mapOver(tp)) - case ClassInfoType(parents, _, _) => parents foreach this - case mt @ MethodType(_, result) => flipped(mt.paramTypes foreach this) ; this(result) - case _ => mapOver(tp) + case _ if resultTypeOnly(tp) => apply(tp.resultType) + case TypeRef(_, sym, _) if shouldDealias(sym) => apply(tp.normalize) + case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym); tp.mapOver(this) + case RefinedType(_, _) => withinRefinement(tp.mapOver(this)) + case ClassInfoType(parents, _, _) => parents.foreach(apply) + case mt @ MethodType(_, result) => flipped(mt.paramTypes.foreach(apply)); apply(result) + case _ => tp.mapOver(this) } // We're using TypeMap here for type traversal only. To avoid wasteful symbol // cloning during the recursion, it is important to return the input `tp`, rather // than the result of the pattern match above, which normalizes types. tp } + private def shouldDealias(sym: Symbol): Boolean = { // The RHS of (private|protected)[this] type aliases are excluded from variance checks. This is // implemented in relativeVariance. // As such, we need to expand references to them to retain soundness. Example: neg/t8079a.scala sym.isAliasType && isExemptFromVariance(sym) } - def validateDefinition(base: Symbol) { - val saved = this.base + + /** Validate the variance of types in the definition of `base`. 
+ * + * Traverse the type signature of `base` and for each type parameter: + * - Calculate the relative variance between `base` and the type parameter's owner by + * walking the owner chain of `base`. + * - Calculate the required variance of the type parameter which is the product of the + * relative variance and the current variance in the type signature of `base`. + * - Ensure that the declared variance of the type parameter is compatible with the + * required variance, otherwise issue an error. + * + * Lower bounds need special handling. By default the variance is flipped when entering a + * lower bound. In most cases this is the correct behaviour except for the type parameters + * of higher-kinded types. E.g. in `Foo` below `x` occurs in covariant position: + * `class Foo[F[+_]] { type G[+x] >: F[x] }` + * + * To handle this special case, track when entering the lower bound of a HKT in a variable + * and flip the relative variance for its type parameters. (flipping the variance a second + * time negates the first flip). 
+ */ + def validateDefinition(base: Symbol): Unit = { this.base = base - try apply(base.info) - finally this.base = saved + base.info match { + case PolyType(_, TypeBounds(lo, hi)) => + inLowerBoundOf = base + try flipped(apply(lo)) + finally inLowerBoundOf = null + apply(hi) + case other => + apply(other) + } } } - /** Validate variance of info of symbol `base` */ - private def validateVariance(base: Symbol) { - ValidateVarianceMap validateDefinition base + private object PolyTypeVarianceMap extends TypeMap { + + private def ownerOf(pt: PolyType): Symbol = + pt.typeParams.head.owner + + private def checkPolyTypeParam(pt: PolyType, tparam: Symbol, tpe: Type): Unit = + if (!tparam.isInvariant) { + val required = varianceInType(tpe, considerUnchecked = true)(tparam) + if (!required.isBivariant && tparam.variance != required) + issueVarianceError(ownerOf(pt), tparam, required, pt) + } + + def apply(tp: Type): Type = { + tp match { + case pt @ PolyType(typeParams, TypeBounds(lo, hi)) => + typeParams.foreach { tparam => + checkPolyTypeParam(pt, tparam, lo) + checkPolyTypeParam(pt, tparam, hi) + } + + pt.mapOver(this) + + case pt @ PolyType(typeParams, resultType) => + typeParams.foreach(checkPolyTypeParam(pt, _, resultType)) + pt.mapOver(this) + + case _ => + tp.mapOver(this) + } + + tp + } } - override def traverse(tree: Tree) { + /** Validate the variance of (the type parameters of) PolyTypes in `tpe`. + * + * `validateDefinition` cannot handle PolyTypes in arbitrary position, because in general + * the relative variance of such types cannot be computed by walking the owner chain. + * + * Instead this method applies a naive algorithm which is correct but less efficient: + * use `varianceInType` to check each type parameter of a PolyType separately. + */ + def validateVarianceOfPolyTypesIn(tpe: Type): Unit = + PolyTypeVarianceMap(tpe) + + override def traverse(tree: Tree): Unit = { def sym = tree.symbol // No variance check for object-private/protected methods/values. 
// Or constructors, or case class factory or extractor. def skip = ( sym == NoSymbol - || sym.isLocalToThis - || sym.owner.isConstructor - || sym.owner.isCaseApplyOrUnapply + || sym.owner.isConstructor // FIXME: this is unsound - scala/bug#8737 + || sym.owner.isCaseApplyOrUnapply // same treatment as constructors + || sym.isParamAccessor && sym.isLocalToThis // local class parameters are construction only ) + tree match { - case defn: MemberDef if skip => + case _: MemberDef if skip => debuglog(s"Skipping variance check of ${sym.defString}") case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) => - validateVariance(sym) - super.traverse(tree) + ValidateVarianceMap.validateDefinition(sym) + tree.traverse(this) case ModuleDef(_, _, _) => - validateVariance(sym.moduleClass) - super.traverse(tree) + ValidateVarianceMap.validateDefinition(sym.moduleClass) + tree.traverse(this) case ValDef(_, _, _, _) => - validateVariance(sym) + ValidateVarianceMap.validateDefinition(sym) case DefDef(_, _, tparams, vparamss, _, _) => - validateVariance(sym) + ValidateVarianceMap.validateDefinition(sym) traverseTrees(tparams) traverseTreess(vparamss) case Template(_, _, _) => - super.traverse(tree) - case CompoundTypeTree(templ) => - super.traverse(tree) - + tree.traverse(this) + case CompoundTypeTree(_) => + tree.traverse(this) // scala/bug#7872 These two cases make sure we don't miss variance exploits // in originals, e.g. in `foo[({type l[+a] = List[a]})#l]` case tt @ TypeTree() if tt.original != null => - super.traverse(tt.original) - case tt : TypTree => - super.traverse(tt) - + tt.original.traverse(this) + case tt: TypTree => + tt.traverse(this) case _ => } } @@ -210,20 +266,22 @@ trait Variances { Variance.foldExtract(tps)(t => varianceInType(t)(tparam)) /** Compute variance of type parameter `tparam` in type `tp`. 
*/ - final def varianceInType(tp: Type)(tparam: Symbol): Variance = { - varianceInTypeCache.using(_.apply(tp, tparam)) - } - private[this] val varianceInTypeCache = new ReusableInstance[varianceInType](() => new varianceInType, enabled = isCompilerUniverse) + final def varianceInType(tp: Type, considerUnchecked: Boolean = false)(tparam: Symbol): Variance = + varianceInTypeCache.using(_.apply(tp, tparam, considerUnchecked)) + + private[this] val varianceInTypeCache = ReusableInstance[varianceInType](new varianceInType, enabled = isCompilerUniverse) private final class varianceInType { private[this] var tp: Type = _ private[this] var tparam: Symbol = _ + private[this] var considerUnchecked = false import Variance._ private def inArgs(sym: Symbol, args: List[Type]): Variance = foldExtract2(args, sym.typeParams)(inArgParam) private def inSyms(syms: List[Symbol]): Variance = foldExtract(syms)(inSym) private def inTypes(tps: List[Type]): Variance = foldExtract(tps)(inType) private def inAnnots(anns: List[AnnotationInfo]): Variance = foldExtract(anns)(inAnnotationAtp) + private def unchecked(anns: List[AnnotationInfo]): Boolean = considerUnchecked && anns.exists(_.matches(definitions.uncheckedVarianceClass)) // OPT these extractors are hoisted to fields to reduce allocation. We're also avoiding Function1[_, Variance] to // avoid value class boxing. 
@@ -231,12 +289,12 @@ trait Variances { private[this] lazy val inArgParam: Extractor2[Type, Symbol] = (a, b) => inType(a) * b.variance private[this] lazy val inSym: Extractor[Symbol] = (sym: Symbol) => if (sym.isAliasType) inType(sym.info).cut else inType(sym.info) private[this] val inType: Extractor[Type] = { - case ErrorType | WildcardType | NoType | NoPrefix => Bivariant + case pt: ProtoType => inType(pt.toVariantType) + case ErrorType | NoType | NoPrefix => Bivariant case ThisType(_) | ConstantType(_) => Bivariant case TypeRef(_, tparam, _) if tparam eq this.tparam => Covariant - case BoundedWildcardType(bounds) => inType(bounds) case NullaryMethodType(restpe) => inType(restpe) - case SingleType(pre, sym) => inType(pre) + case SingleType(pre, _) => inType(pre) case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args) case TypeBounds(lo, hi) => inType(lo).flip & inType(hi) @@ -244,16 +302,21 @@ trait Variances { case MethodType(params, restpe) => inSyms(params).flip & inType(restpe) case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe) case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe) + case AnnotatedType(annots, _) if unchecked(annots) => Bivariant case AnnotatedType(annots, tp) => inAnnots(annots) & inType(tp) + case SuperType(thistpe, supertpe) => inType(thistpe) & inType(supertpe) + case x => throw new MatchError(x) } - def apply(tp: Type, tparam: Symbol): Variance = { + def apply(tp: Type, tparam: Symbol, considerUnchecked: Boolean): Variance = { this.tp = tp this.tparam = tparam + this.considerUnchecked = considerUnchecked try inType(tp) finally { this.tp = null this.tparam = null + this.considerUnchecked = false } } } diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala index 46cdb8914554..a4b022f876d0 100644 --- 
a/src/reflect/scala/reflect/internal/annotations/package.scala +++ b/src/reflect/scala/reflect/internal/annotations/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala index 8932a31fd9cc..b9333a920f3a 100644 --- a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala +++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,7 +19,5 @@ package annotations * type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized * code the uses an inferred type of an expression as the type of an artifact val/def (for example, * a temporary value introduced by an ANF transform). See [[https://github.com/scala/bug/issues/7694]]. - * - * @since 2.10.3 */ final class uncheckedBounds extends scala.annotation.StaticAnnotation diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala index fdc6a9b233c5..098e5d93587a 100644 --- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala +++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,8 +13,25 @@ package scala package reflect.internal.pickling +/** + * Helper methods to serialize a byte array as String that can be written as "modified" UTF-8 + * to classfiles. + * + * Modified UTF-8 is the same as UTF-8, except for 0x00, which is represented as the "overlong" + * 0xC0 0x80. Constant strings in classfiles use this encoding. + * + * Encoding (according to SID-10): + * - The 8-bit bytes are split into 7-bit bytes, e.g., 0xff 0x0f becomes 0x7f 0x1f 0x00 + * - Every bit is incremented by 1 (modulo 0x80), in the example we get 0x00, 0x20 0x01 + * - 0x00 is mapped to the overlong encoding, so we get 0xC0 0x80 0x20 0x01 + * + * The +1 increment should reduce the number of (overlong) zeros in the resulting string, as + * 0x7f is (hoped to be) more common than 0x00. + */ object ByteCodecs { - + /** + * Increment each element by 1, then map 0x00 to 0xC0 0x80. Returns a fresh array. + */ def avoidZero(src: Array[Byte]): Array[Byte] = { var i = 0 val srclen = src.length @@ -41,6 +58,9 @@ object ByteCodecs { dst } + /** + * Map 0xC0 0x80 to 0x00, then subtract 1 from each element. In-place. + */ def regenerateZero(src: Array[Byte]): Int = { var i = 0 val srclen = src.length @@ -62,6 +82,7 @@ object ByteCodecs { j } + /** Returns a new array */ def encode8to7(src: Array[Byte]): Array[Byte] = { val srclen = src.length val dstlen = (srclen * 8 + 6) / 7 @@ -128,6 +149,7 @@ object ByteCodecs { dst } + /** In-place */ def decode7to8(src: Array[Byte], srclen: Int): Int = { var i = 0 var j = 0 diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index 35d9f40d777b..27b4aba29d18 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,8 @@ package reflect package internal package pickling +import scala.annotation.tailrec + /** Variable length byte arrays, with methods for basic pickling and unpickling. * * @param data The initial buffer @@ -27,19 +29,26 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { var readIndex = from var writeIndex = to - /** Double bytes array */ - private def dble(): Unit = { - bytes = java.util.Arrays.copyOf(bytes, bytes.length * 2) + @inline + private def growTo(targetCapacity: Int): Unit = { + val bytes1 = new Array[Byte](targetCapacity) + Array.copy(bytes, 0, bytes1, 0, writeIndex) + bytes = bytes1 } def ensureCapacity(capacity: Int) = - while (bytes.length < writeIndex + capacity) dble() + if (bytes.length < writeIndex + capacity) { + var newCapacity = bytes.length + while (newCapacity < writeIndex + capacity) newCapacity <<= 1 + growTo(newCapacity) + } // -- Basic output routines -------------------------------------------- /** Write a byte of data */ - def writeByte(b: Int) { - if (writeIndex == bytes.length) dble() + def writeByte(b: Int): Unit = { + if (writeIndex == bytes.length) + growTo(bytes.length << 1) bytes(writeIndex) = b.toByte writeIndex += 1 } @@ -57,8 +66,8 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { * if the long value is in the range Int.MIN_VALUE to * Int.MAX_VALUE. */ - def writeLongNat(x: Long) { - def writeNatPrefix(x: Long) { + def writeLongNat(x: Long): Unit = { + def writeNatPrefix(x: Long): Unit = { val y = x >>> 7 if (y != 0L) writeNatPrefix(y) writeByte(((x & 0x7f) | 0x80).toInt) @@ -71,8 +80,9 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { /** Write a natural number `x` at position `pos`. * If number is more than one byte, shift rest of array to make space. 
*/ - def patchNat(pos: Int, x: Int) { - def patchNatPrefix(x: Int) { + def patchNat(pos: Int, x: Int): Unit = { + @tailrec + def patchNatPrefix(x: Int): Unit = { writeByte(0) Array.copy(bytes, pos, bytes, pos+1, writeIndex - (pos+1)) bytes(pos) = ((x & 0x7f) | 0x80).toByte @@ -88,7 +98,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { * * @param x The long number to be written. */ - def writeLong(x: Long) { + def writeLong(x: Long): Unit = { val y = x >> 8 val z = x & 0xff if (-y != (z >> 7)) writeLong(y) @@ -161,7 +171,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { /** Perform operation `op` the number of * times specified. Concatenate the results into a list. */ - def times[T](n: Int, op: ()=>T): List[T] = + def times[T](n: Int, op: () => T): List[T] = if (n == 0) List() else op() :: times(n-1, op) /** Pickle = majorVersion_Nat minorVersion_Nat nbEntries_Nat {Entry} diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala index 1d15546c97d9..fdc616558d0c 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,9 +19,6 @@ package pickling * * If you extend the format, be sure to increase the * version minor number. 
- * - * @author Martin Odersky - * @version 1.0 */ object PickleFormat { @@ -64,6 +61,7 @@ object PickleFormat { * | 34 LITERALnull len_Nat * | 35 LITERALclass len_Nat tpe_Ref * | 36 LITERALenum len_Nat sym_Ref + * | 37 LITERALsymbol len_Nat name_Ref * | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody * | 41 CHILDREN len_Nat sym_Ref {sym_Ref} * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref} @@ -127,7 +125,7 @@ object PickleFormat { * len is remaining length after `len`. */ val MajorVersion = 5 - val MinorVersion = 0 + val MinorVersion = 2 final val TERMname = 1 final val TYPEname = 2 @@ -166,6 +164,7 @@ object PickleFormat { final val LITERALnull = 34 final val LITERALclass = 35 final val LITERALenum = 36 + final val LITERALsymbol = 37 // TODO: Never pickled, to be dropped once we have a STARR that does not emit it. final val SYMANNOT = 40 final val CHILDREN = 41 final val ANNOTATEDtpe = 42 diff --git a/src/reflect/scala/reflect/internal/pickling/Translations.scala b/src/reflect/scala/reflect/internal/pickling/Translations.scala index dcd4bb9298db..366fbbadd140 100644 --- a/src/reflect/scala/reflect/internal/pickling/Translations.scala +++ b/src/reflect/scala/reflect/internal/pickling/Translations.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,6 +16,7 @@ package internal package pickling import PickleFormat._ +import scala.annotation.tailrec import util.shortClassOfInstance trait Translations { @@ -37,7 +38,7 @@ trait Translations { // truly terrible idea. It reaches the height of its powers in // combination with scala's insistence on helpfully tupling // multiple arguments passed to a single-arg AnyRef. 
- def picklerTag(ref: AnyRef): Int = ref match { + final def picklerTag(ref: AnyRef): Int = ref match { case tp: Type => picklerTag(tp) case sym: Symbol => picklerTag(sym) case const: Constant => LITERAL + const.tag @@ -49,7 +50,7 @@ trait Translations { case (_: Symbol, _: AnnotationInfo) => SYMANNOT // symbol annotations, i.e. on terms case (_: Symbol, _: List[_]) => CHILDREN // the direct subclasses of a sealed symbol case _: Modifiers => MODIFIERS - case _ => sys.error(s"unpicklable entry ${shortClassOfInstance(ref)} $ref") + case _ => throw new IllegalStateException(s"unpicklable entry ${shortClassOfInstance(ref)} $ref") } /** Local symbols only. The assessment of locality depends @@ -66,9 +67,11 @@ trait Translations { case _: TypeSymbol => ALIASsym case _: TermSymbol if sym.isModule => MODULEsym case _: TermSymbol => VALsym + case x => throw new MatchError(x) } - def picklerTag(tpe: Type): Int = tpe match { + @tailrec + final def picklerTag(tpe: Type): Int = tpe match { case NoType => NOtpe case NoPrefix => NOPREFIXtpe case _: ThisType => THIStpe @@ -85,6 +88,7 @@ trait Translations { case _: ExistentialType => EXISTENTIALtpe case StaticallyAnnotatedType(_, _) => ANNOTATEDtpe case _: AnnotatedType => picklerTag(tpe.underlying) + case x => throw new MatchError(x) } def picklerSubTag(tree: Tree): Int = tree match { @@ -131,6 +135,7 @@ trait Translations { case _: AppliedTypeTree => APPLIEDTYPEtree case _: TypeBoundsTree => TYPEBOUNDStree case _: ExistentialTypeTree => EXISTENTIALTYPEtree + case x => throw new MatchError(x) } } diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 6add2d84a5c6..bf51c0009f17 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,9 +26,6 @@ import scala.collection.mutable.ListBuffer import scala.annotation.switch import scala.util.control.NonFatal -/** @author Martin Odersky - * @version 1.0 - */ abstract class UnPickler { val symbolTable: SymbolTable import symbolTable._ @@ -41,7 +38,7 @@ abstract class UnPickler { * @param moduleRoot the top-level module which is unpickled * @param filename filename associated with bytearray, only used for error messages */ - def unpickle(bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) { + def unpickle(bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String): Unit = { try { assert(classRoot != NoSymbol && moduleRoot != NoSymbol, s"The Unpickler expects a class and module symbol: $classRoot - $moduleRoot") new Scan(bytes, offset, classRoot, moduleRoot, filename).run() @@ -56,25 +53,25 @@ abstract class UnPickler { * * Useful for reporting on stub errors and cyclic errors. 
*/ - private val completingStack = new mutable.ArrayBuffer[Symbol](24) + private[this] val completingStack = new mutable.ArrayBuffer[Symbol](24) class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) { //println("unpickle " + classRoot + " and " + moduleRoot)//debug checkVersion() - private val loadingMirror = mirrorThatLoaded(classRoot) + private[this] val loadingMirror = mirrorThatLoaded(classRoot) /** A map from entry numbers to array offsets */ - private val index = createIndex + private[this] val index = createIndex /** A map from entry numbers to symbols, types, or annotations */ - private val entries = new Array[AnyRef](index.length) + private[this] val entries = new Array[AnyRef](index.length) /** A map from symbols to their associated `decls` scopes */ - private val symScopes = mutable.HashMap[Symbol, Scope]() + private[this] val symScopes = mutable.HashMap[Symbol, Scope]() - private def expect(expected: Int, msg: => String) { + private def expect(expected: Int, msg: => String): Unit = { val tag = readByte() if (tag != expected) errorBadSignature(s"$msg ($tag)") @@ -89,7 +86,7 @@ abstract class UnPickler { } // Laboriously unrolled for performance. - def run() { + def run(): Unit = { var i = 0 while (i < index.length) { if (entries(i) == null && isSymbolEntry(i)) @@ -111,7 +108,7 @@ abstract class UnPickler { } } - private def checkVersion() { + private def checkVersion(): Unit = { val major = readNat() val minor = readNat() if (major != MajorVersion || minor > MinorVersion) @@ -156,11 +153,6 @@ abstract class UnPickler { tag == CHILDREN } - private def maybeReadSymbol(): Either[Int, Symbol] = readNat() match { - case index if isSymbolRef(index) => Right(at(index, readSymbol)) - case index => Left(index) - } - /** Does entry represent a refinement symbol? 
* pre: Entry is a class symbol */ @@ -168,7 +160,7 @@ abstract class UnPickler { val savedIndex = readIndex readIndex = index(i) val tag = readByte().toInt - assert(tag == CLASSsym) + assert(tag == CLASSsym, "Entry must be a class symbol") readNat(); // read length val result = readNameRef() == tpnme.REFINE_CLASS_NAME @@ -230,14 +222,15 @@ abstract class UnPickler { def moduleAdvice(missing: String): String = { val module = - if (missing.startsWith("scala.xml")) Some(("org.scala-lang.modules", "scala-xml")) - else if (missing.startsWith("scala.util.parsing")) Some(("org.scala-lang.modules", "scala-parser-combinators")) - else if (missing.startsWith("scala.swing")) Some(("org.scala-lang.modules", "scala-swing")) + if (missing.startsWith("scala.xml")) Some(("org.scala-lang.modules", "scala-xml")) + else if (missing.startsWith("scala.util.parsing")) Some(("org.scala-lang.modules", "scala-parser-combinators")) + else if (missing.startsWith("scala.swing")) Some(("org.scala-lang.modules", "scala-swing")) + else if (missing.startsWith("scala.collection.parallel")) Some(("org.scala-lang.modules", "scala-parallel-collections")) else None (module map { case (group, art) => s"""\n(NOTE: It looks like the $art module is missing; try adding a dependency on "$group" : "$art". 
- | See http://docs.scala-lang.org/overviews/ for more information.)""".stripMargin + | See https://docs.scala-lang.org/overviews/ for more information.)""".stripMargin } getOrElse "") } @@ -284,13 +277,14 @@ abstract class UnPickler { // symbols that were pickled with Pickler.writeSymInfo val nameref = readNat() - val name = at(nameref, readName) + val name = at(nameref, () => readName()) val owner = readSymbolRef() val flags = pickledToRawFlags(readLongNat()) - val (privateWithin, inforef) = maybeReadSymbol() match { - case Left(index) => NoSymbol -> index - case Right(sym) => sym -> readNat() + val (privateWithin: Symbol, inforef: Int) = { + val index = readNat() + if (isSymbolRef(index)) (at(index, () => readSymbol()), readNat()) + else (NoSymbol, index) } def isModuleFlag = (flags & MODULE) != 0L @@ -300,7 +294,7 @@ abstract class UnPickler { def pflags = flags & PickledFlags def finishSym(sym: Symbol): Symbol = { - /** + /* * member symbols (symbols owned by a class) are added to the class's scope, with a number * of exceptions: * @@ -354,7 +348,7 @@ abstract class UnPickler { sym case MODULEsym => - val moduleClass = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... 
() + val moduleClass = at(inforef, () => readType()).typeSymbol if (isModuleRoot) moduleRoot setFlag pflags else owner.newLinkedModule(moduleClass, pflags) @@ -367,14 +361,14 @@ abstract class UnPickler { }) } - protected def readType(forceProperType: Boolean = false): Type = { + protected def readType(): Type = { val tag = readByte() val end = readEnd() @inline def all[T](body: => T): List[T] = until(end, () => body) - def readTypes() = all(readTypeRef) - def readSymbols() = all(readSymbolRef) - def readAnnots() = all(readAnnotationRef) + def readTypes() = all(readTypeRef()) + def readSymbols() = all(readSymbolRef()) + def readAnnots() = all(readAnnotationRef()) // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType. // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct @@ -400,6 +394,12 @@ abstract class UnPickler { ThisType(sym) } + def mkTypeRef(pre: Type, sym: Symbol, args: List[Type]): Type = { + if (classRoot.isJava && (sym == definitions.ObjectClass)) { + definitions.ObjectTpeJava + } else TypeRef(pre, sym, args) + } + // We're stuck with the order types are pickled in, but with judicious use // of named parameters we can recapture a declarative flavor in a few cases. // But it's still a rat's nest of ad-hockery. 
@@ -410,7 +410,7 @@ abstract class UnPickler { case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // scala/bug#7596 account for overloading case SUPERtpe => SuperType(readTypeRef(), readTypeRef()) case CONSTANTtpe => ConstantType(readConstantRef()) - case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes()) + case TYPEREFtpe => mkTypeRef(readTypeRef(), readSymbolRef(), readTypes()) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) case REFINEDtpe | CLASSINFOtpe => CompoundType(readSymbolRef(), readTypes()) case METHODtpe => MethodTypeRef(readTypeRef(), readSymbols()) @@ -441,6 +441,7 @@ abstract class UnPickler { case LITERALnull => Constant(null) case LITERALclass => Constant(readTypeRef()) case LITERALenum => Constant(readSymbolRef()) + case LITERALsymbol => Constant(null) // TODO: needed until we have a STARR that does not emit it. case _ => noSuchConstantTag(tag, len) } } @@ -450,9 +451,9 @@ abstract class UnPickler { /** Read children and store them into the corresponding symbol. */ - protected def readChildren() { + protected def readChildren(): Unit = { val tag = readByte() - assert(tag == CHILDREN) + assert(tag == CHILDREN, "Entry must be children") val end = readEnd() val target = readSymbolRef() while (readIndex != end) target addChild readSymbolRef() @@ -462,9 +463,9 @@ abstract class UnPickler { * as a Constant or a Tree. 
*/ protected def readAnnotArg(i: Int): Tree = bytes(index(i)) match { - case TREE => at(i, readTree) + case TREE => at(i, () => readTree()) case _ => - val const = at(i, readConstant) + val const = at(i, () => readConstant()) Literal(const) setType const.tpe } @@ -473,12 +474,12 @@ abstract class UnPickler { private def readArrayAnnot() = { readByte() // skip the `annotargarray` tag val end = readEnd() - until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag) + until(end, () => readClassfileAnnotArg(readNat())).toArray } protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match { - case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation)) + case ANNOTINFO => NestedAnnotArg(at(i, () => readAnnotation())) case ANNOTARGARRAY => at(i, () => ArrayAnnotArg(readArrayAnnot())) - case _ => LiteralAnnotArg(at(i, readConstant)) + case _ => LiteralAnnotArg(at(i, () => readConstant())) } /** Read an AnnotationInfo. Not to be called directly, use @@ -491,7 +492,7 @@ abstract class UnPickler { while (readIndex != end) { val argref = readNat() if (isNameEntry(argref)) { - val name = at(argref, readName) + val name = at(argref, () => readName()) val arg = readClassfileAnnotArg(readNat()) assocs += ((name, arg)) } @@ -504,7 +505,7 @@ abstract class UnPickler { /** Read an annotation and as a side effect store it into * the symbol it requests. Called at top-level, for all * (symbol, annotInfo) entries. */ - protected def readSymbolAnnotation() { + protected def readSymbolAnnotation(): Unit = { expect(SYMANNOT, "symbol annotation expected") val end = readEnd() val target = readSymbolRef() @@ -525,15 +526,6 @@ abstract class UnPickler { @inline def all[T](body: => T): List[T] = until(end, () => body) @inline def rep[T](body: => T): List[T] = times(readNat(), () => body) - // !!! What is this doing here? 
- def fixApply(tree: Apply, tpe: Type): Apply = { - val Apply(fun, args) = tree - if (fun.symbol.isOverloaded) { - fun setType fun.symbol.info - inferMethodAlternative(fun, args map (_.tpe), tpe) - } - tree - } def ref() = readTreeRef() def caseRef() = readCaseDefRef() def modsRef() = readModifiersRef() @@ -552,60 +544,72 @@ abstract class UnPickler { } def selectorsRef() = all(ImportSelector(nameRef(), -1, nameRef(), -1)) - /** A few of the most popular trees have been pulled to the top for - * switch efficiency purposes. + // For ASTs we pickle the `tpe` and the `symbol`. References to symbols (`EXTref`) are pickled as owner + name, + // which means overloaded symbols cannot be resolved. + // This method works around that by selecting the overload based on the tree type. + def fixOverload(t: Tree, tpe: Type): Unit = t match { + case sel: Select => + if (sel.symbol.isOverloaded) { + val qt = sel.qualifier.tpe + sel.symbol.alternatives.find(alt => qt.memberType(alt).matches(tpe)).foreach(sel.setSymbol) + } + case _ => + } + + /* A few of the most popular trees have been pulled to the top for + * switch efficiency purposes. */ - def readTree(tpe: Type): Tree = (tag: @switch) match { - case IDENTtree => Ident(nameRef) - case SELECTtree => Select(ref, nameRef) - case APPLYtree => fixApply(Apply(ref, all(ref)), tpe) // !!! 
- case BINDtree => Bind(nameRef, ref) - case BLOCKtree => all(ref) match { case stats :+ expr => Block(stats, expr) } - case IFtree => If(ref, ref, ref) - case LITERALtree => Literal(constRef) - case TYPEAPPLYtree => TypeApply(ref, all(ref)) - case TYPEDtree => Typed(ref, ref) - case ALTERNATIVEtree => Alternative(all(ref)) - case ANNOTATEDtree => Annotated(ref, ref) - case APPLIEDTYPEtree => AppliedTypeTree(ref, all(ref)) - case APPLYDYNAMICtree => ApplyDynamic(ref, all(ref)) - case ARRAYVALUEtree => ArrayValue(ref, all(ref)) - case ASSIGNtree => Assign(ref, ref) - case CASEtree => CaseDef(ref, ref, ref) - case CLASStree => ClassDef(modsRef, typeNameRef, rep(tparamRef), implRef) - case COMPOUNDTYPEtree => CompoundTypeTree(implRef) - case DEFDEFtree => DefDef(modsRef, termNameRef, rep(tparamRef), rep(rep(vparamRef)), ref, ref) - case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(memberRef)) - case FUNCTIONtree => Function(rep(vparamRef), ref) - case IMPORTtree => Import(ref, selectorsRef) - case LABELtree => LabelDef(termNameRef, rep(idRef), ref) - case MATCHtree => Match(ref, all(caseRef)) - case MODULEtree => ModuleDef(modsRef, termNameRef, implRef) - case NEWtree => New(ref) - case PACKAGEtree => PackageDef(refTreeRef, all(ref)) - case RETURNtree => Return(ref) - case SELECTFROMTYPEtree => SelectFromTypeTree(ref, typeNameRef) - case SINGLETONTYPEtree => SingletonTypeTree(ref) - case STARtree => Star(ref) - case SUPERtree => Super(ref, typeNameRef) - case TEMPLATEtree => Template(rep(ref), vparamRef, all(ref)) - case THIStree => This(typeNameRef) - case THROWtree => Throw(ref) - case TREtree => Try(ref, rep(caseRef), ref) - case TYPEBOUNDStree => TypeBoundsTree(ref, ref) - case TYPEDEFtree => TypeDef(modsRef, typeNameRef, rep(tparamRef), ref) + def readTree(): Tree = (tag: @switch) match { + case IDENTtree => Ident(nameRef()) + case SELECTtree => Select(ref(), nameRef()) + case APPLYtree => Apply(ref(), all(ref())) + case BINDtree => Bind(nameRef(), ref()) 
+ case BLOCKtree => all(ref()) match { case stats :+ expr => Block(stats, expr) case x => throw new MatchError(x) } + case IFtree => If(ref(), ref(), ref()) + case LITERALtree => Literal(constRef()) + case TYPEAPPLYtree => TypeApply(ref(), all(ref())) + case TYPEDtree => Typed(ref(), ref()) + case ALTERNATIVEtree => Alternative(all(ref())) + case ANNOTATEDtree => Annotated(ref(), ref()) + case APPLIEDTYPEtree => AppliedTypeTree(ref(), all(ref())) + case APPLYDYNAMICtree => ApplyDynamic(ref(), all(ref())) + case ARRAYVALUEtree => ArrayValue(ref(), all(ref())) + case ASSIGNtree => Assign(ref(), ref()) + case CASEtree => CaseDef(ref(), ref(), ref()) + case CLASStree => ClassDef(modsRef(), typeNameRef(), rep(tparamRef()), implRef()) + case COMPOUNDTYPEtree => CompoundTypeTree(implRef()) + case DEFDEFtree => DefDef(modsRef(), termNameRef(), rep(tparamRef()), rep(rep(vparamRef())), ref(), ref()) + case EXISTENTIALTYPEtree => ExistentialTypeTree(ref(), all(memberRef())) + case FUNCTIONtree => Function(rep(vparamRef()), ref()) + case IMPORTtree => Import(ref(), selectorsRef()) + case LABELtree => LabelDef(termNameRef(), rep(idRef()), ref()) + case MATCHtree => Match(ref(), all(caseRef())) + case MODULEtree => ModuleDef(modsRef(), termNameRef(), implRef()) + case NEWtree => New(ref()) + case PACKAGEtree => PackageDef(refTreeRef(), all(ref())) + case RETURNtree => Return(ref()) + case SELECTFROMTYPEtree => SelectFromTypeTree(ref(), typeNameRef()) + case SINGLETONTYPEtree => SingletonTypeTree(ref()) + case STARtree => Star(ref()) + case SUPERtree => Super(ref(), typeNameRef()) + case TEMPLATEtree => Template(rep(ref()), vparamRef(), all(ref())) + case THIStree => This(typeNameRef()) + case THROWtree => Throw(ref()) + case TREtree => Try(ref(), rep(caseRef()), ref()) + case TYPEBOUNDStree => TypeBoundsTree(ref(), ref()) + case TYPEDEFtree => TypeDef(modsRef(), typeNameRef(), rep(tparamRef()), ref()) case TYPEtree => TypeTree() - case UNAPPLYtree => UnApply(ref, all(ref)) - 
case VALDEFtree => ValDef(modsRef, termNameRef, ref, ref) + case UNAPPLYtree => UnApply(ref(), all(ref())) + case VALDEFtree => ValDef(modsRef(), termNameRef(), ref(), ref()) case _ => noSuchTreeTag(tag, end) } val tpe = readTypeRef() val sym = if (isTreeSymbolPickled(tag)) readSymbolRef() else null - val result = readTree(tpe) + val result = readTree() - if (sym ne null) result setSymbol sym - result setType tpe + if (sym ne null) fixOverload(result.setSymbol(sym), tpe) + result.setType(tpe) } /* Read an abstract syntax tree */ @@ -650,12 +654,12 @@ abstract class UnPickler { r.asInstanceOf[Symbol] } - protected def readNameRef(): Name = at(readNat(), readName) - protected def readTypeRef(): Type = at(readNat(), () => readType()) // after the NMT_TRANSITION period, we can leave off the () => ... () - protected def readConstantRef(): Constant = at(readNat(), readConstant) - protected def readAnnotationRef(): AnnotationInfo = at(readNat(), readAnnotation) - protected def readModifiersRef(): Modifiers = at(readNat(), readModifiers) - protected def readTreeRef(): Tree = at(readNat(), readTree) + protected def readNameRef(): Name = at(readNat(), () => readName()) + protected def readTypeRef(): Type = at(readNat(), () => readType()) + protected def readConstantRef(): Constant = at(readNat(), () => readConstant()) + protected def readAnnotationRef(): AnnotationInfo = at(readNat(), () => readAnnotation()) + protected def readModifiersRef(): Modifiers = at(readNat(), () => readModifiers()) + protected def readTreeRef(): Tree = at(readNat(), () => readTree()) protected def readTypeNameRef(): TypeName = readNameRef().toTypeName @@ -699,8 +703,6 @@ abstract class UnPickler { protected def errorBadSignature(msg: String) = throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg) - def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) {} // can't do it; need a compiler for that. 
- def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i) def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType = new LazyTypeRefAndAlias(i, j) @@ -715,29 +717,17 @@ abstract class UnPickler { /** A lazy type which when completed returns type at index `i`. */ private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter { - private val definedAtRunId = currentRunId - private val p = phase + private[this] val definedAtRunId = currentRunId + private[this] val p = phase protected def completeInternal(sym: Symbol) : Unit = try { completingStack += sym - val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType` - - // This is a temporary fix allowing to read classes generated by an older, buggy pickler. - // See the generation of the LOCAL_CHILD class in Pickler.scala. In an earlier version, the - // pickler did not add the ObjectTpe superclass, it used a trait as the first parent. This - // tripped an assertion in AddInterfaces which checks that the first parent is not a trait. - // This workaround can probably be removed in 2.12, because the 2.12 compiler is supposed - // to only read classfiles generated by 2.12. 
- val fixLocalChildTp = if (sym.rawname == tpnme.LOCAL_CHILD) tp match { - case ClassInfoType(superClass :: traits, decls, typeSymbol) if superClass.typeSymbol.isTrait => - ClassInfoType(definitions.ObjectTpe :: superClass :: traits, decls, typeSymbol) - case _ => tp - } else tp + val tp = at(i, () => readType()) if (p ne null) { - slowButSafeEnteringPhase(p)(sym setInfo fixLocalChildTp) + slowButSafeEnteringPhase(p)(sym setInfo tp) } if (currentRunId != definedAtRunId) - sym.setInfo(adaptToNewRunMap(fixLocalChildTp)) + sym.setInfo(adaptToNewRunMap(tp)) } catch { case e: MissingRequirementError => throw toTypeError(e) @@ -749,7 +739,7 @@ abstract class UnPickler { completeInternal(sym) if (!isCompilerUniverse) markAllCompleted(sym) } - override def load(sym: Symbol) { complete(sym) } + override def load(sym: Symbol): Unit = { complete(sym) } } /** A lazy type which when completed returns type at index `i` and sets alias @@ -759,7 +749,7 @@ abstract class UnPickler { override def completeInternal(sym: Symbol) = try { super.completeInternal(sym) - var alias = at(j, readSymbol) + var alias = at(j, () => readSymbol()) if (alias.isOverloaded) { alias = slowButSafeEnteringPhase(picklerPhase)(alias suchThat { alt => diff --git a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala index 03c4dea76bc9..a964b9967728 100644 --- a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,18 +14,17 @@ package scala package reflect.internal package settings -/** A Settings abstraction boiled out of the original highly mutable Settings - * class with the intention of creating an ImmutableSettings which can be used - * interchangeably. Except of course without the mutants. +/** Abstract settings, which is refined for `reflect` and `nsc`. */ - trait AbsSettings { - type Setting <: AbsSettingValue // Fix to the concrete Setting type + /** Subclasses should further constrain Setting. */ + type Setting <: AbsSettingValue + + /** A setting is a value which may have been specified by the user. */ trait AbsSettingValue { type T <: Any def value: T def isDefault: Boolean } } - diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index eab662c751b9..56ff2990a36c 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -47,11 +47,8 @@ abstract class MutableSettings extends AbsSettings { } def async: BooleanSetting - def Xexperimental: BooleanSetting - def XfullLubs: BooleanSetting def XnoPatmatAnalysis: BooleanSetting def Xprintpos: BooleanSetting - def strictInference: BooleanSetting def Yposdebug: BooleanSetting def Yrangepos: BooleanSetting def Yshowsymowners: BooleanSetting @@ -60,35 +57,32 @@ abstract class MutableSettings extends AbsSettings { def debug: BooleanSetting def developer: BooleanSetting def explaintypes: BooleanSetting - def overrideObjects: BooleanSetting def printtypes: BooleanSetting def uniqid: BooleanSetting def verbose: BooleanSetting - def YpartialUnification: BooleanSetting - def Yvirtpatmat: BooleanSetting - // Define them returning a `Boolean` to avoid breaking bincompat change - // TODO: Add these fields typed as `BooleanSetting` for 2.13.x - def YhotStatisticsEnabled: Boolean = false - def YstatisticsEnabled: Boolean = false + def YhotStatisticsEnabled: BooleanSetting + def YstatisticsEnabled: BooleanSetting def Yrecursion: IntSetting - def maxClassfileName: IntSetting - - def isScala211: Boolean - def isScala212: Boolean - private[scala] def isScala213: Boolean } object MutableSettings { import scala.language.implicitConversions - /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */ + /** Support the common use case, `if (settings.debug) println("Hello, martin.")`. + * + * Unfortunately, due to the way the `Settings` hierarchy is structured, this abstraction incurs boxing. + * Although boxing the Boolean primitive may be a trivial cost for a single invocation, + * it is significant for a test in a hot spot. Therefore, this method is deprecated. + * For the convenience of plugin authors, it has not been removed outright. 
+ */ + @deprecated("Use `setting.value` directly to avoid boxing.", since="2.13.9") @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { - @inline final def areStatisticsEnabled = (StatisticsStatics.COLD_STATS_GETTER.invokeExact(): Boolean) && settings.YstatisticsEnabled - @inline final def areHotStatisticsEnabled = (StatisticsStatics.HOT_STATS_GETTER.invokeExact(): Boolean) && settings.YhotStatisticsEnabled - @inline final def isDebug: Boolean = (StatisticsStatics.DEBUG_GETTER.invokeExact(): Boolean) && settings.debug - @inline final def isDeveloper: Boolean = (StatisticsStatics.DEVELOPER_GETTER.invokeExact(): Boolean) && settings.developer + @inline final def areStatisticsEnabled = (StatisticsStatics.COLD_STATS_GETTER.invokeExact(): Boolean) && settings.YstatisticsEnabled.value + @inline final def areHotStatisticsEnabled = (StatisticsStatics.HOT_STATS_GETTER.invokeExact(): Boolean) && settings.YhotStatisticsEnabled.value + @inline final def isDebug: Boolean = (StatisticsStatics.DEBUG_GETTER.invokeExact(): Boolean) && settings.debug.value + @inline final def isDeveloper: Boolean = (StatisticsStatics.DEVELOPER_GETTER.invokeExact(): Boolean) && settings.developer.value } } diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala index 7c0d353e4479..d97fb426a5d5 100644 --- a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala +++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -27,23 +27,20 @@ private[internal] trait CommonOwners { * of thistype or prefixless typerefs/singletype occurrences in given list * of types. 
*/ - protected[internal] def commonOwner(tps: List[Type]): Symbol = { + protected[internal] def commonOwner(tps: List[Type]): Symbol = if (tps.isEmpty) NoSymbol else { commonOwnerMap.clear() - tps foreach (commonOwnerMap traverse _) + tps foreach (commonOwnerMap) if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol } - } protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj - protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] { - var result: Symbol = _ - - def clear() { result = null } + protected class CommonOwnerMap extends TypeCollector[Symbol](null) { + def clear(): Unit = { result = null } - private def register(sym: Symbol) { + private def register(sym: Symbol): Unit = { // First considered type is the trivial result. if ((result eq null) || (sym eq NoSymbol)) result = sym @@ -51,11 +48,11 @@ private[internal] trait CommonOwners { while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result)) result = result.owner } - def traverse(tp: Type) = tp.normalize match { + def apply(tp: Type) = tp.normalize match { case ThisType(sym) => register(sym) - case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse + case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach apply case SingleType(NoPrefix, sym) => register(sym.owner) - case _ => mapOver(tp) + case _ => tp.foldOver(this) } } diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index 1a58aed24bc0..d131bcddcd27 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,6 +15,7 @@ package tpe import util.ReusableInstance import Flags._ +import scala.runtime.Statics.releaseFence trait FindMembers { this: SymbolTable => @@ -215,9 +216,9 @@ trait FindMembers { } } - private[reflect] final class FindMembers(tpe: Type, excludedFlags: Long, requiredFlags: Long) + private[reflect] final class FindMembers(tpe0: Type, excludedFlags0: Long, requiredFlags0: Long) extends FindMemberBase[Scope]() { - init(tpe, nme.ANYname, excludedFlags, requiredFlags) + init(tpe0, nme.ANYname, excludedFlags0, requiredFlags0) private[this] var _membersScope: Scope = null private def membersScope: Scope = { @@ -241,7 +242,7 @@ trait FindMembers { if (isNew) members.enter(sym) } } - private[reflect] val findMemberInstance: ReusableInstance[FindMember] = new ReusableInstance(() => new FindMember, enabled = isCompilerUniverse) + private[reflect] val findMemberInstance: ReusableInstance[FindMember] = ReusableInstance(new FindMember, enabled = isCompilerUniverse) private[reflect] final class FindMember extends FindMemberBase[Symbol] { @@ -296,7 +297,7 @@ trait FindMembers { } if (isNew) { val lastM1 = new ::(sym, null) - lastM.tl = lastM1 + lastM.next = lastM1 lastM = lastM1 } } @@ -304,7 +305,7 @@ trait FindMembers { // Cache for the member type of the first member we find. 
private[this] var _member0Tpe: Type = null private[this] def member0Tpe: Type = { - assert(member0 != null) + assert(member0 != null, "member0 must not be null for member type") if (_member0Tpe eq null) _member0Tpe = self.memberType(member0) _member0Tpe } @@ -320,13 +321,14 @@ trait FindMembers { } else member0 } else { if (settings.areStatisticsEnabled) statistics.incCounter(multMemberCount) - lastM.tl = Nil + lastM.next = Nil + releaseFence() initBaseClasses.head.newOverloaded(tpe, members) } } - private[scala] final class HasMember(tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) extends FindMemberBase[Boolean] { - init(tpe, name, excludedFlags, requiredFlags) + private[scala] final class HasMember(tpe0: Type, name0: Name, excludedFlags0: Long, requiredFlags0: Long) extends FindMemberBase[Boolean] { + init(tpe0, name0, excludedFlags0, requiredFlags0) private[this] var _result = false override protected def result: Boolean = _result diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 8ae68044ef76..eb57d1cf1d64 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,7 @@ package reflect package internal package tpe -import scala.collection.mutable +import scala.collection.mutable, mutable.ListBuffer import scala.annotation.tailrec import Variance._ @@ -25,13 +25,12 @@ private[internal] trait GlbLubs { import definitions._ import statistics._ - private final val printLubs = scala.sys.props contains "scalac.debug.lub" - private final val strictInference = settings.strictInference + private final val printLubs = System.getProperty("scalac.debug.lub") != null /** In case anyone wants to turn off lub verification without reverting anything. */ private final val verifyLubs = true - private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Depth) { + private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Depth): Unit = { import util.TableDef import TableDef.Column def str(tp: Type) = { @@ -58,57 +57,26 @@ private[internal] trait GlbLubs { println("** Depth is " + depth + "\n" + formatted) } - /** From a list of types, find any which take type parameters - * where the type parameter bounds contain references to other - * any types in the list (including itself.) - * - * @return List of symbol pairs holding the recursive type - * parameter and the parameter which references it. - */ - def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = { - if (ts.isEmpty) Nil - else { - val sym = ts.head.typeSymbol - require(ts.tail forall (_.typeSymbol == sym), ts) - for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield - p -> in - } - } - - // only called when strictInference - private def willViolateRecursiveBounds(tp: Type, ts: List[Type], tsElimSub: List[Type]) = { - val typeSym = ts.head.typeSymbol // we're uniform, the `.head` is as good as any. 
- def fbounds = findRecursiveBounds(ts) map (_._2) - def isRecursive = typeSym.typeParams exists fbounds.contains - - isRecursive && (transposeSafe(tsElimSub map (_.normalize.typeArgs)) match { - case Some(arggsTransposed) => - val mergedTypeArgs = (tp match { case et: ExistentialType => et.underlying; case _ => tp}).typeArgs - exists3(typeSym.typeParams, mergedTypeArgs, arggsTransposed) { - (param, arg, lubbedArgs) => - val isExistential = arg.typeSymbol.isExistentiallyBound - val isInFBound = fbounds contains param - val wasLubbed = !lubbedArgs.exists(_ =:= arg) - (!isExistential && isInFBound && wasLubbed) - } - case None => false - }) - } - /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix), * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types: * * xs <= ys iff forall y in ys exists x in xs such that x <: y * - * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor) + * @param tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor) * (these type parameters may be referred to by type arguments in the BTS column of those types, * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params) - * @arg tsBts a matrix whose columns are basetype sequences + * @param tsBts a matrix whose columns are basetype sequences * the first row is the original list of types for which we're computing the lub * (except that type constructors have been applied to their dummyArgs) - * @See baseTypeSeq for a definition of sorted and upwards closed. + * @see baseTypeSeq for a definition of sorted and upwards closed. 
*/ - def lubList(ts: List[Type], depth: Depth): List[Type] = { + def lubList(ts: List[Type], depth: Depth): List[Type] = ts match { + case Nil => Nil + case ty :: Nil => ty.baseTypeSeq.toList + case _ => lubList_x(ts, depth) + } + + private[this] def lubList_x(ts: List[Type], depth: Depth): List[Type] = { var lubListDepth = Depth.Zero // This catches some recursive situations which would otherwise // befuddle us, e.g. pos/hklub0.scala @@ -120,72 +88,101 @@ private[internal] trait GlbLubs { logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor) case _ => tp } - // pretypes is a tail-recursion-preserving accumulator. - @tailrec - def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = { - lubListDepth = lubListDepth.incr - if (tsBts.isEmpty || tsBts.exists(_.isEmpty)) pretypes.reverse - else if (tsBts.tail.isEmpty) pretypes reverse_::: tsBts.head - else { + val baseTypeSeqs: Array[BaseTypeSeq] = mapToArray(ts)(_.baseTypeSeq) + val ices: Array[Int] = new Array[Int](baseTypeSeqs.length) + + def printLubMatrixAux(depth: Depth): Unit = { + val btsMap: Map[Type, List[Type]] = ts.zipWithIndex.map { + case (ty, ix) => ty -> baseTypeSeqs(ix).toList.drop(ices(ix)) + }.toMap + printLubMatrix(btsMap, depth) + } + + def headOf(ix: Int) = baseTypeSeqs(ix).rawElem(ices(ix)) + + val pretypes: ListBuffer[Type] = ListBuffer.empty[Type] + + var isFinished = false + while (!isFinished && ices(0) < baseTypeSeqs(0).length) { + lubListDepth = lubListDepth.incr + // Step 1: run through the List with these variables: + // 1) Is there any empty list? Are they equal or are we taking the smallest? + // isFinished: tsBts.exists(typeListIsEmpty) + // Is the frontier made up of types with the same symbol? 
+ var isUniformFrontier = true + var sym = headOf(0).typeSymbol + // var tsYs = tsBts + var ix = 0 + while (!isFinished && ix < baseTypeSeqs.length) { + if (ices(ix) == baseTypeSeqs(ix).length) + isFinished = true + else { + val btySym = headOf(ix).typeSymbol + isUniformFrontier = isUniformFrontier && (sym eq btySym) + if (btySym isLess sym) + sym = btySym + } + ix += 1 + } + // Produce a single type for this frontier by merging the prefixes and arguments of those + // typerefs that share the same symbol: that symbol is the current maximal symbol for which + // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before + // merging, strip targs that refer to bound tparams (when we're computing the lub of type + // constructors.) Also filter out all types that are a subtype of some other type. + if (!isFinished) { // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts. // Invariant: all symbols "under" (closer to the first row) the frontier // are smaller (according to _.isLess) than the ones "on and beyond" the frontier - val ts0 = tsBts map (_.head) - - // Is the frontier made up of types with the same symbol? - val isUniformFrontier = (ts0: @unchecked) match { - case t :: ts => ts forall (_.typeSymbol == t.typeSymbol) + val ts0 = { + var ys: List[Type] = Nil + var kx = baseTypeSeqs.length + while (kx > 0){ + kx -= 1 + ys = headOf(kx) :: ys + } + ys } - // Produce a single type for this frontier by merging the prefixes and arguments of those - // typerefs that share the same symbol: that symbol is the current maximal symbol for which - // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before - // merging, strip targs that refer to bound tparams (when we're computing the lub of type - // constructors.) Also filter out all types that are a subtype of some other type. 
if (isUniformFrontier) { - val tails = tsBts map (_.tail) - val ts1 = elimSub(ts0, depth) map elimHigherOrderTypeParam + val ts1 = elimSub(ts0, depth).map(elimHigherOrderTypeParam) mergePrefixAndArgs(ts1, Covariant, depth) match { - case NoType => loop(pretypes, tails) - case tp if strictInference && willViolateRecursiveBounds(tp, ts0, ts1) => - log(s"Breaking recursion in lublist, advancing frontier and discarding merged prefix/args from $tp") - loop(pretypes, tails) - case tp => - loop(tp :: pretypes, tails) + case NoType => + case tp => pretypes += tp + } + var jx = 0 + while (jx < baseTypeSeqs.length){ + ices(jx) += 1 + jx += 1 } } else { // frontier is not uniform yet, move it beyond the current minimal symbol; // lather, rinse, repeat - val sym = minSym(ts0) - val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts) + var jx = 0 + while (jx < baseTypeSeqs.length){ + if (headOf(jx).typeSymbol == sym) + ices(jx) += 1 + jx += 1 + } if (printLubs) { - val str = (newtps.zipWithIndex map { case (tps, idx) => - tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n") - }).mkString("") - - println("Frontier(\n" + str + ")") - printLubMatrix((ts zip tsBts).toMap, lubListDepth) + println { + baseTypeSeqs.zipWithIndex.map { case (tps, idx) => + tps.toList.drop(ices(idx)).map(" " + _).mkString(" (" + idx + ")\n", "\n", "\n") + } + .mkString("Frontier(\n", "", ")") + } + printLubMatrixAux(lubListDepth) } - - loop(pretypes, newtps) } } } - val initialBTSes = ts map (_.baseTypeSeq.toList) if (printLubs) - printLubMatrix((ts zip initialBTSes).toMap, depth) + printLubMatrixAux(depth) - loop(Nil, initialBTSes) + pretypes.toList } - /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). 
*/ - private def minSym(tps: List[Type]): Symbol = - (tps.head.typeSymbol /: tps.tail) { - (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1 - } - /** A minimal type list which has a given list of types as its base type sequence */ def spanningTypes(ts: List[Type]): List[Type] = ts match { case List() => List() @@ -194,34 +191,66 @@ private[internal] trait GlbLubs { rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol))) } - /** Eliminate from list of types all elements which are a supertype - * of some other element of the list. */ - private def elimSuper(ts: List[Type]): List[Type] = ts match { - case List() | List(_) => ts - case t :: ts1 => - val rest = elimSuper(ts1 filter (t1 => !(t <:< t1))) - if (rest exists (t1 => t1 <:< t)) rest else t :: rest - } + // OPT: hoist allocation of the collector and lambda out of the loop in partition + private val isWildCardOrNonGroundTypeVarCollector = new FindTypeCollector( { + case tv: TypeVar => !tv.isGround + case t => t.isWildcard + }) - /** Eliminate from list of types all elements which are a subtype - * of some other element of the list. */ - private def elimSub(ts: List[Type], depth: Depth): List[Type] = { - def elimSub0(ts: List[Type]): List[Type] = ts match { - case List() => ts - case List(t) => ts + /** From a list of types, retain only maximal types as determined by the partial order `po`. 
*/ + private def maxTypes(ts: List[Type])(po: (Type, Type) => Boolean): List[Type] = { + def stacked(ts: List[Type]): List[Type] = ts match { case t :: ts1 => - val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, depth.decr))) - if (rest exists (t1 => isSubType(t, t1, depth.decr))) rest else t :: rest + val ts2 = stacked(ts1.filterNot(po(_, t))) + if (ts2.exists(po(t, _))) ts2 else t :: ts2 + case Nil => Nil } - val ts0 = elimSub0(ts) - if (ts0.isEmpty || ts0.tail.isEmpty) ts0 - else { - val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden)) - if (ts1 eq ts0) ts0 - else elimSub(ts1, depth) + + // loop thru tails, filtering for survivors of po test with the current element, which is saved for later culling + @tailrec + def loop(survivors: List[Type], toCull: List[Type]): List[Type] = survivors match { + case h :: rest => + loop(rest.filterNot(po(_, h)), h :: toCull) + case _ => + // unwind the stack of saved elements, accumulating a result containing elements surviving po (in swapped order) + def sieve(res: List[Type], remaining: List[Type]): List[Type] = remaining match { + case h :: tail => + val res1 = if (res.exists(po(h, _))) res else h :: res + sieve(res1, tail) + case _ => res + } + toCull match { + case _ :: Nil => toCull + case _ => sieve(Nil, toCull) + } + } + + // The order here matters because type variables and wildcards can act both as subtypes and supertypes. + val sorted = { + val (wilds, ts1) = partitionConserve(ts)(isWildCardOrNonGroundTypeVarCollector.collect(_).isDefined) + ts1 ::: wilds } + if (sorted.lengthCompare(5) > 0) loop(sorted, Nil) + else stacked(sorted) } + /** Eliminate from list of types all elements which are a supertype + * of some other element of the list. */ + private def elimSuper(ts: List[Type]): List[Type] = + if (ts.lengthCompare(1) <= 0) ts + else maxTypes(ts)((t1, t2) => t2 <:< t1) + + /** Eliminate from list of types all elements which are a subtype + * of some other element of the list. 
*/ + @tailrec private def elimSub(ts: List[Type], depth: Depth): List[Type] = + if (ts.lengthCompare(1) <= 0) ts else { + val ts1 = maxTypes(ts)(isSubType(_, _, depth.decr)) + if (ts1.lengthCompare(1) <= 0) ts1 else { + val ts2 = ts1.mapConserve(t => elimAnonymousClass(t.dealiasWiden)) + if (ts1 eq ts2) ts1 else elimSub(ts2, depth) + } + } + /** Does this set of types have the same weak lub as * it does regular lub? This is exposed so lub callers * can discover whether the trees they are typing will @@ -251,16 +280,19 @@ private[internal] trait GlbLubs { lub(tps) ) + // Need to widen result when using isNumericSubType to compare. + // isNumericSubType considers types after dealiasWiden, so should perform same transform on return. + // Example unit test: `numericLub(0, 1) == Int` (without the dealiasWiden one of the types would be returned as-is...) def numericLub(ts: List[Type]) = ts reduceLeft ((t1, t2) => - if (isNumericSubType(t1, t2)) t2 - else if (isNumericSubType(t2, t1)) t1 + if (isNumericSubType(t1, t2)) t2.dealiasWiden + else if (isNumericSubType(t2, t1)) t1.dealiasWiden else IntTpe) - private val _lubResults = new mutable.HashMap[(Depth, List[Type]), Type] + private[this] val _lubResults = new mutable.HashMap[(Depth, List[Type]), Type] def lubResults = _lubResults - private val _glbResults = new mutable.HashMap[(Depth, List[Type]), Type] + private[this] val _glbResults = new mutable.HashMap[(Depth, List[Type]), Type] def glbResults = _glbResults def lub(ts: List[Type]): Type = ts match { @@ -291,14 +323,12 @@ private[internal] trait GlbLubs { } /** The least upper bound wrt <:< of a list of types */ - protected[internal] def lub(ts: List[Type], depth: Depth): Type = { + protected[internal] def lub(ts0: List[Type], depth: Depth): Type = { def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match { case List() => NothingTpe case List(t) => t - case ts @ PolyType(tparams, _) :: _ => - val tparams1 = map2(tparams, matchingBounds(ts, 
tparams).transpose)((tparam, bounds) => - tparam.cloneSymbol.setInfo(glb(bounds, depth))) - PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1))) + case (pt @ PolyType(_, _)) :: rest => + polyTypeMatch(pt, rest, depth, glb, lub0) case ts @ (mt @ MethodType(params, _)) :: rest => MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes))) case ts @ NullaryMethodType(_) :: rest => @@ -355,10 +385,12 @@ private[internal] trait GlbLubs { else if (symtypes.tail forall (symtypes.head =:= _)) proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head) else { - val lo = glb(symtypes map (_.lowerBound), depth.decr) - val hi = lub(symtypes map (_.upperBound), depth.decr) + val lubBs = TypeBounds( + glb(symtypes.map(_.lowerBound), depth.decr), + lub(symtypes.map(_.upperBound), depth.decr) + ) lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos) - .setInfoOwnerAdjusted(TypeBounds(lo, hi)) + .setInfoOwnerAdjusted(lubBs) } } } @@ -384,7 +416,7 @@ private[internal] trait GlbLubs { // In theory this should not be necessary, but higher-order type // parameters are not handled correctly. 
val ok = ts forall { t => - isSubType(t, lubRefined, depth) || { + isSubType(t, lubRefined, depth.decr) || { if (settings.isDebug || printLubs) { Console.println( "Malformed lub: " + lubRefined + "\n" + @@ -405,15 +437,15 @@ private[internal] trait GlbLubs { existentialAbstraction(tparams, dropIllegalStarTypes(lubType)) } if (printLubs) { - println(indent + "lub of " + ts + " at depth "+depth)//debug + println(indent + "lub of " + ts0 + " at depth "+depth)//debug indent = indent + " " - assert(indent.length <= 100) + assert(indent.length <= 100, "LUB is highly indented") } if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) - val res = lub0(ts) + val res = lub0(ts0) if (printLubs) { indent = indent stripSuffix " " - println(indent + "lub of " + ts + " is " + res)//debug + println(indent + "lub of " + ts0 + " is " + res)//debug } res } @@ -426,7 +458,7 @@ private[internal] trait GlbLubs { * The counter breaks this recursion after two calls. * If the recursion is broken, no member is added to the glb. */ - private var globalGlbDepth = Depth.Zero + private[this] var globalGlbDepth = Depth.Zero private final val globalGlbLimit = Depth(2) /** The greatest lower bound of a list of types (as determined by `<:<`). */ @@ -453,14 +485,12 @@ private[internal] trait GlbLubs { /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized * with regard to `elimSuper`. 
*/ - protected def glbNorm(ts: List[Type], depth: Depth): Type = { + protected def glbNorm(ts0: List[Type], depth: Depth): Type = { def glb0(ts0: List[Type]): Type = ts0 match { case List() => AnyTpe case List(t) => t - case ts @ PolyType(tparams, _) :: _ => - val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => - tparam.cloneSymbol.setInfo(lub(bounds, depth))) - PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth)) + case (pt @ PolyType(_, _)) :: rest => + polyTypeMatch(pt, rest, depth, lub, glb0) case ts @ (mt @ MethodType(params, _)) :: rest => MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth)) case ts @ NullaryMethodType(_) :: rest => @@ -483,7 +513,7 @@ private[internal] trait GlbLubs { val (ts, tparams) = stripExistentialsAndTypeVars(ts0) val glbOwner = commonOwner(ts) val ts1 = { - val res = mutable.ListBuffer.empty[Type] + val res = ListBuffer.empty[Type] def loop(ty: Type): Unit = ty match { case RefinedType(ps, _) => ps.foreach(loop) case _ => res += ty @@ -500,7 +530,7 @@ private[internal] trait GlbLubs { def glbsym(proto: Symbol): Symbol = { val prototp = glbThisType.memberInfo(proto) val symtypes: List[Type] = { - val res = mutable.ListBuffer.empty[Type] + val res = ListBuffer.empty[Type] ts foreach { t => t.nonPrivateMember(proto.name).alternatives foreach { alt => val mi = glbThisType.memberInfo(alt) @@ -510,7 +540,7 @@ private[internal] trait GlbLubs { } res.toList } - assert(!symtypes.isEmpty) + assert(!symtypes.isEmpty, "No types for GLB") proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted( if (proto.isTerm) glb(symtypes, depth.decr) else { @@ -561,13 +591,13 @@ private[internal] trait GlbLubs { existentialAbstraction(tparams, glbType) } catch { case GlbFailure => - if (ts forall (t => NullTpe <:< t)) NullTpe + if (ts0.forall(NullTpe <:< _)) NullTpe else NothingTpe } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = 
indent + " " } //DEBUG if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) - glb0(ts) + glb0(ts0) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG } @@ -576,31 +606,42 @@ private[internal] trait GlbLubs { * Returns list of list of bounds infos, where corresponding type * parameters are renamed to tparams. */ - private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = { - def getBounds(tp: Type): List[Type] = tp match { - case PolyType(tparams1, _) if sameLength(tparams1, tparams) => - tparams1 map (tparam => tparam.info.substSym(tparams1, tparams)) + private def polyTypeMatch( + ptHead: PolyType, + ptRest: List[Type], + depth: Depth, + infoBoundTop: (List[Type], Depth) => Type, + resultTypeBottom: List[Type] => Type + ): PolyType = { + val tparamsHead: List[Symbol] = ptHead.typeParams + + @tailrec + def normalizeIter(ty: Type): PolyType = ty match { + case pt @ PolyType(typeParams, _) if sameLength(typeParams, tparamsHead) => pt case tp => - if (tp ne tp.normalize) getBounds(tp.normalize) - else throw new NoCommonType(tps) + val tpn = tp.normalize + if (tp ne tpn) normalizeIter(tpn) else throw new NoCommonType(ptHead :: ptRest) } - tps map getBounds - } - /** All types in list must be polytypes with type parameter lists of - * same length as tparams. - * Returns list of instance types, where corresponding type - * parameters are renamed to tparams. 
- */ - private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = { - def transformResultType(tp: Type): Type = tp match { - case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) => - restpe.substSym(tparams1, tparams) - case tp => - if (tp ne tp.normalize) transformResultType(tp.normalize) - else throw new NoCommonType(tps) + // Since ptHead = PolyType(tparamsHead, _), no need to normalize it or unify tparams + val ntps: List[PolyType] = ptHead :: ptRest.map(normalizeIter) + + val tparams1: List[Symbol] = { + def unifyBounds(ntp: PolyType): List[Type] = { + val tparams1 = ntp.typeParams + tparams1 map (tparam => tparam.info.substSym(tparams1, tparamsHead)) + } + val boundsTts : List[List[Type]] = ntps.tail.map(unifyBounds).transpose + map2(tparamsHead, boundsTts){ (tparam, bounds) => + tparam.cloneSymbol.setInfo(infoBoundTop(tparam.info :: bounds, depth)) + } + } + // Do we also need to apply substSym(typeParams, tparams1) to ptHead.resultType ?? + val matchingInstTypes: List[Type] = ntps.map { ntp => + ntp.resultType.substSym(ntp.typeParams, tparams1) } - tps map transformResultType + + PolyType(tparams1, resultTypeBottom(matchingInstTypes)) } /** All types in list must be method types with equal parameter types. diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index abc8ebe9ad9d..0d46744d614f 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,9 +15,9 @@ package reflect package internal package tpe -import scala.collection.{ mutable } -import util.TriState import scala.annotation.tailrec +import scala.collection.mutable +import util.{StringContextStripMarginOps, TriState} trait TypeComparers { self: SymbolTable => @@ -27,7 +27,7 @@ trait TypeComparers { private final val LogPendingSubTypesThreshold = TypeConstants.DefaultLogThreshhold - private val _pendingSubTypes = new mutable.HashSet[SubTypePair] + private[this] val _pendingSubTypes = new mutable.HashSet[SubTypePair] def pendingSubTypes = _pendingSubTypes final case class SubTypePair(tp1: Type, tp2: Type) { @@ -39,10 +39,10 @@ trait TypeComparers { // // I added a tests to show that we detect the cycle: neg/t8146-no-finitary* - override def toString = tp1+" <: methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= subst(p2.info)) - && (res1 =:= subst(res2)) - ) + val substMap = SubstSymMap(tparams2, tparams1) + ( + (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= substMap(p2.info)) + && (res1 =:= substMap(res2)) + ) + } } // scala/bug#2066 This prevents overrides with incompatible variance in higher order type parameters. 
private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = { def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod) - !settings.isScala211 || ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance + ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance } private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) = - !settings.isScala211 || methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant + methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant def isSameType2(tp1: Type, tp2: Type): Boolean = { def retry() = { @@ -210,7 +213,7 @@ trait TypeComparers { * up any type constraints naive enough to get into their hot rods. */ def mutateNonTypeConstructs(lhs: Type, rhs: Type) = lhs match { - case BoundedWildcardType(bounds) => bounds containsType rhs + case pt: ProtoType => pt.registerTypeEquality(rhs) case tv @ TypeVar(_, _) => tv.registerTypeEquality(rhs, typeVarLHS = lhs eq tp1) case TypeRef(tv @ TypeVar(_, _), sym, _) => tv.registerTypeSelection(sym, rhs) case _ => false @@ -246,7 +249,8 @@ trait TypeComparers { case PolyType(ps1, res1) => tp2 match { case PolyType(ps2, res2) => equalTypeParamsAndResult(ps1, res1, ps2, res2) ; case _ => false } case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2) ; case _ => false } case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 eq sym2 ; case _ => false } - case ConstantType(c1) => tp2 match { case ConstantType(c2) => c1 == c2 ; case _ => false } + case FoldableConstantType(c1) => tp2 match { case FoldableConstantType(c2) => c1 == c2 ; case _ => false } + case LiteralType(c1) => tp2 match { case LiteralType(c2) => c1 == c2 ; case _ => false } case NullaryMethodType(res1) => tp2 match { case NullaryMethodType(res2) => res1 =:= res2 ; case _ => false 
} case TypeBounds(lo1, hi1) => tp2 match { case TypeBounds(lo2, hi2) => lo1 =:= lo2 && hi1 =:= hi2 ; case _ => false } case _ => false @@ -347,18 +351,16 @@ trait TypeComparers { val PolyType(tparams1, res1) = tp1 val PolyType(tparams2, res2) = tp2 - sameLength(tparams1, tparams2) && { + sameLength(tparams1, tparams2) && (tparams2 corresponds tparams1)(methodHigherOrderTypeParamsSubVariance) && { // fast-path: polymorphic method type -- type params cannot be captured val isMethod = tparams1.head.owner.isMethod //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) - def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes) - def sub2(tp: Type) = tp.substSym(tparams2, substitutes) - def cmp(p1: Symbol, p2: Symbol) = ( - methodHigherOrderTypeParamsSubVariance(p2, p1) - && sub2(p2.info) <:< sub1(p1.info) - ) + val sub1: Type => Type = if (isMethod) (tp => tp) else SubstSymMap(tparams1, substitutes) + val sub2: Type => Type = SubstSymMap(tparams2, substitutes) + + def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) } } @@ -379,36 +381,45 @@ trait TypeComparers { // @assume tp1.isHigherKinded || tp2.isHigherKinded def isHKSubType(tp1: Type, tp2: Type, depth: Depth): Boolean = { + def hkSubVariance(tparams1: List[Symbol], tparams2: List[Symbol]) = + (tparams1 corresponds tparams2)(methodHigherOrderTypeParamsSubVariance) + def isSubHKTypeVar(tp1: Type, tp2: Type) = (tp1, tp2) match { - case (tv1 @ TypeVar(_, _), tv2 @ TypeVar(_, _)) => - reporter.warning(tv1.typeSymbol.pos, - sm"""|compiler bug: Unexpected code path: testing two type variables for subtype relation: - | ${tv1} <:< ${tv2} - |Please report bug at https://github.com/scala/bug/issues - """.trim) - false - case (tp1, tv2 @ TypeVar(_, _)) => + case (tv1: TypeVar, tv2: TypeVar) => + 
devWarning(sm"Unexpected code path: testing two type variables for subtype relation: $tv1 <:< $tv2") + tv1 eq tv2 + case (_, tv2: TypeVar) => val ntp1 = tp1.normalize - (tv2.params corresponds ntp1.typeParams)(methodHigherOrderTypeParamsSubVariance) && - { tv2.addLoBound(ntp1); true } - case (tv1 @ TypeVar(_, _), tp2) => + val kindsMatch = (ntp1.typeSymbol eq AnyClass) || hkSubVariance(tv2.params, ntp1.typeParams) + if (kindsMatch) tv2.addLoBound(ntp1) + kindsMatch + case (tv1: TypeVar, _) => val ntp2 = tp2.normalize - (ntp2.typeParams corresponds tv1.params)(methodHigherOrderTypeParamsSubVariance) && - { tv1.addHiBound(ntp2); true } + val kindsMatch = (ntp2.typeSymbol eq NothingClass) || hkSubVariance(ntp2.typeParams, tv1.params) + if (kindsMatch) tv1.addHiBound(ntp2) + kindsMatch case _ => false } def isSub(tp1: Type, tp2: Type) = - settings.isScala213 && isSubHKTypeVar(tp1, tp2) || - isSub2(tp1.normalize, tp2.normalize) // @M! normalize reduces higher-kinded case to PolyType's + isSubHKTypeVar(tp1, tp2) || + isSub2(tp1.normalize, tp2.normalize) // @M! 
normalize reduces higher-kinded typeref to PolyType def isSub2(ntp1: Type, ntp2: Type) = (ntp1, ntp2) match { - case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side - case (_, TypeRef(_, NothingClass, _)) => false - case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType) - case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side - case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes + case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType) + case (WildcardType, _) | (_, WildcardType) => true // treat `?` as kind-polymorphic + case (TypeRef(_, AnyClass, _), _) | (_, TypeRef(_, NothingClass, _)) => false // avoid some warnings when Nothing/Any are on the other side + case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side + // TODO: rethink whether ExistentialType should be considered isHigherKinded when its underlying type is; + // in any case, we do need to handle one of the types being an existential + case (ntp1, et2: ExistentialType) => et2.withTypeVars(isSubType(ntp1, _, depth), depth) + case (et1: ExistentialType, ntp2) => + try { + skolemizationLevel += 1 + isSubType(et1.skolemizeExistential, ntp2, depth) + } finally { skolemizationLevel -= 1 } + case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s" devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}") false @@ -428,7 +439,7 @@ trait TypeComparers { private def isSubType2(tp1: Type, tp2: Type, depth: Depth): Boolean = { def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && 
isSubType(lhs, rhs, depth) - if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) + if (tp1.isInstanceOf[SingletonType] && tp2.isInstanceOf[SingletonType]) return (tp1 =:= tp2) || isThisAndSuperSubtype(tp1, tp2) || retry(tp1.underlying, tp2) if (tp1.isHigherKinded || tp2.isHigherKinded) @@ -448,7 +459,7 @@ trait TypeComparers { // These typerefs are pattern matched up and down far more // than is necessary. val sym1 = tr1.sym - val sym2 = tr2.sym + val sym2 = if (!phase.erasedTypes && (tr2 eq ObjectTpeJava)) AnyClass else tr2.sym val pre1 = tr1.pre val pre2 = tr2.pre (((if (sym1 eq sym2) phase.erasedTypes || sym1.rawowner.hasPackageFlag || isSubType(pre1, pre2, depth) @@ -472,11 +483,10 @@ trait TypeComparers { case AnnotatedType(_, _) => isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && annotationsConform(tp1, tp2) - case BoundedWildcardType(bounds) => - isSubType(tp1, bounds.hi, depth) + case tp2: ProtoType => tp2.isMatchedBy(tp1, depth) case tv2 @ TypeVar(_, constr2) => tp1 match { - case AnnotatedType(_, _) | BoundedWildcardType(_) => + case AnnotatedType(_, _) | _: ProtoType => secondTry case _ => tv2.registerBound(tp1, isLowerBound = true) @@ -486,19 +496,19 @@ trait TypeComparers { } /* Second try, on the left: - * - unwrap AnnotatedTypes, BoundedWildcardTypes, + * - ProtoType (usually a BoundedWildcardType) + * - unwrap AnnotatedTypes * - bind typevars, * - handle existential types by skolemization. */ def secondTry = tp1 match { + case pt: ProtoType => pt.canMatch(tp2, depth) case AnnotatedType(_, _) => isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && annotationsConform(tp1, tp2) - case BoundedWildcardType(bounds) => - isSubType(tp1.lowerBound, tp2, depth) case tv @ TypeVar(_,_) => tv.registerBound(tp2, isLowerBound = false) - case ExistentialType(_, _) => + case ExistentialType(_, _) => // TODO: fast initial try for tp1 and tp2 both existentials? 
(first try instantiating tp2's existentials to tp1's skolems?) try { skolemizationLevel += 1 isSubType(tp1.skolemizeExistential, tp2, depth) @@ -547,7 +557,7 @@ trait TypeComparers { val res2 = mt2.resultType (sameLength(params1, params2) && mt1.isImplicit == mt2.isImplicit && - matchingParams(params1, params2, mt1.isJava, mt2.isJava) && + matchingParams(params1, params2) && isSubType(res1.substSym(params1, params2), res2, depth)) // TODO: if mt1.params.isEmpty, consider NullaryMethodType? case _ => @@ -583,7 +593,7 @@ trait TypeComparers { case tr1 @ TypeRef(pre1, sym1, _) => def nullOnLeft = tp2 match { case TypeRef(_, sym2, _) => sym1 isBottomSubClass sym2 - case _ => isSingleType(tp2) && retry(tp1, tp2.widen) + case _ => isSingleType(tp2) && tp2.widen <:< AnyRefTpe && retry(tp1, tp2.widen) } sym1 match { diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index e42caeaf2644..40f8a2e59541 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,17 +15,17 @@ package reflect package internal package tpe -import scala.collection.{ generic } -import generic.Clearable import scala.collection.mutable.BitSet +import scala.collection.mutable.Clearable +import scala.reflect.internal.util.ReusableInstance private[internal] trait TypeConstraints { self: SymbolTable => import definitions._ /** A log of type variable with their original constraints. Used in order - * to undo constraints in the case of isSubType/isSameType failure. - */ + * to undo constraints in the case of isSubType/isSameType failure. 
+ */ private lazy val _undoLog = new UndoLog def undoLog = _undoLog @@ -44,7 +44,7 @@ private[internal] trait TypeConstraints { /** Undo all changes to constraints to type variables up to `limit`. */ //OPT this method is public so we can do `manual inlining` - def undoTo(limit: UndoPairs) { + def undoTo(limit: UndoPairs): Unit = { assertCorrectThread() while ((log ne limit) && log.nonEmpty) { val UndoPair(tv, constr) = log.head @@ -54,9 +54,9 @@ private[internal] trait TypeConstraints { } /** No sync necessary, because record should only - * be called from within an undo or undoUnless block, - * which is already synchronized. - */ + * be called from within an undo or undoUnless block, + * which is already synchronized. + */ private[reflect] def record(tv: TypeVar) = { log ::= UndoPair(tv, tv.constr.cloneInternal) } @@ -96,22 +96,26 @@ private[internal] trait TypeConstraints { */ /** Guard these lists against AnyClass and NothingClass appearing, - * else loBounds.isEmpty will have different results for an empty - * constraint and one with Nothing as a lower bound. [Actually - * guarding addLoBound/addHiBound somehow broke raw types so it - * only guards against being created with them.] - */ - private var lobounds = lo0 filterNot (_.isNothing) - private var hibounds = hi0 filterNot (_.isAny) - private var numlo = numlo0 - private var numhi = numhi0 - private var avoidWidening = avoidWidening0 + * else loBounds.isEmpty will have different results for an empty + * constraint and one with Nothing as a lower bound. [Actually + * guarding addLoBound/addHiBound somehow broke raw types so it + * only guards against being created with them.] 
+ */ + private[this] var lobounds = lo0 filterNot (_.isNothing) + private[this] var hibounds = hi0 filterNot (_.isAny) + private[this] var numlo = numlo0 + private[this] var numhi = numhi0 + private[this] var avoidWidening = avoidWidening0 def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds def avoidWiden: Boolean = avoidWidening + def stopWidening(): Unit = avoidWidening = true - def addLoBound(tp: Type, isNumericBound: Boolean = false) { + def stopWideningIfPrecluded(): Unit = + if (instValid && TypeVar.precludesWidening(inst)) stopWidening() + + def addLoBound(tp: Type, isNumericBound: Boolean = false): Unit = { // For some reason which is still a bit fuzzy, we must let Nothing through as // a lower bound despite the fact that Nothing is always a lower bound. My current // supposition is that the side-effecting type constraint accumulation mechanism @@ -120,7 +124,7 @@ private[internal] trait TypeConstraints { // See pos/t6367 and pos/t6499 for the competing test cases. val mustConsider = tp.typeSymbol match { case NothingClass => true - case _ => !(lobounds contains tp) + case _ => !lobounds.contains(tp) } if (mustConsider) { if (isNumericBound && isNumericValueType(tp)) { @@ -133,21 +137,18 @@ private[internal] trait TypeConstraints { } } - def checkWidening(tp: Type) { - if(tp.isStable) avoidWidening = true + def checkWidening(tp: Type): Unit = { + if (TypeVar.precludesWidening(tp)) stopWidening() else tp match { - case HasTypeMember(_, _) => avoidWidening = true + case HasTypeMember() => stopWidening() case _ => } } - def addHiBound(tp: Type, isNumericBound: Boolean = false) { + def addHiBound(tp: Type, isNumericBound: Boolean = false): Unit = { // My current test case only demonstrates the need to let Nothing through as // a lower bound, but I suspect the situation is symmetrical. 
- val mustConsider = tp.typeSymbol match { - case AnyClass => true - case _ => !(hibounds contains tp) - } + val mustConsider = typeIsAnyOrJavaObject(tp) || !(hibounds contains tp) if (mustConsider) { checkWidening(tp) if (isNumericBound && isNumericValueType(tp)) { @@ -186,7 +187,7 @@ private[internal] trait TypeConstraints { case tp :: Nil => " >: " + tp case tps => tps.mkString(" >: (", ", ", ")") } - val hi = hiBounds filterNot (_.isAny) match { + val hi = hiBounds filterNot typeIsAnyOrJavaObject match { case Nil => "" case tp :: Nil => " <: " + tp case tps => tps.mkString(" <: (", ", ", ")") @@ -198,6 +199,14 @@ private[internal] trait TypeConstraints { } } + private[this] val containsCollectorInstances: ReusableInstance[ContainsCollector] = ReusableInstance(new ContainsCollector(null), enabled = isCompilerUniverse) + + private[this] def containsSymbol(tp: Type, sym: Symbol): Boolean = + containsCollectorInstances.using { cc => + cc.reset(sym) + cc.collect(tp) + } + /** Solve constraint collected in types `tvars`. * * @param tvars All type variables to be instantiated. 
@@ -209,87 +218,81 @@ private[internal] trait TypeConstraints { def solve(tvars: List[TypeVar], tparams: List[Symbol], getVariance: Variance.Extractor[Symbol], upper: Boolean, depth: Depth): Boolean = { assert(tvars.corresponds(tparams)((tvar, tparam) => tvar.origin.typeSymbol eq tparam), (tparams, tvars.map(_.origin.typeSymbol))) val areContravariant: BitSet = BitSet.empty - foreachWithIndex(tparams){(tparam, ix) => - if (getVariance(tparam).isContravariant) areContravariant += ix - } + foreachWithIndex(tparams)((tparam, ix) => if (getVariance(tparam).isContravariant) areContravariant += ix) + + @inline def toBound(hi: Boolean, tparam: Symbol) = if (hi) tparam.info.upperBound else tparam.info.lowerBound - def solveOne(tvar: TypeVar, ix: Int): Unit = { - val tparam = tvar.origin.typeSymbol - val isContravariant = areContravariant(ix) + def solveOne(tvar: TypeVar, isContravariant: Boolean): Unit = if (tvar.constr.inst == NoType) { + tvar.constr.inst = null // mark tvar as being solved + val up = if (isContravariant) !upper else upper - tvar.constr.inst = null - val bound: Type = if (up) tparam.info.upperBound else tparam.info.lowerBound - //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound)) - var cyclic = bound contains tparam - foreachWithIndex(tvars){ (tvar2, jx) => - val tparam2 = tvar2.origin.typeSymbol - val ok = (tparam2 != tparam) && ( - (bound contains tparam2) - || up && (tparam2.info.lowerBound =:= tparam.tpeHK) - || !up && (tparam2.info.upperBound =:= tparam.tpeHK) - ) - if (ok) { - if (tvar2.constr.inst eq null) cyclic = true - solveOne(tvar2, jx) + val tparam = tvar.origin.typeSymbol + + // don't use =:= -- we just want to know whether the tparam occurs + // (using =:= may side-effect additional constraints / unify too much, e.g. 
with wildcard -- scala/bug#11558) + @inline def tvarIsBoundOf(tparamOther: Symbol) = + toBound(!up, tparamOther).dealias match { + case TypeRef(_, `tparam`, Nil) => true // make sure typeArgs.isEmpty: it gets complicated with type constructor variables -- don't flip those around + // TODO could add the PolyType equivalent for eta-expanded type constructors + case _ => false + } + + val bound = toBound(up, tparam) + var otherTypeVarBeingSolved = false + + // Solve other type vars, they are relevant when: + // - our current bound mentions the other tparam + // - our current tparam equals the other tparam's bound (we'll add the symmetric bound below) + foreachWithIndex(tvars) { (tvarOther, ix) => + val tparamOther = tvarOther.origin.typeSymbol + if ((tparamOther ne tparam) && containsSymbol(bound, tparamOther) || tvarIsBoundOf(tparamOther)) { + if (tvarOther.constr.inst eq null) otherTypeVarBeingSolved = true + solveOne(tvarOther, areContravariant(ix)) } } - if (!cyclic) { + + if (!(otherTypeVarBeingSolved || containsSymbol(bound, tparam))) { + val boundSym = bound.typeSymbol if (up) { - if (bound.typeSymbol != AnyClass) { - debuglog(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)") - tvar addHiBound bound.instantiateTypeParams(tparams, tvars) - } - for (tparam2 <- tparams) - tparam2.info.lowerBound.dealias match { - case TypeRef(_, `tparam`, _) => - debuglog(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") - tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) - case _ => - } + if (boundSym != AnyClass) + tvar.addHiBound(bound.instantiateTypeParams(tparams, tvars)) } else { - if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) { - debuglog(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)") - tvar addLoBound bound.instantiateTypeParams(tparams, tvars) + if (boundSym != tparam && boundSym != NothingClass) + tvar.addLoBound(bound.instantiateTypeParams(tparams, tvars)) + } + + // 
Derive more constraints for `tvar` from its symmetric occurrences in the bounds of other tparams. + tvars.foreach { tvarOther => + val tparamOther = tvarOther.origin.typeSymbol + if ((tparamOther ne tparam) && tvarIsBoundOf(tparamOther)) { + if (up) tvar.addHiBound(tvarOther) else tvar.addLoBound(tvarOther) } - for (tparam2 <- tparams) - tparam2.info.upperBound.dealias match { - case TypeRef(_, `tparam`, _) => - debuglog(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") - tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) - case _ => - } } } - tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar - //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))) - val newInst = ( - if (up) { - if (depth.isAnyDepth) glb(tvar.constr.hiBounds) - else glb(tvar.constr.hiBounds, depth) - } - else { - if (depth.isAnyDepth) lub(tvar.constr.loBounds) - else lub(tvar.constr.loBounds, depth) + tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar (about to lub/glb the bounds) + + val newInst = + if (up || tvar.constr.hiBounds.exists(isSingleType)) { // If we have a singleton upper bound then we should use it. 
+ if (depth.isAnyDepth) glb(tvar.constr.hiBounds) else glb(tvar.constr.hiBounds, depth) + } else { + if (depth.isAnyDepth) lub(tvar.constr.loBounds) else lub(tvar.constr.loBounds, depth) } - ) - debuglog(s"$tvar setInst $newInst") + // debuglog(s"$tvar setInst $newInst") tvar setInst newInst - //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG } - } // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info))) - foreachWithIndex(tvars)(solveOne) + foreachWithIndex(tvars)((tvar, i) => solveOne(tvar, areContravariant(i))) - def logBounds(tv: TypeVar) = log { - val what = if (!tv.instValid) "is invalid" else s"does not conform to bounds: ${tv.constr}" - s"Inferred type for ${tv.originString} (${tv.inst}) $what" - } +// def logBounds(tv: TypeVar) = log { +// val what = if (!tv.instValid) "is invalid" else s"does not conform to bounds: ${tv.constr}" +// s"Inferred type for ${tv.originString} (${tv.inst}) $what" +// } - tvars forall (tv => tv.instWithinBounds || util.andFalse(logBounds(tv))) + tvars forall (_.instWithinBounds) // || logBounds(tv) } } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index c5b972f37548..09bb8fb7bd34 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,10 +15,12 @@ package reflect package internal package tpe -import scala.collection.{ mutable, immutable } +import scala.annotation.{nowarn, tailrec} +import scala.collection.{immutable, mutable} +import scala.collection.mutable.ListBuffer import Flags._ -import scala.annotation.tailrec import Variance._ +import util.StringContextStripMarginOps private[internal] trait TypeMaps { self: SymbolTable => @@ -29,11 +31,11 @@ private[internal] trait TypeMaps { * so it is no longer carries the too-stealthy name "deAlias". */ object normalizeAliases extends TypeMap { - def apply(tp: Type): Type = mapOver(tp match { + def apply(tp: Type): Type = (tp match { case TypeRef(_, sym, _) if sym.isAliasType && tp.isHigherKinded => logResult(s"Normalized type alias function $tp")(tp.normalize) case TypeRef(_, sym, _) if sym.isAliasType => tp.normalize case tp => tp - }) + }).mapOver(this) } /** Remove any occurrence of type from this type and its parents */ @@ -45,22 +47,23 @@ private[internal] trait TypeMaps { case tp1 @ RefinedType(parents, decls) => parents filter (_.typeSymbol != SingletonClass) match { case Nil => AnyTpe - case p :: Nil if decls.isEmpty => mapOver(p) - case ps => mapOver(copyRefinedType(tp1, ps, decls)) + case p :: Nil if decls.isEmpty => p.mapOver(this) + case ps => copyRefinedType(tp1, ps, decls).mapOver(this) } case tp1 => - mapOver(tp1) + tp1.mapOver(this) } } } /** Type with all top-level occurrences of abstract types replaced by their bounds */ object abstractTypesToBounds extends TypeMap { + @tailrec def apply(tp: Type): Type = tp match { case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias) case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.upperBound) case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls) - case AnnotatedType(_, _) => mapOver(tp) + case AnnotatedType(_, _) => tp.mapOver(this) case _ => tp // no recursion - top level only } } @@ -78,7 +81,7 @@ private[internal] trait 
TypeMaps { case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg) case _ => - mapOver(tp) + tp.mapOver(this) } } @@ -99,138 +102,11 @@ private[internal] trait TypeMaps { /** A prototype for mapping a function over all possible types */ - abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) { - def this() = this(trackVariance = false) + abstract class TypeMap extends (Type => Type) { def apply(tp: Type): Type - private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant - - def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x } - def variance = _variance - /** Map this function over given type */ - def mapOver(tp: Type): Type = tp match { - case tr @ TypeRef(pre, sym, args) => - val pre1 = this(pre) - val args1 = ( - if (trackVariance && args.nonEmpty && !variance.isInvariant) { - val tparams = sym.typeParams - if (tparams.isEmpty) - args mapConserve this - else - mapOverArgs(args, tparams) - } else { - args mapConserve this - } - ) - if ((pre1 eq pre) && (args1 eq args)) tp - else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1) - case ThisType(_) => tp - case SingleType(pre, sym) => - if (sym.isPackageClass) tp // short path - else { - val pre1 = this(pre) - if (pre1 eq pre) tp - else singleType(pre1, sym) - } - case MethodType(params, result) => - val params1 = flipped(mapOver(params)) - val result1 = this(result) - if ((params1 eq params) && (result1 eq result)) tp - else copyMethodType(tp, params1, result1.substSym(params, params1)) - case PolyType(tparams, result) => - val tparams1 = flipped(mapOver(tparams)) - val result1 = this(result) - if ((tparams1 eq tparams) && (result1 eq result)) tp - else PolyType(tparams1, result1.substSym(tparams, tparams1)) - case NullaryMethodType(result) => - val result1 = this(result) - if (result1 eq result) tp - else NullaryMethodType(result1) - case ConstantType(_) => tp - case SuperType(thistp, supertp) => - val thistp1 = this(thistp) - val supertp1 
= this(supertp) - if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp - else SuperType(thistp1, supertp1) - case TypeBounds(lo, hi) => - val lo1 = flipped(this(lo)) - val hi1 = this(hi) - if ((lo1 eq lo) && (hi1 eq hi)) tp - else TypeBounds(lo1, hi1) - case BoundedWildcardType(bounds) => - val bounds1 = this(bounds) - if (bounds1 eq bounds) tp - else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds]) - case rtp @ RefinedType(parents, decls) => - val parents1 = parents mapConserve this - val decls1 = mapOver(decls) - copyRefinedType(rtp, parents1, decls1) - case ExistentialType(tparams, result) => - val tparams1 = mapOver(tparams) - val result1 = this(result) - if ((tparams1 eq tparams) && (result1 eq result)) tp - else newExistentialType(tparams1, result1.substSym(tparams, tparams1)) - case OverloadedType(pre, alts) => - val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre) - if (pre1 eq pre) tp - else OverloadedType(pre1, alts) - case AntiPolyType(pre, args) => - val pre1 = this(pre) - val args1 = args mapConserve this - if ((pre1 eq pre) && (args1 eq args)) tp - else AntiPolyType(pre1, args1) - case tv@TypeVar(_, constr) => - if (constr.instValid) this(constr.inst) - else { - val args = tv.typeArgs - val args1 = mapOverArgs(args, tv.params) //@M !args.isEmpty implies !typeParams.isEmpty - if (args1 eq args) tv - else tv.applyArgs(args1) - } - case AnnotatedType(annots, atp) => - val annots1 = mapOverAnnotations(annots) - val atp1 = this(atp) - if ((annots1 eq annots) && (atp1 eq atp)) tp - else if (annots1.isEmpty) atp1 - else AnnotatedType(annots1, atp1) - /* - case ErrorType => tp - case WildcardType => tp - case NoType => tp - case NoPrefix => tp - case ErasedSingleType(sym) => tp - */ - case _ => - tp - // throw new Error("mapOver inapplicable for " + tp); - } - - @inline final def withVariance[T](v: Variance)(body: => T): T = { - val saved = variance - variance = v - try body finally variance = saved - } - @inline final def flipped[T](body: 
=> T): T = { - if (trackVariance) variance = variance.flip - try body - finally if (trackVariance) variance = variance.flip - } - protected final def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = ( - if (trackVariance) - map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg))) - else - args mapConserve this - ) - /** Applies this map to the symbol's info, setting variance = Invariant - * if necessary when the symbol is an alias. - */ - private def applyToSymbolInfo(sym: Symbol, info: Type): Type = { - if (trackVariance && !variance.isInvariant && sym.isAliasType) - withVariance(Invariant)(this(info)) - else - this(info) - } + def mapOver(tp: Type): Type = if (tp eq null) tp else tp.mapOver(this) /** The index of the first symbol in `origSyms` which would have its info * transformed by this type map. @@ -245,6 +121,7 @@ private[internal] trait TypeMaps { } loop(0, origSyms) } + protected def applyToSymbolInfo(sym: Symbol, info: Type): Type = this(info) /** Map this function over given scope */ def mapOver(scope: Scope): Scope = { @@ -270,7 +147,7 @@ private[internal] trait TypeMaps { def mapOver(annot: AnnotationInfo): AnnotationInfo = { val AnnotationInfo(atp, args, assocs) = annot - val atp1 = mapOver(atp) + val atp1 = atp.mapOver(this) val args1 = mapOverAnnotArgs(args) // there is no need to rewrite assocs, as they are constants @@ -293,6 +170,7 @@ private[internal] trait TypeMaps { else args1 } + @nowarn("cat=lint-nonlocal-return") def mapOver(tree: Tree): Tree = mapOver(tree, () => return UnmappableTree) @@ -301,7 +179,7 @@ private[internal] trait TypeMaps { * The default is to transform the tree with * TypeMapTransformer. 
*/ - def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = + def mapOver(tree: Tree, giveup: () => Nothing): Tree = (new TypeMapTransformer).transform(tree) /** This transformer leaves the tree alone except to remap @@ -318,23 +196,77 @@ private[internal] trait TypeMaps { } } + abstract class VariancedTypeMap extends TypeMap { + + private[this] var _variance: Variance = Covariant + + def variance_=(x: Variance) = { _variance = x } + def variance = _variance + + @inline final def withVariance[T](v: Variance)(body: => T): T = { + val saved = variance + variance = v + try body finally variance = saved + } + @inline final def flipped[T](body: => T): T = { + variance = variance.flip + try body + finally variance = variance.flip + } + + final def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = { + val oldVariance = variance + map2Conserve(args, tparams)((arg, tparam) => withVariance(oldVariance * tparam.variance)(this(arg))) + } + + /** Applies this map to the symbol's info, setting variance = Invariant + * if necessary when the symbol is an alias. 
*/ + override protected final def applyToSymbolInfo(sym: Symbol, info: Type): Type = + if (!variance.isInvariant && sym.isAliasType) + withVariance(Invariant)(this(info)) + else + this(info) + } + + abstract class TypeFolder extends (Type => Unit) { + /** Map this function over given type */ + def apply(tp: Type): Unit // = if (tp ne null) tp.foldOver(this) + + /** Map this function over given type */ + def foldOver(syms: List[Symbol]): Unit = syms.foreach( sym => apply(sym.info) ) + + def foldOver(scope: Scope): Unit = { + val elems = scope.toList + foldOver(elems) + } + + def foldOverAnnotations(annots: List[AnnotationInfo]): Unit = + annots foreach foldOver + + def foldOver(annot: AnnotationInfo): Unit = { + val AnnotationInfo(atp, args, _) = annot + atp.foldOver(this) + foldOverAnnotArgs(args) + } + + def foldOverAnnotArgs(args: List[Tree]): Unit = + args foreach foldOver + + def foldOver(tree: Tree): Unit = apply(tree.tpe) + } + abstract class TypeTraverser extends TypeMap { def traverse(tp: Type): Unit def apply(tp: Type): Type = { traverse(tp); tp } } - abstract class TypeTraverserWithResult[T] extends TypeTraverser { - def result: T - def clear(): Unit - } - - abstract class TypeCollector[T](initial: T) extends TypeTraverser { + abstract class TypeCollector[T](initial: T) extends TypeFolder { var result: T = _ def collect(tp: Type): T = { val saved = result try { result = initial - traverse(tp) + apply(tp) result } finally { result = saved // support reentrant use of a single instance of this collector. @@ -351,7 +283,7 @@ private[internal] trait TypeMaps { * in ClassFileParser.sigToType (where it is usually done). 
*/ def rawToExistential = new TypeMap { - private var expanded = immutable.Set[Symbol]() + private[this] var expanded = immutable.Set[Symbol]() def apply(tp: Type): Type = tp match { case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) => if (expanded contains sym) AnyRefTpe @@ -363,7 +295,7 @@ private[internal] trait TypeMaps { expanded -= sym } case _ => - mapOver(tp) + tp.mapOver(this) } } /*** @@ -383,12 +315,13 @@ private[internal] trait TypeMaps { /** Used by existentialAbstraction. */ - class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) { - private val occurCount = mutable.HashMap[Symbol, Int]() + class ExistentialExtrapolation(tparams: List[Symbol]) extends VariancedTypeMap { + private[this] val occurCount = mutable.HashMap[Symbol, Int]() + private[this] val anyContains = new ContainsAnyKeyCollector(occurCount) private def countOccs(tp: Type) = { tp foreach { case TypeRef(_, sym, _) => - if (tparams contains sym) + if (occurCount contains sym) occurCount(sym) += 1 case _ => () } @@ -419,7 +352,7 @@ private[internal] trait TypeMaps { val word = if (variance.isPositive) "upper" else "lower" s"Widened lone occurrence of $tp1 inside existential to $word bound" } - if (!repl.typeSymbol.isBottomClass && !tparams.exists(repl.contains)) + if (!repl.typeSymbol.isBottomClass && ! anyContains.collect(repl)) debuglogResult(msg)(repl) else tp1 @@ -435,7 +368,7 @@ private[internal] trait TypeMaps { if ((pre1 eq pre) || !pre1.isStable) tp else singleType(pre1, sym) } - case _ => super.mapOver(tp) + case _ => tp.mapOver(this) } // Do not discard the types of existential idents. The @@ -455,12 +388,13 @@ private[internal] trait TypeMaps { * For example, the MethodType given by `def bla(x: (_ >: String)): (_ <: Int)` * is both a subtype and a supertype of `def bla(x: String): Int`. 
*/ - object wildcardExtrapolation extends TypeMap(trackVariance = true) { + object wildcardExtrapolation extends VariancedTypeMap { def apply(tp: Type): Type = tp match { case BoundedWildcardType(TypeBounds(lo, AnyTpe)) if variance.isContravariant => lo + case BoundedWildcardType(TypeBounds(lo, ObjectTpeJava)) if variance.isContravariant => lo case BoundedWildcardType(TypeBounds(NothingTpe, hi)) if variance.isCovariant => hi - case tp => mapOver(tp) + case tp => tp.mapOver(this) } } @@ -481,7 +415,7 @@ private[internal] trait TypeMaps { /** A map to compute the asSeenFrom method. */ class AsSeenFromMap(seenFromPrefix0: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints { - private val seenFromPrefix: Type = if (seenFromPrefix0.typeSymbolDirect.hasPackageFlag && !seenFromClass.hasPackageFlag) + private[this] val seenFromPrefix: Type = if (seenFromPrefix0.typeSymbolDirect.hasPackageFlag && !seenFromClass.hasPackageFlag) seenFromPrefix0.packageObject.typeOfThis else seenFromPrefix0 // Some example source constructs relevant in asSeenFrom: @@ -510,16 +444,17 @@ private[internal] trait TypeMaps { case tp @ ThisType(_) => thisTypeAsSeen(tp) case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp) case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp) - case _ => mapOver(tp) + case _ => tp.mapOver(this) } - private var _capturedSkolems: List[Symbol] = Nil - private var _capturedParams: List[Symbol] = Nil - private val isStablePrefix = seenFromPrefix.isStable + private[this] var _capturedSkolems: List[Symbol] = Nil + private[this] var _capturedParams: List[Symbol] = Nil + private[this] val isStablePrefix = seenFromPrefix.isStable // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate // but less succinct name. 
private def isBaseClassOfEnclosingClass(base: Symbol) = { + @tailrec def loop(encl: Symbol): Boolean = ( isPossiblePrefix(encl) && ((encl isSubClass base) || loop(encl.owner.enclClass)) @@ -538,7 +473,7 @@ private[internal] trait TypeMaps { && isBaseClassOfEnclosingClass(sym.owner) ) - private var capturedThisIds = 0 + private[this] var capturedThisIds = 0 private def nextCapturedThisId() = { capturedThisIds += 1; capturedThisIds } /** Creates an existential representing a type parameter which appears * in the prefix of a ThisType. @@ -553,7 +488,7 @@ private[internal] trait TypeMaps { qvar.tpe } } - protected def captureSkolems(skolems: List[Symbol]) { + protected def captureSkolems(skolems: List[Symbol]): Unit = { for (p <- skolems; if !(capturedSkolems contains p)) { debuglog(s"Captured $p seen from $seenFromPrefix") _capturedSkolems ::= p @@ -568,8 +503,8 @@ private[internal] trait TypeMaps { * @param rhs a type application constructed from `clazz` */ private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = { - val TypeRef(_, lhsSym, lhsArgs) = lhs - val TypeRef(_, rhsSym, rhsArgs) = rhs + val TypeRef(_, lhsSym, lhsArgs) = lhs: @unchecked + val TypeRef(_, rhsSym, rhsArgs) = rhs: @unchecked require(lhsSym.owner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym") // Find the type parameter position; we'll use the corresponding argument. @@ -616,15 +551,16 @@ private[internal] trait TypeMaps { // are not influenced by the prefix through which they are seen. Note that type params of // anonymous type functions, which currently can only arise from normalising type aliases, are // owned by the type alias of which they are the eta-expansion. 
- private def classParameterAsSeen(classParam: Type): Type = { - val TypeRef(_, tparam, _) = classParam + private def classParameterAsSeen(classParam: TypeRef): Type = { + val tparam = classParam.sym + @tailrec def loop(pre: Type, clazz: Symbol): Type = { // have to deconst because it may be a Class[T] def nextBase = (pre baseType clazz).deconst //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary if (skipPrefixOf(pre, clazz)) - mapOver(classParam) + classParam.mapOver(this) else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner)) loop(nextBase.prefix, clazz.owner) else nextBase match { @@ -675,7 +611,7 @@ private[internal] trait TypeMaps { /** Rewrite `This` trees in annotation argument trees */ override def transform(tree: Tree): Tree = super.transform(tree) match { case This(_) if matchesThis(tree.symbol) => newThis() - case tree => tree + case transformed => transformed } } @@ -686,7 +622,7 @@ private[internal] trait TypeMaps { // was touched. This takes us to one allocation per AsSeenFromMap rather // than an allocation on every call to mapOver, and no extra work when the // tree only has its types remapped. - override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { + override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { if (isStablePrefix) annotationArgRewriter transform tree else { @@ -698,13 +634,14 @@ private[internal] trait TypeMaps { } private def thisTypeAsSeen(tp: ThisType): Type = { + @tailrec def loop(pre: Type, clazz: Symbol): Type = { val pre1 = pre match { case SuperType(thistpe, _) => thistpe case _ => pre } if (skipPrefixOf(pre, clazz)) - mapOver(tp) // TODO - is mapOver necessary here? + tp.mapOver(this) // TODO - is mapOver necessary here? 
else if (!matchesPrefixAndClass(pre, clazz)(tp.sym)) loop((pre baseType clazz).prefix, clazz.owner) else if (pre1.isStable) @@ -727,21 +664,46 @@ private[internal] trait TypeMaps { override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)" } - /** A base class to compute all substitutions */ - abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap { - // OPT this check was 2-3% of some profiles, demoted to -Xdev - if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + /** A base class to compute all substitutions. */ + abstract class SubstMap[T >: Null](from0: List[Symbol], to0: List[T]) extends TypeMap { + private[this] var from: List[Symbol] = from0 + private[this] var to: List[T] = to0 private[this] var fromHasTermSymbol = false private[this] var fromMin = Int.MaxValue private[this] var fromMax = Int.MinValue private[this] var fromSize = 0 - from.foreach { - sym => - fromMin = math.min(fromMin, sym.id) - fromMax = math.max(fromMax, sym.id) - fromSize += 1 - if (sym.isTerm) fromHasTermSymbol = true + + // So SubstTypeMap can expose them publicly + // while SubstMap can continue to access them as private fields + protected[this] final def accessFrom: List[Symbol] = from + protected[this] final def accessTo: List[T] = to + + reset(from0, to0) + def reset(from0: List[Symbol], to0: List[T]): this.type = { + // OPT this check was 2-3% of some profiles, demoted to -Xdev + if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + + from = from0 + to = to0 + + fromHasTermSymbol = false + fromMin = Int.MaxValue + fromMax = Int.MinValue + fromSize = 0 + + def scanFrom(ss: List[Symbol]): Unit = + ss match { + case sym :: rest => + fromMin = math.min(fromMin, sym.id) + fromMax = math.max(fromMax, sym.id) + fromSize += 1 + if (sym.isTerm) fromHasTermSymbol = true + scanFrom(rest) + case _ => () + } + scanFrom(from) + this } /** Are `sym` and `sym1` the same? 
Can be tuned by subclasses. */ @@ -790,7 +752,7 @@ private[internal] trait TypeMaps { } def apply(tp0: Type): Type = if (from.isEmpty) tp0 else { - val tp = mapOver(renameBoundSyms(tp0)) + val tp = renameBoundSyms(tp0).mapOver(this) def substFor(sym: Symbol) = subst(tp, sym, from, to) tp match { @@ -824,76 +786,86 @@ private[internal] trait TypeMaps { } /** A map to implement the `substSym` method. */ - class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) { + class SubstSymMap(from0: List[Symbol], to0: List[Symbol]) extends SubstMap[Symbol](from0, to0) { def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) - protected def toType(fromtp: Type, sym: Symbol) = fromtp match { - case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args) - case SingleType(pre, _) => singleType(pre, sym) + private[this] final def from: List[Symbol] = accessFrom + private[this] final def to: List[Symbol] = accessTo + + protected def toType(fromTpe: Type, sym: Symbol) = fromTpe match { + case TypeRef(pre, _, args) => copyTypeRef(fromTpe, pre, sym, args) + case SingleType(pre, _) => singleType(pre, sym) + case x => throw new MatchError(x) } - @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = ( + + @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = if (from.isEmpty) sym // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from)) else if (matches(from.head, sym)) to.head else subst(sym, from.tail, to.tail) - ) - private def substFor(sym: Symbol) = subst(sym, from, to) - override def apply(tp: Type): Type = ( - if (from.isEmpty) tp - else tp match { + private def substFor(sym: Symbol) = + subst(sym, from, to) + + override def apply(tpe: Type): Type = + if (from.isEmpty) tpe else tpe match { case TypeRef(pre, sym, args) if pre ne NoPrefix => val newSym = substFor(sym) - // mapOver takes care of 
subst'ing in args - mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) ) - // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams)) + // mapOver takes care of substituting in args + (if (sym eq newSym) tpe else copyTypeRef(tpe, pre, newSym, args)).mapOver(this) + // assert(newSym.typeParams.length == sym.typeParams.length, "typeParams mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams)) case SingleType(pre, sym) if pre ne NoPrefix => val newSym = substFor(sym) - mapOver( if (sym eq newSym) tp else singleType(pre, newSym) ) + (if (sym eq newSym) tpe else singleType(pre, newSym)).mapOver(this) + case tp: RefinedType => + val owner = tpe.typeSymbol.owner + val newOwner = substFor(owner) + (if (newOwner eq owner) tpe else copyRefinedType(tp, tp.parents, tp.decls, newOwner)).mapOver(this) case _ => - super.apply(tp) + super.apply(tpe) } - ) object mapTreeSymbols extends TypeMapTransformer { val strictCopy = newStrictTreeCopier - def termMapsTo(sym: Symbol) = from indexOf sym match { - case -1 => None - case idx => Some(to(idx)) - } - // if tree.symbol is mapped to another symbol, passes the new symbol into the // constructor `trans` and sets the symbol and the type on the resulting tree. - def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match { - case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe - case None => tree - } + def transformIfMapped(tree: Tree)(trans: Symbol => Tree): Tree = + from.indexOf(tree.symbol) match { + case -1 => tree + case idx => + val toSym = to(idx) + trans(toSym).setSymbol(toSym).setType(tree.tpe) + } // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified. - override def transform(tree: Tree) = { + override def transform(tree: Tree): Tree = // super.transform maps symbol references in the types of `tree`. 
it also copies trees where necessary. super.transform(tree) match { case id @ Ident(_) => - transformIfMapped(id)(toSym => - strictCopy.Ident(id, toSym.name)) - - case sel @ Select(qual, name) => - transformIfMapped(sel)(toSym => - strictCopy.Select(sel, qual, toSym.name)) - - case tree => tree + transformIfMapped(id)(toSym => strictCopy.Ident(id, toSym.name)) + case sel @ Select(qual, _) => + transformIfMapped(sel)(toSym => strictCopy.Select(sel, qual, toSym.name)) + case transformed => transformed } - } } - override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { + + override def mapOver(tree: Tree, giveup: () => Nothing): Tree = mapTreeSymbols.transform(tree) - } + } + + object SubstSymMap { + def apply(): SubstSymMap = new SubstSymMap() + def apply(from: List[Symbol], to: List[Symbol]): SubstSymMap = new SubstSymMap(from, to) + def apply(fromto: (Symbol, Symbol)): SubstSymMap = new SubstSymMap(fromto) } /** A map to implement the `subst` method. */ - class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) { - protected def toType(fromtp: Type, tp: Type) = tp + class SubstTypeMap(from0: List[Symbol], to0: List[Type]) extends SubstMap[Type](from0, to0) { + final def from: List[Symbol] = accessFrom + final def to: List[Type] = accessTo + + override protected def toType(fromtp: Type, tp: Type) = tp override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { object trans extends TypeMapTransformer { @@ -918,7 +890,7 @@ private[internal] trait TypeMaps { class SubstThisMap(from: Symbol, to: Type) extends TypeMap { def apply(tp: Type): Type = tp match { case ThisType(sym) if (sym == from) => to - case _ => mapOver(tp) + case _ => tp.mapOver(this) } } @@ -928,7 +900,7 @@ private[internal] trait TypeMaps { case TypeRef(_, sym, _) if from contains sym => BoundedWildcardType(sym.info.bounds) case _ => - mapOver(tp) + tp.mapOver(this) } } catch { case ex: MalformedType => @@ -937,17 +909,16 @@ private[internal] trait TypeMaps 
{ } // dependent method types - object IsDependentCollector extends TypeCollector(false) { - def traverse(tp: Type) { + object IsDependentCollector extends TypeCollector(initial = false) { + def apply(tp: Type): Unit = if (tp.isImmediatelyDependent) result = true - else if (!result) mapOver(tp.dealias) - } + else if (!result) tp.dealias.foldOver(this) } object ApproximateDependentMap extends TypeMap { def apply(tp: Type): Type = if (tp.isImmediatelyDependent) WildcardType - else mapOver(tp) + else tp.mapOver(this) } /** Note: This map is needed even for non-dependent method types, despite what the name might imply. @@ -1036,11 +1007,11 @@ private[internal] trait TypeMaps { */ def apply(tp: Type): Type = tp match { case SingleType(NoPrefix, StabilizedArgTp(tp)) => tp - case _ => mapOver(tp) + case _ => tp.mapOver(this) } //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon) - override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = { + override def mapOver(arg: Tree, giveup: () => Nothing): Tree = { // TODO: this should be simplified; in the stable case, one can // probably just use an Ident to the tree.symbol. // @@ -1070,30 +1041,24 @@ private[internal] trait TypeMaps { } } - /** A map to convert every occurrence of a wildcard type to a fresh - * type variable */ - object wildcardToTypeVarMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case WildcardType => - TypeVar(tp, new TypeConstraint) - case BoundedWildcardType(bounds) => - TypeVar(tp, new TypeConstraint(bounds)) - case _ => - mapOver(tp) - } + /** A map that is conceptually an identity, but in practice may perform some side effects. */ + object identityTypeMap extends TypeMap { + def apply(tp: Type): Type = tp.mapOver(this) } /** A map to convert each occurrence of a type variable to its origin. 
*/ object typeVarToOriginMap extends TypeMap { def apply(tp: Type): Type = tp match { case TypeVar(origin, _) => origin - case _ => mapOver(tp) + case _ => tp.mapOver(this) } } - /** A map to implement the `contains` method. */ - class ContainsCollector(sym: Symbol) extends TypeCollector(false) { - def traverse(tp: Type) { + abstract class ExistsTypeRefCollector extends TypeCollector[Boolean](initial = false) { + + protected def pred(sym: Symbol): Boolean + + def apply(tp: Type): Unit = if (!result) { tp match { case _: ExistentialType => @@ -1102,75 +1067,99 @@ private[internal] trait TypeMaps { // // We can just map over the components and wait until we see the underlying type before we call // normalize. - mapOver(tp) + tp.foldOver(this) + case TypeRef(_, sym1, _) if pred(sym1) => result = true // catch aliases before normalization case _ => tp.normalize match { - case TypeRef(_, sym1, _) if (sym == sym1) => result = true + case TypeRef(_, sym1, _) if pred(sym1) => result = true case refined: RefinedType => - mapOver(tp.prefix) - mapOver(refined) - case SingleType(_, sym1) if (sym == sym1) => result = true - case _ => mapOver(tp) + tp.prefix.foldOver(this) // Assumption is that tp was a TypeRef prior to normalization so we should + // mapOver its prefix + refined.foldOver(this) + case SingleType(_, sym1) if pred(sym1) => result = true + case _ => tp.foldOver(this) } } } + + private class CollectingTraverser(p: Tree => Boolean) extends FindTreeTraverser(p) { + def collect(arg: Tree): Boolean = { + /*super[FindTreeTraverser].*/ result = None + traverse(arg) + /*super[FindTreeTraverser].*/ result.isDefined + } } - override def mapOver(arg: Tree) = { - for (t <- arg) { - traverse(t.tpe) - if (t.symbol == sym) - result = true + private lazy val findInTree = { + def inTree(t: Tree): Boolean = { + if (pred(t.symbol)) result = true else apply(t.tpe) + result } - arg + new CollectingTraverser(inTree) + } + + override def foldOver(arg: Tree) = if (!result) 
findInTree.collect(arg) + } + + /** A map to implement the `contains` method. */ + class ContainsCollector(private[this] var sym: Symbol) extends ExistsTypeRefCollector { + def reset(nsym: Symbol): Unit = { + result = false + sym = nsym } + override protected def pred(sym1: Symbol): Boolean = sym1 == sym + } + class ContainsAnyKeyCollector(symMap: mutable.HashMap[Symbol, _]) extends ExistsTypeRefCollector { + override protected def pred(sym1: Symbol): Boolean = symMap.contains(sym1) } /** A map to implement the `filter` method. */ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) { override def collect(tp: Type) = super.collect(tp).reverse - def traverse(tp: Type) { + override def apply(tp: Type): Unit = { if (p(tp)) result ::= tp - mapOver(tp) + tp.foldOver(this) } } /** A map to implement the `collect` method. */ class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) { - override def collect(tp: Type) = super.collect(tp).reverse + val buffer: ListBuffer[T] = ListBuffer.empty + + override def collect(tp: Type): List[T] = { + apply(tp) + val result = buffer.result() + buffer.clear() + result + } - def traverse(tp: Type) { - if (pf.isDefinedAt(tp)) result ::= pf(tp) - mapOver(tp) + override def apply(tp: Type): Unit = { + if (pf.isDefinedAt(tp)) buffer += pf(tp) + tp.foldOver(this) } } class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser { - def traverse(tp: Type) { + def traverse(tp: Type): Unit = { f(tp) - mapOver(tp) + tp.mapOver(this) } } /** A map to implement the `filter` method. */ class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) { - def traverse(tp: Type) { - if (result.isEmpty) { - if (p(tp)) result = Some(tp) - mapOver(tp) - } - } + def apply(tp: Type): Unit = + if (result.isEmpty) + if (p(tp)) result = Some(tp) else tp.foldOver(this) } - /** A map to implement the `contains` method. 
*/ - object ErroneousCollector extends TypeCollector(false) { - def traverse(tp: Type) { + object ErroneousCollector extends TypeCollector(initial = false) { + def apply(tp: Type): Unit = if (!result) { result = tp.isError - mapOver(tp) + if (!result) tp.foldOver(this) } - } } object adaptToNewRunMap extends TypeMap { @@ -1194,6 +1183,7 @@ private[internal] trait TypeMaps { throw new MissingTypeControl // For build manager and presentation compiler purposes } /* The two symbols have the same fully qualified name */ + @tailrec def corresponds(sym1: Symbol, sym2: Symbol): Boolean = sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner)) if (!corresponds(sym.owner, rebind0.owner)) { @@ -1292,13 +1282,30 @@ private[internal] trait TypeMaps { val parents1 = parents mapConserve (this) if (parents1 eq parents) tp else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos) - case SuperType(_, _) => mapOver(tp) - case TypeBounds(_, _) => mapOver(tp) - case TypeVar(_, _) => mapOver(tp) - case AnnotatedType(_, _) => mapOver(tp) - case ExistentialType(_, _) => mapOver(tp) + case SuperType(_, _) => tp.mapOver(this) + case TypeBounds(_, _) => tp.mapOver(this) + case TypeVar(_, _) => tp.mapOver(this) + case AnnotatedType(_, _) => tp.mapOver(this) + case ExistentialType(_, _) => tp.mapOver(this) case _ => tp } } + object UnrelatableCollector extends CollectTypeCollector[TypeSkolem](PartialFunction.empty) { + var barLevel: Int = 0 + + override def apply(tp: Type): Unit = tp match { + case TypeRef(_, ts: TypeSkolem, _) if ts.level > barLevel => buffer += ts + case _ => tp.foldOver(this) + } + } + + object IsRelatableCollector extends TypeCollector[Boolean](initial = true) { + var barLevel: Int = 0 + + def apply(tp: Type): Unit = if (result) tp match { + case TypeRef(_, ts: TypeSkolem, _) if ts.level > barLevel => result = false + case _ => tp.foldOver(this) + } + } } diff --git 
a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala index f4acdb99150b..4c4a7a3f34c7 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -24,11 +24,11 @@ private[internal] trait TypeToStrings { */ final val maxToStringRecursions = 50 - private var _toStringRecursions = 0 + private[this] var _toStringRecursions = 0 def toStringRecursions = _toStringRecursions def toStringRecursions_=(value: Int) = _toStringRecursions = value - private val _toStringSubjects = HashSet[Type]() + private[this] val _toStringSubjects = HashSet[Type]() def toStringSubjects = _toStringSubjects protected def typeToString(tpe: Type): String = diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 5a77d1be1d53..ae599366c1b1 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,8 +15,14 @@ package reflect package internal package transform +import scala.annotation.tailrec + trait Erasure { + // FIXME: With `global` as a `val`, implementers must use early initializers, which + // are deprecated and will not be supported in 3.0. Please change the design, + // remove the early initializers from implementers, and then remove the + // `@nowarn` annotations from implementers. 
val global: SymbolTable import global._ import definitions._ @@ -27,6 +33,7 @@ trait Erasure { /** Is `tp` an unbounded generic type (i.e. which could be instantiated * with primitive as well as class types)?. */ + @tailrec private def genericCore(tp: Type): Type = tp.dealiasWiden match { /* A Java Array is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is * erased to Object. However, there is only symbol for the Array class. So to make the distinction between @@ -68,9 +75,9 @@ trait Erasure { /** Arrays despite their finality may turn up as refined type parents, * e.g. with "tagged types" like Array[Int] with T. */ - protected def unboundedGenericArrayLevel(tp: Type): Int = tp match { - case GenericArray(level, core) if !(core <:< AnyRefTpe) => level - case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")((ps map unboundedGenericArrayLevel).max) + def unboundedGenericArrayLevel(tp: Type): Int = tp match { + case GenericArray(level, core) if !(core <:< AnyRefTpe || core.upperBound == ObjectTpeJava) => level + case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")(unboundedGenericArrayLevel(intersectionDominator(ps))) case _ => 0 } @@ -90,13 +97,14 @@ trait Erasure { * This method needs to be called at a phase no later than erasurephase */ def erasedValueClassArg(tref: TypeRef): Type = { - assert(!phase.erasedTypes) + assert(!phase.erasedTypes, "Types are erased") val clazz = tref.sym if (valueClassIsParametric(clazz)) { - val underlying = tref.memberType(clazz.derivedValueClassUnbox).resultType - boxingErasure(underlying) + val erasureMap = if (clazz.isScala3Defined) boxing3Erasure else boxingErasure + erasureMap(tref.memberType(clazz.derivedValueClassUnbox).resultType) } else { - scalaErasure(underlyingOfValueClass(clazz)) + val erasureMap = if (clazz.isScala3Defined) scala3Erasure else scalaErasure + erasureMap(underlyingOfValueClass(clazz)) } } @@ 
-105,13 +113,13 @@ trait Erasure { * This method needs to be called at a phase no later than erasurephase */ def valueClassIsParametric(clazz: Symbol): Boolean = { - assert(!phase.erasedTypes) - clazz.typeParams contains - clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol + assert(!phase.erasedTypes, "valueClassIsParametric called after erasure") + clazz.typeParams contains clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol } abstract class ErasureMap extends TypeMap { def mergeParents(parents: List[Type]): Type + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type def eraseNormalClassRef(tref: TypeRef): Type = { val TypeRef(pre, clazz, args) = tref @@ -124,25 +132,27 @@ trait Erasure { protected def eraseDerivedValueClassRef(tref: TypeRef): Type = erasedValueClassArg(tref) def apply(tp: Type): Type = tp match { - case ConstantType(ct) => + case FoldableConstantType(ct) => // erase classOf[List[_]] to classOf[List]. special case for classOf[Unit], avoid erasing to classOf[BoxedUnit]. 
- if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != UnitClass) ConstantType(Constant(apply(ct.typeValue))) - else tp + if (ct.tag == ClazzTag) + if (ct.typeValue.typeSymbol == UnitClass) tp + else ConstantType(Constant(apply(ct.typeValue))) + else ct.tpe case st: ThisType if st.sym.isPackageClass => tp case st: SubType => apply(st.supertype) case tref @ TypeRef(pre, sym, args) => - if (sym eq ArrayClass) - if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe - else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) - else typeRef(apply(pre), sym, args map applyInArray) + def isDottyEnumSingleton(sym: Symbol): Boolean = + sym.isScala3Defined && sym.isModuleClass && sym.sourceModule.hasAttachment[DottyEnumSingleton.type] + if (sym eq ArrayClass) eraseArray(tp, pre, args) else if ((sym eq AnyClass) || (sym eq AnyValClass) || (sym eq SingletonClass)) ObjectTpe else if (sym eq UnitClass) BoxedUnitTpe else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) + else if (isDottyEnumSingleton(sym)) apply(mergeParents(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. 
else if (sym.isClass) eraseNormalClassRef(tref) - else apply(sym.info asSeenFrom (pre, sym.owner)) // alias type or abstract type + else apply(transparentDealias(sym, pre, sym.owner)) // alias type or abstract type (including opaque type) case PolyType(tparams, restpe) => apply(restpe) case ExistentialType(tparams, restpe) => @@ -153,7 +163,7 @@ trait Erasure { if (restpe.typeSymbol == UnitClass) UnitTpe // this replaces each typeref that refers to an argument // by the type `p.tpe` of the actual argument p (p in params) - else apply(mt.resultType(mt.paramTypes))) + else apply(mt.resultTypeOwnParamTypes)) case RefinedType(parents, decls) => apply(mergeParents(parents)) case AnnotatedType(_, atp) => @@ -174,12 +184,12 @@ trait Erasure { if (newParents eq parents) tp else ClassInfoType(newParents, decls, clazz) - // can happen while this map is being used before erasure (e.g. when reasoning about sam types) + // A BoundedWildcardType, e.g., can happen while this map is being used before erasure (e.g. when reasoning about sam types) // the regular mapOver will cause a class cast exception because TypeBounds don't erase to TypeBounds - case _: BoundedWildcardType => tp // skip + case pt: ProtoType => pt // skip case _ => - mapOver(tp) + tp.mapOver(this) } /* scala/bug#10551, scala/bug#10646: @@ -234,12 +244,18 @@ trait Erasure { * parents |Ps|, but with duplicate references of Object removed. 
* - for all other types, the type itself (with any sub-components erased) */ - def erasure(sym: Symbol): ErasureMap = - if (sym == NoSymbol || !sym.enclClass.isJavaDefined) scalaErasure - else if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure - else javaErasure + def erasure(sym: Symbol): ErasureMap = { + if (sym == NoSymbol) return scalaErasure + val enclosing = sym.enclClass + if (enclosing.isJavaDefined) { + if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure + else javaErasure + } + else if (enclosing.isScala3Defined) scala3Erasure + else scalaErasure + } - /** This is used as the Scala erasure during the erasure phase itself + /** This is used as the Scala erasure during the erasure phase itself. * It differs from normal erasure in that value classes are erased to ErasedValueTypes which * are then later converted to the underlying parameter type in phase posterasure. */ @@ -249,9 +265,9 @@ trait Erasure { else if (sym.isClassConstructor) specialConstructorErasure(sym.owner, tp) else - specialScalaErasure(tp) + specialScalaErasureFor(sym)(tp) - def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = { + def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = tpe match { case PolyType(tparams, restpe) => specialConstructorErasure(clazz, restpe) @@ -259,16 +275,14 @@ trait Erasure { specialConstructorErasure(clazz, restpe) case mt @ MethodType(params, restpe) => MethodType( - cloneSymbolsAndModify(params, specialScalaErasure), + cloneSymbolsAndModify(params, specialScalaErasureFor(clazz)), specialConstructorErasure(clazz, restpe)) case TypeRef(pre, `clazz`, args) => typeRef(pre, clazz, List()) case tp => - if (!(clazz == ArrayClass || tp.isError)) - assert(clazz == ArrayClass || tp.isError, s"!!! 
unexpected constructor erasure $tp for $clazz") - specialScalaErasure(tp) + assert(clazz == ArrayClass || tp.isError, s"unexpected constructor erasure $tp for $clazz") + specialScalaErasureFor(clazz)(tp) } - } /** Scala's more precise erasure than java's is problematic as follows: * @@ -282,7 +296,8 @@ trait Erasure { * For this reason and others (such as distinguishing constructors from other methods) * erasure is now (Symbol, Type) => Type rather than Type => Type. */ - class ScalaErasureMap extends ErasureMap { + abstract class ScalaErasureMap extends ErasureMap with Scala2JavaArrayErasure { + /** In scala, calculate a useful parent. * An intersection such as `Object with Trait` erases to Trait. */ @@ -290,7 +305,213 @@ trait Erasure { intersectionDominator(parents) } - class JavaErasureMap extends ErasureMap { + trait Scala2JavaArrayErasure { self: ErasureMap => + + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type = + if (unboundedGenericArrayLevel(arrayRef) == 1) ObjectTpe + else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) + else typeRef(self(pre), ArrayClass, args map applyInArray) + + } + + class Scala3ErasureMap extends ErasureMap { self => + + def mergeParents(parents: List[Type]): Type = { + erasedGlb(parents.map(self(_))) + } + + def mergeParentsInArray(parents: List[Type]): Type = { + erasedGlb(parents.map(super.applyInArray(_))) + } + + override def applyInArray(tp: Type): Type = { + tp match { + case RefinedType(parents, _) => + super.applyInArray(mergeParentsInArray(parents)) + case _ => + super.applyInArray(tp) + } + } + + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type = { + if (isGenericArrayElement(args.head)) ObjectTpe + else typeRef(self(pre), ArrayClass, args map applyInArray) + } + + /** Scala 3 implementation of erasure for intersection types. + * @param components the erased component types of the intersection. 
+ */ + private def erasedGlb(components: List[Type]): Type = { + + /** A comparison function that induces a total order on erased types, + * where `A <= B` implies that the erasure of `A & B` should be A. + * + * This order respects the following properties: + * - ErasedValueTypes <= non-ErasedValueTypes + * - arrays <= non-arrays + * - primitives <= non-primitives + * - real classes <= traits + * - subtypes <= supertypes + * + * Since this isn't enough to order to unrelated classes, we use + * lexicographic ordering of the class symbol full name as a tie-breaker. + * This ensure that `A <= B && B <= A` iff `A =:= B`. + */ + def compareErasedGlb(tp1: Type, tp2: Type): Int = { + // this check is purely an optimization. + if (tp1 eq tp2) return 0 + + val isEVT1 = tp1.isInstanceOf[ErasedValueType] + val isEVT2 = tp2.isInstanceOf[ErasedValueType] + if (isEVT1 && isEVT2) { + return compareErasedGlb( + tp1.asInstanceOf[ErasedValueType].valueClazz.tpe_*, + tp2.asInstanceOf[ErasedValueType].valueClazz.tpe_*) + } + else if (isEVT1) + return -1 + else if (isEVT2) + return 1 + + val sym1 = tp1.baseClasses.head + val sym2 = tp2.baseClasses.head + + def compareClasses: Int = { + if (sym1.isSubClass(sym2)) + -1 + else if (sym2.isSubClass(sym1)) + 1 + else + sym1.fullName.compareTo(sym2.fullName) + } + + val isArray1 = tp1.typeArgs.nonEmpty && sym1.isSubClass(definitions.ArrayClass) + val isArray2 = tp2.typeArgs.nonEmpty && sym2.isSubClass(definitions.ArrayClass) + if (isArray1 && isArray2) + return compareErasedGlb(tp1.typeArgs.head, tp2.typeArgs.head) + else if (isArray1) + return -1 + else if (isArray2) + return 1 + + val isPrimitive1 = sym1.isPrimitiveValueClass + val isPrimitive2 = sym2.isPrimitiveValueClass + if (isPrimitive1 && isPrimitive2) + return compareClasses + else if (isPrimitive1) + return -1 + else if (isPrimitive2) + return 1 + + val isRealClass1 = sym1.isClass && !sym1.isTrait + val isRealClass2 = sym2.isClass && !sym2.isTrait + if (isRealClass1 && 
isRealClass2) + return compareClasses + else if (isRealClass1) + return -1 + else if (isRealClass2) + return 1 + + compareClasses + } + + components.min((t, u) => compareErasedGlb(t, u)) + } + + /** Dotty implementation of Array Erasure: + * + * Is `Array[tp]` a generic Array that needs to be erased to `Object`? + * This is true if among the subtypes of `Array[tp]` there is either: + * - both a reference array type and a primitive array type + * (e.g. `Array[_ <: Int | String]`, `Array[_ <: Any]`) + * - or two different primitive array types (e.g. `Array[_ <: Int | Double]`) + * In both cases the erased lub of those array types on the JVM is `Object`. + */ + private def isGenericArrayElement(tp: Type): Boolean = { + + object DottyTypeProxy { + + def unapply(tp: Type): Option[Type] = { + val superTpe = translucentSuperType(tp) + if (superTpe ne NoType) Some(superTpe) else None + } + + def translucentSuperType(tp: Type): Type = tp match { + case tp: TypeRef => transparentDealias(tp.sym, tp.pre, tp.sym.owner) + case tp: SingleType => tp.underlying + case tp: ThisType => tp.sym.typeOfThis + case tp: ConstantType => tp.value.tpe + case tp: RefinedType if tp.decls.nonEmpty => intersectionType(tp.parents) + case tp: PolyType => tp.resultType + case tp: ExistentialType => tp.underlying + case tp: TypeBounds => tp.hi + case tp: AnnotatedType => tp.underlying + case tp: SuperType => tp.thistpe.baseType(tp.supertpe.typeSymbol) + case tp => NoType + } + + } + + object DottyAndType { + def unapply(tp: RefinedType): Boolean = tp.decls.isEmpty + } + + /** A symbol that represents the sort of JVM array that values of type `t` can be stored in: + * - If we can always store such values in a reference array, return Object + * - If we can always store them in a specific primitive array, return the + * corresponding primitive class + * - Otherwise, return `NoSymbol`. 
+ */ + def arrayUpperBound(tp: Type): Symbol = tp.dealias match { + case TypeRef(_, sym, _) if sym.isClass => + val cls = sym + // Only a few classes have both primitives and references as subclasses. + if ((cls eq AnyClass) || (cls eq AnyValClass) || (cls eq SingletonClass)) + NoSymbol + // We only need to check for primitives because derived value classes in arrays are always boxed. + else if (cls.isPrimitiveValueClass) + cls + else + ObjectClass + case DottyTypeProxy(unwrapped) => + arrayUpperBound(unwrapped) + case tp @ DottyAndType() => + // Find first `p` in `parents` where `arrayUpperBound(p) ne NoSymbol` + def loop(tps: List[Type]): Symbol = tps match { + case p :: tps => + val ub = arrayUpperBound(p) + if (ub ne NoSymbol) ub + else loop(tps) + case nil => NoSymbol + } + loop(tp.parents) + case _ => + NoSymbol + } + + /** Can one of the JVM Array type store all possible values of type `t`? */ + def fitsInJVMArray(tp: Type): Boolean = arrayUpperBound(tp) ne NoSymbol + + def isOpaque(sym: Symbol) = sym.isScala3Defined && !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] + + tp.dealias match { + case tp @ TypeRef(_, sym, _) if !isOpaque(sym) => + !sym.isClass && + !sym.isJavaDefined && // In Java code, Array[T] can never erase to Object + !fitsInJVMArray(tp) + case DottyTypeProxy(unwrapped) => + isGenericArrayElement(unwrapped) + case tp @ DottyAndType() => + tp.parents.forall(isGenericArrayElement) + case _ => + false + } + + } + + } + + class JavaErasureMap extends ErasureMap with Scala2JavaArrayErasure { /** In java, always take the first parent. * An intersection such as `Object with Trait` erases to Object. 
*/ @@ -302,16 +523,28 @@ trait Erasure { } object scalaErasure extends ScalaErasureMap + object scala3Erasure extends Scala3ErasureMap - /** This is used as the Scala erasure during the erasure phase itself - * It differs from normal erasure in that value classes are erased to ErasedValueTypes which - * are then later unwrapped to the underlying parameter type in phase posterasure. - */ - object specialScalaErasure extends ScalaErasureMap { + trait SpecialScalaErasure extends ErasureMap { override def eraseDerivedValueClassRef(tref: TypeRef): Type = ErasedValueType(tref.sym, erasedValueClassArg(tref)) } + /** This is used as the Scala erasure during the erasure phase itself. + * It differs from normal erasure in that value classes are erased to ErasedValueTypes which + * are then later unwrapped to the underlying parameter type in phase posterasure. + */ + object specialScalaErasure extends ScalaErasureMap with SpecialScalaErasure + + /** This is used as the Scala erasure for Scala 3 methods during the erasure phase itself. 
+ * @see specialScalaErasure + */ + object specialScala3Erasure extends Scala3ErasureMap with SpecialScalaErasure + + def specialScalaErasureFor(sym: Symbol): ErasureMap = + if (sym.isScala3Defined) specialScala3Erasure + else specialScalaErasure + object javaErasure extends JavaErasureMap object verifiedJavaErasure extends JavaErasureMap { @@ -324,8 +557,9 @@ trait Erasure { } } - object boxingErasure extends ScalaErasureMap { - private var boxPrimitives = true + trait BoxingErasure extends ErasureMap { + + private[this] var boxPrimitives = true override def applyInArray(tp: Type): Type = { val saved = boxPrimitives @@ -337,10 +571,15 @@ trait Erasure { override def eraseNormalClassRef(tref: TypeRef) = if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe else super.eraseNormalClassRef(tref) + override def eraseDerivedValueClassRef(tref: TypeRef) = super.eraseNormalClassRef(tref) + } + object boxingErasure extends ScalaErasureMap with BoxingErasure + object boxing3Erasure extends Scala3ErasureMap with BoxingErasure + /** The intersection dominator (SLS 3.7) of a list of types is computed as follows. * * - If the list contains one or more occurrences of scala.Array with @@ -376,6 +615,21 @@ trait Erasure { } } + /** For a type alias, get its info as seen from + * the current prefix and owner. + * Sees through opaque type aliases. + */ + def transparentDealias(sym: Symbol, pre: Type, owner: Symbol) = { + @inline def visible(tp: Type) = tp.asSeenFrom(pre, owner) + + if (sym.isScala3Defined && !sym.isClass) + sym.attachments.get[DottyOpaqueTypeAlias] + .map(alias => visible(alias.tpe)) + .getOrElse(visible(sym.info)) + else + visible(sym.info) + } + /** The symbol's erased info. 
This is the type's erasure, except for the following primitive symbols: * * - $asInstanceOf --> [T]T @@ -402,8 +656,9 @@ trait Erasure { if (sym.isClassConstructor) // TODO: switch on name for all branches -- this one is sym.name == nme.CONSTRUCTOR tp match { case MethodType(params, TypeRef(pre, sym1, args)) => - MethodType(cloneSymbolsAndModify(params, specialErasure(sym)), + MethodType(cloneSymbolsAndModify(params, tp => specialErasure(sym)(tp)), typeRef(specialErasure(sym)(pre), sym1, args)) + case x => throw new MatchError(x) } else if (sym.name == nme.apply) tp diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala index 724c6d17180f..2420acb61b00 100644 --- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala +++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,10 @@ package internal package transform trait PostErasure { + // FIXME: With `global` as a `val`, implementers must use early initializers, which + // are deprecated and will not be supported in 3.0. Please change the design, + // remove the early initializers from implementers, and then remove the + // `@nowarn` annotations from implementers. 
val global: SymbolTable import global._ @@ -22,7 +26,8 @@ trait PostErasure { def apply(tp: Type) = tp match { case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp))) case ErasedValueType(_, underlying) => underlying - case _ => mapOver(tp) + case null => null + case _ => tp.mapOver(this) } } diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala index e611a232fcb1..37874253adb9 100644 --- a/src/reflect/scala/reflect/internal/transform/Transforms.scala +++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,8 +26,8 @@ trait Transforms { self: SymbolTable => * in the standard library. Or is it already? */ private class Lazy[T](op: => T) { - private var value: T = _ - private var _isDefined = false + private[this] var value: T = _ + private[this] var _isDefined = false def isDefined = _isDefined def force: T = { if (!isDefined) { value = op; _isDefined = true } @@ -35,9 +35,9 @@ trait Transforms { self: SymbolTable => } } - private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry) - private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure) - private val postErasureLazy = new Lazy(new { val global: Transforms.this.type = self } with PostErasure) + private[this] val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry) + private[this] val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure) + private[this] val postErasureLazy = new Lazy(new { val global: Transforms.this.type = self } with PostErasure) def uncurry = uncurryLazy.force def erasure = erasureLazy.force @@ -48,7 +48,10 @@ 
trait Transforms { self: SymbolTable => erasure.transformInfo(sym, uncurry.transformInfo(sym, sym.info))) - def transformedType(tpe: Type) = - postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe))) + def transformedType(tpe: Type) = { + val symbol = tpe.widen.typeSymbol + val erasureMap = if (symbol.isScala3Defined) erasure.scala3Erasure else erasure.scalaErasure + postErasure.elimErasedValueType(erasureMap(uncurry.uncurry(tpe))) + } } diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index 6bdbeccb4518..afc1a5e4f37b 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,10 +16,15 @@ package internal package transform import Flags._ +import scala.annotation.tailrec import scala.collection.mutable trait UnCurry { + // FIXME: With `global` as a `val`, implementers must use early initializers, which + // are deprecated and will not be supported in 3.0. Please change the design, + // remove the early initializers from implementers, and then remove the + // `@nowarn` annotations from implementers. 
val global: SymbolTable import global._ import definitions._ @@ -41,6 +46,7 @@ trait UnCurry { private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp val uncurry: TypeMap = new TypeMap { + @tailrec def apply(tp0: Type): Type = { val tp = expandAlias(tp0) tp match { @@ -62,31 +68,25 @@ trait UnCurry { case DesugaredParameterType(desugaredTpe) => apply(desugaredTpe) case _ => - expandAlias(mapOver(tp)) + expandAlias(tp.mapOver(this)) } } } object DesugaredParameterType { - def isUnboundedGeneric(tp: Type) = tp match { - case t @ TypeRef(_, sym, _) if sym.isAbstractType => - TypeBounds.isEmptyUpper(sym.info.resultType.upperBound) - case _ => false - } - def unapply(tpe: Type): Option[Type] = tpe match { case TypeRef(pre, ByNameParamClass, arg :: Nil) => Some(functionType(List(), arg)) case TypeRef(pre, RepeatedParamClass, arg :: Nil) => Some(seqType(arg)) case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) => - Some(arrayType(if (isUnboundedGeneric(arg)) ObjectTpe else arg)) + Some(arrayType(if (isUnboundedGeneric(arg)) ObjectTpeJava else arg)) case _ => None } } - private val uncurryType = new TypeMap { + private[this] val uncurryType = new TypeMap { def apply(tp0: Type): Type = { val tp = expandAlias(tp0) tp match { @@ -99,20 +99,19 @@ trait UnCurry { // while processing one of its superclasses (such as java.lang.Object). 
Since we // don't need the more precise `matches` semantics, we only check the symbol, which // is anyway faster and safer - for (decl <- decls if decl.annotations.exists(_.symbol == VarargsClass)) { - if (mexists(decl.paramss)(sym => definitions.isRepeatedParamType(sym.tpe))) { - varargOverloads += varargForwarderSym(clazz, decl, exitingPhase(phase)(decl.info)) - } - } + for (decl <- decls) + if (decl.annotations.exists(_.symbol == VarargsClass) + && mexists(decl.paramss)(sym => definitions.isRepeatedParamType(sym.tpe))) + varargOverloads += varargForwarderSym(clazz, decl) if ((parents1 eq parents) && varargOverloads.isEmpty) tp else { val newDecls = decls.cloneScope - varargOverloads.foreach(newDecls.enter) + varargOverloads.foreach(newDecls.enter(_)) ClassInfoType(parents1, newDecls, clazz) } // @MAT normalize in decls?? case PolyType(_, _) => - mapOver(tp) + tp.mapOver(this) case _ => tp @@ -120,11 +119,9 @@ trait UnCurry { } } - private def varargForwarderSym(currentClass: Symbol, origSym: Symbol, newInfo: Type): Symbol = { + private def varargForwarderSym(currentClass: Symbol, origSym: Symbol): Symbol = { val forwSym = origSym.cloneSymbol(currentClass, VARARGS | SYNTHETIC | origSym.flags & ~DEFERRED, origSym.name.toTermName).withoutAnnotations - // we are using `origSym.info`, which contains the type *before* the transformation - // so we still see repeated parameter types (uncurry replaces them with Seq) def toArrayType(tp: Type, newParam: Symbol): Type = { val arg = elementType(SeqClass, tp) val elem = if (arg.typeSymbol.isTypeParameterOrSkolem && !(arg <:< AnyRefTpe)) { @@ -146,6 +143,8 @@ trait UnCurry { arrayType(elem) } + // we are using `origSym.info`, which contains the type *before* the transformation + // so we still see repeated parameter types (uncurry replaces them with Seq) foreach2(forwSym.paramss, origSym.info.paramss){ (fsps, origPs) => foreach2(fsps, origPs){ (p, sym) => if (definitions.isRepeatedParamType(sym.tpe)) diff --git 
a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala index 0375bde1639f..9c35289a9805 100644 --- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,14 +14,18 @@ package scala package reflect.internal.util import scala.collection.mutable +import scala.collection.immutable.ArraySeq import scala.reflect.io.AbstractFile -import java.net.{ URL, URLConnection, URLStreamHandler } +import java.net.{URI, URL, URLConnection, URLStreamHandler} import java.security.cert.Certificate -import java.security.{ ProtectionDomain, CodeSource } -import java.util.{ Collections => JCollections, Enumeration => JEnumeration } +import java.security.{CodeSource, ProtectionDomain} +import java.util.{Collections => JCollections, Enumeration => JEnumeration} + +import scala.annotation.nowarn object AbstractFileClassLoader { // should be a method on AbstractFile, but adding in `internal.util._` for now as we're in a minor release + @nowarn("cat=lint-nonlocal-return") private[scala] final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { var file: AbstractFile = base for (dirPart <- pathParts.init) { @@ -35,8 +39,6 @@ object AbstractFileClassLoader { } /** A class loader that loads files from a [[scala.reflect.io.AbstractFile]]. 
- * - * @author Lex Spoon */ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) extends ClassLoader(parent) @@ -47,12 +49,13 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) else s"${name.replace('.', '/')}.class" protected def findAbstractFile(name: String): AbstractFile = { - AbstractFileClassLoader.lookupPath(root)(name split '/', directory = false) + AbstractFileClassLoader.lookupPath(root)(ArraySeq.unsafeWrapArray(name split '/'), directory = false) } protected def dirNameToPath(name: String): String = name.replace('.', '/') + @nowarn("cat=lint-nonlocal-return") protected def findAbstractDir(name: String): AbstractFile = { var file: AbstractFile = root val pathParts = dirNameToPath(name) split '/' @@ -73,6 +76,10 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) else defineClass(name, bytes, 0, bytes.length, protectionDomain) } + + // on JDK 20 the URL constructor we're using is deprecated, but the recommended + // replacement, URL.of, doesn't exist on JDK 8 + @annotation.nowarn("cat=deprecation") override protected def findResource(name: String): URL = findAbstractFile(name) match { case null => null case file => new URL(null, s"memory:${file.path}", new URLStreamHandler { @@ -82,6 +89,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) } }) } + override protected def findResources(name: String): JEnumeration[URL] = findResource(name) match { case null => JCollections.enumeration(JCollections.emptyList[URL]) //JCollections.emptyEnumeration[URL] case url => JCollections.enumeration(JCollections.singleton(url)) @@ -95,7 +103,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) val n = s.lastIndexOf('!') if (n < 0) null else { val path = s.substring(0, n) - new ProtectionDomain(new CodeSource(new URL(path), null.asInstanceOf[Array[Certificate]]), null, this, null) + new ProtectionDomain(new CodeSource(new URI(path).toURL, 
null.asInstanceOf[Array[Certificate]]), null, this, null) } } } @@ -106,8 +114,9 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) throw new UnsupportedOperationException() } + // TODO: `getPackage` is deprecated in JDK 9+ - what should be overridden instead? override def getPackage(name: String): Package = findAbstractDir(name) match { - case null => super.getPackage(name) + case null => super.getPackage(name): @nowarn("cat=deprecation") case file => packages.getOrElseUpdate(name, { val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) ctor.setAccessible(true) diff --git a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java index f9bb24f00a85..1a816210d8e2 100644 --- a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java +++ b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/util/BooleanContainer.java b/src/reflect/scala/reflect/internal/util/BooleanContainer.java deleted file mode 100644 index dd1d9cfd826e..000000000000 --- a/src/reflect/scala/reflect/internal/util/BooleanContainer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.reflect.internal.util; - -/** - * Represents a container with a boolean value that tells the compiler whether - * an option is enabled or not. This class is used for configuration purposes - * (see scala.reflect.internal.util.Statistics). - */ -class BooleanContainer { - private final boolean value; - - public BooleanContainer(boolean value) { - this.value = value; - } - - public boolean isEnabledNow() { - return value; - } - - protected final static class TrueContainer extends BooleanContainer { - TrueContainer() { - super(true); - } - } - - protected final static class FalseContainer extends BooleanContainer { - FalseContainer() { - super(false); - } - } -} \ No newline at end of file diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala index be0a1bb5018b..b7ea49c901b2 100644 --- a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala +++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,7 +13,6 @@ package scala.reflect.internal.util import java.io.Closeable -import java.lang.management.ManagementFactory import java.nio.file.Path import java.util.concurrent.TimeUnit @@ -21,6 +20,7 @@ import scala.collection.mutable object ChromeTrace { + @annotation.unused // spurious private object EventType { final val Start = "B" final val Instant = "I" @@ -41,8 +41,9 @@ object ChromeTrace { final class ChromeTrace(f: Path) extends Closeable { import ChromeTrace.EventType private val traceWriter = FileUtils.newAsyncBufferedWriter(f) - private val context = mutable.ArrayStack[JsonContext](TopContext) + private val context = mutable.Stack[JsonContext](TopContext) private val tidCache = new ThreadLocal[String]() { + @annotation.nowarn("cat=deprecation") override def initialValue(): String = f"${Thread.currentThread().getId}%05d" } objStart() @@ -51,7 +52,17 @@ final class ChromeTrace(f: Path) extends Closeable { arrStart() traceWriter.newLine() - private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "") + private val pid: String = try { + // Using reflection to avoid a hard-dependency on non-compact1 profile parts of the Java library from scala-reflect + val getRuntimeMXBean = Class.forName("java.lang.management.ManagementFactory").getMethod("getRuntimeMXBean") + val runtimeMXBean = getRuntimeMXBean.invoke(null) + val getName = Class.forName("java.lang.management.RuntimeMXBean").getMethod("getName") + val name = getName.invoke(runtimeMXBean).asInstanceOf[String] + name.replaceAll("@.*", "") + } catch { + case _: Throwable => + "0" + } override def close(): Unit = { arrEnd() @@ -102,7 +113,7 @@ final class ChromeTrace(f: Path) extends Closeable { def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): 
Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) - private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String): Unit = { objStart() str("cat", cat) str("name", name) @@ -155,7 +166,7 @@ final class ChromeTrace(f: Path) extends Closeable { else traceWriter.write(",") case _ => } - context.push(ObjectContext(true)) + context.push(ObjectContext(first = true)) traceWriter.write("{") } private def objEnd(): Unit = { @@ -164,7 +175,7 @@ final class ChromeTrace(f: Path) extends Closeable { } private def arrStart(): Unit = { traceWriter.write("[") - context.push(ArrayContext(true)) + context.push(ArrayContext(first = true)) } private def arrEnd(): Unit = { traceWriter.write("]") @@ -177,8 +188,8 @@ final class ChromeTrace(f: Path) extends Closeable { case oc @ ObjectContext(first) => if (first) oc.first = false else traceWriter.write(",") - case context => - throw new IllegalStateException("Wrong context: " + context) + case otherContext => + throw new IllegalStateException(s"Wrong context: $otherContext") } traceWriter.write("\"") traceWriter.write(name) diff --git a/src/reflect/scala/reflect/internal/util/CodeAction.scala b/src/reflect/scala/reflect/internal/util/CodeAction.scala new file mode 100644 index 000000000000..70ca144f4d8a --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/CodeAction.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package reflect +package internal +package util + +/** + * EXPERIMENTAL + * + * CodeAction is used to communicate code edit suggestion to tooling in + * a structured manner. + * + * @see `CodeAction` + * + * @groupname Common Commonly used methods + * @group ReflectionAPI + */ +case class CodeAction(title: String, description: Option[String], edits: List[TextEdit]) + +object CodeAction { + def apply(title: String, pos: Position, newText: String, desc: String, check: => Boolean = true): List[CodeAction] = + if (check) List(CodeAction(title, Some(desc), List(TextEdit(pos, newText)))) + else Nil + + private lazy val parens = raw"\(.*\)".r + def maybeWrapInParens(s: String) = if (s.contains(" ") && !parens.matches(s)) s"($s)" else s + def wrapInParens(s: String) = if (!parens.matches(s)) s"($s)" else s +} + +/** + * EXPERIMENTAL + * + * + * @groupname Common Commonly used methods + * @group ReflectionAPI + */ +case class TextEdit(position: Position, newText: String) { + def delta: Int = newText.length - (position.end - position.start) +} diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 90156670b38c..7d3adff49f06 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -13,10 +13,12 @@ package scala package reflect.internal.util -import scala.collection.{ mutable, immutable } +import scala.reflect.ClassTag +import scala.collection.AbstractIterator +import scala.collection.{immutable, mutable} import scala.annotation.tailrec import mutable.ListBuffer -import java.util.NoSuchElementException +import scala.runtime.Statics.releaseFence /** Profiler driven changes. * TODO - inlining doesn't work from here because of the bug that @@ -53,7 +55,7 @@ trait Collections { * but people are branching out in their collections so here's an overload. */ final def mforeach[A](xss: List[List[A]])(f: A => Unit) = xss foreach (_ foreach f) - final def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit) = xss foreach (_ foreach f) + final def mforeach[A](xss: Iterable[Iterable[A]])(f: A => Unit) = xss foreach (_ foreach f) /** A version of List#map, specialized for List, and optimized to avoid allocation if `as` is empty */ final def mapList[A, B](as: List[A])(f: A => B): List[B] = if (as eq Nil) Nil else { @@ -62,10 +64,11 @@ trait Collections { var rest = as.tail while (rest ne Nil) { val next = new ::(f(rest.head), Nil) - tail.tl = next + tail.next = next tail = next rest = rest.tail } + releaseFence() head } @@ -83,11 +86,11 @@ trait Collections { } final def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = { - @tailrec - def loop(rest: List[A]): Option[B] = rest match { - case Nil => None - case a :: as if pf.isDefinedAt(a) => Some(pf(a)) - case a :: as => loop(as) + def loop(as: List[A]): Option[B] = as match { + case a :: as => + if (pf.isDefinedAt(a)) Some(pf(a)) + else loop(as) + case _ => None } loop(as) } @@ -136,7 +139,9 @@ trait Collections { } } } - loop(null, xs, xs, ys) + val result = loop(null, xs, xs, ys) + releaseFence() + result } final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = { @@ -156,7 +161,7 @@ trait Collections { ys1 = ys1.tail ys2 = 
ys2.tail } - if (lb eq null) Nil else lb.result + if (lb eq null) Nil else lb.result() } // compare to foldLeft[A, B](xs) @@ -172,7 +177,7 @@ trait Collections { res } - final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = { + final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Iterable[B]]): List[B] = { val lb = new ListBuffer[B] for (x <- elems ; if pf isDefinedAt x) lb ++= pf(x) @@ -197,7 +202,7 @@ trait Collections { xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail) } - final def foreachWithIndex[A](xs: List[A])(f: (A, Int) => Unit) { + final def foreachWithIndex[A](xs: List[A])(f: (A, Int) => Unit): Unit = { var index = 0 var ys = xs while (!ys.isEmpty) { @@ -208,8 +213,8 @@ trait Collections { } // @inline - final def findOrElse[A](xs: TraversableOnce[A])(p: A => Boolean)(orElse: => A): A = { - xs find p getOrElse orElse + final def findOrElse[A](xs: IterableOnce[A])(p: A => Boolean)(orElse: => A): A = { + xs.iterator find p getOrElse orElse } final def mapFrom[A, A1 >: A, B](xs: List[A])(f: A => B): Map[A1, B] = { @@ -310,18 +315,18 @@ trait Collections { } final def mapFilter2[A, B, C](itA: Iterator[A], itB: Iterator[B])(f: (A, B) => Option[C]): Iterator[C] = - new Iterator[C] { + new AbstractIterator[C] { private[this] var head: Option[C] = None private[this] def advanceHead(): Unit = while (head.isEmpty && itA.hasNext && itB.hasNext) { - val x = itA.next - val y = itB.next + val x = itA.next() + val y = itB.next() head = f(x, y) } def hasNext: Boolean = { advanceHead() - ! 
head.isEmpty + !head.isEmpty } def next(): C = { @@ -332,6 +337,28 @@ trait Collections { } } + final def mapToArray[A, B: ClassTag](xs: List[A])(f: A => B): Array[B] = { + val arr = new Array[B](xs.length) + var ix = 0 + var ys = xs + while (ix < arr.length){ + arr(ix) = f(ys.head) + ix += 1 + ys = ys.tail + } + arr + } + + final def mapFromArray[A, B](arr: Array[A])(f: A => B): List[B] = { + var ix = arr.length + var xs: List[B] = Nil + while (ix > 0){ + ix -= 1 + xs = f(arr(ix)) :: xs + } + xs + } + // "Opt" suffix or traverse clashes with the various traversers' traverses final def sequenceOpt[A](as: List[Option[A]]): Option[List[A]] = traverseOpt(as)(identity) final def traverseOpt[A, B](as: List[A])(f: A => Option[B]): Option[List[B]] = @@ -350,6 +377,15 @@ trait Collections { Some(result.toList) } + final def partitionInto[A](xs: List[A], pred: A => Boolean, ayes: ListBuffer[A], nays: ListBuffer[A]): Unit = { + var ys = xs + while (!ys.isEmpty) { + val y = ys.head + if (pred(y)) ayes.addOne(y) else nays.addOne(y) + ys = ys.tail + } + } + final def bitSetByPredicate[A](xs: List[A])(pred: A => Boolean): mutable.BitSet = { val bs = new mutable.BitSet() var ys = xs @@ -363,11 +399,6 @@ trait Collections { bs } - final def sequence[A](as: List[Option[A]]): Option[List[A]] = { - if (as.exists (_.isEmpty)) None - else Some(as.flatten) - } - final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try { Some(ass.transpose) } catch { @@ -389,6 +420,69 @@ trait Collections { @tailrec final def sumSize(xss: List[List[_]], acc: Int): Int = if (xss.isEmpty) acc else sumSize(xss.tail, acc + xss.head.size) + + final def fillList[T](n: Int)(t: T): List[T] = { + var i = 0 + var result: List[T] = Nil + while (i < n) { + result = t :: result + i += 1 + } + result + } + + final def mapToArray[A, B](as: List[A], arr: Array[B], i: Int)(f: A => B): Unit = { + var these = as + var index = i + while (!these.isEmpty) { + arr(index) = f(these.head) + index += 1 + these 
= these.tail + } + } + + private val TupleOfNil = (Nil, Nil) + final def partitionConserve[A](as: List[A])(p: A => Boolean): (List[A], List[A]) = { + if (as.isEmpty) TupleOfNil + else { + var b0 = true + var canConserve = true + var ys = as + var ayes: ListBuffer[A] = null + var nays: ListBuffer[A] = null + var n = 0 + while (!ys.isEmpty) { + val y = ys.head + val b = p(y) + if (canConserve) { + if (n == 0) b0 = b + else if (b != b0) { + canConserve = false + ayes = new ListBuffer[A] + nays = new ListBuffer[A] + val prefix = if (b0) ayes else nays + var j = 0 + var zs = as + while (j < n) { + prefix += zs.head + zs = zs.tail + j += 1 + } + (if (b) ayes else nays) += y + } + n += 1 + } else { + (if (b) ayes else nays) += y + } + ys = ys.tail + } + if (canConserve) + if (b0) (as, Nil) else (Nil, as) + else + (ayes.toList, nays.toList) + } + } + } object Collections extends Collections diff --git a/src/reflect/scala/reflect/internal/util/FileUtils.scala b/src/reflect/scala/reflect/internal/util/FileUtils.scala index ef5955775648..b2fe3a5b0132 100644 --- a/src/reflect/scala/reflect/internal/util/FileUtils.scala +++ b/src/reflect/scala/reflect/internal/util/FileUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,7 +14,7 @@ package scala.reflect.internal.util import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} import java.nio.CharBuffer -import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets}, StandardCharsets.UTF_8 import java.nio.file.{Files, OpenOption, Path} import java.util.concurrent.LinkedBlockingQueue import java.util.concurrent.atomic.AtomicBoolean @@ -25,7 +25,7 @@ import scala.concurrent.{Await, Promise} import scala.util.{Failure, Success} object FileUtils { - def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + def newAsyncBufferedWriter(path: Path, charset: Charset = UTF_8, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { val encoder: CharsetEncoder = charset.newEncoder val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) @@ -188,7 +188,7 @@ object FileUtils { finally scheduled.set(false) //we are not scheduled any more - //as a last check ensure that we didnt race with an addition to the queue + //as a last check ensure that we didn't race with an addition to the queue //order is essential - queue is checked before CAS if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) { global.execute(background) diff --git a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala index 248e15b9edf5..79186346b255 100644 --- a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala +++ b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala index 42c7f9256aed..8176b83a32aa 100644 --- a/src/reflect/scala/reflect/internal/util/HashSet.scala +++ b/src/reflect/scala/reflect/internal/util/HashSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -20,13 +20,13 @@ object HashSet { new HashSet[T](label, initialCapacity) } -class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with scala.collection.generic.Clearable { - private var used = 0 - private var table = new Array[AnyRef](initialCapacity) +class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with scala.collection.mutable.Clearable { + private[this] var used = 0 + private[this] var table = new Array[AnyRef](initialCapacity) private def index(x: Int): Int = math.abs(x % table.length) def size: Int = used - def clear() { + def clear(): Unit = { used = 0 table = new Array[AnyRef](initialCapacity) } @@ -57,7 +57,7 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte entry.asInstanceOf[T] } - def addEntry(x: T) { + def addEntry(x: T): Unit = { var h = index(x.##) var entry = table(h) while (entry ne null) { @@ -69,12 +69,12 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte used += 1 if (used > (table.length >> 2)) growTable() } - def addEntries(xs: TraversableOnce[T]) { - xs foreach addEntry + def addEntries(xs: IterableOnce[T]): Unit = { + xs.iterator foreach addEntry } - def iterator = new Iterator[T] { - private var i = 0 + def iterator: Iterator[T] = new collection.AbstractIterator[T] { + private[this] var i = 0 def 
hasNext: Boolean = { while (i < table.length && (table(i) eq null)) i += 1 i < table.length @@ -84,7 +84,7 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte else null } - private def addOldEntry(x: T) { + private def addOldEntry(x: T): Unit = { var h = index(x.##) var entry = table(h) while (entry ne null) { @@ -94,7 +94,7 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte table(h) = x } - private def growTable() { + private def growTable(): Unit = { val oldtable = table val growthFactor = if (table.length <= initialCapacity) 8 diff --git a/src/reflect/scala/reflect/internal/util/JavaClearable.scala b/src/reflect/scala/reflect/internal/util/JavaClearable.scala index 2b287ea927bd..4132ad49e926 100644 --- a/src/reflect/scala/reflect/internal/util/JavaClearable.scala +++ b/src/reflect/scala/reflect/internal/util/JavaClearable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,17 +15,17 @@ package scala.reflect.internal.util import java.lang.ref.WeakReference import java.util.{Collection => JCollection, Map => JMap} -import scala.collection.generic.Clearable +import scala.collection.mutable.Clearable object JavaClearable { def forCollection[T <: JCollection[_]](data: T): JavaClearable[T] = new JavaClearableCollection(new WeakReference(data)) def forMap[T <: JMap[_,_]](data: T): JavaClearable[T] = new JavaClearableMap(new WeakReference(data)) private final class JavaClearableMap[T <: JMap[_,_]](dataRef:WeakReference[T]) extends JavaClearable(dataRef) { - override def clear: Unit = Option(dataRef.get) foreach (_.clear()) + override def clear() = Option(dataRef.get).foreach(_.clear()) } private final class JavaClearableCollection[T <: JCollection[_]](dataRef:WeakReference[T]) extends JavaClearable(dataRef) { - override def clear: Unit = Option(dataRef.get) foreach (_.clear()) + override def clear() = Option(dataRef.get).foreach(_.clear()) } } sealed abstract class JavaClearable[T <: AnyRef] protected (protected val dataRef: WeakReference[T]) extends Clearable { @@ -44,7 +44,7 @@ sealed abstract class JavaClearable[T <: AnyRef] protected (protected val dataRe case _ => false } - def clear : Unit + def clear(): Unit def isValid = dataRef.get() ne null } diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala index 5f3e49e30180..ec581d3fd424 100644 --- a/src/reflect/scala/reflect/internal/util/Origins.scala +++ b/src/reflect/scala/reflect/internal/util/Origins.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -50,7 +50,7 @@ abstract class Origins { def newRep(xs: StackSlice): Rep def repString(rep: Rep): String - private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0 + private[this] val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0 private def add(xs: Rep) = origins(xs) += 1 private def total = origins.values.foldLeft(0L)(_ + _) @@ -77,11 +77,13 @@ abstract class Origins { } object Origins { - private val counters = mutable.HashMap[String, Origins]() - private val thisClass = this.getClass.getName + private[this] val counters = mutable.HashMap[String, Origins]() + private[this] val thisClass = this.getClass.getName locally { - sys.addShutdownHook(counters.values foreach (_.purge())) + Runtime.getRuntime.addShutdownHook(new Thread(() => + counters.values foreach (_.purge())) + ) } case class OriginId(className: String, methodName: String) { diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala index 0cd249fb1982..eb7728b2cf10 100644 --- a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -25,8 +25,8 @@ object OwnerOnlyChmod { private def canPosix(path: Path) = Files.getFileStore(path).supportsFileAttributeView(classOf[PosixFileAttributeView]) - private val posixDir = EnumSet.of(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE) - private val posixFile = EnumSet.of(OWNER_READ, OWNER_WRITE) + private[this] val posixDir = EnumSet.of(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE) + private[this] val posixFile = EnumSet.of(OWNER_READ, OWNER_WRITE) /** Remove group/other permissions for `file`, it if exists, and if the runtime environment supports modifying permissions. 
*/ def chmod(path: Path): Unit = { @@ -38,7 +38,7 @@ object OwnerOnlyChmod { val acls = { val builder = AclEntry.newBuilder - builder.setPrincipal(view.getOwner) + builder.setPrincipal(view.getOwner()) builder.setPermissions(AclEntryPermission.values(): _*) builder.setType(AclEntryType.ALLOW) val entry = builder.build diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index af0392598bdf..4387c83d67a2 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -34,12 +34,15 @@ class Position extends macros.EmptyAttachments with api.Position with InternalPo } object Position { - val tabInc = 8 + final val tabInc = 8 private def validate[T <: Position](pos: T): T = { - if (pos.isRange) - assert(pos.start <= pos.end, s"bad position: ${pos.show}") - + if (pos.isRange) { + import pos.{pos => _, _} + assert(start <= end, s"bad position: ${pos.show}") + //assert(start <= point && point <= end, s"bad position: point $point out of range $start..$end: ${pos.show}") + //assert(start <= point && point <= end, s"bad position: point $point out of range $start..$end: ${pos.show}\n${pos.lineContent}\n${pos.lineCaret}") + } pos } @@ -66,8 +69,10 @@ class OffsetPosition(sourceIn: SourceFile, pointIn: Int) extends DefinedPosition override def start = point override def end = point } -class RangePosition(sourceIn: SourceFile, startIn: Int, pointIn: Int, endIn: Int) extends OffsetPosition(sourceIn, pointIn) { +class RangePosition(sourceIn: SourceFile, startIn: Int, pointIn: Int, endIn: Int) extends DefinedPosition { override def isRange = true + override def source = sourceIn + override def point = pointIn override 
def start = startIn override def end = endIn } @@ -127,14 +132,29 @@ private[util] trait InternalPositionImpl { def isOpaqueRange = isRange && !isTransparent def pointOrElse(alt: Int): Int = if (isDefined) point else alt def makeTransparent: Position = if (isOpaqueRange) Position.transparent(source, start, point, end) else this - - /** Copy a range position with a changed value. - */ - def withStart(start: Int): Position = copyRange(start = start) - def withPoint(point: Int): Position = if (isRange) copyRange(point = point) else Position.offset(source, point) - def withEnd(end: Int): Position = copyRange(end = end) - def withSource(source: SourceFile): Position = copyRange(source = source) - def withShift(shift: Int): Position = Position.range(source, start + shift, point + shift, end + shift) + final def makeTransparentIf(cond: Boolean): Position = + if (cond && isOpaqueRange) Position.transparent(source, start, point, end) else this + + /* Copy a range position with a changed value. */ + /* Note: the result is validated (start <= end), use `copyRange` to update both at the same time. */ + /** If start differs, copy a range position or promote an offset. */ + def withStart(start: Int): Position = if (isDefined && this.start != start) copyRange(start = start) else this + /** If point differs, copy a range position or return an offset. */ + def withPoint(point: Int): Position = + if (!isDefined || this.point == point) this else if (isRange) copyRange(point = point) else asOffset(point) + /** If end differs, copy a range position or promote an offset. 
*/ + def withEnd(end: Int): Position = if (isDefined && this.end != end) copyRange(end = end) else this + def withSource(source: SourceFile): Position = + if (isRange) copyRange(source = source) + else if (isDefined) Position.offset(source, point) + else this + def withShift(shift: Int): Position = + if (isRange) Position.range(source, start + shift, point + shift, end + shift) + else if (isDefined) asOffset(point + shift) + else this + + def copyRange(start: Int = start, point: Int = point, end: Int = end, source: SourceFile = source) = + Position.range(source, start, point, end) /** Convert a range position to a simple offset. */ @@ -142,6 +162,13 @@ private[util] trait InternalPositionImpl { def focus: Position = if (this.isRange) asOffset(point) else this def focusEnd: Position = if (this.isRange) asOffset(end) else this + /** Convert an offset position to a degenerate range. + * + * Note that withPoint does not promote to range, but withStart and withEnd may do so. + * It would be more disciplined to require explicit promotion with toRange. + */ + def toRange: Position = if (this.isRange) this else copyRange() + /** If you have it in for punctuation you might not like these methods. * However I think they're aptly named. 
* @@ -150,17 +177,27 @@ private[util] trait InternalPositionImpl { * |^ means union, taking the point of the rhs * ^| means union, taking the point of the lhs */ - def |(that: Position, poses: Position*): Position = poses.foldLeft(this | that)(_ | _) + //def |(that: Position, poses: Position*): Position = poses.foldLeft(this | that)(_ | _) def |(that: Position): Position = this union that def ^(point: Int): Position = this withPoint point def |^(that: Position): Position = (this | that) ^ that.point def ^|(that: Position): Position = (this | that) ^ this.point - def union(pos: Position): Position = ( - if (!pos.isRange) this - else if (this.isRange) copyRange(start = start min pos.start, end = end max pos.end) - else pos - ) + /** Widen a range to include the other operand. + * If this position is a range, preserve its point; otherwise, the point of the other operand. + * Note that NoPosition | offset is not promoted to an offset position. + * Nor is offset | offset promoted to range. + */ + def union(pos: Position): Position = { + def ranged(point: Int) = Position.range(source, start = start.min(pos.start), point = point, end = end.max(pos.end)) + if (pos.isRange) { + if (this.isRange) ranged(point) + else if (this.isDefined) ranged(pos.point) + else pos + } + else if (this.isRange && pos.isDefined && !this.includes(pos)) ranged(point) + else this + } def includes(pos: Position): Boolean = isRange && pos.isDefined && start <= pos.start && pos.end <= end def properlyIncludes(pos: Position): Boolean = includes(pos) && (start < pos.start || pos.end < end) @@ -172,12 +209,31 @@ private[util] trait InternalPositionImpl { // necessary condition to establish that there is overlap. 
def overlaps(pos: Position): Boolean = bothRanges(pos) && start < pos.end && pos.start < end - def line: Int = if (hasSource) source.offsetToLine(point) + 1 else 0 - def column: Int = if (hasSource) calculateColumn() else 0 - def lineContent: String = if (hasSource) source.lineToString(line - 1) else "" - def lineCaret: String = if (hasSource) " " * (column - 1) + "^" else "" + private def line0 = source.offsetToLine(point) + private def lineOffset = source.lineToOffset(line0) + def line: Int = if (hasSource) line0 + 1 else 0 + def column: Int = if (!hasSource) 0 else { + var idx = lineOffset + var col = 0 + while (idx != point) { + col += (if (source.content(idx) == '\t') Position.tabInc - col % Position.tabInc else 1) + idx += 1 + } + col + 1 + } + def lineContent: String = if (hasSource) source.lineToString(line0) else "" + def lineCaret: String = if (!hasSource) "" else { + val buf = new StringBuilder + var idx = lineOffset + while (idx < point) { + buf.append(if (source.content(idx) == '\t') '\t' else ' ') + idx += 1 + } + buf.append('^') + buf.toString + } @deprecated("use `lineCaret`", since="2.11.0") - def lineCarat: String = lineCaret + def lineCarat: String = lineCaret def showError(msg: String): String = { def escaped(s: String) = { @@ -189,16 +245,11 @@ private[util] trait InternalPositionImpl { sb.toString } else s } - def errorAt(p: Pos) = { - def where = p.line - def content = escaped(p.lineContent) - def indicator = p.lineCaret - f"$where: $msg%n$content%n$indicator" - } + import java.lang.System.{lineSeparator => NL} finalPosition match { case FakePos(fmsg) => s"$fmsg $msg" case NoPosition => msg - case pos => errorAt(pos) + case pos => s"${pos.line}: ${msg}${NL}${escaped(pos.lineContent)}${NL}${pos.lineCaret}" } } def showDebug: String = toString @@ -214,18 +265,6 @@ private[util] trait InternalPositionImpl { that.isDefined && this.point == that.point && this.source.file == that.source.file private def asOffset(point: Int): Position = 
Position.offset(source, point) - private def copyRange(source: SourceFile = source, start: Int = start, point: Int = point, end: Int = end): Position = - Position.range(source, start, point, end) - - private def calculateColumn(): Int = { - var idx = source.lineToOffset(source.offsetToLine(point)) - var col = 0 - while (idx != point) { - col += (if (source.content(idx) == '\t') Position.tabInc - col % Position.tabInc else 1) - idx += 1 - } - col + 1 - } private def hasSource = source ne NoSourceFile private def bothRanges(that: Position) = isRange && that.isRange private def bothDefined(that: Position) = isDefined && that.isDefined diff --git a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala index bf0f46f64206..69625315e59f 100644 --- a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala +++ b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,28 +10,45 @@ * additional information regarding copyright ownership. */ -package scala -package reflect -package internal -package util +package scala.reflect.internal.util -/** A wrapper for a re-entrant, cached instance of a value of type `T`. +import scala.collection.mutable.ArrayBuffer +import scala.util.chaining._ + +/** A wrapper for a list of cached instances of a type `T`. + * The wrapper is recursion-reentrant: several instances are kept, so + * at each depth of reentrance we are reusing the instance for that. + * + * An instance is created eagerly, then more instances + * are allocated as needed on re-entry. Once allocated, + * cached instances are not reclaimed for the life of this ReusableInstance. * * Not thread safe. 
*/ -final class ReusableInstance[T <: AnyRef](make: () => T, enabled: Boolean) { - private val cached = make() - private var taken = false +final class ReusableInstance[T <: AnyRef] private (make: => T, initialSize: Int) { + private[this] val cache = if (initialSize > 0) new ArrayBuffer[T](initialSize).tap(_.addOne(make)) else null + private[this] var taken = 0 @inline def using[R](action: T => R): R = - if (!enabled || taken) action(make()) - else try { - taken = true - action(cached) - } finally taken = false + if (cache == null) + action(make) + else { + if (taken == cache.size) + cache += make + taken += 1 + try action(cache(taken-1)) finally taken -= 1 + } } object ReusableInstance { + private final val InitialSize = 4 + + def apply[T <: AnyRef](make: => T, initialSize: Int): ReusableInstance[T] = new ReusableInstance[T](make, initialSize) + + def apply[T <: AnyRef](make: => T): ReusableInstance[T] = + apply(make, InitialSize) def apply[T <: AnyRef](make: => T, enabled: Boolean): ReusableInstance[T] = - new ReusableInstance[T](make _, enabled) -} \ No newline at end of file + if (enabled) apply(make) else apply(make, -1) + def apply[T <: AnyRef](make: => T, initialSize: Int, enabled: Boolean): ReusableInstance[T] = + if (enabled) apply(make, initialSize) else apply(make, -1) +} diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala index 53f60800d550..0e67fa7d3c52 100644 --- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -43,25 +43,36 @@ final class RichClassLoader(private val self: JClassLoader) extends AnyVal { def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = true) private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] = - catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt + catching(classOf[ClassFormatError], classOf[ClassNotFoundException], classOf[SecurityException]) opt Class.forName(path, initialize, self).asInstanceOf[Class[T]] /** Create an instance of a class with this classloader */ def create(path: String): AnyRef = - tryToInitializeClass[AnyRef](path).map(_.newInstance()).orNull + tryToInitializeClass[AnyRef](path).map(_.getConstructor().newInstance()).orNull /** Create an instance with ctor args, or invoke errorFn before throwing. */ - def create[T <: AnyRef : ClassTag](path: String, errorFn: String => Unit)(args: AnyRef*): T = { + def create[T <: AnyRef : ClassTag](path: String, errorFn: String => Unit)(args: Any*): T = { def fail(msg: String) = error(msg, new IllegalArgumentException(msg)) - def error(msg: String, e: Throwable) = { errorFn(msg) ; throw e } + def error(msg: String, e: Throwable) = { errorFn(msg); throw e } try { val clazz = Class.forName(path, /*initialize =*/ true, /*loader =*/ self) - if (classTag[T].runtimeClass isAssignableFrom clazz) { + if (classTag[T].runtimeClass.isAssignableFrom(clazz)) { val ctor = { - val maybes = clazz.getConstructors filter (c => c.getParameterCount == args.size && - (c.getParameterTypes zip args).forall { case (k, a) => k isAssignableFrom a.getClass }) + val bySize = clazz.getConstructors.filter(_.getParameterCount == args.size) + if (bySize.isEmpty) fail(s"No constructor takes ${args.size} parameters.") + def isAssignable(k: Class[?], a: Any): Boolean = + if (k == classOf[Int]) a.isInstanceOf[Integer] + else if (k == classOf[Boolean]) a.isInstanceOf[java.lang.Boolean] + else if (k == classOf[Long]) 
a.isInstanceOf[java.lang.Long] + else k.isAssignableFrom(a.getClass) + val maybes = bySize.filter(c => c.getParameterTypes.zip(args).forall { case (k, a) => isAssignable(k, a) }) if (maybes.size == 1) maybes.head - else fail(s"Constructor must accept arg list (${args map (_.getClass.getName) mkString ", "}): ${path}") + else if (bySize.size == 1) + fail(s"One constructor takes ${args.size} parameters but ${ + bySize.head.getParameterTypes.zip(args).collect { case (k, a) if !isAssignable(k, a) => s"$k != ${a.getClass}" }.mkString("; ") + }.") + else + fail(s"Constructor must accept arg list (${args.map(_.getClass.getName).mkString(", ")}): ${path}") } (ctor.newInstance(args: _*)).asInstanceOf[T] } else { @@ -90,7 +101,7 @@ final class RichClassLoader(private val self: JClassLoader) extends AnyVal { } /** Run the main method of a class to be loaded by this classloader */ - def run(objectName: String, arguments: Seq[String]) { + def run(objectName: String, arguments: Seq[String]): Unit = { val clsToRun = tryToInitializeClass(objectName) getOrElse ( throw new ClassNotFoundException(objectName) ) @@ -156,15 +167,13 @@ object ScalaClassLoader { } def contextLoader = apply(Thread.currentThread.getContextClassLoader) def appLoader = apply(JClassLoader.getSystemClassLoader) - def setContext(cl: JClassLoader) = Thread.currentThread.setContextClassLoader(cl) class URLClassLoader(urls: Seq[URL], parent: JClassLoader) extends JURLClassLoader(urls.toArray, parent) with ScalaClassLoader with HasClassPath { - - private var classloaderURLs: Seq[URL] = urls + private[this] var classloaderURLs: Seq[URL] = urls def classPathURLs: Seq[URL] = classloaderURLs /** Override to widen to public */ diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala index 4728f7ddc339..2880020cfcd1 100644 --- a/src/reflect/scala/reflect/internal/util/Set.scala +++ b/src/reflect/scala/reflect/internal/util/Set.scala @@ -1,7 +1,7 @@ /* * Scala 
(https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index 18fd2b96a789..3daf337db3a5 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,12 +13,13 @@ package scala package reflect.internal.util -import scala.reflect.io.{ AbstractFile, VirtualFile } -import scala.collection.mutable.ArrayBuffer +import scala.reflect.io.{AbstractFile, VirtualFile} +import scala.collection.mutable.ArrayBuilder import scala.annotation.tailrec import java.util.regex.Pattern import java.io.IOException import scala.reflect.internal.Chars._ +import PartialFunction.cond /** abstract base class of a source file used in the compiler */ abstract class SourceFile { @@ -30,7 +31,7 @@ abstract class SourceFile { def length : Int def lineCount: Int def position(offset: Int): Position = { - assert(offset < length, file + ": " + offset + " >= " + length) + assert(offset < length, file.toString + ": " + offset + " >= " + length) Position.offset(this, offset) } @@ -55,7 +56,23 @@ abstract class SourceFile { final def skipWhitespace(offset: Int): Int = if (content(offset).isWhitespace) skipWhitespace(offset + 1) else offset - def identifier(pos: Position): Option[String] = None + def identFrom(pos: Position): Option[String] = + Option.when(pos.isDefined && pos.source == this && pos.point != -1) { + def isOK(c: Char) = isIdentifierPart(c) || isOperatorPart(c) + new String(content drop pos.point takeWhile isOK) + } + 
+ def sourceAt(pos: Position): String = + if (pos.start < pos.end) new String(content.slice(pos.start, pos.end)) else "" + + def indexWhere(p: Char => Boolean, start: Int, step: Int = 1): Int = { + var i = start + while (i >= 0 && i < content.length) { + if (p(content(i))) return i + i += step + } + -1 + } /** An iterator over the lines between `start` and `end`. * @@ -123,6 +140,10 @@ class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], overri else pos withSource underlying withShift start } +/* See PerRunReporting.repSrc */ +class ReplBatchSourceFile(filename: String, content: String, val parserSource: BatchSourceFile) + extends BatchSourceFile(filename, content) + /** a file whose contents do not change over time */ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends SourceFile { def this(_file: AbstractFile) = this(_file, _file.toCharArray) @@ -144,14 +165,6 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So def start = 0 def isSelfContained = true - override def identifier(pos: Position) = - if (pos.isDefined && pos.source == this && pos.point != -1) { - def isOK(c: Char) = isIdentifierPart(c) || isOperatorPart(c) - Some(new String(content drop pos.point takeWhile isOK)) - } else { - super.identifier(pos) - } - private def charAtIsEOL(idx: Int)(p: Char => Boolean) = { // don't identify the CR in CR LF as a line break, since LF will do. def notCRLF0 = content(idx) != CR || !content.isDefinedAt(idx + 1) || content(idx + 1) != LF @@ -162,31 +175,33 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So def isLineBreak(idx: Int) = charAtIsEOL(idx)(isLineBreakChar) /** True if the index is included by an EOL sequence. 
*/ - def isEndOfLine(idx: Int) = (content isDefinedAt idx) && PartialFunction.cond(content(idx)) { - case CR | LF => true - } + def isEndOfLine(idx: Int) = content.isDefinedAt(idx) && cond(content(idx)) { case CR | LF => true } /** True if the index is end of an EOL sequence. */ - def isAtEndOfLine(idx: Int) = charAtIsEOL(idx) { - case CR | LF => true - case _ => false - } + def isAtEndOfLine(idx: Int) = charAtIsEOL(idx)(c => c == CR || c == LF) private lazy val lineIndices: Array[Int] = { def calculateLineIndices(cs: Array[Char]) = { - val buf = new ArrayBuffer[Int] - buf += 0 - for (i <- 0 until cs.length) if (isAtEndOfLine(i)) buf += i + 1 - buf += cs.length // sentinel, so that findLine below works smoother - buf.toArray + val buf = new ArrayBuilder.ofInt + buf.sizeHint(cs.length / 30) // pick a short avg line length and hope to avoid reallocation and extra copy + buf.addOne(0) + @tailrec def fillLines(i: Int): Unit = + if (i < cs.length) { + if (isAtEndOfLine(i)) buf.addOne(i + 1) + fillLines(i + 1) + } + fillLines(0) + buf.addOne(cs.length) // sentinel, so that findLine below works smoother + buf.result() } calculateLineIndices(content) } - def lineToOffset(index: Int): Int = { - val offset = lineIndices(index) - if (offset < length) offset else throw new IndexOutOfBoundsException(index.toString) - } + def lineToOffset(index: Int): Int = + lineIndices(index) match { + case offset if offset < length => offset + case _ => throw new IndexOutOfBoundsException(index.toString) + } private[this] var lastLine = 0 @@ -195,8 +210,10 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So */ def offsetToLine(offset: Int): Int = { val lines = lineIndices + if (lines.isEmpty || offset < lines.head || offset >= lines.last) throw new IndexOutOfBoundsException(offset.toString) + @tailrec def findLine(lo: Int, hi: Int, mid: Int): Int = ( - if (mid < lo || hi < mid) mid // minimal sanity check - as written this easily went into infinite loopyland + 
if (mid < lo || hi < mid) mid // minimal confidence check - as written this easily went into infinite loopyland else if (offset < lines(mid)) findLine(lo, mid - 1, (lo + mid - 1) / 2) else if (offset >= lines(mid + 1)) findLine(mid + 1, hi, (mid + 1 + hi) / 2) else mid @@ -214,8 +231,8 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So override def equals(that : Any) = that match { - case that : BatchSourceFile => file.path == that.file.path && start == that.start - case _ => false + case that: BatchSourceFile if !file.isVirtual && !that.file.isVirtual => file.path == that.file.path && start == that.start + case _ => super.equals(that) } - override def hashCode = file.path.## + start.## + override def hashCode = if (!file.isVirtual) file.path.## + start.## else super.hashCode() } diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 94cc82d8bc7c..4736c2a7add2 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,6 +18,7 @@ import scala.reflect.internal.SymbolTable import scala.reflect.internal.settings.MutableSettings import java.util.concurrent.atomic.{AtomicInteger, AtomicLong} +import scala.annotation.nowarn import scala.runtime.LongRef abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { @@ -107,10 +108,10 @@ quant) /** Create a new quantity map that shows as `prefix` and is active in given phases. 
*/ - def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue) + def newQuantMap[K, V](prefix: String, phases: String*)(initValue: => V)(implicit ev: V => Ordered[V]): QuantMap[K, V] = new QuantMap(prefix, phases, initValue) /** Same as newQuantMap, where the key type is fixed to be Class[_] */ - def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue) + def newByClass[V](prefix: String, phases: String*)(initValue: => V)(implicit ev: V => Ordered[V]): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue) /** Create a new timer stack */ def newTimerStack() = new TimerStack() @@ -173,17 +174,16 @@ quant) class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity { def start() = (value, underlying.value) - def stop(prev: (Int, Int)) { + def stop(prev: (Int, Int)): Unit = { val (value0, uvalue0) = prev value = value0 + underlying.value - uvalue0 } - override def toString = - value + showPercent(value.toLong, underlying.value.toLong) + override def toString = s"${value}${showPercent(value.toLong, underlying.value.toLong)}" } class Timer(val prefix: String, val phases: Seq[String]) extends Quantity { - private val totalThreads = new AtomicInteger() - private val threadNanos = new ThreadLocal[LongRef] { + private[this] val totalThreads = new AtomicInteger() + private[this] val threadNanos = new ThreadLocal[LongRef] { override def initialValue() = { totalThreads.incrementAndGet() new LongRef(0) @@ -195,7 +195,7 @@ quant) def start(): TimerSnapshot = { (threadNanos.get.elem, System.nanoTime()) } - def stop(prev: TimerSnapshot) { + def stop(prev: TimerSnapshot): Unit = { val (nanos0, start) = prev val newThreadNanos = nanos0 + System.nanoTime() - start val threadNanosCount = threadNanos.get @@ -230,16 +230,21 @@ quant) 
override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific" } + // FIXME: Redesign `QuantMap` so that it no longer extends `HashMap` (probably + // using composition), then remove the `@nowarn` annotation /** A mutable map quantity where missing elements are automatically inserted * on access by executing `initValue`. */ - class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V) - extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity { + @nowarn("""cat=deprecation&origin=scala\.collection\.mutable\.HashMap""") + class QuantMap[K, V](val prefix: String, val phases: Seq[String], initValue: => V)(implicit ev: V => Ordered[V]) + extends mutable.HashMap[K, V] with Quantity { override def default(key: K) = { val elem = initValue this(key) = elem elem } + //TODO clients may need to do additional synchronization; QuantMap used to extend SynchronizedMap before 2.13 + override def apply(key: K): V = super.apply(key) override def toString = this.toSeq.sortWith(_._2 > _._2).map { case (cls: Class[_], elem) => @@ -256,7 +261,7 @@ quant) * Note: Not threadsafe */ class TimerStack { - private var elems: List[(StackableTimer, Long)] = Nil + private[this] var elems: List[(StackableTimer, Long)] = Nil /** Start given timer and push it onto the stack */ def push(t: StackableTimer): TimerSnapshot = { elems = (t, 0L) :: elems @@ -267,7 +272,7 @@ quant) def pop(prev: TimerSnapshot) = { val (nanos0, start) = prev val duration = System.nanoTime() - start - val (topTimer, nestedNanos) :: rest = elems + val (topTimer, nestedNanos) :: rest = elems: @unchecked topTimer.totalNanos.addAndGet(nanos0 + duration) topTimer.specificNanos += duration - nestedNanos topTimer.timings.incrementAndGet() diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index 76c1644e18bf..937a3471c4da 100644 --- 
a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index abfceac40574..8b439b563d91 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,9 +19,6 @@ import java.lang.System.{lineSeparator => EOL} /** This object provides utility methods to extract elements * from Strings. 
- * - * @author Martin Odersky - * @version 1.0 */ trait StringOps { def oempty(xs: String*) = xs filterNot (x => x == null || x == "") @@ -31,7 +28,7 @@ trait StringOps { case w :: Nil => w case _ => def lcp(ss: List[String]): String = { - val w :: ws = ss + val w :: ws = ss: @unchecked if (w == "") "" else if (ws exists (s => s == "" || (s charAt 0) != (w charAt 0))) "" else w.substring(0, 1) + lcp(ss map (_ substring 1)) @@ -68,6 +65,8 @@ trait StringOps { def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] = splitAt(str, str indexWhere f, doDropIndex) + def splitAround(str: String, idx: Int): Option[(String, String)] = splitAt(str, idx, doDropIndex = true) + def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] = if (idx == -1) None else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx))) @@ -78,22 +77,13 @@ trait StringOps { def countElementsAsString(n: Int, element: String): String = n match { case 0 => s"no ${element}s" - case 1 => s"one ${element}" + case 1 => s"1 ${element}" case _ => s"${countAsString(n)} ${element}s" } - /** Turns a count into a friendly English description if n<=4. - * Otherwise, a scary math representation. + /** String conversion. */ - def countAsString(n: Int): String = - n match { - case 0 => "none" - case 1 => "one" - case 2 => "two" - case 3 => "three" - case 4 => "four" - case _ => n.toString - } + def countAsString(n: Int): String = Integer.toString(n) } object StringOps extends StringOps diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala index c07e8c002c57..e81e5260479f 100644 --- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala +++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,11 +19,11 @@ trait StripMarginInterpolator { def stringContext: StringContext /** - * A safe combination of [[scala.collection.immutable.StringLike#stripMargin]] + * A safe combination of [[scala.collection.StringOps#stripMargin]] * and [[scala.StringContext#raw]]. * * The margin of each line is defined by whitespace leading up to a '|' character. - * This margin is stripped '''before''' the arguments are interpolated into to string. + * This margin is stripped '''before''' the arguments are interpolated into the string. * * String escape sequences are '''not''' processed; this interpolator is designed to * be used with triple quoted Strings. @@ -31,23 +31,30 @@ trait StripMarginInterpolator { * {{{ * scala> val foo = "f|o|o" * foo: String = f|o|o - * scala> sm"""|${foo} + * scala> sm"""|${foo}| * |""" * res0: String = - * "f|o|o + * "f|o|o| * " * }}} */ - final def sm(args: Any*): String = { - def isLineBreak(c: Char) = c == '\n' || c == '\f' // compatible with StringLike#isLineBreak + final def sm(args: Any*): String = impl('|', args: _*) + + private final def impl(sep: Char, args: Any*): String = { + def isLineBreak(c: Char) = c == Chars.LF || c == Chars.FF // compatible with CharArrayReader def stripTrailingPart(s: String) = { val (pre, post) = s.span(c => !isLineBreak(c)) - pre + post.stripMargin + pre + post.stripMargin(sep) } val stripped: List[String] = stringContext.parts.toList match { - case head :: tail => head.stripMargin :: (tail map stripTrailingPart) + case head :: tail => head.stripMargin(sep) :: tail.map(stripTrailingPart) case Nil => Nil } new StringContext(stripped: _*).raw(args: _*) } + + /** Like the `sm` interpolator, but strips quotation-style delimiter `>` + * and merges the resulting lines into a single line string. 
+ */ + final def sq(args: Any*): String = impl('>', args: _*).linesIterator.mkString } diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index b6e98c6eedee..6661cbbfee25 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,7 @@ package scala package reflect.internal.util import TableDef._ -import scala.language.postfixOps +import scala.collection.immutable.AbstractSeq /** A class for representing tabular data in a way that preserves * its inner beauty. @@ -28,7 +28,7 @@ class TableDef[T](_cols: Column[T]*) { def ~(next: Column[T]) = retThis(cols :+= next) // Below this point should all be considered private/internal. 
- private var cols: List[Column[T]] = _cols.toList + private[this] var cols: List[Column[T]] = _cols.toList def defaultSep(index: Int) = if (index > (cols.size - 2)) "" else " " def sepAfter(i: Int): String = defaultSep(i) @@ -39,12 +39,12 @@ class TableDef[T](_cols: Column[T]*) { def colApply(el: T) = colFunctions map (f => f(el)) def retThis(body: => Unit): this.type = { body ; this } - class Table(val rows: Seq[T]) extends Seq[T] { + class Table(val rows: Seq[T]) extends AbstractSeq[T] { def iterator = rows.iterator def apply(index: Int) = rows(index) def length = rows.length - def maxColWidth(col: Column[T]) = col.name +: (rows map col.f) map (_.toString.length) max + def maxColWidth(col: Column[T]) = (col.name +: rows.map(col.f)).map(_.toString.length).max def specs = cols map (_ formatSpec rows) val colWidths = cols map maxColWidth @@ -54,11 +54,11 @@ class TableDef[T](_cols: Column[T]*) { val headers = List( headFormat.format(colNames: _*), - (colWidths, sepWidths).zipped map ((w1, w2) => "-" * w1 + " " * w2) mkString + colWidths.lazyZip(sepWidths).map((w1, w2) => "-" * w1 + " " * w2).mkString ) def mkFormatString(sepf: Int => String): String = - specs.zipWithIndex map { case (c, i) => c + sepf(i) } mkString + specs.zipWithIndex.map { case (c, i) => c + sepf(i) }.mkString def toFormattedSeq = argLists map (xs => rowFormat.format(xs: _*)) def allToSeq = headers ++ toFormattedSeq @@ -73,7 +73,7 @@ class TableDef[T](_cols: Column[T]*) { object TableDef { case class Column[-T](name: String, f: T => Any, left: Boolean) { - def maxWidth(elems: Seq[T]): Int = name +: (elems map f) map (_.toString.length) max + def maxWidth(elems: Seq[T]): Int = (name +: elems.map(f)).map(_.toString.length).max def formatSpec(elems: Seq[T]): String = { val justify = if (left) "-" else "" "%" + justify + maxWidth(elems) + "s" diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala index 
bbc75dc1e0a4..298721941c7c 100644 --- a/src/reflect/scala/reflect/internal/util/ThreeValues.scala +++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index 1e6236b49b38..c92979d3b1ec 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,30 +17,34 @@ package util import scala.collection.mutable trait TraceSymbolActivity { + // FIXME: With `global` as a `val`, implementers must use early initializers, which + // are deprecated and will not be supported in 3.0. Please change the design, + // remove the early initializers from implementers, and then remove the + // `@nowarn` annotations from implementers. 
val global: SymbolTable import global._ private[this] var enabled = traceSymbolActivity if (enabled && global.isCompilerUniverse) - scala.sys addShutdownHook showAllSymbols() + Runtime.getRuntime.addShutdownHook(new Thread(() => showAllSymbols())) val allSymbols = mutable.Map[Int, Symbol]() val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil val allTrees = mutable.Set[Tree]() - def recordSymbolsInTree(tree: Tree) { + def recordSymbolsInTree(tree: Tree): Unit = { if (enabled) allTrees += tree } - def recordNewSymbol(sym: Symbol) { + def recordNewSymbol(sym: Symbol): Unit = { if (enabled && sym.id > 1) { allSymbols(sym.id) = sym allChildren(sym.owner.id) ::= sym.id } } - def recordNewSymbolOwner(sym: Symbol, newOwner: Symbol) { + def recordNewSymbolOwner(sym: Symbol, newOwner: Symbol): Unit = { if (enabled) { val sid = sym.id val oid = sym.owner.id @@ -56,14 +60,14 @@ trait TraceSymbolActivity { private def signature(id: Int) = enteringPhase(erasurePhase)(allSymbols(id).defString) private def dashes(s: Any): String = ("" + s) map (_ => '-') - private def show(s1: Any, ss: Any*) { + private def show(s1: Any, ss: Any*): Unit = { println("%-12s".format(s1) +: ss mkString " ") } - private def showHeader(s1: Any, ss: Any*) { + private def showHeader(s1: Any, ss: Any*): Unit = { show(s1, ss: _*) show(dashes(s1), ss map dashes: _*) } - private def showSym(sym: Symbol) { + private def showSym(sym: Symbol): Unit = { def prefix = (" " * (sym.ownerChain.length - 1)) + sym.id try println("%s#%s %s".format(prefix, sym.accurateKindString, sym.name.decode)) catch { @@ -71,7 +75,7 @@ trait TraceSymbolActivity { } allChildren(sym.id).sorted foreach showIdAndRemove } - private def showIdAndRemove(id: Int) { + private def showIdAndRemove(id: Int): Unit = { allSymbols remove id foreach showSym } private def symbolStr(id: Int): String = { @@ -85,22 +89,20 @@ trait TraceSymbolActivity { 
sym.name.decode + "#" + sym.id } - private def freq[T, U](xs: scala.collection.Traversable[T])(fn: T => U): List[(U, Int)] = { - val ys = xs groupBy fn mapValues (_.size) - ys.toList sortBy (-_._2) - } + private def freq[T, U](xs: collection.Iterable[T])(fn: T => U): List[(U, Int)] = + xs.groupMapReduce(fn)(_ => 1)(_ + _).toList.sortBy(-_._2) - private def showMapFreq[T](xs: scala.collection.Map[T, Traversable[_]])(showFn: T => String) { - xs.mapValues(_.size).toList.sortBy(-_._2) take 100 foreach { case (k, size) => + private def showMapFreq[T](xs: collection.Map[T, Iterable[_]])(showFn: T => String): Unit = { + xs.view.mapValues(_.size).toList.sortBy(-_._2) take 100 foreach { case (k, size) => show(size, showFn(k)) } println("\n") } - private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = { + private def showFreq[T, U](xs: Iterable[T])(groupFn: T => U, showFn: U => String) = { showMapFreq(xs.toList groupBy groupFn)(showFn) } - def showAllSymbols() { + def showAllSymbols(): Unit = { if (!enabled) return enabled = false allSymbols(1) = NoSymbol @@ -133,7 +135,7 @@ trait TraceSymbolActivity { "%4s owners (%s)".format( owners.size, - owners.take(3).map({ case (k, v) => v + "/" + k }).mkString(", ") + ", ..." + owners.take(3).map({ case (k, v) => s"${v}/${k}" }).mkString(", ") + ", ..." ) }) } diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala index 384fabe6a1ca..ff3722cc22d2 100644 --- a/src/reflect/scala/reflect/internal/util/TriState.scala +++ b/src/reflect/scala/reflect/internal/util/TriState.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -27,7 +27,7 @@ final class TriState private (val value: Int) extends AnyVal { def booleanValue = this match { case True => true case False => false - case _ => sys.error("Not a Boolean value") + case _ => throw new IllegalStateException("Not a Boolean value") } } diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index f45c8dcf2a97..c8d6f031dcc8 100644 --- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -13,8 +13,10 @@ package scala package reflect.internal.util -import java.lang.ref.{WeakReference, ReferenceQueue} +import java.lang.ref.{ReferenceQueue, WeakReference} + import scala.annotation.tailrec +import scala.collection.{AbstractIndexedSeqView, IndexedSeqView} import scala.collection.mutable.{Set => MSet} /** @@ -27,7 +29,7 @@ import scala.collection.mutable.{Set => MSet} * This set implementation is not in general thread safe without external concurrency control. However it behaves * properly when GC concurrently collects elements in this set. 
*/ -final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] { +final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends MSet[A] { import WeakHashSet._ @@ -72,6 +74,8 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D private[this] def computeThreshold: Int = (table.size * loadFactor).ceil.toInt + def get(elem: A): Option[A] = Option(findEntry(elem)) + /** * find the bucket associated with an element's hash code */ @@ -94,7 +98,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D /** * remove a single entry from a linked list in a given bucket */ - private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) { + private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]): Unit = { prevEntry match { case null => table(bucket) = entry.tail case _ => prevEntry.tail = entry.tail @@ -105,7 +109,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D /** * remove entries associated with elements that have been gc'ed */ - private[this] def removeStaleEntries() { + private[this] def removeStaleEntries(): Unit = { def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]] @tailrec @@ -130,7 +134,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D /** * Double the size of the internal table */ - private[this] def resize() { + private[this] def resize(): Unit = { val oldTable = table table = new Array[Entry[A]](oldTable.size * 2) threshold = computeThreshold @@ -155,8 +159,10 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D tableLoop(0) } + def contains(elem: A): Boolean = findEntry(elem) ne null + // from scala.reflect.internal.Set, find an element or null if it isn't contained - override def findEntry(elem: A): A = elem match { + def findEntry(elem: A): A = 
elem match { case null => throw new NullPointerException("WeakHashSet cannot hold nulls") case _ => { removeStaleEntries() @@ -207,7 +213,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D } // add an element to this set unless it's already in there and return this set - override def +(elem: A): this.type = elem match { + override def addOne (elem: A): this.type = elem match { case null => throw new NullPointerException("WeakHashSet cannot hold nulls") case _ => { removeStaleEntries() @@ -215,7 +221,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D val bucket = bucketFor(hash) val oldHead = table(bucket) - def add() { + def add(): Unit = { table(bucket) = new Entry(elem, hash, oldHead, queue) count += 1 if (count > threshold) resize() @@ -233,13 +239,8 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D } } - def +=(elem: A) = this + elem - - // from scala.reflect.internal.Set - override def addEntry(x: A) { this += x } - // remove an element from this set and return this set - override def -(elem: A): this.type = elem match { + override def subtractOne(elem: A): this.type = elem match { case null => this case _ => { removeStaleEntries() @@ -259,7 +260,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D } } - def -=(elem: A) = this - elem + override def -(elem: A) = subtractOne(elem) // empty this set override def clear(): Unit = { @@ -281,18 +282,18 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D count } - override def apply(x: A): Boolean = this contains x - + override def isEmpty: Boolean = size == 0 override def foreach[U](f: A => U): Unit = iterator foreach f - // It has the `()` because iterator runs `removeStaleEntries()` - override def toList(): List[A] = iterator.toList + // It had the `()` because iterator runs `removeStaleEntries()`. 
+ // Instead of just using a different name, keep the name and lose the parens. + override def toList: List[A] = iterator.toList // Iterator over all the elements in this set in no particular order override def iterator: Iterator[A] = { removeStaleEntries() - new Iterator[A] { + new collection.AbstractIterator[A] { /** * the bucket currently being examined. Initially it's set past the last bucket and will be decremented @@ -351,7 +352,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D * the entries must be stable. If any are garbage collected during validation * then an assertion may inappropriately fire. */ - def fullyValidate: Unit = { + def fullyValidate(): Unit = { var computedCount = 0 var bucket = 0 while (bucket < table.size) { @@ -377,7 +378,17 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D /** * Produces a diagnostic dump of the table that underlies this hash set. */ - def dump = table.deep + def dump = { + def deep[T](a: Array[T]): IndexedSeqView[Any] = new AbstractIndexedSeqView[Any] { + def length = a.length + def apply(idx: Int): Any = a(idx) match { + case x: AnyRef if x.getClass.isArray => deep(x.asInstanceOf[Array[_]]) + case x => x + } + override def className = "Array" + } + deep(table) + } /** * Number of buckets that hold collisions. Useful for diagnosing performance issues. 
@@ -413,5 +424,8 @@ object WeakHashSet { val defaultInitialCapacity = 16 val defaultLoadFactor = .75 - def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, defaultLoadFactor) + def apply[A <: AnyRef](initialCapacity: Int = defaultInitialCapacity, loadFactor: Double = defaultLoadFactor) = + new WeakHashSet[A](initialCapacity, loadFactor) + + def empty[A <: AnyRef]: WeakHashSet[A] = new WeakHashSet[A]() } diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala index 92086cb6c0b1..2c48816cba09 100644 --- a/src/reflect/scala/reflect/internal/util/package.scala +++ b/src/reflect/scala/reflect/internal/util/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 3abeaa1159df..8463a4a076d0 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,17 +14,17 @@ package scala package reflect package io -import java.io.{ IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream } -import java.io.{ File => JFile } +import java.io.{BufferedOutputStream, ByteArrayOutputStream, IOException, InputStream, OutputStream} +import java.io.{File => JFile} import java.net.URL +import java.nio.ByteBuffer + +import scala.collection.AbstractIterable /** * An abstraction over files for use in the reflection/compiler libraries. * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' - * - * @author Philippe Altherr - * @version 1.0, 23/03/2004 */ object AbstractFile { /** Returns "getFile(new File(path))". */ @@ -48,7 +48,7 @@ object AbstractFile { */ def getDirectory(file: File): AbstractFile = if (file.isDirectory) new PlainFile(file) - else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive fromFile file + else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive.fromFile(file) else null /** @@ -63,7 +63,7 @@ object AbstractFile { else getFile(f) } else null - def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url + def getResources(url: URL): AbstractFile = ZipArchive.fromManifestURL(url) } /** @@ -92,7 +92,7 @@ object AbstractFile { * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -abstract class AbstractFile extends Iterable[AbstractFile] { +abstract class AbstractFile extends AbstractIterable[AbstractFile] { /** Returns the name of this abstract file. */ def name: String @@ -192,13 +192,14 @@ abstract class AbstractFile extends Iterable[AbstractFile] { out.toByteArray() } } + def toByteBuffer: ByteBuffer = ByteBuffer.wrap(toByteArray) /** Returns the context of this file (if applicable) in a byte array. This array might _not_ be defensively copied. 
*/ def unsafeToByteArray: Array[Byte] = toByteArray /** Returns all abstract subfiles of this abstract directory. */ def iterator: Iterator[AbstractFile] - + override def isEmpty: Boolean = iterator.isEmpty /** Returns the abstract file in this abstract directory with the specified * name. If there is no such file, returns `null`. The argument * `directory` tells whether to look for a directory or diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala index 24415a3cdacb..d02bdb239424 100644 --- a/src/reflect/scala/reflect/io/Directory.scala +++ b/src/reflect/scala/reflect/io/Directory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -35,9 +35,6 @@ object Directory { } /** An abstraction for directories. - * - * @author Paul Phillips - * @since 2.8 * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala index a091b3c45dc5..95f66ffb68d7 100644 --- a/src/reflect/scala/reflect/io/File.scala +++ b/src/reflect/scala/reflect/io/File.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -42,9 +42,6 @@ object File { * precedence if supplied.) If neither is available, the value * of scala.io.Codec.default is used. 
* - * @author Paul Phillips - * @since 2.8 - * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars { @@ -106,15 +103,9 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w try Some(slurp()) catch { case _: IOException => None } - /** Reflection since we're into the java 6+ API. + /** Ignores SecurityException. */ - def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = { - type JBoolean = java.lang.Boolean - val method = - try classOf[JFile].getMethod("setExecutable", classOf[Boolean], classOf[Boolean]) - catch { case _: NoSuchMethodException => return false } - - try method.invoke(jfile, executable: JBoolean, ownerOnly: JBoolean).asInstanceOf[JBoolean].booleanValue - catch { case _: Exception => false } - } + def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = + try jfile.setExecutable(executable, ownerOnly) + catch { case _: SecurityException => false } } diff --git a/src/reflect/scala/reflect/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala index 49430c6428cb..72c74be68873 100644 --- a/src/reflect/scala/reflect/io/FileOperationException.scala +++ b/src/reflect/scala/reflect/io/FileOperationException.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala deleted file mode 100644 index e7ba5e04075d..000000000000 --- a/src/reflect/scala/reflect/io/IOStats.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package reflect.io - - -// Due to limitations in the Statistics machinery, these are only -// reported if this patch is applied. -// -// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala -// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala -// @@ -109,7 +109,7 @@ quant) -// * Quantities with non-empty prefix are printed in the statistics info. -// */ -// trait Quantity { -// - if (enabled && prefix.nonEmpty) { -// + if (prefix.nonEmpty) { -// val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" -// qs(key) = this -// } -// @@ -243,7 +243,7 @@ quant) -// * -// * to remove all Statistics code from build -// */ -// - final val canEnable = _enabled -// + final val canEnable = true // _enabled -// -// We can commit this change as the first diff reverts a fix for an IDE memory leak. - -// The following has been commented out because IOStats cannot be used in the -// call-sites since they are disconnected from the statistics infrastructure. -//private[io] object IOStats { -// val fileExistsCount = Statistics.newCounter("# File.exists calls") -// val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls") -// val fileIsFileCount = Statistics.newCounter("# File.isFile calls") -//} diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala index 3183a1d53e39..0ce3ef39f7dd 100644 --- a/src/reflect/scala/reflect/io/NoAbstractFile.scala +++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index c8558c789413..ec7076ffe02a 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,8 +16,9 @@ package io import scala.language.implicitConversions -import java.io.{ RandomAccessFile, File => JFile } -import java.net.{ URI, URL } +import java.io.{RandomAccessFile, File => JFile} +import java.net.{URI, URL} +import scala.annotation.tailrec import scala.util.Random.alphanumeric /** An abstraction for filesystem paths. The differences between @@ -31,16 +32,19 @@ import scala.util.Random.alphanumeric * Also available are createFile and createDirectory, which attempt * to create the path in question. * - * @author Paul Phillips - * @since 2.8 - * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Path { def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName) - def isExtensionJarOrZip(name: String): Boolean = { - name.endsWith(".jar") || name.endsWith(".zip") - } + def isExtensionJarOrZip(name: String): Boolean = + name.lastIndexOf('.') match { + case i if i >= 0 => + val xt = name.substring(i + 1) + xt.equalsIgnoreCase("jar") || xt.equalsIgnoreCase("zip") + case _ => false + } + + /** Lower case "extension", following the last dot. 
*/ def extension(name: String): String = { val i = name.lastIndexOf('.') if (i < 0) "" @@ -135,6 +139,7 @@ class Path private[io] (val jfile: JFile) { def relativize(other: Path) = { assert(isAbsolute == other.isAbsolute, "Paths not of same type: "+this+", "+other) + @tailrec def createRelativePath(baseSegs: List[String], otherSegs: List[String]) : String = { (baseSegs, otherSegs) match { case (b :: bs, o :: os) if b == o => createRelativePath(bs, os) @@ -228,13 +233,13 @@ class Path private[io] (val jfile: JFile) { // creations def createDirectory(force: Boolean = true, failIfExists: Boolean = false): Directory = { val res = if (force) jfile.mkdirs() else jfile.mkdir() - if (!res && failIfExists && exists) fail("Directory '%s' already exists." format name) + if (!res && failIfExists && exists) fail(s"Directory '$name' already exists.") else if (isDirectory) toDirectory else new Directory(jfile) } def createFile(failIfExists: Boolean = false): File = { val res = jfile.createNewFile() - if (!res && failIfExists && exists) fail("File '%s' already exists." format name) + if (!res && failIfExists && exists) fail(s"File '$name' already exists.") else if (isFile) toFile else new File(jfile) } diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index ad7d1c8f5953..8b1535c77ebd 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,6 +14,9 @@ package scala package reflect package io +import java.nio.ByteBuffer +import java.nio.file.StandardOpenOption +import java.util /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { @@ -50,6 +53,20 @@ class PlainFile(val givenPath: Path) extends AbstractFile { override def input = givenPath.toFile.inputStream() override def output = givenPath.toFile.outputStream() override def sizeOption = Some(givenPath.length.toInt) + override def toByteBuffer: ByteBuffer = { + val chan = java.nio.file.Files.newByteChannel(file.toPath, util.EnumSet.of(StandardOpenOption.READ)) + try { + import java.nio.ByteBuffer + val buffer: ByteBuffer = ByteBuffer.allocate(chan.size.toInt) + var endOfInput = false + while (!endOfInput ) { + endOfInput = chan.read(buffer) < 0 + buffer.compact() + } + buffer.flip() + buffer + } finally chan.close() + } override def hashCode(): Int = fpath.hashCode() override def equals(that: Any): Boolean = that match { @@ -102,7 +119,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { new PlainFile(givenPath / name) } -private[scala] class PlainNioFile(nioPath: java.nio.file.Path) extends AbstractFile { +final class PlainNioFile(val nioPath: java.nio.file.Path) extends AbstractFile { import java.nio.file._ assert(nioPath ne null) @@ -116,7 +133,29 @@ private[scala] class PlainNioFile(nioPath: java.nio.file.Path) extends AbstractF override lazy val canonicalPath = super.canonicalPath - override def underlyingSource = Some(this) + override def underlyingSource = { + val fileSystem = nioPath.getFileSystem + fileSystem.provider().getScheme match { + case "jar" => + val fileStores = fileSystem.getFileStores.iterator() + if (fileStores.hasNext) { + val jarPath = fileStores.next().name + try { + Some(new PlainNioFile(Paths.get(jarPath.stripSuffix(fileSystem.getSeparator)))) + } catch { + case _: 
InvalidPathException => + None + } + } else None + case "jrt" => + if (nioPath.getNameCount > 2 && nioPath.startsWith("/modules")) { + // TODO limit this to OpenJDK based JVMs? + val moduleName = nioPath.getName(1) + Some(new PlainNioFile(Paths.get(System.getProperty("java.home"), "jmods", moduleName.toString + ".jmod"))) + } else None + case _ => None + } + } private val fpath = nioPath.toAbsolutePath.toString @@ -148,8 +187,8 @@ private[scala] class PlainNioFile(nioPath: java.nio.file.Path) extends AbstractF /** Returns all abstract subfiles of this abstract directory. */ def iterator: Iterator[AbstractFile] = { try { - import scala.collection.JavaConverters._ - val it = Files.newDirectoryStream(nioPath).iterator() + import scala.jdk.CollectionConverters._ + val it = Files.newDirectoryStream(nioPath).iterator it.asScala.map(new PlainNioFile(_)) } catch { case _: NotDirectoryException => Iterator.empty diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala index 080bbee0eeec..9f160fd42b09 100644 --- a/src/reflect/scala/reflect/io/RootPath.scala +++ b/src/reflect/scala/reflect/io/RootPath.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -17,8 +17,7 @@ import java.nio import java.nio.file.Files import java.nio.file.spi.FileSystemProvider -import scala.collection.JavaConverters.collectionAsScalaIterableConverter - +import scala.jdk.CollectionConverters._ abstract class RootPath extends Closeable { def root: nio.file.Path diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index beda92614dab..8d652aadcb0b 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,16 +15,14 @@ package reflect package io import java.net.URL -import java.io.{ BufferedInputStream, InputStream } -import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable } +import java.io.{BufferedInputStream, InputStream} +import java.io.{BufferedReader, InputStreamReader, Closeable => JCloseable} import scala.io.{ Codec, BufferedSource, Source } import scala.collection.mutable.ArrayBuffer import Path.fail +import scala.annotation.tailrec /** Traits for objects which can be represented as Streams. 
- * - * @author Paul Phillips - * @since 2.8 * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ @@ -63,7 +61,8 @@ object Streamable { lazy val in = bufferedInput() var offset = 0 - def loop() { + @tailrec + def loop(): Unit = { if (offset < len) { val read = in.read(arr, offset, len - offset) if (read >= 0) { diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 7cc558b6647f..39db28815418 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,8 +19,6 @@ import scala.collection.mutable /** * An in-memory directory. * - * @author Lex Spoon - * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory]) @@ -39,21 +37,21 @@ extends AbstractFile { val lastModified: Long = System.currentTimeMillis override def file = null - override def input = sys.error("directories cannot be read") - override def output = sys.error("directories cannot be written") + override def input = throw new IllegalStateException("directories cannot be read") + override def output = throw new IllegalStateException("directories cannot be written") /** Does this abstract file denote an existing file? */ - def create() { unsupported() } + def create(): Unit = { unsupported() } /** Delete the underlying file or directory (recursively). */ - def delete() { unsupported() } + def delete(): Unit = { unsupported() } /** Returns an abstract file with the given name. It does not * check that it exists. 
*/ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() - private val files = mutable.Map.empty[String, AbstractFile] + private[this] val files = mutable.Map.empty[String, AbstractFile] // the toList is so that the directory may continue to be // modified while its elements are iterated @@ -76,7 +74,7 @@ extends AbstractFile { dir } - def clear() { + def clear(): Unit = { files.clear() } } diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 01b5f1533fd4..90b3ab7a541b 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,9 +17,6 @@ package io import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream, File => JFile } /** This class implements an in-memory file. 
- * - * @author Philippe Altherr - * @version 1.0, 23/03/2004 * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ @@ -39,7 +36,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF case _ => false } - private var content = Array.emptyByteArray + private[this] var content = Array.emptyByteArray def absolute = this @@ -52,7 +49,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF override def output: OutputStream = { new ByteArrayOutputStream() { - override def close() { + override def close(): Unit = { super.close() content = toByteArray() } diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index a1a7884de77f..3964030b0190 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,15 +14,17 @@ package scala package reflect package io +import java.lang.Boolean.{getBoolean => booleanProperty} import java.net.URL import java.io.{ByteArrayInputStream, FilterInputStream, IOException, InputStream} import java.io.{File => JFile} import java.util.concurrent.{ArrayBlockingQueue, TimeUnit} import java.util.zip.{ZipEntry, ZipFile, ZipInputStream} import java.util.jar.Manifest -import scala.collection.mutable -import scala.collection.JavaConverters._ + import scala.annotation.tailrec +import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.internal.JDK9Reflectors import ZipArchive._ @@ -30,14 +32,10 @@ import ZipArchive._ * it is for performance: we come through here a lot on every run. Be careful * about changing it. 
* - * @author Philippe Altherr (original version) - * @author Paul Phillips (this one) - * @version 2.0, - * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object ZipArchive { - private[io] val closeZipFile = sys.props.get("scala.classpath.closeZip").map(_.toBoolean).getOrElse(false) + private[io] val closeZipFile = booleanProperty("scala.classpath.closeZip") // The maximum number of entries retained in the pool associated with each FileZipArchive. FileZipArchive // instances are shared across compiler threads (unless -YdisableFlatCpCaching), but to actually enable // concurrent access to the data per-thread instance of the underlying j.u.ZipFile must be created. These @@ -80,6 +78,22 @@ object ZipArchive { if (front) path.substring(0, idx + 1) else path.substring(idx + 1) } + @deprecated("Kept for compatibility", "2.13.1") + def pathToDotted(path: String): String = { + if (RootEntry == path) "" + else { + val slashEnd = path.endsWith("/") + val len = path.length - (if (slashEnd) 1 else 0) + val result = new Array[Char](len) + var i = 0 + while (i < len) { + val char = path.charAt(i) + result(i) = if (char == '/') '.' 
else char + i += 1 + } + new String(result) + } + } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ abstract class ZipArchive(override val file: JFile, release: Option[String]) extends AbstractFile with Equals { @@ -119,16 +133,19 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext } } - private def ensureDir(dirs: java.util.Map[String, DirEntry], path: String): DirEntry = { - dirs get path match { - case null => - val parent = ensureDir(dirs, dirName(path)) - val dir = new DirEntry(path) - parent.entries(baseName(path)) = dir - dirs.put(path, dir) - dir - case v => v - } + protected def getDir(dirs: java.util.Map[String, DirEntry], entry: ZipEntry): DirEntry = { + def ensureDir(path: String): DirEntry = + dirs.get(path) match { + case null => + val parent = ensureDir(dirName(path)) + val dir = new DirEntry(path) + parent.entries(baseName(path)) = dir + dirs.put(path, dir) + dir + case dir => dir + } + val name = if (entry.isDirectory) entry.getName else dirNameUsingLast(entry.getName) + ensureDir(name) } @volatile private[this] var lastDirName: String = RootEntry @@ -143,10 +160,6 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext result } } - protected def getDir(dirs: java.util.Map[String, DirEntry], entry: ZipEntry): DirEntry = { - if (entry.isDirectory) ensureDir(dirs, entry.getName) - else ensureDir(dirs, dirNameUsingLast(entry.getName)) - } def close(): Unit } @@ -236,11 +249,11 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch val root = new DirEntry(RootEntry) dirs.put(RootEntry, root) val zipFile = openZipFile() - val enum = zipFile.entries() + val entries = zipFile.entries() try { - while (enum.hasMoreElements) { - val zipEntry = enum.nextElement + while (entries.hasMoreElements) { + val zipEntry = entries.nextElement if (!zipEntry.getName.startsWith("META-INF/versions/")) { if 
(!zipEntry.isDirectory) { val dir = getDir(dirs, zipEntry) @@ -263,8 +276,11 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch } } } finally { - if (!ZipArchive.closeZipFile) + if (ZipArchive.closeZipFile) { + zipFile.close() + } else { zipFilePool.release(zipFile) + } } root } @@ -298,7 +314,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) closeables ::= in - @tailrec def loop() { + @tailrec def loop(): Unit = { val zipEntry = in.getNextEntry() class EmptyFileEntry() extends Entry(zipEntry.getName) { override def toByteArray: Array[Byte] = null @@ -310,7 +326,8 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len) var offset = 0 - def loop() { + @tailrec + def loop(): Unit = { if (offset < len) { val read = in.read(arr, offset, len - offset) if (read >= 0) { @@ -371,15 +388,14 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { dirs.put(RootEntry, root) val manifest = new Manifest(input) closeables ::= input - - val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) + val iter = manifest.getEntries().keySet().iterator.asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) for (zipEntry <- iter) { val dir = getDir(dirs, zipEntry) if (!zipEntry.isDirectory) { class FileEntry() extends Entry(zipEntry.getName) { override def lastModified = zipEntry.getTime() - override def input = resourceInputStream(path) + override def input = resourceInputStream(this.path) override def sizeOption = None } val f = new FileEntry() diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala index b03a7067e1ca..e39a1987fb3b 100644 --- a/src/reflect/scala/reflect/macros/Aliases.scala +++ b/src/reflect/scala/reflect/macros/Aliases.scala @@ 
-1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala index 6fb8dfb33710..a85ac8f948f6 100644 --- a/src/reflect/scala/reflect/macros/Attachments.scala +++ b/src/reflect/scala/reflect/macros/Attachments.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,7 @@ package reflect package macros import reflect.internal.util.Position +import scala.runtime.ClassValueCompat /** * EXPERIMENTAL @@ -54,8 +55,15 @@ abstract class Attachments { self => } /** An underlying payload of the given class type `T`. */ - def get[T: ClassTag]: Option[T] = - (all find matchesTag[T]).asInstanceOf[Option[T]] + def get[T: ClassTag]: Option[T] = { + val it = all.iterator + val matchesTagFn = matchesTag[T] + while (it.hasNext) { // OPT: hotspot, hand roll `Set.find`. + val datum = it.next() + if (matchesTagFn(datum)) return Some(datum.asInstanceOf[T]) + } + None + } /** Check underlying payload contains an instance of type `T`. 
*/ def contains[T: ClassTag]: Boolean = @@ -98,10 +106,11 @@ abstract class Attachments { self => } def isEmpty: Boolean + def cloneAttachments: Attachments { type Pos = self.Pos } = this } private object Attachments { - private val matchesTagCache = new ClassValue[Function1[Any, Boolean]] { + private val matchesTagCache = new ClassValueCompat[Function1[Any, Boolean]] { override def computeValue(cls: Class[_]): Function[Any, Boolean] = cls.isInstance(_) } } @@ -120,14 +129,18 @@ private final class SingleAttachment[P >: Null](override val pos: P, val att: An type Pos = P def withPos(newPos: Pos) = new SingleAttachment[Pos](newPos, att) override def isEmpty: Boolean = false + override def cloneAttachments: Attachments { type Pos = P } = new SingleAttachment[P](pos, att) override def all = Set.empty[Any] + att override def contains[T](implicit tt: ClassTag[T]) = tt.runtimeClass.isInstance(att) override def get[T](implicit tt: ClassTag[T]) = if (contains(tt)) Some(att.asInstanceOf[T]) else None - override def update[T](newAtt: T)(implicit tt: ClassTag[T]) = + override def update[T](newAtt: T)(implicit tt: ClassTag[T]) = { + //assert(tt ne classTag[Any]) if (contains(tt)) new SingleAttachment[P](pos, newAtt) else new NonemptyAttachments[P](pos, Set.empty[Any] + att + newAtt) + } override def remove[T](implicit tt: ClassTag[T]) = if (contains(tt)) pos.asInstanceOf[Attachments { type Pos = P }] else this + override def toString = s"SingleAttachment at $pos: $att" } // scala/bug#7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the @@ -136,4 +149,5 @@ private final class NonemptyAttachments[P >: Null](override val pos: P, override type Pos = P def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all) override def isEmpty: Boolean = false + override def cloneAttachments: Attachments { type Pos = P } = new NonemptyAttachments[P](pos, all) } diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala 
b/src/reflect/scala/reflect/macros/Enclosures.scala index 362600b665d5..915b230267a3 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,7 +30,7 @@ package macros * This is somewhat aligned with the overall evolution of macros during the 2.11 development cycle, where we played with * `c.introduceTopLevel` and `c.introduceMember`, but at the end of the day decided to reject them. * - * If you're relying on the now deprecated APIs, consider using the new [[c.internal.enclosingOwner]] method that can be used to obtain + * If you're relying on the now deprecated APIs, consider using the new [[Internals.ContextInternalApi.enclosingOwner]] method that can be used to obtain * the names of enclosing definitions. Alternatively try reformulating your macros in terms of completely local expansion * and/or joining a discussion of a somewhat related potential language feature at [[https://groups.google.com/forum/#!topic/scala-debate/f4CLmYShX6Q]]. * We also welcome questions and suggestions on our mailing lists, where we would be happy to further discuss this matter. diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala index 311b10244c70..21337a6d3cfc 100644 --- a/src/reflect/scala/reflect/macros/Evals.scala +++ b/src/reflect/scala/reflect/macros/Evals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala index 6cd146627867..9827c7999246 100644 --- a/src/reflect/scala/reflect/macros/ExprUtils.scala +++ b/src/reflect/scala/reflect/macros/ExprUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala index ab59cf1dca43..71a6c5c1cd74 100644 --- a/src/reflect/scala/reflect/macros/FrontEnds.scala +++ b/src/reflect/scala/reflect/macros/FrontEnds.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala index d61e26040a04..b56d868460ed 100644 --- a/src/reflect/scala/reflect/macros/Infrastructure.scala +++ b/src/reflect/scala/reflect/macros/Infrastructure.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/Internals.scala b/src/reflect/scala/reflect/macros/Internals.scala index fae9d3b5ddb9..dc9e493da4b6 100644 --- a/src/reflect/scala/reflect/macros/Internals.scala +++ b/src/reflect/scala/reflect/macros/Internals.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala index 79a3d90c44a3..ed9c35b9b25b 100644 --- a/src/reflect/scala/reflect/macros/Names.scala +++ b/src/reflect/scala/reflect/macros/Names.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala index 5a5a10e4e2fd..bffa9529431e 100644 --- a/src/reflect/scala/reflect/macros/Parsers.scala +++ b/src/reflect/scala/reflect/macros/Parsers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala index c2cf2e3bdcd2..aaae39fad258 100644 --- a/src/reflect/scala/reflect/macros/Reifiers.scala +++ b/src/reflect/scala/reflect/macros/Reifiers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 4e22608f597d..44ba1ac89708 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -91,7 +91,7 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException @@ -103,7 +103,7 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException @@ -120,7 +120,7 @@ trait Typers { * are observationally different from typed trees (also known as typer trees, typechecked trees or attributed trees), * * Usually, if some compiler API takes a tree, then both untyped and typed trees will do. However in some cases, - * only untyped or only typed trees are appropriate. For example, [[eval]] only accepts untyped trees and one can only splice + * only untyped or only typed trees are appropriate. For example, [[Evals.eval]] only accepts untyped trees and one can only splice * typed trees inside typed trees. Therefore in the current reflection API, there is a need in functions * that go back and forth between untyped and typed trees. For this we have [[typecheck]] and `untypecheck`. * @@ -128,7 +128,7 @@ trait Typers { * which makes it sometimes corrupt trees so that they don't make sense anymore. Unfortunately, there's no workaround for that. * We plan to fix this issue soon, but for now please keep it in mind. 
* - * @see [[http://stackoverflow.com/questions/20936509/scala-macros-what-is-the-difference-between-typed-aka-typechecked-an-untyped]] + * @see [[https://stackoverflow.com/questions/20936509/scala-macros-what-is-the-difference-between-typed-aka-typechecked-an-untyped]] */ def untypecheck(tree: Tree): Tree } diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index 0757f3e8de94..909c2c984160 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,6 @@ package reflect package macros import scala.language.implicitConversions -import scala.language.higherKinds /** * EXPERIMENTAL @@ -32,10 +31,10 @@ import scala.language.higherKinds */ abstract class Universe extends scala.reflect.api.Universe { - /** @inheritdoc */ + /** @see [[MacroInternalApi]] */ override type Internal <: MacroInternalApi - /** @inheritdoc */ + /** @see [[InternalApi]] */ trait MacroInternalApi extends InternalApi { internal => /** Adds a given symbol to the given scope. 
@@ -167,10 +166,10 @@ abstract class Universe extends scala.reflect.api.Universe { */ def subpatterns(tree: Tree): Option[List[Tree]] - /** @inheritdoc */ + /** @see MacroDecoratorApi */ override type Decorators <: MacroDecoratorApi - /** @inheritdoc */ + /** @see DecoratorApi */ trait MacroDecoratorApi extends DecoratorApi { /** Extension methods for scopes */ type ScopeDecorator[T <: Scope] <: MacroScopeDecoratorApi[T] @@ -180,10 +179,10 @@ abstract class Universe extends scala.reflect.api.Universe { /** @see [[ScopeDecorator]] */ class MacroScopeDecoratorApi[T <: Scope](val scope: T) { - /** @see [[internal.enter]] */ + /** @see [[MacroInternalApi.enter]] */ def enter(sym: Symbol): T = internal.enter(scope, sym) - /** @see [[internal.unlink]] */ + /** @see [[MacroInternalApi.unlink]] */ def unlink(sym: Symbol): T = internal.unlink(scope, sym) } @@ -192,28 +191,28 @@ abstract class Universe extends scala.reflect.api.Universe { /** @see [[TreeDecorator]] */ class MacroTreeDecoratorApi[T <: Tree](override val tree: T) extends TreeDecoratorApi[T](tree) { - /** @see [[internal.changeOwner]] */ + /** @see [[MacroInternalApi.changeOwner]] */ def changeOwner(prev: Symbol, next: Symbol): tree.type = internal.changeOwner(tree, prev, next) - /** @see [[internal.attachments]] */ + /** @see [[MacroInternalApi.attachments]] */ def attachments: Attachments { type Pos = Position } = internal.attachments(tree) - /** @see [[internal.updateAttachment]] */ + /** @see [[MacroInternalApi.updateAttachment]] */ def updateAttachment[A: ClassTag](attachment: A): tree.type = internal.updateAttachment(tree, attachment) - /** @see [[internal.removeAttachment]] */ + /** @see [[MacroInternalApi.removeAttachment]] */ def removeAttachment[A: ClassTag]: T = internal.removeAttachment[A](tree) - /** @see [[internal.setPos]] */ + /** @see [[MacroInternalApi.setPos]] */ def setPos(newpos: Position): T = internal.setPos(tree, newpos) - /** @see [[internal.setType]] */ + /** @see 
[[MacroInternalApi.setType]] */ def setType(tp: Type): T = internal.setType(tree, tp) - /** @see [[internal.defineType]] */ + /** @see [[MacroInternalApi.defineType]] */ def defineType(tp: Type): T = internal.defineType(tree, tp) - /** @see [[internal.setSymbol]] */ + /** @see [[MacroInternalApi.setSymbol]] */ def setSymbol(sym: Symbol): T = internal.setSymbol(tree, sym) } @@ -225,7 +224,7 @@ abstract class Universe extends scala.reflect.api.Universe { /** @see [[TypeTreeDecorator]] */ class MacroTypeTreeDecoratorApi[T <: TypeTree](val tt: T) { - /** @see [[internal.setOriginal]] */ + /** @see [[MacroInternalApi.setOriginal]] */ def setOriginal(tree: Tree): TypeTree = internal.setOriginal(tt, tree) } @@ -234,34 +233,34 @@ abstract class Universe extends scala.reflect.api.Universe { /** @see [[TreeDecorator]] */ class MacroSymbolDecoratorApi[T <: Symbol](override val symbol: T) extends SymbolDecoratorApi[T](symbol) { - /** @see [[internal.attachments]] */ + /** @see [[MacroInternalApi.attachments]] */ def attachments: Attachments { type Pos = Position } = internal.attachments(symbol) - /** @see [[internal.updateAttachment]] */ + /** @see [[MacroInternalApi.updateAttachment]] */ def updateAttachment[A: ClassTag](attachment: A): T = internal.updateAttachment(symbol, attachment) - /** @see [[internal.removeAttachment]] */ + /** @see [[MacroInternalApi.removeAttachment]] */ def removeAttachment[A: ClassTag]: T = internal.removeAttachment[A](symbol) - /** @see [[internal.setOwner]] */ + /** @see [[MacroInternalApi.setOwner]] */ def setOwner(newowner: Symbol): T = internal.setOwner(symbol, newowner) - /** @see [[internal.setInfo]] */ + /** @see [[MacroInternalApi.setInfo]] */ def setInfo(tpe: Type): T = internal.setInfo(symbol, tpe) - /** @see [[internal.setAnnotations]] */ + /** @see [[MacroInternalApi.setAnnotations]] */ def setAnnotations(annots: Annotation*): T = internal.setAnnotations(symbol, annots: _*) - /** @see [[internal.setName]] */ + /** @see 
[[MacroInternalApi.setName]] */ def setName(name: Name): T = internal.setName(symbol, name) - /** @see [[internal.setPrivateWithin]] */ + /** @see [[MacroInternalApi.setPrivateWithin]] */ def setPrivateWithin(sym: Symbol): T = internal.setPrivateWithin(symbol, sym) - /** @see [[internal.setFlag]] */ + /** @see [[MacroInternalApi.setFlag]] */ def setFlag(flags: FlagSet): T = internal.setFlag(symbol, flags) - /** @see [[internal.setFlag]] */ + /** @see [[MacroInternalApi.setFlag]] */ def resetFlag(flags: FlagSet): T = internal.resetFlag(symbol, flags) } } @@ -343,109 +342,29 @@ abstract class Universe extends scala.reflect.api.Universe { def mkCast(tree: Tree, pt: Type): Tree } - /** @see [[internal.gen]] */ @deprecated("use `internal.gen` instead", "2.11.0") val treeBuild: TreeGen /** @inheritdoc */ + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") type Compat <: MacroCompatApi /** @see [[compat]] * @group Internal */ + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") trait MacroCompatApi extends CompatApi { /** Scala 2.10 compatibility enrichments for Symbol. 
*/ - implicit class MacroCompatibleSymbol(symbol: Symbol) { - /** @see [[InternalMacroApi.attachments]] */ - @deprecated("use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def attachments: Attachments { type Pos = Position } = internal.attachments(symbol) - - /** @see [[InternalMacroApi.updateAttachment]] */ - @deprecated("use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def updateAttachment[T: ClassTag](attachment: T): Symbol = internal.updateAttachment[T](symbol, attachment) - - /** @see [[InternalMacroApi.removeAttachment]] */ - @deprecated("use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def removeAttachment[T: ClassTag]: Symbol = internal.removeAttachment[T](symbol) - - /** @see [[InternalMacroApi.setInfo]] */ - @deprecated("use `internal.setInfo` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setTypeSignature(tpe: Type): Symbol = internal.setInfo(symbol, tpe) - - /** @see [[InternalMacroApi.setAnnotations]] */ - @deprecated("use `internal.setAnnotations` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setAnnotations(annots: Annotation*): Symbol = internal.setAnnotations(symbol, annots: _*) - - /** @see [[InternalMacroApi.setName]] */ - @deprecated("use `internal.setName` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setName(name: Name): Symbol = internal.setName(symbol, name) - - /** @see [[InternalMacroApi.setPrivateWithin]] */ - @deprecated("use `internal.setPrivateWithin` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setPrivateWithin(sym: Symbol): Symbol = internal.setPrivateWithin(symbol, sym) - } + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit class MacroCompatibleSymbol(symbol: Symbol) /** Scala 2.10 compatibility enrichments for TypeTree. 
*/ - implicit class MacroCompatibleTree(tree: Tree) { - /** @see [[InternalMacroApi.attachments]] */ - @deprecated("use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def attachments: Attachments { type Pos = Position } = internal.attachments(tree) - - /** @see [[InternalMacroApi.updateAttachment]] */ - @deprecated("use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def updateAttachment[T: ClassTag](attachment: T): Tree = internal.updateAttachment[T](tree, attachment) - - /** @see [[InternalMacroApi.removeAttachment]] */ - @deprecated("use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def removeAttachment[T: ClassTag]: Tree = internal.removeAttachment[T](tree) - - /** @see [[InternalMacroApi.setPos]] */ - @deprecated("use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def pos_=(pos: Position): Unit = internal.setPos(tree, pos) - - /** @see [[InternalMacroApi.setPos]] */ - @deprecated("use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setPos(newpos: Position): Tree = internal.setPos(tree, newpos) - - /** @see [[InternalMacroApi.setType]] */ - @deprecated("use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def tpe_=(t: Type): Unit = internal.setType(tree, t) - - /** @see [[InternalMacroApi.setType]] */ - @deprecated("use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setType(tp: Type): Tree = internal.setType(tree, tp) - - /** @see [[InternalMacroApi.defineType]] */ - @deprecated("use `internal.defineType` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def defineType(tp: Type): Tree = internal.defineType(tree, tp) - - /** @see [[InternalMacroApi.setSymbol]] */ - @deprecated("use `internal.setSymbol` 
instead or import `internal.decorators._` for infix syntax", "2.11.0") - def symbol_=(sym: Symbol): Unit = internal.setSymbol(tree, sym) - - /** @see [[InternalMacroApi.setSymbol]] */ - @deprecated("use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setSymbol(sym: Symbol): Tree = internal.setSymbol(tree, sym) - } + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit class MacroCompatibleTree(tree: Tree) /** Scala 2.10 compatibility enrichments for TypeTree. */ - implicit class CompatibleTypeTree(tt: TypeTree) { - /** @see [[InternalMacroApi.setOriginal]] */ - @deprecated("use `internal.setOriginal` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def setOriginal(tree: Tree): TypeTree = internal.setOriginal(tt, tree) - } - - /** @see [[InternalMacroApi.captureVariable]] */ - @deprecated("use `internal.captureVariable` instead", "2.11.0") - def captureVariable(vble: Symbol): Unit = internal.captureVariable(vble) - - /** @see [[InternalMacroApi.captureVariable]] */ - @deprecated("use `internal.referenceCapturedVariable` instead", "2.11.0") - def referenceCapturedVariable(vble: Symbol): Tree = internal.referenceCapturedVariable(vble) - - /** @see [[InternalMacroApi.captureVariable]] */ - @deprecated("use `internal.capturedVariableType` instead", "2.11.0") - def capturedVariableType(vble: Symbol): Type = internal.capturedVariableType(vble) + @deprecated("compatibility with Scala 2.10 EOL", "2.13.0") + implicit class CompatibleTypeTree(tt: TypeTree) } /** The type of compilation runs. diff --git a/src/reflect/scala/reflect/macros/blackbox/Context.scala b/src/reflect/scala/reflect/macros/blackbox/Context.scala index 3a5d10cd9bde..52b48e74014b 100644 --- a/src/reflect/scala/reflect/macros/blackbox/Context.scala +++ b/src/reflect/scala/reflect/macros/blackbox/Context.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -41,7 +41,7 @@ package blackbox * which means that its expansion will be upcast to its return type, enforcing faithfulness of that macro to its * type signature. Whitebox macros, i.e. the ones defined with `whitebox.Context`, aren't bound by this restriction, * which enables a number of important use cases, but they are also going to enjoy less support than blackbox macros, - * so choose wisely. See the [[http://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] for more information. + * so choose wisely. See the [[https://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] for more information. * * @see `scala.reflect.macros.whitebox.Context` */ diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala index 7118643dd641..6a741287bb8b 100644 --- a/src/reflect/scala/reflect/macros/package.scala +++ b/src/reflect/scala/reflect/macros/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,14 +22,14 @@ package reflect * Within these functions the programmer has access to compiler APIs. * For example, it is possible to generate, analyze and typecheck code. * - * See the [[http://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] on how to get started with Scala macros. + * See the [[https://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] on how to get started with Scala macros. */ package object macros { /** The Scala macros context. * * In Scala 2.11, macros that were once the one are split into blackbox and whitebox macros, * with the former being better supported and the latter being more powerful. 
You can read about - * the details of the split and the associated trade-offs in the [[http://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]]. + * the details of the split and the associated trade-offs in the [[https://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]]. * * `scala.reflect.macros.Context` follows this tendency and turns into `scala.reflect.macros.blackbox.Context` * and `scala.reflect.macros.whitebox.Context`. The original `Context` is left in place for compatibility reasons, diff --git a/src/reflect/scala/reflect/macros/whitebox/Context.scala b/src/reflect/scala/reflect/macros/whitebox/Context.scala index 690e450c7675..2850c7b45ffe 100644 --- a/src/reflect/scala/reflect/macros/whitebox/Context.scala +++ b/src/reflect/scala/reflect/macros/whitebox/Context.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -41,7 +41,7 @@ package whitebox * gaining the ability to refine the type of its expansion beyond its official return type, which enables a number of important use cases. * Blackbox macros, i.e. the ones defined with `blackbox.Context`, can't do that, so they are less powerful. * However blackbox macros are also going to enjoy better support than whitebox macros, so choose wisely. - * See the [[http://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] for more information. + * See the [[https://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] for more information. * * @see `scala.reflect.macros.blackbox.Context` */ @@ -86,4 +86,4 @@ trait Context extends blackbox.Context { * and always stays the same regardless of whatever happens during macro expansion. 
*/ def enclosingImplicits: List[ImplicitCandidate] -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/runtime/Gil.scala b/src/reflect/scala/reflect/runtime/Gil.scala index 3443fbe722be..36e64a628316 100644 --- a/src/reflect/scala/reflect/runtime/Gil.scala +++ b/src/reflect/scala/reflect/runtime/Gil.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 21c0f5a6f45b..4bca1d19a934 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,42 +14,44 @@ package scala package reflect package runtime -import scala.language.existentials - -import scala.ref.WeakReference -import scala.collection.mutable.WeakHashMap - -import java.lang.{Class => jClass, Package => jPackage} +import java.io.IOException +import java.lang.{ Class => jClass, Package => jPackage } +import java.lang.annotation.{ Annotation => jAnnotation } +import java.lang.ref.{ WeakReference => jWeakReference } import java.lang.reflect.{ Method => jMethod, Constructor => jConstructor, Field => jField, Member => jMember, Type => jType, TypeVariable => jTypeVariable, Parameter => jParameter, GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement } -import java.lang.annotation.{Annotation => jAnnotation} -import java.io.IOException -import java.lang.ref.{WeakReference => jWeakReference} -import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags } +import java.nio.charset.StandardCharsets.UTF_8 + +import scala.annotation.nowarn +import scala.collection.immutable.ArraySeq +import scala.collection.mutable.{ListBuffer, WeakHashMap} +import scala.language.existentials +import scala.ref.WeakReference +import scala.reflect.api.TypeCreator +import scala.reflect.internal.{ JavaAccFlags, MissingRequirementError } +import scala.runtime.{BoxesRunTime, ClassValueCompat, ScalaRunTime} +import internal.Flags._ import internal.pickling.ByteCodecs import internal.pickling.UnPickler -import scala.collection.mutable.ListBuffer -import internal.Flags._ +import internal.util.StringContextStripMarginOps import ReflectionUtils._ -import scala.reflect.api.TypeCreator -import scala.runtime.{ScalaRunTime, BoxesRunTime} private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable => - private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]() + private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[MirrorImpl]]() 
private def createMirror(owner: Symbol, cl: ClassLoader): Mirror = { - val jm = new JavaMirror(owner, cl) - mirrors(cl) = new WeakReference(jm) + val jm = new MirrorImpl(owner, cl) + mirrors(cl) = new WeakReference[MirrorImpl](jm) jm.init() jm } - override type Mirror = JavaMirror - implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[JavaMirror]) + override type Mirror = MirrorImpl + implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[MirrorImpl]) override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader) @@ -66,10 +68,21 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive } /** The API of a mirror for a reflective universe */ + @nowarn("""cat=deprecation&origin=scala\.reflect\.runtime\.JavaMirrors\.JavaMirror""") + final type MirrorImpl = JavaMirror + + /** + * The API of a mirror for a reflective universe. + * + * @deprecated this class's name shadows another; use [[MirrorImpl]] instead + */ + @nowarn("msg=shadowing a nested class of a parent is deprecated") + @deprecated("use MirrorImpl instead", since = "2.13.4") class JavaMirror(owner: Symbol, - /* Class loader that is a mastermind behind the reflexive mirror */ - val classLoader: ClassLoader - ) extends Roots(owner) with super.JavaMirror { thisMirror => + /* Class loader that is a mastermind behind the reflexive mirror */ + val classLoader: ClassLoader + ) extends Roots(owner) + with super.JavaMirror { thisMirror => val universe: thisUniverse.type = thisUniverse @@ -99,14 +112,14 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive // ----------- Caching ------------------------------------------------------------------ - private val classCache = new TwoWayCache[jClass[_], ClassSymbol] - private val packageCache = new TwoWayCache[Package, ModuleSymbol] - private val methodCache = new TwoWayCache[jMethod, MethodSymbol] - private val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol] 
- private val fieldCache = new TwoWayCache[jField, TermSymbol] - private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] + private[this] val classCache = new TwoWayCache[jClass[_], ClassSymbol] + private[this] val packageCache = new TwoWayCache[Package, ModuleSymbol] + private[this] val methodCache = new TwoWayCache[jMethod, MethodSymbol] + private[this] val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol] + private[this] val fieldCache = new TwoWayCache[jField, TermSymbol] + private[this] val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] - private[this] object typeTagCache extends ClassValue[jWeakReference[TypeTag[_]]]() { + private[this] object typeTagCache extends ClassValueCompat[jWeakReference[TypeTag[_]]]() { val typeCreator = new ThreadLocal[TypeCreator]() override protected def computeValue(cls: jClass[_]): jWeakReference[TypeTag[_]] = { @@ -131,7 +144,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive } } - private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S = + private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (MirrorImpl, J) => S): S = cache.toScala(key){ val jclazz = implicitly[HasJavaClass[J]] getClazz key body(mirrorDefining(jclazz), key) @@ -147,6 +160,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive case jclazz: jClass[_] => jclazz case jmeth: jMethod => jmeth.getDeclaringClass case jconstr: jConstructor[_] => jconstr.getDeclaringClass + case x => throw new MatchError(x) } }) @@ -180,9 +194,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation } object ConstantArg { - def enumToSymbol(enum: Enum[_]): Symbol = { - val staticPartOfEnum = classToScala(enum.getClass).companionSymbol - 
staticPartOfEnum.info.declaration(TermName(enum.name)) + def enumToSymbol(`enum`: Enum[_]): Symbol = { + val staticPartOfEnum = classToScala(`enum`.getClass).companionSymbol + staticPartOfEnum.info.declaration(TermName(`enum`.name)) } def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match { @@ -239,14 +253,14 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol = classToScala(rtcls).companionModule.asModule - private def ensuringNotFree(sym: Symbol)(body: => Any) { + private def ensuringNotFree(sym: Symbol)(body: => Any): Unit = { val freeType = sym.ownerChain find (_.isFreeType) freeType match { case Some(freeType) => ErrorFree(sym, freeType) case _ => body } } - private def checkMemberOf(sym: Symbol, owner: ClassSymbol) { + private def checkMemberOf(sym: Symbol, owner: ClassSymbol): Unit = { if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) { // do nothing } else if (sym.owner == AnyValClass) { @@ -258,7 +272,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive } } - private def checkConstructorOf(sym: Symbol, owner: ClassSymbol) { + private def checkConstructorOf(sym: Symbol, owner: ClassSymbol): Unit = { if (!sym.isClassConstructor) ErrorNotConstructor(sym, owner) if (owner == ArrayClass) ErrorArrayConstructor(sym, owner) ensuringNotFree(sym) { @@ -308,8 +322,8 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val isDerivedValueClass = symbol.isDerivedValueClass lazy val boxer = runtimeClass(symbol.toType).getDeclaredConstructors().head lazy val unboxer = { - val fields @ (field :: _) = symbol.toType.decls.collect{ case ts: TermSymbol if ts.isParamAccessor && ts.isMethod => ts }.toList - assert(fields.length == 1, s"$symbol: $fields") + val fields @ (field :: _) = symbol.toType.decls.collect{ case ts: TermSymbol if ts.isParamAccessor && ts.isMethod => ts 
}.toList: @unchecked + assert(fields.lengthIs == 1, s"$symbol: $fields") runtimeClass(symbol.asClass).getDeclaredMethod(field.name.toString) } } @@ -338,9 +352,10 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive // because both AnyVal and its primitive descendants define their own getClass methods private def isGetClass(meth: MethodSymbol) = (meth.name string_== "getClass") && meth.paramss.flatten.isEmpty private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType) - lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses - lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString, - Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod) + lazy val bytecodelessMethodOwners = + Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses + lazy val bytecodefulObjectMethods = + Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString, Object_notify, Object_notifyAll) ++ Object_wait.alternatives private def isBytecodelessMethod(meth: MethodSymbol): Boolean = { if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == runDefinitions.Predef_classOf || meth.isMacro) return true bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth) @@ -449,8 +464,8 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive // caches MethodSymbol metadata, so that we minimize the work that needs to be done during Mirror.apply // TODO: vararg is only supported in the last parameter list (scala/bug#6182), so we don't need to worry about the rest for now private class MethodMetadata(symbol: MethodSymbol) { - private val 
params = symbol.paramss.flatten.toArray - private val vcMetadata = params.map(p => new DerivedValueClassMetadata(p.info)) + private[this] val params = symbol.paramss.flatten.toArray + private[this] val vcMetadata = params.map(p => new DerivedValueClassMetadata(p.info)) val isByName = params.map(p => isByNameParam(p.info)) def isDerivedValueClass(i: Int) = vcMetadata(i).isDerivedValueClass def paramUnboxers(i: Int) = vcMetadata(i).unboxer @@ -477,7 +492,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive ) i += 1 } - jinvoke(args1) + jinvoke(ArraySeq.unsafeWrapArray(args1)) } } @@ -497,7 +512,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val varargMatch = args.length >= params.length - 1 && isVarArgsList(params) if (!perfectMatch && !varargMatch) { val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}" - val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments" + val s_arguments = if (params.lengthIs == 1 && !isVarArgsList(params)) "argument" else "arguments" abort(s"${showDecl(symbol)} takes $n_arguments $s_arguments") } @@ -602,7 +617,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive ) /** The mirror that corresponds to the classloader that original defined the given Java class */ - def mirrorDefining(jclazz: jClass[_]): JavaMirror = { + def mirrorDefining(jclazz: jClass[_]): MirrorImpl = { val cl = jclazz.getClassLoader if (cl == this.classLoader) this else runtimeMirror(cl) } @@ -654,7 +669,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive loadBytes[String]("scala.reflect.ScalaSignature") match { case Some(ssig) => info(s"unpickling Scala $clazz and $module, owner = ${clazz.owner}") - val bytes = ssig.getBytes(java.nio.charset.StandardCharsets.UTF_8) + val bytes = ssig.getBytes(UTF_8) val len = ByteCodecs.decode(bytes) 
assignAssociatedFile(clazz, module, jclazz) unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName) @@ -663,7 +678,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match { case Some(slsig) => info(s"unpickling Scala $clazz and $module with long Scala signature") - val encoded = slsig flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + val encoded = slsig.flatMap(_.getBytes(UTF_8)) val len = ByteCodecs.decode(encoded) val decoded = encoded.take(len) assignAssociatedFile(clazz, module, jclazz) @@ -692,7 +707,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName)) .setInfo(new TypeParamCompleter(jtvar)) markFlagsCompleted(tparam)(mask = AllFlags) - tparamCache enter (jtvar, tparam) + tparamCache.enter(jtvar, tparam) tparam } @@ -703,7 +718,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive private class TypeParamCompleter(jtvar: jTypeVariable[_ <: GenericDeclaration]) extends LazyType with FlagAgnosticCompleter { override def load(sym: Symbol) = complete(sym) override def complete(sym: Symbol) = { - sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny)) + sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala)) markAllCompleted(sym) } } @@ -720,7 +735,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive * Pre: `sym` is already initialized with a concrete type. * Note: If `sym` is a method or constructor, its parameter annotations are copied as well. 
*/ - private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) { + private def copyAnnotations(sym: Symbol, jann: AnnotatedElement): Unit = { sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList // scala/bug#7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser val jexTpes = jann match { @@ -766,9 +781,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive markFlagsCompleted(clazz, module)(mask = AllFlags) /** used to avoid cycles while initializing classes */ - private var parentsLevel = 0 - private var pendingLoadActions: List[() => Unit] = Nil - private val relatedSymbols = clazz +: (if (module != NoSymbol) List(module, module.moduleClass) else Nil) + private[this] var parentsLevel = 0 + private[this] var pendingLoadActions: List[() => Unit] = Nil + private[this] val relatedSymbols = clazz +: (if (module != NoSymbol) List(module, module.moduleClass) else Nil) override def load(sym: Symbol): Unit = { debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug @@ -799,9 +814,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive parentsLevel += 1 val jsuperclazz = jclazz.getGenericSuperclass val ifaces = jclazz.getGenericInterfaces.toList map typeToScala - val isAnnotation = JavaAccFlags(jclazz).isAnnotation - if (isAnnotation) AnnotationClass.tpe :: ClassfileAnnotationClass.tpe :: ifaces - else if (jclazz.isInterface) ObjectTpe :: ifaces // interfaces have Object as superclass in the classfile (see jvm spec), but getGenericSuperclass seems to return null + if (jclazz.isInterface) ObjectTpe :: ifaces // interfaces have Object as superclass in the classfile (see jvm spec), but getGenericSuperclass seems to return null else (if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)) :: ifaces } finally { parentsLevel -= 1 @@ -840,7 +853,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive } class 
LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter { - override def complete(sym: Symbol) { + override def complete(sym: Symbol): Unit = { completeRest() markAllCompleted(clazz, module) } @@ -884,7 +897,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive * The Scala owner of the Scala class corresponding to the Java class `jclazz` */ // @eb: a weird classloader might return a null package for something with a non-empty package name - // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c + // for example, https://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere // [martin] I think it's better to be forgiving here. Restoring packageNameToScala. @@ -989,18 +1002,19 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive if (split > 0) packageNameToScala(fullname take split) else this.RootPackage val owner = ownerModule.moduleClass val name = TermName(fullname) drop split + 1 - val opkg = owner.info decl name - if (opkg.hasPackageFlag) + // Be tolerant of clashes between, e.g. a subpackage and a package object member. These could arise + // under separate compilation. 
+ val opkg = owner.info.decl(name).filter(_.hasPackageFlag) + if (opkg != NoSymbol) opkg.asModule - else if (opkg == NoSymbol) { + else { val pkg = owner.newPackage(name) pkg.moduleClass setInfo new LazyPackageType pkg setInfoAndEnter pkg.moduleClass.tpe markFlagsCompleted(pkg)(mask = AllFlags) info("made Scala "+pkg) pkg - } else - throw new ReflectError(opkg+" is not a package") + } } private def scalaSimpleName(jclazz: jClass[_]): TypeName = { @@ -1009,7 +1023,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive var prefix = if (enclosingClass != null) enclosingClass.getName else "" val isObject = owner.isModuleClass && !owner.isPackageClass if (isObject && !prefix.endsWith(nme.MODULE_SUFFIX_STRING)) prefix += nme.MODULE_SUFFIX_STRING - assert(jclazz.getName.startsWith(prefix)) + assert(jclazz.getName.startsWith(prefix), s"Class name ${jclazz.getName} missing prefix $prefix") var name = jclazz.getName.substring(prefix.length) name = name.substring(name.lastIndexOf(".") + 1) newTypeName(name) @@ -1054,7 +1068,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive // otherwise we may mistake mangled symbolic names for mangled nested names // // in case when a Java binary name can be treated both as a top-level class and as a nested class - // (as described in http://groups.google.com/group/scala-internals/browse_thread/thread/10855403bbf04298) + // (as described in https://groups.google.com/group/scala-internals/browse_thread/thread/10855403bbf04298) // we check for a top-level class first // this is totally correct, because a top-level class and a nested class with the same name cannot coexist // so it's either one or another, but not both - therefore we always load $-bearing classes correctly @@ -1084,6 +1098,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val owner = genericDeclarationToScala(jparam.getGenericDeclaration) owner.info match { case 
PolyType(tparams, _) => tparams.find(_.name string_== jparam.getName).get.asType + case x => throw new MatchError(x) } } @@ -1094,10 +1109,12 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive case jclazz: jClass[_] => classToScala(jclazz) case jmeth: jMethod => methodToScala(jmeth) case jconstr: jConstructor[_] => constructorToScala(jconstr) + case x => throw new MatchError(x) } def reflectMemberToScala(m: jMember): Symbol = m match { case x: GenericDeclaration => genericDeclarationToScala(x) case x: jField => jfieldAsScala(x) + case x => throw new MatchError(x) } /** @@ -1111,7 +1128,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val tparam = owner.newExistential(newTypeName("T$" + tparams.length)) .setInfo(TypeBounds( lub(jwild.getLowerBounds.toList map typeToScala), - glb(jwild.getUpperBounds.toList map typeToScala map objToAny))) + glb(jwild.getUpperBounds.toList map typeToScala))) tparams += tparam typeRef(NoPrefix, tparam, List()) case _ => @@ -1129,10 +1146,13 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive arrayType(typeToScala(jclazz.getComponentType)) else { val clazz = classToScala(jclazz) - rawToExistential(typeRef(clazz.owner.thisType, clazz, List())) + rawToExistential(typeRef(clazz.owner.thisType, clazz, List())) match { + case ObjectTpe => ObjectTpeJava + case tp => tp + } } case japplied: ParameterizedType => - // http://stackoverflow.com/questions/5767122/parameterizedtype-getrawtype-returns-j-l-r-type-not-class + // https://stackoverflow.com/questions/5767122/parameterizedtype-getrawtype-returns-j-l-r-type-not-class val jcls = japplied.getRawType.asInstanceOf[jClass[_]] val sym = classToScala(jcls) val isStatic = java.lang.reflect.Modifier.isStatic(jcls.getModifiers) @@ -1141,10 +1161,15 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val (args, bounds) = targsToScala(pre.typeSymbol, args0.toList) 
newExistentialType(bounds, typeRef(pre, sym, args)) case jarr: GenericArrayType => - arrayType(typeToScala(jarr.getGenericComponentType)) + var elemtp = typeToScala(jarr.getGenericComponentType) + if (elemtp.typeSymbol.isAbstractType && elemtp.upperBound =:= ObjectTpe) { + elemtp = intersectionType(List(elemtp, ObjectTpe)) + } + arrayType(elemtp) case jtvar: jTypeVariable[_] => val tparam = typeParamToScala(jtvar) typeRef(NoPrefix, tparam, List()) + case x => throw new MatchError(x) } /** @@ -1201,7 +1226,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive private def jmethodAsScala1(jmeth: jMethod): MethodSymbol = { val clazz = sOwner(jmeth) val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags) - methodCache enter (jmeth, meth) + methodCache.enter(jmeth, meth) val tparams = jmeth.getTypeParameters.toList map createTypeParameter val params = jparamsAsScala(meth, jmeth.getParameters.toList) val resulttpe = typeToScala(jmeth.getGenericReturnType) @@ -1227,7 +1252,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive // [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out. 
val clazz = sOwner(jconstr) val constr = clazz.newConstructor(NoPosition, jconstr.scalaFlags) - constructorCache enter (jconstr, constr) + constructorCache.enter(jconstr, constr) val tparams = jconstr.getTypeParameters.toList map createTypeParameter val params = jparamsAsScala(constr, jconstr.getParameters.toList) setMethType(constr, tparams, params, clazz.tpe) @@ -1248,7 +1273,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive if (param.isNamePresent) TermName(param.getName) else nme.syntheticParamName(ix + 1) meth.owner.newValueParameter(name, meth.pos) - .setInfo(objToAny(typeToScala(param.getParameterizedType))) + .setInfo(typeToScala(param.getParameterizedType)) .setFlag(if (param.isNamePresent) 0 else SYNTHETIC) } } @@ -1307,7 +1332,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive noClass } - private val PackageAndClassPattern = """(.*\.)(.*)$""".r + private[this] val PackageAndClassPattern = """(.*\.)(.*)$""".r private def expandedName(sym: Symbol): String = if (sym.isPrivate) nme.expandedName(sym.name.toTermName, sym.owner).toString @@ -1332,10 +1357,10 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val jclazz = classToJava(meth.owner.asClass) val paramClasses = transformedType(meth).paramTypes map typeToJavaClass val jname = meth.name.dropLocal.toString - try jclazz getDeclaredMethod (jname, paramClasses: _*) + try jclazz.getDeclaredMethod(jname, paramClasses: _*) catch { case ex: NoSuchMethodException => - jclazz getDeclaredMethod (expandedName(meth), paramClasses: _*) + jclazz.getDeclaredMethod(expandedName(meth), paramClasses: _*) } } @@ -1365,9 +1390,8 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive } /** Assert that packages have package scopes */ - override def validateClassInfo(tp: ClassInfoType) { - assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope]) - } + override def 
validateClassInfo(tp: ClassInfoType): Unit = + assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope], s"$tp is package class or scope") override def newPackageScope(pkgClass: Symbol) = new PackageScope(pkgClass) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 6804dab84e10..c66f58ccf498 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,12 +14,12 @@ package scala package reflect package runtime -import scala.reflect.internal.{TreeInfo, SomePhase} +import scala.annotation.nowarn +import scala.reflect.internal.{SomePhase, TreeInfo} import scala.reflect.internal.{SymbolTable => InternalSymbolTable} import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable} -import scala.reflect.internal.util.Statistics import scala.reflect.api.{TypeCreator, Universe} -import scala.reflect.internal.util.Statistics +import scala.reflect.internal.util.{CodeAction, Statistics} /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders. * @@ -33,19 +33,21 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle lazy val settings = new Settings override final val statistics = new Statistics(JavaUniverse.this, settings) with ReflectStats - private val isLogging = sys.props contains "scala.debug.reflect" + private[this] val isLogging = System.getProperty("scala.debug.reflect") != null def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) // TODO: why put output under isLogging? Calls to inform are already conditional on debug/verbose/... 
import scala.reflect.internal.Reporter override def reporter: Reporter = new Reporter { + @nowarn("msg=overriding method info0") protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = log(msg) } // minimal Run to get Reporting wired def currentRun = new RunReporting {} class PerRunReporting extends PerRunReportingBase { - def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit = reporter.warning(pos, msg) + def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String, actions: List[CodeAction]): Unit = + reporter.warning(pos, msg) } protected def PerRunReporting = new PerRunReporting @@ -150,10 +152,10 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle // 5) That will crash PackageScope.enter that helpfully detects double-enters. // // Therefore, before initializing ScalaPackageClass, we must pre-initialize ObjectClass - def init() { + def init(): Unit = { definitions.init() - // workaround for http://groups.google.com/group/scala-internals/browse_thread/thread/97840ba4fd37b52e + // workaround for https://groups.google.com/group/scala-internals/browse_thread/thread/97840ba4fd37b52e // constructors are by definition single-threaded, so we initialize all lazy vals (and local object) in advance // in order to avoid deadlocks later (e.g. 
one thread holds a global reflection lock and waits for definitions.Something to initialize, // whereas another thread holds a definitions.Something initialization lock and needs a global reflection lock to complete the initialization) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 75795814a9dd..9f40ceff66a5 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,12 @@ package scala.reflect package runtime +import scala.annotation.nowarn + +@nowarn("cat=deprecation&origin=scala\\.reflect\\.internal\\.Internals\\.compat") +@nowarn("cat=deprecation&origin=scala\\.reflect\\.internal\\.Trees\\.emptyValDef") trait JavaUniverseForce { self: runtime.JavaUniverse => - def force() { + def force(): Unit = { Literal(Constant(42)).duplicate nme.flattenedName(NoSymbol, nme.NO_NAME) nme.raw @@ -51,13 +55,14 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.CompoundTypeTreeOriginalAttachment this.SAMFunction this.DelambdafyTarget - this.JustMethodReference this.BackquotedIdentifierAttachment this.PostfixAttachment this.InfixAttachment this.AutoApplicationAttachment this.NoWarnAttachment + this.PatShadowAttachment this.PatVarDefAttachment + this.MultiDefAttachment this.ForAttachment this.SyntheticUnitAttachment this.SubpatternsAttachment @@ -67,8 +72,26 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.UseInvokeSpecial this.TypeParamVarargsAttachment this.KnownDirectSubclassesCalled + this.DottyEnumSingleton + this.ConstructorNeedsFence + this.MultiargInfixAttachment + this.NullaryOverrideAdapted this.ChangeOwnerAttachment + this.InterpolatedString 
+ this.VirtualStringContext + this.CaseApplyInheritAccess + this.RootSelection + this.TypedExpectingUnitAttachment + this.FieldTypeInferred this.LookupAmbiguityWarning + this.PermittedSubclasses + this.PermittedSubclassSymbols + this.NamePos + this.UnnamedArg + this.DiscardedValue + this.DiscardedExpr + this.BooleanParameterType + this.ForceMatchDesugar this.noPrint this.typeDebug // inaccessible: this.posAssigner @@ -94,7 +117,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.ArrayValue this.Function this.Assign - this.AssignOrNamedArg + this.NamedArg this.If this.Match this.Return @@ -131,7 +154,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.LiteralAnnotArg this.ArrayAnnotArg this.NestedAnnotArg - this.ScalaSigBytes this.AnnotationInfo this.Annotation this.UnmappableAnnotation @@ -162,6 +184,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.ErrorType this.WildcardType this.BoundedWildcardType + this.OverloadedArgProto this.NoType this.NoPrefix this.ThisType @@ -172,6 +195,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.RefinedType this.ClassInfoType this.ConstantType + this.FoldableConstantType + this.LiteralType this.TypeRef this.MethodType this.NullaryMethodType @@ -199,12 +224,15 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.abstractTypesToBounds this.dropIllegalStarTypes this.wildcardExtrapolation + this.SubstSymMap this.IsDependentCollector this.ApproximateDependentMap - this.wildcardToTypeVarMap + this.identityTypeMap this.typeVarToOriginMap this.ErroneousCollector this.adaptToNewRunMap + this.UnrelatableCollector + this.IsRelatableCollector this.SubTypePair this.SymbolKind this.NoSymbol @@ -223,6 +251,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.JavaLangPackageClass definitions.ScalaPackage definitions.ScalaPackageClass + definitions.ScalaPackageObject definitions.RuntimePackage definitions.RuntimePackageClass definitions.AnyClass @@ -235,6 
+264,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.NothingTpe definitions.NullTpe definitions.ObjectTpe + definitions.ObjectTpeJava definitions.SerializableTpe definitions.StringTpe definitions.ThrowableTpe @@ -254,6 +284,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.NullPointerExceptionClass definitions.ThrowableClass definitions.UninitializedErrorClass + definitions.RuntimeExceptionClass definitions.IllegalArgExceptionClass definitions.UninitializedFieldConstructor definitions.PartialFunctionClass @@ -263,44 +294,49 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.StringModule definitions.ClassClass definitions.DynamicClass - definitions.SysPackage definitions.UnqualifiedModules definitions.UnqualifiedOwners definitions.PredefModule definitions.SpecializableModule definitions.ScalaRunTimeModule + definitions.MurmurHash3Module definitions.SymbolModule - definitions.StringAddClass definitions.ScalaNumberClass definitions.DelayedInitClass definitions.TypeConstraintClass definitions.SingletonClass + definitions.ListOfSingletonClassTpe definitions.SerializableClass - definitions.JavaSerializableClass definitions.ComparableClass definitions.JavaCloneableClass definitions.JavaNumberClass definitions.JavaEnumClass - definitions.RemoteInterfaceClass - definitions.RemoteExceptionClass definitions.JavaUtilMap definitions.JavaUtilHashMap + definitions.JavaRecordClass definitions.ByNameParamClass definitions.JavaRepeatedParamClass definitions.RepeatedParamClass + definitions.SubTypeClass + definitions.SameTypeClass + definitions.DummyImplicitClass definitions.ConsClass definitions.IteratorClass definitions.IterableClass definitions.ListClass definitions.SeqClass + definitions.SeqFactoryClass + definitions.UnapplySeqWrapperClass definitions.JavaStringBuilderClass definitions.JavaStringBufferClass definitions.JavaCharSequenceClass - definitions.TraversableClass definitions.ListModule + 
definitions.ListModuleAlias definitions.NilModule + definitions.NilModuleAlias definitions.SeqModule - definitions.ISeqModule + definitions.SeqModuleAlias + definitions.Collection_SeqModule definitions.ArrayModule definitions.ArrayModule_overloadedApply definitions.ArrayClass @@ -340,8 +376,10 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.BlackboxContextClass definitions.WhiteboxContextClass definitions.MacroImplAnnotation + definitions.MacroImplLocationAnnotation definitions.StringContextClass definitions.StringContextModule + definitions.ValueOfClass definitions.QuasiquoteClass definitions.QuasiquoteClass_api definitions.QuasiquoteClass_api_apply @@ -355,6 +393,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.SomeClass definitions.NoneModule definitions.SomeModule + definitions.ModuleSerializationProxyClass definitions.VarArityClass definitions.ProductClass definitions.TupleClass @@ -365,7 +404,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.SuccessClass definitions.FutureClass definitions.PromiseClass - definitions.NonFatalClass + definitions.NonFatalModule + definitions.NonFatal_apply definitions.MacroContextType definitions.ProductRootClass definitions.Any_$eq$eq @@ -405,12 +445,11 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.BoxedUnitClass definitions.BoxedUnitModule definitions.AnnotationClass - definitions.ClassfileAnnotationClass + definitions.ConstantAnnotationClass definitions.StaticAnnotationClass definitions.AnnotationRetentionAttr definitions.AnnotationRetentionPolicyAttr definitions.AnnotationRepeatableAttr - definitions.BridgeClass definitions.ElidableMethodClass definitions.ImplicitNotFoundClass definitions.ImplicitAmbiguousClass @@ -422,25 +461,36 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.NowarnClass definitions.uncheckedStableClass definitions.uncheckedVarianceClass + definitions.ChildAnnotationClass + 
definitions.RepeatedAnnotationClass + definitions.TargetNameAnnotationClass + definitions.StaticMethodAnnotationClass + definitions.PolyFunctionClass + definitions.ExperimentalAnnotationClass + definitions.AnnotationDefaultClass + definitions.JavaAnnotationClass definitions.BeanPropertyAttr definitions.BooleanBeanPropertyAttr definitions.CompileTimeOnlyAttr + definitions.DefaultArgAttr definitions.DeprecatedAttr definitions.DeprecatedNameAttr definitions.DeprecatedInheritanceAttr definitions.DeprecatedOverridingAttr definitions.NativeAttr - definitions.RemoteAttr definitions.ScalaInlineClass definitions.ScalaNoInlineClass definitions.SerialVersionUIDAttr definitions.SerialVersionUIDAnnotation definitions.SpecializedClass + definitions.SuperArgAttr + definitions.SuperFwdArgAttr definitions.ThrowsClass definitions.TransientAttr definitions.UncheckedClass definitions.UncheckedBoundsClass definitions.UnspecializedClass + definitions.UnusedClass definitions.VolatileAttr definitions.JavaDeprecatedAttr definitions.FunctionalInterfaceClass @@ -454,6 +504,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.ClassTargetClass definitions.MethodTargetClass definitions.LanguageFeatureAnnot + definitions.InheritedAttr definitions.JUnitAnnotations definitions.languageFeatureModule definitions.metaAnnotations @@ -474,6 +525,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.lazyHolders definitions.LazyRefClass definitions.LazyUnitClass + definitions.RichFloatClass definitions.allRefClasses definitions.UnitClass definitions.ByteClass @@ -504,9 +556,12 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => uncurry.DesugaredParameterType erasure.GenericArray erasure.scalaErasure + erasure.scala3Erasure erasure.specialScalaErasure + erasure.specialScala3Erasure erasure.javaErasure erasure.verifiedJavaErasure erasure.boxingErasure + erasure.boxing3Erasure } } diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala 
b/src/reflect/scala/reflect/runtime/ReflectSetup.scala index abf259d8b558..6e243c72fcd9 100644 --- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala +++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala index 1b6060466ed4..48566676840a 100644 --- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala +++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,6 +15,7 @@ package reflect.runtime import java.lang.{Class => jClass} import java.lang.reflect.{ Method, InvocationTargetException, UndeclaredThrowableException } +import scala.annotation.tailrec import scala.reflect.internal.util.AbstractFileClassLoader import scala.reflect.io._ @@ -22,6 +23,7 @@ import scala.reflect.io._ */ object ReflectionUtils { // Unwraps some chained exceptions which arise during reflective calls. + @tailrec def unwrapThrowable(x: Throwable): Throwable = x match { case _: InvocationTargetException | // thrown by reflectively invoked method or constructor _: ExceptionInInitializerError | // thrown when running a static initializer (e.g. a scala module constructor) @@ -34,13 +36,13 @@ object ReflectionUtils { } // Transforms an exception handler into one which will only receive the unwrapped // exceptions (for the values of wrap covered in unwrapThrowable.) 
- def unwrapHandler[T](pf: PartialFunction[Throwable, T]): PartialFunction[Throwable, T] = { - case ex if pf isDefinedAt unwrapThrowable(ex) => pf(unwrapThrowable(ex)) - } + def unwrapHandler[T](pf: PartialFunction[Throwable, T]): PartialFunction[Throwable, T] = + pf.compose({ case ex => unwrapThrowable(ex) }) def show(cl: ClassLoader): String = { import scala.language.reflectiveCalls + @tailrec def isAbstractFileClassLoader(clazz: Class[_]): Boolean = { if (clazz == null) return false if (clazz == classOf[AbstractFileClassLoader]) return true @@ -58,10 +60,8 @@ object ReflectionUtils { "" } cl match { - case cl if cl != null => - "%s of type %s with classpath [%s] and parent being %s".format(cl, cl.getClass, inferClasspath(cl), show(cl.getParent)) - case null => - "primordial classloader with boot classpath [%s]".format(inferClasspath(cl)) + case null => s"primordial classloader with boot classpath [${inferClasspath(cl)}]" + case _ => s"$cl of type ${cl.getClass} with classpath [${inferClasspath(cl)}] and parent being ${show(cl.getParent)}" } } @@ -106,7 +106,7 @@ object ReflectionUtils { // I think we can keep the source code though, because it can be useful to the others // // def inferAssociatedFile(clazz: Class[_]): AbstractFile = { - // // http://stackoverflow.com/questions/227486/find-where-java-class-is-loaded-from + // // https://stackoverflow.com/questions/227486/find-where-java-class-is-loaded-from // try { // var cl = clazz.getClassLoader() // if (cl == null) { diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 3d96babda838..550d4b461b7b 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -25,7 +25,7 @@ private[reflect] class Settings extends MutableSettings { trait Setting extends SettingValue { } - class BooleanSetting(x: Boolean) extends Setting { + class BooleanSetting(@deprecatedName x: Boolean) extends Setting { type T = Boolean protected var v: Boolean = x override def value: Boolean = v @@ -44,29 +44,24 @@ private[reflect] class Settings extends MutableSettings { } val async = new BooleanSetting(false) - val Xexperimental = new BooleanSetting(false) - val XfullLubs = new BooleanSetting(false) val XnoPatmatAnalysis = new BooleanSetting(false) - val strictInference = new BooleanSetting(false) val Xprintpos = new BooleanSetting(false) val Yposdebug = new BooleanSetting(false) - val Yrangepos = new BooleanSetting(false) + val Yrangepos = new BooleanSetting(true) val Yshowsymowners = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) val breakCycles = new BooleanSetting(false) val debug = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDebugAndDeoptimize() } val developer = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDeveloperAndDeoptimize() } val explaintypes = new BooleanSetting(false) - val overrideObjects = new BooleanSetting(false) val printtypes = new BooleanSetting(false) val uniqid = new BooleanSetting(false) val verbose = new BooleanSetting(false) - val YpartialUnification = new BooleanSetting(false) - val Yvirtpatmat = new BooleanSetting(false) - val Yrecursion = new IntSetting(0) - val maxClassfileName = new IntSetting(255) - def isScala211 = true - def isScala212 = true - private[scala] def isScala213 = false + val YhotStatisticsEnabled = new BooleanSetting(false) { override def postSetHook() = if (v && YstatisticsEnabled.value) StatisticsStatics.enableHotStatsAndDeoptimize() } + val YstatisticsEnabled = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableColdStatsAndDeoptimize() } + + val 
Yrecursion = new IntSetting(0) + def isScala212 = true + def isScala213 = true } diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala index cbef3d3a0b54..30082e218cee 100644 --- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala +++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -30,7 +30,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags) override def complete(sym: Symbol) = { debugInfo("completing "+sym+"/"+clazz.fullName) - assert(sym == clazz || sym == module || sym == module.moduleClass) + assert(sym == clazz || sym == module || sym == module.moduleClass, "Must be class or module") slowButSafeEnteringPhaseNotLaterThan(picklerPhase) { val loadingMirror = mirrorThatLoaded(sym) val javaClass = loadingMirror.javaClass(clazz.javaClassName) @@ -72,10 +72,10 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => /** The type completer for packages. */ class LazyPackageType extends LazyType with FlagAgnosticCompleter { - override def complete(sym: Symbol) { - assert(sym.isPackageClass) + override def complete(sym: Symbol): Unit = { + assert(sym.isPackageClass, "Must be package") // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule` - // creates a module symbol and invokes invokes `companionModule` while the `infos` field is + // creates a module symbol and invokes `companionModule` while the `infos` field is // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks. 
slowButSafeEnteringPhaseNotLaterThan(picklerPhase) { sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym) @@ -107,11 +107,11 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => // to slap a global lock on materialization in runtime reflection. class PackageScope(pkgClass: Symbol) extends Scope with SynchronizedScope { - assert(pkgClass.isType) + assert(pkgClass.isType, "Must be type") // materializing multiple copies of the same symbol in PackageScope is a very popular bug // this override does its best to guard against it - override def enter[T <: Symbol](sym: T): T = { + override def enter[T <: Symbol](sym: T): sym.type = { // workaround for scala/bug#7728 if (isCompilerUniverse) super.enter(sym) else { @@ -131,7 +131,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => // package scopes need to synchronize on the GIL // because lookupEntry might cause changes to the global symbol table override def syncLockSynchronized[T](body: => T): T = gilSynchronized(body) - private val negatives = new mutable.HashSet[Name] + private[this] val negatives = new mutable.HashSet[Name] override def lookupEntry(name: Name): ScopeEntry = syncLockSynchronized { val e = super.lookupEntry(name) if (e != null) @@ -153,8 +153,8 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => val origOwner = loadingMirror.packageNameToScala(pkgClass.fullName) val clazz = origOwner.info decl name.toTypeName val module = origOwner.info decl name.toTermName - assert(clazz != NoSymbol) - assert(module != NoSymbol) + assert(clazz != NoSymbol, "Missing class symbol") + assert(module != NoSymbol, "Missing module symbol") // currentMirror.mirrorDefining(cls) might side effect by entering symbols into pkgClass.info.decls // therefore, even though in the beginning of this method, super.lookupEntry(name) returned null // entering clazz/module now will result in a double-enter assertion in PackageScope.enter @@ -184,9 +184,8 @@ private[reflect] trait SymbolLoaders { 
self: SymbolTable => } /** Assert that packages have package scopes */ - override def validateClassInfo(tp: ClassInfoType) { - assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope]) - } + override def validateClassInfo(tp: ClassInfoType): Unit = + assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope], "Package must have package scope") override def newPackageScope(pkgClass: Symbol) = new PackageScope(pkgClass) diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index aae1a2a641c8..c97b852931df 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -22,7 +22,7 @@ package runtime private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps with Gil with ThreadLocalStorage { def info(msg: => String) = - if (settings.verbose) println("[reflect-compiler] "+msg) + if (settings.verbose.value) println("[reflect-compiler] "+msg) def debugInfo(msg: => String) = if (settings.isDebug) info(msg) @@ -35,7 +35,7 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w * * On the other hand, this also means that usage scenarios of the universe * will differ from the conventional ones. For example, we have to do additional cleanup - * in order to prevent memory leaks: http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2. + * in order to prevent memory leaks: https://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2. 
*/ override def isCompilerUniverse = false } diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala index 3ce1330008f5..4fd122ac0110 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -59,7 +59,7 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable def syncLockSynchronized[T](body: => T): T = if (isCompilerUniverse) body else syncLock.synchronized { body } override def isEmpty: Boolean = syncLockSynchronized { super.isEmpty } override def size: Int = syncLockSynchronized { super.size } - override def enter[T <: Symbol](sym: T): T = syncLockSynchronized { super.enter(sym) } + override def enter[T <: Symbol](sym: T): sym.type = syncLockSynchronized { super.enter(sym) } override def rehash(sym: Symbol, newname: Name) = syncLockSynchronized { super.rehash(sym, newname) } override def unlink(e: ScopeEntry) = syncLockSynchronized { super.unlink(e) } override def unlink(sym: Symbol) = syncLockSynchronized { super.unlink(sym) } diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 0c6ff7bc04c1..3ae4826d596b 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -32,7 +32,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb override def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable.set(value) // Set the fields which point companions at one another. Returns the module. - override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = + override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): m.type = gilSynchronized { super.connectModuleToClass(m, moduleClass) } override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol = diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala index 6f86656878cb..70b28f6f16f0 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -30,7 +30,7 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa // we can keep this lock fine-grained, because super.unique just updates the cache // and, in particular, doesn't call any reflection APIs which makes deadlocks impossible private lazy val uniqueLock = new Object - private val uniques = mutable.WeakHashMap[Type, jWeakRef[Type]]() + private[this] val uniques = mutable.WeakHashMap[Type, jWeakRef[Type]]() override def unique[T <: Type](tp: T): T = uniqueLock.synchronized { // we need to have weak uniques for runtime reflection // because unlike the normal compiler universe, reflective universe isn't organized in runs diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala index ca99bb48909b..6212ee594538 100644 --- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala +++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -19,12 +19,12 @@ private[reflect] trait ThreadLocalStorage { self: SymbolTable => // see a discussion at scala-internals for more information: - // http://groups.google.com/group/scala-internals/browse_thread/thread/337ce68aa5e51f79 + // https://groups.google.com/group/scala-internals/browse_thread/thread/337ce68aa5e51f79 trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit } private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] { // TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here? 
// (we would need a version that uses weak keys) - private val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]()) + private[this] val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]()) def get: T = { if (values containsKey currentThread) values.get(currentThread) else { diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala index 11f617cb9e5d..c031317fff2b 100644 --- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala +++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,8 +26,8 @@ import java.lang.ref.WeakReference */ private[runtime] class TwoWayCache[J, S] { - private val toScalaMap = new WeakHashMap[J, WeakReference[S]] - private val toJavaMap = new WeakHashMap[S, WeakReference[J]] + private[this] val toScalaMap = new WeakHashMap[J, WeakReference[S]] + private[this] val toJavaMap = new WeakHashMap[S, WeakReference[J]] def enter(j: J, s: S) = synchronized { // debugInfo("cached: "+j+"/"+s) diff --git a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala index 9e1e013d39e7..ec2856eb0774 100644 --- a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala +++ b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -26,8 +26,8 @@ import java.lang.ref.WeakReference private[runtime] trait TwoWayCaches { self: SymbolTable => class TwoWayCache[J, S] { - private val toScalaMap = new WeakHashMap[J, WeakReference[S]] - private val toJavaMap = new WeakHashMap[S, WeakReference[J]] + private[this] val toScalaMap = new WeakHashMap[J, WeakReference[S]] + private[this] val toJavaMap = new WeakHashMap[S, WeakReference[J]] def enter(j: J, s: S) = gilSynchronized { // debugInfo("cached: "+j+"/"+s) diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index b4c8149d9d11..0933df1fdd5b 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -23,7 +23,7 @@ package object runtime { * To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._` * * See [[scala.reflect.api.Universe]] or the - * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]] + * [[https://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]] * for more details. */ lazy val universe: api.JavaUniverse = new runtime.JavaUniverse diff --git a/src/repl-frontend/scala/tools/nsc/Interpreter.scala b/src/repl-frontend/scala/tools/nsc/Interpreter.scala new file mode 100644 index 000000000000..7cd6da4091ea --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/Interpreter.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc + +import scala.tools.nsc.interpreter.{IMain, Repl, ReplCore} +import scala.tools.nsc.interpreter.shell.{ILoop, ReplReporterImpl, ShellConfig} +import scala.tools.nsc.reporters.Reporter + +import scala.language.implicitConversions + +// Pretty gross contortion to satisfy the de facto interface expected by sbt. +// The idea is to have sbt stage a dummy interpreter, to extract the configuration +// it's trying to create, only to then actually create our interpreter when needed. + +@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0") +class Interpreter(ignored: Settings) { + private[nsc] def config = (newCompiler(null, null), parentClassLoader) + + @deprecated("Only used for passing in the classloader.", "2.13.0-M2") + protected def parentClassLoader: ClassLoader = null + + // ignore the method name -- only used to find out what old sbt versions want our compiler settings to be + @deprecated("Only used for passing in settings.", "2.13.0-M2") + protected def newCompiler(settings: Settings, reporter: Reporter) = settings +} + +@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0") +class InterpreterLoop { + @deprecated("Ignored.", "2.9.0") + var in: Any = null + + @deprecated("Ignored.", "2.13.0-M2") + def settings: Settings = null + + private var interpreterSettings: Settings = _ + private var compilerSettings: Settings = _ + private var parentClassLoader: Option[ClassLoader] = _ + + @deprecated("Mainly used for passing in settings.", "2.13.0-M2") + def interpreter_= (interpreter: Interpreter) = { + val config = interpreter.config + compilerSettings = config._1 match { case null => interpreterSettings case cs => cs } + parentClassLoader = Option(config._2) + } + + @volatile private var intp: Repl = _ + + def interpreter: ReplCore = { + if (intp eq null) { + intp = new 
IMain(interpreterSettings, parentClassLoader, compilerSettings, new ReplReporterImpl(interpreterSettings)) + } + intp + } + + @deprecated("Only used for passing in settings.", "2.13.0-M2") + def createInterpreter(): Unit = {} + + def closeInterpreter(): Unit = + if (intp ne null) { + intp.close() + intp = null + } + + def main(interpreterSettings: Settings): Unit = { + this.interpreterSettings = interpreterSettings + + // this call goes to the overridden method in sbt, + // which may customize a subclass of Interpreter with some settings + // if it does, it'll first call the setter for interpreter, and then the getter (to call setContextClassLoader) + // in any case, it'll bind some values and interpret a preamble + createInterpreter() + + val shell = new ILoop(ShellConfig(interpreterSettings)) + shell.intp = interpreter.asInstanceOf[Repl] // we've restricted the type of `interpreter` above to denote the subset used by sbt + shell.run(interpreterSettings) + } + + // sbt uses InterpreterLoop as follows -- the private method is an easy way to ensure we don't break it + // TODO: turns this into a test + // From https://github.com/sbt/sbt-zero-thirteen/blob/0.13/compile/interface/src/main/scala/xsbt/ConsoleInterface.scala + // See also: + // - https://github.com/sbt/zinc/blob/1.0/internal/compiler-bridge/src/main/scala/xsbt/InteractiveConsoleInterface.scala + // - https://github.com/sbt/zinc/blob/1.0/internal/compiler-bridge/src/main/scala/xsbt/ConsoleInterface.scala + @deprecated("Only here to ensure we don't break the sbt interface.", "2.13.0-M2") + @annotation.unused + private def __SbtConsoleInterface(compilerSettings: Settings, interpreterSettings: Settings, bootClasspathString: String, classpathString: String, initialCommands: String, cleanupCommands: String, loader: ClassLoader, bindNames: Array[String], bindValues: Array[Any]): Unit = { + import scala.tools.nsc.interpreter.InteractiveReader + import scala.tools.nsc.reporters.Reporter + + // 
compilerSettings.bootclasspath.value = bootClasspathString + // compilerSettings.classpath.value = classpathString + + val loop = new InterpreterLoop { + override def createInterpreter() = { + if (loader eq null) super.createInterpreter() + else { + in = InteractiveReader.createDefault() + interpreter = new Interpreter(settings) { + override protected def parentClassLoader = + if (loader eq null) super.parentClassLoader else loader + + override protected def newCompiler(settings: Settings, reporter: Reporter) = + super.newCompiler(compilerSettings, reporter) + } + } + + // for 2.8 compatibility + @annotation.unused + final class Compat { + def bindValue(id: String, value: Any) = + interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value) + } + @annotation.unused + implicit def compat(a: AnyRef): Compat = new Compat + + interpreter.beQuietDuring(interpreter.bindValue(??? : String, ??? : Any)) + + interpreter.interpret(??? : String) + } + + override def closeInterpreter(): Unit = { + interpreter.interpret(??? : String) + super.closeInterpreter() + } + } + loop.main(if (loader eq null) compilerSettings else interpreterSettings) + } +} diff --git a/src/repl-frontend/scala/tools/nsc/MainGenericRunner.scala b/src/repl-frontend/scala/tools/nsc/MainGenericRunner.scala new file mode 100644 index 000000000000..9e4e24d080b1 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/MainGenericRunner.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc + +import interpreter.shell.{ILoop, ShellConfig} + +object JarRunner extends CommonRunner { + def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Option[Throwable] = { + val jar = new io.Jar(jarPath) + val mainClass = jar.mainClass getOrElse (throw new IllegalArgumentException(s"Cannot find main class for jar: $jarPath")) + val jarURLs = util.ClassPath expandManifestPath jarPath + val urls = if (jarURLs.isEmpty) io.File(jarPath).toURL +: settings.classpathURLs else jarURLs + + if (settings.Ylogcp.value) { + Console.err.println("Running jar with these URLs as the classpath:") + urls foreach println + } + + runAndCatch(urls, mainClass, arguments) + } +} + +/** An object that runs Scala code. It has three possible + * sources for the code to run: pre-compiled code, a script file, + * or interactive entry. + */ +class MainGenericRunner { + def errorFn(str: String, e: Option[Throwable] = None, isFailure: Boolean = true): Boolean = { + if (str.nonEmpty) Console.err.println(str) + e.foreach(_.printStackTrace()) + !isFailure + } + + def process(args: Array[String]): Boolean = { + val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x)) + import command.{settings, howToRun, thingToRun, shortUsageMsg} + import MainGenericRunner.CommandFailure + + // only created for info message + def sampleCompiler = new Global(settings) + + def run(): Boolean = { + def isE = settings.execute.isSetByUser + def dashe = settings.execute.value + + // when -e expr -howtorun script, read any -i or -I files and append expr + // the result is saved to a tmp script file and run + def combinedCode = { + val files = + for { + dashi <- List(settings.loadfiles, settings.pastefiles) if dashi.isSetByUser + path <- dashi.value + } yield io.File(path).slurp() + + (files :+ dashe).mkString("\n\n") + } + + import GenericRunnerCommand.{AsObject, AsScript, AsJar, Error} + def runTarget(): Option[Throwable] = howToRun match 
{ + case AsObject => + ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments) + case AsScript if isE => + ScriptRunner(settings).runScriptText(combinedCode, thingToRun +: command.arguments) + case AsScript => + ScriptRunner(settings).runScript(thingToRun, command.arguments) + case AsJar => + JarRunner.runJar(settings, thingToRun, command.arguments) + case Error => + Some(CommandFailure) + case _ => + // We start the repl when no arguments are given. + if (settings.Wconf.isDefault && settings.lint.isDefault) { + // If user is agnostic about -Wconf and -Xlint, enable -deprecation and -feature + settings.deprecation.value = true + settings.feature.value = true + } + val config = ShellConfig(settings) + new ILoop(config).run(settings) + None + } + + runTarget() match { + case Some(ScriptCompileError) => false + case Some(CommandFailure) => false + case e @ Some(ex) => errorFn("", e) + case _ => true + } + } + + if (!command.ok) + errorFn(f"%n$shortUsageMsg") + else if (command.shouldStopWithInfo) + errorFn(command.getInfoMessage(sampleCompiler), isFailure = false) + else + run() + } +} + +object MainGenericRunner extends MainGenericRunner { + // control indicating command ran but non-zero exit + object CommandFailure extends scala.util.control.ControlThrowable("Command failed") + + def main(args: Array[String]): Unit = if (!process(args)) System.exit(1) +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/InteractiveReader.scala new file mode 100644 index 000000000000..c350c0ceb919 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -0,0 +1,20 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter + +// A compatibility stub for sbt +@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0") +object InteractiveReader { + @deprecated("Does nothing. Stub for sbt's ConsoleInterface.", "2.9.0") + def createDefault(): Any = null +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala new file mode 100644 index 000000000000..204800159094 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -0,0 +1,368 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter +package jline + +import org.jline.builtins.InputRC +import org.jline.reader.Parser.ParseContext +import org.jline.reader._ +import org.jline.reader.impl.{CompletionMatcherImpl, DefaultParser, LineReaderImpl} +import org.jline.terminal.Terminal + +import java.io.{ByteArrayInputStream, File} +import java.net.{MalformedURLException, URI, URL} +import java.util.{List => JList} +import scala.io.Source +import scala.reflect.internal.Chars +import scala.tools.nsc.interpreter.shell.{Accumulator, ShellConfig} +import scala.util.Using +import scala.util.control.NonFatal + +/** A Reader that delegates to JLine3. 
+ */ +class Reader private ( + config: ShellConfig, + reader: LineReader, + val accumulator: Accumulator, + val completion: shell.Completion, + terminal: Terminal) extends shell.InteractiveReader { + override val history: shell.History = new HistoryAdaptor(reader.getHistory) + override def interactive: Boolean = true + protected def readOneLine(prompt: String): String = { + try { + reader.readLine(prompt) + } catch { + case _: EndOfFileException | _: UserInterruptException => reader.getBuffer.delete() ; null + } + } + def redrawLine(): Unit = () //see https://github.com/scala/bug/issues/12395, SimpleReader#redrawLine also use `()` + def reset(): Unit = accumulator.reset() + override def close(): Unit = terminal.close() + + override def withSecondaryPrompt[T](prompt: String)(body: => T): T = { + val oldPrompt = reader.getVariable(LineReader.SECONDARY_PROMPT_PATTERN) + reader.setVariable(LineReader.SECONDARY_PROMPT_PATTERN, prompt) + try body + finally reader.setVariable(LineReader.SECONDARY_PROMPT_PATTERN, oldPrompt) + } +} + +object Reader { + import org.jline.reader.LineReaderBuilder + import org.jline.reader.impl.history.DefaultHistory + import org.jline.terminal.TerminalBuilder + + /** Construct a Reader with various JLine3-specific set-up. + * The `shell.Completion` is wrapped in the `jline.Completion` bridge to enable completion from JLine3. 
+ */ + def apply( + config: ShellConfig, + repl: Repl, + completion: shell.Completion, + accumulator: Accumulator): Reader = { + require(repl != null) + if (config.isReplDebug) initLogging(trace = config.isReplTrace) + + System.setProperty(LineReader.PROP_SUPPORT_PARSEDLINE, java.lang.Boolean.TRUE.toString()) + + def inputrcFileUrl(): Option[URL] = { + sys.props + .get("jline.inputrc") + .flatMap { path => + try Some(new URI(path).toURL) + catch { + case _: MalformedURLException => + Some(new File(path).toURI.toURL) + } + }.orElse { + sys.props.get("user.home").map { home => + val f = new File(home).toPath.resolve(".inputrc").toFile + (if (f.isFile) f else new File("/etc/inputrc")).toURI.toURL + } + } + } + + def urlByteArray(url: URL): Array[Byte] = { + Using.resource(Source.fromURL(url).bufferedReader()) { + bufferedReader => + LazyList.continually(bufferedReader.read).takeWhile(_ != -1).map(_.toByte).toArray + } + } + + lazy val inputrcFileContents: Option[Array[Byte]] = inputrcFileUrl().map(in => urlByteArray(in)) + val jlineTerminal = TerminalBuilder.builder().build() + val completer = new Completion(completion) + val parser = new ReplParser(repl) + val history = new DefaultHistory + + val builder = + LineReaderBuilder.builder() + .appName("scala") + .completer(completer) + .history(history) + .parser(parser) + .terminal(jlineTerminal) + + locally { + import LineReader._, Option._ + builder + .option(AUTO_GROUP, false) + .option(LIST_PACKED, true) // TODO + .option(INSERT_TAB, true) // At the beginning of the line, insert tab instead of completing + .variable(HISTORY_FILE, config.historyFile) // Save history to file + .variable(SECONDARY_PROMPT_PATTERN, config.encolor(config.continueText)) // Continue prompt + .variable(WORDCHARS, LineReaderImpl.DEFAULT_WORDCHARS.filterNot("*?.[]~=/&;!#%^(){}<>".toSet)) + .option(Option.DISABLE_EVENT_EXPANSION, true) // Otherwise `scala> println(raw"\n".toList)` gives `List(n)` !! 
+ .option(Option.COMPLETE_MATCHER_CAMELCASE, true) + .option(Option.COMPLETE_MATCHER_TYPO, true) + } + object customCompletionMatcher extends CompletionMatcherImpl { + override def compile(options: java.util.Map[LineReader.Option, java.lang.Boolean], prefix: Boolean, line: CompletingParsedLine, caseInsensitive: Boolean, errors: Int, originalGroupName: String): Unit = { + val errorsReduced = line.wordCursor() match { + case 0 | 1 | 2 | 3 => 0 // disable JLine's levenshtein-distance based typo matcher for short strings + case 4 | 5 => math.max(errors, 1) + case _ => errors + } + super.compile(options, prefix, line, caseInsensitive, errorsReduced, originalGroupName) + } + + override def matches(candidates: JList[Candidate]): JList[Candidate] = { + val matching = super.matches(candidates) + matching + } + } + + builder.completionMatcher(customCompletionMatcher) + + val reader = builder.build() + try inputrcFileContents.foreach(f => InputRC.configure(reader, new ByteArrayInputStream(f))) catch { + case NonFatal(_) => + } //ignore + + object ScalaShowType { + val Name = "scala-show-type" + private var lastInvokeLocation: Option[(String, Int)] = None + def apply(): Boolean = { + val nextInvokeLocation = Some((reader.getBuffer.toString, reader.getBuffer.cursor())) + val cursor = reader.getBuffer.cursor() + val text = reader.getBuffer.toString + val result = completer.complete(text, cursor, filter = true) + if (lastInvokeLocation == nextInvokeLocation) { + show(Naming.unmangle(result.typedTree)) + lastInvokeLocation = None + } else { + show(result.typeAtCursor) + lastInvokeLocation = nextInvokeLocation + } + true + } + def show(text: String): Unit = if (text != "") { + reader.callWidget(LineReader.CLEAR) + reader.getTerminal.writer.println() + reader.getTerminal.writer.println(text) + reader.callWidget(LineReader.REDRAW_LINE) + reader.callWidget(LineReader.REDISPLAY) + reader.getTerminal.flush() + } + } + reader.getWidgets().put(ScalaShowType.Name, () => ScalaShowType()) + 
+ def secure(p: java.nio.file.Path): Unit = { + try scala.reflect.internal.util.OwnerOnlyChmod.chmodFileOrCreateEmpty(p) + catch { case scala.util.control.NonFatal(e) => + if (config.isReplDebug) e.printStackTrace() + config.replinfo(s"Warning: history file ${p}'s permissions could not be restricted to owner-only.") + } + } + def backupHistory(): Unit = { + import java.nio.file.{Files, Paths, StandardCopyOption} + import StandardCopyOption.REPLACE_EXISTING + val hf = Paths.get(config.historyFile) + val bk = Paths.get(config.historyFile + ".bk") + Files.move(/*source =*/ hf, /*target =*/ bk, REPLACE_EXISTING) + secure(bk) + } + // always try to restrict permissions on history file, + // creating an empty file if none exists. + secure(java.nio.file.Paths.get(config.historyFile)) + try history.attach(reader) + catch { + case e: IllegalArgumentException if e.getMessage.contains("Bad history file syntax") => + backupHistory() + history.attach(reader) + case _: NumberFormatException => + backupHistory() + history.attach(reader) + } + new Reader(config, reader, accumulator, completer, jlineTerminal) + } + + class ReplParser(repl: Repl) extends Parser { + val scalaParser = new ScalaParser(repl) + val commandParser = new CommandParser(repl) + def parse(line: String, cursor: Int, context: ParseContext): ParsedLine = + if (line.startsWith(":")) commandParser.parse(line, cursor, context) + else scalaParser.parse(line, cursor, context) + } + class ScalaParser(repl: Repl) extends Parser { + import Results._ + + def parse(line: String, cursor: Int, context: ParseContext): ParsedLine = { + import ParseContext._ + context match { + case ACCEPT_LINE => + repl.parseString(line) match { + case Incomplete if line.endsWith("\n\n") => throw new SyntaxError(0, 0, "incomplete") // incomplete but we're bailing now + case Incomplete => throw new EOFError(0, 0, "incomplete") // incomplete so keep reading input + case Success | Error => tokenize(line, cursor) // Try a real "final" parse. 
(dnw: even for Error??) + } + case COMPLETE => tokenize(line, cursor) // Parse to find completions (typically after a Tab). + case SECONDARY_PROMPT => + tokenize(line, cursor) // Called when we need to update the secondary prompts. + case SPLIT_LINE | UNSPECIFIED => + ScalaParsedLine(line, cursor, 0, 0, Nil) + } + } + private def tokenize(line: String, cursor: Int): ScalaParsedLine = { + val tokens = repl.reporter.suppressOutput { + repl.tokenize(line) + } + repl.reporter.reset() + if (tokens.isEmpty) ScalaParsedLine(line, cursor, 0, 0, Nil) + else { + val current = tokens.find(t => t.start <= cursor && cursor <= t.end) + val (wordCursor, wordIndex) = current match { + case Some(t) if t.isIdentifier => + (cursor - t.start, tokens.indexOf(t)) + case Some(t) => + val isIdentifierStartKeyword = (t.start until t.end).forall(i => Chars.isIdentifierPart(line.charAt(i))) + if (isIdentifierStartKeyword) + (cursor - t.start, tokens.indexOf(t)) + else + (0, -1) + case _ => + (0, -1) + } + ScalaParsedLine(line, cursor, wordCursor, wordIndex, tokens) + } + } + } + class CommandParser(repl: Repl) extends Parser { + val defaultParser = new DefaultParser() + def parse(line: String, cursor: Int, context: ParseContext): ParsedLine = + defaultParser.parse(line, cursor, context) + } + + /** + * Lines of Scala are opaque to JLine. + * + * @param line the line + */ + case class ScalaParsedLine(line: String, cursor: Int, wordCursor: Int, wordIndex: Int, tokens: List[TokenData]) extends CompletingParsedLine { + require(wordIndex <= tokens.size, + s"wordIndex $wordIndex out of range ${tokens.size}") + require(wordIndex == -1 || wordCursor == 0 || wordCursor <= tokens(wordIndex).end - tokens(wordIndex).start, + s"wordCursor $wordCursor should be in range ${tokens(wordIndex)}") + // Members declared in org.jline.reader.CompletingParsedLine. + // This is where backticks could be added, for example. 
+ def escape(candidate: CharSequence, complete: Boolean): CharSequence = candidate + def rawWordCursor: Int = wordCursor + def rawWordLength: Int = word.length + def word: String = + if (wordIndex == -1 || wordIndex == tokens.size) + "" + else { + val t = tokens(wordIndex) + line.substring(t.start, t.end) + } + def words: JList[String] = { + import scala.jdk.CollectionConverters._ + tokens.map(t => line.substring(t.start, t.end)).asJava + } + } + + private def initLogging(trace: Boolean): Unit = { + import java.util.logging._ + val logger = Logger.getLogger("org.jline") + val handler = new ConsoleHandler() + val level = if (trace) Level.FINEST else Level.FINE + logger.setLevel(level) + handler.setLevel(level) + logger.addHandler(handler) + } +} + +/** A Completion bridge to JLine3. + * It delegates both interfaces to an underlying `Completion`. + */ +class Completion(delegate: shell.Completion) extends shell.Completion with Completer { + require(delegate != null) + // REPL Completion + def complete(buffer: String, cursor: Int, filter: Boolean): shell.CompletionResult = delegate.complete(buffer, cursor, filter) + + // JLine Completer + def complete(lineReader: LineReader, parsedLine: ParsedLine, newCandidates: JList[Candidate]): Unit = { + def candidateForResult(cc: CompletionCandidate, deprecated: Boolean, universal: Boolean): Candidate = { + val value = cc.name + val displayed = cc.name + (cc.arity match { + case CompletionCandidate.Nullary => "" + case CompletionCandidate.Nilary => "()" + case _ => "(" + }) + val group = null // results may be grouped + val descr = // displayed alongside + if (deprecated) "deprecated" + else if (universal) "universal" + else null + val suffix = null // such as slash after directory name + val key = null // same key implies mergeable result + val complete = false // more to complete? 
+ new Candidate(value, displayed, group, descr, suffix, key, complete) + } + val result = complete(parsedLine.line, parsedLine.cursor, filter = false) + for (group <- result.candidates.groupBy(_.name)) { + // scala/bug#12238 + // Currently, only when all methods are Deprecated should they be displayed `Deprecated` to users. Only handle result of PresentationCompilation#toCandidates. + // We don't handle result of PresentationCompilation#defStringCandidates, because we need to show the deprecated here. + val allDeprecated = group._2.forall(_.isDeprecated) + val allUniversal = group._2.forall(_.isUniversal) + group._2.foreach(cc => newCandidates.add(candidateForResult(cc, allDeprecated, allUniversal))) + } + + val parsedLineWord = parsedLine.word() + result.candidates.filter(c => c.name == parsedLineWord || c.alias.fold(false)(a => a == parsedLineWord)) match { + case Nil => + case exacts => + val declStrings = exacts.map(_.declString()).filterNot(_ == "") + if (declStrings.nonEmpty) { + lineReader.callWidget(LineReader.CLEAR) + lineReader.getTerminal.writer.println() + for (declString <- declStrings) + lineReader.getTerminal.writer.println(declString) + lineReader.callWidget(LineReader.REDRAW_LINE) + lineReader.callWidget(LineReader.REDISPLAY) + lineReader.getTerminal.flush() + } + } + } +} + +// TODO +class HistoryAdaptor(history: History) extends shell.History { + //def historicize(text: String): Boolean = false + + def asStrings: List[String] = Nil + //def asStrings(from: Int, to: Int): List[String] = asStrings.slice(from, to) + def index: Int = 0 + def size: Int = 0 +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala new file mode 100644 index 000000000000..628d3a3fb8cc --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala @@ -0,0 +1,36 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter +package shell + +trait Completion { + final def complete(buffer: String, cursor: Int): CompletionResult = complete(buffer, cursor, filter = true) + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult +} +object NoCompletion extends Completion { + def complete(buffer: String, cursor: Int, filter: Boolean) = NoCompletions +} + +case class CompletionResult(line: String, cursor: Int, candidates: List[CompletionCandidate], typeAtCursor: String = "", typedTree: String = "") { + final def orElse(other: => CompletionResult): CompletionResult = + if (candidates.nonEmpty) this else other +} +object CompletionResult { + val empty: CompletionResult = NoCompletions +} +object NoCompletions extends CompletionResult("", -1, Nil, "", "") + +case class MultiCompletion(underlying: Completion*) extends Completion { + override def complete(buffer: String, cursor: Int, filter: Boolean) = + underlying.foldLeft(CompletionResult.empty)((r,c) => r.orElse(c.complete(buffer, cursor, filter))) +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/History.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/History.scala new file mode 100644 index 000000000000..c7e45b98be24 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/History.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter.shell + +/** Support for adding to history and retrieving it. 
+ */ +trait History { + def historicize(text: String): Boolean = false + + def asStrings: List[String] + def asStrings(from: Int, to: Int): List[String] = asStrings.slice(from, to) + def index: Int + def size: Int +} +object NoHistory extends History { + def asStrings = Nil + def index = 0 + def size = 0 +} diff --git a/src/repl/scala/tools/nsc/interpreter/IBindings.java b/src/repl-frontend/scala/tools/nsc/interpreter/shell/IBindings.java similarity index 94% rename from src/repl/scala/tools/nsc/interpreter/IBindings.java rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/IBindings.java index abe0267375c8..a97a968b955d 100644 --- a/src/repl/scala/tools/nsc/interpreter/IBindings.java +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/IBindings.java @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,7 +10,7 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc.interpreter; +package scala.tools.nsc.interpreter.shell; import java.util.Map; import java.util.AbstractMap; diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala new file mode 100644 index 000000000000..aff002e9f187 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala @@ -0,0 +1,1093 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.interpreter +package shell + +import java.io.{BufferedReader, PrintWriter} +import java.nio.file.{Files, Path => JPath} +import java.util.concurrent.TimeUnit + +import scala.PartialFunction.cond +import scala.Predef.{println => _, _} +import scala.annotation.tailrec +import scala.jdk.CollectionConverters._ +import scala.language.implicitConversions +import scala.reflect.classTag +import scala.reflect.internal.util.{BatchSourceFile, NoPosition} +import scala.reflect.io.{AbstractFile, Directory, File, Path} +import scala.sys.process.Parser.tokenize +import scala.tools.asm.ClassReader +import scala.tools.nsc.Settings +import scala.tools.nsc.util.{stackTraceString, stringFromStream} +import scala.tools.nsc.interpreter.{AbstractOrMissingHandler, Repl, IMain, Phased, jline} +import scala.tools.nsc.interpreter.Results.{Error, Incomplete, Success} +import scala.tools.nsc.interpreter.StdReplTags._ +import scala.util.chaining._ + +/** The Scala interactive shell. This part provides the user interface, + * with evaluation and auto-complete handled by IMain. + * + * There should be no direct dependency of this code on the compiler; + * it should all go through the `intp` reference to the interpreter, + * or maybe eventually even over the wire to a remote compiler. 
+ */ +class ILoop(config: ShellConfig, inOverride: BufferedReader = null, + protected val out: PrintWriter = new PrintWriter(Console.out, true)) extends LoopCommands { + import config._ + + // If set before calling run(), the provided interpreter will be used + // (until a destructive reset command is issued -- TODO: delegate resetting to the repl) + // Set by createInterpreter, closeInterpreter (and CompletionTest) + var intp: Repl = _ + + def Repl(config: ShellConfig, interpreterSettings: Settings, out: PrintWriter) = + new IMain(interpreterSettings, None, interpreterSettings, new ReplReporterImpl(config, interpreterSettings, out)) + + def global = intp.asInstanceOf[IMain].global + + // Set by run and interpretAllFrom (to read input from file). + private var in: InteractiveReader = _ + + // TODO: the new interface should make settings a ctor arg of ILoop, + // so that this can be a lazy val + private lazy val defaultIn: InteractiveReader = + if (batchMode) SimpleReader(batchText) + else if (inOverride != null) SimpleReader(inOverride, out, completion(new Accumulator), interactive = true) + else if (haveInteractiveConsole) { + val accumulator = new Accumulator + jline.Reader(config, intp, completion(accumulator), accumulator) + } + else SimpleReader() + + private val interpreterInitialized = new java.util.concurrent.CountDownLatch(1) + + def createTempDirectory(): JPath = Files.createTempDirectory("scala-repl").tap(_.toFile().deleteOnExit()) + + // TODO: move echo and friends to ReplReporterImpl + // When you know you are most likely breaking into the middle + // of a line being typed. This softens the blow. 
+ protected def echoAndRefresh(msg: String) = { + echo("\n" + msg) + in.redrawLine() + } + protected var mum = false + protected def echo(msg: String) = if (!mum || isReplDebug) { + out println msg + out.flush() + } + // turn off our echo + def echoOff[A](op: => A): A = { + val saved = mum + mum = true + try op finally mum = saved + } + + private def printShellInterrupt() = out.print(ShellConfig.InterruptedString) + + protected def asyncMessage(msg: String): Unit = { + if (isReplInfo || isReplPower) + echoAndRefresh(msg) + } + + override def echoCommandMessage(msg: String): Unit = { + intp.reporter.withoutTruncating { intp.reporter.printMessage(msg) } + } + + import scala.tools.nsc.interpreter.ReplStrings.{words, string2codeQuoted} + + def welcome = enversion(welcomeString) + + /** Print a welcome message! */ + def printWelcome(): Unit = { + replinfo(s"[info] started at ${new java.util.Date}") + if (!welcome.isEmpty) echo(welcome) + } + + def history = in.history + + /** A reverse list of commands to replay if the user requests a :replay */ + var replayCommandStack: List[String] = Nil + + /** A list of commands to replay if the user requests a :replay */ + def replayCommands = replayCommandStack.reverse + + /** Record a command for replay should the user request a :replay */ + def addReplay(cmd: String) = replayCommandStack ::= cmd + + def savingReplayStack[T](body: => T): T = { + val saved = replayCommandStack + try body + finally replayCommandStack = saved + } + + + /** Close the interpreter and set the var to null. + * + * Used by sbt. + */ + def closeInterpreter(): Unit = { + if (intp ne null) { + intp.close() + intp = null + } + if (in ne null) { + in.close() + in = null + } + } + + + /** Create a new interpreter. + * + * Used by sbt. 
+ */ + def createInterpreter(interpreterSettings: Settings): Unit = { + intp = Repl(config, interpreterSettings, out) + } + + /** Show the history */ + lazy val historyCommand: LoopCommand = new LoopCommand("history", "show the history (optional num is commands to show)", None) { + override def usage = "[num]" + def defaultLines = 20 + + def apply(line: String): Result = { + if (history eq NoHistory) + return "No history available." + + val xs = words(line) + val current = history.index + val count = try xs.head.toInt catch { case _: Exception => defaultLines } + val lines = history.asStrings takeRight count + val offset = current - lines.size + 1 + + for ((line, index) <- lines.zipWithIndex) + echo("%3d %s".format(index + offset, line)) + } + } + + + /** Search the history */ + def searchHistory(_cmdline: String): Unit = { + val cmdline = _cmdline.toLowerCase + val offset = history.index - history.size + 1 + + for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline) + echo("%d %s".format(index + offset, line)) + } + + import LoopCommand.{ cmd, nullary, cmdWithHelp } + + /** Standard commands **/ + lazy val standardCommands = List( + cmd("help", "[command]", "print this summary or command-specific help", helpCommand), + cmd("completions", "", "output completions for the given string", completionsCommand), + // TODO maybe just drop these commands, as jline subsumes them -- before reenabling, finish scala.tools.nsc.interpreter.jline.HistoryAdaptor + //cmd("edit", "|", "edit history", editCommand), + //historyCommand, + //cmd("h?", "", "search the history", searchHistory), + cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand), + cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand), + cmd("javap", "", "disassemble a file or class name", javapCommand), + cmd("line", "|", "place line(s) at the end of history", lineCommand), + cmd("load", "", "interpret lines in a 
file", loadCommand, fileCompletion), + cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand, fileCompletion), + nullary("power", "enable power user mode", () => powerCmd()), + nullary("quit", "exit the REPL", () => Result(keepRunning = false, None)), + cmd("replay", "[options]", "reset the REPL and replay all previous commands", replayCommand, settingsCompletion), + cmd("require", "", "add a jar to the classpath", require), + cmd("reset", "[options]", "reset the REPL to its initial state, forgetting all session entries", resetCommand, settingsCompletion), + cmd("save", "", "save replayable session to a file", saveCommand, fileCompletion), + shCommand, + cmd("settings", "", "update compiler options, if possible; see reset", changeSettings, settingsCompletion), + nullary("silent", "disable/enable automatic printing of results", () => verbosity()), + cmd("type", "[-v] ", "display the type of an expression without evaluating it", typeCommand), + cmdWithHelp("kind", kindUsage, "display the kind of a type. 
see also :help kind", Some(kindCommandDetailedHelp), kindCommand), + nullary("warnings", "show the suppressed warnings from the most recent line which had any", () => warningsCommand()) + ) + + /** Power user commands */ + lazy val powerCommands: List[LoopCommand] = List( + cmd("phase", "", "set the implicit phase for power commands", phaseCommand) + ) + + // complete filename + val fileCompletion: Completion = new Completion { + val emptyWord = """(\s+)$""".r.unanchored + val directorily = """(\S*/)$""".r.unanchored + val trailingWord = """(\S+)$""".r.unanchored + def listed(buffer: String, i: Int, dir: Option[Path]) = + dir.filter(_.isDirectory) + .map(d => CompletionResult(buffer, i, d.toDirectory.list.map(x => CompletionCandidate(x.name)).toList)) + .getOrElse(NoCompletions) + def listedIn(dir: Directory, name: String) = dir.list.filter(_.name.startsWith(name)).map(_.name).toList + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = + buffer.substring(0, cursor) match { + case emptyWord(s) => listed(buffer, cursor, Directory.Current) + case directorily(s) => listed(buffer, cursor, Option(Path(s))) + case trailingWord(s) => + val f = File(s) + val (i, maybes) = + if (f.isFile) (cursor - s.length, List(f.toAbsolute.path)) + else if (f.isDirectory) (cursor - s.length, List(s"${f.toAbsolute.path}/")) + else if (f.parent.exists) (cursor - f.name.length, listedIn(f.parent.toDirectory, f.name)) + else (-1, Nil) + if (maybes.isEmpty) NoCompletions else CompletionResult(buffer, i, maybes.map(CompletionCandidate(_))) + case _ => NoCompletions + } + } + + // complete settings name + val settingsCompletion: Completion = new Completion { + val trailingWord = """(\S+)$""".r.unanchored + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = { + buffer.substring(0, cursor) match { + case trailingWord(s) => + val maybes = intp.visibleSettings.filter(x => if (filter) x.name.startsWith(s) else true).map(_.name) + 
.filterNot(cond(_) { case "-"|"-X"|"-Y" => true }).sorted + if (maybes.isEmpty) NoCompletions + else CompletionResult(buffer, cursor - s.length, maybes.map(CompletionCandidate(_)), "", "") + case _ => NoCompletions + } + } + } + + + private def importsCommand(line: String): Result = + intp.importsCommandInternal(words(line)) mkString ("\n") + + private def implicitsCommand(line: String): Result = { + val (implicits, res) = intp.implicitsCommandInternal(line) + implicits foreach echoCommandMessage + res + } + + // Still todo: modules. + private def typeCommand(line0: String): Result = { + line0.trim match { + case "" => ":type [-v] . see also :help kind" + case s => + val verbose = s startsWith "-v " + val (sig, verboseSig) = intp.typeCommandInternal(s.stripPrefix("-v ").trim, verbose) + if (verbose) echoCommandMessage("// Type signature") + echoCommandMessage(sig) + if (!verboseSig.isEmpty) echoCommandMessage("\n// Internal Type structure\n"+ verboseSig) + () + } + } + + private lazy val kindUsage: String = "[-v] " + + private lazy val kindCommandDetailedHelp: String = + s""":kind $kindUsage + |Displays the kind of a given type. + | + | -v Displays verbose info. + | + |"Kind" is a word used to classify types and type constructors + |according to their level of abstractness. + | + |Concrete, fully specified types such as `Int` and `Option[Int]` + |are called "proper types" and denoted as `A` using Scala + |notation, or with the `*` symbol. + | + | scala> :kind Option[Int] + | Option[Int]'s kind is A + | + |In the above, `Option` is an example of a first-order type + |constructor, which is denoted as `F[A]` using Scala notation, or + |* -> * using the star notation. `:kind` also includes variance + |information in its output, so if we ask for the kind of `Option`, + |we actually see `F[+A]`: + | + | scala> :k -v Option + | Option's kind is F[+A] + | * -(+)-> * + | This is a type constructor: a 1st-order-kinded type. 
+ | + |When you have more complicated types, `:kind` can be used to find + |out what you need to pass in. + | + | scala> trait ~>[-F1[_], +F2[_]] {} + | scala> :kind ~> + | ~>'s kind is X[-F1[A1],+F2[A2]] + | + |This shows that `~>` accepts something of `F[A]` kind, such as + |`List` or `Vector`. It's an example of a type constructor that + |abstracts over type constructors, also known as a higher-order + |type constructor or a higher-kinded type. + |""".stripMargin + + private def kindCommand(expr: String): Result = { + expr.trim match { + case "" => s":kind $kindUsage" + case s => intp.kindCommandInternal(s.stripPrefix("-v ").trim, verbose = s.startsWith("-v ")) + } + } + + private def warningsCommand(): Result = { + if (intp.lastWarnings.isEmpty) + "Can't find any cached warnings." + else + intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) } + } + + private def javapCommand(line: String): Result = { + def handle(results: List[Javap.JpResult]): Result = + results match { + case Nil => () + case res :: rest => + if (res.isError) res.value.toString + else { + res.show() + handle(rest) + } + } + handle(Javap(intp)(words(line): _*)) + } + + private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent" + + private def phaseCommand(name: String): Result = { + val phased: Phased = intp.power.phased + import phased.NoPhaseName + + if (name == "clear") { + phased.set(NoPhaseName) + intp.clearExecutionWrapper() + "Cleared active phase." + } + else if (name == "") phased.get match { + case NoPhaseName => "Usage: :phase (e.g. typer, erasure.next, erasure+3)" + case ph => "Active phase is '%s'. (To clear, :phase clear)".format(phased.get) + } + else { + val what = phased.parse(name) + if (what.isEmpty || !phased.set(what)) + "'" + name + "' does not appear to represent a valid phase." 
+ else { + intp.setExecutionWrapper(pathToPhaseWrapper) + val activeMessage = + if (what.toString.length == name.length) "" + what + else "%s (%s)".format(what, name) + + "Active phase is now: " + activeMessage + } + } + } + + /** Available commands */ + def commands: List[LoopCommand] = standardCommands ++ ( + if (isReplPower) powerCommands else Nil + ) + + val replayQuestionMessage = + """|That entry seems to have slain the compiler. Shall I replay + |your session? I can re-run each line except the last one. + |[y/n] + """.trim.stripMargin + + private val crashRecovery: PartialFunction[Throwable, Boolean] = { + case ex: Throwable => + val (err, explain) = ( + if (intp.initializeComplete) + (stackTraceString(ex), "") + else + (ex.getMessage, "The compiler did not initialize.\n") + ) + echo(err) + + ex match { + case _: NoSuchMethodError | _: NoClassDefFoundError => + echo("\nUnrecoverable error.") + throw ex + case _ => + def fn(): Boolean = + try in.readYesOrNo(explain + replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() }) + catch { case _: RuntimeException => false } + + if (fn()) replay() + else echo("\nAbandoning crashed session.") + } + true + } + + // after process line, OK continue, ERR break, or EOF all done + object LineResults extends Enumeration { + type LineResult = Value + val EOF, ERR, OK = Value + } + import LineResults.LineResult + + // Notice failure to create compiler + def command(line: String): Result = + if (line startsWith ":") colonCommand(line) + else if (!intp.initializeCompiler()) Result(keepRunning = false, None) + else Result(keepRunning = true, interpretStartingWith(line)) + + // return false if repl should exit + def processLine(line: String): Boolean = { + // Long timeout here to avoid test failures under heavy load. 
+ interpreterInitialized.await(10, TimeUnit.MINUTES) + + val res = command(line) + res.lineToRecord.foreach(addReplay) + res.keepRunning + } + + lazy val prompt = encolor(promptText) + + // R as in REPL + def readOneLine(): String = { + out.flush() + in.reset() + in.readLine(prompt) + } + + // L as in REPL + @tailrec final def loop(): LineResult = + readOneLine() match { + case null => LineResults.EOF + case s if (try processLine(s) catch crashRecovery) => loop() + case _ => LineResults.ERR + } + + /** interpret all lines from a specified file */ + def interpretAllFrom(file: File, verbose: Boolean = false): Unit = { + // Saving `in` is not factored out because we don't want to encourage doing this everywhere (the new design shouldn't rely on mutation) + val savedIn = in + try + savingReplayStack { + // `applyReader` will `close()` `fileReader` before returning, + // so, keep `in` pointing at `fileReader` until that's done. + file applyReader { fileReader => + echo(s"Loading $file...") + in = SimpleReader(fileReader, out, interactive = verbose, verbose = verbose) + loop() + } + } + finally in = savedIn + } + + private def changeSettings(line: String): Result = { + val intp = this.intp + def showSettings() = for (s <- { intp.userSetSettings }.toSeq.sorted(Ordering.ordered[intp.Setting])) echo(s.toString) + if (line.isEmpty) showSettings() + else { intp.updateSettings(words(line)) ; () } + } + + /** create a new interpreter and replay the given commands */ + def replayCommand(line: String): Unit = { + def run(destructive: Boolean): Unit = { + if (destructive) createInterpreter(intp.settings) else reset() + replay() + } + if (line.isEmpty) run(destructive = false) + else if (intp.updateSettings(words(line))) run(destructive = true) + } + /** Announces as it replays. 
*/ + def replay(): Unit = { + if (replayCommandStack.isEmpty) + echo("Nothing to replay.") + else { + val reprompt = "replay> " + intp.reporter.indenting(reprompt.length) { + for (cmd <- replayCommands) { + echo(s"$reprompt$cmd") + command(cmd) + echo("") // flush because maybe cmd will have its own output + } + } + } + } + /** `reset` the interpreter in an attempt to start fresh. + * Supplying settings creates a new compiler. + */ + def resetCommand(line: String): Unit = { + def run(destructive: Boolean): Unit = { + echo("Resetting REPL state.") + if (replayCommandStack.nonEmpty) { + echo("Forgetting this session history:\n") + replayCommands foreach echo + echo("") + replayCommandStack = Nil + } + if (intp.namedDefinedTerms.nonEmpty) + echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", ")) + if (intp.definedTypes.nonEmpty) + echo("Forgetting defined types: " + intp.definedTypes.mkString(", ")) + if (destructive) createInterpreter(intp.settings) else reset() + } + if (line.isEmpty) run(destructive = false) + else if (intp.updateSettings(words(line))) run(destructive = true) + } + /** Resets without announcements. */ + def reset(): Unit = { + intp.reset() + unleashAndSetPhase() + } + + def lineCommand(what: String): Result = editCommand(what, None) + + def completion(accumulator: Accumulator = new Accumulator) = { + val rc = new ReplCompletion(intp, accumulator) + MultiCompletion(shellCompletion, rc) + } + val shellCompletion = new Completion { + override def complete(buffer: String, cursor: Int, filter: Boolean) = + if (buffer.startsWith(":")) colonCompletion(buffer, cursor).complete(buffer, cursor, filter) + else NoCompletions + } + + // this may be used by editors that embed the REPL (e.g. 
emacs) to present completions themselves; + // it's also used by ReplTest + def completionsCommand(what: String): Result = { + val completions = in.completion.complete(what, what.length) + val candidates = completions.candidates.filterNot(_.isUniversal) + // condition here is a bit weird because of the weird hack we have where + // the first candidate having an empty defString means it's not really + // completion, but showing the method signature instead + if (candidates.headOption.exists(_.name.nonEmpty)) { + val prefix = + if (completions == NoCompletions) "" + else what.substring(0, completions.cursor) + // hvesalai (emacs sbt-mode maintainer) says it's important to echo only once and not per-line + echo( + candidates.map(c => s"[completions] $prefix${c.name}") + .mkString("\n") + ) + } + Result.default // never record completions + } + + // :edit id or :edit line + def editCommand(what: String): Result = editCommand(what, ShellConfig.EDITOR) + + def editCommand(what: String, editor: Option[String]): Result = { + def diagnose(code: String): Unit = paste.incomplete("The edited code is incomplete!\n", "", code) + + def edit(text: String): Result = editor match { + case Some(ed) => + val tmp = File.makeTemp() + tmp.writeAll(text) + try { + val pr = new ProcessResult(s"$ed ${tmp.path}") + pr.exitCode match { + case 0 => + tmp.safeSlurp() match { + case Some(edited) if edited.trim.isEmpty => echo("Edited text is empty.") + case Some(edited) => + echo(edited.linesIterator map ("+" + _) mkString "\n") + val res = intp interpret edited + if (res == Incomplete) diagnose(edited) + else { + history.historicize(edited) + Result(lineToRecord = Some(edited), keepRunning = true) + } + case None => echo("Can't read edited text. 
Did you delete it?") + } + case x => echo(s"Error exit from $ed ($x), ignoring") + } + } finally { + tmp.delete() + } + case None => + if (history.historicize(text)) echo("Placing text in recent history.") + else echo(f"No EDITOR defined and you can't change history, echoing your text:%n$text") + } + + // if what is a number, use it as a line number or range in history + def isNum = what forall (c => c.isDigit || c == '-' || c == '+') + // except that "-" means last value + def isLast = (what == "-") + if (isLast || !isNum) { + intp.requestDefining(if (isLast) intp.mostRecentVar else what) match { + case Some(req) => edit(req.line) + case None => echo(s"No symbol in scope: $what") + } + } else try { + val s = what + // line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur) + val (start, len) = + if ((s indexOf '+') > 0) { + val (a,b) = s splitAt (s indexOf '+') + (a.toInt, b.drop(1).toInt) + } else { + (s indexOf '-') match { + case -1 => (s.toInt, 1) + case 0 => val n = s.drop(1).toInt ; (history.index - n, n) + case _ if s.last == '-' => val n = s.init.toInt ; (n, history.index - n) + case i => val n = s.take(i).toInt ; (n, s.drop(i+1).toInt - n) + } + } + val index = (start - 1) max 0 + val text = history.asStrings(index, index + len) mkString "\n" + edit(text) + } catch { + case _: NumberFormatException => echo(s"Bad range '$what'") + echo("Use line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)") + } + } + + /** fork a shell and run a command */ + lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])", None) { + override def usage = "" + def apply(line: String): Result = line match { + case "" => showUsage() + case _ => + val toRun = s"new ${classOf[ProcessResult].getName}(${string2codeQuoted(line)})" + intp interpret toRun + () + } + } + + def withFile[A](filename: String)(action: File => A): Option[A] = intp.withLabel(filename) { + 
Some(File(filename)).filter(_.exists).map(action).tap(res => + if (res.isEmpty) intp.reporter.warning(NoPosition, s"File `$filename` does not exist.") + ) + } + + def loadCommand(arg: String): Result = { + def run(file: String, args: List[String], verbose: Boolean) = withFile(file) { f => + intp.interpret(s"val args: Array[String] = ${ args.map("\"" + _ + "\"").mkString("Array(", ",", ")") }") + interpretAllFrom(f, verbose) + Result recording s":load $arg" + } getOrElse Result.default + + tokenize(arg) match { + case "-v" :: file :: rest => run(file, rest, verbose = true) + case file :: rest => run(file, rest, verbose = false) + case _ => echo("usage: :load -v file") ; Result.default + } + } + + def saveCommand(filename: String): Result = ( + if (filename.isEmpty) echo("File name is required.") + else if (replayCommandStack.isEmpty) echo("No replay commands in session") + else File(filename).printlnAll(replayCommands: _*) + ) + + /** Adds jar file to the current classpath. Jar will only be added if it + * does not contain classes that already exist on the current classpath. + * + * Importantly, `require` adds jars to the classpath ''without'' resetting + * the state of the interpreter. This is in contrast to `replay` which can + * be used to add jars to the classpath and which creates a new instance of + * the interpreter and replays all interpreter expressions. 
+ */ + def require(arg: String): Unit = { + val f = File(arg).normalize + + val jarFile = AbstractFile.getDirectory(new java.io.File(arg)) + if (jarFile == null) { + echo(s"Cannot load '$arg'") + return + } + + def flatten(f: AbstractFile): Iterator[AbstractFile] = + if (f.isClassContainer) f.iterator.flatMap(flatten) + else Iterator(f) + + val entries = flatten(jarFile) + + def classNameOf(classFile: AbstractFile): String = { + val input = classFile.input + try { + val reader = new ClassReader(input) + reader.getClassName.replace('/', '.') + } finally { + input.close() + } + } + def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined + val existingClass = entries.filter(_.hasExtension("class")).map(classNameOf).find(alreadyDefined) + + if (!f.exists) echo(s"The path '$f' doesn't seem to exist.") + else if (existingClass.nonEmpty) echo(s"The path '$f' cannot be loaded, it contains a classfile that already exists on the classpath: ${existingClass.get}") + else { + intp.addUrlsToClassPath(f.toURI.toURL) + echo("Added '%s' to classpath.".format(f.path)) + repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.classPathString)) + } + } + + def powerCmd(): Result = { + if (isReplPower) "Already in power mode." 
+ else enablePowerMode(isDuringInit = false) + } + def enablePowerMode(isDuringInit: Boolean) = { + config.power setValue true + unleashAndSetPhase() + asyncEcho(isDuringInit, powerBannerMessage) + } + + private def powerBannerMessage: String = + powerBanner.option map { + case f if f.getName == "classic" => intp.power.classic + case f => Files.readAllLines(f.toPath).asScala.mkString("\n") + } getOrElse intp.power.banner + + private def unleashAndSetPhase() = + if (isReplPower) { + intp.power.unleash() + + intp.reporter.suppressOutput { + (powerInitCode.option + map (f => Files.readAllLines(f.toPath).asScala.toList) + getOrElse intp.power.initImports + foreach intp.interpret) + + phaseCommand("typer") // Set the phase to "typer" + } + } + + def asyncEcho(async: Boolean, msg: => String): Unit = { + if (async) asyncMessage(msg) + else echo(msg) + } + + def verbosity() = { + intp.reporter.togglePrintResults() + replinfo(s"Result printing is ${ if (intp.reporter.printResults) "on" else "off" }.") + } + + private def readWhile(cond: String => Boolean) = { + Iterator continually in.readLine("") takeWhile (x => x != null && cond(x)) + } + + /* :paste -raw file + * or + * :paste < EOF + * your code + * EOF + * :paste <~ EOF + * ~your code + * EOF + * and optionally + * :paste -java + */ + def pasteCommand(arg: String): Result = { + var shouldReplay: Option[String] = None + var label = "" + def result = Result(keepRunning = true, shouldReplay) + val (flags, args) = tokenize(arg).span(_.startsWith("-")) + val raw = flags.contains("-raw") + val java = flags.contains("-java") + val badFlags = flags.filterNot(List("-raw", "-java").contains) + def usage() = echo("usage: :paste [-raw | -java] file | < EOF") + def pasteFile(name: String): String = { + label = name + withFile(name) { f => + shouldReplay = Some(s":paste $arg") + f.slurp().trim().tap(s => echo(if (s.isEmpty) s"File contains no code: $f" else s"Pasting file $f...")) + }.getOrElse("") + } + def pasteWith(margin: 
String, eof: Option[String]): String = { + echo(s"// Entering paste mode (${ eof getOrElse "ctrl-D" } to finish)\n") + in.withSecondaryPrompt("") { + val delimiter = eof.orElse(config.pasteDelimiter.option) + def atEOF(s: String) = delimiter.map(_ == s).getOrElse(false) + val input = readWhile(s => !atEOF(s)).mkString("\n") + margin match { + case "" => input.trim + case "-" => input.linesIterator.map(_.trim).mkString("\n") + case _ => input.stripMargin(margin.head).trim + } + } + } + def interpretCode(code: String) = { + echo("// Exiting paste mode... now interpreting.") + if (intp.withLabel(label)(intp.interpret(code)) == Incomplete) + paste.incomplete("The pasted code is incomplete!", label, code) + } + def compileCode(code: String) = { + echo("// Exiting paste mode... now compiling with scalac.") + paste.compilePaste(label = label, code = code) + } + def compileJava(code: String): Unit = { + def pickLabel() = { + val gstable = global + val jparser = gstable.newJavaUnitParser(gstable.newCompilationUnit(code = code)) + val result = jparser.parse().collect { + case gstable.ClassDef(mods, className, _, _) if mods.isPublic => className + } + result.headOption + } + echo("// Exiting paste mode... now compiling with javac.") + pickLabel() match { + case Some(className) => + label = s"${className.decoded}" + val out = createTempDirectory() + JavacTool(out, intp.classLoader).compile(label, code) match { + case Some(errormsg) => echo(s"Compilation failed! $errormsg") + case None => intp.addUrlsToClassPath(out.toUri().toURL()) + } + case _ => + echo(s"No class detected in source!") + } + } + def dispatch(code: String): Unit = + if (code.isEmpty) + echo("// Exiting paste mode... 
nothing to compile.") + else + intp.reporter.indenting(0) { + if (java) compileJava(code) + else if (raw || paste.isPackaged(code)) compileCode(code) + else interpretCode(code) + } + args match { + case _ if badFlags.nonEmpty => usage() + case name :: Nil if !name.startsWith("<") => dispatch(pasteFile(name)) + case Nil => dispatch(pasteWith("", None)) + case here :: Nil => dispatch(pasteWith(here.slice(1, 2), None)) + case here :: eof :: Nil if here.startsWith("<") => dispatch(pasteWith(here.slice(1, 2), Some(eof))) + case _ => usage() + } + result + } + + private object paste extends Pasted(config.promptText, encolor(continueText), continueText) { + def interpret(line: String) = intp interpret line + def echo(message: String) = ILoop.this echo message + + val leadingElement = raw"(?s)\s*(package\s|/)".r + def isPackaged(code: String): Boolean = { + leadingElement.findPrefixMatchOf(code) + .map(m => if (m.group(1) == "/") intp.isPackaged(code) else true) + .getOrElse(false) + } + + // if input is incomplete, wrap and compile for diagnostics. + def incomplete(message: String, label: String, code: String): Boolean = { + echo(message) + val errless = intp.compileSources(new BatchSourceFile(label, s"object pastel {\n$code\n}")) + if (errless) echo("No error found in incomplete source.") + errless + } + + def compilePaste(label: String, code: String): Boolean = { + val errless = intp.compileSources(new BatchSourceFile(label, code)) + if (!errless) echo("There were compilation errors!") + errless + } + } + + private object invocation { + // used during loop + def unapply(line: String): Boolean = + intp.mostRecentVar != "" && Parsed.looksLikeInvocation(line) + } + + private val lineComment = """\s*//.*""".r // all comment + + /** Interpret expressions starting with the first line. + * Read lines until a complete compilation unit is available + * or until a syntax error has been seen. If a full unit is + * read, go ahead and interpret it. 
Return the full string + * to be recorded for replay, if any. + */ + final def interpretStartingWith(start: String): Option[String] = { + def loop(): Option[String] = { + val code = in.accumulator.toString + intp.interpret(code) match { + case Error => None + case Success => Some(code) + case Incomplete if in.interactive && code.endsWith("\n\n") => + echo("You typed two blank lines. Starting a new command.") + None + case Incomplete => + in.readLine(paste.ContinuePrompt) match { + case null => + // partial input with no input forthcoming, + // so ask again for parse error message. + // This happens at EOF of a :load file. + intp.interpretFinally(code) + None + case line => in.accumulator += line ; loop() + } + } + } + + start match { + case "" | lineComment() => None // empty or line comment, do nothing + case paste() => + val pasted = Iterator(start) ++ readWhile(!paste.isPromptOnly(_)) + paste.transcript(pasted) match { + case Some(s) => interpretStartingWith(s) + case _ => None + } + case invocation() => in.accumulator += intp.mostRecentVar + start ; loop() + case _ => in.accumulator += start ; loop() + } + } + + /** + * Allows to specify custom code to run quietly in the preamble + * @return custom Scala code to run automatically at the startup of the REPL + */ + protected def internalReplAutorunCode(): Seq[String] = Seq.empty + + /** Actions to cram in parallel while collecting first user input at prompt. + * Run with output muted both from ILoop and from the intp reporter. + */ + private def interpretPreamble() = { + // Bind intp somewhere out of the regular namespace where + // we can get at it in generated code. + intp.quietBind(intp.namedParam[Repl](s"$$intp", intp)(tagOfRepl, classTag[Repl])) + + internalReplAutorunCode().foreach(intp.quietRun) + + // Auto-run code via some setting. 
+ (config.replAutorunCode.option + flatMap (f => File(f).safeSlurp()) + foreach (intp quietRun _) + ) + // power mode setup + if (isReplPower) + enablePowerMode(isDuringInit = true) + + for (f <- filesToLoad) { + loadCommand(f) + addReplay(s":load $f") + } + for (f <- filesToPaste) { + pasteCommand(f) + addReplay(s":paste $f") + } + } + + /** Start an interpreter with the given settings. + * @return true if successful + */ + def run(interpreterSettings: Settings): Boolean = { + if (!batchMode) printWelcome() + + createInterpreter(interpreterSettings) + in = defaultIn + + intp.reporter.withoutPrintingResults(intp.withSuppressedSettings { + intp.initializeCompiler() + interpreterInitialized.countDown() // TODO: move to reporter.compilerInitialized ? + + if (intp.reporter.hasErrors) { + echo("Interpreter encountered errors during initialization!") + throw new InterruptedException + } + + echoOff { interpretPreamble() } + }) + + // start full loop (if initialization was successful) + try + loop() match { + case LineResults.EOF if in.interactive => printShellInterrupt(); true + case LineResults.ERR => false + case _ => true + } + catch AbstractOrMissingHandler() + finally closeInterpreter() + } +} + +object ILoop { + implicit def loopToInterpreter(repl: ILoop): Repl = repl.intp + + class TestConfig(delegate: ShellConfig) extends ShellConfig { + def filesToPaste: List[String] = delegate.filesToPaste + def filesToLoad: List[String] = delegate.filesToLoad + def batchText: String = delegate.batchText + def batchMode: Boolean = delegate.batchMode + def doCompletion: Boolean = delegate.doCompletion + def haveInteractiveConsole: Boolean = delegate.haveInteractiveConsole + + def xsource: String = "" + + override val colorOk = delegate.colorOk + + // No truncated output, because the result changes on Windows because of line endings + override val maxPrintString = sys.Prop[Int]("wtf").tap(_.set("0")) + } + object TestConfig { + def apply(settings: Settings) = new 
TestConfig(ShellConfig(settings)) + } + + // Designed primarily for use by test code: take a String with a + // bunch of code, and prints out a transcript of what it would look + // like if you'd just typed it into the repl. + def runForTranscript(code: String, settings: Settings, inSession: Boolean = false): String = + runForTranscript(code, settings, TestConfig(settings), inSession) + + def runForTranscript(code: String, settings: Settings, config: ShellConfig, inSession: Boolean): String = { + import java.io.{BufferedReader, OutputStreamWriter, StringReader} + import java.lang.System.{lineSeparator => EOL} + + stringFromStream { ostream => + Console.withOut(ostream) { + val output = new PrintWriter(new OutputStreamWriter(ostream), true) { + // skip margin prefix for continuation lines, unless preserving session text for test + // should test for repl.paste.ContinueString or config.continueText.contains(ch) + override def write(str: String) = + if (inSession || (str.exists(ch => ch != ' ' && ch != '|'))) super.write(str) + } + val input = new BufferedReader(new StringReader(s"${code.trim}${EOL}")) { + override def readLine(): String = { + mark(1) // default buffer is 8k + val c = read() + if (c == -1 || c == 4) { + null + } else { + reset() + val s = super.readLine() + // helping out by printing the line being interpreted. + output.println(s) + s + } + } + } + + val repl = new ILoop(config, input, output) { + // remove welcome message as it has versioning info (for reproducible test results), + override def welcome = "" + } + if (settings.classpath.isDefault) + settings.classpath.value = sys.props("java.class.path") + + repl.run(settings) + } + } + } + + /** Creates an interpreter loop with default settings and feeds + * the given code to it as input. 
+ */ + def run(code: String, sets: Settings = new Settings): String = { + import java.io.{BufferedReader, OutputStreamWriter, StringReader} + + stringFromStream { ostream => + Console.withOut(ostream) { + val input = new BufferedReader(new StringReader(code)) + val output = new PrintWriter(new OutputStreamWriter(ostream), true) + val config = ShellConfig(sets) + val repl = new ILoop(config, input, output) { + // remove welcome message as it has versioning info (for reproducible test results), + override def welcome = "" + } + + if (sets.classpath.isDefault) + sets.classpath.value = sys.props("java.class.path") + + repl.run(sets) + } + } + } + def run(lines: List[String]): String = run(lines.mkString("", "\n", "\n")) +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/InteractiveReader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/InteractiveReader.scala new file mode 100644 index 000000000000..3542a08a5fe7 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/InteractiveReader.scala @@ -0,0 +1,70 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.interpreter.shell + +import java.io.IOException + +/** Reads lines from an input stream */ +trait InteractiveReader { + def interactive: Boolean + + def accumulator: Accumulator + + def reset(): Unit + def history: History + def completion: Completion + def redrawLine(): Unit + def withSecondaryPrompt[T](prompt: String)(body: => T): T = body + + def readYesOrNo(prompt: String, alt: => Boolean): Boolean = + readOneLine(prompt).trim.toUpperCase.headOption match { + case Some('Y') => true + case Some('N') => false + case _ => alt + } + + protected def readOneLine(prompt: String): String + + def readLine(prompt: String): String = readOneLine(prompt) + /* + // hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP + if (scala.util.Properties.isMac) restartSysCalls(readOneLine(prompt), reset()) + else readOneLine(prompt) + */ + + @deprecated("No longer used", "2.13.1") + def initCompletion(completion: Completion): Unit = () + + /* + * Closes the underlying resource created by the reader. + */ + def close(): Unit +} + +object InteractiveReader { + val msgEINTR = "Interrupted system call" + def restartSysCalls[R](body: => R, reset: => Unit): R = + try body catch { + case e: IOException if e.getMessage == msgEINTR => reset ; body + } + + def apply(): InteractiveReader = SimpleReader() +} + +/** Accumulate multi-line input. Shared by Reader and Completer, which must parse accumulated result. 
*/ +class Accumulator { + var text: List[String] = Nil + def reset(): Unit = text = Nil + def +=(s: String): Unit = text :+= s + override def toString = text.mkString("\n") +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/JavacTool.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/JavacTool.scala new file mode 100644 index 000000000000..f4d6b2bc9660 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/JavacTool.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter +package shell + +import java.io.CharArrayWriter +import java.net.URI +import java.nio.charset.StandardCharsets.UTF_8 +import java.nio.file.Path +import java.util.Locale +import java.util.concurrent.ConcurrentLinkedQueue +import javax.tools._, JavaFileManager.Location, StandardLocation._, JavaFileObject.Kind, Kind._ +import scala.collection.mutable.Clearable +import scala.jdk.CollectionConverters._ +import scala.reflect.io.AbstractFile +import scala.util.chaining._ + +import System.lineSeparator + +class JavacTool private (tool: JavaCompiler, dir: AbstractFile, loader: ClassLoader) { + private val out = new CharArrayWriter + def written = { + out.flush() + val w = out.toString + out.reset() + w + } + val listener = new JavaReporter + val locale = Locale.getDefault + val fileManager = new JavaToolFileManager(dir, loader)(tool.getStandardFileManager(listener, locale, UTF_8)) + + def compile(label: String, code: String): Option[String] = { + val options = ( + "-encoding" :: + UTF_8.name() :: + Nil + ).asJava + val classes: java.lang.Iterable[String] = null + val units = List(StringFileObject(label, code)).asJava + val task = tool.getTask(out, 
fileManager, listener, options, classes, units) + val success = task.call() + if (success) None else Some(listener.reported(locale)) + } +} +object JavacTool { + def apply(dir: AbstractFile, loader: ClassLoader): JavacTool = new JavacTool(ToolProvider.getSystemJavaCompiler, dir, loader) + def apply(dir: Path, loader: ClassLoader) : JavacTool = apply(AbstractFile.getURL(dir.toUri().toURL()), loader) +} + +// use `dir` for output, `loader` for inputs +class JavaToolFileManager(dir: AbstractFile, loader: ClassLoader)(delegate: JavaFileManager) extends ForwardingJavaFileManager[JavaFileManager](delegate) { + override def getJavaFileForOutput(location: Location, className: String, kind: Kind, sibling: FileObject): JavaFileObject = { + require(location == CLASS_OUTPUT, s"$location is not CLASS_OUTPUT") + require(kind == CLASS, s"$kind is not CLASS") + AbstractFileObject(dir, className, kind) + } +} + +class AbstractFileObject(file: AbstractFile, uri0: URI, kind0: Kind) extends SimpleJavaFileObject(uri0, kind0) { + override def delete() = { file.delete() ; true } + override def openInputStream() = file.input + override def openOutputStream() = file.output +} +object AbstractFileObject { + def apply(dir: AbstractFile, path: String, kind: Kind) = { + val segments = path.replace(".", "/").split("/") + val parts = segments.init + val name = segments.last + val subdir = parts.foldLeft(dir)((vd, n) => vd.subdirectoryNamed(n)) + val file = subdir.fileNamed(s"${name}${kind.extension}") + val uri = file.file.toURI + new AbstractFileObject(file, uri, kind) + } +} + +// name is the URI path +// +class StringFileObject(uri0: URI, code: String) extends SimpleJavaFileObject(uri0, SOURCE) { + override def getCharContent(ignoreEncodingErrors: Boolean) = code +} +object StringFileObject { + def apply(label: String, code: String): StringFileObject = + new StringFileObject(URI.create(s"string:///${label.replace('.','/')}${SOURCE.extension}"), code) +} + +// A clearable diagnostic collector. 
+// +class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable { + type D = Diagnostic[_ <: JavaFileObject] + val diagnostics = new ConcurrentLinkedQueue[D] + private def messagesIterator(implicit locale: Locale) = diagnostics.iterator.asScala.map(_.getMessage(locale)) + override def report(d: Diagnostic[_ <: JavaFileObject]) = diagnostics.add(d) + override def clear() = diagnostics.clear() + /** All diagnostic messages. + * @param locale Locale for diagnostic messages, null by default. + */ + def messages(implicit locale: Locale = null) = messagesIterator.toList + + def reported(implicit locale: Locale = null): String = + if (diagnostics.isEmpty) "" + else + messages + .mkString("", lineSeparator, lineSeparator) + .tap(_ => clear()) +} diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/JavapClass.scala similarity index 88% rename from src/repl/scala/tools/nsc/interpreter/JavapClass.scala rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/JavapClass.scala index a67708a46359..4da845d3ca68 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/JavapClass.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,12 +10,12 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools.nsc -package interpreter +package scala.tools.nsc.interpreter +package shell import scala.language.reflectiveCalls -import java.io.PrintWriter +import java.io.{InputStream, PrintWriter} import scala.reflect.internal.util.ScalaClassLoader import scala.tools.nsc.util.stringFromWriter import scala.util.{Failure, Success, Try} @@ -27,7 +27,7 @@ import Javap.JpResult */ class JavapClass( val loader: ScalaClassLoader, - intp: IMain, + intp: Repl, tool: JavapTool ) { import JavapClass._ @@ -36,7 +36,7 @@ class JavapClass( import java.io.FileNotFoundException import scala.reflect.io.File - private val printWriter: PrintWriter = new IMain.ReplStrippingWriter(intp) + private val printWriter: PrintWriter = intp.reporter.out def apply(args: Seq[String]): List[JpResult] = { val (options0, targets) = args.partition(s => s.startsWith("-") && s.length > 1) @@ -147,7 +147,7 @@ object JavapClass { } } -abstract class Javap(protected val intp: IMain) { +abstract class Javap(protected val intp: Repl) { def loader: Either[String, ClassLoader] def task(loader: ClassLoader): Either[String, JavapTool] @@ -176,14 +176,14 @@ object Javap { import java.io.File import java.net.URL - private val javap8 = "scala.tools.nsc.interpreter.Javap8" - private val javap9 = "scala.tools.nsc.interpreter.Javap9" - private val javapP = "scala.tools.nsc.interpreter.JavapProvider" + private val javap8 = "scala.tools.nsc.interpreter.shell.Javap8" + private val javap9 = "scala.tools.nsc.interpreter.shell.Javap9" + private val javapP = "scala.tools.nsc.interpreter.shell.JavapProvider" // load and run a tool - def apply(intp: IMain)(targets: String*): List[JpResult] = { + def apply(intp: Repl)(targets: String*): List[JpResult] = { def outDirIsClassPath: Boolean = intp.settings.Yreploutdir.isSetByUser && { - val outdir = intp.replOutput.dir.file.getAbsoluteFile + val outdir = intp.outputDir.file.getAbsoluteFile intp.compilerClasspath.exists(url => url.isFile && new 
File(url.toURI).getAbsoluteFile == outdir) } def create(toolName: String) = { @@ -199,7 +199,7 @@ object Javap { else if (!isJavaAtLeast("9")) create(javap8)(targets) else { var res: Option[List[JpResult]] = None - if (classOf[scala.tools.nsc.interpreter.IMain].getClassLoader != null) { + if (classOf[Repl].getClassLoader != null) { val javap = create(javap9) if (javap.loader.isRight) res = Some(javap(targets)) @@ -270,12 +270,12 @@ object Javap { /** Create a Showable to show tool messages and tool output, with output massage. * @param filter whether to strip REPL names */ - def showable(intp: IMain, filter: Boolean, text: String): Showable = + def showable(intp: Repl, filter: Boolean, text: String): Showable = new Showable { - val out = new IMain.ReplStrippingWriter(intp) + val out = intp.reporter.out def show() = - if (filter) intp.withoutTruncating(out.write(text)) - else intp.withoutUnwrapping(out.write(text, 0, text.length)) + if (filter) intp.reporter.withoutTruncating(out.write(text)) + else intp.reporter.withoutUnwrapping(out.write(text, 0, text.length)) } sealed trait JpResult { @@ -368,7 +368,7 @@ object Javap { } /** Loaded reflectively under JDK8 to locate tools.jar and load JavapTask tool. */ -class Javap8(intp0: IMain) extends Javap(intp0) { +class Javap8(intp0: Repl) extends Javap(intp0) { import scala.tools.util.PathResolver import scala.util.Properties.jdkHome @@ -388,7 +388,7 @@ class Javap8(intp0: IMain) extends Javap(intp0) { } /** Loaded reflectively under JDK9 to load JavapTask tool. */ -class Javap9(intp0: IMain) extends Javap(intp0) { +class Javap9(intp0: Repl) extends Javap(intp0) { override def loader = Right(new ClassLoader(intp.classLoader) with ScalaClassLoader).filterOrElse( _.tryToInitializeClass[AnyRef](JavapTask.taskClassName).isDefined, @@ -398,9 +398,9 @@ class Javap9(intp0: IMain) extends Javap(intp0) { } /** Loaded reflectively under JDK9 to locate ToolProvider. 
*/ -class JavapProvider(intp0: IMain) extends Javap(intp0) { +class JavapProvider(intp0: Repl) extends Javap(intp0) { import JavapTool.Input - import Javap.{filterLines, HashSplit} + import Javap.filterLines import java.util.Optional //import java.util.spi.ToolProvider @@ -410,8 +410,8 @@ class JavapProvider(intp0: IMain) extends Javap(intp0) { private def tool(provider: ToolProvider) = new JavapTool { override def apply(options: Seq[String], filter: Boolean)(inputs: Seq[Input]): List[JpResult] = inputs.map { - case Input(target @ HashSplit(klass, _), actual, Success(_)) => - val more = List("-cp", intp.replOutput.dir.file.getAbsoluteFile.toString, actual) + case Input(target, actual, Success(_)) => + val more = List("-cp", intp.outputDir.file.getAbsoluteFile.toString, actual) val s = stringFromWriter(w => provider.run(w, w, (options ++ more).toArray)) JpResult(filterLines(target, s)) case Input(_, _, Failure(e)) => JpResult(e.toString) @@ -440,15 +440,12 @@ object JavapTool { } // Machinery to run JavapTask reflectively -class JavapTask(val loader: ScalaClassLoader, intp: IMain) extends JavapTool { - import javax.tools.{Diagnostic, DiagnosticListener, +class JavapTask(val loader: ScalaClassLoader, intp: Repl) extends JavapTool { + import javax.tools.{DiagnosticListener, ForwardingJavaFileManager, JavaFileManager, JavaFileObject, SimpleJavaFileObject, StandardLocation} import java.io.CharArrayWriter - import java.util.Locale - import java.util.concurrent.ConcurrentLinkedQueue - import scala.collection.JavaConverters._ - import scala.collection.generic.Clearable + import scala.jdk.CollectionConverters._ import JavapTool._ import Javap.{filterLines, showable} @@ -471,31 +468,15 @@ class JavapTask(val loader: ScalaClassLoader, intp: IMain) extends JavapTool { // val Ok, Error, CmdErr, SysErr, Abnormal = Value //} - class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable { - type D = Diagnostic[_ <: JavaFileObject] - val diagnostics = new 
ConcurrentLinkedQueue[D] - override def report(d: Diagnostic[_ <: JavaFileObject]) = diagnostics.add(d) - override def clear() = diagnostics.clear() - /** All diagnostic messages. - * @param locale Locale for diagnostic messages, null by default. - */ - def messages(implicit locale: Locale = null) = diagnostics.asScala.map(_.getMessage(locale)).toList - - def reportable(): String = { - import scala.util.Properties.lineSeparator - clear() - if (messages.nonEmpty) messages.mkString("", lineSeparator, lineSeparator) else "" - } - } val reporter = new JavaReporter // DisassemblerTool.getStandardFileManager(reporter,locale,charset) val defaultFileManager: JavaFileManager = - (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod ( + (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get.getMethod( "create", classOf[DiagnosticListener[_]], classOf[PrintWriter] - ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] + ).invoke(null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] // manages named arrays of bytes, which might have failed to load class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager) @@ -553,8 +534,8 @@ class JavapTask(val loader: ScalaClassLoader, intp: IMain) extends JavapTool { case Input(target, actual, Success(_)) => import java.lang.reflect.InvocationTargetException try { - if (task(options, Seq(actual), inputs).call()) JpResult(showable(intp, filter, filterLines(target, s"${reporter.reportable()}${written}"))) - else JpResult(reporter.reportable()) + if (task(options, Seq(actual), inputs).call()) JpResult(showable(intp, filter, filterLines(target, s"${reporter.reported()}${written}"))) + else JpResult(reporter.reported()) } catch { case e: InvocationTargetException => e.getCause match { case t: IllegalArgumentException => JpResult(t.getMessage) // bad option diff --git 
a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Logger.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Logger.scala new file mode 100644 index 000000000000..2c1f45807096 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Logger.scala @@ -0,0 +1,22 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter.shell + +import java.io.PrintWriter + +trait Logger { + def isInfo: Boolean + def isDebug: Boolean + def isTrace: Boolean + def out: PrintWriter +} diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala similarity index 85% rename from src/repl/scala/tools/nsc/interpreter/LoopCommands.scala rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala index 04ee11fbad72..b706d16f1c9e 100644 --- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,14 +10,13 @@ * additional information regarding copyright ownership. 
*/ -package scala -package tools -package nsc -package interpreter +package scala.tools.nsc.interpreter +package shell +import java.io.{PrintWriter => JPrintWriter} import scala.language.implicitConversions - import scala.collection.mutable.ListBuffer +import scala.tools.nsc.interpreter.ReplStrings.words class ProcessResult(val line: String) { import scala.sys.process._ @@ -31,7 +30,8 @@ class ProcessResult(val line: String) { override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode) } -trait LoopCommands { self: { def echo(msg: String): Unit } => +trait LoopCommands { + protected def echo(msg: String): Unit protected def out: JPrintWriter // So outputs can be suppressed. @@ -59,6 +59,7 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => // subclasses may provide completions def completion: Completion = NoCompletion + override def toString(): String = name } object LoopCommand { def nullary(name: String, help: String, f: () => Result): LoopCommand = @@ -84,12 +85,16 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => } def helpSummary() = { - val usageWidth = commands map (_.usageMsg.length) max + val usageWidth = commands.map(_.usageMsg.length).max val formatStr = s"%-${usageWidth}s %s" echo("All commands can be abbreviated, e.g., :he instead of :help.") for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help)) + echo("") + echo("Useful default key bindings:") + echo(" TAB code completion") + echo(" CTRL-ALT-T show type at cursor, hit again to show code with types/implicits inferred.") } def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { @@ -123,8 +128,6 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => case _ => echo("?") } - import Completion.Candidates - def colonCompletion(line: String, cursor: Int): Completion = line match { case commandish(name0, rest) => @@ -136,18 +139,18 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => case cmd :: Nil if 
!cursorAtName => cmd.completion case cmd :: Nil if cmd.name == name => NoCompletion case cmd :: Nil => - val completion = if (cmd.isInstanceOf[NullaryCmd] || cursor < line.length) cmd.name else cmd.name + " " + val completion = ":" + cmd.name new Completion { - def resetVerbosity(): Unit = () - def complete(buffer: String, cursor: Int) = Candidates(cursor = 1, List(completion)) + def complete(buffer: String, cursor: Int, filter: Boolean) = + CompletionResult(buffer, cursor = 1, List(CompletionCandidate(completion)), "", "") } - case cmd :: rest => + case cmd :: _ => new Completion { - def resetVerbosity(): Unit = () - def complete(buffer: String, cursor: Int) = Candidates(cursor = 1, cmds.map(_.name)) + def complete(buffer: String, cursor: Int, filter: Boolean) = + CompletionResult(buffer, cursor = 1, cmds.map(cmd => CompletionCandidate(":" + cmd.name)), "", "") } } - case _ => NoCompletion + case _ => NoCompletion } class NullaryCmd(name: String, help: String, detailedHelp: Option[String], diff --git a/src/repl/scala/tools/nsc/interpreter/Parsed.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Parsed.scala similarity index 80% rename from src/repl/scala/tools/nsc/interpreter/Parsed.scala rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/Parsed.scala index eb32618c34dd..5f9b52e769f1 100644 --- a/src/repl/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Parsed.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,19 +10,16 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools.nsc -package interpreter - -import util.returning - -trait Delimited { - self: Parsed => - - def delimited: Char => Boolean - def escapeChars: List[Char] = List('\\') +package scala.tools.nsc.interpreter.shell +/** One instance of a command buffer. + */ +class Parsed private ( + val buffer: String, + val cursor: Int, + val delimited: Char => Boolean) { /** Break String into args based on delimiting function. - */ + */ protected def toArgs(s: String): List[String] = if (s == "") Nil else (s indexWhere isDelimiterChar) match { @@ -31,16 +28,8 @@ trait Delimited { } def isDelimiterChar(ch: Char) = delimited(ch) - def isEscapeChar(ch: Char): Boolean = escapeChars contains ch -} + def isEscapeChar(ch: Char): Boolean = ch == '\\' -/** One instance of a command buffer. - */ -class Parsed private ( - val buffer: String, - val cursor: Int, - val delimited: Char => Boolean -) extends Delimited { def isEmpty = args.isEmpty def isUnqualified = args.size == 1 def isAtStart = cursor <= 0 @@ -48,7 +37,7 @@ class Parsed private ( private var _verbosity = 0 def verbosity = _verbosity - def withVerbosity(v: Int): this.type = returning[this.type](this)(_ => _verbosity = v) + def withVerbosity(v: Int): this.type = { _verbosity = v ; this } def args = toArgs(buffer take cursor).toList def bufferHead = args.head @@ -83,4 +72,12 @@ object Parsed { new Parsed(onull(s), cursor, delimited) def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.') + + // a leading dot plus something, but not ".." 
or "./", ignoring leading whitespace + private val dotlike = """\s*\.[^./].*""".r + def looksLikeInvocation(code: String) = code match { + case null => false // insurance + case dotlike() => true + case _ => false + } } diff --git a/src/repl/scala/tools/nsc/interpreter/Pasted.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Pasted.scala similarity index 81% rename from src/repl/scala/tools/nsc/interpreter/Pasted.scala rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/Pasted.scala index 51cf5cf31fe3..929787736020 100644 --- a/src/repl/scala/tools/nsc/interpreter/Pasted.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Pasted.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,8 +10,9 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc -package interpreter +package scala.tools.nsc.interpreter.shell + +import scala.tools.nsc.interpreter.Results.{Result, Incomplete} /** If it looks like they're pasting in a scala interpreter * transcript, remove all the formatting we inserted so we @@ -22,14 +23,15 @@ package interpreter * a transcript should itself be pasteable and should achieve * the same result. 
*/ -abstract class Pasted(prompt: String) { - def interpret(line: String): IR.Result +abstract class Pasted(prompt: String, continuePrompt: String, continueText: String) { + + def interpret(line: String): Result def echo(message: String): Unit val PromptString = prompt.linesIterator.toList.last val AltPromptString = "scala> " - val ContinuePrompt = replProps.continuePrompt - val ContinueString = replProps.continueText // " | " + val ContinuePrompt = continuePrompt + val ContinueString = continueText // " | " val anyPrompt = { import scala.util.matching.Regex.quote s"""\\s*(?:${quote(PromptString.trim)}|${quote(AltPromptString.trim)})\\s*""".r @@ -58,10 +60,10 @@ abstract class Pasted(prompt: String) { private val resAssign = """^val (res\d+).*""".r private class PasteAnalyzer(val lines: List[String]) { - val referenced = lines flatMap (resReference findAllIn _.trim.stripPrefix("res")) toSet - val ActualPromptString = lines find matchesPrompt map (s => - if (matchesString(s, PromptString)) PromptString else AltPromptString) getOrElse PromptString - val cmds = lines reduceLeft append split ActualPromptString filterNot (_.trim == "") toList + val referenced = lines.flatMap(s => resReference.findAllIn(s.trim.stripPrefix("res"))).toSet + val ActualPromptString = lines.find(matchesPrompt).map(s => + if (matchesString(s, PromptString)) PromptString else AltPromptString).getOrElse(PromptString) + val cmds = lines.reduceLeft(append).split(ActualPromptString).filterNot(_.trim == "").toList /** If it's a prompt or continuation line, strip the formatting bits and * assemble the code. 
Otherwise ship it off to be analyzed for res references @@ -104,12 +106,12 @@ abstract class Pasted(prompt: String) { def interpreted(line: String) = { echo(line.trim) val res = interpret(line) - if (res != IR.Incomplete) echo("") + if (res != Incomplete) echo("") res } def incompletely(cmd: String) = { print(ActualPromptString) - interpreted(cmd) == IR.Incomplete + interpreted(cmd) == Incomplete } def run(): Option[String] = { echo(s"// Replaying ${cmds.size} commands from transcript.\n") @@ -118,7 +120,7 @@ abstract class Pasted(prompt: String) { } // Run transcript and return incomplete line if any. - def transcript(lines: TraversableOnce[String]): Option[String] = { + def transcript(lines: IterableOnce[String]): Option[String] = { echo("\n// Detected repl transcript. Paste more, or ctrl-D to finish.\n") apply(lines) } @@ -128,10 +130,12 @@ abstract class Pasted(prompt: String) { * Everything else is discarded. When the end of the transcript is spotted, * all the commands are replayed. */ - def apply(lines: TraversableOnce[String]): Option[String] = { + def apply(lines: IterableOnce[String]): Option[String] = { isRunning = true - try new PasteAnalyzer(lines.toList).run() + try new PasteAnalyzer(List.from(lines)).run() finally isRunning = false } - def unapply(line: String): Boolean = isPrompted(line) + + // used during loop + def unapply(line: String): Boolean = !running && isPrompted(line) } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala new file mode 100644 index 000000000000..ecb911bc8c72 --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala @@ -0,0 +1,73 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter +package shell + +import scala.util.control.NonFatal + +/** Completion for the REPL. + */ +class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) extends Completion { + + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = { + // special case for: + // + // scala> 1 + // scala> .toInt + val bufferWithVar = + if (Parsed.looksLikeInvocation(buffer)) intp.mostRecentVar + buffer + else buffer + + val bufferWithMultiLine = accumulator.toString + bufferWithVar + val cursor1 = cursor + (bufferWithMultiLine.length - buffer.length) + codeCompletion(bufferWithMultiLine, cursor1, filter) + } + + // A convenience for testing + def complete(before: String, after: String = ""): CompletionResult = complete(before + after, before.length) + + private def codeCompletion(buf: String, cursor: Int, filter: Boolean): CompletionResult = { + require(cursor >= 0 && cursor <= buf.length) + + // secret handshakes + val slashPrint = """.*// *print *""".r + val slashPrintRaw = """.*// *printRaw *""".r + val slashTypeAt = """.*// *typeAt *(\d+) *(\d+) *""".r + try { + intp.presentationCompile(cursor, buf) match { + case Left(_) => NoCompletions + case Right(result) => try { + buf match { + case slashPrint() if cursor == buf.length => + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil), "", "") + case slashPrintRaw() if cursor == buf.length => + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil), "", "") + case slashTypeAt(start, end) if cursor == buf.length => + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil), "", "") + case _ => + // under JLine 3, we no longer use the tabCount concept, so tabCount is always 1 + // which 
always gives us all completions + val (c, r) = result.completionCandidates(filter, tabCount = 1) + val typeAtCursor = result.typeAt(cursor, cursor) + CompletionResult(buf, c, r, typeAtCursor, result.print) + } + } finally result.cleanup() + } + } catch { + case NonFatal(e) => + if (intp.settings.debug.value) + e.printStackTrace() + NoCompletions + } + } +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Reporter.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Reporter.scala new file mode 100644 index 000000000000..e2457b658c5d --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Reporter.scala @@ -0,0 +1,235 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter.shell + +import java.io.PrintWriter +import scala.reflect.internal +import scala.reflect.internal.util.{CodeAction, NoSourceFile, Position, StringOps} +import scala.tools.nsc.interpreter.{Naming, ReplReporter, ReplRequest} +import scala.tools.nsc.reporters.FilteringReporter +import scala.tools.nsc.{ConsoleWriter, NewLinePrintWriter, Settings} + +object ReplReporterImpl { + val defaultOut = new NewLinePrintWriter(new ConsoleWriter, autoFlush = true) +} + +// settings are for AbstractReporter (noWarnings, isVerbose, isDebug) +class ReplReporterImpl(val config: ShellConfig, val settings: Settings = new Settings, writer: PrintWriter = ReplReporterImpl.defaultOut) extends FilteringReporter with ReplReporter { + def this(settings: Settings, writer: PrintWriter) = this(ShellConfig(settings), settings, writer) + def this(settings: Settings) = this(ShellConfig(settings), settings) + + val out: PrintWriter = new ReplStrippingWriter(writer) + private class ReplStrippingWriter(out: 
PrintWriter) extends PrintWriter(out) { + override def write(str: String): Unit = + super.write(unmangleInterpreterOutput(str)) + } + + override def flush() = out.flush() + + // removes trailing space (for clean partest check files?) + private def printlnAndFlush(msg: String): Unit = { + out.println(StringOps.trimAllTrailingSpace(msg)) + flush() + } + + private def indentDepth: Int = config.promptText.linesIterator.toList.last.length + private[this] var indentation: String = " " * indentDepth + def indenting(n: Int)(body: => Unit): Unit = { + val save = indentation + indentation = " " * n + try body finally indentation = save + } + private def indented(str: String) = str.linesIterator.mkString(indentation, "\n" + indentation, "") + + def colorOk: Boolean = config.colorOk + def isDebug: Boolean = config.isReplDebug + def isTrace: Boolean = config.isReplTrace + + var printResults: Boolean = true + override def togglePrintResults(): Unit = printResults = !printResults + def withoutPrintingResults[T](body: => T): T = { + val saved = printResults + printResults = false + try body + finally printResults = saved + } + + override def printResult(result: Either[String, String]): Unit = + result match { + case Right(success) => + if (!success.isEmpty && printResults) + printMessage(success stripSuffix "\n") // TODO: can we avoid having to strip the trailing "\n"? + else if (isDebug) // show quiet-mode activity + printMessage(success.trim.linesIterator map ("[quiet] " + _) mkString "\n") + + case Left(error) => + // don't truncate stack traces + withoutTruncating { printMessage(error) } + } + + // whether to print anything + var totalSilence: Boolean = false + def suppressOutput[T](operation: => T): T = { + val saved = totalSilence + totalSilence = true + try operation + finally totalSilence = saved + } + + /** The maximum length of toString to use when printing the result + * of an evaluation. 0 means no maximum. 
If a printout requires + * more than this number of characters, then the printout is + * truncated. + */ + var maxPrintString = config.maxPrintString.option getOrElse 800 + + /** Whether very long lines can be truncated. This exists so important + * debugging information (like printing the classpath) is not rendered + * invisible due to the max message length. + */ + var truncationOK: Boolean = !settings.verbose.value + + def truncate(str: String): String = + if (truncationOK && (maxPrintString != 0 && str.length > maxPrintString)) (str take maxPrintString - 3) + "..." + else str + + def withoutTruncating[T](body: => T): T = { + val saved = truncationOK + truncationOK = false + try body + finally truncationOK = saved + } + + /** String unwrapping can be disabled if it is causing issues. + * Setting this to false means you will see Strings like "\$iw.\$iw.". + */ + var unwrapStrings = true + def withoutUnwrapping(op: => Unit): Unit = { + val saved = unwrapStrings + unwrapStrings = false ; try op finally unwrapStrings = saved + } + def unwrap(str: String): String = + if (unwrapStrings) Naming.unmangle(str) + else str + + def unmangleInterpreterOutput(str: String): String = truncate(unwrap(str)) + + var currentRequest: ReplRequest = _ + + import scala.io.AnsiColor.{BOLD, BLUE, GREEN, RED, RESET, YELLOW} + + def color(c: String, s: String) = if (colorOk) BOLD + c + s + RESET else s + def nameToCode(s: String) = color(BLUE, s) + def typeToCode(s: String) = color(GREEN, s) + + private def label(severity: Severity): String = severity match { + case internal.Reporter.ERROR => "error" + case internal.Reporter.WARNING => "warning" + case internal.Reporter.INFO => "" + } + + protected def clabel(severity: Severity): String = label(severity) match { + case "" => "" + case s => s"$s: " + } + + def severityColor(severity: Severity): String = severity match { + case internal.Reporter.ERROR => RED + case internal.Reporter.WARNING => YELLOW + case internal.Reporter.INFO => RESET + 
} + + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = withoutTruncating { + val prefix = + if (colorOk) severityColor(severity) + clabel(severity) + RESET + else clabel(severity) + + printMessageAt(pos, prefix + msg) + } + + // indent errors, error message uses the caret to point at the line already on the screen instead of repeating it + // TODO: can we splice the error into the code the user typed when multiple lines were entered? + // (should also comment out the error to keep multi-line copy/pastable) + // TODO: multiple errors are not very intuitive (should the second error for same line repeat the line?) + // TODO: the console could be empty due to external changes (also, :reset? -- see unfortunate example in jvm/interpreter (plusOne)) + def printMessageAt(posIn: Position, msg: String): Unit = { + if ((posIn eq null) || (posIn.source eq NoSourceFile)) printMessage(msg) + else if (posIn.source.file.name == "" && posIn.line == 1) { + // If there's only one line of input, and it's already printed on the console (as indicated by the position's source file name), + // reuse that line in our error output, and suppress the line number (since we know it's `1`) + // NOTE: see e.g. 
test/files/run/repl-colon-type.scala, where the error refers to a line that's not on the screen + printMessage(indentation + posIn.lineCaret) + printMessage(indented(msg)) + } else { + // note the side-effect -- don't move this around + val locationPrefix = + posIn.source.file.name match { + case "" => s"On line ${posIn.line}: " + case n => + // add newline to get away from prompt when we're reporting on a script/paste + printMessage("") + s"$n:${posIn.line}: " + } + + val isSynthetic = posIn.source.file.name == "" + + // for errors in synthetic code, don't remove wrapping so we can see what's really going on + def printLineContent() = printMessage(indentation + posIn.lineContent) + if (isSynthetic) withoutUnwrapping(printLineContent()) else printLineContent() + + printMessage(indentation + posIn.lineCaret) + + msg.indexOf('\n') match { + case -1 => printMessage(s"$locationPrefix$msg") + case n => + val msgFirstLine = msg.substring(0, n) + val msgRest = msg.substring((n + 1) min msg.length) + printMessage(s"$locationPrefix$msgFirstLine") + printMessage(indented(msgRest)) + } + + if (isSynthetic) printMessage("\n(To diagnose errors in synthetic code, try adding `// show` to the end of your input.)") + } + if (settings.prompt.value) displayPrompt() + } + + def printMessage(msg: String): Unit = + if (!totalSilence) printlnAndFlush(msg) + else if (isTrace) printlnAndFlush("[silent] " + msg) + + def displayPrompt(): Unit = + if (!totalSilence) { + out.println() + out.print("a)bort, s)tack, r)esume: ") + out.flush() + Console.in.read match { // TODO: use repl reader? 
+ case 'a' | 'A' => + new Throwable().printStackTrace(out) + System.exit(1) + case 's' | 'S' => + new Throwable().printStackTrace(out) + out.println() + out.flush() + case _ => + } + } + + override def rerunWithDetails(setting: reflect.internal.settings.MutableSettings#Setting, name: String): String = + s"; for details, enable `:setting $name` or `:replay $name`" + + override def finish() = { + if (hasWarnings) printMessage(s"${StringOps.countElementsAsString(warningCount, label(WARNING))} found") + if (hasErrors) printMessage(s"${StringOps.countElementsAsString(errorCount, label(ERROR))} found") + } + +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Scripted.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Scripted.scala new file mode 100644 index 000000000000..ab7f3f0cd96d --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Scripted.scala @@ -0,0 +1,324 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter.shell + +import java.io.{Closeable, OutputStream, PrintWriter, Reader} +import java.util.Arrays.asList +import javax.script._ +import scala.beans.BeanProperty +import scala.jdk.CollectionConverters._ +import scala.reflect.internal.util.{CodeAction, Position} +import scala.tools.nsc.Settings +import scala.tools.nsc.interpreter.Results.Incomplete +import scala.tools.nsc.interpreter.{ImportContextPreamble, ScriptedInterpreter, ScriptedRepl} +import scala.tools.nsc.util.stringFromReader +import scala.util.Properties.versionString + +/* A REPL adaptor for the javax.script API. 
*/ +class Scripted(@BeanProperty val factory: ScriptEngineFactory, settings: Settings, out: PrintWriter) + extends AbstractScriptEngine with Compilable { + + def createBindings: Bindings = new SimpleBindings + + // dynamic context bound under this name + final val ctx = s"$$ctx" + + // the underlying interpreter, tweaked to handle dynamic bindings + val intp: ScriptedRepl = new ScriptedInterpreter(settings, new SaveFirstErrorReporter(settings, out), importContextPreamble) + intp.initializeCompiler() + + var compileContext: ScriptContext = getContext + + + def importContextPreamble(wanted: Set[String]): ImportContextPreamble = { + // cull references that can be satisfied from the current dynamic context + val contextual = wanted & contextNames + + if (contextual.isEmpty) ImportContextPreamble.empty + else { + val adjusted = contextual.map { valname => + s"""def `$valname` = $ctx.`$valname`; """ + + s"""def `${valname}_=`(x: _root_.java.lang.Object) = $ctx.`$valname` = x;""" + }.mkString("", "\n", "\n") + ImportContextPreamble(contextual, Set(ctx), adjusted) + } + } + + // names available in current dynamic context + def contextNames: Set[String] = { + val ctx = compileContext + val terms = for { + scope <- ctx.getScopes.asScala + binding <- Option(ctx.getBindings(scope)) map (_.asScala) getOrElse Nil + key = binding._1 + } yield key + Set.from(terms) + } + + + def dynamicContext_=(ctx: ScriptContext): Unit = intp.call("set", ctx) + + def dynamicContext: ScriptContext = intp.call("value") match { + case Right(scriptctx: ScriptContext) => scriptctx + case Left(e) => throw e + case Right(other) => throw new ScriptException(s"Unexpected value for context: $other") + } + + // TODO: this wrapping probably belongs in ScriptedInterpreter + if (intp.initializeComplete) { + // compile the dynamic ScriptContext object holder + val ctxRes = intp compile s""" + |import _root_.javax.script._ + |object ${intp.evalName} { + | var value: ScriptContext = _ + | def set(x: 
_root_.scala.Any) = value = x.asInstanceOf[ScriptContext] + |} + """.stripMargin + if (!ctxRes) throw new ScriptException("Failed to compile ctx") + dynamicContext = getContext + + // Bridge dynamic references and script context + val dynRes = intp compileString s""" + |package scala.tools.nsc.interpreter + |import _root_.scala.language.dynamics + |import _root_.javax.script._, ScriptContext.ENGINE_SCOPE + |object dynamicBindings extends _root_.scala.Dynamic { + | def context: ScriptContext = ${ intp.evalPath }.value + | // $ctx.x retrieves the attribute x + | def selectDynamic(field: _root_.java.lang.String): _root_.java.lang.Object = context.getAttribute(field) + | // $ctx.x = v + | def updateDynamic(field: _root_.java.lang.String)(value: _root_.java.lang.Object) = context.setAttribute(field, value, ENGINE_SCOPE) + |} + |""".stripMargin + if (!dynRes) throw new ScriptException("Failed to compile dynamicBindings") + intp.reporter.withoutPrintingResults { + intp interpret s"val $ctx: _root_.scala.tools.nsc.interpreter.dynamicBindings.type = _root_.scala.tools.nsc.interpreter.dynamicBindings" + intp bind ("$engine" -> (this: ScriptEngine with Compilable)) + } + } + + // Set the context for dynamic resolution and run the body. + // Defines attributes available for evaluation. + // Avoid reflective access if using default context. + def withScriptContext[A](context: ScriptContext)(body: => A): A = + if (context eq getContext) body else { + val saved = dynamicContext + dynamicContext = context + try body + finally dynamicContext = saved + } + // Defines attributes available for compilation. + def withCompileContext[A](context: ScriptContext)(body: => A): A = { + val saved = compileContext + compileContext = context + try body + finally compileContext = saved + } + + // not obvious that ScriptEngine should accumulate code text + private var code = "" + + /* All scripts are compiled. 
The supplied context defines what references + * not in REPL history are allowed, though a different context may be + * supplied for evaluation of a compiled script. + */ + def compile(script: String, context: ScriptContext): CompiledScript = + withCompileContext(context) { + val cat = code + script + intp.compile(cat, synthetic = false) match { + case Right(req) => + code = "" + new WrappedRequest(req) + case Left(Incomplete) => + code = cat + "\n" + new CompiledScript { + def eval(context: ScriptContext): Object = null + def getEngine: ScriptEngine = Scripted.this + } + case Left(_) => + code = "" + throw intp.reporter.asInstanceOf[SaveFirstErrorReporter].firstError map { + case (pos, msg) => new ScriptException(msg, script, pos.line, pos.column) + } getOrElse new ScriptException("compile-time error") + } + } + + // documentation + //protected var context: ScriptContext + //def getContext: ScriptContext = context + + /* Compile with the default context. All references must be resolvable. */ + @throws[ScriptException] + def compile(script: String): CompiledScript = compile(script, context) + + @throws[ScriptException] + def compile(reader: Reader): CompiledScript = compile(stringFromReader(reader), context) + + /* Compile and evaluate with the given context. 
*/ + @throws[ScriptException] + def eval(script: String, context: ScriptContext): Object = compile(script, context).eval(context) + + @throws[ScriptException] + def eval(reader: Reader, context: ScriptContext): Object = compile(stringFromReader(reader), context).eval(context) + + private class WrappedRequest(val req: intp.Request) extends CompiledScript { + var first = true + + private def evalEither(r: intp.Request, ctx: ScriptContext) = { + if (ctx.getWriter == null && ctx.getErrorWriter == null && ctx.getReader == null) r.eval + else { + val closeables = Array.ofDim[Closeable](2) + val w = if (ctx.getWriter == null) Console.out else { + val v = new WriterOutputStream(ctx.getWriter) + closeables(0) = v + v + } + val e = if (ctx.getErrorWriter == null) Console.err else { + val v = new WriterOutputStream(ctx.getErrorWriter) + closeables(1) = v + v + } + val in = if (ctx.getReader == null) Console.in else ctx.getReader + try { + Console.withOut(w) { + Console.withErr(e) { + Console.withIn(in) { + r.eval + } + } + } + } finally { + closeables foreach (c => if (c != null) c.close()) + } + } + } + + /* First time, cause lazy evaluation of a memoized result. + * Subsequently, instantiate a new object for evaluation. + * Per the API: Checked exception types thrown by underlying scripting implementations + * must be wrapped in instances of ScriptException. + */ + @throws[ScriptException] + override def eval(context: ScriptContext) = + withScriptContext(context) { + if (!first) + intp.addBackReferences(req).fold( + { line => Scripted.this.eval(line); null }, // we're evaluating after recording the request instead of other way around, but that should be ok, right? 
+ evalAndRecord(context, _)) + else try evalAndRecord(context, req) finally first = false + } + + private def evalAndRecord(context: ScriptContext, req: intp.Request) = + evalEither(req, context) match { + case Left(e: RuntimeException) => throw e + case Left(e: Exception) => throw new ScriptException(e) + case Left(e) => throw e + case Right(result) => intp recordRequest req; result.asInstanceOf[Object] + } + + + def getEngine: ScriptEngine = Scripted.this + } +} + +object Scripted { + + class Factory extends ScriptEngineFactory { + @BeanProperty val engineName = "Scala REPL" + + @BeanProperty val engineVersion = "2.0" + + @BeanProperty val extensions = asList("scala") + + @BeanProperty val languageName = "Scala" + + @BeanProperty val languageVersion = versionString + + @BeanProperty val mimeTypes = asList("application/x-scala") + + @BeanProperty val names = asList("scala") + + def getMethodCallSyntax(obj: String, m: String, args: String*): String = args.mkString(s"$obj.$m(", ", ", ")") + + def getOutputStatement(toDisplay: String): String = s"Console.println($toDisplay)" + + def getParameter(key: String): Object = key match { + case ScriptEngine.ENGINE => engineName + case ScriptEngine.ENGINE_VERSION => engineVersion + case ScriptEngine.LANGUAGE => languageName + case ScriptEngine.LANGUAGE_VERSION => languageVersion + case ScriptEngine.NAME => names.get(0) + case _ => null + } + + def getProgram(statements: String*): String = statements.mkString("object Main extends _root_.scala.App {\n\t", "\n\t", "\n}") + + def getScriptEngine: ScriptEngine = { + val settings = new Settings() + settings.usemanifestcp.value = true + Scripted(this, settings) + } + } + + def apply(factory: ScriptEngineFactory = new Factory, settings: Settings = new Settings, out: PrintWriter = ReplReporterImpl.defaultOut) = { + settings.Yreplclassbased.value = true + settings.usejavacp.value = true + val s = new Scripted(factory, settings, out) + s.setBindings(s.createBindings, 
ScriptContext.ENGINE_SCOPE) + s + } +} + +import java.io.Writer +import java.nio.charset.Charset +import java.nio.charset.CodingErrorAction.{REPLACE => Replace} +import java.nio.{ByteBuffer, CharBuffer} + +/* An OutputStream that decodes bytes and flushes to the writer. */ +class WriterOutputStream(writer: Writer) extends OutputStream { + val decoder = Charset.defaultCharset.newDecoder + decoder onMalformedInput Replace + decoder onUnmappableCharacter Replace + + val byteBuffer = ByteBuffer.allocate(64) + val charBuffer = CharBuffer.allocate(64) + + override def write(b: Int): Unit = { + byteBuffer.put(b.toByte) + byteBuffer.flip() + decoder.decode(byteBuffer, charBuffer, /*eoi=*/ false) + if (byteBuffer.remaining == 0) byteBuffer.clear() + if (charBuffer.position() > 0) { + charBuffer.flip() + writer write charBuffer.toString + charBuffer.clear() + } + } + override def close(): Unit = { + decoder.decode(byteBuffer, charBuffer, /*eoi=*/ true) + decoder.flush(charBuffer) + } + override def toString = charBuffer.toString +} + +private class SaveFirstErrorReporter(settings: Settings, out: PrintWriter) extends ReplReporterImpl(settings, out) { + private var _firstError: Option[(Position, String)] = None + def firstError = _firstError + + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = + if (severity == ERROR && _firstError.isEmpty) _firstError = Some((pos, msg)) + + override def reset() = { super.reset(); _firstError = None } + + override def printResult(result: Either[String, String]): Unit = () +} diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ShellConfig.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ShellConfig.scala new file mode 100644 index 000000000000..5d0a4ef4180e --- /dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ShellConfig.scala @@ -0,0 +1,154 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter.shell + +import java.io.File +import java.util.{Formattable, FormattableFlags, Formatter} + +import scala.annotation.nowarn +import scala.sys.{BooleanProp, Prop} +import scala.sys.Prop._ + +import scala.tools.nsc.{GenericRunnerSettings, Settings} +import scala.tools.nsc.Properties.{ + coloredOutputEnabled, envOrNone, javaVersion, javaVmName, + shellBannerString, shellInterruptedString, shellPromptString, shellWelcomeString, + userHome, versionString, versionNumberString, +} + +object ShellConfig { + val EDITOR = envOrNone("EDITOR") + // how to say we :quit + val InterruptedString = shellInterruptedString + + def apply(settings: Settings) = settings match { + case settings: GenericRunnerSettings => new ShellConfig { + val filesToPaste: List[String] = settings.pastefiles.value + val filesToLoad: List[String] = settings.loadfiles.value + val batchText: String = if (settings.execute.isSetByUser) settings.execute.value else "" + val batchMode: Boolean = batchText.nonEmpty + val doCompletion: Boolean = !(settings.noCompletion.value || batchMode) + val haveInteractiveConsole: Boolean = !settings.Xnojline.value + def xsource: String = if (settings.isScala3: @nowarn) settings.source.value.versionString else "" + } + case _ => new ShellConfig { + val filesToPaste: List[String] = Nil + val filesToLoad: List[String] = Nil + val batchText: String = "" + val batchMode: Boolean = false + val doCompletion: Boolean = !settings.noCompletion.value + val haveInteractiveConsole: Boolean = !settings.Xnojline.value + def xsource: String = if (settings.isScala3: @nowarn) settings.source.value.versionString else "" + } + } +} + +trait ShellConfig { + def filesToPaste: List[String] + def filesToLoad: List[String] + def batchText: 
String + def batchMode: Boolean + def doCompletion: Boolean + def haveInteractiveConsole: Boolean + + // source compatibility, i.e., -Xsource + def xsource: String + + private def bool(name: String) = BooleanProp.keyExists(name) + private def int(name: String) = Prop[Int](name) + + // This property is used in TypeDebugging. Let's recycle it. + val colorOk = coloredOutputEnabled + + val historyFile = s"$userHome/.scala_history_jline3" + + private val info = bool("scala.repl.info") + private val debug = bool("scala.repl.debug") + private val trace = bool("scala.repl.trace") + val power = bool("scala.repl.power") + + def enversion(s: String) = { + import FormattableFlags._ + val v = new Formattable { + override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) = { + val version = if ((flags & ALTERNATE) != 0) versionNumberString else versionString + val left = if ((flags & LEFT_JUSTIFY) != 0) "-" else "" + val w = if (width >= 0) s"$width" else "" + val p = if (precision >= 0) s".$precision" else "" + val fmt = s"%${left}${w}${p}s" + + val xversion = if (xsource.isEmpty) version else s"$version -Xsource:$xsource" + formatter.format(fmt, xversion) + } + } + s.format(v, javaVersion, javaVmName) + } + def encolor(s: String) = { + import scala.io.AnsiColor.{MAGENTA, RESET} + if (colorOk) s"$MAGENTA$s$RESET" else s + } + + // Handy system prop for shell prompt, or else pick it up from compiler.properties + val promptString = Prop[String]("scala.repl.prompt").option getOrElse (if (info) "%nscala %#s> " else shellPromptString) + val promptText = enversion(promptString) + + + // Prompt for continued input, will be right-adjusted to width of the primary prompt + val continueString = Prop[String]("scala.repl.continue").option getOrElse "| " + val continueText = { + val text = enversion(continueString) + val margin = promptText.linesIterator.toList.last.length - text.length + if (margin > 0) " " * margin + text else text + } + + // What to display at 
REPL startup. + val welcomeString = Prop[String]("scala.repl.welcome").option match { + case Some("banner") => shellBannerString + case Some(text) => text + case _ => shellWelcomeString + } + + val pasteDelimiter = Prop[String]("scala.repl.here") + + /** CSV of paged,across to enable pagination or `-x` style + * columns, "across" instead of down the column. Since + * pagination turns off columnar output, these flags are + * currently mutually exclusive. + */ + val format = Prop[String]("scala.repl.format") + val isPaged: Boolean = format.isSet && csv(format.get, "paged") + val isAcross: Boolean = format.isSet && csv(format.get, "across") + private def csv(p: String, v: String) = p.split(",").contains(v) + + val replAutorunCode = Prop[File]("scala.repl.autoruncode") + val powerInitCode = Prop[File]("scala.repl.power.initcode") + val powerBanner = Prop[File]("scala.repl.power.banner") + + val maxPrintString = int("scala.repl.maxprintstring") + + def isReplInfo: Boolean = info || isReplDebug + def replinfo(msg: => String) = if (isReplInfo) echo(msg) + def isReplDebug: Boolean = debug || isReplTrace + def repldbg(msg: => String) = if (isReplDebug) echo(msg) + def isReplTrace: Boolean = trace + def repltrace(msg: => String) = if (isReplTrace) echo(msg) + + def isReplPower: Boolean = power + + private def echo(msg: => String) = + try Console.println(msg) + catch { + case e: AssertionError => + Console.println(s"Assertion error printing debugging output: $e") + } +} diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/SimpleHistory.scala similarity index 81% rename from src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/SimpleHistory.scala index 3fa1d88251ec..1f8810a5baa1 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala +++ 
b/src/repl-frontend/scala/tools/nsc/interpreter/shell/SimpleHistory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,11 +10,9 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc -package interpreter -package session +package scala.tools.nsc.interpreter.shell -import scala.collection.mutable.{ Buffer, ListBuffer } +import scala.collection.mutable.{Buffer, ListBuffer} class SimpleHistory extends History { private var _index: Int = 0 @@ -24,9 +22,7 @@ class SimpleHistory extends History { private def plusOne = { _index += 1 ; true } private def lastIndex = size - 1 private def fail(msg: String): String = { - repldbg("Internal error in history(size %d, index %d): %s".format( - size, index, msg) - ) +// repldbg(s"Internal error in history(size $size, index $index): $msg") "" } @@ -38,16 +34,16 @@ class SimpleHistory extends History { def isEmpty = buf.isEmpty def clear() = buf.clear() def get(idx: Int): CharSequence = buf(idx) - def add(item: CharSequence): Unit = buf += item + def add(item: CharSequence): Unit = buf += item.toString def replace(item: CharSequence): Unit = { - buf trimEnd 1 + buf dropRightInPlace 1 add(item) } def remove(idx: Int): CharSequence = buf remove idx def removeFirst(): CharSequence = buf remove 0 def removeLast(): CharSequence = buf remove lastIndex - def set(idx: Int, to: CharSequence): Unit = buf(idx) = to + def set(idx: Int, to: CharSequence): Unit = buf(idx) = to.toString def current() = if (index >= 0 && index < buf.size) buf(index) else fail("current()") def previous() = (index > 0) && minusOne diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/SimpleReader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/SimpleReader.scala new file mode 100644 index 000000000000..50bb4d65ff01 --- 
/dev/null +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/SimpleReader.scala @@ -0,0 +1,60 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.interpreter.shell + +import java.io.{BufferedReader, StringReader, PrintWriter => JPrintWriter} + +/** Reads using standard JDK API. */ +class SimpleReader(in: BufferedReader, out: JPrintWriter, val completion: Completion, val interactive: Boolean, val verbose: Boolean) extends InteractiveReader { + val history = NoHistory + val accumulator = new Accumulator + + override def reset() = accumulator.reset() + def redrawLine() = () + + // InteractiveReader internals + protected def readOneLine(prompt: String): String = { + echo(prompt) + + val input = readOneLine() + + // pretend we are a console for verbose purposes + // if there is more input, then echo the prompt and the input + if (input != null && verbose) echo(f"$prompt$input%n") + + input + } + + protected def readOneLine(): String = in.readLine() + protected def echo(s: String): Unit = if (interactive) { + out.print(s) + out.flush() + } + + override def close(): Unit = () +} + +object SimpleReader { + def defaultIn = Console.in + def defaultOut = new JPrintWriter(Console.out) + + def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, completion: Completion = NoCompletion, interactive: Boolean = true, verbose: Boolean = false): SimpleReader = + new SimpleReader(in, out, completion, interactive, verbose) + + // a non-interactive SimpleReader that returns the given text + def apply(text: String): SimpleReader = apply( + in = new BufferedReader(new StringReader(text)), + out = defaultOut, + interactive = false + ) +} diff --git 
a/src/repl/scala/tools/nsc/interpreter/Tabulators.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Tabulators.scala similarity index 80% rename from src/repl/scala/tools/nsc/interpreter/Tabulators.scala rename to src/repl-frontend/scala/tools/nsc/interpreter/shell/Tabulators.scala index f9694f5af262..3b864eb10d75 100644 --- a/src/repl/scala/tools/nsc/interpreter/Tabulators.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Tabulators.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,7 +10,8 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc.interpreter +package scala.tools.nsc.interpreter.shell +import scala.util.chaining._ trait Tabulator { def isAcross: Boolean @@ -18,7 +19,7 @@ trait Tabulator { def marginSize: Int protected def fits(items: Seq[String], width: Int): Boolean = ( - (items map (_.length)).sum + (items.length - 1) * marginSize < width + (items map (graphemeCount)).sum + (items.length - 1) * marginSize < width ) def tabulate(items: Seq[String]): Seq[Seq[String]] = ( if (fits(items, width)) Seq(Seq(items mkString " " * marginSize)) @@ -26,8 +27,8 @@ trait Tabulator { ) protected def columnize(ss: Seq[String]): Seq[Seq[String]] = ss map (s => Seq(s)) protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = { - import scala.tools.nsc.interpreter.SimpleMath._ - val longest = (items map (_.length)).max + import SimpleMath._ + val longest = (items map (graphemeCount)).max val columnWidth = longest + marginSize val maxcols = ( if (columnWidth >= width) 1 @@ -36,7 +37,7 @@ trait Tabulator { val nrows = items.size /% maxcols val ncols = items.size /% nrows val groupSize = ncols - val padded = items map (s"%-${columnWidth}s" format _) + val padded = items map (pad(columnWidth, _)) val xwise = 
isAcross || ncols >= items.length val grouped: Seq[Seq[String]] = if (groupSize == 1) columnize(items) @@ -50,14 +51,25 @@ trait Tabulator { } grouped } + + protected def graphemeCount(s: String): Int = { + import java.text.BreakIterator + val it = BreakIterator.getCharacterInstance + it.setText(s) + Iterator.continually(it.next()).takeWhile(_ != BreakIterator.DONE).size + } + + protected def pad(width: Int, s: String): String = { + val count = 0 max (width - graphemeCount(s)) + s + (" " * count) + } } /** Adjust the column width and number of columns to minimize the row count. */ trait VariColumnTabulator extends Tabulator { override protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = { - import scala.tools.nsc.interpreter.SimpleMath._ - val longest = (items map (_.length)).max - val shortest = (items map (_.length)).min + import SimpleMath._ + val (longest, shortest) = items.map(graphemeCount).pipe(vs => (vs.max, vs.min)) val fattest = longest + marginSize val skinny = shortest + marginSize @@ -69,7 +81,7 @@ trait VariColumnTabulator extends Tabulator { // max width item in each column def maxima(rows: Seq[Seq[String]]) = (0 until (ncols min items.size)) map { col => - val widths = for (r <- rows if r.size > col) yield r(col).length + val widths = for (r <- rows if r.size > col) yield graphemeCount(r(col)) widths.max } def resulting(rows: Seq[Seq[String]]) = { @@ -105,7 +117,7 @@ trait VariColumnTabulator extends Tabulator { // format to column width sss map (ss => ss.zipWithIndex map { - case (s, i) => s"%-${columnWidths(i)}s" format s + case (s, i) => pad(columnWidths(i), s) }) } } diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala deleted file mode 100644 index e7ad1bf9693d..000000000000 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Scala 
(https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc.interpreter.jline - -import _root_.jline.console.history.PersistentHistory - -import scala.tools.nsc.interpreter -import scala.reflect.io.{File, Path} -import scala.tools.nsc.Properties.{propOrNone, userHome} -import scala.reflect.internal.util.OwnerOnlyChmod -import scala.util.control.NonFatal - -/** TODO: file locking. - */ -trait FileBackedHistory extends JLineHistory with PersistentHistory { - def maxSize: Int - - // For a history file in the standard location, always try to restrict permission, - // creating an empty file if none exists. - // For a user-specified location, only lock down permissions if we're the ones - // creating it, otherwise responsibility for permissions is up to the caller. - protected lazy val historyFile: File = File { - propOrNone("scala.shell.histfile").map(Path.apply) match { - case Some(p) => if (!p.exists) secure(p) else p - case None => secure(Path(userHome) / FileBackedHistory.defaultFileName) - } - } - - private def secure(p: Path): Path = { - try OwnerOnlyChmod.chmodFileOrCreateEmpty(p.jfile.toPath) - catch { case NonFatal(e) => - if (interpreter.isReplDebug) e.printStackTrace() - interpreter.replinfo(s"Warning: history file ${p}'s permissions could not be restricted to owner-only.") - } - - p - } - - private var isPersistent = true - - locally { - load() - } - - def withoutSaving[T](op: => T): T = { - val saved = isPersistent - isPersistent = false - try op - finally isPersistent = saved - } - - def addLineToFile(item: CharSequence): Unit = { - if (isPersistent) - append(item + "\n") - } - - /** Overwrites the history file with the current memory. 
*/ - protected def sync(): Unit = { - val lines = asStrings map (_ + "\n") - historyFile.writeAll(lines: _*) - } - - /** Append one or more lines to the history file. */ - protected def append(lines: String*): Unit = { - historyFile.appendAll(lines: _*) - } - - def load(): Unit = { - if (!historyFile.canRead) - historyFile.createFile() - - val lines: IndexedSeq[String] = { - try historyFile.lines().toIndexedSeq - catch { - // It seems that control characters in the history file combined - // with the default codec can lead to nio spewing exceptions. Rather - // than abandon hope we'll try to read it as ISO-8859-1 - case _: Exception => - try historyFile.lines("ISO-8859-1").toIndexedSeq - catch { - case _: Exception => Vector() - } - } - } - - interpreter.repldbg("Loading " + lines.size + " into history.") - - // avoid writing to the history file - withoutSaving(lines takeRight maxSize foreach add) - // truncate the history file if it's too big. - if (lines.size > maxSize) { - interpreter.repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.") - sync() - } - moveToEnd() - } - - def flush(): Unit = () - - def purge(): Unit = historyFile.truncate() -} - -object FileBackedHistory { - // val ContinuationChar = '\003' - // val ContinuationNL: String = Array('\003', '\n').mkString - - final val defaultFileName = ".scala_history" -} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala deleted file mode 100644 index 46aab3bfba80..000000000000 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.nsc.interpreter.jline - -import scala.tools.nsc.interpreter - -import _root_.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } - -// implements a jline interface -class JLineDelimiter extends ArgumentDelimiter { - def toJLine(args: List[String], cursor: Int): ArgumentList = args match { - case Nil => new ArgumentList(Array.empty[String], 0, 0, cursor) - case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor) - } - - def delimit(buffer: CharSequence, cursor: Int) = { - val p = interpreter.Parsed(buffer.toString, cursor) - toJLine(p.args, cursor) - } - - def isDelimiter(buffer: CharSequence, cursor: Int) = interpreter.Parsed(buffer.toString, cursor).isDelimiter -} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala deleted file mode 100644 index fd09f0590b24..000000000000 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc.interpreter.jline - -import java.util.{Iterator => JIterator, ListIterator => JListIterator} - -import _root_.jline.{console => jconsole} -import jconsole.history.History.{Entry => JEntry} -import jconsole.history.{History => JHistory} - -import scala.tools.nsc.interpreter -import scala.tools.nsc.interpreter.session.{History, SimpleHistory} - - -/** A straight scalafication of the jline interface which mixes - * in the sparse jline-independent one too. 
- */ -trait JLineHistory extends JHistory with History { - def size: Int - def isEmpty: Boolean - def index: Int - def clear(): Unit - def get(index: Int): CharSequence - def add(line: CharSequence): Unit - def replace(item: CharSequence): Unit - - def entries(index: Int): JListIterator[JEntry] - def entries(): JListIterator[JEntry] - def iterator: JIterator[JEntry] - - def current(): CharSequence - def previous(): Boolean - def next(): Boolean - def moveToFirst(): Boolean - def moveToLast(): Boolean - def moveTo(index: Int): Boolean - def moveToEnd(): Unit - - override def historicize(text: String): Boolean = { - text.linesIterator foreach add - moveToEnd() - true - } -} - -object JLineHistory { - class JLineFileHistory extends SimpleHistory with FileBackedHistory { - override def add(item: CharSequence): Unit = { - if (!isEmpty && last == item) - interpreter.repldbg("Ignoring duplicate entry '" + item + "'") - else { - super.add(item) - addLineToFile(item) - } - } - override def toString = "History(size = " + size + ", index = " + index + ")" - - import scala.collection.JavaConverters._ - - override def asStrings(from: Int, to: Int): List[String] = - entries(from).asScala.take(to - from).map(_.value.toString).toList - - case class Entry(index: Int, value: CharSequence) extends JEntry { - override def toString = value.toString - } - - private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x)} - def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx) - def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator() - def iterator: JIterator[JEntry] = toEntries().iterator.asJava - } - - def apply(): History = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() } -} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala deleted file mode 100644 index b8a9f2e5bf51..000000000000 --- 
a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc.interpreter.jline - -import java.util.{Collection => JCollection, List => JList} - -import _root_.jline.{console => jconsole} -import jline.console.completer.{CandidateListCompletionHandler, Completer} -import jconsole.history.{History => JHistory} - -import scala.tools.nsc.interpreter -import scala.tools.nsc.interpreter.{Completion, NoCompletion} -import scala.tools.nsc.interpreter.Completion.Candidates -import scala.tools.nsc.interpreter.session.History - -/** - * Reads from the console using JLine. - * - * Eagerly instantiates all relevant JLine classes, so that we can detect linkage errors on `new JLineReader` and retry. - */ -class InteractiveReader(completer: () => Completion) extends interpreter.InteractiveReader { - val interactive = true - - val history: History = new JLineHistory.JLineFileHistory() - - private val consoleReader = { - val reader = new JLineConsoleReader() - - reader setPaginationEnabled interpreter.isPaged - - // turn off magic ! 
- reader setExpandEvents false - - // enable detecting pasted tab char (when next char is immediately available) which is taken raw, not completion - reader setCopyPasteDetection true - - reader setHistory history.asInstanceOf[JHistory] - - reader - } - - private[this] var _completion: Completion = interpreter.NoCompletion - def completion: Completion = _completion - - override def postInit() = { - _completion = completer() - - consoleReader.initCompletion(completion) - } - - def reset() = consoleReader.getTerminal().reset() - def redrawLine() = consoleReader.redrawLineAndFlush() - def readOneLine(prompt: String) = consoleReader.readLine(prompt) - def readOneKey(prompt: String) = consoleReader.readOneKey(prompt) -} - -// implements a jline interface -private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter.VariColumnTabulator { - val isAcross = interpreter.`package`.isAcross - val marginSize = 3 - - def width = getTerminal.getWidth() - def height = getTerminal.getHeight() - - private def morePrompt = "--More--" - - private def emulateMore(): Int = { - val key = readOneKey(morePrompt) - try key match { - case '\r' | '\n' => 1 - case 'q' => -1 - case _ => height - 1 - } - finally { - eraseLine() - // TODO: still not quite managing to erase --More-- and get - // back to a scala prompt without another keypress. 
- if (key == 'q') { - putString(getPrompt()) - redrawLine() - flush() - } - } - } - - override def printColumns(items: JCollection[_ <: CharSequence]): Unit = { - import scala.tools.nsc.interpreter.javaCharSeqCollectionToScala - printColumns_(items: List[String]) - } - - private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) { - val grouped = tabulate(items) - var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue - grouped foreach { xs => - println(xs.mkString) - linesLeft -= 1 - if (linesLeft <= 0) { - linesLeft = emulateMore() - if (linesLeft < 0) - return - } - } - } - - def readOneKey(prompt: String) = { - this.print(prompt) - this.flush() - this.readCharacter() - } - - def eraseLine() = resetPromptLine("", "", 0) - - def redrawLineAndFlush(): Unit = { - flush(); drawLine(); flush() - } - - // A hook for running code after the repl is done initializing. - def initCompletion(completion: Completion): Unit = { - this setBellEnabled false - - // adapt the JLine completion interface - def completer = - new Completer { - val tc = completion - def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { - val buf = if (_buf == null) "" else _buf - val Candidates(newCursor, newCandidates) = completion.complete(buf, cursor) - newCandidates foreach (candidates add _) - newCursor - } - } - getCompletionHandler match { - case clch: CandidateListCompletionHandler => clch.setPrintSpaceAfterFullCompletion(false) - } - - completion match { - case NoCompletion => () - case _ => this addCompleter completer - } - - setAutoprintThreshold(400) // max completion candidates without warning - } -} diff --git a/src/repl/scala/tools/nsc/Interpreter.scala b/src/repl/scala/tools/nsc/Interpreter.scala deleted file mode 100644 index 2f7e724eb31f..000000000000 --- a/src/repl/scala/tools/nsc/Interpreter.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc - -import interpreter._ -import java.io._ - -/** A compatibility stub. - */ -@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0") -class Interpreter(settings: Settings, out: PrintWriter) extends IMain(settings, out) { - def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) - def this() = this(new Settings()) -} \ No newline at end of file diff --git a/src/repl/scala/tools/nsc/InterpreterLoop.scala b/src/repl/scala/tools/nsc/InterpreterLoop.scala deleted file mode 100644 index 80a777339443..000000000000 --- a/src/repl/scala/tools/nsc/InterpreterLoop.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc - -import interpreter._ -import java.io._ - -/** A compatibility stub for sbt. 
- */ -@deprecated("Use scala.tools.nsc.interpreter.ILoop.", "2.9.0") -class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) extends ILoop(in0, out) { - def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out) - def this() = this(None, new PrintWriter(scala.Console.out)) - - override protected final val isSbt = true - - @deprecated("use `process` instead", "2.9.0") - def main(settings: Settings): Unit = process(settings) //used by sbt -} diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala deleted file mode 100644 index 6013d41f194f..000000000000 --- a/src/repl/scala/tools/nsc/MainGenericRunner.scala +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package tools.nsc - -import io.File -import util.ClassPath -import interpreter.ILoop -import GenericRunnerCommand._ - -object JarRunner extends CommonRunner { - def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Either[Throwable, Boolean] = { - val jar = new io.Jar(jarPath) - val mainClass = jar.mainClass getOrElse sys.error("Cannot find main class for jar: " + jarPath) - val jarURLs = ClassPath expandManifestPath jarPath - val urls = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs - - if (settings.Ylogcp) { - Console.err.println("Running jar with these URLs as the classpath:") - urls foreach println - } - - runAndCatch(urls, mainClass, arguments) - } -} - -/** An object that runs Scala code. It has three possible - * sources for the code to run: pre-compiled code, a script file, - * or interactive entry. 
- */ -class MainGenericRunner { - def errorFn(str: String, e: Option[Throwable] = None, isFailure: Boolean = true): Boolean = { - if (str.nonEmpty) Console.err println str - e foreach (_.printStackTrace()) - !isFailure - } - - def process(args: Array[String]): Boolean = { - val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x)) - import command.{settings, howToRun, thingToRun, shortUsageMsg} - - // only created for info message - def sampleCompiler = new Global(settings) - - def run(): Boolean = { - def isE = settings.execute.isSetByUser - def dashe = settings.execute.value - - // when -e expr -howtorun script, read any -i or -I files and append expr - // the result is saved to a tmp script file and run - def combinedCode = { - val files = - for { - dashi <- List(settings.loadfiles, settings.pastefiles) if dashi.isSetByUser - path <- dashi.value - } yield File(path).slurp() - - (files :+ dashe).mkString("\n\n") - } - - def runTarget(): Either[Throwable, Boolean] = howToRun match { - case AsObject => - ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments) - case AsScript if isE => - Right(ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments)) - case AsScript => - ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments) - case AsJar => - JarRunner.runJar(settings, thingToRun, command.arguments) - case Error => - Right(false) - case _ => - // We start the repl when no arguments are given. - // If user is agnostic about both -feature and -deprecation, turn them on. 
- if (settings.deprecation.isDefault && settings.feature.isDefault) { - settings.deprecation.value = true - settings.feature.value = true - } - Right(new ILoop().process(settings)) - } - - runTarget() match { - case Left(ex) => errorFn("", Some(ex)) // there must be a useful message of hope to offer here - case Right(b) => b - } - } - - if (!command.ok) - errorFn(f"%n$shortUsageMsg") - else if (command.shouldStopWithInfo) - errorFn(command.getInfoMessage(sampleCompiler), isFailure = false) - else - run() - } -} - -object MainGenericRunner extends MainGenericRunner { - def main(args: Array[String]): Unit = if (!process(args)) sys.exit(1) -} diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala deleted file mode 100644 index e154335e7ff9..000000000000 --- a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package interpreter - -import scala.reflect.io.AbstractFile - -@deprecated("Use `scala.tools.nsc.util.AbstractFileClassLoader`", "2.11.0") -class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) extends util.AbstractFileClassLoader(root, parent) diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala index 3f4b51d7d19b..0be1ce5ce6d3 100644 --- a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala +++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,39 +10,31 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc -package interpreter +package scala.tools.nsc.interpreter -class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends PartialFunction[Throwable, T] { - def isDefinedAt(t: Throwable) = t match { - case _: AbstractMethodError => true - case _: NoSuchMethodError => true - case _: MissingRequirementError => true - case _: NoClassDefFoundError => true - case _ => false - } - def apply(t: Throwable) = t match { - case x @ (_: AbstractMethodError | _: NoSuchMethodError | _: NoClassDefFoundError) => - onError(""" - |Failed to initialize compiler: %s. +import scala.reflect.internal.MissingRequirementError + +object AbstractOrMissingHandler { + def apply[T]() = create[T](Console println _, null.asInstanceOf[T]) + + private def create[T](onError: String => Unit, value: T): PartialFunction[Throwable, T] = { + case e @ (_: AbstractMethodError | _: NoSuchMethodError | _: NoClassDefFoundError) => + onError(s""" + |Failed to initialize compiler: ${e.getClass.getName.split('.').last}. |This is most often remedied by a full clean and recompile. |Otherwise, your classpath may continue bytecode compiled by |different and incompatible versions of scala. - |""".stripMargin.format(x.getClass.getName split '.' last) + |""".stripMargin ) - x.printStackTrace() + e.printStackTrace() value - case x: MissingRequirementError => - onError(""" - |Failed to initialize compiler: %s not found. + case e: MissingRequirementError => + onError(s""" + |Failed to initialize compiler: ${e.req} not found. |** Note that as of 2.8 scala does not assume use of the java classpath. 
|** For the old behavior pass -usejavacp to scala, or if using a Settings - |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req) + |** object programmatically, settings.usejavacp.value = true.""".stripMargin ) value } } - -object AbstractOrMissingHandler { - def apply[T]() = new AbstractOrMissingHandler[T](Console println _, null.asInstanceOf[T]) -} diff --git a/src/repl/scala/tools/nsc/interpreter/CommandLine.scala b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala deleted file mode 100644 index 32aa8ae9275e..000000000000 --- a/src/repl/scala/tools/nsc/interpreter/CommandLine.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package interpreter - -/** A command line for the interpreter. - */ -class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) { - override def cmdName = "scala" -} diff --git a/src/repl/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala deleted file mode 100644 index aef8079b7ace..000000000000 --- a/src/repl/scala/tools/nsc/interpreter/Completion.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package interpreter - -import Completion._ - -/** An implementation-agnostic completion interface which makes no - * reference to the jline classes. 
- */ -trait Completion { - def resetVerbosity(): Unit - def complete(buffer: String, cursor: Int): Candidates -} -object NoCompletion extends Completion { - def resetVerbosity() = () - def complete(buffer: String, cursor: Int) = NoCandidates -} - -object Completion { - case class Candidates(cursor: Int, candidates: List[String]) - val NoCandidates = Candidates(-1, Nil) - - // a leading dot plus something, but not ".." or "./", ignoring leading whitespace - private val dotlike = """\s*\.[^./].*""".r - def looksLikeInvocation(code: String) = code match { - case null => false // insurance - case dotlike() => true - case _ => false - } -} diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala index ad47c7c2a7ed..71b3927477f8 100644 --- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,18 +10,19 @@ * additional information regarding copyright ownership. */ -package scala.tools.nsc -package interpreter +package scala.tools.nsc.interpreter + +import Results.{Result, Success} trait ExprTyper { val repl: IMain import repl._ - import global.{ reporter => _, Import => _, _ } + import global.{ phase, Symbol, Type, exitingTyper, NoSymbol, NoType, NoPrefix, TypeRef, WildcardType } import naming.freshInternalVarName import global.definitions.{ MaxFunctionArity, NothingTpe } - private def doInterpret(code: String): IR.Result = { + private def doInterpret(code: String): Result = { // interpret/interpretSynthetic may change the phase, which would have unintended effects on types. 
val savedPhase = phase try interpretSynthetic(code) finally phase = savedPhase @@ -36,7 +37,7 @@ trait ExprTyper { val line = "def " + name + " = " + code doInterpret(line) match { - case IR.Success => + case Success => val sym0 = symbolOfTerm(name) // drop NullaryMethodType sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType) @@ -47,7 +48,7 @@ trait ExprTyper { val old = repl.definedSymbolList.toSet doInterpret(code) match { - case IR.Success => + case Success => repl.definedSymbolList filterNot old match { case Nil => NoSymbol case sym :: Nil => sym @@ -60,13 +61,13 @@ trait ExprTyper { doInterpret(code) NoSymbol } - beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError() + reporter.suppressOutput { asExpr() orElse asDefn() } orElse asError() } private var typeOfExpressionDepth = 0 def typeOfExpression(expr: String, silent: Boolean = true): Type = { if (typeOfExpressionDepth > 2) { - repldbg("Terminating typeOfExpression recursion for expression: " + expr) +// repldbg("Terminating typeOfExpression recursion for expression: " + expr) return NoType } typeOfExpressionDepth += 1 @@ -74,7 +75,7 @@ trait ExprTyper { // while letting errors through, so it is first trying it silently: if there // is an error, and errors are desired, then it re-evaluates non-silently // to induce the error message. - try beSilentDuring(symbolOfLine(expr).tpe) match { + try reporter.suppressOutput(symbolOfLine(expr).tpe) match { case NoType if !silent => symbolOfLine(expr).tpe // generate error case tpe => tpe } @@ -98,7 +99,7 @@ trait ExprTyper { case tpe => tpe } } - val typeOpt = (properTypeOpt /: (1 to MaxFunctionArity)) { + val typeOpt = (1 to MaxFunctionArity).foldLeft(properTypeOpt){ (acc, n: Int) => acc orElse typeFromTypeString(n) } typeOpt getOrElse NoType } @@ -109,7 +110,7 @@ trait ExprTyper { val name = freshInternalVarName() val line = s"def $name: $typeString = ???" 
doInterpret(line) match { - case IR.Success => + case Success => val tpe0 = exitingTyper { symbolOfTerm(name).asMethod.returnType } @@ -117,6 +118,6 @@ trait ExprTyper { case _ => None } } - beSilentDuring(asProperType()) + reporter.suppressOutput(asProperType()) } } diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala deleted file mode 100644 index b611f58d89a6..000000000000 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ /dev/null @@ -1,1121 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package tools.nsc -package interpreter - -import scala.language.{implicitConversions, existentials} -import scala.annotation.tailrec -import Predef.{println => _, _} -import PartialFunction.{cond => when} -import interpreter.session._ -import StdReplTags._ -import scala.tools.asm.ClassReader -import scala.tools.nsc.util.{ClassPath, stringFromStream} -import scala.reflect.classTag -import scala.reflect.internal.util.{BatchSourceFile, NoPosition} -import scala.reflect.io.{Directory, File, Path} -import io.AbstractFile -import scala.concurrent.{Await, Future} -import java.io.BufferedReader - -import scala.util.{Try, Success, Failure} - -import Completion._ - -/** The Scala interactive shell. It provides a read-eval-print loop - * around the Interpreter class. - * After instantiation, clients should call the main() method. - * - * If no in0 is specified, then input will come from the console, and - * the class will attempt to provide input editing feature such as - * input history. - * - * @author Moez A. 
Abdel-Gawad - * @author Lex Spoon - * @version 1.2 - */ -class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extends LoopCommands { - def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out) - def this() = this(None, new JPrintWriter(Console.out, true)) - - @deprecated("use `intp` instead.", "2.9.0") def interpreter = intp - @deprecated("use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i - - var in: InteractiveReader = _ // the input stream from which commands come - var settings: Settings = _ - var intp: IMain = _ - - private var globalFuture: Future[Boolean] = _ - - // ignore silent sbt errors on init - protected def isSbt: Boolean = false - - /** Print a welcome message! */ - def printWelcome(): Unit = { - Option(replProps.welcome) filter (!_.isEmpty) foreach echo - replinfo("[info] started at " + new java.util.Date) - } - - protected def asyncMessage(msg: String) { - if (isReplInfo || isReplPower) - echoAndRefresh(msg) - } - - override def echoCommandMessage(msg: String) { - intp.reporter printUntruncatedMessage msg - } - - lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals]) - def history = in.history - - // classpath entries added via :cp - @deprecated("use reset, replay or require to update class path", since = "2.11.0") - var addedClasspath: String = "" - - /** A reverse list of commands to replay if the user requests a :replay */ - var replayCommandStack: List[String] = Nil - - /** A list of commands to replay if the user requests a :replay */ - def replayCommands = replayCommandStack.reverse - - /** Record a command for replay should the user request a :replay */ - def addReplay(cmd: String) = replayCommandStack ::= cmd - - def savingReplayStack[T](body: => T): T = { - val saved = replayCommandStack - try body - finally replayCommandStack = saved - } - def savingReader[T](body: => T): T = { - val saved = in - try body - finally in = saved - } - 
- /** Close the interpreter and set the var to null. */ - def closeInterpreter() { - if (intp ne null) { - intp.close() - intp = null - } - } - - class ILoopInterpreter extends IMain(settings, out) { - override protected def parentClassLoader = { - val replClassLoader = classOf[ILoop].getClassLoader // might be null if we're on the boot classpath - settings.explicitParentLoader.orElse(Option(replClassLoader)).getOrElse(ClassLoader.getSystemClassLoader) - } - } - - /** Create a new interpreter. */ - def createInterpreter() { - if (addedClasspath != "") - settings.classpath append addedClasspath - - intp = new ILoopInterpreter - } - - /** Show the history */ - lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)", None) { - override def usage = "[num]" - def defaultLines = 20 - - def apply(line: String): Result = { - if (history eq NoHistory) - return "No history available." - - val xs = words(line) - val current = history.index - val count = try xs.head.toInt catch { case _: Exception => defaultLines } - val lines = history.asStrings takeRight count - val offset = current - lines.size + 1 - - for ((line, index) <- lines.zipWithIndex) - echo("%3d %s".format(index + offset, line)) - } - } - - // When you know you are most likely breaking into the middle - // of a line being typed. This softens the blow. 
- protected def echoAndRefresh(msg: String) = { - echo("\n" + msg) - in.redrawLine() - } - protected var mum = false - protected def echo(msg: String) = if (!mum) { - out println msg - out.flush() - } - // turn off intp reporter and our echo - def mumly[A](op: => A): A = - if (isReplDebug) op - else intp beQuietDuring { - val saved = mum - mum = true - try op finally mum = saved - } - - /** Search the history */ - def searchHistory(_cmdline: String) { - val cmdline = _cmdline.toLowerCase - val offset = history.index - history.size + 1 - - for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline) - echo("%d %s".format(index + offset, line)) - } - - /** Prompt to print when awaiting input */ - def prompt = replProps.prompt - - import LoopCommand.{ cmd, nullary, cmdWithHelp } - - /** Standard commands **/ - lazy val standardCommands = List( - cmd("completions", "", "output completions for the given string", completionsCommand), - cmd("edit", "|", "edit history", editCommand), - cmd("help", "[command]", "print this summary or command-specific help", helpCommand), - historyCommand, - cmd("h?", "", "search the history", searchHistory), - cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand), - cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand), - cmd("javap", "", "disassemble a file or class name", javapCommand), - cmd("line", "|", "place line(s) at the end of history", lineCommand), - cmd("load", "", "interpret lines in a file", loadCommand, fileCompletion), - cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand, fileCompletion), - nullary("power", "enable power user mode", powerCmd), - nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)), - cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand, settingsCompletion), - cmd("require", "", "add a jar to the classpath", 
require), - cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand, settingsCompletion), - cmd("save", "", "save replayable session to a file", saveCommand, fileCompletion), - shCommand, - cmd("settings", "", "update compiler options, if possible; see reset", changeSettings, settingsCompletion), - nullary("silent", "disable/enable automatic printing of results", verbosity), - cmd("type", "[-v] ", "display the type of an expression without evaluating it", typeCommand), - cmdWithHelp("kind", kindUsage, "display the kind of a type. see also :help kind", Some(kindCommandDetailedHelp), kindCommand), - nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand) - ) - - /** Power user commands */ - lazy val powerCommands: List[LoopCommand] = List( - cmd("phase", "", "set the implicit phase for power commands", phaseCommand) - ) - - // complete filename - val fileCompletion: Completion = new Completion { - def resetVerbosity(): Unit = () - val emptyWord = """(\s+)$""".r.unanchored - val directorily = """(\S*/)$""".r.unanchored - val trailingWord = """(\S+)$""".r.unanchored - def listed(i: Int, dir: Option[Path]) = - dir.filter(_.isDirectory).map(d => Candidates(i, d.toDirectory.list.map(_.name).toList)).getOrElse(NoCandidates) - def listedIn(dir: Directory, name: String) = dir.list.filter(_.name.startsWith(name)).map(_.name).toList - def complete(buffer: String, cursor: Int): Candidates = - buffer.substring(0, cursor) match { - case emptyWord(s) => listed(cursor, Directory.Current) - case directorily(s) => listed(cursor, Option(Path(s))) - case trailingWord(s) => - val f = File(s) - val (i, maybes) = - if (f.isFile) (cursor - s.length, List(f.toAbsolute.path)) - else if (f.isDirectory) (cursor - s.length, List(s"${f.toAbsolute.path}/")) - else if (f.parent.exists) (cursor - f.name.length, listedIn(f.parent.toDirectory, f.name)) - else (-1, Nil) - if (maybes.isEmpty) 
NoCandidates else Candidates(i, maybes) - case _ => NoCandidates - } - } - - // complete settings name - val settingsCompletion: Completion = new Completion { - def resetVerbosity(): Unit = () - val trailingWord = """(\S+)$""".r.unanchored - def complete(buffer: String, cursor: Int): Candidates = { - buffer.substring(0, cursor) match { - case trailingWord(s) => - val maybes = settings.visibleSettings.filter(_.name.startsWith(s)).map(_.name) - .filterNot(when(_) { case "-"|"-X"|"-Y" => true }).sorted - if (maybes.isEmpty) NoCandidates else Candidates(cursor - s.length, maybes) - case _ => NoCandidates - } - } - } - - private def importsCommand(line: String): Result = { - val tokens = words(line) - val handlers = intp.languageWildcardHandlers ++ intp.importHandlers - - handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach { - case (handler, idx) => - val (types, terms) = handler.importedSymbols partition (_.name.isTypeName) - val imps = handler.implicitSymbols - val found = tokens filter (handler importsSymbolNamed _) - val typeMsg = if (types.isEmpty) "" else types.size + " types" - val termMsg = if (terms.isEmpty) "" else terms.size + " terms" - val implicitMsg = if (imps.isEmpty) "" else imps.size + " are implicit" - val foundMsg = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "") - val statsMsg = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")") - - intp.reporter.printMessage("%2d) %-30s %s%s".format( - idx + 1, - handler.importString, - statsMsg, - foundMsg - )) - } - } - - // Still todo: modules. - private def typeCommand(line0: String): Result = { - line0.trim match { - case "" => ":type [-v] . see also :help kind" - case s => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ") - } - } - - private lazy val kindUsage: String = "[-v] " - - private lazy val kindCommandDetailedHelp: String = - s""":kind $kindUsage - |Displays the kind of a given type. 
- | - | -v Displays verbose info. - | - |"Kind" is a word used to classify types and type constructors - |according to their level of abstractness. - | - |Concrete, fully specified types such as `Int` and `Option[Int]` - |are called "proper types" and denoted as `A` using Scala - |notation, or with the `*` symbol. - | - | scala> :kind Option[Int] - | Option[Int]'s kind is A - | - |In the above, `Option` is an example of a first-order type - |constructor, which is denoted as `F[A]` using Scala notation, or - |* -> * using the star notation. `:kind` also includes variance - |information in its output, so if we ask for the kind of `Option`, - |we actually see `F[+A]`: - | - | scala> :k -v Option - | Option's kind is F[+A] - | * -(+)-> * - | This is a type constructor: a 1st-order-kinded type. - | - |When you have more complicated types, `:kind` can be used to find - |out what you need to pass in. - | - | scala> trait ~>[-F1[_], +F2[_]] {} - | scala> :kind ~> - | ~>'s kind is X[-F1[A1],+F2[A2]] - | - |This shows that `~>` accepts something of `F[A]` kind, such as - |`List` or `Vector`. - |""".stripMargin - - private def kindCommand(expr: String): Result = { - expr.trim match { - case "" => s":kind $kindUsage" - case s => intp.kindCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ") - } - } - - private def warningsCommand(): Result = { - if (intp.lastWarnings.isEmpty) - "Can't find any cached warnings." 
- else - intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) } - } - - private def changeSettings(line: String): Result = { - val s = settings - def showSettings() = for (s <- s.userSetSettings.sorted(Ordering.ordered[s.Setting])) echo(s.toString) - if (line.isEmpty) showSettings() else { updateSettings(line) ; () } - } - private def updateSettings(line: String) = { - val (ok, rest) = settings.processArguments(words(line), processAll = false) - ok && rest.isEmpty - } - - private def javapCommand(line: String): Result = - Javap(intp)(words(line): _*) foreach { res => - if (res.isError) return s"${res.value}" - else res.show() - } - - private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent" - - private def phaseCommand(name: String): Result = { - val phased: Phased = power.phased - import phased.NoPhaseName - - if (name == "clear") { - phased.set(NoPhaseName) - intp.clearExecutionWrapper() - "Cleared active phase." - } - else if (name == "") phased.get match { - case NoPhaseName => "Usage: :phase (e.g. typer, erasure.next, erasure+3)" - case ph => "Active phase is '%s'. (To clear, :phase clear)".format(phased.get) - } - else { - val what = phased.parse(name) - if (what.isEmpty || !phased.set(what)) - "'" + name + "' does not appear to represent a valid phase." - else { - intp.setExecutionWrapper(pathToPhaseWrapper) - val activeMessage = - if (what.toString.length == name.length) "" + what - else "%s (%s)".format(what, name) - - "Active phase is now: " + activeMessage - } - } - } - - /** Available commands */ - def commands: List[LoopCommand] = standardCommands ++ ( - if (isReplPower) powerCommands else Nil - ) - - val replayQuestionMessage = - """|That entry seems to have slain the compiler. Shall I replay - |your session? I can re-run each line except the last one. 
- |[y/n] - """.trim.stripMargin - - private val crashRecovery: PartialFunction[Throwable, Boolean] = { - case ex: Throwable => - val (err, explain) = ( - if (intp.isInitializeComplete) - (intp.global.throwableAsString(ex), "") - else - (ex.getMessage, "The compiler did not initialize.\n") - ) - echo(err) - - ex match { - case _: NoSuchMethodError | _: NoClassDefFoundError => - echo("\nUnrecoverable error.") - throw ex - case _ => - def fn(): Boolean = - try in.readYesOrNo(explain + replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() }) - catch { case _: RuntimeException => false } - - if (fn()) replay() - else echo("\nAbandoning crashed session.") - } - true - } - - // after process line, OK continue, ERR break, or EOF all done - object LineResults extends Enumeration { - type LineResult = Value - val EOF, ERR, OK = Value - } - import LineResults.LineResult - - // return false if repl should exit - def processLine(line: String): Boolean = { - import scala.concurrent.duration._ - Await.ready(globalFuture, 10.minutes) // Long timeout here to avoid test failures under heavy load. - - command(line) match { - case Result(false, _) => false - case Result(_, Some(line)) => addReplay(line) ; true - case _ => true - } - } - - private def readOneLine() = { - out.flush() - in readLine prompt - } - - /** The main read-eval-print loop for the repl. It calls - * command() for each line of input, and stops when - * command() returns false. 
- */ - final def loop(): LineResult = loop(readOneLine()) - - @tailrec final def loop(line: String): LineResult = { - import LineResults._ - if (line == null) EOF - else if (try processLine(line) catch crashRecovery) loop(readOneLine()) - else ERR - } - - /** interpret all lines from a specified file */ - def interpretAllFrom(file: File, verbose: Boolean = false) { - savingReader { - savingReplayStack { - file applyReader { reader => - in = if (verbose) new SimpleReader(reader, out, interactive = true) with EchoReader - else SimpleReader(reader, out, interactive = false) - echo(s"Loading $file...") - loop() - } - } - } - } - - /** create a new interpreter and replay the given commands */ - def replayCommand(line: String): Unit = { - def run(destructive: Boolean): Unit = { - if (destructive) createInterpreter() else reset() - replay() - } - if (line.isEmpty) run(destructive = false) - else if (updateSettings(line)) run(destructive = true) - } - /** Announces as it replays. */ - def replay(): Unit = { - if (replayCommandStack.isEmpty) - echo("Nothing to replay.") - else for (cmd <- replayCommands) { - echo("Replaying: " + cmd) // flush because maybe cmd will have its own output - command(cmd) - echo("") - } - } - /** `reset` the interpreter in an attempt to start fresh. - * Supplying settings creates a new compiler. 
- */ - def resetCommand(line: String): Unit = { - def run(destructive: Boolean): Unit = { - echo("Resetting interpreter state.") - if (replayCommandStack.nonEmpty) { - echo("Forgetting this session history:\n") - replayCommands foreach echo - echo("") - replayCommandStack = Nil - } - if (intp.namedDefinedTerms.nonEmpty) - echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", ")) - if (intp.definedTypes.nonEmpty) - echo("Forgetting defined types: " + intp.definedTypes.mkString(", ")) - if (destructive) createInterpreter() else reset() - } - if (line.isEmpty) run(destructive = false) - else if (updateSettings(line)) run(destructive = true) - } - /** Resets without announcements. */ - def reset() { - intp.reset() - unleashAndSetPhase() - } - - def lineCommand(what: String): Result = editCommand(what, None) - - def completionsCommand(what: String): Result = { - val completions = new ReplCompletion(intp).complete(what, what.length) - val prefix = if (completions == NoCandidates) "" else what.substring(0, completions.cursor) - - val completionLines = - completions.candidates.map { c => - s"[completions] $prefix$c" - } - - if (completionLines.nonEmpty) { - echo(completionLines.mkString("\n")) - } - - Result.default // never record completions - } - - // :edit id or :edit line - def editCommand(what: String): Result = editCommand(what, Properties.envOrNone("EDITOR")) - - def editCommand(what: String, editor: Option[String]): Result = { - def diagnose(code: String): Unit = paste.incomplete("The edited code is incomplete!\n", "", code) - - def edit(text: String): Result = editor match { - case Some(ed) => - val tmp = File.makeTemp() - tmp.writeAll(text) - try { - val pr = new ProcessResult(s"$ed ${tmp.path}") - pr.exitCode match { - case 0 => - tmp.safeSlurp() match { - case Some(edited) if edited.trim.isEmpty => echo("Edited text is empty.") - case Some(edited) => - echo(edited.linesIterator map ("+" + _) mkString "\n") - val res = 
intp interpret edited - if (res == IR.Incomplete) diagnose(edited) - else { - history.historicize(edited) - Result(lineToRecord = Some(edited), keepRunning = true) - } - case None => echo("Can't read edited text. Did you delete it?") - } - case x => echo(s"Error exit from $ed ($x), ignoring") - } - } finally { - tmp.delete() - } - case None => - if (history.historicize(text)) echo("Placing text in recent history.") - else echo(f"No EDITOR defined and you can't change history, echoing your text:%n$text") - } - - // if what is a number, use it as a line number or range in history - def isNum = what forall (c => c.isDigit || c == '-' || c == '+') - // except that "-" means last value - def isLast = (what == "-") - if (isLast || !isNum) { - val name = if (isLast) intp.mostRecentVar else what - val sym = intp.symbolOfIdent(name) - intp.prevRequestList collectFirst { case r if r.defines contains sym => r } match { - case Some(req) => edit(req.line) - case None => echo(s"No symbol in scope: $what") - } - } else try { - val s = what - // line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur) - val (start, len) = - if ((s indexOf '+') > 0) { - val (a,b) = s splitAt (s indexOf '+') - (a.toInt, b.drop(1).toInt) - } else { - (s indexOf '-') match { - case -1 => (s.toInt, 1) - case 0 => val n = s.drop(1).toInt ; (history.index - n, n) - case _ if s.last == '-' => val n = s.init.toInt ; (n, history.index - n) - case i => val n = s.take(i).toInt ; (n, s.drop(i+1).toInt - n) - } - } - val index = (start - 1) max 0 - val text = history.asStrings(index, index + len) mkString "\n" - edit(text) - } catch { - case _: NumberFormatException => echo(s"Bad range '$what'") - echo("Use line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)") - } - } - - /** fork a shell and run a command */ - lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])", None) { - override def usage = "" - def apply(line: String): 
Result = line match { - case "" => showUsage() - case _ => - val toRun = s"new ${classOf[ProcessResult].getName}(${string2codeQuoted(line)})" - intp interpret toRun - () - } - } - - def withFile[A](filename: String)(action: File => A): Option[A] = intp.withLabel(filename) { - val res = Some(File(filename)) filter (_.exists) map action - if (res.isEmpty) intp.reporter.warning(NoPosition, s"File `$filename' does not exist.") // courtesy side-effect - res - } - - def loadCommand(arg: String): Result = { - def run(file: String, verbose: Boolean) = withFile(file) { f => - interpretAllFrom(f, verbose) - Result recording s":load $arg" - } getOrElse Result.default - - words(arg) match { - case "-v" :: file :: Nil => run(file, verbose = true) - case file :: Nil => run(file, verbose = false) - case _ => echo("usage: :load -v file") ; Result.default - } - } - - def saveCommand(filename: String): Result = ( - if (filename.isEmpty) echo("File name is required.") - else if (replayCommandStack.isEmpty) echo("No replay commands in session") - else File(filename).printlnAll(replayCommands: _*) - ) - - @deprecated("use reset, replay or require to update class path", since = "2.11.0") - def addClasspath(arg: String): Unit = { - val f = File(arg).normalize - if (f.exists) { - addedClasspath = ClassPath.join(addedClasspath, f.path) - intp.addUrlsToClassPath(f.toURI.toURL) - echo("Added '%s' to classpath.".format(f.path)) - repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString)) - } - else echo("The path '" + f + "' doesn't seem to exist.") - } - - /** Adds jar file to the current classpath. Jar will only be added if it - * does not contain classes that already exist on the current classpath. - * - * Importantly, `require` adds jars to the classpath ''without'' resetting - * the state of the interpreter. 
This is in contrast to `replay` which can - * be used to add jars to the classpath and which creates a new instance of - * the interpreter and replays all interpreter expressions. - */ - def require(arg: String): Unit = { - val f = File(arg).normalize - - val jarFile = AbstractFile.getDirectory(new java.io.File(arg)) - if (jarFile == null) { - echo(s"Cannot load '$arg'") - return - } - - def flatten(f: AbstractFile): Iterator[AbstractFile] = - if (f.isClassContainer) f.iterator.flatMap(flatten) - else Iterator(f) - - val entries = flatten(jarFile) - - def classNameOf(classFile: AbstractFile): String = { - val input = classFile.input - try { - val reader = new ClassReader(input) - reader.getClassName.replace('/', '.') - } finally { - input.close() - } - } - def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined - val existingClass = entries.filter(_.hasExtension("class")).map(classNameOf).find(alreadyDefined) - - if (!f.exists) echo(s"The path '$f' doesn't seem to exist.") - else if (existingClass.nonEmpty) echo(s"The path '$f' cannot be loaded, it contains a classfile that already exists on the classpath: ${existingClass.get}") - else { - addedClasspath = ClassPath.join(addedClasspath, f.path) - intp.addUrlsToClassPath(f.toURI.toURL) - echo("Added '%s' to classpath.".format(f.path)) - repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString)) - } - } - - def powerCmd(): Result = { - if (isReplPower) "Already in power mode." 
- else enablePowerMode(isDuringInit = false) - } - def enablePowerMode(isDuringInit: Boolean) = { - replProps.power setValue true - unleashAndSetPhase() - asyncEcho(isDuringInit, power.banner) - } - private def unleashAndSetPhase() = if (isReplPower) { - power.unleash() - intp beSilentDuring phaseCommand("typer") // Set the phase to "typer" - } - - def asyncEcho(async: Boolean, msg: => String) { - if (async) asyncMessage(msg) - else echo(msg) - } - - def verbosity() = { - intp.printResults = !intp.printResults - if (in.interactive || isReplInfo) echo(s"Result printing is ${ if (intp.printResults) "on" else "off" }.") - } - - /** Run one command submitted by the user. Two values are returned: - * (1) whether to keep running, (2) the line to record for replay, if any. - */ - def command(line: String): Result = { - if (line startsWith ":") colonCommand(line) - else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler - else Result(keepRunning = true, interpretStartingWith(line)) - } - - private def readWhile(cond: String => Boolean) = { - Iterator continually in.readLine("") takeWhile (x => x != null && cond(x)) - } - - /* :paste -raw file - * or - * :paste < EOF - * your code - * EOF - * :paste <~ EOF - * ~your code - * EOF - */ - def pasteCommand(arg: String): Result = { - var shouldReplay: Option[String] = None - var label = "" - def result = Result(keepRunning = true, shouldReplay) - val (raw, file, margin) = - if (arg.isEmpty) (false, None, None) - else { - def maybeRaw(ss: List[String]) = if (ss.nonEmpty && ss.head == "-raw") (true, ss.tail) else (false, ss) - def maybeHere(ss: List[String]) = - if (ss.nonEmpty && ss.head.startsWith("<")) (ss.head.dropWhile(_ == '<'), ss.tail) - else (null, ss) - - val (raw0, ss0) = maybeRaw(words(arg)) - val (margin0, ss1) = maybeHere(ss0) - val file0 = ss1 match { - case Nil => null - case x :: Nil => x - case _ => echo("usage: :paste [-raw] file | < EOF") ; return result - } - (raw0, 
Option(file0), Option(margin0)) - } - val code = (file, margin) match { - case (Some(name), None) => - label = name - withFile(name) { f => - shouldReplay = Some(s":paste $arg") - val s = f.slurp.trim - if (s.isEmpty) echo(s"File contains no code: $f") - else echo(s"Pasting file $f...") - s - } getOrElse "" - case (eof, _) => - echo(s"// Entering paste mode (${ eof getOrElse "ctrl-D" } to finish)\n") - val delimiter = eof orElse replProps.pasteDelimiter.option - val input = readWhile(s => delimiter.isEmpty || delimiter.get != s) mkString "\n" - val text = ( - margin filter (_.nonEmpty) map { - case "-" => input.linesIterator map (_.trim) mkString "\n" - case m => input stripMargin m.head // ignore excess chars in "<<||" - } getOrElse input - ).trim - if (text.isEmpty) echo("\n// Nothing pasted, nothing gained.\n") - else echo("\n// Exiting paste mode, now interpreting.\n") - text - } - def interpretCode() = { - if (intp.withLabel(label)(intp interpret code) == IR.Incomplete) - paste.incomplete("The pasted code is incomplete!\n", label, code) - } - def compileCode() = paste.compilePaste(label = label, code = code) - - if (code.nonEmpty) { - if (raw || paste.isPackaged(code)) compileCode() else interpretCode() - } - result - } - - private object paste extends Pasted(replProps.promptText) { - def interpret(line: String) = intp interpret line - def echo(message: String) = ILoop.this echo message - - val leadingElement = raw"(?s)\s*(package\s|/)".r - def isPackaged(code: String): Boolean = { - leadingElement.findPrefixMatchOf(code) - .map(m => if (m.group(1) == "/") intp.parse.packaged(code) else true) - .getOrElse(false) - } - - // if input is incomplete, wrap and compile for diagnostics. 
- def incomplete(message: String, label: String, code: String): Boolean = { - echo(message) - val errless = intp.compileSources(new BatchSourceFile(label, s"object pastel {\n$code\n}")) - if (errless) echo("No error found in incomplete source.") - errless - } - - def compilePaste(label: String, code: String): Boolean = { - val errless = intp.compileSources(new BatchSourceFile(label, code)) - if (!errless) echo("There were compilation errors!") - errless - } - } - - private object invocation { - def unapply(line: String): Boolean = Completion.looksLikeInvocation(line) - } - - private val lineComment = """\s*//.*""".r // all comment - - /** Interpret expressions starting with the first line. - * Read lines until a complete compilation unit is available - * or until a syntax error has been seen. If a full unit is - * read, go ahead and interpret it. Return the full string - * to be recorded for replay, if any. - */ - final def interpretStartingWith(code: String): Option[String] = { - // signal completion non-completion input has been received - in.completion.resetVerbosity() - - /* Here we place ourselves between the user and the interpreter and examine - * the input they are ostensibly submitting. We intervene in several cases: - * - * 1) If the line starts with "scala> " it is assumed to be an interpreter paste. - * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation - * on the previous result. - * 3) If the Completion object's execute returns Some(_), we inject that value - * and avoid the interpreter, as it's likely not valid scala code. 
- */ - code match { - case "" => None - case lineComment() => None // line comment, do nothing - case paste() if !paste.running => paste.transcript(Iterator(code) ++ readWhile(!paste.isPromptOnly(_))) match { - case Some(s) => interpretStartingWith(s) - case _ => None - } - case invocation() if intp.mostRecentVar != "" => interpretStartingWith(intp.mostRecentVar + code) - case _ => intp.interpret(code) match { - case IR.Error => None - case IR.Success => Some(code) - case IR.Incomplete if in.interactive && code.endsWith("\n\n") => - echo("You typed two blank lines. Starting a new command.") - None - case IR.Incomplete => - val saved = intp.partialInput - intp.partialInput = code + "\n" - try { - in.readLine(paste.ContinuePrompt) match { - case null => - // we know compilation is going to fail since we're at EOF and the - // parser thinks the input is still incomplete, but since this is - // a file being read non-interactively we want to fail. So we send - // it straight to the compiler for the nice error message. - intp.compileString(code) - None - case line => interpretStartingWith(s"$code\n$line") - } - } finally intp.partialInput = saved - } - } - } - - // delegate to command completion or presentation compiler - class ReplCompletion(intp: IMain) extends Completion { - val pc = new PresentationCompilerCompleter(intp) - def resetVerbosity(): Unit = pc.resetVerbosity() - def complete(buffer: String, cursor: Int): Completion.Candidates = { - if (buffer.startsWith(":")) - colonCompletion(buffer, cursor).complete(buffer, cursor) - else - pc.complete(buffer, cursor) - } - } - - /** Tries to create a jline.InteractiveReader, falling back to SimpleReader, - * unless settings or properties are such that it should start with SimpleReader. - * The constructor of the InteractiveReader must take a Completion strategy, - * supplied as a `() => Completion`; the Completion object provides a concrete Completer. 
- */ - def chooseReader(settings: Settings): InteractiveReader = { - if (settings.Xnojline) SimpleReader() - else { - type Completer = () => Completion - type ReaderMaker = Completer => InteractiveReader - - def instantiater(className: String): ReaderMaker = completer => { - if (settings.isDebug) Console.println(s"Trying to instantiate an InteractiveReader from $className") - Class.forName(className).getConstructor(classOf[Completer]). - newInstance(completer). - asInstanceOf[InteractiveReader] - } - - def mkReader(maker: ReaderMaker) = maker { () => - if (settings.noCompletion) NoCompletion else new ReplCompletion(intp) - } - - def internalClass(kind: String) = s"scala.tools.nsc.interpreter.$kind.InteractiveReader" - val readerClasses = sys.props.get("scala.repl.reader").toStream ++ Stream(internalClass("jline"), internalClass("jline_embedded")) - val readers = readerClasses map (cls => Try { mkReader(instantiater(cls)) }) - - val reader = (readers collect { case Success(reader) => reader } headOption) getOrElse SimpleReader() - - if (settings.isDebug) { - val readerDiags = (readerClasses, readers).zipped map { - case (cls, Failure(e)) => s" - $cls --> \n\t" + scala.tools.nsc.util.stackTraceString(e) + "\n" - case (cls, Success(_)) => s" - $cls OK" - } - Console.println(s"All InteractiveReaders tried: ${readerDiags.mkString("\n","\n","\n")}") - } - reader - } - } - - /** Start an interpreter with the given settings. - * @return true if successful - */ - def process(settings: Settings): Boolean = { - // yes this is sad - val runnerSettings = settings match { - case generic: GenericRunnerSettings => Some(generic) - case _ => None - } - - /** Reader to use before interpreter is online. 
*/ - def preLoop = { - def newReader = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true)) - val sr = SplashReader(newReader) { r => - in = r - in.postInit() - } - in = sr - SplashLoop(sr, prompt) - } - - // -e batch mode - def batchLoop(text: String) = { - val sr = SplashReader(InteractiveReader(text))(_ => ()) - in = sr - SplashLoop(sr, prompt) - } - - /* Actions to cram in parallel while collecting first user input at prompt. - * Run with output muted both from ILoop and from the intp reporter. - */ - def loopPostInit(): Unit = mumly { - // Bind intp somewhere out of the regular namespace where - // we can get at it in generated code. - intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])) - - // Auto-run code via some setting. - ( replProps.replAutorunCode.option - flatMap (f => File(f).safeSlurp()) - foreach (intp quietRun _) - ) - // power mode setup - if (isReplPower) { - replProps.power setValue true - unleashAndSetPhase() - asyncMessage(power.banner) - } - loadInitFiles() - // scala/bug#7418 Now, and only now, can we enable TAB completion. 
- in.postInit() - } - def loadInitFiles(): Unit = settings match { - case settings: GenericRunnerSettings => - for (f <- settings.loadfiles.value) { - loadCommand(f) - addReplay(s":load $f") - } - for (f <- settings.pastefiles.value) { - pasteCommand(f) - addReplay(s":paste $f") - } - case _ => - } - // ctl-D on first line of repl zaps the intp - def globalOrNull = if (intp != null) intp.global else null - // wait until after startup to enable noisy settings; intp is used only after body completes - def startup(): String = IMain.withSuppressedSettings(settings, globalOrNull) { - // -e is non-interactive - val splash = - runnerSettings.filter(_.execute.isSetByUser).map(ss => batchLoop(ss.execute.value)).getOrElse { - // starting - printWelcome() - - // let them start typing - preLoop - } - splash.start() - - // while we go fire up the REPL - try { - // don't allow ancient sbt to hijack the reader - savingReader { - createInterpreter() - //for (rs <- runnerSettings if rs.execute.isSetByUser) intp.printResults = false - } - intp.initializeSynchronous() - globalFuture = Future.successful(true) - if (intp.reporter.hasErrors && (!isSbt || intp.reporter.hasReportableErrors)) { - echo("Interpreter encountered errors during initialization!") - null - } else { - loopPostInit() - val line = splash.line // what they typed in while they were waiting - if (line == null) { // they ^D - try out.print(Properties.shellInterruptedString) - finally closeInterpreter() - } - line - } - } catch { - case t: Throwable => t.printStackTrace() ; scala.sys.exit(1) - } finally splash.stop() - } - this.settings = settings - startup() match { - case null => false - case line => - try loop(line) match { - case LineResults.EOF if in.interactive => out.print(Properties.shellInterruptedString) - case _ => - } - catch AbstractOrMissingHandler() - finally closeInterpreter() - true - } - } -} - -object ILoop { - implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp - - // Designed primarily for 
use by test code: take a String with a - // bunch of code, and prints out a transcript of what it would look - // like if you'd just typed it into the repl. - def runForTranscript(code: String, settings: Settings, inSession: Boolean = false): String = { - import java.io.{ BufferedReader, StringReader, OutputStreamWriter } - import java.lang.System.{lineSeparator => EOL} - - stringFromStream { ostream => - Console.withOut(ostream) { - val output = new JPrintWriter(new OutputStreamWriter(ostream), true) { - // skip margin prefix for continuation lines, unless preserving session text for test - // should test for repl.paste.ContinueString or replProps.continueText.contains(ch) - override def write(str: String) = - if (inSession || (str.exists(ch => ch != ' ' && ch != '|'))) super.write(str) - } - val input = new BufferedReader(new StringReader(s"${code.trim}${EOL}")) { - override def readLine(): String = { - mark(1) // default buffer is 8k - val c = read() - if (c == -1 || c == 4) { - null - } else { - reset() - val s = super.readLine() - // helping out by printing the line being interpreted. - output.println(s) - s - } - } - } - val repl = new ILoop(input, output) - if (settings.classpath.isDefault) - settings.classpath.value = sys.props("java.class.path") - - repl process settings - } - } - } - - /** Creates an interpreter loop with default settings and feeds - * the given code to it as input. 
- */ - def run(code: String, sets: Settings = new Settings): String = { - import java.io.{ BufferedReader, StringReader, OutputStreamWriter } - - stringFromStream { ostream => - Console.withOut(ostream) { - val input = new BufferedReader(new StringReader(code)) - val output = new JPrintWriter(new OutputStreamWriter(ostream), true) - val repl = new ILoop(input, output) - - if (sets.classpath.isDefault) - sets.classpath.value = sys.props("java.class.path") - - repl process sets - } - } - } - def run(lines: List[String]): String = run(lines map (_ + "\n") mkString) -} diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index c00fc0d903cd..102e83c02ab1 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,165 +10,171 @@ * additional information regarding copyright ownership. 
*/ -package scala -package tools.nsc -package interpreter +package scala.tools.nsc.interpreter +import java.io.{Closeable, PrintWriter, StringWriter} +import java.net.URL +import scala.collection.mutable, mutable.ListBuffer import scala.language.implicitConversions -import scala.collection.mutable -import scala.concurrent.{ExecutionContext, Future} -import scala.reflect.runtime.{universe => ru} import scala.reflect.{ClassTag, classTag} -import scala.reflect.internal.util.{BatchSourceFile, SourceFile} +import scala.reflect.internal.{FatalError, Flags, MissingRequirementError, NoPhase, Precedence} +import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader +import scala.reflect.internal.util.{AbstractFileClassLoader, BatchSourceFile, ListOfNil, Position, ReplBatchSourceFile, SourceFile} +import scala.reflect.runtime.{universe => ru} +import scala.tools.nsc.{Global, Settings} +import scala.tools.nsc.interpreter.Results.{Error, Incomplete, Result, Success} +import scala.tools.nsc.interpreter.StdReplTags.tagOfStdReplVals import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.typechecker.{StructuredTypeStrings, TypeStrings} -import scala.tools.nsc.util._ -import ScalaClassLoader.URLClassLoader -import scala.tools.nsc.util.Exceptional.unwrap -import java.net.URL -import java.io.Closeable +import scala.tools.nsc.util.Exceptional.rootCause +import scala.tools.nsc.util.{stackTraceString, stringFromWriter} import scala.tools.util.PathResolver import scala.util.{Try => Trying} +import scala.util.chaining._ +import scala.util.control.NonFatal /** An interpreter for Scala code. - * - * The main public entry points are compile(), interpret(), and bind(). - * The compile() method loads a complete Scala file. The interpret() method - * executes one line of Scala code at the request of the user. The bind() - * method binds an object to a variable that can then be used by later - * interpreted code. 
- * - * The overall approach is based on compiling the requested code and then - * using a Java classloader and Java reflection to run the code - * and access its results. - * - * In more detail, a single compiler instance is used - * to accumulate all successfully compiled or interpreted Scala code. To - * "interpret" a line of code, the compiler generates a fresh object that - * includes the line of code and which has public member(s) to export - * all variables defined by that code. To extract the result of an - * interpreted line to show the user, a second "result object" is created - * which imports the variables exported by the above object and then - * exports members called "$eval" and "$print". To accommodate user expressions - * that read from variables or methods defined in previous statements, "import" - * statements are used. - * - * This interpreter shares the strengths and weaknesses of using the - * full compiler-to-Java. The main strength is that interpreted code - * behaves exactly as does compiled code, including running at full speed. - * The main weakness is that redefining classes and methods is not handled - * properly, because rebinding at the Java level is technically difficult. - * - * @author Moez A. Abdel-Gawad - * @author Lex Spoon - */ -class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation with Closeable { - imain => - - def this(initialSettings: Settings) = this(initialSettings, IMain.defaultOut) + * + * The main public entry points are compile(), interpret(), and bind(). + * The compile() method loads a complete Scala file. The interpret() method + * executes one line of Scala code at the request of the user. The bind() + * method binds an object to a variable that can then be used by later + * interpreted code. 
+ * + * The overall approach is based on compiling the requested code and then + * using a Java classloader and Java reflection to run the code + * and access its results. + * + * In more detail, a single compiler instance is used + * to accumulate all successfully compiled or interpreted Scala code. To + * "interpret" a line of code, the compiler generates a fresh object that + * includes the line of code and which has public member(s) to export + * all variables defined by that code. To extract the result of an + * interpreted line to show the user, a second "result object" is created + * which imports the variables exported by the above object and then + * exports members called "\$eval" and "\$print". To accommodate user expressions + * that read from variables or methods defined in previous statements, "import" + * statements are used. + * + * This interpreter shares the strengths and weaknesses of using the + * full compiler-to-Java. The main strength is that interpreted code + * behaves exactly as does compiled code, including running at full speed. + * The main weakness is that redefining classes and methods is not handled + * properly, because rebinding at the Java level is technically difficult. + */ +class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoader], compilerSettings: Settings, val reporter: ReplReporter) + extends Repl with Imports with PresentationCompilation with Closeable { - object replOutput extends ReplOutput(settings.Yreploutdir) { } + def this(interpreterSettings: Settings, reporter: ReplReporter) = this(interpreterSettings, None, interpreterSettings, reporter) - @deprecated("Use replOutput.dir instead", "2.11.0") - def virtualDirectory = replOutput.dir - // Used in a test case. 
- def showDirectory() = replOutput.show(out) + import reporter.{debug => repldbg} - lazy val isClassBased: Boolean = settings.Yreplclassbased.value private[interpreter] lazy val useMagicImport: Boolean = settings.YreplMagicImport.value - private[nsc] var printResults = true // whether to print result lines - private[nsc] var totalSilence = false // whether to print anything - private var _initializeComplete = false // compiler is initialized - private var _isInitialized: Future[Boolean] = null // set up initialization future private var bindExceptions = true // whether to bind the lastException variable private var _executionWrapper = "" // code to be wrapped around all lines - var partialInput: String = "" // code accumulated in multi-line REPL input private var label = "" // compilation unit name for reporting /** We're going to go to some trouble to initialize the compiler asynchronously. - * It's critical that nothing call into it until it's been initialized or we will - * run into unrecoverable issues, but the perceived repl startup time goes - * through the roof if we wait for it. So we initialize it with a future and - * use a lazy val to ensure that any attempt to use the compiler object waits - * on the future. - */ - private var _classLoader: util.AbstractFileClassLoader = null // active classloader - private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler - - private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL + * It's critical that nothing call into it until it's been initialized or we will + * run into unrecoverable issues, but the perceived repl startup time goes + * through the roof if we wait for it. So we initialize it with a future and + * use a lazy val to ensure that any attempt to use the compiler object waits + * on the future. 
+ */ + private var _classLoader: AbstractFileClassLoader = null // active classloader + private var _runtimeMirror: ru.Mirror = null + private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL def compilerClasspath: Seq[java.net.URL] = ( - if (isInitializeComplete) global.classPath.asURLs + if (_initializeComplete) global.classPath.asURLs else new PathResolver(settings, global.closeableRegistry).resultAsURLs // the compiler's classpath - ) - def settings = initialSettings - def withoutWarnings[T](body: => T): T = beQuietDuring(IMain.withSuppressedSettings(settings, global)(body)) + ) + + // Run the code body with the given boolean settings flipped to true. + def withoutWarnings[T](body: => T): T = + reporter.withoutPrintingResults(IMain.withSuppressedSettings(settings, global)(body)) + + def withSuppressedSettings(body: => Unit): Unit = + IMain.withSuppressedSettings(settings, global)(body) + // Apply a temporary label for compilation (for example, script name) - def withLabel[A](temp: String)(body: => A): A = { + override def withLabel[A](temp: String)(body: => A): A = { val saved = label label = temp try body finally label = saved } - lazy val reporter: ReplReporter = new ReplReporter(this) + override def visibleSettings: List[Setting] = settings.visibleSettings + override def userSetSettings: List[Setting] = settings.userSetSettings + override def updateSettings(arguments: List[String]): Boolean = { + val (ok, rest) = settings.processArguments(arguments, processAll = false) + ok && rest.isEmpty + } + + object replOutput extends ReplOutput(settings.Yreploutdir) { } + + override def outputDir = replOutput.dir + + // Used in a test case. 
+ def showDirectory: String = { + val writer = new StringWriter() + replOutput.show(new PrintWriter(writer)) + writer.toString + } + + lazy val isClassBased: Boolean = settings.Yreplclassbased.value + - import reporter.{ printMessage, printUntruncatedMessage } + override def initializeComplete = _initializeComplete + private[this] var _initializeComplete = false + + // initializes the compiler, returning false if something went wrong + override def initializeCompiler(): Boolean = global != null + + lazy val global: Global = { + compilerSettings.outputDirs.setSingleOutput(replOutput.dir) + compilerSettings.exposeEmptyPackage.value = true + + // Can't use our own reporter until global is initialized + val startupReporter = new StoreReporter(compilerSettings) + + val compiler = new Global(compilerSettings, startupReporter) with ReplGlobal - // This exists mostly because using the reporter too early leads to deadlock. - private def echo(msg: String) { Console println msg } - private def _initSources = List(new BatchSourceFile("", "class $repl_$init { }")) - private def _initialize() = { try { - // if this crashes, REPL will hang its head in shame - val run = new _compiler.Run() + val run = new compiler.Run() assert(run.typerPhase != NoPhase, "REPL requires a typer phase.") - run compileSources _initSources + IMain.withSuppressedSettings(compilerSettings, compiler) { + run compileSources List(new BatchSourceFile("", "class $repl_$init { }")) + } + + // there shouldn't be any errors yet; just in case, print them if we're debugging + if (reporter.isDebug) + startupReporter.infos foreach { Console.err.println } + + compiler.reporter = reporter _initializeComplete = true - true + compiler } catch AbstractOrMissingHandler() } - private val logScope = scala.sys.props contains "scala.repl.scope" - private def scopelog(msg: => String) = if (logScope) Console.err.println(msg) - - // argument is a thunk to execute after init is done - def initialize(postInitSignal: => Unit) { - 
synchronized { - if (_isInitialized == null) { - _isInitialized = - Future(try _initialize() finally postInitSignal)(ExecutionContext.global) - } - } - } - def initializeSynchronous(): Unit = { - if (!isInitializeComplete) { - _initialize() - assert(global != null, global) - } - } - def isInitializeComplete = _initializeComplete - - lazy val global: Global = { - if (!isInitializeComplete) _initialize() - _compiler - } import global._ - import definitions.{ObjectClass, termMember, dropNullaryMethod} + import definitions.{ ObjectClass, termMember, dropNullaryMethod} - def runtimeMirror = ru.runtimeMirror(classLoader) + override def classPathString = global.classPath.asClassPathString private def noFatal(body: => Symbol): Symbol = try body catch { case _: FatalError => NoSymbol } def getClassIfDefined(path: String) = ( - noFatal(runtimeMirror staticClass path) - orElse noFatal(rootMirror staticClass path) - ) + noFatal(runtimeMirror staticClass path) + orElse noFatal(rootMirror staticClass path) + ) def getModuleIfDefined(path: String) = ( - noFatal(runtimeMirror staticModule path) - orElse noFatal(rootMirror staticModule path) - ) + noFatal(runtimeMirror staticModule path) + orElse noFatal(rootMirror staticModule path) + ) implicit class ReplTypeOps(tp: Type) { def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) @@ -178,7 +184,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // scalac unhappiness with what look like cycles. It has not been easy to // reduce, but name resolution clearly takes different paths. object naming extends { - val global: imain.global.type = imain.global + val global: IMain.this.global.type = IMain.this.global } with Naming { // make sure we don't overwrite their unwisely named res3 etc. 
def freshUserTermName(): TermName = { @@ -189,75 +195,59 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def isInternalTermName(name: Name) = isInternalVarName("" + name) } import naming._ + import Naming._ object deconstruct extends { - val global: imain.global.type = imain.global + val global: IMain.this.global.type = IMain.this.global } with StructuredTypeStrings lazy val memberHandlers = new { - val intp: imain.type = imain + val intp: IMain.this.type = IMain.this } with MemberHandlers import memberHandlers._ - /** Temporarily be quiet */ - def beQuietDuring[T](body: => T): T = { - val saved = printResults - printResults = false - try body - finally printResults = saved - } - def beSilentDuring[T](operation: => T): T = { - val saved = totalSilence - totalSilence = true - try operation - finally totalSilence = saved - } - def quietRun[T](code: String) = beQuietDuring(interpret(code)) + override def quietRun(code: String): Result = reporter.withoutPrintingResults(interpret(code)) /** takes AnyRef because it may be binding a Throwable or an Exceptional */ private def withLastExceptionLock[T](body: => T, alt: => T): T = { assert(bindExceptions, "withLastExceptionLock called incorrectly.") bindExceptions = false - try beQuietDuring(body) - catch logAndDiscard("withLastExceptionLock", alt) - finally bindExceptions = true + try reporter.withoutPrintingResults(body) catch { case NonFatal(t) => + repldbg("withLastExceptionLock: " + rootCause(t)) + reporter.trace(stackTraceString(rootCause(t))) + alt + } finally bindExceptions = true } def executionWrapper = _executionWrapper def setExecutionWrapper(code: String) = _executionWrapper = code - def clearExecutionWrapper() = _executionWrapper = "" - - /** interpreter settings */ - lazy val isettings = new ISettings(this) - - /** Instantiate a compiler. Overridable. 
*/ - protected def newCompiler(settings: Settings, reporter: reporters.Reporter): ReplGlobal = { - settings.outputDirs setSingleOutput replOutput.dir - settings.exposeEmptyPackage.value = true - new Global(settings, reporter) with ReplGlobal { - def sessionNames = naming.sessionNames - override def toString: String = "" - } - } + override def clearExecutionWrapper() = _executionWrapper = "" + + /** - * Adds all specified jars to the compile and runtime classpaths. - * - * @note Currently only supports jars, not directories. - * @param urls The list of items to add to the compile and runtime classpaths. - */ - def addUrlsToClassPath(urls: URL*): Unit = { + * Adds all specified jars to the compile and runtime classpaths. + * + * @note Currently only supports jars, not directories. + * @param urls The list of items to add to the compile and runtime classpaths. + */ + override def addUrlsToClassPath(urls: URL*): Unit = { new Run // force some initialization urls.foreach(_runtimeClassLoader.addURL) // Add jars to runtime classloader global.extendCompilerClassPath(urls: _*) // Add jars to compile-time classpath } + protected def replClass: Class[_] = this.getClass + /** Parent classloader. Overridable. */ protected def parentClassLoader: ClassLoader = { - val replClassLoader = this.getClass.getClassLoader() // might be null if we're on the boot classpath - settings.explicitParentLoader.orElse(Option(replClassLoader)).getOrElse(ClassLoader.getSystemClassLoader) + // might be null if we're on the boot classpath + parentClassLoaderOverride. + orElse(settings.explicitParentLoader). + orElse(Option(replClass.getClassLoader())). + getOrElse(ClassLoader.getSystemClassLoader) } /* A single class loader is used for all commands interpreted by this Interpreter. 
@@ -276,16 +266,22 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def resetClassLoader() = { repldbg("Setting new classloader: was " + _classLoader) _classLoader = null + _runtimeMirror = null ensureClassLoader() } - final def ensureClassLoader() { + final def ensureClassLoader(): Unit = if (_classLoader == null) _classLoader = makeClassLoader() - } - def classLoader: util.AbstractFileClassLoader = { + + override def classLoader: AbstractFileClassLoader = { ensureClassLoader() _classLoader } + def runtimeMirror = { + if (_runtimeMirror == null) + _runtimeMirror = ru.runtimeMirror(classLoader) + _runtimeMirror + } def backticked(s: String): String = ( (s split '.').toList map { @@ -293,7 +289,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case s if nme.keywords(newTermName(s)) => s"`$s`" case s => s } mkString "." - ) + ) def readRootPath(readPath: String) = getModuleIfDefined(readPath) abstract class PhaseDependentOps { @@ -310,7 +306,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def shift[T](op: => T): T = exitingFlatten(op) } - def originalPath(name: String): String = originalPath(TermName(name)) + override def originalPath(name: String): String = originalPath(TermName(name)) def originalPath(name: Name): String = translateOriginalPath(typerOp path name) def originalPath(sym: Symbol): String = translateOriginalPath(typerOp path sym) @@ -323,15 +319,15 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends flatOp shift sym1.javaClassName } - def translatePath(path: String) = { + override def translatePath(path: String): Option[String] = { val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path) sym.toOption map flatPath } /** If path represents a class resource in the default package, - * see if the corresponding symbol has a class file that is a REPL artifact - * residing at a different resource 
path. Translate X.class to $line3/$read$iw$X.class. - */ + * see if the corresponding symbol has a class file that is a REPL artifact + * residing at a different resource path. Translate X.class to \$line3/\$read\$\$iw\$\$iw\$X.class. + */ def translateSimpleResource(path: String): Option[String] = { if (!(path contains '/') && (path endsWith ".class")) { val name = path stripSuffix ".class" @@ -342,66 +338,64 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends None } } - def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath + override def translateEnclosingClass(n: String): Option[String] = symbolOfTerm(n).enclClass.toOption map flatPath /** If unable to find a resource foo.class, try taking foo as a symbol in scope * and use its java class name as a resource to load. * - * $intp.classLoader classBytes "Bippy" or $intp.classLoader getResource "Bippy.class" just work. + * \$intp.classLoader classBytes "Bippy" or \$intp.classLoader getResource "Bippy.class" just work. 
*/ - private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) { + private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(replOutput.dir, parent) { override protected def findAbstractFile(name: String): AbstractFile = super.findAbstractFile(name) match { - case null if _initializeComplete => translateSimpleResource(name) map super.findAbstractFile orNull + case null if _initializeComplete => translateSimpleResource(name).map(super.findAbstractFile).orNull case file => file } + // if the name was mapped by findAbstractFile, supply null name to avoid name check in defineClass + override protected def findClass(name: String): Class[_] = { + val bytes = classBytes(name) + if (bytes.length == 0) + throw new ClassNotFoundException(name) + else + defineClass(/*name=*/null, bytes, 0, bytes.length, protectionDomain) + } } - private def makeClassLoader(): util.AbstractFileClassLoader = - new TranslatingClassLoader({ - _runtimeClassLoader = new URLClassLoader(compilerClasspath, parentClassLoader) - _runtimeClassLoader - }) - - @deprecated("The thread context classloader is now set and restored around execution of REPL line, this method is now a no-op.", since = "2.12.0") - def setContextClassLoader() = () // Called from sbt-interface/0.12.4/src/ConsoleInterface.scala:39 + private def makeClassLoader(): AbstractFileClassLoader = + new TranslatingClassLoader(new URLClassLoader(compilerClasspath, parentClassLoader).tap(_runtimeClassLoader_=)) def allDefinedNames: List[Name] = exitingTyper(replScope.toList.map(_.name).sorted) - def unqualifiedIds: List[String] = allDefinedNames map (_.decode) sorted + def unqualifiedIds: List[String] = allDefinedNames.map(_.decode).sorted /** Most recent tree handled which wasn't wholly synthetic. 
*/ private def mostRecentlyHandledTree: Option[Tree] = { - prevRequests.reverse foreach { req => - req.handlers.reverse foreach { - case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member) - case _ => () - } - } - None + prevRequests.reverseIterator.map(_.handlers.reverseIterator.collectFirst { + case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => x.member + }).find(_.isDefined).flatten } + private val logScope = scala.sys.props contains "scala.repl.scope" + private def scopelog(msg: String) = if (logScope) Console.err.println(msg) + private def updateReplScope(sym: Symbol, isDefined: Boolean): Unit = { - def log(what: String) = scopelog { + def log(what: String): Unit = { val mark = if (sym.isType) "t " else "v " val name = exitingTyper(sym.nameString) val info = cleanTypeAfterTyper(sym) val defn = sym defStringSeenAs info - f"[$mark$what%6s] $name%-25s $defn%s" + scopelog(f"[$mark$what%6s] $name%-25s $defn%s") } - if (!ObjectClass.isSubClass(sym.owner)) { - // unlink previous - replScope.lookupAll(sym.name) foreach { sym => - log("unlink") - replScope unlink sym - } - val what = if (isDefined) "define" else "import" - log(what) - replScope enter sym + if (ObjectClass isSubClass sym.owner) return + // unlink previous + replScope lookupAll sym.name foreach { sym => + log("unlink") + replScope unlink sym } + val what = if (isDefined) "define" else "import" + log(what) + replScope enter sym } - def recordRequest(req: Request) { - if (req == null) - return + def recordRequest(req: Request): Unit = if (req != null) { prevRequests += req @@ -411,7 +405,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends exitingTyper { req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym => val oldSym = replScope lookup newSym.name.companionName - if (Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }) { + if 
(Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule case _ => false }) { replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.") replwarn("Companions must be defined together; you may wish to use :paste mode for this.") } @@ -423,9 +417,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } } - private[nsc] def replwarn(msg: => String) { - if (!settings.nowarnings) - printMessage(msg) + private[nsc] def replwarn(msg: => String): Unit = { + if (!settings.nowarnings.value) + reporter.printMessage(msg) } def compileSourcesKeepingRun(sources: SourceFile*) = { @@ -436,115 +430,33 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } /** Compile an nsc SourceFile. Returns true if there are - * no compilation errors, or false otherwise. - */ - def compileSources(sources: SourceFile*): Boolean = + * no compilation errors, or false otherwise. + */ + override def compileSources(sources: SourceFile*): Boolean = compileSourcesKeepingRun(sources: _*)._1 /** Compile a string. Returns true if there are no - * compilation errors, or false otherwise. - */ - def compileString(code: String): Boolean = + * compilation errors, or false otherwise. 
+ */ + override def compileString(code: String): Boolean = compileSources(new BatchSourceFile(" - - - - - - - { if (universe.settings.docDiagrams.value) { - - - } else NodeSeq.Empty } - - + def headers: Elems = { + def extScript(str: String) = Script(`type` = "text/javascript", src = str) + def libScript(value: String) = extScript(relativeLinkTo(List(value, "lib"))) + val canonicalSetting = universe.settings.docCanonicalBaseUrl + val canonicalLink = if (canonicalSetting.isSetByUser) { + val canonicalUrl = + if (canonicalSetting.value.endsWith("/")) canonicalSetting.value + else canonicalSetting.value + "/" + List(HtmlTags.Link(href = canonicalUrl + Page.relativeLinkTo(List("."), path), rel = "canonical")) + } else Nil + canonicalLink ++ List( + HtmlTags.Link(href = relativeLinkTo(List("index.css", "lib")), media = "screen", `type` = "text/css", rel = "stylesheet"), + HtmlTags.Link(href = relativeLinkTo(List("template.css", "lib")), media = "screen", `type` = "text/css", rel = "stylesheet"), + HtmlTags.Link(href = relativeLinkTo(List("print.css", "lib")), media = "print", `type` = "text/css", rel = "stylesheet"), + HtmlTags.Link(href = relativeLinkTo(List("diagrams.css", "lib")), media = "screen", `type` = "text/css", rel = "stylesheet", id = "diagrams-css"), + libScript("jquery.min.js"), + libScript("index.js"), + extScript(relativeLinkTo(List("index.js"))), + libScript("scheduler.js"), + libScript("template.js")) ++ + ((if (!universe.settings.docDiagrams.value) Nil + else (List( + extScript("https://d3js.org/d3.v4.js"), + extScript("https://cdn.jsdelivr.net/npm/graphlib-dot@0.6.2/dist/graphlib-dot.min.js"), + extScript("https://cdnjs.cloudflare.com/ajax/libs/dagre-d3/0.6.1/dagre-d3.min.js")))) :+ + Script(`type` = "text/javascript", elems = + Txt("/* this variable can be used by the JS to determine the path to the root document */\n" + + s"""var toRoot = '${ val p = templateToPath(tpl); "../" * (p.size - 1) }';"""))) + } + + def body = - - { search } -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Packages

    -
      - { - def entityToUl(mbr: TemplateEntity with MemberEntity, indentation: Int): NodeSeq = - if (mbr.isObject && hasCompanion(mbr)) - NodeSeq.Empty - else -
    • - { - mbr match { + HtmlTags.Body ( + search :: + Div(id = "search-results", elems = + Div(id = "search-progress", elems = + Div(id = "progress-fill") + ) :: + Div(id="results-content", elems = + Div(id="entity-results") :: + Div(id="member-results")) :: NoElems + ) :: + Div(id="content-scroll-container", style="-webkit-overflow-scrolling: touch;", elems = + Div(id="content-container", style="-webkit-overflow-scrolling: touch;", elems = + Div(id="subpackage-spacer", elems = + Div(id="packages", elems = + H(1, Txt("Packages")) :: + Ul(elems = { + def entityToUl(mbr: TemplateEntity with MemberEntity, indentation: Int): Elems = + if (mbr.isObject && hasCompanion(mbr)) + NoElems + else + Li(`class`= s"current-entities indented$indentation", elems = + (mbr match { case dtpl: DocTemplateEntity => - dtpl.companion.fold() { c: DocTemplateEntity => - + dtpl.companion.fold(Span(`class`= "separator"): Elem) { c: DocTemplateEntity => + A(`class`= "object", href=relativeLinkTo(c), title= memberToShortCommentTitleTag(c)) } - case _ => - } - } - - {mbr.name} -
    • - - // Get path from root - val rootToParentLis = tpl.toRoot - .tail - .reverse - .zipWithIndex - .map { case (pack, ind) => - memberToHtml(pack, tpl, indentation = ind, isParent = (pack eq tpl.toRoot.tail.head)) + case _ => Span(`class`= "separator") + }) :: Txt(" ") :: + A(`class`= mbr.kind, href=relativeLinkTo(mbr), title=memberToShortCommentTitleTag(mbr)) :: + A(href=relativeLinkTo(mbr), title=memberToShortCommentTitleTag(mbr), elems=Txt(mbr.name)) :: NoElems + ) + + // Get path from root + val rootToParentLis: Elems = tpl.toRoot + .tail + .reverse + .zipWithIndex + .flatMap { case (pack, ind) => + memberToHtml(pack, tpl, indentation = ind, isParent = (pack eq tpl.toRoot.tail.head)) + } + + val parent = tpl.toRoot match { + case _ :: parent :: _ if !parent.isRootPackage => Some(parent) + case _ => None } - val parent = tpl.toRoot match { - case _ :: parent :: _ if !parent.isRootPackage => Some(parent) - case _ => None - } + val parentSub = parent.fold(Seq[TemplateEntity with MemberEntity](tpl)) { p => + p.templates.filter(_.isPackage).sortBy(_.name) + } - val parentSub = parent.fold(Seq[TemplateEntity with MemberEntity](tpl)) { p => - p.templates.filter(_.isPackage).sortBy(_.name) + // If current entity is a package, take its containing entities - otherwise take parent's containing entities + val currentPackageTpls = + if (tpl.isPackage) tpl.templates + else parent.fold(Seq.empty[TemplateEntity with MemberEntity])(p => p.templates) + + val (subsToTpl, subsAfterTpl) = parentSub.partition(_.name <= tpl.name) + + val subsToTplLis = subsToTpl.toList.flatMap(memberToHtml(_, tpl, indentation = rootToParentLis.length)) + val subsAfterTplLis = subsAfterTpl.toList.flatMap(memberToHtml(_, tpl, indentation = rootToParentLis.length)) + val currEntityLis = currentPackageTpls + .filter(x => !x.isPackage && (x.isTrait || x.isClass || x.isAbstractType || x.isObject)) + .sortBy(_.name) + .toList.flatMap(entityToUl(_, (if (tpl.isPackage) 0 else -1) + 
rootToParentLis.length)) + val currSubLis = tpl.templates + .filter(_.isPackage) + .sortBy(_.name) + .flatMap(memberToHtml(_, tpl, indentation = rootToParentLis.length + 1)) + + if (subsToTpl.isEmpty && !tpl.isPackage) // current Entity is not a package, show packages before entity listing + rootToParentLis ++ subsToTplLis ++ subsAfterTplLis ++ currSubLis ++ currEntityLis + else + rootToParentLis ++ subsToTplLis ++ currSubLis ++ currEntityLis ++ subsAfterTplLis } + ) + ) + ) :: + Div(id="content", elems = content + ) :: NoElems + ) + ) + ) - // If current entity is a package, take its containing entities - otherwise take parent's containing entities - val currentPackageTpls = - if (tpl.isPackage) tpl.templates - else parent.fold(Seq.empty[TemplateEntity with MemberEntity])(p => p.templates) - - val (subsToTpl, subsAfterTpl) = parentSub.partition(_.name <= tpl.name) - - val subsToTplLis = subsToTpl.map(memberToHtml(_, tpl, indentation = rootToParentLis.length)) - val subsAfterTplLis = subsAfterTpl.map(memberToHtml(_, tpl, indentation = rootToParentLis.length)) - val currEntityLis = currentPackageTpls - .filter(x => !x.isPackage && (x.isTrait || x.isClass || x.isAbstractType || x.isObject)) - .sortBy(_.name) - .map(entityToUl(_, (if (tpl.isPackage) 0 else -1) + rootToParentLis.length)) - val currSubLis = tpl.templates - .filter(_.isPackage) - .sortBy(_.name) - .map(memberToHtml(_, tpl, indentation = rootToParentLis.length + 1)) - - if (subsToTpl.isEmpty && !tpl.isPackage) // current Entity is not a package, show packages before entity listing - rootToParentLis ++ subsToTplLis ++ subsAfterTplLis ++ currSubLis ++ currEntityLis - else - rootToParentLis ++ subsToTplLis ++ currSubLis ++ currEntityLis ++ subsAfterTplLis - } -
    -
    -
    -
    - { content } -
    -
    -
    - def search = - + Div(id="search", elems= + Span(id= "doc-title", elems= Txt(universe.settings.doctitle.value) :: Span(id= "doc-version", elems= Txt(universe.settings.docversion.value))) :: + Txt(" ") :: + Span(`class`= "close-results", elems= Span(`class`="left", elems= Txt("<")) :: Txt(" Back")) :: + Div(id="textfilter", elems= + Span(`class`= "input", elems= + Input(autocapitalize="none", placeholder="Search", id="index-input", `type`="text", accesskey="/") :: + I(`class`= "clear material-icons", elems=Txt("\uE14C")) :: + I(id="search-icon", `class`= "material-icons", elems=Txt("\uE8B6")) + ) + ) :: NoElems + ) val valueMembers = - tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject) sorted + (tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject)).sorted val (absValueMembers, nonAbsValueMembers) = valueMembers partition (_.isAbstract) @@ -188,9 +183,13 @@ trait EntityPage extends HtmlPage { val (concValueMembers, shadowedImplicitMembers) = nonDeprValueMembers partition (!_.isShadowedOrAmbiguousImplicit) - val typeMembers = + val allTypeMembers = tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted (implicitly[Ordering[MemberEntity]]) + val (deprTypeMembers, typeMembers) = allTypeMembers partition (_.deprecation.isDefined) + + val packageMembers = tpl.templates.filter(x => x.isPackage) sorted (implicitly[Ordering[MemberEntity]]) + val constructors = (tpl match { case cls: Class => (cls.constructors: List[MemberEntity]).sorted case _ => Nil @@ -200,304 +199,246 @@ trait EntityPage extends HtmlPage { * like a package/object this problem should be fixed, this implementation * is just a patch. 
*/ val content = { - val templateName = if (tpl.isRootPackage) "root package" else tpl.name - val displayName = tpl.companion match { - case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) => - { templateName } + val templateName = Txt(if (tpl.isRootPackage) "root package " else tpl.name) + val displayName: Elems = tpl.companion match { + case Some(companion) if (companion.visibility.isPublic && companion.inSource.isDefined) => + A(href= relativeLinkTo(companion), title= docEntityKindToCompanionTitle(tpl), elems=templateName) case _ => templateName } - val owner = { + val owner: Elems = { if (tpl.isRootPackage || tpl.inTemplate.isRootPackage) - NodeSeq.Empty + NoElems else -

    { templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }

    + P(id= "owner", elems= templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, Txt("."))) } - -
    - { - val imageClass = docEntityImageClass(tpl) + + val definition: Elems = + List(Div(id="definition", elems= + {val imageClass = docEntityImageClass(tpl) tpl.companion match { - case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) => -
    { imageClass.substring(0,1) }
    + case Some(companion) if (companion.visibility.isPublic && companion.inSource.isDefined) => + A(href= relativeLinkTo(companion), title= docEntityKindToCompanionTitle(tpl), elems= Div(`class`= s"big-circle $imageClass", elems=Txt(imageClass.substring(0,1)))) case _ => -
    { imageClass.substring(0,1) }
    - } - } - { owner } -

    { displayName }{ permalink(tpl) }

    - { if (tpl.isPackage) NodeSeq.Empty else

    {companionAndPackage(tpl)}

    } -
    - - { signature(tpl, isSelf = true) } - - { memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) } - - { if (valueMembers.filterNot(_.kind == "package").isEmpty) NodeSeq.Empty else -
    -
    -
    - - - - - -
    -
    -
    - Ordering -
      + Div(`class`= s"big-circle $imageClass", elems=Txt(imageClass.substring(0,1))) + }} :: + owner ++ + H(1, displayName ++ permalink(tpl)) ++ + {if (tpl.isPackage) NoElems else H(3, companionAndPackage(tpl)) :: NoElems } + )) + + val memberSel: Elems = + if (valueMembers.forall(_.kind == "package")) NoElems + else List(Div(id="mbrsel", elems= + Div(`class`="toggle") :: + Div(id="memberfilter", elems= + I(`class`="material-icons arrow", elems= Txt("\uE037")) :: + Span(`class`="input", elems= + Input(id="mbrsel-input", placeholder="Filter all members", `type`="text", accesskey="/") + ) :: + I(`class`="clear material-icons", elems=Txt("\uE14C")) + ) :: + Div(id="filterby", elems= + Div(id="order", elems= + Span(`class`="filtertype", elems=Txt("Ordering")) :: + Ol(elems= { if (!universe.settings.docGroups.value || tpl.members.map(_.group).distinct.forall(_ == ModelFactory.defaultGroup)) - NodeSeq.Empty + NoElems else -
    1. Grouped
    2. - } -
    3. Alphabetic
    4. + Li(`class`="group out", elems=Span(elems=Txt("Grouped"))) :: NoElems + } ++ + (Li(`class`="alpha in", elems=Span(elems=Txt("Alphabetic"))) :: { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) - NodeSeq.Empty + NoElems else -
    5. By Inheritance
    6. - } -
    -
    - { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else - { - if (tpl.linearizationTemplates.nonEmpty) -
    - Inherited
    -
    -
      - { (tpl :: tpl.linearizationTemplates).map(wte =>
    1. { wte.name }
    2. ) } -
    -
    - else NodeSeq.Empty - } ++ { - if (tpl.conversions.nonEmpty) -
    - Implicitly
    -
    -
      { - tpl.conversions.map { conv => - val name = conv.conversionQualifiedName - val hide = universe.settings.hiddenImplicits(name) -
    1. { "by " + conv.conversionShortName }
    2. - } + Li(`class`="inherit out", elems=Span(elems=Txt("By Inheritance"))) :: NoElems + }) + ) + ) ++ ( + if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NoElems else { + (if (tpl.linearizationTemplates.isEmpty) NoElems else + Div(`class`="ancestors", elems= + Span(`class`="filtertype", elems=Txt("Inherited") :: Br :: NoElems) :: + Ol(id="linearization", elems= + { (tpl :: tpl.linearizationTemplates).map(wte => Li(`class`="in", name= wte.qualifiedName, elems=Span(elems= Txt(wte.name)))) } + ) + ) :: NoElems) ++ + (if (tpl.conversions.isEmpty) NoElems else + Div(`class`="ancestors", elems= + Span(`class`="filtertype", elems=Txt("Implicitly") :: Br :: NoElems) :: + Ol(id="implicits", elems= { + tpl.conversions.map { conv => + val name = conv.conversionQualifiedName + val hide = universe.settings.hiddenImplicits(name) + Li(`class`="in", name= name, `data-hidden`= hide.toString, elems= Span(elems= Txt("by " + conv.conversionShortName))) } -
    -
    - else NodeSeq.Empty - } ++ -
    - -
      -
    1. Hide All
    2. -
    3. Show All
    4. -
    -
    - } - { -
    - Visibility -
    1. Public
    2. All
    -
    - } -
    -
    - } - -
    -
    - { if (constructors.isEmpty) NodeSeq.Empty else -
    -

    Instance Constructors

    -
      { constructors map (memberToHtml(_, tpl)) }
    -
    - } - - { if (typeMembers.isEmpty) NodeSeq.Empty else -
    -

    Type Members

    -
      { typeMembers map (memberToHtml(_, tpl)) }
    -
    - } - - { if (absValueMembers.isEmpty) NodeSeq.Empty else -
    -

    Abstract Value Members

    -
      { absValueMembers map (memberToHtml(_, tpl)) }
    -
    - } - - { if (concValueMembers.isEmpty) NodeSeq.Empty else -
    -

    { if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }

    -
      - { - concValueMembers - .map(memberToHtml(_, tpl)) } -
    -
    - } - - { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else -
    -

    Shadowed Implicit Value Members

    -
      { shadowedImplicitMembers map (memberToHtml(_, tpl)) }
    -
    - } - - { if (deprValueMembers.isEmpty) NodeSeq.Empty else -
    -

    Deprecated Value Members

    -
      { deprValueMembers map (memberToHtml(_, tpl)) }
    -
    - } -
    - -
    - { + ) + ) :: NoElems) ++ List( + Div(`class`="ancestors", elems= + Span(`class`="filtertype") :: + Ol(elems= + Li(`class`="hideall out", elems= Span(elems=Txt("Hide All"))) :: + Li(`class`="showall in", elems= Span(elems=Txt("Show All"))) + ) + )) + }) ++ List( + Div(id="visbl", elems= + Span(`class`="filtertype", elems=Txt("Visibility")) :: + Ol(elems= + List( + Li(`class`="public in", elems=Span(elems=Txt("Public"))), + Li(`class`="protected out", elems=Span(elems=Txt("Protected"))) + ) ++ List(Li(`class`="private out", elems=Span(elems=Txt("Private")))).filter(_ => universe.settings.visibilityPrivate.value)) + )) + ) + )) + + val template: Elems = List( + Div(id="template", elems= List( + Div(id="allMembers", elems= + memsDiv("package members", "Package Members", packageMembers, "packages") + ++ memsDiv("members", "Instance Constructors", constructors, "constructors") + ++ memsDiv("types members", "Type Members", typeMembers, "types") + ++ memsDiv("types members", "Deprecated Type Members", deprTypeMembers, "deprecatedTypes") + ++ memsDiv("values members", "Abstract Value Members", absValueMembers) + ++ memsDiv("values members", if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members", concValueMembers) + ++ memsDiv("values members", "Shadowed Implicit Value Members", shadowedImplicitMembers) + ++ memsDiv("values members", "Deprecated Value Members", deprValueMembers)), + Div(id="inheritedMembers", elems= // linearization - NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield -
    -

    Inherited from { - typeToHtmlWithStupidTypes(tpl, superTpl, superType) - }

    -
    - ) - } - { + (for ((superTpl, superType) <- tpl.linearizationTemplates zip tpl.linearizationTypes) yield + Div(`class`="parent", name= superTpl.qualifiedName, elems= + H(3, elems=Txt("Inherited from ") :: + typeToHtml(superType, hasLinks = true) + ))) ++ // implicitly inherited - NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield -
    -

    Inherited by implicit conversion { conversion.conversionShortName } from - { typeToHtml(tpl.resultType, hasLinks = true) } to { typeToHtml(conversion.targetType, hasLinks = true) } -

    -
    - ) - } -
    - -
    - { + (for (conversion <- tpl.conversions) yield + Div(`class`="conversion", name= conversion.conversionQualifiedName, elems= + H(3, elems=Txt(s"Inherited by implicit conversion ${conversion.conversionShortName} from") :: + typeToHtml(tpl.resultType, hasLinks = true) ++ (Txt(" to ") :: typeToHtml(conversion.targetType, hasLinks = true)) + ) + ))), + Div(id="groupedMembers", elems= { val allGroups = tpl.members.map(_.group).distinct val orderedGroups = allGroups.map(group => (tpl.groupPriority(group), group)).sorted.map(_._2) // linearization - NodeSeq fromSeq (for (group <- orderedGroups) yield -
    -

    { tpl.groupName(group) }

    - { - tpl.groupDescription(group) match { - case Some(body) =>
    { bodyToHtml(body) }
    - case _ => NodeSeq.Empty - } - } -
    - ) - } -
    - -
    - -
    - - { - if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value)) - - else - - } - + for (group <- orderedGroups) yield + Div(`class` = "group", name = group, elems = + H(3, Txt(tpl.groupName(group))) :: ( + tpl.groupDescription(group) match { + case Some(body) => Div(`class`="comment cmt", elems= bodyToHtml(body)) :: NoElems + case _ => NoElems + }) + ) + })) + )) + + val postamble = + List(Div(id = "tooltip"), + if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value)) + Div(id = "footer", elems = Txt("Scala programming documentation. Copyright (c) 2002-2025 ") :: A(href = "https://www.epfl.ch", target = "_top", elems = Txt("EPFL")) :: Txt(" and ") :: A(href = "https://akka.io", target = "_top", elems = Txt("Akka")) :: Txt(".")) + else + Div(id = "footer", elems = Txt(tpl.universe.settings.docfooter.value))) + + HtmlTags.Body(`class`= tpl.kind + (if (tpl.isType) " type" else " value"), elems= + definition ++ signature(tpl, isSelf = true) ++ memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) ++ memberSel ++ template ++ postamble + ) } + def memsDiv(cls: String, header: String, mems: List[MemberEntity], name: String = null) = + if (mems.isEmpty) NoElems + else List(Div(id= name, `class`= cls, elems= List(H(3, Txt(header)), Ol(elems= mems flatMap (memberToHtml(_, tpl)))))) + def memberToHtml( mbr: MemberEntity, inTpl: DocTemplateEntity, isParent: Boolean = false, indentation: Int = 0 - ): NodeSeq = { + ): Elems = { // Sometimes it's same, do we need signatureCompat still? - val sig = if (mbr.signature == mbr.signatureCompat) { - - } else { - + val sig = { + val anchorToMember = "anchorToMember" + + if (mbr.signature == mbr.signatureCompat) { + A(id= mbr.signature, `class` = anchorToMember) :: NoElems + } else { + A(id= mbr.signature, `class` = anchorToMember) :: A(id= mbr.signatureCompat, `class` = anchorToMember) :: NoElems + } } val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false) -
  • - { sig } - { signature(mbr, isSelf = false) } + Li(name= mbr.definitionName, + visbl=if (mbr.visibility.isPublic) "pub" else if (mbr.visibility.isProtected) "prt" else "prv", + `class`= s"indented$indentation " + (if (mbr eq inTpl) "current" else ""), + `data-isabs`= mbr.isAbstract.toString, + fullComment= if(!memberComment.exists(_.tagName == "div")) "no" else "yes", + group= mbr.group, elems= + { sig } ++ + (Txt(" ") :: { signature (mbr, isSelf = false) }) ++ { memberComment } -
  • + ) } - def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): NodeSeq = { + def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): Elems = mbr match { + // comment of class itself case dte: DocTemplateEntity if isSelf => - // comment of class itself - -
    { memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }
    -
    + Div(id="comment", `class`="fullcommenttop", elems= memberToCommentBodyHtml(dte, inTpl, isSelf = true)) case _ => // comment of non-class member or non-documented inner class val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false) - if (commentBody.isEmpty) - NodeSeq.Empty + if (commentBody.isEmpty) { + if (universe.settings.docRequired.value && mbr.visibility.isPublic && inTpl.visibility.isPublic && mbr.toString.startsWith("scala.collection") && mbr.isInstanceOf[Def]) + docletReporter.error(scala.reflect.internal.util.NoPosition, s"Member $mbr is public but has no documentation") + NoElems + } else { val shortComment = memberToShortCommentHtml(mbr, isSelf) val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, inTpl, isSelf) - val includedLongComment = if (shortComment.text.trim == longComment.text.trim) - NodeSeq.Empty - else -
    { longComment }
    + val includedLongComment = + if (textOf(shortComment) == textOf(longComment)) NoElems + else Div(`class`="fullcomment", elems= longComment) :: NoElems shortComment ++ includedLongComment } } - } - def memberToUseCaseCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = { + def memberToUseCaseCommentHtml(mbr: MemberEntity, isSelf: Boolean): Elems = { mbr match { case nte: NonTemplateMemberEntity if nte.isUseCase => inlineToHtml(comment.Text("[use case] ")) - case _ => NodeSeq.Empty + case _ => NoElems } } - def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = - mbr.comment.fold(NodeSeq.Empty) { comment => -

    { memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(comment.short) }

    + def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): Elems = + mbr.comment.toList.flatMap { comment => + P(`class`="shortcomment cmt", elems= memberToUseCaseCommentHtml(mbr, isSelf) ++ inlineToHtml(comment.short) ) } def memberToShortCommentTitleTag(mbr: MemberEntity): String = mbr.comment.fold("")(comment => Page.inlineToStrForTitleTag(comment.short)) - def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = -

    { inlineToHtml(mbr.comment.get.short) }

    - - def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = { + def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): Elems = { val s = universe.settings val memberComment = - if (mbr.comment.isEmpty) NodeSeq.Empty - else
    { commentToHtml(mbr.comment) }
    + if (mbr.comment.isEmpty) NoElems + else Div(`class`="comment cmt", elems= commentToHtml(mbr.comment)) :: NoElems val authorComment = - if (! s.docAuthor || mbr.comment.isEmpty || - mbr.comment.isDefined && mbr.comment.get.authors.isEmpty) NodeSeq.Empty - else
    - {if (mbr.comment.get.authors.size > 1)
    Authors:
    else
    Author:
    } - { mbr.comment.get.authors map bodyToHtml} -
    + if (!s.docAuthor.value || mbr.comment.isEmpty || + mbr.comment.isDefined && mbr.comment.get.authors.isEmpty) NoElems + else Div(`class`= "comment cmt", elems= + H(6, Txt(if (mbr.comment.get.authors.size > 1) "Authors:" else "Author:" )) :: + mbr.comment.get.authors.flatMap(bodyToHtml) + ) :: NoElems val paramComments = { val prs: List[ParameterEntity] = mbr match { @@ -508,51 +449,51 @@ trait EntityPage extends HtmlPage { case _ => Nil } - def paramCommentToHtml(prs: List[ParameterEntity], comment: Comment): NodeSeq = prs match { + def paramCommentToHtml(prs: List[ParameterEntity], comment: Comment): Elems = prs match { case (tp: TypeParam) :: rest => - val paramEntry: NodeSeq = { -
    { tp.name }
    { bodyToHtml(comment.typeParams(tp.name)) }
    + val paramEntry: Elems = { + Dt(`class`= "tparam", elems=Txt(tp.name)) :: Dd(`class`= "cmt", elems= bodyToHtml(comment.typeParams(tp.name)) ) } paramEntry ++ paramCommentToHtml(rest, comment) case (vp: ValueParam) :: rest => - val paramEntry: NodeSeq = { -
    { vp.name }
    { bodyToHtml(comment.valueParams(vp.name)) }
    + val paramEntry: Elems = { + Dt(`class`= "param", elems=Txt(vp.name)) :: Dd(`class`= "cmt", elems= bodyToHtml(comment.valueParams(vp.name)) ) } paramEntry ++ paramCommentToHtml(rest, comment) case _ => - NodeSeq.Empty + NoElems } - mbr.comment.fold(NodeSeq.Empty) { comment => + mbr.comment.fold(NoElems) { comment => val cmtedPrs = prs filter { - case tp: TypeParam => comment.typeParams isDefinedAt tp.name + case tp: TypeParam => comment.typeParams isDefinedAt tp.name case vp: ValueParam => comment.valueParams isDefinedAt vp.name + case x => throw new MatchError(x) } - if (cmtedPrs.isEmpty && comment.result.isEmpty) NodeSeq.Empty + if (cmtedPrs.isEmpty && comment.result.isEmpty) NoElems else { -
    { + Dl(`class`= "paramcmts block", elems = paramCommentToHtml(cmtedPrs, comment) ++ ( comment.result match { - case None => NodeSeq.Empty + case None => NoElems case Some(cmt) => -
    returns
    { bodyToHtml(cmt) }
    - }) - }
    + Dt(elems=Txt("returns")) :: Dd(`class`="cmt", elems=bodyToHtml(cmt)) + })) } } } val implicitInformation = mbr.byConversion match { case Some(conv) => -
    Implicit
    ++ + Dt(`class`= "implicit", elems= Txt("Implicit")) ++ { val targetType = typeToHtml(conv.targetType, hasLinks = true) val conversionMethod = conv.convertorMethod match { - case Left(member) => Text(member.name) - case Right(name) => Text(name) + case Left(member) => member.name + case Right(name) => name } // strip off the package object endings, they make things harder to follow @@ -561,99 +502,101 @@ trait EntityPage extends HtmlPage { val constraintText = conv.constraints match { case Nil => - NodeSeq.Empty + NoElems case List(constraint) => - scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".") + Txt("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ Txt(".") case List(constraint1, constraint2) => - scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++ - scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".") + Txt("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++ + Txt(" and at the same time ") ++ constraintToHtml(constraint2) ++ Txt(".") case constraints => -
    ++ "This conversion will take place only if all of the following constraints are met:" ++
    ++ { + Br :: Txt("This conversion will take place only if all of the following constraints are met:") :: Br :: { var index = 0 - constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++
    } + constraints flatMap { constraint => Txt("" + { index += 1; index } + ". ") :: (constraintToHtml(constraint) :+ Br) } } } -
    - This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, hasLinks = true) } to - { targetType } performed by method { conversionMethod } in { conversionOwner }. - { constraintText } -
    + Dd(elems= + (Txt("This member is added by an implicit conversion from ") :: (typeToHtml(inTpl.resultType, hasLinks = true) :+ Txt(" to"))) ++ + targetType ++ (Txt(" performed by method ") :: (Txt(conversionMethod+" in ") :: (conversionOwner :+ Txt(".")))) ++ + constraintText + ) } ++ { if (mbr.isShadowedOrAmbiguousImplicit) { // These are the members that are shadowing or ambiguating the current implicit // see ImplicitMemberShadowing trait for more information val shadowingSuggestion = { val params = mbr match { - case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString + case d: Def => d.valueParams.map(_.map(_.name).mkString("(", ", ", ")")).mkString case _ => "" // no parameters } -
    ++ scala.xml.Text("To access this member you can use a ") ++ -
    type ascription ++ scala.xml.Text(":") ++ -
    ++
    {"(" + EntityPage.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }
    + Br ++ Txt("To access this member you can use a ") ++ + A(href="https://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala", + target="_blank", elems= Txt("type ascription")) ++ Txt(":") ++ + Br ++ Div(`class`="cmt", elems=Pre(Txt(s"(${EntityPage.lowerFirstLetter(tpl.name)}: ${conv.targetType.name}).${mbr.name}$params"))) } - val shadowingWarning: NodeSeq = + val shadowingWarning: Elems = if (mbr.isShadowedImplicit) - scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " + + Txt("This implicitly inherited member is shadowed by one or more members in this " + "class.") ++ shadowingSuggestion else if (mbr.isAmbiguousImplicit) - scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " + + Txt("This implicitly inherited member is ambiguous. One or more implicitly " + "inherited members have similar signatures, so calling this member may produce an ambiguous " + "implicit conversion compiler error.") ++ shadowingSuggestion - else NodeSeq.Empty + else NoElems -
    Shadowing
    ++ -
    { shadowingWarning }
    + Dt(`class`="implicit", elems=Txt("Shadowing")) :: + Dd(elems= shadowingWarning) :: NoElems - } else NodeSeq.Empty + } else NoElems } case _ => - NodeSeq.Empty + NoElems } + def dt(s: String) = Dt(elems=Txt(s)) + // --- start attributes block vals - val attributes: NodeSeq = { - val fvs: List[NodeSeq] = visibility(mbr).toList - if (fvs.isEmpty || isReduced) NodeSeq.Empty + val attributes: Elems = { + val fvs: List[Elems] = visibility(mbr).toList + if (fvs.isEmpty || isReduced) NoElems else { -
    Attributes
    -
    { fvs.map(_ ++ scala.xml.Text(" ")) }
    + dt("Attributes") :: + Dd(elems = fvs.flatMap(_ :+ Txt(" "))) :: NoElems } } - val definitionClasses: NodeSeq = { + val definitionClasses: Elems = { val inDefTpls = mbr.inDefinitionTemplates - if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty + if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NoElems else { -
    Definition Classes
    -
    { templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }
    + dt("Definition Classes") :: + Dd(elems= templatesToHtml(inDefTpls, Txt(" → ")) ) :: NoElems } } - val fullSignature: NodeSeq = { + val fullSignature: Elems = { mbr match { case nte: NonTemplateMemberEntity if nte.isUseCase => -
    - - - Full Signature - -
    { signature(nte.useCaseOf.get,isSelf = true) }
    -
    - case _ => NodeSeq.Empty + Div(`class`= "full-signature-block toggleContainer", elems= + Span(`class`= "toggle", elems= + I(`class`= "material-icons", elems=Txt("\uE037")) :: + Txt("Full Signature") :: NoElems + ) :: + Div(`class`= "hiddenContent full-signature-usecase", elems= signature(nte.useCaseOf.get,isSelf = true)) + ) + case _ => NoElems } } - val selfType: NodeSeq = mbr match { + val selfType: Elems = mbr match { case dtpl: DocTemplateEntity if (isSelf && dtpl.selfType.isDefined && !isReduced) => -
    Self Type
    -
    { typeToHtml(dtpl.selfType.get, hasLinks = true) }
    - case _ => NodeSeq.Empty + dt("Self Type") :: + Dd(elems= typeToHtml(dtpl.selfType.get, hasLinks = true) ) :: NoElems + case _ => NoElems } - val annotations: NodeSeq = { + val annotations: Elems = { // A list of annotations which don't show their arguments, e. g. because they are shown separately. val annotationsWithHiddenArguments = List("deprecated", "Deprecated", "migration") @@ -661,225 +604,215 @@ trait EntityPage extends HtmlPage { !(annotationsWithHiddenArguments.contains(annotation.qualifiedName)) if (mbr.annotations.nonEmpty) { -
    Annotations
    -
    { - mbr.annotations.map { annot => - - @{ templateToHtml(annot.annotationClass) }{ - if (showArguments(annot)) argumentsToHtml(annot.arguments) else NodeSeq.Empty - } - - } - } -
    - } else NodeSeq.Empty + dt("Annotations") :: + Dd(elems = + mbr.annotations.flatMap { annot => + Span(`class` = "name", elems = Txt("@") :: templateToHtml(annot.annotationClass)) :: (( + if (showArguments(annot)) argumentsToHtml(annot.arguments) else NoElems) :+ Txt(" ")) + } + ) :: NoElems + } else NoElems } - val sourceLink: NodeSeq = mbr match { + val sourceLink: Elems = mbr match { case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) => val (absFile, _) = dtpl.inSource.get -
    Source
    -
    { { Text(absFile.file.getName) } }
    - case _ => NodeSeq.Empty + dt("Source") :: + Dd(elems= A(href= dtpl.sourceUrl.get.toString, target="_blank", elems= Txt(absFile.file.getName) ) ) :: NoElems + case _ => NoElems } - val deprecation: NodeSeq = + val deprecations: Elems = mbr.deprecation match { case Some(deprecation) if !isReduced => -
    Deprecated
    -
    { bodyToHtml(deprecation) }
    - case _ => NodeSeq.Empty + dt("Deprecated") :: + Dd(`class`= "cmt", elems= bodyToHtml(deprecation) ) :: NoElems + case _ => NoElems } - val migration: NodeSeq = + val migrations: Elems = mbr.migration match { case Some(migration) if !isReduced => -
    Migration
    -
    { bodyToHtml(migration) }
    - case _ => NodeSeq.Empty + dt("Migration") :: + Dd(`class`= "cmt", elems= bodyToHtml(migration) ) :: NoElems + case _ => NoElems } - val mainComment: NodeSeq = mbr.comment match { + val mainComment: Elems = mbr.comment match { case Some(comment) if (! isReduced) => - def orEmpty[T](it: Iterable[T])(gen: =>NodeSeq): NodeSeq = - if (it.isEmpty) NodeSeq.Empty else gen + def orEmpty[T](it: Iterable[T])(gen: => Elems): Elems = + if (it.isEmpty) NoElems else gen val example = orEmpty(comment.example) { -
    Example{ if (comment.example.length > 1) "s" else ""}: -
      { - val exampleXml: List[NodeSeq] = for (ex <- comment.example) yield -
    1. { bodyToHtml(ex) }
    2. - exampleXml.reduceLeft(_ ++ Text(", ") ++ _) - }
    -
    + Div(`class`="block", elems= Txt(s"Example${if (comment.example.lengthIs > 1) "s" else ""}:") :: + Ol(elems = { + val exampleXml: List[Elems] = for (ex <- comment.example) yield + Li(`class`= "cmt", elems= bodyToHtml(ex)) :: NoElems + exampleXml.reduceLeft(_ ++ Txt(", ") ++ _) + }) + ) } - val version: NodeSeq = + val version: Elems = orEmpty(comment.version) { -
    Version
    -
    { for(body <- comment.version.toList) yield bodyToHtml(body) }
    + dt("Version") :: + Dd(elems= comment.version.toList.flatMap(bodyToHtml)) :: NoElems } - val sinceVersion: NodeSeq = + val sinceVersion: Elems = orEmpty(comment.since) { -
    Since
    -
    { for(body <- comment.since.toList) yield bodyToHtml(body) }
    + dt("Since") :: + Dd(elems= comment.since.toList.flatMap(bodyToHtml) ) :: NoElems } - val note: NodeSeq = + val note: Elems = orEmpty(comment.note) { -
    Note
    -
    { - val noteXml: List[NodeSeq] = for(note <- comment.note ) yield {bodyToHtml(note)} - noteXml.reduceLeft(_ ++ Text(", ") ++ _) - }
    + dt("Note") :: + Dd(elems= { + val noteXml: List[Elems] = for(note <- comment.note ) yield Span(`class`= "cmt", elems= bodyToHtml(note)) :: NoElems + noteXml.reduceLeft(_ ++ Txt(", ") ++ _) + }) } - val seeAlso: NodeSeq = + val seeAlso: Elems = orEmpty(comment.see) { -
    See also
    -
    { - val seeXml: List[NodeSeq] = for(see <- comment.see ) yield {bodyToHtml(see)} + dt("See also") :: + Dd(elems= { + val seeXml: List[Elems] = for(see <- comment.see ) yield Span(`class`= "cmt", elems= bodyToHtml(see)) :: NoElems seeXml.reduceLeft(_ ++ _) - }
    + }) } - val exceptions: NodeSeq = + val exceptions: Elems = orEmpty(comment.throws) { -
    Exceptions thrown
    -
    { - val exceptionsXml: List[NodeSeq] = - for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield - {bodyToHtml(body)} - exceptionsXml.reduceLeft(_ ++ Text("") ++ _) - }
    + dt("Exceptions thrown") :: + Dd(elems = { + val exceptionsXml: List[Elems] = + for ((name@_, body) <- comment.throws.toList.sortBy(_._1)) + yield Span(`class` = "cmt", elems = bodyToHtml(body)) :: NoElems + exceptionsXml.reduceLeft(_ ++ Txt("") ++ _) + }) } - val todo: NodeSeq = + val todo: Elems = orEmpty(comment.todo) { -
    To do
    -
    { - val todoXml: List[NodeSeq] = (for(todo <- comment.todo ) yield {bodyToHtml(todo)} ) + dt("To do") :: + Dd(elems = { + val todoXml: List[Elems] = + for (todo <- comment.todo) + yield Span(`class` = "cmt", elems = bodyToHtml(todo)) :: NoElems todoXml.reduceLeft(_ ++ _) - }
    + }) } example ++ version ++ sinceVersion ++ exceptions ++ todo ++ note ++ seeAlso - case _ => NodeSeq.Empty + case _ => NoElems } // end attributes block vals --- - val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment + val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecations ++ migrations ++ sourceLink ++ mainComment val attributesBlock = if (attributesInfo.isEmpty) - NodeSeq.Empty + NoElems else -
    { attributesInfo }
    + Dl(`class`= "attributes block", elems= attributesInfo ) :: NoElems val linearization = mbr match { case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.linearizationTemplates.nonEmpty => -
    - - Linear Supertypes - -
    { - typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", ")) - }
    -
    - case _ => NodeSeq.Empty + Div(`class` = "toggleContainer", elems = + Div(`class` = "toggle block", elems = + Span(elems = + Txt("Linear Supertypes") + ) :: + Div(`class` = "superTypes hiddenContent", elems = + typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = Txt(", ")) + ) + )) :: NoElems + case _ => NoElems } val subclasses = mbr match { case dtpl: DocTemplateEntity if isSelf && !isReduced => val subs = mutable.HashSet.empty[DocTemplateEntity] - def transitive(dtpl: DocTemplateEntity) { + + def transitive(dtpl: DocTemplateEntity): Unit = { for (sub <- dtpl.directSubClasses if !(subs contains sub)) { subs add sub transitive(sub) } } + transitive(dtpl) if (subs.nonEmpty) -
    - - Known Subclasses - -
    { - templatesToHtml(subs.toList.sorted(Entity.EntityOrdering), scala.xml.Text(", ")) - }
    -
    - else NodeSeq.Empty - case _ => NodeSeq.Empty + Div(`class` = "toggleContainer", elems = + Div(`class` = "toggle block", elems = + Span(elems = + Txt("Known Subclasses") + ) :: + Div(`class` = "subClasses hiddenContent", elems = + templatesToHtml(subs.toList.sorted(Entity.EntityOrdering), Txt(", ")) + ) + )) :: NoElems + else NoElems + case _ => NoElems } - def createDiagram(f: DocTemplateEntity => Option[Diagram], description: String, id: String): NodeSeq = - if (s.docDiagrams.value) mbr match { - case dtpl: DocTemplateEntity if isSelf && !isReduced => - val diagram = f(dtpl) - if (diagram.isDefined) { - val diagramSvg = generator.generate(diagram.get, tpl, this) - if (diagramSvg != NodeSeq.Empty) { -
    - - { description } - -
    { diagramSvg }
    -
    - - - -
    -
    - } else NodeSeq.Empty - } else NodeSeq.Empty - case _ => NodeSeq.Empty - } else NodeSeq.Empty // diagrams not generated - - val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram") - val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram") + def diagramDiv(description: String, diagId: String)(diagramSvg: Elems): Elems = + Div(`class`= "toggleContainer block diagram-container", id=diagId+"-container", elems= List( + Span(`class`= "toggle diagram-link", elems= Txt(description)), + Div(`class`= "diagram hiddenContent", id= diagId, elems= diagramSvg))) :: NoElems + + def ifDiags(genDiag: DocTemplateEntity => Option[Diagram])(embedDiagSvg: Elems => Elems): Elems = + mbr match { + case dtpl: DocTemplateEntity if s.docDiagrams.value && isSelf && !isReduced => + genDiag(dtpl).map(diag => embedDiagSvg(generator.generate(diag, tpl, this))).getOrElse(NoElems) + case _ => NoElems + } + + val typeHierarchy = ifDiags(_.inheritanceDiagram)(diagramDiv("Type Hierarchy", "inheritance-diagram")) + val contentHierarchy = ifDiags(_.contentDiagram)(diagramDiv("Content Hierarchy", "content-diagram")) memberComment ++ authorComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy } - def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = { - def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match { - case None => NodeSeq.Empty - case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks) + def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): Elems = { + def bound0(bnd: Option[TypeEntity], pre: String): Elems = bnd match { + case None => NoElems + case Some(tpe) => Txt(pre) ++ typeToHtml(tpe, hasLinks) } bound0(lo, " >: ") ++ bound0(hi, " <: ") } - def visibility(mbr: MemberEntity): Option[NodeSeq] = { + def visibility(mbr: MemberEntity): Option[Elems] = { 
mbr.visibility match { case PrivateInInstance() => - Some(Text("private[this]")) + Some(Txt("private[this]")) case PrivateInTemplate(None) => - Some(Text("private")) + Some(Txt("private")) case PrivateInTemplate(Some(owner)) => - Some(Text("private[") ++ typeToHtml(owner, true) ++ Text("]")) + Some((Txt("private[") :: typeToHtml(owner, hasLinks = true)) :+ Txt("]")) case ProtectedInInstance() => - Some(Text("protected[this]")) + Some(Txt("protected[this]")) case ProtectedInTemplate(None) => - Some(Text("protected")) + Some(Txt("protected")) case ProtectedInTemplate(Some(owner)) => - Some(Text("protected[") ++ typeToHtml(owner, true) ++ Text("]")) + Some((Txt("protected[") :: typeToHtml(owner, hasLinks = true)) :+ Txt("]")) case Public() => None } } /** name, tparams, params, result */ - def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = { - - def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq = - - - { mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) } - { kindToString(mbr) } - - + def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): Elems = { + + def inside(hasLinks: Boolean, nameLink: String = ""): Elems = + Span(`class`= "modifier_kind", elems= + Span(`class`= "modifier", elems= mbr.flags.flatMap(flag => inlineToHtml(flag.text) :+ Txt(" "))) :: + Txt(" ") :: Span(`class`= "kind", elems= Txt(kindToString(mbr))) :: NoElems + ) :: Txt(" ") :: + Span(`class`="symbol", elems= { val nameClass = if (mbr.isImplicitlyInherited) @@ -890,129 +823,125 @@ trait EntityPage extends HtmlPage { else "name" - val nameHtml = { + val nameHtml: Elem = { val value = if (mbr.isConstructor) tpl.name else mbr.name - val span = if (mbr.deprecation.isDefined) - { value } + val (cls, titleDepr) = if (mbr.deprecation.isDefined) + (nameClass + " deprecated", "Deprecated: "+bodyToStr(mbr.deprecation.get)) else - { value } + (nameClass, null) val encoded = scala.reflect.NameTransformer.encode(value) 
- if (encoded != value) { - span % new UnprefixedAttribute("title", - "gt4s: " + encoded + - span.attribute("title").map( - node => ". " + node - ).getOrElse(""), - scala.xml.Null) + val title = if (encoded != value) { + "gt4s: " + encoded + (if (titleDepr == null) "" else ". " + titleDepr) } else { - span + titleDepr } + Span(`class`= cls, title= title, elems=Txt(value)) } if (!nameLink.isEmpty) - {nameHtml} + A(title= memberToShortCommentTitleTag(mbr), href= nameLink, elems= nameHtml) else nameHtml - }{ - def tparamsToHtml(mbr: Any): NodeSeq = mbr match { + } :: { + def tparamsToHtml(mbr: Any): Elems = mbr match { case hk: HigherKinded => val tpss = hk.typeParams - if (tpss.isEmpty) NodeSeq.Empty else { - def tparam0(tp: TypeParam): NodeSeq = - { tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)} - def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match { + if (tpss.isEmpty) NoElems else { + def tparam0(tp: TypeParam): Elems = + Span(name= tp.name, elems= Txt(tp.variance + tp.name) :: tparamsToHtml(tp) ++ boundsToHtml(tp.hi, tp.lo, hasLinks)) + def tparams0(tpss: List[TypeParam]): Elems = (tpss: @unchecked) match { case tp :: Nil => tparam0(tp) - case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps) + case tp :: tps => tparam0(tp) ++ Txt(", ") ++ tparams0(tps) } - [{ tparams0(tpss) }] + Span(`class`= "tparams", elems= Txt("[") :: (tparams0(tpss) :+ Txt("]"))) } - case _ => NodeSeq.Empty + case _ => NoElems } tparamsToHtml(mbr) - }{ - if (isReduced) NodeSeq.Empty else { - def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = { - def param0(vl: ValueParam): NodeSeq = + } ++ { + if (isReduced) NoElems else { + def paramsToHtml(vlsss: List[List[ValueParam]]): Elems = { + def param0(vl: ValueParam): Elems = // notice the }{ in the next lines, they are necessary to avoid an undesired whitespace in output - { - Text(vl.name) - }{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{ - vl.defaultValue match { - 
case Some(v) => Text(" = ") ++ treeToHtml(v) - case None => NodeSeq.Empty - } - } - - def params0(vlss: List[ValueParam]): NodeSeq = vlss match { - case Nil => NodeSeq.Empty + Span (name= vl.name, elems= + Txt(vl.name) :: + { Txt(": ") ++ typeToHtml(vl.resultType, hasLinks) } ++ + (vl.defaultValue match { + case Some(v) => Txt(" = ") :: treeToHtml(v) + case None => NoElems + }) + ) + + def params0(vlss: List[ValueParam]): Elems = vlss match { + case Nil => NoElems case vl :: Nil => param0(vl) - case vl :: vls => param0(vl) ++ Text(", ") ++ params0(vls) + case vl :: vls => param0(vl) ++ Txt(", ") ++ params0(vls) } - def implicitCheck(vlss: List[ValueParam]): NodeSeq = vlss match { - case vl :: vls => if(vl.isImplicit) { implicit } else Text("") - case _ => Text("") + def implicitCheck(vlss: List[ValueParam]): Elems = vlss match { + case vl :: vls => if(vl.isImplicit) { Span(`class`= "implicit", elems= Txt("implicit ")) } else Txt("") + case _ => Txt("") } - vlsss map { vlss => ({implicitCheck(vlss) ++ params0(vlss) }) } + vlsss map { vlss => Span(`class`= "params", elems = Txt("(") :: implicitCheck(vlss) ++ params0(vlss) ++ Txt(")")) } } mbr match { case cls: Class => paramsToHtml(cls.valueParams) case ctr: Constructor => paramsToHtml(ctr.valueParams) case dfe: Def => paramsToHtml(dfe.valueParams) - case _ => NodeSeq.Empty + case _ => NoElems } } - }{ if (isReduced) NodeSeq.Empty else { + } ++ {if (isReduced) NoElems else { mbr match { case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) => - : { typeToHtml(tme.resultType, hasLinks) } + Span(`class`= "result", elems= Txt(": ") :: typeToHtml(tme.resultType, hasLinks) ) case abt: MemberEntity with AbstractType => val b2s = boundsToHtml(abt.hi, abt.lo, hasLinks) - if (b2s != NodeSeq.Empty) - { b2s } - else NodeSeq.Empty + if (b2s != NoElems) + Span(`class`= "result", elems= b2s ) + else NoElems case alt: MemberEntity with AliasType => - = { typeToHtml(alt.alias, hasLinks) } + Span(`class`= 
"result alias", elems= Txt(" = ") :: typeToHtml(alt.alias, hasLinks) ) case tpl: MemberTemplateEntity if tpl.parentTypes.nonEmpty => - extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) } + Span(`class`= "result", elems= Txt(" extends ") :: typeToHtml(tpl.parentTypes.map(_._2), hasLinks) ) - case _ => NodeSeq.Empty + case _ => NoElems } }} - - + ) + mbr match { case dte: DocTemplateEntity if !isSelf => - permalink(dte) ++ { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) } + permalink(dte) :: Txt(" ") ++ { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) } case _ if isSelf => -

    { inside(hasLinks = true) }

    + H(4, id="signature", `class`= "signature", elems= inside(hasLinks = true)) case _ => - permalink(mbr) ++ { inside(hasLinks = true) } + permalink(mbr) :: Txt(" ") ++ { inside(hasLinks = true) } } } /** */ - def treeToHtml(tree: TreeEntity): NodeSeq = { + def treeToHtml(tree: TreeEntity): Elems = { /** Makes text good looking in the html page : newlines and basic indentation, * You must change this function if you want to improve pretty printing of default Values */ - def codeStringToXml(text: String): NodeSeq = { - var goodLookingXml: NodeSeq = NodeSeq.Empty + def codeStringToXml(text: String): Elems = { + var goodLookingXml: Elems = NoElems var indent = 0 for (c <- text) c match { case '{' => indent+=1 - goodLookingXml ++= Text("{") + goodLookingXml ++= Txt("{") case '}' => indent-=1 - goodLookingXml ++= Text("}") + goodLookingXml ++= Txt("}") case '\n' => - goodLookingXml++=
    ++ indentation - case _ => goodLookingXml ++= Text(c.toString) + goodLookingXml++= Br ++ indentation + case _ => goodLookingXml ++= Txt(c.toString) } - def indentation:NodeSeq = { - var indentXml = NodeSeq.Empty - for (x <- 1 to indent) indentXml ++= Text("  ") + def indentation:Elems = { + var indentXml = NoElems + for (_ <- 1 to indent) indentXml ++= Txt(" ") // TODO:    indentXml } goodLookingXml @@ -1021,7 +950,7 @@ trait EntityPage extends HtmlPage { var index = 0 val str = tree.expression val length = str.length - var myXml: NodeSeq = NodeSeq.Empty + var myXml: Elems = NoElems for ((from, (member, to)) <- tree.refEntity.toSeq) { if (index < from) { myXml ++= codeStringToXml(str.substring(index,from)) @@ -1031,11 +960,12 @@ trait EntityPage extends HtmlPage { member match { case mbr: DocTemplateEntity => val link = relativeLinkTo(mbr) - myXml ++= {str.substring(from, to)} + myXml ++= Span(`class`="name", elems= A(href=link, elems= Txt(str.substring(from, to)))) case mbr: MemberEntity => val anchor = "#" + mbr.signature val link = relativeLinkTo(mbr.inTemplate) - myXml ++= {str.substring(from, to)} + myXml ++= Span(`class`="name", elems= A(href=link + anchor, elems= Txt(str.substring(from, to)))) + case x => throw new MatchError(x) } index = to } @@ -1045,77 +975,62 @@ trait EntityPage extends HtmlPage { myXml ++= codeStringToXml(str.substring(index, length )) if (length < 36) - { myXml } + Span(`class`= "symbol", elems= myXml ) else - { "..." } + Span(`class`= "defval", elems= myXml ) // was buggy: { "..." 
} -- TODO: handle overflow in CSS (as in #search > span#doc-title > span#doc-version ) } - private def argumentsToHtml(argss: List[ValueArgument]): NodeSeq = { - def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match { - case Nil => NodeSeq.Empty + private def argumentsToHtml(argss: List[ValueArgument]): Elems = { + def argumentsToHtml0(argss: List[ValueArgument]): Elems = argss match { + case Nil => NoElems case arg :: Nil => argumentToHtml(arg) - case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args) + case arg :: args => argumentToHtml(arg) ++ Txt(", ") ++ argumentsToHtml0(args) } - ({ argumentsToHtml0(argss) }) + Span(`class`= "args", elems= (Txt("(") :: argumentsToHtml0(argss)) :+ Txt(")")) } - private def argumentToHtml(arg: ValueArgument): NodeSeq = { - - { - arg.parameter match { - case Some(param) => Text(param.name + " = ") - case None => NodeSeq.Empty - } + private def argumentToHtml(arg: ValueArgument): Elems = { + Span(elems= + arg.parameter match { + case Some(param) => Txt(param.name + " = ") :: treeToHtml(arg.value) + case None => treeToHtml(arg.value) } - { treeToHtml(arg.value) } - + ) } private def bodyToStr(body: comment.Body): String = - body.blocks flatMap (blockToStr(_)) mkString "" + body.blocks flatMap blockToStr mkString "" private def blockToStr(block: comment.Block): String = block match { case comment.Paragraph(in) => Page.inlineToStr(in) - case _ => block.toString + case _ => block.toString } - private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq = - if (tpl.universe.settings.useStupidTypes.value) - superTpl match { - case dtpl: DocTemplateEntity => - val sig = signature(dtpl, isSelf = false, isReduced = true) \ "_" - sig - case tpl: TemplateEntity => - Text(tpl.name) - } - else - typeToHtml(superType, hasLinks = true) - - private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match { + private def 
constraintToHtml(constraint: Constraint): Elems = constraint match { case ktcc: KnownTypeClassConstraint => - scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++ - templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")") + Txt(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++ + templateToHtml(ktcc.typeClassEntity) ++ Txt(")") case tcc: TypeClassConstraint => - scala.xml.Text(tcc.typeParamName + " is ") ++ - - context-bounded ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++ - templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")") + Txt(tcc.typeParamName + " is ") ++ + A(href="https://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala", target="_blank", elems= + Txt("context-bounded")) ++ Txt(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++ + templateToHtml(tcc.typeClassEntity) ++ Txt(")") case impl: ImplicitInScopeConstraint => - scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, hasLinks = true) ++ scala.xml.Text(" is in scope") + Txt("an implicit value of type ") ++ typeToHtml(impl.implicitType, hasLinks = true) ++ Txt(" is in scope") case eq: EqualTypeParamConstraint => - scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++ - typeToHtml(eq.rhs, hasLinks = true) ++ scala.xml.Text(")") + Txt(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++ + typeToHtml(eq.rhs, hasLinks = true) ++ Txt(")") case bt: BoundedTypeParamConstraint => - scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " + + Txt(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " + bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++ - typeToHtml(bt.lowerBound, hasLinks = true) ++ scala.xml.Text(" <: ") ++ - typeToHtml(bt.upperBound, hasLinks 
= true) ++ scala.xml.Text(")") + typeToHtml(bt.lowerBound, hasLinks = true) ++ Txt(" <: ") ++ + typeToHtml(bt.upperBound, hasLinks = true) ++ Txt(")") case lb: LowerBoundedTypeParamConstraint => - scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++ - typeToHtml(lb.lowerBound, hasLinks = true) ++ scala.xml.Text(")") + Txt(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++ + typeToHtml(lb.lowerBound, hasLinks = true) ++ Txt(")") case ub: UpperBoundedTypeParamConstraint => - scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++ - typeToHtml(ub.upperBound, hasLinks = true) ++ scala.xml.Text(")") + Txt(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++ + typeToHtml(ub.upperBound, hasLinks = true) ++ Txt(")") } } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala index 7ca2cd2be762..eb32bf1dfaa1 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,7 +15,6 @@ package html package page import scala.tools.nsc.doc -import scala.tools.nsc.doc.model.{Package, DocTemplateEntity} import scala.tools.nsc.doc.html.{Page, HtmlFactory} class IndexScript(universe: doc.Universe) extends Page { @@ -25,14 +24,12 @@ class IndexScript(universe: doc.Universe) extends Page { def path = List("index.js") - override def writeFor(site: HtmlFactory) { - writeFile(site) { - _.write(s"Index.PACKAGES = $packages;") - } + override def writeFor(site: HtmlFactory): Unit = writeFile(site) { + _.write(s"Index.PACKAGES = $packages;") } val packages = { - val pairs = allPackagesWithTemplates.toIterable.map(_ match { + val pairs = allPackagesWithTemplates.map(_ match { case (pack, templates) => { val merged = mergeByQualifiedName(templates) @@ -87,7 +84,7 @@ class IndexScript(universe: doc.Universe) extends Page { def allPackagesWithTemplates: Map[Package, List[DocTemplateEntity]] = { Map(allPackages.map((key) => { key -> key.templates.collect { - case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t + case t: DocTemplateEntity if !t.isPackage => t } }) : _*) } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala index 786e0628f848..2070531c6e4a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala index e9a1fbc81ad2..5c65d55c152c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,8 +16,6 @@ package html package page package diagram -import scala.xml.NodeSeq -import scala.tools.nsc.doc.html.HtmlPage import scala.tools.nsc.doc.model.diagram.Diagram import scala.tools.nsc.doc.model.DocTemplateEntity @@ -31,5 +29,5 @@ trait DiagramGenerator { * @param p The page the diagram will be embedded in (needed for link generation) * @return The HTML to be embedded in the Scaladoc page */ - def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage): NodeSeq + def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage): HtmlTags.Elems } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala index de0bb6871a28..4a0177c5f690 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,8 +16,8 @@ package html.page.diagram object DiagramStats { class TimeTracker(title: String) { - var totalTime: Long = 0l - var maxTime: Long = 0l + var totalTime: Long = 0L + var maxTime: Long = 0L var instances: Int = 0 def addTime(ms: Long) = { @@ -45,19 +45,15 @@ object DiagramStats { private[this] val filterTrack = new TimeTracker("diagrams model filtering") private[this] val modelTrack = new TimeTracker("diagrams model generation") private[this] val dotGenTrack = new TimeTracker("dot diagram generation") - private[this] val dotRunTrack = new TimeTracker("dot process running") - private[this] val svgTrack = new TimeTracker("svg processing") private[this] var brokenImages = 0 private[this] var fixedImages = 0 def printStats(settings: Settings) = { - if (settings.docDiagramsDebug) { + if (settings.docDiagramsDebug.value) { settings.printMsg("\nDiagram generation running time breakdown:\n") filterTrack.printStats(settings.printMsg) modelTrack.printStats(settings.printMsg) dotGenTrack.printStats(settings.printMsg) - dotRunTrack.printStats(settings.printMsg) - svgTrack.printStats(settings.printMsg) println(" Broken images: " + brokenImages) println(" Fixed images: " + fixedImages) println("") @@ -67,8 +63,6 @@ object DiagramStats { def addFilterTime(ms: Long) = filterTrack.addTime(ms) def addModelTime(ms: Long) = modelTrack.addTime(ms) def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms) - def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms) - def addSvgTime(ms: Long) = svgTrack.addTime(ms) def addBrokenImage(): Unit = brokenImages += 1 def addFixedImage(): Unit = fixedImages += 1 diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index de015d0f5bad..3360743a94ff 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ 
b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,496 +18,377 @@ package html package page package diagram -import scala.xml.{NodeSeq, PrefixedAttribute, Elem, Null, UnprefixedAttribute} import scala.collection.immutable._ import model._ import model.diagram._ +import scala.tools.nsc.doc.html.HtmlTags._ -class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends DiagramGenerator { - - // the page where the diagram will be embedded - private var page: HtmlPage = null - // path to the "lib" folder relative to the page - private var pathToLib: String = null - // maps nodes to unique indices - private var node2Index: Map[Node, Int] = null - // true if the current diagram is a class diagram - private var isInheritanceDiagram = false - // incoming implicit nodes (needed for determining the CSS class of a node) - private var incomingImplicitNodes: List[Node] = List() +// this needs a rewrite +class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { // the suffix used when there are two many classes to show private final val MultiSuffix = " classes/traits" + // used to generate unique node and edge ids (i.e. 
avoid conflicts with multiple diagrams) - private var counter = 0 - - def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = { - counter = counter + 1 - this.page = page - pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/" - val dot = generateDot(diagram) - val result = generateSVG(dot, template) - // clean things up a bit, so we don't leave garbage on the heap - this.page = null - node2Index = null - incomingImplicitNodes = List() - result + private var graphId = 0 + + /** + * TODO: check this was ported from old code + * - adds a class attribute to the SVG element + * - changes the path of the node images from absolute to relative + * - assigns id and class attributes to nodes and edges + * - removes title elements + */ + def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage): HtmlTags.Elems = { + graphId = graphId + 1 + + // TODO: reconcile show/hide via display CSS attrib and rendering into the SVG (which only works when the element is visible so that its dimensions are known) -- see template.js for trigger('beforeShow') + List(Svg(id=s"graph$graphId", width= "800", height="600", `class` = if (diagram.isInstanceOf[InheritanceDiagram]) "class-diagram" else "package-diagram"), + Script(`type`="text/dot", id=s"dot$graphId", elems=Raw("\n" + generateDot(diagram)(page) + "\n")), + Script(elems= Txt( + s"""$$("#inheritance-diagram").bind('beforeShow', function() { + | if ($$("svg#graph$graphId").children().length == 0) { + | var dot = document.querySelector("#dot$graphId").text; + | var svg = d3.select("#graph$graphId"); + | var inner = svg.append("g"); + | + | // Set up zoom support + | var zoom = d3.zoom() + | .on("zoom", function() { + | inner.attr("transform", d3.event.transform); + | }); + | svg.call(zoom); + | + | var render = new dagreD3.render(); + | var g = graphlibDot.read(dot); + | render(inner, g); + | + | inner.selectAll("g.node").each(function(v) { + | // 
https://stackoverflow.com/questions/27381452/wrapping-existing-inline-svg-g-element-with-a-element + | var tgt = $$("#"+ g.node(v).id +" g.label text")[0]; + | var parent = tgt.parentNode; + | var a = document.createElementNS('http://www.w3.org/2000/svg', 'a'); + | a.setAttributeNS('http://www.w3.org/1999/xlink', 'xlink:href', g.node(v).URL); + | a.appendChild(tgt); + | parent.appendChild(a); + | }); + | } + |})""".stripMargin))) } /** * Generates a dot string for a given diagram. */ - private def generateDot(d: Diagram) = { - // inheritance nodes (all nodes except thisNode and implicit nodes) - var nodes: List[Node] = null - // inheritance edges (all edges except implicit edges) - var edges: List[(Node, List[Node])] = null - - // timing + private def generateDot(d: Diagram, preamble: String = "")(implicit page: HtmlPage): String = { var tDot = -System.currentTimeMillis - // variables specific to class diagrams: - // current node of a class diagram - var thisNode:Node = null - var subClasses = List[Node]() - var superClasses = List[Node]() - var incomingImplicits = List[Node]() - var outgoingImplicits = List[Node]() - isInheritanceDiagram = false - - d match { - case InheritanceDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) => - - def textTypeEntity(text: String) = - new TypeEntity { - val name = text - def refEntity: SortedMap[Int, (base.LinkTo, Int)] = SortedMap() - } - + // yuck + val ( + thisNode, + nodes, // inheritance nodes (all nodes except thisNode and implicit nodes) + edges, // inheritance edges (all edges except implicit edges) : List[(Node, List[Node])] = null + node2Index, // maps nodes to unique indices : Map[Node, Int] = null + superClasses, + subClasses, + incomingImplicits, // incoming implicit nodes (needed for determining the CSS class of a node) + outgoingImplicits) + = d match { + case InheritanceDiagram(thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) => // it seems dot 
chokes on node names over 8000 chars, so let's limit the size of the string // conservatively, we'll limit at 4000, to be sure: def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str + def tooltip(nodes: List[Node]) = limitSize(nodes.map(_.tpe.name).mkString(", ")) + + def counted(nodes: List[Node]) = new TypeEntity { + val name = "" + nodes.length + MultiSuffix; + def refEntity: SortedMap[Int, (base.LinkTo, Int)] = SortedMap() + } // avoid overcrowding the diagram: // if there are too many super / sub / implicit nodes, represent // them by on node with a corresponding tooltip - superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) { - val superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", "))) - List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None)(superClassesTooltip)) - } else _superClasses - - subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) { - val subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", "))) - List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None)(subClassesTooltip)) - } else _subClasses - - incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) { - val incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", "))) - List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None)(incomingImplicitsTooltip)) - } else _incomingImplicits - - outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) { - val outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", "))) - List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None)(outgoingImplicitsTooltip)) - } else _outgoingImplicits - - thisNode = _thisNode - nodes = List() - edges = (thisNode -> superClasses) :: subClasses.map(_ -> 
List(thisNode)) - node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap - isInheritanceDiagram = true - incomingImplicitNodes = incomingImplicits + + val outgoingImplicits = + if (_outgoingImplicits.lengthIs > settings.docDiagramsMaxImplicitClasses.value) { + List(ImplicitNode(counted(_outgoingImplicits), None)(Some(tooltip(_outgoingImplicits)))) + } else _outgoingImplicits + + val incomingImplicits = + if (_incomingImplicits.lengthIs > settings.docDiagramsMaxImplicitClasses.value) { + List(ImplicitNode(counted(_incomingImplicits), None)(Some(tooltip(_incomingImplicits)))) + } else _incomingImplicits + + val subClasses = + if (_subClasses.lengthIs > settings.docDiagramsMaxNormalClasses.value) { + List(NormalNode(counted(_subClasses), None)(Some(tooltip(_subClasses)))) + } else _subClasses + + val superClasses = + if (_superClasses.lengthIs > settings.docDiagramsMaxNormalClasses.value) { + List(NormalNode(counted(_superClasses), None)(Some(tooltip(_superClasses)))) + } else _superClasses + + (thisNode, + Nil, + (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode)), + (thisNode :: subClasses ::: superClasses ::: incomingImplicits ::: outgoingImplicits).zipWithIndex.toMap, + superClasses, + subClasses, + incomingImplicits, + outgoingImplicits) + case _ => - nodes = d.nodes - edges = d.edges - node2Index = d.nodes.zipWithIndex.toMap - incomingImplicitNodes = List() + (null, d.nodes, d.edges, d.nodes.zipWithIndex.toMap, Nil, Nil, Nil, Nil) } - val implicitsDot = { - if (!isInheritanceDiagram) "" + + /** + * Generates the dot string of a given node. 
+ */ + def node2Dot(node: Node)(implicit page: HtmlPage) = { + val nodeIdx = node2Index(node) + + val baseClass = + if (node.isClassNode) " class" + else if (node.isTraitNode) " trait" + else if (node.isObjectNode) " object" + else if (node.isTypeNode) " type" + + val cls = + if (node.isImplicitNode && incomingImplicits.contains(node)) "implicit-incoming" + baseClass + else if (node.isImplicitNode) "implicit-outgoing" + baseClass + else if (node.isThisNode) "this" + baseClass + else if (node.isOutsideNode) "outside" + baseClass + else "default" + + val attr = + Map("label" -> node.name, + "id" -> s"graph${graphId}_$nodeIdx", + "class" -> cls) ++ + node.doctpl.toList.map { tpl => "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram-container") } ++ + (node.tooltip orElse node.tpl.map(_.qualifiedName)).toList.map { "tooltip" -> _ } + + s"node$nodeIdx ${nodeAttrString(attr)} ;" + } + + val implicitsDot = + if (!d.isInstanceOf[InheritanceDiagram]) "" else { // dot cluster containing thisNode - val thisCluster = "subgraph clusterThis {\n" + - "style=\"invis\"\n" + - node2Dot(thisNode) + - "}" + val thisCluster = + s"""subgraph clusterThis { + | style="invis" + | ${node2Dot(thisNode)} + |}""".stripMargin + // dot cluster containing incoming implicit nodes, if any - val incomingCluster = { - if(incomingImplicits.isEmpty) "" - else "subgraph clusterIncoming {\n" + - "style=\"invis\"\n" + - incomingImplicits.reverse.map(n => node2Dot(n)).mkString + - (if (incomingImplicits.size > 1) - incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") + - " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n" - else "") + - "}" - } - // dot cluster containing outgoing implicit nodes, if any - val outgoingCluster = { - if(outgoingImplicits.isEmpty) "" - else "subgraph clusterOutgoing {\n" + - "style=\"invis\"\n" + - outgoingImplicits.reverse.map(n => node2Dot(n)).mkString + - (if (outgoingImplicits.size > 1) - outgoingImplicits.map(n => "node" + 
node2Index(n)).mkString(" -> ") + - " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n" - else "") + - "}" - } + def implicitCluster(impls: List[Node], clusterName: String) = + if (impls.isEmpty) "" + else { + val str = + if (impls.lengthCompare(1) <= 0) "" + else impls.map(n => "node" + node2Index(n)).mkString(" -> ") + + nodeAttrString(Map("constraint" -> "false", "style" -> "invis", "minlen" -> "0.0")) + ";\n" + + s"""subgraph $clusterName { + | style = "invis" + | ${impls.reverse.map(n => node2Dot(n)).mkString} + | $str + }""".stripMargin + } // assemble clusters into another cluster val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name - val outgoingTooltip = thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ") - "subgraph clusterAll {\n" + - "style=\"invis\"\n" + - outgoingCluster + "\n" + - thisCluster + "\n" + - incomingCluster + "\n" + - // incoming implicit edge - (if (incomingImplicits.nonEmpty) { - val n = incomingImplicits.last - "node" + node2Index(n) +" -> node" + node2Index(thisNode) + - " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" + - ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n" - } else "") + - // outgoing implicit edge - (if (outgoingImplicits.nonEmpty) { - val n = outgoingImplicits.head - "node" + node2Index(thisNode) + " -> node" + node2Index(n) + - " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" + - ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n" - } else "") + - "}" + val outgoingTooltip = thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ") + + def implicitEdge(from: Node, to: Node, 
tailLabel: String, headLabel: String, tooltip: String) = { + "node" + node2Index(from) + " -> node" + node2Index(to) + + nodeAttrString(Map( + "class" -> edgeClass(from, to), + "id" -> ("" + node2Index(from) + "_" + node2Index(to)), + "tooltip" -> tooltip, + "constraint" -> "false", + "minlen" -> "2", + "ltail" -> tailLabel, + "lhead" -> headLabel, + "label" -> "implicitly")) + } + + val incomingImplicitEdges = + if (incomingImplicits.isEmpty) "" + else implicitEdge(incomingImplicits.last, thisNode, "clusterIncoming", "clusterThis", incomingTooltip) + + val outgoingImplicitEdges = + if (outgoingImplicits.isEmpty) "" + else implicitEdge(thisNode, outgoingImplicits.head, "clusterThis", "clusterOutgoing", outgoingTooltip) + + s"""subgraph clusterAll { + | style = "invis" + | ${implicitCluster(outgoingImplicits, "clusterOutgoing")} + | $thisCluster + | ${implicitCluster(incomingImplicits, "clusterIncoming")} + | $incomingImplicitEdges + | $outgoingImplicitEdges + |}""".stripMargin } - } - // assemble graph - val graph = "digraph G {\n" + - // graph / node / edge attributes - graphAttributesStr + - "node [" + nodeAttributesStr + "];\n" + - "edge [" + edgeAttributesStr + "];\n" + - implicitsDot + "\n" + - // inheritance nodes - nodes.map(n => node2Dot(n)).mkString + - subClasses.map(n => node2Dot(n)).mkString + - superClasses.map(n => node2Dot(n)).mkString + - // inheritance edges - edges.map{ case (from, tos) => tos.map(to => { - val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from) + val edgesStr = edges.map { case (from, tos) => + tos.map { to => + val id = "graph" + graphId + "_" + node2Index(to) + "_" + node2Index(from) // the X -> Y edge is inverted twice to keep the diagram flowing the right way // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"] - "node" + node2Index(to) + " -> node" + node2Index(from) + - " [id=\"" + cssClass(to, from) + "|" + id + "\", " + - "tooltip=\"" + from.name + (if 
(from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") + - to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n" - }).mkString}.mkString + - "}" + val attrs = Map("class" -> edgeClass(to, from), + "id" -> id, + "tooltip" -> s"${from.name} ${if (from.name.endsWith(MultiSuffix)) "are subtypes" else "is a subtype"} of ${to.name}", + "dir" -> "back", + "arrowtail" -> "empty") + + s"""node${node2Index(to)} -> node${node2Index(from)} ${nodeAttrString(attrs)} ;""" + }.mkString + }.mkString("\n ") + tDot += System.currentTimeMillis DiagramStats.addDotGenerationTime(tDot) - graph - } + def nodeStrings(nodes: List[Node]) = nodes.map(node2Dot).mkString("\n ") - /** - * Generates the dot string of a given node. - */ - private def node2Dot(node: Node) = { + s"""digraph G { + | $preamble + | $implicitsDot + | ${nodeStrings(nodes)} + | ${nodeStrings(subClasses)} + | ${nodeStrings(superClasses)} + | $edgesStr + |}""".stripMargin + } + private def nodeAttrString(attributes: Map[String, String]) = { // escape HTML characters in node names def escape(name: String) = name.replace("&", "&").replace("<", "<").replace(">", ">") - - // assemble node attributes in a map - val attr = scala.collection.mutable.Map[String, String]() - - // link - node.doctpl match { - case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram") - case _ => - } - - // tooltip - node.tooltip match { - case Some(text) => attr += "tooltip" -> text - // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A]) - case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName - case _ => - } - - // styles - if(node.isImplicitNode) - attr ++= implicitStyle - else if(node.isOutsideNode) - attr ++= outsideStyle - else if(node.isTraitNode) - attr ++= traitStyle - else if(node.isClassNode) - attr ++= classStyle - else if(node.isObjectNode) - attr ++= objectStyle - else if(node.isTypeNode) - attr ++= 
typeStyle - else - attr ++= defaultStyle - - // HTML label - var name = escape(node.name) - var img = - if(node.isTraitNode) "trait_diagram.png" - else if(node.isClassNode) "class_diagram.png" - else if(node.isObjectNode) "object_diagram.png" - else if(node.isTypeNode) "type_diagram.png" - else "" - - if(!img.equals("")) { - img = "" - name = name + " " - } - val label = "<" + - "" + img + "" + - "
    " + name + "
    >" - - // dot does not allow to specify a CSS class, therefore - // set the id to "{class}|{id}", which will be used in - // the transform method - val id = "graph" + counter + "_" + node2Index(node) - attr += ("id" -> (cssClass(node) + "|" + id)) - - // return dot string - "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n" + attributes.map{ case (key, value) => s"""$key="${escape(value)}"""" }.mkString("[", ", ", "]") } - /** - * Returns the CSS class for an edge connecting node1 and node2. - */ - private def cssClass(node1: Node, node2: Node): String = { - if (node1.isImplicitNode && node2.isThisNode) - "implicit-incoming" - else if (node1.isThisNode && node2.isImplicitNode) - "implicit-outgoing" - else - "inheritance" - } - - /** - * Returns the CSS class for a node. - */ - private def cssClass(node: Node): String = - if (node.isImplicitNode && incomingImplicitNodes.contains(node)) - "implicit-incoming" + cssBaseClass(node, "", " ") - else if (node.isImplicitNode) - "implicit-outgoing" + cssBaseClass(node, "", " ") - else if (node.isThisNode) - "this" + cssBaseClass(node, "", " ") - else if (node.isOutsideNode) - "outside" + cssBaseClass(node, "", " ") - else - cssBaseClass(node, "default", "") - - private def cssBaseClass(node: Node, default: String, space: String) = - if (node.isClassNode) - space + "class" - else if (node.isTraitNode) - space + "trait" - else if (node.isObjectNode) - space + "object" - else if (node.isTypeNode) - space + "type" - else - default - - /** - * Calls dot with a given dot string and returns the SVG output. 
- */ - private def generateSVG(dotInput: String, template: DocTemplateEntity) = { - val dotOutput = dotRunner.feedToDot(dotInput, template) - var tSVG = -System.currentTimeMillis - - val result = if (dotOutput != null) { - val src = scala.io.Source.fromString(dotOutput) - try { - val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, preserveWS = false) - val doc = cpa.document() - if (doc != null) - transform(doc.docElem) - else - NodeSeq.Empty - } catch { - case exc: Exception => - if (settings.docDiagramsDebug) { - settings.printMsg("\n\n**********************************************************************") - settings.printMsg("Encountered an error while generating page for " + template.qualifiedName) - settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t","")) - settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t","")) - settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ","")) - settings.printMsg("\n\n**********************************************************************") - } else { - settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.") - settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.") - } - NodeSeq.Empty - } - } else - NodeSeq.Empty - - tSVG += System.currentTimeMillis - DiagramStats.addSvgTime(tSVG) - - result - } /** - * Transforms the SVG generated by dot: - * - adds a class attribute to the SVG element - * - changes the path of the node images from absolute to relative - * - assigns id and class attributes to nodes and edges - * - removes title elements + * Returns the CSS class for an edge connecting node1 and node2. 
*/ - private def transform(e:scala.xml.Node): scala.xml.Node = e match { - // add an id and class attribute to the SVG element - case Elem(prefix, "svg", attribs, scope, child @ _*) => { - val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram" - Elem(prefix, "svg", attribs, scope, true, child map(x => transform(x)) : _*) % - new UnprefixedAttribute("id", "graph" + counter, Null) % - new UnprefixedAttribute("class", klass, Null) - } - // change the path of the node images from absolute to relative - case img @ => { - val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString - val file = href.substring(href.lastIndexOf("/") + 1, href.size) - img.asInstanceOf[Elem] % - new PrefixedAttribute("xlink", "href", pathToLib + file, Null) - } - // assign id and class attributes to edges and nodes: - // the id attribute generated by dot has the format: "{class}|{id}" - case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => { - var res = new Elem(prefix, "g", attribs, scope, true, (children map(x => transform(x))): _*) - val dotId = (g \ "@id").toString - if (dotId.count(_ == '|') == 1) { - val Array(klass, id) = dotId.toString.split("\\|") - /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple - * tests like execute 20K times and diff the output don't trigger the bug -- so it's up to us to place the image - * back in the node */ - val kind = getKind(klass) - if (kind != "") - if (((g \ "a" \ "image").isEmpty)) { - DiagramStats.addBrokenImage() - val xposition = getPosition(g, "x", -22) - val yposition = getPosition(g, "y", -11.3334) - if (xposition.isDefined && yposition.isDefined) { - val imageNode = - val anchorNode = (g \ "a") match { - case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) => - transform(new Elem(prefix, "a", attribs, scope, true, (children ++ imageNode): _*)) - case _ => - g \ "a" - } - res = new 
Elem(prefix, "g", attribs, scope, true, anchorNode: _*) - DiagramStats.addFixedImage() - } - } - res % new UnprefixedAttribute("id", id, Null) % - new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null) - } - else res - } - // remove titles - case { _* } => - scala.xml.Text("") - // apply recursively - case Elem(prefix, label, attribs, scope, child @ _*) => - Elem(prefix, label, attribs, scope, true, child map(x => transform(x)) : _*) - case x => x - } - - def getKind(klass: String): String = - if (klass.contains("class")) "class" - else if (klass.contains("trait")) "trait" - else if (klass.contains("object")) "object" - else "" - - def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = { - val node = g \ "a" \ "text" \ ("@" + axis) - if (node.isEmpty) - None - else - Some(node.toString.toDouble + offset) - } - - /* graph / node / edge attributes */ - - private val graphAttributes: Map[String, String] = Map( - "compound" -> "true", - "rankdir" -> "TB" - ) - - private val nodeAttributes = Map( - "shape" -> "rect", - "style" -> "filled,rounded", - "penwidth" -> "1", - "margin" -> "0.08,0.01", - "width" -> "0.0", - "height" -> "0.0", - "fontname" -> "Source Code Pro", - "fontsize" -> "8.00" - ) - - private val edgeAttributes = Map( - "color" -> "#d4d4d4", - "arrowsize" -> "0.7", - "fontcolor" -> "#aaaaaa", - "fontsize" -> "9.00", - "fontname" -> "Source Code Pro" - ) - - private val defaultStyle = Map( - "color" -> "#ababab", - "fillcolor" -> "#e1e1e1", - "fontcolor" -> "#7d7d7d", - "margin" -> "0.1,0.04" - ) - - private val implicitStyle = Map( - "color" -> "#ababab", - "fillcolor" -> "#e1e1e1", - "fontcolor" -> "#7d7d7d" - ) - - private val outsideStyle = Map( - "color" -> "#ababab", - "fillcolor" -> "#e1e1e1", - "fontcolor" -> "#7d7d7d" - ) - - private val traitStyle = Map( - "color" -> "#2E6D82", - "fillcolor" -> "#2E6D82", - "fontcolor" -> "#ffffff" - ) - - private val classStyle = Map( - "color" -> "#418565", - 
"fillcolor" -> "#418565", - "fontcolor" -> "#ffffff" - ) - - private val objectStyle = Map( - "color" -> "#103A51", - "fillcolor" -> "#103A51", - "fontcolor" -> "#ffffff" - ) - - private val typeStyle = Map( - "color" -> "#2E6D82", - "fillcolor" -> "#2E6D82", - "fontcolor" -> "#ffffff" - ) - - private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ") - - private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString - private val nodeAttributesStr = flatten(nodeAttributes) - private val edgeAttributesStr = flatten(edgeAttributes) + private def edgeClass(node1: Node, node2: Node): String = + if (node1.isImplicitNode && node2.isThisNode) "implicit-incoming" + else if (node1.isThisNode && node2.isImplicitNode) "implicit-outgoing" + else "inheritance" + + + + // styles + // if(node.isImplicitNode) + // attr ++= implicitStyle + // else if(node.isOutsideNode) + // attr ++= outsideStyle + // else if(node.isTraitNode) + // attr ++= traitStyle + // else if(node.isClassNode) + // attr ++= classStyle + // else if(node.isObjectNode) + // attr ++= objectStyle + // else if(node.isTypeNode) + // attr ++= typeStyle + // else + // attr ++= defaultStyle + // var img = + // if(node.isTraitNode) "trait_diagram.png" + // else if(node.isClassNode) "class_diagram.png" + // else if(node.isObjectNode) "object_diagram.png" + // else if(node.isTypeNode) "type_diagram.png" + // else "" + // + // if(!img.equals("")) { + // img = "" + // name = name + " " + // } + // val label = "<" + + // "" + img + "" + + // "
    " + name + "
    >" + +// +// def getKind(klass: String): String = +// if (klass.contains("class")) "class" +// else if (klass.contains("trait")) "trait" +// else if (klass.contains("object")) "object" +// else "" + +// +// /* graph / node / edge attributes */ +// +// private val graphAttributes: Map[String, String] = Map( +// "compound" -> "true", +// "rankdir" -> "TB" +// ) +// +// private val nodeAttributes = Map( +// "shape" -> "rect", +// "style" -> "filled,rounded", +// "penwidth" -> "1", +// "margin" -> "0.08,0.01", +// "width" -> "0.0", +// "height" -> "0.0", +// "fontname" -> "Source Code Pro", +// "fontsize" -> "8.00" +// ) +// +// private val edgeAttributes = Map( +// "color" -> "#d4d4d4", +// "arrowsize" -> "0.7", +// "fontcolor" -> "#aaaaaa", +// "fontsize" -> "9.00", +// "fontname" -> "Source Code Pro" +// ) +// +// private val defaultStyle = Map( +// "color" -> "#ababab", +// "fillcolor" -> "#e1e1e1", +// "fontcolor" -> "#7d7d7d", +// "margin" -> "0.1,0.04" +// ) +// +// private val implicitStyle = Map( +// "color" -> "#ababab", +// "fillcolor" -> "#e1e1e1", +// "fontcolor" -> "#7d7d7d" +// ) +// +// private val outsideStyle = Map( +// "color" -> "#ababab", +// "fillcolor" -> "#e1e1e1", +// "fontcolor" -> "#7d7d7d" +// ) +// +// private val traitStyle = Map( +// "color" -> "#2E6D82", +// "fillcolor" -> "#2E6D82", +// "fontcolor" -> "#ffffff" +// ) +// +// private val classStyle = Map( +// "color" -> "#418565", +// "fillcolor" -> "#418565", +// "fontcolor" -> "#ffffff" +// ) +// +// private val objectStyle = Map( +// "color" -> "#103A51", +// "fillcolor" -> "#103A51", +// "fontcolor" -> "#ffffff" +// ) +// +// private val typeStyle = Map( +// "color" -> "#2E6D82", +// "fillcolor" -> "#2E6D82", +// "fontcolor" -> "#ffffff" +// ) + + +// +// private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString +// private val nodeAttributesStr = flatten(nodeAttributes) +// private val edgeAttributesStr = 
flatten(edgeAttributes) } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala deleted file mode 100644 index 735a54e5b4aa..000000000000 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.nsc -package doc -package html -package page -package diagram - -import java.io.InputStream -import java.io.OutputStream -import java.io.InputStreamReader -import java.io.OutputStreamWriter -import java.io.BufferedWriter -import java.io.BufferedReader -import scala.sys.process._ -import scala.concurrent.SyncVar - -import model._ - -/** This class takes care of running the graphviz dot utility */ -class DotRunner(settings: doc.Settings) { - - private[this] var dotAttempts = 0 - private[this] var dotProcess: DotProcess = null - - def feedToDot(dotInput: String, template: DocTemplateEntity): String = { - - if (dotProcess == null) { - if (dotAttempts < settings.docDiagramsDotRestart.value + 1) { - if (dotAttempts > 0) - settings.printMsg("Graphviz will be restarted...\n") - dotAttempts += 1 - dotProcess = new DotProcess(settings) - } else - return null - } - - val tStart = System.currentTimeMillis - val result = dotProcess.feedToDot(dotInput, template.qualifiedName) - val tFinish = System.currentTimeMillis - DiagramStats.addDotRunningTime(tFinish - tStart) - - if (result == null) { - dotProcess.cleanup() - dotProcess = null - if (dotAttempts == 1 + settings.docDiagramsDotRestart.value) { - settings.printMsg("\n") - settings.printMsg("**********************************************************************") - 
settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool") - settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:") - settings.printMsg("") - val baseList = List(settings.docDiagramsDebug, - settings.docDiagramsDotPath, - settings.docDiagramsDotRestart, - settings.docDiagramsDotTimeout) - val width = (baseList map (_.helpSyntax.length)).max - def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s") format (s.helpSyntax) + " " + s.helpDescription - baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett))) - settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.") - settings.printMsg("**********************************************************************\n\n") - - } - } - - result - } - - def cleanup() = - if (dotProcess != null) - dotProcess.cleanup() -} - -class DotProcess(settings: doc.Settings) { - - @volatile var error: Boolean = false // signal an error - val inputString = new SyncVar[String] // used for the dot process input - val outputString = new SyncVar[String] // used for the dot process output - val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging - - // set in only one place, in the main thread - var process: Process = null - var templateName: String = "" - var templateInput: String = "" - - def feedToDot(input: String, template: String): String = { - - templateName = template - templateInput = input - - try { - - // process creation - if (process == null) { - val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_)) - val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg") - process = processBuilder.run(procIO) - } - - // pass the input and wait for the output - assert(!inputString.isSet) - assert(!outputString.isSet) - inputString.put(input) - var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000L) - if (error) 
result = null - - result - - } catch { - case exc: Throwable => - errorBuffer.append(" Main thread in " + templateName + ": " + - (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc)) - error = true - return null - } - } - - def cleanup(): Unit = { - - // we'll need to know if there was any error for reporting - val _error = error - - if (process != null) { - // if there's no error, this should exit cleanly - if (!error) feedToDot("", "") - - // just in case there's any thread hanging, this will take it out of the loop - error = true - process.destroy() - // we'll need to unblock the input again - if (!inputString.isSet) inputString.put("") - if (outputString.isSet) outputString.take() - } - - if (_error) { - if (settings.docDiagramsDebug.value) { - settings.printMsg("\n**********************************************************************") - settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.") - settings.printMsg("\nThe following is the log of the failure:") - settings.printMsg(errorBuffer.toString) - settings.printMsg(" Cleanup: Last template: " + templateName) - settings.printMsg(" Cleanup: Last dot input: \n " + templateInput.replaceAll("\n","\n ") + "\n") - settings.printMsg(" Cleanup: Dot path: " + settings.docDiagramsDotPath.value) - if (process != null) - settings.printMsg(" Cleanup: Dot exit code: " + process.exitValue) - settings.printMsg("**********************************************************************") - } else { - // we shouldn't just sit there for 50s not reporting anything, no? 
- settings.printMsg("Graphviz dot encountered an error when generating the diagram for:") - settings.printMsg(templateName) - settings.printMsg("These are usually spurious errors, but if you notice a persistent error on") - settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.") - } - } - } - - /* The standard input passing function */ - private[this] def inputFn(stdin: OutputStream): Unit = { - val writer = new BufferedWriter(new OutputStreamWriter(stdin)) - try { - var input = inputString.take() - - while (!error) { - if (input == "") { - // empty => signal to finish - stdin.close() - return - } else { - // send output to dot - writer.write(input + "\n\n") - writer.flush() - } - - if (!error) input = inputString.take() - } - stdin.close() - } catch { - case exc: Throwable => - error = true - stdin.close() - errorBuffer.append(" Input thread in " + templateName + ": Exception: " + exc + "\n") - } - } - - private[this] def outputFn(stdOut: InputStream): Unit = { - val reader = new BufferedReader(new InputStreamReader(stdOut)) - val buffer: StringBuilder = new StringBuilder() - try { - var line = reader.readLine - while (!error && line != null) { - buffer.append(line + "\n") - // signal the last element in the svg (only for output) - if (line == "") { - outputString.put(buffer.toString) - buffer.setLength(0) - } - if (error) { stdOut.close(); return } - line = reader.readLine - } - assert(!outputString.isSet) - outputString.put(buffer.toString) - stdOut.close() - } catch { - case exc: Throwable => - error = true - stdOut.close() - errorBuffer.append(" Output thread in " + templateName + ": Exception: " + exc + "\n") - } - } - - private[this] def errorFn(stdErr: InputStream): Unit = { - val reader = new BufferedReader(new InputStreamReader(stdErr)) - try { - var line = reader.readLine - while (line != null) { - errorBuffer.append(" DOT : " + line + "\n") - error = true - line = reader.readLine - } - 
stdErr.close() - } catch { - case exc: Throwable => - error = true - stdErr.close() - errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n") - } - } -} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/annotation.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/annotation.svg new file mode 100644 index 000000000000..66063fa3abf6 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/annotation.svg @@ -0,0 +1,54 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @ + + + + + + + diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/annotation_comp.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/annotation_comp.svg new file mode 100644 index 000000000000..fe6dcd7b0ebf --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/annotation_comp.svg @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @ + + + + + + + + diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css index 08add0efa1ed..9dcafdb1fe61 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css @@ -30,7 +30,7 @@ } .diagram-container { - display: none; + display: block; } .diagram-container > span.toggle { @@ -39,13 +39,15 @@ .diagram { overflow: hidden; + display: none; padding-top:15px; } .diagram svg { display: block; - position: absolute; - visibility: hidden; + position: static; + visibility: visible; + z-index: auto; margin: auto; } @@ -73,6 +75,11 @@ text-decoration: none; } +#inheritance-diagram { + padding-bottom: 20px; +} + + #inheritance-diagram-container > span.toggle { z-index: 2; } @@ -136,68 +143,35 @@ svg text { font-size: 8.5px; } -/* try to move the node text 1px in order to be vertically - * centered (does 
not work in all browsers) - */ -svg .node text { - transform: translate(0px,1px); - -ms-transform: translate(0px,1px); - -webkit-transform: translate(0px,1px); - -o-transform: translate(0px,1px); - -moz-transform: translate(0px,1px); -} - -/* hover effect for edges */ - -svg .edge.over text, -svg .edge.implicit-incoming.over polygon, -svg .edge.implicit-outgoing.over polygon { - fill: #103A51; -} - -svg .edge.over path, -svg .edge.over polygon { - stroke: #103A51; -} -/* for hover effect on nodes in diagrams, edit the following */ -svg.class-diagram .node {} -svg.class-diagram .node.this {} -svg.class-diagram .node.over {} - -svg .node.over polygon { - stroke: #202020; +svg { + border: 1px solid #999; + overflow: hidden; } -/* hover effect for nodes in package diagrams */ - -svg.package-diagram .node.class.over polygon, -svg.class-diagram .node.this.class.over polygon { - fill: #098552; - fill: #04663e; +svg .node { + white-space: nowrap; } -svg.package-diagram .node.trait.over polygon, -svg.class-diagram .node.this.trait.over polygon { - fill: #3c7b9b; - fill: #235d7b; +svg .node rect, +svg .node circle, +svg .node ellipse { + stroke: #333; + fill: #fff; + stroke-width: 1.5px; } -svg.package-diagram .node.type.over polygon, -svg.class-diagram .node.this.type.over polygon { - fill: #098552; - fill: #04663e; +svg .cluster rect { + stroke: #333; + fill: #000; + fill-opacity: 0.1; + stroke-width: 1.5px; } - -svg.package-diagram .node.object.over polygon { - fill: #183377; +svg .edgePath path.path { + stroke: #333; + stroke-width: 1.5px; + fill: none; } -svg.package-diagram .node.outside.over polygon { - fill: #d4d4d4; -} -svg.package-diagram .node.default.over polygon { - fill: #d4d4d4; -} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js deleted file mode 100644 index b13732760a4b..000000000000 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js +++ /dev/null 
@@ -1,240 +0,0 @@ -/** - * JavaScript functions enhancing the SVG diagrams. - * - * @author Damien Obrist - */ - -var diagrams = {}; - -/** - * Initializes the diagrams in the main window. - */ -$(document).ready(function() -{ - // hide diagrams in browsers not supporting SVG - if(Modernizr && !Modernizr.inlinesvg) - return; - - if($("#content-diagram").length) - $("#inheritance-diagram").css("padding-bottom", "20px"); - - $(".diagram-container").css("display", "block"); - - $(".diagram").each(function() { - // store initial dimensions - $(this).data("width", $("svg", $(this)).width()); - $(this).data("height", $("svg", $(this)).height()); - // store unscaled clone of SVG element - $(this).data("svg", $(this).get(0).childNodes[0].cloneNode(true)); - }); - - // make diagram visible, hide container - $(".diagram").css("display", "none"); - $(".diagram svg").css({ - "position": "static", - "visibility": "visible", - "z-index": "auto" - }); - - // enable linking to diagrams - if($(location).attr("hash") == "#inheritance-diagram") { - diagrams.toggle($("#inheritance-diagram-container"), true); - } else if($(location).attr("hash") == "#content-diagram") { - diagrams.toggle($("#content-diagram-container"), true); - } - - $(".diagram-link").click(function() { - diagrams.toggle($(this).parent()); - }); - - // register resize function - $(window).resize(diagrams.resize); - - // don't bubble event to parent div - // when clicking on a node of a resized - // diagram - $("svg a").click(function(e) { - e.stopPropagation(); - }); - - diagrams.initHighlighting(); - - $("button#diagram-fs").click(function() { - $(".diagram-container").toggleClass("full-screen"); - $(".diagram-container > div.diagram").css({ - height: $("svg").height() + "pt" - }); - - $panzoom.panzoom("reset", { animate: false, contain: false }); - }); -}); - -/** - * Initializes highlighting for nodes and edges. 
- */ -diagrams.initHighlighting = function() -{ - // helper function since $.hover doesn't work in IE - - function hover(elements, fn) - { - elements.mouseover(fn); - elements.mouseout(fn); - } - - // inheritance edges - - hover($("svg .edge.inheritance"), function(evt){ - var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass; - var parts = $(this).attr("id").split("_"); - toggleClass($("#" + parts[0] + "_" + parts[1])); - toggleClass($("#" + parts[0] + "_" + parts[2])); - toggleClass($(this)); - }); - - // nodes - - hover($("svg .node"), function(evt){ - var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass; - toggleClass($(this)); - var parts = $(this).attr("id").split("_"); - var index = parts[1]; - $("svg#" + parts[0] + " .edge.inheritance").each(function(){ - var parts2 = $(this).attr("id").split("_"); - if(parts2[1] == index) - { - toggleClass($("#" + parts2[0] + "_" + parts2[2])); - toggleClass($(this)); - } else if(parts2[2] == index) - { - toggleClass($("#" + parts2[0] + "_" + parts2[1])); - toggleClass($(this)); - } - }); - }); - - // incoming implicits - - hover($("svg .node.implicit-incoming"), function(evt){ - var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass; - toggleClass($(this)); - toggleClass($("svg .edge.implicit-incoming")); - toggleClass($("svg .node.this")); - }); - - hover($("svg .edge.implicit-incoming"), function(evt){ - var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass; - toggleClass($(this)); - toggleClass($("svg .node.this")); - $("svg .node.implicit-incoming").each(function(){ - toggleClass($(this)); - }); - }); - - // implicit outgoing nodes - - hover($("svg .node.implicit-outgoing"), function(evt){ - var toggleClass = evt.type == "mouseout" ? 
diagrams.removeClass : diagrams.addClass; - toggleClass($(this)); - toggleClass($("svg .edge.implicit-outgoing")); - toggleClass($("svg .node.this")); - }); - - hover($("svg .edge.implicit-outgoing"), function(evt){ - var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass; - toggleClass($(this)); - toggleClass($("svg .node.this")); - $("svg .node.implicit-outgoing").each(function(){ - toggleClass($(this)); - }); - }); -}; - -/** - * Resizes the diagrams according to the available width. - */ -diagrams.resize = function() { - // available width - var availableWidth = $(".diagram-container").width(); - - $(".diagram-container").each(function() { - // unregister click event on whole div - $(".diagram", this).unbind("click"); - var diagramWidth = $(".diagram", this).data("width"); - var diagramHeight = $(".diagram", this).data("height"); - - if (diagramWidth > availableWidth) { - // resize diagram - var height = diagramHeight / diagramWidth * availableWidth; - $(".diagram svg", this).width(availableWidth); - $(".diagram svg", this).height(height); - } else { - // restore full size of diagram - $(".diagram svg", this).width(diagramWidth); - $(".diagram svg", this).height(diagramHeight); - // don't show custom cursor any more - $(".diagram", this).removeClass("magnifying"); - } - }); -}; - -/** - * Shows or hides a diagram depending on its current state. 
- */ -diagrams.toggle = function(container, dontAnimate) -{ - // change class of link - $(".diagram-link", container).toggleClass("open"); - // get element to show / hide - var div = $(".diagram", container); - if (div.is(':visible')) { - $(".diagram-help", container).hide(); - div.unbind("click"); - div.slideUp(100); - - $("#diagram-controls", container).hide(); - $("#inheritance-diagram-container").unbind('mousewheel.focal'); - } else { - diagrams.resize(); - if(dontAnimate) - div.show(); - else - div.slideDown(100); - $(".diagram-help", container).show(); - - $("#diagram-controls", container).show(); - - $(".diagram-container").on('mousewheel.focal', function(e) { - e.preventDefault(); - var delta = e.delta || e.originalEvent.wheelDelta; - var zoomOut = delta ? delta < 0 : e.originalEvent.deltaY > 0; - $panzoom.panzoom('zoom', zoomOut, { - increment: 0.1, - animate: true, - focal: e - }); - }); - } -}; - -/** - * Helper method that adds a class to a SVG element. - */ -diagrams.addClass = function(svgElem, newClass) { - newClass = newClass || "over"; - var classes = svgElem.attr("class"); - if ($.inArray(newClass, classes.split(/\s+/)) == -1) { - classes += (classes ? ' ' : '') + newClass; - svgElem.attr("class", classes); - } -}; - -/** - * Helper method that removes a class from a SVG element. 
- */ -diagrams.removeClass = function(svgElem, oldClass) { - oldClass = oldClass || "over"; - var classes = svgElem.attr("class"); - classes = $.grep(classes.split(/\s+/), function(n, i) { return n != oldClass; }).join(' '); - svgElem.attr("class", classes); -}; diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css index 488bf3b8b56d..d4e8ec2dd271 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css @@ -457,7 +457,6 @@ div#search-progress > div#progress-fill { } div#content-scroll-container { - position: absolute; top: 0; right: 0; bottom: 0; @@ -480,6 +479,13 @@ div#content-container > div#content { margin: 4em auto 0; } +a.anchorToMember { + display: inline-block; + position: relative; + top: -5em; + width: 0; +} + div#content-container > div#subpackage-spacer { float: right; height: 100%; @@ -549,6 +555,11 @@ div#packages > ul > li > a.abstract.type { background-size: 0.9rem; } +div#packages > ul > li > a.annotation { + background: url("annotation.svg") no-repeat center; + background-size: 0.9rem; +} + div#packages > ul > li > a { text-decoration: none !important; margin-left: 1px; @@ -706,6 +717,12 @@ div#results-content > div#entity-results > ul.entities > li > .icon.class { background-size: 1em 1em; } +div#results-content > div#member-results > ul.entities > li > .icon.annotation, +div#results-content > div#entity-results > ul.entities > li > .icon.annotation { + background: url("annotation.svg") no-repeat center; + background-size: 1em 1em; +} + div#results-content > div#member-results > ul.entities > li > .icon.trait, div#results-content > div#entity-results > ul.entities > li > .icon.trait { background: url("trait.svg") no-repeat center; diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index 
12f6ed6889bc..c3953901794a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -283,7 +283,7 @@ function compilePattern(query) { } } -/** Searches packages for entites matching the search query using a regex +/** Searches packages for entities matching the search query using a regex * * @param {[Object]} pack: package being searched * @param {RegExp} regExp: a regular expression for finding matching entities diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js deleted file mode 100644 index 03bfd60c5e4d..000000000000 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js +++ /dev/null @@ -1,8 +0,0 @@ -/*! - * jQuery Mousewheel 3.1.13 - * - * Copyright 2015 jQuery Foundation and other contributors - * Released under the MIT license. - * http://jquery.org/license - */ -!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):"object"==typeof exports?module.exports=a:a(jQuery)}(function(a){function b(b){var g=b||window.event,h=i.call(arguments,1),j=0,l=0,m=0,n=0,o=0,p=0;if(b=a.event.fix(g),b.type="mousewheel","detail"in g&&(m=-1*g.detail),"wheelDelta"in g&&(m=g.wheelDelta),"wheelDeltaY"in g&&(m=g.wheelDeltaY),"wheelDeltaX"in g&&(l=-1*g.wheelDeltaX),"axis"in g&&g.axis===g.HORIZONTAL_AXIS&&(l=-1*m,m=0),j=0===m?l:m,"deltaY"in g&&(m=-1*g.deltaY,j=m),"deltaX"in g&&(l=g.deltaX,0===m&&(j=-1*l)),0!==m||0!==l){if(1===g.deltaMode){var q=a.data(this,"mousewheel-line-height");j*=q,m*=q,l*=q}else if(2===g.deltaMode){var r=a.data(this,"mousewheel-page-height");j*=r,m*=r,l*=r}if(n=Math.max(Math.abs(m),Math.abs(l)),(!f||f>n)&&(f=n,d(g,n)&&(f/=40)),d(g,n)&&(j/=40,l/=40,m/=40),j=Math[j>=1?"floor":"ceil"](j/f),l=Math[l>=1?"floor":"ceil"](l/f),m=Math[m>=1?"floor":"ceil"](m/f),k.settings.normalizeOffset&&this.getBoundingClientRect){var 
s=this.getBoundingClientRect();o=b.clientX-s.left,p=b.clientY-s.top}return b.deltaX=l,b.deltaY=m,b.deltaFactor=f,b.offsetX=o,b.offsetY=p,b.deltaMode=0,h.unshift(b,j,l,m),e&&clearTimeout(e),e=setTimeout(c,200),(a.event.dispatch||a.event.handle).apply(this,h)}}function c(){f=null}function d(a,b){return k.settings.adjustOldDeltas&&"mousewheel"===a.type&&b%120===0}var e,f,g=["wheel","mousewheel","DOMMouseScroll","MozMousePixelScroll"],h="onwheel"in document||document.documentMode>=9?["wheel"]:["mousewheel","DomMouseScroll","MozMousePixelScroll"],i=Array.prototype.slice;if(a.event.fixHooks)for(var j=g.length;j;)a.event.fixHooks[g[--j]]=a.event.mouseHooks;var k=a.event.special.mousewheel={version:"3.1.12",setup:function(){if(this.addEventListener)for(var c=h.length;c;)this.addEventListener(h[--c],b,!1);else this.onmousewheel=b;a.data(this,"mousewheel-line-height",k.getLineHeight(this)),a.data(this,"mousewheel-page-height",k.getPageHeight(this))},teardown:function(){if(this.removeEventListener)for(var c=h.length;c;)this.removeEventListener(h[--c],b,!1);else this.onmousewheel=null;a.removeData(this,"mousewheel-line-height"),a.removeData(this,"mousewheel-page-height")},getLineHeight:function(b){var c=a(b),d=c["offsetParent"in a.fn?"offsetParent":"parent"]();return d.length||(d=a("body")),parseInt(d.css("fontSize"),10)||parseInt(c.css("fontSize"),10)||16},getPageHeight:function(b){return a(b).height()},settings:{adjustOldDeltas:!0,normalizeOffset:!0}};a.fn.extend({mousewheel:function(a){return a?this.bind("mousewheel",a):this.trigger("mousewheel")},unmousewheel:function(a){return this.unbind("mousewheel",a)}})}); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js deleted file mode 100644 index 3a52c5938136..000000000000 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js +++ /dev/null @@ -1,2 +0,0 @@ -/* 
jquery.panzoom.min.js 3.2.3 (c) Timmy Willison - MIT License */ -!function(a,b){"function"==typeof define&&define.amd?define(["jquery"],function(c){return b(a,c)}):"object"==typeof exports?b(a,require("jquery")):b(a,a.jQuery)}("undefined"!=typeof window?window:this,function(a,b){"use strict";function c(a,b){for(var c=a.length;--c;)if(Math.round(+a[c])!==Math.round(+b[c]))return!1;return!0}function d(a){var c={range:!0,animate:!0};return"boolean"==typeof a?c.animate=a:b.extend(c,a),c}function e(a,c,d,e,f,g,h,i,j){"array"===b.type(a)?this.elements=[+a[0],+a[2],+a[4],+a[1],+a[3],+a[5],0,0,1]:this.elements=[a,c,d,e,f,g,h||0,i||0,j||1]}function f(a,b,c){this.elements=[a,b,c]}function g(a,c){if(!(this instanceof g))return new g(a,c);1!==a.nodeType&&b.error("Panzoom called on non-Element node"),b.contains(h,a)||b.error("Panzoom element must be attached to the document");var d=b.data(a,i);if(d)return d;this.options=c=b.extend({},g.defaults,c),this.elem=a;var e=this.$elem=b(a);this.$set=c.$set&&c.$set.length?c.$set:e,this.$doc=b(a.ownerDocument||h),this.$parent=e.parent(),this.parent=this.$parent[0],this.isSVG=n.test(a.namespaceURI)&&"svg"!==a.nodeName.toLowerCase(),this.panning=!1,this._buildTransform(),this._transform=b.cssProps.transform?b.cssProps.transform.replace(m,"-$1").toLowerCase():"transform",this._buildTransition(),this.resetDimensions();var f=b(),j=this;b.each(["$zoomIn","$zoomOut","$zoomRange","$reset"],function(a,b){j[b]=c[b]||f}),this.enable(),this.scale=this.getMatrix()[0],this._checkPanWhenZoomed(),b.data(a,i,this)}var h=a.document,i="__pz__",j=Array.prototype.slice,k=/trident\/7./i,l=function(){if(k.test(navigator.userAgent))return!1;var a=h.createElement("input");return a.setAttribute("oninput","return"),"function"==typeof a.oninput}(),m=/([A-Z])/g,n=/^http:[\w\.\/]+svg$/,o="(\\-?\\d[\\d\\.e-]*)",p=new RegExp("^matrix\\("+o+"\\,?\\s*"+o+"\\,?\\s*"+o+"\\,?\\s*"+o+"\\,?\\s*"+o+"\\,?\\s*"+o+"\\)$");return e.prototype={x:function(a){var b=a instanceof 
f,c=this.elements,d=a.elements;return b&&3===d.length?new f(c[0]*d[0]+c[1]*d[1]+c[2]*d[2],c[3]*d[0]+c[4]*d[1]+c[5]*d[2],c[6]*d[0]+c[7]*d[1]+c[8]*d[2]):d.length===c.length&&new e(c[0]*d[0]+c[1]*d[3]+c[2]*d[6],c[0]*d[1]+c[1]*d[4]+c[2]*d[7],c[0]*d[2]+c[1]*d[5]+c[2]*d[8],c[3]*d[0]+c[4]*d[3]+c[5]*d[6],c[3]*d[1]+c[4]*d[4]+c[5]*d[7],c[3]*d[2]+c[4]*d[5]+c[5]*d[8],c[6]*d[0]+c[7]*d[3]+c[8]*d[6],c[6]*d[1]+c[7]*d[4]+c[8]*d[7],c[6]*d[2]+c[7]*d[5]+c[8]*d[8])},inverse:function(){var a=1/this.determinant(),b=this.elements;return new e(a*(b[8]*b[4]-b[7]*b[5]),a*-(b[8]*b[1]-b[7]*b[2]),a*(b[5]*b[1]-b[4]*b[2]),a*-(b[8]*b[3]-b[6]*b[5]),a*(b[8]*b[0]-b[6]*b[2]),a*-(b[5]*b[0]-b[3]*b[2]),a*(b[7]*b[3]-b[6]*b[4]),a*-(b[7]*b[0]-b[6]*b[1]),a*(b[4]*b[0]-b[3]*b[1]))},determinant:function(){var a=this.elements;return a[0]*(a[8]*a[4]-a[7]*a[5])-a[3]*(a[8]*a[1]-a[7]*a[2])+a[6]*(a[5]*a[1]-a[4]*a[2])}},f.prototype.e=e.prototype.e=function(a){return this.elements[a]},g.rmatrix=p,g.defaults={eventNamespace:".panzoom",transition:!0,cursor:"move",disablePan:!1,disableZoom:!1,disableXAxis:!1,disableYAxis:!1,which:1,increment:.3,linearZoom:!1,panOnlyWhenZoomed:!1,minScale:.3,maxScale:6,rangeStep:.05,duration:200,easing:"ease-in-out",contain:!1},g.prototype={constructor:g,instance:function(){return this},enable:function(){this._initStyle(),this._bind(),this.disabled=!1},disable:function(){this.disabled=!0,this._resetStyle(),this._unbind()},isDisabled:function(){return this.disabled},destroy:function(){this.disable(),b.removeData(this.elem,i)},resetDimensions:function(){this.container=this.parent.getBoundingClientRect();var 
a=this.elem,c=a.getBoundingClientRect(),d=Math.abs(this.scale);this.dimensions={width:c.width,height:c.height,left:b.css(a,"left",!0)||0,top:b.css(a,"top",!0)||0,border:{top:b.css(a,"borderTopWidth",!0)*d||0,bottom:b.css(a,"borderBottomWidth",!0)*d||0,left:b.css(a,"borderLeftWidth",!0)*d||0,right:b.css(a,"borderRightWidth",!0)*d||0},margin:{top:b.css(a,"marginTop",!0)*d||0,left:b.css(a,"marginLeft",!0)*d||0}}},reset:function(a){a=d(a);var b=this.setMatrix(this._origTransform,a);a.silent||this._trigger("reset",b)},resetZoom:function(a){a=d(a);var b=this.getMatrix(this._origTransform);a.dValue=b[3],this.zoom(b[0],a)},resetPan:function(a){var b=this.getMatrix(this._origTransform);this.pan(b[4],b[5],d(a))},setTransform:function(a){for(var c=this.$set,d=c.length;d--;)b.style(c[d],"transform",a),this.isSVG&&c[d].setAttribute("transform",a)},getTransform:function(a){var c=this.$set,d=c[0];return a?this.setTransform(a):(a=b.style(d,"transform"),!this.isSVG||a&&"none"!==a||(a=b.attr(d,"transform")||"none")),"none"===a||p.test(a)||this.setTransform(a=b.css(d,"transform")),a||"none"},getMatrix:function(a){var b=p.exec(a||this.getTransform());return b&&b.shift(),b||[1,0,0,1,0,0]},getScale:function(a){return Math.sqrt(Math.pow(a[0],2)+Math.pow(a[1],2))},setMatrix:function(a,c){if(!this.disabled){c||(c={}),"string"==typeof a&&(a=this.getMatrix(a));var d=this.getScale(a),e=void 0!==c.contain?c.contain:this.options.contain;if(e){var f=c.dims;f||(this.resetDimensions(),f=this.dimensions);var g,h,i,j=this.container,k=f.width,l=f.height,m=j.width,n=j.height,o=m/k,p=n/l;"center"!==this.$parent.css("textAlign")||"inline"!==b.css(this.elem,"display")?(i=(k-this.elem.offsetWidth)/2,g=i-f.border.left,h=k-m-i+f.border.right):g=h=(k-m)/2;var 
q=(l-n)/2+f.border.top,r=(l-n)/2-f.border.top-f.border.bottom;a[4]="invert"===e||"automatic"===e&&o<1.01?Math.max(Math.min(a[4],g-f.border.left),-h):Math.min(Math.max(a[4],g),-h),a[5]="invert"===e||"automatic"===e&&p<1.01?Math.max(Math.min(a[5],q-f.border.top),-r):Math.min(Math.max(a[5],q),-r)}if("skip"!==c.animate&&this.transition(!c.animate),c.range&&this.$zoomRange.val(d),this.options.disableXAxis||this.options.disableYAxis){var s=this.getMatrix();this.options.disableXAxis&&(a[4]=s[4]),this.options.disableYAxis&&(a[5]=s[5])}return this.setTransform("matrix("+a.join(",")+")"),this.scale=d,this._checkPanWhenZoomed(d),c.silent||this._trigger("change",a),a}},isPanning:function(){return this.panning},transition:function(a){if(this._transition)for(var c=a||!this.options.transition?"none":this._transition,d=this.$set,e=d.length;e--;)b.style(d[e],"transition")!==c&&b.style(d[e],"transition",c)},pan:function(a,b,c){if(!this.options.disablePan){c||(c={});var d=c.matrix;d||(d=this.getMatrix()),c.relative&&(a+=+d[4],b+=+d[5]),d[4]=a,d[5]=b,this.setMatrix(d,c),c.silent||this._trigger("pan",d[4],d[5])}},zoom:function(a,c){"object"==typeof a?(c=a,a=null):c||(c={});var d=b.extend({},this.options,c);if(!d.disableZoom){var g=!1,h=d.matrix||this.getMatrix(),i=new e(h),j=this.getScale(h);"number"!=typeof a?(a=d.linearZoom?1+d.increment*(a?-1:1)/j:a?1/(1+d.increment):1+d.increment,g=!0):a=1/j,a=Math.max(Math.min(a,d.maxScale/j),d.minScale/j);var k=i.x(new e(a,0,0,0,"number"==typeof d.dValue?d.dValue/j:a,0)),l=d.focal;if(l&&!d.disablePan){this.resetDimensions();var m=d.dims=this.dimensions,n=l.clientX,o=l.clientY;this.isSVG||(n-=m.width/j/2,o-=m.height/j/2);var p=new f(n,o,1),q=this.parentOffset||this.$parent.offset(),r=new e(1,0,q.left-this.$doc.scrollLeft(),0,1,q.top-this.$doc.scrollTop()),s=i.inverse().x(r.inverse().x(p));i=i.x(new 
e([a,0,0,a,0,0])),p=r.x(i.x(s)),h[4]=+h[4]+(n-p.e(0)),h[5]=+h[5]+(o-p.e(1))}h[0]=k.e(0),h[1]=k.e(3),h[2]=k.e(1),h[3]=k.e(4),this.setMatrix(h,{animate:void 0!==d.animate?d.animate:g,range:!d.noSetRange}),d.silent||this._trigger("zoom",a,d)}},option:function(a,c){var d;if(!a)return b.extend({},this.options);if("string"==typeof a){if(1===arguments.length)return void 0!==this.options[a]?this.options[a]:null;d={},d[a]=c}else d=a;this._setOptions(d)},_setOptions:function(a){b.each(a,b.proxy(function(a,c){switch(a){case"disablePan":this._resetStyle();case"$zoomIn":case"$zoomOut":case"$zoomRange":case"$reset":case"disableZoom":case"onStart":case"onChange":case"onZoom":case"onPan":case"onEnd":case"onReset":case"eventNamespace":this._unbind()}switch(this.options[a]=c,a){case"disablePan":this._initStyle();case"$zoomIn":case"$zoomOut":case"$zoomRange":case"$reset":this[a]=c;case"disableZoom":case"onStart":case"onChange":case"onZoom":case"onPan":case"onEnd":case"onReset":case"eventNamespace":this._bind();break;case"cursor":b.style(this.elem,"cursor",c);break;case"minScale":this.$zoomRange.attr("min",c);break;case"maxScale":this.$zoomRange.attr("max",c);break;case"rangeStep":this.$zoomRange.attr("step",c);break;case"startTransform":this._buildTransform();break;case"duration":case"easing":this._buildTransition();case"transition":this.transition();break;case"panOnlyWhenZoomed":this._checkPanWhenZoomed();break;case"$set":c instanceof b&&c.length&&(this.$set=c,this._initStyle(),this._buildTransform())}},this))},_checkPanWhenZoomed:function(a){var b=this.options;if(b.panOnlyWhenZoomed){a||(a=this.getMatrix()[0]);var c=a<=b.minScale;b.disablePan!==c&&this.option("disablePan",c)}},_initStyle:function(){var a={"transform-origin":this.isSVG?"0 0":"50% 50%"};this.options.disablePan||(a.cursor=this.options.cursor),this.$set.css(a);var 
c=this.$parent;c.length&&!b.nodeName(this.parent,"body")&&(a={overflow:"hidden"},"static"===c.css("position")&&(a.position="relative"),c.css(a))},_resetStyle:function(){this.$elem.css({cursor:"",transition:""}),this.$parent.css({overflow:"",position:""})},_bind:function(){var a=this,c=this.options,d=c.eventNamespace,e="mousedown"+d+" pointerdown"+d+" MSPointerDown"+d,f="touchstart"+d+" "+e,h="touchend"+d+" click"+d+" pointerup"+d+" MSPointerUp"+d,i={},j=this.$reset,k=this.$zoomRange;if(b.each(["Start","Change","Zoom","Pan","End","Reset"],function(){var a=c["on"+this];b.isFunction(a)&&(i["panzoom"+this.toLowerCase()+d]=a)}),c.disablePan&&c.disableZoom||(i[f]=function(b){var d;("touchstart"===b.type?!(d=b.touches||b.originalEvent.touches)||(1!==d.length||c.disablePan)&&2!==d.length:c.disablePan||(b.which||b.originalEvent.which)!==c.which)||(b.preventDefault(),b.stopPropagation(),a._startMove(b,d))},3===c.which&&(i.contextmenu=!1)),this.$elem.on(i),j.length&&j.on(h,function(b){b.preventDefault(),a.reset()}),k.length&&k.attr({step:c.rangeStep===g.defaults.rangeStep&&k.attr("step")||c.rangeStep,min:c.minScale,max:c.maxScale}).prop({value:this.getMatrix()[0]}),!c.disableZoom){var m=this.$zoomIn,n=this.$zoomOut;m.length&&n.length&&(m.on(h,function(b){b.preventDefault(),a.zoom()}),n.on(h,function(b){b.preventDefault(),a.zoom(!0)})),k.length&&(i={},i[e]=function(){a.transition(!0)},i[(l?"input":"change")+d]=function(){a.zoom(+this.value,{noSetRange:!0})},k.on(i))}},_unbind:function(){this.$elem.add(this.$zoomIn).add(this.$zoomOut).add(this.$reset).off(this.options.eventNamespace)},_buildTransform:function(){return this._origTransform=this.getTransform(this.options.startTransform)},_buildTransition:function(){if(this._transform){var a=this.options;this._transition=this._transform+" "+a.duration+"ms "+a.easing}},_getDistance:function(a){var b=a[0],c=a[1];return 
Math.sqrt(Math.pow(Math.abs(c.clientX-b.clientX),2)+Math.pow(Math.abs(c.clientY-b.clientY),2))},_getMiddle:function(a){var b=a[0],c=a[1];return{clientX:(c.clientX-b.clientX)/2+b.clientX,clientY:(c.clientY-b.clientY)/2+b.clientY}},_trigger:function(a){"string"==typeof a&&(a="panzoom"+a),this.$elem.triggerHandler(a,[this].concat(j.call(arguments,1)))},_startMove:function(a,d){if(!this.panning){var e,f,g,i,j,k,l,m,n=this,o=this.options,p=o.eventNamespace,q=this.getMatrix(),r=q.slice(0),s=+r[4],t=+r[5],u={matrix:q,animate:"skip"},v=a.type;"pointerdown"===v?(e="pointermove",f="pointerup"):"touchstart"===v?(e="touchmove",f="touchend"):"MSPointerDown"===v?(e="MSPointerMove",f="MSPointerUp"):(e="mousemove",f="mouseup"),e+=p,f+=p,this.transition(!0),this.panning=!0,this._trigger("start",a,d);var w=function(a,b){if(b){if(2===b.length){if(null!=g)return;return g=n._getDistance(b),i=n.getScale(q),void(j=n._getMiddle(b))}if(null!=k)return;(m=b[0])&&(k=m.pageX,l=m.pageY)}null==k&&(k=a.pageX,l=a.pageY)};w(a,d);var x=function(a){var b;if(a.preventDefault(),d=a.touches||a.originalEvent.touches,w(a,d),d){if(2===d.length){var c=n._getMiddle(d),e=n._getDistance(d)-g;return n.zoom(e*(o.increment/100)+i,{focal:c,matrix:q,animate:"skip"}),n.pan(+q[4]+c.clientX-j.clientX,+q[5]+c.clientY-j.clientY,u),void(j=c)}b=d[0]||{pageX:0,pageY:0}}b||(b=a),n.pan(s+b.pageX-k,t+b.pageY-l,u)};b(h).off(p).on(e,x).on(f,function(a){a.preventDefault(),b(this).off(p),n.panning=!1,a.type="panzoomend",n._trigger(a,q,!c(q,r))})}}},b.Panzoom=g,b.fn.panzoom=function(a){var c,d,e,f;return"string"==typeof a?(f=[],d=j.call(arguments,1),this.each(function(){c=b.data(this,i),c?"_"!==a.charAt(0)&&"function"==typeof(e=c[a])&&void 0!==(e=e.apply(c,d))&&f.push(e):f.push(void 0)}),f.length?1===f.length?f[0]:f:this):this.each(function(){new g(this,a)})},g}); diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js deleted file 
mode 100644 index 4688d633fe15..000000000000 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js +++ /dev/null @@ -1,4 +0,0 @@ -/* Modernizr 2.5.3 (Custom Build) | MIT & BSD - * Build: http://www.modernizr.com/download/#-inlinesvg - */ -;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnProperty,t;!w(s,"undefined")&&!w(s.call,"undefined")?t=function(a,b){return s.call(a,b)}:t=function(a,b){return b in a&&w(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=q.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(q.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(q.call(arguments)))};return e}),m.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="",(a.firstChild&&a.firstChild.namespaceURI)==l.svg};for(var z in m)t(m,z)&&(r=z.toLowerCase(),e[r]=m[z](),p.push((e[r]?"":"no-")+r));return u(""),h=j=null,e._version=d,e}(this,this.document); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_annotation.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_annotation.svg new file mode 100644 index 000000000000..bccf6caf73c7 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_annotation.svg @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ O + + + + + + + + diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/print.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/print.css new file mode 100644 index 000000000000..16ca6a7ca2cd --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/print.css @@ -0,0 +1,11 @@ +@media print { + * { + text-decoration: none; + font-family: "Lato", Arial, sans-serif; + border-width: 0px; + margin: 0px; + } + #textfilter, #package, #subpackage-spacer, #memberfilter, #filterby, div#definition .big-circle { + display: none !important; + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css index ae285a702398..c58d25dd0d94 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css @@ -244,6 +244,18 @@ span.symbol > a { background: url("package.svg") no-repeat center; } +.big-circle.annotation { + background: url("annotation.svg") no-repeat center; +} + +.big-circle.object-companion-annotation { + background: url("object_comp_annotation.svg") no-repeat center; +} + +.big-circle.annotation-companion-object { + background: url("annotation_comp.svg") no-repeat center; +} + body.abstract.type div.big-circle { background: url("abstract_type.svg") no-repeat center; } @@ -309,10 +321,6 @@ dl.attributes > dd { font-style: italic; } -#inheritedMembers > div.parent > h3 * { - color: white; -} - #inheritedMembers > div.conversion > h3 { height: 2em; padding: 1em; diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js index 89112cb02e3d..e8b44e9b6744 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js @@ -1,20 +1,7 @@ // © 2009–2010 EPFL/LAMP // code by Gilles Dubochet with contributions by Pedro 
Furlanetto, Marcin Kubala and Felix Mulder -var $panzoom = undefined; $(document).ready(function() { - // Add zoom functionality to type inheritance diagram - $panzoom = $(".diagram-container > .diagram").panzoom({ - increment: 0.1, - minScale: 1, - maxScale: 7, - transition: true, - duration: 200, - contain: 'invert', - easing: "ease-in-out", - $zoomIn: $('#diagram-zoom-in'), - $zoomOut: $('#diagram-zoom-out'), - }); var oldWidth = $("div#subpackage-spacer").width() + 1 + "px"; $("div#packages > ul > li.current").on("click", function() { @@ -24,8 +11,9 @@ $(document).ready(function() { var controls = { visibility: { - publicOnly: $("#visbl").find("> ol > li.public"), - all: $("#visbl").find("> ol > li.all") + publicFilter: $("#visbl").find("> ol > li.public"), + protectedFilter: $("#visbl").find("> ol > li.protected"), + privateFilter: $("#visbl").find("> ol > li.private") } }; @@ -34,21 +22,14 @@ $(document).ready(function() { return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=<>\|])/g, '\\$1'); } - function toggleVisibilityFilter(ctrlToEnable, ctrToDisable) { - if (ctrlToEnable.hasClass("out")) { - ctrlToEnable.removeClass("out").addClass("in"); - ctrToDisable.removeClass("in").addClass("out"); - filter(); - } + function toggleVisibilityFilter() { + $(this).toggleClass("in").toggleClass("out"); + filter(); } - controls.visibility.publicOnly.on("click", function() { - toggleVisibilityFilter(controls.visibility.publicOnly, controls.visibility.all); - }); - - controls.visibility.all.on("click", function() { - toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly); - }); + controls.visibility.publicFilter.on("click", toggleVisibilityFilter); + controls.visibility.protectedFilter.on("click", toggleVisibilityFilter); + controls.visibility.privateFilter.on("click", toggleVisibilityFilter); function exposeMember(jqElem) { var jqElemParent = jqElem.parent(), @@ -57,7 +38,7 @@ $(document).ready(function() { // switch visibility filter if 
necessary if (jqElemParent.attr("visbl") == "prt") { - toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly); + controls.visibility.privateFilter.removeClass("out").addClass("in"); } // toggle appropriate ancestor filter buttons @@ -80,7 +61,7 @@ $(document).ready(function() { return $(elem).attr("data-hidden") == 'true'; }; - $("#linearization li:gt(0)").filter(function(){ + $("#linearization li").slice(1).filter(function(){ return isHiddenClass($(this).attr("name")); }).removeClass("in").addClass("out"); @@ -270,18 +251,17 @@ $(document).ready(function() { if (!isMobile()) content.slideUp(100); else content.hide(); } else { + // TODO: is there a cleaner way to render the svg only once it's visible? + setTimeout(function() {content.trigger('beforeShow');}, 100); if (!isMobile()) content.slideDown(100); else content.show(); } }; - $(".toggleContainer:not(.diagram-container):not(.full-signature-block)").on("click", function() { - toggleShowContentFct($(this)); - }); - - $(".toggleContainer.full-signature-block").on("click", function() { - toggleShowContentFct($(this)); - return false; + $(".toggle").on("click", function() { + toggleShowContentFct($(this).parent()); + // Stop propagation so that we don't hide/show the parent (this is a use case's full sig, which is nested in a member list) + if ($(this).parent().hasClass("full-signature-block")) return false; }); if ($("#order > ol > li.group").length == 1) { orderGroup(); }; @@ -294,9 +274,11 @@ $(document).ready(function() { // highlight and jump to selected member if an anchor is provided if (window.location.hash) { - var jqElem = findElementByHash(window.location.hash); - if (jqElem.length > 0) - exposeMember(jqElem); + var jqElem = findElementByHash(decodeURIComponent(window.location.hash)); + if (jqElem.length > 0) { + if (jqElem.hasClass("toggleContainer")) toggleShowContentFct(jqElem); + else exposeMember(jqElem); + } } $("#template span.permalink").on("click", function(e) { @@ -373,7 
+355,7 @@ function initInherit() { groupParents[$(this).attr("name")] = $(this); }); - $("#types > ol > li").each(function(){ + $("#types > ol > li").add("#deprecatedTypes > ol > li").each(function(){ var mbr = $(this); this.mbrText = mbr.find("> .fullcomment .cmt").text(); var qualName = mbr.attr("name"); @@ -450,11 +432,15 @@ function filter() { var query = $.trim($("#memberfilter input").val()).toLowerCase(); query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|"); var queryRegExp = new RegExp(query, "i"); - var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in"); + + var publicMembersShown = $("#visbl > ol > li.public").hasClass("in"); + var protectedMembersShown = $("#visbl > ol > li.protected").hasClass("in"); + var privateMembersShown = $("#visbl > ol > li.private").hasClass("in"); + var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in"); var orderingInheritance = $("#order > ol > li.inherit").hasClass("in"); var orderingGroups = $("#order > ol > li.group").hasClass("in"); - var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out"); + var hiddenSuperclassElementsLinearization = orderingInheritance ? 
$("#linearization > li").slice(1) : $("#linearization > li.out"); var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() { return $(this).attr("name"); }).get(); @@ -500,7 +486,16 @@ function filter() { var members = $(this); members.find("> ol > li").each(function() { var mbr = $(this); - if (privateMembersHidden && mbr.attr("visbl") == "prt") { + var visibility = mbr.attr("visbl"); + if (!publicMembersShown && visibility == "pub") { + mbr.hide(); + return; + } + if (!protectedMembersShown && visibility == "prt") { + mbr.hide(); + return; + } + if (!privateMembersShown && visibility == "prv") { mbr.hide(); return; } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala index 9e7b69c9773c..4979bde28288 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -16,7 +16,7 @@ package model import base.comment._ -import scala.collection._ +import scala.collection.mutable import scala.reflect.internal.util.Position /** The comment parser transforms raw comment strings into `Comment` objects. diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala index c7f5bfb990c3..6059a1d9bbeb 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,7 +14,6 @@ package scala.tools.nsc package doc package model -import scala.collection._ import base.comment._ import diagram._ @@ -102,8 +101,8 @@ trait TemplateEntity extends Entity { /** Whether documentation is available for this template. */ def isDocTemplate: Boolean - /** Whether this template is a case class. */ - def isCaseClass: Boolean + /** Whether this template is a case class or a case object. */ + def isCase: Boolean /** The self-type of this template, if it differs from the template type. */ def selfType : Option[TypeEntity] @@ -348,6 +347,11 @@ trait Object extends MemberTemplateEntity { def kind = "object" } +/** An annotation template. Any class which extends `scala.annotation.Annotation` */ +trait AnnotationClass extends Class { + override def kind = "annotation" +} + /** A package template. A package is in the universe if it is declared as a package object, or if it * contains at least one template. */ trait Package extends DocTemplateEntity { @@ -537,7 +541,7 @@ trait ImplicitMemberShadowing { } /** A trait that encapsulates a constraint necessary for implicit conversion */ -trait Constraint +sealed trait Constraint /** A constraint involving a type parameter which must be in scope */ trait ImplicitInScopeConstraint extends Constraint { @@ -566,7 +570,7 @@ trait KnownTypeClassConstraint extends TypeClassConstraint { } /** A constraint involving a type parameter */ -trait TypeParamConstraint extends Constraint { +sealed trait TypeParamConstraint extends Constraint { /** The type parameter involved */ def typeParamName: String } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala index c648008b9977..7bafff6e9701 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,7 +15,8 @@ package tools.nsc package doc package model -import scala.collection._ +import scala.collection.immutable.{SortedMap, SortedSet} +import scala.collection.mutable object IndexModelFactory { @@ -23,27 +24,24 @@ object IndexModelFactory { lazy val (firstLetterIndex, hasDeprecatedMembers): (Map[Char, SymbolMap], Boolean) = { - object result extends mutable.HashMap[Char,SymbolMap] { - + object result { + val map = mutable.HashMap.empty[Char, SymbolMap] var deprecated = false /* symbol name ordering */ - implicit def orderingMap = math.Ordering.String + implicit def orderingMap: Ordering[String] = math.Ordering.String - def addMember(d: MemberEntity) = { + def addMember(d: MemberEntity): Unit = { val firstLetter = { val ch = d.name.head.toLower if(ch.isLetterOrDigit) ch else '_' } - val letter = this.get(firstLetter).getOrElse { - immutable.SortedMap[String, SortedSet[MemberEntity]]() - } - val members = letter.get(d.name).getOrElse { - SortedSet.empty[MemberEntity](Ordering.by { _.toString }) - } + d - if (!deprecated && members.find(_.deprecation.isDefined).isDefined) + val map = this.map + val letter = map.getOrElse(firstLetter, SortedMap.empty[String, SortedSet[MemberEntity]]) + val members = letter.getOrElse(d.name, SortedSet.empty[MemberEntity](Ordering.by { _.toString })) + d + if (!deprecated && members.exists(_.deprecation.isDefined)) deprecated = true - this(firstLetter) = letter + (d.name -> members) + map(firstLetter) = letter + (d.name -> members) } } @@ -61,7 +59,7 @@ object IndexModelFactory { gather(universe.rootPackage) - (result.toMap, result.deprecated) + (result.map.toMap, result.deprecated) } } } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala index fc1b7ac46f29..997844fe6338 100644 --- 
a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,10 @@ package scala.tools.nsc package doc package model +import java.nio.file.Paths + import base._ +import scala.tools.nsc.io.AbstractFile /** This trait extracts all required information for documentation from compilation units */ trait MemberLookup extends base.MemberLookupBase { @@ -57,28 +60,108 @@ trait MemberLookup extends base.MemberLookupBase { /* Get package object which has associatedFile ne null */ sym.info.member(newTermName("package")) else sym - def classpathEntryFor(s: Symbol): Option[String] = { - Option(s.associatedFile).flatMap(_.underlyingSource).map { src => - val path = src.canonicalPath - if(path.endsWith(".class")) { // Individual class file -> Classpath entry is root dir - var nesting = s.ownerChain.count(_.hasPackageFlag) - if(nesting > 0) { - val p = 0.until(nesting).foldLeft(src) { - case (null, _) => null - case (f, _) => f.container - } - if(p eq null) path else p.canonicalPath - } else path - } else path // JAR file (and fallback option) - } - } classpathEntryFor(sym1) flatMap { path => - settings.extUrlMapping get path map { url => { - LinkToExternalTpl(name, url, makeTemplate(sym)) + if (isJDK(sym1)) { + Some(LinkToExternalTpl(name, jdkUrl(path, sym1), makeTemplate(sym))) + } + else { + settings.extUrlMapping get path map { url => + LinkToExternalTpl(name, url, makeTemplate(sym)) } } } } + private def classpathEntryFor(s: Symbol): Option[String] = { + Option(s.associatedFile).flatMap(_.underlyingSource).map { src => + val path = src.canonicalPath + if(path.endsWith(".class")) { // Individual class file -> Classpath entry is root dir + val nesting = 
s.ownerChain.count(_.hasPackageFlag) + if(nesting > 0) { + val p = 0.until(nesting).foldLeft(src) { + case (null, _) => null + case (f, _) => f.container + } + if(p eq null) path else p.canonicalPath + } else path + } else path // JAR file (and fallback option) + } + } + + /** + * Check if this file is a child of the given directory string. Can only be used + * on directories that actually exist in the file system. + */ + def isChildOf(f: AbstractFile, dir: String): Boolean = { + val parent = Paths.get(dir).toAbsolutePath().toString + f.canonicalPath.startsWith(parent) + } + + private def isJDK(sym: Symbol) = + sym.associatedFile.underlyingSource.map(f => isChildOf(f, (sys.props("java.home")))).getOrElse(false) + + // ISSUE-12820 + import scala.util.Try + private lazy val classGetModule = Try { + val clazz = Class.forName("java.lang.Class") + clazz.getMethod("getModule") + }.toOption + private lazy val moduleGetName = Try { + val clazz = Class.forName("java.lang.Module") + clazz.getMethod("getName") + }.toOption + + def jdkUrl(path: String, sym: Symbol): String = { + if (path.endsWith(".jmod") && javaVersion >= 11) { + val tokens = path.split(java.io.File.separatorChar) + val module = tokens.last.stripSuffix(".jmod") + s"$jdkUrl/$module" + } else if (path.endsWith("ct.sym") && javaVersion >= 11) { + (for { + clazz <- Try(Class.forName(sym.javaClassName)).toOption + getModule <- classGetModule + module <- Try(getModule.invoke(clazz)).toOption + getModuleName <- moduleGetName + moduleName <- + Try(getModuleName.invoke(module)).toOption + .map(Option(_)) + .flatten + } yield { + s"$jdkUrl/$moduleName" + }).getOrElse(jdkUrl) + } + else { + jdkUrl + } + } + + def jdkUrl: String = { + if (settings.jdkApiDocBase.isDefault) + defaultJdkUrl + else + settings.jdkApiDocBase.value + } + + lazy val defaultJdkUrl = { + if (javaVersion < 11) { + s"https://docs.oracle.com/javase/$javaVersion/docs/api" + } + else { + s"https://docs.oracle.com/en/java/javase/$javaVersion/docs/api" + 
} + } + + lazy val javaVersion: Int = + global.settings.releaseValue + .getOrElse(scala.util.Properties.javaSpecVersion) + .split('.') + .take(2) + .map(_.toIntOption) match { + case Array(Some(1), Some(n)) => n // example: 1.8.0_242 + case Array(Some(n)) => n // example: 14 + case Array(Some(n), _) => n // example: 11.0.7 + case _ => 8 // shrug! + } + override def warnNoLink = !settings.docNoLinkWarnings.value } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 4a2d5bdb8fc9..fa0efbb1f381 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,14 +18,16 @@ package model import base.comment._ import diagram._ -import scala.collection._ -import scala.tools.nsc.doc.html.page.diagram.{DotRunner} -import scala.util.matching.Regex +import java.net.URI +import java.nio.file.Paths + +import scala.collection.mutable, mutable.ListBuffer +import scala.reflect.io._ import scala.reflect.macros.internal.macroImpl -import symtab.Flags +import scala.util.matching.Regex.quoteReplacement -import io._ -import model.{ RootPackage => RootPackageEntity } +import model.{RootPackage => RootPackageEntity} +import symtab.Flags /** This trait extracts all required information for documentation from compilation units */ class ModelFactory(val global: Global, val settings: doc.Settings) { @@ -38,7 +40,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { with MemberLookup => import global._ - import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass } + import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, AnnotationClass } 
import rootMirror.{ RootPackage, EmptyPackage } import ModelFactory._ @@ -53,7 +55,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory.universe = thisUniverse val settings = thisFactory.settings val rootPackage = modelCreation.createRootPackage - lazy val dotRunner = new DotRunner(settings) } _modelFinished = true // complete the links between model entities, everything that couldn't have been done before @@ -72,6 +73,17 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { case _ => false } + // This unsightly hack closes issue scala/bug#4086. + private lazy val modifiedSynchronized: Symbol = { + val sym = definitions.Object_synchronized + val info = (sym.info: @unchecked) match { + case PolyType(ts, MethodType(List(bp), mt)) => + val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info)) + PolyType(ts, MethodType(List(cp), mt)) + } + sym.cloneSymbol.setPos(sym.pos).setInfo(info) + } + def optimize(str: String): String = if (str.length < 16) str.intern else str @@ -88,7 +100,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def inTemplate: TemplateImpl = inTpl def toRoot: List[EntityImpl] = this :: inTpl.toRoot def qualifiedName = name - def annotations = sym.annotations.filterNot(_.tpe =:= typeOf[macroImpl]).map(makeAnnotation) + def annotations = sym.annotations.filterNot(_.atp =:= typeOf[macroImpl]).map(makeAnnotation) def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject def isType = sym.name.isTypeName } @@ -100,7 +112,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isTrait = sym.isTrait def isClass = sym.isClass && !sym.isTrait def isObject = sym.isModule && !sym.hasPackageFlag - def isCaseClass = sym.isCaseClass + def isCase = sym.isCase def isRootPackage = false def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) } @@ -113,7 +125,9 @@ class 
ModelFactory(val global: Global, val settings: doc.Settings) { // if there is a field symbol, the ValDef will use it, which means docs attached to it will be under the field symbol, not the getter's protected[this] def commentCarryingSymbol(sym: Symbol) = - if (sym.hasAccessorFlag && sym.accessed.exists) sym.accessed else sym + if (sym == modifiedSynchronized) definitions.Object_synchronized + else if (sym.hasAccessorFlag && sym.accessed.exists) sym.accessed + else sym lazy val comment = thisFactory.comment(commentCarryingSymbol(sym), linkTarget, inTpl) @@ -145,7 +159,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } } def flags = { - val fgs = mutable.ListBuffer.empty[Paragraph] + val fgs = ListBuffer.empty[Paragraph] if (sym.isImplicit) fgs += Paragraph(Text("implicit")) if (sym.isSealed) fgs += Paragraph(Text("sealed")) if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract")) @@ -211,7 +225,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { case d: MemberEntity with Def => val paramLists: List[String] = if (d.valueParams.isEmpty) Nil - else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")")) + else d.valueParams.map(ps => ps.map(_.resultType.name).mkString("(",",",")")) paramLists.mkString case _ => "" } @@ -284,7 +298,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { assert(!(docTemplatesCache isDefinedAt sym), sym) docTemplatesCache += (sym -> this) - if (settings.verbose) + if (settings.verbose.value) inform("Creating doc template for " + sym) override def linkTarget: DocTemplateImpl = this @@ -301,26 +315,25 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def sourceUrl = { - def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/") - val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/" if (!settings.docsourceurl.isDefault) - inSource map { case (file, line) => - val 
filePathExt = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "") - val (filePath, fileExt) = filePathExt.splitAt(filePathExt.indexOf(".", filePathExt.lastIndexOf("/"))) + inSource.map { case (file, line) => + val filePathExt = { + // file path is relative to source root (-sourcepath); use an absolute path otherwise + val sp = settings.sourcepath.value + val fileUri = file.file.toPath.toUri + if (sp.isEmpty) fileUri.getRawPath + else Paths.get(sp).toUri.relativize(fileUri).getRawPath + } + val (filePath, fileExt) = + filePathExt.lastIndexOf('.') match { + case -1 => (filePathExt, "") + case i => filePathExt.splitAt(i) + } val tplOwner = this.inTemplate.qualifiedName val tplName = this.name - val patches = new Regex("""€\{(FILE_PATH|FILE_EXT|FILE_PATH_EXT|FILE_LINE|TPL_OWNER|TPL_NAME)\}""") - def substitute(name: String): String = name match { - case "FILE_PATH" => filePath - case "FILE_EXT" => fileExt - case "FILE_PATH_EXT" => filePathExt - case "FILE_LINE" => line.toString - case "TPL_OWNER" => tplOwner - case "TPL_NAME" => tplName - } - val patchedString = patches.replaceAllIn(settings.docsourceurl.value, m => java.util.regex.Matcher.quoteReplacement(substitute(m.group(1))) ) - new java.net.URL(patchedString) + val patchedString = expandUrl(settings.docsourceurl.value, filePath, fileExt, filePathExt, line, tplOwner, tplName) + new URI(patchedString).toURL } else None } @@ -332,7 +345,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { /* Subclass cache */ private lazy val subClassesCache = ( if (sym == AnyRefClass || sym == AnyClass) null - else mutable.ListBuffer[DocTemplateEntity]() + else ListBuffer[DocTemplateEntity]() ) def registerSubClass(sc: DocTemplateEntity): Unit = { if (subClassesCache != null) @@ -341,10 +354,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def directSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList /* Implicitly convertible class cache */ - private var 
implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null + private var implicitlyConvertibleClassesCache: ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = { if (implicitlyConvertibleClassesCache == null) - implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]() + implicitlyConvertibleClassesCache = ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]() implicitlyConvertibleClassesCache += ((dtpl, conv)) } @@ -354,10 +367,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { else implicitlyConvertibleClassesCache.toList - // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added - // lazily, on completeModel - val conversions: List[ImplicitConversionImpl] = - if (settings.docImplicits) makeImplicitConversions(sym, this) else Nil + // the implicit conversions are generated lazily, on completeModel + lazy val conversions: List[ImplicitConversionImpl] = + if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil // members as given by the compiler lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList @@ -454,7 +466,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None override def valueParams = // we don't want params on a class (non case class) signature - if (isCaseClass) primaryConstructor match { + if (isCase) primaryConstructor match { case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this))) case None => List() } @@ -493,17 +505,20 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] = if (sym eq 
RootPackage) None else findTemplateMaybe(sym) - /* Variable precedence order for implicitly added members: Take the variable definitions from ... - * 1. the target of the implicit conversion - * 2. the definition template (owner) - * 3. the current template - */ - val inRealTpl = ( - conversion.flatMap(conv => nonRootTemplate(conv.toType.typeSymbol)) - orElse nonRootTemplate(sym.owner) - orElse Option(inTpl)) + val inRealTpl = conversion match { + case Some(conv) => + /* Variable precedence order for implicitly added members: Take the variable definitions from ... + * 1. the target of the implicit conversion + * 2. the definition template (owner) + * 3. the current template + */ + nonRootTemplate(conv.toType.typeSymbol).orElse(nonRootTemplate(sym.owner)).getOrElse(inTpl) + case None => + // This case handles members which were inherited but not implemented or overridden + inTpl + } - inRealTpl flatMap (tpl => thisFactory.comment(commentCarryingSymbol(sym), tpl, tpl)) + thisFactory.comment(commentCarryingSymbol(sym), inRealTpl, inRealTpl) } override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates) @@ -515,7 +530,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def isUseCase = useCaseOf.isDefined override def byConversion: Option[ImplicitConversionImpl] = conversion - override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined } + override def isImplicitlyInherited = { assert(modelFinished, "cannot check if implicitly inherited before model is finished"); conversion.isDefined } override def isShadowedImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false) override def isAmbiguousImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false) override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit @@ -545,13 +560,13 @@ class ModelFactory(val global: 
Global, val settings: doc.Settings) { private trait TypeBoundsImpl { def sym: Symbol def inTpl: TemplateImpl - def lo = sym.info.lowerBound match { - case lo if lo.typeSymbol != NothingClass => + def lo = sym.info.bounds match { + case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym)) case _ => None } - def hi = sym.info.upperBound match { - case hi if hi.typeSymbol != AnyClass => + def hi = sym.info.bounds match { + case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym)) case _ => None } @@ -569,14 +584,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { * package object abstraction and placing members directly in the package. * * Here's the explanation of what we do. The code: - * + * {{{ * package foo { * object `package` { * class Bar * } * } - * + * }}} * will yield this Symbol structure: + *
        *                                       +---------+ (2)
        *                                       |         |
        * +---------------+         +---------- v ------- | ---+                              +--------+ (2)
    @@ -589,6 +605,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
        *                                                                +------------------- | ---+   |
        *                                                                                     |        |
        *                                                                                     +--------+
    +   * 
    * (1) sourceModule * (2) you get out of owners with .owner * @@ -618,7 +635,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { case Some(root: PackageImpl) => root case _ => modelCreation.createTemplate(RootPackage, null) match { case Some(root: PackageImpl) => root - case _ => sys.error("Scaladoc: Unable to create root package!") + case _ => throw new IllegalStateException("Scaladoc: Unable to create root package!") } } @@ -652,10 +669,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { new DocTemplateImpl(bSym, inTpl) with Object {} else if (bSym.isTrait) new DocTemplateImpl(bSym, inTpl) with Trait {} + else if (bSym.isClass && bSym.asClass.baseClasses.contains(AnnotationClass)) + new DocTemplateImpl(bSym, inTpl) with model.AnnotationClass {} else if (bSym.isClass || bSym == AnyRefClass) new DocTemplateImpl(bSym, inTpl) with Class {} else - sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.") + throw new IllegalArgumentException(s"'$bSym' isn't a class, trait or object thus cannot be built as a documentable template.") } val bSym = normalizeTemplate(aSym) @@ -695,11 +714,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } else Some(pack) case _ => - sys.error("'" + bSym + "' must be in a package") + throw new IllegalArgumentException(s"'$bSym' must be in a package") } else { // no class inheritance at this point - assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl) + assert(inOriginalOwner(bSym, inTpl), s"$bSym in $inTpl") Some(createDocTemplate(bSym, inTpl)) } } @@ -714,7 +733,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // Code is duplicate because the anonymous classes are created statically def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = { - assert(modelFinished) // only created AFTER the model is finished + assert(modelFinished, "cannot create 
NoDocMember template before model is finished") // only created AFTER the model is finished if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule)) new MemberTemplateImpl(bSym, inTpl) with Object {} else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait)) @@ -722,20 +741,21 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass)) new MemberTemplateImpl(bSym, inTpl) with Class {} else - sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.") + throw new IllegalArgumentException(s"'$bSym' isn't a class, trait or object thus cannot be built as a member template.") } - assert(modelFinished) + assert(modelFinished, "cannot create lazy template member before model is finished") val bSym = normalizeTemplate(aSym) if (docTemplatesCache isDefinedAt bSym) docTemplatesCache(bSym) else docTemplatesCache.get(bSym.owner) match { - case Some(inTpl) => - val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr }) - assert(mbrs.length == 1) - mbrs.head + case Some(docTpl) => + docTpl.members.collect { case mbr: MemberImpl if mbr.sym == bSym => mbr } match { + case h :: Nil => h + case _ => throw new AssertionError("must have exactly one member with bSym") + } case _ => // move the class completely to the new location createNoDocMemberTemplate(bSym, inTpl) @@ -756,17 +776,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { override def isVar = true }) else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) { - val cSym = { // This unsightly hack closes issue #4086. 
- if (bSym == definitions.Object_synchronized) { - val cSymInfo = (bSym.info: @unchecked) match { - case PolyType(ts, MethodType(List(bp), mt)) => - val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info)) - PolyType(ts, MethodType(List(cp), mt)) - } - bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo) - } + val cSym: Symbol = + if (bSym == definitions.Object_synchronized) modifiedSynchronized else bSym - } Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def { override def isDef = true }) @@ -822,17 +834,17 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = { - assert(modelFinished) + assert(modelFinished, "cannot try to find template before model is finished") docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_)) } def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None) def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = { - assert(modelFinished) + assert(modelFinished, "cannot make template before model is finished") def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = - noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl)) + noDocTemplatesCache.getOrElse(aSym, new NoDocTemplateImpl(aSym, inTpl)) findTemplateMaybe(aSym) getOrElse { val bSym = normalizeTemplate(aSym) @@ -918,21 +930,18 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } else None def resultType = - makeTypeInTemplateContext(aSym.tpe, inTpl, aSym) + makeTypeInTemplateContext(aSym.tpe, this.inTpl, aSym) def isImplicit = aSym.isImplicit } /** */ def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = { - def ownerTpl(sym: Symbol): Symbol = - if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) - val tpe = - if (thisFactory.settings.useStupidTypes) 
aType else { - def ownerTpl(sym: Symbol): Symbol = - if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) - val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym - aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym)) - } + val tpe = { + def ownerTpl(sym: Symbol): Symbol = + if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) + val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym + aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym)) + } makeType(tpe, inTpl) } @@ -953,14 +962,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { * - a NoDocTemplate if the type's symbol is not documented at all */ def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = { def noDocTemplate = makeTemplate(parent.typeSymbol) - findTemplateMaybe(parent.typeSymbol) match { - case Some(tpl) => tpl - case None => parent match { + findTemplateMaybe(parent.typeSymbol).getOrElse { + parent match { case TypeRef(pre, sym, args) => - findTemplateMaybe(pre.typeSymbol) match { - case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate) - case None => noDocTemplate - } + findTemplateMaybe(pre.typeSymbol) + .flatMap(findMember(parent.typeSymbol, _).collect { case t: TemplateImpl => t }) + .getOrElse(noDocTemplate) case _ => noDocTemplate } } @@ -1015,31 +1022,27 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { (!sym.isModule || sym.moduleClass.isInitialized) && // documenting only public and protected members localShouldDocument(sym) && - // Only this class's constructors are part of its members, inherited constructors are not. - (!sym.isConstructor || sym.owner == inTpl.sym) && - // If the @bridge annotation overrides a normal member, show it - !isPureBridge(sym) + // Only this class's constructors are part of its members; inherited constructors are not. 
+ (!sym.isConstructor || sym.owner == inTpl.sym) } def isEmptyJavaObject(aSym: Symbol): Boolean = aSym.isModule && aSym.isJavaDefined && aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym)) - def localShouldDocument(aSym: Symbol): Boolean = - !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic - - /** Filter '@bridge' methods only if *they don't override non-bridge methods*. See scala/bug#5373 for details */ - def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge) - - // the classes that are excluded from the index should also be excluded from the diagrams - def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName) + def localShouldDocument(aSym: Symbol): Boolean = { + // For `private[X]`, isPrivate is false (while for protected[X], isProtected is true) + def isPrivate = aSym.isPrivate || !aSym.isProtected && aSym.privateWithin != NoSymbol + // for private, only document if enabled in settings and not top-level + !aSym.isSynthetic && (!isPrivate || settings.visibilityPrivate.value && !aSym.isTopLevel) + } // the implicit conversions that are excluded from the pages should not appear in the diagram def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod) // whether or not to create a page for an {abstract,alias} type def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) = - (settings.docExpandAllTypes && (bSym.sourceFile != null)) || + (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) || (bSym.isAliasType || bSym.isAbstractType) && { val rawComment = global.expandedDocComment(bSym, inTpl.sym) rawComment.contains("@template") || rawComment.contains("@documentable") } @@ -1050,4 +1053,36 @@ object ModelFactory { val defaultGroupName = "Ungrouped" val defaultGroupDesc = None val defaultGroupPriority = 1000 -} \ No newline at end of file + + val 
tokens = raw"€\{($FILE_PATH|$FILE_EXT|$FILE_PATH_EXT|$FILE_LINE|$TPL_OWNER|$TPL_NAME)\}".r + final val FILE_PATH = "FILE_PATH" + final val FILE_EXT = "FILE_EXT" + final val FILE_PATH_EXT = "FILE_PATH_EXT" + final val FILE_LINE = "FILE_LINE" + final val TPL_OWNER = "TPL_OWNER" + final val TPL_NAME = "TPL_NAME" + + val WordChar = raw"(\w)".r + + def expandUrl(urlTemplate: String, filePath: String, fileExt: String, filePathExt: String, line: Int, tplOwner: String, tplName: String): String = { + val absolute = filePath.startsWith("/") + + def subst(token: String, index: Int): String = { + // If a relative path follows a word character, insert a `/` + def sep: String = + if (index > 0 && !absolute && WordChar.matches(urlTemplate.substring(index-1, index))) "/" + else "" + def dotted: Boolean = index > 0 && urlTemplate(index-1) == '.' + + token match { + case FILE_PATH => s"$sep$filePath" + case FILE_EXT => if (dotted) fileExt.stripPrefix(".") else fileExt + case FILE_PATH_EXT => s"$sep$filePathExt" + case FILE_LINE => line.toString + case TPL_OWNER => tplOwner + case TPL_NAME => tplName + } + } + tokens.replaceAllIn(urlTemplate, m => quoteReplacement(subst(m.group(1), m.start))) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 5a7bbb79ccb1..8dd03c2144ba 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,7 +14,7 @@ package scala.tools.nsc package doc package model -import scala.collection._ +import scala.annotation.nowarn import scala.tools.nsc.Reporting.WarningCategory /** @@ -31,10 +31,10 @@ import scala.tools.nsc.Reporting.WarningCategory * * class C extends B { * def bar = 2 - * class implicit + * class D * } * - * D def conv(a: A) = new C + * implicit def conv(a: A) = new C * } * }}} * @@ -103,10 +103,10 @@ trait ModelFactoryImplicitSupport { // also keep empty conversions, so they appear in diagrams // conversions = conversions.filter(!_.members.isEmpty) - val hiddenConversions: Seq[String] = thisFactory + val hiddenConversions: Set[String] = thisFactory .comment(sym, inTpl.linkTarget, inTpl) - .map(_.hideImplicitConversions) - .getOrElse(Nil) + .map(_.hideImplicitConversions.toSet) + .getOrElse(Set.empty) conversions = conversions filterNot { conv: ImplicitConversionImpl => hiddenConversions.contains(conv.conversionShortName) || @@ -164,7 +164,7 @@ trait ModelFactoryImplicitSupport { val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType) // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite - if (viewSimplifiedType.params.length != 1) { + if (viewSimplifiedType.params.lengthIs != 1) { // This is known to be caused by the `<%<` object in Predef: // {{{ // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable @@ -176,7 +176,7 @@ trait ModelFactoryImplicitSupport { return Nil } - if (!settings.docImplicitsShowAll && viewSimplifiedType.resultType.typeSymbol == sym) { + if (!settings.docImplicitsShowAll.value && viewSimplifiedType.resultType.typeSymbol == sym) { // If, when looking at views for a class A, we find one that returns A as well // (possibly with different type parameters), we ignore it. 
// It usually is a way to build a "whatever" into an A, but we already have an A, as in: @@ -203,6 +203,7 @@ trait ModelFactoryImplicitSupport { case global.analyzer.SilentTypeError(err) => context.warning(sym.pos, err.toString, WarningCategory.Scaladoc) return Nil + case x => throw new MatchError(x) } } @@ -265,7 +266,7 @@ trait ModelFactoryImplicitSupport { available match { case Some(true) => Nil - case Some(false) if !settings.docImplicitsShowAll => + case Some(false) if !settings.docImplicitsShowAll.value => // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String]) throw new ImplicitNotFound(implType) case _ => @@ -312,7 +313,7 @@ trait ModelFactoryImplicitSupport { (tparams zip constrs) flatMap { case (tparam, constr) => { uniteConstraints(constr) match { - case (loBounds, upBounds) => (loBounds filter (_ != NothingTpe), upBounds filter (_ != AnyTpe)) match { + case (loBounds, upBounds) => (loBounds filter (_ != NothingTpe), upBounds filter (up => up != AnyTpe && up != SingletonClass.tpe)) match { case (Nil, Nil) => Nil case (List(lo), List(up)) if (lo == up) => @@ -374,7 +375,7 @@ trait ModelFactoryImplicitSupport { convertorOwner match { case doc: DocTemplateImpl => val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m } - if (convertors.length == 1) + if (convertors.lengthIs == 1) convertor = convertors.head case _ => } @@ -436,11 +437,11 @@ trait ModelFactoryImplicitSupport { def makeShadowingTable(members: List[MemberImpl], convs: List[ImplicitConversionImpl], inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = { - assert(modelFinished) + assert(modelFinished, "cannot make shadowing table before model is finished") - val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]() + val shadowingTable = Map.newBuilder[MemberEntity, ImplicitMemberShadowing] val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name) - val convsByMember = 
(Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) { + val convsByMember = convs.foldLeft(Map.empty[MemberImpl, ImplicitConversionImpl]){ case (map, conv) => map ++ conv.memberImpls.map (_ -> conv) } @@ -469,12 +470,12 @@ trait ModelFactoryImplicitSupport { def ambiguatingMembers: List[MemberEntity] = ambiguous } - shadowingTable += (member -> shadowing) + shadowingTable.addOne(member -> shadowing) } } } - shadowingTable.toMap + shadowingTable.result() } @@ -585,6 +586,7 @@ trait ModelFactoryImplicitSupport { * The trick here is that the resultType does not matter - the condition for removal it that paramss have the same * structure (A => B => C may not override (A, B) => C) and that all the types involved are * of the implicit conversion's member are subtypes of the parent members' parameters */ + @nowarn("cat=lint-nonlocal-return") def isDistinguishableFrom(t1: Type, t2: Type): Boolean = { // Vlad: I tried using matches but it's not exactly what we need: // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index a534a3851190..76b24c60e8df 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,8 +16,8 @@ package model import base._ import diagram._ - -import scala.collection._ +import scala.annotation.nowarn +import scala.collection.{immutable, mutable} /** This trait extracts all required information for documentation from compilation units */ trait ModelFactoryTypeSupport { @@ -45,7 +45,7 @@ trait ModelFactoryTypeSupport { appendType0(tp) case tp :: tps => appendType0(tp) - nameBuffer append sep + nameBuffer.append(sep) appendTypes0(tps, sep) } @@ -55,13 +55,13 @@ trait ModelFactoryTypeSupport { val args = tp.typeArgs nameBuffer append '(' appendTypes0(args.init, ", ") - nameBuffer append ") ⇒ " + nameBuffer append ") => " appendType0(args.last) case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) => appendType0(tp.args.head) nameBuffer append '*' case tp: TypeRef if definitions.isByNameParamType(tp) => - nameBuffer append "⇒ " + nameBuffer append "=> " appendType0(tp.args.head) case tp: TypeRef if definitions.isTupleTypeDirect(tp) => val args = tp.typeArgs @@ -119,6 +119,7 @@ trait ModelFactoryTypeSupport { // type is inherited from one template to another. There may be multiple symbols with the same name in scope, // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing // the prefix only for ambiguous references, not for overloaded ones. + @nowarn("cat=lint-nonlocal-return") def needsPrefix: Boolean = { if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym)) return true @@ -150,7 +151,7 @@ trait ModelFactoryTypeSupport { } val prefix = - if (!settings.docNoPrefixes && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) { + if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) { if (!owner.isRefinementClass) { val qName = makeQualifiedName(owner, Some(inTpl.sym)) if (qName != "") qName + "." 
else "" @@ -195,20 +196,22 @@ trait ModelFactoryTypeSupport { } /* Eval-by-name types */ case NullaryMethodType(result) => - nameBuffer append '⇒' + nameBuffer append "=>" appendType0(result) /* Polymorphic types */ - case PolyType(tparams, result) => assert(tparams.nonEmpty) - def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else - tps.map{tparam => - tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams) - }.mkString("[", ", ", "]") - nameBuffer append typeParamsToString(tparams) + case PolyType(tparams, result) => + assert(tparams.nonEmpty, "polymorphic type must have at least one type parameter") + def typeParamsToString(tps: List[Symbol]): String = + if (tps.isEmpty) "" + else + tps.map { tparam => + tparam.varianceString + tparam.unexpandedName + typeParamsToString(tparam.typeParams) + }.mkString("[", ", ", "]") + nameBuffer.append(typeParamsToString(tparams)) appendType0(result) case et@ExistentialType(quantified, underlying) => - def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = { if (sym.isType && !sym.isAliasType && !sym.isClass) { tp match { @@ -318,7 +321,7 @@ trait ModelFactoryTypeSupport { // scala/bug#4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the // same type based on the template the type is shown in. - if (settings.docNoPrefixes) + if (settings.docNoPrefixes.value) typeCache.getOrElseUpdate(aType, createTypeEntity) else createTypeEntity } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala index 6fc2efe68513..f03c21e4473c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. 
dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,8 +14,7 @@ package scala.tools.nsc package doc package model -import scala.collection._ - +import scala.collection.immutable.SortedMap /** A fragment of code. */ abstract class TreeEntity { diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala index 82d694780584..4bd809056812 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -14,7 +14,7 @@ package scala.tools.nsc package doc package model -import scala.collection._ +import scala.collection.immutable import scala.reflect.internal.util.{RangePosition, SourceFile} /** The goal of this trait is, using makeTree, @@ -49,7 +49,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory => /** Finds the Entity on which we will later create a link on, * stores it in tree.refs with its position */ - def makeLink(rhs: Tree){ + def makeLink(rhs: Tree): Unit ={ val start = pos.start - firstIndex val end = pos.end - firstIndex if(start != end) { diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala index 4973426174b3..ef585a45e275 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -14,7 +14,7 @@ package scala.tools.nsc package doc package model -import scala.collection._ +import scala.collection.immutable.SortedMap /** A type. Note that types and templates contain the same information only for the simplest types. For example, a type * defines how a template's type parameters are instantiated (as in `List[Cow]`), what the template's prefix is diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala index 8f5f090fc40c..628a5ff5dbf6 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala index 0e64c94e6864..90f09e65232f 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -18,10 +18,13 @@ package model sealed trait Visibility { def isProtected: Boolean = false def isPublic: Boolean = false + def isPrivate: Boolean = false } /** The visibility of `private[this]` members. */ -case class PrivateInInstance() extends Visibility +case class PrivateInInstance() extends Visibility { + override def isPrivate = true +} /** The visibility of `protected[this]` members. 
 */ case class ProtectedInInstance() extends Visibility { @@ -29,8 +32,10 @@ case class ProtectedInInstance() extends Visibility { } /** The visibility of `private[owner]` members. An unqualified private members - * is encoded with `owner` equal to `None`. */ -case class PrivateInTemplate(owner: Option[TypeEntity]) extends Visibility + * is encoded with `owner` equal to the member's `inTemplate`. */ +case class PrivateInTemplate(owner: Option[TypeEntity]) extends Visibility { + override def isPrivate = true +} /** The visibility of `protected[owner]` members. An unqualified protected * members is encoded with `owner` equal to `None`. diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala index 6116d945700d..d098913e0174 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 1cd1d6de4a66..53a894a37cdc 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -17,6 +17,7 @@ package diagram import model._ import java.util.regex.Pattern +import scala.annotation.nowarn import scala.tools.nsc.Reporting.WarningCategory import scala.util.matching.Regex @@ -91,8 +92,8 @@ trait DiagramDirectiveParser { defaultFilter } - protected var tFilter = 0l - protected var tModel = 0l + protected var tFilter = 0L + protected var tModel = 0L /** Show the entire diagram, no filtering */ case object FullDiagram extends DiagramFilter { @@ -135,6 +136,7 @@ trait DiagramDirectiveParser { else n.name + @nowarn("cat=lint-nonlocal-return") def hideNode(clazz: Node): Boolean = { val qualifiedName = getName(clazz) for (hideFilter <- hideNodesFilter) @@ -145,6 +147,7 @@ trait DiagramDirectiveParser { false } + @nowarn("cat=lint-nonlocal-return") def hideEdge(clazz1: Node, clazz2: Node): Boolean = { val clazz1Name = getName(clazz1) val clazz2Name = getName(clazz2) @@ -262,7 +265,7 @@ trait DiagramDirectiveParser { hideNodesFilter = hideNodesFilter0, hideEdgesFilter = hideEdgesFilter0) - if (settings.docDiagramsDebug && result != NoDiagramAtAll && result != FullDiagram) + if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram) settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result) tFilter += System.currentTimeMillis diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala index 4c7d028af0da..775e260c9d80 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -31,7 +31,6 @@ trait DiagramFactory extends DiagramDirectiveParser { this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory => import this.global.definitions._ - import this.global._ // the following can used for hardcoding different relations into the diagram, for bootstrapping purposes def aggregationNode(text: String) = @@ -60,7 +59,7 @@ trait DiagramFactory extends DiagramDirectiveParser { // superclasses val superclasses: List[Node] = tpl.parentTypes.collect { - case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))() + case p: (TemplateEntity, TypeEntity) => NormalNode(p._2, Some(p._1))() }.reverse // incoming implicit conversions @@ -72,7 +71,7 @@ trait DiagramFactory extends DiagramDirectiveParser { // subclasses var subclasses: List[Node] = tpl.directSubClasses.collect { - case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))() + case d: TemplateImpl => NormalNode(makeType(d.sym.tpe, tpl), Some(d))() }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse) // outgoing implicit conversions @@ -145,7 +144,7 @@ trait DiagramFactory extends DiagramDirectiveParser { } // for each node, add its subclasses - for (node <- nodesAll if !classExcluded(node)) { + for (node <- nodesAll) { node match { case dnode: MemberTemplateImpl => val superClasses = listSuperClasses(dnode) diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties index e27b29c0e13d..4d93c411f774 100644 --- a/src/scalap/decoder.properties +++ b/src/scalap/decoder.properties @@ -1,2 +1,2 @@ version.number=2.0.1 -copyright.string=(c) 2002-2024 LAMP/EPFL +copyright.string=(c) 2002-2025 LAMP/EPFL diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala index 74759f0c81dd..a9a26036ffb9 100644 --- a/src/scalap/scala/tools/scalap/Arguments.scala +++ b/src/scalap/scala/tools/scalap/Arguments.scala @@ -1,7 +1,7 
@@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -61,7 +61,7 @@ object Arguments { res } - def parse(args: Array[String], res: Arguments) { + def parse(args: Array[String], res: Arguments): Unit = { if (args != null) { var i = 0 while (i < args.length) @@ -94,7 +94,7 @@ object Arguments { val iter = prefixes.iterator val j = i while ((i == j) && iter.hasNext) { - val prefix = iter.next + val prefix = iter.next() if (args(i) startsWith prefix) { res.addPrefixed(prefix, args(i).substring(prefix.length()).trim()) i += 1 @@ -103,7 +103,7 @@ object Arguments { if (i == j) { val iter = prefixedBindings.keysIterator while ((i == j) && iter.hasNext) { - val prefix = iter.next + val prefix = iter.next() if (args(i) startsWith prefix) { val arg = args(i).substring(prefix.length()).trim() i = i + 1 diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala index d913c9072e59..c52e32c25d22 100644 --- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala +++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -109,7 +109,7 @@ class ByteArrayReader(content: Array[Byte]) { /** skip next 'n' bytes */ - def skip(n: Int) { + def skip(n: Int): Unit = { bp += n } diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala index 3a2b5f5ba190..9e2f4d5b9d89 100644 --- a/src/scalap/scala/tools/scalap/Classfile.scala +++ b/src/scalap/scala/tools/scalap/Classfile.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -26,10 +26,10 @@ class Classfile(in: ByteArrayReader) { val classname = in.nextChar val superclass = in.nextChar val interfaces = readInterfaces - val fields = readMembers(true) - val methods = readMembers(false) + val fields = readMembers(field = true) + val methods = readMembers(field = false) val attribs = readAttribs - def scalaSigAttribute = attribs find (_.toString == Main.SCALA_SIG) + def scalaSigAttribute = attribs.find(_.toString == Main.SCALA_SIG) def readAttribs = { val n = in.nextChar diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala index df3403b46079..5796596080c2 100644 --- a/src/scalap/scala/tools/scalap/Classfiles.scala +++ b/src/scalap/scala/tools/scalap/Classfiles.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala index 78e8737331e4..1e36aaad5d10 100644 --- a/src/scalap/scala/tools/scalap/CodeWriter.scala +++ b/src/scalap/scala/tools/scalap/CodeWriter.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,8 +17,8 @@ import java.io._ class CodeWriter(writer: Writer) { + import java.lang.System.{lineSeparator => nl} - private val nl = scala.compat.Platform.EOL private var step = " " private var level = 0 private var align = false @@ -60,11 +60,7 @@ class CodeWriter(writer: Writer) { if (step == null) newspace else if (!line) { - try { - writer.write(nl) - } catch { - case e: Exception => sys.error("IO error") - } + writer.write(nl) line = align align = true space = false @@ -116,7 +112,7 @@ class CodeWriter(writer: Writer) { def print(value: Double): CodeWriter = print(String.valueOf(value)) - def print(value: String): CodeWriter = try { + def print(value: String): CodeWriter = { if (align) { var i = 0 while (i < level) { @@ -131,8 +127,6 @@ class CodeWriter(writer: Writer) { space = false line = false this - } catch { - case e: Exception => sys.error("IO error") } override def toString(): String = writer.toString() diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala index acef4413ae82..0de24ec93ccd 100644 --- a/src/scalap/scala/tools/scalap/Decode.scala +++ b/src/scalap/scala/tools/scalap/Decode.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -54,8 +54,8 @@ object Decode { import classFile._ classFile annotation SCALA_SIG_ANNOTATION map { case Annotation(_, els) => - val bytesElem = els find (x => constant(x.elementNameIndex) == BYTES_VALUE) orNull - val _bytes = bytesElem.elementValue match { case ConstValueIndex(x) => constantWrapped(x) } + val bytesElem = els.find(x => constant(x.elementNameIndex) == BYTES_VALUE).orNull + val _bytes = bytesElem.elementValue match { case ConstValueIndex(x) => constantWrapped(x) case x => throw new MatchError(x) } val bytes = _bytes.asInstanceOf[StringBytesPair].bytes val length = ByteCodecs.decode(bytes) diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala index 21f1f93bc769..ef15bfb5697d 100644 --- a/src/scalap/scala/tools/scalap/JavaWriter.scala +++ b/src/scalap/scala/tools/scalap/JavaWriter.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -119,8 +119,8 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer def isConstr(name: String) = (name == "") - def printField(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) { - print(flagsToStr(false, flags)) + def printField(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]): Unit = { + print(flagsToStr(clazz = false, flags)) if ((flags & 0x0010) != 0) print("val " + NameTransformer.decode(getName(name))) else @@ -128,9 +128,9 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer print(": " + getType(tpe) + ";").newline } - def printMethod(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) { + def printMethod(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]): Unit = { if (getName(name) == "") - print(flagsToStr(false, flags)) + print(flagsToStr(clazz = false, flags)) if (getName(name) == "") { print("def this" + getType(tpe) + ";").newline } @@ -154,7 +154,7 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer } } - def printClassHeader() { + def printClassHeader(): Unit = { if (isInterface(cf.flags)) { print("trait " + getSimpleClassName(cf.classname)) } else { @@ -167,21 +167,21 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer } } - def printClass() { + def printClass(): Unit = { val pck = getPackage(cf.classname) if (pck.length() > 0) println("package " + pck + ";") - print(flagsToStr(true, cf.flags)) + print(flagsToStr(clazz = true, cf.flags)) cf.attribs find { case cf.Attribute(name, _) => getName(name) == "JacoMeta" } match { case None => - printClassHeader; + printClassHeader(); case Some(cf.Attribute(_, data)) => val mp = new MetaParser(getName( ((data(0) & 0xff) << 8) + (data(1) & 0xff)).trim()) mp.parse match { - case None => printClassHeader; + case None => printClassHeader(); case Some(str) => if (isInterface(cf.flags)) print("trait " + getSimpleClassName(cf.classname) + str) 
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala index 5e3d633d429f..8e51efabd766 100644 --- a/src/scalap/scala/tools/scalap/Main.scala +++ b/src/scalap/scala/tools/scalap/Main.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -21,6 +21,7 @@ import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory} import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver import scalax.rules.scalasig._ +import scala.io.AnsiColor.{BOLD, RESET} /**The main object used to execute scalap on the command-line. * @@ -110,13 +111,13 @@ class Main { // we have to encode every fragment of a name separately, otherwise the NameTransformer // will encode using unicode escaping dot separators as well // we can afford allocations because this is not a performance critical code - classname.split('.').map(NameTransformer.encode).mkString(".") + classname.split('.').map(NameTransformer.encode _).mkString(".") } path.findClassFile(encName) match { case Some(classFile) => if (verbose) { - Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + classFile.path) + Console.println(BOLD + "FILENAME" + RESET + " = " + classFile.path) } val bytes = classFile.toByteArray if (isScalaFile(bytes)) { @@ -191,7 +192,7 @@ object Main extends Main { val path = createClassPath(cpArg, settings, registry) // print the classpath if output is verbose if (verbose) - Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) + Console.println(BOLD + "CLASSPATH" + RESET + " = " + path.asClassPathString) // process all given classes arguments.getOthers foreach process(arguments, path) diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala index 
12b3f85a8822..9f70acd15359 100644 --- a/src/scalap/scala/tools/scalap/MetaParser.scala +++ b/src/scalap/scala/tools/scalap/MetaParser.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -41,7 +41,7 @@ class MetaParser(meta: String) { do { res.append(if (token == ",") ", " else "[") nextToken - parseType + parseType() } while (token == ",") nextToken res.append("]") @@ -88,7 +88,7 @@ class MetaParser(meta: String) { if (token == "<") { nextToken res.append(" <: ") - parseType + parseType() } } while (token == ",") nextToken @@ -101,7 +101,7 @@ class MetaParser(meta: String) { else res.append(" with ") nextToken - parseType + parseType() } while (token == "with") } res.toString() @@ -122,7 +122,7 @@ class MetaParser(meta: String) { if (token == "<") { nextToken res.append(" <: ") - parseType + parseType() } if (token == ",") { nextToken @@ -149,15 +149,15 @@ class MetaParser(meta: String) { nextToken res.append("def ") } - parseType + parseType() } } while (token == ",") nextToken res.append("): ") - parseType + parseType() } else { res.append(": ") - parseType + parseType() } res.toString() } @@ -165,7 +165,7 @@ class MetaParser(meta: String) { protected def parseMetaField: String = { nextToken res.append(": ") - parseType + parseType() res.toString() } @@ -176,7 +176,7 @@ class MetaParser(meta: String) { res.append(if (token == "(") "(" else ", ") nextToken if (token != ")") - parseType + parseType() } while (token == ",") nextToken res.append(")") diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala index 5058d9d5932b..3203f8833baa 100644 --- a/src/scalap/scala/tools/scalap/Properties.scala +++ b/src/scalap/scala/tools/scalap/Properties.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder 
(https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala index bdd1761ed972..16dc1d8e17ee 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala @@ -47,15 +47,12 @@ trait DefaultMemoisable extends Memoisable { protected def compute[A](key: AnyRef, a: => A): Any = a match { case success: Success[_, _] => onSuccess(key, success); success case other => - if(DefaultMemoisable.debug) println(key + " -> " + other) + if (DefaultMemoisable.debug) println(s"$key -> $other") other } - protected def onSuccess[S, T](key: AnyRef, result: Success[S, T]) { + protected def onSuccess[S, T](key: AnyRef, result: Success[S, T]): Unit = { val Success(out, t) = result - if(DefaultMemoisable.debug) println(key + " -> " + t + " (" + out + ")") + if (DefaultMemoisable.debug) println(s"$key -> $t ($out)") } } - - - diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala index 307458fc7d7b..a95845c7a81d 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala @@ -101,45 +101,66 @@ trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) { /** ^~^(f) is equivalent to ^^ { case b1 ~ b2 => f(b1, b2) } */ - def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f: (B1, B2) => C) = map { a => - (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) } + def ^~^[B1, B2, B >: A, C](f: (B1, B2) => C)(implicit viewbound_0: B => B1 ~ B2) = map { + a => (a: B1 ~ B2) match { + case b1 ~ b2 => + f(b1, b2) } +} /** ^~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 => f(b1, b2, b3) } */ - def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, 
C](f: (B1, B2, B3) => C) = map { a => - (a: B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) } + def ^~~^[B1, B2, B3, B >: A, C](f: (B1, B2, B3) => C)(implicit viewbound_0: B => B1 ~ B2 ~ B3) = map { + a => (a: B1 ~ B2 ~ B3) match { + case b1 ~ b2 ~ b3 => + f(b1, b2, b3) } +} /** ^~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) } */ - def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f: (B1, B2, B3, B4) => C) = map { a => - (a: B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) } + def ^~~~^[B1, B2, B3, B4, B >: A, C](f: (B1, B2, B3, B4) => C)(implicit viewbound_0: B => B1 ~ B2 ~ B3 ~ B4) = map { + a => (a: B1 ~ B2 ~ B3 ~ B4) match { + case b1 ~ b2 ~ b3 ~ b4 => + f(b1, b2, b3, b4) } +} /** ^~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) } */ - def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f: (B1, B2, B3, B4, B5) => C) = map { a => - (a: B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) } + def ^~~~~^[B1, B2, B3, B4, B5, B >: A, C](f: (B1, B2, B3, B4, B5) => C)(implicit viewbound_0: B => B1 ~ B2 ~ B3 ~ B4 ~ B5) = map { + a => (a: B1 ~ B2 ~ B3 ~ B4 ~ B5) match { + case b1 ~ b2 ~ b3 ~ b4 ~ b5 => + f(b1, b2, b3, b4, b5) } +} /** ^~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) } */ - def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f: (B1, B2, B3, B4, B5, B6) => C) = map { a => - (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) } + def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A, C](f: (B1, B2, B3, B4, B5, B6) => C)(implicit viewbound_0: B => B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) = map { + a => (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { + case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => + f(b1, b2, b3, b4, b5, b6) } +} /** ^~~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) 
} */ - def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f: (B1, B2, B3, B4, B5, B6, B7) => C) = map { a => - (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) } + def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A, C](f: (B1, B2, B3, B4, B5, B6, B7) => C)(implicit viewbound_0: B => B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) = map { + a => (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { + case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~ b7 => + f(b1, b2, b3, b4, b5, b6, b7) } +} /** >~>(f) is equivalent to >> { case b1 ~ b2 => f(b1, b2) } */ - def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f: (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a => - (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) } + def >~>[Out2, B1, B2, B >: A, C, X2 >: X](f: (B1, B2) => Out => Result[Out2, C, X2])(implicit viewbound_0: B => B1 ~ B2) = flatMap { + a => (a: B1 ~ B2) match { + case b1 ~ b2 => + f(b1, b2) } +} /** ^-^(f) is equivalent to ^^ { b2 => b1 => f(b1, b2) } */ @@ -147,9 +168,12 @@ trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) { /** ^~>~^(f) is equivalent to ^^ { case b2 ~ b3 => b1 => f(b1, b2, b3) } */ - def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a => - (a: B2 ~ B3) match { case b2 ~ b3 => b1: B1 => f(b1, b2, b3) } - } + def ^~>~^[B1, B2, B3, B >: A, C](f: (B1, B2, B3) => C)(implicit viewbound_0: B => B2 ~ B3) = map { + a => (a: B2 ~ B3) match { + case b2 ~ b3 => + (b1: B1) => f(b1, b2, b3) + } +} } diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala index 5bafdf93d816..4de83694f577 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala @@ -14,8 +14,6 @@ package scala.tools.scalap package scalax package rules -import language.postfixOps - trait Name { def name: String override def toString = name @@ 
-70,7 +68,7 @@ trait Rules { } /** Converts a rule into a function that throws an Exception on failure. */ - def expect[In, Out, A, Any](rule: Rule[In, Out, A, Any]): In => A = (in) => rule(in) match { + def expect[In, Out, A, R](rule: Rule[In, Out, A, R]): In => A = (in) => rule(in) match { case Success(_, a) => a case Failure => throw new ScalaSigParserError("Unexpected failure") case Error(x) => throw new ScalaSigParserError("Unexpected error: " + x) @@ -129,7 +127,7 @@ trait StateRules { /** Create a rule that succeeds with a list of all the provided rules that succeed. @param rules the rules to apply in sequence. */ - def anyOf[A, X](rules: Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) } + def anyOf[A, X](rules: Seq[Rule[A, X]]) = allOf(rules.map(_.?)) ^^ { opts => opts.flatMap(x => x) } /** Repeatedly apply a rule from initial value until finished condition is met. */ def repeatUntil[T, X](rule: Rule[T => T, X])(finished: T => Boolean)(initial: T) = apply { diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala index f3c0235b233b..3ca96a9c3a08 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala @@ -15,6 +15,7 @@ package scalax package rules import language.postfixOps +import scala.collection.immutable.ArraySeq /** * A workaround for the difficulties of dealing with @@ -51,7 +52,7 @@ class SeqRule[S, +A, +X](rule: Rule[S, S, A, X]) { /** Creates a rule that always succeeds with a Boolean value. * Value is 'true' if this rule succeeds, 'false' otherwise */ - def -? = ? map { _ isDefined } + def -? = ? 
map (_.isDefined) def * = from[S] { // tail-recursive function with reverse list accumulator @@ -84,13 +85,13 @@ class SeqRule[S, +A, +X](rule: Rule[S, S, A, X]) { /** Repeats this rule num times */ def times(num: Int): Rule[S, S, Seq[A], X] = from[S] { - val result = new scala.collection.mutable.ArraySeq[A](num) + val result = new Array[AnyRef](num) // more compact using HoF but written this way so it's tail-recursive def rep(i: Int, in: S): Result[S, Seq[A], X] = { - if (i == num) Success(in, result) + if (i == num) Success(in, ArraySeq.unsafeWrapArray(result).asInstanceOf[ArraySeq[A]]) else rule(in) match { case Success(out, a) => { - result(i) = a + result(i) = a.asInstanceOf[AnyRef] rep(i + 1, out) } case Failure => Failure diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala index bf4d81a05231..8f54c291c53d 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -15,8 +15,6 @@ package scalax package rules package scalasig -import language.postfixOps - import java.io.IOException object ByteCode { @@ -65,7 +63,7 @@ class ByteCode(val bytes: Array[Byte], val pos: Int, val length: Int) { result } - override def toString = length + " bytes" + override def toString = "" + length + " bytes" def toInt = fold(0) { (x, b) => (x << 8) + (b & 0xFF)} def toLong = fold(0L) { (x, b) => (x << 8) + (b & 0xFF)} @@ -96,22 +94,22 @@ trait ByteCodeReader extends RulesWithState { type S = ByteCode type Parser[A] = Rule[A, String] - val byte = apply(_ nextByte) + val byte = apply(_.nextByte) val u1 = byte ^^ (_ & 0xFF) - val u2 = bytes(2) ^^ (_ toInt) - val u4 = bytes(4) ^^ (_ toInt) // should map to Long?? + val u2 = bytes(2) ^^ (_.toInt) + val u4 = bytes(4) ^^ (_.toInt) // should map to Long?? def bytes(n: Int) = apply(_ next n) } object ClassFileParser extends ByteCodeReader { - def parse(byteCode: ByteCode) = expect(classFile)(byteCode) - def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode) + def parse(byteCode: ByteCode): ClassFile = expect(classFile)(byteCode) + def parseAnnotations(byteCode: ByteCode): Seq[Annotation] = expect(annotations)(byteCode) val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file") val version = u2 ~ u2 ^^ { case minor ~ major => (major, minor) } - val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_ isFull) + val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_.isFull) // NOTE currently most constants just evaluate to a string description // TODO evaluate to useful values @@ -155,13 +153,13 @@ object ClassFileParser extends ByteCodeReader { val attributes = u2 >> attribute.times // parse runtime-visible annotations - abstract class ElementValue - case class AnnotationElement(elementNameIndex: Int, elementValue: ElementValue) - case class ConstValueIndex(index: Int) extends ElementValue - case class 
EnumConstValue(typeNameIndex: Int, constNameIndex: Int) extends ElementValue - case class ClassInfoIndex(index: Int) extends ElementValue - case class Annotation(typeIndex: Int, elementValuePairs: Seq[AnnotationElement]) extends ElementValue - case class ArrayValue(values: Seq[ElementValue]) extends ElementValue + sealed abstract class ElementValue + final case class AnnotationElement(elementNameIndex: Int, elementValue: ElementValue) + final case class ConstValueIndex(index: Int) extends ElementValue + final case class EnumConstValue(typeNameIndex: Int, constNameIndex: Int) extends ElementValue + final case class ClassInfoIndex(index: Int) extends ElementValue + final case class Annotation(typeIndex: Int, elementValuePairs: Seq[AnnotationElement]) extends ElementValue + final case class ArrayValue(values: Seq[ElementValue]) extends ElementValue def element_value: Parser[ElementValue] = u1 >> { case 'B'|'C'|'D'|'F'|'I'|'J'|'S'|'Z'|'s' => u2 ^^ ConstValueIndex @@ -181,8 +179,8 @@ object ClassFileParser extends ByteCodeReader { val method = u2 ~ u2 ~ u2 ~ attributes ^~~~^ Method val methods = u2 >> method.times - val header = magicNumber -~ u2 ~ u2 ~ constantPool ~ u2 ~ u2 ~ u2 ~ interfaces ^~~~~~~^ ClassFileHeader - val classFile = header ~ fields ~ methods ~ attributes ~- !u1 ^~~~^ ClassFile + val header = magicNumber -~ u2 ~ u2 ~ constantPool ~ u2 ~ u2 ~ u2 ~ interfaces ^~~~~~~^ (ClassFileHeader.apply _) + val classFile = header ~ fields ~ methods ~ attributes ~- !u1 ^~~~^ (ClassFile.apply _) // TODO create a useful object, not just a string def memberRef(description: String) = u2 ~ u2 ^^ add1 { diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala index e7b7c78a901f..ff03e1e399ec 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder 
(https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala index b8ef18306815..b61c416d2026 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -15,8 +15,8 @@ package scalax package rules package scalasig -import scala.language.postfixOps import scala.language.implicitConversions +import scala.language.postfixOps import ClassFileParser._ import scala.reflect.internal.pickling.ByteCodecs @@ -29,13 +29,15 @@ object ScalaSigParser { import classFile._ def getBytes(bytesElem: AnnotationElement): Array[Byte] = bytesElem.elementValue match { - case ConstValueIndex(index) => bytesForIndex(index) + case ConstValueIndex(index) => bytesForIndex(index) case ArrayValue(signatureParts) => mergedLongSignatureBytes(signatureParts) + case x => throw new MatchError(x) } - def mergedLongSignatureBytes(signatureParts: Seq[ElementValue]): Array[Byte] = signatureParts.flatMap { + def mergedLongSignatureBytes(signatureParts: Seq[ElementValue]): Array[Byte] = signatureParts.iterator.flatMap { case ConstValueIndex(index) => bytesForIndex(index) - }(collection.breakOut) + case x => throw new MatchError(x) + }.toArray def bytesForIndex(index: Int) = constantWrapped(index).asInstanceOf[StringBytesPair].bytes @@ -92,7 +94,7 @@ object ScalaSigAttributeParsers extends ByteCodeReader { val scalaSig = nat ~ nat ~ symtab ^~~^ ScalaSig val utf8 = read(x => 
x.fromUTF8StringAndBytes.string) - val longValue = read(_ toLong) + val longValue = read(_.toLong) } case class ScalaSig(majorVersion: Int, minorVersion: Int, table: Seq[Int ~ ByteCode]) extends DefaultMemoisable { @@ -112,10 +114,10 @@ case class ScalaSig(majorVersion: Int, minorVersion: Int, table: Seq[Int ~ ByteC def parseEntry(index: Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index)) - implicit def applyRule[A](parser: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this) + implicit def applyRule[A](parser: ScalaSigParsers.Parser[A]): A = ScalaSigParsers.expect(parser)(this) override def toString = "ScalaSig version " + majorVersion + "." + minorVersion + { - for (i <- 0 until table.size) yield i + ":\t" + parseEntry(i) // + "\n\t" + getEntry(i) + for (i <- 0 until table.size) yield "" + i + ":\t" + parseEntry(i) // + "\n\t" + getEntry(i) }.mkString("\n", "\n", "") lazy val symbols: Seq[Symbol] = ScalaSigParsers.symbols @@ -163,12 +165,13 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { def parseEntry[A](parser: EntryParser[A])(index: Int) = (toEntry(index) -~ parser) - implicit def entryType(code: Int) = key filter (_ == code) + type R = scala.tools.scalap.scalax.rules.Rule[ScalaSigEntryParsers.S, ScalaSigEntryParsers.S, Int, Nothing] + implicit def entryType(code: Int): R = key.filter(_ == code) val index = read(_.index) val key = read(_.entryType) - lazy val entry: EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get + lazy val entry: EntryParser[Any] = (symbol: EntryParser[Any]) | typeEntry | literal | name | attributeInfo | annotInfo | children | get val ref = byteCodeEntryParser(nat) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index 2b18f9a6c392..99618177a4d0 100644 --- 
a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -17,9 +17,8 @@ package scalasig import java.io.{PrintStream, ByteArrayOutputStream} import java.util.regex.Pattern -import scala.tools.scalap.scalax.util.StringUtil import scala.reflect.NameTransformer -import java.lang.String + class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { import stream._ @@ -28,7 +27,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { case class TypeFlags(printRep: Boolean) - def printSymbol(symbol: Symbol) {printSymbol(0, symbol)} + def printSymbol(symbol: Symbol): Unit = {printSymbol(0, symbol)} def printSymbolAttributes(s: Symbol, onNewLine: Boolean, indent: => Unit) = s match { case t: SymbolInfoSymbol => { @@ -40,16 +39,16 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { case _ => } - def printSymbol(level: Int, symbol: Symbol) { + def printSymbol(level: Int, symbol: Symbol): Unit = { if (!symbol.isLocal && !(symbol.isPrivate && !printPrivates)) { - def indent() {for (i <- 1 to level) print(" ")} + def indent(): Unit = {for (i <- 1 to level) print(" ")} - printSymbolAttributes(symbol, true, indent) + printSymbolAttributes(symbol, onNewLine = true, indent()) symbol match { case o: ObjectSymbol => if (!isCaseClassObject(o)) { - indent + indent() if (o.name == "package") { // print package object printPackageObject(level, o) @@ -58,15 +57,15 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { } } case c: ClassSymbol if !refinementClass(c) && !c.isModule => - indent + indent() printClass(level, c) case m: MethodSymbol => - printMethod(level, m, indent) + 
printMethod(level, m, () => indent()) case a: AliasSymbol => - indent + indent() printAlias(level, a) case t: TypeSymbol if !t.name.matches("_\\$\\d+")=> - indent + indent() printTypeSymbol(level, t) case s => } @@ -74,7 +73,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { } def isCaseClassObject(o: ObjectSymbol): Boolean = { - val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType + val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType: @unchecked o.isFinal && (classSymbol.children.find(x => x.isCase && x.isInstanceOf[MethodSymbol]) match { case Some(_) => true case None => false @@ -87,17 +86,17 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { } - private def printChildren(level: Int, symbol: Symbol) { + private def printChildren(level: Int, symbol: Symbol): Unit = { for (child <- symbol.children) printSymbol(level + 1, child) } - def printWithIndent(level: Int, s: String) { - def indent() {for (i <- 1 to level) print(" ")} - indent + def printWithIndent(level: Int, s: String): Unit = { + def indent(): Unit = {for (i <- 1 to level) print(" ")} + indent() print(s) } - def printModifiers(symbol: Symbol) { + def printModifiers(symbol: Symbol): Unit = { // print private access modifier if (symbol.isPrivate) print("private ") else if (symbol.isProtected) print("protected ") @@ -122,7 +121,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { private def refinementClass(c: ClassSymbol) = c.name == "" - def printClass(level: Int, c: ClassSymbol) { + def printClass(level: Int, c: ClassSymbol): Unit = { if (c.name == "" /*scala.tools.nsc.symtab.StdNames.LOCAL_CHILD.toString()*/ ) { print("\n") } else { @@ -158,20 +157,20 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { val baos = new ByteArrayOutputStream val stream = new PrintStream(baos) val printer = new ScalaSigPrinter(stream, printPrivates) - printer.printMethodType(m.infoType, false)(()) + 
printer.printMethodType(m.infoType, printResult = false)(()) baos.toString case _ => "" } } - def printPackageObject(level: Int, o: ObjectSymbol) { + def printPackageObject(level: Int, o: ObjectSymbol): Unit = { printModifiers(o) print("package ") print("object ") val poName = o.symbolInfo.owner.name print(processName(poName)) - val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType + val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType: @unchecked printType(classSymbol) print(" {\n") printChildren(level, classSymbol) @@ -179,11 +178,11 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { } - def printObject(level: Int, o: ObjectSymbol) { + def printObject(level: Int, o: ObjectSymbol): Unit = { printModifiers(o) print("object ") print(processName(o.name)) - val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType + val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType: @unchecked printType(classSymbol) print(" {\n") printChildren(level, classSymbol) @@ -195,7 +194,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { def _pmt(mt: MethodType) = { val paramEntries = mt.paramSymbols.map({ - case ms: MethodSymbol => ms.name + ": " + toString(ms.infoType)(TypeFlags(true)) + case ms: MethodSymbol => ms.name + ": " + toString(ms.infoType)(TypeFlags(printRep = true)) case _ => "^___^" }) val implicitWord = mt.paramSymbols.headOption match { @@ -208,7 +207,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { // Print result type mt.resultType match { - case mt: MethodType => printMethodType(mt, printResult)({}) + case res: MethodType => printMethodType(res, printResult)(()) case x => if (printResult) { print(": ") printType(x) @@ -231,7 +230,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { cont } - def printMethod(level: Int, m: MethodSymbol, indent: () => Unit) { + def printMethod(level: Int, m: MethodSymbol, indent: () => Unit): Unit = { def cont() = print(" = { 
/* compiled code */ }") val n = m.name @@ -251,18 +250,18 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { n match { case CONSTRUCTOR_NAME => print("this") - printMethodType(m.infoType, false)(cont) + printMethodType(m.infoType, printResult = false)(cont()) case name => val nn = processName(name) print(nn) - printMethodType(m.infoType, true)( + printMethodType(m.infoType, printResult = true)( {if (!m.isDeferred) print(" = { /* compiled code */ }" /* Print body only for non-abstract methods */ )} ) } print("\n") } - def printAlias(level: Int, a: AliasSymbol) { + def printAlias(level: Int, a: AliasSymbol): Unit = { print("type ") print(processName(a.name)) printType(a.infoType, " = ") @@ -270,7 +269,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { printChildren(level, a) } - def printTypeSymbol(level: Int, t: TypeSymbol) { + def printTypeSymbol(level: Int, t: TypeSymbol): Unit = { print("type ") print(processName(t.name)) printType(t.infoType) @@ -314,7 +313,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { case _ => value.toString } - implicit object _tf extends TypeFlags(false) + implicit object _tf extends TypeFlags(printRep = false) def printType(sym: SymbolInfoSymbol)(implicit flags: TypeFlags): Unit = printType(sym.infoType)(flags) @@ -327,32 +326,33 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { def toString(t: Type, sep: String)(implicit flags: TypeFlags): String = { // print type itself t match { - case ThisType(symbol) => sep + processName(symbol.path) + ".type" + case ThisType(symbol) => sep + processName(symbol.path) + ".type" case SingleType(typeRef, symbol) => sep + processName(symbol.path) + ".type" - case ConstantType(constant) => sep + (constant match { - case null => "scala.Null" - case _: Unit => "scala.Unit" - case _: Boolean => "scala.Boolean" - case _: Byte => "scala.Byte" - case _: Char => "scala.Char" - case _: Short => "scala.Short" - case _: Int 
=> "scala.Int" - case _: Long => "scala.Long" - case _: Float => "scala.Float" - case _: Double => "scala.Double" - case _: String => "java.lang.String" - case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]" + case ConstantType(constant) => sep + (constant match { + case null => "scala.Null" + case _: Unit => "scala.Unit" + case _: Boolean => "scala.Boolean" + case _: Byte => "scala.Byte" + case _: Char => "scala.Char" + case _: Short => "scala.Short" + case _: Int => "scala.Int" + case _: Long => "scala.Long" + case _: Float => "scala.Float" + case _: Double => "scala.Double" + case _: String => "java.lang.String" + case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]" case e: ExternalSymbol => e.parent.get.path - case tp: Type => "java.lang.Class[" + toString(tp, sep) + "]" + case tp: Type => "java.lang.Class[" + toString(tp, sep) + "]" + case x => throw new MatchError(x) }) case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match { case "scala." => flags match { case TypeFlags(true) => toString(typeArgs.head) + "*" - case _ => "scala.Seq" + typeArgString(typeArgs) + case _ => "scala.Seq" + typeArgString(typeArgs) } case "scala." 
=> "=> " + toString(typeArgs.head) case _ => { - val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference + val path = symbol.path.replace(".package", "") //remove package object reference (processName(path) + typeArgString(typeArgs)).stripPrefix(".") } }) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala index 8b5616b36923..c2ac2b3d965a 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala index dba2e6d033da..bb8e5d78af89 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). 
@@ -39,7 +39,7 @@ abstract class ScalaSigSymbol extends Symbol { def entry: ScalaSig#Entry def index = entry.index - lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (sym => sym.parent == Some(this) && (this match { case _: MethodSymbol => true case _ => !sym.isParam})) + lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (sym => sym.parent == Some(this) && !sym.isParam) lazy val attributes: Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this) } diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala index 85bf97543c8e..79e603c6f4f8 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala @@ -1,7 +1,7 @@ /* * Scala classfile decoder (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). diff --git a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala b/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala deleted file mode 100644 index 08c689b57fbd..000000000000 --- a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala classfile decoder (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.scalap -package scalax -package util - -import java.beans.Introspector - -/** - * @author ilyas - */ - -object StringUtil { - - def decapitalize(s: String) = Introspector.decapitalize(s) - - def cutSubstring(dom: String)(s: String) = if (dom != null && s != null) dom.replace(s, "") else dom - -} diff --git a/src/tastytest/scala/tools/tastytest/ClasspathOps.scala b/src/tastytest/scala/tools/tastytest/ClasspathOps.scala new file mode 100644 index 000000000000..6a7fbb9dc7d2 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/ClasspathOps.scala @@ -0,0 +1,22 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tastytest + +import java.net.URL +import java.nio.file.Paths + +object ClasspathOps { + implicit class ClassPathSyntax(private val ls: List[String]) extends AnyVal { + def asURLs: List[URL] = ls.map(Paths.get(_).toUri().toURL()) + } +} diff --git a/src/tastytest/scala/tools/tastytest/Classpaths.scala b/src/tastytest/scala/tools/tastytest/Classpaths.scala new file mode 100644 index 000000000000..716aa218f163 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Classpaths.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.util.Properties +import java.io.File.pathSeparatorChar + +object Classpaths { + + private def classpathProp(name: String) = + Properties.propOrNone(name).map(_.split(pathSeparatorChar).filter(_.nonEmpty).toList).getOrElse(Nil) + + def dottyCompiler: List[String] = classpathProp("tastytest.classpaths.dottyCompiler") + + def scalaReflect: List[String] = classpathProp("tastytest.classpaths.scalaReflect") + + def dottyLibrary: List[String] = classpathProp("tastytest.classpaths.dottyLibrary") + +} diff --git a/src/tastytest/scala/tools/tastytest/Diff.scala b/src/tastytest/scala/tools/tastytest/Diff.scala new file mode 100644 index 000000000000..f3240ffa00d0 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Diff.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.jdk.CollectionConverters._ +import com.github.difflib.{DiffUtils, UnifiedDiffUtils} + +object Diff { + def removeTrailing(str: String): String = { + val lastWhitespace = str.reverseIterator.indexWhere(!_.isWhitespace) + if (lastWhitespace == -1) str + else str.dropRight(lastWhitespace) + } + + + def splitIntoLines(string: String): Seq[String] = + string.linesIterator.map(removeTrailing).toSeq + + def splitIntoLines(stream: java.util.stream.Stream[String]): Seq[String] = + stream.map(removeTrailing).iterator().asScala.toSeq + + def compareContents(output: String, check: String): String = + compareContents(splitIntoLines(output), splitIntoLines(check)) + + def compareContents(output: Seq[String], check: Seq[String]): String = { + val diff = DiffUtils.diff(check.asJava, output.asJava) + if (diff.getDeltas.isEmpty) + "" + else + UnifiedDiffUtils + .generateUnifiedDiff( + "check", + "output", + check.asJava, + diff, + 1 + ) + .asScala + .mkString("\n") + } +} diff --git a/src/tastytest/scala/tools/tastytest/Dotc.scala b/src/tastytest/scala/tools/tastytest/Dotc.scala new file mode 100644 index 000000000000..ec460a1215e9 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Dotc.scala @@ -0,0 +1,169 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.util.{Try, Success, Failure} +import scala.util.control.NonFatal + +import scala.reflect.internal.util.ScalaClassLoader +import scala.reflect.runtime.ReflectionUtils +import java.lang.reflect.{Modifier, Method} + +import ClasspathOps._ +import java.io.OutputStream +import java.io.BufferedReader +import java.io.PrintWriter + +object Dotc extends Script.Command { + + final case class ClassLoader private (val parent: ScalaClassLoader) + + def initClassloader(): Try[Dotc.ClassLoader] = + Try(Dotc.ClassLoader(ScalaClassLoader.fromURLs(Classpaths.dottyCompiler.asURLs))) + + def processIn(op: Dotc.ClassLoader => Int): Int = { + Dotc.initClassloader() match { + case Success(cl) => op(cl) + case Failure(err) => + println(red(s"could not initialise Scala 3 classpath: $err")) + 1 + } + } + + def loadClass(name: String)(implicit cl: Dotc.ClassLoader) = + Class.forName(name, true, cl.parent) + + def invokeStatic(method: Method, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { + assert(Modifier.isStatic(method.getModifiers), s"$method is not static!") + invoke(method, null, args) + } + + def invokeStatic( + className: String, + methodName: String, + args: Seq[(Class[_], Any)], + )(implicit cl: Dotc.ClassLoader): Try[Object] = { + val cls = loadClass(className) + val (tpes, provided) = args.unzip + val method = cls.getMethod(methodName, tpes:_*) + Try { + invokeStatic(method, provided) + } + } + + def invoke(method: Method, obj: AnyRef, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { + inClassloader[AnyRef] { + method.invoke(obj, args.toArray:_*) + } + } + + def inClassloader[T](op: => T)(implicit cl: Dotc.ClassLoader): T = { + try cl.parent.asContext[T] { + op + } + catch { + case NonFatal(ex) => throw ReflectionUtils.unwrapThrowable(ex) + } + } + + def processMethod(className: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = + processMethodImpl(className)(args, None) + + private def 
makeConsoleReporter(stream: OutputStream)(implicit cl: Dotc.ClassLoader): Try[AnyRef] = Try { + val consoleReporterCls = loadClass("dotty.tools.dotc.reporting.ConsoleReporter") + val ctor = consoleReporterCls.getConstructor( + /* reader: BufferedReader */classOf[BufferedReader], + /* writer: PrintWriter */classOf[PrintWriter], + /* echoer: PrintWriter */classOf[PrintWriter] // since 3.5.0-RC2 + ) + val pwriter = new PrintWriter(stream, true) + inClassloader[AnyRef] { + ctor.newInstance(/* reader = */Console.in, /* writer = */pwriter, /* echoer= */pwriter) + } + } + + private def processMethodImpl(className: String)(args: Seq[String], writer: Option[OutputStream])(implicit cl: Dotc.ClassLoader): Try[Boolean] = { + val reporterCls = loadClass("dotty.tools.dotc.reporting.Reporter") + val Reporter_hasErrors = reporterCls.getMethod("hasErrors") + val processArgs: Try[Seq[(Class[_], Any)]] = { + writer match { + case Some(stream) => + val callbackCls = loadClass("dotty.tools.dotc.interfaces.CompilerCallback") + for (myReporter <- makeConsoleReporter(stream)) yield + Seq(classOf[Array[String]] -> args.toArray, reporterCls -> myReporter, callbackCls -> null) + case _ => + Try(Seq(classOf[Array[String]] -> args.toArray)) + } + } + for { + args <- processArgs + reporter <- invokeStatic(className, "process", args) + } yield { + val hasErrors = invoke(Reporter_hasErrors, reporter, Seq.empty).asInstanceOf[Boolean] + !hasErrors + } + } + + def mainMethod(className: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = { + val mainArgs = Seq(classOf[Array[String]] -> args.toArray) + for (_ <- invokeStatic(className, "main", mainArgs)) yield () + } + + def dotcVersion(implicit cl: Dotc.ClassLoader): String = { + val compilerPropertiesClass = loadClass("dotty.tools.dotc.config.Properties") + val Properties_simpleVersionString = compilerPropertiesClass.getMethod("simpleVersionString") + invokeStatic(Properties_simpleVersionString, Seq.empty).asInstanceOf[String] + } 
+ + def dotc(out: String, classpath: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Boolean] = + dotcImpl(None, out, classpath, additionalSettings, sources:_*) + + def dotc(writer: OutputStream, out: String, classpath: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Boolean] = + dotcImpl(Some(writer), out, classpath, additionalSettings, sources:_*) + + def dotcImpl(writer: Option[OutputStream], out: String, classpath: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Boolean] = { + if (sources.isEmpty) { + Success(true) + } + else { + val libraryDeps = Classpaths.dottyLibrary ++ Classpaths.scalaReflect + val args = Seq( + "-d", out, + "-classpath", libraryDeps.mkString(classpath + Files.classpathSep, Files.classpathSep, ""), + "-deprecation", + "-Xfatal-warnings", + "-color:never", + ) ++ additionalSettings ++ sources + if (TastyTest.verbose) { + println(yellow(s"Invoking dotc (version $dotcVersion) with args: $args")) + } + processMethodImpl("dotty.tools.dotc.Main")(args, writer) + } + } + + val commandName: String = "dotc" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length < 2) { + println(red(s"please provide at least two arguments in sub-command: $describe")) + return 1 + } + val Seq(out, src, additional @ _*) = args: @unchecked + Dotc.processIn { implicit scala3classloader => + val success = dotc(out, out, additional, src).get + if (success) 0 else 1 + } + } + +} diff --git a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala new file mode 100644 index 000000000000..c74257648e67 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tastytest + +import scala.util.Try + +object DotcDecompiler extends Script.Command { + + private def dotcProcess(args: Seq[String])(implicit cl: Dotc.ClassLoader) = + Dotc.processMethod("dotty.tools.dotc.decompiler.Main")(args) + + def decompile(source: String, additionalSettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = + dotcProcess(("-usejavacp" +: additionalSettings :+ source)) + + val commandName: String = "dotcd" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length < 1) { + println(red(s"please provide at least 1 argument in sub-command: $describe")) + return 1 + } + val Seq(tasty, additionalSettings @ _*) = args: @unchecked + Dotc.processIn { implicit scala3classloader => + val success = decompile(tasty, additionalSettings).get + if (success) 0 else 1 + } + } + +} diff --git a/src/tastytest/scala/tools/tastytest/Files.scala b/src/tastytest/scala/tools/tastytest/Files.scala new file mode 100644 index 000000000000..c465663fb1a1 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Files.scala @@ -0,0 +1,104 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.io.{ Source, BufferedSource } +import scala.jdk.CollectionConverters._ +import scala.util.Try + +import java.{ lang => jl, util => ju } +import java.nio.file.{ Files => JFiles, Paths => JPaths, Path => JPath, PathMatcher, FileSystems } +import java.io.FileNotFoundException + +object Files { + + def globMatcher(str: String): PathMatcher = FileSystems.getDefault.getPathMatcher(s"glob:$str") + + def tempDir(dir: String): Try[String] = Try(JFiles.createTempDirectory(dir)).map(_.toString) + + def currentDir: String = FileSystems.getDefault.getPath(".").toString + + def relativize(from: String, paths: String*): Try[Seq[String]] = Try { + val root = JPaths.get(from).toAbsolutePath + paths.map(p => root.relativize(JPaths.get(p).toAbsolutePath).toString()) + } + + def copyAll(relPaths: Seq[String], from: String, to: String): Try[Unit] = Try { + relPaths.foreach { p => + // create all directories in the path if they don't exist + JFiles.createDirectories(JPaths.get(to/p).getParent) + JFiles.copy(JPaths.get(from/p), JPaths.get(to/p)) + } + } + + def dir(dir: String): Try[String] = Try { + val path = JPaths.get(dir) + if (JFiles.isDirectory(path)) { + path.normalize.toString + } + else { + throw new FileNotFoundException(s"$path is not a directory.") + } + } + + def getFiles(dir: String): Try[Seq[String]] = Try { + var stream: java.util.stream.Stream[JPath] = null + try { + stream = JFiles.walk(JPaths.get(dir)) + val files = { + stream.filter(!JFiles.isDirectory(_)) + .map(_.normalize.toString) + .iterator + .asScala + .toSeq + } + if (files.isEmpty) printwarnln(s"Warning: $dir is empty.") + files + } finally { + if (stream != null) { + stream.close() + } + } + } + + def allowByNames(names: Set[String])(elem: String): Boolean = { + val path = JPaths.get(elem) + val name = path.getFileName.toString + names.contains(name) + } + + def processLines[A](file: String)(op: ju.stream.Stream[String] => A): A = { + val stream: 
java.util.stream.Stream[String] = JFiles.lines(JPaths.get(file)) + try + op(stream) + finally + stream.close() + + } + + def use[T](resource: String)(op: jl.Iterable[String] => Try[T]): Try[T] = Try { + var source: BufferedSource = null + try { + source = Source.fromResource(resource) + op(() => source.getLines().asJava) + } + finally if (source != null) { + source.close() + } + }.flatten + + val pathSep: String = FileSystems.getDefault.getSeparator + + val classpathSep: String = java.io.File.pathSeparator + +} diff --git a/src/tastytest/scala/tools/tastytest/Javac.scala b/src/tastytest/scala/tools/tastytest/Javac.scala new file mode 100644 index 000000000000..4d0c38884ada --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Javac.scala @@ -0,0 +1,56 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.collection.immutable.ArraySeq +import scala.util.{Try, Success, Properties} + +import javax.tools.ToolProvider + +object Javac extends Script.Command { + + def javac(out: String, sources: String*): Try[Boolean] = { + + val javaCompiler = ToolProvider.getSystemJavaCompiler + val javaCP = Properties.propOrEmpty("java.class.path") + + def compile(args: String*) = + Try(javaCompiler.run(null, null, null, args:_*) == 0) + + if (sources.isEmpty) { + Success(true) + } + else { + val classpath = Seq(out, javaCP).filter(!_.isEmpty).mkString(Files.classpathSep) + val settings = Array( + "-d", out, + "-classpath", classpath, + ) ++ sources + compile(ArraySeq.unsafeWrapArray(settings):_*) + } + } + + val commandName: String = "javac" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length != 2) { + println(red(s"please provide two arguments in sub-command: $describe")) + return 1 + } + val Seq(out, src) = args: @unchecked + val success = javac(out, src).get + if (success) 0 else 1 + } + +} diff --git a/src/tastytest/scala/tools/tastytest/PrintTasty.scala b/src/tastytest/scala/tools/tastytest/PrintTasty.scala new file mode 100644 index 000000000000..14ad9e8c399c --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/PrintTasty.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.util.Try +import scala.util.Success +import scala.util.Failure + +object PrintTasty extends Script.Command { + + def printTasty(tasty: String)(implicit cl: Dotc.ClassLoader): Try[Unit] = + Dotc.mainMethod("dotty.tools.dotc.core.tasty.TastyPrinter")(Seq(tasty)) + + val commandName: String = "printTasty" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length != 1) { + println(red(s"please provide 1 argument in sub-command: $describe")) + return 1 + } + Dotc.processIn { implicit scala3classloader => + printTasty(tasty = args.head) match { + case Success(_) => 0 + case Failure(err) => + println(red(s"failed to print tasty: $err")) + 1 + } + } + } + +} diff --git a/src/tastytest/scala/tools/tastytest/Runner.scala b/src/tastytest/scala/tools/tastytest/Runner.scala new file mode 100644 index 000000000000..be52726a1c2e --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Runner.scala @@ -0,0 +1,98 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.reflect.internal.util.ScalaClassLoader +import scala.reflect.runtime.ReflectionUtils +import scala.util.Try + +import java.nio.file.Paths +import java.io.{ OutputStream, ByteArrayOutputStream } +import java.{ lang => jl } +import jl.reflect.Modifier +import scala.util.control.NonFatal +import java.lang.reflect.Method + +import Files._ +import java.net.URL + +class Runner private (classloader: ScalaClassLoader) { + + val Runner_run: Method = { + val internal_Runner = Class.forName(Runner.name, true, classloader) + val run = internal_Runner.getMethod("run", classOf[String], classOf[OutputStream], classOf[OutputStream]) + assert(Modifier.isStatic(run.getModifiers), s"${Runner.name}.run is not static") + run + } + + def runCaptured(name: String): Try[String] = { + def kernel(out: OutputStream, err: OutputStream): Try[Unit] = Try { + try classloader.asContext[Unit](Runner_run.invoke(null, name, out, err)) + catch { + case NonFatal(ex) => throw ReflectionUtils.unwrapThrowable(ex) + } + } + val outStream = new ByteArrayOutputStream(50) + try { + val result = kernel(outStream, outStream) + outStream.flush() + result.map(_ => outStream.toString) + } + finally outStream.close() + } +} + +object Runner extends Script.Command { + + private val name = "scala.tools.tastytest.internal.Runner" + + private def currentClasspath: Try[Seq[URL]] = splitClasspath(System.getProperty("java.class.path")) + + private def splitClasspath(classpath: String): Try[Seq[URL]] = + Try(classpath.split(classpathSep).filter(_.nonEmpty).map(Paths.get(_).toUri.toURL).toIndexedSeq) + + def classloadFrom(classpath: String): Try[ScalaClassLoader] = for { + classpaths <- splitClasspath(classpath) + current <- currentClasspath + classloader <- Try(ScalaClassLoader.fromURLs(current ++ classpaths)) + } yield classloader + + def run(classloader: ScalaClassLoader, name: String): Unit = { + try { + val objClass = Class.forName(name, true, classloader) + val 
main = objClass.getMethod("main", classOf[Array[String]]) + if (!Modifier.isStatic(main.getModifiers)) + throw new NoSuchMethodException(name + ".main is not static") + classloader.asContext[Unit](main.invoke(null, Array.empty[String])) + } + catch { + case NonFatal(ex) => throw ReflectionUtils.unwrapThrowable(ex) + } + } + + def capturingRunner(classloader: ScalaClassLoader): Try[Runner] = Try(new Runner(classloader)) + + val commandName: String = "runDotty" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length != 2) { + println(red(s"please provide 2 arguments in sub-command: $describe")) + return 1 + } + val Seq(classpath, className) = args: @unchecked + classloadFrom(classpath).map(run(_, className)).get + 0 + } + +} diff --git a/src/tastytest/scala/tools/tastytest/Scalac.scala b/src/tastytest/scala/tools/tastytest/Scalac.scala new file mode 100644 index 000000000000..a503e7ee3f29 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Scalac.scala @@ -0,0 +1,83 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.collection.immutable.ArraySeq +import scala.util.{ Try, Success, chaining }, chaining._ +import scala.tools.nsc.{Global, Settings, reporters}, reporters.ConsoleReporter +import java.io.OutputStream +import java.io.PrintWriter +import scala.tools.tastytest.classpath + +object Scalac extends Script.Command { + + def scalac(out: String, extraCp: Option[String], additionalSettings: Seq[String], sources: String*): Try[Boolean] = + scalac(Console.out, out, extraCp, additionalSettings, sources:_*) + + def scalac(writer: OutputStream, out: String, extraCp: Option[String], additionalSettings: Seq[String], sources: String*) = { + + def runCompile(global: Global): Boolean = { + global.reporter.reset() + new global.Run() compile sources.toList + val result = !global.reporter.hasErrors + global.reporter.finish() + result + } + + def newCompiler(args: String*): Global = + fromSettings(new Settings().tap(_.processArguments(args.toList, processAll = true))) + + def fromSettings(settings: Settings): Global = { + val pwriter = new PrintWriter(writer, true) + Global(settings, new ConsoleReporter(settings, Console.in, pwriter).tap(_.shortname = true)) + } + + def compile(args: String*) = + Try(runCompile(newCompiler(args: _*))) + + if (sources.isEmpty) { + Success(true) + } + else { + val settings = Array( + "-d", out, + "-classpath", classpath(out, extraCp.toList:_*), + "-deprecation", + "-Xfatal-warnings", + "-usejavacp" + ) ++ additionalSettings + compile(ArraySeq.unsafeWrapArray(settings):_*) + } + } + + val commandName: String = "scalac" + val describe: String = s"$commandName [--extra-cp ] " + + def process(args: String*): Int = { + if (args.length < 2) { + println(red(s"please provide at least 2 arguments in sub-command: $describe")) + return 1 + } + val Seq(out, src, additionalArgs @ _*) = args: @unchecked + val (extraCp, additionalArgs0) = { + val extraCpIdx = additionalArgs.indexOf("--extra-cp") + if (extraCpIdx < 0) 
(None, additionalArgs) + else { + val (before, Seq(_, cp, rest @ _*)) = additionalArgs.splitAt(extraCpIdx): @unchecked + (Some(cp), before ++ rest) + } + } + val success = scalac(out, extraCp, additionalArgs0, src).get + if (success) 0 else 1 + } +} diff --git a/src/tastytest/scala/tools/tastytest/Scaladoc.scala b/src/tastytest/scala/tools/tastytest/Scaladoc.scala new file mode 100644 index 000000000000..be5a900a98d3 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Scaladoc.scala @@ -0,0 +1,76 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tastytest + +import scala.collection.immutable.ArraySeq +import scala.util.{ Try, Success, chaining }, chaining._ +import scala.tools.nsc.{ reporters}, reporters.{Reporter, ConsoleReporter} +import java.io.OutputStream +import java.io.PrintWriter + +import scala.tools.nsc.{ScalaDoc => RealScaladoc, doc} +import scala.reflect.internal.util.NoPosition + +object Scaladoc extends Script.Command { + + def scaladoc(out: String, additionalSettings: Seq[String], sources: String*): Try[Boolean] = + scaladoc(Console.out, out, additionalSettings, sources:_*) + + def scaladoc(writer: OutputStream, out: String, additionalSettings: Seq[String], sources: String*) = { + + def setup(args: Seq[String]): (Reporter, doc.Settings, RealScaladoc.Command) = { + lazy val (reporter: Reporter, docSettings) = { + val docSettings = new doc.Settings(msg => reporter.error(NoPosition, msg), msg => reporter.echo(msg)) + val pwriter = new PrintWriter(writer, true) + (new ConsoleReporter(docSettings, Console.in, pwriter).tap(_.shortname = true), docSettings) + } + (reporter, docSettings, new RealScaladoc.Command(args.toList, docSettings)) + } + + def compile(args: String*): 
Try[Boolean] = { + val (reporter, docSettings, command) = setup(args) + Try { + assert(command.files.nonEmpty, "no files to compile") + try { new doc.DocFactory(reporter, docSettings).document(command.files) } + finally reporter.finish() + }.map(_ => !reporter.hasErrors) + } + + if (sources.isEmpty) { + Success(true) + } + else { + val settings = Array( + "-d", out, + "-classpath", out, + "-deprecation", + "-Xfatal-warnings", + "-usejavacp" + ) ++ additionalSettings ++ sources + compile(ArraySeq.unsafeWrapArray(settings):_*) + } + } + + val commandName: String = "scaladoc" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length < 2) { + println(red(s"please provide at least 2 arguments in sub-command: $describe")) + return 1 + } + val Seq(out, src, additionalArgs @ _*) = args: @unchecked + val success = scaladoc(out, additionalArgs, src).get + if (success) 0 else 1 + } +} diff --git a/src/tastytest/scala/tools/tastytest/Script.scala b/src/tastytest/scala/tools/tastytest/Script.scala new file mode 100644 index 000000000000..2c65019a6832 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Script.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +trait Script extends Script.Command { + + def subcommands: Seq[Script.Command] + + final def process(args: String*): Int = { + if (args.isEmpty) { + println(red("Please provide at least one sub-command")) + return 1 + } + val Seq(command, args0 @ _*) = args: @unchecked + subcommands.collectFirst { + case subcommand if subcommand.commandName == command => subcommand.process(args0:_*) + }.getOrElse { + println(red(s"unrecognised sub-command $command, try from the following $describe")) + 1 + } + } + + final def main(args: Array[String]): Unit = sys.exit(process(args.toIndexedSeq: _*)) + + final def describe = subcommands.map(sub => s"$commandName ${sub.describe}").mkString("options:\n ","\n ","") +} + +object Script { + + trait Command { + def describe: String + def commandName: String + def process(args: String*): Int + } + +} diff --git a/src/tastytest/scala/tools/tastytest/SourceFile.scala b/src/tastytest/scala/tools/tastytest/SourceFile.scala new file mode 100644 index 000000000000..f24abc0216ba --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/SourceFile.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.util.Using +import scala.io.Source + +import SourceFile._ +import scala.util.chaining._ + +final case class SourceFile(path: String) { + lazy val options: Options = readOptions(path) +} + +object SourceFile { + + private val directivePattern = raw"\s*//>\s+using\s+(\S+)(?:\s+(.*))?".r + final case class Options(data: Map[String, Option[String]]) + + def readOptions(path: String): Options = + Using.resource(Source.fromFile(path)) { source => + source.getLines().takeWhile(_.trim.startsWith("//>")) + .flatMap { + case directivePattern(key, valueOrNull) => Some(key -> Option(valueOrNull)) + case _ => None + } + .toMap + .pipe(Options(_)) + } +} diff --git a/src/tastytest/scala/tools/tastytest/SourceKind.scala b/src/tastytest/scala/tools/tastytest/SourceKind.scala new file mode 100644 index 000000000000..4e9454058e2b --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/SourceKind.scala @@ -0,0 +1,76 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.util.Properties + +sealed abstract class SourceKind(val name: String){ self => + def permits(file: String): Boolean + def shouldValidate: Boolean + def validate(options: SourceFile.Options): Boolean +} + +sealed trait PermitByName { self: SourceKind => + def permits(file: String): Boolean = file.endsWith(name) + def fileOf(name: String) = name + self.name +} + +sealed trait AlwaysValid { self: SourceKind => + def shouldValidate: Boolean = false + def validate(options: SourceFile.Options) = true +} + +sealed trait CheckJVM { self: SourceKind => + def shouldValidate: Boolean = true + def validate(options: SourceFile.Options) = { + import CheckJVM.versionPattern + options.data.get("jvm") match { + case None => true // nothing to check + case Some(value) => value.getOrElse("") match { + case versionPattern(raw) => Properties.isJavaAtLeast(raw.toInt) + case value => throw new IllegalArgumentException(s"Invalid JVM version: $value") + } + } + } + +} + +object CheckJVM { + val versionPattern: scala.util.matching.Regex = raw"(\d+)\+".r +} + +object SourceKind { + + case object Scala extends SourceKind(".scala") with PermitByName with CheckJVM + case object ScalaFail extends SourceKind("_fail.scala") with PermitByName with AlwaysValid + case object ScalaPre extends SourceKind("_pre.scala") with PermitByName with AlwaysValid + case object Check extends SourceKind(".check") with PermitByName with AlwaysValid + case object SkipCheck extends SourceKind(".skipcheck") with PermitByName with AlwaysValid + case object Java extends SourceKind(".java") with PermitByName with CheckJVM + case object TastyFile extends SourceKind(".tasty") with PermitByName with AlwaysValid + + final case class ExactFiles(names: String*) extends SourceKind("") with AlwaysValid { + override def permits(file: String) = names.contains(file) + } + + def allowByKind(kinds: Set[SourceKind], paths: String*): Seq[String] = { + if (kinds.isEmpty) Nil // no 
kinds, so allow nothing + else { + val bigPermit = kinds.foldLeft((_: SourceFile) => false) { (permits, kind) => + file => + kind.permits(file.path) && (!kind.shouldValidate || kind.validate(file.options)) || permits(file) + } + paths.view.map(new SourceFile(_)).filter(bigPermit).map(_.path).toSeq + } + } +} diff --git a/src/tastytest/scala/tools/tastytest/TastyTest.scala b/src/tastytest/scala/tools/tastytest/TastyTest.scala new file mode 100644 index 000000000000..540d6b8fa915 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/TastyTest.scala @@ -0,0 +1,528 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tastytest + +import scala.collection.mutable +import scala.jdk.CollectionConverters._ + +import scala.util.{ Try, Success, Failure } + +import java.nio.file.{ Files => JFiles, Paths => JPaths, Path => JPath } +import java.io.ByteArrayOutputStream +import java.{ util => ju } + +import SourceKind._ +import Files._ +import java.io.OutputStream + +object TastyTest { + + private[tastytest] val verbose = false + private[tastytest] val debug = false + + private def log(s: => String): Unit = + if (verbose) println(s) + + /**Simulates a Scala 2 application that depends on a Scala 3 library, where both may depend on a common prelude + * compiled by Scala 2. + * + * Steps: + * 1) compile all Scala files in `pre` with scala 2 to `out` + * 2) compile all Scala files in `src-3` with scala 3 to `out`, with `out` as the classpath + * 3) compile all Scala files in `src-2` with scala 2 to `out`, with `out` as the classpath + * 4) run the main method of all classes in `out/pkgName` that match a file in `src-2`. + * e.g. 
`out/tastytest/TestFoo.class` should be compiled from a corresponding file + * `src-2/tastytest/TestFoo.scala`. + */ + def runSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (pre, src2, src3) <- getRunSources(srcRoot/src) + out <- outDir.fold(tempDir(pkgName))(dir) + _ <- scalacPos(out, individualCapable=false, sourceRoot=srcRoot/src/"pre", additionalSettings, pre:_*) + _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) + _ <- scalacPos(out, individualCapable=true, sourceRoot=srcRoot/src/"src-2", additionalSettings, src2:_*) + testNames <- visibleClasses(out, pkgName, src2:_*) + _ <- runMainOn(out, testNames:_*) + } yield () + + /**Simulates a Scala 2 application that depends on a Scala 3 library, + * where pipeline-compatible compilation is tested by restricting to a TASTy-only classpath. + * + * Steps: + * 1) compile all Scala/Java files in `src-3` with scala 3 to `out-classes`, send (Java TASTy to `java-tasty.jar`). + * 2) copy TASTy files from `out-classes` to `out`. (ensuring Scala 2 will not see class files) + * 3) compile all Scala files in `src-2` with scala 2 to `out`, with `out:java-tasty.jar` as the classpath. + * 4) compile Java files in `src-3` to `out-classes` with Javac. + * 5) run the main method of all classes in `out/pkgName` that match a file in `src-2`. + * e.g. `out/tastytest/TestFoo.class` should be compiled from a corresponding file + * `src-2/tastytest/TestFoo.scala`. Use `out:out-classes` as the classpath. 
+ */ + def runPipelinedSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String], testFilter: Option[SourceKind] = None)(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (src2, src3) <- get2And3Sources(srcRoot/src, src2Filters = Set(Scala), src3Filters = Set(Scala, Java)) + case ((out, outJ), outCls) <- outDirs.fold(tempDir(pkgName) <*> tempDir(pkgName) <*> tempDir(pkgName))(p => dir(p._1) <*> dir(p._2) <*> dir(p._3)) + tastyJar = outJ/"java-tasty.jar" + _ <- dotcPos(outCls, sourceRoot=srcRoot/src/"src-3", pipelineDottyOpts(tastyJar) ++: additionalDottySettings, src3:_*) + allOuts <- getFiles(outCls) + relTastys <- relativize(outCls, allowByKind(Set(TastyFile), allOuts:_*):_*) + _ <- copyAll(relTastys, outCls, out) + src2Filtered = testFilter.fold(src2)(kind => allowByKind(Set(kind), src2:_*)) + _ <- scalacPos(out, tastyJar, individualCapable=true, sourceRoot=srcRoot/src/"src-2", additionalSettings, src2Filtered:_*) + _ <- javacPos(outCls, sourceRoot=srcRoot/src/"src-3", allowByKind(Set(Java), src3:_*):_*) + testNames <- visibleClasses(out, pkgName, src2Filtered:_*) + _ <- runMainOn(classpath(out, outCls), testNames:_*) + } yield () + + /**Simulates a Scala 2 application that depends on a Scala 3 library, where both may depend on a common prelude + * compiled by Scala 2 and Java. In this case the applications are not executed. 
+ * Steps: + * 1) compile all Java files in `pre` with Java to `out` + * 2) compile all Scala files in `pre` with Scala 2 to `out`, with `out` as the classpath + * 3) compile all Scala files in `src-3` with scala 3 to `out`, with `out` as the classpath + * 4) compile all Scala files in `src-2` with scala 2 to `out`, with `out` as the classpath + */ + def posSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (pre, src2, src3) <- getRunSources(srcRoot/src, preFilters = Set(Scala, Java)) + _ = log(s"Sources to compile under test: ${src2.map(cyan).mkString(", ")}") + out <- outDir.fold(tempDir(pkgName))(dir) + _ <- javacPos(out, sourceRoot=srcRoot/src/"pre", allowByKind(Set(Java), pre:_*):_*) + _ <- scalacPos(out, individualCapable=false, sourceRoot=srcRoot/src/"pre", additionalSettings, allowByKind(Set(Scala), pre:_*):_*) + _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) + _ <- scalacPos(out, individualCapable=true, sourceRoot=srcRoot/src/"src-2", additionalSettings, src2:_*) + } yield () + + + /**Simulates running scaladoc on a Scala 2 library that depends on a Scala 3 library. 
+ * Steps: + * 1) compile all Scala files in `src-3` with scala 3 to `out`, with `out` as the classpath + * 2) compile all Scala files in `src-2` with scaladoc (scala 2) to `out`, with `out` as the classpath + */ + def posDocSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (src2, src3) <- get2And3Sources(srcRoot/src, src2Filters = Set(Scala), src3Filters = Set(Scala)) + _ = log(s"Sources to compile under test: ${src2.map(cyan).mkString(", ")}") + out <- outDir.fold(tempDir(pkgName))(dir) + _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) + _ <- scaladoc(out, sourceRoot=srcRoot/src/"src-2", additionalSettings, src2:_*) + } yield () + + /**Simulates a Scala 2 application that depends on a Scala 3 library, and is expected to fail compilation. + * Steps: + * 1) compile all Scala files in `src-3` with scala 3 to `out` + * 2) attempt to compile all Scala files in `src-2` with scala 2 to `out`, with `out` as the classpath. + * - If a file matches `FOO_fail.scala`, then it is expected to fail compilation. + * - For each `FOO_fail.scala`, if the file fails compilation, there is expected to be a corresponding `FOO.check` file, containing + * the captured errors, or else a `FOO.skipcheck` file indicating to skip comparing errors. + * - If `FOO_fail.scala` has a corresponding `FOO_pre.scala` file, then that is compiled first to `out`, + * so that `FOO_fail.scala` may depend on its compilation results. 
+ */ + def negSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (src2, src3) <- get2And3Sources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) + out <- outDir.fold(tempDir(pkgName))(dir) + _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) + _ <- scalacNeg(out, additionalSettings, src2:_*) + } yield () + + /**Simulates a Scala 2 application that depends on a Scala 3 library, and is expected to fail compilation, + * where pipeline-compatible compilation is tested by restricting to a TASTy-only classpath. + * + * Steps: + * 1) compile all Scala/Java files in `src-3` with scala 3 to `out-classes`, send (Java TASTy to `java-tasty.jar`). + * 2) copy TASTy files from `out-classes` to `out`. (ensuring Scala 2 will not see class files) + * 3) attempt to compile all Scala files in `src-2` with scala 2 to `out`, with `out:java-tasty.jar` as the classpath. + * - If a file matches `FOO_fail.scala`, then it is expected to fail compilation. + * - For each `FOO_fail.scala`, if the file fails compilation, there is expected to be a corresponding `FOO.check` file, containing + * the captured errors, or else a `FOO.skipcheck` file indicating to skip comparing errors. + * - If `FOO_fail.scala` has a corresponding `FOO_pre.scala` file, then that is compiled first to `out`, + * so that `FOO_fail.scala` may depend on its compilation results. 
+ */ + def negPipelinedSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String], testFilter: Option[SourceKind] = None)(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (src2, src3) <- get2And3Sources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck), src3Filters = Set(Scala, Java)) + case ((out, outJ), outCls) <- outDirs.fold(tempDir(pkgName) <*> tempDir(pkgName) <*> tempDir(pkgName))(p => dir(p._1) <*> dir(p._2) <*> dir(p._3)) + tastyJar = outJ/"java-tasty.jar" + _ <- dotcPos(outCls, sourceRoot=srcRoot/src/"src-3", pipelineDottyOpts(tastyJar) ++: additionalDottySettings, src3:_*) + allOuts <- getFiles(outCls) + relTastys <- relativize(outCls, allowByKind(Set(TastyFile), allOuts:_*):_*) + _ <- copyAll(relTastys, outCls, out) + src2Filtered = testFilter.fold(src2)(kind => allowByKind(Set(kind, Check, SkipCheck), src2:_*)) + _ <- scalacNeg(out, tastyJar, additionalSettings, src2Filtered:_*) + } yield () + + /**Simulates a Scala 3 application that depends on a Scala 2 library, where the Scala 2 + * library directly depends on an upstream Scala 3 library. The Scala 3 application is expected to fail compilation. + * Steps: + * 1) compile all Scala files in `src-3-upstream` with scala 3 to `out` + * 2) compile all Scala files in `src-2-downstream` with scala 2 to `out`, with `out` as the classpath. + * 3) attempt to compile all Scala files in `src-3-app` with scala 3 to `out`, with `out` as the classpath, + * following the same steps as `negSuite` to check for errors in compilation. 
+ */ + def negFullCircleSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (src3u, src2d, src3a) <- getFullCircleSources(srcRoot/src, src3appFilters = Set(Scala, Check, SkipCheck)) + out <- outDir.fold(tempDir(pkgName))(dir) + _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3-upstream", additionalDottySettings, src3u:_*) + _ <- scalacPos(out, individualCapable=false, sourceRoot=srcRoot/src/"src-2-downstream", additionalSettings, src2d:_*) + _ <- dotcNeg(out, additionalDottySettings, src3a:_*) + } yield () + + /**Same as `negSuite`, but introduces a dependency on a prelude by both the Scala 3 and Scala 2 libraries. In + * this case, they depend on binary incompatible versions of the same prelude (e.g. some definitions have moved + * between versions). Steps: + * 1) compile all Scala files in `pre-A` with scala 2 to `out1`. + * 2) compile all Scala files in `pre-B` with scala 2 to `out2`. + * 3) compile all Scala files in `src-3` with scala 3 to `out2`, with `out1` as the classpath. + * 4) attempt to compile all Scala files in `src-2` with scala 2 to `out2`, with `out2` as the classpath, + * following the same steps as `negSuite` to check for errors in compilation. 
 + */ + def negChangePreSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (preA, preB, src2, src3) <- getMovePreChangeSources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) + (out1, out2) <- outDirs.fold(tempDir(pkgName) <*> tempDir(pkgName))(p => dir(p._1) <*> dir(p._2)) + _ <- scalacPos(out1, individualCapable=false, sourceRoot=srcRoot/src/"pre-A", additionalSettings, preA:_*) + _ <- scalacPos(out2, individualCapable=false, sourceRoot=srcRoot/src/"pre-B", additionalSettings, preB:_*) + _ <- dotcPos(out2, out1, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) + _ <- scalacNeg(out2,additionalSettings, src2:_*) + } yield () + + /**Same as `negSuite`, but in addition, the Scala 3 library depends on another upstream Scala 3 library, + * which is missing from the classpath when compiling the Scala 2 library. Steps: + * 1) compile all Scala files in `src-3-A` with scala 3 to `out1`. + * 2) compile all Scala files in `src-3-B` with scala 3 to `out2`, with `out1:out2` as the classpath. + * 3) attempt to compile all Scala files in `src-2` with scala 2 to `out2`, with `out2` as the classpath, + * following the same steps as `negSuite` to check for errors in compilation. 
+ */ + def negSuiteIsolated(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { + (src2, src3A, src3B) <- getNegIsolatedSources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) + (out1, out2) <- outDirs.fold(tempDir(pkgName) <*> tempDir(pkgName))(p => dir(p._1) <*> dir(p._2)) + _ <- dotcPos(out1, sourceRoot=srcRoot/src/"src-3-A", additionalDottySettings, src3A:_*) + _ <- dotcPos(out2, classpath(out1, out2), sourceRoot=srcRoot/src/"src-3-B", additionalDottySettings, src3B:_*) + _ <- scalacNeg(out2, additionalSettings, src2:_*) + } yield () + + private def javacPos(out: String, sourceRoot: String, sources: String*): Try[Unit] = { + log(s"compiling sources in ${yellow(sourceRoot)} with javac.") + successWhen(Javac.javac(out, sources:_*))("javac failed to compile sources.") + } + + private def scalacPos(out: String, extraCp: String, individualCapable: Boolean, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = + scalacPos(out, extraCp = Some(extraCp), individualCapable, sourceRoot, additionalSettings, sources:_*) + + private def scalacPos(out: String, individualCapable: Boolean, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = + scalacPos(out, extraCp = None, individualCapable, sourceRoot, additionalSettings, sources:_*) + + private def scalacPos(out: String, extraCp: Option[String], individualCapable: Boolean, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = { + log(s"compiling sources in ${yellow(sourceRoot)} with scalac.") + val res = { + if (debug && individualCapable) { + def compileIndividual(srcs: List[String]): Try[Boolean] = { + srcs match { + case Nil => Success(true) + case src :: rest => + log(s"compiling source ${yellow(src)} with scalac.") + Scalac.scalac(out, extraCp, "-Ytasty-reader" +: 
additionalSettings, src) match { + case Success(true) => compileIndividual(rest) + case err => err + } + } + } + compileIndividual(sources.toList) + } + else { + Scalac.scalac(out, extraCp, "-Ytasty-reader" +: additionalSettings, sources:_*) + } + } + successWhen(res)("scalac failed to compile sources.") + } + + private def scaladoc(out: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = { + log(s"compiling sources in ${yellow(sourceRoot)} with scalac.") + val res = Scaladoc.scaladoc(out, "-Ytasty-reader" +: additionalSettings, sources:_*) + successWhen(res)("scaladoc failed to compile resources") + } + + private def scalacNeg(out: String, additionalSettings: Seq[String], files: String*): Try[Unit] = + scalacNeg(out, extraCp = None, additionalSettings, files:_*) + + private def scalacNeg(out: String, extraCp: String, additionalSettings: Seq[String], files: String*): Try[Unit] = + scalacNeg(out, extraCp = Some(extraCp), additionalSettings, files:_*) + + private def scalacNeg(out: String, extraCp: Option[String], additionalSettings: Seq[String], files: String*): Try[Unit] = { + def compile(source: String, writer: OutputStream) = + Scalac.scalac(writer, out, extraCp, "-Ytasty-reader" +: additionalSettings, source) + negTestImpl(withCapture(_, compile, identity))(files:_*) + } + + private def withCapture(source: String, compile: (String, OutputStream) => Try[Boolean], post: String => String): (String, Try[Boolean]) = { + val byteArrayStream = new ByteArrayOutputStream(50) + try { + val compiled = compile(source, byteArrayStream) + (post(byteArrayStream.toString), compiled) + } finally byteArrayStream.close() + } + + private def negTestImpl(compile: String => (String, Try[Boolean]))(files: String*): Try[Unit] = { + val errors = mutable.ArrayBuffer.empty[String] + val unexpectedFail = mutable.ArrayBuffer.empty[String] + val crashes = mutable.ArrayBuffer.empty[String] + val failMap: Map[String, (Option[String], Option[String])] = { 
+ val (sources, rest) = files.partition(ScalaFail.permits) + sources.map({ s => + val name = s.stripSuffix(ScalaFail.name) + val check = Check.fileOf(name) + val skip = SkipCheck.fileOf(name) + val pre = ScalaPre.fileOf(name) + val foundCheck = rest.find(n => n == check || n == skip) + val foundPre = rest.find(_ == pre) + s -> (foundCheck, foundPre) + }).toMap + } + if (failMap.isEmpty) { + printwarnln(s"Warning: there are no source files marked as fail tests. (**/*${ScalaFail.name})") + } + def negCompile(source: String): Unit = { + val (output, compiled) = { + if (ScalaFail.permits(source)) { + val testName = source.stripSuffix(ScalaFail.name) + log(s"neg test ${cyan(testName)} started") + failMap(source) match { + case (_, Some(pre)) => + log(s" - compiling pre file...") + negCompile(pre) + case _ => + } + } + compile(source) + } + compiled match { + case Failure(exception) => + crashes += source + printerrln(s"ERROR: fatal error running compiler for $source: $exception") + case Success(true) => + if (failMap.contains(source)) { + errors += source + printerrln(s"ERROR: $source successfully compiled when expected to fail.") + } + case Success(false) => + failMap.get(source) match { + case None => + unexpectedFail += source + System.err.println(output) + printerrln(s"ERROR: $source did not compile when expected to. 
Perhaps it should match (**/*${ScalaFail.name})") + case Some((Some(checkFile), _)) if Check.permits(checkFile) => + processLines(checkFile) { stream => + val checkLines = Diff.splitIntoLines(stream) + val outputLines = Diff.splitIntoLines(output) + assert(outputLines.filterNot(_.isEmpty()).nonEmpty, s"outputLines should not be empty: $outputLines") + val diff = Diff.compareContents(outputLines, checkLines) + if (diff.nonEmpty) { + errors += source + printerrln(s"ERROR: $source failed, unexpected output.\n$diff") + } + } + case Some((Some(skipCheckFile), _)) => + printwarnln(s"warning: skipping check on ${skipCheckFile.stripSuffix(SkipCheck.name)}") + case Some((None, _)) => + if (output.nonEmpty) { + errors += source + val diff = Diff.compareContents(output, "") + printerrln(s"ERROR: $source failed, no check file found for unexpected output.\n$diff") + } + } + } + } + + val sources = files.filter(Scala.permits).filterNot(ScalaPre.permits) + sources.foreach(negCompile) + successWhen(errors.isEmpty && unexpectedFail.isEmpty && crashes.isEmpty) { + var msgs = List.empty[String] + if (crashes.nonEmpty) { + val str = if (crashes.size == 1) "file" else "files" + msgs ::= s"${crashes.length} $str fatally crashed the compiler: ${crashes.mkString(", ")}." + } + else if (unexpectedFail.nonEmpty) { + val str = if (unexpectedFail.size == 1) "file" else "files" + msgs ::= s"${unexpectedFail.length} $str did not compile when expected to: ${unexpectedFail.mkString(", ")}." + } + else if (errors.nonEmpty) { + val str = if (errors.size == 1) "error" else "errors" + msgs ::= s"Found ${errors.length} $str. These sources either compiled or had an incorrect or missing check file: ${errors.mkString(", ")}." 
+ } + msgs.mkString(System.lineSeparator()) + } + } + + def dotcPos(out: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = dotcPos(out, out, sourceRoot, additionalSettings, sources:_*) + + def dotcPos(out: String, classpath: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = { + log(s"compiling sources in ${yellow(sourceRoot)} with dotc.") + val process = Dotc.dotc(out, classpath, additionalSettings, sources:_*) + successWhen(process)("dotc failed to compile sources.") + } + + private def pipelineDottyOpts(tastyJar: String): Seq[String] = + Seq("-Xjava-tasty", "-Xearly-tasty-output", tastyJar) + + private def dotcNeg(out: String, additionalSettings: Seq[String], files: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = { + def compile(source: String, writer: OutputStream) = { + Dotc.dotc(writer, out, out, additionalSettings, source) + } + def scrub(source: String, output: String): String = { + output.linesIterator.collect { + case header if header.contains(source) => + val filePart = source.split(java.util.regex.Pattern.quote(Files.pathSep)).last + header.trim.replace(source, filePart) + case ok => ok + }.mkString(System.lineSeparator()) + } + negTestImpl(src => withCapture(src, compile, scrub(src, _)))(files:_*) + } + + private def getSourceAsName(path: String): String = + path.substring(path.lastIndexOf(pathSep) + pathSep.length).stripSuffix(".scala") + + private def getRunSources(root: String, preFilters: Set[SourceKind] = Set(Scala), + src2Filters: Set[SourceKind] = Set(Scala), src3Filters: Set[SourceKind] = Set(Scala) + ): Try[(Seq[String], Seq[String], Seq[String])] = { + for { + (src2, src3) <- get2And3Sources(root, src2Filters, src3Filters) + pre <- getFiles(root/"pre") + } yield (allowByKind(preFilters, pre:_*), src2, src3) + } + + private def getMovePreChangeSources(root: String, + preAFilters: Set[SourceKind] 
= Set(Scala), + preBFilters: Set[SourceKind] = Set(Scala), + src2Filters: Set[SourceKind] /*= Set(Scala)*/, + src3Filters: Set[SourceKind] = Set(Scala) + ): Try[(Seq[String], Seq[String], Seq[String], Seq[String])] = { + for { + (src2, src3) <- get2And3Sources(root, src2Filters, src3Filters) + (preA, preB) <- getPreChangeSources(root, preAFilters, preBFilters) + } yield (allowByKind(preAFilters, preA:_*), allowByKind(preBFilters, preB:_*), src2, src3) + } + + private def get2And3Sources(root: String, src2Filters: Set[SourceKind] /*= Set(Scala)*/, + src3Filters: Set[SourceKind] = Set(Scala) + ): Try[(Seq[String], Seq[String])] = { + for { + src2 <- getFiles(root/"src-2") + src3 <- getFiles(root/"src-3") + } yield (allowByKind(src2Filters, src2:_*), allowByKind(src3Filters, src3:_*)) + } + + private def getFullCircleSources(root: String, src3upFilters: Set[SourceKind] = Set(Scala), + src2downFilters: Set[SourceKind] = Set(Scala), + src3appFilters: Set[SourceKind] + ): Try[(Seq[String], Seq[String], Seq[String])] = { + for { + src3up <- getFiles(root/"src-3-upstream") + src2down <- getFiles(root/"src-2-downstream") + src3app <- getFiles(root/"src-3-app") + } yield ( + allowByKind(src3upFilters, src3up:_*), + allowByKind(src2downFilters, src2down:_*), + allowByKind(src3appFilters, src3app:_*) + ) + } + + private def getPreChangeSources(root: String, preAFilters: Set[SourceKind] /*= Set(Scala)*/, + preBFilters: Set[SourceKind] /*= Set(Scala)*/ + ): Try[(Seq[String], Seq[String])] = { + for { + preA <- getFiles(root/"pre-a") + preB <- getFiles(root/"pre-b") + } yield (allowByKind(preAFilters, preA:_*), allowByKind(preBFilters, preB:_*)) + } + + private def getNegIsolatedSources(root: String, src2Filters: Set[SourceKind] /*= Set(Scala)*/, + src3Filters: Set[SourceKind] = Set(Scala) + ): Try[(Seq[String], Seq[String], Seq[String])] = { + for { + src2 <- getFiles(root/"src-2") + src3A <- getFiles(root/"src-3-A") + src3B <- getFiles(root/"src-3-B") + } yield 
(allowByKind(src2Filters, src2:_*), allowByKind(src3Filters, src3A:_*), allowByKind(src3Filters, src3B:_*)) + } + + private def visibleClasses(classpath: String, pkgName: String, src2: String*): Try[Seq[String]] = Try { + val classes = { + val matcher = globMatcher( + s"$classpath/${if (pkgName.isEmpty) "" else pkgName.toBinaryName}Test*.class" + ) + val visibleTests = src2.map(getSourceAsName) + val addPkg: String => String = if (pkgName.isEmpty) identity else pkgName + "." + _ + val prefix = if (pkgName.isEmpty) "" else pkgName.toBinaryName + val cp = JPaths.get(classpath).normalize + def nameFromClass(path: JPath) = { + path.subpath(cp.getNameCount, path.getNameCount) + .normalize + .toString + .stripPrefix(prefix) + .stripSuffix(".class") + } + var stream: ju.stream.Stream[JPath] = null + try { + stream = JFiles.walk(cp) + stream.filter(p => !JFiles.isDirectory(p) && matcher.matches(p)) + .map(_.normalize) + .iterator + .asScala + .drop(1) // drop the classpath itself + .map(nameFromClass) + .filter(visibleTests.contains) + .map(addPkg) + .toSeq + } + finally if (stream != null) { + stream.close() + } + } + if (classes.isEmpty) printwarnln("Warning: found no test classes.") + classes + } + + private def successWhen(cond: Boolean)(ifFalse: => String): Try[Unit] = + Option.when(cond)(()).failOnEmpty(new TestFailure(ifFalse)) + + private def successWhen(cond: Try[Boolean])(ifFalse: => String): Try[Unit] = + cond.flatMap(success => if (success) Success(()) else Failure(new TestFailure(ifFalse))) + + private def runMainOn(out: String, tests: String*): Try[Unit] = { + def runTests(errors: mutable.ArrayBuffer[String], runner: Runner): Try[Unit] = Try { + for (test <- tests) { + val (pkgs, name) = { + val names = test.split('.') + names.init.mkString(".") -> names.last + } + log(s"run suite ${if (pkgs.nonEmpty) pkgs + '.' 
else ""}${cyan(name)} started") + runner.runCaptured(test) match { + case Success(output) => + val diff = Diff.compareContents(output, "") + if (diff.nonEmpty) { + errors += test + printerrln(s"ERROR: $test failed, unexpected output.\n$diff") + } + case Failure(err) => + errors += test + printerrln(s"ERROR: $test failed: ${err.getClass.getSimpleName} ${err.getMessage} in ${err.getStackTrace().mkString("\n ", "\n ", "")}") + } + } + } + for { + cldr <- Runner.classloadFrom(out) + runner <- Runner.capturingRunner(cldr) + errors = mutable.ArrayBuffer.empty[String] + _ <- runTests(errors, runner) + _ <- successWhen(errors.isEmpty)({ + val str = if (errors.size == 1) "error" else "errors" + s"${errors.length} $str. Fix ${errors.mkString(", ")}." + }) + } yield () + } + +} diff --git a/src/tastytest/scala/tools/tastytest/TestFailure.scala b/src/tastytest/scala/tools/tastytest/TestFailure.scala new file mode 100644 index 000000000000..1ac1511e26c3 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/TestFailure.scala @@ -0,0 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.tastytest + +class TestFailure(msg: String) extends Exception(msg) diff --git a/src/tastytest/scala/tools/tastytest/Tests.scala b/src/tastytest/scala/tools/tastytest/Tests.scala new file mode 100644 index 000000000000..aaa060890e8f --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Tests.scala @@ -0,0 +1,36 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest + +import scala.util.{ Try, Success, Failure } + +object Tests { + + def printSummary(suite: String, result: Try[Unit]) = result match { + case Success(_) => printsuccessln(s"$suite suite passed!") + case Failure(err) => printerrln(s"ERROR: $suite suite failed: ${err.getClass.getName}: ${err.getMessage}") + } + + def suite(name: String, willRun: Boolean)(runner: => Try[Unit]): Option[Boolean] = { + if (willRun) { + println(s"Performing suite $name") + val result = runner + printSummary(name, result) + Some(result.isSuccess) + } + else { + None + } + } + +} diff --git a/src/tastytest/scala/tools/tastytest/internal/Runner.scala b/src/tastytest/scala/tools/tastytest/internal/Runner.scala new file mode 100644 index 000000000000..aa4db6db914c --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/internal/Runner.scala @@ -0,0 +1,30 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.tastytest.internal + +import java.lang.reflect.Modifier +import java.io.OutputStream + +object Runner { + def run(name: String, out: OutputStream, err: OutputStream): Unit = { + val objClass = Class.forName(name, true, getClass.getClassLoader) + val main = objClass.getMethod("main", classOf[Array[String]]) + if (!Modifier.isStatic(main.getModifiers)) + throw new NoSuchMethodException(name + ".main is not static") + Console.withOut(out) { + Console.withErr(err) { + main.invoke(null, Array.empty[String]) + } + } + } +} diff --git a/src/tastytest/scala/tools/tastytest/package.scala b/src/tastytest/scala/tools/tastytest/package.scala new file mode 100644 index 000000000000..b9c99cc6e640 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/package.scala @@ -0,0 +1,52 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools + +package object tastytest { + + import scala.util.Try + + import Files.{pathSep, classpathSep} + + def printerrln(str: String): Unit = System.err.println(red(str)) + def printwarnln(str: String): Unit = System.err.println(yellow(str)) + def printsuccessln(str: String): Unit = System.err.println(green(str)) + + implicit final class ZipOps[T](val t: Try[T]) extends AnyVal { + @inline final def <*>[U](u: Try[U]): Try[(T, U)] = for { + x <- t + y <- u + } yield (x, y) + } + + def cyan(str: String): String = Console.CYAN + str + Console.RESET + def yellow(str: String): String = Console.YELLOW + str + Console.RESET + def red(str: String): String = Console.RED + str + Console.RESET + def green(str: String): String = Console.GREEN + str + Console.RESET + + implicit final class PathOps(val s: String) extends AnyVal { + @inline final def / (part: String): String = path(s, part) + @inline final def / (parts: Seq[String]): String = path(s, parts:_*) + + /** replace '.' by '/'. */ + @inline final def toBinaryName : String = s.replace(raw"\.", pathSep) + "/" + } + + private def path(part: String, parts: String*): String = (part +: parts).mkString(pathSep) + def classpath(path: String, paths: String*): String = (path +: paths).mkString(classpathSep) + + implicit final class OptionOps[A](val opt: Option[A]) extends AnyVal { + @inline final def failOnEmpty(ifEmpty: => Throwable): Try[A] = opt.toRight(ifEmpty).toTry + } + +} diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/testkit/scala/tools/testkit/ASMConverters.scala similarity index 92% rename from src/partest/scala/tools/partest/ASMConverters.scala rename to src/testkit/scala/tools/testkit/ASMConverters.scala index 831fe5fadc59..73fdcb605c92 100644 --- a/src/partest/scala/tools/partest/ASMConverters.scala +++ b/src/testkit/scala/tools/testkit/ASMConverters.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. 
+ * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,11 +10,10 @@ * additional information regarding copyright ownership. */ -package scala.tools.partest +package scala.tools.testkit -import scala.collection.JavaConverters._ -import scala.tools.asm -import asm.{tree => t} +import scala.jdk.CollectionConverters._ +import scala.tools.asm, asm.{tree => t}, asm.Opcodes._ /** Makes using ASM from ByteCodeTests more convenient. * @@ -45,7 +44,7 @@ object ASMConverters { def dropStaleLabels = { val definedLabels: Set[Instruction] = self.filter(_.isInstanceOf[Label]).toSet - val usedLabels: Set[Instruction] = self.flatMap(referencedLabels)(collection.breakOut) + val usedLabels: Set[Instruction] = self.iterator.flatMap(referencedLabels).toSet self.filterNot(definedLabels diff usedLabels) } @@ -80,9 +79,13 @@ object ASMConverters { // toString such that the first field, "opcode: Int", is printed textually. final override def toString() = { val printOpcode = opcode != -1 + def q(a: Any) = a match { + case s: String => s""""$s"""" + case _ => a.toString + } productPrefix + ( - if (printOpcode) Iterator(opcodeToString(opcode)) ++ productIterator.drop(1) - else productIterator + if (printOpcode) Iterator(opcodeToString(opcode)) ++ productIterator.drop(1).map(q) + else productIterator.map(q) ).mkString("(", ", ", ")") } } @@ -152,21 +155,22 @@ object ASMConverters { case i: t.LabelNode => Label (labelIndex(i)) case i: t.FrameNode => FrameEntry (i.`type`, mapOverFrameTypes(lst(i.local)), mapOverFrameTypes(lst(i.stack))) case i: t.LineNumberNode => LineNumber (i.line, applyLabel(i.start)) + case i => throw new MatchError(x) } - private def convertBsmArgs(a: Array[Object]): List[Object] = a.map({ + private def convertBsmArgs(a: Array[Object]): List[Object] = a.iterator.map({ case h: asm.Handle => convertMethodHandle(h) case _ => a // can be: Class, method Type, primitive constant - 
})(collection.breakOut) + }).toList private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc, h.isInterface) private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = { - method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut) + method.tryCatchBlocks.iterator.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`))).toList } private def convertLocalVars(method: t.MethodNode): List[LocalVariable] = { - method.localVariables.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index))(collection.breakOut) + method.localVariables.iterator.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index)).toList } } @@ -240,10 +244,10 @@ object ASMConverters { } def unconvertMethodHandle(h: MethodHandle): asm.Handle = new asm.Handle(h.tag, h.owner, h.name, h.desc, h.itf) - def unconvertBsmArgs(a: List[Object]): Array[Object] = a.map({ + def unconvertBsmArgs(a: List[Object]): Array[Object] = a.iterator.map({ case h: MethodHandle => unconvertMethodHandle(h) case o => o - })(collection.breakOut) + }).toArray private def visitMethod(method: t.MethodNode, instruction: Instruction, asmLabel: Map[Label, asm.Label]): Unit = instruction match { case Field(op, owner, name, desc) => method.visitFieldInsn(op, owner, name, desc) @@ -263,4 +267,6 @@ object ASMConverters { case FrameEntry(tp, local, stack) => method.visitFrame(tp, local.length, frameTypesToAsm(local, asmLabel).toArray, stack.length, frameTypesToAsm(stack, asmLabel).toArray) case LineNumber(line, start) => method.visitLineNumber(line, asmLabel(start)) } + def InvokeInterface(owner: String, name: String, desc: String): Invoke = Invoke(opcode = INVOKEINTERFACE, owner, name, 
desc, itf = true) + def InvokeVirtual(owner: String, name: String, desc: String): Invoke = Invoke(opcode = INVOKEVIRTUAL, owner, name, desc, itf = false) } diff --git a/src/testkit/scala/tools/testkit/AllocationTest.scala b/src/testkit/scala/tools/testkit/AllocationTest.scala new file mode 100644 index 000000000000..2071b4610fea --- /dev/null +++ b/src/testkit/scala/tools/testkit/AllocationTest.scala @@ -0,0 +1,167 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit + +import java.lang.management.ManagementFactory + +import org.junit.Assert.{assertEquals, assertTrue, fail} + +import scala.annotation.{ nowarn, tailrec } +import scala.reflect.{ClassTag, classTag} + +object AllocationTest { + val threadMXBean = ManagementFactory.getThreadMXBean.asInstanceOf[com.sun.management.ThreadMXBean] + assertTrue(threadMXBean.isThreadAllocatedMemorySupported) + threadMXBean.setThreadAllocatedMemoryEnabled(true) + + @nowarn("cat=lint-nullary-unit") + private object coster extends AllocationTest { + def byte = 99.toByte + def short = 9999.toShort + def int = 100000000 + def long = 100000000000000L + def boolean = true + def char = 's' + def float = 123456F + def double = 123456D + def unit = () + + def sizeOf[T <: AnyRef](fn: => T): T = fn + } + + lazy val costObject = costOf(coster, "Object") + lazy val costByte = costOf(coster.byte, "Byte") + lazy val costShort = costOf(coster.short, "Short") + lazy val costInt = costOf(coster.int, "Int") + lazy val costLong = costOf(coster.long, "Long") + lazy val costBoolean = costOf(coster.boolean, "Boolean") + lazy val costChar = costOf(coster.char, "Char") + lazy val costFloat = costOf(coster.float, "Float") + lazy val costDouble = costOf(coster.double, 
"Double") + lazy val costUnit = costOf(coster.unit, "Unit") + + def sizeOf[T <: AnyRef](fn: => T, msg: String, ignoreEqualCheck: Boolean = false): Long = { + val size = coster.calcAllocationInfo(coster.sizeOf(fn), costObject, msg, ignoreEqualCheck).min + println(s"size of $msg = $size") + size + } + + private def costOf[T](fn: => T, tpe: String): Long = { + val cost = coster.calcAllocationInfo(fn, 0, "", ignoreEqualCheck = false).min + println(s"cost of tracking allocations - cost of $tpe = $cost") + cost + } +} + +trait AllocationTest { + import AllocationTest._ + + /** Asserts whether it's expected for `a == b` to allocate memory. */ + def nonAllocatingEqual(expected: Boolean, a: AnyRef, b: AnyRef): Unit = { + assertEquals(expected, nonAllocating(Boolean.box(a == b))) + } + + /** Asserts that the execution of `fn` does not allocate any memory. */ + def nonAllocating[T: ClassTag](fn: => T, text: String = "", ignoreEqualCheck: Boolean = false)(implicit execution: AllocationExecution = AllocationExecution()): T = { + onlyAllocates(0, text, ignoreEqualCheck)(fn) + } + + private def showAllocations(allocations: List[Long]): String = allocations match { + case a :: allocations => + val sb = new StringBuilder + def append(a: Long, count: Int) = sb.append(s" allocation $a ($count times)\n") + def loop(allocations: List[Long], last: Long, count: Int): String = allocations match { + case Nil => append(last, count).result() + case b :: allocations => + val n = if (b != last) { append(b, count); 1 } else count + 1 + loop(allocations, b, n) + } + loop(allocations, a, 1) + case _ => "" + } + + /** Asserts that the execution of `fn` allocates `size` bytes or less. 
*/ + def onlyAllocates[T: ClassTag](size: Long, text: String = "", ignoreEqualCheck: Boolean = false)(fn: => T)(implicit execution: AllocationExecution = AllocationExecution()): T = { + val result = allocationInfo(fn, text, ignoreEqualCheck) + if (result.min > size) failTest(size, text, result) + result.result + } + + /** Asserts that the execution of `fn` allocates exactly `size` bytes. */ + def exactAllocates[T: ClassTag](size: Long, text: String = "", ignoreEqualCheck: Boolean = false)(fn: => T)(implicit execution: AllocationExecution = AllocationExecution()): T = { + val result = allocationInfo(fn, text, ignoreEqualCheck) + if (result.min != size) failTest(size, text, result) + result.result + } + + private def failTest[T](size: Long, text: String, result: AllocationInfo[T]) = { + val extraText = if (text.isEmpty) "" else s" -- $text" + def show(x: T) = if (x == null) "null" else s"$x (${x.getClass})" + fail(s"""allocating min = ${result.min} allowed = $size$extraText + | result = ${show(result.result)} + |${showAllocations(result.allocations.toList)}""".stripMargin) + } + + def allocationInfo[T: ClassTag](fn: => T, text: String = "", ignoreEqualCheck: Boolean = false)(implicit execution: AllocationExecution = AllocationExecution()): AllocationInfo[T] = { + val cost = classTag[T].runtimeClass match { + case cls if cls == classOf[Byte] => costByte + case cls if cls == classOf[Short] => costShort + case cls if cls == classOf[Int] => costInt + case cls if cls == classOf[Long] => costLong + case cls if cls == classOf[Boolean] => costBoolean + case cls if cls == classOf[Char] => costChar + case cls if cls == classOf[Float] => costFloat + case cls if cls == classOf[Double] => costDouble + case cls if cls == classOf[Unit] => costUnit + case cls if cls.isPrimitive => sys.error(s"Unexpected primitive $cls") + case _ => costObject + } + calcAllocationInfo(fn, cost, text, ignoreEqualCheck) + } + + /** Calculates memory allocation exempting `cost` expected bytes (e.g. 
java.lang.Object overhead) */ + private[AllocationTest] def calcAllocationInfo[T](fn: => T, cost: Long, text: String, ignoreEqualCheck: Boolean)(implicit execution: AllocationExecution = AllocationExecution()): AllocationInfo[T] = { + val expected = fn + val extraText = if (text.isEmpty) "" else s" -- $text" + @annotation.nowarn("cat=deprecation") + val id = Thread.currentThread().getId + val counts = new Array[Long](execution.executionCount) + + @tailrec def warmupLoop(i: Int): Unit = if (i < execution.warmupCount) { + val actual = fn + if (!ignoreEqualCheck && actual != expected) + assertEquals(s"warmup at index $i $expected $actual$extraText", expected, actual) + warmupLoop(i + 1) + } + + @tailrec def testLoop(i: Int): Unit = if (i < execution.executionCount) { + val before = threadMXBean.getThreadAllocatedBytes(id) + val actual = fn + val after = threadMXBean.getThreadAllocatedBytes(id) + counts(i) = after - cost - before + if (!ignoreEqualCheck && actual != expected) + assertEquals(s"at index $i $expected $actual$extraText", expected, actual) + testLoop(i + 1) + } + + warmupLoop(0) + testLoop(0) + AllocationInfo(expected, counts) + } +} + +case class AllocationExecution(executionCount: Int = 1000, warmupCount: Int = 1000) + +case class AllocationInfo[T](result: T, allocations: Array[Long]) { + def min: Long = allocations.min +} diff --git a/src/testkit/scala/tools/testkit/AssertUtil.scala b/src/testkit/scala/tools/testkit/AssertUtil.scala new file mode 100644 index 000000000000..97e24211474c --- /dev/null +++ b/src/testkit/scala/tools/testkit/AssertUtil.scala @@ -0,0 +1,419 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.testkit + +import org.junit.Assert.{assertEquals, assertNotEquals} +import org.junit.Assert.{assertFalse, assertTrue} +import org.junit.Assume.{assumeFalse, assumeTrue} + +import scala.annotation.nowarn +import scala.collection.mutable +import scala.concurrent.{Await, Awaitable} +import scala.reflect.ClassTag +import scala.runtime.BoxesRunTime +import scala.runtime.ScalaRunTime.stringOf +import scala.util.{Failure, Success, Try} +import scala.util.Properties.isWin +import scala.util.chaining._ +import scala.util.control.{ControlThrowable, NonFatal} +import java.lang.ref.{Reference, ReferenceQueue, SoftReference} +import java.lang.reflect.{Array => _, _} +import java.time.Duration +import java.util.concurrent.{CountDownLatch, TimeUnit} +import java.util.concurrent.atomic.AtomicReference +import java.util.IdentityHashMap + +/** This module contains additional higher-level assert statements + * that are ultimately based on junit.Assert primitives. + * + * Avoid adding methods, and above all *fields* (including `lazy val`s), that + * require JVM-specific features such as run-time reflection. Otherwise, all + * tests using this object stop working in Scala.js. Put such methods in + * `ReflectUtil` instead. (`ClassTag`s are fine; they are supported in + * Scala.js and Scala Native.) 
+ */ +object AssertUtil { + + // junit fail is Unit + def fail(message: String): Nothing = throw new AssertionError(message) + + def noWin(message: String = "skipping test on Windows")(body: => Unit) = { + assumeFalse(message, isWin) + body + } + + private val Bail = new ControlThrowable {} + + def bail(): Nothing = throw Bail + + // if the test bails out, communicate a violated assumption + def bailable(name: String)(test: => Unit): Unit = + try test + catch { case _: Bail.type => assumeTrue(s"$name skipped bail!", false) } + + private val printable = raw"\p{Print}".r + + def hexdump(s: String): Iterator[String] = { + import scala.io.Codec + val codec: Codec = Codec.UTF8 + var offset = 0 + def hex(bytes: Array[Byte]) = bytes.map(b => f"$b%02x").mkString(" ") + def charFor(byte: Byte): Char = byte.toChar match { case c @ printable() => c ; case _ => '.' } + def ascii(bytes: Array[Byte]) = bytes.map(charFor).mkString + def format(bytes: Array[Byte]): String = + f"$offset%08x ${hex(bytes.slice(0, 8))}%-24s ${hex(bytes.slice(8, 16))}%-24s |${ascii(bytes)}|" + .tap(_ => offset += bytes.length) + s.getBytes(codec.charSet).grouped(16).map(format) + } + + private def dump(s: String) = hexdump(s).mkString("\n") + def assertEqualStrings(expected: String)(actual: String) = + assert(expected == actual, s"Expected:\n${dump(expected)}\nActual:\n${dump(actual)}") + + // assertEquals but use BoxesRunTime.equals + // let junit format a message on failure + def assertEqualsAny(expected: Any, actual: Any): Unit = + if (!BoxesRunTime.equals(expected, actual)) assertEquals(expected, actual) + // as a bonus, message is by-name, though retains junit parameter order + def assertEqualsAny(message: => String, expected: Any, actual: Any): Unit = + if (!BoxesRunTime.equals(expected, actual)) assertEquals(message, expected, actual) + def assertNotEqualsAny(expected: Any, actual: Any): Unit = + if (BoxesRunTime.equals(expected, actual)) assertNotEquals(expected, actual) + def 
assertNotEqualsAny(message: => String, expected: Any, actual: Any): Unit = + if (BoxesRunTime.equals(expected, actual)) assertNotEquals(message, expected, actual) + + private implicit class `ref helper`[A <: AnyRef](val r: Reference[A]) extends AnyVal { + def isEmpty: Boolean = r.get == null // r.refersTo(null) to avoid influencing collection + def nonEmpty: Boolean = !isEmpty + def hasReferent(x: AnyRef): Boolean = r.get eq x + } + private implicit class `class helper`(val clazz: Class[_]) extends AnyVal { + def allFields: List[Field] = { + def loop(k: Class[_]): List[Field] = + if (k == null) Nil + else k.getDeclaredFields.toList ::: loop(k.getSuperclass) + loop(clazz) + } + } + private implicit class `field helper`(val f: Field) extends AnyVal { + def follow(o: AnyRef): AnyRef = { + f setAccessible true + f get o + } + } + + /** Result and elapsed duration. + */ + def timed[A](body: => A): (A, Duration) = { + val start = System.nanoTime + val result = body + val end = System.nanoTime + (result, Duration.ofNanos(end - start)) + } + + /** Elapsed duration. + */ + def elapsed[U](body: => U): Duration = timed(body)._2 + + /** Elapsed duration. + */ + def withElapsed[A](f: Duration => Unit)(body: => A): A = timed(body).pipe { + case (result, duration) => f(duration) ; result + } + + /** Expect the exception is thrown by evaluating `body`. + */ + def intercept[T <: Throwable: ClassTag](body: => Any): Unit = assertThrown[T](_ => true)(body) + + /** Expect the exception thrown with exactly the given message. + */ + def interceptMessage[T <: Throwable: ClassTag](expected: String)(body: => Any): Unit = + assertThrown[T](_.getMessage == expected)(body) + + /** Check that throwable T (or a subclass) was thrown during evaluation of `body`, + * and that its message satisfies the `checkMessage` predicate. + * Any other exception is propagated. 
+ */ + def assertThrows[T <: Throwable: ClassTag](body: => Any, checkMessage: String => Boolean = _ => true): Unit = + assertThrown[T](t => checkMessage(t.getMessage))(body) + + private val Unthrown = new ControlThrowable {} + + /** Assert that the exception was thrown while evaluating `body`, + * and that the exception instance satisfies the `checker` predicate. + */ + def assertThrown[T <: Throwable: ClassTag](checker: T => Boolean)(body: => Any): Unit = + try { + body + throw Unthrown + } catch { + case Unthrown => fail("Expression did not throw!") + case e: T if checker(e) => () + case failed: T => + val ae = new AssertionError(s"Exception failed check: $failed") + ae.addSuppressed(failed) + throw ae + case NonFatal(other) => + val ae = new AssertionError(s"Wrong exception: expected ${implicitly[ClassTag[T]]} but was ${other.getClass.getName}") + ae.addSuppressed(other) + throw ae + } + + def assertCond[A](x: A)(pf: PartialFunction[A, Boolean]): Unit = assertTrue(PartialFunction.cond(x)(pf)) + def assertCondNot[A](x: A)(pf: PartialFunction[A, Boolean]): Unit = assertFalse(PartialFunction.cond(x)(pf)) + + def assertFails[U](checkMessage: String => Boolean)(body: => U): Unit = assertThrows[AssertionError](body, checkMessage) + + private def orEmpty(b: Boolean)(text: => String): String = if (b) text else "" + + /** JUnit-style assertion for `Seq#sameElements`. + * The `actual` is iterated twice if failure is reported. + */ + def assertSameElements[A, B >: A](expected: Seq[A], actual: Iterable[B], message: String = ""): Unit = + if (!expected.sameElements(actual)) + fail(f"${orEmpty(message.nonEmpty)(s"$message ")}expected:<${stringOf(expected)}> but was:<${stringOf(actual)}>") + + /** Convenience for testing iterators and non-Seqs. + * The `actual` is collected to a `List` for reporting errors. 
+ */ + def assertSameElements[A, B >: A](expected: Seq[A], actual: IterableOnce[B]): Unit = + assertSameElements(expected, actual.iterator.to(Iterable), message = "") + + /** Convenience for testing iterators and non-Seqs. + * The `expected` is collected to a `List` for reporting errors. + */ + def assertSameElements[A, B >: A](expected: IterableOnce[A], actual: IterableOnce[B]): Unit = + assertSameElements(expected.iterator.to(Seq), actual) + + /** Convenience for testing arrays. Avoids warning about implicit conversion to Seq. + */ + def assertSameElements[A, B >: A](expected: Array[A], actual: Array[B]): Unit = + assertSameElements(expected, actual, message = "") + + def assertSameElements[A, B >: A](expected: Array[A], actual: Array[B], message: String): Unit = + assertSameElements(expected.toIndexedSeq, actual.toIndexedSeq, message) + + /** Value is not strongly reachable from roots after body is evaluated. + */ + def assertNotReachable[A <: AnyRef](a: => A, roots: AnyRef*)(body: => Unit): Unit = { + val refq = new ReferenceQueue[A] + val ref: Reference[A] = new SoftReference(a, refq) + // fail if following strong references from root discovers referent. Quit if ref is empty. 
+ def assertNoRef(root: AnyRef): Unit = { + val seen = new IdentityHashMap[AnyRef, Unit] + val stack = mutable.Stack.empty[AnyRef] + def loop(): Unit = if (ref.nonEmpty && stack.nonEmpty) { + val o: AnyRef = stack.pop() + if (o != null && !seen.containsKey(o)) { + seen.put(o, ()) + assertFalse(s"Root $root held reference $o", ref.hasReferent(o)) + o match { + case a: Array[AnyRef] => + a.foreach(e => if (e != null && !e.isInstanceOf[Reference[_]]) stack.push(e)) + case _ => + for (f <- o.getClass.allFields) + if (!Modifier.isStatic(f.getModifiers) && !f.getType.isPrimitive && !classOf[Reference[_]].isAssignableFrom(f.getType)) + stack.push(f.follow(o)) + } + } + refq.poll() match { + case null => + case r @ _ => fail("assertNotReachable dropped reference value") + } + loop() + } + stack.push(root) + loop() + } + body + roots.foreach(assertNoRef) + } + + /** Assert no new threads, with some margin for arbitrary threads to exit. */ + def assertZeroNetThreads(body: => Unit): Unit = { + val group = new ThreadGroup("junit") + try assertZeroNetThreads(group)(body) + finally group.destroy(): @nowarn("cat=deprecation") // deprecated since JDK 16, will be removed + } + def assertZeroNetThreads[A](group: ThreadGroup)(body: => A): Try[A] = { + val testDone = new CountDownLatch(1) + def check(): Try[A] = { + val beforeCount = group.activeCount + val beforeThreads = new Array[Thread](beforeCount) + assertEquals("Spurious early thread creation.", beforeCount, group.enumerate(beforeThreads)) + + val outcome = Try(body) + + val afterCount = { + waitForIt(group.activeCount <= beforeCount, label = "after count") + group.activeCount + } + val afterThreads = new Array[Thread](afterCount) + assertEquals("Spurious late thread creation.", afterCount, group.enumerate(afterThreads)) + val staleThreads = afterThreads.toList.diff(beforeThreads) + //staleThreads.headOption.foreach(_.getStackTrace.foreach(println)) + val staleMessage = staleThreads.mkString("There are stale threads: ",",","") 
+ assertEquals(staleMessage, beforeCount, afterCount) + assertTrue(staleMessage, staleThreads.isEmpty) + + outcome + } + val result = new AtomicReference[Try[A]]() + def test(): Try[A] = + try { + val checked = check() + result.set(checked) + checked + } finally { + testDone.countDown() + } + + val timeout = 10 * 1000L + val thread = new Thread(group, () => test()) + def abort(): Try[A] = { + group.interrupt() + new Failure(new AssertionError("Test did not complete")) + } + try { + thread.start() + waitForIt(testDone.getCount == 0, Fast, label = "test result") + if (testDone.await(timeout, TimeUnit.MILLISECONDS)) + result.get + else + abort() + } finally { + thread.join(timeout) + } + } + + /** Wait for a condition, with a simple back-off strategy. + * + * This makes it easier to see hanging threads in development + * without tweaking a timeout parameter. Conversely, when a thread + * fails to make progress in a test environment, we allow the wait + * period to grow larger than usual, since a long wait for failure + * is acceptable. + * + * It would be nicer if what we're waiting for gave us + * a progress indicator: we don't care if something + * takes a long time, so long as we can verify progress. + */ + def waitForIt(terminated: => Boolean, progress: Progress = Fast, label: => String = "test"): Unit = { + def value: Option[Boolean] = if (terminated) Some(true) else None + assertTrue(waitFor(value, progress, label)) + } + /** Wait for a value or eventually throw. 
+ */ + def waitFor[A](value: => Option[A], progress: Progress = Fast, label: => String = "test"): A = { + val limit = 5 + var n = 1 + var (dormancy, factor) = progress match { + case Slow => (10000L, 5) + case Fast => (250L, 4) + } + var period = 0L + var result: Option[A] = None + var done = false + while (!done && n < limit) { + try { + result = value + done = result.nonEmpty + if (!done) { + //println(s"Wait for test condition: $label") + Thread.sleep(dormancy) + period += dormancy + } + } catch { + case _: InterruptedException => done = true + } + n += 1 + dormancy *= factor + } + result match { + case Some(v) => v + case _ => fail(s"Expired after dormancy period $period waiting for termination condition $label") + } + } + + /** How frequently to check a termination condition. */ + sealed trait Progress + final case object Slow extends Progress + final case object Fast extends Progress + + /** Like Await.ready but return false on timeout, true on completion, throw InterruptedException. */ + def readyOrNot(awaitable: Awaitable[_]): Boolean = Try(Await.ready(awaitable, TestDuration.Standard)).isSuccess + + def withoutATrace[A](body: => A) = NoTrace(body) + + /** To be thrown by test code to check stack depth. */ + case class Probe(depth: Int) extends ControlThrowable + + /** To be called by test code to check stack depth from assertStackSafe. 
*/ + def probeStackSafety[A](): A = throw new Probe(Thread.currentThread.getStackTrace.length) + + def assertStackSafe[A](run1: => A, run2: => A): Unit = { + var res1 = -1 + var res2 = -1 + def check(f: Int => Unit): Probe => Boolean = { + case Probe(depth) => f(depth); true + } + assertThrown[Probe](check(depth => res1 = depth))(run1) + assertThrown[Probe](check(depth => res2 = depth))(run2) + assertEquals(s"Expected equal stack depths, but got $res1 and $res2", res1, res2) + } +} + +object TestDuration { + import scala.concurrent.duration.{Duration, SECONDS} + val Standard = Duration(4, SECONDS) +} + +/** Run a thunk, collecting uncaught exceptions from any spawned threads. */ +class NoTrace[A](body: => A) extends Runnable { + + private val uncaught = new mutable.ListBuffer[(Thread, Throwable)]() + + @volatile private[testkit] var result: Option[A] = None + + def run(): Unit = { + import AssertUtil.assertZeroNetThreads + val group = new ThreadGroup("notrace") { + override def uncaughtException(t: Thread, e: Throwable): Unit = synchronized { + uncaught += ((t, e)) + } + } + try assertZeroNetThreads(group)(body) match { + case Success(a) => result = Some(a) + case Failure(e) => synchronized { uncaught += ((Thread.currentThread, e)) } + } + finally group.destroy(): @nowarn("cat=deprecation") // deprecated since JDK 16, will be removed + } + + private[testkit] lazy val errors: List[(Thread, Throwable)] = synchronized(uncaught.toList) + + private def suppress(t: Throwable, other: Throwable): t.type = { t.addSuppressed(other) ; t } + + private final val noError = None: Option[Throwable] + + def asserted: Option[Throwable] = + errors.collect { case (_, e: AssertionError) => e } + .foldLeft(noError)((res, e) => res.map(suppress(_, e)).orElse(Some(e))) + + def apply(test: (Option[A], List[(Thread, Throwable)]) => Option[Throwable]) = { + run() + test(result, errors).orElse(asserted).foreach(e => throw e) + } +} +object NoTrace { + def apply[A](body: => A): NoTrace[A] = 
new NoTrace(body) +} diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/src/testkit/scala/tools/testkit/BytecodeTesting.scala similarity index 90% rename from test/junit/scala/tools/testing/BytecodeTesting.scala rename to src/testkit/scala/tools/testkit/BytecodeTesting.scala index fd19b35fa978..b5ee4fc57e6b 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/src/testkit/scala/tools/testkit/BytecodeTesting.scala @@ -1,31 +1,42 @@ -package scala.tools.testing +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit import junit.framework.AssertionFailedError -import org.junit.Assert._ +import org.junit.Assert.assertTrue -import scala.collection.JavaConverters._ -import scala.collection.generic.Clearable -import scala.collection.mutable.ListBuffer +import scala.collection.mutable.{Clearable, ListBuffer} +import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.BatchSourceFile import scala.reflect.io.VirtualDirectory +import scala.sys.process.{Parser => CommandLineParser} import scala.tools.asm.Opcodes import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode} -import scala.tools.cmd.CommandLineParser import scala.tools.nsc.backend.jvm.{AsmUtils, MethodNode1} import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.{Global, Settings} -import scala.tools.partest.ASMConverters._ +import scala.tools.testkit.ASMConverters._ trait BytecodeTesting extends ClearAfterClass { /** - * Overwrite to set additional compiler flags + * Override to set additional compiler flags. 
*/ def compilerArgs = "" - val compiler = cached("compiler", () => BytecodeTesting.newCompiler(extraArgs = compilerArgs)) + val compiler: Compiler = cached("compiler", () => BytecodeTesting.newCompiler(extraArgs = compilerArgs)) } class Compiler(val global: Global) { @@ -57,7 +68,7 @@ class Compiler(val global: Global) { global.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) } - def newRun: global.Run = { + def newRun(): global.Run = { global.reporter.reset() resetOutput() keptPerRunCaches.foreach(_.clear()) @@ -75,7 +86,7 @@ class Compiler(val global: Global) { } def compileToBytes(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter.Info => Boolean = _ => false): List[(String, Array[Byte])] = { - val run = newRun + val run = newRun() run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2))) checkReport(allowMessage) getGeneratedClassfiles(global.settings.outputDirs.getSingleOutput.get) @@ -86,14 +97,14 @@ class Compiler(val global: Global) { } def compileClass(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter.Info => Boolean = _ => false): ClassNode = { - val List(c) = compileClasses(code, javaCode, allowMessage) + val List(c) = compileClasses(code, javaCode, allowMessage): @unchecked c } def compileToBytesTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[(String, Array[Byte])] = { import global._ settings.stopBefore.value = "jvm" :: Nil - val run = newRun + val run = newRun() val scalaUnit = newCompilationUnit(scalaCode, "unitTestSource.scala") val javaUnits = javaCode.map(p => newCompilationUnit(p._1, p._2)) val units = scalaUnit :: javaUnits @@ -101,7 +112,7 @@ class Compiler(val global: Global) { settings.stopBefore.value = Nil scalaUnit.body = beforeBackend(scalaUnit.body) checkReport(_ => false) - val run1 = newRun + val run1 = newRun() 
run1.compileUnits(units, run1.phaseNamed("jvm")) checkReport(_ => false) getGeneratedClassfiles(settings.outputDirs.getSingleOutput.get) @@ -116,7 +127,7 @@ class Compiler(val global: Global) { } def compileAsmMethod(code: String, allowMessage: StoreReporter.Info => Boolean = _ => false): MethodNode = { - val List(m) = compileAsmMethods(code, allowMessage) + val List(m) = compileAsmMethods(code, allowMessage): @unchecked m } @@ -124,12 +135,12 @@ class Compiler(val global: Global) { compileAsmMethods(code, allowMessage).map(convertMethod) def compileMethod(code: String, allowMessage: StoreReporter.Info => Boolean = _ => false): Method = { - val List(m) = compileMethods(code, allowMessage = allowMessage) + val List(m) = compileMethods(code, allowMessage = allowMessage): @unchecked m } def compileInstructions(code: String, allowMessage: StoreReporter.Info => Boolean = _ => false): List[Instruction] = { - val List(m) = compileMethods(code, allowMessage = allowMessage) + val List(m) = compileMethods(code, allowMessage = allowMessage): @unchecked m.instructions } } @@ -170,6 +181,9 @@ object BytecodeTesting { } def makeSourceFile(code: String, filename: String): BatchSourceFile = new BatchSourceFile(filename, code) + private var fileCount = 0 + private def fileN = { val n = fileCount; fileCount += 1; if (n == 0) "" else n.toString } + def SourceFile(code: String*): List[BatchSourceFile] = code.map(makeSourceFile(_, s"UnitTestSource$fileN")).toList def getGeneratedClassfiles(outDir: AbstractFile): List[(String, Array[Byte])] = { def files(dir: AbstractFile): List[(String, Array[Byte])] = { @@ -224,7 +238,8 @@ object BytecodeTesting { def assertSameSummary(actual: List[Instruction], expected: List[Any]): Unit = { def expectedString = expected.map({ case s: String => s""""$s"""" - case i: Int => opcodeToString(i, i) + case i: Int => opcodeToString(i, i) + case x => throw new MatchError(x) }).mkString("List(", ", ", ")") assert(actual.summary == expected, s"\nFound : 
${actual.summaryText}\nExpected: $expectedString") } @@ -264,7 +279,7 @@ object BytecodeTesting { } def findClass(cs: List[ClassNode], name: String): ClassNode = { - val List(c) = cs.filter(_.name == name) + val List(c) = cs.filter(_.name == name): @unchecked c } @@ -317,7 +332,7 @@ object BytecodeTesting { * If `query` starts with a `+`, the next instruction is returned. */ def findInstr(method: MethodNode, query: String): AbstractInsnNode = { - val List(i) = findInstrs(method, query) + val List(i) = findInstrs(method, query): @unchecked i } @@ -338,5 +353,5 @@ object BytecodeTesting { def stringLines = l.mkString("\n") } - val ignoreDeprecations = (info: StoreReporter#Info) => info.msg.contains("deprecation") + val ignoreDeprecations = (info: StoreReporter.Info) => info.msg.contains("deprecation") } diff --git a/test/junit/scala/tools/testing/ClearAfterClass.java b/src/testkit/scala/tools/testkit/ClearAfterClass.java similarity index 83% rename from test/junit/scala/tools/testing/ClearAfterClass.java rename to src/testkit/scala/tools/testkit/ClearAfterClass.java index 7f87f9a4d77f..96574cfcdc99 100644 --- a/test/junit/scala/tools/testing/ClearAfterClass.java +++ b/src/testkit/scala/tools/testkit/ClearAfterClass.java @@ -1,4 +1,16 @@ -package scala.tools.testing; +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.testkit; import org.junit.ClassRule; import org.junit.rules.TestRule; diff --git a/src/testkit/scala/tools/testkit/CompileTime.scala b/src/testkit/scala/tools/testkit/CompileTime.scala new file mode 100644 index 000000000000..3669312fa79f --- /dev/null +++ b/src/testkit/scala/tools/testkit/CompileTime.scala @@ -0,0 +1,24 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox + +object CompileTime { + def versionNumberString: String = macro versionNumberStringImpl + def versionNumberStringImpl(c: blackbox.Context): c.Tree = { + import c.universe._ + q"${scala.util.Properties.versionNumberString}" + } +} diff --git a/src/testkit/scala/tools/testkit/ReflectUtil.scala b/src/testkit/scala/tools/testkit/ReflectUtil.scala new file mode 100644 index 000000000000..e85b7fe1d204 --- /dev/null +++ b/src/testkit/scala/tools/testkit/ReflectUtil.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit + +import scala.reflect.{ ClassTag, classTag, ensureAccessible } +import scala.util.chaining._ +import java.lang.reflect.{ Array => _, _ } + +/** This module contains reflection-related utilities. + * + * This object contains methods that will not work on Scala.js nor Scala + * Native, making any test using `ReflectUtil` JVM-only. 
+ */ +object ReflectUtil { + private lazy val modsField = ensureAccessible { + try ensureAccessible(classOf[Class[_]].getDeclaredMethod("getDeclaredFields0", classOf[Boolean])) + .invoke(classOf[Field], false).asInstanceOf[Array[Field]] + .findLast(_.getName == "modifiers") + .getOrElse(getModsField) + catch { case _: NoSuchMethodException => getModsField } + } + + private def getModsField = classOf[Field].getDeclaredField("modifiers") + + def getFieldAccessible[T: ClassTag](n: String): Field = + classTag[T] + .runtimeClass.getDeclaredField(n) + .tap { f => + if ((f.getModifiers & Modifier.FINAL) != 0) + modsField.setInt(f, f.getModifiers() & ~Modifier.FINAL) + if ((f.getModifiers & Modifier.PUBLIC) == 0) + f.setAccessible(true) + } + + def getFinalFieldAccessible[T: ClassTag](n: String): Field = + classTag[T] + .runtimeClass.getDeclaredField(n) + .tap { f => + if ((f.getModifiers & Modifier.PUBLIC) == 0) + f.setAccessible(true) + } + + // finds method with exact name or name$suffix but not name$default$suffix + def getMethodAccessible[A: ClassTag](name: String): Method = + implicitly[ClassTag[A]] + .runtimeClass.getDeclaredMethods + .find(nameMatches(_, name)) match { + case Some(m) => m.tap(_.setAccessible(true)) + case None => AssertUtil.fail(s"Missing method $name") + } + + private def nameMatches(m: Method, name: String): Boolean = + m.getName.startsWith(name) && + (m.getName.length == name.length || + m.getName.charAt(name.length) == '$' && !m.getName.substring(name.length).startsWith("$default$")) + + implicit class MethodOps(val m: Method) extends AnyVal { + def invokeAs[A](receiver: AnyRef, args: AnyRef*): A = + try m.invoke(receiver, args: _*).asInstanceOf[A] + catch { + case e: IllegalArgumentException if e.getMessage == "wrong number of arguments" => + def required = + m.getParameterCount match { + case 0 => "0" + case n => s"${m.getParameterCount}: (${m.getGenericParameterTypes.mkString(", ")})" + } + throw new IllegalArgumentException(s"wrong number 
of arguments: ${args.length}; required: $required") + } + } +} diff --git a/src/testkit/scala/tools/testkit/Resource.java b/src/testkit/scala/tools/testkit/Resource.java new file mode 100644 index 000000000000..ee9f443356e2 --- /dev/null +++ b/src/testkit/scala/tools/testkit/Resource.java @@ -0,0 +1,24 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit; + +import java.lang.annotation.*; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +/** + * An annotation for test scenarios, akin to common Resource. + */ +@Retention(RUNTIME) +public @interface Resource { + Class type(); +} diff --git a/src/testkit/scala/tools/testkit/RunTesting.scala b/src/testkit/scala/tools/testkit/RunTesting.scala new file mode 100644 index 000000000000..5d81962b993c --- /dev/null +++ b/src/testkit/scala/tools/testkit/RunTesting.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.testkit + +import scala.reflect.runtime._ +import scala.tools.reflect.ToolBox + +trait RunTesting extends ClearAfterClass { + def compilerArgs = "" // to be overridden + val runner = cached("toolbox", () => Runner.make(compilerArgs)) +} + +class Runner(val toolBox: ToolBox[universe.type]) { + def run[T](code: String): T = toolBox.eval(toolBox.parse(code)).asInstanceOf[T] +} + +object Runner { + def make(compilerArgs: String) = new Runner(universe.runtimeMirror(getClass.getClassLoader).mkToolBox(options = compilerArgs)) +} diff --git a/src/testkit/scala/tools/testkit/TempDir.scala b/src/testkit/scala/tools/testkit/TempDir.scala new file mode 100644 index 000000000000..61242e6a7856 --- /dev/null +++ b/src/testkit/scala/tools/testkit/TempDir.scala @@ -0,0 +1,74 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit + +import java.io.{IOException, File} +import java.nio.file.{Files, FileVisitResult, SimpleFileVisitor, Path}, FileVisitResult.{CONTINUE => Continue} +import java.nio.file.attribute._ + +import scala.util.Properties +import scala.util.Using.Releasable + +object TempDir { + final val TEMP_DIR_ATTEMPTS = 10000 + def createTempDir(): File = { + val baseDir = new File(System.getProperty("java.io.tmpdir")) + val baseName = s"${System.currentTimeMillis}-" + var c = 0 + while (c < TEMP_DIR_ATTEMPTS) { + val tempDir = new File(baseDir, baseName + c) + if (tempDir.mkdir()) return tempDir + c += 1 + } + throw new IOException(s"Failed to create directory") + } +} + +/* Turn a path into a temp file for purposes of Using it as a resource. + * On Windows, avoid "file is in use" errors by not attempting to delete it. 
+ */ +case class ForDeletion(path: Path) +object ForDeletion { + implicit val deleteOnRelease: Releasable[ForDeletion] = new Releasable[ForDeletion] { + override def release(releasee: ForDeletion) = if (!Properties.isWin) Files.delete(releasee.path) + } +} +object ReleasablePath { + // On release of a path, delete the file it represents or recursively remove the directory. + implicit val deleteOnRelease: Releasable[Path] = new Releasable[Path] { + override def release(releasee: Path) = if (!Properties.isWin) remove(releasee) + } + // Delete a File on release. + implicit val deleteOnRelease2: Releasable[File] = new Releasable[File] { + override def release(releasee: File) = if (!Properties.isWin) releasee.delete() + } + + private def remove(path: Path): Unit = if (Files.isDirectory(path)) removeRecursively(path) else Files.delete(path) + + private def removeRecursively(path: Path): Unit = Files.walkFileTree(path, new ZappingFileVisitor) + + private class ZappingFileVisitor extends SimpleFileVisitor[Path] { + private def zap(path: Path) = { Files.delete(path) ; Continue } + override def postVisitDirectory(path: Path, e: IOException): FileVisitResult = if (e != null) throw e else zap(path) + override def visitFile(path: Path, attrs: BasicFileAttributes): FileVisitResult = zap(path) + } +} + +/* Things that MiMa won't let us make AutoCloseable. 
+ */ +object Releasables { + import scala.reflect.io.ZipArchive + implicit val closeZipOnRelease: Releasable[ZipArchive] = new Releasable[ZipArchive] { + override def release(releasee: ZipArchive) = releasee.close() + } +} diff --git a/test/junit/scala/tools/testing/VirtualCompilerTesting.scala b/src/testkit/scala/tools/testkit/VirtualCompilerTesting.scala similarity index 85% rename from test/junit/scala/tools/testing/VirtualCompilerTesting.scala rename to src/testkit/scala/tools/testkit/VirtualCompilerTesting.scala index 8025bfcf1932..116fefc08d6b 100644 --- a/test/junit/scala/tools/testing/VirtualCompilerTesting.scala +++ b/src/testkit/scala/tools/testkit/VirtualCompilerTesting.scala @@ -1,33 +1,44 @@ -package scala -package tools -package testing +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit import java.io.OutputStreamWriter import java.net.URI -import java.nio.charset.StandardCharsets +import java.nio.charset.StandardCharsets.UTF_8 import java.util.Locale import javax.tools._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.AbstractFileClassLoader import scala.reflect.io.{AbstractFile, VirtualDirectory} import scala.tools.nsc.classpath.{AggregateClassPath, VirtualDirectoryClassPath} import scala.tools.nsc.{Global, Settings} /** Utilities for testing with javac/scalac without using the actual filesystem, - * presumably because one doesn't wish to deal with platform idiosyncracies. + * presumably because one doesn't wish to deal with platform idiosyncrasies. */ class VirtualCompiler { /** A java compiler instance that we can use. 
*/ - lazy val javac = ToolProvider.getSystemJavaCompiler + lazy val javac = Option(ToolProvider.getSystemJavaCompiler) + .getOrElse(throw new UnsupportedOperationException("No java compiler found in current Java runtime")) /** The directory in which are placed classfiles. */ lazy val output = new VirtualDirectory("out", maybeContainer = None) /** A javac file manager that places classfiles in `output`. */ lazy val fileManager: JavaFileManager = { - val dflt = javac.getStandardFileManager(null, Locale.ENGLISH, StandardCharsets.UTF_8) + val dflt = javac.getStandardFileManager(null, Locale.ENGLISH, UTF_8) new VirtualFileManager(output, dflt) } diff --git a/src/testkit/scala/tools/testkit/XMLTesting.scala b/src/testkit/scala/tools/testkit/XMLTesting.scala new file mode 100644 index 000000000000..cddbcf6e4dbb --- /dev/null +++ b/src/testkit/scala/tools/testkit/XMLTesting.scala @@ -0,0 +1,66 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testkit + +object XMLTesting { + object xml { + val code = """ +import collection.{immutable, mutable}, mutable.ArrayBuffer + +package scala.xml { + trait MetaData + //def key: String + //def value: Seq[Node] + //def next: MetaData + trait NamespaceBinding + object TopScope extends NamespaceBinding + object Null extends MetaData + abstract class Node extends immutable.Seq[Node] { + def label: String + def child: Seq[Node] = Nil + override def toString = label + child.mkString + + def iterator: Iterator[Node] = ??? // implements `def iterator: Iterator[A]` + // Members declared in scala.collection.SeqOps + def apply(i: Int): Node = ??? // implements `def apply(i: Int): A` + def length: Int = ??? 
+ } + class Elem(prefix: String, val label: String, attributes1: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, override val child: Node*) extends Node + class NodeBuffer extends Seq[Node] { + val nodes = ArrayBuffer.empty[Node] + def &+(o: Any): this.type = + o match { + case n: Node => nodes.addOne(n); this + case _ => throw new MatchError(o) + } + // Members declared in scala.collection.IterableOnce + def iterator: Iterator[scala.xml.Node] = nodes.iterator + // Members declared in scala.collection.SeqOps + def apply(i: Int): scala.xml.Node = nodes(i) + def length: Int = nodes.length + } + case class Text(text: String) extends Node { + def label = text + } + case class Atom(t: Text) extends Node { + def label = t.text + } + trait Attribute extends MetaData + class PrefixedAttribute(pre: String, key: String, value: Seq[Node], next1: MetaData) extends Attribute + class UnprefixedAttribute(key: String, value: Seq[Node], next1: MetaData) extends Attribute { + def this(key: String, value: String, next1: MetaData) = this(key, Text(value), next1) + } +} +""" + } +} diff --git a/src/partest/scala/tools/partest/async/Async.scala b/src/testkit/scala/tools/testkit/async/Async.scala similarity index 95% rename from src/partest/scala/tools/partest/async/Async.scala rename to src/testkit/scala/tools/testkit/async/Async.scala index 1bdcd52e8972..9d618e072626 100644 --- a/src/partest/scala/tools/partest/async/Async.scala +++ b/src/testkit/scala/tools/testkit/async/Async.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,13 +10,13 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools.partest.async +package scala.tools.testkit.async import java.util.Objects -import scala.language.experimental.macros import scala.annotation.compileTimeOnly import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.language.experimental.macros import scala.reflect.macros.blackbox import scala.util.{Failure, Success, Try} @@ -33,7 +33,7 @@ object Async { } val name = TypeName("stateMachine$async") q""" - final class $name extends _root_.scala.tools.partest.async.AsyncAsMacroStateMachine($executionContext) { + final class $name extends _root_.scala.tools.testkit.async.AsyncAsMacroStateMachine($executionContext) { ${mark(q"""override def apply(tr$$async: _root_.scala.util.Try[_root_.scala.AnyRef]) = ${body}""")} } new $name().start().asInstanceOf[${c.macroApplication.tpe}] diff --git a/src/partest/scala/tools/partest/async/AsyncStateMachine.scala b/src/testkit/scala/tools/testkit/async/AsyncStateMachine.scala similarity index 94% rename from src/partest/scala/tools/partest/async/AsyncStateMachine.scala rename to src/testkit/scala/tools/testkit/async/AsyncStateMachine.scala index 245ad96385a9..8a0796f4e743 100644 --- a/src/partest/scala/tools/partest/async/AsyncStateMachine.scala +++ b/src/testkit/scala/tools/testkit/async/AsyncStateMachine.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,7 +10,7 @@ * additional information regarding copyright ownership. */ -package scala.tools.partest.async +package scala.tools.testkit.async // The async phase expects the state machine class to structurally conform to this interface. 
trait AsyncStateMachine[F, R] { diff --git a/test/ant/test-basic/build.xml b/test/ant/test-basic/build.xml deleted file mode 100644 index acc210806f07..000000000000 --- a/test/ant/test-basic/build.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - Super simple test for Scala - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/test/async/jvm/anf.check b/test/async/jvm/anf.check new file mode 100644 index 000000000000..7f946415af1d --- /dev/null +++ b/test/async/jvm/anf.check @@ -0,0 +1,3 @@ +anf.scala:181: warning: unreachable code + case "" if false => await(fut(1)) + 1 + ^ diff --git a/test/async/jvm/anf.scala b/test/async/jvm/anf.scala index 2f65f902ae8f..357651fc70f6 100644 --- a/test/async/jvm/anf.scala +++ b/test/async/jvm/anf.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.anf.AnfTransformSpec]) package scala.async.run.anf { @@ -6,7 +7,7 @@ package scala.async.run.anf { import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import scala.reflect.{ClassTag, classTag} import org.junit.Test @@ -27,7 +28,7 @@ package scala.async.run.anf { } } implicit class objectops(obj: Any) { - def mustBe(other: Any) = assert(obj == other, obj + " is not " + other) + def mustBe(other: Any) = assert(obj == other, s"$obj is not $other") def mustEqual(other: Any) = mustBe(other) } @@ -386,6 +387,7 @@ package scala.async.run.anf { await(fut(1)) match { case Up => 1.0 case Down => -1.0 + case x => throw new MatchError(x) } } sign.block mustBe 1.0 @@ -397,7 +399,7 @@ package scala.async.run.anf { // val tree = tb.typeCheck(tb.parse { // """ // | import language.implicitConversions -// | import _root_.scala.tools.partest.async.Async.{async, await} +// | import 
_root_.scala.tools.testkit.async.Async.{async, await} // | import _root_.scala.concurrent._ // | import ExecutionContext.Implicits.global // | implicit def view(a: Int): String = "" diff --git a/test/async/jvm/await0.scala b/test/async/jvm/await0.scala index 0327edf3b6fe..fb412b73fc68 100644 --- a/test/async/jvm/await0.scala +++ b/test/async/jvm/await0.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.await0.Await0Spec]) package scala.async.run.await0 { @@ -11,7 +12,7 @@ package scala.async.run.await0 { import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git a/test/async/jvm/block0.scala b/test/async/jvm/block0.scala index babdc9377454..f485ea05fdc6 100644 --- a/test/async/jvm/block0.scala +++ b/test/async/jvm/block0.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.block0.AsyncSpec]) package scala.async.run.block0 { @@ -6,7 +7,7 @@ package scala.async.run.block0 { import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git a/test/async/jvm/block1.scala b/test/async/jvm/block1.scala index 97c5a5191dc1..0fd42105656e 100644 --- a/test/async/jvm/block1.scala +++ b/test/async/jvm/block1.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.block1.Block1Spec]) package scala.async.run.block1 { @@ -6,7 +7,7 @@ package scala.async.run.block1 { 
import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git a/test/async/jvm/completable-future.scala b/test/async/jvm/completable-future.scala index 4f1b8bb64948..6d702d508e2e 100644 --- a/test/async/jvm/completable-future.scala +++ b/test/async/jvm/completable-future.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import java.util.concurrent._ import scala.tools.partest.async.CompletableFutureAwait._ @@ -17,4 +18,4 @@ object Test { val result = f.get() assert(result == 100, result) } -} +} \ No newline at end of file diff --git a/test/async/jvm/concurrent_AfterRefchecksIssue.scala b/test/async/jvm/concurrent_AfterRefchecksIssue.scala index ac0f87941f74..1113bbbd29fe 100644 --- a/test/async/jvm/concurrent_AfterRefchecksIssue.scala +++ b/test/async/jvm/concurrent_AfterRefchecksIssue.scala @@ -1,5 +1,6 @@ -// scalac: -Xasync -import scala.concurrent._, ExecutionContext.Implicits.global, scala.tools.partest.async.Async._ +//> using options -Xasync + +import scala.concurrent._, ExecutionContext.Implicits.global, scala.tools.testkit.async.Async._ trait Factory[T] { def create: T diff --git a/test/async/jvm/concurrent_ArrayIndexOutOfBoundIssue.scala b/test/async/jvm/concurrent_ArrayIndexOutOfBoundIssue.scala index 68329b297e76..b1017e8f94eb 100644 --- a/test/async/jvm/concurrent_ArrayIndexOutOfBoundIssue.scala +++ b/test/async/jvm/concurrent_ArrayIndexOutOfBoundIssue.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration sealed trait Result @@ -12,7 +13,7 @@ case object B extends Result case 
object C extends Result -object Test extends App { test +object Test extends App { test() protected def doStuff(res: Result) = { class C { def needCheck = async { false } diff --git a/test/async/jvm/concurrent_GenericTypeBoundaryIssue.scala b/test/async/jvm/concurrent_GenericTypeBoundaryIssue.scala index 13d3edab6c7c..a63fe07cfddc 100644 --- a/test/async/jvm/concurrent_GenericTypeBoundaryIssue.scala +++ b/test/async/jvm/concurrent_GenericTypeBoundaryIssue.scala @@ -1,9 +1,10 @@ -// scalac: -Xasync +//> using options -Xasync + import Test.test import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration trait InstrumentOfValue diff --git a/test/async/jvm/concurrent_MatchEndIssue.scala b/test/async/jvm/concurrent_MatchEndIssue.scala index c6de6522ed04..055fd125ead2 100644 --- a/test/async/jvm/concurrent_MatchEndIssue.scala +++ b/test/async/jvm/concurrent_MatchEndIssue.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration sealed trait Subject diff --git a/test/async/jvm/concurrent_NegativeArraySizeException.scala b/test/async/jvm/concurrent_NegativeArraySizeException.scala index 8669a1bf782f..58960561fb2b 100644 --- a/test/async/jvm/concurrent_NegativeArraySizeException.scala +++ b/test/async/jvm/concurrent_NegativeArraySizeException.scala @@ -1,14 +1,15 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration -object Test extends App { test +object Test extends App { test() def foo(foo: Any, bar: Any) = () def getValue = async {4.2} def func(f: 
Any) = async { - foo(f match { case _ if "".isEmpty => 2 }, await(getValue)); + foo(f match { case _ if "".isEmpty => 2 case x => throw new MatchError(x) }, await(getValue)); } def test() = Await.result(func(4), Duration.Inf) diff --git a/test/async/jvm/concurrent_NegativeArraySizeExceptionFine1.scala b/test/async/jvm/concurrent_NegativeArraySizeExceptionFine1.scala index 87b9e81367f5..1c3ddc9f4a33 100644 --- a/test/async/jvm/concurrent_NegativeArraySizeExceptionFine1.scala +++ b/test/async/jvm/concurrent_NegativeArraySizeExceptionFine1.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration case class FixedFoo(foo: Int) @@ -17,6 +18,6 @@ class Foobar(val foo: Int, val bar: Double) { } } -object Test extends App { test +object Test extends App { test() def test() = Await.result(new Foobar(0, 0).func(4), Duration.Inf) } diff --git a/test/async/jvm/concurrent_ReturnTupleIssue.scala b/test/async/jvm/concurrent_ReturnTupleIssue.scala index 9369a9607dd8..61c3495a7bfe 100644 --- a/test/async/jvm/concurrent_ReturnTupleIssue.scala +++ b/test/async/jvm/concurrent_ReturnTupleIssue.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration class TestReturnExprIssue(str: String) { diff --git a/test/async/jvm/concurrent_fetch.check b/test/async/jvm/concurrent_fetch.check index 991fe4e4827b..dc76e7b27050 100644 --- a/test/async/jvm/concurrent_fetch.check +++ b/test/async/jvm/concurrent_fetch.check @@ -1,3 +1,3 @@ fetching fetching -63 +65 diff --git a/test/async/jvm/concurrent_fetch.scala b/test/async/jvm/concurrent_fetch.scala index 92f2f7b7c464..c25214709ecb 100644 --- 
a/test/async/jvm/concurrent_fetch.scala +++ b/test/async/jvm/concurrent_fetch.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent.{Await, Future, duration} import scala.concurrent.ExecutionContext.Implicits.global -import scala.tools.partest.async.Async.{async, await} +import scala.tools.testkit.async.Async.{async, await} object Test extends App { val out = Console.out @@ -9,8 +10,8 @@ object Test extends App { val sumLengths: Future[Int] = { async { - val body1 = fetchURL("http://scala-lang.org") - val body2 = fetchURL("http://docs.scala-lang.org") + val body1 = fetchURL("https://scala-lang.org") + val body2 = fetchURL("https://docs.scala-lang.org") await(body1).length + await(body2).length } } diff --git a/test/async/jvm/concurrent_patternAlternative.scala b/test/async/jvm/concurrent_patternAlternative.scala index 1db0f3d729b7..8ccfd05f005a 100644 --- a/test/async/jvm/concurrent_patternAlternative.scala +++ b/test/async/jvm/concurrent_patternAlternative.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object Test extends App { test diff --git a/test/async/jvm/concurrent_patternAlternativeBothAnnotations.scala b/test/async/jvm/concurrent_patternAlternativeBothAnnotations.scala index 78bf6858ae10..61e8bfb9a934 100644 --- a/test/async/jvm/concurrent_patternAlternativeBothAnnotations.scala +++ b/test/async/jvm/concurrent_patternAlternativeBothAnnotations.scala @@ -1,14 +1,15 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object Test extends App { test def func1() = async { "hello" } def func(a: Option[Boolean]) = async 
{a match { - case null | None => await(func1) + " world" + case null | None => await(func1()) + " world" case _ => "okay" }} def test: Any = Await.result(func(None), Duration.Inf) diff --git a/test/async/jvm/concurrent_polymorphicMethod.check b/test/async/jvm/concurrent_polymorphicMethod.check index 44b16a26e9ba..c5d96871b817 100644 --- a/test/async/jvm/concurrent_polymorphicMethod.check +++ b/test/async/jvm/concurrent_polymorphicMethod.check @@ -1,3 +1,3 @@ -concurrent_polymorphicMethod.scala:17: warning: unreachable code +concurrent_polymorphicMethod.scala:18: warning: unreachable code case _ if false => ???; ^ diff --git a/test/async/jvm/concurrent_polymorphicMethod.scala b/test/async/jvm/concurrent_polymorphicMethod.scala index fb5226734a71..c74160a8c05c 100644 --- a/test/async/jvm/concurrent_polymorphicMethod.scala +++ b/test/async/jvm/concurrent_polymorphicMethod.scala @@ -1,10 +1,11 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration -object Test extends App { assert(test.toString == "(C,C)") +object Test extends App { assert(test().toString == "(C,C)") class C { override def toString = "C" diff --git a/test/async/jvm/concurrent_shadowing.check b/test/async/jvm/concurrent_shadowing.check index 1a68bc7ba04d..ced6a50c2985 100644 --- a/test/async/jvm/concurrent_shadowing.check +++ b/test/async/jvm/concurrent_shadowing.check @@ -1,3 +1,3 @@ -concurrent_shadowing.scala:19: warning: a pure expression does nothing in statement position +concurrent_shadowing.scala:20: warning: a pure expression does nothing in statement position case _ => foo; () ^ diff --git a/test/async/jvm/concurrent_shadowing.scala b/test/async/jvm/concurrent_shadowing.scala index 25b9c34caa0d..9e959911fe4c 100644 --- a/test/async/jvm/concurrent_shadowing.scala +++ 
b/test/async/jvm/concurrent_shadowing.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object Test extends App { test @@ -19,6 +20,7 @@ object Test extends App { test case _ => foo; () } () + case x => throw new MatchError(x) } () }, Duration.Inf) diff --git a/test/async/jvm/concurrent_shadowing0.scala b/test/async/jvm/concurrent_shadowing0.scala index 1888578d77fd..e708eb44d169 100644 --- a/test/async/jvm/concurrent_shadowing0.scala +++ b/test/async/jvm/concurrent_shadowing0.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object Test extends App { test diff --git a/test/async/jvm/concurrent_shadowing2.scala b/test/async/jvm/concurrent_shadowing2.scala index 9066d4a3e37b..3a02f0b2db9d 100644 --- a/test/async/jvm/concurrent_shadowing2.scala +++ b/test/async/jvm/concurrent_shadowing2.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object Test extends App { test diff --git a/test/async/jvm/concurrent_shadowingRefinedTypes.scala b/test/async/jvm/concurrent_shadowingRefinedTypes.scala index fa785ed175df..5264923c9fbd 100644 --- a/test/async/jvm/concurrent_shadowingRefinedTypes.scala +++ b/test/async/jvm/concurrent_shadowingRefinedTypes.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import 
scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration trait Base diff --git a/test/async/jvm/concurrent_test0.scala b/test/async/jvm/concurrent_test0.scala index 850f67f52b46..24dd770474d6 100644 --- a/test/async/jvm/concurrent_test0.scala +++ b/test/async/jvm/concurrent_test0.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + /* * Scala (https://www.scala-lang.org) * @@ -14,7 +15,7 @@ import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object Test extends App { assert(test == "foobar") diff --git a/test/async/jvm/exceptions.scala b/test/async/jvm/exceptions.scala index 45b328f942fa..22908defa1a5 100644 --- a/test/async/jvm/exceptions.scala +++ b/test/async/jvm/exceptions.scala @@ -1,15 +1,16 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.exceptions.ExceptionsSpec]) package scala.async.run.exceptions { - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import scala.concurrent.{Future, ExecutionContext, Await} import ExecutionContext.Implicits._ import scala.concurrent.duration._ import scala.reflect.ClassTag - import scala.tools.partest.TestUtil.intercept + import scala.tools.testkit.AssertUtil.assertThrows import org.junit.Test @@ -18,7 +19,7 @@ package scala.async.run.exceptions { @Test def `uncaught exception within async`(): Unit = { val fut = async { throw new Exception("problem") } - intercept[Exception] { Await.result(fut, 2.seconds) } + assertThrows[Exception] { Await.result(fut, 2.seconds) } } @Test @@ -28,7 +29,7 @@ package scala.async.run.exceptions { val len = await(base) throw new Exception(s"illegal length: $len") } - intercept[Exception] { Await.result(fut, 2.seconds) } + assertThrows[Exception] { Await.result(fut, 2.seconds) } } 
@Test @@ -38,7 +39,7 @@ package scala.async.run.exceptions { val x = await(base) x * 2 } - intercept[Exception] { Await.result(fut, 2.seconds) } + assertThrows[Exception] { Await.result(fut, 2.seconds) } } @Test @@ -48,9 +49,9 @@ package scala.async.run.exceptions { val a = await(base.mapTo[Int]) // result: 5 val b = await((Future { (a * 2).toString }).mapTo[Int]) // result: ClassCastException val c = await(Future { (7 * 2).toString }) // result: "14" - b + "-" + c + s"$b-$c" } - intercept[ClassCastException] { Await.result(fut, 2.seconds) } + assertThrows[ClassCastException] { Await.result(fut, 2.seconds) } } } diff --git a/test/async/jvm/futures.check b/test/async/jvm/futures.check deleted file mode 100644 index c0d02cf45930..000000000000 --- a/test/async/jvm/futures.check +++ /dev/null @@ -1,13 +0,0 @@ -futures.scala:121: warning: match may not be exhaustive. -It would fail on the following input: Failure(_) - f2 onComplete { case Success(_) => throw new ThrowableTest("dispatcher receive") } - ^ -futures.scala:128: warning: match may not be exhaustive. -It would fail on the following input: Failure(_) - f2 onComplete { case Success(_) => throw new ThrowableTest("current thread receive") } - ^ -futures.scala:173: warning: match may not be exhaustive. 
-It would fail on the following input: Req((x: T forSome x not in (Int, String))) - def asyncReq[T](req: Req[T]) = req match { - ^ -warning: 9 deprecations (since 2.12.0); re-run with -deprecation for details diff --git a/test/async/jvm/futures.scala b/test/async/jvm/futures.scala index 04f2df8bcd56..cc871fb056a4 100644 --- a/test/async/jvm/futures.scala +++ b/test/async/jvm/futures.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync -deprecation + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.futures.FutureSpec]) package scala.async { @@ -53,7 +54,7 @@ package scala.async.run.futures { import scala.reflect.{ClassTag, classTag} import scala.async.TestLatch - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test @@ -73,7 +74,7 @@ package scala.async.run.futures { //TODO use normal Assert calls in the tests implicit class objectops(obj: Any) { - def mustBe(other: Any) = assert(obj == other, obj + " is not " + other) + def mustBe(other: Any) = assert(obj == other, s"$obj is not $other") def mustEqual(other: Any) = mustBe(other) } @@ -84,6 +85,7 @@ package scala.async.run.futures { case "Hello" => Future { "World" } case "Failure" => Future.failed(new RuntimeException("Expected exception; to test fault-tolerance")) case "NoReply" => Promise[String]().future + case x => throw new MatchError(x) } val defaultTimeout = 5 seconds @@ -118,14 +120,14 @@ package scala.async.run.futures { } f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") } - f2 onComplete { case Success(_) => throw new ThrowableTest("dispatcher receive") } + f2 onComplete { case Success(_) => throw new ThrowableTest("dispatcher receive"); case _ => } latch.open() Await.result(f2, defaultTimeout) mustBe ("success") f2 foreach { _ => throw new ThrowableTest("current thread foreach") } - f2 onComplete { case Success(_) => throw new ThrowableTest("current thread receive") } + f2 
onComplete { case Success(_) => throw new ThrowableTest("current thread receive"); case _ => } Await.result(f3, defaultTimeout) mustBe ("SUCCESS") @@ -151,14 +153,14 @@ package scala.async.run.futures { val a = await(future0.mapTo[Int]) // returns 5 val b = await(asyncInt(a)) // returns "10" val c = await(asyncInt(7)) // returns "14" - b + "-" + c + s"$b-$c" } val future2 = async { val a = await(future0.mapTo[Int]) val b = await((Future { (a * 2).toString }).mapTo[Int]) val c = await(Future { (7 * 2).toString }) - b + "-" + c + s"$b-$c" } Await.result(future1, defaultTimeout) mustBe ("10-14") @@ -173,19 +175,20 @@ package scala.async.run.futures { def asyncReq[T](req: Req[T]) = req match { case Req(s: String) => Future { Res(s.length) } case Req(i: Int) => Future { Res((i * 2).toString) } + case _ => ??? } val future1 = for { Res(a: Int) <- asyncReq(Req("Hello")) Res(b: String) <- asyncReq(Req(a)) Res(c: String) <- asyncReq(Req(7)) - } yield b + "-" + c + } yield s"$b-$c" val future2 = for { Res(a: Int) <- asyncReq(Req("Hello")) Res(b: Int) <- asyncReq(Req(a)) Res(c: Int) <- asyncReq(Req(7)) - } yield b + "-" + c + } yield s"$b-$c" Await.result(future1, defaultTimeout) mustBe ("10-14") intercept[NoSuchElementException] { Await.result(future2, defaultTimeout) } @@ -344,14 +347,14 @@ package scala.async.run.futures { idx => async(idx, idx * 20) } // TODO: change to `foldLeft` after support for 2.11 is dropped - val folded = Future.fold(futures)(0)(_ + _) + val folded = Future.foldLeft(futures)(0)(_ + _) Await.result(folded, timeout) mustBe (45) val futuresit = (0 to 9) map { idx => async(idx, idx * 20) } // TODO: change to `foldLeft` after support for 2.11 is dropped - val foldedit = Future.fold(futures)(0)(_ + _) + val foldedit = Future.foldLeft(futures)(0)(_ + _) Await.result(foldedit, timeout) mustBe (45) } @@ -381,7 +384,7 @@ package scala.async.run.futures { idx => async(idx, idx * 10) } // TODO: change to `foldLeft` after support for 2.11 is dropped - val 
folded = Future.fold(futures)(0)(_ + _) + val folded = Future.foldLeft(futures)(0)(_ + _) intercept[IllegalArgumentException] { Await.result(folded, timeout) }.getMessage mustBe ("shouldFoldResultsWithException: expected") @@ -392,7 +395,7 @@ package scala.async.run.futures { def test(testNumber: Int): Unit = { val fs = (0 to 1000) map (i => Future(i)) // TODO: change to `foldLeft` after support for 2.11 is dropped - val f = Future.fold(fs)(ArrayBuffer.empty[AnyRef]) { + val f = Future.foldLeft(fs)(ArrayBuffer.empty[AnyRef]) { case (l, i) if i % 2 == 0 => l += i.asInstanceOf[AnyRef] case (l, _) => l } @@ -406,7 +409,7 @@ package scala.async.run.futures { @Test def `return zero value if folding empty list`(): Unit = { // TODO: change to `foldLeft` after support for 2.11 is dropped - val zero = Future.fold(List[Future[Int]]())(0)(_ + _) + val zero = Future.foldLeft(List[Future[Int]]())(0)(_ + _) Await.result(zero, defaultTimeout) mustBe (0) } @@ -419,12 +422,12 @@ package scala.async.run.futures { val futures = (0 to 9) map { async } // TODO: change to `reduceLeft` after support for 2.11 is dropped - val reduced = Future.reduce(futures)(_ + _) + val reduced = Future.reduceLeft(futures)(_ + _) Await.result(reduced, timeout) mustBe (45) val futuresit = (0 to 9) map { async } // TODO: change to `reduceLeft` after support for 2.11 is dropped - val reducedit = Future.reduce(futuresit)(_ + _) + val reducedit = Future.reduceLeft(futuresit)(_ + _) Await.result(reducedit, timeout) mustBe (45) } @@ -439,7 +442,7 @@ package scala.async.run.futures { idx => async(idx, idx * 10) } // TODO: change to `reduceLeft` after support for 2.11 is dropped - val failed = Future.reduce(futures)(_ + _) + val failed = Future.reduceLeft(futures)(_ + _) intercept[IllegalArgumentException] { Await.result(failed, timeout) }.getMessage mustBe ("shouldFoldResultsWithException: expected") @@ -448,7 +451,7 @@ package scala.async.run.futures { @Test def `shouldReduceThrowNSEEOnEmptyInput`(): Unit = { 
intercept[java.util.NoSuchElementException] { // TODO: change to `reduceLeft` after support for 2.11 is dropped - val emptyreduced = Future.reduce(List[Future[Int]]())(_ + _) + val emptyreduced = Future.reduceLeft(List[Future[Int]]())(_ + _) Await.result(emptyreduced, defaultTimeout) } } diff --git a/test/async/jvm/hygiene.scala b/test/async/jvm/hygiene.scala index 0baa4fe18a20..39cb86c11767 100644 --- a/test/async/jvm/hygiene.scala +++ b/test/async/jvm/hygiene.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.hygiene.HygieneSpec]) package scala.async.run.hygiene { @@ -8,7 +9,7 @@ package scala.async.run.hygiene { import scala.concurrent._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async._ + import scala.tools.testkit.async.Async._ import scala.concurrent.duration.Duration object TestUtil { import language.implicitConversions diff --git a/test/async/jvm/ifelse0.scala b/test/async/jvm/ifelse0.scala index ec7ba81aab75..e81a026e53b3 100644 --- a/test/async/jvm/ifelse0.scala +++ b/test/async/jvm/ifelse0.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.ifelse0.IfElseSpec]) package scala.async.run.ifelse0 { @@ -10,7 +11,7 @@ package scala.async.run.ifelse0 { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/ifelse0_while.scala b/test/async/jvm/ifelse0_while.scala index 34eb581b3194..a5cce1445e31 100644 --- a/test/async/jvm/ifelse0_while.scala +++ b/test/async/jvm/ifelse0_while.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + 
object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.ifelse0.WhileSpec]) package scala.async.run.ifelse0 { @@ -8,7 +9,7 @@ package scala.async.run.ifelse0 { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/ifelse1.scala b/test/async/jvm/ifelse1.scala index 9216b69b43fb..104cf20ce323 100644 --- a/test/async/jvm/ifelse1.scala +++ b/test/async/jvm/ifelse1.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.ifelse1.IfElse1Spec]) package scala.async.run.ifelse1 { @@ -6,7 +7,7 @@ package scala.async.run.ifelse1 { import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git a/test/async/jvm/ifelse2.scala b/test/async/jvm/ifelse2.scala index 4f2803dbf2de..a65ed71ad45e 100644 --- a/test/async/jvm/ifelse2.scala +++ b/test/async/jvm/ifelse2.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.ifelse2.IfElse2Spec]) package scala.async.run.ifelse2 { @@ -6,7 +7,7 @@ package scala.async.run.ifelse2 { import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git 
a/test/async/jvm/ifelse3.scala b/test/async/jvm/ifelse3.scala index 0521c5716a78..5f455d7bbfee 100644 --- a/test/async/jvm/ifelse3.scala +++ b/test/async/jvm/ifelse3.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.ifelse3.IfElse3Spec]) package scala.async.run.ifelse3 { @@ -6,7 +7,7 @@ package scala.async.run.ifelse3 { import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git a/test/async/jvm/ifelse4.scala b/test/async/jvm/ifelse4.scala index aed1783261e8..4fd1ff982921 100644 --- a/test/async/jvm/ifelse4.scala +++ b/test/async/jvm/ifelse4.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.ifelse4.IfElse4Spec]) package scala.async.run.ifelse4 { @@ -6,7 +7,7 @@ package scala.async.run.ifelse4 { import language.{reflectiveCalls, postfixOps, existentials} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import org.junit.Test import org.junit.Assert._ diff --git a/test/async/jvm/lazyval.scala b/test/async/jvm/lazyval.scala index 0f308ab6161b..414edd6c5310 100644 --- a/test/async/jvm/lazyval.scala +++ b/test/async/jvm/lazyval.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.lazyval.LazyValSpec]) package scala.async.run.lazyval { @@ -10,7 +11,7 @@ package scala.async.run.lazyval { import scala.concurrent.duration._ import ExecutionContext.Implicits.global import 
scala.collection.mutable.ListBuffer - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/live.scala b/test/async/jvm/live.scala index 6361f23f9b20..7ef1e9bd0a0f 100644 --- a/test/async/jvm/live.scala +++ b/test/async/jvm/live.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.live.LiveVariablesSpec]) package scala.async.run.live { @@ -7,7 +8,7 @@ package scala.async.run.live { import scala.concurrent._ import duration.Duration - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} import scala.collection.immutable object TestUtil { import language.implicitConversions @@ -49,8 +50,8 @@ package scala.async.run.live { } private def reflectivelyExtractStateMachine(runnable: Runnable) = { - assert(runnable.getClass == Class.forName("scala.concurrent.impl.CallbackRunnable"), runnable.getClass) - val fld = runnable.getClass.getDeclaredField("onComplete") + assert(runnable.getClass == Class.forName("scala.concurrent.impl.Promise$Transformation"), runnable.getClass) + val fld = runnable.getClass.getDeclaredField("_fun") fld.setAccessible(true) val stateMachine = fld.get(runnable) assert(stateMachine.getClass.getName.contains("stateMachine"), stateMachine.getClass) @@ -178,7 +179,7 @@ package scala.async.run.live { } } def randomTimesTwo = async { - val num = _root_.scala.math.random + val num = _root_.scala.math.random() if (num < 0 || num > 1) { await(errorGenerator(num)) } diff --git a/test/async/jvm/localclasses.scala b/test/async/jvm/localclasses.scala index 8326f5704d28..c0fb5d251051 100644 --- a/test/async/jvm/localclasses.scala +++ b/test/async/jvm/localclasses.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options 
-Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.neg.LocalClasses0Spec]) package scala.async.neg { @@ -8,7 +9,7 @@ package scala.async.neg { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/match0.scala b/test/async/jvm/match0.scala index 533eb2efb4da..566227e38d18 100644 --- a/test/async/jvm/match0.scala +++ b/test/async/jvm/match0.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.match0.MatchSpec]) package scala.async.run.match0 { @@ -11,7 +12,7 @@ package scala.async.run.match0 { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) @@ -118,6 +119,7 @@ package scala.async.run.match0 { await(0) case buf: Double => await(2) + case x => throw new MatchError(x) } }) diff --git a/test/async/jvm/nesteddef.scala b/test/async/jvm/nesteddef.scala index 1edb01e221af..656797e991c9 100644 --- a/test/async/jvm/nesteddef.scala +++ b/test/async/jvm/nesteddef.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.nesteddef.NestedDef]) package scala.async.run.nesteddef { @@ -8,7 +9,7 @@ package scala.async.run.nesteddef { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import 
scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/noawait.scala b/test/async/jvm/noawait.scala index a78446e898b8..788984fd8d5a 100644 --- a/test/async/jvm/noawait.scala +++ b/test/async/jvm/noawait.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.noawait.NoAwaitSpec]) package scala.async.run.noawait { @@ -9,7 +10,7 @@ package scala.async.run.noawait { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/stackoverflow.scala b/test/async/jvm/stackoverflow.scala index 74de6caadf60..6708575d3120 100644 --- a/test/async/jvm/stackoverflow.scala +++ b/test/async/jvm/stackoverflow.scala @@ -1,8 +1,9 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async.{async, await} +import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) diff --git a/test/async/jvm/syncOptimization.scala b/test/async/jvm/syncOptimization.scala index 342db5010970..4d23ea348231 100644 --- a/test/async/jvm/syncOptimization.scala +++ b/test/async/jvm/syncOptimization.scala @@ -1,5 +1,6 @@ -// scalac: -Xasync -import scala.tools.partest.async.Async._ +//> using options -Xasync + +import scala.tools.testkit.async.Async._ import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits._ diff --git 
a/test/async/jvm/toolbox.scala b/test/async/jvm/toolbox.scala index 7da4759e2c9d..c10362367dad 100644 --- a/test/async/jvm/toolbox.scala +++ b/test/async/jvm/toolbox.scala @@ -1,11 +1,12 @@ -import tools.reflect._ -import reflect.runtime._ +import scala.tools.reflect._ +import scala.reflect.runtime._ object Test extends App { val box = currentMirror.mkToolBox(options = "-Xasync") + val code = """ import scala.concurrent._, scala.concurrent.duration._, ExecutionContext.Implicits._ - import scala.tools.partest.async._ + import scala.tools.testkit.async._ val f1 = Future(1) val f2 = Future(2) val res = Async.async { Async.await(f1) + Async.await(f2) } diff --git a/test/async/jvm/toughtype.check b/test/async/jvm/toughtype.check new file mode 100644 index 000000000000..8dcb3441e8d4 --- /dev/null +++ b/test/async/jvm/toughtype.check @@ -0,0 +1,3 @@ +toughtype.scala:175: warning: discarded pure expression does nothing + identity[A] _ + ^ diff --git a/test/async/jvm/toughtype.scala b/test/async/jvm/toughtype.scala index 22f5b893b009..d8a9643e0efb 100644 --- a/test/async/jvm/toughtype.scala +++ b/test/async/jvm/toughtype.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + object Test extends scala.tools.partest.JUnitTest(classOf[scala.async.run.toughtype.ToughTypeSpec]) package scala.async.run.toughtype { @@ -10,7 +11,7 @@ package scala.async.run.toughtype { import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions implicit def lift[T](t: T): Future[T] = Future.successful(t) @@ -33,7 +34,7 @@ package scala.async.run.toughtype { class ToughTypeSpec { - @Test def `propogates tough types`(): Unit = { + @Test def `propagates tough types`(): Unit = { val fut = ToughTypeObject.m2 val res: (List[_], scala.async.run.toughtype.ToughTypeObject.Inner) = 
Await.result(fut, 2 seconds) assertEquals(Nil, res._1) @@ -75,13 +76,14 @@ package scala.async.run.toughtype { var ss = s ss = s await(x) + case x => throw new MatchError(x) } }) assertEquals(0, m7(Nil)) } @Test def existentialBind2Issue19(): Unit = { - import scala.tools.partest.async.Async._, scala.concurrent.ExecutionContext.Implicits.global + import scala.tools.testkit.async.Async._, scala.concurrent.ExecutionContext.Implicits.global def conjure[T]: T = null.asInstanceOf[T] def m3 = async { @@ -113,6 +115,7 @@ package scala.async.run.toughtype { val foo = await(5) val e0 = buf(0) ContainerImpl(e0) + case x => throw new MatchError(x) } }) foo @@ -152,7 +155,7 @@ package scala.async.run.toughtype { import language.{reflectiveCalls, postfixOps} import scala.concurrent.{Future, ExecutionContext, Await} import scala.concurrent.duration._ - import scala.tools.partest.async.Async.{async, await} + import scala.tools.testkit.async.Async.{async, await} class Foo[A] @@ -183,7 +186,7 @@ package scala.async.run.toughtype { @Test def ticket63(): Unit = { - import scala.tools.partest.async.Async._ + import scala.tools.testkit.async.Async._ import scala.concurrent.{ ExecutionContext, Future } object SomeExecutionContext extends ExecutionContext { @@ -196,7 +199,7 @@ package scala.async.run.toughtype { } object FunDep { - implicit def `Something to do with List`[W, S, R](implicit funDep: FunDep[W, S, R]) = + implicit def `Something to do with List`[W, S, R](implicit funDep: FunDep[W, S, R]): FunDep[W,List[S],W] = new FunDep[W, List[S], W] { def method(w: W, l: List[S]) = async { val it = l.iterator @@ -226,7 +229,7 @@ package scala.async.run.toughtype { } @Test def ticket83ValueClass(): Unit = { - import scala.tools.partest.async.Async._ + import scala.tools.testkit.async.Async._ import scala.concurrent._, duration._, ExecutionContext.Implicits.global val f = async { val uid = new IntWrapper("foo") diff --git a/test/async/neg/ill-nested-await.check 
b/test/async/neg/ill-nested-await.check index 5be43d6d7f44..e04df598a775 100644 --- a/test/async/neg/ill-nested-await.check +++ b/test/async/neg/ill-nested-await.check @@ -1,46 +1,46 @@ -ill-nested-await.scala:16: error: await must not be used under a nested method. +ill-nested-await.scala:17: error: await must not be used under a nested method. async { foo(0)(await(f(0))) } ^ -ill-nested-await.scala:21: error: await must not be used under a nested object. +ill-nested-await.scala:22: error: await must not be used under a nested object. async { object Nested { await(f(false)) } } ^ -ill-nested-await.scala:26: error: await must not be used under a nested trait. +ill-nested-await.scala:27: error: await must not be used under a nested trait. async { trait Nested { await(f(false)) } } ^ -ill-nested-await.scala:31: error: await must not be used under a nested class. +ill-nested-await.scala:32: error: await must not be used under a nested class. async { class Nested { await(f(false)) } } ^ -ill-nested-await.scala:36: error: await must not be used under a nested method. +ill-nested-await.scala:37: error: await must not be used under a nested method. async { () => { await(f(false)) } } ^ -ill-nested-await.scala:41: error: await must not be used under a nested function. +ill-nested-await.scala:42: error: await must not be used under a nested function. async { { case 0 => { await(f(false)) } } : PartialFunction[Int, Boolean] } ^ -ill-nested-await.scala:46: error: await must not be used under a try/catch. +ill-nested-await.scala:47: error: await must not be used under a try/catch. async { try { await(f(false)) } catch { case _: Throwable => } } ^ -ill-nested-await.scala:51: error: await must not be used under a try/catch. +ill-nested-await.scala:52: error: await must not be used under a try/catch. async { try { () } catch { case _: Throwable => await(f(false)) } } ^ -ill-nested-await.scala:56: error: await must not be used under a try/catch. 
+ill-nested-await.scala:57: error: await must not be used under a try/catch. async { try { () } finally { await(f(false)) } } ^ -ill-nested-await.scala:61: error: await must not be used under a nested method. +ill-nested-await.scala:62: error: await must not be used under a nested method. async { def foo = await(f(false)) } ^ -ill-nested-await.scala:70: error: await must not be used under a lazy val initializer. +ill-nested-await.scala:71: error: await must not be used under a lazy val initializer. def foo(): Any = async { val x = { lazy val y = await(f(0)); y } } ^ -ill-nested-await.scala:76: error: await must not be used under a nested method. +ill-nested-await.scala:77: error: await must not be used under a nested method. async { fooAsByNameLambda(await(f(""))) } ^ -ill-nested-await.scala:81: error: await must not be used under a synchronized call. +ill-nested-await.scala:82: error: await must not be used under a synchronized call. async { lock.synchronized { await(f(1)) + await(f(2)) } } ^ -ill-nested-await.scala:81: error: await must not be used under a synchronized call. +ill-nested-await.scala:82: error: await must not be used under a synchronized call. 
async { lock.synchronized { await(f(1)) + await(f(2)) } } ^ -ill-nested-await.scala:10: error: `await` must be enclosed in an `async` block +ill-nested-await.scala:11: error: `await` must be enclosed in an `async` block await[Any](f(null)) ^ -15 errors found +15 errors diff --git a/test/async/neg/ill-nested-await.scala b/test/async/neg/ill-nested-await.scala index 00c50b9d4be7..aacee1e7ac9d 100644 --- a/test/async/neg/ill-nested-await.scala +++ b/test/async/neg/ill-nested-await.scala @@ -1,7 +1,8 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.concurrent._ import ExecutionContext.Implicits.global -import scala.tools.partest.async.Async._ +import scala.tools.testkit.async.Async._ import Future.{successful => f} diff --git a/test/async/neg/naked_await.check b/test/async/neg/naked_await.check index ba1250f4d64b..e4aa25b92eb9 100644 --- a/test/async/neg/naked_await.check +++ b/test/async/neg/naked_await.check @@ -1,7 +1,7 @@ -naked_await.scala:9: error: await must not be used under a nested method. +naked_await.scala:10: error: await must not be used under a nested method. 
def foo = await(Future(3)) ^ -naked_await.scala:11: error: `await` must be enclosed in an `async` block +naked_await.scala:12: error: `await` must be enclosed in an `async` block await(Future(4)) ^ -two errors found +2 errors diff --git a/test/async/neg/naked_await.scala b/test/async/neg/naked_await.scala index 73cb96f6cadf..13333d11615f 100644 --- a/test/async/neg/naked_await.scala +++ b/test/async/neg/naked_await.scala @@ -1,5 +1,6 @@ -// scalac: -Xasync -import scala.tools.partest.async.Async._ +//> using options -Xasync + +import scala.tools.testkit.async.Async._ import scala.concurrent.{ExecutionContext, Future} object Test { diff --git a/test/async/neg/stark_naked_await.check b/test/async/neg/stark_naked_await.check index b79a4131923d..1f6f4c7dc9cf 100644 --- a/test/async/neg/stark_naked_await.check +++ b/test/async/neg/stark_naked_await.check @@ -1,4 +1,4 @@ -stark_naked_await.scala:7: error: `await` must be enclosed in an `async` block +stark_naked_await.scala:8: error: `await` must be enclosed in an `async` block await(Future(4)) ^ -one error found +1 error diff --git a/test/async/neg/stark_naked_await.scala b/test/async/neg/stark_naked_await.scala index dc184bf8e637..11970557b7c2 100644 --- a/test/async/neg/stark_naked_await.scala +++ b/test/async/neg/stark_naked_await.scala @@ -1,5 +1,6 @@ -// scalac: -Xasync -import scala.tools.partest.async.Async._ +//> using options -Xasync + +import scala.tools.testkit.async.Async._ import scala.concurrent.{ExecutionContext, Future} object Test { diff --git a/test/async/run/booleans.scala b/test/async/run/booleans.scala index 2f29c0a0ac2b..f5c74607f59b 100644 --- a/test/async/run/booleans.scala +++ b/test/async/run/booleans.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.tools.partest.async.OptionAwait._ import org.junit.Assert._ diff --git a/test/async/run/edge-cases.scala b/test/async/run/edge-cases.scala index 21ce019c29cf..ab244c239a62 100644 --- 
a/test/async/run/edge-cases.scala +++ b/test/async/run/edge-cases.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.tools.partest.async.OptionAwait._ import org.junit.Assert._ @@ -33,6 +34,7 @@ object Test { value(Some(1)) "foo" match { case x if "".isEmpty => x + case x => throw new MatchError(x) } }: AnyRef }) diff --git a/test/async/run/lambda.scala b/test/async/run/lambda.scala index e5bc312f6ea0..455efc8af908 100644 --- a/test/async/run/lambda.scala +++ b/test/async/run/lambda.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.tools.partest.async.OptionAwait._ //import org.junit.Assert._ diff --git a/test/async/run/output.scala b/test/async/run/output.scala index 0984f0f1cd34..a61332f768eb 100644 --- a/test/async/run/output.scala +++ b/test/async/run/output.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.tools.partest.async.{OutputAwait, Output} import scala.collection.immutable import OutputAwait._ diff --git a/test/async/run/smoketest.scala b/test/async/run/smoketest.scala index 563eb54c6408..f31c46445603 100644 --- a/test/async/run/smoketest.scala +++ b/test/async/run/smoketest.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.tools.partest.async.OptionAwait._ import org.junit.Assert._ @@ -66,4 +67,4 @@ object Test { } }) } -} +} \ No newline at end of file diff --git a/test/async/run/string-switch-async.scala b/test/async/run/string-switch-async.scala new file mode 100644 index 000000000000..f907e0b5a34f --- /dev/null +++ b/test/async/run/string-switch-async.scala @@ -0,0 +1,19 @@ +//> using options -Xasync + +import scala.tools.partest.async.OptionAwait._ +import org.junit.Assert._ + +object Test { + def main(args: Array[String]): Unit = { + assertEquals(Some(""), testSwitch("")) + assertEquals(Some("aa"), testSwitch("a")) + } + + private def testSwitch(s: String) = optionally { + s match { + case "" => "" + case p => + 
value(Some(p)) + p + } + } +} diff --git a/test/async/run/string-switch-bug.scala b/test/async/run/string-switch-bug.scala new file mode 100644 index 000000000000..28fb8889c504 --- /dev/null +++ b/test/async/run/string-switch-bug.scala @@ -0,0 +1,29 @@ +//> using options -Xasync +import scala.tools.partest.async.OptionAwait._ +import org.junit.Assert._ + +// Scala.js compatible test suite for -Xasync that doesn't use Scala futures +object Test { + def main(args: Array[String]): Unit = { + stringSwitchBug() + } + + private def stringSwitchBug() = { + assertEquals(Some(true), optionally { + val x: String = "" + val as = List("x") + val it = as.iterator + var okay = false + while (it.hasNext) { + val x = it.next() + val res = (x match { + case "x" => + okay = value(Some(1)) == 1 + () + case _ => () + }) + } + okay + }) + } +} diff --git a/test/async/run/switch-await-in-guard.scala b/test/async/run/switch-await-in-guard.scala index f4797e03d11b..28678792054f 100644 --- a/test/async/run/switch-await-in-guard.scala +++ b/test/async/run/switch-await-in-guard.scala @@ -1,4 +1,4 @@ -// scalac: -Xasync +//> using options -Xasync import scala.tools.partest.async.OptionAwait._ import org.junit.Assert._ diff --git a/test/async/run/t12723.scala b/test/async/run/t12723.scala new file mode 100644 index 000000000000..0c8e0796999d --- /dev/null +++ b/test/async/run/t12723.scala @@ -0,0 +1,13 @@ +//> using options -Xasync -Werror -Wnonunit-statement + +import scala.tools.partest.async.OptionAwait._ +import org.junit.Assert._ + +object Test { + def main(args: Array[String]): Unit = { + val r = optionally { + value(Some(true)) + } + assert(r.get) + } +} diff --git a/test/async/run/value-class.scala b/test/async/run/value-class.scala index 454bee15aaa6..0fd3d81f8b5a 100644 --- a/test/async/run/value-class.scala +++ b/test/async/run/value-class.scala @@ -1,4 +1,5 @@ -// scalac: -Xasync +//> using options -Xasync + import scala.tools.partest.async.OptionAwait._ import org.junit.Assert._ 
diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index 1c3cbee79f9d..71d0462889d4 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -1,52 +1,64 @@ # Scala library benchmarks This directory is used by the `bench` subproject of the Scala sbt build. -It makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/). +It makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](https://openjdk.java.net/projects/code-tools/jmh/). -## Running a benchmark +## About the benchmarks -Benchmarks are built with the bootstrap compiler ("starr") using the library built from the `library` project ("quick"). -If you want to test compiler changes you need to bootstrap with the new compiler. +Benchmarks are built with the reference compiler ("starr") using the library built from the `library` project ("quick"). +If you want to test compiler changes you need to bootstrap a new compiler. -You'll then need to know the fully-qualified name of the benchmark runner class. -The benchmarking classes are organized under `src/main/scala`, +The benchmarking classes are organized under `test/benchmarks/src/main/scala`, in the same package hierarchy as the classes that they test. -Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, -the benchmark runner would likely be named `scala.collection.mutable.OpenHashMapRunner`. -Using this example, one would simply run - bench/jmh:runMain scala.collection.mutable.OpenHashMapRunner +The benchmarking classes use the same package hierarchy as the classes that they test +in order to make it easy to expose members of the class under test in package-private scope, +should that be necessary for benchmarking. -in the Scala sbt build. +There are two types of classes in the source directory: +those suffixed `Benchmark`, and a few that are suffixed `Runner`. +(The latter are described below, under "Custom runners".) 
-The JMH results can be found under `../../target/jmh-results/` (i.e. the main Scala build's `target`, -not the one that contains the benchmark class files). `jmh-results` gets deleted on an sbt `bench/clean`, -so you should copy these files out of `target` if you wish to preserve them. +## Running a normal benchmark -## Creating a benchmark and runner +Use `bench/Jmh/run` and provide the fully qualified name of the benchmark +class: -The benchmarking classes use the same package hierarchy as the classes that they test -in order to make it easy to expose, in package scope, members of the class under test, -should that be necessary for benchmarking. + bench/Jmh/run scala.collection.mutable.ListBufferBenchmark -There are two types of classes in the source directory: -those suffixed `Benchmark` and those suffixed `Runner`. -The former are benchmarks that can be run directly using `bench/jmh:run`; -however, they are normally run from a corresponding class of the latter type, -which is run using `bench/jmh:runMain` (as described above). -This …`Runner` class is useful for setting appropriate JMH command options, +Results are printed to standard output. + +## Custom runners + +Some benchmarks have custom runners. A custom runner +can be useful for setting appropriate JMH command options, and for processing the JMH results into files that can be read by other tools, such as Gnuplot. -The `benchmark.JmhRunner` trait should be woven into any runner class, for the standard behavior that it provides. +Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, +the custom runner (if there is one) would likely be named +`scala.collection.mutable.OpenHashMapRunner`. +Using this example, one would run + + bench/Jmh/runMain scala.collection.mutable.OpenHashMapRunner + +in the Scala sbt build. + +Custom runner results are written to `../../target/jmh-results/` (i.e. the main Scala build's `target`, +not the one that contains the benchmark class files). 
`jmh-results` gets deleted on an sbt `bench/clean`, +so you should copy these files out of `target` if you wish to preserve them. + +If you want to make your own custom runner, extend the `benchmark.JmhRunner` trait, for the standard behavior that it provides. This includes creating output files in a subdirectory of `target/jmh-results` derived from the fully-qualified package name of the `Runner` class. ## Some useful HotSpot options -Adding these to the `jmh:run` or `jmh:runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. + +Adding these to the `Jmh/run` or `Jmh/runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. They require prefixing with `-jvmArgs`. -See [the Java documentation](http://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. +See [the Java documentation](https://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. ### Viewing JIT compilation events + Adding `-XX:+PrintCompilation` shows when Java methods are being compiled or deoptimized. At the most basic level, these messages will tell you whether the code that you're measuring is still being tuned, @@ -54,16 +66,20 @@ so that you know whether you're running enough warm-up iterations. See [Kris Mok's notes](https://gist.github.com/rednaxelafx/1165804#file-notes-md) to interpret the output in detail. ### Consider GC events + If you're not explicitly performing `System.gc()` calls outside of your benchmarking code, you should add the JVM option `-verbose:gc` to understand the effect that GCs may be having on your tests. ### "Diagnostic" options + These require the `-XX:+UnlockDiagnosticVMOptions` JVM option. #### Viewing inlining events + Add `-XX:+PrintInlining`. #### Viewing the disassembled code + If you're running OpenJDK or Oracle JVM, you may need to install the disassembler library (`hsdis-amd64.so` for the `amd64` architecture). 
In Debian, this is available in @@ -84,16 +100,16 @@ To show it for _all_ methods, add `-XX:+PrintAssembly`. ### Using JITWatch -[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT compiled +[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT-compiled code. If you install `hsdis`, as described above, machine code disassembly is also created. You can generate the `hotspot.log` file for a benchmark run by adding the [required JVM options](https://github.com/AdoptOpenJDK/jitwatch/wiki/Building-hsdis) -to JMH benchmark execution: +to JMH benchmark execution: ``` -sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly +sbt:root> bench/Jmh/run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly ... [info] Loaded disassembler from /Users/jz/.jabba/jdk/1.8.172/Contents/Home/jre/lib/hsdis-amd64.dylib [info] Decoding compiled method 0x0000000113f60bd0: @@ -114,8 +130,8 @@ sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger JITWatch requires configuration of the class and source path. We generate that with a custom task in our build: ``` -sbt> bench/jmh:jitwatchConfigFile -[info] Resolving jline#jline;2.14.6 ... +sbt> bench/Jmh/jitwatchConfigFile +... jmh ... [info] ^-- UNRESOLVED DEPENDENCIES warnings above are normal, please ignore @@ -125,27 +141,13 @@ jmh sbt> ^C ``` -Build jitwatch. - -``` -$ git clone https://github.com/AdoptOpenJDK/jitwatch -$ cd jitwatch -$ mvn install -``` - -Launch with the generated config file. 
-``` -$ ./launchUI.sh -Djitwatch.config.file=/Users/jz/code/scala/test/benchmarks/target/jitwatch-jmh.properties -``` - - - -Select the generated `hotspot.log`, `start`, and then browse the benchmark to start gleaning insights! +Follow instructions in the output above and start gleaning insights! ## Useful reading + * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) * Brian Goetz's "Java theory and practice" articles: - * "[Dynamic compilation and performance measurement](http://www.ibm.com/developerworks/java/library/j-jtp12214/)" - * "[Anatomy of a flawed benchmark](http://www.ibm.com/developerworks/java/library/j-jtp02225/)" + * "[Dynamic compilation and performance measurement](https://www.ibm.com/developerworks/java/library/j-jtp12214/)" + * "[Anatomy of a flawed benchmark](https://www.ibm.com/developerworks/java/library/j-jtp02225/)" * [Doug Lea's JSR 166 benchmarks](http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/test/loops/) -* "[Measuring performance](http://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections +* "[Measuring performance](https://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections diff --git a/test/benchmarks/src/main/scala-2.13/scala/scala/collection/StringOpsBenchmark.scala b/test/benchmarks/src/main/scala-2.13/scala/scala/collection/StringOpsBenchmark.scala new file mode 100644 index 000000000000..644d3a1614f4 --- /dev/null +++ b/test/benchmarks/src/main/scala-2.13/scala/scala/collection/StringOpsBenchmark.scala @@ -0,0 +1,143 @@ +package scala.collection + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ + +import scala.util.Random + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class StringOpsBenchmark { + 
@Param(Array("0", "1", "10", "100", "1000")) + var size: Int = _ + + var prefix: String = _ + var suffix: String = _ + + // Used when we want to test stripPrefix etc with a string which is definitely not present + var impossibleString: String = _ + + var testObject: StringOps = _ + + @Setup(Level.Trial) def initKeys(): Unit = { + val randomString = Random.nextString(size) + + testObject = new StringOps(randomString) + + val prefixAndSuffixLength = size / 10 + prefix = randomString.substring(0, prefixAndSuffixLength) + suffix = randomString.reverse.substring(0, prefixAndSuffixLength) + + impossibleString = randomString * 2 + } + + @Benchmark def map: Any = { + testObject.map(char => char) + } + + @Benchmark def flatMap: Any = { + testObject.flatMap(char => char.toString) + } + + @Benchmark def concat: Any = { + testObject.concat(suffix) + } + + @Benchmark def padTo: Any = { + testObject.padTo(size * 2, 'x') + } + + @Benchmark def prepended: Any = { + testObject.prepended('x') + } + + @Benchmark def appended: Any = { + testObject.appended('x') + } + + @Benchmark def patch_zeroElementsfromZero: Any = { + testObject.patch(0, suffix, 0) + } + + @Benchmark def patch_fifteenElementsfromZero: Any = { + testObject.patch(0, suffix, 15) + } + + @Benchmark def patch_fiftyElementsfromZero: Any = { + testObject.patch(0, suffix, 50) + } + + @Benchmark def patch_zeroElementsfromFifteen: Any = { + testObject.patch(15, suffix, 0) + } + + @Benchmark def patch_fifteenElementsfromFifty: Any = { + testObject.patch(50, suffix, 15) + } + + @Benchmark def patch_fiftyElementsfromFifty: Any = { + testObject.patch(50, suffix, 50) + } + + @Benchmark def updated_atZero: Any = { + testObject.updated(0, 'x') + } + + @Benchmark def slice: Any = { + testObject.slice(size / 10, size / 2) + } + + @Benchmark def *(): Any = { + testObject * 10 + } + + @Benchmark def stripLineEnd: Any = { + testObject.stripLineEnd + } + + @Benchmark def linesWithSeparators: Any = { + testObject.linesWithSeparators + } + 
+ @Benchmark def lines: Any = { + testObject.linesIterator + } + + @Benchmark def capitalize: Any = { + testObject.capitalize + } + + @Benchmark def stripPrefix_present: Any = { + testObject.stripPrefix(prefix) + } + + @Benchmark def stripPrefix_notPresent: Any = { + testObject.stripPrefix(impossibleString) + } + + @Benchmark def stripSuffix_present: Any = { + testObject.stripSuffix(suffix) + } + + @Benchmark def stripSuffix_notPresent: Any = { + testObject.stripSuffix(impossibleString) + } + + @Benchmark def replaceAllLiterally: Any = { + testObject.replaceAllLiterally("A", "B") + } + + @Benchmark def stripMargin: Any = { + testObject.stripMargin + } + + @Benchmark def split: Any = { + testObject.split('A') + } +} diff --git a/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala similarity index 92% rename from test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala rename to test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala index 23e303ede0d6..e173c917fc33 100644 --- a/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala @@ -18,7 +18,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def withIntegerBitCount(bh: Blackhole) { + @Benchmark def withIntegerBitCount(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = withIntegerBitCount(v) // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)") @@ -30,7 +30,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def withIntegerNumberOfLeadingZeros(bh: Blackhole) { + @Benchmark def withIntegerNumberOfLeadingZeros(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = withIntegerNumberOfLeadingZeros(v) // assert (leadingZeros == withLoop(v), 
s"$leadingZeros != ${withLoop(v)} ($v)") @@ -42,7 +42,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def withLoop(bh: Blackhole) { + @Benchmark def withLoop(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = withLoop(v) bh.consume(leadingZeros) @@ -61,7 +61,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def withMatch(bh: Blackhole) { + @Benchmark def withMatch(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = withMatch(v) // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)") @@ -106,7 +106,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def with2DeBruijn(bh: Blackhole) { + @Benchmark def with2DeBruijn(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = with2DeBruijn(v) // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)") @@ -122,7 +122,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def withBinSearch(bh: Blackhole) { + @Benchmark def withBinSearch(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = withBinSearch(v) // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)") @@ -150,7 +150,7 @@ class BitManipulationBenchmark { ////////////////////////////////////////////// - @Benchmark def withSumBinSearch(bh: Blackhole) { + @Benchmark def withSumBinSearch(bh: Blackhole): Unit = { for (v <- powersOfTwo) { val leadingZeros = withSumBinSearch(v) // assert(leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)") @@ -167,4 +167,4 @@ class BitManipulationBenchmark { if (remaining >= 4) { remaining >>>= 2; exponent -= 2 } if (remaining >= 2) exponent - 1 else exponent } -} \ No newline at end of file +} diff --git a/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala 
b/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala index 6f49a94c25c8..04fe0574b020 100644 --- a/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala @@ -15,7 +15,7 @@ class DistinctBenchmark { @Param(Array("0", "1", "2", "5", "10", "20", "50", "100", "1000")) var size: Int = _ - @Param(Array("List", "Vector")) + @Param(Array("List", "Vector", "ListBuffer")) var collectionType: String = _ var distinctDataSet: Seq[String] = null @@ -33,9 +33,10 @@ class DistinctBenchmark { b2 += i.toString } - val adjustCollectionType = collectionType match { + val adjustCollectionType: (Seq[String] => Seq[String]) = collectionType match { case "List" => (col: Seq[String]) => col.toList case "Vector" => (col: Seq[String]) => col.toVector + case "ListBuffer" => (col: Seq[String]) => mutable.ListBuffer.from(col) } distinctDataSet = adjustCollectionType(b1.result()) diff --git a/test/benchmarks/src/main/scala/scala/collection/GroupByBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/GroupByBenchmark.scala deleted file mode 100644 index b317915ccec0..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/GroupByBenchmark.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection - - -import java.util.concurrent.TimeUnit -import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Level, Measurement, Mode, OutputTimeUnit, Param, Scope, Setup, State, Threads, Warmup} -import org.openjdk.jmh.infra.Blackhole -import scala.collection.mutable.ArrayBuffer - - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -class GroupByBenchmark { - @Param(Array("128", "512", "2048", "8192")) - var size : Int = _ - - @Param(Array("0", "32")) - var hashCodeCost: Int = _ - - @Param(Array("8", "2147483647")) - var maxNumGroups: Int = _ - - private case class Key(a: Int) { - override def hashCode(): Int = { - Blackhole.consumeCPU(hashCodeCost) - Integer.hashCode(a) - } - } - - private case class Groupable(a: Int) { - val key = new Key(a % maxNumGroups) - } - - private var groupables: ArrayBuffer[Groupable] = _ - - private class GroupByWrapper[A](as: collection.Iterable[A]) extends collection.Iterable[A] { - override def iterator: Iterator[A] = as.iterator - override protected def newBuilder = new mutable.Builder[A, Iterable[A]] { - override def clear(): Unit = () - override def result(): Iterable[A] = Nil - override def +=(elem: A): this.type = this - } - } - - @Setup(Level.Trial) def setup(): Unit = { - groupables = ArrayBuffer.tabulate(size)(Groupable(_)) - } - - @Benchmark def buildArrayBuffer(): AnyRef = { - groupBy(groupables) - } - @Benchmark def buildNil(): AnyRef = { - groupBy(new GroupByWrapper[Groupable](groupables)) - } - - private def groupBy[B](as: collection.Iterable[Groupable]) = as.groupBy(_.key) -} diff --git a/test/benchmarks/src/main/scala/scala/collection/GroupMapBenchmarks.scala b/test/benchmarks/src/main/scala/scala/collection/GroupMapBenchmarks.scala new file mode 100755 index 000000000000..977946c991eb --- /dev/null +++ 
b/test/benchmarks/src/main/scala/scala/collection/GroupMapBenchmarks.scala @@ -0,0 +1,182 @@ +package scala.collection + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + + +@BenchmarkMode(scala.Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Fork(1) +@Warmup(iterations = 8) +@Measurement(iterations = 8) +@State(Scope.Benchmark) +class GroupMapBenchmark { + + type CC[A] = immutable.List[A] + val factory = immutable.List + + @Param(scala.Array("2", "3", "5", "16", "17", "32", "33", "128", "129")) + var size: Int = _ + + var xs: CC[Long] = _ + + def fresh(n: Int) = factory((1 to n).map(_.toLong): _*) + + @Setup(Level.Trial) + def initTrial(): Unit = { + xs = fresh(10000) + } + + @Benchmark + def groupMap(bh: Blackhole): Unit = + xs.groupMap(_ % size)(_ * 2).foreach(bh.consume) + +} + +@BenchmarkMode(scala.Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Fork(1) +@Warmup(iterations = 8) +@Measurement(iterations = 8) +@State(Scope.Benchmark) +class GroupMapValuesBenchmark { + + type CC[A] = immutable.List[A] + val factory = immutable.List + + @Param(scala.Array("2", "3", "5", "16", "17", "32", "33", "128", "129")) + var size: Int = _ + + var xs: CC[Long] = _ + + def fresh(n: Int) = factory((1 to n).map(_.toLong): _*) + + @Setup(Level.Trial) + def initTrial(): Unit = { + xs = fresh(10000) + } + + @Benchmark + def groupMap(bh: Blackhole): Unit = + xs.groupBy(_ % size).mapValues(_.map(_ * 2)).foreach(bh.consume) + +} + +@BenchmarkMode(scala.Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Fork(1) +@Warmup(iterations = 8) +@Measurement(iterations = 8) +@State(Scope.Benchmark) +class ScalaGroupMapValuesBenchmark { + + type CC[A] = scala.collection.immutable.List[A] + val factory = scala.collection.immutable.List + + @Param(scala.Array("2", "3", "5", "16", "17", "32", "33", "128", "129")) + var size: Int = _ + + var xs: CC[Long] = _ + + def fresh(n: Int) 
= factory((1 to n).map(_.toLong): _*) + + @Setup(Level.Trial) + def initTrial(): Unit = { + xs = fresh(10000) + } + + @Benchmark + def groupMap(bh: Blackhole): Unit = + xs.groupBy(_ % size).mapValues(_.map(_ * 2)).foreach(bh.consume) + +} + +@BenchmarkMode(scala.Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Fork(1) +@Warmup(iterations = 8) +@Measurement(iterations = 8) +@State(Scope.Benchmark) +class GroupMapReduceBenchmark { + + type CC[A] = immutable.List[A] + val factory = immutable.List + + @Param(scala.Array("2", "3", "5", "16", "17", "32", "33", "128", "129")) + var size: Int = _ + + var xs: CC[Long] = _ + + def fresh(n: Int) = factory((1 to n).map(_.toLong): _*) + + @Setup(Level.Trial) + def initTrial(): Unit = { + xs = fresh(10000) + } + + @Benchmark + def groupMapReduce(bh: Blackhole): Unit = + xs.groupMapReduce(_ % size)(_ * 2)(_ + _).foreach(bh.consume) + +} + +@BenchmarkMode(scala.Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Fork(1) +@Warmup(iterations = 8) +@Measurement(iterations = 8) +@State(Scope.Benchmark) +class GroupMapValuesReduceBenchmark { + + type CC[A] = immutable.List[A] + val factory = immutable.List + + @Param(scala.Array("2", "3", "5", "16", "17", "32", "33", "128", "129")) + var size: Int = _ + + var xs: CC[Long] = _ + + def fresh(n: Int) = factory((1 to n).map(_.toLong): _*) + + @Setup(Level.Trial) + def initTrial(): Unit = { + xs = fresh(10000) + } + + @Benchmark + def groupMapReduce(bh: Blackhole): Unit = + xs.groupBy(_ % size).mapValues(_.map(_ * 2).reduce(_ + _)).foreach(bh.consume) + +} + + +@BenchmarkMode(scala.Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@Fork(1) +@Warmup(iterations = 8) +@Measurement(iterations = 8) +@State(Scope.Benchmark) +class ScalaGroupMapValuesReduceBenchmark { + + type CC[A] = scala.collection.immutable.List[A] + val factory = scala.collection.immutable.List + + @Param(scala.Array("2", "3", "5", "16", "17", "32", "33", "128", "129")) + var 
size: Int = _ + + var xs: CC[Long] = _ + + def fresh(n: Int) = factory((1 to n).map(_.toLong): _*) + + @Setup(Level.Trial) + def initTrial(): Unit = { + xs = fresh(10000) + } + + @Benchmark + def groupMapReduce(bh: Blackhole): Unit = + xs.groupBy(_ % size).mapValues(_.map(_ * 2).reduce(_ + _)).foreach(bh.consume) + +} diff --git a/test/benchmarks/src/main/scala/scala/collection/SizeCompareOpsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/SizeCompareOpsBenchmark.scala new file mode 100644 index 000000000000..3a2bd1d3e420 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/SizeCompareOpsBenchmark.scala @@ -0,0 +1,37 @@ +package scala.collection + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ + +import scala.util.Random + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class SizeCompareOpsBenchmark { + @Param(Array("0", "1", "10", "100", "1000")) + var size: Int = _ + + @Param(Array("1", "100", "10000")) + var cmpTo: Int = _ + + var values: List[Int] = _ + + + @Setup(Level.Trial) def initKeys(): Unit = { + values = List.fill(size)(Random.nextInt()) + } + + @Benchmark def sizeCompareUgly: Any = { + values.sizeCompare(cmpTo) == 0 + } + + @Benchmark def sizeComparePretty: Any = { + values.sizeIs == cmpTo + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala new file mode 100644 index 000000000000..62755b48439d --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala @@ -0,0 +1,100 @@ +package scala.collection.immutable + +import java.util.concurrent.TimeUnit +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.reflect.ClassTag + 
+@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ArraySeqBenchmark { + + @Param(Array("0", "1", "10", "100", "1000", "10000")) + var size: Int = _ + var integersS: ArraySeq[Int] = _ + var stringsS: ArraySeq[String] = _ + var newS: Array[String] = _ + + @Setup(Level.Trial) def initNumbers: Unit = { + val integers = (1 to size).toList + val strings = integers.map(_.toString) + integersS = ArraySeq.unsafeWrapArray(integers.toArray) + stringsS = ArraySeq.unsafeWrapArray(strings.toArray) + newS = Array("a", "b", "c", "d", "e", "f") + } + + @Benchmark def sortedStringOld(bh: Blackhole): Unit = + bh.consume(oldSorted(stringsS)) + + @Benchmark def sortedIntOld(bh: Blackhole): Unit = + bh.consume(oldSorted(integersS)) + + @Benchmark def sortedIntCustomOld(bh: Blackhole): Unit = + bh.consume(oldSorted(integersS)(Ordering.Int.reverse, implicitly)) + + @Benchmark def sortedStringNew(bh: Blackhole): Unit = + bh.consume(stringsS.sorted) + + @Benchmark def sortedIntNew(bh: Blackhole): Unit = + bh.consume(integersS.sorted) + + @Benchmark def sortedIntCustomNew(bh: Blackhole): Unit = + bh.consume(integersS.sorted(Ordering.Int.reverse)) + + private[this] def oldSorted[A](seq: ArraySeq[A])(implicit ord: Ordering[A], tag: ClassTag[A]): ArraySeq[A] = { + val len = seq.length + val b = ArraySeq.newBuilder[A](tag) + if (len == 1) b ++= seq + else if (len > 1) { + b.sizeHint(len) + val arr = new Array[AnyRef](len) + var i = 0 + for (x <- seq) { + arr(i) = x.asInstanceOf[AnyRef] + i += 1 + } + java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) + i = 0 + while (i < arr.length) { + b += arr(i).asInstanceOf[A] + i += 1 + } + } + b.result() + } + + // newS is used to avoid allocating Strings, while still performing some sort of "mapping". 
+ + @Benchmark def mapSOld(): ArraySeq[AnyRef] = + oldMap(stringsS)(x => newS(x.length)) + + @Benchmark def mapSNew(): ArraySeq[AnyRef] = + stringsS.map(x => newS(x.length)) + + // Mapping an ArraySeq.ofInt results in an ArraySeq.ofRef containing java.lang.Integers. + // Boxing small integers doesn't result in allocations thus the choice of _ & 0xf as the mapping function. + + @Benchmark def mapIOld(): ArraySeq[Int] = + oldMap(integersS)(_ & 0xf) + + @Benchmark def mapINew(): ArraySeq[Int] = + integersS.map(_ & 0xf) + + private def oldMap[A, B](seq: ArraySeq[A])(f: A => B): ArraySeq[B] = + seq.iterableFactory.tabulate(seq.length)(i => f(seq.apply(i))) + + @Benchmark def `min-max is reduction`(bh: Blackhole): Unit = bh.consume { + integersS.max + } + + @Benchmark def sliding(bh: Blackhole): Any = { + var coll = stringsS + coll.sliding(2).foreach(bh.consume) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/BitSetBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/BitSetBenchmark.scala new file mode 100644 index 000000000000..34b091574a6d --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/BitSetBenchmark.scala @@ -0,0 +1,36 @@ +package scala.collection.immutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.util.Random + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(1) +@Threads(1) +@Warmup(iterations = 6) +@Measurement(iterations = 6) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BitSetBenchmark { + @Param(Array("1", "1000", "10000000")) + var sizeLeft: Int = _ + + var percentageFull: Double = 0.3 + + var bitSet: BitSet = _ + var arg: BitSet = _ + + @Setup(Level.Iteration) def initNumbers: Unit = { + bitSet = (0 to sizeLeft).filter(_ => Random.nextDouble() <= percentageFull).to(BitSet) + } + + @Benchmark + def filter(bh: Blackhole): Unit = { + (1 to 10) foreach { _ => + 
bh.consume(bitSet.filter(_ % 2 == 0)) + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/GenerateVectorBenchmark2Charts.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/GenerateVectorBenchmark2Charts.scala new file mode 100644 index 000000000000..46c9b3371d46 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/GenerateVectorBenchmark2Charts.scala @@ -0,0 +1,140 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import java.io.{BufferedWriter, File, FileWriter, PrintWriter} +import java.nio.file.Files +import java.text.DecimalFormat + +import scala.jdk.CollectionConverters._ + +/** + * Generate line charts for Vector benchmarks. 
+ * + * Run benchmark and collect raw data: + * bench/jmh:run -rff vector-bench.csv -jvmArgs "-Xms256M -Xmx1G" scala.collection.immutable.VectorBenchmark2 + * + * Generate diagram data: + * bench/runMain scala.collection.immutable.GenerateVectorBenchmark2Charts test/benchmarks/vector-bench.csv test/benchmarks/vector-bench-data.js + */ +object GenerateVectorBenchmark2Charts extends App { + + case class Result(name: String, score: Double, error: Double, size: Int) + + def load(path: String): Map[String, IndexedSeq[Result]] = { + val f = new File(path) + if(f.getName.endsWith("*")) { + val dir = f.getParentFile + val prefix = f.getName.init + val files = dir.listFiles().toSeq.filter(_.getName.startsWith(prefix)) + files.foldLeft(Map.empty[String, IndexedSeq[Result]])({ (m, f) => m ++ load(f) }) + } else load(f) + } + + def load(file: File): Map[String, IndexedSeq[Result]] = { + println(s"Loading $file...") + Files.readAllLines(file.toPath).asScala.drop(1).map { s => + val a = s.split(',') + def unquote(s: String): String = s.substring(1, s.length-1) + def local(s: String): String = { + val i = s.lastIndexOf('.') + if(i < 0) s else s.substring(i+1) + } + Result(local(unquote(a(0))), a(4).toDouble, a(5).toDouble, a(7).toInt) + }.toIndexedSeq.groupBy(_.name) + } + + val data = args.toSeq.init.foldLeft(Map.empty[String, IndexedSeq[Result]])({ (m, s) => m ++ load(s) }) + val fmt3 = new DecimalFormat("###,###.###") + + def fmtTime(ns: Double, by: Double, withUnit: Boolean): String = { + val (s, u) = + if(by >= 1000000000d) (fmt3.format(ns/1000000000d), "s") + else if(by >= 1000000d) (fmt3.format(ns/1000000d), "ms") + else if(by >= 1000d) (fmt3.format(ns/1000d), "μs") + else (fmt3.format(ns), "ns") + if(withUnit) s"$s $u" + else s + } + + def fmtSize(i: Int): String = { + if(i >= 1000000000) s"${i/1000000000}B" + else if(i >= 1000000) s"${i/1000000}M" + else if(i >= 1000) s"${i/1000}K" + else s"$i" + } + + def printChartData(out: PrintWriter, name: String, rss: 
IndexedSeq[IndexedSeq[Result]], seriesNames: IndexedSeq[String]): Unit = { + println(s"""drawChart(new ChartData("$name", benchmarkData.$name));""") + val sizes = rss.flatten.map(_.size).toSet.toIndexedSeq.sorted + val bySize = rss.map(_.iterator.map(r => (r.size, r)).toMap) + val benchmarkNames = rss.map(_.head.name) + + val minScore = rss.flatten.map(_.score).min + val maxScore = rss.flatten.map(_.score).max + var timeFactor = + if(minScore > 1000000d) 1000000L + else if(minScore > 1000d) 1000L + else 1L + val timeUnit = timeFactor match { + case 1L => "ns" + case 1000L => "μs" + case 1000000L => "ms" + } + + out.println(s" $name: {") + out.println(s" rows: [") + var first = true + sizes.foreach { size => + if(!first) out.println(",") + else first = false + val sizeStr = fmtSize(size) + val forSize = bySize.map(_.get(size)) + val minScore = forSize.map(_.map(_.score)).flatten.min + val line = forSize.zipWithIndex.map { case (ro, i) => ro.map { r => + Seq( + r.score/timeFactor, + (r.score-r.error)/timeFactor, + (r.score+r.error)/timeFactor, + "\"Size: " + sizeStr + "
    " + seriesNames(i) + ": " + fmtTime(r.score, minScore, true) + " ± " + fmtTime(r.error, minScore, false) + "\"" + ) + }.getOrElse(Seq(null, null, null, null)) }.flatten + out.print(s" [$size, ${line.mkString(", ")}]") + } + out.println() + out.println(" ],") + out.println(" names: [" + benchmarkNames.map(s => "\""+s+"\"").mkString(", ") + "],") + out.println(" timeUnit: \""+timeUnit+"\"") + out.print(" }") + } + + val baseNames = data.keySet.filter(_.startsWith("nv")).map(_.drop(2)).toSeq.sorted + val comparisons = baseNames.map { s => + data.get("v"+s).map(v => (s, data("nv"+s), v)) + }.flatten + + val out = new PrintWriter(new BufferedWriter(new FileWriter(args(args.length-1)))) + out.println("var benchmarkData = {") + + var first = true + val seriesNames = IndexedSeq("Old Vector", "New Vector") + for((baseName, nvRes, vRes) <- comparisons) { + if(!first) out.println(",") + else first = false + printChartData(out, baseName, IndexedSeq(vRes, nvRes), seriesNames) + } + + out.println() + out.println("};") + out.close() +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala index 676f4cfc110f..c66079e5a157 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala @@ -2,6 +2,8 @@ package scala.collection.immutable import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ import java.util.concurrent.TimeUnit @BenchmarkMode(Array(Mode.AverageTime)) @@ -14,48 +16,78 @@ import java.util.concurrent.TimeUnit class HashMapBenchmark { @Param(Array("10", "100", "1000")) var size: Int = _ + @Param(Array("true")) + var useMissingValues = true + @Param(Array("false")) + var stringsOnly = false var existingKeys: Array[Any] = _ var missingKeys: Array[Any] = _ 
@Setup(Level.Trial) def initKeys(): Unit = { existingKeys = (0 to size).map(i => (i % 4) match { + case _ if stringsOnly => i.toString case 0 => i.toString case 1 => i.toChar case 2 => i.toDouble case 3 => i.toInt }).toArray - missingKeys = (size to 2 * size).toArray + missingKeys = (size to 2 * size).toArray.map(_.toString) } - var map: collection.immutable.HashMap[Any, Any] = null + var map: collection.immutable.Map[Any, Any] = null + + var map2: collection.immutable.Map[Any, Any] = null @Setup(Level.Trial) def initialize = { - map = collection.immutable.HashMap(existingKeys.map(x => (x, x)) : _*) + map = collection.immutable.Map(existingKeys.map(x => (x, x)) : _*) + map2 = collection.immutable.Map(existingKeys.splitAt(10)._1.map(x => (x, (x, x))) ++ missingKeys.map(x => (x, x)) : _*) + } + + @Benchmark def concat(bh: Blackhole): Unit = { + bh.consume(map concat map2) } @Benchmark def contains(bh: Blackhole): Unit = { - var i = 0; + var i = 0 while (i < size) { bh.consume(map.contains(existingKeys(i))) - bh.consume(map.contains(missingKeys(i))) + if (useMissingValues) { + bh.consume(map.contains(missingKeys(i))) + } i += 1 } } @Benchmark def get(bh: Blackhole): Unit = { - var i = 0; + var i = 0 while (i < size) { bh.consume(map.get(existingKeys(i))) - bh.consume(map.get(missingKeys(i))) + if (useMissingValues) { + bh.consume(map.get(missingKeys(i))) + } i += 1 } } - @Benchmark def transform(): Unit = { - map.transform((_, _) => "") + @Benchmark def getOrElse(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(map.getOrElse(existingKeys(i), "")) + if (useMissingValues) { + bh.consume(map.getOrElse(missingKeys(i), "")) + } + i += 1 + } } - @Benchmark def transformConserve(): Unit = { - map.transform((_, v) => v) + @Benchmark def updated(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(map.updated(existingKeys(i), "")) + if (useMissingValues) { + bh.consume(map.updated(missingKeys(i), "")) + } + i += 1 + } } } diff --git 
a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmarkData.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmarkData.scala deleted file mode 100644 index 0b18d4d9867d..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmarkData.scala +++ /dev/null @@ -1,14 +0,0 @@ -package scala.collection.immutable - - -object HashMapBenchmarkData { - def apply(hashCode: Int, data: String) = new HashMapBenchmarkData(hashCode, data.intern()) -} -class HashMapBenchmarkData private (override val hashCode: Int, val data: String) { - override def equals(obj: Any): Boolean = obj match { - case that: HashMapBenchmarkData => this.hashCode == that.hashCode && (this.data eq that.data) - case _ => false - } - - override def toString: String = s"$hashCode-$data" -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBuilderBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBuilderBenchmark.scala deleted file mode 100644 index 128b720c4d66..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBuilderBenchmark.scala +++ /dev/null @@ -1,218 +0,0 @@ -package scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ - -//typically run with -// bench/jmh:run scala.collection.immutable. 
HashMapBuilder --prof gc --rf csv - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -abstract class HashMapBuilderBenchmark { - @Param(Array( - "10", - "100", - "1000", - "10000" - )) - var size: Int = _ - @Param(Array("true", "false")) - var colliding: Boolean = _ - - @Param(Array("Map+=", "Map++=", "HashMap+=", "HashMap++=")) - var op: String = _ - var operation: (Blackhole, Map[HashMapBenchmarkData, String], Map[HashMapBenchmarkData, String]) => Any = _ - - // base data of specified size. All values are distinct - var baseData: Array[HashMap[HashMapBenchmarkData, String]] = _ - // overlap(i) contains baseData(i) .. baseData(i+9) but with no structural sharing - var overlap: Array[HashMap[HashMapBenchmarkData, String]] = _ - // overlap2(i) contains the same data as overlap(i) but with no structural sharing - var overlap2: Array[HashMap[HashMapBenchmarkData, String]] = _ - // shared(i) contains baseData(i) .. 
baseData(i+9) but with structural sharing, both to the base data and preceding/subsequent entries - var shared: Array[HashMap[HashMapBenchmarkData, String]] = _ - - @Setup(Level.Trial) def initKeys(): Unit = { - operation = op match { - case "Map+=" => operationMapPlusEquals - case "Map++=" => operationMapPlusPlusEquals - case "HashMap+=" => operationHashMapPlusEquals - case "HashMap++=" => operationHashMapPlusPlusEquals - } - - def generate(prefix: String, size: Int) = { - Array.tabulate(30)(i => (0 until size).map { k => - val data = s"key $i $k" - val hash = if (colliding) (k >> 2) * i else data.hashCode - HashMapBenchmarkData(hash, data) -> s"value $i $k" - }(scala.collection.breakOut): HashMap[HashMapBenchmarkData, String]) - } - - baseData = generate("", size) - - overlap = new Array[HashMap[HashMapBenchmarkData, String]](baseData.length - 10) - overlap2 = new Array[HashMap[HashMapBenchmarkData, String]](baseData.length - 10) - shared = new Array[HashMap[HashMapBenchmarkData, String]](baseData.length - 10) - for (i <- 0 until baseData.length - 10) { - var s1 = HashMap.empty[HashMapBenchmarkData, String] - var s2 = HashMap.empty[HashMapBenchmarkData, String]; - for (j <- 0 until 10) { - baseData(j) foreach { - x => - s1 += x - s2 += x - } - } - overlap(i) = s1 - overlap2(i) = s2 - - } - def base (i:Int) = { - baseData(if (i < 0) baseData.length+i else i) - } - shared(0) = (-10 to (0, 1)).foldLeft (base(-10)) {case (a, b) => a ++ base(b)} - for (i <- 1 until baseData.length - 10) { - shared(i) = shared(i - 1) -- base(i - 10).keys ++ base(i) - } - } - def operationMapPlusEquals(bh: Blackhole, map1: Map[HashMapBenchmarkData, String], map2: Map[HashMapBenchmarkData, String]) = { - val builder = Map.newBuilder[HashMapBenchmarkData, String] - builder ++= map1 - map2 foreach { - builder += _ - } - bh.consume(builder.result()) - } - def operationHashMapPlusEquals(bh: Blackhole, map1: Map[HashMapBenchmarkData, String], map2: Map[HashMapBenchmarkData, String]) = { - val 
builder = HashMap.newBuilder[HashMapBenchmarkData, String] - builder ++= map1 - map2 foreach { - builder += _ - } - bh.consume(builder.result()) - } - def operationMapPlusPlusEquals(bh: Blackhole, map1: Map[HashMapBenchmarkData, String], map2: Map[HashMapBenchmarkData, String]) = { - val builder = Map.newBuilder[HashMapBenchmarkData, String] - builder ++= map1 - builder ++= map2 - bh.consume(builder.result()) - } - def operationHashMapPlusPlusEquals(bh: Blackhole, map1: Map[HashMapBenchmarkData, String], map2: Map[HashMapBenchmarkData, String]) = { - val builder = HashMap.newBuilder[HashMapBenchmarkData, String] - builder ++= map1 - builder ++= map2 - bh.consume(builder.result()) - } -} - -class HashMapBuilderUnsharedBenchmark extends HashMapBuilderBenchmark { - - @OperationsPerInvocation(30) - @Benchmark def opDataWithEmpty(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, baseData(i), HashMap.empty) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opEmptyWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, HashMap.empty, baseData(i)) - i += 1 - } - } - @OperationsPerInvocation(30) - @Benchmark def opDataWithMapEmpty(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, baseData(i), Map.empty) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opMapEmptyWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, Map.empty, baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(29) - @Benchmark def opWithDistinct(bh: Blackhole): Unit = { - var i = 0; - while (i < 29) { - operation(bh, baseData(i), baseData(i+1)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opDataWithContainedUnshared(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, overlap(i), baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opDataWithContainedShared(bh: Blackhole): Unit = { - var i = 0; - 
while (i < 20) { - operation(bh, shared(i), baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opContainedUnsharedWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, baseData(i), overlap(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opContainedSharedWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, baseData(i), shared(i)) - i += 1 - } - } -} -class HashMapBuilderSharedBenchmark extends HashMapBuilderBenchmark { - @Param(Array("0", "20", "40", "60", "80", "90", "100")) - var sharing: Int = _ - - @OperationsPerInvocation(10) - @Benchmark def opWithOverlapUnshared(bh: Blackhole): Unit = { - var i = 10; - while (i < 20) { - operation(bh, overlap(i - (10 - sharing / 10)), overlap2(i)) - i += 1 - } - } - - @OperationsPerInvocation(10) - @Benchmark def opWithOverlapShared(bh: Blackhole): Unit = { - var i = 10; - while (i < 20) { - operation(bh, shared(i - (10 - sharing / 10)), shared(i)) - i += 1 - } - } -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBulkBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBulkBenchmark.scala index 49083c7723ed..9c653f359046 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBulkBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBulkBenchmark.scala @@ -155,11 +155,14 @@ abstract class HashMapBaseBulkBenchmark { } def generate(prefix: String, size: Int) = { - Array.tabulate(30)(i => (0 until size).map { k => - val data = s"key $i $k" - val hash = if (colliding) (k >> 2) * i else data.hashCode - HashMapBenchmarkData(hash, data) -> s"value $i $k" - }(scala.collection.breakOut): HashMap[HashMapBenchmarkData, String]) + Array.tabulate(30) { i => + val tuples = (0 until size).map { k => + val data = s"key $i $k" + val hash = if (colliding) (k >> 2) * i else data.hashCode + HashMapBenchmarkData(hash, data) 
-> s"value $i $k" + } + HashMap.from(tuples) + } } baseData = generate("", size) @@ -184,7 +187,7 @@ abstract class HashMapBaseBulkBenchmark { def base (i:Int) = { baseData(if (i < 0) baseData.length+i else i) } - shared(0) = (-10 to (0, 1)).foldLeft (base(-10)) {case (a, b) => a ++ base(b)} + shared(0) = (-10.to(0, 1)).foldLeft(base(-10)) { case (a, b) => a ++ base(b) } for (i <- 1 until baseData.length - 10) { shared(i) = shared(i - 1) -- base(i - 10).keys ++ base(i) } @@ -231,3 +234,15 @@ abstract class HashMapBaseBulkBenchmark { bh.consume(map1.filterKeys(map2.keySet)) } } +object HashMapBenchmarkData { + def apply(hashCode: Int, data: String) = new HashMapBenchmarkData(hashCode, data.intern()) +} +class HashMapBenchmarkData private (override val hashCode: Int, val data: String) { + override def equals(obj: Any): Boolean = obj match { + case that: HashMapBenchmarkData => this.hashCode == that.hashCode && (this.data eq that.data) + case _ => false + } + + override def toString: String = s"$hashCode-$data" +} + diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapEqualsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapEqualsBenchmark.scala deleted file mode 100644 index dd2194515bb2..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapEqualsBenchmark.scala +++ /dev/null @@ -1,52 +0,0 @@ -package scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -class HashMapEqualsBenchmark { - @Param(Array("10", "100", "1000", "10000", "100000", "1000000")) - var size: Int = _ - - var base: HashMap[String, String] = _ - var notShared: HashMap[String, String] = _ - var identical: HashMap[String, String] = _ - var shared: 
HashMap[String, String] = _ - var differentShared: HashMap[String, String] = _ - - @Setup(Level.Trial) def initKeys(): Unit = { - base = (1 to size).map { i => s"key $i" -> s"value $i" }(scala.collection.breakOut) - notShared = (1 to size).map { i => s"key $i" -> s"value $i" }(scala.collection.breakOut) - identical = base - shared = (base - base.head._1) + base.head - differentShared = (base - base.last._1) + (base.last._1 -> (base.last._2 + "xx")) - } - - - @Benchmark - def nonAllocatingIdentical() = { - base == base - } - - @Benchmark - def nonAllocatingNotShared() = { - base == notShared - } - - @Benchmark - def nonAllocatingShared() { - base == shared - } - @Benchmark - def nonAllocatingDifferentShared() { - base == differentShared - } -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBenchmarkData.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBenchmarkData.scala deleted file mode 100644 index a8e1b3dba448..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBenchmarkData.scala +++ /dev/null @@ -1,14 +0,0 @@ -package scala.collection.immutable - -object HashSetBenchmarkData { - def apply(hashCode: Int, data: String) = new HashSetBenchmarkData(hashCode, data.intern()) -} - -class HashSetBenchmarkData private(override val hashCode: Int, val data: String) { - override def equals(obj: Any): Boolean = obj match { - case that: HashSetBenchmarkData => this.hashCode == that.hashCode && (this.data eq that.data) - case _ => false - } - - override def toString: String = s"$hashCode-$data" -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBuilderBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBuilderBenchmark.scala deleted file mode 100644 index ed80beaa5ec4..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBuilderBenchmark.scala +++ /dev/null @@ -1,249 +0,0 @@ -package 
scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ - -//typically run with -// bench/jmh:run scala.collection.immutable.HashSetBuilder -prof gc -rf csv - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -abstract class HashSetBuilderBaseBenchmark { - @Param(Array( - "10", - "100", - "1000", - "10000")) - var size: Int = _ - @Param(Array("true", "false")) - var colliding: Boolean = _ - - @Param(Array("Set+=", "Set++=", "HashSet+=", "HashSet++=")) - var op: String = _ - var operation: (Blackhole, Set[HashSetBenchmarkData], Set[HashSetBenchmarkData]) => Any = _ - - // base data of specified size. All values are distinct - var baseData: Array[HashSet[HashSetBenchmarkData]] = _ - // overlap(i) contains baseData(i) .. baseData(i+9) but with no structural sharing - var overlap: Array[HashSet[HashSetBenchmarkData]] = _ - // overlap2(i) contains the same data as overlap(i) but with no structural sharing - var overlap2: Array[HashSet[HashSetBenchmarkData]] = _ - // shared(i) contains baseData(i) .. 
baseData(i+9) but with structural sharing, both to the base data and preceding/subsequent entries - var shared: Array[HashSet[HashSetBenchmarkData]] = _ - - - @Setup(Level.Trial) def initKeys(): Unit = { - operation = op match { - case "Set+=" => operationSetPlusEquals - case "Set++=" => operationSetPlusPlusEquals - case "HashSet+=" => operationHashSetPlusEquals - case "HashSet++=" => operationHashSetPlusPlusEquals - } - - def generate(prefix: String, size: Int) = { - Array.tabulate(30)(i => (0 until size).map { k => - val data = s"key $i $k" - val hash = if (colliding) (k >> 2) * i else data.hashCode - HashSetBenchmarkData(hash, data) - }(scala.collection.breakOut): HashSet[HashSetBenchmarkData]) - } - - baseData = generate("", size) - - overlap = new Array[HashSet[HashSetBenchmarkData]](baseData.length - 10) - overlap2 = new Array[HashSet[HashSetBenchmarkData]](baseData.length - 10) - shared = new Array[HashSet[HashSetBenchmarkData]](baseData.length - 10) - for (i <- 0 until baseData.length - 10) { - var s1: HashSet[HashSetBenchmarkData] = HashSet.empty[HashSetBenchmarkData] - var s2: HashSet[HashSetBenchmarkData] = HashSet.empty[HashSetBenchmarkData] - for (j <- i until i + 10) { - baseData(j) foreach { - x => - s1 += x - s2 += x - } - } - overlap(i) = s1 - overlap2(i) = s2 - } - - def base(i: Int) = { - baseData(if (i < 0) baseData.length + i else i) - } - - shared(0) = (-10 to(0, 1)).foldLeft(base(-10)) { case (a, b) => a ++ base(b) } - for (i <- 1 until shared.length) { - shared(i) = shared(i - 1) -- base(i - 10) ++ base(i) - } - - } - - def operationSetPlusEquals(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - val builder = Set.newBuilder[HashSetBenchmarkData] - builder ++= set1 - set2 foreach { - builder += _ - } - bh.consume(builder.result) - } - - def operationSetPlusPlusEquals(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - val builder = Set.newBuilder[HashSetBenchmarkData] - 
builder ++= set1 - builder ++= set2 - bh.consume(builder.result) - } - - def operationHashSetPlusEquals(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - val builder = Set.newBuilder[HashSetBenchmarkData] - builder ++= set1 - set2 foreach { - builder += _ - } - bh.consume(builder.result) - } - - def operationHashSetPlusPlusEquals(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - val builder = Set.newBuilder[HashSetBenchmarkData] - builder ++= set1 - builder ++= set2 - bh.consume(builder.result) - } -} - -class HashSetBuilderUnsharedBenchmark extends HashSetBuilderBaseBenchmark { - - @OperationsPerInvocation(30) - @Benchmark def opDataWithEmpty(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, baseData(i), HashSet.empty) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opEmptyWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, HashSet.empty, baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opDataWithSetEmpty(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, baseData(i), Set.empty) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opSetEmptyWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, Set.empty, baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(29) - @Benchmark def opWithDistinct(bh: Blackhole): Unit = { - var i = 0; - while (i < 29) { - operation(bh, baseData(i), baseData(i + 1)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opDataWithContainedUnshared(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, overlap(i), baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opDataWithContainedShared(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, shared(i), baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - 
@Benchmark def opContainedUnsharedWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, baseData(i), overlap(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opContainedSharedWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, baseData(i), shared(i)) - i += 1 - } - } -} - -class HashSetBuilderSharedBenchmark extends HashSetBuilderBaseBenchmark { - @Param(Array("0", "20", "40", "60", "80", "90", "100")) - var sharing: Int = _ - - @OperationsPerInvocation(10) - @Benchmark def opWithOverlapUnshared(bh: Blackhole): Unit = { - var i = 10; - while (i < 20) { - operation(bh, overlap(i - (10 - sharing / 10)), overlap2(i)) - i += 1 - } - } - - @OperationsPerInvocation(10) - @Benchmark def opWithOverlapShared(bh: Blackhole): Unit = { - var i = 10; - while (i < 20) { - operation(bh, shared(i - (10 - sharing / 10)), shared(i)) - i += 1 - } - } -} - - -//for testing, debugging, optimising etc -object TestHashSetBenchmark extends App { - - val bh = new Blackhole("Today's password is swordfish. 
I understand instantiating Blackholes directly is dangerous.") - val test = new HashSetBuilderUnsharedBenchmark - - test.size = 10000 - test.op = "++" - test.colliding = true - test.initKeys() - while (true) { - var j = 0 - val start = System.nanoTime() - while (j < 100) { - test.opDataWithContainedUnshared(bh) - j += 1 - } - val end = System.nanoTime() - println((end - start) / 1000000) - } -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBulkBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBulkBenchmark.scala deleted file mode 100644 index 60ecce796790..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetBulkBenchmark.scala +++ /dev/null @@ -1,249 +0,0 @@ -package scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ - -class HashSetBulkUnsharedBenchmark extends HashSetBaseBulkBenchmark { - - @OperationsPerInvocation(30) - @Benchmark def opDataWithEmpty(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, baseData(i), HashSet.empty) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opEmptyWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, HashSet.empty, baseData(i)) - i += 1 - } - } - @OperationsPerInvocation(30) - @Benchmark def opDataWithSetEmpty(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, baseData(i), Set.empty) - i += 1 - } - } - - @OperationsPerInvocation(30) - @Benchmark def opSetEmptyWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 30) { - operation(bh, Set.empty, baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(29) - @Benchmark def opWithDistinct(bh: Blackhole): Unit = { - var i = 0; - while (i < 29) { - operation(bh, baseData(i), baseData(i+1)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opDataWithContainedUnshared(bh: Blackhole): Unit 
= { - var i = 0; - while (i < 20) { - operation(bh, overlap(i), baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opDataWithContainedShared(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, shared(i), baseData(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opContainedUnsharedWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, baseData(i), overlap(i)) - i += 1 - } - } - - @OperationsPerInvocation(20) - @Benchmark def opContainedSharedWithData(bh: Blackhole): Unit = { - var i = 0; - while (i < 20) { - operation(bh, baseData(i), shared(i)) - i += 1 - } - } -} -class HashSetBulkSharedBenchmark extends HashSetBaseBulkBenchmark { - @Param(Array("0", "20", "40", "60", "80", "90", "100")) - var sharing: Int = _ - - @OperationsPerInvocation(10) - @Benchmark def opWithOverlapUnshared(bh: Blackhole): Unit = { - var i = 10; - while (i < 20) { - operation(bh, overlap(i - (10 - sharing / 10)), overlap2(i)) - i += 1 - } - } - - @OperationsPerInvocation(10) - @Benchmark def opWithOverlapShared(bh: Blackhole): Unit = { - var i = 10; - while (i < 20) { - operation(bh, shared(i - (10 - sharing / 10)), shared(i)) - i += 1 - } - } -} - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -abstract class HashSetBaseBulkBenchmark { - @Param(Array( - "10", - "100", - "1000", - "10000")) - var size: Int = _ - @Param(Array("true", "false")) - var colliding: Boolean = _ - - @Param(Array("+", "-", "++", "--", "union", "diff", "intersect", "subsetOf", "sameElements")) - var op: String = _ - var operation: (Blackhole, Set[HashSetBenchmarkData], Set[HashSetBenchmarkData]) => Any = _ - - // base data of specified size. All values are distinct - var baseData: Array[HashSet[HashSetBenchmarkData]] = _ - // overlap(i) contains baseData(i) .. 
baseData(i+9) but with no structural sharing - var overlap: Array[HashSet[HashSetBenchmarkData]] = _ - // overlap2(i) contains the same data as overlap(i) but with no structural sharing - var overlap2: Array[HashSet[HashSetBenchmarkData]] = _ - // shared(i) contains baseData(i) .. baseData(i+9) but with structural sharing, both to the base data and preceding/subsequent entries - var shared: Array[HashSet[HashSetBenchmarkData]] = _ - - - @Setup(Level.Trial) def initKeys(): Unit = { - operation = op match { - case "+" => operationPlus - case "-" => operationMinus - case "++" => operationPlusPlus - case "--" => operationMinusMinus - case "union" => operationUnion - case "diff" => operationDiff - case "intersect" => operationIntersect - case "subsetOf" => operationSubsetOf - case "sameElements" => operationSameElements - } - - def generate(prefix: String, size: Int) = { - Array.tabulate(30)(i => (0 until size).map { k => - val data = s"key $i $k" - val hash = if (colliding) (k >> 2) * i else data.hashCode - HashSetBenchmarkData(hash, data) - }(scala.collection.breakOut): HashSet[HashSetBenchmarkData]) - } - - baseData = generate("", size) - - overlap = new Array[HashSet[HashSetBenchmarkData]](baseData.length - 10) - overlap2 = new Array[HashSet[HashSetBenchmarkData]](baseData.length - 10) - shared = new Array[HashSet[HashSetBenchmarkData]](baseData.length - 10) - for (i <- 0 until baseData.length - 10) { - var s1: HashSet[HashSetBenchmarkData] = HashSet.empty[HashSetBenchmarkData] - var s2: HashSet[HashSetBenchmarkData] = HashSet.empty[HashSetBenchmarkData] - for (j <- i until i + 10) { - baseData(j) foreach { - x => - s1 += x - s2 += x - } - } - overlap(i) = s1 - overlap2(i) = s2 - } - def base (i:Int) = { - baseData(if (i < 0) baseData.length+i else i) - } - shared(0) = (-10 to (0, 1)).foldLeft (base(-10)) {case (a, b) => a ++ base(b)} - for (i <- 1 until shared.length) { - shared(i) = shared(i - 1) -- base(i - 10) ++ base(i) - } - - } - - def operationPlus(bh: 
Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - var res = set1 - set2 foreach { - res += _ - } - bh.consume(res) - } - def operationMinus(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - var res = set1 - set2 foreach { - res -= _ - } - bh.consume(res) - } - def operationPlusPlus(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1 ++ set2) - } - def operationMinusMinus(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1 -- set2) - } - def operationUnion(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1.union(set2)) - } - def operationDiff(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1.diff(set2)) - } - def operationIntersect(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1.intersect(set2)) - } - def operationSubsetOf(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1.subsetOf(set2)) - } - def operationSameElements(bh: Blackhole, set1: Set[HashSetBenchmarkData], set2: Set[HashSetBenchmarkData]) = { - bh.consume(set1.sameElements(set2)) - } -} -//for testing, debugging, optimising etc -object HashSetBulkBenchmarkTestApp extends App { - - val bh = new Blackhole("Today's password is swordfish. 
I understand instantiating Blackholes directly is dangerous.") - val test = new HashSetBulkUnsharedBenchmark - - test.size = 10000 - test.op = "++" - test.colliding = true - test.initKeys() - while (true) { - var j = 0 - val start = System.nanoTime() - while (j < 100) { - test.opDataWithContainedUnshared(bh) - j += 1 - } - val end = System.nanoTime() - println((end - start) / 1000000) - } -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetEqualsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetEqualsBenchmark.scala deleted file mode 100644 index f9c73ff0eda4..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/HashSetEqualsBenchmark.scala +++ /dev/null @@ -1,53 +0,0 @@ -package scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -class HashSetEqualsBenchmark { - @Param(Array("10", "100", "1000", "10000", "100000", "1000000")) - var size: Int = _ - - var base: HashSet[String] = _ - var notShared: HashSet[String] = _ - var identical: HashSet[String] = _ - var shared: HashSet[String] = _ - var differentShared: HashSet[String] = _ - - @Setup(Level.Trial) def initKeys(): Unit = { - base = (1 to size).map { i => s"key $i" }(scala.collection.breakOut) - notShared = (1 to size).map { i => s"key $i" }(scala.collection.breakOut) - identical = base - shared = (base - base.head) + base.head - differentShared = (base - base.last) + (base.last + "xx") - } - - - @Benchmark - def bmIdentical() = { - base == base - base .equals(base) - } - - @Benchmark - def bmNotShared() = { - base == notShared - } - - @Benchmark - def bmShared() { - base == shared - } - @Benchmark - def bmDifferentShared() { - base == differentShared - } -} 
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/IndexedSeqEqualsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/IndexedSeqEqualsBenchmark.scala new file mode 100644 index 000000000000..0f51558e3e7d --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/IndexedSeqEqualsBenchmark.scala @@ -0,0 +1,53 @@ +package scala.collection.immutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10000) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class IndexedSeqEqualsBenchmark { +// @Param(Array("0", "1", "2", "3", "4", "6", "8", "10", "12", "14", "16" +// , "20", "24", "28", "32", "40", "48", "56", "64", +// "128", "256", "512", "1024", "2048", "65536")) + @Param(Array("128")) + var size: Int = _ + + @Param(Array("ArraySeq")) + var type1: String = _ + @Param(Array("ArraySeq")) + var type2: String = _ + + var value: IndexedSeq[Any] = _ + var valueEqual: IndexedSeq[Any] = _ + var valueNotEqualFirst: IndexedSeq[Any] = _ + var valueNotEqualLast: IndexedSeq[Any] = _ + var valueNotEqualLength: IndexedSeq[Any] = _ + + @Setup(Level.Trial) def init(): Unit = { + def toSeq[T](data: Array[T], typeStr: String):IndexedSeq[T] = typeStr match { + case "Vector" => data.to(Vector) + case "ArraySeq" => + ArraySeq.unsafeWrapArray(data.clone) + } + + val data = Array.tabulate(size)(_.toString) + value = toSeq(data, type1) + valueEqual = toSeq(data, type2) + + valueNotEqualLength = valueEqual + "xx" + valueNotEqualFirst = if (value.isEmpty) valueEqual else valueEqual.updated(0, "xx") + valueNotEqualLast = if (value.isEmpty) valueEqual else valueEqual.updated(valueEqual.length - 1, "xx") + } + @Benchmark def equal = value == valueEqual +// @Benchmark def 
notEqualLength = value == valueNotEqualLength +// @Benchmark def notEqualFirst = value == valueNotEqualFirst +// @Benchmark def notEqualLast = value == valueNotEqualLast +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala index 36e251899369..c00b2d6be80d 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala @@ -24,6 +24,8 @@ class ListBenchmark { var mid: Content = _ var last: Content = _ var replacement: Content = _ + var firstHalf: List[Content] = _ + var lastHalf: List[Content] = _ @Setup(Level.Trial) def initKeys(): Unit = { @@ -31,6 +33,8 @@ class ListBenchmark { mid = Content(size / 2) last = Content(Math.max(0,size -1)) replacement = Content(size * 2 + 1) + firstHalf = values.take(size / 2) + lastHalf = values.drop(size / 2) } @Benchmark def filter_includeAll: Any = { @@ -67,4 +71,47 @@ class ListBenchmark { @Benchmark def mapConserve_modifyMid: Any = { values.mapConserve(x => if (x == mid) replacement else x) } + @Benchmark def partition_includeAll: Any = { + values.partition(v => true) + } + + @Benchmark def partition_excludeAll: Any = { + values.partition(_ => false) + } + + @Benchmark def partition_exc_mid: Any = { + values.partition(v => v.value != mid.value) + } + + @Benchmark def partition_from_mid: Any = { + values.partition(v => v.value <= mid.value) + } + + @Benchmark def partition_exc_last: Any = { + values.partition(v => v.value != last.value) + } + + @Benchmark def diff_single_mid: Any = { + values.diff(List(mid)) + } + + @Benchmark def diff_single_last: Any = { + values.diff(List(last)) + } + + @Benchmark def diff_notIncluded: Any = { + values.diff(List(Content(-1))) + } + + @Benchmark def diff_identical: Any = { + values.diff(values) + } + + @Benchmark def diff_first_half: Any = { + values.diff(firstHalf) 
+ } + + @Benchmark def diff_last_half: Any = { + values.diff(lastHalf) + } } diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ListMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ListMapBenchmark.scala new file mode 100644 index 000000000000..246a086e9729 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ListMapBenchmark.scala @@ -0,0 +1,33 @@ +package scala.collection.immutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ListMapBenchmark { + @Param(Array("1", "10", "100", "1000")) + var size: Int = _ + + var kvs: Iterable[(Int, Int)] = _ + + @Setup(Level.Trial) + def initKeys(): Unit = { + val unique = (0 to size).map(i => i -> i) + kvs = unique ++ unique + } + + @Benchmark + def builder(bh: Blackhole): Unit = { + val b = new ListMapBuilder[Int, Int] + bh.consume(b.addAll(kvs).result()) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/MapAppendBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/MapAppendBenchmark.scala new file mode 100644 index 000000000000..f7669233646a --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/MapAppendBenchmark.scala @@ -0,0 +1,62 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.immutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class MapAppendBenchmark { + + @Param(Array("10", "100", "1000")) + var size: Int = _ + + @Benchmark def plus1(bh: Blackhole): Unit = { + var m = Map.empty[Int, Unit] + var i = 0 + while(i < size) { + m = m + ((i -> ())) + (((i+size) -> ())) + i += 1 + } + bh.consume(m) + } + + @Benchmark def plus2(bh: Blackhole): Unit = { + var m = Map.empty[Int, Unit] + var i = 0 + while(i < size) { + m = m.+((i -> ()), ((i+size) -> ())) + i += 1 + } + bh.consume(m) + } + + @Benchmark def plus2Empty(bh: Blackhole): Unit = { + val empty = IndexedSeq.empty + var m = Map.empty[Int, Unit] + var i = 0 + while(i < size) { + m = m.+((i -> ()), ((i+size) -> ()), empty: _*) + i += 1 + } + bh.consume(m) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/OldVector.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/OldVector.scala new file mode 100644 index 000000000000..577f93ff34ce --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/OldVector.scala @@ -0,0 +1,1311 @@ +// This is a copy of the original Scala 2.13 Vector implementation, used by VectorBenchmark2 + +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.runtime.Statics.releaseFence + +/** $factoryInfo + * @define Coll `OldVector` + * @define coll vector + */ +@SerialVersionUID(3L) +object OldVector extends StrictOptimizedSeqFactory[OldVector] { + + def empty[A]: OldVector[A] = NIL + + def from[E](it: collection.IterableOnce[E]): OldVector[E] = + it match { + case as: ArraySeq[E] if as.length <= 32 => + if (as.isEmpty) NIL + else { + val unsafeArray = as.unsafeArray + val len = unsafeArray.length + val v = new OldVector(0, len, 0) + val display0 = new Array[Any](len) + if (unsafeArray.isInstanceOf[Array[AnyRef]]) { + System.arraycopy(unsafeArray, 0, display0, 0, len) + } else { + var i = 0 + while (i < len) { + display0(i) = unsafeArray(i) + i += 1 + } + } + v.display0 = display0.asInstanceOf[Array[AnyRef]] + v.depth = 1 + releaseFence() + v + } + case v: OldVector[E] => v + case _ => + val knownSize = it.knownSize + + if (knownSize == 0) empty[E] + else if (knownSize > 0 && knownSize <= 32) { + val display0 = new Array[Any](knownSize) + it.iterator.copyToArray(display0) + val v = new OldVector[E](0, knownSize, 0) + v.depth = 1 + v.display0 = display0.asInstanceOf[Array[AnyRef]] + releaseFence() + v + } else { + (newBuilder ++= it).result() + } + } + + def newBuilder[A]: ReusableBuilder[A, OldVector[A]] = new OldVectorBuilder[A] + + /** Creates a OldVector of one element. 
Not safe for publication, the caller is responsible for `releaseFence` */ + private def single[A](elem: A): OldVector[A] = { + val s = new OldVector[A](0, 1, 0) + s.depth = 1 + s.display0 = Array[AnyRef](elem.asInstanceOf[AnyRef]) + s + } + + @transient + private val NIL = new OldVector[Nothing](0, 0, 0) + + private val defaultApplyPreferredMaxLength: Int = + try System.getProperty("scala.collection.immutable.OldVector.defaultApplyPreferredMaxLength", + "1024").toInt + catch { + case _: SecurityException => 1024 + } + + // Constants governing concat strategy for performance + private final val Log2ConcatFaster = 5 + private final val TinyAppendFaster = 2 +} + +// in principle, most members should be private. however, access privileges must +// be carefully chosen to not prevent method inlining + +/** Vector is a general-purpose, immutable data structure. It provides random access and updates + * in effectively constant time, as well as very fast append and prepend. Because vectors strike + * a good balance between fast random selections and fast random functional updates, they are + * currently the default implementation of immutable indexed sequences. It is backed by a little + * endian bit-mapped vector trie with a branching factor of 32. Locality is very good, but not + * contiguous, which is good for very large sequences. + * + * @see [[https://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#vectors "Scala's Collection Library overview"]] + * section on `Vectors` for more information. 
+ * + * @tparam A the element type + * + * @define Coll `OldVector` + * @define coll vector + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class OldVector[+A] private[immutable] (private[collection] val startIndex: Int, private[collection] val endIndex: Int, private[immutable] val focus: Int) + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, OldVector, OldVector[A]] + with StrictOptimizedSeqOps[A, OldVector, OldVector[A]] + with IterableFactoryDefaults[A, OldVector] + with OldVectorPointer[A] + with DefaultSerializable { self => + + override def iterableFactory: SeqFactory[OldVector] = OldVector + + // Code paths that mutates `dirty` _must_ call `Statics.releaseFence()` before returning from + // the public method. + private[immutable] var dirty = false + // While most JDKs would implicit add this fence because of >= 1 final field, the spec only mandates + // it if all fields are final, so let's add this in explicitly. 
+ releaseFence() + + def length: Int = endIndex - startIndex + + private[collection] def initIterator[B >: A](s: OldVectorIterator[B]): Unit = { + s.initFrom(this) + if (dirty) s.stabilize(focus) + if (s.depth > 1) s.gotoPos(startIndex, startIndex ^ focus) + } + + override def iterator: Iterator[A] = { + if(isEmpty) + Iterator.empty + else { + val s = new OldVectorIterator[A](startIndex, endIndex) + initIterator(s) + s + } + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + var depth = -1 + val displaySource: OldVectorPointer[A] = + if (dirty) iterator.asInstanceOf[OldVectorIterator[A]] + else this + val trunk: Array[AnyRef] = + if (endIndex <= (1 << 5)) { depth = 0; displaySource.display0 } + else if (endIndex <= (1 << 10)) { depth = 1; displaySource.display1.asInstanceOf[Array[AnyRef]] } + else if (endIndex <= (1 << 15)) { depth = 2; displaySource.display2.asInstanceOf[Array[AnyRef]] } + else if (endIndex <= (1 << 20)) { depth = 3; displaySource.display3.asInstanceOf[Array[AnyRef]] } + else if (endIndex <= (1 << 25)) { depth = 4; displaySource.display4.asInstanceOf[Array[AnyRef]] } + else /* endIndex <= 1 << 30*/ { depth = 5; displaySource.display5.asInstanceOf[Array[AnyRef]] } + val s = shape.shape match { + case StepperShape.IntShape => new IntVectorStepper (startIndex, endIndex, depth, trunk) + case StepperShape.LongShape => new LongVectorStepper (startIndex, endIndex, depth, trunk) + case StepperShape.DoubleShape => new DoubleVectorStepper(startIndex, endIndex, depth, trunk) + case _ => shape.parUnbox(new AnyVectorStepper[A](startIndex, endIndex, depth, trunk)) + } + s.asInstanceOf[S with EfficientSplit] + } + + // Ideally, clients will inline calls to map all the way down, including the iterator/builder methods. + // In principle, escape analysis could even remove the iterator/builder allocations and do it + // with local variables exclusively. 
But we're not quite there yet ... + + @throws[IndexOutOfBoundsException] + def apply(index: Int): A = { + val idx = checkRangeConvert(index) + getElem(idx, idx ^ focus) + } + + @throws[IndexOutOfBoundsException] + private def checkRangeConvert(index: Int) = { + val idx = index + startIndex + if (index >= 0 && idx < endIndex) + idx + else + throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${endIndex-1})") + } + // requires structure is at pos oldIndex = xor ^ index + private final def getElem(index: Int, xor: Int): A = { + if (xor < (1 << 5)) { // level = 0 + (display0 + (index & 31).asInstanceOf[A]) + } else if (xor < (1 << 10)) { // level = 1 + (display1 + ((index >>> 5) & 31) + (index & 31).asInstanceOf[A]) + } else if (xor < (1 << 15)) { // level = 2 + (display2 + ((index >>> 10) & 31) + ((index >>> 5) & 31) + (index & 31).asInstanceOf[A]) + } else if (xor < (1 << 20)) { // level = 3 + (display3 + ((index >>> 15) & 31) + ((index >>> 10) & 31) + ((index >>> 5) & 31) + (index & 31).asInstanceOf[A]) + } else if (xor < (1 << 25)) { // level = 4 + (display4 + ((index >>> 20) & 31) + ((index >>> 15) & 31) + ((index >>> 10) & 31) + ((index >>> 5) & 31) + (index & 31).asInstanceOf[A]) + } else if (xor < (1 << 30)) { // level = 5 + (display5 + ((index >>> 25) & 31) + ((index >>> 20) & 31) + ((index >>> 15) & 31) + ((index >>> 10) & 31) + ((index >>> 5) & 31) + (index & 31).asInstanceOf[A]) + } else { // level = 6 + throw new IllegalArgumentException() + } + } + + override def updated[B >: A](index: Int, elem: B): OldVector[B] = updateAt(index, elem) + + override def take(n: Int): OldVector[A] = { + if (n <= 0) + OldVector.empty + else if (startIndex < endIndex - n) + dropBack0(startIndex + n) + else + this + } + + override def drop(n: Int): OldVector[A] = { + if (n <= 0) + this + else if (startIndex < endIndex - n) + dropFront0(startIndex + n) + else + OldVector.empty + } + + override def takeRight(n: Int): OldVector[A] = { + if (n <= 0) + 
OldVector.empty + else if (endIndex - n > startIndex) + dropFront0(endIndex - n) + else + this + } + + override def dropRight(n: Int): OldVector[A] = { + if (n <= 0) + this + else if (endIndex - n > startIndex) + dropBack0(endIndex - n) + else + OldVector.empty + } + + override def head: A = { + if (isEmpty) throw new NoSuchElementException("empty.head") + apply(0) + } + + override def tail: OldVector[A] = { + if (isEmpty) throw new UnsupportedOperationException("empty.tail") + drop(1) + } + + override def last: A = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + apply(length - 1) + } + + override def init: OldVector[A] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + dropRight(1) + } + + // appendAll (suboptimal but avoids worst performance gotchas) + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): OldVector[B] = { + import OldVector.{Log2ConcatFaster, TinyAppendFaster} + if (suffix.iterator.isEmpty) this + else { + suffix match { + case suffix: collection.Iterable[B] => + suffix.size match { + // Often it's better to append small numbers of elements (or prepend if RHS is a vector) + case n if n <= TinyAppendFaster || n < (this.size >>> Log2ConcatFaster) => + var v: OldVector[B] = this + for (x <- suffix) v = v :+ x + v + case n if this.size < (n >>> Log2ConcatFaster) && suffix.isInstanceOf[OldVector[_]] => + var v = suffix.asInstanceOf[OldVector[B]] + val ri = this.reverseIterator + while (ri.hasNext) v = ri.next() +: v + v + case _ => super.appendedAll(suffix) + } + case _ => super.appendedAll(suffix) + } + } + } + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): OldVector[B] = { + // Implementation similar to `appendAll`: when of the collections to concatenate (either `this` or `prefix`) + // has a small number of elements compared to the other, then we add them using `:+` or `+:` in a loop + import OldVector.{Log2ConcatFaster, TinyAppendFaster} + if 
(prefix.iterator.isEmpty) this + else { + prefix match { + case prefix: collection.Iterable[B] => + prefix.size match { + case n if n <= TinyAppendFaster || n < (this.size >>> Log2ConcatFaster) => + var v: OldVector[B] = this + val it = prefix.toIndexedSeq.reverseIterator + while (it.hasNext) v = it.next() +: v + v + case n if this.size < (n >>> Log2ConcatFaster) && prefix.isInstanceOf[OldVector[_]] => + var v = prefix.asInstanceOf[OldVector[B]] + val it = this.iterator + while (it.hasNext) v = v :+ it.next() + v + case _ => super.prependedAll(prefix) + } + case _ => + super.prependedAll(prefix) + } + } + } + + // semi-private api + + private[immutable] def updateAt[B >: A](index: Int, elem: B): OldVector[B] = { + val idx = checkRangeConvert(index) + val s = new OldVector[B](startIndex, endIndex, idx) + s.initFrom(this) + s.dirty = dirty + s.gotoPosWritable(focus, idx, focus ^ idx) // if dirty commit changes; go to new pos and prepare for writing + s.display0(idx & 31) = elem.asInstanceOf[AnyRef] + releaseFence() + s + } + + private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { + gotoPosWritable1(oldIndex, newIndex, xor) + } else { + gotoPosWritable0(newIndex, xor) + dirty = true + } + + private def gotoFreshPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { + gotoFreshPosWritable1(oldIndex, newIndex, xor) + } else { + gotoFreshPosWritable0(oldIndex, newIndex, xor) + dirty = true + } + + override def prepended[B >: A](value: B): OldVector[B] = { + val thisLength = length + val result = + if (depth == 1 && thisLength < 32) { + val s = new OldVector(0, thisLength + 1, 0) + s.depth = 1 + val newDisplay0 = new Array[AnyRef](thisLength + 1) + System.arraycopy(display0, startIndex, newDisplay0, 1, thisLength) + newDisplay0(0) = value.asInstanceOf[AnyRef] + s.display0 = newDisplay0 + s + } else if (thisLength > 0) { + val blockIndex = (startIndex - 1) & ~31 + val lo = (startIndex - 1) & 31 + + if (startIndex != blockIndex + 32) 
{ + val s = new OldVector(startIndex - 1, endIndex, blockIndex) + s.initFrom(this) + s.dirty = dirty + s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } else { + + val freeSpace = (1 << (5 * depth)) - endIndex // free space at the right given the current tree-structure depth + val shift = freeSpace & ~((1 << (5 * (depth - 1))) - 1) // number of elements by which we'll shift right (only move at top level) + val shiftBlocks = freeSpace >>> (5 * (depth - 1)) // number of top-level blocks + + if (shift != 0) { + // case A: we can shift right on the top level + if (depth > 1) { + val newBlockIndex = blockIndex + shift + val newFocus = focus + shift + + val s = new OldVector(startIndex - 1 + shift, endIndex + shift, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks + s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } else { + val newBlockIndex = blockIndex + 32 + val newFocus = focus + + val s = new OldVector(startIndex - 1 + shift, endIndex + shift, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.shiftTopLevel(0, shiftBlocks) // shift right by n elements + s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing + s.display0(shift - 1) = value.asInstanceOf[AnyRef] + s + } + } else if (blockIndex < 0) { + // case B: we need to move the whole structure + val move = (1 << (5 * (depth + 1))) - (1 << (5 * depth)) + val newBlockIndex = blockIndex + move + val newFocus = focus + move + + val s = new OldVector(startIndex - 1 + move, endIndex + move, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } else { + val 
newBlockIndex = blockIndex + val newFocus = focus + + val s = new OldVector(startIndex - 1, endIndex, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } + } + } else OldVector.single(value) + + releaseFence() + result + } + + override def appended[B >: A](value: B): OldVector[B] = { + val thisLength = length + val result = + if (depth == 1 && thisLength < 32) { + val s = new OldVector(0, thisLength + 1, 0) + s.depth = 1 + val newDisplay0 = new Array[AnyRef](thisLength + 1) + System.arraycopy(display0, startIndex, newDisplay0, 0, thisLength) + newDisplay0(thisLength) = value.asInstanceOf[AnyRef] + s.display0 = newDisplay0 + s + } else if (thisLength > 0) { + val blockIndex = endIndex & ~31 // round down to nearest 32 + val lo = endIndex & 31 // remainder of blockIndex / 32 + + if (endIndex != blockIndex) { + val s = new OldVector(startIndex, endIndex + 1, blockIndex) + s.initFrom(this) + s.dirty = dirty + s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } else { + val shift = startIndex & ~((1 << (5 * (depth - 1))) - 1) + val shiftBlocks = startIndex >>> (5 * (depth - 1)) + + if (shift != 0) { + if (depth > 1) { + val newBlockIndex = blockIndex - shift + val newFocus = focus - shift + + val s = new OldVector(startIndex - shift, endIndex + 1 - shift, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks + s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } else { + val newBlockIndex = blockIndex - 32 + val newFocus = focus + + val s = new OldVector(startIndex - shift, endIndex + 1 - shift, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements + s.gotoPosWritable(newFocus, newBlockIndex, 
newFocus ^ newBlockIndex) + + if (s.display0.length < 32 - shift - 1) { + val newDisplay0 = new Array[AnyRef](32 - shift - 1) + s.display0.copyToArray(newDisplay0) + s.display0 = newDisplay0 + } + s.display0(32 - shift) = value.asInstanceOf[AnyRef] + s + } + } else { + val newBlockIndex = blockIndex + val newFocus = focus + + val s = new OldVector(startIndex, endIndex + 1, newBlockIndex) + s.initFrom(this) + s.dirty = dirty + s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) + s.display0(lo) = value.asInstanceOf[AnyRef] + s + } + } + } else OldVector.single(value) + + releaseFence() + result + } + + + // low-level implementation (needs cleanup, maybe move to util class) + + private def shiftTopLevel(oldLeft: Int, newLeft: Int) = (depth - 1) match { + case 0 => display0 = copyRange(display0, oldLeft, newLeft) + case 1 => display1 = copyRange(display1, oldLeft, newLeft) + case 2 => display2 = copyRange(display2, oldLeft, newLeft) + case 3 => display3 = copyRange(display3, oldLeft, newLeft) + case 4 => display4 = copyRange(display4, oldLeft, newLeft) + case 5 => display5 = copyRange(display5, oldLeft, newLeft) + } + + private def zeroLeft(array: Array[AnyRef], index: Int): Unit = { + var i = 0 + while (i < index) { + array(i) = null + i += 1 + } + } + + private def zeroRight(array: Array[AnyRef], index: Int): Unit = { + var i = index + while (i < array.length) { + array(i) = null + i += 1 + } + } + + private def copyLeft[T <: AnyRef](array: Array[T], right: Int): Array[T] = { + val copy = array.clone() + java.util.Arrays.fill(copy.asInstanceOf[Array[AnyRef]], right, array.length, null) + copy + } + private def copyRight[T <: AnyRef](array: Array[T], left: Int): Array[T] = { + val copy = array.clone() + java.util.Arrays.fill(copy.asInstanceOf[Array[AnyRef]], 0, left, null) + copy + } + + // requires structure is at index cutIndex and writable at level 0 + private def cleanLeftEdge(cutIndex: Int) = { + if (cutIndex < (1 << 5)) { + 
zeroLeft(display0, cutIndex) + } else if (cutIndex < (1 << 10)) { + zeroLeft(display0, cutIndex & 31) + display1 = copyRight(display1, cutIndex >>> 5) + } else if (cutIndex < (1 << 15)) { + zeroLeft(display0, cutIndex & 31) + display1 = copyRight(display1, (cutIndex >>> 5) & 31) + display2 = copyRight(display2, cutIndex >>> 10) + } else if (cutIndex < (1 << 20)) { + zeroLeft(display0, cutIndex & 31) + display1 = copyRight(display1, (cutIndex >>> 5) & 31) + display2 = copyRight(display2, (cutIndex >>> 10) & 31) + display3 = copyRight(display3, cutIndex >>> 15) + } else if (cutIndex < (1 << 25)) { + zeroLeft(display0, cutIndex & 31) + display1 = copyRight(display1, (cutIndex >>> 5) & 31) + display2 = copyRight(display2, (cutIndex >>> 10) & 31) + display3 = copyRight(display3, (cutIndex >>> 15) & 31) + display4 = copyRight(display4, cutIndex >>> 20) + } else if (cutIndex < (1 << 30)) { + zeroLeft(display0, cutIndex & 31) + display1 = copyRight(display1, (cutIndex >>> 5) & 31) + display2 = copyRight(display2, (cutIndex >>> 10) & 31) + display3 = copyRight(display3, (cutIndex >>> 15) & 31) + display4 = copyRight(display4, (cutIndex >>> 20) & 31) + display5 = copyRight(display5, cutIndex >>> 25) + } else { + throw new IllegalArgumentException() + } + } + + // requires structure is writable and at index cutIndex + private def cleanRightEdge(cutIndex: Int) = { + // we're actually sitting one block left if cutIndex lies on a block boundary + // this means that we'll end up erasing the whole block!! 
+ + if (cutIndex <= (1 << 5)) { + zeroRight(display0, cutIndex) + } else if (cutIndex <= (1 << 10)) { + zeroRight(display0, ((cutIndex - 1) & 31) + 1) + display1 = copyLeft(display1, cutIndex >>> 5) + } else if (cutIndex <= (1 << 15)) { + zeroRight(display0, ((cutIndex - 1) & 31) + 1) + display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) + display2 = copyLeft(display2, cutIndex >>> 10) + } else if (cutIndex <= (1 << 20)) { + zeroRight(display0, ((cutIndex - 1) & 31) + 1) + display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) + display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) + display3 = copyLeft(display3, cutIndex >>> 15) + } else if (cutIndex <= (1 << 25)) { + zeroRight(display0, ((cutIndex - 1) & 31) + 1) + display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) + display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) + display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1) + display4 = copyLeft(display4, cutIndex >>> 20) + } else if (cutIndex <= (1 << 30)) { + zeroRight(display0, ((cutIndex - 1) & 31) + 1) + display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1) + display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1) + display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1) + display4 = copyLeft(display4, (((cutIndex - 1) >>> 20) & 31) + 1) + display5 = copyLeft(display5, cutIndex >>> 25) + } else { + throw new IllegalArgumentException() + } + } + + private def requiredDepth(xor: Int) = { + if (xor < (1 << 5)) 1 + else if (xor < (1 << 10)) 2 + else if (xor < (1 << 15)) 3 + else if (xor < (1 << 20)) 4 + else if (xor < (1 << 25)) 5 + else if (xor < (1 << 30)) 6 + else throw new IllegalArgumentException() + } + + private def dropFront0(cutIndex: Int): OldVector[A] = { + val blockIndex = cutIndex & ~31 + val xor = cutIndex ^ (endIndex - 1) + val d = requiredDepth(xor) + val shift = cutIndex & ~((1 << (5 * d)) - 1) + + // need to init with full display iff going 
to cutIndex requires swapping block at level >= d + + val s = new OldVector(cutIndex - shift, endIndex - shift, blockIndex - shift) + s.initFrom(this) + s.dirty = dirty + s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) + s.preClean(d) + s.cleanLeftEdge(cutIndex - shift) + releaseFence() + s + } + + private def dropBack0(cutIndex: Int): OldVector[A] = { + val blockIndex = (cutIndex - 1) & ~31 + val xor = startIndex ^ (cutIndex - 1) + val d = requiredDepth(xor) + val shift = startIndex & ~((1 << (5 * d)) - 1) + + val s = new OldVector(startIndex - shift, cutIndex - shift, blockIndex - shift) + s.initFrom(this) + s.dirty = dirty + s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex) + s.preClean(d) + s.cleanRightEdge(cutIndex - shift) + releaseFence() + s + } + override protected def applyPreferredMaxLength: Int = OldVector.defaultApplyPreferredMaxLength + + override def equals(o: Any): Boolean = o match { + case that: OldVector[_] => + if (this eq that) true + else if (this.length != that.length) false + else if ( // + this.startIndex == that.startIndex && // + this.endIndex == that.endIndex && // + (this.display0 eq that.display0) && // + (this.display1 eq that.display1) && // + (this.display2 eq that.display2) && // + (this.display3 eq that.display3) && // + (this.display4 eq that.display4) && // + (this.display5 eq that.display5) // + ) true + else super.equals(o) + case _ => super.equals(o) + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + + //override def toVector: Vector[A] = this + + override protected[this] def className = "OldVector" +} + +//TODO: When making this class private, make it final as well. 
+@deprecated("This class is not intended for public consumption and will be made private in the future.","2.13.0") +class OldVectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) + extends AbstractIterator[A] + with OldVectorPointer[A] { + + private[this] final var blockIndex: Int = _startIndex & ~31 + private[this] final var lo: Int = _startIndex & 31 + private[this] final var endLo = Math.min(endIndex - blockIndex, 32) + + override def hasNext: Boolean = _hasNext + + private[this] final var _hasNext = blockIndex + lo < endIndex + + private[this] def advanceToNextBlockIfNecessary(): Unit = { + if (lo == endLo) { + if (blockIndex + lo < endIndex) { + val newBlockIndex = blockIndex + 32 + gotoNextBlockStart(newBlockIndex, blockIndex ^ newBlockIndex) + + blockIndex = newBlockIndex + endLo = Math.min(endIndex - blockIndex, 32) + lo = 0 + } else { + _hasNext = false + } + } + } + + override def take(n: Int): Iterator[A] = { + if(n <= 0) { + _hasNext = false + endIndex = 0 + } else { + val dropR = remainingElementCount - n + if(dropR > 0) { + endIndex -= dropR + endLo = Math.min(endIndex - blockIndex, 32) + _hasNext = blockIndex + lo < endIndex + } + } + this + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val longLo = lo.toLong + n + if (blockIndex + longLo < endIndex) { + // We only need to adjust the block if we are outside the current block + // We know that we are within the collection as < endIndex + lo = longLo.toInt + if (lo >= 32) { + blockIndex = (blockIndex + lo) & ~31 + gotoNewBlockStart(blockIndex, depth) + + endLo = Math.min(endIndex - blockIndex, 32) + lo = lo & 31 + } + } else { + _hasNext = false + endIndex = 0 + } + } + this + } + + override def slice(from: Int, until: Int): Iterator[A] = { + val _until = + if(from > 0) { + drop(from) + until - from + } else until + take(_until) + } + + override def next(): A = { + if (!_hasNext) throw new NoSuchElementException("reached iterator end") + val res = 
display0(lo).asInstanceOf[A] + lo += 1 + advanceToNextBlockIfNecessary() + res + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val xsLen = xs.length + val totalToBeCopied = IterableOnce.elemsToCopyToArray(remainingElementCount, xsLen, start, len) + var totalCopied = 0 + while (hasNext && totalCopied < totalToBeCopied) { + val _start = start + totalCopied + val toBeCopied = IterableOnce.elemsToCopyToArray(endLo - lo, xsLen, _start, len - totalCopied) + Array.copy(display0, lo, xs, _start, toBeCopied) + totalCopied += toBeCopied + lo += toBeCopied + advanceToNextBlockIfNecessary() + } + totalCopied + } + + private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0 + + override def knownSize: Int = remainingElementCount + + /** Creates a new vector which consists of elements remaining in this iterator. + * Such a vector can then be split into several vectors using methods like `take` and `drop`. + */ + private[collection] def remainingVector: OldVector[A] = { + if(!_hasNext) OldVector.empty + else { + val v = new OldVector(blockIndex + lo, endIndex, blockIndex + lo) + v.initFrom(this) + v + } + } +} + +/** A class to build instances of `OldVector`. This builder is reusable. 
*/ +final class OldVectorBuilder[A]() extends ReusableBuilder[A, OldVector[A]] with OldVectorPointer[A] { + + // possible alternative: start with display0 = null, blockIndex = -32, lo = 32 + // to avoid allocating initial array if the result will be empty anyways + + display0 = new Array[AnyRef](32) + depth = 1 + + private[this] var blockIndex = 0 + private[this] var lo = 0 + + def size: Int = blockIndex + lo + def isEmpty: Boolean = size == 0 + def nonEmpty: Boolean = size != 0 + + override def knownSize: Int = size + + private[this] def advanceToNextBlockIfNecessary(): Unit = { + if (lo >= display0.length) { + val newBlockIndex = blockIndex + 32 + gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex) + blockIndex = newBlockIndex + lo = 0 + } + } + + def addOne(elem: A): this.type = { + advanceToNextBlockIfNecessary() + display0(lo) = elem.asInstanceOf[AnyRef] + lo += 1 + this + } + + override def addAll(xs: IterableOnce[A]): this.type = { + val it = (xs.iterator : Iterator[A]).asInstanceOf[Iterator[AnyRef]] + while (it.hasNext) { + advanceToNextBlockIfNecessary() + lo += it.copyToArray(xs = display0, start = lo, len = display0.length - lo) + } + this + } + + def result(): OldVector[A] = { + val size = this.size + if (size == 0) + return OldVector.empty + val s = new OldVector[A](0, size, 0) // should focus front or back? + s.initFrom(this) + if (depth > 1) s.gotoPos(0, size - 1) // we're currently focused to size - 1, not size! 
+ releaseFence() + s + } + + def clear(): Unit = { + preClean(1) + display0 = new Array[AnyRef](32) + blockIndex = 0 + lo = 0 + } +} + +private[immutable] trait OldVectorPointer[+T] { + private[immutable] var depth: Int = _ + private[immutable] var display0: Array[AnyRef] = _ + private[immutable] var display1: Array[Array[AnyRef]] = _ + private[immutable] var display2: Array[Array[Array[AnyRef]]] = _ + private[immutable] var display3: Array[Array[Array[Array[AnyRef]]]] = _ + private[immutable] var display4: Array[Array[Array[Array[Array[AnyRef]]]]] = _ + private[immutable] var display5: Array[Array[Array[Array[Array[Array[AnyRef]]]]]] = _ + + protected def preClean(depth: Int): Unit = { + this.depth = depth + (depth - 1) match { + case 0 => + display1 = null + display2 = null + display3 = null + display4 = null + display5 = null + case 1 => + display2 = null + display3 = null + display4 = null + display5 = null + case 2 => + display3 = null + display4 = null + display5 = null + case 3 => + display4 = null + display5 = null + case 4 => + display5 = null + case 5 => + } + } + + + // used + private[immutable] final def initFrom[U](that: OldVectorPointer[U]): Unit = initFrom(that, that.depth) + + private[immutable] final def initFrom[U](that: OldVectorPointer[U], depth: Int) = { + this.depth = depth + (depth - 1) match { + case -1 => + case 0 => + display0 = that.display0 + case 1 => + display1 = that.display1 + display0 = that.display0 + case 2 => + display2 = that.display2 + display1 = that.display1 + display0 = that.display0 + case 3 => + display3 = that.display3 + display2 = that.display2 + display1 = that.display1 + display0 = that.display0 + case 4 => + display4 = that.display4 + display3 = that.display3 + display2 = that.display2 + display1 = that.display1 + display0 = that.display0 + case 5 => + display5 = that.display5 + display4 = that.display4 + display3 = that.display3 + display2 = that.display2 + display1 = that.display1 + display0 = that.display0 + } + } 
+ + // go to specific position + // requires structure is at pos oldIndex = xor ^ index, + // ensures structure is at pos index + private[immutable] final def gotoPos(index: Int, xor: Int): Unit = { + if (xor < (1 << 5)) { // level = 0 + // we're already at the block start pos + } else if (xor < (1 << 10)) { // level = 1 + display0 = display1((index >>> 5) & 31) + } else if (xor < (1 << 15)) { // level = 2 + display1 = display2((index >>> 10) & 31) + display0 = display1((index >>> 5) & 31) + } else if (xor < (1 << 20)) { // level = 3 + display2 = display3((index >>> 15) & 31) + display1 = display2((index >>> 10) & 31) + display0 = display1((index >>> 5) & 31) + } else if (xor < (1 << 25)) { // level = 4 + display3 = display4((index >>> 20) & 31) + display2 = display3((index >>> 15) & 31) + display1 = display2((index >>> 10) & 31) + display0 = display1((index >>> 5) & 31) + } else if (xor < (1 << 30)) { // level = 5 + display4 = display5((index >>> 25) & 31) + display3 = display4((index >>> 20) & 31) + display2 = display3((index >>> 15) & 31) + display1 = display2((index >>> 10) & 31) + display0 = display1((index >>> 5) & 31) + } else { // level = 6 + throw new IllegalArgumentException() + } + } + + // USED BY ITERATOR + + // xor: oldIndex ^ index + private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos + if (xor < (1 << 10)) { // level = 1 + display0 = display1((index >>> 5) & 31) + } else if (xor < (1 << 15)) { // level = 2 + display1 = display2((index >>> 10) & 31) + display0 = display1(0) + } else if (xor < (1 << 20)) { // level = 3 + display2 = display3((index >>> 15) & 31) + display1 = display2(0) + display0 = display1(0) + } else if (xor < (1 << 25)) { // level = 4 + display3 = display4((index >>> 20) & 31) + display2 = display3(0) + display1 = display2(0) + display0 = display1(0) + } else if (xor < (1 << 30)) { // level = 5 + display4 = display5((index >>> 25) & 31) + display3 = display4(0) + display2 = 
display3(0) + display1 = display2(0) + display0 = display1(0) + } else { // level = 6 + throw new IllegalArgumentException() + } + } + private[immutable] final def gotoNewBlockStart(index: Int, depth: Int): Unit = { + if (depth > 5) display4 = display5((index >>> 25) & 31) + if (depth > 4) display3 = display4((index >>> 20) & 31) + if (depth > 3) display2 = display3((index >>> 15) & 31) + if (depth > 2) display1 = display2((index >>> 10) & 31) + if (depth > 1) display0 = display1((index >>> 5) & 31) + } + + // USED BY BUILDER + + // xor: oldIndex ^ index + private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos + if (xor < (1 << 10)) { // level = 1 + if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth += 1 } + display0 = new Array(32) + display1((index >>> 5) & 31) = display0 + } else if (xor < (1 << 15)) { // level = 2 + if (depth == 2) { display2 = new Array(32); display2(0) = display1; depth += 1 } + display0 = new Array(32) + display1 = new Array(32) + display1((index >>> 5) & 31) = display0 + display2((index >>> 10) & 31) = display1 + } else if (xor < (1 << 20)) { // level = 3 + if (depth == 3) { display3 = new Array(32); display3(0) = display2; depth += 1 } + display0 = new Array(32) + display1 = new Array(32) + display2 = new Array(32) + display1((index >>> 5) & 31) = display0 + display2((index >>> 10) & 31) = display1 + display3((index >>> 15) & 31) = display2 + } else if (xor < (1 << 25)) { // level = 4 + if (depth == 4) { display4 = new Array(32); display4(0) = display3; depth += 1 } + display0 = new Array(32) + display1 = new Array(32) + display2 = new Array(32) + display3 = new Array(32) + display1((index >>> 5) & 31) = display0 + display2((index >>> 10) & 31) = display1 + display3((index >>> 15) & 31) = display2 + display4((index >>> 20) & 31) = display3 + } else if (xor < (1 << 30)) { // level = 5 + if (depth == 5) { display5 = new Array(32); display5(0) = display4; depth += 
1 } + display0 = new Array(32) + display1 = new Array(32) + display2 = new Array(32) + display3 = new Array(32) + display4 = new Array(32) + display1((index >>> 5) & 31) = display0 + display2((index >>> 10) & 31) = display1 + display3((index >>> 15) & 31) = display2 + display4((index >>> 20) & 31) = display3 + display5((index >>> 25) & 31) = display4 + } else { // level = 6 + throw new IllegalArgumentException() + } + } + + // STUFF BELOW USED BY APPEND / UPDATE + + private[immutable] final def nullSlotAndCopy[T <: AnyRef](array: Array[Array[T]], index: Int): Array[T] = { + val x = array(index) + array(index) = null + x.clone() + } + + // make sure there is no aliasing + // requires structure is at pos index + // ensures structure is clean and at pos index and writable at all levels except 0 + + private[immutable] final def stabilize(index: Int) = (depth - 1) match { + case 5 => + display5 = display5.clone() + display4 = display4.clone() + display3 = display3.clone() + display2 = display2.clone() + display1 = display1.clone() + display5((index >>> 25) & 31) = display4 + display4((index >>> 20) & 31) = display3 + display3((index >>> 15) & 31) = display2 + display2((index >>> 10) & 31) = display1 + display1((index >>> 5) & 31) = display0 + case 4 => + display4 = display4.clone() + display3 = display3.clone() + display2 = display2.clone() + display1 = display1.clone() + display4((index >>> 20) & 31) = display3 + display3((index >>> 15) & 31) = display2 + display2((index >>> 10) & 31) = display1 + display1((index >>> 5) & 31) = display0 + case 3 => + display3 = display3.clone() + display2 = display2.clone() + display1 = display1.clone() + display3((index >>> 15) & 31) = display2 + display2((index >>> 10) & 31) = display1 + display1((index >>> 5) & 31) = display0 + case 2 => + display2 = display2.clone() + display1 = display1.clone() + display2((index >>> 10) & 31) = display1 + display1((index >>> 5) & 31) = display0 + case 1 => + display1 = display1.clone() + 
display1((index >>> 5) & 31) = display0 + case 0 => + } + + + /// USED IN UPDATE AND APPEND BACK + + // prepare for writing at an existing position + + // requires structure is clean and at pos oldIndex = xor ^ newIndex, + // ensures structure is dirty and at pos newIndex and writable at level 0 + private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match { + case 5 => + display5 = display5.clone() + display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31) + display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) + display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + case 4 => + display4 = display4.clone() + display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) + display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + case 3 => + display3 = display3.clone() + display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + case 2 => + display2 = display2.clone() + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + case 1 => + display1 = display1.clone() + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + case 0 => + display0 = display0.clone() + } + + + // requires structure is dirty and at pos oldIndex, + // ensures structure is dirty and at pos newIndex and writable at level 0 + private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { + if (xor < (1 << 5)) { // level = 0 + display0 = display0.clone() + } else if (xor < (1 << 10)) { // level = 1 + display1 = display1.clone() + display1((oldIndex >>> 5) & 
31) = display0 + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + } else if (xor < (1 << 15)) { // level = 2 + display1 = display1.clone() + display2 = display2.clone() + display1((oldIndex >>> 5) & 31) = display0 + display2((oldIndex >>> 10) & 31) = display1 + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + } else if (xor < (1 << 20)) { // level = 3 + display1 = display1.clone() + display2 = display2.clone() + display3 = display3.clone() + display1((oldIndex >>> 5) & 31) = display0 + display2((oldIndex >>> 10) & 31) = display1 + display3((oldIndex >>> 15) & 31) = display2 + display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + } else if (xor < (1 << 25)) { // level = 4 + display1 = display1.clone() + display2 = display2.clone() + display3 = display3.clone() + display4 = display4.clone() + display1((oldIndex >>> 5) & 31) = display0 + display2((oldIndex >>> 10) & 31) = display1 + display3((oldIndex >>> 15) & 31) = display2 + display4((oldIndex >>> 20) & 31) = display3 + display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31) + display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + } else if (xor < (1 << 30)) { // level = 5 + display1 = display1.clone() + display2 = display2.clone() + display3 = display3.clone() + display4 = display4.clone() + display5 = display5.clone() + display1((oldIndex >>> 5) & 31) = display0 + display2((oldIndex >>> 10) & 31) = display1 + display3((oldIndex >>> 15) & 31) = display2 + display4((oldIndex >>> 20) & 31) = display3 + display5((oldIndex >>> 25) & 31) = display4 + display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31) + display3 = nullSlotAndCopy(display4, (newIndex >>> 
20) & 31) + display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31) + display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31) + display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31) + } else { // level = 6 + throw new IllegalArgumentException() + } + } + + + // USED IN DROP + + private[immutable] final def copyRange[T <: AnyRef](array: Array[T], oldLeft: Int, newLeft: Int) = { + val elems = java.lang.reflect.Array.newInstance(array.getClass.getComponentType, 32).asInstanceOf[Array[T]] + java.lang.System.arraycopy(array, oldLeft, elems, newLeft, 32 - Math.max(newLeft, oldLeft)) + elems + } + + + // USED IN APPEND + // create a new block at the bottom level (and possibly nodes on its path) and prepares for writing + + // requires structure is clean and at pos oldIndex, + // ensures structure is dirty and at pos newIndex and writable at level 0 + private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos + if (xor < (1 << 5)) { // level = 0 + // we're already at the block start + } else if (xor < (1 << 10)) { // level = 1 + if (depth == 1) { + display1 = new Array(32) + display1((oldIndex >>> 5) & 31) = display0 + depth += 1 + } + display0 = new Array(32) + } else if (xor < (1 << 15)) { // level = 2 + if (depth == 2) { + display2 = new Array(32) + display2((oldIndex >>> 10) & 31) = display1 + depth += 1 + } + display1 = display2((newIndex >>> 10) & 31) + if (display1 == null) display1 = new Array(32) + display0 = new Array(32) + } else if (xor < (1 << 20)) { // level = 3 + if (depth == 3) { + display3 = new Array(32) + display3((oldIndex >>> 15) & 31) = display2 + depth += 1 + } + display2 = display3((newIndex >>> 15) & 31) + if (display2 == null) display2 = new Array(32) + display1 = display2((newIndex >>> 10) & 31) + if (display1 == null) display1 = new Array(32) + display0 = new Array(32) + } else if (xor < (1 << 25)) { // level = 4 + if (depth == 4) { + display4 = new Array(32) + 
display4((oldIndex >>> 20) & 31) = display3 + depth += 1 + } + display3 = display4((newIndex >>> 20) & 31) + if (display3 == null) display3 = new Array(32) + display2 = display3((newIndex >>> 15) & 31) + if (display2 == null) display2 = new Array(32) + display1 = display2((newIndex >>> 10) & 31) + if (display1 == null) display1 = new Array(32) + display0 = new Array(32) + } else if (xor < (1 << 30)) { // level = 5 + if (depth == 5) { + display5 = new Array(32) + display5((oldIndex >>> 25) & 31) = display4 + depth += 1 + } + display4 = display5((newIndex >>> 25) & 31) + if (display4 == null) display4 = new Array(32) + display3 = display4((newIndex >>> 20) & 31) + if (display3 == null) display3 = new Array(32) + display2 = display3((newIndex >>> 15) & 31) + if (display2 == null) display2 = new Array(32) + display1 = display2((newIndex >>> 10) & 31) + if (display1 == null) display1 = new Array(32) + display0 = new Array(32) + } else { // level = 6 + throw new IllegalArgumentException() + } + } + + // requires structure is dirty and at pos oldIndex, + // ensures structure is dirty and at pos newIndex and writable at level 0 + private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { + stabilize(oldIndex) + gotoFreshPosWritable0(oldIndex, newIndex, xor) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeBenchmark.scala index 65fc3728c9b7..307e256f5772 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeBenchmark.scala @@ -1,6 +1,5 @@ package scala.collection.immutable -import java.util import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ @@ -31,22 +30,25 @@ class RedBlackTreeBenchmark { @Setup(Level.Trial) def init: Unit = { nums = 1 to size - set1 = 
nums.to[TreeSet] - perm = nums.toArray - util.Collections.shuffle(util.Arrays.asList(perm)) + set1 = TreeSet.from(nums) + perm = new Array[Int](size) + val rem = scala.collection.mutable.ArrayBuffer.from(nums) + perm = Array.fill(size)(rem.remove(rnd.nextInt(rem.size))) + assert(rem.size == 0) + assert(perm.sum == nums.sum) set2 = set1.take(size/4) set3 = set1.take(size*3/4) set4 = set1.drop(size/2) - map1 = set1.map(i => (i -> i))(collection.breakOut) + map1 = TreeMap.from(nums.map(i => (i, i))) } @Benchmark - def build(bh: Blackhole): TreeSet[Int] = - (TreeSet.newBuilder[Int] ++= nums).result + def build(bh: Blackhole): Unit = + bh.consume(TreeSet.from(nums)) @Benchmark def buildRandom(bh: Blackhole): Unit = - (TreeSet.newBuilder[Int] ++= perm).result + bh.consume(TreeSet.from(perm)) @Benchmark def iterator(bh: Blackhole): Unit = { @@ -67,7 +69,7 @@ class RedBlackTreeBenchmark { @Benchmark def drain(bh: Blackhole): Unit = { var s = set1 - perm.foreach(i => s = s.-(i)) + perm.foreach(i => s = s.excl(i)) bh.consume(s) } @@ -75,15 +77,15 @@ class RedBlackTreeBenchmark { def union(bh: Blackhole): Unit = { bh.consume( set1.union(set1).size + - set2.union(set3).size + - set2.union(set4).size + - set4.union(set2).size + set2.union(set3).size + + set2.union(set4).size + + set4.union(set2).size ) } @Benchmark def range(bh: Blackhole): Unit = { - val s = set1 + var s = set1 var res = 0 for(i <- 0 to 5; j <- 0 to 5) res += s.range(s.size*i/5, s.size*j/5).size bh.consume(res) @@ -91,7 +93,7 @@ class RedBlackTreeBenchmark { @Benchmark def slice(bh: Blackhole): Unit = { - val s = set1 + var s = set1 var res = 0 for(i <- 0 to 5; j <- 0 to 5) res += s.slice(s.size*i/5, s.size*j/5).size bh.consume(res) @@ -99,7 +101,7 @@ class RedBlackTreeBenchmark { @Benchmark def take(bh: Blackhole): Unit = { - val s = set1 + var s = set1 var res = 0 for(i <- 0 to 10) res += s.take(s.size*i/10).size bh.consume(res) @@ -107,7 +109,7 @@ class RedBlackTreeBenchmark { @Benchmark def drop(bh: 
Blackhole): Unit = { - val s = set1 + var s = set1 var res = 0 for(i <- 0 to 10) res += s.drop(s.size*i/10).size bh.consume(res) diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeEqualsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeEqualsBenchmark.scala index a7073a9e9540..efaf8a7fd761 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeEqualsBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/RedBlackTreeEqualsBenchmark.scala @@ -32,7 +32,7 @@ class RedBlackTreeEqualsSharedBenchmark { val r = new Random() r.setSeed(0x1234567890abcdefL) - def aSet(start: Int, end: Int) : TreeSet[Int] = (start to end).to[TreeSet] + def aSet(start: Int, end: Int) : TreeSet[Int] = TreeSet.from(start to end) def aMap(start: Int, end: Int) : TreeMap[Int, Int] = TreeMap.empty[Int, Int] ++ ((start to end) map {x => x-> x}) set = aSet(1, size) @@ -74,7 +74,7 @@ class RedBlackTreeEqualsUnsharedBenchmark { var otherMap: TreeMap[Int, Int] = _ @Setup(Level.Trial) def init: Unit = { - def aSet(start: Int, end: Int) : TreeSet[Int] = (start to end).to[TreeSet] + def aSet(start: Int, end: Int) : TreeSet[Int] = TreeSet.from(start to end) def aMap(start: Int, end: Int) : TreeMap[Int, Int] = TreeMap.empty[Int, Int] ++ ((start to end) map {x => x-> x}) set = aSet(1, size) otherSet = aSet(1, size) diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/SeqMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/SeqMapBenchmark.scala new file mode 100644 index 000000000000..58e724e848c0 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/SeqMapBenchmark.scala @@ -0,0 +1,29 @@ +package scala.collection.immutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) 
+@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class SeqMapBenchmark { + + var base: SeqMap[String,String] = _ + + + @Setup(Level.Trial) def initKeys(): Unit = { + base = SeqMap("a" -> "a", "b" -> "b", "c" -> "c", "d" -> "d") + } + + // immutable map is implemented as EmptyMap -> Map1 -> Map2 -> Map3 -> Map4 -> Hashmap + // add an extra entry to Map4 causes a lot of work, benchmark the transition + @Benchmark def map4AddElement(bh: Blackhole): Unit = { + bh.consume(base.updated("e", "e")) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala index 93306266917e..c8574093fd8a 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala @@ -20,10 +20,15 @@ class SetBenchmark { @Setup(Level.Trial) def initKeys(): Unit = { base = Set("a", "b", "c", "d") } - + // immutable map is implemented as EmptySet -> Set1 -> Set2 -> Set3 -> Set4 -> HashSet // add an extra entry to Set4 causes a lot of work, benchmark the transition @Benchmark def set4AddElement(bh: Blackhole): Unit = { bh.consume(base + "e") } + + // benchmark for the optimized concat method + @Benchmark def set4Concat(bh: Blackhole): Unit = { + bh.consume(base concat base) + } } diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/TreeMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/TreeMapBenchmark.scala deleted file mode 100644 index 0f793062795f..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/TreeMapBenchmark.scala +++ /dev/null @@ -1,129 +0,0 @@ -package scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ -//typical usage bench/jmh:run 
scala.collection.immutable.TreeMapBenchmark --prof gc - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -class TreeMapBenchmark { - - val small = TreeMap.empty[String, String] ++ (Array.tabulate(10)(x => x.toString -> x.toString)) - val smallHash: HashMap[String, String] = (HashMap.newBuilder[String, String] ++= small).result - val rawData = Array.tabulate(1000)(x => x.toString -> x.toString) - - val large: TreeMap[String, String] = TreeMap.empty[String, String] ++ rawData - val largeHash: HashMap[String, String] = (HashMap.newBuilder[String, String] ++= rawData).result - val largeDifferentValues: TreeMap[String, String] = large map { case ((k, v)) => k -> (v + "-xx") } - val largeDifferentValuesHash: HashMap[String, String] = (HashMap.newBuilder[String, String] ++= largeDifferentValues).result - val large2: TreeMap[String, String] = large.map { case ((k, v)) => (k + "-yy") -> (v + "-xx") } - val large2Hash: HashMap[String, String] = (HashMap.newBuilder[String, String] ++= large2).result - - val one = TreeMap[String, String]("a" -> "b") - - @Benchmark def plusPlus(bh: Blackhole): Unit = { - bh.consume(large ++ large2) - } - - @OperationsPerInvocation(1000) - @Benchmark def builderPlus(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - var i = 0 - while (i < 1000) { - builder += rawData(i) - i += 1 - } - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusInitial(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusInitialHash(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large2Hash - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusSame(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= 
large - builder ++= large - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusSameHash(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large - builder ++= largeHash - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusDifferntValues(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large - builder ++= largeDifferentValues - builder ++= large - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusLargeLarge(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large - builder ++= large2 - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusLargeLargeHash(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large - builder ++= large2Hash - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusSmallLarge(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= small - builder ++= large - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusSmallLargeHash(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= small - builder ++= largeHash - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusLargeSmall(bh: Blackhole): Unit = { - val builder = TreeMap.newBuilder[String, String] - builder ++= large - builder ++= small - bh.consume(builder.result) - } - -} - -//for testing, debugging, optimising etc -object TreeMapTest extends App { - - val bh = new Blackhole("Today's password is swordfish. 
I understand instantiating Blackholes directly is dangerous.") - val test = new TreeMapBenchmark - - while (true) { - var j = 0 - val start = System.nanoTime() - while (j < 100) { - test.builderPlusPlusLargeSmall(bh) - j += 1 - } - val end = System.nanoTime() - println((end - start) / 1000000) - } -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/TreeSetBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/TreeSetBenchmark.scala deleted file mode 100644 index e3ef4f2ac677..000000000000 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/TreeSetBenchmark.scala +++ /dev/null @@ -1,115 +0,0 @@ -package scala.collection.immutable - -import java.util.concurrent.TimeUnit - -import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra._ - -//typical usage bench/jmh:run scala.collection.immutable.TreeSetBenchmark --prof gc - -@BenchmarkMode(Array(Mode.AverageTime)) -@Fork(2) -@Threads(1) -@Warmup(iterations = 10) -@Measurement(iterations = 10) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -class TreeSetBenchmark { - - val small = (Array.tabulate(10) (_.toString)).to[TreeSet] - val smallHash: HashSet[String] = HashSet.empty ++ small - val rawData = Array.tabulate(1000) (_.toString) - - val large = rawData.to[TreeSet] - val largeHash: HashSet[String] = HashSet.empty ++ rawData - val large2 = large map (_+ "-xx") - val large2Hash: HashSet[String] = HashSet.empty ++ large2 - - val one = TreeSet[String] ("f") - - @Benchmark def plusPlus(bh: Blackhole): Unit = { - bh.consume(large ++ large2) - } - @OperationsPerInvocation(1000) - @Benchmark def builderPlus(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - var i = 0 - while (i < 1000) { - builder += rawData(i) - i += 1 - } - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusInitial(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= large - bh.consume(builder.result) - } - @Benchmark def 
builderPlusPlusInitialHash(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= largeHash - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusSame(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= large - builder ++= large - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusSameHash(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= large - builder ++= largeHash - bh.consume(builder.result) - } - - @Benchmark def builderPlusPlusLargeLarge(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= large - builder ++= large2 - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusLargeLargeHash(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= large - builder ++= large2Hash - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusSmallLarge(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= small - builder ++= large - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusSmallLargeHash(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= small - builder ++= largeHash - bh.consume(builder.result) - } - @Benchmark def builderPlusPlusLargeSmall(bh: Blackhole): Unit = { - val builder = TreeSet.newBuilder[String] - builder ++= large - builder ++= small - bh.consume(builder.result) - } - -} - -//for testing, debugging, optimising etc -object TreeSetTest extends App { - - val bh = new Blackhole("Today's password is swordfish. 
I understand instantiating Blackholes directly is dangerous.") - val test = new TreeSetBenchmark - - while (true) { - var j = 0 - val start = System.nanoTime() - while (j < 100) { - test.builderPlusPlusLargeSmall(bh) - j += 1 - } - val end = System.nanoTime() - println((end - start) / 1000000) - } -} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorBenchmark.scala new file mode 100644 index 000000000000..bf095c44c04c --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorBenchmark.scala @@ -0,0 +1,28 @@ +package scala.collection.immutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.infra.Blackhole + +import org.openjdk.jmh.annotations._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class VectorBenchmark { + @Param(Array("0", "10", "1000", "1000000")) + var size: Int = _ + var vec: Vector[AnyRef] = _ + val array = Array.fill(1000000)(new AnyRef) + + @Setup(Level.Trial) def initKeys(): Unit = { + vec = Vector.fill(size)(new AnyRef) + } + @Benchmark def concat(bh: Blackhole): Any = { + bh.consume(vec.copyToArray(array, 0, size)) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorBenchmark2.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorBenchmark2.scala new file mode 100644 index 000000000000..d59862cca487 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorBenchmark2.scala @@ -0,0 +1,595 @@ +package scala.collection.immutable + +import java.lang.management.ManagementFactory +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.util.Random +import java.util.Arrays + +import scala.collection.IterableOps + 
+@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 8) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class VectorBenchmark2 { + + //@Param(Array("1", "5", "10", "100", "1000", "2000", "10000", "50000", "500000", "5000000", "50000000")) + @Param(Array("1", "10", "100", "1000", "10000", "50000")) + //@Param(Array("1", "5", "10", "100", "1000", "2000", "10000")) + //@Param(Array("1", "5", "10")) + //@Param(Array("2000", "10000")) + //@Param(Array("100", "500", "1000")) + var size: Int = _ + + val rand = new Random(42) + val o, p = new AnyRef + + var a: Array[AnyRef] = _ + var v: OldVector[AnyRef] = _ + var nv: Vector[AnyRef] = _ + var as: ArraySeq[AnyRef] = _ + + @Setup(Level.Trial) def init: Unit = { + //a = Array.fill(size)(o) + v = OldVector.fill(size)(o) + nv = Vector.fill(size)(o) + //Vector.fillSparse(size)(o) + as = ArraySeq.fill(size)(o) + //println(s"init: size = $size, JVM: " + ManagementFactory.getRuntimeMXBean().getName()) + } + + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + // Old Vector + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + @Benchmark def vBadApplySequential(bh: Blackhole): Any = { + val coll = v + var i = 0 + while(i < coll.length) { + bh.consume(v(i)) + i += 1 + } + } + + @Benchmark def vApplySequential(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(v(i % size)) + i += 1 + } + } + + @Benchmark def vApplyRandom(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(v(rand.nextInt(size))) + i += 1 + } + } + + @Benchmark def vPrepend(bh: Blackhole): Any = { + var coll0, coll = v + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.prepended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vAppend(bh: Blackhole): Any = { + var coll0, coll = 
v + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.appended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vApprepend(bh: Blackhole): Any = { + var coll0, coll = v + var i = 0 + while(i < size) { + if(i % 2 == 0) coll = coll.appended(o) + else coll = coll.prepended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vBuild(bh: Blackhole): Any = { + val b = OldVector.newBuilder[AnyRef] + var i = 0 + while(i < size) { + b.addOne(o) + i += 1 + } + bh.consume(b.result()) + } + + @Benchmark def vUpdateSequential(bh: Blackhole): Any = { + var v = this.v + var i = 0 + while(i < 1000) { + v = v.updated(i % size, o) + i += 1 + } + bh.consume(v) + } + + @Benchmark def vUpdateRandom(bh: Blackhole): Any = { + var v = this.v + var i = 0 + while(i < 100) { + v = v.updated(rand.nextInt(size), o) + i += 1 + } + bh.consume(v) + } + + @Benchmark def vHead(bh: Blackhole): Any = { + var coll = v + var i = 0 + while(i < 1000) { + bh.consume(coll.head) + i += 1 + } + } + + @Benchmark def vLast(bh: Blackhole): Any = { + var coll = v + var i = 0 + while(i < 1000) { + bh.consume(coll.last) + i += 1 + } + } + + @Benchmark def vTail(bh: Blackhole): Any = { + var coll, coll1 = v + var i = 0 + while(i < 1000) { + coll = coll.tail + bh.consume(coll) + if(coll.isEmpty) coll = coll1 + i += 1 + } + } + + @Benchmark def vSlice(bh: Blackhole): Any = { + var coll = v + val inc = size / 10 + if(inc > 0) { + var i = 0 + while(i < size) { + var j = i + inc + while(j < size) { + bh.consume(coll.slice(i, j)) + j += inc + } + i += inc + } + } + } + + @Benchmark def vIterator(bh: Blackhole): Any = { + var coll = v + val it = coll.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def vForeach(bh: Blackhole): Any = { + var coll = v + coll.foreach(bh.consume _) + } + + @Benchmark def vMapIdentity(bh: Blackhole): Any = { + var coll = v + bh.consume(coll.map(identity)) + } + + @Benchmark def vMapNew(bh: Blackhole): Any = { + var coll = v + 
bh.consume(coll.map(_ => p)) + } + + @Benchmark def vBulkAppend2(bh: Blackhole): Any = { + var coll = v + val coll1 = OldVector.fill(2)(o) + var i = 0 + while(i < 100) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vBulkAppend10p(bh: Blackhole): Any = { + var coll = v + val coll1 = as.take(coll.size/10) + var i = 0 + while(i < 100) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vBulkAppend100p(bh: Blackhole): Any = { + var coll = v + val coll1 = as + var i = 0 + while(i < 10) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vBulkAppendSame(bh: Blackhole): Any = { + var coll = v + val coll1 = v + var i = 0 + while(i < 10) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def vFilter100p(bh: Blackhole): Any = { + var coll = v + bh.consume(coll.filter(x => true)) + } + + @Benchmark def vFilter50p(bh: Blackhole): Any = { + var coll = v + var b = false + bh.consume(coll.filter { x => + b = !b + b + }) + } + + @Benchmark def vFilter0p(bh: Blackhole): Any = { + var coll = v + bh.consume(coll.filter(x => false)) + } + + /* + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + // ArraySeq + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + @Benchmark def asApplySequential(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(as(i % size)) + i += 1 + } + } + + @Benchmark def asApplyRandom(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(as(rand.nextInt(size))) + i += 1 + } + } + + @Benchmark def asPrepend(bh: Blackhole): Any = { + var coll0, coll = as + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.prepended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def asAppend(bh: Blackhole): Any = { + var 
coll0, coll = as + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.appended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def asBuild(bh: Blackhole): Any = { + val b = ArraySeq.newBuilder[AnyRef] + var i = 0 + while(i < size) { + b.addOne(o) + i += 1 + } + bh.consume(b.result()) + } + + + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + // Array + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + @Benchmark def aApplySequential(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(a(i % size)) + i += 1 + } + } + + @Benchmark def aApplyRandom(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(a(rand.nextInt(size))) + i += 1 + } + } + + @Benchmark def aPrepend(bh: Blackhole): Any = { + var coll0, coll = a + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.prepended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def aAppend(bh: Blackhole): Any = { + var coll0, coll = a + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.appended(o) + i += 1 + } + bh.consume(coll) + } + */ + + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + // New Vector + ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + @Benchmark def nvBadApplySequential(bh: Blackhole): Any = { + val coll = nv + var i = 0 + while(i < coll.length) { + bh.consume(v(i)) + i += 1 + } + } + + @Benchmark def nvApplySequential(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(nv(i % size)) + i += 1 + } + } + + @Benchmark def nvApplyRandom(bh: Blackhole): Any = { + var i = 0 + while(i < 1000000) { + bh.consume(nv(rand.nextInt(size))) + i += 1 + } + } + + @Benchmark def nvPrepend(bh: 
Blackhole): Any = { + var coll0, coll = nv + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.prepended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvAppend(bh: Blackhole): Any = { + var coll0, coll = nv + var i = 0 + while(i < size) { + //if(i % 10 == 0) coll = coll0 + coll = coll.appended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvApprepend(bh: Blackhole): Any = { + var coll0, coll = nv + var i = 0 + while(i < size) { + if(i % 2 == 0) coll = coll.appended(o) + else coll = coll.prepended(o) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvBuild(bh: Blackhole): Any = { + val b = Vector.newBuilder[AnyRef] + var i = 0 + while(i < size) { + b.addOne(o) + i += 1 + } + bh.consume(b.result()) + } + + @Benchmark def nvFillSparse(bh: Blackhole): Any = { + bh.consume(Vector.fillSparse(size)(o)) + } + + @Benchmark def nvUpdateSequential(bh: Blackhole): Any = { + var nv = this.nv + var i = 0 + while(i < 1000) { + nv = nv.updated(i % size, o) + i += 1 + } + bh.consume(nv) + } + + @Benchmark def nvUpdateRandom(bh: Blackhole): Any = { + var nv = this.nv + var i = 0 + while(i < 100) { + nv = nv.updated(rand.nextInt(size), o) + i += 1 + } + bh.consume(nv) + } + + @Benchmark def nvHead(bh: Blackhole): Any = { + var coll = nv + var i = 0 + while(i < 1000) { + bh.consume(coll.head) + i += 1 + } + } + + @Benchmark def nvLast(bh: Blackhole): Any = { + var coll = nv + var i = 0 + while(i < 1000) { + bh.consume(coll.last) + i += 1 + } + } + + @Benchmark def nvTail(bh: Blackhole): Any = { + var coll, coll1 = nv + var i = 0 + while(i < 1000) { + coll = coll.tail + bh.consume(coll) + if(coll.isEmpty) coll = coll1 + i += 1 + } + } + + @Benchmark def nvSlice(bh: Blackhole): Any = { + var coll = nv + val inc = size / 10 + if(inc > 0) { + var i = 0 + while(i < size) { + var j = i + inc + while(j < size) { + bh.consume(coll.slice(i, j)) + j += inc + } + i += inc + } + } + } + + @Benchmark def nvIterator(bh: Blackhole): Any = { + 
var coll = nv + val it = coll.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def nvForeach(bh: Blackhole): Any = { + var coll = nv + coll.foreach(bh.consume _) + } + + @Benchmark def nvMapIdentity(bh: Blackhole): Any = { + var coll = nv + bh.consume(coll.map(identity)) + } + + @Benchmark def nvMapNew(bh: Blackhole): Any = { + var coll = nv + bh.consume(coll.map(_ => p)) + } + + @Benchmark def nvBulkAppend2(bh: Blackhole): Any = { + var coll = nv + val coll1 = Vector.fill(2)(o) + var i = 0 + while(i < 100) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvBulkAppend10p(bh: Blackhole): Any = { + var coll = nv + val coll1 = as.take(coll.size/10) + var i = 0 + while(i < 100) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvBulkAppend100p(bh: Blackhole): Any = { + var coll = nv + val coll1 = as + var i = 0 + while(i < 10) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvBulkAppendSame(bh: Blackhole): Any = { + var coll = nv + val coll1 = nv + var i = 0 + while(i < 10) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def nvFilter100p(bh: Blackhole): Any = { + var coll = nv + bh.consume(coll.filter(x => true)) + } + + @Benchmark def nvFilter50p(bh: Blackhole): Any = { + var coll = nv + var b = false + bh.consume(coll.filter { x => + b = !b + b + }) + } + + @Benchmark def nvFilter0p(bh: Blackhole): Any = { + var coll = nv + bh.consume(coll.filter(x => false)) + } + + @Benchmark def nvSliding(bh: Blackhole): Any = { + var coll = nv + coll.sliding(2).foreach(bh.consume) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatAlignToWorstCaseBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatAlignToWorstCaseBenchmark.scala new file mode 100644 index 000000000000..51c269f78712 --- /dev/null +++ 
b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatAlignToWorstCaseBenchmark.scala @@ -0,0 +1,39 @@ +package scala.collection.immutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(1) +@Threads(1) +@Warmup(iterations = 4) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class VectorConcatAlignToWorstCaseBenchmark { + @Param(Array("1", "15", "31", "33", "63", "65", "127", "255", "513", "1023", "1025", "2047")) // should not be divisible by 32 + var size: Int = _ + @Param(Array("1", "32", "64", "128", "256")) + var sizeDifference: Int = _ + + val o = new AnyRef + + var shorter: Vector[String] = _ + var longer: Vector[String] = _ + + @Setup(Level.Trial) def init(): Unit = { + shorter = Vector.fill(size)("s") + longer = Vector.fill(size + sizeDifference)("l") + } + + @Benchmark def withoutAlignTo(bh: Blackhole): Any = + bh.consume(new VectorBuilder[String]().addAll(shorter).addAll(longer).result()) + + @Benchmark def withAlignTo(bh: Blackhole): Any = + bh.consume(new VectorBuilder[String]().alignTo(shorter.length, longer).addAll(shorter).addAll(longer).result()) + + @Benchmark def concat(bh: Blackhole): Any = + bh.consume(shorter ++ longer) +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatBenchmark.scala new file mode 100644 index 000000000000..ad950cb0f865 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatBenchmark.scala @@ -0,0 +1,52 @@ +package scala.collection.immutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(1) +@Threads(1) +@Warmup(iterations = 4) +@Measurement(iterations = 5) 
+@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class VectorConcatBenchmark { + @Param(Array("30", "32", "1000", "1024", "30000", "32000", "30720", "32768", "1048576", "33554432")) + var size: Int = _ + + val o = new AnyRef + + var vAligned: Vector[AnyRef] = _ + var vShifted: Vector[AnyRef] = _ + + @Setup(Level.Trial) def init(): Unit = { + vAligned = Vector.fillSparse(size)(o) + vShifted = Vector.fillSparse(size + 5)(o).drop(5) + } + + def concat(bh: Blackhole, a: Vector[AnyRef], b: Vector[AnyRef], times: Int = 10): Any = { + var coll = a + val coll1 = b + var i = 0 + while(i < times) { + coll = coll.appendedAll(coll1) + i += 1 + } + bh.consume(coll) + } + + @Benchmark def concatAlignedAligned(bh: Blackhole): Any = + concat(bh, vAligned, vAligned) + + @Benchmark def concatAlignedShifted(bh: Blackhole): Any = + concat(bh, vShifted, vShifted) + + + @Benchmark def concatMisalignedAligned(bh: Blackhole): Any = + concat(bh, vAligned, vShifted) + + @Benchmark def concatMisalignedShifted(bh: Blackhole): Any = + concat(bh, vShifted, vAligned) +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatBigVectorsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatBigVectorsBenchmark.scala new file mode 100644 index 000000000000..72b3122acc48 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorConcatBigVectorsBenchmark.scala @@ -0,0 +1,45 @@ +package scala.collection.immutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(1) +@Threads(1) +@Warmup(iterations = 4) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class VectorConcatBigVectorsBenchmark { + val size: Int = 1000000 + + @Param(Array("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10")) + var tenthsSplit: Int = _ + val o = new AnyRef + + 
var l: Vector[AnyRef] = _ + var lShifted: Vector[AnyRef] = _ + var r: Vector[AnyRef] = _ + var rNew: Vector[AnyRef] = _ + + @Setup(Level.Trial) def init(): Unit = { + val split = size * tenthsSplit / 10 + + val (a, b) = Vector.fillSparse(size)(o).splitAt(split) + l = a; r = b + rNew = Vector.fillSparse(size - split)(o) + lShifted = Vector.fillSparse(split + 5)(o).drop(5) + } + + @Benchmark def concatAligned(bh: Blackhole): Any = + bh.consume(l ++ r) + + @Benchmark def concatSemiAligned(bh: Blackhole): Any = + bh.consume(l ++ rNew) + + @Benchmark def concatMisaligned(bh: Blackhole): Any = + bh.consume(lShifted ++ r) + +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorIterationBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorIterationBenchmark.scala new file mode 100644 index 000000000000..edc380c8e200 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorIterationBenchmark.scala @@ -0,0 +1,55 @@ +package scala.collection.immutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class VectorIterationBenchmark { + @Param(Array("0", "1", "2", "3", "4", "6", "8", "10", "12", "14", "16" + , "20", "24", "28", "32", "40", "48", "56", "64", + "128", "256", "512", "1024", "2048", "4096", "8192", "16384", "32768", "65536")) + var size: Int = _ + + var value: Vector[Any] = _ + var larger: Vector[Any] = _ + + @Setup(Level.Trial) def init(): Unit = { + value = Vector.tabulate(size)(_.toString) + larger = value :+ "last" + } + + @Benchmark def iterate(bh: Blackhole) = { + var i = 0 + val it = value.iterator + while (i < size) { + bh.consume(it.next()) + i += 1 + } + } + + 
@Benchmark def apply(bh: Blackhole) = { + var i = 0 + while (i < size) { + bh.consume(value(i)) + i += 1 + } + } + + @Benchmark def applySingle(bh: Blackhole) = { + bh.consume(larger(size)) + } + + @Benchmark def drop = { + val it = value.iterator + it.drop(size) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala index 61e621dcdffd..2f4b49009dee 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala @@ -14,19 +14,20 @@ import java.util.concurrent.TimeUnit @OutputTimeUnit(TimeUnit.NANOSECONDS) @State(Scope.Benchmark) class VectorMapBenchmark { - @Param(Array("10", "100", "1000")) + @Param(Array("1", "10", "100", "1000", "1000000")) var size: Int = _ - var values: Vector[Any] = _ + var kvs: Iterable[(Int, Int)] = _ - @Setup(Level.Trial) def initKeys(): Unit = { - values = (0 to size).map(i => (i % 4) match { - case 0 => i.toString - case 1 => i.toChar - case 2 => i.toDouble - case 3 => i.toInt - }).toVector + @Setup(Level.Trial) + def initKeys(): Unit = { + val unique = (0 to size).map(i => i -> i) + kvs = unique ++ unique } - @Benchmark def groupBy = values.groupBy(_.getClass) + @Benchmark + def builder(bh: Blackhole): Unit = { + val b = VectorMap.newBuilder[Int, Int] + bh.consume(b.addAll(kvs).result()) + } } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/AnyRefMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/AnyRefMapBenchmark.scala new file mode 100644 index 000000000000..a6302dd5a26b --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/AnyRefMapBenchmark.scala @@ -0,0 +1,81 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType 
+import benchmark._ +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class AnyRefMapBenchmark { + @Param(Array("10", "100", "1000")) + var size: Int = _ + @Param(Array("true")) + var useMissingValues = true + + var existingKeys: Array[String] = _ + var missingKeys: Array[String] = _ + var map: collection.mutable.AnyRefMap[String, String] = null + + @Setup(Level.Trial) def initialize: Unit = { + existingKeys = (0 to size).map(_.toString).toArray + missingKeys = (size to 2 * size).toArray.map(_.toString) + map = collection.mutable.AnyRefMap(existingKeys.map(x => (x, x)) : _*) + } + + @Benchmark def contains(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + bh.consume(map.contains(existingKeys(i))) + if (useMissingValues) { + bh.consume(map.contains(missingKeys(i))) + } + i += 1 + } + } + + @Benchmark def get(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + bh.consume(map.get(existingKeys(i))) + if (useMissingValues) { + bh.consume(map.get(missingKeys(i))) + } + i += 1 + } + } + + @Benchmark def getOrElse(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + bh.consume(map.getOrElse(existingKeys(i), "")) + if (useMissingValues) { + bh.consume(map.getOrElse(missingKeys(i), "")) + } + i += 1 + } + } + + @Benchmark def getOrElseUpdate(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + bh.consume(map.getOrElseUpdate(existingKeys(i), "")) + if (useMissingValues) { + bh.consume(map.getOrElse(missingKeys(i), "")) + } + i += 1 + } + } + + @Benchmark def fill(bh: Blackhole): Unit = { + val h = new AnyRefMap[String, String] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala 
new file mode 100644 index 000000000000..d988031a5c9b --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala @@ -0,0 +1,183 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 15) +@Measurement(iterations = 15) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ArrayBufferBenchmark { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + + var ref : ArrayBuffer[Int] = _ + var set : scala.collection.immutable.Set[Int] = _ + var list: List[Int] = _ + + @Setup(Level.Trial) def init: Unit = { + ref = new ArrayBuffer + for (i <- 0 until size) ref += i + set = ref.toSet + list = ref.toList + } + + @Benchmark def filterInPlace(bh: Blackhole): Unit = { + val b = ref.clone() + b.filterInPlace(_ % 2 == 0) + bh.consume(b) + } + + @Benchmark def toObjArrayTagged(bh:Blackhole):Unit = { + val res = ref.asInstanceOf[ArrayBuffer[Integer]].toArray + bh.consume(res) + } + + @Benchmark def toObjArrayUntagged(bh:Blackhole):Unit = { + val res = ref.asInstanceOf[ArrayBuffer[AnyRef]].toArray + bh.consume(res) + } + + + @Benchmark def update(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while (i < size) { + b.update(i, -1) + i += 2 + } + bh.consume(b) + } + + // append `ArrayBuffer` + @Benchmark def addAll1(bh: Blackhole): Unit = { + val b1 = ref.clone() + val b2 = ref.clone() + b1.addAll(b2) + bh.consume(b1) + } + + //addOne + @Benchmark def addOneArrayBuffer(bh:Blackhole):Unit = { + val res = 
ArrayBuffer[Object]() + ref.asInstanceOf[ArrayBuffer[Object]].foreach(res.addOne) + bh.consume(res) + } + + //addOne comparison + @Benchmark def addOneArrayList(bh:Blackhole):Unit = { + val res = new java.util.ArrayList[Object]() + ref.asInstanceOf[ArrayBuffer[Object]].foreach(res.add) + bh.consume(res) + } + + // append `Iterable` with known size + @Benchmark def addAll2(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(set) + bh.consume(b) + } + + // append `Iterable` without known size + @Benchmark def addAll3(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(list) + bh.consume(b) + } + + // append `IterableOnce` without known size + @Benchmark def addAll4(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(list.iterator) + bh.consume(b) + } + + // insert `ArrayBuffer` + @Benchmark def insertAll1(bh: Blackhole): Unit = { + val b1 = ref.clone() + val b2 = ref.clone() + b1.insertAll(size / 2, b2) + bh.consume(b1) + } + + // insert `Iterable` with known size + @Benchmark def insertAll2(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, set) + bh.consume(b) + } + + // insert `Iterable` without known size + @Benchmark def insertAll3(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, list) + bh.consume(b) + } + + // insert `IterableOnce` without known size + @Benchmark def insertAll4(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, list.iterator) + bh.consume(b) + } + + @Benchmark def flatMapInPlace1(bh: Blackhole): Unit = { + val b = ref.clone() + val seq = scala.Seq(0, 0) + b.flatMapInPlace { _ => seq } + bh.consume(b) + } + + @Benchmark def iteratorA(bh: Blackhole): Unit = { + val b = ref.clone() + var n = 0 + for (x <- b.iterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def iteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.iterator.toVector) + bh.consume(b) + } + + @Benchmark def reverseIteratorA(bh: Blackhole): Unit = { + val b = ref.clone() 
+ var n = 0 + for (x <- b.reverseIterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def reverseIteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.reverseIterator.toVector) + bh.consume(b) + } + + @Benchmark def `min-max is reduction`(bh: Blackhole): Unit = bh.consume { + ref.max + } + + @Benchmark def `sum is reduction`(bh: Blackhole): Unit = bh.consume { + ref.sum + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayDequeBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayDequeBenchmark.scala new file mode 100644 index 000000000000..2a099507bc84 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayDequeBenchmark.scala @@ -0,0 +1,149 @@ +package scala.collection +package mutable + +import java.util.concurrent.TimeUnit + +import scala.util.Random + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.MICROSECONDS) +@State(Scope.Benchmark) +class ArrayDequeBenchmark { + + /** Initial size of array. */ + @Param(Array("0", "1000")) // Zero-length is empty array + var length: Int = _ + + var arr: ArrayDeque[Int] = _ + var fraction: Int = _ + var near: Int = _ + var chunk: Int = _ + var tail: Int = _ + var tailChunk: Int = _ + var nearTail: Int = _ + var nearTailChunk: Int = _ + + /** Number of values to add, insert, prepend, access, ... 
*/ + @Param(Array("1000")) + var range: Int = _ + + var rangeArr: Array[Int] = _ + + @Setup(Level.Trial) + def initRange: Unit = { + rangeArr = (1 to range.toInt).toArray + } + + @Setup(Level.Invocation) + def initArray: Unit = { + arr = ArrayDeque.range(0, length) + fraction = 20 + near = length / fraction + chunk = Math.min(arr.size, fraction) + tail = Math.max(0, arr.size - 1) + tailChunk = Math.max(0, tail - chunk) + nearTail = Math.max(0, tail - near) + nearTailChunk = Math.max(0, nearTail - chunk) + } + + @Benchmark + def addAll(bh: Blackhole): Unit = { + bh.consume(arr ++= rangeArr) + } + + @Benchmark + def prependAll(bh: Blackhole): Unit = { + bh.consume(arr.++=:(rangeArr)) + } + + @Benchmark + def removeFromHead(bh: Blackhole): Unit = { + arr.remove(0, chunk) + bh.consume(arr) + } + + @Benchmark + def removeFromTail(bh: Blackhole): Unit = { + arr.remove(tailChunk, chunk) + bh.consume(arr) + } + + @Benchmark + def removeNearHead(bh: Blackhole): Unit = { + arr.remove(near, chunk) + bh.consume(arr) + } + + @Benchmark + def removeNearTail(bh: Blackhole): Unit = { + arr.remove(nearTailChunk, chunk) + bh.consume(arr) + } + + @Benchmark + def add(bh: Blackhole): Unit = { + rangeArr.foreach { + arr += _ + } + bh.consume(arr) + } + + @Benchmark + def prepend(bh: Blackhole): Unit = { + rangeArr.foreach { + arr.+=:(_) + } + bh.consume(arr) + } + + @Benchmark + def insertAllNearHead(bh: Blackhole): Unit = { + bh.consume(arr.insertAll(near, rangeArr)) + } + + @Benchmark + def insertAllHead(bh: Blackhole): Unit = { + bh.consume(arr.insertAll(0, rangeArr)) + } + + @Benchmark + def insertAllNearTail(bh: Blackhole): Unit = { + bh.consume(arr.insertAll(nearTail, rangeArr)) + } + + @Benchmark + def insertAllTail(bh: Blackhole): Unit = { + bh.consume(arr.insertAll(tail, rangeArr)) + } + + @Benchmark + def reverse(bh: Blackhole): Unit = { + bh.consume(arr.reverse) + } + + @Benchmark + def _get(bh: Blackhole): Unit = { + rangeArr.foreach { i => + if (arr.isDefinedAt(i)) + 
bh.consume(arr(i)) + } + } + + @Benchmark + def toArray(bh: Blackhole): Unit = { + bh.consume(arr.toArray) + } + + @Benchmark + def clear(bh: Blackhole): Unit = { + arr.clear() + bh.consume(arr) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayIteratorBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayIteratorBenchmark.scala new file mode 100644 index 000000000000..ee66997ef080 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayIteratorBenchmark.scala @@ -0,0 +1,47 @@ +package scala.collection.mutable + +import java.util.concurrent.TimeUnit +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@Fork(1) +@Threads(1) +@Warmup(iterations = 4, time = 4, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 5, time = 5, timeUnit = TimeUnit.SECONDS) +@State(Scope.Benchmark) +class ArrayIteratorBenchmarks { + + @Param(Array( + "0", + "100", + /*"200", + "300", + "400", + "500", + "600", + "700", + "800", + "900",*/ + "1000", + "10000", + "100000", + "1000000", + "10000000", + "100000000", + )) + var valueCount: Int = _ + + var values: Array[Int] = _ + + @Setup + def setValues(): Unit = { + val random: util.Random = new util.Random(0) + values = Array.fill(valueCount)(random.nextInt()) + } + + @Benchmark + def arrayIterator(blackhole: Blackhole): Unit = { + val i = values.iterator + while (i.hasNext) blackhole.consume(i.next()) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala index 0e0bb9f185ee..bab4f975303c 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala @@ -1,10 +1,13 @@ package scala.collection.mutable import java.util.concurrent.TimeUnit +import java.util.Arrays import 
org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole +import scala.reflect.ClassTag + @BenchmarkMode(Array(Mode.AverageTime)) @Fork(2) @Threads(1) @@ -14,17 +17,49 @@ import org.openjdk.jmh.infra.Blackhole @State(Scope.Benchmark) class ArrayOpsBenchmark { - @Param(Array("10", "1000", "10000")) + @Param(Array("0", "1", "10", "1000", "10000")) var size: Int = _ var integers: List[Int] = _ var strings: List[String] = _ - + var integersA: Array[Int] = _ + var stringsA: Array[String] = _ + var intIntA: Array[Array[Int]] = _ @Setup(Level.Trial) def initNumbers: Unit = { integers = (1 to size).toList strings = integers.map(_.toString) + integersA = integers.toArray + stringsA = strings.toArray + intIntA = integersA.map { x => integersA } + } + + @Benchmark def foreachInt(bh: Blackhole): Unit = { + var i = 0 + integersA.foreach { x => i += x } + bh.consume(i) + } + + @Benchmark def foreachString(bh: Blackhole): Unit = { + var i = 0 + stringsA.foreach { x => i += x.length } + bh.consume(i) } + @Benchmark def flattenInt(bh: Blackhole): Unit = + bh.consume(intIntA.flatten) + + @Benchmark def mapIntInt(bh: Blackhole): Unit = + bh.consume(integersA.map(x => 0)) + + @Benchmark def mapIntString(bh: Blackhole): Unit = + bh.consume(integersA.map(x => "")) + + @Benchmark def mapStringString(bh: Blackhole): Unit = + bh.consume(stringsA.map(x => "")) + + @Benchmark def mapStringInt(bh: Blackhole): Unit = + bh.consume(stringsA.map(x => 0)) + @Benchmark def appendInteger(bh: Blackhole): Unit = { var arr = Array.empty[Int] integers foreach { i => @@ -56,4 +91,57 @@ class ArrayOpsBenchmark { } bh.consume(arr) } + + @Benchmark def foldLeftSum(bh: Blackhole): Unit = { + bh.consume(integersA.foldLeft(0){ (z,n) => z + n }) + } + + @Benchmark def foldSum(bh: Blackhole): Unit = { + bh.consume(integersA.fold(0){ (a,b) => a + b }) + } + + @Benchmark def sortedStringOld(bh: Blackhole): Unit = + bh.consume(oldSorted(stringsA)) + + @Benchmark def sortedIntOld(bh: Blackhole): Unit = + 
bh.consume(oldSorted(integersA)) + + @Benchmark def sortedIntCustomOld(bh: Blackhole): Unit = + bh.consume(oldSorted(integersA)(Ordering.Int.reverse)) + + @Benchmark def sortedStringNew(bh: Blackhole): Unit = + bh.consume(stringsA.sorted) + + @Benchmark def sortedIntNew(bh: Blackhole): Unit = + bh.consume(integersA.sorted) + + @Benchmark def sortedIntCustomNew(bh: Blackhole): Unit = + bh.consume(integersA.sorted(Ordering.Int.reverse)) + + def oldSorted[A, B >: A](xs: Array[A])(implicit ord: Ordering[B]): Array[A] = { + implicit def ct = ClassTag[A](xs.getClass.getComponentType) + val len = xs.length + if(xs.getClass.getComponentType.isPrimitive && len > 1) { + // need to copy into a boxed representation to use Java's Arrays.sort + val a = new Array[AnyRef](len) + var i = 0 + while(i < len) { + a(i) = xs(i).asInstanceOf[AnyRef] + i += 1 + } + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + val res = new Array[A](len) + i = 0 + while(i < len) { + res(i) = a(i).asInstanceOf[A] + i += 1 + } + res + } else { + val copy = xs.slice(0, len) + if(len > 1) + Arrays.sort(copy.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + copy + } + } } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/BitSetBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/BitSetBenchmark.scala new file mode 100644 index 000000000000..8c06b050c7eb --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/BitSetBenchmark.scala @@ -0,0 +1,40 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import java.util.concurrent.TimeUnit + +import scala.collection.mutable + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(1) +@Threads(1) +@Warmup(iterations = 6) +@Measurement(iterations = 6) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BitSetBenchmark { + @Param(Array("0", "3", "5", "10", "1000", "1000000")) + var size: Int = _ + + val bitSet = (1 to 
1000).to(mutable.BitSet) + + var bs: mutable.BitSet = _ + + var range: Range = _ + + val clones: Array[mutable.BitSet] = new Array(100) + + @Setup(Level.Iteration) def initializeRange(): Unit = { + range = (10 to (10 + size)) + } + @Setup(Level.Invocation) def initializeClones(): Unit = { + (0 until 100) foreach (i => clones(i) = bitSet.clone()) + } + + @Benchmark def addAll(bh: Blackhole): Unit = { + clones.foreach{ c => + bh consume c.addAll(range) + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/BitSetIteratorBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/BitSetIteratorBenchmark.scala new file mode 100644 index 000000000000..c865ac51639d --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/BitSetIteratorBenchmark.scala @@ -0,0 +1,33 @@ +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ + +import scala.collection.mutable + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(1) +@Threads(1) +@Warmup(iterations = 6) +@Measurement(iterations = 6) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BitSetIteratorBenchmark { + + @Param(Array("0", "1", "3", "15", "63", "255")) + var spacing: Int = _ + + var bs: mutable.BitSet = _ + + @Setup(Level.Iteration) def initializeRange(): Unit = { + bs = mutable.BitSet(0 until 1000 by (spacing + 1): _*) + } + + @Benchmark def iterateAll(): Unit = { + var sum = 0 + val it = bs.iterator + while (it.hasNext) sum += it.next() + } + +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/BuilderBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/BuilderBenchmark.scala new file mode 100644 index 000000000000..86ff396d7990 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/BuilderBenchmark.scala @@ -0,0 +1,55 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ 
+import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit +import java.util.{ HashSet => JHashSet } + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BuilderBenchmark { + + import collection.mutable + + var listBuffer: mutable.ListBuffer[String] = _ + var arrayBuffer: mutable.ArrayBuffer[String] = _ + var queue: mutable.Queue[String] = _ + var sinks: Array[mutable.Buffer[String]] = _ + var source1: collection.Seq[String] = _ + var source2: collection.Seq[String] = _ + var source3: collection.Seq[String] = _ + var sources: Array[collection.Seq[String]] = _ + + @Setup(Level.Iteration) def init: Unit = { + listBuffer = new mutable.ListBuffer + arrayBuffer = new mutable.ArrayBuffer + queue = new collection.mutable.Queue + source1 = (1 to 1000).map(_.toString).toList + source2 = (1 to 1000).map(_.toString).toArray[String] + source3 = (1 to 1000).map(_.toString).toVector + sources = scala.util.Random.shuffle(List.fill(10)(List(source1, source2, source2, source3)).flatten).toArray + sinks = Array[Buffer[String]](listBuffer, arrayBuffer, listBuffer, queue, listBuffer, arrayBuffer, listBuffer, listBuffer) + } + + @Benchmark def addAllPolymorphic(bh: Blackhole): Unit = { + var i, j = 0 + val sources = this.sources + val sinks = this.sinks + while (i < sinks.length) { + sinks(i).clear() + while (j < sources.length) { + sinks(i).addAll(sources(j)) + j += 1 + } + bh.consume(sinks(i)) + i += 1 + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/CollisionProofHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/CollisionProofHashMapBenchmark.scala new file mode 100644 index 000000000000..19cd708ad93c --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/CollisionProofHashMapBenchmark.scala @@ -0,0 +1,205 @@ +package 
scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit +import java.util.{ HashMap => JHashMap } + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class CollisionProofHashMapBenchmark { + @Param(Array(/*"0", "1", "10",*/ "100", "1000", "10000")) + var size: Int = _ + + class Collider(val x: String, val h: Int) extends Comparable[Collider] { + override def hashCode: Int = h + override def equals(o: Any): Boolean = o match { + case o: Collider => x == o.x + case _ => false + } + def compareTo(o: Collider): Int = x.compareTo(o.x) + } + + implicit val colliderOrdering: Ordering[Collider] = Ordering.String.on[Collider](_.x) + + var existingKeys: Array[String] = _ + var existingKVs: ArrayBuffer[(String, String)] = _ + var missingKeys: Array[String] = _ + var oa1: CollisionProofHashMap[String, String] = _ + var m1: HashMap[String, String] = _ + var j1: JHashMap[String, String] = new JHashMap[String, String] + var colliders: Array[Collider] = _ + var dos: Array[Collider] = _ + + @Setup(Level.Trial) def init: Unit = { + existingKeys = (0 until size).map(_.toString).toArray + existingKVs = ArrayBuffer.from(existingKeys.iterator.map(k => (k, k))) + missingKeys = (size until (2 * size.max(100))).toArray.map(_.toString) + oa1 = CollisionProofHashMap.from(existingKVs) + m1 = HashMap.from(existingKVs) + m1.foreach { case (k, v) => j1.put(k, v) } + colliders = existingKeys.map(k => new Collider(k, k.hashCode & 0x1111)) + dos = existingKeys.map(k => new Collider(k, 42)) + } + + @Benchmark def oaFillRegular(bh: Blackhole): Unit = { + val h = new CollisionProofHashMap[String, String] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def oaFillColliding(bh: Blackhole): Unit = { + 
val h = new CollisionProofHashMap[Collider, Collider] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def oaFillDoS(bh: Blackhole): Unit = { + val h = new CollisionProofHashMap[Collider, Collider] + dos.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def oaBuild(bh: Blackhole): Unit = + bh.consume(CollisionProofHashMap.from(existingKVs)) + + @Benchmark def oaIterateKeys(bh: Blackhole): Unit = { + val it = oa1.keysIterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def oaIterateEntries(bh: Blackhole): Unit = { + val it = oa1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def oaGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(oa1.apply(existingKeys(i))) + i += 1 + } + } + + @Benchmark def oaGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(oa1.get(missingKeys(i))) + i += 1 + } + } + + @Benchmark def oaClearRemove(bh: Blackhole): Unit = { + val m = CollisionProofHashMap.from(oa1) + val it = oa1.keysIterator + while(it.hasNext) m.remove(it.next()) + assert(m.isEmpty) + bh.consume(m) + } + + @Benchmark def oaClearSubtractOne(bh: Blackhole): Unit = { + val m = CollisionProofHashMap.from(oa1) + val it = oa1.keysIterator + while(it.hasNext) m.subtractOne(it.next()) + assert(m.isEmpty) + bh.consume(m) + } + + @Benchmark def hmFillRegular(bh: Blackhole): Unit = { + val h = new HashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def hmFillColliding(bh: Blackhole): Unit = { + val h = new HashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def hmBuild(bh: Blackhole): Unit = + bh.consume(HashMap.from(existingKVs)) + + @Benchmark def hmIterateKeys(bh: Blackhole): Unit = { + val it = m1.keysIterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def hmIterateEntries(bh: Blackhole): Unit = { + val it = m1.iterator + while(it.hasNext) 
bh.consume(it.next()) + } + + @Benchmark def hmGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(m1.apply(existingKeys(i))) + i += 1 + } + } + + @Benchmark def hmGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(m1.get(missingKeys(i))) + i += 1 + } + } + + @Benchmark def javaFillRegular(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaFillColliding(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaFillDoS(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any] + dos.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaBuild(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaIterateKeys(bh: Blackhole): Unit = { + val it = j1.keySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javaIterateEntries(bh: Blackhole): Unit = { + val it = j1.entrySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javaGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j1.get(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javaGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(j1.get(missingKeys(i))) + i += 1 + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ConstructionBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ConstructionBenchmark.scala new file mode 100644 index 000000000000..4771f8efc829 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ConstructionBenchmark.scala @@ -0,0 +1,49 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and 
Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ConstructionBenchmark { + @Param(Array("0", "1", "10", "100")) + var size: Int = _ + + var values: Range = _ + + @Setup(Level.Trial) def init(): Unit = { + values = 1 to size + } + + @Benchmark def listBuffer_new: Any = { + new ListBuffer ++= values + } + + @Benchmark def listBuffer_from: Any = { + ListBuffer from values + } + + @Benchmark def listBuffer_to: Any = { + values to ListBuffer + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala index 3f01d154e934..78c0ac8fff2f 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala @@ -6,8 +6,6 @@ import org.openjdk.jmh.runner.IterationType import benchmark._ import java.util.concurrent.TimeUnit -import scala.collection.mutable - @BenchmarkMode(Array(Mode.AverageTime)) @Fork(2) @Threads(1) @@ -18,31 +16,38 @@ import scala.collection.mutable class HashMapBenchmark { @Param(Array("10", "100", "1000")) var size: Int = _ + @Param(Array("true")) + var useMissingValues = true + @Param(Array("false")) + var stringsOnly = false var existingKeys: Array[Any] = _ var missingKeys: Array[Any] = _ @Setup(Level.Trial) def initKeys(): Unit = { existingKeys = (0 to size).map(i => (i % 4) match { + case _ if stringsOnly => 
i.toString case 0 => i.toString case 1 => i.toChar case 2 => i.toDouble case 3 => i.toInt }).toArray - missingKeys = (size to 2 * size).toArray + missingKeys = (size to 2 * size).toArray.map(_.toString) } - var map = new mutable.HashMap[Any, Any] - - @Setup(Level.Invocation) def initializeMutable = existingKeys.foreach(v => map.put(v, v)) + var map: collection.mutable.HashMap[Any, Any] = null - @TearDown(Level.Invocation) def tearDown = map.clear() + @Setup(Level.Trial) def initialize = { + map = collection.mutable.HashMap(existingKeys.map(x => (x, x)) : _*) + } - @Benchmark def getOrElseUpdate(bh: Blackhole): Unit = { + @Benchmark def contains(bh: Blackhole): Unit = { var i = 0; while (i < size) { - bh.consume(map.getOrElseUpdate(existingKeys(i), -1)) - bh.consume(map.getOrElseUpdate(missingKeys(i), -1)) + bh.consume(map.contains(existingKeys(i))) + if (useMissingValues) { + bh.consume(map.contains(missingKeys(i))) + } i += 1 } } @@ -50,21 +55,48 @@ class HashMapBenchmark { @Benchmark def get(bh: Blackhole): Unit = { var i = 0; while (i < size) { - bh.consume(map.get(existingKeys(i), -1)) - bh.consume(map.get(missingKeys(i), -1)) + bh.consume(map.get(existingKeys(i))) + if (useMissingValues) { + bh.consume(map.get(missingKeys(i))) + } i += 1 } } - @Benchmark def put(bh: Blackhole): Any = { - var map = new mutable.HashMap[Any, Any] + @Benchmark def getOrElse(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + bh.consume(map.getOrElse(existingKeys(i), "")) + if (useMissingValues) { + bh.consume(map.getOrElse(missingKeys(i), "")) + } + i += 1 + } + } + @Benchmark def getOrElseUpdate(bh: Blackhole): Unit = { var i = 0; while (i < size) { - map.put(existingKeys(i), i) + bh.consume(map.getOrElseUpdate(existingKeys(i), "")) + if (useMissingValues) { + bh.consume(map.getOrElse(missingKeys(i), "")) + } i += 1 } + } + + @Benchmark def updateWith(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + val res = i % 4 match { + case 0 => 
map.updateWith(existingKeys(i % existingKeys.length))(_ => None) + case 1 => map.updateWith(existingKeys(i % existingKeys.length))(_ => Some(existingKeys(i % existingKeys.length))) - map + case 2 => map.updateWith(missingKeys(i % missingKeys.length))(_ => None) + case 3 => map.updateWith(missingKeys(i % missingKeys.length))(_ => Some(existingKeys(i % existingKeys.length))) + } + bh.consume(res) + i += 1 + } } } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark2.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark2.scala new file mode 100644 index 000000000000..2f8aa7d686e4 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark2.scala @@ -0,0 +1,175 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit +import java.util.{ HashMap => JHashMap } + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class HashMapBenchmark2 { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + @Param(Array("true")) + var stringsOnly = false + + class Collider(val x: Any, val h: Int) { + override def hashCode: Int = h + override def equals(o: Any): Boolean = o match { + case o: Collider => x == o.x + case _ => false + } + } + + var existingKeys: Array[Any] = _ + var existingKVs: ArrayBuffer[(Any, Any)] = _ + var missingKeys: Array[Any] = _ + var s1: HashSet[Any] = _ + var m1: HashMap[Any, Any] = _ + var j1: JHashMap[Any, Any] = new JHashMap[Any, Any] + var colliders: Array[Collider] = _ + + @Setup(Level.Trial) def init: Unit = { + existingKeys = (0 until size).map(i => (i % 4) match { + case _ if stringsOnly => i.toString + case 0 => i.toString + case 1 => i.toChar + case 
2 => i.toDouble + case 3 => i.toInt + }).toArray + existingKVs = ArrayBuffer.from(existingKeys.iterator.map(k => (k, k))) + missingKeys = (size until (2 * size.max(100))).toArray.map(_.toString) + s1 = HashSet.from(existingKeys) + m1 = HashMap.from(existingKVs) + m1.foreach { case (k, v) => j1.put(k, v) } + colliders = existingKeys.map(k => new Collider(k, k.hashCode & 0x1111)) + } + + @Benchmark def hsFillRegular(bh: Blackhole): Unit = { + val h = new HashSet[Any] + existingKeys.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def hsFillColliding(bh: Blackhole): Unit = { + val h = new HashSet[Any] + colliders.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def hsBuild(bh: Blackhole): Unit = + bh.consume(HashSet.from(existingKeys)) + + @Benchmark def hsIterate(bh: Blackhole): Unit = { + val it = s1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def hsContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(s1.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def hsContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(s1.contains(missingKeys(i))) + i += 1 + } + } + + @Benchmark def hmFillRegular(bh: Blackhole): Unit = { + val h = new HashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def hmFillColliding(bh: Blackhole): Unit = { + val h = new HashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def hmBuild(bh: Blackhole): Unit = + bh.consume(HashMap.from(existingKVs)) + + @Benchmark def hmIterateKeys(bh: Blackhole): Unit = { + val it = m1.keysIterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def hmIterateEntries(bh: Blackhole): Unit = { + val it = m1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def hmGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(m1.apply(existingKeys(i))) + i += 1 + } 
+ } + + @Benchmark def hmGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(m1.get(missingKeys(i))) + i += 1 + } + } + + @Benchmark def javaFillRegular(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaFillColliding(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaBuild(bh: Blackhole): Unit = { + val h = new JHashMap[Any, Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javaIterateKeys(bh: Blackhole): Unit = { + val it = j1.keySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javaIterateEntries(bh: Blackhole): Unit = { + val it = j1.entrySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javaGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j1.get(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javaGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(j1.get(missingKeys(i))) + i += 1 + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/HashSetBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/HashSetBenchmark.scala new file mode 100644 index 000000000000..28675fe16a69 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/HashSetBenchmark.scala @@ -0,0 +1,156 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit +import java.util.{ HashSet => JHashSet } + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) 
+@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class HashSetBenchmark { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + @Param(Array("true")) + var stringsOnly = false + + class Collider(val x: Any, val h: Int) { + override def hashCode: Int = h + override def equals(o: Any): Boolean = o match { + case o: Collider => x == o.x + case _ => false + } + } + + var existingKeys: Array[Any] = _ + var missingKeys: Array[Any] = _ + var s1: HashSet[Any] = _ + var j1: JHashSet[Any] = new JHashSet[Any] + var m1: HashMap[Any, Null] = _ + var colliders: Array[Collider] = _ + + @Setup(Level.Trial) def init: Unit = { + existingKeys = (0 until size).map(i => (i % 4) match { + case _ if stringsOnly => i.toString + case 0 => i.toString + case 1 => i.toChar + case 2 => i.toDouble + case 3 => i.toInt + }).toArray + missingKeys = (size until (2 * size.max(100))).toArray.map(_.toString) + s1 = HashSet.from(existingKeys) + m1 = HashMap.from(existingKeys.map(k => (k, null))) + s1.foreach(j1.add) + colliders = existingKeys.map(k => new Collider(k, k.hashCode & 0x1111)) + } + + @Benchmark def hsFillRegular(bh: Blackhole): Unit = { + val h = new HashSet[Any] + existingKeys.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def hsFillColliding(bh: Blackhole): Unit = { + val h = new HashSet[Any] + colliders.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def hsBuild(bh: Blackhole): Unit = + bh.consume(HashSet.from(existingKeys)) + + @Benchmark def hsIterate(bh: Blackhole): Unit = { + val it = s1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def hsContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(s1.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def hsContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(s1.contains(missingKeys(i))) + i += 1 + } + } + + @Benchmark def javaFillRegular(bh: Blackhole): Unit = { + val 
h = new JHashSet[Any] + existingKeys.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javaFillColliding(bh: Blackhole): Unit = { + val h = new JHashSet[Any] + colliders.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javaBuild(bh: Blackhole): Unit = { + val h = new JHashSet[Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javaIterate(bh: Blackhole): Unit = { + val it = j1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javaContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j1.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javaContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(j1.contains(missingKeys(i))) + i += 1 + } + } + + /* + @Benchmark def mapFill(bh: Blackhole): Unit = { + val h = new HashMap[Any, Null] + existingKeys.foreach(k => h.put(k, null)) + bh.consume(h) + } + + @Benchmark def mapIterate(bh: Blackhole): Unit = { + val it = m1.keysIterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def mapContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(m1.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def mapContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(m1.contains(missingKeys(i))) + i += 1 + } + } + */ +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark.scala new file mode 100644 index 000000000000..65612a67c561 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark.scala @@ -0,0 +1,57 @@ +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + 
+@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class LinkedHashMapBenchmark { + @Param(Array("10", "100", "1000")) + var size: Int = _ + @Param(Array("true")) + var useMissingValues = true + @Param(Array("false")) + var stringsOnly = false + + var existingKeys: Array[Any] = _ + var missingKeys: Array[Any] = _ + + @Setup(Level.Trial) def initKeys(): Unit = { + existingKeys = (0 to size).map(i => (i % 4) match { + case _ if stringsOnly => i.toString + case 0 => i.toString + case 1 => i.toChar + case 2 => i.toDouble + case 3 => i.toInt + }).toArray + missingKeys = (size to 2 * size).toArray.map(_.toString) + } + + var map: collection.mutable.LinkedHashMap[Any, Any] = null + + @Setup(Level.Trial) def initialize = { + map = collection.mutable.LinkedHashMap(existingKeys.map(x => (x, x)) : _*) + } + + @Benchmark def updateWith(bh: Blackhole): Unit = { + var i = 0; + while (i < size) { + val res = i % 4 match { + case 0 => map.updateWith(existingKeys(i % existingKeys.length))(_ => None) + case 1 => map.updateWith(existingKeys(i % existingKeys.length))(_ => Some(existingKeys(i % existingKeys.length))) + + case 2 => map.updateWith(missingKeys(i % missingKeys.length))(_ => None) + case 3 => map.updateWith(missingKeys(i % missingKeys.length))(_ => Some(existingKeys(i % existingKeys.length))) + } + bh.consume(res) + i += 1 + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark2.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark2.scala new file mode 100644 index 000000000000..6725141f3b41 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark2.scala @@ -0,0 +1,216 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType 
+import benchmark._ +import java.util.concurrent.TimeUnit +import java.util.{ LinkedHashMap => JLHashMap, LinkedHashSet => JLHashSet } + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class LinkedHashMapBenchmark2 { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + @Param(Array("true")) + var stringsOnly = false + + class Collider(val x: Any, val h: Int) { + override def hashCode: Int = h + override def equals(o: Any): Boolean = o match { + case o: Collider => x == o.x + case _ => false + } + } + + var existingKeys: Array[Any] = _ + var existingKVs: ArrayBuffer[(Any, Any)] = _ + var missingKeys: Array[Any] = _ + var s1: LinkedHashSet[Any] = _ + var m1: LinkedHashMap[Any, Any] = _ + var j1: JLHashMap[Any, Any] = new JLHashMap[Any, Any] + var j2: JLHashSet[Any] = new JLHashSet[Any] + var colliders: Array[Collider] = _ + + @Setup(Level.Trial) def init: Unit = { + existingKeys = (0 until size).map(i => (i % 4) match { + case _ if stringsOnly => i.toString + case 0 => i.toString + case 1 => i.toChar + case 2 => i.toDouble + case 3 => i.toInt + }).toArray + existingKVs = ArrayBuffer.from(existingKeys.iterator.map(k => (k, k))) + missingKeys = (size until (2 * size.max(100))).toArray.map(_.toString) + s1 = LinkedHashSet.from(existingKeys) + m1 = LinkedHashMap.from(existingKVs) + m1.foreach { case (k, v) => j1.put(k, v) } + s1.foreach({case k => j2.add(k)}) + colliders = existingKeys.map(k => new Collider(k, k.hashCode & 0x1111)) + } + + @Benchmark def lhsFillRegular(bh: Blackhole): Unit = { + val h = new LinkedHashSet[Any] + existingKeys.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def lhsFillColliding(bh: Blackhole): Unit = { + val h = new LinkedHashSet[Any] + colliders.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def lhsBuild(bh: Blackhole): Unit = + 
bh.consume(LinkedHashSet.from(existingKeys)) + + @Benchmark def lhsIterate(bh: Blackhole): Unit = { + val it = s1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def lhsContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(s1.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def lhsContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(s1.contains(missingKeys(i))) + i += 1 + } + } + + @Benchmark def lhmFillRegular(bh: Blackhole): Unit = { + val h = new LinkedHashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def lhmFillColliding(bh: Blackhole): Unit = { + val h = new LinkedHashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def lhmBuild(bh: Blackhole): Unit = + bh.consume(LinkedHashMap.from(existingKVs)) + + @Benchmark def lhmIterateKeys(bh: Blackhole): Unit = { + val it = m1.keysIterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def lhmIterateEntries(bh: Blackhole): Unit = { + val it = m1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def lhmGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(m1.apply(existingKeys(i))) + i += 1 + } + } + + @Benchmark def lhmGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(m1.get(missingKeys(i))) + i += 1 + } + } + + @Benchmark def javalhmFillRegular(bh: Blackhole): Unit = { + val h = new JLHashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javalhmFillColliding(bh: Blackhole): Unit = { + val h = new JLHashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javalhmBuild(bh: Blackhole): Unit = { + val h = new JLHashMap[Any, Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def 
javalhmIterateKeys(bh: Blackhole): Unit = { + val it = j1.keySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javalhmIterateEntries(bh: Blackhole): Unit = { + val it = j1.entrySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javalhmGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j1.get(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javalhmGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(j1.get(missingKeys(i))) + i += 1 + } + } + @Benchmark def javalhsFillRegular(bh: Blackhole): Unit = { + val h = new JLHashSet[Any] + existingKeys.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javalhsFillColliding(bh: Blackhole): Unit = { + val h = new JLHashSet[Any] + colliders.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javalhsBuild(bh: Blackhole): Unit = { + val h = new JLHashSet[Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javalhsIterate(bh: Blackhole): Unit = { + val it = j2.iterator() + while(it.hasNext) bh.consume(it.next()) + } + + + @Benchmark def javalhsContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j2.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javalhsContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(j2.contains(missingKeys(i))) + i += 1 + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ListBufferBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ListBufferBenchmark.scala new file mode 100644 index 000000000000..a2e0999c939f --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ListBufferBenchmark.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ListBufferBenchmark { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + + var ref: ListBuffer[Int] = _ + + @Setup(Level.Trial) def init: Unit = { + ref = new ListBuffer + for(i <- 0 until size) ref += i + } + + @Benchmark def filterInPlace(bh: Blackhole): Unit = { + val b = ref.clone() + b.filterInPlace(_ % 2 == 0) + bh.consume(b) + } + + @Benchmark def update(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while(i < size) { + b.update(i, -1) + i += 2 + } + bh.consume(b) + } + + @Benchmark def remove1(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while(i < size/2) { + b.remove(i) + i += 2 + } + bh.consume(b) + } + + @Benchmark def remove2(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while(i < size/4) { + b.remove(i, 2) + i += 2 + } + bh.consume(b) + } + + @Benchmark def insert(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while(i < size) { + b.insert(i, 0) + i += 2 + } + bh.consume(b) + } + + @Benchmark def insertAll(bh: Blackhole): Unit = { + val b = ref.clone() + val seq = Seq(0,0) + var i = 0 + while(i < size/2) { + b.insertAll(i, seq) + i += 4 + } + bh.consume(b) + } + + @Benchmark def flatMapInPlace1(bh: Blackhole): Unit = { + val b = ref.clone() + val seq = Seq(0,0) + b.flatMapInPlace { _ => seq } + bh.consume(b) + } + + @Benchmark def iteratorA(bh: Blackhole): Unit = { + val b = ref.clone() + var n = 0 + for (x <- 
b.iterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def iteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.iterator.toVector) + bh.consume(b) + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index 817b3ebda0f8..ea04c43425a7 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -57,7 +57,7 @@ private object OpenHashMapBenchmark { var maps: Array[OpenHashMap[K,Int]] = null @Setup - def threadSetup(params: BenchmarkParams) { + def threadSetup(params: BenchmarkParams): Unit = { size = params.getParam("size").toInt val n = math.ceil(minNanosPerInvocation / (nanosPerPut * size)).toInt _mapEntries = size * n @@ -66,12 +66,12 @@ private object OpenHashMapBenchmark { } @Setup(Level.Iteration) - def iterationSetup { + def iterationSetup: Unit = { _operations = 0 } @Setup(Level.Invocation) - def setup(params: IterationParams) { + def setup(params: IterationParams): Unit = { for (i <- 0 until maps.length) maps(i) = new OpenHashMap[K,Int](size) if (params.getType == IterationType.MEASUREMENT) { @@ -81,7 +81,7 @@ private object OpenHashMapBenchmark { } @TearDown(Level.Iteration) - def iterationTeardown(params: IterationParams) { + def iterationTeardown(params: IterationParams): Unit = { if (params.getType == IterationType.MEASUREMENT) { // limit to smaller cases to avoid OOM _memory = @@ -108,7 +108,7 @@ private object OpenHashMapBenchmark { /** Load the map with keys from `1` to `size`. 
*/ @Setup - def setup(params: BenchmarkParams) { + def setup(params: BenchmarkParams): Unit = { val size = params.getParam("size").toInt _keys = keyBuilder.build(size) put(map, keys, 0, size) @@ -133,7 +133,7 @@ private object OpenHashMapBenchmark { /** Load the map with keys from `1` to `size`, removing half of them. */ @Setup - def setup(params: BenchmarkParams) { + def setup(params: BenchmarkParams): Unit = { val size = params.getParam("size").toInt _keys = keyBuilder.build(size) put_remove(map, keys) @@ -172,7 +172,7 @@ private object OpenHashMapBenchmark { * @param from lowest index in the range of keys to add * @param to highest index in the range of keys to add, plus one */ - private[this] def put[K](map: OpenHashMap[K,Int], keys: KeySeq[K], from: Int, to: Int) { + private[this] def put[K](map: OpenHashMap[K,Int], keys: KeySeq[K], from: Int, to: Int): Unit = { var i = from while (i < to) { // using a `for` expression instead adds significant overhead map.put(keys(i), i) @@ -190,7 +190,7 @@ private object OpenHashMapBenchmark { * * @param keys list of keys to use */ - private def put_remove[K](map: OpenHashMap[K,Int], keys: KeySeq[K]) { + private def put_remove[K](map: OpenHashMap[K,Int], keys: KeySeq[K]): Unit = { val blocks = 25 // should be a non-trivial factor of `size` val size = keys.size val blockSize: Int = size / blocks @@ -241,7 +241,7 @@ class OpenHashMapBenchmark { /** Test putting elements to a map of `Int` to `Int`. */ @Benchmark - def put_Int(state: IntBulkPutState) { + def put_Int(state: IntBulkPutState): Unit = { var i = 0 while (i < state.maps.length) { put(state.maps(i), state.keys) @@ -251,7 +251,7 @@ class OpenHashMapBenchmark { /** Test putting and removing elements to a growing map of `Int` to `Int`. 
*/ @Benchmark - def put_remove_Int(state: IntBulkPutState) { + def put_remove_Int(state: IntBulkPutState): Unit = { var i = 0 while (i < state.maps.length) { put_remove(state.maps(i), state.keys) @@ -276,7 +276,7 @@ class OpenHashMapBenchmark { /** Test putting elements to a map of `AnyRef` to `Int`. */ @Benchmark - def put_AnyRef(state: AnyRefBulkPutState) { + def put_AnyRef(state: AnyRefBulkPutState): Unit = { var i = 0 while (i < state.maps.length) { put(state.maps(i), state.keys) @@ -286,7 +286,7 @@ class OpenHashMapBenchmark { /** Test putting and removing elements to a growing map of `AnyRef` to `Int`. */ @Benchmark - def put_remove_AnyRef(state: AnyRefBulkPutState) { + def put_remove_AnyRef(state: AnyRefBulkPutState): Unit = { var i = 0 while (i < state.maps.length) { put_remove(state.maps(i), state.keys) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala index b14b733a8128..23233933d1d9 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala @@ -46,10 +46,9 @@ object OpenHashMapRunner extends JmhRunner { /** Return the statistics of the given result as a string. 
*/ private[this] def stats(r: Result[_]) = r.getScore + " " + r.getStatistics.getStandardDeviation + def main(args: Array[String]): Unit = { + import scala.collection.JavaConverters._ - def main(args: Array[String]) { - import scala.collection.JavaConversions._ - val opts = new CommandLineOptions(args: _*) var builder = new OptionsBuilder().parent(opts).jvmArgsPrepend("-Xmx6000m") if (!opts.verbosity.hasValue) builder = builder.verbosity(VerboseMode.SILENT) @@ -72,7 +71,7 @@ object OpenHashMapRunner extends JmhRunner { def addToDataset(key: String, result: RunResult): Unit = datasetByName.getOrElseUpdate(key, SortedSet.empty(ordering)) += result - results.foreach { result => + results.asScala.foreach { result => addToDataset(result.label, result) // Create another data set for trials that track memory usage @@ -95,7 +94,7 @@ object OpenHashMapRunner extends JmhRunner { } } - private[this] def outputDataset(f: PrintWriter, label: String, dataset: Iterable[RunResult]) { + private[this] def outputDataset(f: PrintWriter, label: String, dataset: Iterable[RunResult]): Unit = { f.println(s"# [$label]") val isMemoryUsageDataset = label.endsWith(memoryDatasetQualifier) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/RedBlackTreeBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/RedBlackTreeBenchmark.scala new file mode 100644 index 000000000000..b6b6e36cc501 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/RedBlackTreeBenchmark.scala @@ -0,0 +1,87 @@ +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +import scala.util.Random + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class RedBlackTreeBenchmark { + + @Param(Array("0", "1", "10", "100", "1000", "10000")) + var size: 
Int = _ + + var nums: Range = _ + val rnd = new Random(0) + var set1: TreeSet[Int] = _ + var perm: Array[Int] = _ // repeatably pseudo-random permutation + //var map1: TreeMap[Int, Int] = _ + + @Setup(Level.Trial) def init: Unit = { + nums = 1 to size + set1 = TreeSet.from(nums) + perm = new Array[Int](size) + val rem = scala.collection.mutable.ArrayBuffer.from(nums) + perm = Array.fill(size)(rem.remove(rnd.nextInt(rem.size))) + assert(rem.size == 0) + assert(perm.sum == nums.sum) + //map1 = TreeMap.from(nums.map(i => (i, i))) + } + + @Benchmark + def build(bh: Blackhole): Unit = + bh.consume(TreeSet.from(nums)) + + @Benchmark + def buildRandom(bh: Blackhole): Unit = + bh.consume(TreeSet.from(perm)) + + @Benchmark + def iterator(bh: Blackhole): Unit = { + val it = set1.iterator + var res = 0 + while(it.hasNext) + res += it.next() + bh.consume(res) + } + + @Benchmark + def foreach(bh: Blackhole): Unit = { + var i = 0 + set1.foreach { x => i += x } + bh.consume(i) + } + + @Benchmark + def copy(bh: Blackhole): Unit = + bh.consume(TreeSet.from(set1)) + + @Benchmark + def copyDrain(bh: Blackhole): Unit = { + var s = TreeSet.from(set1) + perm.foreach(i => s.remove(i)) + bh.consume(s) + } + + /* + @Benchmark + def transformNone(bh: Blackhole): Unit = + bh.consume(map1.transform((k, v) => v)) + + @Benchmark + def transformAll(bh: Blackhole): Unit = + bh.consume(map1.transform((k, v) => v+1)) + + @Benchmark + def transformHalf(bh: Blackhole): Unit = + bh.consume(map1.transform((k, v) => if(k % 2 == 0) v else v+1)) + */ +} diff --git a/test/benchmarks/src/main/scala/scala/concurrent/FutureBenchmark.scala b/test/benchmarks/src/main/scala/scala/concurrent/FutureBenchmark.scala index 6de5ef85fec9..5cf7d3eba9c0 100644 --- a/test/benchmarks/src/main/scala/scala/concurrent/FutureBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/concurrent/FutureBenchmark.scala @@ -1,7 +1,7 @@ package scala.concurrent import scala.concurrent.duration._ -import java.util.concurrent.{ 
TimeUnit, Executor, Executors, ExecutorService, ForkJoinPool, CountDownLatch } +import java.util.concurrent.{ TimeUnit, Executor, ThreadPoolExecutor, ExecutorService, ForkJoinPool, CountDownLatch, LinkedBlockingQueue } import org.openjdk.jmh.infra.Blackhole import org.openjdk.jmh.annotations._ import scala.util.{ Try, Success, Failure } @@ -12,10 +12,10 @@ import scala.annotation.tailrec @OutputTimeUnit(TimeUnit.MILLISECONDS) @Warmup(iterations = 1000) @Measurement(iterations = 10000) -@Fork(value = 1, jvmArgsAppend = Array("-Xmx1G", "-Xms1G", "-ea", "-server", "-XX:+UseCompressedOops", "-XX:+AlwaysPreTouch", "-XX:+UseCondCardMark")) +@Fork(value = 1, jvmArgsAppend = Array("-Xmx1G", "-Xms1G", "-server", "-XX:+AggressiveOpts", "-XX:+UseCompressedOops", "-XX:+AlwaysPreTouch", "-XX:+UseCondCardMark")) @Threads(value = 1) abstract class AbstractBaseFutureBenchmark { - // fjp = ForkJoinPool, fix = FixedThreadPool, fie = FutureInternalExecutor, gbl = GlobalEC + // fjp = ForkJoinPool, fix = FixedThreadPool, fie = parasiticEC, gbl = GlobalEC @Param(Array[String]("fjp", "fix", "fie", "gbl")) final var pool: String = _ @@ -33,30 +33,40 @@ abstract class AbstractBaseFutureBenchmark { @Setup(Level.Trial) def startup: Unit = { - val e = pool match { + executionContext = pool match { case "fjp" => - val fjp = new ForkJoinPool(threads) + val fjp = new ForkJoinPool(threads) with ExecutionContext with BatchingExecutor { + final override def submitForExecution(runnable: Runnable): Unit = super[ForkJoinPool].execute(runnable) + final override def execute(runnable: Runnable): Unit = + if ((!runnable.isInstanceOf[impl.Promise.Transformation[_,_]] || runnable.asInstanceOf[impl.Promise.Transformation[_,_]].benefitsFromBatching) && runnable.isInstanceOf[Batchable]) + submitAsyncBatched(runnable) + else + submitForExecution(runnable) + override final def reportFailure(t: Throwable) = t.printStackTrace(System.err) + } executorService = fjp // we want to close this fjp case "fix" => - val 
fix = Executors.newFixedThreadPool(threads) + val fix = new ThreadPoolExecutor(threads, threads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable]()) with ExecutionContext with BatchingExecutor { + final override def submitForExecution(runnable: Runnable): Unit = super[ThreadPoolExecutor].execute(runnable) + final override def execute(runnable: Runnable): Unit = + if ((!runnable.isInstanceOf[impl.Promise.Transformation[_,_]] || runnable.asInstanceOf[impl.Promise.Transformation[_,_]].benefitsFromBatching) && runnable.isInstanceOf[Batchable]) + submitAsyncBatched(runnable) + else + submitForExecution(runnable) + override final def reportFailure(t: Throwable) = t.printStackTrace(System.err) + } executorService = fix // we want to close this fix case "gbl" => + // make sure we set the global ec to use the number of threads the bench wants + System.setProperty("scala.concurrent.context.minThreads", threads.toString) + System.setProperty("scala.concurrent.context.numThreads", threads.toString) + System.setProperty("scala.concurrent.context.maxThreads", threads.toString) ExecutionContext.global case "fie" => - scala.concurrent.Future.InternalCallbackExecutor.asInstanceOf[Executor] + scala.concurrent.ExecutionContext.parasitic } - - executionContext = - if (e.isInstanceOf[ExecutionContext]) e.asInstanceOf[ExecutionContext] - else { // TODO: may want to extend this in the implementations directly - new ExecutionContext with BatchingExecutor { - private[this] final val g = e - override final def unbatchedExecute(r: Runnable) = g.execute(r) - override final def reportFailure(t: Throwable) = t.printStackTrace(System.err) - } - } } @TearDown(Level.Trial) @@ -80,13 +90,8 @@ abstract class OpFutureBenchmark extends AbstractBaseFutureBenchmark { final val pre_f_p: Promise[Result] = Promise.fromTry(aFailure) - @inline protected final def await[T](a: Future[T]): Boolean = { - var r: Option[Try[T]] = None - do { - r = a.value - } while(r eq None); - 
r.get.isInstanceOf[Success[T]] - } + @inline protected final def await[T](a: Future[T]): Boolean = + (a.value ne None) || (Await.ready(a, timeout) eq a) } class NoopFutureBenchmark extends OpFutureBenchmark { @@ -259,12 +264,12 @@ class AndThenFutureBenchmark extends OpFutureBenchmark { } class VariousFutureBenchmark extends OpFutureBenchmark { - final val mapFun: Result => Result = _.toUpperCase - final val flatMapFun: Result => Future[Result] = r => Future.successful(r) - final val filterFun: Result => Boolean = _ ne null - final val transformFun: Try[Result] => Try[Result] = _ => throw null - final val recoverFun: PartialFunction[Throwable, Result] = { case _ => "OK" } - final val keepLeft: (Result, Result) => Result = (a,b) => a + private[this] final val mapFun: Result => Result = _.toUpperCase + private[this] final val flatMapFun: Result => Future[Result] = r => Future.successful(r) + private[this] final val filterFun: Result => Boolean = _ ne null + private[this] final val transformFun: Try[Result] => Try[Result] = _ => throw null + private[this] final val recoverFun: PartialFunction[Throwable, Result] = { case _ => "OK" } + private[this] final val keepLeft: (Result, Result) => Result = (a,b) => a @tailrec private[this] final def next(i: Int, f: Future[Result])(implicit ec: ExecutionContext): Future[Result] = if (i > 0) { next(i - 1, f.map(mapFun).flatMap(flatMapFun).filter(filterFun).zipWith(f)(keepLeft).transform(transformFun).recover(recoverFun)) } else { f } @@ -281,8 +286,8 @@ class VariousFutureBenchmark extends OpFutureBenchmark { } class LoopFutureBenchmark extends OpFutureBenchmark { - val depth = 50 - val size = 2000 + private[this] val depth = 50 + private[this] val size = 2000 final def pre_loop(i: Int)(implicit ec: ExecutionContext): Future[Int] = if (i % depth == 0) Future.successful(i + 1).flatMap(pre_loop) @@ -390,4 +395,4 @@ class CallbackFutureBenchmark extends OpFutureBenchmark { post_p.complete(aSuccess) callback.await() } -} \ No newline 
at end of file +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala new file mode 100644 index 000000000000..690c078ec2f7 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala @@ -0,0 +1,29 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntEulerProblem15Benchmark { + + @Param(Array("5", "10", "15", "20", "25", "30", "35", "40", "45", "50", "55", + "60", "65", "70", "75", "80", "85", "90", "95", "100")) + var size: Int = _ + + @Benchmark + def eulerProblem15(bh: Blackhole): Unit = { + def f(row: Array[BigInt], c: Int): BigInt = + if (c == 0) row.last else f(row.scan(BigInt(0))(_ + _), c - 1) + def computeAnswer(n: Int): BigInt = f(Array.fill(n + 1)(BigInt(1)), n) + bh.consume(computeAnswer(size)) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala new file mode 100644 index 000000000000..0aaa18c029e1 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala @@ -0,0 +1,30 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.annotation.tailrec + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntFactorialBenchmark { + + @Param(Array("5", "10", "15", "20", "25", "30", "35", "40", "45", "50", "55", + "60", "65", "70", 
"75", "80", "85", "90", "95", "100")) + var size: Int = _ + + @Benchmark + def factorial(bh: Blackhole): Unit = { + @tailrec def fact(i: Int, n: Int, prev: BigInt): BigInt = + if (i > n) prev else fact(i + 1, n, prev * i) + bh.consume(fact(1, size, BigInt(1))) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala new file mode 100644 index 000000000000..4c93f324e0bd --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala @@ -0,0 +1,32 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntRSABenchmark { + + @Benchmark + def encodeDecode(bh: Blackhole): Unit = { + // private key + val d = BigInt("5617843187844953170308463622230283376298685") + // public key + val n = BigInt("9516311845790656153499716760847001433441357") + val e = 65537 + + // concatenation of "Scala is great" + val plaintext = BigInt("83099097108097032105115032103114101097116") + val ciphertext = plaintext.modPow(e, n) + val recoveredtext = ciphertext.modPow(d, n) + bh.consume(plaintext == recoveredtext) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala index 25bbff4a46ae..561bd28ae568 100644 --- a/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala @@ -25,7 +25,6 @@ class ClassTagBenchmark { var refClassTag: ClassTag[_] = null var otherValue: Object = null var arraySize: Int = 100 - private[this] var refClasses: Array[Class[_]] = _ @Setup def setup(): Unit = { unitClassTag = classTag[Unit] 
@@ -39,7 +38,6 @@ class ClassTagBenchmark { doubleClassTag = classTag[Double] refClassTag = classTag[ClassTagBenchmark] otherValue = new Object - refClasses = Array(classOf[java.lang.Boolean], classOf[java.lang.Character], classOf[java.lang.Short], classOf[java.lang.Integer], classOf[java.lang.Long], classOf[java.lang.Float], classOf[java.lang.Double]) } @Benchmark def primitivesNegOnRefClassTag(bh: Blackhole): Any = { @@ -88,17 +86,8 @@ class ClassTagBenchmark { @Benchmark def refClassTagUnapplyNeg2Direct(bh: Blackhole): Any = unapplyDirect(refClassTag, otherValue) - @Benchmark def lookupClassTag(bh: Blackhole): Any = { - var clss = refClasses - var i = 0 - while (i < clss.length) { - bh.consume(ClassTag.apply(clss(i))) - i += 1 - } - } - def unapplyDirect(ct: ClassTag[_], x: AnyRef): Option[_] = { if (null != x && (ct.runtimeClass.isInstance(x))) Some(x) else None } -} \ No newline at end of file +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/FindMemberBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/FindMemberBenchmark.scala new file mode 100644 index 000000000000..d8212b555af2 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/FindMemberBenchmark.scala @@ -0,0 +1,53 @@ +package scala.reflect.internal + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.tools.nsc.{Global, Settings} + +@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.SECONDS) +@State(Scope.Benchmark) +class FindMemberBenchmark { + type G <: Global with Singleton + var Type_List: G#Type = _ + var Type_ListRefined: G#Type = _ + var Name_Blerg: G#TermName = _ + var Name_toString: G#TermName = _ + var Name_isEmpty: G#TermName = _ + + @Setup(Level.Trial) def setup(): Unit = { + val settings = new Settings() + 
settings.usejavacp.value = true + settings.stopAfter.value = List("typer") + val global = new Global(settings).asInstanceOf[G] + import global._ + new Run() + Type_List = typeOf[List[_]] + Type_ListRefined = typeOf[List[_] with String { def foo: Int }] // this sort of type turns up in LUBs (search "val lubRefined = ") + Name_Blerg = TermName("Blerg") + Name_toString = TermName("toString") + Name_isEmpty = TermName("isEmpty") + + } + + @Benchmark + def findMember(bh: Blackhole): Unit = { + bh.consume(Type_List.member(Name_Blerg)) + bh.consume(Type_List.member(Name_isEmpty)) + bh.consume(Type_List.member(Name_toString)) + } + + @Benchmark + def findMemberRefined(bh: Blackhole): Unit = { + bh.consume(Type_ListRefined.member(Name_Blerg)) + bh.consume(Type_ListRefined.member(Name_isEmpty)) + bh.consume(Type_ListRefined.member(Name_toString)) + } +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala new file mode 100644 index 000000000000..9370e3e3135f --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala @@ -0,0 +1,52 @@ +package scala.reflect.internal + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit + +import scala.reflect.internal.util.BatchSourceFile + +@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.SampleTime)) +@Fork(4) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class LubBenchmark { + import scala.tools.nsc._ + var g: Global = _ + var ts: List[Global#Type] = _ + + trait A1 ; trait A2 ; trait A3 extends A1 ; trait A4 extends A2 ; trait A5 ; trait A6 ; trait A7 ; trait A8 extends A7 + + trait Odd extends A1 with A3 with A5 with A7 + trait Even extends A2 with A3 with A6 with A8 + trait Low extends A1 
with A2 with A3 with A4 + trait High extends A5 with A6 with A7 with A8 + trait All extends A1 with A2 with A3 with A4 with A5 with A6 with A7 with A8 + class B1 extends A1 with A2 + class B2 extends A7 with A8 + class B3 extends B2 with Low + class B4 extends B1 with High + + @Setup(Level.Trial) + def setup(): Unit = { + val settings = new Settings() + settings.usejavacp.value = true + val global = new Global(settings) + g = global + val run = new global.Run() + import language.existentials + val tp = global.typeOf[((A1, A2, A3, A4), (Odd, Even, High, Low), (B1, B2, B3, B4) )] + ts = tp.typeArgs + } + + @Benchmark def measure(bh: Blackhole): Any = { + val global = g + import global._ + lub(ts.asInstanceOf[List[Type]]) + } +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala new file mode 100644 index 000000000000..1f5389ec0c9d --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala @@ -0,0 +1,46 @@ +package scala.reflect.internal + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit + +import scala.reflect.internal.util.BatchSourceFile + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class SymbolBenchmark { + import scala.tools.nsc._ + var g: Global = _ + var symbol: Global#Symbol = _ + + @Setup(Level.Trial) + def setup(): Unit = { + val settings = new Settings() + settings.usejavacp.value = true + settings.stopAfter.value = List("typer") + val global = new Global(settings) + g = global + + val run = new global.Run() + val source = g.newSourceFile("package p1; class C { def foo: List[String] = Nil }") + run.compileSources(source :: Nil) + val foo = 
global.rootMirror.getClassIfDefined("p1.C").info.decl(global.newTermName("foo")) + symbol = foo + } + + @Benchmark def measure(bh: Blackhole): Unit = { + val r = g.currentRun + g.phase = r.erasurePhase + bh.consume(symbol.info) + g.phase = r.typerPhase + bh.consume(symbol.info) + + } +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala index 70d69178cb19..5ea602370a87 100644 --- a/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala @@ -8,16 +8,16 @@ import org.openjdk.jmh.infra.Blackhole class AlmostFinalValueBenchSettings extends scala.reflect.runtime.Settings { val flag = new BooleanSetting(false) - @inline final def isTrue2: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && flag + @inline final def isTrue2: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && flag.value } object AlmostFinalValueBenchSettings { implicit class SettingsOps(private val settings: AlmostFinalValueBenchSettings) extends AnyVal { - @inline final def isTrue3: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && settings.flag + @inline final def isTrue3: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && settings.flag.value } @inline def isTrue4(settings: AlmostFinalValueBenchSettings): Boolean = - AlmostFinalValueBenchmarkStatics.isTrue && settings.flag + AlmostFinalValueBenchmarkStatics.isTrue && settings.flag.value } @Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) @@ -33,10 +33,10 @@ class AlmostFinalValueBenchmark { private def pretendToWorkHard() = Blackhole.consumeCPU(3) @Benchmark def bench0_unit = () - @Benchmark def bench0_usingStaticFinalFalse = if (STATIC_FINAL_FALSE && flag) pretendToWorkHard() + @Benchmark def bench0_usingStaticFinalFalse = if (STATIC_FINAL_FALSE && flag.value) 
pretendToWorkHard() @Benchmark def bench0_workingHard = pretendToWorkHard() - @Benchmark def bench1_usingAlmostFinalFalse = if (AlmostFinalValueBenchmarkStatics.isTrue && flag) pretendToWorkHard() + @Benchmark def bench1_usingAlmostFinalFalse = if (AlmostFinalValueBenchmarkStatics.isTrue && flag.value) pretendToWorkHard() @Benchmark def bench2_usingInlineMethod = if (settings.isTrue2) pretendToWorkHard() @Benchmark def bench3_usingExtMethod = if (settings.isTrue3) pretendToWorkHard() @Benchmark def bench4_usingObjectMethod = if (AlmostFinalValueBenchSettings.isTrue4(settings)) pretendToWorkHard() diff --git a/test/benchmarks/src/main/scala/scala/tools/cmd/CommandLineParserBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/cmd/CommandLineParserBenchmark.scala new file mode 100644 index 000000000000..7a2909014002 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/cmd/CommandLineParserBenchmark.scala @@ -0,0 +1,35 @@ + +package scala.tools.cmd + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class CommandLineParserBenchmark { + + import scala.sys.process.Parser.tokenize + + @Param(Array("1000", "10000", "100000")) + var argCount: Int = _ + var line: String = _ + var quotyline: String = _ + var embedded: String = _ + + @Setup(Level.Trial) def init(): Unit = { + line = List.tabulate(argCount)(n => s"arg$n").mkString(" ") + val q = "\"" + quotyline = List.tabulate(argCount)(n => s"${q}arg${n}${q}").mkString(" ") + embedded = List.tabulate(argCount)(n => s"${n}${q}arg${q}${n}").mkString(" ") + } + @Benchmark def parsingBenchmark = tokenize(line) + @Benchmark def quoteUnquoteParsingBenchmark = tokenize(quotyline) + @Benchmark def embeddedQuoteParsingBenchmark = tokenize(embedded) +} diff --git 
a/test/benchmarks/src/main/scala/scala/tools/nsc/PhaseAssemblyBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/PhaseAssemblyBenchmark.scala index b70c495ddc8b..ff77e6bf392d 100644 --- a/test/benchmarks/src/main/scala/scala/tools/nsc/PhaseAssemblyBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/PhaseAssemblyBenchmark.scala @@ -18,26 +18,28 @@ class PhaseAssemblyBenchmark { class Data[G <: Global with Singleton](val global: G, val components: List[SubComponent { val global: G}]) var data: Data[_] = _ - @Param(Array("1", "4", "8", "16")) - var size: Int = 0 + case class component[G <: Global with Singleton]( + global: G, + phaseName: String, + override val runsRightAfter: Option[String], + override val runsAfter: List[String], + override val runsBefore: List[String], + ) extends SubComponent { + override val initial: Boolean = phaseName == "parser" + override val terminal: Boolean = phaseName == "terminal" + override def newPhase(prev: Phase): Phase = ??? + } + + @Param(Array("1", "4", "8", "16", "64")) + var size: Int = 64 @Setup def setup(): Unit = { val global = new Global(new Settings) - case class component[G <: Global with Singleton](val global: G, val phaseName: String, override val runsRightAfter: Option[String], override val runsAfter: List[String], override val runsBefore: List[String]) extends SubComponent { - override def newPhase(prev: Phase): Phase = ??? 
- - } - object component { - def apply(phaseName: String, runsRightAfter: Option[String], runsAfter: List[String], runsBefore: List[String]): component[global.type] = { - new component[global.type](global, phaseName, runsRightAfter, runsAfter, runsBefore) - } - } val N = size val components = List.tabulate(N){ i => - component(i.toString, None, if (i == 0) List("parser") else List.tabulate(2)(j => i - j - 1).filter(_ >= 0).map(_.toString), List("terminal")) - } ::: List(component("parser", None, Nil, Nil), component("terminal", None, Nil, List(N.toString))) - + component(global, i.toString, None, if (i == 0) List("parser") else List.tabulate(2)(j => i - j - 1).filter(_ >= 0).map(_.toString), List("terminal")) + } ::: List(component(global, "parser", None, Nil, Nil), component(global, "terminal", None, Nil, Nil)) data = new Data[global.type](global, components ) } @@ -45,10 +47,8 @@ class PhaseAssemblyBenchmark { @Benchmark def assemble(): Object = { val s = data.asInstanceOf[Data[Global with Singleton]] val g = s.global - val graph = g.phasesSetToDepGraph(s.components.reverse) - graph.removeDanglingNodes() - graph.collapseHardLinksAndLevels(graph.getNodeByPhase("parser"), 1) - graph + val graph = DependencyGraph(s.components.reverse) + graph.compilerPhaseList() } } @@ -58,4 +58,4 @@ object PhaseAssemblyBenchmark { bench.setup() bench.assemble() } -} \ No newline at end of file +} diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala index 761b1168576e..a55a3db9ca88 100644 --- a/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala @@ -3,12 +3,12 @@ package backend.jvm import java.util.concurrent.TimeUnit -import scala.tools.asm.tree.ClassNode import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole -import 
scala.collection.JavaConverters.asScalaIteratorConverter +import scala.jdk.CollectionConverters._ import scala.tools.asm.tree.ClassNode +import scala.tools.nsc.backend.jvm.analysis.ProdConsAnalyzer @BenchmarkMode(Array(Mode.AverageTime)) @Fork(2) @@ -26,7 +26,6 @@ class ProdConsBenchmark { val settings = new Settings() settings.usejavacp.value = true val global = new Global(settings) - import global._ this.global = global.asInstanceOf[G] classNode = AsmUtils.readClass(global.classPath.findClassFile("scala.tools.nsc.typechecker.Implicits$ImplicitSearch").get.toByteArray) } @@ -34,7 +33,6 @@ class ProdConsBenchmark { @Benchmark def prodCons(bh: Blackhole): Unit = { val global: G = this.global - import global.genBCode.postProcessor.backendUtils._ for (m <- classNode.methods.iterator().asScala) { bh.consume(new ProdConsAnalyzer(m, classNode.name)) } diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala new file mode 100644 index 000000000000..fd1f2c681239 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala @@ -0,0 +1,1127 @@ +package scala.tools.nsc.transform.patmat + +import java.util.concurrent.TimeUnit +import org.openjdk.jmh.annotations.CompilerControl.Mode.DONT_INLINE +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.annotation.switch +import scala.util.Random + +@Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ClassMatchBenchmark { + private final val count = 10000 + @Param(Array("4", "8", "16", "32", "64", "128", "256")) private var numCases = 0 + + private var names: Array[Name] = null + private var classValue: 
ClassValue[Int] = null + + @Setup def setup(): Unit = { + val r = new Random(12345) + val names = Array[Name]( + Name0(), Name1(), Name2(), Name3(), Name4(), Name5(), Name6(), Name7(), Name8(), Name9(), + Name10(), Name11(), Name12(), Name13(), Name14(), Name15(), Name16(), Name17(), Name18(), Name19(), + Name20(), Name21(), Name22(), Name23(), Name24(), Name25(), Name26(), Name27(), Name28(), Name29(), + Name30(), Name31(), Name32(), Name33(), Name34(), Name35(), Name36(), Name37(), Name38(), Name39(), + Name40(), Name41(), Name42(), Name43(), Name44(), Name45(), Name46(), Name47(), Name48(), Name49(), + Name50(), Name51(), Name52(), Name53(), Name54(), Name55(), Name56(), Name57(), Name58(), Name59(), + Name60(), Name61(), Name62(), Name63(), Name64(), Name65(), Name66(), Name67(), Name68(), Name69(), + Name70(), Name71(), Name72(), Name73(), Name74(), Name75(), Name76(), Name77(), Name78(), Name79(), + Name80(), Name81(), Name82(), Name83(), Name84(), Name85(), Name86(), Name87(), Name88(), Name89(), + Name90(), Name91(), Name92(), Name93(), Name94(), Name95(), Name96(), Name97(), Name98(), Name99(), + Name100(), Name101(), Name102(), Name103(), Name104(), Name105(), Name106(), Name107(), Name108(), Name109(), + Name110(), Name111(), Name112(), Name113(), Name114(), Name115(), Name116(), Name117(), Name118(), Name119(), + Name120(), Name121(), Name122(), Name123(), Name124(), Name125(), Name126(), Name127(), Name128(), Name129(), + Name130(), Name131(), Name132(), Name133(), Name134(), Name135(), Name136(), Name137(), Name138(), Name139(), + Name140(), Name141(), Name142(), Name143(), Name144(), Name145(), Name146(), Name147(), Name148(), Name149(), + Name150(), Name151(), Name152(), Name153(), Name154(), Name155(), Name156(), Name157(), Name158(), Name159(), + Name160(), Name161(), Name162(), Name163(), Name164(), Name165(), Name166(), Name167(), Name168(), Name169(), + Name170(), Name171(), Name172(), Name173(), Name174(), Name175(), Name176(), Name177(), 
Name178(), Name179(), + Name180(), Name181(), Name182(), Name183(), Name184(), Name185(), Name186(), Name187(), Name188(), Name189(), + Name190(), Name191(), Name192(), Name193(), Name194(), Name195(), Name196(), Name197(), Name198(), Name199(), + Name200(), Name201(), Name202(), Name203(), Name204(), Name205(), Name206(), Name207(), Name208(), Name209(), + Name210(), Name211(), Name212(), Name213(), Name214(), Name215(), Name216(), Name217(), Name218(), Name219(), + Name220(), Name221(), Name222(), Name223(), Name224(), Name225(), Name226(), Name227(), Name228(), Name229(), + Name230(), Name231(), Name232(), Name233(), Name234(), Name235(), Name236(), Name237(), Name238(), Name239(), + Name240(), Name241(), Name242(), Name243(), Name244(), Name245(), Name246(), Name247(), Name248(), Name249(), + Name250(), Name251(), Name252(), Name253(), Name254(), Name255(), + ) + this.names = Array.fill(count)(names(r.nextInt(numCases))) + this.classValue = new NameClassValue + } + + @Benchmark @OperationsPerInvocation(count) def patmatShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val x = names(i) match { + case Name0() => "0" case Name1() => "1" case Name2() => "2" case Name3() => "3" case Name4() => "4" + case Name5() => "5" case Name6() => "6" case Name7() => "7" case Name8() => "8" case Name9() => "9" + case Name10() => "10" case Name11() => "11" case Name12() => "12" case Name13() => "13" case Name14() => "14" + case Name15() => "15" case Name16() => "16" case Name17() => "17" case Name18() => "18" case Name19() => "19" + case Name20() => "20" case Name21() => "21" case Name22() => "22" case Name23() => "23" case Name24() => "24" + case Name25() => "25" case Name26() => "26" case Name27() => "27" case Name28() => "28" case Name29() => "29" + case Name30() => "30" case Name31() => "31" case Name32() => "32" case Name33() => "33" case Name34() => "34" + case Name35() => "35" case Name36() => "36" case Name37() => "37" case 
Name38() => "38" case Name39() => "39" + case Name40() => "40" case Name41() => "41" case Name42() => "42" case Name43() => "43" case Name44() => "44" + case Name45() => "45" case Name46() => "46" case Name47() => "47" case Name48() => "48" case Name49() => "49" + case Name50() => "50" case Name51() => "51" case Name52() => "52" case Name53() => "53" case Name54() => "54" + case Name55() => "55" case Name56() => "56" case Name57() => "57" case Name58() => "58" case Name59() => "59" + case Name60() => "60" case Name61() => "61" case Name62() => "62" case Name63() => "63" case Name64() => "64" + case Name65() => "65" case Name66() => "66" case Name67() => "67" case Name68() => "68" case Name69() => "69" + case Name70() => "70" case Name71() => "71" case Name72() => "72" case Name73() => "73" case Name74() => "74" + case Name75() => "75" case Name76() => "76" case Name77() => "77" case Name78() => "78" case Name79() => "79" + case Name80() => "80" case Name81() => "81" case Name82() => "82" case Name83() => "83" case Name84() => "84" + case Name85() => "85" case Name86() => "86" case Name87() => "87" case Name88() => "88" case Name89() => "89" + case Name90() => "90" case Name91() => "91" case Name92() => "92" case Name93() => "93" case Name94() => "94" + case Name95() => "95" case Name96() => "96" case Name97() => "97" case Name98() => "98" case Name99() => "99" + case Name100() => "100" case Name101() => "101" case Name102() => "102" case Name103() => "103" case Name104() => "104" + case Name105() => "105" case Name106() => "106" case Name107() => "107" case Name108() => "108" case Name109() => "109" + case Name110() => "110" case Name111() => "111" case Name112() => "112" case Name113() => "113" case Name114() => "114" + case Name115() => "115" case Name116() => "116" case Name117() => "117" case Name118() => "118" case Name119() => "119" + case Name120() => "120" case Name121() => "121" case Name122() => "122" case Name123() => "123" case Name124() => "124" + case 
Name125() => "125" case Name126() => "126" case Name127() => "127" case Name128() => "128" case Name129() => "129" + case Name130() => "130" case Name131() => "131" case Name132() => "132" case Name133() => "133" case Name134() => "134" + case Name135() => "135" case Name136() => "136" case Name137() => "137" case Name138() => "138" case Name139() => "139" + case Name140() => "140" case Name141() => "141" case Name142() => "142" case Name143() => "143" case Name144() => "144" + case Name145() => "145" case Name146() => "146" case Name147() => "147" case Name148() => "148" case Name149() => "149" + case Name150() => "150" case Name151() => "151" case Name152() => "152" case Name153() => "153" case Name154() => "154" + case Name155() => "155" case Name156() => "156" case Name157() => "157" case Name158() => "158" case Name159() => "159" + case Name160() => "160" case Name161() => "161" case Name162() => "162" case Name163() => "163" case Name164() => "164" + case Name165() => "165" case Name166() => "166" case Name167() => "167" case Name168() => "168" case Name169() => "169" + case Name170() => "170" case Name171() => "171" case Name172() => "172" case Name173() => "173" case Name174() => "174" + case Name175() => "175" case Name176() => "176" case Name177() => "177" case Name178() => "178" case Name179() => "179" + case Name180() => "180" case Name181() => "181" case Name182() => "182" case Name183() => "183" case Name184() => "184" + case Name185() => "185" case Name186() => "186" case Name187() => "187" case Name188() => "188" case Name189() => "189" + case Name190() => "190" case Name191() => "191" case Name192() => "192" case Name193() => "193" case Name194() => "194" + case Name195() => "195" case Name196() => "196" case Name197() => "197" case Name198() => "198" case Name199() => "199" + case Name200() => "200" case Name201() => "201" case Name202() => "202" case Name203() => "203" case Name204() => "204" + case Name205() => "205" case Name206() => "206" case 
Name207() => "207" case Name208() => "208" case Name209() => "209" + case Name210() => "210" case Name211() => "211" case Name212() => "212" case Name213() => "213" case Name214() => "214" + case Name215() => "215" case Name216() => "216" case Name217() => "217" case Name218() => "218" case Name219() => "219" + case Name220() => "220" case Name221() => "221" case Name222() => "222" case Name223() => "223" case Name224() => "224" + case Name225() => "225" case Name226() => "226" case Name227() => "227" case Name228() => "228" case Name229() => "229" + case Name230() => "230" case Name231() => "231" case Name232() => "232" case Name233() => "233" case Name234() => "234" + case Name235() => "235" case Name236() => "236" case Name237() => "237" case Name238() => "238" case Name239() => "239" + case Name240() => "240" case Name241() => "241" case Name242() => "242" case Name243() => "243" case Name244() => "244" + case Name245() => "245" case Name246() => "246" case Name247() => "247" case Name248() => "248" case Name249() => "249" + case Name250() => "250" case Name251() => "251" case Name252() => "252" case Name253() => "253" case Name254() => "254" + case Name255() => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def virtualShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + bh.consume(names(i).virtualShow) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def intSwitchShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val x = (names(i)._id: @switch) match { + case 0 => "0" case 1 => "1" case 2 => "2" case 3 => "3" case 4 => "4" + case 5 => "5" case 6 => "6" case 7 => "7" case 8 => "8" case 9 => "9" + case 10 => "10" case 11 => "11" case 12 => "12" case 13 => "13" case 14 => "14" + case 15 => "15" case 16 => "16" case 17 => "17" case 18 => "18" case 19 => "19" + case 20 => "20" case 21 => "21" case 22 => "22" case 23 
=> "23" case 24 => "24" + case 25 => "25" case 26 => "26" case 27 => "27" case 28 => "28" case 29 => "29" + case 30 => "30" case 31 => "31" case 32 => "32" case 33 => "33" case 34 => "34" + case 35 => "35" case 36 => "36" case 37 => "37" case 38 => "38" case 39 => "39" + case 40 => "40" case 41 => "41" case 42 => "42" case 43 => "43" case 44 => "44" + case 45 => "45" case 46 => "46" case 47 => "47" case 48 => "48" case 49 => "49" + case 50 => "50" case 51 => "51" case 52 => "52" case 53 => "53" case 54 => "54" + case 55 => "55" case 56 => "56" case 57 => "57" case 58 => "58" case 59 => "59" + case 60 => "60" case 61 => "61" case 62 => "62" case 63 => "63" case 64 => "64" + case 65 => "65" case 66 => "66" case 67 => "67" case 68 => "68" case 69 => "69" + case 70 => "70" case 71 => "71" case 72 => "72" case 73 => "73" case 74 => "74" + case 75 => "75" case 76 => "76" case 77 => "77" case 78 => "78" case 79 => "79" + case 80 => "80" case 81 => "81" case 82 => "82" case 83 => "83" case 84 => "84" + case 85 => "85" case 86 => "86" case 87 => "87" case 88 => "88" case 89 => "89" + case 90 => "90" case 91 => "91" case 92 => "92" case 93 => "93" case 94 => "94" + case 95 => "95" case 96 => "96" case 97 => "97" case 98 => "98" case 99 => "99" + case 100 => "100" case 101 => "101" case 102 => "102" case 103 => "103" case 104 => "104" + case 105 => "105" case 106 => "106" case 107 => "107" case 108 => "108" case 109 => "109" + case 110 => "110" case 111 => "111" case 112 => "112" case 113 => "113" case 114 => "114" + case 115 => "115" case 116 => "116" case 117 => "117" case 118 => "118" case 119 => "119" + case 120 => "120" case 121 => "121" case 122 => "122" case 123 => "123" case 124 => "124" + case 125 => "125" case 126 => "126" case 127 => "127" case 128 => "128" case 129 => "129" + case 130 => "130" case 131 => "131" case 132 => "132" case 133 => "133" case 134 => "134" + case 135 => "135" case 136 => "136" case 137 => "137" case 138 => "138" case 139 => "139" + case 
140 => "140" case 141 => "141" case 142 => "142" case 143 => "143" case 144 => "144" + case 145 => "145" case 146 => "146" case 147 => "147" case 148 => "148" case 149 => "149" + case 150 => "150" case 151 => "151" case 152 => "152" case 153 => "153" case 154 => "154" + case 155 => "155" case 156 => "156" case 157 => "157" case 158 => "158" case 159 => "159" + case 160 => "160" case 161 => "161" case 162 => "162" case 163 => "163" case 164 => "164" + case 165 => "165" case 166 => "166" case 167 => "167" case 168 => "168" case 169 => "169" + case 170 => "170" case 171 => "171" case 172 => "172" case 173 => "173" case 174 => "174" + case 175 => "175" case 176 => "176" case 177 => "177" case 178 => "178" case 179 => "179" + case 180 => "180" case 181 => "181" case 182 => "182" case 183 => "183" case 184 => "184" + case 185 => "185" case 186 => "186" case 187 => "187" case 188 => "188" case 189 => "189" + case 190 => "190" case 191 => "191" case 192 => "192" case 193 => "193" case 194 => "194" + case 195 => "195" case 196 => "196" case 197 => "197" case 198 => "198" case 199 => "199" + case 200 => "200" case 201 => "201" case 202 => "202" case 203 => "203" case 204 => "204" + case 205 => "205" case 206 => "206" case 207 => "207" case 208 => "208" case 209 => "209" + case 210 => "210" case 211 => "211" case 212 => "212" case 213 => "213" case 214 => "214" + case 215 => "215" case 216 => "216" case 217 => "217" case 218 => "218" case 219 => "219" + case 220 => "220" case 221 => "221" case 222 => "222" case 223 => "223" case 224 => "224" + case 225 => "225" case 226 => "226" case 227 => "227" case 228 => "228" case 229 => "229" + case 230 => "230" case 231 => "231" case 232 => "232" case 233 => "233" case 234 => "234" + case 235 => "235" case 236 => "236" case 237 => "237" case 238 => "238" case 239 => "239" + case 240 => "240" case 241 => "241" case 242 => "242" case 243 => "243" case 244 => "244" + case 245 => "245" case 246 => "246" case 247 => "247" case 248 => "248" 
case 249 => "249" + case 250 => "250" case 251 => "251" case 252 => "252" case 253 => "253" case 254 => "254" + case 255 => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def justClassValueLookup(bh: Blackhole): Unit = { + val names = this.names + val classValue = this.classValue + var i = 0 + while (i < names.length) { + bh.consume(classValue.get(names(i).getClass)) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def classValueShow(bh: Blackhole): Unit = { + val names = this.names + val classValue = this.classValue + var i = 0 + while (i < names.length) { + val x = (classValue.get(names(i).getClass): @switch) match { + case 0 => "0" case 1 => "1" case 2 => "2" case 3 => "3" case 4 => "4" + case 5 => "5" case 6 => "6" case 7 => "7" case 8 => "8" case 9 => "9" + case 10 => "10" case 11 => "11" case 12 => "12" case 13 => "13" case 14 => "14" + case 15 => "15" case 16 => "16" case 17 => "17" case 18 => "18" case 19 => "19" + case 20 => "20" case 21 => "21" case 22 => "22" case 23 => "23" case 24 => "24" + case 25 => "25" case 26 => "26" case 27 => "27" case 28 => "28" case 29 => "29" + case 30 => "30" case 31 => "31" case 32 => "32" case 33 => "33" case 34 => "34" + case 35 => "35" case 36 => "36" case 37 => "37" case 38 => "38" case 39 => "39" + case 40 => "40" case 41 => "41" case 42 => "42" case 43 => "43" case 44 => "44" + case 45 => "45" case 46 => "46" case 47 => "47" case 48 => "48" case 49 => "49" + case 50 => "50" case 51 => "51" case 52 => "52" case 53 => "53" case 54 => "54" + case 55 => "55" case 56 => "56" case 57 => "57" case 58 => "58" case 59 => "59" + case 60 => "60" case 61 => "61" case 62 => "62" case 63 => "63" case 64 => "64" + case 65 => "65" case 66 => "66" case 67 => "67" case 68 => "68" case 69 => "69" + case 70 => "70" case 71 => "71" case 72 => "72" case 73 => "73" case 74 => "74" + case 75 => "75" case 76 => "76" case 77 => "77" case 78 => "78" case 79 => "79" + case 80 => "80" case 
81 => "81" case 82 => "82" case 83 => "83" case 84 => "84" + case 85 => "85" case 86 => "86" case 87 => "87" case 88 => "88" case 89 => "89" + case 90 => "90" case 91 => "91" case 92 => "92" case 93 => "93" case 94 => "94" + case 95 => "95" case 96 => "96" case 97 => "97" case 98 => "98" case 99 => "99" + case 100 => "100" case 101 => "101" case 102 => "102" case 103 => "103" case 104 => "104" + case 105 => "105" case 106 => "106" case 107 => "107" case 108 => "108" case 109 => "109" + case 110 => "110" case 111 => "111" case 112 => "112" case 113 => "113" case 114 => "114" + case 115 => "115" case 116 => "116" case 117 => "117" case 118 => "118" case 119 => "119" + case 120 => "120" case 121 => "121" case 122 => "122" case 123 => "123" case 124 => "124" + case 125 => "125" case 126 => "126" case 127 => "127" case 128 => "128" case 129 => "129" + case 130 => "130" case 131 => "131" case 132 => "132" case 133 => "133" case 134 => "134" + case 135 => "135" case 136 => "136" case 137 => "137" case 138 => "138" case 139 => "139" + case 140 => "140" case 141 => "141" case 142 => "142" case 143 => "143" case 144 => "144" + case 145 => "145" case 146 => "146" case 147 => "147" case 148 => "148" case 149 => "149" + case 150 => "150" case 151 => "151" case 152 => "152" case 153 => "153" case 154 => "154" + case 155 => "155" case 156 => "156" case 157 => "157" case 158 => "158" case 159 => "159" + case 160 => "160" case 161 => "161" case 162 => "162" case 163 => "163" case 164 => "164" + case 165 => "165" case 166 => "166" case 167 => "167" case 168 => "168" case 169 => "169" + case 170 => "170" case 171 => "171" case 172 => "172" case 173 => "173" case 174 => "174" + case 175 => "175" case 176 => "176" case 177 => "177" case 178 => "178" case 179 => "179" + case 180 => "180" case 181 => "181" case 182 => "182" case 183 => "183" case 184 => "184" + case 185 => "185" case 186 => "186" case 187 => "187" case 188 => "188" case 189 => "189" + case 190 => "190" case 191 => "191" 
case 192 => "192" case 193 => "193" case 194 => "194" + case 195 => "195" case 196 => "196" case 197 => "197" case 198 => "198" case 199 => "199" + case 200 => "200" case 201 => "201" case 202 => "202" case 203 => "203" case 204 => "204" + case 205 => "205" case 206 => "206" case 207 => "207" case 208 => "208" case 209 => "209" + case 210 => "210" case 211 => "211" case 212 => "212" case 213 => "213" case 214 => "214" + case 215 => "215" case 216 => "216" case 217 => "217" case 218 => "218" case 219 => "219" + case 220 => "220" case 221 => "221" case 222 => "222" case 223 => "223" case 224 => "224" + case 225 => "225" case 226 => "226" case 227 => "227" case 228 => "228" case 229 => "229" + case 230 => "230" case 231 => "231" case 232 => "232" case 233 => "233" case 234 => "234" + case 235 => "235" case 236 => "236" case 237 => "237" case 238 => "238" case 239 => "239" + case 240 => "240" case 241 => "241" case 242 => "242" case 243 => "243" case 244 => "244" + case 245 => "245" case 246 => "246" case 247 => "247" case 248 => "248" case 249 => "249" + case 250 => "250" case 251 => "251" case 252 => "252" case 253 => "253" case 254 => "254" + case 255 => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def classNameHashSwitchShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val name = names(i) + val cls = name.getClass + val x = ((cls.getName.##): @switch) match { + case -1200720095 => "0" + case -1200720094 => "1" + case -1200720093 => "2" + case -1200720092 => "3" + case -1200720091 => "4" + case -1200720090 => "5" + case -1200720089 => "6" + case -1200720088 => "7" + case -1200720087 => "8" + case -1200720086 => "9" + case 1432382798 => "10" + case 1432382799 => "11" + case 1432382800 => "12" + case 1432382801 => "13" + case 1432382802 => "14" + case 1432382803 => "15" + case 1432382804 => "16" + case 1432382805 => "17" + case 1432382806 => "18" + case 1432382807 => "19" + 
case 1432382829 => "20" + case 1432382830 => "21" + case 1432382831 => "22" + case 1432382832 => "23" + case 1432382833 => "24" + case 1432382834 => "25" + case 1432382835 => "26" + case 1432382836 => "27" + case 1432382837 => "28" + case 1432382838 => "29" + case 1432382860 => "30" + case 1432382861 => "31" + case 1432382862 => "32" + case 1432382863 => "33" + case 1432382864 => "34" + case 1432382865 => "35" + case 1432382866 => "36" + case 1432382867 => "37" + case 1432382868 => "38" + case 1432382869 => "39" + case 1432382891 => "40" + case 1432382892 => "41" + case 1432382893 => "42" + case 1432382894 => "43" + case 1432382895 => "44" + case 1432382896 => "45" + case 1432382897 => "46" + case 1432382898 => "47" + case 1432382899 => "48" + case 1432382900 => "49" + case 1432382922 => "50" + case 1432382923 => "51" + case 1432382924 => "52" + case 1432382925 => "53" + case 1432382926 => "54" + case 1432382927 => "55" + case 1432382928 => "56" + case 1432382929 => "57" + case 1432382930 => "58" + case 1432382931 => "59" + case 1432382953 => "60" + case 1432382954 => "61" + case 1432382955 => "62" + case 1432382956 => "63" + case 1432382957 => "64" + case 1432382958 => "65" + case 1432382959 => "66" + case 1432382960 => "67" + case 1432382961 => "68" + case 1432382962 => "69" + case 1432382984 => "70" + case 1432382985 => "71" + case 1432382986 => "72" + case 1432382987 => "73" + case 1432382988 => "74" + case 1432382989 => "75" + case 1432382990 => "76" + case 1432382991 => "77" + case 1432382992 => "78" + case 1432382993 => "79" + case 1432383015 => "80" + case 1432383016 => "81" + case 1432383017 => "82" + case 1432383018 => "83" + case 1432383019 => "84" + case 1432383020 => "85" + case 1432383021 => "86" + case 1432383022 => "87" + case 1432383023 => "88" + case 1432383024 => "89" + case 1432383046 => "90" + case 1432383047 => "91" + case 1432383048 => "92" + case 1432383049 => "93" + case 1432383050 => "94" + case 1432383051 => "95" + case 1432383052 => "96" 
+ case 1432383053 => "97" + case 1432383054 => "98" + case 1432383055 => "99" + case 1454193826 => "100" + case 1454193827 => "101" + case 1454193828 => "102" + case 1454193829 => "103" + case 1454193830 => "104" + case 1454193831 => "105" + case 1454193832 => "106" + case 1454193833 => "107" + case 1454193834 => "108" + case 1454193835 => "109" + case 1454193857 => "110" + case 1454193858 => "111" + case 1454193859 => "112" + case 1454193860 => "113" + case 1454193861 => "114" + case 1454193862 => "115" + case 1454193863 => "116" + case 1454193864 => "117" + case 1454193865 => "118" + case 1454193866 => "119" + case 1454193888 => "120" + case 1454193889 => "121" + case 1454193890 => "122" + case 1454193891 => "123" + case 1454193892 => "124" + case 1454193893 => "125" + case 1454193894 => "126" + case 1454193895 => "127" + case 1454193896 => "128" + case 1454193897 => "129" + case 1454193919 => "130" + case 1454193920 => "131" + case 1454193921 => "132" + case 1454193922 => "133" + case 1454193923 => "134" + case 1454193924 => "135" + case 1454193925 => "136" + case 1454193926 => "137" + case 1454193927 => "138" + case 1454193928 => "139" + case 1454193950 => "140" + case 1454193951 => "141" + case 1454193952 => "142" + case 1454193953 => "143" + case 1454193954 => "144" + case 1454193955 => "145" + case 1454193956 => "146" + case 1454193957 => "147" + case 1454193958 => "148" + case 1454193959 => "149" + case 1454193981 => "150" + case 1454193982 => "151" + case 1454193983 => "152" + case 1454193984 => "153" + case 1454193985 => "154" + case 1454193986 => "155" + case 1454193987 => "156" + case 1454193988 => "157" + case 1454193989 => "158" + case 1454193990 => "159" + case 1454194012 => "160" + case 1454194013 => "161" + case 1454194014 => "162" + case 1454194015 => "163" + case 1454194016 => "164" + case 1454194017 => "165" + case 1454194018 => "166" + case 1454194019 => "167" + case 1454194020 => "168" + case 1454194021 => "169" + case 1454194043 => "170" + 
case 1454194044 => "171" + case 1454194045 => "172" + case 1454194046 => "173" + case 1454194047 => "174" + case 1454194048 => "175" + case 1454194049 => "176" + case 1454194050 => "177" + case 1454194051 => "178" + case 1454194052 => "179" + case 1454194074 => "180" + case 1454194075 => "181" + case 1454194076 => "182" + case 1454194077 => "183" + case 1454194078 => "184" + case 1454194079 => "185" + case 1454194080 => "186" + case 1454194081 => "187" + case 1454194082 => "188" + case 1454194083 => "189" + case 1454194105 => "190" + case 1454194106 => "191" + case 1454194107 => "192" + case 1454194108 => "193" + case 1454194109 => "194" + case 1454194110 => "195" + case 1454194111 => "196" + case 1454194112 => "197" + case 1454194113 => "198" + case 1454194114 => "199" + case 1454194787 => "200" + case 1454194788 => "201" + case 1454194789 => "202" + case 1454194790 => "203" + case 1454194791 => "204" + case 1454194792 => "205" + case 1454194793 => "206" + case 1454194794 => "207" + case 1454194795 => "208" + case 1454194796 => "209" + case 1454194818 => "210" + case 1454194819 => "211" + case 1454194820 => "212" + case 1454194821 => "213" + case 1454194822 => "214" + case 1454194823 => "215" + case 1454194824 => "216" + case 1454194825 => "217" + case 1454194826 => "218" + case 1454194827 => "219" + case 1454194849 => "220" + case 1454194850 => "221" + case 1454194851 => "222" + case 1454194852 => "223" + case 1454194853 => "224" + case 1454194854 => "225" + case 1454194855 => "226" + case 1454194856 => "227" + case 1454194857 => "228" + case 1454194858 => "229" + case 1454194880 => "230" + case 1454194881 => "231" + case 1454194882 => "232" + case 1454194883 => "233" + case 1454194884 => "234" + case 1454194885 => "235" + case 1454194886 => "236" + case 1454194887 => "237" + case 1454194888 => "238" + case 1454194889 => "239" + case 1454194911 => "240" + case 1454194912 => "241" + case 1454194913 => "242" + case 1454194914 => "243" + case 1454194915 => "244" + 
case 1454194916 => "245" + case 1454194917 => "246" + case 1454194918 => "247" + case 1454194919 => "248" + case 1454194920 => "249" + case 1454194942 => "250" + case 1454194943 => "251" + case 1454194944 => "252" + case 1454194945 => "253" + case 1454194946 => "254" + case 1454194947 => "255" + case hashCode => throw new MatchError(s"No case for: $name -> $cls -> $hashCode") + } + bh.consume(x) + i += 1 + } + } + +/* + This benchmark compares pattern matching to alternatives, specifically: + 1. using virtual methods instead (like our Tree#transform/traverse) + 2. doing a tableswitch on int field (like our Promise.Transformation) + 3. using a ClassValue as a more efficient way to store the int (like exotic's TypeSwitch) + 4. using the instance's class's name's hash, which are all memoised, in a jumptable + + The results appear to indicate that: + + 1. < 16 cases, patmat beats virtual method calls + 2. = 16 cases, patmat vs virtual overlap in error margins + 3. > 16 cases, patmat loses to virtual method calls + 4. int switching seems to only outperform virtual at 32+ cases + 5. 
class name hash switching beats class value, up to 32 cases (and matches performance at 64) +*/ +} + +final class NameClassValue extends ClassValue[Int] { + def computeValue(runtimeClass: Class[_]) = runtimeClass match { + case ClsName0 => 0 case ClsName1 => 1 case ClsName2 => 2 case ClsName3 => 3 case ClsName4 => 4 + case ClsName5 => 5 case ClsName6 => 6 case ClsName7 => 7 case ClsName8 => 8 case ClsName9 => 9 + case ClsName10 => 10 case ClsName11 => 11 case ClsName12 => 12 case ClsName13 => 13 case ClsName14 => 14 + case ClsName15 => 15 case ClsName16 => 16 case ClsName17 => 17 case ClsName18 => 18 case ClsName19 => 19 + case ClsName20 => 20 case ClsName21 => 21 case ClsName22 => 22 case ClsName23 => 23 case ClsName24 => 24 + case ClsName25 => 25 case ClsName26 => 26 case ClsName27 => 27 case ClsName28 => 28 case ClsName29 => 29 + case ClsName30 => 30 case ClsName31 => 31 case ClsName32 => 32 case ClsName33 => 33 case ClsName34 => 34 + case ClsName35 => 35 case ClsName36 => 36 case ClsName37 => 37 case ClsName38 => 38 case ClsName39 => 39 + case ClsName40 => 40 case ClsName41 => 41 case ClsName42 => 42 case ClsName43 => 43 case ClsName44 => 44 + case ClsName45 => 45 case ClsName46 => 46 case ClsName47 => 47 case ClsName48 => 48 case ClsName49 => 49 + case ClsName50 => 50 case ClsName51 => 51 case ClsName52 => 52 case ClsName53 => 53 case ClsName54 => 54 + case ClsName55 => 55 case ClsName56 => 56 case ClsName57 => 57 case ClsName58 => 58 case ClsName59 => 59 + case ClsName60 => 60 case ClsName61 => 61 case ClsName62 => 62 case ClsName63 => 63 case ClsName64 => 64 + case ClsName65 => 65 case ClsName66 => 66 case ClsName67 => 67 case ClsName68 => 68 case ClsName69 => 69 + case ClsName70 => 70 case ClsName71 => 71 case ClsName72 => 72 case ClsName73 => 73 case ClsName74 => 74 + case ClsName75 => 75 case ClsName76 => 76 case ClsName77 => 77 case ClsName78 => 78 case ClsName79 => 79 + case ClsName80 => 80 case ClsName81 => 81 case ClsName82 => 82 case ClsName83 => 83 
case ClsName84 => 84 + case ClsName85 => 85 case ClsName86 => 86 case ClsName87 => 87 case ClsName88 => 88 case ClsName89 => 89 + case ClsName90 => 90 case ClsName91 => 91 case ClsName92 => 92 case ClsName93 => 93 case ClsName94 => 94 + case ClsName95 => 95 case ClsName96 => 96 case ClsName97 => 97 case ClsName98 => 98 case ClsName99 => 99 + case ClsName100 => 100 case ClsName101 => 101 case ClsName102 => 102 case ClsName103 => 103 case ClsName104 => 104 + case ClsName105 => 105 case ClsName106 => 106 case ClsName107 => 107 case ClsName108 => 108 case ClsName109 => 109 + case ClsName110 => 110 case ClsName111 => 111 case ClsName112 => 112 case ClsName113 => 113 case ClsName114 => 114 + case ClsName115 => 115 case ClsName116 => 116 case ClsName117 => 117 case ClsName118 => 118 case ClsName119 => 119 + case ClsName120 => 120 case ClsName121 => 121 case ClsName122 => 122 case ClsName123 => 123 case ClsName124 => 124 + case ClsName125 => 125 case ClsName126 => 126 case ClsName127 => 127 case ClsName128 => 128 case ClsName129 => 129 + case ClsName130 => 130 case ClsName131 => 131 case ClsName132 => 132 case ClsName133 => 133 case ClsName134 => 134 + case ClsName135 => 135 case ClsName136 => 136 case ClsName137 => 137 case ClsName138 => 138 case ClsName139 => 139 + case ClsName140 => 140 case ClsName141 => 141 case ClsName142 => 142 case ClsName143 => 143 case ClsName144 => 144 + case ClsName145 => 145 case ClsName146 => 146 case ClsName147 => 147 case ClsName148 => 148 case ClsName149 => 149 + case ClsName150 => 150 case ClsName151 => 151 case ClsName152 => 152 case ClsName153 => 153 case ClsName154 => 154 + case ClsName155 => 155 case ClsName156 => 156 case ClsName157 => 157 case ClsName158 => 158 case ClsName159 => 159 + case ClsName160 => 160 case ClsName161 => 161 case ClsName162 => 162 case ClsName163 => 163 case ClsName164 => 164 + case ClsName165 => 165 case ClsName166 => 166 case ClsName167 => 167 case ClsName168 => 168 case ClsName169 => 169 + case ClsName170 
=> 170 case ClsName171 => 171 case ClsName172 => 172 case ClsName173 => 173 case ClsName174 => 174 + case ClsName175 => 175 case ClsName176 => 176 case ClsName177 => 177 case ClsName178 => 178 case ClsName179 => 179 + case ClsName180 => 180 case ClsName181 => 181 case ClsName182 => 182 case ClsName183 => 183 case ClsName184 => 184 + case ClsName185 => 185 case ClsName186 => 186 case ClsName187 => 187 case ClsName188 => 188 case ClsName189 => 189 + case ClsName190 => 190 case ClsName191 => 191 case ClsName192 => 192 case ClsName193 => 193 case ClsName194 => 194 + case ClsName195 => 195 case ClsName196 => 196 case ClsName197 => 197 case ClsName198 => 198 case ClsName199 => 199 + case ClsName200 => 200 case ClsName201 => 201 case ClsName202 => 202 case ClsName203 => 203 case ClsName204 => 204 + case ClsName205 => 205 case ClsName206 => 206 case ClsName207 => 207 case ClsName208 => 208 case ClsName209 => 209 + case ClsName210 => 210 case ClsName211 => 211 case ClsName212 => 212 case ClsName213 => 213 case ClsName214 => 214 + case ClsName215 => 215 case ClsName216 => 216 case ClsName217 => 217 case ClsName218 => 218 case ClsName219 => 219 + case ClsName220 => 220 case ClsName221 => 221 case ClsName222 => 222 case ClsName223 => 223 case ClsName224 => 224 + case ClsName225 => 225 case ClsName226 => 226 case ClsName227 => 227 case ClsName228 => 228 case ClsName229 => 229 + case ClsName230 => 230 case ClsName231 => 231 case ClsName232 => 232 case ClsName233 => 233 case ClsName234 => 234 + case ClsName235 => 235 case ClsName236 => 236 case ClsName237 => 237 case ClsName238 => 238 case ClsName239 => 239 + case ClsName240 => 240 case ClsName241 => 241 case ClsName242 => 242 case ClsName243 => 243 case ClsName244 => 244 + case ClsName245 => 245 case ClsName246 => 246 case ClsName247 => 247 case ClsName248 => 248 case ClsName249 => 249 + case ClsName250 => 250 case ClsName251 => 251 case ClsName252 => 252 case ClsName253 => 253 case ClsName254 => 254 + case ClsName255 => 255 + } 
+ + private val ClsName0 = classOf[Name0] + private val ClsName1 = classOf[Name1] + private val ClsName2 = classOf[Name2] + private val ClsName3 = classOf[Name3] + private val ClsName4 = classOf[Name4] + private val ClsName5 = classOf[Name5] + private val ClsName6 = classOf[Name6] + private val ClsName7 = classOf[Name7] + private val ClsName8 = classOf[Name8] + private val ClsName9 = classOf[Name9] + private val ClsName10 = classOf[Name10] + private val ClsName11 = classOf[Name11] + private val ClsName12 = classOf[Name12] + private val ClsName13 = classOf[Name13] + private val ClsName14 = classOf[Name14] + private val ClsName15 = classOf[Name15] + private val ClsName16 = classOf[Name16] + private val ClsName17 = classOf[Name17] + private val ClsName18 = classOf[Name18] + private val ClsName19 = classOf[Name19] + private val ClsName20 = classOf[Name20] + private val ClsName21 = classOf[Name21] + private val ClsName22 = classOf[Name22] + private val ClsName23 = classOf[Name23] + private val ClsName24 = classOf[Name24] + private val ClsName25 = classOf[Name25] + private val ClsName26 = classOf[Name26] + private val ClsName27 = classOf[Name27] + private val ClsName28 = classOf[Name28] + private val ClsName29 = classOf[Name29] + private val ClsName30 = classOf[Name30] + private val ClsName31 = classOf[Name31] + private val ClsName32 = classOf[Name32] + private val ClsName33 = classOf[Name33] + private val ClsName34 = classOf[Name34] + private val ClsName35 = classOf[Name35] + private val ClsName36 = classOf[Name36] + private val ClsName37 = classOf[Name37] + private val ClsName38 = classOf[Name38] + private val ClsName39 = classOf[Name39] + private val ClsName40 = classOf[Name40] + private val ClsName41 = classOf[Name41] + private val ClsName42 = classOf[Name42] + private val ClsName43 = classOf[Name43] + private val ClsName44 = classOf[Name44] + private val ClsName45 = classOf[Name45] + private val ClsName46 = classOf[Name46] + private val ClsName47 = classOf[Name47] + 
private val ClsName48 = classOf[Name48] + private val ClsName49 = classOf[Name49] + private val ClsName50 = classOf[Name50] + private val ClsName51 = classOf[Name51] + private val ClsName52 = classOf[Name52] + private val ClsName53 = classOf[Name53] + private val ClsName54 = classOf[Name54] + private val ClsName55 = classOf[Name55] + private val ClsName56 = classOf[Name56] + private val ClsName57 = classOf[Name57] + private val ClsName58 = classOf[Name58] + private val ClsName59 = classOf[Name59] + private val ClsName60 = classOf[Name60] + private val ClsName61 = classOf[Name61] + private val ClsName62 = classOf[Name62] + private val ClsName63 = classOf[Name63] + private val ClsName64 = classOf[Name64] + private val ClsName65 = classOf[Name65] + private val ClsName66 = classOf[Name66] + private val ClsName67 = classOf[Name67] + private val ClsName68 = classOf[Name68] + private val ClsName69 = classOf[Name69] + private val ClsName70 = classOf[Name70] + private val ClsName71 = classOf[Name71] + private val ClsName72 = classOf[Name72] + private val ClsName73 = classOf[Name73] + private val ClsName74 = classOf[Name74] + private val ClsName75 = classOf[Name75] + private val ClsName76 = classOf[Name76] + private val ClsName77 = classOf[Name77] + private val ClsName78 = classOf[Name78] + private val ClsName79 = classOf[Name79] + private val ClsName80 = classOf[Name80] + private val ClsName81 = classOf[Name81] + private val ClsName82 = classOf[Name82] + private val ClsName83 = classOf[Name83] + private val ClsName84 = classOf[Name84] + private val ClsName85 = classOf[Name85] + private val ClsName86 = classOf[Name86] + private val ClsName87 = classOf[Name87] + private val ClsName88 = classOf[Name88] + private val ClsName89 = classOf[Name89] + private val ClsName90 = classOf[Name90] + private val ClsName91 = classOf[Name91] + private val ClsName92 = classOf[Name92] + private val ClsName93 = classOf[Name93] + private val ClsName94 = classOf[Name94] + private val ClsName95 = 
classOf[Name95] + private val ClsName96 = classOf[Name96] + private val ClsName97 = classOf[Name97] + private val ClsName98 = classOf[Name98] + private val ClsName99 = classOf[Name99] + private val ClsName100 = classOf[Name100] + private val ClsName101 = classOf[Name101] + private val ClsName102 = classOf[Name102] + private val ClsName103 = classOf[Name103] + private val ClsName104 = classOf[Name104] + private val ClsName105 = classOf[Name105] + private val ClsName106 = classOf[Name106] + private val ClsName107 = classOf[Name107] + private val ClsName108 = classOf[Name108] + private val ClsName109 = classOf[Name109] + private val ClsName110 = classOf[Name110] + private val ClsName111 = classOf[Name111] + private val ClsName112 = classOf[Name112] + private val ClsName113 = classOf[Name113] + private val ClsName114 = classOf[Name114] + private val ClsName115 = classOf[Name115] + private val ClsName116 = classOf[Name116] + private val ClsName117 = classOf[Name117] + private val ClsName118 = classOf[Name118] + private val ClsName119 = classOf[Name119] + private val ClsName120 = classOf[Name120] + private val ClsName121 = classOf[Name121] + private val ClsName122 = classOf[Name122] + private val ClsName123 = classOf[Name123] + private val ClsName124 = classOf[Name124] + private val ClsName125 = classOf[Name125] + private val ClsName126 = classOf[Name126] + private val ClsName127 = classOf[Name127] + private val ClsName128 = classOf[Name128] + private val ClsName129 = classOf[Name129] + private val ClsName130 = classOf[Name130] + private val ClsName131 = classOf[Name131] + private val ClsName132 = classOf[Name132] + private val ClsName133 = classOf[Name133] + private val ClsName134 = classOf[Name134] + private val ClsName135 = classOf[Name135] + private val ClsName136 = classOf[Name136] + private val ClsName137 = classOf[Name137] + private val ClsName138 = classOf[Name138] + private val ClsName139 = classOf[Name139] + private val ClsName140 = classOf[Name140] + private 
val ClsName141 = classOf[Name141] + private val ClsName142 = classOf[Name142] + private val ClsName143 = classOf[Name143] + private val ClsName144 = classOf[Name144] + private val ClsName145 = classOf[Name145] + private val ClsName146 = classOf[Name146] + private val ClsName147 = classOf[Name147] + private val ClsName148 = classOf[Name148] + private val ClsName149 = classOf[Name149] + private val ClsName150 = classOf[Name150] + private val ClsName151 = classOf[Name151] + private val ClsName152 = classOf[Name152] + private val ClsName153 = classOf[Name153] + private val ClsName154 = classOf[Name154] + private val ClsName155 = classOf[Name155] + private val ClsName156 = classOf[Name156] + private val ClsName157 = classOf[Name157] + private val ClsName158 = classOf[Name158] + private val ClsName159 = classOf[Name159] + private val ClsName160 = classOf[Name160] + private val ClsName161 = classOf[Name161] + private val ClsName162 = classOf[Name162] + private val ClsName163 = classOf[Name163] + private val ClsName164 = classOf[Name164] + private val ClsName165 = classOf[Name165] + private val ClsName166 = classOf[Name166] + private val ClsName167 = classOf[Name167] + private val ClsName168 = classOf[Name168] + private val ClsName169 = classOf[Name169] + private val ClsName170 = classOf[Name170] + private val ClsName171 = classOf[Name171] + private val ClsName172 = classOf[Name172] + private val ClsName173 = classOf[Name173] + private val ClsName174 = classOf[Name174] + private val ClsName175 = classOf[Name175] + private val ClsName176 = classOf[Name176] + private val ClsName177 = classOf[Name177] + private val ClsName178 = classOf[Name178] + private val ClsName179 = classOf[Name179] + private val ClsName180 = classOf[Name180] + private val ClsName181 = classOf[Name181] + private val ClsName182 = classOf[Name182] + private val ClsName183 = classOf[Name183] + private val ClsName184 = classOf[Name184] + private val ClsName185 = classOf[Name185] + private val ClsName186 = 
classOf[Name186] + private val ClsName187 = classOf[Name187] + private val ClsName188 = classOf[Name188] + private val ClsName189 = classOf[Name189] + private val ClsName190 = classOf[Name190] + private val ClsName191 = classOf[Name191] + private val ClsName192 = classOf[Name192] + private val ClsName193 = classOf[Name193] + private val ClsName194 = classOf[Name194] + private val ClsName195 = classOf[Name195] + private val ClsName196 = classOf[Name196] + private val ClsName197 = classOf[Name197] + private val ClsName198 = classOf[Name198] + private val ClsName199 = classOf[Name199] + private val ClsName200 = classOf[Name200] + private val ClsName201 = classOf[Name201] + private val ClsName202 = classOf[Name202] + private val ClsName203 = classOf[Name203] + private val ClsName204 = classOf[Name204] + private val ClsName205 = classOf[Name205] + private val ClsName206 = classOf[Name206] + private val ClsName207 = classOf[Name207] + private val ClsName208 = classOf[Name208] + private val ClsName209 = classOf[Name209] + private val ClsName210 = classOf[Name210] + private val ClsName211 = classOf[Name211] + private val ClsName212 = classOf[Name212] + private val ClsName213 = classOf[Name213] + private val ClsName214 = classOf[Name214] + private val ClsName215 = classOf[Name215] + private val ClsName216 = classOf[Name216] + private val ClsName217 = classOf[Name217] + private val ClsName218 = classOf[Name218] + private val ClsName219 = classOf[Name219] + private val ClsName220 = classOf[Name220] + private val ClsName221 = classOf[Name221] + private val ClsName222 = classOf[Name222] + private val ClsName223 = classOf[Name223] + private val ClsName224 = classOf[Name224] + private val ClsName225 = classOf[Name225] + private val ClsName226 = classOf[Name226] + private val ClsName227 = classOf[Name227] + private val ClsName228 = classOf[Name228] + private val ClsName229 = classOf[Name229] + private val ClsName230 = classOf[Name230] + private val ClsName231 = classOf[Name231] + 
private val ClsName232 = classOf[Name232] + private val ClsName233 = classOf[Name233] + private val ClsName234 = classOf[Name234] + private val ClsName235 = classOf[Name235] + private val ClsName236 = classOf[Name236] + private val ClsName237 = classOf[Name237] + private val ClsName238 = classOf[Name238] + private val ClsName239 = classOf[Name239] + private val ClsName240 = classOf[Name240] + private val ClsName241 = classOf[Name241] + private val ClsName242 = classOf[Name242] + private val ClsName243 = classOf[Name243] + private val ClsName244 = classOf[Name244] + private val ClsName245 = classOf[Name245] + private val ClsName246 = classOf[Name246] + private val ClsName247 = classOf[Name247] + private val ClsName248 = classOf[Name248] + private val ClsName249 = classOf[Name249] + private val ClsName250 = classOf[Name250] + private val ClsName251 = classOf[Name251] + private val ClsName252 = classOf[Name252] + private val ClsName253 = classOf[Name253] + private val ClsName254 = classOf[Name254] + private val ClsName255 = classOf[Name255] +} + +sealed abstract class Name(val _id: Int) { + def virtualShow: String +} + +final case class Name0() extends Name(0) { def virtualShow = "0" } +final case class Name1() extends Name(1) { def virtualShow = "1" } +final case class Name2() extends Name(2) { def virtualShow = "2" } +final case class Name3() extends Name(3) { def virtualShow = "3" } +final case class Name4() extends Name(4) { def virtualShow = "4" } +final case class Name5() extends Name(5) { def virtualShow = "5" } +final case class Name6() extends Name(6) { def virtualShow = "6" } +final case class Name7() extends Name(7) { def virtualShow = "7" } +final case class Name8() extends Name(8) { def virtualShow = "8" } +final case class Name9() extends Name(9) { def virtualShow = "9" } +final case class Name10() extends Name(10) { def virtualShow = "10" } +final case class Name11() extends Name(11) { def virtualShow = "11" } +final case class Name12() extends Name(12) 
{ def virtualShow = "12" } +final case class Name13() extends Name(13) { def virtualShow = "13" } +final case class Name14() extends Name(14) { def virtualShow = "14" } +final case class Name15() extends Name(15) { def virtualShow = "15" } +final case class Name16() extends Name(16) { def virtualShow = "16" } +final case class Name17() extends Name(17) { def virtualShow = "17" } +final case class Name18() extends Name(18) { def virtualShow = "18" } +final case class Name19() extends Name(19) { def virtualShow = "19" } +final case class Name20() extends Name(20) { def virtualShow = "20" } +final case class Name21() extends Name(21) { def virtualShow = "21" } +final case class Name22() extends Name(22) { def virtualShow = "22" } +final case class Name23() extends Name(23) { def virtualShow = "23" } +final case class Name24() extends Name(24) { def virtualShow = "24" } +final case class Name25() extends Name(25) { def virtualShow = "25" } +final case class Name26() extends Name(26) { def virtualShow = "26" } +final case class Name27() extends Name(27) { def virtualShow = "27" } +final case class Name28() extends Name(28) { def virtualShow = "28" } +final case class Name29() extends Name(29) { def virtualShow = "29" } +final case class Name30() extends Name(30) { def virtualShow = "30" } +final case class Name31() extends Name(31) { def virtualShow = "31" } +final case class Name32() extends Name(32) { def virtualShow = "32" } +final case class Name33() extends Name(33) { def virtualShow = "33" } +final case class Name34() extends Name(34) { def virtualShow = "34" } +final case class Name35() extends Name(35) { def virtualShow = "35" } +final case class Name36() extends Name(36) { def virtualShow = "36" } +final case class Name37() extends Name(37) { def virtualShow = "37" } +final case class Name38() extends Name(38) { def virtualShow = "38" } +final case class Name39() extends Name(39) { def virtualShow = "39" } +final case class Name40() extends Name(40) { def 
virtualShow = "40" } +final case class Name41() extends Name(41) { def virtualShow = "41" } +final case class Name42() extends Name(42) { def virtualShow = "42" } +final case class Name43() extends Name(43) { def virtualShow = "43" } +final case class Name44() extends Name(44) { def virtualShow = "44" } +final case class Name45() extends Name(45) { def virtualShow = "45" } +final case class Name46() extends Name(46) { def virtualShow = "46" } +final case class Name47() extends Name(47) { def virtualShow = "47" } +final case class Name48() extends Name(48) { def virtualShow = "48" } +final case class Name49() extends Name(49) { def virtualShow = "49" } +final case class Name50() extends Name(50) { def virtualShow = "50" } +final case class Name51() extends Name(51) { def virtualShow = "51" } +final case class Name52() extends Name(52) { def virtualShow = "52" } +final case class Name53() extends Name(53) { def virtualShow = "53" } +final case class Name54() extends Name(54) { def virtualShow = "54" } +final case class Name55() extends Name(55) { def virtualShow = "55" } +final case class Name56() extends Name(56) { def virtualShow = "56" } +final case class Name57() extends Name(57) { def virtualShow = "57" } +final case class Name58() extends Name(58) { def virtualShow = "58" } +final case class Name59() extends Name(59) { def virtualShow = "59" } +final case class Name60() extends Name(60) { def virtualShow = "60" } +final case class Name61() extends Name(61) { def virtualShow = "61" } +final case class Name62() extends Name(62) { def virtualShow = "62" } +final case class Name63() extends Name(63) { def virtualShow = "63" } +final case class Name64() extends Name(64) { def virtualShow = "64" } +final case class Name65() extends Name(65) { def virtualShow = "65" } +final case class Name66() extends Name(66) { def virtualShow = "66" } +final case class Name67() extends Name(67) { def virtualShow = "67" } +final case class Name68() extends Name(68) { def virtualShow 
= "68" } +final case class Name69() extends Name(69) { def virtualShow = "69" } +final case class Name70() extends Name(70) { def virtualShow = "70" } +final case class Name71() extends Name(71) { def virtualShow = "71" } +final case class Name72() extends Name(72) { def virtualShow = "72" } +final case class Name73() extends Name(73) { def virtualShow = "73" } +final case class Name74() extends Name(74) { def virtualShow = "74" } +final case class Name75() extends Name(75) { def virtualShow = "75" } +final case class Name76() extends Name(76) { def virtualShow = "76" } +final case class Name77() extends Name(77) { def virtualShow = "77" } +final case class Name78() extends Name(78) { def virtualShow = "78" } +final case class Name79() extends Name(79) { def virtualShow = "79" } +final case class Name80() extends Name(80) { def virtualShow = "80" } +final case class Name81() extends Name(81) { def virtualShow = "81" } +final case class Name82() extends Name(82) { def virtualShow = "82" } +final case class Name83() extends Name(83) { def virtualShow = "83" } +final case class Name84() extends Name(84) { def virtualShow = "84" } +final case class Name85() extends Name(85) { def virtualShow = "85" } +final case class Name86() extends Name(86) { def virtualShow = "86" } +final case class Name87() extends Name(87) { def virtualShow = "87" } +final case class Name88() extends Name(88) { def virtualShow = "88" } +final case class Name89() extends Name(89) { def virtualShow = "89" } +final case class Name90() extends Name(90) { def virtualShow = "90" } +final case class Name91() extends Name(91) { def virtualShow = "91" } +final case class Name92() extends Name(92) { def virtualShow = "92" } +final case class Name93() extends Name(93) { def virtualShow = "93" } +final case class Name94() extends Name(94) { def virtualShow = "94" } +final case class Name95() extends Name(95) { def virtualShow = "95" } +final case class Name96() extends Name(96) { def virtualShow = "96" } 
+final case class Name97() extends Name(97) { def virtualShow = "97" } +final case class Name98() extends Name(98) { def virtualShow = "98" } +final case class Name99() extends Name(99) { def virtualShow = "99" } +final case class Name100() extends Name(100) { def virtualShow = "100" } +final case class Name101() extends Name(101) { def virtualShow = "101" } +final case class Name102() extends Name(102) { def virtualShow = "102" } +final case class Name103() extends Name(103) { def virtualShow = "103" } +final case class Name104() extends Name(104) { def virtualShow = "104" } +final case class Name105() extends Name(105) { def virtualShow = "105" } +final case class Name106() extends Name(106) { def virtualShow = "106" } +final case class Name107() extends Name(107) { def virtualShow = "107" } +final case class Name108() extends Name(108) { def virtualShow = "108" } +final case class Name109() extends Name(109) { def virtualShow = "109" } +final case class Name110() extends Name(110) { def virtualShow = "110" } +final case class Name111() extends Name(111) { def virtualShow = "111" } +final case class Name112() extends Name(112) { def virtualShow = "112" } +final case class Name113() extends Name(113) { def virtualShow = "113" } +final case class Name114() extends Name(114) { def virtualShow = "114" } +final case class Name115() extends Name(115) { def virtualShow = "115" } +final case class Name116() extends Name(116) { def virtualShow = "116" } +final case class Name117() extends Name(117) { def virtualShow = "117" } +final case class Name118() extends Name(118) { def virtualShow = "118" } +final case class Name119() extends Name(119) { def virtualShow = "119" } +final case class Name120() extends Name(120) { def virtualShow = "120" } +final case class Name121() extends Name(121) { def virtualShow = "121" } +final case class Name122() extends Name(122) { def virtualShow = "122" } +final case class Name123() extends Name(123) { def virtualShow = "123" } +final 
case class Name124() extends Name(124) { def virtualShow = "124" } +final case class Name125() extends Name(125) { def virtualShow = "125" } +final case class Name126() extends Name(126) { def virtualShow = "126" } +final case class Name127() extends Name(127) { def virtualShow = "127" } +final case class Name128() extends Name(128) { def virtualShow = "128" } +final case class Name129() extends Name(129) { def virtualShow = "129" } +final case class Name130() extends Name(130) { def virtualShow = "130" } +final case class Name131() extends Name(131) { def virtualShow = "131" } +final case class Name132() extends Name(132) { def virtualShow = "132" } +final case class Name133() extends Name(133) { def virtualShow = "133" } +final case class Name134() extends Name(134) { def virtualShow = "134" } +final case class Name135() extends Name(135) { def virtualShow = "135" } +final case class Name136() extends Name(136) { def virtualShow = "136" } +final case class Name137() extends Name(137) { def virtualShow = "137" } +final case class Name138() extends Name(138) { def virtualShow = "138" } +final case class Name139() extends Name(139) { def virtualShow = "139" } +final case class Name140() extends Name(140) { def virtualShow = "140" } +final case class Name141() extends Name(141) { def virtualShow = "141" } +final case class Name142() extends Name(142) { def virtualShow = "142" } +final case class Name143() extends Name(143) { def virtualShow = "143" } +final case class Name144() extends Name(144) { def virtualShow = "144" } +final case class Name145() extends Name(145) { def virtualShow = "145" } +final case class Name146() extends Name(146) { def virtualShow = "146" } +final case class Name147() extends Name(147) { def virtualShow = "147" } +final case class Name148() extends Name(148) { def virtualShow = "148" } +final case class Name149() extends Name(149) { def virtualShow = "149" } +final case class Name150() extends Name(150) { def virtualShow = "150" } +final 
case class Name151() extends Name(151) { def virtualShow = "151" } +final case class Name152() extends Name(152) { def virtualShow = "152" } +final case class Name153() extends Name(153) { def virtualShow = "153" } +final case class Name154() extends Name(154) { def virtualShow = "154" } +final case class Name155() extends Name(155) { def virtualShow = "155" } +final case class Name156() extends Name(156) { def virtualShow = "156" } +final case class Name157() extends Name(157) { def virtualShow = "157" } +final case class Name158() extends Name(158) { def virtualShow = "158" } +final case class Name159() extends Name(159) { def virtualShow = "159" } +final case class Name160() extends Name(160) { def virtualShow = "160" } +final case class Name161() extends Name(161) { def virtualShow = "161" } +final case class Name162() extends Name(162) { def virtualShow = "162" } +final case class Name163() extends Name(163) { def virtualShow = "163" } +final case class Name164() extends Name(164) { def virtualShow = "164" } +final case class Name165() extends Name(165) { def virtualShow = "165" } +final case class Name166() extends Name(166) { def virtualShow = "166" } +final case class Name167() extends Name(167) { def virtualShow = "167" } +final case class Name168() extends Name(168) { def virtualShow = "168" } +final case class Name169() extends Name(169) { def virtualShow = "169" } +final case class Name170() extends Name(170) { def virtualShow = "170" } +final case class Name171() extends Name(171) { def virtualShow = "171" } +final case class Name172() extends Name(172) { def virtualShow = "172" } +final case class Name173() extends Name(173) { def virtualShow = "173" } +final case class Name174() extends Name(174) { def virtualShow = "174" } +final case class Name175() extends Name(175) { def virtualShow = "175" } +final case class Name176() extends Name(176) { def virtualShow = "176" } +final case class Name177() extends Name(177) { def virtualShow = "177" } +final 
case class Name178() extends Name(178) { def virtualShow = "178" } +final case class Name179() extends Name(179) { def virtualShow = "179" } +final case class Name180() extends Name(180) { def virtualShow = "180" } +final case class Name181() extends Name(181) { def virtualShow = "181" } +final case class Name182() extends Name(182) { def virtualShow = "182" } +final case class Name183() extends Name(183) { def virtualShow = "183" } +final case class Name184() extends Name(184) { def virtualShow = "184" } +final case class Name185() extends Name(185) { def virtualShow = "185" } +final case class Name186() extends Name(186) { def virtualShow = "186" } +final case class Name187() extends Name(187) { def virtualShow = "187" } +final case class Name188() extends Name(188) { def virtualShow = "188" } +final case class Name189() extends Name(189) { def virtualShow = "189" } +final case class Name190() extends Name(190) { def virtualShow = "190" } +final case class Name191() extends Name(191) { def virtualShow = "191" } +final case class Name192() extends Name(192) { def virtualShow = "192" } +final case class Name193() extends Name(193) { def virtualShow = "193" } +final case class Name194() extends Name(194) { def virtualShow = "194" } +final case class Name195() extends Name(195) { def virtualShow = "195" } +final case class Name196() extends Name(196) { def virtualShow = "196" } +final case class Name197() extends Name(197) { def virtualShow = "197" } +final case class Name198() extends Name(198) { def virtualShow = "198" } +final case class Name199() extends Name(199) { def virtualShow = "199" } +final case class Name200() extends Name(200) { def virtualShow = "200" } +final case class Name201() extends Name(201) { def virtualShow = "201" } +final case class Name202() extends Name(202) { def virtualShow = "202" } +final case class Name203() extends Name(203) { def virtualShow = "203" } +final case class Name204() extends Name(204) { def virtualShow = "204" } +final 
case class Name205() extends Name(205) { def virtualShow = "205" } +final case class Name206() extends Name(206) { def virtualShow = "206" } +final case class Name207() extends Name(207) { def virtualShow = "207" } +final case class Name208() extends Name(208) { def virtualShow = "208" } +final case class Name209() extends Name(209) { def virtualShow = "209" } +final case class Name210() extends Name(210) { def virtualShow = "210" } +final case class Name211() extends Name(211) { def virtualShow = "211" } +final case class Name212() extends Name(212) { def virtualShow = "212" } +final case class Name213() extends Name(213) { def virtualShow = "213" } +final case class Name214() extends Name(214) { def virtualShow = "214" } +final case class Name215() extends Name(215) { def virtualShow = "215" } +final case class Name216() extends Name(216) { def virtualShow = "216" } +final case class Name217() extends Name(217) { def virtualShow = "217" } +final case class Name218() extends Name(218) { def virtualShow = "218" } +final case class Name219() extends Name(219) { def virtualShow = "219" } +final case class Name220() extends Name(220) { def virtualShow = "220" } +final case class Name221() extends Name(221) { def virtualShow = "221" } +final case class Name222() extends Name(222) { def virtualShow = "222" } +final case class Name223() extends Name(223) { def virtualShow = "223" } +final case class Name224() extends Name(224) { def virtualShow = "224" } +final case class Name225() extends Name(225) { def virtualShow = "225" } +final case class Name226() extends Name(226) { def virtualShow = "226" } +final case class Name227() extends Name(227) { def virtualShow = "227" } +final case class Name228() extends Name(228) { def virtualShow = "228" } +final case class Name229() extends Name(229) { def virtualShow = "229" } +final case class Name230() extends Name(230) { def virtualShow = "230" } +final case class Name231() extends Name(231) { def virtualShow = "231" } +final 
case class Name232() extends Name(232) { def virtualShow = "232" } +final case class Name233() extends Name(233) { def virtualShow = "233" } +final case class Name234() extends Name(234) { def virtualShow = "234" } +final case class Name235() extends Name(235) { def virtualShow = "235" } +final case class Name236() extends Name(236) { def virtualShow = "236" } +final case class Name237() extends Name(237) { def virtualShow = "237" } +final case class Name238() extends Name(238) { def virtualShow = "238" } +final case class Name239() extends Name(239) { def virtualShow = "239" } +final case class Name240() extends Name(240) { def virtualShow = "240" } +final case class Name241() extends Name(241) { def virtualShow = "241" } +final case class Name242() extends Name(242) { def virtualShow = "242" } +final case class Name243() extends Name(243) { def virtualShow = "243" } +final case class Name244() extends Name(244) { def virtualShow = "244" } +final case class Name245() extends Name(245) { def virtualShow = "245" } +final case class Name246() extends Name(246) { def virtualShow = "246" } +final case class Name247() extends Name(247) { def virtualShow = "247" } +final case class Name248() extends Name(248) { def virtualShow = "248" } +final case class Name249() extends Name(249) { def virtualShow = "249" } +final case class Name250() extends Name(250) { def virtualShow = "250" } +final case class Name251() extends Name(251) { def virtualShow = "251" } +final case class Name252() extends Name(252) { def virtualShow = "252" } +final case class Name253() extends Name(253) { def virtualShow = "253" } +final case class Name254() extends Name(254) { def virtualShow = "254" } +final case class Name255() extends Name(255) { def virtualShow = "255" } diff --git a/test/benchmarks/src/main/scala/scala/util/hashing/MurmurHash3Benchmark.scala b/test/benchmarks/src/main/scala/scala/util/hashing/MurmurHash3Benchmark.scala new file mode 100644 index 000000000000..3cc88124b583 --- 
/dev/null +++ b/test/benchmarks/src/main/scala/scala/util/hashing/MurmurHash3Benchmark.scala @@ -0,0 +1,159 @@ +package scala.util.hashing + +import java.util.concurrent.TimeUnit +import scala.collection.immutable.ArraySeq + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class MurmurHash3Benchmark { + + @Param(Array("0", "1", "2", "3", "10", "100", "1000", "10000")) + var size: Int = _ + var ordered: Array[Int] = _ + var mixed1: Array[Int] = _ + var mixed2: Array[Int] = _ + var orderedL: List[Int] = _ + var mixed1L: List[Int] = _ + var mixed2L: List[Int] = _ + var orderedI: IndexedSeq[Int] = _ + var mixed1I: IndexedSeq[Int] = _ + var mixed2I: IndexedSeq[Int] = _ + var range: Range = _ + + @Setup(Level.Trial) def initNumbers: Unit = { + range = (1 to size) + ordered = Array.iterate(1, size)(_ + 1) + mixed1 = Array.copyOf(ordered, ordered.length) + mixed2 = Array.copyOf(ordered, ordered.length) + if(size > 1) { + swap(mixed1, 0, 1) + swap(mixed2, mixed2.length-1, mixed2.length-2) + } + orderedL = ordered.toList + mixed1L = mixed1.toList + mixed2L = mixed2.toList + orderedI = ArraySeq.from(ordered) + mixed1I = ArraySeq.from(mixed1) + mixed2I = ArraySeq.from(mixed2) + } + + def swap(a: Array[Int], i1: Int, i2: Int): Unit = { + val tmp = a(i1) + a(i1) = a(i2) + a(i2) = tmp + } + + @Benchmark def rangeHash(bh: Blackhole): Unit = + bh.consume(MurmurHash3.rangeHash(1, 1, size, MurmurHash3.seqSeed)) + + + @Benchmark def oldArrayHashOrdered(bh: Blackhole): Unit = + bh.consume(OldMurmurHash3.oldArrayHash(ordered, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedArrayHashOrdered(bh: Blackhole): Unit = + bh.consume(MurmurHash3.arrayHash(ordered, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedArrayHashMixed1(bh: Blackhole): Unit = + 
bh.consume(MurmurHash3.arrayHash(mixed1, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedArrayHashMixed2(bh: Blackhole): Unit = + bh.consume(MurmurHash3.arrayHash(mixed2, MurmurHash3.seqSeed)) + + @Benchmark def oldOrderedHashListOrdered(bh: Blackhole): Unit = + bh.consume(OldMurmurHash3.oldOrderedHash(orderedL, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedOrderedHashListOrdered(bh: Blackhole): Unit = + bh.consume(MurmurHash3.orderedHash(orderedL, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedOrderedHashListMixed1(bh: Blackhole): Unit = + bh.consume(MurmurHash3.orderedHash(mixed1L, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedOrderedHashListMixed2(bh: Blackhole): Unit = + bh.consume(MurmurHash3.orderedHash(mixed2L, MurmurHash3.seqSeed)) + + + @Benchmark def oldOrderedHashIndexedOrdered(bh: Blackhole): Unit = + bh.consume(OldMurmurHash3.oldOrderedHash(orderedI, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedOrderedHashIndexedOrdered(bh: Blackhole): Unit = + bh.consume(MurmurHash3.orderedHash(orderedI, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedOrderedHashIndexedMixed1(bh: Blackhole): Unit = + bh.consume(MurmurHash3.orderedHash(mixed1I, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedOrderedHashIndexedMixed2(bh: Blackhole): Unit = + bh.consume(MurmurHash3.orderedHash(mixed2I, MurmurHash3.seqSeed)) + + + @Benchmark def rangeOptimizedIndexedHashOrdered(bh: Blackhole): Unit = + bh.consume(MurmurHash3.indexedSeqHash(orderedI, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedIndexedHashMixed1(bh: Blackhole): Unit = + bh.consume(MurmurHash3.indexedSeqHash(mixed1I, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedIndexedHashMixed2(bh: Blackhole): Unit = + bh.consume(MurmurHash3.indexedSeqHash(mixed2I, MurmurHash3.seqSeed)) + + + @Benchmark def oldListHashOrdered(bh: Blackhole): Unit = + bh.consume(OldMurmurHash3.oldListHash(orderedL, MurmurHash3.seqSeed)) + + @Benchmark def 
rangeOptimizedListHashOrdered(bh: Blackhole): Unit = + bh.consume(MurmurHash3.listHash(orderedL, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedListHashMixed1(bh: Blackhole): Unit = + bh.consume(MurmurHash3.listHash(mixed1L, MurmurHash3.seqSeed)) + + @Benchmark def rangeOptimizedListHashMixed2(bh: Blackhole): Unit = + bh.consume(MurmurHash3.listHash(mixed2L, MurmurHash3.seqSeed)) +} + +object OldMurmurHash3 { + import MurmurHash3._ + + /** Compute a hash that depends on the order of its arguments. + */ + final def oldOrderedHash(xs: IterableOnce[Any], seed: Int): Int = { + var n = 0 + var h = seed + xs.iterator foreach { x => + h = mix(h, x.##) + n += 1 + } + finalizeHash(h, n) + } + + /** Compute the hash of an array. + */ + final def oldArrayHash[@specialized T](a: Array[T], seed: Int): Int = { + var h = seed + var i = 0 + while (i < a.length) { + h = mix(h, a(i).##) + i += 1 + } + finalizeHash(h, a.length) + } + + final def oldListHash(xs: scala.collection.immutable.List[_], seed: Int): Int = { + var n = 0 + var h = seed + var elems = xs + while (!elems.isEmpty) { + val head = elems.head + val tail = elems.tail + h = mix(h, head.##) + n += 1 + elems = tail + } + finalizeHash(h, n) + } +} diff --git a/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyBenchmark.scala b/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyBenchmark.scala index 4176bdc46bd5..f95f5f22939f 100644 --- a/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyBenchmark.scala @@ -25,20 +25,14 @@ class RegexUnapplyBenchmark { @Benchmark def t8022CharSequence(bh: Blackhole): Unit = { val full = t8022CharSequenceRegex val text = " When I use this operator: *" - // Testing 2.10.x compatibility of the return types of unapplySeq - val x :: Nil = full.unapplySeq(text: Any).get val y :: Nil = full.unapplySeq(text: CharSequence).get - bh.consume(x) bh.consume(y) } @Benchmark 
def t8022Match(bh: Blackhole): Unit = { val R = t8022MatchRegex val matchh = R.findFirstMatchIn("a1").get - // Testing 2.10.x compatibility of the return types of unapplySeq - val x :: Nil = R.unapplySeq(matchh: Any).get val y :: Nil = R.unapplySeq(matchh).get - bh.consume(x) bh.consume(y) } @@ -59,4 +53,4 @@ class RegexUnapplyBenchmark { bh.consume(z) } -} \ No newline at end of file +} diff --git a/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyGroupsBenchmark.scala b/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyGroupsBenchmark.scala index 2bf32d8fecd0..6924e8fa0a7e 100644 --- a/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyGroupsBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/util/matching/RegexUnapplyGroupsBenchmark.scala @@ -34,4 +34,4 @@ class RegexUnapplyGroupsBenchmark { bh.consume(res) } -} \ No newline at end of file +} diff --git a/test/benchmarks/vector-results.html b/test/benchmarks/vector-results.html new file mode 100644 index 000000000000..336028c1a609 --- /dev/null +++ b/test/benchmarks/vector-results.html @@ -0,0 +1,93 @@ + + + + + + + + + + + diff --git a/test/files/bench/equality/eq.scala b/test/files/bench/equality/eq.scala index 8ac5b5ef5c05..ad7cc60925e7 100644 --- a/test/files/bench/equality/eq.scala +++ b/test/files/bench/equality/eq.scala @@ -19,7 +19,7 @@ object eq extends testing.Benchmark { val obj1 = new Object val obj2 = new Object - def run() { + def run(): Unit = { var sum = 0 sum += eqtest(x => if (x == 0) obj1 else obj2, 2000) sum += eqtest(x => x, 1000) diff --git a/test/files/bench/equality/eqeq.eqlog b/test/files/bench/equality/eqeq.eqlog index 55a5eb430a96..0a126941c5b8 100644 --- a/test/files/bench/equality/eqeq.eqlog +++ b/test/files/bench/equality/eqeq.eqlog @@ -1,4 +1,4 @@ -Banchmark results for testing equality operations: +Benchmark results for testing equality operations: eq.scala: Base case, use eq equality only eqeq.scala: Test case, use == instead of eq. 
All tests run on Thinkpad T400, 1.6.0_12 client VM. diff --git a/test/files/bench/equality/eqeq.scala b/test/files/bench/equality/eqeq.scala index afccece88a58..0976333c49c8 100644 --- a/test/files/bench/equality/eqeq.scala +++ b/test/files/bench/equality/eqeq.scala @@ -31,7 +31,7 @@ object eqeq extends testing.Benchmark { val obj1 = new Object val obj2 = new Object - def run() { + def run(): Unit = { var sum = 0 sum += eqeqtest(x => if (x == 0) obj1 else obj2, 2000) sum += eqeqtest(x => x, 1000) diff --git a/test/files/filters b/test/files/filters index e91ca0eb3665..e85b2e1afaab 100644 --- a/test/files/filters +++ b/test/files/filters @@ -5,5 +5,3 @@ OpenJDK .* warning: # Hotspot receiving VM options through the $_JAVA_OPTIONS # env variable outputs them on stderr Picked up _JAVA_OPTIONS: -# Filter out a message caused by this bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=8021205 -objc\[\d+\]: Class JavaLaunchHelper is implemented in both .* and .*\. One of the two will be used\. Which one is undefined\. 
diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check index d317fc42077d..f0f447560a70 100644 --- a/test/files/instrumented/InstrumentationTest.check +++ b/test/files/instrumented/InstrumentationTest.check @@ -4,7 +4,5 @@ Method call statistics: 1 Foo1.someMethod()I 1 instrumented/Foo2.()V 1 instrumented/Foo2.someMethod()I - 1 scala/DeprecatedConsole.()V 1 scala/Predef$.println(Ljava/lang/Object;)V - 1 scala/io/AnsiColor.$init$(Lscala/io/AnsiColor;)V 1 scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean; diff --git a/test/files/instrumented/InstrumentationTest.scala b/test/files/instrumented/InstrumentationTest.scala index 458fd4974b75..c1e570f7a0e7 100644 --- a/test/files/instrumented/InstrumentationTest.scala +++ b/test/files/instrumented/InstrumentationTest.scala @@ -1,3 +1,4 @@ +//> using options -opt:none import scala.tools.partest.instrumented.Instrumentation._ /** We check if classes put in empty package are properly instrumented */ @@ -14,13 +15,17 @@ package instrumented { /** Tests if instrumentation itself works correctly */ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { if (scala.tools.partest.utils.Properties.isAvian) { println("!!!TEST SKIPPED!!!") println("Instrumentation is not supported on Avian.") } else { - // force predef initialization before profiling + // Predef initialization before profiling (lots of noise otherwise) Predef + // Console initialization to make this test work if the library is built with / without optimizer. + // The inliner inlines the call to AnsiColor.$init$, so it would show up in the profile when using + // a library built without optimizer. 
+ Console startProfiling() val foo1 = new Foo1 foo1.someMethod diff --git a/test/files/instrumented/README b/test/files/instrumented/README index 32d0ef2da535..b065a7c5cd4a 100644 --- a/test/files/instrumented/README +++ b/test/files/instrumented/README @@ -3,7 +3,7 @@ they have additional byte-code instrumentation performed for profiling. You should put your tests in `instrumented` directory if you are interested in method call counts. Examples include tests for specialization (you want to count boxing and unboxing method calls) or high-level tests for optimizer -where you are interested if methods are successfuly inlined (so they should +where you are interested if methods are successfully inlined (so they should not be called at runtime) or closures are eliminated (so no constructors of closures are called). diff --git a/test/files/instrumented/empty-seq.check b/test/files/instrumented/empty-seq.check new file mode 100644 index 000000000000..5cd691e93a55 --- /dev/null +++ b/test/files/instrumented/empty-seq.check @@ -0,0 +1 @@ +Method call statistics: diff --git a/test/files/instrumented/empty-seq.scala b/test/files/instrumented/empty-seq.scala new file mode 100644 index 000000000000..fe5687c25574 --- /dev/null +++ b/test/files/instrumented/empty-seq.scala @@ -0,0 +1,9 @@ + +import scala.tools.partest.instrumented.Instrumentation._ + +object Test extends App { + startProfiling() + Seq() + stopProfiling() + printStatistics() +} diff --git a/test/files/instrumented/indy-symbol-literal.scala b/test/files/instrumented/indy-symbol-literal.scala deleted file mode 100644 index a1c333cf95e6..000000000000 --- a/test/files/instrumented/indy-symbol-literal.scala +++ /dev/null @@ -1,19 +0,0 @@ -import scala.tools.partest.instrumented._ -import scala.tools.partest.instrumented.Instrumentation._ - -object Test { - def main(args: Array[String]): Unit = { - 'warmup - startProfiling() - var i = 0; - while (i < 2) { - 'foo.name - i += 1 - } - stopProfiling() - // Only expect a 
single call to lookup the interned Symbol at each call site the defines - // a single literal. - val Symbol_apply = MethodCallTrace("scala/Symbol$", "apply", "(Ljava/lang/String;)Lscala/Symbol;") - assert(getStatistics.get(Symbol_apply) == Some(1), getStatistics); - } -} diff --git a/test/files/instrumented/inline-in-constructors/assert_1.scala b/test/files/instrumented/inline-in-constructors/assert_1.scala index df25b72726bd..0ab3c4047773 100644 --- a/test/files/instrumented/inline-in-constructors/assert_1.scala +++ b/test/files/instrumented/inline-in-constructors/assert_1.scala @@ -1,9 +1,9 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** -Wopt package instrumented object MyPredef { @inline - final def assert(assertion: Boolean, message: => Any) { + final def assert(assertion: Boolean, message: => Any): Unit = { if (!assertion) throw new java.lang.AssertionError("assertion failed: " + message) } diff --git a/test/files/instrumented/inline-in-constructors/bar_2.scala b/test/files/instrumented/inline-in-constructors/bar_2.scala index 6c23832f87fb..b67ea9f2f90a 100644 --- a/test/files/instrumented/inline-in-constructors/bar_2.scala +++ b/test/files/instrumented/inline-in-constructors/bar_2.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** package instrumented /** Class that uses assert compiled in previous compiler run so we check if diff --git a/test/files/instrumented/inline-in-constructors/test_3.scala b/test/files/instrumented/inline-in-constructors/test_3.scala index 2f8b98cf25b9..d4e6cd6fe716 100644 --- a/test/files/instrumented/inline-in-constructors/test_3.scala +++ b/test/files/instrumented/inline-in-constructors/test_3.scala @@ -1,9 +1,9 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** -Wopt import scala.tools.partest.instrumented.Instrumentation._ import instrumented._ object Test { - def main(args: Array[String]) { + def main(args: 
Array[String]): Unit = { if (scala.tools.partest.utils.Properties.isAvian) { println("!!!TEST SKIPPED!!!") println("Instrumentation is not supported on Avian.") diff --git a/test/files/instrumented/t12201.scala b/test/files/instrumented/t12201.scala new file mode 100644 index 000000000000..f38ae90f55c7 --- /dev/null +++ b/test/files/instrumented/t12201.scala @@ -0,0 +1,32 @@ +import scala.tools.partest.instrumented.Instrumentation._ + +object Test { + @noinline def discard(x: Any) = () + + def main(args: Array[String]): Unit = { + discard((): Any) // ensure BoxedUnit is loaded; only under -opt is it not loaded before this method + startProfiling() + + // to optimized + val x = Array[Double](1) + val y = Array[Double](1.0) + + // Currently correctly optimized + val i = Array(1.0) + val j: Array[Double] = Array(1) + + //others case + val a: Array[Double] = Array[Double](1.0) + val b: Array[Double] = Array[Double](1) + val c: Array[Double] = Array[Double](1: Double) + val d: Array[Double] = Array(1: Double) + val e = Array(1: Double) + val f = Array(1: Int) + val g = Array[Int](1) + val h = Array(1) + val k = Array[Unit](()) + + stopProfiling() + assert(getStatistics.isEmpty) + } +} diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala index 4c52f8a5ef9e..08610b67efbe 100644 --- a/test/files/instrumented/t6611.scala +++ b/test/files/instrumented/t6611.scala @@ -1,7 +1,7 @@ import scala.tools.partest.instrumented.Instrumentation._ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { startProfiling() // tests optimization in Cleanup for varargs reference arrays diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check index 968288205b5f..d615224c040b 100644 --- a/test/files/jvm/annotations.check +++ b/test/files/jvm/annotations.check @@ -1,11 +1,7 @@ #partest java8 -Test_2.scala:8: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote 
instead and add @throws[java.rmi.RemoteException] to public methods - def foo: Unit = () - ^ -class java.rmi.RemoteException class java.io.IOException @java.lang.Deprecated() -@test.SourceAnnotation_1(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=http://scala-lang.org) +@test.SourceAnnotation_1(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=https://scala-lang.org) class Test4$Foo1 @test.SourceAnnotation_1(mails={you@bloodsuckers.com}, value=http://bloodsuckers.com) @@ -47,7 +43,7 @@ public int Test4$Foo9.getZ2() @test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) public int Test4$Foo9.getZ3() -@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=https://apple.com) public int Test4$Foo9.x() @test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://uppla.com) @@ -67,13 +63,9 @@ public void Test4$Foo12.name_$eq(java.lang.String) dylan 2 #partest !java8 -Test_2.scala:8: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods - def foo: Unit = () - ^ -class java.rmi.RemoteException class java.io.IOException @java.lang.Deprecated(forRemoval=false, since="") -@test.SourceAnnotation_1(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=http://scala-lang.org) +@test.SourceAnnotation_1(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=https://scala-lang.org) class Test4$Foo1 @test.SourceAnnotation_1(mails={you@bloodsuckers.com}, value=http://bloodsuckers.com) @@ -115,7 +107,7 @@ public int Test4$Foo9.getZ2() @test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) public int Test4$Foo9.getZ3() -@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, 
value=https://apple.com) public int Test4$Foo9.x() @test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://uppla.com) diff --git a/test/files/jvm/annotations/Test_2.scala b/test/files/jvm/annotations/Test_2.scala index f016215c2daf..d098f4084ea3 100644 --- a/test/files/jvm/annotations/Test_2.scala +++ b/test/files/jvm/annotations/Test_2.scala @@ -1,17 +1,7 @@ -// scalac: -deprecation +//> using options -deprecation +// import scala.tools.partest.Util.ArrayDeep -import scala.language.{ higherKinds, reflectiveCalls } - -object Test1 { - class Foo { - @remote - def foo: Unit = () - } - def run { - val method = classOf[Foo].getMethod("foo") - method.getExceptionTypes foreach println - } -} +import scala.language.reflectiveCalls object Test2 { import java.io.{BufferedReader,FileReader, IOException} @@ -21,7 +11,7 @@ object Test2 { @throws(classOf[IOException]) def read() = in.read() } - def run { + def run: Unit = { val method = classOf[Reader].getMethod("read") method.getExceptionTypes foreach println } @@ -44,7 +34,7 @@ object Test3 { @Deprecated def foo: Unit = () } - def run { + def run: Unit = { val method = classOf[Foo].getMethod("foo") val annotation = method.getAnnotation(classOf[Deprecated]) println(annotation) @@ -57,7 +47,7 @@ object Test3 { public String url(); public String mail(); } -@Source(url="http://scala.epfl.ch", mail="scala@lists.epfl.ch") +@Source(url="https://scala.epfl.ch", mail="scala@lists.epfl.ch") class Foo {} public class Main { public static void main(String[] args) throws Exception { @@ -65,13 +55,13 @@ public class Main { Annotation[] annotations = clazz.getAnnotations(); for (int i = 0; i < annotations.length; i++) System.out.println(annotations[i]); - // @test.Main$Source(url=http://scala-lang.org, mail=scala@lists.epfl.ch) + // @test.Main$Source(url=https://scala-lang.org, mail=scala@lists.epfl.ch) } } */ object Test4 { import test.SourceAnnotation_1 - @SourceAnnotation_1(value = "http://scala-lang.org", + 
@SourceAnnotation_1(value = "https://scala-lang.org", mails = Array("scala@lists.epfl.ch", "scala-lounge@lists.epfl.ch")) class Foo1 @SourceAnnotation_1(value = "http://bloodsuckers.com", @@ -100,7 +90,7 @@ object Test4 { class Foo9 { import scala.annotation.meta._ import scala.beans.BeanProperty - @(SourceAnnotation_1 @getter)("http://apple.com") val x = 0 + @(SourceAnnotation_1 @getter)("https://apple.com") val x = 0 @BeanProperty @(SourceAnnotation_1 @beanSetter)("http://uppla.com") var y = 0 type myAnn = SourceAnnotation_1 @beanGetter @field @@ -115,30 +105,30 @@ object Test4 { class Foo10(@SourceAnnotation_1("on param 1") val name: String) class Foo11(@(SourceAnnotation_1 @scala.annotation.meta.field)("on param 2") val name: String) class Foo12(@(SourceAnnotation_1 @scala.annotation.meta.setter)("on param 3") var name: String) - def run { + def run: Unit = { import java.lang.annotation.Annotation import java.lang.reflect.AnnotatedElement - def printSourceAnnotation(a: Annotation) { + def printSourceAnnotation(a: Annotation): Unit = { val ann = a.asInstanceOf[SourceAnnotation_1] println("@test.SourceAnnotation_1(mails=" + ann.mails.deep.mkString("{", ",", "}") + ", value=" + ann.value + ")") } - def printSourceAnnotations(target: AnnotatedElement) { + def printSourceAnnotations(target: AnnotatedElement): Unit = { //print SourceAnnotation in a predefined way to insure // against difference in the JVMs (e.g. 
Sun's vs IBM's) val anns = target.getAnnotations() anns foreach printSourceAnnotation if (anns.length > 0) { println(target) - println + println() } } - def printParamSourceAnnotations(target: { def getParameterAnnotations(): Array[Array[Annotation]] }) { + def printParamSourceAnnotations(target: { def getParameterAnnotations(): Array[Array[Annotation]] }): Unit = { val anns = target.getParameterAnnotations().flatten anns foreach printSourceAnnotation if (anns.length > 0) { println(target) - println + println() } } printSourceAnnotations(classOf[Foo1]) @@ -183,7 +173,7 @@ object Test5 { def get = getter.invoke(this).asInstanceOf[Integer].intValue def set(n: Int) = setter.invoke(this, Integer.valueOf(n)) } - def run { + def run: Unit = { val count = new Count println(count.get) count.set(99) @@ -199,7 +189,7 @@ object Test6 { @BeanProperty val m: Int = if (prop) 1 else 2 } - def run { + def run: Unit = { val c = new C("bob") c.setText("dylan") println(c.getText()) @@ -215,8 +205,7 @@ object Test6 { class A3345(@volatile private var i:Int) object Test { - def main(args: Array[String]) { - Test1.run + def main(args: Array[String]): Unit = { Test2.run Test3.run // requires the use of -target:jvm-1.5 Test4.run diff --git a/test/files/jvm/beanInfo.check b/test/files/jvm/beanInfo.check deleted file mode 100644 index d74e127711f0..000000000000 --- a/test/files/jvm/beanInfo.check +++ /dev/null @@ -1,6 +0,0 @@ -property descriptors -x -- int -- public int p.C.x() -- null -y -- class java.lang.String -- public java.lang.String p.C.y() -- public void p.C.y_$eq(java.lang.String) -z -- class scala.collection.immutable.List -- public scala.collection.immutable.List p.C.z() -- public void p.C.z_$eq(scala.collection.immutable.List) -method descriptors -f -- public p.C p.C.f() diff --git a/test/files/jvm/beanInfo/C_1.scala b/test/files/jvm/beanInfo/C_1.scala deleted file mode 100644 index a338abea1d28..000000000000 --- a/test/files/jvm/beanInfo/C_1.scala +++ /dev/null @@ -1,9 +0,0 
@@ -package p - -@scala.beans.BeanInfo -class C { - val x: Int = 0 - var y: String = "" - var z: List[_] = Nil - def f: C = ??? -} diff --git a/test/files/jvm/beanInfo/Test_2.scala b/test/files/jvm/beanInfo/Test_2.scala deleted file mode 100644 index fa9b6e139129..000000000000 --- a/test/files/jvm/beanInfo/Test_2.scala +++ /dev/null @@ -1,17 +0,0 @@ -object Test extends App { - val info = java.beans.Introspector.getBeanInfo(classOf[p.C]) - - println("property descriptors") - - val pds = info.getPropertyDescriptors - for (pd <- pds) { - println(s"${pd.getName} -- ${pd.getPropertyType} -- ${pd.getReadMethod} -- ${pd.getWriteMethod}") - } - - println("method descriptors") - - val mds = info.getMethodDescriptors - for (md <- mds) { - println(s"${md.getName} -- ${md.getMethod}") - } -} diff --git a/test/files/jvm/bigints.scala b/test/files/jvm/bigints.scala index 06197cbb430a..c587233b1b0b 100644 --- a/test/files/jvm/bigints.scala +++ b/test/files/jvm/bigints.scala @@ -4,14 +4,14 @@ * @author Stephane Micheloud */ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Test_BigInt.runTest() Test_BigDecimal.runTest() } } object Test_BigInt { - def runTest() { + def runTest(): Unit = { import BigInt._ val x: BigInt = 1 @@ -26,7 +26,7 @@ object Test_BigInt { } object Test_BigDecimal { - def runTest() { + def runTest(): Unit = { import scala.BigDecimal, BigDecimal._ val xi: BigDecimal = 1 diff --git a/test/files/jvm/bytecode-test-example/Foo_1.scala b/test/files/jvm/bytecode-test-example/Foo_1.scala index c20115ac9bc1..465291351ee7 100644 --- a/test/files/jvm/bytecode-test-example/Foo_1.scala +++ b/test/files/jvm/bytecode-test-example/Foo_1.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:none +//> using options -opt:none class Foo_1 { def foo(x: AnyRef): Int = { val bool = x == null diff --git a/test/files/jvm/bytecode-test-example/Test.scala b/test/files/jvm/bytecode-test-example/Test.scala index af0e91e448e6..4bee72b53d9c 100644 --- 
a/test/files/jvm/bytecode-test-example/Test.scala +++ b/test/files/jvm/bytecode-test-example/Test.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:none +//> using options -opt:none import scala.tools.partest.BytecodeTest import scala.tools.nsc.util.JavaClassPath diff --git a/test/files/jvm/console.scala b/test/files/jvm/console.scala index 0ac43f2d263d..844f17de4aab 100644 --- a/test/files/jvm/console.scala +++ b/test/files/jvm/console.scala @@ -6,7 +6,7 @@ object Test extends App { print(true) print(1) print(1.0) - flush + flush() println("..") println(1) printf("Argument nr. %d has value %1.2f\n", diff --git a/test/files/jvm/deprecation.check b/test/files/jvm/deprecation.check index a135a2ef4bcb..2997495c7b9a 100644 --- a/test/files/jvm/deprecation.check +++ b/test/files/jvm/deprecation.check @@ -1,2 +1,28 @@ -Note: deprecation/Use_2.java uses or overrides a deprecated API. -Note: Recompile with -Xlint:deprecation for details. +Test_1.scala:7: warning: variable i in class Defs is deprecated + val u = d.i + 1 + ^ +Test_1.scala:8: warning: variable i in class Defs is deprecated + d.i = 2 + ^ +Test_1.scala:9: warning: method bar in class Defs is deprecated + val v = d.bar() + ^ +Test_1.scala:10: warning: class Inner in class Defs is deprecated + val i = new d.Inner + ^ +deprecation/Use_2.java:7: warning: [deprecation] Test.Inner in Test has been deprecated + Test.Inner a = u.new Inner(); + ^ +deprecation/Use_2.java:7: warning: [deprecation] Test.Inner in Test has been deprecated + Test.Inner a = u.new Inner(); + ^ +deprecation/Use_2.java:8: warning: [deprecation] f() in Test.Inner has been deprecated + int i = a.f(); + ^ +deprecation/Use_2.java:9: warning: [deprecation] g() in Test.Inner has been deprecated + int j = a.g(); + ^ +deprecation/Use_2.java:10: warning: [deprecation] g_$eq(int) in Test.Inner has been deprecated + a.g_$eq(5); + ^ +5 warnings diff --git a/test/files/jvm/deprecation/Test_1.scala b/test/files/jvm/deprecation/Test_1.scala index 
0a5b6070379a..3288e20a4a5b 100644 --- a/test/files/jvm/deprecation/Test_1.scala +++ b/test/files/jvm/deprecation/Test_1.scala @@ -1,5 +1,8 @@ + +//> using options -Xlint:deprecation + class Test { - def test { + def test: Unit = { val d = new Defs val u = d.i + 1 d.i = 2 @@ -14,4 +17,4 @@ class Test { } } -object Test { def main(args: Array[String]) { } } +object Test { def main(args: Array[String]): Unit = { } } diff --git a/test/files/jvm/deprecation/Use_2.java b/test/files/jvm/deprecation/Use_2.java index 65da8a8fac90..c821f028d4c1 100644 --- a/test/files/jvm/deprecation/Use_2.java +++ b/test/files/jvm/deprecation/Use_2.java @@ -1,3 +1,6 @@ + +//> using javacOpt -Xlint:deprecation + class Use_2 { public int test() { Test u = new Test(); @@ -7,4 +10,4 @@ public int test() { a.g_$eq(5); return i + j; } -} \ No newline at end of file +} diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala index 400763f619b8..558fe83c9f5f 100644 --- a/test/files/jvm/duration-tck.scala +++ b/test/files/jvm/duration-tck.scala @@ -3,10 +3,9 @@ */ import scala.concurrent.duration._ -import scala.reflect._ -import scala.tools.partest.TestUtil.intercept +import scala.tools.testkit.AssertUtil.assertThrows -import scala.language.{ postfixOps } +import scala.language.postfixOps object Test extends App { @@ -135,29 +134,29 @@ object Test extends App { val dur = Duration(x, unit) val mdur = Duration(-x, unit) -mdur mustBe (dur) - intercept[IllegalArgumentException] { Duration(x + 10000000d, unit) } - intercept[IllegalArgumentException] { Duration(-x - 10000000d, unit) } + assertThrows[IllegalArgumentException] { Duration(x + 10000000d, unit) } + assertThrows[IllegalArgumentException] { Duration(-x - 10000000d, unit) } if (unit != NANOSECONDS) { - intercept[IllegalArgumentException] { Duration(x + 1, unit) } - intercept[IllegalArgumentException] { Duration(-x - 1, unit) } + assertThrows[IllegalArgumentException] { Duration(x + 1, unit) } + 
assertThrows[IllegalArgumentException] { Duration(-x - 1, unit) } } - intercept[IllegalArgumentException] { dur + 1.day } - intercept[IllegalArgumentException] { mdur - 1.day } - intercept[IllegalArgumentException] { dur * 1.1 } - intercept[IllegalArgumentException] { mdur * 1.1 } - intercept[IllegalArgumentException] { dur * 2.1 } - intercept[IllegalArgumentException] { mdur * 2.1 } - intercept[IllegalArgumentException] { dur / 0.9 } - intercept[IllegalArgumentException] { mdur / 0.9 } - intercept[IllegalArgumentException] { dur / 0.4 } - intercept[IllegalArgumentException] { mdur / 0.4 } - Duration(x + unit.toString.toLowerCase) + assertThrows[IllegalArgumentException] { dur + 1.day } + assertThrows[IllegalArgumentException] { mdur - 1.day } + assertThrows[IllegalArgumentException] { dur * 1.1 } + assertThrows[IllegalArgumentException] { mdur * 1.1 } + assertThrows[IllegalArgumentException] { dur * 2.1 } + assertThrows[IllegalArgumentException] { mdur * 2.1 } + assertThrows[IllegalArgumentException] { dur / 0.9 } + assertThrows[IllegalArgumentException] { mdur / 0.9 } + assertThrows[IllegalArgumentException] { dur / 0.4 } + assertThrows[IllegalArgumentException] { mdur / 0.4 } + Duration(x.toString + unit.toString.toLowerCase) Duration("-" + x + unit.toString.toLowerCase) - intercept[IllegalArgumentException] { Duration("%.0f".format(x + 10000000d) + unit.toString.toLowerCase) } - intercept[IllegalArgumentException] { Duration("-%.0f".format(x + 10000000d) + unit.toString.toLowerCase) } + assertThrows[IllegalArgumentException] { Duration("%.0f".format(x + 10000000d) + unit.toString.toLowerCase) } + assertThrows[IllegalArgumentException] { Duration("-%.0f".format(x + 10000000d) + unit.toString.toLowerCase) } } - intercept[IllegalArgumentException] { Duration.fromNanos(1e20) } - intercept[IllegalArgumentException] { Duration.fromNanos(-1e20) } + assertThrows[IllegalArgumentException] { Duration.fromNanos(1e20) } + assertThrows[IllegalArgumentException] { 
Duration.fromNanos(-1e20) } // test precision @@ -224,4 +223,12 @@ object Test extends App { // scala/bug#10320 Duration("6803536004516701ns").toNanos mustBe 6803536004516701L + + // scala/bug#12180 + Duration("9007199254740992 microseconds").toString mustBe "9007199254740992 microseconds" + Duration("9007199254740993 microseconds").toString mustBe "9007199254740993 microseconds" + Duration("-9007199254740992 microseconds").toString mustBe "-9007199254740992 microseconds" + Duration("-9007199254740993 microseconds").toString mustBe "-9007199254740993 microseconds" + Duration("-7036832630452943 microseconds").toString mustBe "-7036832630452943 microseconds" + Duration("2.134 s") mustBe Duration(2134, MILLISECONDS) } diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check index e69de29bb2d1..175774457643 100644 --- a/test/files/jvm/future-spec.check +++ b/test/files/jvm/future-spec.check @@ -0,0 +1,3 @@ +warning: 5 deprecations (since 2.13.0); re-run with -deprecation for details +FutureTests$$anon$2: do not rethrow +FutureTests$$anon$3: expected diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index c4d985f9422a..7181abd144c7 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -4,13 +4,14 @@ import scala.concurrent.duration.Duration.Inf import scala.collection._ import scala.runtime.NonLocalReturnControl import scala.util.{Try,Success,Failure} +import scala.util.control.NoStackTrace +import java.util.concurrent.ForkJoinPool -@annotation.nowarn("cat=deprecation") class FutureTests extends MinimalScalaTest { /* some utils */ - def testAsync(s: String)(implicit ec: ExecutionContext): Future[String] = s match { + def testAsync(s: String)(implicit ec: ExecutionContext): Future[String] = (s: @unchecked) match { case "Hello" => Future { "World" } case "Failure" => Future.failed(new RuntimeException("Expected exception; to test 
fault-tolerance")) case "NoReply" => Promise[String]().future @@ -29,16 +30,16 @@ class FutureTests extends MinimalScalaTest { t } - val defaultTimeout = 5 seconds + val defaultTimeout = Test.DefaultTimeout /* future specification */ "A future with custom ExecutionContext" should { "shouldHandleThrowables" in { - val ms = new mutable.HashSet[Throwable] with mutable.SynchronizedSet[Throwable] - implicit val ec = scala.concurrent.ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(), { + val ms = new concurrent.TrieMap[Throwable, Unit] + implicit val ec: ExecutionContextExecutor = ExecutionContext.fromExecutor(new ForkJoinPool(1), { t => - ms += t + ms.addOne((t, ())) }) class ThrowableTest(m: String) extends Throwable(m) @@ -52,36 +53,52 @@ class FutureTests extends MinimalScalaTest { } val latch = new TestLatch + val endLatch = new TestLatch(4) val f2 = Future { Await.ready(latch, 5 seconds) "success" } - val f3 = f2 map { s => s.toUpperCase } - f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") } - f2 onSuccess { case _ => throw new ThrowableTest("dispatcher receive") } + f2 foreach { _ => endLatch.countDown(); throw new ThrowableTest("dispatcher foreach") } + f2 onComplete { case Success(_) => endLatch.countDown(); throw new ThrowableTest("dispatcher onComplete"); case _ => endLatch.countDown() } latch.open() Await.result(f2, defaultTimeout) mustBe ("success") - f2 foreach { _ => throw new ThrowableTest("current thread foreach") } - f2 onSuccess { case _ => throw new ThrowableTest("current thread receive") } + f2 foreach { _ => endLatch.countDown(); throw new ThrowableTest("current thread foreach") } + f2 onComplete { case Success(_) => endLatch.countDown(); throw new ThrowableTest("current thread onComplete"); case _ => endLatch.countDown() } - Await.result(f3, defaultTimeout) mustBe ("SUCCESS") + Await.result(f2 map { s => s.toUpperCase }, defaultTimeout) mustBe ("SUCCESS") + waitForIt(endLatch.isOpen) - val waiting = Future { - 
Thread.sleep(1000) - } - Await.ready(waiting, 4000 millis) - - if (ms.size != 4) - assert(ms.size != 4, "Expected 4 throwables, found: " + ms) - //FIXME should check + ms.size mustBe 4 + val msgs = ms.keysIterator.map(_.getMessage).toSet + val expectedMsgs = Set("dispatcher foreach", "dispatcher onComplete", "current thread foreach", "current thread onComplete") + msgs mustBe expectedMsgs } } "Futures" should { + + "not be serializable" in { + + def verifyNonSerializabilityFor(p: Future[_]): Unit = { + import java.io._ + val out = new ObjectOutputStream(new ByteArrayOutputStream()) + intercept[NotSerializableException] { + out.writeObject(p) + } + } + verifyNonSerializabilityFor(Await.ready(Future.unit.map(_ => ())(ExecutionContext.global), defaultTimeout)) + verifyNonSerializabilityFor(Future.unit) + verifyNonSerializabilityFor(Future.failed(new NullPointerException)) + verifyNonSerializabilityFor(Future.successful("test")) + verifyNonSerializabilityFor(Future.fromTry(Success("test"))) + verifyNonSerializabilityFor(Future.fromTry(Failure(new NullPointerException))) + verifyNonSerializabilityFor(Future.never) + } + "have proper toString representations" in { import ExecutionContext.Implicits.global val s = 5 @@ -110,7 +127,6 @@ class FutureTests extends MinimalScalaTest { val s = "foo" val f = Future.successful(s) - ECNotUsed(ec => f.onFailure({ case _ => fail("onFailure should not have been called") })(ec)) assert( ECNotUsed(ec => f.recover({ case _ => fail("recover should not have been called")})(ec)) eq f) assert( ECNotUsed(ec => f.recoverWith({ case _ => fail("flatMap should not have been called")})(ec)) eq f) assert(f.fallbackTo(f) eq f, "Future.fallbackTo must be the same instance as Future.fallbackTo") @@ -126,33 +142,16 @@ class FutureTests extends MinimalScalaTest { assert(f.failed.value == Some(Success(e)), "Future.failed.failed must become successful") // scala/bug#10034 ECNotUsed(ec => f.foreach(_ => fail("foreach should not have been called"))(ec)) - 
ECNotUsed(ec => f.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec)) assert( ECNotUsed(ec => f.map(_ => fail("map should not have been called"))(ec)) eq f) assert( ECNotUsed(ec => f.flatMap(_ => fail("flatMap should not have been called"))(ec)) eq f) assert( ECNotUsed(ec => f.filter(_ => fail("filter should not have been called"))(ec)) eq f) assert( ECNotUsed(ec => f.collect({ case _ => fail("collect should not have been called")})(ec)) eq f) assert( ECNotUsed(ec => f.zipWith(f)({ (_,_) => fail("zipWith should not have been called")})(ec)) eq f) + } } "The Future companion object" should { - "call ExecutionContext.prepare on apply" in { - val p = Promise[Boolean]() - val ec = new ExecutionContext { - val delegate = ExecutionContext.global - override def prepare(): ExecutionContext = { - p.success(true) - delegate.prepare - } - override def execute(r: Runnable) = delegate.execute(r) - override def reportFailure(t: Throwable): Unit = delegate.reportFailure(t) - } - - val f = Future("foo")(ec) - Await.result(f, defaultTimeout) mustBe ("foo") - Await.result(p.future, defaultTimeout) mustBe (true) - } - "have a unit member representing an already completed Future containing Unit" in { assert(Future.unit ne null, "Future.unit must not be null") assert(Future.unit eq Future.unit, "Future.unit must be the same instance as Future.unit") @@ -180,8 +179,6 @@ class FutureTests extends MinimalScalaTest { assert(test.mapTo[String] eq test) ECNotUsed(ec => test.foreach(_ => fail("foreach should not have been called"))(ec)) - ECNotUsed(ec => test.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec)) - ECNotUsed(ec => test.onFailure({ case _ => fail("onFailure should not have been called") })(ec)) ECNotUsed(ec => test.onComplete({ case _ => fail("onComplete should not have been called") })(ec)) ECNotUsed(ec => test.transform(identity, identity)(ec) eq test) ECNotUsed(ec => test.transform(identity)(ec) eq test) @@ -197,6 +194,39 @@ 
class FutureTests extends MinimalScalaTest { } } + "The parasitic ExecutionContext" should { + "run Runnables on the calling thread" in { + val t = Thread.currentThread + var rt: Thread = null + ExecutionContext.parasitic.execute(() => rt = Thread.currentThread) + t mustBe rt + } + + "not rethrow non-fatal exceptions" in { + ExecutionContext.parasitic.execute(() => throw new RuntimeException("do not rethrow") with NoStackTrace) + } + + "rethrow fatal exceptions" in { + val oome = new OutOfMemoryError("test") + intercept[OutOfMemoryError] { + ExecutionContext.parasitic.execute(() => throw oome) + } mustBe oome + } + + "continue after non-fatal exceptions" in { + var value = "" + ExecutionContext.parasitic.execute(() => throw new RuntimeException("expected") with NoStackTrace) + ExecutionContext.parasitic.execute(() => value = "test") + value mustBe "test" + } + + "not blow the stack" in { + def recur(i: Int): Unit = if (i > 0) ExecutionContext.parasitic.execute(() => recur(i - 1)) else () + + recur(100000) + } + } + "The default ExecutionContext" should { import ExecutionContext.Implicits._ "report uncaught exceptions" in { @@ -226,13 +256,13 @@ class FutureTests extends MinimalScalaTest { a <- future0.mapTo[Int] // returns 5 b <- async(a) // returns "10" c <- async(7) // returns "14" - } yield b + "-" + c + } yield s"$b-$c" val future2 = for { a <- future0.mapTo[Int] b <- (Future { (a * 2).toString }).mapTo[Int] c <- Future { (7 * 2).toString } - } yield b + "-" + c + } yield s"$b-$c" Await.result(future1, defaultTimeout) mustBe ("10-14") assert(checkType(future1, manifest[String])) @@ -251,13 +281,13 @@ class FutureTests extends MinimalScalaTest { Res(a: Int) <- async(Req("Hello")) Res(b: String) <- async(Req(a)) Res(c: String) <- async(Req(7)) - } yield b + "-" + c + } yield s"$b-$c" val future2 = for { Res(a: Int) <- async(Req("Hello")) Res(b: Int) <- async(Req(a)) Res(c: Int) <- async(Req(7)) - } yield b + "-" + c + } yield s"$b-$c" Await.result(future1, 
defaultTimeout) mustBe ("10-14") intercept[NoSuchElementException] { Await.result(future2, defaultTimeout) } @@ -505,7 +535,7 @@ class FutureTests extends MinimalScalaTest { val result = Future.find[Int](futures)(_ == 3) Await.result(result, defaultTimeout) mustBe (Some(3)) - val notFound = Future.find[Int](futures.iterator)(_ == 11) + val notFound = Future.find[Int](futures)(_ == 11) Await.result(notFound, defaultTimeout) mustBe (None) } @@ -568,13 +598,13 @@ class FutureTests extends MinimalScalaTest { val futures = (0 to 9) map { idx => async(idx, idx * 20) } - val folded = Future.fold(futures)(0)(_ + _) + val folded = Future.foldLeft(futures)(0)(_ + _) Await.result(folded, timeout) mustBe (45) val futuresit = (0 to 9) map { idx => async(idx, idx * 20) } - val foldedit = Future.fold(futures)(0)(_ + _) + val foldedit = Future.foldLeft(futures)(0)(_ + _) Await.result(foldedit, timeout) mustBe (45) } @@ -603,7 +633,7 @@ class FutureTests extends MinimalScalaTest { def futures = (0 to 9) map { idx => async(idx, idx * 10) } - val folded = Future.fold(futures)(0)(_ + _) + val folded = Future.foldLeft(futures)(0)(_ + _) intercept[IllegalArgumentException] { Await.result(folded, timeout) }.getMessage mustBe ("shouldFoldResultsWithException: expected") @@ -611,9 +641,9 @@ class FutureTests extends MinimalScalaTest { "fold mutable zeroes safely" in { import scala.collection.mutable.ArrayBuffer - def test(testNumber: Int) { + def test(testNumber: Int): Unit = { val fs = (0 to 1000) map (i => Future(i)) - val f = Future.fold(fs)(ArrayBuffer.empty[AnyRef]) { + val f = Future.foldLeft(fs)(ArrayBuffer.empty[AnyRef]) { case (l, i) if i % 2 == 0 => l += i.asInstanceOf[AnyRef] case (l, _) => l } @@ -626,7 +656,7 @@ class FutureTests extends MinimalScalaTest { } "return zero value if folding empty list" in { - val zero = Future.fold(List[Future[Int]]())(0)(_ + _) + val zero = Future.foldLeft(List[Future[Int]]())(0)(_ + _) Await.result(zero, defaultTimeout) mustBe (0) } @@ -638,11 
+668,11 @@ class FutureTests extends MinimalScalaTest { val timeout = 10000 millis val futures = (0 to 9) map { async } - val reduced = Future.reduce(futures)(_ + _) + val reduced = Future.reduceLeft(futures)(_ + _) Await.result(reduced, timeout) mustBe (45) val futuresit = (0 to 9) map { async } - val reducedit = Future.reduce(futuresit)(_ + _) + val reducedit = Future.reduceLeft(futuresit)(_ + _) Await.result(reducedit, timeout) mustBe (45) } @@ -656,7 +686,7 @@ class FutureTests extends MinimalScalaTest { def futures = (1 to 10) map { idx => async(idx, idx * 10) } - val failed = Future.reduce(futures)(_ + _) + val failed = Future.reduceLeft(futures)(_ + _) intercept[IllegalArgumentException] { Await.result(failed, timeout) }.getMessage mustBe ("shouldFoldResultsWithException: expected") @@ -664,7 +694,7 @@ class FutureTests extends MinimalScalaTest { "shouldReduceThrowNSEEOnEmptyInput" in { intercept[java.util.NoSuchElementException] { - val emptyreduced = Future.reduce(List[Future[Int]]())(_ + _) + val emptyreduced = Future.reduceLeft(List[Future[Int]]())(_ + _) Await.result(emptyreduced, defaultTimeout) } } @@ -820,6 +850,26 @@ class FutureTests extends MinimalScalaTest { Await.ready(f, defaultTimeout).value.get.toString mustBe expected.toString } + "should delegate equivalently to unit.flatMap on failure" in { + val t = new Exception("test") + val df = Future.delegate(throw t) + val fm = Future.unit.flatMap(_ => throw t) + + Await.ready(df, defaultTimeout) + Await.ready(fm, defaultTimeout) + df.value mustBe fm.value + } + + "should delegate equivalently to unit.flatMap on success" in { + val f = Future.successful("test") + val df = Future.delegate(f) + val fm = Future.unit.flatMap(_ => f) + + Await.ready(df, defaultTimeout) + Await.ready(fm, defaultTimeout) + df.value mustBe fm.value + } + } } diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala index fc54582cb318..e569136e891c 100644 --- 
a/test/files/jvm/future-spec/PromiseTests.scala +++ b/test/files/jvm/future-spec/PromiseTests.scala @@ -1,3 +1,6 @@ + + + import scala.concurrent._ import scala.concurrent.duration._ import scala.concurrent.duration.Duration.Inf @@ -5,7 +8,7 @@ import scala.collection._ import scala.runtime.NonLocalReturnControl import scala.util.{Try,Success,Failure} -@annotation.nowarn("cat=deprecation") + class PromiseTests extends MinimalScalaTest { import ExecutionContext.Implicits._ @@ -101,7 +104,7 @@ class PromiseTests extends MinimalScalaTest { "not be completable with a completed Promise" in { { val p = Promise[String]().failure(new RuntimeException("unbr0ken")) - p.tryCompleteWith(Promise[String].failure(new Exception("br0ken")).future) + p.tryCompleteWith(Promise[String]().failure(new Exception("br0ken")).future) intercept[RuntimeException] { Await.result(p.future, defaultTimeout) }.getMessage mustBe ("unbr0ken") @@ -117,7 +120,7 @@ class PromiseTests extends MinimalScalaTest { } "An interrupted Promise" should { - val message = "Boxed InterruptedException" + val message = "Boxed Exception" val future = Promise[String]().complete(Failure(new InterruptedException(message))).future futureWithException[ExecutionException](_(future, message)) } @@ -128,7 +131,24 @@ class PromiseTests extends MinimalScalaTest { futureWithResult(_(future, result)) } - def futureWithResult(f: ((Future[Any], Any) => Unit) => Unit) { + "A Promise should not be serializable" should { + + def verifyNonSerializabilityFor(p: Promise[_]): Unit = { + import java.io._ + val out = new ObjectOutputStream(new ByteArrayOutputStream()) + intercept[NotSerializableException] { + out.writeObject(p) + }.getMessage mustBe "Promises and Futures cannot be serialized" + } + + verifyNonSerializabilityFor(Promise[Unit]()) + verifyNonSerializabilityFor(Promise.failed(new NullPointerException)) + verifyNonSerializabilityFor(Promise.successful("test")) + verifyNonSerializabilityFor(Promise.fromTry(Success("test"))) + 
verifyNonSerializabilityFor(Promise.fromTry(Failure(new NullPointerException))) + } + + def futureWithResult(f: ((Future[Any], Any) => Unit) => Unit): Unit = { "be completed" in { f((future, _) => future.isCompleted mustBe (true)) } @@ -184,7 +204,7 @@ class PromiseTests extends MinimalScalaTest { f { (future, result) => val p = Promise[Any]() - future.onSuccess { case x => p.success(x) } + future foreach { x => p.success(x) } Await.result(p.future, defaultTimeout) mustBe (result) } } @@ -201,13 +221,13 @@ class PromiseTests extends MinimalScalaTest { "cast using mapTo" in { f { (future, result) => - Await.result(future.mapTo[Boolean].recover({ case _: ClassCastException ⇒ false }), defaultTimeout) mustBe (false) + Await.result(future.mapTo[Boolean].recover({ case _: ClassCastException => false }), defaultTimeout) mustBe (false) } } } - def futureWithException[E <: Throwable: Manifest](f: ((Future[Any], String) => Unit) => Unit) { + def futureWithException[E <: Throwable: Manifest](f: ((Future[Any], String) => Unit) => Unit): Unit = { "be completed" in { f((future, _) => future.isCompleted mustBe (true)) @@ -268,7 +288,7 @@ class PromiseTests extends MinimalScalaTest { "recover from exception" in { f { (future, message) => - Await.result(future.recover({ case e if e.getMessage == message ⇒ "pigdog" }), defaultTimeout) mustBe ("pigdog") + Await.result(future.recover({ case e if e.getMessage == message => "pigdog" }), defaultTimeout) mustBe ("pigdog") } } @@ -280,7 +300,7 @@ class PromiseTests extends MinimalScalaTest { f { (future, message) => val p = Promise[Any]() - future.onFailure { case _ => p.success(message) } + future.onComplete { case Failure(_) => p.success(message); case _ => } Await.result(p.future, defaultTimeout) mustBe (message) } } diff --git a/test/files/jvm/future-spec/TryTests.scala b/test/files/jvm/future-spec/TryTests.scala deleted file mode 100644 index ca968aa8d9e0..000000000000 --- a/test/files/jvm/future-spec/TryTests.scala +++ /dev/null @@ 
-1,131 +0,0 @@ -// This is a port of the com.twitter.util Try spec. -// -- -// It lives in the future-spec directory simply because it requires a specs-like -// DSL which has already been minimally implemented for the future spec tests. - -import scala.util.{Try,Success,Failure} - -@annotation.nowarn("cat=deprecation") -class TryTests extends MinimalScalaTest { - class MyException extends Exception - val e = new Exception("this is an exception") - - "Try()" should { - "catch exceptions and lift into the Try type" in { - Try[Int](1) mustEqual Success(1) - Try[Int] { throw e } mustEqual Failure(e) - } - } - - "Try" should { - "recoverWith" in { - val myException = new MyException - Success(1) recoverWith { case _ => Success(2) } mustEqual Success(1) - Failure(e) recoverWith { case _ => Success(2) } mustEqual Success(2) - Failure(e) recoverWith { case _ => Failure(e) } mustEqual Failure(e) - } - - "getOrElse" in { - Success(1) getOrElse 2 mustEqual 1 - Failure(e) getOrElse 2 mustEqual 2 - } - - "orElse" in { - Success(1) orElse Success(2) mustEqual Success(1) - Failure(e) orElse Success(2) mustEqual Success(2) - } - - "map" in { - "when there is no exception" in { - Success(1) map(1+) mustEqual Success(2) - Failure[Int](e) map(1+) mustEqual Failure(e) - } - - "when there is an exception" in { - Success(1) map(_ => throw e) mustEqual Failure(e) - - val e2 = new Exception - Failure[Int](e) map(_ => throw e2) mustEqual Failure(e) - } - "when there is a fatal exception" in { - val e3 = new ThreadDeath - intercept[ThreadDeath] { - Success(1) map (_ => throw e3) - } - } - } - - "flatMap" in { - "when there is no exception" in { - Success(1) flatMap(x => Success(1 + x)) mustEqual Success(2) - Failure[Int](e) flatMap(x => Success(1 + x)) mustEqual Failure(e) - } - - "when there is an exception" in { - Success(1).flatMap[Int](_ => throw e) mustEqual Failure(e) - - val e2 = new Exception - Failure[Int](e).flatMap[Int](_ => throw e2) mustEqual Failure(e) - } - "when there is a 
fatal exception" in { - val e3 = new ThreadDeath - intercept[ThreadDeath] { - Success(1).flatMap[Int](_ => throw e3) - } - } - } - - "flatten" in { - "is a Success(Success)" in { - Success(Success(1)).flatten mustEqual Success(1) - } - - "is a Success(Failure)" in { - val e = new Exception - Success(Failure(e)).flatten mustEqual Failure(e) - } - - "is a Throw" in { - val e = new Exception - Failure[Try[Int]](e).flatten mustEqual Failure(e) - } - } - - "for" in { - "with no Failure values" in { - val result = for { - i <- Success(1) - j <- Success(1) - } yield (i + j) - result mustEqual Success(2) - } - - "with Failure values" in { - "throws before" in { - val result = for { - i <- Failure[Int](e) - j <- Success(1) - } yield (i + j) - result mustEqual Failure(e) - } - - "throws after" in { - val result = for { - i <- Success(1) - j <- Failure[Int](e) - } yield (i + j) - result mustEqual Failure(e) - } - - "returns the FIRST Failure" in { - val e2 = new Exception - val result = for { - i <- Failure[Int](e) - j <- Failure[Int](e2) - } yield (i + j) - result mustEqual Failure(e) - } - } - } - } -} diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala index e36bc2994f00..dda7cfbb4e47 100644 --- a/test/files/jvm/future-spec/main.scala +++ b/test/files/jvm/future-spec/main.scala @@ -1,25 +1,26 @@ -// scalac: -deprecation import scala.collection._ import scala.concurrent._ import scala.concurrent.duration.Duration -import java.util.concurrent.{ TimeoutException, CountDownLatch, TimeUnit } - +import java.util.concurrent.{TimeoutException, CountDownLatch, TimeUnit} object Test { - def main(args: Array[String]) { - (new FutureTests).check() - (new PromiseTests).check() - (new TryTests).check() - } + val DefaultTimeout = Duration(5, TimeUnit.SECONDS) + + def main(args: Array[String]): Unit = + List( + (new FutureTests), + (new PromiseTests), + ).foreach(_.check()) } trait Features { - implicit def implicitously = 
scala.language.implicitConversions - implicit def reflectively = scala.language.reflectiveCalls - implicit def postulously = scala.language.postfixOps + import languageFeature._ + implicit def implicitously: implicitConversions = scala.language.implicitConversions + implicit def reflectively: reflectiveCalls = scala.language.reflectiveCalls + implicit def postulously: postfixOps = scala.language.postfixOps } @@ -32,22 +33,24 @@ trait Output { } -trait MinimalScalaTest extends Output with Features { +trait MinimalScalaTest extends Output with Features with Vigil { val throwables = mutable.ArrayBuffer[Throwable]() - def check() { + def check(): Unit = { if (throwables.nonEmpty) println(buffer.toString) } - implicit def stringops(s: String) = new { + type Ops = AnyRef{def should[U](snippets: => U): U; def in[U](snippet: => U): scala.collection.mutable.IndexedSeq[_ >: Char with Throwable] with scala.collection.mutable.AbstractSeq[_ >: Char with Throwable] with scala.collection.mutable.Growable[Char with Throwable] with java.io.Serializable} - def should[U](snippets: =>U) = { + implicit def stringops(s: String): Ops = new { + + def should[U](snippets: => U) = { bufferPrintln(s + " should:") snippets } - def in[U](snippet: =>U) = { + def in[U](snippet: => U) = { try { bufferPrintln("- " + s) snippet @@ -62,14 +65,15 @@ trait MinimalScalaTest extends Output with Features { } - implicit def objectops(obj: Any) = new { + type OOps = AnyRef{def mustBe(other: Any): Unit; def mustEqual(other: Any): Unit} + implicit def objectops(obj: Any): OOps = new { - def mustBe(other: Any) = assert(obj == other, obj + " is not " + other) + def mustBe(other: Any) = assert(obj == other, s"$obj is not $other") def mustEqual(other: Any) = mustBe(other) } - def intercept[T <: Throwable: Manifest](body: =>Any): T = { + def intercept[T <: Throwable: Manifest](body: => Any): T = { try { body throw new Exception("Exception of type %s was not thrown".format(manifest[T])) @@ -83,9 +87,35 @@ trait 
MinimalScalaTest extends Output with Features { def checkType[T: Manifest, S](in: Future[T], refmanifest: Manifest[S]): Boolean = manifest[T] == refmanifest } +trait Vigil { + def waitForIt(terminated: => Boolean): Unit = { + val limit = 5 + var n = 1 + var (dormancy, factor) = (250L, 4) + var period = 0L + var done = false + var ended = false + while (!done && n < limit) { + try { + ended = terminated + if (ended) { + done = true + } else { + Thread.sleep(dormancy) + period += dormancy + } + } catch { + case _: InterruptedException => done = true + } + n += 1 + dormancy *= factor + } + assert(ended, s"Expired after dormancy period $period waiting for termination condition") + } +} object TestLatch { - val DefaultTimeout = Duration(5, TimeUnit.SECONDS) + val DefaultTimeout = Test.DefaultTimeout def apply(count: Int = 1) = new TestLatch(count) } @@ -112,4 +142,3 @@ class TestLatch(count: Int = 1) extends Awaitable[Unit] { } } - diff --git a/test/files/jvm/getGenericSuperclass.scala b/test/files/jvm/getGenericSuperclass.scala index d8c5461c1aa8..b83f767199de 100644 --- a/test/files/jvm/getGenericSuperclass.scala +++ b/test/files/jvm/getGenericSuperclass.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(Nil.getClass.getGenericSuperclass) println(None.getClass.getGenericSuperclass) } diff --git a/test/files/jvm/iinc.check b/test/files/jvm/iinc.check new file mode 100644 index 000000000000..3538a07f8587 --- /dev/null +++ b/test/files/jvm/iinc.check @@ -0,0 +1,18 @@ +def increment + iinc 1 + iinc 54 + iinc 127 + iinc -1 + iinc -54 + iinc -128 +end increment +def wideIncrement + iinc 128 + iinc 8765 + iinc 32767 + iinc -129 + iinc -8765 + iinc -32768 +end wideIncrement +def tooBigForIinc +end tooBigForIinc diff --git a/test/files/jvm/iinc/Increment_1.scala b/test/files/jvm/iinc/Increment_1.scala new file mode 100644 index 000000000000..03251016bfb3 --- /dev/null +++ 
b/test/files/jvm/iinc/Increment_1.scala @@ -0,0 +1,37 @@ +class Increment { + + // `iinc` + def increment(x: Int): Int = { + var i = x + i += 1 + i += 54 + i += 127 + i -= 1 + i -= 54 + i -= 128 + i + } + + // `wide iinc` + def wideIncrement(x: Int): Int = { + var i = x + i += 128 + i += 8765 + i += 32767 + i -= 129 + i -= 8765 + i -= 32768 + i + } + + def tooBigForIinc(x: Int): Int = { + var i = x + i += 32768 + i += 56789 + i += 2147483647 + i -= 32769 + i -= 56789 + i -= 2147483647 + i + } +} diff --git a/test/files/jvm/iinc/test.scala b/test/files/jvm/iinc/test.scala new file mode 100644 index 000000000000..4743fb1000af --- /dev/null +++ b/test/files/jvm/iinc/test.scala @@ -0,0 +1,17 @@ +import scala.tools.partest.BytecodeTest + +import scala.tools.asm.tree.IincInsnNode + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Increment") + for (name <- List("increment", "wideIncrement", "tooBigForIinc")) { + println(s"def $name") + getMethod(classNode, name).instructions.toArray().collect { + case insn: IincInsnNode => println(s" iinc ${insn.incr}") + } + println(s"end $name") + } + } +} + diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala index dc01b124c576..8f68f3489165 100644 --- a/test/files/jvm/inner.scala +++ b/test/files/jvm/inner.scala @@ -66,7 +66,7 @@ object Scalatest { private val javacmd = javabin + File.separator + "java" private val javac = javabin + File.separator + "javac" - def javac(src: String, fname: String) { + def javac(src: String, fname: String): Unit = { val tmpfilename = outputdir + File.separator + fname val tmpfile = new FileWriter(tmpfilename) tmpfile.write(src) @@ -78,7 +78,7 @@ object Scalatest { exec(javacmd, "-cp", classpath, cname) /** Execute cmd, wait for the process to end and pipe its output to stdout */ - private def exec(args: String*) { + private def exec(args: String*): Unit = { val proc = Runtime.getRuntime().exec(args.toArray) val inp = new BufferedReader(new 
InputStreamReader(proc.getInputStream)) val errp = new BufferedReader(new InputStreamReader(proc.getErrorStream)) @@ -89,7 +89,7 @@ object Scalatest { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val javaInteraction = """ public class JavaInteraction { public static void main(String[] args) { diff --git a/test/files/jvm/innerClassAttribute.check b/test/files/jvm/innerClassAttribute.check new file mode 100644 index 000000000000..d43971dce825 --- /dev/null +++ b/test/files/jvm/innerClassAttribute.check @@ -0,0 +1,12 @@ +Classes_1.scala:117: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + ((x: String) => x + "3") + ^ +Classes_1.scala:124: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + ((x: String) => x + "2") + ^ +Classes_1.scala:129: warning: a pure expression does nothing in statement position + (s: String) => { + ^ +Classes_1.scala:130: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + {(s: String) => ()} + ^ diff --git a/test/files/jvm/innerClassAttribute/Classes_1.scala b/test/files/jvm/innerClassAttribute/Classes_1.scala index 5d869dacd250..a18bb9dbfe16 100644 --- a/test/files/jvm/innerClassAttribute/Classes_1.scala +++ b/test/files/jvm/innerClassAttribute/Classes_1.scala @@ -339,4 +339,4 @@ object LocalAndAnonymousInLazyInitializer { AA } } -} \ No newline at end of file +} diff --git a/test/files/jvm/innerClassAttribute/Test.scala b/test/files/jvm/innerClassAttribute/Test.scala index 6814f326024d..f22ee8fa798e 100644 --- a/test/files/jvm/innerClassAttribute/Test.scala +++ b/test/files/jvm/innerClassAttribute/Test.scala @@ -276,7 +276,7 @@ object Test extends BytecodeTest { assertMember(d, "A22$C$", "D", name = Some("A22$C$D")) } - def testA23() { + def testA23(): Unit = { val List(c, d, e, f, g) = 
innerClassNodes("A23") assertMember(c, "Java_A_1", "C", flags = publicStatic) assertMember(d, "Java_A_1$C", "D", flags = publicStatic) @@ -285,7 +285,7 @@ object Test extends BytecodeTest { assertMember(g, "Java_A_1$F", "G") } - def testA24() { + def testA24(): Unit = { val List(defsCls, abs, conc, defsApi) = innerClassNodes("A24$DefinitionsClass") assertMember(defsCls, "A24", "DefinitionsClass") assertMember(abs, "A24$DefinitionsClass", "Abs$") @@ -320,7 +320,7 @@ object Test extends BytecodeTest { assert(innerClassNodes("SI_9105").length == 13) // the 12 local classes, plus MethodHandles$Lookup } - def testSI_9124() { + def testSI_9124(): Unit = { val classes: Map[String, String] = { List("SI_9124$$anon$10", "SI_9124$$anon$12", @@ -352,7 +352,7 @@ object Test extends BytecodeTest { // Note: the new trait encoding removed impl classes, so this test name doesn't make sense. // But I've left it here as there were some tests remaining that are still relevant. - def testImplClassesTopLevel() { + def testImplClassesTopLevel(): Unit = { val classes = List( "ImplClassesAreTopLevel$$anon$14", "ImplClassesAreTopLevel$$anon$15", @@ -392,7 +392,7 @@ object Test extends BytecodeTest { testInner("ImplClassesAreTopLevel", an14, an15, an16, b1, b2, b3, b4) } - def testSpecializedClassesTopLevel() { + def testSpecializedClassesTopLevel(): Unit = { val cls = List( "SpecializedClassesAreTopLevel$A$mcI$sp", "SpecializedClassesAreTopLevel$A", @@ -414,12 +414,12 @@ object Test extends BytecodeTest { List("SpecializedClassesAreTopLevel$T$", "SpecializedClassesAreTopLevel$T$B$mcI$sp", "SpecializedClassesAreTopLevel$T$B").foreach(testInner(_, t, b)) } - def testAnonymousClassesMayBeNestedInSpecialized() { + def testAnonymousClassesMayBeNestedInSpecialized(): Unit = { assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$$anon$17", "AnonymousClassesMayBeNestedInSpecialized$C", "foo", "(Ljava/lang/Object;)LAnonymousClassesMayBeNestedInSpecialized$A;") 
assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp$$anon$18", "AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp", "foo$mcI$sp", "(I)LAnonymousClassesMayBeNestedInSpecialized$A;") } - def testNestedInValueClass() { + def testNestedInValueClass(): Unit = { List( "NestedInValueClass", "NestedInValueClass$", diff --git a/test/files/jvm/innerClassEnclMethodJavaReflection.scala b/test/files/jvm/innerClassEnclMethodJavaReflection.scala index 6e4dbe0b58e6..644d9362d834 100644 --- a/test/files/jvm/innerClassEnclMethodJavaReflection.scala +++ b/test/files/jvm/innerClassEnclMethodJavaReflection.scala @@ -5,11 +5,11 @@ object Test extends App { val jarsOrDirectories = Set("partest.lib", "partest.reflect", "partest.comp") map sys.props object AllowedMissingClass { - // Some classes in scala-compiler.jar have references to jline / ant classes, which seem to be + // Some classes in scala-compiler.jar have references to jline classes, which seem to be // not on the classpath. We just skip over those classes. // PENDING: for now we also allow missing $anonfun classes: the optimizer may eliminate some closures // that are referred to in EnclosingClass attributes. 
scala/bug#9136 - val allowedMissingPackages = Set("jline", "org.apache.tools.ant", "$anonfun") + val allowedMissingPackages = Set("jline", "$anonfun") def ok(t: Throwable) = { allowedMissingPackages.exists(p => t.getMessage.replace('/', '.').contains(p)) @@ -34,7 +34,7 @@ object Test extends App { val classFullNames = flatten(classPath, "").filter(_._1.hasExtension("class")).map(_._2.replace("/", ".").replaceAll(".class$", "")) // it seems that Class objects can only be GC'd together with their class loader - // (http://stackoverflow.com/questions/2433261/when-and-how-are-classes-garbage-collected-in-java) + // (https://stackoverflow.com/questions/2433261/when-and-how-are-classes-garbage-collected-in-java) // if we just use the same class loader for the entire test (Class.forName), we run out of PermGen // even with that, we still neeed a PermGen of 90M or so, the default 64 is not enough. I tried // using one class loader per 100 classes, but that didn't help, the classes didn't get GC'd. 
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index 178fa24e5723..a85c3530b48f 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -2,47 +2,46 @@ scala> // basics scala> 3+4 -res0: Int = 7 +val res0: Int = 7 scala> def gcd(x: Int, y: Int): Int = { if (x == 0) y else if (y == 0) x else gcd(y%x, x) } -gcd: (x: Int, y: Int)Int +def gcd(x: Int, y: Int): Int scala> val five = gcd(15,35) -five: Int = 5 +val five: Int = 5 scala> var x = 1 -x: Int = 1 +var x: Int = 1 scala> x = 2 -x: Int = 2 +// mutated x scala> val three = x+1 -three: Int = 3 +val three: Int = 3 scala> type anotherint = Int -defined type alias anotherint +type anotherint scala> val four: anotherint = 4 -four: anotherint = 4 +val four: anotherint = 4 scala> val bogus: anotherint = "hello" -:12: error: type mismatch; - found : String("hello") - required: anotherint - (which expands to) Int - val bogus: anotherint = "hello" ^ + error: type mismatch; + found : String("hello") + required: anotherint + (which expands to) Int scala> trait PointlessTrait -defined trait PointlessTrait +trait PointlessTrait scala> val (x,y) = (2,3) -x: Int = 2 -y: Int = 3 +val x: Int = 2 +val y: Int = 3 scala> println("hello") hello @@ -52,52 +51,49 @@ scala> scala> // ticket #1513 scala> val t1513 = Array(null) -t1513: Array[Null] = Array(null) - -scala> // ambiguous toString problem from #547 - -scala> val atom = new scala.xml.Atom(()) -atom: scala.xml.Atom[Unit] = () +val t1513: Array[Null] = Array(null) scala> // overriding toString problem from #1404 scala> class S(override val toString : String) -defined class S +class S scala> val fish = new S("fish") -fish: S = fish +val fish: S = fish scala> // Test that arrays pretty print nicely. scala> val arr = Array("What's", "up", "doc?") -arr: Array[String] = Array(What's, up, doc?) +val arr: Array[String] = Array(What's, up, doc?) 
scala> // Test that arrays pretty print nicely, even when we give them type Any scala> val arrInt : Any = Array(1,2,3) -arrInt: Any = Array(1, 2, 3) +val arrInt: Any = Array(1, 2, 3) scala> // Test that nested arrays are pretty-printed correctly scala> val arrArrInt : Any = Array(Array(1, 2), Array(3, 4)) -arrArrInt: Any = Array(Array(1, 2), Array(3, 4)) +val arrArrInt: Any = Array(Array(1, 2), Array(3, 4)) scala> scala> // implicit conversions scala> case class Foo(n: Int) -defined class Foo +class Foo scala> case class Bar(n: Int) -defined class Bar +class Bar scala> implicit def foo2bar(foo: Foo) = Bar(foo.n) -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -foo2bar: (foo: Foo)Bar + ^ + warning: Implicit definition should have explicit type (inferred Bar) [quickfixable] +warning: 1 feature warning; for details, enable `:setting -feature` or `:replay -feature` +def foo2bar(foo: Foo): Bar scala> val bar: Bar = Foo(3) -bar: Bar = Bar(3) +val bar: Bar = Bar(3) scala> @@ -107,140 +103,140 @@ scala> import bar._ import bar._ scala> val m = n -m: Int = 3 +val m: Int = 3 scala> scala> // stressing the imports mechanism scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> 
val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> val one = 1 -one: Int = 1 +val one: Int = 1 scala> scala> scala> val x1 = 1 -x1: Int = 1 +val x1: Int = 1 scala> val x2 = 1 -x2: Int = 1 +val x2: Int = 1 scala> val x3 = 1 -x3: Int = 1 +val x3: Int = 1 scala> val x4 = 1 -x4: Int = 1 +val x4: Int = 1 scala> val x5 = 1 -x5: Int = 1 +val x5: Int = 1 scala> val x6 = 1 -x6: Int = 1 +val x6: Int = 1 scala> val x7 = 1 -x7: Int = 1 +val x7: Int = 1 scala> val x8 = 1 -x8: Int = 1 +val x8: Int = 1 scala> val x9 = 1 -x9: Int = 1 +val x9: Int = 1 scala> val x10 = 1 -x10: Int = 1 +val x10: Int = 1 scala> val x11 = 1 -x11: Int = 1 +val x11: Int = 1 scala> val x12 = 1 -x12: Int = 1 +val x12: Int = 1 scala> val x13 = 1 -x13: Int = 1 +val x13: Int = 1 scala> val x14 = 1 -x14: Int = 1 +val x14: Int = 1 scala> val x15 = 1 -x15: Int = 1 +val x15: Int = 1 scala> val x16 = 1 -x16: Int = 1 +val x16: Int = 1 scala> val x17 = 1 -x17: Int = 1 +val x17: Int = 1 scala> val x18 = 1 -x18: Int = 1 +val x18: Int = 1 scala> val x19 = 1 -x19: Int = 1 +val x19: Int = 1 scala> val x20 = 1 -x20: Int = 1 +val x20: Int = 1 scala> scala> val two = one + x5 -two: Int = 2 +val two: Int = 2 scala> @@ -249,26 +245,26 @@ scala> // handling generic wildcard arrays (#2386) scala> // It's put here because type feedback is an important part of it. 
scala> val xs: Array[_] = Array(1, 2) -xs: Array[_] = Array(1, 2) +val xs: Array[_] = Array(1, 2) scala> xs.size -res2: Int = 2 +val res2: Int = 2 scala> xs.head -res3: Any = 1 +val res3: Any = 1 scala> xs filter (_ == 2) -res4: Array[_] = Array(2) +val res4: Array[_] = Array(2) scala> xs map (_ => "abc") -res5: Array[String] = Array(abc, abc) +val res5: Array[String] = Array(abc, abc) scala> xs map (x => x) -res6: Array[_] = Array(1, 2) +val res6: Array[_] = Array(1, 2) scala> xs map (x => (x, x)) -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2)) +warning: 1 feature warning; for details, enable `:setting -feature` or `:replay -feature` +val res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2)) scala> @@ -277,32 +273,17 @@ scala> // interior syntax errors should *not* go into multi-line input mode. scala> // both of the following should abort immediately: scala> def x => y => z -:1: error: '=' expected but '=>' found. - def x => y => z ^ + error: '=' expected but '=>' found. scala> [1,2,3] -:1: error: illegal start of definition - [1,2,3] ^ + error: illegal start of definition scala> scala> -scala> // multi-line XML - -scala> - -res8: scala.xml.Elem = - - - -scala> - scala> scala> /* @@ -311,8 +292,9 @@ scala> /* */ */ +scala> -You typed two blank lines. Starting a new command. +scala> scala> // multi-line string @@ -320,14 +302,12 @@ scala> """ hello there """ -res12: String = +val res8: String = " hello there " -scala> - scala> (1 + // give up early by typing two blank lines @@ -336,33 +316,31 @@ You typed two blank lines. Starting a new command. 
scala> // defining and using quoted names should work (ticket #323) scala> def `match` = 1 -match: Int +def match: Int scala> val x = `match` -x: Int = 1 +val x: Int = 1 scala> scala> // multiple classes defined on one line scala> sealed class Exp; class Fact extends Exp; class Term extends Exp -defined class Exp -defined class Fact -defined class Term +class Exp +class Fact +class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:18: warning: match may not be exhaustive. -It would fail on the following inputs: Exp(), Term() - def f(e: Exp) = e match { // non-exhaustive warning here ^ -f: (e: Exp)Int + warning: match may not be exhaustive. + It would fail on the following inputs: Exp(), Term() +def f(e: Exp): Int scala> :quit -plusOne: (x: Int)Int -res0: Int = 6 -res0: String = after reset -:12: error: not found: value plusOne - plusOne(5) // should be undefined now +def plusOne(x: Int): Int +val res0: Int = 6 +val res0: String = after reset ^ + error: not found: value plusOne diff --git a/test/files/jvm/interpreter.scala b/test/files/jvm/interpreter.scala index 6e02dca8f06d..1354d8c6bffa 100644 --- a/test/files/jvm/interpreter.scala +++ b/test/files/jvm/interpreter.scala @@ -1,16 +1,17 @@ import scala.tools.nsc._ +import scala.tools.nsc.interpreter.shell.ReplReporterImpl import scala.tools.partest.ReplTest object Test extends ReplTest { override def extraSettings = "-deprecation" - def code = + def code = """ // basics 3+4 -def gcd(x: Int, y: Int): Int = {{ +def gcd(x: Int, y: Int): Int = { if (x == 0) y else if (y == 0) x else gcd(y%x, x) -}} +} val five = gcd(15,35) var x = 1 x = 2 @@ -24,8 +25,6 @@ println("hello") // ticket #1513 val t1513 = Array(null) -// ambiguous toString problem from #547 -val atom = new scala.xml.Atom(()) // overriding toString problem from #1404 class S(override val toString : String) val fish = new S("fish") @@ -108,13 +107,6 @@ def x => y => z [1,2,3] -// multi-line XML -<a> -<b - c="c" - d="dd" 
-/></a> - /* /* @@ -124,11 +116,10 @@ def x => y => z // multi-line string -""" +"""+ "\"\"\""+ """ hello there -""" - +"""+ "\"\"\""+ """ (1 + // give up early by typing two blank lines @@ -138,16 +129,16 @@ val x = `match` // multiple classes defined on one line sealed class Exp; class Fact extends Exp; class Term extends Exp -def f(e: Exp) = e match {{ // non-exhaustive warning here +def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 -}} +} -.text +""" def appendix() = { val settings = new Settings settings.classpath.value = sys.props("java.class.path") - val interp = new interpreter.IMain(settings) + val interp = new interpreter.IMain(settings, new ReplReporterImpl(settings)) interp.interpret("def plusOne(x: Int) = x + 1") interp.interpret("plusOne(5)") interp.reset() diff --git a/test/files/jvm/javaReflection.check b/test/files/jvm/javaReflection.check index 9835b950c967..341531409c4d 100644 --- a/test/files/jvm/javaReflection.check +++ b/test/files/jvm/javaReflection.check @@ -1,4 +1,22 @@ #partest java8 +Classes_1.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "1") + ^ +Classes_1.scala:16: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "-1") + ^ +Classes_1.scala:31: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + (() => "2") + ^ +Classes_1.scala:66: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + (() => () => "5") + ^ +Classes_1.scala:75: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "1") + ^ +Classes_1.scala:83: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => 
"1") + ^ A / A (canon) / A (simple) - declared cls: List(class A$B, interface A$C, class A$D$) - enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) @@ -168,6 +186,24 @@ T / T (canon) / T (simple) - enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) - properties : false (local) / false (member) #partest !java8 +Classes_1.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "1") + ^ +Classes_1.scala:16: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "-1") + ^ +Classes_1.scala:31: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + (() => "2") + ^ +Classes_1.scala:66: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + (() => () => "5") + ^ +Classes_1.scala:75: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "1") + ^ +Classes_1.scala:83: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + (() => "1") + ^ A / A (canon) / A (simple) - declared cls: List(class A$B, interface A$C, class A$D$) - enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) diff --git a/test/files/jvm/javaReflection/Classes_1.scala b/test/files/jvm/javaReflection/Classes_1.scala index e9cd4f756ae9..ce1149d6524d 100644 --- a/test/files/jvm/javaReflection/Classes_1.scala +++ b/test/files/jvm/javaReflection/Classes_1.scala @@ -57,7 +57,7 @@ class A { (() => "5") } - def this(x: Int) { + def this(x: Int) = { this() class Q trait R diff --git a/test/files/jvm/javaReflection/Test.scala b/test/files/jvm/javaReflection/Test.scala index 3e2965a5efad..4fa8568cfeb0 100644 --- 
a/test/files/jvm/javaReflection/Test.scala +++ b/test/files/jvm/javaReflection/Test.scala @@ -7,7 +7,7 @@ getSimpleName / getCanonicalName / isAnonymousClass / isLocalClass / isSynthetic therefore give surprising answers or may even throw an exception. In particular, the method "getSimpleName" assumes that classes are named after the Java spec - http://docs.oracle.com/javase/specs/jls/se8/html/jls-13.html#jls-13.1 + https://docs.oracle.com/javase/specs/jls/se8/html/jls-13.html#jls-13.1 Consider the following Scala example: class A { object B { class C } } @@ -50,22 +50,21 @@ getSimpleName / getCanonicalName / isAnonymousClass / isLocalClass / isSynthetic will change some day). */ -import scala.tools.nsc.settings.ScalaVersion -import scala.util.Properties.javaSpecVersion - object Test { - def assert8(b: => Boolean, msg: => Any) = { - if (ScalaVersion(javaSpecVersion) == ScalaVersion("1.8")) assert(b, msg) - else if (!b) println(s"assert not $msg") - } - def tr[T](m: => T): String = try { val r = m if (r == null) "null" else r.toString } catch { case e: InternalError => e.getMessage } + /** Assert on Java 8, but on later versions, just print if assert would fail. 
*/ + def assert8(b: => Boolean, msg: => Any) = + if (!scala.util.Properties.isJavaAtLeast(9)) + assert(b, msg) + else if (!b) + println(s"assert not $msg") + def assertNotAnonymous(c: Class[_]) = assert8(!isAnonymous(c), s"$c is anonymous") def isAnonymous(c: Class[_]) = try { diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check index 73b7bcb86af3..7521857a0169 100644 --- a/test/files/jvm/manifests-new.check +++ b/test/files/jvm/manifests-new.check @@ -1,15 +1,16 @@ +warning: 7 deprecations (since 2.13.0); re-run with -deprecation for details x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean x=a, t=TypeTag[Char], k=TypeRef, s=class Char x=1, t=TypeTag[Int], k=TypeRef, s=class Int x=abc, t=TypeTag[String], k=TypeRef, s=class String -x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol +x=Symbol(abc), t=TypeTag[Symbol], k=TypeRef, s=class Symbol x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List x=List(abc), t=TypeTag[List[String]], k=TypeRef, s=class List -x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List +x=List(Symbol(abc)), t=TypeTag[List[Symbol]], k=TypeRef, s=class List x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array @@ -21,10 +22,10 @@ x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2 x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2 x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2 x=(abc,xyz), t=TypeTag[(String, String)], k=TypeRef, s=class Tuple2 -x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2 +x=(Symbol(abc),Symbol(xyz)), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2 x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test -x=scala.collection.immutable.List$, 
t=TypeTag[scala.collection.immutable.List.type], k=SingleType, s=object List +x=scala.collection.immutable.List$, t=TypeTag[collection.immutable.List.type], k=SingleType, s=object List x=Foo, t=TypeTag[Foo[Int]], k=TypeRef, s=class Foo x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo @@ -38,11 +39,11 @@ x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s= using javaOpt -Dneeds.to.fork + object Test { //println("java.library.path=" + System.getProperty("java.library.path")) @@ -20,7 +22,7 @@ object Test { @native def sayHello(s: String): String = null - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val s = sayHello("Scala is great!") println("Invocation returned \"" + s + "\"") } diff --git a/test/files/jvm/nil-conversion/Foo_1.scala b/test/files/jvm/nil-conversion/Foo_1.scala new file mode 100644 index 000000000000..1fcf37c0393d --- /dev/null +++ b/test/files/jvm/nil-conversion/Foo_1.scala @@ -0,0 +1,13 @@ +//> using options -opt:none +class Foo_1 { + def foo: List[Int] = List() + + def bar: List[Int] = collection.immutable.List() + + def baz: List[Int] = Foo_1.MyList() + + def boo: List[Int] = List.empty +} +object Foo_1 { + val MyList = collection.immutable.List +} diff --git a/test/files/jvm/nil-conversion/Test.scala b/test/files/jvm/nil-conversion/Test.scala new file mode 100644 index 000000000000..78013c21b437 --- /dev/null +++ b/test/files/jvm/nil-conversion/Test.scala @@ -0,0 +1,39 @@ +//> using options -opt:none +import scala.tools.partest.BytecodeTest + +import scala.tools.asm +import asm.Opcodes._ +import asm.tree.{InsnList, AbstractInsnNode, FieldInsnNode, InsnNode} +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + + val module = "MODULE$" // nme.MODULE_INSTANCE_FIELD.decoded + + def checkModuleLoad(what: String, x: AbstractInsnNode): Unit = + x match { + case f: FieldInsnNode => + assert(f.name == module) + assert(f.owner == what) + assert(f.desc == s"L$what;") + 
assert(f.getOpcode == GETSTATIC) + } + def checkReturn(x: AbstractInsnNode): Unit = + x match { + case i: InsnNode => assert(i.getOpcode == ARETURN) + } + + def show: Unit = { + val classNode = loadClassNode("Foo_1") + verifyNilConversion(getMethod(classNode, "foo").instructions) + verifyNilConversion(getMethod(classNode, "bar").instructions) + //verifyNilConversion(getMethod(classNode, "baz").instructions) // requires extraordinary dispensation + } + + def verifyNilConversion(insnList: InsnList): Unit = { + val all = insnList.iterator.asScala + checkModuleLoad("scala/collection/immutable/Nil$", all.next()) + checkReturn(all.next()) + assert(!all.hasNext) + } +} diff --git a/test/files/jvm/non-fatal-tests.scala b/test/files/jvm/non-fatal-tests.scala index 1ff7ee516eec..bc4106c91f64 100644 --- a/test/files/jvm/non-fatal-tests.scala +++ b/test/files/jvm/non-fatal-tests.scala @@ -16,7 +16,7 @@ trait NonFatalTests { new OutOfMemoryError, new LinkageError, new VirtualMachineError {}, - new Throwable with scala.util.control.ControlThrowable) + new scala.util.control.ControlThrowable {}) def testFatalsUsingApply(): Unit = { fatals foreach { t => assert(NonFatal(t) == false) } @@ -42,6 +42,4 @@ trait NonFatalTests { object Test extends App -with NonFatalTests { - System.exit(0) -} \ No newline at end of file +with NonFatalTests diff --git a/test/files/jvm/outerEnum/Test_2.scala b/test/files/jvm/outerEnum/Test_2.scala index 3649813a3ecc..7a8736f4c217 100644 --- a/test/files/jvm/outerEnum/Test_2.scala +++ b/test/files/jvm/outerEnum/Test_2.scala @@ -1,7 +1,7 @@ import enums._ object Test extends App { - def foo { + def foo: Unit = { val res: OuterEnum_1.Foo = OuterEnum_1.Foo.Bar println(res) } diff --git a/test/files/jvm/protectedacc.scala b/test/files/jvm/protectedacc.scala index 43d218fa89fd..51a50bed421c 100644 --- a/test/files/jvm/protectedacc.scala +++ b/test/files/jvm/protectedacc.scala @@ -47,7 +47,7 @@ package p { abstract class PolyA[a] { protected def m(x: a): Unit; 
- class B { + class BB { trait Node { def s: String = ""; @@ -134,7 +134,7 @@ package p { abstract class X[T] extends PolyA[T] { - trait Inner extends B { + trait Inner extends BB { def self: T; def self2: Node; def getB: Inner; diff --git a/test/files/jvm/scala-concurrent-tck-b.check b/test/files/jvm/scala-concurrent-tck-b.check new file mode 100644 index 000000000000..9c0aad3aa893 --- /dev/null +++ b/test/files/jvm/scala-concurrent-tck-b.check @@ -0,0 +1,2 @@ +starting testUncaughtExceptionReporting +finished testUncaughtExceptionReporting diff --git a/test/files/jvm/scala-concurrent-tck-b.scala b/test/files/jvm/scala-concurrent-tck-b.scala new file mode 100644 index 000000000000..882440297750 --- /dev/null +++ b/test/files/jvm/scala-concurrent-tck-b.scala @@ -0,0 +1,120 @@ +//> using jvm 9+ +import scala.concurrent.{ + TimeoutException, + ExecutionContext, + ExecutionContextExecutorService, + Await, + Awaitable, +} +import scala.annotation.tailrec +import scala.concurrent.duration._ +import scala.tools.testkit.AssertUtil.{Fast, Slow, waitFor, waitForIt} +import scala.util.{Try, Success, Failure} +import scala.util.chaining._ +import java.util.concurrent.CountDownLatch +import java.util.concurrent.TimeUnit.{MILLISECONDS => Milliseconds, SECONDS => Seconds} + +trait TestBase { + + trait Done { def apply(proof: => Boolean): Unit } + + def once(body: Done => Unit): Unit = { + import java.util.concurrent.LinkedBlockingQueue + val q = new LinkedBlockingQueue[Try[Boolean]] + body(new Done { + def apply(proof: => Boolean): Unit = q offer Try(proof) + }) + var tried: Try[Boolean] = null + def check = q.poll(5000L, Milliseconds).tap(tried = _) != null + waitForIt(check, progress = Slow, label = "concurrent-tck") + assert(tried.isSuccess) + assert(tried.get) + // Check that we don't get more than one completion + assert(q.poll(50, Milliseconds) eq null) + } + + def test[T](name: String)(body: => T): T = { + println(s"starting $name") + body.tap(_ => println(s"finished 
$name")) + } + + def await[A](value: Awaitable[A]): A = { + def check: Option[A] = + Try(Await.result(value, Duration(500, "ms"))) match { + case Success(x) => Some(x) + case Failure(_: TimeoutException) => None + case Failure(t) => throw t + } + waitFor(check, progress = Fast, label = "concurrent-tck test result") + } +} + +class ReportingExecutionContext extends TestBase { + val progress = Fast + @volatile var thread: Thread = null + @volatile var reportedOn: Thread = null + @volatile var reported: Throwable = null + val latch = new CountDownLatch(1) + + def report(t: Thread, e: Throwable): Unit = { + reportedOn = t + reported = e + latch.countDown() + } + + def ecesUsingDefaultFactory = { + import java.util.concurrent.{ForkJoinPool} + import java.util.function.Predicate + import scala.reflect.internal.util.RichClassLoader._ + + val path = "java.util.concurrent.ForkJoinPool" + val n = 2 // parallelism + val factory = scala.concurrent.TestUtil.threadFactory(report) + val ueh: Thread.UncaughtExceptionHandler = report(_, _) + val async = true + val coreSize = 4 + val maxSize = 4 + val minRun = 1 // minimumRunnable for liveness + val saturate: Predicate[ForkJoinPool] = (fjp: ForkJoinPool) => false // whether to continue after blocking at maxSize + val keepAlive = 2000L + val fjp = new ForkJoinPool(n, factory, ueh, async, coreSize, maxSize, minRun, saturate, keepAlive, Milliseconds) + ExecutionContext.fromExecutorService(fjp, report(null, _)) + } + + def testUncaughtExceptionReporting(ec: ExecutionContextExecutorService): Unit = once { + done => + val example = new InterruptedException + + @tailrec def spinForThreadDeath(turns: Int): Boolean = + turns > 0 && (thread != null && !thread.isAlive || { Thread.sleep(100L); spinForThreadDeath(turns - 1) }) + + def truthfully(b: Boolean): Option[Boolean] = if (b) Some(true) else None + + // jdk17 thread receives pool exception handler, so wait for thread to die slow and painful expired keepalive + def threadIsDead = 
waitFor(truthfully(spinForThreadDeath(turns = 10)), progress = progress, label = "concurrent-tck-thread-death") + + try { + ec.execute(() => { + thread = Thread.currentThread + throw example + }) + latch.await(2, Seconds) + done(threadIsDead && (example.eq(reported) || example.eq(reported.getCause))) + } + finally ec.shutdown() + } + + test("testUncaughtExceptionReporting")(testUncaughtExceptionReporting { + ecesUsingDefaultFactory + }) +} + +object Test extends App { + new ReportingExecutionContext +} + +package scala.concurrent { + object TestUtil { + def threadFactory(uncaughtExceptionHandler: Thread.UncaughtExceptionHandler) = new impl.ExecutionContextImpl.DefaultThreadFactory(daemonic=true, maxBlockers=256, prefix="test-thread", uncaughtExceptionHandler) + } +} diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check index f21d5c9ce8be..dbe69425548c 100644 --- a/test/files/jvm/scala-concurrent-tck.check +++ b/test/files/jvm/scala-concurrent-tck.check @@ -104,6 +104,12 @@ starting testSuccess finished testSuccess starting testFailure finished testFailure +starting testAwaitSuccess +finished testAwaitSuccess +starting testAwaitFailure +finished testAwaitFailure +starting testFQCNForAwaitAPI +finished testFQCNForAwaitAPI starting testDefaultOutsideFuture finished testDefaultOutsideFuture starting testDefaultFJP @@ -114,10 +120,10 @@ starting testPopCustom finished testPopCustom starting interruptHandling finished interruptHandling +starting rejectedExecutionException +finished rejectedExecutionException starting testNameOfGlobalECThreads finished testNameOfGlobalECThreads -starting testUncaughtExceptionReporting -finished testUncaughtExceptionReporting starting testOnSuccessCustomEC finished testOnSuccessCustomEC starting testKeptPromiseCustomEC diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index f56f2fcb6102..3a9119c438a9 100644 --- 
a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -1,4 +1,3 @@ -// scalac: -deprecation import scala.concurrent.{ Future, @@ -6,105 +5,126 @@ import scala.concurrent.{ TimeoutException, ExecutionException, ExecutionContext, + ExecutionContextExecutorService, CanAwait, Await, + Awaitable, blocking } -import scala.util.{ Try, Success, Failure } -import scala.concurrent.duration.Duration -import scala.concurrent.duration._ -import scala.reflect.{ classTag, ClassTag } -import scala.tools.partest.TestUtil.intercept import scala.annotation.{nowarn, tailrec} +import scala.concurrent.duration._ +import scala.reflect.{classTag, ClassTag} +import scala.tools.testkit.AssertUtil.{Fast, Slow, assertThrows, waitFor, waitForIt} +import scala.util.{Try, Success, Failure} +import scala.util.chaining._ +import java.util.concurrent.{CountDownLatch, ThreadPoolExecutor} +import java.util.concurrent.TimeUnit.{MILLISECONDS => Milliseconds, SECONDS => Seconds} -@nowarn("cat=deprecation") trait TestBase { + trait Done { def apply(proof: => Boolean): Unit } - def once(body: Done => Unit) { - import java.util.concurrent.{ LinkedBlockingQueue, TimeUnit } + + def once(body: Done => Unit): Unit = { + import java.util.concurrent.LinkedBlockingQueue val q = new LinkedBlockingQueue[Try[Boolean]] body(new Done { def apply(proof: => Boolean): Unit = q offer Try(proof) }) - assert(Option(q.poll(2000, TimeUnit.MILLISECONDS)).map(_.get).getOrElse(false)) + var tried: Try[Boolean] = null + def check = q.poll(5000L, Milliseconds).tap(tried = _) != null + waitForIt(check, progress = Slow, label = "concurrent-tck") + assert(tried.isSuccess) + assert(tried.get) // Check that we don't get more than one completion - assert(q.poll(50, TimeUnit.MILLISECONDS) eq null) + assert(q.poll(50, Milliseconds) eq null) } def test[T](name: String)(body: => T): T = { println(s"starting $name") - val r = body - println(s"finished $name") - r + body.tap(_ => println(s"finished 
$name")) + } + + def await[A](value: Awaitable[A]): A = { + def check: Option[A] = + Try(Await.result(value, Duration(500, "ms"))) match { + case Success(x) => Some(x) + case Failure(_: TimeoutException) => None + case Failure(t) => throw t + } + waitFor(check, progress = Fast, label = "concurrent-tck test result") } } -@nowarn("cat=deprecation") -trait FutureCallbacks extends TestBase { + +class FutureCallbacks extends TestBase { import ExecutionContext.Implicits._ def testOnSuccess(): Unit = once { done => - var x = 0 - val f = Future { x = 1 } - f onSuccess { case _ => done(x == 1) } + var x = 0 + val f = Future { x = 1 } + f foreach { _ => done(x == 1) } } def testOnSuccessWhenCompleted(): Unit = once { done => - var x = 0 - val f = Future { x = 1 } - f onSuccess { - case _ if x == 1 => - x = 2 - f onSuccess { case _ => done(x == 2) } - } + var x = 0 + val f = Future { x = 1 } + f onComplete { + case Success(_) if x == 1 => + x = 2 + f foreach { _ => done(x == 2) } + case _ => + } } def testOnSuccessWhenFailed(): Unit = once { done => - val f = Future[Unit] { throw new Exception } - f onSuccess { case _ => done(false) } - f onFailure { case _ => done(true) } + val f = Future[Unit] { throw new Exception } + f onComplete { + case Success(_) => done(false) + case Failure(_) => done(true) + } } def testOnFailure(): Unit = once { done => - val f = Future[Unit] { throw new Exception } - f onSuccess { case _ => done(false) } - f onFailure { case _ => done(true) } + val f = Future[Unit] { throw new Exception } + f onComplete { + case Success(_) => done(false) + case Failure(_) => done(true) + } } def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once { done => - val f = Future[Unit] { throw cause } - f onSuccess { case _ => done(false) } - f onFailure { - case e: ExecutionException if e.getCause == cause => done(true) - case _ => done(false) - } + val f = Future[Unit] { throw cause } + f onComplete { + case Failure(e: ExecutionException) if 
e.getCause == cause => done(true) + case _ => done(false) + } } def testOnFailureWhenTimeoutException(): Unit = once { done => - val f = Future[Unit] { throw new TimeoutException() } - f onSuccess { case _ => done(false) } - f onFailure { - case e: TimeoutException => done(true) - case _ => done(false) - } + val f = Future[Unit] { throw new TimeoutException() } + f onComplete { + case Success(_) => done(false) + case Failure(e: TimeoutException) => done(true) + case Failure(_) => done(false) + } } def testThatNestedCallbacksDoNotYieldStackOverflow(): Unit = { - val promise = Promise[Int] + val promise = Promise[Int]() (0 to 10000).map(Future(_)).foldLeft(promise.future)((pf, f) => f.flatMap(i => pf)) promise.success(-1) } def stressTestNumberofCallbacks(): Unit = once { done => - val promise = Promise[Unit] - val otherPromise = Promise[Unit] + val promise = Promise[Unit]() + val otherPromise = Promise[Unit]() def attachMeaninglessCallbacksTo[T](f: Future[T]): Future[T] = { (1 to 20000).foreach(_ => f.onComplete(_ => ())) f @@ -127,7 +147,7 @@ trait FutureCallbacks extends TestBase { test("testOnFailure")(testOnFailure()) test("testOnFailureWhenSpecialThrowable")(testOnFailureWhenSpecialThrowable(5, new Error)) // testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { }) - //TODO: this test is currently problematic, because NonFatal does not match InterruptedException + //TODO: this test is currently problematic, because NonFatal does not catch InterruptedException //testOnFailureWhenSpecialThrowable(7, new InterruptedException) test("testThatNestedCallbacksDoNotYieldStackOverflow")(testThatNestedCallbacksDoNotYieldStackOverflow()) test("testOnFailureWhenTimeoutException")(testOnFailureWhenTimeoutException()) @@ -135,48 +155,57 @@ trait FutureCallbacks extends TestBase { } -@nowarn("cat=deprecation") -trait FutureCombinators extends TestBase { +class FutureCombinators extends TestBase { import ExecutionContext.Implicits._ def 
testMapSuccess(): Unit = once { done => val f = Future { 5 } val g = f map { x => "result: " + x } - g onSuccess { case s => done(s == "result: 5") } - g onFailure { case _ => done(false) } + g onComplete { + case Success(s) => done(s == "result: 5") + case Failure(_) => done(false) + } } def testMapFailure(): Unit = once { done => val f = Future[Unit] { throw new Exception("exception message") } val g = f map { x => "result: " + x } - g onSuccess { case _ => done(false) } - g onFailure { case t => done(t.getMessage() == "exception message") } + g onComplete { + case Success(_) => done(false) + case Failure(t) => done(t.getMessage() == "exception message") + } } def testMapSuccessPF(): Unit = once { done => val f = Future { 5 } val g = f map { case r => "result: " + r } - g onSuccess { case s => done(s == "result: 5") } - g onFailure { case _ => done(false) } + g onComplete { + case Success(s) => done(s == "result: 5") + case Failure(_) => done(false) + } } def testTransformSuccess(): Unit = once { done => val f = Future { 5 } val g = f.transform(r => "result: " + r, identity) - g onSuccess { case s => done(s == "result: 5") } - g onFailure { case _ => done(false) } + g onComplete { + case Success(s) => done(s == "result: 5") + case Failure(_) => done(false) + } } def testTransformSuccessPF(): Unit = once { done => val f = Future { 5 } val g = f.transform( { case r => "result: " + r }, identity) - g onSuccess { case s => done(s == "result: 5") } - g onFailure { case _ => done(false) } + g onComplete { + case Success(s) => done(s == "result: 5") + case Failure(_) => done(false) + } } def testTransformFailure(): Unit = once { @@ -184,8 +213,10 @@ def testTransformFailure(): Unit = once { val transformed = new Exception("transformed") val f = Future { throw new Exception("expected") } val g = f.transform(identity, _ => transformed) - g onSuccess { case _ => done(false) } - g onFailure { case e => done(e eq transformed) } + g onComplete { + case Success(_) => 
done(false) + case Failure(e) => done(e eq transformed) + } } def testTransformFailurePF(): Unit = once { @@ -193,9 +224,11 @@ def testTransformFailure(): Unit = once { val e = new Exception("expected") val transformed = new Exception("transformed") val f = Future[Unit] { throw e } - val g = f.transform(identity, { case `e` => transformed }) - g onSuccess { case _ => done(false) } - g onFailure { case e => done(e eq transformed) } + val g = f.transform(identity, (_: Throwable @unchecked) match { case `e` => transformed }) + g onComplete { + case Success(_) => done(false) + case Failure(e) => done(e eq transformed) + } } def testTransformResultToResult(): Unit = once { @@ -296,33 +329,41 @@ def testTransformFailure(): Unit = once { done => val f = Future[Unit] { throw new Exception("expected") } val g = f.transform(r => "result: " + r, identity) - g onSuccess { case _ => done(false) } - g onFailure { case t => done(t.getMessage() == "expected") } + g onComplete { + case Success(_) => done(false) + case Failure(t) => done(t.getMessage() == "expected") + } } def testFlatMapSuccess(): Unit = once { done => val f = Future { 5 } val g = f flatMap { _ => Future { 10 } } - g onSuccess { case x => done(x == 10) } - g onFailure { case _ => done(false) } + g onComplete { + case Success(x) => done(x == 10) + case Failure(_) => done(false) + } } def testFlatMapFailure(): Unit = once { done => val f = Future[Unit] { throw new Exception("expected") } val g = f flatMap { _ => Future { 10 } } - g onSuccess { case _ => done(false) } - g onFailure { case t => done(t.getMessage() == "expected") } + g onComplete { + case Success(_) => done(false) + case Failure(t) => done(t.getMessage() == "expected") + } } def testFlatMapDelayed(): Unit = once { done => val f = Future { 5 } - val p = Promise[Int] + val p = Promise[Int]() val g = f flatMap { _ => p.future } - g onSuccess { case x => done(x == 10) } - g onFailure { case _ => done(false) } + g onComplete { + case Success(x) => done(x == 
10) + case Failure(_) => done(false) + } p.success(10) } @@ -330,18 +371,20 @@ def testTransformFailure(): Unit = once { done => val f = Future { 4 } val g = f filter { _ % 2 == 0 } - g onSuccess { case x: Int => done(x == 4) } - g onFailure { case _ => done(false) } + g onComplete { + case Success(x: Int) => done(x == 4) + case Failure(_) => done(false) + } } def testFilterFailure(): Unit = once { done => val f = Future { 4 } val g = f filter { _ % 2 == 1 } - g onSuccess { case x: Int => done(false) } - g onFailure { - case e: NoSuchElementException => done(true) - case _ => done(false) + g onComplete { + case Success(x: Int) => done(false) + case Failure(e: NoSuchElementException) => done(true) + case Failure(_) => done(false) } } @@ -349,18 +392,20 @@ def testTransformFailure(): Unit = once { done => val f = Future { -5 } val g = f collect { case x if x < 0 => -x } - g onSuccess { case x: Int => done(x == 5) } - g onFailure { case _ => done(false) } + g onComplete { + case Success(x: Int) => done(x == 5) + case Failure(_) => done(false) + } } def testCollectFailure(): Unit = once { done => val f = Future { -5 } val g = f collect { case x if x > 0 => x * 2 } - g onSuccess { case _ => done(false) } - g onFailure { - case e: NoSuchElementException => done(true) - case _ => done(false) + g onComplete { + case Success(_) => done(false) + case Failure(e: NoSuchElementException) => done(true) + case Failure(_) => done(false) } } @@ -371,8 +416,10 @@ def testTransformFailure(): Unit = once { f foreach { x => p.success(x * 2) } val g = p.future - g.onSuccess { case res: Int => done(res == 10) } - g.onFailure { case _ => done(false) } + g onComplete { + case Success(res: Int) => done(res == 10) + case Failure(_) => done(false) + } } def testForeachFailure(): Unit = once { @@ -380,110 +427,133 @@ def testTransformFailure(): Unit = once { val p = Promise[Int]() val f = Future[Int] { throw new Exception } f foreach { x => p.success(x * 2) } - f onFailure { case _ => 
p.failure(new Exception) } + f onComplete { case Failure(_) => p.failure(new Exception); case _ => } val g = p.future - g.onSuccess { case _ => done(false) } - g.onFailure { case _ => done(true) } + g onComplete { + case Success(_) => done(false) + case Failure(_) => done(true) + } } def testRecoverSuccess(): Unit = once { done => - val cause = new RuntimeException - val f = Future { - throw cause - } recover { - case re: RuntimeException => - "recovered" } - f onSuccess { case x => done(x == "recovered") } - f onFailure { case any => done(false) } + val cause = new RuntimeException + val f = Future { + throw cause + } recover { + case re: RuntimeException => + "recovered" } + f onComplete { + case Success(x) => done(x == "recovered") + case Failure(_) => done(false) + } } def testRecoverFailure(): Unit = once { done => - val cause = new RuntimeException - val f = Future { - throw cause - } recover { - case te: TimeoutException => "timeout" - } - f onSuccess { case _ => done(false) } - f onFailure { case any => done(any == cause) } + val cause = new RuntimeException + val f = Future { + throw cause + } recover { + case te: TimeoutException => "timeout" + } + f onComplete { + case Success(_) => done(false) + case Failure(any) => done(any == cause) + } } def testRecoverWithSuccess(): Unit = once { done => - val cause = new RuntimeException - val f = Future { - throw cause - } recoverWith { - case re: RuntimeException => - Future { "recovered" } - } - f onSuccess { case x => done(x == "recovered") } - f onFailure { case any => done(false) } + val cause = new RuntimeException + val f = Future { + throw cause + } recoverWith { + case re: RuntimeException => + Future { "recovered" } + } + f onComplete { + case Success(x) => done(x == "recovered") + case Failure(_) => done(false) + } } def testRecoverWithFailure(): Unit = once { done => - val cause = new RuntimeException - val f = Future { - throw cause - } recoverWith { - case te: TimeoutException => - Future { "timeout" 
} - } - f onSuccess { case x => done(false) } - f onFailure { case any => done(any == cause) } + val cause = new RuntimeException + val f = Future { + throw cause + } recoverWith { + case te: TimeoutException => + Future { "timeout" } + } + f onComplete { + case Success(_) => done(false) + case Failure(any) => done(any == cause) + } } def testZipSuccess(): Unit = once { done => - val f = Future { 5 } - val g = Future { 6 } - val h = f zip g - h onSuccess { case (l: Int, r: Int) => done(l+r == 11) } - h onFailure { case _ => done(false) } + val f = Future { 5 } + val g = Future { 6 } + val h = f zip g + h onComplete { + case Success((l: Int, r: Int)) => done(l + r == 11) + case Success(_) => + case Failure(_) => done(false) + } } def testZipFailureLeft(): Unit = once { done => - val cause = new Exception("expected") - val f = Future { throw cause } - val g = Future { 6 } - val h = f zip g - h onSuccess { case _ => done(false) } - h onFailure { case e: Exception => done(e.getMessage == "expected") } + val cause = new Exception("expected") + val f = Future { throw cause } + val g = Future { 6 } + val h = f zip g + h onComplete { + case Success(_) => done(false) + case Failure(e: Exception) => done(e.getMessage == "expected") + case Failure(_) => + } } def testZipFailureRight(): Unit = once { done => - val cause = new Exception("expected") - val f = Future { 5 } - val g = Future { throw cause } - val h = f zip g - h onSuccess { case _ => done(false) } - h onFailure { case e: Exception => done(e.getMessage == "expected") } + val cause = new Exception("expected") + val f = Future { 5 } + val g = Future { throw cause } + val h = f zip g + h onComplete { + case Success(_) => done(false) + case Failure(e: Exception) => done(e.getMessage == "expected") + case Failure(_) => + } } def testFallbackTo(): Unit = once { done => - val f = Future { sys.error("failed") } - val g = Future { 5 } - val h = f fallbackTo g - h onSuccess { case x: Int => done(x == 5) } - h onFailure { case 
_ => done(false) } + val f = Future { sys.error("failed") } + val g = Future { 5 } + val h = f fallbackTo g + h onComplete { + case Success(x: Int) => done(x == 5) + case Failure(_) => done(false) + } } def testFallbackToFailure(): Unit = once { done => - val cause = new Exception - val f = Future { throw cause } - val g = Future { sys.error("failed") } - val h = f fallbackTo g - - h onSuccess { case _ => done(false) } - h onFailure { case e => done(e eq cause) } + val cause = new Exception + val f = Future { throw cause } + val g = Future { sys.error("failed") } + val h = f fallbackTo g + + h onComplete { + case Success(_) => done(false) + case Failure(e) => done(e eq cause) + } } def testFallbackToThis(): Unit = { @@ -552,8 +622,7 @@ def testTransformFailure(): Unit = once { } -@nowarn("cat=deprecation") -trait FutureProjections extends TestBase { +class FutureProjections extends TestBase { import ExecutionContext.Implicits._ def testFailedFailureOnComplete(): Unit = once { @@ -568,9 +637,9 @@ trait FutureProjections extends TestBase { def testFailedFailureOnSuccess(): Unit = once { done => - val cause = new RuntimeException - val f = Future { throw cause } - f.failed onSuccess { case t => done(t == cause) } + val cause = new RuntimeException + val f = Future { throw cause } + f.failed foreach { t => done(t == cause) } } def testFailedSuccessOnComplete(): Unit = once { @@ -584,26 +653,26 @@ trait FutureProjections extends TestBase { def testFailedSuccessOnFailure(): Unit = once { done => - val f = Future { 0 } - f.failed onFailure { - case e: NoSuchElementException => done(true) - case _ => done(false) - } - f.failed onSuccess { case _ => done(false) } + val f = Future { 0 } + f.failed onComplete { + case Success(_) => done(false) + case Failure(_: NoSuchElementException) => done(true) + case Failure(_) => done(false) + } } def testFailedFailureAwait(): Unit = once { done => val cause = new RuntimeException val f = Future { throw cause } - 
done(Await.result(f.failed, Duration(500, "ms")) == cause) + done(await(f.failed) == cause) } def testFailedSuccessAwait(): Unit = once { done => val f = Future { 0 } try { - Await.result(f.failed, Duration(500, "ms")) + await(f.failed) done(false) } catch { case nsee: NoSuchElementException => done(true) @@ -615,23 +684,24 @@ trait FutureProjections extends TestBase { val p = Promise[Int]() val f = p.future Future { - intercept[IllegalArgumentException] { Await.ready(f, Duration.Undefined) } + assertThrows[IllegalArgumentException] { Await.ready(f, Duration.Undefined) } p.success(0) + await(f) Await.ready(f, Duration.Zero) Await.ready(f, Duration(500, "ms")) Await.ready(f, Duration.Inf) done(true) - } onFailure { case x => done(throw x) } + } onComplete { case Failure(x) => done(throw x); case _ => } } def testAwaitNegativeDuration(): Unit = once { done => val f = Promise().future Future { - intercept[TimeoutException] { Await.ready(f, Duration.Zero) } - intercept[TimeoutException] { Await.ready(f, Duration.MinusInf) } - intercept[TimeoutException] { Await.ready(f, Duration(-500, "ms")) } + assertThrows[TimeoutException] { Await.ready(f, Duration.Zero) } + assertThrows[TimeoutException] { Await.ready(f, Duration.MinusInf) } + assertThrows[TimeoutException] { Await.ready(f, Duration(-500, "ms")) } done(true) - } onFailure { case x => done(throw x) } + } onComplete { case Failure(x) => done(throw x); case _ => } } test("testFailedFailureOnComplete")(testFailedFailureOnComplete()) @@ -645,13 +715,13 @@ trait FutureProjections extends TestBase { } -trait Blocking extends TestBase { +class Blocking extends TestBase { import ExecutionContext.Implicits._ def testAwaitSuccess(): Unit = once { done => val f = Future { 0 } - done(Await.result(f, Duration(500, "ms")) == 0) + done(await(f) == 0) } def testAwaitFailure(): Unit = once { @@ -659,7 +729,7 @@ trait Blocking extends TestBase { val cause = new RuntimeException val f = Future { throw cause } try { - Await.result(f, 
Duration(500, "ms")) + await(f) done(false) } catch { case t: Throwable => done(t == cause) @@ -669,7 +739,7 @@ trait Blocking extends TestBase { def testFQCNForAwaitAPI(): Unit = once { done => done(classOf[CanAwait].getName == "scala.concurrent.CanAwait" && - Await.getClass.getName == "scala.concurrent.Await") + Await.getClass.getName == "scala.concurrent.Await$") } test("testAwaitSuccess")(testAwaitSuccess()) @@ -677,13 +747,11 @@ trait Blocking extends TestBase { test("testFQCNForAwaitAPI")(testFQCNForAwaitAPI()) } -trait BlockContexts extends TestBase { +class BlockContexts extends TestBase { import ExecutionContext.Implicits._ - import scala.concurrent.{ Await, Awaitable, BlockContext } + import scala.concurrent.BlockContext - private def getBlockContext(body: => BlockContext): BlockContext = { - Await.result(Future { body }, Duration(500, "ms")) - } + private def getBlockContext(body: => BlockContext): BlockContext = await(Future(body)) // test outside of an ExecutionContext def testDefaultOutsideFuture(): Unit = { @@ -693,15 +761,16 @@ trait BlockContexts extends TestBase { // test BlockContext in our default ExecutionContext def testDefaultFJP(): Unit = { + val prevCurrent = BlockContext.current val bc = getBlockContext(BlockContext.current) - assert(bc.isInstanceOf[java.util.concurrent.ForkJoinWorkerThread]) + assert(bc ne prevCurrent) // Should have been replaced by the EC. 
} // test BlockContext inside BlockContext.withBlockContext def testPushCustom(): Unit = { val orig = BlockContext.current val customBC = new BlockContext() { - override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = orig.blockOn(thunk) + override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = orig.blockOn(thunk) } val bc = getBlockContext({ @@ -717,7 +786,7 @@ trait BlockContexts extends TestBase { def testPopCustom(): Unit = { val orig = BlockContext.current val customBC = new BlockContext() { - override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = orig.blockOn(thunk) + override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = orig.blockOn(thunk) } val bc = getBlockContext({ @@ -734,31 +803,34 @@ trait BlockContexts extends TestBase { test("testPopCustom")(testPopCustom()) } -@nowarn("cat=deprecation") -trait Promises extends TestBase { +class Promises extends TestBase { import ExecutionContext.Implicits._ def testSuccess(): Unit = once { done => - val p = Promise[Int]() - val f = p.future + val p = Promise[Int]() + val f = p.future - f onSuccess { case x => done(x == 5) } - f onFailure { case any => done(false) } + f onComplete { + case Success(x) => done(x == 5) + case Failure(_) => done(false) + } - p.success(5) + p.success(5) } def testFailure(): Unit = once { done => - val e = new Exception("expected") - val p = Promise[Int]() - val f = p.future + val e = new Exception("expected") + val p = Promise[Int]() + val f = p.future - f onSuccess { case x => done(false) } - f onFailure { case any => done(any eq e) } + f onComplete { + case Success(_) => done(false) + case Failure(any) => done(any eq e) + } - p.failure(e) + p.failure(e) } test("testSuccess")(testSuccess()) @@ -766,8 +838,8 @@ trait Promises extends TestBase { } -trait Exceptions extends TestBase { - import java.util.concurrent.Executors +class Exceptions extends TestBase { + import java.util.concurrent.{Executors, 
RejectedExecutionException} def interruptHandling(): Unit = { implicit val e = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(1)) val p = Promise[String]() @@ -780,12 +852,21 @@ trait Exceptions extends TestBase { assert(ee.getCause.isInstanceOf[InterruptedException]) } + def rejectedExecutionException(): Unit = { + implicit val e = ExecutionContext.fromExecutor((r: Runnable) => throw new RejectedExecutionException("foo")) + val p = Promise[String]() + p.success("foo") + val f = p.future.map(identity) + val Failure(t: RejectedExecutionException) = Await.ready(f, 2.seconds).value.get + } + test("interruptHandling")(interruptHandling()) + test("rejectedExecutionException")(rejectedExecutionException()) } -trait GlobalExecutionContext extends TestBase { +class GlobalExecutionContext extends TestBase { import ExecutionContext.Implicits._ - + @nowarn("cat=deprecation") // Thread.getID is deprecated since JDK 19 def testNameOfGlobalECThreads(): Unit = once { done => Future({ @@ -797,9 +878,7 @@ trait GlobalExecutionContext extends TestBase { test("testNameOfGlobalECThreads")(testNameOfGlobalECThreads()) } -@nowarn("cat=deprecation") // Thread.getID is deprecated since JDK 19 -trait CustomExecutionContext extends TestBase { - import scala.concurrent.{ ExecutionContext, Awaitable } +class CustomExecutionContext extends TestBase { def defaultEC = ExecutionContext.global @@ -850,10 +929,9 @@ trait CustomExecutionContext extends TestBase { blocking { once { done => val f = Future(assertNoEC())(defaultEC) - f onSuccess { - case _ => - assertEC() - done(true) + f foreach { _ => + assertEC() + done(true) } assertNoEC() } @@ -869,10 +947,9 @@ trait CustomExecutionContext extends TestBase { blocking { once { done => val f = Promise.successful(10).future - f onSuccess { - case _ => - assertEC() - done(true) + f foreach { _ => + assertEC() + done(true) } } } @@ -911,50 +988,12 @@ trait CustomExecutionContext extends TestBase { assert(count >= 1) } - def 
testUncaughtExceptionReporting(): Unit = once { - done => - import java.util.concurrent.TimeUnit.SECONDS - val example = new InterruptedException() - val latch = new java.util.concurrent.CountDownLatch(1) - @volatile var thread: Thread = null - @volatile var reported: Throwable = null - val ec = ExecutionContext.fromExecutorService(null, t => { - reported = t - latch.countDown() - }) - - // scala/bug#12423, scala/scala#9680 - val threadDeathWaitingPeriod = - if (scala.util.Properties.isJavaAtLeast("17")) 1000L - else 10L - - @tailrec def waitForThreadDeath(turns: Int): Boolean = - if (turns <= 0) false - else if ((thread ne null) && thread.isAlive == false) true - else { - Thread.sleep(threadDeathWaitingPeriod) - waitForThreadDeath(turns - 1) - } - - try { - ec.execute(() => { - thread = Thread.currentThread - throw example - }) - latch.await(2, SECONDS) - done(waitForThreadDeath(turns = 100) && (reported eq example)) - } finally { - ec.shutdown() - } - } - - test("testUncaughtExceptionReporting")(testUncaughtExceptionReporting()) test("testOnSuccessCustomEC")(testOnSuccessCustomEC()) test("testKeptPromiseCustomEC")(testKeptPromiseCustomEC()) test("testCallbackChainCustomEC")(testCallbackChainCustomEC()) } -trait ExecutionContextPrepare extends TestBase { +class ExecutionContextPrepare extends TestBase { val theLocal = new ThreadLocal[String] { override protected def initialValue(): String = "" } @@ -987,7 +1026,7 @@ trait ExecutionContextPrepare extends TestBase { delegate.reportFailure(t) } - implicit val ec = new PreparingExecutionContext + implicit val ec: ExecutionContext = new PreparingExecutionContext def testOnComplete(): Unit = once { done => @@ -1008,17 +1047,15 @@ trait ExecutionContextPrepare extends TestBase { } object Test -extends App -with FutureCallbacks -with FutureCombinators -with FutureProjections -with Promises -with BlockContexts -with Exceptions -with GlobalExecutionContext -with CustomExecutionContext -with ExecutionContextPrepare -{ - 
System.exit(0) +extends App { + new FutureCallbacks + new FutureCombinators + new FutureProjections + new Promises + new Blocking + new BlockContexts + new Exceptions + new GlobalExecutionContext + new CustomExecutionContext + new ExecutionContextPrepare } - diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index 929d0ea23eb5..38fe598ea634 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -1,6 +1,16 @@ -warning: two deprecations (since 2.11.0) -warning: three deprecations (since 2.12.0) -warning: 5 deprecations in total; re-run with -deprecation for details +serialization-new.scala:24: warning: comparing values of types A and B using `equals` unsafely bypasses cooperative equality; use `==` instead + println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) + ^ +serialization-new.scala:24: warning: comparing values of types B and A using `equals` unsafely bypasses cooperative equality; use `==` instead + println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) + ^ +serialization-new.scala:25: warning: comparing values of types A and B using `equals` unsafely bypasses cooperative equality; use `==` instead + assert((x equals y) && (y equals x)) + ^ +serialization-new.scala:25: warning: comparing values of types B and A using `equals` unsafely bypasses cooperative equality; use `==` instead + assert((x equals y) && (y equals x)) + ^ +warning: 4 deprecations (since 2.13.0); re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] arrayEquals(a1, _a1): true @@ -57,8 +67,8 @@ _o2 = Some(1) o2 eq _o2: false, _o2 eq o2: false o2 equals _o2: true, _o2 equals o2: true -s1 = 'hello -_s1 = 'hello +s1 = Symbol(hello) +_s1 = Symbol(hello) s1 eq _s1: true, _s1 eq s1: true s1 equals _s1: true, _s1 equals s1: true @@ -75,12 +85,12 @@ x = BitSet(2, 3) y = BitSet(2, 3) x equals y: true, y equals x: true -x = Map(1 -> A, 2 -> B, 3 -> C) -y = 
Map(1 -> A, 2 -> B, 3 -> C) +x = HashMap(1 -> A, 2 -> B, 3 -> C) +y = HashMap(1 -> A, 2 -> B, 3 -> C) x equals y: true, y equals x: true -x = Set(1, 2) -y = Set(1, 2) +x = HashSet(1, 2) +y = HashSet(1, 2) x equals y: true, y equals x: true x = List((buffers,20), (layers,2), (title,3)) @@ -107,32 +117,28 @@ x = NumericRange 0 until 10 y = NumericRange 0 until 10 x equals y: true, y equals x: true -x = Map(1 -> A, 2 -> B, 3 -> C) -y = Map(1 -> A, 2 -> B, 3 -> C) +x = TreeMap(1 -> A, 2 -> B, 3 -> C) +y = TreeMap(1 -> A, 2 -> B, 3 -> C) x equals y: true, y equals x: true x = TreeSet(1, 2, 3) y = TreeSet(1, 2, 3) x equals y: true, y equals x: true -x = Stack(c, b, a) -y = Stack(c, b, a) +x = LazyList() +y = LazyList() x equals y: true, y equals x: true -x = Stream(0, ?) -y = Stream(0, ?) -x equals y: true, y equals x: true - -x = Map(42 -> FortyTwo) -y = Map(42 -> FortyTwo) +x = TreeMap(42 -> FortyTwo) +y = TreeMap(42 -> FortyTwo) x equals y: true, y equals x: true x = TreeSet(0, 2) y = TreeSet(0, 2) x equals y: true, y equals x: true -x = Vector('a, 'b, 'c) -y = Vector('a, 'b, 'c) +x = Vector(Symbol(a), Symbol(b), Symbol(c)) +y = Vector(Symbol(a), Symbol(b), Symbol(c)) x equals y: true, y equals x: true x = ArrayBuffer(one, two) @@ -147,51 +153,39 @@ x = ArrayBuilder.ofFloat y = ArrayBuilder.ofFloat x equals y: true, y equals x: true -x = ArraySeq(1, 2, 3) -y = ArraySeq(1, 2, 3) -x equals y: true, y equals x: true - -x = ArrayStack(3, 2, 20) -y = ArrayStack(3, 2, 20) -x equals y: true, y equals x: true - x = BitSet(0, 8, 9) y = BitSet(0, 8, 9) x equals y: true, y equals x: true -x = Map(A -> 1, C -> 3, B -> 2) -y = Map(A -> 1, C -> 3, B -> 2) +x = HashMap(A -> 1, B -> 2, C -> 3) +y = HashMap(A -> 1, B -> 2, C -> 3) x equals y: true, y equals x: true -x = Set(buffers, title, layers) -y = Set(buffers, title, layers) +x = HashSet(buffers, layers, title) +y = HashSet(buffers, layers, title) x equals y: true, y equals x: true -x = History() -y = History() +x = 
LinkedHashMap(Linked -> 1, Hash -> 2, Map -> 3) +y = LinkedHashMap(Linked -> 1, Hash -> 2, Map -> 3) x equals y: true, y equals x: true -x = Map(Linked -> 1, Hash -> 2, Map -> 3) -y = Map(Linked -> 1, Hash -> 2, Map -> 3) -x equals y: true, y equals x: true - -x = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) -y = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) +x = List((Linked,1), (Hash,2), (Map,3)) +y = List((Linked,1), (Hash,2), (Map,3)) x equals y: true, y equals x: true -x = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) +x = List((Linked,1), (Hash,2), (Map,3)) y = List((Linked,1), (Hash,2), (Map,3)) x equals y: true, y equals x: true -x = Set(layers, buffers, title) -y = Set(layers, buffers, title) +x = LinkedHashSet(layers, buffers, title) +y = LinkedHashSet(layers, buffers, title) x equals y: true, y equals x: true -x = ArrayBuffer(layers, buffers, title) -y = ArrayBuffer(layers, buffers, title) +x = List(layers, buffers, title) +y = List(layers, buffers, title) x equals y: true, y equals x: true -x = ArrayBuffer(layers, buffers, title) +x = List(layers, buffers, title) y = List(layers, buffers, title) x equals y: true, y equals x: true @@ -211,8 +205,8 @@ x = abc y = abc x equals y: true, y equals x: true -x = WrappedArray(1, 2, 3) -y = WrappedArray(1, 2, 3) +x = ArraySeq(1, 2, 3) +y = ArraySeq(1, 2, 3) x equals y: true, y equals x: true x = TreeSet(1, 2, 3) @@ -247,40 +241,3 @@ x equals y: true, y equals x: true 2 1 2 - -x = UnrolledBuffer(one, two) -y = UnrolledBuffer(one, two) -x equals y: true, y equals x: true - -x = ParArray(abc, def, etc) -y = ParArray(abc, def, etc) -x equals y: true, y equals x: true - -x = ParHashMap(2 -> 4, 1 -> 2) -y = ParHashMap(2 -> 4, 1 -> 2) -x equals y: true, y equals x: true - -x = ParTrieMap(1 -> 2, 2 -> 4) -y = ParTrieMap(1 -> 2, 2 -> 4) -x equals y: true, y equals x: true - -x = ParHashSet(1, 2, 3) -y = ParHashSet(1, 2, 3) -x equals y: true, y equals x: true - -x = ParRange 0 to 4 -y = ParRange 0 to 4 -x equals y: true, y equals 
x: true - -x = ParRange 0 until 4 -y = ParRange 0 until 4 -x equals y: true, y equals x: true - -x = ParMap(5 -> 1, 10 -> 2) -y = ParMap(5 -> 1, 10 -> 2) -x equals y: true, y equals x: true - -x = ParSet(two, one) -y = ParSet(two, one) -x equals y: true, y equals x: true - diff --git a/test/files/jvm/serialization-new.scala b/test/files/jvm/serialization-new.scala index 1b5e8566457d..498008193b58 100644 --- a/test/files/jvm/serialization-new.scala +++ b/test/files/jvm/serialization-new.scala @@ -18,7 +18,7 @@ object Serialize { new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer)) in.readObject().asInstanceOf[A] } - def check[A, B](x: A, y: B) { + def check[A, B](x: A, y: B): Unit = { println("x = " + x) println("y = " + y) println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) @@ -180,7 +180,7 @@ object Test1_scala { object Test2_immutable { import scala.collection.immutable.{ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap, - SortedSet, Stack, Stream, TreeMap, TreeSet, Vector} + SortedSet, LazyList, TreeMap, TreeSet, Vector} // in alphabetic order try { @@ -198,12 +198,12 @@ object Test2_immutable { check(bs2, _bs2) // HashMap - val hm1 = new HashMap[Int, String] + (1 -> "A", 2 -> "B", 3 -> "C") + val hm1 = HashMap.empty[Int, String] ++ List(1 -> "A", 2 -> "B", 3 -> "C") val _hm1: HashMap[Int, String] = read(write(hm1)) check(hm1, _hm1) // HashSet - val hs1 = new HashSet[Int] + 1 + 2 + val hs1 = HashSet.empty[Int] + 1 + 2 val _hs1: HashSet[Int] = read(write(hs1)) check(hs1, _hs1) @@ -213,7 +213,7 @@ object Test2_immutable { check(xs1, _xs1) // ListMap - val lm1 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3) + val lm1 = new ListMap[String, Int] ++ List("buffers" -> 20, "layers" -> 2, "title" -> 3) val _lm1: ListMap[String, Int] = read(write(lm1)) check(lm1, _lm1) @@ -237,7 +237,7 @@ object Test2_immutable { check(r2, _r2) // SortedMap - val sm1 = SortedMap.empty[Int, String] + (2 
-> "B", 3 -> "C", 1 -> "A") + val sm1 = SortedMap.empty[Int, String] ++ List(2 -> "B", 3 -> "C", 1 -> "A") val _sm1: SortedMap[Int, String] = read(write(sm1)) check(sm1, _sm1) @@ -246,14 +246,9 @@ object Test2_immutable { val _ss1: SortedSet[Int] = read(write(ss1)) check(ss1, _ss1) - // Stack - val s1 = new Stack().push("a", "b", "c") - val _s1: Stack[String] = read(write(s1)) - check(s1, _s1) - - // Stream - val st1 = Stream.range(0, 10) - val _st1: Stream[Int] = read(write(st1)) + // LazyList + val st1 = LazyList.range(0, 10) + val _st1: LazyList[Int] = read(write(st1)) check(st1, _st1) // TreeMap @@ -284,9 +279,9 @@ object Test2_immutable { object Test3_mutable { import scala.reflect.ClassTag import scala.collection.mutable.{ - ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList, - HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer, - Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet} + ArrayBuffer, ArrayBuilder, BitSet, + HashMap, HashSet, LinkedHashMap, LinkedHashSet, ListBuffer, + Queue, Stack, StringBuilder, ArraySeq, TreeSet} import scala.collection.concurrent.TrieMap // in alphabetic order @@ -306,17 +301,6 @@ object Test3_mutable { val _abu2: ArrayBuilder[ClassTag[Float]] = read(write(abu2)) check(abu2, _abu2) - // ArraySeq - val aq1 = ArraySeq(1, 2, 3) - val _aq1: ArraySeq[Int] = read(write(aq1)) - check(aq1, _aq1) - - // ArrayStack - val as1 = new ArrayStack[Int] - as1 ++= List(20, 2, 3).iterator - val _as1: ArrayStack[Int] = read(write(as1)) - check(as1, _as1) - // BitSet val bs1 = new BitSet() bs1 += 0 @@ -324,13 +308,7 @@ object Test3_mutable { bs1 += 9 val _bs1: BitSet = read(write(bs1)) check(bs1, _bs1) -/* - // DoubleLinkedList - val dl1 = new DoubleLinkedList[Int](2, null) - dl1.append(new DoubleLinkedList(3, null)) - val _dl1: DoubleLinkedList[Int] = read(write(dl1)) - check(dl1, _dl1) -*/ + // HashMap val hm1 = new HashMap[String, Int] hm1 ++= List(("A", 1), ("B", 2), ("C", 
3)).iterator @@ -343,10 +321,6 @@ object Test3_mutable { val _hs1: HashSet[String] = read(write(hs1)) check(hs1, _hs1) - val h1 = new History[String, Int] - val _h1: History[String, Int] = read(write(h1)) - check(h1, _h1) - // LinkedHashMap { val lhm1 = new LinkedHashMap[String, Int] val list = List(("Linked", 1), ("Hash", 2), ("Map", 3)) @@ -366,13 +340,7 @@ object Test3_mutable { check(lhs1.toSeq, _lhs1.toSeq) // check elements order check(lhs1.toSeq, list) // check elements order } -/* - // LinkedList - val ll1 = new LinkedList[Int](2, null) - ll1.append(new LinkedList(3, null)) - val _ll1: LinkedList[Int] = read(write(ll1)) - check(ll1, _ll1) -*/ + // ListBuffer val lb1 = new ListBuffer[String] lb1 ++= List("white", "black") @@ -397,9 +365,9 @@ object Test3_mutable { val _sb1: StringBuilder = read(write(sb1)) check(sb1, _sb1) - // WrappedArray - val wa1 = WrappedArray.make(Array(1, 2, 3)) - val _wa1: WrappedArray[Int] = read(write(wa1)) + // ArraySeq + val wa1 = ArraySeq.make(Array(1, 2, 3)) + val _wa1: ArraySeq[Int] = read(write(wa1)) check(wa1, _wa1) // TreeSet @@ -538,69 +506,5 @@ object Test { Test6 Test7 Test8 - Test9_parallel } } - -//############################################################################ - - -//############################################################################ -// Test classes in package "scala.collection.parallel" and subpackages -object Test9_parallel { - import scala.collection.parallel._ - - try { - println() - - // UnrolledBuffer - val ub = new collection.mutable.UnrolledBuffer[String] - ub ++= List("one", "two") - val _ub: collection.mutable.UnrolledBuffer[String] = read(write(ub)) - check(ub, _ub) - - // mutable.ParArray - val pa = mutable.ParArray("abc", "def", "etc") - val _pa: mutable.ParArray[String] = read(write(pa)) - check(pa, _pa) - - // mutable.ParHashMap - val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4) - val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm)) - check(mpm, _mpm) - - // mutable.ParTrieMap - 
val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4) - val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc)) - check(mpc, _mpc) - - // mutable.ParHashSet - val mps = mutable.ParHashSet(1, 2, 3) - val _mps: mutable.ParHashSet[Int] = read(write(mps)) - check(mps, _mps) - - // immutable.ParRange - val pr1 = immutable.ParRange(0, 4, 1, true) - val _pr1: immutable.ParRange = read(write(pr1)) - check(pr1, _pr1) - - val pr2 = immutable.ParRange(0, 4, 1, false) - val _pr2: immutable.ParRange = read(write(pr2)) - check(pr2, _pr2) - - // immutable.ParHashMap - val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2) - val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm)) - check(ipm, _ipm) - - // immutable.ParHashSet - val ips = immutable.ParHashSet("one", "two") - val _ips: immutable.ParHashSet[String] = read(write(ips)) - check(ips, _ips) - - } catch { - case e: Exception => - println("Error in Test5_parallel: " + e) - throw e - } -} \ No newline at end of file diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index 7095b5f69560..32e01e6a0777 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -1,6 +1,16 @@ -warning: two deprecations (since 2.11.0) -warning: one deprecation (since 2.12.0) -warning: three deprecations in total; re-run with -deprecation for details +serialization.scala:24: warning: comparing values of types A and B using `equals` unsafely bypasses cooperative equality; use `==` instead + println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) + ^ +serialization.scala:24: warning: comparing values of types B and A using `equals` unsafely bypasses cooperative equality; use `==` instead + println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) + ^ +serialization.scala:25: warning: comparing values of types A and B using `equals` unsafely bypasses cooperative equality; use `==` instead + assert((x equals y) && (y equals x)) + ^ +serialization.scala:25: warning: 
comparing values of types B and A using `equals` unsafely bypasses cooperative equality; use `==` instead + assert((x equals y) && (y equals x)) + ^ +warning: 4 deprecations (since 2.13.0); re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] arrayEquals(a1, _a1): true @@ -57,8 +67,8 @@ _o2 = Some(1) o2 eq _o2: false, _o2 eq o2: false o2 equals _o2: true, _o2 equals o2: true -s1 = 'hello -_s1 = 'hello +s1 = Symbol(hello) +_s1 = Symbol(hello) s1 eq _s1: true, _s1 eq s1: true s1 equals _s1: true, _s1 equals s1: true @@ -75,12 +85,12 @@ x = BitSet(2, 3) y = BitSet(2, 3) x equals y: true, y equals x: true -x = Map(1 -> A, 2 -> B, 3 -> C) -y = Map(1 -> A, 2 -> B, 3 -> C) +x = HashMap(1 -> A, 2 -> B, 3 -> C) +y = HashMap(1 -> A, 2 -> B, 3 -> C) x equals y: true, y equals x: true -x = Set(1, 2) -y = Set(1, 2) +x = HashSet(1, 2) +y = HashSet(1, 2) x equals y: true, y equals x: true x = List((buffers,20), (layers,2), (title,3)) @@ -107,32 +117,28 @@ x = NumericRange 0 until 10 y = NumericRange 0 until 10 x equals y: true, y equals x: true -x = Map(1 -> A, 2 -> B, 3 -> C) -y = Map(1 -> A, 2 -> B, 3 -> C) +x = TreeMap(1 -> A, 2 -> B, 3 -> C) +y = TreeMap(1 -> A, 2 -> B, 3 -> C) x equals y: true, y equals x: true x = TreeSet(1, 2, 3) y = TreeSet(1, 2, 3) x equals y: true, y equals x: true -x = Stack(c, b, a) -y = Stack(c, b, a) +x = LazyList() +y = LazyList() x equals y: true, y equals x: true -x = Stream(0, ?) -y = Stream(0, ?) 
-x equals y: true, y equals x: true - -x = Map(42 -> FortyTwo) -y = Map(42 -> FortyTwo) +x = TreeMap(42 -> FortyTwo) +y = TreeMap(42 -> FortyTwo) x equals y: true, y equals x: true x = TreeSet(0, 2) y = TreeSet(0, 2) x equals y: true, y equals x: true -x = Vector('a, 'b, 'c) -y = Vector('a, 'b, 'c) +x = Vector(Symbol(a), Symbol(b), Symbol(c)) +y = Vector(Symbol(a), Symbol(b), Symbol(c)) x equals y: true, y equals x: true x = ArrayBuffer(one, two) @@ -151,47 +157,43 @@ x = ArraySeq(1, 2, 3) y = ArraySeq(1, 2, 3) x equals y: true, y equals x: true -x = ArrayStack(3, 2, 20) -y = ArrayStack(3, 2, 20) +x = Stack(20, 2, 3) +y = Stack(20, 2, 3) x equals y: true, y equals x: true x = BitSet(0, 8, 9) y = BitSet(0, 8, 9) x equals y: true, y equals x: true -x = Map(A -> 1, C -> 3, B -> 2) -y = Map(A -> 1, C -> 3, B -> 2) -x equals y: true, y equals x: true - -x = Set(buffers, title, layers) -y = Set(buffers, title, layers) +x = HashMap(A -> 1, B -> 2, C -> 3) +y = HashMap(A -> 1, B -> 2, C -> 3) x equals y: true, y equals x: true -x = History() -y = History() +x = HashSet(buffers, layers, title) +y = HashSet(buffers, layers, title) x equals y: true, y equals x: true -x = Map(Linked -> 1, Hash -> 2, Map -> 3) -y = Map(Linked -> 1, Hash -> 2, Map -> 3) +x = LinkedHashMap(Linked -> 1, Hash -> 2, Map -> 3) +y = LinkedHashMap(Linked -> 1, Hash -> 2, Map -> 3) x equals y: true, y equals x: true -x = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) -y = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) +x = List((Linked,1), (Hash,2), (Map,3)) +y = List((Linked,1), (Hash,2), (Map,3)) x equals y: true, y equals x: true -x = ArrayBuffer((Linked,1), (Hash,2), (Map,3)) +x = List((Linked,1), (Hash,2), (Map,3)) y = List((Linked,1), (Hash,2), (Map,3)) x equals y: true, y equals x: true -x = Set(layers, buffers, title) -y = Set(layers, buffers, title) +x = LinkedHashSet(layers, buffers, title) +y = LinkedHashSet(layers, buffers, title) x equals y: true, y equals x: true -x = ArrayBuffer(layers, buffers, 
title) -y = ArrayBuffer(layers, buffers, title) +x = List(layers, buffers, title) +y = List(layers, buffers, title) x equals y: true, y equals x: true -x = ArrayBuffer(layers, buffers, title) +x = List(layers, buffers, title) y = List(layers, buffers, title) x equals y: true, y equals x: true @@ -211,10 +213,6 @@ x = abc y = abc x equals y: true, y equals x: true -x = WrappedArray(1, 2, 3) -y = WrappedArray(1, 2, 3) -x equals y: true, y equals x: true - x = TreeSet(1, 2, 3) y = TreeSet(1, 2, 3) x equals y: true, y equals x: true @@ -247,40 +245,3 @@ x equals y: true, y equals x: true 2 1 2 - -x = UnrolledBuffer(one, two) -y = UnrolledBuffer(one, two) -x equals y: true, y equals x: true - -x = ParArray(abc, def, etc) -y = ParArray(abc, def, etc) -x equals y: true, y equals x: true - -x = ParHashMap(2 -> 4, 1 -> 2) -y = ParHashMap(2 -> 4, 1 -> 2) -x equals y: true, y equals x: true - -x = ParTrieMap(1 -> 2, 2 -> 4) -y = ParTrieMap(1 -> 2, 2 -> 4) -x equals y: true, y equals x: true - -x = ParHashSet(1, 2, 3) -y = ParHashSet(1, 2, 3) -x equals y: true, y equals x: true - -x = ParRange 0 to 4 -y = ParRange 0 to 4 -x equals y: true, y equals x: true - -x = ParRange 0 until 4 -y = ParRange 0 until 4 -x equals y: true, y equals x: true - -x = ParMap(5 -> 1, 10 -> 2) -y = ParMap(5 -> 1, 10 -> 2) -x equals y: true, y equals x: true - -x = ParSet(two, one) -y = ParSet(two, one) -x equals y: true, y equals x: true - diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala index bc61235267c0..d2479180954f 100644 --- a/test/files/jvm/serialization.scala +++ b/test/files/jvm/serialization.scala @@ -18,7 +18,7 @@ object Serialize { new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer)) in.readObject().asInstanceOf[A] } - def check[A, B](x: A, y: B) { + def check[A, B](x: A, y: B): Unit = { println("x = " + x) println("y = " + y) println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) @@ -180,7 +180,7 @@ object 
Test1_scala { object Test2_immutable { import scala.collection.immutable.{ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap, - SortedSet, Stack, Stream, TreeMap, TreeSet, Vector} + SortedSet, LazyList, TreeMap, TreeSet, Vector} // in alphabetic order try { @@ -198,12 +198,12 @@ object Test2_immutable { check(bs2, _bs2) // HashMap - val hm1 = new HashMap[Int, String] + (1 -> "A", 2 -> "B", 3 -> "C") + val hm1 = HashMap.empty[Int, String] ++ List(1 -> "A", 2 -> "B", 3 -> "C") val _hm1: HashMap[Int, String] = read(write(hm1)) check(hm1, _hm1) // HashSet - val hs1 = new HashSet[Int] + 1 + 2 + val hs1 = HashSet.empty[Int] + 1 + 2 val _hs1: HashSet[Int] = read(write(hs1)) check(hs1, _hs1) @@ -213,7 +213,7 @@ object Test2_immutable { check(xs1, _xs1) // ListMap - val lm1 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3) + val lm1 = new ListMap[String, Int] ++ List("buffers" -> 20, "layers" -> 2, "title" -> 3) val _lm1: ListMap[String, Int] = read(write(lm1)) check(lm1, _lm1) @@ -237,7 +237,7 @@ object Test2_immutable { check(r2, _r2) // SortedMap - val sm1 = SortedMap.empty[Int, String] + (2 -> "B", 3 -> "C", 1 -> "A") + val sm1 = SortedMap.empty[Int, String] ++ List(2 -> "B", 3 -> "C", 1 -> "A") val _sm1: SortedMap[Int, String] = read(write(sm1)) check(sm1, _sm1) @@ -246,14 +246,9 @@ object Test2_immutable { val _ss1: SortedSet[Int] = read(write(ss1)) check(ss1, _ss1) - // Stack - val s1 = new Stack().push("a", "b", "c") - val _s1: Stack[String] = read(write(s1)) - check(s1, _s1) - - // Stream - val st1 = Stream.range(0, 10) - val _st1: Stream[Int] = read(write(st1)) + // LazyList + val st1 = LazyList.range(0, 10) + val _st1: LazyList[Int] = read(write(st1)) check(st1, _st1) // TreeMap @@ -285,9 +280,9 @@ object Test2_immutable { object Test3_mutable { import scala.reflect.ClassManifest import scala.collection.mutable.{ - ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList, - HashMap, HashSet, History, 
LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer, - Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet} + ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, + HashMap, HashSet, LinkedHashMap, LinkedHashSet, ListBuffer, + Queue, Stack, StringBuilder, TreeSet} import scala.collection.concurrent.TrieMap // in alphabetic order @@ -325,13 +320,7 @@ object Test3_mutable { bs1 += 9 val _bs1: BitSet = read(write(bs1)) check(bs1, _bs1) -/* - // DoubleLinkedList - val dl1 = new DoubleLinkedList[Int](2, null) - dl1.append(new DoubleLinkedList(3, null)) - val _dl1: DoubleLinkedList[Int] = read(write(dl1)) - check(dl1, _dl1) -*/ + // HashMap val hm1 = new HashMap[String, Int] hm1 ++= List(("A", 1), ("B", 2), ("C", 3)).iterator @@ -344,10 +333,6 @@ object Test3_mutable { val _hs1: HashSet[String] = read(write(hs1)) check(hs1, _hs1) - val h1 = new History[String, Int] - val _h1: History[String, Int] = read(write(h1)) - check(h1, _h1) - // LinkedHashMap { val lhm1 = new LinkedHashMap[String, Int] val list = List(("Linked", 1), ("Hash", 2), ("Map", 3)) @@ -367,13 +352,7 @@ object Test3_mutable { check(lhs1.toSeq, _lhs1.toSeq) // check elements order check(lhs1.toSeq, list) // check elements order } -/* - // LinkedList - val ll1 = new LinkedList[Int](2, null) - ll1.append(new LinkedList(3, null)) - val _ll1: LinkedList[Int] = read(write(ll1)) - check(ll1, _ll1) -*/ + // ListBuffer val lb1 = new ListBuffer[String] lb1 ++= List("white", "black") @@ -398,11 +377,6 @@ object Test3_mutable { val _sb1: StringBuilder = read(write(sb1)) check(sb1, _sb1) - // WrappedArray - val wa1 = WrappedArray.make(Array(1, 2, 3)) - val _wa1: WrappedArray[Int] = read(write(wa1)) - check(wa1, _wa1) - // TreeSet val ts1 = TreeSet[Int]() ++= Array(1, 2, 3) val _ts1: TreeSet[Int] = read(write(ts1)) @@ -532,7 +506,7 @@ object Test8 { @deprecated("Suppress warnings", since="2.11") object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Test1_scala 
Test2_immutable Test3_mutable @@ -540,7 +514,6 @@ object Test { Test6 Test7 Test8 - Test9_parallel Test10_util } } @@ -548,76 +521,16 @@ object Test { //############################################################################ -//############################################################################ -// Test classes in package "scala.collection.parallel" and subpackages -object Test9_parallel { - import scala.collection.parallel._ - - try { - println() - - // UnrolledBuffer - val ub = new collection.mutable.UnrolledBuffer[String] - ub ++= List("one", "two") - val _ub: collection.mutable.UnrolledBuffer[String] = read(write(ub)) - check(ub, _ub) - - // mutable.ParArray - val pa = mutable.ParArray("abc", "def", "etc") - val _pa: mutable.ParArray[String] = read(write(pa)) - check(pa, _pa) - - // mutable.ParHashMap - val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4) - val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm)) - check(mpm, _mpm) - - // mutable.ParTrieMap - val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4) - val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc)) - check(mpc, _mpc) - - // mutable.ParHashSet - val mps = mutable.ParHashSet(1, 2, 3) - val _mps: mutable.ParHashSet[Int] = read(write(mps)) - check(mps, _mps) - - // immutable.ParRange - val pr1 = immutable.ParRange(0, 4, 1, true) - val _pr1: immutable.ParRange = read(write(pr1)) - check(pr1, _pr1) - - val pr2 = immutable.ParRange(0, 4, 1, false) - val _pr2: immutable.ParRange = read(write(pr2)) - check(pr2, _pr2) - - // immutable.ParHashMap - val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2) - val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm)) - check(ipm, _ipm) - - // immutable.ParHashSet - val ips = immutable.ParHashSet("one", "two") - val _ips: immutable.ParHashSet[String] = read(write(ips)) - check(ips, _ips) - - } catch { - case e: Exception => - println("Error in Test5_parallel: " + e) - throw e - } -} - //############################################################################ 
// Test classes in package scala.util object Test10_util { import scala.util.Random - def rep[A](n: Int)(f: => A) { if (n > 0) { f; rep(n-1)(f) } } + def rep[A](n: Int)(f: => A): Unit = { if (n > 0) { f; rep(n-1)(f) } } { val random = new Random(345) val random2: Random = read(write(random)) - rep(5) { assert(random.nextInt == random2.nextInt) } + rep(5) { assert(random.nextInt() == random2.nextInt()) } } } diff --git a/test/files/jvm/signum.scala b/test/files/jvm/signum.scala index 76602a6641de..aa8f01cd8d4b 100644 --- a/test/files/jvm/signum.scala +++ b/test/files/jvm/signum.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { assert(math.signum(Long.MaxValue) == 1L) assert(math.signum(1L) == 1L) assert(math.signum(0L) == 0L) diff --git a/test/files/jvm/strictfp/Test_2.scala b/test/files/jvm/strictfp/Test_2.scala index 0a7a06a6b06b..1169386ff0c2 100644 --- a/test/files/jvm/strictfp/Test_2.scala +++ b/test/files/jvm/strictfp/Test_2.scala @@ -2,7 +2,7 @@ import scala.tools.asm.Opcodes import scala.tools.asm.tree._ import scala.tools.partest.BytecodeTest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object Test extends BytecodeTest { diff --git a/test/files/jvm/stringbuilder.scala b/test/files/jvm/stringbuilder.scala index 1fbf61aeab36..46906e7568b8 100644 --- a/test/files/jvm/stringbuilder.scala +++ b/test/files/jvm/stringbuilder.scala @@ -4,7 +4,7 @@ */ import scala.language.{ postfixOps } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Test1.run() //ctor, reverse Test2.run() //append Test3.run() //insert @@ -13,7 +13,7 @@ object Test { } object Test1 { - def run() { + def run(): Unit = { val j0 = new java.lang.StringBuilder("abc") val s0 = new StringBuilder("abc") println("s0 equals j0 = " + (s0 equals j0)) @@ -30,7 +30,7 @@ Scala is a general purpose programming language designed to express common progr println("j2="+j2+", 
s2="+s2) println("s2.toString equals j2.toString = " + (s2.toString equals j2.toString)) - val j3 = j2; j3 setCharAt (0, j3 charAt 2) + val j3 = j2; j3.setCharAt(0, j3 charAt 2) val s3 = s2; s3(0) = s3(2) println("j3="+j3+", s3="+s3) println("s3.toString equals j3.toString = " + (s3.toString equals j3.toString)) @@ -38,7 +38,7 @@ Scala is a general purpose programming language designed to express common progr } object Test2 { - def run() { + def run(): Unit = { val j0 = new java.lang.StringBuilder("abc") val s0 = new StringBuilder("abc") j0 append true append (1.toByte) append 'a' append 9 append -1L append 1.2e-10f append -2.1e+100d @@ -54,35 +54,35 @@ object Test2 { } object Test3 { - def run() { + def run(): Unit = { val j0 = new java.lang.StringBuilder("abc") val s0 = new StringBuilder("abc") - j0 insert (0, true) insert (0, 1.toByte) insert (0, 'a') insert (0, 88.toShort) insert (0, 9) insert (0, -1L) - s0 insert (0, true) insert (0, 1.toByte) insert (0, 'a') insert (0, 88.toShort) insert (0, 9) insert (0, -1L) + j0.insert(0, true).insert(0, 1.toByte).insert(0, 'a').insert(0, 88.toShort).insert(0, 9).insert(0, -1L) + s0.insert(0, true).insert(0, 1.toByte).insert(0, 'a').insert(0, 88.toShort).insert(0, 9).insert(0, -1L) println("j0="+j0+", s0="+s0) println("s0.toString equals j0.toString = " + (s0.toString equals j0.toString)) val j1 = new java.lang.StringBuilder val s1 = new StringBuilder - j1 insert (0, "###") insert (0, Array('0', '1', '2')) insert (0, "xyz".subSequence(0, 3)) - s1 insert (0, "###") insertAll (0, Array('0', '1', '2')) insertAll (0, List('x', 'y', 'z')) + j1.insert(0, "###").insert(0, Array('0', '1', '2')).insert(0, "xyz".subSequence(0, 3)) + s1.insert(0, "###").insertAll(0, Array('0', '1', '2')).insertAll(0, List('x', 'y', 'z')) println("j1="+j1+", s1="+s1) println("s1.toString equals j1.toString = " + (s1.toString equals j1.toString)) } } object Test4 { - def run() { + def run(): Unit = { val j0 = new java.lang.StringBuilder("abc") // Java 
1.5+ val s0 = new StringBuilder("abc") - val j1 = j0 indexOf("c") - val s1 = s0 indexOf("c") + val j1 = j0.indexOf("c") + val s1 = s0.indexOf("c") println("j1="+j1+", s1="+s1) println("s1 == j1 = " + (s1 == j1)) - val j2 = j0 append "123abc" lastIndexOf("c") - val s2 = s0 append "123abc" lastIndexOf("c") + val j2 = j0.append("123abc").lastIndexOf("c") + val s2 = s0.append("123abc").lastIndexOf("c") println("j2="+j2+", s2="+s2) println("s2 == j2 = " + (s2 == j2)) } diff --git a/test/files/jvm/sync-var.check b/test/files/jvm/sync-var.check index e77aa319a576..cbe459537e7d 100644 --- a/test/files/jvm/sync-var.check +++ b/test/files/jvm/sync-var.check @@ -1 +1,2 @@ +warning: 1 deprecation (since 2.13.0); re-run with -deprecation for details 50005000 50005000 true diff --git a/test/files/jvm/sync-var.scala b/test/files/jvm/sync-var.scala index 8a6c2badea06..b98c81361338 100644 --- a/test/files/jvm/sync-var.scala +++ b/test/files/jvm/sync-var.scala @@ -1,7 +1,6 @@ -import java.util.concurrent._ import java.util.concurrent.atomic._ -object Test { def main(args: Array[String]) { +object Test { def main(args: Array[String]): Unit = { val n = 10000 val i = new AtomicInteger(n) @@ -11,7 +10,7 @@ val sum = new AtomicInteger val q = new scala.concurrent.SyncVar[Int] val producers = (1 to 3) map { z => new Thread { - override def run() { + override def run(): Unit = { var again = true while (again) { val x = i.getAndDecrement() @@ -24,7 +23,7 @@ val producers = (1 to 3) map { z => new Thread { } } val summers = (1 to 7) map { z => new Thread { - override def run() { + override def run(): Unit = { val x = j.decrementAndGet() if (x >= 0) { sum addAndGet q.take() @@ -44,7 +43,7 @@ summers foreach { _.join() } val got = sum.get val expected = (n + 1) * n / 2 -println(got + " " + expected + " " + (got == expected)) +println(got.toString + " " + expected + " " + (got == expected)) producers foreach { _.join() } diff --git a/test/files/jvm/t0014/Test_2.scala 
b/test/files/jvm/t0014/Test_2.scala index 1ab68cb6feda..9697ff3fc7fe 100644 --- a/test/files/jvm/t0014/Test_2.scala +++ b/test/files/jvm/t0014/Test_2.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(classOf[test.NestedAnnotations_1].getName) } } diff --git a/test/files/jvm/t10610.check b/test/files/jvm/t10610.check new file mode 100644 index 000000000000..77f552caee30 --- /dev/null +++ b/test/files/jvm/t10610.check @@ -0,0 +1,3 @@ +t10610.scala:4: warning: @SerialVersionUID has no effect on traits +trait T + ^ diff --git a/test/files/jvm/t10610.scala b/test/files/jvm/t10610.scala new file mode 100644 index 000000000000..b54e0de685a8 --- /dev/null +++ b/test/files/jvm/t10610.scala @@ -0,0 +1,13 @@ +//> using options -Xlint:serial +// +@SerialVersionUID(0L) // should have no effect +trait T + +object Test extends App { + try { + classOf[T].getDeclaredField("serialVersionUID") + assert(false) + } catch { + case nsfe: NoSuchFieldException => + } +} diff --git a/test/files/jvm/t10880.scala b/test/files/jvm/t10880.scala index 6edc0a62dcce..0f1885c2f072 100644 --- a/test/files/jvm/t10880.scala +++ b/test/files/jvm/t10880.scala @@ -15,4 +15,4 @@ object Test extends App { println(ctor.getParameters.map(_.getParameterizedType).toList) println(ctor.getGenericParameterTypes.toList) -} \ No newline at end of file +} diff --git a/test/files/jvm/t1116.scala b/test/files/jvm/t1116.scala index 023e5b3a02c1..b9d202cd3ecc 100644 --- a/test/files/jvm/t1116.scala +++ b/test/files/jvm/t1116.scala @@ -21,7 +21,7 @@ object Foo { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Serialize.write(Foo.obj_foo(3)) } } diff --git a/test/files/jvm/t1143-2/t1143-2.scala b/test/files/jvm/t1143-2/t1143-2.scala index 13ab13b48c7a..571aa86cd324 100644 --- a/test/files/jvm/t1143-2/t1143-2.scala +++ b/test/files/jvm/t1143-2/t1143-2.scala @@ -56,7 +56,7 @@ class Main extends Serializable { } 
object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { (new Main).main(Array[String]()) } } diff --git a/test/files/jvm/t1143.scala b/test/files/jvm/t1143.scala index eb03c7224ee4..7200fd221f5d 100644 --- a/test/files/jvm/t1143.scala +++ b/test/files/jvm/t1143.scala @@ -49,7 +49,7 @@ class Form extends Component { @SerialVersionUID(1L) class Main extends Serializable { var pass = "pass" - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val f = new Form { val p = new Printer(new VarModel(pass, s => pass = s)) } @@ -58,7 +58,7 @@ class Main extends Serializable { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { (new Main).main(Array[String]()) } } diff --git a/test/files/jvm/t1342/SI.scala b/test/files/jvm/t1342/SI.scala index 7c37d4bcd7c3..5d188b3b0343 100644 --- a/test/files/jvm/t1342/SI.scala +++ b/test/files/jvm/t1342/SI.scala @@ -1,5 +1,5 @@ class SI extends JI { - def varArgsMethod( args : String*) { + def varArgsMethod( args : String*): Unit = { for( arg <- args ) println( arg ) } } diff --git a/test/files/jvm/t1461.scala b/test/files/jvm/t1461.scala index f0e3cea6cdf1..297cbd6f4100 100644 --- a/test/files/jvm/t1461.scala +++ b/test/files/jvm/t1461.scala @@ -1,6 +1,6 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val jl = classOf[Foo].getMethod("jl", classOf[Baz[_]]) jl.getGenericParameterTypes // works fine @@ -13,6 +13,6 @@ object Test { class Baz[T] class Foo { - def l(b: Baz[Long]) { } - def jl(b: Baz[java.lang.Long]) { } + def l(b: Baz[Long]): Unit = { } + def jl(b: Baz[java.lang.Long]): Unit = { } } diff --git a/test/files/jvm/t1600.javaopts b/test/files/jvm/t1600.javaopts deleted file mode 100644 index f4038254ba29..000000000000 --- a/test/files/jvm/t1600.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm.maybe.because.context.classloader \ No newline at end of file diff --git 
a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala index 5a1b2900c500..4211294e386c 100644 --- a/test/files/jvm/t1600.scala +++ b/test/files/jvm/t1600.scala @@ -1,3 +1,4 @@ +//> using javaOpt -Dneeds.forked.jvm.maybe.because.context.classloader /** * Checks that serialization of hash-based collections works correctly if the hashCode @@ -6,7 +7,7 @@ object Test { import collection._ - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { for (i <- Seq(0, 1, 2, 10, 100)) { def entries = (0 until i).map(i => (new Foo, i)).toList def elements = entries.map(_._1) @@ -21,7 +22,7 @@ object Test { } } - private def test[A <: AnyRef](collections: Seq[A], expectedSize: Int, assertFunction: (A, Int) => Unit) { + private def test[A <: AnyRef](collections: Seq[A], expectedSize: Int, assertFunction: (A, Int) => Unit): Unit = { for (collection <- collections) { assertFunction(collection, expectedSize) @@ -49,7 +50,7 @@ object Test { bos.toByteArray } - private def assertMap[A, B](map: Map[A, B], expectedSize: Int) { + private def assertMap[A, B](map: Map[A, B], expectedSize: Int): Unit = { assert(expectedSize == map.size, "expected map size: " + expectedSize + ", actual size: " + map.size) map.foreach { case (k, v) => assert(map.contains(k), "contains should return true for key in the map, key: " + k) @@ -57,7 +58,7 @@ object Test { } } - private def assertSet[A](set: Set[A], expectedSize: Int) { + private def assertSet[A](set: Set[A], expectedSize: Int): Unit = { assert(expectedSize == set.size, "expected set size: " + expectedSize + ", actual size: " + set.size) set.foreach { e => assert(set.contains(e), "contains should return true for element in the set, element: " + e) } } diff --git a/test/files/jvm/t2104.scala b/test/files/jvm/t2104.scala deleted file mode 100644 index 655d74cee5e1..000000000000 --- a/test/files/jvm/t2104.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* https://lampsvn.epfl.ch/trac/scala/ticket/2104 - symptom: Source via Buffered Source 
always loses the last char of the input file. - cause: BufferedSource? doesn't check return for -1 (EOF), and uses reader.ready() improperly as a substitute. - - test: check over all possible strings of length up to N over alphabet chars: - write file, then read back its chars, and get back the original. - -*/ -object Test -{ - val N=4 - - import java.io.{ File => JFile } - import java.io.FileWriter - import io.Source - def overwrite(file: JFile,w: FileWriter=>Unit) { - val fw=new FileWriter(file) - w(fw) - fw.close - } - def delete_after(f: JFile,g: Source=>Unit) = { - g(Source.fromFile(f)) - f.delete - } - def store_tempfile(f: FileWriter=>Unit)(implicit name:String) : JFile = { - val tp=JFile.createTempFile(name,null) - overwrite(tp,f) - tp - } - - implicit val name="t2104" - val chars=List('\n','\r','a') - - type Cs = List[Char] - def all_strings(n: Int) : List[Cs] = { - if (n==0) List(Nil) - else { - val sufs=all_strings(n-1) - chars.flatMap((c)=>sufs.map(c :: _)) - } - } - def test(n: Int) { - for(l <- all_strings(n)) { - val tmp=store_tempfile((f)=>l.foreach(f.write(_))) - delete_after(tmp,(s)=>assert(s.toList == l)) - } - } - def main(args: Array[String]) { - (0 until N).foreach(test(_)) - } -} diff --git a/test/files/jvm/t2163/t2163.scala b/test/files/jvm/t2163/t2163.scala index fdf19c4e2580..4cf2fa4251e5 100644 --- a/test/files/jvm/t2163/t2163.scala +++ b/test/files/jvm/t2163/t2163.scala @@ -1,6 +1,4 @@ -import scala.language.{ higherKinds } - class T2163Scala[CC[X]](x: CC[Int]) { def bar[DD[X]](meh: DD[Int]): CC[Int] = x } diff --git a/test/files/jvm/t2214.scala b/test/files/jvm/t2214.scala index db3e400bbbef..062a506a6817 100644 --- a/test/files/jvm/t2214.scala +++ b/test/files/jvm/t2214.scala @@ -25,7 +25,7 @@ object Test { } } - def foreach(os: java.io.ObjectInputStream)(f: Object => Unit) { + def foreach(os: java.io.ObjectInputStream)(f: Object => Unit): Unit = { try { val obj = os.readObject if (obj != null) { diff --git 
a/test/files/jvm/t2470/Test_1.scala b/test/files/jvm/t2470/Test_1.scala index 00cf28748285..a74edb024795 100644 --- a/test/files/jvm/t2470/Test_1.scala +++ b/test/files/jvm/t2470/Test_1.scala @@ -4,7 +4,7 @@ object Test { def foo = 0 } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val m = classOf[Foo].getDeclaredMethods().find(_.toString.contains("foo")).get println(m.getAnnotations().toList) } diff --git a/test/files/jvm/t2511.scala b/test/files/jvm/t2511.scala index eb57dc503deb..ccf4ac140822 100644 --- a/test/files/jvm/t2511.scala +++ b/test/files/jvm/t2511.scala @@ -25,7 +25,7 @@ object Test { in.readObject.asInstanceOf[MyMessage] } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val m = unserialize(serialize) // Xcheckinit freaks out here but its nullness is what we're testing try println(m.message) diff --git a/test/files/jvm/t2570/Test.scala b/test/files/jvm/t2570/Test.scala index ad4d29dd7f6d..6f892011920c 100644 --- a/test/files/jvm/t2570/Test.scala +++ b/test/files/jvm/t2570/Test.scala @@ -1,3 +1,3 @@ class Test2 extends Test1[Test3[Test4]] class Test4 -object Test extends App {} \ No newline at end of file +object Test extends App {} diff --git a/test/files/jvm/t2585/genericouter.scala b/test/files/jvm/t2585/genericouter.scala index e06aa8101ebc..be55bbe9efbc 100644 --- a/test/files/jvm/t2585/genericouter.scala +++ b/test/files/jvm/t2585/genericouter.scala @@ -22,4 +22,4 @@ class X { val oImpl = new OuterImpl(this) new oImpl.Inner } -} \ No newline at end of file +} diff --git a/test/files/jvm/t2827.scala b/test/files/jvm/t2827.scala index d89e68516b0c..0d4dfdac5578 100644 --- a/test/files/jvm/t2827.scala +++ b/test/files/jvm/t2827.scala @@ -6,7 +6,7 @@ object Stooges extends Enumeration { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(Stooges.Larry) println(Stooges.Curly) println(Stooges.Moe) diff --git a/test/files/jvm/t3003/Test_1.scala 
b/test/files/jvm/t3003/Test_1.scala index 8ec08bebc682..321aeb3667ca 100644 --- a/test/files/jvm/t3003/Test_1.scala +++ b/test/files/jvm/t3003/Test_1.scala @@ -2,7 +2,7 @@ class C { @Annot(optionType=classOf[String]) val k = 0 } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val xs = ( classOf[C].getDeclaredFields.toList . sortBy(f => f.getName) diff --git a/test/files/jvm/t3415/HelloWorld.scala b/test/files/jvm/t3415/HelloWorld.scala index 5ef012390ef9..eff464fb879e 100644 --- a/test/files/jvm/t3415/HelloWorld.scala +++ b/test/files/jvm/t3415/HelloWorld.scala @@ -1,4 +1,4 @@ object Test extends App { @Hello - def foo() { } + def foo(): Unit = { } } diff --git a/test/files/jvm/t5471.scala b/test/files/jvm/t5471.scala index 2efd869b6194..3310ce11d23c 100644 --- a/test/files/jvm/t5471.scala +++ b/test/files/jvm/t5471.scala @@ -1,7 +1,7 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { import scala.math.Numeric import scala.math.Numeric.Implicits._ diff --git a/test/files/jvm/t680.scala b/test/files/jvm/t680.scala index b0b0c9f7c06a..33db4810469b 100644 --- a/test/files/jvm/t680.scala +++ b/test/files/jvm/t680.scala @@ -1,5 +1,5 @@ object Test { - def main(args:Array[String]) { + def main(args:Array[String]): Unit = { val sb = new java.lang.StringBuilder() // use Java 1.5 sb.setLength(0) } diff --git a/test/files/jvm/t7146.check b/test/files/jvm/t7146.check index b2c6e444f758..58be5746aa3b 100644 --- a/test/files/jvm/t7146.check +++ b/test/files/jvm/t7146.check @@ -1,4 +1,4 @@ -ExecutionContext.global is a scala.concurrent.impl.ExecutionContextImpl. +ExecutionContext.global is a ForkJoinPool should have non-null UncaughtExceptionHandler == true -ExecutionContext.global.executor.getUncaughtExceptionHandler is a scala.concurrent.impl.ExecutionContextImpl. +ExecutionContext.global.getUncaughtExceptionHandler is a scala.concurrent.impl.ExecutionContextImpl. 
should just print out on uncaught: true diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala index 89030730a9c1..ed61a7086b56 100644 --- a/test/files/jvm/t7146.scala +++ b/test/files/jvm/t7146.scala @@ -1,25 +1,27 @@ import scala.language.{ reflectiveCalls } -import java.util.concurrent.Executor +import java.util.concurrent.{Executor, ForkJoinPool} import scala.concurrent._ import scala.util.control.NoStackTrace object Test { def main(args: Array[String]): Unit = { - val ec = ExecutionContext.global.toString - if (ec startsWith "scala.concurrent.impl.ExecutionContextImpl") - println("ExecutionContext.global is a scala.concurrent.impl.ExecutionContextImpl.") - else println(s"!! ExecutionContext.global == $ec") + ExecutionContext.global match { + case fjp: ForkJoinPool => + val u = fjp.getUncaughtExceptionHandler - val u = ExecutionContext.global.asInstanceOf[{ def executor: Executor }].executor. - asInstanceOf[{ def getUncaughtExceptionHandler: Thread.UncaughtExceptionHandler }]. - getUncaughtExceptionHandler - println(s"should have non-null UncaughtExceptionHandler == ${u ne null}") - if (u.toString startsWith "scala.concurrent.impl.ExecutionContextImpl") - println("ExecutionContext.global.executor.getUncaughtExceptionHandler is a scala.concurrent.impl.ExecutionContextImpl.") - else println(s"!! ExecutionContext.global.executor.getUncaughtExceptionHandler == $u") + println("ExecutionContext.global is a ForkJoinPool") + println(s"should have non-null UncaughtExceptionHandler == ${u ne null}") - print("should just print out on uncaught: ") - u.uncaughtException(Thread.currentThread, new Throwable { override def printStackTrace() { println("true") } }) + if (u.toString startsWith "scala.concurrent.impl.ExecutionContextImpl") + println("ExecutionContext.global.getUncaughtExceptionHandler is a scala.concurrent.impl.ExecutionContextImpl.") + else + println(s"!! 
ExecutionContext.global.executor.getUncaughtExceptionHandler == $u") + + print("should just print out on uncaught: ") + u.uncaughtException(Thread.currentThread, new Throwable { override def printStackTrace(): Unit = { println("true") } }) + case other => + println(s"!! ExecutionContext.global == $other") + } } } diff --git a/test/files/jvm/t7181/Foo_1.scala b/test/files/jvm/t7181/Foo_1.scala index f9dfdd444250..6b3633f503ce 100644 --- a/test/files/jvm/t7181/Foo_1.scala +++ b/test/files/jvm/t7181/Foo_1.scala @@ -2,7 +2,7 @@ class Exception1 extends RuntimeException class Exception2 extends RuntimeException class Foo_1 { - def foo(baz: Baz) { + def foo(baz: Baz): Unit = { try { baz.bar } catch { @@ -11,9 +11,9 @@ class Foo_1 { } finally { // this should be the only copy of the magic constant 3 // making it easy to detect copies of this finally block - println(s"finally ${3}") + println("finally " + 3) } - println(s"normal flow") + println("normal flow") } } diff --git a/test/files/jvm/t7253.check b/test/files/jvm/t7253.check deleted file mode 100644 index 43f53aba123d..000000000000 --- a/test/files/jvm/t7253.check +++ /dev/null @@ -1 +0,0 @@ -bytecode identical diff --git a/test/files/jvm/t7253/test.scala b/test/files/jvm/t7253/test.scala index a3f1e86e6526..9ee22aa71887 100644 --- a/test/files/jvm/t7253/test.scala +++ b/test/files/jvm/t7253/test.scala @@ -1,28 +1,20 @@ -import scala.tools.partest.{BytecodeTest, ASMConverters} +//> using options -Werror -Xlint -import scala.tools.nsc.util.JavaClassPath -import java.io.InputStream -import scala.tools.asm -import asm.ClassReader -import asm.tree.{ClassNode, InsnList} -import scala.collection.JavaConverters._ +import scala.tools.asm.Opcodes +import scala.tools.partest.BytecodeTest +import scala.tools.testkit.ASMConverters._ object Test extends BytecodeTest { - import ASMConverters._ - def show: Unit = { - val instrBaseSeqs = Seq("ScalaClient_1", "JavaClient_1") map (name => 
instructionsFromMethod(getMethod(loadClassNode(name), "foo"))) - val instrSeqs = instrBaseSeqs map (_ filter isInvoke) - cmpInstructions(instrSeqs(0), instrSeqs(1)) + def show(): Unit = { + def fooOf(name: String) = instructionsFromMethod(getMethod(loadClassNode(name), "foo")).filter(isInvoke) + cmpInstructions(fooOf("ScalaClient_1"), fooOf("JavaClient_1")) } - def cmpInstructions(isa: List[Instruction], isb: List[Instruction]) = { - if (isa == isb) println("bytecode identical") - else diffInstructions(isa, isb) - } + def cmpInstructions(isa: List[Instruction], isb: List[Instruction]) = + if (!isa.sameElements(isb)) + diffInstructions(isa, isb) - def isInvoke(node: Instruction): Boolean = { - val opcode = node.opcode - (opcode == "INVOKEVIRTUAL") || (opcode == "INVOKEINTERFACE") - } + def isInvoke(node: Instruction): Boolean = + node.opcode == Opcodes.INVOKEVIRTUAL || node.opcode == Opcodes.INVOKEINTERFACE } diff --git a/test/files/jvm/t7994s.check b/test/files/jvm/t7994s.check new file mode 100644 index 000000000000..5f68d930550c --- /dev/null +++ b/test/files/jvm/t7994s.check @@ -0,0 +1,4 @@ +Test$$anon$1 +null +Test$$anon$1$$anon$2 +null diff --git a/test/files/jvm/t7994s.scala b/test/files/jvm/t7994s.scala new file mode 100644 index 000000000000..36b8068018a5 --- /dev/null +++ b/test/files/jvm/t7994s.scala @@ -0,0 +1,12 @@ +object Test { + def main(args: Array[String]): Unit = { + val o = new MyTest() { + val i: MyTest = new MyTest() {} + } + } +} + +class MyTest { + println(this.getClass.getName) + println(this.getClass.getDeclaringClass) +} \ No newline at end of file diff --git a/test/files/jvm/t8582.check b/test/files/jvm/t8582.check index 1a96d9a8614a..a6d44fc2f17a 100644 --- a/test/files/jvm/t8582.check +++ b/test/files/jvm/t8582.check @@ -1,6 +1,3 @@ -t8582.scala:18: warning: class BeanInfo in package beans is deprecated (since 2.12.0): the generation of BeanInfo classes is no longer supported - class C1 - ^ getClass on module gives module class class 
p1.p2.Singleton$Singleton$ @@ -32,10 +29,6 @@ Because that attribute leads to an entry for B1 in the constant pool, C1 needs a className[A1$B1] outerClassName[A1] innerName[B1] access[1] className[A1$B1$C1] outerClassName[A1$B1] innerName[C1] access[1] -The BeanInfo class has the same InnerClass attributes as the corresponding bean - className[A1$B1] outerClassName[A1] innerName[B1] access[1] - className[A1$B1$C1] outerClassName[A1$B1] innerName[C1] access[1] - Class A2 mentions class C2 in the constant pool (due to method f), therefore it needs an InnerClass attribute for C1 className[A2$B2] outerClassName[A2] innerName[B2] access[1] className[A2$B2$C2] outerClassName[A2$B2] innerName[C2] access[1] diff --git a/test/files/jvm/t8582.scala b/test/files/jvm/t8582.scala index fdecef8e4000..8d33663d45e3 100644 --- a/test/files/jvm/t8582.scala +++ b/test/files/jvm/t8582.scala @@ -1,6 +1,6 @@ -// scalac: -deprecation +//> using options -deprecation import scala.tools.partest.BytecodeTest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ package p1 { package p2 { @@ -14,7 +14,6 @@ package p1 { class A1 { class B1 { - @scala.beans.BeanInfo class C1 } } @@ -38,7 +37,7 @@ object Test extends BytecodeTest { println(cnode.innerClasses.asScala.toList.map(i => s"className[${i.name}] outerClassName[${i.outerName}] innerName[${i.innerName}] access[${i.access}]").mkString(" ", "\n ", "")) } - def show() { + def show(): Unit = { println("getClass on module gives module class") println(" " + Singleton.Singleton.getClass) @@ -69,9 +68,6 @@ object Test extends BytecodeTest { "Because that attribute leads to an entry for B1 in the constant pool, C1 needs an InnerClass attribute for B1.") printInner("A1$B1$C1") - nprintln("The BeanInfo class has the same InnerClass attributes as the corresponding bean") - printInner("A1$B1$C1BeanInfo") - nprintln("Class A2 mentions class C2 in the constant pool (due to method f), therefore it needs an InnerClass attribute 
for C1") printInner("A2") println("B2") diff --git a/test/files/jvm/t8689.javaopts b/test/files/jvm/t8689.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/jvm/t8689.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/jvm/t8689.scala b/test/files/jvm/t8689.scala index 3ee20d711a92..bf4fb6ac9444 100644 --- a/test/files/jvm/t8689.scala +++ b/test/files/jvm/t8689.scala @@ -1,3 +1,4 @@ +//> using javaOpt -Dneeds.forked.jvm object Test { def main(args: Array[String]): Unit = { import scala.concurrent._ diff --git a/test/files/jvm/t8786-sig.scala b/test/files/jvm/t8786-sig.scala index 63e76c4ead3a..8d7c4066f413 100644 --- a/test/files/jvm/t8786-sig.scala +++ b/test/files/jvm/t8786-sig.scala @@ -44,26 +44,26 @@ object Test extends App { val as = classOf[Array[String]] val ai = classOf[Array[Int]] - check(sig("m1", sq) , "public java.lang.Object A.m1(scala.collection.Seq)") - check(sig("m2", sq) , "public java.lang.Object A.m2(scala.collection.Seq)") - check(sig("m3", sq) , "public java.lang.Object A.m3(scala.collection.Seq)") - check(sig("m4", sq) , "public int A.m4(scala.collection.Seq)") - check(sig("m5", sq) , "public java.lang.String A.m5(scala.collection.Seq)") - check(sig("m6", sq) , "public java.lang.String A.m6(scala.collection.Seq)") - check(sig("m7", sq) , "public int A.m7(scala.collection.Seq)") - check(sig("m8", sq) , "public java.lang.Object A.m8(scala.collection.Seq)") + check(sig("m1", sq) , "public java.lang.Object A.m1(scala.collection.immutable.Seq)") + check(sig("m2", sq) , "public java.lang.Object A.m2(scala.collection.immutable.Seq)") + check(sig("m3", sq) , "public java.lang.Object A.m3(scala.collection.immutable.Seq)") + check(sig("m4", sq) , "public int A.m4(scala.collection.immutable.Seq)") + check(sig("m5", sq) , "public java.lang.String A.m5(scala.collection.immutable.Seq)") + check(sig("m6", sq) , "public java.lang.String 
A.m6(scala.collection.immutable.Seq)") + check(sig("m7", sq) , "public int A.m7(scala.collection.immutable.Seq)") + check(sig("m8", sq) , "public java.lang.Object A.m8(scala.collection.immutable.Seq)") - check(genSig("m1", sq), "public T A.m1(scala.collection.Seq)") - check(genSig("m2", sq), "public T A.m2(scala.collection.Seq)") - check(genSig("m3", sq), "public T A.m3(scala.collection.Seq)") + check(genSig("m1", sq), "public T A.m1(scala.collection.immutable.Seq)") + check(genSig("m2", sq), "public T A.m2(scala.collection.immutable.Seq)") + check(genSig("m3", sq), "public T A.m3(scala.collection.immutable.Seq)") // TODO: the signature for is wrong for T <: Int, scala/bug#9846. The signature should be - // `public int A.m4(scala.collection.Seq)`. This is testing the status quo. - check(genSig("m4", sq), "public T A.m4(scala.collection.Seq)") - if (!isJavaAtLeast("15")) check(genSig("m5", sq), "public T A.m5(scala.collection.Seq)") - if ( isJavaAtLeast("15")) check(genSig("m5", sq), "public T A.m5(scala.collection.Seq)") - check(genSig("m6", sq), "public java.lang.String A.m6(scala.collection.Seq)") - check(genSig("m7", sq), "public int A.m7(scala.collection.Seq)") - check(genSig("m8", sq), "public U A.m8(scala.collection.Seq)") + // `public int A.m4(scala.collection.immutable.Seq)`. This is testing the status quo. 
+ check(genSig("m4", sq), "public T A.m4(scala.collection.immutable.Seq)") + if (!isJavaAtLeast(15)) check(genSig("m5", sq), "public T A.m5(scala.collection.immutable.Seq)") + if ( isJavaAtLeast(15)) check(genSig("m5", sq), "public T A.m5(scala.collection.immutable.Seq)") + check(genSig("m6", sq), "public java.lang.String A.m6(scala.collection.immutable.Seq)") + check(genSig("m7", sq), "public int A.m7(scala.collection.immutable.Seq)") + check(genSig("m8", sq), "public U A.m8(scala.collection.immutable.Seq)") // varargs forwarder @@ -82,8 +82,8 @@ object Test extends App { check(genSig("m3", ao), "public T A.m3(T...)") // testing status quo: signature is wrong for T <: Int, scala/bug#9846 check(genSig("m4", ao), "public T A.m4(T...)") - if (!isJavaAtLeast("15")) check(genSig("m5", as), "public T A.m5(T...)") - if ( isJavaAtLeast("15")) check(genSig("m5", as), "public T A.m5(T...)") + if (!isJavaAtLeast(15)) check(genSig("m5", as), "public T A.m5(T...)") + if ( isJavaAtLeast(15)) check(genSig("m5", as), "public T A.m5(T...)") check(genSig("m6", as), "public java.lang.String A.m6(java.lang.String...)") check(genSig("m7", ai), "public int A.m7(int...)") check(genSig("m8", ao), "public U A.m8(U...)") @@ -112,8 +112,8 @@ object Test extends App { check(genSig("n3", ob), "public T A.n3(java.lang.Object)") // testing status quo: signature is wrong for T <: Int, scala/bug#9846 check(genSig("n4", ob), "public T A.n4(java.lang.Object)") - if (!isJavaAtLeast("15")) check(genSig("n5", as), "public T A.n5(T[])") - if ( isJavaAtLeast("15")) check(genSig("n5", as), "public T A.n5(T[])") + if (!isJavaAtLeast(15)) check(genSig("n5", as), "public T A.n5(T[])") + if ( isJavaAtLeast(15)) check(genSig("n5", as), "public T A.n5(T[])") check(genSig("n6", as), "public java.lang.String A.n6(java.lang.String[])") check(genSig("n7", ai), "public int A.n7(int[])") check(genSig("n8", ob), "public U A.n8(java.lang.Object)") diff --git a/test/files/jvm/throws-annot-from-java.check 
b/test/files/jvm/throws-annot-from-java.check index 4a4bd6ad2110..75c9a96fe827 100644 --- a/test/files/jvm/throws-annot-from-java.check +++ b/test/files/jvm/throws-annot-from-java.check @@ -17,7 +17,7 @@ scala> :paste println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty) println(throwsAnn) } - println + println() { val method = clazz.info.member(newTermName("bar")) @@ -29,9 +29,7 @@ scala> :paste println(throwsAnn) } } - -// Exiting paste mode, now interpreting. - +// Exiting paste mode... now interpreting. foo atp.typeParams.isEmpty: true throws[IllegalStateException](classOf[java.lang.IllegalStateException]) diff --git a/test/files/jvm/throws-annot-from-java/Test_3.scala b/test/files/jvm/throws-annot-from-java/Test_3.scala index df62e032262e..1c85d9302e33 100644 --- a/test/files/jvm/throws-annot-from-java/Test_3.scala +++ b/test/files/jvm/throws-annot-from-java/Test_3.scala @@ -13,7 +13,7 @@ object Test extends ReplTest { println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty) println(throwsAnn) } - println + println() { val method = clazz.info.member(newTermName("bar")) diff --git a/test/files/jvm/throws-annot.scala b/test/files/jvm/throws-annot.scala index 90b58b99767c..0adeff494c9f 100644 --- a/test/files/jvm/throws-annot.scala +++ b/test/files/jvm/throws-annot.scala @@ -26,13 +26,13 @@ object TestThrows { def readNoEx(): Int } - def checkMethod(cls: Class[_], name: String) { + def checkMethod(cls: Class[_], name: String): Unit = { val method = cls.getMethod(name) println(name + " throws: " + method.getExceptionTypes.mkString("", ", ", "")) println(name + " annotations: " + method.getDeclaredAnnotations.mkString("", ", ", "")) } - def run(cls: Class[_]) { + def run(cls: Class[_]): Unit = { checkMethod(cls, "read") checkMethod(cls, "readWith2") checkMethod(cls, "readMixed") @@ -66,7 +66,7 @@ object TL { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { TestThrows.run(classOf[TestThrows.Foo]) println("Testing 
mirror class") TestThrows.run(Class.forName("TL")) diff --git a/test/files/jvm/try-type-tests.scala b/test/files/jvm/try-type-tests.scala deleted file mode 100644 index b3926020f00b..000000000000 --- a/test/files/jvm/try-type-tests.scala +++ /dev/null @@ -1,188 +0,0 @@ -import scala.util.{Try, Success, Failure} - -// tests the basic combinators on Try -trait TryStandard { - - def testForeachSuccess(): Unit = { - val t = Success(1) - var res = 0 - t.foreach(x => res = x * 10) - assert(res == 10) - } - - def testForeachFailure(): Unit = { - val t = Failure(new Exception("foo")) - t.foreach(x => assert(false)) - } - - def testFlatMapSuccess(): Unit = { - val t = Success(1) - val n = t.flatMap(x => Try(x * 10)) - assert(n.get == 10) - } - - def testFlatMapFailure(): Unit = { - val t = Failure(new Exception("foo")) - val n = t.flatMap{ x => assert(false); Try(()) } - } - - def testMapSuccess(): Unit = { - val t = Success(1) - val n = t.map(x => x * 10) - assert(n.get == 10) - } - - def testMapFailure(): Unit = { - val t = Failure(new Exception("foo")) - val n = t.map(x => assert(false)) - } - - def testFilterSuccessTrue(): Unit = { - val t = Success(1) - val n = t.filter(x => x > 0) - assert(n.get == 1) - } - - def testFilterSuccessFalse(): Unit = { - val t = Success(1) - val n = t.filter(x => x < 0) - n match { - case Success(v) => assert(false) - case Failure(e: NoSuchElementException) => assert(true) - case _ => assert(false) - } - } - - def testFilterFailure(): Unit = { - val t = Failure(new Exception("foo")) - val n = t.filter{ x => assert(false); true } - } - - def testRescueSuccess(): Unit = { - val t = Success(1) - t.recoverWith{ case x => assert(false); Try(()) } - } - - def testRescueFailure(): Unit = { - val t = Failure(new Exception("foo")) - val n = t.recoverWith{ case x => Try(1) } - assert(n.get == 1) - } - - def testRecoverSuccess(): Unit = { - val t = Success(1) - t.recover{ case x => assert(false); 99 } - } - - def testRecoverFailure(): Unit = { - val 
t = Failure(new Exception("foo")) - val n = t.recover{ case x => 1 } - assert(n.get == 1) - } - - def testFlattenSuccess(): Unit = { - val f = Failure(new Exception("foo")) - val t = Success(f) - assert(t.flatten == f) - } - - def testFailedSuccess(): Unit = { - val t = Success(1) - val n = t.failed - n match { - case Failure(e: UnsupportedOperationException) => assert(true) - case _ => assert(false) - } - } - - def testFailedFailure(): Unit = { - val t = Failure(new Exception("foo")) - val n = t.failed - n match { - case Success(e: Exception) => assert(true) - case _ => assert(false) - } - } - - def testSuccessTransform(): Unit = { - val s = Success(1) - val succ = (x: Int) => Success(x * 10) - val fail = (x: Throwable) => Success(0) - assert(s.transform(succ, fail).get == 10) - } - - def testFailureTransform(): Unit = { - val f = Failure(new Exception("foo")) - val succ = (x: Int) => Success(x * 10) - val fail = (x: Throwable) => Success(0) - assert(f.transform(succ, fail).get == 0) - } - - def testSuccessEither(): Unit = { - val t = Success(1) - assert(t.toEither.isRight) - } - - def testFailureEither(): Unit = { - val t = Failure(new Exception("foo")) - assert(t.toEither.isLeft) - } - - def testFoldSuccess(): Unit = { - val t = Success(1) - val res = t.fold("Throws " + _, "Returns " + _) - assert(res == "Returns 1") - } - - def testFoldFailure(): Unit = { - val t = Failure(new Exception("foo")) - val res = t.fold("Throws " + _, "Returns " + _) - assert(res == "Throws java.lang.Exception: foo") - } - - def testFoldSuccessFailure(): Unit = { - val t = Success(1) - val res = t.fold("Throws " + _, _ => throw new Exception("foo")) - assert(res == "Throws java.lang.Exception: foo") - } - - def testFoldFailureFailure(): Unit = { - val t = Failure(new Exception("foo")) - val res = try { - t.fold(_ => throw new Exception("bar"), "Returns " + _) - } catch { - case e: Throwable => "Throws " + e - } - assert(res == "Throws java.lang.Exception: bar") - } - - 
testForeachSuccess() - testForeachFailure() - testFlatMapSuccess() - testFlatMapFailure() - testMapSuccess() - testMapFailure() - testFilterSuccessTrue() - testFilterSuccessFalse() - testFilterFailure() - testRescueSuccess() - testRescueFailure() - testRecoverSuccess() - testRecoverFailure() - testFlattenSuccess() - testFailedSuccess() - testFailedFailure() - testSuccessTransform() - testFailureTransform() - testSuccessEither() - testFailureEither() - testFoldSuccess() - testFoldFailure() - testFoldSuccessFailure() -} - -object Test -extends App -with TryStandard { - System.exit(0) -} diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala index b6862bb116d2..5bf9351433d7 100644 --- a/test/files/jvm/typerep.scala +++ b/test/files/jvm/typerep.scala @@ -43,13 +43,13 @@ object testPrimitives { println(getType(16.toByte)) println(getType('a')) println(getType(3)) - println(getType(3l)) + println(getType(3L)) println(getType(0.0f)) println(getType(0.0d)) println(getType("abc")) println(getType(())) // Unit println(getType(classOf[Int])) // Class - println + println() } object testOptions { @@ -62,7 +62,7 @@ object testOptions { println(getType(None: Option[Int])) val y: Option[Int] = None println(getType(y)) - println + println() } object testLists { @@ -71,7 +71,7 @@ object testLists { println(getType(List(List(3)))) println(getType(Nil: List[Int])) println(getType(List(1, "abc"))) - println + println() } object testArrays { @@ -81,7 +81,7 @@ object testArrays { println(getType(List(1).toArray)) println(getType(List[Int]().toArray)) println(getType(Array(3).drop(1).toArray)) // empty - println + println() } object testTuples { @@ -90,7 +90,7 @@ object testTuples { println(getType(((3, "abc"), (4, "xyz")))) println(getType(((Some('b'), 3), (Some('a'), 4)))) //println(getType(((Some('b'), 3), (None, 4)))) - println + println() } object testFuncs { @@ -109,7 +109,7 @@ object testFuncs { def f5(f: Int => Int, x: Int) = f(x) println(getType(f5 _)) 
println(getType(f5(f1, 1))) - println + println() } class Foo { @@ -135,12 +135,12 @@ object testClasses { val foo2 = new Foo println(getType(foo2)) println(getType(new foo2.Bar(1))) - println + println() println(getType(pkg1.c1)) val c1 = new pkg1.C1 println(getType(c1)) - println + println() */ } diff --git a/test/files/jvm/unittest_io_Jvm.scala b/test/files/jvm/unittest_io_Jvm.scala index 7c8ef131bc30..e9b4e01ecec0 100644 --- a/test/files/jvm/unittest_io_Jvm.scala +++ b/test/files/jvm/unittest_io_Jvm.scala @@ -1,14 +1,14 @@ import scala.io.Source object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val lines = Source.fromString( """| |This is a file |it is split on several lines. | |isn't it? - |""".stripMargin).getLines.toList + |""".stripMargin).getLines().toList println("lines.size = " + lines.size) lines.foreach(println) } diff --git a/test/files/jvm/unreachable.check b/test/files/jvm/unreachable.check deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/test/files/jvm/unreachable/Foo_1.check b/test/files/jvm/unreachable/Foo_1.check deleted file mode 100644 index 57824245009d..000000000000 --- a/test/files/jvm/unreachable/Foo_1.check +++ /dev/null @@ -1,36 +0,0 @@ -java.lang.ClassNotFoundException: Test - at java.net.URLClassLoader.findClass(URLClassLoader.java:387) - at java.lang.ClassLoader.loadClass(ClassLoader.java:418) - at java.lang.ClassLoader.loadClass(ClassLoader.java:351) - at scala.tools.partest.nest.Runner.$anonfun$execTestInProcess$2(Runner.scala:252) - at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62) - at scala.Console$.withOut(Console.scala:167) - at scala.tools.partest.nest.StreamCapture$.$anonfun$capturingOutErr$2(StreamCapture.scala:44) - at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62) - at scala.Console$.withErr(Console.scala:196) - at scala.tools.partest.nest.StreamCapture$.$anonfun$capturingOutErr$1(StreamCapture.scala:43) - at 
scala.tools.partest.nest.StreamCapture$.savingSystem(StreamCapture.scala:22) - at scala.tools.partest.nest.StreamCapture$.capturingOutErr(StreamCapture.scala:38) - at scala.tools.partest.nest.Runner.$anonfun$execTestInProcess$1(Runner.scala:251) - at scala.tools.partest.nest.StreamCapture$.withExtraProperties(StreamCapture.scala:68) - at scala.tools.partest.nest.Runner.run$2(Runner.scala:247) - at scala.tools.partest.nest.Runner.$anonfun$execTestInProcess$3(Runner.scala:274) - at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) - at scala.tools.partest.nest.TrapExit$.apply(TrapExit.scala:28) - at scala.tools.partest.nest.Runner.execTestInProcess(Runner.scala:274) - at scala.tools.partest.nest.Runner.exec$1(Runner.scala:703) - at scala.tools.partest.nest.Runner.$anonfun$runRunTest$1(Runner.scala:705) - at scala.tools.partest.TestState.andAlso(TestState.scala:33) - at scala.tools.partest.nest.Runner.$anonfun$runTestCommon$1(Runner.scala:605) - at scala.tools.partest.nest.Runner.runInContext(Runner.scala:439) - at scala.tools.partest.nest.Runner.runTestCommon(Runner.scala:605) - at scala.tools.partest.nest.Runner.runRunTest(Runner.scala:705) - at scala.tools.partest.nest.Runner.run(Runner.scala:694) - at scala.tools.partest.nest.AbstractRunner.liftedTree1$1(AbstractRunner.scala:317) - at scala.tools.partest.nest.AbstractRunner.runTest(AbstractRunner.scala:317) - at scala.tools.partest.nest.AbstractRunner.$anonfun$runTestsForFiles$2(AbstractRunner.scala:342) - at scala.tools.partest.package$$anon$2.call(package.scala:141) - at java.util.concurrent.FutureTask.run(FutureTask.java:266) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) - at java.lang.Thread.run(Thread.java:750) diff --git a/test/files/jvm/unreachable/Foo_1.scala b/test/files/jvm/unreachable/Foo_1.scala index 0f95941db05f..732b0834cca7 100644 --- 
a/test/files/jvm/unreachable/Foo_1.scala +++ b/test/files/jvm/unreachable/Foo_1.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:default +//> using options -opt:default import scala.sys.error class Foo_1 { @@ -9,14 +9,14 @@ class Foo_1 { def unreachableIf: Int = { return 42 - if (util.Random.nextInt % 2 == 0) + if (util.Random.nextInt() % 2 == 0) 0 else 1 } def unreachableIfBranches: Int = { - if (util.Random.nextInt % 2 == 0) + if (util.Random.nextInt() % 2 == 0) return 42 else return 42 @@ -25,14 +25,14 @@ class Foo_1 { } def unreachableOneLegIf: Int = { - if (util.Random.nextInt % 2 == 0) + if (util.Random.nextInt() % 2 == 0) return 42 return 42 } def unreachableLeftBranch: Int = { - val result = if (util.Random.nextInt % 2 == 0) + val result = if (util.Random.nextInt() % 2 == 0) return 42 else 42 @@ -41,7 +41,7 @@ class Foo_1 { } def unreachableRightBranch: Int = { - val result = if (util.Random.nextInt % 2 == 0) + val result = if (util.Random.nextInt() % 2 == 0) 42 else return 42 @@ -92,7 +92,7 @@ class Foo_1 { def unreachableSwitch: Int = { return 42 - val x = util.Random.nextInt % 2 + val x = util.Random.nextInt() % 2 x match { case 0 => return 0 case 1 => return 1 @@ -102,7 +102,7 @@ class Foo_1 { } def unreachableAfterSwitch: Int = { - val x = util.Random.nextInt % 2 + val x = util.Random.nextInt() % 2 x match { case 0 => return 42 case 1 => return 41 + x diff --git a/test/files/jvm/unreachable/Test.scala b/test/files/jvm/unreachable/Test.scala index 4c0fcb2ae82d..65bc95e7c0d6 100644 --- a/test/files/jvm/unreachable/Test.scala +++ b/test/files/jvm/unreachable/Test.scala @@ -20,4 +20,4 @@ object Test extends BytecodeTest { (node.getOpcode == opcode) insnList.iterator.asScala.count(isNop) } -} \ No newline at end of file +} diff --git a/test/files/jvm/varargs-separate-bytecode/Props_2.scala b/test/files/jvm/varargs-separate-bytecode/Props_2.scala index 3fc09586fc84..ff4fb8970c9a 100644 --- a/test/files/jvm/varargs-separate-bytecode/Props_2.scala +++ 
b/test/files/jvm/varargs-separate-bytecode/Props_2.scala @@ -1,3 +1,3 @@ import foo.AbstractProps -class Props extends AbstractProps \ No newline at end of file +class Props extends AbstractProps diff --git a/test/files/jvm/varargs-separate-bytecode/Test.scala b/test/files/jvm/varargs-separate-bytecode/Test.scala index a666de7f39dc..1eb3cbd1b430 100644 --- a/test/files/jvm/varargs-separate-bytecode/Test.scala +++ b/test/files/jvm/varargs-separate-bytecode/Test.scala @@ -6,7 +6,7 @@ import scala.tools.partest.BytecodeTest object Test extends BytecodeTest { def show: Unit = { val classNode = loadClassNode("Props") - val methods = classNode.methods.iterator().asScala.filter( m => m.name == "create") + val methods = classNode.methods.iterator.asScala.filter( m => m.name == "create") for (m <- methods if (m.access & Opcodes.ACC_VARARGS) > 0) { println(s"Found vararg overload for method ${m.name}") diff --git a/test/files/jvm/varargs.check b/test/files/jvm/varargs.check index 986f98896aff..225a8d0177b0 100644 --- a/test/files/jvm/varargs.check +++ b/test/files/jvm/varargs.check @@ -2,3 +2,4 @@ 10 19 a +33 diff --git a/test/files/jvm/varargs/JavaClass.java b/test/files/jvm/varargs/JavaClass.java index 35adcff850ba..350a3e52e00c 100644 --- a/test/files/jvm/varargs/JavaClass.java +++ b/test/files/jvm/varargs/JavaClass.java @@ -8,5 +8,6 @@ public static void callSomeAnnotations() { varargz(5, 1.0, 2.0, 3.0); va.vt(16, "", "", ""); System.out.println(va.vt1(16, "a", "b", "c")); + System.out.println(VaClass.vip(33, 22, 45)); } } diff --git a/test/files/jvm/varargs/VaClass.scala b/test/files/jvm/varargs/VaClass.scala index ee8c288a16ab..8789b905df7f 100644 --- a/test/files/jvm/varargs/VaClass.scala +++ b/test/files/jvm/varargs/VaClass.scala @@ -6,3 +6,7 @@ class VaClass { @varargs def vt[T](a: Int, b: T*) = println(a + b.length) @varargs def vt1[T](a: Int, b: T*): T = b.head } +object VaClass { + // scala/bug#11057 + @varargs def vip(i: Int*) = i.head +} diff --git 
a/test/files/jvm/varargs/varargs.scala b/test/files/jvm/varargs/varargs.scala index b09818f46f27..8f49579a5e13 100644 --- a/test/files/jvm/varargs/varargs.scala +++ b/test/files/jvm/varargs/varargs.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { JavaClass.callSomeAnnotations } } diff --git a/test/files/jvm/xml05.check b/test/files/jvm/xml05.check deleted file mode 100644 index d456c5e1e4e6..000000000000 --- a/test/files/jvm/xml05.check +++ /dev/null @@ -1,5 +0,0 @@ - -scala> -res0: scala.xml.Elem = - -scala> :quit diff --git a/test/files/jvm/xml05.scala b/test/files/jvm/xml05.scala deleted file mode 100644 index 52ae2553938d..000000000000 --- a/test/files/jvm/xml05.scala +++ /dev/null @@ -1,7 +0,0 @@ -import scala.tools.partest.ReplTest - -object Test extends ReplTest { - def code = """ - - """ -} \ No newline at end of file diff --git a/test/files/neg/FooMapView.check b/test/files/neg/FooMapView.check new file mode 100644 index 000000000000..e6757c079143 --- /dev/null +++ b/test/files/neg/FooMapView.check @@ -0,0 +1,6 @@ +FooMapView.scala:6: warning: overriding method stringPrefix in trait Iterable is deprecated (since 2.13.0): Override className instead + override def stringPrefix = "FooMapView" + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/FooMapView.scala b/test/files/neg/FooMapView.scala new file mode 100644 index 000000000000..103bf924246d --- /dev/null +++ b/test/files/neg/FooMapView.scala @@ -0,0 +1,7 @@ +//> using options -Xfatal-warnings -deprecation +// +class FooMapView extends collection.MapView[Int,Int] { + def iterator: Iterator[(Int,Int)] = ??? 
+ def get(key: Int) = None + override def stringPrefix = "FooMapView" +} diff --git a/test/files/neg/abstract-class-2.check b/test/files/neg/abstract-class-2.check index ca79dd8293c8..b89bc0531a94 100644 --- a/test/files/neg/abstract-class-2.check +++ b/test/files/neg/abstract-class-2.check @@ -1,5 +1,7 @@ -abstract-class-2.scala:11: error: object creation impossible, since method f in trait S2 of type (x: P2.this.p.S1)Int is not defined -(Note that P.this.p.S1 does not match P2.this.S1: their prefixes (i.e. enclosing instances) differ) +abstract-class-2.scala:11: error: object creation impossible. +Missing implementation for member of trait S2: + def f(x: P2.this.p.S1): Int = ??? // implements `def f(x: P.this.p.S1): Int`; P.this.p.S1 does not match P2.this.S1: their prefixes (i.e., enclosing instances) differ + object O2 extends S2 { ^ -one error found +1 error diff --git a/test/files/neg/abstract-class-error.check b/test/files/neg/abstract-class-error.check index 643b90167bcf..9e30ffd214fa 100644 --- a/test/files/neg/abstract-class-error.check +++ b/test/files/neg/abstract-class-error.check @@ -1,5 +1,7 @@ -S.scala:1: error: class S needs to be abstract, since method g in class J of type (y: Int, z: java.util.List)Int is not defined -(Note that java.util.List does not match java.util.List[String]. To implement this raw type, use java.util.List[_]) +S.scala:1: error: class S needs to be abstract. +Missing implementation for member of class J: + def g(y: Int, z: java.util.List[_]): Int = ??? // implements `def g(y: Int, z: java.util.List): Int`; java.util.List does not match java.util.List[String]. 
To implement this raw type, use java.util.List[_] + class S extends J { ^ -one error found +1 error diff --git a/test/files/neg/abstract-concrete-methods.check b/test/files/neg/abstract-concrete-methods.check index e128f77e2654..da9dd76e90e2 100644 --- a/test/files/neg/abstract-concrete-methods.check +++ b/test/files/neg/abstract-concrete-methods.check @@ -1,5 +1,7 @@ -abstract-concrete-methods.scala:7: error: class Outer2 needs to be abstract, since method score in trait Outer of type (i: Outer2#Inner)Double is not defined -(Note that This#Inner does not match Outer2#Inner: class Inner in class Outer2 is a subclass of trait Inner in trait Outer, but method parameter types must match exactly.) +abstract-concrete-methods.scala:7: error: class Outer2 needs to be abstract. +Missing implementation for member of trait Outer: + def score(i: Outer2#Inner): Double = ??? // implements `def score(i: This#Inner): Double`; This#Inner does not match Outer2#Inner: class Inner in class Outer2 is a subclass of trait Inner in trait Outer, but method parameter types must match exactly. + class Outer2 extends Outer[Outer2] { ^ -one error found +1 error diff --git a/test/files/neg/abstract-explaintypes.check b/test/files/neg/abstract-explaintypes.check index 8bf8f10320b9..1d1304b83c86 100644 --- a/test/files/neg/abstract-explaintypes.check +++ b/test/files/neg/abstract-explaintypes.check @@ -1,15 +1,15 @@ -abstract-explaintypes.scala:7: error: type mismatch; +abstract-explaintypes.scala:8: error: type mismatch; found : A required: A.this.T def foo2: T = bar().baz(); ^ A <: A.this.T? false -abstract-explaintypes.scala:10: error: type mismatch; +abstract-explaintypes.scala:11: error: type mismatch; found : A required: A.this.T def foo5: T = baz().baz(); ^ A <: A.this.T? 
false -two errors found +2 errors diff --git a/test/files/neg/abstract-explaintypes.scala b/test/files/neg/abstract-explaintypes.scala index bf6b3a6ea363..2b2d3e6ad9c1 100644 --- a/test/files/neg/abstract-explaintypes.scala +++ b/test/files/neg/abstract-explaintypes.scala @@ -1,4 +1,5 @@ -// scalac: -explaintypes +//> using options -explaintypes +// trait A { type T <: A; def baz(): A; diff --git a/test/files/neg/abstract-inaccessible.check b/test/files/neg/abstract-inaccessible.check index e5f3917b7049..62511219f707 100644 --- a/test/files/neg/abstract-inaccessible.check +++ b/test/files/neg/abstract-inaccessible.check @@ -1,15 +1,15 @@ -abstract-inaccessible.scala:6: warning: method implementMe in trait YourTrait references private[foo] trait Bippy. +abstract-inaccessible.scala:7: warning: method implementMe in trait YourTrait references private[foo] trait Bippy. Classes which cannot access Bippy may be unable to provide a concrete implementation of implementMe. def implementMe(f: Int => (String, Bippy)): Unit ^ -abstract-inaccessible.scala:7: warning: method overrideMe in trait YourTrait references private[foo] trait Bippy. +abstract-inaccessible.scala:8: warning: method overrideMe in trait YourTrait references private[foo] trait Bippy. Classes which cannot access Bippy may be unable to override overrideMe. def overrideMe[T <: Bippy](x: T): T = x ^ -abstract-inaccessible.scala:8: warning: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy. +abstract-inaccessible.scala:9: warning: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy. Classes which cannot access Bippy may be unable to override overrideMeAlso. def overrideMeAlso(x: Map[Int, Set[Bippy]]) = x.keys.head ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/abstract-inaccessible.scala b/test/files/neg/abstract-inaccessible.scala index fc67770e3d84..006ccc84a407 100644 --- a/test/files/neg/abstract-inaccessible.scala +++ b/test/files/neg/abstract-inaccessible.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Xlint:inaccessible +//> using options -Xfatal-warnings -Xlint:inaccessible +// package foo { private[foo] trait Bippy { } diff --git a/test/files/neg/abstract-report.check b/test/files/neg/abstract-report.check index 1ffeac060bd3..800b049ab4cc 100644 --- a/test/files/neg/abstract-report.check +++ b/test/files/neg/abstract-report.check @@ -1,24 +1,15 @@ -abstract-report.scala:1: error: class Unimplemented needs to be abstract, since: -it has 12 unimplemented members. -/** As seen from class Unimplemented, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ - // Members declared in scala.collection.GenTraversableOnce - def isTraversableAgain: Boolean = ??? - def toIterator: Iterator[String] = ??? - def toStream: Stream[String] = ??? +abstract-report.scala:1: error: class Unimplemented needs to be abstract. +Missing implementations for 6 members. + // Members declared in scala.collection.IterableOnce + def iterator: Iterator[String] = ??? // implements `def iterator: Iterator[A]` - // Members declared in scala.collection.TraversableOnce - def copyToArray[B >: String](xs: Array[B],start: Int,len: Int): Unit = ??? - def exists(p: String => Boolean): Boolean = ??? - def find(p: String => Boolean): Option[String] = ??? - def forall(p: String => Boolean): Boolean = ??? - def foreach[U](f: String => U): Unit = ??? - def hasDefiniteSize: Boolean = ??? - def isEmpty: Boolean = ??? - def seq: scala.collection.TraversableOnce[String] = ??? - def toTraversable: Traversable[String] = ??? + // Members declared in scala.collection.IterableOps + protected def coll: List[String] = ??? 
// implements `protected def coll: C` + protected def fromSpecific(coll: scala.collection.IterableOnce[String]): List[String] = ??? // implements `protected def fromSpecific(coll: scala.collection.IterableOnce[A @scala.annotation.unchecked.uncheckedVariance]): C` + def iterableFactory: scala.collection.IterableFactory[List] = ??? // implements `def iterableFactory: scala.collection.IterableFactory[CC]` + protected def newSpecificBuilder: scala.collection.mutable.Builder[String,List[String]] = ??? // implements `protected def newSpecificBuilder: scala.collection.mutable.Builder[A @scala.annotation.unchecked.uncheckedVariance,C]` + def toIterable: Iterable[String] = ??? // implements `def toIterable: Iterable[A]` -class Unimplemented extends TraversableOnce[String] { } +class Unimplemented extends scala.collection.IterableOps[String, List, List[String]] ^ -one error found +1 error diff --git a/test/files/neg/abstract-report.scala b/test/files/neg/abstract-report.scala index 538e09354777..fd4b5b1dce60 100644 --- a/test/files/neg/abstract-report.scala +++ b/test/files/neg/abstract-report.scala @@ -1 +1 @@ -class Unimplemented extends TraversableOnce[String] { } \ No newline at end of file +class Unimplemented extends scala.collection.IterableOps[String, List, List[String]] diff --git a/test/files/neg/abstract-report2.check b/test/files/neg/abstract-report2.check index 5090dae704b7..f590abd5941b 100644 --- a/test/files/neg/abstract-report2.check +++ b/test/files/neg/abstract-report2.check @@ -1,16 +1,13 @@ -abstract-report2.scala:3: error: class Foo needs to be abstract, since: -it has 13 unimplemented members. -/** As seen from class Foo, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ - def add(x$1: Int): Boolean = ??? - def addAll(x$1: java.util.Collection[_ <: Int]): Boolean = ??? +abstract-report2.scala:3: error: class Foo needs to be abstract. +Missing implementations for 13 members of trait Collection. 
+ def add(x$1: Int): Boolean = ??? // implements `def add(x$1: E): Boolean` + def addAll(x$1: java.util.Collection[_ <: Int]): Boolean = ??? // implements `def addAll(x$1: java.util.Collection[_ <: E]): Boolean` def clear(): Unit = ??? - def contains(x$1: Any): Boolean = ??? + def contains(x$1: Object): Boolean = ??? def containsAll(x$1: java.util.Collection[_]): Boolean = ??? def isEmpty(): Boolean = ??? - def iterator(): java.util.Iterator[Int] = ??? - def remove(x$1: Any): Boolean = ??? + def iterator(): java.util.Iterator[Int] = ??? // implements `def iterator(): java.util.Iterator[E]` + def remove(x$1: Object): Boolean = ??? def removeAll(x$1: java.util.Collection[_]): Boolean = ??? def retainAll(x$1: java.util.Collection[_]): Boolean = ??? def size(): Int = ??? @@ -19,19 +16,16 @@ it has 13 unimplemented members. class Foo extends Collection[Int] ^ -abstract-report2.scala:5: error: class Bar needs to be abstract, since: -it has 13 unimplemented members. -/** As seen from class Bar, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ - def add(x$1: List[_ <: String]): Boolean = ??? - def addAll(x$1: java.util.Collection[_ <: List[_ <: String]]): Boolean = ??? +abstract-report2.scala:5: error: class Bar needs to be abstract. +Missing implementations for 13 members of trait Collection. + def add(x$1: List[_ <: String]): Boolean = ??? // implements `def add(x$1: E): Boolean` + def addAll(x$1: java.util.Collection[_ <: List[_ <: String]]): Boolean = ??? // implements `def addAll(x$1: java.util.Collection[_ <: E]): Boolean` def clear(): Unit = ??? - def contains(x$1: Any): Boolean = ??? + def contains(x$1: Object): Boolean = ??? def containsAll(x$1: java.util.Collection[_]): Boolean = ??? def isEmpty(): Boolean = ??? - def iterator(): java.util.Iterator[List[_ <: String]] = ??? - def remove(x$1: Any): Boolean = ??? + def iterator(): java.util.Iterator[List[_ <: String]] = ??? 
// implements `def iterator(): java.util.Iterator[E]` + def remove(x$1: Object): Boolean = ??? def removeAll(x$1: java.util.Collection[_]): Boolean = ??? def retainAll(x$1: java.util.Collection[_]): Boolean = ??? def size(): Int = ??? @@ -40,19 +34,16 @@ it has 13 unimplemented members. class Bar extends Collection[List[_ <: String]] ^ -abstract-report2.scala:7: error: class Baz needs to be abstract, since: -it has 13 unimplemented members. -/** As seen from class Baz, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ - def add(x$1: T): Boolean = ??? - def addAll(x$1: java.util.Collection[_ <: T]): Boolean = ??? +abstract-report2.scala:7: error: class Baz needs to be abstract. +Missing implementations for 13 members of trait Collection. + def add(x$1: T): Boolean = ??? // implements `def add(x$1: E): Boolean` + def addAll(x$1: java.util.Collection[_ <: T]): Boolean = ??? // implements `def addAll(x$1: java.util.Collection[_ <: E]): Boolean` def clear(): Unit = ??? - def contains(x$1: Any): Boolean = ??? + def contains(x$1: Object): Boolean = ??? def containsAll(x$1: java.util.Collection[_]): Boolean = ??? def isEmpty(): Boolean = ??? - def iterator(): java.util.Iterator[T] = ??? - def remove(x$1: Any): Boolean = ??? + def iterator(): java.util.Iterator[T] = ??? // implements `def iterator(): java.util.Iterator[E]` + def remove(x$1: Object): Boolean = ??? def removeAll(x$1: java.util.Collection[_]): Boolean = ??? def retainAll(x$1: java.util.Collection[_]): Boolean = ??? def size(): Int = ??? @@ -61,48 +52,36 @@ it has 13 unimplemented members. class Baz[T] extends Collection[T] ^ -abstract-report2.scala:21: error: class Dingus needs to be abstract, since: -it has 27 unimplemented members. -/** As seen from class Dingus, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ - // Members declared in java.util.Collection - def add(x$1: String): Boolean = ??? 
- def addAll(x$1: java.util.Collection[_ <: String]): Boolean = ??? - def clear(): Unit = ??? - def contains(x$1: Any): Boolean = ??? - def containsAll(x$1: java.util.Collection[_]): Boolean = ??? - def iterator(): java.util.Iterator[String] = ??? - def remove(x$1: Any): Boolean = ??? - def removeAll(x$1: java.util.Collection[_]): Boolean = ??? - def retainAll(x$1: java.util.Collection[_]): Boolean = ??? - def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ??? - def toArray(): Array[Object] = ??? - - // Members declared in scala.collection.GenTraversableOnce - def isTraversableAgain: Boolean = ??? - def toIterator: Iterator[(Set[Int], String)] = ??? - def toStream: Stream[(Set[Int], String)] = ??? +abstract-report2.scala:21: error: class Dingus needs to be abstract. +Missing implementations for 7 members. + // Members declared in scala.collection.IterableOnce + def iterator: Iterator[(Set[Int], String)] = ??? // implements `def iterator: Iterator[A]` - // Members declared in Symbolic - def --!(i: Int): Unit = ??? - def --? : Int = ??? - def unary_~ : Long = ??? - - // Members declared in scala.collection.TraversableOnce - def copyToArray[B >: (Set[Int], String)](xs: Array[B],start: Int,len: Int): Unit = ??? - def exists(p: ((Set[Int], String)) => Boolean): Boolean = ??? - def find(p: ((Set[Int], String)) => Boolean): Option[(Set[Int], String)] = ??? - def forall(p: ((Set[Int], String)) => Boolean): Boolean = ??? - def foreach[U](f: ((Set[Int], String)) => U): Unit = ??? - def hasDefiniteSize: Boolean = ??? - def isEmpty: Boolean = ??? - def seq: scala.collection.TraversableOnce[(Set[Int], String)] = ??? - def toTraversable: Traversable[(Set[Int], String)] = ??? + // Members declared in scala.collection.IterableOps + protected def coll: List[(Set[Int], String)] = ??? // implements `protected def coll: C` + protected def fromSpecific(coll: scala.collection.IterableOnce[(Set[Int], String)]): List[(Set[Int], String)] = ??? 
// implements `protected def fromSpecific(coll: scala.collection.IterableOnce[A @scala.annotation.unchecked.uncheckedVariance]): C` + def iterableFactory: scala.collection.IterableFactory[List] = ??? // implements `def iterableFactory: scala.collection.IterableFactory[CC]` + protected def newSpecificBuilder: scala.collection.mutable.Builder[(Set[Int], String),List[(Set[Int], String)]] = ??? // implements `protected def newSpecificBuilder: scala.collection.mutable.Builder[A @scala.annotation.unchecked.uncheckedVariance,C]` + def toIterable: Iterable[(Set[Int], String)] = ??? // implements `def toIterable: Iterable[A]` // Members declared in Xyz - def foo(x: List[Int]): Boolean = ??? + def foo(x: List[Int]): Boolean = ??? // implements `def foo(x: T): Boolean` + +class Dingus extends Bippy[String, Set[Int], List[Int]] + ^ +abstract-report2.scala:23: error: class JustOne needs to be abstract. +Missing implementation for member of trait Collection: + def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ??? -class Dingus extends Bippy[String, Set[Int], List[Int]] with Symbolic +class JustOne extends Collection[Int] { + ^ +abstract-report2.scala:47: error: class C needs to be a mixin. +abstract override def t(): Int (defined in trait T) is marked `abstract` and `override`, but no concrete implementation could be found in a base class +class C extends T // refchecks + ^ +abstract-report2.scala:50: error: class D needs to be a mixin. 
+abstract override def t(): Int (defined in trait W) is marked `abstract` and `override` and overrides incomplete superclass member +abstract override def t(): Int (defined in trait V) +class D extends W ^ -four errors found +7 errors diff --git a/test/files/neg/abstract-report2.scala b/test/files/neg/abstract-report2.scala index a583238d2bc7..c394224dadb7 100644 --- a/test/files/neg/abstract-report2.scala +++ b/test/files/neg/abstract-report2.scala @@ -16,6 +16,35 @@ trait Symbolic { def unary_~ : Long } -trait Bippy[T1, T2, T3] extends Collection[T1] with TraversableOnce[(T2, String)] with Xyz[T3] +trait Bippy[T1, T2, T3] extends collection.IterableOps[(T2, String), List, List[(T2, String)]] with Xyz[T3] -class Dingus extends Bippy[String, Set[Int], List[Int]] with Symbolic \ No newline at end of file +class Dingus extends Bippy[String, Set[Int], List[Int]] + +class JustOne extends Collection[Int] { + def add(x$1: Int): Boolean = ??? + def addAll(x$1: java.util.Collection[_ <: Int]): Boolean = ??? + def clear(): Unit = ??? + def contains(x$1: Object): Boolean = ??? + def containsAll(x$1: java.util.Collection[_]): Boolean = ??? + def isEmpty(): Boolean = ??? + def iterator(): java.util.Iterator[Int] = ??? + def remove(x$1: Object): Boolean = ??? + def removeAll(x$1: java.util.Collection[_]): Boolean = ??? + def retainAll(x$1: java.util.Collection[_]): Boolean = ??? + def size(): Int = ??? + //def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ??? + def toArray(): Array[Object] = ??? +} +/* was: +test/files/neg/abstract-report2.scala:23: error: class JustOne needs to be abstract. 
Missing implementation for: + def toArray[T](x$1: Array[T with Object]): Array[T with Object] // inherited from trait Collection +(Note that Array[T with Object] does not match java.util.function.IntFunction[Array[T with Object]]) +class JustOne extends Collection[Int] { + */ + +trait U { def t(): Int } +trait T extends U { abstract override def t(): Int = super.t() + 1 } +class C extends T // refchecks +trait V extends U { abstract override def t(): Int = super.t() + 1 } +trait W extends V { abstract override def t(): Int = super.t() + 1 } +class D extends W diff --git a/test/files/neg/abstract-report3.check b/test/files/neg/abstract-report3.check new file mode 100644 index 000000000000..4420d6c8b1bd --- /dev/null +++ b/test/files/neg/abstract-report3.check @@ -0,0 +1,4 @@ +abstract-report3.scala:4: error: Member method t of mixin trait T is missing a concrete super implementation. +abstract class C extends T // mixin + ^ +1 error diff --git a/test/files/neg/abstract-report3.scala b/test/files/neg/abstract-report3.scala new file mode 100644 index 000000000000..c5d3ea49b97c --- /dev/null +++ b/test/files/neg/abstract-report3.scala @@ -0,0 +1,4 @@ + +trait U { def t(): Int } +trait T extends U { abstract override def t(): Int = super.t() + 1 } +abstract class C extends T // mixin diff --git a/test/files/neg/abstract-vars.check b/test/files/neg/abstract-vars.check index 8aa47745f667..9610c97b68fc 100644 --- a/test/files/neg/abstract-vars.check +++ b/test/files/neg/abstract-vars.check @@ -1,21 +1,31 @@ -abstract-vars.scala:5: error: class Fail1 needs to be abstract, since variable x is not defined -(Note that variables need to be initialized to be defined) +abstract-vars.scala:5: error: class Fail1 needs to be abstract. +Missing implementation: + def x: Int = ??? 
// variables need to be initialized to be defined + class Fail1 extends A { ^ -abstract-vars.scala:9: error: class Fail2 needs to be abstract, since variable x in class A of type Int is not defined -(Note that variables need to be initialized to be defined) +abstract-vars.scala:9: error: class Fail2 needs to be abstract. +Missing implementation for member of class A: + def x: Int = ??? // variables need to be initialized to be defined + class Fail2 extends A { } ^ -abstract-vars.scala:11: error: class Fail3 needs to be abstract, since variable x in class A of type Int is not defined -(Note that an abstract var requires a setter in addition to the getter) +abstract-vars.scala:11: error: class Fail3 needs to be abstract. +Missing implementation for member of class A: + def x_=(x$1: Int): Unit = ??? // an abstract var requires a setter in addition to the getter + class Fail3 extends A { ^ -abstract-vars.scala:14: error: class Fail4 needs to be abstract, since variable x in class A of type Int is not defined -(Note that an abstract var requires a setter in addition to the getter) +abstract-vars.scala:14: error: class Fail4 needs to be abstract. +Missing implementation for member of class A: + def x_=(x$1: Int): Unit = ??? // an abstract var requires a setter in addition to the getter + class Fail4 extends A { ^ -abstract-vars.scala:18: error: class Fail5 needs to be abstract, since variable x in class A of type Int is not defined -(Note that an abstract var requires a getter in addition to the setter) +abstract-vars.scala:18: error: class Fail5 needs to be abstract. +Missing implementation for member of class A: + def x: Int = ??? 
// an abstract var requires a getter in addition to the setter + class Fail5 extends A { ^ -5 errors found +5 errors diff --git a/test/files/neg/abstract.check b/test/files/neg/abstract.check index 811708ccff4f..0c7f5e4a5938 100644 --- a/test/files/neg/abstract.check +++ b/test/files/neg/abstract.check @@ -8,4 +8,4 @@ abstract.scala:9: error: type mismatch; required: A.this.T def foo5: T = baz().baz(); ^ -two errors found +2 errors diff --git a/test/files/neg/abstraction-from-volatile-type-error.check b/test/files/neg/abstraction-from-volatile-type-error.check index 34ba0551a53d..3e6796d0cf1e 100644 --- a/test/files/neg/abstraction-from-volatile-type-error.check +++ b/test/files/neg/abstraction-from-volatile-type-error.check @@ -1,4 +1,4 @@ abstraction-from-volatile-type-error.scala:9: error: illegal abstraction from value with volatile type a.Tv val tv : a.Tv ^ -one error found +1 error diff --git a/test/files/neg/accesses.check b/test/files/neg/accesses.check index db58af12ce9e..3063f8c4c54b 100644 --- a/test/files/neg/accesses.check +++ b/test/files/neg/accesses.check @@ -1,17 +1,21 @@ -accesses.scala:23: error: overriding method f2 in class A of type ()Unit; - method f2 has weaker access privileges; it should not be private +accesses.scala:23: error: weaker access privileges in overriding +private[package p2] def f2(): Unit (defined in class A) + override should not be private private def f2(): Unit = () ^ -accesses.scala:24: error: overriding method f3 in class A of type ()Unit; - method f3 has weaker access privileges; it should be at least protected +accesses.scala:24: error: weaker access privileges in overriding +protected def f3(): Unit (defined in class A) + override should at least be protected private[p2] def f3(): Unit = () ^ -accesses.scala:25: error: overriding method f4 in class A of type ()Unit; - method f4 has weaker access privileges; it should be at least private[p1] +accesses.scala:25: error: weaker access privileges in overriding 
+private[package p1] def f4(): Unit (defined in class A) + override should at least be private[p1] private[p2] def f4(): Unit ^ -accesses.scala:26: error: overriding method f5 in class A of type ()Unit; - method f5 has weaker access privileges; it should be at least protected[p1] +accesses.scala:26: error: weaker access privileges in overriding +protected[package p1] def f5(): Unit (defined in class A) + override should at least be protected[p1] protected[p2] def f5(): Unit ^ -four errors found +4 errors diff --git a/test/files/neg/accesses2.check b/test/files/neg/accesses2.check index 66cf9a116e8f..f4aff8f61dfd 100644 --- a/test/files/neg/accesses2.check +++ b/test/files/neg/accesses2.check @@ -1,12 +1,17 @@ -accesses2.scala:6: error: overriding method f2 in class A of type ()Int; - method f2 has weaker access privileges; it should not be private +accesses2.scala:6: error: weaker access privileges in overriding +private[package p2] def f2(): Int (defined in class A) + override should not be private private def f2(): Int = 1 ^ -accesses2.scala:5: error: class B1 needs to be abstract, since method f2 in class A of type ()Int is not defined +accesses2.scala:5: error: class B1 needs to be abstract. +Missing implementation for member of class A: + private[package p2] def f2(): Int = ??? 
+ class B1 extends A { ^ -accesses2.scala:9: error: overriding method f2 in class A of type ()Int; - method f2 has weaker access privileges; it should not be private +accesses2.scala:9: error: weaker access privileges in overriding +private[package p2] def f2(): Int (defined in class A) + override should not be private private def f2(): Int = 1 ^ -three errors found +3 errors diff --git a/test/files/neg/adapt-to-any-member.check b/test/files/neg/adapt-to-any-member.check new file mode 100644 index 000000000000..6309622bb05e --- /dev/null +++ b/test/files/neg/adapt-to-any-member.check @@ -0,0 +1,6 @@ +adapt-to-any-member.scala:6: warning: conversion Elvis adds universal member method eq to type A + def f[A](x: A) = if (x eq null) 0 else 1 // warn + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/adapt-to-any-member.scala b/test/files/neg/adapt-to-any-member.scala new file mode 100644 index 000000000000..e4ecc81fceac --- /dev/null +++ b/test/files/neg/adapt-to-any-member.scala @@ -0,0 +1,14 @@ + +//> using options -Werror -Xlint:universal-methods + +class C { + import C._ + def f[A](x: A) = if (x eq null) 0 else 1 // warn + def g[A](x: A) = if (x.hashCode(0) == 0) 0 else 1 // nowarn +} +object C { + implicit class Elvis[A](alt: => A) { + def ?:(a: A): A = if (a.asInstanceOf[AnyRef] ne null) a else alt + def hashCode(seed: Int): Int = seed + } +} diff --git a/test/files/neg/aladdin1055.check b/test/files/neg/aladdin1055.check index b0e686184572..c9c116ad85b8 100644 --- a/test/files/neg/aladdin1055.check +++ b/test/files/neg/aladdin1055.check @@ -2,6 +2,6 @@ Test_1.scala:3: warning: match may not be exhaustive. It would fail on the following input: (_ : this.) def foo(t: A.T) = t match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/aladdin1055/A.scala b/test/files/neg/aladdin1055/A.scala index 79d915321d7a..862336e30cb1 100644 --- a/test/files/neg/aladdin1055/A.scala +++ b/test/files/neg/aladdin1055/A.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings object A { sealed trait T { def f: Int } class TT extends T { def f = 0 } diff --git a/test/files/neg/aladdin1055/Test_1.scala b/test/files/neg/aladdin1055/Test_1.scala index 4e2fb0c3ba14..4dd40df63666 100644 --- a/test/files/neg/aladdin1055/Test_1.scala +++ b/test/files/neg/aladdin1055/Test_1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings object Test { def foo(t: A.T) = t match { case a: A.TT => 0 diff --git a/test/files/neg/ambiguous-float-dots2.check b/test/files/neg/ambiguous-float-dots2.check index 40c9b4186d65..8cb65eb471e0 100644 --- a/test/files/neg/ambiguous-float-dots2.check +++ b/test/files/neg/ambiguous-float-dots2.check @@ -4,4 +4,4 @@ ambiguous-float-dots2.scala:3: error: identifier expected but '}' found. ambiguous-float-dots2.scala:11: error: ';' expected but integer literal found. 1. + 2 ^ -two errors found +2 errors diff --git a/test/files/neg/ambiguous-same.check b/test/files/neg/ambiguous-same.check deleted file mode 100644 index 58f4e60ece7f..000000000000 --- a/test/files/neg/ambiguous-same.check +++ /dev/null @@ -1,6 +0,0 @@ -ambiguous-same.scala:13: error: reference to x is ambiguous; -it is both defined in object X and imported subsequently by -import X.x - x - ^ -one error found diff --git a/test/files/neg/ambiguous-same.scala b/test/files/neg/ambiguous-same.scala deleted file mode 100644 index 50dba71f677f..000000000000 --- a/test/files/neg/ambiguous-same.scala +++ /dev/null @@ -1,15 +0,0 @@ - -// When faced with ambiguities between imports, -// an attempt is made to see if the imports intend -// identical types. -// -// Here, no attempt is made to notice that x -// names the same thing. 
-// -object X { - val x = 42 - def f = { - import X.x - x - } -} diff --git a/test/files/neg/and-future.check b/test/files/neg/and-future.check index 6e2ea02e49c5..c7992b38964e 100644 --- a/test/files/neg/and-future.check +++ b/test/files/neg/and-future.check @@ -4,4 +4,4 @@ and-future.scala:9: error: Cannot parse infix type combining `&` and `Map`, plea and-future.scala:13: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported ^ -two errors found +2 errors diff --git a/test/files/neg/and-future.scala b/test/files/neg/and-future.scala index 1092c013b186..01e821206798 100644 --- a/test/files/neg/and-future.scala +++ b/test/files/neg/and-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // trait X diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check deleted file mode 100644 index 58a13b10e9c3..000000000000 --- a/test/files/neg/annot-nonconst.check +++ /dev/null @@ -1,18 +0,0 @@ -annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n - @Length(n) def foo = "foo" - ^ -annot-nonconst.scala:7: error: annotation argument cannot be null - @Ann2(null) def bar = "bar" - ^ -annot-nonconst.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class Length(value: Int) extends annotation.ClassfileAnnotation - ^ -annot-nonconst.scala:2: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. 
-class Ann2(value: String) extends annotation.ClassfileAnnotation - ^ -two warnings found -two errors found diff --git a/test/files/neg/annot-nonconst.scala b/test/files/neg/annot-nonconst.scala deleted file mode 100644 index 1b5856f8b256..000000000000 --- a/test/files/neg/annot-nonconst.scala +++ /dev/null @@ -1,8 +0,0 @@ -class Length(value: Int) extends annotation.ClassfileAnnotation -class Ann2(value: String) extends annotation.ClassfileAnnotation - -object Test { - def n = 15 - @Length(n) def foo = "foo" - @Ann2(null) def bar = "bar" -} diff --git a/test/files/neg/annotated-literal-annotation-arg.check b/test/files/neg/annotated-literal-annotation-arg.check index e4d377d0ff5d..220ab9a992f3 100644 --- a/test/files/neg/annotated-literal-annotation-arg.check +++ b/test/files/neg/annotated-literal-annotation-arg.check @@ -4,4 +4,4 @@ annotated-literal-annotation-arg.scala:14: error: $foo annotated-literal-annotation-arg.scala:15: error: bar implicitly[Bar] ^ -two errors found +2 errors diff --git a/test/files/neg/annots-constant-neg.check b/test/files/neg/annots-constant-neg.check new file mode 100644 index 000000000000..5c0562f60f2a --- /dev/null +++ b/test/files/neg/annots-constant-neg.check @@ -0,0 +1,106 @@ +Test.scala:11: error: class Ann1 cannot have auxiliary constructors because it extends ConstantAnnotation + def this(s: String) = this(0) // err + ^ +Test.scala:13: error: class Ann2 needs to have exactly one argument list because it extends ConstantAnnotation +class Ann2(x: Int)(y: Int) extends ConstantAnnotation // err + ^ +Test.scala:27: error: annotation argument needs to be a constant; found: Test.this.nonConst + @JAnn(nonConst) def t3 = 0 // err + ^ +Test.scala:29: error: annotation JAnn is missing argument value + @JAnn() def t4 = 0 // err + ^ +Test.scala:32: error: arguments to Java annotations have to be supplied as named arguments + @JAnn(0, "") def t7 = 0 // err + ^ +Test.scala:32: error: arguments to Java annotations have to be supplied as 
named arguments + @JAnn(0, "") def t7 = 0 // err + ^ +Test.scala:32: error: annotation JAnn is missing argument value + @JAnn(0, "") def t7 = 0 // err + ^ +Test.scala:33: error: arguments to Java annotations have to be supplied as named arguments + @JAnn(0, a = "") def t8 = 0 // err + ^ +Test.scala:33: error: annotation JAnn is missing argument value + @JAnn(0, a = "") def t8 = 0 // err + ^ +Test.scala:36: error: annotation argument cannot be null + @JAnn(value = 0, a = null) def t10 = 0 // err + ^ +Test.scala:37: error: annotation argument needs to be a constant; found: Test.this.getClass() + @JAnn(value = 0, b = getClass) def t11 = 0 // err + ^ +Test.scala:38: error: Array constants have to be specified using the `Array(...)` factory method + @JAnn(value = 0, c = new Array(1)) def t12 = 0 // err + ^ +Test.scala:39: error: annotation argument cannot be null + @JAnn(value = 0, d = null) def t13 = 0 // err + ^ +Test.scala:40: error: annotation argument cannot be null + @JAnn(value = 0, d = null) def t14 = 0 // err + ^ +Test.scala:43: error: annotation argument needs to be a constant; found: java.lang.Integer.TYPE + @JAnn(value = 0, b = java.lang.Integer.TYPE) def t16 = 0 // err + ^ +Test.scala:48: error: nested classfile annotations must be defined in java; found: inline + @JAnn(value = 0, c = Array(new inline)) def t18 = 0 // err + ^ +Test.scala:52: error: annotation argument needs to be a constant; found: Test.this.nonConst + @Ann(nonConst) def u3 = 0 // err + ^ +Test.scala:54: error: not enough arguments for constructor Ann: (value: Int, a: String, b: Class[_], c: Array[Object]): Ann. +Unspecified value parameter value. 
+ @Ann() def u4 = 0 // err + ^ +Test.scala:61: error: annotation argument cannot be null + @Ann(value = 0, a = null) def u10 = 0 // err + ^ +Test.scala:62: error: annotation argument needs to be a constant; found: Test.this.getClass() + @Ann(value = 0, b = getClass) def u11 = 0 // err + ^ +Test.scala:63: error: Array constants have to be specified using the `Array(...)` factory method + @Ann(value = 0, c = new Array(1)) def u12 = 0 // err + ^ +Test.scala:66: error: annotation argument needs to be a constant; found: java.lang.Integer.TYPE + @Ann(value = 0, b = java.lang.Integer.TYPE) def u16 = 0 // err + ^ +Test.scala:69: error: Java annotation SuppressWarnings is abstract; cannot be instantiated +Error occurred in an application involving default arguments. + @Ann(value = 0, c = Array(new SuppressWarnings(value = Array("")))) def u17 = 0 // err + ^ +Test.scala:71: error: annotation argument needs to be a constant; found: new scala.inline() + @Ann(value = 0, c = Array(new inline)) def u18 = 0 // err + ^ +Test.scala:76: error: multiple constructors for Ann1 with alternatives: + (s: String)Ann1 + (value: Int)Ann1 + [which have no such parameter x] cannot be invoked with (x: String) + @Ann1(x = "") def v4 = 0 // err + ^ +Test.scala:78: error: Ann1 does not take parameters + @Ann1(0)(0) def v6 = 0 // err + ^ +Test.scala:79: error: not enough arguments for constructor Ann2: (x: Int)(y: Int): Ann2. +Unspecified value parameter x. + @Ann2 def v7 = 0 // err + ^ +Test.scala:80: error: missing argument list for constructor Ann2 in class Ann2 of type (x: Int)(y: Int): Ann2 + @Ann2(x = 0) def v8 = 0 // err + ^ +Test.scala:83: error: no arguments allowed for nullary constructor Ann3: (): Ann3 + @Ann3(0) def v11 = 0 // err + ^ +Test.scala:84: error: not enough arguments for constructor Ann4: (x: Int, value: Int): Ann4. +Unspecified value parameter value. 
+ @Ann4(0) def v12 = 0 + ^ +Test.scala:90: error: no arguments allowed for nullary constructor Ann5: (): Ann5 + @Ann5(0) def v18 = 0 // err + ^ +Test.scala:81: warning: Implementation limitation: multiple argument lists on annotations are +currently not supported; ignoring arguments List(0) + @Ann2(x = 0)(y = 0) def v9 = 0 // warn + ^ +1 warning +31 errors diff --git a/test/files/neg/annots-constant-neg/JAnn.java b/test/files/neg/annots-constant-neg/JAnn.java new file mode 100644 index 000000000000..609c220c4649 --- /dev/null +++ b/test/files/neg/annots-constant-neg/JAnn.java @@ -0,0 +1,10 @@ +import java.lang.annotation.*; + +public @interface JAnn { + int value(); + String a() default ""; + Class b() default String.class; + Object[] c() default {}; + SuppressWarnings d() default @SuppressWarnings({}); // nested annot + RetentionPolicy e() default RetentionPolicy.SOURCE; // enum +} diff --git a/test/files/neg/annots-constant-neg/Test.scala b/test/files/neg/annots-constant-neg/Test.scala new file mode 100644 index 000000000000..3eccee0fdc84 --- /dev/null +++ b/test/files/neg/annots-constant-neg/Test.scala @@ -0,0 +1,96 @@ +import scala.annotation._ +import java.lang.annotation._ + +class Ann( + value: Int, + a: String = "", + b: Class[_] = classOf[String], + c: Array[Object] = Array()) extends ConstantAnnotation + +class Ann1(value: Int = 1) extends ConstantAnnotation { + def this(s: String) = this(0) // err +} +class Ann2(x: Int)(y: Int) extends ConstantAnnotation // err +class Ann3 extends ConstantAnnotation +class Ann4(x: Int = 0, value: Int) extends ConstantAnnotation +class Ann5() extends ConstantAnnotation +class Ann6(x: Int) extends ConstantAnnotation // scala/bug#11724 +class Ann7[T](x: T) extends ConstantAnnotation // scala/bug#11724 +class Ann8(x: Int = Test.nonConst) extends ConstantAnnotation // defaults of `ConstantAnnotation` are not enforced to be constants + +object Test { + final val const = 1 + def nonConst = 2 + + @JAnn(0) def t1 = 0 + 
@JAnn(const + 1 + const) def t2 = 0 + @JAnn(nonConst) def t3 = 0 // err + + @JAnn() def t4 = 0 // err + @JAnn(value = 0) def t5 = 0 + @JAnn(value = 0, a = "slkdjf" + "mix") def t6 = 0 + @JAnn(0, "") def t7 = 0 // err + @JAnn(0, a = "") def t8 = 0 // err + + @JAnn(value = 0, a = "moin", b = classOf[Object], c = Array(""), d = new SuppressWarnings(value = Array("", "")), e = RetentionPolicy.CLASS) def t9 = 0 + @JAnn(value = 0, a = null) def t10 = 0 // err + @JAnn(value = 0, b = getClass) def t11 = 0 // err + @JAnn(value = 0, c = new Array(1)) def t12 = 0 // err + @JAnn(value = 0, d = null) def t13 = 0 // err + @JAnn(value = 0, d = null) def t14 = 0 // err + + @JAnn(value = 0, b = classOf[Int]) def t15 = 0 + @JAnn(value = 0, b = java.lang.Integer.TYPE) def t16 = 0 // err + + // nested annotation is ok + @JAnn(value = 0, c = Array(new SuppressWarnings(value = Array("")))) def t17 = 0 + // but the nested annotation needs to be itself a Java annotation + @JAnn(value = 0, c = Array(new inline)) def t18 = 0 // err + + @Ann(1) def u1 = 0 + @Ann(const) def u2 = 0 + @Ann(nonConst) def u3 = 0 // err + + @Ann() def u4 = 0 // err + @Ann(value = 0) def u5 = 0 + @Ann(value = 0, a = "") def u6 = 0 + @Ann(0, "") def u7 = 0 + @Ann(0, a = "") def u8 = 0 + + @Ann(value = 0, a = "moin", b = classOf[Object], c = Array("")) def u9 = 0 + @Ann(value = 0, a = null) def u10 = 0 // err + @Ann(value = 0, b = getClass) def u11 = 0 // err + @Ann(value = 0, c = new Array(1)) def u12 = 0 // err + + @Ann(value = 0, b = classOf[Int]) def u15 = 0 + @Ann(value = 0, b = java.lang.Integer.TYPE) def u16 = 0 // err + + // nested annotations are only allowed for Java annotations, not for Scala ConstantAnnotations + @Ann(value = 0, c = Array(new SuppressWarnings(value = Array("")))) def u17 = 0 // err + // the outer and the nested annotation need to be Java annotations + @Ann(value = 0, c = Array(new inline)) def u18 = 0 // err + + @Ann1() def v1 = 0 + @Ann1(0) def v2 = 0 + @Ann1(value = 0) def v3 = 0 + 
@Ann1(x = "") def v4 = 0 // err + @Ann1 def v5 = 0 + @Ann1(0)(0) def v6 = 0 // err + @Ann2 def v7 = 0 // err + @Ann2(x = 0) def v8 = 0 // err + @Ann2(x = 0)(y = 0) def v9 = 0 // warn + @Ann3 def v10 = 0 + @Ann3(0) def v11 = 0 // err + @Ann4(0) def v12 = 0 + @Ann4(0, 1) def v13 = 0 + @Ann4(x = 0, value = 1) def v14 = 0 + @Ann4(value = 1, x = 0) def v15 = 0 + @Ann5 def v16 = 0 + @Ann5() def v17 = 0 + @Ann5(0) def v18 = 0 // err + + @Ann6(1) def w1 = 0 + + @Ann7(1) def x1 = 0 + @Ann7(Array("")) def x2 = 0 +} diff --git a/test/files/neg/any-vs-anyref.check b/test/files/neg/any-vs-anyref.check index 7378f0495fec..88ed488d9bfb 100644 --- a/test/files/neg/any-vs-anyref.check +++ b/test/files/neg/any-vs-anyref.check @@ -1,35 +1,3 @@ -any-vs-anyref.scala:6: error: type mismatch; - found : a.type (with underlying type A) - required: AnyRef -Note that A is bounded only by Equals, which means AnyRef is not a known parent. -Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. - def foo1[A <: Product](a: A) = { type X = a.type } - ^ -any-vs-anyref.scala:7: error: type mismatch; - found : a.type (with underlying type A) - required: AnyRef -Note that A is bounded only by Product, Quux, which means AnyRef is not a known parent. -Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. - def foo2[A <: Product with Quux](a: A) = { type X = a.type } - ^ -any-vs-anyref.scala:8: error: type mismatch; - found : a.type (with underlying type Product) - required: AnyRef -Note that Product extends Any, not AnyRef. -Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. - def foo3(a: Product) = { type X = a.type } - ^ -any-vs-anyref.scala:9: error: type mismatch; - found : Product with Quux - required: AnyRef -Note that the parents of this type (Product, Quux) extend Any, not AnyRef. 
-Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. - def foo4(a: Product with Quux) = { type X = a.type } - ^ any-vs-anyref.scala:10: error: value eq is not a member of Quux with Product Note that the parents of this type (Quux, Product) extend Any, not AnyRef. Such types can participate in value classes, but instances @@ -77,4 +45,4 @@ any-vs-anyref.scala:27: error: type mismatch; required: Quux{def g(x: Int): Int} f(new Quux { def g(x: String) = x }) ^ -11 errors found +7 errors diff --git a/test/files/neg/anytrait.check b/test/files/neg/anytrait.check index 6d9d681d602e..e53752e3add5 100644 --- a/test/files/neg/anytrait.check +++ b/test/files/neg/anytrait.check @@ -7,4 +7,4 @@ anytrait.scala:5: error: this statement is not allowed in universal trait extend anytrait.scala:9: error: field definition is not allowed in universal trait extending from class Any val y: T ^ -three errors found +3 errors diff --git a/test/files/neg/anyval-anyref-parent.check b/test/files/neg/anyval-anyref-parent.check index 8a00fb394dff..a29b2b56e584 100644 --- a/test/files/neg/anyval-anyref-parent.check +++ b/test/files/neg/anyval-anyref-parent.check @@ -7,17 +7,17 @@ class Bar1 extends Any // fail anyval-anyref-parent.scala:6: error: value class parameter must be a val and not be private[this] class Bar2(x: Int) extends AnyVal // fail ^ -anyval-anyref-parent.scala:10: error: illegal inheritance; superclass Any +anyval-anyref-parent.scala:11: error: illegal inheritance; superclass Any is not a subclass of the superclass Object - of the mixin trait Immutable -trait Foo4 extends Any with Immutable // fail + of the mixin trait Reffy +trait Foo4 extends Any with Reffy // fail ^ -anyval-anyref-parent.scala:11: error: illegal inheritance; superclass AnyVal +anyval-anyref-parent.scala:12: error: illegal inheritance; superclass AnyVal is not a subclass of the superclass Object - of the mixin trait Immutable -trait Foo5 
extends AnyVal with Immutable // fail + of the mixin trait Reffy +trait Foo5 extends AnyVal with Reffy // fail ^ -anyval-anyref-parent.scala:11: error: only classes (not traits) are allowed to extend AnyVal -trait Foo5 extends AnyVal with Immutable // fail +anyval-anyref-parent.scala:12: error: only classes (not traits) are allowed to extend AnyVal +trait Foo5 extends AnyVal with Reffy // fail ^ -6 errors found +6 errors diff --git a/test/files/neg/anyval-anyref-parent.scala b/test/files/neg/anyval-anyref-parent.scala index f927992e595d..8666dd15e118 100644 --- a/test/files/neg/anyval-anyref-parent.scala +++ b/test/files/neg/anyval-anyref-parent.scala @@ -7,6 +7,7 @@ class Bar2(x: Int) extends AnyVal // fail class Bar3(val x: Int) extends AnyVal // fail class Bar4 extends AnyRef -trait Foo4 extends Any with Immutable // fail -trait Foo5 extends AnyVal with Immutable // fail -trait Foo6 extends AnyRef with Immutable +trait Reffy +trait Foo4 extends Any with Reffy // fail +trait Foo5 extends AnyVal with Reffy // fail +trait Foo6 extends AnyRef with Reffy diff --git a/test/files/neg/applydynamic_sip.check b/test/files/neg/applydynamic_sip.check index 6487b34067f7..854f7004002c 100644 --- a/test/files/neg/applydynamic_sip.check +++ b/test/files/neg/applydynamic_sip.check @@ -1,88 +1,76 @@ -applydynamic_sip.scala:8: error: applyDynamic does not support passing a vararg parameter +applydynamic_sip.scala:9: error: applyDynamic does not support passing a vararg parameter qual.sel(a, a2: _*) ^ -applydynamic_sip.scala:8: error: value applyDynamic is not a member of Dynamic +applydynamic_sip.scala:9: error: value applyDynamic is not a member of Dynamic error after rewriting to Test.this.qual.("sel") possible cause: maybe a wrong Dynamic method signature? 
qual.sel(a, a2: _*) ^ -applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter +applydynamic_sip.scala:10: error: applyDynamicNamed does not support passing a vararg parameter qual.sel(arg = a, a2: _*) ^ -applydynamic_sip.scala:9: error: value applyDynamicNamed is not a member of Dynamic +applydynamic_sip.scala:10: error: value applyDynamicNamed is not a member of Dynamic error after rewriting to Test.this.qual.("sel") possible cause: maybe a wrong Dynamic method signature? qual.sel(arg = a, a2: _*) ^ -applydynamic_sip.scala:9: error: not found: value arg - qual.sel(arg = a, a2: _*) - ^ -applydynamic_sip.scala:10: error: applyDynamicNamed does not support passing a vararg parameter +applydynamic_sip.scala:11: error: applyDynamicNamed does not support passing a vararg parameter qual.sel(arg, arg2 = "a2", a2: _*) ^ -applydynamic_sip.scala:10: error: value applyDynamicNamed is not a member of Dynamic +applydynamic_sip.scala:11: error: value applyDynamicNamed is not a member of Dynamic error after rewriting to Test.this.qual.("sel") possible cause: maybe a wrong Dynamic method signature? qual.sel(arg, arg2 = "a2", a2: _*) ^ -applydynamic_sip.scala:10: error: not found: value arg +applydynamic_sip.scala:11: error: not found: value arg qual.sel(arg, arg2 = "a2", a2: _*) ^ -applydynamic_sip.scala:10: error: not found: value arg2 - qual.sel(arg, arg2 = "a2", a2: _*) - ^ -applydynamic_sip.scala:19: error: type mismatch; +applydynamic_sip.scala:20: error: type mismatch; found : String("sel") required: Int error after rewriting to Test.this.bad1.selectDynamic("sel") possible cause: maybe a wrong Dynamic method signature? bad1.sel ^ -applydynamic_sip.scala:20: error: type mismatch; +applydynamic_sip.scala:21: error: type mismatch; found : String("sel") required: Int error after rewriting to Test.this.bad1.applyDynamic("sel") possible cause: maybe a wrong Dynamic method signature? 
bad1.sel(1) ^ -applydynamic_sip.scala:21: error: type mismatch; +applydynamic_sip.scala:22: error: type mismatch; found : String("sel") required: Int error after rewriting to Test.this.bad1.applyDynamicNamed("sel") possible cause: maybe a wrong Dynamic method signature? bad1.sel(a = 1) ^ -applydynamic_sip.scala:21: error: reassignment to val - bad1.sel(a = 1) - ^ -applydynamic_sip.scala:22: error: type mismatch; +applydynamic_sip.scala:23: error: type mismatch; found : String("sel") required: Int error after rewriting to Test.this.bad1.updateDynamic("sel") possible cause: maybe a wrong Dynamic method signature? bad1.sel = 1 ^ -applydynamic_sip.scala:30: error: Int does not take parameters +applydynamic_sip.scala:31: error: Int does not take parameters error after rewriting to Test.this.bad2.selectDynamic("sel") possible cause: maybe a wrong Dynamic method signature? bad2.sel ^ -applydynamic_sip.scala:31: error: Int does not take parameters +applydynamic_sip.scala:32: error: Int does not take parameters error after rewriting to Test.this.bad2.applyDynamic("sel") possible cause: maybe a wrong Dynamic method signature? bad2.sel(1) ^ -applydynamic_sip.scala:32: error: Int does not take parameters +applydynamic_sip.scala:33: error: Int does not take parameters error after rewriting to Test.this.bad2.applyDynamicNamed("sel") possible cause: maybe a wrong Dynamic method signature? bad2.sel(a = 1) ^ -applydynamic_sip.scala:32: error: reassignment to val - bad2.sel(a = 1) - ^ -applydynamic_sip.scala:33: error: Int does not take parameters +applydynamic_sip.scala:34: error: Int does not take parameters error after rewriting to Test.this.bad2.updateDynamic("sel") possible cause: maybe a wrong Dynamic method signature? 
bad2.sel = 1 ^ -19 errors found +15 errors diff --git a/test/files/neg/applydynamic_sip.scala b/test/files/neg/applydynamic_sip.scala index 1d09bfabafe6..5884dbec7544 100644 --- a/test/files/neg/applydynamic_sip.scala +++ b/test/files/neg/applydynamic_sip.scala @@ -1,4 +1,5 @@ -// scalac: -language:dynamics +//> using options -language:dynamics +// object Test extends App { val qual: Dynamic = ??? val expr = "expr" diff --git a/test/files/neg/auto-application.check b/test/files/neg/auto-application.check new file mode 100644 index 000000000000..c3e670f82953 --- /dev/null +++ b/test/files/neg/auto-application.check @@ -0,0 +1,16 @@ +auto-application.scala:4: error: Int does not take parameters + ("": Any).##() + ^ +auto-application.scala:5: error: Int does not take parameters + ("": AnyRef).##() + ^ +auto-application.scala:6: error: Int does not take parameters + ("": Object).##() + ^ +auto-application.scala:9: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method meth, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. 
[quickfixable] + meth // warn, auto-application (of nilary methods) is deprecated + ^ +1 warning +3 errors diff --git a/test/files/neg/auto-application.scala b/test/files/neg/auto-application.scala new file mode 100644 index 000000000000..906bd16d158e --- /dev/null +++ b/test/files/neg/auto-application.scala @@ -0,0 +1,10 @@ +//> using options -deprecation -Werror + +class Test { + ("": Any).##() + ("": AnyRef).##() + ("": Object).##() + + def meth() = "" + meth // warn, auto-application (of nilary methods) is deprecated +} diff --git a/test/files/neg/bad-advice.check b/test/files/neg/bad-advice.check index dbb1775b74eb..4226173f08f8 100644 --- a/test/files/neg/bad-advice.check +++ b/test/files/neg/bad-advice.check @@ -1,6 +1,6 @@ -bad-advice.scala:5: error: pattern type is incompatible with expected type; +bad-advice.scala:6: error: pattern type is incompatible with expected type; found : Bip.type required: Int case Bip => true ^ -one error found +1 error diff --git a/test/files/neg/bad-advice.scala b/test/files/neg/bad-advice.scala index f3e22376c998..0683a61036f0 100644 --- a/test/files/neg/bad-advice.scala +++ b/test/files/neg/bad-advice.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Bip object Test { def f(x: Int) = x match { diff --git a/test/files/neg/badimport.check b/test/files/neg/badimport.check index d58b64ff7cc6..d00e4060ac37 100644 --- a/test/files/neg/badimport.check +++ b/test/files/neg/badimport.check @@ -1,4 +1,4 @@ badimport.scala:2: error: . 
expected import collection ^ -one error found +1 error diff --git a/test/files/neg/badtok-1-212.check b/test/files/neg/badtok-1-212.check deleted file mode 100644 index 754652dd2db5..000000000000 --- a/test/files/neg/badtok-1-212.check +++ /dev/null @@ -1,17 +0,0 @@ -badtok-1-212.scala:3: error: unclosed character literal (or use " not ' for string literal) -'42' -^ -badtok-1-212.scala:3: error: unclosed character literal (or use " not ' for string literal) -'42' - ^ -badtok-1-212.scala:9: error: empty character literal -''; -^ -badtok-1-212.scala:11: error: unclosed character literal -' -^ -badtok-1-212.scala:7: warning: deprecated syntax for character literal (use '\'' for single quote) -''' -^ -one warning found -four errors found diff --git a/test/files/neg/badtok-1-212.scala b/test/files/neg/badtok-1-212.scala deleted file mode 100644 index 0d1bd5f64db6..000000000000 --- a/test/files/neg/badtok-1-212.scala +++ /dev/null @@ -1,11 +0,0 @@ -// scalac: -Xsource:2.12 -deprecation -Xfatal-warnings -// bug 989 -'42' - - -// SI-10133 -''' - -''; - -' diff --git a/test/files/neg/badtok-1.check b/test/files/neg/badtok-1.check index d795c6c34aa2..81c74d2f04bc 100644 --- a/test/files/neg/badtok-1.check +++ b/test/files/neg/badtok-1.check @@ -1,31 +1,31 @@ -badtok-1.scala:3: error: unclosed character literal (or use " not ' for string literal) +badtok-1.scala:4: error: unclosed character literal (or use " not ' for string literal) '42' ^ -badtok-1.scala:3: error: unclosed character literal (or use " not ' for string literal) +badtok-1.scala:4: error: unclosed character literal (or use " not ' for string literal) '42' ^ -badtok-1.scala:7: error: empty character literal (use '\'' for single quote) +badtok-1.scala:8: error: empty character literal (use '\'' for single quote) ''' ^ -badtok-1.scala:7: error: unclosed character literal (or use " not ' for string literal) +badtok-1.scala:8: error: unclosed character literal (or use " not ' for string literal) ''' ^ 
-badtok-1.scala:9: error: empty character literal +badtok-1.scala:10: error: empty character literal ''; ^ -badtok-1.scala:12: error: unclosed character literal (or use " for string literal "''abc") +badtok-1.scala:13: error: unclosed character literal (or use " for string literal "''abc") 'abc' ^ -badtok-1.scala:14: error: unclosed character literal (or use " for string literal "utf_8") +badtok-1.scala:15: error: unclosed character literal (or use " for string literal "utf_8") 'utf_8' ^ -badtok-1.scala:16: error: unclosed character literal (or use " not ' for string literal) +badtok-1.scala:17: error: unclosed character literal (or use " not ' for string literal) 'utf-8' ^ -badtok-1.scala:18: error: unclosed character literal +badtok-1.scala:19: error: unclosed character literal ' ^ -badtok-1.scala:12: error: expected class or object definition +badtok-1.scala:13: error: expected class or object definition 'abc' ^ -10 errors found +10 errors diff --git a/test/files/neg/badtok-1.scala b/test/files/neg/badtok-1.scala index a6f4e51eef1e..c9e98d468bb8 100644 --- a/test/files/neg/badtok-1.scala +++ b/test/files/neg/badtok-1.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 -deprecation -Xfatal-warnings +//> using options -Xlint:deprecation -Werror +// // bug 989 '42' diff --git a/test/files/neg/badtok-2.check b/test/files/neg/badtok-2.check index e6861c240763..81d24a46ad1c 100644 --- a/test/files/neg/badtok-2.check +++ b/test/files/neg/badtok-2.check @@ -1,4 +1,4 @@ badtok-2.scala:3: error: unclosed quoted identifier `x ^ -one error found +1 error diff --git a/test/files/neg/badtok-3.check b/test/files/neg/badtok-3.check index aee4f6f4db4d..a8472c773349 100644 --- a/test/files/neg/badtok-3.check +++ b/test/files/neg/badtok-3.check @@ -1,4 +1,4 @@ badtok-3.scala:2: error: input ended while parsing XML using options -Werror -Xlint:byname-implicit + +import language._ + +class C { + implicit def `ints are strs`(n: => Int): String = n.toString + implicit def `bools are 
strs`[A](b: => A)(implicit ev: A => Boolean): String = if (b) "yup" else "nah" + + def show(s: String) = println(s"[$s]") + + def f(): Unit = + show { + println("see me") + 42 // warn + } + + def g(): Unit = + show { + 42 // no warn + } + + def truly(): Unit = + show { + println("see me") + true // warn + } + + def falsely(): Unit = + show { + false // no warn + } +} + +// the workaround for https://github.com/erikvanoosten/metrics-scala/issues/42 +// This removes callsite verbosity with extra wrapping in a by-name arg, but the implicit still warns, +// because the call to healthCheck still requires the implicit conversion to Magnet. +// Here, the by-name should be removed from the implicit because by-name semantics is already provided by healthCheck. + +package object health { + type HealthCheck = () => String + + def healthCheck(name: String, unhealthyMessage: String = "Health check failed")(checker: => HealthCheckMagnet): HealthCheck = { + () => checker(unhealthyMessage)() + } +} + +package health { + + /*sealed*/ trait HealthCheckMagnet { + def apply(unhealthyMessage: String): HealthCheck + } + + object HealthCheckMagnet { + + /** Magnet for checkers returning a [[scala.Boolean]] (possibly implicitly converted). 
*/ + implicit def fromBooleanCheck[A](checker: => A)(implicit ev: A => Boolean): HealthCheckMagnet = new HealthCheckMagnet { + def apply(unhealthyMessage: String) = () => if (checker) "OK" else unhealthyMessage + } + } +} + +object Test extends App { + import health._ + + var count = 0 + val res = healthCheck("bool test", "nope") { + count += 1 + false // warn + } + assert(res() == "nope") + assert(res() == "nope") + assert(count == 2) +} + +// t9386 +class Foo +object Foo { + implicit def int2Foo(a: => Int): Foo = new Foo //Never evaluate the argument + def bar(foo: Foo) = () + def bar(foo: Boolean) = () //unrelated overload +} + +object FooTest extends App { + Foo.bar { println("barring"); 0 } // warn +} + +class Nowarn { + implicit def cv(n: => Int): String = n.toString + + def show(s: String) = println(s"[$s]") + + def f(): Unit = show(cv(42)) // nowarn because it only warns if statements + def g(): Unit = show { println(); cv(42) } // nowarn anymore because explicit call by user +} diff --git a/test/files/neg/byname-implicits-11.check b/test/files/neg/byname-implicits-11.check new file mode 100644 index 000000000000..a1076282adc3 --- /dev/null +++ b/test/files/neg/byname-implicits-11.check @@ -0,0 +1,5 @@ +byname-implicits-11.scala:8: error: diverging implicit expansion for type Foo[Int] +starting with method foo in object Foo + implicitly[Foo[Int]] + ^ +1 error diff --git a/test/files/neg/byname-implicits-11.scala b/test/files/neg/byname-implicits-11.scala new file mode 100644 index 000000000000..a7160712944d --- /dev/null +++ b/test/files/neg/byname-implicits-11.scala @@ -0,0 +1,9 @@ +trait Foo[T] + +object Foo { + implicit def foo[T](implicit fooFoo: => Foo[Foo[T]]): Foo[T] = ??? 
+} + +object Test { + implicitly[Foo[Int]] +} diff --git a/test/files/neg/byname-implicits-16.check b/test/files/neg/byname-implicits-16.check new file mode 100644 index 000000000000..d0e6d0aa538b --- /dev/null +++ b/test/files/neg/byname-implicits-16.check @@ -0,0 +1,5 @@ +byname-implicits-16.scala:14: error: diverging implicit expansion for type Test.O[Test.Z] +starting with method mkN in object Test + implicitly[O[Z]] + ^ +1 error diff --git a/test/files/neg/byname-implicits-16.scala b/test/files/neg/byname-implicits-16.scala new file mode 100644 index 000000000000..2c656b3392c1 --- /dev/null +++ b/test/files/neg/byname-implicits-16.scala @@ -0,0 +1,15 @@ +object Test { + class Z + class O[T] + class E[T] + + class Expand[T, U] + object Expand { + implicit def expando[T]: Expand[O[T], E[O[T]]] = ??? + implicit def expande[T]: Expand[E[T], O[E[T]]] = ??? + } + + implicit def mkN[T, U](implicit e: => Expand[T, U], u: => U): T = ??? + + implicitly[O[Z]] +} diff --git a/test/files/neg/byname-implicits-18.check b/test/files/neg/byname-implicits-18.check new file mode 100644 index 000000000000..e30117469df6 --- /dev/null +++ b/test/files/neg/byname-implicits-18.check @@ -0,0 +1,5 @@ +byname-implicits-18.scala:52: error: diverging implicit expansion for type Test.Tc[Test.Bootstrap[Int]] +starting with method tcGen in object Tc + implicitly[Tc[Bootstrap[Int]]] + ^ +1 error diff --git a/test/files/neg/byname-implicits-18.scala b/test/files/neg/byname-implicits-18.scala new file mode 100644 index 000000000000..581274b882d9 --- /dev/null +++ b/test/files/neg/byname-implicits-18.scala @@ -0,0 +1,53 @@ +/* + * Demo of using by name implicits to resolve (hidden) divergence issues when + * traversing recursive generic structures. 
+ * + * See https://stackoverflow.com/questions/25923974 + */ +sealed trait HList +object HList { + implicit class Syntax[L <: HList](l: L) { + def ::[U](u: U): U :: L = new ::(u, l) + } +} + +sealed trait HNil extends HList +object HNil extends HNil +case class ::[+H, +T <: HList](head : H, tail : T) extends HList + +trait Generic[T] { + type Repr + def to(t: T): Repr + def from(r: Repr): T +} + +object Generic { + type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 } +} + +object Test extends App { + case class Bootstrap[+A](head: A, tail: Option[Bootstrap[(A, A)]]) + object Bootstrap { + type BootstrapRepr[+A] = A :: Option[Bootstrap[(A, A)]] :: HNil + implicit def bootstrapGen[A]: Generic.Aux[Bootstrap[A], BootstrapRepr[A]] = + new Generic[Bootstrap[A]] { + type Repr = BootstrapRepr[A] + def to(t: Bootstrap[A]): Repr = t.head :: t.tail :: HNil + def from(r: Repr): Bootstrap[A] = Bootstrap(r.head, r.tail.head) + } + } + + class Tc[A] + object Tc { + implicit val tcInt: Tc[Int] = new Tc + implicit def tcOpt[A: Tc]: Tc[Option[A]] = new Tc + implicit def tcTuple[A: Tc, B: Tc]: Tc[(A, B)] = new Tc + implicit val tcHNil: Tc[HNil] = new Tc + implicit def tcHCons[H: Tc, T <: HList: Tc]: Tc[H :: T] = new Tc + implicit def tcGen[A, R <: HList]( + implicit gen: Generic.Aux[A, R], tcR: => Tc[R] + ): Tc[A] = new Tc + } + + implicitly[Tc[Bootstrap[Int]]] +} diff --git a/test/files/neg/byname-implicits-21.check b/test/files/neg/byname-implicits-21.check new file mode 100644 index 000000000000..c2a3295acf5b --- /dev/null +++ b/test/files/neg/byname-implicits-21.check @@ -0,0 +1,5 @@ +byname-implicits-21.scala:20: error: diverging implicit expansion for type Test.Foo[Test.A[Unit]] +starting with method fooGen in object Foo + implicitly[Foo[A[Unit]]] + ^ +1 error diff --git a/test/files/neg/byname-implicits-21.scala b/test/files/neg/byname-implicits-21.scala new file mode 100644 index 000000000000..63a202958f9c --- /dev/null +++ b/test/files/neg/byname-implicits-21.scala @@ -0,0 
+1,21 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait Foo[T] + object Foo { + implicit def fooOption[T](implicit fooT: Foo[T]): Foo[Option[T]] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: => Foo[R]): Foo[T] = ??? + } + + trait A[T] + object A { + implicit def genA[T]: Generic[A[T]] { type Repr = Option[A[A[T]]] } = ??? + } + + implicitly[Foo[A[Unit]]] +} diff --git a/test/files/neg/byname-implicits-26.check b/test/files/neg/byname-implicits-26.check new file mode 100644 index 000000000000..9b9d8754ad5a --- /dev/null +++ b/test/files/neg/byname-implicits-26.check @@ -0,0 +1,5 @@ +byname-implicits-26.scala:34: error: diverging implicit expansion for type Test.Foo[Test.A] +starting with method fooGen in object Foo + implicitly[Foo[A]] + ^ +1 error diff --git a/test/files/neg/byname-implicits-26.scala b/test/files/neg/byname-implicits-26.scala new file mode 100644 index 000000000000..884dee39b74b --- /dev/null +++ b/test/files/neg/byname-implicits-26.scala @@ -0,0 +1,35 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait GNil + + trait Foo[T] + object Foo { + implicit val fooUnit: Foo[Unit] = ??? + implicit val fooInt: Foo[Int] = ??? + implicit def fooPair[T, U](implicit fooT: Foo[T], fooU: Foo[U]): Foo[(T, U)] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: Foo[R]): Foo[T] = ??? + } + + case class A(b: B, c: C, i: Int) + object A { + implicit val genA: Generic[A] { type Repr = (B, (C, (Int, Unit))) } = ??? + } + + case class B(c0: C, c1: C, c2: C, i: Int) + object B { + implicit val genB: Generic[B] { type Repr = (C, (C, (C, (Int, Unit)))) } = ??? + } + + case class C(b: A, i: Int) + object C { + implicit val genC: Generic[C] { type Repr = (A, (Int, Unit)) } = ??? 
+ } + + implicitly[Foo[A]] +} diff --git a/test/files/neg/case-class-23-unrelated-unapply.check b/test/files/neg/case-class-23-unrelated-unapply.check new file mode 100644 index 000000000000..508dd6af77fb --- /dev/null +++ b/test/files/neg/case-class-23-unrelated-unapply.check @@ -0,0 +1,7 @@ +case-class-23-unrelated-unapply.scala:42: error: too many arguments for unapply pattern, maximum = 22 + val TwentyThree(a, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, b) = x + ^ +case-class-23-unrelated-unapply.scala:45: error: too many arguments for unapply pattern, maximum = 22 + val TwentyThree(a2, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, b2) = arr + ^ +2 errors diff --git a/test/files/neg/case-class-23-unrelated-unapply.scala b/test/files/neg/case-class-23-unrelated-unapply.scala new file mode 100644 index 000000000000..93759b3b0065 --- /dev/null +++ b/test/files/neg/case-class-23-unrelated-unapply.scala @@ -0,0 +1,47 @@ +case class TwentyThree( + _1: Int, + _2: Int, + _3: Int, + _4: Int, + _5: Int, + _6: Int, + _7: Int, + _8: Int, + _9: Int, + _10: Int, + _11: Int, + _12: Int, + _13: Int, + _14: Int, + _15: Int, + _16: Int, + _17: Int, + _18: Int, + _19: Int, + _20: Int, + _21: Int, + _22: Int, + _23: Int +) + +object TwentyThree { + def unapply(a: Array[Int]): Option[TwentyThree] = { + Option.when(a.length == 23) { + TwentyThree( + a(0), a(1), a(2), a(3), a(4), a(5), a(6), a(7), a(8), a(9), + a(10), a(11), a(12), a(13), a(14), a(15), a(16), a(17), a(18), a(19), + a(20), a(21), a(22), + ) + } + } +} + +// This is borked.. but I'm fairly certain it's borked since before I started meddling with it.. 
+object Test extends App { + val x = new TwentyThree(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) + val TwentyThree(a, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, b) = x + println((a, b)) + val arr = Array(0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + val TwentyThree(a2, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, b2) = arr + println((a2, b2)) +} diff --git a/test/files/neg/case-collision-multifile.check b/test/files/neg/case-collision-multifile.check index 0f5ae64566f2..03da60257d95 100644 --- a/test/files/neg/case-collision-multifile.check +++ b/test/files/neg/case-collision-multifile.check @@ -1,7 +1,7 @@ -two.scala:2: warning: Generated class hotDog differs only in case from HotDog (defined in one.scala). +two.scala:1: warning: Generated class hotDog differs only in case from HotDog (defined in one.scala). Such classes will overwrite one another on case-insensitive filesystems. class hotDog ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/case-collision-multifile/one.scala b/test/files/neg/case-collision-multifile/one.scala index 2836f2e6dd2c..5da343dae653 100644 --- a/test/files/neg/case-collision-multifile/one.scala +++ b/test/files/neg/case-collision-multifile/one.scala @@ -1,2 +1,2 @@ -// scalac: -Xfatal-warnings +//> using options -Werror -Ybackend-parallelism 1 class HotDog diff --git a/test/files/neg/case-collision-multifile/two.scala b/test/files/neg/case-collision-multifile/two.scala index 30364a562886..2c2fad1a1108 100644 --- a/test/files/neg/case-collision-multifile/two.scala +++ b/test/files/neg/case-collision-multifile/two.scala @@ -1,2 +1 @@ -// scalac: -Xfatal-warnings class hotDog diff --git a/test/files/neg/case-collision.check b/test/files/neg/case-collision.check index 0f53c00470c0..aa09692cea41 100644 --- a/test/files/neg/case-collision.check +++ b/test/files/neg/case-collision.check @@ -1,27 +1,35 @@ -case-collision.scala:6: warning: Generated class foo.BIPPY differs only in case from foo.Bippy. +case-collision.scala:7: warning: Generated class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems. class BIPPY ^ -case-collision.scala:9: warning: Generated class foo.DINGO$ differs only in case from foo.Dingo$. +case-collision.scala:10: warning: Generated class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems. object DINGO ^ -case-collision.scala:9: warning: Generated class foo.DINGO differs only in case from foo.Dingo. +case-collision.scala:10: warning: Generated class foo.DINGO differs only in case from foo.Dingo. Such classes will overwrite one another on case-insensitive filesystems. object DINGO ^ -case-collision.scala:12: warning: Generated class foo.HyRaX$ differs only in case from foo.Hyrax$. +case-collision.scala:13: warning: Generated class foo.HyRaX$ differs only in case from foo.Hyrax$. 
Such classes will overwrite one another on case-insensitive filesystems. object HyRaX ^ -case-collision.scala:12: warning: Generated class foo.HyRaX differs only in case from foo.Hyrax. +case-collision.scala:13: warning: Generated class foo.HyRaX differs only in case from foo.Hyrax. Such classes will overwrite one another on case-insensitive filesystems. object HyRaX ^ -case-collision.scala:15: warning: Generated class foo.wackO differs only in case from foo.Wacko. +case-collision.scala:16: warning: Generated class foo.wackO differs only in case from foo.Wacko. Such classes will overwrite one another on case-insensitive filesystems. object wackO ^ -error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found -one error found +case-collision.scala:20: warning: Generated class foo.bar.Package differs only in case from foo.bar.package. + Such classes will overwrite one another on case-insensitive filesystems. + class Package + ^ +case-collision.scala:29: warning: Generated class foo.O$a_$bang$ differs only in case from foo.O$a_$bang$. + Such classes will overwrite one another on case-insensitive filesystems. + object bang { + ^ +error: No warnings can be incurred under -Werror. +8 warnings +1 error diff --git a/test/files/neg/case-collision.scala b/test/files/neg/case-collision.scala index e640e88cf75c..4e005dc40479 100644 --- a/test/files/neg/case-collision.scala +++ b/test/files/neg/case-collision.scala @@ -1,5 +1,6 @@ -// scalac: -Xfatal-warnings -package foo +//> using options -Werror +// +package foo { class Bippy @@ -13,3 +14,23 @@ object HyRaX class Wacko object wackO + +package object bar +package bar { + class Package +} + +object O extends App { + object a_! 
{ + def foo = 1 + } + + object a_ { + object bang { + def foo = 2 + } + } + println(a_!.foo) + println(a_.bang.foo) +} +} diff --git a/test/files/neg/case-cross.check b/test/files/neg/case-cross.check new file mode 100644 index 000000000000..0517c49c04b2 --- /dev/null +++ b/test/files/neg/case-cross.check @@ -0,0 +1,6 @@ +case-cross.scala:5: error: access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C.apply +case class C private (c: Int) { + ^ +1 error diff --git a/test/files/neg/case-cross.scala b/test/files/neg/case-cross.scala new file mode 100644 index 000000000000..273dc438b77e --- /dev/null +++ b/test/files/neg/case-cross.scala @@ -0,0 +1,7 @@ +//> using options -Xsource:3 + +// warn about case class synthetic method getting access modifier from constructor + +case class C private (c: Int) { + def copy(c: Int) = this // warn about apply instead +} diff --git a/test/files/neg/case-warn.check b/test/files/neg/case-warn.check new file mode 100644 index 000000000000..64e18ed54216 --- /dev/null +++ b/test/files/neg/case-warn.check @@ -0,0 +1,9 @@ +case-warn.scala:6: warning: access modifiers for `copy` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +case class C private (c: Int) + ^ +case-warn.scala:6: warning: access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +case class C private (c: Int) + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/case-warn.scala b/test/files/neg/case-warn.scala new file mode 100644 index 000000000000..f72f09a9f60a --- /dev/null +++ b/test/files/neg/case-warn.scala @@ -0,0 +1,6 @@ +//> using options -Werror -Xsource:3 -Wconf:cat=scala3-migration:w + +// warn about case class synthetic method getting access modifier from constructor. +// if erroring, the error message for apply is hidden by previous error for copy. + +case class C private (c: Int) diff --git a/test/files/neg/caseclass_private_constructor.check b/test/files/neg/caseclass_private_constructor.check new file mode 100644 index 000000000000..b2b13a7fb872 --- /dev/null +++ b/test/files/neg/caseclass_private_constructor.check @@ -0,0 +1,53 @@ +caseclass_private_constructor.scala:6: error: method apply in object A cannot be accessed as a member of object A from object ATest +error after rewriting to A. +possible cause: maybe a wrong Dynamic method signature? + def a1: A = A(1) // error: apply is private + ^ +caseclass_private_constructor.scala:7: error: method copy in class A cannot be accessed as a member of A from object ATest + def a2: A = a1.copy(2) // error: copy is private + ^ +caseclass_private_constructor.scala:12: error: method apply in object B cannot be accessed as a member of object B from object BTest +error after rewriting to B. +possible cause: maybe a wrong Dynamic method signature? + def b1: B = B(1) // error: apply is private + ^ +caseclass_private_constructor.scala:13: error: method copy in class B cannot be accessed as a member of B from object BTest + def b2: B = b1.copy(2) // error: copy is private + ^ +caseclass_private_constructor.scala:24: error: method apply in object C cannot be accessed as a member of object qualified_private.C from object QPrivTest +error after rewriting to qualified_private.C. +possible cause: maybe a wrong Dynamic method signature? 
+ def c1: C = C(1) // error: apply is private + ^ +caseclass_private_constructor.scala:25: error: method copy in class C cannot be accessed as a member of qualified_private.C from object QPrivTest + def c2: C = c1.copy(2) // error: copy is private + ^ +caseclass_private_constructor.scala:27: error: method apply in object D cannot be accessed as a member of object qualified_private.D from object QPrivTest +error after rewriting to qualified_private.D. +possible cause: maybe a wrong Dynamic method signature? + def d1: D = D(1) // error: apply is private + ^ +caseclass_private_constructor.scala:28: error: method copy in class D cannot be accessed as a member of qualified_private.D from object QPrivTest + def d2: D = d1.copy(2) // error: copy is private + ^ +caseclass_private_constructor.scala:34: error: method copy in class E cannot be accessed as a member of E from object ETest + Access to protected method copy not permitted because + enclosing object ETest is not a subclass of + class E where target is defined + def e2: E = e2.copy(2) // error: copy is protected + ^ +caseclass_private_constructor.scala:43: error: method copy in class F cannot be accessed as a member of qualified_protected.F from object QProtTest + Access to protected method copy not permitted because + enclosing object QProtTest is not a subclass of + class F in object qualified_protected where target is defined + def f2: F = f2.copy(2) // error: copy is protected + ^ +caseclass_private_constructor.scala:57: error: method copy in class OverrideApply cannot be accessed as a member of OverrideApply from object OverrideTest + def oa = OverrideApply(42).copy(24) // error: copy is still private + ^ +caseclass_private_constructor.scala:58: error: method apply in object OverrideCopy cannot be accessed as a member of object OverrideCopy from object OverrideTest +error after rewriting to OverrideCopy. +possible cause: maybe a wrong Dynamic method signature? 
+ def oc = OverrideCopy(42) // error: apply is still private + ^ +12 errors diff --git a/test/files/neg/caseclass_private_constructor.scala b/test/files/neg/caseclass_private_constructor.scala new file mode 100644 index 000000000000..3282dfd85011 --- /dev/null +++ b/test/files/neg/caseclass_private_constructor.scala @@ -0,0 +1,59 @@ +//> using options -Xsource:3 -Wconf:cat=scala3-migration:s -Xsource-features:case-apply-copy-access + +case class A private (i: Int) +object A +object ATest { + def a1: A = A(1) // error: apply is private + def a2: A = a1.copy(2) // error: copy is private +} + +case class B private (i: Int) // no user-defined companion object, should compile +object BTest { + def b1: B = B(1) // error: apply is private + def b2: B = b1.copy(2) // error: copy is private +} + +object qualified_private { + case class C private[qualified_private] (i: Int) + object C + + case class D private[qualified_private] (i: Int) // no user-defined companion object, should compile +} +object QPrivTest { + import qualified_private._ + def c1: C = C(1) // error: apply is private + def c2: C = c1.copy(2) // error: copy is private + + def d1: D = D(1) // error: apply is private + def d2: D = d1.copy(2) // error: copy is private +} + +case class E protected (i: Int) +object ETest { + def e1: E = E(1) + def e2: E = e2.copy(2) // error: copy is protected +} + +object qualified_protected { + case class F protected[qualified_protected] (i: Int) +} +object QProtTest { + import qualified_protected._ + def f1: F = F(1) + def f2: F = f2.copy(2) // error: copy is protected +} + + +case class OverrideApply private (i: Int) +object OverrideApply { + def apply(i: Int): OverrideApply = new OverrideApply(i) +} + +case class OverrideCopy private (i: Int) { + def copy(i: Int = i): OverrideCopy = OverrideCopy(i) +} + +object OverrideTest { + def oa = OverrideApply(42).copy(24) // error: copy is still private + def oc = OverrideCopy(42) // error: apply is still private +} diff --git 
a/test/files/neg/caseinherit.check b/test/files/neg/caseinherit.check index 09327a4ffabf..28c78286cb9a 100644 --- a/test/files/neg/caseinherit.check +++ b/test/files/neg/caseinherit.check @@ -7,4 +7,4 @@ caseinherit.scala:4: error: case object Bippy has case ancestor foo.A, but case- caseinherit.scala:11: error: case class Dingus has case ancestor foo.A, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes. case class Dingus(y: Int) extends Innocent ^ -three errors found +3 errors diff --git a/test/files/neg/caseinherit.scala b/test/files/neg/caseinherit.scala index 5c8da13d3109..188b7d14f9d6 100644 --- a/test/files/neg/caseinherit.scala +++ b/test/files/neg/caseinherit.scala @@ -10,4 +10,4 @@ package bar { class Innocent extends A(5) case class Dingus(y: Int) extends Innocent case object Hungle extends Blameless(5) -} \ No newline at end of file +} diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check index e11915904074..e56376138e0a 100644 --- a/test/files/neg/catch-all.check +++ b/test/files/neg/catch-all.check @@ -1,12 +1,15 @@ -catch-all.scala:3: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. +catch-all.scala:4: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. try { "warn" } catch { case _ => } ^ -catch-all.scala:5: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +catch-all.scala:6: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. try { "warn" } catch { case x => } ^ -catch-all.scala:7: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +catch-all.scala:8: warning: This catches all Throwables. 
If this is really intended, use `case x : Throwable` to clear this warning. try { "warn" } catch { case _: RuntimeException => ; case x => } ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +catch-all.scala:36: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. + try "okay" catch discarder // warn total function + ^ +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/catch-all.scala b/test/files/neg/catch-all.scala index 67d63d006297..46d6d757e4fb 100644 --- a/test/files/neg/catch-all.scala +++ b/test/files/neg/catch-all.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Werror +// object CatchAll { try { "warn" } catch { case _ => } @@ -27,6 +28,12 @@ object CatchAll { try { "okay" } catch { case _ if "".isEmpty => } "okay" match { case _ => "" } + + val handler: PartialFunction[Throwable, String] = { case _ => "hello, world" } + val discarder = (_: Throwable) => "goodbye, cruel world" + + try "okay" catch handler + try "okay" catch discarder // warn total function } object T extends Throwable diff --git a/test/files/neg/check-dead.check b/test/files/neg/check-dead.check index 7d4fc0901948..3f0128684a44 100644 --- a/test/files/neg/check-dead.check +++ b/test/files/neg/check-dead.check @@ -10,6 +10,6 @@ check-dead.scala:30: warning: dead code following this construct check-dead.scala:34: warning: dead code following this construct throw new Exception // should warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/check-dead.scala b/test/files/neg/check-dead.scala index f0f1db1cdda6..efe12f0bb4bf 100644 --- a/test/files/neg/check-dead.scala +++ b/test/files/neg/check-dead.scala @@ -1,4 +1,4 @@ -// scalac: -Ywarn-dead-code -Xfatal-warnings +//> using options -Wdead-code -Werror object Other { def oops(msg: String = "xxx"): Nothing = throw new Exception(msg) // should not warn } diff --git a/test/files/neg/checksensible-equals.check b/test/files/neg/checksensible-equals.check deleted file mode 100644 index 964a2c9d34be..000000000000 --- a/test/files/neg/checksensible-equals.check +++ /dev/null @@ -1,18 +0,0 @@ -checksensible-equals.scala:4: warning: comparing values of types Long and Int using `equals` unsafely bypasses cooperative equality; use `==` instead - 1L equals 1 - ^ -checksensible-equals.scala:11: warning: comparing values of types Any and Int using `equals` unsafely bypasses cooperative equality; use `==` instead - (1L: Any) equals 1 - ^ -checksensible-equals.scala:12: warning: comparing values of types AnyVal and Int using `equals` unsafely bypasses cooperative equality; use `==` instead - (1L: AnyVal) equals 1 - ^ -checksensible-equals.scala:13: warning: comparing values of types AnyVal and AnyVal using `equals` unsafely bypasses cooperative equality; use `==` instead - (1L: AnyVal) equals (1: AnyVal) - ^ -checksensible-equals.scala:16: warning: comparing values of types A and Int using `equals` unsafely bypasses cooperative equality; use `==` instead - def foo[A](a: A) = a.equals(1) - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-5 warnings found -one error found diff --git a/test/files/neg/checksensible-equals.scala b/test/files/neg/checksensible-equals.scala deleted file mode 100644 index 905e96109433..000000000000 --- a/test/files/neg/checksensible-equals.scala +++ /dev/null @@ -1,19 +0,0 @@ -// scalac: -Xsource:2.13 -Werror - -class AnyEqualsTest { - 1L equals 1 - // ok, because it's between the same numeric types - 1 equals 1 - // ok - 1L equals "string" - // ok - 1L.equals(()) - (1L: Any) equals 1 - (1L: AnyVal) equals 1 - (1L: AnyVal) equals (1: AnyVal) - // ok - "string" equals 1 - def foo[A](a: A) = a.equals(1) - // ok - def bar[A <: AnyRef](a: A) = a.equals(1) -} diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check index f4f36f5a1744..f150f255fb1f 100644 --- a/test/files/neg/checksensible.check +++ b/test/files/neg/checksensible.check @@ -1,254 +1,129 @@ -#partest !java8 -checksensible.scala:46: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. - signature: Any.==(x$1: Any): Boolean - given arguments: - after adaptation: Any.==((): Unit) - () == () - ^ -checksensible.scala:49: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. - signature: Object.!=(x$1: Any): Boolean - given arguments: - after adaptation: Object.!=((): Unit) - scala.runtime.BoxedUnit.UNIT != () - ^ -checksensible.scala:50: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. 
- signature: Any.!=(x$1: Any): Boolean - given arguments: - after adaptation: Any.!=((): Unit) - (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // shouldn't warn - ^ -checksensible.scala:14: warning: comparing a fresh object using `eq` will always yield false - (new AnyRef) eq (new AnyRef) +checksensible.scala:54: warning: symbol literal is deprecated; use Symbol("sym") instead [quickfixable] + (1 != 'sym) // w + ^ +checksensible.scala:15: warning: comparing a fresh object using `eq` will always yield false + (new AnyRef) eq (new AnyRef) // w ^ -checksensible.scala:15: warning: comparing a fresh object using `ne` will always yield true - (new AnyRef) ne (new AnyRef) +checksensible.scala:16: warning: comparing a fresh object using `ne` will always yield true + (new AnyRef) ne (new AnyRef) // w ^ -checksensible.scala:16: warning: comparing a fresh object using `eq` will always yield false - Shmoopie eq (new AnyRef) - ^ checksensible.scala:17: warning: comparing a fresh object using `eq` will always yield false - (Shmoopie: AnyRef) eq (new AnyRef) - ^ + Shmoopie eq (new AnyRef) // w + ^ checksensible.scala:18: warning: comparing a fresh object using `eq` will always yield false - (new AnyRef) eq Shmoopie - ^ + (Shmoopie: AnyRef) eq (new AnyRef) // w + ^ checksensible.scala:19: warning: comparing a fresh object using `eq` will always yield false - (new AnyRef) eq null + (new AnyRef) eq Shmoopie // w ^ checksensible.scala:20: warning: comparing a fresh object using `eq` will always yield false - null eq new AnyRef + (new AnyRef) eq null // w + ^ +checksensible.scala:21: warning: comparing a fresh object using `eq` will always yield false + null eq new AnyRef // w ^ -checksensible.scala:27: warning: comparing values of types Unit and Int using `==` will always yield false - (c = 1) == 0 +checksensible.scala:28: warning: comparing values of types Unit and Int using `==` will always yield false + (c = 1) == 0 // w ^ -checksensible.scala:28: warning: comparing 
values of types Int and Unit using `==` will always yield false - 0 == (c = 1) +checksensible.scala:29: warning: comparing values of types Integer and Unit using `==` will always yield false + 0 == (c = 1) // w ^ -checksensible.scala:30: warning: comparing values of types Int and String using `==` will always yield false - 1 == "abc" +checksensible.scala:31: warning: comparing values of types Int and String using `==` will always yield false + 1 == "abc" // w ^ -checksensible.scala:34: warning: comparing values of types Some[Int] and Int using `==` will always yield false - Some(1) == 1 // as above - ^ -checksensible.scala:36: warning: constructor Boolean in class Boolean is deprecated - true == new java.lang.Boolean(true) // none of these should warn except for deprecated API +checksensible.scala:34: warning: comparing values of types String and Int using `==` will always yield false + "abc" == 1 // w: string equality is known + ^ +checksensible.scala:35: warning: comparing values of types Some[Int] and Int using `==` will always yield false + Some(1) == 1 // w: case class equals ^ -checksensible.scala:37: warning: constructor Boolean in class Boolean is deprecated - new java.lang.Boolean(true) == true - ^ -checksensible.scala:39: warning: comparing a fresh object using `==` will always yield false - new AnyRef == 1 +checksensible.scala:40: warning: comparing a fresh object using `==` will always yield false + new AnyRef == 1 // w: fresh object ^ -checksensible.scala:41: warning: constructor Integer in class Integer is deprecated - 1 == (new java.lang.Integer(1)) // ...something like this - ^ -checksensible.scala:42: warning: comparing values of types Int and Boolean using `==` will always yield false - 1 == (new java.lang.Boolean(true)) +checksensible.scala:43: warning: comparing values of types Int and Boolean using `==` will always yield false + 1 == java.lang.Boolean.valueOf(true) // w ^ -checksensible.scala:42: warning: constructor Boolean in class Boolean is 
deprecated - 1 == (new java.lang.Boolean(true)) - ^ -checksensible.scala:44: warning: comparing values of types Int and Boolean using `!=` will always yield true - 1 != true +checksensible.scala:45: warning: comparing values of types Int and Boolean using `!=` will always yield true + 1 != true // w ^ -checksensible.scala:45: warning: comparing values of types Unit and Boolean using `==` will always yield false - () == true - ^ -checksensible.scala:46: warning: comparing values of types Unit and Unit using `==` will always yield true - () == () +checksensible.scala:46: warning: comparing values of types Unit and Boolean using `==` will always yield false + () == true // w ^ checksensible.scala:47: warning: comparing values of types Unit and Unit using `==` will always yield true - () == println + () == () // w + ^ +checksensible.scala:48: warning: comparing values of types Unit and Unit using `==` will always yield true + () == println() // w ^ -checksensible.scala:48: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true - () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false +checksensible.scala:49: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true + () == scala.runtime.BoxedUnit.UNIT // w ^ -checksensible.scala:49: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=` will always yield false - scala.runtime.BoxedUnit.UNIT != () +checksensible.scala:50: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=` will always yield false + scala.runtime.BoxedUnit.UNIT != () // w ^ -checksensible.scala:52: warning: comparing values of types Int and Unit using `!=` will always yield true - (1 != println) +checksensible.scala:53: warning: comparing values of types Int and Unit using `!=` will always yield true + (1 != println()) // w ^ -checksensible.scala:53: warning: comparing values of types 
Int and Symbol using `!=` will always yield true - (1 != 'sym) +checksensible.scala:54: warning: comparing values of types Int and Symbol using `!=` will always yield true + (1 != 'sym) // w ^ -checksensible.scala:59: warning: comparing a fresh object using `==` will always yield false - ((x: Int) => x + 1) == null - ^ checksensible.scala:60: warning: comparing a fresh object using `==` will always yield false - Bep == ((_: Int) + 1) + ((x: Int) => x + 1) == null // w (fresh object) + ^ +checksensible.scala:61: warning: comparing a fresh object using `==` will always yield false + Bep == ((_: Int) + 1) // w (fresh object) ^ -checksensible.scala:62: warning: comparing a fresh object using `==` will always yield false - new Object == new Object - ^ checksensible.scala:63: warning: comparing a fresh object using `==` will always yield false - new Object == "abc" + new Object == new Object // w + ^ +checksensible.scala:64: warning: comparing a fresh object using `==` will always yield false + new Object == "abc" // w ^ -checksensible.scala:64: warning: comparing a fresh object using `!=` will always yield true - new Exception() != new Exception() +checksensible.scala:65: warning: comparing a fresh object using `!=` will always yield true + new Exception() != new Exception() // w ^ -checksensible.scala:67: warning: comparing values of types Int and Null using `==` will always yield false - if (foo.length == null) "plante" else "plante pas" +checksensible.scala:68: warning: comparing values of types Int and Null using `==` will always yield false + if (foo.length == null) "plante" else "plante pas" // w ^ -checksensible.scala:72: warning: comparing values of types Bip and Bop using `==` will always yield false - (x1 == x2) +checksensible.scala:73: warning: comparing values of types Bip and Bop using `==` will always yield false + (x1 == x2) // w ^ -checksensible.scala:82: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==` will 
always yield false - c3 == z1 +checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==` will always yield false + c3 == z1 // w ^ -checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==` will always yield false - z1 == c3 +checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==` will always yield false + z1 == c3 // w ^ -checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=` will always yield true - z1 != c3 +checksensible.scala:85: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=` will always yield true + z1 != c3 // w ^ -checksensible.scala:85: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=` will always yield true - c3 != "abc" +checksensible.scala:86: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=` will always yield true + c3 != "abc" // w ^ -checksensible.scala:96: warning: comparing values of types Unit and Int using `!=` will always yield true - while ((c = in.read) != -1) +checksensible.scala:97: warning: comparing values of types Unit and Int using `!=` will always yield true + while ((c = in.read) != -1) // w ^ -error: No warnings can be incurred under -Xfatal-warnings. -40 warnings found -one error found -#partest java8 -checksensible.scala:46: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. - signature: Any.==(x$1: Any): Boolean - given arguments: - after adaptation: Any.==((): Unit) - () == () - ^ -checksensible.scala:49: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. 
- signature: Object.!=(x$1: Any): Boolean - given arguments: - after adaptation: Object.!=((): Unit) - scala.runtime.BoxedUnit.UNIT != () - ^ -checksensible.scala:50: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. - signature: Any.!=(x$1: Any): Boolean - given arguments: - after adaptation: Any.!=((): Unit) - (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // shouldn't warn - ^ -checksensible.scala:14: warning: comparing a fresh object using `eq` will always yield false - (new AnyRef) eq (new AnyRef) +checksensible.scala:105: warning: comparing values of types Long and Int using `equals` unsafely bypasses cooperative equality; use `==` instead + 1L equals 1 // w: bypasses coopeq + ^ +checksensible.scala:112: warning: comparing values of types Any and Int using `equals` unsafely bypasses cooperative equality; use `==` instead + (1L: Any) equals 1 // w: bypasses coopeq + ^ +checksensible.scala:113: warning: comparing values of types AnyVal and Int using `equals` unsafely bypasses cooperative equality; use `==` instead + (1L: AnyVal) equals 1 // w: bypasses coopeq ^ -checksensible.scala:15: warning: comparing a fresh object using `ne` will always yield true - (new AnyRef) ne (new AnyRef) +checksensible.scala:114: warning: comparing values of types AnyVal and AnyVal using `equals` unsafely bypasses cooperative equality; use `==` instead + (1L: AnyVal) equals (1: AnyVal) // w: bypasses coopeq ^ -checksensible.scala:16: warning: comparing a fresh object using `eq` will always yield false - Shmoopie eq (new AnyRef) - ^ -checksensible.scala:17: warning: comparing a fresh object using `eq` will always yield false - (Shmoopie: AnyRef) eq (new AnyRef) - ^ -checksensible.scala:18: warning: comparing a fresh object using `eq` will always yield false - (new AnyRef) eq Shmoopie - ^ -checksensible.scala:19: warning: comparing a fresh object using `eq` will always yield false - (new AnyRef) eq null - ^ 
-checksensible.scala:20: warning: comparing a fresh object using `eq` will always yield false - null eq new AnyRef - ^ -checksensible.scala:27: warning: comparing values of types Unit and Int using `==` will always yield false - (c = 1) == 0 - ^ -checksensible.scala:28: warning: comparing values of types Int and Unit using `==` will always yield false - 0 == (c = 1) - ^ -checksensible.scala:30: warning: comparing values of types Int and String using `==` will always yield false - 1 == "abc" - ^ -checksensible.scala:34: warning: comparing values of types Some[Int] and Int using `==` will always yield false - Some(1) == 1 // as above - ^ -checksensible.scala:39: warning: comparing a fresh object using `==` will always yield false - new AnyRef == 1 - ^ -checksensible.scala:42: warning: comparing values of types Int and Boolean using `==` will always yield false - 1 == (new java.lang.Boolean(true)) - ^ -checksensible.scala:44: warning: comparing values of types Int and Boolean using `!=` will always yield true - 1 != true - ^ -checksensible.scala:45: warning: comparing values of types Unit and Boolean using `==` will always yield false - () == true - ^ -checksensible.scala:46: warning: comparing values of types Unit and Unit using `==` will always yield true - () == () - ^ -checksensible.scala:47: warning: comparing values of types Unit and Unit using `==` will always yield true - () == println - ^ -checksensible.scala:48: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true - () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false - ^ -checksensible.scala:49: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=` will always yield false - scala.runtime.BoxedUnit.UNIT != () - ^ -checksensible.scala:52: warning: comparing values of types Int and Unit using `!=` will always yield true - (1 != println) - ^ -checksensible.scala:53: warning: comparing values of types Int and 
Symbol using `!=` will always yield true - (1 != 'sym) - ^ -checksensible.scala:59: warning: comparing a fresh object using `==` will always yield false - ((x: Int) => x + 1) == null - ^ -checksensible.scala:60: warning: comparing a fresh object using `==` will always yield false - Bep == ((_: Int) + 1) - ^ -checksensible.scala:62: warning: comparing a fresh object using `==` will always yield false - new Object == new Object - ^ -checksensible.scala:63: warning: comparing a fresh object using `==` will always yield false - new Object == "abc" - ^ -checksensible.scala:64: warning: comparing a fresh object using `!=` will always yield true - new Exception() != new Exception() - ^ -checksensible.scala:67: warning: comparing values of types Int and Null using `==` will always yield false - if (foo.length == null) "plante" else "plante pas" - ^ -checksensible.scala:72: warning: comparing values of types Bip and Bop using `==` will always yield false - (x1 == x2) - ^ -checksensible.scala:82: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==` will always yield false - c3 == z1 - ^ -checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==` will always yield false - z1 == c3 - ^ -checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=` will always yield true - z1 != c3 - ^ -checksensible.scala:85: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=` will always yield true - c3 != "abc" - ^ -checksensible.scala:96: warning: comparing values of types Unit and Int using `!=` will always yield true - while ((c = in.read) != -1) - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-36 warnings found -one error found +checksensible.scala:117: warning: comparing values of types A and Int using `equals` unsafely bypasses cooperative equality; use `==` instead + def foo[A](a: A) = a.equals(1) // w: bypasses coopeq + ^ +checksensible.scala:126: warning: eq_refine.E and String are unrelated: they will most likely never compare equal + if (e == "") ??? // warn about comparing unrelated types + ^ +checksensible.scala:129: warning: eq_refine.SE and String are unrelated: they will most likely never compare equal + if (se == "") ??? // types are still unrelated + ^ +error: No warnings can be incurred under -Werror. +42 warnings +1 error diff --git a/test/files/neg/checksensible.scala b/test/files/neg/checksensible.scala index 419054b8dd66..91ae29a8f1ac 100644 --- a/test/files/neg/checksensible.scala +++ b/test/files/neg/checksensible.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -Werror -deprecation +// final class Bip { def <=(other: Bop) = true } final class Bop { } object Bep { } @@ -11,65 +12,65 @@ final class Zing { class RefEqTest { object Shmoopie - (new AnyRef) eq (new AnyRef) - (new AnyRef) ne (new AnyRef) - Shmoopie eq (new AnyRef) - (Shmoopie: AnyRef) eq (new AnyRef) - (new AnyRef) eq Shmoopie - (new AnyRef) eq null - null eq new AnyRef + (new AnyRef) eq (new AnyRef) // w + (new AnyRef) ne (new AnyRef) // w + Shmoopie eq (new AnyRef) // w + (Shmoopie: AnyRef) eq (new AnyRef) // w + (new AnyRef) eq Shmoopie // w + (new AnyRef) eq null // w + null eq new AnyRef // w } // 13 warnings class EqEqValTest { var c = 0 - (c = 1) == 0 - 0 == (c = 1) - - 1 == "abc" - 1 == ("abc": Any) // doesn't warn because an Any may be a boxed Int - 1 == (1: Any) // as above - "abc" == 1 // warns because the lub of String and Int is Any - Some(1) == 1 // as above - - true == new java.lang.Boolean(true) // none of these should warn except for deprecated API - new java.lang.Boolean(true) == true - - new AnyRef == 1 - 1 == new 
AnyRef // doesn't warn because it could be... - 1 == (new java.lang.Integer(1)) // ...something like this - 1 == (new java.lang.Boolean(true)) - - 1 != true - () == true - () == () - () == println - () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false - scala.runtime.BoxedUnit.UNIT != () - (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // shouldn't warn - - (1 != println) - (1 != 'sym) + (c = 1) == 0 // w + 0 == (c = 1) // w + + 1 == "abc" // w + 1 == ("abc": Any) // n: Any may be a boxed Int + 1 == (1: Any) // n: as above + "abc" == 1 // w: string equality is known + Some(1) == 1 // w: case class equals + + true == java.lang.Boolean.valueOf(true) // n + java.lang.Boolean.valueOf(true) == true // n + + new AnyRef == 1 // w: fresh object + 1 == (Integer.valueOf(1): AnyRef) // n: `AnyRef` could be an Integer, which is handled by cooperative equality + 1 == java.lang.Integer.valueOf(1) // n: cooperative equality (BoxesRunTime) + 1 == java.lang.Boolean.valueOf(true) // w + + 1 != true // w + () == true // w + () == () // w + () == println() // w + () == scala.runtime.BoxedUnit.UNIT // w + scala.runtime.BoxedUnit.UNIT != () // w + (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // n + + (1 != println()) // w + (1 != 'sym) // w } // 12 warnings class EqEqRefTest { val ref = new Bop - ((x: Int) => x + 1) == null - Bep == ((_: Int) + 1) + ((x: Int) => x + 1) == null // w (fresh object) + Bep == ((_: Int) + 1) // w (fresh object) - new Object == new Object - new Object == "abc" - new Exception() != new Exception() + new Object == new Object // w + new Object == "abc" // w + new Exception() != new Exception() // w val foo: Array[String] = Array("1","2","3") - if (foo.length == null) "plante" else "plante pas" + if (foo.length == null) "plante" else "plante pas" // w // final classes with default equals val x1 = new Bip val x2 = new Bop - (x1 == x2) + (x1 == x2) // w class C1 { } class C2 extends C1 { } @@ -79,23 +80,51 @@ 
class EqEqRefTest { val c3 = new C3 // these should always warn - c3 == z1 - z1 == c3 - z1 != c3 - c3 != "abc" - // this should warn when feeling chatty - c3 != z1 + c3 == z1 // w + z1 == c3 // w + z1 != c3 // w + c3 != "abc" // w + + c3 != z1 // n: method != is overridden // non-warners - (null: AnyRef) == (null: AnyRef) - (x1 <= x2) + (null: AnyRef) == (null: AnyRef) // n + (x1 <= x2) // n def main(args: Array[String]) = { val in = new java.io.FileInputStream(args(0)) var c = 0 - while ((c = in.read) != -1) + while ((c = in.read) != -1) // w print(c.toChar) in.close } } + +class AnyEqualsTest { + 1L equals 1 // w: bypasses coopeq + // ok, because it's between the same numeric types + 1 equals 1 // n + // ok + 1L equals "string" // n + // ok + 1L.equals(()) // n + (1L: Any) equals 1 // w: bypasses coopeq + (1L: AnyVal) equals 1 // w: bypasses coopeq + (1L: AnyVal) equals (1: AnyVal) // w: bypasses coopeq + // ok + "string" equals 1 // n + def foo[A](a: A) = a.equals(1) // w: bypasses coopeq + // ok + def bar[A <: AnyRef](a: A) = a.equals(1) // n +} + +object eq_refine { + class E + class SE extends Serializable + val e = new E + if (e == "") ??? // warn about comparing unrelated types + + val se = new SE + if (se == "") ??? 
// types are still unrelated +} diff --git a/test/files/neg/checksensibleUnit.check b/test/files/neg/checksensibleUnit.check index 7fed4e558aaa..6246a59163a6 100644 --- a/test/files/neg/checksensibleUnit.check +++ b/test/files/neg/checksensibleUnit.check @@ -4,4 +4,4 @@ checksensibleUnit.scala:3: error: value > is not a member of Unit checksensibleUnit.scala:4: error: value <= is not a member of Unit println((c = 1) <= 0) ^ -two errors found +2 errors diff --git a/test/files/neg/choices.check b/test/files/neg/choices.check index 2449cadcd647..338f847926e4 100644 --- a/test/files/neg/choices.check +++ b/test/files/neg/choices.check @@ -2,4 +2,4 @@ error: Usage: -Yresolve-term-conflict: where choices are pa error: bad option: '-Yresolve-term-conflict' error: bad options: -Yresolve-term-conflict error: flags file may only contain compiler options, found: -Yresolve-term-conflict -four errors found +4 errors diff --git a/test/files/neg/choices.scala b/test/files/neg/choices.scala index 976f695261e6..9803dfdf802a 100644 --- a/test/files/neg/choices.scala +++ b/test/files/neg/choices.scala @@ -1,4 +1,5 @@ -// scalac: -Yresolve-term-conflict +//> using options -Yresolve-term-conflict +// object Test { def main(args: Array[String]): Unit = { diff --git a/test/files/neg/class-of-double-targs.check b/test/files/neg/class-of-double-targs.check index f7e2094f9778..acb9d5c43b47 100644 --- a/test/files/neg/class-of-double-targs.check +++ b/test/files/neg/class-of-double-targs.check @@ -1,4 +1,4 @@ class-of-double-targs.scala:2: error: expression of type Class[Int](classOf[scala.Int]) does not take type parameters. 
classOf[Int][Int] ^ -one error found +1 error diff --git a/test/files/neg/classOfDeprecation.check b/test/files/neg/classOfDeprecation.check index e80b2d643a2b..e67fc64fc74d 100644 --- a/test/files/neg/classOfDeprecation.check +++ b/test/files/neg/classOfDeprecation.check @@ -4,6 +4,6 @@ classOfDeprecation.scala:6: warning: class C is deprecated (since like, forever) classOfDeprecation.scala:7: warning: class C is deprecated (since like, forever): no no! @ann(classOf[C]) def u = 1 ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/classOfDeprecation.scala b/test/files/neg/classOfDeprecation.scala index 497d11ad6197..92bab336b57f 100644 --- a/test/files/neg/classOfDeprecation.scala +++ b/test/files/neg/classOfDeprecation.scala @@ -1,4 +1,4 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Werror @deprecated("no no!", "like, forever") class C class ann(x: Any) extends annotation.Annotation diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check index 889826873445..b36d1cb870e3 100644 --- a/test/files/neg/classmanifests_new_deprecations.check +++ b/test/files/neg/classmanifests_new_deprecations.check @@ -1,27 +1,15 @@ -classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead - def cm1[T: ClassManifest] = ??? - ^ -classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead - def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? 
- ^ -classmanifests_new_deprecations.scala:5: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead - val cm3: ClassManifest[Int] = null - ^ -classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead def rcm1[T: scala.reflect.ClassManifest] = ??? - ^ -classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead + ^ +classmanifests_new_deprecations.scala:5: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ??? ^ -classmanifests_new_deprecations.scala:9: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead val rcm3: scala.reflect.ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:11: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead - type CM[T] = ClassManifest[T] - ^ -classmanifests_new_deprecations.scala:16: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead type RCM[T] = scala.reflect.ClassManifest[T] ^ -error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/classmanifests_new_deprecations.scala b/test/files/neg/classmanifests_new_deprecations.scala index 172df6887813..2c253e170417 100644 --- a/test/files/neg/classmanifests_new_deprecations.scala +++ b/test/files/neg/classmanifests_new_deprecations.scala @@ -1,18 +1,10 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -Xlint -Werror +// object Test extends App { - def cm1[T: ClassManifest] = ??? - def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? - val cm3: ClassManifest[Int] = null - def rcm1[T: scala.reflect.ClassManifest] = ??? def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ??? val rcm3: scala.reflect.ClassManifest[Int] = null - type CM[T] = ClassManifest[T] - def acm1[T: CM] = ??? - def acm2[T](implicit evidence$1: CM[T]) = ??? - val acm3: CM[Int] = null - type RCM[T] = scala.reflect.ClassManifest[T] def arcm1[T: RCM] = ??? def arcm2[T](implicit evidence$1: RCM[T]) = ??? diff --git a/test/files/neg/classtags_contextbound_a.check b/test/files/neg/classtags_contextbound_a.check index 5edb7f9a5ad1..b74d7f8b5843 100644 --- a/test/files/neg/classtags_contextbound_a.check +++ b/test/files/neg/classtags_contextbound_a.check @@ -1,4 +1,4 @@ classtags_contextbound_a.scala:2: error: No ClassTag available for T def foo[T] = Array[T]() ^ -one error found +1 error diff --git a/test/files/neg/classtags_contextbound_a.scala b/test/files/neg/classtags_contextbound_a.scala index d18beda34173..77ca0cfd063d 100644 --- a/test/files/neg/classtags_contextbound_a.scala +++ b/test/files/neg/classtags_contextbound_a.scala @@ -1,4 +1,4 @@ object Test extends App { def foo[T] = Array[T]() println(foo[Int].getClass) -} \ No newline at end of file +} diff --git a/test/files/neg/classtags_contextbound_b.check b/test/files/neg/classtags_contextbound_b.check index e17ab8b0d167..42e8e68467c0 100644 --- a/test/files/neg/classtags_contextbound_b.check +++ b/test/files/neg/classtags_contextbound_b.check @@ -1,4 
+1,4 @@ classtags_contextbound_b.scala:5: error: No ClassTag available for T def foo[T] = mkArray[T] ^ -one error found +1 error diff --git a/test/files/neg/classtags_contextbound_b.scala b/test/files/neg/classtags_contextbound_b.scala index a189f9aa6597..503dfa5c0300 100644 --- a/test/files/neg/classtags_contextbound_b.scala +++ b/test/files/neg/classtags_contextbound_b.scala @@ -4,4 +4,4 @@ object Test extends App { def mkArray[T: ClassTag] = Array[T]() def foo[T] = mkArray[T] println(foo[Int].getClass) -} \ No newline at end of file +} diff --git a/test/files/neg/classtags_contextbound_c.check b/test/files/neg/classtags_contextbound_c.check index e8666f7a10e0..8bab1bfd4a9b 100644 --- a/test/files/neg/classtags_contextbound_c.check +++ b/test/files/neg/classtags_contextbound_c.check @@ -1,4 +1,4 @@ classtags_contextbound_c.scala:4: error: No ClassTag available for T def mkArray[T] = Array[T]() ^ -one error found +1 error diff --git a/test/files/neg/classtags_contextbound_c.scala b/test/files/neg/classtags_contextbound_c.scala index 54c616ce7e0e..b6b98ca74f96 100644 --- a/test/files/neg/classtags_contextbound_c.scala +++ b/test/files/neg/classtags_contextbound_c.scala @@ -4,4 +4,4 @@ object Test extends App { def mkArray[T] = Array[T]() def foo[T: ClassTag] = mkArray[T] println(foo[Int].getClass) -} \ No newline at end of file +} diff --git a/test/files/neg/classtags_dont_use_typetags.check b/test/files/neg/classtags_dont_use_typetags.check index 4f728d267de7..5c7bd9492a22 100644 --- a/test/files/neg/classtags_dont_use_typetags.check +++ b/test/files/neg/classtags_dont_use_typetags.check @@ -1,4 +1,4 @@ classtags_dont_use_typetags.scala:4: error: No ClassTag available for T def foo[T: TypeTag] = Array[T]() ^ -one error found +1 error diff --git a/test/files/neg/classtags_dont_use_typetags.scala b/test/files/neg/classtags_dont_use_typetags.scala index 2eb842b86054..3173d83111a5 100644 --- a/test/files/neg/classtags_dont_use_typetags.scala +++ 
b/test/files/neg/classtags_dont_use_typetags.scala @@ -2,4 +2,4 @@ import scala.reflect.runtime.universe._ object Test extends App { def foo[T: TypeTag] = Array[T]() -} \ No newline at end of file +} diff --git a/test/files/neg/cloneable.check b/test/files/neg/cloneable.check new file mode 100644 index 000000000000..a8e5617df6af --- /dev/null +++ b/test/files/neg/cloneable.check @@ -0,0 +1,6 @@ +cloneable.scala:7: warning: object X should not extend Cloneable. +object X extends Base + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/cloneable.scala b/test/files/neg/cloneable.scala new file mode 100644 index 000000000000..98b33c23c3ec --- /dev/null +++ b/test/files/neg/cloneable.scala @@ -0,0 +1,9 @@ + +//> using options -Werror -Xlint:cloneable +//> using retest.options -Wconf:cat=lint-cloneable:s + +class Base extends Cloneable + +object X extends Base + +class Y extends Base diff --git a/test/files/neg/compile-time-only-a.check b/test/files/neg/compile-time-only-a.check index a10f8b64890e..d5a672d68578 100644 --- a/test/files/neg/compile-time-only-a.check +++ b/test/files/neg/compile-time-only-a.check @@ -1,3 +1,6 @@ +compile-time-only-a.scala:49: warning: Pattern definition introduces Unit-valued member of Test; consider wrapping it in `locally { ... }`. + val _ = c6.x + ^ compile-time-only-a.scala:10: error: C3 @compileTimeOnly("C3") case class C3(x: Int) ^ @@ -62,21 +65,25 @@ compile-time-only-a.scala:61: error: C7 val c707a: List[C7] = ??? ^ compile-time-only-a.scala:63: error: C7 + val c707c = List.empty[C7] // not yet eliminated by rewrite to Nil + ^ +compile-time-only-a.scala:64: error: C7 val c708a: T forSome { type T <: C7 } = ??? ^ -compile-time-only-a.scala:66: error: C8 +compile-time-only-a.scala:67: error: C8 val c709: (C8[Int], C8[C7]) = ??? ^ -compile-time-only-a.scala:67: error: C8 +compile-time-only-a.scala:68: error: C8 val c710: (C8[_] => C8[_]) = ??? 
^ -compile-time-only-a.scala:74: error: placebo +compile-time-only-a.scala:75: error: placebo class Test { ^ -compile-time-only-a.scala:75: error: placebo +compile-time-only-a.scala:76: error: placebo @placebo def x = (2: @placebo) ^ -compile-time-only-a.scala:75: error: placebo +compile-time-only-a.scala:76: error: placebo @placebo def x = (2: @placebo) ^ -27 errors found +1 warning +28 errors diff --git a/test/files/neg/compile-time-only-a.scala b/test/files/neg/compile-time-only-a.scala index 533175a6996c..10cf18dd6745 100644 --- a/test/files/neg/compile-time-only-a.scala +++ b/test/files/neg/compile-time-only-a.scala @@ -59,7 +59,8 @@ object Test extends App { // val c705: ({ @compileTimeOnly("C7") type C7[T] = List[T] })#C7[_] = ??? val c706: C7 Either C7 = ??? val c707a: List[C7] = ??? - val c707b = List[C7]() + val c707b = List[C7]() // eliminated by rewrite to Nil + val c707c = List.empty[C7] // not yet eliminated by rewrite to Nil val c708a: T forSome { type T <: C7 } = ??? // https://groups.google.com/forum/#!topic/scala-internals/5n07TiCnBZU // val c708b: T forSome { @compileTimeOnly("C7") type T } = ??? 
@@ -73,4 +74,4 @@ class placebo extends scala.annotation.StaticAnnotation @placebo class Test { @placebo def x = (2: @placebo) -} \ No newline at end of file +} diff --git a/test/files/neg/compile-time-only-b.check b/test/files/neg/compile-time-only-b.check index 50cdf57fb5d6..80423aaa8c0f 100644 --- a/test/files/neg/compile-time-only-b.check +++ b/test/files/neg/compile-time-only-b.check @@ -10,4 +10,4 @@ compile-time-only-b.scala:13: error: splice must be enclosed within a reify {} b compile-time-only-b.scala:14: error: cannot use value except for signatures of macro implementations val ignored4 = reify(fortyTwo).value ^ -four errors found +4 errors diff --git a/test/files/neg/compile-time-only-b.scala b/test/files/neg/compile-time-only-b.scala index d5568dbe67ba..7636fd38db57 100644 --- a/test/files/neg/compile-time-only-b.scala +++ b/test/files/neg/compile-time-only-b.scala @@ -12,4 +12,4 @@ object Test extends App { val fortyTwo = 42 val ignored3 = reify(fortyTwo).splice val ignored4 = reify(fortyTwo).value -} \ No newline at end of file +} diff --git a/test/files/neg/constant-warning.check b/test/files/neg/constant-warning.check index dd99ad5dc231..0db80c1261c1 100644 --- a/test/files/neg/constant-warning.check +++ b/test/files/neg/constant-warning.check @@ -1,6 +1,39 @@ -constant-warning.scala:3: warning: Evaluation of a constant expression results in an arithmetic error: / by zero +constant-warning.scala:4: warning: Evaluation of a constant expression results in an arithmetic error: / by zero val fails = 1 + 2 / (3 - 2 - 1) ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +constant-warning.scala:6: warning: Evaluation of a constant expression results in an arithmetic error: integer overflow, using -2147483607 + val addi: Int = Int.MaxValue + 42 + ^ +constant-warning.scala:7: warning: Evaluation of a constant expression results in an arithmetic error: integer overflow, using 2147483606 + val subi: Int = Int.MinValue - 42 + ^ +constant-warning.scala:8: warning: Evaluation of a constant expression results in an arithmetic error: integer overflow, using -2 + val muli: Int = Int.MaxValue * 2 + ^ +constant-warning.scala:9: warning: Evaluation of a constant expression results in an arithmetic error: integer overflow, using -2147483648 + val divi: Int = Int.MinValue / -1 + ^ +constant-warning.scala:10: warning: Evaluation of a constant expression results in an arithmetic error: / by zero + val divz: Int = Int.MinValue / 0 + ^ +constant-warning.scala:12: warning: Evaluation of a constant expression results in an arithmetic error: integer overflow, using 0 + val long: Long = 100 * 1024 * 1024 * 1024 + ^ +constant-warning.scala:13: warning: Evaluation of a constant expression results in an arithmetic error: long overflow, using -9223372036854775767 + val addl: Long = Long.MaxValue + 42 + ^ +constant-warning.scala:14: warning: Evaluation of a constant expression results in an arithmetic error: long overflow, using 9223372036854775766 + val subl: Long = Long.MinValue - 42 + ^ +constant-warning.scala:15: warning: Evaluation of a constant expression results in an arithmetic error: long overflow, using -2 + val mull: Long = Long.MaxValue * 2 + ^ +constant-warning.scala:16: warning: Evaluation of a constant expression results in an arithmetic error: long overflow, using -9223372036854775808 + val divl: Long = Long.MinValue / -1 + ^ +constant-warning.scala:17: warning: Evaluation of a constant expression results in an arithmetic error: / by zero + val divlz: Long = Long.MinValue / 0 + ^ +error: No warnings can be 
incurred under -Werror. +12 warnings +1 error diff --git a/test/files/neg/constant-warning.scala b/test/files/neg/constant-warning.scala index 081ba4a1e01e..0bb939bf1123 100644 --- a/test/files/neg/constant-warning.scala +++ b/test/files/neg/constant-warning.scala @@ -1,4 +1,18 @@ -// scalac: -Xlint:constant -Xfatal-warnings +//> using options -Werror -Xlint:constant +//-Vprint:cleanup (bytecode test to ensure warnable constants are folded) object Test { val fails = 1 + 2 / (3 - 2 - 1) + + val addi: Int = Int.MaxValue + 42 + val subi: Int = Int.MinValue - 42 + val muli: Int = Int.MaxValue * 2 + val divi: Int = Int.MinValue / -1 + val divz: Int = Int.MinValue / 0 + + val long: Long = 100 * 1024 * 1024 * 1024 + val addl: Long = Long.MaxValue + 42 + val subl: Long = Long.MinValue - 42 + val mull: Long = Long.MaxValue * 2 + val divl: Long = Long.MinValue / -1 + val divlz: Long = Long.MinValue / 0 } diff --git a/test/files/neg/constrs.check b/test/files/neg/constrs.check index 8a5bd97ae3ab..cf43a07b263d 100644 --- a/test/files/neg/constrs.check +++ b/test/files/neg/constrs.check @@ -4,7 +4,7 @@ constrs.scala:6: error: type T is not a member of object test constrs.scala:6: error: value u is not a member of object test def this(y: Int)(z: Int)(t: this.T) = { this(this.u + y + z); Console.println(x) } ^ -constrs.scala:10: error: called constructor's definition must precede calling constructor's definition +constrs.scala:10: error: self constructor invocation must refer to a constructor definition which precedes it, to prevent infinite cycles def this() = this("abc") ^ constrs.scala:12: error: constructor invokes itself @@ -15,4 +15,4 @@ constrs.scala:16: error: type mismatch; required: a def this() = this(1) ^ -5 errors found +5 errors diff --git a/test/files/neg/constructor-init-order.check b/test/files/neg/constructor-init-order.check index 7d2dbf735736..d62fbce591da 100644 --- a/test/files/neg/constructor-init-order.check +++ 
b/test/files/neg/constructor-init-order.check @@ -1,9 +1,9 @@ -constructor-init-order.scala:8: warning: Reference to uninitialized value baz +constructor-init-order.scala:9: warning: Reference to uninitialized value baz val bar1 = baz // warn ^ -constructor-init-order.scala:18: warning: Reference to uninitialized variable baz +constructor-init-order.scala:19: warning: Reference to uninitialized variable baz var bar1 = baz // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/constructor-init-order.scala b/test/files/neg/constructor-init-order.scala index 898189f2cec0..baaaa3fb2cd6 100644 --- a/test/files/neg/constructor-init-order.scala +++ b/test/files/neg/constructor-init-order.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// trait Foo0 { val quux1: String val quux2 = quux1 // warning here is "future work" diff --git a/test/files/neg/constructor-prefix-error.check b/test/files/neg/constructor-prefix-error.check index 87e948881b32..844aa15caaa3 100644 --- a/test/files/neg/constructor-prefix-error.check +++ b/test/files/neg/constructor-prefix-error.check @@ -1,4 +1,4 @@ constructor-prefix-error.scala:6: error: Outer is not a legal prefix for a constructor val x = new Outer#Inner ^ -one error found +1 error diff --git a/test/files/neg/cycle-bounds.check b/test/files/neg/cycle-bounds.check index 749fbcc9d820..7f5e333cfa70 100644 --- a/test/files/neg/cycle-bounds.check +++ b/test/files/neg/cycle-bounds.check @@ -1,4 +1,4 @@ -cycle-bounds.scala:6: error: illegal cyclic reference involving type T +cycle-bounds.scala:7: error: illegal cyclic reference involving type T class NotOk[T <: Comparable[_ <: T]] ^ -one error found +1 error diff --git a/test/files/neg/cycle-bounds.scala b/test/files/neg/cycle-bounds.scala index bb495cfbfe70..6a709cec752a 100644 --- 
a/test/files/neg/cycle-bounds.scala +++ b/test/files/neg/cycle-bounds.scala @@ -1,4 +1,5 @@ -// scalac: -Ybreak-cycles +//> using options -Ybreak-cycles +// // This should be allowed class Ok[T <: Comparable[_ >: T]] diff --git a/test/files/neg/cyclics-import.check b/test/files/neg/cyclics-import.check index be09fca374fe..28e7458deff5 100644 --- a/test/files/neg/cyclics-import.check +++ b/test/files/neg/cyclics-import.check @@ -3,4 +3,4 @@ Note: this is often due in part to a class depending on a definition nested with If applicable, you may wish to try moving some members into another object. import User.UserStatus._ ^ -one error found +1 error diff --git a/test/files/neg/cyclics.check b/test/files/neg/cyclics.check index c240387d2f67..6b7683d6c4e8 100644 --- a/test/files/neg/cyclics.check +++ b/test/files/neg/cyclics.check @@ -7,4 +7,4 @@ cyclics.scala:3: error: illegal cyclic reference involving type B cyclics.scala:5: error: illegal cyclic reference involving type E type C = I { type E = C } ^ -three errors found +3 errors diff --git a/test/files/neg/dbldef.check b/test/files/neg/dbldef.check index b896c4cdcf4e..30b825a73489 100644 --- a/test/files/neg/dbldef.check +++ b/test/files/neg/dbldef.check @@ -6,7 +6,4 @@ dbldef.scala:1: error: type mismatch; required: Int case class test0(x: Int, x: Float) ^ -dbldef.scala:1: error: in class test0, multiple overloaded alternatives of x define default arguments -case class test0(x: Int, x: Float) - ^ -three errors found +2 errors diff --git a/test/files/neg/deadline-inf-illegal.check b/test/files/neg/deadline-inf-illegal.check index 530d2b2443d4..b6da846c9233 100644 --- a/test/files/neg/deadline-inf-illegal.check +++ b/test/files/neg/deadline-inf-illegal.check @@ -6,10 +6,10 @@ deadline-inf-illegal.scala:6: error: type mismatch; required: scala.concurrent.duration.FiniteDuration Deadline.now + d ^ -deadline-inf-illegal.scala:7: error: overloaded method value - with alternatives: +deadline-inf-illegal.scala:7: error: 
overloaded method - with alternatives: (other: scala.concurrent.duration.Deadline)scala.concurrent.duration.FiniteDuration (other: scala.concurrent.duration.FiniteDuration)scala.concurrent.duration.Deadline cannot be applied to (scala.concurrent.duration.Duration) Deadline.now - d ^ -three errors found +3 errors diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check index 8d93f2a92c22..cbf7c8712130 100644 --- a/test/files/neg/delayed-init-ref.check +++ b/test/files/neg/delayed-init-ref.check @@ -1,15 +1,15 @@ -delayed-init-ref.scala:18: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value +delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value println(O.vall) // warn ^ -delayed-init-ref.scala:20: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value +delayed-init-ref.scala:21: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value println(vall) // warn ^ -delayed-init-ref.scala:29: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0 +delayed-init-ref.scala:30: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. 
See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0 trait Before extends DelayedInit { ^ -delayed-init-ref.scala:41: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value +delayed-init-ref.scala:42: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value println({locally(()); this}.foo) // warn (spurious, but we can't discriminate) ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/delayed-init-ref.scala b/test/files/neg/delayed-init-ref.scala index b545ff097998..1ad93fa328bd 100644 --- a/test/files/neg/delayed-init-ref.scala +++ b/test/files/neg/delayed-init-ref.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xlint -Xfatal-warnings +//> using options -deprecation -Xlint -Xfatal-warnings +// trait T { val traitVal = "" } @@ -27,8 +28,8 @@ object Client { // Delayed init usage pattern from Specs2 // See: https://groups.google.com/d/msg/scala-sips/wP6dL8nIAQs/ogjoPE-MSVAJ trait Before extends DelayedInit { - def before() - override def delayedInit(x: => Unit): Unit = { before; x } + def before(): Unit + override def delayedInit(x: => Unit): Unit = { before(); x } } object Spec { trait UserContext extends Before { diff --git a/test/files/neg/depmet_1.check b/test/files/neg/depmet_1.check index 15498568c50a..78b1e1d9cd6e 100644 --- a/test/files/neg/depmet_1.check +++ b/test/files/neg/depmet_1.check @@ -7,4 +7,4 @@ depmet_1.scala:3: error: illegal dependent method type: parameter may only be re depmet_1.scala:4: error: not found: value y def precise2[T <: y.type](y: String): Unit = {} ^ -three errors found +3 errors diff --git a/test/files/neg/depmet_1.scala b/test/files/neg/depmet_1.scala index fc672e1ed8e2..9388b2c9a264 100644 --- 
a/test/files/neg/depmet_1.scala +++ b/test/files/neg/depmet_1.scala @@ -2,4 +2,4 @@ object Test { def precise0(y: x.type)(x: String): Unit = {} def precise1(x: String, y: x.type): Unit = {} def precise2[T <: y.type](y: String): Unit = {} -} \ No newline at end of file +} diff --git a/test/files/neg/deprecated-annots.check b/test/files/neg/deprecated-annots.check new file mode 100644 index 000000000000..9fd704e7d1d5 --- /dev/null +++ b/test/files/neg/deprecated-annots.check @@ -0,0 +1,6 @@ +deprecated-annots.scala:9: error: @scala.annotation.elidable is ignored in Scala 3 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D + @annotation.elidable(42) + ^ +1 error diff --git a/test/files/neg/deprecated-annots.scala b/test/files/neg/deprecated-annots.scala new file mode 100644 index 000000000000..fbdcd39b81cf --- /dev/null +++ b/test/files/neg/deprecated-annots.scala @@ -0,0 +1,11 @@ + +//> using options -Werror -Xlint -Xsource:3 + +class C[@specialized A] + +class D { + def f[@specialized A](a: A): A = a + + @annotation.elidable(42) + def g() = println("hello, world") +} diff --git a/test/files/neg/deprecated-options.check b/test/files/neg/deprecated-options.check new file mode 100644 index 000000000000..6153a635f83b --- /dev/null +++ b/test/files/neg/deprecated-options.check @@ -0,0 +1,7 @@ +warning: -Xsource is deprecated: instead of -Xsource:2.14, use -Xsource:3 and optionally -Xsource-features +warning: -Xfuture is deprecated: Not used since 2.13. +warning: -optimize is deprecated: Since 2.12, enables -opt:inline:**. This can be dangerous. +warning: -Xexperimental is deprecated: Not used since 2.13. +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/deprecated-options.scala b/test/files/neg/deprecated-options.scala new file mode 100644 index 000000000000..bea08b8e324f --- /dev/null +++ b/test/files/neg/deprecated-options.scala @@ -0,0 +1,4 @@ +// +//> using options -Werror -deprecation -optimise -Xexperimental -Xfuture -Xsource:2.14 +// +// Deprecated options are announced before compilation. diff --git a/test/files/neg/deprecated-target.check b/test/files/neg/deprecated-target.check deleted file mode 100644 index 554c98d1aa8e..000000000000 --- a/test/files/neg/deprecated-target.check +++ /dev/null @@ -1,4 +0,0 @@ -warning: -target is deprecated: -target:7 is deprecated, forcing use of 8 -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found diff --git a/test/files/neg/deprecated-target.scala b/test/files/neg/deprecated-target.scala deleted file mode 100644 index 7ac29a4efe89..000000000000 --- a/test/files/neg/deprecated-target.scala +++ /dev/null @@ -1,2 +0,0 @@ -// scalac: -target:jvm-1.7 -deprecation -Xfatal-warnings -class C diff --git a/test/files/neg/deprecated.check b/test/files/neg/deprecated.check new file mode 100644 index 000000000000..2d1df0eac47e --- /dev/null +++ b/test/files/neg/deprecated.check @@ -0,0 +1,27 @@ +deprecated.scala:5: warning: Specify both message and version: @deprecated("message", since = "MyLib 1.0") + @deprecated def f = ??? + ^ +deprecated.scala:9: warning: Specify both message and version: @deprecated("message", since = "MyLib 1.0") + @deprecated("Don't use it."/*, forRemoval=true*/) def stale = ??? + ^ +deprecated.scala:21: warning: method f in trait T is deprecated + t.f + ^ +deprecated.scala:22: warning: method g in trait T is deprecated (since 1.0): Don't use it. + t.g + ^ +deprecated.scala:23: warning: method stale in trait T is deprecated: Don't use it. + t.stale + ^ +deprecated.scala:24: warning: method gross in trait T is deprecated (since 1.0): Don't use it. 
+ t.gross + ^ +deprecated.scala:26: warning: method innie in trait T is deprecated (since 1.0): Don't use it. + t.innie // warn because API will be removed + ^ +deprecated.scala:27: warning: method keeper in trait T is deprecated (since 1.0): Prefer toString instead. + t.keeper // don't warn because it's an inlined forwarder? maybe just warn. + ^ +error: No warnings can be incurred under -Werror. +8 warnings +1 error diff --git a/test/files/neg/deprecated.scala b/test/files/neg/deprecated.scala new file mode 100644 index 000000000000..0c1c4c5e7336 --- /dev/null +++ b/test/files/neg/deprecated.scala @@ -0,0 +1,28 @@ +//> using options -Xlint:deprecation -Werror -opt:inline: +// + +trait T { + @deprecated def f = ??? + + @deprecated("Don't use it.", since="1.0") def g = ??? + + @deprecated("Don't use it."/*, forRemoval=true*/) def stale = ??? + + @deprecated("Don't use it.", since="1.0"/*, forRemoval=true*/) def gross = ??? + + @deprecated("Don't use it.", since="1.0"/*, forRemoval=true*/) @inline def innie = ??? + + @deprecated("Prefer toString instead.", since="1.0"/*, forRemoval=false*/) @inline def keeper = toString() +} + +object Main { + def t: T = ??? + + t.f + t.g + t.stale + t.gross + + t.innie // warn because API will be removed + t.keeper // don't warn because it's an inlined forwarder? maybe just warn. +} diff --git a/test/files/neg/deprecated_widening.check b/test/files/neg/deprecated_widening.check new file mode 100644 index 000000000000..48c3c6ed6817 --- /dev/null +++ b/test/files/neg/deprecated_widening.check @@ -0,0 +1,66 @@ +deprecated_widening.scala:5: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + val i_f: Float = i // deprecated + ^ +deprecated_widening.scala:7: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. 
[quickfixable] + val l_f: Float = l // deprecated + ^ +deprecated_widening.scala:8: warning: Widening conversion from Long to Double is deprecated because it loses precision. Write `.toDouble` instead. [quickfixable] + val l_d: Double = l // deprecated + ^ +deprecated_widening.scala:23: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + val truncatedPosFloat:Float = 16777217L // deprecated + ^ +deprecated_widening.scala:26: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + val truncatedNegFloat: Float = - 16777217L // deprecated + ^ +deprecated_widening.scala:30: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + val truncatedPosFloatI:Float = 16777217 // deprecated + ^ +deprecated_widening.scala:33: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + val truncatedNegFloatI: Float = - 16777217 // deprecated + ^ +deprecated_widening.scala:37: warning: Widening conversion from Long to Double is deprecated because it loses precision. Write `.toDouble` instead. [quickfixable] + val truncatedPosDouble:Double = 18014398509481985L // deprecated + ^ +deprecated_widening.scala:40: warning: Widening conversion from Long to Double is deprecated because it loses precision. Write `.toDouble` instead. [quickfixable] + val truncatedNegDouble: Double = - 18014398509481985L // deprecated + ^ +deprecated_widening.scala:47: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def literals = Set[Float](0x7fffffc0, 0x7ffffffd, 0x7ffffffe, 0x7fffffff) + ^ +deprecated_widening.scala:47: warning: Widening conversion from Int to Float is deprecated because it loses precision. 
Write `.toFloat` instead. [quickfixable] + def literals = Set[Float](0x7fffffc0, 0x7ffffffd, 0x7ffffffe, 0x7fffffff) + ^ +deprecated_widening.scala:47: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def literals = Set[Float](0x7fffffc0, 0x7ffffffd, 0x7ffffffe, 0x7fffffff) + ^ +deprecated_widening.scala:48: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def longingly = Set[Float](0x7fffffc0L, 0x7ffffffdL, 0x7ffffffeL, 0x7fffffffL) + ^ +deprecated_widening.scala:48: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def longingly = Set[Float](0x7fffffc0L, 0x7ffffffdL, 0x7ffffffeL, 0x7fffffffL) + ^ +deprecated_widening.scala:48: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def longingly = Set[Float](0x7fffffc0L, 0x7ffffffdL, 0x7ffffffeL, 0x7fffffffL) + ^ +deprecated_widening.scala:48: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def longingly = Set[Float](0x7fffffc0L, 0x7ffffffdL, 0x7ffffffeL, 0x7fffffffL) + ^ +deprecated_widening.scala:50: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def `pick one` = Set[Float](0x1000003, 0x1000004, 0x1000005) + ^ +deprecated_widening.scala:50: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + def `pick one` = Set[Float](0x1000003, 0x1000004, 0x1000005) + ^ +deprecated_widening.scala:12: warning: method int2float in object Int is deprecated (since 2.13.1): Implicit conversion from Int to Float is dangerous because it loses precision. 
Write `.toFloat` instead. + implicitly[Int => Float] // deprecated + ^ +deprecated_widening.scala:14: warning: method long2float in object Long is deprecated (since 2.13.1): Implicit conversion from Long to Float is dangerous because it loses precision. Write `.toFloat` instead. + implicitly[Long => Float] // deprecated + ^ +deprecated_widening.scala:15: warning: method long2double in object Long is deprecated (since 2.13.1): Implicit conversion from Long to Double is dangerous because it loses precision. Write `.toDouble` instead. + implicitly[Long => Double] // deprecated + ^ +error: No warnings can be incurred under -Werror. +21 warnings +1 error diff --git a/test/files/neg/deprecated_widening.scala b/test/files/neg/deprecated_widening.scala new file mode 100644 index 000000000000..46fc46fdd25a --- /dev/null +++ b/test/files/neg/deprecated_widening.scala @@ -0,0 +1,56 @@ +//> using options -Werror -Xlint:deprecation +// +object Test { + def foo(i: Int, l: Long): Unit = { + val i_f: Float = i // deprecated + val i_d: Double = i // OK + val l_f: Float = l // deprecated + val l_d: Double = l // deprecated + } + + def imp: Unit = { + implicitly[Int => Float] // deprecated + implicitly[Int => Double] // OK + implicitly[Long => Float] // deprecated + implicitly[Long => Double] // deprecated + } + + // don't leak silent warning from float conversion + val n = 42 + def clean = n max 27 + + val posFloat:Float = 16777216L // OK + val truncatedPosFloat:Float = 16777217L // deprecated + val losslessPosFloat:Float = 16777218L // OK -- lossless + val negFloat: Float = - 16777216L // OK + val truncatedNegFloat: Float = - 16777217L // deprecated + val losslessNegFloat: Float = - 16777218L // OK -- lossless + + val posFloatI:Float = 16777216 // OK + val truncatedPosFloatI:Float = 16777217 // deprecated + val losslessPosFloatI:Float = 16777218 // OK -- lossless + val negFloatI: Float = - 16777216 // OK + val truncatedNegFloatI: Float = - 16777217 // deprecated + val 
losslessNegFloatI: Float = - 16777218 // OK -- lossless + + val posDouble:Double = 18014398509481984L// OK + val truncatedPosDouble:Double = 18014398509481985L // deprecated + val losslessPosDouble:Double = 18014398509481988L // OK -- lossless + val negDouble: Double = - 18014398509481984L // OK + val truncatedNegDouble: Double = - 18014398509481985L // deprecated + val losslessNegDouble: Double = - 18014398509481988L // OK -- lossless + + // literals don't get a pass -- *especially* literals! + + // 0x7ffffffc0 - 0x7fffffff + // Set[Float](2147483584, 2147483645, 2147483646, 2147483647) + def literals = Set[Float](0x7fffffc0, 0x7ffffffd, 0x7ffffffe, 0x7fffffff) + def longingly = Set[Float](0x7fffffc0L, 0x7ffffffdL, 0x7ffffffeL, 0x7fffffffL) + + def `pick one` = Set[Float](0x1000003, 0x1000004, 0x1000005) + + def `no warn` = 1f + 2147483584 + def `no warn either` = 2147483584 + 1f + def f = 1f + def `no warn sowieso` = f + 2147483584 +} diff --git a/test/files/neg/deprecationsFor3.check b/test/files/neg/deprecationsFor3.check new file mode 100644 index 000000000000..5b47c9def436 --- /dev/null +++ b/test/files/neg/deprecationsFor3.check @@ -0,0 +1,31 @@ +deprecationsFor3.scala:4: warning: Unicode escapes in triple quoted strings are deprecated; use the literal character instead + def inTripleQuoted = """\u0041""" // deprecation + ^ +deprecationsFor3.scala:16: warning: Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). +To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. 
+ `x` (42) // migration + ^ +deprecationsFor3.scala:5: warning: Unicode escapes in raw interpolations are deprecated; use literal characters instead + def inRawInterpolation = raw"\u0041" // deprecation + ^ +deprecationsFor3.scala:6: warning: Unicode escapes in raw interpolations are deprecated; use literal characters instead + def inRawTripleQuoted = raw"""\u0041""" // deprecation + ^ +deprecationsFor3.scala:29: warning: Implicit definition should have explicit type (inferred String => Option[Int]) [quickfixable] + implicit def b = _.toIntOption // error + ^ +deprecationsFor3.scala:31: warning: Implicit definition should have explicit type (inferred String) [quickfixable] + implicit def s = "" // error + ^ +deprecationsFor3.scala:30: warning: Implicit definition should have explicit type (inferred Int) [quickfixable] + implicit val i = 0 // error + ^ +deprecationsFor3.scala:34: warning: method any2stringadd in object Predef is deprecated (since 2.13.0): Implicit injection of + is deprecated. Convert to String to call + +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + ^ +deprecationsFor3.scala:38: warning: shadowing a nested class of a parent is deprecated but class X shadows class X defined in class A; rename the class to something else + class B extends A { class X; def f = new X } + ^ +error: No warnings can be incurred under -Werror. 
+9 warnings +1 error diff --git a/test/files/neg/deprecationsFor3.scala b/test/files/neg/deprecationsFor3.scala new file mode 100644 index 000000000000..b06e613b6fc9 --- /dev/null +++ b/test/files/neg/deprecationsFor3.scala @@ -0,0 +1,39 @@ +//> using options -deprecation -Werror -Xmigration + +object UnicodeEscapes { + def inTripleQuoted = """\u0041""" // deprecation + def inRawInterpolation = raw"\u0041" // deprecation + def inRawTripleQuoted = raw"""\u0041""" // deprecation +} + +object InfixNewline extends App { + class K { def x(y: Int) = 0 } + + def x(a: Int) = 1 + + def ok = { + (new K) + `x` (42) // migration + } +} + +case class CaseCompanionMods private (x: Int) // nothing + +trait InferredBase { def f: Object } +object InferredSub extends InferredBase { def f = "a" } // nothing + +trait ExplicitImplicitsBase { + implicit def b: String => Option[Int] +} +object ExplicitImplicits extends ExplicitImplicitsBase { + implicit def b = _.toIntOption // error + implicit val i = 0 // error + implicit def s = "" // error +} + +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + +object NameShadowing { + class A { class X } + class B extends A { class X; def f = new X } +} diff --git a/test/files/neg/discard-advice-a.check b/test/files/neg/discard-advice-a.check new file mode 100644 index 000000000000..acdcb1a30ca9 --- /dev/null +++ b/test/files/neg/discard-advice-a.check @@ -0,0 +1,12 @@ +discard-advice-a.scala:7: warning: unused value of type scala.concurrent.Future[Int] + Future(42) + ^ +discard-advice-a.scala:10: warning: unused value of type scala.concurrent.Future[Int] + Future(42) + ^ +discard-advice-a.scala:11: warning: unused value of type Boolean(true) + true + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/discard-advice-a.scala b/test/files/neg/discard-advice-a.scala new file mode 100644 index 000000000000..4e825ee92178 --- /dev/null +++ b/test/files/neg/discard-advice-a.scala @@ -0,0 +1,13 @@ +//> using options -Werror -Wnonunit-statement -Wvalue-discard + +import concurrent._, ExecutionContext.Implicits._ + +class C { + def f(): Unit = { + Future(42) + } + def g(): Unit = { + Future(42) + true + } +} diff --git a/test/files/neg/discard-advice-b.check b/test/files/neg/discard-advice-b.check new file mode 100644 index 000000000000..dc786b099b61 --- /dev/null +++ b/test/files/neg/discard-advice-b.check @@ -0,0 +1,9 @@ +discard-advice-b.scala:7: warning: discarded non-Unit value of type scala.concurrent.Future[Int] + Future(42) + ^ +discard-advice-b.scala:11: warning: discarded non-Unit value of type Boolean(true) + true + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/discard-advice-b.scala b/test/files/neg/discard-advice-b.scala new file mode 100644 index 000000000000..3ff081cff8eb --- /dev/null +++ b/test/files/neg/discard-advice-b.scala @@ -0,0 +1,13 @@ +//> using options -Werror -Wvalue-discard + +import concurrent._, ExecutionContext.Implicits._ + +class C { + def f(): Unit = { + Future(42) + } + def g(): Unit = { + Future(42) + true + } +} diff --git a/test/files/neg/discard-advice-c.check b/test/files/neg/discard-advice-c.check new file mode 100644 index 000000000000..f05cf5a48fb5 --- /dev/null +++ b/test/files/neg/discard-advice-c.check @@ -0,0 +1,6 @@ +discard-advice-c.scala:11: warning: discarded pure expression does nothing + true + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/discard-advice-c.scala b/test/files/neg/discard-advice-c.scala new file mode 100644 index 000000000000..c5784e09a9c0 --- /dev/null +++ b/test/files/neg/discard-advice-c.scala @@ -0,0 +1,13 @@ +//> using options -Werror + +import concurrent._, ExecutionContext.Implicits._ + +class C { + def f(): Unit = { + Future(42) + } + def g(): Unit = { + Future(42) + true + } +} diff --git a/test/files/neg/discard-advice-d.check b/test/files/neg/discard-advice-d.check new file mode 100644 index 000000000000..652de3b28504 --- /dev/null +++ b/test/files/neg/discard-advice-d.check @@ -0,0 +1,15 @@ +discard-advice-d.scala:7: warning: unused value of type scala.concurrent.Future[Int] +Applicable -Wconf / @nowarn filters for this warning: msg=, cat=other-pure-statement, site=C.f + Future(42) + ^ +discard-advice-d.scala:10: warning: unused value of type scala.concurrent.Future[Int] +Applicable -Wconf / @nowarn filters for this warning: msg=, cat=other-pure-statement, site=C.g + Future(42) + ^ +discard-advice-d.scala:11: warning: unused value of type Boolean(true) +Applicable -Wconf / @nowarn filters for this warning: msg=, cat=other-pure-statement, site=C.g + true + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/discard-advice-d.scala b/test/files/neg/discard-advice-d.scala new file mode 100644 index 000000000000..e685981bb6da --- /dev/null +++ b/test/files/neg/discard-advice-d.scala @@ -0,0 +1,13 @@ +//> using options -Wconf:any:warning-verbose -Werror -Wnonunit-statement -Wvalue-discard + +import concurrent._, ExecutionContext.Implicits._ + +class C { + def f(): Unit = { + Future(42) + } + def g(): Unit = { + Future(42) + true + } +} diff --git a/test/files/neg/divergent-implicit.check b/test/files/neg/divergent-implicit.check index d4a3ddfc71c0..cb234f2616c1 100644 --- a/test/files/neg/divergent-implicit.check +++ b/test/files/neg/divergent-implicit.check @@ -3,8 +3,8 @@ divergent-implicit.scala:4: error: type mismatch; required: String val x1: String = 1 ^ -divergent-implicit.scala:5: error: diverging implicit expansion for type Int => String -starting with method $conforms in object Predef +divergent-implicit.scala:5: error: diverging implicit expansion for type Int => Nothing +starting with method cast in object Test1 val x2: String = cast[Int, String](1) ^ divergent-implicit.scala:14: error: type mismatch; @@ -17,4 +17,4 @@ divergent-implicit.scala:15: error: type mismatch; required: Test2.Bar val y: Bar = new Baz ^ -four errors found +4 errors diff --git a/test/files/neg/dotless-targs-a.check b/test/files/neg/dotless-targs-a.check new file mode 100644 index 000000000000..087020309552 --- /dev/null +++ b/test/files/neg/dotless-targs-a.check @@ -0,0 +1,16 @@ +dotless-targs-a.scala:4: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def fn2 = List apply[Int] 2 + ^ +dotless-targs-a.scala:9: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) + ^ +dotless-targs-a.scala:9: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) + ^ +3 errors diff --git a/test/files/neg/dotless-targs-a.scala b/test/files/neg/dotless-targs-a.scala new file mode 100644 index 000000000000..a1bfe41655cc --- /dev/null +++ b/test/files/neg/dotless-targs-a.scala @@ -0,0 +1,10 @@ +//> using options -Xsource:3 -Yrangepos:false +class A { + def fn1 = List apply 1 + def fn2 = List apply[Int] 2 + + def g1: Char = "g1" toList 0 + def g2: Char = "g2" apply 1 + + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) +} diff --git a/test/files/neg/dotless-targs-b.check b/test/files/neg/dotless-targs-b.check new file mode 100644 index 000000000000..bcd970176bc8 --- /dev/null +++ b/test/files/neg/dotless-targs-b.check @@ -0,0 +1,16 @@ +dotless-targs-b.scala:4: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def fn2 = List apply[Int] 2 + ^ +dotless-targs-b.scala:9: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) + ^ +dotless-targs-b.scala:9: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) + ^ +3 errors diff --git a/test/files/neg/dotless-targs-b.scala b/test/files/neg/dotless-targs-b.scala new file mode 100644 index 000000000000..838d6cb49842 --- /dev/null +++ b/test/files/neg/dotless-targs-b.scala @@ -0,0 +1,10 @@ +//> using options -Werror -Xlint -Xsource:3 -Yrangepos:false +class A { + def fn1 = List apply 1 + def fn2 = List apply[Int] 2 + + def g1: Char = "g1" toList 0 + def g2: Char = "g2" apply 1 + + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) +} diff --git a/test/files/neg/dotless-targs-ranged-a.check b/test/files/neg/dotless-targs-ranged-a.check new file mode 100644 index 000000000000..2f4e7e9a1c22 --- /dev/null +++ b/test/files/neg/dotless-targs-ranged-a.check @@ -0,0 +1,21 @@ +dotless-targs-ranged-a.scala:4: warning: type application is not allowed for infix operators [quickfixable] + def fn2 = List apply[Int] 2 + ^ +dotless-targs-ranged-a.scala:9: warning: type application is not allowed for infix operators [quickfixable] + def h1 = List apply[List[Int]] (List(1), List(2)) 
mapConserve[List[Any]] (x => x) + ^ +dotless-targs-ranged-a.scala:9: warning: type application is not allowed for infix operators [quickfixable] + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) + ^ +dotless-targs-ranged-a.scala:13: warning: type application is not allowed for infix operators [quickfixable] + def eval = 1 ->[Int] 2 + ^ +dotless-targs-ranged-a.scala:14: warning: type application is not allowed for infix operators [quickfixable] + def evil = new A() op [Int, String ] 42 + ^ +dotless-targs-ranged-a.scala:11: warning: type parameter A defined in method op shadows class A defined in package . You may want to rename your type parameter, or possibly remove it. + def op[A, B](i: Int): Int = 2*i + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/dotless-targs-ranged-a.scala b/test/files/neg/dotless-targs-ranged-a.scala new file mode 100644 index 000000000000..f2416b0aa372 --- /dev/null +++ b/test/files/neg/dotless-targs-ranged-a.scala @@ -0,0 +1,16 @@ +//> using options -Wconf:cat=scala3-migration:w -Werror -Xlint -Xsource:3 +class A { + def fn1 = List apply 1 + def fn2 = List apply[Int] 2 + + def g1: Char = "g1" toList 0 + def g2: Char = "g2" apply 1 + + def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) + + def op[A, B](i: Int): Int = 2*i + + def eval = 1 ->[Int] 2 + def evil = new A() op [Int, String ] 42 +} + diff --git a/test/files/neg/dotless-targs.check b/test/files/neg/dotless-targs.check index 4aab939f6141..4cb371f3f331 100644 --- a/test/files/neg/dotless-targs.check +++ b/test/files/neg/dotless-targs.check @@ -1,4 +1,4 @@ -dotless-targs.scala:2: error: type application is not allowed for postfix operators +dotless-targs.scala:4: error: type application is not allowed for postfix operators def f1 = "f1" isInstanceOf[String] // not ok - ^ -one error found + ^ +1 error diff --git a/test/files/neg/dotless-targs.scala 
b/test/files/neg/dotless-targs.scala index eff63cbec4f9..e4b41535fb19 100644 --- a/test/files/neg/dotless-targs.scala +++ b/test/files/neg/dotless-targs.scala @@ -1,3 +1,5 @@ +//> using options -Xsource:3 -language:postfixOps +// class A { def f1 = "f1" isInstanceOf[String] // not ok def f2 = "f2".isInstanceOf[String] // ok diff --git a/test/files/neg/double-def-top-level.check b/test/files/neg/double-def-top-level.check index 85b16e81e5fa..331b54bb2ccb 100644 --- a/test/files/neg/double-def-top-level.check +++ b/test/files/neg/double-def-top-level.check @@ -4,4 +4,4 @@ class C D_3.scala:2: error: O is already defined as object O object O ^ -two errors found +2 errors diff --git a/test/files/neg/double-def-top-level/B_2.scala b/test/files/neg/double-def-top-level/B_2.scala index c328e8c964a9..23724da41c86 100644 --- a/test/files/neg/double-def-top-level/B_2.scala +++ b/test/files/neg/double-def-top-level/B_2.scala @@ -1,2 +1,2 @@ class C /* noerror */ -object O /* noerror */ \ No newline at end of file +object O /* noerror */ diff --git a/test/files/neg/double-def-top-level/C_3.scala b/test/files/neg/double-def-top-level/C_3.scala index e1c327c15aa6..518e0d1c54c7 100644 --- a/test/files/neg/double-def-top-level/C_3.scala +++ b/test/files/neg/double-def-top-level/C_3.scala @@ -1,2 +1,2 @@ class C -object O \ No newline at end of file +object O diff --git a/test/files/neg/double-feature.check b/test/files/neg/double-feature.check deleted file mode 100644 index d4732e27bbbc..000000000000 --- a/test/files/neg/double-feature.check +++ /dev/null @@ -1,19 +0,0 @@ -double-feature.scala:4: warning: implicit conversion method f should be enabled -by making the implicit value scala.language.implicitConversions visible. -This can be achieved by adding the import clause 'import scala.language.implicitConversions' -or by setting the compiler option -language:implicitConversions. 
-See the Scaladoc for value scala.language.implicitConversions for a discussion -why the feature should be explicitly enabled. - implicit def f(t: T): String = "I am the tee" - ^ -double-feature.scala:6: warning: postfix operator + should be enabled -by making the implicit value scala.language.postfixOps visible. -This can be achieved by adding the import clause 'import scala.language.postfixOps' -or by setting the compiler option -language:postfixOps. -See the Scaladoc for value scala.language.postfixOps for a discussion -why the feature should be explicitly enabled. - val g: Int => Int = 1 + - ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found diff --git a/test/files/neg/double-feature.flags b/test/files/neg/double-feature.flags deleted file mode 100644 index 3396aad951c1..000000000000 --- a/test/files/neg/double-feature.flags +++ /dev/null @@ -1 +0,0 @@ --feature -Xfatal-warnings diff --git a/test/files/neg/double-feature.scala b/test/files/neg/double-feature.scala deleted file mode 100644 index 15f04a123afd..000000000000 --- a/test/files/neg/double-feature.scala +++ /dev/null @@ -1,11 +0,0 @@ - -object Test { - trait T - implicit def f(t: T): String = "I am the tee" - //val n = "hello, world" length - val g: Int => Int = 1 + - def main(args: Array[String]): Unit = println {( - null.asInstanceOf[T] : String, - List(1).map(g), - )} -} diff --git a/test/files/neg/early-type-defs.check b/test/files/neg/early-type-defs.check new file mode 100644 index 000000000000..517b93f9b923 --- /dev/null +++ b/test/files/neg/early-type-defs.check @@ -0,0 +1,9 @@ +early-type-defs.scala:4: warning: early initializers are deprecated; they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. 
+object Test extends { type A1 = Int } with Runnable { def run() = () } + ^ +early-type-defs.scala:4: warning: early type members are deprecated: move them to the regular body; the semantics are the same +object Test extends { type A1 = Int } with Runnable { def run() = () } + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/early-type-defs.scala b/test/files/neg/early-type-defs.scala new file mode 100644 index 000000000000..8c425e9b5f24 --- /dev/null +++ b/test/files/neg/early-type-defs.scala @@ -0,0 +1,4 @@ +// +//> using options -Werror -Xlint +// +object Test extends { type A1 = Int } with Runnable { def run() = () } diff --git a/test/files/neg/equiv-migration.check b/test/files/neg/equiv-migration.check new file mode 100644 index 000000000000..254c756b8abf --- /dev/null +++ b/test/files/neg/equiv-migration.check @@ -0,0 +1,17 @@ +equiv-migration.scala:3: warning: object DeprecatedFloatEquiv in object Equiv has changed semantics in version 2.13.2: + The default implicit equivalence for floats no longer conforms to + to IEEE 754's behavior for -0.0F and NaN. + Import `Equiv.Float.IeeeEquiv` to recover the previous behavior. + See also https://www.scala-lang.org/api/current/scala/math/Equiv$$Float$.html. + val f = Equiv[Float] + ^ +equiv-migration.scala:4: warning: object DeprecatedDoubleEquiv in object Equiv has changed semantics in version 2.13.2: + The default implicit equivalence for doubles no longer conforms to + to IEEE 754's behavior for -0.0D and NaN. + Import `Equiv.Double.IeeeEquiv` to recover the previous behavior. + See also https://www.scala-lang.org/api/current/scala/math/Equiv$$Double$.html. + val d = Equiv[Double] + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/equiv-migration.scala b/test/files/neg/equiv-migration.scala new file mode 100644 index 000000000000..19ed773a3245 --- /dev/null +++ b/test/files/neg/equiv-migration.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Xmigration +object Test { + val f = Equiv[Float] + val d = Equiv[Double] + + // In our similar test for `Ordering`, we check that the migration warning is produced + // when e.g. calling `sorted` on a list. I wanted to add something like that here, + // but I couldn't find anywhere where `Equiv` is actually used, except by + // `scala.collection.concurrent.TrieMap`, but that class defaults to using + // the (deprecated) `Equiv.universal`, so you don't run into the migration warning + // normally. +} diff --git a/test/files/neg/error_dependentMethodTpeConversionToFunction.check b/test/files/neg/error_dependentMethodTpeConversionToFunction.check deleted file mode 100644 index 3496a552c487..000000000000 --- a/test/files/neg/error_dependentMethodTpeConversionToFunction.check +++ /dev/null @@ -1,4 +0,0 @@ -error_dependentMethodTpeConversionToFunction.scala:4: error: method with dependent type (x: AnyRef)x.type cannot be converted to function value - val x: Any => Any = foo - ^ -one error found diff --git a/test/files/neg/error_dependentMethodTpeConversionToFunction.scala b/test/files/neg/error_dependentMethodTpeConversionToFunction.scala deleted file mode 100644 index 22649e509869..000000000000 --- a/test/files/neg/error_dependentMethodTpeConversionToFunction.scala +++ /dev/null @@ -1,5 +0,0 @@ -// test DependentMethodTpeConversionToFunctionError -object Test { - def foo(x: AnyRef): x.type = x - val x: Any => Any = foo -} \ No newline at end of file diff --git a/test/files/neg/error_tooManyArgsPattern.check b/test/files/neg/error_tooManyArgsPattern.check index ee401ad061ab..33bdf156570f 100644 --- a/test/files/neg/error_tooManyArgsPattern.check +++ b/test/files/neg/error_tooManyArgsPattern.check @@ -1,4 +1,4 
@@ error_tooManyArgsPattern.scala:3: error: too many arguments for unapply pattern, maximum = 22 case List(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => 7 ^ -one error found +1 error diff --git a/test/files/neg/eta-expand-star.check b/test/files/neg/eta-expand-star.check index eba17210148e..808c76f8d3c2 100644 --- a/test/files/neg/eta-expand-star.check +++ b/test/files/neg/eta-expand-star.check @@ -1,4 +1,4 @@ -eta-expand-star.scala:6: error: too many arguments (2) for method apply: (v1: Seq[T])Unit in trait Function1 +eta-expand-star.scala:6: error: too many arguments (found 2, expected 1) for method apply: (v1: Seq[T]): Unit in trait Function1 g(1, 2) ^ -one error found +1 error diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check index eaa8517764e6..d78004522ea9 100644 --- a/test/files/neg/exhausting.check +++ b/test/files/neg/exhausting.check @@ -1,27 +1,27 @@ -exhausting.scala:22: warning: match may not be exhaustive. +exhausting.scala:23: warning: match may not be exhaustive. It would fail on the following inputs: List(_), List(_, _, _) def fail1[T](xs: List[T]) = xs match { ^ -exhausting.scala:28: warning: match may not be exhaustive. +exhausting.scala:29: warning: match may not be exhaustive. It would fail on the following input: Nil def fail2[T](xs: List[T]) = xs match { ^ -exhausting.scala:33: warning: match may not be exhaustive. +exhausting.scala:34: warning: match may not be exhaustive. It would fail on the following input: List((x: Int forSome x not in (1, 2))) def fail3a(xs: List[Int]) = xs match { ^ -exhausting.scala:40: warning: match may not be exhaustive. +exhausting.scala:41: warning: match may not be exhaustive. It would fail on the following input: Bar3 def fail3[T](x: Foo[T]) = x match { ^ -exhausting.scala:48: warning: match may not be exhaustive. +exhausting.scala:49: warning: match may not be exhaustive. 
It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2) def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match { ^ -exhausting.scala:57: warning: match may not be exhaustive. +exhausting.scala:58: warning: match may not be exhaustive. It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2) def fail5[T](xx: (Foo[T], Foo[T])) = xx match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found -one error found +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/exhausting.scala b/test/files/neg/exhausting.scala index 7df5aa16aa6d..d595aab64b26 100644 --- a/test/files/neg/exhausting.scala +++ b/test/files/neg/exhausting.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { sealed abstract class Foo[T] case object Bar1 extends Foo[Int] diff --git a/test/files/neg/f-interp-pos.check b/test/files/neg/f-interp-pos.check new file mode 100644 index 000000000000..4014c719dbe3 --- /dev/null +++ b/test/files/neg/f-interp-pos.check @@ -0,0 +1,4 @@ +f-interp-pos.scala:5: error: invalid string interpolation $<, expected: $$, $", $identifier or ${expression} + def oops = f"$s%s $ using options -Werror -Xlint:missing-interpolator + class A { val bippy = 123 @@ -92,3 +93,86 @@ package curry { def f5 = "I draw the line at $palomino" // no warn } } + +package companions { + class X + object X + class C { + def f1 = "$X" // nowarn companion + def f2 = "$Byte" // nowarn companion + def f3 = "$Char" // nowarn companion + def f4 = "$Short" // nowarn companion + def f5 = "$Int" // nowarn companion + def f6 = "$Float" // nowarn companion + def f7 = "$Double" // nowarn companion + def f8 = "$Character" // nowarn companion + def f9 = "$Integer" // nowarn companion + def f0 = "$companions" // nowarn companion + } +} +package object companions + +object `t10125 avoid forcing owners` { + implicit 
class HasIr(s: String) { + def ir: Int = 1234 + } + + val bar = "$bar".ir // nowarn owner + + val x = "$x" // nowarn owner +} + +object t10456 { + @deprecated("${myProperty}") + var myProperty: String = _ +} + +package pancake { } + +object Tester { + type NonVal = Int + + def ok = "Don't warn on $nosymbol interpolated." + + def pass = "Don't warn on $pancake package names." + + def types = "Or $NonVal type symbols either." + + def bar = "bar" + def f = { + val foo = "bar" + "An important $foo message!" // warn on ident in scope + } + def g = { + val foo = "bar" + "A doubly important ${foo * 2} message!" // warn on some expr, see below + } + def h = s"Try using '$$bar' instead." // no warn + def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test + def j = s"Try using '${ "something like $bar" }' instead." // warn + def k = f"Try using '$bar' instead." // no warn on other std interps + def p = "Template ${} {}" // no warn on unlikely or empty expressions + def q = "${}$bar" // disables subsequent checks! (a feature) + def r = "${}${bar}" // disables subsequent checks! 
(a feature) + + def v = "${baz}${bar}" // warn on second expr + def w = "${ op_* }" // warn, only cheap ident parsing + def x = "${ bar }" // warn, a cheap ident in scope + def y = "${ baz }" // no warn, cheap ident not in scope + def z = "${ baz * 3}" // warn, no expr parsing + + def thisly = "$this" + def exprly = "${this}" +} + +trait X { + val s = "hello" + val t = "$s" + val u = "a${s}b" + val v = "a$s b" +} + +trait DollarDollar { + val foo, bar = 42 + def s = "$foo$bar" +} diff --git a/test/files/neg/forward.check b/test/files/neg/forward.check index 252c990370e0..79630f888fbd 100644 --- a/test/files/neg/forward.check +++ b/test/files/neg/forward.check @@ -1,10 +1,13 @@ -forward.scala:6: error: forward reference extends over definition of value x +forward.scala:8: error: forward reference to value x defined on line 9 extends over definition of value x def f: Int = x; ^ -forward.scala:10: error: forward reference extends over definition of value x +forward.scala:12: error: forward reference to method g defined on line 14 extends over definition of value x def f: Int = g; ^ -forward.scala:15: error: forward reference extends over definition of variable x +forward.scala:17: error: forward reference to method g defined on line 19 extends over definition of variable x def f: Int = g; ^ -three errors found +forward.scala:29: error: forward reference to value ec defined on line 32 extends over definition of value z + a <- fInt + ^ +4 errors diff --git a/test/files/neg/forward.scala b/test/files/neg/forward.scala index d5c0851f09e3..bf1fc7ac8c95 100644 --- a/test/files/neg/forward.scala +++ b/test/files/neg/forward.scala @@ -1,3 +1,5 @@ +import scala.concurrent._ + object Test { def f: Int = x; val x: Int = f; @@ -21,4 +23,13 @@ object Test { Console.println("foo"); def g: Int = f; } + { + val fInt = Future.successful(1) + val z = for { + a <- fInt + } yield a + + implicit val ec: ExecutionContext = ExecutionContext.Implicits.global + z + } } diff --git 
a/test/files/neg/found-req-variance.check b/test/files/neg/found-req-variance.check index cc26458ac5e1..ef7aec9adeaa 100644 --- a/test/files/neg/found-req-variance.check +++ b/test/files/neg/found-req-variance.check @@ -178,8 +178,8 @@ You may wish to define T2 as +T2 instead. (SLS 4.5) found-req-variance.scala:105: error: type mismatch; found : scala.collection.immutable.Map[AnyRef,String] required: Map[String,String] -Note: AnyRef >: String, but trait Map is invariant in type A. +Note: AnyRef >: String, but trait Map is invariant in type K. You may wish to investigate a wildcard type such as `_ >: String`. (SLS 3.2.10) def g2 = Set[Map[String, String]]() + Map[AnyRef, String]() ^ -28 errors found +28 errors diff --git a/test/files/neg/found-req-variance.scala b/test/files/neg/found-req-variance.scala index 024b24c36747..de460f2d03b3 100644 --- a/test/files/neg/found-req-variance.scala +++ b/test/files/neg/found-req-variance.scala @@ -103,4 +103,4 @@ object Misc { class Trippy[+T1, T2, +T3] def g1 = Set[Trippy[AnyRef, AnyRef, AnyRef]]() + new Trippy[String, String, String] def g2 = Set[Map[String, String]]() + Map[AnyRef, String]() -} \ No newline at end of file +} diff --git a/test/files/neg/func-max-args.check b/test/files/neg/func-max-args.check new file mode 100644 index 000000000000..f9d8feb64f4c --- /dev/null +++ b/test/files/neg/func-max-args.check @@ -0,0 +1,4 @@ +func-max-args.scala:3: error: function values may not have more than 22 parameters, but 23 given + val func23: (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w) => Int + ^ +1 error diff --git a/test/files/neg/func-max-args.scala b/test/files/neg/func-max-args.scala new file mode 100644 index 000000000000..5c2e95de262b --- /dev/null +++ b/test/files/neg/func-max-args.scala @@ -0,0 +1,4 @@ +trait T { + val func22: (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v) => Int + val func23: (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w) => Int +} diff --git a/test/files/neg/gadts1.check 
b/test/files/neg/gadts1.check index 9b7ea5556a80..17b146956f20 100644 --- a/test/files/neg/gadts1.check +++ b/test/files/neg/gadts1.check @@ -6,4 +6,4 @@ gadts1.scala:20: error: type mismatch; required: a case Cell[a](x: Int) => c.x = 5 ^ -two errors found +2 errors diff --git a/test/files/neg/gadts2-strict.check b/test/files/neg/gadts2-strict.check deleted file mode 100644 index 173683b8b22d..000000000000 --- a/test/files/neg/gadts2-strict.check +++ /dev/null @@ -1,6 +0,0 @@ -gadts2-strict.scala:15: error: type mismatch; - found : Test.MyDouble - required: a - case NumTerm(n) => c.x = MyDouble(1.0) - ^ -one error found diff --git a/test/files/neg/gadts2-strict.scala b/test/files/neg/gadts2-strict.scala deleted file mode 100644 index aecd9d63c482..000000000000 --- a/test/files/neg/gadts2-strict.scala +++ /dev/null @@ -1,27 +0,0 @@ -// scalac: -Xstrict-inference -// A copy of pos/gadts2, which must fail under -Xstrict-inference. -object Test { - - abstract class Number - case class MyInt(n: Int) extends Number - case class MyDouble(d: Double) extends Number - - trait Term[a] - case class Cell[a](var x: a) extends Term[a] - final case class NumTerm(val n: Number) extends Term[Number] - - def f[a](t: Term[a], c: Cell[a]) { - t match { - case NumTerm(n) => c.x = MyDouble(1.0) - } - } - - val x: Term[Number] = NumTerm(MyInt(5)) - - def main(args: Array[String]) { - val cell = Cell[Number](MyInt(6)) - Console.println(cell) - f[Number](new NumTerm(MyInt(5)), cell) - Console.println(cell) - } -} diff --git a/test/files/neg/gadts2.check b/test/files/neg/gadts2.check deleted file mode 100644 index 229944f70e6c..000000000000 --- a/test/files/neg/gadts2.check +++ /dev/null @@ -1,6 +0,0 @@ -gadts2.scala:8: error: type mismatch; - found : String("abc") - required: B - (s1: Super[Any]) match { case Sub(f) => f("abc") } - ^ -one error found diff --git a/test/files/neg/gadts2.scala b/test/files/neg/gadts2.scala deleted file mode 100644 index 77d334340b5e..000000000000 --- 
a/test/files/neg/gadts2.scala +++ /dev/null @@ -1,13 +0,0 @@ -// scalac: -Xstrict-inference -trait Super[+A] -case class Sub[B](f: B => B) extends Super[B] - -object Test extends App { - val s1 = Sub((x: Int) => x) - - (s1: Super[Any]) match { case Sub(f) => f("abc") } -} -// java.lang.ClassCastException: java.lang.String cannot be cast to java.lang.Integer -// at scala.runtime.BoxesRunTime.unboxToInt(BoxesRunTime.java:105) -// at Test$$anonfun$1.apply(a.scala:5) -// at Test$.delayedEndpoint$Test$1(a.scala:7) diff --git a/test/files/neg/hidden-complexity.check b/test/files/neg/hidden-complexity.check new file mode 100644 index 000000000000..443ac084acb8 --- /dev/null +++ b/test/files/neg/hidden-complexity.check @@ -0,0 +1,9 @@ +hidden-complexity.scala:12: error: diverging implicit expansion for type Foo[List] +starting with method mkFoo in object Foo + implicitly[Foo[List]] + ^ +hidden-complexity.scala:13: error: diverging implicit expansion for type Bar[List] +starting with method mkBar in object Bar + implicitly[Bar[List]] + ^ +2 errors diff --git a/test/files/neg/hidden-complexity.scala b/test/files/neg/hidden-complexity.scala new file mode 100644 index 000000000000..465b79403fd6 --- /dev/null +++ b/test/files/neg/hidden-complexity.scala @@ -0,0 +1,14 @@ +trait Foo[F[_]] +object Foo { + implicit def mkFoo[F[_]](implicit ff: Foo[({ type λ[t] = F[F[t]] })#λ]): Foo[F] = ??? +} + +trait Bar[F[_]] +object Bar { + implicit def mkBar[F[_]](implicit bb: Bar[λ forSome { type λ[t] <: F[t] }]): Bar[F] = ??? 
+} + +object Test { + implicitly[Foo[List]] + implicitly[Bar[List]] +} diff --git a/test/files/neg/higherkind_novalue.check b/test/files/neg/higherkind_novalue.check index 932f7876b1ff..66d8045a5333 100644 --- a/test/files/neg/higherkind_novalue.check +++ b/test/files/neg/higherkind_novalue.check @@ -4,4 +4,4 @@ higherkind_novalue.scala:2: error: type m takes type parameters higherkind_novalue.scala:3: error: type m takes type parameters def y: m ^ -two errors found +2 errors diff --git a/test/files/neg/hk-bad-bounds.check b/test/files/neg/hk-bad-bounds.check index d6293993c141..686f6764715d 100644 --- a/test/files/neg/hk-bad-bounds.check +++ b/test/files/neg/hk-bad-bounds.check @@ -1,4 +1,4 @@ -hk-bad-bounds.scala:4: error: type arguments [Set] do not conform to class SeqFactory's type parameter bounds [CC[X] <: Seq[X] with scala.collection.generic.GenericTraversableTemplate[X,CC]] +hk-bad-bounds.scala:4: error: can't existentially abstract over parameterized type _1[X] def f(x: Boolean) = if (x) (null: SeqFactory[List]) else (null: SeqFactory[Set]) - ^ -one error found + ^ +1 error diff --git a/test/files/neg/hk-bad-bounds.scala b/test/files/neg/hk-bad-bounds.scala index 0ed0b4c38525..8893541f1ea1 100644 --- a/test/files/neg/hk-bad-bounds.scala +++ b/test/files/neg/hk-bad-bounds.scala @@ -1,4 +1,4 @@ -import collection.generic.SeqFactory +trait SeqFactory[CC[X] <: Seq[X]] class A { def f(x: Boolean) = if (x) (null: SeqFactory[List]) else (null: SeqFactory[Set]) diff --git a/test/files/neg/hk-existential-lb.check b/test/files/neg/hk-existential-lb.check new file mode 100644 index 000000000000..4779aa9cc784 --- /dev/null +++ b/test/files/neg/hk-existential-lb.check @@ -0,0 +1,8 @@ +hk-existential-lb.scala:5: error: type mismatch; + found : Functor[Option] + required: Functor[_[x] >: List[x]] +Note: Option <: [x]Any, but class Functor is invariant in type F. +You may wish to define F as +F instead. 
(SLS 4.5) + val someF: Functor[F] forSome { type F[x] >: List[x] } = new Functor[Option] + ^ +1 error diff --git a/test/files/neg/hk-existential-lb.scala b/test/files/neg/hk-existential-lb.scala new file mode 100644 index 000000000000..df4ff906fb63 --- /dev/null +++ b/test/files/neg/hk-existential-lb.scala @@ -0,0 +1,6 @@ +//> using options -language:higherKinds,existentials -Xfatal-warnings +// +class Functor[F[_]] +object Functor { + val someF: Functor[F] forSome { type F[x] >: List[x] } = new Functor[Option] +} diff --git a/test/files/neg/hk-typevar-unification.check b/test/files/neg/hk-typevar-unification.check index 7748dbee4bed..ff4f64475c2b 100644 --- a/test/files/neg/hk-typevar-unification.check +++ b/test/files/neg/hk-typevar-unification.check @@ -1,22 +1,22 @@ -hk-typevar-unification.scala:15: error: inferred kinds of the type arguments ([_ <: B]Foo[_]) do not conform to the expected kinds of the type parameters (type F). +hk-typevar-unification.scala:16: error: inferred kinds of the type arguments ([_ <: B]Foo[_]) do not conform to the expected kinds of the type parameters (type F). [_ <: B]Foo[_]'s type parameters do not match type F's expected parameters: -type _ (in class Foo)'s bounds <: B are stricter than type _'s declared bounds >: Nothing <: Any +type _'s bounds <: B are stricter than type _'s declared bounds >: Nothing <: Any f(tcFoo) ^ -hk-typevar-unification.scala:15: error: type mismatch; +hk-typevar-unification.scala:16: error: type mismatch; found : TC[Foo] required: TC[F] f(tcFoo) ^ -hk-typevar-unification.scala:18: error: inferred kinds of the type arguments ([_ <: B]Foo[_]) do not conform to the expected kinds of the type parameters (type F). +hk-typevar-unification.scala:19: error: inferred kinds of the type arguments ([_ <: B]Foo[_]) do not conform to the expected kinds of the type parameters (type F). 
[_ <: B]Foo[_]'s type parameters do not match type F's expected parameters: -type _ (in class Foo) is invariant, but type _ is declared covariant -type _ (in class Foo)'s bounds <: B are stricter than type _'s declared bounds >: Nothing <: Any +type _ is invariant, but type _ is declared covariant +type _'s bounds <: B are stricter than type _'s declared bounds >: Nothing <: Any g(tcFoo) ^ -hk-typevar-unification.scala:18: error: type mismatch; +hk-typevar-unification.scala:19: error: type mismatch; found : TC[Foo] required: TC[F] g(tcFoo) ^ -four errors found +4 errors diff --git a/test/files/neg/hk-typevar-unification.scala b/test/files/neg/hk-typevar-unification.scala index f79329395424..2cd9c93b2b43 100644 --- a/test/files/neg/hk-typevar-unification.scala +++ b/test/files/neg/hk-typevar-unification.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 +// class A class B trait TC[F[_ <: A]] diff --git a/test/files/neg/hkgadt.check b/test/files/neg/hkgadt.check index ef302a9abf38..ce711f91ed5c 100644 --- a/test/files/neg/hkgadt.check +++ b/test/files/neg/hkgadt.check @@ -28,4 +28,4 @@ hkgadt.scala:33: error: type mismatch; required: A case Baz1() => 1 ^ -6 errors found +6 errors diff --git a/test/files/neg/i10715a.check b/test/files/neg/i10715a.check new file mode 100644 index 000000000000..ca65c021b764 --- /dev/null +++ b/test/files/neg/i10715a.check @@ -0,0 +1,38 @@ +i10715a.scala:16: error: type mismatch; + found : Int + required: String + c.f: String // error + ^ +i10715a.scala:17: error: polymorphic expression cannot be instantiated to expected type; + found : [A]Int + required: String + c.g: String // error + ^ +i10715a.scala:18: error: value bad is not a member of Int + c.f.bad // error + ^ +i10715a.scala:19: error: value bad is not a member of Int + c.g.bad // error + ^ +i10715a.scala:21: error: type mismatch; + found : String("") + required: Int + c.f("") // error + ^ +i10715a.scala:22: error: type mismatch; + found : 
String("") + required: Int + c.g("") // error + ^ +i10715a.scala:23: error: overloaded method g with alternatives: + (x: Int)Parent + Int + cannot be applied to (String) + c.g[Int]("") // error + ^ +i10715a.scala:24: error: type mismatch; + found : Int + required: String => String + c.g[Int]: (String => String) // error + ^ +8 errors diff --git a/test/files/neg/i10715a.scala b/test/files/neg/i10715a.scala new file mode 100644 index 000000000000..14bcc5a3a4cc --- /dev/null +++ b/test/files/neg/i10715a.scala @@ -0,0 +1,27 @@ +class Parent { + def f(x: Int): Parent = ??? + def f: Int = 0 + + def g[A](x: Int): Parent = ??? + def g[A]: Int = 0 +} + +class Sub extends Parent { + override def f(x: Int): Parent = ??? + override def g[A](x: Int): Parent = ??? +} + +class C { + def bad(c: Sub): Unit = { + c.f: String // error + c.g: String // error + c.f.bad // error + c.g.bad // error + + c.f("") // error + c.g("") // error + c.g[Int]("") // error + c.g[Int]: (String => String) // error + c.g[Int]: (Int => Parent) // ok + } +} diff --git a/test/files/neg/i10715b.check b/test/files/neg/i10715b.check new file mode 100644 index 000000000000..577d8e421fd0 --- /dev/null +++ b/test/files/neg/i10715b.check @@ -0,0 +1,7 @@ +i10715b.scala:12: error: ambiguous reference to overloaded definition, +both method f in class Sub of type (x: Int)(implicit s: String): Unit +and method f in class Sub of type (x: Int): Unit +match argument types (Int) + def bad(c: Sub): Unit = c.f(1) // error: ambiguous overload + ^ +1 error diff --git a/test/files/neg/i10715b.scala b/test/files/neg/i10715b.scala new file mode 100644 index 000000000000..8cc8076ac193 --- /dev/null +++ b/test/files/neg/i10715b.scala @@ -0,0 +1,13 @@ +class Parent { + def f(x: Int): Unit = () + def f: Int = 0 +} + +class Sub extends Parent { + override def f(x: Int): Unit = () + def f(x: Int)(implicit s: String): Unit = () +} + +class C { + def bad(c: Sub): Unit = c.f(1) // error: ambiguous overload +} diff --git 
a/test/files/neg/i15552.check b/test/files/neg/i15552.check index a75cb129e1cb..849f2936c7a1 100644 --- a/test/files/neg/i15552.check +++ b/test/files/neg/i15552.check @@ -4,4 +4,4 @@ i15552.scala:4: error: not found: value g i15552.scala:22: error: not found: value using def f() = g(using) // error: no using, does not actually suggest Using ^ -two errors found +2 errors diff --git a/test/files/neg/i15552.scala b/test/files/neg/i15552.scala index e9363222d038..95f608efcb52 100644 --- a/test/files/neg/i15552.scala +++ b/test/files/neg/i15552.scala @@ -1,4 +1,4 @@ -// scalac: -Werror +//> using options -Werror class C { val ctx: C = new C() def f() = g(42)(using ctx) // error: no g diff --git a/test/files/neg/i17266.check b/test/files/neg/i17266.check new file mode 100644 index 000000000000..cf40a08872d6 --- /dev/null +++ b/test/files/neg/i17266.check @@ -0,0 +1,27 @@ +i17266.scala:13: warning: synchronized not selected from this instance + synchronized { // error + ^ +i17266.scala:26: warning: wait not selected from this instance + wait() // error + ^ +i17266.scala:32: warning: notify not selected from this instance + def `maybe notify`(): Unit = notify() + ^ +i17266.scala:33: warning: notifyAll not selected from this instance + def `maybe notifyAll`(): Unit = notifyAll() + ^ +i17266.scala:53: warning: conversion int2Integer adds universal member method synchronized to class Int + 1.synchronized { // warn + ^ +i17266.scala:165: warning: conversion int2Integer adds universal member method wait to class Int + 1.wait() // not an error (should be?) + ^ +i17266.scala:183: warning: conversion int2Integer adds universal member method wait to class Int + 1.wait(10) // not an error (should be?) + ^ +i17266.scala:53: warning: Suspicious `synchronized` call involving boxed primitive `Integer` + 1.synchronized { // warn + ^ +error: No warnings can be incurred under -Werror. 
+8 warnings +1 error diff --git a/test/files/neg/i17266.scala b/test/files/neg/i17266.scala new file mode 100644 index 000000000000..411fa262c73a --- /dev/null +++ b/test/files/neg/i17266.scala @@ -0,0 +1,200 @@ + +//> using options -Werror -Xsource:3 -Xlint:universal-methods + +// Dotty has top-level defs, so the reference is linted based on context. +// For Scala 2, check result of looking up the identifier. +// Universal members are not imported from root contexts (in particular, Predef). +// Use an explicit import to exercise the warning. + +class Test(val x: Any) extends AnyVal { + import Predef.* + + def test1 = + synchronized { // error + println("hello") + } + + /* correctly errors in Scala 2 + def test2 = + this.synchronized { // not an error (should be?) + println("hello") + } + */ + + // surprise, ~not~ a universal member + def test16 = + wait() // error + + // OK because Any, so this is kosher + def `maybe hashcode` = hashCode + + // it does know about notify + def `maybe notify`(): Unit = notify() + def `maybe notifyAll`(): Unit = notifyAll() + +} + +// Can't work these tests inside value class. +// +class ObjectHolder { + + object MyLib + + /* ambiguous + def test3 = { + import MyLib.* + synchronized { // error + println("hello") + } + } + */ + + def test4 = + 1.synchronized { // warn + println("hello") + } + + object Test4 { + synchronized { // not an error + println("hello") + } + } + + object Test5 { + def test5 = + synchronized { // not an error + println("hello") + } + } + + object Test6 { + import MyLib.* + synchronized { // not an error + println("hello") + } + } + + object Test7 { + import MyLib.* + def test7 = + synchronized { // not an error + println("hello") + } + } + + /* + object Test7b { + def test8 = + import MyLib.* + synchronized { // already an error: Reference to synchronized is ambiguous. 
+ println("hello") + } + } + */ + + class Test8 { + synchronized { // not an error + println("hello") + } + } + + class Test9 { + def test5 = + synchronized { // not an error + println("hello") + } + } + + class Test10 { + import MyLib.* + synchronized { // not an error + println("hello") + } + } + + class Test11 { + import MyLib.* + def test7 = + synchronized { // not an error + println("hello") + } + } + + trait Test12 { + synchronized { // not an error + println("hello") + } + } + + trait Test13 { + def test5 = + synchronized { // not an error + println("hello") + } + } + + trait Test14 { + import MyLib.* + synchronized { // not an error + println("hello") + } + } + + trait Test15 { + import MyLib.* + def test7 = + synchronized { // not an error + println("hello") + } + } + + def test16 = + wait() // error + + def test17 = + this.wait() // not an error (should be?) + + /* ambiguous + def test18 = { + import MyLib.* + wait() // error + } + */ + + def test19 = + 1.wait() // not an error (should be?) + + /* ambiguous + def test20 = + wait(10) // error + */ + + def test21 = + this.wait(10) // not an error (should be?) + + /* ambiguous + def test22 = { + import MyLib.* + wait(10) // error + } + */ + + def test23 = + 1.wait(10) // not an error (should be?) + + def test24 = + hashCode() // error + + def test25 = + this.hashCode() // not an error (should be?) + + /* ambiguous + def test26 = { + import MyLib.* + hashCode() // error + } + */ + + def test27 = + 1.hashCode()// not an error (should be? 
probably not) +} diff --git a/test/files/neg/i17266c.check b/test/files/neg/i17266c.check new file mode 100644 index 000000000000..ab0d467b889e --- /dev/null +++ b/test/files/neg/i17266c.check @@ -0,0 +1,36 @@ +i17266c.scala:7: warning: eq not selected from this instance + def f = eq("hello, world") + ^ +i17266c.scala:8: warning: synchronized not selected from this instance + def g = synchronized { println("hello, world") } + ^ +i17266c.scala:12: warning: eq not selected from this instance + def f = eq(s) + ^ +i17266c.scala:13: warning: synchronized not selected from this instance + def g = synchronized { println(s) } + ^ +i17266c.scala:18: warning: eq not selected from this instance + def f = eq(s) + ^ +i17266c.scala:19: warning: synchronized not selected from this instance + def g = synchronized { println(s) } + ^ +i17266c.scala:7: warning: comparing values of types X.type and String using `eq` will always yield false + def f = eq("hello, world") + ^ +i17266c.scala:12: warning: comparing values of types Predef.type and String using `eq` will always yield false + def f = eq(s) + ^ +i17266c.scala:18: warning: comparing values of types p.package.type and String using `eq` will always yield false + def f = eq(s) + ^ +i17266c.scala:22: warning: comparing values of types X.type and String using `eq` will always yield false + def f = X.eq("hello, world") + ^ +i17266c.scala:27: warning: Z and String are unrelated: they will most likely never compare equal + def f = eq("hello, world") + ^ +error: No warnings can be incurred under -Werror. 
+11 warnings +1 error diff --git a/test/files/neg/i17266c.scala b/test/files/neg/i17266c.scala new file mode 100644 index 000000000000..1858c3427711 --- /dev/null +++ b/test/files/neg/i17266c.scala @@ -0,0 +1,29 @@ +//> using options -Werror -Xlint:universal-methods + +object X + +class A(val s: String) extends AnyVal { + import X._ + def f = eq("hello, world") + def g = synchronized { println("hello, world") } +} +class B(val s: String) extends AnyVal { + import Predef._ + def f = eq(s) + def g = synchronized { println(s) } +} +package object p +class C(val s: String) extends AnyVal { + import p.`package`._ + def f = eq(s) + def g = synchronized { println(s) } +} +class Y(val s: String) { + def f = X.eq("hello, world") + def g = X.synchronized { println("hello, world") } +} +class Z(val s: String) { + import X._ + def f = eq("hello, world") + def g = synchronized { println("hello, world") } +} diff --git a/test/files/neg/i17284.check b/test/files/neg/i17284.check new file mode 100644 index 000000000000..c7d4729df0a3 --- /dev/null +++ b/test/files/neg/i17284.check @@ -0,0 +1,15 @@ +i17284.scala:4: warning: Suspicious `synchronized` call involving boxed primitive `Integer` + def literal = 451.synchronized {} // error + ^ +i17284.scala:8: warning: Suspicious `synchronized` call involving boxed primitive `Integer` + x.synchronized {} // error + ^ +i17284.scala:13: warning: Suspicious `synchronized` call involving boxed primitive `Integer` + x.synchronized {} // error + ^ +i17284.scala:16: warning: Suspicious `synchronized` call involving boxed primitive `Boolean` + def bool = true.synchronized {} // error + ^ +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/i17284.scala b/test/files/neg/i17284.scala new file mode 100644 index 000000000000..7ac70ace391b --- /dev/null +++ b/test/files/neg/i17284.scala @@ -0,0 +1,20 @@ +//> using options -Werror + +class C { + def literal = 451.synchronized {} // error + + def integral = { + val x = 451 + x.synchronized {} // error + } + + def boxed = { + val x: Integer = 451 + x.synchronized {} // error + } + + def bool = true.synchronized {} // error + + // hard error + //def unit = ().synchronized {} // error +} diff --git a/test/files/neg/i20006.check b/test/files/neg/i20006.check new file mode 100644 index 000000000000..65ef7fe590ba --- /dev/null +++ b/test/files/neg/i20006.check @@ -0,0 +1,22 @@ +i20006.scala:8: error: method next is defined twice; + the conflicting value next was defined at line 6:7 + override def next(): T = { // error + ^ +i20006.scala:7: error: method hasNext is defined twice; + the conflicting value hasNext was defined at line 5:7 + override def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006.scala:47: error: x is already defined as value x + val x: String = "member" // error + ^ +i20006.scala:53: error: x is already defined as value x + private[this] var x: Int = 42 // error + ^ +i20006.scala:67: error: method x is defined twice; + the conflicting method x was defined at line 66:21 + def x(): Int = 42 // error + ^ +i20006.scala:77: error: x is already defined as variable x + def x(): Int = x // error + ^ +6 errors diff --git a/test/files/neg/i20006.scala b/test/files/neg/i20006.scala new file mode 100644 index 000000000000..542fad79063c --- /dev/null +++ b/test/files/neg/i20006.scala @@ -0,0 +1,79 @@ + +abstract class XIterateIterator[T](seed: T) extends collection.AbstractIterator[T] { + private var first = true + private var acc = seed + val hasNext: T => Boolean + val next: T => T + override def hasNext: Boolean = first || hasNext(acc) // error + override def next(): T = { // error + if (first) 
{ + first = false + } else { + acc = next(acc) + } + acc + } +} + +final class YIterateIterator[T](seed: T, hasNext: T => Boolean, next: T => T) extends collection.AbstractIterator[T] { + private var first = true + private var acc = seed + override def hasNext: Boolean = first || hasNext(acc) // error + override def next(): T = { // noerror + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +final class ZIterateIterator[T](seed: T, hasNext: T => Boolean, next: T => T) { + private var first = true + private var acc = seed + def hasNext: Boolean = first || hasNext(acc) // error + def next(): T = { // noerror + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +class C(x: String) { + val x: String = "member" // error +} +class D(x: String) { + private var x: Int = 42 // error +} +class E(x: String) { + private[this] var x: Int = 42 // error +} +class F(x: String) { + def x(): Int = 42 // noerror +} +class G(x: String) { + def x(i: Int): Int = i +} +class H { + private[this] val x: String = "" + def x(): Int = 42 // noerror +} +class I { + private[this] def x: String = "" + def x(): Int = 42 // error +} +class PrivateConflict { + private[this] var x = 42 + def x(): Int = x + def x_=(n: Int) = x = n +} +class LocalConflict { + def f(): Unit = { + var x = 42 + def x(): Int = x // error + } +} diff --git a/test/files/neg/i20006b.check b/test/files/neg/i20006b.check new file mode 100644 index 000000000000..9b8efd0ea951 --- /dev/null +++ b/test/files/neg/i20006b.check @@ -0,0 +1,62 @@ +i20006b.scala:9: error: method next is defined twice; + the conflicting value next was defined at line 7:7 + override def next(): T = { // error + ^ +i20006b.scala:8: error: method hasNext is defined twice; + the conflicting value hasNext was defined at line 6:7 + override def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006b.scala:48: error: x is already defined as value x + val x: String = "member" // error + ^ 
+i20006b.scala:54: error: x is already defined as value x + private[this] var x: Int = 42 // error + ^ +i20006b.scala:68: error: method x is defined twice; + the conflicting method x was defined at line 67:21 + def x(): Int = 42 // error + ^ +i20006b.scala:78: error: x is already defined as variable x + def x(): Int = x // error + ^ +i20006b.scala:23: error: Double definition will be detected in Scala 3; the conflicting value next is defined at 19:65 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=YIterateIterator + override def next(): T = { // werror + ^ +i20006b.scala:22: error: Double definition will be detected in Scala 3; the conflicting value hasNext is defined at 19:42 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=YIterateIterator + override def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006b.scala:37: error: Double definition will be detected in Scala 3; the conflicting value next is defined at 33:65 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=ZIterateIterator + def next(): T = { // werror + ^ +i20006b.scala:36: error: Double definition will be detected in Scala 3; the conflicting value hasNext is defined at 33:42 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=ZIterateIterator + def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006b.scala:51: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 50:9 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D + private var x: Int = 42 // error + ^ +i20006b.scala:57: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 56:9 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=F + def x(): Int = 42 // werror + ^ +i20006b.scala:64: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 63:21 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=H + def x(): Int = 42 // werror + ^ +i20006b.scala:72: error: Double definition will be detected in Scala 3; the conflicting variable x is defined at 71:21 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=PrivateConflict + def x(): Int = x // werror + ^ +14 errors diff --git a/test/files/neg/i20006b.scala b/test/files/neg/i20006b.scala new file mode 100644 index 000000000000..ea04906accd4 --- /dev/null +++ b/test/files/neg/i20006b.scala @@ -0,0 +1,158 @@ +//> using options -Werror -Xsource:3 + +abstract class XIterateIterator[T](seed: T) extends collection.AbstractIterator[T] { + private var first = true + private var acc = seed + val hasNext: T => Boolean + val next: T => T + override def hasNext: Boolean = first || hasNext(acc) // error + override def next(): T = { // error + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +final class YIterateIterator[T](seed: T, hasNext: T => Boolean, next: T => T) extends collection.AbstractIterator[T] { + private var first = true + private var acc = seed + override def hasNext: Boolean = first || hasNext(acc) // error + override def next(): T = { // werror + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +final class ZIterateIterator[T](seed: T, hasNext: T => Boolean, next: T => T) { + private var first = true + private var acc = seed + def hasNext: Boolean = first || hasNext(acc) // error + def next(): T = { // werror + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +class C(x: String) { + val x: String = "member" // error +} +class D(x: String) { + private var x: Int = 42 // error +} +class E(x: String) { + private[this] var x: Int = 42 // error +} +class F(x: String) { + def x(): Int = 42 // werror +} +class G(x: String) { + def x(i: Int): Int = i +} +class H { + private[this] val x: String = "" + def x(): Int = 42 // werror +} +class I { + private[this] def x: String = "" + def x(): Int = 42 // error +} +class PrivateConflict { + private[this] var x = 42 + def x(): Int = x // werror + def x_=(n: Int) = x = n +} +class LocalConflict { + def 
f(): Unit = { + var x = 42 + def x(): Int = x // error + } +} + +/* +-- [E120] Naming Error: test/files/neg/i20006.scala:8:15 --------------------------------------------------------------- +8 | override def hasNext: Boolean = first || hasNext(acc) + | ^ + | Double definition: + | val hasNext: T => Boolean in class XIterateIterator at line 6 and + | override def hasNext: Boolean in class XIterateIterator at line 8 +-- [E120] Naming Error: test/files/neg/i20006.scala:9:15 --------------------------------------------------------------- +9 | override def next(): T = { + | ^ + | Double definition: + | val next: T => T in class XIterateIterator at line 7 and + | override def next(): T in class XIterateIterator at line 9 +-- [E120] Naming Error: test/files/neg/i20006.scala:22:15 -------------------------------------------------------------- +22 | override def hasNext: Boolean = first || hasNext(acc) + | ^ + | Double definition: + | private[this] val hasNext: T => Boolean in class YIterateIterator at line 19 and + | override def hasNext: Boolean in class YIterateIterator at line 22 +-- [E120] Naming Error: test/files/neg/i20006.scala:23:15 -------------------------------------------------------------- +23 | override def next(): T = { + | ^ + | Double definition: + | private[this] val next: T => T in class YIterateIterator at line 19 and + | override def next(): T in class YIterateIterator at line 23 +-- [E120] Naming Error: test/files/neg/i20006.scala:36:6 --------------------------------------------------------------- +36 | def hasNext: Boolean = first || hasNext(acc) + | ^ + | Double definition: + | private[this] val hasNext: T => Boolean in class ZIterateIterator at line 33 and + | def hasNext: Boolean in class ZIterateIterator at line 36 +-- [E120] Naming Error: test/files/neg/i20006.scala:37:6 --------------------------------------------------------------- +37 | def next(): T = { + | ^ + | Double definition: + | private[this] val next: T => T in class 
ZIterateIterator at line 33 and + | def next(): T in class ZIterateIterator at line 37 +-- [E120] Naming Error: test/files/neg/i20006.scala:48:6 --------------------------------------------------------------- +48 | val x: String = "member" // error + | ^ + | Double definition: + | private[this] val x: String in class C at line 47 and + | val x: String in class C at line 48 +-- [E120] Naming Error: test/files/neg/i20006.scala:51:14 -------------------------------------------------------------- +51 | private var x: Int = 42 // error + | ^ + | Double definition: + | private[this] val x: String in class D at line 50 and + | private[this] var x: Int in class D at line 51 +-- [E120] Naming Error: test/files/neg/i20006.scala:54:20 -------------------------------------------------------------- +54 | private[this] var x: Int = 42 // error + | ^ + | Double definition: + | private[this] val x: String in class E at line 53 and + | private[this] var x: Int in class E at line 54 +-- [E120] Naming Error: test/files/neg/i20006.scala:57:6 --------------------------------------------------------------- +57 | def x(): Int = 42 // error + | ^ + | Double definition: + | private[this] val x: String in class F at line 56 and + | def x(): Int in class F at line 57 +-- [E120] Naming Error: test/files/neg/i20006.scala:65:6 --------------------------------------------------------------- +65 | def x(): Int = 42 + | ^ + | Double definition: + | val x: String in class H at line 63 and + | def x(): Int in class H at line 65 +-- Warning: test/files/neg/i20006.scala:54:16 -------------------------------------------------------------------------- +54 | private[this] var x: Int = 42 // error + | ^ + | Ignoring [this] qualifier. + | This syntax will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
+1 warning found +11 errors found +*/ diff --git a/test/files/neg/i20006c.check b/test/files/neg/i20006c.check new file mode 100644 index 000000000000..6c46c554da6b --- /dev/null +++ b/test/files/neg/i20006c.check @@ -0,0 +1,54 @@ +i20006c.scala:9: error: method next is defined twice; + the conflicting value next was defined at line 7:7 + override def next(): T = { // error + ^ +i20006c.scala:8: error: method hasNext is defined twice; + the conflicting value hasNext was defined at line 6:7 + override def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006c.scala:23: error: method next is defined twice; + the conflicting value next was defined at line 19:65 + override def next(): T = { // error + ^ +i20006c.scala:22: error: method hasNext is defined twice; + the conflicting value hasNext was defined at line 19:42 + override def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006c.scala:37: error: method next is defined twice; + the conflicting value next was defined at line 33:65 + def next(): T = { // error + ^ +i20006c.scala:36: error: method hasNext is defined twice; + the conflicting value hasNext was defined at line 33:42 + def hasNext: Boolean = first || hasNext(acc) // error + ^ +i20006c.scala:48: error: x is already defined as value x + val x: String = "member" // error + ^ +i20006c.scala:51: error: variable x is defined twice; + the conflicting value x was defined at line 50:9 + private var x: Int = 42 // error + ^ +i20006c.scala:54: error: x is already defined as value x + private[this] var x: Int = 42 // error + ^ +i20006c.scala:57: error: method x is defined twice; + the conflicting value x was defined at line 56:9 + def x(): Int = 42 // error + ^ +i20006c.scala:64: error: method x is defined twice; + the conflicting value x was defined at line 63:21 + def x(): Int = 42 // error + ^ +i20006c.scala:68: error: method x is defined twice; + the conflicting method x was defined at line 67:21 + def x(): Int = 42 // error + ^ 
+i20006c.scala:72: error: method x is defined twice; + the conflicting variable x was defined at line 71:21 + def x(): Int = x // error + ^ +i20006c.scala:78: error: x is already defined as variable x + def x(): Int = x // error + ^ +14 errors diff --git a/test/files/neg/i20006c.scala b/test/files/neg/i20006c.scala new file mode 100644 index 000000000000..90cf99e96869 --- /dev/null +++ b/test/files/neg/i20006c.scala @@ -0,0 +1,80 @@ +//> using options -Werror -Xsource:3 -Xsource-features:double-definitions + +abstract class XIterateIterator[T](seed: T) extends collection.AbstractIterator[T] { + private var first = true + private var acc = seed + val hasNext: T => Boolean + val next: T => T + override def hasNext: Boolean = first || hasNext(acc) // error + override def next(): T = { // error + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +final class YIterateIterator[T](seed: T, hasNext: T => Boolean, next: T => T) extends collection.AbstractIterator[T] { + private var first = true + private var acc = seed + override def hasNext: Boolean = first || hasNext(acc) // error + override def next(): T = { // error + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +final class ZIterateIterator[T](seed: T, hasNext: T => Boolean, next: T => T) { + private var first = true + private var acc = seed + def hasNext: Boolean = first || hasNext(acc) // error + def next(): T = { // error + if (first) { + first = false + } else { + acc = next(acc) + } + acc + } +} + +class C(x: String) { + val x: String = "member" // error +} +class D(x: String) { + private var x: Int = 42 // error +} +class E(x: String) { + private[this] var x: Int = 42 // error +} +class F(x: String) { + def x(): Int = 42 // error +} +class G(x: String) { + def x(i: Int): Int = i +} +class H { + private[this] val x: String = "" + def x(): Int = 42 // error +} +class I { + private[this] def x: String = "" + def x(): Int = 42 // error +} +class 
PrivateConflict { + private[this] var x = 42 + def x(): Int = x // error + def x_=(n: Int) = x = n +} +class LocalConflict { + def f(): Unit = { + var x = 42 + def x(): Int = x // error + } +} diff --git a/test/files/neg/i20006d.check b/test/files/neg/i20006d.check new file mode 100644 index 000000000000..12f91280798c --- /dev/null +++ b/test/files/neg/i20006d.check @@ -0,0 +1,62 @@ +i20006d.scala:12: error: method x is defined twice; + the conflicting value x was defined at line 11:22 + def x: Int = 4 // err + ^ +i20006d.scala:17: error: method x is defined twice; + the conflicting value x was defined at line 16:28 + def x: Int = 4 // err + ^ +i20006d.scala:22: error: method x is defined twice; + the conflicting value x was defined at line 21:24 + def x: Int = 4 // err + ^ +i20006d.scala:27: error: method x is defined twice; + the conflicting value x was defined at line 26:30 + def x: Int = 4 // err + ^ +i20006d.scala:32: error: method x is defined twice; + the conflicting value x was defined at line 31:14 + def x: Int = 4 // err + ^ +i20006d.scala:44: error: method x is defined twice; + the conflicting value x was defined at line 43:22 + def x(): Int = 4 // err + ^ +i20006d.scala:49: error: method x is defined twice; + the conflicting value x was defined at line 48:28 + def x(): Int = 4 // err + ^ +i20006d.scala:54: error: method x is defined twice; + the conflicting value x was defined at line 53:24 + def x(): Int = 4 // err + ^ +i20006d.scala:59: error: method x is defined twice; + the conflicting value x was defined at line 58:30 + def x(): Int = 4 // err + ^ +i20006d.scala:64: error: method x is defined twice; + the conflicting value x was defined at line 63:14 + def x(): Int = 4 // err + ^ +i20006d.scala:68: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006d.scala:72: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006d.scala:76: error: x is already defined as value x + val x: Int = 4 // err + ^ 
+i20006d.scala:81: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006d.scala:86: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006d.scala:91: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006d.scala:96: error: x is already defined as value x + val x: Int = 4 // err + ^ +17 errors diff --git a/test/files/neg/i20006d.scala b/test/files/neg/i20006d.scala new file mode 100644 index 000000000000..9d75adfceb3d --- /dev/null +++ b/test/files/neg/i20006d.scala @@ -0,0 +1,97 @@ +//> using options -Werror + +class C1(x: String) { + def x: Int = 4 // ok in Scala 2 +} + +class C2(private[this] val x: String) { + def x: Int = 4 // ok in Scala 2 +} + +class C3(private val x: String) { + def x: Int = 4 // err +} + +object o4 { + class C4(private[o4] val x: String) { + def x: Int = 4 // err + } +} + +class C5(protected val x: String) { + def x: Int = 4 // err +} + +object o6 { + class C6(protected[o6] val x: String) { + def x: Int = 4 // err + } +} + +class C7(val x: String) { + def x: Int = 4 // err +} + +class D1(x: String) { + def x(): Int = 4 // ok +} + +class D2(private[this] val x: String) { + def x(): Int = 4 // ok +} + +class D3(private val x: String) { + def x(): Int = 4 // err +} + +object p4 { + class D4(private[p4] val x: String) { + def x(): Int = 4 // err + } +} + +class D5(protected val x: String) { + def x(): Int = 4 // err +} + +object p6 { + class D6(protected[p6] val x: String) { + def x(): Int = 4 // err + } +} + +class D7(val x: String) { + def x(): Int = 4 // err +} + +class E1(x: String) { + val x: Int = 4 // err +} + +class E2(private[this] val x: String) { + val x: Int = 4 // err +} + +class E3(private val x: String) { + val x: Int = 4 // err +} + +object q4 { + class E4(private[q4] val x: String) { + val x: Int = 4 // err + } +} + +class E5(protected val x: String) { + val x: Int = 4 // err +} + +object q6 { + class E6(protected[q6] val x: String) { + val x: Int = 4 // err + } +} + 
+class E7(val x: String) { + val x: Int = 4 // err +} diff --git a/test/files/neg/i20006e.check b/test/files/neg/i20006e.check new file mode 100644 index 000000000000..db8c0a149848 --- /dev/null +++ b/test/files/neg/i20006e.check @@ -0,0 +1,82 @@ +i20006e.scala:12: error: method x is defined twice; + the conflicting value x was defined at line 11:22 + def x: Int = 4 // err + ^ +i20006e.scala:17: error: method x is defined twice; + the conflicting value x was defined at line 16:28 + def x: Int = 4 // err + ^ +i20006e.scala:22: error: method x is defined twice; + the conflicting value x was defined at line 21:24 + def x: Int = 4 // err + ^ +i20006e.scala:27: error: method x is defined twice; + the conflicting value x was defined at line 26:30 + def x: Int = 4 // err + ^ +i20006e.scala:32: error: method x is defined twice; + the conflicting value x was defined at line 31:14 + def x: Int = 4 // err + ^ +i20006e.scala:44: error: method x is defined twice; + the conflicting value x was defined at line 43:22 + def x(): Int = 4 // err + ^ +i20006e.scala:49: error: method x is defined twice; + the conflicting value x was defined at line 48:28 + def x(): Int = 4 // err + ^ +i20006e.scala:54: error: method x is defined twice; + the conflicting value x was defined at line 53:24 + def x(): Int = 4 // err + ^ +i20006e.scala:59: error: method x is defined twice; + the conflicting value x was defined at line 58:30 + def x(): Int = 4 // err + ^ +i20006e.scala:64: error: method x is defined twice; + the conflicting value x was defined at line 63:14 + def x(): Int = 4 // err + ^ +i20006e.scala:68: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006e.scala:72: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006e.scala:76: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006e.scala:81: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006e.scala:86: error: x is already defined as value x + val x: 
Int = 4 // err + ^ +i20006e.scala:91: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006e.scala:96: error: x is already defined as value x + val x: Int = 4 // err + ^ +i20006e.scala:4: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 3:10 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C1 + def x: Int = 4 // warn in Xsource:3 + ^ +i20006e.scala:8: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 7:28 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C2 + def x: Int = 4 // warn in Xsource:3 + ^ +i20006e.scala:36: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 35:10 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D1 + def x(): Int = 4 // warn in Xsource:3 + ^ +i20006e.scala:40: error: Double definition will be detected in Scala 3; the conflicting value x is defined at 39:28 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D2 + def x(): Int = 4 // warn in Xsource:3 + ^ +21 errors diff --git a/test/files/neg/i20006e.scala b/test/files/neg/i20006e.scala new file mode 100644 index 000000000000..58d8d230903e --- /dev/null +++ b/test/files/neg/i20006e.scala @@ -0,0 +1,97 @@ +//> using options -Werror -Xsource:3 + +class C1(x: String) { + def x: Int = 4 // warn in Xsource:3 +} + +class C2(private[this] val x: String) { + def x: Int = 4 // warn in Xsource:3 +} + +class C3(private val x: String) { + def x: Int = 4 // err +} + +object o4 { + class C4(private[o4] val x: String) { + def x: Int = 4 // err + } +} + +class C5(protected val x: String) { + def x: Int = 4 // err +} + +object o6 { + class C6(protected[o6] val x: String) { + def x: Int = 4 // err + } +} + +class C7(val x: String) { + def x: Int = 4 // err +} + +class D1(x: String) { + def x(): Int = 4 // warn in Xsource:3 +} + +class D2(private[this] val x: String) { + def x(): Int = 4 // warn in Xsource:3 +} + +class D3(private val x: String) { + def x(): Int = 4 // err +} + +object p4 { + class D4(private[p4] val x: String) { + def x(): Int = 4 // err + } +} + +class D5(protected val x: String) { + def x(): Int = 4 // err +} + +object p6 { + class D6(protected[p6] val x: String) { + def x(): Int = 4 // err + } +} + +class D7(val x: String) { + def x(): Int = 4 // err +} + +class E1(x: String) { + val x: Int = 4 // err +} + +class E2(private[this] val x: String) { + val x: Int = 4 // err +} + +class E3(private val x: String) { + val x: Int = 4 // err +} + +object q4 { + class E4(private[q4] val x: String) { + val x: Int = 4 // err + } +} + +class E5(protected val x: String) { + val x: Int = 4 // err +} + +object q6 { + class E6(protected[q6] val x: String) { + val x: Int = 4 // err + } +} + +class E7(val x: String) { + val x: Int = 4 // err +} diff --git a/test/files/neg/i20026.check b/test/files/neg/i20026.check new file mode 100644 index 
000000000000..1686b9040973 --- /dev/null +++ b/test/files/neg/i20026.check @@ -0,0 +1,4 @@ +JTest.java:3: error: illegal start of type declaration +import java.util.*; + ^ +1 error diff --git a/test/files/neg/i20026/JTest.java b/test/files/neg/i20026/JTest.java new file mode 100644 index 000000000000..c0f9ffa1a881 --- /dev/null +++ b/test/files/neg/i20026/JTest.java @@ -0,0 +1,6 @@ + +@Deprecated +import java.util.*; + +public class JTest { +} diff --git a/test/files/neg/i20026/p.java b/test/files/neg/i20026/p.java new file mode 100644 index 000000000000..a6fc2f1597d3 --- /dev/null +++ b/test/files/neg/i20026/p.java @@ -0,0 +1,2 @@ +@Deprecated +package p; diff --git a/test/files/neg/i20026/test.scala b/test/files/neg/i20026/test.scala new file mode 100644 index 000000000000..792e10c36d61 --- /dev/null +++ b/test/files/neg/i20026/test.scala @@ -0,0 +1,6 @@ + +object Test extends App { + println { + new JTest + } +} diff --git a/test/files/neg/illegal-stmt-start.check b/test/files/neg/illegal-stmt-start.check index 01747524f88d..9f229a359061 100644 --- a/test/files/neg/illegal-stmt-start.check +++ b/test/files/neg/illegal-stmt-start.check @@ -1,4 +1,4 @@ illegal-stmt-start.scala:3: error: illegal start of statement (no modifiers allowed here) private def bar {} ^ -one error found +1 error diff --git a/test/files/neg/illegal-stmt-start.scala b/test/files/neg/illegal-stmt-start.scala index 48ae0a8b0a2a..275bc80e94dd 100644 --- a/test/files/neg/illegal-stmt-start.scala +++ b/test/files/neg/illegal-stmt-start.scala @@ -2,4 +2,4 @@ class Test { def foo { private def bar {} } -} \ No newline at end of file +} diff --git a/test/files/neg/imp2.check b/test/files/neg/imp2.check index 999cecf16709..aac592f575a1 100644 --- a/test/files/neg/imp2.check +++ b/test/files/neg/imp2.check @@ -4,4 +4,4 @@ import b._ and import a._ val x = f ^ -one error found +1 error diff --git a/test/files/neg/implicit-ambiguous-2.check b/test/files/neg/implicit-ambiguous-2.check index 
4a10b0dd6559..c3ac69514472 100644 --- a/test/files/neg/implicit-ambiguous-2.check +++ b/test/files/neg/implicit-ambiguous-2.check @@ -1,4 +1,4 @@ implicit-ambiguous-2.scala:10: error: Could not prove Int =!= Int implicitly[Int =!= Int] ^ -one error found +1 error diff --git a/test/files/neg/implicit-ambiguous-invalid.check b/test/files/neg/implicit-ambiguous-invalid.check index c3b8cb84329c..7c33045702cf 100644 --- a/test/files/neg/implicit-ambiguous-invalid.check +++ b/test/files/neg/implicit-ambiguous-invalid.check @@ -1,7 +1,7 @@ -implicit-ambiguous-invalid.scala:6: warning: Invalid implicitAmbiguous message for method neqAmbig1 in object Test: +implicit-ambiguous-invalid.scala:7: warning: Invalid implicitAmbiguous message for method neqAmbig1 in object Test: The type parameter B referenced in the message of the @implicitAmbiguous annotation is not defined by method neqAmbig1. implicit def neqAmbig1[A] : A =!= A = null ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/implicit-ambiguous-invalid.scala b/test/files/neg/implicit-ambiguous-invalid.scala index 08feb3539720..ca6003e34496 100644 --- a/test/files/neg/implicit-ambiguous-invalid.scala +++ b/test/files/neg/implicit-ambiguous-invalid.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xlint:implicit-not-found -Xfatal-warnings +// object Test { trait =!=[C, D] diff --git a/test/files/neg/implicit-ambiguous-val.check b/test/files/neg/implicit-ambiguous-val.check index 1e828537d5fa..e7302ee48d42 100644 --- a/test/files/neg/implicit-ambiguous-val.check +++ b/test/files/neg/implicit-ambiguous-val.check @@ -1,4 +1,4 @@ implicit-ambiguous-val.scala:16: error: unexpected string meh("") ^ -one error found +1 error diff --git a/test/files/neg/implicit-ambiguous.check b/test/files/neg/implicit-ambiguous.check index 0b3cebcb6fd1..a7e5a532b511 100644 --- a/test/files/neg/implicit-ambiguous.check +++ b/test/files/neg/implicit-ambiguous.check @@ -1,4 +1,4 @@ implicit-ambiguous.scala:10: error: Could not prove Int =!= Int implicitly[Int =!= Int] ^ -one error found +1 error diff --git a/test/files/neg/implicit-any2stringadd-migration.check b/test/files/neg/implicit-any2stringadd-migration.check new file mode 100644 index 000000000000..700a916ed5a7 --- /dev/null +++ b/test/files/neg/implicit-any2stringadd-migration.check @@ -0,0 +1,6 @@ +implicit-any2stringadd-migration.scala:4: error: Converting to String for concatenation is not supported in Scala 3 (or with -Xsource-features:any2stringadd). +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=scala.Predef.any2stringadd + true + "what" + ^ +1 error diff --git a/test/files/neg/implicit-any2stringadd-migration.scala b/test/files/neg/implicit-any2stringadd-migration.scala new file mode 100644 index 000000000000..bbf2ee1c6525 --- /dev/null +++ b/test/files/neg/implicit-any2stringadd-migration.scala @@ -0,0 +1,5 @@ +//> using options -Xsource:3 + +object Test { + true + "what" +} diff --git a/test/files/neg/implicit-any2stringadd-warning.check b/test/files/neg/implicit-any2stringadd-warning.check new file mode 100644 index 000000000000..b998faac2221 --- /dev/null +++ b/test/files/neg/implicit-any2stringadd-warning.check @@ -0,0 +1,6 @@ +implicit-any2stringadd-warning.scala:4: warning: method any2stringadd in object Predef is deprecated (since 2.13.0): Implicit injection of + is deprecated. Convert to String to call + + true + "what" + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/implicit-any2stringadd-warning.scala b/test/files/neg/implicit-any2stringadd-warning.scala new file mode 100644 index 000000000000..4c928fd775f3 --- /dev/null +++ b/test/files/neg/implicit-any2stringadd-warning.scala @@ -0,0 +1,5 @@ +//> using options -Werror -deprecation + +object Test { + true + "what" +} diff --git a/test/files/neg/implicit-any2stringadd.check b/test/files/neg/implicit-any2stringadd.check new file mode 100644 index 000000000000..21f119d39d09 --- /dev/null +++ b/test/files/neg/implicit-any2stringadd.check @@ -0,0 +1,4 @@ +implicit-any2stringadd.scala:4: error: value + is not a member of Boolean + true + "what" + ^ +1 error diff --git a/test/files/neg/implicit-any2stringadd.scala b/test/files/neg/implicit-any2stringadd.scala new file mode 100644 index 000000000000..bfcdc5832c90 --- /dev/null +++ b/test/files/neg/implicit-any2stringadd.scala @@ -0,0 +1,5 @@ +//> using options -Xsource:3 -Xsource-features:any2stringadd + 
+object Test { + true + "what" +} diff --git a/test/files/neg/implicit-by-name.check b/test/files/neg/implicit-by-name.check deleted file mode 100644 index 03c6206970a9..000000000000 --- a/test/files/neg/implicit-by-name.check +++ /dev/null @@ -1,4 +0,0 @@ -implicit-by-name.scala:2: error: implicit parameters may not be call-by-name - implicit def reverseOrd[A](implicit ord: => Ordering[A]): Ordering[A] = - ^ -one error found diff --git a/test/files/neg/implicit-by-name.scala b/test/files/neg/implicit-by-name.scala deleted file mode 100644 index a45121eb507f..000000000000 --- a/test/files/neg/implicit-by-name.scala +++ /dev/null @@ -1,4 +0,0 @@ -object Test { - implicit def reverseOrd[A](implicit ord: => Ordering[A]): Ordering[A] = - ord.reverse -} diff --git a/test/files/neg/implicit-log.check b/test/files/neg/implicit-log.check new file mode 100644 index 000000000000..541aa6251b25 --- /dev/null +++ b/test/files/neg/implicit-log.check @@ -0,0 +1,4 @@ +implicit-log.scala:100: error: value baa is not a member of Int + 1.baa + ^ +1 error diff --git a/test/files/neg/implicit-log.scala b/test/files/neg/implicit-log.scala new file mode 100644 index 000000000000..7af20610f64d --- /dev/null +++ b/test/files/neg/implicit-log.scala @@ -0,0 +1,101 @@ +/* scalac: -Vimplicits -Xsource:3 -Xfatal-warnings */ + +package foo + +import scala.language.implicitConversions +import scala.reflect.runtime.universe._ +import scala.reflect.{ClassTag, classTag} + +// #1435 +object t1435 { + implicit def a(s:String):String = sys.error("") + implicit def a(i:Int):String = sys.error("") + implicit def b(i:Int):String = sys.error("") +} + +class C1435 { + val v:String = { + import t1435.a + 2 + } +} + +// #1492 +class C1492 { + + class X + + def foo(x: X => X): Unit = {} + + foo ( implicit x => implicitly[X] ) + foo { implicit x => implicitly[X] } +} + +// #1579 +object Test1579 { + class Column + class Query[E](val value: E) + class Invoker(q: Any) { val foo = null } + + implicit def 
unwrap[C](q: Query[C]): C = q.value + implicit def invoker(q: Query[Column]): Invoker = new Invoker(q) + + val q = new Query(new Column) + q.foo +} +// #1625 +object Test1625 { + + class Wrapped(x:Any) { + def unwrap() = x + } + + implicit def byName[A](x: => A): Wrapped = new Wrapped(x) + + implicit def byVal[A](x: A): A = x + + def main(args: Array[String]) = { + +// val res:Wrapped = 7 // works + + val res = 7.unwrap() // doesn't work + + println("=> result: " + res) + } +} + +object Test2188 { + implicit def toJavaList[A: ClassTag](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*) + + val x: java.util.List[String] = List("foo") +} + +object TestNumericWidening { + val y = 1 + val x: java.lang.Long = y +} + +// #2709 +package foo2709 { + class A + class B + + package object bar { + implicit def a2b(a: A): B = new B + } + + package bar { + object test { + new A: B + } + } +} + +// Problem with specs +object specsProblem { + println(implicitly[TypeTag[Class[_]]]) +} + +object Foo { + 1.baa +} diff --git a/test/files/neg/implicit-shadow.check b/test/files/neg/implicit-shadow.check index 3e801cac807c..d7909b9c3a11 100644 --- a/test/files/neg/implicit-shadow.check +++ b/test/files/neg/implicit-shadow.check @@ -1,11 +1,4 @@ -implicit-shadow.scala:5: is not a valid implicit value for Int(1) => ?{def isEmpty: ?} because: -reference to i2s is ambiguous; -it is imported twice in the same scope by -import C._ -and import B._ +implicit-shadow.scala:6: error: value isEmpty is not a member of Int 1.isEmpty - ^ -implicit-shadow.scala:5: error: value isEmpty is not a member of Int - 1.isEmpty - ^ -one error found + ^ +1 error diff --git a/test/files/neg/implicit-shadow.scala b/test/files/neg/implicit-shadow.scala index cab23c8b4a7a..dffa838221f4 100644 --- a/test/files/neg/implicit-shadow.scala +++ b/test/files/neg/implicit-shadow.scala @@ -1,4 +1,5 @@ -// scalac: -Xlog-implicits +//> using options -Vimplicits +// object Test { import B._, C._ diff 
--git a/test/files/neg/implicitly-self.check b/test/files/neg/implicitly-self.check index a7f4a56fbc24..2c74eea75815 100644 --- a/test/files/neg/implicitly-self.check +++ b/test/files/neg/implicitly-self.check @@ -1,15 +1,18 @@ -implicitly-self.scala:6: warning: Implicit resolves to enclosing method c +implicitly-self.scala:7: warning: Implicit resolves to enclosing method c implicit def c: Char = implicitly[Char] ^ -implicitly-self.scala:7: warning: Implicit resolves to enclosing value s +implicitly-self.scala:8: warning: Implicit resolves to enclosing value s implicit val s: String = implicitly[String] ^ -implicitly-self.scala:9: warning: Implicit resolves to enclosing value t +implicitly-self.scala:10: warning: Implicit resolves to enclosing value t def f = implicitly[Int] ^ -implicitly-self.scala:12: warning: Implicit resolves to enclosing object tcString +implicitly-self.scala:13: warning: Implicit resolves to enclosing object tcString implicit object tcString extends TC[String] { def ix = implicitly[TC[String]].ix + 1 } ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +implicitly-self.scala:22: warning: Implicit resolves to enclosing method bad + implicit def bad[A](a: A)(implicit ev: A => T): Sizeable = ev(a) + ^ +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/implicitly-self.scala b/test/files/neg/implicitly-self.scala index 0fa2d64b2f05..becd16ccb311 100644 --- a/test/files/neg/implicitly-self.scala +++ b/test/files/neg/implicitly-self.scala @@ -1,6 +1,7 @@ -// scalac: -Xfatal-warnings -Ywarn-self-implicit +//> using options -Werror -Xlint:implicit-recursion +// -trait TC[T] { def ix: Int } +trait TC[A] { def ix: Int } object Test { implicit def c: Char = implicitly[Char] @@ -11,3 +12,13 @@ object Test { } implicit object tcString extends TC[String] { def ix = implicitly[TC[String]].ix + 1 } } + +import language.implicitConversions + +trait T +trait Sizeable { def size: Int } + +class `t8357 warn on self-involved implicit` { + implicit def bad[A](a: A)(implicit ev: A => T): Sizeable = ev(a) + bad(new T{}) +} diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check index 6d61f7f22227..a91387c2f820 100644 --- a/test/files/neg/implicits.check +++ b/test/files/neg/implicits.check @@ -4,11 +4,23 @@ implicits.scala:38: error: type mismatch; foo(set) ^ implicits.scala:46: error: type mismatch; - found : List[Any] - required: List[Mxml] - children.toList.flatMap ( e => { - ^ -implicits.scala:66: error: could not find implicit value for parameter x: Nothing + found : Iterable[_] + required: scala.collection.IterableOnce[Mxml] + case s: scala.collection.Iterable[_] => s + ^ +implicits.scala:47: error: type mismatch; + found : Any + required: Mxml + case a => List(a) + ^ +implicits.scala:59: error: could not find implicit value for parameter x: Nothing foo { ^ -three errors found +implicits.scala:34: warning: Implicit definition should have explicit type (inferred T) [quickfixable] + implicit def select[T](t: HSome[T,_]) = t.head + ^ +implicits.scala:35: warning: Implicit definition should have explicit type (inferred L) [quickfixable] + implicit def selectTail[L](t: HSome[_,L]) = t.tail + ^ +2 warnings +4 errors diff --git a/test/files/neg/implicits.scala 
b/test/files/neg/implicits.scala index 87dffc9ff16d..9ad845bbed6d 100644 --- a/test/files/neg/implicits.scala +++ b/test/files/neg/implicits.scala @@ -40,21 +40,14 @@ object test2 { // #2180 class Mxml { - - private def processChildren( children:Seq[Any] ):List[Mxml] = { - - children.toList.flatMap ( e => { - + private def processChildren(children: Seq[Any]): List[Mxml] = { + children.toList.flatMap(e => { e match { - - case s:scala.collection.Traversable[_] => s case a => List(a) - + case s: scala.collection.Iterable[_] => s + case a => List(a) } - }) - } - } // scala/bug#5316 diff --git a/test/files/neg/import-future.check b/test/files/neg/import-future.check index 282b1ae95e4c..daaaa678b635 100644 --- a/test/files/neg/import-future.check +++ b/test/files/neg/import-future.check @@ -1,4 +1,13 @@ import-future.scala:15: error: not found: value unrelated unrelated(1) // error ^ -one error found +import-future.scala:40: error: not found: value f + def g = f[Int] // error no f, was given is not a member + ^ +import-future.scala:44: error: could not find implicit value for parameter t: T[Int] + def g = f[Int] // implicit unavailable + ^ +import-future.scala:48: error: could not find implicit value for parameter t: T[Int] + def g = f[Int] // implicit unavailable + ^ +4 errors diff --git a/test/files/neg/import-future.scala b/test/files/neg/import-future.scala index 288fd3d0e240..4f88cf64c3e4 100644 --- a/test/files/neg/import-future.scala +++ b/test/files/neg/import-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // class D { @@ -25,3 +25,25 @@ object Test { *(1) } } + +trait T[A] { + def t: A +} +object TX { + implicit def tInt: T[Int] = new T[Int] { + def t: Int = 42 + } + def f[A](implicit t: T[A]): A = t.t +} +object X { + import TX.given + def g = f[Int] // error no f, was given is not a member +} +object Y { + import TX.{f, tInt as _, given} + def g = f[Int] // implicit unavailable +} +object Z { + import TX.{tInt as _, *} + def 
g = f[Int] // implicit unavailable +} diff --git a/test/files/neg/import-precedence.check b/test/files/neg/import-precedence.check index 5f9961105284..05dd397665f1 100644 --- a/test/files/neg/import-precedence.check +++ b/test/files/neg/import-precedence.check @@ -16,4 +16,4 @@ import uniq1.uniq2.X and import uniq1.X object Y { def f = X } ^ -three errors found +3 errors diff --git a/test/files/neg/import-syntax.check b/test/files/neg/import-syntax.check index 231b64ce44ae..887677e3cfd9 100644 --- a/test/files/neg/import-syntax.check +++ b/test/files/neg/import-syntax.check @@ -4,4 +4,4 @@ import-syntax.scala:10: error: Wildcard import cannot be renamed import-syntax.scala:11: error: Wildcard import cannot be renamed import d.{_ => also_no} ^ -two errors found +2 errors diff --git a/test/files/neg/import-syntax.scala b/test/files/neg/import-syntax.scala index 72f90f232d8d..00caf0aecd7b 100644 --- a/test/files/neg/import-syntax.scala +++ b/test/files/neg/import-syntax.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 class D { def *(y: Int): Int = y @@ -9,13 +9,4 @@ object nope { val d = new D import d.{* => huh} import d.{_ => also_no} - `should fail`() -} - -// OK, except previous syntax errors bail early -object rename { - val d = new D - import d.{unrelated => f, *} - def x = f(42) - def y = *(27) } diff --git a/test/files/neg/inferred-structural-3.check b/test/files/neg/inferred-structural-3.check new file mode 100644 index 000000000000..f297042016d3 --- /dev/null +++ b/test/files/neg/inferred-structural-3.check @@ -0,0 +1,18 @@ +inferred-structural-3.scala:8: error: in Scala 3 (or with -Xsource-features:no-infer-structural), method a will no longer have a structural type: Option[AnyRef{def g: Int}] + members that can be accessed with a reflective call: def g: Int +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C.a + def a = Option(new { def g = 1 }) // warn + ^ +inferred-structural-3.scala:16: error: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to AnyRef instead of A [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C.g + def g = new A { def f = this } // warn -- inferred type of `f` is `A`, since we're not using -Xsource-features:infer-override + ^ +inferred-structural-3.scala:19: error: in Scala 3 (or with -Xsource-features:no-infer-structural), method i will no longer have a structural type: AnyRef{val x: Int} + members that can be accessed with a reflective call: val x: Int +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C.i + def i = new AnyRef { val x = 2 } // warn + ^ +3 errors diff --git a/test/files/neg/inferred-structural-3.scala b/test/files/neg/inferred-structural-3.scala new file mode 100644 index 000000000000..dc4c09325b79 --- /dev/null +++ b/test/files/neg/inferred-structural-3.scala @@ -0,0 +1,20 @@ +//> using options -Xsource:3 -Werror + +trait A { + def f: AnyRef +} + +class C { + def a = Option(new { def g = 1 }) // warn + def b: Option[{ def g: Int }] = Option(new { def g = 1 }) // ok + + def c(p: { def i: Int }): Int = 0 // ok + def d = new A { def f: A = this } // ok + + def e = new A { def f: AnyRef = new AnyRef } // ok + def f = new A { def f = new AnyRef } // ok + def g = new A { def f = this } // warn -- inferred type of `f` is `A`, since we're not using -Xsource-features:infer-override + + def h = new AnyRef { type T = String } // ok + def i = new AnyRef { val x = 2 } // warn +} diff --git a/test/files/neg/infix-named-arg.check b/test/files/neg/infix-named-arg.check new file mode 100644 index 000000000000..add5420ab385 --- /dev/null +++ b/test/files/neg/infix-named-arg.check @@ -0,0 +1,6 @@ +infix-named-arg.scala:5: warning: named argument is deprecated for infix syntax + def f = 42 + (x = 1) + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/infix-named-arg.scala b/test/files/neg/infix-named-arg.scala new file mode 100644 index 000000000000..b22b05613bb5 --- /dev/null +++ b/test/files/neg/infix-named-arg.scala @@ -0,0 +1,8 @@ + +//> using options -Werror -Xlint -Xsource:3 + +class C { + def f = 42 + (x = 1) + def multi(x: Int, y: Int): Int = x + y + def g = new C() `multi` (x = 42, y = 27) +} diff --git a/test/files/neg/infix-op-positions.check b/test/files/neg/infix-op-positions.check index 1bff9b941a77..a8d3acc59c71 100644 --- a/test/files/neg/infix-op-positions.check +++ b/test/files/neg/infix-op-positions.check @@ -4,4 +4,4 @@ infix-op-positions.scala:2: error: value -! is not a member of Option[Int] infix-op-positions.scala:3: error: value -!: is not a member of Option[Int] "test" -!: Option(1) // right associative operators ^ -two errors found +2 errors diff --git a/test/files/neg/infixed.check b/test/files/neg/infixed.check new file mode 100644 index 000000000000..02acdcb54351 --- /dev/null +++ b/test/files/neg/infixed.check @@ -0,0 +1,4 @@ +infixed.scala:8: error: ';' expected but integer literal found. + x 42 + ^ +1 error diff --git a/test/files/neg/infixed.scala b/test/files/neg/infixed.scala new file mode 100644 index 000000000000..1b8422e1792d --- /dev/null +++ b/test/files/neg/infixed.scala @@ -0,0 +1,10 @@ +//> using options -Xsource:3 + +class K { def x(y: Int) = 0 } + +class Test { + def bad = { + (new K) + x 42 + } +} diff --git a/test/files/neg/inlineIndyLambdaPrivate.check b/test/files/neg/inlineIndyLambdaPrivate.check index 0b82ca7f19b9..566605cde72c 100644 --- a/test/files/neg/inlineIndyLambdaPrivate.check +++ b/test/files/neg/inlineIndyLambdaPrivate.check @@ -11,6 +11,6 @@ The callee A_1::test()Ljava/lang/String; contains the instruction INVOKEDYNAMIC that would cause an IllegalAccessError when inlined into class Test. def foo = A_1.test ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala b/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala index 8200ff5241cc..87648689f116 100644 --- a/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala +++ b/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:inline -opt-inline-from:** -Yopt-inline-heuristics:everything -opt-warnings:_ -Xfatal-warnings +//> using options -opt:inline:** -Yopt-inline-heuristics:everything -Wopt:_ -Werror class Test { def foo = A_1.test } diff --git a/test/files/neg/inlineMaxSize.check b/test/files/neg/inlineMaxSize.check deleted file mode 100644 index a2f22f0aa794..000000000000 --- a/test/files/neg/inlineMaxSize.check +++ /dev/null @@ -1,9 +0,0 @@ -inlineMaxSize.scala:8: warning: C::i()I is annotated @inline but could not be inlined: -The size of the callsite method C::j()I -would exceed the JVM method size limit after inlining C::i()I. - - @inline final def j = i + i + i + i - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found diff --git a/test/files/neg/inlineMaxSize.scala b/test/files/neg/inlineMaxSize.scala deleted file mode 100644 index 4eae0545ccab..000000000000 --- a/test/files/neg/inlineMaxSize.scala +++ /dev/null @@ -1,9 +0,0 @@ -// scalac: -Ydelambdafy:method -opt:l:inline -opt-inline-from:** -opt-warnings -Xfatal-warnings -// not a JUnit test because of https://github.com/scala-opt/scala/issues/23 -class C { - @inline final def f = 0 - @inline final def g = f + f + f + f + f + f + f + f + f + f - @inline final def h = g + g + g + g + g + g + g + g + g + g - @inline final def i = h + h + h + h + h + h + h + h + h + h - @inline final def j = i + i + i + i -} diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.check b/test/files/neg/interop_abstypetags_arenot_classmanifests.check index d15e33346cbc..d918e02840dd 100644 --- a/test/files/neg/interop_abstypetags_arenot_classmanifests.check +++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.check @@ -1,4 +1,4 @@ -interop_abstypetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T. - println(classManifest[T]) - ^ -one error found +interop_abstypetags_arenot_classmanifests.scala:6: error: No ClassManifest available for T. 
+ println(implicitly[ClassManifest[T]]) + ^ +1 error diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.scala b/test/files/neg/interop_abstypetags_arenot_classmanifests.scala index 5d88c90ffdff..4e806d1737fc 100644 --- a/test/files/neg/interop_abstypetags_arenot_classmanifests.scala +++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.scala @@ -1,11 +1,12 @@ import scala.reflect.runtime.universe._ +import scala.reflect.ClassManifest object Test extends App { def weakTypeTagIsnotClassManifest[T: WeakTypeTag] = { - println(classManifest[T]) + println(implicitly[ClassManifest[T]]) } weakTypeTagIsnotClassManifest[Int] weakTypeTagIsnotClassManifest[String] weakTypeTagIsnotClassManifest[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.check b/test/files/neg/interop_abstypetags_arenot_classtags.check index 3aa7a50b5034..2cae95fc39f5 100644 --- a/test/files/neg/interop_abstypetags_arenot_classtags.check +++ b/test/files/neg/interop_abstypetags_arenot_classtags.check @@ -1,4 +1,4 @@ interop_abstypetags_arenot_classtags.scala:6: error: No ClassTag available for T println(classTag[T]) ^ -one error found +1 error diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.scala b/test/files/neg/interop_abstypetags_arenot_classtags.scala index de1f8657b630..7e049bf65582 100644 --- a/test/files/neg/interop_abstypetags_arenot_classtags.scala +++ b/test/files/neg/interop_abstypetags_arenot_classtags.scala @@ -9,4 +9,4 @@ object Test extends App { weakTypeTagIsnotClassTag[Int] weakTypeTagIsnotClassTag[String] weakTypeTagIsnotClassTag[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.check b/test/files/neg/interop_abstypetags_arenot_manifests.check index 5916b68742b8..3c3668f6128a 100644 --- a/test/files/neg/interop_abstypetags_arenot_manifests.check +++ b/test/files/neg/interop_abstypetags_arenot_manifests.check @@ -1,4 +1,4 @@ 
interop_abstypetags_arenot_manifests.scala:5: error: No Manifest available for T. println(manifest[T]) ^ -one error found +1 error diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.scala b/test/files/neg/interop_abstypetags_arenot_manifests.scala index 1ca3673ce466..1f934e0bba2f 100644 --- a/test/files/neg/interop_abstypetags_arenot_manifests.scala +++ b/test/files/neg/interop_abstypetags_arenot_manifests.scala @@ -8,4 +8,4 @@ object Test extends App { weakTypeTagIsnotManifest[Int] weakTypeTagIsnotManifest[String] weakTypeTagIsnotManifest[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.check b/test/files/neg/interop_classmanifests_arenot_typetags.check index db8e57981aad..fdc7eafe2a15 100644 --- a/test/files/neg/interop_classmanifests_arenot_typetags.check +++ b/test/files/neg/interop_classmanifests_arenot_typetags.check @@ -1,4 +1,4 @@ -interop_classmanifests_arenot_typetags.scala:5: error: No TypeTag available for T +interop_classmanifests_arenot_typetags.scala:6: error: No TypeTag available for T println(implicitly[TypeTag[T]]) ^ -one error found +1 error diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.scala b/test/files/neg/interop_classmanifests_arenot_typetags.scala index 29d03a8ec8a7..f249e5b34648 100644 --- a/test/files/neg/interop_classmanifests_arenot_typetags.scala +++ b/test/files/neg/interop_classmanifests_arenot_typetags.scala @@ -1,4 +1,5 @@ import scala.reflect.runtime.universe._ +import scala.reflect.ClassManifest object Test extends App { def classManifestIsnotTypeTag[T: ClassManifest] = { @@ -8,4 +9,4 @@ object Test extends App { classManifestIsnotTypeTag[Int] classManifestIsnotTypeTag[String] classManifestIsnotTypeTag[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_classtags_arenot_manifests.check b/test/files/neg/interop_classtags_arenot_manifests.check index fa805b5918b8..3fe0b90be4e1 100644 --- 
a/test/files/neg/interop_classtags_arenot_manifests.check +++ b/test/files/neg/interop_classtags_arenot_manifests.check @@ -1,4 +1,4 @@ interop_classtags_arenot_manifests.scala:5: error: No Manifest available for T. println(manifest[T]) ^ -one error found +1 error diff --git a/test/files/neg/interop_classtags_arenot_manifests.scala b/test/files/neg/interop_classtags_arenot_manifests.scala index 391143c6e8a1..3555118d62dd 100644 --- a/test/files/neg/interop_classtags_arenot_manifests.scala +++ b/test/files/neg/interop_classtags_arenot_manifests.scala @@ -8,4 +8,4 @@ object Test extends App { classTagIsnotManifest[Int] classTagIsnotManifest[String] classTagIsnotManifest[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.check b/test/files/neg/interop_typetags_arenot_classmanifests.check index 88fb1647e57b..0925e6ffba7e 100644 --- a/test/files/neg/interop_typetags_arenot_classmanifests.check +++ b/test/files/neg/interop_typetags_arenot_classmanifests.check @@ -1,4 +1,4 @@ -interop_typetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T. - println(classManifest[T]) - ^ -one error found +interop_typetags_arenot_classmanifests.scala:6: error: No ClassManifest available for T. 
+ println(implicitly[ClassManifest[T]]) + ^ +1 error diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.scala b/test/files/neg/interop_typetags_arenot_classmanifests.scala index d07f4726f5a6..0eb81feeef55 100644 --- a/test/files/neg/interop_typetags_arenot_classmanifests.scala +++ b/test/files/neg/interop_typetags_arenot_classmanifests.scala @@ -1,11 +1,12 @@ import scala.reflect.runtime.universe._ +import scala.reflect.ClassManifest object Test extends App { def typeTagIsnotClassManifest[T: TypeTag] = { - println(classManifest[T]) + println(implicitly[ClassManifest[T]]) } typeTagIsnotClassManifest[Int] typeTagIsnotClassManifest[String] typeTagIsnotClassManifest[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_typetags_arenot_classtags.check b/test/files/neg/interop_typetags_arenot_classtags.check index 1d1fb15f9ebb..7eaad2efd641 100644 --- a/test/files/neg/interop_typetags_arenot_classtags.check +++ b/test/files/neg/interop_typetags_arenot_classtags.check @@ -1,4 +1,4 @@ interop_typetags_arenot_classtags.scala:6: error: No ClassTag available for T println(classTag[T]) ^ -one error found +1 error diff --git a/test/files/neg/interop_typetags_arenot_classtags.scala b/test/files/neg/interop_typetags_arenot_classtags.scala index 072c12adb087..f5bd75fb3362 100644 --- a/test/files/neg/interop_typetags_arenot_classtags.scala +++ b/test/files/neg/interop_typetags_arenot_classtags.scala @@ -9,4 +9,4 @@ object Test extends App { typeTagIsnotClassTag[Int] typeTagIsnotClassTag[String] typeTagIsnotClassTag[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check index ba744a883777..13e8cbeb419f 100644 --- a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check +++ b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check @@ -3,4 +3,4 @@ however 
typetag -> manifest conversion requires a class tag for the correspondin to proceed add a class tag to the type `T` (e.g. by introducing a context bound) and recompile. println(manifest[T]) ^ -one error found +1 error diff --git a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala index 0d48ae5cd0f5..ec69c1460a78 100644 --- a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala +++ b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala @@ -9,4 +9,4 @@ object Test extends App { typeTagWithoutClassTagIsnotManifest[Int] typeTagWithoutClassTagIsnotManifest[String] typeTagWithoutClassTagIsnotManifest[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/neg/iterable-ordering.check b/test/files/neg/iterable-ordering.check new file mode 100644 index 000000000000..d9f6ebff9927 --- /dev/null +++ b/test/files/neg/iterable-ordering.check @@ -0,0 +1,6 @@ +iterable-ordering.scala:3: warning: method Iterable in object Ordering is deprecated (since 2.13.0): Iterables are not guaranteed to have a consistent order; if using a type with a consistent order (e.g. Seq), use its Ordering (found in the Ordering.Implicits object) + val o = Ordering[Iterable[Int]] + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/iterable-ordering.scala b/test/files/neg/iterable-ordering.scala new file mode 100644 index 000000000000..ae25b1dd4522 --- /dev/null +++ b/test/files/neg/iterable-ordering.scala @@ -0,0 +1,4 @@ +//> using options -Xlint:deprecation -Werror +object Test { + val o = Ordering[Iterable[Int]] +} diff --git a/test/files/neg/java-access-neg.check b/test/files/neg/java-access-neg.check index af2812b579a1..f8c95aabfe13 100644 --- a/test/files/neg/java-access-neg.check +++ b/test/files/neg/java-access-neg.check @@ -13,4 +13,4 @@ S2.scala:47: error: method packageConcrete overrides nothing S2.scala:58: error: method packageConcrete overrides nothing override def packageConcrete() = () // fail ^ -5 errors found +5 errors diff --git a/test/files/neg/java-annotation-bad.check b/test/files/neg/java-annotation-bad.check new file mode 100644 index 000000000000..7afd015d7c3a --- /dev/null +++ b/test/files/neg/java-annotation-bad.check @@ -0,0 +1,43 @@ +Test_1.scala:12: error: Java annotation Ann_0 is abstract; cannot be instantiated + val a: Ann_0 = new Ann_0 // nok + ^ +Test_1.scala:13: error: Java annotation Ann_0 is abstract; cannot be instantiated + val b: Ann_0 = new Ann_0(Array()) // nok + ^ +Test_1.scala:14: error: Java annotation Ann_1 is abstract; cannot be instantiated + val c: Ann_1 = new Ann_1 // nok + ^ +Test_1.scala:15: error: Java annotation Ann_1 is abstract; cannot be instantiated + val d: Ann_1 = new Ann_1(Array()) // nok + ^ +Test_1.scala:18: error: type mismatch; + found : ann.Ann_0 + required: scala.annotation.Annotation + val e: Annotation = a // nok + ^ +Test_1.scala:19: error: type mismatch; + found : ann.Ann_1 + required: scala.annotation.Annotation + val f: Annotation = c // nok + ^ +Test_1.scala:20: error: type mismatch; + found : ann.Ann_0 + required: scala.annotation.StaticAnnotation + val g: StaticAnnotation = a // nok + ^ +Test_1.scala:21: error: type mismatch; + found : ann.Ann_1 + required: 
scala.annotation.StaticAnnotation + val h: StaticAnnotation = c // nok + ^ +Test_1.scala:22: error: type mismatch; + found : ann.Ann_0 + required: scala.annotation.ConstantAnnotation + val i: ConstantAnnotation = a // nok + ^ +Test_1.scala:23: error: type mismatch; + found : ann.Ann_1 + required: scala.annotation.ConstantAnnotation + val j: ConstantAnnotation = c // nok + ^ +10 errors diff --git a/test/files/neg/java-annotation-bad/Ann_0.java b/test/files/neg/java-annotation-bad/Ann_0.java new file mode 100644 index 000000000000..3e8033edb768 --- /dev/null +++ b/test/files/neg/java-annotation-bad/Ann_0.java @@ -0,0 +1,7 @@ +package ann; + +public @interface Ann_0 { + N[] value(); + + public @interface N {} +} \ No newline at end of file diff --git a/test/files/neg/java-annotation-bad/Ann_1.java b/test/files/neg/java-annotation-bad/Ann_1.java new file mode 100644 index 000000000000..37b87c79deea --- /dev/null +++ b/test/files/neg/java-annotation-bad/Ann_1.java @@ -0,0 +1,7 @@ +package ann; + +public @interface Ann_1 { + N[] value(); + + public @interface N {} +} \ No newline at end of file diff --git a/test/files/neg/java-annotation-bad/Test_1.scala b/test/files/neg/java-annotation-bad/Test_1.scala new file mode 100644 index 000000000000..a4c5b1395ec0 --- /dev/null +++ b/test/files/neg/java-annotation-bad/Test_1.scala @@ -0,0 +1,30 @@ +object Test { + import ann._ + + // ok + @Ann_0(Array(new Ann_0.N, new Ann_0.N)) + class A + + // ok + @Ann_1(Array(new Ann_1.N, new Ann_1.N)) + class B + + val a: Ann_0 = new Ann_0 // nok + val b: Ann_0 = new Ann_0(Array()) // nok + val c: Ann_1 = new Ann_1 // nok + val d: Ann_1 = new Ann_1(Array()) // nok + + import scala.annotation._, java.lang.{annotation => jla} + val e: Annotation = a // nok + val f: Annotation = c // nok + val g: StaticAnnotation = a // nok + val h: StaticAnnotation = c // nok + val i: ConstantAnnotation = a // nok + val j: ConstantAnnotation = c // nok + val k: jla.Annotation = a // ok + val l: jla.Annotation 
= c // ok + + val m = new Ann_0 { val annotationType = classOf[Ann_0] } // ok + val n = new Ann_1 { val annotationType = classOf[Ann_1] } // ok + +} diff --git a/test/files/neg/java-import-non-existing-selector/Test.java b/test/files/neg/java-import-non-existing-selector/Test.java index e9797851c77c..be3e496bd348 100644 --- a/test/files/neg/java-import-non-existing-selector/Test.java +++ b/test/files/neg/java-import-non-existing-selector/Test.java @@ -1,3 +1,4 @@ +// scalac: -Ypickle-java package p1; public class Test extends Base {} diff --git a/test/files/neg/lazy-override.check b/test/files/neg/lazy-override.check index 793e6b2020fa..83fc2c613a32 100644 --- a/test/files/neg/lazy-override.check +++ b/test/files/neg/lazy-override.check @@ -1,9 +1,9 @@ -lazy-override.scala:11: error: overriding value x in class A of type Int; - lazy value x cannot override a concrete non-lazy value +lazy-override.scala:11: error: concrete non-lazy value cannot be overridden: +val x: Int (defined in class A) override lazy val x: Int = { print("/*B.x*/"); 3 } ^ -lazy-override.scala:13: error: overriding lazy value y in class A of type Int; - value y must be declared lazy to override a concrete lazy value +lazy-override.scala:13: error: value must be lazy when overriding concrete lazy value: +lazy val y: Int (defined in class A) override val y: Int = { print("/*B.y*/"); 3 } ^ -two errors found +2 errors diff --git a/test/files/neg/lazyvals.check b/test/files/neg/lazyvals.check index c4daf9d842bd..5d3cdf0d819f 100644 --- a/test/files/neg/lazyvals.check +++ b/test/files/neg/lazyvals.check @@ -22,4 +22,4 @@ lazyvals.scala:22: error: lazy not allowed here. Only vals can be lazy lazyvals.scala:25: error: lazy modifier not allowed here. 
Use call-by-name parameters instead class A(lazy val obj: Object) {} ^ -8 errors found +8 errors diff --git a/test/files/neg/leibniz-liskov.check b/test/files/neg/leibniz-liskov.check new file mode 100644 index 000000000000..ad2ff6f6d4df --- /dev/null +++ b/test/files/neg/leibniz-liskov.check @@ -0,0 +1,120 @@ +leibniz-liskov.scala:7: error: Cannot prove that LeibnizLiskov.this.A =:= LeibnizLiskov.this.B. + implicitly[A =:= B] + ^ +leibniz-liskov.scala:8: error: Cannot prove that LeibnizLiskov.this.B =:= LeibnizLiskov.this.A. + implicitly[B =:= A] + ^ +leibniz-liskov.scala:11: error: Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SA. + implicitly[A <:< SA] + ^ +leibniz-liskov.scala:12: error: Cannot prove that LeibnizLiskov.this.SB <:< LeibnizLiskov.this.B. + implicitly[SB <:< B] + ^ +leibniz-liskov.scala:13: error: Cannot prove that LeibnizLiskov.this.SA <:< LeibnizLiskov.this.B. + implicitly[SA <:< B] + ^ +leibniz-liskov.scala:14: error: Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SB. 
+ implicitly[A <:< SB] + ^ +leibniz-liskov.scala:18: error: no type parameters for method substituteCo: (ff: F[LeibnizLiskov.this.A]): F[LeibnizLiskov.this.B] exist so that it can be applied to arguments (List[LeibnizLiskov.this.B]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : List[LeibnizLiskov.this.B] + required: ?F[LeibnizLiskov.this.A] + aEqB.substituteCo (List(B(), B(), B())) + ^ +leibniz-liskov.scala:18: error: type mismatch; + found : List[LeibnizLiskov.this.B] + required: F[LeibnizLiskov.this.A] + aEqB.substituteCo (List(B(), B(), B())) + ^ +leibniz-liskov.scala:19: error: no type parameters for method substituteContra: (ft: F[LeibnizLiskov.this.B]): F[LeibnizLiskov.this.A] exist so that it can be applied to arguments (List[LeibnizLiskov.this.A]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : List[LeibnizLiskov.this.A] + required: ?F[LeibnizLiskov.this.B] + aEqB.substituteContra(List(A(), A(), A())) + ^ +leibniz-liskov.scala:19: error: type mismatch; + found : List[LeibnizLiskov.this.A] + required: F[LeibnizLiskov.this.B] + aEqB.substituteContra(List(A(), A(), A())) + ^ +leibniz-liskov.scala:20: error: Cannot prove that xs.type <:< List[LeibnizLiskov.this.B]. 
+ locally { val xs = aEqB.flip.liftCo[List](List(B(), B(), B())); implicitly[xs.type <:< List[B]] } + ^ +leibniz-liskov.scala:21: error: no type parameters for method substituteContra: (ft: F[U]): F[T] exist so that it can be applied to arguments (List[T]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : List[T] + required: ?F[U] + def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) + ^ +leibniz-liskov.scala:21: error: type mismatch; + found : List[T] + required: F[U] + def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) + ^ +leibniz-liskov.scala:21: error: polymorphic expression cannot be instantiated to expected type; + found : [F[_]]F[T] + required: List[U] + def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) + ^ +leibniz-liskov.scala:22: error: no type parameters for method substituteCo: (ff: F[T]): F[U] exist so that it can be applied to arguments (List[U]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : List[U] + required: ?F[T] + def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) + ^ +leibniz-liskov.scala:22: error: type mismatch; + found : List[U] + required: F[T] + def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) + ^ +leibniz-liskov.scala:22: error: polymorphic expression cannot be instantiated to expected type; + found : [F[_]]F[U] + required: List[T] + def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) + ^ +leibniz-liskov.scala:30: error: kinds of the type arguments ([-Y]Y => LeibnizLiskov.this.A) do not conform to the expected kinds of the type parameters (type F). 
+[-Y]Y => LeibnizLiskov.this.A's type parameters do not match type F's expected parameters: +type Y is contravariant, but type _ is declared covariant + locally { val f = bSubA.substituteCo [To [A]#L](aSubB(_)); implicitly[f.type <:< (A => A)] } + ^ +leibniz-liskov.scala:31: error: kinds of the type arguments ([+Y]LeibnizLiskov.this.A => Y) do not conform to the expected kinds of the type parameters (type F). +[+Y]LeibnizLiskov.this.A => Y's type parameters do not match type F's expected parameters: +type Y is covariant, but type _ is declared contravariant + locally { val f = aSubB.substituteContra[From[A]#L](bSubA(_)); implicitly[f.type <:< (A => A)] } + ^ +leibniz-liskov.scala:32: error: kinds of the type arguments (List) do not conform to the expected kinds of the type parameters (type F). +List's type parameters do not match type F's expected parameters: +type A is covariant, but type _ is declared contravariant + def convertSub[T, U](l: List[T])(ev: T <:< U): List[U] = ev.liftContra[List](l) + ^ +leibniz-liskov.scala:34: error: kinds of the type arguments (LeibnizLiskov.this.Consumes) do not conform to the expected kinds of the type parameters (type F). 
+LeibnizLiskov.this.Consumes's type parameters do not match type F's expected parameters: +type X is contravariant, but type _ is declared covariant + def convertConsume1[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.liftCo[Consumes](c) + ^ +leibniz-liskov.scala:35: error: no type parameters for method substituteCo: (ff: F[U]): F[T] exist so that it can be applied to arguments (LeibnizLiskov.this.Consumes[T]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : LeibnizLiskov.this.Consumes[T] + (which expands to) T => Unit + required: ?F[U] + def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) + ^ +leibniz-liskov.scala:35: error: type mismatch; + found : LeibnizLiskov.this.Consumes[T] + (which expands to) T => Unit + required: F[U] + def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) + ^ +leibniz-liskov.scala:35: error: polymorphic expression cannot be instantiated to expected type; + found : [F[+_]]F[T] + required: LeibnizLiskov.this.Consumes[U] + (which expands to) U => Unit + def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) + ^ +24 errors diff --git a/test/files/neg/leibniz-liskov.scala b/test/files/neg/leibniz-liskov.scala new file mode 100644 index 000000000000..1a1a2a87f749 --- /dev/null +++ b/test/files/neg/leibniz-liskov.scala @@ -0,0 +1,36 @@ +trait LeibnizLiskov { + type A // instead of picking some concrete type, use a totally unknown, abstract one + type B + type SA <: A + type SB >: B + + implicitly[A =:= B] + implicitly[B =:= A] + def aEqB: A =:= B + + implicitly[A <:< SA] + implicitly[SB <:< B] + implicitly[SA <:< B] + implicitly[A <:< SB] + + def A(): A + def B(): B + aEqB.substituteCo (List(B(), B(), B())) + aEqB.substituteContra(List(A(), A(), A())) + locally { val xs = aEqB.flip.liftCo[List](List(B(), B(), B())); implicitly[xs.type <:< List[B]] } + def convert1[T, U](l: 
List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) + def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) + + implicitly[A <:< A] + implicitly[B <:< B] + val aSubB: A <:< B = aEqB + val bSubA: B <:< A = aEqB.flip + type From[X] = { type L[+Y] = X => Y } + type To [X] = { type L[-Y] = Y => X } + locally { val f = bSubA.substituteCo [To [A]#L](aSubB(_)); implicitly[f.type <:< (A => A)] } + locally { val f = aSubB.substituteContra[From[A]#L](bSubA(_)); implicitly[f.type <:< (A => A)] } + def convertSub[T, U](l: List[T])(ev: T <:< U): List[U] = ev.liftContra[List](l) + type Consumes[-X] = X => Unit + def convertConsume1[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.liftCo[Consumes](c) + def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) +} diff --git a/test/files/neg/lint-inferred-structural.check b/test/files/neg/lint-inferred-structural.check new file mode 100644 index 000000000000..7141e1ee6464 --- /dev/null +++ b/test/files/neg/lint-inferred-structural.check @@ -0,0 +1,12 @@ +lint-inferred-structural.scala:8: warning: method a has an inferred structural type: Option[AnyRef{def g: Int}] + members that can be accessed with a reflective call: def g: Int + def a = Option(new { def g = 1 }) // warn + ^ +lint-inferred-structural.scala:19: warning: method i has an inferred structural type: AnyRef{val x: Int} + members that can be accessed with a reflective call: val x: Int + def i = new AnyRef { val x = 2 } // warn + ^ +warning: 1 feature warning; re-run with -feature for details +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/lint-inferred-structural.scala b/test/files/neg/lint-inferred-structural.scala new file mode 100644 index 000000000000..f3b17d7030d9 --- /dev/null +++ b/test/files/neg/lint-inferred-structural.scala @@ -0,0 +1,20 @@ +//> using options -Xlint -Werror + +trait A { + def f: AnyRef +} + +class C { + def a = Option(new { def g = 1 }) // warn + def b: Option[{ def g: Int }] = Option(new { def g = 1 }) // ok + + def c(p: { def i: Int }): Int = 0 // ok + def d = new A { def f: A = this } // ok + + def e = new A { def f: AnyRef = new AnyRef } // ok + def f = new A { def f = new AnyRef } // ok + def g = new A { def f = this } // ok + + def h = new AnyRef { type T = String } // ok + def i = new AnyRef { val x = 2 } // warn +} diff --git a/test/files/neg/lint-int-div-to-float.check b/test/files/neg/lint-int-div-to-float.check index 2c7f86bc44ff..83f69ca9ab8a 100644 --- a/test/files/neg/lint-int-div-to-float.check +++ b/test/files/neg/lint-int-div-to-float.check @@ -1,18 +1,18 @@ -lint-int-div-to-float.scala:6: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. +lint-int-div-to-float.scala:6: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. [quickfixable] def w1: Double = f / 2 ^ -lint-int-div-to-float.scala:7: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. +lint-int-div-to-float.scala:7: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. [quickfixable] def w2: Double = (f / 2) * 3 ^ -lint-int-div-to-float.scala:8: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. +lint-int-div-to-float.scala:8: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. 
[quickfixable] def w3: Double = -(f / 2) ^ -lint-int-div-to-float.scala:9: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. +lint-int-div-to-float.scala:9: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. [quickfixable] def w4: Double = (new C).f / (new C).f * 3 ^ -lint-int-div-to-float.scala:10: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. +lint-int-div-to-float.scala:10: warning: integral division is implicitly converted (widened) to floating point. Add an explicit `.toDouble`. [quickfixable] def w5: Double = f - f.abs / 2 ^ -error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found -one error found +error: No warnings can be incurred under -Werror. +5 warnings +1 error diff --git a/test/files/neg/lint-int-div-to-float.scala b/test/files/neg/lint-int-div-to-float.scala index 4f66c481384e..927c82853f90 100644 --- a/test/files/neg/lint-int-div-to-float.scala +++ b/test/files/neg/lint-int-div-to-float.scala @@ -1,4 +1,4 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Xlint -Xfatal-warnings class C { def f = 1 diff --git a/test/files/neg/literals.check b/test/files/neg/literals.check index 79b6d4778264..2f9e809bf4ec 100644 --- a/test/files/neg/literals.check +++ b/test/files/neg/literals.check @@ -1,52 +1,59 @@ -literals.scala:6: error: missing integer number +literals.scala:4: error: invalid literal number def missingHex: Int = { 0x } // line 4: was: not reported, taken as zero ^ -literals.scala:8: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.) - def leadingZeros: Int = { 01 } // line 6: no leading zero - ^ -literals.scala:10: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.) 
- def tooManyZeros: Int = { 00 } // line 8: no leading zero - ^ -literals.scala:12: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.) - def zeroOfNine: Int = { 09 } // line 10: no leading zero - ^ -literals.scala:16: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.) - def zeroOfNineDot: Int = { 09. } // line 14: malformed integer, ident expected - ^ -literals.scala:23: error: missing integer number +literals.scala:22: error: invalid literal number def missingHex: Int = 0x // line 22: was: not reported, taken as zero ^ -literals.scala:27: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.) - def tooManyZeros: Int = 00 // line 26: no leading zero - ^ -literals.scala:40: error: floating point number too small +literals.scala:39: error: floating point number too small def tooTiny: Float = { 0.7e-45f } // floating point number too small ^ -literals.scala:42: error: double precision floating point number too small +literals.scala:41: error: double precision floating point number too small def twoTiny: Double = { 2.0e-324 } // double precision floating point number too small ^ -literals.scala:44: error: floating point number too large +literals.scala:43: error: floating point number too large def tooHuge: Float = { 3.4028236E38f } // floating point number too large ^ -literals.scala:46: error: double precision floating point number too large +literals.scala:45: error: double precision floating point number too large def twoHuge: Double = { 1.7976931348623159e308 } // double precision floating point number too large ^ -literals.scala:14: error: identifier expected but '}' found. +literals.scala:12: error: identifier expected but '}' found. def orphanDot: Int = { 9. } // line 12: ident expected ^ -literals.scala:16: error: identifier expected but '}' found. +literals.scala:14: error: identifier expected but '}' found. def zeroOfNineDot: Int = { 09. 
} // line 14: malformed integer, ident expected ^ -literals.scala:18: error: ';' expected but double literal found. +literals.scala:16: error: ';' expected but double literal found. def noHexFloat: Double = { 0x1.2 } // line 16: ';' expected but double literal found. ^ -literals.scala:25: error: ';' expected but 'def' found. +literals.scala:24: error: ';' expected but 'def' found. def leadingZeros: Int = 01 // line 24: no leading zero ^ -literals.scala:29: error: ';' expected but 'def' found. - def zeroOfNine: Int = 09 // line 28: no leading zero - ^ -literals.scala:33: error: identifier expected but 'def' found. +literals.scala:32: error: identifier expected but 'def' found. def zeroOfNineDot: Int = 09. // line 32: malformed integer, ident expected ^ -17 errors found +literals.scala:6: warning: Decimal integer literals should not have a leading zero. (Octal syntax is obsolete.) + def leadingZeros: Int = { 01 } // line 6: no leading zero + ^ +literals.scala:8: warning: Decimal integer literals should not have a leading zero. (Octal syntax is obsolete.) + def tooManyZeros: Int = { 00 } // line 8: no leading zero + ^ +literals.scala:10: warning: Decimal integer literals should not have a leading zero. (Octal syntax is obsolete.) + def zeroOfNine: Int = { 09 } // line 10: no leading zero + ^ +literals.scala:14: warning: Decimal integer literals should not have a leading zero. (Octal syntax is obsolete.) + def zeroOfNineDot: Int = { 09. } // line 14: malformed integer, ident expected + ^ +literals.scala:26: warning: Decimal integer literals should not have a leading zero. (Octal syntax is obsolete.) + def tooManyZeros: Int = 00 // line 26: no leading zero + ^ +literals.scala:28: warning: Decimal integer literals should not have a leading zero. (Octal syntax is obsolete.) 
+ def zeroOfNine: Int = 09 // line 28: no leading zero + ^ +literals.scala:50: warning: Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead [quickfixable] + def bad = 1l + ^ +literals.scala:52: warning: Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead [quickfixable] + def worse = 123l + ^ +8 warnings +11 errors diff --git a/test/files/neg/literals.scala b/test/files/neg/literals.scala index 22d5d9acd160..f7e9e3d2bf42 100644 --- a/test/files/neg/literals.scala +++ b/test/files/neg/literals.scala @@ -1,6 +1,4 @@ - -/* This took me literally all day. - */ +//> using options -Woctal-literal -Werror -deprecation trait RejectedLiterals { def missingHex: Int = { 0x } // line 4: was: not reported, taken as zero @@ -16,6 +14,7 @@ trait RejectedLiterals { def zeroOfNineDot: Int = { 09. } // line 14: malformed integer, ident expected def noHexFloat: Double = { 0x1.2 } // line 16: ';' expected but double literal found. + } trait Braceless { @@ -45,3 +44,15 @@ trait MoreSadness { def twoHuge: Double = { 1.7976931348623159e308 } // double precision floating point number too large } + +trait Lengthy { + + def bad = 1l + + def worse = 123l +} + +trait Regressions { + def oopsy = 0 // nowarn + def hexed = 0x0F // nowarn +} diff --git a/test/files/neg/literate_existentials.check b/test/files/neg/literate_existentials.check index c98f976f795d..73b45c0af204 100644 --- a/test/files/neg/literate_existentials.check +++ b/test/files/neg/literate_existentials.check @@ -1,4 +1,4 @@ literate_existentials.scala:189: error: Cannot prove that Int <:< M forSome { type M <: String }. 
implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails ^ -one error found +1 error diff --git a/test/files/neg/locally-x.check b/test/files/neg/locally-x.check new file mode 100644 index 000000000000..e4bee575ee56 --- /dev/null +++ b/test/files/neg/locally-x.check @@ -0,0 +1,6 @@ +locally-x.scala:8: warning: naming parameter x is deprecated. + x = 27 + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/locally-x.scala b/test/files/neg/locally-x.scala new file mode 100644 index 000000000000..58435659b2df --- /dev/null +++ b/test/files/neg/locally-x.scala @@ -0,0 +1,10 @@ +//> using options -Werror -Xlint:deprecation +// +// an innocent gotcha +// +class C { + var x = 0 + locally ( + x = 27 + ) +} diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check index 02f21dbb47dc..5ffcac5da49d 100644 --- a/test/files/neg/logImplicits.check +++ b/test/files/neg/logImplicits.check @@ -1,19 +1,22 @@ -logImplicits.scala:3: applied implicit conversion from xs.type to ?{def size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps.ofByte +logImplicits.scala:4: applied implicit conversion from xs.type to ?{def size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.ArrayOps[Byte] def f(xs: Array[Byte]) = xs.size ^ -logImplicits.scala:8: applied implicit conversion from String("abc") to ?{def map: ?} = implicit def augmentString(x: String): scala.collection.immutable.StringOps +logImplicits.scala:9: applied implicit conversion from String("abc") to ?{def map: ?} = implicit def augmentString(x: String): scala.collection.StringOps def f = "abc" map (_ + 1) ^ -logImplicits.scala:16: inferred view from String("abc") to Int via C.this.convert: (p: String)Int +logImplicits.scala:17: inferred view from String("abc") to Int via C.this.convert: (p: "abc"): Int math.max(122, x: Int) ^ -logImplicits.scala:20: applied implicit conversion from Int(1) to 
?{def -> : ?} = implicit def ArrowAssoc[A](self: A): ArrowAssoc[A] +logImplicits.scala:21: applied implicit conversion from Int(1) to ?{def -> : ?} = final implicit def ArrowAssoc[A](self: A): ArrowAssoc[A] def f = (1 -> 2) + "c" ^ -logImplicits.scala:20: applied implicit conversion from (Int, Int) to ?{def + : ?} = implicit def any2stringadd[A](self: A): any2stringadd[A] +logImplicits.scala:21: applied implicit conversion from (Int, Int) to ?{def + : ?} = final implicit def any2stringadd[A](self: A): any2stringadd[A] def f = (1 -> 2) + "c" ^ -logImplicits.scala:23: error: class Un needs to be abstract, since method unimplemented is not defined +logImplicits.scala:24: error: class Un needs to be abstract. +Missing implementation: + def unimplemented: Int = ??? + class Un { ^ -one error found +1 error diff --git a/test/files/neg/logImplicits.scala b/test/files/neg/logImplicits.scala index d548128affc6..72df36b90a17 100644 --- a/test/files/neg/logImplicits.scala +++ b/test/files/neg/logImplicits.scala @@ -1,4 +1,5 @@ -// scalac: -Xlog-implicit-conversions +//> using options -Xlog-implicit-conversions +// class A { def f(xs: Array[Byte]) = xs.size def g(xs: Array[Byte]) = xs.length diff --git a/test/files/neg/lub-from-hell-2.check b/test/files/neg/lub-from-hell-2.check deleted file mode 100644 index 3ef935f93bfd..000000000000 --- a/test/files/neg/lub-from-hell-2.check +++ /dev/null @@ -1,7 +0,0 @@ -lub-from-hell-2.scala:3: error: type arguments [Any,Iterable[Any] with Int => Any with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any]{def seq: Iterable[Any] with Int => Any}]{def seq: Iterable[Any] with Int => Any{def seq: Iterable[Any] with Int => Any}}] do not conform to trait Subtractable's type parameter bounds [A,+Repr <: scala.collection.generic.Subtractable[A,Repr]] - def foo(a: Boolean, b: collection.mutable.Set[Any], c: collection.mutable.ListBuffer[Any]) = if (a) 
b else c - ^ -lub-from-hell-2.scala:4: error: type arguments [Any,scala.collection.mutable.Iterable[Any] with scala.collection.mutable.Cloneable[scala.collection.mutable.Iterable[Any] with scala.collection.mutable.Cloneable[scala.collection.mutable.Iterable[Any] with Cloneable with Int => Any] with Int => Any{def seq: scala.collection.mutable.Iterable[Any] with Cloneable with Int => Any}] with scala.collection.generic.Growable[Any] with Int => Any with scala.collection.generic.Shrinkable[Any] with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any]{def seq: Iterable[Any] with Int => Any}] with scala.collection.script.Scriptable[Any]] do not conform to trait Subtractable's type parameter bounds [A,+Repr <: scala.collection.generic.Subtractable[A,Repr]] - def bar(a: Boolean, b: scala.collection.mutable.SetLike[Any,scala.collection.mutable.Set[Any]], c: scala.collection.mutable.Buffer[Any]) = if (a) b else c - ^ -two errors found diff --git a/test/files/neg/lub-from-hell-2.scala b/test/files/neg/lub-from-hell-2.scala deleted file mode 100644 index 329f3ba99f8d..000000000000 --- a/test/files/neg/lub-from-hell-2.scala +++ /dev/null @@ -1,13 +0,0 @@ -class Test { - trait Tree - def foo(a: Boolean, b: collection.mutable.Set[Any], c: collection.mutable.ListBuffer[Any]) = if (a) b else c - def bar(a: Boolean, b: scala.collection.mutable.SetLike[Any,scala.collection.mutable.Set[Any]], c: scala.collection.mutable.Buffer[Any]) = if (a) b else c - // bar produces an ill-bounded LUB in 2.11.8. After this commit, which fixes a bug in existential+refinement lubs, foo also fails. -} -// This test case minimizes a case that stated to fail compile after my fixes in scala/bug#5294. -// `foo` used to compile for the wrong reason, `mergePrefixAndArgs` failed to transpose a -// ragged matrix and skipped to the next level of the base type sequences to find a common type symbol. 
-// -// My changes fixed the root cause of the ragged matrix, which uncovered the latent bug. -// For comparison, `bar` failed to compile before _and_ after my changes for the same reason: -// f-bounded types involved in LUBs can sometimes produce an ill-bounded LUB. diff --git a/test/files/neg/lubs.check b/test/files/neg/lubs.check index affbd4983c40..9a512f7a5fa2 100644 --- a/test/files/neg/lubs.check +++ b/test/files/neg/lubs.check @@ -18,4 +18,4 @@ lubs.scala:25: error: type mismatch; required: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A}}} val x4: A { type T >: Null <: A { type T >: Null <: A { type T >: Null <: A } } } = f ^ -four errors found +4 errors diff --git a/test/files/neg/macro-abort.check b/test/files/neg/macro-abort.check index 1e58add53319..8eabb27e6910 100644 --- a/test/files/neg/macro-abort.check +++ b/test/files/neg/macro-abort.check @@ -1,4 +1,4 @@ Test_2.scala:2: error: aborted Macros.abort ^ -one error found +1 error diff --git a/test/files/neg/macro-abort/Macros_1.scala b/test/files/neg/macro-abort/Macros_1.scala index 2077e99ad7ec..12f1191cd43f 100644 --- a/test/files/neg/macro-abort/Macros_1.scala +++ b/test/files/neg/macro-abort/Macros_1.scala @@ -5,5 +5,5 @@ object Macros { def impl(c: Context) = { c.abort(c.enclosingPosition, "aborted") } - def abort = macro impl -} \ No newline at end of file + def abort: Any = macro impl +} diff --git a/test/files/neg/macro-abort/Test_2.scala b/test/files/neg/macro-abort/Test_2.scala index 1d0a7a25dc66..9eee0cccaa69 100644 --- a/test/files/neg/macro-abort/Test_2.scala +++ b/test/files/neg/macro-abort/Test_2.scala @@ -1,3 +1,3 @@ object Test extends App { Macros.abort -} \ No newline at end of file +} diff --git a/test/files/neg/macro-annot-not-expanded.check b/test/files/neg/macro-annot-not-expanded.check new file mode 100644 index 000000000000..c2036a154bcd --- /dev/null +++ b/test/files/neg/macro-annot-not-expanded.check @@ -0,0 +1,4 @@ +Test_2.scala:2: 
error: macro annotation could not be expanded (since these are experimental, you must enable them with -Ymacro-annotations) +class Test + ^ +1 error diff --git a/test/files/neg/macro-annot-not-expanded/Macros_1.scala b/test/files/neg/macro-annot-not-expanded/Macros_1.scala new file mode 100644 index 000000000000..08c24880d5ad --- /dev/null +++ b/test/files/neg/macro-annot-not-expanded/Macros_1.scala @@ -0,0 +1,12 @@ +//> using options -Ymacro-annotations +import scala.language.experimental.macros +import scala.reflect.macros.blackbox +import scala.annotation.StaticAnnotation + +object Macros { + def annotImpl(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = ??? +} + +class annot extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = macro Macros.annotImpl +} diff --git a/test/files/neg/macro-annot-not-expanded/Test_2.scala b/test/files/neg/macro-annot-not-expanded/Test_2.scala new file mode 100644 index 000000000000..171125047a84 --- /dev/null +++ b/test/files/neg/macro-annot-not-expanded/Test_2.scala @@ -0,0 +1,2 @@ +@annot +class Test diff --git a/test/files/neg/macro-annot-unused-param.check b/test/files/neg/macro-annot-unused-param.check new file mode 100644 index 000000000000..046e938dfe13 --- /dev/null +++ b/test/files/neg/macro-annot-unused-param.check @@ -0,0 +1,6 @@ +Test_2.scala:2: warning: parameter x in anonymous function is never used +@mymacro + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/macro-annot-unused-param/Macros_1.scala b/test/files/neg/macro-annot-unused-param/Macros_1.scala new file mode 100644 index 000000000000..3ab8d0bc820c --- /dev/null +++ b/test/files/neg/macro-annot-unused-param/Macros_1.scala @@ -0,0 +1,22 @@ +//> using options -Ymacro-annotations +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context +import scala.annotation.StaticAnnotation + +object Macros { + def annotImpl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + import c.universe._ + val classTree = annottees.head.tree + val objectTree = q""" + object X { + def f: Int => String = { x => "hello" } + } + """ + + c.Expr[Any](Block(List(classTree, objectTree), Literal(Constant(())))) + } +} + +class mymacro extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = macro Macros.annotImpl +} diff --git a/test/files/neg/macro-annot-unused-param/Test_2.scala b/test/files/neg/macro-annot-unused-param/Test_2.scala new file mode 100644 index 000000000000..f43e1122a8a0 --- /dev/null +++ b/test/files/neg/macro-annot-unused-param/Test_2.scala @@ -0,0 +1,7 @@ +//> using options -Ymacro-annotations -Wunused:params -Wmacros:after -Werror +@mymacro +class X + +object Test { + println(X.f(123)) +} diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check index d7d722586114..167948ab8970 100644 --- a/test/files/neg/macro-basic-mamdmi.check +++ b/test/files/neg/macro-basic-mamdmi.check @@ -10,4 +10,4 @@ Impls_Macros_Test_1.scala:34: error: macro implementation not found: quux (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them) println(foo(2) + Macros.bar(2) * new Macros().quux(4)) ^ -three errors found +3 errors diff --git a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala index 982c12f11efd..49af96332176 
100644 --- a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala +++ b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-blackbox-dynamic-materialization.check b/test/files/neg/macro-blackbox-dynamic-materialization.check index f6c73f7edb69..5cf6183f8bb0 100644 --- a/test/files/neg/macro-blackbox-dynamic-materialization.check +++ b/test/files/neg/macro-blackbox-dynamic-materialization.check @@ -1,4 +1,4 @@ Test_2.scala:2: error: I don't like classes that contain integers println(implicitly[Foo[C1]]) ^ -one error found +1 error diff --git a/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala b/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala index fc2907b6dcc4..16a379ea5e50 100644 --- a/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala +++ b/test/files/neg/macro-blackbox-dynamic-materialization/Macros_1.scala @@ -22,4 +22,4 @@ object Macros { c.abort(c.enclosingPosition, "I don't like classes that contain integers") q"new Foo[$tpe]{ override def toString = ${tpe.toString} }" } -} \ No newline at end of file +} diff --git a/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala b/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala index bf19209ab7e8..abb5229bd0d9 100644 --- a/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala +++ b/test/files/neg/macro-blackbox-dynamic-materialization/Test_2.scala @@ -1,4 +1,4 @@ object Test extends App { println(implicitly[Foo[C1]]) println(implicitly[Foo[C2]]) -} \ No newline at end of file +} diff --git a/test/files/neg/macro-blackbox-extractor.check b/test/files/neg/macro-blackbox-extractor.check index 4c53ff19b84d..bc265ddb1500 100644 --- a/test/files/neg/macro-blackbox-extractor.check +++ 
b/test/files/neg/macro-blackbox-extractor.check @@ -1,4 +1,4 @@ Test_2.scala:3: error: extractor macros can only be whitebox case Extractor(x) => println(x) ^ -one error found +1 error diff --git a/test/files/neg/macro-blackbox-fundep-materialization.check b/test/files/neg/macro-blackbox-fundep-materialization.check index b9185dc8315a..d8e761aafb83 100644 --- a/test/files/neg/macro-blackbox-fundep-materialization.check +++ b/test/files/neg/macro-blackbox-fundep-materialization.check @@ -1,8 +1,8 @@ -Test_2.scala:8: error: type mismatch; +Test_2.scala:7: error: type mismatch; found : Iso[Test.Foo,(Int, String, Boolean)] required: Iso[Test.Foo,Nothing] Note: (Int, String, Boolean) >: Nothing, but trait Iso is invariant in type U. You may wish to define U as -U instead. (SLS 4.5) val equiv = foo(Foo(23, "foo", true)) ^ -one error found +1 error diff --git a/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala b/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala index f74898dcac93..0603ffeaa5d1 100644 --- a/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala +++ b/test/files/neg/macro-blackbox-fundep-materialization/Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xlog-implicits import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context @@ -25,7 +24,7 @@ object Iso { } def mkFrom() = { - if (fields.length == 0) Literal(Constant(Unit)) + if (fields.length == 0) Literal(Constant(())) else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim)))) } diff --git a/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala b/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala index ccf51ca5e9ae..8b60943cfd8d 100644 --- a/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala +++ b/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -Xlog-implicits // see 
the comments for macroExpand.onDelayed for an explanation of what's tested here object Test extends App { case class Foo(i: Int, s: String, b: Boolean) @@ -6,7 +5,7 @@ object Test extends App { { val equiv = foo(Foo(23, "foo", true)) - def typed[T](t: => T) {} + def typed[T](t: => T): Unit = {} typed[(Int, String, Boolean)](equiv) println(equiv) } diff --git a/test/files/neg/macro-blackbox-structural.check b/test/files/neg/macro-blackbox-structural.check index 86a218559c64..b2dd28c4f910 100644 --- a/test/files/neg/macro-blackbox-structural.check +++ b/test/files/neg/macro-blackbox-structural.check @@ -1,4 +1,4 @@ Test_2.scala:4: error: value x is not a member of Any println(Macros.foo.x) ^ -one error found +1 error diff --git a/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala b/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala index a86a26d2c08c..786a3f45f2d3 100644 --- a/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala +++ b/test/files/neg/macro-blackbox-structural/Impls_Macros_1.scala @@ -12,4 +12,4 @@ object Macros { } def foo: Any = macro impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-blackbox-structural/Test_2.scala b/test/files/neg/macro-blackbox-structural/Test_2.scala index ea6a817e346e..e02b4feb427a 100644 --- a/test/files/neg/macro-blackbox-structural/Test_2.scala +++ b/test/files/neg/macro-blackbox-structural/Test_2.scala @@ -2,4 +2,4 @@ import Macros._ object Test extends App { println(Macros.foo.x) -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-abstract.check b/test/files/neg/macro-bundle-abstract.check index 1e51a00d0524..b2941d20494d 100644 --- a/test/files/neg/macro-bundle-abstract.check +++ b/test/files/neg/macro-bundle-abstract.check @@ -1,4 +1,4 @@ macro-bundle-abstract.scala:10: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter def foo = macro Bundle.impl ^ -one error found +1 error 
diff --git a/test/files/neg/macro-bundle-abstract.scala b/test/files/neg/macro-bundle-abstract.scala index 0afeaafc016a..db57d5dc9d7c 100644 --- a/test/files/neg/macro-bundle-abstract.scala +++ b/test/files/neg/macro-bundle-abstract.scala @@ -8,4 +8,4 @@ abstract class Bundle(c: Context) { object Macros { def foo = macro Bundle.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-ambiguous.check b/test/files/neg/macro-bundle-ambiguous.check index 84304964557e..c1c6f5ffca4e 100644 --- a/test/files/neg/macro-bundle-ambiguous.check +++ b/test/files/neg/macro-bundle-ambiguous.check @@ -2,4 +2,4 @@ macro-bundle-ambiguous.scala:13: error: macro implementation reference is ambigu a macro bundle method reference and a vanilla object method reference def foo: Unit = macro Macros.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-ambiguous.scala b/test/files/neg/macro-bundle-ambiguous.scala index 92c359d9a944..72c6a2c30847 100644 --- a/test/files/neg/macro-bundle-ambiguous.scala +++ b/test/files/neg/macro-bundle-ambiguous.scala @@ -11,4 +11,4 @@ object Macros { object Test extends App { def foo: Unit = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-need-qualifier.check b/test/files/neg/macro-bundle-need-qualifier.check index 6a74ee6aedb6..4c3bbaa04baa 100644 --- a/test/files/neg/macro-bundle-need-qualifier.check +++ b/test/files/neg/macro-bundle-need-qualifier.check @@ -1,4 +1,4 @@ macro-bundle-need-qualifier.scala:10: error: not found: value impl def foo: Any = macro impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-need-qualifier.scala b/test/files/neg/macro-bundle-need-qualifier.scala index 0d021e3537b2..1edda8081a25 100644 --- a/test/files/neg/macro-bundle-need-qualifier.scala +++ b/test/files/neg/macro-bundle-need-qualifier.scala @@ -8,4 +8,4 @@ class Macros(val c: Context) { object Macros { def foo: Any = macro impl -} \ No newline at end of file +} 
diff --git a/test/files/neg/macro-bundle-noncontext.check b/test/files/neg/macro-bundle-noncontext.check index bb5d0851f592..a9547a85ee5d 100644 --- a/test/files/neg/macro-bundle-noncontext.check +++ b/test/files/neg/macro-bundle-noncontext.check @@ -1,4 +1,4 @@ macro-bundle-noncontext.scala:8: error: not found: value Bundle def foo = Bundle.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-noncontext.scala b/test/files/neg/macro-bundle-noncontext.scala index c228827e7015..b3d7f4464e91 100644 --- a/test/files/neg/macro-bundle-noncontext.scala +++ b/test/files/neg/macro-bundle-noncontext.scala @@ -6,4 +6,4 @@ class Bundle { object Macros { def foo = Bundle.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-nonpublic-c.check b/test/files/neg/macro-bundle-nonpublic-c.check index 1dfcee58b743..5472f6bb436e 100644 --- a/test/files/neg/macro-bundle-nonpublic-c.check +++ b/test/files/neg/macro-bundle-nonpublic-c.check @@ -1,4 +1,4 @@ macro-bundle-nonpublic-c.scala:6: error: private value c escapes its defining scope as part of type Macros.this.c.universe.Literal def impl = q"()" ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-nonpublic-c.scala b/test/files/neg/macro-bundle-nonpublic-c.scala index 86a2039743f3..a240787515df 100644 --- a/test/files/neg/macro-bundle-nonpublic-c.scala +++ b/test/files/neg/macro-bundle-nonpublic-c.scala @@ -8,4 +8,4 @@ class Macros(c: Context) { object Macros { def foo: Any = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-nonpublic-impl.check b/test/files/neg/macro-bundle-nonpublic-impl.check index 7a4e1516f763..00a18367e44b 100644 --- a/test/files/neg/macro-bundle-nonpublic-impl.check +++ b/test/files/neg/macro-bundle-nonpublic-impl.check @@ -1,4 +1,4 @@ macro-bundle-nonpublic-impl.scala:10: error: bundle implementation must be public def foo: Any = macro Macros.impl ^ -one error found +1 error diff --git 
a/test/files/neg/macro-bundle-nonpublic-impl.scala b/test/files/neg/macro-bundle-nonpublic-impl.scala index 5857cc6b2497..1ed17277cc34 100644 --- a/test/files/neg/macro-bundle-nonpublic-impl.scala +++ b/test/files/neg/macro-bundle-nonpublic-impl.scala @@ -8,4 +8,4 @@ class Macros(val c: Context) { object Macros { def foo: Any = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-nonstatic.check b/test/files/neg/macro-bundle-nonstatic.check index 36bccc54db6d..77e5e1f0dd26 100644 --- a/test/files/neg/macro-bundle-nonstatic.check +++ b/test/files/neg/macro-bundle-nonstatic.check @@ -10,4 +10,4 @@ macro-bundle-nonstatic.scala:17: error: macro bundles must be static macro-bundle-nonstatic.scala:23: error: macro bundles must be static def foo = macro Bundle.impl ^ -four errors found +4 errors diff --git a/test/files/neg/macro-bundle-object.check b/test/files/neg/macro-bundle-object.check index b8800105f56d..2298e54c8825 100644 --- a/test/files/neg/macro-bundle-object.check +++ b/test/files/neg/macro-bundle-object.check @@ -1,8 +1,8 @@ macro-bundle-object.scala:10: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : : Nothing number of parameter sections differ - def foo = macro Bundle.impl - ^ -one error found + def foo: Any = macro Bundle.impl + ^ +1 error diff --git a/test/files/neg/macro-bundle-object.scala b/test/files/neg/macro-bundle-object.scala index 6e1eec16860d..35162ea00049 100644 --- a/test/files/neg/macro-bundle-object.scala +++ b/test/files/neg/macro-bundle-object.scala @@ -7,5 +7,5 @@ object Bundle { } object Macros { - def foo = macro Bundle.impl -} \ No newline at end of file + def foo: Any = macro Bundle.impl +} diff --git a/test/files/neg/macro-bundle-overloaded.check 
b/test/files/neg/macro-bundle-overloaded.check index 499068aaa812..6e9aee8ba475 100644 --- a/test/files/neg/macro-bundle-overloaded.check +++ b/test/files/neg/macro-bundle-overloaded.check @@ -1,4 +1,4 @@ macro-bundle-overloaded.scala:11: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter def foo = macro Bundle.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-overloaded.scala b/test/files/neg/macro-bundle-overloaded.scala index a4bc66f97451..c5120b5b5e29 100644 --- a/test/files/neg/macro-bundle-overloaded.scala +++ b/test/files/neg/macro-bundle-overloaded.scala @@ -9,4 +9,4 @@ class Bundle(val c: BlackboxContext) { object Macros { def foo = macro Bundle.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-polymorphic.check b/test/files/neg/macro-bundle-polymorphic.check index 60a4d59119b4..a04bb5aa04d6 100644 --- a/test/files/neg/macro-bundle-polymorphic.check +++ b/test/files/neg/macro-bundle-polymorphic.check @@ -16,4 +16,4 @@ macro-bundle-polymorphic.scala:41: error: macro bundles must be concrete monomor macro-bundle-polymorphic.scala:42: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter def white3: Any = macro WhiteboxBundle3.impl ^ -6 errors found +6 errors diff --git a/test/files/neg/macro-bundle-polymorphic.scala b/test/files/neg/macro-bundle-polymorphic.scala index 2ba91aa0c5d0..3e7ffe89c7bc 100644 --- a/test/files/neg/macro-bundle-polymorphic.scala +++ b/test/files/neg/macro-bundle-polymorphic.scala @@ -40,4 +40,4 @@ object Macros { def white1: Any = macro WhiteboxBundle1.impl def white2: Any = macro WhiteboxBundle2.impl def white3: Any = macro WhiteboxBundle3.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-priority-bundle.check b/test/files/neg/macro-bundle-priority-bundle.check index c6cea72ba6e6..149b89c6f50b 100644 --- 
a/test/files/neg/macro-bundle-priority-bundle.check +++ b/test/files/neg/macro-bundle-priority-bundle.check @@ -5,4 +5,4 @@ macro-bundle-priority-bundle.scala:13: error: bundle implementation has incompat number of parameter sections differ def foo: Unit = macro Macros.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-priority-bundle.scala b/test/files/neg/macro-bundle-priority-bundle.scala index ce831a71217f..ab9987b8f7f4 100644 --- a/test/files/neg/macro-bundle-priority-bundle.scala +++ b/test/files/neg/macro-bundle-priority-bundle.scala @@ -11,4 +11,4 @@ object Macros { object Test extends App { def foo: Unit = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-priority-nonbundle.check b/test/files/neg/macro-bundle-priority-nonbundle.check index 0d03b5074b18..798df1f8d3ba 100644 --- a/test/files/neg/macro-bundle-priority-nonbundle.check +++ b/test/files/neg/macro-bundle-priority-nonbundle.check @@ -5,4 +5,4 @@ macro-bundle-priority-nonbundle.scala:13: error: macro implementation has incomp number of parameter sections differ def foo: Unit = macro Macros.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-priority-nonbundle.scala b/test/files/neg/macro-bundle-priority-nonbundle.scala index 8dc00f6dd3c4..0ef5b789ade0 100644 --- a/test/files/neg/macro-bundle-priority-nonbundle.scala +++ b/test/files/neg/macro-bundle-priority-nonbundle.scala @@ -11,4 +11,4 @@ object Macros { object Test extends App { def foo: Unit = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-trait.check b/test/files/neg/macro-bundle-trait.check index 869c67e1e308..2c2a771b00c8 100644 --- a/test/files/neg/macro-bundle-trait.check +++ b/test/files/neg/macro-bundle-trait.check @@ -1,4 +1,4 @@ macro-bundle-trait.scala:10: error: not found: value Bundle def foo = macro Bundle.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-trait.scala 
b/test/files/neg/macro-bundle-trait.scala index 2aa63216f59e..2015508b8049 100644 --- a/test/files/neg/macro-bundle-trait.scala +++ b/test/files/neg/macro-bundle-trait.scala @@ -8,4 +8,4 @@ trait Bundle { object Macros { def foo = macro Bundle.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-whitebox-use-raw.check b/test/files/neg/macro-bundle-whitebox-use-raw.check index 5792e317a63a..379e2438ae66 100644 --- a/test/files/neg/macro-bundle-whitebox-use-raw.check +++ b/test/files/neg/macro-bundle-whitebox-use-raw.check @@ -14,4 +14,4 @@ Test_2.scala:13: error: I don't like classes that contain integers Test_2.scala:17: error: extractor macros can only be whitebox case ExtractorMacro(x) => println(x) ^ -four errors found +4 errors diff --git a/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala b/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala index 61bf73e481e7..7242752a3331 100644 --- a/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala +++ b/test/files/neg/macro-bundle-whitebox-use-raw/Macros_1.scala @@ -43,7 +43,7 @@ class FundepMaterializationBundle(val c: Context) { } def mkFrom() = { - if (fields.length == 0) Literal(Constant(Unit)) + if (fields.length == 0) Literal(Constant(())) else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim)))) } diff --git a/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala b/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala index 3a8170025184..995da737cc95 100644 --- a/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala +++ b/test/files/neg/macro-bundle-whitebox-use-raw/Test_2.scala @@ -5,7 +5,7 @@ object Test extends App { def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c) locally { val equiv = foo(Foo(23, "foo", true)) - def typed[T](t: => T) {} + def typed[T](t: => T): Unit = {} typed[(Int, String, Boolean)](equiv) println(equiv) } diff 
--git a/test/files/neg/macro-bundle-whitebox-use-refined.check b/test/files/neg/macro-bundle-whitebox-use-refined.check index 5792e317a63a..379e2438ae66 100644 --- a/test/files/neg/macro-bundle-whitebox-use-refined.check +++ b/test/files/neg/macro-bundle-whitebox-use-refined.check @@ -14,4 +14,4 @@ Test_2.scala:13: error: I don't like classes that contain integers Test_2.scala:17: error: extractor macros can only be whitebox case ExtractorMacro(x) => println(x) ^ -four errors found +4 errors diff --git a/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala b/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala index 186604422135..69349badc89b 100644 --- a/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala +++ b/test/files/neg/macro-bundle-whitebox-use-refined/Macros_1.scala @@ -43,7 +43,7 @@ class FundepMaterializationBundle(val c: Context { type PrefixType = Nothing }) } def mkFrom() = { - if (fields.length == 0) Literal(Constant(Unit)) + if (fields.length == 0) Literal(Constant(())) else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim)))) } diff --git a/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala b/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala index 3a8170025184..995da737cc95 100644 --- a/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala +++ b/test/files/neg/macro-bundle-whitebox-use-refined/Test_2.scala @@ -5,7 +5,7 @@ object Test extends App { def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c) locally { val equiv = foo(Foo(23, "foo", true)) - def typed[T](t: => T) {} + def typed[T](t: => T): Unit = {} typed[(Int, String, Boolean)](equiv) println(equiv) } diff --git a/test/files/neg/macro-bundle-wrongcontext-a.check b/test/files/neg/macro-bundle-wrongcontext-a.check index 10aadb00356e..59d694df7698 100644 --- a/test/files/neg/macro-bundle-wrongcontext-a.check +++ 
b/test/files/neg/macro-bundle-wrongcontext-a.check @@ -1,4 +1,4 @@ macro-bundle-wrongcontext-a.scala:12: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter def foo: Any = macro Bundle.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-wrongcontext-a.scala b/test/files/neg/macro-bundle-wrongcontext-a.scala index ed566fd977dd..0cfd433159a7 100644 --- a/test/files/neg/macro-bundle-wrongcontext-a.scala +++ b/test/files/neg/macro-bundle-wrongcontext-a.scala @@ -10,4 +10,4 @@ class Bundle(val c: MyContext) { object Macros { def foo: Any = macro Bundle.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-bundle-wrongcontext-b.check b/test/files/neg/macro-bundle-wrongcontext-b.check index e9700d379ed9..bbc1f83ae90b 100644 --- a/test/files/neg/macro-bundle-wrongcontext-b.check +++ b/test/files/neg/macro-bundle-wrongcontext-b.check @@ -1,4 +1,4 @@ macro-bundle-wrongcontext-b.scala:10: error: macro bundles must be concrete monomorphic classes having a single constructor with a `val c: Context` parameter def foo: Any = macro Bundle.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-bundle-wrongcontext-b.scala b/test/files/neg/macro-bundle-wrongcontext-b.scala index 0b4ff7e17cfc..5f47e46e436b 100644 --- a/test/files/neg/macro-bundle-wrongcontext-b.scala +++ b/test/files/neg/macro-bundle-wrongcontext-b.scala @@ -8,4 +8,4 @@ class Bundle(val c: Context { type Foo <: Int }) { object Macros { def foo: Any = macro Bundle.impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-cyclic.check b/test/files/neg/macro-cyclic.check index 7bc116a2988c..79dadefb66c0 100644 --- a/test/files/neg/macro-cyclic.check +++ b/test/files/neg/macro-cyclic.check @@ -1,4 +1,4 @@ Impls_Macros_1.scala:6: error: could not find implicit value for parameter e: SourceLocation c.universe.reify { implicitly[SourceLocation] } ^ -one error found +1 error diff --git 
a/test/files/neg/macro-cyclic/Impls_Macros_1.scala b/test/files/neg/macro-cyclic/Impls_Macros_1.scala index 0710b42e6c10..cc028d8ff911 100644 --- a/test/files/neg/macro-cyclic/Impls_Macros_1.scala +++ b/test/files/neg/macro-cyclic/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check index 3bad77913ee0..e800f2ffba83 100644 --- a/test/files/neg/macro-deprecate-idents.check +++ b/test/files/neg/macro-deprecate-idents.check @@ -1,67 +1,71 @@ -macro-deprecate-idents.scala:3: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:4: error: macro is now a reserved word; usage as an identifier is disallowed val macro = ??? ^ -macro-deprecate-idents.scala:7: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:8: error: macro is now a reserved word; usage as an identifier is disallowed var macro = ??? 
^ -macro-deprecate-idents.scala:11: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:12: error: macro is now a reserved word; usage as an identifier is disallowed type macro = Int ^ -macro-deprecate-idents.scala:15: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:16: error: macro is now a reserved word; usage as an identifier is disallowed class macro ^ -macro-deprecate-idents.scala:19: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:20: error: macro is now a reserved word; usage as an identifier is disallowed class macro ^ -macro-deprecate-idents.scala:23: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:24: error: macro is now a reserved word; usage as an identifier is disallowed object macro ^ -macro-deprecate-idents.scala:27: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:28: error: macro is now a reserved word; usage as an identifier is disallowed object macro ^ -macro-deprecate-idents.scala:31: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:32: error: macro is now a reserved word; usage as an identifier is disallowed trait macro ^ -macro-deprecate-idents.scala:35: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:36: error: macro is now a reserved word; usage as an identifier is disallowed trait macro ^ -macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:39: error: macro is now a reserved word; usage as an identifier is disallowed package macro { ^ -macro-deprecate-idents.scala:39: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:40: 
error: macro is now a reserved word; usage as an identifier is disallowed package macro.bar { ^ -macro-deprecate-idents.scala:44: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:45: error: macro is now a reserved word; usage as an identifier is disallowed package macro.foo { ^ -macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is disallowed val Some(macro) = Some(42) ^ -macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:51: error: macro is now a reserved word; usage as an identifier is disallowed macro match { ^ -macro-deprecate-idents.scala:51: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:52: error: macro is now a reserved word; usage as an identifier is disallowed case macro => println(macro) ^ -macro-deprecate-idents.scala:51: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:52: error: macro is now a reserved word; usage as an identifier is disallowed case macro => println(macro) ^ -macro-deprecate-idents.scala:56: error: macro is now a reserved word; usage as an identifier is disallowed +macro-deprecate-idents.scala:57: error: macro is now a reserved word; usage as an identifier is disallowed def macro = 2 ^ -macro-deprecate-idents.scala:4: error: '=' expected but '}' found. +macro-deprecate-idents.scala:5: error: '=' expected but '}' found. } ^ -macro-deprecate-idents.scala:8: error: '=' expected but '}' found. +macro-deprecate-idents.scala:9: error: '=' expected but '}' found. } ^ -macro-deprecate-idents.scala:43: error: '{' expected. +macro-deprecate-idents.scala:44: error: '{' expected. 
package foo { ^ -macro-deprecate-idents.scala:46: error: '{' expected but '}' found. +macro-deprecate-idents.scala:47: error: '{' expected but '}' found. } ^ -macro-deprecate-idents.scala:53: error: ')' expected but '}' found. +macro-deprecate-idents.scala:54: error: ')' expected but '}' found. } ^ -22 errors found +macro-deprecate-idents.scala:57: warning: procedure syntax is deprecated: instead, add `: Unit` to explicitly declare ``'s return type [quickfixable] + def macro = 2 + ^ +1 warning +22 errors diff --git a/test/files/neg/macro-deprecate-idents.scala b/test/files/neg/macro-deprecate-idents.scala index ef08878e3e34..be21c06893db 100644 --- a/test/files/neg/macro-deprecate-idents.scala +++ b/test/files/neg/macro-deprecate-idents.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// object Test1 { val macro = ??? } diff --git a/test/files/neg/macro-divergence-controlled.check b/test/files/neg/macro-divergence-controlled.check index 4876f7cf9653..030a8c40ffc3 100644 --- a/test/files/neg/macro-divergence-controlled.check +++ b/test/files/neg/macro-divergence-controlled.check @@ -1,4 +1,4 @@ Test_2.scala:2: error: could not find implicit value for parameter e: Complex[Foo] println(implicitly[Complex[Foo]]) ^ -one error found +1 error diff --git a/test/files/neg/macro-divergence-controlled/Test_2.scala b/test/files/neg/macro-divergence-controlled/Test_2.scala index dcc4593335cb..e06175f306bc 100644 --- a/test/files/neg/macro-divergence-controlled/Test_2.scala +++ b/test/files/neg/macro-divergence-controlled/Test_2.scala @@ -1,3 +1,3 @@ object Test extends App { println(implicitly[Complex[Foo]]) -} \ No newline at end of file +} diff --git a/test/files/neg/macro-exception.check b/test/files/neg/macro-exception.check index dca97aebce70..87e3d5b53cce 100644 --- a/test/files/neg/macro-exception.check +++ b/test/files/neg/macro-exception.check @@ -4,4 +4,4 @@ java.lang.Exception Macros.exception ^ -one 
error found +1 error diff --git a/test/files/neg/macro-exception/Macros_1.scala b/test/files/neg/macro-exception/Macros_1.scala index 3d6109dc9db5..2b958a49bed8 100644 --- a/test/files/neg/macro-exception/Macros_1.scala +++ b/test/files/neg/macro-exception/Macros_1.scala @@ -5,5 +5,5 @@ object Macros { def impl(c: Context) = { throw new Exception() } - def exception = macro impl -} \ No newline at end of file + def exception: Any = macro impl +} diff --git a/test/files/neg/macro-exception/Test_2.scala b/test/files/neg/macro-exception/Test_2.scala index d82b21f2b2fc..223ee32c9635 100644 --- a/test/files/neg/macro-exception/Test_2.scala +++ b/test/files/neg/macro-exception/Test_2.scala @@ -1,3 +1,3 @@ object Test extends App { Macros.exception -} \ No newline at end of file +} diff --git a/test/files/neg/macro-false-deprecation-warning.check b/test/files/neg/macro-false-deprecation-warning.check index fe3d98982197..b55a85e73ca1 100644 --- a/test/files/neg/macro-false-deprecation-warning.check +++ b/test/files/neg/macro-false-deprecation-warning.check @@ -1,4 +1,4 @@ -Impls_Macros_1.scala:6: error: illegal start of simple expression +Impls_Macros_1.scala:9: error: illegal start of simple expression } ^ -one error found +1 error diff --git a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala index 7639cbfa95ed..e43173a04a6b 100644 --- a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala +++ b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala @@ -1,6 +1,9 @@ -// scalac: -language:experimental.macros -deprecation +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +// don't emit deprecation warnings about identifiers like `macro` or `then` +// when skimming through the source file trying to heal braces + object Helper { def unapplySeq[T](x: List[T]): Option[Seq[T]] = } diff --git 
a/test/files/neg/macro-incompatible-macro-engine-a.check b/test/files/neg/macro-incompatible-macro-engine-a.check index 8ae08bd16415..66569aa722a9 100644 --- a/test/files/neg/macro-incompatible-macro-engine-a.check +++ b/test/files/neg/macro-incompatible-macro-engine-a.check @@ -4,4 +4,4 @@ Test_3.scala:2: error: macro cannot be expanded, because it was compiled by an i Test_3.scala:3: error: macro cannot be expanded, because it was compiled by an incompatible macro engine Macros.foo ^ -two errors found +2 errors diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala b/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala index 5b24ff5c7b11..c3e2b9255ab0 100644 --- a/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala +++ b/test/files/neg/macro-incompatible-macro-engine-a/Macros_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xplugin:. +//> using options -Xplugin:. import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala b/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala index 44ed91d2fb3b..261681088c10 100644 --- a/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala +++ b/test/files/neg/macro-incompatible-macro-engine-a/Plugin_1.scala @@ -13,7 +13,7 @@ class Plugin(val global: Global) extends NscPlugin { addMacroPlugin(MacroPlugin) object MacroPlugin extends MacroPlugin { - def fixupBinding(tree: Tree) = new Transformer { + def fixupBinding(tree: Tree) = new AstTransformer { override def transform(tree: Tree) = { tree match { case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const) @@ -32,4 +32,4 @@ class Plugin(val global: Global) extends NscPlugin { Some(result) } } -} \ No newline at end of file +} diff --git a/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala b/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala index 
7e4fae52364e..4b6836e516d7 100644 --- a/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala +++ b/test/files/neg/macro-incompatible-macro-engine-a/Test_3.scala @@ -1,4 +1,4 @@ object Test extends App { Macros.foo Macros.foo -} \ No newline at end of file +} diff --git a/test/files/neg/macro-incompatible-macro-engine-b.check b/test/files/neg/macro-incompatible-macro-engine-b.check index be2e0afcf5a4..e3ce4fb6cdf0 100644 --- a/test/files/neg/macro-incompatible-macro-engine-b.check +++ b/test/files/neg/macro-incompatible-macro-engine-b.check @@ -4,4 +4,4 @@ Test_3.scala:3: error: macro cannot be expanded, because it was compiled by an i Test_3.scala:4: error: macro cannot be expanded, because it was compiled by an incompatible macro engine (internal diagnostic: expected = v7.0 (implemented in Scala 2.11.0-M8), actual = vxxx (implemented in the incompatibleMacroEngine plugin)) Macros.foo ^ -two errors found +2 errors diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala b/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala index 5b24ff5c7b11..c3e2b9255ab0 100644 --- a/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala +++ b/test/files/neg/macro-incompatible-macro-engine-b/Macros_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xplugin:. +//> using options -Xplugin:. 
import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala b/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala index 8e727dae6989..261681088c10 100644 --- a/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala +++ b/test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala @@ -13,7 +13,7 @@ class Plugin(val global: Global) extends NscPlugin { addMacroPlugin(MacroPlugin) object MacroPlugin extends MacroPlugin { - def fixupBinding(tree: Tree) = new Transformer { + def fixupBinding(tree: Tree) = new AstTransformer { override def transform(tree: Tree) = { tree match { case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const) diff --git a/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala b/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala index 5c327a4620c0..e9f7953e3881 100644 --- a/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala +++ b/test/files/neg/macro-incompatible-macro-engine-b/Test_3.scala @@ -1,4 +1,4 @@ -// scalac: -Ymacro-debug-lite +//> using options -Vmacro-lite object Test extends App { Macros.foo Macros.foo diff --git a/test/files/neg/macro-invalidimpl.check b/test/files/neg/macro-invalidimpl.check index f3d35b3496fa..4843325cc13c 100644 --- a/test/files/neg/macro-invalidimpl.check +++ b/test/files/neg/macro-invalidimpl.check @@ -1,53 +1,53 @@ Macros_Test_2.scala:6: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] - def foo(x: Any) = macro impls.foo - ^ + def foo(x: Any): Any = macro impls.foo + ^ Macros_Test_2.scala:11: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] - def foo(x: Any) = macro impls.foo - ^ + def foo(x: Any): Any = macro impls.foo + ^ Macros_Test_2.scala:19: error: macro implementation reference has wrong shape. 
required: macro [].[[]] or macro [].[[]] - def foo(x: Any) = macro Impls3.foo - ^ + def foo(x: Any): Any = macro Impls3.foo + ^ Macros_Test_2.scala:23: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] - def foo(x: Any) = macro Impls4.foo - ^ + def foo(x: Any): Any = macro Impls4.foo + ^ Macros_Test_2.scala:27: error: ambiguous reference to overloaded definition, -both method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing -and method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any])Nothing +both method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any], y: c.Expr[Any]): Nothing +and method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any]): Nothing match expected type ? - def foo(x: Any) = macro Impls5.foo - ^ + def foo(x: Any): Any = macro Impls5.foo + ^ Macros_Test_2.scala:28: error: ambiguous reference to overloaded definition, -both method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing -and method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any])Nothing +both method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any], y: c.Expr[Any]): Nothing +and method foo in object Impls5 of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any]): Nothing match expected type ? 
- def foo(x: Any, y: Any) = macro Impls5.foo - ^ + def foo(x: Any, y: Any): Any = macro Impls5.foo + ^ Macros_Test_2.scala:32: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Unit] + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(): c.Expr[Unit] number of parameter sections differ - def foo1 = macro Impls6.fooEmpty - ^ + def foo1: Any = macro Impls6.fooEmpty + ^ Macros_Test_2.scala:33: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context)(): c.Expr[Unit] + required: (c: scala.reflect.macros.blackbox.Context)(): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context)(): c.Tree found : (c: scala.reflect.macros.blackbox.Context): c.Expr[Unit] number of parameter sections differ - def bar1() = macro Impls6.fooNullary - ^ + def bar1(): Any = macro Impls6.fooNullary + ^ Macros_Test_2.scala:37: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int] - def foo = macro Impls7.foo[String] - ^ + def foo: Any = macro Impls7.foo[String] + ^ Macros_Test_2.scala:54: error: macro implementation must be public - def foo = macro Impls8.impl - ^ -10 errors found + def foo: Any = macro Impls8.impl + ^ +10 errors diff --git a/test/files/neg/macro-invalidimpl/Impls_1.scala b/test/files/neg/macro-invalidimpl/Impls_1.scala index 01f658648550..862c93b5e81b 100644 --- a/test/files/neg/macro-invalidimpl/Impls_1.scala +++ b/test/files/neg/macro-invalidimpl/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context class Impls1 { diff --git a/test/files/neg/macro-invalidimpl/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl/Macros_Test_2.scala index 1da5b415b1c5..ffad4bfa66f2 100644 --- a/test/files/neg/macro-invalidimpl/Macros_Test_2.scala +++ 
b/test/files/neg/macro-invalidimpl/Macros_Test_2.scala @@ -1,14 +1,14 @@ -// scalac: -language:experimental.macros +import language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros1 { val impls = new Impls1 - def foo(x: Any) = macro impls.foo + def foo(x: Any): Any = macro impls.foo } object Macros2 { val impls = Impls2 - def foo(x: Any) = macro impls.foo + def foo(x: Any): Any = macro impls.foo } class Macros3 { @@ -16,25 +16,25 @@ class Macros3 { def foo(c: Context)(x: c.Expr[Any]) = ??? } - def foo(x: Any) = macro Impls3.foo + def foo(x: Any): Any = macro Impls3.foo } class Macros4 extends MacroHelpers { - def foo(x: Any) = macro Impls4.foo + def foo(x: Any): Any = macro Impls4.foo } object Macros5 { - def foo(x: Any) = macro Impls5.foo - def foo(x: Any, y: Any) = macro Impls5.foo + def foo(x: Any): Any = macro Impls5.foo + def foo(x: Any, y: Any): Any = macro Impls5.foo } object Macros6 { - def foo1 = macro Impls6.fooEmpty - def bar1() = macro Impls6.fooNullary + def foo1: Any = macro Impls6.fooEmpty + def bar1(): Any = macro Impls6.fooNullary } object Macros7 { - def foo = macro Impls7.foo[String] + def foo: Any = macro Impls7.foo[String] } object Test extends App { @@ -51,6 +51,6 @@ object Test extends App { package foo { object Test extends App { - def foo = macro Impls8.impl + def foo: Any = macro Impls8.impl } } diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check index 68842c44d476..11097f429909 100644 --- a/test/files/neg/macro-invalidret.check +++ b/test/files/neg/macro-invalidret.check @@ -1,31 +1,54 @@ -Macros_Test_2.scala:3: error: macro implementation has incompatible shape: +Macros_Test_2.scala:6: error: macro implementation has incompatible shape: required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : (c: scala.reflect.macros.blackbox.Context): Int type mismatch for return type: Int does not conform to 
c.Expr[Any] def foo1 = macro Impls.foo1 ^ -Macros_Test_2.scala:4: error: macro implementation has incompatible shape: +Macros_Test_2.scala:7: error: macro implementation has incompatible shape: required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : (c: scala.reflect.macros.blackbox.Context): reflect.runtime.universe.Literal type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any] def foo2 = macro Impls.foo2 ^ -Macros_Test_2.scala:7: error: macro defs must have explicitly specified return types +Macros_Test_2.scala:8: error: macro defs must have explicitly specified return types + def foo3 = macro Impls.foo3 + ^ +Macros_Test_2.scala:9: error: macro defs must have explicitly specified return types + def foo4 = macro ??? + ^ +Macros_Test_2.scala:10: error: macro defs must have explicitly specified return types def foo5 = macro Impls.foo5 ^ -Macros_Test_2.scala:15: error: exception during macro expansion: +Macros_Test_2.scala:11: error: macro defs must have explicitly specified return types + def foo6 = macro Impls.foo6 + ^ +Macros_Test_2.scala:14: error: macro implementation has incompatible shape: + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Int] + or : (c: scala.reflect.macros.blackbox.Context): c.Tree + found : (c: scala.reflect.macros.blackbox.Context): Int +type mismatch for return type: Int does not conform to c.Expr[Int] + def bar1: Int = macro Impls.foo1 + ^ +Macros_Test_2.scala:15: error: macro implementation has incompatible shape: + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Int] + or : (c: scala.reflect.macros.blackbox.Context): c.Tree + found : (c: scala.reflect.macros.blackbox.Context): reflect.runtime.universe.Literal +type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Int] + def bar2: Int = macro Impls.foo2 + ^ +Macros_Test_2.scala:33: error: exception during macro 
expansion: +#partest !java15+ java.lang.NullPointerException - at Impls$.foo3(Impls_1.scala:8) +#partest java15+ +java.lang.NullPointerException: Cannot throw exception because "null" is null +#partest + at Impls$.foo3(Impls_1.scala:7) - foo3 + bar3 ^ -Macros_Test_2.scala:16: error: macro implementation is missing - foo4 +Macros_Test_2.scala:34: error: macro implementation is missing + bar4 ^ -Macros_Test_2.scala:8: warning: macro defs must have explicitly specified return types (inference of Int from macro impl's c.Expr[Int] is deprecated and is going to stop working in 2.12) - def foo6 = macro Impls.foo6 - ^ -one warning found -5 errors found +10 errors diff --git a/test/files/neg/macro-invalidret/Impls_1.scala b/test/files/neg/macro-invalidret/Impls_1.scala index 30bae076c5d4..a52e8d8f3992 100644 --- a/test/files/neg/macro-invalidret/Impls_1.scala +++ b/test/files/neg/macro-invalidret/Impls_1.scala @@ -1,11 +1,10 @@ -// scalac: -language:experimental.macros -Xfatal-warnings -deprecation import scala.reflect.macros.blackbox.Context import scala.reflect.runtime.{universe => ru} object Impls { def foo1(c: Context) = 2 def foo2(c: Context) = ru.Literal(ru.Constant(42)) - def foo3(c: Context) = throw new NullPointerException + def foo3(c: Context) = throw null def foo5(c: Context) = c.universe.Literal(c.universe.Constant(42)) def foo6(c: Context) = c.Expr[Int](c.universe.Literal(c.universe.Constant(42))) } diff --git a/test/files/neg/macro-invalidret/Macros_Test_2.scala b/test/files/neg/macro-invalidret/Macros_Test_2.scala index f0494ad6c75a..102d112fb58c 100644 --- a/test/files/neg/macro-invalidret/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidret/Macros_Test_2.scala @@ -1,11 +1,22 @@ -// scalac: -language:experimental.macros -Xfatal-warnings -deprecation +//> using options -Xlint:deprecation -Werror +import language.experimental.macros + object Macros { + // result type required def foo1 = macro Impls.foo1 def foo2 = macro Impls.foo2 def foo3 = macro 
Impls.foo3 def foo4 = macro ??? def foo5 = macro Impls.foo5 def foo6 = macro Impls.foo6 + + // various flawed attempts to implement + def bar1: Int = macro Impls.foo1 + def bar2: Int = macro Impls.foo2 + def bar3: Int = macro Impls.foo3 + def bar4: Int = macro ??? + def bar5: Int = macro Impls.foo5 + def bar6: Int = macro Impls.foo6 } object Test extends App { @@ -16,4 +27,11 @@ object Test extends App { foo4 foo5 foo6 + + bar1 + bar2 + bar3 + bar4 + bar5 + bar6 } diff --git a/test/files/neg/macro-invalidshape.check b/test/files/neg/macro-invalidshape.check index 1b701f7c1d94..8f639225d5d7 100644 --- a/test/files/neg/macro-invalidshape.check +++ b/test/files/neg/macro-invalidshape.check @@ -8,7 +8,7 @@ macro [].[[]] or macro [].[[]] def foo2(x: Any) = macro Impls.foo(null)(null) ^ -Macros_Test_2.scala:5: error: missing argument list for method foo in object Impls +Macros_Test_2.scala:5: error: missing argument list for method foo in object Impls of type (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any]): Nothing Unapplied methods are only converted to functions when a function type is expected. You can make this conversion explicit by writing `foo _` or `foo(_)(_)` instead of `foo`. 
def foo3(x: Any) = macro {2; Impls.foo} @@ -18,4 +18,4 @@ macro [].[[]] or macro [].[[]] def foo = macro impl ^ -four errors found +4 errors diff --git a/test/files/neg/macro-invalidshape/Impls_1.scala b/test/files/neg/macro-invalidshape/Impls_1.scala index 0e48a82c8d59..acc6b52b7bf0 100644 --- a/test/files/neg/macro-invalidshape/Impls_1.scala +++ b/test/files/neg/macro-invalidshape/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-invalidshape/Macros_Test_2.scala b/test/files/neg/macro-invalidshape/Macros_Test_2.scala index 0ffe90be7faf..0e978953609e 100644 --- a/test/files/neg/macro-invalidshape/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidshape/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo1(x: Any) = macro 2 def foo2(x: Any) = macro Impls.foo(null)(null) diff --git a/test/files/neg/macro-invalidsig-params-badtype.check b/test/files/neg/macro-invalidsig-params-badtype.check index 77322aa26b40..4467725470e8 100644 --- a/test/files/neg/macro-invalidsig-params-badtype.check +++ b/test/files/neg/macro-invalidsig-params-badtype.check @@ -3,6 +3,6 @@ Impls_Macros_1.scala:9: error: macro implementation has incompatible shape: or : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(x: Int): Nothing type mismatch for parameter x: c.Expr[Int] does not conform to Int - def foo(x: Int) = macro Impls.foo - ^ -one error found + def foo(x: Int): Nothing = macro Impls.foo + ^ +1 error diff --git a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala index e3b6c79675af..da5212af66d2 100644 --- a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala +++ 
b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -6,5 +6,5 @@ object Impls { } object Macros { - def foo(x: Int) = macro Impls.foo + def foo(x: Int): Nothing = macro Impls.foo } diff --git a/test/files/neg/macro-invalidsig.check b/test/files/neg/macro-invalidsig.check index dfb754629f76..911592f96586 100644 --- a/test/files/neg/macro-invalidsig.check +++ b/test/files/neg/macro-invalidsig.check @@ -2,84 +2,84 @@ Macros_Test_2.scala:3: error: macro implementations cannot have implicit paramet def foo[U]: Int = macro Impls1.foo[U] ^ Macros_Test_2.scala:7: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : : Nothing number of parameter sections differ - def foo = macro Impls2.foo - ^ + def foo: Any = macro Impls2.foo + ^ Macros_Test_2.scala:11: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : (c: scala.reflect.api.Universe): Nothing type mismatch for parameter c: scala.reflect.macros.blackbox.Context does not conform to scala.reflect.api.Universe - def foo = macro Impls3.foo - ^ + def foo: Any = macro Impls3.foo + ^ Macros_Test_2.scala:15: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : (cs: scala.reflect.macros.blackbox.Context*): Nothing types incompatible for parameter cs: 
corresponding is not a vararg parameter - def foo = macro Impls4.foo - ^ + def foo: Any = macro Impls4.foo + ^ Macros_Test_2.scala:19: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any]): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Any]): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context): Nothing number of parameter sections differ - def foo(x: Any) = macro Impls5.foo - ^ + def foo(x: Any): Any = macro Impls5.foo + ^ Macros_Test_2.scala:23: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences - def foo[U](x: Int) = macro Impls6.foo[T, U] - ^ + def foo[U](x: Int): Any = macro Impls6.foo[T, U] + ^ Macros_Test_2.scala:27: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing parameter lists have different length, found extra parameter y: c.Expr[Int] - def foo(x: Int) = macro Impls7.foo - ^ + def foo(x: Int): Any = macro Impls7.foo + ^ Macros_Test_2.scala:31: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int]): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(x: c.universe.Symbol): Nothing type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Symbol - def foo(x: Int) = macro Impls8.foo - ^ + def foo(x: Int): Any = macro Impls8.foo + ^ Macros_Test_2.scala:35: error: 
macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree, y: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[Int]*): Nothing parameter lists have different length, required extra parameter y: c.Expr[Int] - def foo(x: Int, y: Int) = macro Impls9.foo - ^ + def foo(x: Int, y: Int): Any = macro Impls9.foo + ^ Macros_Test_2.scala:39: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context)(x: c.Tree, y: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing parameter names differ: x != y - def foo(x: Int, y: Int) = macro Impls10.foo - ^ + def foo(x: Int, y: Int): Any = macro Impls10.foo + ^ Macros_Test_2.scala:43: error: macro implementation has incompatible shape: - required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Nothing] + required: (c: scala.reflect.macros.blackbox.Context): c.Expr[Any] or : (c: scala.reflect.macros.blackbox.Context): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(U: c.universe.Type): Nothing number of parameter sections differ - def foo[U] = macro Impls11.foo[U] - ^ + def foo[U]: Any = macro Impls11.foo[U] + ^ Macros_Test_2.scala:47: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String] - def foo[U] = macro Impls12.foo[U] - ^ + def foo[U]: Any = macro Impls12.foo[U] + ^ Macros_Test_2.scala:51: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String] - def foo[U <: Int] = macro 
Impls13.foo[U] - ^ -Macros_Test_2.scala:55: error: macro implementation reference has too few type arguments for method foo: [U](c: scala.reflect.macros.blackbox.Context)(implicit evidence$4: c.WeakTypeTag[U])Nothing - def foo = macro Impls14.foo - ^ -Macros_Test_2.scala:60: error: macro implementation reference has too few type arguments for method foo: [T, U, V](c: scala.reflect.macros.blackbox.Context)(implicit evidence$5: c.WeakTypeTag[T], implicit evidence$6: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit] + def foo[U <: Int]: Any = macro Impls13.foo[U] + ^ +Macros_Test_2.scala:55: error: macro implementation reference has too few type arguments for method foo: [U](c: scala.reflect.macros.blackbox.Context)(implicit evidence$4: c.WeakTypeTag[U]): Nothing + def foo: Any = macro Impls14.foo + ^ +Macros_Test_2.scala:60: error: macro implementation reference has too few type arguments for method foo: [T, U, V](c: scala.reflect.macros.blackbox.Context)(implicit evidence$5: c.WeakTypeTag[T], evidence$6: c.WeakTypeTag[U], V: c.WeakTypeTag[V]): c.Expr[Unit] def foo15[V]: Unit = macro Impls15.foo ^ -Macros_Test_2.scala:61: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.blackbox.Context)(implicit evidence$7: c.WeakTypeTag[T], implicit evidence$8: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit] +Macros_Test_2.scala:61: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.blackbox.Context)(implicit evidence$7: c.WeakTypeTag[T], evidence$8: c.WeakTypeTag[U], V: c.WeakTypeTag[V]): c.Expr[Unit] def foo16[V]: Unit = macro Impls16.foo[V] ^ -16 errors found +16 errors diff --git a/test/files/neg/macro-invalidsig/Impls_1.scala b/test/files/neg/macro-invalidsig/Impls_1.scala index 1145676aaa9e..2816c4ddfb4c 100644 --- a/test/files/neg/macro-invalidsig/Impls_1.scala +++ b/test/files/neg/macro-invalidsig/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import 
scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context diff --git a/test/files/neg/macro-invalidsig/Macros_Test_2.scala b/test/files/neg/macro-invalidsig/Macros_Test_2.scala index 5272c81edf97..9ac0810011ab 100644 --- a/test/files/neg/macro-invalidsig/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidsig/Macros_Test_2.scala @@ -1,58 +1,58 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros1 { def foo[U]: Int = macro Impls1.foo[U] } object Macros2 { - def foo = macro Impls2.foo + def foo: Any = macro Impls2.foo } object Macros3 { - def foo = macro Impls3.foo + def foo: Any = macro Impls3.foo } object Macros4 { - def foo = macro Impls4.foo + def foo: Any = macro Impls4.foo } object Macros5 { - def foo(x: Any) = macro Impls5.foo + def foo(x: Any): Any = macro Impls5.foo } class Macros6[T] { - def foo[U](x: Int) = macro Impls6.foo[T, U] + def foo[U](x: Int): Any = macro Impls6.foo[T, U] } object Macros7 { - def foo(x: Int) = macro Impls7.foo + def foo(x: Int): Any = macro Impls7.foo } object Macros8 { - def foo(x: Int) = macro Impls8.foo + def foo(x: Int): Any = macro Impls8.foo } object Macros9 { - def foo(x: Int, y: Int) = macro Impls9.foo + def foo(x: Int, y: Int): Any = macro Impls9.foo } object Macros10 { - def foo(x: Int, y: Int) = macro Impls10.foo + def foo(x: Int, y: Int): Any = macro Impls10.foo } object Macros11 { - def foo[U] = macro Impls11.foo[U] + def foo[U]: Any = macro Impls11.foo[U] } object Macros12 { - def foo[U] = macro Impls12.foo[U] + def foo[U]: Any = macro Impls12.foo[U] } object Macros13 { - def foo[U <: Int] = macro Impls13.foo[U] + def foo[U <: Int]: Any = macro Impls13.foo[U] } object Macros14 { - def foo = macro Impls14.foo + def foo: Any = macro Impls14.foo } class D[T] { diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check index f68b9e172e77..7bfc8edf22c0 100644 --- 
a/test/files/neg/macro-invalidusage-badargs.check +++ b/test/files/neg/macro-invalidusage-badargs.check @@ -9,11 +9,11 @@ Macros_Test_2.scala:7: error: too few argument lists for macro invocation Macros_Test_2.scala:8: error: Int does not take parameters foo(4)(2) ^ -Macros_Test_2.scala:9: error: not enough arguments for macro method foo: (x: Int)Int. +Macros_Test_2.scala:9: error: not enough arguments for macro method foo: (x: Int): Int. Unspecified value parameter x. foo() ^ -Macros_Test_2.scala:10: error: too many arguments (2) for macro method foo: (x: Int)Int +Macros_Test_2.scala:10: error: too many arguments (found 2, expected 1) for macro method foo: (x: Int): Int foo(4, 2) ^ -5 errors found +5 errors diff --git a/test/files/neg/macro-invalidusage-badargs/Impls_1.scala b/test/files/neg/macro-invalidusage-badargs/Impls_1.scala index 216780091f04..8765cfbd5f2c 100644 --- a/test/files/neg/macro-invalidusage-badargs/Impls_1.scala +++ b/test/files/neg/macro-invalidusage-badargs/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala index d419e80d97cc..80e4e7d329ba 100644 --- a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(x: Int): Int = macro Impls.foo } import Macros._ diff --git a/test/files/neg/macro-invalidusage-badbounds.check b/test/files/neg/macro-invalidusage-badbounds.check index 5def45b63db3..09e94d361603 100644 --- a/test/files/neg/macro-invalidusage-badbounds.check +++ b/test/files/neg/macro-invalidusage-badbounds.check @@ -1,4 +1,4 @@ Macros_Test_2.scala:8: error: type arguments [Int] do not conform to macro method foo's type 
parameter bounds [U <: String] foo[Int] ^ -one error found +1 error diff --git a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala index 90e2157b8fd1..1769da91e1eb 100644 --- a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala +++ b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala index d3b2ade75dc0..9bef92cb044d 100644 --- a/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo[U <: String]: Unit = macro Impls.foo[U] } diff --git a/test/files/neg/macro-invalidusage-badtargs.check b/test/files/neg/macro-invalidusage-badtargs.check index 7c37cf88aaf2..f51584a39deb 100644 --- a/test/files/neg/macro-invalidusage-badtargs.check +++ b/test/files/neg/macro-invalidusage-badtargs.check @@ -1,18 +1,18 @@ -Macros_Test_2.scala:14: error: macro method foo1: (x: Int)Int does not take type parameters. +Macros_Test_2.scala:14: error: macro method foo1: (x: Int): Int does not take type parameters. 
foo1[String](42) ^ -Macros_Test_2.scala:15: error: wrong number of type parameters for macro method foo2: [T](x: Int)Int +Macros_Test_2.scala:15: error: wrong number of type parameters for macro method foo2: [T](x: Int): Int foo2[String, String](42) ^ -Macros_Test_2.scala:16: error: wrong number of type parameters for macro method foo3: [T, U](x: Int)Int +Macros_Test_2.scala:16: error: wrong number of type parameters for macro method foo3: [T, U](x: Int): Int foo3[String](42) ^ -Macros_Test_2.scala:17: error: String takes no type parameters, expected: one +Macros_Test_2.scala:17: error: String takes no type parameters, expected: 1 foo4[String](42) ^ Macros_Test_2.scala:18: error: kinds of the type arguments (List) do not conform to the expected kinds of the type parameters (type T). List's type parameters do not match type T's expected parameters: -type A has no type parameters, but type U has one +type A has no type parameters, but type U has 1 foo5[List](42) ^ -5 errors found +5 errors diff --git a/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala b/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala index 216780091f04..8765cfbd5f2c 100644 --- a/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala +++ b/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala index e0dba1396848..5038d1a3a6c1 100644 --- a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala @@ -1,5 +1,5 @@ -// scalac: -language:experimental.macros -import scala.language.higherKinds +import scala.language.experimental.macros +//import scala.language.higherKinds object Macros { def foo1(x: Int): Int = macro Impls.foo diff --git 
a/test/files/neg/macro-invalidusage-methodvaluesyntax.check b/test/files/neg/macro-invalidusage-methodvaluesyntax.check index 244b5f1d3892..acfa1ef85e04 100644 --- a/test/files/neg/macro-invalidusage-methodvaluesyntax.check +++ b/test/files/neg/macro-invalidusage-methodvaluesyntax.check @@ -1,4 +1,4 @@ Macros_Test_2.scala:7: error: macros cannot be eta-expanded val firstClassFoo = Macros.foo _ ^ -one error found +1 error diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala index 472da347fabf..5cf284750bf4 100644 --- a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala +++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala index 67e2d134eee1..e5893d02e2ed 100644 --- a/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo: Unit = macro Impls.foo } diff --git a/test/files/neg/macro-invalidusage-nontypeable.check b/test/files/neg/macro-invalidusage-nontypeable.check index a5a396dd662f..4213a3adb153 100644 --- a/test/files/neg/macro-invalidusage-nontypeable.check +++ b/test/files/neg/macro-invalidusage-nontypeable.check @@ -1,4 +1,4 @@ -Test_2.scala:3: error: not found: value IDoNotExist +Test_2.scala:2: error: not found: value IDoNotExist Macros.foo ^ -one error found +1 error diff --git a/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala b/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala index 5d0160feb40b..765fea7a64cb 
100644 --- a/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala +++ b/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -10,5 +10,5 @@ object Impls { } object Macros { - def foo = macro Impls.foo + def foo: Int = macro Impls.foo } diff --git a/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala b/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala index 61d157c9c572..5d19639cddff 100644 --- a/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala +++ b/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { Macros.foo } diff --git a/test/files/neg/macro-invalidusage-presuper.check b/test/files/neg/macro-invalidusage-presuper.check index 467d13cfd92f..8327428ac5a9 100644 --- a/test/files/neg/macro-invalidusage-presuper.check +++ b/test/files/neg/macro-invalidusage-presuper.check @@ -1,4 +1,4 @@ -Macros_Test_2.scala:4: error: only concrete field definitions allowed in early object initialization section +Macros_Test_2.scala:3: error: only concrete field definitions allowed in early object initialization section class D extends { def x = macro impl } with AnyRef ^ -one error found +1 error diff --git a/test/files/neg/macro-invalidusage-presuper/Impls_1.scala b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala index fa14445bd812..7f10e91854bc 100644 --- a/test/files/neg/macro-invalidusage-presuper/Impls_1.scala +++ b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala index 71193902ccd3..929c36528fa6 100644 
--- a/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala +++ b/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import Impls._ class D extends { def x = macro impl } with AnyRef diff --git a/test/files/neg/macro-noexpand.check b/test/files/neg/macro-noexpand.check index d8d81daedb33..7f1d73fdf69c 100644 --- a/test/files/neg/macro-noexpand.check +++ b/test/files/neg/macro-noexpand.check @@ -1,4 +1,4 @@ Macros_Test_2.scala:8: error: not found: value x foo(x) ^ -one error found +1 error diff --git a/test/files/neg/macro-noexpand/Impls_1.scala b/test/files/neg/macro-noexpand/Impls_1.scala index 0e48a82c8d59..acc6b52b7bf0 100644 --- a/test/files/neg/macro-noexpand/Impls_1.scala +++ b/test/files/neg/macro-noexpand/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-noexpand/Macros_Test_2.scala b/test/files/neg/macro-noexpand/Macros_Test_2.scala index 7a055ec04955..055493bd92c6 100644 --- a/test/files/neg/macro-noexpand/Macros_Test_2.scala +++ b/test/files/neg/macro-noexpand/Macros_Test_2.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { - def foo(x: Any) = macro Impls.foo + def foo(x: Any): Any = macro Impls.foo } object Test extends App { diff --git a/test/files/neg/macro-nontypeablebody.check b/test/files/neg/macro-nontypeablebody.check index 3fa4949cba4d..50a122553ec1 100644 --- a/test/files/neg/macro-nontypeablebody.check +++ b/test/files/neg/macro-nontypeablebody.check @@ -1,4 +1,5 @@ Macros_Test_2.scala:3: error: value foo2 is not a member of object Impls +did you mean foo? 
def foo(x: Any) = macro Impls.foo2 ^ -one error found +1 error diff --git a/test/files/neg/macro-nontypeablebody/Impls_1.scala b/test/files/neg/macro-nontypeablebody/Impls_1.scala index 0e48a82c8d59..acc6b52b7bf0 100644 --- a/test/files/neg/macro-nontypeablebody/Impls_1.scala +++ b/test/files/neg/macro-nontypeablebody/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala b/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala index fb661610e5fd..f689411ff0f8 100644 --- a/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala +++ b/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(x: Any) = macro Impls.foo2 } diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check index 5baec777208f..903b3f8f21a6 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check @@ -1,5 +1,5 @@ -Impls_Macros_1.scala:13: error: overriding method foo in trait Foo of type (x: Int)Int; - macro method foo cannot be used here - term macros cannot override abstract methods +Impls_Macros_1.scala:13: error: macro cannot override abstract method: +def foo(x: Int): Int (defined in trait Foo) def foo(x: Int): Int = macro Impls.impl ^ -one error found +1 error diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala index 09f9245148e6..2ce31f46075f 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala +++ 
b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala index f0b6eb9e6f34..8821dc946481 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { val designator: Macros.type = Macros designator.foo(42) diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check index 0db2fd81dc04..a8bd5158cf4a 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check @@ -1,11 +1,11 @@ Test_2.scala:4: error: <$anon: C with A> inherits conflicting members: - macro method t in trait C of type ()Unit and - method t in trait A of type ()Unit -(Note: this can be resolved by declaring an override in <$anon: C with A>.) + macro override def t(): Unit (defined in trait C) and + def t(): Unit (defined in trait A) + (note: this can be resolved by declaring an `override` in <$anon: C with A>.) 
val c2 = new C with A {} ^ -Test_2.scala:6: error: overriding macro method t in trait C of type ()Unit; - method t cannot be used here - only term macros can override term macros +Test_2.scala:6: error: macro can only be overridden by another macro: +macro override def t(): Unit (defined in trait C) val c4 = new C with A { override def t(): Unit = () } ^ -two errors found +2 errors diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala index 0aba1840fc8e..17827abf7a6c 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context import language.experimental.macros diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala index d1cac88d41b9..e42b008ad3f4 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { val c1 = new A with C {} val c2 = new C with A {} diff --git a/test/files/neg/macro-override-method-overrides-macro.check b/test/files/neg/macro-override-method-overrides-macro.check index d6fdca775ae3..ba5fd598a046 100644 --- a/test/files/neg/macro-override-method-overrides-macro.check +++ b/test/files/neg/macro-override-method-overrides-macro.check @@ -1,5 +1,5 @@ -Macros_Test_2.scala:9: error: overriding macro method foo in class B of type (x: String)Unit; - method foo cannot be used here - only term macros can override term macros +Macros_Test_2.scala:9: 
error: macro can only be overridden by another macro: +macro def foo(x: String): Unit (defined in class B) override def foo(x: String): Unit = println("fooDString") ^ -one error found +1 error diff --git a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala index c94f47465596..8d585be67524 100644 --- a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala +++ b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala b/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala index 8afee7d91b52..a40331c36e4b 100644 --- a/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala +++ b/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros class B { def foo(x: String): Unit = macro Impls.fooBString def foo(x: Int): Unit = macro Impls.fooBInt diff --git a/test/files/neg/macro-qmarkqmarkqmark.check b/test/files/neg/macro-qmarkqmarkqmark.check index bc3e25edaf1a..ec2a6b14291a 100644 --- a/test/files/neg/macro-qmarkqmarkqmark.check +++ b/test/files/neg/macro-qmarkqmarkqmark.check @@ -10,4 +10,4 @@ macro-qmarkqmarkqmark.scala:9: error: macro implementation is missing macro-qmarkqmarkqmark.scala:12: error: macro implementation is missing foo3[Int] ^ -four errors found +4 errors diff --git a/test/files/neg/macro-qmarkqmarkqmark.scala b/test/files/neg/macro-qmarkqmarkqmark.scala index c8d8550fd8e4..9534b03b15a6 100644 --- a/test/files/neg/macro-qmarkqmarkqmark.scala +++ b/test/files/neg/macro-qmarkqmarkqmark.scala @@ -1,13 +1,13 @@ import language.experimental.macros object Macros { - def foo1 = macro ??? 
+ def foo1: Any = macro ??? foo1 - def foo2(x: Int) = macro ??? + def foo2(x: Int): Any = macro ??? foo2 foo2(1) - def foo3[T] = macro ??? + def foo3[T]: Any = macro ??? foo3[Int] -} \ No newline at end of file +} diff --git a/test/files/neg/macro-quasiquotes.check b/test/files/neg/macro-quasiquotes.check index a985aee156e4..6fa8248cf9c6 100644 --- a/test/files/neg/macro-quasiquotes.check +++ b/test/files/neg/macro-quasiquotes.check @@ -5,4 +5,4 @@ Macros_1.scala:14: error: bundle implementation has incompatible shape: type mismatch for parameter x: Impls.this.c.Expr[Int] does not conform to Impls.this.c.universe.Block def m3(x: Int): Unit = macro Impls.impl3 ^ -one error found +1 error diff --git a/test/files/neg/macro-quasiquotes/Macros_1.scala b/test/files/neg/macro-quasiquotes/Macros_1.scala index b123c475c2f3..62e7a596bb1d 100644 --- a/test/files/neg/macro-quasiquotes/Macros_1.scala +++ b/test/files/neg/macro-quasiquotes/Macros_1.scala @@ -12,4 +12,4 @@ object Macros { def m1(x: Int): Unit = macro Impls.impl1 def m2(x: Int): Unit = macro Impls.impl2 def m3(x: Int): Unit = macro Impls.impl3 -} \ No newline at end of file +} diff --git a/test/files/neg/macro-reify-splice-splice.check b/test/files/neg/macro-reify-splice-splice.check index 995af25146d8..e57a468afafc 100644 --- a/test/files/neg/macro-reify-splice-splice.check +++ b/test/files/neg/macro-reify-splice-splice.check @@ -4,4 +4,4 @@ if you're sure this is not an oversight, add scala-compiler.jar to the classpath import `scala.tools.reflect.Eval` and call `.eval` instead. 
{ c.universe.reify(c.universe.reify("hello world")) }.splice.splice ^ -one error found +1 error diff --git a/test/files/neg/macro-reify-splice-splice/Macros_1.scala b/test/files/neg/macro-reify-splice-splice/Macros_1.scala index f8f7c7fa254c..3a1afea1f907 100644 --- a/test/files/neg/macro-reify-splice-splice/Macros_1.scala +++ b/test/files/neg/macro-reify-splice-splice/Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { diff --git a/test/files/neg/macro-reify-splice-splice/Test_2.scala b/test/files/neg/macro-reify-splice-splice/Test_2.scala index 41ce473dacde..cff569bd81b1 100644 --- a/test/files/neg/macro-reify-splice-splice/Test_2.scala +++ b/test/files/neg/macro-reify-splice-splice/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println(Macros.foo) } diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check index 44efaae775c2..ce218cdbc28e 100644 --- a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check +++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check @@ -4,4 +4,4 @@ Test.scala:5: error: No TypeTag available for C[T] Test.scala:6: error: No TypeTag available for List[C[T]] println(implicitly[TypeTag[List[C[T]]]]) ^ -two errors found +2 errors diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala b/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala index c7b1cedcd2c4..09652721cdc1 100644 --- a/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala +++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[TypeTag[List[C[T]]]]) } fooNoTypeTagHK[List, Int] -} \ No newline at end of file +} diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags.check 
b/test/files/neg/macro-reify-typetag-typeparams-notags.check index 7c67b02aa606..65a08a6d3e7c 100644 --- a/test/files/neg/macro-reify-typetag-typeparams-notags.check +++ b/test/files/neg/macro-reify-typetag-typeparams-notags.check @@ -4,4 +4,4 @@ Test.scala:5: error: No TypeTag available for T Test.scala:6: error: No TypeTag available for List[T] println(implicitly[TypeTag[List[T]]]) ^ -two errors found +2 errors diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala b/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala index 6d849cde3f82..504f7327c48b 100644 --- a/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala +++ b/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[TypeTag[List[T]]]) } fooNoTypeTag[Int] -} \ No newline at end of file +} diff --git a/test/files/neg/macro-reify-typetag-useabstypetag.check b/test/files/neg/macro-reify-typetag-useabstypetag.check index 7c67b02aa606..65a08a6d3e7c 100644 --- a/test/files/neg/macro-reify-typetag-useabstypetag.check +++ b/test/files/neg/macro-reify-typetag-useabstypetag.check @@ -4,4 +4,4 @@ Test.scala:5: error: No TypeTag available for T Test.scala:6: error: No TypeTag available for List[T] println(implicitly[TypeTag[List[T]]]) ^ -two errors found +2 errors diff --git a/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala b/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala index 1e7fcb3f45be..95b8dfc99758 100644 --- a/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala +++ b/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[TypeTag[List[T]]]) } fooTypeTag[Int] -} \ No newline at end of file +} diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check index 65445d80ddcc..712b3d2bdc52 100644 --- a/test/files/neg/macro-without-xmacros-a.check +++ 
b/test/files/neg/macro-without-xmacros-a.check @@ -14,4 +14,4 @@ Macros_2.scala:11: error: macro definition needs to be enabled by making the implicit value scala.language.experimental.macros visible. def quux(x: Int): Int = macro quux_impl ^ -three errors found +3 errors diff --git a/test/files/neg/macro-without-xmacros-a/Impls_1.scala b/test/files/neg/macro-without-xmacros-a/Impls_1.scala index 035913f3e376..04d5d1ac0f50 100644 --- a/test/files/neg/macro-without-xmacros-a/Impls_1.scala +++ b/test/files/neg/macro-without-xmacros-a/Impls_1.scala @@ -15,4 +15,4 @@ object Impls { import c.universe._ c.Expr(q"$x + 3") } -} \ No newline at end of file +} diff --git a/test/files/neg/macro-without-xmacros-a/Macros_2.scala b/test/files/neg/macro-without-xmacros-a/Macros_2.scala index 62f9dcf50543..e32a70ba8d0a 100644 --- a/test/files/neg/macro-without-xmacros-a/Macros_2.scala +++ b/test/files/neg/macro-without-xmacros-a/Macros_2.scala @@ -9,4 +9,4 @@ object Macros { class Macros { def quux(x: Int): Int = macro quux_impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-without-xmacros-a/Test_3.scala b/test/files/neg/macro-without-xmacros-a/Test_3.scala index e9a10e20c99b..011eb1f57453 100644 --- a/test/files/neg/macro-without-xmacros-a/Test_3.scala +++ b/test/files/neg/macro-without-xmacros-a/Test_3.scala @@ -1,4 +1,4 @@ object Test extends App { import Macros.Shmacros._ println(foo(2) + Macros.bar(2) * new Macros().quux(4)) -} \ No newline at end of file +} diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check index e3c1010d50a8..f671ede73d67 100644 --- a/test/files/neg/macro-without-xmacros-b.check +++ b/test/files/neg/macro-without-xmacros-b.check @@ -14,4 +14,4 @@ Macros_2.scala:9: error: macro definition needs to be enabled by making the implicit value scala.language.experimental.macros visible. 
def quux(x: Int): Int = macro Impls.quux_impl ^ -three errors found +3 errors diff --git a/test/files/neg/macro-without-xmacros-b/Impls_1.scala b/test/files/neg/macro-without-xmacros-b/Impls_1.scala index 035913f3e376..04d5d1ac0f50 100644 --- a/test/files/neg/macro-without-xmacros-b/Impls_1.scala +++ b/test/files/neg/macro-without-xmacros-b/Impls_1.scala @@ -15,4 +15,4 @@ object Impls { import c.universe._ c.Expr(q"$x + 3") } -} \ No newline at end of file +} diff --git a/test/files/neg/macro-without-xmacros-b/Macros_2.scala b/test/files/neg/macro-without-xmacros-b/Macros_2.scala index de7080c7e881..120f50abcd4b 100644 --- a/test/files/neg/macro-without-xmacros-b/Macros_2.scala +++ b/test/files/neg/macro-without-xmacros-b/Macros_2.scala @@ -7,4 +7,4 @@ object Macros { class Macros { def quux(x: Int): Int = macro Impls.quux_impl -} \ No newline at end of file +} diff --git a/test/files/neg/macro-without-xmacros-b/Test_3.scala b/test/files/neg/macro-without-xmacros-b/Test_3.scala index e9a10e20c99b..011eb1f57453 100644 --- a/test/files/neg/macro-without-xmacros-b/Test_3.scala +++ b/test/files/neg/macro-without-xmacros-b/Test_3.scala @@ -1,4 +1,4 @@ object Test extends App { import Macros.Shmacros._ println(foo(2) + Macros.bar(2) * new Macros().quux(4)) -} \ No newline at end of file +} diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check index 75cfe5f40194..4d9ef2fba906 100644 --- a/test/files/neg/main1.check +++ b/test/files/neg/main1.check @@ -1,28 +1,80 @@ -main1.scala:4: warning: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program. +main1.scala:5: warning: Foo has a valid main method (args: Array[String]): Unit, + but foo1.Foo will not have an entry point on the JVM. Reason: companion is a trait, which means no static forwarder can be generated. 
object Foo { // companion is trait ^ -main1.scala:11: warning: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program. +main1.scala:12: warning: Foo has a valid main method (args: Array[String]): Unit, + but foo2.Foo will not have an entry point on the JVM. Reason: companion contains its own main method, which means no static forwarder can be generated. object Foo { // companion has its own main ^ -main1.scala:23: warning: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program. +main1.scala:24: warning: Foo has a valid main method (args: Array[String]): Unit, + but foo3.Foo will not have an entry point on the JVM. Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated. object Foo { // Companion contains main, but not an interfering main. ^ -main1.scala:32: warning: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program. +main1.scala:33: warning: Foo has a valid main method (args: Array[String]): Unit, + but foo4.Foo will not have an entry point on the JVM. Reason: companion contains its own main method, which means no static forwarder can be generated. object Foo extends Foo { // Inherits main from the class ^ -main1.scala:40: warning: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program. +main1.scala:41: warning: Foo has a valid main method (args: Array[String]): Unit, + but foo5.Foo will not have an entry point on the JVM. Reason: companion contains its own main method, which means no static forwarder can be generated. object Foo extends Foo { // Overrides main from the class ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-5 warnings found -one error found +main1.scala:53: warning: not a valid main method for p6.Main, + because main methods must have the exact signature `(Array[String]): Unit`. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[Int]) = () + ^ +main1.scala:59: warning: Main has a main method (args: Array[Int]): Unit, + but p7.Main will not have an entry point on the JVM. + Reason: companion is a trait, which means no static forwarder can be generated. + + object Main { + ^ +main1.scala:60: warning: not a valid main method for p7.Main, + because main methods must have the exact signature `(Array[String]): Unit`. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[Int]) = () + ^ +main1.scala:66: warning: Main has a main method, + but p8.Main will not have an entry point on the JVM. + Reason: companion is a trait, which means no static forwarder can be generated. + + object Main { + ^ +main1.scala:68: warning: not a valid main method for p8.Main, + because main methods must have the exact signature `(Array[String]): Unit`. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[Double]) = () + ^ +main1.scala:67: warning: not a valid main method for p8.Main, + because main methods must have the exact signature `(Array[String]): Unit`. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[Int]) = () + ^ +main1.scala:74: warning: not a valid main method for t7448.Main, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[String]) = ??? + ^ +error: No warnings can be incurred under -Werror. 
+12 warnings +1 error diff --git a/test/files/neg/main1.scala b/test/files/neg/main1.scala index 1152768f0872..a07b51e0d03b 100644 --- a/test/files/neg/main1.scala +++ b/test/files/neg/main1.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// // negatives package foo1 { object Foo { // companion is trait @@ -44,3 +45,32 @@ package foo5 { def main(args: Array[String]): Unit = () } } + +// extended messaging + +package p6 { + object Main { + def main(args: Array[Int]) = () + } +} + +package p7 { + trait Main + object Main { + def main(args: Array[Int]) = () + } +} + +package p8 { + trait Main + object Main { + def main(args: Array[Int]) = () + def main(args: Array[Double]) = () + } +} + +package t7448 { + object Main { + def main(args: Array[String]) = ??? + } +} diff --git a/test/files/neg/main2.check b/test/files/neg/main2.check new file mode 100644 index 000000000000..eb56e417a58b --- /dev/null +++ b/test/files/neg/main2.check @@ -0,0 +1,9 @@ +main2.scala:7: warning: X has a valid main method (args: Array[String]): Unit, + but p.X will not have an entry point on the JVM. + Reason: companion is a trait, which means no static forwarder can be generated. + + object X { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/main2.scala b/test/files/neg/main2.scala new file mode 100644 index 000000000000..2b3bb7a0a12f --- /dev/null +++ b/test/files/neg/main2.scala @@ -0,0 +1,19 @@ +//> using options -Xfatal-warnings -Xmain-class p.X +// + +// emit a warning + +package p { + object X { + def main(args: Array[String]): Unit = () + } + trait X +} + +// no warn because not the designated main + +package q { + object Main { + def main(args: Array[Int]) = () + } +} diff --git a/test/files/neg/maxerrs.check b/test/files/neg/maxerrs.check index b9ca3162afa6..56c85f66d7b7 100644 --- a/test/files/neg/maxerrs.check +++ b/test/files/neg/maxerrs.check @@ -1,16 +1,16 @@ -maxerrs.scala:23: error: type mismatch; +maxerrs.scala:24: error: type mismatch; found : String("") required: Int def F = f("") ^ -maxerrs.scala:25: error: type mismatch; +maxerrs.scala:26: error: type mismatch; found : String("") required: Int def g = f("") ^ -maxerrs.scala:27: error: type mismatch; +maxerrs.scala:28: error: type mismatch; found : String("") required: Int def h = f("") ^ -5 errors found +5 errors diff --git a/test/files/neg/maxerrs.scala b/test/files/neg/maxerrs.scala index 79395f0e1853..f72577f5a319 100644 --- a/test/files/neg/maxerrs.scala +++ b/test/files/neg/maxerrs.scala @@ -1,4 +1,5 @@ -// scalac: -Xmaxerrs 3 -Xfatal-warnings -deprecation +//> using options -Xmaxerrs 3 -Xfatal-warnings -deprecation +// object X { @deprecated("just to annoy people", since="forever") diff --git a/test/files/neg/maxwarns.check b/test/files/neg/maxwarns.check index dddc9895d9d2..7c0da6eb8d74 100644 --- a/test/files/neg/maxwarns.check +++ b/test/files/neg/maxwarns.check @@ -1,12 +1,12 @@ -maxwarns.scala:13: warning: method x in object X is deprecated (since forever): just to annoy people +maxwarns.scala:14: warning: method x in object X is deprecated (since forever): just to annoy people def a = x ^ -maxwarns.scala:15: warning: method x in object X is deprecated (since forever): just 
to annoy people +maxwarns.scala:16: warning: method x in object X is deprecated (since forever): just to annoy people def b = x ^ -maxwarns.scala:17: warning: method x in object X is deprecated (since forever): just to annoy people +maxwarns.scala:18: warning: method x in object X is deprecated (since forever): just to annoy people def c = x ^ -error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found -one error found +error: No warnings can be incurred under -Werror. +5 warnings +1 error diff --git a/test/files/neg/maxwarns.scala b/test/files/neg/maxwarns.scala index 9864ea44eb07..a5523a9af8d4 100644 --- a/test/files/neg/maxwarns.scala +++ b/test/files/neg/maxwarns.scala @@ -1,4 +1,5 @@ -// scalac: -Xmaxwarns 3 -Xfatal-warnings -deprecation +//> using options -Xmaxwarns 3 -Xfatal-warnings -deprecation +// object X { @deprecated("just to annoy people", since="forever") diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check deleted file mode 100644 index 133148e23f3a..000000000000 --- a/test/files/neg/migration28.check +++ /dev/null @@ -1,7 +0,0 @@ -migration28.scala:5: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: -The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse. - List(1,2,3,4,5).scanRight(0)(_+_) - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found diff --git a/test/files/neg/migration28.scala b/test/files/neg/migration28.scala deleted file mode 100644 index 702e78ff86f5..000000000000 --- a/test/files/neg/migration28.scala +++ /dev/null @@ -1,10 +0,0 @@ -// scalac: -Xfatal-warnings -Xmigration -object Test { - import scala.collection.mutable._ - - List(1,2,3,4,5).scanRight(0)(_+_) - - def main(args: Array[String]): Unit = { - - } -} diff --git a/test/files/neg/missing-arg-list.check b/test/files/neg/missing-arg-list.check index 229baac177ea..902acecab736 100644 --- a/test/files/neg/missing-arg-list.check +++ b/test/files/neg/missing-arg-list.check @@ -1,26 +1,26 @@ -missing-arg-list.scala:9: error: missing argument list for method id in trait T +missing-arg-list.scala:9: error: missing argument list for method id in trait T of type (i: Int): Int Unapplied methods are only converted to functions when a function type is expected. You can make this conversion explicit by writing `id _` or `id(_)` instead of `id`. val w = id ^ -missing-arg-list.scala:10: error: missing argument list for method f in trait T +missing-arg-list.scala:10: error: missing argument list for method f in trait T of type (i: Int)(j: Int): Int Unapplied methods are only converted to functions when a function type is expected. You can make this conversion explicit by writing `f _` or `f(_)(_)` instead of `f`. val x = f ^ -missing-arg-list.scala:11: error: missing argument list for method g in trait T +missing-arg-list.scala:11: error: missing argument list for method g in trait T of type (i: Int, j: Int, k: Int): Int Unapplied methods are only converted to functions when a function type is expected. You can make this conversion explicit by writing `g _` or `g(_,_,_)` instead of `g`. 
val y = g ^ -missing-arg-list.scala:12: error: missing argument list for method h in trait T +missing-arg-list.scala:12: error: missing argument list for method h in trait T of type (i: Int, j: Int, k: Int)(implicit s: String): String Unapplied methods are only converted to functions when a function type is expected. You can make this conversion explicit by writing `h _` or `h(_,_,_)(_)` instead of `h`. val z = h ^ -missing-arg-list.scala:15: error: missing argument list for method + in trait T +missing-arg-list.scala:15: error: missing argument list for method + in trait T of type (i: Int): Int Unapplied methods are only converted to functions when a function type is expected. You can make this conversion explicit by writing `+ _` or `+(_)` instead of `+`. val p = + ^ -5 errors found +5 errors diff --git a/test/files/neg/missing-implicit.check b/test/files/neg/missing-implicit.check new file mode 100644 index 000000000000..bc043b4b2958 --- /dev/null +++ b/test/files/neg/missing-implicit.check @@ -0,0 +1,31 @@ +missing-implicit.scala:23: error: could not find implicit value for parameter e: TC[String]{type Int} (foo) + implicitly[TC[String] { type Int}] + ^ +missing-implicit.scala:24: error: bar + implicitly[XC[String]] + ^ +missing-implicit.scala:25: error: could not find implicit value for parameter e: U (nope) + implicitly[U] + ^ +missing-implicit.scala:26: error: no way + implicitly[V] + ^ +missing-implicit.scala:31: error: no way + f + ^ +missing-implicit.scala:32: error: huh + g + ^ +missing-implicit.scala:49: error: No F of Int + implicitly[F[Int]] + ^ +missing-implicit.scala:50: error: could not find implicit value for parameter e: M[Int] (No F of Int) + implicitly[M[Int]] + ^ +missing-implicit.scala:51: error: could not find implicit value for parameter e: AX (No F of String) + implicitly[AX] + ^ +missing-implicit.scala:52: error: could not find implicit value for parameter e: X0 (Missing X3 of Char and Int and String) + implicitly[X0] + ^ +10 errors diff 
--git a/test/files/neg/missing-implicit.scala b/test/files/neg/missing-implicit.scala new file mode 100644 index 000000000000..95fcac101642 --- /dev/null +++ b/test/files/neg/missing-implicit.scala @@ -0,0 +1,53 @@ + +import annotation.implicitNotFound + +@implicitNotFound("foo") +trait TC[A] { + type B +} + +@implicitNotFound("bar") +trait XC[A] { + type B +} + +@implicitNotFound("nope") +trait T + +trait U extends T + +@implicitNotFound("no way") +trait V extends T + +object Example { + implicitly[TC[String] { type Int}] + implicitly[XC[String]] + implicitly[U] + implicitly[V] + + def f(implicit v: V) = ??? + def g(implicit @implicitNotFound("huh") v: V) = ??? + + f + g +} + +@implicitNotFound("No F of ${A}") +trait F[A] + +trait M[A] extends F[A] + +trait AX extends F[String] + +@implicitNotFound("Missing X3 of ${A} and ${B} and ${C}") +trait X3[A, B, C] +trait X2[A, B] extends X3[A, B, String] +trait X1[A] extends X2[A, Int] +trait X0 extends X1[Char] + +object SuperSubstitutions { + implicitly[F[Int]] + implicitly[M[Int]] + implicitly[AX] + implicitly[X0] +} diff --git a/test/files/neg/missing-param-type-tuple.check b/test/files/neg/missing-param-type-tuple.check index 3a4258ff8c5b..a38bcb7cca21 100644 --- a/test/files/neg/missing-param-type-tuple.check +++ b/test/files/neg/missing-param-type-tuple.check @@ -6,6 +6,11 @@ Note: The expected type requires a one-argument function accepting a 2-Tuple. missing-param-type-tuple.scala:3: error: missing parameter type val x: ((Int, Int)) => Int = (a, b) => 0 ^ +missing-param-type-tuple.scala:3: error: type mismatch; + found : (?, ?) => ? + required: ((Int, Int)) => Int + val x: ((Int, Int)) => Int = (a, b) => 0 + ^ missing-param-type-tuple.scala:5: error: missing parameter type Note: The expected type requires a one-argument function accepting a 3-Tuple. Consider a pattern matching anonymous function, `{ case (param1, ..., param3) => ... 
}` @@ -17,6 +22,11 @@ missing-param-type-tuple.scala:5: error: missing parameter type missing-param-type-tuple.scala:5: error: missing parameter type val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0 ^ +missing-param-type-tuple.scala:5: error: type mismatch; + found : (?, ?, ?) => ? + required: ((Int, Int, Int)) => Int + val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0 + ^ missing-param-type-tuple.scala:7: error: missing parameter type Note: The expected type requires a one-argument function accepting a 3-Tuple. Consider a pattern matching anonymous function, `{ case (param1, ..., param3) => ... }` @@ -28,4 +38,9 @@ missing-param-type-tuple.scala:7: error: missing parameter type missing-param-type-tuple.scala:7: error: missing parameter type val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0 ^ -8 errors found +missing-param-type-tuple.scala:7: error: type mismatch; + found : (?, ?, ?) => ? + required: ((Int, Int, Int)) => Int + val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0 + ^ +11 errors diff --git a/test/files/neg/mixins.check b/test/files/neg/mixins.check index f310ca596294..d7b49d1db26e 100644 --- a/test/files/neg/mixins.check +++ b/test/files/neg/mixins.check @@ -3,4 +3,4 @@ mixins.scala:9: error: illegal inheritance; superclass C of the mixin trait M class D extends C with M ^ -one error found +1 error diff --git a/test/files/neg/moduleClassReference.check b/test/files/neg/moduleClassReference.check index 1f16aeb2509f..755f6a88561e 100644 --- a/test/files/neg/moduleClassReference.check +++ b/test/files/neg/moduleClassReference.check @@ -1,4 +1,4 @@ moduleClassReference.scala:2: error: not found: value Predef$ def foo = Predef$.MODULE$ == Predef ^ -one error found +1 error diff --git a/test/files/neg/multi-array.check b/test/files/neg/multi-array.check index 6671201f0b5c..4f7dd1526059 100644 --- a/test/files/neg/multi-array.check +++ b/test/files/neg/multi-array.check @@ -1,4 +1,4 @@ -multi-array.scala:8: error: too 
many arguments (2) for constructor Array: (_length: Int)Array[T] +multi-array.scala:9: error: too many arguments (found 2, expected 1) for constructor Array: (_length: Int): Array[T] val a: Array[Int] = new Array(10, 10) ^ -one error found +1 error diff --git a/test/files/neg/multi-array.scala b/test/files/neg/multi-array.scala index fed1974782ec..82fb938d511d 100644 --- a/test/files/neg/multi-array.scala +++ b/test/files/neg/multi-array.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation +//> using options -deprecation +// /** Multi-dimensional array creation with `new` was removed in 2.10. * The replacement Array.ofDim[Int](10,10) makes the original mistake * which was tested here impossible. diff --git a/test/files/neg/multiLineOps-b.check b/test/files/neg/multiLineOps-b.check new file mode 100644 index 000000000000..96562add8bd7 --- /dev/null +++ b/test/files/neg/multiLineOps-b.check @@ -0,0 +1,4 @@ +multiLineOps-b.scala:7: error: ';' expected but integer literal found. + */*one more*/22 // error: end of statement expected + ^ +1 error diff --git a/test/files/neg/multiLineOps-b.scala b/test/files/neg/multiLineOps-b.scala new file mode 100644 index 000000000000..1f2a397d3326 --- /dev/null +++ b/test/files/neg/multiLineOps-b.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Xsource:3 -Xsource-features:leading-infix + +class Test { + val b1 = { + 22 + * 22 // ok + */*one more*/22 // error: end of statement expected + } // error: ';' expected, but '}' found +} diff --git a/test/files/neg/multiLineOps-c.check b/test/files/neg/multiLineOps-c.check new file mode 100644 index 000000000000..b1aecce7dcfa --- /dev/null +++ b/test/files/neg/multiLineOps-c.check @@ -0,0 +1,5 @@ +multiLineOps-c.scala:7: error: value ! is not a member of Unit +possible cause: maybe a semicolon is missing before `value !`? + ! "hello".isEmpty // error: value ! 
is not a member of Unit + ^ +1 error diff --git a/test/files/neg/multiLineOps-c.scala b/test/files/neg/multiLineOps-c.scala new file mode 100644 index 000000000000..924f4c3ceec0 --- /dev/null +++ b/test/files/neg/multiLineOps-c.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Xsource:3 -Xsource-features:leading-infix + +class Test { + val x = 42 + val b2: Boolean = { + println(x) + ! "hello".isEmpty // error: value ! is not a member of Unit + } +} diff --git a/test/files/neg/multiLineOps.check b/test/files/neg/multiLineOps.check new file mode 100644 index 000000000000..e3d865c984d4 --- /dev/null +++ b/test/files/neg/multiLineOps.check @@ -0,0 +1,6 @@ +multiLineOps.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + +3 // warning: a pure expression does nothing in statement position + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/multiLineOps.scala b/test/files/neg/multiLineOps.scala new file mode 100644 index 000000000000..9d9ded40c09d --- /dev/null +++ b/test/files/neg/multiLineOps.scala @@ -0,0 +1,7 @@ +//> using options -Werror -Xlint -Xsource:3 -Xsource-features:leading-infix + +class Test { + val x = 1 + + 2 + +3 // warning: a pure expression does nothing in statement position +} diff --git a/test/files/neg/name-lookup-stable.check b/test/files/neg/name-lookup-stable.check index 68d98c416281..6ee5c7596ae7 100644 --- a/test/files/neg/name-lookup-stable.check +++ b/test/files/neg/name-lookup-stable.check @@ -8,4 +8,4 @@ it is both defined in class A and imported subsequently by import ColumnOption._ PrimaryKey // was already ambiguous in 2.10.3 ^ -two errors found +2 errors diff --git a/test/files/neg/named-booleans-relaxed.check b/test/files/neg/named-booleans-relaxed.check new file mode 100644 index 000000000000..8e0235fc026e --- /dev/null +++ b/test/files/neg/named-booleans-relaxed.check @@ -0,0 +1,54 @@ 
+named-booleans-relaxed.scala:22: warning: Boolean literals should be passed using named argument syntax for parameter x. [quickfixable] + val x0 = c.f(17, true, false) // warn + ^ +named-booleans-relaxed.scala:22: warning: Boolean literals should be passed using named argument syntax for parameter y. [quickfixable] + val x0 = c.f(17, true, false) // warn + ^ +named-booleans-relaxed.scala:44: warning: Boolean literals should be passed using named argument syntax for parameter cond. [quickfixable] + c.uncheck(false, "OK", true) + ^ +named-booleans-relaxed.scala:44: warning: Boolean literals should be passed using named argument syntax for parameter flag. [quickfixable] + c.uncheck(false, "OK", true) + ^ +named-booleans-relaxed.scala:63: warning: Boolean literals should be passed using named argument syntax for parameter isKlazz. [quickfixable] + def test = Klazz(true, false) // warn case class apply as for ctor + ^ +named-booleans-relaxed.scala:63: warning: Boolean literals should be passed using named argument syntax for parameter isWarnable. [quickfixable] + def test = Klazz(true, false) // warn case class apply as for ctor + ^ +named-booleans-relaxed.scala:71: warning: Boolean literals should be passed using named argument syntax for parameter up. [quickfixable] + def g3 = f(42, false) // warn, unnamed could mean either param with default + ^ +named-booleans-relaxed.scala:72: warning: Boolean literals should be passed using named argument syntax for parameter up. [quickfixable] + def g4 = f(42, false, true) // warn, swappable + ^ +named-booleans-relaxed.scala:72: warning: Boolean literals should be passed using named argument syntax for parameter down. [quickfixable] + def g4 = f(42, false, true) // warn, swappable + ^ +named-booleans-relaxed.scala:79: warning: Boolean literals should be passed using named argument syntax for parameter up. 
[quickfixable] + def rev3 = rev(42, reverse=true, false) // warn, unnamed could mean either param with default + ^ +named-booleans-relaxed.scala:80: warning: Boolean literals should be passed using named argument syntax for parameter reverse. [quickfixable] + def rev4 = rev(42, false, true, false) // warn, swappable + ^ +named-booleans-relaxed.scala:80: warning: Boolean literals should be passed using named argument syntax for parameter up. [quickfixable] + def rev4 = rev(42, false, true, false) // warn, swappable + ^ +named-booleans-relaxed.scala:80: warning: Boolean literals should be passed using named argument syntax for parameter down. [quickfixable] + def rev4 = rev(42, false, true, false) // warn, swappable + ^ +named-booleans-relaxed.scala:81: warning: Boolean literals should be passed using named argument syntax for parameter reverse. [quickfixable] + def rev5 = rev(42, true, down=true) // warn, out of order so it's a named block, otherwise same as rev3 + ^ +named-booleans-relaxed.scala:92: warning: Boolean literals should be passed using named argument syntax for parameter insideIf. [quickfixable] + def sus(s: String) = p.needsParentheses(s)(false) // warn + ^ +named-booleans-relaxed.scala:95: warning: Boolean literals should be passed using named argument syntax for parameter x. [quickfixable] + def f = p.f(true, z=42) // warn + ^ +named-booleans-relaxed.scala:106: warning: Boolean literals should be passed using named argument syntax for parameter y. [quickfixable] + def w = new V(true).combo(false) + ^ +error: No warnings can be incurred under -Werror. 
+17 warnings +1 error diff --git a/test/files/neg/named-booleans-relaxed.scala b/test/files/neg/named-booleans-relaxed.scala new file mode 100644 index 000000000000..e4b5121a4219 --- /dev/null +++ b/test/files/neg/named-booleans-relaxed.scala @@ -0,0 +1,107 @@ +//> using options -Werror -Wunnamed-boolean-literal + +class C { + def f(n: Int = 42, x: Boolean, y: Boolean) = if (x && y) n else 0 + + def g(x: Any) = + x match { + case (true, false) => 0 + case _ => 1 + } + var b = false + def fs(n: Int)(s: String, b: Boolean) = if (b) s*n else s + def gs[A](n: Int)(s: A, b: Boolean) = if (b) s.toString*n else s.toString + + def check(cond: Boolean, msg: => String) = if (cond) println(msg) + def uncheck(cond: Boolean, msg: => String, flag: Boolean) = if (cond && flag) println(msg) +} + +object Test extends App { + val c = new C + val b = false + val x0 = c.f(17, true, false) // warn + val x1 = c.f(17, true, b) // nowarn + val x2 = c.f(y = b, n = 17, x = true) // nowarn + c.b = true + val y = Some(false) + val z = Option(false) + val w = (true, false) + val v = c g true // nowarn infix + + val s = collection.mutable.Set.empty[String] + def mutateS(): Unit = s("updater") = true + //def updateS(): Unit = s.update("updater", true) + + val m = collection.mutable.Map.empty[String, true] + def mutateM(): Unit = m("updater") = true + + val ss = c.fs(42)("hello", true) + val tt = c.gs(42)("hello", true) + + def f(g: Boolean => Option[Boolean]) = g(true).getOrElse(false) + + c.check(true, "OK") + c.uncheck(false, "OK", true) +} + +class Arrays { + def test = Array(true, false, true) +} + +class Tuples { + def test = (true, false, true) +} + +class Functions { + val f: Boolean => Boolean = identity + def test = f(true) +} + +case class Klazz(isKlazz: Boolean, isWarnable: Boolean) + +class Klazzy { + def test = Klazz(true, false) // warn case class apply as for ctor +} + +class Defaulting { + def f(n: Int, up: Boolean = true, down: Boolean = false) = if (up) n+1 else if (down) n-1 
else n + def g0 = f(42) // nowarn, all defaults + def g1 = f(42, up=false) // nowarn, named or defaults + def g2 = f(42, up=false, true) // nowarn, in param order so not a named block, unnamed is last remaining param + def g3 = f(42, false) // warn, unnamed could mean either param with default + def g4 = f(42, false, true) // warn, swappable + + def rev(n: Int, reverse: Boolean = false, up: Boolean = true, down: Boolean = false) = + if (!reverse) f(n, up, down) else if (down) n+1 else if (up) n-1 else n + def rev0 = rev(42) // nowarn, all defaults + def rev1 = rev(42, up=false) // nowarn, named or defaults + def rev2 = rev(42, true, up=false, down=true) // nowarn, in param order so not a named block, unnamed is last remaining param + def rev3 = rev(42, reverse=true, false) // warn, unnamed could mean either param with default + def rev4 = rev(42, false, true, false) // warn, swappable + def rev5 = rev(42, true, down=true) // warn, out of order so it's a named block, otherwise same as rev3 +} + +class Printers { + def needsParentheses(parent: String)(insideIf: Boolean = true, insideMatch: Boolean = true, insideTry: Boolean = true, insideAnnotated: Boolean = true, insideBlock: Boolean = true, insideLabelDef: Boolean = true, insideAssign: Boolean = true): Boolean = true + + def f(x: Boolean, y: String = "hi", z: Int = 2, b: Boolean = false) = if (x && b) y+z else y*z +} +object TestPrinters { + val p = new Printers + def ok(s: String) = p.needsParentheses(s)(insideLabelDef = false) + def sus(s: String) = p.needsParentheses(s)(false) // warn + def pick(s: String) = p.needsParentheses(s)(true, insideAssign=false, insideLabelDef=false, insideBlock=false, insideAnnotated=false, insideTry=false, insideMatch=false) + + def f = p.f(true, z=42) // warn + def g = p.f(x=true, b=true) // nowarn, no unnamed + def h = p.f(true, b=true) // nowarn, one unnamed but other boolean is named; defaults are non-boolean +} + +object Testy { + class V(val x: Boolean) extends AnyVal { + def 
combo(y: Boolean = true, z: Boolean = false) = x&&y&&z + } + + def v = new V(true) + def w = new V(true).combo(false) +} diff --git a/test/files/neg/named-booleans.check b/test/files/neg/named-booleans.check new file mode 100644 index 000000000000..ad1ab31a7888 --- /dev/null +++ b/test/files/neg/named-booleans.check @@ -0,0 +1,30 @@ +named-booleans.scala:22: warning: Boolean literals should be passed using named argument syntax for parameter x. [quickfixable] + val x0 = c.f(17, true, b) // warn + ^ +named-booleans.scala:38: warning: Boolean literals should be passed using named argument syntax for parameter b. [quickfixable] + val ss = c.fs(42)("hello", true) + ^ +named-booleans.scala:39: warning: Boolean literals should be passed using named argument syntax for parameter b. [quickfixable] + val tt = c.gs(42)("hello", true) + ^ +named-booleans.scala:44: warning: Boolean literals should be passed using named argument syntax for parameter cond. [quickfixable] + c.uncheck(false, "OK", true) + ^ +named-booleans.scala:44: warning: Boolean literals should be passed using named argument syntax for parameter flag. [quickfixable] + c.uncheck(false, "OK", true) + ^ +named-booleans.scala:70: warning: Boolean literals should be passed using named argument syntax for parameter down. [quickfixable] + def g2 = f(42, up=false, true) + ^ +named-booleans.scala:71: warning: Boolean literals should be passed using named argument syntax for parameter up. [quickfixable] + def g3 = f(42, false) + ^ +named-booleans.scala:72: warning: Boolean literals should be passed using named argument syntax for parameter up. [quickfixable] + def g4 = f(42, false, true) + ^ +named-booleans.scala:72: warning: Boolean literals should be passed using named argument syntax for parameter down. [quickfixable] + def g4 = f(42, false, true) + ^ +error: No warnings can be incurred under -Werror. 
+9 warnings +1 error diff --git a/test/files/neg/named-booleans.scala b/test/files/neg/named-booleans.scala new file mode 100644 index 000000000000..a9ceaf575b21 --- /dev/null +++ b/test/files/neg/named-booleans.scala @@ -0,0 +1,73 @@ +//> using options -Werror -Wunnamed-boolean-literal-strict + +class C { + def f(n: Int = 42, x: Boolean, y: Boolean) = if (x && y) n else 0 + + def g(x: Any) = + x match { + case (true, false) => 0 + case _ => 1 + } + var b = false + def fs(n: Int)(s: String, b: Boolean) = if (b) s*n else s + def gs[A](n: Int)(s: A, b: Boolean) = if (b) s.toString*n else s.toString + + def check(cond: Boolean, msg: => String) = if (cond) println(msg) + def uncheck(cond: Boolean, msg: => String, flag: Boolean) = if (cond && flag) println(msg) +} + +object Test extends App { + val c = new C + val b = false + val x0 = c.f(17, true, b) // warn + val x1 = c.f(17, x = true, b) // nowarn + val x2 = c.f(y = b, n = 17, x = true) // nowarn + c.b = true + val y = Some(false) + val z = Option(false) + val w = (true, false) + val v = c g true // nowarn infix + + val s = collection.mutable.Set.empty[String] + def mutateS(): Unit = s("updater") = true + //def updateS(): Unit = s.update("updater", true) + + val m = collection.mutable.Map.empty[String, true] + def mutateM(): Unit = m("updater") = true + + val ss = c.fs(42)("hello", true) + val tt = c.gs(42)("hello", true) + + def f(g: Boolean => Option[Boolean]) = g(true).getOrElse(false) + + c.check(true, "OK") + c.uncheck(false, "OK", true) +} + +class Arrays { + def test = Array(true, false, true) +} + +class Tuples { + def test = (true, false, true) +} + +class Functions { + val f: Boolean => Boolean = identity + def test = f(true) +} + +case class Klazz(isKlazz: Boolean) + +class Klazzy { + def test = Klazz(true) // nowarn case class apply as for ctor +} + +class Defaulting { + def f(n: Int, up: Boolean = true, down: Boolean = false) = if (up) n+1 else if (down) n-1 else n + def g0 = f(42) + def g1 = f(42, 
up=false) + def g2 = f(42, up=false, true) + def g3 = f(42, false) + def g4 = f(42, false, true) +} diff --git a/test/files/neg/names-defaults-neg-213.check b/test/files/neg/names-defaults-neg-213.check index 0ec319781395..65b0b6af85b9 100644 --- a/test/files/neg/names-defaults-neg-213.check +++ b/test/files/neg/names-defaults-neg-213.check @@ -1,9 +1,9 @@ -names-defaults-neg-213.scala:9: error: unknown parameter name: x +names-defaults-neg-213.scala:10: error: unknown parameter name: x Note that assignments in argument position are no longer allowed since Scala 2.13. To express the assignment expression, wrap it in brackets, e.g., `{ x = ... }`. f2(x = 1) // error, no parameter named x. error message mentions change in 2.13 ^ -names-defaults-neg-213.scala:15: error: unknown parameter name: x +names-defaults-neg-213.scala:16: error: unknown parameter name: x f2(x = 1) // error (no such parameter). no mention of new semantics in 2.13 ^ -two errors found +2 errors diff --git a/test/files/neg/names-defaults-neg-213.scala b/test/files/neg/names-defaults-neg-213.scala index 10667b35304b..8e5c212f1d61 100644 --- a/test/files/neg/names-defaults-neg-213.scala +++ b/test/files/neg/names-defaults-neg-213.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 +// class C { def f1(x: Unit): Int = 0 def f2(y: Unit): Int = 0 diff --git a/test/files/neg/names-defaults-neg-pu.check b/test/files/neg/names-defaults-neg-pu.check new file mode 100644 index 000000000000..89bbe121d4b5 --- /dev/null +++ b/test/files/neg/names-defaults-neg-pu.check @@ -0,0 +1,154 @@ +names-defaults-neg-pu.scala:7: error: type mismatch; + found : String("#") + required: Int + test1(b = 2, a = "#") + ^ +names-defaults-neg-pu.scala:7: error: type mismatch; + found : Int(2) + required: String + test1(b = 2, a = "#") + ^ +names-defaults-neg-pu.scala:10: error: positional after named argument. 
+ test1(b = "(*", 23) + ^ +names-defaults-neg-pu.scala:16: error: unknown parameter name: y + test2(y = 1) + ^ +names-defaults-neg-pu.scala:17: error: unknown parameter name: c + test1(c = 0, b = "joke") + ^ +names-defaults-neg-pu.scala:18: error: not found: value m + test7((m = 1)) // named arguments must be top-level assignments + ^ +names-defaults-neg-pu.scala:19: error: not found: value m + test7({m = 1}) + ^ +names-defaults-neg-pu.scala:20: error: not found: value m + test7 { m = 1 } // no named arguments in argument block + ^ +names-defaults-neg-pu.scala:24: error: parameter 'a' is already specified at parameter position 1 + test1(1, a = 2) + ^ +names-defaults-neg-pu.scala:25: error: parameter 'b' is already specified at parameter position 1 + test1(b = 1, b = "2") + ^ +names-defaults-neg-pu.scala:28: error: Int does not take parameters + test3(b = 3, a = 1)(3) + ^ +names-defaults-neg-pu.scala:37: error: ambiguous reference to overloaded definition, +both method f in object t1 of type (b: String, a: Int): String +and method f in object t1 of type (a: Int, b: String): String +match argument types (b: String,a: Int) + t1.f(b = "dkljf", a = 1) + ^ +names-defaults-neg-pu.scala:44: error: ambiguous reference to overloaded definition, +both method f in object t3 of type (a2: Int)(b: Int): String +and method f in object t3 of type (a1: Int): String +match argument types (Int) + t3.f(1) + ^ +names-defaults-neg-pu.scala:45: error: ambiguous reference to overloaded definition, +both method f in object t3 of type (a2: Int)(b: Int): String +and method f in object t3 of type (a1: Int): String +match argument types (Int) + t3.f(1)(2) + ^ +names-defaults-neg-pu.scala:51: error: ambiguous reference to overloaded definition, +both method g in object t7 of type (a: B): String +and method g in object t7 of type (a: C, b: Int*): String +match argument types (C) + t7.g(new C()) // ambiguous reference + ^ +names-defaults-neg-pu.scala:55: error: parameter 'b' is already specified 
at parameter position 2 + test5(a = 1, b = "dkjl", b = "dkj") + ^ +names-defaults-neg-pu.scala:56: error: parameter 'b' is already specified at parameter position 2 + test5(1, "2", b = 3) + ^ +names-defaults-neg-pu.scala:57: error: when using named arguments, the vararg parameter has to be specified exactly once + test5(b = "dlkj") + ^ +names-defaults-neg-pu.scala:63: error: ambiguous reference to overloaded definition, +both method f in object t8 of type (b: String, a: Int): String +and method f in object t8 of type (a: Int, b: Object): String +match argument types (a: Int,b: String) and expected result type Any + println(t8.f(a = 0, b = "1")) // ambiguous reference + ^ +names-defaults-neg-pu.scala:67: error: not enough arguments for method apply: (a: Int, b: String)(c: Int*): Fact in object Fact. +Unspecified value parameter b. + val fac = Fact(1)(2, 3) + ^ +names-defaults-neg-pu.scala:71: error: wrong number of arguments for pattern A1(x: Int, y: String) + A1() match { case A1(_) => () } + ^ +names-defaults-neg-pu.scala:78: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]]): T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : List[Int] + required: ?T[?T[List[?T[X forSome { type X }]]]] +Error occurred in an application involving default arguments. + test4() + ^ +names-defaults-neg-pu.scala:81: error: type mismatch; + found : List[Int] + required: List[List[?]] + def test6[T](x: List[List[T]] = List(1,2)) = x + ^ +names-defaults-neg-pu.scala:84: error: type mismatch; + found : 1 + required: String +Error occurred in an application involving default arguments. 
+ new A2[String]() + ^ +names-defaults-neg-pu.scala:88: error: module extending its companion class cannot use default constructor arguments + object C extends C() + ^ +names-defaults-neg-pu.scala:92: error: deprecated parameter name x has to be distinct from any other parameter name (deprecated or not). + def deprNam1(x: Int, @deprecatedName("x") y: String) = 0 + ^ +names-defaults-neg-pu.scala:93: error: deprecated parameter name a has to be distinct from any other parameter name (deprecated or not). + def deprNam2(a: String)(@deprecatedName("a") b: Int) = 1 + ^ +names-defaults-neg-pu.scala:95: error: parameter 'b' is already specified at parameter position 1 + deprNam3(y = 10, b = 2) + ^ +names-defaults-neg-pu.scala:104: error: unknown parameter name: m + f3818(y = 1, m = 1) + ^ +names-defaults-neg-pu.scala:140: error: missing parameter type for expanded function (() => a = x$1) + val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) + ^ +names-defaults-neg-pu.scala:140: error: type mismatch; + found : ? => ? + required: Int + val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) + ^ +names-defaults-neg-pu.scala:140: error: not found: value get + val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) + ^ +names-defaults-neg-pu.scala:141: error: parameter 'a' is already specified at parameter position 1 + val taf3 = testAnnFun(b = _: String, a = get(8)) + ^ +names-defaults-neg-pu.scala:142: error: missing parameter type for expanded function (() => b = x$4) + val taf4: (Int, String) => Unit = testAnnFun(_, b = _) + ^ +names-defaults-neg-pu.scala:142: error: type mismatch; + found : ? => ? + required: String + val taf4: (Int, String) => Unit = testAnnFun(_, b = _) + ^ +names-defaults-neg-pu.scala:193: error: an expression of type Null is ineligible for implicit conversion +Error occurred in an application involving default arguments. 
+ def f = new A3[Int]() + ^ +names-defaults-neg-pu.scala:95: warning: the parameter name y is deprecated: use b instead + deprNam3(y = 10, b = 2) + ^ +names-defaults-neg-pu.scala:98: warning: naming parameter deprNam4Arg is deprecated. + deprNam4(deprNam4Arg = null) + ^ +names-defaults-neg-pu.scala:100: warning: naming parameter deprNam5Arg is deprecated. + deprNam5(deprNam5Arg = null) + ^ +3 warnings +36 errors diff --git a/test/files/neg/names-defaults-neg-pu.scala b/test/files/neg/names-defaults-neg-pu.scala new file mode 100644 index 000000000000..1656b8b40691 --- /dev/null +++ b/test/files/neg/names-defaults-neg-pu.scala @@ -0,0 +1,194 @@ +//> using options -deprecation +// +object Test extends App { + // TESTS + + // re-ordering + test1(b = 2, a = "#") + + // mixing named and positional + test1(b = "(*", 23) + + // assignment / names + var x = 0 + var y = 0 + test2(x = 1) + test2(y = 1) + test1(c = 0, b = "joke") + test7((m = 1)) // named arguments must be top-level assignments + test7({m = 1}) + test7 { m = 1 } // no named arguments in argument block + test8(x = 1) + + // argument specified twice + test1(1, a = 2) + test1(b = 1, b = "2") + + // error message when there are too many argument lists (not very nice..) 
+ test3(b = 3, a = 1)(3) + + + + // overloading resolution + object t1 { + def f(a: Int, b: String) = "first" + def f(b: String, a: Int) = "second" + } + t1.f(b = "dkljf", a = 1) + + + object t3 { + def f(a1: Int) = "first" + def f(a2: Int)(b: Int) = "second" + } + t3.f(1) + t3.f(1)(2) + + object t7 { + def g(a: C, b: Int*) = "third" + def g(a: B) = "fourth" + } + t7.g(new C()) // ambiguous reference + + // vararg + def test5(a: Int, b: String*) = a + test5(a = 1, b = "dkjl", b = "dkj") + test5(1, "2", b = 3) + test5(b = "dlkj") + + object t8 { + def f(a: Int, b: Object) = "first" + def f(b: String, a: Int) = "second" + } + println(t8.f(a = 0, b = "1")) // ambiguous reference + + + // case class copy does not exist if there's a vararg + val fac = Fact(1)(2, 3) + val facc = fac.copy(b = "dlkfj")() + + // no defaults in patterns + A1() match { case A1(_) => () } + + + // return types of default getters + + // definition compiles, but default cannot be used, it doesn't conform + def test4[T[P]](x: T[T[List[T[X forSome { type X }]]]] = List(1,2)) = x + test4() + + // doesn't compile + def test6[T](x: List[List[T]] = List(1,2)) = x + + // correct error message + new A2[String]() + + object t3648 { + class C(val s: String = "") + object C extends C() + } + + // deprecated names + def deprNam1(x: Int, @deprecatedName("x") y: String) = 0 + def deprNam2(a: String)(@deprecatedName("a") b: Int) = 1 + def deprNam3(@deprecatedName("x") a: Int, @deprecatedName("y") b: Int) = a + b + deprNam3(y = 10, b = 2) + + def deprNam4(@deprecatedName("deprNam4Arg") deprNam4Arg: String) = 0 + deprNam4(deprNam4Arg = null) + def deprNam5(@deprecatedName deprNam5Arg: String) = 0 + deprNam5(deprNam5Arg = null) + + // t3818 + def f3818(x: Int = 1, y: Int, z: Int = 1) = 0 + f3818(y = 1, m = 1) + + // DEFINITIONS + def test1(a: Int, b: String) = a +": "+ b + def test2(x: Unit) = println("test2") + def test3(a: Int, b: Int) = a + b + def test7(m: Int) = m + def test8[T](x: => T) = println("test8") 
+} + +class B { + def foo(a: Int) = a + def bar(u: String = "ldksj") = u +} + +class C extends B { + override def foo(a: Int = 1092) = a + def foo(b: String = "lskdfj"): Unit + + def bar(i: Int = 129083) = i +} + +case class Fact(a: Int, b: String)(c: Int*) + +case class A1(x: Int = 1, y: String = "2") + +class A2[T](a: T = 1) + + +// anonymous functions +object anfun { + var var2 = 0 + def delay(var2: => Unit): Unit = { var2 } + delay(var2 = 40) + + def testAnnFun(a: Int, b: String) = println(a +": "+ b) + val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) + val taf3 = testAnnFun(b = _: String, a = get(8)) + val taf4: (Int, String) => Unit = testAnnFun(_, b = _) +} + +object t3685 { + object t { def f(x: Int) = x } + + def t1: Unit = { def x = t.f(x = 1) } + def t2: Unit = { val x = t.f(x = 1) } + def t3: Unit = { var x = t.f(x = 1) } + object t4 { def x = t.f(x = 1) } + object t5 { val x = t.f(x = 1) } + object t6 { var x = t.f(x = 1) } + class t7 { def x = t.f(x = 1) } + class t8 { val x = t.f(x = 1) } + class t9 { var x = t.f(x = 1) } + + def t10: Unit = { def x: Int = t.f(x = 1) } + def t11: Unit = { val x: Int = t.f(x = 1) } + def t12: Unit = { var x: Int = t.f(x = 1) } + class t13 { def x: Int = t.f(x = 1) } + class t14 { val x: Int = t.f(x = 1) } + class t15 { var x: Int = t.f(x = 1) } + + + object u { def f[T](x: T) = 100 } + + def u1: Unit = { def x = u.f(x = 1) } + def u2: Unit = { val x = u.f(x = 1) } + def u3: Unit = { var x = u.f(x = 1) } + def u4: Unit = { def x = u.f(x = "23") } + def u5: Unit = { val x = u.f(x = "32") } + def u6: Unit = { var x = u.f(x = "32") } + def u7: Unit = { def x: Int = u.f(x = 1) } + def u8: Unit = { val x: Int = u.f(x = 1) } + def u9: Unit = { var x: Int = u.f(x = 1) } + def u10: Unit = { def x: Int = u.f(x = "32") } + def u11: Unit = { val x: Int = u.f(x = "32") } + def u12: Unit = { var x: Int = u.f(x = "32") } + + class u13 { def x = u.f(x = 1) } + class u14 { val x = u.f(x = 1) } + class u15 { var x = u.f(x = 1) } 
+ class u16 { def x: Int = u.f(x = 1) } + class u17 { val x: Int = u.f(x = 1) } + class u18 { var x: Int = u.f(x = 1) } + class u19 { def x: Int = u.f(x = "32") } + class u20 { val x: Int = u.f(x = "32") } + class u21 { var x: Int = u.f(x = "32") } +} +class A3[T](x: T = null) // scala/bug#4727 cf A2 +class t4727 { + def f = new A3[Int]() +} diff --git a/test/files/neg/names-defaults-neg-ref.check b/test/files/neg/names-defaults-neg-ref.check index 61d66fd32a4f..1c1a3d21bf7e 100644 --- a/test/files/neg/names-defaults-neg-ref.check +++ b/test/files/neg/names-defaults-neg-ref.check @@ -9,8 +9,10 @@ names-defaults-neg-ref.scala:17: error: in class C, multiple overloaded alternat The members with defaults are defined in class C and class B. class C extends B { ^ -names-defaults-neg-ref.scala:21: error: overriding method bar$default$1 in class B of type => String; - method bar$default$1 has incompatible type +names-defaults-neg-ref.scala:21: error: incompatible type in overriding +def bar$default$1: String (defined in class B); + found : Int + required: String def bar(i: Int = 129083) = i ^ -four errors found +4 errors diff --git a/test/files/neg/names-defaults-neg-ref.scala b/test/files/neg/names-defaults-neg-ref.scala index 17a482799074..2395b5e24dbe 100644 --- a/test/files/neg/names-defaults-neg-ref.scala +++ b/test/files/neg/names-defaults-neg-ref.scala @@ -16,7 +16,7 @@ class B { class C extends B { override def foo(a: Int = 1092) = a - def foo(b: String = "lskdfj") + def foo(b: String = "lskdfj"): Unit def bar(i: Int = 129083) = i } diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check index 5e13b4d04c4c..2a05c49709d3 100644 --- a/test/files/neg/names-defaults-neg-warn.check +++ b/test/files/neg/names-defaults-neg-warn.check @@ -1,20 +1,21 @@ -names-defaults-neg-warn.scala:23: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. 
- f1(x = 1) // 2.12: error, ambiguous (named arg or assign). 2.13: named arg +names-defaults-neg-warn.scala:25: error: unknown parameter name: x +Note that assignments in argument position are no longer allowed since Scala 2.13. +To express the assignment expression, wrap it in brackets, e.g., `{ x = ... }`. + f2(x = 1) // 2.12: deprecation warning, compiles. 2.13: error, no parameter named x ^ -names-defaults-neg-warn.scala:44: error: reassignment to val +names-defaults-neg-warn.scala:36: error: unknown parameter name: x +Note that assignments in argument position are no longer allowed since Scala 2.13. +To express the assignment expression, wrap it in brackets, e.g., `{ x = ... }`. + synchronized(x = 1) // deprecation warning in 2.12, error in 2.13 + ^ +names-defaults-neg-warn.scala:45: error: unknown parameter name: x f2(x = 1) // 2.12, 2.13: error (no such parameter). no deprecation warning in 2.12, x is not a variable. ^ -names-defaults-neg-warn.scala:12: warning: the parameter name s is deprecated: use x instead +names-defaults-neg-warn.scala:13: warning: the parameter name s is deprecated: use x instead deprNam2.f(s = "dlfkj") ^ -names-defaults-neg-warn.scala:13: warning: the parameter name x is deprecated: use s instead +names-defaults-neg-warn.scala:14: warning: the parameter name x is deprecated: use s instead deprNam2.g(x = "dlkjf") ^ -names-defaults-neg-warn.scala:24: warning: assignments in argument position are deprecated in favor of named arguments. Wrap the assignment in brackets, e.g., `{ x = ... }`. - f2(x = 1) // 2.12: deprecation warning, compiles. 2.13: error, no parameter named x - ^ -names-defaults-neg-warn.scala:35: warning: assignments in argument position are deprecated in favor of named arguments. Wrap the assignment in brackets, e.g., `{ x = ... }`. 
- synchronized(x = 1) // deprecation warning in 2.12, error in 2.13 - ^ -four warnings found -two errors found +2 warnings +3 errors diff --git a/test/files/neg/names-defaults-neg-warn.scala b/test/files/neg/names-defaults-neg-warn.scala index a96d407ad33f..611ef6d65916 100644 --- a/test/files/neg/names-defaults-neg-warn.scala +++ b/test/files/neg/names-defaults-neg-warn.scala @@ -1,10 +1,11 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// object Test extends App { object deprNam2 { - def f(@deprecatedName('s) x: String) = 1 + def f(@deprecatedName("s") x: String) = 1 def f(s: Object) = 2 - def g(@deprecatedName('x) s: Object) = 3 + def g(@deprecatedName("x") s: Object) = 3 def g(s: String) = 4 } diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index a306d66a4dbe..13c3ade0fc41 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -1,90 +1,87 @@ -names-defaults-neg.scala:6: error: type mismatch; +names-defaults-neg.scala:7: error: type mismatch; found : String("#") required: Int test1(b = 2, a = "#") ^ -names-defaults-neg.scala:6: error: type mismatch; +names-defaults-neg.scala:7: error: type mismatch; found : Int(2) required: String test1(b = 2, a = "#") ^ -names-defaults-neg.scala:9: error: positional after named argument. +names-defaults-neg.scala:10: error: positional after named argument. test1(b = "(*", 23) ^ -names-defaults-neg.scala:14: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. 
- test2(x = 1) +names-defaults-neg.scala:16: error: unknown parameter name: y + test2(y = 1) ^ -names-defaults-neg.scala:16: error: not found: value c +names-defaults-neg.scala:17: error: unknown parameter name: c test1(c = 0, b = "joke") - ^ -names-defaults-neg.scala:17: error: not found: value m + ^ +names-defaults-neg.scala:18: error: not found: value m test7((m = 1)) // named arguments must be top-level assignments ^ -names-defaults-neg.scala:18: error: not found: value m +names-defaults-neg.scala:19: error: not found: value m test7({m = 1}) ^ -names-defaults-neg.scala:19: error: not found: value m +names-defaults-neg.scala:20: error: not found: value m test7 { m = 1 } // no named arguments in argument block ^ -names-defaults-neg.scala:20: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. - test8(x = 1) - ^ -names-defaults-neg.scala:23: error: parameter 'a' is already specified at parameter position 1 +names-defaults-neg.scala:24: error: parameter 'a' is already specified at parameter position 1 test1(1, a = 2) ^ -names-defaults-neg.scala:24: error: parameter 'b' is already specified at parameter position 1 +names-defaults-neg.scala:25: error: parameter 'b' is already specified at parameter position 1 test1(b = 1, b = "2") ^ -names-defaults-neg.scala:27: error: Int does not take parameters +names-defaults-neg.scala:28: error: Int does not take parameters test3(b = 3, a = 1)(3) ^ -names-defaults-neg.scala:36: error: ambiguous reference to overloaded definition, -both method f in object t1 of type (b: String, a: Int)String -and method f in object t1 of type (a: Int, b: String)String +names-defaults-neg.scala:37: error: ambiguous reference to overloaded definition, +both method f in object t1 of type (b: String, a: Int): String +and method f in object t1 of type (a: Int, b: String): String match argument types (b: String,a: Int) t1.f(b = "dkljf", a = 1) ^ -names-defaults-neg.scala:43: error: ambiguous reference to 
overloaded definition, -both method f in object t3 of type (a2: Int)(b: Int)String -and method f in object t3 of type (a1: Int)String +names-defaults-neg.scala:44: error: ambiguous reference to overloaded definition, +both method f in object t3 of type (a2: Int)(b: Int): String +and method f in object t3 of type (a1: Int): String match argument types (Int) t3.f(1) ^ -names-defaults-neg.scala:44: error: ambiguous reference to overloaded definition, -both method f in object t3 of type (a2: Int)(b: Int)String -and method f in object t3 of type (a1: Int)String +names-defaults-neg.scala:45: error: ambiguous reference to overloaded definition, +both method f in object t3 of type (a2: Int)(b: Int): String +and method f in object t3 of type (a1: Int): String match argument types (Int) t3.f(1)(2) ^ -names-defaults-neg.scala:50: error: ambiguous reference to overloaded definition, -both method g in object t7 of type (a: B)String -and method g in object t7 of type (a: C, b: Int*)String +names-defaults-neg.scala:51: error: ambiguous reference to overloaded definition, +both method g in object t7 of type (a: B): String +and method g in object t7 of type (a: C, b: Int*): String match argument types (C) t7.g(new C()) // ambiguous reference ^ -names-defaults-neg.scala:54: error: parameter 'b' is already specified at parameter position 2 +names-defaults-neg.scala:55: error: parameter 'b' is already specified at parameter position 2 test5(a = 1, b = "dkjl", b = "dkj") ^ -names-defaults-neg.scala:55: error: parameter 'b' is already specified at parameter position 2 +names-defaults-neg.scala:56: error: parameter 'b' is already specified at parameter position 2 test5(1, "2", b = 3) ^ -names-defaults-neg.scala:56: error: when using named arguments, the vararg parameter has to be specified exactly once +names-defaults-neg.scala:57: error: when using named arguments, the vararg parameter has to be specified exactly once test5(b = "dlkj") ^ -names-defaults-neg.scala:62: error: ambiguous 
reference to overloaded definition, -both method f in object t8 of type (b: String, a: Int)String -and method f in object t8 of type (a: Int, b: Object)String +names-defaults-neg.scala:63: error: ambiguous reference to overloaded definition, +both method f in object t8 of type (b: String, a: Int): String +and method f in object t8 of type (a: Int, b: Object): String match argument types (a: Int,b: String) and expected result type Any println(t8.f(a = 0, b = "1")) // ambiguous reference ^ -names-defaults-neg.scala:66: error: not enough arguments for method apply: (a: Int, b: String)(c: Int*)Fact in object Fact. +names-defaults-neg.scala:67: error: not enough arguments for method apply: (a: Int, b: String)(c: Int*): Fact in object Fact. Unspecified value parameter b. val fac = Fact(1)(2, 3) ^ -names-defaults-neg.scala:70: error: wrong number of arguments for pattern A1(x: Int,y: String) +names-defaults-neg.scala:71: error: wrong number of arguments for pattern A1(x: Int, y: String) A1() match { case A1(_) => () } ^ -names-defaults-neg.scala:77: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int]) +names-defaults-neg.scala:78: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]]): T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int]) --- because --- argument expression's type is not compatible with formal parameter type; found : List[Int] @@ -92,97 +89,74 @@ argument expression's type is not compatible with formal parameter type; Error occurred in an application involving default arguments. 
test4() ^ -names-defaults-neg.scala:80: error: type mismatch; +names-defaults-neg.scala:81: error: type mismatch; found : List[Int] required: List[List[?]] def test6[T](x: List[List[T]] = List(1,2)) = x ^ -names-defaults-neg.scala:83: error: type mismatch; - found : Int +names-defaults-neg.scala:84: error: type mismatch; + found : 1 required: String Error occurred in an application involving default arguments. new A2[String]() ^ -names-defaults-neg.scala:87: error: module extending its companion class cannot use default constructor arguments +names-defaults-neg.scala:88: error: module extending its companion class cannot use default constructor arguments object C extends C() ^ -names-defaults-neg.scala:91: error: deprecated parameter name x has to be distinct from any other parameter name (deprecated or not). - def deprNam1(x: Int, @deprecatedName('x) y: String) = 0 - ^ -names-defaults-neg.scala:92: error: deprecated parameter name a has to be distinct from any other parameter name (deprecated or not). - def deprNam2(a: String)(@deprecatedName('a) b: Int) = 1 - ^ -names-defaults-neg.scala:94: error: parameter 'b' is already specified at parameter position 1 +names-defaults-neg.scala:92: error: deprecated parameter name x has to be distinct from any other parameter name (deprecated or not). + def deprNam1(x: Int, @deprecatedName("x") y: String) = 0 + ^ +names-defaults-neg.scala:93: error: deprecated parameter name a has to be distinct from any other parameter name (deprecated or not). + def deprNam2(a: String)(@deprecatedName("a") b: Int) = 1 + ^ +names-defaults-neg.scala:95: error: parameter 'b' is already specified at parameter position 1 deprNam3(y = 10, b = 2) ^ -names-defaults-neg.scala:103: error: unknown parameter name: m +names-defaults-neg.scala:110: error: unknown parameter name: m f3818(y = 1, m = 1) ^ -names-defaults-neg.scala:136: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope. 
- delay(var2 = 40) - ^ -names-defaults-neg.scala:139: error: missing parameter type for expanded function ((x$1: ) => a = x$1) +names-defaults-neg.scala:146: error: missing parameter type for expanded function (() => a = x$1) val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) ^ -names-defaults-neg.scala:139: error: not found: value a +names-defaults-neg.scala:146: error: type mismatch; + found : ? => ? + required: Int val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) - ^ -names-defaults-neg.scala:139: error: not found: value get + ^ +names-defaults-neg.scala:146: error: not found: value get val taf2: Int => Unit = testAnnFun(a = _, b = get("+")) ^ -names-defaults-neg.scala:140: error: parameter 'a' is already specified at parameter position 1 +names-defaults-neg.scala:147: error: parameter 'a' is already specified at parameter position 1 val taf3 = testAnnFun(b = _: String, a = get(8)) ^ -names-defaults-neg.scala:141: error: missing parameter type for expanded function ((x$4: ) => b = x$4) +names-defaults-neg.scala:148: error: missing parameter type for expanded function (() => b = x$4) val taf4: (Int, String) => Unit = testAnnFun(_, b = _) ^ -names-defaults-neg.scala:141: error: not found: value b +names-defaults-neg.scala:148: error: type mismatch; + found : ? => ? + required: String val taf4: (Int, String) => Unit = testAnnFun(_, b = _) - ^ -names-defaults-neg.scala:149: error: variable definition needs type because 'x' is used as a named argument in its body. - def t3 { var x = t.f(x = 1) } - ^ -names-defaults-neg.scala:152: error: variable definition needs type because 'x' is used as a named argument in its body. - object t6 { var x = t.f(x = 1) } - ^ -names-defaults-neg.scala:155: error: variable definition needs type because 'x' is used as a named argument in its body. - class t9 { var x = t.f(x = 1) } - ^ -names-defaults-neg.scala:169: error: variable definition needs type because 'x' is used as a named argument in its body. 
- def u3 { var x = u.f(x = 1) } - ^ -names-defaults-neg.scala:172: error: variable definition needs type because 'x' is used as a named argument in its body. - def u6 { var x = u.f(x = "32") } - ^ -names-defaults-neg.scala:175: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. - def u9 { var x: Int = u.f(x = 1) } - ^ -names-defaults-neg.scala:182: error: variable definition needs type because 'x' is used as a named argument in its body. - class u15 { var x = u.f(x = 1) } - ^ -names-defaults-neg.scala:185: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. - class u18 { var x: Int = u.f(x = 1) } - ^ -names-defaults-neg.scala:94: warning: the parameter name y is deprecated: use b instead + ^ +names-defaults-neg.scala:103: warning: symbol literal is deprecated; use Symbol("foo") instead [quickfixable] + def deprNam6(@deprecatedName('foo) deprNam6Arg: String) = 0 + ^ +names-defaults-neg.scala:105: warning: symbol literal is deprecated; use Symbol("bar") instead [quickfixable] + def deprNam7(@deprecatedName('bar, "2.12.0") deprNam7Arg: String) = 0 + ^ +names-defaults-neg.scala:95: warning: the parameter name y is deprecated: use b instead deprNam3(y = 10, b = 2) ^ -names-defaults-neg.scala:97: warning: naming parameter deprNam4Arg is deprecated. +names-defaults-neg.scala:98: warning: naming parameter deprNam4Arg is deprecated. deprNam4(deprNam4Arg = null) ^ -names-defaults-neg.scala:99: warning: naming parameter deprNam5Arg is deprecated. +names-defaults-neg.scala:100: warning: naming parameter deprNam5Arg is deprecated. deprNam5(deprNam5Arg = null) ^ -names-defaults-neg.scala:152: warning: failed to determine if 'x = ...' is a named argument or an assignment expression. -an explicit type is required for the definition mentioned in the error message above. - object t6 { var x = t.f(x = 1) } - ^ -names-defaults-neg.scala:155: warning: failed to determine if 'x = ...' 
is a named argument or an assignment expression. -an explicit type is required for the definition mentioned in the error message above. - class t9 { var x = t.f(x = 1) } - ^ -names-defaults-neg.scala:182: warning: failed to determine if 'x = ...' is a named argument or an assignment expression. -an explicit type is required for the definition mentioned in the error message above. - class u15 { var x = u.f(x = 1) } - ^ -6 warnings found -45 errors found +names-defaults-neg.scala:104: warning: the parameter name foo is deprecated: use deprNam6Arg instead + deprNam6(foo = null) + ^ +names-defaults-neg.scala:106: warning: the parameter name bar is deprecated (since 2.12.0): use deprNam7Arg instead + deprNam7(bar = null) + ^ +7 warnings +35 errors diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala index 698aefd34b3f..fe76f5e41e8e 100644 --- a/test/files/neg/names-defaults-neg.scala +++ b/test/files/neg/names-defaults-neg.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation +//> using options -deprecation +// object Test extends App { // TESTS @@ -88,16 +89,22 @@ object Test extends App { } // deprecated names - def deprNam1(x: Int, @deprecatedName('x) y: String) = 0 - def deprNam2(a: String)(@deprecatedName('a) b: Int) = 1 - def deprNam3(@deprecatedName('x) a: Int, @deprecatedName('y) b: Int) = a + b + def deprNam1(x: Int, @deprecatedName("x") y: String) = 0 + def deprNam2(a: String)(@deprecatedName("a") b: Int) = 1 + def deprNam3(@deprecatedName("x") a: Int, @deprecatedName("y") b: Int) = a + b deprNam3(y = 10, b = 2) - def deprNam4(@deprecatedName('deprNam4Arg) deprNam4Arg: String) = 0 + def deprNam4(@deprecatedName("deprNam4Arg") deprNam4Arg: String) = 0 deprNam4(deprNam4Arg = null) def deprNam5(@deprecatedName deprNam5Arg: String) = 0 deprNam5(deprNam5Arg = null) + // deprecated deprecatedName constructors + def deprNam6(@deprecatedName('foo) deprNam6Arg: String) = 0 + deprNam6(foo = null) + def deprNam7(@deprecatedName('bar, 
"2.12.0") deprNam7Arg: String) = 0 + deprNam7(bar = null) + // t3818 def f3818(x: Int = 1, y: Int, z: Int = 1) = 0 f3818(y = 1, m = 1) @@ -117,7 +124,7 @@ class B { class C extends B { override def foo(a: Int = 1092) = a - def foo(b: String = "lskdfj") + def foo(b: String = "lskdfj"): Unit def bar(i: Int = 129083) = i } @@ -132,7 +139,7 @@ class A2[T](a: T = 1) // anonymous functions object anfun { var var2 = 0 - def delay(var2: => Unit) { var2 } + def delay(var2: => Unit): Unit = { var2 } delay(var2 = 40) def testAnnFun(a: Int, b: String) = println(a +": "+ b) @@ -144,9 +151,9 @@ object anfun { object t3685 { object t { def f(x: Int) = x } - def t1 { def x = t.f(x = 1) } - def t2 { val x = t.f(x = 1) } - def t3 { var x = t.f(x = 1) } + def t1: Unit = { def x = t.f(x = 1) } + def t2: Unit = { val x = t.f(x = 1) } + def t3: Unit = { var x = t.f(x = 1) } object t4 { def x = t.f(x = 1) } object t5 { val x = t.f(x = 1) } object t6 { var x = t.f(x = 1) } @@ -154,9 +161,9 @@ object t3685 { class t8 { val x = t.f(x = 1) } class t9 { var x = t.f(x = 1) } - def t10 { def x: Int = t.f(x = 1) } - def t11 { val x: Int = t.f(x = 1) } - def t12 { var x: Int = t.f(x = 1) } + def t10: Unit = { def x: Int = t.f(x = 1) } + def t11: Unit = { val x: Int = t.f(x = 1) } + def t12: Unit = { var x: Int = t.f(x = 1) } class t13 { def x: Int = t.f(x = 1) } class t14 { val x: Int = t.f(x = 1) } class t15 { var x: Int = t.f(x = 1) } @@ -164,18 +171,18 @@ object t3685 { object u { def f[T](x: T) = 100 } - def u1 { def x = u.f(x = 1) } - def u2 { val x = u.f(x = 1) } - def u3 { var x = u.f(x = 1) } - def u4 { def x = u.f(x = "23") } - def u5 { val x = u.f(x = "32") } - def u6 { var x = u.f(x = "32") } - def u7 { def x: Int = u.f(x = 1) } - def u8 { val x: Int = u.f(x = 1) } - def u9 { var x: Int = u.f(x = 1) } - def u10 { def x: Int = u.f(x = "32") } - def u11 { val x: Int = u.f(x = "32") } - def u12 { var x: Int = u.f(x = "32") } + def u1: Unit = { def x = u.f(x = 1) } + def u2: Unit = { val x 
= u.f(x = 1) } + def u3: Unit = { var x = u.f(x = 1) } + def u4: Unit = { def x = u.f(x = "23") } + def u5: Unit = { val x = u.f(x = "32") } + def u6: Unit = { var x = u.f(x = "32") } + def u7: Unit = { def x: Int = u.f(x = 1) } + def u8: Unit = { val x: Int = u.f(x = 1) } + def u9: Unit = { var x: Int = u.f(x = 1) } + def u10: Unit = { def x: Int = u.f(x = "32") } + def u11: Unit = { val x: Int = u.f(x = "32") } + def u12: Unit = { var x: Int = u.f(x = "32") } class u13 { def x = u.f(x = 1) } class u14 { val x = u.f(x = 1) } @@ -187,21 +194,3 @@ object t3685 { class u20 { val x: Int = u.f(x = "32") } class u21 { var x: Int = u.f(x = "32") } } - -trait t10336 { - case class C(a: Int = 10, b: String = "20") - - class X { - def c(c: C): (Int, String) = (c.a, c.b) - def f() = { - val (x, y) = c(C(b = "30")) - val a = y.toInt - () - } - def g() = { - val (x, y) = c(C(10, "20").copy(b = "30")) - val a = y.toInt - () - } - } -} diff --git a/test/files/neg/names-package-method-conflict.check b/test/files/neg/names-package-method-conflict.check new file mode 100644 index 000000000000..f5faaef316ce --- /dev/null +++ b/test/files/neg/names-package-method-conflict.check @@ -0,0 +1,4 @@ +names-package-method-conflict.scala:7: error: There is name conflict between the foo.bar and the package foo.bar. 
+ package bar{ + ^ +1 error diff --git a/test/files/neg/names-package-method-conflict.scala b/test/files/neg/names-package-method-conflict.scala new file mode 100644 index 000000000000..34e8837fa49a --- /dev/null +++ b/test/files/neg/names-package-method-conflict.scala @@ -0,0 +1,10 @@ +package object foo { + def bar: Unit = {} +} + +package foo { + + package bar{ + class Baz + } +} diff --git a/test/files/neg/nested-annotation.check b/test/files/neg/nested-annotation.check deleted file mode 100644 index 1cd3df5bb054..000000000000 --- a/test/files/neg/nested-annotation.check +++ /dev/null @@ -1,10 +0,0 @@ -nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline - @ComplexAnnotation(new inline) def bippy(): Int = 1 - ^ -nested-annotation.scala:3: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation - ^ -one warning found -one error found diff --git a/test/files/neg/nested-annotation.scala b/test/files/neg/nested-annotation.scala deleted file mode 100644 index 35c0cd3b75ce..000000000000 --- a/test/files/neg/nested-annotation.scala +++ /dev/null @@ -1,9 +0,0 @@ -import annotation._ - -class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation - -class A { - // It's hard to induce this error because @ComplexAnnotation(@inline) is a parse - // error so it never gets out of the parser, but: - @ComplexAnnotation(new inline) def bippy(): Int = 1 -} diff --git a/test/files/neg/nested-class-shadowing-removal.check b/test/files/neg/nested-class-shadowing-removal.check new file mode 100644 index 000000000000..95f4e66b2516 --- /dev/null +++ b/test/files/neg/nested-class-shadowing-removal.check @@ -0,0 +1,6 @@ +nested-class-shadowing-removal.scala:9: error: shadowing a nested class of a parent is deprecated 
but class Status shadows class Status defined in trait Core; rename the class to something else +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Ext.Status + class Status extends super.Status + ^ +1 error diff --git a/test/files/neg/nested-class-shadowing-removal.scala b/test/files/neg/nested-class-shadowing-removal.scala new file mode 100644 index 000000000000..a691d3aed9da --- /dev/null +++ b/test/files/neg/nested-class-shadowing-removal.scala @@ -0,0 +1,10 @@ +//> using options -Xsource:3 +// + +trait Core { + class Status +} + +trait Ext extends Core { + class Status extends super.Status +} diff --git a/test/files/neg/nested-class-shadowing.check b/test/files/neg/nested-class-shadowing.check new file mode 100644 index 000000000000..928c23622865 --- /dev/null +++ b/test/files/neg/nested-class-shadowing.check @@ -0,0 +1,12 @@ +nested-class-shadowing.scala:9: warning: shadowing a nested class of a parent is deprecated but class Status shadows class Status defined in trait Core; rename the class to something else + class Status extends super.Status + ^ +nested-class-shadowing.scala:13: warning: shadowing a nested class of a parent is deprecated but class Status shadows class Status defined in trait Core; rename the class to something else + class Status + ^ +nested-class-shadowing.scala:17: warning: shadowing a nested class of a parent is deprecated but class Status shadows class Status defined in trait Core; rename the class to something else + class Status + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/nested-class-shadowing.scala b/test/files/neg/nested-class-shadowing.scala new file mode 100644 index 000000000000..c50d695503b9 --- /dev/null +++ b/test/files/neg/nested-class-shadowing.scala @@ -0,0 +1,31 @@ +//> using options -Werror -Xlint:deprecation +// + +trait Core { + class Status +} + +trait Ext extends Core { + class Status extends super.Status +} + +trait AlsoNo extends Core { + class Status +} + +trait NotEven extends Core { + class Status + object Status +} + +class NonStatus + +trait WhatAbout extends Core { + type Status = NonStatus +} + +object Test extends App { + val w = new WhatAbout {} + val x = new w.Status + println(x) +} diff --git a/test/files/neg/nested-fn-print.check b/test/files/neg/nested-fn-print.check index feeac0733fed..8fac1f33f4ea 100644 --- a/test/files/neg/nested-fn-print.check +++ b/test/files/neg/nested-fn-print.check @@ -17,4 +17,4 @@ nested-fn-print.scala:9: error: type mismatch; required: Int => Double x3 = "c" ^ -four errors found +4 errors diff --git a/test/files/neg/newpat_unreachable.check b/test/files/neg/newpat_unreachable.check index 97c233f46176..c2bf92f1d928 100644 --- a/test/files/neg/newpat_unreachable.check +++ b/test/files/neg/newpat_unreachable.check @@ -1,35 +1,35 @@ -newpat_unreachable.scala:7: warning: patterns after a variable pattern cannot match (SLS 8.1.1) +newpat_unreachable.scala:8: warning: patterns after a variable pattern cannot match (SLS 8.1.1) If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` => case b => println("matched b") ^ -newpat_unreachable.scala:8: warning: unreachable code due to variable pattern 'b' on line 7 +newpat_unreachable.scala:9: warning: unreachable code due to variable pattern 'b' on line 8 If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` => case c => println("matched c") ^ -newpat_unreachable.scala:9: 
warning: unreachable code due to variable pattern 'b' on line 7 +newpat_unreachable.scala:10: warning: unreachable code due to variable pattern 'b' on line 8 If you intended to match against value d in class A, you must use backticks, like: case `d` => case d => println("matched d") ^ -newpat_unreachable.scala:10: warning: unreachable code due to variable pattern 'b' on line 7 +newpat_unreachable.scala:11: warning: unreachable code due to variable pattern 'b' on line 8 case _ => println("matched neither") ^ -newpat_unreachable.scala:8: warning: unreachable code +newpat_unreachable.scala:9: warning: unreachable code case c => println("matched c") ^ -newpat_unreachable.scala:23: warning: patterns after a variable pattern cannot match (SLS 8.1.1) +newpat_unreachable.scala:24: warning: patterns after a variable pattern cannot match (SLS 8.1.1) If you intended to match against parameter b of method g, you must use backticks, like: case `b` => case b => 1 ^ -newpat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 23 +newpat_unreachable.scala:25: warning: unreachable code due to variable pattern 'b' on line 24 If you intended to match against parameter c of method h, you must use backticks, like: case `c` => case c => 2 ^ -newpat_unreachable.scala:25: warning: unreachable code due to variable pattern 'b' on line 23 +newpat_unreachable.scala:26: warning: unreachable code due to variable pattern 'b' on line 24 case _ => 3 ^ -newpat_unreachable.scala:24: warning: unreachable code +newpat_unreachable.scala:25: warning: unreachable code case c => 2 ^ -error: No warnings can be incurred under -Xfatal-warnings. -9 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+9 warnings +1 error diff --git a/test/files/neg/newpat_unreachable.scala b/test/files/neg/newpat_unreachable.scala index bf3436dbf6dd..8d6b0221675f 100644 --- a/test/files/neg/newpat_unreachable.scala +++ b/test/files/neg/newpat_unreachable.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { class A { val d = 55 diff --git a/test/files/neg/no-implicit-to-anyref-any-val.check b/test/files/neg/no-implicit-to-anyref-any-val.check index 5953e1bd6d5d..59f3c58565a2 100644 --- a/test/files/neg/no-implicit-to-anyref-any-val.check +++ b/test/files/neg/no-implicit-to-anyref-any-val.check @@ -26,9 +26,9 @@ no-implicit-to-anyref-any-val.scala:32: error: type mismatch; found : Object required: AnyVal Note that implicit conversions are not applicable because they are ambiguous: - both method ArrowAssoc in object Predef of type [A](self: A)ArrowAssoc[A] - and method Ensuring in object Predef of type [A](self: A)Ensuring[A] + both method ArrowAssoc in object Predef of type [A](self: A): ArrowAssoc[A] + and method Ensuring in object Predef of type [A](self: A): Ensuring[A] are possible conversion functions from Object to AnyVal new Object() : AnyVal ^ -6 errors found +6 errors diff --git a/test/files/neg/no-predef.check b/test/files/neg/no-predef.check index 154cdc6a07c6..6816be77786b 100644 --- a/test/files/neg/no-predef.check +++ b/test/files/neg/no-predef.check @@ -1,14 +1,14 @@ -no-predef.scala:3: error: type mismatch; +no-predef.scala:4: error: type mismatch; found : Long (in scala) required: Long (in java.lang) def f1 = 5L: java.lang.Long ^ -no-predef.scala:4: error: type mismatch; +no-predef.scala:5: error: type mismatch; found : Long (in java.lang) required: Long (in scala) def f2 = new java.lang.Long(5) : Long ^ -no-predef.scala:5: error: value map is not a member of String +no-predef.scala:6: error: value map is not a member of String def f3 = "abc" map (_ + 1) ^ -three errors found +3 errors diff --git 
a/test/files/neg/no-predef.scala b/test/files/neg/no-predef.scala index 1c1c99988ee8..64f55028d46a 100644 --- a/test/files/neg/no-predef.scala +++ b/test/files/neg/no-predef.scala @@ -1,4 +1,5 @@ -// scalac: -Yno-predef +//> using options -Yno-predef +// class NoPredef { def f1 = 5L: java.lang.Long def f2 = new java.lang.Long(5) : Long diff --git a/test/files/neg/noMember1.check b/test/files/neg/noMember1.check index 846574bef977..78295f61b457 100644 --- a/test/files/neg/noMember1.check +++ b/test/files/neg/noMember1.check @@ -1,5 +1,5 @@ -noMember1.scala:1: error: object MultiMap is not a member of package scala.collection.mutable -Note: trait MultiMap exists, but it has no companion object. -import scala.collection.mutable.MultiMap._ - ^ -one error found +noMember1.scala:1: error: object IterableOnceOps is not a member of package collection +note: trait IterableOnceOps exists, but it has no companion object. +import scala.collection.IterableOnceOps._ + ^ +1 error diff --git a/test/files/neg/noMember1.scala b/test/files/neg/noMember1.scala index 0aee7bff7f9f..e8aeab45b9f9 100644 --- a/test/files/neg/noMember1.scala +++ b/test/files/neg/noMember1.scala @@ -1,3 +1,3 @@ -import scala.collection.mutable.MultiMap._ +import scala.collection.IterableOnceOps._ class A diff --git a/test/files/neg/noMember2.check b/test/files/neg/noMember2.check index f65571bdc900..a80c74926c0e 100644 --- a/test/files/neg/noMember2.check +++ b/test/files/neg/noMember2.check @@ -1,5 +1,5 @@ -noMember2.scala:2: error: object MultiMap is not a member of package scala.collection.mutable -Note: trait MultiMap exists, but it has no companion object. - val m = scala.collection.mutable.MultiMap(1, 2, 3) - ^ -one error found +noMember2.scala:2: error: object IterableOnceOps is not a member of package collection +note: trait IterableOnceOps exists, but it has no companion object. 
+ val m = scala.collection.IterableOnceOps(1, 2, 3) + ^ +1 error diff --git a/test/files/neg/noMember2.scala b/test/files/neg/noMember2.scala index bf72d4f4713c..9b6d9cad6c39 100644 --- a/test/files/neg/noMember2.scala +++ b/test/files/neg/noMember2.scala @@ -1,3 +1,3 @@ object Test { - val m = scala.collection.mutable.MultiMap(1, 2, 3) + val m = scala.collection.IterableOnceOps(1, 2, 3) } diff --git a/test/files/neg/nonlocal-warning.check b/test/files/neg/nonlocal-warning.check index 6de59ca84e7b..ce49273ad57c 100644 --- a/test/files/neg/nonlocal-warning.check +++ b/test/files/neg/nonlocal-warning.check @@ -1,9 +1,9 @@ -nonlocal-warning.scala:5: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +nonlocal-warning.scala:6: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. catch { case x => 11 } ^ -nonlocal-warning.scala:3: warning: catch block may intercept non-local return from method foo +nonlocal-warning.scala:4: warning: catch block may intercept non-local return from method foo def foo(l: List[Int]): Int = { ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/nonlocal-warning.scala b/test/files/neg/nonlocal-warning.scala index 29cbac9edd41..8fcf2e1c4709 100644 --- a/test/files/neg/nonlocal-warning.scala +++ b/test/files/neg/nonlocal-warning.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class Foo { def foo(l: List[Int]): Int = { try l foreach { _ => return 5 } diff --git a/test/files/neg/nonsense_eq_refine.check b/test/files/neg/nonsense_eq_refine.check deleted file mode 100644 index 756d926cbdc8..000000000000 --- a/test/files/neg/nonsense_eq_refine.check +++ /dev/null @@ -1,9 +0,0 @@ -nonsense_eq_refine.scala:7: warning: E and String are unrelated: they will most likely never compare equal - if (e == "") ??? // warn about comparing unrelated types - ^ -nonsense_eq_refine.scala:10: warning: SE and String are unrelated: they will most likely never compare equal - if (se == "") ??? // types are still unrelated - ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found diff --git a/test/files/neg/nonsense_eq_refine.scala b/test/files/neg/nonsense_eq_refine.scala deleted file mode 100644 index 3e0a64e5bb64..000000000000 --- a/test/files/neg/nonsense_eq_refine.scala +++ /dev/null @@ -1,11 +0,0 @@ -// scalac: -Xfatal-warnings -deprecation -class E -class SE extends Serializable - -object Test { - val e = new E - if (e == "") ??? // warn about comparing unrelated types - - val se = new SE - if (se == "") ??? 
// types are still unrelated -} diff --git a/test/files/neg/nonunit-if.check b/test/files/neg/nonunit-if.check new file mode 100644 index 000000000000..25039baffb26 --- /dev/null +++ b/test/files/neg/nonunit-if.check @@ -0,0 +1,51 @@ +nonunit-if.scala:13: warning: unused value of type scala.concurrent.Future[Int] + improved // warn + ^ +nonunit-if.scala:20: warning: unused value of type String + new E().toString // warn + ^ +nonunit-if.scala:26: warning: unused value of type scala.concurrent.Future[Int] + Future(42) // warn + ^ +nonunit-if.scala:30: warning: unused value of type K + copy() // warn + ^ +nonunit-if.scala:37: warning: unused value of type List[Int] + 27 +: xs // warn + ^ +nonunit-if.scala:44: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + null // warn for purity + ^ +nonunit-if.scala:58: warning: unused value of type U + if (!isEmpty) f(a) // warn, check is on + ^ +nonunit-if.scala:62: warning: unused value of type Boolean + f(a) // warn, check is on + ^ +nonunit-if.scala:73: warning: unused value of type U + if (!fellback) action(z) // warn, check is on + ^ +nonunit-if.scala:81: warning: unused value of type Int + g // warn, check is on + ^ +nonunit-if.scala:79: warning: unused value of type Int + g // warn block statement + ^ +nonunit-if.scala:86: warning: unused value of type Int + g // warn + ^ +nonunit-if.scala:84: warning: unused value of type Int + g // warn + ^ +nonunit-if.scala:96: warning: unused value of type Int + if (b) { // warn, at least one branch looks interesting + ^ +nonunit-if.scala:116: warning: unused value of type scala.collection.mutable.LinkedHashSet[A] + set += a // warn because cannot know whether the `set` was supposed to be consumed or assigned + ^ +nonunit-if.scala:146: warning: unused value of type String + while (it.hasNext) it.next() // warn + ^ +error: No warnings can be incurred under -Werror. 
+16 warnings +1 error diff --git a/test/files/neg/nonunit-if.scala b/test/files/neg/nonunit-if.scala new file mode 100644 index 000000000000..8d79ef2a5fca --- /dev/null +++ b/test/files/neg/nonunit-if.scala @@ -0,0 +1,191 @@ +//> using options -Werror -Wnonunit-statement -Wnonunit-if:true -Wvalue-discard +// debug: -Vprint:refchecks -Yprint-trees:format +import collection.ArrayOps +import collection.mutable.{ArrayBuilder, LinkedHashSet, ListBuffer} +import concurrent._ +import scala.reflect.ClassTag + +class C { + import ExecutionContext.Implicits._ + def c = { + def improved = Future(42) + def stale = Future(27) + improved // warn + stale + } +} +class D { + def d = { + class E + new E().toString // warn + new E().toString * 2 + } +} +class F { + import ExecutionContext.Implicits._ + Future(42) // warn +} +// unused template expression uses synthetic method of class +case class K(s: String) { + copy() // warn +} +// mutations returning this are ok +class Mutate { + val b = ListBuffer.empty[Int] + b += 42 // nowarn, returns this.type + val xs = List(42) + 27 +: xs // warn + + def f(x: Int): this.type = this + def g(): Unit = f(42) // nowarn +} +// some uninteresting expressions may warn for other reasons +class WhoCares { + null // warn for purity + ??? 
// nowarn for impurity +} +// explicit Unit ascription to opt out of warning, even for funky applies +class Absolution { + def f(i: Int): Int = i+1 + import ExecutionContext.Implicits._ + Future(42): Unit // nowarn { F(42)(ctx) }: Unit where annot is on F(42) + f(42): Unit // nowarn +} +// warn uni-branched unless user disables it with -Wnonunit-if:false +class Boxed[A](a: A) { + def isEmpty = false + def foreach[U](f: A => U): Unit = + if (!isEmpty) f(a) // warn, check is on + def forall(f: A => Boolean): Unit = + if (!isEmpty) { + println(".") + f(a) // warn, check is on + } + def take(p: A => Boolean): Option[A] = { + while (isEmpty || !p(a)) () + Some(a).filter(p) + } +} +class Unibranch[A, B] { + def runWith[U](action: B => U): A => Boolean = { x => + val z = null.asInstanceOf[B] + val fellback = false + if (!fellback) action(z) // warn, check is on + !fellback + } + def f(i: Int): Int = { + def g = 17 + if (i < 42) { + g // warn block statement + println("uh oh") + g // warn, check is on + } + while (i < 42) { + g // warn + println("uh oh") + g // warn + } + 42 + } +} +class Dibranch { + def i: Int = ??? + def j: Int = ??? 
+ def f(b: Boolean): Int = { + // if-expr might have an uninteresting LUB + if (b) { // warn, at least one branch looks interesting + println("true") + i + } + else { + println("false") + j + } + 42 + } +} +class Next[A] { + val all = ListBuffer.empty[A] + def f(it: Iterator[A], g: A => A): Unit = + while (it.hasNext) + all += g(it.next()) // nowarn +} +class Setting[A] { + def set = LinkedHashSet.empty[A] + def f(a: A): Unit = { + set += a // warn because cannot know whether the `set` was supposed to be consumed or assigned + println(set) + } +} +// neither StringBuilder warns, because either append is Java method or returns this.type +// while loop looks like if branch with block1(block2, jump to label), where block2 typed as non-unit +class Strung { + def iterator = Iterator.empty[String] + def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val it = iterator + if (it.hasNext) { + jsb.append(it.next()) + while (it.hasNext) { + jsb.append(sep) + jsb.append(it.next()) + } + } + if (end.length != 0) jsb.append(end) + b + } + def f(b: java.lang.StringBuilder, it: Iterator[String]): String = { + while (it.hasNext) { + b.append("\n") + b.append(it.next()) + } + b.toString + } + def g(b: java.lang.StringBuilder, it: Iterator[String]): String = { + while (it.hasNext) it.next() // warn + b.toString + } +} +class J { + import java.util.Collections + def xs: java.util.List[Int] = ??? + def f(): Int = { + Collections.checkedList[Int](xs, classOf[Int]) + 42 + } +} +class Variant { + var bs = ListBuffer.empty[Int] + val xs = ListBuffer.empty[Int] + private[this] val ys = ListBuffer.empty[Int] + private[this] var zs = ListBuffer.empty[Int] + def f(i: Int): Unit = { + bs.addOne(i) + xs.addOne(i) + ys.addOne(i) + zs.addOne(i) + println("done") + } +} +final class ArrayOops[A](private val xs: Array[A]) extends AnyVal { + def other: ArrayOps[A] = ??? 
+ def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- other) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } +} diff --git a/test/files/neg/nonunit-statement.check b/test/files/neg/nonunit-statement.check new file mode 100644 index 000000000000..2a5f1ae9c21d --- /dev/null +++ b/test/files/neg/nonunit-statement.check @@ -0,0 +1,42 @@ +nonunit-statement.scala:13: warning: unused value of type scala.concurrent.Future[Int] + improved // warn + ^ +nonunit-statement.scala:20: warning: unused value of type String + new E().toString // warn + ^ +nonunit-statement.scala:26: warning: unused value of type scala.concurrent.Future[Int] + Future(42) // warn + ^ +nonunit-statement.scala:30: warning: unused value of type K + copy() // warn + ^ +nonunit-statement.scala:37: warning: unused value of type List[Int] + 27 +: xs // warn + ^ +nonunit-statement.scala:44: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + null // warn for purity + ^ +nonunit-statement.scala:79: warning: unused value of type Int + g // warn block statement + ^ +nonunit-statement.scala:86: warning: unused value of type Int + g // warn + ^ +nonunit-statement.scala:84: warning: unused value of type Int + g // warn + ^ +nonunit-statement.scala:96: warning: unused value of type Int + if (b) { // warn, at least one branch looks interesting + ^ +nonunit-statement.scala:116: warning: unused value of type scala.collection.mutable.LinkedHashSet[A] + set += a // warn because cannot know whether the `set` was 
supposed to be consumed or assigned + ^ +nonunit-statement.scala:146: warning: unused value of type String + while (it.hasNext) it.next() // warn + ^ +nonunit-statement.scala:202: warning: a pure expression does nothing in statement position + null // warn for purity, but does not cause multiline clause + ^ +error: No warnings can be incurred under -Werror. +13 warnings +1 error diff --git a/test/files/neg/nonunit-statement.scala b/test/files/neg/nonunit-statement.scala new file mode 100644 index 000000000000..b411249e5084 --- /dev/null +++ b/test/files/neg/nonunit-statement.scala @@ -0,0 +1,203 @@ +//> using options -Werror -Wnonunit-statement -Wnonunit-if:false -Wvalue-discard +// debug: -Vprint:refchecks -Yprint-trees:format +import collection.ArrayOps +import collection.mutable.{ArrayBuilder, LinkedHashSet, ListBuffer} +import concurrent._ +import scala.reflect.ClassTag + +class C { + import ExecutionContext.Implicits._ + def c = { + def improved = Future(42) + def stale = Future(27) + improved // warn + stale + } +} +class D { + def d = { + class E + new E().toString // warn + new E().toString * 2 + } +} +class F { + import ExecutionContext.Implicits._ + Future(42) // warn +} +// unused template expression uses synthetic method of class +case class K(s: String) { + copy() // warn +} +// mutations returning this are ok +class Mutate { + val b = ListBuffer.empty[Int] + b += 42 // nowarn, returns this.type + val xs = List(42) + 27 +: xs // warn + + def f(x: Int): this.type = this + def g(): Unit = f(42) // nowarn +} +// some uninteresting expressions may warn for other reasons +class WhoCares { + null // warn for purity + ??? 
// nowarn for impurity +} +// explicit Unit ascription to opt out of warning, even for funky applies +class Absolution { + def f(i: Int): Int = i+1 + import ExecutionContext.Implicits._ + Future(42): Unit // nowarn { F(42)(ctx) }: Unit where annot is on F(42) + f(42): Unit // nowarn +} +// warn uni-branched unless user disables it with -Wnonunit-if:false +class Boxed[A](a: A) { + def isEmpty = false + def foreach[U](f: A => U): Unit = + if (!isEmpty) f(a) // nowarn, check is off + def forall(f: A => Boolean): Unit = + if (!isEmpty) { + println(".") + f(a) // nowarn, check is off + } + def take(p: A => Boolean): Option[A] = { + while (isEmpty || !p(a)) () + Some(a).filter(p) + } +} +class Unibranch[A, B] { + def runWith[U](action: B => U): A => Boolean = { x => + val z = null.asInstanceOf[B] + val fellback = false + if (!fellback) action(z) // nowarn, check is off + !fellback + } + def f(i: Int): Int = { + def g = 17 + if (i < 42) { + g // warn block statement + println("uh oh") + g // nowarn, check is off + } + while (i < 42) { + g // warn + println("uh oh") + g // warn + } + 42 + } +} +class Dibranch { + def i: Int = ??? + def j: Int = ??? 
+ def f(b: Boolean): Int = { + // if-expr might have an uninteresting LUB + if (b) { // warn, at least one branch looks interesting + println("true") + i + } + else { + println("false") + j + } + 42 + } +} +class Next[A] { + val all = ListBuffer.empty[A] + def f(it: Iterator[A], g: A => A): Unit = + while (it.hasNext) + all += g(it.next()) // nowarn +} +class Setting[A] { + def set = LinkedHashSet.empty[A] + def f(a: A): Unit = { + set += a // warn because cannot know whether the `set` was supposed to be consumed or assigned + println(set) + } +} +// neither StringBuilder warns, because either append is Java method or returns this.type +// while loop looks like if branch with block1(block2, jump to label), where block2 typed as non-unit +class Strung { + def iterator = Iterator.empty[String] + def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val it = iterator + if (it.hasNext) { + jsb.append(it.next()) + while (it.hasNext) { + jsb.append(sep) + jsb.append(it.next()) + } + } + if (end.length != 0) jsb.append(end) + b + } + def f(b: java.lang.StringBuilder, it: Iterator[String]): String = { + while (it.hasNext) { + b.append("\n") + b.append(it.next()) + } + b.toString + } + def g(b: java.lang.StringBuilder, it: Iterator[String]): String = { + while (it.hasNext) it.next() // warn + b.toString + } +} +class J { + import java.util.Collections + def xs: java.util.List[Int] = ??? + def f(): Int = { + Collections.checkedList[Int](xs, classOf[Int]) + 42 + } +} +class Variant { + var bs = ListBuffer.empty[Int] + val xs = ListBuffer.empty[Int] + private[this] val ys = ListBuffer.empty[Int] + private[this] var zs = ListBuffer.empty[Int] + def f(i: Int): Unit = { + bs.addOne(i) + xs.addOne(i) + ys.addOne(i) + zs.addOne(i) + println("done") + } +} +final class ArrayOops[A](private val xs: Array[A]) extends AnyVal { + def other: ArrayOps[A] = ??? 
+ def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- other) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } +} +class Depends { + def f[A](a: A): a.type = a + def g() = { + val d = new Depends + f(d) + () + } +} +// some uninteresting expressions may warn for other reasons +class NotMultiline { + null // warn for purity, but does not cause multiline clause +} diff --git a/test/files/neg/nopredefs.check b/test/files/neg/nopredefs.check index 0a0ab34482bd..88a9a90c06fb 100644 --- a/test/files/neg/nopredefs.check +++ b/test/files/neg/nopredefs.check @@ -1,4 +1,4 @@ nopredefs.scala:5: error: not found: value Set val y = Set(3) ^ -one error found +1 error diff --git a/test/files/neg/not-a-legal-formal-parameter-tuple.check b/test/files/neg/not-a-legal-formal-parameter-tuple.check index 2b906b8ff355..cc0801c645d1 100644 --- a/test/files/neg/not-a-legal-formal-parameter-tuple.check +++ b/test/files/neg/not-a-legal-formal-parameter-tuple.check @@ -16,4 +16,4 @@ Note: Tuples cannot be directly destructured in method or function parameters. or consider a pattern matching anonymous function: `{ case (param1, ..., param3) => ... 
} val z: ((Int, Int, Int) => Int) = (((a, NotAPatternVariableName, c)) => a) ^ -three errors found +3 errors diff --git a/test/files/neg/not-found.check b/test/files/neg/not-found.check new file mode 100644 index 000000000000..8a84f05f022b --- /dev/null +++ b/test/files/neg/not-found.check @@ -0,0 +1,26 @@ +not-found.scala:10: error: not found: value Simple +Identifiers that begin with uppercase are not pattern variables but match the value in scope. + case Simple => 2 + ^ +not-found.scala:11: error: not found: value Simple + case Simple.member => 3 + ^ +not-found.scala:12: error: not found: value sample +Identifiers enclosed in backticks are not pattern variables but match the value in scope. + case `sample` => 4 + ^ +not-found.scala:13: error: not found: type Simple + case _: Simple => 5 + ^ +not-found.scala:14: error: not found: value Simple + case Simple(_) => 6 + ^ +not-found.scala:17: error: object Simple is not a member of package p +did you mean Sample? + def g = p.Simple + ^ +not-found.scala:21: error: not found: value X +Identifiers that begin with uppercase are not pattern variables but match the value in scope. 
+ val X :: Nil = List(42) + ^ +7 errors diff --git a/test/files/neg/not-found.scala b/test/files/neg/not-found.scala new file mode 100644 index 000000000000..239548e58bbe --- /dev/null +++ b/test/files/neg/not-found.scala @@ -0,0 +1,22 @@ + +package p + +object Sample + +trait T { + def f(x: Any) = + x match { + case Sample => 1 + case Simple => 2 + case Simple.member => 3 + case `sample` => 4 + case _: Simple => 5 + case Simple(_) => 6 + case _ => 7 + } + def g = p.Simple + + val x :: Nil = List(42) + + val X :: Nil = List(42) +} diff --git a/test/files/neg/not-possible-cause.check b/test/files/neg/not-possible-cause.check deleted file mode 100644 index 5c09fa154568..000000000000 --- a/test/files/neg/not-possible-cause.check +++ /dev/null @@ -1,9 +0,0 @@ -not-possible-cause.scala:2: error: type mismatch; - found : a.type (with underlying type A) - required: AnyRef -Note that A is bounded only by Equals, which means AnyRef is not a known parent. -Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. - def foo[A <: Product](a: A) { type X = a.type } - ^ -one error found diff --git a/test/files/neg/not-possible-cause.scala b/test/files/neg/not-possible-cause.scala deleted file mode 100644 index 83ec24dec863..000000000000 --- a/test/files/neg/not-possible-cause.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Foo { - def foo[A <: Product](a: A) { type X = a.type } -} diff --git a/test/files/neg/nowarn-lint.check b/test/files/neg/nowarn-lint.check new file mode 100644 index 000000000000..284d93ad0b6c --- /dev/null +++ b/test/files/neg/nowarn-lint.check @@ -0,0 +1,15 @@ +nowarn-lint.scala:30: warning: private val j in class J is never used + private val j = 42 + ^ +nowarn-lint.scala:6: warning: Class parameter is specialized for type Unit. Consider using `@specialized(Specializable.Arg)` instead. 
+class C[@specialized(Unit) A](a: A) + ^ +nowarn-lint.scala:7: warning: Class parameter is specialized for type Unit. Consider using `@specialized(Specializable.Arg)` instead. +class D[@specialized(Specializable.Primitives) A](a: A) + ^ +nowarn-lint.scala:35: warning: @nowarn annotation does not suppress any warnings +@nowarn("any") + ^ +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/nowarn-lint.scala b/test/files/neg/nowarn-lint.scala new file mode 100644 index 000000000000..5fdc2bf327f8 --- /dev/null +++ b/test/files/neg/nowarn-lint.scala @@ -0,0 +1,45 @@ +//> using options -Werror -Xlint:unit-special,unused + +import annotation.nowarn + +// exercise the lint warning +class C[@specialized(Unit) A](a: A) +class D[@specialized(Specializable.Primitives) A](a: A) + +// feel confident that remaining examples will either suppress warning +// or warn no suppression +@nowarn("cat=lint-unit-specialization") +class E[@specialized(Unit) A](a: A) +@nowarn("cat=lint-unit-specialization") +class F[@specialized(Specializable.Primitives) A](a: A) + +// other positions +@nowarn("cat=lint-unit-specialization") +class G[@specialized(Unit) A](x: Int, a: A) + +// what if it's a member with forwarders? +// was: incorrectly warned that nowarn was unused. 
+@nowarn("cat=lint-unit-specialization") +class H[@specialized(Unit) A](x: AnyRef, val a: A) + +@nowarn("cat=lint-unit-specialization") +class HH[@specialized(Unit) A](val a: A)(val aa: A) + +// actual unused thing +class J { + private val j = 42 + def f: Unit = () +} + +// canonically unused nowarn +@nowarn("any") +trait U + +/* as reported +import scala.util.control.ControlThrowable + +@nowarn("cat=lint-unit-specialization") +class NonLocalReturnControl[@specialized(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit) T](val key: AnyRef, val value: T) extends ControlThrowable { + final override def fillInStackTrace(): Throwable = this +} +*/ diff --git a/test/files/neg/nowarnMacros.check b/test/files/neg/nowarnMacros.check index 76ad78c857fd..d1fe8d742484 100644 --- a/test/files/neg/nowarnMacros.check +++ b/test/files/neg/nowarnMacros.check @@ -7,6 +7,6 @@ Test_2.scala:19: warning: A try without a catch or finally is equivalent to putt Test_2.scala:28: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. try 1 // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/nowarnMacros/Test_2.scala b/test/files/neg/nowarnMacros/Test_2.scala index 5bb0e0315538..207e4fa019bf 100644 --- a/test/files/neg/nowarnMacros/Test_2.scala +++ b/test/files/neg/nowarnMacros/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror import language.experimental.macros import scala.annotation.nowarn @@ -29,7 +29,7 @@ object Test { 2 } def t3b = discard { - try 1 + try 1 // warn 2 }: @nowarn } diff --git a/test/files/neg/nowarnPointPos.check b/test/files/neg/nowarnPointPos.check index 020d67781a3f..284ab2b00ecf 100644 --- a/test/files/neg/nowarnPointPos.check +++ b/test/files/neg/nowarnPointPos.check @@ -19,7 +19,8 @@ nowarnPointPos.scala:75: warning: a pure expression does nothing in statement po nowarnPointPos.scala:80: warning: method dep in class C is deprecated (since 1.2.3): message a + dep ^ -nowarnPointPos.scala:45: warning: I3b has a main method with parameter type Array[String], but C.I3b will not be a runnable program. +nowarnPointPos.scala:45: warning: I3b has a valid main method (args: Array[String]): Unit, + but C.I3b will not have an entry point on the JVM. Reason: companion is a trait, which means no static forwarder can be generated. object I3b { @@ -42,6 +43,6 @@ nowarnPointPos.scala:24: warning: @nowarn annotation does not suppress any warni nowarnPointPos.scala:65: warning: @nowarn annotation does not suppress any warnings @nowarn("msg=something else") ^ -error: No warnings can be incurred under -Xfatal-warnings. -14 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+14 warnings +1 error diff --git a/test/files/neg/nowarnPointPos.scala b/test/files/neg/nowarnPointPos.scala index d340f7d308d8..770867388ddd 100644 --- a/test/files/neg/nowarnPointPos.scala +++ b/test/files/neg/nowarnPointPos.scala @@ -1,4 +1,4 @@ -// scalac: -deprecation -Ywarn-unused:nowarn -Yrangepos:false -Xfatal-warnings +//> using options -deprecation -Wunused:nowarn -Yrangepos:false -Werror import scala.annotation._ class ann(a: Any) extends Annotation diff --git a/test/files/neg/nowarnRangePos.check b/test/files/neg/nowarnRangePos.check index cdca43eca6e3..9c20039a995d 100644 --- a/test/files/neg/nowarnRangePos.check +++ b/test/files/neg/nowarnRangePos.check @@ -1,3 +1,7 @@ +nowarnRangePos.scala:84: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. +Applicable -Wconf / @nowarn filters for this warning: msg=, cat=other, site=C.T12.f + @nowarn("v") def f = try 1 + ^ nowarnRangePos.scala:11: warning: method dep in class C is deprecated (since 1.2.3): message @nowarn @ann(dep) def t2 = 0 // deprecation warning, @nowarn unused ^ @@ -19,7 +23,18 @@ nowarnRangePos.scala:75: warning: a pure expression does nothing in statement po nowarnRangePos.scala:80: warning: method dep in class C is deprecated (since 1.2.3): message a + dep ^ -nowarnRangePos.scala:45: warning: I3b has a main method with parameter type Array[String], but C.I3b will not be a runnable program. 
+nowarnRangePos.scala:90: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses +Applicable -Wconf / @nowarn filters for this warning: msg=, cat=other-pure-statement, site=C.T13.g + def g = { 1; 2 } + ^ +nowarnRangePos.scala:113: warning: method dep in class C is deprecated (since 1.2.3): message + @purr def t2 = new C().dep // warn, plus unused @nowarn + ^ +nowarnRangePos.scala:116: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + @nodep def t4 = { 1; 2 } // warn, plus unused @nowarn + ^ +nowarnRangePos.scala:45: warning: I3b has a valid main method (args: Array[String]): Unit, + but C.I3b will not have an entry point on the JVM. Reason: companion is a trait, which means no static forwarder can be generated. object I3b { @@ -42,6 +57,15 @@ nowarnRangePos.scala:24: warning: @nowarn annotation does not suppress any warni nowarnRangePos.scala:65: warning: @nowarn annotation does not suppress any warnings @nowarn("msg=something else") // unused ^ -error: No warnings can be incurred under -Xfatal-warnings. -14 warnings found -one error found +nowarnRangePos.scala:91: warning: @nowarn annotation does not suppress any warnings + @nowarn("v") def unused = 0 + ^ +nowarnRangePos.scala:113: warning: @nowarn annotation does not suppress any warnings + @purr def t2 = new C().dep // warn, plus unused @nowarn + ^ +nowarnRangePos.scala:116: warning: @nowarn annotation does not suppress any warnings + @nodep def t4 = { 1; 2 } // warn, plus unused @nowarn + ^ +error: No warnings can be incurred under -Werror. 
+21 warnings +1 error diff --git a/test/files/neg/nowarnRangePos.scala b/test/files/neg/nowarnRangePos.scala index ec8aaea2bba7..a8f5440734db 100644 --- a/test/files/neg/nowarnRangePos.scala +++ b/test/files/neg/nowarnRangePos.scala @@ -1,4 +1,4 @@ -// scalac: -deprecation -Ywarn-unused:nowarn -Yrangepos:true -Xfatal-warnings +//> using options -deprecation -Wunused:nowarn -Werror import scala.annotation._ class ann(a: Any) extends Annotation @@ -79,6 +79,17 @@ class C { val a = dep: @nowarn a + dep } + + @nowarn object T12 { + @nowarn("v") def f = try 1 + def g = { 1; 2 } + } + + @nowarn("verbose") object T13 { + @nowarn def f = try 1 + def g = { 1; 2 } + @nowarn("v") def unused = 0 + } } trait T { @@ -93,3 +104,14 @@ class Uh { def g(c: C) = c.dep } } + +object sd884 { + class nodep extends nowarn("cat=deprecation") + class purr extends nowarn("msg=pure expression does nothing") + + @nodep def t1 = new C().dep // no warn + @purr def t2 = new C().dep // warn, plus unused @nowarn + + @purr def t3 = { 1; 2 } // no warn + @nodep def t4 = { 1; 2 } // warn, plus unused @nowarn +} diff --git a/test/files/neg/null-unsoundness.check b/test/files/neg/null-unsoundness.check index bc0b5c996ca1..b19f3c8c7ca6 100644 --- a/test/files/neg/null-unsoundness.check +++ b/test/files/neg/null-unsoundness.check @@ -2,4 +2,4 @@ null-unsoundness.scala:8: error: stable identifier required, but A.this.x found. Note that value x is not stable because its type, A.this.A with A.this.D, is volatile. 
var y: x.T = new C("abc") ^ -one error found +1 error diff --git a/test/files/neg/nullary-override-3a.check b/test/files/neg/nullary-override-3a.check new file mode 100644 index 000000000000..73e0af01aa7a --- /dev/null +++ b/test/files/neg/nullary-override-3a.check @@ -0,0 +1,25 @@ +nullary-override-3a.scala:4: error: method with a single empty parameter list overrides method x in class A defined without a parameter list +def x: Int (defined in class A) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=B +class B extends A { override def x(): Int = 4 } + ^ +nullary-override-3a.scala:16: error: method with a single empty parameter list overrides method x in trait T1 defined without a parameter list +def x: String (defined in trait T1) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Mix12b +class Mix12b extends T1 with T2 { override def x() = "12b" } + ^ +nullary-override-3a.scala:18: error: method without a parameter list overrides method x in trait T2 defined with a single empty parameter list +def x(): String (defined in trait T2) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Mix21a +class Mix21a extends T2 with T1 { override def x = "21a" } + ^ +nullary-override-3a.scala:19: error: method with a single empty parameter list overrides method x in trait T1 defined without a parameter list +def x: String (defined in trait T1) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. 
Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Mix21b +class Mix21b extends T2 with T1 { override def x() = "21b" } + ^ +4 errors diff --git a/test/files/neg/nullary-override-3a.scala b/test/files/neg/nullary-override-3a.scala new file mode 100644 index 000000000000..da03b78585e6 --- /dev/null +++ b/test/files/neg/nullary-override-3a.scala @@ -0,0 +1,40 @@ +//> using options -Werror -Wunused:nowarn -Xsource:3 +// +class A { def x: Int = 3 } +class B extends A { override def x(): Int = 4 } + + +trait T1 { def x: String = "1" } +trait T2 { def x(): String = "2" } + +// without overrides you just get: error: class X inherits conflicting members: +// def x: String (defined in trait T1) and +// def x(): String (defined in trait T2) +// (note: this can be resolved by declaring an `override` in class X.) + +// Mix12a in nullary-override-3b +class Mix12b extends T1 with T2 { override def x() = "12b" } + +class Mix21a extends T2 with T1 { override def x = "21a" } +class Mix21b extends T2 with T1 { override def x() = "21b" } + +import java.util.concurrent.atomic.{ AtomicMarkableReference => AMR } +trait Ref1 { def getReference: String = "1" } +trait Ref2 { def getReference(): String = "2" } + +// without overrides you just get: error: class X inherits conflicting members: +// def getReference(): String (defined in class AtomicMarkableReference) and +// def getReference: String (defined in trait Ref1) +// (note: this can be resolved by declaring an `override` in class X.) 
+ +class Mark1a extends AMR[String]("", false) with Ref1 { override def getReference = "1a" } +class Mark1b extends AMR[String]("", false) with Ref1 { override def getReference() = "1b" } + +class Mark2a extends AMR[String]("", false) with Ref2 { override def getReference = "2a" } +class Mark2b extends AMR[String]("", false) with Ref2 { override def getReference() = "2b" } + +class Mark12a extends AMR[String]("", false) with Ref1 with Ref2 { override def getReference = "12a" } +class Mark12b extends AMR[String]("", false) with Ref1 with Ref2 { override def getReference() = "12b" } + +class Mark21a extends AMR[String]("", false) with Ref2 with Ref1 { override def getReference = "21a" } +class Mark21c extends AMR[String]("", false) with Ref2 with Ref1 { override def getReference() = "21b" } diff --git a/test/files/neg/nullary-override-3b.check b/test/files/neg/nullary-override-3b.check new file mode 100644 index 000000000000..4726c8b05f1e --- /dev/null +++ b/test/files/neg/nullary-override-3b.check @@ -0,0 +1,13 @@ +nullary-override-3b.scala:6: error: method without a parameter list overrides method x in class P defined with a single empty parameter list +def x(): Int (defined in class P) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Q +class Q extends P { override def x: Int = 4 } + ^ +nullary-override-3b.scala:11: error: method without a parameter list overrides method x in trait T2 defined with a single empty parameter list +def x(): String (defined in trait T2) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Mix12a +class Mix12a extends T1 with T2 { override def x = "12a" } + ^ +2 errors diff --git a/test/files/neg/nullary-override-3b.scala b/test/files/neg/nullary-override-3b.scala new file mode 100644 index 000000000000..bf46b885dda3 --- /dev/null +++ b/test/files/neg/nullary-override-3b.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Wunused:nowarn -Xsource:3 +// +// P has parens +class P { def x(): Int = 3 } +// Q is questionable +class Q extends P { override def x: Int = 4 } + +trait T1 { def x: String = "1" } +trait T2 { def x(): String = "2" } + +class Mix12a extends T1 with T2 { override def x = "12a" } +// the rest in nullary-override-3a diff --git a/test/files/neg/nullary-override.check b/test/files/neg/nullary-override.check index 21115d7fa508..708fe068162e 100644 --- a/test/files/neg/nullary-override.check +++ b/test/files/neg/nullary-override.check @@ -1,6 +1,24 @@ -nullary-override.scala:3: warning: non-nullary method overrides nullary method +nullary-override.scala:4: warning: method with a single empty parameter list overrides method x in class A defined without a parameter list [quickfixable] class B extends A { override def x(): Int = 4 } ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +nullary-override.scala:15: warning: method without a parameter list overrides method x in class P defined with a single empty parameter list [quickfixable] +class Q extends P { override def x: Int = 4 } + ^ +nullary-override.scala:36: warning: method without a parameter list overrides method x in trait T2 defined with a single empty parameter list [quickfixable] +class Mix12a extends T1 with T2 { override def x = "12a" } + ^ +nullary-override.scala:36: warning: method without a parameter list overrides method x in trait T1 defined with a single empty parameter list [quickfixable] +class Mix12a extends T1 with T2 { override def x = "12a" } + ^ +nullary-override.scala:37: warning: method with a single empty parameter list overrides method x in trait T1 defined without a parameter list [quickfixable] +class Mix12b extends T1 with T2 { override def x() = "12b" } + ^ +nullary-override.scala:39: warning: method without a parameter list overrides method x in trait T2 defined with a single empty parameter list [quickfixable] +class Mix21a extends T2 with T1 { override def x = "21a" } + ^ +nullary-override.scala:40: warning: method with a single empty parameter list overrides method x in trait T1 defined without a parameter list [quickfixable] +class Mix21b extends T2 with T1 { override def x() = "21b" } + ^ +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/nullary-override.scala b/test/files/neg/nullary-override.scala index 2343dbbab7a5..4d3884deed94 100644 --- a/test/files/neg/nullary-override.scala +++ b/test/files/neg/nullary-override.scala @@ -1,4 +1,61 @@ -// scalac: -Xfatal-warnings -Xlint +//> using options -Werror -Wunused:nowarn +// class A { def x: Int = 3 } class B extends A { override def x(): Int = 4 } +class C extends java.lang.CharSequence { + def charAt(x$1: Int): Char = ??? + def length: Int = ??? + def subSequence(x$1: Int, x$2: Int): CharSequence = ??? 
+} + +// P has parens +class P { def x(): Int = 3 } +// Q is questionable +class Q extends P { override def x: Int = 4 } + +// Welcome to the Happy J +class J { override def toString = "Happy J" } + +import annotation._ +class E { def x(): Int = 3 } +class F extends E { @nowarn override def x: Int = 4 } + +class G { def x: Int = 5 } +class H extends G { @nowarn override def x(): Int = 6 } + + +trait T1 { def x: String = "1" } +trait T2 { def x(): String = "2" } + +// without overrides you just get: error: class X inherits conflicting members: +// def x: String (defined in trait T1) and +// def x(): String (defined in trait T2) +// (note: this can be resolved by declaring an `override` in class X.) + +class Mix12a extends T1 with T2 { override def x = "12a" } +class Mix12b extends T1 with T2 { override def x() = "12b" } + +class Mix21a extends T2 with T1 { override def x = "21a" } +class Mix21b extends T2 with T1 { override def x() = "21b" } + +import java.util.concurrent.atomic.{ AtomicMarkableReference => AMR } +trait Ref1 { def getReference: String = "1" } +trait Ref2 { def getReference(): String = "2" } + +// without overrides you just get: error: class X inherits conflicting members: +// def getReference(): String (defined in class AtomicMarkableReference) and +// def getReference: String (defined in trait Ref1) +// (note: this can be resolved by declaring an `override` in class X.) 
+ +class Mark1a extends AMR[String]("", false) with Ref1 { override def getReference = "1a" } +class Mark1b extends AMR[String]("", false) with Ref1 { override def getReference() = "1b" } + +class Mark2a extends AMR[String]("", false) with Ref2 { override def getReference = "2a" } +class Mark2b extends AMR[String]("", false) with Ref2 { override def getReference() = "2b" } + +class Mark12a extends AMR[String]("", false) with Ref1 with Ref2 { override def getReference = "12a" } +class Mark12b extends AMR[String]("", false) with Ref1 with Ref2 { override def getReference() = "12b" } + +class Mark21a extends AMR[String]("", false) with Ref2 with Ref1 { override def getReference = "21a" } +class Mark21c extends AMR[String]("", false) with Ref2 with Ref1 { override def getReference() = "21b" } diff --git a/test/files/neg/numeric-add-string-warning.check b/test/files/neg/numeric-add-string-warning.check new file mode 100644 index 000000000000..c53160fba233 --- /dev/null +++ b/test/files/neg/numeric-add-string-warning.check @@ -0,0 +1,6 @@ +numeric-add-string-warning.scala:4: warning: method + in class Int is deprecated (since 2.13.0): Adding a number and a String is deprecated. Use the string interpolation `s"$num$str"` + val x = 4 + "2" + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/numeric-add-string-warning.scala b/test/files/neg/numeric-add-string-warning.scala new file mode 100644 index 000000000000..07cae7735a80 --- /dev/null +++ b/test/files/neg/numeric-add-string-warning.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings -deprecation +// +object Test { + val x = 4 + "2" +} diff --git a/test/files/neg/object-not-a-value.check b/test/files/neg/object-not-a-value.check index afe61298e9c4..2af0beb26c77 100644 --- a/test/files/neg/object-not-a-value.check +++ b/test/files/neg/object-not-a-value.check @@ -1,4 +1,4 @@ object-not-a-value.scala:5: error: class java.util.List is not a value List(1) map (_ + 1) ^ -one error found +1 error diff --git a/test/files/neg/open-infix-future.check b/test/files/neg/open-infix-future.check index b39489cabad0..15515fc2ef50 100644 --- a/test/files/neg/open-infix-future.check +++ b/test/files/neg/open-infix-future.check @@ -19,4 +19,4 @@ open-infix-future.scala:14: error: illegal start of statement open-infix-future.scala:15: error: ';' expected but 'def' found. open def bla(y: Int) = y // error ^ -7 errors found +7 errors diff --git a/test/files/neg/open-infix-future.scala b/test/files/neg/open-infix-future.scala index 2a250f3b006e..f667d7961f21 100644 --- a/test/files/neg/open-infix-future.scala +++ b/test/files/neg/open-infix-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // open trait A // error diff --git a/test/files/neg/optimiseDeprecated.check b/test/files/neg/optimiseDeprecated.check deleted file mode 100644 index 1c1d3e90b7fd..000000000000 --- a/test/files/neg/optimiseDeprecated.check +++ /dev/null @@ -1,4 +0,0 @@ -warning: -optimise is deprecated: In 2.12, -optimise enables -opt:l:inline -opt-inline-from:**. Check -opt:help for using the Scala 2.12 optimizer. -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found diff --git a/test/files/neg/optimiseDeprecated.scala b/test/files/neg/optimiseDeprecated.scala deleted file mode 100644 index 3fd4e2748407..000000000000 --- a/test/files/neg/optimiseDeprecated.scala +++ /dev/null @@ -1,2 +0,0 @@ -// scalac: -optimise -deprecation -Xfatal-warnings -class C diff --git a/test/files/neg/ordering-migration.check b/test/files/neg/ordering-migration.check new file mode 100644 index 000000000000..ef74061c367c --- /dev/null +++ b/test/files/neg/ordering-migration.check @@ -0,0 +1,30 @@ +ordering-migration.scala:3: warning: object DeprecatedFloatOrdering in object Ordering has changed semantics in version 2.13.0: + The default implicit ordering for floats now maintains consistency + between its `compare` method and its `lt`, `min`, `equiv`, etc., methods, + which means nonconforming to IEEE 754's behavior for -0.0F and NaN. + The sort order of floats remains the same, however, with NaN at the end. + Import Ordering.Float.IeeeOrdering to recover the previous behavior. + See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Float$.html. + val f = Ordering[Float] + ^ +ordering-migration.scala:4: warning: object DeprecatedDoubleOrdering in object Ordering has changed semantics in version 2.13.0: + The default implicit ordering for doubles now maintains consistency + between its `compare` method and its `lt`, `min`, `equiv`, etc., methods, + which means nonconforming to IEEE 754's behavior for -0.0 and NaN. + The sort order of doubles remains the same, however, with NaN at the end. + Import Ordering.Double.IeeeOrdering to recover the previous behavior. + See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Double$.html. 
+ val d = Ordering[Double] + ^ +ordering-migration.scala:7: warning: object DeprecatedDoubleOrdering in object Ordering has changed semantics in version 2.13.0: + The default implicit ordering for doubles now maintains consistency + between its `compare` method and its `lt`, `min`, `equiv`, etc., methods, + which means nonconforming to IEEE 754's behavior for -0.0 and NaN. + The sort order of doubles remains the same, however, with NaN at the end. + Import Ordering.Double.IeeeOrdering to recover the previous behavior. + See also https://www.scala-lang.org/api/current/scala/math/Ordering$$Double$.html. + list.sorted + ^ +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/ordering-migration.scala b/test/files/neg/ordering-migration.scala new file mode 100644 index 000000000000..9458bbac9be4 --- /dev/null +++ b/test/files/neg/ordering-migration.scala @@ -0,0 +1,8 @@ +//> using options -Xmigration -Werror +object Test { + val f = Ordering[Float] + val d = Ordering[Double] + + val list = List(1.0, 2.0, 3.0) + list.sorted +} diff --git a/test/files/neg/outer-ref-checks.check b/test/files/neg/outer-ref-checks.check index 632f2dc0fa00..00fc7ae8010c 100644 --- a/test/files/neg/outer-ref-checks.check +++ b/test/files/neg/outer-ref-checks.check @@ -19,6 +19,6 @@ outer-ref-checks.scala:48: warning: The outer reference in this type test cannot outer-ref-checks.scala:58: warning: The outer reference in this type test cannot be checked at run time. case _: (Inner @uncheckedVariance) => // unchecked warning ^ -error: No warnings can be incurred under -Xfatal-warnings. -7 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+7 warnings +1 error diff --git a/test/files/neg/outer-ref-checks.scala b/test/files/neg/outer-ref-checks.scala index 6c996aa6f83d..7e9996dac780 100644 --- a/test/files/neg/outer-ref-checks.scala +++ b/test/files/neg/outer-ref-checks.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // import scala.annotation.unchecked.uncheckedVariance @@ -50,7 +50,7 @@ object O extends Outer { } def belongsOtherOuter4(a: Outer#Inner): Unit = a match { - case _: (Inner @unchecked) => // warning supressed + case _: (Inner @unchecked) => // warning suppressed case _ => } diff --git a/test/files/neg/overload-msg.check b/test/files/neg/overload-msg.check index c61ace0dd0a3..392a5fe3939c 100644 --- a/test/files/neg/overload-msg.check +++ b/test/files/neg/overload-msg.check @@ -1,4 +1,4 @@ -overload-msg.scala:3: error: overloaded method value + with alternatives: +overload-msg.scala:3: error: overloaded method + with alternatives: (x: Double)Double (x: Float)Float (x: Long)Long @@ -10,4 +10,4 @@ overload-msg.scala:3: error: overloaded method value + with alternatives: cannot be applied to (Int(in method f)) def f[Int](y: Int) = x + y ^ -one error found +1 error diff --git a/test/files/neg/overload.check b/test/files/neg/overload.check index abfabaf3f2b7..e6bf7af0363d 100644 --- a/test/files/neg/overload.check +++ b/test/files/neg/overload.check @@ -1,7 +1,7 @@ overload.scala:10: error: ambiguous reference to overloaded definition, -both method f in class D of type (x: Any)Unit -and method f in class C of type (x: Int)Unit +both method f in class D of type (x: Any): Unit +and method f in class C of type (x: Int): Unit match argument types (Int) (new D).f(1) ^ -one error found +1 error diff --git a/test/files/neg/overload.scala b/test/files/neg/overload.scala index 6ad911e90e5f..3128a39d06b8 100644 --- a/test/files/neg/overload.scala +++ b/test/files/neg/overload.scala @@ -1,9 +1,9 @@ class C { - def f(x: Int) {} + def f(x: Int): Unit = {} } 
class D extends C { - def f(x: Any) {} + def f(x: Any): Unit = {} } object Test { diff --git a/test/files/neg/overloaded-implicit.check b/test/files/neg/overloaded-implicit.check index 0d48c5fe3d0f..662e593a22e3 100644 --- a/test/files/neg/overloaded-implicit.check +++ b/test/files/neg/overloaded-implicit.check @@ -1,10 +1,10 @@ -overloaded-implicit.scala:3: warning: parameterized overloaded implicit methods are not visible as view bounds +overloaded-implicit.scala:4: warning: parameterized overloaded implicit methods are not visible as view bounds implicit def imp1[T](x: List[T]): Map[T, T] = Map() ^ -overloaded-implicit.scala:4: warning: parameterized overloaded implicit methods are not visible as view bounds +overloaded-implicit.scala:5: warning: parameterized overloaded implicit methods are not visible as view bounds implicit def imp1[T](x: Set[T]): Map[T, T] = Map() ^ -warning: four feature warnings; re-run with -feature for details -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +warning: 4 feature warnings; re-run with -feature for details +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/overloaded-implicit.scala b/test/files/neg/overloaded-implicit.scala index 9eebae35a98e..05b13be5b745 100644 --- a/test/files/neg/overloaded-implicit.scala +++ b/test/files/neg/overloaded-implicit.scala @@ -1,15 +1,16 @@ -// scalac: -Xlint:poly-implicit-overload -Xfatal-warnings -Xdev +//> using options -Xlint:poly-implicit-overload -Xfatal-warnings -Xdev +// object Test { implicit def imp1[T](x: List[T]): Map[T, T] = Map() implicit def imp1[T](x: Set[T]): Map[T, T] = Map() - def f[T <% Map[Int, Int]](x: T): Double = 1.0d + def f[T](x: T)(implicit ev: T => Map[Int, Int]): Double = 1.0d // not parameterized, no warning implicit def imp2(x: List[Int]): String = "a" implicit def imp2(x: Set[Int]): String = "b" - def g[T <% String](x: T): Double = 2.0d + def g[T](x: T)(implicit ev: T => String): Double = 2.0d def main(args: Array[String]): Unit = { // println(f(List(1))) diff --git a/test/files/neg/overloaded-unapply.check b/test/files/neg/overloaded-unapply.check index 3951166de550..dc884a699b80 100644 --- a/test/files/neg/overloaded-unapply.check +++ b/test/files/neg/overloaded-unapply.check @@ -1,6 +1,6 @@ overloaded-unapply.scala:18: error: ambiguous reference to overloaded definition, -both method unapply in object List of type [a](xs: List[a])Option[Null] -and method unapply in object List of type [a](xs: List[a])Option[(a, List[a])] +both method unapply in object List of type [a](xs: List[a]): Option[Null] +and method unapply in object List of type [a](xs: List[a]): Option[(a, List[a])] match argument types (List[a]) case List(x, xs) => 7 ^ @@ -11,4 +11,4 @@ overloaded-unapply.scala:12: error: method unapply is defined twice; the conflicting method unapply was defined at line 7:7 def unapply[a](xs: List[a]): Option[Null] = xs match { ^ -three errors found +3 errors diff --git a/test/files/neg/override-concrete-type.check b/test/files/neg/override-concrete-type.check new file mode 100644 index 
000000000000..4935097cbfe2 --- /dev/null +++ b/test/files/neg/override-concrete-type.check @@ -0,0 +1,12 @@ +override-concrete-type.scala:13: error: incompatible type in overriding +type A1 = Something (defined in class Foo) + (Equivalent type required when overriding a type alias.) + override type A1 = Something with Serializable + ^ +override-concrete-type.scala:14: error: incompatible type in overriding +type A2 <: Something (defined in class Foo); + found : Any + required: <: Something + override type A2 = Any + ^ +2 errors diff --git a/test/files/neg/override-concrete-type.scala b/test/files/neg/override-concrete-type.scala new file mode 100644 index 000000000000..cabeaa75ca55 --- /dev/null +++ b/test/files/neg/override-concrete-type.scala @@ -0,0 +1,15 @@ +class Something + +trait X { + type A1 +} + +class Foo extends X { + type A1 = Something + type A2 <: Something +} + +class Bar extends Foo { + override type A1 = Something with Serializable + override type A2 = Any +} diff --git a/test/files/neg/override-final-implicit.check b/test/files/neg/override-final-implicit.check new file mode 100644 index 000000000000..a849d1cae00c --- /dev/null +++ b/test/files/neg/override-final-implicit.check @@ -0,0 +1,9 @@ +override-final-implicit.scala:6: warning: Implicit definition should have explicit type (inferred Test.this.FooExtender) [quickfixable] + override implicit def FooExtender(foo: String) = super.FooExtender(foo) + ^ +override-final-implicit.scala:6: error: cannot override final member: +final implicit def FooExtender(foo: String): Test.this.FooExtender (defined in class Implicits) + override implicit def FooExtender(foo: String) = super.FooExtender(foo) + ^ +1 warning +1 error diff --git a/test/files/neg/override-final-implicit.scala b/test/files/neg/override-final-implicit.scala new file mode 100644 index 000000000000..7c0905407cab --- /dev/null +++ b/test/files/neg/override-final-implicit.scala @@ -0,0 +1,7 @@ +class Implicits { + final implicit class 
FooExtender(foo: String) +} + +class Test extends Implicits { + override implicit def FooExtender(foo: String) = super.FooExtender(foo) +} diff --git a/test/files/neg/override-object-flag.check b/test/files/neg/override-object-flag.check index 344165138dd3..f779914fc563 100644 --- a/test/files/neg/override-object-flag.check +++ b/test/files/neg/override-object-flag.check @@ -1,5 +1,5 @@ -override-object-flag.scala:3: error: overriding object Foo in trait A; - object Foo cannot override final member +override-object-flag.scala:3: error: cannot override final member: +object Foo (defined in trait A) trait B extends A { override object Foo } ^ -one error found +1 error diff --git a/test/files/neg/override-object-no.check b/test/files/neg/override-object-no.check deleted file mode 100644 index 598ca8a7365f..000000000000 --- a/test/files/neg/override-object-no.check +++ /dev/null @@ -1,31 +0,0 @@ -override-object-no.scala:15: error: overriding object Bar in trait Foo with object Bar in trait Foo2: -an overriding object must conform to the overridden object's class bound; - found : case1.Bippy - required: case1.Bippy with case1.Bippo - override object Bar extends Bippy { // err - ^ -override-object-no.scala:22: error: overriding object Bar in trait Quux1 with object Bar in trait Quux2: -an overriding object must conform to the overridden object's class bound; - found : AnyRef{def g: String} - required: AnyRef{def g: Int} - trait Quux2 extends Quux1 { override object Bar { def g = "abc" } } // err - ^ -override-object-no.scala:26: error: overriding object Bar in trait Quux3; - object Bar cannot override final member - trait Quux4 extends Quux3 { override object Bar } // err - ^ -override-object-no.scala:44: error: overriding object A in class Foo with object A in class P2: -an overriding object must conform to the overridden object's class bound; - found : case2.Bar[List[String]] - required: case2.Bar[Traversable[String]] - override object A extends Bar[List[String]] // 
err - ^ -override-object-no.scala:53: error: overriding method x in trait A of type => SI9574.Foo.type; - method x has incompatible type - trait B extends A { def x: Bar.type } // should not compile (scala/bug#9574) - ^ -override-object-no.scala:54: error: overriding method x in trait A of type => SI9574.Foo.type; - object x has incompatible type - trait C extends A { override object x } - ^ -6 errors found diff --git a/test/files/neg/override-object-no.scala b/test/files/neg/override-object-no.scala deleted file mode 100644 index 8c3bf2e94bfa..000000000000 --- a/test/files/neg/override-object-no.scala +++ /dev/null @@ -1,57 +0,0 @@ -// scalac: -Yoverride-objects -// See also pos/override-object-yes.scala - -package case1 { - // Missing interface in overriding object - class Bippy { def f = 1 } - trait Bippo - - trait Foo { - object Bar extends Bippy with Bippo { override def f = 2 } - def f(x: Bippo) - def g = f(Bar) - } - trait Foo2 extends Foo { - override object Bar extends Bippy { // err - override def f = 3 - } - } - - // type mismatch in member - trait Quux1 { object Bar { def g = 55 } } - trait Quux2 extends Quux1 { override object Bar { def g = "abc" } } // err - - // still can't override final objects! 
- trait Quux3 { final object Bar { } } - trait Quux4 extends Quux3 { override object Bar } // err -} - -// type parameter as-seen-from business -package case2 { - // invariance (see pos for the covariant case) - class Bar[T] - - class Foo[T] { - object A extends Bar[T] - } - - class Baz[S] extends Foo[S] { - override object A extends Bar[S] - } - - class P1 extends Foo[Traversable[String]] - class P2 extends P1 { - override object A extends Bar[List[String]] // err - } -} - -// Both overridden and overriding members must be objects, not vals with a module type -object SI9574 { - object Foo - object Bar - trait A { def x: Foo.type } - trait B extends A { def x: Bar.type } // should not compile (scala/bug#9574) - trait C extends A { override object x } - trait D { object x; def y = x } - trait E extends D { override val x: super.x.type = y } // OK but doesn't need object subtyping exception -} diff --git a/test/files/neg/override.check b/test/files/neg/override.check index 8be98bf4d0cd..9f69b6b70e87 100644 --- a/test/files/neg/override.check +++ b/test/files/neg/override.check @@ -1,5 +1,8 @@ -override.scala:9: error: overriding type T in trait A with bounds >: Int <: Int; - type T in trait B with bounds >: String <: String has incompatible type +override.scala:9: error: incompatible type in overriding +type T >: Int <: Int (defined in trait A) + with type T >: String <: String (defined in trait B); + found : >: String <: String + required: >: Int <: Int lazy val x : A with B = {println(""); x} ^ -one error found +1 error diff --git a/test/files/neg/parens-for-params.check b/test/files/neg/parens-for-params.check new file mode 100644 index 000000000000..498fb1feafb2 --- /dev/null +++ b/test/files/neg/parens-for-params.check @@ -0,0 +1,7 @@ +parens-for-params.scala:5: error: parentheses are required around the parameter of a lambda +Use '-Wconf:msg=lambda-parens:s' to silence this warning. 
[quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + x: Int => x * 2 + ^ +1 error diff --git a/test/files/neg/parens-for-params.scala b/test/files/neg/parens-for-params.scala new file mode 100644 index 000000000000..b774c8d8cac6 --- /dev/null +++ b/test/files/neg/parens-for-params.scala @@ -0,0 +1,8 @@ +//> using options -Xsource:3 + +class C { + def f = { + x: Int => x * 2 + } + def g = (x: Int) => x * 2 +} diff --git a/test/files/neg/parent-inherited-twice-error.check b/test/files/neg/parent-inherited-twice-error.check index 521a6c19d010..368c81b8131e 100644 --- a/test/files/neg/parent-inherited-twice-error.check +++ b/test/files/neg/parent-inherited-twice-error.check @@ -4,4 +4,4 @@ class B extends A with A parent-inherited-twice-error.scala:2: error: trait A is inherited twice class B extends A with A ^ -two errors found +2 errors diff --git a/test/files/neg/parstar.check b/test/files/neg/parstar.check index 108f0f4de8e0..3a90eb6bec70 100644 --- a/test/files/neg/parstar.check +++ b/test/files/neg/parstar.check @@ -4,4 +4,4 @@ parstar.scala:8: error: *-parameter must come last parstar.scala:9: error: *-parameter must come last def m(a: A*, b: B*) = a.toArray ^ -two errors found +2 errors diff --git a/test/files/neg/partestInvalidFlag.check b/test/files/neg/partestInvalidFlag.check index 7a54e3aa43ad..d4fd6e56d99d 100644 --- a/test/files/neg/partestInvalidFlag.check +++ b/test/files/neg/partestInvalidFlag.check @@ -1,4 +1,4 @@ error: bad option: '-badCompilerFlag' error: bad options: -badCompilerFlag notAFlag -opt:badChoice error: flags file may only contain compiler options, found: -badCompilerFlag notAFlag -opt:badChoice -three errors found +3 errors diff --git a/test/files/neg/partestInvalidFlag.scala b/test/files/neg/partestInvalidFlag.scala index 7ea96993279e..b64913fa6e76 100644 
--- a/test/files/neg/partestInvalidFlag.scala +++ b/test/files/neg/partestInvalidFlag.scala @@ -1,2 +1,3 @@ -// scalac: -badCompilerFlag notAFlag -opt:badChoice +//> using options -badCompilerFlag notAFlag -opt:badChoice +// class C diff --git a/test/files/neg/pat_unreachable.check b/test/files/neg/pat_unreachable.check index 4b01437ff809..3447b8c87d73 100644 --- a/test/files/neg/pat_unreachable.check +++ b/test/files/neg/pat_unreachable.check @@ -1,17 +1,17 @@ -pat_unreachable.scala:23: warning: patterns after a variable pattern cannot match (SLS 8.1.1) +pat_unreachable.scala:24: warning: patterns after a variable pattern cannot match (SLS 8.1.1) If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` => case b => println("matched b") ^ -pat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 23 +pat_unreachable.scala:25: warning: unreachable code due to variable pattern 'b' on line 24 If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` => case c => println("matched c") ^ -pat_unreachable.scala:25: warning: unreachable code due to variable pattern 'b' on line 23 +pat_unreachable.scala:26: warning: unreachable code due to variable pattern 'b' on line 24 case _ => println("matched neither") ^ -pat_unreachable.scala:24: warning: unreachable code +pat_unreachable.scala:25: warning: unreachable code case c => println("matched c") ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/pat_unreachable.scala b/test/files/neg/pat_unreachable.scala index c7dc6bd0ed6c..ae73d384eec4 100644 --- a/test/files/neg/pat_unreachable.scala +++ b/test/files/neg/pat_unreachable.scala @@ -1,20 +1,21 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test extends App { - def unreachable1(xs:Seq[Char]) = xs match { + def unreachable1(xs:Seq[Char]) = (xs: @unchecked) match { case Seq(x, y, _*) => x::y::Nil case Seq(x, y, z, w) => List(z,w) // redundant! } - def unreachable2(xs:Seq[Char]) = xs match { + def unreachable2(xs:Seq[Char]) = (xs: @unchecked) match { case Seq(x, y, _*) => x::y::Nil case Seq(x, y) => List(x, y) } - def not_unreachable(xs:Seq[Char]) = xs match { + def not_unreachable(xs:Seq[Char]) = (xs: @unchecked) match { case Seq(x, y, _*) => x::y::Nil case Seq(x) => List(x) } - def not_unreachable2(xs:Seq[Char]) = xs match { + def not_unreachable2(xs:Seq[Char]) = (xs: @unchecked) match { case Seq(x, y) => x::y::Nil case Seq(x, y, z, _*) => List(x,y) } diff --git a/test/files/neg/patmat-classtag-compound.check b/test/files/neg/patmat-classtag-compound.check index 7e13e4f3ad1e..26fca152e626 100644 --- a/test/files/neg/patmat-classtag-compound.check +++ b/test/files/neg/patmat-classtag-compound.check @@ -1,6 +1,6 @@ patmat-classtag-compound.scala:14: warning: abstract type pattern A is unchecked since it is eliminated by erasure case b: A with Bar => true ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/patmat-classtag-compound.scala b/test/files/neg/patmat-classtag-compound.scala index fbd527db91c1..8867974a9ce1 100644 --- a/test/files/neg/patmat-classtag-compound.scala +++ b/test/files/neg/patmat-classtag-compound.scala @@ -1,4 +1,4 @@ -// scalac: -unchecked -Xfatal-warnings +//> using options -Xfatal-warnings // object Test extends App{ trait Bar diff --git a/test/files/neg/patmat-exprs-b.check b/test/files/neg/patmat-exprs-b.check new file mode 100644 index 000000000000..40003f76ead4 --- /dev/null +++ b/test/files/neg/patmat-exprs-b.check @@ -0,0 +1,21 @@ +patmat-exprs-b.scala:42: warning: parameter num in class Add is never used + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:46: warning: parameter num in class Add2 is never used + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + ^ +patmat-exprs-b.scala:49: warning: parameter num in class Add3 is never used + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:42: warning: @nowarn annotation does not suppress any warnings + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:46: warning: @nowarn annotation does not suppress any warnings + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + ^ +patmat-exprs-b.scala:49: warning: @nowarn annotation does not suppress any warnings + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/patmat-exprs-b.scala b/test/files/neg/patmat-exprs-b.scala new file mode 100644 index 000000000000..ac35e9c8bdc2 --- /dev/null +++ b/test/files/neg/patmat-exprs-b.scala @@ -0,0 +1,53 @@ +//> using options -Werror -Xlint +// + +import annotation.nowarn + +trait Pattern { + + trait NumericOps[T] extends Serializable { + + def zero: T + + def add(a: T, b: T): T + def add(a: T, b: T, c: T): T = add(a, add(b, c)) + + def sum(terms: Iterable[T]) = terms.foldLeft(zero)(add) + def sum(terms: Iterator[T]) = terms.foldLeft(zero)(add) + } + + trait Expr[T] { + + /** Returns arguments of this operator */ + def args: Iterable[Expr[_]] + + def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) + + def specialize(implicit num: NumericOps[T]): Expr[T] = + this match { + case Add(Seq(a, b)) => Add2(a, b) + case Add(Seq(a, b, c)) => Add3(a, b, c) + case x => x + } + } + + trait TwoArg[T] extends Expr[T] { + val left: Expr[T] + val right: Expr[T] + val args = List(left, right) + } + + trait ManyArg[T] extends Expr[T] + + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + override def toString = "(" + args.mkString(" + ") + ")" + } + + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + override def toString = "(" + left + " + " + right + ")" + } + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + val args = List(a1, a2, a3) + override def toString = "(" + a1 + " + " + a2 + " + " + a3 + ")" + } +} diff --git a/test/files/neg/patmat-sealed-reachable.check b/test/files/neg/patmat-sealed-reachable.check new file mode 100644 index 000000000000..6e3bd3713e89 --- /dev/null +++ b/test/files/neg/patmat-sealed-reachable.check @@ -0,0 +1,12 @@ +patmat-sealed-reachable.scala:14: warning: fruitless type test: a value of type Option[Int] cannot also be a 
SomeClass + def b(c: Option[Int]) = c match { case _: SomeClass =>; case _ => } + ^ +patmat-sealed-reachable.scala:13: warning: fruitless type test: a value of type Option[Int] cannot also be a SealedTrait + def a(c: Option[Int]) = c match { case _: SealedTrait =>; case _ => } + ^ +patmat-sealed-reachable.scala:15: warning: fruitless type test: a value of type Option[Int] cannot also be a UnsealedTrait + def c(c: Option[Int]) = c match { case _: UnsealedTrait =>; case _ => } + ^ +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/patmat-sealed-reachable.scala b/test/files/neg/patmat-sealed-reachable.scala new file mode 100644 index 000000000000..e315db9743b7 --- /dev/null +++ b/test/files/neg/patmat-sealed-reachable.scala @@ -0,0 +1,18 @@ +//> using options -Werror + +// aka t12438.scala + +sealed trait SealedTrait +class SomeClass +trait UnsealedTrait + +sealed abstract class O +class O1 extends O + +class Test { + def a(c: Option[Int]) = c match { case _: SealedTrait =>; case _ => } + def b(c: Option[Int]) = c match { case _: SomeClass =>; case _ => } + def c(c: Option[Int]) = c match { case _: UnsealedTrait =>; case _ => } + // O1 is not final , so there could be a value of type O1 with UnsealedTrait + def nowarn(c: O) = c match { case _: UnsealedTrait =>; case _ => } +} diff --git a/test/files/neg/patmat-seq-neg.check b/test/files/neg/patmat-seq-neg.check new file mode 100644 index 000000000000..2297f1efbb3e --- /dev/null +++ b/test/files/neg/patmat-seq-neg.check @@ -0,0 +1,15 @@ +patmat-seq-neg.scala:15: error: error during expansion of this match (this is a scalac bug). +The underlying error was: type mismatch; + found : scala.collection.mutable.ArrayBuffer[Int] + required: Seq[Int] + def t3: Any = 2 match { + ^ +patmat-seq-neg.scala:18: error: error during expansion of this match (this is a scalac bug). 
+The underlying error was: value toSeq is not a member of Array[Int] + def t4: Any = 2 match { + ^ +patmat-seq-neg.scala:24: error: error during expansion of this match (this is a scalac bug). +The underlying error was: value drop is not a member of Array[Int] + def t6: Any = 2 match { + ^ +3 errors diff --git a/test/files/neg/patmat-seq-neg.scala b/test/files/neg/patmat-seq-neg.scala new file mode 100644 index 000000000000..2a69d9afe594 --- /dev/null +++ b/test/files/neg/patmat-seq-neg.scala @@ -0,0 +1,27 @@ +object A { + def unapplySeq(a: Int) = Some(collection.mutable.ArrayBuffer(1,2,3)) +} +object B { + def unapplySeq(a: Int) = Some(Array(1,2,3)) +} + +class T { + def t1: Any = 2 match { + case A(xs@_*) => xs // ok + } + def t2: Any = 2 match { + case A(x, y) => (x, y) // ok + } + def t3: Any = 2 match { + case A(x, xs@_*) => (x, xs) // type error with call to drop. found: ArrayBuffer, required: Seq. + } + def t4: Any = 2 match { + case B(xs@_*) => xs // error: toSeq is not a member of Array. no ArrayOps because adaptToMember is disabled after typer. + } + def t5: Any = 2 match { + case B(x, y) => (x, y) // ok + } + def t6: Any = 2 match { + case B(x, xs@_*) => (x, xs) // error: drop is not a member of Array + } +} diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check index 5507972b949c..e3cfe7d23aac 100644 --- a/test/files/neg/patmat-type-check.check +++ b/test/files/neg/patmat-type-check.check @@ -33,4 +33,4 @@ patmat-type-check.scala:30: error: scrutinee is incompatible with pattern type; required: Test.Bop3[Char] def f4[T](x: Bop3[Char]) = x match { case Seq('b', 'o', 'b') => true } // fail ^ -7 errors found +7 errors diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check index 9f1331cafee2..272dd53df4e0 100644 --- a/test/files/neg/patmatexhaust.check +++ b/test/files/neg/patmatexhaust.check @@ -1,42 +1,46 @@ -patmatexhaust.scala:8: warning: match may not be exhaustive. 
+patmatexhaust.scala:9: warning: match may not be exhaustive. It would fail on the following input: Baz def ma1(x:Foo) = x match { ^ -patmatexhaust.scala:12: warning: match may not be exhaustive. +patmatexhaust.scala:13: warning: match may not be exhaustive. It would fail on the following input: Bar(_) def ma2(x:Foo) = x match { ^ -patmatexhaust.scala:24: warning: match may not be exhaustive. +patmatexhaust.scala:25: warning: match may not be exhaustive. It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult()) def ma3(x:Mult) = (x,x) match { // not exhaustive ^ -patmatexhaust.scala:50: warning: match may not be exhaustive. +patmatexhaust.scala:51: warning: match may not be exhaustive. It would fail on the following inputs: Gp(), Gu def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included ^ -patmatexhaust.scala:56: warning: unreachable code +patmatexhaust.scala:57: warning: unreachable code case _ if 1 == 0 => ^ -patmatexhaust.scala:54: warning: match may not be exhaustive. +patmatexhaust.scala:55: warning: match may not be exhaustive. It would fail on the following input: Gp() def ma5(x:Deep) = x match { ^ -patmatexhaust.scala:76: warning: match may not be exhaustive. +patmatexhaust.scala:61: warning: match may not be exhaustive. +It would fail on the following input: Nil + def ma6() = List(1,2) match { + ^ +patmatexhaust.scala:77: warning: match may not be exhaustive. It would fail on the following input: B() def ma9(x: B) = x match { ^ -patmatexhaust.scala:101: warning: match may not be exhaustive. +patmatexhaust.scala:102: warning: match may not be exhaustive. It would fail on the following input: C1() def ma10(x: C) = x match { // not exhaustive: C1 is not sealed. ^ -patmatexhaust.scala:115: warning: match may not be exhaustive. +patmatexhaust.scala:116: warning: match may not be exhaustive. It would fail on the following inputs: D1, D2() def ma10(x: C) = x match { // not exhaustive: C1 has subclasses. 
^ -patmatexhaust.scala:127: warning: match may not be exhaustive. +patmatexhaust.scala:128: warning: match may not be exhaustive. It would fail on the following input: C1() def ma10(x: C) = x match { // not exhaustive: C1 is not abstract. ^ -error: No warnings can be incurred under -Xfatal-warnings. -10 warnings found -one error found +error: No warnings can be incurred under -Werror. +11 warnings +1 error diff --git a/test/files/neg/patmatexhaust.scala b/test/files/neg/patmatexhaust.scala index d0344b83be82..e44c26d70edc 100644 --- a/test/files/neg/patmatexhaust.scala +++ b/test/files/neg/patmatexhaust.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ypatmat-exhaust-depth off +//> using options -Xfatal-warnings -Ypatmat-exhaust-depth off +// class TestSealedExhaustive { // compile only sealed abstract class Foo @@ -57,7 +58,7 @@ class TestSealedExhaustive { // compile only case Ga => } - def ma6() = List(1,2) match { // give up + def ma6() = List(1,2) match { case List(1,2) => case x :: xs => } diff --git a/test/files/neg/patternalts.check b/test/files/neg/patternalts.check index 9bec9a001ae5..b11d3bfb1d31 100644 --- a/test/files/neg/patternalts.check +++ b/test/files/neg/patternalts.check @@ -1,4 +1,4 @@ patternalts.scala:3: error: illegal variable in pattern alternative case List(x) | List() => Console.println(x) ^ -one error found +1 error diff --git a/test/files/neg/permanent-blindness.check b/test/files/neg/permanent-blindness.check index 93a9a370fb2f..5e46b5f673ea 100644 --- a/test/files/neg/permanent-blindness.check +++ b/test/files/neg/permanent-blindness.check @@ -1,12 +1,12 @@ -permanent-blindness.scala:11: warning: imported `Bippy` is permanently hidden by definition of class Bippy in package bar +permanent-blindness.scala:12: warning: imported `Bippy` is permanently hidden by definition of class Bippy in package bar import foo.{ Bippy, Bop, Dingus } ^ -permanent-blindness.scala:11: warning: imported `Bop` is permanently hidden by definition of 
object Bop in package bar +permanent-blindness.scala:12: warning: imported `Bop` is permanently hidden by definition of object Bop in package bar import foo.{ Bippy, Bop, Dingus } ^ -permanent-blindness.scala:11: warning: imported `Dingus` is permanently hidden by definition of object Dingus in package bar +permanent-blindness.scala:12: warning: imported `Dingus` is permanently hidden by definition of object Dingus in package bar import foo.{ Bippy, Bop, Dingus } ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/permanent-blindness.scala b/test/files/neg/permanent-blindness.scala index c430b31eab57..88fd07ada448 100644 --- a/test/files/neg/permanent-blindness.scala +++ b/test/files/neg/permanent-blindness.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// package foo { class Bippy object Bop { diff --git a/test/files/neg/pickle-java-crash.check b/test/files/neg/pickle-java-crash.check index f313c75337c5..c25ab6697ff5 100644 --- a/test/files/neg/pickle-java-crash.check +++ b/test/files/neg/pickle-java-crash.check @@ -1,4 +1,4 @@ -Crash.java:4: error: not found: type NotThere +Crash.java:5: error: not found: type NotThere NotThere notThere(); ^ -one error found +1 error diff --git a/test/files/neg/pickle-java-crash/Brash.scala b/test/files/neg/pickle-java-crash/Brash.scala index d523655715c3..1a37d99291f4 100644 --- a/test/files/neg/pickle-java-crash/Brash.scala +++ b/test/files/neg/pickle-java-crash/Brash.scala @@ -1,4 +1,4 @@ -// scalac: -Ypickle-java +//> using options -Ypickle-java package crashy object brash diff --git a/test/files/neg/pickle-java-crash/Crash.java b/test/files/neg/pickle-java-crash/Crash.java index d6af4ae9dda9..2770a8cfc40f 100644 --- a/test/files/neg/pickle-java-crash/Crash.java +++ b/test/files/neg/pickle-java-crash/Crash.java @@ -1,5 +1,6 @@ + package 
crashy; public class Crash { NotThere notThere(); -} \ No newline at end of file +} diff --git a/test/files/neg/post-postfix.check b/test/files/neg/post-postfix.check new file mode 100644 index 000000000000..03ff0619eff0 --- /dev/null +++ b/test/files/neg/post-postfix.check @@ -0,0 +1,9 @@ +post-postfix.scala:5: error: postfix operator head needs to be enabled +by making the implicit value scala.language.postfixOps visible. +This can be achieved by adding the import clause 'import scala.language.postfixOps' +or by setting the compiler option -language:postfixOps. +See the Scaladoc for value scala.language.postfixOps for a discussion +why the feature needs to be explicitly enabled. + def f(): Int = List(1) head + ^ +1 error diff --git a/test/files/neg/post-postfix.scala b/test/files/neg/post-postfix.scala new file mode 100644 index 000000000000..516de20929c3 --- /dev/null +++ b/test/files/neg/post-postfix.scala @@ -0,0 +1,6 @@ + +//import language.postfixOps + +trait T { + def f(): Int = List(1) head +} diff --git a/test/files/neg/predef-masking.check b/test/files/neg/predef-masking.check index 79e4dece8aa8..ff01562e51f1 100644 --- a/test/files/neg/predef-masking.check +++ b/test/files/neg/predef-masking.check @@ -1,4 +1,4 @@ predef-masking.scala:7: error: value + is not a member of type parameter T def f[T](x: T) = x + 5 ^ -one error found +1 error diff --git a/test/files/neg/prefix-unary-nilary-deprecation.check b/test/files/neg/prefix-unary-nilary-deprecation.check new file mode 100644 index 000000000000..7f511fa86f42 --- /dev/null +++ b/test/files/neg/prefix-unary-nilary-deprecation.check @@ -0,0 +1,14 @@ +prefix-unary-nilary-deprecation.scala:4: warning: unary prefix operator definition with empty parameter list is deprecated: instead, remove () to declare as `def unary_~ : Foo = this` [quickfixable] + def unary_~() : Foo = this + ^ +prefix-unary-nilary-deprecation.scala:5: warning: unary prefix operator definition with empty parameter list is deprecated: 
instead, remove () to declare as `def unary_-(implicit pos: Long) = this` [quickfixable] + def unary_-()(implicit pos: Long) = this + ^ +prefix-unary-nilary-deprecation.scala:12: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method unary_~, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. [quickfixable] + val f2 = ~f + ^ +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/prefix-unary-nilary-deprecation.scala b/test/files/neg/prefix-unary-nilary-deprecation.scala new file mode 100644 index 000000000000..1a41640d48c4 --- /dev/null +++ b/test/files/neg/prefix-unary-nilary-deprecation.scala @@ -0,0 +1,13 @@ +//> using options -Werror -Xlint:deprecation +// +class Foo { + def unary_~() : Foo = this + def unary_-()(implicit pos: Long) = this + + def unary_! : Foo = this // ok + def unary_+(implicit pos: Long) = this // ok +} +object Test { + val f = new Foo + val f2 = ~f +} diff --git a/test/files/neg/prefix-unary-nilary-removal.check b/test/files/neg/prefix-unary-nilary-removal.check new file mode 100644 index 000000000000..8f2c1388258a --- /dev/null +++ b/test/files/neg/prefix-unary-nilary-removal.check @@ -0,0 +1,20 @@ +prefix-unary-nilary-removal.scala:4: warning: unary prefix operator definition with empty parameter list is deprecated: instead, remove () to declare as `def unary_~ : Foo = Foo()` [quickfixable] + def unary_~(): Foo = Foo() + ^ +prefix-unary-nilary-removal.scala:5: warning: unary prefix operator definition with empty parameter list is deprecated: instead, remove () to declare as `def unary_-(implicit pos: Long) = Foo()` [quickfixable] + def unary_-()(implicit pos: Long) = Foo() + ^ +prefix-unary-nilary-removal.scala:15: warning: Auto-application to `()` is deprecated. 
Supply the empty argument list `()` explicitly to invoke method unary_~, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. [quickfixable] + val f2 = ~f + ^ +prefix-unary-nilary-removal.scala:5: warning: parameter pos in method unary_- is never used + def unary_-()(implicit pos: Long) = Foo() + ^ +prefix-unary-nilary-removal.scala:8: warning: parameter pos in method unary_+ is never used + def unary_+(implicit pos: Long) = Foo() // ok + ^ +error: No warnings can be incurred under -Werror. +5 warnings +1 error diff --git a/test/files/neg/prefix-unary-nilary-removal.scala b/test/files/neg/prefix-unary-nilary-removal.scala new file mode 100644 index 000000000000..23826e988301 --- /dev/null +++ b/test/files/neg/prefix-unary-nilary-removal.scala @@ -0,0 +1,16 @@ +//> using options -Werror -Xlint +// +class Foo { + def unary_~(): Foo = Foo() + def unary_-()(implicit pos: Long) = Foo() + + def `unary_!`: Foo = Foo() // ok + def unary_+(implicit pos: Long) = Foo() // ok +} +object Foo { + def apply() = new Foo +} +object Test { + val f = Foo() + val f2 = ~f +} diff --git a/test/files/neg/primitive-numbers-no-underlying.check b/test/files/neg/primitive-numbers-no-underlying.check new file mode 100644 index 000000000000..1d13eaaf26f2 --- /dev/null +++ b/test/files/neg/primitive-numbers-no-underlying.check @@ -0,0 +1,22 @@ +primitive-numbers-no-underlying.scala:3: error: value underlying is not a member of Byte + b.underlying + ^ +primitive-numbers-no-underlying.scala:4: error: value underlying is not a member of Short + s.underlying + ^ +primitive-numbers-no-underlying.scala:5: error: value underlying is not a member of Char + c.underlying + ^ +primitive-numbers-no-underlying.scala:6: error: value underlying is not a member of Int + i.underlying + ^ +primitive-numbers-no-underlying.scala:7: error: value underlying is not a member of Long + l.underlying + ^ 
+primitive-numbers-no-underlying.scala:8: error: value underlying is not a member of Float + f.underlying + ^ +primitive-numbers-no-underlying.scala:9: error: value underlying is not a member of Double + d.underlying + ^ +7 errors diff --git a/test/files/neg/primitive-numbers-no-underlying.scala b/test/files/neg/primitive-numbers-no-underlying.scala new file mode 100644 index 000000000000..1ee1644afb48 --- /dev/null +++ b/test/files/neg/primitive-numbers-no-underlying.scala @@ -0,0 +1,11 @@ +class PrimitiveNumbers(b: Byte, s: Short, c: Char, i: Int, l: Long, f: Float, d: Double) { + def noUnderlying(): Unit = { + b.underlying + s.underlying + c.underlying + i.underlying + l.underlying + f.underlying + d.underlying + } +} diff --git a/test/files/neg/primitive-sigs-1.check b/test/files/neg/primitive-sigs-1.check index 77dc457a499c..02ac5a34b8d8 100644 --- a/test/files/neg/primitive-sigs-1.check +++ b/test/files/neg/primitive-sigs-1.check @@ -3,4 +3,4 @@ B.scala:3: error: type mismatch; required: AC[Integer] J.f(new Bippy()) ^ -one error found +1 error diff --git a/test/files/neg/primitive-sigs-1/A.scala b/test/files/neg/primitive-sigs-1/A.scala index 0dd83b5d6ac2..007a64c8f1de 100644 --- a/test/files/neg/primitive-sigs-1/A.scala +++ b/test/files/neg/primitive-sigs-1/A.scala @@ -6,4 +6,4 @@ abstract class AC[T <: Int] { } class Bippy extends AC[Int] { def f(): Int = 5 -} \ No newline at end of file +} diff --git a/test/files/neg/private-implicit-class.check b/test/files/neg/private-implicit-class.check new file mode 100644 index 000000000000..29dfb141fcc6 --- /dev/null +++ b/test/files/neg/private-implicit-class.check @@ -0,0 +1,8 @@ +private-implicit-class.scala:6: error: method BarExtender in class ImplicitsPrivate cannot be accessed as a member of ImplicitsPrivate from class TestPrivate + override implicit def BarExtender(bar: Int) = super.BarExtender(bar) // error + ^ +private-implicit-class.scala:6: warning: Implicit definition should have explicit type 
[quickfixable] + override implicit def BarExtender(bar: Int) = super.BarExtender(bar) // error + ^ +1 warning +1 error diff --git a/test/files/neg/private-implicit-class.scala b/test/files/neg/private-implicit-class.scala new file mode 100644 index 000000000000..c3f3a15e2db4 --- /dev/null +++ b/test/files/neg/private-implicit-class.scala @@ -0,0 +1,7 @@ +class ImplicitsPrivate { + private implicit class BarExtender(bar: Int) +} + +class TestPrivate extends ImplicitsPrivate { + override implicit def BarExtender(bar: Int) = super.BarExtender(bar) // error +} diff --git a/test/files/neg/procedure-deprecation.check b/test/files/neg/procedure-deprecation.check new file mode 100644 index 000000000000..9953124cf823 --- /dev/null +++ b/test/files/neg/procedure-deprecation.check @@ -0,0 +1,18 @@ +procedure-deprecation.scala:4: warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `bar`'s return type [quickfixable] + def bar {} + ^ +procedure-deprecation.scala:5: warning: procedure syntax is deprecated: instead, add `: Unit` to explicitly declare `baz`'s return type [quickfixable] + def baz + ^ +procedure-deprecation.scala:6: warning: procedure syntax is deprecated: instead, add `: Unit` to explicitly declare `boo`'s return type [quickfixable] + def boo(i: Int, l: Long) + ^ +procedure-deprecation.scala:7: warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `boz`'s return type [quickfixable] + def boz(i: Int, l: Long) {} + ^ +procedure-deprecation.scala:8: warning: procedure syntax is deprecated for constructors: add `=`, as in method definition [quickfixable] + def this(i: Int) { this() } // Don't complain here! or maybe do complain + ^ +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/procedure-deprecation.scala b/test/files/neg/procedure-deprecation.scala new file mode 100644 index 000000000000..9ae5ed4d566c --- /dev/null +++ b/test/files/neg/procedure-deprecation.scala @@ -0,0 +1,10 @@ +//> using options -Werror -Xlint:deprecation +// +abstract class Foo { + def bar {} + def baz + def boo(i: Int, l: Long) + def boz(i: Int, l: Long) {} + def this(i: Int) { this() } // Don't complain here! or maybe do complain + def foz: Unit // Don't complain here! +} diff --git a/test/files/neg/procedure-removal.check b/test/files/neg/procedure-removal.check new file mode 100644 index 000000000000..f8d6f30f3f6d --- /dev/null +++ b/test/files/neg/procedure-removal.check @@ -0,0 +1,27 @@ +procedure-removal.scala:4: warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `bar`'s return type [quickfixable] + def bar {} + ^ +procedure-removal.scala:5: warning: procedure syntax is deprecated: instead, add `: Unit` to explicitly declare `baz`'s return type [quickfixable] + def baz + ^ +procedure-removal.scala:6: warning: procedure syntax is deprecated: instead, add `: Unit` to explicitly declare `boo`'s return type [quickfixable] + def boo(i: Int, l: Long) + ^ +procedure-removal.scala:7: warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `boz`'s return type [quickfixable] + def boz(i: Int, l: Long) {} + ^ +procedure-removal.scala:8: warning: procedure syntax is deprecated for constructors: add `=`, as in method definition [quickfixable] + def this(i: Int) { this() } // Don't complain here! Just slap them with an error. 
+ ^ +procedure-removal.scala:4: warning: side-effecting nullary methods are discouraged: suggest defining as `def bar()` instead [quickfixable] + def bar {} + ^ +procedure-removal.scala:5: warning: side-effecting nullary methods are discouraged: suggest defining as `def baz()` instead [quickfixable] + def baz + ^ +procedure-removal.scala:9: warning: side-effecting nullary methods are discouraged: suggest defining as `def foz()` instead [quickfixable] + def foz: Unit // Don't complain here! + ^ +error: No warnings can be incurred under -Werror. +8 warnings +1 error diff --git a/test/files/neg/procedure-removal.scala b/test/files/neg/procedure-removal.scala new file mode 100644 index 000000000000..e7ae67e4ae71 --- /dev/null +++ b/test/files/neg/procedure-removal.scala @@ -0,0 +1,10 @@ +//> using options -Werror -Xlint +// +abstract class Foo { + def bar {} + def baz + def boo(i: Int, l: Long) + def boz(i: Int, l: Long) {} + def this(i: Int) { this() } // Don't complain here! Just slap them with an error. + def foz: Unit // Don't complain here! 
+} diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check index 4f076ec99341..fda310f81e4c 100644 --- a/test/files/neg/protected-constructors.check +++ b/test/files/neg/protected-constructors.check @@ -1,25 +1,25 @@ -protected-constructors.scala:15: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding +protected-constructors.scala:15: error: class Foo3 in object Ding cannot be accessed as a member of object dingus.Ding from object P in package hungus Access to protected class Foo3 not permitted because enclosing object P in package hungus is not a subclass of object Ding in package dingus where target is defined class Bar3 extends Ding.Foo3("abc") ^ -protected-constructors.scala:15: error: no arguments allowed for nullary constructor Object: ()Object +protected-constructors.scala:15: error: no arguments allowed for nullary constructor Object: (): Object class Bar3 extends Ding.Foo3("abc") ^ -protected-constructors.scala:17: error: no arguments allowed for nullary constructor Foo1: ()dingus.Foo1 +protected-constructors.scala:17: error: no arguments allowed for nullary constructor Foo1: (): dingus.Foo1 val foo1 = new Foo1("abc") ^ -protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P +protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P from object P in package hungus Access to protected constructor Foo2 not permitted because enclosing object P in package hungus is not a subclass of class Foo2 in package dingus where target is defined val foo2 = new Foo2("abc") ^ -protected-constructors.scala:19: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding +protected-constructors.scala:19: error: class Foo3 in object Ding cannot be accessed as a member of object dingus.Ding from object P in package hungus Access to protected class Foo3 not permitted because enclosing object P in package 
hungus is not a subclass of object Ding in package dingus where target is defined val foo3 = new Ding.Foo3("abc") ^ -5 errors found +5 errors diff --git a/test/files/neg/protected-static-fail.check b/test/files/neg/protected-static-fail.check index 1d1d32653c97..b43b81a9d4cd 100644 --- a/test/files/neg/protected-static-fail.check +++ b/test/files/neg/protected-static-fail.check @@ -1,16 +1,16 @@ -S.scala:5: error: method f in class J cannot be accessed in object bippy.J +S.scala:5: error: method f in class J cannot be accessed as a member of object bippy.J from object Test in package bippy J.f() ^ -S.scala:6: error: method f1 in object S1 cannot be accessed in object bippy.S1 +S.scala:6: error: method f1 in object S1 cannot be accessed as a member of object bippy.S1 from object Test in package bippy Access to protected method f1 not permitted because enclosing object Test in package bippy is not a subclass of object S1 in package bippy where target is defined S1.f1() ^ -S.scala:8: error: method f2 in class S2 cannot be accessed in bippy.S2 +S.scala:8: error: method f2 in class S2 cannot be accessed as a member of bippy.S2 from object Test in package bippy Access to protected method f2 not permitted because enclosing object Test in package bippy is not a subclass of class S2 in package bippy where target is defined x.f2() ^ -three errors found +3 errors diff --git a/test/files/neg/protected-static-fail/S0.scala b/test/files/neg/protected-static-fail/S0.scala index 1a3d192e5e3e..93a7bd91a892 100644 --- a/test/files/neg/protected-static-fail/S0.scala +++ b/test/files/neg/protected-static-fail/S0.scala @@ -6,4 +6,4 @@ object S1 { class S2 { protected def f2() = "hi mom" -} \ No newline at end of file +} diff --git a/test/files/neg/qmark-deprecated.check b/test/files/neg/qmark-deprecated.check new file mode 100644 index 000000000000..81053d19689a --- /dev/null +++ b/test/files/neg/qmark-deprecated.check @@ -0,0 +1,25 @@ +qmark-deprecated.scala:4: error: using `?` as a 
type name requires backticks. [quickfixable] +class Foo[?] // error + ^ +qmark-deprecated.scala:6: error: using `?` as a type name requires backticks. [quickfixable] +class Bar[M[?] <: List[?]] // error on the definition + ^ +qmark-deprecated.scala:10: error: using `?` as a type name requires backticks. [quickfixable] + class ? { val x = 1 } // error + ^ +qmark-deprecated.scala:16: error: using `?` as a type name requires backticks. [quickfixable] + trait ? // error + ^ +qmark-deprecated.scala:22: error: using `?` as a type name requires backticks. [quickfixable] + type ? = Int // error + ^ +qmark-deprecated.scala:33: error: using `?` as a type name requires backticks. [quickfixable] + def bar1[?] = {} // error + ^ +qmark-deprecated.scala:35: error: using `?` as a type name requires backticks. [quickfixable] + def bar3[M[?]] = {} // error + ^ +qmark-deprecated.scala:38: error: using `?` as a type name requires backticks. [quickfixable] + type A[?] = Int // error + ^ +8 errors diff --git a/test/files/neg/qmark-deprecated.scala b/test/files/neg/qmark-deprecated.scala new file mode 100644 index 000000000000..decba5fe92d0 --- /dev/null +++ b/test/files/neg/qmark-deprecated.scala @@ -0,0 +1,40 @@ +//> using options -deprecation -Xfatal-warnings +// + +class Foo[?] // error +class Foo2[`?`] // ok +class Bar[M[?] <: List[?]] // error on the definition +class Bar2[M[`?`] <: List[`?`]] // ok + +object G { + class ? { val x = 1 } // error +} +object G2 { + class `?` { val x = 1 } // ok +} +object H { + trait ? // error +} +object H2 { + trait `?` // ok +} +object I { + type ? = Int // error +} +object I2 { + type `?` = Int // ok + + val x: Array[?] = new Array[?](0) // no error reported here because we stop before running typer + val y: Array[`?`] = new Array[`?`](0) // ok + + def foo1[T <: Array[?]](x: T): Array[?] = x // ok + def foo2[T <: Array[`?`]](x: T): Array[`?`] = x // ok + + def bar1[?] 
= {} // error + def bar2[`?`] = {} // ok + def bar3[M[?]] = {} // error + def bar4[M[`?`]] = {} // error + + type A[?] = Int // error + type B[`?`] = Int // ok +} diff --git a/test/files/neg/qualifying-class-error-1.check b/test/files/neg/qualifying-class-error-1.check index c70db9ba6018..c803fa723e77 100644 --- a/test/files/neg/qualifying-class-error-1.check +++ b/test/files/neg/qualifying-class-error-1.check @@ -1,4 +1,4 @@ qualifying-class-error-1.scala:2: error: this can be used only in a class, object, or template class B extends A(this.getClass.getName.length) ^ -one error found +1 error diff --git a/test/files/neg/qualifying-class-error-2.check b/test/files/neg/qualifying-class-error-2.check index 50c275968563..3f51486b6b4e 100644 --- a/test/files/neg/qualifying-class-error-2.check +++ b/test/files/neg/qualifying-class-error-2.check @@ -1,4 +1,4 @@ qualifying-class-error-2.scala:9: error: A is not an enclosing class - protected[A] def f() {} + protected[A] def f(): Unit = {} ^ -one error found +1 error diff --git a/test/files/neg/qualifying-class-error-2.scala b/test/files/neg/qualifying-class-error-2.scala index d3aa8664bda8..9618f0a5f1a5 100644 --- a/test/files/neg/qualifying-class-error-2.scala +++ b/test/files/neg/qualifying-class-error-2.scala @@ -1,11 +1,11 @@ package A { trait X { - protected[A] def f() + protected[A] def f(): Unit } } package B { class Y extends A.X { - protected[A] def f() {} + protected[A] def f(): Unit = {} } } diff --git a/test/files/neg/quasiquotes-syntax-error-position.check b/test/files/neg/quasiquotes-syntax-error-position.check index b12a7d13d653..c45d1d62833f 100644 --- a/test/files/neg/quasiquotes-syntax-error-position.check +++ b/test/files/neg/quasiquotes-syntax-error-position.check @@ -1,7 +1,7 @@ quasiquotes-syntax-error-position.scala:5: error: '=' expected but identifier found. 
q"def $a f" ^ -quasiquotes-syntax-error-position.scala:6: error: illegal start of simple expression +quasiquotes-syntax-error-position.scala:6: error: ')' expected but end of quote found. q"$a(" ^ quasiquotes-syntax-error-position.scala:7: error: '}' expected but end of quote found. @@ -31,7 +31,7 @@ quasiquotes-syntax-error-position.scala:14: error: ')' expected but end of quote quasiquotes-syntax-error-position.scala:15: error: ':' expected but ')' found. q"def foo(x)" ^ -quasiquotes-syntax-error-position.scala:16: error: illegal start of simple expression +quasiquotes-syntax-error-position.scala:16: error: ')' expected but ']' found. q"$a(])" ^ quasiquotes-syntax-error-position.scala:17: error: in XML literal: '>' expected instead of '$' @@ -43,4 +43,4 @@ quasiquotes-syntax-error-position.scala:19: error: ';' expected but '<:' found. quasiquotes-syntax-error-position.scala:20: error: '=' expected but '.' found. q"def f ( $x ) . $x" ^ -15 errors found +15 errors diff --git a/test/files/neg/quasiquotes-unliftable-not-found.check b/test/files/neg/quasiquotes-unliftable-not-found.check index 5594aa1b1573..d594c723a0ce 100644 --- a/test/files/neg/quasiquotes-unliftable-not-found.check +++ b/test/files/neg/quasiquotes-unliftable-not-found.check @@ -1,4 +1,4 @@ quasiquotes-unliftable-not-found.scala:4: error: Can't find reflect.runtime.universe.Unliftable[Test.C], consider providing it val q"${c: C}" = q"()" ^ -one error found +1 error diff --git a/test/files/neg/quasiquotes-unliftable-not-found.scala b/test/files/neg/quasiquotes-unliftable-not-found.scala index 6a5efae43b6f..b2a1ccb47d75 100644 --- a/test/files/neg/quasiquotes-unliftable-not-found.scala +++ b/test/files/neg/quasiquotes-unliftable-not-found.scala @@ -2,4 +2,4 @@ object Test extends App { import scala.reflect.runtime.universe._ class C val q"${c: C}" = q"()" -} \ No newline at end of file +} diff --git a/test/files/neg/quickfix-silent.check b/test/files/neg/quickfix-silent.check new file mode 100644 
index 000000000000..7f8eb238c150 --- /dev/null +++ b/test/files/neg/quickfix-silent.check @@ -0,0 +1,11 @@ +quickfix-silent.scala:4: warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `f`'s return type + def f { println } + ^ +quickfix-silent.scala:4: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method println, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. + def f { println } + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/quickfix-silent.scala b/test/files/neg/quickfix-silent.scala new file mode 100644 index 000000000000..856eaa8cea96 --- /dev/null +++ b/test/files/neg/quickfix-silent.scala @@ -0,0 +1,5 @@ +//> using options -deprecation -Werror -quickfix:silent + +class C { + def f { println } +} diff --git a/test/files/neg/raw-types-stubs.check b/test/files/neg/raw-types-stubs.check index f1b26a23b755..9d677259edfb 100644 --- a/test/files/neg/raw-types-stubs.check +++ b/test/files/neg/raw-types-stubs.check @@ -1,11 +1,8 @@ -S_3.scala:1: error: class Sub needs to be abstract, since: -it has 2 unimplemented members. -/** As seen from class Sub, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ - def raw(x$1: M_1[_ <: String]): Unit = ??? - def raw(x$1: Any): Unit = ??? +S_3.scala:1: error: class Sub needs to be abstract. +Missing implementations for 2 members of class Raw_2. + def raw(x$1: M_1[_ <: String]): Unit = ??? // implements `def raw(x$1: M_1): Unit` + def raw(x$1: Object): Unit = ??? 
class Sub extends Raw_2 { } ^ -one error found +1 error diff --git a/test/files/neg/reassignment.check b/test/files/neg/reassignment.check index f0effd1459df..c2034118096f 100644 --- a/test/files/neg/reassignment.check +++ b/test/files/neg/reassignment.check @@ -10,4 +10,4 @@ reassignment.scala:4: error: not found: value y reassignment.scala:6: error: reassignment to val z = 51 ^ -four errors found +4 errors diff --git a/test/files/neg/reassignment.scala b/test/files/neg/reassignment.scala index e31eefbf3f6f..afb3542009ae 100644 --- a/test/files/neg/reassignment.scala +++ b/test/files/neg/reassignment.scala @@ -4,4 +4,4 @@ class A { y += 45 val z = 50 z = 51 -} \ No newline at end of file +} diff --git a/test/files/neg/recursive-method-default.check b/test/files/neg/recursive-method-default.check new file mode 100644 index 000000000000..f6e16f3109ee --- /dev/null +++ b/test/files/neg/recursive-method-default.check @@ -0,0 +1,27 @@ +recursive-method-default.scala:5: warning: Recursive call used default arguments instead of passing current argument values. + rec1(0) // warn + ^ +recursive-method-default.scala:10: warning: Recursive call used default arguments instead of passing current argument values. + rec2(0) // warn + ^ +recursive-method-default.scala:13: warning: Recursive call used default arguments instead of passing current argument values. + rec2(0) // warn + ^ +recursive-method-default.scala:16: warning: Recursive call used default arguments instead of passing current argument values. + rec2(0) // warn + ^ +recursive-method-default.scala:27: warning: Recursive call used default arguments instead of passing current argument values. + rec4(0)() // warn + ^ +recursive-method-default.scala:31: warning: Recursive call used default arguments instead of passing current argument values. + rec5(0)() // warn + ^ +recursive-method-default.scala:35: warning: Recursive call used default arguments instead of passing current argument values. 
+ rec6()(0) // warn + ^ +recursive-method-default.scala:39: warning: Recursive call used default arguments instead of passing current argument values. + rec7()(0) // one warning only + ^ +error: No warnings can be incurred under -Werror. +8 warnings +1 error diff --git a/test/files/neg/recursive-method-default.scala b/test/files/neg/recursive-method-default.scala new file mode 100644 index 000000000000..2187f4d1afa5 --- /dev/null +++ b/test/files/neg/recursive-method-default.scala @@ -0,0 +1,41 @@ +//> using options -Werror -Xlint:recurse-with-default +object Test { + def rec1(a: Any, b: Any = "".reverse): Any = { + rec1(0, 0) // okay + rec1(0) // warn + } + + def rec2(a: Any, b: Any = "".reverse): Any = { + def nested = { + rec2(0) // warn + } + object X { + rec2(0) // warn + } + class X { + rec2(0) // warn + } + } + + + def rec3(a: Any) = () + def rec3(a: Any, b: Any = "".reverse): Any = { + rec3(0) // okay + } + + def rec4(a: Any)(b: Any = "".reverse): Any = { + rec4(0)() // warn + } + + def rec5(a: Any)(b: Any = 0): Any = { + rec5(0)() // warn + } + + def rec6(a: Any = 0)(b: Any = 0): Any = { + rec6()(0) // warn + } + + def rec7(a: Any = 0)(b: Any = 0): Any = { + rec7()(0) // one warning only + } +} diff --git a/test/files/neg/ref-checks.check b/test/files/neg/ref-checks.check index ca298c4f843c..8ea6d6e02b00 100644 --- a/test/files/neg/ref-checks.check +++ b/test/files/neg/ref-checks.check @@ -1,7 +1,17 @@ -ref-checks.scala:8: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] +ref-checks.scala:9: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] @ann[Chars[Int]] val x = 42 ^ -ref-checks.scala:9: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] +ref-checks.scala:10: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] val y: Two[Chars[Long] 
@uncheckedBounds, Chars[Double]] = null ^ -two errors found +ref-checks.scala:11: error: type arguments [X forSome { type X <: Int }] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + def z: Chars[X forSome { type X <: Int }] = null + ^ +ref-checks.scala:18: warning: type DeprecatedAlias in object Test is deprecated + case _: DeprecatedAlias => + ^ +ref-checks.scala:19: warning: class DeprecatedClass in object Test is deprecated + case _: DeprecatedClass => + ^ +2 warnings +3 errors diff --git a/test/files/neg/ref-checks.scala b/test/files/neg/ref-checks.scala index 58e736ec1b54..42bab64d346d 100644 --- a/test/files/neg/ref-checks.scala +++ b/test/files/neg/ref-checks.scala @@ -1,4 +1,5 @@ -import scala.annotation.StaticAnnotation +//> using options -deprecation -Werror +import scala.annotation.{StaticAnnotation, nowarn} import scala.reflect.internal.annotations.uncheckedBounds object Test { @@ -7,4 +8,15 @@ object Test { class ann[A] extends StaticAnnotation @ann[Chars[Int]] val x = 42 val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null + def z: Chars[X forSome { type X <: Int }] = null + + @deprecated type DeprecatedAlias = String + @deprecated class DeprecatedClass + @nowarn("cat=deprecation") type UndeprecatedAlias = DeprecatedClass + + ("": Any) match { + case _: DeprecatedAlias => + case _: DeprecatedClass => + case _: UndeprecatedAlias => // no warning here + } } diff --git a/test/files/neg/reify_ann2b.check b/test/files/neg/reify_ann2b.check deleted file mode 100644 index d32bedaf8f4b..000000000000 --- a/test/files/neg/reify_ann2b.check +++ /dev/null @@ -1,4 +0,0 @@ -reify_ann2b.scala:9: error: inner classes cannot be classfile annotations - class ann(bar: String) extends annotation.ClassfileAnnotation - ^ -one error found diff --git a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check index 75b7555b01d4..df10e37778ef 100644 --- 
a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check +++ b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check @@ -4,4 +4,4 @@ if you're sure this is not an oversight, add scala-compiler.jar to the classpath import `scala.tools.reflect.Eval` and call `.eval` instead. inner.splice ^ -one error found +1 error diff --git a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala index e4d1edffc408..409a03e235d4 100644 --- a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala +++ b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala @@ -13,4 +13,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check index ca5556db022e..0180d5fb4049 100644 --- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check +++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check @@ -4,4 +4,4 @@ if you're sure this is not an oversight, add scala-compiler.jar to the classpath import `scala.tools.reflect.Eval` and call `.eval` instead. 
val code = reify{outer.splice.splice} ^ -one error found +1 error diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala index 739744158671..297db6b72e0c 100644 --- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala +++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala @@ -11,4 +11,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check index e34cb1ac1e7b..3430a546c98d 100644 --- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check +++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check @@ -4,4 +4,4 @@ if you're sure this is not an oversight, add scala-compiler.jar to the classpath import `scala.tools.reflect.Eval` and call `.eval` instead. 
}.splice ^ -one error found +1 error diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala index 4f27a44f0cd8..aab0778f6224 100644 --- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala +++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala @@ -15,4 +15,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check index 90b0e8dac636..90e40bf7a799 100644 --- a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check +++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check @@ -4,4 +4,4 @@ if you're sure this is not an oversight, add scala-compiler.jar to the classpath import `scala.tools.reflect.Eval` and call `.eval` instead. inner.splice.splice ^ -one error found +1 error diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala index 2f637301aaee..806caf5c6fd7 100644 --- a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala +++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala @@ -13,4 +13,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/neg/reify_nested_inner_refers_to_local.check b/test/files/neg/reify_nested_inner_refers_to_local.check index 68689b18d008..f3fae36824e9 100644 --- a/test/files/neg/reify_nested_inner_refers_to_local.check +++ b/test/files/neg/reify_nested_inner_refers_to_local.check @@ -4,4 +4,4 @@ if you're sure this is not an oversight, add scala-compiler.jar to the classpath import `scala.tools.reflect.Eval` and call `.eval` instead. 
reify{x}.splice ^ -one error found +1 error diff --git a/test/files/neg/reify_nested_inner_refers_to_local.scala b/test/files/neg/reify_nested_inner_refers_to_local.scala index 75ed1bf33088..526aba72394e 100644 --- a/test/files/neg/reify_nested_inner_refers_to_local.scala +++ b/test/files/neg/reify_nested_inner_refers_to_local.scala @@ -12,4 +12,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/neg/run-gadts-strict.check b/test/files/neg/run-gadts-strict.check deleted file mode 100644 index bad78547a6ff..000000000000 --- a/test/files/neg/run-gadts-strict.check +++ /dev/null @@ -1,21 +0,0 @@ -run-gadts-strict.scala:13: error: type mismatch; - found : n.type (with underlying type Int) - required: T - case Lit(n) => n - ^ -run-gadts-strict.scala:14: error: type mismatch; - found : Int - required: T - case Succ(u) => eval(u) + 1 - ^ -run-gadts-strict.scala:15: error: type mismatch; - found : Boolean - required: T - case IsZero(u) => eval(u) == 0 - ^ -run-gadts-strict.scala:16: error: type mismatch; - found : T(in class If) - required: T(in method eval) - case If(c, u1, u2) => eval(if (eval(c)) u1 else u2) - ^ -four errors found diff --git a/test/files/neg/run-gadts-strict.scala b/test/files/neg/run-gadts-strict.scala deleted file mode 100644 index 4fc65bf8f075..000000000000 --- a/test/files/neg/run-gadts-strict.scala +++ /dev/null @@ -1,19 +0,0 @@ -// scalac: -Xstrict-inference -// A copy of run/gadts.scala, which must fail under -Xstrict-inference. 
-abstract class Term[T] -case class Lit(x: Int) extends Term[Int] -case class Succ(t: Term[Int]) extends Term[Int] -case class IsZero(t: Term[Int]) extends Term[Boolean] -case class If[T](c: Term[Boolean], - t1: Term[T], - t2: Term[T]) extends Term[T] - -object Test extends App { - def eval[T](t: Term[T]): T = t match { - case Lit(n) => n - case Succ(u) => eval(u) + 1 - case IsZero(u) => eval(u) == 0 - case If(c, u1, u2) => eval(if (eval(c)) u1 else u2) - } - println(eval(If(IsZero(Lit(1)), Lit(41), Succ(Lit(41))))) -} diff --git a/test/files/neg/sabin2.check b/test/files/neg/sabin2.check index cd6fde4608a9..b8d75dbf7d6f 100644 --- a/test/files/neg/sabin2.check +++ b/test/files/neg/sabin2.check @@ -3,4 +3,4 @@ sabin2.scala:22: error: type mismatch; required: _1.T where val _1: Test.Base a.set(b.get()) // Error ^ -one error found +1 error diff --git a/test/files/neg/saferJavaConversions.check b/test/files/neg/saferJavaConversions.check deleted file mode 100644 index 0e53d2c4379b..000000000000 --- a/test/files/neg/saferJavaConversions.check +++ /dev/null @@ -1,6 +0,0 @@ -saferJavaConversions.scala:13: error: type mismatch; - found : String("a") - required: Foo - val v = map.get("a") // now this is a type error - ^ -one error found diff --git a/test/files/neg/saferJavaConversions.scala b/test/files/neg/saferJavaConversions.scala deleted file mode 100644 index b70a918404e9..000000000000 --- a/test/files/neg/saferJavaConversions.scala +++ /dev/null @@ -1,20 +0,0 @@ - -case class Foo(s: String) - -object Test { - def f1 = { - import scala.collection.convert.ImplicitConversions._ - val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b") - val v = map.get("a") // should be a type error, actually returns null - } - def f2 = { - import scala.collection.convert.ImplicitConversionsToScala._ - val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b") - val v = map.get("a") // now this is a type error - } - def f3 = { - import 
scala.collection.convert.ImplicitConversionsToJava._ - val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b") - val v = map.get("a") - } -} diff --git a/test/files/neg/saito.check b/test/files/neg/saito.check index 061a45b76ec4..34eee590e369 100644 --- a/test/files/neg/saito.check +++ b/test/files/neg/saito.check @@ -1,4 +1,4 @@ saito.scala:10: error: class A cannot be instantiated because it does not conform to its self-type A with B val a: A = new A; // should not be allowed ^ -one error found +1 error diff --git a/test/files/neg/sammy_disabled.check b/test/files/neg/sammy_disabled.check deleted file mode 100644 index e8792e9e30f6..000000000000 --- a/test/files/neg/sammy_disabled.check +++ /dev/null @@ -1,4 +0,0 @@ -sammy_disabled.scala:4: error: missing parameter type -class C { val f: F = x => "a" } - ^ -one error found diff --git a/test/files/neg/sammy_disabled.scala b/test/files/neg/sammy_disabled.scala deleted file mode 100644 index fc97a6b2d700..000000000000 --- a/test/files/neg/sammy_disabled.scala +++ /dev/null @@ -1,4 +0,0 @@ -// scalac: -Xsource:2.11 -trait F { def apply(x: Int): String } - -class C { val f: F = x => "a" } diff --git a/test/files/neg/sammy_error.check b/test/files/neg/sammy_error.check index f14ac7e3a279..894d83aae30e 100644 --- a/test/files/neg/sammy_error.check +++ b/test/files/neg/sammy_error.check @@ -1,4 +1,11 @@ sammy_error.scala:6: error: missing parameter type foo(x => x) // should result in only one error (the second one stemmed from adapting to SAM when the tree was erroneous) ^ -one error found +sammy_error.scala:6: error: type mismatch; + found : F1[Any,Nothing] + required: F1[Any,Any] +Note: Nothing <: Any, but trait F1 is invariant in type B. +You may wish to define B as +B instead. 
(SLS 4.5) + foo(x => x) // should result in only one error (the second one stemmed from adapting to SAM when the tree was erroneous) + ^ +2 errors diff --git a/test/files/neg/sammy_error_exist_no_crash.check b/test/files/neg/sammy_error_exist_no_crash.check index 944b6471fda1..9818c855e908 100644 --- a/test/files/neg/sammy_error_exist_no_crash.check +++ b/test/files/neg/sammy_error_exist_no_crash.check @@ -1,4 +1,4 @@ -sammy_error_exist_no_crash.scala:5: error: value parseInt is not a member of String - bar(_.parseInt) +sammy_error_exist_no_crash.scala:5: error: value tryParseInt is not a member of String + bar(_.tryParseInt) ^ -one error found +1 error diff --git a/test/files/neg/sammy_error_exist_no_crash.scala b/test/files/neg/sammy_error_exist_no_crash.scala index 667b4db7635e..d2a771d7c441 100644 --- a/test/files/neg/sammy_error_exist_no_crash.scala +++ b/test/files/neg/sammy_error_exist_no_crash.scala @@ -2,5 +2,5 @@ trait F[T] { def apply(s: T): Int } object NeedsNiceError { def bar(x: F[_ >: String]) = ??? 
- bar(_.parseInt) + bar(_.tryParseInt) } diff --git a/test/files/neg/sammy_expected.check b/test/files/neg/sammy_expected.check index 3b76aabdd2a3..4715f9ff4899 100644 --- a/test/files/neg/sammy_expected.check +++ b/test/files/neg/sammy_expected.check @@ -3,4 +3,4 @@ sammy_expected.scala:4: error: type mismatch; required: F[Object,Int] def wrong: F[Object, Int] = (x: String) => 1 ^ -one error found +1 error diff --git a/test/files/neg/sammy_expected.scala b/test/files/neg/sammy_expected.scala index 8fc1f66ff741..174261f7325a 100644 --- a/test/files/neg/sammy_expected.scala +++ b/test/files/neg/sammy_expected.scala @@ -2,4 +2,4 @@ trait F[A, B] { def apply(x: A): B } class MustMeetExpected { def wrong: F[Object, Int] = (x: String) => 1 -} \ No newline at end of file +} diff --git a/test/files/neg/sammy_overload.check b/test/files/neg/sammy_overload.check index 87b198f4f05f..1a2429bceffe 100644 --- a/test/files/neg/sammy_overload.check +++ b/test/files/neg/sammy_overload.check @@ -1,7 +1,7 @@ -sammy_overload.scala:14: error: overloaded method value m with alternatives: +sammy_overload.scala:14: error: overloaded method m with alternatives: (x: ToString)Int (x: Int => String)Int cannot be applied to (Int => Int) O.m(x => x) // error expected: m cannot be applied to Int => Int ^ -one error found +1 error diff --git a/test/files/neg/sammy_restrictions.check b/test/files/neg/sammy_restrictions.check index 0225c61ac1c3..3e865b1b9f9b 100644 --- a/test/files/neg/sammy_restrictions.check +++ b/test/files/neg/sammy_restrictions.check @@ -1,62 +1,74 @@ -sammy_restrictions.scala:38: error: type mismatch; +sammy_restrictions.scala:40: error: type mismatch; found : () => Int required: NoAbstract - (() => 0) : NoAbstract - ^ -sammy_restrictions.scala:39: error: type mismatch; + def f0 = (() => 0) : NoAbstract + ^ +sammy_restrictions.scala:41: error: type mismatch; found : Int => Int required: TwoAbstract - ((x: Int) => 0): TwoAbstract - ^ -sammy_restrictions.scala:40: error: type 
mismatch; + def f1 = ((x: Int) => 0): TwoAbstract + ^ +sammy_restrictions.scala:42: error: type mismatch; found : Int => Int required: NoEmptyConstructor - ((x: Int) => 0): NoEmptyConstructor - ^ -sammy_restrictions.scala:41: error: type mismatch; + def f2 = ((x: Int) => 0): NoEmptyConstructor + ^ +sammy_restrictions.scala:43: error: type mismatch; found : Int => Int required: MultipleConstructorLists - ((x: Int) => 0): MultipleConstructorLists - ^ -sammy_restrictions.scala:42: error: type mismatch; + def f3 = ((x: Int) => 0): MultipleConstructorLists + ^ +sammy_restrictions.scala:44: error: type mismatch; found : Int => Int required: OneEmptySecondaryConstructor - ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call. - ^ -sammy_restrictions.scala:43: error: type mismatch; + def f4 = ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call. + ^ +sammy_restrictions.scala:45: error: type mismatch; found : Int => Int required: MultipleMethodLists - ((x: Int) => 0): MultipleMethodLists - ^ -sammy_restrictions.scala:44: error: type mismatch; + def f5 = ((x: Int) => 0): MultipleMethodLists + ^ +sammy_restrictions.scala:46: error: type mismatch; found : Int => Int required: ImplicitConstructorParam - ((x: Int) => 0): ImplicitConstructorParam - ^ -sammy_restrictions.scala:45: error: type mismatch; + def f6 = ((x: Int) => 0): ImplicitConstructorParam + ^ +sammy_restrictions.scala:47: error: type mismatch; found : Int => Int required: ImplicitMethodParam - ((x: Int) => 0): ImplicitMethodParam - ^ -sammy_restrictions.scala:46: error: type mismatch; + def f7 = ((x: Int) => 0): ImplicitMethodParam + ^ +sammy_restrictions.scala:48: error: type mismatch; found : Int => Int required: PolyMethod - ((x: Int) => 0): PolyMethod - ^ -sammy_restrictions.scala:47: error: type mismatch; + def f8 = ((x: Int) => 0): PolyMethod + ^ +sammy_restrictions.scala:49: error: type mismatch; found : Int => Int 
required: SelfTp - ((x: Int) => 0): SelfTp - ^ -sammy_restrictions.scala:48: error: type mismatch; + def f9 = ((x: Int) => 0): SelfTp + ^ +sammy_restrictions.scala:50: error: type mismatch; found : Int => Int required: T1 with U1 - ((x: Int) => 0): T1 with U1 - ^ -sammy_restrictions.scala:49: error: type mismatch; + def g0 = ((x: Int) => 0): T1 with U1 + ^ +sammy_restrictions.scala:51: error: type mismatch; found : Int => Int required: Test.NonClassTypeRefinement (which expands to) DerivedOneAbstract with OneAbstract - ((x: Int) => 0): NonClassTypeRefinement - ^ -12 errors found + def g1 = ((x: Int) => 0): NonClassTypeRefinement + ^ +sammy_restrictions.scala:52: error: type mismatch; + found : Int => Int + required: Test.NonOverridingMethodRefinement + (which expands to) OneAbstract{def apples(): Int} + def h1 = ((x: Int) => 0): NonOverridingMethodRefinement + ^ +sammy_restrictions.scala:53: error: type mismatch; + found : Int => Int + required: Test.OverridingMethodRefinement + (which expands to) OneAbstract{def ap(a: Int): Int} + def h2 = ((x: Int) => 0): OverridingMethodRefinement + ^ +14 errors diff --git a/test/files/neg/sammy_restrictions.scala b/test/files/neg/sammy_restrictions.scala index dee4f1f24739..5e6a549568db 100644 --- a/test/files/neg/sammy_restrictions.scala +++ b/test/files/neg/sammy_restrictions.scala @@ -32,26 +32,30 @@ trait T1 { def t(a: Int): Int }; trait U1 object Test { implicit val s: String = "" type NonClassTypeRefinement = DerivedOneAbstract with OneAbstract + type NonOverridingMethodRefinement = OneAbstract { def apples(): Int } + type OverridingMethodRefinement = OneAbstract { def ap(a: Int): Int } // allowed in Scala 3 type NonClassType = DerivedOneAbstract // errors: - (() => 0) : NoAbstract - ((x: Int) => 0): TwoAbstract - ((x: Int) => 0): NoEmptyConstructor - ((x: Int) => 0): MultipleConstructorLists - ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call. 
- ((x: Int) => 0): MultipleMethodLists - ((x: Int) => 0): ImplicitConstructorParam - ((x: Int) => 0): ImplicitMethodParam - ((x: Int) => 0): PolyMethod - ((x: Int) => 0): SelfTp - ((x: Int) => 0): T1 with U1 - ((x: Int) => 0): NonClassTypeRefinement + def f0 = (() => 0) : NoAbstract + def f1 = ((x: Int) => 0): TwoAbstract + def f2 = ((x: Int) => 0): NoEmptyConstructor + def f3 = ((x: Int) => 0): MultipleConstructorLists + def f4 = ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call. + def f5 = ((x: Int) => 0): MultipleMethodLists + def f6 = ((x: Int) => 0): ImplicitConstructorParam + def f7 = ((x: Int) => 0): ImplicitMethodParam + def f8 = ((x: Int) => 0): PolyMethod + def f9 = ((x: Int) => 0): SelfTp + def g0 = ((x: Int) => 0): T1 with U1 + def g1 = ((x: Int) => 0): NonClassTypeRefinement + def h1 = ((x: Int) => 0): NonOverridingMethodRefinement + def h2 = ((x: Int) => 0): OverridingMethodRefinement // allowed: - ((x: Int) => 0): OneEmptyConstructor - ((x: Int) => 0): DerivedOneAbstract - ((x: Int) => 0): NonClassType // we also allow type aliases in instantiation expressions, if they resolve to a class type - ((x: Int) => 0): PolyClass[Int] - ((x: Int) => 0): SelfVar + def g2 = ((x: Int) => 0): OneEmptyConstructor + def g3 = ((x: Int) => 0): DerivedOneAbstract + def g4 = ((x: Int) => 0): NonClassType // we also allow type aliases in instantiation expressions, if they resolve to a class type + def g5 = ((x: Int) => 0): PolyClass[Int] + def g6 = ((x: Int) => 0): SelfVar } diff --git a/test/files/neg/sammy_wrong_arity.check b/test/files/neg/sammy_wrong_arity.check index af547a201dbe..e698871fc1a9 100644 --- a/test/files/neg/sammy_wrong_arity.check +++ b/test/files/neg/sammy_wrong_arity.check @@ -1,52 +1,77 @@ sammy_wrong_arity.scala:6: error: type mismatch; found : () => Int required: T1 - (() => 0): T1 - ^ + def f0 = (() => 0): T1 + ^ sammy_wrong_arity.scala:7: error: type mismatch; found : Any => Int required: T2 - 
((x: Any) => 0): T2 - ^ + def f1 = ((x: Any) => 0): T2 + ^ sammy_wrong_arity.scala:9: error: type mismatch; found : Any => Int required: T0 - ((x: Any) => 0): T0 - ^ + def f2 = ((x: Any) => 0): T0 + ^ sammy_wrong_arity.scala:10: error: type mismatch; found : Any => Int required: T2 - ((x: Any) => 0): T2 - ^ + def f3 = ((x: Any) => 0): T2 + ^ sammy_wrong_arity.scala:12: error: type mismatch; found : (Any, Any) => Int required: T0 - ((x: Any, y: Any) => 0): T0 - ^ + def f4 = ((x: Any, y: Any) => 0): T0 + ^ sammy_wrong_arity.scala:13: error: type mismatch; found : (Any, Any) => Int required: T1 - ((x: Any, y: Any) => 0): T1 - ^ + def f5 = ((x: Any, y: Any) => 0): T1 + ^ sammy_wrong_arity.scala:15: error: missing parameter type - ((x) => 0): T2 - ^ + def f6 = ((x) => 0): T2 + ^ +sammy_wrong_arity.scala:15: error: type mismatch; + found : ? => ? + required: T2 + def f6 = ((x) => 0): T2 + ^ sammy_wrong_arity.scala:17: error: missing parameter type - ((x) => 0): T0 - ^ + def f7 = ((x) => 0): T0 + ^ +sammy_wrong_arity.scala:17: error: type mismatch; + found : ? => ? + required: T0 + def f7 = ((x) => 0): T0 + ^ sammy_wrong_arity.scala:18: error: missing parameter type - ((x) => 0): T2 - ^ + def f8 = ((x) => 0): T2 + ^ +sammy_wrong_arity.scala:18: error: type mismatch; + found : ? => ? + required: T2 + def f8 = ((x) => 0): T2 + ^ sammy_wrong_arity.scala:20: error: missing parameter type - ((x, y) => 0): T0 - ^ + def f9 = ((x, y) => 0): T0 + ^ sammy_wrong_arity.scala:20: error: missing parameter type - ((x, y) => 0): T0 - ^ + def f9 = ((x, y) => 0): T0 + ^ +sammy_wrong_arity.scala:20: error: type mismatch; + found : (?, ?) => ? 
+ required: T0 + def f9 = ((x, y) => 0): T0 + ^ sammy_wrong_arity.scala:21: error: missing parameter type - ((x, y) => 0): T1 - ^ + def g0 = ((x, y) => 0): T1 + ^ sammy_wrong_arity.scala:21: error: missing parameter type - ((x, y) => 0): T1 - ^ -13 errors found + def g0 = ((x, y) => 0): T1 + ^ +sammy_wrong_arity.scala:21: error: type mismatch; + found : (?, ?) => ? + required: T1 + def g0 = ((x, y) => 0): T1 + ^ +18 errors diff --git a/test/files/neg/sammy_wrong_arity.scala b/test/files/neg/sammy_wrong_arity.scala index d03d266a0ba8..4465fcee77b7 100644 --- a/test/files/neg/sammy_wrong_arity.scala +++ b/test/files/neg/sammy_wrong_arity.scala @@ -3,20 +3,20 @@ trait T1 { def ap(a: Any): Int } trait T2 { def ap(a: Any, b: Any): Int } class Test { - (() => 0): T1 - ((x: Any) => 0): T2 + def f0 = (() => 0): T1 + def f1 = ((x: Any) => 0): T2 - ((x: Any) => 0): T0 - ((x: Any) => 0): T2 + def f2 = ((x: Any) => 0): T0 + def f3 = ((x: Any) => 0): T2 - ((x: Any, y: Any) => 0): T0 - ((x: Any, y: Any) => 0): T1 + def f4 = ((x: Any, y: Any) => 0): T0 + def f5 = ((x: Any, y: Any) => 0): T1 - ((x) => 0): T2 + def f6 = ((x) => 0): T2 - ((x) => 0): T0 - ((x) => 0): T2 + def f7 = ((x) => 0): T0 + def f8 = ((x) => 0): T2 - ((x, y) => 0): T0 - ((x, y) => 0): T1 + def f9 = ((x, y) => 0): T0 + def g0 = ((x, y) => 0): T1 } diff --git a/test/files/neg/scala3-keywords.check b/test/files/neg/scala3-keywords.check new file mode 100644 index 000000000000..9bc981ad86f3 --- /dev/null +++ b/test/files/neg/scala3-keywords.check @@ -0,0 +1,21 @@ +scala3-keywords.scala:15: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. [quickfixable] + val enum: Int = 1 // error + ^ +scala3-keywords.scala:16: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. 
[quickfixable] + val export: Int = 1 // error + ^ +scala3-keywords.scala:17: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. [quickfixable] + val given: Int = 1 // error + ^ +scala3-keywords.scala:18: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. [quickfixable] + def foo(given: Int) = {} // error + ^ +scala3-keywords.scala:19: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. [quickfixable] + def bla[export <: Int] = {} // error + ^ +scala3-keywords.scala:21: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. [quickfixable] +class enum // error + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/scala3-keywords.scala b/test/files/neg/scala3-keywords.scala new file mode 100644 index 000000000000..6c37baaa5515 --- /dev/null +++ b/test/files/neg/scala3-keywords.scala @@ -0,0 +1,21 @@ +//> using options -deprecation -Xfatal-warnings +// +class A { + val `enum`: Int = 1 + println(enum) + val `export`: Int = 1 + val `given`: Int = 1 + def foo(`given`: Int) = given + def bla[`export` <: Int] = { + class `enum` + new enum + } +} +class B { + val enum: Int = 1 // error + val export: Int = 1 // error + val given: Int = 1 // error + def foo(given: Int) = {} // error + def bla[export <: Int] = {} // error +} +class enum // error diff --git a/test/files/neg/scopes.check b/test/files/neg/scopes.check index 2f2eaa758f68..e64cfbda35b3 100644 --- a/test/files/neg/scopes.check +++ b/test/files/neg/scopes.check @@ -19,4 +19,4 @@ scopes.scala:13: error: x is already defined as value x scopes.scala:15: error: x is already defined as value x case x::x => x ^ -7 errors found +7 errors diff --git a/test/files/neg/sd128.check b/test/files/neg/sd128.check index 8f6fcb121348..14744dbc5e9e 100644 --- a/test/files/neg/sd128.check +++ 
b/test/files/neg/sd128.check @@ -1,17 +1,16 @@ Test.scala:4: error: class C1 inherits conflicting members: - method f in trait A of type ()Int and - method f in trait T of type => Int -(Note: this can be resolved by declaring an override in class C1.) + def f(): Int (defined in trait A) and + def f: Int (defined in trait T) + (note: this can be resolved by declaring an `override` in class C1.) class C1 extends A with T // error ^ -Test.scala:5: error: class C2 inherits conflicting members: - method f in trait T of type => Int and - method f in trait A of type ()Int -(Note: this can be resolved by declaring an override in class C2.) +Test.scala:5: error: cannot override a concrete member without a third member that's overridden by both (this rule is designed to prevent accidental overrides) +def f: Int (defined in trait T) + with def f(): Int (defined in trait A) class C2 extends T with A // error ^ -Test.scala:14: error: overriding method f in trait A of type ()Int; - method f needs `override' modifier +Test.scala:14: error: `override` modifier required to override concrete member: + def f(): Int (defined in trait A) def f() = 9999 // need override modifier ^ -three errors found +3 errors diff --git a/test/files/neg/sd503.check b/test/files/neg/sd503.check new file mode 100644 index 000000000000..b74ef4328665 --- /dev/null +++ b/test/files/neg/sd503.check @@ -0,0 +1,44 @@ +sd503.scala:18: error: not enough arguments for method x_=: (i: Int, j: Int): Unit. +Unspecified value parameter j. 
+ def f4() = c.x = (42, 27) // missing arg + ^ +sd503.scala:23: error: type mismatch; + found : (Int, Int) + required: Int + def f7() = d.x = (42, 27) // type mismatch (same as doti) + ^ +sd503.scala:9: warning: multiarg infix syntax looks like a tuple + def % (i: Int, j: Int) = i + j // operator, warn + ^ +sd503.scala:13: warning: multiarg infix syntax looks like a tuple + def f1(t: T) = t m (1, 2) // multiarg, warn + ^ +sd503.scala:15: warning: multiarg infix syntax looks like a tuple + def f3(t: T) = t % (1, 2) // multiarg, warn + ^ +sd503.scala:19: warning: multiarg infix syntax looks like a tuple + def f5() = c x_= (42, 27) // multiarg, warn + ^ +sd503.scala:54: warning: multiarg infix syntax looks like a tuple + def +=(x: A, y: A, zs: A*): this.type = addAll(x +: y +: zs) // very multiarg, warn + ^ +sd503.scala:59: warning: multiarg infix syntax looks like a tuple + def f[A](as: Embiggen[A], x: A, y: A, z: A): as.type = as += (x, y, z) // very multiarg, warn + ^ +sd503.scala:70: warning: multiarg infix syntax looks like a tuple + def f(x: A, y: A, zs: A*): this.type = this += (x, y, zs: _*) // warn but could defer to deprecation + ^ +sd503.scala:80: warning: multiarg infix syntax looks like a tuple + def f = this lines_! (42, 27) // warn usage, of course + ^ +sd503.scala:86: warning: multiarg infix syntax looks like a tuple + def +(i: Int, j: Int): Adder = new Adder(c + i*j) // warn multiarg symbolic def + ^ +sd503.scala:92: warning: multiarg infix syntax looks like a tuple + x = x + (3, 9) // warn multiarg infix apply + ^ +sd503.scala:102: warning: multiarg infix syntax looks like a tuple + x += (3, 9) // warn multiarg infix assignment! 
+ ^ +11 warnings +2 errors diff --git a/test/files/neg/sd503.scala b/test/files/neg/sd503.scala new file mode 100644 index 000000000000..50f04096cf2f --- /dev/null +++ b/test/files/neg/sd503.scala @@ -0,0 +1,105 @@ +//> using options -Wmultiarg-infix +// +// lint multiarg infix syntax, e.g., vs += (1, 2, 3) +// Since infix is encouraged by symbolic operator names, discourage defining def += (x: A, y: A, zs: A*) + +trait T { + def m(i: Int, j: Int) = i + j + + def % (i: Int, j: Int) = i + j // operator, warn + + + def f0(t: T) = t.m(1, 2) // ok + def f1(t: T) = t m (1, 2) // multiarg, warn + def f2(t: T) = t.%(1, 2) // ok + def f3(t: T) = t % (1, 2) // multiarg, warn + + def c = new C + def f4() = c.x = (42, 27) // missing arg + def f5() = c x_= (42, 27) // multiarg, warn + + def d = new D + def f6() = d.x = 42 // ok! + def f7() = d.x = (42, 27) // type mismatch (same as doti) +} + +class C { + private var value: Int = _ + def x: Int = value + def x_=(i: Int, j: Int): Unit = value = i + j // multiarg, but don't warn +} +class D { + private var devalue: Int = _ // d.value + def x: Int = devalue + def x_=(i: Int, j: Int = 1): Unit = devalue = i + j // multiarg, but don't warn +} + +// If the application is adapted such that what looks like a tuple is taken as a tuple, +// then don't warn; eventually this will be normal behavior. +trait OK { + def f(p: (Int, Int)) = p == (42, 17) // nowarn! + def g(ps: Embiggen[(Int, Int)]) = ps :+ (42, 17) // nowarn! + + def ?[A: List: reflect.ClassTag] = ??? // nowarn! + + def calculate(): Int = { + def ++(i: Int, j: Int): Int = i + j // nowarn! 
+ ++ (42, 17) + } +} + +// Growable is the paradigmatic example +trait Embiggen[A] { + def addAll(as: Seq[A]): this.type + def +=(x: A, y: A, zs: A*): this.type = addAll(x +: y +: zs) // very multiarg, warn + def :+(a: A): Embiggen[A] +} + +trait NotOK { + def f[A](as: Embiggen[A], x: A, y: A, z: A): as.type = as += (x, y, z) // very multiarg, warn +} + +// Don't warn if deprecated or not used +trait Exceptions[A] { + def addAll(as: Seq[A]): this.type + @deprecated("Quit using old infix style!", since="lately") + def +=(x: A, y: A, zs: A*): this.type = addAll(x +: y +: zs) // nowarn! + @annotation.unused + private def ++=(x: A, y: A, zs: A*): this.type = addAll(x +: y +: zs) // nowarn! + + def f(x: A, y: A, zs: A*): this.type = this += (x, y, zs: _*) // warn but could defer to deprecation +} +@deprecated("Quit using old infix style!", since="lately") +trait AlsoExceptions[A] { + def addAll(as: Seq[A]): this.type + def +=(x: A, y: A, zs: A*): this.type = addAll(x +: y +: zs) // nowarn! +} + +trait WhyNamingIsHard { + def lines_!(x: Int, y: Int): List[String] = ??? // nowarn, give it a pass + def f = this lines_! (42, 27) // warn usage, of course +} + +class A(a: Int, b: Int)(c: Int = 1) // nowarn on def $default$3(a: Int, b: Int): Int = 1 + +case class Adder(c: Int) { + def +(i: Int, j: Int): Adder = new Adder(c + i*j) // warn multiarg symbolic def +} + +object Test extends App { + println { + var x = new Adder(42) + x = x + (3, 9) // warn multiarg infix apply + x + } + println { + var x = new Adder(42) + x.+=(3, 9) // nowarn + x + } + println { + var x = new Adder(42) + x += (3, 9) // warn multiarg infix assignment! 
+ x + } +} diff --git a/test/files/neg/sealed-final-neg.check b/test/files/neg/sealed-final-neg.check index c2291da6bc8c..a5479dca0c88 100644 --- a/test/files/neg/sealed-final-neg.check +++ b/test/files/neg/sealed-final-neg.check @@ -1,11 +1,11 @@ -sealed-final-neg.scala:18: warning: neg1/Foo::bar(I)I is annotated @inline but could not be inlined: +sealed-final-neg.scala:19: warning: neg1/Foo::bar(I)I is annotated @inline but could not be inlined: The method is not final and may be overridden. def f = Foo.mkFoo() bar 10 ^ -sealed-final-neg.scala:38: warning: neg2/Foo::bar(I)I is annotated @inline but could not be inlined: +sealed-final-neg.scala:39: warning: neg2/Foo::bar(I)I is annotated @inline but could not be inlined: The method is not final and may be overridden. def f = Foo.mkFoo() bar 10 ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/sealed-final-neg.scala b/test/files/neg/sealed-final-neg.scala index 05f9b4e6de88..1698f46712cc 100644 --- a/test/files/neg/sealed-final-neg.scala +++ b/test/files/neg/sealed-final-neg.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -opt:l:inline -opt-inline-from:** -opt-warnings +//> using options -Werror -opt:inline:** -Wopt +// package neg1 { sealed abstract class Foo { @inline def bar(x: Int) = x + 1 diff --git a/test/files/neg/sealed-java-enums.check b/test/files/neg/sealed-java-enums.check index 4c8670665402..5d94d3c7b2fb 100644 --- a/test/files/neg/sealed-java-enums.check +++ b/test/files/neg/sealed-java-enums.check @@ -1,7 +1,7 @@ -sealed-java-enums.scala:6: warning: match may not be exhaustive. +sealed-java-enums.scala:7: warning: match may not be exhaustive. It would fail on the following inputs: BLOCKED, TERMINATED, TIMED_WAITING def f(state: State) = state match { ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/sealed-java-enums.scala b/test/files/neg/sealed-java-enums.scala index e07a465f0064..7aead780d91c 100644 --- a/test/files/neg/sealed-java-enums.scala +++ b/test/files/neg/sealed-java-enums.scala @@ -1,4 +1,5 @@ -// scalac: -Xexperimental -Xfatal-warnings +//> using options -Werror +// import java.lang.Thread.State import java.lang.Thread.State._ diff --git a/test/files/neg/sensitive.check b/test/files/neg/sensitive.check index 32d988ec97b0..28921af2e4ba 100644 --- a/test/files/neg/sensitive.check +++ b/test/files/neg/sensitive.check @@ -1,4 +1,4 @@ -sensitive.scala:17: error: constructor Sensitive in class Sensitive cannot be accessed in object Attacker +sensitive.scala:17: error: constructor Sensitive in class Sensitive cannot be accessed in object Attacker from object Attacker val y = new Sensitive() ^ -one error found +1 error diff --git a/test/files/neg/sensitive2.check b/test/files/neg/sensitive2.check index 19152fe18846..e28f8de28941 100644 --- a/test/files/neg/sensitive2.check +++ b/test/files/neg/sensitive2.check @@ -2,9 +2,9 @@ sensitive2.scala:6: error: type mismatch; found : String("abc") required: Test.Foo[_] Note that implicit conversions are not applicable because they are ambiguous: - both method foo1 in object Test of type [A](a: A)Test.Foo[A] - and method foo2 in object Test of type (a: Any)Test.Foo[String] + both method foo1 in object Test of type [A](a: A): Test.Foo[A] + and method foo2 in object Test of type (a: Any): Test.Foo[String] are possible conversion functions from String("abc") to Test.Foo[_] val a: Foo[_] = "abc" ^ -one error found +1 error diff --git a/test/files/neg/sensitive2.scala b/test/files/neg/sensitive2.scala index 92b91bef2031..e0cf515bd078 100644 --- a/test/files/neg/sensitive2.scala +++ b/test/files/neg/sensitive2.scala @@ -5,4 +5,4 @@ object Test { val a: Foo[_] = "abc" -} \ No newline at 
end of file +} diff --git a/test/files/neg/serialversionuid-not-const.check b/test/files/neg/serialversionuid-not-const.check index 9c383d97adf4..964c3a659ae2 100644 --- a/test/files/neg/serialversionuid-not-const.check +++ b/test/files/neg/serialversionuid-not-const.check @@ -1,10 +1,7 @@ -serialversionuid-not-const.scala:1: error: annotation argument needs to be a constant; found: 13L.toLong -@SerialVersionUID(13l.toLong) class C1 extends Serializable - ^ serialversionuid-not-const.scala:3: error: annotation argument needs to be a constant; found: 13.asInstanceOf[Long] @SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable ^ serialversionuid-not-const.scala:4: error: annotation argument needs to be a constant; found: Test.bippy @SerialVersionUID(Test.bippy) class C4 extends Serializable ^ -three errors found +2 errors diff --git a/test/files/neg/sip23-no-null-type.check b/test/files/neg/sip23-no-null-type.check new file mode 100644 index 000000000000..f61d5da14c4b --- /dev/null +++ b/test/files/neg/sip23-no-null-type.check @@ -0,0 +1,4 @@ +sip23-no-null-type.scala:2: error: identifier expected but 'null' found. + val nada: null.type = null + ^ +1 error diff --git a/test/files/neg/sip23-no-null-type.scala b/test/files/neg/sip23-no-null-type.scala new file mode 100644 index 000000000000..e41ee22b3b67 --- /dev/null +++ b/test/files/neg/sip23-no-null-type.scala @@ -0,0 +1,3 @@ +object Test { + val nada: null.type = null +} diff --git a/test/files/neg/sip23-no-unit-type.check b/test/files/neg/sip23-no-unit-type.check new file mode 100644 index 000000000000..01fba8b5434b --- /dev/null +++ b/test/files/neg/sip23-no-unit-type.check @@ -0,0 +1,22 @@ +sip23-no-unit-type.scala:6: error: Illegal literal type (), use Unit instead [quickfixable] + case _: () => "err" + ^ +sip23-no-unit-type.scala:7: error: Illegal literal type (), use Unit instead [quickfixable] + case _: ().type => "err" + ^ +sip23-no-unit-type.scala:7: error: '=>' expected but '.' found. 
+ case _: ().type => "err" + ^ +sip23-no-unit-type.scala:10: error: Illegal literal type (), use Unit instead [quickfixable] + val younit: () = () + ^ +sip23-no-unit-type.scala:11: error: Illegal literal type (), use Unit instead [quickfixable] + val unit: ().type = () + ^ +sip23-no-unit-type.scala:11: error: '=' expected but '.' found. + val unit: ().type = () + ^ +sip23-no-unit-type.scala:12: error: illegal start of simple expression +} +^ +7 errors diff --git a/test/files/neg/sip23-no-unit-type.scala b/test/files/neg/sip23-no-unit-type.scala new file mode 100644 index 000000000000..7c4ea051e5e8 --- /dev/null +++ b/test/files/neg/sip23-no-unit-type.scala @@ -0,0 +1,12 @@ + +object Test { + def f: () => Int = () => 42 + def s = null.asInstanceOf[Any] match { + case () => "ok" + case _: () => "err" + case _: ().type => "err" + case _: Unit => "quite good" + } + val younit: () = () + val unit: ().type = () +} diff --git a/test/files/neg/sip23-null.check b/test/files/neg/sip23-null.check new file mode 100644 index 000000000000..f5f711bdaacb --- /dev/null +++ b/test/files/neg/sip23-null.check @@ -0,0 +1,11 @@ +sip23-null.scala:2: error: type mismatch; + found : Null(null) + required: x.type + def bar(x: Any): x.type = null + ^ +sip23-null.scala:3: error: type mismatch; + found : Null(null) + required: x.type + def foo(x: AnyVal): x.type = null + ^ +2 errors diff --git a/test/files/neg/sip23-null.scala b/test/files/neg/sip23-null.scala new file mode 100644 index 000000000000..622b15b5d45d --- /dev/null +++ b/test/files/neg/sip23-null.scala @@ -0,0 +1,6 @@ +object Test { + def bar(x: Any): x.type = null + def foo(x: AnyVal): x.type = null + def baz(x: AnyRef): x.type = null // ok +} + diff --git a/test/files/neg/sip23-override.check b/test/files/neg/sip23-override.check new file mode 100644 index 000000000000..c156fcb4b55c --- /dev/null +++ b/test/files/neg/sip23-override.check @@ -0,0 +1,13 @@ +sip23-override.scala:24: error: incompatible type in overriding +val f2: 4 
(defined in trait Overridden); + found : 5 + required: 4 + override val f2: 5 = 5 + ^ +sip23-override.scala:28: error: incompatible type in overriding +def f5: 4 (defined in trait Overridden); + found : 5 + required: 4 + override def f5: 5 = 5 + ^ +2 errors diff --git a/test/files/neg/sip23-override.scala b/test/files/neg/sip23-override.scala new file mode 100644 index 000000000000..36da4fe1f23e --- /dev/null +++ b/test/files/neg/sip23-override.scala @@ -0,0 +1,29 @@ +trait Overridden { + val f0 = 4 + val f1: Int = 4 + val f2: 4 = 4 + + def f3 = 4 + def f4: Int = 4 + def f5: 4 = 4 +} + +class Overrider0 extends Overridden { + override val f0 = 4 + override val f1: Int = 4 + override val f2: 4 = 4 + + override def f3 = 4 + override def f4: Int = 4 + override def f5: 4 = 4 +} + +class Overrider1 extends Overridden { + override val f0 = 5 + override val f1: 5 = 5 + override val f2: 5 = 5 + + override def f3 = 5 + override def f4: 5 = 5 + override def f5: 5 = 5 +} diff --git a/test/files/neg/sip23-uninitialized-0.check b/test/files/neg/sip23-uninitialized-0.check new file mode 100644 index 000000000000..21b2a684aebb --- /dev/null +++ b/test/files/neg/sip23-uninitialized-0.check @@ -0,0 +1,4 @@ +sip23-uninitialized-0.scala:2: error: unbound placeholder parameter + val f0: 1 = _ // error: unbound placeholder + ^ +1 error diff --git a/test/files/neg/sip23-uninitialized-0.scala b/test/files/neg/sip23-uninitialized-0.scala new file mode 100644 index 000000000000..c55847b31c71 --- /dev/null +++ b/test/files/neg/sip23-uninitialized-0.scala @@ -0,0 +1,3 @@ +object Test { + val f0: 1 = _ // error: unbound placeholder +} diff --git a/test/files/neg/sip23-uninitialized-1.check b/test/files/neg/sip23-uninitialized-1.check new file mode 100644 index 000000000000..30e8307f0b65 --- /dev/null +++ b/test/files/neg/sip23-uninitialized-1.check @@ -0,0 +1,6 @@ +sip23-uninitialized-1.scala:4: warning: value f1 in object Test does nothing other than call itself recursively + val f1: 1 = f1 
// warning: recursive + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/sip23-uninitialized-1.scala b/test/files/neg/sip23-uninitialized-1.scala new file mode 100644 index 000000000000..89804aafafc3 --- /dev/null +++ b/test/files/neg/sip23-uninitialized-1.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings +// +object Test { + val f1: 1 = f1 // warning: recursive +} diff --git a/test/files/neg/sip23-uninitialized-2.check b/test/files/neg/sip23-uninitialized-2.check new file mode 100644 index 000000000000..b97e06a5b2d0 --- /dev/null +++ b/test/files/neg/sip23-uninitialized-2.check @@ -0,0 +1,4 @@ +sip23-uninitialized-2.scala:2: error: default initialization prohibited for literal-typed vars + var f2: 1 = _ // error: default init prohibited + ^ +1 error diff --git a/test/files/neg/sip23-uninitialized-2.scala b/test/files/neg/sip23-uninitialized-2.scala new file mode 100644 index 000000000000..de879e8df74c --- /dev/null +++ b/test/files/neg/sip23-uninitialized-2.scala @@ -0,0 +1,3 @@ +object Test { + var f2: 1 = _ // error: default init prohibited +} diff --git a/test/files/neg/sip23-uninitialized-3.check b/test/files/neg/sip23-uninitialized-3.check new file mode 100644 index 000000000000..a0fd7089e1e1 --- /dev/null +++ b/test/files/neg/sip23-uninitialized-3.check @@ -0,0 +1,6 @@ +sip23-uninitialized-3.scala:4: warning: variable f3 in object Test does nothing other than call itself recursively + var f3: 1 = f3 // warning: recursive + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/sip23-uninitialized-3.scala b/test/files/neg/sip23-uninitialized-3.scala new file mode 100644 index 000000000000..bf7b7723aa13 --- /dev/null +++ b/test/files/neg/sip23-uninitialized-3.scala @@ -0,0 +1,5 @@ +//> using options -Xfatal-warnings +// +object Test { + var f3: 1 = f3 // warning: recursive +} diff --git a/test/files/neg/sip23-widen.check b/test/files/neg/sip23-widen.check new file mode 100644 index 000000000000..a3a4aeabaa03 --- /dev/null +++ b/test/files/neg/sip23-widen.check @@ -0,0 +1,56 @@ +sip23-widen.scala:4: error: type mismatch; + found : Test.f0.type (with underlying type Int) + required: 4 + f0: 4 + ^ +sip23-widen.scala:16: error: type mismatch; + found : () => Int + required: () => 4 + f2: (() => 4) + ^ +sip23-widen.scala:20: error: type mismatch; + found : () => Int + required: () => 4 + f3: (() => 4) + ^ +sip23-widen.scala:28: error: type mismatch; + found : Test.f5.type (with underlying type Int) + required: 4 + f5: 4 + ^ +sip23-widen.scala:32: error: type mismatch; + found : Test.f6.type (with underlying type Int) + required: 4 + f6: 4 + ^ +sip23-widen.scala:41: error: type mismatch; + found : Int(5) + required: 4 + val f8 = bar(() => 4)(5) + ^ +sip23-widen.scala:45: error: type mismatch; + found : () => (Int, () => Int) + required: () => (4, () => 5) + f9: (() => (4, () => 5)) + ^ +sip23-widen.scala:51: error: type mismatch; + found : Int + required: 4 + f11: 4 + ^ +sip23-widen.scala:56: error: type mismatch; + found : Int + required: 4 + f12: 4 + ^ +sip23-widen.scala:62: error: type mismatch; + found : Int(5) + required: 4 + f13 = 5 + ^ +sip23-widen.scala:75: error: type mismatch; + found : Test.annot0.type (with underlying type Int) + required: 1 @unchecked + annot0: 1 @unchecked + ^ +11 errors diff --git a/test/files/neg/sip23-widen.scala b/test/files/neg/sip23-widen.scala new file mode 100644 index 000000000000..913841b1f117 --- /dev/null +++ b/test/files/neg/sip23-widen.scala @@ -0,0 
+1,82 @@ +object Test { + val f0 = 4 + //f0: Int + f0: 4 + + //final val f1: 4 = 4 + //f1: Int + //f1: 4 + + //final val f1b = 4 + //f1b: Int + //f1b: 4 + + val f2 = () => 4 + //f2: (() => Int) + f2: (() => 4) + + final val f3 = () => 4 + //f3: (() => Int) + f3: (() => 4) + + //val f4: () => 4 = () => 4 + + def foo[T](f: () => T)(t: T): T = t + + val f5 = foo(() => 4)(4) + //f5: Int + f5: 4 + + val f6 = foo(() => 4)(5) + //f6: Int + f6: 4 + + def bar[T <: Singleton](f: () => T)(t: T): T = t + + //final val f7 = bar(() => 4)(4) + //f7: Int + //f7: 4 + + // found 5, required 4 + val f8 = bar(() => 4)(5) + + val f9 = () => (4, () => 5) + //f9: (() => (Int, () => Int)) + f9: (() => (4, () => 5)) + + //val f10: () => (4, () => 5) = () => (4, () => 5) + + var f11 = 4 + //f11: Int + f11: 4 + //f11 = 5 + + final var f12 = 4 + //f12: Int + f12: 4 + //f12 = 5 + + final var f13: 4 = 4 + //f13: Int + //f13: 4 + f13 = 5 + + //final val one = 1 + //final val compiles: 2 = one + 1 + + //final val literalOne: 1 = 1 + //final val alsoCompiles: 2 = literalOne + 1 + + //final val recFive : 5 = recFive + 0 + + val annot0 = 1: @unchecked + //annot0: Int + //annot0: Int @unchecked + annot0: 1 @unchecked + + //final val annot1 = 1: @unchecked + //annot1: Int + //annot1: Int @unchecked + //annot1: 1 + //annot1: 1 @unchecked +} diff --git a/test/files/neg/sortedImplicitNotFound.check b/test/files/neg/sortedImplicitNotFound.check new file mode 100644 index 000000000000..788c9a022085 --- /dev/null +++ b/test/files/neg/sortedImplicitNotFound.check @@ -0,0 +1,80 @@ +sortedImplicitNotFound.scala:10: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ms.map(_ => o) + ^ +sortedImplicitNotFound.scala:13: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. 
+ ms.flatMap(_ => List(o)) + ^ +sortedImplicitNotFound.scala:16: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ms.zip(List(o)) + ^ +sortedImplicitNotFound.scala:19: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ms.collect{case _ => o} + ^ +sortedImplicitNotFound.scala:24: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + is.map(_ => o) + ^ +sortedImplicitNotFound.scala:27: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + is.flatMap(_ => List(o)) + ^ +sortedImplicitNotFound.scala:30: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + is.zip(List(o)) + ^ +sortedImplicitNotFound.scala:33: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + is.collect{case _ => o} + ^ +sortedImplicitNotFound.scala:39: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + mb.map(_ => o) + ^ +sortedImplicitNotFound.scala:43: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + mb.flatMap(_ => List(o)) + ^ +sortedImplicitNotFound.scala:47: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + mb.zip(List(o)) + ^ +sortedImplicitNotFound.scala:51: error: No implicit Ordering[Object] found to build a SortedSet[Object]. 
You may want to upcast to a Set[Int] first by calling `unsorted`. + mb.collect{case _ => o} + ^ +sortedImplicitNotFound.scala:57: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ib.map(_ => o) + ^ +sortedImplicitNotFound.scala:61: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ib.flatMap(_ => List(o)) + ^ +sortedImplicitNotFound.scala:65: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ib.zip(List(o)) + ^ +sortedImplicitNotFound.scala:69: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + ib.collect{case _ => o} + ^ +sortedImplicitNotFound.scala:74: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. + es.map(_ => o) + ^ +sortedImplicitNotFound.scala:77: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. + es.flatMap(_ => List(o)) + ^ +sortedImplicitNotFound.scala:80: error: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] +starting with method orderingToOrdered in object Ordered + es.zip(List(o)) // ah well...: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] + ^ +sortedImplicitNotFound.scala:83: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. + es.collect{case _ => o} + ^ +sortedImplicitNotFound.scala:88: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. 
+ mm.map(_ => (o, o)) + ^ +sortedImplicitNotFound.scala:91: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + mm.flatMap(_ => List((o, o))) + ^ +sortedImplicitNotFound.scala:94: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + mm.collect{case _ => (o, o)} + ^ +sortedImplicitNotFound.scala:99: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + im.map(_ => (o, o)) + ^ +sortedImplicitNotFound.scala:102: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + im.flatMap(_ => List((o, o))) + ^ +sortedImplicitNotFound.scala:105: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. 
+ im.collect{case _ => (o, o)} + ^ +26 errors diff --git a/test/files/neg/sortedImplicitNotFound.scala b/test/files/neg/sortedImplicitNotFound.scala new file mode 100644 index 000000000000..5a04a439b1d1 --- /dev/null +++ b/test/files/neg/sortedImplicitNotFound.scala @@ -0,0 +1,107 @@ +import collection.{mutable => m, immutable => i} + +object WeekDay extends Enumeration { type WeekDay = Value; val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value } + +object Test { + val o = new Object + + val ms = m.SortedSet(1,2,3) + ms.map(_ => "") + ms.map(_ => o) + ms.unsorted.map(_ => o) + ms.flatMap(_ => List("")) + ms.flatMap(_ => List(o)) + ms.unsorted.flatMap(_ => List(o)) + ms.zip(List("")) + ms.zip(List(o)) + ms.unsorted.zip(List(o)) + ms.collect{case _ => ""} + ms.collect{case _ => o} + ms.unsorted.collect{case _ => o} + + val is = i.SortedSet(1,2,3) + is.map(_ => "") + is.map(_ => o) + is.unsorted.map(_ => o) + is.flatMap(_ => List("")) + is.flatMap(_ => List(o)) + is.unsorted.flatMap(_ => List(o)) + is.zip(List("")) + is.zip(List(o)) + is.unsorted.zip(List(o)) + is.collect{case _ => ""} + is.collect{case _ => o} + is.unsorted.collect{case _ => o} + + val mb = m.BitSet(1,2,3) + mb.map(x => x) : m.BitSet + mb.map(_ => "") + mb.map(_ => o) + mb.unsorted.map(_ => o) + mb.flatMap(x => List(x)) : m.BitSet + mb.flatMap(_ => List("")) + mb.flatMap(_ => List(o)) + mb.unsorted.flatMap(_ => List(o)) + mb.zip(List(1)) : m.SortedSet[(Int, Int)] + mb.zip(List("")) + mb.zip(List(o)) + mb.unsorted.zip(List(o)) + mb.collect{case x => x} : m.BitSet + mb.collect{case _ => ""} + mb.collect{case _ => o} + mb.unsorted.collect{case _ => o} + + val ib = i.BitSet(1,2,3) + ib.map(x => x) : i.BitSet + ib.map(_ => "") + ib.map(_ => o) + ib.unsorted.map(_ => o) + ib.flatMap(x => List(x)) : i.BitSet + ib.flatMap(_ => List("")) + ib.flatMap(_ => List(o)) + ib.unsorted.flatMap(_ => List(o)) + ib.zip(List(1)) : i.SortedSet[(Int, Int)] + ib.zip(List("")) + ib.zip(List(o)) + ib.unsorted.zip(List(o)) + 
ib.collect{case x => x} : i.BitSet + ib.collect{case _ => ""} + ib.collect{case _ => o} + ib.unsorted.collect{case _ => o} + + val es = WeekDay.values + es.map(_ => "") + es.map(_ => o) + es.unsorted.map(_ => o) + es.flatMap(_ => List("")) + es.flatMap(_ => List(o)) + es.unsorted.flatMap(_ => List(o)) + es.zip(List("")) + es.zip(List(o)) // ah well...: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] + es.unsorted.zip(List(o)) + es.collect{case _ => ""} + es.collect{case _ => o} + es.unsorted.collect{case _ => o} + + val mm = m.SortedMap(1 -> o) + mm.map(_ => ("", o)) + mm.map(_ => (o, o)) + mm.unsorted.map(_ => (o, o)) + mm.flatMap(_ => List(("", o))) + mm.flatMap(_ => List((o, o))) + mm.unsorted.flatMap(_ => List((o, o))) + mm.collect{case _ => ("", o)} + mm.collect{case _ => (o, o)} + mm.unsorted.collect{case _ => (o, o)} + + val im = i.SortedMap(1 -> o) + im.map(_ => ("", o)) + im.map(_ => (o, o)) + im.unsorted.map(_ => (o, o)) + im.flatMap(_ => List(("", o))) + im.flatMap(_ => List((o, o))) + im.unsorted.flatMap(_ => List((o, o))) + im.collect{case _ => ("", o)} + im.collect{case _ => (o, o)} + im.unsorted.collect{case _ => (o, o)} +} diff --git a/test/files/neg/source3Xneg.check b/test/files/neg/source3Xneg.check new file mode 100644 index 000000000000..ff9e1a67fe3a --- /dev/null +++ b/test/files/neg/source3Xneg.check @@ -0,0 +1,22 @@ +source3Xneg.scala:47: error: value + is not a member of List[Int] +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + ^ +source3Xneg.scala:56: error: method copy in class CaseCompanionMods cannot be accessed as a member of CaseCompanionMods from object Test + CaseCompanionMods.i.copy(CaseCompanionMods(2).x) // 2 errors + ^ +source3Xneg.scala:56: error: method apply in object CaseCompanionMods cannot be accessed as a member of object CaseCompanionMods from object Test +error after rewriting to CaseCompanionMods. +possible cause: maybe a wrong Dynamic method signature? 
+ CaseCompanionMods.i.copy(CaseCompanionMods(2).x) // 2 errors + ^ +source3Xneg.scala:60: error: value toUpperCase is not a member of Object + InferredSub.f.toUpperCase // error + ^ +source3Xneg.scala:44: warning: Implicit definition must have explicit type (inferred String) [quickfixable] + implicit def s = "" // error + ^ +source3Xneg.scala:43: warning: Implicit definition must have explicit type (inferred Int) [quickfixable] + implicit val i = 0 // error + ^ +2 warnings +4 errors diff --git a/test/files/neg/source3Xneg.scala b/test/files/neg/source3Xneg.scala new file mode 100644 index 000000000000..aed8fdd09a5c --- /dev/null +++ b/test/files/neg/source3Xneg.scala @@ -0,0 +1,62 @@ +//> using options -deprecation -Xsource:3 -Xsource-features:_ -Wconf:cat=scala3-migration:w -Werror + +// StringContext hygiene +class SC1 { + class Impl(parts: Any*) { + def s(args: Any*) = "hello, old world" + } + object StringContext { + def apply(parts: Any*) = new Impl(parts: _*) + } + def name = "Scala3" + def test = s"hello, $name" // ok +} + +object UnicodeEscapes { + def inTripleQuoted = """\u0041""" // ok + def inRawInterpolation = raw"\u0041" // ok + def inRawTripleQuoted = raw"""\u0041""" // ok +} + +object InfixNewline extends App { + class K { def x(y: Int) = 0 } + + def x(a: Int) = 1 + + def ok = { + (new K) + `x` (42) // ok + } +} + +case class CaseCompanionMods private (x: Int) // ok +object CaseCompanionMods { def i = CaseCompanionMods(1) } + +trait InferredBase { def f: Object } +object InferredSub extends InferredBase { def f = "a" } // nothing + +trait ExplicitImplicitsBase { + implicit def b: String => Option[Int] +} +object ExplicitImplicits extends ExplicitImplicitsBase { + implicit def b = _.toIntOption // ok + implicit val i = 0 // error + implicit def s = "" // error +} + +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + +object NameShadowing { + class A { class X } + class B extends A { class X; def f = new X } +} + +object Test { + locally { + 
CaseCompanionMods.i.copy(CaseCompanionMods(2).x) // 2 errors + } + + locally { + InferredSub.f.toUpperCase // error + } +} diff --git a/test/files/neg/source3XnegRefchecks.check b/test/files/neg/source3XnegRefchecks.check new file mode 100644 index 000000000000..acdb8a271e36 --- /dev/null +++ b/test/files/neg/source3XnegRefchecks.check @@ -0,0 +1,6 @@ +source3XnegRefchecks.scala:5: warning: shadowing a nested class of a parent is deprecated but class X shadows class X defined in class A; rename the class to something else + class B extends A { class X; def f = new X } + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/source3XnegRefchecks.scala b/test/files/neg/source3XnegRefchecks.scala new file mode 100644 index 000000000000..539a13b391cf --- /dev/null +++ b/test/files/neg/source3XnegRefchecks.scala @@ -0,0 +1,6 @@ +//> using options -deprecation -Xsource:3 -Xsource-features:_ -Wconf:cat=scala3-migration:w -Werror + +object NameShadowing { + class A { class X } + class B extends A { class X; def f = new X } +} diff --git a/test/files/neg/source3cross.check b/test/files/neg/source3cross.check new file mode 100644 index 000000000000..bc7cbaa04d45 --- /dev/null +++ b/test/files/neg/source3cross.check @@ -0,0 +1,4 @@ +source3cross.scala:4: error: value + is not a member of Any + def f(a: Any) = a + "" + ^ +1 error diff --git a/test/files/neg/source3cross.scala b/test/files/neg/source3cross.scala new file mode 100644 index 000000000000..24330c76a14a --- /dev/null +++ b/test/files/neg/source3cross.scala @@ -0,0 +1,5 @@ +//> using options -Xsource:3-cross + +object T { + def f(a: Any) = a + "" +} diff --git a/test/files/neg/source3neg.check b/test/files/neg/source3neg.check new file mode 100644 index 000000000000..f4e0cd820c3d --- /dev/null +++ b/test/files/neg/source3neg.check @@ -0,0 +1,46 @@ +source3neg.scala:16: warning: Unicode escapes in triple quoted strings are ignored in Scala 3 (or with 
-Xsource-features:unicode-escapes-raw); use the literal character instead + def inTripleQuoted = """\u0041""" // error + ^ +source3neg.scala:28: warning: Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). +To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. + `x` (42) // error + ^ +source3neg.scala:12: warning: In Scala 3 (or with -Xsource-features:string-context-scope), String interpolations always use scala.StringContext (SC1.StringContext is used here) + def test = s"hello, $name" // error + ^ +source3neg.scala:17: warning: Unicode escapes in raw interpolations are ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw); use literal characters instead + def inRawInterpolation = raw"\u0041" // error + ^ +source3neg.scala:18: warning: Unicode escapes in raw interpolations are ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw); use literal characters instead + def inRawTripleQuoted = raw"""\u0041""" // error + ^ +source3neg.scala:32: warning: access modifiers for `copy` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +case class CaseCompanionMods private (x: Int) // 2 errors + ^ +source3neg.scala:32: warning: access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +case class CaseCompanionMods private (x: Int) // 2 errors + ^ +source3neg.scala:36: warning: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to Object instead of String [quickfixable] +object InferredSub extends InferredBase { def f = "a" } // error + ^ +source3neg.scala:42: warning: Implicit definition must have explicit type (inferred String => Option[Int]) [quickfixable] + implicit def b = _.toIntOption // 
error + ^ +source3neg.scala:44: warning: Implicit definition must have explicit type (inferred String) [quickfixable] + implicit def s = "" // error + ^ +source3neg.scala:43: warning: Implicit definition must have explicit type (inferred Int) [quickfixable] + implicit val i = 0 // error + ^ +source3neg.scala:47: warning: Converting to String for concatenation is not supported in Scala 3 (or with -Xsource-features:any2stringadd). +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + ^ +source3neg.scala:47: warning: method any2stringadd in object Predef is deprecated (since 2.13.0): Implicit injection of + is deprecated. Convert to String to call + +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + ^ +source3neg.scala:51: warning: shadowing a nested class of a parent is deprecated but class X shadows class X defined in class A; rename the class to something else + class B extends A { class X; def f = new X } + ^ +error: No warnings can be incurred under -Werror. +14 warnings +1 error diff --git a/test/files/neg/source3neg.scala b/test/files/neg/source3neg.scala new file mode 100644 index 000000000000..c05a6944a0f8 --- /dev/null +++ b/test/files/neg/source3neg.scala @@ -0,0 +1,62 @@ +//> using options -deprecation -Xsource:3 -Wconf:cat=scala3-migration:w -Werror + +// StringContext hygiene +class SC1 { + class Impl(parts: Any*) { + def s(args: Any*) = "hello, old world" + } + object StringContext { + def apply(parts: Any*) = new Impl(parts: _*) + } + def name = "Scala3" + def test = s"hello, $name" // error +} + +object UnicodeEscapes { + def inTripleQuoted = """\u0041""" // error + def inRawInterpolation = raw"\u0041" // error + def inRawTripleQuoted = raw"""\u0041""" // error +} + +object InfixNewline extends App { + class K { def x(y: Int) = 0 } + + def x(a: Int) = 1 + + def ok = { + (new K) + `x` (42) // error + } +} + +case class CaseCompanionMods private (x: Int) // 2 errors +object CaseCompanionMods { def i = CaseCompanionMods(1) } + +trait InferredBase { 
def f: Object } +object InferredSub extends InferredBase { def f = "a" } // error + +trait ExplicitImplicitsBase { + implicit def b: String => Option[Int] +} +object ExplicitImplicits extends ExplicitImplicitsBase { + implicit def b = _.toIntOption // error + implicit val i = 0 // error + implicit def s = "" // error +} + +object AnyPlus { def f(xs: List[Int]) = xs + ";" } + +object NameShadowing { + class A { class X } + class B extends A { class X; def f = new X } +} + +object Test { + locally { + CaseCompanionMods.i.copy(CaseCompanionMods(2).x) // ok + } + + locally { + InferredSub.f.toUpperCase // ok + } +} \ No newline at end of file diff --git a/test/files/neg/spec-overrides.check b/test/files/neg/spec-overrides.check index 639186af4078..8d895b4a67c2 100644 --- a/test/files/neg/spec-overrides.check +++ b/test/files/neg/spec-overrides.check @@ -4,4 +4,4 @@ spec-overrides.scala:8: error: Type parameter has to be specialized at least for spec-overrides.scala:12: error: Type parameter has to be specialized at least for the same types as in the overridden method. 
Missing types: Int override def a[T](t: T): List[T] = Nil ^ -two errors found +2 errors diff --git a/test/files/neg/specification-scopes.check b/test/files/neg/specification-scopes.check index 49cdbf9232ca..3120905d6762 100644 --- a/test/files/neg/specification-scopes.check +++ b/test/files/neg/specification-scopes.check @@ -1,12 +1,12 @@ P_2.scala:15: error: reference to x is ambiguous; it is both defined in value and imported subsequently by import q.X._ - println(s"L15: $x") // reference to `x' is ambiguous here + println(s"L15: $x") // reference to `x` is ambiguous here ^ P_2.scala:21: error: reference to y is ambiguous; it is imported twice in the same scope by import p.X._ and import X.y - println(s"L21: $y") // reference to `y' is ambiguous here + println(s"L21: $y") // reference to `y` is ambiguous here ^ -two errors found +2 errors diff --git a/test/files/neg/specification-scopes/P_2.scala b/test/files/neg/specification-scopes/P_2.scala index 856e58c6fb65..725f703adda6 100644 --- a/test/files/neg/specification-scopes/P_2.scala +++ b/test/files/neg/specification-scopes/P_2.scala @@ -1,24 +1,24 @@ -package p { // `X' bound by package clause -import Console._ // `println' bound by wildcard import +package p { // `X` bound by package clause +import Console._ // `println` bound by wildcard import object Y { - println(s"L4: $X") // `X' refers to `p.X' here + println(s"L4: $X") // `X` refers to `p.X` here locally { - import q._ // `X' bound by wildcard import - println(s"L7: $X") // `X' refers to `q.X' here - import X._ // `x' and `y' bound by wildcard import - println(s"L9: $x") // `x' refers to `q.X.x' here + import q._ // `X` bound by wildcard import + println(s"L7: $X") // `X` refers to `q.X` here + import X._ // `x` and `y` bound by wildcard import + println(s"L9: $x") // `x` refers to `q.X.x` here locally { - val x = 3 // `x' bound by local definition - println(s"L12: $x") // `x' refers to constant `3' here + val x = 3 // `x` bound by local definition + 
println(s"L12: $x") // `x` refers to constant `3` here locally { - import q.X._ // `x' and `y' bound by wildcard import - println(s"L15: $x") // reference to `x' is ambiguous here - import X.y // `y' bound by explicit import - println(s"L17: $y") // `y' refers to `q.X.y' here + import q.X._ // `x` and `y` bound by wildcard import + println(s"L15: $x") // reference to `x` is ambiguous here + import X.y // `y` bound by explicit import + println(s"L17: $y") // `y` refers to `q.X.y` here locally { - val x = "abc" // `x' bound by local definition - import p.X._ // `x' and `y' bound by wildcard import - println(s"L21: $y") // reference to `y' is ambiguous here - println(s"L22: $x") // `x' refers to string "abc" here + val x = "abc" // `x` bound by local definition + import p.X._ // `x` and `y` bound by wildcard import + println(s"L21: $y") // reference to `y` is ambiguous here + println(s"L22: $x") // `x` refers to string "abc" here }}}}}} diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check index 020f7efedd4f..cc22eb1d843b 100644 --- a/test/files/neg/stmt-expr-discard.check +++ b/test/files/neg/stmt-expr-discard.check @@ -1,9 +1,9 @@ -stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +stmt-expr-discard.scala:5: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 2 ^ -stmt-expr-discard.scala:5: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +stmt-expr-discard.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 4 ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/stmt-expr-discard.scala b/test/files/neg/stmt-expr-discard.scala index 9fb997819718..a5c9f62627aa 100644 --- a/test/files/neg/stmt-expr-discard.scala +++ b/test/files/neg/stmt-expr-discard.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Werror -Xsource:2.13 -Xlint:deprecation +// class A { def f = 1 + 2 diff --git a/test/files/neg/string-context-refchecked.check b/test/files/neg/string-context-refchecked.check index 6d0d8f640aa8..1bb947a30b8c 100644 --- a/test/files/neg/string-context-refchecked.check +++ b/test/files/neg/string-context-refchecked.check @@ -1,5 +1,5 @@ -string-context-refchecked.scala:3: error: overriding method foo in class C of type => Int; - method foo cannot override final member +string-context-refchecked.scala:3: error: cannot override final member: +final def foo: Int (defined in class C) s"foo${class D extends C { def foo = 2 }; new D}bar" ^ -one error found +1 error diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check index ef6bc01ecbb3..ca16bc50e2c7 100644 --- a/test/files/neg/stringinterpolation_macro-neg.check +++ b/test/files/neg/stringinterpolation_macro-neg.check @@ -10,49 +10,44 @@ stringinterpolation_macro-neg.scala:15: error: too many arguments for interpolat stringinterpolation_macro-neg.scala:16: error: too few arguments for interpolated string new StringContext("", "").f() ^ -stringinterpolation_macro-neg.scala:19: error: type mismatch; - found : String - required: Boolean - f"$s%b" - ^ stringinterpolation_macro-neg.scala:20: error: type mismatch; found : String - required: Char + required: Char, Byte, Short, Int f"$s%c" ^ stringinterpolation_macro-neg.scala:21: error: type mismatch; found : Double - required: Char + required: Char, Byte, Short, Int f"$f%c" ^ stringinterpolation_macro-neg.scala:22: error: type mismatch; found : String - required: Int + required: Int, Long, Byte, Short, BigInt 
f"$s%x" ^ stringinterpolation_macro-neg.scala:23: error: type mismatch; found : Boolean - required: Int + required: Int, Long, Byte, Short, BigInt f"$b%d" ^ stringinterpolation_macro-neg.scala:24: error: type mismatch; found : String - required: Int + required: Int, Long, Byte, Short, BigInt f"$s%d" ^ stringinterpolation_macro-neg.scala:25: error: type mismatch; found : Double - required: Int + required: Int, Long, Byte, Short, BigInt f"$f%o" ^ stringinterpolation_macro-neg.scala:26: error: type mismatch; found : String - required: Double + required: Double, Float, BigDecimal f"$s%e" ^ stringinterpolation_macro-neg.scala:27: error: type mismatch; found : Boolean - required: Double + required: Double, Float, BigDecimal f"$b%f" ^ stringinterpolation_macro-neg.scala:32: error: type mismatch; @@ -109,64 +104,70 @@ stringinterpolation_macro-neg.scala:43: error: '(' not allowed for a, A stringinterpolation_macro-neg.scala:44: error: Only '-' allowed for date/time conversions f"$t%#+ 0,(tT" ^ -stringinterpolation_macro-neg.scala:47: error: precision not allowed +stringinterpolation_macro-neg.scala:45: error: Duplicate flag ',' + f"$d%,,d" + ^ +stringinterpolation_macro-neg.scala:48: error: precision not allowed f"$c%.2c" ^ -stringinterpolation_macro-neg.scala:48: error: precision not allowed +stringinterpolation_macro-neg.scala:49: error: precision not allowed f"$d%.2d" ^ -stringinterpolation_macro-neg.scala:49: error: precision not allowed +stringinterpolation_macro-neg.scala:50: error: precision not allowed f"%.2%" ^ -stringinterpolation_macro-neg.scala:50: error: precision not allowed +stringinterpolation_macro-neg.scala:51: error: precision not allowed f"%.2n" ^ -stringinterpolation_macro-neg.scala:51: error: precision not allowed +stringinterpolation_macro-neg.scala:52: error: precision not allowed f"$f%.2a" ^ -stringinterpolation_macro-neg.scala:52: error: precision not allowed +stringinterpolation_macro-neg.scala:53: error: precision not allowed f"$t%.2tT" ^ 
-stringinterpolation_macro-neg.scala:55: error: No last arg +stringinterpolation_macro-neg.scala:56: error: No last arg f"%: Null <: Object](p: { def f(p: U): Unit; def u: U }) = () ^ -9 errors found +9 errors diff --git a/test/files/neg/subkinding.check b/test/files/neg/subkinding.check new file mode 100644 index 000000000000..6e2179ab7bd0 --- /dev/null +++ b/test/files/neg/subkinding.check @@ -0,0 +1,16 @@ +subkinding.scala:5: error: kinds of the type arguments (Test1.C) do not conform to the expected kinds of the type parameters (type B) in trait A. +Test1.C's type parameters do not match type B's expected parameters: +type Y's bounds <: X are stricter than type S's declared bounds >: Nothing <: Any + type T = A[C] + ^ +subkinding.scala:12: error: kinds of the type arguments (Test2.C) do not conform to the expected kinds of the type parameters (type T) in trait A. +Test2.C's type parameters do not match type T's expected parameters: +type _ is invariant, but type _ is declared covariant + type T = A[C] + ^ +subkinding.scala:20: error: kinds of the type arguments (Test3.Adapter,T) do not conform to the expected kinds of the type parameters (type A,type S) in trait Mixin. 
+Test3.Adapter's type parameters do not match type A's expected parameters: +type B (in trait Adapter)'s bounds <: Test3.Box[T] are stricter than type B's declared bounds <: Test3.Box[S] + trait Super[T] extends Mixin[Adapter, T] + ^ +3 errors diff --git a/test/files/neg/subkinding.scala b/test/files/neg/subkinding.scala new file mode 100644 index 000000000000..5a1745e8c535 --- /dev/null +++ b/test/files/neg/subkinding.scala @@ -0,0 +1,21 @@ +// scala/bug#2067 +object Test1 { + trait A[B[D[X, Y <: X]]] + trait C[E[T, S]] + type T = A[C] +} + +// scala/bug#2067 +object Test2 { + trait A[T[_[_]]] + trait C[X[+_]] + type T = A[C] +} + +// scala/bug#12242 +object Test3 { + trait Box[T] + trait Adapter[B <: Box[T], T] + trait Mixin[+A[B <: Box[S], X], S] + trait Super[T] extends Mixin[Adapter, T] +} diff --git a/test/files/neg/suggest-similar.check b/test/files/neg/suggest-similar.check index 057aa8b250fd..1fa8f189ba04 100644 --- a/test/files/neg/suggest-similar.check +++ b/test/files/neg/suggest-similar.check @@ -1,10 +1,85 @@ -suggest-similar.scala:8: error: not found: value flippitx - flippitx = 123 - ^ -suggest-similar.scala:9: error: not found: value identiyt +suggest-similar.scala:10: error: value flippitx is not a member of object example.Weehawken +did you mean flippity? + Weehawken.flippitx = 123 + ^ +suggest-similar.scala:11: error: not found: value identiyt Nil map identiyt ^ -suggest-similar.scala:10: error: not found: type Bingus - new Bingus - ^ -three errors found +suggest-similar.scala:12: error: type Eeehawken is not a member of package example +did you mean Weehawken? + new example.Eeehawken + ^ +suggest-similar.scala:16: error: value readline is not a member of object scala.io.StdIn +did you mean readLine? or perhaps readByte, readInt, or readLong? + import scala.io.StdIn.{readline, readInt} + ^ +suggest-similar.scala:20: error: object stdin is not a member of package io +did you mean StdIn? 
+ import scala.io.stdin.{readLine => line} + ^ +suggest-similar.scala:37: error: value foo is not a member of object example.Hohokus +did you mean foo1, foo2, foo3, or foo4? or...? + Hohokus.foo + ^ +suggest-similar.scala:41: error: value bar is not a member of example.Hohokus +did you mean bar2? + new Hohokus().bar // don't suggest bar1 + ^ +suggest-similar.scala:54: error: value acb is not a member of example.assignments.C +did you mean abc? + def f = c.acb(42) + ^ +suggest-similar.scala:55: error: value ++- is not a member of example.assignments.C +did you mean +++? + def g = c.++-(42) + ^ +suggest-similar.scala:56: error: value ++= is not a member of example.assignments.C +did you mean +++? + def h = c.++=(42) + ^ +suggest-similar.scala:57: error: value ++= is not a member of example.assignments.C +did you mean +++? + Expression does not convert to assignment because receiver is not assignable. + def i = c ++= 42 + ^ +suggest-similar.scala:59: error: value y_= is not a member of example.assignments.C + def v = c y_= 1 + ^ +suggest-similar.scala:60: error: value y is not a member of example.assignments.C + def v2 = c.y = 1 + ^ +suggest-similar.scala:61: error: value x_== is not a member of example.assignments.C + def w = c x_== 1 + ^ +suggest-similar.scala:62: error: value xx_= is not a member of example.assignments.C +did you mean x_=? + def y = c.xx_=(1) + ^ +suggest-similar.scala:63: error: reassignment to val + def y2 = c.xx = 1 + ^ +suggest-similar.scala:65: error: value zzz_= is not a member of example.assignments.C +did you mean zz_=? or perhaps z_=? + def z2 = c.zzz_=(1) + ^ +suggest-similar.scala:66: error: value ++++ is not a member of example.assignments.C +did you mean +++? + def z3 = c.++++(1) + ^ +suggest-similar.scala:67: error: value xxx is not a member of example.assignments.C +did you mean xx? or perhaps x? 
+ def legacy_duple = c.xxx // did not suggest c.xx as too short + ^ +suggest-similar.scala:76: error: value missN is not a member of example.MaxAlt +did you mean miss0, miss1, miss2, or miss3? or...? + def test: Int = new MaxAlt().missN + ^ +suggest-similar.scala:83: error: value missN is not a member of example.MissAlt +did you mean miss0, miss1, or miss2? + def test: Int = new MissAlt().missN + ^ +suggest-similar.scala:94: error: value missN is not a member of example.MoreAlt +did you mean miss0, miss1, miss2, or miss3? or...? + def test: Int = new MoreAlt().missN + ^ +22 errors diff --git a/test/files/neg/suggest-similar.scala b/test/files/neg/suggest-similar.scala index ff327478fe10..f10b2d927be7 100644 --- a/test/files/neg/suggest-similar.scala +++ b/test/files/neg/suggest-similar.scala @@ -1,11 +1,95 @@ -class Dingus -object Dingus { +package example + +class Weehawken +object Weehawken { var flippity = 1 + type Blippitx = Int } -import Dingus._ class A { - flippitx = 123 + Weehawken.flippitx = 123 Nil map identiyt - new Bingus + new example.Eeehawken +} + +object B { + import scala.io.StdIn.{readline, readInt} +} + +object C { + import scala.io.stdin.{readLine => line} +} + +class Hohokus { + protected def bar1: Unit = () + protected[example] def bar2: Unit = () +} +object Hohokus { + def foo1 = 1 + def foo2 = 2 + def foo3 = 3 + def foo4 = 4 + def foo5 = 5 + def foo6 = 6 +} + +object D { + Hohokus.foo +} + +object E { + new Hohokus().bar // don't suggest bar1 +} + +object assignments { + class C { + def abc(i: Int) = i + def +++(i: Int) = i + var x = 42 + val xx = 42 + var z = 42 + var zz = 42 + } + val c = new C + def f = c.acb(42) + def g = c.++-(42) + def h = c.++=(42) + def i = c ++= 42 + def u = c x_= 1 + def v = c y_= 1 + def v2 = c.y = 1 + def w = c x_== 1 + def y = c.xx_=(1) + def y2 = c.xx = 1 + def z = c.zz_=(1) + def z2 = c.zzz_=(1) + def z3 = c.++++(1) + def legacy_duple = c.xxx // did not suggest c.xx as too short +} + +class MaxAlt { + def 
miss0 = 42 + def miss1 = 42 + def miss2 = 42 + def miss3 = 42 + def miss33 = 42 + def test: Int = new MaxAlt().missN +} + +class MissAlt { + def miss0 = 42 + def miss1 = 42 + def miss2 = 42 + def test: Int = new MissAlt().missN +} + +class MoreAlt { + def miss0 = 42 + def miss1 = 42 + def miss2 = 42 + def miss3 = 42 + def miss4 = 42 + def miss5 = 42 + def miss6 = 42 + def test: Int = new MoreAlt().missN } diff --git a/test/files/neg/super-cast-or-test.check b/test/files/neg/super-cast-or-test.check index 8e5eed62bd31..e1399c2a9e49 100644 --- a/test/files/neg/super-cast-or-test.check +++ b/test/files/neg/super-cast-or-test.check @@ -4,4 +4,4 @@ trait A { def f = super.asInstanceOf[AnyRef] } super-cast-or-test.scala:2: error: super not allowed here: use this.isInstanceOf instead trait B { def g = super.isInstanceOf[AnyRef] } ^ -two errors found +2 errors diff --git a/test/files/neg/surrogates.check b/test/files/neg/surrogates.check new file mode 100644 index 000000000000..3521b9b72817 --- /dev/null +++ b/test/files/neg/surrogates.check @@ -0,0 +1,4 @@ +surrogates.scala:3: error: illegal codepoint in Char constant: '\ud801\udc00' + def `too wide for Char` = '𐐀' + ^ +1 error diff --git a/test/files/neg/surrogates.scala b/test/files/neg/surrogates.scala new file mode 100644 index 000000000000..d8e2ef545a18 --- /dev/null +++ b/test/files/neg/surrogates.scala @@ -0,0 +1,4 @@ + +class C { + def `too wide for Char` = '𐐀' +} diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check index 6ed54ca70103..ca5e74912d7b 100644 --- a/test/files/neg/switch.check +++ b/test/files/neg/switch.check @@ -1,9 +1,9 @@ -switch.scala:39: warning: could not emit switch for @switch annotated match +switch.scala:40: warning: could not emit switch for @switch annotated match def fail2(c: Char) = (c: @switch @unchecked) match { ^ -switch.scala:46: warning: could not emit switch for @switch annotated match +switch.scala:47: warning: could not emit switch for @switch annotated match 
def fail3(c: Char) = (c: @unchecked @switch) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/switch.scala b/test/files/neg/switch.scala index 7968156b99dc..428d80cce261 100644 --- a/test/files/neg/switch.scala +++ b/test/files/neg/switch.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// import scala.annotation.switch // this is testing not so much how things ought to be but how they are; diff --git a/test/files/neg/symbol-literal-deprecation.check b/test/files/neg/symbol-literal-deprecation.check new file mode 100644 index 000000000000..caa918446009 --- /dev/null +++ b/test/files/neg/symbol-literal-deprecation.check @@ -0,0 +1,6 @@ +symbol-literal-deprecation.scala:4: warning: symbol literal is deprecated; use Symbol("TestSymbol") instead [quickfixable] + val foo = 'TestSymbol + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/symbol-literal-deprecation.scala b/test/files/neg/symbol-literal-deprecation.scala new file mode 100644 index 000000000000..296d18c377db --- /dev/null +++ b/test/files/neg/symbol-literal-deprecation.scala @@ -0,0 +1,5 @@ +//> using options -deprecation -Xfatal-warnings +// +abstract class Foo { + val foo = 'TestSymbol +} diff --git a/test/files/neg/t0003.check b/test/files/neg/t0003.check index 8bab55db3f4a..b33de9263d24 100644 --- a/test/files/neg/t0003.check +++ b/test/files/neg/t0003.check @@ -1,6 +1,6 @@ t0003.scala:2: error: type mismatch; found : A => (B => B) - required: A => B - def foo[A, B, C](l: List[A], f: A => B=>B, g: B=>B=>C): List[C] = l map (g compose f) - ^ -one error found + required: ? 
=> B + def foo[A, B, C](l: List[A], f: A => B => B, g: B => B => C): List[C] = l map (g compose f) + ^ +1 error diff --git a/test/files/neg/t0003.scala b/test/files/neg/t0003.scala index e41cfa9e9e5f..a5f7999f0fac 100644 --- a/test/files/neg/t0003.scala +++ b/test/files/neg/t0003.scala @@ -1,3 +1,3 @@ object Test { - def foo[A, B, C](l: List[A], f: A => B=>B, g: B=>B=>C): List[C] = l map (g compose f) + def foo[A, B, C](l: List[A], f: A => B => B, g: B => B => C): List[C] = l map (g compose f) } diff --git a/test/files/neg/t0015.check b/test/files/neg/t0015.check index 43adc22f7283..41339eb8d8c9 100644 --- a/test/files/neg/t0015.check +++ b/test/files/neg/t0015.check @@ -3,4 +3,4 @@ t0015.scala:5: error: type mismatch; required: Nothing => ? Nil.map(f _) ^ -one error found +1 error diff --git a/test/files/neg/t0117.check b/test/files/neg/t0117.check index 579cf883a7c1..4339f05b2ecf 100644 --- a/test/files/neg/t0117.check +++ b/test/files/neg/t0117.check @@ -1,4 +1,4 @@ t0117.scala:2: error: Implementation restriction: traits may not select fields or methods from super[C] where C is a class trait B extends A { println(super[A].a) } ^ -one error found +1 error diff --git a/test/files/neg/t0152.check b/test/files/neg/t0152.check index a7909bf14d4d..2a8811d2e310 100644 --- a/test/files/neg/t0152.check +++ b/test/files/neg/t0152.check @@ -3,4 +3,4 @@ t0152.scala:10: error: illegal inheritance; Value[Int] and Value[String] object boom extends Value[java.lang.String]("foo") with PlusOne ^ -one error found +1 error diff --git a/test/files/neg/t0204.check b/test/files/neg/t0204.check index 0f7acfdddecb..360fe8e2b1e9 100644 --- a/test/files/neg/t0204.check +++ b/test/files/neg/t0204.check @@ -1,4 +1,4 @@ t0204.scala:4: error: class type required but Program.A{type T = String} found trait C extends B ^ -one error found +1 error diff --git a/test/files/neg/t0207.check b/test/files/neg/t0207.check index ebe6759b36ca..0fa9f890c8eb 100644 --- a/test/files/neg/t0207.check +++ 
b/test/files/neg/t0207.check @@ -4,4 +4,4 @@ t0207.scala:3: error: type T takes type parameters t0207.scala:3: error: type T takes type parameters type S = (T with T)[A] ^ -two errors found +2 errors diff --git a/test/files/neg/t0209.check b/test/files/neg/t0209.check index 1904e58e7ae3..a01c72a1418b 100644 --- a/test/files/neg/t0209.check +++ b/test/files/neg/t0209.check @@ -1,6 +1,6 @@ t0209.scala:15: error: type mismatch; found : C - required: _1.type where val _1: A + required: stabilizer$1.type (new B: A).f(new C) ^ -one error found +1 error diff --git a/test/files/neg/t0209.scala b/test/files/neg/t0209.scala index ecbe183e35be..cd9bd975442e 100644 --- a/test/files/neg/t0209.scala +++ b/test/files/neg/t0209.scala @@ -11,7 +11,7 @@ class C extends A { } object Program { - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { (new B: A).f(new C) } } diff --git a/test/files/neg/t0214.check b/test/files/neg/t0214.check index 30bb0488cf4f..5be4db938315 100644 --- a/test/files/neg/t0214.check +++ b/test/files/neg/t0214.check @@ -1,4 +1,4 @@ t0214.scala:3: error: missing parameter type - a2p(x => x._1,(2,3)) + a2p(x => x._1, (2, 3)) ^ -one error found +1 error diff --git a/test/files/neg/t0214.scala b/test/files/neg/t0214.scala index bdc42bfd7832..c4de446f866d 100644 --- a/test/files/neg/t0214.scala +++ b/test/files/neg/t0214.scala @@ -1,4 +1,4 @@ object Test { - def a2p[a,b,c](f:((a,b))=>c,v:(a,b)):c = f(v) - a2p(x => x._1,(2,3)) + def a2p[a, b, c](f: ((a, b)) => c, v: (a,b)): c = f(v) + a2p(x => x._1, (2, 3)) } diff --git a/test/files/neg/t0218.check b/test/files/neg/t0218.check index a22583d23bd4..bd7c04736226 100644 --- a/test/files/neg/t0218.check +++ b/test/files/neg/t0218.check @@ -1,4 +1,4 @@ t0218.scala:10: error: class type required but APQ.this.P found List(new PP) ^ -one error found +1 error diff --git a/test/files/neg/t0226.check b/test/files/neg/t0226.check index 247f1f2443a7..7c7391c8452a 100644 --- a/test/files/neg/t0226.check 
+++ b/test/files/neg/t0226.check @@ -4,7 +4,7 @@ t0226.scala:5: error: not found: type A1 t0226.scala:5: error: not found: type A1 (implicit _1: Foo[List[A1]], _2: Foo[A2]): Foo[Tuple2[List[A1], A2]] = ^ -t0226.scala:8: error: could not find implicit value for parameter rep: Test.this.Foo[((List[Char], Int), (scala.collection.immutable.Nil.type, Int))] +t0226.scala:8: error: could not find implicit value for parameter rep: Test.this.Foo[((List[Char], Int), (collection.immutable.Nil.type, Int))] foo(((List('b'), 3), (Nil, 4))) ^ -three errors found +3 errors diff --git a/test/files/neg/t0259.check b/test/files/neg/t0259.check index 8c15d984196e..a8316dc6a0d6 100644 --- a/test/files/neg/t0259.check +++ b/test/files/neg/t0259.check @@ -1,7 +1,7 @@ t0259.scala:4: error: double definition: -constructor TestCase3: (groups: (String, Int)*)test.TestCase3 at line 3 and -constructor TestCase3: (groups: String*)test.TestCase3 at line 4 -have same type after erasure: (groups: Seq)test.TestCase3 +constructor TestCase3: (groups: (String, Int)*): test.TestCase3 at line 3 and +constructor TestCase3: (groups: String*): test.TestCase3 at line 4 +have same type after erasure: (groups: Seq): test.TestCase3 def this( groups: String*) = this() ^ -one error found +1 error diff --git a/test/files/neg/t0345.check b/test/files/neg/t0345.check index 1e55d01cd1ce..7b397ff69357 100644 --- a/test/files/neg/t0345.check +++ b/test/files/neg/t0345.check @@ -1,4 +1,7 @@ -t0345.scala:2: error: object creation impossible, since method cons in trait Lizt of type (a: Nothing)Unit is not defined +t0345.scala:2: error: object creation impossible. +Missing implementation for member of trait Lizt: + def cons(a: Nothing): Unit = ??? 
// implements `def cons(a: A): Unit` + val empty = new Lizt[Nothing] { ^ -one error found +1 error diff --git a/test/files/neg/t0345.scala b/test/files/neg/t0345.scala index f3652c183b6b..15b941bc414b 100644 --- a/test/files/neg/t0345.scala +++ b/test/files/neg/t0345.scala @@ -1,6 +1,6 @@ object Lizt { val empty = new Lizt[Nothing] { - def cons[A](a : A) {} + def cons[A](a : A): Unit = {} } } @@ -10,6 +10,6 @@ trait Lizt[A] { class Test { abstract class C[A] {} val c = new C[Int] { - def f[A](x: A) {} + def f[A](x: A): Unit = {} } } diff --git a/test/files/neg/t0351.check b/test/files/neg/t0351.check index ce10605eca04..cdc3ccdfe324 100644 --- a/test/files/neg/t0351.check +++ b/test/files/neg/t0351.check @@ -1,4 +1,4 @@ t0351.scala:2: error: no by-name parameter type allowed here def identity[T](x : => T) : (=> T) ^ -one error found +1 error diff --git a/test/files/neg/t0418.check b/test/files/neg/t0418.check deleted file mode 100644 index b95f8e4e1bcd..000000000000 --- a/test/files/neg/t0418.check +++ /dev/null @@ -1,4 +0,0 @@ -t0418.scala:2: error: not found: value Foo12340771 - null match { case Foo12340771.Bar(x) => x } - ^ -one error found diff --git a/test/files/neg/t0418.scala b/test/files/neg/t0418.scala deleted file mode 100644 index 67007010d4e8..000000000000 --- a/test/files/neg/t0418.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test { - null match { case Foo12340771.Bar(x) => x } -} diff --git a/test/files/neg/t0503.check b/test/files/neg/t0503.check index 51e5bbeda6f7..2be44bc67079 100644 --- a/test/files/neg/t0503.check +++ b/test/files/neg/t0503.check @@ -4,4 +4,4 @@ val x = new { } with { } t0503.scala:3: error: expected class or object definition val y = new { } with A ^ -two errors found +2 errors diff --git a/test/files/neg/t0513.check b/test/files/neg/t0513.check index edc0c9ab67c3..2ab6d1bf253b 100644 --- a/test/files/neg/t0513.check +++ b/test/files/neg/t0513.check @@ -4,4 +4,4 @@ t0513.scala:5: error: type arguments [Nothing,Int] do not 
conform to class Y's t t0513.scala:5: error: type arguments [Nothing,Int] do not conform to class Y's type parameter bounds [T1,T2 <: T1] val test2 = Test[Y[Nothing, Int]] // No error ^ -two errors found +2 errors diff --git a/test/files/neg/t0528neg.check b/test/files/neg/t0528neg.check index c8c3ab422f78..88670e44d42f 100644 --- a/test/files/neg/t0528neg.check +++ b/test/files/neg/t0528neg.check @@ -1,4 +1,4 @@ -t0528neg.scala:2: error: covariant type A occurs in invariant position in type => Array[T forSome { type T <: A }] of method toArray +t0528neg.scala:2: error: covariant type A occurs in invariant position in type Array[T forSome { type T <: A }] of method toArray def toArray: Array[T forSome {type T <: A}] ^ -one error found +1 error diff --git a/test/files/neg/t0565.check b/test/files/neg/t0565.check index 98e61a250352..bd47b089dd35 100644 --- a/test/files/neg/t0565.check +++ b/test/files/neg/t0565.check @@ -1,4 +1,4 @@ t0565.scala:8: error: Parameter type in structural refinement may not refer to a type member of that refinement def z (w : T) : T } = ^ -one error found +1 error diff --git a/test/files/neg/t0590.check b/test/files/neg/t0590.check index a3ef70c6cdf8..5cbd30dafc49 100644 --- a/test/files/neg/t0590.check +++ b/test/files/neg/t0590.check @@ -3,4 +3,4 @@ t0590.scala:2: error: type mismatch; required: T implicit def foo[T] : T = null ^ -one error found +1 error diff --git a/test/files/neg/t0606.check b/test/files/neg/t0606.check index fb83fca74437..794b869a95a2 100644 --- a/test/files/neg/t0606.check +++ b/test/files/neg/t0606.check @@ -1,4 +1,4 @@ t0606.scala:5: error: private value db escapes its defining scope as part of type Foo.this.db.Info val info = new db.Info ^ -one error found +1 error diff --git a/test/files/neg/t0673.check b/test/files/neg/t0673.check index af35a5a5fd0d..7583ddeaeda7 100644 --- a/test/files/neg/t0673.check +++ b/test/files/neg/t0673.check @@ -1,4 +1,4 @@ Test.scala:2: error: class JavaClass.InnerClass is not a 
value val x = JavaClass.InnerClass ^ -one error found +1 error diff --git a/test/files/neg/t0699.check b/test/files/neg/t0699.check index c944da8c105a..a4cfb471a011 100644 --- a/test/files/neg/t0699.check +++ b/test/files/neg/t0699.check @@ -7,4 +7,4 @@ B.scala:3: error: illegal inheritance from sealed class C B.scala:4: error: illegal inheritance from sealed class C class C1 extends A.C ^ -three errors found +3 errors diff --git a/test/files/neg/t0764.check b/test/files/neg/t0764.check index 0c7cff1e1e65..775bcac4c5fc 100644 --- a/test/files/neg/t0764.check +++ b/test/files/neg/t0764.check @@ -4,4 +4,4 @@ t0764.scala:13: error: type mismatch; (which expands to) Node{type T = Node{type T = NextType}} new Main[AType]( (value: AType).prepend ) ^ -one error found +1 error diff --git a/test/files/neg/t0764.scala b/test/files/neg/t0764.scala index 9762be6ab378..68a39ed090b3 100644 --- a/test/files/neg/t0764.scala +++ b/test/files/neg/t0764.scala @@ -42,4 +42,4 @@ class Node[+T <: Node[_]] { def prepend = new Node[this.type] } class Main[NextType <: Node[_]](value: Node[NextType]) { new Main(value.prepend) } -*/ \ No newline at end of file +*/ diff --git a/test/files/neg/t0764b.check b/test/files/neg/t0764b.check index 4040954e7c2d..681e2ba923ff 100644 --- a/test/files/neg/t0764b.check +++ b/test/files/neg/t0764b.check @@ -1,47 +1,47 @@ t0764b.scala:27: error: type mismatch; - found : p1.t0764.Node{type T = p1.t0764..type} + found : p1.t0764.Node{type T = Main1.this.v.type} required: p1.t0764.NodeAlias[p1.t0764.NodeAlias[A]] (which expands to) p1.t0764.Node{type T = p1.t0764.Node{type T = A}} private[this] def f2 = new Main1[NodeAlias[A]](v.prepend) // fail ^ t0764b.scala:28: error: type mismatch; - found : p1.t0764.Node{type T = p1.t0764..type} + found : p1.t0764.Node{type T = Main1.this.v.type} required: p1.t0764.NodeAlias[p1.t0764.Node{type T = A}] (which expands to) p1.t0764.Node{type T = p1.t0764.Node{type T = A}} private[this] def f3 = new Main1[Node { type T = A 
}](v.prepend) // fail ^ t0764b.scala:34: error: type mismatch; - found : p1.t0764.Node{type T = p1.t0764..type} + found : p1.t0764.Node{type T = Main2.this.v.type} required: p1.t0764.Node{type T = p1.t0764.NodeAlias[A]} (which expands to) p1.t0764.Node{type T = p1.t0764.Node{type T = A}} private[this] def f2 = new Main2[NodeAlias[A]](v.prepend) // fail ^ t0764b.scala:35: error: type mismatch; - found : p1.t0764.Node{type T = p1.t0764..type} + found : p1.t0764.Node{type T = Main2.this.v.type} required: p1.t0764.Node{type T = p1.t0764.Node{type T = A}} private[this] def f3 = new Main2[Node { type T = A }](v.prepend) // fail ^ t0764b.scala:51: error: type mismatch; - found : p2.t0764.Node{type T = p2.t0764..type} + found : p2.t0764.Node{type T = Main1.this.v.type} required: p2.t0764.NodeAlias[p2.t0764.NodeAlias[A]] (which expands to) p2.t0764.Node{type T = p2.t0764.Node{type T = A}} private[this] def f2 = new Main1[NodeAlias[A]](v.prepend) // fail ^ t0764b.scala:52: error: type mismatch; - found : p2.t0764.Node{type T = p2.t0764..type} + found : p2.t0764.Node{type T = Main1.this.v.type} required: p2.t0764.NodeAlias[p2.t0764.Node{type T = A}] (which expands to) p2.t0764.Node{type T = p2.t0764.Node{type T = A}} private[this] def f3 = new Main1[Node { type T = A }](v.prepend) // fail ^ t0764b.scala:58: error: type mismatch; - found : p2.t0764.Node{type T = p2.t0764..type} + found : p2.t0764.Node{type T = Main2.this.v.type} required: p2.t0764.Node{type T = p2.t0764.NodeAlias[A]} (which expands to) p2.t0764.Node{type T = p2.t0764.Node{type T = A}} private[this] def f2 = new Main2[NodeAlias[A]](v.prepend) // fail ^ t0764b.scala:59: error: type mismatch; - found : p2.t0764.Node{type T = p2.t0764..type} + found : p2.t0764.Node{type T = Main2.this.v.type} required: p2.t0764.Node{type T = p2.t0764.Node{type T = A}} private[this] def f3 = new Main2[Node { type T = A }](v.prepend) // fail ^ -8 errors found +8 errors diff --git a/test/files/neg/t0816.check 
b/test/files/neg/t0816.check index 48f37c141492..cb68e00262f0 100644 --- a/test/files/neg/t0816.check +++ b/test/files/neg/t0816.check @@ -1,4 +1,4 @@ t0816.scala:5: error: case class Ctest has case ancestor Btest, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes. case class Ctest(override val data: String) extends Btest(data, true) ^ -one error found +1 error diff --git a/test/files/neg/t0842.check b/test/files/neg/t0842.check index 3351aa1174ce..3178f3931dfb 100644 --- a/test/files/neg/t0842.check +++ b/test/files/neg/t0842.check @@ -1,4 +1,4 @@ t0842.scala:1: error: A.this.type does not take type parameters trait A[T] { def m: this.type[T] = this } ^ -one error found +1 error diff --git a/test/files/neg/t0899.check b/test/files/neg/t0899.check index 28cb06ae5ad7..108a8670a2c4 100644 --- a/test/files/neg/t0899.check +++ b/test/files/neg/t0899.check @@ -1,10 +1,10 @@ -t0899.scala:9: error: super may not be used on value o +t0899.scala:9: error: super may not be used on value o; super can only be used to select a member that is a method or type override val o = "Ha! " + super.o ^ -t0899.scala:11: error: super may not be used on variable v +t0899.scala:11: error: super may not be used on variable v; super can only be used to select a member that is a method or type super.v = "aa" ^ -t0899.scala:12: error: super may not be used on variable v +t0899.scala:12: error: super may not be used on variable v; super can only be used to select a member that is a method or type println(super.v) ^ -three errors found +3 errors diff --git a/test/files/neg/t0903.check b/test/files/neg/t0903.check index b8ca1f4acecb..02c82d2acccd 100644 --- a/test/files/neg/t0903.check +++ b/test/files/neg/t0903.check @@ -1,8 +1,8 @@ -t0903.scala:4: error: value += is not a member of Int +t0903.scala:5: error: value += is not a member of Int Expression does not convert to assignment because receiver is not assignable. 
x += 1 ^ -t0903.scala:5: error: reassignment to val +t0903.scala:6: error: reassignment to val x = 2 ^ -two errors found +2 errors diff --git a/test/files/neg/t0903.scala b/test/files/neg/t0903.scala index 798c924f288f..799a2fa6fa1d 100644 --- a/test/files/neg/t0903.scala +++ b/test/files/neg/t0903.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos +// +// object Test { val x = 1 x += 1 diff --git a/test/files/neg/t10003.check b/test/files/neg/t10003.check new file mode 100644 index 000000000000..1e3bb6f52851 --- /dev/null +++ b/test/files/neg/t10003.check @@ -0,0 +1,4 @@ +t10003.scala:4: error: illegal cyclic reference involving type List + new Functor[a] { type List[t] = List[t] } + ^ +1 error diff --git a/test/files/neg/t10003.scala b/test/files/neg/t10003.scala new file mode 100644 index 000000000000..3bec407ab1cf --- /dev/null +++ b/test/files/neg/t10003.scala @@ -0,0 +1,5 @@ +trait Functor[a] { type MyType[a] } +object Functor { + def listFunctor[a]: Functor[a] { type MyType[x] = List[x] } = + new Functor[a] { type List[t] = List[t] } +} diff --git a/test/files/neg/t10015.check b/test/files/neg/t10015.check new file mode 100644 index 000000000000..90ccf3dd67ea --- /dev/null +++ b/test/files/neg/t10015.check @@ -0,0 +1,7 @@ +t10015.scala:3: error: not found: type Baz + f (new Baz { def g }) + ^ +t10015.scala:3: error: only traits and abstract classes can have declared but undefined members + f (new Baz { def g }) + ^ +2 errors diff --git a/test/files/neg/t10015.scala b/test/files/neg/t10015.scala new file mode 100644 index 000000000000..f0898ad206d0 --- /dev/null +++ b/test/files/neg/t10015.scala @@ -0,0 +1,4 @@ +class Bar { + def f (x : { def g }) {} + f (new Baz { def g }) +} diff --git a/test/files/neg/t10019.check b/test/files/neg/t10019.check index 36314625bf80..3eb9db6bf2c3 100644 --- a/test/files/neg/t10019.check +++ b/test/files/neg/t10019.check @@ -6,6 +6,6 @@ t10019.scala:9: warning: match may not be exhaustive. 
It would fail on the following input: (Foo(None), _) def tuple(s: Foo, t: Foo): Nothing = (s, t) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t10019.scala b/test/files/neg/t10019.scala index 1730b0d84db9..8c4c910a588f 100644 --- a/test/files/neg/t10019.scala +++ b/test/files/neg/t10019.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Bug { sealed case class Foo(e: Option[Int]) diff --git a/test/files/neg/t10066.check b/test/files/neg/t10066.check index 3555205d836a..438965fc6c0e 100644 --- a/test/files/neg/t10066.check +++ b/test/files/neg/t10066.check @@ -4,4 +4,4 @@ t10066.scala:33: error: could not find implicit value for parameter extractor: d t10066.scala:37: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[A] println(storage.foo) ^ -two errors found +2 errors diff --git a/test/files/neg/t10068.check b/test/files/neg/t10068.check index 19c9a47b9478..7aec85c66414 100644 --- a/test/files/neg/t10068.check +++ b/test/files/neg/t10068.check @@ -1,13 +1,16 @@ -t10068.scala:6: error: i : Only methods can be marked @elidable. +t10068.scala:6: error: i : Only concrete methods can be marked @elidable. @elidable(INFO) val i: Int = 42 ^ -t10068.scala:7: error: j: Only methods can be marked @elidable. +t10068.scala:7: error: j: Only concrete methods can be marked @elidable. @elidable(INFO) lazy val j: Int = 42 ^ -t10068.scala:8: error: k : Only methods can be marked @elidable. +t10068.scala:8: error: k : Only concrete methods can be marked @elidable. @elidable(INFO) var k: Int = 42 ^ -t10068.scala:10: error: D: Only methods can be marked @elidable. +t10068.scala:9: error: f: Only concrete methods can be marked @elidable. The annotation affects only the annotated method, not overriding methods in subclasses. 
+ @elidable(INFO) def f: Int + ^ +t10068.scala:11: error: D: Only concrete methods can be marked @elidable. @elidable(INFO) class D ^ -four errors found +5 errors diff --git a/test/files/neg/t10068.scala b/test/files/neg/t10068.scala index 30f193802311..1128b68d710b 100644 --- a/test/files/neg/t10068.scala +++ b/test/files/neg/t10068.scala @@ -1,10 +1,11 @@ -// scalac: -Xelide-below WARNING -Xsource:2.13 - +//> using options -Xelide-below WARNING -Xsource:2.13 +// import annotation._, elidable._ -class C { +abstract class C { @elidable(INFO) val i: Int = 42 @elidable(INFO) lazy val j: Int = 42 @elidable(INFO) var k: Int = 42 + @elidable(INFO) def f: Int } @elidable(INFO) class D diff --git a/test/files/neg/t10073.check b/test/files/neg/t10073.check index 9782135040db..90493f2047e0 100644 --- a/test/files/neg/t10073.check +++ b/test/files/neg/t10073.check @@ -1,4 +1,4 @@ -t10073.scala:7: error: tpe Unused is an unresolved spliceable type +t10073.scala:7: error: type Unused is an unresolved spliceable type "".yo() ^ -one error found +1 error diff --git a/test/files/neg/t10073.scala b/test/files/neg/t10073.scala index 06f3167854a3..f7b5e6408d1d 100644 --- a/test/files/neg/t10073.scala +++ b/test/files/neg/t10073.scala @@ -5,4 +5,4 @@ class Yo[Unused] { class MacroNotExpanded { implicit def toYo[Unused](a: Any)(implicit ct: reflect.ClassTag[Unused]): Yo[Unused] = new Yo[Unused] "".yo() -} \ No newline at end of file +} diff --git a/test/files/neg/t10073b.check b/test/files/neg/t10073b.check index 309fea6b9ac4..7fcd9023ebaf 100644 --- a/test/files/neg/t10073b.check +++ b/test/files/neg/t10073b.check @@ -1,4 +1,4 @@ -t10073b.scala:7: error: tpe Unused is an unresolved spliceable type +t10073b.scala:7: error: type Unused is an unresolved spliceable type "".yo() ^ -one error found +1 error diff --git a/test/files/neg/t10081.check b/test/files/neg/t10081.check new file mode 100644 index 000000000000..9d155bcfbd3b --- /dev/null +++ b/test/files/neg/t10081.check @@ -0,0 +1,4 
@@ +t10081.scala:2: error: value x is not a member of B[X] +trait B[X] extends A[B[X @unchecked]] { this.x } + ^ +1 error diff --git a/test/files/neg/t10081.scala b/test/files/neg/t10081.scala new file mode 100644 index 000000000000..a18442d2d88f --- /dev/null +++ b/test/files/neg/t10081.scala @@ -0,0 +1,2 @@ +trait A[_] +trait B[X] extends A[B[X @unchecked]] { this.x } diff --git a/test/files/neg/t1009.check b/test/files/neg/t1009.check index 5c9978b73712..e27d5f5e200e 100644 --- a/test/files/neg/t1009.check +++ b/test/files/neg/t1009.check @@ -1,4 +1,4 @@ t1009.scala:2: error: empty quoted identifier def `` = "fish" ^ -one error found +1 error diff --git a/test/files/neg/t10090.check b/test/files/neg/t10090.check new file mode 100644 index 000000000000..14a0aed7a88d --- /dev/null +++ b/test/files/neg/t10090.check @@ -0,0 +1,7 @@ +t10090.scala:4: error: stable identifier required, but X.Y found. + (null: Any) match { case X.Y.Z() => } + ^ +t10090.scala:6: error: stable identifier required, but X.Y found. 
+ (null: Any) match { case X.Y.Z => } + ^ +2 errors diff --git a/test/files/neg/t10090.scala b/test/files/neg/t10090.scala new file mode 100644 index 000000000000..8b8aecb38193 --- /dev/null +++ b/test/files/neg/t10090.scala @@ -0,0 +1,7 @@ +object X { implicit class Y(self: String) } + +object T10090 { + (null: Any) match { case X.Y.Z() => } + + (null: Any) match { case X.Y.Z => } +} diff --git a/test/files/neg/t10097.check b/test/files/neg/t10097.check index 504288148537..60e0ed47e7ff 100644 --- a/test/files/neg/t10097.check +++ b/test/files/neg/t10097.check @@ -1,19 +1,19 @@ -t10097.scala:3: error: case classes must have a non-implicit parameter list; try 'case class C()(...)' +t10097.scala:4: error: case classes must have a non-implicit parameter list; try 'case class C()(...)' case class C(implicit val c: Int) ^ -t10097.scala:5: error: case classes must have a non-implicit parameter list; try 'case class D()(...)(...)' +t10097.scala:6: error: case classes must have a non-implicit parameter list; try 'case class D()(...)(...)' case class D(implicit c: Int)(s: String) ^ -t10097.scala:5: error: an implicit parameter section must be last +t10097.scala:6: error: an implicit parameter section must be last case class D(implicit c: Int)(s: String) ^ -t10097.scala:7: error: case classes must have a non-implicit parameter list; try 'case class *()(...)' +t10097.scala:8: error: case classes must have a non-implicit parameter list; try 'case class *()(...)' case class *(implicit c: Int) ^ -t10097.scala:10: error: identifier expected but 'import' found. +t10097.scala:11: error: identifier expected but 'import' found. 
import collection._ ^ -t10097.scala:10: error: case classes must have a parameter list; try 'case class C()' or 'case object C' +t10097.scala:11: error: case classes must have a parameter list; try 'case class C()' or 'case object C' import collection._ ^ -6 errors found +6 errors diff --git a/test/files/neg/t10097.scala b/test/files/neg/t10097.scala index 723f3de94338..592ea5459400 100644 --- a/test/files/neg/t10097.scala +++ b/test/files/neg/t10097.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 +// case class C(implicit val c: Int) diff --git a/test/files/neg/t10097b.check b/test/files/neg/t10097b.check index 9dd35d01cfdb..240424840a30 100644 --- a/test/files/neg/t10097b.check +++ b/test/files/neg/t10097b.check @@ -1,6 +1,4 @@ -t10097b.scala:3: warning: case classes should have a non-implicit parameter list; adapting to 'case class C()(...)' +t10097b.scala:4: error: case classes must have a non-implicit parameter list; try 'case class C()(...)' case class C(implicit val c: Int) ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +1 error diff --git a/test/files/neg/t10097b.scala b/test/files/neg/t10097b.scala index f453f8b682ec..d52e9be1226d 100644 --- a/test/files/neg/t10097b.scala +++ b/test/files/neg/t10097b.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// case class C(implicit val c: Int) diff --git a/test/files/neg/t1010.check b/test/files/neg/t1010.check index d412d8ac1e15..ecdfde88c62d 100644 --- a/test/files/neg/t1010.check +++ b/test/files/neg/t1010.check @@ -1,6 +1,6 @@ t1010.scala:14: error: type mismatch; found : MailBox#Message - required: _1.in.Message where val _1: Actor + required: stabilizer$1.in.Message unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member ^ -one error found +1 error diff --git a/test/files/neg/t10156.check b/test/files/neg/t10156.check index e656cb3c25e8..e0c7e828aa8c 100644 --- a/test/files/neg/t10156.check +++ b/test/files/neg/t10156.check @@ -1,4 +1,4 @@ t10156.scala:4: error: could not find implicit value for parameter a: t10156.A val z = x _ ^ -one error found +1 error diff --git a/test/files/neg/t10156.scala b/test/files/neg/t10156.scala index a4a046108c38..7367e3a2f547 100644 --- a/test/files/neg/t10156.scala +++ b/test/files/neg/t10156.scala @@ -2,4 +2,4 @@ object t10156 { trait A def x(implicit a: A) = a val z = x _ -} \ No newline at end of file +} diff --git a/test/files/neg/t10207.check b/test/files/neg/t10207.check index 3330db44a5cc..73009767d508 100755 --- a/test/files/neg/t10207.check +++ b/test/files/neg/t10207.check @@ -1,4 +1,4 @@ -t10207.scala:14: error: too many arguments (2) for method apply: (key: Int)scala.collection.mutable.ArrayBuffer[String] in trait MapLike +t10207.scala:14: error: too many arguments (found 2, expected 1) for method apply: (key: Int): scala.collection.mutable.ArrayBuffer[String] in trait MapOps m(1, (_ => empty)) ++= AB("eins", "uno") ^ -one error 
found +1 error diff --git a/test/files/neg/t10228.check b/test/files/neg/t10228.check new file mode 100644 index 000000000000..f4346839aec5 --- /dev/null +++ b/test/files/neg/t10228.check @@ -0,0 +1,4 @@ +t10228.scala:9: error: malformed type: A#Type + def g[A <: Fili[A]]: Unit = implicitly[A <:< A#Type] + ^ +1 error diff --git a/test/files/neg/t10228.scala b/test/files/neg/t10228.scala new file mode 100644 index 000000000000..94825d147aca --- /dev/null +++ b/test/files/neg/t10228.scala @@ -0,0 +1,10 @@ +object t10228 { + trait Data { self => + type Type >: self.type <: Data + } + trait DataF[P <: Data, A <: DataF[P, A]] extends Data { type Type = P#Type } + + type Fili[A <: Data] = DataF[A, A] + + def g[A <: Fili[A]]: Unit = implicitly[A <:< A#Type] +} diff --git a/test/files/neg/t10249.check b/test/files/neg/t10249.check index 606c490c7d8a..4828bc4d3eda 100644 --- a/test/files/neg/t10249.check +++ b/test/files/neg/t10249.check @@ -1,4 +1,4 @@ Test_1.scala:11: error: Unable to emit reference to method m in class A, class A is not accessible in object Test w.m() ^ -one error found +1 error diff --git a/test/files/neg/t10260.check b/test/files/neg/t10260.check index 151e9ed98ee5..11c8029f52d0 100644 --- a/test/files/neg/t10260.check +++ b/test/files/neg/t10260.check @@ -1,17 +1,25 @@ -Test.scala:1: error: class IAImpl needs to be abstract, since method foo in trait IA of type (a: A)Unit is not defined -(Note that A does not match A[_]. To implement this raw type, use A[T] forSome { type T <: A[T] }) +Test.scala:1: error: class IAImpl needs to be abstract. +Missing implementation for member of trait IA: + def foo(a: A[T] forSome { type T <: A[T] }): Unit = ??? // implements `def foo(a: A): Unit`; A does not match A[_]. To implement this raw type, use A[T] forSome { type T <: A[T] } + class IAImpl extends IA { def foo(a: A[_]) = ??? 
} ^ -Test.scala:2: error: class IBImpl needs to be abstract, since method foo in trait IB of type (a: B)Unit is not defined -(Note that B does not match B[_, _]. To implement this raw type, use B[T,R] forSome { type T; type R <: java.util.List[_ >: T] }) +Test.scala:2: error: class IBImpl needs to be abstract. +Missing implementation for member of trait IB: + def foo(a: B[T,R] forSome { type T; type R <: java.util.List[_ >: T] }): Unit = ??? // implements `def foo(a: B): Unit`; B does not match B[_, _]. To implement this raw type, use B[T,R] forSome { type T; type R <: java.util.List[_ >: T] } + class IBImpl extends IB { def foo(a: B[_,_]) = ??? } ^ -Test.scala:3: error: class ICImpl needs to be abstract, since method foo in trait IC of type (a: Int, b: C, c: String)C is not defined -(Note that C does not match C[_]. To implement this raw type, use C[_ <: String]) +Test.scala:3: error: class ICImpl needs to be abstract. +Missing implementation for member of trait IC: + def foo(a: Int, b: C[_ <: String], c: String): C[_ <: String] = ??? // implements `def foo(a: Int, b: C, c: String): C`; C does not match C[_]. To implement this raw type, use C[_ <: String] + class ICImpl extends IC { def foo(a: Int, b: C[_], c: String) = ??? } ^ -Test.scala:4: error: class IDImpl needs to be abstract, since method foo in trait ID of type (a: D)Unit is not defined -(Note that D does not match D[_ <: String]. To implement this raw type, use D[_]) +Test.scala:4: error: class IDImpl needs to be abstract. +Missing implementation for member of trait ID: + def foo(a: D[_]): Unit = ??? // implements `def foo(a: D): Unit`; D does not match D[_ <: String]. To implement this raw type, use D[_] + class IDImpl extends ID { def foo(a: D[_ <: String]) = ??? 
} ^ -four errors found +4 errors diff --git a/test/files/neg/t10270.check b/test/files/neg/t10270.check index bbab20ee8177..12e352da441f 100644 --- a/test/files/neg/t10270.check +++ b/test/files/neg/t10270.check @@ -1,6 +1,6 @@ -Main_2.scala:6: warning: Unused import +Main_2.scala:5: warning: Unused import import Implicits._ ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t10270/Macros_1.scala b/test/files/neg/t10270/Macros_1.scala index 0d9f51e2c260..056995d2497a 100644 --- a/test/files/neg/t10270/Macros_1.scala +++ b/test/files/neg/t10270/Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:imports import language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/neg/t10270/Main_2.scala b/test/files/neg/t10270/Main_2.scala index 6c8185e14a90..e15240dd05d3 100644 --- a/test/files/neg/t10270/Main_2.scala +++ b/test/files/neg/t10270/Main_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:imports - +//> using options -Wunused:imports -Werror object Main extends App { def f(): Any = Macro { diff --git a/test/files/neg/t10279.check b/test/files/neg/t10279.check index 0c8fc3f79894..a399a2b15041 100644 --- a/test/files/neg/t10279.check +++ b/test/files/neg/t10279.check @@ -15,4 +15,4 @@ t10279.scala:14: error: type mismatch; t10279.scala:17: error: could not find implicit value for parameter x: Int val barSimple = fooSimple _ // error: no implicit int ^ -5 errors found +5 errors diff --git a/test/files/neg/t10287.check b/test/files/neg/t10287.check new file mode 100644 index 000000000000..a6725dac2c44 --- /dev/null +++ b/test/files/neg/t10287.check @@ -0,0 +1,24 @@ +t10287.scala:5: warning: pattern var x in method unused_patvar is never used + Some(42) match { case x => 27 } // warn + ^ +t10287.scala:12: warning: pattern var x in value $anonfun is 
never used + x <- Some(42) // warn + ^ +t10287.scala:16: warning: pattern var x in value $anonfun is never used + x <- Some(42) // warn + ^ +t10287.scala:22: warning: pattern var y in value $anonfun is never used + y <- Some(27) // warn + ^ +t10287.scala:27: warning: pattern var y in value $anonfun is never used + y = 3 // warn + ^ +t10287.scala:31: warning: pattern var x in value $anonfun is never used + x <- Some(42) // warn + ^ +t10287.scala:37: warning: pattern var y in value $anonfun is never used + y = 3 // warn + ^ +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/t10287.scala b/test/files/neg/t10287.scala new file mode 100644 index 000000000000..e98f2400fa37 --- /dev/null +++ b/test/files/neg/t10287.scala @@ -0,0 +1,70 @@ +//> using options -Werror -Wunused:_ -Xsource:3 -Yvalidate-pos:typer + +class C { + def unused_patvar = + Some(42) match { case x => 27 } // warn + def a = + for { + x <- Some(42) // ok + } yield (x + 1) + def b = + for { + x <- Some(42) // warn + } yield 27 + def c = + for { + x <- Some(42) // warn + y <- Some(27) // ok + } yield y + def d = + for { + x <- Some(42) // ok + y <- Some(27) // warn + } yield x + def e = + for { + x <- Some(42) // ok + y = 3 // warn + } yield x + def f = + for { + x <- Some(42) // warn + y = 3 // ok + } yield y + def g = + for { + x <- Some(1 -> 2) // ok + y = 3 // warn + (a, b) = x // ok + } yield (a + b) + def h(xs: List[Int]) = + for { + x <- xs + y = x * 2 + _ = println(x) + } yield y + def i(xs: List[Int]) = + for { + x <- xs + if x > 42 + } println(".") +} + +case class K[A](a: A, i: Int) + +class Fixes { + def leavenstain(s: String, t: String) = { + val n = s.length + val m = t.length + for (i <- 1 to n; s_i = s(i - 1); j <- 1 to m) { + println("" + s_i + t(j - 1)) + } + } + + def f[A](ks: List[K[A]]): List[K[?]] = { + for { + K((s: String), j) <- ks + x = j*2 + } yield K(s*2, x) + } +} diff --git a/test/files/neg/t10296-after.check 
b/test/files/neg/t10296-after.check index 5003ec5a6ebe..2b071994e071 100644 --- a/test/files/neg/t10296-after.check +++ b/test/files/neg/t10296-after.check @@ -1,6 +1,6 @@ -Unused_2.scala:8: warning: private method g in object Unused is never used +Unused_2.scala:7: warning: private method g in object Unused is never used private def g(): Int = 17 ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t10296-after/UnusedMacro_1.scala b/test/files/neg/t10296-after/UnusedMacro_1.scala index 3c2deb44021d..9e042f803a86 100644 --- a/test/files/neg/t10296-after/UnusedMacro_1.scala +++ b/test/files/neg/t10296-after/UnusedMacro_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Xlint:unused -Ywarn-macros:after import scala.reflect.macros.whitebox.Context diff --git a/test/files/neg/t10296-after/Unused_2.scala b/test/files/neg/t10296-after/Unused_2.scala index a4a170bbbf7d..93c110c35cbb 100644 --- a/test/files/neg/t10296-after/Unused_2.scala +++ b/test/files/neg/t10296-after/Unused_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint:unused -Ywarn-macros:after - +//> using options -Werror -Xlint:unused -Wmacros:after import scala.language.experimental.macros object Unused extends App { diff --git a/test/files/neg/t10296-both.check b/test/files/neg/t10296-both.check index 64c62f910c8b..c16915f881eb 100644 --- a/test/files/neg/t10296-both.check +++ b/test/files/neg/t10296-both.check @@ -1,9 +1,9 @@ -Unused_2.scala:9: warning: private method k in object Unused is never used - private def k(): Int = 17 - ^ -Unused_2.scala:8: warning: private method g in object Unused is never used +Unused_2.scala:7: warning: private method g in object Unused is never used private def g(): Int = 17 ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-two warnings found -one error found +Unused_2.scala:8: warning: private method k in object Unused is never used + private def k(): Int = 17 + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t10296-both/UnusedMacro_1.scala b/test/files/neg/t10296-both/UnusedMacro_1.scala index 41137609812c..b636ff0fc88a 100644 --- a/test/files/neg/t10296-both/UnusedMacro_1.scala +++ b/test/files/neg/t10296-both/UnusedMacro_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Xlint:unused -Ywarn-macros:both import scala.reflect.macros.whitebox.Context diff --git a/test/files/neg/t10296-both/Unused_2.scala b/test/files/neg/t10296-both/Unused_2.scala index 1e9369a8ffe4..860c44d2d747 100644 --- a/test/files/neg/t10296-both/Unused_2.scala +++ b/test/files/neg/t10296-both/Unused_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint:unused -Ywarn-macros:both - +//> using options -Werror -Xlint:unused -Wmacros:both import scala.language.experimental.macros object Unused extends App { diff --git a/test/files/neg/t10296-warn.check b/test/files/neg/t10296-warn.check index 98680c24db70..a5b01bd647e9 100755 --- a/test/files/neg/t10296-warn.check +++ b/test/files/neg/t10296-warn.check @@ -1,6 +1,6 @@ -Unused_2.scala:10: warning: private method unusedMacro in object Unused is never used +Unused_2.scala:9: warning: private method unusedMacro in object Unused is never used private def unusedMacro(): Unit = macro UnusedMacro.usedMacroImpl ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t10296-warn/UnusedMacro_1.scala b/test/files/neg/t10296-warn/UnusedMacro_1.scala index f3b33c38d561..d3576ee731f0 100644 --- a/test/files/neg/t10296-warn/UnusedMacro_1.scala +++ b/test/files/neg/t10296-warn/UnusedMacro_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Xlint:unused import scala.reflect.macros.blackbox diff --git a/test/files/neg/t10296-warn/Unused_2.scala b/test/files/neg/t10296-warn/Unused_2.scala index e59f963138e4..a0fbd1ff5ce2 100644 --- a/test/files/neg/t10296-warn/Unused_2.scala +++ b/test/files/neg/t10296-warn/Unused_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint:unused - +//> using options -Werror -Xlint:unused import scala.language.experimental.macros object Unused { diff --git a/test/files/neg/t1033.check b/test/files/neg/t1033.check index 16e799264bbf..2e92f2d5741f 100644 --- a/test/files/neg/t1033.check +++ b/test/files/neg/t1033.check @@ -1,4 +1,4 @@ t1033.scala:5: error: return outside method definition return 10 ^ -one error found +1 error diff --git a/test/files/neg/t1033.scala b/test/files/neg/t1033.scala index 3aed8bb11aac..28af01d5cb18 100644 --- a/test/files/neg/t1033.scala +++ b/test/files/neg/t1033.scala @@ -7,7 +7,7 @@ object A { new B 20 } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { f } } diff --git a/test/files/neg/t1038.check b/test/files/neg/t1038.check index b191b89ad124..f0ab1b5e36fe 100644 --- a/test/files/neg/t1038.check +++ b/test/files/neg/t1038.check @@ -1,5 +1,5 @@ -t1038.scala:4: error: not enough arguments for constructor X: (x: Int)X. +t1038.scala:4: error: not enough arguments for constructor X: (x: Int): X. Unspecified value parameter x. 
val a = new X ^ -one error found +1 error diff --git a/test/files/neg/t1038.scala b/test/files/neg/t1038.scala index 9fdcae207ffd..2cc45a190eea 100644 --- a/test/files/neg/t1038.scala +++ b/test/files/neg/t1038.scala @@ -4,5 +4,6 @@ object Y { val a = new X import a._ implicit val b : Int = 1 + @annotation.nowarn implicit val c = 2 -} \ No newline at end of file +} diff --git a/test/files/neg/t10392.check b/test/files/neg/t10392.check new file mode 100644 index 000000000000..fae3773f30c1 --- /dev/null +++ b/test/files/neg/t10392.check @@ -0,0 +1,14 @@ +t10392.scala:9: warning: implicit conversion method cv2 should be enabled +by making the implicit value scala.language.implicitConversions visible. +This can be achieved by adding the import clause 'import scala.language.implicitConversions' +or by setting the compiler option -language:implicitConversions. +See the Scaladoc for value scala.language.implicitConversions for a discussion +why the feature should be explicitly enabled. + private implicit def cv2(i: Int): String = i.toString * 2 + ^ +t10392.scala:9: warning: private method cv2 in class C is never used + private implicit def cv2(i: Int): String = i.toString * 2 + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t10392.scala b/test/files/neg/t10392.scala new file mode 100644 index 000000000000..e75ad3dd6bee --- /dev/null +++ b/test/files/neg/t10392.scala @@ -0,0 +1,19 @@ + +//> using options -feature -Werror -Wunused + +// warn when conversion method is present but unused + +class C { + implicit val cv0: List[String] = List("zero", "one", "two", "three") + //implicit val cv1: Int => String = _.toString + private implicit def cv2(i: Int): String = i.toString * 2 + + def f(i: Int): String = i +} + +object Test extends App { + val c = new C + println { + c.f(3) + } +} diff --git a/test/files/neg/t1041.check b/test/files/neg/t1041.check index d82f3a8586be..b9a212420df9 100644 --- a/test/files/neg/t1041.check +++ b/test/files/neg/t1041.check @@ -3,4 +3,4 @@ t1041.scala:3: error: type mismatch; required: List[Int] case 1 => 4 ^ -one error found +1 error diff --git a/test/files/neg/t10474.check b/test/files/neg/t10474.check index d12531ca902d..bcf492cd9850 100644 --- a/test/files/neg/t10474.check +++ b/test/files/neg/t10474.check @@ -1,7 +1,7 @@ t10474.scala:8: error: stable identifier required, but Test.this.Foo found. - case Foo.Bar ⇒ true + case Foo.Bar => true ^ t10474.scala:15: error: stable identifier required, but hrhino.this.Foo found. val Foo.Crash = ??? 
^ -two errors found +2 errors diff --git a/test/files/neg/t10474.scala b/test/files/neg/t10474.scala index 49f8e14839c1..340e608b27ba 100644 --- a/test/files/neg/t10474.scala +++ b/test/files/neg/t10474.scala @@ -5,8 +5,8 @@ object Test { object Bar def crash[A](): Boolean = Bar match { - case Foo.Bar ⇒ true - case _ ⇒ false + case Foo.Bar => true + case _ => false } } diff --git a/test/files/neg/t1049.check b/test/files/neg/t1049.check index 4a63cb175dcb..0276a8406378 100644 --- a/test/files/neg/t1049.check +++ b/test/files/neg/t1049.check @@ -1,4 +1,4 @@ t1049.scala:6: error: not found: value a case a : a.MapTo => ^ -one error found +1 error diff --git a/test/files/neg/t10502.check b/test/files/neg/t10502.check new file mode 100644 index 000000000000..4b985ea1ff92 --- /dev/null +++ b/test/files/neg/t10502.check @@ -0,0 +1,31 @@ +t10502.scala:7: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def single(opt: Option[String]) = opt match { // missing None case + ^ +t10502.scala:10: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def nested(opt: Option[String]) = opt match { // missing None case + ^ +t10502.scala:13: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def nested2(opt: Option[String]) = opt match { // missing None case + ^ +t10502.scala:22: warning: match may not be exhaustive. + def foo(foo: Foo) = foo match { // missing None case + ^ +t10502.scala:32: warning: match may not be exhaustive. + def bar(bar: Bar) = bar match { // missing None case + ^ +t10502.scala:49: warning: match may not be exhaustive. + def length(str: String) = str match { // missing non-0 case + ^ +t10502.scala:62: warning: match may not be exhaustive. + def nestedUnderIrrefutable(any: Any) = any match { // missing non-int case + ^ +t10502.scala:75: warning: match may not be exhaustive. 
+It would fail on the following inputs: Just(_), Nada() + def usingMay[A](may: May[A]) = may match { // missing Nada case + ^ +error: No warnings can be incurred under -Werror. +8 warnings +1 error diff --git a/test/files/neg/t10502.scala b/test/files/neg/t10502.scala new file mode 100644 index 000000000000..c428adaea897 --- /dev/null +++ b/test/files/neg/t10502.scala @@ -0,0 +1,78 @@ +//> using options -Werror -Xlint:strict-unsealed-patmat +object Bug { + object Perhaps { + def unapply[A](oa: Option[A]): Some[Option[A]] = Some(oa) + } + + def single(opt: Option[String]) = opt match { // missing None case + case Perhaps(Some(s)) => s + } + def nested(opt: Option[String]) = opt match { // missing None case + case Perhaps(Perhaps(Some(s))) => s + } + def nested2(opt: Option[String]) = opt match { // missing None case + case Perhaps(Perhaps(Perhaps(Some(s)))) => s + } + + class Foo(val str: Option[String]) + object Foo { + def unapply(foo: Foo): Some[Option[String]] = Some(foo.str) + } + + def foo(foo: Foo) = foo match { // missing None case + case Foo(Some(s)) => s + } + + + class Bar(val str: Option[String], val ing: Option[String]) + object Bar { + def unapply(bar: Bar): Some[(Option[String], Option[String])] = Some((bar.str, bar.ing)) + } + + def bar(bar: Bar) = bar match { // missing None case + case Bar(Some(s), _) => s + } + + + def list(list: List[Option[String]]) = list match { + case Perhaps(Some(s)) :: _ => s + case Perhaps(None ) :: _ => "" + case Nil => "" + } // was: warning: match may not be exhaustive. + // It would fail on the following input: List(_) + + + object Length { + def unapply(str: String): Some[Int] = Some(str.length) + } + + def length(str: String) = str match { // missing non-0 case + case Length(0) => "empty!" 
+ } + + + object ToStr { + def unapply(any: Any): Some[String] = Some(any.toString) + } + + object ToInt { + def unapply(str: String): Option[Int] = str.toIntOption + } + + def nestedUnderIrrefutable(any: Any) = any match { // missing non-int case + case ToStr(ToInt(n)) => n + } + + + sealed trait May[+A] + final case class Just[+A](value: A) extends May[A] + final case class Nada() extends May[Nothing] + + object Possibly { + def unapply[A](may: May[A]): Some[May[A]] = Some(may) + } + + def usingMay[A](may: May[A]) = may match { // missing Nada case + case Possibly(Just(a)) => a + } +} diff --git a/test/files/neg/t10507.check b/test/files/neg/t10507.check new file mode 100644 index 000000000000..82f39853fff8 --- /dev/null +++ b/test/files/neg/t10507.check @@ -0,0 +1,4 @@ +Test_2.scala:2: error: self constructor invocation must refer to a constructor definition which precedes it, to prevent infinite cycles + Macros_1.seq + ^ +1 error diff --git a/test/files/neg/t10507/Macros_1.scala b/test/files/neg/t10507/Macros_1.scala new file mode 100644 index 000000000000..8a6630a751a7 --- /dev/null +++ b/test/files/neg/t10507/Macros_1.scala @@ -0,0 +1,13 @@ +import scala.language.experimental.macros +import scala.reflect.macros.blackbox + +object Macros_1 { + def seq: List[Int] = macro impl + def impl(c: blackbox.Context): c.Tree = { + import c.universe._ + q"""{ + class A(val l: Long) { def this() = this(0); def this(i: Int) = this(i: Long) } + List(new A, new A(1), new A(2L)).map(_.l) + }""" + } +} diff --git a/test/files/neg/t10507/Test_2.scala b/test/files/neg/t10507/Test_2.scala new file mode 100644 index 000000000000..944b8bf46091 --- /dev/null +++ b/test/files/neg/t10507/Test_2.scala @@ -0,0 +1,3 @@ +object Test_2 { + Macros_1.seq +} diff --git a/test/files/neg/t10514.check b/test/files/neg/t10514.check new file mode 100644 index 000000000000..254c2fae3485 --- /dev/null +++ b/test/files/neg/t10514.check @@ -0,0 +1,13 @@ +t10514.scala:8: error: no type parameters for 
constructor Foo: (value: F[C[G[Foo[F,G]]]]): Foo[F,G] exist so that it can be applied to arguments (Some[C[Foo[Option,Test.this.Id]]]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : Some[C[Foo[Option,Test.this.Id]]] + required: ?F[C[?G[Foo[?F,?G]]]] + new Foo(Some(new C(new Foo[Option, Id](None)))) + ^ +t10514.scala:8: error: type mismatch; + found : Some[C[Foo[Option,Test.this.Id]]] + required: F[C[G[Foo[F,G]]]] + new Foo(Some(new C(new Foo[Option, Id](None)))) + ^ +2 errors diff --git a/test/files/neg/t10514.scala b/test/files/neg/t10514.scala new file mode 100644 index 000000000000..8c3ed35f2926 --- /dev/null +++ b/test/files/neg/t10514.scala @@ -0,0 +1,9 @@ +// document status quo (used to crash, now just fails to infer) +// to make this compile, explicitly provide type args +class C[T](x: T) +class Foo[F[_], G[_]](value: F[C[G[Foo[F, G]]]]) + +class Test { + type Id[A] = A + new Foo(Some(new C(new Foo[Option, Id](None)))) +} diff --git a/test/files/neg/t10530.check b/test/files/neg/t10530.check index 3bf79a71ee18..4cffa98ae166 100644 --- a/test/files/neg/t10530.check +++ b/test/files/neg/t10530.check @@ -22,4 +22,4 @@ class Q(val u: AnyRef with X) extends AnyVal t10530.scala:12: error: value class may not wrap another user-defined value class class B[T <: A](val a: T) extends AnyVal ^ -8 errors found +8 errors diff --git a/test/files/neg/t10530.scala b/test/files/neg/t10530.scala index 4c971c2d65ca..f8b5e4871981 100644 --- a/test/files/neg/t10530.scala +++ b/test/files/neg/t10530.scala @@ -9,4 +9,4 @@ class R(val u: Z {}) extends AnyVal class Q(val u: AnyRef with X) extends AnyVal class A(val a: Int) extends AnyVal -class B[T <: A](val a: T) extends AnyVal \ No newline at end of file +class B[T <: A](val a: T) extends AnyVal diff --git a/test/files/neg/t10545.check b/test/files/neg/t10545.check new file mode 100644 index 000000000000..44891aa8cceb --- /dev/null +++ b/test/files/neg/t10545.check @@ -0,0 
+1,7 @@ +t10545.scala:32: error: ambiguous implicit values: + both method barF0 in object Bar of type [F[_]](implicit fooF: Foo[F]): Bar[F] + and method barF1 in object Bar of type [F[_]](implicit fooF: Foo[F]): Bar[F] + match expected type Bar[Option] + implicitly[Bar[Option]] + ^ +1 error diff --git a/test/files/neg/t10545.scala b/test/files/neg/t10545.scala new file mode 100644 index 000000000000..164511f84d20 --- /dev/null +++ b/test/files/neg/t10545.scala @@ -0,0 +1,33 @@ +class Foo[F[_]] +object Foo { + // Prior to this fix these two are ambiguous + implicit def fooF0[F[_]]: Foo[F] = new Foo[F] + implicit def fooF1: Foo[Option] = new Foo[Option] +} + +class Bar[F[_]] +object Bar extends Bar0 { + // Prior to this fix these two aren't selected because there is no + // Foo[F] due to the ambiguity above + // After this fix these two are ambiguous + implicit def barF0[F[_]](implicit fooF: Foo[F]): Bar[F] = new Bar[F] + implicit def barF1[F[_]](implicit fooF: Foo[F]): Bar[F] = new Bar[F] +} + +trait Bar0 { + // Prior to this fix we fall back to here + implicit def barF2[F[_]]: Bar[F] = new Bar[F] +} + +object Test { + // Prior to this fix Bar.barF1[Option] + // After this fix, + // error: ambiguous implicit values: + // both method barF0 in object Bar of type [F[_]](implicit fooF: Foo[F])Bar[F] + // and method barF1 in object Bar of type [F[_]](implicit fooF: Foo[F])Bar[F] + // match expected type Bar[Option] + // implicitly[Bar[Option]] + // ^ + // one error found + implicitly[Bar[Option]] +} diff --git a/test/files/neg/t10619.check b/test/files/neg/t10619.check index 3bea5fd28b63..940f0dc2e17b 100644 --- a/test/files/neg/t10619.check +++ b/test/files/neg/t10619.check @@ -7,4 +7,4 @@ t10619.scala:5: error: stable identifier required, but Test.this.newOuter found. t10619.scala:12: error: stable identifier required, but Test.this.newOuter found. 
val f = new newOuter.Inner ^ -three errors found +3 errors diff --git a/test/files/neg/t10641.check b/test/files/neg/t10641.check new file mode 100644 index 000000000000..96120a27a0e8 --- /dev/null +++ b/test/files/neg/t10641.check @@ -0,0 +1,4 @@ +t10641.scala:6: error: ';' expected but '=>' found. +import collection._ => `not a rename, just dross` + ^ +1 error diff --git a/test/files/neg/t10641.scala b/test/files/neg/t10641.scala new file mode 100644 index 000000000000..000e698c06a4 --- /dev/null +++ b/test/files/neg/t10641.scala @@ -0,0 +1,8 @@ + +// previously ignored +// progressed Wildcard import cannot be renamed +// now ';' expected but '=>' found. + +import collection._ => `not a rename, just dross` + + diff --git a/test/files/neg/t10661.check b/test/files/neg/t10661.check index 02e41b4c79bf..1d8df9966126 100644 --- a/test/files/neg/t10661.check +++ b/test/files/neg/t10661.check @@ -1,4 +1,4 @@ t10661.scala:3: error: class type required but A found def f[A] = new C with A ^ -one error found +1 error diff --git a/test/files/neg/t10662.check b/test/files/neg/t10662.check new file mode 100644 index 000000000000..761af387fd5c --- /dev/null +++ b/test/files/neg/t10662.check @@ -0,0 +1,5 @@ +px_2.scala:19: error: reference to X is ambiguous; +it is both defined in package p and available as class X in package q + implicitly[T[X]] // ambiguous + ^ +1 error diff --git a/test/files/neg/t10662/pqx_1.scala b/test/files/neg/t10662/pqx_1.scala new file mode 100644 index 000000000000..1ec2d4c27d7a --- /dev/null +++ b/test/files/neg/t10662/pqx_1.scala @@ -0,0 +1,6 @@ + +package p.q + +class X { + override def toString() = "p.q.X" +} diff --git a/test/files/neg/t10662/px_2.scala b/test/files/neg/t10662/px_2.scala new file mode 100644 index 000000000000..579514115325 --- /dev/null +++ b/test/files/neg/t10662/px_2.scala @@ -0,0 +1,22 @@ + +package p { + + trait T[A] + + class X { + override def toString() = "p.X" + } + object X { + implicit val tx: T[X] = new T[X] { } + } + 
+ package q { + //import p.X // "permanently hidden" + object Test { + // previously, picked p.q.X + // This file compiles by itself; + // from our perspective, the other X renders our X ambiguous + implicitly[T[X]] // ambiguous + } + } +} diff --git a/test/files/neg/t10662b.check b/test/files/neg/t10662b.check new file mode 100644 index 000000000000..f49e4e4c3ec1 --- /dev/null +++ b/test/files/neg/t10662b.check @@ -0,0 +1,6 @@ +px_2.scala:16: error: reference to X is ambiguous; +it is both defined in package p and imported subsequently by +import r.X + implicitly[T[X]] // ambiguous + ^ +1 error diff --git a/test/files/neg/t10662b/pqx_1.scala b/test/files/neg/t10662b/pqx_1.scala new file mode 100644 index 000000000000..1ec2d4c27d7a --- /dev/null +++ b/test/files/neg/t10662b/pqx_1.scala @@ -0,0 +1,6 @@ + +package p.q + +class X { + override def toString() = "p.q.X" +} diff --git a/test/files/neg/t10662b/px_2.scala b/test/files/neg/t10662b/px_2.scala new file mode 100644 index 000000000000..604aa6c1e516 --- /dev/null +++ b/test/files/neg/t10662b/px_2.scala @@ -0,0 +1,23 @@ + +package p { + + trait T[A] + + class X { + override def toString() = "p.X" + } + object X { + implicit val tx: T[X] = new T[X] { } + } + + package q { + import r.X + object Test { + implicitly[T[X]] // ambiguous + } + } + + package r { + class X + } +} diff --git a/test/files/neg/t10678.check b/test/files/neg/t10678.check index d73e3ca30649..15a3db301298 100644 --- a/test/files/neg/t10678.check +++ b/test/files/neg/t10678.check @@ -1,11 +1,11 @@ -t10678.scala:7: error: ';' expected but '<:' found. +t10678.scala:8: error: ';' expected but '<:' found. class C <: T { ^ -t10678.scala:10: error: ';' expected but '<:' found. +t10678.scala:11: error: ';' expected but '<:' found. 
object O <: T { ^ -t10678.scala:5: warning: Using `<:` for `extends` is deprecated +t10678.scala:6: warning: Using `<:` for `extends` is deprecated [quickfixable] trait U <: T ^ -one warning found -two errors found +1 warning +2 errors diff --git a/test/files/neg/t10678.scala b/test/files/neg/t10678.scala index 587fa3a3bc5b..62bb3025ef06 100644 --- a/test/files/neg/t10678.scala +++ b/test/files/neg/t10678.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// trait T diff --git a/test/files/neg/t10695.check b/test/files/neg/t10695.check index 1ece3a4d9d20..623a8ba5dd95 100644 --- a/test/files/neg/t10695.check +++ b/test/files/neg/t10695.check @@ -1,4 +1,4 @@ t10695.scala:6: error: stable identifier required, but X.raw found. val node: raw.Node = null ^ -one error found +1 error diff --git a/test/files/neg/t10695.scala b/test/files/neg/t10695.scala index 580d915615ca..28c31c545fb9 100644 --- a/test/files/neg/t10695.scala +++ b/test/files/neg/t10695.scala @@ -5,7 +5,7 @@ object Main extends App { val node: raw.Node = null - Seq().fold(node)(_ => _) + Seq().foldLeft(node)(_ => _) } diff --git a/test/files/neg/t10700-message.check b/test/files/neg/t10700-message.check new file mode 100644 index 000000000000..51f414b81702 --- /dev/null +++ b/test/files/neg/t10700-message.check @@ -0,0 +1,4 @@ +usage_2.scala:3: error: test type arguments [Int] do not conform to empty type parameter list + println(scala.testing.Macros.m[Int]) + ^ +1 error diff --git a/test/files/neg/t10700-message/macros_1.scala b/test/files/neg/t10700-message/macros_1.scala new file mode 100644 index 000000000000..72f588732165 --- /dev/null +++ b/test/files/neg/t10700-message/macros_1.scala @@ -0,0 +1,26 @@ + +package scala +package testing + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object Macros { + def m[A]: String = macro Impls.mImpl[A] +} + +object Impls { + def mImpl[A : c.WeakTypeTag](c: 
Context): c.Expr[String] = { + import c._ + val g = universe.asInstanceOf[scala.tools.nsc.Global] + import g.typer.infer.InferErrorGen._ + val t = implicitly[c.WeakTypeTag[A]].tpe.asInstanceOf[g.analyzer.global.Type] + val msg = NotWithinBoundsErrorMessage(prefix = "test ", targs = List(t), tparams = Nil, explaintypes = false) + abort(macroApplication.pos, msg) + Expr[String] { + import universe._ + Literal(Constant(msg)) + } + } +} +// was: java.util.NoSuchElementException: head of empty list diff --git a/test/files/neg/t10700-message/usage_2.scala b/test/files/neg/t10700-message/usage_2.scala new file mode 100644 index 000000000000..3892d2a09b68 --- /dev/null +++ b/test/files/neg/t10700-message/usage_2.scala @@ -0,0 +1,4 @@ + +object usage extends App { + println(scala.testing.Macros.m[Int]) +} diff --git a/test/files/neg/t10700.check b/test/files/neg/t10700.check new file mode 100644 index 000000000000..72141085385b --- /dev/null +++ b/test/files/neg/t10700.check @@ -0,0 +1,4 @@ +t10700.scala:3: error: can't existentially abstract over parameterized type _1[_] + def foo(b: Boolean) = if (b) new Foo[List] else new Foo[Option] + ^ +1 error diff --git a/test/files/neg/t10700.scala b/test/files/neg/t10700.scala new file mode 100644 index 000000000000..3fd610a1a6de --- /dev/null +++ b/test/files/neg/t10700.scala @@ -0,0 +1,4 @@ +object Test { + class Foo[F[_]] + def foo(b: Boolean) = if (b) new Foo[List] else new Foo[Option] +} diff --git a/test/files/neg/t10701.check b/test/files/neg/t10701.check index d58fdf52fa81..f869fe0bb54d 100644 --- a/test/files/neg/t10701.check +++ b/test/files/neg/t10701.check @@ -1,4 +1,4 @@ -t10701/Test.java:6: warning: [deprecation] whatever() in Meh has been deprecated +t10701/Test.java:4: warning: [deprecation] whatever() in Meh has been deprecated Meh.whatever(); ^ error: warnings found and -Werror specified diff --git a/test/files/neg/t10701/Meh.scala b/test/files/neg/t10701/Meh.scala index afac4fea5a86..558533613e78 100644 --- 
a/test/files/neg/t10701/Meh.scala +++ b/test/files/neg/t10701/Meh.scala @@ -1,3 +1,3 @@ object Meh { - @deprecated("","") def whatever {} -} \ No newline at end of file + @deprecated("","") def whatever: Unit = {} +} diff --git a/test/files/neg/t10701/Test.java b/test/files/neg/t10701/Test.java index c55bc52e128b..ffd525b77f1d 100644 --- a/test/files/neg/t10701/Test.java +++ b/test/files/neg/t10701/Test.java @@ -1,8 +1,6 @@ -/* - * javac: -Werror -deprecation - */ +//> using javacOpt -Werror -deprecation public class Test { public static void main(String [] args) { Meh.whatever(); } -} \ No newline at end of file +} diff --git a/test/files/neg/t10729.check b/test/files/neg/t10729.check new file mode 100644 index 000000000000..a4143cb6b0e6 --- /dev/null +++ b/test/files/neg/t10729.check @@ -0,0 +1,20 @@ +AbstractAnnotation.scala:6: error: class AbstractAnnotation is abstract; cannot be instantiated + 1: @AbstractAnnotation + ^ +ArrayAsAnnotation.scala:2: error: class Array does not extend scala.annotation.Annotation + 1: @Array(10) + ^ +SeqAsAnnotation.scala:2: error: trait Seq is abstract; cannot be instantiated + 1: @Seq(10) + ^ +Switch.scala:4: error: class switch does not extend scala.annotation.Annotation + def test(x: Int) = (x: @switch) match { + ^ +Switch.scala:1: warning: imported `switch` is permanently hidden by definition of class switch +import annotation.switch + ^ +TraitAnnotation.scala:6: error: trait TraitAnnotation is abstract; cannot be instantiated + 1: @TraitAnnotation + ^ +1 warning +5 errors diff --git a/test/files/neg/t10729/AbstractAnnotation.scala b/test/files/neg/t10729/AbstractAnnotation.scala new file mode 100644 index 000000000000..203380c55a62 --- /dev/null +++ b/test/files/neg/t10729/AbstractAnnotation.scala @@ -0,0 +1,7 @@ +import scala.annotation.Annotation + +abstract class AbstractAnnotation() extends Annotation {} + +object AbstractAnnotationFail { + 1: @AbstractAnnotation +} \ No newline at end of file diff --git 
a/test/files/neg/t10729/ArrayAsAnnotation.scala b/test/files/neg/t10729/ArrayAsAnnotation.scala new file mode 100644 index 000000000000..a69282434856 --- /dev/null +++ b/test/files/neg/t10729/ArrayAsAnnotation.scala @@ -0,0 +1,3 @@ +object ArrayAsAnnotation { + 1: @Array(10) +} \ No newline at end of file diff --git a/test/files/neg/t10729/SeqAsAnnotation.scala b/test/files/neg/t10729/SeqAsAnnotation.scala new file mode 100644 index 000000000000..f381317e7b8f --- /dev/null +++ b/test/files/neg/t10729/SeqAsAnnotation.scala @@ -0,0 +1,3 @@ +object SeqAsAnnotation { + 1: @Seq(10) +} \ No newline at end of file diff --git a/test/files/neg/t10729/Switch.scala b/test/files/neg/t10729/Switch.scala new file mode 100644 index 000000000000..cea73df36d4b --- /dev/null +++ b/test/files/neg/t10729/Switch.scala @@ -0,0 +1,7 @@ +import annotation.switch + +class switch { + def test(x: Int) = (x: @switch) match { + case 1 | 2 | 3 => () + } +} \ No newline at end of file diff --git a/test/files/neg/t10729/TraitAnnotation.scala b/test/files/neg/t10729/TraitAnnotation.scala new file mode 100644 index 000000000000..d40517d3036a --- /dev/null +++ b/test/files/neg/t10729/TraitAnnotation.scala @@ -0,0 +1,7 @@ +import scala.annotation.Annotation + +trait TraitAnnotation extends Annotation {} + +object TraitAnnotationFail { + 1: @TraitAnnotation +} \ No newline at end of file diff --git a/test/files/neg/t10731.check b/test/files/neg/t10731.check index d5e345c6f341..d554d493ae38 100644 --- a/test/files/neg/t10731.check +++ b/test/files/neg/t10731.check @@ -1,4 +1,4 @@ t10731.scala:3: error: stable identifier required, but C.this.eq found. 
val eq.a = 1 ^ -one error found +1 error diff --git a/test/files/neg/t10733.check b/test/files/neg/t10733.check new file mode 100644 index 000000000000..000a8a1021d9 --- /dev/null +++ b/test/files/neg/t10733.check @@ -0,0 +1,13 @@ +t10733.scala:6: error: unbound wildcard type + val a: T[_ <: _] = null + ^ +t10733.scala:7: error: unbound wildcard type + val b: T[_ >: _] = null + ^ +t10733.scala:9: error: unbound wildcard type + def x[Y <: _] = null + ^ +t10733.scala:10: error: unbound wildcard type + def y[X >: _ <: Int] = null + ^ +4 errors diff --git a/test/files/neg/t10733.scala b/test/files/neg/t10733.scala new file mode 100644 index 000000000000..f35a8c3dbb2e --- /dev/null +++ b/test/files/neg/t10733.scala @@ -0,0 +1,16 @@ +//> using options -Ystop-after:parser +trait T[_] +trait U[_, _] + +object Test { + val a: T[_ <: _] = null + val b: T[_ >: _] = null + + def x[Y <: _] = null + def y[X >: _ <: Int] = null + + val ok1 : ((_, Int) => Int) = (_, _) => 1 + val ok2 : (_, Long) = (1L, 1L) + val ok3 : _ => String = (_: Class[_]).toString + +} diff --git a/test/files/neg/t10748.check b/test/files/neg/t10748.check new file mode 100644 index 000000000000..e7f0b383cecc --- /dev/null +++ b/test/files/neg/t10748.check @@ -0,0 +1,10 @@ +t10748.scala:2: error: p is not an enclosing class +class X { private[p] class C(i: Int = 42) ; def c = new C(17) } + ^ +t10748.scala:5: error: p is not an enclosing class + private[p] class D(i: Int = 42) { def f = new Q } + ^ +t10748.scala:5: error: not found: type Q + private[p] class D(i: Int = 42) { def f = new Q } + ^ +3 errors diff --git a/test/files/neg/t10748.scala b/test/files/neg/t10748.scala new file mode 100644 index 000000000000..dcdeeed1f7c3 --- /dev/null +++ b/test/files/neg/t10748.scala @@ -0,0 +1,56 @@ + +class X { private[p] class C(i: Int = 42) ; def c = new C(17) } + +class Y { + private[p] class D(i: Int = 42) { def f = new Q } + class Z { def d = new D() } +} + +/* + * +test/files/pos/t10748.scala:2: error: p is not 
an enclosing class +class X { private[p] class C(i: Int = 42) ; def c = new C(17) } + ^ +error: java.lang.AssertionError: assertion failed: + C + while compiling: test/files/pos/t10748.scala + during phase: globalPhase=typer, enteringPhase=namer + library version: version 2.13.0-20180714-072842-414f884 + compiler version: version 2.13.0-20180714-072842-414f884 + reconstructed args: -d /tmp + + last tree to typer: TypeTree(class C) + tree position: line 2 of test/files/pos/t10748.scala + tree tpe: X.this.C + symbol: class C in class X + symbol definition: class C extends AnyRef (a ClassSymbol) + symbol package: + symbol owners: class C -> class X + call site: constructor C in class C in package + +== Source file context for tree position == + + 1 + 2 class X { private[p] class C(i: Int = 42) ; def c = new C(17) } + 3 + 4 + at scala.reflect.internal.SymbolTable.throwAssertionError(SymbolTable.scala:162) + at scala.reflect.internal.SymbolTable.assert(SymbolTable.scala:139) + at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1508) + at scala.reflect.internal.Symbols$Symbol.initialize(Symbols.scala:1675) + at scala.tools.nsc.typechecker.Namers$Namer$DefaultGetterInCompanion.(Namers.scala:1601) + at scala.tools.nsc.typechecker.Namers$Namer$DefaultGetterNamerSearch$.apply(Namers.scala:1591) + at scala.tools.nsc.typechecker.Namers$Namer.addDefaultGetters(Namers.scala:1483) + at scala.tools.nsc.typechecker.Namers$Namer.methodSig(Namers.scala:1396) + at scala.tools.nsc.typechecker.Namers$Namer.memberSig(Namers.scala:1859) + at scala.tools.nsc.typechecker.Namers$Namer.typeSig(Namers.scala:1825) + at scala.tools.nsc.typechecker.Namers$Namer$MonoTypeCompleter.completeImpl(Namers.scala:849) + at scala.tools.nsc.typechecker.Namers$LockingTypeCompleter.complete(Namers.scala:2009) + at scala.tools.nsc.typechecker.Namers$LockingTypeCompleter.complete$(Namers.scala:2007) + at scala.tools.nsc.typechecker.Namers$TypeCompleterBase.complete(Namers.scala:2002) + at 
scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1527) + at scala.reflect.internal.Symbols.argsDependOnPrefix(Symbols.scala:3763) + at scala.reflect.internal.Symbols.argsDependOnPrefix$(Symbols.scala:3751) + at scala.reflect.internal.SymbolTable.argsDependOnPrefix(SymbolTable.scala:17) + at scala.tools.nsc.typechecker.Typers$Typer.typedSelect$1(Typers.scala:5046) + */ diff --git a/test/files/neg/t10748b.check b/test/files/neg/t10748b.check new file mode 100644 index 000000000000..e1534c42293e --- /dev/null +++ b/test/files/neg/t10748b.check @@ -0,0 +1,7 @@ +t10748b.scala:2: error: p is not an enclosing class +private[p] class C(i: Int = 42) { def c = new C() } + ^ +t10748b.scala:4: error: p is not an enclosing class +private[p] class D(i: Int = 42) { def f = new Q } + ^ +2 errors diff --git a/test/files/neg/t10748b.scala b/test/files/neg/t10748b.scala new file mode 100644 index 000000000000..7435c9ea6741 --- /dev/null +++ b/test/files/neg/t10748b.scala @@ -0,0 +1,5 @@ + +private[p] class C(i: Int = 42) { def c = new C() } + +private[p] class D(i: Int = 42) { def f = new Q } +class X { def d = new D() } diff --git a/test/files/neg/t10752.check b/test/files/neg/t10752.check index 3c3b180d3181..76d046bf2408 100644 --- a/test/files/neg/t10752.check +++ b/test/files/neg/t10752.check @@ -1,9 +1,12 @@ +Test_2.scala:2: warning: class DeprecatedClass in package p1 is deprecated +object Test extends p1.DeprecatedClass { + ^ Test_2.scala:3: warning: class DeprecatedClass in package p1 is deprecated def useC = p1.DeprecatedClass.foo ^ Test_2.scala:4: warning: method foo in class DeprecatedMethod is deprecated def useM = p1.DeprecatedMethod.foo ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t10752/Test_2.scala b/test/files/neg/t10752/Test_2.scala index d656eb95ae3f..18b74b3cb815 100644 --- a/test/files/neg/t10752/Test_2.scala +++ b/test/files/neg/t10752/Test_2.scala @@ -1,5 +1,5 @@ -// scalac: -Xfatal-warnings -deprecation -object Test { +//> using options -Xlint:deprecation -Werror +object Test extends p1.DeprecatedClass { def useC = p1.DeprecatedClass.foo def useM = p1.DeprecatedMethod.foo } diff --git a/test/files/neg/t10763.check b/test/files/neg/t10763.check new file mode 100644 index 000000000000..abc14f772004 --- /dev/null +++ b/test/files/neg/t10763.check @@ -0,0 +1,6 @@ +t10763.scala:6: warning: pattern var x in value $anonfun is never used + for (x @ 1 <- List(1.0)) () + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t10763.scala b/test/files/neg/t10763.scala new file mode 100644 index 000000000000..edf22e4e3c9f --- /dev/null +++ b/test/files/neg/t10763.scala @@ -0,0 +1,7 @@ +//> using options -Werror -Wunused + +object Test extends App { + // cf test/files/pos/t10763.scala test/files/run/t11938.scala + // withFilter is exempt from warning but foreach is not. + for (x @ 1 <- List(1.0)) () +} diff --git a/test/files/neg/t10785.check b/test/files/neg/t10785.check new file mode 100644 index 000000000000..a89726eeae3b --- /dev/null +++ b/test/files/neg/t10785.check @@ -0,0 +1,6 @@ +t10785.scala:6: error: type mismatch; + found : String + required: scala.util.Either[?,?] 
+ y <- 1 + x + ^ +1 error diff --git a/test/files/neg/t10785.scala b/test/files/neg/t10785.scala new file mode 100644 index 000000000000..e604c16d06c6 --- /dev/null +++ b/test/files/neg/t10785.scala @@ -0,0 +1,8 @@ +object t10785 { + implicit def safeInt(x: Int): Either[String, Int] = Right(x) + + for { + x <- Left("error") + y <- 1 + x + } yield x +} diff --git a/test/files/neg/t10790.check b/test/files/neg/t10790.check new file mode 100644 index 000000000000..3a3bb22abd43 --- /dev/null +++ b/test/files/neg/t10790.check @@ -0,0 +1,12 @@ +t10790.scala:8: warning: parameter x in method control is never used + def control(x: Int) = answer // warn to verify control + ^ +t10790.scala:10: warning: private class C in class X is never used + private class C // warn + ^ +t10790.scala:13: warning: pattern var y in class X is never used + private val Some(y) = Option(answer) // warn + ^ +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t10790.scala b/test/files/neg/t10790.scala new file mode 100644 index 000000000000..8cdfccb61e36 --- /dev/null +++ b/test/files/neg/t10790.scala @@ -0,0 +1,23 @@ +//> using options -Werror -Wunused + +import annotation.unused + +class X { + def f(@unused x: Int) = answer // no warn + + def control(x: Int) = answer // warn to verify control + + private class C // warn + @unused private class D // no warn + + private val Some(y) = Option(answer) // warn + private val Some(z @ _) = Option(answer) // no warn + + @unused("not updated") private var i = answer // no warn + def g = i + + @unused("not read") private var j = answer // no warn + def update() = j = 17 + + def answer: Int = 42 +} diff --git a/test/files/neg/t10803.check b/test/files/neg/t10803.check new file mode 100644 index 000000000000..4cbc9ffe5aa6 --- /dev/null +++ b/test/files/neg/t10803.check @@ -0,0 +1,10 @@ +t10803.scala:4: error: not found: type CocartesianMonoidalCategory + val sum: CocartesianMonoidalCategory[F] { + ^ 
+t10803.scala:18: error: not found: type CCC + } = new SymmetricDistributiveCategory[Function1] with CCC[Function1] { self => + ^ +t10803.scala:20: error: not found: type CocartesianMonoidalCategory + val sum = new CocartesianMonoidalCategory[F] {} + ^ +3 errors diff --git a/test/files/neg/t10803.scala b/test/files/neg/t10803.scala new file mode 100644 index 000000000000..33c6a209f283 --- /dev/null +++ b/test/files/neg/t10803.scala @@ -0,0 +1,23 @@ +import scala.language.higherKinds + +trait DistributiveCategory[F[_, _]] { self => + val sum: CocartesianMonoidalCategory[F] { + type Unit = self.Zero + } +} + +object DistributiveCategory { + trait SymmetricDistributiveCategory[F[_, _]] + + object instances { + type Void + type Dummy + + implicit val function1: SymmetricDistributiveCategory[Function1] { + type Obj[A] + } = new SymmetricDistributiveCategory[Function1] with CCC[Function1] { self => + type Zero + val sum = new CocartesianMonoidalCategory[F] {} + } + } +} diff --git a/test/files/neg/t10806.check b/test/files/neg/t10806.check new file mode 100644 index 000000000000..bf32017556e2 --- /dev/null +++ b/test/files/neg/t10806.check @@ -0,0 +1,12 @@ +t10806.scala:11: warning: unreachable code + case e: IllegalArgumentException => println(e.getMessage) + ^ +t10806.scala:17: warning: unreachable code + case e: IllegalArgumentException => println(e.getMessage) + ^ +t10806.scala:22: warning: unreachable code + case e: IllegalArgumentException => println(e.getMessage) + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t10806.scala b/test/files/neg/t10806.scala new file mode 100644 index 000000000000..54d4f9d53992 --- /dev/null +++ b/test/files/neg/t10806.scala @@ -0,0 +1,25 @@ +//> using options -Werror + +trait T { + + object Nope extends Throwable + + def f(): Unit = { + // anything but simple type tests forced analysis + try { 1 } catch { + case _: Exception => println("Something went wrong") + case e: IllegalArgumentException => println(e.getMessage) + case Nope => ??? + } + + try { 1 } catch { + case _: Exception => println("Something went wrong") + case e: IllegalArgumentException => println(e.getMessage) + } + + (new IllegalArgumentException()) match { + case _: Exception => println("Something went very wrong") + case e: IllegalArgumentException => println(e.getMessage) + } + } +} diff --git a/test/files/neg/t10820-warn.check b/test/files/neg/t10820-warn.check new file mode 100644 index 000000000000..e3855e984a61 --- /dev/null +++ b/test/files/neg/t10820-warn.check @@ -0,0 +1,6 @@ +t10820-warn.scala:13: warning: return statement uses an exception to pass control to the caller of the enclosing named method result + def result: Option[Int] = Try(0/0).recoverWith { case _ => return Some(-1) }.toOption + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t10820-warn.scala b/test/files/neg/t10820-warn.scala new file mode 100644 index 000000000000..57fc9bc984e1 --- /dev/null +++ b/test/files/neg/t10820-warn.scala @@ -0,0 +1,14 @@ +//> using options -Werror -Wperformance +// + +import util.Try + +trait Test { + + /* + * Inspired by + * https://stackoverflow.com/questions/50297478/why-scala-allows-return-from-recoverwith + * "I actually saw the post Don't Use Return in Scala long ago never paid attention." 
+ */ + def result: Option[Int] = Try(0/0).recoverWith { case _ => return Some(-1) }.toOption +} diff --git a/test/files/neg/t10886.check b/test/files/neg/t10886.check index 824f80b8713d..72a3771b0deb 100644 --- a/test/files/neg/t10886.check +++ b/test/files/neg/t10886.check @@ -12,4 +12,4 @@ t10886.scala:12: error: value -= is not a member of Int Expression does not convert to assignment because receiver is not assignable. !! -= 4 ^ -four errors found +4 errors diff --git a/test/files/neg/t10888.check b/test/files/neg/t10888.check index 371eaa959031..bda8838bf70b 100644 --- a/test/files/neg/t10888.check +++ b/test/files/neg/t10888.check @@ -8,7 +8,7 @@ t10888.scala:5: error: package scala.collection is not a value val x = scala.collection // package scala.collection is not a value ^ t10888.scala:7: error: object App is not a member of package scala -Note: trait App exists, but it has no companion object. +note: trait App exists, but it has no companion object. val z = scala.App // object App is not a member of package scala ^ -four errors found +4 errors diff --git a/test/files/neg/t10888.scala b/test/files/neg/t10888.scala index 742d9b3f1f62..c56043e9e136 100644 --- a/test/files/neg/t10888.scala +++ b/test/files/neg/t10888.scala @@ -6,4 +6,4 @@ object t10888 { val y = scala.collection.`package` val z = scala.App // object App is not a member of package scala -} \ No newline at end of file +} diff --git a/test/files/neg/t10896.check b/test/files/neg/t10896.check new file mode 100644 index 000000000000..6715f7929a74 --- /dev/null +++ b/test/files/neg/t10896.check @@ -0,0 +1,4 @@ +t10896.scala:3: error: malformed type: t10896.type#S + type F[A] = S[S] + ^ +1 error diff --git a/test/files/neg/t10896.scala b/test/files/neg/t10896.scala new file mode 100644 index 000000000000..c29cb92055b3 --- /dev/null +++ b/test/files/neg/t10896.scala @@ -0,0 +1,4 @@ +object t10896 { + type S[A[_]] = A[A[Int]] + type F[A] = S[S] +} diff --git a/test/files/neg/t10935.check 
b/test/files/neg/t10935.check index 477961ff8056..1d561107a82f 100644 --- a/test/files/neg/t10935.check +++ b/test/files/neg/t10935.check @@ -1,7 +1,8 @@ t10935.scala:4: error: value += is not a member of Int Expression does not convert to assignment because: value lengt is not a member of String + did you mean length? expansion: a.this.size = a.this.size.+(1.+("foo".)) size += 1 + "foo".lengt ^ -one error found +1 error diff --git a/test/files/neg/t10938.check b/test/files/neg/t10938.check new file mode 100644 index 000000000000..91268d06df7d --- /dev/null +++ b/test/files/neg/t10938.check @@ -0,0 +1,4 @@ +t10938.scala:4: error: object xxx is not a member of package scala + def f(): scala.xxx.XXX[_, _] = ??? + ^ +1 error diff --git a/test/files/neg/t10938.scala b/test/files/neg/t10938.scala new file mode 100644 index 000000000000..b880272378bc --- /dev/null +++ b/test/files/neg/t10938.scala @@ -0,0 +1,21 @@ +//> using options -feature + +trait T { + def f(): scala.xxx.XXX[_, _] = ??? +} + +/* +t10938.scala:4: error: object xxx is not a member of package scala + def f(): scala.xxx.XXX[_, _] = ??? + ^ +t10938.scala:4: warning: the existential type forSome { type _$1; type _$2 }, which cannot be expressed by wildcards, should be enabled +by making the implicit value scala.language.existentials visible. +This can be achieved by adding the import clause 'import scala.language.existentials' +or by setting the compiler option -language:existentials. +See the Scaladoc for value scala.language.existentials for a discussion +why the feature should be explicitly enabled. + def f(): scala.xxx.XXX[_, _] = ??? + ^ +one warning found +one error found +*/ diff --git a/test/files/neg/t11012.check b/test/files/neg/t11012.check new file mode 100644 index 000000000000..8e569776d6ae --- /dev/null +++ b/test/files/neg/t11012.check @@ -0,0 +1,12 @@ +t11012.scala:8: warning: class A is deprecated: class! 
+@A class C + ^ +t11012.scala:10: warning: constructor B in class B is deprecated: constructor! +@B class D // should warn + ^ +t11012.scala:13: warning: constructor B in class B is deprecated: constructor! + def foo = new B + ^ +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t11012.scala b/test/files/neg/t11012.scala new file mode 100644 index 000000000000..4f109c126324 --- /dev/null +++ b/test/files/neg/t11012.scala @@ -0,0 +1,14 @@ +//> using options -Xfatal-warnings -deprecation +import scala.annotation.StaticAnnotation + +@deprecated("class!", "") class A extends StaticAnnotation + +class B @deprecated("constructor!", "") extends StaticAnnotation + +@A class C + +@B class D // should warn + +trait T { + def foo = new B +} diff --git a/test/files/neg/t11042.check b/test/files/neg/t11042.check new file mode 100644 index 000000000000..52da8fd35140 --- /dev/null +++ b/test/files/neg/t11042.check @@ -0,0 +1,8 @@ +t11042.scala:9: error: incompatible type in overriding +def get: Int (defined in trait Base) + with override def get: Any (defined in trait SubBase); + found : Any + required: Int +class Hi extends Base[Int] with SubBase + ^ +1 error diff --git a/test/files/neg/t11042.scala b/test/files/neg/t11042.scala new file mode 100644 index 000000000000..d92e28f585f1 --- /dev/null +++ b/test/files/neg/t11042.scala @@ -0,0 +1,18 @@ +trait Base[+A] { + def get: A = null.asInstanceOf[A] +} + +trait SubBase extends Base[Any] { + override def get: Any = "" +} + +class Hi extends Base[Int] with SubBase + +object Test { + def main(args: Array[String]): Unit = { + val hi = new Hi + val base: Base[Int] = hi + val y: Int = base.get // ClassCastException: java.lang.String cannot be cast to java.lang.Integer + println(y) + } +} diff --git a/test/files/neg/t1106.check b/test/files/neg/t1106.check index f81d0c60258b..51d0c9ce8b4c 100644 --- a/test/files/neg/t1106.check +++ b/test/files/neg/t1106.check @@ -4,4 +4,4 @@ val p = new 
Par[String] t1106.scala:5: error: expected class or object definition new Foo[p.type](p) // crashes compiler ^ -two errors found +2 errors diff --git a/test/files/neg/t11102.check b/test/files/neg/t11102.check new file mode 100644 index 000000000000..b23c79e154ea --- /dev/null +++ b/test/files/neg/t11102.check @@ -0,0 +1,13 @@ +t11102.scala:5: error: error during expansion of this match (this is a scalac bug). +The underlying error was: type mismatch; + found : Seq[MutableCons] (in scala.collection.mutable) + required: Seq[MutableCons] (in scala.collection.immutable) + def f(x: MutableSeq) = x match { + ^ +t11102.scala:8: error: error during expansion of this match (this is a scalac bug). +The underlying error was: type mismatch; + found : Seq[CollectionCons] (in scala.collection) + required: Seq[CollectionCons] (in scala.collection.immutable) + def f(x: CollectionSeq) = x match { + ^ +2 errors diff --git a/test/files/neg/t11102.scala b/test/files/neg/t11102.scala new file mode 100644 index 000000000000..1615073093b0 --- /dev/null +++ b/test/files/neg/t11102.scala @@ -0,0 +1,117 @@ +object Test { + def f(x: ImmutableSeq) = x match { + case ImmutableCons(x, xs @ _*) => xs + } + def f(x: MutableSeq) = x match { + case MutableCons(x, xs @ _*) => xs + } + def f(x: CollectionSeq) = x match { + case CollectionCons(x, xs @ _*) => xs + } + def f(x: ScalaSeq) = x match { + case ScalaCons(x, xs @ _*) => xs + } + def f(x: DefaultSeq) = x match { + case DefaultCons(x, xs @ _*) => xs + } +} + +/** + * collection.immutable.Seq + */ +abstract class ImmutableSeq +extends collection.immutable.Seq[Int] + with UnimplementedSeq + +object ImmutableCons { + def unapplySeq(x: ImmutableCons) = + Some((x.first, x.more)) +} + +abstract class ImmutableCons +extends ImmutableSeq { + def first: Int + def more: collection.immutable.Seq[ImmutableCons] +} + +/** + * collection.mutable.Seq + */ +abstract class MutableSeq +extends collection.mutable.Seq[Int] +with UnimplementedSeq + +object 
MutableCons { + def unapplySeq(x: MutableCons) = + Some((x.first, x.more)) // ! +} + +abstract class MutableCons +extends MutableSeq { + def first: Int + def more: collection.mutable.Seq[MutableCons] +} + +/** + * collection.Seq + */ +abstract class CollectionSeq +extends collection.Seq[Int] +with UnimplementedSeq + +object CollectionCons { + def unapplySeq(x: CollectionCons) = + Some((x.first, x.more)) // ! +} + +abstract class CollectionCons +extends CollectionSeq { + def first: Int + def more: collection.Seq[CollectionCons] +} + +/** + * scala.Seq + */ +abstract class ScalaSeq +extends collection.Seq[Int] +with UnimplementedSeq + +object ScalaCons { + def unapplySeq(x: ScalaCons) = + Some((x.first, x.more)) +} + +abstract class ScalaCons +extends ScalaSeq { + def first: Int + def more: scala.Seq[ScalaCons] +} + +/** + * Seq + */ +abstract class DefaultSeq +extends Seq[Int] +with UnimplementedSeq + +object DefaultCons { + def unapplySeq(x: DefaultCons) = + Some((x.first, x.more)) +} + +abstract class DefaultCons +extends DefaultSeq { + def first: Int + def more: Seq[DefaultCons] +} + +/** + * Unimplemented sequence. + */ +trait UnimplementedSeq { + def iterator: Iterator[Int] = ??? + def apply(i: Int): Int = ??? + def length: Int = ??? + def update(idx: Int, elem: Int): Unit = ??? 
+} diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check index e6058bf176f8..61b4c5972836 100644 --- a/test/files/neg/t1112.check +++ b/test/files/neg/t1112.check @@ -1,4 +1,4 @@ -t1112.scala:12: error: too many arguments (2) for method call: (p: Int)(f: => Test.this.Type1)Unit +t1112.scala:12: error: too many arguments (found 2, expected 1) for method call: (p: Int)(f: => Test.this.Type1): Unit call(0,() => System.out.println("here we are")) ^ -one error found +1 error diff --git a/test/files/neg/t11136.check b/test/files/neg/t11136.check new file mode 100644 index 000000000000..1527c553e23a --- /dev/null +++ b/test/files/neg/t11136.check @@ -0,0 +1,6 @@ +t11136.scala:12: error: cannot override final member: +final override def c(x: Int): Int (defined in trait SO) + with override def c(x: Int): Int (defined in trait SOIO) +class L extends AS with SOSO // error expected: c definined in SOIO overrides final method c in SO + ^ +1 error diff --git a/test/files/neg/t11136.scala b/test/files/neg/t11136.scala new file mode 100644 index 000000000000..4ed596a9d53a --- /dev/null +++ b/test/files/neg/t11136.scala @@ -0,0 +1,12 @@ +trait IO { + def c(x: Int): Int = ??? +} +trait SO extends IO { + override final def c(x: Int): Int = ??? +} +trait SOIO extends IO { + override def c(x: Int): Int = ??? 
+} +trait SOSO extends SOIO with SO +abstract class AS extends SO +class L extends AS with SOSO // error expected: c definined in SOIO overrides final method c in SO \ No newline at end of file diff --git a/test/files/neg/t11136_override_conflict.check b/test/files/neg/t11136_override_conflict.check new file mode 100644 index 000000000000..bf9aa184869b --- /dev/null +++ b/test/files/neg/t11136_override_conflict.check @@ -0,0 +1,29 @@ +t11136_override_conflict.scala:99: error: incompatible type in overriding +def empty: Qu[A] (defined in trait ArDqOps) + with def empty: ArDq[A] (defined in class ArDq); + found : ArDq[A] + required: Qu[A] +class Qu[A] extends ArDq[A] + ^ +t11136_override_conflict.scala:110: error: incompatible type in overriding +def empty: Qu[A] (defined in trait ArDqOps) + with def empty: ArDq[A] (defined in class ArDq); + found : ArDq[A] + required: Qu[A] + override def from[A](source: It[A]): Qu[A] = new Qu[A]{} + ^ +t11136_override_conflict.scala:111: error: incompatible type in overriding +def empty: Qu[A] (defined in trait ArDqOps) + with def empty: ArDq[A] (defined in class ArDq); + found : ArDq[A] + required: Qu[A] + override def newBuilder[A]: Bldr[A, Qu[A]] = new Bldr[A, Qu[A]] { def result(): Qu[A] = new Qu[A]{} } + ^ +t11136_override_conflict.scala:120: error: incompatible type in overriding +def empty: Qu[String] (defined in trait ArDqOps) + with def empty: ArDq[String] (defined in class ArDq); + found : ArDq[String] + required: Qu[String] + (new Qu[String]{}: ArDqOps[String, Qu, Qu[String]]).empty.bazza + ^ +4 errors diff --git a/test/files/neg/t11136_override_conflict.scala b/test/files/neg/t11136_override_conflict.scala new file mode 100644 index 000000000000..aa5d61f4b952 --- /dev/null +++ b/test/files/neg/t11136_override_conflict.scala @@ -0,0 +1,122 @@ + +import annotation.unchecked.uncheckedVariance + +trait Bldr[-A, +To] { self => + def result(): To + def mapResult[NTo](f: To => NTo): Bldr[A, NTo] = new Bldr[A, NTo] { + def 
result(): NTo = f(self.result()) + } +} +trait ItFact[+CC[_]] { + def from[A](source: It[A]): CC[A] + def newBuilder[A]: Bldr[A, CC[A]] +} + +trait DefaultFromSpecific[+A, +CC[_], +C] { + protected def fromSpecific(coll: It[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def newSpecificBuilder: Bldr[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] + def iterableFactory: ItFact[CC] +} + +trait ItOps[+A, +CC[_], +C] { + def it: It[A] + + protected def newSpecificBuilder: Bldr[A @uncheckedVariance, C] + protected def fromSpecific(coll: It[A @uncheckedVariance]): C + def iterableFactory: ItFact[CC] + + def filter: C = fromSpecific(it) + def strictFilter: C = newSpecificBuilder.result() + def map[B](f: A => B): CC[B] = iterableFactory.newBuilder.result() +} + +trait It[+A] extends ItOps[A, It, It[A]] with DefaultFromSpecific[A, It, It[A]] { + def it: It[A] = this + def iterableFactory: ItFact[It] = It +} + +object It extends ItFact[It] { + def from[A](source: It[A]): It[A] = new It[A]{} + def newBuilder[A]: Bldr[A,It[A]] = new Bldr[A, It[A]] { def result(): It[A] = new It[A]{} } +} + +trait SqOps[A, +CC[_], +C] extends ItOps[A, CC, C] + +trait Sq[A] extends It[A] with SqOps[A, Sq, Sq[A]] with DefaultFromSpecific[A, Sq, Sq[A]] { + override def iterableFactory: ItFact[Sq] = Sq + + def flup = 0 +} + +object Sq extends ItFact[Sq] { + def from[A](source: It[A]): Sq[A] = new Sq[A]{} + def newBuilder[A]: Bldr[A, Sq[A]] = new Bldr[A, Sq[A]] { def result(): Sq[A] = new Sq[A]{} } +} + +trait Acc[A, +CC[X] <: Sq[X], +C <: Sq[A]] extends Sq[A] with SqOps[A, CC, C] { + protected def fromSpecificImpl(coll: It[A]): C + protected def newSpecificBuilderImpl: Bldr[A, C] + protected def iterableFactoryImpl: ItFact[CC] + + protected override def fromSpecific(coll: It[A]): C = fromSpecificImpl(coll) + protected override def newSpecificBuilder: Bldr[A, C] = newSpecificBuilderImpl + override def iterableFactory: 
ItFact[CC] = iterableFactoryImpl +} +trait AnyAcc[A] extends Acc[A, AnyAcc, AnyAcc[A]] { + protected override def fromSpecificImpl(coll: It[A]): AnyAcc[A] = iterableFactory.from(coll) + protected override def newSpecificBuilderImpl: Bldr[A, AnyAcc[A]] = iterableFactory.newBuilder + override def iterableFactoryImpl: ItFact[AnyAcc] = AnyAcc + + def flap = 1 +} +object AnyAcc extends ItFact[AnyAcc] { + def from[A](source: It[A]): AnyAcc[A] = new AnyAcc[A] {} + def newBuilder[A]: Bldr[A, AnyAcc[A]] = new Bldr[A, AnyAcc[A]] { def result(): AnyAcc[A] = new AnyAcc[A]{} } +} +trait IntAcc extends Acc[Int, AnyAcc, IntAcc] { + protected override def fromSpecificImpl(coll: It[Int]): IntAcc = new IntAcc{} + protected override def newSpecificBuilderImpl: Bldr[Int, IntAcc] = new Bldr[Int, IntAcc] { def result(): IntAcc = new IntAcc{} } + override def iterableFactoryImpl: ItFact[AnyAcc] = AnyAcc +} + +class ArDq[A] extends Sq[A] + with SqOps[A, ArDq, ArDq[A]] + with ArDqOps[A, ArDq, ArDq[A]] + with DefaultFromSpecific[A, ArDq, ArDq[A]] { + override def iterableFactory: ItFact[ArDq] = ArDq + def empty: ArDq[A] = new ArDq[A]{} +} + +object ArDq extends ItFact[ArDq] { + override def from[A](source: It[A]): ArDq[A] = new ArDq[A]{} + override def newBuilder[A]: Bldr[A, ArDq[A]] = new Bldr[A, ArDq[A]] { def result(): ArDq[A] = new ArDq[A]{} } +} + +trait ArDqOps[A, +CC[_], +C <: AnyRef] extends SqOps[A, CC, C] { + def empty: C +} + +class Qu[A] extends ArDq[A] + with SqOps[A, Qu, Qu[A]] + with ArDqOps[A, Qu, Qu[A]] + with DefaultFromSpecific[A, Qu, Qu[A]] { + override def iterableFactory: ItFact[Qu] = Qu + // should get a missing override error, but don't. if `ArDq` and `Qu` are both traits, the error shows. 
+ // override def empty: Qu[A] = new Qu[A]{} + def bazza = 2 +} + +object Qu extends ItFact[Qu] { + override def from[A](source: It[A]): Qu[A] = new Qu[A]{} + override def newBuilder[A]: Bldr[A, Qu[A]] = new Bldr[A, Qu[A]] { def result(): Qu[A] = new Qu[A]{} } +} + +object Test { + def main(args: Array[String]): Unit = { + val sq = new Sq[String] { } + println(sq.filter.flup) + val ia = new IntAcc{} + println(((ia.filter: IntAcc).map(_ => ""): AnyAcc[String]).flap) + (new Qu[String]{}: ArDqOps[String, Qu, Qu[String]]).empty.bazza + } +} diff --git a/test/files/neg/t11136_was_t4731.check b/test/files/neg/t11136_was_t4731.check new file mode 100644 index 000000000000..fba38ea2591f --- /dev/null +++ b/test/files/neg/t11136_was_t4731.check @@ -0,0 +1,8 @@ +t11136_was_t4731.scala:11: error: incompatible type in overriding +def foo(arg: java.util.Comparator[String]): Unit (defined in trait Trait1) + with def foo(arg: java.util.Comparator[String]): Int (defined in trait Trait2); + found : (arg: java.util.Comparator[String]): Int + required: (arg: java.util.Comparator[String]): Unit +class Class1 extends Trait2[String] { } + ^ +1 error diff --git a/test/files/neg/t11136_was_t4731.scala b/test/files/neg/t11136_was_t4731.scala new file mode 100644 index 000000000000..78518f7c8d21 --- /dev/null +++ b/test/files/neg/t11136_was_t4731.scala @@ -0,0 +1,18 @@ +import java.util.Comparator + +/* This accidentally started as a pos/ test, with the initial fix addressing a crasher in the backend. +The real problem is that `foo`'s return type is not refined covariantly, so the override should be ruled out. +Refchecks was too eager in pruning the pairs it considers, so this was never detected. 
+*/ +trait Trait1[T] { def foo(arg: Comparator[T]): Unit } + +trait Trait2[T] extends Trait1[T] { def foo(arg: Comparator[String]): Int = 0 } + +class Class1 extends Trait2[String] { } + +object Test { + def main(args: Array[String]): Unit = { + val c = new Class1 + c.foo(Ordering[String]) + } +} diff --git a/test/files/neg/t112706A.check b/test/files/neg/t112706A.check index ad403ab13452..73ee944146df 100644 --- a/test/files/neg/t112706A.check +++ b/test/files/neg/t112706A.check @@ -3,4 +3,4 @@ t112706A.scala:5: error: constructor cannot be instantiated to expected type; required: String case Tuple2(node,_) => ^ -one error found +1 error diff --git a/test/files/neg/t11282.check b/test/files/neg/t11282.check index a7b79c285598..b0436547f77d 100644 --- a/test/files/neg/t11282.check +++ b/test/files/neg/t11282.check @@ -4,7 +4,7 @@ t11282.scala:4: error: Unit does not take parameters t11282.scala:6: error: Unit does not take parameters b().fail[scala.Int] // error ^ -t11282.scala:9: error: wrong number of type parameters for method asInstanceOf of type [T0]=> T0 +t11282.scala:9: error: wrong number of type parameters for method asInstanceOf of type [T0]T0 Map().empty.asInstanceOf[String, (String, String)] // error ^ -three errors found +3 errors diff --git a/test/files/neg/t11337.check b/test/files/neg/t11337.check new file mode 100644 index 000000000000..32bccbdb4ebf --- /dev/null +++ b/test/files/neg/t11337.check @@ -0,0 +1,4 @@ +t11337.scala:3: error: trait Foo takes type parameters + val foo: Foo[Any] { type Bar = Any } = new Foo { def baz(): Any = () } + ^ +1 error diff --git a/test/files/neg/t11337.scala b/test/files/neg/t11337.scala new file mode 100644 index 000000000000..e5c8341b0442 --- /dev/null +++ b/test/files/neg/t11337.scala @@ -0,0 +1,4 @@ +trait Foo[T] +object Foo { + val foo: Foo[Any] { type Bar = Any } = new Foo { def baz(): Any = () } +} diff --git a/test/files/neg/t11351.check b/test/files/neg/t11351.check new file mode 100644 index 
000000000000..8c31542e8e7d --- /dev/null +++ b/test/files/neg/t11351.check @@ -0,0 +1,6 @@ +t11351.scala:17: error: illegal trait super target found for method foo required by trait C; + found : override def foo: X in trait B; + expected: def foo: Y in trait A +class Fail extends B with C + ^ +1 error diff --git a/test/files/neg/t11351.scala b/test/files/neg/t11351.scala new file mode 100644 index 000000000000..8d46da3b6b5b --- /dev/null +++ b/test/files/neg/t11351.scala @@ -0,0 +1,25 @@ +class X +class Y extends X + +trait A[+T] { + def foo: T = null.asInstanceOf[T] +} + +trait B extends A[X] { + override def foo: X = new X +} + +trait C extends A[Y] { + override def foo: Y = new Y + def superFoo: Y = super.foo // C will have an abstract `def C$$super$foo: Y` because of this call +} + +class Fail extends B with C +// Should generate `def C$$super$foo: Y = super[A].foo` and not `= super[B].foo` + +object Test { + def main(args: Array[String]): Unit = { + val y: Y = (new Fail).superFoo // Used to fail with a ClassCastException because of `Fail#C$$super$foo` being incorrect above + assert(y == null) + } +} diff --git a/test/files/neg/t11374.check b/test/files/neg/t11374.check new file mode 100644 index 000000000000..070198272152 --- /dev/null +++ b/test/files/neg/t11374.check @@ -0,0 +1,4 @@ +t11374.scala:8: error: _ is already defined as value _ + val `_` = 17 // not ok + ^ +1 error diff --git a/test/files/neg/t11374.scala b/test/files/neg/t11374.scala new file mode 100644 index 000000000000..f89143d4f27d --- /dev/null +++ b/test/files/neg/t11374.scala @@ -0,0 +1,9 @@ + +class C { + val `_` = 42 // ok +} + +class D { + val `_` = 42 + val `_` = 17 // not ok +} diff --git a/test/files/neg/t11374b.check b/test/files/neg/t11374b.check new file mode 100644 index 000000000000..aca074541fe0 --- /dev/null +++ b/test/files/neg/t11374b.check @@ -0,0 +1,13 @@ +t11374b.scala:3: error: not found: value _ +Identifiers enclosed in backticks are not pattern variables but match the 
value in scope. + val Some(`_`) = Option(42) // was crashola + ^ +t11374b.scala:6: error: not found: value _ +Identifiers enclosed in backticks are not pattern variables but match the value in scope. + val Some(`_`) = Option(42) // was crashola + ^ +t11374b.scala:3: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. + val Some(`_`) = Option(42) // was crashola + ^ +1 warning +2 errors diff --git a/test/files/neg/t11374b.scala b/test/files/neg/t11374b.scala new file mode 100644 index 000000000000..e3c6721547a6 --- /dev/null +++ b/test/files/neg/t11374b.scala @@ -0,0 +1,8 @@ + +class C { + val Some(`_`) = Option(42) // was crashola + + def f(): Unit = { + val Some(`_`) = Option(42) // was crashola + } +} diff --git a/test/files/neg/t11379a.check b/test/files/neg/t11379a.check new file mode 100644 index 000000000000..e02fdb938bfd --- /dev/null +++ b/test/files/neg/t11379a.check @@ -0,0 +1,6 @@ +t11379a.scala:17: error: type mismatch; + found : scala.util.Right[Nothing,Unit] + required: Unit => Unit + def test4: Either[Int, Unit] = Right(()).map(Right(())) + ^ +1 error diff --git a/test/files/neg/t11379a.scala b/test/files/neg/t11379a.scala new file mode 100644 index 000000000000..5ed2de4b315c --- /dev/null +++ b/test/files/neg/t11379a.scala @@ -0,0 +1,23 @@ +//> using options -Werror -Wvalue-discard +object UnitOfTrust { + import scala.util._ + + private def unitRight[A]: Either[A, Unit] = Right(()) + + // fails with: + // discarded non-Unit value + def test1: Either[Int, Unit] = Right(Right(())) + def test2: Either[Int, Unit] = Right(()).map(_ => unitRight[Int]) + def test3: Either[Int, Unit] = Right(()).map { case _ => unitRight[Int] } + + // fails with: + // error: type mismatch; + // found : scala.util.Right[Nothing,Unit] + // required: Unit => Unit + def test4: Either[Int, Unit] = Right(()).map(Right(())) + + // was: compiles just fine + def test5: Either[Int, Unit] = Right(()).map { case _ => unitRight } + 
def test6: Either[Int, Unit] = Right(()).map { _ => unitRight } + def test7: Either[Int, Unit] = Right(()).map(_ => unitRight) +} diff --git a/test/files/neg/t11379a2.check b/test/files/neg/t11379a2.check new file mode 100644 index 000000000000..06f7e46546c0 --- /dev/null +++ b/test/files/neg/t11379a2.check @@ -0,0 +1,21 @@ +t11379a2.scala:9: warning: discarded non-Unit value of type scala.util.Right[Nothing,Unit] + def test1: Either[Int, Unit] = Right(Right(())) + ^ +t11379a2.scala:10: warning: discarded non-Unit value of type scala.util.Either[Int,Unit] + def test2: Either[Int, Unit] = Right(()).map(_ => unitRight[Int]) + ^ +t11379a2.scala:11: warning: discarded non-Unit value of type scala.util.Either[Int,Unit] + def test3: Either[Int, Unit] = Right(()).map { case _ => unitRight[Int] } + ^ +t11379a2.scala:20: warning: discarded non-Unit value of type scala.util.Either[Nothing,Unit] + def test5: Either[Int, Unit] = Right(()).map { case _ => unitRight } + ^ +t11379a2.scala:21: warning: discarded non-Unit value of type scala.util.Either[Nothing,Unit] + def test6: Either[Int, Unit] = Right(()).map { _ => unitRight } + ^ +t11379a2.scala:22: warning: discarded non-Unit value of type scala.util.Either[Nothing,Unit] + def test7: Either[Int, Unit] = Right(()).map(_ => unitRight) + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/t11379a2.scala b/test/files/neg/t11379a2.scala new file mode 100644 index 000000000000..9dad59d6cab1 --- /dev/null +++ b/test/files/neg/t11379a2.scala @@ -0,0 +1,23 @@ +//> using options -Werror -Wvalue-discard +object UnitOfTrust { + import scala.util._ + + private def unitRight[A]: Either[A, Unit] = Right(()) + + // fails with: + // discarded non-Unit value + def test1: Either[Int, Unit] = Right(Right(())) + def test2: Either[Int, Unit] = Right(()).map(_ => unitRight[Int]) + def test3: Either[Int, Unit] = Right(()).map { case _ => unitRight[Int] } + + // fails with: + // error: type mismatch; + // found : scala.util.Right[Nothing,Unit] + // required: Unit => Unit + //def test4: Either[Int, Unit] = Right(()).map(Right(())) + + // was: compiles just fine + def test5: Either[Int, Unit] = Right(()).map { case _ => unitRight } + def test6: Either[Int, Unit] = Right(()).map { _ => unitRight } + def test7: Either[Int, Unit] = Right(()).map(_ => unitRight) +} diff --git a/test/files/neg/t11379b.check b/test/files/neg/t11379b.check new file mode 100644 index 000000000000..10b12f7179b0 --- /dev/null +++ b/test/files/neg/t11379b.check @@ -0,0 +1,9 @@ +t11379b.scala:5: warning: discarded non-Unit value of type OneTypeParam[Unit] + def checkCompiler: OneTypeParam[Unit] = unitValue.map(_ => unitValue) + ^ +t11379b.scala:11: warning: discarded non-Unit value of type TwoTypeParam[Nothing,Unit] + def checkCompiler: TwoTypeParam[String, Unit] = unitValue.map(_ => unitValue) + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t11379b.scala b/test/files/neg/t11379b.scala new file mode 100644 index 000000000000..0e00bcf3271b --- /dev/null +++ b/test/files/neg/t11379b.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Wvalue-discard +class OneTypeParam[B](value: B) { + def map[B1](fn: B => B1): OneTypeParam[B1] = new OneTypeParam(fn(value)) + def unitValue: OneTypeParam[Unit] = new OneTypeParam(()) + def checkCompiler: OneTypeParam[Unit] = unitValue.map(_ => unitValue) +} + +class TwoTypeParam[A, B](value: B) { + def map[B1](fn: B => B1): TwoTypeParam[A, B1] = new TwoTypeParam(fn(value)) + def unitValue[C]: TwoTypeParam[C, Unit] = new TwoTypeParam(()) + def checkCompiler: TwoTypeParam[String, Unit] = unitValue.map(_ => unitValue) +} diff --git a/test/files/neg/t11379c.check b/test/files/neg/t11379c.check new file mode 100644 index 000000000000..2ee886c2b83f --- /dev/null +++ b/test/files/neg/t11379c.check @@ -0,0 +1,15 @@ +t11379c.scala:8: warning: discarded non-Unit value of type OneTypeParam[Unit] + def checkCompilerUnTyped: OneTypeParam[Unit] = unitValue.map(_ => unitValue) + ^ +t11379c.scala:9: warning: discarded non-Unit value of type OneTypeParam[Unit] + def checkCompilerTypedInner: OneTypeParam[Unit] = unitValue.map(_ => typedValue) + ^ +t11379c.scala:10: warning: discarded non-Unit value of type OneTypeParam[Unit] + def checkCompilerTypedOuter: OneTypeParam[Unit] = typedValue.map(_ => unitValue) + ^ +t11379c.scala:11: warning: discarded non-Unit value of type OneTypeParam[Unit] + def checkCompilerTypedBoth: OneTypeParam[Unit] = typedValue.map(_ => typedValue) + ^ +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t11379c.scala b/test/files/neg/t11379c.scala new file mode 100644 index 000000000000..c1f21547d43d --- /dev/null +++ b/test/files/neg/t11379c.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Wvalue-discard +class OneTypeParam[B](value: B) { + def map[B1](fn: B => B1): OneTypeParam[B1] = new OneTypeParam(fn(value)) + + def unitValue: OneTypeParam[Unit] = new OneTypeParam(()) + def typedValue[C]: OneTypeParam[Unit] = new OneTypeParam(()) + + def checkCompilerUnTyped: OneTypeParam[Unit] = unitValue.map(_ => unitValue) + def checkCompilerTypedInner: OneTypeParam[Unit] = unitValue.map(_ => typedValue) + def checkCompilerTypedOuter: OneTypeParam[Unit] = typedValue.map(_ => unitValue) + def checkCompilerTypedBoth: OneTypeParam[Unit] = typedValue.map(_ => typedValue) +} diff --git a/test/files/neg/t11446.check b/test/files/neg/t11446.check new file mode 100644 index 000000000000..f9da7aa670fc --- /dev/null +++ b/test/files/neg/t11446.check @@ -0,0 +1,29 @@ +t11446.scala:29: error: object A is not a case class, nor does it have a valid unapply/unapplySeq member +Note: def unapply(s: String, strict: Boolean): Option[Int] exists in object A, but it cannot be used as an extractor as it has more than one (non-implicit) parameter + "a" match { case A(i) => i } + ^ +t11446.scala:30: error: object B is not a case class, nor does it have a valid unapply/unapplySeq member +Note: def unapply(s: String)(u: Int): Option[Int] exists in object B, but it cannot be used as an extractor due to its second non-implicit parameter list + "b" match { case B(i) => i } + ^ +t11446.scala:31: error: object C is not a case class, nor does it have a valid unapply/unapplySeq member +Note: def unapply(s: String, i: Int): Option[Int] exists in object C, but it cannot be used as an extractor as it has more than one (non-implicit) parameter + "c" match { case C(i) => i } + ^ +t11446.scala:32: error: object D is not a case class, nor does it have a valid 
unapply/unapplySeq member +Note: def unapply(): Option[Int] exists in object D, but it cannot be used as an extractor: an unapply method must accept a single argument + "d" match { case D(i) => i } + ^ +t11446.scala:33: error: object E is not a case class, nor does it have a valid unapply/unapplySeq member +Note: def unapply: Option[Int] exists in object E, but it cannot be used as an extractor: an unapply method must accept a single argument + "e" match { case E(i) => i } + ^ +t11446.scala:34: error: object F is not a case class, nor does it have a valid unapply/unapplySeq member +Note: val unapply: Option[Int] exists in object F, but it cannot be used as an extractor: an unapply method must accept a single argument + "f" match { case F(i) => i } + ^ +t11446.scala:35: error: object F is not a case class, nor does it have a valid unapply/unapplySeq member +Note: val unapply: Option[Int] exists in object F, but it cannot be used as an extractor: an unapply method must accept a single argument + "g" match { case F(i) => i } + ^ +7 errors diff --git a/test/files/neg/t11446.scala b/test/files/neg/t11446.scala new file mode 100644 index 000000000000..4015348b1d3d --- /dev/null +++ b/test/files/neg/t11446.scala @@ -0,0 +1,36 @@ +package t11446 + +object A { + def unapply(s: String, strict: Boolean = false) = + if (s == "") None else Some(s.length) +} +object B { + def unapply(s: String)(u: Int) = + if (s == "") None else Some(u) +} +object C { + def unapply(s: String, i: Int) = + if (s == "") None else Some(i) +} +object D { + def unapply(): Option[Int] = Some(1) +} +object E { + def unapply: Option[Int] = Some(1) +} +object F { + val unapply: Option[Int] = Some(1) +} +object G { + def unapply(va: String*): Option[String] = va.headOption +} + +object Test { + "a" match { case A(i) => i } + "b" match { case B(i) => i } + "c" match { case C(i) => i } + "d" match { case D(i) => i } + "e" match { case E(i) => i } + "f" match { case F(i) => i } + "g" match { case F(i) => i } 
+} \ No newline at end of file diff --git a/test/files/neg/t11517.check b/test/files/neg/t11517.check new file mode 100644 index 000000000000..d5c41efeb8b5 --- /dev/null +++ b/test/files/neg/t11517.check @@ -0,0 +1,57 @@ +t11517.scala:6: error: missing parameter type for expanded function (() => x$1.$plus(1)) + X.f(_ + 1) + ^ +t11517.scala:6: error: overloaded method f with alternatives: + (s: String)String + (i: Int)Int + does not match arguments (? => ?) + X.f(_ + 1) + ^ +t11517.scala:17: error: missing parameter type for expanded function (() => x$2.$plus(1)) + def `quite overloaded`(s: MyString): Int = s.indexOf(_ + 1) + ^ +t11517.scala:17: error: overloaded method indexOf with alternatives: + (s: String,begin: Int,end: Int)Int + (s: String,begin: Int)Int + (s: String)Int + (c: Int,begin: Int,end: Int)Int + (c: Int,begin: Int)Int + (c: Int)Int + does not match arguments (? => ?) with expected result type Int + def `quite overloaded`(s: MyString): Int = s.indexOf(_ + 1) + ^ +t11517.scala:19: error: overloaded method f with alternatives: + (s: String)String + (i: Int)Int + cannot be applied to (String, i: Int) + X.f("hello, world", i = 42) + ^ +t11517.scala:20: error: overloaded method f with alternatives: + (s: String)String + (i: Int)Int + cannot be applied to (String, Int => Int) + X.f("hello, world", (i: Int) => i) + ^ +t11517.scala:21: error: missing parameter type + X.f((i: Int, _) => i + 1) + ^ +t11517.scala:21: error: overloaded method f with alternatives: + (s: String)String + (i: Int)Int + does not match arguments ((Int, ?) => ?) + X.f((i: Int, _) => i + 1) + ^ +t11517.scala:22: error: missing parameter type + X.g(i => i) + ^ +t11517.scala:22: error: type mismatch; + found : ? => ? 
+ required: String + X.g(i => i) + ^ +t11517.scala:23: error: type mismatch; + found : Int => Int + required: String + X.g((i: Int) => i) + ^ +11 errors diff --git a/test/files/neg/t11517.scala b/test/files/neg/t11517.scala new file mode 100644 index 000000000000..c0429c86b88c --- /dev/null +++ b/test/files/neg/t11517.scala @@ -0,0 +1,32 @@ + +object X { def f(i: Int): Int = i; def f(s: String): String = s; def g(s: String) = s } + +// improve error message +object Test extends App { + X.f(_ + 1) + + // "abc".indexOf(_ + 1) unstable API across JDK versions + trait MyString { + def indexOf(c: Int): Int + def indexOf(c: Int, begin: Int): Int + def indexOf(c: Int, begin: Int, end: Int): Int + def indexOf(s: String): Int + def indexOf(s: String, begin: Int): Int + def indexOf(s: String, begin: Int, end: Int): Int + } + def `quite overloaded`(s: MyString): Int = s.indexOf(_ + 1) + + X.f("hello, world", i = 42) + X.f("hello, world", (i: Int) => i) + X.f((i: Int, _) => i + 1) + X.g(i => i) + X.g((i: Int) => i) +} +/* +t11517.scala:6: error: missing parameter type for expanded function (() => x$1.$plus(1)) + X.f(_ + 1) + ^ +t11517.scala:8: error: missing parameter type for expanded function (() => x$2.$plus(1)) + "abc".indexOf(_ + 1) + ^ +*/ diff --git a/test/files/neg/t11575b.check b/test/files/neg/t11575b.check new file mode 100644 index 000000000000..1f55871659e4 --- /dev/null +++ b/test/files/neg/t11575b.check @@ -0,0 +1,7 @@ +LocalImpl.scala:2: error: incompatible type in overriding +private[package ] var counts: Array[Int] (defined in class Cover); + found : String + required: Array[Int] + override var counts: String = "" + ^ +1 error diff --git a/test/files/neg/t11575b/Base.java b/test/files/neg/t11575b/Base.java new file mode 100644 index 000000000000..e32464388e21 --- /dev/null +++ b/test/files/neg/t11575b/Base.java @@ -0,0 +1,3 @@ +public class Base { + long[] counts; +} diff --git a/test/files/neg/t11575b/Cover.java b/test/files/neg/t11575b/Cover.java new file 
mode 100644 index 000000000000..52c0c49620d2 --- /dev/null +++ b/test/files/neg/t11575b/Cover.java @@ -0,0 +1,3 @@ +public class Cover extends Base { + int[] counts; +} diff --git a/test/files/neg/t11575b/LocalImpl.scala b/test/files/neg/t11575b/LocalImpl.scala new file mode 100644 index 000000000000..26a4cb2a31f9 --- /dev/null +++ b/test/files/neg/t11575b/LocalImpl.scala @@ -0,0 +1,3 @@ +class LocalImpl extends Cover { + override var counts: String = "" +} diff --git a/test/files/neg/t11591.check b/test/files/neg/t11591.check new file mode 100644 index 000000000000..4d110a4c3ab3 --- /dev/null +++ b/test/files/neg/t11591.check @@ -0,0 +1,4 @@ +t11591.scala:8: error: could not find implicit value for parameter e: Test.A + implicitly[A] + ^ +1 error diff --git a/test/files/neg/t11591.scala b/test/files/neg/t11591.scala new file mode 100644 index 000000000000..407304e9822d --- /dev/null +++ b/test/files/neg/t11591.scala @@ -0,0 +1,9 @@ +object Test { + class A + class B + + implicit def mkA(implicit b: => B): A = ??? + implicit def mkB(implicit a: A, i: Int): B = ??? 
+ + implicitly[A] +} diff --git a/test/files/neg/t11607.check b/test/files/neg/t11607.check new file mode 100644 index 000000000000..b698453b01f5 --- /dev/null +++ b/test/files/neg/t11607.check @@ -0,0 +1,4 @@ +t11607.scala:2: error: not found: type migration +@migration("", "") class C + ^ +1 error diff --git a/test/files/neg/t11607.scala b/test/files/neg/t11607.scala new file mode 100644 index 000000000000..1f1a592f6a00 --- /dev/null +++ b/test/files/neg/t11607.scala @@ -0,0 +1,2 @@ +import scala.annotation.migration +@migration("", "") class C diff --git a/test/files/neg/t11618.check b/test/files/neg/t11618.check new file mode 100644 index 000000000000..7777f7b8fb37 --- /dev/null +++ b/test/files/neg/t11618.check @@ -0,0 +1,36 @@ +t11618.scala:9: warning: Implicit pattern definition binds no variables + implicit val _ = 42 + ^ +t11618.scala:10: warning: Implicit pattern definition binds no variables + implicit val Some(_) = Option(42) + ^ +t11618.scala:18: warning: Implicit pattern definition binds no variables + implicit val _ = i + ^ +t11618.scala:19: warning: Implicit pattern definition binds no variables + implicit val Some(_) = Option(i) + ^ +t11618.scala:24: warning: Implicit pattern definition binds no variables + implicit val Some(_) = Option(42) + ^ +t11618.scala:7: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. + val _ = 42 + ^ +t11618.scala:8: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. + val Some(_) = Option(42) + ^ +t11618.scala:9: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. + implicit val _ = 42 + ^ +t11618.scala:10: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. 
+ implicit val Some(_) = Option(42) + ^ +t11618.scala:23: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. + val Some(_) = Option(42) + ^ +t11618.scala:24: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. + implicit val Some(_) = Option(42) + ^ +error: No warnings can be incurred under -Werror. +11 warnings +1 error diff --git a/test/files/neg/t11618.scala b/test/files/neg/t11618.scala new file mode 100644 index 000000000000..cef5b3a36c48 --- /dev/null +++ b/test/files/neg/t11618.scala @@ -0,0 +1,25 @@ +//> using options -Werror -Xlint:deprecation + +// warn about introducing Unit-valued fields instead of merely side-effecting. +// warn about implicit not introducing any implicits +// +class C { + val _ = 42 + val Some(_) = Option(42) + implicit val _ = 42 + implicit val Some(_) = Option(42) + + val p = println() // nowarn + val Some(answer @ _) = Option(42) // nowarn + + def f(i: 42) = { + val _ = i // nowarn + val Some(_) = Option(i) // nowarn + implicit val _ = i + implicit val Some(_) = Option(i) + } +} +object C { + val Some(_) = Option(42) + implicit val Some(_) = Option(42) +} diff --git a/test/files/neg/t1163.check b/test/files/neg/t1163.check index 69e6b7ac4a4d..4437c44a95fc 100644 --- a/test/files/neg/t1163.check +++ b/test/files/neg/t1163.check @@ -1,7 +1,8 @@ -t1163.scala:2: error: overriding method foo in trait Sub of type => Sub; - method foo in trait Super of type => Super has incompatible type; - (Note that method foo in trait Sub of type => Sub is abstract, - and is therefore overridden by concrete method foo in trait Super of type => Super) +t1163.scala:2: error: incompatible type in overriding +override def foo: Sub (defined in trait Sub) + with def foo: Super (defined in trait Super); + (note that override def foo: Sub (defined in trait Sub) is abstract, + and is therefore overridden by concrete def foo: Super (defined in trait 
Super)) trait Sub extends Super { override def foo: Sub } ^ -one error found +1 error diff --git a/test/files/neg/t11643.check b/test/files/neg/t11643.check index 601199c86e45..9db82b3af825 100644 --- a/test/files/neg/t11643.check +++ b/test/files/neg/t11643.check @@ -4,4 +4,4 @@ t11643.scala:6: error: could not find implicit value for parameter i: Int t11643.scala:7: error: could not find implicit value for parameter i: Int def k(j: Int) = { val x = j + f ; 42 } ^ -two errors found +2 errors diff --git a/test/files/neg/t11643.scala b/test/files/neg/t11643.scala index aaecf3840b6b..8c5b4e0a30cb 100644 --- a/test/files/neg/t11643.scala +++ b/test/files/neg/t11643.scala @@ -1,5 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:params +//> using options -Werror -Wunused:params trait T { def f(implicit i: Int) = i diff --git a/test/files/neg/t11644a.check b/test/files/neg/t11644a.check new file mode 100644 index 000000000000..6cee9521f5d1 --- /dev/null +++ b/test/files/neg/t11644a.check @@ -0,0 +1,18 @@ +t11644a.scala:20: error: type mismatch; + found : Int + required: AcciSamZero + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types + ^ +t11644a.scala:21: error: type mismatch; + found : Int + required: SamZero + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types + ^ +t11644a.scala:24: warning: Eta-expansion to expected type AcciSamOne, which is not a function type but is SAM-convertible to Int => Int. +trait AcciSamOne should be annotated with `@FunctionalInterface` if eta-expansion is desired. +Avoid eta-expansion by writing the function literal `((x: Int) => m3(x))` or `m3(_)`. +This warning can be filtered with `-Wconf:cat=lint-eta-sam`. 
+ val t3AcciSam: AcciSamOne = m3 // warn + ^ +1 warning +2 errors diff --git a/test/files/neg/t11644a.scala b/test/files/neg/t11644a.scala new file mode 100644 index 000000000000..3f51e8cf667c --- /dev/null +++ b/test/files/neg/t11644a.scala @@ -0,0 +1,28 @@ +//> using options -Xsource:3 +// +// eta-expansion to SAM type always warns in Scala 3 world + +trait AcciSamZero { def apply(): Int } + +@FunctionalInterface +trait SamZero { def apply(): Int } + +trait AcciSamOne { def apply(i: Int): Int } + +@FunctionalInterface +trait SamOne { def apply(i: Int): Int } + +class EtaExpand214 { + def m2() = 1 + def m3(x: Int) = x + + val t2: () => Any = m2 // eta-expanded with lint warning + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types + + val t3: Int => Any = m3 // no warn + val t3AcciSam: AcciSamOne = m3 // warn + val t3SamLit: AcciSamOne = (x: Int) => m3(x) // no warn + val t3SamPH: AcciSamOne = m3(_) // no warn + val t3Sam: SamOne = m3 // no warn +} diff --git a/test/files/neg/t11644b.check b/test/files/neg/t11644b.check new file mode 100644 index 000000000000..c24d848a4208 --- /dev/null +++ b/test/files/neg/t11644b.check @@ -0,0 +1,22 @@ +t11644b.scala:18: error: type mismatch; + found : Int + required: AcciSamZero + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types under -Xsource:3 + ^ +t11644b.scala:19: error: type mismatch; + found : Int + required: SamZero + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types under -Xsource:3 + ^ +t11644b.scala:17: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => Any), rather than applied to `()`. +Write m2() to invoke method m2, or change the expected type. 
+ val t2: () => Any = m2 // eta-expanded with lint warning + ^ +t11644b.scala:22: warning: Eta-expansion to expected type AcciSamOne, which is not a function type but is SAM-convertible to Int => Int. +trait AcciSamOne should be annotated with `@FunctionalInterface` if eta-expansion is desired. +Avoid eta-expansion by writing the function literal `((x: Int) => m3(x))` or `m3(_)`. +This warning can be filtered with `-Wconf:cat=lint-eta-sam`. + val t3AcciSam: AcciSamOne = m3 // warn + ^ +2 warnings +2 errors diff --git a/test/files/neg/t11644b.scala b/test/files/neg/t11644b.scala new file mode 100644 index 000000000000..0722aa7ab322 --- /dev/null +++ b/test/files/neg/t11644b.scala @@ -0,0 +1,24 @@ +//> using options -Xlint:deprecation,eta-zero,eta-sam + +trait AcciSamZero { def apply(): Int } + +@FunctionalInterface +trait SamZero { def apply(): Int } + +trait AcciSamOne { def apply(i: Int): Int } + +@FunctionalInterface +trait SamOne { def apply(i: Int): Int } + +class EtaExpand214 { + def m2() = 1 + def m3(x: Int) = x + + val t2: () => Any = m2 // eta-expanded with lint warning + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types under -Xsource:3 + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types under -Xsource:3 + + val t3: Int => Any = m3 // no warn + val t3AcciSam: AcciSamOne = m3 // warn + val t3Sam: SamOne = m3 // no warn +} diff --git a/test/files/neg/t11644c.check b/test/files/neg/t11644c.check new file mode 100644 index 000000000000..7f1c788df184 --- /dev/null +++ b/test/files/neg/t11644c.check @@ -0,0 +1,14 @@ +s.scala:13: warning: Eta-expansion to expected type J, which is not a function type but is SAM-convertible to Int => Int. +Avoid eta-expansion by writing the function literal `((i: Int) => bump(i))` or `bump(_)`. +This warning can be filtered with `-Wconf:cat=lint-eta-sam`. 
+ c.f(bump), + ^ +s.scala:14: warning: Eta-expansion to expected type K, which is not a function type but is SAM-convertible to Int => Int. +trait K should be annotated with `@FunctionalInterface` if eta-expansion is desired. +Avoid eta-expansion by writing the function literal `((i: Int) => bump(i))` or `bump(_)`. +This warning can be filtered with `-Wconf:cat=lint-eta-sam`. + c.g(bump), + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t11644c/J.java b/test/files/neg/t11644c/J.java new file mode 100644 index 000000000000..95a3612859da --- /dev/null +++ b/test/files/neg/t11644c/J.java @@ -0,0 +1,7 @@ + +public abstract class J { + public abstract int f(int i); +} +interface K { + int f(int i); +} diff --git a/test/files/neg/t11644c/s.scala b/test/files/neg/t11644c/s.scala new file mode 100644 index 000000000000..06085452737f --- /dev/null +++ b/test/files/neg/t11644c/s.scala @@ -0,0 +1,16 @@ +//> using options -Xsource:3 -Werror + +class C { + def f(j: J): Int = j.f(42) + def g(k: K): Int = k.f(17) +} +object Test extends App { + def bump(i: Int): Int = i + 1 + val c = new C + println {( + c.f((i: Int) => i + 1), + c.g((i: Int) => i + 1), + c.f(bump), + c.g(bump), + )} +} diff --git a/test/files/neg/t1168.check b/test/files/neg/t1168.check index d9b754774fa5..dcb224d86902 100644 --- a/test/files/neg/t1168.check +++ b/test/files/neg/t1168.check @@ -1,4 +1,4 @@ -t1168.scala:6: error: Names of vals or vars may not end in `_=' +t1168.scala:6: error: Names of vals or vars may not end in `_=` val r_= = 4 ^ -one error found +1 error diff --git a/test/files/neg/t11681.check b/test/files/neg/t11681.check new file mode 100644 index 000000000000..e0957104092a --- /dev/null +++ b/test/files/neg/t11681.check @@ -0,0 +1,14 @@ +t11681.scala:9: error: object bar is not a member of package com.example + def foobar: String = example.bar // not a usage + ^ +t11681.scala:10: error: object Detest is not a member of package 
com.example + def detest = example.Detest // not a usage + ^ +t11681.scala:14: warning: private method bar in package object example is never used + private def bar: String = "bar" + ^ +t11681.scala:17: warning: private object Detest in package object example is never used + private object Detest + ^ +2 warnings +2 errors diff --git a/test/files/neg/t11681.scala b/test/files/neg/t11681.scala new file mode 100644 index 000000000000..0254fa0884d0 --- /dev/null +++ b/test/files/neg/t11681.scala @@ -0,0 +1,18 @@ +// +//> using options -Wunused:privates -Werror +// +package com + +package example { + private object Test { + def foo: String = "foo" + def foobar: String = example.bar // not a usage + def detest = example.Detest // not a usage + } +} +package object example { + private def bar: String = "bar" + private[example] def barNone: String = "bar" // not unqualified private + + private object Detest +} diff --git a/test/files/neg/t11690.check b/test/files/neg/t11690.check new file mode 100644 index 000000000000..5e525a958f5b --- /dev/null +++ b/test/files/neg/t11690.check @@ -0,0 +1,6 @@ +t11690.scala:11: warning: Unused import + import X._ + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t11690.scala b/test/files/neg/t11690.scala new file mode 100644 index 000000000000..79f84c9ad1b7 --- /dev/null +++ b/test/files/neg/t11690.scala @@ -0,0 +1,18 @@ + +//> using options -Wunused:imports -Werror + +object X { + val v = 27 +} +object Y { + val v = 42 +} +object Main { + import X._ + import Y.v + def main(args: Array[String]) = println { + //import Y.v // warns + v + } +} + diff --git a/test/files/neg/t11746.check b/test/files/neg/t11746.check new file mode 100644 index 000000000000..a7334117cd21 --- /dev/null +++ b/test/files/neg/t11746.check @@ -0,0 +1,7 @@ +t11746.scala:18: warning: failed to determine if e should be inlined: +The method e()Ljava/lang/Throwable; could not be found in the class java/lang/Object or any of its parents. + case Failure(e) => println(e.toString) + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t11746.scala b/test/files/neg/t11746.scala new file mode 100644 index 000000000000..9a95f427873c --- /dev/null +++ b/test/files/neg/t11746.scala @@ -0,0 +1,20 @@ +// +//> using options -Werror -opt:inline:** -Wopt:none,_ +// +// compare -opt-warnings:none,at-inline-failed-summary + +trait Try + +object Try { + def apply(s: String): Try = Success(s) +} + +case class Success(s: String) extends Try +case class Failure(e: Throwable) extends Try + +class C { + private def get(a: String): Unit = Try(a) match { + case Failure(e: Exception) => + case Failure(e) => println(e.toString) + } +} diff --git a/test/files/neg/t11758.check b/test/files/neg/t11758.check new file mode 100644 index 000000000000..8ddaee35d53a --- /dev/null +++ b/test/files/neg/t11758.check @@ -0,0 +1,24 @@ +t11758.scala:3: warning: Unused import of deprecated lazy value higherKinds: higherKinds no longer needs to be imported explicitly +import language.higherKinds + ^ +t11758.scala:19: warning: Unused import from deprecated object outer: no outer + import outer.other + ^ 
+t11758.scala:21: warning: Unused import of deprecated object inner: no inner + import outer.{inner => odder} + ^ +t11758.scala:25: warning: Unused import of deprecated class C: no see + import nest.C + ^ +t11758.scala:23: warning: object inner in object outer is deprecated (since 2.0): no inner + def f = inner + ^ +t11758.scala:23: warning: object outer is deprecated (since 1.0): no outer + def f = inner + ^ +t11758.scala:30: warning: class C in object nest is deprecated (since 3.0): no see + def g = new C + ^ +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/t11758.scala b/test/files/neg/t11758.scala new file mode 100644 index 000000000000..027d4ca2d868 --- /dev/null +++ b/test/files/neg/t11758.scala @@ -0,0 +1,35 @@ +//> using options -Xlint:deprecation -Wunused:imports -Werror + +import language.higherKinds + +@deprecated("no outer", "1.0") +object outer { + @deprecated("no inner", "2.0") + object inner + object other +} +object nest { + @deprecated("no see", "3.0") + class C + + val status = true +} + +trait T { + import outer.other + import outer.inner + import outer.{inner => odder} + + def f = inner + + import nest.C + def g = () +} +trait U { + import nest.C + def g = new C +} +trait OK { + import nest.{C => _, _} + def ok = status +} diff --git a/test/files/neg/t11788.check b/test/files/neg/t11788.check new file mode 100644 index 000000000000..b378cd643cb5 --- /dev/null +++ b/test/files/neg/t11788.check @@ -0,0 +1,6 @@ +t11788/Foo.java:5: error: cannot find symbol + return java.lang.Integer.valueOf(42); + ^ + symbol: variable lang + location: variable java of type String +1 error diff --git a/test/files/neg/t11788/Bar.scala b/test/files/neg/t11788/Bar.scala new file mode 100644 index 000000000000..01c1838abe21 --- /dev/null +++ b/test/files/neg/t11788/Bar.scala @@ -0,0 +1,3 @@ +object Bar extends App { + println(new Foo().test()) +} diff --git a/test/files/neg/t11788/Foo.java 
b/test/files/neg/t11788/Foo.java new file mode 100644 index 000000000000..22167e26a5f4 --- /dev/null +++ b/test/files/neg/t11788/Foo.java @@ -0,0 +1,7 @@ +public class Foo { + private String java; + + public java.lang.Integer test() { + return java.lang.Integer.valueOf(42); + } +} diff --git a/test/files/neg/t11788b.check b/test/files/neg/t11788b.check new file mode 100644 index 000000000000..58db2408b5f4 --- /dev/null +++ b/test/files/neg/t11788b.check @@ -0,0 +1,4 @@ +t11788b/Foo.java:5: error: incompatible types: java.lang.Integer cannot be converted to java.lang.Integer + return Integer.valueOf(42); + ^ +1 error diff --git a/test/files/neg/t11788b/Bar.scala b/test/files/neg/t11788b/Bar.scala new file mode 100644 index 000000000000..01c1838abe21 --- /dev/null +++ b/test/files/neg/t11788b/Bar.scala @@ -0,0 +1,3 @@ +object Bar extends App { + println(new Foo().test()) +} diff --git a/test/files/neg/t11788b/Foo.java b/test/files/neg/t11788b/Foo.java new file mode 100644 index 000000000000..802929d7fc92 --- /dev/null +++ b/test/files/neg/t11788b/Foo.java @@ -0,0 +1,7 @@ +public class Foo { + private String java; + + public java.lang.Integer test() { + return Integer.valueOf(42); + } +} diff --git a/test/files/neg/t11788b/java.java b/test/files/neg/t11788b/java.java new file mode 100644 index 000000000000..2301bc90a177 --- /dev/null +++ b/test/files/neg/t11788b/java.java @@ -0,0 +1,7 @@ + +public class java { + public static class lang { + public static class Integer { + } + } +} diff --git a/test/files/neg/t1181.check b/test/files/neg/t1181.check index 05ed6b181291..880645e32a9b 100644 --- a/test/files/neg/t1181.check +++ b/test/files/neg/t1181.check @@ -3,4 +3,4 @@ t1181.scala:9: error: type mismatch; required: Symbol _ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail) ^ -one error found +1 error diff --git a/test/files/neg/t1181.scala b/test/files/neg/t1181.scala index 5e5fceacc8ee..108788d461c4 100644 --- 
a/test/files/neg/t1181.scala +++ b/test/files/neg/t1181.scala @@ -3,7 +3,7 @@ package test import scala.collection.immutable.Map class CompilerTest(val valueList: List[Symbol]) { - def buildMap(map: Map[Symbol, Symbol], keyList: List[Symbol], valueList: List[Symbol]): Map[Symbol, Symbol] = { + def buildMap(map: Map[Symbol, Symbol], keyList: List[Symbol], valueList: List[Symbol]): Map[Symbol, Symbol] = { (keyList, valueList) match { case (Nil, Nil) => map _ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail) diff --git a/test/files/neg/t11823.check b/test/files/neg/t11823.check new file mode 100644 index 000000000000..de9c19058768 --- /dev/null +++ b/test/files/neg/t11823.check @@ -0,0 +1,7 @@ +t11823.scala:7: error: could not find implicit value for parameter e: Test.Foo[String] + val fooString: Foo[String] = implicitly + ^ +t11823.scala:8: error: could not find implicit value for parameter foo: Test.Foo[String] + val barString: Bar[String] = bar + ^ +2 errors diff --git a/test/files/neg/t11823.scala b/test/files/neg/t11823.scala new file mode 100644 index 000000000000..c34a330c10a2 --- /dev/null +++ b/test/files/neg/t11823.scala @@ -0,0 +1,9 @@ +object Test { + trait Foo[A] + trait Bar[A] + implicit def fooInt: Foo[Int] = ??? + implicit def fooLong: Foo[Long] = ??? + def bar[A](implicit foo: Foo[A]): Bar[A] = ??? 
+ val fooString: Foo[String] = implicitly + val barString: Bar[String] = bar +} diff --git a/test/files/neg/t1183.check b/test/files/neg/t1183.check index c402829c701b..ba2bc803c70c 100644 --- a/test/files/neg/t1183.check +++ b/test/files/neg/t1183.check @@ -14,4 +14,4 @@ t1183.scala:9: error: name clash: class Foo defines class Bar and its companion object Foo also defines class Bar case class Bar(i:Int) ^ -four errors found +4 errors diff --git a/test/files/neg/t1183.scala b/test/files/neg/t1183.scala index 23868ab40102..cbd6348af6fd 100644 --- a/test/files/neg/t1183.scala +++ b/test/files/neg/t1183.scala @@ -14,7 +14,7 @@ object Test { val foo1 = new Foo(1) def runTest() = { - val res = (foo1.Bar(2):Any) match { + val res = (foo1.Bar(2):Any @unchecked) match { case foo1.Bar(2) => true // (1) } require(res) diff --git a/test/files/neg/t11843.check b/test/files/neg/t11843.check index aa833ee834fc..35a071d8841e 100644 --- a/test/files/neg/t11843.check +++ b/test/files/neg/t11843.check @@ -1,17 +1,21 @@ t11843.scala:6: error: value $isInstanceOf is not a member of String +did you mean isInstanceOf? or perhaps asInstanceOf? "".$isInstanceOf[Int] ^ t11843.scala:7: error: value $asInstanceOf is not a member of String +did you mean asInstanceOf? or perhaps isInstanceOf? "".$asInstanceOf[Int] ^ t11843.scala:10: error: value $isInstanceOf is not a member of Symbol +did you mean isInstanceOf? or perhaps asInstanceOf? ss.$isInstanceOf[String] ^ t11843.scala:11: error: value $asInstanceOf is not a member of Symbol +did you mean asInstanceOf? or perhaps isInstanceOf? 
ss.$asInstanceOf[String] ^ t11843.scala:8: warning: fruitless type test: a value of type Symbol cannot also be a String (the underlying of String) ss.isInstanceOf[String] ^ -one warning found -four errors found +1 warning +4 errors diff --git a/test/files/neg/t11850.check b/test/files/neg/t11850.check new file mode 100644 index 000000000000..65fab248bfd6 --- /dev/null +++ b/test/files/neg/t11850.check @@ -0,0 +1,21 @@ +t11850.scala:9: warning: Name x is already introduced in an enclosing scope as value x in trait T. Did you intend to match it using backquoted `x`? + case x => 1 // warn + ^ +t11850.scala:28: warning: Name x is already introduced in an enclosing scope as value x in class CT. Did you intend to match it using backquoted `x`? + case x => 1 // warn + ^ +t11850.scala:135: warning: Name self is already introduced in an enclosing scope as value self in class Selfie. Did you intend to match it using backquoted `self`? + case (x, self) => x + ^ +t11850.scala:202: warning: Name x is already introduced in an enclosing scope as value x at line 201. Did you intend to match it using backquoted `x`? + case x => x.toString + ^ +t11850.scala:224: warning: Name c is already introduced in an enclosing scope as value c in class pattern matches occasionally appear in pattern-matching anonymous functions. Did you intend to match it using backquoted `c`? + def f = c.collect { case c if c.flag => c.toString } + ^ +t11850.scala:233: warning: Name x is already introduced in an enclosing scope as object x in object is it worth qualifying what kind of term. Did you intend to match it using backquoted `x`? + case x => 1 + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/t11850.scala b/test/files/neg/t11850.scala new file mode 100644 index 000000000000..3ba73e663e4e --- /dev/null +++ b/test/files/neg/t11850.scala @@ -0,0 +1,235 @@ +//> using options -Werror -Xlint:pattern-shadow + +trait T { + val x = 42 + + def f(i: Int) = + i match { + case `x` => 0 // presence of this case really obviates warning on the next? + case x => 1 // warn + } + def g(i: Int) = + i match { + case x @ _ => 1 // never warn if user writes bind of wildcard + } + def h(i: Any) = + i match { + case i: Int => i // alias of scrutinee + case _ => 42 + } +} +// same but in a class +class CT { + val x = 42 + + def f(i: Int) = + i match { + case `x` => 0 // presence of this case really obviates warning on the next? + case x => 1 // warn + } + def g(i: Int) = + i match { + case x @ _ => 1 // never warn if user writes bind of wildcard + } + def h(i: Any) = + i match { + case i: Int => i // alias of scrutinee + case _ => 42 + } +} +trait Overload[A] { + def map[B](f: A => B): Int = ??? +} +trait Overloader[K, V] extends Overload[(K, V)] { + def map[K2, V2](f: ((K, V)) => (K2, V2)): String = ??? + + def f() = this match { + case map: Overloader[k, v] => // shadows overloaded members which are not stable + } +} +class C { + val (x, y) = (42, 27) + def f(): Unit = { + val (a, b) = (42, 27) + println(a+b) + } +} +final class D(private val xs: List[Int]) extends AnyVal { + def f: List[Int] = + (xs: Any @unchecked) match { + case xs: List[_] => Nil + case _ => Nil + } +} +sealed class Tree +final case class Apply(fn: Tree, args: List[Tree]) extends Tree +final class Applied(val tree: Tree) { + /** The tree stripped of the possibly nested applications. + * The original tree if it's not an application. 
+ */ + def callee: Tree = { + @annotation.tailrec + def loop(tree: Tree): Tree = tree match { + case Apply(fn, _) => loop(fn) + case tree => tree // alias of scrutinee + } + loop(tree) + } + + def `ident introduced by case class`(): Unit = { + val fn = 42 + tree match { + case Apply(fn, Nil) => println(fn) // name of parameter + case _ => println(fn) + } + } + + def `ident introduced by case class with a twist`(): Unit = { + val fn = 42 + tree match { + case t @ Apply(fn, Nil) => println((t, fn)) // name of parameter but not top level pattern + case _ => println(fn) + } + } + + def `bound var in pattern is selector`(t: Tree): Unit = { + t match { + case Apply(t, args) => println((t, args)) // alias of scrutinee but not top level pattern + case _ => + } + } +} +object X { def unapply(p: (Int, Int)): Option[(Int, Int)] = Option(p).filter(p => p._1 == p._2) } +object Y { def unapply(p: (Int, Int, Int)): Option[(Int, Int)] = Option(p).map { case (x, y, z) => (x, y+z) } } +class Tupling { + def f(x: Int, y: Int): Int = (x, y) match { + case (42, 27) => 5 + case (x, y) => x+y // correspond to tuple arg (or anywhere in selector) + } + def g(x: Some[Int], y: Some[Int]): Int = (x, y) match { + case (Some(42), Some(27)) => 5 + case (Some(x), Some(y)) => x+y // correspond to tuple arg but not top level pattern (or anywhere in selector) + } + def e(x: Int, y: Int): Int = (x, y) match { + case X(x, y) => x+y // extractor args correspond to tuple args (or anywhere in selector) + case _ => -1 + } + def err(x: Int, y: Int, z: Int): Int = (x, y, z) match { + case Y(x, y) => x+y // only allow 1-1 (or anywhere in selector) + case _ => -1 + } + def swap(x: Int, y: Int): Int = (x, y) match { + case X(y, x) => x+y // anywhere in selector + case _ => -1 + } + def add1(x: Int): Int = x + 1 match { + case x => x+42 // anywhere in selector + } + def add2(x: Int): Int = 1 + x match { + case x => x+42 // anywhere in selector + } +} +class Selfie { self => + def f(x: Int, y: Selfie): Int = (x, 
y) match { + case (42, this) => 5 + case (x, self) => x + case _ => 42 + } + def g(): Int = self match { + case self: Selfie => 5 + case _ => 42 + } +} +class Deconstruct[K, +V](val mapping: Deconstruct.Mapping[K, V]) { + def test(k: K): V = { + val (_, v) = mapping(k) + v + } +} +object Deconstruct { + type Mapping[K, +V] = Map[K, (Int, V)] +} +class Init { + def f(): Int = 42 + val res = f() match { + case res => res + } +} +package p { + class P { + def m = ??? + def test(x: Any, y: => Any) = x match { + case p: Int => p + case m: String => m.toInt + case y: Double => y.toInt + case _ => 42 + } + } +} +class `multi extraction of singular scrutinee` { + val r = raw"(\d)(\d)".r + val x = "42" + def test = x match { + case r(x, y) => x * y.toInt + case _ => "" + } +} +class `weird but true` { + val _1 = "yup" + def test = (42, 27) match { + case (_1, 27) => 3 // briefly did not warn as param name + case _ => 5 + } +} +case class Thing(i: Int, other: Thing) +class `derived thing is refinement` { + val t0 = Thing(27, null) + val t = Thing(42, t0) + t.other match { + case t => // ok because select from t is another Thing maybe related + } + t.copy(i = 5) match { + case t => // ok because deriving from t is another Thing maybe related + } +} +class `kosher selector` { + def f(x: Any) = x.toString match { + case x => x + } +} +class `unkosher selector` { + def f(x: Any) = 42 match { + case x => x.toString + } +} +class `also unkosher selector` { + // selector is a value derived from x but it is an unrelated type; x does not "refine" Thing + def f(x: Thing) = x.toString match { + case x => x + } +} +class `lukas asked whats that null check for` { + import annotation._ + def isOperatorPart(c: Char): Boolean = (c: @unchecked) match { + case '+' => true + case c => false + } +} +case class Collector() { + def collect[T](pf: PartialFunction[Collector, T]): List[T] = ??? 
+ def flag = true +} +class `pattern matches occasionally appear in pattern-matching anonymous functions` { + val c = Collector() + def f = c.collect { case c if c.flag => c.toString } +} +object `is it worth qualifying what kind of term` { + trait T + object x extends T + + def f(t: T) = + t match { + case `x` => 0 + case x => 1 + } +} diff --git a/test/files/neg/t11866.check b/test/files/neg/t11866.check new file mode 100644 index 000000000000..02024d249f7b --- /dev/null +++ b/test/files/neg/t11866.check @@ -0,0 +1,25 @@ +t11866.scala:11: error: ambiguous reference to overloaded definition, +both object f in class X of type Test.x.f.type +and method f in class X of type (n: Int): Int +match argument types (Int) + def t1 = x.f(42) + ^ +t11866.scala:13: error: ambiguous reference to overloaded definition, +both object f in class X of type Test.x.f.type +and method f in class X of type (n: Int): Int +match argument types (Int) + def t2 = x.f(n) + ^ +t11866.scala:15: error: ambiguous reference to overloaded definition, +both method f in class Y of type [A <: 42](a: A): Int +and method f in class Y of type (n: 42): Int +match argument types (42) + def t3 = y.f(n) + ^ +t11866.scala:17: error: ambiguous reference to overloaded definition, +both object f in class Z of type Test.z.f.type +and method f in class Z of type [A <: 42](a: A): Int +match argument types (Int) and expected result type Any + println(z.f(n)) + ^ +4 errors diff --git a/test/files/neg/t11866.scala b/test/files/neg/t11866.scala new file mode 100644 index 000000000000..2c0899856857 --- /dev/null +++ b/test/files/neg/t11866.scala @@ -0,0 +1,18 @@ + +// error message won't follow apply into object f, +// so it won't keep narrow type in z +// +class X { def f(n: Int) = 42 ; object f { def apply(i: Int) = 42 + i } } +class Y { def f(n: 42) = 42 ; def f[A <: 42](a: A) = 27 } +class Z { def f[A <: 42](a: A) = 42 ; object f { def apply(i: 42) = i } } + +object Test extends App { + val x = new X() + def t1 = 
x.f(42) + val n: 42 = 42 + def t2 = x.f(n) + val y = new Y() + def t3 = y.f(n) + val z = new Z() + println(z.f(n)) +} diff --git a/test/files/neg/t11900.check b/test/files/neg/t11900.check new file mode 100644 index 000000000000..c398802a9d80 --- /dev/null +++ b/test/files/neg/t11900.check @@ -0,0 +1,13 @@ +t11900.scala:35: error: ';' expected but ',' found. + a => a + 1, // error: weird comma + ^ +t11900.scala:39: error: ';' expected but ',' found. + println("a"), // error: weird comma + ^ +t11900.scala:43: error: ';' expected but ',' found. + println("b"), // error: weird comma + ^ +t11900.scala:55: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern) + _*, + ^ +4 errors diff --git a/test/files/neg/t11900.scala b/test/files/neg/t11900.scala new file mode 100644 index 000000000000..a273b9f2eb86 --- /dev/null +++ b/test/files/neg/t11900.scala @@ -0,0 +1,70 @@ + +trait t11900 { + // cf pos/trailing-commas + // + import scala.collection.{ + immutable, + mutable, + } + + def h[A, + ]: List[A] = Nil + + def g = List( + 1, + 2, + 3, + ) + + def star = + List(1, 2, 3, 4, 5) match { + case List( + 1, + 2, + 3, + ) => false + case List( + 1, + 2, + _*, + ) => true + } + + def f = + List(1, 2, 3).map { + a => a + 1, // error: weird comma + } + + class A() { + println("a"), // error: weird comma + } + + def b() = { + println("b"), // error: weird comma + } + + def starcrossed = + List(1, 2, 3, 4, 5) match { + case List( + 1, + 2, + 3, + ) => false + case List( + 1, + _*, + 2, + ) => true + } + + def p(p: (Int, + String, + ) + ): Unit + + def q: (Int, + String, + ) + + val z = 42 +} diff --git a/test/files/neg/t11906.check b/test/files/neg/t11906.check new file mode 100644 index 000000000000..6c74a939ed01 --- /dev/null +++ b/test/files/neg/t11906.check @@ -0,0 +1,4 @@ +t11906.scala:6: error: super may not be used on lazy value test; super can only be used to select a member that is a method or type + override val test: Seq[String] = 
super.test + ^ +1 error diff --git a/test/files/neg/t11906.scala b/test/files/neg/t11906.scala new file mode 100644 index 000000000000..cfc3c6cc099a --- /dev/null +++ b/test/files/neg/t11906.scala @@ -0,0 +1,7 @@ +class Test { + lazy val test: Seq[String] = Seq.empty +} + +class Test2 extends Test { + override val test: Seq[String] = super.test +} diff --git a/test/files/neg/t11921-alias.check b/test/files/neg/t11921-alias.check index f5193028ee7b..ca9812924487 100644 --- a/test/files/neg/t11921-alias.check +++ b/test/files/neg/t11921-alias.check @@ -1,31 +1,55 @@ -t11921-alias.scala:18: warning: reference to TT is ambiguous; +t11921-alias.scala:18: error: reference to TT is ambiguous; it is both defined in the enclosing object O and inherited in the enclosing class D as type TT (defined in class C) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.TT`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t2.O.D.n.x def n(x: TT) = x // ambiguous ^ -t11921-alias.scala:38: warning: reference to c is ambiguous; +t11921-alias.scala:27: error: in Scala 3 (or with -Xsource-features:no-infer-structural), value a will no longer have a structural type: t3.A[B.this.c.type]{def n: t3.Context} + members that can be accessed with a reflective call: def n: t3.Context +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t3.B.a + val a = new A[c.type](c) { + ^ +t11921-alias.scala:38: error: reference to c is ambiguous; it is both defined in the enclosing class B and inherited in the enclosing anonymous class as value c (defined in class A) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.c`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t4.B.a def n = c // ambiguous ^ -t11921-alias.scala:57: warning: reference to name is ambiguous; +t11921-alias.scala:37: error: in Scala 3 (or with -Xsource-features:no-infer-structural), value a will no longer have a structural type: t4.A[t4.Context]{def n: t4.Context} + members that can be accessed with a reflective call: def n: t4.Context +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t4.B.a + val a = new A(c) { + ^ +t11921-alias.scala:47: error: in Scala 3 (or with -Xsource-features:no-infer-structural), method f will no longer have a structural type: t5.K[t.type]{def test: t5.TT} + members that can be accessed with a reflective call: def test: t5.TT +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t5.C.f + def f(t: TT) = new K[t.type](t) { + ^ +t11921-alias.scala:57: error: reference to name is ambiguous; it is both defined in the enclosing method m and inherited in the enclosing anonymous class as value name (defined in class C) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.name`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t6.Test.m println(name) ^ -t11921-alias.scala:67: warning: reference to name is ambiguous; +t11921-alias.scala:67: error: reference to name is ambiguous; it is both defined in the enclosing method m and inherited in the enclosing anonymous class as value name (defined in class A, inherited through parent class C) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.name`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=t7.Test.m println(name) ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-four warnings found -one error found +7 errors diff --git a/test/files/neg/t11921-alias.scala b/test/files/neg/t11921-alias.scala index ceccd9745ca6..2e4131633082 100644 --- a/test/files/neg/t11921-alias.scala +++ b/test/files/neg/t11921-alias.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xsource:2.13 +//> using options -Werror -Xsource:3 object t1 { class C[T] { type TT = T } diff --git a/test/files/neg/t11921.check b/test/files/neg/t11921.check new file mode 100644 index 000000000000..2a5dac20107e --- /dev/null +++ b/test/files/neg/t11921.check @@ -0,0 +1,15 @@ +t11921.scala:6: error: type mismatch; + found : A => B + required: B => B + def iterator = coll.iterator.map(f) // coll is ambiguous + ^ +t11921.scala:6: error: reference to coll is ambiguous; +it is both defined in the enclosing method lazyMap and inherited in the enclosing anonymous class as method coll (defined in trait Iterable) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.coll`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C.lazyMap + def iterator = coll.iterator.map(f) // coll is ambiguous + ^ +2 errors diff --git a/test/files/neg/t11921.scala b/test/files/neg/t11921.scala new file mode 100644 index 000000000000..fe3f4fa2bfe2 --- /dev/null +++ b/test/files/neg/t11921.scala @@ -0,0 +1,16 @@ +//> using options -Xsource:3 + +class C { + def lazyMap[A, B](coll: Iterable[A], f: A => B) = + new Iterable[B] { + def iterator = coll.iterator.map(f) // coll is ambiguous + } +} + +/* was: +t11921.scala:5: error: type mismatch; + found : A => B + required: B => B + def iterator = coll.iterator.map(f) + ^ +*/ diff --git a/test/files/neg/t11921b.check b/test/files/neg/t11921b.check index ff39e0001fa2..a6d2e0931f97 100644 --- a/test/files/neg/t11921b.check +++ b/test/files/neg/t11921b.check @@ -1,61 +1,76 @@ -t11921b.scala:156: error: could not find implicit value for parameter i: Int +t11921b.scala:135: error: could not find implicit value for parameter i: Int def u = t // doesn't compile in Scala 2 (maybe there's a ticket for that) ^ -t11921b.scala:11: warning: reference to x is ambiguous; +t11921b.scala:11: error: reference to x is ambiguous; it is both defined in the enclosing object Test and inherited in the enclosing class D as value x (defined in class C) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.x`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test1.Test.D println(x) // error ^ -t11921b.scala:15: warning: reference to x is ambiguous; +t11921b.scala:15: error: reference to x is ambiguous; it is both defined in the enclosing object Test and inherited in the enclosing anonymous class as value x (defined in class C) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.x`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test1.Test.f println(x) // error ^ -t11921b.scala:26: warning: reference to y is ambiguous; +t11921b.scala:26: error: reference to y is ambiguous; it is both defined in the enclosing method c and inherited in the enclosing anonymous class as value y (defined in class D) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.y`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test2.c println(y) // error ^ -t11921b.scala:38: warning: reference to y is ambiguous; +t11921b.scala:38: error: reference to y is ambiguous; it is both defined in the enclosing method c and inherited in the enclosing class E as value y (defined in class D) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. -Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.y`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `E.this.y`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test3.c.E.F println(y) // error ^ -t11921b.scala:65: warning: reference to global is ambiguous; +t11921b.scala:65: error: reference to global is ambiguous; it is both defined in the enclosing package and inherited in the enclosing object D as value global (defined in class C) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.global`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D println(global) // error ^ -t11921b.scala:75: warning: reference to x is ambiguous; +t11921b.scala:75: error: reference to x is ambiguous; it is both defined in the enclosing object Uhu and inherited in the enclosing class C as value x (defined in class A, inherited through parent class B) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. -Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.x`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. - def t = x // error, message mentions parent B +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `C.this.x`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test5.Uhu.C.Inner.t + def t = x // ambiguous, message mentions parent B ^ -t11921b.scala:132: warning: reference to a is ambiguous; +t11921b.scala:89: error: reference to a is ambiguous; it is both defined in the enclosing class C and inherited in the enclosing trait J as method a (defined in trait I) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.a`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test6.C.J.t val t = a // error ^ -t11921b.scala:157: warning: reference to lo is ambiguous; +t11921b.scala:136: error: reference to lo is ambiguous; it is both defined in the enclosing object test10 and inherited in the enclosing class C as value lo (defined in class P) In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.lo`. -Or use `-Wconf:msg=legacy-binding:s` to silence this warning. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test10.C.v def v = t(lo) // error ^ -8 warnings found -one error found +9 errors diff --git a/test/files/neg/t11921b.scala b/test/files/neg/t11921b.scala index eb79d23bb246..b30c52766c46 100644 --- a/test/files/neg/t11921b.scala +++ b/test/files/neg/t11921b.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xsource:2.13 +//> using options -Werror -Xsource:3 object test1 { @@ -18,7 +18,7 @@ object test1 { } object test2 { - def c(y: Float) = { + def c(y: Float): AnyRef { val y: Int } = { class D { val y = 2 } @@ -72,32 +72,26 @@ object test5 { val x = 2 class C extends B { class Inner { - def t = x // error, message mentions parent B + def t = x // ambiguous, message mentions parent B } } } } object test6 { - trait T { - val s: String - } - - trait U { - this: T => - val s: String - def t = s // ok + trait I { + val a = 1 + def a(x: Int) = "" } - - - class AA { - def f = 1 - class B extends AA { - def g = f // ok, same symbol + class C { + val a = "" + trait J extends I { + val t = a // error } } } + object test7 { trait T { // overloaded a @@ -119,21 +113,6 @@ 
object test7 { } } - -object test8 { - trait I { - // overloaded a - val a = 1 - def a(x: Int) = "" - } - class C { - val a = "" - trait J extends I { - val t = a // error - } - } -} - object test9 { val lo: Int = 1 class P { @@ -157,3 +136,26 @@ object test10 { def v = t(lo) // error } } + +package scala { + trait P { trait Option[+A] } + class C extends P { + def t = new Option[String] {} // OK, competing scala.Option is not defined in the same compilation unit + } +} + +trait t12850 { + def pm(x: Int) = 1 + def pm(x: String) = 2 +} +package object pt12850 extends t12850 { + def t = pm(1) // no error +} + +trait t12850b { + def pm(x: Int) = 1 + def pm(x: String) = 2 + object O extends t12850b { + def t = pm(1) // no error + } +} diff --git a/test/files/neg/t11921c.check b/test/files/neg/t11921c.check new file mode 100644 index 000000000000..1c2d1ba8adda --- /dev/null +++ b/test/files/neg/t11921c.check @@ -0,0 +1,6 @@ +t11921c.scala:6: error: type mismatch; + found : A => B + required: B => B + def iterator = coll.iterator.map(f) // coll is ambiguous + ^ +1 error diff --git a/test/files/neg/t11921c.scala b/test/files/neg/t11921c.scala new file mode 100644 index 000000000000..280cd3eabc42 --- /dev/null +++ b/test/files/neg/t11921c.scala @@ -0,0 +1,16 @@ +//> using options -Wconf:msg=legacy-binding:s -Xsource:3 + +class C { + def lazyMap[A, B](coll: Iterable[A], f: A => B) = + new Iterable[B] { + def iterator = coll.iterator.map(f) // coll is ambiguous + } +} + +/* was: +t11921.scala:5: error: type mismatch; + found : A => B + required: B => B + def iterator = coll.iterator.map(f) + ^ +*/ diff --git a/test/files/neg/t11938.check b/test/files/neg/t11938.check new file mode 100644 index 000000000000..4e3edbd256c1 --- /dev/null +++ b/test/files/neg/t11938.check @@ -0,0 +1,11 @@ +t11938.scala:4: error: type mismatch; + found : n.type (with underlying type Any) + required: Nil.type + val a: Nil.type = (Vector(): Any) match { case n @ Nil => n } // error + ^ 
+t11938.scala:5: error: type mismatch; + found : n.type (with underlying type Any) + required: Nil.type + val b: Nil.type = (Vector(): Any) match { case n @ (m @ Nil) => n } // error was: CCE + ^ +2 errors diff --git a/test/files/neg/t11938.scala b/test/files/neg/t11938.scala new file mode 100644 index 000000000000..7c8c5bedb746 --- /dev/null +++ b/test/files/neg/t11938.scala @@ -0,0 +1,6 @@ +//> using options -Xlint -Werror + +class Test { + val a: Nil.type = (Vector(): Any) match { case n @ Nil => n } // error + val b: Nil.type = (Vector(): Any) match { case n @ (m @ Nil) => n } // error was: CCE +} diff --git a/test/files/neg/t11952.check b/test/files/neg/t11952.check index 45a92109799d..e3533cc631c4 100644 --- a/test/files/neg/t11952.check +++ b/test/files/neg/t11952.check @@ -1,3 +1,3 @@ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t11952.scala b/test/files/neg/t11952.scala index d40441212300..c24f18689ef4 100644 --- a/test/files/neg/t11952.scala +++ b/test/files/neg/t11952.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint -Xmaxwarns 0 +//> using options -Werror -Xlint -Xmaxwarns 0 // // nowarn should mean no warnings are emitted, // irrespective of other flags, and also no diff --git a/test/files/neg/t11952b.check b/test/files/neg/t11952b.check index b2e9fe6cc50f..8d713edbf2d8 100644 --- a/test/files/neg/t11952b.check +++ b/test/files/neg/t11952b.check @@ -2,15 +2,14 @@ [running phase namer on t11952b.scala] [running phase packageobjects on t11952b.scala] [running phase typer on t11952b.scala] -[running phase patmat on t11952b.scala] [running phase superaccessors on t11952b.scala] [running phase extmethods on t11952b.scala] [running phase pickler on t11952b.scala] [running phase refchecks on t11952b.scala] -t11952b.scala:9: error: overriding method f in class C of type => String; - method f cannot 
override final member; - found : => scala.this.Int - required: => String +t11952b.scala:9: error: cannot override final member: + final def f: String (defined in class C); + found : scala.this.Int + required: String override def f: Int = 42 ^ -one error found +1 error diff --git a/test/files/neg/t11952b.scala b/test/files/neg/t11952b.scala index 004befd15d3b..e3d68fe62970 100644 --- a/test/files/neg/t11952b.scala +++ b/test/files/neg/t11952b.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint -Ydebug +//> using options -Werror -Xlint -Vdebug // // Multiple errors at a location are shown under debug. // diff --git a/test/files/neg/t11962.check b/test/files/neg/t11962.check new file mode 100644 index 000000000000..034f6c24a788 --- /dev/null +++ b/test/files/neg/t11962.check @@ -0,0 +1,9 @@ +t11962.scala:3: warning: side-effecting nullary methods are discouraged: suggest defining as `def f()` instead [quickfixable] + def f = println() + ^ +t11962.scala:7: warning: side-effecting nullary methods are discouraged: suggest defining as `def f()` instead [quickfixable] + override def f = super.f + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t11962.scala b/test/files/neg/t11962.scala new file mode 100644 index 000000000000..9dd109f3e3e0 --- /dev/null +++ b/test/files/neg/t11962.scala @@ -0,0 +1,8 @@ +//> using options -Xlint -Werror -Ystop-after:refchecks +trait T extends Any { + def f = println() +} + +class C(val x: Any) extends AnyVal with T { + override def f = super.f +} diff --git a/test/files/neg/t12005.check b/test/files/neg/t12005.check new file mode 100644 index 000000000000..1b420b853a13 --- /dev/null +++ b/test/files/neg/t12005.check @@ -0,0 +1,5 @@ +t12005.scala:24: error: diverging implicit expansion for type AbstractSaleRepository.this.context.Encoder[A] +starting with method mappedDecoder in trait Encoding + implicitly[Encoder[A]] + ^ +1 error diff --git a/test/files/neg/t12005.scala b/test/files/neg/t12005.scala new file mode 100644 index 000000000000..b7bf283c3953 --- /dev/null +++ b/test/files/neg/t12005.scala @@ -0,0 +1,25 @@ +class A +class B +class MappedEncoding[I, O] + +trait Encoding { + type Encoder[T] + implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] + implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], decoder: Encoder[I]): Encoder[O] +} + +trait Tag[T] +object Tag { + type Context[T] = Encoding with Tag[T] +} + +trait WithContext[T] { + protected val context: Tag.Context[T] + implicit val encoder: MappedEncoding[A, B] + implicit val decoder: MappedEncoding[B, A] +} + +trait AbstractSaleRepository[T] extends WithContext[T] { + import context._ + implicitly[Encoder[A]] +} diff --git a/test/files/neg/t12026.check b/test/files/neg/t12026.check index 1e01e37b6fa0..96bb4e2c8bbb 100644 --- a/test/files/neg/t12026.check +++ b/test/files/neg/t12026.check @@ -1,6 +1,6 @@ -t12026.scala:11: warning: discarded non-Unit value +t12026.scala:11: warning: discarded non-Unit value of type Int def f(): Unit = v() ^ -error: No warnings can be incurred under 
-Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12026.scala b/test/files/neg/t12026.scala index a0ffc34df314..67e3ce7cbd50 100644 --- a/test/files/neg/t12026.scala +++ b/test/files/neg/t12026.scala @@ -1,4 +1,4 @@ -//scalac: -Ywarn-value-discard -Xfatal-warnings +//> using options -Wvalue-discard -Werror import annotation._ diff --git a/test/files/neg/t12071.check b/test/files/neg/t12071.check new file mode 100644 index 000000000000..82e968690ea9 --- /dev/null +++ b/test/files/neg/t12071.check @@ -0,0 +1,31 @@ +t12071.scala:15: error: not found: value c c + `c c` i + ^ +t12071.scala:15: error: postfix operator i needs to be enabled +by making the implicit value scala.language.postfixOps visible. +This can be achieved by adding the import clause 'import scala.language.postfixOps' +or by setting the compiler option -language:postfixOps. +See the Scaladoc for value scala.language.postfixOps for a discussion +why the feature needs to be explicitly enabled. +Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). +To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. + `c c` i + ^ +t12071.scala:20: warning: Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). +To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. + + 2 + ^ +t12071.scala:25: warning: Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). 
+To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. + + 1 + ^ +t12071.scala:28: warning: Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). +To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. + `test-1` + `test-2` + ^ +t12071.scala:31: warning: Lines starting with an operator are taken as an infix expression continued from the previous line in Scala 3 (or with -Xsource-features:leading-infix). +To force the current interpretation as a separate statement, add an explicit `;`, add an empty line, or remove spaces after the operator. + `compareTo` (2 - 1) + ^ +4 warnings +2 errors diff --git a/test/files/neg/t12071.scala b/test/files/neg/t12071.scala new file mode 100644 index 000000000000..9bc0e851ea39 --- /dev/null +++ b/test/files/neg/t12071.scala @@ -0,0 +1,51 @@ +//> using options -Werror -Xlint -Xmigration:2.13 + +class C { + def `c c`(n: Int): Int = n + 1 +} + +// backticked operator is candidate for multiline infix, +// but backticked value is an innocent bystander. +// +class t12071 { + def c: C = ??? 
+ def i: Int = 42 + def `n n`: Int = 17 + def f = c + `c c` i + def g = i + + `n n` + def basic = + 1 + + 2 +} + +object C { + def x = 42 + + 1 + + def y = 1 + + `test-1` + `test-2` + + def z = 2 + `compareTo` (2 - 1) + + def `test-1`: Int = 23 + def `test-2`: Int = 42 + def compareTo(x: Int) = println("lol") + + def yy = 1 + /* fails in scala 3 + + + `test-1` + + + `test-2` + */ +} + +object Test extends App { + println(C.x) + println(C.y) + println(C.z) + println(C.yy) +} diff --git a/test/files/neg/t12074.check b/test/files/neg/t12074.check new file mode 100644 index 000000000000..0604703cd710 --- /dev/null +++ b/test/files/neg/t12074.check @@ -0,0 +1,9 @@ +t12074.scala:5: warning: private val range in class C is never used + private val range = -700 to 700 + ^ +t12074.scala:6: warning: private val rangely in class C is never used + private val rangely = -700.to(700) + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t12074.scala b/test/files/neg/t12074.scala new file mode 100644 index 000000000000..f94a138a8a1e --- /dev/null +++ b/test/files/neg/t12074.scala @@ -0,0 +1,7 @@ + +//> using options -Xlint -Werror -Yrangepos:false + +class C { + private val range = -700 to 700 + private val rangely = -700.to(700) +} diff --git a/test/files/neg/t12098.check b/test/files/neg/t12098.check new file mode 100644 index 000000000000..538115d78088 --- /dev/null +++ b/test/files/neg/t12098.check @@ -0,0 +1,7 @@ +t12098.scala:9: warning: method f in class C is deprecated (since 1.0): don't + def g() = f() + ^ +warning: 1 lint warning; change -Wconf for cat=lint to display individual messages +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12098.scala b/test/files/neg/t12098.scala new file mode 100644 index 000000000000..37454bce88a5 --- /dev/null +++ b/test/files/neg/t12098.scala @@ -0,0 +1,10 @@ +//xlint supersedes default Wconf setting, which is ws warn-summary for deprecation +//> using options -Werror -Wconf:cat=lint-missing-interpolator:ws -Xlint + +class C(i: Int) { + def p() = println("hi $i") + + @deprecated("don't", since="1.0") def f() = 42 + + def g() = f() +} diff --git a/test/files/neg/t1211.check b/test/files/neg/t1211.check new file mode 100644 index 000000000000..b8914d4fbef6 --- /dev/null +++ b/test/files/neg/t1211.check @@ -0,0 +1,11 @@ +test_2.scala:5: error: value E is not a member of J +did you mean I.E? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. + j.f(j.E.E1) + ^ +test_2.scala:6: error: value E is not a member of object J +did you mean I.E? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. 
+ j.f(J.E.E1) + ^ +2 errors diff --git a/test/files/neg/t1211/I.java b/test/files/neg/t1211/I.java new file mode 100644 index 000000000000..0488c33631ff --- /dev/null +++ b/test/files/neg/t1211/I.java @@ -0,0 +1,4 @@ + +interface I { + enum E { E1 } +} diff --git a/test/files/neg/t1211/J.java b/test/files/neg/t1211/J.java new file mode 100644 index 000000000000..91bf023f4921 --- /dev/null +++ b/test/files/neg/t1211/J.java @@ -0,0 +1,4 @@ + +class J implements I { + void f(E e) { } +} diff --git a/test/files/neg/t1211/test_2.scala b/test/files/neg/t1211/test_2.scala new file mode 100644 index 000000000000..f1ffad821bdf --- /dev/null +++ b/test/files/neg/t1211/test_2.scala @@ -0,0 +1,8 @@ + +object Test extends App { + val j = new J + println { + j.f(j.E.E1) + j.f(J.E.E1) + } +} diff --git a/test/files/neg/t12110.check b/test/files/neg/t12110.check new file mode 100644 index 000000000000..430bf6c8f54a --- /dev/null +++ b/test/files/neg/t12110.check @@ -0,0 +1,6 @@ +t12110.scala:20: error: type mismatch; + found : Unit + required: Int + case value unapplyer () => println(value) + ^ +1 error diff --git a/test/files/neg/t12110.scala b/test/files/neg/t12110.scala new file mode 100644 index 000000000000..02bb42633ab8 --- /dev/null +++ b/test/files/neg/t12110.scala @@ -0,0 +1,30 @@ + +object Main extends App { + + class Unapplyer { + def unapplySeq(seq: Seq[Int]): Option[(Int, Seq[Int])] = + if (seq.isEmpty) None else Some((seq.head, seq.tail)) + } + val unapplyer = new Unapplyer + + /* + v2.12.12: + prints "1" + v2.13.3: + type mismatch; + found : Unit + required: Int + case value unapplyer () => println(value) + */ + Seq(1) match { + case value unapplyer () => println(value) + } +} +/* +scala 3: +-- Error: test/files/neg/t12110.scala:20:25 ---------------------------------------------------------------------------------------------- +20 | case value unapplyer () => println(value) + | ^^ + | Values of types Unit and Int cannot be compared with == or != +1 error found +*/ 
diff --git a/test/files/neg/t1211b.check b/test/files/neg/t1211b.check new file mode 100644 index 000000000000..2555f993fa9c --- /dev/null +++ b/test/files/neg/t1211b.check @@ -0,0 +1,11 @@ +test.scala:4: error: type K is not a member of object Test +did you mean I.K? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. + new this.K + ^ +test.scala:5: error: type K is not a member of object Test +did you mean I.K? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. + new Test.K + ^ +2 errors diff --git a/test/files/neg/t1211b/I.java b/test/files/neg/t1211b/I.java new file mode 100644 index 000000000000..36cc09e5ca23 --- /dev/null +++ b/test/files/neg/t1211b/I.java @@ -0,0 +1,4 @@ + +class I { + static class K {} +} diff --git a/test/files/neg/t1211b/test.scala b/test/files/neg/t1211b/test.scala new file mode 100644 index 000000000000..fa99281577cb --- /dev/null +++ b/test/files/neg/t1211b/test.scala @@ -0,0 +1,7 @@ + +object Test extends I with App { + println { + new this.K + new Test.K + } +} diff --git a/test/files/neg/t1211c.check b/test/files/neg/t1211c.check new file mode 100644 index 000000000000..e05d336bdbff --- /dev/null +++ b/test/files/neg/t1211c.check @@ -0,0 +1,11 @@ +test.scala:5: error: value E is not a member of J +did you mean I.E? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. + j.f(j.E.E1) + ^ +test.scala:6: error: value E is not a member of object J +did you mean I.E? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. 
+ j.f(J.E.E1) + ^ +2 errors diff --git a/test/files/neg/t1211c/I.java b/test/files/neg/t1211c/I.java new file mode 100644 index 000000000000..0488c33631ff --- /dev/null +++ b/test/files/neg/t1211c/I.java @@ -0,0 +1,4 @@ + +interface I { + enum E { E1 } +} diff --git a/test/files/neg/t1211c/J.java b/test/files/neg/t1211c/J.java new file mode 100644 index 000000000000..91bf023f4921 --- /dev/null +++ b/test/files/neg/t1211c/J.java @@ -0,0 +1,4 @@ + +class J implements I { + void f(E e) { } +} diff --git a/test/files/neg/t1211c/test.scala b/test/files/neg/t1211c/test.scala new file mode 100644 index 000000000000..f1ffad821bdf --- /dev/null +++ b/test/files/neg/t1211c/test.scala @@ -0,0 +1,8 @@ + +object Test extends App { + val j = new J + println { + j.f(j.E.E1) + j.f(J.E.E1) + } +} diff --git a/test/files/neg/t12122.check b/test/files/neg/t12122.check new file mode 100644 index 000000000000..62f0ecbceca6 --- /dev/null +++ b/test/files/neg/t12122.check @@ -0,0 +1,9 @@ +t12122.scala:4: error: type mismatch; + found : String + required: Int + def g = (i: String, s) => f(i, s) + ^ +t12122.scala:9: error: not found: type Junk + def g = (i: Junk, s) => f(i, s) + ^ +2 errors diff --git a/test/files/neg/t12122.scala b/test/files/neg/t12122.scala new file mode 100644 index 000000000000..9f993cf92b26 --- /dev/null +++ b/test/files/neg/t12122.scala @@ -0,0 +1,10 @@ + +class C { + def f(i: Int, s: String) = s * i + def g = (i: String, s) => f(i, s) +} + +class D { + def f(i: Int, s: String) = s * i + def g = (i: Junk, s) => f(i, s) +} diff --git a/test/files/neg/t12134.check b/test/files/neg/t12134.check new file mode 100644 index 000000000000..63f9e9366df0 --- /dev/null +++ b/test/files/neg/t12134.check @@ -0,0 +1,6 @@ +#partest isWin +error: t12134-neg.obj/testy.jar (Access is denied) +1 error +#partest !isWin +error: t12134-neg.obj/testy.jar (Permission denied) +1 error diff --git a/test/files/neg/t12134/ploogin_1.scala b/test/files/neg/t12134/ploogin_1.scala new file mode 
100644 index 000000000000..458abe8c0ea3 --- /dev/null +++ b/test/files/neg/t12134/ploogin_1.scala @@ -0,0 +1,44 @@ + +package t12134 + +import scala.tools.nsc.{Global, Phase} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} + +//import java.nio.file.Files.createFile +//import java.nio.file.Files.deleteIfExists +import java.nio.file.Paths +//import java.nio.file.attribute.PosixFilePermissions.asFileAttribute +//import java.nio.file.attribute.PosixFilePermission.OWNER_READ +//import java.util.EnumSet + +/** A test plugin. */ +class Unplugged(val global: Global) extends Plugin { + import global._ + + val name = "unplugged" + val description = "A plugin that creates local output before jvm writes classes." + val components = List[PluginComponent](TestComponent) + + private object TestComponent extends PluginComponent { + val global: Unplugged.this.global.type = Unplugged.this.global + override val runsBefore = List("jvm") + val runsAfter = List("typer") + val phaseName = Unplugged.this.name + override def description = "Interfere with classwriter!" + def newPhase(prev: Phase) = new TestPhase(prev) + class TestPhase(prev: Phase) extends StdPhase(prev) { + override def description = TestComponent.this.description + def apply(unit: CompilationUnit): Unit = { + global.settings.outdir.value = s"${global.settings.plugin.value.head}/${global.settings.outdir.value}" + val path = Paths.get(global.settings.outdir.value) + val file = path.toFile() + //import scala.util.Try + //val perms = asFileAttribute(EnumSet.of(OWNER_READ)) + //val res = Try(createFile(path, perms)) + //assert(res.isSuccess) + file.delete() + assert(file.createNewFile() && file.setReadOnly()) + } + } + } +} diff --git a/test/files/neg/t12134/sample_2.scala b/test/files/neg/t12134/sample_2.scala new file mode 100644 index 000000000000..5d3b9aadb960 --- /dev/null +++ b/test/files/neg/t12134/sample_2.scala @@ -0,0 +1,9 @@ +//> using options -Xplugin:. 
-Xplugin-require:unplugged -d testy.jar +package sample + +// The unplugged plugin pre-emptively creates a read-only testy.jar. +// Previously, compilation would NPE after failing to write the output. +// Now the error is terse but accurate. +// The plugin updates -d to put the file under partest output, +// which happens to be the same as -Xplugin. +object Main extends App diff --git a/test/files/neg/t12134/scalac-plugin.xml b/test/files/neg/t12134/scalac-plugin.xml new file mode 100644 index 000000000000..4c9f14a441a0 --- /dev/null +++ b/test/files/neg/t12134/scalac-plugin.xml @@ -0,0 +1,4 @@ + +sample-plugin +t12134.Unplugged + diff --git a/test/files/neg/t1215.check b/test/files/neg/t1215.check index d6113b2dec0e..fd0ea5230326 100644 --- a/test/files/neg/t1215.check +++ b/test/files/neg/t1215.check @@ -1,5 +1,5 @@ -t1215.scala:3: error: value += is not a member of Int +t1215.scala:4: error: value += is not a member of Int Expression does not convert to assignment because receiver is not assignable. 
val x = 1 += 1 ^ -one error found +1 error diff --git a/test/files/neg/t1215.scala b/test/files/neg/t1215.scala index 7a3aa89262a0..82480b9d3f6a 100644 --- a/test/files/neg/t1215.scala +++ b/test/files/neg/t1215.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos +// +// object Test { val x = 1 += 1 } diff --git a/test/files/neg/t12155.check b/test/files/neg/t12155.check index a27554c0b90d..ea30afc28a82 100644 --- a/test/files/neg/t12155.check +++ b/test/files/neg/t12155.check @@ -1,4 +1,4 @@ -t12155.scala:6: error: class Node in class C1 cannot be accessed in C1[A,_$1] +t12155.scala:6: error: class Node in class C1 cannot be accessed as a member of C1[A,_$1] from package class D[A, C](val x: C1[A, _]#Node[C]) { ^ -one error found +1 error diff --git a/test/files/neg/t12159.check b/test/files/neg/t12159.check new file mode 100644 index 000000000000..bda2e48622ce --- /dev/null +++ b/test/files/neg/t12159.check @@ -0,0 +1,7 @@ +s.scala:5: error: illegal inheritance from sealed class H +class S extends H { + ^ +s.scala:8: error: illegal inheritance from sealed trait I +trait T extends I { + ^ +2 errors diff --git a/test/files/neg/t12159/H.java b/test/files/neg/t12159/H.java new file mode 100644 index 000000000000..3a15309f733e --- /dev/null +++ b/test/files/neg/t12159/H.java @@ -0,0 +1,19 @@ +// javaVersion: 17+ +package p; + +sealed public class H { +} + +final class K extends H { +} + +non-sealed class L extends H { +} + +sealed +class P extends H { +} + +final +class Q extends P { +} diff --git a/test/files/neg/t12159/I.java b/test/files/neg/t12159/I.java new file mode 100644 index 000000000000..f91c69dd7828 --- /dev/null +++ b/test/files/neg/t12159/I.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +package p; + +sealed interface I permits J { +} diff --git a/test/files/neg/t12159/J.java b/test/files/neg/t12159/J.java new file mode 100644 index 000000000000..5bd2c4c92374 --- /dev/null +++ b/test/files/neg/t12159/J.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +package p; + 
+sealed public class J implements I permits M { +} diff --git a/test/files/neg/t12159/M.java b/test/files/neg/t12159/M.java new file mode 100644 index 000000000000..245c79304d29 --- /dev/null +++ b/test/files/neg/t12159/M.java @@ -0,0 +1,9 @@ +// javaVersion: 17+ + +package p; + +public final class M extends J { +} + +final class N extends L { +} diff --git a/test/files/neg/t12159/s.scala b/test/files/neg/t12159/s.scala new file mode 100644 index 000000000000..9a32043d5a0b --- /dev/null +++ b/test/files/neg/t12159/s.scala @@ -0,0 +1,9 @@ +//> using jvm 17+ + +package p + +class S extends H { +} + +trait T extends I { +} diff --git a/test/files/neg/t12159b.check b/test/files/neg/t12159b.check new file mode 100644 index 000000000000..14dd6627065d --- /dev/null +++ b/test/files/neg/t12159b.check @@ -0,0 +1,4 @@ +s_2.scala:5: error: illegal inheritance from sealed trait I +class S extends I + ^ +1 error diff --git a/test/files/neg/t12159b/I.java b/test/files/neg/t12159b/I.java new file mode 100644 index 000000000000..f91c69dd7828 --- /dev/null +++ b/test/files/neg/t12159b/I.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +package p; + +sealed interface I permits J { +} diff --git a/test/files/neg/t12159b/J.java b/test/files/neg/t12159b/J.java new file mode 100644 index 000000000000..12de6f9fcbd4 --- /dev/null +++ b/test/files/neg/t12159b/J.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +package p; + +public final class J implements I { +} diff --git a/test/files/neg/t12159b/s_2.scala b/test/files/neg/t12159b/s_2.scala new file mode 100644 index 000000000000..4569ad1458fb --- /dev/null +++ b/test/files/neg/t12159b/s_2.scala @@ -0,0 +1,5 @@ +//> using jvm 17+ + +package p + +class S extends I diff --git a/test/files/neg/t12159c.check b/test/files/neg/t12159c.check new file mode 100644 index 000000000000..189c51ef6817 --- /dev/null +++ b/test/files/neg/t12159c.check @@ -0,0 +1,7 @@ +s_2.scala:7: warning: match may not be exhaustive. 
+It would fail on the following input: K() + h match { + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12159c/H.java b/test/files/neg/t12159c/H.java new file mode 100644 index 000000000000..bf6394e1e869 --- /dev/null +++ b/test/files/neg/t12159c/H.java @@ -0,0 +1,14 @@ +// javaVersion: 17+ +package p; + +sealed abstract public class H { +} + +final class J extends H { +} + +final class K extends H { +} + +final class L extends H { +} diff --git a/test/files/neg/t12159c/s_2.scala b/test/files/neg/t12159c/s_2.scala new file mode 100644 index 000000000000..caba28ddab05 --- /dev/null +++ b/test/files/neg/t12159c/s_2.scala @@ -0,0 +1,12 @@ +//> using jvm 17+ +//> using options -Werror +package p + +class C { + def f(h: H) = + h match { + case j: J => j.toString + case l: L => l.toString + } +} + diff --git a/test/files/neg/t12159d.check b/test/files/neg/t12159d.check new file mode 100644 index 000000000000..1efb798c80d7 --- /dev/null +++ b/test/files/neg/t12159d.check @@ -0,0 +1,7 @@ +t.scala:7: warning: match may not be exhaustive. +It would fail on the following input: W() + x match { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12159d/X.java b/test/files/neg/t12159d/X.java new file mode 100644 index 000000000000..0812a6fd21cd --- /dev/null +++ b/test/files/neg/t12159d/X.java @@ -0,0 +1,14 @@ +// javaVersion: 17+ +package p; + +sealed abstract public class X { +} + +final class W extends X { +} + +final class Y extends X { +} + +final class Z extends X { +} diff --git a/test/files/neg/t12159d/t.scala b/test/files/neg/t12159d/t.scala new file mode 100644 index 000000000000..89410685ee97 --- /dev/null +++ b/test/files/neg/t12159d/t.scala @@ -0,0 +1,12 @@ +//> using jvm 17+ +//> using options -Werror +package p + +class C { + def f(x: X) = + x match { + case y: Y => y.toString + case z: Z => z.toString + } +} + diff --git a/test/files/neg/t12159e.check b/test/files/neg/t12159e.check new file mode 100644 index 000000000000..86807203f124 --- /dev/null +++ b/test/files/neg/t12159e.check @@ -0,0 +1,11 @@ +t.scala:7: warning: match may not be exhaustive. +It would fail on the following input: W() + x match { + ^ +t.scala:12: warning: match may not be exhaustive. +It would fail on the following inputs: Z(), Z2() + x match { + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12159e/X.java b/test/files/neg/t12159e/X.java new file mode 100644 index 000000000000..6b770b115ea6 --- /dev/null +++ b/test/files/neg/t12159e/X.java @@ -0,0 +1,20 @@ +// javaVersion: 17+ +package p; + +sealed abstract public class X { +} + +final class W extends X { +} + +final class Y extends X { +} + +sealed class Z extends X permits Z1, Z2 { +} + +final class Z1 extends Z { +} + +final class Z2 extends Z { +} diff --git a/test/files/neg/t12159e/t.scala b/test/files/neg/t12159e/t.scala new file mode 100644 index 000000000000..06b5a92391af --- /dev/null +++ b/test/files/neg/t12159e/t.scala @@ -0,0 +1,18 @@ +//> using jvm 17+ +//> using options -Werror +package p + +class C { + def f(x: X) = + x match { + case y: Y => y.toString + case z: Z => z.toString + } + def g(x: X) = + x match { + case w: W => w.toString + case y: Y => y.toString + case z: Z1 => z.toString + } +} + diff --git a/test/files/neg/t12159f.check b/test/files/neg/t12159f.check new file mode 100644 index 000000000000..86807203f124 --- /dev/null +++ b/test/files/neg/t12159f.check @@ -0,0 +1,11 @@ +t.scala:7: warning: match may not be exhaustive. +It would fail on the following input: W() + x match { + ^ +t.scala:12: warning: match may not be exhaustive. +It would fail on the following inputs: Z(), Z2() + x match { + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12159f/X.java b/test/files/neg/t12159f/X.java new file mode 100644 index 000000000000..bc1043b66039 --- /dev/null +++ b/test/files/neg/t12159f/X.java @@ -0,0 +1,14 @@ +// javaVersion: 17+ +package p; + +sealed abstract public class X { +} + +final class W extends X { +} + +final class Y extends X { +} + +sealed class Z extends X permits Z1, Z2 { +} diff --git a/test/files/neg/t12159f/Z.java b/test/files/neg/t12159f/Z.java new file mode 100644 index 000000000000..55fcc661a182 --- /dev/null +++ b/test/files/neg/t12159f/Z.java @@ -0,0 +1,8 @@ +// javaVersion: 17+ +package p; + +final class Z1 extends Z { +} + +final class Z2 extends Z { +} diff --git a/test/files/neg/t12159f/t.scala b/test/files/neg/t12159f/t.scala new file mode 100644 index 000000000000..06b5a92391af --- /dev/null +++ b/test/files/neg/t12159f/t.scala @@ -0,0 +1,18 @@ +//> using jvm 17+ +//> using options -Werror +package p + +class C { + def f(x: X) = + x match { + case y: Y => y.toString + case z: Z => z.toString + } + def g(x: X) = + x match { + case w: W => w.toString + case y: Y => y.toString + case z: Z1 => z.toString + } +} + diff --git a/test/files/neg/t12159g.check b/test/files/neg/t12159g.check new file mode 100644 index 000000000000..f268b3430cdc --- /dev/null +++ b/test/files/neg/t12159g.check @@ -0,0 +1,7 @@ +t.scala:4: warning: match may not be exhaustive. +It would fail on the following inputs: Oz(), Z() + def n(a: X) = a match { case _: Y => 42 } + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12159g/X.java b/test/files/neg/t12159g/X.java new file mode 100644 index 000000000000..8687ede65157 --- /dev/null +++ b/test/files/neg/t12159g/X.java @@ -0,0 +1,12 @@ + +package p; +public sealed interface X { + public default int x() { return 27; } +} +final class Y implements X { } +final class O { + final static class Z implements X { } + final static class Inner { + final static class Oz implements X { } + } +} diff --git a/test/files/neg/t12159g/t.scala b/test/files/neg/t12159g/t.scala new file mode 100644 index 000000000000..a1cbf521e0e1 --- /dev/null +++ b/test/files/neg/t12159g/t.scala @@ -0,0 +1,5 @@ +//> using options -Werror -Xlint +package p +class T { + def n(a: X) = a match { case _: Y => 42 } +} diff --git a/test/files/neg/t12199.check b/test/files/neg/t12199.check new file mode 100644 index 000000000000..777ad6cc8891 --- /dev/null +++ b/test/files/neg/t12199.check @@ -0,0 +1,6 @@ +t12199.scala:4: warning: method copyArrayToImmutableIndexedSeq in class LowPriorityImplicits2 is deprecated (since 2.13.0): implicit conversions from Array to immutable.IndexedSeq are implemented by copying; use `toIndexedSeq` explicitly if you want to copy, or use the more efficient non-copying ArraySeq.unsafeWrapArray + val a: IndexedSeq[Int] = Array(1, 2, 3) + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12199.scala b/test/files/neg/t12199.scala new file mode 100644 index 000000000000..822b86483688 --- /dev/null +++ b/test/files/neg/t12199.scala @@ -0,0 +1,5 @@ +//> using options -Werror -Xlint + +class C { + val a: IndexedSeq[Int] = Array(1, 2, 3) +} diff --git a/test/files/neg/t12226.check b/test/files/neg/t12226.check new file mode 100644 index 000000000000..444d882ff6b3 --- /dev/null +++ b/test/files/neg/t12226.check @@ -0,0 +1,18 @@ +t12226.scala:6: warning: Implicit resolves to enclosing class Elvis; the conversion adds a member of AnyRef to value a + implicit class Elvis[A](alt: => A) { def ?:(a: A): A = if (a ne null) a else alt } // warn + ^ +t12226.scala:9: warning: Implicit resolves to enclosing method f; the conversion adds a member of AnyRef to value a + implicit def f[A](a: A): String = if (a ne null) a else "nope" // warn + ^ +t12226.scala:9: warning: Implicit resolves to enclosing method f + implicit def f[A](a: A): String = if (a ne null) a else "nope" // warn + ^ +t12226.scala:13: warning: Implicit resolves to enclosing method f; the conversion adds a member of AnyRef to result of method idt + implicit def f[A](a: A): String = if (idt(x = a) ne null) "yup" else "nope" // warn + ^ +t12226.scala:25: warning: Implicit resolves to enclosing class StringOps; the enrichment wraps value s + def normal: String = s.crazy.crazy // warn + ^ +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/t12226.scala b/test/files/neg/t12226.scala new file mode 100644 index 000000000000..0cd38ca13d19 --- /dev/null +++ b/test/files/neg/t12226.scala @@ -0,0 +1,43 @@ +//> using options -Xlint:implicit-recursion -Werror + +import language.implicitConversions + +object X { + implicit class Elvis[A](alt: => A) { def ?:(a: A): A = if (a ne null) a else alt } // warn +} +object Y { + implicit def f[A](a: A): String = if (a ne null) a else "nope" // warn +} +object YY { + def idt[A](n: Int = 1, x: A): A = x + implicit def f[A](a: A): String = if (idt(x = a) ne null) "yup" else "nope" // warn +} +object Z { + implicit class StringOps(val s: String) extends AnyVal { + def crazy: String = s.reverse + def normal: String = s.crazy.crazy // nowarn value class + def join(other: String): String = crazy + other.crazy // nowarn + } +} +object ZZ { + implicit class StringOps(s: String) { + def crazy: String = s.reverse + def normal: String = s.crazy.crazy // warn + def join(other: String): String = crazy + other.crazy // nowarn + } +} + +object ZZZ { + class C { def f: C = this } + implicit class E(c: C) { + def bar: Int = c.f.bar // nowarn + } +} + +object sd893 { + case class C(a: Int, b: Int) { + implicit class Enrich(c2: C) { + def foo: C = c2.copy(b = 0).foo // nowarn + } + } +} diff --git a/test/files/neg/t12233.check b/test/files/neg/t12233.check new file mode 100644 index 000000000000..ffa267af2701 --- /dev/null +++ b/test/files/neg/t12233.check @@ -0,0 +1,7 @@ +t12233.scala:4: error: ambiguous implicit values: + both value hehe of type TypeClass[T] + and value evidence$2 of type TypeClass[T] + match expected type TypeClass[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error diff --git a/test/files/neg/t12233.scala b/test/files/neg/t12233.scala new file mode 100644 index 000000000000..b2ad76732461 --- /dev/null +++ b/test/files/neg/t12233.scala @@ -0,0 +1,20 @@ + +trait TypeClass[T] +class Hehe[T: 
TypeClass](i: Int, j: Int) { + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) +} + +/* was +t12233.scala:4: error: too many arguments (found 3, expected 1) for constructor Hehe: (implicit evidence$1: TypeClass[T]): Hehe[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error + * now +t12233.scala:4: error: ambiguous implicit values: + both value hehe of type TypeClass[T] + and value evidence$2 of type TypeClass[T] + match expected type TypeClass[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error + */ diff --git a/test/files/neg/t12237.check b/test/files/neg/t12237.check new file mode 100644 index 000000000000..dbe091243e5e --- /dev/null +++ b/test/files/neg/t12237.check @@ -0,0 +1,10 @@ +t12237.scala:24: warning: Exhaustivity analysis reached max recursion depth, not all missing cases are reported. +(Please try with scalac -Ypatmat-exhaust-depth 40 or -Ypatmat-exhaust-depth off.) + (pq: PathAndQuery) match { + ^ +t12237.scala:24: warning: match may not be exhaustive. + (pq: PathAndQuery) match { + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12237.scala b/test/files/neg/t12237.scala new file mode 100644 index 000000000000..0a8b27675fb8 --- /dev/null +++ b/test/files/neg/t12237.scala @@ -0,0 +1,30 @@ +//> using options -Werror +sealed trait PathAndQuery +sealed trait Path extends PathAndQuery +sealed trait Query extends PathAndQuery + +object PathAndQuery { + case object Root extends Path + case class /(prev: Path, value: String) extends Path + + case class ===(k: String, v: String) extends Query + case class :&(prev: Query, next: (===)) extends Query + case class +?(path: Path, next: (===)) extends Query +} + +object Main { + def main(args: Array[String]): Unit = { + import PathAndQuery._ + + val path = /(/(Root, "page"), "1") + val q1 = ===("k1", "v1") + val q2 = ===("k2", "v2") + val pq = :&(+?(path, q1), q2) + + (pq: PathAndQuery) match { + case Root / "page" / "1" => println("match 1") + case Root / "page" / "1" +? ("k1" === "v1") => println("match 2") + case Root / "page" / "1" +? 
("k1" === "v1") :& ("k2" === "v2") => println("match 3") + } + } +} diff --git a/test/files/neg/t1224.check b/test/files/neg/t1224.check index 47c483876a44..c1532cd2a8c3 100644 --- a/test/files/neg/t1224.check +++ b/test/files/neg/t1224.check @@ -1,4 +1,4 @@ -t1224.scala:5: error: lower bound C[A.this.T] does not conform to upper bound C[C[A.this.T]] +t1224.scala:6: error: lower bound C[A.this.T] does not conform to upper bound C[C[A.this.T]] type T >: C[T] <: C[C[T]] ^ -one error found +1 error diff --git a/test/files/neg/t1224.scala b/test/files/neg/t1224.scala index d9eeedaf981d..64b76a04b3a7 100644 --- a/test/files/neg/t1224.scala +++ b/test/files/neg/t1224.scala @@ -1,4 +1,5 @@ -// scalac: -Ybreak-cycles +//> using options -Ybreak-cycles +// trait C[T] {} abstract class A { diff --git a/test/files/neg/t12240.check b/test/files/neg/t12240.check new file mode 100644 index 000000000000..fa290cc2aa88 --- /dev/null +++ b/test/files/neg/t12240.check @@ -0,0 +1,13 @@ +t12240.scala:21: warning: match may not be exhaustive. + def guardedNonExhaustive(x: Int) = x match { + ^ +t12240.scala:25: warning: match may not be exhaustive. + def guardedSeqNonExhaustive(x: Int) = x match { + ^ +t12240.scala:32: warning: match may not be exhaustive. +It would fail on the following input: Vector1() + def reported(v: Vector[String]) = v match { + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t12240.scala b/test/files/neg/t12240.scala new file mode 100644 index 000000000000..bebd05e8376a --- /dev/null +++ b/test/files/neg/t12240.scala @@ -0,0 +1,38 @@ +//> using options -Xfatal-warnings -Xlint:strict-unsealed-patmat +// + +object Test { + + //see also pos/t12240.scala + + class IrrefutableNameBasedResult[Result](r: Result) { + def isEmpty: false = false + def get: Result = r + } + + object IrrefutableIdentityExtractor { + def unapply[A](a: A): IrrefutableNameBasedResult[A] = new IrrefutableNameBasedResult(a) + } + + object IrrefutableSeqExtractor { + def unapplySeq[A](a: A) = new IrrefutableNameBasedResult(List(a)) + } + + def guardedNonExhaustive(x: Int) = x match { + case IrrefutableIdentityExtractor(_) if false => "non-exhaustive" + } + + def guardedSeqNonExhaustive(x: Int) = x match { + case IrrefutableSeqExtractor(_*) if false => "non-exhaustive" + } + + //status quo: + //should be in pos/t12240.scala but isn't exhaustive per + //per https://github.com/scala/bug/issues/12252 + def reported(v: Vector[String]) = v match { + case Vector() => "empty" + case Vector(_) => "one" + case Vector(_, _, _*) => "scalac doesn't know that this is exhaustive" + } + +} \ No newline at end of file diff --git a/test/files/neg/t12258.check b/test/files/neg/t12258.check new file mode 100644 index 000000000000..479826cac2af --- /dev/null +++ b/test/files/neg/t12258.check @@ -0,0 +1,9 @@ +sample_2.scala:2: warning: Something is wrong. +package sample + ^ +sample_2.scala:5: More is broken. +object Sample extends App { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12258/ploogin_1.scala b/test/files/neg/t12258/ploogin_1.scala new file mode 100644 index 000000000000..8e4d0c8e460d --- /dev/null +++ b/test/files/neg/t12258/ploogin_1.scala @@ -0,0 +1,36 @@ + +package t12258 + +import scala.tools.nsc.{Global, Phase} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} +import scala.tools.nsc.Reporting.WarningCategory.OtherDebug +import scala.reflect.io.Path +import scala.reflect.io.File + +/** A test plugin. */ +class Ploogin(val global: Global) extends Plugin { + import global._ + + val name = "ploogin" + val description = "A sample plugin for testing." + val components = List[PluginComponent](TestComponent) + + private object TestComponent extends PluginComponent { + val global: Ploogin.this.global.type = Ploogin.this.global + override val runsBefore = List("erasure") + val runsAfter = List("typer") + val phaseName = Ploogin.this.name + override def description = "A sample phase that emits warnings." + def newPhase(prev: Phase) = new TestPhase(prev) + class TestPhase(prev: Phase) extends StdPhase(prev) { + override def description = TestComponent.this.description + def apply(unit: CompilationUnit): Unit = { + currentRun.reporting.warning(unit.body.pos, "Something is wrong.", OtherDebug, site="ploog", Nil) + unit.body match { + case PackageDef(_, stats) => + currentRun.reporting.warning(stats.head.pos, "More is broken.", OtherDebug, site="ploog", Nil) + } + } + } + } +} diff --git a/test/files/neg/t12258/sample_2.scala b/test/files/neg/t12258/sample_2.scala new file mode 100644 index 000000000000..2ecd706a86de --- /dev/null +++ b/test/files/neg/t12258/sample_2.scala @@ -0,0 +1,6 @@ +//> using options -Wconf:cat=other-debug&msg=More:i -Werror -Xplugin:. 
-Xplugin-require:ploogin +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t12258/scalac-plugin.xml b/test/files/neg/t12258/scalac-plugin.xml new file mode 100644 index 000000000000..b0b48a4d2285 --- /dev/null +++ b/test/files/neg/t12258/scalac-plugin.xml @@ -0,0 +1,5 @@ + + ploogin + t12258.Ploogin + + diff --git a/test/files/neg/t12294.check b/test/files/neg/t12294.check new file mode 100644 index 000000000000..865a8a35c9e4 --- /dev/null +++ b/test/files/neg/t12294.check @@ -0,0 +1,37 @@ +t12294.scala:2: error: invalid unicode escape + "\u" + ^ +t12294.scala:3: error: invalid unicode escape + "\u " + ^ +t12294.scala:4: error: invalid unicode escape + "\uuuu" + ^ +t12294.scala:5: error: invalid unicode escape + "\uuuuu" + ^ +t12294.scala:6: error: invalid unicode escape + "\u123" + ^ +t12294.scala:7: error: invalid unicode escape + "\uu123" + ^ +t12294.scala:8: error: invalid unicode escape at index 1 of \u + """\u""" + ^ +t12294.scala:9: error: invalid unicode escape at index 2 of \u + """\u """ + ^ +t12294.scala:10: error: invalid unicode escape at index 4 of \uuuu + """\uuuu""" + ^ +t12294.scala:11: error: invalid unicode escape at index 5 of \uuuuu + """\uuuuu""" + ^ +t12294.scala:12: error: invalid unicode escape at index 5 of \u123 + """\u123""" + ^ +t12294.scala:13: error: invalid unicode escape at index 6 of \uu123 + """\uu123""" + ^ +12 errors diff --git a/test/files/neg/t12294.scala b/test/files/neg/t12294.scala new file mode 100644 index 000000000000..7bc2687691ce --- /dev/null +++ b/test/files/neg/t12294.scala @@ -0,0 +1,14 @@ +object Test { + "\u" + "\u " + "\uuuu" + "\uuuuu" + "\u123" + "\uu123" + """\u""" + """\u """ + """\uuuu""" + """\uuuuu""" + """\u123""" + """\uu123""" +} \ No newline at end of file diff --git a/test/files/neg/t12304.check b/test/files/neg/t12304.check new file mode 100644 index 000000000000..6709f57969ef --- /dev/null +++ 
b/test/files/neg/t12304.check @@ -0,0 +1,6 @@ +t12304.scala:10: warning: fruitless type test: a value of type Foo cannot also be a Bar + m.collect { case (_, bar: Bar) =>} + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12304.scala b/test/files/neg/t12304.scala new file mode 100644 index 000000000000..0e40e8fa140f --- /dev/null +++ b/test/files/neg/t12304.scala @@ -0,0 +1,12 @@ +//> using options -Werror + +// variant of pos/t12304 +// that does warn as desired + +class Foo; class Bar +class Test { + def t1: Unit = { + val m = Map(1 -> new Foo) + m.collect { case (_, bar: Bar) =>} + } +} diff --git a/test/files/neg/t12320.check b/test/files/neg/t12320.check new file mode 100644 index 000000000000..cae2ed0f80b1 --- /dev/null +++ b/test/files/neg/t12320.check @@ -0,0 +1,21 @@ +t12320.scala:4: warning: a type was inferred to be `Any`; this may indicate a programming error. + def f = Option.empty[Int].contains("") || false + ^ +t12320.scala:5: warning: a type was inferred to be `Any`; this may indicate a programming error. + def g(other: Option[Int]) = other.contains("") || false + ^ +t12320.scala:6: warning: a type was inferred to be `Any`; this may indicate a programming error. + def any = Option.empty[Int].contains("") + ^ +t12320.scala:11: warning: a type was inferred to be `Any`; this may indicate a programming error. + def f = List(1 -> "a", 2 -> "b", 3) map { p => val (a,b) = p; a + " -> " + b } + ^ +t12320.scala:17: warning: a type was inferred to be `Any`; this may indicate a programming error. + def test = review.f(42) + ^ +t12320.scala:11: warning: method any2stringadd in object Predef is deprecated (since 2.13.0): Implicit injection of + is deprecated. Convert to String to call + + def f = List(1 -> "a", 2 -> "b", 3) map { p => val (a,b) = p; a + " -> " + b } + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/t12320.scala b/test/files/neg/t12320.scala new file mode 100644 index 000000000000..6455c7443b74 --- /dev/null +++ b/test/files/neg/t12320.scala @@ -0,0 +1,18 @@ +//> using options -Werror -Xlint:infer-any,deprecation +// +trait T { + def f = Option.empty[Int].contains("") || false + def g(other: Option[Int]) = other.contains("") || false + def any = Option.empty[Int].contains("") +} + +trait `t9211 via 5898` { + + def f = List(1 -> "a", 2 -> "b", 3) map { p => val (a,b) = p; a + " -> " + b } +} + +object review { def f(x: String) = 0; def f[T >: String](x: T) = 1 } + +trait review { + def test = review.f(42) +} diff --git a/test/files/neg/t12321.check b/test/files/neg/t12321.check new file mode 100644 index 000000000000..55369db52a6a --- /dev/null +++ b/test/files/neg/t12321.check @@ -0,0 +1,4 @@ +Test_2.scala:7: error: not found: type B + def b = new B(1) + ^ +1 error diff --git a/test/files/neg/t12321/B_1.scala b/test/files/neg/t12321/B_1.scala new file mode 100644 index 000000000000..5d2da4e3962e --- /dev/null +++ b/test/files/neg/t12321/B_1.scala @@ -0,0 +1,5 @@ + +package bar + +protected[bar] class B(x: Int) + diff --git a/test/files/neg/t12321/Test_2.scala b/test/files/neg/t12321/Test_2.scala new file mode 100644 index 000000000000..06bc4e9a050e --- /dev/null +++ b/test/files/neg/t12321/Test_2.scala @@ -0,0 +1,8 @@ + +package example + +import bar.B + +object Test { + def b = new B(1) +} diff --git a/test/files/neg/t12322a.check b/test/files/neg/t12322a.check new file mode 100644 index 000000000000..1298c3190e10 --- /dev/null +++ b/test/files/neg/t12322a.check @@ -0,0 +1,7 @@ +t12322a.scala:3: error: class type required but Wrapper.this.MyObj.type found + val obj = new MyObj.type + ^ +t12322a.scala:6: error: class type required but Wrapper.this.MyObj.type found + val oops = new Oops + ^ +2 errors diff --git a/test/files/neg/t12322a.scala b/test/files/neg/t12322a.scala new file mode 100644 index 
000000000000..ae456ed8bd07 --- /dev/null +++ b/test/files/neg/t12322a.scala @@ -0,0 +1,7 @@ +class Wrapper { + object MyObj + val obj = new MyObj.type + + type Oops = MyObj.type + val oops = new Oops +} diff --git a/test/files/neg/t12322b.check b/test/files/neg/t12322b.check new file mode 100644 index 000000000000..4de2de221469 --- /dev/null +++ b/test/files/neg/t12322b.check @@ -0,0 +1,7 @@ +t12322b.scala:7: error: class type required but MyObj.type found +object ObjectNew extends MyObj.type { + ^ +t12322b.scala:9: error: class type required but MyObj.type found + val mo: MyObj.type = new MyObj.type() // here + ^ +2 errors diff --git a/test/files/neg/t12322b.scala b/test/files/neg/t12322b.scala new file mode 100644 index 000000000000..b532b6428661 --- /dev/null +++ b/test/files/neg/t12322b.scala @@ -0,0 +1,11 @@ +object MyObj { + val a: Int = 123 + val b: Double = 456.789 + val c: String = "ABC" +} + +object ObjectNew extends MyObj.type { + def main(args: Array[String]): Unit = { + val mo: MyObj.type = new MyObj.type() // here + } +} diff --git a/test/files/neg/t12324.check b/test/files/neg/t12324.check new file mode 100644 index 000000000000..3ade85f310a8 --- /dev/null +++ b/test/files/neg/t12324.check @@ -0,0 +1,19 @@ +t12324.scala:2: error: `@throws` only allowed for methods and constructors +@throws[Exception] class ScalaClass[T](someList: List[T]) { + ^ +t12324.scala:8: error: `@throws` only allowed for methods and constructors + @throws[Exception] object Y { ??? 
} + ^ +t12324.scala:12: error: `@throws` only allowed for methods and constructors + def f[A <: AnyRef](a: A) = a: AnyRef @throws[Exception] + ^ +t12324.scala:12: error: `@throws` only allowed for methods and constructors + def f[A <: AnyRef](a: A) = a: AnyRef @throws[Exception] + ^ +t12324.scala:14: error: `@throws` only allowed for methods and constructors + def g(): Unit = (): @throws[Exception] + ^ +t12324.scala:16: error: `@throws` only allowed for methods and constructors + def n(i: Int) = i match { case 42 => 27: @throws[Exception] } // not all cruft reaches refchecks + ^ +6 errors diff --git a/test/files/neg/t12324.scala b/test/files/neg/t12324.scala new file mode 100644 index 000000000000..3a193e512484 --- /dev/null +++ b/test/files/neg/t12324.scala @@ -0,0 +1,17 @@ + +@throws[Exception] class ScalaClass[T](someList: List[T]) { + throw new IllegalArgumentException("Boom!") +} + +object X { + // might be useful to annotate accessors of lazy vals + @throws[Exception] object Y { ??? } +} + +trait T { + def f[A <: AnyRef](a: A) = a: AnyRef @throws[Exception] + + def g(): Unit = (): @throws[Exception] + + def n(i: Int) = i match { case 42 => 27: @throws[Exception] } // not all cruft reaches refchecks +} diff --git a/test/files/neg/t12326.check b/test/files/neg/t12326.check new file mode 100644 index 000000000000..243d43ed616e --- /dev/null +++ b/test/files/neg/t12326.check @@ -0,0 +1,6 @@ +t12326.scala:5: warning: Unused import +import scala.collection.mutable._ + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12326.scala b/test/files/neg/t12326.scala new file mode 100644 index 000000000000..8515393c8f67 --- /dev/null +++ b/test/files/neg/t12326.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Wunused:imports -Wconf:site=p.T:s + +package p + +import scala.collection.mutable._ + +trait T { + import scala.concurrent.ExecutionContext.Implicits._ +} diff --git a/test/files/neg/t12326b.check b/test/files/neg/t12326b.check new file mode 100644 index 000000000000..4bfce221c26e --- /dev/null +++ b/test/files/neg/t12326b.check @@ -0,0 +1,6 @@ +t12326b.scala:5: warning: Unused import +import scala.collection.mutable.{ListBuffer, Map, Set} + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12326b.scala b/test/files/neg/t12326b.scala new file mode 100644 index 000000000000..3b8054fd007b --- /dev/null +++ b/test/files/neg/t12326b.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Wunused:imports -Wconf:site=p.T:s,origin=scala.collection.mutable.Map:s,origin=scala.collection.mutable.Set:s + +package p + +import scala.collection.mutable.{ListBuffer, Map, Set} + +trait T { + import scala.concurrent.ExecutionContext.Implicits._ +} diff --git a/test/files/neg/t12326c.check b/test/files/neg/t12326c.check new file mode 100644 index 000000000000..4c35e5ce86f7 --- /dev/null +++ b/test/files/neg/t12326c.check @@ -0,0 +1,6 @@ +t12326c.scala:8: warning: Unused import + import scala.concurrent.ExecutionContext.Implicits._ + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12326c.scala b/test/files/neg/t12326c.scala new file mode 100644 index 000000000000..37f8c86f1d60 --- /dev/null +++ b/test/files/neg/t12326c.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Wunused:imports -Wconf:origin=scala.collection.mutable.*:s + +package p + +import scala.collection.mutable.{ListBuffer, Map, Set} + +trait T { + import scala.concurrent.ExecutionContext.Implicits._ +} diff --git a/test/files/neg/t12347.check b/test/files/neg/t12347.check new file mode 100644 index 000000000000..0476089c1c4e --- /dev/null +++ b/test/files/neg/t12347.check @@ -0,0 +1,10 @@ +t12347.scala:14: error: unknown parameter name: x + X.f(n = count, x = text) + ^ +t12347.scala:15: error: overloaded method f with alternatives: + (s: String)String + (n: Int,s: String)String + [which have no such parameter x] cannot be applied to (n: Int, x: String) + Y.f(n = count, x = text) + ^ +2 errors diff --git a/test/files/neg/t12347.scala b/test/files/neg/t12347.scala new file mode 100644 index 000000000000..1795ecfc8320 --- /dev/null +++ b/test/files/neg/t12347.scala @@ -0,0 +1,16 @@ + +object X { + def f(n: Int, s: String) = s * n +} + +object Y { + def f(n: Int, s: String) = s * n + def f(s: String) = s * 3 +} + +object Test extends App { + def count = 2 + def text = "hi" + X.f(n = count, x = text) + Y.f(n = count, x = text) +} diff --git a/test/files/neg/t12349.check b/test/files/neg/t12349.check new file mode 100644 index 000000000000..e774d33bd3a0 --- /dev/null +++ b/test/files/neg/t12349.check @@ -0,0 +1,283 @@ +t12349b.scala:7: error: weaker access privileges in overriding +def a2(): Unit (defined in class t12349a) + override should be public + protected override def a2(): Unit = println("Inner12349b#a2()") // weaker access privileges + ^ +t12349b.scala:8: error: weaker access privileges in overriding +def a3(): Unit (defined in class t12349a) + override should not be private + private override def a3(): Unit = 
println("Inner12349b#a3()") // weaker access privileges + ^ +t12349b.scala:9: error: weaker access privileges in overriding +def a4(): Unit (defined in class t12349a) + override should be public + protected[t12349b] override def a4(): Unit = println("Inner12349b#a4()") // weaker access privileges + ^ +t12349b.scala:10: error: weaker access privileges in overriding +def a5(): Unit (defined in class t12349a) + override should be public + private[t12349b] override def a5(): Unit = println("Inner12349b#a5()") // weaker access privileges + ^ +t12349b.scala:11: error: weaker access privileges in overriding +def a6(): Unit (defined in class t12349a) + override should be public + protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // weaker access privileges + ^ +t12349b.scala:12: error: weaker access privileges in overriding +def a7(): Unit (defined in class t12349a) + override should be public + private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // weaker access privileges + ^ +t12349b.scala:13: error: weaker access privileges in overriding +def a8(): Unit (defined in class t12349a) + override should be public + protected[Inner12349b] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges + ^ +t12349b.scala:15: error: weaker access privileges in overriding +def aA(): Unit (defined in class t12349a) + override should be public + protected[this] override def aA(): Unit = println("Inner12349b#aA()") // weaker access privileges + ^ +t12349b.scala:16: error: weaker access privileges in overriding +def aB(): Unit (defined in class t12349a) + override should not be private + private[this] override def aB(): Unit = println("Inner12349b#aB()") // weaker access privileges + ^ +t12349b.scala:20: error: weaker access privileges in overriding +protected[package t12349] def b3(): Unit (defined in class t12349a) + override should not be private + private override def b3(): Unit = println("Inner12349b#b3()") // weaker 
access privileges + ^ +t12349b.scala:22: error: weaker access privileges in overriding +protected[package t12349] def b5(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[t12349b] override def b5(): Unit = println("Inner12349b#b5()") // weaker access privileges + ^ +t12349b.scala:24: error: weaker access privileges in overriding +protected[package t12349] def b7(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges + ^ +t12349b.scala:28: error: weaker access privileges in overriding +protected[package t12349] def bB(): Unit (defined in class t12349a) + override should not be private + private[this] override def bB(): Unit = println("Inner12349b#bB()") // weaker access privileges + ^ +t12349b.scala:31: error: weaker access privileges in overriding +private[package t12349] def c2(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected override def c2(): Unit = println("Inner12349b#c2()") // weaker access privileges + ^ +t12349b.scala:32: error: weaker access privileges in overriding +private[package t12349] def c3(): Unit (defined in class t12349a) + override should not be private + private override def c3(): Unit = println("Inner12349b#c3()") // weaker access privileges + ^ +t12349b.scala:33: error: weaker access privileges in overriding +private[package t12349] def c4(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[t12349b] override def c4(): Unit = println("Inner12349b#c4()") // weaker access privileges + ^ +t12349b.scala:34: error: weaker access privileges in overriding +private[package t12349] def c5(): Unit (defined in class t12349a) + override should at least be private[t12349] + private[t12349b] override def c5(): Unit = println("Inner12349b#c5()") // weaker access privileges + ^ +t12349b.scala:37: error: 
weaker access privileges in overriding +private[package t12349] def c8(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[Inner12349b] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges + ^ +t12349b.scala:39: error: weaker access privileges in overriding +private[package t12349] def cA(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[this] override def cA(): Unit = println("Inner12349b#cA()") // weaker access privileges + ^ +t12349b.scala:40: error: weaker access privileges in overriding +private[package t12349] def cB(): Unit (defined in class t12349a) + override should not be private + private[this] override def cB(): Unit = println("Inner12349b#cB()") // weaker access privileges + ^ +t12349b.scala:42: error: method d1 overrides nothing + override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing + ^ +t12349b.scala:43: error: method d2 overrides nothing + protected override def d2(): Unit = println("Inner12349b#d2()") // overrides nothing + ^ +t12349b.scala:44: error: method d3 overrides nothing + private override def d3(): Unit = println("Inner12349b#d3()") // overrides nothing + ^ +t12349b.scala:45: error: method d4 overrides nothing + protected[t12349b] override def d4(): Unit = println("Inner12349b#d4()") // overrides nothing + ^ +t12349b.scala:46: error: method d5 overrides nothing + private[t12349b] override def d5(): Unit = println("Inner12349b#d5()") // overrides nothing + ^ +t12349b.scala:47: error: method d6 overrides nothing + protected[t12349] override def d6(): Unit = println("Inner12349b#d6()") // overrides nothing + ^ +t12349b.scala:48: error: method d7 overrides nothing + private[t12349] override def d7(): Unit = println("Inner12349b#d7()") // overrides nothing + ^ +t12349b.scala:49: error: method d8 overrides nothing + protected[Inner12349b] override def d8(): Unit = println("Inner12349b#d8()") // overrides nothing 
+ ^ +t12349b.scala:51: error: method dA overrides nothing + protected[this] override def dA(): Unit = println("Inner12349b#dA()") // overrides nothing + ^ +t12349b.scala:52: error: method dB overrides nothing + private[this] override def dB(): Unit = println("Inner12349b#dB()") // overrides nothing + ^ +t12349b.scala:50: error: method d9 overrides nothing + private[Inner12349b] override def d9(): Unit = println("Inner12349b#d9()") // overrides nothing + ^ +t12349c.scala:9: error: weaker access privileges in overriding +def a2(): Unit (defined in class t12349a) + override should be public + protected override def a2(): Unit = println("Inner12349c#a2()") // weaker access privileges + ^ +t12349c.scala:10: error: weaker access privileges in overriding +def a3(): Unit (defined in class t12349a) + override should not be private + private override def a3(): Unit = println("Inner12349c#a3()") // weaker access privileges + ^ +t12349c.scala:11: error: weaker access privileges in overriding +def a4(): Unit (defined in class t12349a) + override should be public + protected[t12349c] override def a4(): Unit = println("Inner12349c#a4()") // weaker access privileges + ^ +t12349c.scala:12: error: weaker access privileges in overriding +def a5(): Unit (defined in class t12349a) + override should be public + private[t12349c] override def a5(): Unit = println("Inner12349c#a5()") // weaker access privileges + ^ +t12349c.scala:13: error: weaker access privileges in overriding +def a6(): Unit (defined in class t12349a) + override should be public + protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // weaker access privileges + ^ +t12349c.scala:14: error: weaker access privileges in overriding +def a7(): Unit (defined in class t12349a) + override should be public + private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // weaker access privileges + ^ +t12349c.scala:15: error: weaker access privileges in overriding +def a8(): Unit (defined in class t12349a) + 
override should be public + protected[Inner12349c] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges + ^ +t12349c.scala:17: error: weaker access privileges in overriding +def aA(): Unit (defined in class t12349a) + override should be public + protected[this] override def aA(): Unit = println("Inner12349c#aA()") // weaker access privileges + ^ +t12349c.scala:18: error: weaker access privileges in overriding +def aB(): Unit (defined in class t12349a) + override should not be private + private[this] override def aB(): Unit = println("Inner12349c#aB()") // weaker access privileges + ^ +t12349c.scala:22: error: weaker access privileges in overriding +protected[package t12349] def b3(): Unit (defined in class t12349a) + override should not be private + private override def b3(): Unit = println("Inner12349c#b3()") // weaker access privileges + ^ +t12349c.scala:24: error: weaker access privileges in overriding +protected[package t12349] def b5(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[t12349c] override def b5(): Unit = println("Inner12349c#b5()") // weaker access privileges + ^ +t12349c.scala:26: error: weaker access privileges in overriding +protected[package t12349] def b7(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges + ^ +t12349c.scala:30: error: weaker access privileges in overriding +protected[package t12349] def bB(): Unit (defined in class t12349a) + override should not be private + private[this] override def bB(): Unit = println("Inner12349c#bB()") // weaker access privileges + ^ +t12349c.scala:33: error: weaker access privileges in overriding +private[package t12349] def c2(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected override def c2(): Unit = println("Inner12349c#c2()") // weaker access privileges + ^ 
+t12349c.scala:34: error: weaker access privileges in overriding +private[package t12349] def c3(): Unit (defined in class t12349a) + override should not be private + private override def c3(): Unit = println("Inner12349c#c3()") // weaker access privileges + ^ +t12349c.scala:35: error: weaker access privileges in overriding +private[package t12349] def c4(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // weaker access privileges + ^ +t12349c.scala:36: error: weaker access privileges in overriding +private[package t12349] def c5(): Unit (defined in class t12349a) + override should at least be private[t12349] + private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // weaker access privileges + ^ +t12349c.scala:37: error: weaker access privileges in overriding +private[package t12349] def c6(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // weaker access privileges + ^ +t12349c.scala:38: error: weaker access privileges in overriding +private[package t12349] def c7(): Unit (defined in class t12349a) + override should at least be private[t12349] + private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // weaker access privileges + ^ +t12349c.scala:39: error: weaker access privileges in overriding +private[package t12349] def c8(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[Inner12349c] override def c8(): Unit = println("Inner12349c#c8()") // weaker access privileges + ^ +t12349c.scala:41: error: weaker access privileges in overriding +private[package t12349] def cA(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[this] override def cA(): Unit = println("Inner12349c#cA()") // weaker access privileges + ^ +t12349c.scala:42: error: weaker access 
privileges in overriding +private[package t12349] def cB(): Unit (defined in class t12349a) + override should not be private + private[this] override def cB(): Unit = println("Inner12349c#cB()") // weaker access privileges + ^ +t12349c.scala:32: error: method c1 overrides nothing + override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) + ^ +t12349c.scala:44: error: method d1 overrides nothing + override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing + ^ +t12349c.scala:45: error: method d2 overrides nothing + protected override def d2(): Unit = println("Inner12349c#d2()") // overrides nothing + ^ +t12349c.scala:46: error: method d3 overrides nothing + private override def d3(): Unit = println("Inner12349c#d3()") // overrides nothing + ^ +t12349c.scala:47: error: method d4 overrides nothing + protected[t12349c] override def d4(): Unit = println("Inner12349c#d4()") // overrides nothing + ^ +t12349c.scala:48: error: method d5 overrides nothing + private[t12349c] override def d5(): Unit = println("Inner12349c#d5()") // overrides nothing + ^ +t12349c.scala:49: error: method d6 overrides nothing + protected[pkg] override def d6(): Unit = println("Inner12349c#d6()") // overrides nothing + ^ +t12349c.scala:50: error: method d7 overrides nothing + private[pkg] override def d7(): Unit = println("Inner12349c#d7()") // overrides nothing + ^ +t12349c.scala:51: error: method d8 overrides nothing + protected[Inner12349c] override def d8(): Unit = println("Inner12349c#d8()") // overrides nothing + ^ +t12349c.scala:53: error: method dA overrides nothing + protected[this] override def dA(): Unit = println("Inner12349c#dA()") // overrides nothing + ^ +t12349c.scala:54: error: method dB overrides nothing + private[this] override def dB(): Unit = println("Inner12349c#dB()") // overrides nothing + ^ +t12349c.scala:40: error: method c9 overrides nothing + private[Inner12349c] override def c9(): Unit = println("Inner12349c#c9()") // weaker 
access privileges + ^ +t12349c.scala:52: error: method d9 overrides nothing + private[Inner12349c] override def d9(): Unit = println("Inner12349c#d9()") // overrides nothing + ^ +66 errors diff --git a/test/files/neg/t12349/t12349a.java b/test/files/neg/t12349/t12349a.java new file mode 100644 index 000000000000..f1b2cb51af39 --- /dev/null +++ b/test/files/neg/t12349/t12349a.java @@ -0,0 +1,53 @@ +package t12349; + +public class t12349a { + + public void a1() { System.out.println("t12349a#a1()"); } + public void a2() { System.out.println("t12349a#a2()"); } + public void a3() { System.out.println("t12349a#a3()"); } + public void a4() { System.out.println("t12349a#a4()"); } + public void a5() { System.out.println("t12349a#a5()"); } + public void a6() { System.out.println("t12349a#a6()"); } + public void a7() { System.out.println("t12349a#a7()"); } + public void a8() { System.out.println("t12349a#a8()"); } + public void a9() { System.out.println("t12349a#a9()"); } + public void aA() { System.out.println("t12349a#aA()"); } + public void aB() { System.out.println("t12349a#aB()"); } + + protected void b1() { System.out.println("t12349a#b1()"); } + protected void b2() { System.out.println("t12349a#b2()"); } + protected void b3() { System.out.println("t12349a#b3()"); } + protected void b4() { System.out.println("t12349a#b4()"); } + protected void b5() { System.out.println("t12349a#b5()"); } + protected void b6() { System.out.println("t12349a#b6()"); } + protected void b7() { System.out.println("t12349a#b7()"); } + protected void b8() { System.out.println("t12349a#b8()"); } + protected void b9() { System.out.println("t12349a#b9()"); } + protected void bA() { System.out.println("t12349a#bA()"); } + protected void bB() { System.out.println("t12349a#bB()"); } + + void c1() { System.out.println("t12349a#c1()"); } + void c2() { System.out.println("t12349a#c2()"); } + void c3() { System.out.println("t12349a#c3()"); } + void c4() { System.out.println("t12349a#c4()"); } + void c5() 
{ System.out.println("t12349a#c5()"); } + void c6() { System.out.println("t12349a#c6()"); } + void c7() { System.out.println("t12349a#c7()"); } + void c8() { System.out.println("t12349a#c8()"); } + void c9() { System.out.println("t12349a#c9()"); } + void cA() { System.out.println("t12349a#cA()"); } + void cB() { System.out.println("t12349a#cB()"); } + + private void d1() { System.out.println("t12349a#d1()"); } + private void d2() { System.out.println("t12349a#d2()"); } + private void d3() { System.out.println("t12349a#d3()"); } + private void d4() { System.out.println("t12349a#d4()"); } + private void d5() { System.out.println("t12349a#d5()"); } + private void d6() { System.out.println("t12349a#d6()"); } + private void d7() { System.out.println("t12349a#d7()"); } + private void d8() { System.out.println("t12349a#d8()"); } + private void d9() { System.out.println("t12349a#d9()"); } + private void dA() { System.out.println("t12349a#dA()"); } + private void dB() { System.out.println("t12349a#dB()"); } + +} diff --git a/test/files/neg/t12349/t12349b.scala b/test/files/neg/t12349/t12349b.scala new file mode 100644 index 000000000000..2d72c8124c32 --- /dev/null +++ b/test/files/neg/t12349/t12349b.scala @@ -0,0 +1,55 @@ +package t12349 + +object t12349b { + + class Inner12349b extends t12349a { + override def a1(): Unit = println("Inner12349b#a1()") + protected override def a2(): Unit = println("Inner12349b#a2()") // weaker access privileges + private override def a3(): Unit = println("Inner12349b#a3()") // weaker access privileges + protected[t12349b] override def a4(): Unit = println("Inner12349b#a4()") // weaker access privileges + private[t12349b] override def a5(): Unit = println("Inner12349b#a5()") // weaker access privileges + protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // weaker access privileges + private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // weaker access privileges + protected[Inner12349b] override def 
a8(): Unit = println("Inner12349b#a8()") // weaker access privileges + private[Inner12349b] override def a9(): Unit = println("Inner12349b#a9()") // weaker access privileges + protected[this] override def aA(): Unit = println("Inner12349b#aA()") // weaker access privileges + private[this] override def aB(): Unit = println("Inner12349b#aB()") // weaker access privileges + + override def b1(): Unit = println("Inner12349b#b1()") + protected override def b2(): Unit = println("Inner12349b#b2()") + private override def b3(): Unit = println("Inner12349b#b3()") // weaker access privileges + protected[t12349b] override def b4(): Unit = println("Inner12349b#b4()") + private[t12349b] override def b5(): Unit = println("Inner12349b#b5()") // weaker access privileges + protected[t12349] override def b6(): Unit = println("Inner12349b#b6()") + private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges + protected[Inner12349b] override def b8(): Unit = println("Inner12349b#b8()") // [#12349] - not fixed by PR #9525 + private[Inner12349b] override def b9(): Unit = println("Inner12349b#b9()") // weaker access privileges + protected[this] override def bA(): Unit = println("Inner12349b#bA()") // [#12349] - not fixed by PR #9525 + private[this] override def bB(): Unit = println("Inner12349b#bB()") // weaker access privileges + + override def c1(): Unit = println("Inner12349b#c1()") + protected override def c2(): Unit = println("Inner12349b#c2()") // weaker access privileges + private override def c3(): Unit = println("Inner12349b#c3()") // weaker access privileges + protected[t12349b] override def c4(): Unit = println("Inner12349b#c4()") // weaker access privileges + private[t12349b] override def c5(): Unit = println("Inner12349b#c5()") // weaker access privileges + protected[t12349] override def c6(): Unit = println("Inner12349b#c6()") + private[t12349] override def c7(): Unit = println("Inner12349b#c7()") + protected[Inner12349b] override def 
c8(): Unit = println("Inner12349b#c8()") // weaker access privileges + private[Inner12349b] override def c9(): Unit = println("Inner12349b#c9()") // weaker access privileges + protected[this] override def cA(): Unit = println("Inner12349b#cA()") // weaker access privileges + private[this] override def cB(): Unit = println("Inner12349b#cB()") // weaker access privileges + + override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing + protected override def d2(): Unit = println("Inner12349b#d2()") // overrides nothing + private override def d3(): Unit = println("Inner12349b#d3()") // overrides nothing + protected[t12349b] override def d4(): Unit = println("Inner12349b#d4()") // overrides nothing + private[t12349b] override def d5(): Unit = println("Inner12349b#d5()") // overrides nothing + protected[t12349] override def d6(): Unit = println("Inner12349b#d6()") // overrides nothing + private[t12349] override def d7(): Unit = println("Inner12349b#d7()") // overrides nothing + protected[Inner12349b] override def d8(): Unit = println("Inner12349b#d8()") // overrides nothing + private[Inner12349b] override def d9(): Unit = println("Inner12349b#d9()") // overrides nothing + protected[this] override def dA(): Unit = println("Inner12349b#dA()") // overrides nothing + private[this] override def dB(): Unit = println("Inner12349b#dB()") // overrides nothing + } + +} diff --git a/test/files/neg/t12349/t12349c.scala b/test/files/neg/t12349/t12349c.scala new file mode 100644 index 000000000000..e86cfd9a79bf --- /dev/null +++ b/test/files/neg/t12349/t12349c.scala @@ -0,0 +1,59 @@ +package t12349 + +package pkg { + + object t12349c { + + class Inner12349c extends t12349a { + override def a1(): Unit = println("Inner12349c#a1()") + protected override def a2(): Unit = println("Inner12349c#a2()") // weaker access privileges + private override def a3(): Unit = println("Inner12349c#a3()") // weaker access privileges + protected[t12349c] override def a4(): Unit = 
println("Inner12349c#a4()") // weaker access privileges + private[t12349c] override def a5(): Unit = println("Inner12349c#a5()") // weaker access privileges + protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // weaker access privileges + private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // weaker access privileges + protected[Inner12349c] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges + private[Inner12349c] override def a9(): Unit = println("Inner12349c#a9()") // weaker access privileges + protected[this] override def aA(): Unit = println("Inner12349c#aA()") // weaker access privileges + private[this] override def aB(): Unit = println("Inner12349c#aB()") // weaker access privileges + + override def b1(): Unit = println("Inner12349c#b1()") + protected override def b2(): Unit = println("Inner12349c#b2()") + private override def b3(): Unit = println("Inner12349c#b3()") // weaker access privileges + protected[t12349c] override def b4(): Unit = println("Inner12349c#b4()") + private[t12349c] override def b5(): Unit = println("Inner12349c#b5()") // weaker access privileges + protected[pkg] override def b6(): Unit = println("Inner12349c#b6()") + private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges + protected[Inner12349c] override def b8(): Unit = println("Inner12349c#b8()") // [#12349] - not fixed by PR #9525 + private[Inner12349c] override def b9(): Unit = println("Inner12349c#b9()") // weaker access privileges + protected[this] override def bA(): Unit = println("Inner12349c#bA()") // [#12349] - not fixed by PR #9525 + private[this] override def bB(): Unit = println("Inner12349c#bB()") // weaker access privileges + + override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) + protected override def c2(): Unit = println("Inner12349c#c2()") // weaker access privileges + private override def c3(): Unit = println("Inner12349c#c3()") 
// weaker access privileges + protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // weaker access privileges + private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // weaker access privileges + protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // weaker access privileges + private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // weaker access privileges + protected[Inner12349c] override def c8(): Unit = println("Inner12349c#c8()") // weaker access privileges + private[Inner12349c] override def c9(): Unit = println("Inner12349c#c9()") // weaker access privileges + protected[this] override def cA(): Unit = println("Inner12349c#cA()") // weaker access privileges + private[this] override def cB(): Unit = println("Inner12349c#cB()") // weaker access privileges + + override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing + protected override def d2(): Unit = println("Inner12349c#d2()") // overrides nothing + private override def d3(): Unit = println("Inner12349c#d3()") // overrides nothing + protected[t12349c] override def d4(): Unit = println("Inner12349c#d4()") // overrides nothing + private[t12349c] override def d5(): Unit = println("Inner12349c#d5()") // overrides nothing + protected[pkg] override def d6(): Unit = println("Inner12349c#d6()") // overrides nothing + private[pkg] override def d7(): Unit = println("Inner12349c#d7()") // overrides nothing + protected[Inner12349c] override def d8(): Unit = println("Inner12349c#d8()") // overrides nothing + private[Inner12349c] override def d9(): Unit = println("Inner12349c#d9()") // overrides nothing + protected[this] override def dA(): Unit = println("Inner12349c#dA()") // overrides nothing + private[this] override def dB(): Unit = println("Inner12349c#dB()") // overrides nothing + } + + } + +} diff --git a/test/files/neg/t12380.check b/test/files/neg/t12380.check new file mode 100644 index 000000000000..4b9f7ae63a68 --- /dev/null 
+++ b/test/files/neg/t12380.check @@ -0,0 +1,8 @@ +Test.scala:1: error: incompatible type in overriding +def m(): String (defined in trait I) + with def m(): Object (defined in class C); + found : (): Object + required: (): String +object Test extends p.J.C with p.J.I { + ^ +1 error diff --git a/test/files/neg/t12380/J.java b/test/files/neg/t12380/J.java new file mode 100644 index 000000000000..280cea1286b1 --- /dev/null +++ b/test/files/neg/t12380/J.java @@ -0,0 +1,14 @@ +package p; + +public class J { + public static class C { + public Object m() { return new Object(); } + } + public interface I { + public String m(); + } + + public static class Test extends C implements I { + @Override public String m() { return ""; } + } +} diff --git a/test/files/neg/t12380/Test.scala b/test/files/neg/t12380/Test.scala new file mode 100644 index 000000000000..976b42ffdb93 --- /dev/null +++ b/test/files/neg/t12380/Test.scala @@ -0,0 +1,5 @@ +object Test extends p.J.C with p.J.I { + def main(args: Array[String]): Unit = { + println((this: p.J.I).m.trim) + } +} diff --git a/test/files/neg/t12394.check b/test/files/neg/t12394.check new file mode 100644 index 000000000000..7dbf4d49d9e5 --- /dev/null +++ b/test/files/neg/t12394.check @@ -0,0 +1,11 @@ +Test.scala:2: error: cannot override final member: +final def m(): Int (defined in class C) + with def m(): Int (defined in trait J) +class S2 extends p.A.C with p.A.J + ^ +Test.scala:4: error: cannot override final member: +final def m(): Int (defined in class C) + with def m(): Int (defined in trait J) +class S3 extends p.A.C with K + ^ +2 errors diff --git a/test/files/neg/t12394/A.java b/test/files/neg/t12394/A.java new file mode 100644 index 000000000000..cf3188018d93 --- /dev/null +++ b/test/files/neg/t12394/A.java @@ -0,0 +1,17 @@ +package p; + +public class A { + public static interface I { + default int m() { return 1; } + } + + public static interface J extends I { + @Override default int m() { return 2; } + } + + public 
static class C implements I { + @Override public final int m() { return 3; } + } + + public static class D extends C implements J { } +} diff --git a/test/files/neg/t12394/Test.scala b/test/files/neg/t12394/Test.scala new file mode 100644 index 000000000000..8a272c5127cd --- /dev/null +++ b/test/files/neg/t12394/Test.scala @@ -0,0 +1,4 @@ +class S1 extends p.A.D +class S2 extends p.A.C with p.A.J +trait K extends p.A.J +class S3 extends p.A.C with K diff --git a/test/files/neg/t12408-backport.check b/test/files/neg/t12408-backport.check deleted file mode 100644 index cc8ab23c5037..000000000000 --- a/test/files/neg/t12408-backport.check +++ /dev/null @@ -1,6 +0,0 @@ -t12408-backport.scala:2: warning: abstract type X in type pattern Some[X] is unchecked since it is eliminated by erasure -class A[X] { def f[Y](x: Option[Y]) = x match { case s: Some[X] => 0; case _ => 1 } } - ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found diff --git a/test/files/neg/t12408-backport.scala b/test/files/neg/t12408-backport.scala deleted file mode 100644 index 1bac49550253..000000000000 --- a/test/files/neg/t12408-backport.scala +++ /dev/null @@ -1,2 +0,0 @@ -// scalac: -Xsource:2.13 -Werror -class A[X] { def f[Y](x: Option[Y]) = x match { case s: Some[X] => 0; case _ => 1 } } diff --git a/test/files/neg/t12408.check b/test/files/neg/t12408.check new file mode 100644 index 000000000000..33be21bb4ecd --- /dev/null +++ b/test/files/neg/t12408.check @@ -0,0 +1,30 @@ +t12408.scala:6: warning: abstract type pattern B is unchecked since it is eliminated by erasure + def f1[B] = a match { case _: B => } // warn + ^ +t12408.scala:7: warning: abstract type B in type pattern t12408.Renderer[B] is unchecked since it is eliminated by erasure + def f2[B] = a match { case _: Renderer[B] => } // warn + ^ +t12408.scala:8: warning: non-variable type argument Int in type pattern List[Int] (the underlying of List[Int]) is unchecked since it is eliminated by 
erasure + def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn + ^ +t12408.scala:9: warning: abstract type A in type pattern t12408.Renderer[A] is unchecked since it is eliminated by erasure + def g = a match { case _: Renderer[A] => } // now also warn + ^ +t12408.scala:14: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:17: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:22: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:47: warning: the type test for pattern t12408c.C[A,B] cannot be checked at runtime because it has type parameters eliminated by erasure + def test(t: T[A]) = t match { case _: C[A, B] => } // warn on B + ^ +t12408.scala:65: warning: the type test for pattern reported.Renderer[Page,Props] cannot be checked at runtime because it has type parameters eliminated by erasure + case r: Renderer[Page, Props] => 1 // warn as above + ^ +error: No warnings can be incurred under -Werror. 
+9 warnings +1 error diff --git a/test/files/neg/t12408.scala b/test/files/neg/t12408.scala new file mode 100644 index 000000000000..fc6ffa9899f3 --- /dev/null +++ b/test/files/neg/t12408.scala @@ -0,0 +1,82 @@ +//> using options -Werror + +package t12408 { + class Renderer[A] + class Test[A](a: Any) { + def f1[B] = a match { case _: B => } // warn + def f2[B] = a match { case _: Renderer[B] => } // warn + def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn + def g = a match { case _: Renderer[A] => } // now also warn + } + + trait T[A,B,C,D,E,F,G,H,I,J,K,L,M] { + def f(a: Any) = a match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + def g[A,B,C,D,E,F,G,H,I,J,K,L,M] = (null: Any) match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + } + class C[A,B,C,D,E,F,G,H,I,J,K,L,M] { + def f(a: Any) = a match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + } +} + +package t12408b { + // trait's type params align with class C + sealed trait T[A, B] + final case class C[A, B](a: A, b: B) extends T[A, B] + + class Test[A, B] { + def test(t: T[A, B]) = t match { case _: C[A, B] => } // nowarn + } + object Test extends App { + println { + new Test[String, Int]().test(C("hi", 42)) + } + } +} + +package t12408c { + sealed trait T[A] + final case class C[A, B](a: A, b: B) extends T[A] + + class Test[A, B] { + def test(t: T[A]) = t match { case _: C[A, B] => } // warn on B + } + object Test extends App { + println { + new Test[String, Int]().test(C("hi", 42)) + } + } +} + +package reported { + sealed trait Action[Page] + final case class Renderer[Page, Props]() extends Action[Page] + sealed trait Redirect[Page] extends Action[Page] + + final class RouterLogic[Page, Props] { + + def hmm1(a: Action[Page]): Int = + a match { + case r: Renderer[Page, Props] => 1 // warn as above + case _ => 2 + } + + def hmm2(a: Action[Page]): Int = + a match { + case r: Redirect[Page] => 2 // nowarn + case _ => 1 + } + } +} + +package regression { + object unchecked3 { + /* nowarn */ 
def tparamLeakage1(x: Any) = x match { case Array() => 1 } + /* nowarn */ def tparamLeakage2(x: Any) = x match { case List() => 1 } + } +} diff --git a/test/files/neg/t1241.check b/test/files/neg/t1241.check index e1ccf4172fa3..fd8aa0e7c7b8 100644 --- a/test/files/neg/t1241.check +++ b/test/files/neg/t1241.check @@ -1,4 +1,4 @@ t1241.scala:5: error: class type required but AnyRef{def hello(): Unit} found - val x4 = new T { def hello() { println("4") } } // error! + val x4 = new T { def hello(): Unit = { println("4") } } // error! ^ -one error found +1 error diff --git a/test/files/neg/t1241.scala b/test/files/neg/t1241.scala index e115917136c5..09969ea29d78 100644 --- a/test/files/neg/t1241.scala +++ b/test/files/neg/t1241.scala @@ -1,8 +1,8 @@ object test extends App { // more.. - type T = { def hello() } + type T = { def hello(): Unit } //val x4 = new AnyRef { def hello() { println("4") } } // ok! - val x4 = new T { def hello() { println("4") } } // error! + val x4 = new T { def hello(): Unit = { println("4") } } // error! x4.hello() // more.. 
} diff --git a/test/files/neg/t12413.check b/test/files/neg/t12413.check new file mode 100644 index 000000000000..fefa9a3e8a80 --- /dev/null +++ b/test/files/neg/t12413.check @@ -0,0 +1,16 @@ +t12413.scala:13: error: inferred type arguments [AnyRef] do not conform to method close's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.close.toString()) + ^ +t12413.scala:14: error: inferred type arguments [AnyRef] do not conform to method close's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.close == 0) + ^ +t12413.scala:15: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open().toString) + ^ +t12413.scala:16: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open().toString()) + ^ +t12413.scala:17: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open() == 0) + ^ +5 errors diff --git a/test/files/neg/t12413.scala b/test/files/neg/t12413.scala new file mode 100644 index 000000000000..505c04f6b33b --- /dev/null +++ b/test/files/neg/t12413.scala @@ -0,0 +1,18 @@ +class Open + +class Door[State] { + def close[Phantom >: State <: Open]: Int = 0 + def open[Phantom >: State <: Open](): Int = 0 +} + +class Test { + val door = new Door[AnyRef] + // the error here happens later (at refchecks) + println(door.close.toString) + // the errors below happen when typing implicit conversions + println(door.close.toString()) + println(door.close == 0) + println(door.open().toString) + println(door.open().toString()) + println(door.open() == 0) +} diff --git a/test/files/neg/t12414.check b/test/files/neg/t12414.check new file mode 100644 index 000000000000..e94e68fb179c --- /dev/null +++ b/test/files/neg/t12414.check @@ -0,0 +1,6 @@ +t12414.scala:12: warning: fruitless type test: a value of type 
Trait1 cannot also be a Trait2 + case y: Trait2 => + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12414.scala b/test/files/neg/t12414.scala new file mode 100644 index 000000000000..187f701d6015 --- /dev/null +++ b/test/files/neg/t12414.scala @@ -0,0 +1,15 @@ +//> using options -Werror + +sealed trait Trait1 +sealed trait Trait2 + +class Class1 extends Trait1 +class Class2 extends Trait2 + +object Test extends App { + def test(x: Trait1): Unit = + x match { + case y: Trait2 => + case _ => + } +} diff --git a/test/files/neg/t12414b.check b/test/files/neg/t12414b.check new file mode 100644 index 000000000000..82da8bfc3fe3 --- /dev/null +++ b/test/files/neg/t12414b.check @@ -0,0 +1,6 @@ +b_2.scala:6: warning: fruitless type test: a value of type Trait1 cannot also be a Trait2 + case y: Trait2 => + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12414b/a_1.scala b/test/files/neg/t12414b/a_1.scala new file mode 100644 index 000000000000..cdb91902eb37 --- /dev/null +++ b/test/files/neg/t12414b/a_1.scala @@ -0,0 +1,6 @@ + +sealed trait Trait1 +sealed trait Trait2 + +class Class1 extends Trait1 +class Class2 extends Trait2 diff --git a/test/files/neg/t12414b/b_2.scala b/test/files/neg/t12414b/b_2.scala new file mode 100644 index 000000000000..39f17177de45 --- /dev/null +++ b/test/files/neg/t12414b/b_2.scala @@ -0,0 +1,9 @@ +//> using options -Werror + +object Test extends App { + def test(x: Trait1): Unit = + x match { + case y: Trait2 => + case _ => + } +} diff --git a/test/files/neg/t12433.check b/test/files/neg/t12433.check new file mode 100644 index 000000000000..ff7288bf8858 --- /dev/null +++ b/test/files/neg/t12433.check @@ -0,0 +1,4 @@ +t12433.scala:5: error: not found: value / + def t1 = / + ^ +1 error diff --git a/test/files/neg/t12433.scala b/test/files/neg/t12433.scala new file mode 100644 index 000000000000..885b49ff3646 --- /dev/null +++ 
b/test/files/neg/t12433.scala @@ -0,0 +1,7 @@ +//> using options -Wunused:nowarn +import annotation.nowarn +object T { + @deprecated def f = 1 + def t1 = / + @nowarn def t2 = f +} diff --git a/test/files/neg/t12441.check b/test/files/neg/t12441.check new file mode 100644 index 000000000000..1636980c4879 --- /dev/null +++ b/test/files/neg/t12441.check @@ -0,0 +1,24 @@ +t12441.scala:6: warning: a type was inferred to be `Any`; this may indicate a programming error. + def f: Any = Option.empty[String].contains(1234) + ^ +t12441.scala:7: warning: a type was inferred to be `Any`; this may indicate a programming error. + def g = Option.empty[String].contains(1234) + ^ +t12441.scala:8: warning: a type was inferred to be `Any`; this may indicate a programming error. + def h() = k(Option.empty[String].contains(1234)) + ^ +t12441.scala:10: warning: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want + signature: SetOps.apply(elem: A): Boolean + given arguments: + after adaptation: SetOps((): Unit) + def s0 = List(1, 2, 3).toSet() // adapt mistaken arg () to apply and infer AnyVal + ^ +t12441.scala:10: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. + def s0 = List(1, 2, 3).toSet() // adapt mistaken arg () to apply and infer AnyVal + ^ +t12441.scala:14: warning: a type was inferred to be `Any`; this may indicate a programming error. + def peskier = p == (42, List(17, "")) + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/t12441.scala b/test/files/neg/t12441.scala new file mode 100644 index 000000000000..900e748d84c6 --- /dev/null +++ b/test/files/neg/t12441.scala @@ -0,0 +1,15 @@ + +//> using options -Werror -Xlint + +trait T { + def k(u: => Unit): Unit = u + def f: Any = Option.empty[String].contains(1234) + def g = Option.empty[String].contains(1234) + def h() = k(Option.empty[String].contains(1234)) + + def s0 = List(1, 2, 3).toSet() // adapt mistaken arg () to apply and infer AnyVal + + val p = (42, 27) + def pesky = p == (42, 17) + def peskier = p == (42, List(17, "")) +} diff --git a/test/files/neg/t12478.check b/test/files/neg/t12478.check index f050cc5c6ad8..425079d4b825 100644 --- a/test/files/neg/t12478.check +++ b/test/files/neg/t12478.check @@ -1,31 +1,31 @@ -t12478.scala:3: error: found unicode bidirectional character '\u202e'; use a unicode escape instead +t12478.scala:3: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead accessLevel != "user‮ ⁦// Check if admin⁩ ⁦" ^ -t12478.scala:3: error: found unicode bidirectional character '\u2066'; use a unicode escape instead +t12478.scala:3: error: found unicode bidirectional character '\u2066'; in a string or character literal, use a unicode escape instead accessLevel != "user‮ ⁦// Check if admin⁩ ⁦" ^ -t12478.scala:3: error: found unicode bidirectional character '\u2069'; use a unicode escape instead +t12478.scala:3: error: found unicode bidirectional character '\u2069'; in a string or character literal, use a unicode escape instead accessLevel != "user‮ ⁦// Check if admin⁩ ⁦" ^ -t12478.scala:3: error: found unicode bidirectional character '\u2066'; use a unicode escape instead +t12478.scala:3: error: found unicode bidirectional character '\u2066'; in a string or character literal, use a unicode escape instead accessLevel != "user‮ ⁦// Check if admin⁩ ⁦" ^ -t12478.scala:7: error: found unicode bidirectional 
character '\u202e'; use a unicode escape instead +t12478.scala:7: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead cl‮ass C ^ -t12478.scala:9: error: found unicode bidirectional character '\u202e'; use a unicode escape instead +t12478.scala:9: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead def a‮cb ^ -t12478.scala:11: error: found unicode bidirectional character '\u202e'; use a unicode escape instead +t12478.scala:11: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead // comm‮tne ^ -t12478.scala:16: error: found unicode bidirectional character '\u202e'; use a unicode escape instead +t12478.scala:16: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead """te‮tx""" ^ -t12478.scala:17: error: found unicode bidirectional character '\u202e'; use a unicode escape instead +t12478.scala:17: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead raw"""te‮tx""" ^ -t12478.scala:19: error: found unicode bidirectional character '\u202e'; use a unicode escape instead +t12478.scala:19: error: found unicode bidirectional character '\u202e'; in a string or character literal, use a unicode escape instead val u202e = '‮' ^ -10 errors found +10 errors diff --git a/test/files/neg/t12494.check b/test/files/neg/t12494.check new file mode 100644 index 000000000000..b408a1af431e --- /dev/null +++ b/test/files/neg/t12494.check @@ -0,0 +1,164 @@ +[running phase parser on t12494.scala] +[running phase namer on t12494.scala] +[running phase packageobjects on t12494.scala] +[running phase typer on t12494.scala] +[running phase superaccessors on t12494.scala] +[log superaccessors] [context] ++ t12494.scala / Import(value ) +[log superaccessors] [context] ++ 
t12494.scala / Import(value ) +[log superaccessors] [context] ++ t12494.scala / Import(value ) +[log superaccessors] [context] ++ t12494.scala / EmptyTree +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / Ident() +[log superaccessors] [context] ++ t12494.scala / Ident() +[log superaccessors] [context] ++ t12494.scala / term X +[log superaccessors] [context] ++ t12494.scala / term X +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term m +[log superaccessors] [context] ++ t12494.scala / term m +[log superaccessors] [context] ++ t12494.scala / type C +[log superaccessors] [context] ++ t12494.scala / type C +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term C +[log superaccessors] [context] ++ t12494.scala / term C +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / type C2 +[log superaccessors] [context] ++ t12494.scala / type C2 +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term f +[log 
superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term test +[log superaccessors] [context] ++ t12494.scala / term test +[log superaccessors] [context] ++ t12494.scala / term Y +[log superaccessors] [context] ++ t12494.scala / term Y +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term n +[log superaccessors] [context] ++ t12494.scala / term n +[log superaccessors] [context] ++ t12494.scala / type C +[log superaccessors] [context] ++ t12494.scala / type C +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / type X +[log superaccessors] [context] ++ t12494.scala / type X +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term x +[log superaccessors] [context] ++ t12494.scala / term x +[log superaccessors] [context] ++ t12494.scala / term X +[log superaccessors] [context] ++ t12494.scala / term X +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term y +[log superaccessors] [context] ++ t12494.scala / term y +[log superaccessors] [context] ++ t12494.scala / 
term y +[log superaccessors] [context] ++ t12494.scala / term y +[log superaccessors] [context] ++ t12494.scala / term C +[log superaccessors] [context] ++ t12494.scala / term C +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / type C2 +[log superaccessors] [context] ++ t12494.scala / type C2 +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term test +[log superaccessors] [context] ++ t12494.scala / term test +[log superaccessors] In trait Base, renaming g -> Base$$g +[log superaccessors] Expanded 'g' to 'Base$$g' in trait Base +[log superaccessors] In trait Base, renaming h -> Base$$h +[log superaccessors] Expanded 'h' to 'Base$$h' in trait Base +[log superaccessors] In trait Base, renaming p -> Base$$p +[log superaccessors] Expanded 'p' to 'Base$$p' in trait Base +[log superaccessors] [context] ++ t12494.scala / type Base +[log superaccessors] [context] ++ t12494.scala / type Base +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term g +[log superaccessors] [context] ++ t12494.scala / term g +[log superaccessors] [context] ++ t12494.scala / term h +[log superaccessors] [context] ++ t12494.scala / term h +[log superaccessors] 
[context] ++ t12494.scala / term p +[log superaccessors] [context] ++ t12494.scala / term p +[log superaccessors] [context] ++ t12494.scala / term Base +[log superaccessors] [context] ++ t12494.scala / term Base +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / type Child +[log superaccessors] [context] ++ t12494.scala / type Child +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / Template(value ) +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term f +[log superaccessors] [context] ++ t12494.scala / term g +[log superaccessors] [context] ++ t12494.scala / term g +[log superaccessors] [context] ++ t12494.scala / term h +[log superaccessors] [context] ++ t12494.scala / term h +[log superaccessors] [context] ++ t12494.scala / term p +[log superaccessors] [context] ++ t12494.scala / term p +[running phase extmethods on t12494.scala] +[running phase pickler on t12494.scala] +[running phase refchecks on t12494.scala] +t12494.scala:9: error: weaker access privileges in overriding + protected[trait C] def f: scala.this.Int (defined in trait C) + override should at least be protected[C]; + found : scala.this.Int + required: scala.this.Int + protected[C] def f: Int = 42 // no, limitation + ^ +t12494.scala:28: error: weaker access privileges in overriding + protected[trait C] def f: scala.this.Int (defined in trait C) + override should at least be protected[C]; + found : scala.this.Int + required: scala.this.Int + protected[C] def f: Int = 42 // no + ^ +t12494.scala:47: error: class Child needs to be 
abstract. +Missing implementations for 3 members of trait Base. + private[trait Base] def g: scala.this.Int = ??? + private[trait Base] def h: scala.this.Int = ??? + private[trait Base] def p: scala.this.Int = ??? + + class Child extends Base { + ^ +t12494.scala:50: error: method g overrides nothing + override private[Base] def g: Int = 42 // ok, companion + ^ +t12494.scala:51: error: method h overrides nothing + override protected[Base] def h: Int = 42 // ok, private[C] widens to protected[C] + ^ +t12494.scala:52: error: method p overrides nothing + override protected def p: Int = 42 // error, protected only overrides protected + ^ +6 errors diff --git a/test/files/neg/t12494.scala b/test/files/neg/t12494.scala new file mode 100644 index 000000000000..a54f436aba99 --- /dev/null +++ b/test/files/neg/t12494.scala @@ -0,0 +1,54 @@ +//> using options -Ylog:superaccessors -Ydebug +object X { + def m: Int = { + trait C { + protected[C] def f: Int + } + object C { + class C2 extends C { + protected[C] def f: Int = 42 // no, limitation + def test = f + } + } + new C.C2().test + } +} +object Y { + def n: Int = { + trait C { + protected[C] def f: Int + } + class X { private def x = 17 } + locally { + object X { + val y = 27 + } + object C { + class C2 extends C { + protected[C] def f: Int = 42 // no + def test = f + X.y + } + } + new C.C2().test + } + } +} + +// other combinations +// mangling qualified privates says: +// Expanded 'g' to 'Base$$g' in trait Base +trait Base { + protected[Base] def f: Int + private[Base] def g: Int + private[Base] def h: Int + private[Base] def p: Int +} +object Base { + class Child extends Base { + override protected[Base] def f: Int = 42 // ok, companion + // was: overrides nothing (because of name mangling) + override private[Base] def g: Int = 42 // ok, companion + override protected[Base] def h: Int = 42 // ok, private[C] widens to protected[C] + override protected def p: Int = 42 // error, protected only overrides protected + } +} diff 
--git a/test/files/neg/t12495.check b/test/files/neg/t12495.check new file mode 100644 index 000000000000..bf735c969070 --- /dev/null +++ b/test/files/neg/t12495.check @@ -0,0 +1,12 @@ +t12495.scala:4: warning: adapted the argument list to expected Unit type: arguments will be discarded + signature: Function1.apply(v1: T1): R + given arguments: 42, 27 + after adaptation: Function1((42, 27): Unit) + def g = f(42, 27) + ^ +t12495.scala:4: warning: discarded non-Unit value of type (Int, Int) + def g = f(42, 27) + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t12495.scala b/test/files/neg/t12495.scala new file mode 100644 index 000000000000..5d24b25a486f --- /dev/null +++ b/test/files/neg/t12495.scala @@ -0,0 +1,5 @@ +//> using options -Werror -Xlint:arg-discard,adapted-args -Wvalue-discard +class C { + val f = (u: Unit) => println(s"[$u]") + def g = f(42, 27) +} diff --git a/test/files/neg/t12499.check b/test/files/neg/t12499.check new file mode 100644 index 000000000000..c49779c18ec4 --- /dev/null +++ b/test/files/neg/t12499.check @@ -0,0 +1,8 @@ +t12499.scala:455: warning: Cannot check match for unreachability. +The analysis required more space than allowed. +Please try with scalac -Ypatmat-exhaust-depth 60 or -Ypatmat-exhaust-depth off. + implicit val converter: Thing[Phantom.TypeA] => String = { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12499.scala b/test/files/neg/t12499.scala new file mode 100644 index 000000000000..be5e60cbe1d8 --- /dev/null +++ b/test/files/neg/t12499.scala @@ -0,0 +1,591 @@ +//> using options -Werror -Ystop-after:patmat -Ypatmat-exhaust-depth 30 +sealed trait Phantom[A] {} + +object Phantom { + type TypeA +} + +sealed abstract class ThingType(val v: Int) + +private object ThingType { + case object A extends ThingType(1) + case object B extends ThingType(-10) + case object C extends ThingType(-5) + case object D extends ThingType(15) + case object E extends ThingType(-16) +} + +sealed abstract class Thing[A](val thingType: ThingType) + +object Thing { + sealed abstract class ThingA[A] extends Thing[A](ThingType.A) + sealed abstract class ThingB[A] extends Thing[A](ThingType.B) + sealed abstract class ThingC[A] extends Thing[A](ThingType.C) + sealed abstract class ThingD[A] extends Thing[A](ThingType.D) + sealed abstract class ThingE[A] extends Thing[A](ThingType.E) +} + +object Stuff extends Phantom[Phantom.TypeA] { + import Phantom.TypeA + import Thing._ + + sealed abstract class OM(val id: String) extends ThingA[TypeA] { + def linkedA: ThingA[TypeA] + } + + case object OM1L extends ThingA[TypeA] + + case object OM1 extends OM("1") { + override def linkedA: ThingA[TypeA] = OM1L + } + + case object OM2L extends ThingA[TypeA] + + case object OM2 extends OM("2") { + override def linkedA: ThingA[TypeA] = OM2L + } + + case object OM3L extends ThingA[TypeA] + + case object OM3 extends OM("3") { + override def linkedA: ThingA[TypeA] = OM3L + } + + case object OM4L extends ThingA[TypeA] + + case object OM4 extends OM("4") { + override def linkedA: ThingA[TypeA] = OM4L + } + + case object OM5L extends ThingA[TypeA] + + case object OM5 extends OM("5") { + override def linkedA: ThingA[TypeA] = OM5L + } + + case object OM6L extends ThingA[TypeA] + + case object OM6 extends OM("6") { + override def linkedA: ThingA[TypeA] = OM6L + } + + case 
object OM7L extends ThingA[TypeA] + + case object OM7 extends OM("7") { + override def linkedA: ThingA[TypeA] = OM7L + } + + case object A1 extends ThingA[TypeA] + + case object A2 extends ThingA[TypeA] + + case object A3 extends ThingA[TypeA] + + sealed trait AA extends ThingA[TypeA] { + def linkedD: ThingD[TypeA] + } + + case object A4 extends AA { + override val linkedD: ThingD[TypeA] = A4L + } + + case object A5 extends AA { + override val linkedD: ThingD[TypeA] = A5L + } + + case object A6 extends AA { + override val linkedD: ThingD[TypeA] = A6L + } + + case object A7 extends AA { + override val linkedD: ThingD[TypeA] = A7L + } + + case object A8 extends AA { + override val linkedD: ThingD[TypeA] = A8L + } + + case object A9 extends AA { + override val linkedD: ThingD[TypeA] = A9L + } + + case object A10 extends AA { + override val linkedD: ThingD[TypeA] = A10L + } + + case object A11 extends AA { + override val linkedD: ThingD[TypeA] = A11L + } + + case object A12 extends AA { + override val linkedD: ThingD[TypeA] = A12L + } + + case object A13 extends AA { + override val linkedD: ThingD[TypeA] = A13L + } + + case object A14 extends AA { + override val linkedD: ThingD[TypeA] = A14L + } + + case object A15 extends AA { + override val linkedD: ThingD[TypeA] = A15L + } + + sealed abstract class G(val id: String) extends ThingA[TypeA] { + def linkedG: ThingA[TypeA] + } + + case object G1L extends ThingA[TypeA] + + case object G1 extends G("1") { + override def linkedG: ThingA[TypeA] = G1L + } + + case object G2L extends ThingA[TypeA] + + case object G2 extends G("2") { + override def linkedG: ThingA[TypeA] = G2L + } + + case object G3L extends ThingA[TypeA] + + case object G3 extends G("3") { + override def linkedG: ThingA[TypeA] = G3L + } + + case object G4L extends ThingA[TypeA] + + case object G4 extends G("4") { + override def linkedG: ThingA[TypeA] = G4L + } + + case object G5L extends ThingA[TypeA] + + case object G5 extends G("%") { + override def linkedG: 
ThingA[TypeA] = G5L + } + + case object G6L extends ThingA[TypeA] + + case object G6 extends G("6") { + override def linkedG: ThingA[TypeA] = G6L + } + + case object G7L extends ThingA[TypeA] + + case object G7 extends G("7") { + override def linkedG: ThingA[TypeA] = G7L + } + + case object G8L extends ThingA[TypeA] + + case object G8 extends G("8") { + override def linkedG: ThingA[TypeA] = G8L + } + + case object G9L extends ThingA[TypeA] + + case object G9 extends G("9") { + override def linkedG: ThingA[TypeA] = G9L + } + + case object G10L extends ThingA[TypeA] + + case object G10 extends G("10") { + override def linkedG: ThingA[TypeA] = G10L + } + + case object G11L extends ThingA[TypeA] + + case object G11 extends G("11") { + override def linkedG: ThingA[TypeA] = G11L + } + + sealed abstract class CC(val id: String) extends ThingA[TypeA] { + def c1: ThingA[TypeA] + + def c2: ThingA[TypeA] + + def c3: ThingA[TypeA] + + def c4: ThingD[TypeA] + + def c5: ThingD[TypeA] + } + + case object C1 extends CC("1") { + override def c1: ThingA[TypeA] = C11 + + override def c2: ThingA[TypeA] = C12 + + override def c3: ThingA[TypeA] = C13 + + override def c4: ThingD[TypeA] = C14 + + override def c5: ThingD[TypeA] = C15 + } + + case object C11 extends ThingA[TypeA] + + case object C12 extends ThingA[TypeA] + + case object C13 extends ThingA[TypeA] + + case object C2 extends CC("2") { + override def c1: ThingA[TypeA] = C21 + + override def c2: ThingA[TypeA] = C22 + + override def c3: ThingA[TypeA] = C23 + + override def c4: ThingD[TypeA] = C24 + + override def c5: ThingD[TypeA] = C25 + } + + case object C21 extends ThingA[TypeA] + + case object C22 extends ThingA[TypeA] + + case object C23 extends ThingA[TypeA] + + case object SN extends ThingC[TypeA] + + case object CLC extends ThingE[TypeA] + + case object SW extends ThingE[TypeA] + + case object A4L extends ThingD[TypeA] + + case object A5L extends ThingD[TypeA] + + case object A6L extends ThingD[TypeA] + + case object A7L 
extends ThingD[TypeA] + + case object A8L extends ThingD[TypeA] + + case object A9L extends ThingD[TypeA] + + case object A10L extends ThingD[TypeA] + + case object A11L extends ThingD[TypeA] + + case object A12L extends ThingD[TypeA] + + case object A13L extends ThingD[TypeA] + + case object A14L extends ThingD[TypeA] + + case object A15L extends ThingD[TypeA] + + case object ABC1 extends ThingD[TypeA] + + case object ABC2 extends ThingD[TypeA] + + case object ABC3 extends ThingD[TypeA] + + case object ABC4 extends ThingD[TypeA] + + case object ABC5 extends ThingD[TypeA] + + case object ABC6 extends ThingD[TypeA] + + case object ABC7 extends ThingD[TypeA] + + case object ABC8 extends ThingD[TypeA] + + case object ABC9 extends ThingD[TypeA] + + case object ABC10 extends ThingD[TypeA] + + case object C14 extends ThingD[TypeA] + + case object C15 extends ThingD[TypeA] + + case object C24 extends ThingD[TypeA] + + case object C25 extends ThingD[TypeA] + + case object ASD1 extends ThingD[TypeA] + + case object ASD2 extends ThingD[TypeA] + + case object ASD3 extends ThingD[TypeA] + + case object ASD4 extends ThingD[TypeA] + + case object ASD5 extends ThingE[TypeA] + + case object ASD6 extends ThingE[TypeA] + + sealed trait IR extends ThingE[TypeA] { + def linkedIR1: ThingD[TypeA] + def linkedIR2: ThingD[TypeA] + } + + case object IR11 extends ThingD[TypeA] + + case object IR12 extends ThingD[TypeA] + + case object IR1 extends IR { + override def linkedIR1: ThingD[TypeA] = IR11 + + override def linkedIR2: ThingD[TypeA] = IR12 + } + + case object IR21 extends ThingD[TypeA] + + case object IR22 extends ThingD[TypeA] + + case object IR2 extends IR { + override def linkedIR1: ThingD[TypeA] = IR21 + override def linkedIR2: ThingD[TypeA] = IR22 + } + + case object QW1 extends ThingE[TypeA] + + case object QW2 extends ThingE[TypeA] + + case object QW3 extends ThingE[TypeA] + + case object QW4 extends ThingE[TypeA] + + case object QW5 extends ThingE[TypeA] + + case object QW6 
extends ThingE[TypeA] + + sealed abstract class IE(val id: String) extends ThingA[TypeA] { + def linkedIE1: ThingE[TypeA] + def linkedIE2: ThingE[TypeA] + def linkedIE3: Thing[TypeA] + def linkedIE4: Thing[TypeA] + } + + case object IE1 extends IE("1") { + override val linkedIE1: ThingE[TypeA] = IE11 + override val linkedIE2: ThingE[TypeA] = IE12 + override val linkedIE3: ThingD[TypeA] = ABC3 + override val linkedIE4: ThingD[TypeA] = ABC4 + } + + case object IE11 extends ThingE[TypeA] + + case object IE12 extends ThingE[TypeA] + + case object IE2 extends IE("2") { + override val linkedIE1: ThingE[TypeA] = IE21 + override val linkedIE2: ThingE[TypeA] = IE22 + override val linkedIE3: ThingE[TypeA] = IE23 + override val linkedIE4: ThingE[TypeA] = IE24 + } + + case object IE21 extends ThingE[TypeA] + + case object IE22 extends ThingE[TypeA] + + case object IE23 extends ThingE[TypeA] + + case object IE24 extends ThingE[TypeA] + + sealed abstract class LA extends ThingC[TypeA] + + case object LA1 extends LA + + case object LA2 extends LA + + case object LA3 extends LA + + case object LA4 extends LA + + case object LA5 extends ThingC[TypeA] + + sealed abstract class MAD(val id: String) extends ThingC[TypeA] { + def otherId: String + def linkedMAD1: ThingC[TypeA] + def linkedMAD2: ThingC[TypeA] + def linkedMAD3: ThingD[TypeA] + def linkedMAD4: ThingC[TypeA] + def linkedMAD5: ThingC[TypeA] + def linkedMAD6: ThingC[TypeA] + } + + case object MAD11 extends ThingC[TypeA] + + case object MAD12 extends ThingC[TypeA] + + case object MAD13 extends ThingD[TypeA] + + case object MAD14 extends ThingC[TypeA] + + case object MAD15 extends ThingC[TypeA] + + case object MAD16 extends ThingC[TypeA] + + case object MAD1 extends MAD("1") { + override def otherId: String = "c1" + override def linkedMAD1: ThingC[TypeA] = MAD11 + override def linkedMAD2: ThingC[TypeA] = MAD12 + override def linkedMAD3: ThingD[TypeA] = MAD13 + override def linkedMAD4: ThingC[TypeA] = MAD14 + override def 
linkedMAD5: ThingC[TypeA] = MAD15 + override def linkedMAD6: ThingC[TypeA] = MAD16 + } + + case object MAD21 extends ThingC[TypeA] + case object MAD22 extends ThingC[TypeA] + case object MAD23 extends ThingD[TypeA] + case object MAD24 extends ThingC[TypeA] + case object MAD25 extends ThingC[TypeA] + case object MAD26 extends ThingC[TypeA] + case object MAD2 extends MAD("2") { + override def otherId: String = "c2" + override def linkedMAD1: ThingC[TypeA] = MAD21 + override def linkedMAD2: ThingC[TypeA] = MAD22 + override def linkedMAD3: ThingD[TypeA] = MAD23 + override def linkedMAD4: ThingC[TypeA] = MAD24 + override def linkedMAD5: ThingC[TypeA] = MAD25 + override def linkedMAD6: ThingC[TypeA] = MAD26 + } +} + +object Matcher { + implicit val converter: Thing[Phantom.TypeA] => String = { + case Stuff.OM1 => "OM1" + case Stuff.OM1L => "OM1L" + case Stuff.OM2 => "OM2" + case Stuff.OM2L => "OM2L" + case Stuff.OM3 => "OM3" + case Stuff.OM3L => "OM3L" + case Stuff.OM4 => "OM4" + case Stuff.OM4L => "OM4L" + case Stuff.OM5 => "OM5" + case Stuff.OM5L => "OM5L" + case Stuff.OM6 => "OM6" + case Stuff.OM6L => "OM6L" + case Stuff.OM7 => "OM7" + case Stuff.OM7L => "OM7L" + case Stuff.A4 => "A4" + case Stuff.A5 => "A5" + case Stuff.A6 => "A6" + case Stuff.A7 => "A7" + case Stuff.A8 => "A8" + case Stuff.A9 => "A9" + case Stuff.A10 => "A10" + case Stuff.A11 => "A11" + case Stuff.A12 => "A12" + case Stuff.A13 => "A13" + case Stuff.A14 => "A14" + case Stuff.A15 => "A15" + case Stuff.A4L => "A4L" + case Stuff.A5L => "A5L" + case Stuff.A6L => "A6L" + case Stuff.A7L => "A7L" + case Stuff.A8L => "A8L" + case Stuff.A9L => "A9L" + case Stuff.A10L => "A10L" + case Stuff.A11L => "A11L" + case Stuff.A12L => "A12L" + case Stuff.A13L => "A13L" + case Stuff.A14L => "A14L" + case Stuff.A15L => "A15L" + case Stuff.ABC1 => "ABC1" + case Stuff.ABC2 => "ABC2" + case Stuff.ABC3 => "ABC3" + case Stuff.ABC4 => "ABC4" + case Stuff.QW1 => "QW1" + case Stuff.QW2 => "QW2" + case Stuff.IR1 => "IR1" + case 
Stuff.QW3 => "QW3" + case Stuff.QW4 => "QW4" + case Stuff.QW5 => "QW5" + case Stuff.QW6 => "QW6" + case Stuff.IE1 => "IE1" + case Stuff.IE11 => "IE11" + case Stuff.IE12 => "IE12" + case Stuff.IE2 => "IE2" + case Stuff.IE21 => "IE21" + case Stuff.IE22 => "IE22" + case Stuff.IE23 => "IE23" + case Stuff.IE24 => "IE24" + case Stuff.LA1 => "LA1" + case Stuff.LA2 => "LA2" + case Stuff.LA3 => "LA3" + case Stuff.LA5 => "LA5" + case Stuff.A3 => "A3" + case Stuff.ASD1 => "ASD1" + case Stuff.ASD5 => "ASD5" + case Stuff.ASD6 => "ASD6" + case Stuff.IR11 => "IR11" + case Stuff.IR12 => "IR12" + case Stuff.ASD2 => "ASD2" + case Stuff.ASD3 => "ASD3" + case Stuff.A1 => "A1" + case Stuff.A2 => "A2" + case Stuff.G1 => "G1" + case Stuff.G2 => "G2" + case Stuff.G3 => "G3" + case Stuff.G4 => "G4" + case Stuff.G5 => "G5" + case Stuff.G6 => "G6" + case Stuff.G1L => "G1L" + case Stuff.G2L => "G2L" + case Stuff.G3L => "G3L" + case Stuff.G4L => "G4L" + case Stuff.G5L => "G5L" + case Stuff.G6L => "G6L" + case Stuff.ABC5 => "ABC5" + case Stuff.ABC6 => "ABC6" + case Stuff.ABC7 => "ABC7" + case Stuff.ABC8 => "ABC8" + case Stuff.ABC9 => "ABC9" + case Stuff.ABC10 => "ABC10" + case Stuff.ASD4 => "ASD4" + case Stuff.SW => "SW" + case Stuff.C1 => "C1" + case Stuff.C11 => "C11" + case Stuff.IR2 => "IR2" + case Stuff.IR21 => "IR21" + case Stuff.IR22 => "IR22" + case Stuff.MAD14 => "MAD14" + case Stuff.MAD15 => "MAD15" + case Stuff.MAD11 => "MAD11" + case Stuff.MAD1 => "MAD1" + case Stuff.SN => "SN" + case Stuff.C12 => "C12" + case Stuff.C13 => "C13" + case Stuff.MAD12 => "MAD12" + case Stuff.C14 => "C14" + case Stuff.C15 => "C15" + case Stuff.G7 => "G7" + case Stuff.G7L => "G7L" + case Stuff.G8 => "G8" + case Stuff.G8L => "G8L" + case Stuff.C2 => "C2" + case Stuff.C21 => "C21" + case Stuff.C22 => "C22" + case Stuff.C23 => "C23" + case Stuff.C24 => "C24" + case Stuff.C25 => "C25" + case Stuff.MAD21 => "MAD21" + case Stuff.MAD22 => "MAD22" + case Stuff.MAD24 => "MAD24" + case Stuff.MAD25 => "MAD25" + case 
Stuff.MAD2 => "MAD2" + case Stuff.CLC => "CLC" + case Stuff.MAD13 => "MAD13" + case Stuff.MAD16 => "MAD16" + case Stuff.MAD23 => "MAD23" + case Stuff.MAD26 => "MAD26" + case Stuff.G9 => "G9" + case Stuff.G9L => "G9L" + case Stuff.LA4 => "LA4" + case Stuff.G10 => "G10" + case Stuff.G10L => "G10L" + case Stuff.G11 => "G11" + case Stuff.G11L => "G11L" + case _ => "unknown" + } +} diff --git a/test/files/neg/t12513.check b/test/files/neg/t12513.check new file mode 100644 index 000000000000..e156ba7fc383 --- /dev/null +++ b/test/files/neg/t12513.check @@ -0,0 +1,4 @@ +predefer_2.scala:10: error: not found: value x + println((x,y)) + ^ +1 error diff --git a/test/files/neg/t12513/predefer_2.scala b/test/files/neg/t12513/predefer_2.scala new file mode 100644 index 000000000000..1872ff9d82fe --- /dev/null +++ b/test/files/neg/t12513/predefer_2.scala @@ -0,0 +1,12 @@ +//> using options -Yimports:p.MyPredef,scala.Predef,scala + +package p { + object Test extends App { + println((x,y)) + } +} +package q { + object Test extends App { + println((x,y)) + } +} diff --git a/test/files/neg/t12513/predefined_1.scala b/test/files/neg/t12513/predefined_1.scala new file mode 100644 index 000000000000..eefdfa7011eb --- /dev/null +++ b/test/files/neg/t12513/predefined_1.scala @@ -0,0 +1,7 @@ + +package p + +object MyPredef { + private [p] def x = 27 + def y = 42 +} diff --git a/test/files/neg/t12513b.check b/test/files/neg/t12513b.check new file mode 100644 index 000000000000..6204fe160453 --- /dev/null +++ b/test/files/neg/t12513b.check @@ -0,0 +1,4 @@ +t12513b.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call not in tail position + @T def f: Int = { f ; 42 } // the annotation worked: error, f is not tail recursive + ^ +1 error diff --git a/test/files/neg/t12513b.scala b/test/files/neg/t12513b.scala new file mode 100644 index 000000000000..c4dc17f02a7f --- /dev/null +++ b/test/files/neg/t12513b.scala @@ -0,0 +1,9 @@ + +//> using options -Werror 
-Xsource:3 + +object X { type T = annotation.tailrec } +object Y { type T = annotation.tailrec } +object Z { + import X.*, Y.* // OK, both T mean tailrec + @T def f: Int = { f ; 42 } // the annotation worked: error, f is not tail recursive +} diff --git a/test/files/neg/t12514.check b/test/files/neg/t12514.check new file mode 100644 index 000000000000..d9f29cd19f5e --- /dev/null +++ b/test/files/neg/t12514.check @@ -0,0 +1,4 @@ +t12514.scala:10: error: t.type forSome { val t: Test.T } is not a legal prefix for a constructor + val y = new (t.type forSome { val t: T })#Y {} + ^ +1 error diff --git a/test/files/neg/t12514.scala b/test/files/neg/t12514.scala new file mode 100644 index 000000000000..e0c47ccb7f55 --- /dev/null +++ b/test/files/neg/t12514.scala @@ -0,0 +1,11 @@ +import scala.language.existentials + +object Test { + trait T { + trait Y { + def x = 0 + } + } + + val y = new (t.type forSome { val t: T })#Y {} +} \ No newline at end of file diff --git a/test/files/neg/t12523.check b/test/files/neg/t12523.check new file mode 100644 index 000000000000..776fce186844 --- /dev/null +++ b/test/files/neg/t12523.check @@ -0,0 +1,4 @@ +Test.scala:1: error: Unable to implement a super accessor, A needs to be directly extended by class C. 
+class C extends B { + ^ +1 error diff --git a/test/files/neg/t12523/A.java b/test/files/neg/t12523/A.java new file mode 100644 index 000000000000..6148098e0398 --- /dev/null +++ b/test/files/neg/t12523/A.java @@ -0,0 +1,5 @@ +public interface A { + default int foo() { + return 41; + } +} diff --git a/test/files/neg/t12523/B.java b/test/files/neg/t12523/B.java new file mode 100644 index 000000000000..6e0fbf70e928 --- /dev/null +++ b/test/files/neg/t12523/B.java @@ -0,0 +1,3 @@ +public interface B extends A { + int bar(); +} diff --git a/test/files/neg/t12523/Test.scala b/test/files/neg/t12523/Test.scala new file mode 100644 index 000000000000..46fb2f59f597 --- /dev/null +++ b/test/files/neg/t12523/Test.scala @@ -0,0 +1,15 @@ +class C extends B { + override def bar(): Int = 1 + + override def foo(): Int = { + val f: () => Int = super.foo + f() + } +} + +object Test { + def main(args: Array[String]): Unit = { + var c = new C() + assert(c.foo() + c.bar() == 42) + } +} diff --git a/test/files/neg/t12529.check b/test/files/neg/t12529.check new file mode 100644 index 000000000000..a970c3d75e86 --- /dev/null +++ b/test/files/neg/t12529.check @@ -0,0 +1,6 @@ +t12529.scala:21: error: `abstract override` modifiers required to override: +abstract override def m(a: Int): Int (defined in trait C) + with override def m(a: Int): Int (defined in trait D) +class X extends E with D + ^ +1 error \ No newline at end of file diff --git a/test/files/neg/t12529.scala b/test/files/neg/t12529.scala new file mode 100644 index 000000000000..16a07237b6bc --- /dev/null +++ b/test/files/neg/t12529.scala @@ -0,0 +1,21 @@ +trait A { + def m(a:Int): Int +} + +trait B extends A { + override def m(a:Int): Int = { return a; } +} + +trait C extends A { + abstract override def m(a:Int):Int = { return super.m(a); } +} + +trait D extends B with C { + override def m(a:Int):Int = { return super.m(a); } +} + +trait E extends C with B { + abstract override def m(a:Int):Int = { return super.m(a); } +} + 
+class X extends E with D \ No newline at end of file diff --git a/test/files/neg/t12531.check b/test/files/neg/t12531.check new file mode 100644 index 000000000000..b3e40d4b34d7 --- /dev/null +++ b/test/files/neg/t12531.check @@ -0,0 +1,6 @@ +t12531.scala:3: error: type mismatch; + found : String("") + required: Int + def format(i: Int): String = s"${format("")}" + ^ +1 error diff --git a/test/files/neg/t12531.scala b/test/files/neg/t12531.scala new file mode 100644 index 000000000000..79bf4b8a3d09 --- /dev/null +++ b/test/files/neg/t12531.scala @@ -0,0 +1,4 @@ + +class C { + def format(i: Int): String = s"${format("")}" +} diff --git a/test/files/neg/t12543.check b/test/files/neg/t12543.check index 87f67189ea6b..585a791c4886 100644 --- a/test/files/neg/t12543.check +++ b/test/files/neg/t12543.check @@ -1,4 +1,4 @@ -t12543.scala:9: error: value of is not a member of object java.util.List +t12543.scala:10: error: value of is not a member of object java.util.List val ss = java.util.List.of("Hello", who) ^ -one error found +1 error diff --git a/test/files/neg/t12543.scala b/test/files/neg/t12543.scala index 322ad6769af5..6d4cd1a540fa 100644 --- a/test/files/neg/t12543.scala +++ b/test/files/neg/t12543.scala @@ -1,7 +1,8 @@ -// scalac: -Werror -release:8 +//> using options -Werror -release 8 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ +//import jdk.CollectionConverters._ // scala/bug/issues/12566 import sun._ object HelloWorld { diff --git a/test/files/neg/t12565.check b/test/files/neg/t12565.check index bde4949b4154..b67443b9b545 100644 --- a/test/files/neg/t12565.check +++ b/test/files/neg/t12565.check @@ -1,4 +1,4 @@ t12565.scala:5: error: java.time.Instant does not have a constructor def f = new java.time.Instant ^ -one error found +1 error diff --git a/test/files/neg/t12565.scala b/test/files/neg/t12565.scala index c723835b3065..f9d9b0b76231 100644 --- a/test/files/neg/t12565.scala +++ b/test/files/neg/t12565.scala @@ -1,5 
+1,5 @@ -// scalac: -Xfatal-warnings -release:8 -// javaVersion: 9+ +//> using options -Werror --release 8 +//> using jvm 9+ class C { def f = new java.time.Instant diff --git a/test/files/neg/t12590.check b/test/files/neg/t12590.check new file mode 100644 index 000000000000..c52ae56a1b44 --- /dev/null +++ b/test/files/neg/t12590.check @@ -0,0 +1,6 @@ +t12590.scala:4: warning: local val a in method unusedLocal is never used + val a = 27 + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12590.scala b/test/files/neg/t12590.scala new file mode 100644 index 000000000000..2339f3411412 --- /dev/null +++ b/test/files/neg/t12590.scala @@ -0,0 +1,7 @@ +//> using options -Werror -Wunused:locals +class C { + def unusedLocal = { + val a = 27 + 42 + } +} diff --git a/test/files/neg/t12591.check b/test/files/neg/t12591.check new file mode 100644 index 000000000000..04c916ac1324 --- /dev/null +++ b/test/files/neg/t12591.check @@ -0,0 +1,6 @@ +t12591.scala:5: warning: evidence parameter evidence$1 of type Context[A] in method g is never used + def g[A: Context] = f + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12591.scala b/test/files/neg/t12591.scala new file mode 100644 index 000000000000..c66259aa9393 --- /dev/null +++ b/test/files/neg/t12591.scala @@ -0,0 +1,7 @@ +//> using options -Werror -Wunused:synthetics +trait Context[A] { def m(a: A): A = a } // Context has a member, so warn if unused + +object Example { + def g[A: Context] = f + def f = 42 +} diff --git a/test/files/neg/t12647.check b/test/files/neg/t12647.check new file mode 100644 index 000000000000..7b0f04dc8b10 --- /dev/null +++ b/test/files/neg/t12647.check @@ -0,0 +1,4 @@ +Test_3.scala:6: error: value value is not a member of Result + println(resolver.resolve.value) + ^ +1 error diff --git a/test/files/neg/t12647/Macro_1.scala b/test/files/neg/t12647/Macro_1.scala new file mode 100644 index 000000000000..da6431cd2ae2 --- /dev/null +++ b/test/files/neg/t12647/Macro_1.scala @@ -0,0 +1,13 @@ + +//> using options -Xsource:3 + +import scala.reflect.macros.blackbox.Context + +trait Result + +object Macros { + def impl(c: Context) = { + import c.universe._ + q"""new Result { def value = "Was this the answer you sought?" }""" + } +} diff --git a/test/files/neg/t12647/Resolve_2.scala b/test/files/neg/t12647/Resolve_2.scala new file mode 100644 index 000000000000..97cc4c354007 --- /dev/null +++ b/test/files/neg/t12647/Resolve_2.scala @@ -0,0 +1,13 @@ + +//> using options -Xsource:3 + +import language.experimental.macros + +trait Resolver { + def resolve: Result = ??? 
+} + +class ValueResolver extends Resolver { + override def resolve = valueResult + def valueResult: Result = macro Macros.impl +} diff --git a/test/files/neg/t12647/Test_3.scala b/test/files/neg/t12647/Test_3.scala new file mode 100644 index 000000000000..e2fc19f46853 --- /dev/null +++ b/test/files/neg/t12647/Test_3.scala @@ -0,0 +1,7 @@ + +//> using options -Xsource:3 + +object Test extends App { + val resolver = new ValueResolver + println(resolver.resolve.value) +} diff --git a/test/files/neg/t12648.check b/test/files/neg/t12648.check new file mode 100644 index 000000000000..c75720faa043 --- /dev/null +++ b/test/files/neg/t12648.check @@ -0,0 +1,15 @@ +LogConfig_2.scala:7: warning: value MESSAGE_FORMAT_VERSION_CONFIG in class TopicConfig_1 is deprecated + val MessageFormatVersionPropX = TopicConfig_1.MESSAGE_FORMAT_VERSION_CONFIG + ^ +LogConfig_2.scala:10: warning: Specify both message and version: @deprecated("message", since = "MyLib 1.0") + @deprecated("3.0") + ^ +LogConfig_2.scala:12: warning: Specify both message and version: @deprecated("message", since = "MyLib 1.0") + @deprecated("3.0") @nowarn("cat=deprecation") + ^ +LogConfig_2.scala:12: warning: @nowarn annotation does not suppress any warnings + @deprecated("3.0") @nowarn("cat=deprecation") + ^ +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t12648/LogConfig_2.scala b/test/files/neg/t12648/LogConfig_2.scala new file mode 100644 index 000000000000..c8ff61d95df8 --- /dev/null +++ b/test/files/neg/t12648/LogConfig_2.scala @@ -0,0 +1,14 @@ + +//> using options -Werror -Xlint -Xsource:3 + +import annotation.* + +class LogConfig { + val MessageFormatVersionPropX = TopicConfig_1.MESSAGE_FORMAT_VERSION_CONFIG + @nowarn("cat=deprecation") + val MessageFormatVersionPropY = TopicConfig_1.MESSAGE_FORMAT_VERSION_CONFIG + @deprecated("3.0") + val MessageFormatVersionPropZ = TopicConfig_1.MESSAGE_FORMAT_VERSION_CONFIG + @deprecated("3.0") @nowarn("cat=deprecation") + val MessageFormatVersionProp = TopicConfig_1.MESSAGE_FORMAT_VERSION_CONFIG +} diff --git a/test/files/neg/t12648/TopicConfig_1.java b/test/files/neg/t12648/TopicConfig_1.java new file mode 100644 index 000000000000..9dc66e19eba8 --- /dev/null +++ b/test/files/neg/t12648/TopicConfig_1.java @@ -0,0 +1,4 @@ + +public class TopicConfig_1 { + @Deprecated public static final String MESSAGE_FORMAT_VERSION_CONFIG = "message.format.version"; +} diff --git a/test/files/neg/t12664.check b/test/files/neg/t12664.check new file mode 100644 index 000000000000..e3315946e96c --- /dev/null +++ b/test/files/neg/t12664.check @@ -0,0 +1,11 @@ +t12664.scala:4: warning: side-effecting nullary methods are discouraged: suggest defining as `def f()` instead [quickfixable] + def f: Unit = 42 + ^ +t12664.scala:4: warning: a pure expression does nothing in statement position + def f: Unit = 42 + ^ +warning: 1 deprecation (since 1.0); re-run enabling -deprecation for details, or try -help +warning: 1 lint warning; change -Wconf for cat=lint to display individual messages +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t12664.scala b/test/files/neg/t12664.scala new file mode 100644 index 000000000000..8b965d64ea44 --- /dev/null +++ b/test/files/neg/t12664.scala @@ -0,0 +1,12 @@ +//> using options -Wconf:cat=lint-missing-interpolator:ws,cat=deprecation:ws -Werror -Xlint + +class C { + def f: Unit = 42 + + def oops = "$f" + + @deprecated("old stuff", since="1.0") + def old = 17 + + def stale = old +} diff --git a/test/files/neg/t12690.check b/test/files/neg/t12690.check new file mode 100644 index 000000000000..2bacf96a9720 --- /dev/null +++ b/test/files/neg/t12690.check @@ -0,0 +1,6 @@ +t12690.scala:10: warning: Unused import + import A._ // missing unused warning (order of imports is significant) + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12690.scala b/test/files/neg/t12690.scala new file mode 100644 index 000000000000..6971693c44a9 --- /dev/null +++ b/test/files/neg/t12690.scala @@ -0,0 +1,12 @@ + +//> using options -Werror -Wunused:imports + +class X +class Y extends X +object A { implicit val x: X = new X } +object B { implicit val y: Y = new Y } +class C { + import B._ + import A._ // missing unused warning (order of imports is significant) + def t = implicitly[X] +} diff --git a/test/files/neg/t12690b.check b/test/files/neg/t12690b.check new file mode 100644 index 000000000000..38f30fa05819 --- /dev/null +++ b/test/files/neg/t12690b.check @@ -0,0 +1,14 @@ +t12690b.scala:15: error: reference to v is ambiguous; +it is imported twice in the same scope by +import Y.v +and import X.v + v + ^ +t12690b.scala:11: warning: Unused import + import X.v + ^ +t12690b.scala:12: warning: Unused import + import Y.v + ^ +2 warnings +1 error diff --git a/test/files/neg/t12690b.scala b/test/files/neg/t12690b.scala new file mode 100644 index 000000000000..95cace46d190 --- /dev/null +++ b/test/files/neg/t12690b.scala @@ -0,0 +1,17 @@ + +//> using options -Wunused:imports -Werror + +object 
X { + val v = 27 +} +object Y { + val v = 42 +} +object Main { + import X.v + import Y.v + def main(args: Array[String]) = println { + //"hello, world" + v + } +} diff --git a/test/files/neg/t12691.check b/test/files/neg/t12691.check new file mode 100644 index 000000000000..5367c6238725 --- /dev/null +++ b/test/files/neg/t12691.check @@ -0,0 +1,110 @@ +t12691.scala:4: error: class Example needs to be abstract. +Missing implementations for 101 members. + // Members declared in Example + val f1: Int = ??? + val f10: Int = ??? + val f100: Int = ??? + val f11: Int = ??? + val f12: Int = ??? + val f13: Int = ??? + val f14: Int = ??? + val f15: Int = ??? + val f16: Int = ??? + val f17: Int = ??? + val f18: Int = ??? + val f19: Int = ??? + val f2: Int = ??? + val f20: Int = ??? + val f21: Int = ??? + val f22: Int = ??? + val f23: Int = ??? + val f24: Int = ??? + val f25: Int = ??? + val f26: Int = ??? + val f27: Int = ??? + val f28: Int = ??? + val f29: Int = ??? + val f3: Int = ??? + val f30: Int = ??? + val f31: Int = ??? + val f32: Int = ??? + val f33: Int = ??? + val f34: Int = ??? + val f35: Int = ??? + val f36: Int = ??? + val f37: Int = ??? + val f38: Int = ??? + val f39: Int = ??? + val f4: Int = ??? + val f40: Int = ??? + val f41: Int = ??? + val f42: Int = ??? + val f43: Int = ??? + val f44: Int = ??? + val f45: Int = ??? + val f46: Int = ??? + val f47: Int = ??? + val f48: Int = ??? + val f49: Int = ??? + val f5: Int = ??? + val f50: Int = ??? + val f51: Int = ??? + val f52: Int = ??? + val f53: Int = ??? + val f54: Int = ??? + val f55: Int = ??? + val f56: Int = ??? + val f57: Int = ??? + val f58: Int = ??? + val f59: Int = ??? + val f6: Int = ??? + val f60: Int = ??? + val f61: Int = ??? + val f62: Int = ??? + val f63: Int = ??? + val f64: Int = ??? + val f65: Int = ??? + val f66: Int = ??? + val f67: Int = ??? + val f68: Int = ??? + val f69: Int = ??? + val f7: Int = ??? + val f70: Int = ??? + val f71: Int = ??? + val f72: Int = ??? + val f73: Int = ??? 
+ val f74: Int = ??? + val f75: Int = ??? + val f76: Int = ??? + val f77: Int = ??? + val f78: Int = ??? + val f79: Int = ??? + val f8: Int = ??? + val f80: Int = ??? + val f81: Int = ??? + val f82: Int = ??? + val f83: Int = ??? + val f84: Int = ??? + val f85: Int = ??? + val f86: Int = ??? + val f87: Int = ??? + val f88: Int = ??? + val f89: Int = ??? + val f9: Int = ??? + val f90: Int = ??? + val f91: Int = ??? + val f92: Int = ??? + val f93: Int = ??? + val f94: Int = ??? + val f95: Int = ??? + val f96: Int = ??? + val f97: Int = ??? + val f98: Int = ??? + val f99: Int = ??? + + // Members declared in Runner + def run(i: Int): Unit = ??? + +class Example extends Runner { + ^ +1 error diff --git a/test/files/neg/t12691.scala b/test/files/neg/t12691.scala new file mode 100644 index 000000000000..0047f7ae404b --- /dev/null +++ b/test/files/neg/t12691.scala @@ -0,0 +1,106 @@ +abstract class Runner { + def run(i: Int): Unit +} +class Example extends Runner { + def run(s: String) = () + val f1: Int + val f2: Int + val f3: Int + val f4: Int + val f5: Int + val f6: Int + val f7: Int + val f8: Int + val f9: Int + val f10: Int + val f11: Int + val f12: Int + val f13: Int + val f14: Int + val f15: Int + val f16: Int + val f17: Int + val f18: Int + val f19: Int + val f20: Int + val f21: Int + val f22: Int + val f23: Int + val f24: Int + val f25: Int + val f26: Int + val f27: Int + val f28: Int + val f29: Int + val f30: Int + val f31: Int + val f32: Int + val f33: Int + val f34: Int + val f35: Int + val f36: Int + val f37: Int + val f38: Int + val f39: Int + val f40: Int + val f41: Int + val f42: Int + val f43: Int + val f44: Int + val f45: Int + val f46: Int + val f47: Int + val f48: Int + val f49: Int + val f50: Int + val f51: Int + val f52: Int + val f53: Int + val f54: Int + val f55: Int + val f56: Int + val f57: Int + val f58: Int + val f59: Int + val f60: Int + val f61: Int + val f62: Int + val f63: Int + val f64: Int + val f65: Int + val f66: Int + val f67: Int + val 
f68: Int + val f69: Int + val f70: Int + val f71: Int + val f72: Int + val f73: Int + val f74: Int + val f75: Int + val f76: Int + val f77: Int + val f78: Int + val f79: Int + val f80: Int + val f81: Int + val f82: Int + val f83: Int + val f84: Int + val f85: Int + val f86: Int + val f87: Int + val f88: Int + val f89: Int + val f90: Int + val f91: Int + val f92: Int + val f93: Int + val f94: Int + val f95: Int + val f96: Int + val f97: Int + val f98: Int + val f99: Int + val f100: Int +} diff --git a/test/files/neg/t12691b.check b/test/files/neg/t12691b.check new file mode 100644 index 000000000000..14f2c613a5f6 --- /dev/null +++ b/test/files/neg/t12691b.check @@ -0,0 +1,109 @@ +t12691b.scala:4: error: class Example needs to be abstract. +Missing implementations for 100 members. + // Members declared in Example + val f1: Int = ??? + val f10: Int = ??? + val f11: Int = ??? + val f12: Int = ??? + val f13: Int = ??? + val f14: Int = ??? + val f15: Int = ??? + val f16: Int = ??? + val f17: Int = ??? + val f18: Int = ??? + val f19: Int = ??? + val f2: Int = ??? + val f20: Int = ??? + val f21: Int = ??? + val f22: Int = ??? + val f23: Int = ??? + val f24: Int = ??? + val f25: Int = ??? + val f26: Int = ??? + val f27: Int = ??? + val f28: Int = ??? + val f29: Int = ??? + val f3: Int = ??? + val f30: Int = ??? + val f31: Int = ??? + val f32: Int = ??? + val f33: Int = ??? + val f34: Int = ??? + val f35: Int = ??? + val f36: Int = ??? + val f37: Int = ??? + val f38: Int = ??? + val f39: Int = ??? + val f4: Int = ??? + val f40: Int = ??? + val f41: Int = ??? + val f42: Int = ??? + val f43: Int = ??? + val f44: Int = ??? + val f45: Int = ??? + val f46: Int = ??? + val f47: Int = ??? + val f48: Int = ??? + val f49: Int = ??? + val f5: Int = ??? + val f50: Int = ??? + val f51: Int = ??? + val f52: Int = ??? + val f53: Int = ??? + val f54: Int = ??? + val f55: Int = ??? + val f56: Int = ??? + val f57: Int = ??? + val f58: Int = ??? + val f59: Int = ??? + val f6: Int = ??? 
+ val f60: Int = ??? + val f61: Int = ??? + val f62: Int = ??? + val f63: Int = ??? + val f64: Int = ??? + val f65: Int = ??? + val f66: Int = ??? + val f67: Int = ??? + val f68: Int = ??? + val f69: Int = ??? + val f7: Int = ??? + val f70: Int = ??? + val f71: Int = ??? + val f72: Int = ??? + val f73: Int = ??? + val f74: Int = ??? + val f75: Int = ??? + val f76: Int = ??? + val f77: Int = ??? + val f78: Int = ??? + val f79: Int = ??? + val f8: Int = ??? + val f80: Int = ??? + val f81: Int = ??? + val f82: Int = ??? + val f83: Int = ??? + val f84: Int = ??? + val f85: Int = ??? + val f86: Int = ??? + val f87: Int = ??? + val f88: Int = ??? + val f89: Int = ??? + val f9: Int = ??? + val f90: Int = ??? + val f91: Int = ??? + val f92: Int = ??? + val f93: Int = ??? + val f94: Int = ??? + val f95: Int = ??? + val f96: Int = ??? + val f97: Int = ??? + val f98: Int = ??? + val f99: Int = ??? + + // Members declared in Runner + def run(i: Int): Unit = ??? // Int does not match String in `def run(s: String): Unit` + +class Example extends Runner { + ^ +1 error diff --git a/test/files/neg/t12691b.scala b/test/files/neg/t12691b.scala new file mode 100644 index 000000000000..571936ec2a35 --- /dev/null +++ b/test/files/neg/t12691b.scala @@ -0,0 +1,105 @@ +abstract class Runner { + def run(i: Int): Unit +} +class Example extends Runner { + def run(s: String) = () + val f1: Int + val f2: Int + val f3: Int + val f4: Int + val f5: Int + val f6: Int + val f7: Int + val f8: Int + val f9: Int + val f10: Int + val f11: Int + val f12: Int + val f13: Int + val f14: Int + val f15: Int + val f16: Int + val f17: Int + val f18: Int + val f19: Int + val f20: Int + val f21: Int + val f22: Int + val f23: Int + val f24: Int + val f25: Int + val f26: Int + val f27: Int + val f28: Int + val f29: Int + val f30: Int + val f31: Int + val f32: Int + val f33: Int + val f34: Int + val f35: Int + val f36: Int + val f37: Int + val f38: Int + val f39: Int + val f40: Int + val f41: Int + val f42: Int + 
val f43: Int + val f44: Int + val f45: Int + val f46: Int + val f47: Int + val f48: Int + val f49: Int + val f50: Int + val f51: Int + val f52: Int + val f53: Int + val f54: Int + val f55: Int + val f56: Int + val f57: Int + val f58: Int + val f59: Int + val f60: Int + val f61: Int + val f62: Int + val f63: Int + val f64: Int + val f65: Int + val f66: Int + val f67: Int + val f68: Int + val f69: Int + val f70: Int + val f71: Int + val f72: Int + val f73: Int + val f74: Int + val f75: Int + val f76: Int + val f77: Int + val f78: Int + val f79: Int + val f80: Int + val f81: Int + val f82: Int + val f83: Int + val f84: Int + val f85: Int + val f86: Int + val f87: Int + val f88: Int + val f89: Int + val f90: Int + val f91: Int + val f92: Int + val f93: Int + val f94: Int + val f95: Int + val f96: Int + val f97: Int + val f98: Int + val f99: Int +} diff --git a/test/files/neg/t12704.check b/test/files/neg/t12704.check new file mode 100644 index 000000000000..2de8a06ca4df --- /dev/null +++ b/test/files/neg/t12704.check @@ -0,0 +1,7 @@ +t12704.scala:13: warning: match may not be exhaustive. +It would fail on the following input: L(_) + def t2(t: P) = t match { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12704.scala b/test/files/neg/t12704.scala new file mode 100644 index 000000000000..d481cc21635e --- /dev/null +++ b/test/files/neg/t12704.scala @@ -0,0 +1,16 @@ +//> using options -Werror +class Out { + sealed trait P + case class K(x: Int) extends P + case class L(x: Int) extends P +} +class C[O <: Out](val o: O) { + import o._ + def t1(t: P) = t match { + case _: K => 0 + case _: L => 0 + } + def t2(t: P) = t match { + case _: K => 0 + } +} diff --git a/test/files/neg/t12715.check b/test/files/neg/t12715.check new file mode 100644 index 000000000000..729af9c43246 --- /dev/null +++ b/test/files/neg/t12715.check @@ -0,0 +1,7 @@ +t12715.scala:21: error: parent trait E has a super call to method B.f, which binds to the value D.f. Super calls can only target methods. +object O1 extends B with C with D with E + ^ +t12715.scala:22: error: parent trait E has a super call to method B.f, which binds to the value C.f. Super calls can only target methods. +object O2 extends B with C with E with D + ^ +2 errors diff --git a/test/files/neg/t12715.scala b/test/files/neg/t12715.scala new file mode 100644 index 000000000000..e3769141b4c4 --- /dev/null +++ b/test/files/neg/t12715.scala @@ -0,0 +1,23 @@ +trait A { + def f: String +} + +trait B extends A { + def f = "B"; +} + +trait C extends A { + override val f = "C" +} + +trait D extends C { + override val f = "D" +} + +trait E extends A with B { + def d = super.f +} + +object O1 extends B with C with D with E +object O2 extends B with C with E with D +object O3 extends B with E with C with D diff --git a/test/files/neg/t12715b.check b/test/files/neg/t12715b.check new file mode 100644 index 000000000000..3cdb24a74351 --- /dev/null +++ b/test/files/neg/t12715b.check @@ -0,0 +1,4 @@ +t12715b.scala:17: error: parent trait D has a super call to method B.f, which binds to the value C.f. Super calls can only target methods. 
+ new A(10.0f) with C with D {} + ^ +1 error diff --git a/test/files/neg/t12715b.scala b/test/files/neg/t12715b.scala new file mode 100644 index 000000000000..9d89233dc85f --- /dev/null +++ b/test/files/neg/t12715b.scala @@ -0,0 +1,19 @@ +trait B { + def f: Float = 1.0f +} + +class A(override val f: Float) extends B + +trait C extends B { + abstract override val f = super.f + 100.0f +} + +trait D extends B { + abstract override val f = super.f + 1000.0f +} + +object Test { + def main(args: Array[String]): Unit = { + new A(10.0f) with C with D {} + } +} diff --git a/test/files/neg/t12728.check b/test/files/neg/t12728.check new file mode 100644 index 000000000000..c664b4ea4d9d --- /dev/null +++ b/test/files/neg/t12728.check @@ -0,0 +1,162 @@ +t12728.scala:10: warning: dubious usage of method != with unit value + println(u.!=(x)) + ^ +t12728.scala:11: warning: dubious usage of method ## with unit value + println(u.##) + ^ +t12728.scala:12: warning: dubious usage of method == with unit value + println(u.==(x)) + ^ +t12728.scala:13: warning: dubious usage of method asInstanceOf with unit value + println(u.asInstanceOf[Any]) + ^ +t12728.scala:14: warning: dubious usage of method equals with unit value + println(u.equals(x)) + ^ +t12728.scala:15: warning: dubious usage of method hashCode with unit value + println(u.hashCode) + ^ +t12728.scala:16: warning: dubious usage of method isInstanceOf with unit value + println(u.isInstanceOf[Any]) + ^ +t12728.scala:17: warning: dubious usage of method toString with unit value + println(u.toString) + ^ +t12728.scala:20: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.isNaN) + ^ +t12728.scala:20: warning: dubious usage of method isNaN with integer value + println(i.isNaN) + ^ +t12728.scala:21: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. 
[quickfixable] + println(i.isInfinity) + ^ +t12728.scala:21: warning: dubious usage of method isInfinity with integer value + println(i.isInfinity) + ^ +t12728.scala:22: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.isInfinite) + ^ +t12728.scala:22: warning: dubious usage of method isInfinite with integer value + println(i.isInfinite) + ^ +t12728.scala:23: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.isFinite) + ^ +t12728.scala:23: warning: dubious usage of method isFinite with integer value + println(i.isFinite) + ^ +t12728.scala:24: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.isPosInfinity) + ^ +t12728.scala:24: warning: dubious usage of method isPosInfinity with integer value + println(i.isPosInfinity) + ^ +t12728.scala:25: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.isNegInfinity) + ^ +t12728.scala:25: warning: dubious usage of method isNegInfinity with integer value + println(i.isNegInfinity) + ^ +t12728.scala:27: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.ceil) + ^ +t12728.scala:27: warning: dubious usage of method ceil with integer value + println(i.ceil) + ^ +t12728.scala:28: warning: Widening conversion from Int to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(i.floor) + ^ +t12728.scala:28: warning: dubious usage of method floor with integer value + println(i.floor) + ^ +t12728.scala:30: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. 
[quickfixable] + println(l.isNaN) + ^ +t12728.scala:30: warning: dubious usage of method isNaN with integer value + println(l.isNaN) + ^ +t12728.scala:31: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(l.isInfinity) + ^ +t12728.scala:31: warning: dubious usage of method isInfinity with integer value + println(l.isInfinity) + ^ +t12728.scala:32: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(l.isInfinite) + ^ +t12728.scala:32: warning: dubious usage of method isInfinite with integer value + println(l.isInfinite) + ^ +t12728.scala:33: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(l.isFinite) + ^ +t12728.scala:33: warning: dubious usage of method isFinite with integer value + println(l.isFinite) + ^ +t12728.scala:34: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(l.isPosInfinity) + ^ +t12728.scala:34: warning: dubious usage of method isPosInfinity with integer value + println(l.isPosInfinity) + ^ +t12728.scala:35: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(l.isNegInfinity) + ^ +t12728.scala:35: warning: dubious usage of method isNegInfinity with integer value + println(l.isNegInfinity) + ^ +t12728.scala:37: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. [quickfixable] + println(l.ceil) + ^ +t12728.scala:37: warning: dubious usage of method ceil with integer value + println(l.ceil) + ^ +t12728.scala:38: warning: Widening conversion from Long to Float is deprecated because it loses precision. Write `.toFloat` instead. 
[quickfixable] + println(l.floor) + ^ +t12728.scala:38: warning: dubious usage of method floor with integer value + println(l.floor) + ^ +t12728.scala:40: warning: dubious usage of method isNaN with integer value + println(c.isNaN) + ^ +t12728.scala:41: warning: dubious usage of method isInfinity with integer value + println(c.isInfinity) + ^ +t12728.scala:42: warning: dubious usage of method isInfinite with integer value + println(c.isInfinite) + ^ +t12728.scala:43: warning: dubious usage of method isFinite with integer value + println(c.isFinite) + ^ +t12728.scala:44: warning: dubious usage of method isPosInfinity with integer value + println(c.isPosInfinity) + ^ +t12728.scala:45: warning: dubious usage of method isNegInfinity with integer value + println(c.isNegInfinity) + ^ +t12728.scala:47: warning: dubious usage of method ceil with integer value + println(c.ceil) + ^ +t12728.scala:48: warning: dubious usage of method floor with integer value + println(c.floor) + ^ +t12728.scala:54: warning: dubious usage of method toString with unit value + def g = new java.lang.StringBuilder("hi").setCharAt(0, 'H').toString // "()" + ^ +t12728.scala:14: warning: comparing values of types Unit and Any using `equals` unsafely bypasses cooperative equality; use `==` instead + println(u.equals(x)) + ^ +t12728.scala:26: warning: method round in class RichInt is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? + println(i.round) + ^ +t12728.scala:36: warning: method round in class RichLong is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? + println(l.round) + ^ +t12728.scala:46: warning: method round in class RichInt is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? 
+ println(c.round) + ^ +error: No warnings can be incurred under -Werror. +53 warnings +1 error diff --git a/test/files/neg/t12728.scala b/test/files/neg/t12728.scala new file mode 100644 index 000000000000..3b2730b7add6 --- /dev/null +++ b/test/files/neg/t12728.scala @@ -0,0 +1,55 @@ +//> using options -Werror -Xlint + +class C { + val u = () + val i = 42 + val l = 42L + val c = 'c' + val x = null: Any + + println(u.!=(x)) + println(u.##) + println(u.==(x)) + println(u.asInstanceOf[Any]) + println(u.equals(x)) + println(u.hashCode) + println(u.isInstanceOf[Any]) + println(u.toString) + println(i.toString) + + println(i.isNaN) + println(i.isInfinity) + println(i.isInfinite) + println(i.isFinite) + println(i.isPosInfinity) + println(i.isNegInfinity) + println(i.round) + println(i.ceil) + println(i.floor) + + println(l.isNaN) + println(l.isInfinity) + println(l.isInfinite) + println(l.isFinite) + println(l.isPosInfinity) + println(l.isNegInfinity) + println(l.round) + println(l.ceil) + println(l.floor) + + println(c.isNaN) + println(c.isInfinity) + println(c.isInfinite) + println(c.isFinite) + println(c.isPosInfinity) + println(c.isNegInfinity) + println(c.round) + println(c.ceil) + println(c.floor) +} + +class UseCase { + def f = new scala.StringBuilder("hi").setCharAt(0, 'H').toString // "Hi" + + def g = new java.lang.StringBuilder("hi").setCharAt(0, 'H').toString // "()" +} diff --git a/test/files/neg/t12734.check b/test/files/neg/t12734.check new file mode 100644 index 000000000000..8b9eb155464c --- /dev/null +++ b/test/files/neg/t12734.check @@ -0,0 +1,12 @@ +[ERROR] [RangePosition(t12734.scala, 94, 94, 108)]: object AssertionErrer is not a member of package java.lang +did you mean AssertionError? +[ERROR] [RangePosition(t12734.scala, 176, 182, 192)]: object connection is not a member of package scala +did you mean collection? 
+[WARNING] [RangePosition(t12734.scala, 110, 110, 125)]: Unused import +[WARNING] [RangePosition(t12734.scala, 127, 127, 133)]: Unused import +[WARNING] [RangePosition(t12734.scala, 167, 167, 168)]: Unused import +[WARNING] [RangePosition(t12734.scala, 193, 193, 194)]: Unused import +[WARNING] [RangePosition(t12734.scala, 397, 397, 403)]: Unused import +[WARNING] [RangePosition(t12734.scala, 489, 489, 494)]: Unused import +6 warnings +2 errors diff --git a/test/files/neg/t12734.scala b/test/files/neg/t12734.scala new file mode 100644 index 000000000000..cf159a93a46f --- /dev/null +++ b/test/files/neg/t12734.scala @@ -0,0 +1,32 @@ +//> using options -Xlint -Xreporter:scala.tools.partest.nest.PlainReporter + +import java.lang.{AssertionErrer, Integer => JInt, String, Thread} +import scala.annotation._ +import scala.connection._ + +trait T { + def t: Thread +} + +// these import infos are not seen in position order +// warnings are not sorted later +class C { + def f(): Int = { + def compute(): Int = { + import scala.concurrent.Future + //Future(42).get + 42 + } + compute() + } + import scala.util.matching.Regex + //def g: Regex = "(\\d+)".r + def g = "(\\d+)".r +} + +/* +Previous result shows the selectors with same range but different points. +[ERROR] [RangePosition(t12734.scala, 76, 83, 117)]: object AssertionErrer is not a member of package java.lang +did you mean AssertionError? +[WARNING] [RangePosition(t12734.scala, 76, 94, 117)]: Unused import +*/ diff --git a/test/files/neg/t12735a.check b/test/files/neg/t12735a.check new file mode 100644 index 000000000000..421d088e7274 --- /dev/null +++ b/test/files/neg/t12735a.check @@ -0,0 +1,6 @@ +[ERROR] [RangePosition(t12735a.scala, 104, 104, 105)]: class B needs to be abstract. +Missing implementation for member of trait A: + def x: String = ??? 
+ +[ERROR] [RangePosition(t12735a.scala, 130, 130, 134)]: covariant type T occurs in contravariant position in type T of value t +2 errors diff --git a/test/files/neg/t12735a.scala b/test/files/neg/t12735a.scala new file mode 100644 index 000000000000..67594fcf7a18 --- /dev/null +++ b/test/files/neg/t12735a.scala @@ -0,0 +1,9 @@ +//> using options -Xreporter:scala.tools.partest.nest.PlainReporter + +trait A { + def x: String +} + +class B[+T] extends A { + def y(t: T): Unit = () +} diff --git a/test/files/neg/t12735b.check b/test/files/neg/t12735b.check new file mode 100644 index 000000000000..6a5138333fe2 --- /dev/null +++ b/test/files/neg/t12735b.check @@ -0,0 +1,8 @@ +[WARNING] [RangePosition(t12735b.scala, 119, 119, 120)]: private method m in class UnusedMethod is never used +[WARNING] [RangePosition(t12735b.scala, 223, 223, 224)]: private object X in object UnusedObject is never used +[WARNING] [RangePosition(t12735b.scala, 235, 235, 260)]: side-effecting nullary methods are discouraged: suggest defining as `def stuff to create a range()` instead +[WARNING] [RangePosition(t12735b.scala, 135, 139, 142)]: match may not be exhaustive. +It would fail on the following input: List(_) +[ERROR] [NoPosition]: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t12735b.scala b/test/files/neg/t12735b.scala new file mode 100644 index 000000000000..3b8fa184005d --- /dev/null +++ b/test/files/neg/t12735b.scala @@ -0,0 +1,14 @@ +//> using options -Werror -Xlint -Xreporter:scala.tools.partest.nest.PlainReporter + +class UnusedMethod { + private def m: String = + List(1) match { + case Nil => "nil" + } +} + +object UnusedObject { + private object X { + def `stuff to create a range` = () + } +} diff --git a/test/files/neg/t12735c.check b/test/files/neg/t12735c.check new file mode 100644 index 000000000000..fcbb3b774ccd --- /dev/null +++ b/test/files/neg/t12735c.check @@ -0,0 +1,21 @@ +[WARNING] [RangePosition(t12735c.scala, 116, 116, 117)]: private val v in class UnusedVal is never used +[WARNING] [RangePosition(t12735c.scala, 214, 214, 215)]: private val v in class UnusedVals is never used +[WARNING] [RangePosition(t12735c.scala, 217, 217, 218)]: private val w in class UnusedVals is never used +[WARNING] [RangePosition(t12735c.scala, 220, 220, 221)]: private val x in class UnusedVals is never used +[WARNING] [RangePosition(t12735c.scala, 223, 223, 224)]: private val y in class UnusedVals is never used +[WARNING] [RangePosition(t12735c.scala, 226, 226, 227)]: private val z in class UnusedVals is never used +[WARNING] [RangePosition(t12735c.scala, 326, 326, 327)]: private val v in class UnusedIdents is never used +[WARNING] [RangePosition(t12735c.scala, 329, 329, 330)]: private val w in class UnusedIdents is never used +[WARNING] [RangePosition(t12735c.scala, 332, 332, 333)]: private val x in class UnusedIdents is never used +[WARNING] [RangePosition(t12735c.scala, 335, 335, 336)]: private val y in class UnusedIdents is never used +[WARNING] [RangePosition(t12735c.scala, 338, 338, 339)]: private val z in class UnusedIdents is never used +[WARNING] [RangePosition(t12735c.scala, 431, 431, 434)]: private type int in object UnusedAlias is never used +[WARNING] 
[RangePosition(t12735c.scala, 132, 136, 139)]: match may not be exhaustive. +It would fail on the following input: List(_) +[WARNING] [source-t12735c.scala,line-12,offset=246]: match may not be exhaustive. +It would fail on the following input: List(_) +[WARNING] [source-t12735c.scala,line-19,offset=350]: match may not be exhaustive. +It would fail on the following input: List(_) +[ERROR] [NoPosition]: No warnings can be incurred under -Werror. +15 warnings +1 error diff --git a/test/files/neg/t12735c.scala b/test/files/neg/t12735c.scala new file mode 100644 index 000000000000..8b1b3e29f6a5 --- /dev/null +++ b/test/files/neg/t12735c.scala @@ -0,0 +1,26 @@ +//> using options -Werror -Xlint -Xreporter:scala.tools.partest.nest.PlainReporter + +class UnusedVal { + private val v: String = + List(1) match { + case Nil => "nil" + } +} + +class UnusedVals { + private val v, w, x, y, z: String = + List(1) match { + case Nil => "nil" + } +} + +class UnusedIdents { + private val v, w, x, y, z = + List(1) match { + case Nil => "nil" + } +} + +object UnusedAlias { + private type int = Int +} diff --git a/test/files/neg/t1275.check b/test/files/neg/t1275.check index a930e25ab345..d7db84a0313d 100644 --- a/test/files/neg/t1275.check +++ b/test/files/neg/t1275.check @@ -3,4 +3,4 @@ t1275.scala:11: error: type mismatch; required: s = xs f ^ -one error found +1 error diff --git a/test/files/neg/t1275.scala b/test/files/neg/t1275.scala index 1175b30763a4..d7513bf0983f 100644 --- a/test/files/neg/t1275.scala +++ b/test/files/neg/t1275.scala @@ -12,4 +12,4 @@ object Test { // ^ // found : xs.MyType[a] // required: s -} \ No newline at end of file +} diff --git a/test/files/neg/t12770.check b/test/files/neg/t12770.check new file mode 100644 index 000000000000..1849cc9e0b59 --- /dev/null +++ b/test/files/neg/t12770.check @@ -0,0 +1,11 @@ +t12770.scala:12: warning: match may not be exhaustive. 
+It would fail on the following input: B() + def control(input: T) = input match { + ^ +t12770.scala:15: warning: match may not be exhaustive. +It would fail on the following input: (x: Any forSome x not in ("a", "b", "c", 42)) + def any(input: Any) = input match { + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t12770.scala b/test/files/neg/t12770.scala new file mode 100644 index 000000000000..8119777134e8 --- /dev/null +++ b/test/files/neg/t12770.scala @@ -0,0 +1,25 @@ + +//> using options -Werror -Xlint + +object Test { + def doesntWarn(input: String) = input match { + case "a" => + case "b" => + } + def warnsNoLonger(input: String) = input match { + case "c" => + } + def control(input: T) = input match { + case _: A => + } + def any(input: Any) = input match { + case "a" => + case 42 => + case "b" => + case "c" => + } +} + +sealed trait T +final class A extends T +final class B extends T diff --git a/test/files/neg/t12785.check b/test/files/neg/t12785.check new file mode 100644 index 000000000000..5d40914cdb3c --- /dev/null +++ b/test/files/neg/t12785.check @@ -0,0 +1,9 @@ +t12785.scala:6: warning: comparing values of types Predef.type and Array[B] using `eq` will always yield false + def startsWith[B >: A](that: Array[B]): Boolean = eq(that) + ^ +t12785.scala:10: warning: comparing values of types scala.package.type and Array[B] using `eq` will always yield false + eq(that) + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12785.scala b/test/files/neg/t12785.scala new file mode 100644 index 000000000000..19609b1fa59d --- /dev/null +++ b/test/files/neg/t12785.scala @@ -0,0 +1,14 @@ +//> using options -Werror + +import scala.Predef._ + +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + def startsWith[B >: A](that: Array[B]): Boolean = eq(that) + + def endsWith[B >: A](that: Array[B]): Boolean = { + import scala.`package`._ + eq(that) + } +} + +//warning: comparing values of types type and Array[B] using `eq` will always yield false diff --git a/test/files/neg/t12785b.check b/test/files/neg/t12785b.check new file mode 100644 index 000000000000..3981a45b5cc1 --- /dev/null +++ b/test/files/neg/t12785b.check @@ -0,0 +1,26 @@ +t12785b.scala:3: error: type mismatch; + found : Predef.type + required: Nothing + def f: Nothing = null.asInstanceOf[Predef.type] + ^ +t12785b.scala:4: error: type mismatch; + found : scala.package.type + required: Nothing + def g: Nothing = null.asInstanceOf[scala.`package`.type] + ^ +t12785b.scala:5: error: type mismatch; + found : List.type + required: Nothing + def list: Nothing = null.asInstanceOf[List.type] + ^ +t12785b.scala:6: error: type mismatch; + found : Set.type + required: Nothing + def set: Nothing = null.asInstanceOf[Set.type] + ^ +t12785b.scala:7: error: type mismatch; + found : Nil.type + required: Nothing + def nil: Nothing = null.asInstanceOf[Nil.type] + ^ +5 errors diff --git a/test/files/neg/t12785b.scala b/test/files/neg/t12785b.scala new file mode 100644 index 000000000000..63d117b1aebd --- /dev/null +++ b/test/files/neg/t12785b.scala @@ -0,0 +1,8 @@ + +class C { + def f: Nothing = null.asInstanceOf[Predef.type] + def g: Nothing = null.asInstanceOf[scala.`package`.type] + def list: Nothing = null.asInstanceOf[List.type] + def set: Nothing = null.asInstanceOf[Set.type] + def nil: Nothing = null.asInstanceOf[Nil.type] +} diff --git a/test/files/neg/t12798-migration.check 
b/test/files/neg/t12798-migration.check new file mode 100644 index 000000000000..50f11360d649 --- /dev/null +++ b/test/files/neg/t12798-migration.check @@ -0,0 +1,72 @@ +t12798-migration.scala:11: error: unknown parameter name: z +Note that assignments in argument position are no longer allowed since Scala 2.13. +To express the assignment expression, wrap it in brackets, e.g., `{ z = ... }`. + f(42, z = 27) + ^ +t12798-migration.scala:25: error: unary prefix operator definition with empty parameter list is unsupported: instead, remove () to declare as `def unary_- = -42` [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def unary_-() = -42 + ^ +t12798-migration.scala:33: error: procedure syntax is deprecated for constructors: add `=`, as in method definition [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def this(s: String) { this() } + ^ +t12798-migration.scala:34: error: procedure syntax is unsupported: instead, add `: Unit =` to explicitly declare `f`'s return type [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f() { println() } + ^ +t12798-migration.scala:35: error: procedure syntax is unsupported: instead, add `: Unit` to explicitly declare `g`'s return type [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def g() + ^ +t12798-migration.scala:39: error: parentheses are required around the parameter of a lambda +Use '-Wconf:msg=lambda-parens:s' to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f = List(42).map { x: Int => x + 1 } + ^ +t12798-migration.scala:43: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f = List(42) map [Int] (_ + 1) + ^ +t12798-migration.scala:46: error: Top-level wildcard is not allowed +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +class `misuse of underscore`[_] + ^ +t12798-migration.scala:48: error: early initializers are deprecated; use trait parameters instead. +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +class `early bird` extends { val x = "hello, world" } with Runnable { def run() = println(x) } + ^ +t12798-migration.scala:17: error: Unicode escapes in raw interpolations are ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw); use literal characters instead +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=interpolated unicode such as C.f + def f = raw"\u0043 is for $entry" + ^ +t12798-migration.scala:18: error: Unicode escapes in raw interpolations are ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw); use literal characters instead +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=interpolated unicode such as C.g + def g = raw"""\u0043 is for Cat""" + ^ +t12798-migration.scala:50: error: access modifiers for `copy` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=case mods propagate +case class `case mods propagate` private (s: String) + ^ +t12798-migration.scala:60: error: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to Option[Int] instead of Some[Int] [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Child.f + override def f = Some(27) + ^ +t12798-migration.scala:52: error: access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=copyless case mods propagate.apply +case class `copyless case mods propagate` private (s: String) { + ^ +14 errors diff --git a/test/files/neg/t12798-migration.scala b/test/files/neg/t12798-migration.scala new file mode 100644 index 000000000000..69f82fd7a820 --- /dev/null +++ b/test/files/neg/t12798-migration.scala @@ -0,0 +1,66 @@ +//> using options -Xsource:3 + +// Demonstrate migration warnings at typer for -Xsource:3 + +class `named arg is not assignment` { + // unfortunately, z member is not available yet while erroring in g + //var z = 17 + def f(x: Int, y: Int) = x + y + def g = { + var z = 17 + f(42, z = 27) + } +} + +class `interpolated unicode such as \u0043` { + def entry = "Cat" + def f = raw"\u0043 is for $entry" + def g = raw"""\u0043 is for Cat""" +} + +// it was always specified that unary is parameterless. +// The most correct behavior would be that you can define unary_-() +// but you can't use it as unary prefix. 
+class `unary op lacks parens` { + def unary_-() = -42 +} + +package object tester extends Runnable { + def run() = () +} + +abstract class `procedure syntax` { + def this(s: String) { this() } + def f() { println() } + def g() +} + +class `lambda parens` { + def f = List(42).map { x: Int => x + 1 } +} + +class `infix type args` { + def f = List(42) map [Int] (_ + 1) +} + +class `misuse of underscore`[_] + +class `early bird` extends { val x = "hello, world" } with Runnable { def run() = println(x) } + +case class `case mods propagate` private (s: String) + +case class `copyless case mods propagate` private (s: String) { + def copy(x: String) = this +} + +class Parent { + def f: Option[Int] = Some(42) +} +class Child extends Parent { + override def f = Some(27) +} + +@annotation.nowarn +class `get off my back` { + def f() { println("hello, world") } +} diff --git a/test/files/neg/t12798.check b/test/files/neg/t12798.check new file mode 100644 index 000000000000..c9d3bf826934 --- /dev/null +++ b/test/files/neg/t12798.check @@ -0,0 +1,72 @@ +t12798.scala:11: error: unknown parameter name: z +Note that assignments in argument position are no longer allowed since Scala 2.13. +To express the assignment expression, wrap it in brackets, e.g., `{ z = ... }`. + f(42, z = 27) + ^ +t12798.scala:25: error: unary prefix operator definition with empty parameter list is unsupported: instead, remove () to declare as `def unary_- = -42` [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def unary_-() = -42 + ^ +t12798.scala:33: error: procedure syntax is deprecated for constructors: add `=`, as in method definition [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def this(s: String) { this() } + ^ +t12798.scala:34: error: procedure syntax is unsupported: instead, add `: Unit =` to explicitly declare `f`'s return type [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f() { println() } + ^ +t12798.scala:35: error: procedure syntax is unsupported: instead, add `: Unit` to explicitly declare `g`'s return type [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def g() + ^ +t12798.scala:39: error: parentheses are required around the parameter of a lambda +Use '-Wconf:msg=lambda-parens:s' to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f = List(42).map { x: Int => x + 1 } + ^ +t12798.scala:43: error: type application is not allowed for infix operators [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f = List(42) map [Int] (_ + 1) + ^ +t12798.scala:46: error: Top-level wildcard is not allowed +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +class `misuse of underscore`[_] + ^ +t12798.scala:48: error: early initializers are deprecated; use trait parameters instead. +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +class `early bird` extends { val x = "hello, world" } with Runnable { def run() = println(x) } + ^ +t12798.scala:17: error: Unicode escapes in raw interpolations are ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw); use literal characters instead +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=interpolated unicode such as C.f + def f = raw"\u0043 is for $entry" + ^ +t12798.scala:18: error: Unicode escapes in raw interpolations are ignored in Scala 3 (or with -Xsource-features:unicode-escapes-raw); use literal characters instead +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=interpolated unicode such as C.g + def g = raw"""\u0043 is for Cat""" + ^ +t12798.scala:50: error: access modifiers for `copy` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=case mods propagate +case class `case mods propagate` private (s: String) + ^ +t12798.scala:60: error: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to Option[Int] instead of Some[Int] [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Child.f + override def f = Some(27) + ^ +t12798.scala:52: error: access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=copyless case mods propagate.apply +case class `copyless case mods propagate` private (s: String) { + ^ +14 errors diff --git a/test/files/neg/t12798.scala b/test/files/neg/t12798.scala new file mode 100644 index 000000000000..69f82fd7a820 --- /dev/null +++ b/test/files/neg/t12798.scala @@ -0,0 +1,66 @@ +//> using options -Xsource:3 + +// Demonstrate migration warnings at typer for -Xsource:3 + +class `named arg is not assignment` { + // unfortunately, z member is not available yet while erroring in g + //var z = 17 + def f(x: Int, y: Int) = x + y + def g = { + var z = 17 + f(42, z = 27) + } +} + +class `interpolated unicode such as \u0043` { + def entry = "Cat" + def f = raw"\u0043 is for $entry" + def g = raw"""\u0043 is for Cat""" +} + +// it was always specified that unary is parameterless. +// The most correct behavior would be that you can define unary_-() +// but you can't use it as unary prefix. 
+class `unary op lacks parens` { + def unary_-() = -42 +} + +package object tester extends Runnable { + def run() = () +} + +abstract class `procedure syntax` { + def this(s: String) { this() } + def f() { println() } + def g() +} + +class `lambda parens` { + def f = List(42).map { x: Int => x + 1 } +} + +class `infix type args` { + def f = List(42) map [Int] (_ + 1) +} + +class `misuse of underscore`[_] + +class `early bird` extends { val x = "hello, world" } with Runnable { def run() = println(x) } + +case class `case mods propagate` private (s: String) + +case class `copyless case mods propagate` private (s: String) { + def copy(x: String) = this +} + +class Parent { + def f: Option[Int] = Some(42) +} +class Child extends Parent { + override def f = Some(27) +} + +@annotation.nowarn +class `get off my back` { + def f() { println("hello, world") } +} diff --git a/test/files/neg/t12798b.check b/test/files/neg/t12798b.check new file mode 100644 index 000000000000..e80848b73354 --- /dev/null +++ b/test/files/neg/t12798b.check @@ -0,0 +1,11 @@ +t12798b.scala:9: error: shadowing a nested class of a parent is deprecated but class P shadows class P defined in class HasP; rename the class to something else +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=AnotherP.P + class P extends super.P + ^ +t12798b.scala:15: error: shadowing a nested class of a parent is deprecated but class Q shadows class Q defined in class HasQ; rename the class to something else +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=AnotherQ.Q + class Q + ^ +2 errors diff --git a/test/files/neg/t12798b.scala b/test/files/neg/t12798b.scala new file mode 100644 index 000000000000..448135c1c09f --- /dev/null +++ b/test/files/neg/t12798b.scala @@ -0,0 +1,16 @@ +//> using options -Xsource:3 + +// Demonstrate migration warnings at refchecks for -Xsource:3 + +class HasP { + class P +} +class AnotherP extends HasP { + class P extends super.P +} +class HasQ { + class Q +} +class AnotherQ extends HasQ { + class Q +} diff --git a/test/files/neg/t12799.check b/test/files/neg/t12799.check new file mode 100644 index 000000000000..8b14a5e82043 --- /dev/null +++ b/test/files/neg/t12799.check @@ -0,0 +1,9 @@ +Test_2.scala:8: warning: method answer in class C is deprecated (since beginning) + println(C.answer()) + ^ +Test_2.scala:8: warning: class C in package example is deprecated (since you like it) + println(C.answer()) + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t12799/C.java b/test/files/neg/t12799/C.java new file mode 100644 index 000000000000..ea454ac01c74 --- /dev/null +++ b/test/files/neg/t12799/C.java @@ -0,0 +1,11 @@ + +package example; + +@Deprecated(since="you like it") +public class C { + + @Deprecated(since="beginning") + public static int answer() { + return 42; + } +} diff --git a/test/files/neg/t12799/Test_2.scala b/test/files/neg/t12799/Test_2.scala new file mode 100644 index 000000000000..a4d6d5e9fa23 --- /dev/null +++ b/test/files/neg/t12799/Test_2.scala @@ -0,0 +1,9 @@ + +//> using options -Werror -Xlint +//> using jvm 9+ + +import example._ + +object Test extends App { + println(C.answer()) +} diff --git a/test/files/neg/t12800.check b/test/files/neg/t12800.check new file mode 100644 index 000000000000..6367d6bc9610 --- /dev/null +++ b/test/files/neg/t12800.check @@ -0,0 +1,7 @@ +matcher_1.scala:8: warning: match may not be exhaustive. 
+It would fail on the following input: ORANGE + jb match { + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12800/JetBrains.java b/test/files/neg/t12800/JetBrains.java new file mode 100644 index 000000000000..7bdf1ca3576d --- /dev/null +++ b/test/files/neg/t12800/JetBrains.java @@ -0,0 +1,13 @@ + +public enum JetBrains { + APPLE { + @Override public String text() { + return "Cupertino tech company"; + } + }, + ORANGE + ; + public String text() { + return "Boring default"; + } +} diff --git a/test/files/neg/t12800/matcher_1.scala b/test/files/neg/t12800/matcher_1.scala new file mode 100644 index 000000000000..186acb763fb4 --- /dev/null +++ b/test/files/neg/t12800/matcher_1.scala @@ -0,0 +1,11 @@ + +//> using options -Werror -Xsource:3 + +import JetBrains.* + +class C { + def f(jb: JetBrains): Int = + jb match { + case APPLE => 42 + } +} diff --git a/test/files/neg/t12813.check b/test/files/neg/t12813.check new file mode 100644 index 000000000000..4fe6fe3f61de --- /dev/null +++ b/test/files/neg/t12813.check @@ -0,0 +1,16 @@ +t12813.scala:5: error: a is imported twice +import O.{a, a} // error + ^ +t12813.scala:8: error: a is renamed twice to b +import O.{a => b, a => b} // error + ^ +t12813.scala:10: error: b is an ambiguous name on import +import O.{a => b, toString => b} // error + ^ +t12813.scala:11: error: toString is an ambiguous name on import +import O.{a => toString, toString} // error + ^ +t12813.scala:12: error: toString is an ambiguous name on import +import O.{toString, a => toString} // error + ^ +5 errors diff --git a/test/files/neg/t12813.scala b/test/files/neg/t12813.scala new file mode 100644 index 000000000000..759fc8b2f450 --- /dev/null +++ b/test/files/neg/t12813.scala @@ -0,0 +1,19 @@ +// + +object O { val a = 1 } + +import O.{a, a} // error +import O.{a => b, a} // ok +import O.{a, a => b} // ok +import O.{a => b, a => b} // error +import O.{a => b, a => c} // ok +import O.{a => b, toString 
=> b} // error +import O.{a => toString, toString} // error +import O.{toString, a => toString} // error +import O.{a => _, toString => _} // ok +import O.{given, a, _} // ok +import O.{given, toString, a, _} // ok +import O.{a, given, *} // ok +import O.{a, *, given} // ok +import O.{a, given, *, _} // ok +import O.{a, given} // ok diff --git a/test/files/neg/t12813b.check b/test/files/neg/t12813b.check new file mode 100644 index 000000000000..a3d73ea2cb01 --- /dev/null +++ b/test/files/neg/t12813b.check @@ -0,0 +1,25 @@ +t12813b.scala:5: error: a is imported twice +import O.{a, a} // error + ^ +t12813b.scala:8: error: a is renamed twice to b +import O.{a => b, a => b} // error + ^ +t12813b.scala:10: error: b is an ambiguous name on import +import O.{a => b, toString => b} // error + ^ +t12813b.scala:11: error: toString is an ambiguous name on import +import O.{a => toString, toString} // error + ^ +t12813b.scala:12: error: toString is an ambiguous name on import +import O.{toString, a => toString} // error + ^ +t12813b.scala:14: error: wildcard import must be in last position +import O.{given, a, _} // error 3 + ^ +t12813b.scala:15: error: wildcard import must be in last position +import O.{given, toString, a, _} // error 3 + ^ +t12813b.scala:18: error: duplicate wildcard selector +import O.{a, given, *, _} // error 3 + ^ +8 errors diff --git a/test/files/neg/t12813b.scala b/test/files/neg/t12813b.scala new file mode 100644 index 000000000000..c59c401e65c9 --- /dev/null +++ b/test/files/neg/t12813b.scala @@ -0,0 +1,19 @@ +//> using options -Xsource:3 + +object O { val a = 1 } + +import O.{a, a} // error +import O.{a => b, a} // ok +import O.{a, a => b} // ok +import O.{a => b, a => b} // error +import O.{a => b, a => c} // ok +import O.{a => b, toString => b} // error +import O.{a => toString, toString} // error +import O.{toString, a => toString} // error +import O.{a => _, toString => _} // ok +import O.{given, a, _} // error 3 +import O.{given, toString, a, _} 
// error 3 +import O.{a, given, *} // ok +import O.{a, *, given} // ok +import O.{a, given, *, _} // error 3 +import O.{a, given} // ok diff --git a/test/files/neg/t12813c.check b/test/files/neg/t12813c.check new file mode 100644 index 000000000000..b882b8e3e67d --- /dev/null +++ b/test/files/neg/t12813c.check @@ -0,0 +1,4 @@ +t12813c.scala:6: error: a is renamed twice to abcdefghij +import O.{a => abcdefghij, a => abcdefghij} + ^ +1 error diff --git a/test/files/neg/t12813c.scala b/test/files/neg/t12813c.scala new file mode 100644 index 000000000000..6cb0fded1715 --- /dev/null +++ b/test/files/neg/t12813c.scala @@ -0,0 +1,6 @@ + +//> abusing options -Vprint:parser -Vpos -Yprint-trees:format + +object O { val a = 42 } + +import O.{a => abcdefghij, a => abcdefghij} diff --git a/test/files/neg/t12815.check b/test/files/neg/t12815.check new file mode 100644 index 000000000000..059ece1cac95 --- /dev/null +++ b/test/files/neg/t12815.check @@ -0,0 +1,9 @@ +t12815.scala:22: warning: method with a single empty parameter list overrides method e in trait T defined without a parameter list [quickfixable] + def e(): Int = 1 // warn + ^ +t12815.scala:23: warning: method without a parameter list overrides method f in trait T defined with a single empty parameter list [quickfixable] + def f: Int = 1 // warn + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12815.scala b/test/files/neg/t12815.scala new file mode 100644 index 000000000000..dd477e487c76 --- /dev/null +++ b/test/files/neg/t12815.scala @@ -0,0 +1,24 @@ +//> using options -Werror + +import scala.beans.BeanProperty + +trait T { + def getAa: String + def getBb(): String + + def c: Int + def d(): Int + + def e: Int + def f(): Int +} +class C extends T { + @BeanProperty val aa: String = "" // ok + @BeanProperty val bb: String = "" + + val c: Int = 1 + val d: Int = 1 // ok + + def e(): Int = 1 // warn + def f: Int = 1 // warn +} diff --git a/test/files/neg/t12816.check b/test/files/neg/t12816.check new file mode 100644 index 000000000000..d78566930c28 --- /dev/null +++ b/test/files/neg/t12816.check @@ -0,0 +1,19 @@ +t12816.scala:29: error: reference to c is ambiguous; +it is both defined in the enclosing package p and inherited in the enclosing trait RR as method c (defined in trait T) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.c`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=p.RR.m3 + def m3 = c // warn + ^ +t12816.scala:33: error: reference to Z is ambiguous; +it is both defined in the enclosing package p and inherited in the enclosing trait RR as trait Z (defined in trait T) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.Z`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. 
Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=p.RR.n3 + def n3: Z // warn + ^ +2 errors diff --git a/test/files/neg/t12816.scala b/test/files/neg/t12816.scala new file mode 100644 index 000000000000..fa9e74543167 --- /dev/null +++ b/test/files/neg/t12816.scala @@ -0,0 +1,35 @@ +//> using options -Xsource:3 -Werror + +trait U { + def a: Int = 0 + trait X +} + +package object p extends U { + def b: Int = 0 + trait Y +} + +package p { + object c + trait Z + trait T { + def a = 1 + def b = 1 + def c = 1 + + trait X + trait Y + trait Z + } + + trait RR extends T { + def m1 = a // ok + def m2 = b // ok + def m3 = c // warn + + def n1: X // ok + def n2: Y // ok + def n3: Z // warn + } +} diff --git a/test/files/neg/t12816b.check b/test/files/neg/t12816b.check new file mode 100644 index 000000000000..eb80167ae660 --- /dev/null +++ b/test/files/neg/t12816b.check @@ -0,0 +1,19 @@ +B.scala:19: error: reference to c is ambiguous; +it is both defined in the enclosing package p and inherited in the enclosing trait RR as method c (defined in trait T) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.c`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=p.RR.m3 + def m3 = c // warn + ^ +B.scala:23: error: reference to Z is ambiguous; +it is both defined in the enclosing package p and inherited in the enclosing trait RR as trait Z (defined in trait T) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. 
To continue using the inherited symbol, write `this.Z`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=p.RR.n3 + def n3: Z // warn + ^ +2 errors diff --git a/test/files/neg/t12816b/A.scala b/test/files/neg/t12816b/A.scala new file mode 100644 index 000000000000..056a08a52194 --- /dev/null +++ b/test/files/neg/t12816b/A.scala @@ -0,0 +1,8 @@ +trait U { + def a: Int = 0 + trait X +} +package object p extends U { + def b: Int = 0 + trait Y +} diff --git a/test/files/neg/t12816b/B.scala b/test/files/neg/t12816b/B.scala new file mode 100644 index 000000000000..1ea81921e114 --- /dev/null +++ b/test/files/neg/t12816b/B.scala @@ -0,0 +1,25 @@ +//> using options -Xsource:3 -Werror + +package p { + object c + trait Z + trait T { + def a = 1 + def b = 1 + def c = 1 + + trait X + trait Y + trait Z + } + + trait RR extends T { + def m1 = a // ok + def m2 = b // ok + def m3 = c // warn + + def n1: X // ok + def n2: Y // ok + def n3: Z // warn + } +} diff --git a/test/files/neg/t12843.check b/test/files/neg/t12843.check new file mode 100644 index 000000000000..77f74bf37dc2 --- /dev/null +++ b/test/files/neg/t12843.check @@ -0,0 +1,21 @@ +t12843.scala:4: error: ambiguous reference to overloaded definition, +both method remove in class ListBuffer of type (idx: Int, count: Int): Unit +and method remove in class ListBuffer of type (idx: Int): Int +match expected type ? + def f = b.remove + ^ +t12843.scala:5: error: missing argument list for method update in class ListBuffer of type (idx: Int, elem: Int): Unit +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `update _` or `update(_,_)` instead of `update`. 
+ def g = b.update + ^ +t12843.scala:10: error: missing argument list for method map in class BitSet +with overloaded members in scala.collection.immutable.BitSet + (f: Int => Int): scala.collection.immutable.BitSet + [B](f: Int => B)(implicit ev: Ordering[B]): scala.collection.immutable.SortedSet[B] + [B](f: Int => B): scala.collection.immutable.Set[B] +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `map _` or `map(_)` instead of `map`. + def f = c.map + ^ +3 errors diff --git a/test/files/neg/t12843.scala b/test/files/neg/t12843.scala new file mode 100644 index 000000000000..5838811763e5 --- /dev/null +++ b/test/files/neg/t12843.scala @@ -0,0 +1,11 @@ + +class C { + val b = collection.mutable.ListBuffer.empty[Int] + def f = b.remove + def g = b.update +} + +class D { + val c = collection.immutable.BitSet(1, 2, 3) + def f = c.map +} diff --git a/test/files/neg/t12845.check b/test/files/neg/t12845.check new file mode 100644 index 000000000000..0653ae66dbaf --- /dev/null +++ b/test/files/neg/t12845.check @@ -0,0 +1,9 @@ +t12845.scala:4: warning: Prefer the Scala annotation over Java's `@Deprecated` to provide a message and version: @deprecated("message", since = "MyLib 1.0") + @Deprecated def f = 0 + ^ +t12845.scala:6: warning: method f in object O is deprecated +class C { def g = O.f } + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12845.scala b/test/files/neg/t12845.scala new file mode 100644 index 000000000000..2d7d549cf238 --- /dev/null +++ b/test/files/neg/t12845.scala @@ -0,0 +1,6 @@ +//> using options -Werror -deprecation -Xlint + +object O { + @Deprecated def f = 0 +} +class C { def g = O.f } diff --git a/test/files/neg/t12851b.check b/test/files/neg/t12851b.check new file mode 100644 index 000000000000..7a9c790ba819 --- /dev/null +++ b/test/files/neg/t12851b.check @@ -0,0 +1,9 @@ +C_2.scala:2: warning: method f in trait T2 defined with a single empty parameter list overrides method f in trait T1 defined without a parameter list +class C extends T1 with T2 + ^ +C_2.scala:2: warning: method g in trait T2 defined without a parameter list overrides method g in trait T1 defined with a single empty parameter list +class C extends T1 with T2 + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t12851b/C_2.scala b/test/files/neg/t12851b/C_2.scala new file mode 100644 index 000000000000..39536ddec310 --- /dev/null +++ b/test/files/neg/t12851b/C_2.scala @@ -0,0 +1,2 @@ +//> using options -Werror +class C extends T1 with T2 diff --git a/test/files/neg/t12851b/T_1.scala b/test/files/neg/t12851b/T_1.scala new file mode 100644 index 000000000000..e43a11ab2eb8 --- /dev/null +++ b/test/files/neg/t12851b/T_1.scala @@ -0,0 +1,11 @@ + +trait T1 { + def f: Int + def g(): Int + def v(): Int +} +trait T2 { + def f() = 42 + def g = 42 + val v = 42 +} diff --git a/test/files/neg/t1286.check b/test/files/neg/t1286.check index 912709613cf3..e980aaab6806 100644 --- a/test/files/neg/t1286.check +++ b/test/files/neg/t1286.check @@ -2,4 +2,4 @@ b.scala:1: error: Companions 'trait Foo' and 'object Foo' must be defined in sam Found in t1286/a.scala and t1286/b.scala object Foo extends Foo { ^ -one error found +1 error diff --git a/test/files/neg/t12879.check b/test/files/neg/t12879.check new file mode 100644 index 
000000000000..bb9517262a4e --- /dev/null +++ b/test/files/neg/t12879.check @@ -0,0 +1,6 @@ +Test.scala:3: warning: Java enum Foo in Java enum A is deprecated + val x: A = A.Foo + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12879/A.java b/test/files/neg/t12879/A.java new file mode 100644 index 000000000000..c757e09bd19f --- /dev/null +++ b/test/files/neg/t12879/A.java @@ -0,0 +1,5 @@ +public enum A { + @Deprecated + Foo, + Bar +} diff --git a/test/files/neg/t12879/Test.scala b/test/files/neg/t12879/Test.scala new file mode 100644 index 000000000000..d3e755560921 --- /dev/null +++ b/test/files/neg/t12879/Test.scala @@ -0,0 +1,5 @@ +//> using options -deprecation -Werror +class C { + val x: A = A.Foo + val y: A = A.Bar +} diff --git a/test/files/neg/t12883.check b/test/files/neg/t12883.check new file mode 100644 index 000000000000..664d25d0d3d6 --- /dev/null +++ b/test/files/neg/t12883.check @@ -0,0 +1,6 @@ +t12883.scala:3: error: access modifiers for `apply` method are copied from the case class constructor under Scala 3 (or with -Xsource-features:case-apply-copy-access) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C.apply +case class C private (c: Int) { + ^ +1 error diff --git a/test/files/neg/t12883.scala b/test/files/neg/t12883.scala new file mode 100644 index 000000000000..9c3dc860340f --- /dev/null +++ b/test/files/neg/t12883.scala @@ -0,0 +1,5 @@ +//> using options -Xsource:3 + +case class C private (c: Int) { + def copy(c: Int = this.c): C = new C(c) +} diff --git a/test/files/neg/t12919-3cross.check b/test/files/neg/t12919-3cross.check new file mode 100644 index 000000000000..c32d670a1342 --- /dev/null +++ b/test/files/neg/t12919-3cross.check @@ -0,0 +1,4 @@ +t12919-3cross.scala:24: error: could not find implicit value for parameter ord: Ordering[a.A] + def f(xs: List[a.A]) = xs.sorted // not found + ^ +1 error diff --git a/test/files/neg/t12919-3cross.scala b/test/files/neg/t12919-3cross.scala new file mode 100644 index 000000000000..aa2a5dcb74a1 --- /dev/null +++ b/test/files/neg/t12919-3cross.scala @@ -0,0 +1,36 @@ +//> using options -Xsource:3 -Xsource-features:package-prefix-implicits + +package object a { + implicit val aOrd: Ordering[A] = null + implicit val bOrd: Ordering[b.B] = null +} + +package a { + class A + + package aa { + class U { + // implicit is in an enclosing package of the callsite, not in the path of the implicit's type + def f(xs: List[a.A]) = xs.sorted // ok + def g(xs: List[b.B]) = xs.sorted // ok + } + } +} + +package b { + class B + + class V { + def f(xs: List[a.A]) = xs.sorted // not found + } +} + +package c { + import a._ + + class W { + def f(xs: List[a.A]) = xs.sorted // ok + def g(xs: List[b.B]) = xs.sorted // ok + } +} + diff --git a/test/files/neg/t12919.check b/test/files/neg/t12919.check new file mode 100644 index 000000000000..0c06daaf2e80 --- /dev/null +++ b/test/files/neg/t12919.check @@ -0,0 +1,13 @@ +t12919.scala:24: error: Implicit value aOrd was found in a package prefix of the required type, which is not part of the implicit 
scope in Scala 3 (or with -Xsource-features:package-prefix-implicits). +For migration, add `import a.aOrd`. +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=b.V.f + def f(xs: List[a.A]) = xs.sorted // warn + ^ +t12919.scala:48: error: Implicit method myClassToSeq was found in a package prefix of the required type, which is not part of the implicit scope in Scala 3 (or with -Xsource-features:package-prefix-implicits). +For migration, add `import a1.a2.myClassToSeq`. +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=a1.Main.f + def f[A](x: Seq[a1.a2.MyClass[A]]): Seq[A] = x.flatten + ^ +2 errors diff --git a/test/files/neg/t12919.scala b/test/files/neg/t12919.scala new file mode 100644 index 000000000000..ea35a2d6ba4b --- /dev/null +++ b/test/files/neg/t12919.scala @@ -0,0 +1,50 @@ +//> using options -Xsource:3 -Werror + +package object a { + implicit val aOrd: Ordering[A] = null + implicit val bOrd: Ordering[b.B] = null +} + +package a { + class A + + package aa { + class U { + // implicit is in an enclosing package of the callsite, not in the path of the implicit's type + def f(xs: List[a.A]) = xs.sorted // ok + def g(xs: List[b.B]) = xs.sorted // ok + } + } +} + +package b { + class B + + class V { + def f(xs: List[a.A]) = xs.sorted // warn + } +} + +package c { + import a._ + + class W { + def f(xs: List[a.A]) = xs.sorted // ok + def g(xs: List[b.B]) = xs.sorted // ok + } +} + +package a1 { + + package object a2 { + implicit def myClassToSeq[A](a: MyClass[A]): Seq[A] = a.values + } + + package a2 { + case class MyClass[A](values: Seq[A]) + } + + object Main { + def f[A](x: Seq[a1.a2.MyClass[A]]): Seq[A] = x.flatten + } +} 
diff --git a/test/files/neg/t12953-expandee.check b/test/files/neg/t12953-expandee.check new file mode 100644 index 000000000000..dc4fccd9fbb9 --- /dev/null +++ b/test/files/neg/t12953-expandee.check @@ -0,0 +1,9 @@ +Client_2.scala:10: warning: possible missing interpolator: detected interpolated identifier `$unusedVariable` + println("hello, world of $unusedVariable") + ^ +Client_2.scala:9: warning: local val unusedVariable in value is never used + val unusedVariable = "42".toInt + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t12953-expandee/Client_2.scala b/test/files/neg/t12953-expandee/Client_2.scala new file mode 100644 index 000000000000..175eab07ef1f --- /dev/null +++ b/test/files/neg/t12953-expandee/Client_2.scala @@ -0,0 +1,13 @@ + +//> using options -Werror -Wunused:locals -Xlint:missing-interpolator -Wmacros:before + +import Macro.id + +object Test extends App { + println { + id { + val unusedVariable = "42".toInt + println("hello, world of $unusedVariable") + } + } +} diff --git a/test/files/neg/t12953-expandee/Macro_1.scala b/test/files/neg/t12953-expandee/Macro_1.scala new file mode 100644 index 000000000000..9b3204041783 --- /dev/null +++ b/test/files/neg/t12953-expandee/Macro_1.scala @@ -0,0 +1,11 @@ + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object Macro { + def id[A](body: A): A = macro impl[A] + + def impl[A: c.WeakTypeTag](c: Context)(body: c.Expr[A]) = { + body + } +} diff --git a/test/files/neg/t12953-expansion-b.check b/test/files/neg/t12953-expansion-b.check new file mode 100644 index 000000000000..dcf0eb37fcc4 --- /dev/null +++ b/test/files/neg/t12953-expansion-b.check @@ -0,0 +1,9 @@ +Client_2.scala:8: warning: possible missing interpolator: detected interpolated identifier `$unusedVariable` + id { + ^ +Client_2.scala:8: warning: local val unusedVariable in value is never used + id { + ^ +error: No warnings can be incurred under 
-Werror. +2 warnings +1 error diff --git a/test/files/neg/t12953-expansion-b/Client_2.scala b/test/files/neg/t12953-expansion-b/Client_2.scala new file mode 100644 index 000000000000..732242fcf7d9 --- /dev/null +++ b/test/files/neg/t12953-expansion-b/Client_2.scala @@ -0,0 +1,12 @@ + +//> using options -Werror -Wunused:locals -Xlint:missing-interpolator -Wmacros:after + +import Macro.id + +object Test extends App { + println { + id { + println("goodbye, cruel world of $unusedVariable") + } + } +} diff --git a/test/files/neg/t12953-expansion-b/Macro_1.scala b/test/files/neg/t12953-expansion-b/Macro_1.scala new file mode 100644 index 000000000000..9e5d8467520c --- /dev/null +++ b/test/files/neg/t12953-expansion-b/Macro_1.scala @@ -0,0 +1,13 @@ + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +// with unused interpolator check in typer, the variable and literal must be typechecked together to warn +object Macro { + def id[A](body: A): A = macro impl[A] + + def impl[A: c.WeakTypeTag](c: Context)(body: c.Expr[A]) = { + import c.universe._ + q"""val unusedVariable = "42".toInt; println("hello, world of $$unusedVariable"); $body""" + } +} diff --git a/test/files/neg/t12953-expansion.check b/test/files/neg/t12953-expansion.check new file mode 100644 index 000000000000..4b1a987128dc --- /dev/null +++ b/test/files/neg/t12953-expansion.check @@ -0,0 +1,6 @@ +Client_2.scala:8: warning: local val unusedVariable in value is never used + id { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12953-expansion/Client_2.scala b/test/files/neg/t12953-expansion/Client_2.scala new file mode 100644 index 000000000000..522dd9411865 --- /dev/null +++ b/test/files/neg/t12953-expansion/Client_2.scala @@ -0,0 +1,12 @@ + +//> using options -Werror -Wunused:locals -Xlint:missing-interpolator -Wmacros:after + +import Macro.id + +object Test extends App { + println { + id { + println("hello, world of $unusedVariable") + } + } +} diff --git a/test/files/neg/t12953-expansion/Macro_1.scala b/test/files/neg/t12953-expansion/Macro_1.scala new file mode 100644 index 000000000000..4159c22a76d0 --- /dev/null +++ b/test/files/neg/t12953-expansion/Macro_1.scala @@ -0,0 +1,13 @@ + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +// with unused interpolator check in typer, the variable and literal must be typechecked together to warn +object Macro { + def id[A](body: A): A = macro impl[A] + + def impl[A: c.WeakTypeTag](c: Context)(body: c.Expr[A]) = { + import c.universe._ + q"""val unusedVariable = "42".toInt; $body""" + } +} diff --git a/test/files/neg/t12984.check b/test/files/neg/t12984.check new file mode 100644 index 000000000000..238af71422db --- /dev/null +++ b/test/files/neg/t12984.check @@ -0,0 +1,6 @@ +t12984.scala:12: warning: class D is deprecated (since 2.0): Will be phased out eventually someday. + def d = new D + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t12984.scala b/test/files/neg/t12984.scala new file mode 100644 index 000000000000..f62ae8c2ab37 --- /dev/null +++ b/test/files/neg/t12984.scala @@ -0,0 +1,13 @@ + +//> using options -deprecation -Wconf:cat=deprecation&origin=C:s -Werror -Xlint + +@deprecated("Just say no.", since="1.0") +class C + +@deprecated("Will be phased out eventually someday.", since="2.0") +class D + +trait Test { + def c = new C + def d = new D +} diff --git a/test/files/neg/t13004b.check b/test/files/neg/t13004b.check new file mode 100644 index 000000000000..da72f763e67d --- /dev/null +++ b/test/files/neg/t13004b.check @@ -0,0 +1,9 @@ +t13004b.scala:14: applied implicit conversion from Money to ?{def + : ?} = final implicit def any2stringadd[A](self: A): any2stringadd[A] + Money(3.14) + Money(1.7) + ^ +t13004b.scala:14: error: type mismatch; + found : Money + required: String + Money(3.14) + Money(1.7) + ^ +1 error diff --git a/test/files/neg/t13004b.scala b/test/files/neg/t13004b.scala new file mode 100644 index 000000000000..850a5cef4544 --- /dev/null +++ b/test/files/neg/t13004b.scala @@ -0,0 +1,16 @@ +//> using options -Vimplicit-conversions -Xlint -Xsource:3 -Xsource-features:any2stringadd + +import scala.Predef.* + +case class Money(value: Double) +object Money { + implicit class MoneySyntax(private val self: Money) extends AnyVal { + def +(other: Money): Money = Money(self.value + other.value) + } +} + +object Test extends App { + println { + Money(3.14) + Money(1.7) + } +} diff --git a/test/files/neg/t13006.check b/test/files/neg/t13006.check new file mode 100644 index 000000000000..4d46b2624997 --- /dev/null +++ b/test/files/neg/t13006.check @@ -0,0 +1,30 @@ +t13006.scala:7: warning: class C is deprecated + type X[T] = C[T] // warn + ^ +t13006.scala:17: warning: class C is deprecated + def t3 = C.apply(20) // warn + ^ +t13006.scala:18: warning: class C is deprecated + def t4 = new C(10) // warn + ^ +t13006.scala:21: warning: 
constructor D in class D is deprecated + def u1 = A.Y.apply(10) // warn + ^ +t13006.scala:22: warning: constructor D in class D is deprecated + def u2 = new A.Y(10) // warn + ^ +t13006.scala:23: warning: constructor D in class D is deprecated + def u3 = D.apply(10) // warn + ^ +t13006.scala:24: warning: constructor D in class D is deprecated + def u4 = new D(10) // warn + ^ +t13006.scala:34: warning: class C in object oho is deprecated + def t1 = oho.C(10) // warn + ^ +t13006.scala:35: warning: constructor D in class D is deprecated + def t2 = oho.D(10) // warn + ^ +error: No warnings can be incurred under -Werror. +9 warnings +1 error diff --git a/test/files/neg/t13006.scala b/test/files/neg/t13006.scala new file mode 100644 index 000000000000..dd0bd589bcda --- /dev/null +++ b/test/files/neg/t13006.scala @@ -0,0 +1,36 @@ +//> using options -deprecation -Werror + +@deprecated case class C[T](x: T) +case class D @deprecated() (x: Int) + +object A { + type X[T] = C[T] // warn + val X = C // no warn + + type Y = D // no warn + val Y = D // no warn +} + +class T { + def t1 = A.X.apply(10) // no warn + def t2 = new A.X(10) // no warn + def t3 = C.apply(20) // warn + def t4 = new C(10) // warn + def t5 = C.hashCode // no warn + + def u1 = A.Y.apply(10) // warn + def u2 = new A.Y(10) // warn + def u3 = D.apply(10) // warn + def u4 = new D(10) // warn + def u5(d: D) = 10 // no warn +} + +object oho { + @deprecated case class C private[oho] (x: Int) + case class D @deprecated() private[oho] (x: Int) +} + +class T1 { + def t1 = oho.C(10) // warn + def t2 = oho.D(10) // warn +} diff --git a/test/files/neg/t13007.check b/test/files/neg/t13007.check new file mode 100644 index 000000000000..b57af0ea8f82 --- /dev/null +++ b/test/files/neg/t13007.check @@ -0,0 +1,10 @@ +Test.scala:9: error: Unable to emit reference to method j in class J, class J is not accessible in trait T + def t4 = j() + ^ +Test.scala:10: error: Unable to emit reference to method j in class J, class J is not 
accessible in trait T + def t5 = this.j() + ^ +Test.scala:11: error: Unable to emit reference to method j in class J, class J is not accessible in trait T + def t6 = self.j() + ^ +3 errors diff --git a/test/files/neg/t13007/J.java b/test/files/neg/t13007/J.java new file mode 100644 index 000000000000..da76473a48c0 --- /dev/null +++ b/test/files/neg/t13007/J.java @@ -0,0 +1,7 @@ +package j; + +public class J { + protected boolean i() { return false; } + protected boolean j() { return false; } + public boolean k() { return false; } +} diff --git a/test/files/neg/t13007/Test.scala b/test/files/neg/t13007/Test.scala new file mode 100644 index 000000000000..a2f219accaad --- /dev/null +++ b/test/files/neg/t13007/Test.scala @@ -0,0 +1,18 @@ +package s + +trait T { self: j.J => + override def i(): Boolean = true + def t1 = i() + def t2 = this.i() + def t3 = self.i() + + def t4 = j() + def t5 = this.j() + def t6 = self.j() + + def t7 = k() + def t8 = this.k() + def t9 = self.k() +} + +class C extends j.J with T diff --git a/test/files/neg/t13014.check b/test/files/neg/t13014.check new file mode 100644 index 000000000000..0f0c6012e815 --- /dev/null +++ b/test/files/neg/t13014.check @@ -0,0 +1,5 @@ +t13014.scala:2: error: could not find implicit value for parameter blup: String +Error occurred in an application involving default arguments. 
+class D extends C(y = 1) + ^ +1 error diff --git a/test/files/neg/t13014.scala b/test/files/neg/t13014.scala new file mode 100644 index 000000000000..575d434432e4 --- /dev/null +++ b/test/files/neg/t13014.scala @@ -0,0 +1,2 @@ +class C(x: Int = 0, y: Int = 0)(implicit blup: String) +class D extends C(y = 1) diff --git a/test/files/neg/t13055.check b/test/files/neg/t13055.check new file mode 100644 index 000000000000..26f5e77f5799 --- /dev/null +++ b/test/files/neg/t13055.check @@ -0,0 +1,10 @@ +t13055.scala:15: error: missing argument list for method forAll in object Main +with overloaded members in Main.type + [A1, P](f: A1 => P)(implicit p: P => Main.Prop): Main.Prop + [T1, P](g: Main.Gen[T1])(f: T1 => P)(implicit p: P => Main.Prop): Main.Prop +Unapplied methods are only converted to functions when a function type is expected. +Use -Xsource-features:eta-expand-always to convert even if the expected type is not a function type. +You can make this conversion explicit by writing `forAll _` or `forAll(_)(_)(_)` instead of `forAll`. + def what() = forAll { + ^ +1 error diff --git a/test/files/neg/t13055.scala b/test/files/neg/t13055.scala new file mode 100644 index 000000000000..4a236a092af8 --- /dev/null +++ b/test/files/neg/t13055.scala @@ -0,0 +1,28 @@ +//> using options -Xsource:3 + +//import org.scalacheck._, Prop._ + +object Main extends App { + class Prop + class Gen[A] + object Gen { + implicit def const[T](x: T): Gen[T] = ??? + } + + def forAll[T1, P](g: Gen[T1])(f: T1 => P)(implicit p: P => Prop): Prop = ??? + def forAll[A1, P](f: A1 => P)(implicit p: P => Prop): Prop = ??? 
+ + def what() = forAll { + (a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, + a8: Int, + a9: Int, + ) => false + } + +} + +/* + def what(): (((Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean) => Nothing) => Main.Prop = { + val eta$0$1: Main.Gen[(Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean] = Main.this.Gen.const[(Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean](((a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, a8: Int, a9: Int) => false)); + ((f: ((Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean) => Nothing) => Main.this.forAll[(Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean, Nothing](eta$0$1)(f)(scala.Predef.$conforms[Nothing])) +*/ diff --git a/test/files/neg/t13070.check b/test/files/neg/t13070.check new file mode 100644 index 000000000000..33fab290c580 --- /dev/null +++ b/test/files/neg/t13070.check @@ -0,0 +1,18 @@ +t13070.scala:7: warning: pattern var i in value $anonfun is never used + (i, j) = ns // warn // warn + ^ +t13070.scala:7: warning: pattern var j in value $anonfun is never used + (i, j) = ns // warn // warn + ^ +t13070.scala:16: warning: pattern var j in value $anonfun is never used + (i, j) = ns // warn + ^ +t13070.scala:23: warning: pattern var i in object pat vardef are patvars is never used + private var (i, j) = (42, 27) // warn // warn + ^ +t13070.scala:23: warning: pattern var j in object pat vardef are patvars is never used + private var (i, j) = (42, 27) // warn // warn + ^ +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/t13070.scala b/test/files/neg/t13070.scala new file mode 100644 index 000000000000..4b84fbfc9212 --- /dev/null +++ b/test/files/neg/t13070.scala @@ -0,0 +1,59 @@ +//> using options -Wunused:patvars -Werror +class C { + def g = { + val t = Option((27, 42)) + for { + ns <- t + (i, j) = ns // warn // warn + } yield 42 + } +} +class D { + def g = { + val t = Option((27, 42)) + for { + ns <- t + (i, j) = ns // warn + } yield 42 + i + } +} + +// previously, the following do not warn under -Wunused:patvars in Scala 2 (but Scala 3 does) +object `pat vardef are patvars` { + private var (i, j) = (42, 27) // warn // warn +} + +object `patvar is assignable` { + var (i, j) = (42, 27) // no warn nonprivate + j += 1 + println((i, j)) +} + +object `privy patvar is assignable` { + private var (i, j) = (42, 27) // warn + j += 1 + println((i, j)) +} + +object `local patvar is assignable` { + def f() = { + var (i, j) = (42, 27) // warn + j += 1 + println((i, j)) + } +} + +object `mutable patvar in for` { + def f(xs: List[Int]) = { + for (x <- xs; y = x + 1 if y > 10) + yield { + var z :: Nil = y :: Nil: @unchecked // warn + z + 10 + } + } +} + +class `unset var requires -Wunused` { + private var i = 0 // no warn as we didn't ask for it + def f = println(i) +} diff --git a/test/files/neg/t13095.check b/test/files/neg/t13095.check new file mode 100644 index 000000000000..ab88b56a87a6 --- /dev/null +++ b/test/files/neg/t13095.check @@ -0,0 +1,21 @@ +t13095.scala:12: warning: pattern var z in object Main is never used + private val A(w, z) = A(42, 27) // warn + ^ +t13095.scala:13: warning: pattern var r in object Main is never used + private[this] val A(q, r) = A(42, 27) // warn + ^ +t13095.scala:42: warning: pattern var s in method spam is never used + case email(s, addr) => // warn // warn each, multiple extraction + ^ +t13095.scala:42: warning: pattern var addr in method spam is never used + case email(s, addr) => // warn // warn each, 
multiple extraction + ^ +t13095.scala:52: warning: pattern var v in method scala-dev#902 is never used + case (i, v @ (_, _)) => i // warn multiple patvars + ^ +t13095.scala:52: warning: a pure expression does nothing in statement position + case (i, v @ (_, _)) => i // warn multiple patvars + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/t13095.scala b/test/files/neg/t13095.scala new file mode 100644 index 000000000000..a044a6a19d6a --- /dev/null +++ b/test/files/neg/t13095.scala @@ -0,0 +1,65 @@ +//> using options -Wunused:patvars -Werror + +case class A(x: Int, y: Int) + +object Main { + for { + a <- List.empty[A] + A(x, y) = a + } yield x + y + + private val A(x, y) = A(42, 27) // nowarn for canonical name + private val A(w, z) = A(42, 27) // warn + private[this] val A(q, r) = A(42, 27) // warn + def W = w + def Q = q +} + +class C { + def f(x: Any) = + x match { + case x: String => // nowarn because x is not a new reference but an alias + case _ => + } + def g(x: Any) = + (x: @unchecked) match { + case x: String => // nowarn because x is not a new reference but an alias + case _ => + } + def s(x: Option[String]) = + x match { + case x: Some[String] => // nowarn because x is not a new reference but an alias + case _ => + } + def t(x: Option[String]) = + x match { + case Some(x) => // nowarn because x is not a new reference but an alias of sorts + case _ => + } + val email = "(.*)@(.*)".r + def spam(s: String) = + s match { + case email(s, addr) => // warn // warn each, multiple extraction + case _ => + } + def border(s: String) = + s match { + case email(s, _) => // nowarn only one patvar + case _ => + } + def `scala-dev#902`(v: (Int, (Boolean, String))): Unit = + v match { + case (i, v @ (_, _)) => i // warn multiple patvars + } +} + +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + def f = + (xs: Array[_]) match { + case xs => + } +} + +class Publix { + val A(w, z) = A(42, 27) // 
nowarn if an accessor is neither private nor local +} diff --git a/test/files/neg/t13307b.check b/test/files/neg/t13307b.check new file mode 100644 index 000000000000..4959408a56e8 --- /dev/null +++ b/test/files/neg/t13307b.check @@ -0,0 +1,9 @@ +Test.scala:8: error: Implementation restriction: trait T accesses protected method j inside a concrete trait method. +Add an accessor in a class extending class J as a workaround. + def t4 = j() + ^ +Test.scala:9: error: Implementation restriction: trait T accesses protected method j inside a concrete trait method. +Add an accessor in a class extending class J as a workaround. + def t5 = this.j() + ^ +2 errors diff --git a/test/files/neg/t13307b/J.java b/test/files/neg/t13307b/J.java new file mode 100644 index 000000000000..da76473a48c0 --- /dev/null +++ b/test/files/neg/t13307b/J.java @@ -0,0 +1,7 @@ +package j; + +public class J { + protected boolean i() { return false; } + protected boolean j() { return false; } + public boolean k() { return false; } +} diff --git a/test/files/neg/t13307b/Test.scala b/test/files/neg/t13307b/Test.scala new file mode 100644 index 000000000000..2025b035ecae --- /dev/null +++ b/test/files/neg/t13307b/Test.scala @@ -0,0 +1,15 @@ +package s + +trait T extends j.J { + override def i(): Boolean = true + def t1 = i() + def t2 = this.i() + + def t4 = j() + def t5 = this.j() + + def t7 = k() + def t8 = this.k() +} + +class C extends j.J with T diff --git a/test/files/neg/t1355.check b/test/files/neg/t1355.check index f9786e927181..cecae8f4866b 100644 --- a/test/files/neg/t1355.check +++ b/test/files/neg/t1355.check @@ -1,4 +1,4 @@ t1355.scala:1: error: type arguments [A[T]] do not conform to trait A's type parameter bounds [T <: A[A[T]]] trait A[T <: A[A[T]]] ^ -one error found +1 error diff --git a/test/files/neg/t1355.scala b/test/files/neg/t1355.scala index 3e5e375e9acf..3abb3c70ced8 100644 --- a/test/files/neg/t1355.scala +++ b/test/files/neg/t1355.scala @@ -1 +1 @@ -trait A[T <: A[A[T]]] \ No 
newline at end of file +trait A[T <: A[A[T]]] diff --git a/test/files/neg/t1364.check b/test/files/neg/t1364.check index cb8803abdcd9..05d868bbde93 100644 --- a/test/files/neg/t1364.check +++ b/test/files/neg/t1364.check @@ -1,5 +1,7 @@ -t1364.scala:9: error: overriding type T in trait A with bounds <: AnyRef{type S[-U]}; - type T has incompatible type +t1364.scala:9: error: incompatible type in overriding +type T <: AnyRef{type S[-U]} (defined in trait A); + found : AnyRef{type S[U] = U} + required: <: AnyRef{type S[-U]} type T = { type S[U] = U } ^ -one error found +1 error diff --git a/test/files/neg/t1364.scala b/test/files/neg/t1364.scala index 6b02580fa54c..dbc7dcf99c6d 100644 --- a/test/files/neg/t1364.scala +++ b/test/files/neg/t1364.scala @@ -12,4 +12,4 @@ object B extends A { def t : String = z } -// println(B.t) \ No newline at end of file +// println(B.t) diff --git a/test/files/neg/t1371.check b/test/files/neg/t1371.check index f2e9ffebe825..43a855eed620 100644 --- a/test/files/neg/t1371.check +++ b/test/files/neg/t1371.check @@ -1,4 +1,4 @@ -t1371.scala:1: error: unbound wildcard type +t1371.scala:2: error: unbound wildcard type trait A[T <: (_)] ^ -one error found +1 error diff --git a/test/files/neg/t1371.scala b/test/files/neg/t1371.scala index 4ad84a9d5c85..6e7e5009bbd8 100644 --- a/test/files/neg/t1371.scala +++ b/test/files/neg/t1371.scala @@ -1,2 +1,3 @@ +// trait A[T <: (_)] diff --git a/test/files/neg/t1422.check b/test/files/neg/t1422.check index 362d7ef36bf6..fc02e9fefd59 100644 --- a/test/files/neg/t1422.check +++ b/test/files/neg/t1422.check @@ -1,7 +1,7 @@ t1422.scala:1: error: private[this] not allowed for case class parameters case class A(private[this] val foo:String) { } ^ -t1422.scala:1: error: value foo in class A cannot be accessed in A +t1422.scala:1: error: value foo in class A cannot be accessed as a member of A from object A case class A(private[this] val foo:String) { } ^ -two errors found +2 errors diff --git 
a/test/files/neg/t1431.check b/test/files/neg/t1431.check index a17ba732435f..76462728ddb0 100644 --- a/test/files/neg/t1431.check +++ b/test/files/neg/t1431.check @@ -1,4 +1,4 @@ t1431.scala:8: error: class type required but X#Factory found def fun[X<:MyTrait with Singleton]() = new X#Factory().value ^ -one error found +1 error diff --git a/test/files/neg/t1432.check b/test/files/neg/t1432.check index e41f3453fec5..94afb3d2a05d 100644 --- a/test/files/neg/t1432.check +++ b/test/files/neg/t1432.check @@ -3,6 +3,6 @@ t1432.scala:12: error: type mismatch; (which expands to) (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double) required: Bug_NoUnique.TypeCon[Unit] (which expands to) (Int, Unit => Double) - def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x) - ^ -one error found + def test(x: TypeCon[Wrap[Unit]]): TypeCon[Unit] = wrap(x) + ^ +1 error diff --git a/test/files/neg/t1432.scala b/test/files/neg/t1432.scala index bdf23312800a..9fce0ce5fe09 100644 --- a/test/files/neg/t1432.scala +++ b/test/files/neg/t1432.scala @@ -1,14 +1,13 @@ object Bug_NoUnique { - type TypeCon[Env] = (Int, Env=>Double) + type TypeCon[Env] = (Int, Env => Double) - case class Wrap[E](parent:E) {} + case class Wrap[E](parent: E) {} type Alias2[E] = Wrap[E] - def wrap[E,A,Y](v : (A,E=>Y)) : (A,Alias2[E]=>Y) = + def wrap[E,A,Y](v: (A, E => Y)): (A, Alias2[E] => Y) = throw new Error("Body here") - def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x) + def test(x: TypeCon[Wrap[Unit]]): TypeCon[Unit] = wrap(x) } - diff --git a/test/files/neg/t1472.check b/test/files/neg/t1472.check index 91b56004a516..a0b3efe2dbd4 100644 --- a/test/files/neg/t1472.check +++ b/test/files/neg/t1472.check @@ -4,4 +4,4 @@ t1472.scala:7: error: illegal cyclic reference involving type Utmp t1472.scala:12: error: illegal cyclic reference involving type U type Ttmp = this.a.type#T ^ -two errors found +2 errors diff --git a/test/files/neg/t1477.check b/test/files/neg/t1477.check index 
72bffa327061..4893654ecf85 100644 --- a/test/files/neg/t1477.check +++ b/test/files/neg/t1477.check @@ -1,5 +1,5 @@ -t1477.scala:13: error: overriding type V in trait C with bounds <: Middle.this.D; - type V is a volatile type; cannot override a type with non-volatile upper bound - type V <: (D with U) +t1477.scala:13: error: volatile type member cannot override type member with non-volatile upper bound: +type V <: Middle.this.D (defined in trait C) + type V <: (this.D with U) ^ -one error found +1 error diff --git a/test/files/neg/t1477.scala b/test/files/neg/t1477.scala index a9a6d678ca5f..9e4bcba224b7 100644 --- a/test/files/neg/t1477.scala +++ b/test/files/neg/t1477.scala @@ -10,7 +10,7 @@ object Test extends App { } trait Middle extends C { - type V <: (D with U) + type V <: (this.D with U) } class D extends Middle { diff --git a/test/files/neg/t1503.check b/test/files/neg/t1503.check index 20282096c1e4..8fc44be33e2d 100644 --- a/test/files/neg/t1503.check +++ b/test/files/neg/t1503.check @@ -1,8 +1,21 @@ -t1503.scala:8: warning: The value matched by Whatever is bound to n, which may be used under the -unsound assumption that it has type Whatever.type, whereas we can only safely -count on it having type Any, as the pattern is matched using `==` (see scala/bug#1503). - def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n } - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +t1503.scala:10: error: type mismatch; + found : n.type (with underlying type Any) + required: Nil.type + val a: Nil.type = (Vector(): Any) match { case n @ Nil => n } // error + ^ +t1503.scala:12: error: type mismatch; + found : n.type (with underlying type Any) + required: Nil.type + val b: Nil.type = (Vector(): Any) match { case n @ (m @ Nil) => n } // error was: CCE + ^ +t1503.scala:18: error: type mismatch; + found : Any + required: Int + val d: Int = (1.0: Any) match { case x @ 1 => x } // error + ^ +t1503.scala:20: error: type mismatch; + found : Any + required: Int + val e: Int = (1.0: Any) match { case x @ (_: 1) => x } // error was: CCE + ^ +4 errors diff --git a/test/files/neg/t1503.scala b/test/files/neg/t1503.scala index 504defe51439..c8cf90f52960 100644 --- a/test/files/neg/t1503.scala +++ b/test/files/neg/t1503.scala @@ -1,9 +1,36 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Xlint -Werror -Wvalue-discard +// object Whatever { override def equals(x: Any) = true } class Test { - // when left to its own devices, and not under -Xfuture, the return type is Whatever.type - def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n } + def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n } // used to warn + + val a: Nil.type = (Vector(): Any) match { case n @ Nil => n } // error + + val b: Nil.type = (Vector(): Any) match { case n @ (m @ Nil) => n } // error was: CCE + + //val c = List(42) match { case xs @ (ys @ _*) => xs } // syntax error in parser + + // numeric value classes compare equals betwixt themselves + + val d: Int = (1.0: Any) match { case x @ 1 => x } // error + + val e: Int = (1.0: Any) match { case x @ (_: 1) => x } // error was: CCE + + // edge case, Boolean and Unit only equal themselves + + val f: Boolean = (true: Any) match { case b @ true => b } + val f2: Boolean = (true: Any) match { case b @ (_: true) => b } + val f3: true = (true: Any) match { case b @ (_: true) => b } 
+ + def g(): Unit = ((): Any) match { case u @ () => u } + def g2(): Unit = ((): Any) match { case u @ (_: Unit) => u } // no value discard + + def h(x: Any): String = x match { case s @ "hello, world" => s } + def h2(x: Any): String = x match { case s @ (_: "hello, world") => s } + def h3(x: Any): "hello, world" = x match { case s @ "hello, world" => s } + + //def j(x: Any): Array[Int] = x match { case xs @ Array(42) => xs } // found Array[T] required Array[Int] } diff --git a/test/files/neg/t1523.check b/test/files/neg/t1523.check index 273d0f8cf77a..b28a63a56f21 100644 --- a/test/files/neg/t1523.check +++ b/test/files/neg/t1523.check @@ -1,4 +1,4 @@ -t1523.scala:4: error: 25 more arguments than can be applied to method bug: (x: Any)Any +t1523.scala:4: error: too many arguments (found 26, which exceeds the largest Tuple) for method bug: (x: Any): Any def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a") ^ -one error found +1 error diff --git a/test/files/neg/t1548.check b/test/files/neg/t1548.check index 7f5a3f44e2a2..b0ba863d75fd 100644 --- a/test/files/neg/t1548.check +++ b/test/files/neg/t1548.check @@ -1,4 +1,4 @@ S.scala:2: error: method defaultMethod overrides nothing override def defaultMethod = "Boo!" ^ -one error found +1 error diff --git a/test/files/neg/t1565-alt.check b/test/files/neg/t1565-alt.check new file mode 100644 index 000000000000..63659089fd66 --- /dev/null +++ b/test/files/neg/t1565-alt.check @@ -0,0 +1,13 @@ +t1565-alt.scala:3: error: not a legal formal parameter. +Note: Tuples cannot be directly destructured in method or function parameters. + Either create a single parameter accepting the Tuple1, + or consider a pattern matching anonymous function: `{ case (param1, param1) => ... } +trait SelfFirst { (this: Int, that: Int) => + ^ +t1565-alt.scala:6: error: not a legal formal parameter. 
+Note: Tuples cannot be directly destructured in method or function parameters. + Either create a single parameter accepting the Tuple1, + or consider a pattern matching anonymous function: `{ case (param1, param1) => ... } +trait SelfSecond { (that: Int, this: Int) => + ^ +2 errors diff --git a/test/files/neg/t1565-alt.scala b/test/files/neg/t1565-alt.scala new file mode 100644 index 000000000000..43c6729a2275 --- /dev/null +++ b/test/files/neg/t1565-alt.scala @@ -0,0 +1,7 @@ +//No special error message for multiple parameters, it's not likely +//that the user was trying to use a self-type +trait SelfFirst { (this: Int, that: Int) => +} + +trait SelfSecond { (that: Int, this: Int) => +} diff --git a/test/files/neg/t1565.check b/test/files/neg/t1565.check new file mode 100644 index 000000000000..3e4315e722f9 --- /dev/null +++ b/test/files/neg/t1565.check @@ -0,0 +1,4 @@ +t1565.scala:1: error: self-type annotation may not be in parentheses +trait HasSelf { (this: Forbidden) => + ^ +1 error diff --git a/test/files/neg/t1565.scala b/test/files/neg/t1565.scala new file mode 100644 index 000000000000..7a219c50191b --- /dev/null +++ b/test/files/neg/t1565.scala @@ -0,0 +1,2 @@ +trait HasSelf { (this: Forbidden) => +} \ No newline at end of file diff --git a/test/files/neg/t1623.check b/test/files/neg/t1623.check index 251039ad30c3..be813c9d64fa 100644 --- a/test/files/neg/t1623.check +++ b/test/files/neg/t1623.check @@ -1,4 +1,4 @@ t1623.scala:11: error: class BImpl cannot be instantiated because it does not conform to its self-type test.BImpl with test.A val b = new BImpl ^ -one error found +1 error diff --git a/test/files/neg/t1672b.check b/test/files/neg/t1672b.check index 60ccf771742d..001d941ffbaa 100644 --- a/test/files/neg/t1672b.check +++ b/test/files/neg/t1672b.check @@ -13,4 +13,4 @@ t1672b.scala:34: error: could not optimize @tailrec annotated method bez: it con t1672b.scala:46: error: could not optimize @tailrec annotated method bar: it contains a recursive 
call not in tail position else 1 + (try { ^ -5 errors found +5 errors diff --git a/test/files/neg/t1701.check b/test/files/neg/t1701.check index d603e62e5a6a..ffebbc6ff510 100644 --- a/test/files/neg/t1701.check +++ b/test/files/neg/t1701.check @@ -1,4 +1,4 @@ t1701.scala:1: error: Cloneable does not take type parameters class A extends java.lang.Cloneable[String, Option, Int] ^ -one error found +1 error diff --git a/test/files/neg/t1705.check b/test/files/neg/t1705.check index 7f75bd0fb5bb..f11c1ae97816 100644 --- a/test/files/neg/t1705.check +++ b/test/files/neg/t1705.check @@ -4,4 +4,4 @@ t1705.scala:9: error: can't existentially abstract over parameterized type this. t1705.scala:14: error: can't existentially abstract over parameterized type C[String] val x1 = { ^ -two errors found +2 errors diff --git a/test/files/neg/t1838.check b/test/files/neg/t1838.check index af811a381027..a46839146b2b 100644 --- a/test/files/neg/t1838.check +++ b/test/files/neg/t1838.check @@ -1,7 +1,7 @@ -t1838.scala:5: error: `sealed' modifier can be used only for classes +t1838.scala:5: error: `sealed` modifier can be used only for classes sealed def f = 0 ^ -t1838.scala:6: error: `sealed' modifier can be used only for classes +t1838.scala:6: error: `sealed` modifier can be used only for classes sealed val v = 0 ^ -two errors found +2 errors diff --git a/test/files/neg/t1845.check b/test/files/neg/t1845.check index a6c82f565987..33dc69fb7b79 100644 --- a/test/files/neg/t1845.check +++ b/test/files/neg/t1845.check @@ -3,4 +3,4 @@ Note: this is often due in part to a class depending on a definition nested with If applicable, you may wish to try moving some members into another object. 
import lexical._ ^ -one error found +1 error diff --git a/test/files/neg/t1872.check b/test/files/neg/t1872.check index c5dc2a808077..15842de91e30 100644 --- a/test/files/neg/t1872.check +++ b/test/files/neg/t1872.check @@ -4,5 +4,5 @@ t1872.scala:3: warning: fruitless type test: a value of type Int cannot also be t1872.scala:3: error: isInstanceOf cannot test if value types are references. def f(x: Int) = x.isInstanceOf[util.Random] ^ -one warning found -one error found +1 warning +1 error diff --git a/test/files/neg/t1872.scala b/test/files/neg/t1872.scala index 0ebee0b32d4f..f3c26530489f 100644 --- a/test/files/neg/t1872.scala +++ b/test/files/neg/t1872.scala @@ -1,4 +1,4 @@ class A { // a true result here would necessitate profound soul searching def f(x: Int) = x.isInstanceOf[util.Random] -} \ No newline at end of file +} diff --git a/test/files/neg/t1878.check b/test/files/neg/t1878.check index 5814375515ce..1f7e12c26ff5 100644 --- a/test/files/neg/t1878.check +++ b/test/files/neg/t1878.check @@ -4,4 +4,4 @@ t1878.scala:3: error: bad simple pattern: bad use of _* (a sequence pattern must t1878.scala:9: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern) val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6)) ^ -two errors found +2 errors diff --git a/test/files/neg/t1909-object.check b/test/files/neg/t1909-object.check index 9bc03be957f4..852dc89450dd 100644 --- a/test/files/neg/t1909-object.check +++ b/test/files/neg/t1909-object.check @@ -1,6 +1,6 @@ -t1909-object.scala:5: warning: !!! scala/bug#1909 Unable to STATICally lift object InnerTrouble$2, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely. +t1909-object.scala:6: warning: !!! scala/bug#1909 Unable to STATICally lift object InnerTrouble$2, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely. 
object InnerTrouble ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t1909-object.scala b/test/files/neg/t1909-object.scala index 3d83415f84cb..cbce3bc84a77 100644 --- a/test/files/neg/t1909-object.scala +++ b/test/files/neg/t1909-object.scala @@ -1,4 +1,5 @@ -// scalac: -Xdev -Xfatal-warnings +//> using options -Xdev -Xfatal-warnings +// class Kaboom(a: Any) { def this() = { this({ diff --git a/test/files/neg/t1909b.check b/test/files/neg/t1909b.check index 9a683643ae9a..b74e66ab608f 100644 --- a/test/files/neg/t1909b.check +++ b/test/files/neg/t1909b.check @@ -1,4 +1,4 @@ t1909b.scala:4: error: this can be used only in a class, object, or template def bar() = this.z + 5 ^ -one error found +1 error diff --git a/test/files/neg/t1909b.scala b/test/files/neg/t1909b.scala index 6aa78704782d..637fa9619805 100644 --- a/test/files/neg/t1909b.scala +++ b/test/files/neg/t1909b.scala @@ -4,4 +4,4 @@ class Ticket1909 (x: Int) { def bar() = this.z + 5 bar }) -} \ No newline at end of file +} diff --git a/test/files/neg/t1960.check b/test/files/neg/t1960.check index bb6d3d3548b0..9b6292c8a511 100644 --- a/test/files/neg/t1960.check +++ b/test/files/neg/t1960.check @@ -4,4 +4,4 @@ class C(vr: Int, vl: Int) extends T { def ref = vr + vl } t1960.scala:2: error: parameter 'vl' requires field but conflicts with value vl in trait T class C(vr: Int, vl: Int) extends T { def ref = vr + vl } ^ -two errors found +2 errors diff --git a/test/files/neg/t1980.check b/test/files/neg/t1980.check deleted file mode 100644 index 6e3f2cb07de7..000000000000 --- a/test/files/neg/t1980.check +++ /dev/null @@ -1,12 +0,0 @@ -t1980.scala:3: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see scala/bug#1980. 
- def op1_:(x: => Any) = () // warn - ^ -t1980.scala:4: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see scala/bug#1980. - def op2_:(x: Any, y: => Any) = () // warn - ^ -t1980.scala:5: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see scala/bug#1980. - def op3_:(x: Any, y: => Any)(a: Any) = () // warn - ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found diff --git a/test/files/neg/t1980.scala b/test/files/neg/t1980.scala deleted file mode 100644 index f1851ac60b7b..000000000000 --- a/test/files/neg/t1980.scala +++ /dev/null @@ -1,10 +0,0 @@ -// scalac: -Xlint:by-name-right-associative -Xfatal-warnings -object Test { - def op1_:(x: => Any) = () // warn - def op2_:(x: Any, y: => Any) = () // warn - def op3_:(x: Any, y: => Any)(a: Any) = () // warn - - def op4() = () // no warn - def op5(x: => Any) = () // no warn - def op6_:(x: Any)(a: => Any) = () // no warn -} diff --git a/test/files/neg/t200.check b/test/files/neg/t200.check index f0c5e777720d..e1704b859e1b 100644 --- a/test/files/neg/t200.check +++ b/test/files/neg/t200.check @@ -2,4 +2,4 @@ t200.scala:7: error: method foo is defined twice; the conflicting method foo was defined at line 6:7 def foo: Int; ^ -one error found +1 error diff --git a/test/files/neg/t2031.check b/test/files/neg/t2031.check index 74aa6c9c0b1c..21eed7e7172f 100644 --- a/test/files/neg/t2031.check +++ b/test/files/neg/t2031.check @@ -1,6 +1,6 @@ t2031.scala:8: error: polymorphic expression cannot be instantiated to expected type; - found : [A]scala.collection.mutable.Builder[A,scala.collection.immutable.TreeSet[A]] - required: scala.collection.generic.CanBuildFrom[scala.collection.immutable.TreeSet[Int],Int,?] 
+ found : [A]scala.collection.mutable.ReusableBuilder[A,scala.collection.immutable.TreeSet[A]] + required: Ordering[Int] res0.map(x => x)(TreeSet.newBuilder) ^ -one error found +1 error diff --git a/test/files/neg/t2066.check b/test/files/neg/t2066.check index efade87e2615..8892c26f27d1 100644 --- a/test/files/neg/t2066.check +++ b/test/files/neg/t2066.check @@ -1,21 +1,31 @@ -t2066.scala:6: error: overriding method f in trait A1 of type [T[_]]=> Unit; - method f has incompatible type +t2066.scala:6: error: incompatible type in overriding +def f[T[_]]: Unit (defined in trait A1); + found : [T[+_]]Unit + required: [T[_]]Unit override def f[T[+_]] = () ^ -t2066.scala:10: error: overriding method f in trait A1 of type [T[_]]=> Unit; - method f has incompatible type +t2066.scala:10: error: incompatible type in overriding +def f[T[_]]: Unit (defined in trait A1); + found : [T[-_]]Unit + required: [T[_]]Unit override def f[T[-_]] = () ^ -t2066.scala:23: error: overriding method f in trait A2 of type [T[+_]]=> Unit; - method f has incompatible type +t2066.scala:23: error: incompatible type in overriding +def f[T[+_]]: Unit (defined in trait A2); + found : [T[-_]]Unit + required: [T[+_]]Unit override def f[T[-_]] = () ^ -t2066.scala:45: error: overriding method f in trait A4 of type [T[X[+_]]]=> Unit; - method f has incompatible type +t2066.scala:45: error: incompatible type in overriding +def f[T[X[+_]]]: Unit (defined in trait A4); + found : [T[X[_]]]Unit + required: [T[X[+_]]]Unit override def f[T[X[_]]] = () ^ -t2066.scala:53: error: overriding method f in trait A5 of type [T[X[-_]]]=> Unit; - method f has incompatible type +t2066.scala:53: error: incompatible type in overriding +def f[T[X[-_]]]: Unit (defined in trait A5); + found : [T[X[_]]]Unit + required: [T[X[-_]]]Unit override def f[T[X[_]]] = () ^ -5 errors found +5 errors diff --git a/test/files/neg/t2066b.check b/test/files/neg/t2066b.check index 097c44fef337..bd27921619b5 100644 --- 
a/test/files/neg/t2066b.check +++ b/test/files/neg/t2066b.check @@ -1,5 +1,7 @@ -t2066b.scala:7: error: overriding method f in trait A of type [T[_]](x: T[Int])T[Any]; - method f has incompatible type +t2066b.scala:7: error: incompatible type in overriding +def f[T[_]](x: T[Int]): T[Any] (defined in trait A); + found : [T(in method f)(in method f)[+_]](x: T(in method f)(in method f)[Int]): T(in method f)(in method f)[Any] + required: [T(in method f)(in method f)[_]](x: T(in method f)(in method f)[Int]): T(in method f)(in method f)[Any] def f[T[+_]](x : T[Int]) : T[Any] = x - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t2066b.scala b/test/files/neg/t2066b.scala index 46177b19f739..2f8ffde1446a 100644 --- a/test/files/neg/t2066b.scala +++ b/test/files/neg/t2066b.scala @@ -13,4 +13,4 @@ object Test extends App { val palias = (new B():A).f[P](p) palias.y = "hello" val z: Int = p.y -} \ No newline at end of file +} diff --git a/test/files/neg/t2070.check b/test/files/neg/t2070.check index ef1d08f7b74d..383c1f3b33b7 100644 --- a/test/files/neg/t2070.check +++ b/test/files/neg/t2070.check @@ -1,6 +1,6 @@ t2070.scala:8: error: The kind of trait T does not conform to the expected kind of type T[X] in trait A. 
t2070.B.T's type parameters do not match type T's expected parameters: -type X (in object B) has one type parameter, but type X (in trait A) has none +type X (in object B) has 1 type parameter, but type X (in trait A) has 0 trait T[X[_]] ^ -one error found +1 error diff --git a/test/files/neg/t2078.check b/test/files/neg/t2078.check index 00bb323a0bac..77e63eb9441f 100644 --- a/test/files/neg/t2078.check +++ b/test/files/neg/t2078.check @@ -1,4 +1,4 @@ -t2078.scala:2: error: contravariant type S occurs in covariant position in type => AnyRef{val x: S} of value f +t2078.scala:2: error: contravariant type S occurs in covariant position in type AnyRef{val x: S} of value f val f = new { val x = y } ^ -one error found +1 error diff --git a/test/files/neg/t2102.check b/test/files/neg/t2102.check index 6f70839d22d7..d87db088f0fa 100644 --- a/test/files/neg/t2102.check +++ b/test/files/neg/t2102.check @@ -3,4 +3,4 @@ t2102.scala:2: error: type mismatch; required: Iterator[_] (in scala.collection) val x: Iterator[_] = new java.util.ArrayList[Int]().iterator ^ -one error found +1 error diff --git a/test/files/neg/t2139.check b/test/files/neg/t2139.check index e26f2907617f..14225d45409e 100644 --- a/test/files/neg/t2139.check +++ b/test/files/neg/t2139.check @@ -3,4 +3,4 @@ t2139.scala:13: error: type mismatch; required: Nothing val z:Int=(u.f _)(4) ^ -one error found +1 error diff --git a/test/files/neg/t2144.check b/test/files/neg/t2144.check index 670e188c2a68..1f3ce9e1b206 100644 --- a/test/files/neg/t2144.check +++ b/test/files/neg/t2144.check @@ -1,4 +1,4 @@ t2144.scala:2: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement def foo[A](a: A) = new { def bar(x: A): A = x } ^ -one error found +1 error diff --git a/test/files/neg/t2148.check b/test/files/neg/t2148.check index 27b5dce50791..fbdd58705ee3 100644 --- a/test/files/neg/t2148.check +++ b/test/files/neg/t2148.check @@ -1,4 +1,4 @@ t2148.scala:9: error: A 
is not a legal prefix for a constructor val b = new A with A#A1 ^ -one error found +1 error diff --git a/test/files/neg/t2148.scala b/test/files/neg/t2148.scala index 25788be84a80..897f1457b3a7 100644 --- a/test/files/neg/t2148.scala +++ b/test/files/neg/t2148.scala @@ -7,4 +7,4 @@ class A { object Bob { val b = new A with A#A1 -} \ No newline at end of file +} diff --git a/test/files/neg/t2180.check b/test/files/neg/t2180.check index addc4cfbb84c..902c003079c2 100644 --- a/test/files/neg/t2180.check +++ b/test/files/neg/t2180.check @@ -1,6 +1,11 @@ -t2180.scala:3: error: type mismatch; - found : List[Any] - required: List[Mxml] - children.toList.flatMap ( e => { - ^ -one error found +t2180.scala:5: error: type mismatch; + found : Iterable[_] + required: scala.collection.IterableOnce[Mxml] + case s: scala.collection.Iterable[_] => s + ^ +t2180.scala:6: error: type mismatch; + found : Any + required: Mxml + case a => List(a) + ^ +2 errors diff --git a/test/files/neg/t2180.scala b/test/files/neg/t2180.scala index 54a9e49c1c15..bceda00667a4 100644 --- a/test/files/neg/t2180.scala +++ b/test/files/neg/t2180.scala @@ -2,7 +2,8 @@ class Mxml { private def processChildren( children:Seq[Any] ):List[Mxml] = { children.toList.flatMap ( e => { e match { - case s:scala.collection.Traversable[_] => s case a => List(a) + case s: scala.collection.Iterable[_] => s + case a => List(a) } }) } diff --git a/test/files/neg/t2206.check b/test/files/neg/t2206.check index 766f35d93a3f..426e0a336f3a 100644 --- a/test/files/neg/t2206.check +++ b/test/files/neg/t2206.check @@ -1,5 +1,9 @@ t2206.scala:10: error: value f is not a member of o.A - Note: implicit method ax is not applicable here because it comes after the application point and it lacks an explicit result type + Note: implicit method ax is not applicable here because it comes after the application point and it lacks an explicit result type. 
a.f() ^ -one error found +t2206.scala:13: warning: Implicit definition should have explicit type (inferred o.AX) [quickfixable] + implicit def ax(a: A) = new AX + ^ +1 warning +1 error diff --git a/test/files/neg/t2206.scala b/test/files/neg/t2206.scala index cd2ec225e9d0..529f5030b5f2 100644 --- a/test/files/neg/t2206.scala +++ b/test/files/neg/t2206.scala @@ -2,7 +2,7 @@ object o { class A class AX { - def f() { } + def f(): Unit = { } } import Implicits._ @@ -12,4 +12,4 @@ object o { object Implicits { implicit def ax(a: A) = new AX } -} \ No newline at end of file +} diff --git a/test/files/neg/t2208.check b/test/files/neg/t2208.check index 64bb3a77c83b..c87eb6147b90 100644 --- a/test/files/neg/t2208.check +++ b/test/files/neg/t2208.check @@ -1,4 +1,4 @@ t2208.scala:7: error: type arguments [Any] do not conform to type Alias's type parameter bounds [X <: Test.A] class C extends Alias[Any] // not ok, normalisation should check bounds before expanding Alias - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t2208.scala b/test/files/neg/t2208.scala index 43bb0adec5b6..53165cc81a8b 100644 --- a/test/files/neg/t2208.scala +++ b/test/files/neg/t2208.scala @@ -5,4 +5,4 @@ object Test { type Alias[X <: A] = B[X] class C extends Alias[Any] // not ok, normalisation should check bounds before expanding Alias -} \ No newline at end of file +} diff --git a/test/files/neg/t2213.check b/test/files/neg/t2213.check index 9fb3bb2eb76d..ae97b55a9768 100644 --- a/test/files/neg/t2213.check +++ b/test/files/neg/t2213.check @@ -1,8 +1,5 @@ -t2213.scala:9: error: class C needs to be abstract, since: -it has 4 unimplemented members. -/** As seen from class C, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ +t2213.scala:9: error: class C needs to be abstract. +Missing implementations for 4 members of class A. def f: Int = ??? def g: Int = ??? val x: Int = ??? @@ -10,11 +7,8 @@ it has 4 unimplemented members. 
class C extends A {} ^ -t2213.scala:11: error: object creation impossible, since: -it has 4 unimplemented members. -/** As seen from object Q, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. - */ +t2213.scala:11: error: object creation impossible. +Missing implementations for 4 members of class A. def f: Int = ??? def g: Int = ??? val x: Int = ??? @@ -22,4 +16,4 @@ it has 4 unimplemented members. object Q extends A { } ^ -two errors found +2 errors diff --git a/test/files/neg/t2213.scala b/test/files/neg/t2213.scala index af1df3ccfe4b..61050906f773 100644 --- a/test/files/neg/t2213.scala +++ b/test/files/neg/t2213.scala @@ -8,4 +8,4 @@ abstract class A { class C extends A {} -object Q extends A { } \ No newline at end of file +object Q extends A { } diff --git a/test/files/neg/t2275a.check b/test/files/neg/t2275a.check index dc16bc7962e9..3d434abd579b 100644 --- a/test/files/neg/t2275a.check +++ b/test/files/neg/t2275a.check @@ -10,4 +10,4 @@ t2275a.scala:4: error: ';' expected but 'else' found. t2275a.scala:7: error: '}' expected but eof found. } ^ -four errors found +4 errors diff --git a/test/files/neg/t2275b.check b/test/files/neg/t2275b.check index 706c04313bab..a16f0ae3836a 100644 --- a/test/files/neg/t2275b.check +++ b/test/files/neg/t2275b.check @@ -7,4 +7,4 @@ t2275b.scala:2: error: I encountered a '}' where I didn't expect one, maybe thi t2275b.scala:3: error: '}' expected but eof found. } ^ -three errors found +3 errors diff --git a/test/files/neg/t2296a.check b/test/files/neg/t2296a.check index 863b8610468e..ba251a26d1d0 100644 --- a/test/files/neg/t2296a.check +++ b/test/files/neg/t2296a.check @@ -2,4 +2,4 @@ S.scala:6: error: Implementation restriction: trait S accesses protected method Add an accessor in a class extending class J as a workaround. 
foo() ^ -one error found +1 error diff --git a/test/files/neg/t2296a/S.scala b/test/files/neg/t2296a/S.scala index 532d038a42ac..2b2e6cddaa19 100644 --- a/test/files/neg/t2296a/S.scala +++ b/test/files/neg/t2296a/S.scala @@ -2,7 +2,7 @@ package s { import j.J trait S extends J { - def bar() { + def bar(): Unit = { foo() } } @@ -11,8 +11,8 @@ package s { } object Test { - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { (new s.SC).bar() (new s.S { }).bar() } -} \ No newline at end of file +} diff --git a/test/files/neg/t2296b.check b/test/files/neg/t2296b.check index 07cc54d5733d..56f3d6e1a255 100644 --- a/test/files/neg/t2296b.check +++ b/test/files/neg/t2296b.check @@ -2,4 +2,4 @@ S_2.scala:6: error: Implementation restriction: trait S accesses protected metho Add an accessor in a class extending class J_1 as a workaround. foo() ^ -one error found +1 error diff --git a/test/files/neg/t2296b/S_2.scala b/test/files/neg/t2296b/S_2.scala index 6cdb0cfabab8..786ba0bfca6e 100644 --- a/test/files/neg/t2296b/S_2.scala +++ b/test/files/neg/t2296b/S_2.scala @@ -2,7 +2,7 @@ package s { import j.J_1 trait S extends J_1 { - def bar() { + def bar(): Unit = { foo() } } @@ -11,7 +11,7 @@ package s { } object Test { - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { (new s.SC).bar() (new s.S { }).bar() } diff --git a/test/files/neg/t2316.check b/test/files/neg/t2316.check index fea174566336..a79e617f6ccc 100644 --- a/test/files/neg/t2316.check +++ b/test/files/neg/t2316.check @@ -1,7 +1,7 @@ t2316.scala:28: error: ambiguous implicit values: - both method T1FromT3 in object T1 of type (implicit t3: test.T3)test.T1 - and method T1FromT2 in object T1 of type (implicit t2: test.T2)test.T1 + both method T1FromT2 in object T1 of type (implicit t2: test.T2): test.T1 + and method T1FromT3 in object T1 of type (implicit t3: test.T3): test.T1 match expected type test.T1 val t1 = requireT1 ^ -one error found +1 error diff --git 
a/test/files/neg/t2316.scala b/test/files/neg/t2316.scala index ccda9d5aac8f..bf4bb0ec6f46 100644 --- a/test/files/neg/t2316.scala +++ b/test/files/neg/t2316.scala @@ -40,4 +40,4 @@ object test { // one error found } -} \ No newline at end of file +} diff --git a/test/files/neg/t2336.check b/test/files/neg/t2336.check index 28acd4d17965..421f1fd2d4cb 100644 --- a/test/files/neg/t2336.check +++ b/test/files/neg/t2336.check @@ -1,4 +1,4 @@ t2336.scala:6: error: Foo[Int] is not a legal prefix for a constructor new Foo[Int]#Bar(0) ^ -one error found +1 error diff --git a/test/files/neg/t2388.check b/test/files/neg/t2388.check index 3f97608a4db1..ffe68fa17cb0 100644 --- a/test/files/neg/t2388.check +++ b/test/files/neg/t2388.check @@ -1,4 +1,4 @@ t2388.scala:2: error: recursive method search needs result type val searchField = new AnyRef { search() } ^ -one error found +1 error diff --git a/test/files/neg/t2405.check b/test/files/neg/t2405.check index 8638f02ad553..c944aafcba12 100644 --- a/test/files/neg/t2405.check +++ b/test/files/neg/t2405.check @@ -1,8 +1,8 @@ t2405.scala:8: error: could not find implicit value for parameter e: Int implicitly[Int] - ^ + ^ t2405.scala:6: warning: imported `y` is permanently hidden by definition of method y import A.{x => y} - ^ -one warning found -one error found + ^ +1 warning +1 error diff --git a/test/files/neg/t2416.check b/test/files/neg/t2416.check index 0899ad09d5eb..b14e8719db56 100644 --- a/test/files/neg/t2416.check +++ b/test/files/neg/t2416.check @@ -7,4 +7,4 @@ t2416.scala:8: error: type arguments [Boolean] do not conform to type B's type p t2416.scala:13: error: type arguments [String] do not conform to type B's type parameter bounds [Z <: Double] type C[Z <: A] = Z#B[String] // nuh-uh! 
^ -three errors found +3 errors diff --git a/test/files/neg/t2416.scala b/test/files/neg/t2416.scala index 6bb57a984b23..6fd2ca229492 100644 --- a/test/files/neg/t2416.scala +++ b/test/files/neg/t2416.scala @@ -11,4 +11,4 @@ object t2416b { object t2416c { trait A{type B[Z <: Double] = Int} type C[Z <: A] = Z#B[String] // nuh-uh! -} \ No newline at end of file +} diff --git a/test/files/neg/t2421b.check b/test/files/neg/t2421b.check index f666a7d9d732..a5f8e8b12029 100644 --- a/test/files/neg/t2421b.check +++ b/test/files/neg/t2421b.check @@ -1,4 +1,8 @@ t2421b.scala:12: error: could not find implicit value for parameter aa: Test.F[Test.A] f ^ -one error found \ No newline at end of file +t2421b.scala:10: warning: Implicit definition should have explicit type (inferred Test.F[X]) [quickfixable] + implicit def b[X <: B] = new F[X]() + ^ +1 warning +1 error diff --git a/test/files/neg/t2421b.scala b/test/files/neg/t2421b.scala index d8159a8c371a..4e23a74e833d 100644 --- a/test/files/neg/t2421b.scala +++ b/test/files/neg/t2421b.scala @@ -14,4 +14,4 @@ object Test { /* bug: error: type arguments [Test2.A] do not conform to method b's type parameter bounds [X <: Test2.B] -*/ \ No newline at end of file +*/ diff --git a/test/files/neg/t2441.check b/test/files/neg/t2441.check index 6eaacd8fd185..16b5230ec71a 100644 --- a/test/files/neg/t2441.check +++ b/test/files/neg/t2441.check @@ -1,4 +1,4 @@ t2441.scala:12: error: private class Y escapes its defining scope as part of type Some[B.Y] override def f = Some(new B.Y) ^ -one error found +1 error diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check index c0e7baed3607..bcc1eda7f090 100644 --- a/test/files/neg/t2442.check +++ b/test/files/neg/t2442.check @@ -6,6 +6,6 @@ t2442.scala:12: warning: match may not be exhaustive. It would fail on the following input: BLUE def g(e: MySecondEnum) = e match { ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t2442/t2442.scala b/test/files/neg/t2442/t2442.scala index 5f391341186a..738e12485936 100644 --- a/test/files/neg/t2442/t2442.scala +++ b/test/files/neg/t2442/t2442.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror class Test { import MyEnum._ diff --git a/test/files/neg/t2458.check b/test/files/neg/t2458.check new file mode 100644 index 000000000000..12c37834ff0f --- /dev/null +++ b/test/files/neg/t2458.check @@ -0,0 +1,6 @@ +test.scala:7: error: reference to X is ambiguous; +it is both defined in package p and imported subsequently by +import q.X + def test = f(X) + ^ +1 error diff --git a/test/files/neg/t2458/pq.scala b/test/files/neg/t2458/pq.scala new file mode 100644 index 000000000000..0b6929c1588a --- /dev/null +++ b/test/files/neg/t2458/pq.scala @@ -0,0 +1,7 @@ + +package p { + object X +} +package q { + object X +} diff --git a/test/files/neg/t2458/test.scala b/test/files/neg/t2458/test.scala new file mode 100644 index 000000000000..bc0d25957984 --- /dev/null +++ b/test/files/neg/t2458/test.scala @@ -0,0 +1,18 @@ +//> using options -Xsource:2.13 +import q.X + +package p { + object Test { + def f(x: q.X.type) = ??? 
+ def test = f(X) + } +} + +/* Should be ambiguous, but was: +test/files/neg/t2458/test.scala:7: error: type mismatch; + found : X (in p) + required: X (in q) + def test = f(X) + ^ +one error found +*/ diff --git a/test/files/neg/t2458b.check b/test/files/neg/t2458b.check new file mode 100644 index 000000000000..e9a143d3d06f --- /dev/null +++ b/test/files/neg/t2458b.check @@ -0,0 +1,6 @@ +test.scala:5: error: reference to CC is ambiguous; +it is both defined in package qq and imported subsequently by +import q._ + class C extends CC // ambiguous though same underlying + ^ +1 error diff --git a/test/files/neg/t2458b/q.scala b/test/files/neg/t2458b/q.scala new file mode 100644 index 000000000000..02f188f82424 --- /dev/null +++ b/test/files/neg/t2458b/q.scala @@ -0,0 +1,9 @@ + +package q { + object `package` { + type CC = qq.CC + } + package qq { + class CC + } +} diff --git a/test/files/neg/t2458b/test.scala b/test/files/neg/t2458b/test.scala new file mode 100644 index 000000000000..767187d14276 --- /dev/null +++ b/test/files/neg/t2458b/test.scala @@ -0,0 +1,6 @@ +//> using options -Xsource:2.13 +import q._ + +package q.qq { + class C extends CC // ambiguous though same underlying +} diff --git a/test/files/neg/t2458c.check b/test/files/neg/t2458c.check new file mode 100644 index 000000000000..ed2fd54d7ed7 --- /dev/null +++ b/test/files/neg/t2458c.check @@ -0,0 +1,11 @@ +test.scala:8: error: reference to f is ambiguous; +it is both defined in package object p and imported subsequently by +import q.X._ + f() + g() + ^ +test.scala:8: error: reference to g is ambiguous; +it is both defined in package object p and imported subsequently by +import q.X._ + f() + g() + ^ +2 errors diff --git a/test/files/neg/t2458c/p.scala b/test/files/neg/t2458c/p.scala new file mode 100644 index 000000000000..e35d0cfd1525 --- /dev/null +++ b/test/files/neg/t2458c/p.scala @@ -0,0 +1,8 @@ + +package p { + object `package` { + def f() = 42 + def f(i: Int) = i + 1 + def g() = 17 + } +} diff 
--git a/test/files/neg/t2458c/test.scala b/test/files/neg/t2458c/test.scala new file mode 100644 index 000000000000..8e5ea67c6def --- /dev/null +++ b/test/files/neg/t2458c/test.scala @@ -0,0 +1,14 @@ +//> using options -Xsource:2.13 +import q.X._ + +package p { + object Test { + def main(args: Array[String]): Unit = println { + // both defined in p in other unit, and imported from q.X = ambiguous + f() + g() + } + } +} +package q { + object X { def f = "bye" ; def g = "hi" } +} diff --git a/test/files/neg/t2462a.check b/test/files/neg/t2462a.check index 86d74b86d406..671acdc29346 100644 --- a/test/files/neg/t2462a.check +++ b/test/files/neg/t2462a.check @@ -1,4 +1,4 @@ -t2462a.scala:2: error: Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int]. - List(1,2,3).map[Int, List[String]](x => 1) - ^ -one error found +t2462a.scala:6: error: Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int]. + def foo(l: Lst[Int]) = l.map[Int, List[String]](x => 1) + ^ +1 error diff --git a/test/files/neg/t2462a.scala b/test/files/neg/t2462a.scala index 2d523b4dd896..308bb259b345 100644 --- a/test/files/neg/t2462a.scala +++ b/test/files/neg/t2462a.scala @@ -1,3 +1,7 @@ +class Lst[+A] { + def map[B, That](f: A => B)(implicit bf: collection.BuildFrom[List[A], B, That]): That = ??? 
+} + object Test { - List(1,2,3).map[Int, List[String]](x => 1) -} \ No newline at end of file + def foo(l: Lst[Int]) = l.map[Int, List[String]](x => 1) +} diff --git a/test/files/neg/t2462b.check b/test/files/neg/t2462b.check index d8c2b8cafd89..e395ef5ff980 100644 --- a/test/files/neg/t2462b.check +++ b/test/files/neg/t2462b.check @@ -1,11 +1,19 @@ -t2462b.scala:7: warning: Invalid implicitNotFound message for trait Meh in package test: +t2462b.scala:8: warning: Invalid implicitNotFound message for trait Meh in package test: The type parameters Too, Elem referenced in the message of the @implicitNotFound annotation are not defined by trait Meh. trait Meh[-From, +To] ^ -t2462b.scala:10: warning: Invalid implicitNotFound message for trait Meh2 in package test: +t2462b.scala:11: warning: Invalid implicitNotFound message for trait Meh2 in package test: The type parameter Elem referenced in the message of the @implicitNotFound annotation is not defined by trait Meh2. trait Meh2[-From, +To] ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +t2462b.scala:15: warning: Invalid implicitNotFound message for value theC: +The type parameter Uuh referenced in the message of the @implicitNotFound annotation is not in scope. + def m[Aaa](implicit @implicitNotFound("I see no C[${Uuh}]") theC: C[Aaa]) = ??? + ^ +t2462b.scala:21: warning: Invalid implicitNotFound message for value i: +The type parameters XX, ZZ, Nix referenced in the message of the @implicitNotFound annotation are not in scope. + def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? + ^ +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t2462b.scala b/test/files/neg/t2462b.scala index 8151ab319835..d1c826d81644 100644 --- a/test/files/neg/t2462b.scala +++ b/test/files/neg/t2462b.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xlint:implicit-not-found -Xfatal-warnings +// package test import scala.annotation.implicitNotFound @@ -8,3 +9,15 @@ trait Meh[-From, +To] @implicitNotFound(msg = "Cannot construct a collection of type ${To} ${Elem}.") trait Meh2[-From, +To] + +class C[T] +trait T { + def m[Aaa](implicit @implicitNotFound("I see no C[${Uuh}]") theC: C[Aaa]) = ??? + def n[Aaa](implicit @implicitNotFound("I see no C[${Aaa}]") theC: C[Aaa]) = ??? +} + +trait U[X, Y[_], Z[_, ZZ]] { + class I[R] { + def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ??? + } +} diff --git a/test/files/neg/t2462c.check b/test/files/neg/t2462c.check index 9ccbd58d466a..3b425b41730d 100644 --- a/test/files/neg/t2462c.check +++ b/test/files/neg/t2462c.check @@ -1,7 +1,16 @@ -t2462c.scala:19: error: No C of X$Y +t2462c.scala:26: error: No C of X$Y f[X$Y] ^ -t2462c.scala:25: error: No C of Foo[Int] +t2462c.scala:32: error: No C of Foo[Int] f[Foo[Int]] ^ -two errors found +t2462c.scala:35: error: No C of Foo[Int] + g[Foo[Int]] + ^ +t2462c.scala:38: error: I see no C[Foo[Int]] + h[Foo[Int]] + ^ +t2462c.scala:42: error: String List [?T0, ZZ] -> List[C[_]] Int Option[Long] -- . + i.m[Option[Long]] + ^ +5 errors diff --git a/test/files/neg/t2462c.scala b/test/files/neg/t2462c.scala index 29bc260daad1..f229a76e577b 100644 --- a/test/files/neg/t2462c.scala +++ b/test/files/neg/t2462c.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Werror +// import annotation._ @@ -14,6 +15,12 @@ trait X$$$$Y trait Foo[A] +trait U[X, Y[_], Z[_, ZZ]] { + class I[R] { + def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX}.") i: Int) = ??? + } +} + class Test { def f[A: C] = ??? 
f[X$Y] @@ -23,4 +30,14 @@ class Test { f[X$$$$Y] */ f[Foo[Int]] + + def g[Aaa](implicit theC: C[Aaa]) = ??? + g[Foo[Int]] + + def h[Aaa](implicit @implicitNotFound("I see no C[${Aaa}]") theC: C[Aaa]) = ??? + h[Foo[Int]] + + val u = new U[String, List, ({type T[A, _] = List[C[_]]})#T] { } + val i = new u.I[Int] + i.m[Option[Long]] } diff --git a/test/files/neg/t2488.check b/test/files/neg/t2488.check index 170dbf88ff19..03b6838519d1 100644 --- a/test/files/neg/t2488.check +++ b/test/files/neg/t2488.check @@ -1,31 +1,31 @@ -t2488.scala:7: error: overloaded method value f with alternatives: +t2488.scala:7: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int cannot be applied to (b: Int, Int) println(c.f(b = 2, 2)) ^ -t2488.scala:8: error: overloaded method value f with alternatives: +t2488.scala:8: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (a: Int, c: Int) + [which have no such parameter c] cannot be applied to (a: Int, c: Int) println(c.f(a = 2, c = 2)) ^ -t2488.scala:9: error: overloaded method value f with alternatives: +t2488.scala:9: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (Int, c: Int) + [which have no such parameter c] cannot be applied to (Int, c: Int) println(c.f(2, c = 2)) ^ -t2488.scala:10: error: overloaded method value f with alternatives: +t2488.scala:10: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (c: Int, Int) + [which have no such parameter c] cannot be applied to (c: Int, Int) println(c.f(c = 2, 2)) ^ -t2488.scala:11: error: overloaded method value f with alternatives: +t2488.scala:11: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int cannot be applied to (Int) println(c.f(2)) ^ -5 errors found +5 errors diff --git a/test/files/neg/t2494.check b/test/files/neg/t2494.check index 6d43011e248e..ed6d1f83e2d2 100644 --- a/test/files/neg/t2494.check +++ 
b/test/files/neg/t2494.check @@ -1,4 +1,4 @@ t2494.scala:1: error: recursive value a needs type object A { val a = { println("a = " + a); a = 1} } ^ -one error found +1 error diff --git a/test/files/neg/t2497.check b/test/files/neg/t2497.check new file mode 100644 index 000000000000..7de0a2a5beab --- /dev/null +++ b/test/files/neg/t2497.check @@ -0,0 +1,6 @@ +t2497.scala:21: error: cannot override a concrete member without a third member that's overridden by both (this rule is designed to prevent accidental overrides) +def eval: Int (defined in class Foo) + with abstract override def eval: Int (defined in trait DebugNode) + (new Foo with DebugNode).eval + ^ +1 error diff --git a/test/files/neg/t2497.scala b/test/files/neg/t2497.scala new file mode 100644 index 000000000000..01d2bc6ea030 --- /dev/null +++ b/test/files/neg/t2497.scala @@ -0,0 +1,22 @@ +trait Node { def eval: Int } + +trait DebugNode extends Node { + abstract override def eval: Int = { + println("before") + val res = super.eval + println("res= "+res) + res + } +} + +class Var extends Node { def eval = 42 } +class Foo { def eval = 42 } + +class C { + // typechecks, correct + (new Var with DebugNode).eval + + // should *not* typecheck but does! 
+ // Foo.eval does not override Node.eval, but typechecker accepts this anyway + (new Foo with DebugNode).eval +} diff --git a/test/files/neg/t2497b.check b/test/files/neg/t2497b.check new file mode 100644 index 000000000000..661690b8c890 --- /dev/null +++ b/test/files/neg/t2497b.check @@ -0,0 +1,6 @@ +t2497b.scala:6: error: cannot override a concrete member without a third member that's overridden by both (this rule is designed to prevent accidental overrides) +def f: Int (defined in trait B) + with override def f: Int (defined in trait AAA) +class C extends B with AAA + ^ +1 error diff --git a/test/files/neg/t2497b.scala b/test/files/neg/t2497b.scala new file mode 100644 index 000000000000..524c57bb27ba --- /dev/null +++ b/test/files/neg/t2497b.scala @@ -0,0 +1,6 @@ +// from https://github.com/scala/scala/pull/7439#issuecomment-470101184 +trait A { def f: Int } +trait AA extends A { def f = -1 } +trait AAA extends A { override def f = 1 } +trait B { def f = -1 } +class C extends B with AAA diff --git a/test/files/neg/t2509-2.check b/test/files/neg/t2509-2.check index 3c0fe3fef224..7194b6c0185c 100644 --- a/test/files/neg/t2509-2.check +++ b/test/files/neg/t2509-2.check @@ -1,7 +1,7 @@ t2509-2.scala:27: error: ambiguous implicit values: - both value xb in object Test of type => X[B,Int] - and value xa in object Test of type => X[A,Boolean] + both value xb in object Test of type X[B,Int] + and value xa in object Test of type X[A,Boolean] match expected type X[B,U] val fb = f(new B) ^ -one error found +1 error diff --git a/test/files/neg/t2509-2.scala b/test/files/neg/t2509-2.scala index 032e6083acca..4a2fadfde231 100644 --- a/test/files/neg/t2509-2.scala +++ b/test/files/neg/t2509-2.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3.0 +//> using options -Xsource:3 -Xsource-features:implicit-resolution class A class B extends A class C extends B diff --git a/test/files/neg/t2509-3.check b/test/files/neg/t2509-3.check new file mode 100644 index 000000000000..2f28ffff79ad 
--- /dev/null +++ b/test/files/neg/t2509-3.check @@ -0,0 +1,4 @@ +t2509-3.scala:32: error: value value is not a member of B + println("B: " + b.value) + ^ +1 error diff --git a/test/files/neg/t2509-3.scala b/test/files/neg/t2509-3.scala new file mode 100644 index 000000000000..efb736aab7ae --- /dev/null +++ b/test/files/neg/t2509-3.scala @@ -0,0 +1,34 @@ +//> using options -Xsource:3 -Xsource-features:implicit-resolution +class A +class B extends A + +trait Y { + def value: String +} + +trait X[-T] { + def y(t: T): Y +} + +trait Z[-T] extends X[T] + +object ZA extends Z[A] { + def y(a: A) = new Y { def value = s"${a.getClass}: AValue" } +} + +object XB extends X[B] { + def y(b: B) = new Y { def value = s"${b.getClass}: BValue" } +} + +object Test { + implicit def f[T](t: T)(implicit x: X[T]): Y = x.y(t) + implicit val za: Z[A] = ZA + implicit val xb: X[B] = XB + + def main(argv: Array[String]): Unit = { + val a = new A + val b = new B + println("A: " + a.value) + println("B: " + b.value) + } +} diff --git a/test/files/neg/t2509-7b.check b/test/files/neg/t2509-7b.check new file mode 100644 index 000000000000..9071284c5ba8 --- /dev/null +++ b/test/files/neg/t2509-7b.check @@ -0,0 +1,7 @@ +t2509-7b.scala:31: error: ambiguous implicit values: + both method make in object X of type Both[X,X] + and method make in trait Factory of type Both[Y,Y] + match expected type Both[Y,X] + get + ^ +1 error diff --git a/test/files/neg/t2509-7b.scala b/test/files/neg/t2509-7b.scala new file mode 100644 index 000000000000..f1bb32aa0e2a --- /dev/null +++ b/test/files/neg/t2509-7b.scala @@ -0,0 +1,32 @@ +//> using options -Xsource:3 -Xsource-features:implicit-resolution +class Both[-A, +B] + +trait Factory[A] { + implicit def make: Both[A, A] = new Both[A, A] +} + +trait X +object X extends Factory[X] { + override implicit def make: Both[X, X] = super.make +} + +class Y extends X +object Y extends Factory[Y] { + // See test/files/pos/t2509-7a.scala ... 
discussion below + // override implicit def make: Both[Y, Y] = super.make +} + +object Test { + def get(implicit ev: Both[Y, X]) = ev + + // There are two possible implicits here: X.make and Y.make, neither are + // subtype of each other, so who wins? + // - Under the old scheme it's X.make because `isAsGood` sees that X.make is defined + // in X whereas Y.make is defined in Factory + // - Under the new scheme it's ambiguous because we replace contravariance by covariance + // in top-level type parameters so Y.make is treated as a subtype of X.make + // In both schemes we can get Y.make to win by uncommenting the override for make in Y + // (Y wins against X because `isDerived` also considers the subtyping relationships + // of companion classes) + get +} diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check index a0a960f0eac6..1da33ba0d8f9 100644 --- a/test/files/neg/t2641.check +++ b/test/files/neg/t2641.check @@ -1,7 +1,7 @@ -t2641.scala:18: error: wrong number of type arguments for ManagedSeq, should be 2 +t2641.scala:22: error: wrong number of type arguments for ManagedSeq, should be 2 with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]] ^ -t2641.scala:27: error: value managedIterator is not a member of ManagedSeq[A,Coll] - override def managedIterator = self.managedIterator slice (from, until) +t2641.scala:28: error: value managedIterator is not a member of ManagedSeq[A,Coll] + override def managedIterator = self.managedIterator.slice(0, 0) ^ -two errors found +2 errors diff --git a/test/files/neg/t2641.scala b/test/files/neg/t2641.scala index bc048e039ecf..db89a4ff68fe 100644 --- a/test/files/neg/t2641.scala +++ b/test/files/neg/t2641.scala @@ -1,30 +1,31 @@ -import scala.collection._ -import scala.collection.generic._ -import scala.collection.mutable.Builder - - +import collection.IterableOps abstract class ManagedSeqStrict[+A] - extends Traversable[A] - with GenericTraversableTemplate[A, ManagedSeqStrict] -{ - override def 
companion: GenericCompanion[ManagedSeqStrict] = null - - override def foreach[U](f: A => U): Unit = () + extends Iterable[A] + with IterableOps[A, ManagedSeqStrict, ManagedSeqStrict[A]] +//{ +// override def companion: GenericCompanion[ManagedSeqStrict] = null +// +// override def foreach[U](f: A => U): Unit = () +//} + +trait TraversableViewLike[+A, ++Coll, ++This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]] +extends Iterable[A] with IterableOps[A, Iterable, This] { + trait Transformed[+B] } +trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] trait ManagedSeq[+A, +Coll] extends ManagedSeqStrict[A] with TraversableView[A, ManagedSeqStrict[A]] with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]] { self => - - override def underlying = throw new Exception("no underlying") - //trait Transformed[+B] extends ManagedSeq[B] with super.Transformed[B] trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B] trait Sliced extends Transformed[A] with super.Sliced { - override def managedIterator = self.managedIterator slice (from, until) + override def managedIterator = self.managedIterator.slice(0, 0) } } diff --git a/test/files/neg/t2712-1.check b/test/files/neg/t2712-1.check deleted file mode 100644 index 61e4b6b1499c..000000000000 --- a/test/files/neg/t2712-1.check +++ /dev/null @@ -1,13 +0,0 @@ -t2712-1.scala:7: error: no type parameters for method foo: (m: M[A])Unit exist so that it can be applied to arguments (test.Two[Int,String]) - --- because --- -argument expression's type is not compatible with formal parameter type; - found : test.Two[Int,String] - required: ?M[?A] - def test(ma: Two[Int, String]) = foo(ma) // should fail with -Ypartial-unification *disabled* - ^ -t2712-1.scala:7: error: type mismatch; - found : test.Two[Int,String] - required: M[A] - def test(ma: Two[Int, String]) = foo(ma) // should fail with -Ypartial-unification *disabled* - ^ -two errors found 
diff --git a/test/files/neg/t2712-1.scala b/test/files/neg/t2712-1.scala deleted file mode 100644 index f7967d71b689..000000000000 --- a/test/files/neg/t2712-1.scala +++ /dev/null @@ -1,8 +0,0 @@ -package test - -trait Two[A, B] - -object Test { - def foo[M[_], A](m: M[A]) = () - def test(ma: Two[Int, String]) = foo(ma) // should fail with -Ypartial-unification *disabled* -} diff --git a/test/files/neg/t2712-2.check b/test/files/neg/t2712-2.check index 551d0f94ea85..bb61736f6463 100644 --- a/test/files/neg/t2712-2.check +++ b/test/files/neg/t2712-2.check @@ -1,13 +1,13 @@ -t2712-2.scala:17: error: type mismatch; +t2712-2.scala:16: error: type mismatch; found : test.Foo required: test.Two[test.X1,Object] Note: test.X2 <: Object (and test.Foo <: test.Two[test.X1,test.X2]), but trait Two is invariant in type B. You may wish to define B as +B instead. (SLS 4.5) - test1(foo): One[X3] // fails with -Ypartial-unification enabled + test1(foo): One[X3] ^ -t2712-2.scala:17: error: type mismatch; +t2712-2.scala:16: error: type mismatch; found : test.Two[test.X1,Object] required: test.One[test.X3] - test1(foo): One[X3] // fails with -Ypartial-unification enabled + test1(foo): One[X3] ^ -two errors found +2 errors diff --git a/test/files/neg/t2712-2.scala b/test/files/neg/t2712-2.scala index b6ed1c2ed4db..a11cd86e4745 100644 --- a/test/files/neg/t2712-2.scala +++ b/test/files/neg/t2712-2.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test class X1 @@ -14,6 +13,6 @@ object Test { val foo = new Foo - test1(foo): One[X3] // fails with -Ypartial-unification enabled - test1(foo): Two[X1, X2] // fails without -Ypartial-unification + test1(foo): One[X3] + test1(foo): Two[X1, X2] } diff --git a/test/files/neg/t2712-3.check b/test/files/neg/t2712-3.check deleted file mode 100644 index a84d96bf09c9..000000000000 --- a/test/files/neg/t2712-3.check +++ /dev/null @@ -1,6 +0,0 @@ -t2712-3.scala:17: error: type mismatch; - found : test.One[test.X3] - required: 
test.Two[test.X1,test.X2] - test1(foo): Two[X1, X2] // fails without -Ypartial-unification - ^ -one error found diff --git a/test/files/neg/t2712-3.scala b/test/files/neg/t2712-3.scala deleted file mode 100644 index 85ed52348903..000000000000 --- a/test/files/neg/t2712-3.scala +++ /dev/null @@ -1,18 +0,0 @@ -package test - -class X1 -class X2 -class X3 - -trait One[A] -trait Two[A, B] - -class Foo extends Two[X1, X2] with One[X3] -object Test { - def test1[M[_], A](x: M[A]): M[A] = x - - val foo = new Foo - - test1(foo): One[X3] // fails with -Ypartial-unification enabled - test1(foo): Two[X1, X2] // fails without -Ypartial-unification -} diff --git a/test/files/neg/t2712-8.check b/test/files/neg/t2712-8.check new file mode 100644 index 000000000000..f7f448ad41cb --- /dev/null +++ b/test/files/neg/t2712-8.check @@ -0,0 +1,13 @@ +t2712-8.scala:9: error: no type parameters for method foo: (x: D[D[Boolean]]): Nothing exist so that it can be applied to arguments (Test.Quux[Int]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : Test.Quux[Int] + required: ?D[?D[Boolean]] + foo(bar) + ^ +t2712-8.scala:9: error: type mismatch; + found : Test.Quux[Int] + required: D[D[Boolean]] + foo(bar) + ^ +2 errors diff --git a/test/files/neg/t2712-8.scala b/test/files/neg/t2712-8.scala new file mode 100644 index 000000000000..f411a606b9cf --- /dev/null +++ b/test/files/neg/t2712-8.scala @@ -0,0 +1,10 @@ +object Test extends App { + class L[A] + class Quux0[B, CC[_]] + class Quux[C] extends Quux0[C, L] + + def foo[D[_]](x: D[D[Boolean]]) = ??? + def bar: Quux[Int] = ??? 
+ + foo(bar) +} diff --git a/test/files/neg/t276.check b/test/files/neg/t276.check index b241953a2256..4edc719f28ee 100644 --- a/test/files/neg/t276.check +++ b/test/files/neg/t276.check @@ -1,5 +1,5 @@ -t276.scala:6: error: overriding type Bar in class Foo, which equals (Int, Int); - class Bar cannot be used here - classes can only override abstract types +t276.scala:6: error: classes can only override abstract types; cannot override: +type Bar = (Int, Int) (defined in class Foo) class Bar ^ -one error found +1 error diff --git a/test/files/neg/t2773.check b/test/files/neg/t2773.check index a5ffb5fbd572..0974769451a2 100644 --- a/test/files/neg/t2773.check +++ b/test/files/neg/t2773.check @@ -1,7 +1,7 @@ t2773.scala:5: error: value x is not a member of C import c.x - ^ + ^ t2773.scala:6: error: not found: value x println(x) ^ -two errors found +2 errors diff --git a/test/files/neg/t2775.check b/test/files/neg/t2775.check index 934a970f2e8d..74dc28f27c34 100644 --- a/test/files/neg/t2775.check +++ b/test/files/neg/t2775.check @@ -1,4 +1,4 @@ t2775.scala:1: error: cannot find class tag for element type B.this.T trait B[S] { type T = S; val c = new Array[T](1) } ^ -one error found +1 error diff --git a/test/files/neg/t2779.check b/test/files/neg/t2779.check index 9881d5182c27..62396c55b5ef 100644 --- a/test/files/neg/t2779.check +++ b/test/files/neg/t2779.check @@ -2,4 +2,4 @@ t2779.scala:16: error: method f is defined twice; the conflicting method f was defined at line 15:18 override def f = List(M1) ^ -one error found +1 error diff --git a/test/files/neg/t278.check b/test/files/neg/t278.check index 940b8edcefb0..0565d92d927b 100644 --- a/test/files/neg/t278.check +++ b/test/files/neg/t278.check @@ -1,6 +1,6 @@ -t278.scala:5: error: overloaded method value a with alternatives: - => C.this.A => Unit - => () => Unit +t278.scala:5: error: overloaded method a with alternatives: + C.this.A => Unit + () => Unit does not take type parameters println(a[A]) ^ @@ -8,4 +8,4 @@ 
t278.scala:4: error: method a is defined twice; the conflicting method a was defined at line 3:7 def a = (p:A) => () ^ -two errors found +2 errors diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check index 692afd2de87a..e99fb83d1e8a 100644 --- a/test/files/neg/t2796.check +++ b/test/files/neg/t2796.check @@ -1,9 +1,18 @@ -t2796.scala:12: warning: early type members are deprecated. Move them to the regular body: the semantics are the same. +t2796.scala:9: warning: early initializers are deprecated; they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. +trait T1 extends { + ^ +t2796.scala:13: warning: early initializers are deprecated; they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. +trait T2 extends { + ^ +t2796.scala:14: warning: early type members are deprecated: move them to the regular body; the semantics are the same type X = Int // warn ^ -t2796.scala:8: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized. +t2796.scala:17: warning: early initializers are deprecated; they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. +class C1 extends { + ^ +t2796.scala:10: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized. val abstractVal = "T1.abstractVal" // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/t2796.scala b/test/files/neg/t2796.scala index f53c6405d12d..7a718fc0cade 100644 --- a/test/files/neg/t2796.scala +++ b/test/files/neg/t2796.scala @@ -1,4 +1,6 @@ -// scalac: -deprecation -Xfatal-warnings +// +//> using options -deprecation -Xfatal-warnings +// trait Base { val abstractVal: String final val useAbstractVal = abstractVal @@ -17,7 +19,7 @@ class C1 extends { } with Base object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { assert(new C1 ().useAbstractVal == "C1.abstractVal") // This currently fails. a more ambitious approach to this ticket would add $earlyinit$ // to traits and call it from the right places in the right order. diff --git a/test/files/neg/t2799.check b/test/files/neg/t2799.check new file mode 100644 index 000000000000..87419d7ce8cc --- /dev/null +++ b/test/files/neg/t2799.check @@ -0,0 +1,21 @@ +t2799.scala:7: warning: trait T is deprecated: other mother +class C[A: T] { + ^ +t2799.scala:8: warning: trait T is deprecated: other mother + def f = (t: T[A]) => null.asInstanceOf[T[A]] + ^ +t2799.scala:8: warning: trait T is deprecated: other mother + def f = (t: T[A]) => null.asInstanceOf[T[A]] + ^ +t2799.scala:9: warning: method int2float in object Int is deprecated (since 2.13.1): Implicit conversion from Int to Float is dangerous because it loses precision. Write `.toFloat` instead. + def g() = implicitly[Int => Float] + ^ +t2799.scala:16: warning: trait T is deprecated: other mother +object T extends T[String] { + ^ +t2799.scala:17: warning: class Bob is deprecated: hi mom + def t = Bob() // warn + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/t2799.scala b/test/files/neg/t2799.scala new file mode 100644 index 000000000000..85bce2a27b94 --- /dev/null +++ b/test/files/neg/t2799.scala @@ -0,0 +1,22 @@ +//> using options -Xlint -Werror + +@deprecated("other mother", "") +trait T[A] + +// warn even though parameter is synthetic +class C[A: T] { + def f = (t: T[A]) => null.asInstanceOf[T[A]] + def g() = implicitly[Int => Float] +} + +@deprecated("hi mom", "") +case class Bob () + +// No exclusion for companion of deprecated T +object T extends T[String] { + def t = Bob() // warn +} + +class Client { + def test = T.t // if no warn at t, then this code appears deprecation-free +} diff --git a/test/files/neg/t2801.check b/test/files/neg/t2801.check index 25320de5bc4e..8ab06febcc1d 100644 --- a/test/files/neg/t2801.check +++ b/test/files/neg/t2801.check @@ -3,4 +3,4 @@ t2801.scala:2: error: type mismatch; required: A def f[A <: AnyRef] = { val a: A = null ; a } ^ -one error found +1 error diff --git a/test/files/neg/t284.check b/test/files/neg/t284.check index 047701f1512f..0fb6286f2da0 100644 --- a/test/files/neg/t284.check +++ b/test/files/neg/t284.check @@ -1,6 +1,6 @@ -t284.scala:3: warning: Detected apparent refinement of Unit; are you missing an '=' sign? +t284.scala:4: warning: Detected apparent refinement of Unit; are you missing an '=' sign? def f1(a: T): Unit { } ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t284.scala b/test/files/neg/t284.scala index 1ac90706f3c2..40118ca29612 100644 --- a/test/files/neg/t284.scala +++ b/test/files/neg/t284.scala @@ -1,6 +1,7 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// trait B[T] { def f1(a: T): Unit { } def f2(a: T): Unit - def f3(a: T) { } + def f3(a: T): Unit = { } } diff --git a/test/files/neg/t2866.check b/test/files/neg/t2866.check index 260e0975d9b0..68c6bb37c980 100644 --- a/test/files/neg/t2866.check +++ b/test/files/neg/t2866.check @@ -1,17 +1,17 @@ t2866.scala:42: error: ambiguous implicit values: both value two of type Int - and value one in object A of type => Int + and value one in object A of type Int match expected type Int assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6 ^ t2866.scala:50: error: ambiguous implicit values: both value two of type Int - and value one in object A of type => Int + and value one in object A of type Int match expected type Int assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6 ^ t2866.scala:30: warning: imported `one` is permanently hidden by definition of value one - import A.one // warning: imported `one' is permanently hidden by definition of value one. + import A.one // warning: imported `one` is permanently hidden by definition of value one. ^ -one warning found -two errors found +1 warning +2 errors diff --git a/test/files/neg/t2866.scala b/test/files/neg/t2866.scala index 6be8bf9e8909..d80822349244 100644 --- a/test/files/neg/t2866.scala +++ b/test/files/neg/t2866.scala @@ -1,7 +1,7 @@ // for 2.7.x compatibility object A { - implicit val one = 1 + implicit val one: Int = 1 } object Test { @@ -27,7 +27,7 @@ object Test { } locally { - import A.one // warning: imported `one' is permanently hidden by definition of value one. + import A.one // warning: imported `one` is permanently hidden by definition of value one. // !!! Really? 
//assert(implicitly[Int] == 1) implicit val one = 2 diff --git a/test/files/neg/t2870.check b/test/files/neg/t2870.check index 99522eca651c..6fcc2e7dc762 100644 --- a/test/files/neg/t2870.check +++ b/test/files/neg/t2870.check @@ -6,4 +6,4 @@ Note: this is often due in part to a class depending on a definition nested with If applicable, you may wish to try moving some members into another object. import scala.util.Properties.javaClassPath ^ -two errors found +2 errors diff --git a/test/files/neg/t2910.check b/test/files/neg/t2910.check index 44bf1993db70..fd98de338b06 100644 --- a/test/files/neg/t2910.check +++ b/test/files/neg/t2910.check @@ -1,16 +1,16 @@ -t2910.scala:3: error: forward reference extends over definition of value ret - val ret = l.map({ case MyMatch(id) => id }) - ^ -t2910.scala:9: error: forward reference extends over definition of value z +t2910.scala:3: error: forward reference to value MyMatch defined on line 4 extends over definition of value ret + val ret = l.collect({ case MyMatch(id) => id }) + ^ +t2910.scala:9: error: forward reference to lazy value s defined on line 11 extends over definition of value z println(s.length) ^ -t2910.scala:16: error: forward reference extends over definition of value z +t2910.scala:16: error: forward reference to lazy value x defined on line 18 extends over definition of value z x ^ -t2910.scala:30: error: forward reference extends over definition of value x +t2910.scala:30: error: forward reference to value x defined on line 31 extends over definition of value x lazy val f: Int = x ^ -t2910.scala:35: error: forward reference extends over definition of variable x +t2910.scala:35: error: forward reference to lazy value g defined on line 37 extends over definition of variable x lazy val f: Int = g ^ -5 errors found +5 errors diff --git a/test/files/neg/t2910.scala b/test/files/neg/t2910.scala index d9a781032c0e..d8446eb93ff7 100644 --- a/test/files/neg/t2910.scala +++ b/test/files/neg/t2910.scala @@ -1,17 +1,17 
@@ object Junk { def f(l: List[String]): List[String] = { - val ret = l.map({ case MyMatch(id) => id }) + val ret = l.collect({ case MyMatch(id) => id }) val MyMatch = "(\\d+)".r ret } - def test2() { + def test2(): Unit = { println(s.length) val z = 0 lazy val s = "abc" } - def test4() { + def test4(): Unit = { lazy val x = { x val z = 0 diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check index f45494d78131..61f8bb6943ef 100644 --- a/test/files/neg/t2918.check +++ b/test/files/neg/t2918.check @@ -4,4 +4,4 @@ t2918.scala:2: error: illegal cyclic reference involving type A t2918.scala:2: error: cyclic aliasing or subtyping involving type A def g[X, A[X] <: A[X]](x: A[X]) = x ^ -two errors found +2 errors diff --git a/test/files/neg/t2968.check b/test/files/neg/t2968.check index 5d2387f98c21..949d82bbbf92 100644 --- a/test/files/neg/t2968.check +++ b/test/files/neg/t2968.check @@ -1,10 +1,10 @@ -t2968.scala:8: error: Missing closing brace `}' assumed here +t2968.scala:8: error: Missing closing brace `}` assumed here } // missing brace ^ -t2968.scala:17: error: Missing closing brace `}' assumed here +t2968.scala:17: error: Missing closing brace `}` assumed here } // missing brace ^ -t2968.scala:26: error: Missing closing brace `}' assumed here +t2968.scala:26: error: Missing closing brace `}` assumed here } // missing brace ^ -three errors found +3 errors diff --git a/test/files/neg/t2968b.check b/test/files/neg/t2968b.check index 36d25a2d1229..22a3ddb82f89 100644 --- a/test/files/neg/t2968b.check +++ b/test/files/neg/t2968b.check @@ -1,4 +1,4 @@ t2968b.scala:7: error: '}' expected but eof found. // missing brace ^ -one error found +1 error diff --git a/test/files/neg/t2973.check b/test/files/neg/t2973.check index 582fe0063dc8..464abdfadce8 100644 --- a/test/files/neg/t2973.check +++ b/test/files/neg/t2973.check @@ -1,4 +1,4 @@ t2973.scala:1: error: ';' expected but 'package' found. 
package foo {} package bar {} ^ -one error found \ No newline at end of file +1 error diff --git a/test/files/neg/t3006.check b/test/files/neg/t3006.check index 2447eebc9cba..283864fc77ed 100644 --- a/test/files/neg/t3006.check +++ b/test/files/neg/t3006.check @@ -3,4 +3,8 @@ t3006.scala:8: error: type mismatch; required: Int println(A(3) + "H") ^ -one error found +t3006.scala:6: warning: Implicit definition should have explicit type (inferred Test.Foo) [quickfixable] + implicit def aToFoo(x: A) = new Foo(x); + ^ +1 warning +1 error diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check index 729db844e7f0..2f4c16974434 100644 --- a/test/files/neg/t3015.check +++ b/test/files/neg/t3015.check @@ -3,4 +3,4 @@ t3015.scala:7: error: scrutinee is incompatible with pattern type; required: String val b(foo) = "foo" ^ -one error found +1 error diff --git a/test/files/neg/t3098.check b/test/files/neg/t3098.check index 8190a292146b..af0ae368f815 100644 --- a/test/files/neg/t3098.check +++ b/test/files/neg/t3098.check @@ -1,7 +1,7 @@ -b.scala:4: warning: match may not be exhaustive. +b.scala:3: warning: match may not be exhaustive. It would fail on the following input: (_ : C) def f = (null: T) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t3098/a.scala b/test/files/neg/t3098/a.scala index 7c35db64565e..d2ee7048fd56 100644 --- a/test/files/neg/t3098/a.scala +++ b/test/files/neg/t3098/a.scala @@ -1,5 +1,4 @@ -// scalac: -Xfatal-warnings -// Traits.scala +//> using options -Werror sealed trait T trait A extends T diff --git a/test/files/neg/t3098/b.scala b/test/files/neg/t3098/b.scala index c3f77ffd19e9..84a1f9f6f471 100644 --- a/test/files/neg/t3098/b.scala +++ b/test/files/neg/t3098/b.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings // Test.scala object Test { def f = (null: T) match { diff --git a/test/files/neg/t3118.check b/test/files/neg/t3118.check index da00f1c330b2..9338f9290ad5 100644 --- a/test/files/neg/t3118.check +++ b/test/files/neg/t3118.check @@ -4,4 +4,4 @@ t3118.scala:6: error: value C is not a member of O1 t3118.scala:7: error: type C is not a member of O1 println(new x.C) // is correctly not accessible ^ -two errors found +2 errors diff --git a/test/files/neg/t3160ambiguous.check b/test/files/neg/t3160ambiguous.check index 73a0c6d5dbca..e8d90bb3dd77 100644 --- a/test/files/neg/t3160ambiguous.check +++ b/test/files/neg/t3160ambiguous.check @@ -4,4 +4,4 @@ import scala.collection.immutable._ and import Bippy._ def f(x: List[Any]): String = ??? 
// ambiguous, because Bippy.List is accessible ^ -one error found +1 error diff --git a/test/files/neg/t3189.check b/test/files/neg/t3189.check index 122af564743a..dfbb653abd2e 100644 --- a/test/files/neg/t3189.check +++ b/test/files/neg/t3189.check @@ -1,4 +1,4 @@ t3189.scala:2: error: bad simple pattern: use _* to match a sequence val Array(a,b*) = ("": Any) ^ -one error found +1 error diff --git a/test/files/neg/t3189.scala b/test/files/neg/t3189.scala index 4ea4bb7581b9..94c13c54d8fb 100644 --- a/test/files/neg/t3189.scala +++ b/test/files/neg/t3189.scala @@ -1,3 +1,3 @@ object A { val Array(a,b*) = ("": Any) -} \ No newline at end of file +} diff --git a/test/files/neg/t3194.check b/test/files/neg/t3194.check new file mode 100644 index 000000000000..d5c73a5e8f69 --- /dev/null +++ b/test/files/neg/t3194.check @@ -0,0 +1,4 @@ +t3194.scala:3: error: value z is not a member of object Nil + def zero(): Unit = Nil.z = 0 + ^ +1 error diff --git a/test/files/neg/t3194.scala b/test/files/neg/t3194.scala new file mode 100644 index 000000000000..865b52c20858 --- /dev/null +++ b/test/files/neg/t3194.scala @@ -0,0 +1,4 @@ + +trait T { + def zero(): Unit = Nil.z = 0 +} diff --git a/test/files/neg/t3209.check b/test/files/neg/t3209.check index c5a6b1d95dac..4e976148f0f2 100644 --- a/test/files/neg/t3209.check +++ b/test/files/neg/t3209.check @@ -1,4 +1,4 @@ t3209.scala:2: error: expected start of definition package test ^ -one error found +1 error diff --git a/test/files/neg/t3209.scala b/test/files/neg/t3209.scala index d893726659ae..f03c2942b8ee 100644 --- a/test/files/neg/t3209.scala +++ b/test/files/neg/t3209.scala @@ -1,2 +1,2 @@ @javax.annotation.Generated(Array("test")) -package test \ No newline at end of file +package test diff --git a/test/files/neg/t3220-1.check b/test/files/neg/t3220-1.check new file mode 100644 index 000000000000..6f3b6cea1b9a --- /dev/null +++ b/test/files/neg/t3220-1.check @@ -0,0 +1,25 @@ +t3220-1.scala:2: error: invalid unicode escape + val 
badsingle = "foo \unope that's wrong" + ^ +t3220-1.scala:3: error: invalid unicode escape at index 6 of foo \unope that's wrong + val badtriple = """foo \unope that's wrong""" + ^ +t3220-1.scala:4: error: invalid unicode escape + val caretPos = "foo \u12x3 pos @ x" + ^ +t3220-1.scala:5: error: invalid unicode escape + val caretPos2 = "foo \uuuuuuu12x3 pos @ x" + ^ +t3220-1.scala:6: error: invalid unicode escape + val carPosTerm = "foo \u123" + ^ +t3220-1.scala:7: error: invalid unicode escape + val halfAnEscape = "foo \u12" + ^ +t3220-1.scala:8: error: invalid unicode escape + val halfAnEscapeChar = '\u45' + ^ +t3220-1.scala:9: error: invalid unicode escape + val `half An Identifier\u45` = "nope" + ^ +8 errors diff --git a/test/files/neg/t3220-1.scala b/test/files/neg/t3220-1.scala new file mode 100644 index 000000000000..5935b352443f --- /dev/null +++ b/test/files/neg/t3220-1.scala @@ -0,0 +1,10 @@ +object Example { + val badsingle = "foo \unope that's wrong" + val badtriple = """foo \unope that's wrong""" + val caretPos = "foo \u12x3 pos @ x" + val caretPos2 = "foo \uuuuuuu12x3 pos @ x" + val carPosTerm = "foo \u123" + val halfAnEscape = "foo \u12" + val halfAnEscapeChar = '\u45' + val `half An Identifier\u45` = "nope" +} \ No newline at end of file diff --git a/test/files/neg/t3220-2.check b/test/files/neg/t3220-2.check new file mode 100644 index 000000000000..c77afcdc3cfa --- /dev/null +++ b/test/files/neg/t3220-2.check @@ -0,0 +1,19 @@ +t3220-2.scala:2: error: invalid unicode escape at index 6 of foo \unope that's wrong + val badInters1 = s"foo \unope that's wrong" + ^ +t3220-2.scala:3: error: invalid unicode escape at index 8 of foo \u12 + val badIntersEnd1 = s"foo \u12" + ^ +t3220-2.scala:4: error: invalid unicode escape at index 6 of foo \unope that's wrong + val badInterRaw1 = raw"foo \unope that's wrong" + ^ +t3220-2.scala:5: error: invalid unicode escape at index 8 of foo \u12 + val badInterRawEnd1 = raw"foo \u12" + ^ +t3220-2.scala:6: error: invalid 
unicode escape at index 6 of foo \unope that's wrong + val badInters3 = s"""foo \unope that's wrong""" + ^ +t3220-2.scala:7: error: invalid unicode escape at index 6 of foo \unope that's wrong + val badInterRaw3 = raw"""foo \unope that's wrong""" + ^ +6 errors diff --git a/test/files/neg/t3220-2.scala b/test/files/neg/t3220-2.scala new file mode 100644 index 000000000000..79d2ca7449f1 --- /dev/null +++ b/test/files/neg/t3220-2.scala @@ -0,0 +1,8 @@ +object Example { + val badInters1 = s"foo \unope that's wrong" + val badIntersEnd1 = s"foo \u12" + val badInterRaw1 = raw"foo \unope that's wrong" + val badInterRawEnd1 = raw"foo \u12" + val badInters3 = s"""foo \unope that's wrong""" + val badInterRaw3 = raw"""foo \unope that's wrong""" +} \ No newline at end of file diff --git a/test/files/neg/t3222.check b/test/files/neg/t3222.check index 6170827cc96f..f940dd45060c 100644 --- a/test/files/neg/t3222.check +++ b/test/files/neg/t3222.check @@ -2,7 +2,7 @@ t3222.scala:1: error: not found: type B @throws(classOf[B]) ^ t3222.scala:4: error: not found: type D - def foo(@throws(classOf[D]) x: Int) {} + def foo(@throws(classOf[D]) x: Int): Unit = {} ^ t3222.scala:3: error: not found: type C @throws(classOf[C]) @@ -10,4 +10,4 @@ t3222.scala:3: error: not found: type C t3222.scala:6: error: not found: type E @throws(classOf[E]) ^ -four errors found +4 errors diff --git a/test/files/neg/t3222.scala b/test/files/neg/t3222.scala index 448292e8a738..7918671a93f6 100644 --- a/test/files/neg/t3222.scala +++ b/test/files/neg/t3222.scala @@ -1,7 +1,7 @@ @throws(classOf[B]) class ExceptionTest { @throws(classOf[C]) - def foo(@throws(classOf[D]) x: Int) {} + def foo(@throws(classOf[D]) x: Int): Unit = {} @throws(classOf[E]) type t = String diff --git a/test/files/neg/t3224.check b/test/files/neg/t3224.check index 69b02c88625a..8c4eb3d05faf 100644 --- a/test/files/neg/t3224.check +++ b/test/files/neg/t3224.check @@ -23,4 +23,4 @@ t3224.scala:48: error: polymorphic expression cannot be 
instantiated to expected required: List[?] assert(size(Array()) == 0) ^ -5 errors found +5 errors diff --git a/test/files/neg/t3236-neg.check b/test/files/neg/t3236-neg.check index ef28574d4518..83cb7d3142e0 100644 --- a/test/files/neg/t3236-neg.check +++ b/test/files/neg/t3236-neg.check @@ -31,4 +31,4 @@ AnnotationTest.scala:13: error: annotation argument needs to be a constant; foun AnnotationTest.scala:15: error: annotation argument needs to be a constant; found: Constants.StringAdd @StringAnnotation(Constants.StringAdd) ^ -11 errors found +11 errors diff --git a/test/files/neg/t3236-neg/AnnotationTest.scala b/test/files/neg/t3236-neg/AnnotationTest.scala index aec2a99020e1..55225552ebae 100644 --- a/test/files/neg/t3236-neg/AnnotationTest.scala +++ b/test/files/neg/t3236-neg/AnnotationTest.scala @@ -14,4 +14,4 @@ trait AnnotationTest { @StringAnnotation(Constants.ConstString) // ok @StringAnnotation(Constants.StringAdd) def test: Unit -} \ No newline at end of file +} diff --git a/test/files/neg/t3275.check b/test/files/neg/t3275.check index 117c792321e6..cf06eee488d3 100644 --- a/test/files/neg/t3275.check +++ b/test/files/neg/t3275.check @@ -1,4 +1,4 @@ t3275.scala:2: error: @tailrec annotated method contains no recursive calls @annotation.tailrec def foo() = 5 ^ -one error found +1 error diff --git a/test/files/neg/t3346b.check b/test/files/neg/t3346b.check index bcde6d90e46d..cf740736a799 100644 --- a/test/files/neg/t3346b.check +++ b/test/files/neg/t3346b.check @@ -1,4 +1,4 @@ t3346b.scala:14: error: could not find implicit value for evidence parameter of type TC[Any] val y = foo(1) ^ -one error found +1 error diff --git a/test/files/neg/t3346b.scala b/test/files/neg/t3346b.scala index 8ea8970298e8..f28ee8ba340a 100644 --- a/test/files/neg/t3346b.scala +++ b/test/files/neg/t3346b.scala @@ -12,4 +12,4 @@ object Test extends App { val x = foo[T, Int](1) val y = foo(1) -} \ No newline at end of file +} diff --git a/test/files/neg/t3346c.check 
b/test/files/neg/t3346c.check index 575379d009d3..7ffdda688f76 100644 --- a/test/files/neg/t3346c.check +++ b/test/files/neg/t3346c.check @@ -1,4 +1,4 @@ -t3346c.scala:60: error: value bar is not a member of Either[Int,String] +t3346c.scala:65: error: value bar is not a member of Either[Int,String] eii.bar ^ -one error found +1 error diff --git a/test/files/neg/t3346c.scala b/test/files/neg/t3346c.scala index 59584e0a6168..287c91bdf2ca 100644 --- a/test/files/neg/t3346c.scala +++ b/test/files/neg/t3346c.scala @@ -1,3 +1,5 @@ +import annotation._ + object Test extends App { // // An attempt to workaround scala/bug#2712, foiled by scala/bug#3346 @@ -32,15 +34,18 @@ object Test extends App { } + @nowarn implicit def ToTCValue[M[_], A](ma: M[A])(implicit M0: TC[M]) = new TCValue[M, A] { - implicit val M = M0 + implicit val M: TC[M] = M0 val self = ma } implicit def ToTCValueBin1[M[_, _], A, B](ma: M[A, B])(implicit M0: TC[({type λ[α]=M[A, α]})#λ]): TCValue[({type λ[α] = M[A, α]})#λ, B] = new TCValue[({type λ[α]=M[A, α]})#λ, B] { + @nowarn implicit val M = M0 val self = ma } implicit def ToTCValueBin2[M[_, _], A, B](ma: M[A, B])(implicit M0: TC[({type λ[α]=M[α, B]})#λ]): TCValue[({type λ[α]=M[α, B]})#λ, A] = new TCValue[({type λ[α]=M[α, B]})#λ, A] { + @nowarn implicit val M = M0 val self = ma } diff --git a/test/files/neg/t3346i.check b/test/files/neg/t3346i.check index cc17ab7ce484..619bf2a38659 100644 --- a/test/files/neg/t3346i.check +++ b/test/files/neg/t3346i.check @@ -4,4 +4,20 @@ t3346i.scala:28: error: value a is not a member of Test.A[T] t3346i.scala:29: error: value a is not a member of Test.A[Nothing] (new A[Nothing]).a ^ -two errors found +t3346i.scala:16: warning: Implicit definition should have explicit type (inferred Implicit1[T]) [quickfixable] + implicit def implicit1[T <: Intermediate[_, _]](implicit b: Implicit2[T]) = new Implicit1[T](b) + ^ +t3346i.scala:18: warning: Implicit definition should have explicit type (inferred Implicit2[T]) [quickfixable] 
+ implicit def implicit2alt1[T <: Intermediate[_ <: String, _]](implicit c: Implicit3[T]) = new Implicit2[T](c) + ^ +t3346i.scala:19: warning: Implicit definition should have explicit type (inferred Implicit2[T]) [quickfixable] + implicit def implicit2alt2[T <: Intermediate[_ <: Double, _]](implicit c: Implicit3[T]) = new Implicit2[T](c) + ^ +t3346i.scala:21: warning: Implicit definition should have explicit type (inferred Implicit3[T]) [quickfixable] + implicit def implicit3alt1[T <: Intermediate[_, _ <: Int]] = new Implicit3[T]() + ^ +t3346i.scala:22: warning: Implicit definition should have explicit type (inferred Implicit3[T]) [quickfixable] + implicit def implicit3alt2[T <: Intermediate[_ <: Double, _ <: AnyRef],X] = new Implicit3[T]() + ^ +5 warnings +2 errors diff --git a/test/files/neg/t3392.check b/test/files/neg/t3392.check index 842d63eec98e..cff0fcd2f9ec 100644 --- a/test/files/neg/t3392.check +++ b/test/files/neg/t3392.check @@ -1,4 +1,4 @@ t3392.scala:9: error: not found: value x case x@A(x/*<-- refers to the pattern that includes this comment*/.Ex(42)) => ^ -one error found +1 error diff --git a/test/files/neg/t3399.check b/test/files/neg/t3399.check index 987da944c6ee..112574b3ffe3 100644 --- a/test/files/neg/t3399.check +++ b/test/files/neg/t3399.check @@ -1,4 +1,4 @@ t3399.scala:23: error: Cannot prove that Nats.Add[Nats._1,Nats._1] =:= Nats._1. 
implicitly[ Add[_1, _1] =:= _1] ^ -one error found +1 error diff --git a/test/files/neg/t3399.scala b/test/files/neg/t3399.scala index 3edaa0724fe6..e39e54ac677e 100644 --- a/test/files/neg/t3399.scala +++ b/test/files/neg/t3399.scala @@ -21,4 +21,4 @@ object Nats { type _1 = Succ[_0] implicitly[ Add[_1, _1] =:= _1] -} \ No newline at end of file +} diff --git a/test/files/neg/t3403.check b/test/files/neg/t3403.check index e52d140e6a42..bc5f30b73545 100644 --- a/test/files/neg/t3403.check +++ b/test/files/neg/t3403.check @@ -1,4 +1,4 @@ t3403.scala:2: error: implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import class Foo { @bp var bar: Int = 1 } ^ -one error found +1 error diff --git a/test/files/neg/t3420b.check b/test/files/neg/t3420b.check new file mode 100644 index 000000000000..3770c326dede --- /dev/null +++ b/test/files/neg/t3420b.check @@ -0,0 +1,4 @@ +t3420b.scala:8: error: value translateEscapes is not a member of String + def f(s: String) = s.translateEscapes + ^ +1 error diff --git a/test/files/neg/t3420b.scala b/test/files/neg/t3420b.scala new file mode 100644 index 000000000000..a6c9ff90170e --- /dev/null +++ b/test/files/neg/t3420b.scala @@ -0,0 +1,9 @@ + +//> using options --release 8 -opt:inline:** -Wopt -Werror +// +class C { + val cv = Map[Int, Int](1 -> 2) + lazy val cl = Map[Int, Int](1 -> 2) + def cd = Map[Int, Int](1 -> 2) + def f(s: String) = s.translateEscapes +} diff --git a/test/files/neg/t343.check b/test/files/neg/t343.check index d310b7915fa2..22d8ea708798 100644 --- a/test/files/neg/t343.check +++ b/test/files/neg/t343.check @@ -1,4 +1,4 @@ t343.scala:5: error: private class Foo escapes its defining scope as part of type C.this.Foo def get:Foo = new Foo(); ^ -one error found +1 error diff --git a/test/files/neg/t3453.check b/test/files/neg/t3453.check index 52c948128c72..5e0489e1410f 100644 --- a/test/files/neg/t3453.check +++ b/test/files/neg/t3453.check @@ -18,4 +18,4 @@ 
t3453.scala:64: error: type mismatch; required: B new A ^ -four errors found +4 errors diff --git a/test/files/neg/t3453.scala b/test/files/neg/t3453.scala index 090b777151aa..af778189408f 100644 --- a/test/files/neg/t3453.scala +++ b/test/files/neg/t3453.scala @@ -63,4 +63,4 @@ class T3 { // despite it not being accessible without a prefix new A } -} \ No newline at end of file +} diff --git a/test/files/neg/t3481.check b/test/files/neg/t3481.check index debe07275be6..16de781ff4f4 100644 --- a/test/files/neg/t3481.check +++ b/test/files/neg/t3481.check @@ -26,4 +26,4 @@ t3481.scala:25: error: type mismatch; (which expands to) _$4 b.m("Hello") ^ -5 errors found +5 errors diff --git a/test/files/neg/t3481.scala b/test/files/neg/t3481.scala index f4b781ee37bd..6924fdf566f5 100644 --- a/test/files/neg/t3481.scala +++ b/test/files/neg/t3481.scala @@ -25,4 +25,4 @@ object t3481 { b.m("Hello") } } -} \ No newline at end of file +} diff --git a/test/files/neg/t3507-old.check b/test/files/neg/t3507-old.check index b3ac40473e33..d50ebfd9c984 100644 --- a/test/files/neg/t3507-old.check +++ b/test/files/neg/t3507-old.check @@ -1,4 +1,4 @@ t3507-old.scala:13: error: No Manifest available for _1.b.c.type. 
mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier ^ -one error found +1 error diff --git a/test/files/neg/t3507-old.scala b/test/files/neg/t3507-old.scala index 9a8c7c546217..5f8f3644b765 100644 --- a/test/files/neg/t3507-old.scala +++ b/test/files/neg/t3507-old.scala @@ -12,4 +12,4 @@ object Test { def mani[T: Manifest](x: T) = () mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier // --> _1 is not in scope here -} \ No newline at end of file +} diff --git a/test/files/neg/t3604.check b/test/files/neg/t3604.check index d25dafc5c21d..8ae7921c4b07 100644 --- a/test/files/neg/t3604.check +++ b/test/files/neg/t3604.check @@ -4,4 +4,4 @@ t3604.scala:3: error: in XML literal: expected closing tag of abbr t3604.scala:3: error: start tag was here: abbr>

    ^ -two errors found +2 errors diff --git a/test/files/neg/t3614.check b/test/files/neg/t3614.check index 81628ef37f2b..a0fea338e7ad 100644 --- a/test/files/neg/t3614.check +++ b/test/files/neg/t3614.check @@ -1,4 +1,4 @@ t3614.scala:2: error: only declarations allowed here def v = new ({ def a=0 }) ^ -one error found +1 error diff --git a/test/files/neg/t3614.scala b/test/files/neg/t3614.scala index 5b02cdf2b218..70e429440332 100644 --- a/test/files/neg/t3614.scala +++ b/test/files/neg/t3614.scala @@ -1,3 +1,3 @@ object t3614 { def v = new ({ def a=0 }) -} \ No newline at end of file +} diff --git a/test/files/neg/t3649.check b/test/files/neg/t3649.check index 76d68fa3b7a2..1435b0e42575 100644 --- a/test/files/neg/t3649.check +++ b/test/files/neg/t3649.check @@ -4,7 +4,7 @@ object T { class C(s: String = ""); val C = 0 } t3649.scala:2: error: C is already defined as (compiler-generated) case class companion object C object U { class C(val s: String = ""); val C = new C() {} } ^ -t3649.scala:2: error: not enough arguments for constructor C: (s: String)U.C +t3649.scala:2: error: not enough arguments for constructor C: (s: String): U.C object U { class C(val s: String = ""); val C = new C() {} } ^ -three errors found +3 errors diff --git a/test/files/neg/t3653.check b/test/files/neg/t3653.check index ad68e29fb49b..11857f50c90e 100644 --- a/test/files/neg/t3653.check +++ b/test/files/neg/t3653.check @@ -1,7 +1,7 @@ t3653.scala:3: error: double definition: def x(i: Int): Int at line 2 and def x(implicit x: Int): Int at line 3 -have same type after erasure: (i: Int)Int +have same type after erasure: (i: Int): Int def x(implicit x: Int) = 5 ^ -one error found +1 error diff --git a/test/files/neg/t3653.scala b/test/files/neg/t3653.scala index 96cf96008a6c..0bedc4fdc3ff 100644 --- a/test/files/neg/t3653.scala +++ b/test/files/neg/t3653.scala @@ -1,4 +1,4 @@ class B { def x(i: Int) = 3 def x(implicit x: Int) = 5 -} \ No newline at end of file +} diff --git 
a/test/files/neg/t3663.check b/test/files/neg/t3663.check index c4b27ef21110..3ac1cc05fec6 100644 --- a/test/files/neg/t3663.check +++ b/test/files/neg/t3663.check @@ -1,4 +1,4 @@ -main.scala:11: error: variable foo in class PackageProtected cannot be accessed in test.Test +main.scala:11: error: variable foo in class PackageProtected cannot be accessed as a member of test.Test from object Main in package another println(t.foo) ^ -one error found +1 error diff --git a/test/files/neg/t3663/main.scala b/test/files/neg/t3663/main.scala index 29619550cc14..a70ee52cf8bd 100644 --- a/test/files/neg/t3663/main.scala +++ b/test/files/neg/t3663/main.scala @@ -6,9 +6,9 @@ final class Test extends PackageProtected { package another { object Main { - def bug(t: Test) { + def bug(t: Test): Unit = { // Can always be replicated. println(t.foo) } } -} \ No newline at end of file +} diff --git a/test/files/neg/t3664.check b/test/files/neg/t3664.check new file mode 100644 index 000000000000..36da8817db82 --- /dev/null +++ b/test/files/neg/t3664.check @@ -0,0 +1,11 @@ +t3664.scala:10: error: Synthetic case companion used as a function. In Scala 3 (or with -Xsource-features:case-companion-function), case companions no longer extend FunctionN. Use C.apply instead. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Test.f + def f(xs: List[Int]): List[C] = xs.map(C) // ident + ^ +t3664.scala:11: error: Synthetic case companion used as a function. In Scala 3 (or with -Xsource-features:case-companion-function), case companions no longer extend FunctionN. Use D.apply instead. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Test.g + def g(xs: List[Int]): List[O.D] = xs.map(O.D) // select + ^ +2 errors diff --git a/test/files/neg/t3664.scala b/test/files/neg/t3664.scala new file mode 100644 index 000000000000..60ccc8462d57 --- /dev/null +++ b/test/files/neg/t3664.scala @@ -0,0 +1,12 @@ +//> using options -Xsource:3 + +// use -Xsource:3 to warn that implicitly extending Function is deprecated +// use -Xsource-features for dotty behavior: no extend Function, yes adapt C.apply.tupled + +case class C(i: Int) +object O { case class D(i: Int) } + +class Test { + def f(xs: List[Int]): List[C] = xs.map(C) // ident + def g(xs: List[Int]): List[O.D] = xs.map(O.D) // select +} diff --git a/test/files/neg/t3664b.check b/test/files/neg/t3664b.check new file mode 100644 index 000000000000..9119412b0e91 --- /dev/null +++ b/test/files/neg/t3664b.check @@ -0,0 +1,9 @@ +t3664b.scala:10: warning: The method `apply` is inserted. The auto insertion will be deprecated, please write `C.apply` explicitly. + def f(xs: List[Int]): List[C] = xs.map(C) // ident + ^ +t3664b.scala:11: warning: The method `apply` is inserted. The auto insertion will be deprecated, please write `D.apply` explicitly. + def g(xs: List[Int]): List[O.D] = xs.map(O.D) // select + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t3664b.scala b/test/files/neg/t3664b.scala new file mode 100644 index 000000000000..d13a2eb32c80 --- /dev/null +++ b/test/files/neg/t3664b.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Xsource:3 -Xsource-features:case-companion-function -deprecation + +// use -Xsource:3 to warn that implicitly extending Function is deprecated +// use -Xsource-features for dotty behavior: no extend Function, yes adapt C.apply.tupled + +case class C(i: Int) +object O { case class D(i: Int) } + +class Test { + def f(xs: List[Int]): List[C] = xs.map(C) // ident + def g(xs: List[Int]): List[O.D] = xs.map(O.D) // select +} diff --git a/test/files/neg/t3664c.check b/test/files/neg/t3664c.check new file mode 100644 index 000000000000..614526c15ef5 --- /dev/null +++ b/test/files/neg/t3664c.check @@ -0,0 +1,27 @@ +t3664c.scala:20: error: type mismatch; + found : C.type + required: ((Int, Int, Int)) => C + def f(xs: List[(Int, Int, Int)]): List[C] = xs.map(C) // hard error + ^ +t3664c.scala:22: error: type mismatch; + found : ((Int, Int)) => C + required: ((Int, Int, Int)) => C + def g(xs: List[(Int, Int, Int)]): List[C] = xs.map(C.tupled) // hard error + ^ +t3664c.scala:24: error: type mismatch; + found : D.type + required: ((Int, Int)) => D + def d(xs: List[(Int, Int)]): List[D] = xs.map(D) // hard error + ^ +t3664c.scala:26: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => E), rather than applied to `()`. +Write E.apply() to invoke method apply, or change the expected type. + val e: () => E = E // apply insertion warning, plus lint warning about 0-arity eta expansion + ^ +t3664c.scala:26: warning: The method `apply` is inserted. The auto insertion will be deprecated, please write `E.apply` explicitly. + val e: () => E = E // apply insertion warning, plus lint warning about 0-arity eta expansion + ^ +t3664c.scala:28: warning: The method `apply` is inserted. 
The auto insertion will be deprecated, please write `F.apply` explicitly. + def ov(xs: List[Int]): List[F] = xs.map(F) + ^ +3 warnings +3 errors diff --git a/test/files/neg/t3664c.scala b/test/files/neg/t3664c.scala new file mode 100644 index 000000000000..16b5ab08931a --- /dev/null +++ b/test/files/neg/t3664c.scala @@ -0,0 +1,29 @@ +//> using options -Werror -Xlint -Xsource:3 -Xsource-features:case-companion-function + +// use -Xsource:3 to warn that implicitly extending Function is deprecated +// use -Xsource-features for dotty behavior: no extend Function, yes adapt C.apply.tupled + +case class C(i: Int, j: Int) + +abstract case class D(i: Int, j: Int) + +case class E() + +case class F(i: Int) +object F { + def apply(): F = apply(42) + def apply(i: Int): F = new F(i) + def apply(i: Int, j: Int): F = new F(i+j) +} + +class Test { + def f(xs: List[(Int, Int, Int)]): List[C] = xs.map(C) // hard error + + def g(xs: List[(Int, Int, Int)]): List[C] = xs.map(C.tupled) // hard error + + def d(xs: List[(Int, Int)]): List[D] = xs.map(D) // hard error + + val e: () => E = E // apply insertion warning, plus lint warning about 0-arity eta expansion + + def ov(xs: List[Int]): List[F] = xs.map(F) +} diff --git a/test/files/neg/t3683a.check b/test/files/neg/t3683a.check index f40f07435a9a..9b1823e2e684 100644 --- a/test/files/neg/t3683a.check +++ b/test/files/neg/t3683a.check @@ -1,7 +1,7 @@ -t3683a.scala:15: warning: match may not be exhaustive. +t3683a.scala:16: warning: match may not be exhaustive. It would fail on the following input: XX() w match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t3683a.scala b/test/files/neg/t3683a.scala index 20346aa2bcd2..cf06ef26094c 100644 --- a/test/files/neg/t3683a.scala +++ b/test/files/neg/t3683a.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed trait Foo sealed trait Bar extends Foo sealed trait W[T >: Bar <: Foo] diff --git a/test/files/neg/t3683b.check b/test/files/neg/t3683b.check index 6e3369241bba..62e3361d046d 100644 --- a/test/files/neg/t3683b.check +++ b/test/files/neg/t3683b.check @@ -5,4 +5,4 @@ Note: Foo >: Bar (and X <: W[Foo]), but trait W is invariant in type T. You may wish to define T as -T instead. (SLS 4.5) case X() => 1 ^ -one error found +1 error diff --git a/test/files/neg/t3683b.scala b/test/files/neg/t3683b.scala index 646e41812130..1c361ed2d3b0 100644 --- a/test/files/neg/t3683b.scala +++ b/test/files/neg/t3683b.scala @@ -18,4 +18,4 @@ object Main { case Z(z) => f1(z) } } -} \ No newline at end of file +} diff --git a/test/files/neg/t3691.check b/test/files/neg/t3691.check index 6a7e13049a03..b716e5b7f996 100644 --- a/test/files/neg/t3691.check +++ b/test/files/neg/t3691.check @@ -1,16 +1,16 @@ t3691.scala:4: error: type mismatch; found : Test.A[String] required: AnyRef{type A[x]} - val b = (new A[String]{}): { type A[x] } // not ok - ^ + val b = (new A[String]{}): { type A[x] } // not ok + ^ t3691.scala:5: error: type mismatch; found : Test.A[String] required: AnyRef{type A} - val c = (new A[String]{}): { type A } // not ok - ^ + val c = (new A[String]{}): { type A } // not ok + ^ t3691.scala:7: error: type mismatch; found : AnyRef{type A = String} required: AnyRef{type A[X]} - val x = (new { type A = String }): { type A[X] } // not ok - ^ -three errors found + val x = (new { type A = String }): { type A[X] } // not ok + ^ +3 errors diff --git a/test/files/neg/t3691.scala b/test/files/neg/t3691.scala index 69e8bef630a7..79c7c4e27439 100644 --- a/test/files/neg/t3691.scala +++ 
b/test/files/neg/t3691.scala @@ -1,11 +1,11 @@ object Test { - trait A[X] { type A[x <: X] = x } - val a = (new A[String]{}): { type A[x <: String] } // ok - val b = (new A[String]{}): { type A[x] } // not ok - val c = (new A[String]{}): { type A } // not ok + trait A[X] { type A[x <: X] = x } + val a = (new A[String]{}): { type A[x <: String] } // ok + val b = (new A[String]{}): { type A[x] } // not ok + val c = (new A[String]{}): { type A } // not ok - val x = (new { type A = String }): { type A[X] } // not ok + val x = (new { type A = String }): { type A[X] } // not ok //a: AnyRef{type A[X]} - identity[x.A[Any]] _ -} \ No newline at end of file + def f = identity[x.A[Any]] _ +} diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check index f6d4c8df75eb..93104d8a1e69 100644 --- a/test/files/neg/t3692-new.check +++ b/test/files/neg/t3692-new.check @@ -1,19 +1,15 @@ -t3692-new.scala:15: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,Int] (the underlying of Map[Int,Int]) is unchecked since it is eliminated by erasure +t3692-new.scala:17: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,Int] (the underlying of Map[Int,Int]) is unchecked since it is eliminated by erasure case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer] ^ -t3692-new.scala:16: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) is unchecked since it is eliminated by erasure +t3692-new.scala:18: warning: the type test for pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) cannot be checked at runtime because it has type parameters eliminated by erasure case m1: Map[Int, V] => new java.util.HashMap[Integer, V] ^ -t3692-new.scala:17: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) is unchecked since it is 
eliminated by erasure +t3692-new.scala:19: warning: the type test for pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) cannot be checked at runtime because it has type parameters eliminated by erasure case m2: Map[T, Int] => new java.util.HashMap[T, Integer] ^ -t3692-new.scala:16: warning: unreachable code +t3692-new.scala:18: warning: unreachable code case m1: Map[Int, V] => new java.util.HashMap[Integer, V] ^ -t3692-new.scala:6: warning: Tester has a main method with parameter type Array[String], but Tester will not be a runnable program. - Reason: main method must have exact signature (Array[String])Unit - def main(args: Array[String]) = { - ^ -error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found -one error found +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t3692-new.scala b/test/files/neg/t3692-new.scala index b0edd78818e8..3a3531270cda 100644 --- a/test/files/neg/t3692-new.scala +++ b/test/files/neg/t3692-new.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Werror +// import scala.reflect.{ClassTag, classTag} import java.lang.Integer @@ -6,6 +7,7 @@ object Tester { def main(args: Array[String]) = { val map = Map("John" -> 1, "Josh" -> 2) new Tester().toJavaMap(map) + () } } diff --git a/test/files/neg/t3714-neg.check b/test/files/neg/t3714-neg.check index 4f297169686f..ee484b575ae2 100644 --- a/test/files/neg/t3714-neg.check +++ b/test/files/neg/t3714-neg.check @@ -1,13 +1,13 @@ -t3714-neg.scala:17: error: value break in class BreakImpl cannot be accessed in BreakImpl +t3714-neg.scala:17: error: value break in class BreakImpl cannot be accessed as a member of BreakImpl from object Test Access to protected value break not permitted because enclosing object Test is not a subclass of class BreakImpl where target is defined case b: BreakImpl => b.break ^ -t3714-neg.scala:25: error: value break in class BreakImpl cannot be accessed in 
BreakImpl +t3714-neg.scala:25: error: value break in class BreakImpl cannot be accessed as a member of BreakImpl from object Test Access to protected value break not permitted because enclosing object Test is not a subclass of class BreakImpl where target is defined case b: BreakImpl => b.break ^ -two errors found +2 errors diff --git a/test/files/neg/t3714-neg.scala b/test/files/neg/t3714-neg.scala index 753b367ec090..72518de148fd 100644 --- a/test/files/neg/t3714-neg.scala +++ b/test/files/neg/t3714-neg.scala @@ -30,7 +30,7 @@ object Test { case _ => -1 } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val break = BreakImpl(22) assert(f1(break) == 22) assert(f2(break) == 22) diff --git a/test/files/neg/t3736.check b/test/files/neg/t3736.check index 7a20f6c08b7c..f862868cdb96 100644 --- a/test/files/neg/t3736.check +++ b/test/files/neg/t3736.check @@ -13,4 +13,4 @@ t3736.scala:7: error: super not allowed here: use this.!= instead t3736.scala:8: error: super not allowed here: use this.## instead def f6 = super.## ^ -5 errors found +5 errors diff --git a/test/files/neg/t3757.check b/test/files/neg/t3757.check index 1507df8c4f40..525a1d10282b 100644 --- a/test/files/neg/t3757.check +++ b/test/files/neg/t3757.check @@ -1,4 +1,4 @@ B.scala:4: error: method foo overrides nothing override def foo = "B" ^ -one error found +1 error diff --git a/test/files/neg/t3757/B.scala b/test/files/neg/t3757/B.scala index 7c78fb634e52..e899f51e768b 100644 --- a/test/files/neg/t3757/B.scala +++ b/test/files/neg/t3757/B.scala @@ -2,4 +2,4 @@ package b class B extends a.A { override def foo = "B" -} \ No newline at end of file +} diff --git a/test/files/neg/t3761-overload-byname.check b/test/files/neg/t3761-overload-byname.check index ae7d21dfa6b3..d74311419130 100644 --- a/test/files/neg/t3761-overload-byname.check +++ b/test/files/neg/t3761-overload-byname.check @@ -1,13 +1,13 @@ t3761-overload-byname.scala:9: error: ambiguous reference to overloaded 
definition, -both method m1 in object t of type (x: => Int, s: Object)Int -and method m1 in object t of type (x: => AnyVal, s: String)Int +both method m1 in object t of type (x: => Int, s: Object): Int +and method m1 in object t of type (x: => AnyVal, s: String): Int match argument types (Int,String) m1(1, "") ^ t3761-overload-byname.scala:11: error: ambiguous reference to overloaded definition, -both method m2 in object t of type (x: => Int, s: Object)Int -and method m2 in object t of type (x: => Any, s: String)Int +both method m2 in object t of type (x: => Int, s: Object): Int +and method m2 in object t of type (x: => Any, s: String): Int match argument types (Int,String) m2(1, "") ^ -two errors found +2 errors diff --git a/test/files/neg/t3769.check b/test/files/neg/t3769.check index 3c9738bdb413..62b035d59443 100644 --- a/test/files/neg/t3769.check +++ b/test/files/neg/t3769.check @@ -4,4 +4,4 @@ t3769.scala:2: error: in XML literal: expected closing tag of a t3769.scala:2: error: start tag was here: a> val x = {"text"} ^ -two errors found +2 errors diff --git a/test/files/neg/t3772.check b/test/files/neg/t3772.check index d1ed39d8b681..9522e8df89e7 100644 --- a/test/files/neg/t3772.check +++ b/test/files/neg/t3772.check @@ -4,4 +4,4 @@ t3772.scala:7: error: value inner is not a member of object CC t3772.scala:14: error: value outer is not a member of object CC CC.outer ^ -two errors found +2 errors diff --git a/test/files/neg/t3776.check b/test/files/neg/t3776.check index 0dfe129596d4..796a8c86061f 100644 --- a/test/files/neg/t3776.check +++ b/test/files/neg/t3776.check @@ -1,4 +1,4 @@ t3776.scala:8: error: value someOperation is not a member of _$1 def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v ^ -one error found +1 error diff --git a/test/files/neg/t3816.check b/test/files/neg/t3816.check index 40621f89622b..86a14e86e37c 100644 --- a/test/files/neg/t3816.check +++ b/test/files/neg/t3816.check @@ -4,4 +4,4 @@ t3816.scala:30: error: 
stable identifier required, but `syncID` found. t3816.scala:38: error: stable identifier required, but Test.this.foo found. case Some( `foo` ) => ^ -two errors found +2 errors diff --git a/test/files/neg/t3816.scala b/test/files/neg/t3816.scala index 31b0825f1d7c..0eadbcf8f10f 100644 --- a/test/files/neg/t3816.scala +++ b/test/files/neg/t3816.scala @@ -1,5 +1,5 @@ class B { - def ::(a: List[Int]) { + def ::(a: List[Int]): Unit = { a match { case x::xs => case _ => diff --git a/test/files/neg/t3836.check b/test/files/neg/t3836.check index ff2fc36ae9a4..6a356d5aff48 100644 --- a/test/files/neg/t3836.check +++ b/test/files/neg/t3836.check @@ -10,4 +10,4 @@ import baz._ and import bar._ def f: Bippy[Int] = ??? ^ -two errors found +2 errors diff --git a/test/files/neg/t3854.check b/test/files/neg/t3854.check index c478481a6fba..1170b64a5404 100644 --- a/test/files/neg/t3854.check +++ b/test/files/neg/t3854.check @@ -1,5 +1,7 @@ -t3854.scala:1: error: class Bar needs to be abstract, since method foo in trait Foo of type [G[_]](implicit n: N[G,F])X[F] is not defined -(Note that N[G,F] does not match M[G]) +t3854.scala:1: error: class Bar needs to be abstract. +Missing implementation for member of trait Foo: + def foo[G[_]](implicit n: N[G,F]): X[F] = ??? 
// N[G,F] does not match M[G] in `def foo[G[_[_], _]](implicit M: M[G]): X[[α]G[F,α]]` + class Bar[F[_]] extends Foo[F] { ^ -one error found +1 error diff --git a/test/files/neg/t3871.check b/test/files/neg/t3871.check index c9667abfb6be..f3471bc70bac 100644 --- a/test/files/neg/t3871.check +++ b/test/files/neg/t3871.check @@ -1,7 +1,7 @@ -t3871.scala:4: error: variable foo in class Sub2 cannot be accessed in Sub2 +t3871.scala:4: error: variable foo in class Sub2 cannot be accessed as a member of Sub2 from class Base Access to protected variable foo not permitted because enclosing class Base is not a subclass of class Sub2 where target is defined s.foo = true ^ -one error found +1 error diff --git a/test/files/neg/t3871b.check b/test/files/neg/t3871b.check index 0f9ecaf188cd..df210983a836 100644 --- a/test/files/neg/t3871b.check +++ b/test/files/neg/t3871b.check @@ -1,97 +1,106 @@ t3871b.scala:61: error: not found: value protOT protOT // not allowed ^ -t3871b.scala:77: error: method prot in class A cannot be accessed in E.this.A +t3871b.scala:77: error: method prot in class A cannot be accessed as a member of E.this.A from class B in class E Access to protected method prot not permitted because prefix type E.this.A does not conform to class B in class E where the access takes place a.prot // not allowed, prefix type `A` does not conform to `B` ^ t3871b.scala:79: error: value protT is not a member of E.this.B +did you mean prot or protE? b.protT // not allowed ^ t3871b.scala:80: error: value protT is not a member of E.this.C +did you mean prot or protE? c.protT // not allowed ^ t3871b.scala:81: error: value protT is not a member of E.this.A +did you mean protE? 
a.protT // not allowed ^ -t3871b.scala:91: error: method prot in class A cannot be accessed in E.this.A +t3871b.scala:91: error: method prot in class A cannot be accessed as a member of E.this.A from object B in class E Access to protected method prot not permitted because prefix type E.this.A does not conform to object B in class E where the access takes place a.prot // not allowed ^ t3871b.scala:93: error: value protT is not a member of E.this.B +did you mean prot or protE? b.protT // not allowed ^ t3871b.scala:94: error: value protT is not a member of E.this.C +did you mean prot or protE? c.protT // not allowed ^ t3871b.scala:95: error: value protT is not a member of E.this.A +did you mean protE? a.protT // not allowed ^ -t3871b.scala:102: error: method prot in class A cannot be accessed in E.this.B +t3871b.scala:102: error: method prot in class A cannot be accessed as a member of E.this.B from class Z in class E Access to protected method prot not permitted because enclosing class Z in class E is not a subclass of class A in class E where target is defined b.prot // not allowed ^ -t3871b.scala:103: error: method prot in class A cannot be accessed in E.this.C +t3871b.scala:103: error: method prot in class A cannot be accessed as a member of E.this.C from class Z in class E Access to protected method prot not permitted because enclosing class Z in class E is not a subclass of class A in class E where target is defined c.prot // not allowed ^ -t3871b.scala:104: error: method prot in class A cannot be accessed in E.this.A +t3871b.scala:104: error: method prot in class A cannot be accessed as a member of E.this.A from class Z in class E Access to protected method prot not permitted because enclosing class Z in class E is not a subclass of class A in class E where target is defined a.prot // not allowed ^ t3871b.scala:109: error: value protT is not a member of E.this.B +did you mean protE? 
b.protT // not allowed ^ t3871b.scala:110: error: value protT is not a member of E.this.C +did you mean protE? c.protT // not allowed ^ t3871b.scala:111: error: value protT is not a member of E.this.A +did you mean protE? a.protT // not allowed ^ -t3871b.scala:120: error: method prot in class A cannot be accessed in Other.this.e.B +t3871b.scala:120: error: method prot in class A cannot be accessed as a member of Other.this.e.B from class Other Access to protected method prot not permitted because enclosing class Other is not a subclass of class A in class E where target is defined b.prot // not allowed ^ -t3871b.scala:121: error: method prot in class A cannot be accessed in Other.this.e.C +t3871b.scala:121: error: method prot in class A cannot be accessed as a member of Other.this.e.C from class Other Access to protected method prot not permitted because enclosing class Other is not a subclass of class A in class E where target is defined c.prot // not allowed ^ -t3871b.scala:122: error: method prot in class A cannot be accessed in Other.this.e.A +t3871b.scala:122: error: method prot in class A cannot be accessed as a member of Other.this.e.A from class Other Access to protected method prot not permitted because enclosing class Other is not a subclass of class A in class E where target is defined a.prot // not allowed ^ -t3871b.scala:123: error: method protE in class A cannot be accessed in Other.this.e.B +t3871b.scala:123: error: method protE in class A cannot be accessed as a member of Other.this.e.B from class Other Access to protected method protE not permitted because enclosing class Other is not a subclass of class A in class E where target is defined b.protE // not allowed ^ -t3871b.scala:124: error: method protE in class A cannot be accessed in Other.this.e.A +t3871b.scala:124: error: method protE in class A cannot be accessed as a member of Other.this.e.A from class Other Access to protected method protE not permitted because enclosing class Other is not a 
subclass of class A in class E where target is defined a.protE // not allowed ^ -t3871b.scala:125: error: method protE in class A cannot be accessed in Other.this.e.C +t3871b.scala:125: error: method protE in class A cannot be accessed as a member of Other.this.e.C from class Other Access to protected method protE not permitted because enclosing class Other is not a subclass of class A in class E where target is defined c.protE // not allowed ^ -21 errors found +21 errors diff --git a/test/files/neg/t3873.check b/test/files/neg/t3873.check index f9f413aeafd4..5db167f41404 100644 --- a/test/files/neg/t3873.check +++ b/test/files/neg/t3873.check @@ -3,4 +3,4 @@ t3873.scala:11: error: type mismatch; required: a.B where val a: A wrongf(new A)(a.b) // should not compile ^ -one error found +1 error diff --git a/test/files/neg/t3873.scala b/test/files/neg/t3873.scala index b27b4e9c9dfd..bf57999554a5 100644 --- a/test/files/neg/t3873.scala +++ b/test/files/neg/t3873.scala @@ -9,4 +9,4 @@ object Test { val a = new A wrongf(a)(a.b) wrongf(new A)(a.b) // should not compile -} \ No newline at end of file +} diff --git a/test/files/neg/t3909.check b/test/files/neg/t3909.check index 052b49f855b5..9ed443d6b494 100644 --- a/test/files/neg/t3909.check +++ b/test/files/neg/t3909.check @@ -1,5 +1,4 @@ -t3909.scala:1: error: in object DO, multiple overloaded alternatives of m1 define default arguments -Error occurred in an application involving default arguments. +t3909.scala:1: error: in object DO, multiple overloaded alternatives of method m1 define default arguments. object DO { ^ -one error found +1 error diff --git a/test/files/neg/t3909b.check b/test/files/neg/t3909b.check new file mode 100644 index 000000000000..7fd44088d4b3 --- /dev/null +++ b/test/files/neg/t3909b.check @@ -0,0 +1,4 @@ +t3909b.scala:1: error: in object DO, multiple overloaded alternatives of method m1 define default arguments. 
+object DO { + ^ +1 error diff --git a/test/files/neg/t3909b.scala b/test/files/neg/t3909b.scala new file mode 100644 index 000000000000..26875f61c072 --- /dev/null +++ b/test/files/neg/t3909b.scala @@ -0,0 +1,10 @@ +object DO { + + def m1(str: String, extraStuff: String = "stuff"): Int = ??? + def m1(i: Int, extraStuff: Int = "42".toInt): Int = ??? + + def main(args: Array[String]): Unit = { + val m1s = m1("foo") + val m1i = m1(42) + } +} diff --git a/test/files/neg/t391.check b/test/files/neg/t391.check index 879d9af71fe1..4092e5ff522d 100644 --- a/test/files/neg/t391.check +++ b/test/files/neg/t391.check @@ -10,4 +10,4 @@ class E(def x: Int); // the "def x" is illegal t391.scala:6: error: ':' expected but eof found. class E(def x: Int); // the "def x" is illegal ^ -four errors found +4 errors diff --git a/test/files/neg/t3913.check b/test/files/neg/t3913.check index d85e5c5bea18..cf1f53a1e0ae 100644 --- a/test/files/neg/t3913.check +++ b/test/files/neg/t3913.check @@ -1,4 +1,4 @@ t3913.scala:2: error: super constructor cannot be passed a self reference unless parameter is declared by-name object LimboStage extends Stage( Set( LimboStage )) ^ -one error found +1 error diff --git a/test/files/neg/t3913.scala b/test/files/neg/t3913.scala index e0917ff3e3d0..a5408fe02517 100644 --- a/test/files/neg/t3913.scala +++ b/test/files/neg/t3913.scala @@ -2,7 +2,7 @@ class Stage( val transits: Set[ Stage ]) object LimboStage extends Stage( Set( LimboStage )) object Test { - def main( args: Array[ String ]) { + def main( args: Array[ String ]): Unit = { val x = LimboStage } } diff --git a/test/files/neg/t3934.check b/test/files/neg/t3934.check index ecccc3960b64..37fbd48c7503 100644 --- a/test/files/neg/t3934.check +++ b/test/files/neg/t3934.check @@ -1,13 +1,13 @@ -t3934.scala:15: error: method f2 in class J cannot be accessed in test.J +t3934.scala:15: error: method f2 in class J cannot be accessed as a member of test.J from class S1 in package nest Access to protected 
method f2 not permitted because enclosing class S1 in package nest is not a subclass of class J in package test where target is defined def g2(x: J) = x.f2() ^ -t3934.scala:20: error: method f2 in class J cannot be accessed in test.J +t3934.scala:20: error: method f2 in class J cannot be accessed as a member of test.J from class S2 in package nest Access to protected method f2 not permitted because prefix type test.J does not conform to class S2 in package nest where the access takes place def g2(x: J) = x.f2() ^ -two errors found +2 errors diff --git a/test/files/neg/t3971.check b/test/files/neg/t3971.check index 8685119876ec..3ead342df452 100644 --- a/test/files/neg/t3971.check +++ b/test/files/neg/t3971.check @@ -18,4 +18,4 @@ t3971.scala:11: error: type mismatch; required: String ({"ab".reverse; "ba".equals})(0): String ^ -four errors found +4 errors diff --git a/test/files/neg/t3977.check b/test/files/neg/t3977.check index 72335a092610..78249b09000f 100644 --- a/test/files/neg/t3977.check +++ b/test/files/neg/t3977.check @@ -1,4 +1,4 @@ t3977.scala:12: error: could not find implicit value for parameter w: False#If[E] new NoNull ^ -one error found +1 error diff --git a/test/files/neg/t3987.check b/test/files/neg/t3987.check index a9f7912b778b..1e64681ec214 100644 --- a/test/files/neg/t3987.check +++ b/test/files/neg/t3987.check @@ -4,4 +4,4 @@ t3987.scala:11: error: type mismatch; (which expands to) t#Zed forSome { type t <: Gox } val y: GoxZed = x ^ -one error found +1 error diff --git a/test/files/neg/t3995.check b/test/files/neg/t3995.check deleted file mode 100644 index 00ecf4ca5b6e..000000000000 --- a/test/files/neg/t3995.check +++ /dev/null @@ -1,6 +0,0 @@ -t3995.scala:31: error: type mismatch; - found : String("") - required: _1.F0 where val _1: Lift - (new Lift).apply("") - ^ -one error found diff --git a/test/files/neg/t4044.check b/test/files/neg/t4044.check index 0e1ea4f51d37..37679a7d6c69 100644 --- a/test/files/neg/t4044.check +++ 
b/test/files/neg/t4044.check @@ -1,14 +1,14 @@ -t4044.scala:9: error: AnyRef takes no type parameters, expected: one +t4044.scala:9: error: AnyRef takes no type parameters, expected: 1 M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *) ^ t4044.scala:11: error: kinds of the type arguments (Test.A) do not conform to the expected kinds of the type parameters (type N). Test.A's type parameters do not match type N's expected parameters: -type _ has no type parameters, but type O has one +type _ has no type parameters, but type O has 1 M[A] // error, (A :: (* -> *) not kind-conformant to (N :: * -> * -> *) ^ t4044.scala:15: error: kinds of the type arguments (Test.C) do not conform to the expected kinds of the type parameters (type N). Test.C's type parameters do not match type N's expected parameters: -type _ has one type parameter, but type _ has none +type _ has 1 type parameter, but type _ has 0 M[C] // error, (C :: (* -> * -> * -> *) not kind-conformant to (N :: * -> * -> *) ^ -three errors found +3 errors diff --git a/test/files/neg/t4044.scala b/test/files/neg/t4044.scala index aedffbb96d3c..eeb1d11ac968 100644 --- a/test/files/neg/t4044.scala +++ b/test/files/neg/t4044.scala @@ -13,4 +13,4 @@ object Test { M[B] // okay, (B :: (* -> * -> *) is kind-conformant to (N :: * -> * -> *) M[C] // error, (C :: (* -> * -> * -> *) not kind-conformant to (N :: * -> * -> *) -} \ No newline at end of file +} diff --git a/test/files/neg/t4064.check b/test/files/neg/t4064.check index 0d0e20ded13f..496eeec05a78 100644 --- a/test/files/neg/t4064.check +++ b/test/files/neg/t4064.check @@ -1,4 +1,4 @@ t4064.scala:4: error: value FALSE is not a member of object Boolean new Foo[Boolean](Boolean.FALSE) ^ -one error found +1 error diff --git a/test/files/neg/t4064.scala b/test/files/neg/t4064.scala index 097a62eae20d..f8cb7848a1d0 100644 --- a/test/files/neg/t4064.scala +++ b/test/files/neg/t4064.scala @@ -2,4 +2,4 @@ class Foo[T](v: T) {} object Test { new 
Foo[Boolean](Boolean.FALSE) -} \ No newline at end of file +} diff --git a/test/files/neg/t4069.check b/test/files/neg/t4069.check index e71693361e7d..bc55b17f89b3 100644 --- a/test/files/neg/t4069.check +++ b/test/files/neg/t4069.check @@ -1,7 +1,7 @@ t4069.scala:7: error: unexpected end of input: possible missing '}' in XML block case 2 => ^ -t4069.scala:6: error: Missing closing brace `}' assumed here +t4069.scala:6: error: Missing closing brace `}` assumed here ^ t4069.scala:9: error: in XML literal: in XML content, please use '}}' to express '}' @@ -13,4 +13,4 @@ t4069.scala:4: error: I encountered a '}' where I didn't expect one, maybe this t4069.scala:10: error: '}' expected but eof found. } ^ -5 errors found +5 errors diff --git a/test/files/neg/t4069.scala b/test/files/neg/t4069.scala index 80df6ec16dfa..831eba413c31 100644 --- a/test/files/neg/t4069.scala +++ b/test/files/neg/t4069.scala @@ -7,4 +7,4 @@ object ParserBug { case 2 =>
    } -} \ No newline at end of file +} diff --git a/test/files/neg/t4079.check b/test/files/neg/t4079.check index f4c956c44588..286151d1154e 100644 --- a/test/files/neg/t4079.check +++ b/test/files/neg/t4079.check @@ -1,4 +1,4 @@ t4079_2.scala:2: error: could not find implicit value for parameter f: Functor[List] Cat.compose[List,Option].Functor ^ -one error found +1 error diff --git a/test/files/neg/t409.check b/test/files/neg/t409.check index 0edc0d03cd99..866c4cd80d56 100644 --- a/test/files/neg/t409.check +++ b/test/files/neg/t409.check @@ -1,4 +1,4 @@ t409.scala:6: error: class Case1 needs to be a trait to be mixed in class Toto extends Expr with Case1(12); ^ -one error found +1 error diff --git a/test/files/neg/t4091.check b/test/files/neg/t4091.check index 2fdd07fd4d6a..8dfa726b8566 100644 --- a/test/files/neg/t4091.check +++ b/test/files/neg/t4091.check @@ -1,4 +1,4 @@ t4091.scala:1: error: expected start of definition private a ^ -one error found +1 error diff --git a/test/files/neg/t4098.check b/test/files/neg/t4098.check index 232c082ec91f..8e15e90abaa3 100644 --- a/test/files/neg/t4098.check +++ b/test/files/neg/t4098.check @@ -1,13 +1,13 @@ -t4098.scala:3: error: forward reference not allowed from self constructor invocation +t4098.scala:3: error: forward reference to method b defined on line 4 not allowed from self constructor invocation this(b) ^ -t4098.scala:8: error: forward reference not allowed from self constructor invocation +t4098.scala:8: error: forward reference to lazy value b defined on line 9 not allowed from self constructor invocation this(b) ^ -t4098.scala:13: error: forward reference not allowed from self constructor invocation +t4098.scala:13: error: forward reference to value b defined on line 14 not allowed from self constructor invocation this(b) ^ -t4098.scala:18: error: forward reference not allowed from self constructor invocation +t4098.scala:18: error: forward reference to method b defined on line 20 not allowed from self 
constructor invocation this(b) ^ -four errors found +4 errors diff --git a/test/files/neg/t412.check b/test/files/neg/t412.check index 9cb467e85411..3525dc5efd03 100644 --- a/test/files/neg/t412.check +++ b/test/files/neg/t412.check @@ -2,4 +2,4 @@ t412.scala:11: error: stable identifier required, but A.this.c found. Note that value c is not stable because its type, A.this.CX with A.this.C2, is volatile. def castA(x: c.T): T2 = x; ^ -one error found +1 error diff --git a/test/files/neg/t4134.check b/test/files/neg/t4134.check index 35a1820b0a6d..829ef3104839 100644 --- a/test/files/neg/t4134.check +++ b/test/files/neg/t4134.check @@ -1,4 +1,4 @@ t4134.scala:22: error: Member method f of mixin trait T2 is missing a concrete super implementation. class Konkret extends T3 ^ -one error found +1 error diff --git a/test/files/neg/t4137.check b/test/files/neg/t4137.check index 9767bdb1ce64..2b97865c0e83 100644 --- a/test/files/neg/t4137.check +++ b/test/files/neg/t4137.check @@ -1,9 +1,11 @@ -t4137.scala:9: error: overriding type EPC in trait A, which equals [X1]C[X1]; - type EPC has incompatible type +t4137.scala:9: error: incompatible type in overriding +type EPC[X1] = C[X1] (defined in trait A) + (Equivalent type required when overriding a type alias.) override type EPC = C[T] ^ -t4137.scala:10: error: overriding type EPC2 in trait A, which equals [X1]C[X1]; - type EPC2 has incompatible type +t4137.scala:10: error: incompatible type in overriding +type EPC2[X1] = C[X1] (defined in trait A) + (Equivalent type required when overriding a type alias.) 
override type EPC2[X1 <: String] = C[X1] ^ -two errors found +2 errors diff --git a/test/files/neg/t4137.scala b/test/files/neg/t4137.scala index 68f249fdeb14..60de6de70175 100644 --- a/test/files/neg/t4137.scala +++ b/test/files/neg/t4137.scala @@ -8,4 +8,4 @@ trait A[T] { trait B[T] extends A[T] { override type EPC = C[T] override type EPC2[X1 <: String] = C[X1] -} \ No newline at end of file +} diff --git a/test/files/neg/t414.check b/test/files/neg/t414.check index 30211eef8ed4..96ae1ee055c9 100644 --- a/test/files/neg/t414.check +++ b/test/files/neg/t414.check @@ -9,4 +9,4 @@ t414.scala:7: error: type mismatch; required: a case _ => ^ -two errors found +2 errors diff --git a/test/files/neg/t4158.check b/test/files/neg/t4158.check index 7bac6558f760..4fa2ec9e961a 100644 --- a/test/files/neg/t4158.check +++ b/test/files/neg/t4158.check @@ -4,4 +4,4 @@ t4158.scala:2: error: an expression of type Null is ineligible for implicit conv t4158.scala:3: error: an expression of type Null is ineligible for implicit conversion var y = null: Int ^ -two errors found +2 errors diff --git a/test/files/neg/t4158.scala b/test/files/neg/t4158.scala index 07aa69a95cd7..be3dc4398bc5 100644 --- a/test/files/neg/t4158.scala +++ b/test/files/neg/t4158.scala @@ -1,4 +1,4 @@ class A { var x: Int = null var y = null: Int -} \ No newline at end of file +} diff --git a/test/files/neg/t4163.check b/test/files/neg/t4163.check index 47bc78d31cbe..eb09cdde8b8b 100644 --- a/test/files/neg/t4163.check +++ b/test/files/neg/t4163.check @@ -4,4 +4,4 @@ t4163.scala:4: error: '<-' expected but '=' found. 
t4163.scala:5: error: illegal start of simple expression y <- 0 to 100 ^ -two errors found +2 errors diff --git a/test/files/neg/t4166.check b/test/files/neg/t4166.check index 10b77d841abc..7c65aa902ae5 100644 --- a/test/files/neg/t4166.check +++ b/test/files/neg/t4166.check @@ -1,4 +1,4 @@ t4166.scala:3: error: super constructor arguments cannot reference unconstructed `this` class Demo extends Base(new { Demo.this.toString }) { ^ -one error found +1 error diff --git a/test/files/neg/t4174.check b/test/files/neg/t4174.check index 914fcff76e0f..e819fd75a419 100644 --- a/test/files/neg/t4174.check +++ b/test/files/neg/t4174.check @@ -1,4 +1,4 @@ t4174.scala:7: error: method bar overrides nothing foo(new C { override def bar = 1 }) ^ -one error found +1 error diff --git a/test/files/neg/t418.check b/test/files/neg/t418.check index 1b99717b8214..1dc45672b258 100644 --- a/test/files/neg/t418.check +++ b/test/files/neg/t418.check @@ -1,4 +1,4 @@ t418.scala:2: error: not found: value Foo12340771 null match { case Foo12340771.Bar(x) => x } ^ -one error found +1 error diff --git a/test/files/neg/t4196.check b/test/files/neg/t4196.check index a0586819e94d..c288f08cc234 100644 --- a/test/files/neg/t4196.check +++ b/test/files/neg/t4196.check @@ -1,4 +1,4 @@ t4196.scala:5: error: Some[String] does not take parameters }.apply("first param") ("spurious param") ^ -one error found +1 error diff --git a/test/files/neg/t4196.scala b/test/files/neg/t4196.scala index 06e1f28d549a..a0ad4db72a50 100644 --- a/test/files/neg/t4196.scala +++ b/test/files/neg/t4196.scala @@ -3,4 +3,4 @@ object Weird { val foo = Some(s); // to illustrate that vals are printed in the error foo }.apply("first param") ("spurious param") -} \ No newline at end of file +} diff --git a/test/files/neg/t421.check b/test/files/neg/t421.check index dc5fa425acdc..c5b89f867fd9 100644 --- a/test/files/neg/t421.check +++ b/test/files/neg/t421.check @@ -1,4 +1,4 @@ t421.scala:5: error: star patterns must correspond with 
varargs parameters case Bar("foo",_*) => sys.error("huh?"); ^ -one error found +1 error diff --git a/test/files/neg/t4217.check b/test/files/neg/t4217.check index 6c49ec335424..c9de95841d3d 100644 --- a/test/files/neg/t4217.check +++ b/test/files/neg/t4217.check @@ -1,4 +1,4 @@ t4217.scala:2: error: 'case' expected but '}' found. 42 match { } ^ -one error found +1 error diff --git a/test/files/neg/t4221.check b/test/files/neg/t4221.check index 46c2d10a988d..82577e26b48a 100644 --- a/test/files/neg/t4221.check +++ b/test/files/neg/t4221.check @@ -3,4 +3,4 @@ t4221.scala:8: error: type mismatch; required: Wrapper[S] def wrap[S <: Cl#Sub[S]](v: S): Wrapper[S] = { ^ -one error found +1 error diff --git a/test/files/neg/t425.check b/test/files/neg/t425.check index 77ea0c5a4b44..25ae66044dd8 100644 --- a/test/files/neg/t425.check +++ b/test/files/neg/t425.check @@ -1,4 +1,4 @@ t425.scala:3: error: case class B has case ancestor Temp.A, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes. case class B(override val x: Int, y: Double) extends A(x) ^ -one error found +1 error diff --git a/test/files/neg/t4270.check b/test/files/neg/t4270.check index cfe0a93e005c..af56ada4fa57 100644 --- a/test/files/neg/t4270.check +++ b/test/files/neg/t4270.check @@ -1,4 +1,4 @@ t4270.scala:5: error: could not find implicit value for parameter e: Int implicitly[Int] ^ -one error found +1 error diff --git a/test/files/neg/t4271.check b/test/files/neg/t4271.check index 91d9fbcfa197..c24ac9de1175 100644 --- a/test/files/neg/t4271.check +++ b/test/files/neg/t4271.check @@ -1,10 +1,31 @@ t4271.scala:9: error: value to is not a member of Int +did you mean toInt? 3 to 5 ^ t4271.scala:10: error: value ensuring is not a member of Int 5 ensuring true ^ t4271.scala:11: error: value -> is not a member of Int +did you mean >>? 
3 -> 5 ^ -three errors found +t4271.scala:3: warning: Implicit definition should have explicit type (inferred foo.Donotuseme.type) [quickfixable] + implicit def Ensuring[A](x: A) = Donotuseme + ^ +t4271.scala:4: warning: Implicit definition should have explicit type (inferred foo.Donotuseme.type) [quickfixable] + implicit def doubleWrapper(x: Int) = Donotuseme + ^ +t4271.scala:5: warning: Implicit definition should have explicit type (inferred foo.Donotuseme.type) [quickfixable] + implicit def floatWrapper(x: Int) = Donotuseme + ^ +t4271.scala:6: warning: Implicit definition should have explicit type (inferred foo.Donotuseme.type) [quickfixable] + implicit def intWrapper(x: Int) = Donotuseme + ^ +t4271.scala:7: warning: Implicit definition should have explicit type (inferred foo.Donotuseme.type) [quickfixable] + implicit def longWrapper(x: Int) = Donotuseme + ^ +t4271.scala:8: warning: Implicit definition should have explicit type (inferred foo.Donotuseme.type) [quickfixable] + implicit def ArrowAssoc[A](x: A) = Donotuseme + ^ +6 warnings +3 errors diff --git a/test/files/neg/t4302.check b/test/files/neg/t4302.check index ea72d291b8dd..68d62eb36922 100644 --- a/test/files/neg/t4302.check +++ b/test/files/neg/t4302.check @@ -1,6 +1,6 @@ t4302.scala:4: warning: abstract type T is unchecked since it is eliminated by erasure def hasMatch[T](x: AnyRef) = x.isInstanceOf[T] ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t4302.scala b/test/files/neg/t4302.scala index a8b46629c8ec..233250d212d9 100644 --- a/test/files/neg/t4302.scala +++ b/test/files/neg/t4302.scala @@ -1,4 +1,4 @@ -// scalac: -unchecked -Xfatal-warnings +//> using options -Xfatal-warnings // object Test { def hasMatch[T](x: AnyRef) = x.isInstanceOf[T] diff --git a/test/files/neg/t4417.check b/test/files/neg/t4417.check index dbd0f1df460d..360065798b8b 100644 --- a/test/files/neg/t4417.check +++ b/test/files/neg/t4417.check @@ -1,7 +1,7 @@ -t4417.scala:11: error: constructor Pixel$mcD$sp in class Pixel$mcD$sp cannot be accessed in object Pixel +t4417.scala:11: error: constructor Pixel$mcD$sp in class Pixel$mcD$sp cannot be accessed in object Pixel from object Pixel Access to protected constructor Pixel$mcD$sp not permitted because enclosing object Pixel is not a subclass of class Pixel$mcD$sp where target is defined def apply(v: Double): Pixel1d = new Pixel1d(v) ^ -one error found +1 error diff --git a/test/files/neg/t4419.check b/test/files/neg/t4419.check index a53e0c95da91..cce4223ecf24 100644 --- a/test/files/neg/t4419.check +++ b/test/files/neg/t4419.check @@ -1,4 +1,4 @@ -t4419.scala:2: error: forward reference extends over definition of value b +t4419.scala:2: error: forward reference to value a defined on line 2 extends over definition of value b { val b = a; val a = 1 ; println(a) } ^ -one error found +1 error diff --git a/test/files/neg/t4419.scala b/test/files/neg/t4419.scala index 5dc86d354efe..161593f9bbf6 100644 --- a/test/files/neg/t4419.scala +++ b/test/files/neg/t4419.scala @@ -1,3 +1,3 @@ class A { { val b = a; val a = 1 ; println(a) } -} \ No newline at end of file +} diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check index 00006c08f0c6..c62a09c3207f 100644 --- a/test/files/neg/t4425.check +++ b/test/files/neg/t4425.check @@ -1,13 +1,13 @@ -t4425.scala:3: error: object X is not a case class, nor does it have an unapply/unapplySeq 
member +t4425.scala:3: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: Int)(y: Option[Int]): None.type exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list 42 match { case _ X _ => () } ^ -t4425.scala:8: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425.scala:8: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: Int)(y: Int): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list 42 match { case _ X _ => () } ^ -t4425.scala:13: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425.scala:13: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: String)(y: String): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list "" match { case _ X _ => () } ^ -three errors found +3 errors diff --git a/test/files/neg/t4425.scala b/test/files/neg/t4425.scala index 1714955c27c0..8feedc20f424 100644 --- a/test/files/neg/t4425.scala +++ b/test/files/neg/t4425.scala @@ -11,4 +11,4 @@ object Foo2 { object Foo3 { object X { def unapply(x : String)(y: String) = Some((2,2)) } "" match { case _ X _ => () } -} \ No newline at end of file +} diff --git a/test/files/neg/t4425b.check b/test/files/neg/t4425b.check index 79ebe0a0cbba..2fc9121ea6fb 100644 --- a/test/files/neg/t4425b.check +++ b/test/files/neg/t4425b.check @@ -1,43 +1,43 @@ -t4425b.scala:5: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425b.scala:5: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second 
non-implicit parameter list println( "" match { case _ X _ => "ok" ; case _ => "fail" }) ^ -t4425b.scala:6: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425b.scala:6: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) ^ -t4425b.scala:7: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425b.scala:7: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list println( "" match { case X(_) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:8: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425b.scala:8: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list println((X: Any) match { case X(_) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:9: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425b.scala:9: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list println( "" match { case X(_, _) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:10: error: object X is not a case class, nor does it have an unapply/unapplySeq member +t4425b.scala:10: error: object X is not a case class, nor does it have a valid unapply/unapplySeq member Note: 
def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:18: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing +t4425b.scala:18: error: object X can't be used as an extractor: The result type of an unapply method may not be Nothing println( "" match { case _ X _ => "ok" ; case _ => "fail" }) ^ -t4425b.scala:19: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing +t4425b.scala:19: error: object X can't be used as an extractor: The result type of an unapply method may not be Nothing println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) ^ -t4425b.scala:20: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing +t4425b.scala:20: error: object X can't be used as an extractor: The result type of an unapply method may not be Nothing println( "" match { case X(_) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:21: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing +t4425b.scala:21: error: object X can't be used as an extractor: The result type of an unapply method may not be Nothing println((X: Any) match { case X(_) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:22: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing +t4425b.scala:22: error: object X can't be used as an extractor: The result type of an unapply method may not be Nothing println( "" match { case X(_, _) => "ok" ; case _ => "fail" }) ^ -t4425b.scala:23: error: The result type of an unapply 
method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing +t4425b.scala:23: error: object X can't be used as an extractor: The result type of an unapply method may not be Nothing println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" }) ^ t4425b.scala:31: error: too many patterns for object X offering Nothing: expected 1, found 2 @@ -52,4 +52,4 @@ t4425b.scala:35: error: too many patterns for object X offering Nothing: expecte t4425b.scala:36: error: too many patterns for object X offering Nothing: expected 1, found 2 println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" }) ^ -16 errors found +16 errors diff --git a/test/files/neg/t4425b.scala b/test/files/neg/t4425b.scala index 861e9521f6a9..23c4855c7d06 100644 --- a/test/files/neg/t4425b.scala +++ b/test/files/neg/t4425b.scala @@ -1,7 +1,7 @@ object Test1 { object X { def unapply(x : String)(y: String) = throw new Exception } - def f1() { + def f1(): Unit = { println( "" match { case _ X _ => "ok" ; case _ => "fail" }) println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) println( "" match { case X(_) => "ok" ; case _ => "fail" }) @@ -14,7 +14,7 @@ object Test1 { object Test2 { object X { def unapply(x : String) = throw new Exception } - def f1() { + def f1(): Unit = { println( "" match { case _ X _ => "ok" ; case _ => "fail" }) println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) println( "" match { case X(_) => "ok" ; case _ => "fail" }) @@ -27,7 +27,7 @@ object Test2 { object Test3 { object X { def unapply(x : String) = None } - def f1() { + def f1(): Unit = { println( "" match { case _ X _ => "ok" ; case _ => "fail" }) println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) println( "" match { case X(_) => "ok" ; case _ => "fail" }) diff --git a/test/files/neg/t4431.check b/test/files/neg/t4431.check index 7896ec1a623f..50e28e8bb3b6 100644 --- a/test/files/neg/t4431.check +++ b/test/files/neg/t4431.check @@ 
-1,7 +1,9 @@ -t4431.scala:5: error: class BB needs to be abstract, since there is a deferred declaration of method f which is not implemented in a subclass +t4431.scala:5: error: class BB needs to be abstract. +No implementation found in a subclass for deferred declaration +def f(): Unit class BB extends B { def f (): Unit } ^ t4431.scala:8: error: trait cannot redefine final method from class AnyRef trait C { def wait (): Unit } ^ -two errors found +2 errors diff --git a/test/files/neg/t4440.check b/test/files/neg/t4440.check index 591ddadd60e3..ac396adc3789 100644 --- a/test/files/neg/t4440.check +++ b/test/files/neg/t4440.check @@ -10,6 +10,6 @@ t4440.scala:18: warning: The outer reference in this type test cannot be checked t4440.scala:19: warning: The outer reference in this type test cannot be checked at run time. case _: b.Inner => println("b") // this is the case we want ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t4440.scala b/test/files/neg/t4440.scala index 8cb6b47865b1..21d99b89d804 100644 --- a/test/files/neg/t4440.scala +++ b/test/files/neg/t4440.scala @@ -1,4 +1,4 @@ -// scalac: -unchecked -Xfatal-warnings +//> using options -Xfatal-warnings // // constructors used to drop outer fields when they were not accessed // however, how can you know (respecting separate compilation) that they're not accessed!? 
@@ -10,11 +10,11 @@ class Outer { final class Inner } object Test extends App { val a = new Outer val b = new Outer - (new a.Inner: Any) match { + (new a.Inner: Any @unchecked) match { case _: b.Inner => println("b") case _: a.Inner => println("a") // this is the case we want } - (new b.Inner: Any) match { + (new b.Inner: Any @unchecked) match { case _: a.Inner => println("a") case _: b.Inner => println("b") // this is the case we want } diff --git a/test/files/neg/t4457_1.check b/test/files/neg/t4457_1.check index c6b83c6ce587..6dc6b6f3adae 100644 --- a/test/files/neg/t4457_1.check +++ b/test/files/neg/t4457_1.check @@ -1,7 +1,23 @@ t4457_1.scala:27: error: ambiguous reference to overloaded definition, -both method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NZ[A])ImplicitConvAmbiguity2.AA[Float] -and method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NE[A])ImplicitConvAmbiguity2.AA[A] +both method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NZ[A]): ImplicitConvAmbiguity2.AA[Float] +and method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NE[A]): ImplicitConvAmbiguity2.AA[A] match argument types (Float) val x = aFunc(4F) ^ -one error found +t4457_1.scala:11: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.NE[Float]) [quickfixable] + implicit def conv1(i: Float) = new NE[Float] + ^ +t4457_1.scala:12: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.N[java.util.TooManyListenersException]) [quickfixable] + implicit def conv3(op: AA[java.util.TooManyListenersException]) = new N[java.util.TooManyListenersException] + ^ +t4457_1.scala:13: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.N[Float]) [quickfixable] + implicit def conv4(op: AA[Float]) = new N[Float] + ^ +t4457_1.scala:14: warning: Implicit definition should have 
explicit type (inferred ImplicitConvAmbiguity2.NZ[Float]) [quickfixable] + implicit def conv7(i: Float) = new NZ[Float] + ^ +t4457_1.scala:15: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.N[java.util.GregorianCalendar]) [quickfixable] + implicit def conv5(e: BB[java.util.GregorianCalendar]) = new N[java.util.GregorianCalendar] + ^ +5 warnings +1 error diff --git a/test/files/neg/t4457_1.scala b/test/files/neg/t4457_1.scala index 11f12379f687..11dae1097ff3 100644 --- a/test/files/neg/t4457_1.scala +++ b/test/files/neg/t4457_1.scala @@ -23,11 +23,11 @@ object ImplicitConvAmbiguity2 { def bFunc[T](e1: N[T]) = {} - def typeMe1 { + def typeMe1: Unit = { val x = aFunc(4F) bFunc(x) } - def typeMe2 { + def typeMe2: Unit = { bFunc(aFunc(4F)) } } diff --git a/test/files/neg/t4457_2.check b/test/files/neg/t4457_2.check index 770a355395da..0a6532dffbc9 100644 --- a/test/files/neg/t4457_2.check +++ b/test/files/neg/t4457_2.check @@ -1,13 +1,29 @@ t4457_2.scala:27: error: ambiguous reference to overloaded definition, -both method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NZ[A])ImplicitConvAmbiguity2.AA[A] -and method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NE[A])ImplicitConvAmbiguity2.AA[A] +both method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NZ[A]): ImplicitConvAmbiguity2.AA[A] +and method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NE[A]): ImplicitConvAmbiguity2.AA[A] match argument types (Float) val x = aFunc(4F) ^ t4457_2.scala:31: error: ambiguous reference to overloaded definition, -both method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NZ[A])ImplicitConvAmbiguity2.AA[A] -and method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NE[A])ImplicitConvAmbiguity2.AA[A] +both method aFunc in object ImplicitConvAmbiguity2 of type [A](a: 
ImplicitConvAmbiguity2.NZ[A]): ImplicitConvAmbiguity2.AA[A] +and method aFunc in object ImplicitConvAmbiguity2 of type [A](a: ImplicitConvAmbiguity2.NE[A]): ImplicitConvAmbiguity2.AA[A] match argument types (Float) bFunc(aFunc(4F)) ^ -two errors found +t4457_2.scala:11: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.NE[Float]) [quickfixable] + implicit def conv1(i: Float) = new NE[Float] + ^ +t4457_2.scala:12: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.N[java.util.TooManyListenersException]) [quickfixable] + implicit def conv3(op: AA[java.util.TooManyListenersException]) = new N[java.util.TooManyListenersException] + ^ +t4457_2.scala:13: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.N[Float]) [quickfixable] + implicit def conv4(op: AA[Float]) = new N[Float] + ^ +t4457_2.scala:14: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.NZ[Float]) [quickfixable] + implicit def conv7(i: Float) = new NZ[Float] + ^ +t4457_2.scala:15: warning: Implicit definition should have explicit type (inferred ImplicitConvAmbiguity2.N[java.util.GregorianCalendar]) [quickfixable] + implicit def conv5(e: BB[java.util.GregorianCalendar]) = new N[java.util.GregorianCalendar] + ^ +5 warnings +2 errors diff --git a/test/files/neg/t4457_2.scala b/test/files/neg/t4457_2.scala index f3a170f1f291..f2664cc14ee6 100644 --- a/test/files/neg/t4457_2.scala +++ b/test/files/neg/t4457_2.scala @@ -23,11 +23,11 @@ object ImplicitConvAmbiguity2 { def bFunc[T](e1: N[T]) = {} - def typeMe2 { + def typeMe2: Unit = { val x = aFunc(4F) bFunc(x) } - def typeMe1 { + def typeMe1: Unit = { bFunc(aFunc(4F)) } } diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check index 7a7618a11403..05838276c38c 100644 --- a/test/files/neg/t4460a.check +++ b/test/files/neg/t4460a.check @@ -1,4 +1,4 @@ t4460a.scala:6: error: constructor invokes itself def 
this() = this() // was binding to Predef. !! ^ -one error found +1 error diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check index 9a621dbd5cda..842ef80525fb 100644 --- a/test/files/neg/t4460b.check +++ b/test/files/neg/t4460b.check @@ -1,4 +1,4 @@ t4460b.scala:7: error: constructor invokes itself def this() = this() // was binding to Predef. !! - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t4460c.check b/test/files/neg/t4460c.check index 4e96711b8bb6..688e394d888f 100644 --- a/test/files/neg/t4460c.check +++ b/test/files/neg/t4460c.check @@ -1,7 +1,7 @@ -t4460c.scala:4: error: overloaded method constructor B with alternatives: +t4460c.scala:4: error: multiple constructors for B with alternatives: (a: String)B (x: Int)B - cannot be applied to () + cannot be invoked with no arguments def this(a: String) = this() ^ -one error found +1 error diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check index 708fcfbd294f..dc92d7c8ae5e 100644 --- a/test/files/neg/t4515.check +++ b/test/files/neg/t4515.check @@ -13,4 +13,4 @@ t4515.scala:37: error: type mismatch; required: Main.PushEventContext[_$2] handler.onEvent(target, ctx.getEvent, node, ctx) ^ -three errors found +3 errors diff --git a/test/files/neg/t4515.scala b/test/files/neg/t4515.scala index 4efe45f4fbe4..8965b96867e2 100644 --- a/test/files/neg/t4515.scala +++ b/test/files/neg/t4515.scala @@ -30,7 +30,7 @@ object Main { Nil } - def onTimer(target: Target) { + def onTimer(target: Target): Unit = { val pushService = TimerPushService.get for ((node, handler) <- handlers) { for (ctx <- pushService.pollEvents(node)) { @@ -38,4 +38,4 @@ object Main { } } } -} \ No newline at end of file +} diff --git a/test/files/neg/t452.check b/test/files/neg/t452.check index aac663068ebc..99fb09259844 100644 --- a/test/files/neg/t452.check +++ b/test/files/neg/t452.check @@ -3,4 +3,4 @@ t452.scala:3: error: type mismatch; required: Test.Foo def this() = this(this); ^ -one error found +1 
error diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check index 0a3e48bcca40..958342344ce4 100644 --- a/test/files/neg/t4541.check +++ b/test/files/neg/t4541.check @@ -1,7 +1,7 @@ -t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int] +t4541.scala:11: error: variable data in class Sparse cannot be accessed as a member of Sparse[Int] from class Sparse$mcI$sp Access to protected variable data not permitted because prefix type Sparse[Int] does not conform to class Sparse$mcI$sp where the access takes place that.data ^ -one error found +1 error diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check index d1813f1f95ab..b963ee4582d0 100644 --- a/test/files/neg/t4541b.check +++ b/test/files/neg/t4541b.check @@ -1,7 +1,7 @@ -t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int] +t4541b.scala:13: error: variable data in class SparseArray cannot be accessed as a member of SparseArray[Int] from class SparseArray$mcI$sp Access to protected variable data not permitted because prefix type SparseArray[Int] does not conform to class SparseArray$mcI$sp where the access takes place use(that.data.clone) ^ -one error found +1 error diff --git a/test/files/neg/t4568.check b/test/files/neg/t4568.check index f94d69948693..ecc3c46a03c7 100644 --- a/test/files/neg/t4568.check +++ b/test/files/neg/t4568.check @@ -1,4 +1,8 @@ t4568.scala:8: error: recursive method isSubListOf needs result type case h :: t => y.contains(h) && (t.isSubListOf(y.drop(y.indexOf(h) + 1))) ^ -one error found +t4568.scala:2: warning: Implicit definition should have explicit type (inferred SubList.SubListable[A]) [quickfixable] + implicit def sublistable[A](x: List[A]) = new SubListable(x) + ^ +1 warning +1 error diff --git a/test/files/neg/t4568.scala b/test/files/neg/t4568.scala index 806775926264..6fda28736482 100644 --- a/test/files/neg/t4568.scala +++ b/test/files/neg/t4568.scala @@ -10,4 +10,4 @@ object 
SubList { } } -} \ No newline at end of file +} diff --git a/test/files/neg/t4584.check b/test/files/neg/t4584.check index 97d07afa0edb..9b3fc3a676dd 100644 --- a/test/files/neg/t4584.check +++ b/test/files/neg/t4584.check @@ -1,7 +1,7 @@ -t4584.scala:1: error: error in unicode escape -class A { val \u2 - ^ -t4584.scala:1: error: illegal character '\uffff' -class A { val \u2 - ^ -two errors found +t4584.scala:1: error: unclosed multi-line string literal +class A { val x = """\u2 + ^ +t4584.scala:1: error: illegal start of simple expression +class A { val x = """\u2 + ^ +2 errors diff --git a/test/files/neg/t4584.scala b/test/files/neg/t4584.scala index b34aba91a2ba..741746649a01 100644 --- a/test/files/neg/t4584.scala +++ b/test/files/neg/t4584.scala @@ -1 +1 @@ -class A { val \u2 \ No newline at end of file +class A { val x = """\u2 diff --git a/test/files/neg/t4612.check b/test/files/neg/t4612.check index 2b6201b57408..0b9d566e2d39 100644 --- a/test/files/neg/t4612.check +++ b/test/files/neg/t4612.check @@ -3,4 +3,4 @@ t4612.scala:13: error: type mismatch; required: _1 def foo = new Bob ^ -one error found +1 error diff --git a/test/files/neg/t464-neg.check b/test/files/neg/t464-neg.check index e822e7fb6bfe..d51d4cd3f852 100644 --- a/test/files/neg/t464-neg.check +++ b/test/files/neg/t464-neg.check @@ -1,16 +1,16 @@ t464-neg.scala:7: error: not found: value f1 f1() ^ -t464-neg.scala:8: error: method f1 in class A cannot be accessed in A +t464-neg.scala:8: error: method f1 in class A cannot be accessed as a member of A from class B super.f1() ^ t464-neg.scala:9: error: value f2 is not a member of B def otherb(b2: B) = b2.f2() ^ -t464-neg.scala:10: error: method f3 in class A cannot be accessed in B +t464-neg.scala:10: error: method f3 in class A cannot be accessed as a member of B from class B f3() ^ -t464-neg.scala:11: error: method f3 in class A cannot be accessed in A +t464-neg.scala:11: error: method f3 in class A cannot be accessed as a member of A from class 
B super.f3() ^ -5 errors found +5 errors diff --git a/test/files/neg/t464-neg.scala b/test/files/neg/t464-neg.scala index 138fa8e8693e..c2f027a21a94 100644 --- a/test/files/neg/t464-neg.scala +++ b/test/files/neg/t464-neg.scala @@ -1,7 +1,7 @@ class A { - private[this] def f1() {} - protected[this] def f2() {} - private[A] def f3() {} + private[this] def f1(): Unit = {} + protected[this] def f2(): Unit = {} + private[A] def f3(): Unit = {} } class B extends A { f1() @@ -9,4 +9,4 @@ class B extends A { def otherb(b2: B) = b2.f2() f3() super.f3() -} \ No newline at end of file +} diff --git a/test/files/neg/t4691_exhaust_extractor.check b/test/files/neg/t4691_exhaust_extractor.check index c907a35a1217..a495a11a3876 100644 --- a/test/files/neg/t4691_exhaust_extractor.check +++ b/test/files/neg/t4691_exhaust_extractor.check @@ -1,15 +1,15 @@ -t4691_exhaust_extractor.scala:18: warning: match may not be exhaustive. +t4691_exhaust_extractor.scala:19: warning: match may not be exhaustive. It would fail on the following input: Bar3() def f1(x: Foo) = x match { ^ -t4691_exhaust_extractor.scala:24: warning: match may not be exhaustive. +t4691_exhaust_extractor.scala:25: warning: match may not be exhaustive. It would fail on the following input: Bar3() def f2(x: Foo) = x match { ^ -t4691_exhaust_extractor.scala:30: warning: match may not be exhaustive. +t4691_exhaust_extractor.scala:31: warning: match may not be exhaustive. It would fail on the following input: Bar3() def f3(x: Foo) = x match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t4691_exhaust_extractor.scala b/test/files/neg/t4691_exhaust_extractor.scala index 8448f9e29d68..c1f8bd498ab5 100644 --- a/test/files/neg/t4691_exhaust_extractor.scala +++ b/test/files/neg/t4691_exhaust_extractor.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed trait Foo class Bar1 extends Foo class Bar2 extends Foo diff --git a/test/files/neg/t4701.check b/test/files/neg/t4701.check index f5f3ae20ec3c..b0064a2dcd55 100644 --- a/test/files/neg/t4701.check +++ b/test/files/neg/t4701.check @@ -1,6 +1,6 @@ t4701.scala:9: error: type mismatch; - found : Int + found : Int(1) required: String hasType[HL[String]](nnn :: HN) // type mismatch error should have position at `nnn` ^ -one error found +1 error diff --git a/test/files/neg/t4701.scala b/test/files/neg/t4701.scala index 54ec087dd07f..f14cae85fd63 100644 --- a/test/files/neg/t4701.scala +++ b/test/files/neg/t4701.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos +// trait HL[A] object HN { def :: [A](x: A): HL[A] = new HL[A] {} @@ -7,4 +7,4 @@ object Test { import Predef.{identity => hasType} final val nnn = 1 hasType[HL[String]](nnn :: HN) // type mismatch error should have position at `nnn` -} \ No newline at end of file +} diff --git a/test/files/neg/t4727.check b/test/files/neg/t4727.check deleted file mode 100644 index a17cdde04417..000000000000 --- a/test/files/neg/t4727.check +++ /dev/null @@ -1,5 +0,0 @@ -t4727.scala:5: error: an expression of type Null is ineligible for implicit conversion -Error occurred in an application involving default arguments. 
- new C[Int] - ^ -one error found diff --git a/test/files/neg/t4727.scala b/test/files/neg/t4727.scala deleted file mode 100644 index 40c06713caa6..000000000000 --- a/test/files/neg/t4727.scala +++ /dev/null @@ -1,7 +0,0 @@ -class C[T](x : T = null) - -object Test { - def main(args: Array[String]): Unit = { - new C[Int] - } -} diff --git a/test/files/neg/t4728.check b/test/files/neg/t4728.check index c6ef182d34f1..93c3a06808d9 100644 --- a/test/files/neg/t4728.check +++ b/test/files/neg/t4728.check @@ -1,7 +1,7 @@ t4728.scala:10: error: ambiguous reference to overloaded definition, -both method f in object Ambiguous of type (ys: Y*)Int -and method f in object Ambiguous of type (x: X)Int +both method f in object Ambiguous of type (ys: Y*): Int +and method f in object Ambiguous of type (x: X): Int match argument types (Y) and expected result type Any println(Ambiguous.f(new Y)) ^ -one error found +1 error diff --git a/test/files/neg/t4728.scala b/test/files/neg/t4728.scala index 36f7860613f5..83d3fc7c2e5b 100644 --- a/test/files/neg/t4728.scala +++ b/test/files/neg/t4728.scala @@ -8,4 +8,4 @@ object Ambiguous { object Test extends App { println(Ambiguous.f(new X)) println(Ambiguous.f(new Y)) -} \ No newline at end of file +} diff --git a/test/files/neg/t473.check b/test/files/neg/t473.check index a14222c9624b..9f5bc2c1d1f5 100644 --- a/test/files/neg/t473.check +++ b/test/files/neg/t473.check @@ -1,4 +1,4 @@ t473.scala:3: error: super constructor cannot be passed a self reference unless parameter is declared by-name case object Voop extends Foo(Voop) ^ -one error found +1 error diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check index b8937ebac986..ee5967c2cd0c 100644 --- a/test/files/neg/t4749.check +++ b/test/files/neg/t4749.check @@ -1,34 +1,49 @@ -t4749.scala:4: warning: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program. 
- Reason: main method must have exact signature (Array[String])Unit +t4749.scala:5: warning: not a valid main method for bippy.Fail1, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + def main(args: Array[String]): Any = () ^ -t4749.scala:7: warning: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program. - Reason: main methods cannot be generic. - object Fail2 { - ^ -t4749.scala:12: warning: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program. - Reason: main methods cannot refer to type parameters or abstract types. +t4749.scala:9: warning: not a valid main method for bippy.Fail2, + because main methods cannot be generic. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main[T](args: Array[String]): T = null.asInstanceOf[T] + ^ +t4749.scala:13: warning: not a valid main method for bippy.Fail3, + because main methods cannot refer to type parameters or abstract types. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + def main(args: Array[String]): T = null.asInstanceOf[T] ^ -t4749.scala:17: warning: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program. +t4749.scala:18: warning: Fail4 has a valid main method (args: Array[String]): Unit, + but bippy.Fail4 will not have an entry point on the JVM. Reason: companion is a trait, which means no static forwarder can be generated. object Fail4 { ^ -t4749.scala:22: warning: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program. 
+t4749.scala:23: warning: Fail5 has a valid main method (args: Array[String]): Unit, + but bippy.Fail5 will not have an entry point on the JVM. Reason: companion contains its own main method, which means no static forwarder can be generated. object Fail5 extends Fail5 { } ^ -t4749.scala:27: warning: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program. +t4749.scala:28: warning: Fail6 has a valid main method (args: Array[String]): Unit, + but bippy.Fail6 will not have an entry point on the JVM. Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated. object Fail6 { ^ -t4749.scala:43: warning: Win3 has a main method with parameter type Array[String], but bippy.Win3 will not be a runnable program. - Reason: main method must have exact signature (Array[String])Unit +t4749.scala:44: warning: not a valid main method for bippy.Win3, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + object Win3 extends WinBippy[Unit] { } ^ -error: No warnings can be incurred under -Xfatal-warnings. -7 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+7 warnings +1 error diff --git a/test/files/neg/t4749.scala b/test/files/neg/t4749.scala index 180f66e7298c..f9521d47ddce 100644 --- a/test/files/neg/t4749.scala +++ b/test/files/neg/t4749.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// package bippy { object Fail1 { def main(args: Array[String]): Any = () diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check index c76829e227b1..4088131de277 100644 --- a/test/files/neg/t4762.check +++ b/test/files/neg/t4762.check @@ -1,9 +1,13 @@ -t4762.scala:16: warning: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B - you may want to give them distinct names. +t4762.scala:17: warning: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B; you may want to give them distinct names. /* (99,99) */ (this.x, this.y), ^ -t4762.scala:49: warning: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names. +t4762.scala:50: warning: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived; you may want to give them distinct names. class Derived( x : Int ) extends Base( x ) { override def toString = x.toString } ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-two warnings found -one error found +t4762.scala:13: error: weaker access privileges in overriding +val y: Int (defined in class A) + override should not be private + private[this] def y: Int = 99 + ^ +2 warnings +1 error diff --git a/test/files/neg/t4762.scala b/test/files/neg/t4762.scala index dd723ce0a536..c56924bea198 100644 --- a/test/files/neg/t4762.scala +++ b/test/files/neg/t4762.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Xlint -Xfatal-warnings +// // https://github.com/scala/bug/issues/4762 // In A, x and y are -1. @@ -45,7 +46,7 @@ object Test { } class bug4762 { - class Base( var x : Int ) { def increment() { x = x + 1 } } + class Base( var x : Int ) { def increment(): Unit = { x = x + 1 } } class Derived( x : Int ) extends Base( x ) { override def toString = x.toString } val derived = new Derived( 1 ) diff --git a/test/files/neg/t4818.check b/test/files/neg/t4818.check index a5e15e456b82..17294c5a1e5d 100644 --- a/test/files/neg/t4818.check +++ b/test/files/neg/t4818.check @@ -2,5 +2,5 @@ t4818.scala:4: error: type mismatch; found : Int(5) required: Nothing def f(x: Any) = x match { case Fn(f) => f(5) } - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t4831.check b/test/files/neg/t4831.check index 3b8b836f05a5..fc78eed6014a 100644 --- a/test/files/neg/t4831.check +++ b/test/files/neg/t4831.check @@ -4,4 +4,4 @@ import O.b and import O.{a=>b} println(b) ^ -one error found +1 error diff --git a/test/files/neg/t4842.check b/test/files/neg/t4842.check index b53bbdbd15d0..fbde85438267 100644 --- a/test/files/neg/t4842.check +++ b/test/files/neg/t4842.check @@ -4,4 +4,4 @@ t4842.scala:2: error: self constructor arguments cannot reference unconstructed t4842.scala:6: error: self constructor arguments cannot reference unconstructed `this` def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error ^ -two errors found +2 errors diff --git a/test/files/neg/t4851.check 
b/test/files/neg/t4851.check index 3a33f4ddd09c..d61c1b6d1457 100644 --- a/test/files/neg/t4851.check +++ b/test/files/neg/t4851.check @@ -1,51 +1,51 @@ -S.scala:3: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous. - signature: J(x: Any): J +S.scala:4: warning: adaptation of an empty argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous + signature: J(x: Object): J given arguments: after adaptation: new J((): Unit) val x1 = new J ^ -S.scala:4: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous. - signature: J(x: Any): J +S.scala:5: warning: adaptation of an empty argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous + signature: J(x: Object): J given arguments: after adaptation: new J((): Unit) val x2 = new J() ^ -S.scala:5: warning: Adapting argument list by creating a 5-tuple: this may not be what you want. - signature: J(x: Any): J +S.scala:6: warning: adapted the argument list to the expected 5-tuple: add additional parens instead + signature: J(x: Object): J given arguments: 1, 2, 3, 4, 5 - after adaptation: new J((1, 2, 3, 4, 5): (Int, Int, Int, Int, Int)) + after adaptation: new J((1, 2, 3, 4, 5): (Int, Int, Int, Int, Int)) [quickfixable] val x3 = new J(1, 2, 3, 4, 5) ^ -S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. +S.scala:8: warning: adapted the argument list to the expected 3-tuple: add additional parens instead signature: Some.apply[A](value: A): Some[A] given arguments: 1, 2, 3 - after adaptation: Some((1, 2, 3): (Int, Int, Int)) + after adaptation: Some((1, 2, 3): (Int, Int, Int)) [quickfixable] val y1 = Some(1, 2, 3) ^ -S.scala:8: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. 
+S.scala:9: warning: adapted the argument list to the expected 3-tuple: add additional parens instead signature: Some(value: A): Some[A] given arguments: 1, 2, 3 - after adaptation: new Some((1, 2, 3): (Int, Int, Int)) + after adaptation: new Some((1, 2, 3): (Int, Int, Int)) [quickfixable] val y2 = new Some(1, 2, 3) ^ -S.scala:10: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. +S.scala:11: warning: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want signature: J2(x: T): J2[T] given arguments: after adaptation: new J2((): Unit) val z1 = new J2 ^ -S.scala:11: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. +S.scala:12: warning: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want signature: J2(x: T): J2[T] given arguments: after adaptation: new J2((): Unit) val z2 = new J2() ^ -S.scala:15: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. +S.scala:16: warning: adapted the argument list to the expected 3-tuple: add additional parens instead signature: Test.anyId(a: Any): Any given arguments: 1, 2, 3 - after adaptation: Test.anyId((1, 2, 3): (Int, Int, Int)) + after adaptation: Test.anyId((1, 2, 3): (Int, Int, Int)) [quickfixable] val w1 = anyId(1, 2 ,3) ^ -error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+8 warnings +1 error diff --git a/test/files/neg/t4851/S.scala b/test/files/neg/t4851/S.scala index c33d01a166c7..3ebe93a9d775 100644 --- a/test/files/neg/t4851/S.scala +++ b/test/files/neg/t4851/S.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint:adapted-args -Xfatal-warnings -deprecation +//> using options -Xlint:adapted-args -Xfatal-warnings -deprecation +// object Test { val x1 = new J val x2 = new J() diff --git a/test/files/neg/t4877.check b/test/files/neg/t4877.check index 5a2413ca8b28..fc7fe39bed48 100644 --- a/test/files/neg/t4877.check +++ b/test/files/neg/t4877.check @@ -19,4 +19,4 @@ t4877.scala:17: error: type mismatch; (which expands to) AnyRef{type Mom; def bar(x: Int): this.Mom; def bippy(): List[this.Mom]} val x: Bippy = new AnyRef { ^ -four errors found +4 errors diff --git a/test/files/neg/t4877.scala b/test/files/neg/t4877.scala index 5d978775186e..8968a8a8f6d4 100644 --- a/test/files/neg/t4877.scala +++ b/test/files/neg/t4877.scala @@ -19,4 +19,4 @@ class B { def bar(x: Int) = 55 def bippy() = List(bar(55)) } -} \ No newline at end of file +} diff --git a/test/files/neg/t4879.check b/test/files/neg/t4879.check index c7edd583c896..eef3c1bbce72 100644 --- a/test/files/neg/t4879.check +++ b/test/files/neg/t4879.check @@ -10,4 +10,4 @@ t4879.scala:10: error: pattern type is incompatible with expected type; Note: if you intended to match against the class, try `case D(_,_,_)` case D => true ^ -two errors found +2 errors diff --git a/test/files/neg/t4882.check b/test/files/neg/t4882.check index 0aafc8277067..bf1ba558a946 100644 --- a/test/files/neg/t4882.check +++ b/test/files/neg/t4882.check @@ -1,4 +1,4 @@ -t4882.scala:2: error: `implicit' modifier not allowed for constructors +t4882.scala:2: error: `implicit` modifier not allowed for constructors implicit def this(a: String) = this(a.toInt) ^ -one error found +1 error diff --git a/test/files/neg/t4889.check b/test/files/neg/t4889.check new file mode 100644 index 000000000000..b5c38dc11bac --- /dev/null +++ 
b/test/files/neg/t4889.check @@ -0,0 +1,8 @@ +t4889.scala:19: error: could not find implicit value for parameter ma1: t4889.MatrixAdder[Int,[S]t4889.SparseMatrix[S]] + m1.foo + ^ +t4889.scala:14: warning: Implicit definition should have explicit type (inferred t4889.MatrixAdder[S,R]) [quickfixable] + implicit def adderImplicit[S, R[s] <: Matrix[s, R]] = new MatrixAdder[S, R] { + ^ +1 warning +1 error diff --git a/test/files/neg/t4889.scala b/test/files/neg/t4889.scala new file mode 100644 index 000000000000..3f71cb818d3f --- /dev/null +++ b/test/files/neg/t4889.scala @@ -0,0 +1,21 @@ +object t4889 { + + import scala.language.higherKinds + trait Matrix[S, +Repr[s] <: Matrix[s, Repr]] { + def foo[R[S] >: Repr[S]](implicit ma1: MatrixAdder[S, R]) {} + } + + trait SparseMatrix[S] extends Matrix[S, SparseMatrix] + + trait MatrixAdder[S, -R[_]] { + def addTo(m: R[S]): Unit + } + + implicit def adderImplicit[S, R[s] <: Matrix[s, R]] = new MatrixAdder[S, R] { + def addTo(m: R[S]) = { } + } + + val m1 = new SparseMatrix[Int] { } + m1.foo + +} diff --git a/test/files/neg/t4928.check b/test/files/neg/t4928.check index 18a5d57a62cb..7bb8b786da54 100644 --- a/test/files/neg/t4928.check +++ b/test/files/neg/t4928.check @@ -1,5 +1,4 @@ -t4928.scala:3: error: parameter 'a' is already specified at parameter position 1 -Note that 'z' is not a parameter name of the invoked method. 
+t4928.scala:3: error: unknown parameter name: z f(z = 0, a = 1) - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t4940.check b/test/files/neg/t4940.check new file mode 100644 index 000000000000..3b26c3117d0c --- /dev/null +++ b/test/files/neg/t4940.check @@ -0,0 +1,31 @@ +t4940.scala:3: error: type mismatch; + found : String("x") + required: Int + val f: PartialFunction[String, Int] = (x: Int) => x match { case "x" => 3 } // error + ^ +t4940.scala:3: error: type mismatch; + found : scala.runtime.AbstractPartialFunction[Int,Int] with java.io.Serializable + required: PartialFunction[String,Int] + val f: PartialFunction[String, Int] = (x: Int) => x match { case "x" => 3 } // error + ^ +t4940.scala:5: error: type mismatch; + found : String("x") + required: X + val g: PartialFunction[String, Int] = (x: X) => x match { case "x" => 3 } // error + ^ +t4940.scala:5: error: type mismatch; + found : scala.runtime.AbstractPartialFunction[X,Int] with java.io.Serializable + required: PartialFunction[String,Int] + val g: PartialFunction[String, Int] = (x: X) => x match { case "x" => 3 } // error + ^ +t4940.scala:7: error: type mismatch; + found : scala.runtime.AbstractPartialFunction[Double,Int] with java.io.Serializable + required: PartialFunction[Int,Int] + val m: PartialFunction[Int, Int] = (x: Double) => x match { case 3.14 => 3 } // error + ^ +t4940.scala:9: error: type mismatch; + found : scala.runtime.AbstractPartialFunction[X,Int] with java.io.Serializable + required: PartialFunction[Y,Int] + val g3: PartialFunction[Y, Int] = (x: X) => x match { case _: X => 3 } // error + ^ +6 errors diff --git a/test/files/neg/t4940.scala b/test/files/neg/t4940.scala new file mode 100644 index 000000000000..6c8d1c7bafd4 --- /dev/null +++ b/test/files/neg/t4940.scala @@ -0,0 +1,21 @@ +//> using options -Werror -Xlint +class C { + val f: PartialFunction[String, Int] = (x: Int) => x match { case "x" => 3 } // error + + val g: PartialFunction[String, Int] = (x: X) => x match { 
case "x" => 3 } // error + + val m: PartialFunction[Int, Int] = (x: Double) => x match { case 3.14 => 3 } // error + + val g3: PartialFunction[Y, Int] = (x: X) => x match { case _: X => 3 } // error +} + +class Y +class X extends Y + +object Test extends App { + val c = new C + println(c.f.applyOrElse("hello, world", (s: String) => -1)) + println(c.f.applyOrElse("x", (s: String) => -1)) + println(c.g.applyOrElse("hello, world", (s: String) => -1)) + println(c.m.applyOrElse(42, (n: Int) => -1)) +} diff --git a/test/files/neg/t4987.check b/test/files/neg/t4987.check index 8d7344d27b55..282110aa7a49 100644 --- a/test/files/neg/t4987.check +++ b/test/files/neg/t4987.check @@ -1,4 +1,4 @@ -t4987.scala:2: error: constructor Foo2 in class Foo2 cannot be accessed in object Bar2 +t4987.scala:2: error: constructor Foo2 in class Foo2 cannot be accessed in object Bar2 from object Bar2 object Bar2 { new Foo2(0, 0) } ^ -one error found +1 error diff --git a/test/files/neg/t4989.check b/test/files/neg/t4989.check index 814507fc3ffb..a3452c9ef574 100644 --- a/test/files/neg/t4989.check +++ b/test/files/neg/t4989.check @@ -4,4 +4,4 @@ t4989.scala:14: error: method print in class A cannot be directly accessed from t4989.scala:18: error: method print in class A cannot be directly accessed from trait T because class B redeclares it as abstract override def print(): String = super.print() // should be an error ^ -two errors found +2 errors diff --git a/test/files/neg/t500.check b/test/files/neg/t500.check index b3f5c8597875..4f83731d1d55 100644 --- a/test/files/neg/t500.check +++ b/test/files/neg/t500.check @@ -1,4 +1,4 @@ t500.scala:3: error: lower bound X does not conform to upper bound Y type T >: X <: Y; ^ -one error found +1 error diff --git a/test/files/neg/t501.check b/test/files/neg/t501.check index 3e3bf390757a..7b7a13dfff3b 100644 --- a/test/files/neg/t501.check +++ b/test/files/neg/t501.check @@ -1,4 +1,4 @@ t501.scala:3: error: lower bound X does not conform to upper bound 
Y abstract class I { type T >: X <: Y; } ^ -one error found +1 error diff --git a/test/files/neg/t5031.check b/test/files/neg/t5031.check index 2f1090c32158..4606827a6b92 100644 --- a/test/files/neg/t5031.check +++ b/test/files/neg/t5031.check @@ -2,4 +2,4 @@ package.scala:2: error: Companions 'class Test' and 'object Test' must be define Found in t5031/package.scala and t5031/Id.scala class Test ^ -one error found +1 error diff --git a/test/files/neg/t5031b.check b/test/files/neg/t5031b.check index 3bc2284a4d79..00e4d7c6916c 100644 --- a/test/files/neg/t5031b.check +++ b/test/files/neg/t5031b.check @@ -2,4 +2,4 @@ b.scala:3: error: Companions 'class Bippy' and 'object Bippy' must be defined in Found in t5031b/a.scala and t5031b/b.scala object Bippy ^ -one error found +1 error diff --git a/test/files/neg/t5044.check b/test/files/neg/t5044.check deleted file mode 100644 index dc3708123f75..000000000000 --- a/test/files/neg/t5044.check +++ /dev/null @@ -1,9 +0,0 @@ -t5044.scala:7: error: recursive value a needs type - val id = m(a) - ^ -t5044.scala:6: warning: failed to determine if 'id = ...' is a named argument or an assignment expression. -an explicit type is required for the definition mentioned in the error message above. 
- val a = foo(id = 1) - ^ -one warning found -one error found diff --git a/test/files/neg/t5060.check b/test/files/neg/t5060.check index 09b2d9a4b1b5..a28481d66b24 100644 --- a/test/files/neg/t5060.check +++ b/test/files/neg/t5060.check @@ -1,7 +1,7 @@ -t5060.scala:2: error: covariant type T occurs in contravariant position in type => AnyRef{def contains(x: T): Unit} of value foo0 +t5060.scala:2: error: covariant type T occurs in contravariant position in type AnyRef{def contains(x: T): Unit} of value foo0 val foo0 = { ^ -t5060.scala:6: error: covariant type T occurs in contravariant position in type => AnyRef{def contains(x: T): Unit} of method foo1 +t5060.scala:6: error: covariant type T occurs in contravariant position in type AnyRef{def contains(x: T): Unit} of method foo1 def foo1 = { ^ -two errors found +2 errors diff --git a/test/files/neg/t5063.check b/test/files/neg/t5063.check index c6e553c1b5f6..9b3e7ef1c8c5 100644 --- a/test/files/neg/t5063.check +++ b/test/files/neg/t5063.check @@ -1,4 +1,4 @@ t5063.scala:2: error: value + is not a member of AnyRef super.+("") ^ -one error found +1 error diff --git a/test/files/neg/t5067.check b/test/files/neg/t5067.check index 32491766d75c..85acae5cc804 100644 --- a/test/files/neg/t5067.check +++ b/test/files/neg/t5067.check @@ -3,4 +3,4 @@ t5067.scala:3: error: type mismatch; required: (Int, Int) => Int override def tupled: (Int, Int) => Int = super.tupled ^ -one error found +1 error diff --git a/test/files/neg/t5078.check b/test/files/neg/t5078.check index 8f66445b03c8..5ec7e8cbfd90 100644 --- a/test/files/neg/t5078.check +++ b/test/files/neg/t5078.check @@ -1,13 +1,9 @@ -t5078.scala:7: error: an unapply method must accept a single argument. 
+t5078.scala:7: error: object Foo is not a case class, nor does it have a valid unapply/unapplySeq member +Note: def unapply: Option[Int] exists in object Foo, but it cannot be used as an extractor: an unapply method must accept a single argument val Foo(x1) = 1 ^ -t5078.scala:7: error: recursive value x1 needs type - val Foo(x1) = 1 - ^ -t5078.scala:8: error: an unapply method must accept a single argument. +t5078.scala:8: error: object Foo2 is not a case class, nor does it have a valid unapply/unapplySeq member +Note: def unapply(): Option[Int] exists in object Foo2, but it cannot be used as an extractor: an unapply method must accept a single argument val Foo2(y2) = 2 ^ -t5078.scala:8: error: recursive value y2 needs type - val Foo2(y2) = 2 - ^ -four errors found +2 errors diff --git a/test/files/neg/t5091.check b/test/files/neg/t5091.check deleted file mode 100644 index 156f695f41d8..000000000000 --- a/test/files/neg/t5091.check +++ /dev/null @@ -1,9 +0,0 @@ -t5091.scala:8: error: recursive value xxx needs type - val param = bar(xxx) - ^ -t5091.scala:7: warning: failed to determine if 'param = ...' is a named argument or an assignment expression. -an explicit type is required for the definition mentioned in the error message above. 
- val xxx = foo(param = null) - ^ -one warning found -one error found diff --git a/test/files/neg/t5093.check b/test/files/neg/t5093.check index b794f023e56d..4f65a2cca455 100644 --- a/test/files/neg/t5093.check +++ b/test/files/neg/t5093.check @@ -4,4 +4,4 @@ t5093.scala:2: error: illegal cyclic reference involving type C t5093.scala:2: error: cyclic aliasing or subtyping involving type C def f[C[X] <: C[X]](l: C[_]) = l.x ^ -two errors found +2 errors diff --git a/test/files/neg/t510.check b/test/files/neg/t510.check index 355a6cdf0748..d45c1ed02598 100644 --- a/test/files/neg/t510.check +++ b/test/files/neg/t510.check @@ -1,4 +1,4 @@ t510.scala:19: error: cyclic aliasing or subtyping involving type T def g(t: e.T): Unit = { ^ -one error found +1 error diff --git a/test/files/neg/t5106.check b/test/files/neg/t5106.check index ac16041cf7a8..709c5fa4e274 100644 --- a/test/files/neg/t5106.check +++ b/test/files/neg/t5106.check @@ -8,4 +8,4 @@ t5106.scala:3: error: type mismatch; required: Int val (n, l): (String, Int) = (4, "") ^ -two errors found +2 errors diff --git a/test/files/neg/t5106.scala b/test/files/neg/t5106.scala index 419b430ff1d8..6508662fd48e 100644 --- a/test/files/neg/t5106.scala +++ b/test/files/neg/t5106.scala @@ -1,5 +1,5 @@ class A { - def f { + def f: Unit = { val (n, l): (String, Int) = (4, "") } } diff --git a/test/files/neg/t512.check b/test/files/neg/t512.check index 051e5ee19f1d..e95f4f227e22 100644 --- a/test/files/neg/t512.check +++ b/test/files/neg/t512.check @@ -4,4 +4,4 @@ t512.scala:3: error: not found: value something t512.scala:4: error: not found: value something_else something_else; ^ -two errors found +2 errors diff --git a/test/files/neg/t5120.check b/test/files/neg/t5120.check index b6a3cb96aa18..c4ee72bc8caa 100644 --- a/test/files/neg/t5120.check +++ b/test/files/neg/t5120.check @@ -9,4 +9,4 @@ t5120.scala:25: error: type mismatch; (which expands to) _1 List(str, num).foreach(h => h.f1 = new Thread()) ^ -two errors found +2 
errors diff --git a/test/files/neg/t5120.scala b/test/files/neg/t5120.scala index 0df67bc09bec..4e61b4170f0e 100644 --- a/test/files/neg/t5120.scala +++ b/test/files/neg/t5120.scala @@ -23,7 +23,7 @@ object Test2 { val str = new Holder("t1", "t2") val num = new Holder(1, 2) List(str, num).foreach(h => h.f1 = new Thread()) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(str.f1) } } diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check index f48be110d40e..e56b728c18d7 100644 --- a/test/files/neg/t5148.check +++ b/test/files/neg/t5148.check @@ -1,13 +1,7 @@ -t5148.scala:4: error: Symbol 'type scala.tools.nsc.interpreter.IMain.Request.Wrapper' is missing from the classpath. -This symbol is required by 'value scala.tools.nsc.interpreter.Imports.wrapper'. -Make sure that type Wrapper is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. -A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain.Request. -class IMain extends Imports - ^ t5148.scala:4: error: Symbol 'type scala.tools.nsc.interpreter.IMain.Request' is missing from the classpath. This symbol is required by 'method scala.tools.nsc.interpreter.Imports.allReqAndHandlers'. Make sure that type Request is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain. 
class IMain extends Imports ^ -two errors found +1 error diff --git a/test/files/neg/t515.check b/test/files/neg/t515.check index 47d2d30d0180..20e9044f283e 100644 --- a/test/files/neg/t515.check +++ b/test/files/neg/t515.check @@ -3,4 +3,4 @@ t515.scala:7: error: type mismatch; required: Test.Truc val parent: Truc = file.getMachin ^ -one error found +1 error diff --git a/test/files/neg/t5152.check b/test/files/neg/t5152.check index fd510dbae0a1..c6b6446f8d44 100644 --- a/test/files/neg/t5152.check +++ b/test/files/neg/t5152.check @@ -1,11 +1,11 @@ t5152.scala:7: error: kinds of the type arguments (Test.B) do not conform to the expected kinds of the type parameters (type E) in class A. Test.B's type parameters do not match type E's expected parameters: -type E has one type parameter, but type _ has none +type E has 1 type parameter, but type _ has 0 class B[E[_]] extends A[B] { } // B is depth 2 but A requires 1 ^ t5152.scala:11: error: kinds of the type arguments (Test.B1) do not conform to the expected kinds of the type parameters (type E) in class A1. 
Test.B1's type parameters do not match type E's expected parameters: -type _ has no type parameters, but type G has one +type _ has no type parameters, but type G has 1 class B1[E[_]] extends A1[B1] // B1 is depth 2 but A1 requires 3 ^ -two errors found +2 errors diff --git a/test/files/neg/t5182.check b/test/files/neg/t5182.check index 69de73b8f621..528fda36eaf9 100644 --- a/test/files/neg/t5182.check +++ b/test/files/neg/t5182.check @@ -1,7 +1,7 @@ -t5182.scala:3: error: unknown annotation argument name: qwe +t5182.scala:4: error: unknown annotation argument name: qwe @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1 ^ -t5182.scala:4: error: classfile annotation arguments have to be supplied as named arguments +t5182.scala:5: error: arguments to Java annotations have to be supplied as named arguments @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1 ^ -two errors found +2 errors diff --git a/test/files/neg/t5182.scala b/test/files/neg/t5182.scala index b9e62c79a4da..2eeb0960cfa2 100644 --- a/test/files/neg/t5182.scala +++ b/test/files/neg/t5182.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class test { @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1 @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1 diff --git a/test/files/neg/t5189.check b/test/files/neg/t5189.check index 4885de99cd9e..aefb661cf68f 100644 --- a/test/files/neg/t5189.check +++ b/test/files/neg/t5189.check @@ -3,4 +3,4 @@ t5189.scala:3: error: type mismatch; required: Any => Any def f(x: Any): Any => Any = x match { case Foo(bar) => bar } ^ -one error found +1 error diff --git a/test/files/neg/t5189_inferred.check b/test/files/neg/t5189_inferred.check index 9cc5dcc24248..2a54e1929d0e 100644 --- a/test/files/neg/t5189_inferred.check +++ b/test/files/neg/t5189_inferred.check @@ -1,6 +1,6 @@ t5189_inferred.scala:7: error: type mismatch; - found : scala.collection.immutable.Nil.type + found : Nil.type required: ?A1 where type ?A1 
f(Invariant(arr): Covariant[Any])(0) = Nil ^ -one error found +1 error diff --git a/test/files/neg/t5189_inferred.scala b/test/files/neg/t5189_inferred.scala index e4e87654454b..c97b4117cf40 100644 --- a/test/files/neg/t5189_inferred.scala +++ b/test/files/neg/t5189_inferred.scala @@ -5,4 +5,4 @@ class Test { val arr = Array("abc") def f[A](v: Covariant[A]) /*inferred!*/ = v match { case Invariant(xs) => xs } f(Invariant(arr): Covariant[Any])(0) = Nil -} \ No newline at end of file +} diff --git a/test/files/neg/t5189b.check b/test/files/neg/t5189b.check index 200eeb7d0a0c..f14a4dfdfdc3 100644 --- a/test/files/neg/t5189b.check +++ b/test/files/neg/t5189b.check @@ -8,4 +8,4 @@ You may wish to define W as +W instead. (SLS 4.5) t5189b.scala:51: error: value foo is not a member of type parameter T case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation) ^ -two errors found +2 errors diff --git a/test/files/neg/t5197.check b/test/files/neg/t5197.check new file mode 100644 index 000000000000..2d1d16a12cc0 --- /dev/null +++ b/test/files/neg/t5197.check @@ -0,0 +1,7 @@ +t5197.scala:12: error: type mismatch; + found : List[String] + required: Int + Note: implicit object O2 is not applicable here because it comes after the application point and it lacks an explicit result type. An object can be written as a lazy val with an explicit type. + val y: Int = List("b") + ^ +1 error diff --git a/test/files/neg/t5197.scala b/test/files/neg/t5197.scala new file mode 100644 index 000000000000..61f2a535220a --- /dev/null +++ b/test/files/neg/t5197.scala @@ -0,0 +1,14 @@ + +//> using options -Werror -feature + +// Periodic reminder that the feature is not required for implicit function values. 
+//import scala.language.implicitConversions + +object A { + val x: Int = List("a") + implicit lazy val O1: (List[String] => Int) = _.head.toInt +} +object B { + val y: Int = List("b") + implicit object O2 extends (List[String] => Int) { def apply(ss: List[String]): Int = ss.head.toInt } +} diff --git a/test/files/neg/t520.check b/test/files/neg/t520.check index 0035f89a7912..cd2f8e83283d 100644 --- a/test/files/neg/t520.check +++ b/test/files/neg/t520.check @@ -1,4 +1,4 @@ t520.scala:8: error: overloaded method verifyKeyword needs result type verifyKeyword("", source, pos); ^ -one error found +1 error diff --git a/test/files/neg/t521.check b/test/files/neg/t521.check index a10019565523..3cf03a2c36bc 100644 --- a/test/files/neg/t521.check +++ b/test/files/neg/t521.check @@ -1,15 +1,21 @@ -t521.scala:10: error: class PlainFile needs to be abstract, since method path in class AbstractFile of type => String is not defined +t521.scala:10: error: class PlainFile needs to be abstract. +Missing implementation for member of class AbstractFile: + def path: String = ??? + class PlainFile(val file : File) extends AbstractFile {} ^ -t521.scala:13: error: overriding value file in class PlainFile of type java.io.File; - value file needs `override' modifier +t521.scala:13: error: `override` modifier required to override concrete member: +val file: java.io.File (defined in class PlainFile) final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) { ^ -t521.scala:13: error: class ZipArchive needs to be abstract, since method path in class AbstractFile of type => String is not defined +t521.scala:13: error: class ZipArchive needs to be abstract. +Missing implementation for member of class AbstractFile: + def path: String = ??? 
+ final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) { ^ -t521.scala:15: error: overriding value path in class VirtualFile of type String; - method path needs to be a stable, immutable value +t521.scala:15: error: stable, immutable value required to override: +val path: String (defined in class VirtualFile) override def path = ""; ^ -four errors found +4 errors diff --git a/test/files/neg/t521.scala b/test/files/neg/t521.scala index c6afebc0be0b..9ee529e9d8a7 100644 --- a/test/files/neg/t521.scala +++ b/test/files/neg/t521.scala @@ -11,7 +11,7 @@ class PlainFile(val file : File) extends AbstractFile {} class VirtualFile(val name : String, val path : String) extends AbstractFile {} final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) { - class Entry(name : String, path : String) extends VirtualFile(name, path) { + class Entry(name : String, path0 : String) extends VirtualFile(name, path0) { override def path = ""; } } diff --git a/test/files/neg/t521b.check b/test/files/neg/t521b.check new file mode 100644 index 000000000000..0b120275927f --- /dev/null +++ b/test/files/neg/t521b.check @@ -0,0 +1,6 @@ +t521b.scala:16: error: Double definition will be detected in Scala 3; the conflicting value path is defined at 15:30 +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=test.ZipArchive.Entry + override def path = ""; + ^ +1 error diff --git a/test/files/neg/t521b.scala b/test/files/neg/t521b.scala new file mode 100644 index 000000000000..064e59825d97 --- /dev/null +++ b/test/files/neg/t521b.scala @@ -0,0 +1,18 @@ +//> using options -Xsource:3 +package test + +import java.io.File +import java.util.zip.ZipFile + +abstract class AbstractFile { + def path : String; +} + +class PlainFile(val file : File) extends AbstractFile {} +class VirtualFile(val name : String, val path : String) extends AbstractFile {} + +final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) { + class Entry(name : String, path : String) extends VirtualFile(name, path) { + override def path = ""; + } +} diff --git a/test/files/neg/t5265a.check b/test/files/neg/t5265a.check new file mode 100644 index 000000000000..3018c5163812 --- /dev/null +++ b/test/files/neg/t5265a.check @@ -0,0 +1,18 @@ +t5265a.scala:7: warning: Implicit definition should have explicit type (inferred T[String]) [quickfixable] + implicit val tsMissing = new T[String] {} // warn val in trait + ^ +t5265a.scala:20: warning: Implicit definition should have explicit type (inferred T[String]) [quickfixable] + implicit val tsChild = new T[String] {} // warn because inferred from RHS + ^ +t5265a.scala:22: warning: Implicit definition should have explicit type (inferred Int) [quickfixable] + implicit private[this] val pChild = 42 // also warn + ^ +t5265a.scala:27: warning: Implicit definition should have explicit type (inferred Int) [quickfixable] + implicit private[this] val y = 42 // also warn + ^ +t5265a.scala:25: warning: Implicit definition should have explicit type (inferred T[String]) [quickfixable] + implicit val tsD = new T[String] {} // warn val in class + ^ +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/t5265a.scala b/test/files/neg/t5265a.scala new file mode 100644 index 000000000000..42984206bdf5 --- /dev/null +++ b/test/files/neg/t5265a.scala @@ -0,0 +1,32 @@ +//> using options -Werror +trait T[A] + +class C[A: T] + +trait Missing { + implicit val tsMissing = new T[String] {} // warn val in trait + def f = new C[String] +} +trait Local { + def f = { + implicit val tsLocal = new T[String] {} // nowarn because local + new C[String] + } +} +trait Parent { + def t: T[String] +} +trait Child extends Parent { + implicit val tsChild = new T[String] {} // warn because inferred from RHS + def f = new C[String] + implicit private[this] val pChild = 42 // also warn +} +class D { + implicit val tsD = new T[String] {} // warn val in class + def f = new C[String] + implicit private[this] val y = 42 // also warn +} +class X extends Missing +trait Z { + val z = 42 +} diff --git a/test/files/neg/t5265b.check b/test/files/neg/t5265b.check new file mode 100644 index 000000000000..7e761d661c4a --- /dev/null +++ b/test/files/neg/t5265b.check @@ -0,0 +1,11 @@ +t5265b.scala:7: error: Implicit definition must have explicit type (inferred T[String]) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Missing.tsMissing + implicit val tsMissing = new T[String] {} // warn val in trait + ^ +t5265b.scala:20: error: Implicit definition must have explicit type (inferred T[String]) [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Child.tsChild + implicit val tsChild = new T[String] {} // warn (no warn with -Xsource-features:infer-override) + ^ +2 errors diff --git a/test/files/neg/t5265b.scala b/test/files/neg/t5265b.scala new file mode 100644 index 000000000000..45b2170fc7d9 --- /dev/null +++ b/test/files/neg/t5265b.scala @@ -0,0 +1,22 @@ +//> using options -Xsource:3 +trait T[A] + +class C[A: T] + +trait Missing { + implicit val tsMissing = new T[String] {} // warn val in trait + def f = new C[String] +} +trait Local { + def f = { + implicit val tsLocal = new T[String] {} // nowarn because local + new C[String] + } +} +trait Parent { + def tsChild: T[String] +} +trait Child extends Parent { + implicit val tsChild = new T[String] {} // warn (no warn with -Xsource-features:infer-override) + def f = new C[String] +} diff --git a/test/files/neg/t5318.check b/test/files/neg/t5318.check index d6a3a57935d0..56a991562d57 100644 --- a/test/files/neg/t5318.check +++ b/test/files/neg/t5318.check @@ -2,4 +2,4 @@ t5318.scala:7: error: diverging implicit expansion for type CompilerHang.this.TC starting with method tc in class CompilerHang breakage // type checker doesn't terminate, should report inference failure ^ -one error found +1 error diff --git a/test/files/neg/t5318.scala b/test/files/neg/t5318.scala index 8009c66e6b64..5be82a02a60d 100644 --- a/test/files/neg/t5318.scala +++ b/test/files/neg/t5318.scala @@ -5,4 +5,4 @@ class CompilerHang { implicit def tc[M[_]](implicit M0: TC[M]): TC[S] = null def breakage[F[_] : TC] = 0 breakage // type checker doesn't terminate, should report inference failure -} \ No newline at end of file +} diff --git a/test/files/neg/t5318b.check b/test/files/neg/t5318b.check index 47a10d673382..b20c4dc84eb9 100644 --- a/test/files/neg/t5318b.check +++ b/test/files/neg/t5318b.check @@ -2,4 +2,4 @@ t5318b.scala:7: error: diverging implicit expansion for type DivergingImplicitRe 
starting with method tc in class DivergingImplicitReported breakage // correct: diverging implicit expansion ^ -one error found \ No newline at end of file +1 error diff --git a/test/files/neg/t5318b.scala b/test/files/neg/t5318b.scala index 123f8b4e048e..a2d55b1e4a73 100644 --- a/test/files/neg/t5318b.scala +++ b/test/files/neg/t5318b.scala @@ -5,4 +5,4 @@ class DivergingImplicitReported { implicit def tc[M](implicit M0: TC[M]): TC[S] = null def breakage[F: TC] = 0 breakage // correct: diverging implicit expansion -} \ No newline at end of file +} diff --git a/test/files/neg/t5318c.check b/test/files/neg/t5318c.check index 594539be6968..7bbf5c1c72f8 100644 --- a/test/files/neg/t5318c.check +++ b/test/files/neg/t5318c.check @@ -2,4 +2,4 @@ t5318c.scala:13: error: diverging implicit expansion for type CompilerHang.this. starting with method tc in class CompilerHang breakage // type checker doesn't terminate, should report inference failure ^ -one error found +1 error diff --git a/test/files/neg/t5340.check b/test/files/neg/t5340.check index 2de19293c4be..9f768a354fff 100644 --- a/test/files/neg/t5340.check +++ b/test/files/neg/t5340.check @@ -1,6 +1,6 @@ -t5340.scala:17: error: type mismatch; - found : MyApp.r.E - required: MyApp.s.E - println(b: s.E) - ^ -one error found +t5340.scala:15: error: type mismatch; + found : Quux[MyApp.r.E,MyApp.s.E] + required: Int + (new Quux[r.E, s.E]): Int // fails due to pre-stripping implicits which + ^ +1 error diff --git a/test/files/neg/t5340.scala b/test/files/neg/t5340.scala index b283f1333842..2c09d77e9635 100644 --- a/test/files/neg/t5340.scala +++ b/test/files/neg/t5340.scala @@ -1,29 +1,19 @@ +class Quux[T, U] + class Poly { class E object E { - implicit def conv(value: Any): E = sys.error("") + implicit def conv[T, U](b: Quux[T, U]): Int = 1 } } object MyApp { - val r: Poly = sys.error("") - val s: Poly = sys.error("") - val b: r.E = sys.error("") - - // okay - s.E.conv(b): s.E - - // compilation fails with error below - 
println(b: s.E) - - // amb prefix: MyApp.s.type#class E MyApp.r.type#class E - // amb prefix: MyApp.s.type#class E MyApp.r.type#class E - // ../test/pending/run/t5310.scala:17: error: type mismatch; - // found : MyApp.r.E - // required: MyApp.s.E - // println(b: s.E) - // ^ + val r: Poly = ??? + val s: Poly = ??? - // The type error is as expected, but the `amb prefix` should be logged, - // rather than printed to standard out. + (new Quux[r.E, Int]): Int // ok + (new Quux[r.E, s.E]): Int // fails due to pre-stripping implicits which + // are reachable via different prefixes but not + // dependent on the prefix. Ambiguity not + // reported as such. } diff --git a/test/files/neg/t5352.check b/test/files/neg/t5352.check index df76ac8267c8..b4a0b0644b7b 100644 --- a/test/files/neg/t5352.check +++ b/test/files/neg/t5352.check @@ -1,13 +1,13 @@ -t5352.scala:12: error: type mismatch; +t5352.scala:13: error: type mismatch; found : boop.Bar required: boop.BarF (which expands to) AnyRef{def f(): Int} x = xs.head ^ -t5352.scala:15: error: method f in class Bar1 cannot be accessed in boop.Bar1 +t5352.scala:16: error: method f in class Bar1 cannot be accessed as a member of boop.Bar1 from object boop Access to protected method f not permitted because enclosing object boop is not a subclass of class Bar1 in object boop where target is defined (new Bar1).f ^ -two errors found +2 errors diff --git a/test/files/neg/t5352.scala b/test/files/neg/t5352.scala index 7bd73a7ed5a3..2dd99003d0bc 100644 --- a/test/files/neg/t5352.scala +++ b/test/files/neg/t5352.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object boop { abstract class Bar { protected def f(): Any } class Bar1 extends Bar { protected def f(): Int = 5 } diff --git a/test/files/neg/t5354.check b/test/files/neg/t5354.check index e47cecb5fee9..906f135e9711 100644 --- a/test/files/neg/t5354.check +++ b/test/files/neg/t5354.check @@ -1,7 +1,7 @@ t5354.scala:9: error: ambiguous 
implicit values: - both method x123 in package foo of type => foo.Bippy - and method z of type => foo.Bippy + both method x123 in package foo of type foo.Bippy + and method z of type foo.Bippy match expected type foo.Bippy implicitly[Bippy] ^ -one error found +1 error diff --git a/test/files/neg/t5355.check b/test/files/neg/t5355.check index 52c9c985d6eb..38e71c5ab1b1 100644 --- a/test/files/neg/t5355.check +++ b/test/files/neg/t5355.check @@ -13,4 +13,4 @@ t5355.scala:5: error: illegal cyclic reference involving value a t5355.scala:6: error: illegal cyclic reference involving method a val e: { def a: e.type } ^ -5 errors found +5 errors diff --git a/test/files/neg/t5357.check b/test/files/neg/t5357.check index 3385559071a6..29d5a3a04337 100644 --- a/test/files/neg/t5357.check +++ b/test/files/neg/t5357.check @@ -1,4 +1,4 @@ t5357.scala:5: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.) case A: N => 1 - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t5358.check b/test/files/neg/t5358.check index 59e83bba2f41..77b9cd32a79e 100644 --- a/test/files/neg/t5358.check +++ b/test/files/neg/t5358.check @@ -1,7 +1,7 @@ t5358.scala:3: error: class C inherits conflicting members: - method hi in trait A of type => String and - method hi in trait B of type => String -(Note: this can be resolved by declaring an override in class C.) + def hi: String (defined in trait A) and + def hi: String (defined in trait B) + (note: this can be resolved by declaring an `override` in class C.) 
class C extends A with B ^ -one error found +1 error diff --git a/test/files/neg/t5361.check b/test/files/neg/t5361.check index d7fee87ccdf3..2b3305a090a0 100644 --- a/test/files/neg/t5361.check +++ b/test/files/neg/t5361.check @@ -1,4 +1,4 @@ t5361.scala:2: error: only declarations allowed here val x : { val self = this } = new { self => } ^ -one error found +1 error diff --git a/test/files/neg/t5365.check b/test/files/neg/t5365.check new file mode 100644 index 000000000000..baddfb1e4552 --- /dev/null +++ b/test/files/neg/t5365.check @@ -0,0 +1,31 @@ +t5365.scala:3: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def nonExhautiveIfWeAssumeGuardsTrueOrFalse(x: Option[Int]): Int = x match { + ^ +t5365.scala:7: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def nonExhautiveIfWeAssumeGuardsFalse(x: Option[Int]): Int = x match { + ^ +t5365.scala:12: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def inverseGuards(x: Option[Int]): Int = x match { + ^ +t5365.scala:18: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def extractor(x: Option[Int]) = x match { + ^ +t5365.scala:21: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def repeatedExtractor(x: Option[Int]) = x match { + ^ +t5365.scala:24: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def extractorStrict(x: Option[Int]) = x match { + ^ +t5365.scala:28: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def repeatedExtractorStrict(x: Option[Int]) = x match { + ^ +error: No warnings can be incurred under -Werror. 
+7 warnings +1 error diff --git a/test/files/neg/t5365.scala b/test/files/neg/t5365.scala new file mode 100644 index 000000000000..087635454d3f --- /dev/null +++ b/test/files/neg/t5365.scala @@ -0,0 +1,40 @@ +//> using options -Xfatal-warnings +class C { + def nonExhautiveIfWeAssumeGuardsTrueOrFalse(x: Option[Int]): Int = x match { + case Some(n) if n % 2 == 0 => n + } + + def nonExhautiveIfWeAssumeGuardsFalse(x: Option[Int]): Int = x match { + case Some(n) if n % 2 == 0 => n + case None => 0 + } + + def inverseGuards(x: Option[Int]): Int = x match { + case Some(n) if n > 0 => n + case Some(n) if n <= 0 => ??? + case None => 0 + } + + def extractor(x: Option[Int]) = x match { + case Some(Extractor(_)) => + } + def repeatedExtractor(x: Option[Int]) = x match { + case Some(RepeatedExtractor(_)) => + } + def extractorStrict(x: Option[Int]) = x match { + case Some(Extractor(_)) => + case None => + } + def repeatedExtractorStrict(x: Option[Int]) = x match { + case Some(RepeatedExtractor(_)) => + case None => + } +} + +object Extractor { + def unapply(a: Any): Option[Any] = None +} + +object RepeatedExtractor { + def unapplySeq(a: Any): Option[Seq[Any]] = None +} diff --git a/test/files/neg/t5365b.check b/test/files/neg/t5365b.check new file mode 100644 index 000000000000..ad289c32fcc2 --- /dev/null +++ b/test/files/neg/t5365b.check @@ -0,0 +1,31 @@ +t5365b.scala:3: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def nonExhautiveIfWeAssumeGuardsTrueOrFalse(x: Option[Int]): Int = x match { + ^ +t5365b.scala:7: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def nonExhautiveIfWeAssumeGuardsFalse(x: Option[Int]): Int = x match { + ^ +t5365b.scala:12: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def inverseGuards(x: Option[Int]): Int = x match { + ^ +t5365b.scala:18: warning: match may not be exhaustive. 
+It would fail on the following inputs: None, Some(_) + def extractor(x: Option[Int]) = x match { + ^ +t5365b.scala:21: warning: match may not be exhaustive. +It would fail on the following inputs: None, Some(_) + def repeatedExtractor(x: Option[Int]) = x match { + ^ +t5365b.scala:24: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def extractorStrict(x: Option[Int]) = x match { + ^ +t5365b.scala:28: warning: match may not be exhaustive. +It would fail on the following input: Some(_) + def repeatedExtractorStrict(x: Option[Int]) = x match { + ^ +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/t5365b.scala b/test/files/neg/t5365b.scala new file mode 100644 index 000000000000..087635454d3f --- /dev/null +++ b/test/files/neg/t5365b.scala @@ -0,0 +1,40 @@ +//> using options -Xfatal-warnings +class C { + def nonExhautiveIfWeAssumeGuardsTrueOrFalse(x: Option[Int]): Int = x match { + case Some(n) if n % 2 == 0 => n + } + + def nonExhautiveIfWeAssumeGuardsFalse(x: Option[Int]): Int = x match { + case Some(n) if n % 2 == 0 => n + case None => 0 + } + + def inverseGuards(x: Option[Int]): Int = x match { + case Some(n) if n > 0 => n + case Some(n) if n <= 0 => ??? 
+ case None => 0 + } + + def extractor(x: Option[Int]) = x match { + case Some(Extractor(_)) => + } + def repeatedExtractor(x: Option[Int]) = x match { + case Some(RepeatedExtractor(_)) => + } + def extractorStrict(x: Option[Int]) = x match { + case Some(Extractor(_)) => + case None => + } + def repeatedExtractorStrict(x: Option[Int]) = x match { + case Some(RepeatedExtractor(_)) => + case None => + } +} + +object Extractor { + def unapply(a: Any): Option[Any] = None +} + +object RepeatedExtractor { + def unapplySeq(a: Any): Option[Seq[Any]] = None +} diff --git a/test/files/neg/t5365c.check b/test/files/neg/t5365c.check new file mode 100644 index 000000000000..4f94ab7316f2 --- /dev/null +++ b/test/files/neg/t5365c.check @@ -0,0 +1,7 @@ +t5365c.scala:6: warning: match may not be exhaustive. +It would fail on the following input: (x: C.Z forSome x not in C.Q) + def unsealedTrait(z: Z) = z match { + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t5365c.scala b/test/files/neg/t5365c.scala new file mode 100644 index 000000000000..3c460b59d56d --- /dev/null +++ b/test/files/neg/t5365c.scala @@ -0,0 +1,32 @@ +//> using options -Xfatal-warnings -Xlint:strict-unsealed-patmat +object C { + trait Z + final case class Q(i: Int) extends Z + + def unsealedTrait(z: Z) = z match { + case Q(_) => + } + + def unsealedTraitWithCatchall(z: Z) = z match { + case Q(_) => + case _ => + } + + def uncheckedUnsealedTrait(z: Z) = (z: @unchecked) match { + case Q(_) => + } + + def catchBlock() = { + try { 42 } catch { case MyException(_) => 43 } // Throwable isn't sealed, but don't warn here + } + + def catchBlockWithTypePattern() = { + try { 42 } catch { case _: MyException => 43 } // See? Just behave like Java. + } + + def partialFunction(): PartialFunction[Throwable, Int] = { // Or like PartialFunction behaves. 
+ case MyException(_) => 67 + } +} + +case class MyException(x: String) extends Exception diff --git a/test/files/neg/t5365d.check b/test/files/neg/t5365d.check new file mode 100644 index 000000000000..5b90b806d7c9 --- /dev/null +++ b/test/files/neg/t5365d.check @@ -0,0 +1,11 @@ +t5365d.scala:11: warning: match may not be exhaustive. +It would fail on the following inputs: C(_), D(_) + def extractorOnly(t: T): Unit = t match { + ^ +t5365d.scala:15: warning: match may not be exhaustive. +It would fail on the following input: D(_) + def extractorAndClass(t: T): Unit = t match { + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t5365d.scala b/test/files/neg/t5365d.scala new file mode 100644 index 000000000000..6f84c2eaf2b8 --- /dev/null +++ b/test/files/neg/t5365d.scala @@ -0,0 +1,19 @@ +//> using options -Xfatal-warnings +object D { + sealed trait T + final case class C(i: Int) extends T + final case class D(i: Int) extends T + + object NoCoverage { + def unapply(t: T): Option[Int] = None + } + + def extractorOnly(t: T): Unit = t match { + case NoCoverage(_) => + } + + def extractorAndClass(t: T): Unit = t match { + case NoCoverage(_) => + case C(_) => + } +} diff --git a/test/files/neg/t5365e.check b/test/files/neg/t5365e.check new file mode 100644 index 000000000000..912f9b04b388 --- /dev/null +++ b/test/files/neg/t5365e.check @@ -0,0 +1,27 @@ +t5365e.scala:8: warning: match may not be exhaustive. +It would fail on the following inputs: Bar(_), Foo(_) + def f0(x: Exh) = x match { case Foo() => () } // don't back off + ^ +t5365e.scala:9: warning: match may not be exhaustive. +It would fail on the following inputs: Bar(_), Foo(_) + def f1(x: Exh) = x match { case Foo(x) => x } // don't back off + ^ +t5365e.scala:10: warning: match may not be exhaustive. 
+It would fail on the following inputs: Bar(_), Foo(_) + def f2(x: Exh) = x match { case Foo(x, y) => x + y } // don't back off + ^ +t5365e.scala:11: warning: match may not be exhaustive. +It would fail on the following input: Bar(_) + def fX(x: Exh) = x match { case Foo(xs @ _*) => xs } // don't back off + ^ +t5365e.scala:12: warning: match may not be exhaustive. +It would fail on the following input: Foo(_) + def b1(x: Exh) = x match { case Bar(x) => x } // inexhaustive + ^ +t5365e.scala:13: warning: match may not be exhaustive. +It would fail on the following input: Foo(_) + def fb(x: Exh) = x match { case Foo(x) => x case Bar(x) => x } // pessimistically inexhaustive + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/t5365e.scala b/test/files/neg/t5365e.scala new file mode 100644 index 000000000000..f06c70b11a60 --- /dev/null +++ b/test/files/neg/t5365e.scala @@ -0,0 +1,14 @@ +//> using options -Xfatal-warnings +sealed trait Exh +final case class Foo(xs: String*) extends Exh +final case class Bar(x: String) extends Exh + +class Main { + def ex(x: Exh) = x match { case Foo(xs @ _*) => xs case Bar(x) => x } // exhaustive + def f0(x: Exh) = x match { case Foo() => () } // don't back off + def f1(x: Exh) = x match { case Foo(x) => x } // don't back off + def f2(x: Exh) = x match { case Foo(x, y) => x + y } // don't back off + def fX(x: Exh) = x match { case Foo(xs @ _*) => xs } // don't back off + def b1(x: Exh) = x match { case Bar(x) => x } // inexhaustive + def fb(x: Exh) = x match { case Foo(x) => x case Bar(x) => x } // pessimistically inexhaustive +} diff --git a/test/files/neg/t5376.check b/test/files/neg/t5376.check index 0376163c35e5..2d7adb39ddda 100644 --- a/test/files/neg/t5376.check +++ b/test/files/neg/t5376.check @@ -8,4 +8,4 @@ t5376.scala:22: error: type mismatch; required: Int "a": Int ^ -two errors found +2 errors diff --git a/test/files/neg/t5376.scala b/test/files/neg/t5376.scala index 
b1ba41bd542b..99e5cf1a1340 100644 --- a/test/files/neg/t5376.scala +++ b/test/files/neg/t5376.scala @@ -21,4 +21,4 @@ object Test { // Implicit usage compiles. "a": Int } -} \ No newline at end of file +} diff --git a/test/files/neg/t5378.check b/test/files/neg/t5378.check index c1460083f6c6..5a638ee06b8c 100644 --- a/test/files/neg/t5378.check +++ b/test/files/neg/t5378.check @@ -28,4 +28,4 @@ t5378.scala:28: error: Parameter type in structural refinement may not refer to t5378.scala:29: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ??? ^ -10 errors found +10 errors diff --git a/test/files/neg/t5389.check b/test/files/neg/t5389.check new file mode 100644 index 000000000000..e9b4fbb76019 --- /dev/null +++ b/test/files/neg/t5389.check @@ -0,0 +1,4 @@ +t5389.scala:2: error: not found: object ne +import ne.scala + ^ +1 error diff --git a/test/files/neg/t5389.scala b/test/files/neg/t5389.scala new file mode 100644 index 000000000000..bb83d7a563d7 --- /dev/null +++ b/test/files/neg/t5389.scala @@ -0,0 +1,4 @@ + +import ne.scala + +class C diff --git a/test/files/neg/t5390.check b/test/files/neg/t5390.check index 6a0129b898d8..0f5b2a3a4e02 100644 --- a/test/files/neg/t5390.check +++ b/test/files/neg/t5390.check @@ -1,4 +1,4 @@ -t5390.scala:7: error: forward reference extends over definition of value b +t5390.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B("") ^ -one error found +1 error diff --git a/test/files/neg/t5390.scala b/test/files/neg/t5390.scala index dd628f8851be..de596a478d79 100644 --- a/test/files/neg/t5390.scala +++ b/test/files/neg/t5390.scala @@ -3,8 +3,8 @@ class A { } object X { - def foo { + def foo: Unit = { val b = a.B("") val a = new A } -} \ No newline at end of file +} diff --git a/test/files/neg/t5390b.check b/test/files/neg/t5390b.check index cbf8fafa6bbe..55c13c06d7d5 
100644 --- a/test/files/neg/t5390b.check +++ b/test/files/neg/t5390b.check @@ -1,4 +1,4 @@ -t5390b.scala:7: error: forward reference extends over definition of value b +t5390b.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B("") ^ -one error found +1 error diff --git a/test/files/neg/t5390b.scala b/test/files/neg/t5390b.scala index c3373b87d3c2..94032c907d88 100644 --- a/test/files/neg/t5390b.scala +++ b/test/files/neg/t5390b.scala @@ -3,8 +3,8 @@ class A { } object X { - def foo { + def foo: Unit = { val b = a.B("") val a = new A } -} \ No newline at end of file +} diff --git a/test/files/neg/t5390c.check b/test/files/neg/t5390c.check index f8a794d690a3..1688bb3f4afb 100644 --- a/test/files/neg/t5390c.check +++ b/test/files/neg/t5390c.check @@ -1,4 +1,4 @@ -t5390c.scala:7: error: forward reference extends over definition of value b +t5390c.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = new a.B("") ^ -one error found +1 error diff --git a/test/files/neg/t5390c.scala b/test/files/neg/t5390c.scala index 6b1157661197..6277400dc4f4 100644 --- a/test/files/neg/t5390c.scala +++ b/test/files/neg/t5390c.scala @@ -3,8 +3,8 @@ class A { } object X { - def foo { + def foo: Unit = { val b = new a.B("") val a = new A } -} \ No newline at end of file +} diff --git a/test/files/neg/t5390d.check b/test/files/neg/t5390d.check index daa29142e739..c814ddd53cb8 100644 --- a/test/files/neg/t5390d.check +++ b/test/files/neg/t5390d.check @@ -1,4 +1,4 @@ -t5390d.scala:7: error: forward reference extends over definition of value b +t5390d.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B.toString ^ -one error found +1 error diff --git a/test/files/neg/t5390d.scala b/test/files/neg/t5390d.scala index 7a2671b44394..d376b714bc8f 100644 --- a/test/files/neg/t5390d.scala +++ b/test/files/neg/t5390d.scala @@ -3,8 +3,8 
@@ class A { } object X { - def foo { + def foo: Unit = { val b = a.B.toString val a = new A } -} \ No newline at end of file +} diff --git a/test/files/neg/t5426.check b/test/files/neg/t5426.check index 3ac09e534f13..7089257b783c 100644 --- a/test/files/neg/t5426.check +++ b/test/files/neg/t5426.check @@ -1,15 +1,15 @@ -t5426.scala:3: warning: comparing values of types Some[Int] and Int using `==` will always yield false +t5426.scala:4: warning: comparing values of types Some[Int] and Int using `==` will always yield false def f1 = Some(5) == 5 ^ -t5426.scala:4: warning: comparing values of types Int and Some[Int] using `==` will always yield false +t5426.scala:5: warning: comparing values of types Int and Some[Int] using `==` will always yield false def f2 = 5 == Some(5) ^ -t5426.scala:9: warning: comparing values of types Int and Some[Int] using `==` will always yield false +t5426.scala:10: warning: comparing values of types Int and Some[Int] using `==` will always yield false (x1 == x2) ^ -t5426.scala:10: warning: comparing values of types Some[Int] and Int using `==` will always yield false +t5426.scala:11: warning: comparing values of types Some[Int] and Int using `==` will always yield false (x2 == x1) ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t5426.scala b/test/files/neg/t5426.scala index b6bd5b832274..de8480f8b826 100644 --- a/test/files/neg/t5426.scala +++ b/test/files/neg/t5426.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class A { def f1 = Some(5) == 5 def f2 = 5 == Some(5) diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check index fb2d9c2e476e..3dc0bcf2c1a7 100644 --- a/test/files/neg/t5429.check +++ b/test/files/neg/t5429.check @@ -1,49 +1,55 @@ -t5429.scala:20: error: overriding value value in class A of type Int; - object value needs `override' modifier +t5429.scala:20: error: `override` modifier required to override concrete member: +val value: Int (defined in class A) object value // fail ^ -t5429.scala:21: error: overriding lazy value lazyvalue in class A of type Int; - object lazyvalue needs `override' modifier +t5429.scala:21: error: `override` modifier required to override concrete member: +lazy val lazyvalue: Int (defined in class A) object lazyvalue // fail ^ -t5429.scala:22: error: overriding method nullary in class A of type => Int; - object nullary needs `override' modifier +t5429.scala:22: error: `override` modifier required to override concrete member: +def nullary: Int (defined in class A) object nullary // fail ^ -t5429.scala:23: error: overriding method emptyArg in class A of type ()Int; - object emptyArg needs `override' modifier +t5429.scala:23: error: `override` modifier required to override concrete member: +def emptyArg(): Int (defined in class A) object emptyArg // fail ^ -t5429.scala:27: error: overriding value value in class A0 of type Any; - object value needs `override' modifier +t5429.scala:27: error: `override` modifier required to override concrete member: +val value: Any (defined in class A0) object value // fail ^ -t5429.scala:28: error: overriding lazy value lazyvalue in class A0 of type Any; - object lazyvalue needs `override' modifier +t5429.scala:28: 
error: `override` modifier required to override concrete member: +lazy val lazyvalue: Any (defined in class A0) object lazyvalue // fail ^ -t5429.scala:29: error: overriding method nullary in class A0 of type => Any; - object nullary needs `override' modifier +t5429.scala:29: error: `override` modifier required to override concrete member: +def nullary: Any (defined in class A0) object nullary // fail ^ -t5429.scala:30: error: overriding method emptyArg in class A0 of type ()Any; - object emptyArg needs `override' modifier +t5429.scala:30: error: `override` modifier required to override concrete member: +def emptyArg(): Any (defined in class A0) object emptyArg // fail ^ -t5429.scala:35: error: overriding value value in class A of type Int; - object value has incompatible type +t5429.scala:35: error: incompatible type in overriding +val value: Int (defined in class A); + found : C.this.value.type + required: Int override object value // fail ^ -t5429.scala:36: error: overriding lazy value lazyvalue in class A of type Int; - object lazyvalue must be declared lazy to override a concrete lazy value +t5429.scala:36: error: value must be lazy when overriding concrete lazy value: +lazy val lazyvalue: Int (defined in class A) override object lazyvalue // fail ^ -t5429.scala:37: error: overriding method nullary in class A of type => Int; - object nullary has incompatible type +t5429.scala:37: error: incompatible type in overriding +def nullary: Int (defined in class A); + found : C.this.nullary.type + required: Int override object nullary // fail ^ -t5429.scala:38: error: overriding method emptyArg in class A of type ()Int; - object emptyArg has incompatible type +t5429.scala:38: error: incompatible type in overriding +def emptyArg(): Int (defined in class A); + found : C.this.emptyArg.type + required: (): Int override object emptyArg // fail ^ t5429.scala:39: error: object oneArg overrides nothing. 
@@ -51,8 +57,8 @@ Note: the super classes of class C contain the following, non final members name def oneArg(x: String): Int override object oneArg // fail ^ -t5429.scala:43: error: overriding lazy value lazyvalue in class A0 of type Any; - object lazyvalue must be declared lazy to override a concrete lazy value +t5429.scala:43: error: value must be lazy when overriding concrete lazy value: +lazy val lazyvalue: Any (defined in class A0) override object lazyvalue // !!! this fails, but should succeed (lazy over lazy) ^ t5429.scala:46: error: object oneArg overrides nothing. @@ -60,24 +66,24 @@ Note: the super classes of class C0 contain the following, non final members nam def oneArg(x: String): Any override object oneArg // fail ^ -t5429.scala:50: error: overriding value value in class A of type Int; - value value needs `override' modifier +t5429.scala:50: error: `override` modifier required to override concrete member: +val value: Int (defined in class A) val value = 0 // fail ^ -t5429.scala:51: error: overriding lazy value lazyvalue in class A of type Int; - value lazyvalue needs `override' modifier +t5429.scala:51: error: `override` modifier required to override concrete member: +lazy val lazyvalue: Int (defined in class A) val lazyvalue = 0 // fail ^ -t5429.scala:52: error: overriding method nullary in class A of type => Int; - value nullary needs `override' modifier +t5429.scala:52: error: `override` modifier required to override concrete member: +def nullary: Int (defined in class A) val nullary = 5 // fail ^ -t5429.scala:53: error: overriding method emptyArg in class A of type ()Int; - value emptyArg needs `override' modifier +t5429.scala:53: error: `override` modifier required to override concrete member: +def emptyArg(): Int (defined in class A) val emptyArg = 10 // fail ^ -t5429.scala:58: error: overriding lazy value lazyvalue in class A0 of type Any; - value lazyvalue must be declared lazy to override a concrete lazy value +t5429.scala:58: error: value 
must be lazy when overriding concrete lazy value: +lazy val lazyvalue: Any (defined in class A0) override val lazyvalue = 0 // fail (non-lazy) ^ t5429.scala:61: error: value oneArg overrides nothing. @@ -85,53 +91,56 @@ Note: the super classes of class D0 contain the following, non final members nam def oneArg(x: String): Any override val oneArg = 15 // fail ^ -t5429.scala:65: error: overriding value value in class A of type Int; - method value needs `override' modifier +t5429.scala:65: error: `override` modifier required to override concrete member: +val value: Int (defined in class A) def value = 0 // fail ^ -t5429.scala:66: error: overriding lazy value lazyvalue in class A of type Int; - method lazyvalue needs `override' modifier +t5429.scala:66: error: `override` modifier required to override concrete member: +lazy val lazyvalue: Int (defined in class A) def lazyvalue = 2 // fail ^ -t5429.scala:67: error: overriding method nullary in class A of type => Int; - method nullary needs `override' modifier +t5429.scala:67: error: `override` modifier required to override concrete member: +def nullary: Int (defined in class A) def nullary = 5 // fail ^ -t5429.scala:68: error: overriding method emptyArg in class A of type ()Int; - method emptyArg needs `override' modifier +t5429.scala:68: error: `override` modifier required to override concrete member: +def emptyArg(): Int (defined in class A) def emptyArg = 10 // fail ^ -t5429.scala:72: error: overriding value value in class A0 of type Any; - method value needs to be a stable, immutable value +t5429.scala:72: error: stable, immutable value required to override: +val value: Any (defined in class A0) override def value = 0 // fail ^ -t5429.scala:73: error: overriding lazy value lazyvalue in class A0 of type Any; - method lazyvalue needs to be a stable, immutable value +t5429.scala:73: error: stable, immutable value required to override: +lazy val lazyvalue: Any (defined in class A0) override def lazyvalue = 2 // fail ^ 
+t5429.scala:75: warning: method without a parameter list overrides method emptyArg in class A0 defined with a single empty parameter list [quickfixable] + override def emptyArg = 10 // override + ^ t5429.scala:76: error: method oneArg overrides nothing. Note: the super classes of class E0 contain the following, non final members named oneArg: def oneArg(x: String): Any override def oneArg = 15 // fail ^ -t5429.scala:80: error: overriding value value in class A of type Int; - lazy value value needs `override' modifier +t5429.scala:80: error: `override` modifier required to override concrete member: +val value: Int (defined in class A) lazy val value = 0 // fail ^ -t5429.scala:81: error: overriding lazy value lazyvalue in class A of type Int; - lazy value lazyvalue needs `override' modifier +t5429.scala:81: error: `override` modifier required to override concrete member: +lazy val lazyvalue: Int (defined in class A) lazy val lazyvalue = 2 // fail ^ -t5429.scala:82: error: overriding method nullary in class A of type => Int; - lazy value nullary needs `override' modifier +t5429.scala:82: error: `override` modifier required to override concrete member: +def nullary: Int (defined in class A) lazy val nullary = 5 // fail ^ -t5429.scala:83: error: overriding method emptyArg in class A of type ()Int; - lazy value emptyArg needs `override' modifier +t5429.scala:83: error: `override` modifier required to override concrete member: +def emptyArg(): Int (defined in class A) lazy val emptyArg = 10 // fail ^ -t5429.scala:87: error: overriding value value in class A0 of type Any; - lazy value value cannot override a concrete non-lazy value +t5429.scala:87: error: concrete non-lazy value cannot be overridden: +val value: Any (defined in class A0) override lazy val value = 0 // fail (strict over lazy) ^ t5429.scala:91: error: lazy value oneArg overrides nothing. 
@@ -139,4 +148,5 @@ Note: the super classes of class F0 contain the following, non final members nam def oneArg(x: String): Any override lazy val oneArg = 15 // fail ^ -34 errors found +1 warning +34 errors diff --git a/test/files/neg/t5440.check b/test/files/neg/t5440.check index fdcbde3fde7c..4ca05d8186ee 100644 --- a/test/files/neg/t5440.check +++ b/test/files/neg/t5440.check @@ -1,7 +1,7 @@ -t5440.scala:4: warning: match may not be exhaustive. +t5440.scala:5: warning: match may not be exhaustive. It would fail on the following inputs: (List(_), Nil), (Nil, List(_)) (list1, list2) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t5440.scala b/test/files/neg/t5440.scala index 1c10ea9b16b0..dbbad683338e 100644 --- a/test/files/neg/t5440.scala +++ b/test/files/neg/t5440.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { def merge(list1: List[Long], list2: List[Long]): Boolean = (list1, list2) match { diff --git a/test/files/neg/t545.check b/test/files/neg/t545.check index aae575fa9699..d0e358b14aec 100644 --- a/test/files/neg/t545.check +++ b/test/files/neg/t545.check @@ -1,4 +1,4 @@ t545.scala:4: error: value blah is not a member of Test.Foo val x = foo.blah match { ^ -one error found +1 error diff --git a/test/files/neg/t5452-new.check b/test/files/neg/t5452-new.check index 1850a7004ae6..a064518fbbb1 100644 --- a/test/files/neg/t5452-new.check +++ b/test/files/neg/t5452-new.check @@ -1,8 +1,8 @@ -t5452-new.scala:30: error: overloaded method value apply with alternatives: +t5452-new.scala:30: error: overloaded method apply with alternatives: ()Queryable[CoffeesTable] - (t: Tree)(implicit evidence$2: scala.reflect.ClassTag[CoffeesTable])Nothing + (t: Tree)(implicit evidence$2: scala.reflect.ClassTag[CoffeesTable]): Nothing (implicit evidence$1: 
scala.reflect.ClassTag[CoffeesTable])Nothing cannot be applied to (Queryable[CoffeesTable]) Queryable[CoffeesTable]( q.treeFilter(null) ) ^ -one error found +1 error diff --git a/test/files/neg/t5452-new.scala b/test/files/neg/t5452-new.scala index b74b1550bd67..ef5f199e794e 100644 --- a/test/files/neg/t5452-new.scala +++ b/test/files/neg/t5452-new.scala @@ -28,4 +28,4 @@ trait CoffeesTable{ object Test extends App{ val q = new Queryable[CoffeesTable] Queryable[CoffeesTable]( q.treeFilter(null) ) -} \ No newline at end of file +} diff --git a/test/files/neg/t5452-old.check b/test/files/neg/t5452-old.check index 1860c98c5309..ff3f83a48fd9 100644 --- a/test/files/neg/t5452-old.check +++ b/test/files/neg/t5452-old.check @@ -1,8 +1,8 @@ -t5452-old.scala:28: error: overloaded method value apply with alternatives: +t5452-old.scala:28: error: overloaded method apply with alternatives: ()Queryable[CoffeesTable] - (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing + (t: Tree)(implicit evidence$2: Manifest[CoffeesTable]): Nothing (implicit evidence$1: Manifest[CoffeesTable])Nothing cannot be applied to (Queryable[CoffeesTable]) Queryable[CoffeesTable]( q.treeFilter(null) ) ^ -one error found +1 error diff --git a/test/files/neg/t5455.check b/test/files/neg/t5455.check index 788daf99fa8f..8d65cb897490 100644 --- a/test/files/neg/t5455.check +++ b/test/files/neg/t5455.check @@ -1,4 +1,4 @@ t5455.scala:4: error: lazy vals are not tailcall transformed @annotation.tailrec final lazy val bar: Thing[Int] = { ^ -one error found +1 error diff --git a/test/files/neg/t5493.check b/test/files/neg/t5493.check index 78b1536bc78c..40fb545f2511 100644 --- a/test/files/neg/t5493.check +++ b/test/files/neg/t5493.check @@ -1,4 +1,4 @@ t5493.scala:2: error: not found: value iDontExist def meh(xs: Any): Any = xs :: iDontExist :: Nil ^ -one error found +1 error diff --git a/test/files/neg/t5497.check b/test/files/neg/t5497.check index 4d6d52b519d5..d8400a993b4b 100644 --- 
a/test/files/neg/t5497.check +++ b/test/files/neg/t5497.check @@ -1,4 +1,4 @@ t5497.scala:3: error: not found: value sq case other => println(null.asInstanceOf[sq.Filter].tableName) ^ -one error found +1 error diff --git a/test/files/neg/t550.check b/test/files/neg/t550.check index da862e110e66..e09b9cab03f7 100644 --- a/test/files/neg/t550.check +++ b/test/files/neg/t550.check @@ -4,4 +4,4 @@ t550.scala:6: error: type List takes type parameters t550.scala:8: error: could not find implicit value for parameter m: Monoid[a] sum(List(1,2,3)) ^ -two errors found +2 errors diff --git a/test/files/neg/t5507.check b/test/files/neg/t5507.check new file mode 100644 index 000000000000..650280a359c7 --- /dev/null +++ b/test/files/neg/t5507.check @@ -0,0 +1,6 @@ +t5507.scala:5: warning: Class parameter is specialized for type Unit. Consider using `@specialized(Specializable.Arg)` instead. +final class C[@specialized A, @specialized B](a: A, b: B) + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t5507.scala b/test/files/neg/t5507.scala new file mode 100644 index 000000000000..63a9e6604de9 --- /dev/null +++ b/test/files/neg/t5507.scala @@ -0,0 +1,12 @@ +// +//> using options -Xlint:unit-special -Werror + +// warn once +final class C[@specialized A, @specialized B](a: A, b: B) + +// explicit Unit is ok +final class D[@specialized(Specializable.Arg) A](a: A, u: Unit) + +final class F[@specialized A](f: concurrent.Future[A]) { + def result: A = concurrent.Await.result(f, concurrent.duration.Duration.Inf) +} diff --git a/test/files/neg/t5510.check b/test/files/neg/t5510.check index 322a2f5e25f4..27ff43709409 100644 --- a/test/files/neg/t5510.check +++ b/test/files/neg/t5510.check @@ -16,4 +16,4 @@ t5510.scala:6: error: unclosed multi-line string literal t5510.scala:7: error: unclosed multi-line string literal } ^ -6 errors found +6 errors diff --git a/test/files/neg/t5529.check b/test/files/neg/t5529.check index da3f84e1ec7f..20243f7d6515 100644 --- a/test/files/neg/t5529.check +++ b/test/files/neg/t5529.check @@ -4,4 +4,4 @@ t5529.scala:12: error: File is already defined as class File t5529.scala:10: error: class type required but test.Test.File found sealed class Dir extends File { } ^ -two errors found +2 errors diff --git a/test/files/neg/t5543.check b/test/files/neg/t5543.check index b61de0f78b96..7a054de684e5 100644 --- a/test/files/neg/t5543.check +++ b/test/files/neg/t5543.check @@ -7,4 +7,4 @@ t5543.scala:11: error: not found: value x t5543.scala:18: error: not found: value x def this(a: Int = x) { this() } ^ -three errors found +3 errors diff --git a/test/files/neg/t5544.check b/test/files/neg/t5544.check index d4113935a33d..4d7cc0aef8f6 100644 --- a/test/files/neg/t5544.check +++ b/test/files/neg/t5544.check @@ -1,4 +1,4 @@ Test_2.scala:2: error: value baz is not a member of object Api Api.baz ^ -one error found +1 error diff --git a/test/files/neg/t5553_1.check b/test/files/neg/t5553_1.check index 
afd64898881c..008851ecb734 100644 --- a/test/files/neg/t5553_1.check +++ b/test/files/neg/t5553_1.check @@ -1,54 +1,54 @@ t5553_1.scala:18: error: ambiguous reference to overloaded definition, -both method apply in object Foo1 of type (z: String)Base[T] -and method apply in object Foo1 of type (a: Int)Base[T] +both method apply in object Foo1 of type (z: String): Base[T] +and method apply in object Foo1 of type (a: Int): Base[T] match expected type ? def test1[T] = Foo1[T] ^ t5553_1.scala:19: error: type mismatch; - found : [T](z: String)Base[T] (a: Int)Base[T] + found : [T](z: String): Base[T] (a: Int): Base[T] required: Int def test2[T]: Int = Foo1[T] ^ t5553_1.scala:20: error: type mismatch; - found : [T(in method apply)](z: String)Base[T(in method apply)] (a: Int)Base[T(in method apply)] + found : [T(in method apply)](z: String): Base[T(in method apply)] (a: Int): Base[T(in method apply)] required: Base[T(in method test3)] def test3[T]: Base[T] = Foo1[T] ^ t5553_1.scala:24: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (z: String)Base[T] -and method apply in object Foo2 of type (a: Int)Base[T] +both method apply in object Foo2 of type (z: String): Base[T] +and method apply in object Foo2 of type (a: Int): Base[T] match expected type ? 
def test4[T] = Foo2[T] ^ t5553_1.scala:25: error: type mismatch; - found : [T](z: String)Base[T] (a: Int)Base[T] + found : [T](z: String): Base[T] (a: Int): Base[T] required: Int def test5[T]: Int = Foo2[T] ^ t5553_1.scala:26: error: type mismatch; - found : [T(in method apply)](z: String)Base[T(in method apply)] (a: Int)Base[T(in method apply)] + found : [T(in method apply)](z: String): Base[T(in method apply)] (a: Int): Base[T(in method apply)] required: Base[T(in method test6)] def test6[T]: Base[T] = Foo2[T] ^ t5553_1.scala:30: error: ambiguous reference to overloaded definition, -both method apply in object Foo3 of type (z: String)String -and method apply in object Foo3 of type (a: Int)Base[T] +both method apply in object Foo3 of type (z: String): String +and method apply in object Foo3 of type (a: Int): Base[T] match expected type ? def test7[T] = Foo3[T] ^ t5553_1.scala:31: error: type mismatch; - found : [T](z: String)String (a: Int)Base[T] + found : [T](z: String): String (a: Int): Base[T] required: String def test8[T]: String = Foo3[T] ^ t5553_1.scala:32: error: type mismatch; - found : [T](z: String)String (a: Int)Base[T] + found : [T](z: String): String (a: Int): Base[T] required: Int def test9[T]: Int = Foo3[T] ^ t5553_1.scala:33: error: type mismatch; - found : [T(in method apply)](z: String)String (a: Int)Base[T(in method apply)] + found : [T(in method apply)](z: String): String (a: Int): Base[T(in method apply)] required: Base[T(in method test10)] def test10[T]: Base[T] = Foo3[T] ^ -10 errors found +10 errors diff --git a/test/files/neg/t5553_2.check b/test/files/neg/t5553_2.check index 599fdb05239a..b26c7f634f70 100644 --- a/test/files/neg/t5553_2.check +++ b/test/files/neg/t5553_2.check @@ -36,15 +36,15 @@ t5553_2.scala:50: error: could not find implicit value for parameter z: String def test16[T] = Foo3[T] ^ t5553_2.scala:54: error: ambiguous reference to overloaded definition, -both method apply in object Foo4 of type (x: Int)(implicit z: 
String)Base[T] -and method apply in object Foo4 of type (x: Int)Base[T] +both method apply in object Foo4 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo4 of type (x: Int): Base[T] match argument types (Int) def test17[T] = Foo4[T](1) ^ t5553_2.scala:55: error: ambiguous reference to overloaded definition, -both method apply in object Foo4 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo4 of type (x: Int)Base[T] +both method apply in object Foo4 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo4 of type (x: Int): Base[T] match argument types (Int) and expected result type Base[T] def test18[T]: Base[T] = Foo4[T](1) ^ -11 errors found +11 errors diff --git a/test/files/neg/t5554.check b/test/files/neg/t5554.check index 8f657fd32f97..a577bef153bd 100644 --- a/test/files/neg/t5554.check +++ b/test/files/neg/t5554.check @@ -1,67 +1,67 @@ t5554.scala:14: error: ambiguous reference to overloaded definition, -both method apply in object Foo1 of type (x: Int)(implicit z: String)String -and method apply in object Foo1 of type (x: Int)Base[T] +both method apply in object Foo1 of type (x: Int)(implicit z: String): String +and method apply in object Foo1 of type (x: Int): Base[T] match argument types (Int) def test1[T]: Int = Foo1[T](1) ^ t5554.scala:16: error: ambiguous reference to overloaded definition, -both method apply in object Foo1 of type (x: Int)(implicit z: String)String -and method apply in object Foo1 of type (x: Int)Base[T] +both method apply in object Foo1 of type (x: Int)(implicit z: String): String +and method apply in object Foo1 of type (x: Int): Base[T] match argument types (Int) def test3[T]: String = Foo1[T](1) ^ t5554.scala:17: error: ambiguous reference to overloaded definition, -both method apply in object Foo1 of type (x: Int)(implicit z: String)String -and method apply in object Foo1 of type (x: Int)Base[T] +both method apply in object Foo1 of type (x: Int)(implicit 
z: String): String +and method apply in object Foo1 of type (x: Int): Base[T] match argument types (Int) def test4[T] = Foo1[T](1) ^ t5554.scala:22: error: ambiguous reference to overloaded definition, -both method apply in object Foo1 of type (x: Int)(implicit z: String)String -and method apply in object Foo1 of type (x: Int)Base[T] +both method apply in object Foo1 of type (x: Int)(implicit z: String): String +and method apply in object Foo1 of type (x: Int): Base[T] match argument types (Int) def test5[T]: Int = Foo1[T](1) ^ t5554.scala:25: error: ambiguous reference to overloaded definition, -both method apply in object Foo1 of type (x: Int)(implicit z: String)String -and method apply in object Foo1 of type (x: Int)Base[T] +both method apply in object Foo1 of type (x: Int)(implicit z: String): String +and method apply in object Foo1 of type (x: Int): Base[T] match argument types (Int) def test8[T] = Foo1[T](1) ^ t5554.scala:29: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo2 of type (x: Int)Base[T] +both method apply in object Foo2 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo2 of type (x: Int): Base[T] match argument types (Int) def test9[T]: String = Foo2[T](1) ^ t5554.scala:30: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo2 of type (x: Int)Base[T] +both method apply in object Foo2 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo2 of type (x: Int): Base[T] match argument types (Int) and expected result type Base[T] def test10[T]: Base[T] = Foo2[T](1) ^ t5554.scala:31: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo2 of type (x: Int)Base[T] +both method apply 
in object Foo2 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo2 of type (x: Int): Base[T] match argument types (Int) def test11[T] = Foo2[T](1) ^ t5554.scala:36: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo2 of type (x: Int)Base[T] +both method apply in object Foo2 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo2 of type (x: Int): Base[T] match argument types (Int) def test12[T]: String = Foo2[T](1) ^ t5554.scala:37: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo2 of type (x: Int)Base[T] +both method apply in object Foo2 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo2 of type (x: Int): Base[T] match argument types (Int) and expected result type Base[T] def test13[T]: Base[T] = Foo2[T](1) ^ t5554.scala:38: error: ambiguous reference to overloaded definition, -both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T] -and method apply in object Foo2 of type (x: Int)Base[T] +both method apply in object Foo2 of type (x: Int)(implicit z: String): Base[T] +and method apply in object Foo2 of type (x: Int): Base[T] match argument types (Int) def test14[T] = Foo2[T](1) ^ -11 errors found +11 errors diff --git a/test/files/neg/t556.check b/test/files/neg/t556.check index 30cc296b356f..98f0443a5642 100644 --- a/test/files/neg/t556.check +++ b/test/files/neg/t556.check @@ -1,7 +1,12 @@ t556.scala:3: error: missing parameter type - def g:Int = f((x,y)=>x) - ^ + def g: Int = f((x, y) => x) + ^ t556.scala:3: error: missing parameter type - def g:Int = f((x,y)=>x) - ^ -two errors found + def g: Int = f((x, y) => x) + ^ +t556.scala:3: error: type mismatch; + found : (?, ?) => ? 
+ required: Int => Int + def g: Int = f((x, y) => x) + ^ +3 errors diff --git a/test/files/neg/t556.scala b/test/files/neg/t556.scala index b0113258c946..bbcddcee39dd 100644 --- a/test/files/neg/t556.scala +++ b/test/files/neg/t556.scala @@ -1,4 +1,4 @@ object Main extends App { - def f(a:Int=>Int):Int = a(4) - def g:Int = f((x,y)=>x) + def f(a: Int => Int): Int = a(4) + def g: Int = f((x, y) => x) } diff --git a/test/files/neg/t5564.check b/test/files/neg/t5564.check index e7e13ccc9c26..528459bb2be4 100644 --- a/test/files/neg/t5564.check +++ b/test/files/neg/t5564.check @@ -1,4 +1,4 @@ t5564.scala:8: error: inferred type arguments [A] do not conform to method bar's type parameter bounds [B >: A <: C] def bar[B >: A <: C]: T = throw new Exception ^ -one error found +1 error diff --git a/test/files/neg/t5572.check b/test/files/neg/t5572.check index 3c9adf41cdc0..fbd27d0699e0 100644 --- a/test/files/neg/t5572.check +++ b/test/files/neg/t5572.check @@ -13,4 +13,4 @@ t5572.scala:18: error: type mismatch; required: B run(sth, b) ^ -three errors found +3 errors diff --git a/test/files/neg/t5572.scala b/test/files/neg/t5572.scala index 4169df42161e..81f848362bda 100644 --- a/test/files/neg/t5572.scala +++ b/test/files/neg/t5572.scala @@ -11,7 +11,7 @@ class Test { def bar(): (A, B) - def foo { + def foo: Unit = { val (b, a) = bar() Z.transf(a, b) match { case sth => diff --git a/test/files/neg/t5578.check b/test/files/neg/t5578.check index 56123d2e0f2c..4643fc4979db 100644 --- a/test/files/neg/t5578.check +++ b/test/files/neg/t5578.check @@ -4,4 +4,4 @@ t5578.scala:33: error: type mismatch; (which expands to) NumericOpsExp.this.Exp[T] def plus[T: Numeric](x: Rep[T], y: Rep[T]): Rep[T] = Plus[T](x,y) ^ -one error found +1 error diff --git a/test/files/neg/t558.check b/test/files/neg/t558.check index f33ddc451f10..9a3bc16afb84 100644 --- a/test/files/neg/t558.check +++ b/test/files/neg/t558.check @@ -1,4 +1,4 @@ t558.scala:13: error: value file is not a member of 
NewModel.this.RootURL final val source = top.file; ^ -one error found +1 error diff --git a/test/files/neg/t5580a.check b/test/files/neg/t5580a.check index 50a31857d591..c42fa07d0096 100644 --- a/test/files/neg/t5580a.check +++ b/test/files/neg/t5580a.check @@ -3,4 +3,4 @@ t5580a.scala:9: error: polymorphic expression cannot be instantiated to expected required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]] if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set()) ^ -one error found +1 error diff --git a/test/files/neg/t5580b.check b/test/files/neg/t5580b.check index 45fde46ff99f..f1301c75692b 100644 --- a/test/files/neg/t5580b.check +++ b/test/files/neg/t5580b.check @@ -3,4 +3,4 @@ t5580b.scala:11: error: polymorphic expression cannot be instantiated to expecte required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]] if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set()) ^ -one error found +1 error diff --git a/test/files/neg/t5606.check b/test/files/neg/t5606.check new file mode 100644 index 000000000000..5d68161a431b --- /dev/null +++ b/test/files/neg/t5606.check @@ -0,0 +1,27 @@ +t5606.scala:5: error: using `?` as a type name requires backticks. [quickfixable] +case class CaseTest_?[?](someData: String) + ^ +t5606.scala:23: error: using `?` as a type name requires backticks. [quickfixable] + def regress_?[F[?]] = 2 + ^ +t5606.scala:3: error: Top-level wildcard is not allowed +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +case class CaseTest[_](someData: String) + ^ +t5606.scala:8: error: Top-level wildcard is not allowed +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +case class CaseTest2[_, _](someData: String) + ^ +t5606.scala:8: error: Top-level wildcard is not allowed +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration +case class CaseTest2[_, _](someData: String) + ^ +t5606.scala:11: error: Top-level wildcard is not allowed +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f[_](x: Int) = ??? + ^ +6 errors diff --git a/test/files/neg/t5606.scala b/test/files/neg/t5606.scala new file mode 100644 index 000000000000..fed05b645f69 --- /dev/null +++ b/test/files/neg/t5606.scala @@ -0,0 +1,26 @@ +//> using options -Xsource:3 +// was: _ taken as ident of type param, but poor interactions below +case class CaseTest[_](someData: String) + +case class CaseTest_?[?](someData: String) + +// was: _ already defined +case class CaseTest2[_, _](someData: String) + +class C { + def f[_](x: Int) = ??? +} + +object Test extends App { + def f0 = new CaseTest("X") + def f1: CaseTest[Int] = new CaseTest[Int]("X") // OK! 
+ def f2: CaseTest[Int] = CaseTest[Int]("X") // CaseTest[Any] + def f3 = new CaseTest[Int]("X").copy() // CaseTest[Any] + def f4 = new CaseTest[Int]("X").copy[Int]() // CaseTest[Any] + + def regress0[F[_]] = 0 + def regress1[F[_, _]] = 1 + def regress_?[F[?]] = 2 + //def regress0[F[_$$1]] = 0; + //def regress1[F[_$$2, _$$3]] = 1 +} diff --git a/test/files/neg/t5606b.check b/test/files/neg/t5606b.check new file mode 100644 index 000000000000..955c17b9a98e --- /dev/null +++ b/test/files/neg/t5606b.check @@ -0,0 +1,15 @@ +t5606b.scala:4: warning: Top-level wildcard is not allowed +case class CaseTest[_](someData: String) + ^ +t5606b.scala:7: warning: Top-level wildcard is not allowed +case class CaseTest2[_, _](someData: String) + ^ +t5606b.scala:7: warning: Top-level wildcard is not allowed +case class CaseTest2[_, _](someData: String) + ^ +t5606b.scala:10: warning: Top-level wildcard is not allowed + def f[_](x: Int) = ??? + ^ +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t5606b.scala b/test/files/neg/t5606b.scala new file mode 100644 index 000000000000..eef807300770 --- /dev/null +++ b/test/files/neg/t5606b.scala @@ -0,0 +1,11 @@ +//> using options -Xlint -Werror +// +// was: _ taken as ident of type param, now a fresh name +case class CaseTest[_](someData: String) + +// was: _ already defined, now a fresh name +case class CaseTest2[_, _](someData: String) + +class C { + def f[_](x: Int) = ??? 
+} diff --git a/test/files/neg/t5617.check b/test/files/neg/t5617.check index 79cc3a1e3240..6da744aa1ade 100644 --- a/test/files/neg/t5617.check +++ b/test/files/neg/t5617.check @@ -5,4 +5,4 @@ def foo(x: Boolean): Int def foo(i: Int)(b: String): Int override def foo(s: String): Int ^ -one error found +1 error diff --git a/test/files/neg/t562.check b/test/files/neg/t562.check index 95be075af101..10aa30a86313 100644 --- a/test/files/neg/t562.check +++ b/test/files/neg/t562.check @@ -1,4 +1,4 @@ -t562.scala:10: error: super may not be used on value y +t562.scala:10: error: super may not be used on value y; super can only be used to select a member that is a method or type override val y = super.y; ^ -one error found +1 error diff --git a/test/files/neg/t563.check b/test/files/neg/t563.check index 1431c85eb052..12f414742726 100644 --- a/test/files/neg/t563.check +++ b/test/files/neg/t563.check @@ -1,4 +1,4 @@ t563.scala:6: error: missing parameter type map(n,ptr => Option(ptr.get)); ^ -one error found +1 error diff --git a/test/files/neg/t5639b.check b/test/files/neg/t5639b.check deleted file mode 100644 index 3dbcf446e59f..000000000000 --- a/test/files/neg/t5639b.check +++ /dev/null @@ -1,4 +0,0 @@ -A_2.scala:7: error: could not find implicit value for parameter e: Int - implicitly[Int] - ^ -one error found diff --git a/test/files/neg/t5639b/A_1.scala b/test/files/neg/t5639b/A_1.scala deleted file mode 100644 index ab35b2d2aba1..000000000000 --- a/test/files/neg/t5639b/A_1.scala +++ /dev/null @@ -1,18 +0,0 @@ -// scalac: -Xsource:2.11 -import Implicits._ - -class Baz - -object Test { - implicitly[Int] -} - -object Implicits { - implicit val Baz: Int = 0 - // This implicit was being ignored by `isQualifyingImplicit` - // if the classpath contained a class file for `class Baz`. - // This is because the package scope contains a speculative - // symbol for `object Baz` which is entered by `SymbolLoaders` - // before looking inside the class file. 
(A Java originated - // classfile results in the class/module symbol pair.) -} diff --git a/test/files/neg/t5639b/A_2.scala b/test/files/neg/t5639b/A_2.scala deleted file mode 100644 index 2769b0ec0898..000000000000 --- a/test/files/neg/t5639b/A_2.scala +++ /dev/null @@ -1,12 +0,0 @@ -// scalac: -Xsource:2.11 -import Implicits._ - -class Baz - -object Test { - implicitly[Int] -} - -object Implicits { - implicit val Baz: Int = 0 -} diff --git a/test/files/neg/t565.check b/test/files/neg/t565.check index d7657c0f5ddf..97c0f41999d5 100644 --- a/test/files/neg/t565.check +++ b/test/files/neg/t565.check @@ -2,4 +2,4 @@ t565.scala:2: error: only traits and abstract classes can have declared but unde (Note that variables need to be initialized to be defined) var s0: String ^ -one error found +1 error diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check index 54d0cd630426..6be3bb3b766b 100644 --- a/test/files/neg/t5663-badwarneq.check +++ b/test/files/neg/t5663-badwarneq.check @@ -1,42 +1,42 @@ -t5663-badwarneq.scala:48: warning: comparing case class values of types Some[Int] and None.type using `==` will always yield false +t5663-badwarneq.scala:49: warning: comparing case class values of types Some[Int] and None.type using `==` will always yield false println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object ^ -t5663-badwarneq.scala:49: warning: comparing case class values of types Some[Int] and Thing using `==` will always yield false +t5663-badwarneq.scala:50: warning: comparing case class values of types Some[Int] and Thing using `==` will always yield false println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object ^ -t5663-badwarneq.scala:57: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal +t5663-badwarneq.scala:58: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal 
println(t1 == t2) // true, but apparently unrelated, a compromise warning ^ -t5663-badwarneq.scala:58: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal +t5663-badwarneq.scala:59: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated ^ -t5663-badwarneq.scala:61: warning: comparing case class values of types ThingTwo and Some[Int] using `==` will always yield false +t5663-badwarneq.scala:62: warning: comparing case class values of types ThingTwo and Some[Int] using `==` will always yield false println(t3 == Some(1)) // false, warn on different cases ^ -t5663-badwarneq.scala:62: warning: comparing values of types ThingOne and Cousin using `==` will always yield false +t5663-badwarneq.scala:63: warning: comparing values of types ThingOne and Cousin using `==` will always yield false println(t1 == c) // should warn ^ -t5663-badwarneq.scala:70: warning: comparing case class values of types Simple and SimpleSibling.type using `==` will always yield false +t5663-badwarneq.scala:71: warning: comparing case class values of types Simple and SimpleSibling.type using `==` will always yield false println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case ^ -t5663-badwarneq.scala:73: warning: ValueClass1 and Int are unrelated: they will never compare equal +t5663-badwarneq.scala:74: warning: ValueClass1 and Int are unrelated: they will never compare equal println(new ValueClass1(5) == 5) // bad ^ -t5663-badwarneq.scala:75: warning: comparing values of types Int and ValueClass1 using `==` will always yield false +t5663-badwarneq.scala:76: warning: comparing values of types Int and ValueClass1 using `==` will always yield false println(5 == new ValueClass1(5)) // bad ^ -t5663-badwarneq.scala:79: warning: ValueClass2[String] and String are unrelated: they will 
never compare equal +t5663-badwarneq.scala:80: warning: ValueClass2[String] and String are unrelated: they will never compare equal println(new ValueClass2("abc") == "abc") // bad ^ -t5663-badwarneq.scala:80: warning: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal +t5663-badwarneq.scala:81: warning: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes ^ -t5663-badwarneq.scala:82: warning: comparing values of types ValueClass3 and ValueClass2[Int] using `==` will always yield false +t5663-badwarneq.scala:83: warning: comparing values of types ValueClass3 and ValueClass2[Int] using `==` will always yield false println(ValueClass3(5) == new ValueClass2(5)) // bad ^ -t5663-badwarneq.scala:83: warning: comparing values of types ValueClass3 and Int using `==` will always yield false +t5663-badwarneq.scala:84: warning: comparing values of types ValueClass3 and Int using `==` will always yield false println(ValueClass3(5) == 5) // bad ^ -error: No warnings can be incurred under -Xfatal-warnings. -13 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+13 warnings +1 error diff --git a/test/files/neg/t5663-badwarneq.scala b/test/files/neg/t5663-badwarneq.scala index 4dcba7189616..4646b9fd011d 100644 --- a/test/files/neg/t5663-badwarneq.scala +++ b/test/files/neg/t5663-badwarneq.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// // alias trait Thingy @@ -42,7 +43,7 @@ class MyThing extends InThing */ object Test { - def main(a: Array[String]) { + def main(a: Array[String]): Unit = { // nothing to do with Gavin println(new Some(1) == new Some(1)) // OK, true println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object diff --git a/test/files/neg/t5666.check b/test/files/neg/t5666.check index 1c714796ba45..f98929f9d231 100644 --- a/test/files/neg/t5666.check +++ b/test/files/neg/t5666.check @@ -34,4 +34,4 @@ t5666.scala:12: error: class Unit is abstract; cannot be instantiated t5666.scala:13: error: class Nothing is abstract; cannot be instantiated new Nothing ^ -12 errors found +12 errors diff --git a/test/files/neg/t5666.scala b/test/files/neg/t5666.scala index ffaeaacdaf39..21ab9f45a197 100644 --- a/test/files/neg/t5666.scala +++ b/test/files/neg/t5666.scala @@ -11,4 +11,4 @@ object t5666 { new Boolean new Unit new Nothing -} \ No newline at end of file +} diff --git a/test/files/neg/t5675.check b/test/files/neg/t5675.check index 44bcd60299e4..3ed2b22c715b 100644 --- a/test/files/neg/t5675.check +++ b/test/files/neg/t5675.check @@ -1,4 +1,4 @@ -warning: one feature warning; re-run with -feature for details -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +warning: 1 feature warning; re-run with -feature for details +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t5675.scala b/test/files/neg/t5675.scala index 73fc1c2f2902..1e414307798a 100644 --- a/test/files/neg/t5675.scala +++ b/test/files/neg/t5675.scala @@ -1,8 +1,7 @@ -// scalac: -Xfatal-warnings -class PostFix { - val list = List(1, 2, 3) - def main(args: Array[String]) { - val a = list filter (2 !=) - val b = list filter (2 != _) - } +//> using options -Xfatal-warnings +// +// without -feature, don't double-count the warning +// +class OneWarningOnly { + implicit def `this is why we warn`(x: Any): Int = x.toString.toInt } diff --git a/test/files/neg/t5683.check b/test/files/neg/t5683.check deleted file mode 100644 index 7c0e50113c07..000000000000 --- a/test/files/neg/t5683.check +++ /dev/null @@ -1,16 +0,0 @@ -t5683.scala:12: error: inferred kinds of the type arguments (Object,Int) do not conform to the expected kinds of the type parameters (type M,type B). -Object's type parameters do not match type M's expected parameters: -class Object has no type parameters, but type M has one - val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } - ^ -t5683.scala:12: error: type mismatch; - found : Int => Test.W[String,Int] - required: Int => M[B] - val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } - ^ -t5683.scala:12: error: type mismatch; - found : Test.K[M,Int,B] - required: Test.K[Test.StringW,Int,Int] - val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } - ^ -three errors found diff --git a/test/files/neg/t5683.scala b/test/files/neg/t5683.scala deleted file mode 100644 index 05ab03579274..000000000000 --- a/test/files/neg/t5683.scala +++ /dev/null @@ -1,23 +0,0 @@ -object Test { - trait NT[X] - trait W[W, A] extends NT[Int] - type StringW[T] = W[String, T] - trait K[M[_], A, B] - - def k[M[_], B](f: Int => M[B]): K[M, Int, B] = null - - val okay1: K[StringW,Int,Int] = k{ (y: Int) => null: StringW[Int] } - val okay2 = k[StringW,Int]{ (y: Int) => null: W[String, Int] } - - 
val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } - - // remove `extends NT[Int]`, and the last line gives an inference error - // rather than a crash. - // test/files/pos/t5683.scala:12: error: no type parameters for method k: (f: Int => M[B])Test.K[M,Int,B] exist so that it can be applied to arguments (Int => Test.W[String,Int]) - // --- because --- - // argument expression's type is not compatible with formal parameter type; - // found : Int => Test.W[String,Int] - // required: Int => ?M[?B] - // val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } - // ^ -} diff --git a/test/files/neg/t5687.check b/test/files/neg/t5687.check index 5096077ee5d8..baa3ac5c6041 100644 --- a/test/files/neg/t5687.check +++ b/test/files/neg/t5687.check @@ -1,8 +1,10 @@ t5687.scala:4: error: type arguments [T] do not conform to class Template's type parameter bounds [T <: AnyRef] type Repr[T]<:Template[T] ^ -t5687.scala:20: error: overriding type Repr in class Template with bounds[T] <: Template[T]; - type Repr has incompatible type +t5687.scala:20: error: incompatible type in overriding +type Repr[T] <: Template[T] (defined in class Template); + found : CurveTemplate[T(in class CurveTemplate)] + required: [T(in type Repr)] <: Template[T(in type Repr)] type Repr = CurveTemplate[T] ^ -two errors found +2 errors diff --git a/test/files/neg/t5687.scala b/test/files/neg/t5687.scala index 90a9ae265cfb..6ffecf972054 100644 --- a/test/files/neg/t5687.scala +++ b/test/files/neg/t5687.scala @@ -26,7 +26,7 @@ class Y extends Base object Example { - def test1() { + def test1(): Unit = { new CurveTemplate(new Curve).access1(10) new CurveTemplate(new Curve).access2 @@ -41,7 +41,7 @@ object Example { } - def test2() { + def test2(): Unit = { new CurveTemplate(new Curve).access1(10).withReadModifiers(1) new CurveTemplate(new Curve).access2.withReadModifiers(1) diff --git a/test/files/neg/t5689.check b/test/files/neg/t5689.check index a1c3f85fa8a0..9bb645409679 
100644 --- a/test/files/neg/t5689.check +++ b/test/files/neg/t5689.check @@ -1,8 +1,8 @@ -t5689.scala:5: error: macro implementation has incompatible shape: +t5689.scala:6: error: macro implementation has incompatible shape: required: (c: scala.reflect.macros.blackbox.Context)(i: c.Expr[Double]): c.Expr[String] or : (c: scala.reflect.macros.blackbox.Context)(i: c.Tree): c.Tree found : (c: scala.reflect.macros.blackbox.Context)(i: c.Expr[Double]): c.Expr[Int] type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String] def returnsString(i: Double): String = macro returnsIntImpl ^ -one error found +1 error diff --git a/test/files/neg/t5689.scala b/test/files/neg/t5689.scala index efb7a8acd394..23462a860ee6 100644 --- a/test/files/neg/t5689.scala +++ b/test/files/neg/t5689.scala @@ -1,4 +1,5 @@ -// scalac: -language:experimental.macros +//> using options -language:experimental.macros +// import scala.reflect.macros.blackbox.Context object Macros { diff --git a/test/files/neg/t5691.check b/test/files/neg/t5691.check index c69e460e3abd..bba4ad50ada6 100644 --- a/test/files/neg/t5691.check +++ b/test/files/neg/t5691.check @@ -1,24 +1,24 @@ -t5691.scala:8: warning: type parameter D defined in method foobar shadows trait D defined in class B. You may want to rename your type parameter, or possibly remove it. +t5691.scala:9: warning: type parameter D defined in method foobar shadows trait D defined in class B. You may want to rename your type parameter, or possibly remove it. def foobar[D](in: D) = in.toString ^ -t5691.scala:11: warning: type parameter D defined in type MySeq shadows trait D defined in class B. You may want to rename your type parameter, or possibly remove it. +t5691.scala:12: warning: type parameter D defined in type MySeq shadows trait D defined in class B. You may want to rename your type parameter, or possibly remove it. 
type MySeq[D] = Seq[D] ^ -t5691.scala:16: warning: type parameter T defined in method bar shadows type T defined in class Foo. You may want to rename your type parameter, or possibly remove it. +t5691.scala:17: warning: type parameter T defined in method bar shadows type T defined in class Foo. You may want to rename your type parameter, or possibly remove it. def bar[T](w: T) = w.toString ^ -t5691.scala:14: warning: type parameter T defined in class Foo shadows type T defined in class B. You may want to rename your type parameter, or possibly remove it. +t5691.scala:15: warning: type parameter T defined in class Foo shadows type T defined in class B. You may want to rename your type parameter, or possibly remove it. class Foo[T](t: T) { ^ -t5691.scala:20: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it. +t5691.scala:21: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it. class C[M[List[_]]] ^ -t5691.scala:21: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it. +t5691.scala:22: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it. type E[M[List[_]]] = Int ^ -t5691.scala:22: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it. +t5691.scala:23: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it. def foo[N[M[List[_]]]] = ??? ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-7 warnings found -one error found +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/t5691.scala b/test/files/neg/t5691.scala index 24b6f614dc4c..3624e200910d 100644 --- a/test/files/neg/t5691.scala +++ b/test/files/neg/t5691.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint:type-parameter-shadow -language:higherKinds -Xfatal-warnings +//> using options -Xlint:type-parameter-shadow -language:higherKinds -Xfatal-warnings +// class B { type T = Int diff --git a/test/files/neg/t5696.check b/test/files/neg/t5696.check index e0fb61b83961..7f983b03b1b3 100644 --- a/test/files/neg/t5696.check +++ b/test/files/neg/t5696.check @@ -16,4 +16,4 @@ t5696.scala:38: error: too many argument lists for constructor invocation t5696.scala:46: error: too many argument lists for constructor invocation object x extends G(1)(2) {} ^ -6 errors found +6 errors diff --git a/test/files/neg/t5702-neg-bad-and-wild.check b/test/files/neg/t5702-neg-bad-and-wild.check index a52136dbf889..ee78dd76642b 100644 --- a/test/files/neg/t5702-neg-bad-and-wild.check +++ b/test/files/neg/t5702-neg-bad-and-wild.check @@ -25,4 +25,4 @@ t5702-neg-bad-and-wild.scala:23: error: bad simple pattern: bad use of _* (a seq t5702-neg-bad-and-wild.scala:24: error: bad simple pattern: bad use of _* (sequence pattern not allowed) val (b, _ * ) = (5,6) // bad use of _* (sequence pattern not allowed) ^ -9 errors found +9 errors diff --git a/test/files/neg/t5702-neg-bad-brace.check b/test/files/neg/t5702-neg-bad-brace.check index dce59017d9b5..bdd68e43f892 100644 --- a/test/files/neg/t5702-neg-bad-brace.check +++ b/test/files/neg/t5702-neg-bad-brace.check @@ -1,7 +1,7 @@ -t5702-neg-bad-brace.scala:8: error: Unmatched closing brace '}' ignored here +t5702-neg-bad-brace.scala:7: error: Unmatched closing brace '}' ignored here case List(1, _*} => ^ -t5702-neg-bad-brace.scala:11: error: eof expected but '}' found. +t5702-neg-bad-brace.scala:10: error: eof expected but '}' found. 
} ^ -two errors found +2 errors diff --git a/test/files/neg/t5702-neg-bad-brace.scala b/test/files/neg/t5702-neg-bad-brace.scala index c69436ed6b8b..49f55a37b2b2 100644 --- a/test/files/neg/t5702-neg-bad-brace.scala +++ b/test/files/neg/t5702-neg-bad-brace.scala @@ -3,7 +3,6 @@ object Test { def main(args: Array[String]): Unit = { val is = List(1,2,3) - is match { case List(1, _*} => } diff --git a/test/files/neg/t5702-neg-bad-xbrace.check b/test/files/neg/t5702-neg-bad-xbrace.check index 9240abea4489..d8babc395b93 100644 --- a/test/files/neg/t5702-neg-bad-xbrace.check +++ b/test/files/neg/t5702-neg-bad-xbrace.check @@ -4,4 +4,4 @@ t5702-neg-bad-xbrace.scala:19: error: bad simple pattern: bad brace or paren aft t5702-neg-bad-xbrace.scala:28: error: bad simple pattern: bad brace or paren after _* val {a, z@_*)} = xml ^ -two errors found +2 errors diff --git a/test/files/neg/t5702-neg-ugly-xbrace.check b/test/files/neg/t5702-neg-ugly-xbrace.check index 31fc4b6bb4d7..7231bcf902a3 100644 --- a/test/files/neg/t5702-neg-ugly-xbrace.check +++ b/test/files/neg/t5702-neg-ugly-xbrace.check @@ -1,7 +1,7 @@ t5702-neg-ugly-xbrace.scala:11: error: bad simple pattern: bad brace or paren after _* val {a, z@_*) = xml ^ -t5702-neg-ugly-xbrace.scala:12: error: Missing closing brace `}' assumed here +t5702-neg-ugly-xbrace.scala:12: error: Missing closing brace `}` assumed here println("A for "+ a +", ending with "+ z) ^ t5702-neg-ugly-xbrace.scala:13: error: in XML literal: in XML content, please use '}}' to express '}' @@ -16,4 +16,4 @@ t5702-neg-ugly-xbrace.scala:14: error: illegal start of simple pattern t5702-neg-ugly-xbrace.scala:14: error: '}' expected but eof found. 
} ^ -6 errors found +6 errors diff --git a/test/files/neg/t5702-neg-ugly-xbrace.scala b/test/files/neg/t5702-neg-ugly-xbrace.scala index 0ff7bfa09d5a..6e2ddb092454 100644 --- a/test/files/neg/t5702-neg-ugly-xbrace.scala +++ b/test/files/neg/t5702-neg-ugly-xbrace.scala @@ -1,6 +1,6 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val xml = appleboychild // This is the more likely typo, and the uglier parse. diff --git a/test/files/neg/t5715.check b/test/files/neg/t5715.check new file mode 100644 index 000000000000..5c6d40a1a9fb --- /dev/null +++ b/test/files/neg/t5715.check @@ -0,0 +1,12 @@ +t5715.scala:19: warning: Wrap `then` in backticks to use it as an identifier, it will become a keyword in Scala 3. [quickfixable] + object then // keyword + ^ +t5715.scala:12: warning: class then in package example is deprecated (since 0.1): that was then + val u = new `then` // backticked but deprecated ref + ^ +t5715.scala:16: warning: class then in package example is deprecated (since 0.1): that was then + val y = new then // keyword and deprecated ref + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t5715.scala b/test/files/neg/t5715.scala new file mode 100644 index 000000000000..5ea9c2a3af7a --- /dev/null +++ b/test/files/neg/t5715.scala @@ -0,0 +1,20 @@ + +//> using options -Xlint -Werror + +package example + +@deprecated("that was then", "0.1") +class then // suppressed + +object X { + @deprecated("this is now", "0.5") + val t = new then // suppressed + val u = new `then` // backticked but deprecated ref +} + +object Y { + val y = new then // keyword and deprecated ref +} +object Z { + object then // keyword +} diff --git a/test/files/neg/t5728.check b/test/files/neg/t5728.check index 14f9c42ae007..ac95d8e548a5 100644 --- a/test/files/neg/t5728.check +++ b/test/files/neg/t5728.check @@ -1,4 +1,8 @@ t5728.scala:3: error: implicit classes must accept exactly one primary constructor parameter implicit class Foo ^ -one error found +t5728.scala:5: warning: Implicit definition should have explicit type (inferred Test.Foo) [quickfixable] + implicit def Foo = new Foo + ^ +1 warning +1 error diff --git a/test/files/neg/t5735.check b/test/files/neg/t5735.check index f6e002804404..f187d54caadf 100644 --- a/test/files/neg/t5735.check +++ b/test/files/neg/t5735.check @@ -1,6 +1,6 @@ t5735.scala:6: error: type mismatch; - found : (x: Int)Int => String + found : String required: Int val z: Int = a ^ -one error found +1 error diff --git a/test/files/neg/t5753.check b/test/files/neg/t5753.check index 4da5a8adf5e6..72e3fc9826f4 100644 --- a/test/files/neg/t5753.check +++ b/test/files/neg/t5753.check @@ -2,4 +2,4 @@ Test_2.scala:10: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them) println(foo(42)) ^ -one error found +1 error diff --git a/test/files/neg/t5753/Impls_Macros_1.scala b/test/files/neg/t5753/Impls_Macros_1.scala index 7b2c7f14be77..9872c69171e8 100644 --- a/test/files/neg/t5753/Impls_Macros_1.scala 
+++ b/test/files/neg/t5753/Impls_Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context trait Impls { diff --git a/test/files/neg/t5753/Test_2.scala b/test/files/neg/t5753/Test_2.scala index 47fc6859c7fa..32005eb3aaf6 100644 --- a/test/files/neg/t5753/Test_2.scala +++ b/test/files/neg/t5753/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros extends Impls { diff --git a/test/files/neg/t576.check b/test/files/neg/t576.check index 7105c92866b0..4823716188ff 100644 --- a/test/files/neg/t576.check +++ b/test/files/neg/t576.check @@ -1,4 +1,4 @@ t576.scala:14: error: overloaded method insert needs result type if (true) sibling.insert(node); ^ -one error found +1 error diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check index 15c0bc76341e..b79c2526917f 100644 --- a/test/files/neg/t5761.check +++ b/test/files/neg/t5761.check @@ -1,12 +1,12 @@ -t5761.scala:4: error: not enough arguments for constructor D: (x: Int)D[Int]. +t5761.scala:4: error: not enough arguments for constructor D: (x: Int): D[Int]. Unspecified value parameter x. println(new D[Int]{}) // crash ^ -t5761.scala:8: error: not enough arguments for constructor D: (x: Int)D[Int]. +t5761.scala:8: error: not enough arguments for constructor D: (x: Int): D[Int]. Unspecified value parameter x. println(new D[Int]()) // no crash ^ -t5761.scala:9: error: not enough arguments for constructor D: (x: Int)D[Int]. +t5761.scala:9: error: not enough arguments for constructor D: (x: Int): D[Int]. Unspecified value parameter x. 
println(new D[Int]{}) // crash ^ @@ -16,4 +16,4 @@ t5761.scala:13: error: not found: type Tread t5761.scala:13: error: value run is not a member of new Tread("sth") { }.run() ^ -5 errors found +5 errors diff --git a/test/files/neg/t5762.check b/test/files/neg/t5762.check index 8de7c532d362..ca2f286bbb88 100644 --- a/test/files/neg/t5762.check +++ b/test/files/neg/t5762.check @@ -1,15 +1,15 @@ -t5762.scala:7: warning: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure +t5762.scala:8: warning: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure case _: D[Int] if bippy => 1 ^ -t5762.scala:8: warning: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure +t5762.scala:9: warning: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure case _: D[String] => 2 ^ -t5762.scala:21: warning: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure +t5762.scala:22: warning: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure case _: D[D[Int]] if bippy => 1 ^ -t5762.scala:22: warning: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure +t5762.scala:23: warning: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure case _: D[D[String]] => 2 ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t5762.scala b/test/files/neg/t5762.scala index d63815b95d8d..a2341860f600 100644 --- a/test/files/neg/t5762.scala +++ b/test/files/neg/t5762.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class D[-A] object Test { diff --git a/test/files/neg/t5799.check b/test/files/neg/t5799.check index 3b43d06a944f..899196ce5e59 100644 --- a/test/files/neg/t5799.check +++ b/test/files/neg/t5799.check @@ -1,4 +1,4 @@ t5799.scala:2: error: secondary constructor is not allowed in value class def this(s: String) = this(s.toDouble) ^ -one error found +1 error diff --git a/test/files/neg/t5801.check b/test/files/neg/t5801.check index abf8e6e9326a..d8156d3f6340 100644 --- a/test/files/neg/t5801.check +++ b/test/files/neg/t5801.check @@ -1,6 +1,6 @@ t5801.scala:1: error: object sth is not a member of package scala import scala.sth - ^ + ^ t5801.scala:4: error: not found: value sth def foo(a: Int)(implicit b: sth.Sth): Unit = {} ^ @@ -19,4 +19,4 @@ t5801.scala:13: error: not found: value sth t5801.scala:14: error: could not find implicit value for parameter b: Int meh2(1) ^ -7 errors found +7 errors diff --git a/test/files/neg/t5803.check b/test/files/neg/t5803.check index 6a2de2e1df36..54d348450455 100644 --- a/test/files/neg/t5803.check +++ b/test/files/neg/t5803.check @@ -1,4 +1,4 @@ t5803.scala:3: error: could not find implicit value for parameter ev: Nothing new Foo(): String ^ -one error found +1 error diff --git a/test/files/neg/t5821.check b/test/files/neg/t5821.check index f9c00604bc8a..fcee5ed8c825 100644 --- a/test/files/neg/t5821.check +++ b/test/files/neg/t5821.check @@ -1,4 +1,4 @@ t5821.scala:1: error: not found: object SthImportant import SthImportant._ ^ -one error found +1 error diff --git a/test/files/neg/t5830.check b/test/files/neg/t5830.check index e67ad3772b96..f365277ec1b6 100644 --- a/test/files/neg/t5830.check +++ b/test/files/neg/t5830.check @@ -1,9 +1,9 @@ 
-t5830.scala:7: warning: unreachable code +t5830.scala:8: warning: unreachable code case 'a' => println("b") // unreachable ^ -t5830.scala:5: warning: could not emit switch for @switch annotated match +t5830.scala:6: warning: could not emit switch for @switch annotated match def unreachable(ch: Char) = (ch: @switch) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t5830.scala b/test/files/neg/t5830.scala index 7b8cfc64b12e..8a3f96485f95 100644 --- a/test/files/neg/t5830.scala +++ b/test/files/neg/t5830.scala @@ -1,10 +1,11 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// import scala.annotation.switch class Test { def unreachable(ch: Char) = (ch: @switch) match { case 'a' => println("b") // ok case 'a' => println("b") // unreachable - case 'c' => + case _ => } } diff --git a/test/files/neg/t5839.check b/test/files/neg/t5839.check index d4b125bd1e8c..de554ea026d7 100644 --- a/test/files/neg/t5839.check +++ b/test/files/neg/t5839.check @@ -1,6 +1,6 @@ t5839.scala:5: error: type mismatch; - found : (x: String => String)Int (x: Int)Int + found : (x: String => String): Int (x: Int): Int required: Int => String val x: String = goo(foo _) ^ -one error found +1 error diff --git a/test/files/neg/t585.check b/test/files/neg/t585.check index d332ac541460..232508747f81 100644 --- a/test/files/neg/t585.check +++ b/test/files/neg/t585.check @@ -1,4 +1,4 @@ t585.scala:1: error: unclosed comment /* ^ -one error found +1 error diff --git a/test/files/neg/t5856.check b/test/files/neg/t5856.check index 306cc0417772..3d035a87e15c 100644 --- a/test/files/neg/t5856.check +++ b/test/files/neg/t5856.check @@ -1,9 +1,6 @@ -t5856.scala:10: error: invalid string interpolation $", expected: $$, $identifier or ${expression} - val s9 = s"$" - ^ t5856.scala:10: error: unclosed string literal val s9 = s"$" - ^ + ^ 
t5856.scala:2: error: error in interpolated string: identifier or block expected val s1 = s"$null" ^ @@ -28,4 +25,4 @@ t5856.scala:8: error: error in interpolated string: identifier or block expected t5856.scala:9: error: error in interpolated string: identifier or block expected val s8 = s"$super" ^ -10 errors found +9 errors diff --git a/test/files/neg/t5856.scala b/test/files/neg/t5856.scala index 2ceee590af55..2d580d3ed540 100644 --- a/test/files/neg/t5856.scala +++ b/test/files/neg/t5856.scala @@ -8,4 +8,12 @@ object Test { val s7 = s"$s1 $null $super" val s8 = s"$super" val s9 = s"$" -} \ No newline at end of file + val sA = s"$this" + + // is Java but not UnicodeIdentifierStart + //val ₵ = "cents" + //val sense = s"$₵" + + val ᛯ = "sign" + def sign = s"$ᛯ" +} diff --git a/test/files/neg/t5878.check b/test/files/neg/t5878.check index c60c4653a27e..d75941b5de3f 100644 --- a/test/files/neg/t5878.check +++ b/test/files/neg/t5878.check @@ -10,4 +10,4 @@ class Foo1(val x: Bar1) extends AnyVal t5878.scala:5: error: value class may not wrap another user-defined value class class Bar1(val x: Foo1) extends AnyVal ^ -four errors found +4 errors diff --git a/test/files/neg/t588.check b/test/files/neg/t588.check index ff08f77a6fe7..867585042baf 100644 --- a/test/files/neg/t588.check +++ b/test/files/neg/t588.check @@ -1,13 +1,13 @@ t588.scala:3: error: double definition: def visit(f: Int => Unit): Boolean at line 2 and def visit(f: Int => String): Boolean at line 3 -have same type after erasure: (f: Function1)Boolean +have same type after erasure: (f: Function1): Boolean def visit(f: Int => String): Boolean ^ t588.scala:10: error: double definition: def f(node: Test.this.TypeA): Unit at line 9 and def f(brac: Test.this.TypeB): Unit at line 10 -have same type after erasure: (node: Test#TraitA)Unit +have same type after erasure: (node: Test#TraitA): Unit def f(brac : TypeB) : Unit; ^ -two errors found +2 errors diff --git a/test/files/neg/t5882.check 
b/test/files/neg/t5882.check index e0958e19d983..2163bd46c8ef 100644 --- a/test/files/neg/t5882.check +++ b/test/files/neg/t5882.check @@ -6,4 +6,4 @@ t5882.scala:5: error: implementation restriction: nested object is not allowed i This restriction is planned to be removed in subsequent releases. object Bar ^ -two errors found +2 errors diff --git a/test/files/neg/t5887.check b/test/files/neg/t5887.check new file mode 100644 index 000000000000..21bedc99d721 --- /dev/null +++ b/test/files/neg/t5887.check @@ -0,0 +1,29 @@ +t5887.scala:6: error: type mismatch; + found : Int(22) + required: Throwable => ? + def f = try ??? catch 22 + ^ +t5887.scala:10: error: missing parameter type for expanded function +The argument types of an anonymous function must be fully known. (SLS 8.5) +Expected type was: ? + def h = List("x") map (s => try { case _ => 7 }) + ^ +t5887.scala:29: error: type mismatch; + found : TheOldCollegeTry.this.catcher.type + required: Throwable => Int + def noLongerAllower: Int = try 42 catch catcher + ^ +t5887.scala:8: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. + def g = try 42 + ^ +t5887.scala:10: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. + def h = List("x") map (s => try { case _ => 7 }) + ^ +t5887.scala:12: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. + def j = try ??? catch (_ => 42) + ^ +t5887.scala:18: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. 
+ def k2 = try 27 catch recover + ^ +4 warnings +3 errors diff --git a/test/files/neg/t5887.scala b/test/files/neg/t5887.scala new file mode 100644 index 000000000000..e8dc51f91af8 --- /dev/null +++ b/test/files/neg/t5887.scala @@ -0,0 +1,30 @@ + +trait TheOldCollegeTry { + + // was: value isDefinedAt is not a member of Int + // now: required: Function[Throwable,?] + def f = try ??? catch 22 + + def g = try 42 + + def h = List("x") map (s => try { case _ => 7 }) + + def j = try ??? catch (_ => 42) + + import PartialFunction.fromFunction + + def recover(t: Throwable): Int = 42 + def k = try 27 catch fromFunction(recover) + def k2 = try 27 catch recover + + def parseErrorHandler[T]: PartialFunction[Throwable, T] = ??? + def pushBusy[T](body: => T): T = + try body + catch parseErrorHandler + + object catcher { + def isDefinedAt(x: Any) = true + def apply(x: Any) = 27 + } + def noLongerAllower: Int = try 42 catch catcher +} diff --git a/test/files/neg/t5892.check b/test/files/neg/t5892.check index 839bf9de2302..fac355ecb1bd 100644 --- a/test/files/neg/t5892.check +++ b/test/files/neg/t5892.check @@ -14,4 +14,4 @@ t5892.scala:13: error: type mismatch; t5892.scala:17: error: not found: value b2s @annot(b2s(false)) class F { ^ -four errors found +4 errors diff --git a/test/files/neg/t5898.check b/test/files/neg/t5898.check new file mode 100644 index 000000000000..9c9e7f201995 --- /dev/null +++ b/test/files/neg/t5898.check @@ -0,0 +1,11 @@ +t5898.scala:9: warning: match may not be exhaustive. +It would fail on the following input: C(_) + def f() = t match { case x: D => ??? } + ^ +t5898.scala:10: warning: match may not be exhaustive. +It would fail on the following input: C(_) + val D(x) = t + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t5898.scala b/test/files/neg/t5898.scala new file mode 100644 index 000000000000..23057bc7b09e --- /dev/null +++ b/test/files/neg/t5898.scala @@ -0,0 +1,12 @@ +//> using options -Xlint:valpattern -Xfatal-warnings +// +sealed trait T +case class C(i: Int) extends T +case class D(i: Int) extends T + +trait Test { + def t: T = C(42) + def f() = t match { case x: D => ??? } + val D(x) = t + val D(y) = (null: Any) +} diff --git a/test/files/neg/t5903a.check b/test/files/neg/t5903a.check index 34003b0a82cb..617a96588daa 100644 --- a/test/files/neg/t5903a.check +++ b/test/files/neg/t5903a.check @@ -1,4 +1,4 @@ Test_2.scala:4: error: too many patterns for <$anon: AnyRef> offering (SomeTree.type, SomeTree.type): expected 2, found 3 case nq"$x + $y + $z" => println((x, y)) ^ -one error found +1 error diff --git a/test/files/neg/t5903b.check b/test/files/neg/t5903b.check index e7637d3edb59..2bbfcd1a41c3 100644 --- a/test/files/neg/t5903b.check +++ b/test/files/neg/t5903b.check @@ -3,4 +3,4 @@ Test_2.scala:4: error: type mismatch; required: String case t"$x" => println(x) ^ -one error found +1 error diff --git a/test/files/neg/t5903c.check b/test/files/neg/t5903c.check index 05bd775d3022..2e093386fde3 100644 --- a/test/files/neg/t5903c.check +++ b/test/files/neg/t5903c.check @@ -1,4 +1,4 @@ Test_2.scala:4: error: String is not supported case t"$x" => println(x) ^ -one error found +1 error diff --git a/test/files/neg/t5903d.check b/test/files/neg/t5903d.check index 54a91a7ba6d3..24b8fed28396 100644 --- a/test/files/neg/t5903d.check +++ b/test/files/neg/t5903d.check @@ -1,4 +1,4 @@ Test_2.scala:4: error: extractor macros can only be whitebox case t"$x" => println(x) ^ -one error found +1 error diff --git a/test/files/neg/t5903e.check b/test/files/neg/t5903e.check index 3bdeb091a0e4..695327988f66 100644 --- a/test/files/neg/t5903e.check +++ b/test/files/neg/t5903e.check @@ -1,4 +1,4 @@ Test_2.scala:4: error: value class may not be 
a member of another class case t"$x" => println(x) ^ -one error found +1 error diff --git a/test/files/neg/t591.check b/test/files/neg/t591.check index c0bade08146c..98a1abfb888e 100644 --- a/test/files/neg/t591.check +++ b/test/files/neg/t591.check @@ -2,4 +2,4 @@ t591.scala:40: error: method input_= is defined twice; the conflicting variable input was defined at line 35:18 def input_=(in : Input) = {} ^ -one error found +1 error diff --git a/test/files/neg/t593.check b/test/files/neg/t593.check index c1aeab8ec47f..36e5ed3fac6c 100644 --- a/test/files/neg/t593.check +++ b/test/files/neg/t593.check @@ -1,4 +1,4 @@ t593.scala:1: error: traits or objects may not have parameters trait Wrapper[T](x : T) { ^ -one error found +1 error diff --git a/test/files/neg/t5956.check b/test/files/neg/t5956.check index e3f7bf33c4c3..0333872d3b0d 100644 --- a/test/files/neg/t5956.check +++ b/test/files/neg/t5956.check @@ -1,7 +1,7 @@ -t5956.scala:2: error: C is already defined as case class C +t5956.scala:3: error: C is already defined as case class C object O { case class C[T](); class C() } ^ -t5956.scala:3: error: C is already defined as case class C +t5956.scala:4: error: C is already defined as case class C object T { case class C[T](); case class C() } ^ -two errors found +2 errors diff --git a/test/files/neg/t5956.scala b/test/files/neg/t5956.scala index 94b96fdfef43..507e721f4043 100644 --- a/test/files/neg/t5956.scala +++ b/test/files/neg/t5956.scala @@ -1,3 +1,4 @@ -// scalac: -deprecation +//> using options -deprecation +// object O { case class C[T](); class C() } object T { case class C[T](); case class C() } diff --git a/test/files/neg/t5969.check b/test/files/neg/t5969.check index 9d8ac9a3a5a6..4fc6fafd7dc7 100644 --- a/test/files/neg/t5969.check +++ b/test/files/neg/t5969.check @@ -1,7 +1,7 @@ -t5969.scala:9: error: overloaded method value g with alternatives: +t5969.scala:9: error: overloaded method g with alternatives: (x: C2)String (x: C1)String cannot be applied 
to (String) if (false) List(g(x)) else List[C1]() map g ^ -one error found +1 error diff --git a/test/files/neg/t6011.check b/test/files/neg/t6011.check index 98fcf4e673a5..a3e2da5d250d 100644 --- a/test/files/neg/t6011.check +++ b/test/files/neg/t6011.check @@ -1,12 +1,12 @@ -t6011.scala:5: warning: unreachable code +t6011.scala:6: warning: unreachable code case 'a' | 'c' => 1 // unreachable ^ -t6011.scala:11: warning: unreachable code +t6011.scala:13: warning: unreachable code case 'b' | 'a' => 1 // unreachable ^ -t6011.scala:9: warning: could not emit switch for @switch annotated match +t6011.scala:11: warning: could not emit switch for @switch annotated match def f2(ch: Char): Any = (ch: @annotation.switch) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t6011.scala b/test/files/neg/t6011.scala index 14da7c68da6f..c909c4e4691b 100644 --- a/test/files/neg/t6011.scala +++ b/test/files/neg/t6011.scala @@ -1,8 +1,10 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { def f(ch: Char): Any = ch match { case 'a' => 1 case 'a' | 'c' => 1 // unreachable + case _ => throw new MatchError(ch) } // won't be compiled to a switch since it has an unreachable (duplicate) case diff --git a/test/files/neg/t6013.check b/test/files/neg/t6013.check index 502da999f56f..daa978432760 100644 --- a/test/files/neg/t6013.check +++ b/test/files/neg/t6013.check @@ -1,7 +1,11 @@ -DerivedScala.scala:4: error: class C needs to be abstract, since there is a deferred declaration of method foo in class B of type => Int which is not implemented in a subclass +DerivedScala.scala:4: error: class C needs to be abstract. 
+No implementation found in a subclass for deferred declaration +def foo: Int (defined in class B) class C extends B ^ -DerivedScala.scala:7: error: class DerivedScala needs to be abstract, since there is a deferred declaration of method foo in class Abstract of type ()Boolean which is not implemented in a subclass +DerivedScala.scala:7: error: class DerivedScala needs to be abstract. +No implementation found in a subclass for deferred declaration +def foo(): Boolean (defined in class Abstract) class DerivedScala extends Abstract ^ -two errors found +2 errors diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check index 350f796d1830..a55bb551035a 100644 --- a/test/files/neg/t6040.check +++ b/test/files/neg/t6040.check @@ -6,4 +6,4 @@ See the Scaladoc for value scala.language.dynamics for a discussion why the feature needs to be explicitly enabled. class X extends Dynamic ^ -one error found +1 error diff --git a/test/files/neg/t6040.scala b/test/files/neg/t6040.scala index b8f7dab7a4d7..7e2c1b1057fe 100644 --- a/test/files/neg/t6040.scala +++ b/test/files/neg/t6040.scala @@ -1 +1 @@ -class X extends Dynamic \ No newline at end of file +class X extends Dynamic diff --git a/test/files/neg/t6042.check b/test/files/neg/t6042.check index 221f06e2c57e..aa259970adf9 100644 --- a/test/files/neg/t6042.check +++ b/test/files/neg/t6042.check @@ -1,4 +1,4 @@ t6042.scala:7: error: illegal type selection from volatile type a.OpSemExp (with upper bound LazyExp[a.OpSemExp] with _$1) def foo[AA <: LazyExp[_]](a: AA): a.OpSemExp#Val = ??? 
// a.OpSemExp is volatile, because of `with This` ^ -one error found +1 error diff --git a/test/files/neg/t6048.check b/test/files/neg/t6048.check index fe8848cbdb36..f5a9e1400f6d 100644 --- a/test/files/neg/t6048.check +++ b/test/files/neg/t6048.check @@ -1,18 +1,18 @@ -t6048.scala:4: warning: unreachable code +t6048.scala:5: warning: unreachable code case _ if false => x // unreachable ^ -t6048.scala:9: warning: unreachable code +t6048.scala:10: warning: unreachable code case _ if false => x // unreachable ^ -t6048.scala:14: warning: patterns after a variable pattern cannot match (SLS 8.1.1) +t6048.scala:15: warning: patterns after a variable pattern cannot match (SLS 8.1.1) case _ => x ^ -t6048.scala:15: warning: unreachable code due to variable pattern on line 14 - case 5 if true => x // unreachable +t6048.scala:16: warning: unreachable code due to variable pattern on line 15 + case _ if true => x // unreachable ^ -t6048.scala:15: warning: unreachable code - case 5 if true => x // unreachable +t6048.scala:16: warning: unreachable code + case _ if true => x // unreachable ^ -error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/t6048.scala b/test/files/neg/t6048.scala index fc029dd1f4a8..431cb6adde64 100644 --- a/test/files/neg/t6048.scala +++ b/test/files/neg/t6048.scala @@ -1,18 +1,19 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class A { def f1(x: Int) = x match { case _ if false => x // unreachable - case 5 => x + case _ => x } def f2(x: Int) = x match { case _ if false => x // unreachable - case 5 if true => x + case _ if true => x } def f3(x: Int) = x match { case _ => x - case 5 if true => x // unreachable + case _ if true => x // unreachable } def test1(x: Int) = x match { diff --git a/test/files/neg/t6074.check b/test/files/neg/t6074.check index 38670e5b3d66..4ad3ee3c7d9e 100644 --- a/test/files/neg/t6074.check +++ b/test/files/neg/t6074.check @@ -1,4 +1,4 @@ -t6074.scala:5: error: constructor A in class A cannot be accessed in object T +t6074.scala:5: error: constructor A in class A cannot be accessed in object T from object T def t = new A() ^ -one error found +1 error diff --git a/test/files/neg/t608.check b/test/files/neg/t608.check index 5c7f49d00420..eaccd71df7d3 100644 --- a/test/files/neg/t608.check +++ b/test/files/neg/t608.check @@ -3,4 +3,4 @@ t608.scala:16: error: type mismatch; required: hs{type s = hs; type a = ha} = g(f(x).bimap(id)) ^ -one error found +1 error diff --git a/test/files/neg/t608.scala b/test/files/neg/t608.scala index 34dc4c03525d..76a3582020c0 100644 --- a/test/files/neg/t608.scala +++ b/test/files/neg/t608.scala @@ -1,17 +1,17 @@ trait CrashDueToTypeError { - def id[a](x :a) :a = x + def id[a](x :a): a = x trait Bifunctor { type a; // content type s <: Bifunctor - // uncomment this-vvvvvvvvvvvvvvvvvvvvvvvvvvvv, and it compiles - def bimap[c](f :a=>c) :s{/*type s=Bifunctor.this.s;*/type a=c; } + // uncomment this-vvvvvvvvvvvvvvvvvvvvvvvvvvvv, and it compiles + def bimap[c](f: a => c) :s{/*type s=Bifunctor.this.s;*/type a=c; } } def hylo[hs <: Bifunctor,ha,hb,hc] - (f 
:hb=>hs{type s=hs; type a=ha}, - g :hs{type s=hs; type a=ha}=>hc)(x :hb) - :hc + (f: hb => hs {type s = hs; type a = ha}, + g: hs {type s = hs; type a = ha} => hc)(x: hb) + : hc = g(f(x).bimap(id)) } diff --git a/test/files/neg/t6082.check b/test/files/neg/t6082.check deleted file mode 100644 index 9f757d2db82a..000000000000 --- a/test/files/neg/t6082.check +++ /dev/null @@ -1,13 +0,0 @@ -t6082.scala:2: error: classfile annotation arguments have to be supplied as named arguments -@annot("") class C - ^ -t6082.scala:2: error: annotation annot is missing argument notValue -@annot("") class C - ^ -t6082.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class annot(notValue: String) extends annotation.ClassfileAnnotation - ^ -one warning found -two errors found diff --git a/test/files/neg/t6082.scala b/test/files/neg/t6082.scala deleted file mode 100644 index 30de91a4c926..000000000000 --- a/test/files/neg/t6082.scala +++ /dev/null @@ -1,2 +0,0 @@ -class annot(notValue: String) extends annotation.ClassfileAnnotation -@annot("") class C \ No newline at end of file diff --git a/test/files/neg/t6083.check b/test/files/neg/t6083.check index 7116bda41d9a..20dd4fa62de1 100644 --- a/test/files/neg/t6083.check +++ b/test/files/neg/t6083.check @@ -1,10 +1,4 @@ t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101) @annot(101) class C ^ -t6083.scala:6: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. 
-class annot(value: String) extends annotation.ClassfileAnnotation - ^ -one warning found -one error found +1 error diff --git a/test/files/neg/t6083.scala b/test/files/neg/t6083.scala index 1de18e65279e..21d1ab42ea5f 100644 --- a/test/files/neg/t6083.scala +++ b/test/files/neg/t6083.scala @@ -3,5 +3,5 @@ object conv { } import conv._ -class annot(value: String) extends annotation.ClassfileAnnotation +class annot(value: String) extends annotation.ConstantAnnotation @annot(101) class C diff --git a/test/files/neg/t6120.check b/test/files/neg/t6120.check index 75294adfe568..714da810df67 100644 --- a/test/files/neg/t6120.check +++ b/test/files/neg/t6120.check @@ -1,20 +1,20 @@ -t6120.scala:6: warning: postfix operator bippy should be enabled -by making the implicit value scala.language.postfixOps visible. -This can be achieved by adding the import clause 'import scala.language.postfixOps' -or by setting the compiler option -language:postfixOps. -See the Scaladoc for value scala.language.postfixOps for a discussion -why the feature should be explicitly enabled. 
- def f = null == null bippy - ^ -t6120.scala:6: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo - def f = null == null bippy - ^ -t6120.scala:6: warning: comparing values of types Null and Null using `==` will always yield true - def f = null == null bippy +t6120.scala:14: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo + def f = (null == null).bippy + ^ +t6120.scala:14: warning: method bippy in class BooleanOps has changed semantics in version 2.12.6: +Used to return 5 + def f = (null == null).bippy + ^ +t6120.scala:14: warning: comparing values of types Null and Null using `==` will always yield true + def f = (null == null).bippy + ^ +t6120.scala:15: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo + def g = true.bippy ^ -t6120.scala:7: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo +t6120.scala:15: warning: method bippy in class BooleanOps has changed semantics in version 2.12.6: +Used to return 5 def g = true.bippy ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/t6120.scala b/test/files/neg/t6120.scala index 23d2cf1cf909..dc86ef752102 100644 --- a/test/files/neg/t6120.scala +++ b/test/files/neg/t6120.scala @@ -1,8 +1,32 @@ -// scalac: -feature -deprecation -Xfatal-warnings +//> using options -deprecation -Werror -Xmigration:2.10 +// +// showing that multiple warnings at same location are reported +// +package scala.test +import scala.annotation._ + class A { implicit class BooleanOps(val b: Boolean) { - @deprecated("bobo", "2.11.0") def bippy() = 5 + @deprecated("bobo", since="2.11.0") + @migration("Used to return 5", changedIn="2.12.6") + def bippy = 42 } - def f = null == null bippy + def f = (null == null).bippy def g = true.bippy } + +/* + +$ ~/scala-2.10.4/bin/scalac -d /tmp -Xmigration:2.10 -deprecation test/files/neg/t6120.scala +test/files/neg/t6120.scala:14: warning: method bippy in class BooleanOps is deprecated: bobo + def f = (null == null).bippy + ^ +test/files/neg/t6120.scala:14: warning: comparing values of types Null and Null using `==` will always yield true + def f = (null == null).bippy + ^ +test/files/neg/t6120.scala:15: warning: method bippy in class BooleanOps is deprecated: bobo + def g = true.bippy + ^ +three warnings found + + */ diff --git a/test/files/neg/t6123-explaintypes-macros.check b/test/files/neg/t6123-explaintypes-macros.check index 7f74104394cd..2a347759de0a 100644 --- a/test/files/neg/t6123-explaintypes-macros.check +++ b/test/files/neg/t6123-explaintypes-macros.check @@ -7,4 +7,4 @@ BadMac_2.scala:7: error: macro implementation has incompatible shape: type mismatch for parameter params: c.Expr[Any]* does not conform to c.Expr[String]* def printf(format: String, params: Any*): Unit = macro printf_impl ^ -one error found +1 error diff --git a/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala b/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala index 65303fb6e6dd..c9d33756d6a7 100644 --- 
a/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala +++ b/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala @@ -1,4 +1,4 @@ -// scalac: -explaintypes +//> using options -explaintypes import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/neg/t6123-explaintypes-macros/Macros.scala b/test/files/neg/t6123-explaintypes-macros/Macros.scala index 21979a86207d..a6feb5b88dbc 100644 --- a/test/files/neg/t6123-explaintypes-macros/Macros.scala +++ b/test/files/neg/t6123-explaintypes-macros/Macros.scala @@ -1,4 +1,4 @@ -// scalac: -explaintypes +//> using options -explaintypes import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/neg/t6124.check b/test/files/neg/t6124.check new file mode 100644 index 000000000000..9af32ba09436 --- /dev/null +++ b/test/files/neg/t6124.check @@ -0,0 +1,49 @@ +t6124.scala:3: error: illegal separator + def i: Int = 123_456_ + ^ +t6124.scala:4: error: illegal separator + def j: Long = 123_456_L * 1000 + ^ +t6124.scala:7: error: illegal separator + def f = 3_14_E-2 + ^ +t6124.scala:8: error: illegal separator + def e = 3_14E-_2 + ^ +t6124.scala:9: error: illegal separator + def d = 3_14E-2_ + ^ +t6124.scala:11: error: illegal separator + def p = 3.1_4_ + ^ +t6124.scala:12: error: illegal separator + def q = 3.1_4_d + ^ +t6124.scala:13: error: illegal separator + def r = 3.1_4_dd + ^ +t6124.scala:13: error: invalid literal number + def r = 3.1_4_dd + ^ +t6124.scala:14: error: illegal separator + def s = 3_.14 + ^ +t6124.scala:18: error: illegal separator + def v = 0_x42 + ^ +t6124.scala:18: error: invalid literal number + def v = 0_x42 + ^ +t6124.scala:22: error: illegal separator + def x = 00_ + ^ +t6124.scala:23: error: illegal separator + def y = 0_ + ^ +t6124.scala:26: error: invalid literal number + def wtf = 0x // see neg/literals.scala + ^ +t6124.scala:30: error: illegal separator + def `caret positions` = 0x___________ 
+ ^ +16 errors diff --git a/test/files/neg/t6124.scala b/test/files/neg/t6124.scala new file mode 100644 index 000000000000..50e8e5aaa2fe --- /dev/null +++ b/test/files/neg/t6124.scala @@ -0,0 +1,32 @@ + +trait T { + def i: Int = 123_456_ + def j: Long = 123_456_L * 1000 + //def k = 123'455' + + def f = 3_14_E-2 + def e = 3_14E-_2 + def d = 3_14E-2_ + + def p = 3.1_4_ + def q = 3.1_4_d + def r = 3.1_4_dd + def s = 3_.14 + def t = 3._14 // member selection + + def u = 0x_42 + def v = 0_x42 + + def `was: error: malformed double precision floating point number` = 0_1.1 + def w = 0_1 + def x = 00_ + def y = 0_ + def z = 0 + + def wtf = 0x // see neg/literals.scala +} + +trait SyntaxInRecovery { + def `caret positions` = 0x___________ + def `minimal cascade` = 0x_42 + 1 +} diff --git a/test/files/neg/t6138.check b/test/files/neg/t6138.check index 8fd997824824..dd8a112938e4 100644 --- a/test/files/neg/t6138.check +++ b/test/files/neg/t6138.check @@ -1,7 +1,7 @@ t6138.scala:4: error: ambiguous reference to overloaded definition, -both method getClass in object definitions of type (s: Int)Any -and method getClass in object definitions of type (s: String)Any +both method getClass in object definitions of type (s: Int): Any +and method getClass in object definitions of type (s: String): Any match argument types (Nothing) getClass(???): String ^ -one error found +1 error diff --git a/test/files/neg/t6159.check b/test/files/neg/t6159.check new file mode 100644 index 000000000000..19da9e4aa0cc --- /dev/null +++ b/test/files/neg/t6159.check @@ -0,0 +1,7 @@ +t6159.scala:10: warning: match may not be exhaustive. +It would fail on the following input: (_ : A.this.X2) + def f(x: X) = x match { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t6159.scala b/test/files/neg/t6159.scala new file mode 100644 index 000000000000..71abea905e54 --- /dev/null +++ b/test/files/neg/t6159.scala @@ -0,0 +1,13 @@ +//> using options -Werror +// like test/files/pos/t6159.scala +// but with T2 not private +trait A { + sealed abstract class X + private class X1 extends X with X2 { } + trait X2 extends X + sealed trait X3 extends X + + def f(x: X) = x match { + case _: X1 => 0 + } +} diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check index 3698f4b48d75..c961449fa36c 100644 --- a/test/files/neg/t6162-inheritance.check +++ b/test/files/neg/t6162-inheritance.check @@ -1,12 +1,12 @@ -usage.scala:4: warning: inheritance from class Foo in package t6126 is deprecated (since 2.10.0): `Foo` will be made final in a future version. +usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated (since 2.10.0): `Foo` will be made final in a future version. class SubFoo extends Foo ^ -usage.scala:6: warning: inheritance from trait T in package t6126 is deprecated +usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated object SubT extends T ^ -usage.scala:9: warning: inheritance from trait S in package t6126 is deprecated +usage.scala:8: warning: inheritance from trait S in package t6126 is deprecated new S { ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t6162-inheritance/defn.scala b/test/files/neg/t6162-inheritance/defn.scala index 8e4f4d0362df..673ea4dbfd1b 100644 --- a/test/files/neg/t6162-inheritance/defn.scala +++ b/test/files/neg/t6162-inheritance/defn.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -deprecation -Werror package scala.t6126 @deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0") diff --git a/test/files/neg/t6162-inheritance/usage.scala b/test/files/neg/t6162-inheritance/usage.scala index aa9e2fdd2dbf..097e4f590309 100644 --- a/test/files/neg/t6162-inheritance/usage.scala +++ b/test/files/neg/t6162-inheritance/usage.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -deprecation package scala.t6126 class SubFoo extends Foo diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check index 596df2e9d84b..9191fc2730cd 100644 --- a/test/files/neg/t6162-overriding.check +++ b/test/files/neg/t6162-overriding.check @@ -1,9 +1,9 @@ -t6162-overriding.scala:15: warning: overriding method bar in class Bar is deprecated (since 2.10.0): `bar` will be made private in a future version. +t6162-overriding.scala:16: warning: overriding method bar in class Bar is deprecated (since 2.10.0): `bar` will be made private in a future version. override def bar = 43 ^ -t6162-overriding.scala:16: warning: overriding method baz in class Bar is deprecated +t6162-overriding.scala:17: warning: overriding method baz in class Bar is deprecated override def baz = 43 ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t6162-overriding.scala b/test/files/neg/t6162-overriding.scala index af6b77b216e2..17932569c129 100644 --- a/test/files/neg/t6162-overriding.scala +++ b/test/files/neg/t6162-overriding.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -Xfatal-warnings -deprecation +// package scala.t6162 class Bar { diff --git a/test/files/neg/t6214.check b/test/files/neg/t6214.check index 9d746351d139..e7c26f42840c 100644 --- a/test/files/neg/t6214.check +++ b/test/files/neg/t6214.check @@ -1,7 +1,4 @@ -t6214.scala:5: error: ambiguous reference to overloaded definition, -both method m in object Test of type (f: Int => Unit)Int -and method m in object Test of type (f: String => Unit)Int -match argument types (Any => Unit) +t6214.scala:5: error: missing parameter type m { s => case class Foo() } - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t6214.scala b/test/files/neg/t6214.scala index 734acda35ef1..0d5ffc5dedfa 100644 --- a/test/files/neg/t6214.scala +++ b/test/files/neg/t6214.scala @@ -1,7 +1,7 @@ object Test { def m(f: String => Unit) = 0 def m(f: Int => Unit) = 0 - def foo { + def foo: Unit = { m { s => case class Foo() } } } diff --git a/test/files/neg/t6217.check b/test/files/neg/t6217.check new file mode 100644 index 000000000000..727a48d223cd --- /dev/null +++ b/test/files/neg/t6217.check @@ -0,0 +1,6 @@ +t6217.scala:11: warning: _root_ in root position of qualifier refers to the root package, not package _root_ in package p, which is in scope + import _root_.scala.Option + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t6217.scala b/test/files/neg/t6217.scala new file mode 100644 index 000000000000..58cce7844fbe --- /dev/null +++ b/test/files/neg/t6217.scala @@ -0,0 +1,22 @@ +//> using options -Werror +package p { + package _root_ { + package scala { + object Option + } + } +} +package p { + object Test { + import _root_.scala.Option + def f = Option(null) + } +} + +// was: +// test/files/neg/t6217.scala:12: error: p._root_.scala.Option.type does not take parameters +/* +t6217.scala:11: warning: ignoring relative package named _root_ in root position + import _root_.scala.Option + ^ + */ diff --git a/test/files/neg/t6217b.check b/test/files/neg/t6217b.check new file mode 100644 index 000000000000..e956322c6edd --- /dev/null +++ b/test/files/neg/t6217b.check @@ -0,0 +1,6 @@ +t6217b.scala:5: warning: _root_ in root position of qualifier refers to the root package, not package _root_ in package p, which is in scope + import _root_.scala.Option + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t6217b.scala b/test/files/neg/t6217b.scala new file mode 100644 index 000000000000..923effa94171 --- /dev/null +++ b/test/files/neg/t6217b.scala @@ -0,0 +1,10 @@ +//> using options -Werror +package p +package _root_ +object Test { + import _root_.scala.Option + def f = Option(null) +} + +// was: +// test/files/neg/t6217b.scala:5: error: object scala is not a member of package p._root_ diff --git a/test/files/neg/t6217c.check b/test/files/neg/t6217c.check new file mode 100644 index 000000000000..c8fc7b551047 --- /dev/null +++ b/test/files/neg/t6217c.check @@ -0,0 +1,18 @@ +t6217c.scala:18: warning: _root_ in root position of qualifier refers to the root package, not package _root_ in package a, which is in scope + val x = new _root_.b.B + ^ +t6217c.scala:25: warning: _root_ in root position of qualifier refers to the root package, not package _root_ in package a, which is in scope + import _root_.b._ + ^ +t6217c.scala:19: warning: _root_ in root position of qualifier refers to the root package, not package _root_ in package a, which is in scope + def k: _root_.b.B = (x: Any) match { + ^ +t6217c.scala:20: warning: _root_ in root position of qualifier refers to the root package, not package _root_ in package a, which is in scope + case b: _root_.b.B => b + ^ +t6217c.scala:38: warning: _root_ in root position in package definition does not refer to the root package, but to package _root_ in package a, which is in scope + package _root_.p { + ^ +error: No warnings can be incurred under -Werror. +5 warnings +1 error diff --git a/test/files/neg/t6217c.scala b/test/files/neg/t6217c.scala new file mode 100644 index 000000000000..439aad07c106 --- /dev/null +++ b/test/files/neg/t6217c.scala @@ -0,0 +1,41 @@ +//> using options -Werror +package b { + class B +} +package object b { + def f: B = new a.b.A().x + def c = new a.b.C() + def g: B = c.y + def k: B = (new a.b.A().x: Any) match { + case b: _root_.b.B => b + case _ => ??? 
+ } +} + +package a { + package b { + class A { + val x = new _root_.b.B + def k: _root_.b.B = (x: Any) match { + case b: _root_.b.B => b + case _ => ??? + } + } + class C { + import _root_.b._ + def y = new B + def z = a._root_.X + def v = a.b.p.Y + def w = a._root_.p.Y + } + } + package b.p { + object Y + } + package _root_ { + object X + } + package _root_.p { + object Y + } +} diff --git a/test/files/neg/t6227.check b/test/files/neg/t6227.check index 5e3c636712db..4dacb2df5b3f 100644 --- a/test/files/neg/t6227.check +++ b/test/files/neg/t6227.check @@ -1,4 +1,4 @@ t6227.scala:2: error: illegal combination of modifiers: implicit and case for: class IntOps implicit case class IntOps( i: Int ) { ^ -one error found +1 error diff --git a/test/files/neg/t6258.check b/test/files/neg/t6258.check index 73363d82806d..a749e1b86af4 100644 --- a/test/files/neg/t6258.check +++ b/test/files/neg/t6258.check @@ -2,15 +2,15 @@ t6258.scala:2: error: missing parameter type for expanded function The argument types of an anonymous function must be fully known. (SLS 8.5) Expected type was: PartialFunction[?, Int] val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param - ^ + ^ t6258.scala:5: error: missing parameter type for expanded function The argument types of an anonymous function must be fully known. (SLS 8.5) Expected type was: PartialFunction[?,Int] foo { case a : Int => a } // undefined param - ^ + ^ t6258.scala:22: error: missing parameter type for expanded function The argument types of an anonymous function must be fully known. 
(SLS 8.5) Expected type was: PartialFunction[?,Any] bar[M[Any]] (foo { // undefined param ^ -three errors found +3 errors diff --git a/test/files/neg/t6258.scala b/test/files/neg/t6258.scala index 5046a4750a8b..19794b325f50 100644 --- a/test/files/neg/t6258.scala +++ b/test/files/neg/t6258.scala @@ -1,7 +1,7 @@ object Test { val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param - def foo[A](pf: PartialFunction[A, Int]) {}; + def foo[A](pf: PartialFunction[A, Int]): Unit = {}; foo { case a : Int => a } // undefined param val g : PartialFunction[Int, _] = { case a : Int => a } // okay diff --git a/test/files/neg/t6260-named.check b/test/files/neg/t6260-named.check index ed6ab5e76f68..4bfd921a2d11 100644 --- a/test/files/neg/t6260-named.check +++ b/test/files/neg/t6260-named.check @@ -1,13 +1,13 @@ -t6260-named.scala:12: error: bridge generated for member method apply: (a: C[Any])C[Any] in object O -which overrides method apply: (v1: T1)R in trait Function1 +t6260-named.scala:12: error: bridge generated for member method apply: (a: C[Any]): C[Any] in object O +which overrides method apply: (v1: T1): R in trait Function1 clashes with definition of the member itself; -both have erased type (v1: Object)Object +both have erased type (v1: Object): Object def apply(a: C[Any]) = a ^ -t6260-named.scala:14: error: bridge generated for member method apply: (a: C[Any])C[Any] in class X -which overrides method apply: (a: A)A in trait T +t6260-named.scala:14: error: bridge generated for member method apply: (a: C[Any]): C[Any] in class X +which overrides method apply: (a: A): A in trait T clashes with definition of the member itself; -both have erased type (a: Object)Object +both have erased type (a: Object): Object class X extends T[C[Any]] { def apply(a: C[Any]) = a } ^ -two errors found +2 errors diff --git a/test/files/neg/t6260-named.scala b/test/files/neg/t6260-named.scala index 7cd9ce8473e8..97b48d732661 100644 --- a/test/files/neg/t6260-named.scala +++ 
b/test/files/neg/t6260-named.scala @@ -4,8 +4,8 @@ trait T[A] { } object Test { - (x: C[Any]) => {println(s"f($x)"); x} // okay - new T[C[Any]] { def apply(a: C[Any]) = a } // okay + def f = (x: C[Any]) => {println(s"f($x)"); x} // okay + def g = new T[C[Any]] { def apply(a: C[Any]) = a } // okay // we can't rename the specific apply method to avoid the clash object O extends Function1[C[Any], C[Any]] { diff --git a/test/files/neg/t6260c.check b/test/files/neg/t6260c.check index cbbcfd1504c9..7ce9d7363f75 100644 --- a/test/files/neg/t6260c.check +++ b/test/files/neg/t6260c.check @@ -1,7 +1,7 @@ -t6260c.scala:4: error: bridge generated for member method f: ()Option[A] in class Bar1 -which overrides method f: ()A in class Foo1 +t6260c.scala:4: error: bridge generated for member method f: (): Option[A] in class Bar1 +which overrides method f: (): A in class Foo1 clashes with definition of the member itself; -both have erased type ()Object +both have erased type (): Object class Bar1[A] extends Foo1[Option[A]] { def f(): Option[A] = ??? } ^ -one error found +1 error diff --git a/test/files/neg/t6263.check b/test/files/neg/t6263.check deleted file mode 100644 index 9e9c7c615b99..000000000000 --- a/test/files/neg/t6263.check +++ /dev/null @@ -1,9 +0,0 @@ -t6263.scala:5: error: type mismatch; - found : A.this.c.type (with underlying type C) - required: AnyRef -Note that C extends Any, not AnyRef. -Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. 
- type t = c.type - ^ -one error found diff --git a/test/files/neg/t6264.check b/test/files/neg/t6264.check index 312f6e9d00fc..b7cf577e7ee8 100644 --- a/test/files/neg/t6264.check +++ b/test/files/neg/t6264.check @@ -1,6 +1,6 @@ -t6264.scala:4: warning: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure +t6264.scala:5: warning: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure x.isInstanceOf[Tuple2[_, Tuple1[_]]] ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t6264.scala b/test/files/neg/t6264.scala index 52885eb58ca3..f7be7eff0f01 100644 --- a/test/files/neg/t6264.scala +++ b/test/files/neg/t6264.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class Foo { def foo(x: AnyRef): Unit = { x.isInstanceOf[Tuple2[_, Tuple1[_]]] diff --git a/test/files/neg/t6276.check b/test/files/neg/t6276.check index e4bad9a5f833..d8bd04c00fa9 100644 --- a/test/files/neg/t6276.check +++ b/test/files/neg/t6276.check @@ -1,21 +1,25 @@ -t6276.scala:5: warning: method a in class C does nothing other than call itself recursively +t6276.scala:6: warning: method a in class C does nothing other than call itself recursively def a: Any = a // warn ^ -t6276.scala:6: warning: value b in class C does nothing other than call itself recursively +t6276.scala:7: warning: value b in class C does nothing other than call itself recursively val b: Any = b // warn ^ -t6276.scala:8: warning: method c in class C does nothing other than call itself recursively +t6276.scala:9: warning: method c in class C does nothing other than call itself recursively def c: Any = this.c // warn ^ -t6276.scala:9: warning: method d in class C does nothing other than call itself recursively +t6276.scala:10: warning: 
method d in class C does nothing other than call itself recursively def d: Any = C.this.d // warn ^ -t6276.scala:14: warning: method a does nothing other than call itself recursively +t6276.scala:11: warning: method e in class C does nothing other than call itself recursively + def e(): Any = e //warn + ^ +t6276.scala:16: warning: method a does nothing other than call itself recursively def a: Any = a // warn ^ -t6276.scala:23: warning: method a does nothing other than call itself recursively +t6276.scala:25: warning: method a does nothing other than call itself recursively def a = a // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found -one error found +warning: 1 deprecation (since 2.13.3); re-run with -deprecation for details +error: No warnings can be incurred under -Werror. +8 warnings +1 error diff --git a/test/files/neg/t6276.scala b/test/files/neg/t6276.scala index 150bc3a57500..d236ee69885f 100644 --- a/test/files/neg/t6276.scala +++ b/test/files/neg/t6276.scala @@ -1,15 +1,17 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { - def foo(a: Int, b: Int, c: Int) { + def foo(a: Int, b: Int, c: Int): Unit = { class C { def a: Any = a // warn val b: Any = b // warn def c: Any = this.c // warn def d: Any = C.this.d // warn + def e(): Any = e //warn } - def method { + def method: Unit = { // method local def a: Any = a // warn } diff --git a/test/files/neg/t6283.check b/test/files/neg/t6283.check index 69e417ee93ad..6040efc50886 100644 --- a/test/files/neg/t6283.check +++ b/test/files/neg/t6283.check @@ -1,4 +1,4 @@ -t6283.scala:1: error: `abstract' modifier cannot be used with value classes +t6283.scala:1: error: `abstract` modifier cannot be used with value classes abstract class Funky(val i: Int) extends AnyVal ^ -one error found +1 error diff --git a/test/files/neg/t6289/SUT_5.scala b/test/files/neg/t6289/SUT_5.scala index 971f87ad75bd..0a996352c039 100644 --- 
a/test/files/neg/t6289/SUT_5.scala +++ b/test/files/neg/t6289/SUT_5.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings /** The System Under Test. * We bail on the earlier round that generates the first error. diff --git a/test/files/neg/t630.check b/test/files/neg/t630.check index 0814ef0c1876..366b403dadf9 100644 --- a/test/files/neg/t630.check +++ b/test/files/neg/t630.check @@ -1,5 +1,7 @@ -t630.scala:20: error: overriding value foo in trait Bar of type Req2; - object foo has incompatible type +t630.scala:20: error: incompatible type in overriding +val foo: Req2 (defined in trait Bar); + found : Test.foo.type + required: Req2 object foo extends Req1 ^ -one error found +1 error diff --git a/test/files/neg/t631.check b/test/files/neg/t631.check index 3759565e1178..074794c77dec 100644 --- a/test/files/neg/t631.check +++ b/test/files/neg/t631.check @@ -1,4 +1,4 @@ -t631.scala:1: error: `implicit' modifier cannot be used for top-level objects +t631.scala:1: error: `implicit` modifier cannot be used for top-level objects implicit object Test { ^ -one error found +1 error diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check index b0368fa1b43f..399514cb1ce9 100644 --- a/test/files/neg/t6323a.check +++ b/test/files/neg/t6323a.check @@ -1,15 +1,7 @@ -t6323a.scala:11: materializing requested scala.reflect.type.ClassTag[Test] using scala.reflect.`package`.materializeClassTag[Test]() - val lookAtMe = m.reflect(Test("a",List(5))) - ^ -t6323a.scala:12: materializing requested reflect.runtime.universe.type.TypeTag[Test] using scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) +t6323a.scala:13: error: implicit error; +!I ttag: reflect.runtime.universe.TypeTag[Test] + No TypeTag available for Test + val value = u.typeOf[Test] ^ -t6323a.scala:12: scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because: 
-failed to typecheck the materialized tag: -cannot create a TypeTag referring to class Test.Test local to the reifee: use WeakTypeTag instead - val value = u.typeOf[Test] - ^ -t6323a.scala:12: error: No TypeTag available for Test - val value = u.typeOf[Test] - ^ -one error found +1 error diff --git a/test/files/neg/t6323a.scala b/test/files/neg/t6323a.scala index fb2721987404..a0d6324f8bb8 100644 --- a/test/files/neg/t6323a.scala +++ b/test/files/neg/t6323a.scala @@ -1,4 +1,5 @@ -// scalac: -Xlog-implicits +//> using options -Vimplicits +// import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => m} import scala.reflect.runtime.{universe => u} diff --git a/test/files/neg/t633.check b/test/files/neg/t633.check index d69d3be70e26..42294bae35cb 100644 --- a/test/files/neg/t633.check +++ b/test/files/neg/t633.check @@ -1,4 +1,4 @@ t633.scala:3: error: not found: type ListBuffer def t(a : ListBuffer[String]) = { ^ -one error found +1 error diff --git a/test/files/neg/t6335.check b/test/files/neg/t6335.check index d118440f75a9..3a7f80701687 100644 --- a/test/files/neg/t6335.check +++ b/test/files/neg/t6335.check @@ -6,4 +6,4 @@ t6335.scala:3: error: method X is defined twice; the conflicting method X was defined at line 2:7 implicit class X(val x: Int) { def xx = x } ^ -two errors found +2 errors diff --git a/test/files/neg/t6335.scala b/test/files/neg/t6335.scala index 5c41e81ef537..6c898af5b487 100644 --- a/test/files/neg/t6335.scala +++ b/test/files/neg/t6335.scala @@ -1,7 +1,7 @@ object ImplicitClass { - def X(i: Int) {} + def X(i: Int): Unit = {} implicit class X(val x: Int) { def xx = x } - def Z[A](i: A) {} + def Z[A](i: A): Unit = {} implicit class Z[A](val i: A) { def zz = i } -} \ No newline at end of file +} diff --git a/test/files/neg/t6336.check b/test/files/neg/t6336.check index f70a5f70ab35..7212ff2f74b9 100644 --- a/test/files/neg/t6336.check +++ b/test/files/neg/t6336.check @@ -4,4 +4,4 @@ t6336.scala:3: error: Parameter 
type in structural refinement may not refer to a t6336.scala:4: error: Result type in structural refinement may not refer to a user-defined value class val b = new { def y[T](x: T): X[T] = new X(2) } ^ -two errors found +2 errors diff --git a/test/files/neg/t6336.scala b/test/files/neg/t6336.scala index b1d61f4dd228..d8b795e3edbf 100644 --- a/test/files/neg/t6336.scala +++ b/test/files/neg/t6336.scala @@ -1,5 +1,5 @@ object D { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val a = new { def y[T](x: X[T]) = x.i } val b = new { def y[T](x: T): X[T] = new X(2) } val x = new X(3) diff --git a/test/files/neg/t6337.check b/test/files/neg/t6337.check index 8448f71320d9..21333b6eab7e 100644 --- a/test/files/neg/t6337.check +++ b/test/files/neg/t6337.check @@ -4,4 +4,4 @@ class X[T](val i: XX[T]) extends AnyVal t6337.scala:20: error: value class may not wrap another user-defined value class class X1[T](val i: XX1[T]) extends AnyVal ^ -two errors found +2 errors diff --git a/test/files/neg/t6337.scala b/test/files/neg/t6337.scala index c3858f8c04b9..a6cb9bb33ad6 100644 --- a/test/files/neg/t6337.scala +++ b/test/files/neg/t6337.scala @@ -11,7 +11,7 @@ class X[T](val i: XX[T]) extends AnyVal class XX[T](val x: T) extends AnyVal object C1 { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val x = new X1(new XX1(Some(3))) println(x.i.x.get + 9) } diff --git a/test/files/neg/t6340.check b/test/files/neg/t6340.check index f18b8c3f4ba2..f2367a824f78 100644 --- a/test/files/neg/t6340.check +++ b/test/files/neg/t6340.check @@ -1,10 +1,16 @@ t6340.scala:11: error: value D is not a member of object Foo - import Foo.{ A, B, C, D, E, X, Y, Z } - ^ + import Foo.{ A, B, C, D, E, F => G, X, Y, Z } + ^ +t6340.scala:11: error: value E is not a member of object Foo + import Foo.{ A, B, C, D, E, F => G, X, Y, Z } + ^ +t6340.scala:11: error: value F is not a member of object Foo + import Foo.{ A, B, C, D, E, F => G, X, Y, Z } + ^ 
t6340.scala:16: error: not found: type D val d = new D ^ t6340.scala:17: error: not found: type W val w = new W ^ -three errors found +5 errors diff --git a/test/files/neg/t6340.scala b/test/files/neg/t6340.scala index 8934d5c15d6f..1d56ac2134df 100644 --- a/test/files/neg/t6340.scala +++ b/test/files/neg/t6340.scala @@ -8,7 +8,7 @@ object Foo { } object Test { - import Foo.{ A, B, C, D, E, X, Y, Z } + import Foo.{ A, B, C, D, E, F => G, X, Y, Z } val a = new A val b = new B diff --git a/test/files/neg/t6352.check b/test/files/neg/t6352.check new file mode 100644 index 000000000000..4e8f165e00a8 --- /dev/null +++ b/test/files/neg/t6352.check @@ -0,0 +1,23 @@ +t6352.scala:9: error: too many arguments (found 2, expected 1) for method apply: (i: Int): H1.StringContext in object StringContext; signature for interpolation must be `StringContext.apply(String*)` + val str = s"a: $x" + ^ +t6352.scala:15: error: type mismatch; + found : String("a: ") + required: Int; signature for interpolation must be `StringContext.apply(String*)` + val str = s"a: $x" + ^ +t6352.scala:23: error: Int does not take parameters; incompatible interpolation method s + val str = s"a: $x" + ^ +t6352.scala:31: error: not found: value y + val bad = s"b: $y" + ^ +t6352.scala:38: error: value t is not a member of X2.StringContext + val bad = t"a: $x" + ^ +t6352.scala:45: error: type mismatch; + found : Int + required: String; incompatible interpolation method s + val bad = s"a: $x" + ^ +6 errors diff --git a/test/files/neg/t6352.scala b/test/files/neg/t6352.scala new file mode 100644 index 000000000000..a83d80264e0f --- /dev/null +++ b/test/files/neg/t6352.scala @@ -0,0 +1,46 @@ +//scalac: -Yimports:java.lang + +import scala.Int + +object H1 { + case class StringContext(i: Int) + + val x = 3 + val str = s"a: $x" +} +object H2 { + case class StringContext(i: Int*) + + val x = 3 + val str = s"a: $x" +} +object H3 { + case class StringContext(strs: String*) { + def s: Int = 3 + } + + val x = 3 + val str 
= s"a: $x" +} +object X1 { + case class StringContext(strs: String*) { + def s(args: Int*): String = strs.mkString + args.sum + } + val x = 3 + val str = s"a: $x" + val bad = s"b: $y" +} +object X2 { + case class StringContext(strs: String*) { + def s(args: Int*): String = strs.mkString + args.sum + } + val x = 3 + val bad = t"a: $x" +} +object X3 { + case class StringContext(strs: String*) { + def s(args: String*): String = scala.StringContext(strs: _*).s(args: _*) + } + val x = 3 + val bad = s"a: $x" +} diff --git a/test/files/neg/t6355a.check b/test/files/neg/t6355a.check index 5768d31f0b48..5d47c206a11a 100644 --- a/test/files/neg/t6355a.check +++ b/test/files/neg/t6355a.check @@ -4,4 +4,4 @@ t6355a.scala:12: error: implementation restriction: applyDynamic cannot be overl t6355a.scala:18: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2) def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3 ^ -two errors found +2 errors diff --git a/test/files/neg/t6355b.check b/test/files/neg/t6355b.check index f827f07e53bd..05cdd0b02bd6 100644 --- a/test/files/neg/t6355b.check +++ b/test/files/neg/t6355b.check @@ -8,4 +8,4 @@ error after rewriting to x.("bippy") possible cause: maybe a wrong Dynamic method signature? 
println(x.bippy("42")) ^ -two errors found +2 errors diff --git a/test/files/neg/t6357.check b/test/files/neg/t6357.check index a534d1439abb..39d089dd2a0e 100644 --- a/test/files/neg/t6357.check +++ b/test/files/neg/t6357.check @@ -1,4 +1,4 @@ t6357.scala:3: error: value class may not be a local class final class Y(val j: Int) extends AnyVal ^ -one error found +1 error diff --git a/test/files/neg/t6359.check b/test/files/neg/t6359.check index 5bcdc57331a0..5f48ec55af7c 100644 --- a/test/files/neg/t6359.check +++ b/test/files/neg/t6359.check @@ -6,4 +6,4 @@ t6359.scala:4: error: implementation restriction: nested class is not allowed in This restriction is planned to be removed in subsequent releases. class Y ^ -two errors found +2 errors diff --git a/test/files/neg/t639.check b/test/files/neg/t639.check index 6d41d872de68..1d83eb194a1b 100644 --- a/test/files/neg/t639.check +++ b/test/files/neg/t639.check @@ -4,4 +4,4 @@ import a._ t639.scala:5: error: not found: type B @B ^ -two errors found +2 errors diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check index 2200f15cb587..898d4f05114d 100644 --- a/test/files/neg/t6406-regextract.check +++ b/test/files/neg/t6406-regextract.check @@ -1,6 +1,4 @@ -t6406-regextract.scala:5: warning: method unapplySeq in class Regex is deprecated (since 2.11.0): extracting a match result from anything but a CharSequence or Match is deprecated +t6406-regextract.scala:4: error: cannot resolve overloaded unapply List(1) collect { case r(i) => i } ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +1 error diff --git a/test/files/neg/t6406-regextract.scala b/test/files/neg/t6406-regextract.scala index e1e2cc5f7b70..0f5dad908d4b 100644 --- a/test/files/neg/t6406-regextract.scala +++ b/test/files/neg/t6406-regextract.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -deprecation object Test extends App { val r = "(\\d+)".r diff --git a/test/files/neg/t6426.check b/test/files/neg/t6426.check new file mode 100644 index 000000000000..bc9b6019654e --- /dev/null +++ b/test/files/neg/t6426.check @@ -0,0 +1,4 @@ +t6426.scala:4: error: not found: value _ + def f = `_`.Buffer(0) + ^ +1 error diff --git a/test/files/neg/t6426.scala b/test/files/neg/t6426.scala new file mode 100644 index 000000000000..324e423b341b --- /dev/null +++ b/test/files/neg/t6426.scala @@ -0,0 +1,5 @@ + +trait T { + import collection.{mutable => _, _} + def f = `_`.Buffer(0) +} diff --git a/test/files/neg/t6436.check b/test/files/neg/t6436.check index 5cee6fb5581d..b47959aca308 100644 --- a/test/files/neg/t6436.check +++ b/test/files/neg/t6436.check @@ -2,9 +2,16 @@ t6436.scala:8: error: type mismatch; found : StringContext required: ?{def q: ?} Note that implicit conversions are not applicable because they are ambiguous: - both method foo1 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} - and method foo2 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} + both method foo1 in object quasiquotes of type (ctx: StringContext): AnyRef{def q: Nothing} + and method foo2 in object quasiquotes of type (ctx: StringContext): AnyRef{def q: Nothing} are possible conversion functions from StringContext to ?{def q: ?} println(q"a") ^ -one error found +t6436.scala:2: warning: Implicit definition should have explicit type (inferred AnyRef{def q: Nothing}) [quickfixable] + implicit def foo1(ctx: StringContext) = new { def q = ??? 
} + ^ +t6436.scala:3: warning: Implicit definition should have explicit type (inferred AnyRef{def q: Nothing}) [quickfixable] + implicit def foo2(ctx: StringContext) = new { def q = ??? } + ^ +2 warnings +1 error diff --git a/test/files/neg/t6436b.check b/test/files/neg/t6436b.check index 21ab972b7972..b325db502734 100644 --- a/test/files/neg/t6436b.check +++ b/test/files/neg/t6436b.check @@ -2,9 +2,16 @@ t6436b.scala:8: error: type mismatch; found : StringContext required: ?{def q: ?} Note that implicit conversions are not applicable because they are ambiguous: - both method foo1 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} - and method foo2 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} + both method foo1 in object quasiquotes of type (ctx: StringContext): AnyRef{def q: Nothing} + and method foo2 in object quasiquotes of type (ctx: StringContext): AnyRef{def q: Nothing} are possible conversion functions from StringContext to ?{def q: ?} println(StringContext("a").q()) ^ -one error found +t6436b.scala:2: warning: Implicit definition should have explicit type (inferred AnyRef{def q: Nothing}) [quickfixable] + implicit def foo1(ctx: StringContext) = new { def q = ??? } + ^ +t6436b.scala:3: warning: Implicit definition should have explicit type (inferred AnyRef{def q: Nothing}) [quickfixable] + implicit def foo2(ctx: StringContext) = new { def q = ??? 
} + ^ +2 warnings +1 error diff --git a/test/files/neg/t6443c.check b/test/files/neg/t6443c.check index 7b7f419f6c18..f3e688961efd 100644 --- a/test/files/neg/t6443c.check +++ b/test/files/neg/t6443c.check @@ -1,7 +1,7 @@ t6443c.scala:16: error: double definition: -def foo(d: B.D)(a: Any,d2: d.type): Unit at line 11 and +def foo(d: B.D)(a: Any, d2: d.type): Unit at line 11 and def foo(d: B.D)(a: Any)(d2: d.type): Unit at line 16 -have same type after erasure: (d: B.D, a: Object, d2: B.D)Unit +have same type after erasure: (d: B.D, a: Object, d2: B.D): Unit def foo(d: D)(a: Any)(d2: d.type): Unit = () ^ -one error found +1 error diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check index 9d4af37b987f..5b48c1e79fa5 100644 --- a/test/files/neg/t6446-additional.check +++ b/test/files/neg/t6446-additional.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-additional/ploogin_1.scala b/test/files/neg/t6446-additional/ploogin_1.scala index ed6adfc1cf97..dbf433f9a41d 100644 --- a/test/files/neg/t6446-additional/ploogin_1.scala +++ b/test/files/neg/t6446-additional/ploogin_1.scala @@ -23,7 +23,7 @@ 
class Ploogin(val global: Global) extends Plugin { def newPhase(prev: Phase) = new TestPhase(prev) class TestPhase(prev: Phase) extends StdPhase(prev) { override def description = TestComponent.this.description - def apply(unit: CompilationUnit) { + def apply(unit: CompilationUnit): Unit = { // kewl kode } } diff --git a/test/files/neg/t6446-additional/sample_2.scala b/test/files/neg/t6446-additional/sample_2.scala index 9f8c68433fd1..f9d2a3e52360 100644 --- a/test/files/neg/t6446-additional/sample_2.scala +++ b/test/files/neg/t6446-additional/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xshow-phases - +//> using options -Xplugin:. -Vphases package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t6446-list/ploogin_1.scala b/test/files/neg/t6446-list/ploogin_1.scala index ed6adfc1cf97..dbf433f9a41d 100644 --- a/test/files/neg/t6446-list/ploogin_1.scala +++ b/test/files/neg/t6446-list/ploogin_1.scala @@ -23,7 +23,7 @@ class Ploogin(val global: Global) extends Plugin { def newPhase(prev: Phase) = new TestPhase(prev) class TestPhase(prev: Phase) extends StdPhase(prev) { override def description = TestComponent.this.description - def apply(unit: CompilationUnit) { + def apply(unit: CompilationUnit): Unit = { // kewl kode } } diff --git a/test/files/neg/t6446-list/sample_2.scala b/test/files/neg/t6446-list/sample_2.scala index 734aa63b38cc..55d8bb1625d7 100644 --- a/test/files/neg/t6446-list/sample_2.scala +++ b/test/files/neg/t6446-list/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-list - +//> using options -Xplugin:. 
-Xplugin-list package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check index 65b5e5dc0964..d8e822cecd1f 100644 --- a/test/files/neg/t6446-missing.check +++ b/test/files/neg/t6446-missing.check @@ -5,11 +5,11 @@ Error: unable to load class: t6446.Ploogin namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-missing/sample_2.scala b/test/files/neg/t6446-missing/sample_2.scala index 9f8c68433fd1..f9d2a3e52360 100644 --- a/test/files/neg/t6446-missing/sample_2.scala +++ b/test/files/neg/t6446-missing/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xshow-phases - +//> using options -Xplugin:. 
-Vphases package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check index 373f63e5b259..436a02643597 100644 --- a/test/files/neg/t6446-show-phases.check +++ b/test/files/neg/t6446-show-phases.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-show-phases.scala b/test/files/neg/t6446-show-phases.scala index 1dbc96a49097..016067364f1d 100644 --- a/test/files/neg/t6446-show-phases.scala +++ b/test/files/neg/t6446-show-phases.scala @@ -1,4 +1,5 @@ -// scalac: -Xshow-phases +//> using options -Vphases +// // testing compiler flag output only object Test extends App diff --git a/test/files/neg/t6455.check b/test/files/neg/t6455.check index 8f2aad0b9e67..53ad6dd96222 100644 --- a/test/files/neg/t6455.check +++ b/test/files/neg/t6455.check @@ -1,4 +1,4 @@ t6455.scala:5: error: value withFilter is not a member of object O O.withFilter(f => true) ^ -one error found +1 error diff --git a/test/files/neg/t6476.check b/test/files/neg/t6476.check new file mode 100644 index 000000000000..bf0c65efc6b8 --- /dev/null +++ 
b/test/files/neg/t6476.check @@ -0,0 +1,4 @@ +t6476.scala:8: error: unclosed string literal; note that `\"` no longer closes single-quoted interpolated string literals since 2.13.6, you can use a triple-quoted string instead + mimi"\" + ^ +1 error diff --git a/test/files/neg/t6476.scala b/test/files/neg/t6476.scala new file mode 100644 index 000000000000..9b88e43593cb --- /dev/null +++ b/test/files/neg/t6476.scala @@ -0,0 +1,9 @@ +// only the last one doesn't parse +class C { + mimi"""\ """ + mimi"""\\""" + mimi"""\""" + mimi"\ " + mimi"\\" + mimi"\" +} diff --git a/test/files/neg/t6476b.check b/test/files/neg/t6476b.check new file mode 100644 index 000000000000..e6aa3e441214 --- /dev/null +++ b/test/files/neg/t6476b.check @@ -0,0 +1,7 @@ +t6476b.scala:2: error: invalid escape at terminal index 0 in "\". Use \\ for literal \. + val sa = s"""\""" + ^ +t6476b.scala:4: error: invalid escape '\ ' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 0 in "\ ". Use \\ for literal \. + val sc = s"""\ """ + ^ +2 errors diff --git a/test/files/neg/t6476b.scala b/test/files/neg/t6476b.scala new file mode 100644 index 000000000000..d601091972ce --- /dev/null +++ b/test/files/neg/t6476b.scala @@ -0,0 +1,8 @@ +class C { + val sa = s"""\""" + val sb = s"""\\""" + val sc = s"""\ """ + val ra = raw"""\""" + val rb = raw"""\\""" + val rc = raw"""\ """ +} diff --git a/test/files/neg/t6483.check b/test/files/neg/t6483.check index 66e35071071c..4fcf25b24a72 100644 --- a/test/files/neg/t6483.check +++ b/test/files/neg/t6483.check @@ -6,4 +6,4 @@ t6483.scala:20: error: implementation restriction: nested class is not allowed i This restriction is planned to be removed in subsequent releases. 
class Inner extends T { ^ -two errors found +2 errors diff --git a/test/files/neg/t6483.scala b/test/files/neg/t6483.scala index bd99f68fa4c3..3d6713db4795 100644 --- a/test/files/neg/t6483.scala +++ b/test/files/neg/t6483.scala @@ -16,7 +16,7 @@ class C3(val a: Int) extends AnyVal with T { } class C4(val a: Int) extends AnyVal with T { - def foo { + def foo: Unit = { class Inner extends T { override def foo = super[T].foo + a // no (direct) error, other than that a nested class is currently illegal. } diff --git a/test/files/neg/t649.check b/test/files/neg/t649.check index 5a270d475187..7a27b9e8e0dc 100644 --- a/test/files/neg/t649.check +++ b/test/files/neg/t649.check @@ -1,4 +1,4 @@ t649.scala:3: error: overloaded method foo needs result type def foo[A] = foo[A] ^ -one error found +1 error diff --git a/test/files/neg/t650.check b/test/files/neg/t650.check index 320ae66704d9..e7c41d5bd39e 100644 --- a/test/files/neg/t650.check +++ b/test/files/neg/t650.check @@ -1,4 +1,4 @@ -t650.scala:4: error: missing type arguments -trait Test2 extends LinkedList; +t650.scala:3: error: missing type arguments +trait Test2 extends Stack ^ -one error found +1 error diff --git a/test/files/neg/t650.scala b/test/files/neg/t650.scala index cdb4b3da479f..01c37b77d18e 100644 --- a/test/files/neg/t650.scala +++ b/test/files/neg/t650.scala @@ -1,4 +1,3 @@ // test/Test2.scala -package test; -import scala.collection.mutable._; -trait Test2 extends LinkedList; +import scala.collection.mutable._ +trait Test2 extends Stack diff --git a/test/files/neg/t6526.check b/test/files/neg/t6526.check index 606c18c3019c..1a6ad99508d9 100644 --- a/test/files/neg/t6526.check +++ b/test/files/neg/t6526.check @@ -13,4 +13,4 @@ t6526.scala:30: error: could not optimize @tailrec annotated method inner: it co t6526.scala:39: error: could not optimize @tailrec annotated method inner: it contains a recursive call not in tail position def inner(i: Int): Int = 1 + inner(i) ^ -5 errors found +5 errors diff --git 
a/test/files/neg/t6526.scala b/test/files/neg/t6526.scala index 0bc249aa9866..23458d2ba32f 100644 --- a/test/files/neg/t6526.scala +++ b/test/files/neg/t6526.scala @@ -10,7 +10,7 @@ class TailRec { }.length // transform the body of a function - () => { + def f = () => { @tailrec def inner(i: Int): Int = 1 + inner(i) inner(0) } diff --git a/test/files/neg/t6528.check b/test/files/neg/t6528.check index 7820504f35f0..1c55fe568e98 100644 --- a/test/files/neg/t6528.check +++ b/test/files/neg/t6528.check @@ -1,4 +1,4 @@ t6528.scala:6: error: could not find implicit value for parameter e: CoSet[U,Any] implicitly[CoSet[U, Any]] ^ -one error found +1 error diff --git a/test/files/neg/t6534.check b/test/files/neg/t6534.check index 1daa81176a5d..c2d57178c6ed 100644 --- a/test/files/neg/t6534.check +++ b/test/files/neg/t6534.check @@ -1,10 +1,10 @@ -t6534.scala:7: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class +t6534.scala:8: error: redefinition of equals method. See SIP-15, criterion 5. is not allowed in value class class Bippy3(val x: Int) extends AnyVal { override def equals(x: Any) = false } // error ^ -t6534.scala:8: error: redefinition of hashCode method. See SIP-15, criterion 4. is not allowed in value class +t6534.scala:9: error: redefinition of hashCode method. See SIP-15, criterion 5. is not allowed in value class class Bippy4(val x: Int) extends AnyVal { override def hashCode = -1 } // error ^ -t6534.scala:10: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class +t6534.scala:11: error: redefinition of equals method. See SIP-15, criterion 5. 
is not allowed in value class case class Bippy6(val x: Int) extends AnyVal { override def productPrefix = "Dingo" ; override def equals(x: Any) = false } // error ^ -three errors found +3 errors diff --git a/test/files/neg/t6534.scala b/test/files/neg/t6534.scala index 11b8bf9d908d..c80af5fc4c4b 100644 --- a/test/files/neg/t6534.scala +++ b/test/files/neg/t6534.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint +//> using options -Xlint +// trait Foo extends Any { override def equals(x: Any) = false } trait Ding extends Any { override def hashCode = -1 } diff --git a/test/files/neg/t6535.check b/test/files/neg/t6535.check index 1225ea70db23..c04511d7286f 100644 --- a/test/files/neg/t6535.check +++ b/test/files/neg/t6535.check @@ -3,4 +3,4 @@ Note: this is often due in part to a class depending on a definition nested with If applicable, you may wish to try moving some members into another object. import Bs.B._ ^ -one error found +1 error diff --git a/test/files/neg/t6535.scala b/test/files/neg/t6535.scala index 30a750311c16..ddff78e33004 100644 --- a/test/files/neg/t6535.scala +++ b/test/files/neg/t6535.scala @@ -3,7 +3,7 @@ object As { object A extends scala.AnyRef // needed for the cycle; - // replacing with a locally defined closs doesn't + // replacing with a locally defined class doesn't // hit the locked import and hence doesn't cycle. 
} diff --git a/test/files/neg/t6539.check b/test/files/neg/t6539.check index 8c94a8ad4c66..52a3ea9dfc8e 100644 --- a/test/files/neg/t6539.check +++ b/test/files/neg/t6539.check @@ -13,4 +13,4 @@ Test_2.scala:9: error: splice must be enclosed within a reify {} block Test_2.scala:10: error: cannot use value except for signatures of macro implementations val value = expr.value ^ -5 errors found +5 errors diff --git a/test/files/neg/t6558.check b/test/files/neg/t6558.check index 6ad3cecd5023..7ae75bf40238 100644 --- a/test/files/neg/t6558.check +++ b/test/files/neg/t6558.check @@ -6,5 +6,5 @@ t6558.scala:7: error: not found: type typeparam ^ t6558.scala:10: error: not found: type valueparam @valueparam x: Any - ^ -three errors found + ^ +3 errors diff --git a/test/files/neg/t6558b.check b/test/files/neg/t6558b.check index cfa384fc08f4..95afd62ad398 100644 --- a/test/files/neg/t6558b.check +++ b/test/files/neg/t6558b.check @@ -4,4 +4,4 @@ t6558b.scala:5: error: not found: type inargument t6558b.scala:11: error: not found: type infunction @infunction ^ -two errors found +2 errors diff --git a/test/files/neg/t6558b.scala b/test/files/neg/t6558b.scala index 2aa06f69cf60..65fc764df8d4 100644 --- a/test/files/neg/t6558b.scala +++ b/test/files/neg/t6558b.scala @@ -7,7 +7,7 @@ class AnnotNotFound { foo } - () => { + def f = () => { @infunction def foo = 0 () diff --git a/test/files/neg/t6563.check b/test/files/neg/t6563.check index 75dca1507dcc..e0384c7964fc 100644 --- a/test/files/neg/t6563.check +++ b/test/files/neg/t6563.check @@ -1,4 +1,4 @@ t6563.scala:4: error: not found: value e e("f") ^ -one error found +1 error diff --git a/test/files/neg/t6563.scala b/test/files/neg/t6563.scala index b0077b6f94b4..9c97959315af 100644 --- a/test/files/neg/t6563.scala +++ b/test/files/neg/t6563.scala @@ -1,5 +1,5 @@ class A{ - def b(c: => Unit){} + def b(c: => Unit): Unit ={} b{ e("f") new G()(){} diff --git a/test/files/neg/t6566a.check b/test/files/neg/t6566a.check index 
7668f9d2fbc9..a232f615dc4b 100644 --- a/test/files/neg/t6566a.check +++ b/test/files/neg/t6566a.check @@ -1,4 +1,4 @@ t6566a.scala:2: error: covariant type T occurs in invariant position in type T of type MyType class TypeCheat[+T] { type MyType = T } ^ -one error found +1 error diff --git a/test/files/neg/t6566b.check b/test/files/neg/t6566b.check index fb3fe81fca15..f6189c558970 100644 --- a/test/files/neg/t6566b.check +++ b/test/files/neg/t6566b.check @@ -1,4 +1,4 @@ t6566b.scala:3: error: covariant type T occurs in invariant position in type T of type MyType type MyType = T ^ -one error found +1 error diff --git a/test/files/neg/t6566b.scala b/test/files/neg/t6566b.scala index 18ddebf88b73..0d488dbe8a6c 100644 --- a/test/files/neg/t6566b.scala +++ b/test/files/neg/t6566b.scala @@ -9,7 +9,7 @@ object WhatsYourTypeIsMyType { } class Bar extends Foo with WithMyType[Bar] { - def unsound { println("iAmABar") } + def unsound: Unit = { println("iAmABar") } setX() println(x.unsound) diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check index 23716a87d109..c648ea7c6789 100644 --- a/test/files/neg/t6567.check +++ b/test/files/neg/t6567.check @@ -1,10 +1,13 @@ -t6567.scala:9: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply. +t6567.scala:8: warning: Implicit definition should have explicit type (inferred B) [quickfixable] + implicit def a2b(a: A) = new B + ^ +t6567.scala:10: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply. Option[B](a) ^ -t6567.scala:11: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply. +t6567.scala:12: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply. val b: Option[B] = Option(a) ^ -warning: one feature warning; re-run with -feature for details -error: No warnings can be incurred under -Xfatal-warnings. 
-three warnings found -one error found +warning: 1 feature warning; re-run with -feature for details +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t6567.scala b/test/files/neg/t6567.scala index 9a70a56d343b..3e300f9f7053 100644 --- a/test/files/neg/t6567.scala +++ b/test/files/neg/t6567.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint:option-implicit -Xfatal-warnings +//> using options -Xlint:option-implicit -Xfatal-warnings +// class A class B diff --git a/test/files/neg/t6574.check b/test/files/neg/t6574.check index 5fc3c5c3c275..7eeebfdcf683 100644 --- a/test/files/neg/t6574.check +++ b/test/files/neg/t6574.check @@ -1,4 +1,4 @@ t6574.scala:4: error: could not optimize @tailrec annotated method notTailPos$extension: it contains a recursive call not in tail position println("tail") ^ -one error found +1 error diff --git a/test/files/neg/t6574.scala b/test/files/neg/t6574.scala index 1e7bdb15258d..09e1bb63364d 100644 --- a/test/files/neg/t6574.scala +++ b/test/files/neg/t6574.scala @@ -1,5 +1,5 @@ class Bad[X, Y](val v: Int) extends AnyVal { - @annotation.tailrec final def notTailPos[Z](a: Int)(b: String) { + @annotation.tailrec final def notTailPos[Z](a: Int)(b: String): Unit = { this.notTailPos[Z](a)(b) println("tail") } diff --git a/test/files/neg/t6582_exhaust_big.check b/test/files/neg/t6582_exhaust_big.check index 9f3c9b24daac..f4a25dd53bd8 100644 --- a/test/files/neg/t6582_exhaust_big.check +++ b/test/files/neg/t6582_exhaust_big.check @@ -2,6 +2,6 @@ t6582_exhaust_big.scala:29: warning: match may not be exhaustive. It would fail on the following input: Z11() def foo(z: Z) = z match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t6582_exhaust_big.scala b/test/files/neg/t6582_exhaust_big.scala index 91c2fe67d51a..6f53cd24f28a 100644 --- a/test/files/neg/t6582_exhaust_big.scala +++ b/test/files/neg/t6582_exhaust_big.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // sealed abstract class Z object Z { diff --git a/test/files/neg/t6593.check b/test/files/neg/t6593.check new file mode 100644 index 000000000000..1554440bd723 --- /dev/null +++ b/test/files/neg/t6593.check @@ -0,0 +1,147 @@ +t6593.scala:69: warning: comparing values of types Any and Int using `equals` unsafely bypasses cooperative equality; use `==` instead + any equals 1 // w: bypass + ^ +t6593.scala:77: warning: Cls and FinCls are unrelated: they will most likely never compare equal + cls == fin // w: unrelated + ^ +t6593.scala:78: warning: Cls and Eqs are unrelated: they will most likely never compare equal + cls == eqs // w: unrelated + ^ +t6593.scala:79: warning: Cls and String are unrelated: they will most likely never compare equal + cls == str // w: unrelated + ^ +t6593.scala:80: warning: Cls and List[Int] are unrelated: they will most likely never compare equal + cls == lst // w: unrelated + ^ +t6593.scala:81: warning: Cls and scala.collection.immutable.Nil.type are unrelated: they will most likely never compare equal + cls == Nil // w: unrelated + ^ +t6593.scala:82: warning: Cls and Int are unrelated: they will most likely never compare equal + cls == 1 // w: unrelated + ^ +t6593.scala:83: warning: Cls and Integer are unrelated: they will most likely never compare equal + cls == intB // w: unrelated + ^ +t6593.scala:90: warning: SubCls and FinCls are unrelated: they will most likely never compare equal + sub == fin // w: unrelated + ^ +t6593.scala:94: warning: FinCls and Cls are unrelated: they will most likely never compare equal + fin == cls // w: unrelated + ^ +t6593.scala:95: warning: comparing values of types FinCls and 
Int using `==` will always yield false + fin == 1 // w: non-sensible + ^ +t6593.scala:96: warning: comparing values of types FinCls and String using `==` will always yield false + fin == str // w: non-sensible + ^ +t6593.scala:97: warning: FinCls and List[Int] are unrelated: they will most likely never compare equal + fin == lst // w: unrelated + ^ +t6593.scala:98: warning: comparing values of types FinCls and scala.collection.immutable.Nil.type using `==` will always yield false + fin == Nil // w: non-sensible (both are final, unlike the line above) + ^ +t6593.scala:99: warning: comparing a fresh object using `==` will always yield false + fin == new AnyRef() // w: final receiver, fresh + ^ +t6593.scala:110: warning: comparing values of types CC and Cls using `==` will always yield false + cc == cls // w: non-sensible + ^ +t6593.scala:111: warning: comparing values of types CC and Cls using `==` will always yield false + scc == cls // w: non-sensible (wrong) + ^ +t6593.scala:112: warning: comparing values of types CC and Int using `==` will always yield false + cc == 1 // w: non-sensible + ^ +t6593.scala:117: warning: comparing values of types String and Cls using `==` will always yield false + str == cls // w: non-sensible + ^ +t6593.scala:118: warning: comparing values of types String and Int using `==` will always yield false + str == 1 // w: non-sensible + ^ +t6593.scala:120: warning: comparing values of types String and Option[Int] using `!=` will always yield true + str != opt // w: non-sensible + ^ +t6593.scala:130: warning: Option[Int] and Cls are unrelated: they will most likely never compare equal + opt == cls // w: unrelated + ^ +t6593.scala:131: warning: Option[Int] and Int are unrelated: they will most likely never compare equal + opt == 1 // w: unrelated + ^ +t6593.scala:132: warning: Option[Int] and String are unrelated: they will most likely never compare equal + opt == str // w: unrelated + ^ +t6593.scala:133: warning: Option[Int] and List[Int] 
are unrelated: they will most likely never compare equal + opt == lst // w: unrelated + ^ +t6593.scala:137: warning: comparing values of types Some[Int] and Cls using `==` will always yield false + som == cls // w: non-sensible + ^ +t6593.scala:138: warning: comparing values of types Some[Int] and Int using `==` will always yield false + som == 1 // w: non-sensible + ^ +t6593.scala:139: warning: comparing values of types Some[Int] and String using `==` will always yield false + som == str // w: non-sensible + ^ +t6593.scala:144: warning: comparing values of types List[Int] and Cls using `==` will always yield false + lst == cls // w: non-sensible (collections) + ^ +t6593.scala:145: warning: comparing values of types List[Int] and Int using `==` will always yield false + lst == 1 // w: non-sensible (collections) + ^ +t6593.scala:146: warning: comparing values of types List[Int] and Option[Int] using `==` will always yield false + lst == opt // w: non-sensible (collections) + ^ +t6593.scala:147: warning: comparing values of types List[Int] and String using `==` will always yield false + lst == str // w: non-sensible (collections) + ^ +t6593.scala:148: warning: comparing values of types List[Int] and scala.collection.immutable.Set[Int] using `==` will always yield false + lst == set // w: non-sensible (collections) + ^ +t6593.scala:150: warning: comparing values of types List[Int] and scala.collection.mutable.Map[Int,Int] using `==` will always yield false + lst == map // w: non-sensible (collections) + ^ +t6593.scala:151: warning: comparing values of types List[Int] and Eqs using `==` will always yield false + lst == eqs // w: non-sensible (collections) + ^ +t6593.scala:153: warning: comparing values of types List[Int] and Iterator[Int] using `==` will always yield false + lst == itr // w: non-sensible (collections) + ^ +t6593.scala:159: warning: comparing values of types Int and Cls using `==` will always yield false + int == cls // w: non-sensible + ^ 
+t6593.scala:160: warning: comparing values of types Int and String using `==` will always yield false + int == "" // w: non-sensible + ^ +t6593.scala:162: warning: comparing values of types Int and Boolean using `==` will always yield false + int == true // w: non-sensible + ^ +t6593.scala:164: warning: comparing values of types Int and Boolean using `==` will always yield false + int == booB // w: non-sensible + ^ +t6593.scala:165: warning: comparing values of types Int and Unit using `==` will always yield false + int == () // w: non-sensible + ^ +t6593.scala:166: warning: comparing values of types Int and scala.runtime.BoxedUnit using `==` will always yield false + int == uniB // w: non-sensible + ^ +t6593.scala:172: warning: comparing values of types Null and Int using `==` will always yield false + null == int // w: non-sensible + ^ +t6593.scala:178: warning: comparing values of types Integer and Cls using `==` will always yield false + intB == cls // w: non-sensible + ^ +t6593.scala:179: warning: comparing values of types Integer and String using `==` will always yield false + intB == str // w: non-sensible + ^ +t6593.scala:183: warning: comparing values of types Integer and Boolean using `==` will always yield false + intB == true // w: non-sensible + ^ +t6593.scala:184: warning: comparing values of types Integer and Boolean using `==` will always yield false + intB == booB // w: non-sensible + ^ +t6593.scala:185: warning: comparing values of types Integer and Unit using `==` will always yield false + intB == () // w: non-sensible + ^ +error: No warnings can be incurred under -Werror. 
+48 warnings +1 error diff --git a/test/files/neg/t6593.scala b/test/files/neg/t6593.scala new file mode 100644 index 000000000000..c7685a219f25 --- /dev/null +++ b/test/files/neg/t6593.scala @@ -0,0 +1,198 @@ +//> using options -Xfatal-warnings -deprecation + +class Cls +class SubCls extends Cls + +final class FinCls + +class Eqs { + override def equals(o: Any): Boolean = super.equals(o) +} + +case class CC(x: Int) +class SubCC(x: Int) extends CC(x) { override def equals(o: Any) = true } + +class Hello2024 { + + // operations: ==, !=, eq, ne + // - todo: cover `eq/ne` extensively in this test + // - no "non-sensible" warnings when using equals + + // inventory + + val obj = new AnyRef + val any: Any = obj + + val cls = new Cls + val sub = new SubCls + val fin = new FinCls + + val eqs = new Eqs + + val cc = CC(1) + val scc: CC = new SubCC(1) + + val str = "kno" + val opt = Option(1) + val lst = List(1) + val set = Set(1) + val map = collection.mutable.Map(1 -> 1) + + // primitives: 1, true, () + val int = 1 + val boo = true + val uni = () + // null + + val intB = Integer.valueOf(1) + val booB = java.lang.Boolean.TRUE + val uniB = scala.runtime.BoxedUnit.UNIT + + // fresh: `new Cls`, () => 1 + + + // obj + locally { + // obj == any, references: doesn't warn, no need to test + obj == 1 // n + obj equals 1 // n + obj == () // n + obj == intB // n + obj == new Cls // n: no warn here, obj can be anything + obj == (() => 1) // n + } + + // any: same as obj. additional warning for "bypasses cooperative equality" when using equals + // instead of ==. not extensively tested here, this test is about a different warning. 
+ locally { + any == 1 // n + any equals 1 // w: bypass + } + + // warning for unrelated classes ("will most likely never compare equal") + locally { + cls == obj // n + cls == any // n + cls == sub // n: related + cls == fin // w: unrelated + cls == eqs // w: unrelated + cls == str // w: unrelated + cls == lst // w: unrelated + cls == Nil // w: unrelated + cls == 1 // w: unrelated + cls == intB // w: unrelated + cls == new AnyRef() // n + } + + locally { + sub == cls // n: related + sub == obj // n + sub == fin // w: unrelated + } + + locally { + fin == cls // w: unrelated + fin == 1 // w: non-sensible + fin == str // w: non-sensible + fin == lst // w: unrelated + fin == Nil // w: non-sensible (both are final, unlike the line above) + fin == new AnyRef() // w: final receiver, fresh + } + + locally { + eqs == obj // n + eqs == cls // n: unrelated but custom equality (note that inverse warns "unrelated"). TODO scala/bug#6593 + eqs == 1 // n: but inverse warns "non-sensible" + } + + locally { + cc == obj // n + cc == cls // w: non-sensible + scc == cls // w: non-sensible (wrong) + cc == 1 // w: non-sensible + } + + locally { + str == obj // n + str == cls // w: non-sensible + str == 1 // w: non-sensible + str == "" // n + str != opt // w: non-sensible + str == null // n + val cs: CharSequence = "oy" + str == cs // n + cs == str // n + } + + locally { + // Option has no `equals` override, unlike List. 
Some has synthetic case-equals + opt == obj // n + opt == cls // w: unrelated + opt == 1 // w: unrelated + opt == str // w: unrelated + opt == lst // w: unrelated + + val som = Some(1) + som == obj //n + som == cls // w: non-sensible + som == 1 // w: non-sensible + som == str // w: non-sensible + } + + locally { + lst == obj // n + lst == cls // w: non-sensible (collections) + lst == 1 // w: non-sensible (collections) + lst == opt // w: non-sensible (collections) + lst == str // w: non-sensible (collections) + lst == set // w: non-sensible (collections) + lst == Seq(1) // n, both are Seqs + lst == map // w: non-sensible (collections) + lst == eqs // w: non-sensible (collections) + val itr = Iterator(1) + lst == itr // w: non-sensible (collections) + lst == (itr: scala.collection.IterableOnce[Int]) // n + } + + locally { + int == obj // n + int == cls // w: non-sensible + int == "" // w: non-sensible + int == 1L // n + int == true // w: non-sensible + int == intB // n + int == booB // w: non-sensible + int == () // w: non-sensible + int == uniB // w: non-sensible + } + + locally { + null == obj // n + null == cls // n + null == int // w: non-sensible + null == intB // n + } + + locally { + intB == obj // n + intB == cls // w: non-sensible + intB == str // w: non-sensible + intB == int // n + intB == 1L // n + intB == null // n + intB == true // w: non-sensible + intB == booB // w: non-sensible + intB == () // w: non-sensible + } +} + +/* +cooperative equality + - used when `primitive == Any/AnyRef` + - primitive is boxed, so it's a java.lang.Number + - if both java.lang.Number + - maintain scala semantics for boxed: (1: Any) equals (1L: Any) is true + - if one is ScalaNumber, always use its `equals`. 
to support `1 == BigInt(1)` + - also special cases java.lang.Character for ('a': Any) == 97 + - no special casing for strings / CharSequence +*/ diff --git a/test/files/neg/t6595.check b/test/files/neg/t6595.check new file mode 100644 index 000000000000..56e276a09a44 --- /dev/null +++ b/test/files/neg/t6595.check @@ -0,0 +1,6 @@ +t6595.scala:5: warning: early initializers are deprecated; they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. +class Foo extends { + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/pos/t6595.scala b/test/files/neg/t6595.scala similarity index 89% rename from test/files/pos/t6595.scala rename to test/files/neg/t6595.scala index 08a2873c45f0..4cdc0f6d89a4 100644 --- a/test/files/pos/t6595.scala +++ b/test/files/neg/t6595.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings -deprecation +// import scala.annotation.switch class Foo extends { diff --git a/test/files/neg/t6597.check b/test/files/neg/t6597.check index 1d52519d1dc0..49a8e8d26a81 100644 --- a/test/files/neg/t6597.check +++ b/test/files/neg/t6597.check @@ -1,4 +1,4 @@ t6597.scala:3: error: illegal combination of modifiers: implicit and case for: class Quux implicit case class Quux(value: Int) extends AnyVal with T ^ -one error found +1 error diff --git a/test/files/neg/t6601.check b/test/files/neg/t6601.check index 1410e1b11a79..742ead17c748 100644 --- a/test/files/neg/t6601.check +++ b/test/files/neg/t6601.check @@ -1,4 +1,4 @@ -AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor +AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor from class AccessPrivateConstructor new PrivateConstructor("") // Scalac should forbid accessing to the private 
constructor! ^ -one error found +1 error diff --git a/test/files/neg/t663.check b/test/files/neg/t663.check index 633e27ee1243..659d82a6e2f0 100644 --- a/test/files/neg/t663.check +++ b/test/files/neg/t663.check @@ -1,7 +1,7 @@ t663.scala:11: error: name clash between defined and inherited member: def asMatch(node: Test.this.Matchable): Any in trait MatchableImpl and def asMatch(m: Test.this.Node): Any at line 11 -have same type after erasure: (node: test.Test#NodeImpl)Object +have same type after erasure: (node: test.Test#NodeImpl): Object def asMatch(m : Node) : Any = { ^ -one error found +1 error diff --git a/test/files/neg/t6631.check b/test/files/neg/t6631.check new file mode 100644 index 000000000000..4fdacd1c97b1 --- /dev/null +++ b/test/files/neg/t6631.check @@ -0,0 +1,4 @@ +t6631.scala:2: error: invalid escape '\x' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 0 in "\x". Use \\ for literal \. + s"""\x""" + ^ +1 error diff --git a/test/files/neg/t6631.scala b/test/files/neg/t6631.scala new file mode 100644 index 000000000000..496e5b15088e --- /dev/null +++ b/test/files/neg/t6631.scala @@ -0,0 +1,3 @@ +class C { + s"""\x""" +} diff --git a/test/files/neg/t664.check b/test/files/neg/t664.check index cbdf53daea4b..ba6ec1e29892 100644 --- a/test/files/neg/t664.check +++ b/test/files/neg/t664.check @@ -4,4 +4,4 @@ t664.scala:4: error: type Foo is not a member of test.Test t664.scala:5: error: type Bar is not a member of AnyRef trait Bar extends super.Bar; ^ -two errors found +2 errors diff --git a/test/files/neg/t6663.check b/test/files/neg/t6663.check index aa4faa4a46d2..419e22a97f37 100644 --- a/test/files/neg/t6663.check +++ b/test/files/neg/t6663.check @@ -3,4 +3,4 @@ t6663.scala:16: error: type mismatch; required: Int var v = new C(42).foo[String].get :Int ^ -one error found +1 error diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check index 7d9c1497098b..4a2510d8d1b8 100644 --- a/test/files/neg/t6666.check +++ 
b/test/files/neg/t6666.check @@ -1,37 +1,37 @@ -t6666.scala:24: error: Implementation restriction: access of method x$2 in object O1 from <$anon: Function0>, would require illegal premature access to object O1 +t6666.scala:25: error: Implementation restriction: access of method x$2 in object O1 from <$anon: Function0>, would require illegal premature access to object O1 F.byname(x) ^ -t6666.scala:31: error: Implementation restriction: access of lazy value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2 +t6666.scala:32: error: Implementation restriction: access of lazy value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2 F.byname(x) ^ -t6666.scala:38: error: Implementation restriction: access of method x$4 in object O3 from <$anon: Function0>, would require illegal premature access to object O3 +t6666.scala:39: error: Implementation restriction: access of method x$4 in object O3 from <$anon: Function0>, would require illegal premature access to object O3 F.hof(() => x) ^ -t6666.scala:51: error: Implementation restriction: access of method x$6 in class C1 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C1 +t6666.scala:52: error: Implementation restriction: access of method x$6 in class C1 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C1 F.byname(x) ^ -t6666.scala:55: error: Implementation restriction: access of lazy value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2 +t6666.scala:56: error: Implementation restriction: access of lazy value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2 F.byname(x) ^ -t6666.scala:59: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require 
illegal premature access to the unconstructed `this` of class C3 +t6666.scala:60: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C3 F.hof(() => x) ^ -t6666.scala:63: error: Implementation restriction: access of method x$9 in class C4 from object Nested$1, would require illegal premature access to the unconstructed `this` of class C4 +t6666.scala:64: error: Implementation restriction: access of method x$9 in class C4 from object Nested$1, would require illegal premature access to the unconstructed `this` of class C4 object Nested { def xx = x} ^ -t6666.scala:77: error: Implementation restriction: access of method x$11 in class C11 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C11 +t6666.scala:78: error: Implementation restriction: access of method x$11 in class C11 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C11 F.byname(x) ^ -t6666.scala:96: error: Implementation restriction: access of method x$12 in class C13 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C13 +t6666.scala:97: error: Implementation restriction: access of method x$12 in class C13 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C13 F.hof(() => x) ^ -t6666.scala:105: error: Implementation restriction: access of method x$13 in class C14 from object Nested$3, would require illegal premature access to the unconstructed `this` of class C14 +t6666.scala:106: error: Implementation restriction: access of method x$13 in class C14 from object Nested$3, would require illegal premature access to the unconstructed `this` of class C14 object Nested { def xx = x} ^ -t6666.scala:113: error: Implementation restriction: access of method foo$1 in class COuter from class CInner$1, 
would require illegal premature access to the unconstructed `this` of class COuter +t6666.scala:114: error: Implementation restriction: access of method foo$1 in class COuter from class CInner$1, would require illegal premature access to the unconstructed `this` of class COuter class CInner extends C({foo}) ^ -t6666.scala:119: error: Implementation restriction: access of method x$14 in class CEarly from object Nested$5, would require illegal premature access to the unconstructed `this` of class CEarly +t6666.scala:120: error: Implementation restriction: access of method x$14 in class CEarly from object Nested$5, would require illegal premature access to the unconstructed `this` of class CEarly object Nested { def xx = x} ^ -12 errors found +12 errors diff --git a/test/files/neg/t6666.scala b/test/files/neg/t6666.scala index 86c0b8299228..cf3d0010c96c 100644 --- a/test/files/neg/t6666.scala +++ b/test/files/neg/t6666.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:inline +//> using options -Ydelambdafy:inline +// class C(a: Any) object F { def byname(a: => Any) = println(a) diff --git a/test/files/neg/t6666b.check b/test/files/neg/t6666b.check index 21f3947c0fae..c469782dae0e 100644 --- a/test/files/neg/t6666b.check +++ b/test/files/neg/t6666b.check @@ -4,4 +4,4 @@ t6666b.scala:11: error: Implementation restriction: access of method x$1 in clas t6666b.scala:22: error: Implementation restriction: access of method x$2 in class C15 from object Nested$3, would require illegal premature access to the unconstructed `this` of class C15 object Nested { def xx = x} ^ -two errors found +2 errors diff --git a/test/files/neg/t6666c.check b/test/files/neg/t6666c.check index 69df9a876758..e060cf1156a0 100644 --- a/test/files/neg/t6666c.check +++ b/test/files/neg/t6666c.check @@ -1,10 +1,10 @@ -t6666c.scala:3: error: Implementation restriction: access of method x$1 in class D from object X$1, would require illegal premature access to the unconstructed `this` of class D 
+t6666c.scala:4: error: Implementation restriction: access of method x$1 in class D from object X$1, would require illegal premature access to the unconstructed `this` of class D class D extends C({def x = 0; object X { x }}) ^ -t6666c.scala:6: error: Implementation restriction: access of method x$2 in class D1 from object X$3, would require illegal premature access to the unconstructed `this` of class D1 +t6666c.scala:7: error: Implementation restriction: access of method x$2 in class D1 from object X$3, would require illegal premature access to the unconstructed `this` of class D1 class D1 extends C1({def x = 0; () => {object X { x }}}) ^ -t6666c.scala:9: error: Implementation restriction: access of method x$3 from object X$5, would require illegal premature access to the unconstructed `this` of <$anon: Function0> +t6666c.scala:10: error: Implementation restriction: access of method x$3 from object X$5, would require illegal premature access to the unconstructed `this` of <$anon: Function0> class D2 extends C2({def x = 0; object X { x }}) ^ -three errors found +3 errors diff --git a/test/files/neg/t6666c.scala b/test/files/neg/t6666c.scala index 90f26565ba4c..7a8ec5e23b36 100644 --- a/test/files/neg/t6666c.scala +++ b/test/files/neg/t6666c.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:inline +//> using options -Ydelambdafy:inline +// class C(a: Any) class D extends C({def x = 0; object X { x }}) diff --git a/test/files/neg/t6666e.check b/test/files/neg/t6666e.check index 3189612314a1..5b1a3177aa22 100644 --- a/test/files/neg/t6666e.check +++ b/test/files/neg/t6666e.check @@ -1,4 +1,4 @@ t6666e.scala:8: error: Implementation restriction: <$anon: Nothing => Unit> requires premature access to class Crash. 
this(Nil.collect{case x =>}) ^ -one error found +1 error diff --git a/test/files/neg/t6667.check b/test/files/neg/t6667.check index 43313fa4fe29..cfc38701fccc 100644 --- a/test/files/neg/t6667.check +++ b/test/files/neg/t6667.check @@ -1,13 +1,17 @@ t6667.scala:8: error: ambiguous implicit values: - both value inScope1 in object Test of type => C - and value inScope2 in object Test of type => C + both value inScope1 in object Test of type C + and value inScope2 in object Test of type C match expected type C implicitly[C]: Unit // C.companion was used; whereas the ambiguity should abort the implicit search. ^ t6667.scala:9: error: ambiguous implicit values: - both value inScope1 in object Test of type => C - and value inScope2 in object Test of type => C + both value inScope1 in object Test of type C + and value inScope2 in object Test of type C match expected type C implicitly[C] // ambiguity reported, rather than falling back to C.companion ^ -two errors found +t6667.scala:3: warning: Implicit definition should have explicit type (inferred C) [quickfixable] + implicit def companion = new C + ^ +1 warning +2 errors diff --git a/test/files/neg/t6667.scala b/test/files/neg/t6667.scala index fb857ebd3322..840921f3449d 100644 --- a/test/files/neg/t6667.scala +++ b/test/files/neg/t6667.scala @@ -4,7 +4,7 @@ object C { } object Test { - implicit val inScope1, inScope2 = new C + implicit val inScope1, inScope2: C = new C implicitly[C]: Unit // C.companion was used; whereas the ambiguity should abort the implicit search. 
implicitly[C] // ambiguity reported, rather than falling back to C.companion } diff --git a/test/files/neg/t6667b.check b/test/files/neg/t6667b.check index 99cea9a47cc7..3cc5d72c8191 100644 --- a/test/files/neg/t6667b.check +++ b/test/files/neg/t6667b.check @@ -1,13 +1,13 @@ t6667b.scala:16: error: ambiguous implicit values: - both value a in object Test of type => Test.Box + both value a in object Test of type Test.Box and value b of type Test.Box match expected type Test.Box new Test() ^ t6667b.scala:19: error: ambiguous implicit values: - both value a in object Test of type => Test.Box + both value a in object Test of type Test.Box and value b of type Test.Box match expected type Test.Box new Test() ^ -two errors found +2 errors diff --git a/test/files/neg/t6667b.scala b/test/files/neg/t6667b.scala index 4e64e1af176b..db31c0a9be69 100644 --- a/test/files/neg/t6667b.scala +++ b/test/files/neg/t6667b.scala @@ -7,7 +7,7 @@ object Test { val value= 1 } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { implicit val b: Box= new Box { val value= 2 } diff --git a/test/files/neg/t667.check b/test/files/neg/t667.check index e68c6dea00ad..43313dd7e682 100644 --- a/test/files/neg/t667.check +++ b/test/files/neg/t667.check @@ -1,4 +1,4 @@ t667.scala:8: error: illegal cyclic reference involving class Ni class Ni extends super.Ni with Ni; ^ -one error found +1 error diff --git a/test/files/neg/t6675.check b/test/files/neg/t6675.check index 714961075f5d..dad3f61cf159 100644 --- a/test/files/neg/t6675.check +++ b/test/files/neg/t6675.check @@ -1,6 +1,6 @@ -t6675.scala:11: warning: deprecated adaptation: object X expects 3 patterns to hold (Int, Int, Int) but crushing into 3-tuple to fit single pattern (scala/bug#6675) - "" match { case X(b) => b } // should warn under -Xlint. 
Not an error because of scala/bug#6111 +t6675.scala:12: warning: deprecated adaptation: object X expects 3 patterns to hold (Int, Int, Int) but crushing into 3-tuple to fit single pattern (scala/bug#6675) + "" match { case X(b) => b case x => throw new MatchError(x) } // should warn under -Xlint. Not an error because of scala/bug#6111 ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t6675.scala b/test/files/neg/t6675.scala index 0b9f86e48b6a..7cc97f36cdef 100644 --- a/test/files/neg/t6675.scala +++ b/test/files/neg/t6675.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// object X { def unapply(s: String): Option[(Int,Int,Int)] = Some((1,2,3)) } @@ -8,7 +9,7 @@ object Y { } object Test { - "" match { case X(b) => b } // should warn under -Xlint. Not an error because of scala/bug#6111 + "" match { case X(b) => b case x => throw new MatchError(x) } // should warn under -Xlint. 
Not an error because of scala/bug#6111 - "" match { case Y(b) => b } // no warning + "" match { case Y(b) => b case x => throw new MatchError(x) } // no warning } diff --git a/test/files/neg/t6675b.check b/test/files/neg/t6675b.check index c78d8edb1b14..e6dc64965367 100644 --- a/test/files/neg/t6675b.check +++ b/test/files/neg/t6675b.check @@ -1,37 +1,37 @@ -t6675b.scala:20: error: constructor cannot be instantiated to expected type; +t6675b.scala:21: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (Int, Int) def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ -t6675b.scala:27: error: constructor cannot be instantiated to expected type; +t6675b.scala:28: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (?A1, ?A2) where type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ -t6675b.scala:33: error: constructor cannot be instantiated to expected type; +t6675b.scala:34: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: ((Int, Int), (Int, Int)) def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a } // fail ^ -t6675b.scala:40: error: constructor cannot be instantiated to expected type; +t6675b.scala:41: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: ((?A1, ?A2), (?A3, ?A4)) where type ?A4 <: A (this is a GADT skolem), type ?A3 <: A (this is a GADT skolem), type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail ^ -t6675b.scala:18: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to 
fit single pattern (scala/bug#6675) +t6675b.scala:19: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn ^ -t6675b.scala:25: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) +t6675b.scala:26: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn ^ -t6675b.scala:31: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) +t6675b.scala:32: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn ^ -t6675b.scala:37: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) +t6675b.scala:38: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f1[A](x: A) = (Left(x): Either[A, A]) match { case NativelyTwo(a) => a } // warn ^ -t6675b.scala:38: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((A, A), (A, A)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) +t6675b.scala:39: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((A, A), (A, A)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) 
def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a } // warn ^ -5 warnings found -four errors found +5 warnings +4 errors diff --git a/test/files/neg/t6675b.scala b/test/files/neg/t6675b.scala index 475bb8a2ece1..2f494e08b539 100644 --- a/test/files/neg/t6675b.scala +++ b/test/files/neg/t6675b.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xlint +//> using options -deprecation -Xlint +// object LeftOrRight { def unapply[A](value: Either[A, A]): Option[A] = value match { case scala.Left(x) => Some(x) diff --git a/test/files/neg/t668.check b/test/files/neg/t668.check index b057ca793601..6815a6bd9604 100644 --- a/test/files/neg/t668.check +++ b/test/files/neg/t668.check @@ -1,4 +1,4 @@ t668.scala:1: error: type Iterable takes type parameters class Test extends Iterable ^ -one error found +1 error diff --git a/test/files/neg/t6680a.check b/test/files/neg/t6680a.check deleted file mode 100644 index 0b28ddff344a..000000000000 --- a/test/files/neg/t6680a.check +++ /dev/null @@ -1,11 +0,0 @@ -t6680a.scala:11: error: type mismatch; - found : String("abc") - required: A - y.x = "abc" - ^ -t6680a.scala:18: error: type mismatch; - found : String("") - required: A - case class C[A](f:A=>A);def f(x:Any)=x match { case C(f)=>f("") };f(C[Int](x=>x)) - ^ -two errors found diff --git a/test/files/neg/t6680a.scala b/test/files/neg/t6680a.scala deleted file mode 100644 index 5a6ed86136ce..000000000000 --- a/test/files/neg/t6680a.scala +++ /dev/null @@ -1,19 +0,0 @@ -// scalac: -Xstrict-inference -case class Cell[A](var x: A) -object Test { - def f1(x: Any) = x match { case y @ Cell(_) => y } // Inferred type is Cell[Any] - def f2(x: Cell[_]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[_] - def f3[A](x: Cell[A]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[A] - - def main(args: Array[String]): Unit = { - val x = new Cell(1) - val y = f1(x) - y.x = "abc" - println(x.x + 1) - } -} - -// The tweetable variation 
-object Tweet { - case class C[A](f:A=>A);def f(x:Any)=x match { case C(f)=>f("") };f(C[Int](x=>x)) -} diff --git a/test/files/neg/t6714.check b/test/files/neg/t6714.check index eb8b6251d44c..d23f8b60c23f 100644 --- a/test/files/neg/t6714.check +++ b/test/files/neg/t6714.check @@ -1,7 +1,7 @@ -t6714.scala:19: error: Unexpected tree during assignment conversion. +t6714.scala:21: error: Unexpected tree during assignment conversion. a(1)(9000) += 20 // Not OK ^ -t6714.scala:21: error: Unexpected tree during assignment conversion. +t6714.scala:23: error: Unexpected tree during assignment conversion. b(1)(5) += 20 // Not OK ^ -two errors found +2 errors diff --git a/test/files/neg/t6714.scala b/test/files/neg/t6714.scala index 2cb3414e2f08..9e9e41d51ecd 100644 --- a/test/files/neg/t6714.scala +++ b/test/files/neg/t6714.scala @@ -1,3 +1,5 @@ +// +// case class A(a: Int, index: Int) { def apply(i: Int)(implicit ev: Int): A = new A(ev, i) diff --git a/test/files/neg/t6728.check b/test/files/neg/t6728.check index d853d6f724da..7d1692e36189 100644 --- a/test/files/neg/t6728.check +++ b/test/files/neg/t6728.check @@ -1,4 +1,4 @@ t6728.scala:4: error: '(' expected but '}' found. 
} ^ -one error found +1 error diff --git a/test/files/neg/t6758.check b/test/files/neg/t6758.check index 2cdd6b8ae53c..ea5fb20a52e7 100644 --- a/test/files/neg/t6758.check +++ b/test/files/neg/t6758.check @@ -25,4 +25,4 @@ t6758.scala:38: error: not found: type typeparam t6758.scala:41: error: not found: type valueparam @valueparam x: Any ^ -9 errors found +9 errors diff --git a/test/files/neg/t6758.scala b/test/files/neg/t6758.scala index acf333bf9059..a466f0f3ca09 100644 --- a/test/files/neg/t6758.scala +++ b/test/files/neg/t6758.scala @@ -7,13 +7,13 @@ class AnnotNotFound { foo } - () => { + def f = () => { @infunction def foo = 0 () } - () => { + def g = () => { val bar: Int = { @nested val bar2: Int = 2 diff --git a/test/files/neg/t677.check b/test/files/neg/t677.check index 122830a98fa2..000f1259bdad 100644 --- a/test/files/neg/t677.check +++ b/test/files/neg/t677.check @@ -3,4 +3,4 @@ t677.scala:2: error: type mismatch; required: Nothing val zx: Nothing = {() => 4} ^ -one error found +1 error diff --git a/test/files/neg/t6771b.check b/test/files/neg/t6771b.check index 0c9fae533e28..4b0aff5cf740 100644 --- a/test/files/neg/t6771b.check +++ b/test/files/neg/t6771b.check @@ -3,4 +3,4 @@ t6771b.scala:12: error: type mismatch; required: Test.a.type b = b match { case x => x } ^ -one error found +1 error diff --git a/test/files/neg/t6788.check b/test/files/neg/t6788.check index 96a6f8b6016e..4e04cc8be1b1 100644 --- a/test/files/neg/t6788.check +++ b/test/files/neg/t6788.check @@ -2,4 +2,4 @@ t6788.scala:6: error: not found: value foo Error occurred in an application involving default arguments. 
s.copy(b = foo) ^ -one error found +1 error diff --git a/test/files/neg/t6795.check b/test/files/neg/t6795.check index 88ef3e9a526d..073d1a0a7110 100644 --- a/test/files/neg/t6795.check +++ b/test/files/neg/t6795.check @@ -1,4 +1,4 @@ -t6795.scala:3: error: `abstract override' modifier not allowed for type members +t6795.scala:3: error: `abstract override` modifier not allowed for type members trait T1 extends T { abstract override type U = Int } ^ -one error found +1 error diff --git a/test/files/neg/t6810.check b/test/files/neg/t6810.check index b9a362666796..1345a0ca7918 100644 --- a/test/files/neg/t6810.check +++ b/test/files/neg/t6810.check @@ -25,4 +25,4 @@ t6810.scala:27: error: unclosed character literal t6810.scala:25: error: '=' expected. val a = '\u000D' // similar treatment of CR ^ -9 errors found +9 errors diff --git a/test/files/neg/t6810.scala b/test/files/neg/t6810.scala index e7a1f032bf72..c01db614809f 100644 --- a/test/files/neg/t6810.scala +++ b/test/files/neg/t6810.scala @@ -15,7 +15,7 @@ trait t6810 { val B = s""" """ // or the same for interpolated strings - import scala.compat.Platform.EOL + import System.{lineSeparator => EOL} val `\u000A` = EOL // backquoted identifiers are arbitrary string literals val ` ` = EOL // not raw string literals aka triple-quoted, multiline strings diff --git a/test/files/neg/t6815.check b/test/files/neg/t6815.check index fae3819be115..5b3944fb7b9c 100644 --- a/test/files/neg/t6815.check +++ b/test/files/neg/t6815.check @@ -2,4 +2,4 @@ t6815.scala:15: error: stable identifier required, but Test.this.u.emptyValDef f Note that value emptyValDef is not stable because its type, Test.u.ValDef, is volatile. case _: u.emptyValDef.T => // and, unlike in pos/t6185.scala, we shouldn't allow this. 
^ -one error found +1 error diff --git a/test/files/neg/t6829.check b/test/files/neg/t6829.check index 5ccd531be194..72a60b3e8b27 100644 --- a/test/files/neg/t6829.check +++ b/test/files/neg/t6829.check @@ -53,4 +53,4 @@ t6829.scala:53: error: not found: value currentHistory Error occurred in an application involving default arguments. copy(agents = updatedAgents, state = nextState, pastHistory = currentHistory) ^ -13 errors found +13 errors diff --git a/test/files/neg/t6844.check b/test/files/neg/t6844.check index 1fc24855205b..884ebf3279ec 100644 --- a/test/files/neg/t6844.check +++ b/test/files/neg/t6844.check @@ -3,4 +3,4 @@ t6844.scala:4: error: type mismatch; required: reflect.runtime.universe.Tree q"def foo($x)" ^ -one error found +1 error diff --git a/test/files/neg/t6889.check b/test/files/neg/t6889.check index c14c3b09c087..c5841d767581 100644 --- a/test/files/neg/t6889.check +++ b/test/files/neg/t6889.check @@ -7,4 +7,4 @@ t6889.scala:17: error: the result type of an implicit conversion must be more sp t6889.scala:18: error: an expression of type Null is ineligible for implicit conversion var x: Int = null // fail - no conversion from Null ^ -three errors found +3 errors diff --git a/test/files/neg/t6895.check b/test/files/neg/t6895.check deleted file mode 100644 index df01031fff46..000000000000 --- a/test/files/neg/t6895.check +++ /dev/null @@ -1,6 +0,0 @@ -t6895.scala:19: error: polymorphic expression cannot be instantiated to expected type; - found : [F3[F3_P]]Foo[F3] - required: Foo[[X3]Bar[[X1]String]] - val nok: Foo[({type L[X3] = Bar[M]})#L] = barFoo /* Type inference can't unify F with L */ - ^ -one error found diff --git a/test/files/neg/t6895.scala b/test/files/neg/t6895.scala deleted file mode 100644 index 5fb20d8c619c..000000000000 --- a/test/files/neg/t6895.scala +++ /dev/null @@ -1,26 +0,0 @@ -trait Foo[F1[F1_P]] -trait Bar[F2[F2_P]] - -class Test { - def barFoo[F3[F3_P]]: Foo[F3] = ??? 
- - // Now we can define a couple of type aliases: - type M[X1] = String - type N[X2] = Bar[M] - - // val ok1: Foo[N] = barFoo - // Foo[?F3] <:< Foo[Test.this.N] - // [X2]Test.this.N[X2] <:< [F3_P]?F3[F3_P] - // Test.this.N[X2] <:< ?F3[X2] - // true, ?F3=N - - // val ok2: Foo[({type L[X] = Bar[M]})#L] = barFoo[N] - - val nok: Foo[({type L[X3] = Bar[M]})#L] = barFoo /* Type inference can't unify F with L */ - // Foo[?F3] <:< Foo[[X3]Bar[[X1]String]] - // [X3]Bar[[X1]String] <:< ?F3 - // [X3]Bar[[X1]String] <:< [F3_P]?F3[F3_P] - // Bar[[X1]String] <:< ?F3[X3] - // X3 <:< [X1]String - // false -} diff --git a/test/files/neg/t6895b.check b/test/files/neg/t6895b.check deleted file mode 100644 index 565925127b18..000000000000 --- a/test/files/neg/t6895b.check +++ /dev/null @@ -1,9 +0,0 @@ -t6895b.scala:20: error: could not find implicit value for parameter e: Foo[[X]Bar[[X]Or[String,X],X]] - implicitly[Foo[({type L[X] = Bar[StringOr, X]})#L]] - ^ -t6895b.scala:23: error: polymorphic expression cannot be instantiated to expected type; - found : [F[_]]Foo[[X(in type L)]Bar[F,X(in type L)]] - required: Foo[[X(in type L)]Bar[[X]Or[String,X],X(in type L)]] - barFoo(null) : Foo[({type L[X] = Bar[StringOr, X]})#L] - ^ -two errors found diff --git a/test/files/neg/t6902.check b/test/files/neg/t6902.check index 82173360ba53..dabceb8df834 100644 --- a/test/files/neg/t6902.check +++ b/test/files/neg/t6902.check @@ -1,12 +1,12 @@ -t6902.scala:5: warning: unreachable code +t6902.scala:6: warning: unreachable code case Some(b) => 3 // no warning was emitted ^ -t6902.scala:10: warning: unreachable code +t6902.scala:11: warning: unreachable code case Some(b) => 3 // no warning was emitted ^ -t6902.scala:22: warning: unreachable code +t6902.scala:23: warning: unreachable code case 1 => 3 // crash ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t6902.scala b/test/files/neg/t6902.scala index 381512757231..680e6108481c 100644 --- a/test/files/neg/t6902.scala +++ b/test/files/neg/t6902.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { Some(Some(1)) collect { case Some(a) => 2 diff --git a/test/files/neg/t691.check b/test/files/neg/t691.check index 77ff7b1d0234..ed872a85e20a 100644 --- a/test/files/neg/t691.check +++ b/test/files/neg/t691.check @@ -1,4 +1,4 @@ t691.scala:27: error: ambiguous parent class qualifier trait TiC extends super[Arrow].Ti2 with super[AssignArrow].Ti1; ^ -one error found +1 error diff --git a/test/files/neg/t6912.check b/test/files/neg/t6912.check index 137b65170555..a8f66352f0b9 100644 --- a/test/files/neg/t6912.check +++ b/test/files/neg/t6912.check @@ -1,4 +1,4 @@ t6912.scala:8: error: not found: type Xxxx def test[T]: Xxxx = Foo1[T] ^ -one error found +1 error diff --git a/test/files/neg/t692.check b/test/files/neg/t692.check index 0ca99717d69c..db773fb93646 100644 --- a/test/files/neg/t692.check +++ b/test/files/neg/t692.check @@ -16,4 +16,8 @@ t692.scala:14: error: class Foo takes type parameters t692.scala:19: error: class Foo takes type parameters class Bar[A <: Foo](implicit tpeA : Type[A]) extends Foo; ^ -6 errors found +t692.scala:11: warning: Implicit definition should have explicit type (inferred test3.this.FooType) [quickfixable] + implicit def typeOfFoo = FooType(); + ^ +1 warning +6 errors diff --git a/test/files/neg/t6920.check b/test/files/neg/t6920.check index d10abff03cc0..c661429b4881 100644 --- a/test/files/neg/t6920.check +++ b/test/files/neg/t6920.check @@ -1,6 +1,6 @@ -t6920.scala:9: error: too many arguments (2) for method applyDynamicNamed: (values: Seq[(String, Any)])String +t6920.scala:9: error: too many arguments (found 2, expected 1) for method applyDynamicNamed: (values: Seq[(String, Any)]): String error after rewriting to 
CompilerError.this.test.applyDynamicNamed("crushTheCompiler")(scala.Tuple2("a", 1), scala.Tuple2("b", 2)) possible cause: maybe a wrong Dynamic method signature? test.crushTheCompiler(a = 1, b = 2) ^ -one error found +1 error diff --git a/test/files/neg/t6920.scala b/test/files/neg/t6920.scala index b79d641698bb..25dc7b3b6bfd 100644 --- a/test/files/neg/t6920.scala +++ b/test/files/neg/t6920.scala @@ -7,4 +7,4 @@ class DynTest extends Dynamic { class CompilerError { val test = new DynTest test.crushTheCompiler(a = 1, b = 2) -} \ No newline at end of file +} diff --git a/test/files/neg/t6928.check b/test/files/neg/t6928.check index 28b8e382dcfb..23b8ac80c364 100644 --- a/test/files/neg/t6928.check +++ b/test/files/neg/t6928.check @@ -4,4 +4,4 @@ object B extends A(B) t6928.scala:3: error: super constructor cannot be passed a self reference unless parameter is declared by-name object C extends A(null, null, C) ^ -two errors found +2 errors diff --git a/test/files/neg/t693.check b/test/files/neg/t693.check index 25bd14150108..7bfeb4311adb 100644 --- a/test/files/neg/t693.check +++ b/test/files/neg/t693.check @@ -1,4 +1,4 @@ t693.scala:4: error: x is already defined as value x val x : Int = 10; ^ -one error found +1 error diff --git a/test/files/neg/t6931.check b/test/files/neg/t6931.check index 7cf804a93632..fb60874896f7 100644 --- a/test/files/neg/t6931.check +++ b/test/files/neg/t6931.check @@ -7,4 +7,4 @@ Test_2.scala:3: error: 2 Test_2.scala:3: error: 3 err"123" ^ -three errors found +3 errors diff --git a/test/files/neg/t6931/Macros_1.scala b/test/files/neg/t6931/Macros_1.scala index 56da075d1f03..9e167e61db74 100644 --- a/test/files/neg/t6931/Macros_1.scala +++ b/test/files/neg/t6931/Macros_1.scala @@ -12,4 +12,4 @@ object Macros { for (i <- 1 to 3) c.error(arg.pos.withPoint(arg.pos.point + i - 1), i.toString) q"()" } -} \ No newline at end of file +} diff --git a/test/files/neg/t6931/Test_2.scala b/test/files/neg/t6931/Test_2.scala index 
6a6f64590408..dbd7fc2605dc 100644 --- a/test/files/neg/t6931/Test_2.scala +++ b/test/files/neg/t6931/Test_2.scala @@ -1,4 +1,4 @@ object Test extends App { import Macros._ err"123" -} \ No newline at end of file +} diff --git a/test/files/neg/t6934.check b/test/files/neg/t6934.check index 6ec2ebdbfd05..14e24b428215 100644 --- a/test/files/neg/t6934.check +++ b/test/files/neg/t6934.check @@ -1,7 +1,7 @@ -ScalaMain.scala:6: error: variable STATIC_PROTECTED_FIELD in class JavaClass cannot be accessed in object test.JavaClass +ScalaMain.scala:6: error: variable STATIC_PROTECTED_FIELD in class JavaClass cannot be accessed as a member of object test.JavaClass from object ScalaMain in package test2 Access to protected variable STATIC_PROTECTED_FIELD not permitted because enclosing object ScalaMain in package test2 is not a subclass of class JavaClass in package test where target is defined val a = test.JavaClass.STATIC_PROTECTED_FIELD ^ -one error found +1 error diff --git a/test/files/neg/t6934/ScalaClass.scala b/test/files/neg/t6934/ScalaClass.scala index 1ecd3303651a..cac6034d75d0 100644 --- a/test/files/neg/t6934/ScalaClass.scala +++ b/test/files/neg/t6934/ScalaClass.scala @@ -3,4 +3,4 @@ package test class ScalaClass { /* double-checking that we can still do this */ def hmm = JavaClass.STATIC_PROTECTED_FIELD -} \ No newline at end of file +} diff --git a/test/files/neg/t6934/ScalaMain.scala b/test/files/neg/t6934/ScalaMain.scala index 8b660dcf13ad..b9283ef0205b 100644 --- a/test/files/neg/t6934/ScalaMain.scala +++ b/test/files/neg/t6934/ScalaMain.scala @@ -2,8 +2,8 @@ package test2 object ScalaMain { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val a = test.JavaClass.STATIC_PROTECTED_FIELD } -} \ No newline at end of file +} diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check index acee0e7d60fd..01026e711dd7 100644 --- a/test/files/neg/t6952.check +++ b/test/files/neg/t6952.check @@ -10,4 +10,4 @@ t6952.scala:3: error: 
extension of type scala.Dynamic needs to be enabled by making the implicit value scala.language.dynamics visible. trait C extends A with Dynamic ^ -two errors found +2 errors diff --git a/test/files/neg/t696.check b/test/files/neg/t696.check index b7bc5cdf98d5..c2e263e9af4e 100644 --- a/test/files/neg/t696.check +++ b/test/files/neg/t696.check @@ -6,4 +6,4 @@ t696.scala:6: error: diverging implicit expansion for type TypeUtil0.Type[X] starting with method WithType in object TypeUtil0 def foo[X]() = as[X](null) ^ -two errors found +2 errors diff --git a/test/files/neg/t6962.check b/test/files/neg/t6962.check new file mode 100644 index 000000000000..c435c9fa5c33 --- /dev/null +++ b/test/files/neg/t6962.check @@ -0,0 +1,4 @@ +t6962.scala:5: error: Bar is not an enclosing class + protected[Bar] def crashes(withDefaultParam: Boolean = true): Int = 42 + ^ +1 error diff --git a/test/files/neg/t6962.scala b/test/files/neg/t6962.scala new file mode 100644 index 000000000000..4747944d5980 --- /dev/null +++ b/test/files/neg/t6962.scala @@ -0,0 +1,6 @@ +trait t6962 { + def sth() = { + crashes() + } + protected[Bar] def crashes(withDefaultParam: Boolean = true): Int = 42 +} diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check deleted file mode 100644 index 8042116140fa..000000000000 --- a/test/files/neg/t6963a.check +++ /dev/null @@ -1,7 +0,0 @@ -t6963a.scala:5: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: -The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse. - List(1,2,3,4,5).scanRight(0)(_+_) - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found diff --git a/test/files/neg/t6963a.scala b/test/files/neg/t6963a.scala deleted file mode 100644 index db41f6d96b23..000000000000 --- a/test/files/neg/t6963a.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Xfatal-warnings -Xmigration:2.7 -object Test { - import scala.collection.mutable._ - - List(1,2,3,4,5).scanRight(0)(_+_) -} diff --git a/test/files/neg/t6988.check b/test/files/neg/t6988.check index acb7b3cb082d..cda3408ee7eb 100644 --- a/test/files/neg/t6988.check +++ b/test/files/neg/t6988.check @@ -4,4 +4,4 @@ t6988.scala:3: error: annotation argument needs to be a constant; found: 13.asIn t6988.scala:8: error: annotation argument needs to be a constant; found: O.SerialUID @SerialVersionUID(O.SerialUID) case class IdentifyMessage3(userName: String, user: User, code: Int) ^ -two errors found +2 errors diff --git a/test/files/neg/t700.check b/test/files/neg/t700.check index 4c0a2e5fda2d..836c4eecea8e 100644 --- a/test/files/neg/t700.check +++ b/test/files/neg/t700.check @@ -1,4 +1,4 @@ -t700.scala:6: error: method foobar in trait Foo is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override' +t700.scala:6: error: method foobar in trait Foo is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract` and `override` def foobar: Unit = super.foobar ^ -one error found +1 error diff --git a/test/files/neg/t7007.check b/test/files/neg/t7007.check index e22ecb9e4ef1..2a4ac43b7396 100644 --- a/test/files/neg/t7007.check +++ b/test/files/neg/t7007.check @@ -4,4 +4,4 @@ t7007.scala:5: error: Implementation restriction: <$anon: A => B> requires prema t7007.scala:5: error: Implementation restriction: <$anon: A => B> requires premature access to class Crash. 
def this(a: Seq[A]) = this(a.collect{ case b: B => b}, a.collect{ case b: B => b}) ^ -two errors found +2 errors diff --git a/test/files/neg/t7014.check b/test/files/neg/t7014.check index 9351079918e8..7d2e15cc397c 100644 --- a/test/files/neg/t7014.check +++ b/test/files/neg/t7014.check @@ -1,5 +1,5 @@ warning: While parsing annotations in t7014-neg.obj/t7014/ThreadSafetyLevel_1.class, could not find COMPLETELY_THREADSAFE in enum ThreadSafetyLevel_1. This is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (scala/bug#7014). -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t7014/t7014_2.scala b/test/files/neg/t7014/t7014_2.scala index 62a053b9ebb5..94f8f1c05237 100644 --- a/test/files/neg/t7014/t7014_2.scala +++ b/test/files/neg/t7014/t7014_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror package t7014 import ThreadSafetyLevel_1.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed diff --git a/test/files/neg/t7020.check b/test/files/neg/t7020.check index 3116600c7999..3a8813f727f9 100644 --- a/test/files/neg/t7020.check +++ b/test/files/neg/t7020.check @@ -1,19 +1,19 @@ -t7020.scala:4: warning: match may not be exhaustive. +t7020.scala:5: warning: match may not be exhaustive. It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _) List(5) match { ^ -t7020.scala:11: warning: match may not be exhaustive. +t7020.scala:12: warning: match may not be exhaustive. 
It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _) List(5) match { ^ -t7020.scala:18: warning: match may not be exhaustive. +t7020.scala:19: warning: match may not be exhaustive. It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _) List(5) match { ^ -t7020.scala:25: warning: match may not be exhaustive. +t7020.scala:26: warning: match may not be exhaustive. It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _) List(5) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t7020.scala b/test/files/neg/t7020.scala index e82e396373f8..f323456a9bca 100644 --- a/test/files/neg/t7020.scala +++ b/test/files/neg/t7020.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { // warning was non-deterministic List(5) match { diff --git a/test/files/neg/t7046-2.check b/test/files/neg/t7046-2.check index b4efd8b5e98d..e9321630864e 100644 --- a/test/files/neg/t7046-2.check +++ b/test/files/neg/t7046-2.check @@ -1,3 +1,3 @@ error: knownDirectSubclasses of Foo observed before subclass Bar registered error: knownDirectSubclasses of Foo observed before subclass Baz registered -two errors found +2 errors diff --git a/test/files/neg/t7046.check b/test/files/neg/t7046.check index 689520a0aa71..fc0f9ce6cb3b 100644 --- a/test/files/neg/t7046.check +++ b/test/files/neg/t7046.check @@ -1,3 +1,3 @@ error: knownDirectSubclasses of Foo observed before subclass Local registered error: knownDirectSubclasses of Foo observed before subclass Riddle registered -two errors found +2 errors diff --git a/test/files/neg/t7052.check b/test/files/neg/t7052.check new file mode 100644 index 000000000000..6816f79bde81 --- /dev/null +++ b/test/files/neg/t7052.check @@ -0,0 +1,7 @@ +t7052.scala:9: error: name clash between defined and inherited member: +def apply(xs: Int*): Int in class A and +def apply(xs: Seq[Int]): Int at line 9 +have same type after erasure: (xs: Seq): Int + def apply(xs: Seq[Int]) = 27 + ^ +1 error diff --git a/test/files/neg/t7052.scala b/test/files/neg/t7052.scala new file mode 100644 index 000000000000..0cfad0dce678 --- /dev/null +++ b/test/files/neg/t7052.scala @@ -0,0 +1,21 @@ + +class A { + def apply(xs: Int*) = 42 +} + +/* name clash between defined and inherited member: + */ +class B extends A { + def apply(xs: Seq[Int]) = 27 +} + +/* method apply overrides nothing. 
+class C extends A { + override def apply(xs: Seq[Int]) = 17 +} + */ + +// ok because different return type +class D extends A { + def apply(xs: Seq[Int]) = "42" +} diff --git a/test/files/neg/t7052b.check b/test/files/neg/t7052b.check new file mode 100644 index 000000000000..c45d895b65c0 --- /dev/null +++ b/test/files/neg/t7052b.check @@ -0,0 +1,6 @@ +t7052b.scala:15: error: method apply overrides nothing. +Note: the super classes of class C contain the following, non final members named apply: +def apply(xs: Int*): Int + override def apply(xs: Seq[Int]) = 17 + ^ +1 error diff --git a/test/files/neg/t7052b.scala b/test/files/neg/t7052b.scala new file mode 100644 index 000000000000..8c410e8bf0ef --- /dev/null +++ b/test/files/neg/t7052b.scala @@ -0,0 +1,21 @@ + +class A { + def apply(xs: Int*) = 42 +} + +/* name clash between defined and inherited member: +class B extends A { + def apply(xs: Seq[Int]) = 27 +} + */ + +/* method apply overrides nothing. + */ +class C extends A { + override def apply(xs: Seq[Int]) = 17 +} + +// ok because different return type +class D extends A { + def apply(xs: Seq[Int]) = "42" +} diff --git a/test/files/neg/t708.check b/test/files/neg/t708.check index 4983aab61368..6cc97444afde 100644 --- a/test/files/neg/t708.check +++ b/test/files/neg/t708.check @@ -1,5 +1,7 @@ -t708.scala:8: error: overriding type S in trait X with bounds <: A.this.T; - type S has incompatible type +t708.scala:8: error: incompatible type in overriding +private[trait A] type S <: A.this.T (defined in trait X); + found : Any + required: <: A.this.T override private[A] type S = Any; ^ -one error found +1 error diff --git a/test/files/neg/t7110.check b/test/files/neg/t7110.check index e7dc25f6d715..1b24cee70cb2 100644 --- a/test/files/neg/t7110.check +++ b/test/files/neg/t7110.check @@ -1,6 +1,6 @@ -t7110.scala:3: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. 
+t7110.scala:4: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. try { ??? } // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t7110.scala b/test/files/neg/t7110.scala index b005b4ebf216..66aa076681c0 100644 --- a/test/files/neg/t7110.scala +++ b/test/files/neg/t7110.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { try { ??? } // warn diff --git a/test/files/neg/t712.check b/test/files/neg/t712.check index 3f02b4b29457..c8931d970141 100644 --- a/test/files/neg/t712.check +++ b/test/files/neg/t712.check @@ -1,4 +1,11 @@ t712.scala:10: error: overloaded method coerce needs result type implicit def coerce(p : ParentImpl) = p.self; ^ -one error found +t712.scala:3: warning: Implicit definition should have explicit type (inferred A.this.Node) [quickfixable] + implicit def coerce(n : NodeImpl) = n.self; + ^ +t712.scala:10: warning: Implicit definition should have explicit type [quickfixable] + implicit def coerce(p : ParentImpl) = p.self; + ^ +2 warnings +1 error diff --git a/test/files/neg/t7131.check b/test/files/neg/t7131.check new file mode 100644 index 000000000000..af8a5ce7447f --- /dev/null +++ b/test/files/neg/t7131.check @@ -0,0 +1,14 @@ +t7131.scala:21: error: type mismatch; + found : Iterable[U] + required: That + Note: implicit method convertToSimpleMappable is not applicable here because it comes after the application point and it lacks an explicit result type. 
+ x.value.map(f) + ^ +t7131.scala:28: warning: Implicit definition should have explicit type (inferred ObservableValue.TraversableMappable[T,Container]) [quickfixable] + implicit def convertToTraversableMappable[T, Container[X] <: Traversable[X]](x: ObservableValue[Container[T]]) = + ^ +t7131.scala:43: warning: Implicit definition should have explicit type (inferred ObservableValue.NestedMappable[T,Container]) [quickfixable] + implicit def convertToSimpleMappable[T, Container[X] <: ObservableValue.HasMap[X, Container]](x: ObservableValue[Container[T]]) = + ^ +2 warnings +1 error diff --git a/test/files/neg/t7131.scala b/test/files/neg/t7131.scala new file mode 100644 index 000000000000..15f8b87796fc --- /dev/null +++ b/test/files/neg/t7131.scala @@ -0,0 +1,93 @@ +import collection.generic.CanBuildFrom + +// leaving out the observable part +trait ObservableValue[-T] { + def value: T +} + +object ObservableValue { + + //this works as is + implicit class SimpleMappable[T](x: ObservableValue[T]) { + def map[U](f: T => U) = new ObservableValue[U] { + def value = f(x.value) + } + } + + class TraversableMappable[T, Container[X] <: Traversable[X]](x: ObservableValue[Container[T]]) { + + def map[U, That](f: T => U)(implicit bf: CanBuildFrom[Traversable[T], U, That]): ObservableValue[That] = new ObservableValue[That] { + def value: That = { + x.value.map(f) + } + } + + } + + //for some reason using an implicit class does not work + implicit def convertToTraversableMappable[T, Container[X] <: Traversable[X]](x: ObservableValue[Container[T]]) = + new TraversableMappable(x) + + type HasMap[T, That[_]] = { + def map[U](f: T => U): That[U] + } + + class NestedMappable[T, Container[X] <: HasMap[X, Container]](x: ObservableValue[Container[T]]) { + + def map[U](f: T => U): ObservableValue[Container[U]] = new ObservableValue[Container[U]] { + def value: Container[U] = x.value.map(f) + } + } + + //for some reason using an implicit class does not work + implicit def 
convertToSimpleMappable[T, Container[X] <: ObservableValue.HasMap[X, Container]](x: ObservableValue[Container[T]]) = + new NestedMappable(x) + +} + +object Main extends App { + + class TestCase extends ObservableValue[Int] { + var value: Int = 0 + } + + val x = new TestCase + + val r = x.map(_ + 1) + + println(r.value) //1 + + x.value = 42 + + println(r.value) //43 + + + class TestCase1 extends ObservableValue[Option[Int]] { + var value: Option[Int] = None + } + + val x1 = new TestCase1 + + val r1 = x1 map ((x: Int) => x + 1) + + println(r1.value) //None + + x1.value = Some(3) + + println(r1.value) //Some(4) + + class TestCase2 extends ObservableValue[List[Int]] { + var value: List[Int] = List() + } + + val y = new TestCase2 + + val q = y map (_ + 1) + + println(q.value) //List() + + y.value = List(3, 4) + + println(q.value) //List(4, 5) + +} diff --git a/test/files/neg/t715.check b/test/files/neg/t715.check index 2c01047a63cb..301e118ef0db 100644 --- a/test/files/neg/t715.check +++ b/test/files/neg/t715.check @@ -1,4 +1,4 @@ -t715.scala:12: error: method chilren in trait NodeImpl is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override' +t715.scala:12: error: method chilren in trait NodeImpl is accessed from super. 
It may not be abstract unless it is overridden by a member declared `abstract` and `override` override def children = super.chilren; ^ -one error found +1 error diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check index 0b813949469b..d8c70443dddb 100644 --- a/test/files/neg/t7157.check +++ b/test/files/neg/t7157.check @@ -1,85 +1,85 @@ -Test_2.scala:5: error: no arguments allowed for nullary macro method m1_0_0: ()Unit +Test_2.scala:5: error: no arguments allowed for nullary macro method m1_0_0: (): Unit m1_0_0(1) ^ -Test_2.scala:6: error: no arguments allowed for nullary macro method m1_0_0: ()Unit +Test_2.scala:6: error: no arguments allowed for nullary macro method m1_0_0: (): Unit m1_0_0(1, 2) ^ -Test_2.scala:7: error: no arguments allowed for nullary macro method m1_0_0: ()Unit +Test_2.scala:7: error: no arguments allowed for nullary macro method m1_0_0: (): Unit m1_0_0(1, 2, 3) ^ -Test_2.scala:9: error: not enough arguments for macro method m1_1_1: (x: Int)Unit. +Test_2.scala:9: error: not enough arguments for macro method m1_1_1: (x: Int): Unit. Unspecified value parameter x. m1_1_1() ^ -Test_2.scala:11: error: too many arguments (2) for macro method m1_1_1: (x: Int)Unit +Test_2.scala:11: error: too many arguments (found 2, expected 1) for macro method m1_1_1: (x: Int): Unit m1_1_1(1, 2) ^ -Test_2.scala:12: error: too many arguments (3) for macro method m1_1_1: (x: Int)Unit +Test_2.scala:12: error: too many arguments (found 3, expected 1) for macro method m1_1_1: (x: Int): Unit m1_1_1(1, 2, 3) ^ -Test_2.scala:14: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit. +Test_2.scala:14: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int): Unit. Unspecified value parameters x, y. m1_2_2() ^ -Test_2.scala:15: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit. +Test_2.scala:15: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int): Unit. Unspecified value parameter y. 
m1_2_2(1) ^ -Test_2.scala:17: error: too many arguments (3) for macro method m1_2_2: (x: Int, y: Int)Unit +Test_2.scala:17: error: too many arguments (found 3, expected 2) for macro method m1_2_2: (x: Int, y: Int): Unit m1_2_2(1, 2, 3) ^ -Test_2.scala:24: error: not enough arguments for macro method m1_1_inf: (x: Int, y: Int*)Unit. +Test_2.scala:24: error: not enough arguments for macro method m1_1_inf: (x: Int, y: Int*): Unit. Unspecified value parameters x, y. m1_1_inf() ^ -Test_2.scala:29: error: not enough arguments for macro method m1_2_inf: (x: Int, y: Int, z: Int*)Unit. +Test_2.scala:29: error: not enough arguments for macro method m1_2_inf: (x: Int, y: Int, z: Int*): Unit. Unspecified value parameters x, y, z. m1_2_inf() ^ -Test_2.scala:30: error: not enough arguments for macro method m1_2_inf: (x: Int, y: Int, z: Int*)Unit. +Test_2.scala:30: error: not enough arguments for macro method m1_2_inf: (x: Int, y: Int, z: Int*): Unit. Unspecified value parameters y, z. m1_2_inf(1) ^ -Test_2.scala:35: error: no arguments allowed for nullary macro method m2_0_0: ()Unit +Test_2.scala:35: error: no arguments allowed for nullary macro method m2_0_0: (): Unit m2_0_0()(1) ^ -Test_2.scala:36: error: no arguments allowed for nullary macro method m2_0_0: ()Unit +Test_2.scala:36: error: no arguments allowed for nullary macro method m2_0_0: (): Unit m2_0_0()(1, 2) ^ -Test_2.scala:37: error: no arguments allowed for nullary macro method m2_0_0: ()Unit +Test_2.scala:37: error: no arguments allowed for nullary macro method m2_0_0: (): Unit m2_0_0()(1, 2, 3) ^ -Test_2.scala:39: error: not enough arguments for macro method m2_1_1: (x: Int)Unit. +Test_2.scala:39: error: not enough arguments for macro method m2_1_1: (x: Int): Unit. Unspecified value parameter x. 
m2_1_1()() ^ -Test_2.scala:41: error: too many arguments (2) for macro method m2_1_1: (x: Int)Unit +Test_2.scala:41: error: too many arguments (found 2, expected 1) for macro method m2_1_1: (x: Int): Unit m2_1_1()(1, 2) ^ -Test_2.scala:42: error: too many arguments (3) for macro method m2_1_1: (x: Int)Unit +Test_2.scala:42: error: too many arguments (found 3, expected 1) for macro method m2_1_1: (x: Int): Unit m2_1_1()(1, 2, 3) ^ -Test_2.scala:44: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit. +Test_2.scala:44: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int): Unit. Unspecified value parameters x, y. m2_2_2()() ^ -Test_2.scala:45: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit. +Test_2.scala:45: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int): Unit. Unspecified value parameter y. m2_2_2()(1) ^ -Test_2.scala:47: error: too many arguments (3) for macro method m2_2_2: (x: Int, y: Int)Unit +Test_2.scala:47: error: too many arguments (found 3, expected 2) for macro method m2_2_2: (x: Int, y: Int): Unit m2_2_2()(1, 2, 3) ^ -Test_2.scala:54: error: not enough arguments for macro method m2_1_inf: (x: Int, y: Int*)Unit. +Test_2.scala:54: error: not enough arguments for macro method m2_1_inf: (x: Int, y: Int*): Unit. Unspecified value parameters x, y. m2_1_inf()() ^ -Test_2.scala:59: error: not enough arguments for macro method m2_2_inf: (x: Int, y: Int, z: Int*)Unit. +Test_2.scala:59: error: not enough arguments for macro method m2_2_inf: (x: Int, y: Int, z: Int*): Unit. Unspecified value parameters x, y, z. m2_2_inf()() ^ -Test_2.scala:60: error: not enough arguments for macro method m2_2_inf: (x: Int, y: Int, z: Int*)Unit. +Test_2.scala:60: error: not enough arguments for macro method m2_2_inf: (x: Int, y: Int, z: Int*): Unit. Unspecified value parameters y, z. 
m2_2_inf()(1) ^ -24 errors found +24 errors diff --git a/test/files/neg/t7157/Impls_Macros_1.scala b/test/files/neg/t7157/Impls_Macros_1.scala index 31d4d786d060..ee1b1e4e7a13 100644 --- a/test/files/neg/t7157/Impls_Macros_1.scala +++ b/test/files/neg/t7157/Impls_Macros_1.scala @@ -5,28 +5,28 @@ object Macros { def impl1_0_0(c: Context)() = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl1_1_1(c: Context)(x: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl1_2_2(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } - def m1_0_0() = macro impl1_0_0 - def m1_1_1(x: Int) = macro impl1_1_1 - def m1_2_2(x: Int, y: Int) = macro impl1_2_2 + def m1_0_0(): Unit = macro impl1_0_0 + def m1_1_1(x: Int): Unit = macro impl1_1_1 + def m1_2_2(x: Int, y: Int): Unit = macro impl1_2_2 def impl1_0_inf(c: Context)(x: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl1_1_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl1_2_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } - def m1_0_inf(x: Int*) = macro impl1_0_inf - def m1_1_inf(x: Int, y: Int*) = macro impl1_1_inf - def m1_2_inf(x: Int, y: Int, z: Int*) = macro impl1_2_inf + def m1_0_inf(x: Int*): Unit = macro impl1_0_inf + def m1_1_inf(x: Int, y: Int*): Unit = macro impl1_1_inf + def m1_2_inf(x: Int, y: Int, z: Int*): Unit = macro impl1_2_inf def impl2_0_0(c: Context)()() = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl2_1_1(c: Context)()(x: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl2_2_2(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } - def m2_0_0()() 
= macro impl2_0_0 - def m2_1_1()(x: Int) = macro impl2_1_1 - def m2_2_2()(x: Int, y: Int) = macro impl2_2_2 + def m2_0_0()(): Unit = macro impl2_0_0 + def m2_1_1()(x: Int): Unit = macro impl2_1_1 + def m2_2_2()(x: Int, y: Int): Unit = macro impl2_2_2 def impl2_0_inf(c: Context)()(x: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl2_1_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } def impl2_2_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"""println("hello world")""") } - def m2_0_inf()(x: Int*) = macro impl2_0_inf - def m2_1_inf()(x: Int, y: Int*) = macro impl2_1_inf - def m2_2_inf()(x: Int, y: Int, z: Int*) = macro impl2_2_inf -} \ No newline at end of file + def m2_0_inf()(x: Int*): Unit = macro impl2_0_inf + def m2_1_inf()(x: Int, y: Int*): Unit = macro impl2_1_inf + def m2_2_inf()(x: Int, y: Int, z: Int*): Unit = macro impl2_2_inf +} diff --git a/test/files/neg/t7157/Test_2.scala b/test/files/neg/t7157/Test_2.scala index 45a60263992a..faafb02a3888 100644 --- a/test/files/neg/t7157/Test_2.scala +++ b/test/files/neg/t7157/Test_2.scala @@ -60,4 +60,4 @@ object Test extends App { m2_2_inf()(1) m2_2_inf()(1, 2) m2_2_inf()(1, 2, 3) -} \ No newline at end of file +} diff --git a/test/files/neg/t7171.check b/test/files/neg/t7171.check index bb76795843f7..e4d669cf19d4 100644 --- a/test/files/neg/t7171.check +++ b/test/files/neg/t7171.check @@ -4,6 +4,6 @@ t7171.scala:4: warning: The outer reference in this type test cannot be checked t7171.scala:11: warning: The outer reference in this type test cannot be checked at run time. case _: A => true; case _ => false ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t7171.scala b/test/files/neg/t7171.scala index 443deeeac3da..aa3aaa9ef32d 100644 --- a/test/files/neg/t7171.scala +++ b/test/files/neg/t7171.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // trait T { final case class A() diff --git a/test/files/neg/t7171b.check b/test/files/neg/t7171b.check index 7adfe6f1d374..4444bb4942ae 100644 --- a/test/files/neg/t7171b.check +++ b/test/files/neg/t7171b.check @@ -7,6 +7,6 @@ t7171b.scala:10: warning: The outer reference in this type test cannot be checke t7171b.scala:15: warning: The outer reference in this type test cannot be checked at run time. case _: A => true; case _ => false ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t7171b.scala b/test/files/neg/t7171b.scala index 4a91c9d2f78b..236603c02ce8 100644 --- a/test/files/neg/t7171b.scala +++ b/test/files/neg/t7171b.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // trait T { final case class A() diff --git a/test/files/neg/t7187-3.check b/test/files/neg/t7187-3.check new file mode 100644 index 000000000000..64304640d40d --- /dev/null +++ b/test/files/neg/t7187-3.check @@ -0,0 +1,26 @@ +t7187-3.scala:13: error: type mismatch; + found : Int + required: () => Any + val t1: () => Any = m1 // error + ^ +t7187-3.scala:15: error: type mismatch; + found : Int + required: AcciSamZero + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types + ^ +t7187-3.scala:16: error: type mismatch; + found : Int + required: SamZero + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types + ^ +t7187-3.scala:14: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => Any), rather than applied to `()`. 
+Write m2() to invoke method m2, or change the expected type. + val t2: () => Any = m2 // eta-expanded with lint warning + ^ +t7187-3.scala:27: error: Methods without a parameter list and by-name params can no longer be converted to functions as `m _`, write a function literal `() => m` instead [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=EtaExpand214.t7 + val t7 = m1 _ // error: eta-expanding a nullary method + ^ +1 warning +4 errors diff --git a/test/files/neg/t7187-3.scala b/test/files/neg/t7187-3.scala new file mode 100644 index 000000000000..c6291f9b96d9 --- /dev/null +++ b/test/files/neg/t7187-3.scala @@ -0,0 +1,34 @@ +//> using options -Xsource:3 -Xlint:eta-zero -Xsource-features:eta-expand-always +// +trait AcciSamZero { def apply(): Int } + +@FunctionalInterface +trait SamZero { def apply(): Int } + +class EtaExpand214 { + def m1 = 1 + def m2() = 1 + def m3(x: Int) = x + + val t1: () => Any = m1 // error + val t2: () => Any = m2 // eta-expanded with lint warning + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types + val t3: Int => Any = m3 // ok + + val t4 = m1 // apply + val t5 = m2 // apply, ()-insertion + val t6 = m3 // eta-expansion in 3.0 + + val t4a: Int = t4 // ok + val t5a: Int = t5 // ok + val t6a: Int => Any = t6 // ok + + val t7 = m1 _ // error: eta-expanding a nullary method + val t8 = m2 _ + val t9 = m3 _ + + val t7a: () => Any = t7 // error: t7 is an error + val t8a: () => Any = t8 // ok + val t9a: Int => Any = t9 // ok +} diff --git a/test/files/neg/t7187-deprecation.check b/test/files/neg/t7187-deprecation.check new file mode 100644 index 000000000000..3e143bcb89f9 --- /dev/null +++ b/test/files/neg/t7187-deprecation.check @@ -0,0 +1,32 
@@ +t7187-deprecation.scala:17: error: type mismatch; + found : Int + required: () => Any + val t1: () => Any = m1 // error + ^ +t7187-deprecation.scala:19: error: type mismatch; + found : Int + required: AcciSamZero + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types + ^ +t7187-deprecation.scala:20: error: type mismatch; + found : Int + required: SamZero + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types + ^ +t7187-deprecation.scala:24: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method m2, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. [quickfixable] + val t5 = m2 // warn: apply, ()-insertion + ^ +t7187-deprecation.scala:31: error: Methods without a parameter list and by-name params can no longer be converted to functions as `m _`, write a function literal `() => m` instead [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=EtaExpand214.t7 + val t7 = m1 _ // error: eta-expanding a nullary method + ^ +t7187-deprecation.scala:40: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method boom, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. 
[quickfixable] + a.boom // warning: apply, ()-insertion + ^ +2 warnings +4 errors diff --git a/test/files/neg/t7187-deprecation.scala b/test/files/neg/t7187-deprecation.scala new file mode 100644 index 000000000000..6991036176ee --- /dev/null +++ b/test/files/neg/t7187-deprecation.scala @@ -0,0 +1,45 @@ +//> using options -Xsource:3 -deprecation -Werror -Xsource-features:eta-expand-always +// +trait AcciSamZero { def apply(): Int } + +@FunctionalInterface +trait SamZero { def apply(): Int } + +class A { + def boom(): Unit +} + +class EtaExpand214 { + def m1 = 1 + def m2() = 1 + def m3(x: Int) = x + + val t1: () => Any = m1 // error + val t2: () => Any = m2 // eta-expanded, only warns w/ -Xlint:eta-zero + val t2AcciSam: AcciSamZero = m2 // error, nilary methods don't eta-expand to SAM types + val t2Sam: SamZero = m2 // error, nilary methods don't eta-expand to SAM types + val t3: Int => Any = m3 // ok + + val t4 = m1 // apply + val t5 = m2 // warn: apply, ()-insertion + val t6 = m3 // eta-expansion in 3.0 + + val t4a: Int = t4 // ok + val t5a: Int = t5 // ok + val t6a: Int => Any = t6 // ok + + val t7 = m1 _ // error: eta-expanding a nullary method + val t8 = m2 _ + val t9 = m3 _ + + val t7a: () => Any = t7 // error: t7 is an error + val t8a: () => Any = t8 // ok + val t9a: Int => Any = t9 // ok + + val a = new A + a.boom // warning: apply, ()-insertion + + import scala.collection.mutable.Map + val xs = Map(1 -> "foo") + val ys = xs.clone // ok +} diff --git a/test/files/neg/t7187.check b/test/files/neg/t7187.check index 5a19b59dc2d1..518f031589e5 100644 --- a/test/files/neg/t7187.check +++ b/test/files/neg/t7187.check @@ -1,20 +1,59 @@ -t7187.scala:9: error: _ must follow method; cannot follow () => String +t7187.scala:16: error: _ must follow method; cannot follow () => String val t1f: Any = foo() _ // error: _ must follow method ^ -t7187.scala:12: error: type mismatch; +t7187.scala:19: error: type mismatch; found : String required: () => Any - val t2a: () => Any 
= bar // error: no eta-expansion of zero-arglist-methods + val t2a: () => Any = bar // error: no eta-expansion of zero-arglist-methods (nullary methods) ^ -t7187.scala:13: error: not enough arguments for method apply: (index: Int)Char in class StringOps. -Unspecified value parameter index. +t7187.scala:20: error: not enough arguments for method apply: (i: Int): Char in class StringOps. +Unspecified value parameter i. val t2b: () => Any = bar() // error: bar doesn't take arguments, so expanded to bar.apply(), which misses an argument ^ -t7187.scala:16: error: not enough arguments for method apply: (index: Int)Char in class StringOps. -Unspecified value parameter index. +t7187.scala:23: error: not enough arguments for method apply: (i: Int): Char in class StringOps. +Unspecified value parameter i. val t2e: Any = bar() _ // error: not enough arguments for method apply ^ -t7187.scala:22: error: _ must follow method; cannot follow String +t7187.scala:29: error: _ must follow method; cannot follow String val t3d: Any = baz() _ // error: _ must follow method ^ -5 errors found +t7187.scala:38: error: missing argument list for method zup in class EtaExpandZeroArg of type (x: Int): Int +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `zup _` or `zup(_)` instead of `zup`. + val t5a = zup // error in 2.13, eta-expansion in 3.0 + ^ +t7187.scala:12: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => Any), rather than applied to `()`. +Write foo() to invoke method foo, or change the expected type. + val t1b: () => Any = foo // eta-expansion, but lint warning + ^ +t7187.scala:13: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method foo, +or remove the empty argument list from its definition (Java-defined methods are exempt). 
+In Scala 3, an unapplied method like this will be eta-expanded into a function. [quickfixable] + val t1c: () => Any = { val t = foo; t } // `()`-insertion because no expected type + ^ +t7187.scala:21: warning: Methods without a parameter list and by-name params can no longer be converted to functions as `m _`, write a function literal `() => m` instead [quickfixable] + val t2c: () => Any = bar _ // warning: eta-expanding a nullary method + ^ +t7187.scala:22: warning: Methods without a parameter list and by-name params can no longer be converted to functions as `m _`, write a function literal `() => m` instead [quickfixable] + val t2d: Any = bar _ // warning: eta-expanding a nullary method + ^ +t7187.scala:26: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => Any), rather than applied to `()`. +Write baz() to invoke method baz, or change the expected type. + val t3a: () => Any = baz // eta-expansion, but lint warning + ^ +t7187.scala:32: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => Any), rather than applied to `()`. +Write zap() to invoke method zap, or change the expected type. + val t4a: () => Any = zap // eta-expansion, but lint warning + ^ +t7187.scala:33: warning: An unapplied 0-arity method was eta-expanded (due to the expected type () => Any), rather than applied to `()`. +Write zap()() to invoke method zap, or change the expected type. + val t4b: () => Any = zap() // ditto + ^ +t7187.scala:40: warning: Eta-expansion to expected type AcciSamOne, which is not a function type but is SAM-convertible to Int => Int. +trait AcciSamOne should be annotated with `@FunctionalInterface` if eta-expansion is desired. +Avoid eta-expansion by writing the function literal `((x: Int) => zup(x))` or `zup(_)`. +This warning can be filtered with `-Wconf:cat=lint-eta-sam`. 
+ val t5AcciSam: AcciSamOne = zup // ok, but warning + ^ +8 warnings +6 errors diff --git a/test/files/neg/t7187.scala b/test/files/neg/t7187.scala index 702a32298cc1..42ad461bf8b1 100644 --- a/test/files/neg/t7187.scala +++ b/test/files/neg/t7187.scala @@ -1,29 +1,42 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -Xlint:deprecation,eta-zero,eta-sam +// + +trait AcciSamOne { def apply(x: Int): Int } + +@FunctionalInterface +trait SamOne { def apply(x: Int): Int } + class EtaExpandZeroArg { def foo(): () => String = () => "" val t1a: () => Any = foo() // ok (obviously) - val t1b: () => Any = foo // eta-expansion (deprecated) in 2.12, `()`-insertion in 2.13 - val t1c: () => Any = { val t = foo; t } // ok, no expected type, `()`-insertion - val t1d: () => Any = foo _ // ok - val t1e: Any = foo _ // ok + val t1b: () => Any = foo // eta-expansion, but lint warning + val t1c: () => Any = { val t = foo; t } // `()`-insertion because no expected type + val t1d: () => Any = foo _ // ok, explicit eta-expansion requested + val t1e: Any = foo _ // ok, explicit eta-expansion requested val t1f: Any = foo() _ // error: _ must follow method def bar = "" - val t2a: () => Any = bar // error: no eta-expansion of zero-arglist-methods + val t2a: () => Any = bar // error: no eta-expansion of zero-arglist-methods (nullary methods) val t2b: () => Any = bar() // error: bar doesn't take arguments, so expanded to bar.apply(), which misses an argument - val t2c: () => Any = bar _ // ok - val t2d: Any = bar _ // ok + val t2c: () => Any = bar _ // warning: eta-expanding a nullary method + val t2d: Any = bar _ // warning: eta-expanding a nullary method val t2e: Any = bar() _ // error: not enough arguments for method apply def baz() = "" - val t3a: () => Any = baz // eta-expansion (deprecated) in 2.12, error in 2.13 + val t3a: () => Any = baz // eta-expansion, but lint warning val t3b: () => Any = baz _ // ok val t3c: Any = baz _ // ok val t3d: Any = baz() _ // error: _ must 
follow method def zap()() = "" - val t4a: () => Any = zap // eta-expansion (deprecated) in 2.12, error in 2.13 + val t4a: () => Any = zap // eta-expansion, but lint warning val t4b: () => Any = zap() // ditto val t4c: () => Any = zap _ // ok val t4d: () => Any = zap() _ // ok + + def zup(x: Int) = x + val t5a = zup // error in 2.13, eta-expansion in 3.0 + val t5Fun: Int => Int = zup // ok + val t5AcciSam: AcciSamOne = zup // ok, but warning + val t5Sam: SamOne = zup // ok } diff --git a/test/files/neg/t7212.check b/test/files/neg/t7212.check new file mode 100644 index 000000000000..9b1be1c6ea89 --- /dev/null +++ b/test/files/neg/t7212.check @@ -0,0 +1,16 @@ +t7212.scala:5: error: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to Object instead of String [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=K.f +class K extends T { def f = "" } + ^ +t7212.scala:11: error: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to Object instead of String [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=F.f +class F extends T { val f = "" } + ^ +t7212.scala:17: error: in Scala 3 (or with -Xsource-features:infer-override), the inferred type changes to Object instead of String [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=V.f +trait V extends T { var f = "" } + ^ +3 errors diff --git a/test/files/neg/t7212.scala b/test/files/neg/t7212.scala new file mode 100644 index 000000000000..ad690794a743 --- /dev/null +++ b/test/files/neg/t7212.scala @@ -0,0 +1,31 @@ + +//> using options -Xsource:3 + +trait T { def f: Object } +class K extends T { def f = "" } +object K { + val k = new K + val s: String = k.f +} + +class F extends T { val f = "" } +object F { + val f = new F + val s: String = f.f +} + +trait V extends T { var f = "" } +class W extends V +object W { + val w = new W + val s: String = w.f +} + +object refinement { + trait X { def f: Int } + trait T { def f: X } + // inferred: RefinedType(List(T, AnyRef), Nil) + // parent type: TypeRef(T) + // `=:=` is false, but `<:<` is true in both directions + class C extends T { def f = new X { def f = 1 } } +} diff --git a/test/files/neg/t7212b.check b/test/files/neg/t7212b.check new file mode 100644 index 000000000000..fe26946bb497 --- /dev/null +++ b/test/files/neg/t7212b.check @@ -0,0 +1,16 @@ +t7212b.scala:7: error: type mismatch; + found : Object + required: String + val s: String = k.f + ^ +t7212b.scala:13: error: type mismatch; + found : Object + required: String + val s: String = f.f + ^ +t7212b.scala:20: error: type mismatch; + found : Object + required: String + val s: String = w.f + ^ +3 errors diff --git a/test/files/neg/t7212b.scala b/test/files/neg/t7212b.scala new file mode 100644 index 000000000000..0dd0c9a1781b --- /dev/null +++ b/test/files/neg/t7212b.scala @@ -0,0 +1,21 @@ +//> using options -Xsource:3 -Xsource-features:infer-override + +trait T { def f: Object } +class K extends T { def f = "" } +object K { + val k = new K + val s: String = k.f +} + +class F extends T { val f = "" } +object F { + val f = new F + val s: String = f.f +} + +trait V extends T { var f = "" } +class W extends V +object W { + val w = new W + val s: 
String = w.f +} diff --git a/test/files/neg/t7214neg.check b/test/files/neg/t7214neg.check index 291af04578dc..02cbee3a893f 100644 --- a/test/files/neg/t7214neg.check +++ b/test/files/neg/t7214neg.check @@ -1,4 +1,4 @@ t7214neg.scala:28: error: not enough patterns for object Extractor offering Any: expected 1, found 0 case Extractor() => ^ -one error found +1 error diff --git a/test/files/neg/t7235.check b/test/files/neg/t7235.check index 357a3dfd83ea..ba344dff9428 100644 --- a/test/files/neg/t7235.check +++ b/test/files/neg/t7235.check @@ -1,4 +1,4 @@ t7235.scala:9: error: implementation restriction: cannot reify refinement type trees with non-empty bodies val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C { def x: Int } = ??? }.tree ^ -one error found +1 error diff --git a/test/files/neg/t7238.check b/test/files/neg/t7238.check index b87f83ff65ea..f728cc5894d2 100644 --- a/test/files/neg/t7238.check +++ b/test/files/neg/t7238.check @@ -3,4 +3,4 @@ t7238.scala:6: error: type mismatch; required: Seq[String] c.c()(Seq[Any](): _*) ^ -one error found +1 error diff --git a/test/files/neg/t7238.scala b/test/files/neg/t7238.scala index d42dc8d385cc..c97d3818f188 100644 --- a/test/files/neg/t7238.scala +++ b/test/files/neg/t7238.scala @@ -1,6 +1,6 @@ trait Main { trait C { - def c(x: Any = 0)(bs: String*) + def c(x: Any = 0)(bs: String*): Unit } def c: C c.c()(Seq[Any](): _*) diff --git a/test/files/neg/t7239.check b/test/files/neg/t7239.check index 80b14f8fc6a8..1b3bd684a486 100644 --- a/test/files/neg/t7239.check +++ b/test/files/neg/t7239.check @@ -1,4 +1,4 @@ t7239.scala:10: error: not found: value foBar fooBar = foBar.toInt ^ -one error found +1 error diff --git a/test/files/neg/t7251.check b/test/files/neg/t7251.check index a17e710d367e..3a8a770d80e1 100644 --- a/test/files/neg/t7251.check +++ b/test/files/neg/t7251.check @@ -1,4 +1,4 @@ B_2.scala:5: error: object Outer$Triple$ is not a member of package s println( s.Outer$Triple$ ) ^ -one 
error found +1 error diff --git a/test/files/neg/t7259.check b/test/files/neg/t7259.check index 0ad627fc3bdf..5fbe5fb63bd7 100644 --- a/test/files/neg/t7259.check +++ b/test/files/neg/t7259.check @@ -4,4 +4,4 @@ t7259.scala:1: error: not found: type xxxxx t7259.scala:8: error: type xxxxx is not a member of package annotation @annotation.xxxxx // error: not found: type scala ^ -two errors found +2 errors diff --git a/test/files/neg/t7285.check b/test/files/neg/t7285.check index 468ee90f69b5..cf728ac20dd8 100644 --- a/test/files/neg/t7285.check +++ b/test/files/neg/t7285.check @@ -1,15 +1,15 @@ -t7285.scala:16: warning: match may not be exhaustive. +t7285.scala:17: warning: match may not be exhaustive. It would fail on the following input: (Up, Down) (d1, d2) match { ^ -t7285.scala:34: warning: match may not be exhaustive. +t7285.scala:35: warning: match may not be exhaustive. It would fail on the following input: Down (d1) match { ^ -t7285.scala:52: warning: match may not be exhaustive. +t7285.scala:53: warning: match may not be exhaustive. It would fail on the following input: (Up, Down) (d1, d2) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t7285.scala b/test/files/neg/t7285.scala index 46cd98cfc36b..9204c1ded393 100644 --- a/test/files/neg/t7285.scala +++ b/test/files/neg/t7285.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed abstract class Base @@ -12,7 +13,7 @@ object Test1 { case object Up extends Base { } - (d1: Base, d2: Base) => + val test = (d1: Base, d2: Base) => (d1, d2) match { case (Up, Up) | (Down, Down) => false case (Down, Up) => true @@ -30,7 +31,7 @@ object Test2 { case object Up extends Base { } - (d1: Base, d2: Base) => + val test = (d1: Base, d2: Base) => (d1) match { case Test2.Base.Up => false } @@ -48,7 +49,7 @@ object Test4 { } import Test4.Base._ - (d1: Base, d2: Base) => + val test = (d1: Base, d2: Base) => (d1, d2) match { case (Up, Up) | (Down, Down) => false case (Down, Test4.Base.Up) => true diff --git a/test/files/neg/t7289.check b/test/files/neg/t7289.check index e4aeebbc6c1c..05dad641b93c 100644 --- a/test/files/neg/t7289.check +++ b/test/files/neg/t7289.check @@ -1,4 +1,4 @@ -t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[scala.collection.immutable.Nil.type] +t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[Nil.type] implicitly[Schtroumpf[Nil.type]] ^ -one error found +1 error diff --git a/test/files/neg/t7289.scala b/test/files/neg/t7289.scala index f4ed3daf76d8..ad9340055cd6 100644 --- a/test/files/neg/t7289.scala +++ b/test/files/neg/t7289.scala @@ -36,4 +36,4 @@ isStrictlyMoreSpecific(info1, info2) [ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing ) [ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=Int ) [ setInst] =?Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?Int ) -*/ \ No newline at end of file +*/ diff --git a/test/files/neg/t7289_status_quo.check b/test/files/neg/t7289_status_quo.check 
index 31c072e969fd..ca3c0124f001 100644 --- a/test/files/neg/t7289_status_quo.check +++ b/test/files/neg/t7289_status_quo.check @@ -5,8 +5,8 @@ t7289_status_quo.scala:11: error: could not find implicit value for parameter e: implicitly[Ext[List[List[List[Int]]]]] // fails - not found ^ t7289_status_quo.scala:15: error: ambiguous implicit values: - both method f in object Test1 of type [A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Test1.Ext[A])Test1.Ext[Coll] - and value m in object Test1 of type => Test1.Ext[List[List[Int]]] + both method f in object Test1 of type [A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Test1.Ext[A]): Test1.Ext[Coll] + and value m in object Test1 of type Test1.Ext[List[List[Int]]] match expected type Test1.Ext[_ <: List[List[Int]]] implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous ^ @@ -19,4 +19,4 @@ t7289_status_quo.scala:21: error: could not find implicit value for parameter e: t7289_status_quo.scala:22: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[List[Int]]]] implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found ^ -6 errors found +6 errors diff --git a/test/files/neg/t7289_status_quo.scala b/test/files/neg/t7289_status_quo.scala index 39621429a1b5..e1f9a9f8aa3e 100644 --- a/test/files/neg/t7289_status_quo.scala +++ b/test/files/neg/t7289_status_quo.scala @@ -20,4 +20,4 @@ object Test1 { implicitly[ExtCov[List[Int]]] // fails - not found implicitly[ExtCov[List[List[Int]]]] // fails - not found implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found -} \ No newline at end of file +} diff --git a/test/files/neg/t729.check b/test/files/neg/t729.check index fb858dc09a9e..6662f4bfc147 100644 --- a/test/files/neg/t729.check +++ b/test/files/neg/t729.check @@ -3,4 +3,11 @@ t729.scala:20: error: type mismatch; required: ScalaParserAutoEdit.this.NodeImpl(in trait ScalaParserAutoEdit) val yyy : NodeImpl = link.from; ^ -one error found +t729.scala:3: warning: Implicit 
definition should have explicit type (inferred Parser.this.Node) [quickfixable] + implicit def coerce(n : NodeImpl) = n.self; + ^ +t729.scala:14: warning: Implicit definition should have explicit type (inferred ScalaParserAutoEdit.this.Node) [quickfixable] + implicit def coerce(node : NodeImpl) = node.self; + ^ +2 warnings +1 error diff --git a/test/files/neg/t7290.check b/test/files/neg/t7290.check index e7fd30c56ef3..812febb8e951 100644 --- a/test/files/neg/t7290.check +++ b/test/files/neg/t7290.check @@ -1,12 +1,12 @@ -t7290.scala:5: warning: Pattern contains duplicate alternatives: 0 +t7290.scala:6: warning: Pattern contains duplicate alternatives: 0 case 0 | 0 => 0 ^ -t7290.scala:6: warning: Pattern contains duplicate alternatives: 2, 3 +t7290.scala:7: warning: Pattern contains duplicate alternatives: 2, 3 case 2 | 2 | 2 | 3 | 2 | 3 => 0 ^ -t7290.scala:7: warning: Pattern contains duplicate alternatives: 4 +t7290.scala:8: warning: Pattern contains duplicate alternatives: 4 case 4 | (_ @ 4) => 0 ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t7290.scala b/test/files/neg/t7290.scala index 8bf0ae9c866d..1d3c774119af 100644 --- a/test/files/neg/t7290.scala +++ b/test/files/neg/t7290.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test extends App { val y = (0: Int) match { case 1 => 1 diff --git a/test/files/neg/t7292-deprecation.check b/test/files/neg/t7292-deprecation.check deleted file mode 100644 index 4122f3b9a9a7..000000000000 --- a/test/files/neg/t7292-deprecation.check +++ /dev/null @@ -1,12 +0,0 @@ -t7292-deprecation.scala:3: warning: Octal escape literals are deprecated, use \u0000 instead. - val chr1 = '\0' - ^ -t7292-deprecation.scala:4: warning: Octal escape literals are deprecated, use \u0053 instead. 
- val str1 = "abc\123456" - ^ -t7292-deprecation.scala:5: warning: Octal escape literals are deprecated, use \n instead. - val lf = '\012' - ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found diff --git a/test/files/neg/t7292-deprecation.scala b/test/files/neg/t7292-deprecation.scala deleted file mode 100644 index 9b096948d253..000000000000 --- a/test/files/neg/t7292-deprecation.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Xfatal-warnings -deprecation -object OctalEscapes { - val chr1 = '\0' - val str1 = "abc\123456" - val lf = '\012' -} diff --git a/test/files/neg/t7292-removal.check b/test/files/neg/t7292-removal.check index ce8a75a59f38..de714daf0c57 100644 --- a/test/files/neg/t7292-removal.check +++ b/test/files/neg/t7292-removal.check @@ -1,10 +1,10 @@ -t7292-removal.scala:3: error: Octal escape literals are unsupported, use \u0000 instead. +t7292-removal.scala:2: error: octal escape literals are unsupported: use \u0000 instead val chr1 = '\0' ^ -t7292-removal.scala:4: error: Octal escape literals are unsupported, use \u0053 instead. +t7292-removal.scala:3: error: octal escape literals are unsupported: use \u0053 instead val str1 = "abc\123456" ^ -t7292-removal.scala:5: error: Octal escape literals are unsupported, use \n instead. 
+t7292-removal.scala:4: error: octal escape literals are unsupported: use \n instead val lf = '\012' ^ -three errors found +3 errors diff --git a/test/files/neg/t7292-removal.scala b/test/files/neg/t7292-removal.scala index 6e86eb643b44..d857f0e1ecea 100644 --- a/test/files/neg/t7292-removal.scala +++ b/test/files/neg/t7292-removal.scala @@ -1,4 +1,3 @@ -// scalac: -Xfuture object OctalEscapes { val chr1 = '\0' val str1 = "abc\123456" diff --git a/test/files/neg/t7294.check b/test/files/neg/t7294.check index 1a38e2555cb4..7daf2ec6ec8b 100644 --- a/test/files/neg/t7294.check +++ b/test/files/neg/t7294.check @@ -3,4 +3,4 @@ t7294.scala:4: error: pattern type is incompatible with expected type; required: (Int, Int) (1, 2) match { case Seq() => 0; case _ => 1 } ^ -one error found +1 error diff --git a/test/files/neg/t7294.scala b/test/files/neg/t7294.scala index 335d0711245d..ed8f46749908 100644 --- a/test/files/neg/t7294.scala +++ b/test/files/neg/t7294.scala @@ -1,5 +1,5 @@ object Test { - // Treat TupleN as final under -Xfuture for the for the purposes - // of the "fruitless type test" warning. + // TupleN is final now, so we get a + // "fruitless type test" warning. 
(1, 2) match { case Seq() => 0; case _ => 1 } } diff --git a/test/files/neg/t7294b.check b/test/files/neg/t7294b.check index 3390cb72787f..6168eb180cba 100644 --- a/test/files/neg/t7294b.check +++ b/test/files/neg/t7294b.check @@ -1,4 +1,4 @@ t7294b.scala:1: error: illegal inheritance from final class Tuple2 class C extends Tuple2[Int, Int](0, 0) ^ -one error found +1 error diff --git a/test/files/neg/t7294b.scala b/test/files/neg/t7294b.scala index 2ab86a80584a..deeed4fc79ea 100644 --- a/test/files/neg/t7294b.scala +++ b/test/files/neg/t7294b.scala @@ -1 +1 @@ -class C extends Tuple2[Int, Int](0, 0) \ No newline at end of file +class C extends Tuple2[Int, Int](0, 0) diff --git a/test/files/neg/t7299.check b/test/files/neg/t7299.check index 74340c4841ed..fae0759421c4 100644 --- a/test/files/neg/t7299.check +++ b/test/files/neg/t7299.check @@ -1,7 +1,7 @@ -t7299.scala:4: error: implementation restricts functions to 22 parameters +t7299.scala:4: error: functions may not have more than 22 parameters; method f cannot be eta-expanded because it takes 23 arguments val eta1 = f _ ^ -t7299.scala:5: error: implementation restricts functions to 22 parameters +t7299.scala:5: error: functions may not have more than 22 parameters; method g cannot be eta-expanded because it takes 23 arguments val eta2 = g[Any] _ ^ -two errors found +2 errors diff --git a/test/files/neg/t7324.check b/test/files/neg/t7324.check deleted file mode 100644 index 586947d5e708..000000000000 --- a/test/files/neg/t7324.check +++ /dev/null @@ -1,4 +0,0 @@ -t7324.scala:2: error: Platform restriction: a parameter list's length cannot exceed 254. 
-class Bar( - ^ -one error found diff --git a/test/files/neg/t7324.scala b/test/files/neg/t7324.scala deleted file mode 100644 index 81d7674d6822..000000000000 --- a/test/files/neg/t7324.scala +++ /dev/null @@ -1,57 +0,0 @@ -object Bar extends App -class Bar( -_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int, -_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int, -_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int, -_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int, -_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int, -_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int, -_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int, -_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int, -_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int, -_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int, -_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int, -_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int, -_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int, -_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int, -_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int, -_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int, -_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, 
_167: Int, _168: Int, _169: Int, _170: Int, -_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int, -_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int, -_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int, -_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int, -_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int, -_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int, -_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int, -_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int, -_251: Int, _252: Int, _253: Int, _254: Int, _255: Int -) - -class BarOK( -_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int, -_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int, -_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int, -_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int, -_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int, -_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int, -_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int, -_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int, -_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int, -_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, 
_100: Int, -_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int, -_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int, -_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int, -_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int, -_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int, -_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int, -_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int, -_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int, -_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int, -_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int, -_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int, -_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int, -_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int, -_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int, -_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int, -_251: Int, _252: Int, _253: Int, _254: Int) diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check index 61c33f99b154..2e0fbbd90307 100644 --- a/test/files/neg/t7325.check +++ b/test/files/neg/t7325.check @@ -16,4 +16,4 @@ t7325.scala:19: error: conversions must follow a splice; use %% for literal %, % 
t7325.scala:21: error: conversions must follow a splice; use %% for literal %, %n for newline println(f"${0}%%%%%d") ^ -6 errors found +6 errors diff --git a/test/files/neg/t7325.scala b/test/files/neg/t7325.scala index adfd8dd47a0f..45a726d705a1 100644 --- a/test/files/neg/t7325.scala +++ b/test/files/neg/t7325.scala @@ -22,4 +22,4 @@ object Test extends App { println(f"${0}%n") println(f"${0}%d%n") -} \ No newline at end of file +} diff --git a/test/files/neg/t7330.check b/test/files/neg/t7330.check index b96d656d2bb6..f3c8e874502c 100644 --- a/test/files/neg/t7330.check +++ b/test/files/neg/t7330.check @@ -2,4 +2,4 @@ t7330.scala:4: error: pattern must be a value: Y[_] Note: if you intended to match against the class, try `case _: Y[_]` 0 match { case Y[_] => } ^ -one error found +1 error diff --git a/test/files/neg/t7330.scala b/test/files/neg/t7330.scala index 13a943a02bbe..2d1660fad1ec 100644 --- a/test/files/neg/t7330.scala +++ b/test/files/neg/t7330.scala @@ -2,4 +2,4 @@ class Y[T] class Test { // TypeTree is not a valid tree for a pattern 0 match { case Y[_] => } -} \ No newline at end of file +} diff --git a/test/files/neg/t7369.check b/test/files/neg/t7369.check index 19c2aaadb1b8..be32375aba31 100644 --- a/test/files/neg/t7369.check +++ b/test/files/neg/t7369.check @@ -1,15 +1,15 @@ -t7369.scala:7: warning: unreachable code +t7369.scala:8: warning: unreachable code case Tuple1(X) => // unreachable ^ -t7369.scala:14: warning: unreachable code +t7369.scala:15: warning: unreachable code case Tuple1(true) => // unreachable ^ -t7369.scala:32: warning: unreachable code +t7369.scala:33: warning: unreachable code case Tuple1(X) => // unreachable ^ -t7369.scala:41: warning: unreachable code +t7369.scala:42: warning: unreachable code case Tuple1(null) => // unreachable ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t7369.scala b/test/files/neg/t7369.scala index d92b4ef2dcc4..91ba91ffee3b 100644 --- a/test/files/neg/t7369.scala +++ b/test/files/neg/t7369.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { val X, Y = true (null: Tuple1[Boolean]) match { diff --git a/test/files/neg/t7385.check b/test/files/neg/t7385.check index 70d3c3fb61ae..db6781c4ea42 100644 --- a/test/files/neg/t7385.check +++ b/test/files/neg/t7385.check @@ -7,4 +7,4 @@ t7385.scala:6: error: '(' expected but identifier found. t7385.scala:7: error: illegal start of simple expression } ^ -three errors found +3 errors diff --git a/test/files/neg/t7386.check b/test/files/neg/t7386.check new file mode 100644 index 000000000000..7ec8c26bf5d0 --- /dev/null +++ b/test/files/neg/t7386.check @@ -0,0 +1,7 @@ +MyScalaClass.scala:3: error: value myStaticBaseMethod is not a member of object MyClass +did you mean myStaticBaseMethead? +did you mean MyClassBase.myStaticBaseMethod? Static Java members belong to companion objects in Scala; +they are not inherited, even by subclasses defined in Java. + MyClass.myStaticBaseMethod() //no compiles. Wrong!!! 
+ ^ +1 error diff --git a/test/files/neg/t7386/MyClass.java b/test/files/neg/t7386/MyClass.java new file mode 100644 index 000000000000..8979a666eb7c --- /dev/null +++ b/test/files/neg/t7386/MyClass.java @@ -0,0 +1,4 @@ + +public class MyClass extends MyClassBase { + public static void myStaticBaseMethead() {} +} diff --git a/test/files/neg/t7386/MyClassBase.java b/test/files/neg/t7386/MyClassBase.java new file mode 100644 index 000000000000..f64639cf011e --- /dev/null +++ b/test/files/neg/t7386/MyClassBase.java @@ -0,0 +1,4 @@ + +abstract class MyClassBase { + public static void myStaticBaseMethod() {} +} diff --git a/test/files/neg/t7386/MyClient.java b/test/files/neg/t7386/MyClient.java new file mode 100644 index 000000000000..21e42df3ecb8 --- /dev/null +++ b/test/files/neg/t7386/MyClient.java @@ -0,0 +1,4 @@ + +public class MyClient { + public static void f() { MyClass.myStaticBaseMethod(); } +} diff --git a/test/files/neg/t7386/MyScalaClass.scala b/test/files/neg/t7386/MyScalaClass.scala new file mode 100644 index 000000000000..db0a77cc9a4a --- /dev/null +++ b/test/files/neg/t7386/MyScalaClass.scala @@ -0,0 +1,4 @@ +class MyScalaClass { + MyClassBase.myStaticBaseMethod() //compiles + MyClass.myStaticBaseMethod() //no compiles. Wrong!!! +} diff --git a/test/files/neg/t7388.check b/test/files/neg/t7388.check index 0a29e04896de..cbebe393ca37 100644 --- a/test/files/neg/t7388.check +++ b/test/files/neg/t7388.check @@ -1,4 +1,4 @@ t7388.scala:1: error: doesnotexist is not an enclosing class class Test private[doesnotexist]() ^ -one error found +1 error diff --git a/test/files/neg/t7415.check b/test/files/neg/t7415.check new file mode 100644 index 000000000000..bc0a3d1b9023 --- /dev/null +++ b/test/files/neg/t7415.check @@ -0,0 +1,38 @@ +t7415.scala:10: warning: Calls to parameterless method foo will be easy to mistake for calls to def foo(implicit a: T): Int, which has a single implicit parameter list. 
+ def foo = 0 // warn + ^ +t7415.scala:14: warning: Usages of value foo will be easy to mistake for calls to def foo(implicit a: T): Int, which has a single implicit parameter list. + val foo = 0 // warn + ^ +t7415.scala:18: warning: Usages of value foo will be easy to mistake for calls to def foo(implicit a: T): Int, which has a single implicit parameter list. + private[this] val foo = 42 // warn + ^ +t7415.scala:31: warning: Calls to parameterless method foo will be easy to mistake for calls to def foo(implicit a: T): Int, which has a single implicit parameter list. +class Mixed extends Base with T1 // warn here + ^ +t7415.scala:41: warning: Usages of value foo will be easy to mistake for calls to overloads which have a single implicit parameter list: + def foo(implicit e: String): Int + def foo(implicit e: Int): Int + val foo = 0 // warn + ^ +t7415.scala:54: warning: Usages of value x will be easy to mistake for calls to def x(implicit t: T): Int, which has a single implicit parameter list. + def x(implicit t: T) = 27 // warn + ^ +t7415.scala:65: warning: Usages of value i will be easy to mistake for calls to def i(implicit t: T): Int, which has a single implicit parameter list. +class R(val i: Int) extends Q // warn + ^ +t7415.scala:66: warning: Usages of value i will be easy to mistake for calls to def i(implicit t: T): Int, which has a single implicit parameter list. +class S(i: Int) extends R(i) { // warn + ^ +t7415.scala:66: warning: Usages of value i will be easy to mistake for calls to def i(implicit t: T): Int, which has a single implicit parameter list. +class S(i: Int) extends R(i) { // warn + ^ +t7415.scala:76: warning: Calls to parameterless method f will be easy to mistake for calls to def f[A](implicit t: T): Int, which has a single implicit parameter list. 
+ def f[A] = 27 // warn + ^ +t7415.scala:82: warning: Calls to parameterless method foo will be easy to mistake for calls to def foo(implicit a: T): Int, which has a single implicit parameter list. + val d1 = new Derived1 {} // warn + ^ +error: No warnings can be incurred under -Werror. +11 warnings +1 error diff --git a/test/files/neg/t7415.scala b/test/files/neg/t7415.scala new file mode 100644 index 000000000000..b36a514388e6 --- /dev/null +++ b/test/files/neg/t7415.scala @@ -0,0 +1,88 @@ +//> using options -Werror -Xlint:overload + +trait T + +trait Base { + def foo(implicit a: T) = 0 +} + +trait Derived1 extends Base { + def foo = 0 // warn +} + +trait Derived2 extends Base { + val foo = 0 // warn +} + +class C extends Base { + private[this] val foo = 42 // warn +} + +/* private local cannot directly conflict +class C2 extends Derived2 { + private[this] val foo = 42 // weaker access privileges in overriding +} +*/ + +trait T1 { + def foo = 0 +} + +class Mixed extends Base with T1 // warn here + +class D { + def foo(a: List[Int])(implicit d: DummyImplicit) = 0 + def foo(a: List[String]) = 1 +} + +class CleverLukas { + def foo(implicit e: String) = 1 + def foo(implicit e: Int) = 2 + val foo = 0 // warn +} + +class MoreInspiration { + def foo(implicit a: T) = 0 + def foo() = 1 // has parens but Scala 2 allows `foo` with adaptation +} + +class X { + val x = 42 +} + +class Y extends X { + def x(implicit t: T) = 27 // warn +} + +class J(val i: Int) +class K(i: Int) extends J(i) { // no warn local i shadows member i that is not implicit method + def f = i +} + +class Q { + def i(implicit t: T) = 42 +} +class R(val i: Int) extends Q // warn +class S(i: Int) extends R(i) { // warn + def f = i +} + +trait PBase { + def f[A](implicit t: T) = 42 + def g[A](s: String) = s.toInt +} + +trait PDerived extends PBase { + def f[A] = 27 // warn + def g[A] = f[A] // no warn +} + +object Test extends App { + implicit val t: T = new T {} + val d1 = new Derived1 {} // warn + 
println(d1.foo) // ! + val more = new MoreInspiration + println(more.foo) // ? + val y = new Y + println(y.x) // you have been warned! +} diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check index d35571544238..0f3a8dc9297d 100644 --- a/test/files/neg/t742.check +++ b/test/files/neg/t742.check @@ -1,6 +1,6 @@ t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z). Crash._1's type parameters do not match type m's expected parameters: -type s1 has one type parameter, but type n has two +type s1 has 1 type parameter, but type n has 2 type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion ^ -one error found +1 error diff --git a/test/files/neg/t742.scala b/test/files/neg/t742.scala index bb1c2f85ead9..cc0d7063c7ab 100644 --- a/test/files/neg/t742.scala +++ b/test/files/neg/t742.scala @@ -5,4 +5,4 @@ object Crash { type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion // _1[_2, Zero] // _2[Zero] -} \ No newline at end of file +} diff --git a/test/files/neg/t7473.check b/test/files/neg/t7473.check index bc8c29d46377..47d20354e6a7 100644 --- a/test/files/neg/t7473.check +++ b/test/files/neg/t7473.check @@ -4,4 +4,4 @@ t7473.scala:6: error: '<-' expected but '=' found. t7473.scala:6: error: illegal start of simple expression (for (x = Option(i); if x == j) yield 42) toList ^ -two errors found +2 errors diff --git a/test/files/neg/t7475c.check b/test/files/neg/t7475c.check index 472808131ac5..8feafffebce3 100644 --- a/test/files/neg/t7475c.check +++ b/test/files/neg/t7475c.check @@ -4,4 +4,4 @@ t7475c.scala:6: error: value a is not a member of A.this.B t7475c.scala:7: error: value b is not a member of A.this.B println(this.b) // wait, what? 
^ -two errors found +2 errors diff --git a/test/files/neg/t7475e.check b/test/files/neg/t7475e.check index 48af2be51a09..7ae3f6933920 100644 --- a/test/files/neg/t7475e.check +++ b/test/files/neg/t7475e.check @@ -1,4 +1,4 @@ t7475e.scala:8: error: value priv is not a member of Base.this.TT (??? : TT).priv ^ -one error found +1 error diff --git a/test/files/neg/t7475f.check b/test/files/neg/t7475f.check index a07a4480e224..ad16bb89e564 100644 --- a/test/files/neg/t7475f.check +++ b/test/files/neg/t7475f.check @@ -1,10 +1,11 @@ -t7475f.scala:12: error: method c1 in class C cannot be accessed in C[T] +t7475f.scala:12: error: method c1 in class C cannot be accessed as a member of C[T] from trait D c1 // a member, but inaccessible. ^ t7475f.scala:13: error: not found: value c2 c2 // a member, but inaccessible. ^ t7475f.scala:26: error: value d2 is not a member of D[Any] +did you mean d1? other.d2 // not a member ^ -three errors found +3 errors diff --git a/test/files/neg/t7475f.scala b/test/files/neg/t7475f.scala index 6c5feadf198c..eaac4a2e1de3 100644 --- a/test/files/neg/t7475f.scala +++ b/test/files/neg/t7475f.scala @@ -21,7 +21,7 @@ trait D[T] { d2 } - def x(other: D[Any]) { + def x(other: D[Any]): Unit = { other.d1 other.d2 // not a member } diff --git a/test/files/neg/t7494-after-terminal.check b/test/files/neg/t7494-after-terminal.check index 096efe09cded..efdf150eb06e 100644 --- a/test/files/neg/t7494-after-terminal.check +++ b/test/files/neg/t7494-after-terminal.check @@ -1,2 +1 @@ -error: [phase assembly, after dependency on terminal phase not allowed: afterterminal => terminal] -one error found +fatal error: Phases form a cycle: terminal -> afterterminal -> terminal diff --git a/test/files/neg/t7494-after-terminal/ThePlugin.scala b/test/files/neg/t7494-after-terminal/ThePlugin.scala index f3c913086e60..e8de8132ec42 100644 --- a/test/files/neg/t7494-after-terminal/ThePlugin.scala +++ b/test/files/neg/t7494-after-terminal/ThePlugin.scala @@ -25,7 +25,7 @@ class 
ThePlugin(val global: Global) extends Plugin { private class ThePhase(prev: Phase) extends Phase(prev) { def name = ThePlugin.this.name - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7494-after-terminal/sample_2.scala b/test/files/neg/t7494-after-terminal/sample_2.scala index c0a457edbaca..d2583c1b5524 100644 --- a/test/files/neg/t7494-after-terminal/sample_2.scala +++ b/test/files/neg/t7494-after-terminal/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:afterterminal - +//> using options -Xplugin:. -Xplugin-require:afterterminal package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7494-before-parser.check b/test/files/neg/t7494-before-parser.check index 9a407923b1dd..f2b944d252f1 100644 --- a/test/files/neg/t7494-before-parser.check +++ b/test/files/neg/t7494-before-parser.check @@ -1,2 +1,7 @@ -error: [phase assembly, before dependency on parser phase not allowed: parser => beforeparser] -one error found +warning: Dropping phase beforeparser, it is not reachable from parser +sample_2.scala:8: error: type mismatch; + found : String("") + required: Int + def f: Int = "" + ^ +1 error diff --git a/test/files/neg/t7494-before-parser/ThePlugin.scala b/test/files/neg/t7494-before-parser/ThePlugin.scala index 8714a55dc4fa..bb335c9c9dcd 100644 --- a/test/files/neg/t7494-before-parser/ThePlugin.scala +++ b/test/files/neg/t7494-before-parser/ThePlugin.scala @@ -26,7 +26,7 @@ class ThePlugin(val global: Global) extends Plugin { private class ThePhase(prev: Phase) extends Phase(prev) { def name = ThePlugin.this.name - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7494-before-parser/sample_2.scala b/test/files/neg/t7494-before-parser/sample_2.scala index 0b6ab3526a82..0eae2ded199c 100644 --- a/test/files/neg/t7494-before-parser/sample_2.scala +++ b/test/files/neg/t7494-before-parser/sample_2.scala @@ -1,7 +1,9 @@ -// scalac: -Xplugin:. 
-Xplugin-require:beforeparser - +//> using options -Xplugin:. -Xplugin-require:beforeparser package sample // just a sample that is compiled with the sample plugin enabled object Sample extends App { + // because `-Werror` doesn't work; after phase assembly warnings are issued, + // Run.compileUnits resets the reporter (and its warning count) + def f: Int = "" } diff --git a/test/files/neg/t7494-multi-right-after.check b/test/files/neg/t7494-multi-right-after.check index 151d17741484..b5dd2b8f71d1 100644 --- a/test/files/neg/t7494-multi-right-after.check +++ b/test/files/neg/t7494-multi-right-after.check @@ -1 +1 @@ -error: Multiple phases want to run right after explicitouter; followers: erasure,multi-rafter; created phase-order.dot +fatal error: Phases multi-rafter and erasure both immediately follow explicitouter diff --git a/test/files/neg/t7494-multi-right-after/ThePlugin.scala b/test/files/neg/t7494-multi-right-after/ThePlugin.scala index 4c761517c1fc..cb9e86e90675 100644 --- a/test/files/neg/t7494-multi-right-after/ThePlugin.scala +++ b/test/files/neg/t7494-multi-right-after/ThePlugin.scala @@ -25,7 +25,7 @@ class ThePlugin(val global: Global) extends Plugin { private class ThePhase(prev: Phase) extends Phase(prev) { def name = ThePlugin.this.name - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7494-multi-right-after/sample_2.scala b/test/files/neg/t7494-multi-right-after/sample_2.scala index 6bf3bfdbefa2..2c9cfa861a9b 100644 --- a/test/files/neg/t7494-multi-right-after/sample_2.scala +++ b/test/files/neg/t7494-multi-right-after/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:multi-rafter - +//> using options -Xplugin:. 
-Xplugin-require:multi-rafter package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check index 1bf5c2371197..9fd03ab0ba03 100644 --- a/test/files/neg/t7494-no-options.check +++ b/test/files/neg/t7494-no-options.check @@ -5,11 +5,11 @@ error: Error: ploogin takes no options namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t7494-no-options/ploogin_1.scala b/test/files/neg/t7494-no-options/ploogin_1.scala index ed6adfc1cf97..dbf433f9a41d 100644 --- a/test/files/neg/t7494-no-options/ploogin_1.scala +++ b/test/files/neg/t7494-no-options/ploogin_1.scala @@ -23,7 +23,7 @@ class Ploogin(val global: Global) extends Plugin { def newPhase(prev: Phase) = new TestPhase(prev) class TestPhase(prev: Phase) extends StdPhase(prev) { override def description = TestComponent.this.description - def apply(unit: CompilationUnit) { + def apply(unit: CompilationUnit): Unit = { // kewl kode } } diff --git a/test/files/neg/t7494-no-options/sample_2.scala b/test/files/neg/t7494-no-options/sample_2.scala index 46dfe4ee1ec9..47a206b7b3de 100644 --- 
a/test/files/neg/t7494-no-options/sample_2.scala +++ b/test/files/neg/t7494-no-options/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xshow-phases -P:ploogin:inploog - +//> using options -Xplugin:. -Vphases -P:ploogin:inploog package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7494-right-after-before.check b/test/files/neg/t7494-right-after-before.check index 7e83daab4ade..695073496674 100644 --- a/test/files/neg/t7494-right-after-before.check +++ b/test/files/neg/t7494-right-after-before.check @@ -1 +1,27 @@ -error: Phase erasure can't follow explicitouter, created phase-order.dot + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees + packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions +rafter-before-1 10 hey it works + uncurry 11 uncurry, translate function values to anonymous classes + fields 12 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 13 replace tail calls by jumps + specialize 14 @specialized-driven class and method specialization + explicitouter 15 this refs to outer pointers + erasure 16 erase types, add interfaces for traits + posterasure 17 clean up erased inline classes + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + cleanup 22 platform-specific cleanups, generate reflective calls + delambdafy 23 remove lambdas + jvm 24 generate JVM bytecode + terminal 25 the last phase during a compilation run diff --git 
a/test/files/neg/t7494-right-after-before/ThePlugin.scala b/test/files/neg/t7494-right-after-before/ThePlugin.scala index c42a9140665c..cc4670b2fe66 100644 --- a/test/files/neg/t7494-right-after-before/ThePlugin.scala +++ b/test/files/neg/t7494-right-after-before/ThePlugin.scala @@ -10,11 +10,12 @@ class ThePlugin(val global: Global) extends Plugin { import global._ val name = "rafter-before-1" - val description = "" + val description = "hey it works" val components = List[PluginComponent](thePhase1) private object thePhase1 extends PluginComponent { val global = ThePlugin.this.global + override def description = ThePlugin.this.description val runsAfter = List[String]("refchecks") override val runsBefore = List[String]("erasure") @@ -25,7 +26,7 @@ class ThePlugin(val global: Global) extends Plugin { private class ThePhase(prev: Phase) extends Phase(prev) { def name = ThePlugin.this.name - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7494-right-after-before/sample_2.scala b/test/files/neg/t7494-right-after-before/sample_2.scala index 46de4c345110..bc9a8d934c1f 100644 --- a/test/files/neg/t7494-right-after-before/sample_2.scala +++ b/test/files/neg/t7494-right-after-before/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:rafter-before-1 - +//> using options -Xplugin:. 
-Xplugin-require:rafter-before-1 -Vphases package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7494-right-after-terminal.check b/test/files/neg/t7494-right-after-terminal.check index 6fe4f63c823a..95a92ffe6010 100644 --- a/test/files/neg/t7494-right-after-terminal.check +++ b/test/files/neg/t7494-right-after-terminal.check @@ -1,2 +1 @@ -error: [phase assembly, right after dependency on terminal phase not allowed: rightafterterminal => terminal] -one error found +fatal error: Phases form a cycle: terminal -> rightafterterminal -> terminal diff --git a/test/files/neg/t7494-right-after-terminal/ThePlugin.scala b/test/files/neg/t7494-right-after-terminal/ThePlugin.scala index 47dd06ec8aac..7c4d084582de 100644 --- a/test/files/neg/t7494-right-after-terminal/ThePlugin.scala +++ b/test/files/neg/t7494-right-after-terminal/ThePlugin.scala @@ -26,7 +26,7 @@ class ThePlugin(val global: Global) extends Plugin { private class ThePhase(prev: Phase) extends Phase(prev) { def name = ThePlugin.this.name - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7494-right-after-terminal/sample_2.scala b/test/files/neg/t7494-right-after-terminal/sample_2.scala index b599ee4d2127..e0f367078bfa 100644 --- a/test/files/neg/t7494-right-after-terminal/sample_2.scala +++ b/test/files/neg/t7494-right-after-terminal/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:rightafterterminal - +//> using options -Xplugin:. -Xplugin-require:rightafterterminal package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t750.check b/test/files/neg/t750.check index c17ca334e663..589feae7d095 100644 --- a/test/files/neg/t750.check +++ b/test/files/neg/t750.check @@ -12,4 +12,4 @@ Note: Int >: Int, but class Array is invariant in type T. You may wish to investigate a wildcard type such as `_ >: Int`. 
(SLS 3.2.10) AO_1.f[Int](a) ^ -two errors found +2 errors diff --git a/test/files/neg/t7501.check b/test/files/neg/t7501.check index 2ded07c7ed81..273ed1e01f1a 100644 --- a/test/files/neg/t7501.check +++ b/test/files/neg/t7501.check @@ -4,4 +4,4 @@ t7501_2.scala:2: error: value name is not a member of A t7501_2.scala:4: error: not found: type X type TP = X // already failed before this fix ^ -two errors found +2 errors diff --git a/test/files/neg/t7507.check b/test/files/neg/t7507.check index de30fc705737..6d90d0d33ae0 100644 --- a/test/files/neg/t7507.check +++ b/test/files/neg/t7507.check @@ -1,4 +1,4 @@ t7507.scala:6: error: not found: value bippy locally(bippy) ^ -one error found +1 error diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check index eaa6303cf520..30a01e840b3d 100644 --- a/test/files/neg/t7509.check +++ b/test/files/neg/t7509.check @@ -6,7 +6,4 @@ t7509.scala:3: error: type mismatch; required: R crash(42) ^ -t7509.scala:3: error: could not find implicit value for parameter ev: R - crash(42) - ^ -three errors found +2 errors diff --git a/test/files/neg/t750b.check b/test/files/neg/t750b.check index 72a249191ebe..94909a572683 100644 --- a/test/files/neg/t750b.check +++ b/test/files/neg/t750b.check @@ -12,4 +12,4 @@ Note: Int >: Int, but class Array is invariant in type T. You may wish to investigate a wildcard type such as `_ >: Int`. 
(SLS 3.2.10) AO.f[Int](a) ^ -two errors found +2 errors diff --git a/test/files/neg/t7519-b.check b/test/files/neg/t7519-b.check index bc8500b2b875..2f982644993a 100644 --- a/test/files/neg/t7519-b.check +++ b/test/files/neg/t7519-b.check @@ -3,4 +3,4 @@ Use_2.scala:8: error: type mismatch; required: Q val x: Q = ex.Mac.mac("asdf") ^ -one error found +1 error diff --git a/test/files/neg/t7519.check b/test/files/neg/t7519.check index df54abaa3e23..499c38ab72f9 100644 --- a/test/files/neg/t7519.check +++ b/test/files/neg/t7519.check @@ -8,4 +8,4 @@ t7519.scala:15: error: type mismatch; required: String locally(0 : String) // was: "value conversion is not a member of U" ^ -two errors found +2 errors diff --git a/test/files/neg/t752.check b/test/files/neg/t752.check index a91bba46eaa5..9e2096e0334d 100644 --- a/test/files/neg/t752.check +++ b/test/files/neg/t752.check @@ -3,4 +3,4 @@ t752.scala:6: error: type mismatch; required: Int => Unit f(g _) ^ -one error found +1 error diff --git a/test/files/neg/t7530.check b/test/files/neg/t7530.check new file mode 100644 index 000000000000..06e7d516179e --- /dev/null +++ b/test/files/neg/t7530.check @@ -0,0 +1,12 @@ +t7530.scala:4: warning: private var j in class C is never updated: consider refactoring vars to a separate definition + private var (i, j) = init() + ^ +t7530.scala:11: warning: private var j in class D is never updated: consider refactoring vars to a separate definition + private var i, j = init() + ^ +t7530.scala:19: warning: private var j in class E is never updated: consider refactoring vars to a separate definition + private var K(i, j) = init() + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t7530.scala b/test/files/neg/t7530.scala new file mode 100644 index 000000000000..818a3528d171 --- /dev/null +++ b/test/files/neg/t7530.scala @@ -0,0 +1,24 @@ +//> using options -Werror -Xlint + +class C { + private var (i, j) = init() + + private def init() = (42, 27) + def f(): Unit = println(i+j) + def g(): Unit = i += 1 +} +class D { + private var i, j = init() + + private def init() = 42 + def f(): Unit = println(i+j) + def g(): Unit = i += 1 +} +case class K(i: Int, j: Int) +class E { + private var K(i, j) = init() + + private def init() = K(42, 27) + def f(): Unit = println(i+j) + def g(): Unit = i += 1 +} diff --git a/test/files/neg/t7602.check b/test/files/neg/t7602.check index 5ce3776790d8..b1ff68bab829 100644 --- a/test/files/neg/t7602.check +++ b/test/files/neg/t7602.check @@ -2,4 +2,4 @@ t7602.scala:16: error: method foo is defined twice; the conflicting method foo was defined at line 15:7 def foo : Device ^ -one error found +1 error diff --git a/test/files/neg/t7602.scala b/test/files/neg/t7602.scala index 5a9444a1abc3..07410ad67d54 100644 --- a/test/files/neg/t7602.scala +++ b/test/files/neg/t7602.scala @@ -23,4 +23,4 @@ Exception in thread "main" java.lang.AssertionError: assertion failed: List(meth at scala.reflect.internal.tpe.GlbLubs$$anonfun$23.apply(GlbLubs.scala:349) at scala.collection.immutable.List.map(List.scala:272) at scala.reflect.internal.tpe.GlbLubs$class.lubsym$1(GlbLubs.scala:349) -*/ \ No newline at end of file +*/ diff --git a/test/files/neg/t7605-deprecation.check b/test/files/neg/t7605-deprecation.check deleted file mode 100644 index d989472c1566..000000000000 --- a/test/files/neg/t7605-deprecation.check +++ /dev/null @@ -1,15 +0,0 @@ -t7605-deprecation.scala:3: warning: Procedure syntax is deprecated. Convert procedure `bar` to method by adding `: Unit =`. - def bar {} - ^ -t7605-deprecation.scala:4: warning: Procedure syntax is deprecated. 
Convert procedure `baz` to method by adding `: Unit`. - def baz - ^ -t7605-deprecation.scala:5: warning: Procedure syntax is deprecated. Convert procedure `boo` to method by adding `: Unit`. - def boo(i: Int, l: Long) - ^ -t7605-deprecation.scala:6: warning: Procedure syntax is deprecated. Convert procedure `boz` to method by adding `: Unit =`. - def boz(i: Int, l: Long) {} - ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found diff --git a/test/files/neg/t7605-deprecation.scala b/test/files/neg/t7605-deprecation.scala deleted file mode 100644 index 7c2ae3ada797..000000000000 --- a/test/files/neg/t7605-deprecation.scala +++ /dev/null @@ -1,9 +0,0 @@ -// scalac: -deprecation -Xfuture -Xfatal-warnings -abstract class Foo { - def bar {} - def baz - def boo(i: Int, l: Long) - def boz(i: Int, l: Long) {} - def this(i: Int) { this() } // Don't complain here! - def foz: Unit // Don't complain here! -} diff --git a/test/files/neg/t7622-cyclic-dependency.check b/test/files/neg/t7622-cyclic-dependency.check index 3546964f5f68..c824e07ecbd8 100644 --- a/test/files/neg/t7622-cyclic-dependency.check +++ b/test/files/neg/t7622-cyclic-dependency.check @@ -1 +1 @@ -error: Cycle in phase dependencies detected at cyclicdependency1, created phase-cycle.dot +fatal error: Phases form a cycle: cyclicdependency2 -> cyclicdependency1 -> cyclicdependency2 diff --git a/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala b/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala index 0734863e640c..b25ac741570c 100644 --- a/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala +++ b/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala @@ -34,7 +34,7 @@ class ThePlugin(val global: Global) extends Plugin { } private class ThePhase(prev: Phase, val name: String) extends Phase(prev) { - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7622-cyclic-dependency/sample_2.scala b/test/files/neg/t7622-cyclic-dependency/sample_2.scala 
index 6424f913758c..798bebc4a23e 100644 --- a/test/files/neg/t7622-cyclic-dependency/sample_2.scala +++ b/test/files/neg/t7622-cyclic-dependency/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:cyclicdependency - +//> using options -Xplugin:. -Xplugin-require:cyclicdependency package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7622-missing-dependency.check b/test/files/neg/t7622-missing-dependency.check index a0d0e308705a..5bbae133cb48 100644 --- a/test/files/neg/t7622-missing-dependency.check +++ b/test/files/neg/t7622-missing-dependency.check @@ -1,2 +1,2 @@ error: Phase 'myplugin' requires: List(missing) -one error found +1 error diff --git a/test/files/neg/t7622-missing-dependency/ThePlugin.scala b/test/files/neg/t7622-missing-dependency/ThePlugin.scala index fa634a64c1b7..0fcbc6309ad6 100644 --- a/test/files/neg/t7622-missing-dependency/ThePlugin.scala +++ b/test/files/neg/t7622-missing-dependency/ThePlugin.scala @@ -27,7 +27,7 @@ class ThePlugin(val global: Global) extends Plugin { private class ThePhase(prev: Phase) extends Phase(prev) { def name = thePhase.phaseName - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7622-missing-dependency/sample_2.scala b/test/files/neg/t7622-missing-dependency/sample_2.scala index cd0aaf83b451..9bff3616a847 100644 --- a/test/files/neg/t7622-missing-dependency/sample_2.scala +++ b/test/files/neg/t7622-missing-dependency/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:myplugin - +//> using options -Xplugin:. 
-Xplugin-require:myplugin package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7622-missing-required.check b/test/files/neg/t7622-missing-required.check index 5982178581c0..e02b8529bc01 100644 --- a/test/files/neg/t7622-missing-required.check +++ b/test/files/neg/t7622-missing-required.check @@ -1,2 +1,2 @@ error: Missing required plugin: special-plugin -one error found +1 error diff --git a/test/files/neg/t7622-missing-required.scala b/test/files/neg/t7622-missing-required.scala index 8974cce51dc1..76d381981d6e 100644 --- a/test/files/neg/t7622-missing-required.scala +++ b/test/files/neg/t7622-missing-required.scala @@ -1,4 +1,5 @@ -// scalac: -Xplugin-require:special-plugin +//> using options -Xplugin-require:special-plugin +// // the amazing features of this trait // are unlocked by compiling with a special plugin. diff --git a/test/files/neg/t7622-multi-followers.check b/test/files/neg/t7622-multi-followers.check index d123853a5b7b..82eb0ee03ff4 100644 --- a/test/files/neg/t7622-multi-followers.check +++ b/test/files/neg/t7622-multi-followers.check @@ -1 +1 @@ -error: Multiple phases want to run right after parser; followers: multi1,multi2; created phase-order.dot +fatal error: Phases multi1 and multi2 both immediately follow parser diff --git a/test/files/neg/t7622-multi-followers/ThePlugin.scala b/test/files/neg/t7622-multi-followers/ThePlugin.scala index cbd28d0c23df..36ee84f94a3f 100644 --- a/test/files/neg/t7622-multi-followers/ThePlugin.scala +++ b/test/files/neg/t7622-multi-followers/ThePlugin.scala @@ -38,7 +38,7 @@ class ThePlugin(val global: Global) extends Plugin { } private class ThePhase(prev: Phase, val name: String) extends Phase(prev) { - def run {} + def run: Unit = {} } } diff --git a/test/files/neg/t7622-multi-followers/sample_2.scala b/test/files/neg/t7622-multi-followers/sample_2.scala index 8732dc555196..941a6da13e39 100644 --- a/test/files/neg/t7622-multi-followers/sample_2.scala +++ 
b/test/files/neg/t7622-multi-followers/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:multi - +//> using options -Xplugin:. -Xplugin-require:multi package sample // just a sample that is compiled with the sample plugin enabled diff --git a/test/files/neg/t7623.check b/test/files/neg/t7623.check index 81afa76f3982..81db3577446b 100644 --- a/test/files/neg/t7623.check +++ b/test/files/neg/t7623.check @@ -1,15 +1,27 @@ -t7623.scala:22: warning: A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime. +t7623.scala:23: warning: A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime. def g = "" match { case X(s, t) => } // warn ^ -t7623.scala:24: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. +t7623.scala:25: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. def h = "" match { case X(s, t, u @ _*) => } // warn ^ -t7623.scala:12: warning: A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime. +t7623.scala:11: warning: match may not be exhaustive. +It would fail on the following input: C(_, _) + def f = C("") match { case C(s) => } + ^ +t7623.scala:13: warning: A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime. def g = C("") match { case C(s, t) => } // warn ^ -t7623.scala:14: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. +t7623.scala:13: warning: match may not be exhaustive. 
+It would fail on the following input: C(_, _) + def g = C("") match { case C(s, t) => } // warn + ^ +t7623.scala:15: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. def h = C("") match { case C(s, t, u @ _*) => } // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +t7623.scala:15: warning: match may not be exhaustive. +It would fail on the following input: C(_, _) + def h = C("") match { case C(s, t, u @ _*) => } // warn + ^ +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/t7623.scala b/test/files/neg/t7623.scala index 0c62321d7743..8fae4fcf13f5 100644 --- a/test/files/neg/t7623.scala +++ b/test/files/neg/t7623.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint:stars-align -Xfatal-warnings +//> using options -Xlint:stars-align -Xfatal-warnings +// case class C(s: String, xs: Int*) diff --git a/test/files/neg/t7629-view-bounds-deprecation.check b/test/files/neg/t7629-view-bounds-deprecation.check deleted file mode 100644 index 75ed84ac8616..000000000000 --- a/test/files/neg/t7629-view-bounds-deprecation.check +++ /dev/null @@ -1,11 +0,0 @@ -t7629-view-bounds-deprecation.scala:3: warning: View bounds are deprecated. Use an implicit parameter instead. -Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`. - def f[A <% Int](a: A) = null - ^ -t7629-view-bounds-deprecation.scala:4: warning: View bounds are deprecated. Use an implicit parameter instead. -Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`. - def g[C, B <: C, A <% B : Numeric](a: A) = null - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-two warnings found -one error found diff --git a/test/files/neg/t7629-view-bounds-deprecation.scala b/test/files/neg/t7629-view-bounds-deprecation.scala deleted file mode 100644 index 130bc97f78bb..000000000000 --- a/test/files/neg/t7629-view-bounds-deprecation.scala +++ /dev/null @@ -1,5 +0,0 @@ -// scalac: -deprecation -Xfatal-warnings -Xfuture -object Test { - def f[A <% Int](a: A) = null - def g[C, B <: C, A <% B : Numeric](a: A) = null -} diff --git a/test/files/neg/t7636.check b/test/files/neg/t7636.check index 12391cccc8ff..59f474ca90fb 100644 --- a/test/files/neg/t7636.check +++ b/test/files/neg/t7636.check @@ -1,10 +1,10 @@ t7636.scala:3: error: illegal inheritance; - self-type Main.C does not conform to Main.ResultTable[_$3]'s selftype Main.ResultTable[_$3] - class C extends ResultTable(Left(5):Either[_,_])(5) - ^ + self-type Main.bar.type does not conform to Main.Foo[T]'s selftype Main.Foo[T] + object bar extends Foo(5: T forSome { type T }) + ^ t7636.scala:3: error: type mismatch; - found : Either[_$2,_$3(in constructor C)] where type _$3(in constructor C), type _$2 - required: Either[_, _$3(in value )] where type _$3(in value ) - class C extends ResultTable(Left(5):Either[_,_])(5) - ^ -two errors found + found : T(in constructor bar) where type T(in constructor bar) + required: T(in value ) where type T(in value ) + object bar extends Foo(5: T forSome { type T }) + ^ +2 errors diff --git a/test/files/neg/t7636.scala b/test/files/neg/t7636.scala index 32dc18131350..27d4c060932f 100644 --- a/test/files/neg/t7636.scala +++ b/test/files/neg/t7636.scala @@ -1,7 +1,7 @@ -object Main extends App{ - class ResultTable[E]( query : Either[_,E] )( columns : Int ) - class C extends ResultTable(Left(5):Either[_,_])(5) +object Main extends App { + class Foo[A](x: A) + object bar extends Foo(5: T forSome { type T }) } -// Inference of the existential type for the parent type argument -// E still fails. That looks tricky to fix, see the comments in scala/bug#7636. 
-// But we at least prevent a cascading NPE. \ No newline at end of file +// Inference of the existential type for the parent type argument A still fails. +// That looks tricky to fix, see the comments in scala/bug#7636. +// But we at least prevent a cascading NPE. diff --git a/test/files/neg/t765.check b/test/files/neg/t765.check index 5a5f60325242..93a8433681e3 100644 --- a/test/files/neg/t765.check +++ b/test/files/neg/t765.check @@ -1,4 +1,4 @@ t765.scala:3: error: not found: type Bar123 val bar = new Bar123 ^ -one error found +1 error diff --git a/test/files/neg/t766.check b/test/files/neg/t766.check index 92039ed1ff48..d0d4e27d28f9 100644 --- a/test/files/neg/t766.check +++ b/test/files/neg/t766.check @@ -1,4 +1,4 @@ t766.scala:5: error: not found: value badIdentifier val p = badIdentifier ^ -one error found +1 error diff --git a/test/files/neg/t7669.check b/test/files/neg/t7669.check index 110c18a26570..114b24d69703 100644 --- a/test/files/neg/t7669.check +++ b/test/files/neg/t7669.check @@ -1,7 +1,7 @@ -t7669.scala:10: warning: match may not be exhaustive. +t7669.scala:11: warning: match may not be exhaustive. It would fail on the following input: NotHandled(_) def exhausto(expr: Expr): Unit = expr match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t7669.scala b/test/files/neg/t7669.scala index c791fdbc9021..2033b37b89a0 100644 --- a/test/files/neg/t7669.scala +++ b/test/files/neg/t7669.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { sealed abstract class Expr diff --git a/test/files/neg/t7686.check b/test/files/neg/t7686.check index 1e994d55a7b6..2374f42bb7ec 100644 --- a/test/files/neg/t7686.check +++ b/test/files/neg/t7686.check @@ -7,4 +7,4 @@ t7686.scala:11: error: No TypeTag available for Test.Co[_] t7686.scala:12: error: No TypeTag available for Test.Cn[_] t1[Cn]; t2[Cn]; t3[Cn]; t4[Cn] ^ -three errors found +3 errors diff --git a/test/files/neg/t7691.check b/test/files/neg/t7691.check new file mode 100644 index 000000000000..409762b4bbbf --- /dev/null +++ b/test/files/neg/t7691.check @@ -0,0 +1,53 @@ +t7691.scala:16: error: value _ is not a member of object X + def x = X.`_` // value _ is not a member of object X + ^ +t7691.scala:18: error: value _ is not a member of object Y + def y = Y.`_` // still not + ^ +t7691.scala:20: error: value _ is not a member of object Z + def z = Z.`_` // still not + ^ +t7691.scala:22: error: value _ is not a member of object V + def v = V.`_` // still not + ^ +t7691.scala:24: error: value _ is not a member of object W + def w = W.`_` // still not + ^ +t7691.scala:26: error: value _ is not a member of object Q + def q = Q.`_` // still not + ^ +t7691.scala:2: warning: Pattern definition introduces Unit-valued member of X; consider wrapping it in `locally { ... }`. +object X { val _ = 42 } + ^ +t7691.scala:4: warning: Pattern definition introduces Unit-valued member of Y; consider wrapping it in `locally { ... }`. +object Y { val (_, _) = (42, 17) } + ^ +t7691.scala:6: warning: Pattern definition introduces Unit-valued member of Z; consider wrapping it in `locally { ... }`. 
+object Z { val _ = 42 ; val _ = 17 } // was error + ^ +t7691.scala:6: warning: Pattern definition introduces Unit-valued member of Z; consider wrapping it in `locally { ... }`. +object Z { val _ = 42 ; val _ = 17 } // was error + ^ +t7691.scala:8: warning: Pattern definition introduces Unit-valued member of V; consider wrapping it in `locally { ... }`. +object V { val _, _, _ = println("hi") } // was error + ^ +t7691.scala:8: warning: Pattern definition introduces Unit-valued member of V; consider wrapping it in `locally { ... }`. +object V { val _, _, _ = println("hi") } // was error + ^ +t7691.scala:8: warning: Pattern definition introduces Unit-valued member of V; consider wrapping it in `locally { ... }`. +object V { val _, _, _ = println("hi") } // was error + ^ +t7691.scala:10: warning: Pattern definition introduces Unit-valued member of W; consider wrapping it in `locally { ... }`. +object W { val _: Int = 42 ; val _: Int = 17 } // was error + ^ +t7691.scala:10: warning: Pattern definition introduces Unit-valued member of W; consider wrapping it in `locally { ... }`. +object W { val _: Int = 42 ; val _: Int = 17 } // was error + ^ +t7691.scala:12: warning: Pattern definition introduces Unit-valued member of Q; consider wrapping it in `locally { ... }`. +object Q { val _ @ _ = 42 ; val _ = 17 } // was error + ^ +t7691.scala:12: warning: Pattern definition introduces Unit-valued member of Q; consider wrapping it in `locally { ... }`. 
+object Q { val _ @ _ = 42 ; val _ = 17 } // was error + ^ +11 warnings +6 errors diff --git a/test/files/neg/t7691.scala b/test/files/neg/t7691.scala new file mode 100644 index 000000000000..dfeafa01ccd5 --- /dev/null +++ b/test/files/neg/t7691.scala @@ -0,0 +1,36 @@ + +object X { val _ = 42 } + +object Y { val (_, _) = (42, 17) } + +object Z { val _ = 42 ; val _ = 17 } // was error + +object V { val _, _, _ = println("hi") } // was error + +object W { val _: Int = 42 ; val _: Int = 17 } // was error + +object Q { val _ @ _ = 42 ; val _ = 17 } // was error + + +object Test { + def x = X.`_` // value _ is not a member of object X + + def y = Y.`_` // still not + + def z = Z.`_` // still not + + def v = V.`_` // still not + + def w = W.`_` // still not + + def q = Q.`_` // still not + + def h = locally { + val _ = 42 + val _ = 17 // was error + } +} + +class C { + val `_` = 42 // was crashola, see t11374 +} diff --git a/test/files/neg/t771.check b/test/files/neg/t771.check index c0d1e002f8aa..ee0c55629650 100644 --- a/test/files/neg/t771.check +++ b/test/files/neg/t771.check @@ -1,4 +1,4 @@ t771.scala:4: error: trait Iterator is abstract; cannot be instantiated def c[A](it:java.util.Iterator[A]) = new scala.Iterator[A] ^ -one error found +1 error diff --git a/test/files/neg/t7715.check b/test/files/neg/t7715.check index 4ee6b6c95dec..bd7e521c5fa4 100644 --- a/test/files/neg/t7715.check +++ b/test/files/neg/t7715.check @@ -10,4 +10,4 @@ t7715.scala:17: error: unbound placeholder parameter t7715.scala:17: error: unbound placeholder parameter days zip days map s"${_: Int} by ${_: Int}".tupled foreach println ^ -four errors found +4 errors diff --git a/test/files/neg/t7721.check b/test/files/neg/t7721.check index d465a4600877..2fa50df39c8d 100644 --- a/test/files/neg/t7721.check +++ b/test/files/neg/t7721.check @@ -1,27 +1,45 @@ -t7721.scala:12: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure +t7721.scala:13: warning: abstract 
type pattern A.this.Foo is unchecked since it is eliminated by erasure case x: Foo with Concrete => x.bippy + x.conco ^ -t7721.scala:16: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure +t7721.scala:17: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure case x: Concrete with Foo => x.bippy + x.conco ^ -t7721.scala:20: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure +t7721.scala:21: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure case x: Foo with Bar => x.bippy + x.barry ^ -t7721.scala:20: warning: abstract type pattern A.this.Bar is unchecked since it is eliminated by erasure +t7721.scala:21: warning: abstract type pattern A.this.Bar is unchecked since it is eliminated by erasure case x: Foo with Bar => x.bippy + x.barry ^ -t7721.scala:40: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure +t7721.scala:41: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure case x: Foo with Concrete => x.bippy + x.dingo + x.conco ^ -t7721.scala:44: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure +t7721.scala:45: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure case x: Concrete with Foo => x.bippy + x.dingo + x.conco ^ -t7721.scala:48: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure +t7721.scala:49: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo ^ -t7721.scala:48: warning: abstract type pattern B.this.Bar is unchecked since it is eliminated by erasure +t7721.scala:49: warning: abstract type pattern B.this.Bar is unchecked since it is eliminated by erasure case x: Foo with Bar with Concrete => x.bippy + 
x.barry + x.dingo + x.conco + x.bongo ^ -error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found -one error found +t7721.scala:13: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Concrete => x.bippy + x.conco + ^ +t7721.scala:17: warning: The outer reference in this type test cannot be checked at run time. + case x: Concrete with Foo => x.bippy + x.conco + ^ +t7721.scala:21: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Bar => x.bippy + x.barry + ^ +t7721.scala:41: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Concrete => x.bippy + x.dingo + x.conco + ^ +t7721.scala:45: warning: The outer reference in this type test cannot be checked at run time. + case x: Concrete with Foo => x.bippy + x.dingo + x.conco + ^ +t7721.scala:49: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo + ^ +error: No warnings can be incurred under -Werror. 
+14 warnings +1 error diff --git a/test/files/neg/t7721.scala b/test/files/neg/t7721.scala index e6908238e2d5..7d1b40ad6450 100644 --- a/test/files/neg/t7721.scala +++ b/test/files/neg/t7721.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// import scala.language.reflectiveCalls trait A { @@ -70,7 +71,7 @@ object Test { } implicit def barTag: scala.reflect.ClassTag[Bar] = scala.reflect.ClassTag(classOf[Bar]) - def run() { + def run(): Unit = { println("f1") wrap(f1(new Concrete {})) wrap(f1(new Foo {})) @@ -127,12 +128,12 @@ object Test { object ao extends Base object bo extends Base with B - private def wrap(body: => Any) { + private def wrap(body: => Any): Unit = { try println(body) catch { case ex: NoSuchMethodException => println(ex) } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { ao.run() bo.run() } diff --git a/test/files/neg/t7752.check b/test/files/neg/t7752.check index 0a015d3f3785..bdf0ca46aebb 100644 --- a/test/files/neg/t7752.check +++ b/test/files/neg/t7752.check @@ -1,27 +1,27 @@ -t7752.scala:25: error: overloaded method value foo with alternatives: - [A](heading: String, rows: A*)(A,) - [A, B](heading: (String, String), rows: (A, B)*)(A, B) - [A, B, C](heading: (String, String, String), rows: (A, B, C)*)(A, B, C) - [A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*)(A, B, C, D) - [A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*)(A, B, C, D, E) - [A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*)(A, B, C, D, E, F) - [A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*)(A, B, C, D, E, F, G) - [A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*)(A, B, C, D, E, F, G, H) - [A, B, C, D, E, F, G, H, I](heading: (String, 
String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*)(A, B, C, D, E, F, G, H, I) - [A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*)(A, B, C, D, E, F, G, H, I, J) - [A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*)(A, B, C, D, E, F, G, H, I, J, K) - [A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*)(A, B, C, D, E, F, G, H, I, J, K, L) - [A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*)(A, B, C, D, E, F, G, H, I, J, K, L, M) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, 
String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) - [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) +t7752.scala:25: error: 
overloaded method foo with alternatives: + [A](heading: String, rows: A*): (A,) + [A, B](heading: (String, String), rows: (A, B)*): (A, B) + [A, B, C](heading: (String, String, String), rows: (A, B, C)*): (A, B, C) + [A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*): (A, B, C, D) + [A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*): (A, B, C, D, E) + [A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*): (A, B, C, D, E, F) + [A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*): (A, B, C, D, E, F, G) + [A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*): (A, B, C, D, E, F, G, H) + [A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*): (A, B, C, D, E, F, G, H, I) + [A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*): (A, B, C, D, E, F, G, H, I, J) + [A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*): (A, B, C, D, E, F, G, H, I, J, K) + [A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*): (A, B, C, D, E, F, G, H, I, J, K, L) + [A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*): (A, B, C, D, E, F, G, H, I, J, K, L, M) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, 
String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, 
E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) + [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) cannot be applied to (Int) foo((1)) ^ -one error found +1 error diff --git a/test/files/neg/t7752.scala b/test/files/neg/t7752.scala index 40ba2103b1f4..342a2ca34963 100644 --- a/test/files/neg/t7752.scala +++ b/test/files/neg/t7752.scala @@ -23,4 +23,4 @@ object Test { def foo[A](heading: String, rows: A*): Tuple1[A] = null foo((1)) -} \ No newline at end of file +} diff --git a/test/files/neg/t7756a.check b/test/files/neg/t7756a.check index 8d42717e47e3..8a49fa3558fb 100644 --- a/test/files/neg/t7756a.check +++ b/test/files/neg/t7756a.check @@ -4,4 +4,4 @@ t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence] locally(null: TA[Object]) ^ -two errors found +2 errors diff --git a/test/files/neg/t7756b.check b/test/files/neg/t7756b.check index 0a5ac1b0d307..b037ffef1d4e 100644 --- a/test/files/neg/t7756b.check +++ b/test/files/neg/t7756b.check @@ -1,6 +1,6 @@ -t7756b.scala:4: warning: comparing values of types Int and String using `==` will always 
yield false +t7756b.scala:5: warning: comparing values of types Int and String using `==` will always yield false case _ => 0 == "" ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t7756b.scala b/test/files/neg/t7756b.scala index 5d1b84a9c2bd..6feab9ba5c87 100644 --- a/test/files/neg/t7756b.scala +++ b/test/files/neg/t7756b.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { 0 match { case _ => 0 == "" diff --git a/test/files/neg/t7757a.check b/test/files/neg/t7757a.check index de24e23004d2..35d2bd138a68 100644 --- a/test/files/neg/t7757a.check +++ b/test/files/neg/t7757a.check @@ -1,4 +1,4 @@ t7757a.scala:1: error: ';' expected but '@' found. trait Foo @annot ^ -one error found +1 error diff --git a/test/files/neg/t7757a.scala b/test/files/neg/t7757a.scala index 24f6c16cb4c3..77168b145b50 100644 --- a/test/files/neg/t7757a.scala +++ b/test/files/neg/t7757a.scala @@ -1 +1 @@ -trait Foo @annot \ No newline at end of file +trait Foo @annot diff --git a/test/files/neg/t7757b.check b/test/files/neg/t7757b.check index 3e5a0f1fa672..2a983f6a023d 100644 --- a/test/files/neg/t7757b.check +++ b/test/files/neg/t7757b.check @@ -1,4 +1,4 @@ t7757b.scala:2: error: expected start of definition @annot2 ^ -one error found +1 error diff --git a/test/files/neg/t7757b.scala b/test/files/neg/t7757b.scala index e9a537dba160..e67d944bd8d6 100644 --- a/test/files/neg/t7757b.scala +++ b/test/files/neg/t7757b.scala @@ -1,2 +1,2 @@ trait Foo2 -@annot2 \ No newline at end of file +@annot2 diff --git a/test/files/neg/t7783.check b/test/files/neg/t7783.check index 2db01ea6771d..b25bd6bd02a5 100644 --- a/test/files/neg/t7783.check +++ b/test/files/neg/t7783.check @@ -1,18 +1,18 @@ -t7783.scala:2: warning: type D in object O is deprecated: +t7783.scala:3: warning: type D in object O is deprecated 
object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil } ^ -t7783.scala:12: warning: type D in object O is deprecated: +t7783.scala:13: warning: type D in object O is deprecated type T = O.D ^ -t7783.scala:13: warning: type D in object O is deprecated: +t7783.scala:14: warning: type D in object O is deprecated locally(null: O.D) ^ -t7783.scala:14: warning: type D in object O is deprecated: +t7783.scala:15: warning: type D in object O is deprecated val x: O.D = null ^ -t7783.scala:15: warning: type D in object O is deprecated: +t7783.scala:16: warning: type D in object O is deprecated locally(null.asInstanceOf[O.D]) ^ -error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found -one error found +error: No warnings can be incurred under -Werror. +5 warnings +1 error diff --git a/test/files/neg/t7783.scala b/test/files/neg/t7783.scala index abfd13790fa8..85ae75dafd1a 100644 --- a/test/files/neg/t7783.scala +++ b/test/files/neg/t7783.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil } object NoWarn { diff --git a/test/files/neg/t779.check b/test/files/neg/t779.check index 65f463c192d6..271769f9f12c 100644 --- a/test/files/neg/t779.check +++ b/test/files/neg/t779.check @@ -1,4 +1,4 @@ t779.scala:6: error: method ast has return statement; needs result type override def ast = return null ^ -one error found +1 error diff --git a/test/files/neg/t7808.check b/test/files/neg/t7808.check new file mode 100644 index 000000000000..4c72decfa294 --- /dev/null +++ b/test/files/neg/t7808.check @@ -0,0 +1,4 @@ +t7808.scala:5: error: recursive value ls needs type + val (ls, rs) = z match { + ^ +1 error diff --git a/test/files/neg/t7808.scala b/test/files/neg/t7808.scala new file mode 100644 index 000000000000..d01aea925188 --- /dev/null +++ b/test/files/neg/t7808.scala @@ -0,0 +1,18 @@ +//> using options 
-Vcyclic +class C { + type OI = Option[Int] + def f(z: OI, ls: List[OI], rs: List[OI]): (List[OI], List[OI]) = { + val (ls, rs) = z match { + case Some(_) => (z::ls, rs) + case _ => (ls, z::rs) + } + (ls, rs) + } +} + +/* +t7808.scala:5: error: recursive value x$1 needs type + val (ls, rs) = z match { + ^ +1 error +*/ diff --git a/test/files/neg/t7808b.check b/test/files/neg/t7808b.check new file mode 100644 index 000000000000..1ece6555fe49 --- /dev/null +++ b/test/files/neg/t7808b.check @@ -0,0 +1,4 @@ +t7808b.scala:5: error: recursive value x$1 needs type; value x$1 is synthetic; use -Vcyclic to find which definition needs an explicit type + val (ls, rs) = z match { + ^ +1 error diff --git a/test/files/neg/t7808b.scala b/test/files/neg/t7808b.scala new file mode 100644 index 000000000000..986587f582fb --- /dev/null +++ b/test/files/neg/t7808b.scala @@ -0,0 +1,18 @@ + +class C { + type OI = Option[Int] + def f(z: OI, ls: List[OI], rs: List[OI]): (List[OI], List[OI]) = { + val (ls, rs) = z match { + case Some(_) => (z::ls, rs) + case _ => (ls, z::rs) + } + (ls, rs) + } +} + +/* +t7808.scala:5: error: recursive value x$1 needs type + val (ls, rs) = z match { + ^ +1 error +*/ diff --git a/test/files/neg/t783.check b/test/files/neg/t783.check index 37610a50ffbe..e197d727d195 100644 --- a/test/files/neg/t783.check +++ b/test/files/neg/t783.check @@ -3,4 +3,4 @@ t783.scala:12: error: type mismatch; required: Contexts.this.global.Template globalInit0.Template(10, 20); ^ -one error found +1 error diff --git a/test/files/neg/t7834neg.check b/test/files/neg/t7834neg.check index 569df4b8ce12..882dfc7a43bc 100644 --- a/test/files/neg/t7834neg.check +++ b/test/files/neg/t7834neg.check @@ -38,4 +38,4 @@ t7834neg.scala:75: error: type mismatch; required: C.this.q.type x3 = super[S2].q // fail ^ -8 errors found +8 errors diff --git a/test/files/neg/t7848-interp-warn.check b/test/files/neg/t7848-interp-warn.check deleted file mode 100644 index 804037cd270a..000000000000 --- 
a/test/files/neg/t7848-interp-warn.check +++ /dev/null @@ -1,27 +0,0 @@ -t7848-interp-warn.scala:19: warning: possible missing interpolator: detected interpolated identifier `$foo` - "An important $foo message!" // warn on ident in scope - ^ -t7848-interp-warn.scala:23: warning: possible missing interpolator: detected an interpolated expression - "A doubly important ${foo * 2} message!" // warn on some expr, see below - ^ -t7848-interp-warn.scala:26: warning: possible missing interpolator: detected interpolated identifier `$bar` - def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test - ^ -t7848-interp-warn.scala:27: warning: possible missing interpolator: detected interpolated identifier `$bar` - def j = s"Try using '${ "something like $bar" }' instead." // warn - ^ -t7848-interp-warn.scala:33: warning: possible missing interpolator: detected an interpolated expression - def v = "${baz}${bar}" // warn on second expr - ^ -t7848-interp-warn.scala:34: warning: possible missing interpolator: detected an interpolated expression - def w = "${ op_* }" // warn, only cheap ident parsing - ^ -t7848-interp-warn.scala:35: warning: possible missing interpolator: detected an interpolated expression - def x = "${ bar }" // warn, a cheap ident in scope - ^ -t7848-interp-warn.scala:37: warning: possible missing interpolator: detected an interpolated expression - def z = "${ baz * 3}" // warn, no expr parsing - ^ -error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found -one error found diff --git a/test/files/neg/t7848-interp-warn.scala b/test/files/neg/t7848-interp-warn.scala deleted file mode 100644 index 713aec202ec4..000000000000 --- a/test/files/neg/t7848-interp-warn.scala +++ /dev/null @@ -1,38 +0,0 @@ -// scalac: -Xlint:missing-interpolator -Xfatal-warnings - -package test - -package pancake { } - -object Test { - type NonVal = Int - - def ok = "Don't warn on $nosymbol interpolated." 
- - def pass = "Don't warn on $pancake package names." - - def types = "Or $NonVal type symbols either." - - def bar = "bar" - def f = { - val foo = "bar" - "An important $foo message!" // warn on ident in scope - } - def g = { - val foo = "bar" - "A doubly important ${foo * 2} message!" // warn on some expr, see below - } - def h = s"Try using '$$bar' instead." // no warn - def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test - def j = s"Try using '${ "something like $bar" }' instead." // warn - def k = f"Try using '$bar' instead." // no warn on other std interps - def p = "Template ${} {}" // no warn on unlikely or empty expressions - def q = "${}$bar" // disables subsequent checks! (a feature) - def r = "${}${bar}" // disables subsequent checks! (a feature) - - def v = "${baz}${bar}" // warn on second expr - def w = "${ op_* }" // warn, only cheap ident parsing - def x = "${ bar }" // warn, a cheap ident in scope - def y = "${ baz }" // no warn, cheap ident not in scope - def z = "${ baz * 3}" // warn, no expr parsing -} diff --git a/test/files/neg/t7850.check b/test/files/neg/t7850.check index 60d62f1ce269..3eccd9045b84 100644 --- a/test/files/neg/t7850.check +++ b/test/files/neg/t7850.check @@ -4,4 +4,4 @@ t7850.scala:11: error: an unapply result must have a member `def isEmpty: Boolea t7850.scala:12: error: an unapply result must have a member `def isEmpty: Boolean` val Dingy(x2) = new Dingy(1) ^ -two errors found +2 errors diff --git a/test/files/neg/t7850.scala b/test/files/neg/t7850.scala index 04edad82b5dd..794ce2eb00da 100644 --- a/test/files/neg/t7850.scala +++ b/test/files/neg/t7850.scala @@ -7,7 +7,7 @@ class Dingy(a: Int) { def get = this } object Dingy { def unapply(a: Dingy) = a } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val Casey(x1) = new Casey(1) val Dingy(x2) = new Dingy(1) println(s"$x1 $x2") diff --git a/test/files/neg/t7859.check b/test/files/neg/t7859.check index 
5789e2a1228c..dac10031799e 100644 --- a/test/files/neg/t7859.check +++ b/test/files/neg/t7859.check @@ -1,7 +1,7 @@ B_2.scala:6: error: not found: value x new p1.A(x).x ^ -B_2.scala:6: error: value x in class A cannot be accessed in p1.A +B_2.scala:6: error: value x in class A cannot be accessed as a member of p1.A from object Test new p1.A(x).x ^ B_2.scala:7: error: not found: value x @@ -13,7 +13,7 @@ B_2.scala:7: error: value x is not a member of B B_2.scala:8: error: not found: value x new C(x).x ^ -B_2.scala:8: error: value x in class C cannot be accessed in C +B_2.scala:8: error: value x in class C cannot be accessed as a member of C from object Test new C(x).x ^ -6 errors found +6 errors diff --git a/test/files/neg/t7860.check b/test/files/neg/t7860.check index 6c2ad9d82a38..34da077bb77f 100644 --- a/test/files/neg/t7860.check +++ b/test/files/neg/t7860.check @@ -1,9 +1,9 @@ -t7860.scala:6: warning: private class for your eyes only in object Test is never used +t7860.scala:7: warning: private class for your eyes only in object Test is never used private implicit class `for your eyes only`(i: Int) { // warn ^ -t7860.scala:32: warning: private class C in object Test3 is never used +t7860.scala:33: warning: private class C in object Test3 is never used private implicit class C(val i: Int) extends AnyVal { // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t7860.scala b/test/files/neg/t7860.scala index 903717dc8813..7eaf1f7b09ba 100644 --- a/test/files/neg/t7860.scala +++ b/test/files/neg/t7860.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:privates +//> using options -Xfatal-warnings -Ywarn-unused:privates +// class Test diff --git a/test/files/neg/t7870.check b/test/files/neg/t7870.check index d9db911ac1ce..3a76201bf04c 100644 --- a/test/files/neg/t7870.check +++ b/test/files/neg/t7870.check @@ -1,4 +1,4 @@ t7870.scala:1: error: in class C, multiple overloaded alternatives of constructor C define default arguments. class C(a: Int = 0, b: Any) { ^ -one error found +1 error diff --git a/test/files/neg/t7872.check b/test/files/neg/t7872.check index 57d9772abc5d..16216db3cd96 100644 --- a/test/files/neg/t7872.check +++ b/test/files/neg/t7872.check @@ -1,10 +1,10 @@ t7872.scala:6: error: contravariant type a occurs in covariant position in type [-a]Cov[a] of type l type x = {type l[-a] = Cov[a]} ^ -t7872.scala:8: error: covariant type a occurs in contravariant position in type [+a]Inv[a] of type l +t7872.scala:8: error: covariant type a occurs in contravariant position in type [+a]Inv[a] of value foo[({type l[+a] = Inv[a]})#l] ^ t7872.scala:5: error: contravariant type a occurs in covariant position in type [-a]Cov[a] of type l type l[-a] = Cov[a] ^ -three errors found +3 errors diff --git a/test/files/neg/t7872b.check b/test/files/neg/t7872b.check index 0dc4e76301a4..4460a01621fb 100644 --- a/test/files/neg/t7872b.check +++ b/test/files/neg/t7872b.check @@ -1,7 +1,7 @@ -t7872b.scala:8: error: contravariant type a occurs in covariant position in type [-a]List[a] of type l +t7872b.scala:8: error: contravariant type a occurs in covariant position in type [-a]List[a] of value def oops1 = down[({type l[-a] = List[a]})#l](List('whatever: Object)).head + "oops" ^ -t7872b.scala:19: error: covariant type a occurs in contravariant position in type 
[+a]coinv.Stringer[a] of type l +t7872b.scala:19: error: covariant type a occurs in contravariant position in type [+a]a => String of value def oops2 = up[({type l[+a] = Stringer[a]})#l]("printed: " + _) ^ -two errors found +2 errors diff --git a/test/files/neg/t7872b.scala b/test/files/neg/t7872b.scala index 307a1470c58f..daaa0ad02fe3 100644 --- a/test/files/neg/t7872b.scala +++ b/test/files/neg/t7872b.scala @@ -4,7 +4,7 @@ object coinv { up(List("hi")) - // should not compile; `l' is unsound + // should not compile; `l` is unsound def oops1 = down[({type l[-a] = List[a]})#l](List('whatever: Object)).head + "oops" // scala> oops1 // java.lang.ClassCastException: scala.Symbol cannot be cast to java.lang.String @@ -15,7 +15,7 @@ object coinv { // [error] type A is contravariant, but type _ is declared covariant // up[Stringer]("printed: " + _) - // should not compile; `l' is unsound + // should not compile; `l` is unsound def oops2 = up[({type l[+a] = Stringer[a]})#l]("printed: " + _) // scala> oops2(Some(33)) // java.lang.ClassCastException: scala.Some cannot be cast to java.lang.String diff --git a/test/files/neg/t7872c.check b/test/files/neg/t7872c.check index 469449dbd5bb..b78f3a4560e8 100644 --- a/test/files/neg/t7872c.check +++ b/test/files/neg/t7872c.check @@ -8,4 +8,4 @@ t7872c.scala:7: error: type mismatch; required: F[Object] down(List('whatever: Object)) ^ -two errors found +2 errors diff --git a/test/files/neg/t7877.check b/test/files/neg/t7877.check index 7f7f832463a0..64fadcbee6b7 100644 --- a/test/files/neg/t7877.check +++ b/test/files/neg/t7877.check @@ -4,4 +4,4 @@ t7877.scala:6: error: not found: value Y t7877.scala:7: error: OnNext[Any] does not take parameters case OnNext[Any]() => () // should *not* be allowed, but was. 
^ -two errors found +2 errors diff --git a/test/files/neg/t7879.check b/test/files/neg/t7879.check new file mode 100644 index 000000000000..d64d37be30b9 --- /dev/null +++ b/test/files/neg/t7879.check @@ -0,0 +1,6 @@ +t7879.scala:2: error: case `copy` method is allowed to have by-name parameters under Scala 3 (or with -Xsource-features:case-copy-by-name) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=C +case class C(i: Int)(j: => Int)(k: => Int) { def sum = i + j + k } + ^ +1 error diff --git a/test/files/neg/t7879.scala b/test/files/neg/t7879.scala new file mode 100644 index 000000000000..e873120e5ef2 --- /dev/null +++ b/test/files/neg/t7879.scala @@ -0,0 +1,2 @@ +//> using options -Xsource:3 +case class C(i: Int)(j: => Int)(k: => Int) { def sum = i + j + k } diff --git a/test/files/neg/t7879b.check b/test/files/neg/t7879b.check new file mode 100644 index 000000000000..89579285661d --- /dev/null +++ b/test/files/neg/t7879b.check @@ -0,0 +1,4 @@ +t7879b.scala:5: error: value copy is not a member of C + def f(c: C): C = c.copy(42)(Seq(9, 9, 9): _*) + ^ +1 error diff --git a/test/files/neg/t7879b.scala b/test/files/neg/t7879b.scala new file mode 100644 index 000000000000..d23e5dea0927 --- /dev/null +++ b/test/files/neg/t7879b.scala @@ -0,0 +1,6 @@ +//> using options -Xsource:3 -Xsource-features:case-copy-by-name +case class C(i: Int)(js: Int*) { def sum = i + js.sum } + +class Usage { + def f(c: C): C = c.copy(42)(Seq(9, 9, 9): _*) +} diff --git a/test/files/neg/t7879c.check b/test/files/neg/t7879c.check new file mode 100644 index 000000000000..29754cfca1b6 --- /dev/null +++ b/test/files/neg/t7879c.check @@ -0,0 +1,4 @@ +t7879c.scala:6: error: value copy is not a member of C + C(42)(27)(1).copy() + ^ +1 error diff --git a/test/files/neg/t7879c.scala b/test/files/neg/t7879c.scala new file mode 
100644 index 000000000000..b16a74067350 --- /dev/null +++ b/test/files/neg/t7879c.scala @@ -0,0 +1,8 @@ +//> using options -Werror -Xlint +case class C(i: Int)(j: => Int)(k: => Int) { def sum = i + j + k } + +object Test extends App { + println { + C(42)(27)(1).copy() + } +} diff --git a/test/files/neg/t7895.check b/test/files/neg/t7895.check index 1a58e24b77f7..5a7889b2c620 100644 --- a/test/files/neg/t7895.check +++ b/test/files/neg/t7895.check @@ -1,4 +1,4 @@ t7895.scala:4: error: not found: value Goop case Goop(a, b, c) => Tuple2(a, b) ^ -one error found +1 error diff --git a/test/files/neg/t7895b.check b/test/files/neg/t7895b.check index 87ea72704e2d..34318b5b0907 100644 --- a/test/files/neg/t7895b.check +++ b/test/files/neg/t7895b.check @@ -4,4 +4,4 @@ t7895b.scala:4: error: not found: value a t7895b.scala:4: error: not found: value b foo(a, b) ^ -two errors found +2 errors diff --git a/test/files/neg/t7895c.check b/test/files/neg/t7895c.check index d4745b1f4be7..41c932ead5ad 100644 --- a/test/files/neg/t7895c.check +++ b/test/files/neg/t7895c.check @@ -10,4 +10,4 @@ t7895c.scala:2: error: not found: value bippity t7895c.scala:2: error: not found: value bazingo def booboo = bong + booble + bippity - bazingo ^ -four errors found +4 errors diff --git a/test/files/neg/t7897.check b/test/files/neg/t7897.check index 48eff511c7b1..fd0d3ec3e9f5 100644 --- a/test/files/neg/t7897.check +++ b/test/files/neg/t7897.check @@ -1,4 +1,4 @@ t7897.scala:19: error: value length is not a member of p0.Single case p0.Single(x) => println(s"`$x` has ${x.length} chars") ^ -one error found +1 error diff --git a/test/files/neg/t7899.check b/test/files/neg/t7899.check index febfe76b8a1d..2fc667f5a99f 100644 --- a/test/files/neg/t7899.check +++ b/test/files/neg/t7899.check @@ -3,4 +3,4 @@ t7899.scala:5: error: type mismatch; required: (=> Int) => ? 
foo(identity)() ^ -one error found +1 error diff --git a/test/files/neg/t7899.scala b/test/files/neg/t7899.scala index f2dea3ab1f7a..279f24b9cf69 100644 --- a/test/files/neg/t7899.scala +++ b/test/files/neg/t7899.scala @@ -1,7 +1,7 @@ object Test { def foo[B](f: (=> Int) => B): () => B = () => f(0) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { foo(identity)() } } diff --git a/test/files/neg/t7967.check b/test/files/neg/t7967.check index cde950dcdf6b..f9d8a0fcb213 100644 --- a/test/files/neg/t7967.check +++ b/test/files/neg/t7967.check @@ -6,4 +6,4 @@ t7967.scala:8: error: illegal inheritance; self-type Test.CC does not conform to Test.CC's selftype Test.CC new CC {} // should fail, doesn't ^ -two errors found +2 errors diff --git a/test/files/neg/t798.check b/test/files/neg/t798.check index b120f3a40395..5cd4eaf0d963 100644 --- a/test/files/neg/t798.check +++ b/test/files/neg/t798.check @@ -1,4 +1,4 @@ t798.scala:2: error: cyclic aliasing or subtyping involving type Bracks trait Test[Bracks <: Bracks] { ^ -one error found +1 error diff --git a/test/files/neg/t7980.check b/test/files/neg/t7980.check index 031c23dbeb5a..9e1db747672f 100644 --- a/test/files/neg/t7980.check +++ b/test/files/neg/t7980.check @@ -1,4 +1,4 @@ t7980.scala:7: error: Can't unquote Nothing, bottom type values often indicate programmer mistake println(q"class ${Name(X)} { }") ^ -one error found +1 error diff --git a/test/files/neg/t7984.check b/test/files/neg/t7984.check index 26d64c44f3bd..726e6d773bd7 100644 --- a/test/files/neg/t7984.check +++ b/test/files/neg/t7984.check @@ -1,6 +1,6 @@ -t7984.scala:5: warning: non-variable type argument Int in type pattern List[Int] (the underlying of Test.this.ListInt) is unchecked since it is eliminated by erasure +t7984.scala:6: warning: non-variable type argument Int in type pattern List[Int] (the underlying of Test.this.ListInt) is unchecked since it is eliminated by erasure case is: ListInt => is.head ^ -error: No 
warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t7984.scala b/test/files/neg/t7984.scala index af30ade4e26f..c53dd1633eb8 100644 --- a/test/files/neg/t7984.scala +++ b/test/files/neg/t7984.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class Test { type ListInt = List[Int] List[Any]("") match { diff --git a/test/files/neg/t800.check b/test/files/neg/t800.check index 238b8dd27d01..3c7a7d00ccb4 100644 --- a/test/files/neg/t800.check +++ b/test/files/neg/t800.check @@ -13,4 +13,4 @@ t800.scala:16: error: variable qualification is defined twice; the conflicting variable qualification was defined at line 15:7 var qualification = false; ^ -four errors found +4 errors diff --git a/test/files/neg/t8002-nested-scope.check b/test/files/neg/t8002-nested-scope.check index f66249e43213..ba567d797fda 100644 --- a/test/files/neg/t8002-nested-scope.check +++ b/test/files/neg/t8002-nested-scope.check @@ -1,4 +1,4 @@ -t8002-nested-scope.scala:8: error: method x in class C cannot be accessed in C +t8002-nested-scope.scala:8: error: method x in class C cannot be accessed as a member of C from object C new C().x ^ -one error found +1 error diff --git a/test/files/neg/t8006.check b/test/files/neg/t8006.check index 6152d0fba364..b3e168540020 100644 --- a/test/files/neg/t8006.check +++ b/test/files/neg/t8006.check @@ -1,6 +1,6 @@ -t8006.scala:3: error: too many arguments (2) for method applyDynamicNamed: (value: (String, Any))String +t8006.scala:3: error: too many arguments (found 2, expected 2-tuple) for method applyDynamicNamed: (value: (String, Any)): String error after rewriting to X.this.d.applyDynamicNamed("meth")(scala.Tuple2("value1", 10), scala.Tuple2("value2", 100)) possible cause: maybe a wrong Dynamic method signature? 
d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed ^ -one error found +1 error diff --git a/test/files/neg/t8006.scala b/test/files/neg/t8006.scala index b2f71c15873c..8dc60697dcb9 100644 --- a/test/files/neg/t8006.scala +++ b/test/files/neg/t8006.scala @@ -5,4 +5,4 @@ object X { import language.dynamics class D extends Dynamic { def applyDynamicNamed(name: String)(value: (String, Any)) = name -} \ No newline at end of file +} diff --git a/test/files/neg/t8012.check b/test/files/neg/t8012.check new file mode 100644 index 000000000000..216303617827 --- /dev/null +++ b/test/files/neg/t8012.check @@ -0,0 +1,4 @@ +t8012.scala:4: error: class type required but p.C[_] found + object `package` extends C[_] + ^ +1 error diff --git a/test/files/neg/t8012.scala b/test/files/neg/t8012.scala new file mode 100644 index 000000000000..2c43674947c8 --- /dev/null +++ b/test/files/neg/t8012.scala @@ -0,0 +1,5 @@ +package p { + class C[A] + + object `package` extends C[_] +} diff --git a/test/files/neg/t8015-ffa.check b/test/files/neg/t8015-ffa.check index 0f28be7fe7b7..41ee7aa8d402 100644 --- a/test/files/neg/t8015-ffa.check +++ b/test/files/neg/t8015-ffa.check @@ -3,4 +3,4 @@ t8015-ffa.scala:7: error: type mismatch; required: Int val i: Int = "3" // error line 7 (was 8) ^ -one error found +1 error diff --git a/test/files/neg/t8015-ffb.check b/test/files/neg/t8015-ffb.check index 936ea67cdac6..bacba6254196 100644 --- a/test/files/neg/t8015-ffb.check +++ b/test/files/neg/t8015-ffb.check @@ -1,6 +1,6 @@ -t8015-ffb.scala:11: warning: side-effecting nullary methods are discouraged: suggest defining as `def w()` instead +t8015-ffb.scala:12: warning: side-effecting nullary methods are discouraged: suggest defining as `def w()` instead [quickfixable] def w = { x\u000c() } // ^L is colored blue on this screen, hardly visible ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t8015-ffb.scala b/test/files/neg/t8015-ffb.scala index a2d25c16460f..d94e4d6353f6 100644 --- a/test/files/neg/t8015-ffb.scala +++ b/test/files/neg/t8015-ffb.scala @@ -1,12 +1,13 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Xlint -Xfatal-warnings +// trait G { - val c: Char = '\u000a' // disallowed! - def x\u000d\u000a = 9 // as nl + val c: Char = '\u000a' // allowed! + def x = 9 def y() = x def z() = { - y()\u000a() // was Int does not take parameters + y() () // was Int does not take parameters } - def v = y()\u000c() // was Int does not take parameters + def v = y() () // was Int does not take parameters def w = { x () } // ^L is colored blue on this screen, hardly visible } diff --git a/test/files/neg/t8024.check b/test/files/neg/t8024.check index bd551aa59173..3d3d287dc990 100644 --- a/test/files/neg/t8024.check +++ b/test/files/neg/t8024.check @@ -3,4 +3,4 @@ it is both defined in package object p and imported subsequently by import java.lang.Math.sqrt sqrt(0d) ^ -one error found +1 error diff --git a/test/files/neg/t8024b.check b/test/files/neg/t8024b.check index 9cd89bca535c..1f7902a38779 100644 --- a/test/files/neg/t8024b.check +++ b/test/files/neg/t8024b.check @@ -3,4 +3,4 @@ it is both defined in object FastComplex and imported subsequently by import java.lang.Math.sqrt sqrt(0d) ^ -one error found +1 error diff --git a/test/files/neg/t8035-deprecated.check b/test/files/neg/t8035-deprecated.check index 7380a69c491f..46054f04982b 100644 --- a/test/files/neg/t8035-deprecated.check +++ b/test/files/neg/t8035-deprecated.check @@ -1,21 +1,24 @@ -t8035-deprecated.scala:3: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. 
- signature: GenSetLike.apply(elem: A): Boolean +t8035-deprecated.scala:4: warning: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want + signature: SetOps.apply(elem: A): Boolean given arguments: - after adaptation: GenSetLike((): Unit) + after adaptation: SetOps((): Unit) List(1,2,3).toSet() ^ -t8035-deprecated.scala:6: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. +t8035-deprecated.scala:4: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. + List(1,2,3).toSet() + ^ +t8035-deprecated.scala:7: warning: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want signature: A(x: T): Foo.A[T] given arguments: after adaptation: new A((): Unit) new A ^ -t8035-deprecated.scala:10: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous. - signature: Format.format(x$1: Any): String +t8035-deprecated.scala:11: warning: adaptation of an empty argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous + signature: Format.format(x$1: Object): String given arguments: after adaptation: Format.format((): Unit) sdf.format() ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/t8035-deprecated.scala b/test/files/neg/t8035-deprecated.scala index 98002f8569e1..bb8a93302b19 100644 --- a/test/files/neg/t8035-deprecated.scala +++ b/test/files/neg/t8035-deprecated.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -Werror -Xlint +// object Foo { List(1,2,3).toSet() diff --git a/test/files/neg/t8035-no-adapted-args.check b/test/files/neg/t8035-no-adapted-args.check deleted file mode 100644 index 3f68e9059177..000000000000 --- a/test/files/neg/t8035-no-adapted-args.check +++ /dev/null @@ -1,15 +0,0 @@ -t8035-no-adapted-args.scala:5: error: too many arguments (3) for method f: (x: (Int, Int, Int))Int - f(1, 2, 3) - ^ -t8035-no-adapted-args.scala:6: error: not enough arguments for method f: (x: Unit)Int. -Unspecified value parameter x. - f() - ^ -t8035-no-adapted-args.scala:5: warning: No automatic adaptation here: use explicit parentheses. - signature: Test.f[T](x: T): Int - given arguments: 1, 2, 3 - after adaptation: Test.f((1, 2, 3): (Int, Int, Int)) - f(1, 2, 3) - ^ -one warning found -two errors found diff --git a/test/files/neg/t8035-no-adapted-args.scala b/test/files/neg/t8035-no-adapted-args.scala deleted file mode 100644 index b70dc1bfdc17..000000000000 --- a/test/files/neg/t8035-no-adapted-args.scala +++ /dev/null @@ -1,7 +0,0 @@ -// scalac: -Yno-adapted-args -object Test { - def f[T](x: T) = 0 - - f(1, 2, 3) - f() -} diff --git a/test/files/neg/t8035-removed.check b/test/files/neg/t8035-removed.check index 6bd30f03f3de..cf47a1dfa38b 100644 --- a/test/files/neg/t8035-removed.check +++ b/test/files/neg/t8035-removed.check @@ -1,16 +1,35 @@ -t8035-removed.scala:3: error: Adaptation of argument list by inserting () has been removed. 
- signature: GenSetLike.apply(elem: A): Boolean +t8035-removed.scala:4: error: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want + signature: SetOps.apply(elem: A): Boolean given arguments: + after adaptation: SetOps((): Unit) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=scala.collection.SetOps.apply List(1,2,3).toSet() ^ -t8035-removed.scala:6: error: Adaptation of argument list by inserting () has been removed. +t8035-removed.scala:4: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. + List(1,2,3).toSet() + ^ +t8035-removed.scala:7: error: adaptation of an empty argument list by inserting () is deprecated: this is unlikely to be what you want signature: A(x: T): Foo.A[T] given arguments: + after adaptation: new A((): Unit) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=Foo.A. new A ^ -t8035-removed.scala:10: error: Adaptation of argument list by inserting () has been removed. - signature: Format.format(x$1: Any): String +t8035-removed.scala:11: error: adaptation of an empty argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous + signature: Format.format(x$1: Object): String given arguments: + after adaptation: Format.format((): Unit) +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=java.text.Format.format sdf.format() ^ -three errors found +t8035-removed.scala:14: warning: adapted the argument list to the expected 2-tuple: add additional parens instead + signature: List.::[B >: A](elem: B): List[B] + given arguments: 42, 27 + after adaptation: List.::((42, 27): (Int, Int)) [quickfixable] + Nil.::(42, 27) // yeswarn + ^ +2 warnings +3 errors diff --git a/test/files/neg/t8035-removed.scala b/test/files/neg/t8035-removed.scala index 4ddfea14e6da..88ccfba66c48 100644 --- a/test/files/neg/t8035-removed.scala +++ b/test/files/neg/t8035-removed.scala @@ -1,4 +1,5 @@ -// scalac: -Xfuture +//> using options -Werror -Xlint -Xsource:3 + object Foo { List(1,2,3).toSet() @@ -8,4 +9,7 @@ object Foo { import java.text.SimpleDateFormat val sdf = new SimpleDateFormat("yyyyMMdd-HH0000") sdf.format() + + (42, 27) :: Nil // nowarn + Nil.::(42, 27) // yeswarn } diff --git a/test/files/neg/t8044-b.check b/test/files/neg/t8044-b.check index 4a93e9a77238..ef4a1375d2fa 100644 --- a/test/files/neg/t8044-b.check +++ b/test/files/neg/t8044-b.check @@ -1,4 +1,4 @@ t8044-b.scala:3: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.) 
def g = 42 match { case `Oops` : Int => } // must be varish - ^ -one error found + ^ +1 error diff --git a/test/files/neg/t8044.check b/test/files/neg/t8044.check index 678bf8c7007b..612da7afcb68 100644 --- a/test/files/neg/t8044.check +++ b/test/files/neg/t8044.check @@ -1,4 +1,4 @@ t8044.scala:3: error: not found: value _ def f = 42 match { case `_` : Int => `_` } // doesn't leak quoted underscore ^ -one error found +1 error diff --git a/test/files/neg/t8072.check b/test/files/neg/t8072.check deleted file mode 100644 index 92670101353c..000000000000 --- a/test/files/neg/t8072.check +++ /dev/null @@ -1,4 +0,0 @@ -t8072.scala:4: error: value ifParSeq is not a member of List[Int] - val y = x.ifParSeq[Int](throw new Exception).otherwise(0) // Shouldn't compile - ^ -one error found diff --git a/test/files/neg/t8072.scala b/test/files/neg/t8072.scala deleted file mode 100644 index 2c8213e34ad6..000000000000 --- a/test/files/neg/t8072.scala +++ /dev/null @@ -1,6 +0,0 @@ -class NoIfParSeq { - import collection.parallel._ - val x = List(1,2) - val y = x.ifParSeq[Int](throw new Exception).otherwise(0) // Shouldn't compile - val z = x.toParArray -} \ No newline at end of file diff --git a/test/files/neg/t8079a.check b/test/files/neg/t8079a.check index 6bbe78afa6e3..47eb2f1b091c 100644 --- a/test/files/neg/t8079a.check +++ b/test/files/neg/t8079a.check @@ -1,4 +1,4 @@ t8079a.scala:3: error: contravariant type I occurs in covariant position in type C.this.X of value b def f2(b: X): Unit ^ -one error found +1 error diff --git a/test/files/neg/t8104.check b/test/files/neg/t8104.check index 69b3461bd52e..b781d95393dd 100644 --- a/test/files/neg/t8104.check +++ b/test/files/neg/t8104.check @@ -1,4 +1,4 @@ Test_2.scala:20: error: could not find implicit value for parameter e: Generic.Aux[Test.C,(Int, Int)] implicitly[Generic.Aux[C, (Int, Int)]] ^ -one error found +1 error diff --git a/test/files/neg/t8104/Macros_1.scala b/test/files/neg/t8104/Macros_1.scala index 
e135bd807b11..129c8db16f6a 100644 --- a/test/files/neg/t8104/Macros_1.scala +++ b/test/files/neg/t8104/Macros_1.scala @@ -8,4 +8,4 @@ object Macros { val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.info)) q"new Generic[$T]{ type Repr = $Repr }" } -} \ No newline at end of file +} diff --git a/test/files/neg/t8127a.check b/test/files/neg/t8127a.check index 5a30574861a8..764ab5310ff8 100644 --- a/test/files/neg/t8127a.check +++ b/test/files/neg/t8127a.check @@ -1,4 +1,4 @@ -t8127a.scala:7: error: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[_$1] +t8127a.scala:7: error: Seq[Any] is not a valid result type of an unapplySeq method of an extractor. case H(v) => ^ -one error found +1 error diff --git a/test/files/neg/t8127a.scala b/test/files/neg/t8127a.scala index c05facdac1c4..e1bd1559667a 100644 --- a/test/files/neg/t8127a.scala +++ b/test/files/neg/t8127a.scala @@ -7,6 +7,9 @@ object Test { case H(v) => case _ => } - // now: too many patterns for object H offering Boolean: expected 0, found 1 - // was: result type Seq[_$2] of unapplySeq defined in method unapplySeq in object H does not conform to Option[_] } + // later: OK + // then: Seq[Any] is not a valid result type of an unapplySeq method of an extractor. 
+ // and: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[Any] + // now: too many patterns for object H offering Boolean: expected 0, found 1 + // was: result type Seq[_$2] of unapplySeq defined in method unapplySeq in object H does not conform to Option[_] diff --git a/test/files/neg/t8143a.check b/test/files/neg/t8143a.check index 4e11000a2a53..5a8232f9ca31 100644 --- a/test/files/neg/t8143a.check +++ b/test/files/neg/t8143a.check @@ -1,5 +1,6 @@ -t8143a.scala:2: error: overriding method f in class Foo of type => Int; - method f has weaker access privileges; it should not be private +t8143a.scala:2: error: weaker access privileges in overriding +def f: Int (defined in class Foo) + override should not be private class Bar extends Foo { private def f = 10 } ^ -one error found +1 error diff --git a/test/files/neg/t8143a.scala b/test/files/neg/t8143a.scala index 4ec539e6711d..6abe35e335c4 100644 --- a/test/files/neg/t8143a.scala +++ b/test/files/neg/t8143a.scala @@ -12,4 +12,4 @@ class Foo3 { private[this] def f = 5 } class Bar3 extends Foo3 { private def f = 10 } // okay class Foo4 { private def f = 5 } -class Bar4 extends Foo4 { private[this] def f = 10 } // okay \ No newline at end of file +class Bar4 extends Foo4 { private[this] def f = 10 } // okay diff --git a/test/files/neg/t8146-non-finitary-2.check b/test/files/neg/t8146-non-finitary-2.check index 8c2e1436c2dd..fc8bbcdb7a06 100644 --- a/test/files/neg/t8146-non-finitary-2.check +++ b/test/files/neg/t8146-non-finitary-2.check @@ -6,4 +6,4 @@ t8146-non-finitary-2.scala:7: error: type mismatch; required: N[C[Int]] def foo(c: C[Int]): N[C[Int]] = c ^ -two errors found +2 errors diff --git a/test/files/neg/t8146-non-finitary-2.scala b/test/files/neg/t8146-non-finitary-2.scala index c12f5f8f497e..d8fe3495456f 100644 --- a/test/files/neg/t8146-non-finitary-2.scala +++ b/test/files/neg/t8146-non-finitary-2.scala @@ -1,5 +1,5 @@ // 
Example 3 from "On Decidability of Nominal Subtyping with Variance" (Pierce, Kennedy) -// http://research.microsoft.com/pubs/64041/fool2007.pdf +// https://research.microsoft.com/pubs/64041/fool2007.pdf trait N[-Z] trait D[Y] trait C[X] extends N[N[C[D[X]]]] diff --git a/test/files/neg/t8146-non-finitary.check b/test/files/neg/t8146-non-finitary.check index 8363b750caf3..b1919571f586 100644 --- a/test/files/neg/t8146-non-finitary.check +++ b/test/files/neg/t8146-non-finitary.check @@ -6,4 +6,4 @@ t8146-non-finitary.scala:6: error: type mismatch; required: N[C[Int]] def foo(c: C[Int]): N[C[Int]] = c ^ -two errors found +2 errors diff --git a/test/files/neg/t8146-non-finitary.scala b/test/files/neg/t8146-non-finitary.scala index 3d8a3074c76a..b8ab1724c74d 100644 --- a/test/files/neg/t8146-non-finitary.scala +++ b/test/files/neg/t8146-non-finitary.scala @@ -1,5 +1,5 @@ // Example 3 from "On Decidability of Nominal Subtyping with Variance" (Pierce, Kennedy) -// http://research.microsoft.com/pubs/64041/fool2007.pdf +// https://research.microsoft.com/pubs/64041/fool2007.pdf trait N[-A] trait C[A] extends N[N[C[C[A]]]] object Test { diff --git a/test/files/neg/t8157.check b/test/files/neg/t8157.check index 9a21a49a0771..6ad4ea64683b 100644 --- a/test/files/neg/t8157.check +++ b/test/files/neg/t8157.check @@ -1,4 +1,4 @@ t8157.scala:1: error: in object Test, multiple overloaded alternatives of method foo define default arguments. 
object Test { ^ -one error found +1 error diff --git a/test/files/neg/t8158.check b/test/files/neg/t8158.check index fa6b744ba5a5..84d7360283dd 100644 --- a/test/files/neg/t8158.check +++ b/test/files/neg/t8158.check @@ -1,4 +1,4 @@ Test_2.scala:10: error: not enough patterns for <$anon: AnyRef> offering AnyRef{def isEmpty: Boolean; def get: $anon; def unapply(x: String): $anon}: expected 1, found 0 case X() => ^ -one error found +1 error diff --git a/test/files/neg/t8158/Macros_1.scala b/test/files/neg/t8158/Macros_1.scala index b84e3ed8d31d..c0df1d9c0681 100644 --- a/test/files/neg/t8158/Macros_1.scala +++ b/test/files/neg/t8158/Macros_1.scala @@ -31,4 +31,4 @@ object Max { } c.Expr[Any](t) } -} \ No newline at end of file +} diff --git a/test/files/neg/t8158/Test_2.scala b/test/files/neg/t8158/Test_2.scala index f5ac6616bb2c..aeeb62982b0c 100644 --- a/test/files/neg/t8158/Test_2.scala +++ b/test/files/neg/t8158/Test_2.scala @@ -11,4 +11,4 @@ class BugTest { case _ => ??? } } -} \ No newline at end of file +} diff --git a/test/files/neg/t8177a.check b/test/files/neg/t8177a.check index 0d01206e0c2f..20f9efea322f 100644 --- a/test/files/neg/t8177a.check +++ b/test/files/neg/t8177a.check @@ -3,4 +3,4 @@ t8177a.scala:5: error: type mismatch; required: A{type Result = String} : A { type Result = String} = x ^ -one error found +1 error diff --git a/test/files/neg/t8177a.scala b/test/files/neg/t8177a.scala index d1e47f8c1e6d..0347541eaf39 100644 --- a/test/files/neg/t8177a.scala +++ b/test/files/neg/t8177a.scala @@ -3,4 +3,4 @@ trait A { type Result } class PolyTests { def wrong(x: A { type Result = Int }) : A { type Result = String} = x -} \ No newline at end of file +} diff --git a/test/files/neg/t8178.check b/test/files/neg/t8178.check new file mode 100644 index 000000000000..a221c15b8074 --- /dev/null +++ b/test/files/neg/t8178.check @@ -0,0 +1,23 @@ +t8178.scala:6: warning: match may not be exhaustive. 
+It would fail on the following inputs: FailsChild2(_), VarArgs1(_) + def t1(f: Fails) = f match { // inexhaustive on both, was: no warning + ^ +t8178.scala:10: warning: match may not be exhaustive. +It would fail on the following input: VarArgs1(_) + def t2(f: Fails) = f match { // inexhaustive on VarArgs1 + ^ +t8178.scala:14: warning: match may not be exhaustive. +It would fail on the following input: VarArgs1(_) + def t12(f: Fails) = f match { // inexhaustive on VarArgs1, was: no warning + ^ +t8178.scala:19: warning: match may not be exhaustive. +It would fail on the following input: VarArgs1(_) + def t21(f: Fails) = f match { // inexhaustive on VarArgs1, was: no warning + ^ +t8178.scala:35: warning: match may not be exhaustive. +It would fail on the following input: SeqArgs2(_) + def t1(f: Works) = f match { // inexhaustive on SeqArgs2 + ^ +error: No warnings can be incurred under -Werror. +5 warnings +1 error diff --git a/test/files/neg/t8178.scala b/test/files/neg/t8178.scala new file mode 100644 index 000000000000..37595cc83062 --- /dev/null +++ b/test/files/neg/t8178.scala @@ -0,0 +1,38 @@ +//> using options -Xfatal-warnings +sealed trait Fails +case class VarArgs1(a: String*) extends Fails +case class FailsChild2(a: Seq[String]) extends Fails +object FailsTest { + def t1(f: Fails) = f match { // inexhaustive on both, was: no warning + case VarArgs1(_) => ??? + } + + def t2(f: Fails) = f match { // inexhaustive on VarArgs1 + case FailsChild2(_) => ??? + } + + def t12(f: Fails) = f match { // inexhaustive on VarArgs1, was: no warning + case VarArgs1(_) => ??? + case FailsChild2(_) => ??? + } + + def t21(f: Fails) = f match { // inexhaustive on VarArgs1, was: no warning + case FailsChild2(_) => ??? + case VarArgs1(_) => ??? + } + +} + +sealed trait Works +case class SeqArgs1(a: Seq[String]) extends Works +case class SeqArgs2(a: Seq[String]) extends Works +object WorksTest { + def t12(f: Works) = f match { + case SeqArgs1(_) => ??? + case SeqArgs2(_) => ??? 
+ } + + def t1(f: Works) = f match { // inexhaustive on SeqArgs2 + case SeqArgs1(_) => ??? + } +} diff --git a/test/files/neg/t8182.check b/test/files/neg/t8182.check index a156d70883a8..0bb9d6f7bb70 100644 --- a/test/files/neg/t8182.check +++ b/test/files/neg/t8182.check @@ -6,17 +6,17 @@ t8182.scala:7: error: illegal start of simple pattern ^ t8182.scala:6: error: type application is not allowed in pattern val a b[B] // error then continue as for X - ^ + ^ t8182.scala:10: error: illegal start of simple pattern case a b[B] => // bumpy recovery ^ t8182.scala:10: error: type application is not allowed in pattern case a b[B] => // bumpy recovery - ^ + ^ t8182.scala:11: error: '=>' expected but '}' found. } ^ t8182.scala:16: error: type application is not allowed in pattern case a B[T] b => - ^ -7 errors found + ^ +7 errors diff --git a/test/files/neg/t8207.check b/test/files/neg/t8207.check index 59facd897aa3..132f3e34043a 100644 --- a/test/files/neg/t8207.check +++ b/test/files/neg/t8207.check @@ -4,4 +4,4 @@ class C { import C.this.toString } t8207.scala:3: error: '.' expected but '}' found. 
class D { import D.this.toString } ^ -two errors found +2 errors diff --git a/test/files/neg/t8217-local-alias-requires-rhs.check b/test/files/neg/t8217-local-alias-requires-rhs.check index 383b1f8d63a5..d0d9562f52ae 100644 --- a/test/files/neg/t8217-local-alias-requires-rhs.check +++ b/test/files/neg/t8217-local-alias-requires-rhs.check @@ -7,4 +7,4 @@ t8217-local-alias-requires-rhs.scala:6: error: only traits and abstract classes t8217-local-alias-requires-rhs.scala:14: error: only traits and abstract classes can have declared but undefined members def this(a: Any) = { this(); type C } ^ -three errors found +3 errors diff --git a/test/files/neg/t8219-any-any-ref-equals.check b/test/files/neg/t8219-any-any-ref-equals.check index 95d2536fba06..740378a03409 100644 --- a/test/files/neg/t8219-any-any-ref-equals.check +++ b/test/files/neg/t8219-any-any-ref-equals.check @@ -1,10 +1,10 @@ -t8219-any-any-ref-equals.scala:5: error: method ==: (x$1: Any)Boolean does not take type parameters. +t8219-any-any-ref-equals.scala:5: error: method ==: (x$1: Any): Boolean does not take type parameters. "".==[Int] ^ -t8219-any-any-ref-equals.scala:6: error: method ==: (x$1: Any)Boolean does not take type parameters. +t8219-any-any-ref-equals.scala:6: error: method ==: (x$1: Any): Boolean does not take type parameters. ("": AnyRef).==[Int] ^ -t8219-any-any-ref-equals.scala:7: error: method ==: (x$1: Any)Boolean does not take type parameters. +t8219-any-any-ref-equals.scala:7: error: method ==: (x$1: Any): Boolean does not take type parameters. 
("": Object).==[Int] ^ -three errors found +3 errors diff --git a/test/files/neg/t8228.check b/test/files/neg/t8228.check index 02eff4b1b714..776b98cd78a0 100644 --- a/test/files/neg/t8228.check +++ b/test/files/neg/t8228.check @@ -1,4 +1,4 @@ t8228.scala:4: error: recursive value foo needs type val foo = foo(null) ^ -one error found +1 error diff --git a/test/files/neg/t8229.check b/test/files/neg/t8229.check index cc504fa34e25..335e74d58ef3 100644 --- a/test/files/neg/t8229.check +++ b/test/files/neg/t8229.check @@ -1,4 +1,4 @@ t8229.scala:5: error: value + is not a member of Object o + "" ^ -one error found +1 error diff --git a/test/files/neg/t8237-default.check b/test/files/neg/t8237-default.check index 59fe21ed0390..808d22d908ff 100644 --- a/test/files/neg/t8237-default.check +++ b/test/files/neg/t8237-default.check @@ -1,4 +1,4 @@ -t8237-default.scala:5: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int]) +t8237-default.scala:5: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]]): Nothing exist so that it can be applied to arguments (List[Int]) --- because --- argument expression's type is not compatible with formal parameter type; found : List[Int] @@ -10,4 +10,4 @@ t8237-default.scala:5: error: type mismatch; required: T[T[List[T[X forSome { type X }]]]] test4(test4$default$1) ^ -two errors found +2 errors diff --git a/test/files/neg/t8244.check b/test/files/neg/t8244.check index ecbcf128e249..c39181c91d79 100644 --- a/test/files/neg/t8244.check +++ b/test/files/neg/t8244.check @@ -1,4 +1,4 @@ Test_2.scala:9: error: value exxx is not a member of T raw.t.exxx // java.lang.ClassCastException: java.lang.String cannot be cast to X ^ -one error found +1 error diff --git a/test/files/neg/t8244b.check b/test/files/neg/t8244b.check index f6cbf99eb596..855f8e31d684 100644 --- a/test/files/neg/t8244b.check +++ 
b/test/files/neg/t8244b.check @@ -1,4 +1,4 @@ t8244b.scala:15: error: value exxx is not a member of _$1 raw.t.exxx ^ -one error found +1 error diff --git a/test/files/neg/t8244b.scala b/test/files/neg/t8244b.scala index 2fb4f451a1dd..cd4cb0f7c1bc 100644 --- a/test/files/neg/t8244b.scala +++ b/test/files/neg/t8244b.scala @@ -5,7 +5,7 @@ class Raw_1[T]{ class X extends Raw_1[X] { - override def t = this + override def t() = this def exxx = 0 } diff --git a/test/files/neg/t8244c.check b/test/files/neg/t8244c.check index fd58a5847c2e..e9c8fb6e2e1a 100644 --- a/test/files/neg/t8244c.check +++ b/test/files/neg/t8244c.check @@ -1,4 +1,4 @@ t8244c.scala:15: error: value exxx is not a member of _$1 raw.t.exxx ^ -one error found +1 error diff --git a/test/files/neg/t8244c.scala b/test/files/neg/t8244c.scala index 2fb4f451a1dd..cd4cb0f7c1bc 100644 --- a/test/files/neg/t8244c.scala +++ b/test/files/neg/t8244c.scala @@ -5,7 +5,7 @@ class Raw_1[T]{ class X extends Raw_1[X] { - override def t = this + override def t() = this def exxx = 0 } diff --git a/test/files/neg/t8244e.check b/test/files/neg/t8244e.check index 01942da10c48..92e94df8a4f8 100644 --- a/test/files/neg/t8244e.check +++ b/test/files/neg/t8244e.check @@ -1,4 +1,4 @@ Test.scala:9: error: value exxx is not a member of T raw.t.exxx // java.lang.ClassCastException: java.lang.String cannot be cast to X ^ -one error found +1 error diff --git a/test/files/neg/t8265.check b/test/files/neg/t8265.check index 0c61a935286b..e7e5e4541982 100644 --- a/test/files/neg/t8265.check +++ b/test/files/neg/t8265.check @@ -1,6 +1,4 @@ -t8265.scala:2: warning: Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond +t8265.scala:3: error: covariant type CC occurs in invariant position in type CC[_] of type Coll class Foo[+CC[X]] { type Coll = CC[_] } ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +1 error diff --git a/test/files/neg/t8265.scala b/test/files/neg/t8265.scala index 48017063ef97..d7896cde87da 100644 --- a/test/files/neg/t8265.scala +++ b/test/files/neg/t8265.scala @@ -1,2 +1,3 @@ -// scalac: -Xsource:2.10 -deprecation -language:higherKinds -Xfatal-warnings +//> using options -language:higherKinds +// class Foo[+CC[X]] { type Coll = CC[_] } diff --git a/test/files/neg/t8266-invalid-interp.check b/test/files/neg/t8266-invalid-interp.check index bb2d44a80cab..06e0ec82b53f 100644 --- a/test/files/neg/t8266-invalid-interp.check +++ b/test/files/neg/t8266-invalid-interp.check @@ -1,10 +1,18 @@ t8266-invalid-interp.scala:4: error: Trailing '\' escapes nothing. - f"a\", - ^ -t8266-invalid-interp.scala:5: error: invalid escape '\x' not one of [\b, \t, \n, \f, \r, \\, \", \'] at index 1 in "a\xc". Use \\ for literal \. + f"""a\""", + ^ +t8266-invalid-interp.scala:5: error: invalid escape '\x' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 1 in "a\xc". Use \\ for literal \. f"a\xc", ^ -t8266-invalid-interp.scala:7: error: invalid escape '\v' not one of [\b, \t, \n, \f, \r, \\, \", \'] at index 1 in "a\vc". Use \\ for literal \. - f"a\vc" +t8266-invalid-interp.scala:7: error: \v is not supported, but for vertical tab use \u000b; +invalid escape '\v' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 1 in "a\vc". Use \\ for literal \. + f"a\vc", ^ -three errors found +t8266-invalid-interp.scala:8: error: \v is not supported, but for vertical tab use \u000b; +invalid escape '\v' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 0 in "\v". Use \\ for literal \. + f"\v$x%.4s, Fred", + ^ +t8266-invalid-interp.scala:11: error: invalid escape '\s' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 11 in ". And then \s.". Use \\ for literal \. + s"She said, $x. 
And then \s.", + ^ +5 errors diff --git a/test/files/neg/t8266-invalid-interp.scala b/test/files/neg/t8266-invalid-interp.scala index 4b26546880a3..2bcf49430493 100644 --- a/test/files/neg/t8266-invalid-interp.scala +++ b/test/files/neg/t8266-invalid-interp.scala @@ -1,9 +1,13 @@ - trait X { + final val x = "hello, world" def f = Seq( - f"a\", + f"""a\""", f"a\xc", // following could suggest \u000b for vertical tab, similar for \a alert - f"a\vc" + f"a\vc", + f"\v$x%.4s, Fred", + ) + def s = Seq( + s"She said, $x. And then \s.", ) } diff --git a/test/files/neg/t8291.check b/test/files/neg/t8291.check index c9972e5575f6..4667f890e640 100644 --- a/test/files/neg/t8291.check +++ b/test/files/neg/t8291.check @@ -4,4 +4,4 @@ t8291.scala:5: error: Could not find implicit for Int or String t8291.scala:6: error: Could not find implicit for Int or String implicitly[Z[String]] ^ -two errors found +2 errors diff --git a/test/files/neg/t8300-overloading.check b/test/files/neg/t8300-overloading.check index edd34d44bd6c..8856820ae410 100644 --- a/test/files/neg/t8300-overloading.check +++ b/test/files/neg/t8300-overloading.check @@ -1,7 +1,7 @@ t8300-overloading.scala:15: error: double definition: def foo(name: Test.u.Name): Nothing at line 14 and def foo(name: Test.u.TermName): Nothing at line 15 -have same type after erasure: (name: Universe#NameApi)Nothing +have same type after erasure: (name: Universe#NameApi): Nothing def foo(name: TermName) = ??? ^ -one error found +1 error diff --git a/test/files/neg/t8300-overloading.scala b/test/files/neg/t8300-overloading.scala index eb393155a03a..0f4eee7b4211 100644 --- a/test/files/neg/t8300-overloading.scala +++ b/test/files/neg/t8300-overloading.scala @@ -13,4 +13,4 @@ object Test extends App { def foo(name: Name) = ??? def foo(name: TermName) = ??? 
-} \ No newline at end of file +} diff --git a/test/files/neg/t8322.check b/test/files/neg/t8322.check new file mode 100644 index 000000000000..65a7a9298d29 --- /dev/null +++ b/test/files/neg/t8322.check @@ -0,0 +1,23 @@ +t8322.scala:17: error: ambiguous implicit values: + both method $conforms in object Predef of type [A]A => A + and value ew in trait F of type Writes[Any] + match expected type T + implicit def wr[E] = jw(implicitly, implicitly) + ^ +t8322.scala:17: error: diverging implicit expansion for type W[Any] +starting with method jw in trait F + implicit def wr[E] = jw(implicitly, implicitly) + ^ +t8322.scala:19: error: type mismatch; + found : String + required: scala.util.Either[?,?] + Right(0).right.flatMap(_ => new String()) + ^ +t8322.scala:15: warning: Implicit definition should have explicit type (inferred Writes[Seq[E]]) [quickfixable] + implicit def rw[E] = Writes[Seq[E]] { _ => "" } + ^ +t8322.scala:17: warning: Implicit definition should have explicit type [quickfixable] + implicit def wr[E] = jw(implicitly, implicitly) + ^ +2 warnings +3 errors diff --git a/test/files/neg/t8322.scala b/test/files/neg/t8322.scala new file mode 100644 index 000000000000..52d00a79f702 --- /dev/null +++ b/test/files/neg/t8322.scala @@ -0,0 +1,20 @@ + +trait W[A] +class Writes[-A] + +object Writes { + def apply[A](f: A => String) = new Writes[A]() +} + +trait F { + + implicit def jw[A](implicit wa: Writes[A], wj: W[Any]): W[A] + + implicit val ew: Writes[Any] + + implicit def rw[E] = Writes[Seq[E]] { _ => "" } + + implicit def wr[E] = jw(implicitly, implicitly) + + Right(0).right.flatMap(_ => new String()) +} diff --git a/test/files/neg/t8323.check b/test/files/neg/t8323.check new file mode 100644 index 000000000000..48aa28ce79a7 --- /dev/null +++ b/test/files/neg/t8323.check @@ -0,0 +1,7 @@ +t8323.scala:5: error: double definition: +def f(x: "Bippy"): String at line 4 and +def f(x: "Dingo"): String at line 5 +have same type after erasure: (x: String): String + def 
f(x: b.type) = x + ^ +1 error diff --git a/test/files/neg/t8323.scala b/test/files/neg/t8323.scala new file mode 100644 index 000000000000..5dc58ba71542 --- /dev/null +++ b/test/files/neg/t8323.scala @@ -0,0 +1,7 @@ +object Test { + final val a = "Bippy" + final val b = "Dingo" + def f(x: a.type) = x + def f(x: b.type) = x + def main(args: Array[String]): Unit = () +} diff --git a/test/files/neg/t8325-b.check b/test/files/neg/t8325-b.check index ec80826dc084..a0955ca7ef2d 100644 --- a/test/files/neg/t8325-b.check +++ b/test/files/neg/t8325-b.check @@ -7,4 +7,4 @@ t8325-b.scala:3: error: ';' expected but '=' found. t8325-b.scala:4: error: eof expected but '}' found. } ^ -three errors found +3 errors diff --git a/test/files/neg/t8325-c.check b/test/files/neg/t8325-c.check index 51ea4988a649..b9c4978349f6 100644 --- a/test/files/neg/t8325-c.check +++ b/test/files/neg/t8325-c.check @@ -4,4 +4,4 @@ t8325-c.scala:3: error: identifier expected but ')' found. t8325-c.scala:4: error: ')' expected but '}' found. } ^ -two errors found +2 errors diff --git a/test/files/neg/t8325.check b/test/files/neg/t8325.check index 175a0db41511..263669c3a826 100644 --- a/test/files/neg/t8325.check +++ b/test/files/neg/t8325.check @@ -9,7 +9,7 @@ t8325.scala:10: error: type mismatch; required: Int* def j(is: Int* = 5) = ??? ^ -t8325.scala:10: error: a parameter section with a `*'-parameter is not allowed to have default arguments +t8325.scala:10: error: a parameter section with a `*`-parameter is not allowed to have default arguments def j(is: Int* = 5) = ??? 
^ -four errors found +4 errors diff --git a/test/files/neg/t8341.check b/test/files/neg/t8341.check new file mode 100644 index 000000000000..e3be7363c22e --- /dev/null +++ b/test/files/neg/t8341.check @@ -0,0 +1,27 @@ +t8341.scala:24: error: type mismatch; + found : CanBuildFrom[Invariant[Nothing]] + required: CanBuildFrom[Invariant[G]] + s.combined // fail + ^ +t8341.scala:24: error: value combined is not a member of Invariant[Nothing] + s.combined // fail + ^ +t8341.scala:35: error: type mismatch; + found : CanBuildFrom[Invariant[Nothing]] + required: CanBuildFrom[Invariant[G]] + s.combined // was okay! + ^ +t8341.scala:35: error: value combined is not a member of Invariant[Nothing] + s.combined // was okay! + ^ +t8341.scala:45: error: type mismatch; + found : CanBuildFrom[Invariant[Nothing]] + required: CanBuildFrom[Invariant[G]] + convert2(s).combined + ^ +t8341.scala:48: error: type mismatch; + found : CanBuildFrom[Invariant[Nothing]] + required: CanBuildFrom[Invariant[G]] + {val c1 = convert2(s); c1.combined} + ^ +6 errors diff --git a/test/files/neg/t8431.scala b/test/files/neg/t8341.scala similarity index 100% rename from test/files/neg/t8431.scala rename to test/files/neg/t8341.scala diff --git a/test/files/neg/t8344.check b/test/files/neg/t8344.check new file mode 100644 index 000000000000..e779298cf9b4 --- /dev/null +++ b/test/files/neg/t8344.check @@ -0,0 +1,7 @@ +t8344.scala:7: error: ambiguous reference to overloaded definition, +both method f in object t of type (x: String*): Int +and method f in object t of type (x: Object): Int +match argument types (String) + t.f("") + ^ +1 error diff --git a/test/files/neg/t8344.scala b/test/files/neg/t8344.scala new file mode 100644 index 000000000000..467e678899d0 --- /dev/null +++ b/test/files/neg/t8344.scala @@ -0,0 +1,8 @@ +object t { + def f(x: Object) = 1 + def f(x: String*) = 2 +} + +class Test { + t.f("") +} diff --git a/test/files/neg/t835.check b/test/files/neg/t835.check index 
6ad18d30284f..52945997ed06 100644 --- a/test/files/neg/t835.check +++ b/test/files/neg/t835.check @@ -1,9 +1,10 @@ -t835.scala:2: error: no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) +t835.scala:2: error: Sequence argument type annotation `: _*` cannot be used here: +it is not the only argument to be passed to the single repeated parameter Int*; +sequence argument must be the last argument Console.println(List(List(1, 2, 3) : _*, List(4, 5, 6) : _*)) ^ -t835.scala:2: error: no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) +t835.scala:2: error: Sequence argument type annotation `: _*` cannot be used here: +it is not the only argument to be passed to the single repeated parameter Int* Console.println(List(List(1, 2, 3) : _*, List(4, 5, 6) : _*)) ^ -two errors found +2 errors diff --git a/test/files/neg/t836.check b/test/files/neg/t836.check index cf2faf926fd8..228bc9983606 100644 --- a/test/files/neg/t836.check +++ b/test/files/neg/t836.check @@ -4,4 +4,4 @@ t836.scala:9: error: type mismatch; (which expands to) A.this.MyObj#S val some: S = any // compiles => type X is set to scala.Any ^ -one error found +1 error diff --git a/test/files/neg/t8372.check b/test/files/neg/t8372.check index 6a6424a834b0..b89cf288c52d 100644 --- a/test/files/neg/t8372.check +++ b/test/files/neg/t8372.check @@ -1,7 +1,7 @@ -t8372.scala:7: error: No ClassTag available for T1 +t8372.scala:7: error: No ClassTag available for A1 def unzip[T1, T2](a: Array[(T1, T2)]) = a.unzip ^ t8372.scala:9: error: No ClassTag available for T1 def unzip3[T1, T2, T3](a: Array[(T1, T2, T3)]): (Array[T1], Array[T2], Array[T3]) = a.unzip3 ^ -two errors found +2 errors diff --git a/test/files/neg/t8376.check b/test/files/neg/t8376.check index 22ed942d51a3..8382a697ba05 100644 --- a/test/files/neg/t8376.check +++ b/test/files/neg/t8376.check @@ -1,7 +1,7 @@ -S.scala:2: error: overloaded method value m with 
alternatives: +S.scala:2: error: overloaded method m with alternatives: (a: J*)Unit (a: String*)Unit cannot be applied to (Int) J.m(0) ^ -one error found +1 error diff --git a/test/files/neg/t8417.check b/test/files/neg/t8417.check index 5d8ac221df86..8589c15dd441 100644 --- a/test/files/neg/t8417.check +++ b/test/files/neg/t8417.check @@ -1,15 +1,15 @@ -t8417.scala:6: warning: Adapting argument list by creating a 2-tuple: this may not be what you want. +t8417.scala:7: warning: adapted the argument list to the expected 2-tuple: add additional parens instead signature: T.f(x: Any)(y: Any): String given arguments: "hello", "world" - after adaptation: T.f(("hello", "world"): (String, String)) + after adaptation: T.f(("hello", "world"): (String, String)) [quickfixable] def g = f("hello", "world")("holy", "moly") ^ -t8417.scala:6: warning: Adapting argument list by creating a 2-tuple: this may not be what you want. +t8417.scala:7: warning: adapted the argument list to the expected 2-tuple: add additional parens instead signature: T.f(x: Any)(y: Any): String given arguments: "holy", "moly" - after adaptation: T.f(("holy", "moly"): (String, String)) + after adaptation: T.f(("holy", "moly"): (String, String)) [quickfixable] def g = f("hello", "world")("holy", "moly") ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t8417.scala b/test/files/neg/t8417.scala index 2a8b8fede2c8..8f37043ce886 100644 --- a/test/files/neg/t8417.scala +++ b/test/files/neg/t8417.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-adapted-args +//> using options -Xfatal-warnings -Xlint:adapted-args +// trait T { diff --git a/test/files/neg/t8430.check b/test/files/neg/t8430.check index 59fbf3ebd4ca..715a5e6c2f1e 100644 --- a/test/files/neg/t8430.check +++ b/test/files/neg/t8430.check @@ -1,27 +1,27 @@ -t8430.scala:16: warning: match may not be exhaustive. -It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - ^ t8430.scala:17: warning: match may not be exhaustive. It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - ^ + val f0 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + ^ t8430.scala:18: warning: match may not be exhaustive. It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - ^ + val f1 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + ^ t8430.scala:19: warning: match may not be exhaustive. It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - ^ + val f2 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + ^ t8430.scala:20: warning: match may not be exhaustive. It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - ^ + val f3 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + ^ t8430.scala:21: warning: match may not be exhaustive. 
It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - ^ -error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found -one error found + val f4 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + ^ +t8430.scala:22: warning: match may not be exhaustive. +It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP + val f5 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/t8430.scala b/test/files/neg/t8430.scala index 6b91f07822cd..972aeea3fd99 100644 --- a/test/files/neg/t8430.scala +++ b/test/files/neg/t8430.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ypatmat-exhaust-depth off +//> using options -Xfatal-warnings -Ypatmat-exhaust-depth off +// sealed trait CL3Literal case object IntLit extends CL3Literal case object CharLit extends CL3Literal @@ -13,12 +14,12 @@ case object LetC extends Tree case object LetF extends Tree object Test { - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - (tree: Tree) => tree match {case LetL(CharLit) => ??? } - (tree: Tree) => tree match {case LetL(CharLit) => ??? } + val f0 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + val f1 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + val f2 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + val f3 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + val f4 = (tree: Tree) => tree match {case LetL(CharLit) => ??? } + val f5 = (tree: Tree) => tree match {case LetL(CharLit) => ??? 
} // After the first patch for scala/bug#8430, we achieve stability: all of // these get the same warning: // diff --git a/test/files/neg/t8431.check b/test/files/neg/t8431.check deleted file mode 100644 index 75351a8ae7ec..000000000000 --- a/test/files/neg/t8431.check +++ /dev/null @@ -1,27 +0,0 @@ -t8431.scala:24: error: type mismatch; - found : CanBuildFrom[Invariant[Nothing]] - required: CanBuildFrom[Invariant[G]] - s.combined // fail - ^ -t8431.scala:24: error: value combined is not a member of Invariant[Nothing] - s.combined // fail - ^ -t8431.scala:35: error: type mismatch; - found : CanBuildFrom[Invariant[Nothing]] - required: CanBuildFrom[Invariant[G]] - s.combined // was okay! - ^ -t8431.scala:35: error: value combined is not a member of Invariant[Nothing] - s.combined // was okay! - ^ -t8431.scala:45: error: type mismatch; - found : CanBuildFrom[Invariant[Nothing]] - required: CanBuildFrom[Invariant[G]] - convert2(s).combined - ^ -t8431.scala:48: error: type mismatch; - found : CanBuildFrom[Invariant[Nothing]] - required: CanBuildFrom[Invariant[G]] - {val c1 = convert2(s); c1.combined} - ^ -6 errors found diff --git a/test/files/neg/t8450.check b/test/files/neg/t8450.check index c2bbf219552a..9cef0dce70b1 100644 --- a/test/files/neg/t8450.check +++ b/test/files/neg/t8450.check @@ -1,6 +1,6 @@ -t8450.scala:6: warning: implicit numeric widening - def elapsed: Foo = (System.nanoTime - 100L).foo - ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +t8450.scala:7: warning: implicit numeric widening + def elapsed: Foo = (System.nanoTime.toInt - 100).foo + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t8450.scala b/test/files/neg/t8450.scala index 4ae709dbca40..55f60ecd61d7 100644 --- a/test/files/neg/t8450.scala +++ b/test/files/neg/t8450.scala @@ -1,13 +1,14 @@ -// scalac: -Ywarn-numeric-widen -Xfatal-warnings +//> using options -Ywarn-numeric-widen -Xfatal-warnings +// trait Foo class WarnWidening { implicit class FooDouble(d: Double) { def foo = new Foo {} } - def elapsed: Foo = (System.nanoTime - 100L).foo + def elapsed: Foo = (System.nanoTime.toInt - 100).foo } class NoWarnWidening { - implicit class FooLong(l: Long) { def foo = new Foo {} } + implicit class FooInt(i: Int) { def foo = new Foo {} } implicit class FooDouble(d: Double) { def foo = new Foo {} } - def elapsed: Foo = (System.nanoTime - 100L).foo + def elapsed: Foo = (System.nanoTime.toInt - 100).foo } diff --git a/test/files/neg/t846.check b/test/files/neg/t846.check index 242a8001ff3c..c31b9c1d9144 100644 --- a/test/files/neg/t846.check +++ b/test/files/neg/t846.check @@ -3,4 +3,4 @@ t846.scala:9: error: type mismatch; required: B if (a != null) f(a) else null ^ -one error found +1 error diff --git a/test/files/neg/t8463.check b/test/files/neg/t8463.check index 9aaacf83917a..572a460728ed 100644 --- a/test/files/neg/t8463.check +++ b/test/files/neg/t8463.check @@ -1,16 +1,16 @@ t8463.scala:5: error: type mismatch; - found : Long + found : 5L required: ?T[Long] Note that implicit conversions are not applicable because they are ambiguous: - both method longWrapper in class LowPriorityImplicits of type (x: Long)scala.runtime.RichLong - and method ArrowAssoc in object Predef of type [A](self: A)ArrowAssoc[A] - are possible conversion functions from Long to ?T[Long] + both method longWrapper in class LowPriorityImplicits of type (x: Long): scala.runtime.RichLong + and method ArrowAssoc in object Predef of type [A](self: A): ArrowAssoc[A] + are possible conversion functions from 5L to ?T[Long] insertCell(Foo(5)) ^ -t8463.scala:5: error: no type 
parameters for method apply: (activity: T[Long])Test.Foo[T] in object Foo exist so that it can be applied to arguments (Long) +t8463.scala:5: error: no type parameters for method apply: (activity: T[Long]): Test.Foo[T] in object Foo exist so that it can be applied to arguments (Long) --- because --- argument expression's type is not compatible with formal parameter type; - found : Long + found : 5L required: ?T[Long] insertCell(Foo(5)) ^ @@ -19,9 +19,4 @@ t8463.scala:5: error: type mismatch; required: T[Long] insertCell(Foo(5)) ^ -t8463.scala:5: error: type mismatch; - found : Test.Foo[T] - required: Test.Foo[Test.Cell] - insertCell(Foo(5)) - ^ -four errors found +3 errors diff --git a/test/files/neg/t8511.check b/test/files/neg/t8511.check new file mode 100644 index 000000000000..1b9d60455acb --- /dev/null +++ b/test/files/neg/t8511.check @@ -0,0 +1,11 @@ +t8511.scala:28: warning: unreachable code + ??? + ^ +t8511.scala:23: warning: match may not be exhaustive. +It would fail on the following inputs: Bar(_), Baz(), EatsExhaustiveWarning(_) + private def logic(head: Expr): String = head match { + ^ +warning: 1 deprecation (since 2.13.0); re-run with -deprecation for details +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t8511.scala b/test/files/neg/t8511.scala new file mode 100644 index 000000000000..9be381769f18 --- /dev/null +++ b/test/files/neg/t8511.scala @@ -0,0 +1,30 @@ +//> using options -Werror +sealed trait Expr +final case class Foo(other: Option[String]) extends Expr +final case class Bar(someConstant: String) extends Expr +final case class Baz() extends Expr +final case class EatsExhaustiveWarning(other: Reference) extends Expr + +sealed trait Reference { + val value: String +} + +object Reference { + def unapply(reference: Reference): Option[(String)] = { + Some(reference.value) + } +} + +object EntryPoint { + def main(args: Array[String]) { + println("Successfully ran") + } + + private def logic(head: Expr): String = head match { + case Foo(_) => + ??? + // Commenting this line only causes the exhaustive search warning to be emitted + case EatsExhaustiveWarning(Reference(text)) => + ??? + } +} diff --git a/test/files/neg/t8525.check b/test/files/neg/t8525.check index 028fed915710..cb9756115bf5 100644 --- a/test/files/neg/t8525.check +++ b/test/files/neg/t8525.check @@ -1,15 +1,15 @@ -t8525.scala:8: warning: Adapting argument list by creating a 2-tuple: this may not be what you want. +t8525.scala:9: warning: adapted the argument list to the expected 2-tuple: add additional parens instead signature: X.f(p: (Int, Int)): Int given arguments: 3, 4 - after adaptation: X.f((3, 4): (Int, Int)) + after adaptation: X.f((3, 4): (Int, Int)) [quickfixable] def g = f(3, 4) // adapted ^ -t8525.scala:10: warning: private[this] value name in class X shadows mutable name inherited from class Named. Changes to name will not be visible within class X - you may want to give them distinct names. +t8525.scala:11: warning: private[this] value name in class X shadows mutable name inherited from class Named. Changes to name will not be visible within class X; you may want to give them distinct names. 
override def toString = name // shadowing mutable var name ^ -t8525.scala:9: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead +t8525.scala:10: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead [quickfixable] def u: Unit = () // unitarian universalist ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/t8525.scala b/test/files/neg/t8525.scala index e1ff7ba981fd..96d5e3a51b45 100644 --- a/test/files/neg/t8525.scala +++ b/test/files/neg/t8525.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Xlint:-missing-interpolator -Xlint +//> using options -Xfatal-warnings -Xlint:-missing-interpolator -Xlint +// class Named(var name: String) diff --git a/test/files/neg/t8534.check b/test/files/neg/t8534.check index 297e7c1bebba..32d6431b3560 100644 --- a/test/files/neg/t8534.check +++ b/test/files/neg/t8534.check @@ -1,4 +1,4 @@ t8534.scala:6: error: MyTrait is not an enclosing class class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false} ^ -one error found +1 error diff --git a/test/files/neg/t8534b.check b/test/files/neg/t8534b.check index 39ffa4119406..31707c92d5fb 100644 --- a/test/files/neg/t8534b.check +++ b/test/files/neg/t8534b.check @@ -1,4 +1,4 @@ t8534b.scala:3: error: stable identifier required, but foo.type found. type T = foo.type#Foo ^ -one error found +1 error diff --git a/test/files/neg/t856.check b/test/files/neg/t856.check index fb93f96d9f30..fa4e6577e6e5 100644 --- a/test/files/neg/t856.check +++ b/test/files/neg/t856.check @@ -1,14 +1,11 @@ -t856.scala:3: error: class ComplexRect needs to be abstract, since: -it has 2 unimplemented members. -/** As seen from class ComplexRect, the missing signatures are as follows. - * For convenience, these are usable as stub implementations. 
- */ +t856.scala:3: error: class ComplexRect needs to be abstract. +Missing implementations for 2 members. // Members declared in scala.Equals def canEqual(that: Any): Boolean = ??? // Members declared in scala.Product2 - def _2: Double = ??? + def _2: Double = ??? // implements `def _2: T2` class ComplexRect(val _1:Double, _2:Double) extends Complex { ^ -one error found +1 error diff --git a/test/files/neg/t8597.check b/test/files/neg/t8597.check index b7b022368da5..17a22db515f6 100644 --- a/test/files/neg/t8597.check +++ b/test/files/neg/t8597.check @@ -1,21 +1,21 @@ -t8597.scala:3: warning: abstract type T in type pattern Some[T] is unchecked since it is eliminated by erasure +t8597.scala:4: warning: abstract type T in type pattern Some[T] is unchecked since it is eliminated by erasure def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to scala/bug#8597) ^ -t8597.scala:6: warning: abstract type pattern T is unchecked since it is eliminated by erasure +t8597.scala:7: warning: abstract type pattern T is unchecked since it is eliminated by erasure def warn1[T] = (null: Any) match { case _: T => } // warn ^ -t8597.scala:7: warning: non-variable type argument String in type pattern Some[String] is unchecked since it is eliminated by erasure +t8597.scala:8: warning: non-variable type argument String in type pattern Some[String] is unchecked since it is eliminated by erasure def warn2 = (null: Any) match { case _: Some[String] => } // warn ^ -t8597.scala:8: warning: non-variable type argument Unchecked.this.C in type pattern Some[Unchecked.this.C] is unchecked since it is eliminated by erasure +t8597.scala:9: warning: non-variable type argument Unchecked.this.C in type pattern Some[Unchecked.this.C] is unchecked since it is eliminated by erasure (null: Any) match { case _: Some[C] => } // warn ^ -t8597.scala:19: warning: abstract type T in type pattern Array[T] is unchecked since it is eliminated by erasure +t8597.scala:20: warning: 
abstract type T in type pattern Array[T] is unchecked since it is eliminated by erasure def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to scala/bug#8597) ^ -t8597.scala:27: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure +t8597.scala:28: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found -one error found +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/t8597.scala b/test/files/neg/t8597.scala index 0acd0211221c..815032539721 100644 --- a/test/files/neg/t8597.scala +++ b/test/files/neg/t8597.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class Unchecked[C] { def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to scala/bug#8597) @@ -8,7 +9,7 @@ class Unchecked[C] { (null: Any) match { case _: Some[C] => } // warn // These must remain without warnings. These are excerpts from - // related tests that are more exhauative. + // related tests that are more exhaustive. 
class C; class D extends C def okay = (List(new D) : Seq[D]) match { case _: List[C] => case _ => } // nowarn class B2[A, B] diff --git a/test/files/neg/t8597b.check b/test/files/neg/t8597b.check index 88fddcd23340..2a587a1db1de 100644 --- a/test/files/neg/t8597b.check +++ b/test/files/neg/t8597b.check @@ -1,6 +1,6 @@ -t8597b.scala:19: warning: non-variable type argument T in type pattern Some[T] is unchecked since it is eliminated by erasure +t8597b.scala:20: warning: non-variable type argument T in type pattern Some[T] is unchecked since it is eliminated by erasure case _: Some[T] => // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t8597b.scala b/test/files/neg/t8597b.scala index d2f5a937ab31..4a4f38d79d39 100644 --- a/test/files/neg/t8597b.scala +++ b/test/files/neg/t8597b.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Unchecked { (null: Any) match { case _: Some[t] => diff --git a/test/files/neg/t8610-arg.check b/test/files/neg/t8610-arg.check index 3f35c1f674a6..50ae41a3389e 100644 --- a/test/files/neg/t8610-arg.check +++ b/test/files/neg/t8610-arg.check @@ -1,6 +1,6 @@ -t8610-arg.scala:9: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead +t8610-arg.scala:10: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead [quickfixable] def u: Unit = () // unitarian universalist ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t8610-arg.scala b/test/files/neg/t8610-arg.scala index 2e1ad7669eac..380796019999 100644 --- a/test/files/neg/t8610-arg.scala +++ b/test/files/neg/t8610-arg.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Xlint nullary-unit +//> using options -Xfatal-warnings -Xlint nullary-unit +// class Named(var name: String) diff --git a/test/files/neg/t8610.check b/test/files/neg/t8610.check index 2166515222f0..92aeda9690a1 100644 --- a/test/files/neg/t8610.check +++ b/test/files/neg/t8610.check @@ -1,18 +1,18 @@ -t8610.scala:6: warning: possible missing interpolator: detected interpolated identifier `$name` +t8610.scala:7: warning: possible missing interpolator: detected interpolated identifier `$name` def x = "Hi, $name" // missing interp ^ -t8610.scala:8: warning: Adapting argument list by creating a 2-tuple: this may not be what you want. +t8610.scala:9: warning: adapted the argument list to the expected 2-tuple: add additional parens instead signature: X.f(p: (Int, Int)): Int given arguments: 3, 4 - after adaptation: X.f((3, 4): (Int, Int)) + after adaptation: X.f((3, 4): (Int, Int)) [quickfixable] def g = f(3, 4) // adapted ^ -t8610.scala:10: warning: private[this] value name in class X shadows mutable name inherited from class Named. Changes to name will not be visible within class X - you may want to give them distinct names. +t8610.scala:11: warning: private[this] value name in class X shadows mutable name inherited from class Named. Changes to name will not be visible within class X; you may want to give them distinct names. 
override def toString = name // shadowing mutable var name ^ -t8610.scala:9: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead +t8610.scala:10: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead [quickfixable] def u: Unit = () // unitarian universalist ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t8610.scala b/test/files/neg/t8610.scala index c4b8d4b9c987..62443d2af273 100644 --- a/test/files/neg/t8610.scala +++ b/test/files/neg/t8610.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Xlint +//> using options -Xfatal-warnings -Xlint +// class Named(var name: String) diff --git a/test/files/neg/t8630.check b/test/files/neg/t8630.check index 98b084b15333..c68f8212b72e 100644 --- a/test/files/neg/t8630.check +++ b/test/files/neg/t8630.check @@ -4,4 +4,4 @@ package bobsdelights abstract class Fruit( val name: String, val color: Strin t8630.scala:1: error: '}' expected but eof found. package bobsdelights abstract class Fruit( val name: String, val color: String ) object Fruits { object Apple extends Fruit("apple", "red") object Orange extends Fruit("orange", "orange") object Pear extends Fruit("pear", "yellowish") val menu = List(Apple, Orange, Pear) } ^ -two errors found +2 errors diff --git a/test/files/neg/t8650.check b/test/files/neg/t8650.check new file mode 100644 index 000000000000..45fc60f8b192 --- /dev/null +++ b/test/files/neg/t8650.check @@ -0,0 +1,9 @@ +t8650.scala:19: warning: method s in class C is deprecated (since MyLib 17): hello world + def t = s + ^ +t8650.scala:21: warning: method f in class C is deprecated (since MyLib 17): hello world + def g = f + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t8650.scala b/test/files/neg/t8650.scala new file mode 100644 index 000000000000..98b33b116ab3 --- /dev/null +++ b/test/files/neg/t8650.scala @@ -0,0 +1,22 @@ + +//> using options -Werror -Xlint + +object Strings { + final val greeting = "hello" + final val lib = "MyLib" + final val version = "17" +} + +class C { + import Strings._ + + @deprecated(s"$greeting world", since=s"$lib $version") + def s = 42 + + @deprecated(f"$greeting world", since=f"$lib $version") + def f = 42 + + def t = s + + def g = f +} diff --git a/test/files/neg/t8650b.check b/test/files/neg/t8650b.check new file mode 100644 index 000000000000..7cf657076d14 --- /dev/null +++ b/test/files/neg/t8650b.check @@ -0,0 +1,7 @@ +t8650b.scala:13: error: annotation argument needs to be a constant; found: "".+("MyLib").+(" ").+(17) + @deprecated(s"$greeting world", since=s"$lib $version") + ^ +t8650b.scala:16: error: annotation argument needs to be a constant; found: "".+("MyLib").+(" ").+(17) + @deprecated(f"$greeting world", since=f"$lib $version") + ^ +2 errors diff --git a/test/files/neg/t8650b.scala b/test/files/neg/t8650b.scala new file mode 100644 index 000000000000..217b0fd1845b --- /dev/null +++ b/test/files/neg/t8650b.scala @@ -0,0 +1,22 @@ + +//> using options -Werror -Xlint + +object Strings { + final val greeting = "hello" + final val lib = "MyLib" + final val version = 17 +} + +class C { + import Strings._ + + @deprecated(s"$greeting world", since=s"$lib $version") + def s = 42 + + @deprecated(f"$greeting world", since=f"$lib $version") + def f = 42 + + def t = s + + def g = f +} diff --git a/test/files/neg/t8667.check b/test/files/neg/t8667.check index 82451ee5d6ad..c9852dd91c70 100644 --- a/test/files/neg/t8667.check +++ b/test/files/neg/t8667.check @@ -1,91 +1,212 @@ -t8667.scala:6: error: too many arguments (3) for constructor C: (a: Int, b: Int)C -Note that 'c' is not a parameter name of the invoked method. 
+t8667.scala:6: error: unknown parameter name: c def c2 = new C(a = 42, b = 17, c = 5) ^ +t8667.scala:6: error: too many arguments (found 3, expected 2) for constructor C: (a: Int, b: Int): C + def c2 = new C(a = 42, b = 17, c = 5) + ^ t8667.scala:7: error: unknown parameter name: c def c3 = new C(b = 42, a = 17, c = 5) ^ -t8667.scala:7: error: too many arguments (3) for constructor C: (a: Int, b: Int)C +t8667.scala:7: error: too many arguments (found 3, expected 2) for constructor C: (a: Int, b: Int): C def c3 = new C(b = 42, a = 17, c = 5) ^ t8667.scala:8: error: positional after named argument. def c4 = new C(b = 42, a = 17, 5) ^ -t8667.scala:8: error: too many arguments (3) for constructor C: (a: Int, b: Int)C +t8667.scala:8: error: too many arguments (found 3, expected 2) for constructor C: (a: Int, b: Int): C def c4 = new C(b = 42, a = 17, 5) ^ -t8667.scala:9: error: not found: value c +t8667.scala:9: error: unknown parameter name: c def c5 = new C(a = 42, c = 17) - ^ -t8667.scala:10: error: parameter 'b' is already specified at parameter position 2 -Note that 'c' is not a parameter name of the invoked method. + ^ +t8667.scala:10: error: unknown parameter name: c def c6 = new C(a = 42, c = 17, b = 5) - ^ -t8667.scala:10: error: too many arguments (3) for constructor C: (a: Int, b: Int)C -Note that 'c' is not a parameter name of the invoked method. + ^ +t8667.scala:10: error: too many arguments (found 3, expected 2) for constructor C: (a: Int, b: Int): C def c6 = new C(a = 42, c = 17, b = 5) ^ -t8667.scala:11: error: parameter 'a' is already specified at parameter position 1 -Note that 'c' is not a parameter name of the invoked method. +t8667.scala:11: error: unknown parameter name: c def c7 = new C(c = 42, a = 17, b = 5) - ^ -t8667.scala:11: error: too many arguments (3) for constructor C: (a: Int, b: Int)C -Note that 'c' is not a parameter name of the invoked method. 
+ ^ +t8667.scala:11: error: too many arguments (found 3, expected 2) for constructor C: (a: Int, b: Int): C def c7 = new C(c = 42, a = 17, b = 5) ^ t8667.scala:12: error: parameter 'b' is already specified at parameter position 2 def c8 = new C(42, 17, b = 5) ^ -t8667.scala:12: error: too many arguments (3) for constructor C: (a: Int, b: Int)C +t8667.scala:12: error: too many arguments (found 3, expected 2) for constructor C: (a: Int, b: Int): C def c8 = new C(42, 17, b = 5) ^ -t8667.scala:13: error: parameter 'b' is already specified at parameter position 2 -Note that 'c' is not a parameter name of the invoked method. +t8667.scala:13: error: unknown parameter name: c def c9 = new C(a = 42, c = 17, d = 3, b = 5) - ^ -t8667.scala:13: error: too many arguments (4) for constructor C: (a: Int, b: Int)C -Note that 'c', 'd' are not parameter names of the invoked method. + ^ +t8667.scala:13: error: unknown parameter name: d def c9 = new C(a = 42, c = 17, d = 3, b = 5) ^ -t8667.scala:14: error: too many arguments (4) for constructor C: (a: Int, b: Int)C -Note that 'd', 'c' are not parameter names of the invoked method. 
+t8667.scala:13: error: too many arguments (found 4, expected 2) for constructor C: (a: Int, b: Int): C + def c9 = new C(a = 42, c = 17, d = 3, b = 5) + ^ +t8667.scala:14: error: unknown parameter name: d def c0 = new C(42, 17, d = 3, c = 5) ^ -t8667.scala:25: error: no arguments allowed for nullary method f0: ()Int +t8667.scala:14: error: unknown parameter name: c + def c0 = new C(42, 17, d = 3, c = 5) + ^ +t8667.scala:14: error: too many arguments (found 4, expected 2) for constructor C: (a: Int, b: Int): C + def c0 = new C(42, 17, d = 3, c = 5) + ^ +t8667.scala:31: error: no arguments allowed for nullary method f0: (): Int f0(1) ^ -t8667.scala:26: error: too many arguments (2) for method f1: (i: Int)Int +t8667.scala:32: error: too many arguments (found 2, expected 1) for method f1: (i: Int): Int f1(1, 2) ^ -t8667.scala:27: error: too many arguments (3) for method f1: (i: Int)Int +t8667.scala:33: error: too many arguments (found 3, expected 1) for method f1: (i: Int): Int f1(1, 2, 3) ^ -t8667.scala:28: error: 3 more arguments than can be applied to method f1: (i: Int)Int +t8667.scala:34: error: too many arguments (found 4, expected 1) for method f1: (i: Int): Int f1(1, 2, 3, 4) ^ -t8667.scala:29: error: 3 more arguments than can be applied to method f1: (i: Int)Int -Note that 'j' is not a parameter name of the invoked method. +t8667.scala:35: error: unknown parameter name: j f1(1, j = 2, 3, 4) ^ -t8667.scala:30: error: 3 more arguments than can be applied to method f1: (i: Int)Int -Note that 'j', 'k' are not parameter names of the invoked method. +t8667.scala:35: error: too many arguments (found 4, expected 1) for method f1: (i: Int): Int + f1(1, j = 2, 3, 4) + ^ +t8667.scala:36: error: unknown parameter name: j f1(1, j = 2, k = 3, 4) ^ -t8667.scala:31: error: parameter 'i' is already specified at parameter position 1 -Note that 'k' is not a parameter name of the invoked method. 
+t8667.scala:36: error: unknown parameter name: k + f1(1, j = 2, k = 3, 4) + ^ +t8667.scala:36: error: too many arguments (found 4, expected 1) for method f1: (i: Int): Int + f1(1, j = 2, k = 3, 4) + ^ +t8667.scala:37: error: unknown parameter name: k f2(k = 1, i = 2, j = 3) - ^ -t8667.scala:31: error: too many arguments (3) for method f2: (i: Int, j: Int)Int -Note that 'k' is not a parameter name of the invoked method. + ^ +t8667.scala:37: error: too many arguments (found 3, expected 2) for method f2: (i: Int, j: Int): Int f2(k = 1, i = 2, j = 3) ^ -t8667.scala:32: error: one more argument than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int +t8667.scala:38: error: too many arguments (found 7, expected 6) for method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int): Int f6(1, 2, 3, 4, 5, 6, 7) ^ -t8667.scala:33: error: 2 more arguments than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int +t8667.scala:39: error: too many arguments (found 8, expected 6) for method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int): Int f6(1, 2, 3, 4, 5, 6, 7, 8) ^ -t8667.scala:34: error: 15 arguments but expected 12 for method f12: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int)Int +t8667.scala:40: error: too many arguments (found 15, expected 12) for method f12: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int): Int f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) ^ -26 errors found +t8667.scala:46: error: too many arguments (found 22, expected 1) for method mini: (x: A): Int + mini(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + ^ +t8667.scala:48: error: too many arguments (found 24, which exceeds the largest Tuple) for method max: (x: Any): Int + max(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + ^ +t8667.scala:49: error: too many 
arguments (found 24, which exceeds the largest Tuple) for method maxi: (x: A): Int + maxi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + ^ +t8667.scala:50: error: too many arguments (found 24, which exceeds the largest Tuple) for method moxi: (x: AnyRef): Int + moxi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + ^ +t8667.scala:51: error: too many arguments (found 24, which exceeds the largest Tuple) for method movi: (x: Object): Int + movi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + ^ +t8667.scala:52: error: too many arguments (found 24, expected 1) for method mini: (x: A): Int + mini(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + ^ +t8667.scala:64: error: too many arguments (found 3, expected 2-tuple) for method t: (t: (Int, Int)): Nothing + def z = t(42, 27, 17) + ^ +t8667.scala:70: error: unknown parameter name: c + def x = f(c = "hello, world") + ^ +t8667.scala:76: error: can't supply unit value with infix notation because nullary method f0: (): Int takes no arguments; use dotted invocation instead: x.f0() + x f0 () + ^ +t8667.scala:77: error: no arguments allowed for nullary method f0: (): Int + x.f0(()) + ^ +t8667.scala:89: error: unknown parameter name: k + f2(k = 42) + ^ +t8667.scala:89: error: not enough arguments for method f2: (i: Int, j: Int): Unit. +Unspecified value parameters i, j. 
+ f2(k = 42) + ^ +t8667.scala:90: error: unknown parameter name: k + f2(17, k = 42) + ^ +t8667.scala:91: error: unknown parameter name: k + f2(17, 27, k = 42) + ^ +t8667.scala:91: error: too many arguments (found 3, expected 2) for method f2: (i: Int, j: Int): Unit + f2(17, 27, k = 42) + ^ +t8667.scala:98: error: type mismatch; + found : String("not a num 1") + required: Double + def close() = almostTupleAdapted(math.floor("not a num 1"), math.floor("not a num 2")) + ^ +t8667.scala:98: error: type mismatch; + found : String("not a num 2") + required: Double + def close() = almostTupleAdapted(math.floor("not a num 1"), math.floor("not a num 2")) + ^ +t8667.scala:98: error: too many arguments (found 2, expected 2-tuple) for method almostTupleAdapted: (t2: (Int, String)): Unit + def close() = almostTupleAdapted(math.floor("not a num 1"), math.floor("not a num 2")) + ^ +t8667.scala:102: error: type mismatch; + found : String("not a num") + required: Double + def missed() = missingArgs(math.floor("not a num")) + ^ +t8667.scala:102: error: not enough arguments for method missingArgs: (d: Double, s: String): Unit. +Unspecified value parameter s. + def missed() = missingArgs(math.floor("not a num")) + ^ +t8667.scala:106: error: type mismatch; + found : String("not a num") + required: Double + def miscount() = tooManyArgs(math.floor("not a num")) + ^ +t8667.scala:106: error: not enough arguments for method tooManyArgs: (s: String, i: Int): Unit. +Unspecified value parameter i. 
+ def miscount() = tooManyArgs(math.floor("not a num")) + ^ +t8667.scala:108: error: not found: value doesntExist + def nonesuch(): Unit = doesntExist(math.floor("not a num 1"), math.floor("not a num 2")) + ^ +t8667.scala:108: error: type mismatch; + found : String("not a num 1") + required: Double + def nonesuch(): Unit = doesntExist(math.floor("not a num 1"), math.floor("not a num 2")) + ^ +t8667.scala:108: error: type mismatch; + found : String("not a num 2") + required: Double + def nonesuch(): Unit = doesntExist(math.floor("not a num 1"), math.floor("not a num 2")) + ^ +t8667.scala:110: error: not found: value doesntExist + def nonesuchical: Unit = doesntExist(i = math.floor("not a num 1"), j = math.floor("not a num 2")) + ^ +t8667.scala:110: error: type mismatch; + found : String("not a num 1") + required: Double + def nonesuchical: Unit = doesntExist(i = math.floor("not a num 1"), j = math.floor("not a num 2")) + ^ +t8667.scala:110: error: type mismatch; + found : String("not a num 2") + required: Double + def nonesuchical: Unit = doesntExist(i = math.floor("not a num 1"), j = math.floor("not a num 2")) + ^ +t8667.scala:112: error: value munge is not a member of List[Int] + def badApplied: Unit = List(42).munge(x = 27) + ^ +t8667.scala:114: error: value munge is not a member of List[Int] + def badApply: Unit = List(42).munge { x = 27 } + ^ +t8667.scala:114: error: not found: value x + def badApply: Unit = List(42).munge { x = 27 } + ^ +64 errors diff --git a/test/files/neg/t8667.scala b/test/files/neg/t8667.scala index d55582ca6b8f..d65dc99589f1 100644 --- a/test/files/neg/t8667.scala +++ b/test/files/neg/t8667.scala @@ -15,11 +15,17 @@ trait T { } trait X { + trait Upper def f0() = 42 def f1(i: Int) = 42 def f2(i: Int, j: Int) = 42 def f6(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int) = 42 def f12(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int) = 42 + def max(x: Any) = 42 + def maxi[A](x: A) = 42 + def mini[A 
<: Upper](x: A) = 42 + def moxi(x: AnyRef) = 42 + def movi(x: Object) = 42 def g() = { f0(1) @@ -32,6 +38,78 @@ trait X { f6(1, 2, 3, 4, 5, 6, 7) f6(1, 2, 3, 4, 5, 6, 7, 8) f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) + // confirm auto-tupling + max(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + maxi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + moxi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + movi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + mini(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) + // clarify auto-tupling failure + max(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + maxi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + moxi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + movi(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) + mini(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) () } } + +trait Y { + def f[A](a: A) = ??? + def g(x: Any) = ??? + def t(t: (Int, Int)) = ??? 
+ + def x = f(42, 27, 17) + def y = g(42, 27, 17) + def z = t(42, 27, 17) +} + +trait Z { + def f(a: Int = 1, b: String = "") = b * a + + def x = f(c = "hello, world") +} + +object app { + def test(): Unit = { + val x = null.asInstanceOf[X] + x f0 () + x.f0(()) + } + def workaround(): Unit = { + import language.postfixOps + val x = null.asInstanceOf[X] + x f0 + } +} + +trait Nuance { + def f2(i: Int, j: Int) = () + def `always report bad named arg`: Unit = { + f2(k = 42) + f2(17, k = 42) + f2(17, 27, k = 42) + } +} + +trait FurtherFailures { + def almostTupleAdapted(t2: (Int, String)): Unit = () + + def close() = almostTupleAdapted(math.floor("not a num 1"), math.floor("not a num 2")) + + def missingArgs(d: Double, s: String): Unit = () + + def missed() = missingArgs(math.floor("not a num")) + + def tooManyArgs(s: String, i: Int): Unit = () + + def miscount() = tooManyArgs(math.floor("not a num")) + + def nonesuch(): Unit = doesntExist(math.floor("not a num 1"), math.floor("not a num 2")) + + def nonesuchical: Unit = doesntExist(i = math.floor("not a num 1"), j = math.floor("not a num 2")) + + def badApplied: Unit = List(42).munge(x = 27) + + def badApply: Unit = List(42).munge { x = 27 } +} diff --git a/test/files/neg/t8675.check b/test/files/neg/t8675.check index 4e44fba91867..bfc4ac3fe330 100644 --- a/test/files/neg/t8675.check +++ b/test/files/neg/t8675.check @@ -8,4 +8,4 @@ t8675.scala:22: error: type mismatch; required: String new X().m(x[A]({new isString(true)})) // !!! allowed ^ -two errors found +2 errors diff --git a/test/files/neg/t8675.scala b/test/files/neg/t8675.scala index ca9bb57ffaa8..fc49dc87584e 100644 --- a/test/files/neg/t8675.scala +++ b/test/files/neg/t8675.scala @@ -8,16 +8,16 @@ class Test { def x[A](a: Any): A = ??? - def test { + def test: Unit = { val a = Array[A]() a.update(0, x[A]({new isString(true)})) // !!! 
allowed // boils down to class X { - def m(p: Any) {} + def m(p: Any): Unit = {} } implicit class XOps(x: X) { - def m(p: Any) {} + def m(p: Any): Unit = {} } new X().m(x[A]({new isString(true)})) // !!! allowed } diff --git a/test/files/neg/t8675b.check b/test/files/neg/t8675b.check index cb7ac8af59a1..2426ce6356c1 100644 --- a/test/files/neg/t8675b.check +++ b/test/files/neg/t8675b.check @@ -3,4 +3,4 @@ The argument types of an anonymous function must be fully known. (SLS 8.5) Expected type was: List[Test.Reportable1[?,?]] => Boolean for (path: List[Any] <- (null : Engine1).asRequirement.pathsIncludingSelf.toList) { ^ -one error found +1 error diff --git a/test/files/neg/t8685.check b/test/files/neg/t8685.check index 7c395c5e63e4..db0953d9c243 100644 --- a/test/files/neg/t8685.check +++ b/test/files/neg/t8685.check @@ -1,48 +1,45 @@ -t8685.scala:7: warning: constructor D in class D is deprecated (since now): ctor D is depr -case class D @deprecated("ctor D is depr", since="now") (i: Int) - ^ -t8685.scala:36: warning: class C is deprecated (since now): class C is depr +t8685.scala:37: warning: class C is deprecated (since now): class C is depr def f = C(42) ^ -t8685.scala:37: warning: constructor D in class D is deprecated (since now): ctor D is depr +t8685.scala:38: warning: constructor D in class D is deprecated (since now): ctor D is depr def g = D(42) ^ -t8685.scala:38: warning: object E is deprecated (since now): module E is depr +t8685.scala:39: warning: class E is deprecated (since now): class E is depr def h = E(42) ^ -t8685.scala:38: warning: class E is deprecated (since now): class E is depr +t8685.scala:39: warning: object E is deprecated (since now): module E is depr def h = E(42) ^ -t8685.scala:39: warning: object F is deprecated (since now): module F is depr +t8685.scala:40: warning: object F is deprecated (since now): module F is depr def i = F.G(42) ^ -t8685.scala:40: warning: object F in object Extra is deprecated (since now): Extra module F is 
depr +t8685.scala:41: warning: object F in object Extra is deprecated (since now): Extra module F is depr def j = Extra.F.G(42) ^ -t8685.scala:44: warning: value gg in trait Applies is deprecated (since now): member gg +t8685.scala:45: warning: value gg in trait Applies is deprecated (since now): member gg def k = this.gg.H(0) ^ -t8685.scala:46: warning: class K in object J is deprecated (since now): Inner K is depr +t8685.scala:47: warning: class K in object J is deprecated (since now): Inner K is depr def l = J.K(42) ^ -t8685.scala:49: warning: class C is deprecated (since now): class C is depr +t8685.scala:50: warning: class C is deprecated (since now): class C is depr def f = new C(42) ^ -t8685.scala:50: warning: constructor D in class D is deprecated (since now): ctor D is depr +t8685.scala:51: warning: constructor D in class D is deprecated (since now): ctor D is depr def g = new D(42) ^ -t8685.scala:51: warning: class E is deprecated (since now): class E is depr +t8685.scala:52: warning: class E is deprecated (since now): class E is depr def h = new E(42) ^ -t8685.scala:52: warning: object F is deprecated (since now): module F is depr +t8685.scala:53: warning: object F is deprecated (since now): module F is depr def i = new F.G(42) ^ -t8685.scala:53: warning: object F in object Extra is deprecated (since now): Extra module F is depr +t8685.scala:54: warning: object F in object Extra is deprecated (since now): Extra module F is depr def j = new Extra.F.G(42) ^ -t8685.scala:54: warning: class K in object J is deprecated (since now): Inner K is depr +t8685.scala:55: warning: class K in object J is deprecated (since now): Inner K is depr def l = new J.K(42) ^ -error: No warnings can be incurred under -Xfatal-warnings. -15 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+14 warnings +1 error diff --git a/test/files/neg/t8685.scala b/test/files/neg/t8685.scala index 28990d180e64..af9db61ea9c3 100644 --- a/test/files/neg/t8685.scala +++ b/test/files/neg/t8685.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// @deprecated("class C is depr", since="now") diff --git a/test/files/neg/t8700a.check b/test/files/neg/t8700a.check index e5b6314e9cff..5a77148d38fb 100644 --- a/test/files/neg/t8700a.check +++ b/test/files/neg/t8700a.check @@ -6,6 +6,6 @@ Bar.scala:7: warning: match may not be exhaustive. It would fail on the following input: B def bar2(foo: Baz) = foo match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/t8700a/Bar.scala b/test/files/neg/t8700a/Bar.scala index 994b49d8ccdf..3055c25042e0 100644 --- a/test/files/neg/t8700a/Bar.scala +++ b/test/files/neg/t8700a/Bar.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Bar { def bar1(foo: Foo) = foo match { case Foo.A => 1 diff --git a/test/files/neg/t8700b.check b/test/files/neg/t8700b.check index cc4e657def2a..8fbbe07bc282 100644 --- a/test/files/neg/t8700b.check +++ b/test/files/neg/t8700b.check @@ -1,11 +1,11 @@ -Bar_2.scala:3: warning: match may not be exhaustive. +Bar_2.scala:4: warning: match may not be exhaustive. It would fail on the following input: B - def bar1(foo: Foo_1) = foo match { - ^ -Bar_2.scala:7: warning: match may not be exhaustive. + def bar1(foo: Foo) = foo match { + ^ +Bar_2.scala:8: warning: match may not be exhaustive. It would fail on the following input: B - def bar2(foo: Baz_1) = foo match { - ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found + def bar2(foo: Baz) = foo match { + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t8700b/Bar_2.scala b/test/files/neg/t8700b/Bar_2.scala index 8cc238171987..c7f5c4966717 100644 --- a/test/files/neg/t8700b/Bar_2.scala +++ b/test/files/neg/t8700b/Bar_2.scala @@ -1,10 +1,11 @@ -// scalac: -Xfatal-warnings +//> using options -Werror +// object Bar { - def bar1(foo: Foo_1) = foo match { - case Foo_1.A => 1 + def bar1(foo: Foo) = foo match { + case Foo.A => 1 } - def bar2(foo: Baz_1) = foo match { - case Baz_1.A => 1 - } + def bar2(foo: Baz) = foo match { + case Baz.A => 1 + } } diff --git a/test/files/neg/t8700b/Baz.java b/test/files/neg/t8700b/Baz.java new file mode 100644 index 000000000000..f85ad40802f6 --- /dev/null +++ b/test/files/neg/t8700b/Baz.java @@ -0,0 +1,11 @@ +public enum Baz { + A { + public void baz1() {} + }, + B { + public void baz1() {} + }; + + public abstract void baz1(); + public void baz2() {} +} diff --git a/test/files/neg/t8700b/Baz_1.java b/test/files/neg/t8700b/Baz_1.java deleted file mode 100644 index 6a057c2c9c07..000000000000 --- a/test/files/neg/t8700b/Baz_1.java +++ /dev/null @@ -1,11 +0,0 @@ -public enum Baz_1 { - A { - public void baz1() {} - }, - B { - public void baz1() {} - }; - - public abstract void baz1(); - public void baz2() {} -} diff --git a/test/files/neg/t8700b/Foo.java b/test/files/neg/t8700b/Foo.java new file mode 100644 index 000000000000..cc8e9daf1f8a --- /dev/null +++ b/test/files/neg/t8700b/Foo.java @@ -0,0 +1,4 @@ +public enum Foo { + A, + B +} diff --git a/test/files/neg/t8700b/Foo_1.java b/test/files/neg/t8700b/Foo_1.java deleted file mode 100644 index 22656bdeddff..000000000000 --- a/test/files/neg/t8700b/Foo_1.java +++ /dev/null @@ -1,4 +0,0 @@ -public enum Foo_1 { - A, - B -} diff --git a/test/files/neg/t8704.check b/test/files/neg/t8704.check index 1083bdba3fb1..c9e5c120bfed 100644 --- a/test/files/neg/t8704.check +++ b/test/files/neg/t8704.check @@ -1,11 +1,11 @@ -t8704.scala:4: error: an implicit parameter section must be last +t8704.scala:5: 
error: an implicit parameter section must be last class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { ^ -t8704.scala:4: error: multiple implicit parameter sections are not allowed +t8704.scala:5: error: multiple implicit parameter sections are not allowed class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { ^ -t8704.scala:8: warning: 2 parameter sections are effectively implicit +t8704.scala:9: warning: 2 parameter sections are effectively implicit class D(private implicit val i: Int)(implicit s: String) ^ -one warning found -two errors found +1 warning +2 errors diff --git a/test/files/neg/t8704.scala b/test/files/neg/t8704.scala index dbafda12f828..37614452f4f0 100644 --- a/test/files/neg/t8704.scala +++ b/test/files/neg/t8704.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-extra-implicit +//> using options -Ywarn-extra-implicit +// class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { diff --git a/test/files/neg/t8731.check b/test/files/neg/t8731.check index 9ee34ff7cbc4..23804161d835 100644 --- a/test/files/neg/t8731.check +++ b/test/files/neg/t8731.check @@ -1,6 +1,6 @@ -t8731.scala:11: warning: could not emit switch for @switch annotated match +t8731.scala:12: warning: could not emit switch for @switch annotated match def g(x: Int) = (x: @annotation.switch) match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t8731.scala b/test/files/neg/t8731.scala index 588c3d6d73eb..c1015396a1f0 100644 --- a/test/files/neg/t8731.scala +++ b/test/files/neg/t8731.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class C { // not a compile-time constant due to return type final val K: Int = 20 diff --git a/test/files/neg/t8736-c.check b/test/files/neg/t8736-c.check deleted file mode 100644 index 99d5edae7dac..000000000000 --- a/test/files/neg/t8736-c.check +++ /dev/null @@ -1,11 +0,0 @@ -t8736-c.scala:5: warning: higher-kinded type should be enabled -by making the implicit value scala.language.higherKinds visible. -This can be achieved by adding the import clause 'import scala.language.higherKinds' -or by setting the compiler option -language:higherKinds. -See the Scaladoc for value scala.language.higherKinds for a discussion -why the feature should be explicitly enabled. - def hk[M[_]] = ??? - ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found diff --git a/test/files/neg/t8736-c.scala b/test/files/neg/t8736-c.scala deleted file mode 100644 index 7e1a83f62274..000000000000 --- a/test/files/neg/t8736-c.scala +++ /dev/null @@ -1,8 +0,0 @@ -// scalac: -feature -language:-higherKinds,_ -Xfatal-warnings -// scalac: -feature -language:-higherKinds,_ -Xfatal-warnings -// showing that wildcard doesn't supersede explicit disablement -class X { - def hk[M[_]] = ??? 
- - implicit def imp(x: X): Int = x.hashCode -} diff --git a/test/files/neg/t875.check b/test/files/neg/t875.check index 406edcf50702..4f514d426df7 100644 --- a/test/files/neg/t875.check +++ b/test/files/neg/t875.check @@ -1,17 +1,30 @@ -t875.scala:3: error: no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) - val ys = List(1, 2, 3, xs: _*) +t875.scala:3: error: Sequence argument type annotation `: _*` cannot be used here: +it is not the only argument to be passed to the single repeated parameter Int* + val ys = List(1, 2, 3, xs: _*) // error alignment with 1 param ^ -t875.scala:6: error: no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) - mkList1(xs: _*) +t875.scala:6: error: Sequence argument type annotation `: _*` cannot be used here: +the single parameter has type Int which is not a repeated parameter type + mkList1(xs: _*) // error not varargs ^ -t875.scala:15: error: no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) - f(true, 1, xs: _*) +t875.scala:15: error: Sequence argument type annotation `: _*` cannot be used here: +it is not the only argument to be passed to the corresponding repeated parameter Int* + f(true, 1, xs: _*) // error alignment with many params ^ -t875.scala:16: error: no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) - g(1, xs:_*) +t875.scala:16: error: Sequence argument type annotation `: _*` cannot be used here: +it is not the only argument to be passed to the single repeated parameter Int* + g(1, xs: _*) // error alignment with 1 param ^ -four errors found +t875.scala:18: error: Sequence argument type annotation `: _*` cannot be used here: +it is not the only argument to be passed to the corresponding repeated parameter Int*; +sequence argument must be the last argument + f(true, xs: _*, 17) // error alignment with many params, not last + ^ 
+t875.scala:20: error: Sequence argument type annotation `: _*` cannot be used here: +such annotations are only allowed in arguments to *-parameters + val zs = xs: _* // error not even close + ^ +t875.scala:22: error: Sequence argument type annotation `: _*` cannot be used here: +the single parameter has type Any which is not a repeated parameter type + val txt = s"${List(42): _*}" + ^ +7 errors diff --git a/test/files/neg/t875.scala b/test/files/neg/t875.scala index 841b2aec3f57..3d864fe2de03 100644 --- a/test/files/neg/t875.scala +++ b/test/files/neg/t875.scala @@ -1,9 +1,9 @@ object Test extends App { val xs = List(4, 5, 6) - val ys = List(1, 2, 3, xs: _*) + val ys = List(1, 2, 3, xs: _*) // error alignment with 1 param def mkList1(x: Int) = List(x) def mkList2(x: Boolean) = List(x) - mkList1(xs: _*) + mkList1(xs: _*) // error not varargs def f(x: Int*) = List(x: _*) @@ -12,7 +12,18 @@ object Test extends App { def g[a](x: a*) = List(x: _*) - f(true, 1, xs: _*) - g(1, xs:_*) + f(true, 1, xs: _*) // error alignment with many params + g(1, xs: _*) // error alignment with 1 param + + f(true, xs: _*, 17) // error alignment with many params, not last + + val zs = xs: _* // error not even close + + val txt = s"${List(42): _*}" + + implicit class summer(val sc: StringContext) { + def sum(xs: Int*) = xs.sum.toString + } + val summed = sum"${List(42): _*}" } diff --git a/test/files/neg/t8755-regress-a.check b/test/files/neg/t8755-regress-a.check new file mode 100644 index 000000000000..b65df72c101f --- /dev/null +++ b/test/files/neg/t8755-regress-a.check @@ -0,0 +1,28 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + C8 0 C8 makes C7 reachable +superaccessors 6 add super accessors in traits and nested classes + C7 0 C7 has only a before constraint + extmethods 
8 add extension methods for inline classes + pickler 9 serialize symbol tables + refchecks 10 reference/override checking, translate nested objects + patmat 11 translate match expressions + uncurry 12 uncurry, translate function values to anonymous classes + fields 13 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 14 replace tail calls by jumps + specialize 15 @specialized-driven class and method specialization + explicitouter 16 this refs to outer pointers + erasure 17 erase types, add interfaces for traits + posterasure 18 clean up erased inline classes + lambdalift 19 move nested functions to top level + constructors 20 move field definitions into constructors + flatten 21 eliminate inner classes + mixin 22 mixin composition + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + terminal 26 the last phase during a compilation run diff --git a/test/files/neg/t8755-regress-a/ploogin_1.scala b/test/files/neg/t8755-regress-a/ploogin_1.scala new file mode 100644 index 000000000000..643c09f2fed7 --- /dev/null +++ b/test/files/neg/t8755-regress-a/ploogin_1.scala @@ -0,0 +1,40 @@ + +package t8755 + +import scala.tools.nsc, nsc.{Global, Phase, plugins}, plugins.{Plugin, PluginComponent} + +class P(val global: Global) extends Plugin { + override val name = "Testing phase assembly" + override val description = "C7 is not dropped even though it has no runs[Right]After" + override val components = List[PluginComponent]( + component7, + component8, + ) + + object component7 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C7" + override val description = "C7 has only a before constraint" + override val runsRightAfter = None + override val runsAfter = Nil + override val runsBefore = List("patmat") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase 
$phaseName" + override def run() = println(name) + } + } + object component8 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C8" + override val description = "C8 makes C7 reachable" + override val runsRightAfter = None + override val runsAfter = List("typer") + override val runsBefore = List("C7") // component name, not phase name! + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } +} diff --git a/test/files/neg/t8755-regress-a/sample_2.scala b/test/files/neg/t8755-regress-a/sample_2.scala new file mode 100644 index 000000000000..c33cb6e657c4 --- /dev/null +++ b/test/files/neg/t8755-regress-a/sample_2.scala @@ -0,0 +1,6 @@ +//> using options -Xplugin:. -Xplugin-require:"Testing phase assembly" -Vphases -Werror +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t8755-regress-a/scalac-plugin.xml b/test/files/neg/t8755-regress-a/scalac-plugin.xml new file mode 100644 index 000000000000..37440fe54290 --- /dev/null +++ b/test/files/neg/t8755-regress-a/scalac-plugin.xml @@ -0,0 +1,5 @@ + + Testing phase assembly + t8755.P + + diff --git a/test/files/neg/t8755.check b/test/files/neg/t8755.check new file mode 100644 index 000000000000..4322d7154355 --- /dev/null +++ b/test/files/neg/t8755.check @@ -0,0 +1,29 @@ +warning: No phase `refchicks` for ploogin.runsAfter - did you mean refchecks? +warning: No phase `java` for ploogin.runsBefore - did you mean jvm? 
+warning: Dropping phase ploogin, it is not reachable from parser + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions + uncurry 10 uncurry, translate function values to anonymous classes + fields 11 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lambdalift 17 move nested functions to top level + constructors 18 move field definitions into constructors + flatten 19 eliminate inner classes + mixin 20 mixin composition + cleanup 21 platform-specific cleanups, generate reflective calls + delambdafy 22 remove lambdas + jvm 23 generate JVM bytecode + terminal 24 the last phase during a compilation run diff --git a/test/files/neg/t8755/ploogin_1.scala b/test/files/neg/t8755/ploogin_1.scala new file mode 100644 index 000000000000..33ea7e6c6e56 --- /dev/null +++ b/test/files/neg/t8755/ploogin_1.scala @@ -0,0 +1,31 @@ + +package t8755 + +import scala.tools.nsc.{Global, Phase} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} +import scala.reflect.io.Path +import scala.reflect.io.File + +/** A test plugin. */ +class Ploogin(val global: Global) extends Plugin { + import global._ + + val name = "ploogin" + val description = "A sample plugin for testing." 
+ val components = List[PluginComponent](TestComponent) + + private object TestComponent extends PluginComponent { + val global: Ploogin.this.global.type = Ploogin.this.global + override val runsBefore = List("java") + val runsAfter = List("refchicks") + val phaseName = Ploogin.this.name + override def description = "A sample phase that doesn't know when to run." + def newPhase(prev: Phase) = new TestPhase(prev) + class TestPhase(prev: Phase) extends StdPhase(prev) { + override def description = TestComponent.this.description + def apply(unit: CompilationUnit): Unit = { + // kewl kode + } + } + } +} diff --git a/test/files/neg/t8755/sample_2.scala b/test/files/neg/t8755/sample_2.scala new file mode 100644 index 000000000000..ef2fba58a09b --- /dev/null +++ b/test/files/neg/t8755/sample_2.scala @@ -0,0 +1,6 @@ +//> using options -Xplugin:. -Xplugin-require:ploogin -Vphases -Werror +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t8755/scalac-plugin.xml b/test/files/neg/t8755/scalac-plugin.xml new file mode 100644 index 000000000000..451480c58d68 --- /dev/null +++ b/test/files/neg/t8755/scalac-plugin.xml @@ -0,0 +1,5 @@ + + ploogin + t8755.Ploogin + + diff --git a/test/files/neg/t8755b.check b/test/files/neg/t8755b.check new file mode 100644 index 000000000000..b4275a932aff --- /dev/null +++ b/test/files/neg/t8755b.check @@ -0,0 +1,27 @@ +warning: Dropping phase ploogin, it is not reachable from parser + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested 
objects + patmat 9 translate match expressions + uncurry 10 uncurry, translate function values to anonymous classes + fields 11 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lambdalift 17 move nested functions to top level + constructors 18 move field definitions into constructors + flatten 19 eliminate inner classes + mixin 20 mixin composition + cleanup 21 platform-specific cleanups, generate reflective calls + delambdafy 22 remove lambdas + jvm 23 generate JVM bytecode + terminal 24 the last phase during a compilation run diff --git a/test/files/neg/t8755b/ploogin_1.scala b/test/files/neg/t8755b/ploogin_1.scala new file mode 100644 index 000000000000..76ab22f4b257 --- /dev/null +++ b/test/files/neg/t8755b/ploogin_1.scala @@ -0,0 +1,29 @@ + +package t8755 + +import scala.tools.nsc.{Global, Phase} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} +import scala.reflect.io.Path +import scala.reflect.io.File + +/** A test plugin. */ +class Ploogin(val global: Global) extends Plugin { + import global._ + + val name = "ploogin" + val description = "A sample plugin for testing." + val components = List[PluginComponent](TestComponent) + + private object TestComponent extends PluginComponent { + val global: Ploogin.this.global.type = Ploogin.this.global + override val runsBefore = List("erasure") + val runsAfter = Nil + val phaseName = Ploogin.this.name + override def description = "A phase that another phase must run before." 
+ def newPhase(prev: Phase) = new TestPhase(prev) + class TestPhase(prev: Phase) extends StdPhase(prev) { + override def description = TestComponent.this.description + def apply(unit: CompilationUnit): Unit = () + } + } +} diff --git a/test/files/neg/t8755b/sample_2.scala b/test/files/neg/t8755b/sample_2.scala new file mode 100644 index 000000000000..ef2fba58a09b --- /dev/null +++ b/test/files/neg/t8755b/sample_2.scala @@ -0,0 +1,6 @@ +//> using options -Xplugin:. -Xplugin-require:ploogin -Vphases -Werror +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t8755b/scalac-plugin.xml b/test/files/neg/t8755b/scalac-plugin.xml new file mode 100644 index 000000000000..451480c58d68 --- /dev/null +++ b/test/files/neg/t8755b/scalac-plugin.xml @@ -0,0 +1,5 @@ + + ploogin + t8755.Ploogin + + diff --git a/test/files/neg/t8755c.check b/test/files/neg/t8755c.check new file mode 100644 index 000000000000..606c9132cf41 --- /dev/null +++ b/test/files/neg/t8755c.check @@ -0,0 +1,28 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + C1 0 C1 tests phase assembly +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + patmat 10 translate match expressions + C6 0 C6 tests phase assembly after a phase missing in Scaladoc + uncurry 12 uncurry, translate function values to anonymous classes + fields 13 synthesize accessors and fields, add bitmaps for lazy vals + tailcalls 14 replace tail calls by jumps + specialize 15 @specialized-driven class and method specialization + explicitouter 16 this refs to outer pointers + 
erasure 17 erase types, add interfaces for traits + posterasure 18 clean up erased inline classes + lambdalift 19 move nested functions to top level + constructors 20 move field definitions into constructors + flatten 21 eliminate inner classes + mixin 22 mixin composition + cleanup 23 platform-specific cleanups, generate reflective calls + delambdafy 24 remove lambdas + jvm 25 generate JVM bytecode + terminal 26 the last phase during a compilation run diff --git a/test/files/neg/t8755c/ploogin_1.scala b/test/files/neg/t8755c/ploogin_1.scala new file mode 100644 index 000000000000..7c09ca9dbf99 --- /dev/null +++ b/test/files/neg/t8755c/ploogin_1.scala @@ -0,0 +1,126 @@ + +package t8755 + +import scala.tools.nsc +import nsc.{Global, Phase, plugins} +import plugins.{Plugin, PluginComponent} + +class P(val global: Global) extends Plugin { + override val name = "Testing" + override val description = "Testing phase assembly" + override val components = List[PluginComponent]( + component1, + //component2, + //component3, + //component4, + //component5, + component6, + //component7, + //component8, + ) + + object component1 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C1" + override val description = "C1 tests phase assembly" + override val runsRightAfter = Option("typer") + override val runsAfter = List("typer") + override val runsBefore = List("terminal") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component2 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C2" + override val description = "C2 tests phase assembly impossible constraint" + override val runsRightAfter = Option("patmat") + override val runsAfter = List("typer") + override val runsBefore = List("typer") + override def newPhase(prev: Phase) = new phase(prev) + class 
phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component3 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C3" + override val description = "C3 tests phase assembly missing before, phase is ignored" + override val runsRightAfter = None + override val runsAfter = List("typer") + override val runsBefore = List("germinal") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component4 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C4" + override val description = "C4 tests phase assembly impossible constraint not right after" + override val runsRightAfter = None + override val runsAfter = List("typer") + override val runsBefore = List("typer") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component5 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C5" + override val description = "C5 tests phase assembly before a phase missing in Scaladoc" + override val runsRightAfter = None + override val runsAfter = List("typer") + override val runsBefore = List("erasure") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component6 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C6" + override val description = "C6 tests phase assembly after a phase missing in Scaladoc" + override val runsRightAfter = None + override val runsAfter = List("patmat") + //override val runsBefore = 
List("terminal") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component7 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C7" + override val description = "C7 tests phase assembly if only a before constraint" + override val runsRightAfter = None + override val runsAfter = Nil + override val runsBefore = List("patmat") + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } + object component8 extends PluginComponent { + override val global = P.this.global + override val phaseName = "C8" + override val description = "C8 is before C7 which specifies no after" + override val runsRightAfter = None + override val runsAfter = List("typer") + override val runsBefore = List("C7") // component name, not phase name! + override def newPhase(prev: Phase) = new phase(prev) + class phase(prev: Phase) extends Phase(prev) { + override val name = s"phase $phaseName" + override def run() = println(name) + } + } +} diff --git a/test/files/neg/t8755c/sample_2.scala b/test/files/neg/t8755c/sample_2.scala new file mode 100644 index 000000000000..6a687ef41075 --- /dev/null +++ b/test/files/neg/t8755c/sample_2.scala @@ -0,0 +1,6 @@ +//> using options -Xplugin:. 
-Xplugin-require:Testing -Vphases -Werror +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t8755c/scalac-plugin.xml b/test/files/neg/t8755c/scalac-plugin.xml new file mode 100644 index 000000000000..ff68af97a9dd --- /dev/null +++ b/test/files/neg/t8755c/scalac-plugin.xml @@ -0,0 +1,5 @@ + + Testing + t8755.P + + diff --git a/test/files/neg/t876.check b/test/files/neg/t876.check index 7df2e126a6f0..6223a812bad7 100644 --- a/test/files/neg/t876.check +++ b/test/files/neg/t876.check @@ -1,4 +1,4 @@ -t876.scala:25: error: too many arguments (2) for method apply: (key: AssertionError.A)manager.B in class HashMap +t876.scala:25: error: too many arguments (found 2, expected 1) for method apply: (key: AssertionError.A): manager.B in class HashMap assert(manager.map(A2) == List(manager.map(A2, A1))) ^ -one error found +1 error diff --git a/test/files/neg/t876.scala b/test/files/neg/t876.scala index 1f6a90dff3fb..981d48d8bad6 100644 --- a/test/files/neg/t876.scala +++ b/test/files/neg/t876.scala @@ -16,7 +16,7 @@ object AssertionError extends AnyRef with App } - def test[T](f: => T) { f } + def test[T](f: => T): Unit = { f } test { val manager = new Manager diff --git a/test/files/neg/t8763.check b/test/files/neg/t8763.check index 4659e57be689..56183fa24b8f 100644 --- a/test/files/neg/t8763.check +++ b/test/files/neg/t8763.check @@ -3,4 +3,4 @@ t8763.scala:9: error: type mismatch; required: String names_times(fields(0)) += fields(1).toLong ^ -one error found +1 error diff --git a/test/files/neg/t8763.scala b/test/files/neg/t8763.scala index 08ce1b471a58..8d83893982b9 100644 --- a/test/files/neg/t8763.scala +++ b/test/files/neg/t8763.scala @@ -2,7 +2,7 @@ import collection.mutable object Foo { - def bar() { + def bar(): Unit = { val names_times = mutable.Map[String, mutable.Set[Long]]() val line = "" val Array(fields) = line.split("\t") diff --git a/test/files/neg/t877.check 
b/test/files/neg/t877.check index c3d4ab658478..388b4f78211f 100644 --- a/test/files/neg/t877.check +++ b/test/files/neg/t877.check @@ -1,7 +1,7 @@ -t877.scala:3: error: Invalid literal number +t877.scala:3: error: invalid literal number trait Foo extends A(22A, Bug!) {} ^ t877.scala:3: error: ')' expected but eof found. trait Foo extends A(22A, Bug!) {} ^ -two errors found +2 errors diff --git a/test/files/neg/t8777.check b/test/files/neg/t8777.check index cd05f1ec11de..121c2beebc05 100644 --- a/test/files/neg/t8777.check +++ b/test/files/neg/t8777.check @@ -1,6 +1,6 @@ -t8777.scala:3: error: type mismatch; - found : Foo.this.TreePrinter(in trait Printers) - required: Foo.this.TreePrinter(in trait Printers) - super.newCodePrinter(out, tree, printRootPkg) - ^ -one error found +t8777.scala:12: error: type mismatch; + found : Test.this.Shadow(in trait Test) + required: Test.this.Shadow(in trait Test) + override def test: Shadow = super.test + ^ +1 error diff --git a/test/files/neg/t8777.scala b/test/files/neg/t8777.scala index 5b7d123202d0..8d8ad0fa921d 100644 --- a/test/files/neg/t8777.scala +++ b/test/files/neg/t8777.scala @@ -1,4 +1,14 @@ -trait Foo extends scala.tools.nsc.Global { - override def newCodePrinter(out: java.io.PrintWriter, tree: Tree, printRootPkg: Boolean): TreePrinter = - super.newCodePrinter(out, tree, printRootPkg) +//> using options '-Wconf:msg=shadowing a nested class of a parent is deprecated:s' + +package a { + trait Test { + class Shadow + def test: Shadow = new Shadow + } +} +package b { + trait Test extends a.Test { + class Shadow extends super.Shadow + override def test: Shadow = super.test + } } diff --git a/test/files/neg/t882.check b/test/files/neg/t882.check index a906778a1a41..82e6e0571f63 100644 --- a/test/files/neg/t882.check +++ b/test/files/neg/t882.check @@ -1,4 +1,4 @@ -t882.scala:2: error: traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...' 
+t882.scala:2: error: traits cannot have type parameters with context bounds `: ...` nor view bounds `<% ...` trait SortedSet[A <% Ordered[A]] { ^ -one error found +1 error diff --git a/test/files/neg/t8841.check b/test/files/neg/t8841.check deleted file mode 100644 index ad525dc3f87a..000000000000 --- a/test/files/neg/t8841.check +++ /dev/null @@ -1,9 +0,0 @@ -t8841.scala:13: error: recursive value c needs type - val ambiguousName = c.ambiguousName - ^ -t8841.scala:12: warning: failed to determine if 'ambiguousName = ...' is a named argument or an assignment expression. -an explicit type is required for the definition mentioned in the error message above. - val c = new Cell(ambiguousName = Some("bla")) - ^ -one warning found -one error found diff --git a/test/files/neg/t8841.scala b/test/files/neg/t8841.scala deleted file mode 100644 index 80430d997eea..000000000000 --- a/test/files/neg/t8841.scala +++ /dev/null @@ -1,15 +0,0 @@ -class Cell(val ambiguousName: Option[String]) - -class Test { - def wrap(f: Any): Nothing = ??? - - wrap { - // the namer for these two ValDefs is created when typing the argument expression - // of wrap. This happens to be in a silent context (tryTypedApply). Therefore, the - // cyclic reference will not be thrown, but transformed into a NormalTypeError by - // `silent`. This requires different handling in NamesDefaults. 
- - val c = new Cell(ambiguousName = Some("bla")) - val ambiguousName = c.ambiguousName - } -} diff --git a/test/files/neg/t8849.check b/test/files/neg/t8849.check index 1d5b4164b205..bf243cf7ffc7 100644 --- a/test/files/neg/t8849.check +++ b/test/files/neg/t8849.check @@ -1,7 +1,7 @@ t8849.scala:8: error: ambiguous implicit values: - both lazy value global in object Implicits of type => scala.concurrent.ExecutionContext + both method global in object Implicits of type scala.concurrent.ExecutionContext and value dummy of type scala.concurrent.ExecutionContext match expected type scala.concurrent.ExecutionContext require(implicitly[ExecutionContext] eq dummy) ^ -one error found +1 error diff --git a/test/files/neg/t8849.scala b/test/files/neg/t8849.scala index 336f16b40f8f..27ab3e857db4 100644 --- a/test/files/neg/t8849.scala +++ b/test/files/neg/t8849.scala @@ -7,4 +7,4 @@ object Test { require(scala.concurrent.ExecutionContext.Implicits.global ne null) require(implicitly[ExecutionContext] eq dummy) } -} \ No newline at end of file +} diff --git a/test/files/neg/t8869.check b/test/files/neg/t8869.check index 40b8570f9f6a..17c728618e95 100644 --- a/test/files/neg/t8869.check +++ b/test/files/neg/t8869.check @@ -4,4 +4,4 @@ t8869.scala:5: error: class Option takes type parameters t8869.scala:7: error: class Option takes type parameters type l2[x] = Option // error correctly reported ^ -two errors found +2 errors diff --git a/test/files/neg/t8890.check b/test/files/neg/t8890.check index 1b69d6cf305e..b6461b2d810e 100644 --- a/test/files/neg/t8890.check +++ b/test/files/neg/t8890.check @@ -1,4 +1,4 @@ t8890.scala:6: error: not found: type Str def bar(x: Str): Unit = ??? 
^ -one error found +1 error diff --git a/test/files/neg/t8890.scala b/test/files/neg/t8890.scala index cbdeb11d43da..191874d34b94 100644 --- a/test/files/neg/t8890.scala +++ b/test/files/neg/t8890.scala @@ -8,4 +8,4 @@ class A { class B { (new A).bar(0) -} \ No newline at end of file +} diff --git a/test/files/neg/t8892.check b/test/files/neg/t8892.check index 5930be58c5e1..8743944da7c2 100644 --- a/test/files/neg/t8892.check +++ b/test/files/neg/t8892.check @@ -4,4 +4,4 @@ t8892.scala:2: error: type mismatch; (which expands to) String class C[B](x: B) extends A { def f: B = x } ^ -one error found +1 error diff --git a/test/files/neg/t8989.check b/test/files/neg/t8989.check index 4e89b862bda1..9594d61c0061 100644 --- a/test/files/neg/t8989.check +++ b/test/files/neg/t8989.check @@ -1,4 +1,4 @@ t8989.scala:11: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in A val f = p match {case d(1) => true; case _ => false} ^ -one error found +1 error diff --git a/test/files/neg/t900.check b/test/files/neg/t900.check deleted file mode 100644 index 6fe26a31acdc..000000000000 --- a/test/files/neg/t900.check +++ /dev/null @@ -1,9 +0,0 @@ -t900.scala:4: error: type mismatch; - found : Foo.this.x.type (with underlying type Foo.this.bar) - required: AnyRef -Note that bar is unbounded, which means AnyRef is not a known parent. -Such types can participate in value classes, but instances -cannot appear in singleton types or in reference comparisons. - def break(): x.type - ^ -one error found diff --git a/test/files/neg/t9008.check b/test/files/neg/t9008.check index c32bc41bafc4..8e621a7f4b94 100644 --- a/test/files/neg/t9008.check +++ b/test/files/neg/t9008.check @@ -1,4 +1,4 @@ t9008.scala:2: error: type M takes type parameters def x: List[M forSome { type M[_] }] = ??? 
^ -one error found +1 error diff --git a/test/files/neg/t9008b.check b/test/files/neg/t9008b.check index 5e911fc13832..d5f5f5ef8f50 100644 --- a/test/files/neg/t9008b.check +++ b/test/files/neg/t9008b.check @@ -1,4 +1,4 @@ t9008b.scala:2: error: type M takes type parameters type T = M forSome { type M[_] } ^ -one error found +1 error diff --git a/test/files/neg/t9014.check b/test/files/neg/t9014.check new file mode 100644 index 000000000000..650093881062 --- /dev/null +++ b/test/files/neg/t9014.check @@ -0,0 +1,4 @@ +t9014.scala:4: error: Inner is already defined as case class Inner + case class Inner(default: T) + ^ +1 error diff --git a/test/files/neg/t9014.scala b/test/files/neg/t9014.scala new file mode 100644 index 000000000000..32465c3c7dcd --- /dev/null +++ b/test/files/neg/t9014.scala @@ -0,0 +1,7 @@ +object Test { + def spec[@specialized(Byte, Short, Int, Long) T : Integral](t: T) = { + // still broken - specialize can't deal with the synthetic companion object + case class Inner(default: T) + t + } +} diff --git a/test/files/neg/t9041.check b/test/files/neg/t9041.check index 669e9434e079..172d3a350cf5 100644 --- a/test/files/neg/t9041.check +++ b/test/files/neg/t9041.check @@ -1,4 +1,4 @@ t9041.scala:11: error: could not find implicit value for parameter cellSetter: CellSetter[scala.math.BigDecimal] - def setCell(cell: Cell, data: math.BigDecimal) { cell.setCellValue(data) } - ^ -one error found + def setCell(cell: Cell, data: math.BigDecimal): Unit = { cell.setCellValue(data) } + ^ +1 error diff --git a/test/files/neg/t9041.scala b/test/files/neg/t9041.scala index 2bdef0d3ae1c..17d21ce075f1 100644 --- a/test/files/neg/t9041.scala +++ b/test/files/neg/t9041.scala @@ -8,7 +8,7 @@ trait Nope { def setCell(cell: Cell, data: A): Unit } implicit val bigDecimalCellSetter = new CellSetter[math.BigDecimal]() { - def setCell(cell: Cell, data: math.BigDecimal) { cell.setCellValue(data) } + def setCell(cell: Cell, data: math.BigDecimal): Unit = { 
cell.setCellValue(data) } } implicit class RichCell(cell: Cell) { def setCellValue[A](data: A)(implicit cellSetter: CellSetter[A]) = cellSetter.setCell(cell, data) diff --git a/test/files/neg/t9045.check b/test/files/neg/t9045.check index 07d0e2dd7464..a31ec337a13e 100644 --- a/test/files/neg/t9045.check +++ b/test/files/neg/t9045.check @@ -1,7 +1,7 @@ t9045.scala:3: error: constructor invokes itself def this(axes: Array[Int]) = this(axes) ^ -t9045.scala:6: error: called constructor's definition must precede calling constructor's definition +t9045.scala:6: error: self constructor invocation must refer to a constructor definition which precedes it, to prevent infinite cycles def this(d: Double) = this(d.toLong) ^ -two errors found +2 errors diff --git a/test/files/neg/t908.check b/test/files/neg/t908.check index 2c723a700bc2..fec5d29b2b35 100644 --- a/test/files/neg/t908.check +++ b/test/files/neg/t908.check @@ -1,4 +1,4 @@ t908.scala:8: error: not found: value makeA this(makeA) ^ -one error found +1 error diff --git a/test/files/neg/t909.check b/test/files/neg/t909.check index e7a42bd246b7..adb0bec04ab4 100644 --- a/test/files/neg/t909.check +++ b/test/files/neg/t909.check @@ -3,4 +3,4 @@ t909.scala:6: error: type mismatch; required: Int case Foo("Hello") => ^ -one error found +1 error diff --git a/test/files/neg/t909.scala b/test/files/neg/t909.scala index cd67042e02b6..63dd88adf20e 100644 --- a/test/files/neg/t909.scala +++ b/test/files/neg/t909.scala @@ -1,7 +1,7 @@ case class Foo(x:Int) object Bar { - def main(args:Array[String]) { + def main(args:Array[String]): Unit = { Foo(2) match { case Foo("Hello") => } diff --git a/test/files/neg/t9093.check b/test/files/neg/t9093.check index 085a433f0bb7..22ac91dfdbce 100644 --- a/test/files/neg/t9093.check +++ b/test/files/neg/t9093.check @@ -1,6 +1,6 @@ -t9093.scala:3: error: polymorphic expression cannot be instantiated to expected type; - found : [C](f: C)Null - required: Unit +t9093.scala:3: error: missing argument 
list for method apply2 in object Main of type [C](fa: Any)(f: C): Null +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `apply2 _` or `apply2(_)(_)` instead of `apply2`. val x: Unit = apply2(0)/*(0)*/ ^ -one error found +1 error diff --git a/test/files/neg/t910.check b/test/files/neg/t910.check index 45420f8e3545..ee22900c719f 100644 --- a/test/files/neg/t910.check +++ b/test/files/neg/t910.check @@ -3,4 +3,4 @@ t910.scala:4: error: type mismatch; required: Seq[Int] val y: Seq[Int] = rest ^ -one error found +1 error diff --git a/test/files/neg/t9111b.check b/test/files/neg/t9111b.check index 668cacbcfd8c..260cb6635733 100644 --- a/test/files/neg/t9111b.check +++ b/test/files/neg/t9111b.check @@ -3,4 +3,4 @@ Test.scala:4: error: type mismatch; required: A.P.T println(j.foo(new A.T())) // compiles in mixed compilation (it should not) ^ -one error found +1 error diff --git a/test/files/neg/t9125.check b/test/files/neg/t9125.check new file mode 100644 index 000000000000..cf58ee5c812a --- /dev/null +++ b/test/files/neg/t9125.check @@ -0,0 +1,6 @@ +t9125.scala:10: error: reference to p is ambiguous; +it is both defined in package q and imported subsequently by +import _root_.p + def f() = new p.C + ^ +1 error diff --git a/test/files/neg/t9125.scala b/test/files/neg/t9125.scala new file mode 100644 index 000000000000..78af55cb91e9 --- /dev/null +++ b/test/files/neg/t9125.scala @@ -0,0 +1,13 @@ + +package p { + class C +} + +package q { + object p { + class K { + import _root_.p + def f() = new p.C + } + } +} diff --git a/test/files/neg/t9127.check b/test/files/neg/t9127.check deleted file mode 100644 index be6e4fc65084..000000000000 --- a/test/files/neg/t9127.check +++ /dev/null @@ -1,12 +0,0 @@ -t9127.scala:5: warning: possible missing interpolator: detected interpolated identifier `$s` - val t = "$s" - ^ -t9127.scala:6: warning: possible missing interpolator: detected an interpolated 
expression - val u = "a${s}b" - ^ -t9127.scala:7: warning: possible missing interpolator: detected interpolated identifier `$s` - val v = "a$s b" - ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found diff --git a/test/files/neg/t9127.scala b/test/files/neg/t9127.scala deleted file mode 100644 index d0e7d4fda545..000000000000 --- a/test/files/neg/t9127.scala +++ /dev/null @@ -1,8 +0,0 @@ -// scalac: -Xlint:missing-interpolator -Xfatal-warnings - -trait X { - val s = "hello" - val t = "$s" - val u = "a${s}b" - val v = "a$s b" -} diff --git a/test/files/neg/t9138.check b/test/files/neg/t9138.check index 895c57f520c8..7f1adce81b5b 100644 --- a/test/files/neg/t9138.check +++ b/test/files/neg/t9138.check @@ -1,11 +1,25 @@ -t9138.scala:9: error: class D needs to be abstract, since method f in class C of type (t: B)(s: String)B is not defined +t9138.scala:9: error: class D needs to be abstract. +Missing implementation for member of class C: + def f(t: B)(s: String): B = ??? // implements `def f(t: T)(s: String): T`; String does not match Int in `def f(b: B)(i: Int): B` + class D extends C[B] { ^ -t9138.scala:19: error: object creation impossible, since method foo in trait Base of type (a: String)(b: Int)Nothing is not defined +t9138.scala:19: error: object creation impossible. +Missing implementation for member of trait Base: + def foo(a: String)(b: Int): Nothing = ??? // implements `def foo(a: A)(b: Int): Nothing` + object Derived extends Base[String] { ^ -t9138.scala:29: error: class DDD needs to be abstract, since method f in class CCC of type (t: B, s: String)B is not defined -(Note that T does not match Int) +t9138.scala:29: error: class DDD needs to be abstract. +Missing implementation for member of class CCC: + def f(t: B, s: String): B = ??? 
// implements `def f(t: T, s: String): T`; T does not match Int in `def f(b: Int, i: String): Int` + class DDD extends CCC[B] { ^ -three errors found +t9138.scala:43: error: object creation impossible. +Missing implementation for member of trait Model: + def create(conditionalParams: ImplementingParamTrait)(implicit d: Double): Int = ??? // implements `def create(conditionalParams: P)(implicit d: Double): Int`; overriding member must declare implicit parameter list + +object Obj extends Model[ImplementingParamTrait] { + ^ +4 errors diff --git a/test/files/neg/t9138.scala b/test/files/neg/t9138.scala index 0e8544220e82..b2412450fac9 100644 --- a/test/files/neg/t9138.scala +++ b/test/files/neg/t9138.scala @@ -28,4 +28,18 @@ abstract class CCC[T <: A] { class DDD extends CCC[B] { def f(b: Int, i: String) = b -} \ No newline at end of file +} + +// an additional example from the forum + +trait ParamTrait + +class ImplementingParamTrait extends ParamTrait + +trait Model[P <: ParamTrait] { + def create(conditionalParams: P)(implicit d: Double): Int +} + +object Obj extends Model[ImplementingParamTrait] { + def create(conditionalParams: ImplementingParamTrait)(d: Double): Int = 5 +} diff --git a/test/files/neg/t9231.check b/test/files/neg/t9231.check index 43c14f53ca5a..a164f5f59265 100644 --- a/test/files/neg/t9231.check +++ b/test/files/neg/t9231.check @@ -1,4 +1,4 @@ t9231.scala:8: error: not found: type DoesNotExist foo[DoesNotExist] ^ -one error found +1 error diff --git a/test/files/neg/t9232.check b/test/files/neg/t9232.check new file mode 100644 index 000000000000..e8a4f7ef4219 --- /dev/null +++ b/test/files/neg/t9232.check @@ -0,0 +1,7 @@ +t9232.scala:14: warning: match may not be exhaustive. +It would fail on the following inputs: Node1(_), Node2() + def transformTree(tree: Tree): Any = tree match { + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t9232.scala b/test/files/neg/t9232.scala new file mode 100644 index 000000000000..bdc5977ef90a --- /dev/null +++ b/test/files/neg/t9232.scala @@ -0,0 +1,17 @@ +//> using options -Xfatal-warnings + +final class Foo(val value: Int) +object Foo { + def unapplySeq(foo: Foo): Some[Seq[Int]] = Some(List(foo.value)) + //def unapply(foo: Foo): Some[Int] = Some(foo.value) +} + +sealed trait Tree +case class Node1(foo: Foo) extends Tree +case class Node2() extends Tree + +object Test { + def transformTree(tree: Tree): Any = tree match { + case Node1(Foo(1)) => ??? + } +} diff --git a/test/files/neg/t9273.check b/test/files/neg/t9273.check index 1dca63a736e1..0f1ff22293ea 100644 --- a/test/files/neg/t9273.check +++ b/test/files/neg/t9273.check @@ -7,4 +7,4 @@ t9273.scala:3: error: not found: type X t9273.scala:7: error: not found: type X val foo4: Class[_] = Predef.classOf[X] // good error, all info contained ^ -three errors found +3 errors diff --git a/test/files/neg/t9286a.check b/test/files/neg/t9286a.check index 2bc7c0cf1575..0b23e75eae8f 100644 --- a/test/files/neg/t9286a.check +++ b/test/files/neg/t9286a.check @@ -1,7 +1,7 @@ t9286a.scala:6: error: name clash between defined and inherited member: def foo(o: (String,)): Unit in class T and private def foo(o: (Any,)): Unit at line 6 -have same type after erasure: (o: Tuple1)Unit +have same type after erasure: (o: Tuple1): Unit private def foo(o: Tuple1[Any]) = () ^ -one error found +1 error diff --git a/test/files/neg/t9286b.check b/test/files/neg/t9286b.check index 89a191bfeed5..c1507690a7b3 100644 --- a/test/files/neg/t9286b.check +++ b/test/files/neg/t9286b.check @@ -1,7 +1,7 @@ t9286b.scala:2: error: name clash between defined and inherited member: def foo: Int in class C and private def foo[A]: Int at line 2 -have same type after erasure: ()Int +have same type after erasure: (): Int class D extends C { private def foo[A] = 0 } ^ -one error found +1 error diff --git 
a/test/files/neg/t9286c.check b/test/files/neg/t9286c.check index 785cb3f93754..626ac1adb875 100644 --- a/test/files/neg/t9286c.check +++ b/test/files/neg/t9286c.check @@ -1,7 +1,7 @@ t9286c.scala:8: error: name clash between defined and inherited member: def foo(m: M[_ >: String]): Int in trait T and private def foo(m: M[_ >: Any]): Int at line 8 -have same type after erasure: (m: M)Int +have same type after erasure: (m: M): Int def foo(m: M[_ >: Any]) = 0 // Expected: "same type after erasure" ^ -one error found +1 error diff --git a/test/files/neg/t9286c.scala b/test/files/neg/t9286c.scala index 3df08dcfe6da..8a52858def7c 100644 --- a/test/files/neg/t9286c.scala +++ b/test/files/neg/t9286c.scala @@ -9,6 +9,6 @@ object Test { } def main(args: Array[String]): Unit = { val m: M[String] = null - t.foo(m) // VeriyError: Duplicate method name&signature + t.foo(m) // VerifyError: Duplicate method name&signature } } diff --git a/test/files/neg/t9310.check b/test/files/neg/t9310.check new file mode 100644 index 000000000000..b7a505c5b50f --- /dev/null +++ b/test/files/neg/t9310.check @@ -0,0 +1,6 @@ +t9310.scala:8: error: (arg: Any): Test.this.C does not take parameters +error after rewriting to Test.this.c.applyDynamic("m") +possible cause: maybe a wrong Dynamic method signature? + def f = c.m(42) + ^ +1 error diff --git a/test/files/neg/t9310.scala b/test/files/neg/t9310.scala new file mode 100644 index 000000000000..0839b65b3b13 --- /dev/null +++ b/test/files/neg/t9310.scala @@ -0,0 +1,28 @@ +import scala.language.dynamics + +trait Test { + class C extends Dynamic { + def applyDynamic(arg: Any): C = ??? 
+ } + val c = new C + def f = c.m(42) +} + +/* +object Test { + case class LeonAny(v: Any) extends Dynamic { + def applyDynamic(args: Any*): LeonAny = this + } + + implicit def fooToFoo(a: Any): LeonAny = { + LeonAny(a) + } + + def test() { + val a: LeonAny = 42 + val b: LeonAny = Nil + + b += a + } +} +*/ diff --git a/test/files/neg/t9334.check b/test/files/neg/t9334.check index 96eb564b14e7..e5fe6ef6d0ed 100644 --- a/test/files/neg/t9334.check +++ b/test/files/neg/t9334.check @@ -1,5 +1,6 @@ -t9334.scala:7: error: overriding method aaa in class A of type => Int; - method aaa has weaker access privileges; it should not be private +t9334.scala:5: error: weaker access privileges in overriding +def aaa: Int (defined in class A) + override should not be private private[this] def aaa: Int = 42 ^ -one error found +1 error diff --git a/test/files/neg/t9334.scala b/test/files/neg/t9334.scala index 53d264076acc..c8838e855db2 100644 --- a/test/files/neg/t9334.scala +++ b/test/files/neg/t9334.scala @@ -1,5 +1,3 @@ -// scalac: -Xsource:2.13 - class A { def aaa: Int = 10 } diff --git a/test/files/neg/t935.check b/test/files/neg/t935.check index af634a2630f2..d82ebd24b417 100644 --- a/test/files/neg/t935.check +++ b/test/files/neg/t935.check @@ -4,4 +4,4 @@ t935.scala:7: error: type arguments [Test3.B] do not conform to class E's type p t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String] val b: String @E[B](new B) = "hi" ^ -two errors found +2 errors diff --git a/test/files/neg/t9361.check b/test/files/neg/t9361.check index 847d137f7d6c..15c497151ef0 100644 --- a/test/files/neg/t9361.check +++ b/test/files/neg/t9361.check @@ -1,11 +1,6 @@ t9361.scala:4: error: type mismatch; found : Tc[_$2] where type _$2 - required: Nothing[] + required: Nothing new Foo { def tc = null.asInstanceOf[Tc[_]] } ^ -t9361.scala:4: error: type mismatch; - found : Foo[Nothing] - required: Foo[Tc]{type T = Nothing} - new Foo { def tc = 
null.asInstanceOf[Tc[_]] } - ^ -two errors found +1 error diff --git a/test/files/neg/t9382.check b/test/files/neg/t9382.check index 93bf48926ab0..39337378179a 100644 --- a/test/files/neg/t9382.check +++ b/test/files/neg/t9382.check @@ -7,4 +7,4 @@ t9382.scala:4: error: value x is not a member of (List[Int], List[Int], List[Int t9382.scala:5: error: value x is not a member of (Int, Int) def huh = (1,2).x ^ -three errors found +3 errors diff --git a/test/files/neg/t9398.check b/test/files/neg/t9398.check index 2adf7bd7e268..9e6d8314ce72 100644 --- a/test/files/neg/t9398.check +++ b/test/files/neg/t9398.check @@ -2,6 +2,6 @@ match.scala:4: warning: match may not be exhaustive. It would fail on the following input: CC(B2) def test(c: CC): Unit = c match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t9398/data.scala b/test/files/neg/t9398/data.scala index 31b2762a66d9..7a98c0e8e80e 100644 --- a/test/files/neg/t9398/data.scala +++ b/test/files/neg/t9398/data.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings sealed abstract class TB case object B extends TB case object B2 extends TB diff --git a/test/files/neg/t9398/match.scala b/test/files/neg/t9398/match.scala index 28bc77c83d0c..95015888d5a3 100644 --- a/test/files/neg/t9398/match.scala +++ b/test/files/neg/t9398/match.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror class Test { // Should warn that CC(B2) isn't matched def test(c: CC): Unit = c match { diff --git a/test/files/neg/t9401.check b/test/files/neg/t9401.check index 638d56db63bc..cb1558b0189b 100644 --- a/test/files/neg/t9401.check +++ b/test/files/neg/t9401.check @@ -1,4 +1,4 @@ t9401.scala:3: error: cannot find class tag for element type T gencastarray = new Array[T](0) ^ -one error found +1 error diff --git a/test/files/neg/t9414.check b/test/files/neg/t9414.check new file 
mode 100644 index 000000000000..20a78141cc27 --- /dev/null +++ b/test/files/neg/t9414.check @@ -0,0 +1,10 @@ +t9414.scala:6: error: could not optimize @tailrec annotated method bar: it is neither private nor final so can be overridden + @tailrec def bar: Int = { + ^ +t9414.scala:20: error: could not optimize @tailrec annotated method bar: it is neither private nor final so can be overridden + @tailrec def bar: Int = { + ^ +t9414.scala:34: error: could not optimize @tailrec annotated method bar: it is neither private nor final so can be overridden + @tailrec def bar: Int = { + ^ +3 errors diff --git a/test/files/neg/t9414.scala b/test/files/neg/t9414.scala new file mode 100644 index 000000000000..2eea04795058 --- /dev/null +++ b/test/files/neg/t9414.scala @@ -0,0 +1,42 @@ +import annotation._ + +class C { + def foo = { + class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + class Child extends Parent { + override def bar = 42 + } + } +} + +class D { + def foo = { + class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + class Child extends Parent + class GrandChild extends Child { + override def bar = 42 + } + } +} + +object E { + sealed class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + final class Child extends Parent { + override def bar = 42 + } +} diff --git a/test/files/neg/t9438.check b/test/files/neg/t9438.check new file mode 100644 index 000000000000..ca717b022493 --- /dev/null +++ b/test/files/neg/t9438.check @@ -0,0 +1,11 @@ +t9438.scala:3: error: type mismatch; + found : Double(3.14) + required: Int + val i: Int = 3.14 + ^ +t9438.scala:5: error: type mismatch; + found : Double(3.14) + required: Int + 3.14 + ^ +2 errors diff --git a/test/files/neg/t9438.scala b/test/files/neg/t9438.scala new file mode 100644 index 000000000000..d540c9a47285 --- /dev/null +++ b/test/files/neg/t9438.scala @@ -0,0 +1,6 @@ + +class C { + val i: Int = 3.14 + def f(): Int = 
+ 3.14 +} diff --git a/test/files/neg/t944.check b/test/files/neg/t944.check index 1fc0a12208cd..dbd5e05a9e85 100644 --- a/test/files/neg/t944.check +++ b/test/files/neg/t944.check @@ -1,4 +1,4 @@ -t944.scala:5: error: implementation restricts functions to 22 parameters +t944.scala:5: error: functions may not have more than 22 parameters, but 23 given a23:Int) => 1 ^ -one error found +1 error diff --git a/test/files/neg/t9527a.check b/test/files/neg/t9527a.check index e756518bed7c..a67a74e82446 100644 --- a/test/files/neg/t9527a.check +++ b/test/files/neg/t9527a.check @@ -1,7 +1,7 @@ t9527a.scala:5: error: ambiguous implicit values: - both method f in class C of type (x: Int)String - and method g in class C of type (x: Int)String + both method f in class C of type (x: Int): String + and method g in class C of type (x: Int): String match expected type Int => String implicitly[Int => String] ^ -one error found +1 error diff --git a/test/files/neg/t9527b.check b/test/files/neg/t9527b.check index 4529ec83ea25..bdc77302be9f 100644 --- a/test/files/neg/t9527b.check +++ b/test/files/neg/t9527b.check @@ -1,4 +1,4 @@ t9527b.scala:6: error: msg A=Nothing implicitly[Int => String] ^ -one error found +1 error diff --git a/test/files/neg/t9529.check b/test/files/neg/t9529.check index 1d4724a59831..ddd3900ea7be 100644 --- a/test/files/neg/t9529.check +++ b/test/files/neg/t9529.check @@ -1,4 +1,4 @@ t9529.scala:7: error: Java annotation Deprecated may not appear multiple times on class TooMany class TooMany ^ -one error found +1 error diff --git a/test/files/neg/t9535.check b/test/files/neg/t9535.check index 5c3e3ea8e687..13beadbd6185 100644 --- a/test/files/neg/t9535.check +++ b/test/files/neg/t9535.check @@ -4,4 +4,4 @@ t9535.scala:4: error: not found: type E1 t9535.scala:6: error: class type required but E found @throws(classOf[E]) def g: E = ??? 
// neg test: classOf requires class type ^ -two errors found +2 errors diff --git a/test/files/neg/t9538.check b/test/files/neg/t9538.check new file mode 100644 index 000000000000..17458daf5d32 --- /dev/null +++ b/test/files/neg/t9538.check @@ -0,0 +1,13 @@ +t9538.scala:9: error: Option[String] is not a valid result type of an unapplySeq method of an extractor. + def f(x: Any) = x match { case X(y, z) => } + ^ +t9538.scala:10: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g0(x: Any) = x match { case Y() => } + ^ +t9538.scala:11: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g1(x: Any) = x match { case Y(y) => } + ^ +t9538.scala:12: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g2(x: Any) = x match { case Y(y,z) => } + ^ +4 errors diff --git a/test/files/neg/t9538.scala b/test/files/neg/t9538.scala new file mode 100644 index 000000000000..f64ef9552dd8 --- /dev/null +++ b/test/files/neg/t9538.scala @@ -0,0 +1,13 @@ + + + +object X { def unapplySeq(x: Any): Option[String] = { Some(x.toString.toUpperCase) }} + +object Y { def unapplySeq(v: Any) = Option((1, 2, 3)) } + +object Test extends App { + def f(x: Any) = x match { case X(y, z) => } + def g0(x: Any) = x match { case Y() => } + def g1(x: Any) = x match { case Y(y) => } + def g2(x: Any) = x match { case Y(y,z) => } +} diff --git a/test/files/neg/t9572.check b/test/files/neg/t9572.check index b95bd015cf53..13b124b50840 100644 --- a/test/files/neg/t9572.check +++ b/test/files/neg/t9572.check @@ -1,7 +1,7 @@ -t9572.scala:3: error: too many elements for tuple: 23, allowed: 22 +t9572.scala:3: error: tuples may not have more than 22 elements, but 23 given val term23 = (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) - ^ -t9572.scala:5: error: too many elements for tuple: 23, allowed: 22 + ^ +t9572.scala:5: error: tuples may not have 
more than 22 elements, but 23 given val type23: (Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int) = null - ^ -two errors found + ^ +2 errors diff --git a/test/files/neg/t961.check b/test/files/neg/t961.check index 14d39b0f4224..faeaad754e6a 100644 --- a/test/files/neg/t961.check +++ b/test/files/neg/t961.check @@ -1,4 +1,4 @@ t961.scala:11: error: Temp.B.type does not take parameters B() match { ^ -one error found +1 error diff --git a/test/files/neg/t9617.check b/test/files/neg/t9617.check index 825578d683c5..84a286a25bb7 100644 --- a/test/files/neg/t9617.check +++ b/test/files/neg/t9617.check @@ -1,9 +1,12 @@ Test.scala:4: warning: class DeprecatedClass in package p1 is deprecated +object Test extends p1.DeprecatedClass { + ^ +Test.scala:5: warning: class DeprecatedClass in package p1 is deprecated def useC = p1.DeprecatedClass.foo ^ -Test.scala:5: warning: method foo in class DeprecatedMethod is deprecated +Test.scala:6: warning: method foo in class DeprecatedMethod is deprecated def useM = p1.DeprecatedMethod.foo ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/t9617/Test.scala b/test/files/neg/t9617/Test.scala index 6d5e5ab78acf..2d365a0ebf8f 100644 --- a/test/files/neg/t9617/Test.scala +++ b/test/files/neg/t9617/Test.scala @@ -1,6 +1,7 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -Werror -Xlint:deprecation + // Joint-compilation copy of test/files/neg/t10752/Test_2.scala -object Test { +object Test extends p1.DeprecatedClass { def useC = p1.DeprecatedClass.foo def useM = p1.DeprecatedMethod.foo } diff --git a/test/files/neg/t9629.check b/test/files/neg/t9629.check index 4eafa842365d..b2e21a275039 100644 --- a/test/files/neg/t9629.check +++ b/test/files/neg/t9629.check @@ -14,4 +14,4 @@ t9629.scala:9: error: pattern must be a value: Option[Int] Note: if you intended to match against the class, try `case _: Option[_]` case x @ (y @ Option[Int]) => ^ -four errors found +4 errors diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check index 85b64b0bb581..973069f516ad 100644 --- a/test/files/neg/t963.check +++ b/test/files/neg/t963.check @@ -9,4 +9,4 @@ t963.scala:14: error: stable identifier required, but y3.x.type found. t963.scala:17: error: stable identifier required, but y4.x.type found. val w4 : y4.x.type = y4.x ^ -three errors found +3 errors diff --git a/test/files/neg/t9636.check b/test/files/neg/t9636.check index a30d401ea909..0c1ebb84e742 100644 --- a/test/files/neg/t9636.check +++ b/test/files/neg/t9636.check @@ -1,6 +1,6 @@ -t9636.scala:12: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. +t9636.scala:13: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. if (signature.sameElements(Array(0x1F, 0x8B))) { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/t9636.scala b/test/files/neg/t9636.scala index 030edb6cd1a2..4c3293d20384 100644 --- a/test/files/neg/t9636.scala +++ b/test/files/neg/t9636.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Werror -Xlint +// import java.io._ import java.util.zip._ diff --git a/test/files/neg/t963b.check b/test/files/neg/t963b.check index 9918a98c464c..08ff5c94056a 100644 --- a/test/files/neg/t963b.check +++ b/test/files/neg/t963b.check @@ -3,4 +3,4 @@ t963b.scala:25: error: type mismatch; required: AnyRef{val y: A} B.f(B) ^ -one error found +1 error diff --git a/test/files/neg/t963b.scala b/test/files/neg/t963b.scala index b34aae8095ae..3442f46c4e5d 100644 --- a/test/files/neg/t963b.scala +++ b/test/files/neg/t963b.scala @@ -5,7 +5,7 @@ trait A { } object B { - def f(x : { val y : A }) { x.y.v = x.y.v } + def f(x : { val y : A }): Unit = { x.y.v = x.y.v } var a : A = _ var b : Boolean = false diff --git a/test/files/neg/t9675.check b/test/files/neg/t9675.check index b7aa7825943a..10d7634de252 100644 --- a/test/files/neg/t9675.check +++ b/test/files/neg/t9675.check @@ -1,27 +1,27 @@ -t9675.scala:5: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:6: warning: comparing values of types Test.A and String using `!=` will always yield true val func1 = (x: A) => { x != "x" } ^ -t9675.scala:7: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:8: warning: comparing values of types Test.A and String using `!=` will always yield true val func2 = (x: A) => { x != "x" }: Boolean ^ -t9675.scala:9: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:10: warning: comparing values of types Test.A and String using `!=` will always yield true val func3: Function1[A, Boolean] = (x) => { x != "x" } ^ -t9675.scala:12: warning: comparing values of types Test.A and String using `!=` 
will always yield true +t9675.scala:13: warning: comparing values of types Test.A and String using `!=` will always yield true def apply(x: A): Boolean = { x != "x" } ^ -t9675.scala:15: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:16: warning: comparing values of types Test.A and String using `!=` will always yield true def method(x: A): Boolean = { x != "x" } ^ -t9675.scala:19: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:20: warning: comparing values of types Test.A and String using `!=` will always yield true A("x") != "x" ^ -t9675.scala:21: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:22: warning: comparing values of types Test.A and String using `!=` will always yield true val func5: Function1[A, Boolean] = (x) => { x != "x" } ^ -t9675.scala:23: warning: comparing values of types Test.A and String using `!=` will always yield true +t9675.scala:24: warning: comparing values of types Test.A and String using `!=` will always yield true List(A("x")).foreach((item: A) => item != "x") ^ -error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+8 warnings +1 error diff --git a/test/files/neg/t9675.scala b/test/files/neg/t9675.scala index e981166cb978..66319c2886b6 100644 --- a/test/files/neg/t9675.scala +++ b/test/files/neg/t9675.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { case class A(x: String) @@ -15,7 +16,7 @@ object Test { def method(x: A): Boolean = { x != "x" } case class PersonInfo(rankPayEtc: Unit) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { A("x") != "x" val func5: Function1[A, Boolean] = (x) => { x != "x" } diff --git a/test/files/neg/t9684.check b/test/files/neg/t9684.check deleted file mode 100644 index 0947d513a6ae..000000000000 --- a/test/files/neg/t9684.check +++ /dev/null @@ -1,9 +0,0 @@ -t9684.scala:7: warning: object JavaConversions in package collection is deprecated (since 2.12.0): use JavaConverters - null.asInstanceOf[java.util.List[Int]] : Buffer[Int] - ^ -t9684.scala:9: warning: object JavaConversions in package collection is deprecated (since 2.12.0): use JavaConverters - null.asInstanceOf[Iterable[Int]] : java.util.Collection[Int] - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-two warnings found -one error found diff --git a/test/files/neg/t9684.scala b/test/files/neg/t9684.scala deleted file mode 100644 index a511d8fee75d..000000000000 --- a/test/files/neg/t9684.scala +++ /dev/null @@ -1,10 +0,0 @@ -// scalac: -deprecation -Xfatal-warnings - -import scala.collection.JavaConversions._ -import scala.collection.mutable.Buffer - -trait Test { - null.asInstanceOf[java.util.List[Int]] : Buffer[Int] - - null.asInstanceOf[Iterable[Int]] : java.util.Collection[Int] -} diff --git a/test/files/neg/t9684b.check b/test/files/neg/t9684b.check deleted file mode 100644 index 5f328abd4389..000000000000 --- a/test/files/neg/t9684b.check +++ /dev/null @@ -1,7 +0,0 @@ -t9684b.scala:6: error: reference to asScalaIterator is ambiguous; -it is imported twice in the same scope by -import scala.collection.JavaConversions._ -and import scala.collection.JavaConverters._ - asScalaIterator(null) // fails: asScalaIterator is imported twice. - ^ -one error found diff --git a/test/files/neg/t9684b.scala b/test/files/neg/t9684b.scala deleted file mode 100644 index 010e9d1b5dc3..000000000000 --- a/test/files/neg/t9684b.scala +++ /dev/null @@ -1,14 +0,0 @@ -trait T1 { - import scala.collection.JavaConverters._ - import scala.collection.JavaConversions._ - - null.asInstanceOf[java.util.Iterator[String]]: Iterator[String] // works - asScalaIterator(null) // fails: asScalaIterator is imported twice. 
-} - -trait T2 { - import scala.collection.JavaConversions.asScalaIterator - - null.asInstanceOf[java.util.Iterator[String]]: Iterator[String] // works - asScalaIterator(null) // works -} diff --git a/test/files/neg/t9717.check b/test/files/neg/t9717.check new file mode 100644 index 000000000000..29ea674e98a2 --- /dev/null +++ b/test/files/neg/t9717.check @@ -0,0 +1,16 @@ +t9717.scala:2: error: ambiguous implicit values: + both value F of type Int + and value v of type Int + match expected type Int +class B(implicit F: Int) extends A({ implicit val v: Int = 1; implicitly[Int] }) // ambiguous + ^ +t9717.scala:6: error: could not find implicit value for parameter e: Int + def this() = this(implicitly[Int]) // neg + ^ +t9717.scala:7: error: not found: value f + def this(s: String) = this(f) // neg (`this` is not in scope!) + ^ +t9717.scala:12: error: could not find implicit value for parameter e: Int + def this() = { this(implicitly[Int]) } // not in scope (spec 5.3.1, scope which is in effect at the point of the enclosing class definition) + ^ +4 errors diff --git a/test/files/neg/t9717.scala b/test/files/neg/t9717.scala new file mode 100644 index 000000000000..848ed257251f --- /dev/null +++ b/test/files/neg/t9717.scala @@ -0,0 +1,16 @@ +class A(val a: Int)(implicit val F: Int) +class B(implicit F: Int) extends A({ implicit val v: Int = 1; implicitly[Int] }) // ambiguous + +class C(x: Int) { + implicit def f: Int = 1 + def this() = this(implicitly[Int]) // neg + def this(s: String) = this(f) // neg (`this` is not in scope!) 
+} + +class D(x: Int) { + import D.f + def this() = { this(implicitly[Int]) } // not in scope (spec 5.3.1, scope which is in effect at the point of the enclosing class definition) +} +object D { + implicit def f: Int = 1 +} diff --git a/test/files/neg/t9745.check b/test/files/neg/t9745.check index 687cc98d2707..04e6834d0238 100644 --- a/test/files/neg/t9745.check +++ b/test/files/neg/t9745.check @@ -1,4 +1,4 @@ -t9745.scala:2: error: missing parameter type for expanded function ((x$1: ) => Seq({ +t9745.scala:2: error: missing parameter type for expanded function (() => Seq({ .<$plus$eq: error>(1); 42 }).apply(x$1)) @@ -16,4 +16,4 @@ t9745.scala:19: error: missing parameter type t9745.scala:19: error: missing parameter type val g = (x, y) => f(42)(x, y) ^ -5 errors found +5 errors diff --git a/test/files/neg/t9745.scala b/test/files/neg/t9745.scala index 5f0cfc4462f9..fd193e46b3aa 100644 --- a/test/files/neg/t9745.scala +++ b/test/files/neg/t9745.scala @@ -17,4 +17,4 @@ class E { class Convo { def f(i: Int)(z: Any): Int = ??? 
val g = (x, y) => f(42)(x, y) -} \ No newline at end of file +} diff --git a/test/files/neg/t9781.check b/test/files/neg/t9781.check index 422c51013a74..3abb3115eaf8 100644 --- a/test/files/neg/t9781.check +++ b/test/files/neg/t9781.check @@ -1,4 +1,4 @@ t9781.scala:3: error: not found: value undefinedSymbol c(undefinedSymbol) += 1 ^ -one error found +1 error diff --git a/test/files/neg/t9834.check b/test/files/neg/t9834.check index 4ca451596bf1..26bc7bb6e637 100644 --- a/test/files/neg/t9834.check +++ b/test/files/neg/t9834.check @@ -1,4 +1,4 @@ -t9834.scala:6: error: value += is not a member of Int +t9834.scala:7: error: value += is not a member of Int Expression does not convert to assignment because: type mismatch; found : String @@ -6,4 +6,4 @@ t9834.scala:6: error: value += is not a member of Int expansion: x.update(x.apply().+("42")) x() += "42" ^ -one error found +1 error diff --git a/test/files/neg/t9834.scala b/test/files/neg/t9834.scala index 4d93d449127f..43a605591655 100644 --- a/test/files/neg/t9834.scala +++ b/test/files/neg/t9834.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos +// +// object x { def apply() = 42 ; def update(i: Int) = () } diff --git a/test/files/neg/t9847.check b/test/files/neg/t9847.check index c37317ece9ab..d10c0e852288 100644 --- a/test/files/neg/t9847.check +++ b/test/files/neg/t9847.check @@ -1,45 +1,30 @@ -t9847.scala:5: warning: discarded non-Unit value +t9847.scala:6: warning: discarded non-Unit value of type Int(42) def f(): Unit = 42 ^ -t9847.scala:6: warning: discarded non-Unit value - def g = (42: Unit) - ^ -t9847.scala:13: warning: discarded non-Unit value - + 1 - ^ -t9847.scala:17: warning: discarded non-Unit value - x + 1 - ^ -t9847.scala:20: warning: discarded non-Unit value - def j(): Unit = x + 1 - ^ -t9847.scala:5: warning: a pure expression does nothing in statement position - def f(): Unit = 42 - ^ -t9847.scala:6: warning: a pure expression does nothing in statement position - def g = (42: Unit) - ^ 
-t9847.scala:8: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses +t9847.scala:9: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 1 ^ -t9847.scala:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses +t9847.scala:13: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 1 ^ -t9847.scala:13: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected - + 1 - ^ -t9847.scala:13: warning: a pure expression does nothing in statement position +t9847.scala:14: warning: discarded non-Unit value of type Int(1) + 1 ^ -t9847.scala:22: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +t9847.scala:18: warning: discarded non-Unit value of type Int + x + 1 + ^ +t9847.scala:21: warning: discarded non-Unit value of type Int + def j(): Unit = x + 1 + ^ +t9847.scala:23: warning: a pure expression does nothing in statement position class C { 42 } ^ -t9847.scala:23: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +t9847.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses class D { 42 ; 17 } ^ -t9847.scala:23: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +t9847.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses class D { 42 ; 17 } ^ -error: No warnings can be incurred under -Xfatal-warnings. -14 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+9 warnings +1 error diff --git a/test/files/neg/t9847.scala b/test/files/neg/t9847.scala index 714dbf386814..9e6abbffb167 100644 --- a/test/files/neg/t9847.scala +++ b/test/files/neg/t9847.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-value-discard +//> using options -Werror -Xlint:deprecation -Ywarn-value-discard +// trait T { diff --git a/test/files/neg/t9849.check b/test/files/neg/t9849.check index 7b4715084648..3adc2fdc1983 100644 --- a/test/files/neg/t9849.check +++ b/test/files/neg/t9849.check @@ -1,7 +1,7 @@ -t9849.scala:14: error: method h in object O cannot be accessed in object p.O +t9849.scala:14: error: method h in object O cannot be accessed as a member of object p.O from object Test in package p O.h() ^ -t9849.scala:15: error: method h$default$1 in object O cannot be accessed in object p.O +t9849.scala:15: error: method h$default$1 in object O cannot be accessed as a member of object p.O from object Test in package p O.h$default$1 ^ -two errors found +2 errors diff --git a/test/files/neg/t987.check b/test/files/neg/t987.check index 90ab70ba1ca0..9efcdf42aa9c 100644 --- a/test/files/neg/t987.check +++ b/test/files/neg/t987.check @@ -16,4 +16,4 @@ abstract class D extends C with B[D] {} t987.scala:25: error: type arguments [D] do not conform to trait B's type parameter bounds [T <: B[T]] abstract class D extends C with B[D] {} ^ -four errors found +4 errors diff --git a/test/files/neg/t9911.check b/test/files/neg/t9911.check new file mode 100644 index 000000000000..e16451a9cfa3 --- /dev/null +++ b/test/files/neg/t9911.check @@ -0,0 +1,4 @@ +t9911.scala:23: error: super may not be used on value source; super can only be used to select a member that is a method or type + super.source.getSomething + ^ +1 error diff --git a/test/files/neg/t9911.scala b/test/files/neg/t9911.scala new file mode 100644 index 000000000000..1da80358f4cf --- /dev/null +++ b/test/files/neg/t9911.scala @@ -0,0 +1,28 @@ +// This should say: +// Error: super may not be 
used on value source +class ScalacBug { + + class SomeClass { + + type U + + // Changing T or U stops the problem + def getSomething[T]: U = ??? + } + + trait Base { + + // Changing this to a def like it should be stops the problem + val source: SomeClass = ??? + } + + class Bug extends Base { + + override val source = { + // Not calling the function stops the problem + super.source.getSomething + ??? + } + } + +} diff --git a/test/files/neg/t9912.check b/test/files/neg/t9912.check new file mode 100644 index 000000000000..c90180cb681d --- /dev/null +++ b/test/files/neg/t9912.check @@ -0,0 +1,7 @@ +t9912.scala:7: error: bridge generated for member method compareTo: (b: B): Int in class B +which overrides method compareTo: (x$1: T): Int in trait Comparable +clashes with definition of method compareTo: (o: Any): Int in class A; +both have erased type (x$1: Object): Int + def compareTo(b: B): Int = 0 + ^ +1 error diff --git a/test/files/neg/t9912.scala b/test/files/neg/t9912.scala new file mode 100644 index 000000000000..e00c39f9d4f8 --- /dev/null +++ b/test/files/neg/t9912.scala @@ -0,0 +1,20 @@ +// skalac: -Vdebug -Vlog:_ -Vprint:erasure + +class A { + def compareTo(o: Any): Int = 0 +} +class B extends A with Comparable[B] { + def compareTo(b: B): Int = 0 +} +object C { + def main(args: Array[String]): Unit = { + println(new B().compareTo(new Object())) + } +} + +/* +java.lang.ClassCastException: class java.lang.Object cannot be cast to class B (java.lang.Object is in module java.base of loader 'bootstrap'; B is in unnamed module of loader java.net.URLClassLoader @3af17be2) + at B.compareTo(t9912.scala:5) + at Main$.main(t9912.scala:10) + at Main.main(t9912.scala) +*/ diff --git a/test/files/neg/t9912b.check b/test/files/neg/t9912b.check new file mode 100644 index 000000000000..c653c6e55655 --- /dev/null +++ b/test/files/neg/t9912b.check @@ -0,0 +1,7 @@ +t9912.scala:6: error: name clash between defined and inherited member: +def compareTo(o: java.util.List[_]): Int 
in class Comparer and +def compareTo(other: java.util.List[A]): Int at line 6 +have same type after erasure: (o: java.util.List): Int + def compareTo(other: java.util.List[A]): Int = 0 + ^ +1 error diff --git a/test/files/neg/t9912b/Comparer.java b/test/files/neg/t9912b/Comparer.java new file mode 100644 index 000000000000..64056687d56c --- /dev/null +++ b/test/files/neg/t9912b/Comparer.java @@ -0,0 +1,7 @@ + + +public class Comparer { + public int compareTo(java.util.List o) { + return 0; + } +} diff --git a/test/files/neg/t9912b/t9912.scala b/test/files/neg/t9912b/t9912.scala new file mode 100644 index 000000000000..d8bfabfed833 --- /dev/null +++ b/test/files/neg/t9912b/t9912.scala @@ -0,0 +1,12 @@ + +import scala.jdk.CollectionConverters._ + +// superclass can't write other erased types so as to clash with bridge +class B[A: Ordering] extends Comparer with Comparable[java.util.List[A]] { + def compareTo(other: java.util.List[A]): Int = 0 +} +object C { + def main(args: Array[String]): Unit = println { + new B[Int]().compareTo(List(42).asJava) + } +} diff --git a/test/files/neg/t9912c.check b/test/files/neg/t9912c.check new file mode 100644 index 000000000000..762faa11bec6 --- /dev/null +++ b/test/files/neg/t9912c.check @@ -0,0 +1,7 @@ +t9912c.scala:8: error: bridge generated for member method compareTo: (b: B): Int in class C3 +which overrides method compareTo: (t: T): Int in trait C2 +clashes with definition of method compareTo: (a: A): Int in class C1; +both have erased type (t: A): Int +class C3 extends C1 with C2[B] { def compareTo(b: B): Int = 1 } + ^ +1 error diff --git a/test/files/neg/t9912c.scala b/test/files/neg/t9912c.scala new file mode 100644 index 000000000000..93d3a264c4e7 --- /dev/null +++ b/test/files/neg/t9912c.scala @@ -0,0 +1,14 @@ +class A +class B extends A + +class C1 { def compareTo(a: A): Int = 0 } + +trait C2[T <: A] { def compareTo(t: T): Int } + +class C3 extends C1 with C2[B] { def compareTo(b: B): Int = 1 } + +object Test extends 
App { + println { + (new C3).compareTo(new A) + } +} diff --git a/test/files/neg/t9953.check b/test/files/neg/t9953.check index 9514c5aa61d8..929c70c5f3b2 100644 --- a/test/files/neg/t9953.check +++ b/test/files/neg/t9953.check @@ -1,6 +1,6 @@ -t9953.scala:11: warning: Object and X are unrelated: they will never compare equal +t9953.scala:12: warning: Object and X are unrelated: they will never compare equal def b = y == x // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t9953.scala b/test/files/neg/t9953.scala index c882f4dc673c..a106a03920c7 100644 --- a/test/files/neg/t9953.scala +++ b/test/files/neg/t9953.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class X(val v: Int) extends AnyVal trait T extends Any diff --git a/test/files/neg/t9960.check b/test/files/neg/t9960.check new file mode 100644 index 000000000000..085665971bc4 --- /dev/null +++ b/test/files/neg/t9960.check @@ -0,0 +1,4 @@ +t9960.scala:27: error: could not find implicit value for parameter m: NNN.Aux[NNN.Reader,NNN.FxAppend[NNN.Fx1[NNN.Task],NNN.Fx2[NNN.Validate,NNN.Reader]],NNN.Fx2[NNN.Task,NNN.Validate]] + val hhhh: Eff[Fx2[Task, Validate], Unit] = runReader(gggg) + ^ +1 error diff --git a/test/files/neg/t9960.scala b/test/files/neg/t9960.scala new file mode 100644 index 000000000000..df54380acecf --- /dev/null +++ b/test/files/neg/t9960.scala @@ -0,0 +1,30 @@ +import scala.language.higherKinds + +object NNN { + class Validate[A] + class Task[A] + class Fx2[M[_], N[_]] + class Fx1[M[_]] + class Eff[R, A] + class Reader[A] + final case class FxAppend[L, R](left: L, right: R) + trait Member[T[_], R]{ + type Out + } + + type Aux[T[_], R, U] = Member[T, R] { type Out = U } + + implicit def Member3L[T[_], L[_], R[_]]: Aux[T, FxAppend[Fx1[T], Fx2[L, R]], Fx2[L, R]] = + new Member[T, FxAppend[Fx1[T], 
Fx2[L, R]]] { outer => + type Out = Fx2[L, R] + } + + object TheTest { + def runReader[R, U, B](r: Eff[R, B])(implicit m: Aux[Reader, R, U]): Eff[U, B] = ??? + + def helper(): Unit = { + val gggg: Eff[FxAppend[Fx1[Task], Fx2[Validate, Reader]], Unit] = ??? + val hhhh: Eff[Fx2[Task, Validate], Unit] = runReader(gggg) + } + } +} diff --git a/test/files/neg/t9963.check b/test/files/neg/t9963.check index 38f0f7dcd04a..22f9d506ee40 100644 --- a/test/files/neg/t9963.check +++ b/test/files/neg/t9963.check @@ -1,4 +1,4 @@ t9963.scala:14: error: value withFilter is not a member of t9963.MySet[A] j: A <- new MySet[A]() // must have a typecheck patmat here to trigger this bug ^ -one error found +1 error diff --git a/test/files/neg/t9963.scala b/test/files/neg/t9963.scala index 8358aa1d2738..df0524d3bff5 100644 --- a/test/files/neg/t9963.scala +++ b/test/files/neg/t9963.scala @@ -5,7 +5,7 @@ object t9963 { } class MySet[A] { - def map[B: Equiv](f: A => B): MySet[B] = ??? // must have an implicit typeclass here to trigger this bug + def map[B: Equiv](f: A => B): MySet[B] = ??? // must have an implicit type class here to trigger this bug def filter(f: A => Boolean): MySet[A] = ??? 
} diff --git a/test/files/neg/t997.check b/test/files/neg/t997.check index b11879222929..38c2b75fd09d 100644 --- a/test/files/neg/t997.check +++ b/test/files/neg/t997.check @@ -1,4 +1,4 @@ t997.scala:13: error: too many patterns for object Foo offering (String, String): expected 2, found 3 "x" match { case Foo(a, b, c) => Console.println((a,b,c)) } ^ -one error found +1 error diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check index 1daad6922edc..362b046c5416 100644 --- a/test/files/neg/tailrec-2.check +++ b/test/files/neg/tailrec-2.check @@ -4,4 +4,4 @@ tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it con tailrec-2.scala:9: error: @tailrec annotated method contains no recursive calls @annotation.tailrec final def f1[B >: A](mem: List[B]): List[B] = this.g(mem) ^ -two errors found +2 errors diff --git a/test/files/neg/tailrec-2.scala b/test/files/neg/tailrec-2.scala index 9eb3af2f07c5..d6b8b1355b01 100644 --- a/test/files/neg/tailrec-2.scala +++ b/test/files/neg/tailrec-2.scala @@ -26,4 +26,4 @@ object Other { object Bop { def m1[A] : Super[A] = sys.error("") def m2[A] : Bop2[A] = sys.error("") -} \ No newline at end of file +} diff --git a/test/files/neg/tailrec-3.check b/test/files/neg/tailrec-3.check index a3542fb56441..8f3e805fc3fe 100644 --- a/test/files/neg/tailrec-3.check +++ b/test/files/neg/tailrec-3.check @@ -7,4 +7,4 @@ tailrec-3.scala:6: error: could not optimize @tailrec annotated method quux2: it tailrec-3.scala:10: error: could not optimize @tailrec annotated method quux3: it contains a recursive call not in tail position case x :: xs if quux3(List("abc")) => quux3(xs) ^ -three errors found +3 errors diff --git a/test/files/neg/tailrec-4.check b/test/files/neg/tailrec-4.check index 3ec32744780e..0b2df62c6c80 100644 --- a/test/files/neg/tailrec-4.check +++ b/test/files/neg/tailrec-4.check @@ -13,4 +13,4 @@ tailrec-4.scala:23: error: could not optimize @tailrec annotated method foo: it 
tailrec-4.scala:31: error: could not optimize @tailrec annotated method foo: it contains a recursive call not in tail position @tailrec def foo: Int = foo + 1 ^ -5 errors found +5 errors diff --git a/test/files/neg/tailrec-4.scala b/test/files/neg/tailrec-4.scala index 4822799dfa07..90dad3c86542 100644 --- a/test/files/neg/tailrec-4.scala +++ b/test/files/neg/tailrec-4.scala @@ -1,7 +1,7 @@ import annotation._ object Tail { - def tcInFunc: Unit = { + def tcInFunc: () => Unit = { () => { @tailrec def foo: Int = foo + 1 } diff --git a/test/files/neg/tailrec.check b/test/files/neg/tailrec.check index 79073a2c881d..f48b6de36c52 100644 --- a/test/files/neg/tailrec.check +++ b/test/files/neg/tailrec.check @@ -13,4 +13,4 @@ tailrec.scala:59: error: could not optimize @tailrec annotated method fail3: it tailrec.scala:63: error: could not optimize @tailrec annotated method fail4: it changes type of 'this' on a polymorphic recursive call @tailrec final def fail4[U](other: Tom[U], x: Int): Int = other.fail4[U](other, x - 1) ^ -5 errors found +5 errors diff --git a/test/files/neg/tailrec.scala b/test/files/neg/tailrec.scala index 176459aea80d..dcc0b44854bf 100644 --- a/test/files/neg/tailrec.scala +++ b/test/files/neg/tailrec.scala @@ -10,7 +10,7 @@ object Winners { @tailrec def loopsucc1(x: Int): Int = loopsucc1(x - 1) @tailrec def loopsucc2[T](x: Int): Int = loopsucc2[T](x - 1) - def ding() { + def ding(): Unit = { object dong { @tailrec def loopsucc3(x: Int): Int = loopsucc3(x) } diff --git a/test/files/neg/tcpoly_bounds.check b/test/files/neg/tcpoly_bounds.check index 8b65c8d7fe63..4605e9161a46 100644 --- a/test/files/neg/tcpoly_bounds.check +++ b/test/files/neg/tcpoly_bounds.check @@ -1,4 +1,4 @@ tcpoly_bounds.scala:3: error: type arguments [List] do not conform to class A's type parameter bounds [m[x] <: Option[x]] object b extends A[List] ^ -one error found +1 error diff --git a/test/files/neg/tcpoly_infer_ticket1162.check b/test/files/neg/tcpoly_infer_ticket1162.check 
index 67b79e7f3c5d..62d24852ffc4 100644 --- a/test/files/neg/tcpoly_infer_ticket1162.check +++ b/test/files/neg/tcpoly_infer_ticket1162.check @@ -1,4 +1,4 @@ -tcpoly_infer_ticket1162.scala:6: error: wrong number of type parameters for method apply: [A, B, F[_]]()Test.Lift[A,B,F] in object Lift +tcpoly_infer_ticket1162.scala:6: error: wrong number of type parameters for method apply: [A, B, F[_]](): Test.Lift[A,B,F] in object Lift def simplify[A,B]: Expression[A,B] = Lift[A,B]() ^ -one error found +1 error diff --git a/test/files/neg/tcpoly_override.check b/test/files/neg/tcpoly_override.check index dbc3ff946157..eb0fb8d8082f 100644 --- a/test/files/neg/tcpoly_override.check +++ b/test/files/neg/tcpoly_override.check @@ -1,6 +1,6 @@ tcpoly_override.scala:9: error: The kind of type T does not conform to the expected kind of type T[_] in trait A. C.this.T's type parameters do not match type T's expected parameters: -type T (in class C) has no type parameters, but type T (in trait A) has one +type T (in class C) has no type parameters, but type T (in trait A) has 1 type T = B // This compiles well (@M: ... 
but it shouldn't) ^ -one error found +1 error diff --git a/test/files/neg/tcpoly_ticket2101.check b/test/files/neg/tcpoly_ticket2101.check index ad0fd8bda26a..6bb21f526b6c 100644 --- a/test/files/neg/tcpoly_ticket2101.check +++ b/test/files/neg/tcpoly_ticket2101.check @@ -1,4 +1,4 @@ tcpoly_ticket2101.scala:2: error: type arguments [T2,X] do not conform to class T's type parameter bounds [A[Y] <: T[A,B],B] class T2[X] extends T[T2, X] // ill-typed ^ -one error found +1 error diff --git a/test/files/neg/tcpoly_typealias.check b/test/files/neg/tcpoly_typealias.check index 4beac0e44071..b995d49b2810 100644 --- a/test/files/neg/tcpoly_typealias.check +++ b/test/files/neg/tcpoly_typealias.check @@ -13,4 +13,4 @@ BBound.this.m's type parameters do not match type m's expected parameters: type x (in trait BBound)'s bounds <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any type m[+x <: String] = FooBound[x] // error: x with stricter bound ^ -three errors found +3 errors diff --git a/test/files/neg/tcpoly_variance.check b/test/files/neg/tcpoly_variance.check index c0dfcac2dd7c..30a13b799481 100644 --- a/test/files/neg/tcpoly_variance.check +++ b/test/files/neg/tcpoly_variance.check @@ -1,5 +1,7 @@ -tcpoly_variance.scala:6: error: overriding method str in class A of type => m[Object]; - method str has incompatible type +tcpoly_variance.scala:6: error: incompatible type in overriding +def str: m[Object] (defined in class A); + found : m[String] + required: m[Object] override def str: m[String] = sys.error("foo") // since x in m[x] is invariant, ! 
m[String] <: m[Object] ^ -one error found +1 error diff --git a/test/files/neg/tcpoly_variance_enforce.check b/test/files/neg/tcpoly_variance_enforce.check index 3299cc343518..3a496d084b6d 100644 --- a/test/files/neg/tcpoly_variance_enforce.check +++ b/test/files/neg/tcpoly_variance_enforce.check @@ -54,4 +54,4 @@ FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant def y: coll[FooContra] = sys.error("foo") // error ^ -11 errors found +11 errors diff --git a/test/files/neg/text-blocks.check b/test/files/neg/text-blocks.check new file mode 100644 index 000000000000..8a9af6292a04 --- /dev/null +++ b/test/files/neg/text-blocks.check @@ -0,0 +1,13 @@ +text-blocks/Invalid1.java:4: error: illegal text block open delimiter sequence, missing line terminator + public static final String badOpeningDelimiter = """non-whitespace + ^ +text-blocks/Invalid1.java:4: error: expected + public static final String badOpeningDelimiter = """non-whitespace + ^ +text-blocks/Invalid1.java:6: error: illegal text block open delimiter sequence, missing line terminator + """; + ^ +text-blocks/Invalid2.java:6: error: unclosed string literal + foo""""; + ^ +4 errors diff --git a/test/files/neg/text-blocks/Invalid1.java b/test/files/neg/text-blocks/Invalid1.java new file mode 100644 index 000000000000..f49a4d1fd416 --- /dev/null +++ b/test/files/neg/text-blocks/Invalid1.java @@ -0,0 +1,7 @@ +//> using jvm 15+ +class Invalid1 { + + public static final String badOpeningDelimiter = """non-whitespace + foo + """; +} diff --git a/test/files/neg/text-blocks/Invalid2.java b/test/files/neg/text-blocks/Invalid2.java new file mode 100644 index 000000000000..3aafd9dcaadb --- /dev/null +++ b/test/files/neg/text-blocks/Invalid2.java @@ -0,0 +1,7 @@ +//> using jvm 15+ +class Invalid2 { + + // Closing delimiter is first three eligible `"""`, not last + public static final String closingDelimiterIsNotScalas = """ + 
foo""""; +} diff --git a/test/files/neg/ticket513.check b/test/files/neg/ticket513.check index 8994269262f0..1c70efa6b7ef 100644 --- a/test/files/neg/ticket513.check +++ b/test/files/neg/ticket513.check @@ -1,4 +1,4 @@ ticket513.scala:6: error: type arguments [NotThatBound] do not conform to trait T's type parameter bounds [A <: Bound] object Wrong extends Wrap[T[NotThatBound]] ^ -one error found +1 error diff --git a/test/files/neg/too-large.check b/test/files/neg/too-large.check new file mode 100644 index 000000000000..abcefd8bde22 --- /dev/null +++ b/test/files/neg/too-large.check @@ -0,0 +1,4 @@ +error: Error while emitting C +UTF8 string too large +error: Method crash in class C has a bad signature of length 124243 +2 errors diff --git a/test/files/neg/too-large.scala b/test/files/neg/too-large.scala new file mode 100644 index 000000000000..037ca2400933 --- /dev/null +++ b/test/files/neg/too-large.scala @@ -0,0 +1,12 @@ +//> abusing options -Vdebug -Xverify +class C { + type Level1 = Tuple4[Unit, Unit, Unit, Unit] + type Level2 = Tuple4[Level1, Level1, Level1, Level1] + type Level3 = Tuple4[Level2, Level2, Level2, Level2] + type Level4 = Tuple4[Level3, Level3, Level3, Level3] + type Level5 = Tuple4[Level4, Level4, Level4, Level4] + type Level6 = Tuple4[Level5, Level5, Level5, Level5] + type Level7 = Tuple4[Level6, Level6, Level6, Level6] + + def crash(x: Level6): Unit = ??? 
+} diff --git a/test/files/neg/tostring-interpolated.check b/test/files/neg/tostring-interpolated.check new file mode 100644 index 000000000000..13d0527e6745 --- /dev/null +++ b/test/files/neg/tostring-interpolated.check @@ -0,0 +1,41 @@ +tostring-interpolated.scala:7: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.f + def f = f"$c" // warn + ^ +tostring-interpolated.scala:8: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.s + def s = s"$c" // warn + ^ +tostring-interpolated.scala:9: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.r + def r = raw"$c" // warn + ^ +tostring-interpolated.scala:11: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.format + def format = f"${c.x}%d in $c or $c%s" // warn using c.toString // warn + ^ +tostring-interpolated.scala:11: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.format + def format = f"${c.x}%d in $c or $c%s" // warn using c.toString // warn + ^ +tostring-interpolated.scala:13: warning: Boolean format is null test for non-Boolean + def bool = f"$c%b" // warn just a null check + ^ +tostring-interpolated.scala:15: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.oops + def oops = s"${null} slipped thru my fingers" // warn + ^ +tostring-interpolated.scala:20: error: interpolation uses toString +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=T.greeting + def greeting = s"$sb, world" // warn + ^ 
+tostring-interpolated.scala:31: error: interpolated Unit value +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=Mitigations.unitized + def unitized = s"unfortunately $shown" // warn accidental unit value + ^ +tostring-interpolated.scala:32: error: interpolated Unit value +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=w-flag-tostring-interpolated, site=Mitigations.funitized + def funitized = f"unfortunately $shown" // warn accidental unit value + ^ +1 warning +9 errors diff --git a/test/files/neg/tostring-interpolated.scala b/test/files/neg/tostring-interpolated.scala new file mode 100644 index 000000000000..959720e4aacb --- /dev/null +++ b/test/files/neg/tostring-interpolated.scala @@ -0,0 +1,36 @@ +//> using options -Wconf:cat=w-flag-tostring-interpolated:e -Wtostring-interpolated + +case class C(x: Int) + +trait T { + def c = C(42) + def f = f"$c" // warn + def s = s"$c" // warn + def r = raw"$c" // warn + + def format = f"${c.x}%d in $c or $c%s" // warn using c.toString // warn + + def bool = f"$c%b" // warn just a null check + + def oops = s"${null} slipped thru my fingers" // warn + + def ok = s"${c.toString}" + + def sb = new StringBuilder().append("hello") + def greeting = s"$sb, world" // warn +} + +class Mitigations { + + val s = "hello, world" + val i = 42 + def shown() = println("shown") + + def ok = s"$s is ok" + def jersey = s"number $i" + def unitized = s"unfortunately $shown" // warn accidental unit value + def funitized = f"unfortunately $shown" // warn accidental unit value + + def nopct = f"$s is ok" + def nofmt = f"number $i" +} diff --git a/test/files/neg/trailing-commas.check b/test/files/neg/trailing-commas.check index e2677dc3f550..a371d51fe2fb 100644 --- a/test/files/neg/trailing-commas.check +++ b/test/files/neg/trailing-commas.check @@ -61,15 +61,9 @@ trait TypeArgs { def f: C[Int, String, ] } trailing-commas.scala:23: error: identifier expected but ']' 
found. trait TypeParamClause { type C[A, B, ] } ^ -trailing-commas.scala:23: error: ']' expected but '}' found. -trait TypeParamClause { type C[A, B, ] } - ^ trailing-commas.scala:24: error: identifier expected but ']' found. trait FunTypeParamClause { def f[A, B, ] } ^ -trailing-commas.scala:24: error: ']' expected but '}' found. -trait FunTypeParamClause { def f[A, B, ] } - ^ trailing-commas.scala:26: error: identifier expected but ')' found. trait SimpleType { def f: (Int, String, ) } ^ @@ -127,4 +121,4 @@ trait SimpleType2 { def f: (Int, ) } trailing-commas.scala:48: error: ')' expected but '}' found. trait SimpleType2 { def f: (Int, ) } ^ -43 errors found +41 errors diff --git a/test/files/neg/trait-defaults-super.check b/test/files/neg/trait-defaults-super.check index 2b19402828b5..7ddf00a86cb6 100644 --- a/test/files/neg/trait-defaults-super.check +++ b/test/files/neg/trait-defaults-super.check @@ -1,4 +1,4 @@ trait-defaults-super.scala:14: error: Unable to implement a super accessor required by trait T unless Iterable[String] is directly extended by class C. class C extends T ^ -one error found +1 error diff --git a/test/files/neg/trait-defaults-super.scala b/test/files/neg/trait-defaults-super.scala index c0febb43cd86..5ccc6aaac428 100644 --- a/test/files/neg/trait-defaults-super.scala +++ b/test/files/neg/trait-defaults-super.scala @@ -9,6 +9,6 @@ trait T extends java.lang.Iterable[String] { super[Iterable].spliterator super.spliterator } - def iterator(): java.util.Iterator[String] = java.util.Collections.emptyList().iterator() + def iterator: java.util.Iterator[String] = java.util.Collections.emptyList().iterator() } class C extends T diff --git a/test/files/neg/trait-no-native.check b/test/files/neg/trait-no-native.check index 12bce4042dda..cffe6fcb68b8 100644 --- a/test/files/neg/trait-no-native.check +++ b/test/files/neg/trait-no-native.check @@ -1,4 +1,4 @@ trait-no-native.scala:3: error: A trait cannot define a native method. @native def foo = ??? 
^ -one error found +1 error diff --git a/test/files/neg/trait_fields_conflicts.check b/test/files/neg/trait_fields_conflicts.check index 696d0284c103..f7361122ad84 100644 --- a/test/files/neg/trait_fields_conflicts.check +++ b/test/files/neg/trait_fields_conflicts.check @@ -1,273 +1,273 @@ -trait_fields_conflicts.scala:5: error: overriding value x in trait Val of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:5: error: `override` modifier required to override concrete member: +val x: Int (defined in trait Val) trait ValForVal extends Val { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:6: error: overriding value x in trait Val of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:6: error: `override` modifier required to override concrete member: +val x: Int (defined in trait Val) trait VarForVal extends Val { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:7: error: overriding value x in trait Val of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:7: error: `override` modifier required to override concrete member: +val x: Int (defined in trait Val) trait DefForVal extends Val { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:8: error: overriding variable x in trait Var of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:8: error: `override` modifier required to override concrete member: +def x: Int (defined in trait Var) trait ValForVar extends Var { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:9: error: overriding variable x in trait Var of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:9: error: `override` modifier required to override concrete member: +def x: Int (defined in trait Var) trait VarForVar extends Var { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:10: error: overriding variable x in trait Var of type Int; - 
method x needs `override' modifier +trait_fields_conflicts.scala:10: error: `override` modifier required to override concrete member: +def x: Int (defined in trait Var) trait DefForVar extends Var { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:11: error: overriding lazy value x in trait Lazy of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:11: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in trait Lazy) trait ValForLazy extends Lazy { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:12: error: overriding lazy value x in trait Lazy of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:12: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in trait Lazy) trait VarForLazy extends Lazy { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:13: error: overriding lazy value x in trait Lazy of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:13: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in trait Lazy) trait DefForLazy extends Lazy { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:16: error: overriding value x in trait Val of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:16: error: stable, immutable value required to override: +val x: Int (defined in trait Val) trait VarForValOvr extends Val { override var x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:17: error: overriding value x in trait Val of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:17: error: stable, immutable value required to override: +val x: Int (defined in trait Val) trait DefForValOvr extends Val { override def x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:18: error: overriding variable x in trait Var of 
type Int; - value x cannot override a mutable variable +trait_fields_conflicts.scala:18: error: mutable variable cannot be overridden: +def x: Int (defined in trait Var) trait ValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes ^ -trait_fields_conflicts.scala:19: error: overriding variable x in trait Var of type Int; - variable x cannot override a mutable variable +trait_fields_conflicts.scala:19: error: mutable variable cannot be overridden: +def x: Int (defined in trait Var) trait VarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:20: error: overriding variable x in trait Var of type Int; - method x cannot override a mutable variable +trait_fields_conflicts.scala:20: error: mutable variable cannot be overridden: +def x: Int (defined in trait Var) trait DefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:21: error: overriding lazy value x in trait Lazy of type Int; - value x must be declared lazy to override a concrete lazy value +trait_fields_conflicts.scala:21: error: value must be lazy when overriding concrete lazy value: +lazy val x: Int (defined in trait Lazy) trait ValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:22: error: overriding lazy value x in trait Lazy of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:22: error: stable, immutable value required to override: +lazy val x: Int (defined in trait Lazy) trait VarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why? 
^ -trait_fields_conflicts.scala:23: error: overriding lazy value x in trait Lazy of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:23: error: stable, immutable value required to override: +lazy val x: Int (defined in trait Lazy) trait DefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:25: error: overriding value x in trait Val of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:25: error: `override` modifier required to override concrete member: +val x: Int (defined in trait Val) class CValForVal extends Val { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:26: error: overriding value x in trait Val of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:26: error: `override` modifier required to override concrete member: +val x: Int (defined in trait Val) class CVarForVal extends Val { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:27: error: overriding value x in trait Val of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:27: error: `override` modifier required to override concrete member: +val x: Int (defined in trait Val) class CDefForVal extends Val { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:28: error: overriding variable x in trait Var of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:28: error: `override` modifier required to override concrete member: +def x: Int (defined in trait Var) class CValForVar extends Var { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:29: error: overriding variable x in trait Var of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:29: error: `override` modifier required to override concrete member: +def x: Int (defined in trait Var) class CVarForVar extends Var { var x: Int = 1 } // needs override ^ 
-trait_fields_conflicts.scala:30: error: overriding variable x in trait Var of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:30: error: `override` modifier required to override concrete member: +def x: Int (defined in trait Var) class CDefForVar extends Var { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:31: error: overriding lazy value x in trait Lazy of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:31: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in trait Lazy) class CValForLazy extends Lazy { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:32: error: overriding lazy value x in trait Lazy of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:32: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in trait Lazy) class CVarForLazy extends Lazy { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:33: error: overriding lazy value x in trait Lazy of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:33: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in trait Lazy) class CDefForLazy extends Lazy { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:36: error: overriding value x in trait Val of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:36: error: stable, immutable value required to override: +val x: Int (defined in trait Val) class CVarForValOvr extends Val { override var x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:37: error: overriding value x in trait Val of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:37: error: stable, immutable value required to override: +val x: Int (defined in trait Val) class CDefForValOvr extends Val { override def x: Int = 1 } 
// bad override ^ -trait_fields_conflicts.scala:38: error: overriding variable x in trait Var of type Int; - value x cannot override a mutable variable +trait_fields_conflicts.scala:38: error: mutable variable cannot be overridden: +def x: Int (defined in trait Var) class CValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes ^ -trait_fields_conflicts.scala:39: error: overriding variable x in trait Var of type Int; - variable x cannot override a mutable variable +trait_fields_conflicts.scala:39: error: mutable variable cannot be overridden: +def x: Int (defined in trait Var) class CVarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:40: error: overriding variable x in trait Var of type Int; - method x cannot override a mutable variable +trait_fields_conflicts.scala:40: error: mutable variable cannot be overridden: +def x: Int (defined in trait Var) class CDefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:41: error: overriding lazy value x in trait Lazy of type Int; - value x must be declared lazy to override a concrete lazy value +trait_fields_conflicts.scala:41: error: value must be lazy when overriding concrete lazy value: +lazy val x: Int (defined in trait Lazy) class CValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:42: error: overriding lazy value x in trait Lazy of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:42: error: stable, immutable value required to override: +lazy val x: Int (defined in trait Lazy) class CVarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why? 
^ -trait_fields_conflicts.scala:43: error: overriding lazy value x in trait Lazy of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:43: error: stable, immutable value required to override: +lazy val x: Int (defined in trait Lazy) class CDefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:49: error: overriding value x in class CVal of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:49: error: `override` modifier required to override concrete member: +val x: Int (defined in class CVal) trait ValForCVal extends CVal { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:50: error: overriding value x in class CVal of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:50: error: `override` modifier required to override concrete member: +val x: Int (defined in class CVal) trait VarForCVal extends CVal { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:51: error: overriding value x in class CVal of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:51: error: `override` modifier required to override concrete member: +val x: Int (defined in class CVal) trait DefForCVal extends CVal { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:52: error: overriding variable x in class CVar of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:52: error: `override` modifier required to override concrete member: +def x: Int (defined in class CVar) trait ValForCVar extends CVar { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:53: error: overriding variable x in class CVar of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:53: error: `override` modifier required to override concrete member: +def x: Int (defined in class CVar) trait VarForCVar extends CVar { var x: Int = 1 } // needs 
override ^ -trait_fields_conflicts.scala:54: error: overriding variable x in class CVar of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:54: error: `override` modifier required to override concrete member: +def x: Int (defined in class CVar) trait DefForCVar extends CVar { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:55: error: overriding lazy value x in class CLazy of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:55: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in class CLazy) trait ValForCLazy extends CLazy { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:56: error: overriding lazy value x in class CLazy of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:56: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in class CLazy) trait VarForCLazy extends CLazy { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:57: error: overriding lazy value x in class CLazy of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:57: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in class CLazy) trait DefForCLazy extends CLazy { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:60: error: overriding value x in class CVal of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:60: error: stable, immutable value required to override: +val x: Int (defined in class CVal) trait VarForCValOvr extends CVal { override var x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:61: error: overriding value x in class CVal of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:61: error: stable, immutable value required to override: +val x: Int (defined in class CVal) trait DefForCValOvr extends 
CVal { override def x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:62: error: overriding variable x in class CVar of type Int; - value x cannot override a mutable variable +trait_fields_conflicts.scala:62: error: mutable variable cannot be overridden: +def x: Int (defined in class CVar) trait ValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes ^ -trait_fields_conflicts.scala:63: error: overriding variable x in class CVar of type Int; - variable x cannot override a mutable variable +trait_fields_conflicts.scala:63: error: mutable variable cannot be overridden: +def x: Int (defined in class CVar) trait VarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:64: error: overriding variable x in class CVar of type Int; - method x cannot override a mutable variable +trait_fields_conflicts.scala:64: error: mutable variable cannot be overridden: +def x: Int (defined in class CVar) trait DefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:65: error: overriding lazy value x in class CLazy of type Int; - value x must be declared lazy to override a concrete lazy value +trait_fields_conflicts.scala:65: error: value must be lazy when overriding concrete lazy value: +lazy val x: Int (defined in class CLazy) trait ValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:66: error: overriding lazy value x in class CLazy of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:66: error: stable, immutable value required to override: +lazy val x: Int (defined in class CLazy) trait VarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why? 
^ -trait_fields_conflicts.scala:67: error: overriding lazy value x in class CLazy of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:67: error: stable, immutable value required to override: +lazy val x: Int (defined in class CLazy) trait DefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:69: error: overriding value x in class CVal of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:69: error: `override` modifier required to override concrete member: +val x: Int (defined in class CVal) class CValForCVal extends CVal { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:70: error: overriding value x in class CVal of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:70: error: `override` modifier required to override concrete member: +val x: Int (defined in class CVal) class CVarForCVal extends CVal { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:71: error: overriding value x in class CVal of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:71: error: `override` modifier required to override concrete member: +val x: Int (defined in class CVal) class CDefForCVal extends CVal { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:72: error: overriding variable x in class CVar of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:72: error: `override` modifier required to override concrete member: +def x: Int (defined in class CVar) class CValForCVar extends CVar { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:73: error: overriding variable x in class CVar of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:73: error: `override` modifier required to override concrete member: +def x: Int (defined in class CVar) class CVarForCVar extends CVar { var x: Int = 1 } // 
needs override ^ -trait_fields_conflicts.scala:74: error: overriding variable x in class CVar of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:74: error: `override` modifier required to override concrete member: +def x: Int (defined in class CVar) class CDefForCVar extends CVar { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:75: error: overriding lazy value x in class CLazy of type Int; - value x needs `override' modifier +trait_fields_conflicts.scala:75: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in class CLazy) class CValForCLazy extends CLazy { val x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:76: error: overriding lazy value x in class CLazy of type Int; - variable x needs `override' modifier +trait_fields_conflicts.scala:76: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in class CLazy) class CVarForCLazy extends CLazy { var x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:77: error: overriding lazy value x in class CLazy of type Int; - method x needs `override' modifier +trait_fields_conflicts.scala:77: error: `override` modifier required to override concrete member: +lazy val x: Int (defined in class CLazy) class CDefForCLazy extends CLazy { def x: Int = 1 } // needs override ^ -trait_fields_conflicts.scala:80: error: overriding value x in class CVal of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:80: error: stable, immutable value required to override: +val x: Int (defined in class CVal) class CVarForCValOvr extends CVal { override var x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:81: error: overriding value x in class CVal of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:81: error: stable, immutable value required to override: +val x: Int (defined in class CVal) class CDefForCValOvr 
extends CVal { override def x: Int = 1 } // bad override ^ -trait_fields_conflicts.scala:82: error: overriding variable x in class CVar of type Int; - value x cannot override a mutable variable +trait_fields_conflicts.scala:82: error: mutable variable cannot be overridden: +def x: Int (defined in class CVar) class CValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes ^ -trait_fields_conflicts.scala:83: error: overriding variable x in class CVar of type Int; - variable x cannot override a mutable variable +trait_fields_conflicts.scala:83: error: mutable variable cannot be overridden: +def x: Int (defined in class CVar) class CVarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:84: error: overriding variable x in class CVar of type Int; - method x cannot override a mutable variable +trait_fields_conflicts.scala:84: error: mutable variable cannot be overridden: +def x: Int (defined in class CVar) class CDefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:85: error: overriding lazy value x in class CLazy of type Int; - value x must be declared lazy to override a concrete lazy value +trait_fields_conflicts.scala:85: error: value must be lazy when overriding concrete lazy value: +lazy val x: Int (defined in class CLazy) class CValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why? ^ -trait_fields_conflicts.scala:86: error: overriding lazy value x in class CLazy of type Int; - variable x needs to be a stable, immutable value +trait_fields_conflicts.scala:86: error: stable, immutable value required to override: +lazy val x: Int (defined in class CLazy) class CVarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why? 
^ -trait_fields_conflicts.scala:87: error: overriding lazy value x in class CLazy of type Int; - method x needs to be a stable, immutable value +trait_fields_conflicts.scala:87: error: stable, immutable value required to override: +lazy val x: Int (defined in class CLazy) class CDefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why? ^ -68 errors found +68 errors diff --git a/test/files/neg/trait_fields_deprecated_overriding.check b/test/files/neg/trait_fields_deprecated_overriding.check index 785fae02c235..a8604fb6753d 100644 --- a/test/files/neg/trait_fields_deprecated_overriding.check +++ b/test/files/neg/trait_fields_deprecated_overriding.check @@ -1,6 +1,6 @@ -trait_fields_deprecated_overriding.scala:9: warning: overriding value x in trait DeprecatedOverriding is deprecated +trait_fields_deprecated_overriding.scala:10: warning: overriding value x in trait DeprecatedOverriding is deprecated override val x = 2 ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/trait_fields_deprecated_overriding.scala b/test/files/neg/trait_fields_deprecated_overriding.scala index f2c57be5f97d..73711e59b685 100644 --- a/test/files/neg/trait_fields_deprecated_overriding.scala +++ b/test/files/neg/trait_fields_deprecated_overriding.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// package scala trait DeprecatedOverriding { diff --git a/test/files/neg/trait_fields_var_override.check b/test/files/neg/trait_fields_var_override.check index 7245c78b0913..5d5113ef97aa 100644 --- a/test/files/neg/trait_fields_var_override.check +++ b/test/files/neg/trait_fields_var_override.check @@ -1,5 +1,5 @@ -trait_fields_var_override.scala:2: error: overriding variable end in trait SizeChangeEvent of type Int; - variable end cannot override a mutable variable +trait_fields_var_override.scala:2: error: mutable variable cannot be overridden: +protected def end: Int (defined in trait SizeChangeEvent) class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent ^ -one error found +1 error diff --git a/test/files/neg/type-diagnostics.check b/test/files/neg/type-diagnostics.check index fd327bcb6622..7e69cdb71683 100644 --- a/test/files/neg/type-diagnostics.check +++ b/test/files/neg/type-diagnostics.check @@ -18,4 +18,4 @@ The argument types of an anonymous function must be fully known. (SLS 8.5) Expected type was: ? 
val f = { case 5 => 10 } ^ -four errors found +4 errors diff --git a/test/files/neg/type-diagnostics.scala b/test/files/neg/type-diagnostics.scala index c4171328deee..366e6e7acfb1 100644 --- a/test/files/neg/type-diagnostics.scala +++ b/test/files/neg/type-diagnostics.scala @@ -19,4 +19,4 @@ object TParamConfusion { object PartialInfer { val f = { case 5 => 10 } -} \ No newline at end of file +} diff --git a/test/files/neg/typeerror.check b/test/files/neg/typeerror.check index f117e702f0d7..71e4b36d0029 100644 --- a/test/files/neg/typeerror.check +++ b/test/files/neg/typeerror.check @@ -8,4 +8,4 @@ typeerror.scala:6: error: type mismatch; required: scala.Long else add2(x.head, y.head) :: add(x.tail, y.tail) ^ -two errors found +2 errors diff --git a/test/files/neg/typevar_derive_alias.check b/test/files/neg/typevar_derive_alias.check new file mode 100644 index 000000000000..a289b4311257 --- /dev/null +++ b/test/files/neg/typevar_derive_alias.check @@ -0,0 +1,7 @@ +typevar_derive_alias.scala:14: error: missing parameter type + def toSetMap2(ts: Sq[El]): St[El] = ts.toSt.map(x => x) // B occurs contravariantly, so we maximize + ^ +typevar_derive_alias.scala:17: error: missing parameter type + def toSetMap3(ts: Sq[Alias]): St[Alias] = ts.toSt.map(x => x) + ^ +2 errors diff --git a/test/files/neg/typevar_derive_alias.scala b/test/files/neg/typevar_derive_alias.scala new file mode 100644 index 000000000000..cdaed43010c0 --- /dev/null +++ b/test/files/neg/typevar_derive_alias.scala @@ -0,0 +1,18 @@ +// documenting the status quo +// all toSetMapN should type check, but 2 & 3 do not right now -- all type check in dotty! 
+// (Before, the non-type-alias version type checked due to some gross hack -- I removed it so now they both fail) +class Test { + trait St[T] { def map[U](f: T => U): St[U] } + trait Sq[+T] { def toSt[B >: T]: St[B] } + + trait El + def toSetMap1(ts: Sq[El]): St[El] = { + val st = ts.toSt // here we infer B to be El because B occurs invariantly + st.map(x => x) + } + + def toSetMap2(ts: Sq[El]): St[El] = ts.toSt.map(x => x) // B occurs contravariantly, so we maximize + + type Alias = El + def toSetMap3(ts: Sq[Alias]): St[Alias] = ts.toSt.map(x => x) +} diff --git a/test/files/neg/unary-unquoted.check b/test/files/neg/unary-unquoted.check new file mode 100644 index 000000000000..e7a2ec23fd8e --- /dev/null +++ b/test/files/neg/unary-unquoted.check @@ -0,0 +1,4 @@ +unary-unquoted.scala:7: error: ';' expected but integer literal found. + def i = `+` 42 // error not taken as unary prefix operator + ^ +1 error diff --git a/test/files/neg/unary-unquoted.scala b/test/files/neg/unary-unquoted.scala new file mode 100644 index 000000000000..ddca7c443268 --- /dev/null +++ b/test/files/neg/unary-unquoted.scala @@ -0,0 +1,8 @@ + +object Test { + def +[T](x: T): String = "x" + +[Int](6): String // OK in scala 2 +} +class C { + def i = `+` 42 // error not taken as unary prefix operator +} diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check index 4f6edc61b9e8..c836b525d6ab 100644 --- a/test/files/neg/unchecked-abstract.check +++ b/test/files/neg/unchecked-abstract.check @@ -1,39 +1,39 @@ -unchecked-abstract.scala:17: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:18: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Contravariant[H]]) ^ -unchecked-abstract.scala:22: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure 
+unchecked-abstract.scala:23: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Contravariant[H]]) ^ -unchecked-abstract.scala:23: warning: abstract type T in type Contravariant[M.this.T] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:24: warning: abstract type T in type Contravariant[M.this.T] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Contravariant[T]]) ^ -unchecked-abstract.scala:28: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:29: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Invariant[T]]) ^ -unchecked-abstract.scala:29: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:30: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Invariant[L]]) ^ -unchecked-abstract.scala:32: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:33: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Invariant[H]]) ^ -unchecked-abstract.scala:34: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:35: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Invariant[L]]) ^ -unchecked-abstract.scala:37: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:38: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is 
eliminated by erasure /* warn */ println(x.isInstanceOf[Invariant[H]]) ^ -unchecked-abstract.scala:38: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:39: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Invariant[T]]) ^ -unchecked-abstract.scala:43: warning: abstract type T in type Covariant[M.this.T] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:44: warning: abstract type T in type Covariant[M.this.T] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Covariant[T]]) ^ -unchecked-abstract.scala:44: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:45: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Covariant[L]]) ^ -unchecked-abstract.scala:49: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure +unchecked-abstract.scala:50: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure /* warn */ println(x.isInstanceOf[Covariant[L]]) ^ -error: No warnings can be incurred under -Xfatal-warnings. -12 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+12 warnings +1 error diff --git a/test/files/neg/unchecked-abstract.scala b/test/files/neg/unchecked-abstract.scala index 677483a2bb99..808128a310ec 100644 --- a/test/files/neg/unchecked-abstract.scala +++ b/test/files/neg/unchecked-abstract.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// trait Contravariant[-X] trait Invariant[X] trait Covariant[+X] diff --git a/test/files/neg/unchecked-impossible.check b/test/files/neg/unchecked-impossible.check index 49748452557d..3b4f57987192 100644 --- a/test/files/neg/unchecked-impossible.check +++ b/test/files/neg/unchecked-impossible.check @@ -1,6 +1,6 @@ -unchecked-impossible.scala:6: error: pattern type is incompatible with expected type; +unchecked-impossible.scala:7: error: pattern type is incompatible with expected type; found : Seq[A] required: T2[Int,Int] case Seq(x) => ^ -one error found +1 error diff --git a/test/files/neg/unchecked-impossible.scala b/test/files/neg/unchecked-impossible.scala index e6c808050ea3..8d84fb938921 100644 --- a/test/files/neg/unchecked-impossible.scala +++ b/test/files/neg/unchecked-impossible.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// final case class T2[+A, +B](a: A, b: B) class A { diff --git a/test/files/neg/unchecked-knowable.check b/test/files/neg/unchecked-knowable.check index ef5901b012d1..19a15ef7785c 100644 --- a/test/files/neg/unchecked-knowable.check +++ b/test/files/neg/unchecked-knowable.check @@ -1,9 +1,9 @@ -unchecked-knowable.scala:19: warning: fruitless type test: a value of type Bippy cannot also be a A1 +unchecked-knowable.scala:20: warning: fruitless type test: a value of type Bippy cannot also be a A1 /* warn */ (new Bippy).isInstanceOf[A1] ^ -unchecked-knowable.scala:20: warning: fruitless type test: a value of type Bippy cannot also be a B1 +unchecked-knowable.scala:21: warning: fruitless type test: a value of type Bippy cannot also be a B1 /* warn */ (new 
Bippy).isInstanceOf[B1] ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/unchecked-knowable.scala b/test/files/neg/unchecked-knowable.scala index 675e1ead2e47..39c967468890 100644 --- a/test/files/neg/unchecked-knowable.scala +++ b/test/files/neg/unchecked-knowable.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// /** Knowable - only final leaves */ sealed abstract class A1 sealed abstract class A2 extends A1 diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check index 58b7fadf87e3..8ef312f91ca5 100644 --- a/test/files/neg/unchecked-refinement.check +++ b/test/files/neg/unchecked-refinement.check @@ -1,16 +1,20 @@ -unchecked-refinement.scala:18: warning: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure +unchecked-refinement.scala:19: warning: the type test for pattern Foo[U,U,V] cannot be checked at runtime because it has type parameters eliminated by erasure /* warn */ case _: Foo[U, U, V] if b => () ^ -unchecked-refinement.scala:20: warning: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure +unchecked-refinement.scala:21: warning: the type test for pattern Foo[Any,U,V] cannot be checked at runtime because it has type parameters eliminated by erasure /* warn */ case _: Foo[Any, U, V] if b => () ^ -unchecked-refinement.scala:24: warning: a pattern match on a refinement type is unchecked - /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn - ^ unchecked-refinement.scala:25: warning: a pattern match on a refinement type is unchecked + /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn // dotty 
warns under reflectiveSelectable + ^ +unchecked-refinement.scala:26: warning: a pattern match on a refinement type is unchecked /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn ^ -warning: one feature warning; re-run with -feature for details -error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found -one error found +unchecked-refinement.scala:24: warning: match may not be exhaustive. +It would fail on the following inputs: List(_), Nil + def f4(xs: List[Int]) = xs match { + ^ +warning: 1 feature warning; re-run with -feature for details +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/unchecked-refinement.scala b/test/files/neg/unchecked-refinement.scala index 7f87bc615a31..491fcfaf33a0 100644 --- a/test/files/neg/unchecked-refinement.scala +++ b/test/files/neg/unchecked-refinement.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// // a.scala // Thu Sep 27 09:42:16 PDT 2012 @@ -21,7 +22,7 @@ class A { } def f4(xs: List[Int]) = xs match { - /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn + /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn // dotty warns under reflectiveSelectable /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn /* nowarn */ case x: ((AnyRef { def size: Int }) @unchecked) if b => x.size } diff --git a/test/files/neg/unchecked-suppress.check b/test/files/neg/unchecked-suppress.check index 1b009e86aefc..481bbacc85f7 100644 --- a/test/files/neg/unchecked-suppress.check +++ b/test/files/neg/unchecked-suppress.check @@ -1,12 +1,12 @@ -unchecked-suppress.scala:5: warning: non-variable type argument Int in type pattern 
scala.collection.immutable.Set[Int] (the underlying of Set[Int]) is unchecked since it is eliminated by erasure +unchecked-suppress.scala:6: warning: non-variable type argument Int in type pattern scala.collection.immutable.Set[Int] (the underlying of Set[Int]) is unchecked since it is eliminated by erasure case xs: Set[Int] => xs.head // unchecked ^ -unchecked-suppress.scala:6: warning: non-variable type argument String in type pattern scala.collection.immutable.Map[String @unchecked,String] (the underlying of Map[String @unchecked,String]) is unchecked since it is eliminated by erasure +unchecked-suppress.scala:7: warning: non-variable type argument String in type pattern scala.collection.immutable.Map[String @unchecked,String] (the underlying of Map[String @unchecked,String]) is unchecked since it is eliminated by erasure case xs: Map[String @unchecked, String] => xs.head // one unchecked, one okay ^ -unchecked-suppress.scala:8: warning: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure +unchecked-suppress.scala:9: warning: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure case f: ((Int, Int) => Int) => // unchecked ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/unchecked-suppress.scala b/test/files/neg/unchecked-suppress.scala index f10d173ad686..9302ef9e9e16 100644 --- a/test/files/neg/unchecked-suppress.scala +++ b/test/files/neg/unchecked-suppress.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class A { def f(x: Any) = x match { case xs: List[String @unchecked] => xs.head // okay diff --git a/test/files/neg/unchecked.check b/test/files/neg/unchecked.check index 0519273c9720..4b92a212a6ba 100644 --- a/test/files/neg/unchecked.check +++ b/test/files/neg/unchecked.check @@ -16,6 +16,6 @@ unchecked.scala:52: warning: non-variable type argument String in type pattern T unchecked.scala:57: warning: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure case ArrayApply(x: Exp[Array[T]], _, _) => x // unchecked ^ -error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found -one error found +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/unchecked.scala b/test/files/neg/unchecked.scala index 37b66573abe5..eee51c67bce3 100644 --- a/test/files/neg/unchecked.scala +++ b/test/files/neg/unchecked.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // import language.existentials diff --git a/test/files/neg/unchecked2.check b/test/files/neg/unchecked2.check index 3638eecd7184..ba40dffd0c4d 100644 --- a/test/files/neg/unchecked2.check +++ b/test/files/neg/unchecked2.check @@ -40,6 +40,6 @@ unchecked2.scala:21: warning: non-variable type argument (String, Double) in typ unchecked2.scala:22: warning: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure /* warn */ (Some(123): Any).isInstanceOf[Option[String => Double]] ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-14 warnings found -one error found +error: No warnings can be incurred under -Werror. +14 warnings +1 error diff --git a/test/files/neg/unchecked2.scala b/test/files/neg/unchecked2.scala index ce655b6bbba5..007b0b6a8d6e 100644 --- a/test/files/neg/unchecked2.scala +++ b/test/files/neg/unchecked2.scala @@ -1,4 +1,4 @@ -// scalac: -unchecked -Xfatal-warnings +//> using options -Xfatal-warnings // object Test { // These warn because it can be statically shown they won't match. diff --git a/test/files/neg/unchecked3.check b/test/files/neg/unchecked3.check index bff551d55898..24d834d284bb 100644 --- a/test/files/neg/unchecked3.check +++ b/test/files/neg/unchecked3.check @@ -1,42 +1,42 @@ -unchecked3.scala:25: warning: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure +unchecked3.scala:26: warning: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure /* warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true } ^ -unchecked3.scala:26: warning: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure +unchecked3.scala:27: warning: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure /* warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true } ^ -unchecked3.scala:29: warning: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure +unchecked3.scala:30: warning: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure /* warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true } ^ -unchecked3.scala:33: warning: non-variable type argument Int in type pattern B2[_,Int] is unchecked since it is eliminated by erasure +unchecked3.scala:34: warning: non-variable type argument Int in type pattern B2[_,Int] is unchecked since 
it is eliminated by erasure /* warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true } ^ -unchecked3.scala:41: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure +unchecked3.scala:42: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[List[String]] => () ^ -unchecked3.scala:44: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure +unchecked3.scala:45: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[Array[List[String]]] => () ^ -unchecked3.scala:51: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure +unchecked3.scala:52: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[List[String]] => () ^ -unchecked3.scala:54: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure +unchecked3.scala:55: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[Array[List[String]]] => () ^ -unchecked3.scala:61: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure +unchecked3.scala:62: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[List[String]] => () ^ -unchecked3.scala:63: warning: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since 
it is eliminated by erasure +unchecked3.scala:64: warning: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[List[Array[String]]] => () ^ -unchecked3.scala:64: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure +unchecked3.scala:65: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure /* warn */ case _: Array[Array[List[String]]] => () ^ -unchecked3.scala:76: warning: abstract type A in type pattern scala.collection.immutable.Set[Q.this.A] (the underlying of Set[Q.this.A]) is unchecked since it is eliminated by erasure +unchecked3.scala:77: warning: abstract type A in type pattern scala.collection.immutable.Set[Q.this.A] (the underlying of Set[Q.this.A]) is unchecked since it is eliminated by erasure /* warn */ case xs: Set[A] => xs.head ^ -unchecked3.scala:63: warning: unreachable code +unchecked3.scala:64: warning: unreachable code /* warn */ case _: Array[List[Array[String]]] => () ^ -error: No warnings can be incurred under -Xfatal-warnings. -13 warnings found -one error found +error: No warnings can be incurred under -Werror. 
+13 warnings +1 error diff --git a/test/files/neg/unchecked3.scala b/test/files/neg/unchecked3.scala index 3834952a7e2b..d077a59b8849 100644 --- a/test/files/neg/unchecked3.scala +++ b/test/files/neg/unchecked3.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed trait A2[T1] final class B2[T1, T2] extends A2[T1] @@ -70,12 +71,12 @@ object Matching { type A type B <: A - def f(xs: Traversable[B]) = xs match { + def f(xs: Iterable[B]) = xs match { /* nowarn */ case xs: List[A] => xs.head /* nowarn */ case xs: Seq[B] => xs.head /* warn */ case xs: Set[A] => xs.head } - def f2[T <: B](xs: Traversable[T]) = xs match { + def f2[T <: B](xs: Iterable[T]) = xs match { /* nowarn */ case xs: List[B with T] => xs.head /* nowarn */ case xs: Seq[A] => xs.head /* nowarn */ case xs: Set[T] => xs.head diff --git a/test/files/neg/unicode-arrows-deprecation.check b/test/files/neg/unicode-arrows-deprecation.check new file mode 100644 index 000000000000..e01ae3a915b8 --- /dev/null +++ b/test/files/neg/unicode-arrows-deprecation.check @@ -0,0 +1,15 @@ +unicode-arrows-deprecation.scala:4: warning: The unicode arrow `⇒` is deprecated, use `=>` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code. [quickfixable] + val a: Int ⇒ Int = x ⇒ x + ^ +unicode-arrows-deprecation.scala:4: warning: The unicode arrow `⇒` is deprecated, use `=>` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code. [quickfixable] + val a: Int ⇒ Int = x ⇒ x + ^ +unicode-arrows-deprecation.scala:6: warning: The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code. 
[quickfixable] + val b = for { x ← (1 to 10) } yield x + ^ +unicode-arrows-deprecation.scala:8: warning: method → in class ArrowAssoc is deprecated (since 2.13.0): Use `->` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code. + val c: (Int, Int) = 1 → 1 + ^ +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/unicode-arrows-deprecation.scala b/test/files/neg/unicode-arrows-deprecation.scala new file mode 100644 index 000000000000..7a9f05a24d68 --- /dev/null +++ b/test/files/neg/unicode-arrows-deprecation.scala @@ -0,0 +1,9 @@ +//> using options -deprecation -Xfatal-warnings +// +object Test { + val a: Int ⇒ Int = x ⇒ x + + val b = for { x ← (1 to 10) } yield x + + val c: (Int, Int) = 1 → 1 +} diff --git a/test/files/neg/unicode-unterminated-quote.check b/test/files/neg/unicode-unterminated-quote.check deleted file mode 100644 index 166488710b2a..000000000000 --- a/test/files/neg/unicode-unterminated-quote.check +++ /dev/null @@ -1,7 +0,0 @@ -unicode-unterminated-quote.scala:2: error: unclosed string literal - val x = \u0022 - ^ -unicode-unterminated-quote.scala:2: error: '}' expected but eof found. 
- val x = \u0022 - ^ -two errors found diff --git a/test/files/neg/unicode-unterminated-quote.scala b/test/files/neg/unicode-unterminated-quote.scala deleted file mode 100644 index bb6eab667fb6..000000000000 --- a/test/files/neg/unicode-unterminated-quote.scala +++ /dev/null @@ -1,2 +0,0 @@ -class A { - val x = \u0022 \ No newline at end of file diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check index 46bc01df8e0c..ed4f70309e0a 100644 --- a/test/files/neg/unit-returns-value.check +++ b/test/files/neg/unit-returns-value.check @@ -1,15 +1,15 @@ -unit-returns-value.scala:5: warning: enclosing method f has result type Unit: return value discarded +unit-returns-value.scala:6: warning: enclosing method f has result type Unit: return value of type Int(5) discarded if (b) return 5 ^ -unit-returns-value.scala:5: warning: a pure expression does nothing in statement position +unit-returns-value.scala:6: warning: a pure expression does nothing in statement position if (b) return 5 ^ -unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses +unit-returns-value.scala:24: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses i1 // warn ^ -unit-returns-value.scala:24: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses +unit-returns-value.scala:25: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses i2 // warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/unit-returns-value.scala b/test/files/neg/unit-returns-value.scala index 586135df91f7..89ce8d827b9f 100644 --- a/test/files/neg/unit-returns-value.scala +++ b/test/files/neg/unit-returns-value.scala @@ -1,12 +1,13 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { - def f { + def f: Unit = { var b = false if (b) return 5 } // no warning - def g { + def g: Unit = { return println("hello") } } diff --git a/test/files/neg/unit2anyref.check b/test/files/neg/unit2anyref.check index 6d11461700bc..7d26c13ad0df 100644 --- a/test/files/neg/unit2anyref.check +++ b/test/files/neg/unit2anyref.check @@ -3,4 +3,4 @@ unit2anyref.scala:2: error: type mismatch; required: AnyRef val x: AnyRef = () // this should not succeed. ^ -one error found +1 error diff --git a/test/files/neg/universal-lint.check b/test/files/neg/universal-lint.check new file mode 100644 index 000000000000..f62561ef827c --- /dev/null +++ b/test/files/neg/universal-lint.check @@ -0,0 +1,9 @@ +universal-lint.scala:4: warning: missing type argument to method isInstanceOf + def f = List("").map(_.isInstanceOf) + ^ +universal-lint.scala:5: warning: missing type argument to method asInstanceOf + def g = List("").map(_.asInstanceOf) + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/universal-lint.scala b/test/files/neg/universal-lint.scala new file mode 100644 index 000000000000..c01a573cf939 --- /dev/null +++ b/test/files/neg/universal-lint.scala @@ -0,0 +1,24 @@ +//> using options -Xlint -Werror + +trait Test { + def f = List("").map(_.isInstanceOf) + def g = List("").map(_.asInstanceOf) + def ok = List("").map(_.isInstanceOf[String]) + def or = List("").map(_.asInstanceOf[String]) +} + +import java.util.Spliterator +import java.util.stream._ +import scala.collection.Stepper +sealed trait StreamShape[T, S <: BaseStream[_, _], St <: Stepper[_]] { + final def fromStepper(st: St, par: Boolean): S = mkStream(st, par) + protected def mkStream(st: St, par: Boolean): S +} +trait StreamShapeLowPriority1 { + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T], Stepper[T]] = anyStreamShapePrototype.asInstanceOf[StreamShape[T, Stream[T], Stepper[T]]] + + private[this] val anyStreamShapePrototype: StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] = new StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] { + def mkStream(s: Stepper[AnyRef], par: Boolean): Stream[AnyRef] = StreamSupport.stream(s.spliterator.asInstanceOf[Spliterator[AnyRef]], par) + } +} diff --git a/test/files/neg/unreachablechar.check b/test/files/neg/unreachablechar.check index e88919c3201b..f8c54e176eed 100644 --- a/test/files/neg/unreachablechar.check +++ b/test/files/neg/unreachablechar.check @@ -1,12 +1,12 @@ -unreachablechar.scala:5: warning: patterns after a variable pattern cannot match (SLS 8.1.1) +unreachablechar.scala:6: warning: patterns after a variable pattern cannot match (SLS 8.1.1) case _ => println("stuff"); ^ -unreachablechar.scala:6: warning: unreachable code due to variable pattern on line 5 +unreachablechar.scala:7: warning: unreachable code due to variable pattern on line 6 case 'f' => println("not stuff?"); ^ -unreachablechar.scala:6: warning: unreachable code +unreachablechar.scala:7: 
warning: unreachable code case 'f' => println("not stuff?"); ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. +3 warnings +1 error diff --git a/test/files/neg/unreachablechar.scala b/test/files/neg/unreachablechar.scala index 1290b4b33167..b17fe0505efc 100644 --- a/test/files/neg/unreachablechar.scala +++ b/test/files/neg/unreachablechar.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Foo extends App{ 'f' match { case 'o'|'c'|'b' => println("Oooo"); diff --git a/test/files/neg/unsafe.check b/test/files/neg/unsafe.check index 49285199b2a4..7de2e58aeb82 100644 --- a/test/files/neg/unsafe.check +++ b/test/files/neg/unsafe.check @@ -1,4 +1,4 @@ unsafe.scala:9: error: value threadId is not a member of Thread def f(t: Thread) = t.threadId ^ -one error found +1 error diff --git a/test/files/neg/unsafe.scala b/test/files/neg/unsafe.scala index f026db2598e2..372280dc868b 100644 --- a/test/files/neg/unsafe.scala +++ b/test/files/neg/unsafe.scala @@ -1,6 +1,6 @@ -// scalac: --release:8 -Yrelease:java.lang -// javaVersion: 19+ +//> using options --release:8 -Yrelease:java.lang +//> using jvm 19+ // -Yrelease opens packages but does not override class definitions // because ct.sym comes first diff --git a/test/files/neg/unused.check b/test/files/neg/unused.check new file mode 100644 index 000000000000..256ea024f817 --- /dev/null +++ b/test/files/neg/unused.check @@ -0,0 +1,9 @@ +unused.scala:7: warning: pattern var x in value patvars is never used + case C(x, 2, _, _) => 1 + ^ +unused.scala:10: warning: pattern var e in value patvars is never used + case e => 4 + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/unused.scala b/test/files/neg/unused.scala new file mode 100644 index 000000000000..397cb4243520 --- /dev/null +++ b/test/files/neg/unused.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Wunused:patvars + +case class C(a: Int, b: Int, c: Int, d: Int) + +object Test { + val patvars = C(1, 2, 3, 4) match { + case C(x, 2, _, _) => 1 + case C(2, b, y@_, _) => 2 + case C(a, b@_, c, d@_) => 3 + case e => 4 + } +} diff --git a/test/files/neg/userdefined_apply.check b/test/files/neg/userdefined_apply.check index c8c8976f5fb9..9ebe8d033d8d 100644 --- a/test/files/neg/userdefined_apply.check +++ b/test/files/neg/userdefined_apply.check @@ -22,4 +22,4 @@ case class ClashNoSigPoly private(x: Int) userdefined_apply.scala:51: error: ClashNoSigPoly.type does not take parameters def apply(x: T) = if (???) ClashNoSigPoly(1) else ??? ^ -8 errors found +8 errors diff --git a/test/files/neg/userdefined_apply.scala b/test/files/neg/userdefined_apply.scala index 0a0d960b3948..cd362067c525 100644 --- a/test/files/neg/userdefined_apply.scala +++ b/test/files/neg/userdefined_apply.scala @@ -40,7 +40,7 @@ class BaseNCNSP[T] { } object NoClashNoSigPoly extends BaseNCNSP[Boolean] -// TODO: position error at definition of apply in superclass instead of on case clss +// TODO: position error at definition of apply in superclass instead of on case class // error: recursive method apply needs result type case class NoClashNoSigPoly private(x: Int) @@ -52,6 +52,6 @@ class BaseCNSP[T] { } object ClashNoSigPoly extends BaseCNSP[Int] -// TODO: position error at definition of apply in superclass instead of on case clss +// TODO: position error at definition of apply in superclass instead of on case class // error: recursive method apply needs result type case class ClashNoSigPoly private(x: Int) diff --git a/test/files/neg/using-source3.check b/test/files/neg/using-source3.check new file mode 100644 index 000000000000..24519344e517 --- /dev/null +++ 
b/test/files/neg/using-source3.check @@ -0,0 +1,10 @@ +using-source3.scala:14: error: reference to f is ambiguous; +it is both defined in the enclosing class D and inherited in the enclosing class E as method f (defined in class C) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.f`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D.E.g + def g = f + ^ +1 error diff --git a/test/files/neg/using-source3.scala b/test/files/neg/using-source3.scala new file mode 100644 index 000000000000..9f7c9c2f92f6 --- /dev/null +++ b/test/files/neg/using-source3.scala @@ -0,0 +1,16 @@ + +// skalac: -Werror -Xsource:3 + +//> using options -Werror +//> using options -Xsource:3 + +class C { + def f = 42 +} + +class D { + def f = 27 + class E extends C { + def g = f + } +} diff --git a/test/files/neg/using-source3b.check b/test/files/neg/using-source3b.check new file mode 100644 index 000000000000..5bba8f78b9d0 --- /dev/null +++ b/test/files/neg/using-source3b.check @@ -0,0 +1,10 @@ +using-source3b.scala:13: error: reference to f is ambiguous; +it is both defined in the enclosing class D and inherited in the enclosing class E as method f (defined in class C) +In Scala 2, symbols inherited from a superclass shadow symbols defined in an outer scope. +Such references are ambiguous in Scala 3. To continue using the inherited symbol, write `this.f`. +Or use `-Wconf:msg=legacy-binding:s` to silence this warning. [quickfixable] +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration, site=D.E.g + def g = f + ^ +1 error diff --git a/test/files/neg/using-source3b.scala b/test/files/neg/using-source3b.scala new file mode 100644 index 000000000000..f1730ace1672 --- /dev/null +++ b/test/files/neg/using-source3b.scala @@ -0,0 +1,15 @@ + +// skalac: -Werror -Xsource:3 + +//> using options -Werror, "-Wconf:cat=deprecation:e,cat=feature:s", -Xlint , -Xsource:3 + +class C { + def f = 42 +} + +class D { + def f = 27 + class E extends C { + def g = f + } +} diff --git a/test/files/neg/val_infer.check b/test/files/neg/val_infer.check index 711450add958..988bd582a37e 100644 --- a/test/files/neg/val_infer.check +++ b/test/files/neg/val_infer.check @@ -3,4 +3,4 @@ val_infer.scala:3: error: type mismatch; required: Int trait Sub extends Base { def foo = "" } ^ -one error found +1 error diff --git a/test/files/neg/val_infer.scala b/test/files/neg/val_infer.scala index 7fe839374991..4dc905a1d51c 100644 --- a/test/files/neg/val_infer.scala +++ b/test/files/neg/val_infer.scala @@ -1,4 +1,4 @@ class Test { trait Base { def foo: Int } trait Sub extends Base { def foo = "" } -} \ No newline at end of file +} diff --git a/test/files/neg/val_sig_infer_match.check b/test/files/neg/val_sig_infer_match.check index 704c99cf84ef..6307827afd99 100644 --- a/test/files/neg/val_sig_infer_match.check +++ b/test/files/neg/val_sig_infer_match.check @@ -1,4 +1,4 @@ val_sig_infer_match.scala:21: error: value y is not a member of A def m = f.y // doesn't compile anymore ^ -one error found +1 error diff --git a/test/files/neg/val_sig_infer_match.scala b/test/files/neg/val_sig_infer_match.scala index fb8aa66d56d8..5177cdf78cdf 100644 --- a/test/files/neg/val_sig_infer_match.scala +++ b/test/files/neg/val_sig_infer_match.scala @@ -19,4 +19,4 @@ class D extends C { } def m = f.y // doesn't compile anymore -} \ No newline at end of file +} diff --git a/test/files/neg/val_sig_infer_struct.check 
b/test/files/neg/val_sig_infer_struct.check index 26efbbc3f499..85f077df2b40 100644 --- a/test/files/neg/val_sig_infer_struct.check +++ b/test/files/neg/val_sig_infer_struct.check @@ -1,4 +1,4 @@ val_sig_infer_struct.scala:7: error: value foo is not a member of Object def bar = f.foo ^ -one error found +1 error diff --git a/test/files/neg/val_sig_infer_struct.scala b/test/files/neg/val_sig_infer_struct.scala index e88340337cb3..c1a2e9366145 100644 --- a/test/files/neg/val_sig_infer_struct.scala +++ b/test/files/neg/val_sig_infer_struct.scala @@ -5,4 +5,4 @@ class C { class D extends C { override val f = new Object { def foo = 1 } def bar = f.foo -} \ No newline at end of file +} diff --git a/test/files/neg/value-discard.check b/test/files/neg/value-discard.check new file mode 100644 index 000000000000..92e63c680954 --- /dev/null +++ b/test/files/neg/value-discard.check @@ -0,0 +1,12 @@ +value-discard.scala:6: warning: discarded non-Unit value of type Boolean + mutable.Set[String]().remove("") // warn because suspicious receiver + ^ +value-discard.scala:13: warning: discarded non-Unit value of type scala.collection.mutable.Set[String] + def subtract(): Unit = mutable.Set.empty[String].subtractOne("") // warn because suspicious receiver + ^ +value-discard.scala:17: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + "" // warn pure expr + ^ +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/value-discard.scala b/test/files/neg/value-discard.scala new file mode 100644 index 000000000000..2d6c866c2370 --- /dev/null +++ b/test/files/neg/value-discard.scala @@ -0,0 +1,62 @@ +//> using options -Wvalue-discard -Werror +final class UnusedTest { + import scala.collection.mutable + + def remove(): Unit = { + mutable.Set[String]().remove("") // warn because suspicious receiver + } + + def removeAscribed(): Unit = { + mutable.Set[String]().remove(""): Unit // nowarn + } + + def subtract(): Unit = mutable.Set.empty[String].subtractOne("") // warn because suspicious receiver + + def warnings(): Unit = { + val s: mutable.Set[String] = mutable.Set.empty[String] + "" // warn pure expr + "": Unit // nowarn + s.subtractOne("") // nowarn + } + + def f(implicit x: Int): Boolean = x % 2 == 1 + + implicit def i: Int = 42 + + def u: Unit = f: Unit // nowarn +} + +class UnitAscription { + import scala.concurrent._, ExecutionContext.Implicits._ + + case class C(c: Int) { + def f(i: Int, j: Int = c) = i + j + } + + def f(i: Int, j: Int = 27) = i + j + + def g[A]: List[A] = Nil + + def i: Int = 42 + + def `default arg is inline`: Unit = + f(i = 42): Unit // nowarn + + def `default arg requires block`: Unit = + C(27).f(i = 42): Unit // nowarn + + def `application requires implicit arg`: Unit = + Future(42): Unit // nowarn + + def `application requires inferred type arg`: Unit = + g: Unit // nowarn + + def `implicit selection from this`: Unit = + i: Unit // nowarn +} + +object UnitAscription { + def g[A]: List[A] = Nil + def `application requires inferred type arg`: Unit = + g: Unit // nowarn UnitAscription.g +} diff --git a/test/files/neg/valueclasses-doubledefs.check b/test/files/neg/valueclasses-doubledefs.check index ec513aca6b9e..b8c0e07be9b9 100644 --- a/test/files/neg/valueclasses-doubledefs.check +++ b/test/files/neg/valueclasses-doubledefs.check @@ -1,7 +1,7 @@ valueclasses-doubledefs.scala:5: error: double definition: def 
apply(x: Double): String at line 4 and def apply(x: Meter): String at line 5 -have same type after erasure: (x: Double)String +have same type after erasure: (x: Double): String def apply(x: Meter) = x.toString ^ -one error found +1 error diff --git a/test/files/neg/valueclasses-impl-restrictions.check b/test/files/neg/valueclasses-impl-restrictions.check index 0af9173f74fc..bcda5788a5ab 100644 --- a/test/files/neg/valueclasses-impl-restrictions.check +++ b/test/files/neg/valueclasses-impl-restrictions.check @@ -10,4 +10,4 @@ valueclasses-impl-restrictions.scala:23: error: implementation restriction: nest This restriction is planned to be removed in subsequent releases. private[this] class I2(val q: String) ^ -three errors found +3 errors diff --git a/test/files/neg/valueclasses-pavlov.check b/test/files/neg/valueclasses-pavlov.check index 17102a0c68d8..a468de1caba9 100644 --- a/test/files/neg/valueclasses-pavlov.check +++ b/test/files/neg/valueclasses-pavlov.check @@ -1,7 +1,7 @@ valueclasses-pavlov.scala:8: error: double definition: def foo(x: String): String at line 7 and def foo(x: Box2): String at line 8 -have same type after erasure: (x: String)String +have same type after erasure: (x: String): String def foo(x: Box2) = "foo(Box2): ok" ^ -one error found +1 error diff --git a/test/files/neg/valueclasses-pavlov.scala b/test/files/neg/valueclasses-pavlov.scala index a5858b2cf0d5..d0c1ed59ba10 100644 --- a/test/files/neg/valueclasses-pavlov.scala +++ b/test/files/neg/valueclasses-pavlov.scala @@ -13,7 +13,7 @@ class Box2(val value: String) extends AnyVal object test2a { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val b1 = new Box1(null) val b2 = new Box2(null) val f: Foo[Box2] = b1 diff --git a/test/files/neg/valueclasses.check b/test/files/neg/valueclasses.check index 35d38aae6082..839908149498 100644 --- a/test/files/neg/valueclasses.check +++ b/test/files/neg/valueclasses.check @@ -43,4 +43,4 @@ class V15(protected[this] val x: 
Int) extends AnyVal // fail valueclasses.scala:36: error: value class needs to have exactly one val parameter class V16()(val a: Any) extends AnyVal // fail, was allowed 2.10.x ^ -15 errors found +15 errors diff --git a/test/files/neg/valueclasses.scala b/test/files/neg/valueclasses.scala index 06fde40a70b0..7ccf9680f9d9 100644 --- a/test/files/neg/valueclasses.scala +++ b/test/files/neg/valueclasses.scala @@ -4,7 +4,7 @@ trait T extends AnyVal // fail class Foo { class Bar(x: Int) extends AnyVal // fail - def foo() { + def foo(): Unit = { class Baz(x: Int) extends AnyVal // fail } } diff --git a/test/files/neg/varargs.check b/test/files/neg/varargs.check index 32eb40abb15e..f39ed66e056f 100644 --- a/test/files/neg/varargs.check +++ b/test/files/neg/varargs.check @@ -1,10 +1,10 @@ -varargs.scala:11: error: A method annotated with @varargs produces a forwarder method with the same signature (a: Int, b: Array[String])Int as an existing method. +varargs.scala:11: error: A method annotated with @varargs produces a forwarder method with the same signature (a: Int, b: Array[String]): Int as an existing method. @varargs def v1(a: Int, b: String*) = a + b.length // nok ^ varargs.scala:14: error: A method annotated with @varargs must have a single repeated parameter in its last parameter list. @varargs def nov(a: Int) = 0 // nok ^ -varargs.scala:16: error: A method annotated with @varargs produces a forwarder method with the same signature (a: Int, b: Array[String])Int as an existing method. +varargs.scala:16: error: A method annotated with @varargs produces a forwarder method with the same signature (a: Int, b: Array[String]): Int as an existing method. @varargs def v2(a: Int, b: String*) = 0 // nok ^ varargs.scala:19: error: A method annotated with @varargs must have a single repeated parameter in its last parameter list. 
@@ -16,4 +16,4 @@ varargs.scala:20: error: A method annotated with @varargs must have a single rep varargs.scala:23: error: A method annotated with @varargs must have a single repeated parameter in its last parameter list. @varargs def v6: Int = 1 // nok ^ -6 errors found +6 errors diff --git a/test/files/neg/varargs2.check b/test/files/neg/varargs2.check new file mode 100644 index 000000000000..23d13ec6bf09 --- /dev/null +++ b/test/files/neg/varargs2.check @@ -0,0 +1,13 @@ +varargs2.scala:7: error: Only methods can be marked @varargs + @varargs val x = 42 // nok + ^ +varargs2.scala:8: error: Only methods can be marked @varargs + def f(@varargs y: Int) = 42 // nok + ^ +varargs2.scala:9: error: Only methods can be marked @varargs + def g(z: Int @varargs) = 42 // nok + ^ +varargs2.scala:10: error: Only methods can be marked @varargs + def h(z: Int) = 42: @varargs // nok + ^ +4 errors diff --git a/test/files/neg/varargs2.scala b/test/files/neg/varargs2.scala new file mode 100644 index 000000000000..26c8da1e43a8 --- /dev/null +++ b/test/files/neg/varargs2.scala @@ -0,0 +1,13 @@ +//> using options -Xsource:3 + +import annotation.* + +trait T { + @varargs def d(n: Int*) = 42 // ok + @varargs val x = 42 // nok + def f(@varargs y: Int) = 42 // nok + def g(z: Int @varargs) = 42 // nok + def h(z: Int) = 42: @varargs // nok + + lazy val VarargsClass = List.empty[varargs] // good one +} diff --git a/test/files/neg/variance-alias.check b/test/files/neg/variance-alias.check new file mode 100644 index 000000000000..820d743393ec --- /dev/null +++ b/test/files/neg/variance-alias.check @@ -0,0 +1,7 @@ +variance-alias.scala:5: error: covariant type T occurs in invariant position in type Inv[T] of value a + val a: Inv[({type L[+X] = X})#L[T]] = new Inv[({type L[+X] = X})#L[T]] {} // error + ^ +variance-alias.scala:6: error: covariant type T occurs in invariant position in type Inv[X.this.Id[T]] of value b + val b: Inv[Id[T]] = new Inv[Id[T]] {} // error + ^ +2 errors diff --git 
a/test/files/neg/variance-alias.scala b/test/files/neg/variance-alias.scala new file mode 100644 index 000000000000..fcdd4c8f5187 --- /dev/null +++ b/test/files/neg/variance-alias.scala @@ -0,0 +1,7 @@ +trait Inv[A] + +class X[+T] { + type Id[+A] = A + val a: Inv[({type L[+X] = X})#L[T]] = new Inv[({type L[+X] = X})#L[T]] {} // error + val b: Inv[Id[T]] = new Inv[Id[T]] {} // error +} diff --git a/test/files/neg/variance-holes.check b/test/files/neg/variance-holes.check new file mode 100644 index 000000000000..b8cefaaf30a5 --- /dev/null +++ b/test/files/neg/variance-holes.check @@ -0,0 +1,22 @@ +variance-holes.scala:9: error: covariant type x occurs in contravariant position in type [+x, +y] >: F[x,y] of type F2 + def asWiden[F2[+x, +y] >: F[x, y]]: F2[Int, Int] = v + ^ +variance-holes.scala:2: error: contravariant type A occurs in covariant position in type [-A] >: List[A] of type Lower1 + type Lower1[-A] >: List[A] + ^ +variance-holes.scala:5: error: covariant type x occurs in contravariant position in type [+x] >: F[x] of type G + type G[+x] >: F[x] + ^ +variance-holes.scala:13: error: covariant type A occurs in contravariant position in type AnyRef{type T >: A} of method foo + def foo: { type T >: A } + ^ +variance-holes.scala:17: error: covariant type A occurs in contravariant position in type AnyRef{type T <: A} of value x + def foo(x: { type T <: A }): Unit + ^ +variance-holes.scala:20: error: covariant type A occurs in contravariant position in type <: AnyRef{type T >: A} of type x + class RefinedLower[+A, x <: { type T >: A }] + ^ +variance-holes.scala:21: error: covariant type A occurs in contravariant position in type A of value x_= + private[this] class PrivateThis[+A](var x: A) + ^ +7 errors diff --git a/test/files/neg/variance-holes.scala b/test/files/neg/variance-holes.scala new file mode 100644 index 000000000000..439a9449bde1 --- /dev/null +++ b/test/files/neg/variance-holes.scala @@ -0,0 +1,22 @@ +object Test { + type Lower1[-A] >: List[A] + + 
class Lower2[F[-_]] { + type G[+x] >: F[x] + } + + class Lower3[F[-_, -_]](v: F[Int, Int]) { + def asWiden[F2[+x, +y] >: F[x, y]]: F2[Int, Int] = v + } + + trait Refined1[+A] { + def foo: { type T >: A } + } + + trait Refined2[+A] { + def foo(x: { type T <: A }): Unit + } + + class RefinedLower[+A, x <: { type T >: A }] + private[this] class PrivateThis[+A](var x: A) +} \ No newline at end of file diff --git a/test/files/neg/variances-refinement.check b/test/files/neg/variances-refinement.check index 2bed3ffa6b0b..d88075a298d2 100644 --- a/test/files/neg/variances-refinement.check +++ b/test/files/neg/variances-refinement.check @@ -1,22 +1,22 @@ -variances-refinement.scala:17: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: A): A} of method fail1 +variances-refinement.scala:17: error: contravariant type A occurs in covariant position in type (): AnyRef{def f0(x: A): A} of method fail1 def fail1() = { object O { def f0(x: A): A = ??? } ; O } // fail ^ -variances-refinement.scala:18: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): A} of method fail2 +variances-refinement.scala:18: error: covariant type B occurs in contravariant position in type (): AnyRef{def f0(x: B): A} of method fail2 def fail2() = { object O { def f0(x: B): A = ??? } ; O } // fail ^ -variances-refinement.scala:19: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): B} of method fail3 +variances-refinement.scala:19: error: covariant type B occurs in contravariant position in type (): AnyRef{def f0(x: B): B} of method fail3 def fail3() = { object O { def f0(x: B): B = ??? 
} ; O } // fail ^ -variances-refinement.scala:20: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): C} of method fail4 +variances-refinement.scala:20: error: covariant type B occurs in contravariant position in type (): AnyRef{def f0(x: B): C} of method fail4 def fail4() = { object O { def f0(x: B): C = ??? } ; O } // fail ^ -variances-refinement.scala:21: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: C): A} of method fail5 +variances-refinement.scala:21: error: contravariant type A occurs in covariant position in type (): AnyRef{def f0(x: C): A} of method fail5 def fail5() = { object O { def f0(x: C): A = ??? } ; O } // fail ^ -variances-refinement.scala:23: error: contravariant type A occurs in covariant position in type ()O1.type forSome { val O1: AnyRef with O0; type O0 <: AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} } of method fail6 +variances-refinement.scala:23: error: contravariant type A occurs in covariant position in type (): O1.type forSome { val O1: AnyRef with O0; type O0 <: AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} } of method fail6 def fail6() = { // fail ^ -variances-refinement.scala:32: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} of method fail7 +variances-refinement.scala:32: error: contravariant type A occurs in covariant position in type (): AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} of method fail7 def fail7() = { // fail ^ -7 errors found +7 errors diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check index 3c1545a375d7..bdd811166ac4 100644 --- a/test/files/neg/variances.check +++ b/test/files/neg/variances.check @@ -1,9 +1,6 @@ variances.scala:4: error: covariant type A occurs in contravariant position in type test.Vector[A] of value x def append(x: Vector[A]): Vector[A] ^ -variances.scala:75: error: covariant type A 
occurs in contravariant position in type => A => A of value m - val m: A => A - ^ variances.scala:18: error: covariant type A occurs in contravariant position in type A of value a private def setA3(a : A) = this.a = a ^ @@ -13,13 +10,13 @@ variances.scala:19: error: covariant type A occurs in contravariant position in variances.scala:21: error: covariant type A occurs in invariant position in supertype test.C[A] of object Baz object Baz extends C[A] ^ -variances.scala:74: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: A => A} of value x +variances.scala:74: error: covariant type A occurs in contravariant position in type test.Covariant.T[A]{val m: A => A} of value x val x: T[A] { ^ variances.scala:89: error: covariant type T occurs in invariant position in type T of type A type A = T ^ -variances.scala:90: error: covariant type A occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo +variances.scala:90: error: covariant type A occurs in contravariant position in type test.TestAlias.B[C.this.A] of method foo def foo: B[A] ^ -8 errors found +7 errors diff --git a/test/files/neg/variances.scala b/test/files/neg/variances.scala index 10ca111cd031..01c95be04aff 100644 --- a/test/files/neg/variances.scala +++ b/test/files/neg/variances.scala @@ -60,7 +60,7 @@ object Covariant { class Test extends AbstractTest { val a : Foo[java.lang.Character] = new Foo[java.lang.Character] } - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { val test = new Test test.c.doit1(test.b) test.c.doit2(test.b) diff --git a/test/files/neg/variances2.check b/test/files/neg/variances2.check index 433cc125ad14..3f969533523a 100644 --- a/test/files/neg/variances2.check +++ b/test/files/neg/variances2.check @@ -4,10 +4,10 @@ variances2.scala:9: error: covariant type B occurs in contravariant position in variances2.scala:12: error: covariant type E occurs in contravariant position in type E of 
value x def f4(x: E): Unit = () ^ -variances2.scala:15: error: contravariant type A occurs in covariant position in type ()A of method f6 +variances2.scala:15: error: contravariant type A occurs in covariant position in type (): A of method f6 def f6(): A = ??? ^ -variances2.scala:18: error: contravariant type D occurs in covariant position in type ()D of method f9 +variances2.scala:18: error: contravariant type D occurs in covariant position in type (): D of method f9 def f9(): D = ??? ^ variances2.scala:22: error: contravariant type A occurs in covariant position in type A => A of value f @@ -70,73 +70,73 @@ variances2.scala:53: error: covariant type B occurs in contravariant position in variances2.scala:56: error: covariant type E occurs in contravariant position in type F => E of value f def f46(f: F => E): Unit = () ^ -variances2.scala:59: error: contravariant type A occurs in covariant position in type ()A => A of method f48 +variances2.scala:59: error: contravariant type A occurs in covariant position in type (): A => A of method f48 def f48(): A => A = null ^ -variances2.scala:62: error: contravariant type D occurs in covariant position in type ()A => D of method f51 +variances2.scala:62: error: contravariant type D occurs in covariant position in type (): A => D of method f51 def f51(): A => D = null ^ -variances2.scala:65: error: covariant type B occurs in contravariant position in type ()B => A of method f54 +variances2.scala:65: error: covariant type B occurs in contravariant position in type (): B => A of method f54 def f54(): B => A = null ^ -variances2.scala:66: error: covariant type B occurs in contravariant position in type ()B => B of method f55 +variances2.scala:66: error: covariant type B occurs in contravariant position in type (): B => B of method f55 def f55(): B => B = null ^ -variances2.scala:67: error: covariant type B occurs in contravariant position in type ()B => C of method f56 +variances2.scala:67: error: covariant type B occurs in 
contravariant position in type (): B => C of method f56 def f56(): B => C = null ^ -variances2.scala:68: error: covariant type B occurs in contravariant position in type ()B => D of method f57 +variances2.scala:68: error: covariant type B occurs in contravariant position in type (): B => D of method f57 def f57(): B => D = null ^ -variances2.scala:69: error: covariant type B occurs in contravariant position in type ()B => E of method f58 +variances2.scala:69: error: covariant type B occurs in contravariant position in type (): B => E of method f58 def f58(): B => E = null ^ -variances2.scala:70: error: covariant type B occurs in contravariant position in type ()B => F of method f59 +variances2.scala:70: error: covariant type B occurs in contravariant position in type (): B => F of method f59 def f59(): B => F = null ^ -variances2.scala:71: error: contravariant type A occurs in covariant position in type ()C => A of method f60 +variances2.scala:71: error: contravariant type A occurs in covariant position in type (): C => A of method f60 def f60(): C => A = null ^ -variances2.scala:74: error: contravariant type D occurs in covariant position in type ()C => D of method f63 +variances2.scala:74: error: contravariant type D occurs in covariant position in type (): C => D of method f63 def f63(): C => D = null ^ -variances2.scala:77: error: contravariant type A occurs in covariant position in type ()D => A of method f66 +variances2.scala:77: error: contravariant type A occurs in covariant position in type (): D => A of method f66 def f66(): D => A = null ^ -variances2.scala:80: error: contravariant type D occurs in covariant position in type ()D => D of method f69 +variances2.scala:80: error: contravariant type D occurs in covariant position in type (): D => D of method f69 def f69(): D => D = null ^ -variances2.scala:83: error: covariant type E occurs in contravariant position in type ()E => A of method f72 +variances2.scala:83: error: covariant type E occurs in 
contravariant position in type (): E => A of method f72 def f72(): E => A = null ^ -variances2.scala:84: error: covariant type E occurs in contravariant position in type ()E => B of method f73 +variances2.scala:84: error: covariant type E occurs in contravariant position in type (): E => B of method f73 def f73(): E => B = null ^ -variances2.scala:85: error: covariant type E occurs in contravariant position in type ()E => C of method f74 +variances2.scala:85: error: covariant type E occurs in contravariant position in type (): E => C of method f74 def f74(): E => C = null ^ -variances2.scala:86: error: covariant type E occurs in contravariant position in type ()E => D of method f75 +variances2.scala:86: error: covariant type E occurs in contravariant position in type (): E => D of method f75 def f75(): E => D = null ^ -variances2.scala:87: error: covariant type E occurs in contravariant position in type ()E => E of method f76 +variances2.scala:87: error: covariant type E occurs in contravariant position in type (): E => E of method f76 def f76(): E => E = null ^ -variances2.scala:88: error: covariant type E occurs in contravariant position in type ()E => F of method f77 +variances2.scala:88: error: covariant type E occurs in contravariant position in type (): E => F of method f77 def f77(): E => F = null ^ -variances2.scala:89: error: contravariant type A occurs in covariant position in type ()F => A of method f78 +variances2.scala:89: error: contravariant type A occurs in covariant position in type (): F => A of method f78 def f78(): F => A = null ^ -variances2.scala:92: error: contravariant type D occurs in covariant position in type ()F => D of method f81 +variances2.scala:92: error: contravariant type D occurs in covariant position in type (): F => D of method f81 def f81(): F => D = null ^ -variances2.scala:96: error: contravariant type A occurs in covariant position in type (x: A)A of method f84 +variances2.scala:96: error: contravariant type A occurs in 
covariant position in type (x: A): A of method f84 def f84(x: A): A = ??? ^ -variances2.scala:99: error: contravariant type D occurs in covariant position in type (x: A)D of method f87 +variances2.scala:99: error: contravariant type D occurs in covariant position in type (x: A): D of method f87 def f87(x: A): D = ??? ^ -variances2.scala:102: error: contravariant type A occurs in covariant position in type (x: B)A of method f90 +variances2.scala:102: error: contravariant type A occurs in covariant position in type (x: B): A of method f90 def f90(x: B): A = ??? ^ variances2.scala:102: error: covariant type B occurs in contravariant position in type B of value x @@ -148,7 +148,7 @@ variances2.scala:103: error: covariant type B occurs in contravariant position i variances2.scala:104: error: covariant type B occurs in contravariant position in type B of value x def f92(x: B): C = ??? ^ -variances2.scala:105: error: contravariant type D occurs in covariant position in type (x: B)D of method f93 +variances2.scala:105: error: contravariant type D occurs in covariant position in type (x: B): D of method f93 def f93(x: B): D = ??? ^ variances2.scala:105: error: covariant type B occurs in contravariant position in type B of value x @@ -160,19 +160,19 @@ variances2.scala:106: error: covariant type B occurs in contravariant position i variances2.scala:107: error: covariant type B occurs in contravariant position in type B of value x def f95(x: B): F = ??? ^ -variances2.scala:108: error: contravariant type A occurs in covariant position in type (x: C)A of method f96 +variances2.scala:108: error: contravariant type A occurs in covariant position in type (x: C): A of method f96 def f96(x: C): A = ??? ^ -variances2.scala:111: error: contravariant type D occurs in covariant position in type (x: C)D of method f99 +variances2.scala:111: error: contravariant type D occurs in covariant position in type (x: C): D of method f99 def f99(x: C): D = ??? 
^ -variances2.scala:114: error: contravariant type A occurs in covariant position in type (x: D)A of method f102 +variances2.scala:114: error: contravariant type A occurs in covariant position in type (x: D): A of method f102 def f102(x: D): A = ??? ^ -variances2.scala:117: error: contravariant type D occurs in covariant position in type (x: D)D of method f105 +variances2.scala:117: error: contravariant type D occurs in covariant position in type (x: D): D of method f105 def f105(x: D): D = ??? ^ -variances2.scala:120: error: contravariant type A occurs in covariant position in type (x: E)A of method f108 +variances2.scala:120: error: contravariant type A occurs in covariant position in type (x: E): A of method f108 def f108(x: E): A = ??? ^ variances2.scala:120: error: covariant type E occurs in contravariant position in type E of value x @@ -184,7 +184,7 @@ variances2.scala:121: error: covariant type E occurs in contravariant position i variances2.scala:122: error: covariant type E occurs in contravariant position in type E of value x def f110(x: E): C = ??? ^ -variances2.scala:123: error: contravariant type D occurs in covariant position in type (x: E)D of method f111 +variances2.scala:123: error: contravariant type D occurs in covariant position in type (x: E): D of method f111 def f111(x: E): D = ??? ^ variances2.scala:123: error: covariant type E occurs in contravariant position in type E of value x @@ -196,10 +196,10 @@ variances2.scala:124: error: covariant type E occurs in contravariant position i variances2.scala:125: error: covariant type E occurs in contravariant position in type E of value x def f113(x: E): F = ??? ^ -variances2.scala:126: error: contravariant type A occurs in covariant position in type (x: F)A of method f114 +variances2.scala:126: error: contravariant type A occurs in covariant position in type (x: F): A of method f114 def f114(x: F): A = ??? 
^ -variances2.scala:129: error: contravariant type D occurs in covariant position in type (x: F)D of method f117 +variances2.scala:129: error: contravariant type D occurs in covariant position in type (x: F): D of method f117 def f117(x: F): D = ??? ^ variances2.scala:133: error: contravariant type A occurs in covariant position in supertype Cov[A] of object O1 @@ -226,4 +226,4 @@ variances2.scala:148: error: contravariant type D occurs in invariant position i variances2.scala:149: error: covariant type E occurs in invariant position in supertype Inv[E] of object O17 object O17 extends Inv[E] ^ -76 errors found +76 errors diff --git a/test/files/neg/variances2.scala b/test/files/neg/variances2.scala index d30345dd8300..0a57f8120dc2 100644 --- a/test/files/neg/variances2.scala +++ b/test/files/neg/variances2.scala @@ -153,7 +153,7 @@ trait Trait[-A, +B, C] { trait Trait2[-A, +B, C] { // trait Inner[-D <: C, +E >: C, F] { - def method[D <: A, E >: B, F]() { + def method[D <: A, E >: B, F](): Unit = { def f0(x: A): Unit = () def f1(x: B): Unit = () def f2(x: C): Unit = () diff --git a/test/files/neg/variant-placeholders-future.check b/test/files/neg/variant-placeholders-future.check index e3361c5560a7..d166e8d577a9 100644 --- a/test/files/neg/variant-placeholders-future.check +++ b/test/files/neg/variant-placeholders-future.check @@ -1,7 +1,7 @@ -variant-placeholders-future.scala:4: error: `=', `>:', or `<:' expected +variant-placeholders-future.scala:4: error: `=`, `>:`, or `<:` expected type -_ = Int // error -_ not allowed as a type def name without backticks ^ -variant-placeholders-future.scala:5: error: `=', `>:', or `<:' expected +variant-placeholders-future.scala:5: error: `=`, `>:`, or `<:` expected type +_ = Int // error +_ not allowed as a type def name without backticks ^ -two errors found +2 errors diff --git a/test/files/neg/variant-placeholders-future.scala b/test/files/neg/variant-placeholders-future.scala index 75296ff945b4..62ec844d76f1 100644 --- 
a/test/files/neg/variant-placeholders-future.scala +++ b/test/files/neg/variant-placeholders-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // object Test { type -_ = Int // error -_ not allowed as a type def name without backticks diff --git a/test/files/neg/variant-placeholders-nofuture.check b/test/files/neg/variant-placeholders-nofuture.check index b4148154918a..8cf591d0a32f 100644 --- a/test/files/neg/variant-placeholders-nofuture.check +++ b/test/files/neg/variant-placeholders-nofuture.check @@ -4,4 +4,4 @@ variant-placeholders-nofuture.scala:5: error: ';' expected but '_' found. variant-placeholders-nofuture.scala:6: error: ')' expected but '_' found. val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 ^ -two errors found +2 errors diff --git a/test/files/neg/view-bounds-deprecation.check b/test/files/neg/view-bounds-deprecation.check new file mode 100644 index 000000000000..277d40449492 --- /dev/null +++ b/test/files/neg/view-bounds-deprecation.check @@ -0,0 +1,11 @@ +view-bounds-deprecation.scala:4: warning: view bounds are deprecated; use an implicit parameter instead. + example: instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)` + def f[A <% Int](a: A) = null + ^ +view-bounds-deprecation.scala:5: warning: view bounds are deprecated; use an implicit parameter instead. + example: instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)` + def g[C, B <: C, A <% B : Numeric](a: A) = null + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/view-bounds-deprecation.scala b/test/files/neg/view-bounds-deprecation.scala new file mode 100644 index 000000000000..4d4e56f3bc7f --- /dev/null +++ b/test/files/neg/view-bounds-deprecation.scala @@ -0,0 +1,6 @@ +//> using options -deprecation -Xfatal-warnings +// +object Test { + def f[A <% Int](a: A) = null + def g[C, B <: C, A <% B : Numeric](a: A) = null +} diff --git a/test/files/neg/view-bounds-removal.check b/test/files/neg/view-bounds-removal.check new file mode 100644 index 000000000000..3bebd5120cef --- /dev/null +++ b/test/files/neg/view-bounds-removal.check @@ -0,0 +1,13 @@ +view-bounds-removal.scala:4: error: view bounds are unsupported; use an implicit parameter instead. + example: instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)` +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def f[A <% Int](a: A) = null + ^ +view-bounds-removal.scala:5: error: view bounds are unsupported; use an implicit parameter instead. + example: instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)` +Scala 3 migration messages are issued as errors under -Xsource:3. Use -Wconf or @nowarn to demote them to warnings or suppress. 
+Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=scala3-migration + def g[C, B <: C, A <% B : Numeric](a: A) = null + ^ +2 errors diff --git a/test/files/neg/view-bounds-removal.scala b/test/files/neg/view-bounds-removal.scala new file mode 100644 index 000000000000..7c1d741d5cdb --- /dev/null +++ b/test/files/neg/view-bounds-removal.scala @@ -0,0 +1,6 @@ +//> using options -Xsource:3 +// +object Test { + def f[A <% Int](a: A) = null + def g[C, B <: C, A <% B : Numeric](a: A) = null +} diff --git a/test/files/neg/viewtest.check b/test/files/neg/viewtest.check index 21ed93a01cee..9b1ee3a5f9a5 100644 --- a/test/files/neg/viewtest.check +++ b/test/files/neg/viewtest.check @@ -3,4 +3,4 @@ viewtest.scala:43: error: type mismatch; required: List[a(in method view3)] case y1: List[a] => compareLists(x, y1) ^ -one error found +1 error diff --git a/test/files/neg/viewtest.scala b/test/files/neg/viewtest.scala index 5e7d624d2307..fe3568b33540 100644 --- a/test/files/neg/viewtest.scala +++ b/test/files/neg/viewtest.scala @@ -4,8 +4,8 @@ package test */ trait Ordered[+a] { - /** Result of comparing `this' with operand `that'. - * returns `x' where + /** Result of comparing `this` with operand `that`. + * returns `x` where * x < 0 iff this < that * x == 0 iff this == that * x > 0 iff this > that diff --git a/test/files/neg/virtpatmat_exhaust_big.check b/test/files/neg/virtpatmat_exhaust_big.check index ed25da521a2e..79e1f3362dce 100644 --- a/test/files/neg/virtpatmat_exhaust_big.check +++ b/test/files/neg/virtpatmat_exhaust_big.check @@ -2,6 +2,6 @@ virtpatmat_exhaust_big.scala:29: warning: match may not be exhaustive. It would fail on the following input: Z11() def foo(z: Z) = z match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/virtpatmat_exhaust_big.scala b/test/files/neg/virtpatmat_exhaust_big.scala index 91c2fe67d51a..6f53cd24f28a 100644 --- a/test/files/neg/virtpatmat_exhaust_big.scala +++ b/test/files/neg/virtpatmat_exhaust_big.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // sealed abstract class Z object Z { diff --git a/test/files/neg/virtpatmat_exhaust_compound.check b/test/files/neg/virtpatmat_exhaust_compound.check index a6c263882583..e9de8d45bb32 100644 --- a/test/files/neg/virtpatmat_exhaust_compound.check +++ b/test/files/neg/virtpatmat_exhaust_compound.check @@ -1,15 +1,15 @@ -virtpatmat_exhaust_compound.scala:15: warning: match may not be exhaustive. +virtpatmat_exhaust_compound.scala:16: warning: match may not be exhaustive. It would fail on the following inputs: O1, O2, O4 a match { ^ -virtpatmat_exhaust_compound.scala:19: warning: match may not be exhaustive. +virtpatmat_exhaust_compound.scala:20: warning: match may not be exhaustive. It would fail on the following input: O4 def t1(a: Product with Base with Base2) = a match { ^ -virtpatmat_exhaust_compound.scala:23: warning: match may not be exhaustive. +virtpatmat_exhaust_compound.scala:24: warning: match may not be exhaustive. It would fail on the following input: O2 def t2(a: Product with Base { def foo: Int }) = a match { ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found +error: No warnings can be incurred under -Werror. 
+3 warnings +1 error diff --git a/test/files/neg/virtpatmat_exhaust_compound.scala b/test/files/neg/virtpatmat_exhaust_compound.scala index 79b52dc74222..4860d94558e6 100644 --- a/test/files/neg/virtpatmat_exhaust_compound.scala +++ b/test/files/neg/virtpatmat_exhaust_compound.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed trait Base case object O1 extends Base case object O2 extends Base { diff --git a/test/files/neg/virtpatmat_reach_null.check b/test/files/neg/virtpatmat_reach_null.check index b0e7638d0afc..0dceb45e37f4 100644 --- a/test/files/neg/virtpatmat_reach_null.check +++ b/test/files/neg/virtpatmat_reach_null.check @@ -1,6 +1,6 @@ -virtpatmat_reach_null.scala:14: warning: unreachable code +virtpatmat_reach_null.scala:15: warning: unreachable code case _ => // unreachable ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/virtpatmat_reach_null.scala b/test/files/neg/virtpatmat_reach_null.scala index 60b94a277a5e..f6c8db0ca687 100644 --- a/test/files/neg/virtpatmat_reach_null.scala +++ b/test/files/neg/virtpatmat_reach_null.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed abstract class Const { final def excludes(other: Const) = (this, other) match { diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.check b/test/files/neg/virtpatmat_reach_sealed_unsealed.check index 6e8b3b8f230c..f42ae5c2f7a8 100644 --- a/test/files/neg/virtpatmat_reach_sealed_unsealed.check +++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.check @@ -1,16 +1,16 @@ -virtpatmat_reach_sealed_unsealed.scala:17: warning: match may not be exhaustive. +virtpatmat_reach_sealed_unsealed.scala:18: warning: match may not be exhaustive. 
It would fail on the following input: false (true: Boolean) match { case true => } // not exhaustive, but reachable ^ -virtpatmat_reach_sealed_unsealed.scala:19: warning: unreachable code +virtpatmat_reach_sealed_unsealed.scala:20: warning: unreachable code (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable ^ -virtpatmat_reach_sealed_unsealed.scala:20: warning: unreachable code +virtpatmat_reach_sealed_unsealed.scala:21: warning: unreachable code (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable ^ -virtpatmat_reach_sealed_unsealed.scala:21: warning: unreachable code +virtpatmat_reach_sealed_unsealed.scala:22: warning: unreachable code (true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.scala b/test/files/neg/virtpatmat_reach_sealed_unsealed.scala index b701a292d07c..f0a8905f6d72 100644 --- a/test/files/neg/virtpatmat_reach_sealed_unsealed.scala +++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed abstract class X sealed case class A(x: Int) extends X diff --git a/test/files/neg/virtpatmat_unreach_select.check b/test/files/neg/virtpatmat_unreach_select.check index bfcc79ced172..7b24eda8df24 100644 --- a/test/files/neg/virtpatmat_unreach_select.check +++ b/test/files/neg/virtpatmat_unreach_select.check @@ -1,6 +1,6 @@ -virtpatmat_unreach_select.scala:11: warning: unreachable code +virtpatmat_unreach_select.scala:12: warning: unreachable code case WARNING.id => // unreachable ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/virtpatmat_unreach_select.scala b/test/files/neg/virtpatmat_unreach_select.scala index e76a5149d7cc..dc49fe2dabd4 100644 --- a/test/files/neg/virtpatmat_unreach_select.scala +++ b/test/files/neg/virtpatmat_unreach_select.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Werror +// class Test { object severity extends Enumeration class Severity(val id: Int) extends severity.Value diff --git a/test/files/neg/volatile-intersection.check b/test/files/neg/volatile-intersection.check index ef983994ab41..c96252b49e52 100644 --- a/test/files/neg/volatile-intersection.check +++ b/test/files/neg/volatile-intersection.check @@ -1,4 +1,4 @@ volatile-intersection.scala:8: error: illegal type selection from volatile type C.this.D with C.this.U val y: (D with U)#T = new B { } ^ -one error found +1 error diff --git a/test/files/neg/volatile.check b/test/files/neg/volatile.check index b904284125e3..b5a0e8e8b596 100644 --- a/test/files/neg/volatile.check +++ b/test/files/neg/volatile.check @@ -4,4 +4,4 @@ volatile.scala:11: error: Inferred type C.this.D with C.this.E#T contains type s volatile.scala:11: error: Inferred type () => C.this.D with C.this.E#T contains type selection from volatile type C.this.D with C.this.E var sneak = { () => y.x } ^ -two errors found +2 errors diff --git a/test/files/neg/volatile_no_override.check b/test/files/neg/volatile_no_override.check index a9a60ab697b3..728e1b45564d 100644 --- a/test/files/neg/volatile_no_override.check +++ b/test/files/neg/volatile_no_override.check @@ -1,5 +1,5 @@ -volatile_no_override.scala:13: error: overriding value x in class A of type Volatile.this.D; - value x has a volatile type; cannot override a member with non-volatile type +volatile_no_override.scala:13: error: member with volatile type cannot override member with non-volatile type: +val x: Volatile.this.D 
(defined in class A) val x: A with D = null ^ -one error found +1 error diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check index 8381b8e70d55..bfa91758007b 100644 --- a/test/files/neg/warn-inferred-any.check +++ b/test/files/neg/warn-inferred-any.check @@ -1,15 +1,21 @@ -warn-inferred-any.scala:9: warning: a type was inferred to be `Any`; this may indicate a programming error. +warn-inferred-any.scala:10: warning: a type was inferred to be `Any`; this may indicate a programming error. { List(1, 2, 3) contains "a" } // only this warns ^ warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. - { 1l to 5l contains 5 } - ^ + { 1 to 5 contains 5L } // should warn: scala.Predef.intWrapper(1).to(5).contains[AnyVal](5L) + ^ warn-inferred-any.scala:18: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. - { 1l to 5l contains 5d } + { 1L to 5L contains 5 } // warn + ^ +warn-inferred-any.scala:19: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. + { 1L to 5L contains 5d } // warn ^ -warn-inferred-any.scala:26: warning: a type was inferred to be `Any`; this may indicate a programming error. +warn-inferred-any.scala:27: warning: a type was inferred to be `Any`; this may indicate a programming error. def za = f(1, "one") ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found +warn-inferred-any.scala:37: warning: a type was inferred to be `Object`; this may indicate a programming error. + cs.contains(new C2) // warns + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/warn-inferred-any.scala b/test/files/neg/warn-inferred-any.scala index a2d3f40630c6..069ddbafa42d 100644 --- a/test/files/neg/warn-inferred-any.scala +++ b/test/files/neg/warn-inferred-any.scala @@ -1,8 +1,9 @@ -// scalac: -Xfatal-warnings -Xlint:infer-any +//> using options -Xfatal-warnings -Xlint:infer-any +// trait Foo[-A <: AnyRef, +B <: AnyRef] { def run[U](x: A)(action: B => U): Boolean = ??? - { run(_: A)(_: B => String) } + def foo = { run(_: A)(_: B => String) } } trait Xs[+A] { @@ -13,16 +14,59 @@ trait Xs[+A] { } trait Ys[+A] { - { 1 to 5 contains 5l } - { 1l to 5l contains 5 } - { 1l to 5l contains 5d } - { 1l to 5l contains 5l } + { 1 to 5 contains 5L } // should warn: scala.Predef.intWrapper(1).to(5).contains[AnyVal](5L) + { 1L to 5L contains 5 } // warn + { 1L to 5L contains 5d } // warn + { 1L to 5L contains 5L } } trait Zs { def f[A](a: A*) = 42 - def g[A >: Any](a: A*) = 42 // don't warn + def g[A >: Any](a: A*) = 42 // don't warn def za = f(1, "one") def zu = g(1, "one") } + +class C1 +class C2 + +trait Cs { + val cs = List(new C1) + cs.contains[AnyRef](new C2) // doesn't warn + cs.contains(new C2) // warns +} + +object t11798 { + + trait ZIO[-R, +E, +A] + type Task[A] = ZIO[Any, Throwable, A] // explicit Any + + trait ZStream[-R, +E, +A] { + def mapM[R1 <: R, E1 >: E, B](f: A => ZIO[R1, E1, B]): ZStream[R1, E1, B] = + ??? + } + + val stream: ZStream[Any, Throwable, Int] = ??? + def f(n: Int): Task[Int] = ??? + stream.mapM(f) // should not warn + stream.mapM(n => (f(n): ZIO[Any, Throwable, Int])) + stream.mapM(f: Int => ZIO[Any, Throwable, Int]) +} + +/** + * 1 to 5 contains 5L fails to warn, because silent mode due to overload + * +scala> :type 1 to 5 +scala.collection.immutable.Range.Inclusive + +scala> :type 1L to 5L +scala.collection.immutable.NumericRange.Inclusive[Long] + +warning: !!! 
HK subtype check on scala.this.Int and [B >: scala.this.Int]B, but both don't normalize to polytypes: + tp1=scala.this.Int ClassNoArgsTypeRef + tp2=[B >: scala.this.Int]B PolyType +[log typer] infer method alt value contains with alternatives List([B >: scala.this.Int]( elem: B)scala.this.Boolean, ( x: scala.this.Int)scala.this.Boolean) argtpes=List(5L) pt=? +[log typer] infer method inst scala.Predef.intWrapper(1).to(5).contains[B], tparams = List(type B), args = List(scala.this.Long(5L)), pt = ?, lobounds = List(scala.this.Int), parambounds = List( >: scala.this.Int) +[log typer] checkKindBounds0(List(type B), List(scala.this.AnyVal), , , true) + */ diff --git a/test/files/neg/warn-shadowing-import-213.check b/test/files/neg/warn-shadowing-import-213.check deleted file mode 100644 index 71318e628987..000000000000 --- a/test/files/neg/warn-shadowing-import-213.check +++ /dev/null @@ -1,8 +0,0 @@ -Test1.scala:4: warning: This wildcard import imports a.O, which is shadowed by b.O. -This is not according to the language specification and has changed in Scala 2.13, where a.O takes precedence. -To keep the same meaning in 2.12 and 2.13, un-import O by adding `O => _` to the import list. -import a._ - ^ -error: No warnings can be incurred under -Xfatal-warnings. 
-one warning found -one error found diff --git a/test/files/neg/warn-shadowing-import-213/A.scala b/test/files/neg/warn-shadowing-import-213/A.scala deleted file mode 100644 index 930cf96d00ab..000000000000 --- a/test/files/neg/warn-shadowing-import-213/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -package a - -object O { def a = 0 } \ No newline at end of file diff --git a/test/files/neg/warn-shadowing-import-213/B.scala b/test/files/neg/warn-shadowing-import-213/B.scala deleted file mode 100644 index f1400519004d..000000000000 --- a/test/files/neg/warn-shadowing-import-213/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -package b - -object O { def b = 0 } \ No newline at end of file diff --git a/test/files/neg/warn-shadowing-import-213/Test1.scala b/test/files/neg/warn-shadowing-import-213/Test1.scala deleted file mode 100644 index 1a9865f0e5b9..000000000000 --- a/test/files/neg/warn-shadowing-import-213/Test1.scala +++ /dev/null @@ -1,8 +0,0 @@ -// scalac: -Xsource:2.13 -Werror - -package b -import a._ - -class C { - def t = O.b -} diff --git a/test/files/neg/warn-shadowing-import-213/Test2.scala b/test/files/neg/warn-shadowing-import-213/Test2.scala deleted file mode 100644 index 9569d3f6ccba..000000000000 --- a/test/files/neg/warn-shadowing-import-213/Test2.scala +++ /dev/null @@ -1,8 +0,0 @@ -// scalac: -Xsource:2.13 -Werror - -package b -import b._ // no warning, imported b.O is the same as package member b.O - -class D { - def t = O.b -} diff --git a/test/files/neg/warn-unused-explicits.check b/test/files/neg/warn-unused-explicits.check new file mode 100644 index 000000000000..9238b072a5ed --- /dev/null +++ b/test/files/neg/warn-unused-explicits.check @@ -0,0 +1,6 @@ +warn-unused-explicits.scala:9: warning: parameter x in method warn is never used + def warn(x: Int) = answer + ^ +error: No warnings can be incurred under -Werror. 
+1 warning +1 error diff --git a/test/files/neg/warn-unused-explicits.scala b/test/files/neg/warn-unused-explicits.scala new file mode 100644 index 000000000000..b90ac269fd51 --- /dev/null +++ b/test/files/neg/warn-unused-explicits.scala @@ -0,0 +1,11 @@ +//> using options -Wunused:explicits -Werror +// +trait Context[A] +trait ExplicitsOnly { + def i(implicit s: String) = answer + def f[A](implicit ctx: Context[A]) = answer + def g[A: Context] = answer + + def warn(x: Int) = answer + def answer: Int = 42 +} diff --git a/test/files/neg/warn-unused-implicits.check b/test/files/neg/warn-unused-implicits.check index ca6bb32dc5e4..62482bf568de 100644 --- a/test/files/neg/warn-unused-implicits.check +++ b/test/files/neg/warn-unused-implicits.check @@ -1,9 +1,15 @@ -warn-unused-implicits.scala:12: warning: parameter value s in method f is never used +warn-unused-implicits.scala:13: warning: parameter s in method f is never used )(implicit s: String): Int = { // warn ^ -warn-unused-implicits.scala:32: warning: parameter value s in method i is never used +warn-unused-implicits.scala:33: warning: parameter s in method i is never used def i(implicit s: String, t: Int) = t // yes, warn ^ -error: No warnings can be incurred under -Xfatal-warnings. -two warnings found -one error found +warn-unused-implicits.scala:52: warning: parameter ev in method ==> is never used + def ==>[B](b: B)(implicit ev: BadCanEqual[A, B]): Boolean = a == b // warn, ev.run + ^ +warn-unused-implicits.scala:60: warning: parameter m in method f is never used + def f[A](implicit m: MembersOnly[A]) = toString.nonEmpty // warn implicit trait with private member + ^ +error: No warnings can be incurred under -Werror. 
+4 warnings +1 error diff --git a/test/files/neg/warn-unused-implicits.scala b/test/files/neg/warn-unused-implicits.scala index 5114bb9db663..2fd2b374f7cd 100644 --- a/test/files/neg/warn-unused-implicits.scala +++ b/test/files/neg/warn-unused-implicits.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-unused:implicits -Xfatal-warnings +//> using options -Werror -Wunused:implicits +// trait InterFace { /** Call something. */ @@ -31,3 +32,30 @@ trait BadAPI extends InterFace { def i(implicit s: String, t: Int) = t // yes, warn } + +trait T { + def f()(implicit i: Int): Int +} +trait U { _: T => + override def f()(implicit i: Int): Int = g() // no warn, required by baseclass, scala/bug#12876 + def g(): Int +} + +trait CanEqual[A, B] +trait BadCanEqual[A, B] extends Runnable + +class EqTest { + implicit class ArrowAssert[A](a: A) { + def ==>[B](b: B)(implicit ev: CanEqual[A, B]): Boolean = a == b // no warn, no non-trivial members + } + implicit class BadArrowAssert[A](a: A) { + def ==>[B](b: B)(implicit ev: BadCanEqual[A, B]): Boolean = a == b // warn, ev.run + } +} + +trait MembersOnly[A] { + private def member() = 42 // don't care whether member is accessible; maybe they must pass it elsewhere +} +class Privately { + def f[A](implicit m: MembersOnly[A]) = toString.nonEmpty // warn implicit trait with private member +} diff --git a/test/files/neg/warn-unused-imports-b.check b/test/files/neg/warn-unused-imports-b.check new file mode 100644 index 000000000000..19d201c2354b --- /dev/null +++ b/test/files/neg/warn-unused-imports-b.check @@ -0,0 +1,55 @@ +warn-unused-imports_2.scala:135: error: type mismatch; + found : Int(42) + required: Sample.X + f(42) // error + ^ +warn-unused-imports_2.scala:59: warning: Unused import + import p1.A // warn + ^ +warn-unused-imports_2.scala:64: warning: Unused import + import p1.{ A, B } // warn on A + ^ +warn-unused-imports_2.scala:69: warning: Unused import + import p1.{ A, B } // warn on both + ^ +warn-unused-imports_2.scala:69: warning: 
Unused import + import p1.{ A, B } // warn on both + ^ +warn-unused-imports_2.scala:75: warning: Unused import + import c._ // warn + ^ +warn-unused-imports_2.scala:80: warning: Unused import + import p1._ // warn + ^ +warn-unused-imports_2.scala:87: warning: Unused import + import c._ // warn + ^ +warn-unused-imports_2.scala:93: warning: Unused import + import p1.c._ // warn + ^ +warn-unused-imports_2.scala:100: warning: Unused import + import p1._ // warn + ^ +warn-unused-imports_2.scala:120: warning: Unused import + import p1.A // warn + ^ +warn-unused-imports_2.scala:134: warning: Unused import + import Sample.Implicits._ // warn + ^ +warn-unused-imports_2.scala:145: warning: Unused import + import Sample.Implicits.useless // warn + ^ +warn-unused-imports_2.scala:149: warning: Unused import + import java.io.File // warn + ^ +warn-unused-imports_2.scala:150: warning: Unused import + import scala.concurrent.Future // warn + ^ +warn-unused-imports_2.scala:151: warning: Unused import + import scala.concurrent.ExecutionContext.Implicits.global // warn + ^ +warn-unused-imports_2.scala:152: warning: Unused import + import p1.A // warn + ^ +16 warnings +1 error diff --git a/test/files/neg/warn-unused-imports-b/sample_1.scala b/test/files/neg/warn-unused-imports-b/sample_1.scala new file mode 100644 index 000000000000..eea4d0eb4c51 --- /dev/null +++ b/test/files/neg/warn-unused-imports-b/sample_1.scala @@ -0,0 +1,32 @@ + +import language._ + +object Sample { + trait X + trait Y + + // import of the non-implicit should be unused + object Implicits { + def `int to X`(i: Int): X = null + implicit def `int to Y`(i: Int): Y = null + implicit def useless(i: Int): String = null + } + + def f(x: X) = ??? + def g(y: Y) = ??? 
+} + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object Macro { + def f: Int = macro fImpl + def fImpl(c: Context): c.Tree = { + import c.universe._ + + q""" + import scala.util.Random + 42 // TODO randomize + """ + } +} diff --git a/test/files/neg/warn-unused-imports-b/warn-unused-imports_2.scala b/test/files/neg/warn-unused-imports-b/warn-unused-imports_2.scala new file mode 100644 index 000000000000..d09377524abc --- /dev/null +++ b/test/files/neg/warn-unused-imports-b/warn-unused-imports_2.scala @@ -0,0 +1,161 @@ +//> using options -Werror -Wunused:imports -Ymacro-annotations +// +class Bippo { + def length: Int = 123 + class Tree +} + +package object p1 { + class A + implicit class B(val s: String) { def bippy = s } + val c: Bippo = new Bippo + type D = String +} +package object p2 { + class A + implicit class B(val s: String) { def bippy = s } + val c: Bippo = new Bippo + type D = Int +} + +trait NoWarn { + { + import p1._ // no warn + println("abc".bippy) + } + + { + import p1._ // no warn + println(new A) + } + + { + import p1.B // no warn + println("abc".bippy) + } + + { + import p1._ // no warn + import c._ // no warn + println(length) + } + + { + import p1._ // no warn + import c._ // no warn + val x: Tree = null + println(x) + } + + { + import p1.D // no warn + val x: D = null + println(x) + } +} + +trait Warn { + { + import p1.A // warn + println(123) + } + + { + import p1.{ A, B } // warn on A + println("abc".bippy) + } + + { + import p1.{ A, B } // warn on both + println(123) + } + + { + import p1._ // no warn (technically this could warn, but not worth the effort to unroll unusedness transitively) + import c._ // warn + println(123) + } + + { + import p1._ // warn + println(123) + } + + { + class Tree + import p1._ // no warn + import c._ // warn + val x: Tree = null + println(x) + } + + { + import p1.c._ // warn + println(123) + } +} + +trait Nested { + { + import p1._ // warn + trait Warn { // don't 
warn about unused local trait with -Ywarn-unused:imports + import p2._ + println(new A) + println("abc".bippy) + } + println("") + } + + { + import p1._ // no warn + trait NoWarn { + import p2.B // no warn + println("abc".bippy) + println(new A) + } + println(new NoWarn { }) + } + + { + import p1.A // warn + trait Warn { + import p2.A + println(new A) + } + println(new Warn { }) + } +} + +// test unusage of imports from other compilation units after implicit search +trait Outsiders { + { + //implicit search should not disable warning + import Sample._ + import Sample.Implicits._ // warn + f(42) // error + } + { + import Sample._ + import Sample.Implicits._ // nowarn + g(42) // ok + } + { + import Sample._ + import Sample.Implicits.`int to Y` // nowarn + import Sample.Implicits.useless // warn + g(42) // ok + } + { + import java.io.File // warn + import scala.concurrent.Future // warn + import scala.concurrent.ExecutionContext.Implicits.global // warn + import p1.A // warn + import p1.B // no warn + println("abc".bippy) + //Future("abc".bippy) + } +} + +class MacroClient { + def x = Macro.f // don't crash; but also don't warn on expansion, see scala/bug#10270 and [pos|neg]/t10270 +} diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check index 90533f0078fb..19d201c2354b 100644 --- a/test/files/neg/warn-unused-imports.check +++ b/test/files/neg/warn-unused-imports.check @@ -1,55 +1,55 @@ -warn-unused-imports_2.scala:134: error: type mismatch; +warn-unused-imports_2.scala:135: error: type mismatch; found : Int(42) required: Sample.X f(42) // error ^ -warn-unused-imports_2.scala:58: warning: Unused import +warn-unused-imports_2.scala:59: warning: Unused import import p1.A // warn ^ -warn-unused-imports_2.scala:63: warning: Unused import +warn-unused-imports_2.scala:64: warning: Unused import import p1.{ A, B } // warn on A ^ -warn-unused-imports_2.scala:68: warning: Unused import +warn-unused-imports_2.scala:69: warning: Unused 
import import p1.{ A, B } // warn on both ^ -warn-unused-imports_2.scala:68: warning: Unused import +warn-unused-imports_2.scala:69: warning: Unused import import p1.{ A, B } // warn on both ^ -warn-unused-imports_2.scala:74: warning: Unused import +warn-unused-imports_2.scala:75: warning: Unused import import c._ // warn ^ -warn-unused-imports_2.scala:79: warning: Unused import +warn-unused-imports_2.scala:80: warning: Unused import import p1._ // warn ^ -warn-unused-imports_2.scala:86: warning: Unused import +warn-unused-imports_2.scala:87: warning: Unused import import c._ // warn ^ -warn-unused-imports_2.scala:92: warning: Unused import +warn-unused-imports_2.scala:93: warning: Unused import import p1.c._ // warn ^ -warn-unused-imports_2.scala:99: warning: Unused import +warn-unused-imports_2.scala:100: warning: Unused import import p1._ // warn ^ -warn-unused-imports_2.scala:119: warning: Unused import +warn-unused-imports_2.scala:120: warning: Unused import import p1.A // warn ^ -warn-unused-imports_2.scala:133: warning: Unused import +warn-unused-imports_2.scala:134: warning: Unused import import Sample.Implicits._ // warn ^ -warn-unused-imports_2.scala:144: warning: Unused import +warn-unused-imports_2.scala:145: warning: Unused import import Sample.Implicits.useless // warn ^ -warn-unused-imports_2.scala:148: warning: Unused import +warn-unused-imports_2.scala:149: warning: Unused import import java.io.File // warn ^ -warn-unused-imports_2.scala:149: warning: Unused import +warn-unused-imports_2.scala:150: warning: Unused import import scala.concurrent.Future // warn ^ -warn-unused-imports_2.scala:150: warning: Unused import +warn-unused-imports_2.scala:151: warning: Unused import import scala.concurrent.ExecutionContext.Implicits.global // warn ^ -warn-unused-imports_2.scala:151: warning: Unused import +warn-unused-imports_2.scala:152: warning: Unused import import p1.A // warn ^ -16 warnings found -one error found +16 warnings +1 error diff --git 
a/test/files/neg/warn-unused-imports/sample_1.scala b/test/files/neg/warn-unused-imports/sample_1.scala index d07b82ac24bc..80dcc59cfa10 100644 --- a/test/files/neg/warn-unused-imports/sample_1.scala +++ b/test/files/neg/warn-unused-imports/sample_1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:imports +//> using options -Werror -Wunused:imports import language._ diff --git a/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala b/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala index ccee51b9d8b7..d9fc9c69072f 100644 --- a/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala +++ b/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:imports +//> using options -Werror -Wunused:imports +// class Bippo { def length: Int = 123 class Tree diff --git a/test/files/neg/warn-unused-locals.check b/test/files/neg/warn-unused-locals.check index dfa8bed6ae1b..c3ddc76575dd 100644 --- a/test/files/neg/warn-unused-locals.check +++ b/test/files/neg/warn-unused-locals.check @@ -1,24 +1,24 @@ -warn-unused-locals.scala:8: warning: local var x in method f0 is never used +warn-unused-locals.scala:9: warning: local var x in method f0 is never used var x = 1 // warn ^ -warn-unused-locals.scala:15: warning: local val b in method f1 is never used +warn-unused-locals.scala:16: warning: local val b in method f1 is never used val b = new Outer // warn ^ -warn-unused-locals.scala:26: warning: local object HiObject in method l1 is never used +warn-unused-locals.scala:20: warning: local var x in method f2 is never updated: consider using immutable val + var x = 100 // warn about it being a var + ^ +warn-unused-locals.scala:27: warning: local object HiObject in method l1 is never used object HiObject { def f = this } // warn ^ -warn-unused-locals.scala:27: warning: local class Hi is never used +warn-unused-locals.scala:28: warning: local class Hi is never used class Hi { // 
warn ^ -warn-unused-locals.scala:31: warning: local class DingDongDoobie is never used +warn-unused-locals.scala:32: warning: local class DingDongDoobie is never used class DingDongDoobie // warn ^ -warn-unused-locals.scala:34: warning: local type OtherThing is never used +warn-unused-locals.scala:35: warning: local type OtherThing is never used type OtherThing = String // warn ^ -warn-unused-locals.scala:19: warning: local var x in method f2 is never updated: consider using immutable val - var x = 100 // warn about it being a var - ^ -error: No warnings can be incurred under -Xfatal-warnings. -7 warnings found -one error found +error: No warnings can be incurred under -Werror. +7 warnings +1 error diff --git a/test/files/neg/warn-unused-locals.scala b/test/files/neg/warn-unused-locals.scala index 4940ba77d3e0..ef9068d4845e 100644 --- a/test/files/neg/warn-unused-locals.scala +++ b/test/files/neg/warn-unused-locals.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-unused:locals -Xfatal-warnings +//> using options -Wunused:locals -Werror + class Outer { class Inner } @@ -35,3 +36,8 @@ object Types { (new Bippy): Something } } + +// breakage: local val x$1 in method skolemize is never used +case class SymbolKind(accurate: String, sanitized: String, abbreviation: String) { + def skolemize: SymbolKind = copy(accurate = s"$accurate skolem", abbreviation = s"$abbreviation#SKO") +} diff --git a/test/files/neg/warn-unused-params.check b/test/files/neg/warn-unused-params.check index 60c18ad02fd4..3f02ca64cd49 100644 --- a/test/files/neg/warn-unused-params.check +++ b/test/files/neg/warn-unused-params.check @@ -1,24 +1,48 @@ -warn-unused-params.scala:10: warning: parameter value b in method f is never used +warn-unused-params.scala:13: warning: parameter b in method f is never used b: String, // warn ^ -warn-unused-params.scala:33: warning: parameter value s in method i is never used - def i(implicit s: String) = 42 // yes, warn +warn-unused-params.scala:36: warning: parameter s in 
method i is never used + def i(implicit s: String) = answer // yes, warn ^ -warn-unused-params.scala:50: warning: parameter value u in class Unusing is never used +warn-unused-params.scala:53: warning: parameter u in class Unusing is never used class Unusing(u: Int) { // warn ^ -warn-unused-params.scala:60: warning: parameter value s in class CaseyAtTheBat is never used +warn-unused-params.scala:63: warning: parameter s in class CaseyAtTheBat is never used case class CaseyAtTheBat(k: Int)(s: String) // warn ^ -warn-unused-params.scala:63: warning: parameter value readResolve in method f is never used - def f(readResolve: Int) = 42 // warn +warn-unused-params.scala:66: warning: parameter readResolve in method f is never used + def f(readResolve: Int) = answer // warn ^ -warn-unused-params.scala:77: warning: parameter value i in anonymous function is never used - def f = (i: Int) => 42 // warn +warn-unused-params.scala:81: warning: parameter dummy in method g is never used + def g(dummy: DummyImplicit) = answer + ^ +warn-unused-params.scala:86: warning: parameter ev in method f2 is never used + def f2[A, B](ev: A =:= B) = answer + ^ +warn-unused-params.scala:87: warning: parameter ev in method g2 is never used + def g2[A, B](ev: A <:< B) = answer + ^ +warn-unused-params.scala:91: warning: parameter i in anonymous function is never used + def f = (i: Int) => answer // warn ^ -warn-unused-params.scala:83: warning: parameter value i in anonymous function is never used - def g = for (i <- List(1)) yield 42 // warn map.(i => 42) +warn-unused-params.scala:101: warning: parameter ctx in method f is never used + def f[A](implicit ctx: Context[A]) = answer + ^ +warn-unused-params.scala:102: warning: evidence parameter evidence$1 of type Context[A] in method g is never used + def g[A: Context] = answer + ^ +warn-unused-params.scala:104: warning: evidence parameter evidence$2 of type Context[A] in class Bound is never used +class Bound[A: Context] ^ -error: No warnings can be 
incurred under -Xfatal-warnings. -7 warnings found -one error found +warn-unused-params.scala:111: warning: parameter b in method f is never used + b: String, // warn + ^ +warn-unused-params.scala:134: warning: parameter s in method i is never used + def i(implicit s: String) = answer // yes, warn + ^ +warn-unused-params.scala:142: warning: parameter s in method f is never used + def f(s: Serializable) = toString.nonEmpty // warn explicit param of marker trait + ^ +error: No warnings can be incurred under -Werror. +15 warnings +1 error diff --git a/test/files/neg/warn-unused-params.scala b/test/files/neg/warn-unused-params.scala index 1fe87efaf265..4a03355e533e 100644 --- a/test/files/neg/warn-unused-params.scala +++ b/test/files/neg/warn-unused-params.scala @@ -1,4 +1,7 @@ -// scalac: -Ywarn-unused:params -Xfatal-warnings +//> using options -Wunused:params -Werror +// + +import Answers._ trait InterFace { /** Call something. */ @@ -12,7 +15,7 @@ trait BadAPI extends InterFace { println(c) a } - @deprecated ("no warn in deprecated API", since="yesterday") + @deprecated("no warn in deprecated API", since="yesterday") def g(a: Int, b: String, // no warn c: Double): Int = { @@ -30,7 +33,7 @@ trait BadAPI extends InterFace { override def equals(other: Any): Boolean = true // no warn - def i(implicit s: String) = 42 // yes, warn + def i(implicit s: String) = answer // yes, warn /* def future(x: Int): Int = { @@ -60,7 +63,7 @@ case class CaseyKasem(k: Int) // no warn case class CaseyAtTheBat(k: Int)(s: String) // warn trait Ignorance { - def f(readResolve: Int) = 42 // warn + def f(readResolve: Int) = answer // warn } class Reusing(u: Int) extends Unusing(u) // no warn @@ -73,12 +76,84 @@ trait Unimplementation { def f(u: Int): Int = ??? 
// no warn for param in unimplementation } +trait DumbStuff { + def f(implicit dummy: DummyImplicit) = answer + def g(dummy: DummyImplicit) = answer +} +trait Proofs { + def f[A, B](implicit ev: A =:= B) = answer + def g[A, B](implicit ev: A <:< B) = answer + def f2[A, B](ev: A =:= B) = answer + def g2[A, B](ev: A <:< B) = answer +} + trait Anonymous { - def f = (i: Int) => 42 // warn + def f = (i: Int) => answer // warn + + def f1 = (_: Int) => answer // no warn underscore parameter (a fresh name) + + def f2: Int => Int = _ + 1 // no warn placeholder syntax (a fresh name and synthetic parameter) - def f1 = (_: Int) => 42 // no warn underscore parameter (a fresh name) + def g = for (i <- List(1)) yield answer // no warn patvar elaborated as map.(i => 42) +} +trait Context[A] { def m(a: A): A = a } +trait Implicits { + def f[A](implicit ctx: Context[A]) = answer + def g[A: Context] = answer +} +class Bound[A: Context] +object Answers { + def answer: Int = 42 +} + +trait BadMix { _: InterFace => + def f(a: Int, + b: String, // warn + c: Double): Int = { + println(c) + a + } + @deprecated("no warn in deprecated API", since="yesterday") + def g(a: Int, + b: String, // no warn + c: Double): Int = { + println(c) + a + } + override def call(a: Int, + b: String, // no warn, required by superclass + c: Double): Int = { + println(c) + a + } + + def meth(x: Int) = x + + override def equals(other: Any): Boolean = true // no warn + + def i(implicit s: String) = answer // yes, warn +} + +class Unequal { + override def equals(other: Any) = toString.nonEmpty // no warn non-trivial RHS, required by universal method +} + +class Seriously { + def f(s: Serializable) = toString.nonEmpty // warn explicit param of marker trait +} + +class TryStart(start: String) { + def FINALLY(end: END.type) = start +} + +object END + +class Nested { + @annotation.unused private def actuallyNotUsed(fresh: Int, stale: Int) = fresh +} - def f2: Int => Int = _ + 1 // no warn placeholder syntax (a fresh 
name and synthethic parameter) +class Annie(value: String) extends annotation.StaticAnnotation // no warn for annotation - def g = for (i <- List(1)) yield 42 // warn map.(i => 42) +class Selfie { + def f(i: Int, j: Int) = this // no warn this is trivial } diff --git a/test/files/neg/warn-unused-patvars.check b/test/files/neg/warn-unused-patvars.check index 0acb6fb05e67..d473a2bc45bd 100644 --- a/test/files/neg/warn-unused-patvars.check +++ b/test/files/neg/warn-unused-patvars.check @@ -1,6 +1,6 @@ warn-unused-patvars.scala:10: warning: private val x in trait Boundings is never used - private val x = 42 // warn, sanity check + private val x = 42 // warn, to ensure that warnings are enabled ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/warn-unused-patvars.scala b/test/files/neg/warn-unused-patvars.scala index 34905a31ecc8..2a015e72dc23 100644 --- a/test/files/neg/warn-unused-patvars.scala +++ b/test/files/neg/warn-unused-patvars.scala @@ -1,13 +1,13 @@ -// scalac: -Ywarn-unused:-patvars,_ -Xfatal-warnings +//> using options -Wunused:-patvars,_ -Werror -// verify no warning when -Ywarn-unused:-patvars +// verify NO warning when -Wunused:-patvars case class C(a: Int, b: String, c: Option[String]) case class D(a: Int) trait Boundings { - private val x = 42 // warn, sanity check + private val x = 42 // warn, to ensure that warnings are enabled def c = C(42, "hello", Some("world")) def d = D(42) diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index bfec9ad9e8c3..4c49b4c09e65 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -1,72 +1,105 @@ -warn-unused-privates.scala:3: warning: private constructor in class Bippy is never used +warn-unused-privates.scala:31: warning: early initializers are deprecated; they will be 
replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. +class Boppy extends { + ^ +warn-unused-privates.scala:5: warning: private constructor in class Bippy is never used private def this(c: Int) = this(c, c) // warn ^ -warn-unused-privates.scala:5: warning: private method boop in class Bippy is never used +warn-unused-privates.scala:6: warning: private method bippy in class Bippy is never used + private def bippy(x: Int): Int = bippy(x) // warn + ^ +warn-unused-privates.scala:7: warning: private method boop in class Bippy is never used private def boop(x: Int) = x+a+b // warn ^ -warn-unused-privates.scala:7: warning: private val MILLIS2 in class Bippy is never used +warn-unused-privates.scala:8: warning: private val MILLIS1 in class Bippy is never used + final private val MILLIS1 = 2000 // now warn, might have been inlined + ^ +warn-unused-privates.scala:9: warning: private val MILLIS2 in class Bippy is never used final private val MILLIS2: Int = 1000 // warn ^ -warn-unused-privates.scala:14: warning: private val HEY_INSTANCE in object Bippy is never used +warn-unused-privates.scala:16: warning: private val HEY_INSTANCE in object Bippy is never used private val HEY_INSTANCE: Int = 1000 // warn ^ -warn-unused-privates.scala:15: warning: private val BOOL in object Bippy is never used +warn-unused-privates.scala:17: warning: private val BOOL in object Bippy is never used private lazy val BOOL: Boolean = true // warn ^ -warn-unused-privates.scala:37: warning: private val hummer in class Boppy is never used +warn-unused-privates.scala:41: warning: private val hummer in class Boppy is never used private val hummer = "def" // warn ^ -warn-unused-privates.scala:44: warning: private var v1 in trait Accessors is never used +warn-unused-privates.scala:43: warning: private val bum in class Boppy is never used + private final val bum = "ghi" // now warn, might have been (was) inlined + ^ +warn-unused-privates.scala:48: warning: private var 
v1 in trait Accessors is never used private var v1: Int = 0 // warn ^ -warn-unused-privates.scala:45: warning: private var v2 in trait Accessors is never used +warn-unused-privates.scala:49: warning: private var v2 in trait Accessors is never updated: consider using immutable val private var v2: Int = 0 // warn, never set ^ -warn-unused-privates.scala:46: warning: private var v3 in trait Accessors is never used +warn-unused-privates.scala:50: warning: private var v3 in trait Accessors is never used private var v3: Int = 0 // warn, never got ^ -warn-unused-privates.scala:57: warning: private var s1 in class StableAccessors is never used +warn-unused-privates.scala:53: warning: private var v5 in trait Accessors is never updated: consider using immutable val + private[this] var v5 = 0 // warn, never set + ^ +warn-unused-privates.scala:54: warning: private var v6 in trait Accessors is never used + private[this] var v6 = 0 // warn, never got + ^ +warn-unused-privates.scala:67: warning: private var s1 in class StableAccessors is never used private var s1: Int = 0 // warn ^ -warn-unused-privates.scala:58: warning: private var s2 in class StableAccessors is never updated: consider using immutable val +warn-unused-privates.scala:68: warning: private var s2 in class StableAccessors is never updated: consider using immutable val private var s2: Int = 0 // warn, never set ^ -warn-unused-privates.scala:59: warning: private var s3 in class StableAccessors is never used +warn-unused-privates.scala:69: warning: private var s3 in class StableAccessors is never used private var s3: Int = 0 // warn, never got ^ -warn-unused-privates.scala:71: warning: private default argument in trait DefaultArgs is never used +warn-unused-privates.scala:72: warning: local var s5 in class StableAccessors is never updated: consider using immutable val + private[this] var s5 = 0 // warn, never set + ^ +warn-unused-privates.scala:87: warning: private default argument in trait DefaultArgs is never used 
private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 ^ -warn-unused-privates.scala:71: warning: private default argument in trait DefaultArgs is never used +warn-unused-privates.scala:87: warning: private default argument in trait DefaultArgs is never used private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 ^ -warn-unused-privates.scala:104: warning: private object Dongo in object Types is never used +warn-unused-privates.scala:114: warning: local var x in method f2 is never updated: consider using immutable val + var x = 100 // warn about it being a var + ^ +warn-unused-privates.scala:120: warning: private object Dongo in object Types is never used private object Dongo { def f = this } // warn ^ -warn-unused-privates.scala:137: warning: private method x_= in class OtherNames is never used +warn-unused-privates.scala:121: warning: private class Bar1 in object Types is never used + private class Bar1 // warn + ^ +warn-unused-privates.scala:123: warning: private type Alias1 in object Types is never used + private type Alias1 = String // warn + ^ +warn-unused-privates.scala:153: warning: private method x_= in class OtherNames is never used private def x_=(i: Int): Unit = () ^ -warn-unused-privates.scala:138: warning: private method x in class OtherNames is never used +warn-unused-privates.scala:154: warning: private method x in class OtherNames is never used private def x: Int = 42 ^ -warn-unused-privates.scala:139: warning: private method y_= in class OtherNames is never used +warn-unused-privates.scala:155: warning: private method y_= in class OtherNames is never used private def y_=(i: Int): Unit = () ^ -warn-unused-privates.scala:105: warning: private class Bar1 in object Types is never used - private class Bar1 // warn - ^ -warn-unused-privates.scala:107: warning: private type Alias1 in object Types is never used - private type Alias1 = String // warn - ^ -warn-unused-privates.scala:217: warning: private class for your 
eyes only in object not even using companion privates is never used +warn-unused-privates.scala:233: warning: private class for your eyes only in object not even using companion privates is never used private implicit class `for your eyes only`(i: Int) { // warn ^ -warn-unused-privates.scala:233: warning: private class D in class nonprivate alias is enclosing is never used +warn-unused-privates.scala:249: warning: private class D in class nonprivate alias is enclosing is never used private class D extends C2 // warn ^ -warn-unused-privates.scala:98: warning: local var x in method f2 is never updated: consider using immutable val - var x = 100 // warn about it being a var - ^ -error: No warnings can be incurred under -Xfatal-warnings. -23 warnings found -one error found +warn-unused-privates.scala:269: warning: private val n in class t12992 enclosing def is unused is never used + private val n = 42 + ^ +warn-unused-privates.scala:274: warning: private method f in class recursive reference is not a usage is never used + private def f(i: Int): Int = // warn + ^ +warn-unused-privates.scala:277: warning: private class P in class recursive reference is not a usage is never used + private class P { + ^ +warn-unused-privates.scala:284: warning: private val There in class Constantly is never used + private final val There = "there" // warn + ^ +error: No warnings can be incurred under -Werror. 
+34 warnings +1 error diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala index fd408f89e007..91b5752fa44d 100644 --- a/test/files/neg/warn-unused-privates.scala +++ b/test/files/neg/warn-unused-privates.scala @@ -1,9 +1,11 @@ -// scalac: -Ywarn-unused:privates -Xfatal-warnings +// +//> using options -deprecation -Werror -Wunused:privates +// class Bippy(a: Int, b: Int) { private def this(c: Int) = this(c, c) // warn - private def bippy(x: Int): Int = bippy(x) // TODO: could warn + private def bippy(x: Int): Int = bippy(x) // warn private def boop(x: Int) = x+a+b // warn - final private val MILLIS1 = 2000 // no warn, might have been inlined + final private val MILLIS1 = 2000 // now warn, might have been inlined final private val MILLIS2: Int = 1000 // warn final private val HI_COMPANION: Int = 500 // no warn, accessed from companion def hi() = Bippy.HI_INSTANCE @@ -20,6 +22,8 @@ class B1(msg: String) extends A(msg) class B2(msg0: String) extends A(msg0) class B3(msg0: String) extends A("msg") +trait Bing + /*** Early defs warnings disabled primarily due to scala/bug#6595. * The test case is here to assure we aren't issuing false positives; * the ones labelled "warn" don't warn. 
@@ -32,11 +36,11 @@ class Boppy extends { final val himinline = him private val hum: String = "jkl" // warn final val ding = hmm.length -} with Mutable { +} with Bing { val dinger = hom private val hummer = "def" // warn - private final val bum = "ghi" // no warn, might have been (was) inlined + private final val bum = "ghi" // now warn, might have been (was) inlined final val bum2 = "ghi" // no warn, same } @@ -46,10 +50,16 @@ trait Accessors { private var v3: Int = 0 // warn, never got private var v4: Int = 0 // no warn + private[this] var v5 = 0 // warn, never set + private[this] var v6 = 0 // warn, never got + private[this] var v7 = 0 // no warn + def bippy(): Int = { - v3 = 5 - v4 = 6 - v2 + v4 + v3 = 3 + v4 = 4 + v6 = 6 + v7 = 7 + v2 + v4 + v5 + v7 } } @@ -59,10 +69,16 @@ class StableAccessors { private var s3: Int = 0 // warn, never got private var s4: Int = 0 // no warn + private[this] var s5 = 0 // warn, never set + private[this] var s6 = 0 // no warn, limitation + private[this] var s7 = 0 // no warn + def bippy(): Int = { - s3 = 5 - s4 = 6 - s2 + s4 + s3 = 3 + s4 = 4 + s6 = 6 + s7 = 7 + s2 + s4 + s5 + s7 } } @@ -244,3 +260,51 @@ trait `short comings` { 17 } } + +class `issue 12600 ignore abstract types` { + type Abs +} + +class `t12992 enclosing def is unused` { + private val n = 42 + @annotation.unused def f() = n + 2 // unused code uses n +} + +class `recursive reference is not a usage` { + private def f(i: Int): Int = // warn + if (i <= 0) i + else f(i-1) + private class P { + def f() = new P() + } +} + +class Constantly { + private final val Here = "here" + private final val There = "there" // warn + def bromide = Here + " today, gone tomorrow." 
+} + +class Annots { + import annotation._ + + trait T { + def value: Int + } + + class C { + private final val Here = "here" + private final val There = "msg=there" + def f(implicit @implicitNotFound(Here) t: T) = t.value + def x: String @nowarn(There) = "" + } + + // cf HashMap#mergeInto which looped on type of new unchecked + // case bm: BitmapIndexedMapNode[K, V] @unchecked => + class Weird[K, V] { + def f(other: Weird[K, V]) = + other match { + case weird: Weird[K, V] @unchecked => + } + } +} diff --git a/test/files/neg/warn-useless-svuid.check b/test/files/neg/warn-useless-svuid.check new file mode 100644 index 000000000000..02ff5a584115 --- /dev/null +++ b/test/files/neg/warn-useless-svuid.check @@ -0,0 +1,18 @@ +warn-useless-svuid.scala:4: warning: @SerialVersionUID has no effect on non-serializable classes +class X + ^ +warn-useless-svuid.scala:7: warning: @SerialVersionUID has no effect on non-serializable classes +class Y extends X + ^ +warn-useless-svuid.scala:19: warning: @SerialVersionUID has no effect on traits +trait T + ^ +warn-useless-svuid.scala:22: warning: @SerialVersionUID has no effect on traits +trait U extends scala.Serializable + ^ +warn-useless-svuid.scala:25: warning: @SerialVersionUID has no effect on traits +trait V extends java.io.Serializable + ^ +error: No warnings can be incurred under -Werror. 
+5 warnings +1 error diff --git a/test/files/neg/warn-useless-svuid.scala b/test/files/neg/warn-useless-svuid.scala new file mode 100644 index 000000000000..3253cb8ec8e0 --- /dev/null +++ b/test/files/neg/warn-useless-svuid.scala @@ -0,0 +1,25 @@ +//> using options -Xlint:serial -Xfatal-warnings +// +@SerialVersionUID(1L) +class X + +@SerialVersionUID(1L) +class Y extends X + +@SerialVersionUID(1L) +class Z extends scala.Serializable + +@SerialVersionUID(1L) +class W extends java.io.Serializable + +@SerialVersionUID(1L) +class Q extends Z + +@SerialVersionUID(1L) +trait T + +@SerialVersionUID(1L) +trait U extends scala.Serializable + +@SerialVersionUID(1L) +trait V extends java.io.Serializable diff --git a/test/files/neg/wconfSource1.check b/test/files/neg/wconfSource1.check index bb76a9643864..9efd33b3ae72 100644 --- a/test/files/neg/wconfSource1.check +++ b/test/files/neg/wconfSource1.check @@ -1,7 +1,8 @@ -wconfSource1.scala:9: error: method dep in class C is deprecated: +wconfSource1.scala:9: error: method dep in class C is deprecated +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=deprecation, site=C.t, origin=C.dep def t = dep ^ wconfSource1.scala:10: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses def u = { 1; 2 } ^ -one error found +1 error diff --git a/test/files/neg/wconfSource1.scala b/test/files/neg/wconfSource1.scala index 19e191de5a19..0e7e8805170d 100644 --- a/test/files/neg/wconfSource1.scala +++ b/test/files/neg/wconfSource1.scala @@ -1,4 +1,4 @@ -// scalac: -Wconf:src=.*Source.*&cat=deprecation:e,src=Source1.scala&msg=statement:e,src=wconfSource1&msg=statement:e,src=wconfSource1.scala&msg=statement:i +//> using options -Wconf:src=.*Source.*&cat=deprecation:e,src=Source1.scala&msg=statement:e,src=wconfSource1&msg=statement:e,src=wconfSource1.scala&msg=statement:i // src=Source1.scala doesn't match: the pattern needs to start at a path segment (after `/`) // 
src=wconfSource1 doesn't match: the pattern needs to match to the end of the path (.scala) diff --git a/test/files/neg/wconfSource2.check b/test/files/neg/wconfSource2.check index c91fcfc76419..b32bdf320e0b 100644 --- a/test/files/neg/wconfSource2.check +++ b/test/files/neg/wconfSource2.check @@ -6,10 +6,11 @@ See the Scaladoc for value scala.language.reflectiveCalls for a discussion why the feature should be explicitly enabled. def v(a: { def f: Int }) = a.f ^ -wconfSource2.scala:5: error: method dep in class C is deprecated: +wconfSource2.scala:5: error: method dep in class C is deprecated +Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=deprecation, site=C.t, origin=C.dep def t = dep ^ wconfSource2.scala:6: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses def u = { 1; 2 } ^ -one error found +1 error diff --git a/test/files/neg/wconfSource2.scala b/test/files/neg/wconfSource2.scala index 022356993884..549476972c76 100644 --- a/test/files/neg/wconfSource2.scala +++ b/test/files/neg/wconfSource2.scala @@ -1,4 +1,4 @@ -// scalac: -Wconf:src=test/files/neg/wconfSource2.scala&cat=deprecation:e,src=test/.*&msg=statement:i,src=/neg/.*&cat=feature-reflective-calls:i +//> using options -Wconf:src=test/files/neg/wconfSource2.scala&cat=deprecation:e,src=test/.*&msg=statement:i,src=/neg/.*&cat=feature-reflective-calls:i class C { @deprecated("", "") def dep = 0 diff --git a/test/files/neg/wellkinded_app.check b/test/files/neg/wellkinded_app.check index d57a0e4b5648..4686a6188647 100644 --- a/test/files/neg/wellkinded_app.check +++ b/test/files/neg/wellkinded_app.check @@ -1,4 +1,4 @@ wellkinded_app.scala:3: error: x does not take type parameters type t = x[x] ^ -one error found +1 error diff --git a/test/files/neg/wellkinded_app2.check b/test/files/neg/wellkinded_app2.check index 20a177ea590e..ee1ecbbe9d80 100644 --- a/test/files/neg/wellkinded_app2.check +++ 
b/test/files/neg/wellkinded_app2.check @@ -1,4 +1,4 @@ wellkinded_app2.scala:3: error: s does not take type parameters val foo: s[Int] ^ -one error found +1 error diff --git a/test/files/neg/wellkinded_bounds.check b/test/files/neg/wellkinded_bounds.check index 806eb09a76d7..6daad7c99168 100644 --- a/test/files/neg/wellkinded_bounds.check +++ b/test/files/neg/wellkinded_bounds.check @@ -1,4 +1,4 @@ wellkinded_bounds.scala:2: error: type List takes type parameters class WellKindedWrongSyntax[s <: List] { // must be s[x] <: List[x] ^ -one error found +1 error diff --git a/test/files/neg/wellkinded_wrongarity.check b/test/files/neg/wellkinded_wrongarity.check index b9f033b4536b..30de12fa0c02 100644 --- a/test/files/neg/wellkinded_wrongarity.check +++ b/test/files/neg/wellkinded_wrongarity.check @@ -1,4 +1,4 @@ -wellkinded_wrongarity.scala:5: error: Tuple2 takes two type parameters, expected: one +wellkinded_wrongarity.scala:5: error: Tuple2 takes 2 type parameters, expected: 1 object mp extends Monad[Tuple2] ^ -one error found +1 error diff --git a/test/files/neg/wellkinded_wrongarity2.check b/test/files/neg/wellkinded_wrongarity2.check index 922f73381e29..15233f9215e8 100644 --- a/test/files/neg/wellkinded_wrongarity2.check +++ b/test/files/neg/wellkinded_wrongarity2.check @@ -1,13 +1,13 @@ -wellkinded_wrongarity2.scala:5: error: String takes no type parameters, expected: one +wellkinded_wrongarity2.scala:5: error: String takes no type parameters, expected: 1 trait ms1 extends Monad[String] // wrong ^ -wellkinded_wrongarity2.scala:6: error: t takes no type parameters, expected: one +wellkinded_wrongarity2.scala:6: error: t takes no type parameters, expected: 1 trait ms2[t] extends Monad[t] // wrong ^ -wellkinded_wrongarity2.scala:7: error: m[t] takes no type parameters, expected: one +wellkinded_wrongarity2.scala:7: error: m[t] takes no type parameters, expected: 1 trait ms3[m[_], t] extends Monad[m[t]] // wrong -- added to check regression on bug ^ 
wellkinded_wrongarity2.scala:12: error: type m takes type parameters trait Bar2[m[_]] extends Foo[m] // check that m is properly recognized as kind *->*, while * is expected ^ -four errors found +4 errors diff --git a/test/files/neg/wildcards-future.check b/test/files/neg/wildcards-future.check index a5b4b23520f3..31f116c7e547 100644 --- a/test/files/neg/wildcards-future.check +++ b/test/files/neg/wildcards-future.check @@ -1,11 +1,11 @@ wildcards-future.scala:7: error: type mismatch; - found : Map[_$1,_$2] where type _$2 >: Null, type _$1 <: AnyRef + found : scala.collection.immutable.Map[_$1,Any] where type _$1 <: AnyRef required: Map[String,String] underscores : Map[String, String] // error wildcard variables starting with `_` ^ wildcards-future.scala:9: error: type mismatch; - found : Map[?$1,?$2] where type ?$2 >: Null, type ?$1 <: AnyRef + found : scala.collection.immutable.Map[?$1,Any] where type ?$1 <: AnyRef required: Map[String,String] qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax ^ -two errors found +2 errors diff --git a/test/files/neg/wildcards-future.scala b/test/files/neg/wildcards-future.scala index 54b7675813e7..f69e0b02f79d 100644 --- a/test/files/neg/wildcards-future.scala +++ b/test/files/neg/wildcards-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // object Test { val underscores: Map[_ <: AnyRef, _ >: Null] = Map() diff --git a/test/files/neg/wrong-args-for-none.check b/test/files/neg/wrong-args-for-none.check index d3b2d572ab71..f7725c250398 100644 --- a/test/files/neg/wrong-args-for-none.check +++ b/test/files/neg/wrong-args-for-none.check @@ -1,4 +1,4 @@ -wrong-args-for-none.scala:5: error: wrong number of arguments for pattern Test.Foo(x: Int,y: Int) +wrong-args-for-none.scala:5: error: wrong number of arguments for pattern Test.Foo(x: Int, y: Int) def f(x: Any) = x match { case Bar(Foo(5)) => } ^ -one error found +1 error diff --git 
a/test/files/neg/xlint-captured.check b/test/files/neg/xlint-captured.check new file mode 100644 index 000000000000..36584ac470c3 --- /dev/null +++ b/test/files/neg/xlint-captured.check @@ -0,0 +1,9 @@ +xlint-captured.scala:10: warning: return statement uses an exception to pass control to the caller of the enclosing named method f + def f(): Unit = List(42).foreach(i => if (i > 27) return) + ^ +xlint-captured.scala:5: warning: Modification of variable c within a closure causes it to be boxed. + var c = a // nok + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/xlint-captured.scala b/test/files/neg/xlint-captured.scala new file mode 100644 index 000000000000..ded04c983428 --- /dev/null +++ b/test/files/neg/xlint-captured.scala @@ -0,0 +1,11 @@ +//> using options -Werror -Wperformance +object Test { + var a, b = 0 // ok + def mkStrangeCounter(): Int => Int = { + var c = a // nok + object _d { var d = b }; import _d._ // ok + e => { c += a; d += b; a *= b; b -= c; c ^ d } + } + + def f(): Unit = List(42).foreach(i => if (i > 27) return) +} diff --git a/test/files/neg/xml-attributes.check b/test/files/neg/xml-attributes.check new file mode 100644 index 000000000000..3d161574dbf5 --- /dev/null +++ b/test/files/neg/xml-attributes.check @@ -0,0 +1,5 @@ +xml-attributes.scala:4: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. 
+ + ^ +1 error diff --git a/test/files/neg/xml-attributes.scala b/test/files/neg/xml-attributes.scala new file mode 100644 index 000000000000..99daea57edf1 --- /dev/null +++ b/test/files/neg/xml-attributes.scala @@ -0,0 +1,8 @@ +object foo { + val bar = "baz" + val xml = + + + + +} diff --git a/test/files/neg/xml-comments.check b/test/files/neg/xml-comments.check new file mode 100644 index 000000000000..e933df8c0705 --- /dev/null +++ b/test/files/neg/xml-comments.check @@ -0,0 +1,5 @@ +xml-comments.scala:4: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. + + ^ +1 error diff --git a/test/files/neg/xml-comments.scala b/test/files/neg/xml-comments.scala new file mode 100644 index 000000000000..8ce992ef8044 --- /dev/null +++ b/test/files/neg/xml-comments.scala @@ -0,0 +1,8 @@ +object foo { + val bar = "baz" + val xml = + + + + +} diff --git a/test/files/neg/xml-doctype.check b/test/files/neg/xml-doctype.check index 0612cef5aac9..85a7877faaee 100644 --- a/test/files/neg/xml-doctype.check +++ b/test/files/neg/xml-doctype.check @@ -7,4 +7,4 @@ xml-doctype.scala:4: error: in XML literal: '-' expected instead of 'O' xml-doctype.scala:7: error: input ended while parsing XML } ^ -three errors found +3 errors diff --git a/test/files/neg/xml-entitydecl.check b/test/files/neg/xml-entitydecl.check index 71f1292b8eea..f7ed607b5a78 100644 --- a/test/files/neg/xml-entitydecl.check +++ b/test/files/neg/xml-entitydecl.check @@ -7,4 +7,4 @@ xml-entitydecl.scala:4: error: in XML literal: '-' expected instead of 'O' xml-entitydecl.scala:9: error: input ended while parsing XML } ^ -three errors found +3 errors diff --git a/test/files/neg/xml-entityref.check b/test/files/neg/xml-entityref.check new file mode 100644 index 000000000000..90497ad99c6c --- /dev/null +++ b/test/files/neg/xml-entityref.check @@ -0,0 +1,5 @@ +xml-entityref.scala:4: error: To compile XML syntax, the scala.xml package must be 
on the classpath. +Please see https://github.com/scala/scala-xml for details. + + ^ +1 error diff --git a/test/files/neg/xml-entityref.scala b/test/files/neg/xml-entityref.scala new file mode 100644 index 000000000000..e78e684e932b --- /dev/null +++ b/test/files/neg/xml-entityref.scala @@ -0,0 +1,7 @@ +object foo { + val bar = "baz" + val xml = + + & " ' < > + +} diff --git a/test/files/neg/xml-match.check b/test/files/neg/xml-match.check new file mode 100644 index 000000000000..9ebf57ef817b --- /dev/null +++ b/test/files/neg/xml-match.check @@ -0,0 +1,5 @@ +xml-match.scala:3: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. + case { _* } => + ^ +1 error diff --git a/test/files/neg/xml-match.scala b/test/files/neg/xml-match.scala new file mode 100644 index 000000000000..dcbcc104a836 --- /dev/null +++ b/test/files/neg/xml-match.scala @@ -0,0 +1,22 @@ +object foo { + def bar(e: Elem) = e match { + case { _* } => + case { _ } => + case {{3}} => {{3}} + case {{3}} => {{3}} + case { x } if x.toString.toInt < 4 => + { x.toString.toInt + 1 } + } + def bar(n: Node) = n match { + case { _* } => + case { _ } => + case {{3}} => {{3}} + case {{3}} => {{3}} + case { x } if x.toString.toInt < 4 => + { x.toString.toInt + 1 } + } + def bar(n: Any) = null match { + // illegal - bug #1764 + case

    { _* }

    => + } +} diff --git a/test/files/neg/xml-ns-empty.check b/test/files/neg/xml-ns-empty.check new file mode 100644 index 000000000000..29c1cad6e4f5 --- /dev/null +++ b/test/files/neg/xml-ns-empty.check @@ -0,0 +1,5 @@ +xml-ns-empty.scala:3: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. + + ^ +1 error diff --git a/test/files/neg/xml-ns-empty.scala b/test/files/neg/xml-ns-empty.scala new file mode 100644 index 000000000000..b937a53590da --- /dev/null +++ b/test/files/neg/xml-ns-empty.scala @@ -0,0 +1,5 @@ +object foo { + val n = + + n.namespace == null +} diff --git a/test/files/neg/xml-parens.check b/test/files/neg/xml-parens.check new file mode 100644 index 000000000000..2d17c5680bb3 --- /dev/null +++ b/test/files/neg/xml-parens.check @@ -0,0 +1,5 @@ +xml-parens.scala:4: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. + () + ^ +1 error diff --git a/test/files/neg/xml-parens.scala b/test/files/neg/xml-parens.scala new file mode 100644 index 000000000000..b97bf73442b4 --- /dev/null +++ b/test/files/neg/xml-parens.scala @@ -0,0 +1,5 @@ +//> using options -Ystop-after:typer +// +object Test { + () +} diff --git a/test/files/neg/xml-pcdata.check b/test/files/neg/xml-pcdata.check new file mode 100644 index 000000000000..a08d508d040e --- /dev/null +++ b/test/files/neg/xml-pcdata.check @@ -0,0 +1,5 @@ +xml-pcdata.scala:4: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. 
+ + ^ +1 error diff --git a/test/files/neg/xml-pcdata.scala b/test/files/neg/xml-pcdata.scala new file mode 100644 index 000000000000..f0b678659657 --- /dev/null +++ b/test/files/neg/xml-pcdata.scala @@ -0,0 +1,6 @@ +object foo { + val bar = "baz" + val cdata = + + +} diff --git a/test/files/neg/xml-procinstr.check b/test/files/neg/xml-procinstr.check new file mode 100644 index 000000000000..0a17dc953444 --- /dev/null +++ b/test/files/neg/xml-procinstr.check @@ -0,0 +1,5 @@ +xml-procinstr.scala:4: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. + + ^ +1 error diff --git a/test/files/neg/xml-procinstr.scala b/test/files/neg/xml-procinstr.scala new file mode 100644 index 000000000000..08cbd2e43000 --- /dev/null +++ b/test/files/neg/xml-procinstr.scala @@ -0,0 +1,8 @@ +object foo { + val bar = "baz" + val xml = + + + + +} diff --git a/test/files/neg/xml-quasiquote.check b/test/files/neg/xml-quasiquote.check new file mode 100644 index 000000000000..40371b8cb690 --- /dev/null +++ b/test/files/neg/xml-quasiquote.check @@ -0,0 +1,5 @@ +:1: error: To compile XML syntax, the scala.xml package must be on the classpath. +Please see https://github.com/scala/scala-xml for details. 
+ +^ +1 error diff --git a/test/files/neg/xml-quasiquote.scala b/test/files/neg/xml-quasiquote.scala new file mode 100644 index 000000000000..0d6e62c1edae --- /dev/null +++ b/test/files/neg/xml-quasiquote.scala @@ -0,0 +1,6 @@ +import reflect.runtime.universe._ + +object foo { + val ns1 = + q"" +} diff --git a/test/files/neg/xml-quasiquote2.check b/test/files/neg/xml-quasiquote2.check new file mode 100644 index 000000000000..820b76a842d0 --- /dev/null +++ b/test/files/neg/xml-quasiquote2.check @@ -0,0 +1,4 @@ +xml-quasiquote2.scala:5: error: input ended while parsing XML + q"" + ^ +1 error diff --git a/test/files/neg/xml-quasiquote2.scala b/test/files/neg/xml-quasiquote2.scala new file mode 100644 index 000000000000..d0d7d848476c --- /dev/null +++ b/test/files/neg/xml-quasiquote2.scala @@ -0,0 +1,6 @@ +import reflect.runtime.universe._ + +object foo { + val ns1 = + q"" +} diff --git a/test/files/neg/xmlcorner.check b/test/files/neg/xmlcorner.check index 8791829e50c2..9023938f4df5 100644 --- a/test/files/neg/xmlcorner.check +++ b/test/files/neg/xmlcorner.check @@ -4,4 +4,4 @@ xmlcorner.scala:2: error: illegal start of simple expression xmlcorner.scala:5: error: in XML literal: name cannot end in ':' val wrong = ^ -two errors found +2 errors diff --git a/test/files/neg/xmltruncated1.check b/test/files/neg/xmltruncated1.check index 36daa342e56d..712c9ec7afa9 100644 --- a/test/files/neg/xmltruncated1.check +++ b/test/files/neg/xmltruncated1.check @@ -1,4 +1,4 @@ xmltruncated1.scala:2: error: input ended while parsing XML val stuff = ^ -one error found +1 error diff --git a/test/files/neg/xmltruncated2.check b/test/files/neg/xmltruncated2.check index f1de059f84e9..d4b7259d1c51 100644 --- a/test/files/neg/xmltruncated2.check +++ b/test/files/neg/xmltruncated2.check @@ -1,4 +1,4 @@ xmltruncated2.scala:2: error: input ended while parsing XML val stuff = ^ -one error found +1 error diff --git a/test/files/neg/xmltruncated6.check b/test/files/neg/xmltruncated6.check index 
f638f2f09056..027147ed7154 100644 --- a/test/files/neg/xmltruncated6.check +++ b/test/files/neg/xmltruncated6.check @@ -1,4 +1,4 @@ xmltruncated6.scala:2: error: in XML literal: expected end of Scala block val stuff = { "no closing brace" ^ -one error found +1 error diff --git a/test/files/neg/xmltruncated7.check b/test/files/neg/xmltruncated7.check index 9cb8b875d2d7..85b919c9a604 100644 --- a/test/files/neg/xmltruncated7.check +++ b/test/files/neg/xmltruncated7.check @@ -4,4 +4,4 @@ xmltruncated7.scala:2: error: in XML literal: in XML content, please use '}}' to xmltruncated7.scala:2: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed

    foo}:

    ^ -two errors found +2 errors diff --git a/test/files/neg/xmltruncated7.scala b/test/files/neg/xmltruncated7.scala index 7e296a910db9..466b169df860 100644 --- a/test/files/neg/xmltruncated7.scala +++ b/test/files/neg/xmltruncated7.scala @@ -1,3 +1,3 @@ object Test {

    foo}:

    -} \ No newline at end of file +} diff --git a/test/files/neg/yimports-custom-b.check b/test/files/neg/yimports-custom-b.check new file mode 100644 index 000000000000..5eb02fa8703d --- /dev/null +++ b/test/files/neg/yimports-custom-b.check @@ -0,0 +1,7 @@ +C_2.scala:8: error: not found: type Numb + val v: Numb = Answer + ^ +C_2.scala:9: error: not found: value println + def greet() = println("hello, world!") + ^ +2 errors diff --git a/test/files/neg/yimports-custom-b/C_2.scala b/test/files/neg/yimports-custom-b/C_2.scala new file mode 100644 index 000000000000..37901f6cbeba --- /dev/null +++ b/test/files/neg/yimports-custom-b/C_2.scala @@ -0,0 +1,10 @@ +//> using options -Yimports:hello.world.minidef + +import hello.{world => hw} +import hw.minidef.{Magic => Answer} + +// Finds the answer, but dumb to forget Numb +class C { + val v: Numb = Answer + def greet() = println("hello, world!") +} diff --git a/test/files/neg/yimports-custom-b/minidef_1.scala b/test/files/neg/yimports-custom-b/minidef_1.scala new file mode 100644 index 000000000000..78d2f3c03bfc --- /dev/null +++ b/test/files/neg/yimports-custom-b/minidef_1.scala @@ -0,0 +1,8 @@ +//> using options -Yimports:scala + +package hello.world + +object minidef { + type Numb = Int + final val Magic = 42 +} diff --git a/test/files/neg/yimports-custom.check b/test/files/neg/yimports-custom.check new file mode 100644 index 000000000000..f3ed373f1952 --- /dev/null +++ b/test/files/neg/yimports-custom.check @@ -0,0 +1,4 @@ +C_2.scala:5: error: not found: value println + def greet() = println("hello, world!") + ^ +1 error diff --git a/test/files/neg/yimports-custom/C_2.scala b/test/files/neg/yimports-custom/C_2.scala new file mode 100644 index 000000000000..5a5482c51058 --- /dev/null +++ b/test/files/neg/yimports-custom/C_2.scala @@ -0,0 +1,6 @@ +//> using options -Yimports:hello.world.minidef + +class C { + val v: Numb = Magic + def greet() = println("hello, world!") +} diff --git 
a/test/files/neg/yimports-custom/minidef_1.scala b/test/files/neg/yimports-custom/minidef_1.scala new file mode 100644 index 000000000000..5d18d0a39584 --- /dev/null +++ b/test/files/neg/yimports-custom/minidef_1.scala @@ -0,0 +1,7 @@ + +package hello.world + +object minidef { + type Numb = Int + final val Magic = 42 +} diff --git a/test/files/neg/yimports-masked.check b/test/files/neg/yimports-masked.check new file mode 100644 index 000000000000..b7f6929d65f1 --- /dev/null +++ b/test/files/neg/yimports-masked.check @@ -0,0 +1,7 @@ +C_2.scala:11: error: not found: type Numb + val v: Numb = Answer + ^ +C_2.scala:12: error: not found: value println + def greet() = println("hello, world!") + ^ +2 errors diff --git a/test/files/neg/yimports-masked/C_2.scala b/test/files/neg/yimports-masked/C_2.scala new file mode 100644 index 000000000000..88d6e61a3e4b --- /dev/null +++ b/test/files/neg/yimports-masked/C_2.scala @@ -0,0 +1,14 @@ +//> using options -Yimports:scala,hello.world.minidef + +// import at top level or top of package disables implicit import. +// the import can appear at any statement position, here, end of package. +// Update: with new trick, the import has to be completed before usages. 
+ +import hello.world.minidef.{Magic => Answer} + +package p { + class C { + val v: Numb = Answer + def greet() = println("hello, world!") + } +} diff --git a/test/files/neg/yimports-masked/minidef_1.scala b/test/files/neg/yimports-masked/minidef_1.scala new file mode 100644 index 000000000000..5d18d0a39584 --- /dev/null +++ b/test/files/neg/yimports-masked/minidef_1.scala @@ -0,0 +1,7 @@ + +package hello.world + +object minidef { + type Numb = Int + final val Magic = 42 +} diff --git a/test/files/neg/yimports-nojava.check b/test/files/neg/yimports-nojava.check new file mode 100644 index 000000000000..1cddbe213f19 --- /dev/null +++ b/test/files/neg/yimports-nojava.check @@ -0,0 +1,7 @@ +yimports-nojava.scala:5: error: not found: type Integer + def g() = new Integer(42) + ^ +yimports-nojava.scala:6: error: not found: value Thread + def sleep() = Thread.sleep(42000L) + ^ +2 errors diff --git a/test/files/neg/yimports-nojava.scala b/test/files/neg/yimports-nojava.scala new file mode 100644 index 000000000000..d7ef3646cdf4 --- /dev/null +++ b/test/files/neg/yimports-nojava.scala @@ -0,0 +1,7 @@ +//> using options -Yimports:scala,scala.Predef + +trait T { + def f() = println("hello, world!") + def g() = new Integer(42) + def sleep() = Thread.sleep(42000L) +} diff --git a/test/files/neg/yimports-nosuch.check b/test/files/neg/yimports-nosuch.check new file mode 100644 index 000000000000..acbf7aa19ab5 --- /dev/null +++ b/test/files/neg/yimports-nosuch.check @@ -0,0 +1,3 @@ +error: bad preamble import skala +error: bad preamble import scala.Predeff +2 errors diff --git a/test/files/neg/yimports-nosuch.scala b/test/files/neg/yimports-nosuch.scala new file mode 100644 index 000000000000..617f0b84ba12 --- /dev/null +++ b/test/files/neg/yimports-nosuch.scala @@ -0,0 +1,3 @@ +//> using options -Yimports:skala,scala.Predeff +// +class C diff --git a/test/files/neg/yimports-order.check b/test/files/neg/yimports-order.check new file mode 100644 index 000000000000..4664fa9d624d --- 
/dev/null +++ b/test/files/neg/yimports-order.check @@ -0,0 +1,10 @@ +yimports-order.scala:9: error: not found: value Map + def f() = Map("hello" -> "world") + ^ +yimports-order.scala:9: error: value -> is not a member of String + def f() = Map("hello" -> "world") + ^ +yimports-order.scala:10: error: not found: value println + def g() = println(f()) + ^ +3 errors diff --git a/test/files/neg/yimports-order.scala b/test/files/neg/yimports-order.scala new file mode 100644 index 000000000000..9de528b6cc85 --- /dev/null +++ b/test/files/neg/yimports-order.scala @@ -0,0 +1,13 @@ + +package top { + package middle { + class C { + def c() = println("hello, world") + } + import Predef.{Map => _} + object Test { + def f() = Map("hello" -> "world") + def g() = println(f()) + } + } +} diff --git a/test/files/neg/yimports-predef.check b/test/files/neg/yimports-predef.check new file mode 100644 index 000000000000..ff06d7eae635 --- /dev/null +++ b/test/files/neg/yimports-predef.check @@ -0,0 +1,4 @@ +yimports-predef.scala:6: error: value + is not a member of type parameter A + def f[A](x: A) = x + 42 + ^ +1 error diff --git a/test/files/neg/yimports-predef.scala b/test/files/neg/yimports-predef.scala new file mode 100644 index 000000000000..d6b2b7be4197 --- /dev/null +++ b/test/files/neg/yimports-predef.scala @@ -0,0 +1,7 @@ +//> using options -Yimports:scala,scala.Predef +// +import Predef.{any2stringadd => _, _} + +class classic { + def f[A](x: A) = x + 42 +} diff --git a/test/files/neg/yimports-stable.check b/test/files/neg/yimports-stable.check new file mode 100644 index 000000000000..e0d01e1b9eb0 --- /dev/null +++ b/test/files/neg/yimports-stable.check @@ -0,0 +1,2 @@ +error: bad preamble import hello.world.potions +1 error diff --git a/test/files/neg/yimports-stable/C_2.scala b/test/files/neg/yimports-stable/C_2.scala new file mode 100644 index 000000000000..6c57e369be71 --- /dev/null +++ b/test/files/neg/yimports-stable/C_2.scala @@ -0,0 +1,6 @@ +//> using options 
-Yimports:scala,scala.Predef,hello.world.potions +// +class C { + val v: Numb = magic + def greet() = println("hello, world!") +} diff --git a/test/files/neg/yimports-stable/minidef_1.scala b/test/files/neg/yimports-stable/minidef_1.scala new file mode 100644 index 000000000000..b3ea7445df24 --- /dev/null +++ b/test/files/neg/yimports-stable/minidef_1.scala @@ -0,0 +1,11 @@ + +package hello + +trait stuff { + type Numb = Int + val magic = 42 +} + +object world { + val potions = new stuff {} +} diff --git a/test/files/pos/11484/A_2.java b/test/files/pos/11484/A_2.java new file mode 100644 index 000000000000..aa8ef2cf5a42 --- /dev/null +++ b/test/files/pos/11484/A_2.java @@ -0,0 +1 @@ +public class A_2 extends C { } diff --git a/test/files/pos/11484/C_1.scala b/test/files/pos/11484/C_1.scala new file mode 100644 index 000000000000..48f5bd8174cf --- /dev/null +++ b/test/files/pos/11484/C_1.scala @@ -0,0 +1,6 @@ +class B[A] +sealed trait T[A] { + def overloaded(that: List[T[A]]): T[A] = that.head + def overloaded(that: List[B[A]]): B[A] = that.head +} +abstract class C[A] extends T[A] diff --git a/test/files/pos/11512/A_2.java b/test/files/pos/11512/A_2.java new file mode 100644 index 000000000000..ed549568a5f4 --- /dev/null +++ b/test/files/pos/11512/A_2.java @@ -0,0 +1 @@ +public class A_2 extends C { } diff --git a/test/files/pos/11512/C_1.scala b/test/files/pos/11512/C_1.scala new file mode 100644 index 000000000000..8e791e333a82 --- /dev/null +++ b/test/files/pos/11512/C_1.scala @@ -0,0 +1,7 @@ +trait T { this: U => + def m: Int +} +trait U { + def m: Int = ??? 
+} +abstract class C extends U with T diff --git a/test/files/pos/S1.scala b/test/files/pos/S1.scala index 68706e3dd3b0..fa9f381dd040 100644 --- a/test/files/pos/S1.scala +++ b/test/files/pos/S1.scala @@ -8,6 +8,24 @@ ** ^ */ class S1() { - def foo[T <: this.type](x: T) = x; - foo[this.type](this); + def foo[T <: this.type](x: T) = x + def f = foo[this.type](this) } + +class S2() { + def foo[T <: this.type](x: T) = x + def f = foo(this) +} +/* + * +$ scalac -d /tmp test/files/pos/S1.scala +test/files/pos/S1.scala:17: error: inferred type arguments [S2] do not conform to method foo's type parameter bounds [T <: S2.this.type] + def f = foo(this) + ^ +test/files/pos/S1.scala:17: error: type mismatch; + found : S2 + required: T + def f = foo(this) + ^ +two errors found + */ diff --git a/test/files/pos/Transactions.scala b/test/files/pos/Transactions.scala index 32889f8180f9..dc33e8c377a2 100644 --- a/test/files/pos/Transactions.scala +++ b/test/files/pos/Transactions.scala @@ -71,7 +71,7 @@ trait Transactional { null } - def getter(thisTrans: Transaction) { + def getter(thisTrans: Transaction): Unit = { if (writer == thisTrans) return var r = readers while (r != null && r.head.status != Transaction.Running) { r = r.next; readers = r } @@ -91,7 +91,7 @@ trait Transactional { } } - def setter(thisTrans: Transaction) { + def setter(thisTrans: Transaction): Unit = { if (writer == thisTrans) return synchronized { val w = currentWriter() diff --git a/test/files/pos/aladdin883.scala b/test/files/pos/aladdin883.scala new file mode 100644 index 000000000000..792b554477ef --- /dev/null +++ b/test/files/pos/aladdin883.scala @@ -0,0 +1,11 @@ +import scala.language.implicitConversions + +trait Foo[A] { + implicit def convert(a : A) : Ordered[A]; + class Filter(f : A => Boolean) extends Foo[A] { + implicit def convert(a : A) = Foo.this.convert(a); + } + class Range(x : A, y : A) extends Filter(a => { + (a).compare(x) >= 0 && (a).compare(y) < 0; + }) {} +} diff --git 
a/test/files/pos/alladin763.scala b/test/files/pos/alladin763.scala index 29c9b2531877..90d0c7cb8d08 100644 --- a/test/files/pos/alladin763.scala +++ b/test/files/pos/alladin763.scala @@ -1,4 +1,4 @@ -// Test from http://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=763.html +// Test from https://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=763.html // and expanded with package object variants diff --git a/test/files/pos/ambiguous-same/p.scala b/test/files/pos/ambiguous-same/p.scala new file mode 100644 index 000000000000..7c84116409be --- /dev/null +++ b/test/files/pos/ambiguous-same/p.scala @@ -0,0 +1,5 @@ + +package p +object X { + val x = 42 +} diff --git a/test/files/pos/ambiguous-same/test.scala b/test/files/pos/ambiguous-same/test.scala new file mode 100644 index 000000000000..a70739ccc8d7 --- /dev/null +++ b/test/files/pos/ambiguous-same/test.scala @@ -0,0 +1,35 @@ + +import p._ + +package p { + class Y { + // strictly, X is ambiguous because + // X made available by the foreign definition in p + // cannot shadow the import. + // But if the imported symbol is the same, + // cut them some slack. + // Note that since the import is not required here, + // the import is unused with respect to this expression, + // for purposes of linting. + def f = X.x + } +} + +package q { + class Y { + // Putting the import at the top of the file is casual, + // but it is useful here, where the import is required. + // Test code using this idiom was fixed by moving import + // inside the definition of q. 
+ def f = X.x + } +} + +/* +test.scala:9: error: reference to X is ambiguous; +it is both defined in package p and imported subsequently by +import p._ + def f = X.x + ^ +one error found +*/ diff --git a/test/files/pos/and-future.scala b/test/files/pos/and-future.scala index b09e0e8ce411..27ace4bc8d54 100644 --- a/test/files/pos/and-future.scala +++ b/test/files/pos/and-future.scala @@ -1,11 +1,11 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // trait X trait Y class Test[A, B <: A & AnyRef] { - def foo[T >: A & Null <: A & AnyRef & Any](x: T & String): String & T = x + def foo[T >: A & Null <: A & AnyRef & Any](x: T & ""): "" & T = x val a: X & Y & AnyRef = new X with Y {} val b: (X & Y) & AnyRef = new X with Y {} diff --git a/test/files/pos/annot-inner.scala b/test/files/pos/annot-inner.scala index 9f155a5a834c..fa9691e0b51d 100644 --- a/test/files/pos/annot-inner.scala +++ b/test/files/pos/annot-inner.scala @@ -1,7 +1,7 @@ object test { class annot extends scala.annotation.Annotation - def foo { + def foo: Unit = { @annot def bar(i: Int): Int = i @annot class Silly { } bar(5) diff --git a/test/files/pos/annotated-original/M_1.scala b/test/files/pos/annotated-original/M_1.scala index 84a01bcce59a..a7c1fb30a8c2 100644 --- a/test/files/pos/annotated-original/M_1.scala +++ b/test/files/pos/annotated-original/M_1.scala @@ -3,5 +3,5 @@ import scala.reflect.macros.blackbox.Context object M { def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.untypecheck(a.tree)) - def m(a: Any) = macro impl + def m(a: Any): Any = macro impl } diff --git a/test/files/pos/annotated-outer.scala b/test/files/pos/annotated-outer.scala new file mode 100644 index 000000000000..51a26f2aa0b4 --- /dev/null +++ b/test/files/pos/annotated-outer.scala @@ -0,0 +1,12 @@ +//> using options -Werror +object Test { + trait MySet[A] + trait MyMap[K, +V] { + class Keys extends MySet[K] + } + + def areKeys[A](xs: MySet[A]) = xs match { + case _: (MyMap[A, _] @unchecked)#Keys => true + case _ => 
false + } +} diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala index 8fed06e4f629..986287dfa088 100644 --- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala +++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context import collection.mutable.ListBuffer diff --git a/test/files/pos/annotated-treecopy/Test_2.scala b/test/files/pos/annotated-treecopy/Test_2.scala index 1dc433c77798..1c6b862efe17 100644 --- a/test/files/pos/annotated-treecopy/Test_2.scala +++ b/test/files/pos/annotated-treecopy/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros._ // tree { (x:((Int,Int,Int),(Int,Int,Int))) => { val y=x; val ((r1,m1,c1),(r2,m2,c2))=y; (r1, m1 + m2 + r1 * c1 * c2, c2) } } diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala index 8ab994dacef3..a84b52314f7c 100644 --- a/test/files/pos/annotations.scala +++ b/test/files/pos/annotations.scala @@ -1,8 +1,7 @@ class ann(i: Int) extends scala.annotation.Annotation -class cfann(x: String) extends annotation.ClassfileAnnotation +class cfann(x: String) extends annotation.ConstantAnnotation // annotations on abstract types -abstract class C1[@annotation.elidable(0) +T, U, V[_]] abstract class C2[@deprecated @ann(1) T <: Number, V] @@ -46,7 +45,7 @@ object Test { class BeanPropertyTests { @scala.beans.BeanProperty lazy val lv1 = 0 - def foo() { + def foo(): Unit = { val bp1 = new BeanPropertyTests1 println(lv1) @@ -70,7 +69,7 @@ class BeanPropertyTests1 { // test mixin of getters / setters, and implementing abstract // methods using @BeanProperty class C extends T with BeanF { - def foo() { + def foo(): Unit = { setF("doch!") setG(true) this.getF() @@ -91,9 +90,9 @@ trait BeanF { } -class Ann3(arr: Array[String]) extends 
annotation.ClassfileAnnotation -class Ann4(i: Int) extends annotation.ClassfileAnnotation -class Ann5(value: Class[_]) extends annotation.ClassfileAnnotation +class Ann3(arr: Array[String]) extends annotation.ConstantAnnotation +class Ann4(i: Int) extends annotation.ConstantAnnotation +class Ann5(value: Class[_]) extends annotation.ConstantAnnotation object Test3 { final val i = 1083 @@ -104,9 +103,9 @@ class Test4 { @Ann3(arr = Array("dlkfj", "DSF")) @Ann4(i = 2908) @Ann5(value = classOf[Int]) - def foo {} + def foo: Unit = {} @Ann4(i = Test3.i) @Ann5(Test3.cls) - def bar {} + def bar: Unit = {} } diff --git a/test/files/pos/any-vs-anyref.scala b/test/files/pos/any-vs-anyref.scala new file mode 100644 index 000000000000..4f45b5e30a4f --- /dev/null +++ b/test/files/pos/any-vs-anyref.scala @@ -0,0 +1,23 @@ +trait Quux extends Any +trait QuuxRef extends AnyRef +final class Bippy(val x: Any) extends AnyVal with Quux + +object Foo { + def foo1[A <: Product](a: A) = { type X = a.type } + def foo2[A <: Product with Quux](a: A) = { type X = a.type } + def foo3(a: Product) = { type X = a.type } + def foo4(a: Product with Quux) = { type X = a.type } + + def ok1[A <: QuuxRef](a: A) = { type X = a.type } + def ok2[A <: Product with QuuxRef](a: A) = { type X = a.type } + def ok3(a: QuuxRef) = { type X = a.type } + def ok4(a: Product with QuuxRef) = { type X = a.type } + def ok5(x: QuuxRef with Product) = (x eq "abc") && ("abc" eq x) + def ok6(x: QuuxRef with Product { def f: Int }) = (x eq "abc") && ("abc" eq x) + def ok7(x: QuuxRef { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x) +} + +object Bar { + def f(x: Quux { def g(x: Int): Int }): Int = x g 5 + f(new Quux { def g(x: Int) = x }) +} diff --git a/test/files/pos/array-interfaces.scala b/test/files/pos/array-interfaces.scala index 70cafd2bb11a..88b9a3f301d8 100644 --- a/test/files/pos/array-interfaces.scala +++ b/test/files/pos/array-interfaces.scala @@ -6,4 +6,4 @@ object s { f(args) g(args) } -} \ No 
newline at end of file +} diff --git a/test/files/pos/arrays2.scala b/test/files/pos/arrays2.scala index b770d21b8a26..8984fd615ad1 100644 --- a/test/files/pos/arrays2.scala +++ b/test/files/pos/arrays2.scala @@ -1,11 +1,11 @@ -case class C(); +case class C() object arrays2 { def main(args: Array[String]): Unit = { - val a: Array[Array[C]] = new Array[Array[C]](2); - a(0) = new Array[C](2); - a(0)(0) = new C(); + val a: Array[Array[C]] = new Array[Array[C]](2) + a(0) = new Array[C](2) + a(0)(0) = new C() } } @@ -14,10 +14,14 @@ object arrays4 { val args = Array[String]("World") "Hello %1$s".format(args: _*) } +/* +test/files/pos/arrays2.scala:15: warning: Passing an explicit array value to a Scala varargs method is deprecated (since 2.13.0) and will result in a defensive copy; Use the more efficient non-copying ArraySeq.unsafeWrapArray or an explicit toIndexedSeq call + "Hello %1$s".format(args: _*) + ^ +one warning found +*/ // #2461 object arrays3 { - import collection.convert.ImplicitConversions._ def apply[X](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*) } - diff --git a/test/files/pos/attachments-typed-another-ident/Impls_1.scala b/test/files/pos/attachments-typed-another-ident/Impls_1.scala index 7213e543a90e..0a4f9614e5ae 100644 --- a/test/files/pos/attachments-typed-another-ident/Impls_1.scala +++ b/test/files/pos/attachments-typed-another-ident/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context import language.experimental.macros @@ -15,5 +14,5 @@ object Macros { c.Expr[Int](typed) } - def foo = macro impl + def foo: Int = macro impl } diff --git a/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala index 90b2e3de4fbd..022639bfe9b8 100644 --- a/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala +++ b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala @@ -1,4 
+1,3 @@ -// scalac: -language:experimental.macros object Test extends App { def bar = 2 Macros.foo diff --git a/test/files/pos/attachments-typed-ident/Impls_1.scala b/test/files/pos/attachments-typed-ident/Impls_1.scala index 7213e543a90e..0a4f9614e5ae 100644 --- a/test/files/pos/attachments-typed-ident/Impls_1.scala +++ b/test/files/pos/attachments-typed-ident/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context import language.experimental.macros @@ -15,5 +14,5 @@ object Macros { c.Expr[Int](typed) } - def foo = macro impl + def foo: Int = macro impl } diff --git a/test/files/pos/attachments-typed-ident/Macros_Test_2.scala b/test/files/pos/attachments-typed-ident/Macros_Test_2.scala index 8604eb40017d..45a0609de0b9 100644 --- a/test/files/pos/attachments-typed-ident/Macros_Test_2.scala +++ b/test/files/pos/attachments-typed-ident/Macros_Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { def bar = 2 Macros.foo diff --git a/test/files/pos/auto-application.scala b/test/files/pos/auto-application.scala new file mode 100644 index 000000000000..293136c39b0d --- /dev/null +++ b/test/files/pos/auto-application.scala @@ -0,0 +1,46 @@ +//> using options -Werror -deprecation -Xsource:3 +// +class Test { + def a1(xs: List[String]): Int = xs.hashCode + def a2(xs: List[String]): Int = xs.hashCode() + def a3(xs: List[String]): String = xs.toString + def a4(xs: List[String]): String = xs.toString() + def a5(xs: List[String]): Class[_] = xs.getClass + def a6(xs: List[String]): Class[_] = xs.getClass() + def a7(xs: List[String]): Int = xs.## + def a9(x: Address): String = x.toString + def a10(x: Address): String = x.toString() + def a11(x: A): String = x.toString + def a12(x: A): String = x.toString() + def a13(x: B): String = x.toString + def a14(x: B): String = x.toString() +} + +case class Address() + +class A() { + override def toString(): String = "A()" +} + +class 
B() { + override def toString: String = "B()" +} + +// Value class generates this.underlying.hashCode +case class C(c: Int) extends AnyVal + +// This generates toString$extension +class Bibbity(val i: Int) extends AnyVal { + override def toString = "hi" +} + +class City extends Runnable { override def run(): Unit = () } +object City { + val c = new City + c.run // should be ok without parens +} + +object Sam { + val r: java.lang.Runnable = () => () + r.run // should be ok without parens +} diff --git a/test/files/pos/bcode_throw_null/TN.scala b/test/files/pos/bcode_throw_null/TN.scala index ed38b59baef4..ab04c45131ca 100644 --- a/test/files/pos/bcode_throw_null/TN.scala +++ b/test/files/pos/bcode_throw_null/TN.scala @@ -1,6 +1,6 @@ object TN { - def pre1(b: Boolean) { + def pre1(b: Boolean): Unit = { println(if (b) 1 else throw null) } diff --git a/test/files/pos/builders.scala b/test/files/pos/builders.scala index 0b620769c0a1..3defae3c095a 100644 --- a/test/files/pos/builders.scala +++ b/test/files/pos/builders.scala @@ -1,21 +1,21 @@ object builders { trait Builder[-From, +To, -Elem] { - def += (elem: Elem) + def += (elem: Elem): Unit def result: To } implicit def iterableBuilder[A, B] = new Builder[Iterable[A], Iterable[B], B] { println("new iterable builder") private val buf = new scala.collection.mutable.ListBuffer[B] - def += (elem: B) { buf += elem } + def += (elem: B): Unit = { buf += elem } def result: Iterable[B] = buf.toList } implicit def listBuilder[A, B] = new Builder[List[A], List[B], B] { println("new list builder") private val buf = new scala.collection.mutable.ListBuffer[B] - def += (elem: B) { buf += elem } + def += (elem: B): Unit = { buf += elem } def result: List[B] = buf.toList } /* diff --git a/test/files/pos/byname-implicits-1.scala b/test/files/pos/byname-implicits-1.scala new file mode 100644 index 000000000000..f6b48e0750f7 --- /dev/null +++ b/test/files/pos/byname-implicits-1.scala @@ -0,0 +1,8 @@ +trait Foo +object Foo { + implicit 
def foo(implicit rec: => Foo): Foo = ??? +} + +object Test { + implicitly[Foo] +} diff --git a/test/files/pos/byname-implicits-10.scala b/test/files/pos/byname-implicits-10.scala new file mode 100644 index 000000000000..6686e751c4d8 --- /dev/null +++ b/test/files/pos/byname-implicits-10.scala @@ -0,0 +1,11 @@ +trait Foo[T] + +object Foo { + implicit def pair[T, U](implicit fooT: => Foo[(T, U)], fooU: => Foo[(U, T)]): Foo[(T, U)] = new Foo[(T, U)] {} + implicit def int: Foo[Int] = new Foo[Int] {} + implicit def string: Foo[String] = new Foo[String] {} +} + +object Test { + implicitly[Foo[(Int, String)]] +} diff --git a/test/files/pos/byname-implicits-12.scala b/test/files/pos/byname-implicits-12.scala new file mode 100644 index 000000000000..57134e09d7e7 --- /dev/null +++ b/test/files/pos/byname-implicits-12.scala @@ -0,0 +1,15 @@ +trait Foo[T] +object Foo { + implicit def unit: Foo[Unit] = ??? + implicit def int: Foo[Int] = ??? + implicit def pair[T, U](implicit ft: Foo[T], fu: Foo[U]): Foo[(T, U)] = ??? +} + +class Bar +object Bar { + implicit def bar(implicit f: => Foo[(Int, (Int, Unit))]): Foo[Bar] = ??? 
+} + +object Test { + implicitly[Foo[(Bar, Unit)]] +} diff --git a/test/files/pos/byname-implicits-13.scala b/test/files/pos/byname-implicits-13.scala new file mode 100644 index 000000000000..4ecc495d7a07 --- /dev/null +++ b/test/files/pos/byname-implicits-13.scala @@ -0,0 +1,192 @@ +// deriving/src/main/scala/by-name-implicit-test.scala.scala +sealed trait AABB +case class AA(a: String) extends AABB +case class BB(a: String) extends AABB +case class DAABB(d: Double, aabb: AABB) +case class IDAABBS(i: Int, daabb: DAABB, s: String) + +case class Dog(age: Long) +case class Cat(name: String, friend: Either[Cat, Dog]) + +// Definitions from Shapeless --------------------------------------------------------------------- + +sealed trait HList extends Product with Serializable +final case class ::[+H, +T <: HList](head: H, tail: T) extends HList +sealed trait HNil extends HList +final case object HNil extends HNil + +sealed trait Coproduct extends Product with Serializable +sealed trait :+:[+H, +T <: Coproduct] extends Coproduct +final case class Inl[+H, +T <: Coproduct](head: H) extends :+:[H, T] +final case class Inr[+H, +T <: Coproduct](tail: T) extends :+:[H, T] +sealed trait CNil extends Coproduct + +trait Generic[T] { + type Repr + def to(t: T): Repr + def from(r: Repr): T +} + +// Manual Generic macro expansions ---------------------------------------------------------------- + +object GenericInstances { + implicit val genAABB: Generic[AABB] { type Repr = AA :+: BB :+: CNil } = + new Generic[AABB] { + type Repr = AA :+: BB :+: CNil + def to(t: AABB): Repr = t match { + case x: AA => Inl(x) + case x: BB => Inr(Inl(x)) + } + def from(r: Repr): AABB = r match { + case Inl(x) => x + case Inr(Inl(x)) => x + case _ => ??? 
+ } + } + + implicit val genAA: Generic[AA] { type Repr = String :: HNil } = + new Generic[AA] { + type Repr = String :: HNil + def to(t: AA): Repr = t match { case AA(x) => ::(x, HNil) } + def from(r: Repr): AA = r match { case ::(x, HNil) => AA(x) } + } + + implicit val genBB: Generic[BB] { type Repr = String :: HNil } = + new Generic[BB] { + type Repr = String :: HNil + def to(t: BB): Repr = t match { case BB(x) => ::(x, HNil) } + def from(r: Repr): BB = r match { case ::(x, HNil) => BB(x) } + } + + implicit val genDAABB: Generic[DAABB] { type Repr = Double :: AABB :: HNil } = + new Generic[DAABB] { + type Repr = Double :: AABB :: HNil + def to(t: DAABB): Repr = t match { case DAABB(x, y) => ::(x, ::(y, HNil)) } + def from(r: Repr): DAABB = r match { case ::(x, ::(y, HNil)) => DAABB(x, y) } + } + + implicit val genIDAABBS: Generic[IDAABBS] { type Repr = Int :: DAABB :: String :: HNil } = + new Generic[IDAABBS] { + type Repr = Int :: DAABB :: String :: HNil + def to(t: IDAABBS): Repr = t match { case IDAABBS(x, y, z) => ::(x, ::(y, ::(z, HNil))) } + def from(r: Repr): IDAABBS = r match { case ::(x, ::(y, ::(z, HNil))) => IDAABBS(x, y, z) } + } + + implicit val genDog: Generic[Dog] { type Repr = Long :: HNil } = + new Generic[Dog] { + type Repr = Long :: HNil + def to(t: Dog): Repr = t match { case Dog(x) => ::(x, HNil) } + def from(r: Repr): Dog = r match { case ::(x, HNil) => Dog(x) } + } + + implicit val genCat: Generic[Cat] { type Repr = String :: Either[Cat, Dog] :: HNil } = + new Generic[Cat] { + type Repr = String :: Either[Cat, Dog] :: HNil + def to(t: Cat): Repr = t match { case Cat(x, y) => ::(x, ::(y, HNil)) } + def from(r: Repr): Cat = r match { case ::(x, ::(y, HNil)) => Cat(x, y) } + } + + implicit def genEither[A, B]: Generic[Either[A, B]] { type Repr = Left[A, B] :+: Right[A, B] :+: CNil } = + new Generic[Either[A, B]] { + type Repr = Left[A, B] :+: Right[A, B] :+: CNil + def to(t: Either[A, B]): Repr = t match { + case (x: Left[A, B] @unchecked) 
=> Inl(x) + case (x: Right[A, B] @unchecked) => Inr(Inl(x)) + } + def from(r: Repr): Either[A, B] = r match { + case Inl(x) => x + case Inr(Inl(x)) => x + case _ => ??? + } + } + + implicit def genLeft[A, B]: Generic[Left[A, B]] { type Repr = A :: HNil } = + new Generic[Left[A, B]] { + type Repr = A :: HNil + def to(t: Left[A, B]): Repr = t match { case Left(x) => ::(x, HNil) } + def from(r: Repr): Left[A, B] = r match { case ::(x, HNil) => Left(x) } + } + + implicit def genRight[A, B]: Generic[Right[A, B]] { type Repr = B :: HNil } = + new Generic[Right[A, B]] { + type Repr = B :: HNil + def to(t: Right[A, B]): Repr = t match { case Right(x) => ::(x, HNil) } + def from(r: Repr): Right[A, B] = r match { case ::(x, HNil) => Right(x) } + } +} + +// First example from https://github.com/milessabin/shapeless-type-class-derivation-2015-demo +object equal { + trait Eq[T] { + def eqv(x: T, y: T): Boolean + } + + object Eq { + implicit val eqInt: Eq[Int] = + new Eq[Int] { + def eqv(x: Int, y: Int): Boolean = x == y + } + + implicit val eqString: Eq[String] = + new Eq[String] { + def eqv(x: String, y: String): Boolean = x == y + } + + implicit def eqGeneric[T, R] + (implicit + gen: Generic[T] { type Repr = R }, + eqRepr: => Eq[R] + ): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = + eqRepr.eqv(gen.to(x), gen.to(y)) + } + + implicit val eqHNil: Eq[HNil] = new Eq[HNil] { + def eqv(x: HNil, y: HNil): Boolean = true + } + + implicit def eqHCons[H, T <: HList] + (implicit + eqH: Eq[H], + eqT: Eq[T] + ): Eq[H :: T] = + new Eq[H :: T] { + def eqv(x: H :: T, y: H :: T): Boolean = + eqH.eqv(x.head, y.head) && eqT.eqv(x.tail, y.tail) + } + + implicit val eqCNil: Eq[CNil] = new Eq[CNil] { + def eqv(x: CNil, y: CNil): Boolean = true + } + + implicit def eqCNCons[H, T <: Coproduct] + (implicit + eqH: Eq[H], + eqT: Eq[T] + ): Eq[H :+: T] = + new Eq[H :+: T] { + def eqv(x: H :+: T, y: H :+: T): Boolean = + (x, y) match { + case (Inl(xh), Inl(yh)) => eqH.eqv(xh, yh) + case 
(Inr(xt), Inr(yt)) => eqT.eqv(xt, yt) + case _ => false + } + } + } + + implicit class EqOps[T](x: T)(implicit eqT: Eq[T]) { + def ===(y: T): Boolean = eqT.eqv(x, y) + } + + import GenericInstances._ + + implicit val EqLongInstance: Eq[Long] = new Eq[Long] { def eqv(x: Long, y: Long): Boolean = x == y } + implicit val EqDoubleInstance: Eq[Double] = new Eq[Double] { def eqv(x: Double, y: Double): Boolean = x == y } + implicit val EqIntInstance: Eq[Int] = new Eq[Int] { def eqv(x: Int, y: Int): Boolean = x == y } + implicit val EqStringInstance: Eq[String] = new Eq[String] { def eqv(x: String, y: String): Boolean = x == y } + + implicitly[Eq[Dog]] + implicitly[Eq[Cat]] + implicitly[Eq[IDAABBS]] +} diff --git a/test/files/pos/byname-implicits-14.scala b/test/files/pos/byname-implicits-14.scala new file mode 100644 index 000000000000..122903d7b695 --- /dev/null +++ b/test/files/pos/byname-implicits-14.scala @@ -0,0 +1,192 @@ +// deriving/src/main/scala/by-name-implicit-test.scala.scala +sealed trait AABB +case class AA(a: String) extends AABB +case class BB(a: String) extends AABB +case class DAABB(d: Double, aabb: AABB) +case class IDAABBS(i: Int, daabb: DAABB, s: String) + +case class Dog(age: Long) +case class Cat(name: String, friend: Either[Cat, Dog]) + +// Definitions from Shapeless --------------------------------------------------------------------- + +sealed trait HList extends Product with Serializable +final case class ::[+H, +T <: HList](head: H, tail: T) extends HList +sealed trait HNil extends HList +final case object HNil extends HNil + +sealed trait Coproduct extends Product with Serializable +sealed trait :+:[+H, +T <: Coproduct] extends Coproduct +final case class Inl[+H, +T <: Coproduct](head: H) extends :+:[H, T] +final case class Inr[+H, +T <: Coproduct](tail: T) extends :+:[H, T] +sealed trait CNil extends Coproduct + +trait Generic[T] { + type Repr + def to(t: T): Repr + def from(r: Repr): T +} + +// Manual Generic macro expansions 
---------------------------------------------------------------- + +object GenericInstances { + implicit val genAABB: Generic[AABB] { type Repr = AA :+: BB :+: CNil } = + new Generic[AABB] { + type Repr = AA :+: BB :+: CNil + def to(t: AABB): Repr = t match { + case x: AA => Inl(x) + case x: BB => Inr(Inl(x)) + } + def from(r: Repr): AABB = r match { + case Inl(x) => x + case Inr(Inl(x)) => x + case _ => ??? + } + } + + implicit val genAA: Generic[AA] { type Repr = String :: HNil } = + new Generic[AA] { + type Repr = String :: HNil + def to(t: AA): Repr = t match { case AA(x) => ::(x, HNil) } + def from(r: Repr): AA = r match { case ::(x, HNil) => AA(x) } + } + + implicit val genBB: Generic[BB] { type Repr = String :: HNil } = + new Generic[BB] { + type Repr = String :: HNil + def to(t: BB): Repr = t match { case BB(x) => ::(x, HNil) } + def from(r: Repr): BB = r match { case ::(x, HNil) => BB(x) } + } + + implicit val genDAABB: Generic[DAABB] { type Repr = Double :: AABB :: HNil } = + new Generic[DAABB] { + type Repr = Double :: AABB :: HNil + def to(t: DAABB): Repr = t match { case DAABB(x, y) => ::(x, ::(y, HNil)) } + def from(r: Repr): DAABB = r match { case ::(x, ::(y, HNil)) => DAABB(x, y) } + } + + implicit val genIDAABBS: Generic[IDAABBS] { type Repr = Int :: DAABB :: String :: HNil } = + new Generic[IDAABBS] { + type Repr = Int :: DAABB :: String :: HNil + def to(t: IDAABBS): Repr = t match { case IDAABBS(x, y, z) => ::(x, ::(y, ::(z, HNil))) } + def from(r: Repr): IDAABBS = r match { case ::(x, ::(y, ::(z, HNil))) => IDAABBS(x, y, z) } + } + + implicit val genDog: Generic[Dog] { type Repr = Long :: HNil } = + new Generic[Dog] { + type Repr = Long :: HNil + def to(t: Dog): Repr = t match { case Dog(x) => ::(x, HNil) } + def from(r: Repr): Dog = r match { case ::(x, HNil) => Dog(x) } + } + + implicit val genCat: Generic[Cat] { type Repr = String :: Either[Cat, Dog] :: HNil } = + new Generic[Cat] { + type Repr = String :: Either[Cat, Dog] :: HNil + def to(t: 
Cat): Repr = t match { case Cat(x, y) => ::(x, ::(y, HNil)) } + def from(r: Repr): Cat = r match { case ::(x, ::(y, HNil)) => Cat(x, y) } + } + + implicit def genEither[A, B]: Generic[Either[A, B]] { type Repr = Left[A, B] :+: Right[A, B] :+: CNil } = + new Generic[Either[A, B]] { + type Repr = Left[A, B] :+: Right[A, B] :+: CNil + def to(t: Either[A, B]): Repr = t match { + case (x: Left[A, B] @unchecked) => Inl(x) + case (x: Right[A, B] @unchecked) => Inr(Inl(x)) + } + def from(r: Repr): Either[A, B] = r match { + case Inl(x) => x + case Inr(Inl(x)) => x + case _ => ??? + } + } + + implicit def genLeft[A, B]: Generic[Left[A, B]] { type Repr = A :: HNil } = + new Generic[Left[A, B]] { + type Repr = A :: HNil + def to(t: Left[A, B]): Repr = t match { case Left(x) => ::(x, HNil) } + def from(r: Repr): Left[A, B] = r match { case ::(x, HNil) => Left(x) } + } + + implicit def genRight[A, B]: Generic[Right[A, B]] { type Repr = B :: HNil } = + new Generic[Right[A, B]] { + type Repr = B :: HNil + def to(t: Right[A, B]): Repr = t match { case Right(x) => ::(x, HNil) } + def from(r: Repr): Right[A, B] = r match { case ::(x, HNil) => Right(x) } + } +} + +// First example from https://github.com/milessabin/shapeless-type-class-derivation-2015-demo +object equal { + trait Eq[T] { + def eqv(x: T, y: T): Boolean + } + + object Eq { + implicit val eqInt: Eq[Int] = + new Eq[Int] { + def eqv(x: Int, y: Int): Boolean = x == y + } + + implicit val eqString: Eq[String] = + new Eq[String] { + def eqv(x: String, y: String): Boolean = x == y + } + + implicit def eqGeneric[T, R] + (implicit + gen: Generic[T] { type Repr = R }, + eqRepr: => Eq[R] + ): Eq[T] = + new Eq[T] { + def eqv(x: T, y: T): Boolean = + eqRepr.eqv(gen.to(x), gen.to(y)) + } + + implicit val eqHNil: Eq[HNil] = new Eq[HNil] { + def eqv(x: HNil, y: HNil): Boolean = true + } + + implicit def eqHCons[H, T <: HList] + (implicit + eqH: => Eq[H], + eqT: => Eq[T] + ): Eq[H :: T] = + new Eq[H :: T] { + def eqv(x: H :: T, y: H :: 
T): Boolean = + eqH.eqv(x.head, y.head) && eqT.eqv(x.tail, y.tail) + } + + implicit val eqCNil: Eq[CNil] = new Eq[CNil] { + def eqv(x: CNil, y: CNil): Boolean = true + } + + implicit def eqCNCons[H, T <: Coproduct] + (implicit + eqH: => Eq[H], + eqT: => Eq[T] + ): Eq[H :+: T] = + new Eq[H :+: T] { + def eqv(x: H :+: T, y: H :+: T): Boolean = + (x, y) match { + case (Inl(xh), Inl(yh)) => eqH.eqv(xh, yh) + case (Inr(xt), Inr(yt)) => eqT.eqv(xt, yt) + case _ => false + } + } + } + + implicit class EqOps[T](x: T)(implicit eqT: Eq[T]) { + def ===(y: T): Boolean = eqT.eqv(x, y) + } + + import GenericInstances._ + + implicit val EqLongInstance: Eq[Long] = new Eq[Long] { def eqv(x: Long, y: Long): Boolean = x == y } + implicit val EqDoubleInstance: Eq[Double] = new Eq[Double] { def eqv(x: Double, y: Double): Boolean = x == y } + implicit val EqIntInstance: Eq[Int] = new Eq[Int] { def eqv(x: Int, y: Int): Boolean = x == y } + implicit val EqStringInstance: Eq[String] = new Eq[String] { def eqv(x: String, y: String): Boolean = x == y } + + implicitly[Eq[Dog]] + implicitly[Eq[Cat]] + implicitly[Eq[IDAABBS]] +} diff --git a/test/files/pos/byname-implicits-15.scala b/test/files/pos/byname-implicits-15.scala new file mode 100644 index 000000000000..1818d3794759 --- /dev/null +++ b/test/files/pos/byname-implicits-15.scala @@ -0,0 +1,27 @@ +object Test { + trait Generic[T] { + type Repr + } + + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + implicit def genTuple3[T, U, V]: Aux[(T, U, V), (T, (U, (V, Unit)))] = ??? + implicit def genTuple5[T, U, V, W, X]: Aux[(T, U, V, W, X), (T, (U, (V, (W, (X, Unit)))))] = ??? + } + + trait Show[T] + object Show { + implicit val showUnit: Show[Unit] = ??? + implicit val showInt: Show[Int] = ??? + implicit def showPair[T, U](implicit st: Show[T], su: Show[U]): Show[(T, U)] = ??? + implicit def showGen[T, R](implicit gen: Generic.Aux[T, R], sr: => Show[R]): Show[T] = ??? 
+ } + + type I5 = (Int, Int, Int, Int, Int) + + // Demonstrates that the bynamity of sr suppresses the false positive divergence test + // which would otherwise see 5 nested pairs dominating 3 nested pairs. + implicitly[Show[(I5, I5, I5)]] + implicitly[Show[(Int, I5, Int)]] + implicitly[Show[(I5, (I5, I5, I5), Int)]] +} diff --git a/test/files/pos/byname-implicits-19.scala b/test/files/pos/byname-implicits-19.scala new file mode 100644 index 000000000000..94321224b4ec --- /dev/null +++ b/test/files/pos/byname-implicits-19.scala @@ -0,0 +1,42 @@ +object Test { + class A + class B + class C + class D + + { + implicit def parentA(implicit arg: => B): A = ??? + implicit def parentB(implicit arg: C): B = ??? + implicit def parentC(implicit arg: D): C = ??? + implicit def parentD(implicit arg: A): D = ??? + + implicitly[A] + } + + { + implicit def parentA(implicit arg: B): A = ??? + implicit def parentB(implicit arg: => C): B = ??? + implicit def parentC(implicit arg: D): C = ??? + implicit def parentD(implicit arg: A): D = ??? + + implicitly[A] + } + + { + implicit def parentA(implicit arg: B): A = ??? + implicit def parentB(implicit arg: C): B = ??? + implicit def parentC(implicit arg: => D): C = ??? + implicit def parentD(implicit arg: A): D = ??? + + implicitly[A] + } + + { + implicit def parentA(implicit arg: B): A = ??? + implicit def parentB(implicit arg: C): B = ??? + implicit def parentC(implicit arg: D): C = ??? + implicit def parentD(implicit arg: => A): D = ??? + + implicitly[A] + } +} diff --git a/test/files/pos/byname-implicits-2.scala b/test/files/pos/byname-implicits-2.scala new file mode 100644 index 000000000000..cd752c301b08 --- /dev/null +++ b/test/files/pos/byname-implicits-2.scala @@ -0,0 +1,25 @@ +trait Foo[T] +object Foo { + implicit val fooInt: Foo[Int] = ??? + implicit def fooPair[H, T](implicit h: Foo[H], t: Foo[T]): Foo[(H, T)] = ??? +} + +trait Bar +object Bar { + implicit def fooBar(implicit repr: => Foo[(Int, (Int, Int))]): Foo[Bar] = ??? 
+} + +trait Baz +object Baz { + implicit def fooBaz(implicit i: Foo[Int], rec: => Foo[Baz]): Foo[Baz] = ??? +} + +object Test { + implicitly[Foo[Int]] + implicitly[Foo[(Int, Int)]] + implicitly[Foo[(Int, (Int, Int))]] + implicitly[Foo[(Int, (Int, (Int, Int)))]] + implicitly[Foo[Bar]] + implicitly[Foo[(Int, Bar)]] + implicitly[Foo[Baz]] +} diff --git a/test/files/pos/byname-implicits-20.scala b/test/files/pos/byname-implicits-20.scala new file mode 100644 index 000000000000..7b725d71e50f --- /dev/null +++ b/test/files/pos/byname-implicits-20.scala @@ -0,0 +1,28 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait Foo[T] + object Foo { + implicit val fooUnit: Foo[Unit] = ??? + implicit val fooInt: Foo[Int] = ??? + implicit def fooPair[T, U](implicit fooT: Foo[T], fooU: Foo[U]): Foo[(T, U)] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: => Foo[R]): Foo[T] = ??? + } + + trait A + object A { + implicit val genA: Generic[A] { type Repr = (B, Unit) } = ??? + } + + trait B + object B { + implicit val genB: Generic[B] { type Repr = (Int, (Int, Unit)) } = ??? 
+ } + + implicitly[Foo[A]] +} diff --git a/test/files/pos/byname-implicits-22.scala b/test/files/pos/byname-implicits-22.scala new file mode 100644 index 000000000000..1fcfb98f6ac0 --- /dev/null +++ b/test/files/pos/byname-implicits-22.scala @@ -0,0 +1,44 @@ +object repro { + import scala.reflect.runtime.universe._ + + trait +[L, R] + + case class Atomic[V](val name: String) + object Atomic { + def apply[V](implicit vtt: TypeTag[V]): Atomic[V] = Atomic[V](vtt.tpe.typeSymbol.name.toString) + } + + case class Assign[V, X](val name: String) + object Assign { + def apply[V, X](implicit vtt: TypeTag[V]): Assign[V, X] = Assign[V, X](vtt.tpe.typeSymbol.name.toString) + } + + trait AsString[X] { + def str: String + } + object AsString { + implicit def atomic[V](implicit a: Atomic[V]): AsString[V] = + new AsString[V] { val str = a.name } + implicit def assign[V, X](implicit a: Assign[V, X], asx: AsString[X]): AsString[V] = + new AsString[V] { val str = asx.str } + implicit def plus[L, R](implicit asl: AsString[L], asr: AsString[R]): AsString[+[L, R]] = + new AsString[+[L, R]] { val str = s"(${asl.str}) + (${asr.str})" } + } + + trait X + implicit val declareX = Atomic[X] + trait Y + implicit val declareY = Atomic[Y] + trait Z + implicit val declareZ = Atomic[Z] + + trait Q + implicit val declareQ = Assign[Q, (X + Y) + Z] + trait R + implicit val declareR = Assign[R, Q + Z] + + implicitly[AsString[X]] + implicitly[AsString[X + Y]] + implicitly[AsString[Q]] + implicitly[AsString[R]] +} diff --git a/test/files/pos/byname-implicits-23.scala b/test/files/pos/byname-implicits-23.scala new file mode 100644 index 000000000000..19b6746732d2 --- /dev/null +++ b/test/files/pos/byname-implicits-23.scala @@ -0,0 +1,30 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait GNil + + trait Foo[T] + object Foo { + implicit val fooUnit: Foo[Unit] = ??? + implicit val fooInt: Foo[Int] = ??? 
+ implicit def fooPair[T, U](implicit fooT: Foo[T], fooU: Foo[U]): Foo[(T, U)] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: Foo[R]): Foo[T] = ??? + } + + trait A + object A { + implicit val genA: Generic[A] { type Repr = (B, (Int, Unit)) } = ??? + } + + trait B + object B { + implicit val genB: Generic[B] { type Repr = (Int, (Int, (Int, Unit))) } = ??? + } + + implicitly[Foo[A]] +} diff --git a/test/files/pos/byname-implicits-24.scala b/test/files/pos/byname-implicits-24.scala new file mode 100644 index 000000000000..70b7f57873f9 --- /dev/null +++ b/test/files/pos/byname-implicits-24.scala @@ -0,0 +1,30 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait GNil + + trait Foo[T] + object Foo { + implicit val fooUnit: Foo[Unit] = ??? + implicit val fooInt: Foo[Int] = ??? + implicit def fooPair[T, U](implicit fooT: Foo[T], fooU: Foo[U]): Foo[(T, U)] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: Foo[R]): Foo[T] = ??? + } + + trait A + object A { + implicit val genA: Generic[A] { type Repr = (B, (Unit, Unit)) } = ??? + } + + trait B + object B { + implicit val genB: Generic[B] { type Repr = (Unit, (Unit, (Unit, Unit))) } = ??? + } + + implicitly[Foo[A]] +} diff --git a/test/files/pos/byname-implicits-25.scala b/test/files/pos/byname-implicits-25.scala new file mode 100644 index 000000000000..9b8658654811 --- /dev/null +++ b/test/files/pos/byname-implicits-25.scala @@ -0,0 +1,35 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait GNil + + trait Foo[T] + object Foo { + implicit val fooUnit: Foo[Unit] = ??? + implicit val fooInt: Foo[Int] = ??? + implicit def fooPair[T, U](implicit fooT: Foo[T], fooU: Foo[U]): Foo[(T, U)] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: Foo[R]): Foo[T] = ??? 
+ } + + case class A(b: B, c: C, i: Int) + object A { + implicit val genA: Generic[A] { type Repr = (B, (C, (Int, Unit))) } = ??? + } + + case class B(c0: C, c1: C, c2: C, i: Int) + object B { + implicit val genB: Generic[B] { type Repr = (C, (C, (C, (Int, Unit)))) } = ??? + } + + case class C(i0: Int, i1: Int, i2: Int, i3: Int, i4: Int) + object C { + implicit val genC: Generic[C] { type Repr = (Int, (Int, (Int, (Int, (Int, Unit))))) } = ??? + } + + implicitly[Foo[A]] +} diff --git a/test/files/pos/byname-implicits-27.scala b/test/files/pos/byname-implicits-27.scala new file mode 100644 index 000000000000..6b7a0a258f16 --- /dev/null +++ b/test/files/pos/byname-implicits-27.scala @@ -0,0 +1,37 @@ +object Test { + trait Generic[T] { + type Repr + } + object Generic { + type Aux[T, R] = Generic[T] { type Repr = R } + } + + trait GNil + + trait Foo[T] + object Foo { + implicit val fooUnit: Foo[Unit] = ??? + implicit val fooInt: Foo[Int] = ??? + implicit val fooString: Foo[String] = ??? + implicit val fooBoolean: Foo[Boolean] = ??? + implicit def fooPair[T, U](implicit fooT: Foo[T], fooU: Foo[U]): Foo[(T, U)] = ??? + implicit def fooGen[T, R](implicit gen: Generic.Aux[T, R], fr: Foo[R]): Foo[T] = ??? + } + + case class A(b: B, i: Int) + object A { + implicit val genA: Generic[A] { type Repr = (B, (Int, Unit)) } = ??? + } + + case class B(c: C, i: Int, b: Boolean) + object B { + implicit val genB: Generic[B] { type Repr = (C, (Int, (Boolean, Unit))) } = ??? + } + + case class C(i: Int, s: String, b: Boolean) + object C { + implicit val genC: Generic[C] { type Repr = (Int, (String, (Boolean, Unit))) } = ??? + } + + implicitly[Foo[A]] +} diff --git a/test/files/pos/byname-implicits-3.scala b/test/files/pos/byname-implicits-3.scala new file mode 100644 index 000000000000..d34e559c5907 --- /dev/null +++ b/test/files/pos/byname-implicits-3.scala @@ -0,0 +1,11 @@ +trait Foo[T] + +object Foo { + implicit val int: Foo[Int] = ??? + implicit val bool: Foo[Boolean] = ??? 
+ implicit def pair[T, U](implicit ftu0: => Foo[(T, U)], ftu1: => Foo[(T, U)]): Foo[(T, U)] = ??? +} + +object Test { + implicitly[Foo[(Int, Boolean)]] +} diff --git a/test/files/pos/byname-implicits-31/Macros_1.scala b/test/files/pos/byname-implicits-31/Macros_1.scala new file mode 100644 index 000000000000..612656d58617 --- /dev/null +++ b/test/files/pos/byname-implicits-31/Macros_1.scala @@ -0,0 +1,63 @@ +import scala.language.experimental.macros + +import scala.reflect.macros.whitebox + +object util { + def lazily[T](implicit t: => T): T = t +} + +abstract class Quux { + def ping: Ping + def pong: Pong +} + +object Quux { + implicit def mkQuux: Quux = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + q""" + new Quux { + def ping = util.lazily[Ping] + def pong = util.lazily[Pong] + } + """ + } +} + +abstract class Ping { + def next: Quux +} + +object Ping { + implicit def mkPing: Ping = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + q""" + new Ping { + def next = util.lazily[Quux] + } + """ + } +} + +abstract class Pong { + def next: Quux +} + +object Pong { + implicit def mkPong: Pong = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + q""" + new Pong { + def next = util.lazily[Quux] + } + """ + } +} diff --git a/test/files/pos/byname-implicits-31/Main_2.scala b/test/files/pos/byname-implicits-31/Main_2.scala new file mode 100644 index 000000000000..7d279d8b9cae --- /dev/null +++ b/test/files/pos/byname-implicits-31/Main_2.scala @@ -0,0 +1,4 @@ +//> using options -Ycheck:_ +object Test { + util.lazily[Ping] +} diff --git a/test/files/pos/byname-implicits-32/Macros_1.scala b/test/files/pos/byname-implicits-32/Macros_1.scala new file mode 100644 index 000000000000..e427b35431fd --- /dev/null +++ b/test/files/pos/byname-implicits-32/Macros_1.scala @@ -0,0 +1,70 @@ +import scala.language.experimental.macros + +import scala.reflect.macros.whitebox + 
+object util { + def lazily[T](implicit t: => T): T = t +} + +abstract class Quux { + def ping: Ping + def pong: Pong +} + +object Quux { + implicit def mkQuux: Quux = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + val ping = c.untypecheck(c.inferImplicitValue(appliedType(definitions.ByNameParamClass, weakTypeOf[Ping]), silent = false)) + val pong = c.untypecheck(c.inferImplicitValue(appliedType(definitions.ByNameParamClass, weakTypeOf[Pong]), silent = false)) + + q""" + new Quux { + def ping = $ping + def pong = $pong + } + """ + } +} + +abstract class Ping { + def next: Quux +} + +object Ping { + implicit def mkPing: Ping = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + val quux = c.untypecheck(c.inferImplicitValue(appliedType(definitions.ByNameParamClass, weakTypeOf[Quux]), silent = false)) + + q""" + new Ping { + def next = $quux + } + """ + } +} + +abstract class Pong { + def next: Quux +} + +object Pong { + implicit def mkPong: Pong = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + val quux = c.untypecheck(c.inferImplicitValue(appliedType(definitions.ByNameParamClass, weakTypeOf[Quux]), silent = false)) + + q""" + new Pong { + def next = $quux + } + """ + } +} diff --git a/test/files/pos/byname-implicits-32/Main_2.scala b/test/files/pos/byname-implicits-32/Main_2.scala new file mode 100644 index 000000000000..7d279d8b9cae --- /dev/null +++ b/test/files/pos/byname-implicits-32/Main_2.scala @@ -0,0 +1,4 @@ +//> using options -Ycheck:_ +object Test { + util.lazily[Ping] +} diff --git a/test/files/pos/byname-implicits-33/Macros_1.scala b/test/files/pos/byname-implicits-33/Macros_1.scala new file mode 100644 index 000000000000..1beea690d361 --- /dev/null +++ b/test/files/pos/byname-implicits-33/Macros_1.scala @@ -0,0 +1,36 @@ +import scala.language.experimental.macros + +import scala.reflect.macros.whitebox + +object util { + def 
lazily[T](implicit t: => T): T = t +} + +case class Rec[T](res: Rec[T]) + +class Functor[F[_]] + +object Functor { + def apply[F[_]](implicit f: => Functor[F]): Functor[F] = f + + implicit def hcons[F[_]](implicit ihc: IsHCons10[F]): Functor[F] = ??? +} + +trait IsHCons10[L[_]] + +object IsHCons10 { + implicit def apply[L[_]]: IsHCons10[L] = macro mkImpl[L] + + def mkImpl[L[_]](c: whitebox.Context) + (implicit lTag: c.WeakTypeTag[L[_]]): c.Tree = { + import c.universe._ + + val tpe = lTag.tpe.etaExpand + + q""" + new IsHCons10[$tpe] { + def foo = util.lazily[Functor[Rec]] + } + """ + } +} diff --git a/test/files/pos/byname-implicits-33/Main_2.scala b/test/files/pos/byname-implicits-33/Main_2.scala new file mode 100644 index 000000000000..e1e0214b50c3 --- /dev/null +++ b/test/files/pos/byname-implicits-33/Main_2.scala @@ -0,0 +1,4 @@ +//> using options -Ycheck:_ +object Test { + util.lazily[Functor[Rec]] +} diff --git a/test/files/pos/byname-implicits-34/Macros_1.scala b/test/files/pos/byname-implicits-34/Macros_1.scala new file mode 100644 index 000000000000..a3140f18e83a --- /dev/null +++ b/test/files/pos/byname-implicits-34/Macros_1.scala @@ -0,0 +1,25 @@ +import scala.language.experimental.macros + +import scala.reflect.macros.whitebox + +object util { + def lazily[T](implicit t: => T): T = t +} + +abstract class Rec { + def next: Rec +} + +object Rec { + implicit def mkRec: Rec = macro mkImpl + + def mkImpl(c: whitebox.Context): c.Tree = { + import c.universe._ + + q""" + new Rec { + def next = util.lazily[Rec] + } + """ + } +} diff --git a/test/files/pos/byname-implicits-34/Main_2.scala b/test/files/pos/byname-implicits-34/Main_2.scala new file mode 100644 index 000000000000..249d845e0340 --- /dev/null +++ b/test/files/pos/byname-implicits-34/Main_2.scala @@ -0,0 +1,4 @@ +//> using options -Ycheck:_ +object Test { + util.lazily[Rec] +} diff --git a/test/files/pos/byname-implicits-7.scala b/test/files/pos/byname-implicits-7.scala new file mode 100644 index 
000000000000..83c335798dcf --- /dev/null +++ b/test/files/pos/byname-implicits-7.scala @@ -0,0 +1,17 @@ +trait Foo { + type Out + def out: Out +} + +object Foo { + type Aux[Out0] = Foo { type Out = Out0 } + + implicit val fooInt: Aux[Int] = new Foo { type Out = Int ; def out = 23 } +} + +object Test { + def bar[T](t: T)(implicit foo: => Foo.Aux[T]): T = foo.out + + val i = bar(13) + i: Int +} diff --git a/test/files/pos/byname-implicits-8.scala b/test/files/pos/byname-implicits-8.scala new file mode 100644 index 000000000000..ea2e4ce1ad28 --- /dev/null +++ b/test/files/pos/byname-implicits-8.scala @@ -0,0 +1,31 @@ +// shapeless's Lazy implemented in terms of byname implicits +trait Lazy[T] { + val value: T +} + +object Lazy { + implicit def apply[T](implicit t: => T): Lazy[T] = + new Lazy[T] { + lazy val value = t + } + + def unapply[T](lt: Lazy[T]): Option[T] = Some(lt.value) +} + +trait Foo { + type Out + def out: Out +} + +object Foo { + type Aux[Out0] = Foo { type Out = Out0 } + + implicit val fooInt: Aux[Int] = new Foo { type Out = Int ; def out = 23 } +} + +object Test { + def bar[T](t: T)(implicit foo: Lazy[Foo.Aux[T]]): foo.value.Out = foo.value.out + + val i = bar(13) + i: Int +} diff --git a/test/files/pos/byname-implicits-9.scala b/test/files/pos/byname-implicits-9.scala new file mode 100644 index 000000000000..0f1c586dcb37 --- /dev/null +++ b/test/files/pos/byname-implicits-9.scala @@ -0,0 +1,73 @@ +trait Generic[T] { + type Repr + def to(t: T): Repr + def from(r: Repr): T +} + +object Generic { + type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 } +} + +object ListInstances { + type LRepr[T] = Either[::[T], Either[Nil.type, Unit]] + type CRepr[T] = (T, (List[T], Unit)) + type NRepr = Unit + + implicit def genList[T]: Generic.Aux[List[T], LRepr[T]] = new Generic[List[T]] { + type Repr = LRepr[T] + def to(t: List[T]): Repr = t match { + case hd :: tl => Left(::(hd, tl)) + case n@Nil => Right(Left(Nil)) + } + def from(r: Repr): List[T] = r match { + 
case Left(c) => c + case Right(Left(n)) => n + } + } + + implicit def genCons[T]: Generic.Aux[::[T], CRepr[T]] = new Generic[::[T]] { + type Repr = CRepr[T] + def to(t: ::[T]): Repr = (t.head, (t.tail, ())) + def from(r: Repr): ::[T] = ::(r._1, r._2._1) + } + + implicit def genNil: Generic.Aux[Nil.type, NRepr] = new Generic[Nil.type] { + type Repr = NRepr + def to(t: Nil.type): Repr = () + def from(r: Repr): Nil.type = Nil + } +} + +trait Show[T] { + def show(t: T): String +} + +object Show { + implicit def showUnit: Show[Unit] = new Show[Unit] { + def show(u: Unit): String = "()" + } + + implicit def showInt: Show[Int] = new Show[Int] { + def show(i: Int): String = i.toString + } + + implicit def showPair[T, U](implicit st: Show[T], su: Show[U]): Show[(T, U)] = new Show[(T, U)] { + def show(t: (T, U)): String = s"(${st.show(t._1)}, ${su.show(t._2)}" + } + + implicit def showEither[T, U](implicit st: Show[T], su: Show[U]): Show[Either[T, U]] = new Show[Either[T, U]] { + def show(t: Either[T, U]): String = t match { + case Left(t) => s"Left(${st.show(t)})" + case Right(u) => s"Right(${su.show(u)})" + } + } + + implicit def showGen[T, R](implicit gen: Generic.Aux[T, R], sr: => Show[R]): Show[T] = new Show[T] { + def show(t: T) = sr.show(gen.to(t)) + } +} + +object Test { + import ListInstances._ + implicitly[Show[List[Int]]] +} diff --git a/test/files/pos/case-object-add-serializable.scala b/test/files/pos/case-object-add-serializable.scala index 92866b487837..b773f08a7e7c 100644 --- a/test/files/pos/case-object-add-serializable.scala +++ b/test/files/pos/case-object-add-serializable.scala @@ -1,4 +1,4 @@ -// scalac: -Xdev -Xfatal-warnings +//> using options -Xdev -Werror // Was: "warning: !!! base trait Serializable not found in basetypes of object Person. This might indicate incorrect caching of TypeRef#parents." 
// under -Xdev class Test { diff --git a/test/files/pos/caseclass_private_constructor.scala b/test/files/pos/caseclass_private_constructor.scala new file mode 100644 index 000000000000..c401ef41437b --- /dev/null +++ b/test/files/pos/caseclass_private_constructor.scala @@ -0,0 +1,64 @@ +//> using options -Wconf:cat=scala3-migration:ws -Xsource:3 + +case class A private (i: Int) +object A { + def a = A(1).copy(2) // apply and copy are accessible in companion +} + +case class B private (i: Int) { // no user-defined companion object, should compile + def b = B(1).copy(2) // apply and copy are accessible +} + +object qualified_private { + case class A private[qualified_private] (i: Int) + object A { + def a = A(1).copy(2) // apply and copy are accessible in companion + } + + def a = A(1).copy(2) // apply and copy are accessible in qualified_private object + + case class B private[qualified_private] (i: Int) { // no user-defined companion object, should compile + def b = B(1).copy(2) // apply and copy are accessible + } + + def b = B(1).copy(2) // apply and copy are accessible in qualified_private object +} + +case class C protected (i: Int) +class CSub extends C(1) { + def c = copy(2) // copy is accessible in subclass +} +object CTest { + def c = C(1) // apply is public +} + +object qualified_protected { + case class C protected[qualified_protected] (i: Int) + class CSub extends C(1) { + def c = copy(2) // copy is accessible in subclass + } + object CTest { + def c = C(1) // apply is public + def checkExtendsFunction: Int => C = C // companion extends (Int => C) + } + + def c = C(1).copy(2) +} +object CQualifiedTest { + def c = qualified_protected.C(1) // apply is public +} + + +case class OverrideApply private (i: Int) +object OverrideApply { + def apply(i: Int): OverrideApply = new OverrideApply(i) +} + +case class OverrideCopy private (i: Int) { + def copy(i: Int = i): OverrideCopy = OverrideCopy(i) +} + +object OverrideTest { + def oa = OverrideApply(42) // 
overridden apply is public + def oc(o: OverrideCopy) = o.copy(42) // overridden copy is public +} diff --git a/test/files/pos/chaining.scala b/test/files/pos/chaining.scala new file mode 100644 index 000000000000..057a1e048a6a --- /dev/null +++ b/test/files/pos/chaining.scala @@ -0,0 +1,4 @@ +object Test { + case class Foo[A](self: A) { def bar: self.type = self } + lazy val result = Foo(1).bar +} diff --git a/test/files/pos/classOfObjectType/AnnotationWithClassType.java b/test/files/pos/classOfObjectType/AnnotationWithClassType.java new file mode 100644 index 000000000000..476cfdeffe1b --- /dev/null +++ b/test/files/pos/classOfObjectType/AnnotationWithClassType.java @@ -0,0 +1,10 @@ +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface AnnotationWithClassType { + Class cls(); +} diff --git a/test/files/pos/classOfObjectType/Foo.scala b/test/files/pos/classOfObjectType/Foo.scala new file mode 100644 index 000000000000..b7f91bd877be --- /dev/null +++ b/test/files/pos/classOfObjectType/Foo.scala @@ -0,0 +1,7 @@ + +object Bar + +trait Foo { + @AnnotationWithClassType(cls = classOf[Bar.type]) + def function: Any = ??? 
+} diff --git a/test/files/pos/classtag-pos.scala b/test/files/pos/classtag-pos.scala index f30c3c3c1685..83c607f4b85c 100644 --- a/test/files/pos/classtag-pos.scala +++ b/test/files/pos/classtag-pos.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos +// import scala.reflect.runtime.universe._ class A { diff --git a/test/files/pos/collectGenericCC.scala b/test/files/pos/collectGenericCC.scala index 5c51a50d7122..ba19d78d065d 100644 --- a/test/files/pos/collectGenericCC.scala +++ b/test/files/pos/collectGenericCC.scala @@ -1,9 +1,8 @@ -import scala.collection.generic.CanBuildFrom import scala.collection._ object Test { - def collect[A, Res](r: Traversable[A])(implicit bf: generic.CanBuild[A, Res]) = { - val b: collection.mutable.Builder[A, Res] = bf() + def collect[A, Res](r: Iterable[A])(implicit bf: Factory[A, Res]) = { + val b: collection.mutable.Builder[A, Res] = bf.newBuilder r foreach ((a: A) => b += a) b.result } @@ -11,4 +10,4 @@ object Test { collect[Int, Vector[Int]](List(1,2,3,4)) collect[Char, String](List('1','2','3','4')) collect[Char, Array[Char]](List('1','2','3','4')) -} \ No newline at end of file +} diff --git a/test/files/pos/collections.scala b/test/files/pos/collections.scala deleted file mode 100644 index 23b23d016e1b..000000000000 --- a/test/files/pos/collections.scala +++ /dev/null @@ -1,15 +0,0 @@ -package mixins; - -import scala.collection.mutable._; - -class Collections extends HashSet[Int] with ObservableSet[Int] { - override def +=(elem: Int): this.type = super.+=(elem); - override def -=(elem: Int): this.type = super.-=(elem); - override def clear: Unit = super.clear; - -} - -object collections extends Collections; - -//class Collections1 extends HashSet[Int] with ObservableSet[Int,Collections1]; -//object collections1 extends Collections1; diff --git a/test/files/pos/comp-rec-test.scala b/test/files/pos/comp-rec-test.scala index 05031da0235f..a29ab52ef2e1 100644 --- a/test/files/pos/comp-rec-test.scala +++ 
b/test/files/pos/comp-rec-test.scala @@ -1,4 +1,4 @@ -// scalac: -Yrecursion 1 +// was: -Yrecursion 1 object Comp extends App { trait Family { diff --git a/test/files/pos/consVariance.scala b/test/files/pos/consVariance.scala new file mode 100644 index 000000000000..e1fa74d5f514 --- /dev/null +++ b/test/files/pos/consVariance.scala @@ -0,0 +1,3 @@ +class Foo[+A] { + def nel: ::[A] = ??? +} diff --git a/test/files/pos/contextbounds-implicits-new.scala b/test/files/pos/contextbounds-implicits-new.scala index 327c4a98dc18..8389d1332a15 100644 --- a/test/files/pos/contextbounds-implicits-new.scala +++ b/test/files/pos/contextbounds-implicits-new.scala @@ -5,6 +5,6 @@ import scala.reflect.runtime.universe._ */ class C { - def f[T: TypeTag, S: TypeTag](x: T, y: S)(implicit p: C) { } + def f[T: TypeTag, S: TypeTag](x: T, y: S)(implicit p: C): Unit = { } -} \ No newline at end of file +} diff --git a/test/files/pos/contextbounds-implicits-old.scala b/test/files/pos/contextbounds-implicits-old.scala index f9113ee320eb..084a4aaf5e00 100644 --- a/test/files/pos/contextbounds-implicits-old.scala +++ b/test/files/pos/contextbounds-implicits-old.scala @@ -3,6 +3,6 @@ */ class C { - def f[T: Manifest, S: Manifest](x: T, y: S)(implicit p: C) { } + def f[T: Manifest, S: Manifest](x: T, y: S)(implicit p: C): Unit = { } } diff --git a/test/files/pos/cycle-jsoup/Test_2.scala b/test/files/pos/cycle-jsoup/Test_2.scala index d1c45580ff9d..f60c50f74345 100644 --- a/test/files/pos/cycle-jsoup/Test_2.scala +++ b/test/files/pos/cycle-jsoup/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -Ybreak-cycles object Test { def main(args : Array[String]): Unit = { org.jsoup.Jsoup_1.parse(null: java.net.URL, 3000) diff --git a/test/files/pos/cycle/X_2.scala b/test/files/pos/cycle/X_2.scala index 68c22b86446f..3effc687d68f 100644 --- a/test/files/pos/cycle/X_2.scala +++ b/test/files/pos/cycle/X_2.scala @@ -1,4 +1,4 @@ -// scalac: -Ybreak-cycles +//> using options -Ybreak-cycles import bar.J_1._ //<--- illegal 
cyclic reference involving class X diff --git a/test/files/pos/debug-reset-local-attrs.scala b/test/files/pos/debug-reset-local-attrs.scala index 94d68ff9c0f7..cbc86a2c901f 100644 --- a/test/files/pos/debug-reset-local-attrs.scala +++ b/test/files/pos/debug-reset-local-attrs.scala @@ -1,2 +1,2 @@ -// scalac: -Ydebug +//> using options -Ydebug case class FT(f : Float) diff --git a/test/files/pos/delambdafy_t6260_method.scala b/test/files/pos/delambdafy_t6260_method.scala index 477638998fd6..7c073c0d3066 100644 --- a/test/files/pos/delambdafy_t6260_method.scala +++ b/test/files/pos/delambdafy_t6260_method.scala @@ -1,4 +1,4 @@ -// scalac: -Ydelambdafy:method +//> using options -Ydelambdafy:method class Box[X](val x: X) extends AnyVal { def map[Y](f: X => Y): Box[Y] = ((bx: Box[X]) => new Box(f(bx.x)))(this) @@ -8,7 +8,7 @@ object Test { def map2[X, Y](self: Box[X], f: X => Y): Box[Y] = ((bx: Box[X]) => new Box(f(bx.x)))(self) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val f = (x: Int) => x + 1 val g = (x: String) => x + x diff --git a/test/files/pos/depmet_1_pos.scala b/test/files/pos/depmet_1_pos.scala index 166e9918175c..2b4f443e6c9a 100644 --- a/test/files/pos/depmet_1_pos.scala +++ b/test/files/pos/depmet_1_pos.scala @@ -3,4 +3,4 @@ object Test { val foo = "foo" val fun : foo.type => foo.type = precise(foo) val bar : foo.type = precise(foo)(foo) -} \ No newline at end of file +} diff --git a/test/files/pos/depmet_implicit_oopsla_session.scala b/test/files/pos/depmet_implicit_oopsla_session.scala index 21588a56adf5..7e51861d60d4 100644 --- a/test/files/pos/depmet_implicit_oopsla_session.scala +++ b/test/files/pos/depmet_implicit_oopsla_session.scala @@ -60,4 +60,4 @@ object Sessions { // s.run(p, dp) def myRun = runSession(addServer, addClient) -} \ No newline at end of file +} diff --git a/test/files/pos/depmet_implicit_oopsla_session_2.scala b/test/files/pos/depmet_implicit_oopsla_session_2.scala index 5c3b78e3f595..598d3454c179 
100644 --- a/test/files/pos/depmet_implicit_oopsla_session_2.scala +++ b/test/files/pos/depmet_implicit_oopsla_session_2.scala @@ -84,4 +84,4 @@ object Sessions { // s.run(p, dp) -*/ \ No newline at end of file +*/ diff --git a/test/files/pos/depmet_implicit_oopsla_zipwith.scala b/test/files/pos/depmet_implicit_oopsla_zipwith.scala index c76d02c1aeec..c034e3ef5b7b 100644 --- a/test/files/pos/depmet_implicit_oopsla_zipwith.scala +++ b/test/files/pos/depmet_implicit_oopsla_zipwith.scala @@ -41,4 +41,4 @@ object Test { def zipWith3[A, B, C, D](f: A => B => C => D) = //: Stream[A] => Stream[B] => Stream[C] => Stream[D] = // BUG why do we need a return type? zWith(Succ(Succ(Succ(Zero()))),f) -} \ No newline at end of file +} diff --git a/test/files/pos/depmet_implicit_tpbetareduce.scala b/test/files/pos/depmet_implicit_tpbetareduce.scala index 35d260683b7f..f4da54949968 100644 --- a/test/files/pos/depmet_implicit_tpbetareduce.scala +++ b/test/files/pos/depmet_implicit_tpbetareduce.scala @@ -9,4 +9,4 @@ trait HOSeq { type m[+x] = List[x] def accumulator[t]: Accumulator[List, t] = listAccumulator[t] } -} \ No newline at end of file +} diff --git a/test/files/pos/dotless-targs-ranged.scala b/test/files/pos/dotless-targs-ranged.scala index 67d4f8d80dcb..935e1dd8f8a9 100644 --- a/test/files/pos/dotless-targs-ranged.scala +++ b/test/files/pos/dotless-targs-ranged.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:true +// class A { def fn1 = List apply 1 def fn2 = List apply[Int] 2 diff --git a/test/files/pos/dotless-targs.scala b/test/files/pos/dotless-targs.scala index e88f7206dc6f..7e208d5198c5 100644 --- a/test/files/pos/dotless-targs.scala +++ b/test/files/pos/dotless-targs.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:false +//> using options -Yrangepos:false class A { def fn1 = List apply 1 def fn2 = List apply[Int] 2 diff --git a/test/files/pos/elidable-tparams.scala b/test/files/pos/elidable-tparams.scala index 23b1cba61570..d65478bcb73e 100644 --- 
a/test/files/pos/elidable-tparams.scala +++ b/test/files/pos/elidable-tparams.scala @@ -7,4 +7,4 @@ class ElidableCrashTest { @elidable(MINIMUM) def foo[a >: My <: My]: scala.Unit = () foo[My] // crash -} \ No newline at end of file +} diff --git a/test/files/pos/exhaust_alternatives.scala b/test/files/pos/exhaust_alternatives.scala index 07bd1e01775e..dfa3e0919f23 100644 --- a/test/files/pos/exhaust_alternatives.scala +++ b/test/files/pos/exhaust_alternatives.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings sealed abstract class X sealed case class A(x: Boolean) extends X case object B extends X diff --git a/test/files/pos/exhaustive_heuristics.scala b/test/files/pos/exhaustive_heuristics.scala index 297900510b2a..5fb709c52ce2 100644 --- a/test/files/pos/exhaustive_heuristics.scala +++ b/test/files/pos/exhaustive_heuristics.scala @@ -23,4 +23,4 @@ object Test { case Nil => } -} \ No newline at end of file +} diff --git a/test/files/pos/existental-slow-compile2.scala b/test/files/pos/existental-slow-compile2.scala deleted file mode 100644 index 907344982c6e..000000000000 --- a/test/files/pos/existental-slow-compile2.scala +++ /dev/null @@ -1,7 +0,0 @@ -class C { - class L[+A] - def test = { - val foo: - L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_]]]]]]]]]]]]]]]]]]]]]]]] - = ??? 
} } - diff --git a/test/files/pos/existential-function-pt.scala b/test/files/pos/existential-function-pt.scala new file mode 100644 index 000000000000..bd8b9242c210 --- /dev/null +++ b/test/files/pos/existential-function-pt.scala @@ -0,0 +1,7 @@ +object Test { + def foo(a: Function[String, _ <: String]): a.type = a + foo(x => x) + + def foo(a: PartialFunction[String, _ <: String]): a.type = a + foo({ case x => x }) +} diff --git a/test/files/pos/existential-slow-compile1.scala b/test/files/pos/existential-slow-compile1.scala index bd7407e32fd0..9ebfcc0e46ee 100644 --- a/test/files/pos/existential-slow-compile1.scala +++ b/test/files/pos/existential-slow-compile1.scala @@ -1,8 +1,8 @@ -// scalac: -Ystop-after:refchecks +//> using options -Ystop-after:refchecks class C { type L[+A] = scala.collection.immutable.List[A] def test = { val foo: - L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_]]]]]]]]]]]]]]]]]]]]]]]] + L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_]]]]]]]]]]]]]]]]]]]]]]]]] = ??? } } diff --git a/test/files/pos/existential-slow-compile2.scala b/test/files/pos/existential-slow-compile2.scala new file mode 100644 index 000000000000..3c779b477e73 --- /dev/null +++ b/test/files/pos/existential-slow-compile2.scala @@ -0,0 +1,8 @@ +//> using options -Ystop-after:refchecks +class C { + class L[+A] + def test = { + val foo: + L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_]]]]]]]]]]]]]]]]]]]]]]]]] + = ??? 
} } + diff --git a/test/files/pos/extractor-types.scala b/test/files/pos/extractor-types.scala index bb9659a13c84..200279be6ffe 100644 --- a/test/files/pos/extractor-types.scala +++ b/test/files/pos/extractor-types.scala @@ -16,7 +16,7 @@ package p2 { object Baz { def unapply(x: Any): Option[Quux] = None } } trait Reifiers { - def f() { + def f(): Unit = { val u2: Other = null (null: Any) match { case u2.Baz(x) => println(x) } //: u2.Quux) } // The underlying error was: type mismatch; diff --git a/test/files/pos/forcomp-treepos.scala b/test/files/pos/forcomp-treepos.scala new file mode 100644 index 000000000000..8e48a749c59f --- /dev/null +++ b/test/files/pos/forcomp-treepos.scala @@ -0,0 +1,5 @@ +//> using options -Yvalidate-pos:typer +object A { + def foo(list: List[String]) = for (string <- list if string.length > 5) + println(string) +} diff --git a/test/files/pos/gadts2.scala b/test/files/pos/gadts2.scala index d77c8a7ba4cc..b67bafb32638 100644 --- a/test/files/pos/gadts2.scala +++ b/test/files/pos/gadts2.scala @@ -8,7 +8,7 @@ object Test { case class Cell[a](var x: a) extends Term[a] final case class NumTerm(val n: Number) extends Term[Number] - def f[a](t: Term[a], c: Cell[a]) { + def f[a](t: Term[a], c: Cell[a]): Unit = { t match { case NumTerm(n) => c.x = MyDouble(1.0) } @@ -16,7 +16,7 @@ object Test { val x: Term[Number] = NumTerm(MyInt(5)) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val cell = Cell[Number](MyInt(6)) Console.println(cell) f[Number](new NumTerm(MyInt(5)), cell) diff --git a/test/files/pos/gen-traversable-methods.scala b/test/files/pos/gen-traversable-methods.scala index bc720742cfb8..b59cd33c769b 100644 --- a/test/files/pos/gen-traversable-methods.scala +++ b/test/files/pos/gen-traversable-methods.scala @@ -7,7 +7,7 @@ import collection._ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val gen: GenTraversable[Int] = List(1, 2, 3) gen.head gen.headOption diff --git 
a/test/files/pos/generic-sigs.scala b/test/files/pos/generic-sigs.scala index f1293f8da12f..841849e70c21 100644 --- a/test/files/pos/generic-sigs.scala +++ b/test/files/pos/generic-sigs.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings import language.existentials object A { diff --git a/test/files/pos/gosh.scala b/test/files/pos/gosh.scala index 98fae8a31e00..81af140163df 100644 --- a/test/files/pos/gosh.scala +++ b/test/files/pos/gosh.scala @@ -9,25 +9,25 @@ object ShapeTest extends App { } class Line(s: Point, e: Point) extends Shape { - def draw() { Console.println("draw line " + s + "," + e) } + def draw(): Unit = { Console.println("draw line " + s + "," + e) } } abstract class Foo { type T <: Object def show(o: T): Unit - def print() { Console.println("in Foo") } + def print(): Unit = { Console.println("in Foo") } } abstract class ShapeFoo extends Foo { type T <: Shape - def show(o: T) { o.draw() } - override def print() { Console.println("in ShapeFoo") } + def show(o: T): Unit = { o.draw() } + override def print(): Unit = { Console.println("in ShapeFoo") } } class LineFoo extends ShapeFoo { type T = Line - override def print() { Console.println("in LineFoo") } + override def print(): Unit = { Console.println("in LineFoo") } } val p1 = new Point(1,4) @@ -38,7 +38,7 @@ object ShapeTest extends App { val l = new ShapeFoo { // ** // type T = Line // ** // - override def print() { Console.println("in LineFoo") } // ** // + override def print(): Unit = { Console.println("in LineFoo") } // ** // } l.show(l1) // ** // } diff --git a/test/files/pos/gui.scala b/test/files/pos/gui.scala index 3b4f49cf5562..0504fb4354da 100644 --- a/test/files/pos/gui.scala +++ b/test/files/pos/gui.scala @@ -20,10 +20,10 @@ trait Screen { } object DummyScreen extends Screen { - def drawRect(r: Geom.Rectangle, c: Color) { + def drawRect(r: Geom.Rectangle, c: Color): Unit = { Console.println("draw " + r + " with " + c) } - def fillRect(r: 
Geom.Rectangle, c: Color) { + def fillRect(r: Geom.Rectangle, c: Color): Unit = { Console.println("fill " + r + " with " + c) } } @@ -37,7 +37,7 @@ object GUI { trait Glyph { def getRect: Geom.Rectangle def setLoc(p: Geom.Point): Unit - def draw() { Console.println("draw " + this) } + def draw(): Unit = { Console.println("draw " + this) } } class Label(scr: Screen, p: Geom.Point, name: String) extends Glyph { @@ -61,7 +61,7 @@ object GUI { val label = new Label(scr, p, name) /* Glyph methods */ - override def draw() { + override def draw(): Unit = { if (enabled) scr.drawRect(getRect, Color.black) else scr.fillRect(getRect, Color.grey); label.draw(); @@ -72,7 +72,7 @@ object GUI { /* Ctl methods */ def enable(b: Boolean): this.type = { enabled = b; draw(); this } def getGlyph = label - final def mouseDown(p: Geom.Point) { + final def mouseDown(p: Geom.Point): Unit = { if (enabled) doit() else Console.println("button is disabled"); } /* deferred method to be specified by client */ @@ -83,15 +83,15 @@ object GUI { object GUIClient { class App { - def quit() { Console.println("application exited") } + def quit(): Unit = { Console.println("application exited") } } class QuitButton (scr: Screen, p: Geom.Point, name: String, a: App) extends GUI.Button(scr, p, name) { - def doit() { a.quit() } + def doit(): Unit = { a.quit() } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val b = new QuitButton( DummyScreen, Geom.Point(1, 1), "quit", new App); b.draw(); diff --git a/test/files/pos/higherKinds.scala b/test/files/pos/higherKinds.scala new file mode 100644 index 000000000000..27f43ed0d061 --- /dev/null +++ b/test/files/pos/higherKinds.scala @@ -0,0 +1,5 @@ +//> using options -feature -Werror +object Test { + type F[A] <: List[A] + def foo[G[X] <: List[X]](x: F[Int]): F[Int] = x +} diff --git a/test/files/pos/hk-existential-subtype.scala b/test/files/pos/hk-existential-subtype.scala new file mode 100644 index 000000000000..9528c404545e --- 
/dev/null +++ b/test/files/pos/hk-existential-subtype.scala @@ -0,0 +1,6 @@ +//> using options -language:higherKinds,existentials -Xfatal-warnings +class Functor[F[_]] +object Functor { + val someF: Functor[F] forSome { type F[_] } = new Functor[Option] + val someG: Functor[G] forSome { type G[x] <: List[x] } = new Functor[::] +} diff --git a/test/files/pos/hk-infer.scala b/test/files/pos/hk-infer.scala index 30e347640421..ff8df638dda7 100644 --- a/test/files/pos/hk-infer.scala +++ b/test/files/pos/hk-infer.scala @@ -16,7 +16,7 @@ object DoesWork { // Testing the not giving of explicit Booper[M] arguments. object ShouldWorkHK { - class Booper[M[_]](xs: Seq[M[_]]) extends collection.generic.SeqForwarder[M[_]] { + class Booper[M[_]](xs: Seq[M[_]]) { def underlying = xs def BOOP(ys: Seq[M[_]]) = new Booper(xs ++ ys) } @@ -26,7 +26,7 @@ object ShouldWorkHK { } object DoesWorkHK { - class Booper[M[_]](xs: Seq[M[_]]) extends collection.generic.SeqForwarder[M[_]] { + class Booper[M[_]](xs: Seq[M[_]]) { def underlying = xs def BOOP(ys: Seq[M[_]]) = new Booper[M](xs ++ ys) } diff --git a/test/files/pos/hk-paths.scala b/test/files/pos/hk-paths.scala new file mode 100644 index 000000000000..f3402bdc6868 --- /dev/null +++ b/test/files/pos/hk-paths.scala @@ -0,0 +1,61 @@ +object Test { + // scala/bug#12142 + trait Bounds { + type Upper <: Bounds + } + + trait Narrow extends Bounds { + type Upper >: Narrow <: Bounds + } + + trait Template[+X <: Bounds] extends Bounds { + val body :X + type Bound >: body.Upper <: Bounds + type Copy[+A <: Bound] <: Template[A] + type High[T[+A <: Narrow] <: Bounds] + + def applied(narrow: Template[Narrow]): High[narrow.Copy] //ok + def indirect(narrow: Template[Narrow]): High[({ type T[+A <: Narrow] = narrow.Copy[A] })#T] //also ok + } + + trait Expr[X, E] { + def applyTo[F[A >: E <: E]] :Expr[X, F[E]] + } + + trait Functor[F, A <: U, U] { + type Apply[+X <: U] + type Super[+X >: A <: U] >: F + } + + trait Implicit[X, Y] { + type S >: Y + val expr 
:Expr[X, S] + } + + def test[F, A <: U, U, X](fun :Functor[F, A, U], x :Implicit[X, A] { type S >: A <: U }) = { + x.expr.applyTo[({ type E[B >: A <: U] = fun.Super[B] })#E] + x.expr.applyTo[fun.Super] + } + + // scala/bug#10186 + trait Foo { + type A + type F[_ <: A] + } + + def noop[A, F[_ <: A]]: Unit = () + def f(foo: Foo): Unit = noop[foo.A, foo.F] + + // scala/bug#9625 + trait `* -> *`[F[_]] + + trait Bar { + type Qux[A] + implicit val `* -> *`: `* -> *`[Qux] + } + + def foo(bar: Bar): `* -> *`[bar.Qux] = { + import bar._ + implicitly[`* -> *`[bar.Qux]] + } +} diff --git a/test/files/pos/hkarray.scala b/test/files/pos/hkarray.scala index 11e7b50f53ca..93457452dbbb 100644 --- a/test/files/pos/hkarray.scala +++ b/test/files/pos/hkarray.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -language:higherKinds +//> using options -Xfatal-warnings -language:higherKinds trait Foo[CC[_]] { } class Bip { diff --git a/test/files/pos/hklub0.scala b/test/files/pos/hklub0.scala index 8f68aab4eaac..6ed3f105cfd0 100644 --- a/test/files/pos/hklub0.scala +++ b/test/files/pos/hklub0.scala @@ -1,5 +1,6 @@ +class GenericCompanion[+CC[X] <: Iterable[X]] object Test { - val a : scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq] = null - val b : scala.collection.generic.GenericCompanion[scala.collection.mutable.Seq] = null - List(a, b) // immutable.this.List.apply[scala.collection.generic.GenericCompanion[Seq]](Test.this.a, Test.this.b) -} \ No newline at end of file + val a : GenericCompanion[scala.collection.immutable.Seq] = null + val b : GenericCompanion[scala.collection.mutable.Seq] = null + List(a, b) // immutable.this.List.apply[GenericCompanion[Seq]](Test.this.a, Test.this.b) +} diff --git a/test/files/pos/i10715a.scala b/test/files/pos/i10715a.scala new file mode 100644 index 000000000000..db1b243b2c78 --- /dev/null +++ b/test/files/pos/i10715a.scala @@ -0,0 +1,31 @@ +class Parent { + def f(x: Int): Parent = ??? 
+ def f: Int = 0 + + def g[A](x: Int): Parent = ??? + def g[A]: Int = 0 +} + +// For the issue to show up, there must be a subclass that overrides +// one of the two methods. +class Sub extends Parent { + override def f(x: Int): Parent = ??? + override def g[A](x: Int): Parent = ??? +} + +class C { + def test(c: Sub): Unit = { + c.f(1) // already worked + c.f + c.f.+(0) + c.f.toString + + c.g(0) // already worked + c.g + c.g[Int] + c.g.+(0) + c.g.toString + c.g[Int].+(0) + c.g.toString + } +} diff --git a/test/files/pos/i10715b/C_1.java b/test/files/pos/i10715b/C_1.java new file mode 100644 index 000000000000..8973027a0c3c --- /dev/null +++ b/test/files/pos/i10715b/C_1.java @@ -0,0 +1,16 @@ +class C_1 { + + public int f() { + return 0; + } + public C_1 f(int x) { + return null; + } +} + +class Child extends C_1 { + @Override + public C_1 f(int x) { + return null; + } +} diff --git a/test/files/pos/i10715b/caller_2.scala b/test/files/pos/i10715b/caller_2.scala new file mode 100644 index 000000000000..141d606bf3c9 --- /dev/null +++ b/test/files/pos/i10715b/caller_2.scala @@ -0,0 +1,15 @@ +class C { + def test(c: Child): Unit = { + c.f() // always ok + c.f // should work too + c.f(1) + c.f.toString + } + + // The issue was first detected on NIO buffers, + // (on Java 11+), so these should pass now. 
+ def buffer(c: java.nio.ByteBuffer): Unit = { + c.position + c.position(10).position.toString + } +} diff --git a/test/files/pos/i11371.scala b/test/files/pos/i11371.scala new file mode 100644 index 000000000000..c895d83e6d20 --- /dev/null +++ b/test/files/pos/i11371.scala @@ -0,0 +1,21 @@ +//> using options -Xsource:3 +// +object HelloWorld { + def whileLoop: Int = { + var i = 0 + var acc = 0 + while (i < 3) { + var `i'` = 0 + while (`i'` < 4) { + acc += (i * `i'`) + `i'` += 1 + } + i += 1 + } + acc + } + + def main(args: Array[String]): Unit = { + println(s"hello world: ${whileLoop}") + } +} diff --git a/test/files/pos/i13282.scala b/test/files/pos/i13282.scala new file mode 100644 index 000000000000..1872900aabe7 --- /dev/null +++ b/test/files/pos/i13282.scala @@ -0,0 +1,14 @@ +class Ptr[T](var value: T) { + def `unary_!` : T = value + def `unary_!_=`(value: T): Unit = this.value = value +} + +object Test extends App { + def test = { + val x = new Ptr(9) + !x = 10 + !{ println("hi") ; x } = 11 + println(!x) + } + test +} diff --git a/test/files/pos/i20006-java/J.java b/test/files/pos/i20006-java/J.java new file mode 100644 index 000000000000..65705b33d256 --- /dev/null +++ b/test/files/pos/i20006-java/J.java @@ -0,0 +1,4 @@ +public class J { + private String mo; + public String mo() { return this.mo; } +} diff --git a/test/files/pos/i20006-java/T.scala b/test/files/pos/i20006-java/T.scala new file mode 100644 index 000000000000..4b3aaa7c92f2 --- /dev/null +++ b/test/files/pos/i20006-java/T.scala @@ -0,0 +1,5 @@ +//> using options -Ypickle-java + +class T { + new J().mo(); +} diff --git a/src/reflect/scala/reflect/internal/Required.scala b/test/files/pos/i20026/Empty.java similarity index 100% rename from src/reflect/scala/reflect/internal/Required.scala rename to test/files/pos/i20026/Empty.java diff --git a/test/files/pos/i20026/JFun.java b/test/files/pos/i20026/JFun.java new file mode 100644 index 000000000000..d2109909c3e0 --- /dev/null +++ 
b/test/files/pos/i20026/JFun.java @@ -0,0 +1,4 @@ +@FunctionalInterface +public interface JFun { + String f(String s); +} diff --git a/test/files/pos/i20026/JTest.java b/test/files/pos/i20026/JTest.java new file mode 100644 index 000000000000..c91f533383b5 --- /dev/null +++ b/test/files/pos/i20026/JTest.java @@ -0,0 +1,4 @@ + +@api.TestInstance(api.TestInstance.Lifecycle.PER_CLASS) +public class JTest { +} diff --git a/test/files/pos/i20026/KTest.java b/test/files/pos/i20026/KTest.java new file mode 100644 index 000000000000..9b32932e780c --- /dev/null +++ b/test/files/pos/i20026/KTest.java @@ -0,0 +1,6 @@ + +import api.*; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public class KTest { +} diff --git a/test/files/pos/i20026/TestInstance.java b/test/files/pos/i20026/TestInstance.java new file mode 100644 index 000000000000..11cca9698664 --- /dev/null +++ b/test/files/pos/i20026/TestInstance.java @@ -0,0 +1,20 @@ + +package api; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Inherited +@Documented +public @interface TestInstance { + enum Lifecycle { + PER_CLASS; + } + Lifecycle value(); +} diff --git a/test/files/pos/i20026/test.scala b/test/files/pos/i20026/test.scala new file mode 100644 index 000000000000..838788d85923 --- /dev/null +++ b/test/files/pos/i20026/test.scala @@ -0,0 +1,6 @@ + +object Test extends App { + println { + (new JTest, new KTest, ((s: String) => s): JFun) + } +} diff --git a/test/files/pos/i21319/Foo.java b/test/files/pos/i21319/Foo.java new file mode 100644 index 000000000000..1240d014b7e7 --- /dev/null +++ b/test/files/pos/i21319/Foo.java @@ -0,0 +1,8 @@ +package app; + +import java.util.Optional; +import lib.*; + +public class Foo { + private java.util.@lib.Valid 
Optional userId; +} diff --git a/test/files/pos/i21319/Test.scala b/test/files/pos/i21319/Test.scala new file mode 100644 index 000000000000..a85c8f461aab --- /dev/null +++ b/test/files/pos/i21319/Test.scala @@ -0,0 +1,3 @@ +package app + +class Test diff --git a/test/files/pos/i21319/Valid.java b/test/files/pos/i21319/Valid.java new file mode 100644 index 000000000000..17e0e1173726 --- /dev/null +++ b/test/files/pos/i21319/Valid.java @@ -0,0 +1,17 @@ +package lib; + +import static java.lang.annotation.ElementType.CONSTRUCTOR; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +@Target({ METHOD, FIELD, CONSTRUCTOR, PARAMETER, TYPE_USE }) +@Retention(RUNTIME) +@Documented +public @interface Valid {} diff --git a/test/files/pos/i6705.scala b/test/files/pos/i6705.scala new file mode 100644 index 000000000000..6f0d0327b6d2 --- /dev/null +++ b/test/files/pos/i6705.scala @@ -0,0 +1,15 @@ +trait StringTempl { + def mkString: String + def mkString(x: String): String +} + +object Test { + implicit class extension(val x: String) { + def shouldBe(y: String): Boolean = ??? 
+ } + + def test(tmpl: StringTempl): Unit = { + tmpl.mkString shouldBe "hello" // error + tmpl.mkString(", world") shouldBe "hello, world" + } +} diff --git a/test/files/pos/ifunc.scala b/test/files/pos/ifunc.scala new file mode 100644 index 000000000000..45b221fcdb4e --- /dev/null +++ b/test/files/pos/ifunc.scala @@ -0,0 +1,30 @@ + +trait T { + val f = if (_) 42 else 17 + + val p = while (_) println() // weird + + val q = do println while (_) // weird + + val g = (b: () => Boolean) => while (b()) println() // less weird +} + +import language.implicitConversions + +class Setting { + def tap(f: this.type => Unit): this.type = { f(this); this } +} +class BooleanSetting extends Setting { + def tap2(f: BooleanSetting => Unit): BooleanSetting = { f(this); this } +} +object BooleanSetting { + implicit def cv(s: BooleanSetting): Boolean = true +} + +object Test extends App { + val setting = new Setting().tap(println) + val boolean = new BooleanSetting().tap(if (_) println("yes")) + val bool1 = new BooleanSetting().tap(s => if (s) println("yes")) + val bool2a = new BooleanSetting().tap2(s => if (s) println("yes")) + val bool2b = new BooleanSetting().tap2(if (_) println("yes")) +} diff --git a/test/files/pos/ignore-wperf.scala b/test/files/pos/ignore-wperf.scala new file mode 100644 index 000000000000..b63323bd07fb --- /dev/null +++ b/test/files/pos/ignore-wperf.scala @@ -0,0 +1,11 @@ +//> using options -Werror -Wperformance -Wconf:cat=lint-performance:s + +class C { + var x = 0 + + def f: (Int => Int) = { + var y = x + z => y + 1 + } +} +//test/files/neg/ignore-wperf.scala:7: warning: Modification of variable y within a closure causes it to be boxed. 
diff --git a/test/files/pos/implicit-anyval-2.10.scala b/test/files/pos/implicit-anyval-2.10.scala deleted file mode 100644 index 918eebc0729e..000000000000 --- a/test/files/pos/implicit-anyval-2.10.scala +++ /dev/null @@ -1,4 +0,0 @@ -// scalac: -Xsource:2.10 -object Test { - "": AnyVal // newly prohibited in 2.11, allowed under -Xsourse:2.10 -} diff --git a/test/files/pos/implicit-infix-ops.scala b/test/files/pos/implicit-infix-ops.scala index 66f3718e866d..9182893539be 100644 --- a/test/files/pos/implicit-infix-ops.scala +++ b/test/files/pos/implicit-infix-ops.scala @@ -20,4 +20,4 @@ object Frac { def f1[T: Fractional](x: T, y: T, z: T) = (x + y + z) / z def f2[T: Ordering](x: T, y: T, z: T) = if (x < y) (z > y) else (x < z) -} \ No newline at end of file +} diff --git a/test/files/pos/implicits-new.scala b/test/files/pos/implicits-new.scala index 7b4f20c6c9c8..9f0daf008345 100644 --- a/test/files/pos/implicits-new.scala +++ b/test/files/pos/implicits-new.scala @@ -20,7 +20,7 @@ class C1492 { class X - def foo(x: X => X) {} + def foo(x: X => X): Unit = {} foo ( implicit x => implicitly[X] ) foo { implicit x => implicitly[X] } @@ -45,7 +45,7 @@ object Test1625 { def unwrap() = x } - implicit def byName[A](x: =>A) = new Wrapped(x) + implicit def byName[A](x: => A) = new Wrapped(x) implicit def byVal[A](x: A) = x diff --git a/test/files/pos/implicits-old.scala b/test/files/pos/implicits-old.scala index 62ae6b835c4b..28bb00f4d872 100644 --- a/test/files/pos/implicits-old.scala +++ b/test/files/pos/implicits-old.scala @@ -1,3 +1,5 @@ +import scala.reflect.ClassManifest + // #1435 object t1435 { implicit def a(s:String):String = sys.error("") @@ -17,7 +19,7 @@ class C1492 { class X - def foo(x: X => X) {} + def foo(x: X => X): Unit = {} foo ( implicit x => implicitly[X] ) foo { implicit x => implicitly[X] } @@ -42,7 +44,7 @@ object Test1625 { def unwrap() = x } - implicit def byName[A](x: =>A) = new Wrapped(x) + implicit def byName[A](x: => A) = new Wrapped(x) 
implicit def byVal[A](x: A) = x diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala index 5b215d907a58..0ffe056ee49d 100644 --- a/test/files/pos/import-future.scala +++ b/test/files/pos/import-future.scala @@ -1,5 +1,4 @@ -// scalac: -Xsource:3 -// +//> using options -Xsource:3 import java.io as jio import scala.{collection as c} @@ -26,8 +25,36 @@ class C { object starring { - import scala.concurrent.{*, given}, duration.{given, Duration as D, *}, ExecutionContext.Implicits.* + import scala.concurrent.{*, given}, duration.{Duration as D, given, *}, ExecutionContext.Implicits.* val f = Future(42) val r = Await.result(f, D.Inf) } + +trait T[A] { + def t: A +} +object T { + implicit def tInt: T[Int] = new T[Int] { + def t: Int = 42 + } + def f[A](implicit t: T[A]): A = t.t +} +object X { + import T.given + def g = T.f[Int] // was given is not a member +} + +class status_quo { + import scala.util.chaining._ + import scala.concurrent.duration._ + def f = 42.tap(println) + def g = 42.seconds +} + +class givenly { + import scala.util.chaining.given + import scala.concurrent.duration.given + def f = 42.tap(println) + def g = 42.seconds +} diff --git a/test/files/pos/infer_override_def_args.scala b/test/files/pos/infer_override_def_args.scala deleted file mode 100644 index 5d9f8645ccab..000000000000 --- a/test/files/pos/infer_override_def_args.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Yinfer-argument-types -abstract class A { def foo(a: Int): A } -class B extends A { - implicit def spackle(x: Int): A = new B - def foo(a) = a -} diff --git a/test/files/pos/infersingle.scala b/test/files/pos/infersingle.scala index 0d317e858354..60f4ff07e60d 100644 --- a/test/files/pos/infersingle.scala +++ b/test/files/pos/infersingle.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental object Test1 { def one[T](x: T): Option[T] = Some(x) val x = "one" diff --git a/test/files/pos/infixed.scala b/test/files/pos/infixed.scala new file mode 100644 index 
000000000000..ac03760491e6 --- /dev/null +++ b/test/files/pos/infixed.scala @@ -0,0 +1,10 @@ +//> using options -Xsource:3 -Xsource-features:leading-infix + +class K { def x(y: Int) = 0 } + +class Test { + def ok = { + (new K) + `x` 42 + } +} diff --git a/test/files/pos/inline-access-levels/A_1.scala b/test/files/pos/inline-access-levels/A_1.scala index 3ae205dc45a7..b8fa46ba6a71 100644 --- a/test/files/pos/inline-access-levels/A_1.scala +++ b/test/files/pos/inline-access-levels/A_1.scala @@ -1,11 +1,16 @@ -// scalac: -opt:l:inline -opt-inline-from:** -Xfatal-warnings -opt-warnings +//> using options -opt:inline:** -Wopt -Werror package test object A { private var x: Int = 0 - @inline def actOnX(f: Int => Int) = { - x = f(x) - } + @inline def actOnX(f: Int => Int) = { x = f(x) } +} + +object B { + + private[this] var x: Int = 0 + + @inline def actOnX(f: Int => Int) = { x = f(x) } } diff --git a/test/files/pos/inline-access-levels/Test_2.scala b/test/files/pos/inline-access-levels/Test_2.scala index 4bb4ac2130a7..97937c80b43d 100644 --- a/test/files/pos/inline-access-levels/Test_2.scala +++ b/test/files/pos/inline-access-levels/Test_2.scala @@ -1,12 +1,11 @@ -// scalac: -opt:l:inline -opt-inline-from:** -Xfatal-warnings -opt-warnings +//> using options -opt:inline:** -Wopt -Werror package test object Test { - def main(args: Array[String]) { - + def main(args: Array[String]): Unit = { A.actOnX(_ + 1) - + B.actOnX(_ + 1) } } diff --git a/test/files/pos/isApplicableSafe.scala b/test/files/pos/isApplicableSafe.scala index 591beb2b36a3..b4cacbf28620 100644 --- a/test/files/pos/isApplicableSafe.scala +++ b/test/files/pos/isApplicableSafe.scala @@ -5,4 +5,4 @@ class A { xs = Array(Array()) ys = Array(Map(), Map()) -} \ No newline at end of file +} diff --git a/test/files/pos/java-access-pos/S1.scala b/test/files/pos/java-access-pos/S1.scala index 10730e3a7026..9e6ae4a3be8a 100644 --- a/test/files/pos/java-access-pos/S1.scala +++ b/test/files/pos/java-access-pos/S1.scala @@ 
-64,4 +64,4 @@ class S7 extends J(1, 2) { def packageAbstract() = () def protectedAbstract() = () def publicAbstract() = () -} \ No newline at end of file +} diff --git a/test/files/pos/java-import-static-from-subclass/Test.scala b/test/files/pos/java-import-static-from-subclass/Test.scala index 7d2326b519e1..4b7e4a5d159e 100644 --- a/test/files/pos/java-import-static-from-subclass/Test.scala +++ b/test/files/pos/java-import-static-from-subclass/Test.scala @@ -1,2 +1,2 @@ -// scalac: -Ypickle-java +//> using options -Ypickle-java class Test diff --git a/test/files/pos/java-object-any-unification-1/J_1.java b/test/files/pos/java-object-any-unification-1/J_1.java new file mode 100644 index 000000000000..b4837e5fa4a4 --- /dev/null +++ b/test/files/pos/java-object-any-unification-1/J_1.java @@ -0,0 +1,16 @@ +package p1; + +public class J_1 { + public static class Map {} + + public static class A { + public synchronized void putAll(Map t) { + } + } + + public static class B extends A { + @Override + public synchronized void putAll(Map t) { + } + } +} diff --git a/test/files/pos/java-object-any-unification-1/Test_2.scala b/test/files/pos/java-object-any-unification-1/Test_2.scala new file mode 100644 index 000000000000..76907cfc6d18 --- /dev/null +++ b/test/files/pos/java-object-any-unification-1/Test_2.scala @@ -0,0 +1,5 @@ +object Test { + def test(b: p1.J_1.B) = { + b.putAll(null) + } +} diff --git a/test/files/pos/java-object-any-unification-2/J.java b/test/files/pos/java-object-any-unification-2/J.java new file mode 100644 index 000000000000..7583fd37161e --- /dev/null +++ b/test/files/pos/java-object-any-unification-2/J.java @@ -0,0 +1,16 @@ +package p1; + +public class J { + public static class Map {} + + public static class A { + public synchronized void putAll(Map t) { + } + } + + public static class B extends A { + @Override + public synchronized void putAll(Map t) { + } + } +} diff --git a/test/files/pos/java-object-any-unification-2/Test.scala 
b/test/files/pos/java-object-any-unification-2/Test.scala new file mode 100644 index 000000000000..d04a3b19e003 --- /dev/null +++ b/test/files/pos/java-object-any-unification-2/Test.scala @@ -0,0 +1,5 @@ +object Test { + def test(b: p1.J.B) = { + b.putAll(null) + } +} diff --git a/test/files/pos/javaConversions-2.10-ambiguity.scala b/test/files/pos/javaConversions-2.10-ambiguity.scala index b08568f4757f..e92185bb4fb8 100644 --- a/test/files/pos/javaConversions-2.10-ambiguity.scala +++ b/test/files/pos/javaConversions-2.10-ambiguity.scala @@ -1,10 +1,9 @@ import collection.{mutable, concurrent} -import collection.convert.ImplicitConversionsToScala._ +import collection.JavaConverters._ import java.util.concurrent.{ConcurrentHashMap => CHM} object Bar { def assertType[T](t: T) = t - val a = new CHM[String, String]() += (("", "")) + val a = new CHM[String, String]().asScala += (("", "")) assertType[concurrent.Map[String, String]](a) } -// vim: set et: diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala deleted file mode 100644 index 8d84c92b6151..000000000000 --- a/test/files/pos/javaConversions-2.10-regression.scala +++ /dev/null @@ -1,17 +0,0 @@ -import collection.{convert, mutable, concurrent, JavaConverters} -import convert.ImplicitConversionsToScala._ -import java.util.concurrent.{ConcurrentHashMap => CHM} - -object Foo { - def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] = - JavaConverters.mapAsScalaConcurrentMap(new CHM()) - - def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] = - new CHM[K, V]() -} - -object Bar { - def assertType[T](t: T) = t - val a = new CHM[String, String]() += (("", "")) - assertType[concurrent.Map[String, String]](a) -} diff --git a/test/files/pos/javaReadsSigs/fromjava.java b/test/files/pos/javaReadsSigs/fromjava.java index 92441b0c6b7a..3245c9803199 100644 --- a/test/files/pos/javaReadsSigs/fromjava.java +++ 
b/test/files/pos/javaReadsSigs/fromjava.java @@ -2,8 +2,7 @@ import scala.math.Ordering; import scala.math.Numeric; import scala.collection.Seq; -import scala.collection.Traversable; -import scala.collection.Traversable$; +import scala.collection.Iterable; import scala.collection.immutable.Set; import scala.collection.immutable.HashSet; import scala.collection.immutable.Map; @@ -11,7 +10,6 @@ import scala.collection.immutable.HashMap; import scala.collection.immutable.Vector; import scala.collection.immutable.List; -import scala.collection.generic.CanBuildFrom; class A { }; class B { }; @@ -48,28 +46,30 @@ public static String vector(Vector x) { return y.head(); } public static String list(List x) { - List y = x.drop(2); + // Needs cast since 2.13, as `drop` is not overridden in List. + // 2.12 has the same issue for methods that are not overridden, e.g., dropRight + List y = (List)x.drop(2); return y.head(); } public static Tuple2 map(Map x) { - Traversable> y = x.drop(2); + Iterable> y = x.drop(2); return y.head(); } - public static Object sum(Traversable x) { + public static Object sum(Iterable x) { return x.sum(Contra.intNum); } // Looks like sum as given below fails under java5, so disabled. // // [partest] testing: [...]/files/pos/javaReadsSigs [FAILED] - // [partest] files/pos/javaReadsSigs/fromjava.java:62: name clash: sum(scala.collection.Traversable
    ) and sum(scala.collection.Traversable) have the same erasure - // [partest] public static B sum(Traversable x) { + // [partest] files/pos/javaReadsSigs/fromjava.java:62: name clash: sum(scala.collection.Iterable) and sum(scala.collection.Iterable) have the same erasure + // [partest] public static B sum(Iterable x) { // [partest] ^ // // // can't make this work with an actual CanBuildFrom: see #4389. - // public static B sum(Traversable x) { - // // have to cast it unfortunately: map in TraversableLike returns + // public static B sum(Iterable x) { + // // have to cast it unfortunately: map in IterableLike returns // // "That" and such types seem to be signature poison. - // return ((Traversable)x.map(f1, null)).head(); + // return ((Iterable)x.map(f1, null)).head(); // } } \ No newline at end of file diff --git a/test/files/pos/lazyref-autoapply.scala b/test/files/pos/lazyref-autoapply.scala new file mode 100644 index 000000000000..37642091d579 --- /dev/null +++ b/test/files/pos/lazyref-autoapply.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Xsource:3 -Xlint + +object Test { + def doti(i: Int): Product = { + case class Dot(i: Int) // was: warning: auto-application to `()` is deprecated ... + Dot(i) + } +} +object t11890 { + // was: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method unary_!, ... + lazy val x = 5 +} diff --git a/test/files/pos/leading-infix-op.scala b/test/files/pos/leading-infix-op.scala new file mode 100644 index 000000000000..2aead2c55541 --- /dev/null +++ b/test/files/pos/leading-infix-op.scala @@ -0,0 +1,18 @@ +//> using options -Xsource:3 -Xsource-features:leading-infix + +trait T { + def f(x: Int): Boolean = + x < 0 + || + x > 0 + && + x != 3 + + def g(x: Option[Int]) = x match { + case Some(err) => + println("hi") + ??? + case None => + ??? 
+ } +} diff --git a/test/files/pos/leibniz-liskov.scala b/test/files/pos/leibniz-liskov.scala new file mode 100644 index 000000000000..d9da59ca266a --- /dev/null +++ b/test/files/pos/leibniz-liskov.scala @@ -0,0 +1,36 @@ +trait LeibnizLiskov { + type A // instead of picking some concrete type, use a totally unknown, abstract one + type B + type SA <: A + type SB >: B + + implicitly[A =:= A] + implicitly[B =:= B] + def aEqB: A =:= B + + type SASub[+X] = SA <:< X + (implicitly[B <:< SB].compose(aEqB.substituteCo[SASub](implicitly[SASub[A]]))): SA <:< SB + (aEqB.substituteCo[SASub](implicitly[SASub[A]]).andThen(implicitly[B <:< SB])): SA <:< SB + + // checks that inference is working (no explicit types on xs) + def A(): A + def B(): B + locally { val xs = aEqB.substituteCo (List(A(), A(), A())); implicitly[xs.type <:< List[B]] } + locally { val xs = aEqB.substituteContra (List(B(), B(), B())); implicitly[xs.type <:< List[A]] } + locally { val xs = aEqB.flip.liftCo[List](List(B(), B(), B())); implicitly[xs.type <:< List[A]] } + def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteCo (l) + def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteContra(l) + + implicitly[A <:< A] + implicitly[B <:< B] + val aSubB = { implicit val aEqB0 = aEqB ; implicitly[A <:< B] } + val bSubA = { implicit val bEqA0 = aEqB.flip; implicitly[B <:< A] } + type From[X] = { type L[+Y] = X => Y } + type To [X] = { type L[-Y] = Y => X } + locally { val f = bSubA.substituteCo [From[A]#L](aSubB(_)); implicitly[f.type <:< (A => A)] } + locally { val f = aSubB.substituteContra[To [A]#L](bSubA(_)); implicitly[f.type <:< (A => A)] } + def convertSub[T, U](l: List[T])(ev: T <:< U): List[U] = ev.liftCo[List](l) + type Consumes[-X] = X => Unit + def convertConsume1[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteContra(c) + def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.liftContra[Consumes](c) +} diff --git 
a/test/files/pos/looping-jsig.scala b/test/files/pos/looping-jsig.scala index 6e3313c463bc..aa6732454a3e 100644 --- a/test/files/pos/looping-jsig.scala +++ b/test/files/pos/looping-jsig.scala @@ -11,7 +11,7 @@ trait BugTrack { def giveMeSame = this } - def amethod[T](p: =>A[T]): A[T] = A(in => cache.get(p) match { + def amethod[T](p: => A[T]): A[T] = A(in => cache.get(p) match { case Some(res) => res case None => p(in) }).giveMeSame.asInstanceOf[A[T]] diff --git a/test/files/pos/lub-dealias-widen.scala b/test/files/pos/lub-dealias-widen.scala index 8d26708d6518..d09a3abf8e4d 100644 --- a/test/files/pos/lub-dealias-widen.scala +++ b/test/files/pos/lub-dealias-widen.scala @@ -31,4 +31,4 @@ object Test { // do not conform to method &'s type parameter bounds // [G <: H,H >: Int => (Int => String)] val s = r & r2 -} \ No newline at end of file +} diff --git a/test/files/pos/lub-from-hell.scala b/test/files/pos/lub-from-hell.scala index 752ce889d790..ce492e6acc9c 100644 --- a/test/files/pos/lub-from-hell.scala +++ b/test/files/pos/lub-from-hell.scala @@ -1,6 +1,11 @@ +// This test case minimizes a case that failed to compile due to a bug in my work on +// scala/bug#5294. After refining my patches, it compiles again, as expected. class Test { trait Tree def foo(b: Boolean, buf: collection.mutable.ArrayBuffer[Any], acc: StringBuilder) = if (b) buf else acc + + // In the 2.12 collection implementation, `bar` and `baz` used to produce ill-bounded LUBs. + // In the 2.13 collections, they work fine, but the underlying bug still exists. See `run/invalid-lubs.scala` + def bar(a: Boolean, b: collection.mutable.Set[Any], c: collection.mutable.ListBuffer[Any]) = if (a) b else c + def baz(a: Boolean, b: scala.collection.mutable.SetOps[Any,scala.collection.mutable.Set,scala.collection.mutable.Set[Any]], c: scala.collection.mutable.Buffer[Any]) = if (a) b else c } -// This test case minimizes a case that failed to compile due to a bug in my work on -// scala/bug#5294. 
After refining my patches, it compiles again, as expected. \ No newline at end of file diff --git a/test/files/pos/macro-annot-unused-param-b/Macros_1.scala b/test/files/pos/macro-annot-unused-param-b/Macros_1.scala new file mode 100644 index 000000000000..3ab8d0bc820c --- /dev/null +++ b/test/files/pos/macro-annot-unused-param-b/Macros_1.scala @@ -0,0 +1,22 @@ +//> using options -Ymacro-annotations +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context +import scala.annotation.StaticAnnotation + +object Macros { + def annotImpl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + import c.universe._ + val classTree = annottees.head.tree + val objectTree = q""" + object X { + def f: Int => String = { x => "hello" } + } + """ + + c.Expr[Any](Block(List(classTree, objectTree), Literal(Constant(())))) + } +} + +class mymacro extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = macro Macros.annotImpl +} diff --git a/test/files/pos/macro-annot-unused-param-b/Test_2.scala b/test/files/pos/macro-annot-unused-param-b/Test_2.scala new file mode 100644 index 000000000000..c2e959094c22 --- /dev/null +++ b/test/files/pos/macro-annot-unused-param-b/Test_2.scala @@ -0,0 +1,7 @@ +//> using options -Ymacro-annotations -Werror -Wmacros:before -Wunused:params +@mymacro +class X + +object Test { + println(X.f(123)) +} diff --git a/test/files/pos/macro-annot-unused-param/Macros_1.scala b/test/files/pos/macro-annot-unused-param/Macros_1.scala new file mode 100644 index 000000000000..3ab8d0bc820c --- /dev/null +++ b/test/files/pos/macro-annot-unused-param/Macros_1.scala @@ -0,0 +1,22 @@ +//> using options -Ymacro-annotations +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context +import scala.annotation.StaticAnnotation + +object Macros { + def annotImpl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + import c.universe._ + val classTree = annottees.head.tree + val objectTree = q""" + object 
X { + def f: Int => String = { x => "hello" } + } + """ + + c.Expr[Any](Block(List(classTree, objectTree), Literal(Constant(())))) + } +} + +class mymacro extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = macro Macros.annotImpl +} diff --git a/test/files/pos/macro-annot-unused-param/Test_2.scala b/test/files/pos/macro-annot-unused-param/Test_2.scala new file mode 100644 index 000000000000..b6fe87874ef7 --- /dev/null +++ b/test/files/pos/macro-annot-unused-param/Test_2.scala @@ -0,0 +1,7 @@ +//> using options -Ymacro-annotations -Werror -Wmacros:default -Wunused:params +@mymacro +class X + +object Test { + println(X.f(123)) +} diff --git a/test/files/pos/macro-annot/t12366.check b/test/files/pos/macro-annot/t12366.check new file mode 100644 index 000000000000..de47a31a6b4e --- /dev/null +++ b/test/files/pos/macro-annot/t12366.check @@ -0,0 +1 @@ +warning: 2 deprecations; re-run with -deprecation for details diff --git a/test/files/pos/macro-annot/t12366.scala b/test/files/pos/macro-annot/t12366.scala new file mode 100644 index 000000000000..885919ec411d --- /dev/null +++ b/test/files/pos/macro-annot/t12366.scala @@ -0,0 +1,15 @@ +//> using options -Ymacro-annotations +object Test extends App { + + @deprecated + class Inner() { + } + + lazy val Inner = new Inner() + + @deprecated + class Inner2() { + } + + val Inner2 = new Inner2() +} diff --git a/test/files/pos/macro-attachments/Macros_1.scala b/test/files/pos/macro-attachments/Macros_1.scala index 38d05d5b85db..283223f6f258 100644 --- a/test/files/pos/macro-attachments/Macros_1.scala +++ b/test/files/pos/macro-attachments/Macros_1.scala @@ -16,4 +16,4 @@ object Macros { } def foo: Any = macro impl -} \ No newline at end of file +} diff --git a/test/files/pos/macro-attachments/Test_2.scala b/test/files/pos/macro-attachments/Test_2.scala index acfddae94215..5d19639cddff 100644 --- a/test/files/pos/macro-attachments/Test_2.scala +++ b/test/files/pos/macro-attachments/Test_2.scala @@ -1,3 +1,3 @@ 
object Test extends App { Macros.foo -} \ No newline at end of file +} diff --git a/test/files/pos/macro-bounds-check/MacroImpl_1.scala b/test/files/pos/macro-bounds-check/MacroImpl_1.scala index fc76a03ddf4c..b419dc9e01e3 100644 --- a/test/files/pos/macro-bounds-check/MacroImpl_1.scala +++ b/test/files/pos/macro-bounds-check/MacroImpl_1.scala @@ -28,6 +28,7 @@ class DerivationMacros(val c: whitebox.Context) { q""" { def e(a: $R): Object = a + println("encode hlist") Predef.??? } """ @@ -39,7 +40,7 @@ class DerivationMacros(val c: whitebox.Context) { q""" { def e(a: $R): Object = a - + println("encode coproduct") Predef.??? } """ diff --git a/test/files/pos/macro-bundle-disambiguate-bundle.scala b/test/files/pos/macro-bundle-disambiguate-bundle.scala index 04809317e126..40d965b0e764 100644 --- a/test/files/pos/macro-bundle-disambiguate-bundle.scala +++ b/test/files/pos/macro-bundle-disambiguate-bundle.scala @@ -11,4 +11,4 @@ object Macros { object Test extends App { def foo: Unit = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/pos/macro-bundle-disambiguate-nonbundle.scala b/test/files/pos/macro-bundle-disambiguate-nonbundle.scala index cb66f28a0b20..185177607882 100644 --- a/test/files/pos/macro-bundle-disambiguate-nonbundle.scala +++ b/test/files/pos/macro-bundle-disambiguate-nonbundle.scala @@ -11,4 +11,4 @@ object Macros { object Test extends App { def foo: Unit = macro Macros.impl -} \ No newline at end of file +} diff --git a/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala b/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala index 204a41ca94b0..a32fc61e7311 100644 --- a/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala +++ b/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings object Test1 { val `macro` = ??? 
} diff --git a/test/files/pos/macro-qmarkqmarkqmark.scala b/test/files/pos/macro-qmarkqmarkqmark.scala index a91e4320b6b2..05d48c90e7a3 100644 --- a/test/files/pos/macro-qmarkqmarkqmark.scala +++ b/test/files/pos/macro-qmarkqmarkqmark.scala @@ -1,7 +1,7 @@ import language.experimental.macros object Macros { - def foo1 = macro ??? - def foo2(x: Int) = macro ??? - def foo3[T] = macro ??? -} \ No newline at end of file + def foo1: Nothing = macro ??? + def foo2(x: Int): Nothing = macro ??? + def foo3[T]: Nothing = macro ??? +} diff --git a/test/files/pos/manifest1-new.scala b/test/files/pos/manifest1-new.scala index 3907d7854b9d..3a957bf0d69b 100644 --- a/test/files/pos/manifest1-new.scala +++ b/test/files/pos/manifest1-new.scala @@ -1,7 +1,7 @@ import scala.reflect.runtime.universe._ object Test { - def foo[T](x: T)(implicit m: TypeTag[T]) { + def foo[T](x: T)(implicit m: TypeTag[T]): Unit = { foo(List(x)) } foo(1) @@ -18,4 +18,4 @@ object Test { val d: D = new D { type T = String; val m = stringm; val x = "x" } import d.m foo(d.x) -} \ No newline at end of file +} diff --git a/test/files/pos/manifest1-old.scala b/test/files/pos/manifest1-old.scala index 8901aa74376f..b24311bd3324 100644 --- a/test/files/pos/manifest1-old.scala +++ b/test/files/pos/manifest1-old.scala @@ -1,7 +1,7 @@ import scala.reflect.Manifest object Test { - def foo[T](x: T)(implicit m: Manifest[T]) { + def foo[T](x: T)(implicit m: Manifest[T]): Unit = { foo(List(x)) } foo(1) diff --git a/test/files/pos/multiLineOps.scala b/test/files/pos/multiLineOps.scala new file mode 100644 index 000000000000..8b9da5484317 --- /dev/null +++ b/test/files/pos/multiLineOps.scala @@ -0,0 +1,30 @@ +//> using options -Werror -Xsource:3 -Xsource-features:leading-infix + +class Channel { + def ! (msg: String): Channel = this + def send_! (msg: String): Channel = this +} + +class Test { + val x = 1 + + 2 + + 3 + + val c = new Channel() + + def send() = + c ! "hello" + ! "world" + send_! "!" 
+ + val b: Boolean = + "hello".isEmpty + && true && + !"hello".isEmpty + + val b2: Boolean = { + println(x) + !"hello".isEmpty + ??? + } +} diff --git a/test/files/pos/native-warning.scala b/test/files/pos/native-warning.scala index 7f47e94604a9..73a22ceaecc7 100644 --- a/test/files/pos/native-warning.scala +++ b/test/files/pos/native-warning.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -Xfatal-warnings -deprecation class A { @native def setup(): Unit diff --git a/test/files/pos/nonlocal-unchecked.scala b/test/files/pos/nonlocal-unchecked.scala index 818517aea8d5..62487c47f419 100644 --- a/test/files/pos/nonlocal-unchecked.scala +++ b/test/files/pos/nonlocal-unchecked.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings class A { def f: Boolean = { val xs = Nil map (_ => return false) diff --git a/test/files/pos/not-possible-cause.scala b/test/files/pos/not-possible-cause.scala new file mode 100644 index 000000000000..6c500b7b8511 --- /dev/null +++ b/test/files/pos/not-possible-cause.scala @@ -0,0 +1,3 @@ +object Foo { + def foo[A <: Product](a: A): Unit = { type X = a.type } +} diff --git a/test/files/pos/nothing_manifest_disambig-new.scala b/test/files/pos/nothing_manifest_disambig-new.scala index ed3a9e8fb1fd..64afdee602d2 100644 --- a/test/files/pos/nothing_manifest_disambig-new.scala +++ b/test/files/pos/nothing_manifest_disambig-new.scala @@ -9,4 +9,4 @@ object Test { def foo[A, C](m : C)(implicit ev: C <:< Traversable[A], mani: TypeTag[A]): (C, A, TypeTag[A]) = (m, m.head, mani) foo(List(1,2,3)) -} \ No newline at end of file +} diff --git a/test/files/pos/nothing_manifest_disambig-old.scala b/test/files/pos/nothing_manifest_disambig-old.scala index 9a3db0c6d492..f282cb914b4a 100644 --- a/test/files/pos/nothing_manifest_disambig-old.scala +++ b/test/files/pos/nothing_manifest_disambig-old.scala @@ -7,4 +7,4 @@ object Test { def foo[A, C](m : C)(implicit ev: C <:< Traversable[A], mani: 
Manifest[A]): (C, A, Manifest[A]) = (m, m.head, mani) foo(List(1,2,3)) -} \ No newline at end of file +} diff --git a/test/files/pos/nullary-override-3.scala b/test/files/pos/nullary-override-3.scala new file mode 100644 index 000000000000..2ab5e5ce86c1 --- /dev/null +++ b/test/files/pos/nullary-override-3.scala @@ -0,0 +1,10 @@ +//> using options -Werror -Wunused:nowarn -Xsource:3 +// +class C extends java.lang.CharSequence { + def charAt(x$1: Int): Char = ??? + def length: Int = ??? + def subSequence(x$1: Int, x$2: Int): CharSequence = ??? +} + +// Welcome to the Happy J +class J { override def toString = "Happy J" } diff --git a/test/files/pos/open-infix-future.scala b/test/files/pos/open-infix-future.scala index 8fee778d40cb..7b09cb54cf83 100644 --- a/test/files/pos/open-infix-future.scala +++ b/test/files/pos/open-infix-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // open class A diff --git a/test/files/pos/overload_poly_repeated.scala b/test/files/pos/overload_poly_repeated.scala new file mode 100644 index 000000000000..6ca097f71767 --- /dev/null +++ b/test/files/pos/overload_poly_repeated.scala @@ -0,0 +1,54 @@ +class C { + // this always worked + // during isApplicableToMethod will use formalTypes to eliminate the repeated param in the formal types, + // but we keep the repeated marker in the arguments -- here's a debug log: +/* +isCompatibleArgs false (List(Int*), List(Int)) +isAsSpecific false: (xs: Int*)Int >> (x: Int)Int? + --> the repeated case is not more specific than the single-arg case because + you can't apply something of `Int*` to `Int` + +isCompatibleArgs true (List(Int), List(Int)) +isAsSpecific true: (x: Int)Int >> (xs: Int*)Int? + --> the single param case is more specific than the repeated param case, because + you can apply a single argument to the method with the repeated param + +isCompatibleArgs true (List(Int), List(Int)) +isAsSpecific true: (x: Int)Int >> (xs: Int*)Int? 
+isCompatibleArgs false (List(Int*), List(Int)) +isAsSpecific false: (xs: Int*)Int >> (x: Int)Int? +isCompatibleArgs true (List(Int), List(Int)) +isAsSpecific true: (x: Int)Int >> (xs: Int*)Int? +isCompatibleArgs false (List(Int*), List(Int)) +isAsSpecific false: (xs: Int*)Int >> (x: Int)Int? +inferMethodAlternative applicable List(method foo, method foo) --> ranked: List(method foo) + + */ + + def foo(xs: Int*): Int = xs.toSeq.head + def foo(x: Int): Int = x + foo(2) + + // this should also type check, resolving to the non-repeated case, + // but there was a bug in the polymorphic case of isApplicableToMethod + // (adjustTypeArgs would remove the incompatibility in applying something + // expecting type T to a T*, as the latter would be turned into Seq[T]) +/* +isAsSpecific false: [T](xs: T*)T >> [T](x: T)T? +isAsSpecific true: [T](x: T)T >> [T](xs: T*)T? +isAsSpecific true: [T](x: T)T >> [T](xs: T*)T? +isAsSpecific false: [T](xs: T*)T >> [T](x: T)T? +isAsSpecific true: [T](x: T)T >> [T](xs: T*)T? +isAsSpecific false: [T](xs: T*)T >> [T](x: T)T? 
+inferMethodAlternative applicable List(method fooT, method fooT) --> ranked: List(method fooT) + */ + def fooT[T](xs: T*): T = xs.toSeq.head + def fooT[T](x: T): T = x + fooT(2) + + // from 4775 + def f[T](x: T): T = x + def f[T](x: T, xs: T*): T = x + + f(5) +} diff --git a/test/files/pos/overload_proto.scala b/test/files/pos/overload_proto.scala new file mode 100644 index 000000000000..6d5a46933c6b --- /dev/null +++ b/test/files/pos/overload_proto.scala @@ -0,0 +1,95 @@ +object Util { + def mono(x: Int) = x + def poly[T](x: T): T = x +} + +trait FunSam[-T, +R] { def apply(x: T): R } + + +trait TFun { def map[T](f: T => Int): Unit = () } +object Fun extends TFun { import Util._ + def map[T: scala.reflect.ClassTag](f: T => Int): Unit = () + + map(mono) + map(mono _) + map(x => mono(x)) + +// can't infer polymorphic type for function parameter: +// map(poly) +// map(poly _) +// map(x => poly(x)) +} + +trait TSam { def map[T](f: T FunSam Int): Unit = () } +object Sam extends TSam { import Util._ + def map[T: scala.reflect.ClassTag](f: T `FunSam` Int): Unit = () + + map(mono) // sam + map(mono _) // sam + map(x => mono(x)) // sam + +// can't infer polymorphic type for function parameter: +// map(poly) +// map(poly _) +// map(x => poly(x)) +} + +trait IntFun { def map[T](f: Int => T): Unit = () } +object int_Fun extends IntFun { import Util._ + def map[T: scala.reflect.ClassTag](f: Int => T): Unit = () + + map(mono) + map(mono _) + map(x => mono(x)) + + map(poly) + map(poly _) + map(x => poly(x)) +} + +trait IntSam { def map[T](f: Int FunSam T): Unit = () } +object int_Sam extends IntSam { import Util._ + def map[T: scala.reflect.ClassTag](f: Int `FunSam` T): Unit = () + + map(mono) // sam + map(mono _) // sam + map(x => mono(x)) // sam + + map(poly) // sam + map(poly _) // sam + map(x => poly(x)) // sam +} + + +/* +eta_overload_hof.scala:27: error: missing argument list for method mono in object Util +Unapplied methods are only converted to functions when a function 
type is expected. +You can make this conversion explicit by writing `mono _` or `mono(_)` instead of `mono`. + map(mono) + ^ +eta_overload_hof.scala:46: error: type mismatch; + found : Nothing => Nothing + required: ?<: Int => ? + map(poly _) + ^ +eta_overload_hof.scala:54: error: missing argument list for method mono in object Util +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `mono _` or `mono(_)` instead of `mono`. + map(mono) + ^ +eta_overload_hof.scala:58: error: missing argument list for method poly in object Util +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `poly _` or `poly(_)` instead of `poly`. + map(poly) + ^ +eta_overload_hof.scala:59: error: overloaded method value map with alternatives: + [T](f: FunSam[Int,T])(implicit evidence$4: scala.reflect.ClassTag[T])Unit + [T](f: FunSam[Int,T])Unit + cannot be applied to (Nothing => Nothing) + map(poly _) + ^ +eta_overload_hof.scala:60: error: missing parameter type + map(x => poly(x)) + ^ + +* */ diff --git a/test/files/pos/overload_proto_accisam.scala b/test/files/pos/overload_proto_accisam.scala new file mode 100644 index 000000000000..0fdeab55305c --- /dev/null +++ b/test/files/pos/overload_proto_accisam.scala @@ -0,0 +1,7 @@ +// TODO make independent of java.io.OutputStream, but obvious way does not capture the bug (see didInferSamType and OverloadedArgProto) +class Test { + def overloadedAccidentalSam(a: java.io.OutputStream, b: String) = ??? + def overloadedAccidentalSam(a: java.io.OutputStream, b: Any)= ??? + + overloadedAccidentalSam(??? 
: java.io.OutputStream, null) +} diff --git a/test/files/pos/overload_proto_collapse.scala b/test/files/pos/overload_proto_collapse.scala new file mode 100644 index 000000000000..a0c11ba16e9a --- /dev/null +++ b/test/files/pos/overload_proto_collapse.scala @@ -0,0 +1,57 @@ + +class Test { + def prepended[B >: Char](elem: B): String = ??? + def prepended(c: Char): String = ??? + + def +:[B >: Char](elem: B): String = prepended(elem) +} + + +trait DurationConversions { + trait Classifier[C] { type R } + + def days: Int = ??? + def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ??? + + def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c) +} + + +trait AnonMatch { + trait MapOps[K, +V, +CC[_, _]] { + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = ??? + def map[K2 <: AnyRef, V2](f: ((K with AnyRef, V)) => (K2, V2)): MapOps[K2, V2, Map] = ??? + } + + (??? : MapOps[String, Int, Map]).map{ case (k,v) => ??? } +} + + +trait FBounds { + def f[A](x: A) = 11; + def f[A <: Ordered[A]](x: Ordered[A]) = 12; + + f(1) +} + +// Don't collapse A and Tree[A]. Naively replacing type params with ? gives ? and Tree[?], +// which are equal because wildcard equals whatever +// example from specs2 +class Trees { outer => + trait Tree[B] + + def clean[A](t: Tree[Option[A]]): Tree[A] = + prune(t, (a: Option[A]) => a).getOrElse(??? : Tree[A]) + + def prune[A, B](t: Tree[A], f: A => Option[B]): Option[Tree[B]] = ??? + def prune[A](t: Tree[A], f: Tree[A] => Option[A])(implicit initial: A): Tree[A] = ??? +} + + +// From gigahorse +abstract class Sam[A] { def apply(a: String): A } + +class GigaHorse { + def map[A](f: String => A): A = map(new Sam[A] { def apply(a: String): A = f(a) }) + def map[A](f: Sam[A]): A = ??? 
+} diff --git a/test/files/pos/overloaded_extractor_and_regular_def.scala b/test/files/pos/overloaded_extractor_and_regular_def.scala index c8e7da5cadda..72b9f65d8480 100644 --- a/test/files/pos/overloaded_extractor_and_regular_def.scala +++ b/test/files/pos/overloaded_extractor_and_regular_def.scala @@ -12,7 +12,7 @@ trait TreesBase { } trait TreesApi extends TreesBase { - def Apply(x: String) + def Apply(x: String): Unit } class Universe extends TreesApi { @@ -23,10 +23,10 @@ class Universe extends TreesApi { } object Test extends App { - def foo(tapi: TreesApi) { + def foo(tapi: TreesApi): Unit = { import tapi._ - def bar(tree: Tree) { + def bar(tree: Tree): Unit = { val Apply(x) = tree } } -} \ No newline at end of file +} diff --git a/test/files/pos/overloaded_ho_fun.scala b/test/files/pos/overloaded_ho_fun.scala index 17176715f0f1..dbc6885812b9 100644 --- a/test/files/pos/overloaded_ho_fun.scala +++ b/test/files/pos/overloaded_ho_fun.scala @@ -27,10 +27,10 @@ class StringLike(xs: String) { object Test { val of = new OverloadedFun[Int](1) - of.foo(_.toString) +// of.foo(_.toString) // not allowed -- different argument types for the hof arg of.poly(x => x / 2 ) - of.polySam(x => x / 2 ) +// of.polySam(x => x / 2) // not allowed -- need at least one regular function type in the mix of.polyJavaSam(x => x) val sl = new StringLike("a") diff --git a/test/files/pos/override-object-yes.scala b/test/files/pos/override-object-yes.scala deleted file mode 100644 index 250d161d7fab..000000000000 --- a/test/files/pos/override-object-yes.scala +++ /dev/null @@ -1,41 +0,0 @@ -// scalac: -Yoverride-objects -package case1 { - class Bippy { - def f = 1 - } - - trait Foo { - object Bar extends Bippy { - override def f = 2 - } - } - - trait Foo2 extends Foo { - override object Bar extends Bippy { - override def f = 3 - } - } - - trait Foo3 { - object Bar { - def g: Traversable[Int] = Nil - } - } - trait Foo4 extends Foo3 { - override object Bar { - def g: List[Int] = Nil - } - } 
-} - -package case2 { - class Bar[T] - - class Foo[T] { - object A extends Bar[T] - } - - class Baz[S] extends Foo[S] { - override object A extends Bar[S] - } -} diff --git a/test/files/pos/overzealous-assert-genbcode.scala b/test/files/pos/overzealous-assert-genbcode.scala index ddd70b0c44ab..82be359d9f29 100644 --- a/test/files/pos/overzealous-assert-genbcode.scala +++ b/test/files/pos/overzealous-assert-genbcode.scala @@ -1,6 +1,6 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { args(0) match { case a: String => while(a == null) {} } diff --git a/test/files/pos/package-implicit/ActorRef.scala b/test/files/pos/package-implicit/ActorRef.scala index e3f93c5e7213..de57e61ceb6d 100644 --- a/test/files/pos/package-implicit/ActorRef.scala +++ b/test/files/pos/package-implicit/ActorRef.scala @@ -4,4 +4,4 @@ trait ActorRef { def stop(): Unit = {} } -trait ScalaActorRef { self: ActorRef => } \ No newline at end of file +trait ScalaActorRef { self: ActorRef => } diff --git a/test/files/pos/package-implicit/package.scala b/test/files/pos/package-implicit/package.scala index 96c4b133f83a..d0f28b36b863 100644 --- a/test/files/pos/package-implicit/package.scala +++ b/test/files/pos/package-implicit/package.scala @@ -3,4 +3,4 @@ package t1000647 package object foo { implicit def scala2ActorRef(ref: ScalaActorRef): ActorRef = ref.asInstanceOf[ActorRef] -} \ No newline at end of file +} diff --git a/test/files/pos/package-ob-case/A_1.scala b/test/files/pos/package-ob-case/A_1.scala index 39f68eef9dbb..7101da2092ed 100644 --- a/test/files/pos/package-ob-case/A_1.scala +++ b/test/files/pos/package-ob-case/A_1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings package foo { package object foo { case class X(z: Int) { } diff --git a/test/files/pos/package-ob-case/B_2.scala b/test/files/pos/package-ob-case/B_2.scala index 39f68eef9dbb..7101da2092ed 100644 --- a/test/files/pos/package-ob-case/B_2.scala +++ 
b/test/files/pos/package-ob-case/B_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings package foo { package object foo { case class X(z: Int) { } diff --git a/test/files/pos/package-object-deferred-load-bug/A_1.scala b/test/files/pos/package-object-deferred-load-bug/A_1.scala new file mode 100644 index 000000000000..2597aff42d23 --- /dev/null +++ b/test/files/pos/package-object-deferred-load-bug/A_1.scala @@ -0,0 +1,7 @@ +package p1 { + package x_01 { object zappo {}} + + package x_64 { + object zappo {} + } +} diff --git a/test/files/pos/package-object-deferred-load-bug/A_2.scala b/test/files/pos/package-object-deferred-load-bug/A_2.scala new file mode 100644 index 000000000000..f40038de1a6a --- /dev/null +++ b/test/files/pos/package-object-deferred-load-bug/A_2.scala @@ -0,0 +1,10 @@ +package p1 { + import x_64._ + package x_01 { + object blerg extends AnyRef {} + } + + package x_64 { + object blerg {} + } +} \ No newline at end of file diff --git a/test/files/pos/package-object-deferred-load-bug/B_2.scala b/test/files/pos/package-object-deferred-load-bug/B_2.scala new file mode 100644 index 000000000000..b39bbae9da72 --- /dev/null +++ b/test/files/pos/package-object-deferred-load-bug/B_2.scala @@ -0,0 +1,10 @@ +package p1 { + import x_64._ + package x_01 { + object `package` extends AnyRef { def m = "m "} + } + + package x_64 { + object `package` { def m = "m" } + } +} diff --git a/test/files/pos/parallel-classloader.scala b/test/files/pos/parallel-classloader.scala index 0a4751b56e11..22d4afca0f23 100644 --- a/test/files/pos/parallel-classloader.scala +++ b/test/files/pos/parallel-classloader.scala @@ -1,3 +1,3 @@ class Loader extends ClassLoader { ClassLoader.registerAsParallelCapable() -} \ No newline at end of file +} diff --git a/test/files/pos/parens-for-params-silent.scala b/test/files/pos/parens-for-params-silent.scala new file mode 100644 index 000000000000..b0ee04fd6bd9 --- /dev/null +++ 
b/test/files/pos/parens-for-params-silent.scala @@ -0,0 +1,8 @@ +//> using options -Werror -Wconf:msg=lambda-parens:s -Xsource:3 + +class C { + def f = { + x: Int => x * 2 + } + def g = (x: Int) => x * 2 +} diff --git a/test/files/pos/partialfun.scala b/test/files/pos/partialfun.scala index 9f32a2202313..04ea6d02ed76 100644 --- a/test/files/pos/partialfun.scala +++ b/test/files/pos/partialfun.scala @@ -8,4 +8,12 @@ object partialfun { case None => throw new MatchError(None) } (None); + // Again, but using function literal + applyPartial(_.get)(None) + + // Now test this case involving an implicit conversion and auto-tupling + // derived from AbstractFSM's onTransition; akka/akka#27410 + implicit final def g(f: (String, String) => Unit): PartialFunction[(String, String), Unit] = ??? + val fun: (String, String) => Unit = ??? + val pf: PartialFunction[(String, String), Unit] = fun(_: String, _: String) } diff --git a/test/files/pos/patmat-exprs-b.scala b/test/files/pos/patmat-exprs-b.scala new file mode 100644 index 000000000000..426419a0c8ee --- /dev/null +++ b/test/files/pos/patmat-exprs-b.scala @@ -0,0 +1,51 @@ + +import annotation.nowarn + +trait Pattern { + + trait NumericOps[T] extends Serializable { + + def zero: T + + def add(a: T, b: T): T + def add(a: T, b: T, c: T): T = add(a, add(b, c)) + + def sum(terms: Iterable[T]) = terms.foldLeft(zero)(add) + def sum(terms: Iterator[T]) = terms.foldLeft(zero)(add) + } + + trait Expr[T] { + + /** Returns arguments of this operator */ + def args: Iterable[Expr[_]] + + def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) + + def specialize(implicit num: NumericOps[T]): Expr[T] = + this match { + case Add(Seq(a, b)) => Add2(a, b) + case Add(Seq(a, b, c)) => Add3(a, b, c) + case x => x + } + } + + trait TwoArg[T] extends Expr[T] { + val left: Expr[T] + val right: Expr[T] + val args = List(left, right) + } + + trait ManyArg[T] extends Expr[T] + + case class Add[T](args: Iterable[Expr[T]])(implicit 
@nowarn num: NumericOps[T]) extends ManyArg[T] { + override def toString = "(" + args.mkString(" + ") + ")" + } + + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + override def toString = "(" + left + " + " + right + ")" + } + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + val args = List(a1, a2, a3) + override def toString = "(" + a1 + " + " + a2 + " + " + a3 + ")" + } +} diff --git a/test/files/pos/patmat-hk.scala b/test/files/pos/patmat-hk.scala index 95f08e018d25..5e150da4695f 100644 --- a/test/files/pos/patmat-hk.scala +++ b/test/files/pos/patmat-hk.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 case class Foo[F[_]]() case class APair[F[_], G[_], A](f: F[A], g: G[A]) diff --git a/test/files/pos/patmat-suppress.scala b/test/files/pos/patmat-suppress.scala index b56a0f42430a..e144193b1316 100644 --- a/test/files/pos/patmat-suppress.scala +++ b/test/files/pos/patmat-suppress.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Xno-patmat-analysis +//> using options -Xfatal-warnings -Xno-patmat-analysis +// // test that none of these warn due to -Xno-patmat-analysis // tests taken from test/files/neg/patmatexhaust.scala, test/files/neg/pat_unreachable.scala class TestSealedExhaustive { // compile only diff --git a/test/files/pos/patmat.scala b/test/files/pos/patmat.scala index 51b879abf27c..53e1c5f1fc9d 100644 --- a/test/files/pos/patmat.scala +++ b/test/files/pos/patmat.scala @@ -34,7 +34,7 @@ object t1261 { object Row { def unapply(r: Row) = true - def f(elem: Elem) { + def f(elem: Elem): Unit = { elem match { case Bar() => ; case Row() => ; @@ -49,7 +49,7 @@ case class Node(l: Tree, v: Int, r: Tree) extends Tree case object EmptyTree extends Tree object Ticket335 { // compile-only - def runTest() { + def runTest(): Unit = { (EmptyTree: Tree @unchecked) match { case Node(_, v, _) if (v == 0) => 0 
case EmptyTree => 2 @@ -152,7 +152,7 @@ object Ticket522 { } object Ticket710 { - def method { + def method: Unit = { sealed class Parent() case object Child extends Parent() val x: Parent = Child diff --git a/test/files/pos/patmat_list_rewrite.scala b/test/files/pos/patmat_list_rewrite.scala new file mode 100644 index 000000000000..693cf0a30b77 --- /dev/null +++ b/test/files/pos/patmat_list_rewrite.scala @@ -0,0 +1,10 @@ +//> using options -Werror +// +class C { + def m(xs: List[String]) = xs match { + case List() => "z" + case y :: _ => y + } + // was: patmat_list_rewrite.scala:4: warning: match may not be exhaustive. + // It would fail on the following input: Nil +} diff --git a/test/files/pos/patterns1213.scala b/test/files/pos/patterns1213.scala index 7e8af171880c..de1972ca597e 100644 --- a/test/files/pos/patterns1213.scala +++ b/test/files/pos/patterns1213.scala @@ -2,7 +2,7 @@ abstract class MapLocation(ID: Int) { abstract class Message case class ReceivePlayer(id: Int) extends Message - def foo(p: Message) { + def foo(p: Message): Unit = { p match { case ReceivePlayer(ID) => () diff --git a/test/files/pos/polymorphic-case-class.scala b/test/files/pos/polymorphic-case-class.scala index c9a22ff207e2..f133ae8a1bf5 100644 --- a/test/files/pos/polymorphic-case-class.scala +++ b/test/files/pos/polymorphic-case-class.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // // no unchecked warnings case class Bippy[T, -U, +V](x: T, z: V) { } diff --git a/test/files/pos/pos-bug1241.scala b/test/files/pos/pos-bug1241.scala index 1752208f3765..1038dc30489c 100644 --- a/test/files/pos/pos-bug1241.scala +++ b/test/files/pos/pos-bug1241.scala @@ -1,8 +1,8 @@ object test extends App { // more.. - type T = { def hello() } + type T = { def hello(): Unit } //val x4 = new AnyRef { def hello() { println("4") } } // ok! - val x4: T = new { def hello() { println("4") } } // error! 
+ val x4: T = new { def hello(): Unit = { println("4") } } // error! x4.hello() // more.. } diff --git a/test/files/pos/post-postfix.scala b/test/files/pos/post-postfix.scala new file mode 100644 index 000000000000..19f6660ba8cf --- /dev/null +++ b/test/files/pos/post-postfix.scala @@ -0,0 +1,7 @@ + +// required +import language.postfixOps + +trait T { + def f(): Int = List(1) head +} diff --git a/test/files/pos/private-types-after-typer.scala b/test/files/pos/private-types-after-typer.scala index 79ef93406382..5c20cac2a1fb 100644 --- a/test/files/pos/private-types-after-typer.scala +++ b/test/files/pos/private-types-after-typer.scala @@ -6,4 +6,4 @@ trait T { object O2 } } -} \ No newline at end of file +} diff --git a/test/files/pos/proj-rec-test.scala b/test/files/pos/proj-rec-test.scala index d5bec2f892ec..2484d175a1ae 100644 --- a/test/files/pos/proj-rec-test.scala +++ b/test/files/pos/proj-rec-test.scala @@ -1,4 +1,6 @@ -// scalac: -Yrecursion 1 + +// was: -Yrecursion 1 +// object ProjTest { trait MInt { type Type } trait _0 extends MInt { type Type = Boolean } diff --git a/test/files/pos/rangepos-anonapply.scala b/test/files/pos/rangepos-anonapply.scala index 2a069e31bb62..14dfe5ceae24 100644 --- a/test/files/pos/rangepos-anonapply.scala +++ b/test/files/pos/rangepos-anonapply.scala @@ -1,10 +1,10 @@ -// scalac: -Yrangepos +// class Test { trait PropTraverser { def apply(x: Int): Unit = {} } - def gather(x: Int) { + def gather(x: Int): Unit = { (new PropTraverser {})(x) } } diff --git a/test/files/pos/rangepos-patmat.scala b/test/files/pos/rangepos-patmat.scala index 0a7cab6b2d23..64bbce5b344f 100644 --- a/test/files/pos/rangepos-patmat.scala +++ b/test/files/pos/rangepos-patmat.scala @@ -1,5 +1,5 @@ -// scalac: -Yrangepos + +// class Foo { def test: PartialFunction[Any, String] = { case _ => "ok" } - } diff --git a/test/files/pos/rangepos.scala b/test/files/pos/rangepos.scala index 99ed30a96991..80c0d6d80eba 100644 --- a/test/files/pos/rangepos.scala +++ 
b/test/files/pos/rangepos.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos + +// class Foo(val x: Double) extends AnyVal { } object Pretty { diff --git a/test/files/pos/raw-map/S_2.scala b/test/files/pos/raw-map/S_2.scala index de6c4ee5c2d5..d2886fdce9e4 100644 --- a/test/files/pos/raw-map/S_2.scala +++ b/test/files/pos/raw-map/S_2.scala @@ -1,5 +1,5 @@ class Foo { - def foo { + def foo: Unit = { val x: J_1 = null x.setRawType(new java.util.HashMap) } diff --git a/test/files/pos/reflection-compat-api-universe.scala b/test/files/pos/reflection-compat-api-universe.scala deleted file mode 100644 index 0aee8bcda529..000000000000 --- a/test/files/pos/reflection-compat-api-universe.scala +++ /dev/null @@ -1,136 +0,0 @@ -object Test extends App { - val u: scala.reflect.api.Universe = ??? - import u._ - import scala.reflect.ClassTag - import compat._ - - val tree: Tree = ??? - val ttree: TypeTree = ??? - val stree: SymTree = ??? - val trees: List[Tree] = ??? - val mods: Modifiers = ??? - val impl: Template = ??? - val vparamss: List[List[ValDef]] = ??? - val rhs: Tree = ??? - val sym: Symbol = ??? - val tsym: TypeSymbol = ??? - val syms: List[Symbol] = ??? - val params: List[Symbol] = ??? - val tparams: List[Symbol] = ??? - val tpe: Type = ??? - val tpes: List[Type] = ??? - val manifest: Manifest[Int] = ??? - val tag: TypeTag[Int] = ??? - val mirror: Mirror = ??? - val decls: Scope = ??? - val pos: Position = ??? - val ann: Annotation = ??? - val anns: List[Annotation] = ??? - val const: Constant = ??? - val name: Name = ??? - val tyname: TypeName = ??? - val tename: TermName = ??? - val flags: FlagSet = ??? - val str: String = ??? - val i: Int = ??? - val b: Boolean = ??? 
- - // abstract class BuildApi - // abstract class ReferenceToBoxedExtractor - // abstract trait AttachableApi - // abstract trait FreeTermSymbolApi - // abstract trait FreeTypeSymbolApi - // abstract trait IdentContextApi - // abstract trait ReferenceToBoxedApi - // abstract trait SymTreeContextApi - // abstract trait SymbolContextApi - // abstract trait TreeContextApi - // abstract trait TypeTreeContextApi - locally(ClassDef(sym, impl): ClassDef) - locally(DefDef(sym, mods, vparamss, rhs): DefDef) - locally(DefDef(sym, vparamss, rhs): DefDef) - locally(DefDef(sym, mods, rhs): DefDef) - locally(DefDef(sym, rhs): DefDef) - locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef) - locally(LabelDef(sym, params, rhs): LabelDef) - locally(ModuleDef(sym, impl): ModuleDef) - locally(TypeDef(sym, rhs): TypeDef) - locally(TypeDef(sym): TypeDef) - locally(ValDef(sym, rhs): ValDef) - locally(ValDef(sym): ValDef) - locally(AnnotatedType(anns, tpe): AnnotatedType) - locally(BoundedWildcardType(??? 
: TypeBounds): BoundedWildcardType) - locally(TypeBounds(tpe, tpe): TypeBounds) - locally(MethodType(params, tpe): MethodType) - locally(RefinedType(tpes, decls): RefinedType) - locally(RefinedType(tpes, decls, sym): RefinedType) - locally(ClassInfoType(tpes, decls, sym): ClassInfoType) - locally(SingleType(tpe, sym): Type) - locally(TypeRef(tpe, sym, tpes): Type) - locally(ExistentialType(syms, tpe): ExistentialType) - locally(NullaryMethodType(tpe): NullaryMethodType) - locally(ThisType(sym): Type) - locally(SuperType(tpe, tpe): Type) - locally(PolyType(syms, tpe): PolyType) - locally(ConstantType(const): ConstantType) - locally(sym.asFreeTerm: FreeTermSymbol) - locally(sym.asFreeType: FreeTypeSymbol) - locally(existentialAbstraction(tparams, tpe): Type) - locally(tree.freeTerms: List[FreeTermSymbol]) - locally(tree.freeTypes: List[FreeTypeSymbol]) - locally(intersectionType(tpes): Type) - locally(intersectionType(tpes, sym): Type) - locally(sym.isErroneous: Boolean) - locally(sym.isFreeTerm: Boolean) - locally(sym.isFreeType: Boolean) - locally(sym.isLocal: Boolean) - locally(sym.isOverride: Boolean) - locally(tsym.isSkolem: Boolean) - locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int]) - locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type}) - locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol) - locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol) - locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol)) - locally(newScopeWith(sym, sym, sym): Scope) - locally(sym.newTermSymbol(tename, pos, flags): TermSymbol) - locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol) - locally(polyType(tparams, tpe): Type) - locally(sym.pos: Position) - locally(refinedType(tpes, sym): Type) - locally(refinedType(tpes, sym, decls, pos): Type) - locally(singleType(tpe, sym): Type) - locally(tree.substituteSymbols(syms, syms): Tree) - 
locally(tree.substituteThis(sym, tree): Tree) - locally(tree.substituteTypes(syms, tpes): Tree) - locally(typeRef(tpe, sym, tpes): Type) - locally(typeTagToManifest(mirror, tag): Manifest[Int]) - locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol]) - locally((??? : FreeTermSymbol).origin) - locally((??? : FreeTermSymbol).value) - locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]) - locally((??? : FreeTypeSymbol).origin) - locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]) - locally(build: BuildApi) - locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed) - locally((??? : ReferenceToBoxed).ident: Tree) - locally(ReferenceToBoxed.unapply(???): Option[Ident]) - locally(build.selectType(sym, str): TypeSymbol) - locally(build.selectTerm(sym, str): TermSymbol) - locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol) - locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol) - locally(build.newFreeTerm(str, i): FreeTermSymbol) - locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol) - locally(build.newFreeType(str): FreeTypeSymbol) - locally(build.newFreeType(str, flags, str): FreeTypeSymbol) - locally(build.setTypeSignature(sym, tpe): Symbol) - locally(build.setAnnotations(sym, anns): Symbol) - locally(build.flagsFromBits(??? 
: Long): FlagSet) - locally(build.emptyValDef: ValDef) - locally(build.This(sym): Tree) - locally(build.Select(tree, sym): Select) - locally(build.Ident(sym): Ident) - locally(build.TypeTree(tpe): TypeTree) - locally(build.thisPrefix(sym): Type) - locally(build.setType(tree, tpe): Tree) - locally(build.setSymbol(tree, sym): Tree) -} \ No newline at end of file diff --git a/test/files/pos/reflection-compat-c.scala b/test/files/pos/reflection-compat-c.scala deleted file mode 100644 index 73158decdc99..000000000000 --- a/test/files/pos/reflection-compat-c.scala +++ /dev/null @@ -1,139 +0,0 @@ -import scala.reflect.macros.Context - -object Test extends App { - def impl(c: Context) = { - import c.universe._ - import scala.reflect.ClassTag - import compat._ - - val tree: Tree = ??? - val ttree: TypeTree = ??? - val stree: SymTree = ??? - val trees: List[Tree] = ??? - val mods: Modifiers = ??? - val impl: Template = ??? - val vparamss: List[List[ValDef]] = ??? - val rhs: Tree = ??? - val sym: Symbol = ??? - val tsym: TypeSymbol = ??? - val syms: List[Symbol] = ??? - val params: List[Symbol] = ??? - val tparams: List[Symbol] = ??? - val tpe: Type = ??? - val tpes: List[Type] = ??? - val manifest: Manifest[Int] = ??? - val tag: TypeTag[Int] = ??? - val mirror: Mirror = ??? - val decls: Scope = ??? - val pos: Position = ??? - val ann: Annotation = ??? - val anns: List[Annotation] = ??? - val const: Constant = ??? - val name: Name = ??? - val tyname: TypeName = ??? - val tename: TermName = ??? - val flags: FlagSet = ??? - val str: String = ??? - val i: Int = ??? - val b: Boolean = ??? 
- - // abstract class BuildApi - // abstract class ReferenceToBoxedExtractor - // abstract trait AttachableApi - // abstract trait FreeTermSymbolApi - // abstract trait FreeTypeSymbolApi - // abstract trait IdentContextApi - // abstract trait ReferenceToBoxedApi - // abstract trait SymTreeContextApi - // abstract trait SymbolContextApi - // abstract trait TreeContextApi - // abstract trait TypeTreeContextApi - locally(ClassDef(sym, impl): ClassDef) - locally(DefDef(sym, mods, vparamss, rhs): DefDef) - locally(DefDef(sym, vparamss, rhs): DefDef) - locally(DefDef(sym, mods, rhs): DefDef) - locally(DefDef(sym, rhs): DefDef) - locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef) - locally(LabelDef(sym, params, rhs): LabelDef) - locally(ModuleDef(sym, impl): ModuleDef) - locally(TypeDef(sym, rhs): TypeDef) - locally(TypeDef(sym): TypeDef) - locally(ValDef(sym, rhs): ValDef) - locally(ValDef(sym): ValDef) - locally(AnnotatedType(anns, tpe): AnnotatedType) - locally(BoundedWildcardType(??? 
: TypeBounds): BoundedWildcardType) - locally(TypeBounds(tpe, tpe): TypeBounds) - locally(MethodType(params, tpe): MethodType) - locally(RefinedType(tpes, decls): RefinedType) - locally(RefinedType(tpes, decls, sym): RefinedType) - locally(ClassInfoType(tpes, decls, sym): ClassInfoType) - locally(SingleType(tpe, sym): Type) - locally(TypeRef(tpe, sym, tpes): Type) - locally(ExistentialType(syms, tpe): ExistentialType) - locally(NullaryMethodType(tpe): NullaryMethodType) - locally(ThisType(sym): Type) - locally(SuperType(tpe, tpe): Type) - locally(PolyType(syms, tpe): PolyType) - locally(ConstantType(const): ConstantType) - locally(sym.asFreeTerm: FreeTermSymbol) - locally(sym.asFreeType: FreeTypeSymbol) - locally(existentialAbstraction(tparams, tpe): Type) - locally(tree.freeTerms: List[FreeTermSymbol]) - locally(tree.freeTypes: List[FreeTypeSymbol]) - locally(intersectionType(tpes): Type) - locally(intersectionType(tpes, sym): Type) - locally(sym.isErroneous: Boolean) - locally(sym.isFreeTerm: Boolean) - locally(sym.isFreeType: Boolean) - locally(sym.isLocal: Boolean) - locally(sym.isOverride: Boolean) - locally(tsym.isSkolem: Boolean) - locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int]) - locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type}) - locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol) - locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol) - locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol)) - locally(newScopeWith(sym, sym, sym): Scope) - locally(sym.newTermSymbol(tename, pos, flags): TermSymbol) - locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol) - locally(polyType(tparams, tpe): Type) - locally(sym.pos: Position) - locally(refinedType(tpes, sym): Type) - locally(refinedType(tpes, sym, decls, pos): Type) - locally(singleType(tpe, sym): Type) - locally(tree.substituteSymbols(syms, syms): Tree) - 
locally(tree.substituteThis(sym, tree): Tree) - locally(tree.substituteTypes(syms, tpes): Tree) - locally(typeRef(tpe, sym, tpes): Type) - locally(typeTagToManifest(mirror, tag): Manifest[Int]) - locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol]) - locally((??? : FreeTermSymbol).origin) - locally((??? : FreeTermSymbol).value) - locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]) - locally((??? : FreeTypeSymbol).origin) - locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]) - locally(build: BuildApi) - locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed) - locally((??? : ReferenceToBoxed).ident: Tree) - locally(ReferenceToBoxed.unapply(???): Option[Ident]) - locally(build.selectType(sym, str): TypeSymbol) - locally(build.selectTerm(sym, str): TermSymbol) - locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol) - locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol) - locally(build.newFreeTerm(str, i): FreeTermSymbol) - locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol) - locally(build.newFreeType(str): FreeTypeSymbol) - locally(build.newFreeType(str, flags, str): FreeTypeSymbol) - locally(build.setTypeSignature(sym, tpe): Symbol) - locally(build.setAnnotations(sym, anns): Symbol) - locally(build.flagsFromBits(??? : Long): FlagSet) - locally(build.emptyValDef: ValDef) - locally(build.This(sym): Tree) - locally(build.Select(tree, sym): Select) - locally(build.Ident(sym): Ident) - locally(build.TypeTree(tpe): TypeTree) - locally(build.thisPrefix(sym): Type) - locally(build.setType(tree, tpe): Tree) - locally(build.setSymbol(tree, sym): Tree) - } -} \ No newline at end of file diff --git a/test/files/pos/reflection-compat-macro-universe.scala b/test/files/pos/reflection-compat-macro-universe.scala deleted file mode 100644 index 89ca36dab28d..000000000000 --- a/test/files/pos/reflection-compat-macro-universe.scala +++ /dev/null @@ -1,177 +0,0 @@ -object Test extends App { - val u: scala.reflect.macros.Universe = ??? 
- import u._ - import scala.reflect.macros.Attachments - import scala.reflect.ClassTag - import compat._ - - val tree: Tree = ??? - val ttree: TypeTree = ??? - val stree: SymTree = ??? - val trees: List[Tree] = ??? - val mods: Modifiers = ??? - val impl: Template = ??? - val vparamss: List[List[ValDef]] = ??? - val rhs: Tree = ??? - val sym: Symbol = ??? - val tsym: TypeSymbol = ??? - val syms: List[Symbol] = ??? - val params: List[Symbol] = ??? - val tparams: List[Symbol] = ??? - val tpe: Type = ??? - val tpes: List[Type] = ??? - val manifest: Manifest[Int] = ??? - val tag: TypeTag[Int] = ??? - val mirror: Mirror = ??? - val decls: Scope = ??? - val pos: Position = ??? - val ann: Annotation = ??? - val anns: List[Annotation] = ??? - val const: Constant = ??? - val name: Name = ??? - val tyname: TypeName = ??? - val tename: TermName = ??? - val flags: FlagSet = ??? - val str: String = ??? - val i: Int = ??? - val b: Boolean = ??? - - // abstract class BuildApi - // abstract class ReferenceToBoxedExtractor - // abstract trait AttachableApi - // abstract trait FreeTermSymbolApi - // abstract trait FreeTypeSymbolApi - // abstract trait IdentContextApi - // abstract trait ReferenceToBoxedApi - // abstract trait SymTreeContextApi - // abstract trait SymbolContextApi - // abstract trait TreeContextApi - // abstract trait TypeTreeContextApi - locally(ClassDef(sym, impl): ClassDef) - locally(DefDef(sym, mods, vparamss, rhs): DefDef) - locally(DefDef(sym, vparamss, rhs): DefDef) - locally(DefDef(sym, mods, rhs): DefDef) - locally(DefDef(sym, rhs): DefDef) - locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef) - locally(LabelDef(sym, params, rhs): LabelDef) - locally(ModuleDef(sym, impl): ModuleDef) - locally(TypeDef(sym, rhs): TypeDef) - locally(TypeDef(sym): TypeDef) - locally(ValDef(sym, rhs): ValDef) - locally(ValDef(sym): ValDef) - locally(AnnotatedType(anns, tpe): AnnotatedType) - locally(BoundedWildcardType(??? 
: TypeBounds): BoundedWildcardType) - locally(TypeBounds(tpe, tpe): TypeBounds) - locally(MethodType(params, tpe): MethodType) - locally(RefinedType(tpes, decls): RefinedType) - locally(RefinedType(tpes, decls, sym): RefinedType) - locally(ClassInfoType(tpes, decls, sym): ClassInfoType) - locally(SingleType(tpe, sym): Type) - locally(TypeRef(tpe, sym, tpes): Type) - locally(ExistentialType(syms, tpe): ExistentialType) - locally(NullaryMethodType(tpe): NullaryMethodType) - locally(ThisType(sym): Type) - locally(SuperType(tpe, tpe): Type) - locally(PolyType(syms, tpe): PolyType) - locally(ConstantType(const): ConstantType) - locally(sym.asFreeTerm: FreeTermSymbol) - locally(sym.asFreeType: FreeTypeSymbol) - locally(sym.attachments: Attachments { type Pos = Position }) - locally(tree.attachments: Attachments { type Pos = Position }) - locally(captureVariable(sym): Unit) - locally(capturedVariableType(sym): Type) - locally(sym.deSkolemize: Symbol) - locally(tree.defineType(tpe): Tree) - locally(existentialAbstraction(tparams, tpe): Type) - locally(tree.freeTerms: List[FreeTermSymbol]) - locally(tree.freeTypes: List[FreeTypeSymbol]) - locally(intersectionType(tpes): Type) - locally(intersectionType(tpes, sym): Type) - locally(sym.isErroneous: Boolean) - locally(sym.isFreeTerm: Boolean) - locally(sym.isFreeType: Boolean) - locally(sym.isLocal: Boolean) - locally(sym.isOverride: Boolean) - locally(tsym.isSkolem: Boolean) - locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int]) - locally(treeBuild.mkAttributedIdent(sym): RefTree) - locally(treeBuild.mkAttributedQualifier(tpe): Tree) - locally(treeBuild.mkAttributedQualifier(tpe, sym): Tree) - locally(treeBuild.mkAttributedRef(tpe, sym): RefTree) - locally(treeBuild.mkAttributedRef(sym): RefTree) - locally(treeBuild.mkAttributedSelect(tree, sym): RefTree) - locally(treeBuild.mkAttributedThis(sym): This) - locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: 
scala.reflect.runtime.universe.type}) - locally(treeBuild.mkMethodCall(sym, trees): Tree) - locally(treeBuild.mkMethodCall(sym, tpes, trees): Tree) - locally(treeBuild.mkMethodCall(sym, name, trees): Tree) - locally(treeBuild.mkMethodCall(sym, name, tpes, trees): Tree) - locally(treeBuild.mkMethodCall(tree, sym, tpes, trees): Tree) - locally(treeBuild.mkMethodCall(tree, trees): Tree) - locally(treeBuild.mkMethodCall(tree, tpes, trees): Tree) - locally(treeBuild.mkNullaryCall(sym, tpes): Tree) - locally(treeBuild.mkRuntimeUniverseRef: Tree) - locally(treeBuild.mkUnattributedRef(name): RefTree) - locally(treeBuild.mkUnattributedRef(sym): RefTree) - locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol) - locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol) - locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol)) - locally(newScopeWith(sym, sym, sym): Scope) - locally(sym.newTermSymbol(tename, pos, flags): TermSymbol) - locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol) - locally(polyType(tparams, tpe): Type) - locally(sym.pos: Position) - locally((tree.pos = pos): Unit) - locally(referenceCapturedVariable(sym): Tree) - locally(refinedType(tpes, sym): Type) - locally(refinedType(tpes, sym, decls, pos): Type) - locally(sym.removeAttachment[Int]: Symbol) - locally(tree.removeAttachment[Int]: Tree) - locally(sym.setAnnotations(ann, ann, ann): Symbol) - locally(sym.setName(name): Symbol) - locally(ttree.setOriginal(tree): TypeTree) - locally(tree.setPos(pos): Tree) - locally(sym.setPrivateWithin(sym): Symbol) - locally(tree.setSymbol(sym): Tree) - locally(tree.setType(tpe): Tree) - locally(sym.setTypeSignature(tpe): Symbol) - locally(singleType(tpe, sym): Type) - locally(tree.substituteSymbols(syms, syms): Tree) - locally(tree.substituteThis(sym, tree): Tree) - locally(tree.substituteTypes(syms, tpes): Tree) - locally((tree.symbol = sym): Unit) - locally((tree.tpe = tpe): Unit) - locally(typeRef(tpe, sym, tpes): Type) - 
locally(typeTagToManifest(mirror, tag): Manifest[Int]) - locally(sym.updateAttachment(42): Symbol) - locally(tree.updateAttachment(42): Tree) - locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol]) - locally((??? : FreeTermSymbol).origin) - locally((??? : FreeTermSymbol).value) - locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]) - locally((??? : FreeTypeSymbol).origin) - locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]) - locally(build: BuildApi) - locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed) - locally((??? : ReferenceToBoxed).ident: Tree) - locally(ReferenceToBoxed.unapply(???): Option[Ident]) - locally(build.selectType(sym, str): TypeSymbol) - locally(build.selectTerm(sym, str): TermSymbol) - locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol) - locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol) - locally(build.newFreeTerm(str, i): FreeTermSymbol) - locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol) - locally(build.newFreeType(str): FreeTypeSymbol) - locally(build.newFreeType(str, flags, str): FreeTypeSymbol) - locally(build.setTypeSignature(sym, tpe): Symbol) - locally(build.setAnnotations(sym, anns): Symbol) - locally(build.flagsFromBits(??? : Long): FlagSet) - locally(build.emptyValDef: ValDef) - locally(build.This(sym): Tree) - locally(build.Select(tree, sym): Select) - locally(build.Ident(sym): Ident) - locally(build.TypeTree(tpe): TypeTree) - locally(build.thisPrefix(sym): Type) - locally(build.setType(tree, tpe): Tree) - locally(build.setSymbol(tree, sym): Tree) -} \ No newline at end of file diff --git a/test/files/pos/reflection-compat-ru.scala b/test/files/pos/reflection-compat-ru.scala deleted file mode 100644 index 9ff72d1cf031..000000000000 --- a/test/files/pos/reflection-compat-ru.scala +++ /dev/null @@ -1,135 +0,0 @@ -object Test extends App { - import scala.reflect.runtime.universe._ - import scala.reflect.ClassTag - import compat._ - - val tree: Tree = ??? - val ttree: TypeTree = ??? 
- val stree: SymTree = ??? - val trees: List[Tree] = ??? - val mods: Modifiers = ??? - val impl: Template = ??? - val vparamss: List[List[ValDef]] = ??? - val rhs: Tree = ??? - val sym: Symbol = ??? - val tsym: TypeSymbol = ??? - val syms: List[Symbol] = ??? - val params: List[Symbol] = ??? - val tparams: List[Symbol] = ??? - val tpe: Type = ??? - val tpes: List[Type] = ??? - val manifest: Manifest[Int] = ??? - val tag: TypeTag[Int] = ??? - val mirror: Mirror = ??? - val decls: Scope = ??? - val pos: Position = ??? - val ann: Annotation = ??? - val anns: List[Annotation] = ??? - val const: Constant = ??? - val name: Name = ??? - val tyname: TypeName = ??? - val tename: TermName = ??? - val flags: FlagSet = ??? - val str: String = ??? - val i: Int = ??? - val b: Boolean = ??? - - // abstract class BuildApi - // abstract class ReferenceToBoxedExtractor - // abstract trait AttachableApi - // abstract trait FreeTermSymbolApi - // abstract trait FreeTypeSymbolApi - // abstract trait IdentContextApi - // abstract trait ReferenceToBoxedApi - // abstract trait SymTreeContextApi - // abstract trait SymbolContextApi - // abstract trait TreeContextApi - // abstract trait TypeTreeContextApi - locally(ClassDef(sym, impl): ClassDef) - locally(DefDef(sym, mods, vparamss, rhs): DefDef) - locally(DefDef(sym, vparamss, rhs): DefDef) - locally(DefDef(sym, mods, rhs): DefDef) - locally(DefDef(sym, rhs): DefDef) - locally(DefDef(sym, (??? : List[List[Symbol]] => Tree)): DefDef) - locally(LabelDef(sym, params, rhs): LabelDef) - locally(ModuleDef(sym, impl): ModuleDef) - locally(TypeDef(sym, rhs): TypeDef) - locally(TypeDef(sym): TypeDef) - locally(ValDef(sym, rhs): ValDef) - locally(ValDef(sym): ValDef) - locally(AnnotatedType(anns, tpe): AnnotatedType) - locally(BoundedWildcardType(??? 
: TypeBounds): BoundedWildcardType) - locally(TypeBounds(tpe, tpe): TypeBounds) - locally(MethodType(params, tpe): MethodType) - locally(RefinedType(tpes, decls): RefinedType) - locally(RefinedType(tpes, decls, sym): RefinedType) - locally(ClassInfoType(tpes, decls, sym): ClassInfoType) - locally(SingleType(tpe, sym): Type) - locally(TypeRef(tpe, sym, tpes): Type) - locally(ExistentialType(syms, tpe): ExistentialType) - locally(NullaryMethodType(tpe): NullaryMethodType) - locally(ThisType(sym): Type) - locally(SuperType(tpe, tpe): Type) - locally(PolyType(syms, tpe): PolyType) - locally(ConstantType(const): ConstantType) - locally(sym.asFreeTerm: FreeTermSymbol) - locally(sym.asFreeType: FreeTypeSymbol) - locally(existentialAbstraction(tparams, tpe): Type) - locally(tree.freeTerms: List[FreeTermSymbol]) - locally(tree.freeTypes: List[FreeTypeSymbol]) - locally(intersectionType(tpes): Type) - locally(intersectionType(tpes, sym): Type) - locally(sym.isErroneous: Boolean) - locally(sym.isFreeTerm: Boolean) - locally(sym.isFreeType: Boolean) - locally(sym.isLocal: Boolean) - locally(sym.isOverride: Boolean) - locally(tsym.isSkolem: Boolean) - locally(manifestToTypeTag(mirror, manifest): scala.reflect.api.Universe#TypeTag[Int]) - locally(mkImporter(scala.reflect.runtime.universe): Importer{val from: scala.reflect.runtime.universe.type}) - locally(sym.newClassSymbol(tyname, pos, flags): ClassSymbol) - locally(sym.newMethodSymbol(tename, pos, flags): MethodSymbol) - locally(sym.newModuleAndClassSymbol(name, pos, flags): (ModuleSymbol, ClassSymbol)) - locally(newScopeWith(sym, sym, sym): Scope) - locally(sym.newTermSymbol(tename, pos, flags): TermSymbol) - locally(sym.newTypeSymbol(tyname, pos, flags): TypeSymbol) - locally(polyType(tparams, tpe): Type) - locally(sym.pos: Position) - locally(refinedType(tpes, sym): Type) - locally(refinedType(tpes, sym, decls, pos): Type) - locally(singleType(tpe, sym): Type) - locally(tree.substituteSymbols(syms, syms): Tree) - 
locally(tree.substituteThis(sym, tree): Tree) - locally(tree.substituteTypes(syms, tpes): Tree) - locally(typeRef(tpe, sym, tpes): Type) - locally(typeTagToManifest(mirror, tag): Manifest[Int]) - locally(FreeTermSymbolTag: ClassTag[FreeTermSymbol]) - locally((??? : FreeTermSymbol).origin) - locally((??? : FreeTermSymbol).value) - locally(FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]) - locally((??? : FreeTypeSymbol).origin) - locally(ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]) - locally(build: BuildApi) - locally(ReferenceToBoxed(??? : Ident): ReferenceToBoxed) - locally((??? : ReferenceToBoxed).ident: Tree) - locally(ReferenceToBoxed.unapply(???): Option[Ident]) - locally(build.selectType(sym, str): TypeSymbol) - locally(build.selectTerm(sym, str): TermSymbol) - locally(build.selectOverloadedMethod(sym, str, i): MethodSymbol) - locally(build.newNestedSymbol(sym, name, pos, flags, b): Symbol) - locally(build.newFreeTerm(str, i): FreeTermSymbol) - locally(build.newFreeTerm(str, i, flags, str): FreeTermSymbol) - locally(build.newFreeType(str): FreeTypeSymbol) - locally(build.newFreeType(str, flags, str): FreeTypeSymbol) - locally(build.setTypeSignature(sym, tpe): Symbol) - locally(build.setAnnotations(sym, anns): Symbol) - locally(build.flagsFromBits(??? : Long): FlagSet) - locally(build.emptyValDef: ValDef) - locally(build.This(sym): Tree) - locally(build.Select(tree, sym): Select) - locally(build.Ident(sym): Ident) - locally(build.TypeTree(tpe): TypeTree) - locally(build.thisPrefix(sym): Type) - locally(build.setType(tree, tpe): Tree) - locally(build.setSymbol(tree, sym): Tree) -} \ No newline at end of file diff --git a/test/files/pos/sam_erasure_boundedwild.scala b/test/files/pos/sam_erasure_boundedwild.scala index 1ec27e0ea4e6..dc8adc787f2a 100644 --- a/test/files/pos/sam_erasure_boundedwild.scala +++ b/test/files/pos/sam_erasure_boundedwild.scala @@ -8,4 +8,4 @@ class Test { val x : Q[_] = ??? 
x.toArray // crashes while doing overload resolution } -} \ No newline at end of file +} diff --git a/test/files/pos/sammy_ctor_arg.scala b/test/files/pos/sammy_ctor_arg.scala index 3c556d59f006..c31d0371f748 100644 --- a/test/files/pos/sammy_ctor_arg.scala +++ b/test/files/pos/sammy_ctor_arg.scala @@ -1,4 +1,4 @@ trait Fun[A, B] { def apply(a: A): B } // can't do sam expansion until the sam body def is a static method in the sam class, and not a local method in a block' class C(f: Fun[Int, String]) -class Test extends C(s => "a") \ No newline at end of file +class Test extends C(s => "a") diff --git a/test/files/pos/sammy_infer_argtype_subtypes.scala b/test/files/pos/sammy_infer_argtype_subtypes.scala index 63966f879e58..86d8556c895e 100644 --- a/test/files/pos/sammy_infer_argtype_subtypes.scala +++ b/test/files/pos/sammy_infer_argtype_subtypes.scala @@ -3,4 +3,4 @@ trait Fun[A, B] { def apply(a: A): B } class SamInferResult { def foreach[U](f: Fun[String, U]): U = ??? def foo = foreach(println) -} \ No newline at end of file +} diff --git a/test/files/pos/sammy_java8/F.java b/test/files/pos/sammy_java8/F.java new file mode 100644 index 000000000000..5dac57a1e2ae --- /dev/null +++ b/test/files/pos/sammy_java8/F.java @@ -0,0 +1,6 @@ +public interface F { + U apply(T t); + default void yadayada() { + throw new UnsupportedOperationException("yadayada"); + } +} diff --git a/test/files/pos/sammy_java8/Test.scala b/test/files/pos/sammy_java8/Test.scala new file mode 100644 index 000000000000..61fcf4f0ce4f --- /dev/null +++ b/test/files/pos/sammy_java8/Test.scala @@ -0,0 +1,4 @@ +class T { + def app[T, U](x: T)(f: F[T, U]): U = f(x) + app(1)(x => List(x)) +} diff --git a/test/files/pos/sammy_override.scala b/test/files/pos/sammy_override.scala index a1d0651c3982..62d7b5143100 100644 --- a/test/files/pos/sammy_override.scala +++ b/test/files/pos/sammy_override.scala @@ -5,4 +5,4 @@ trait IntConsumer { object Test { def anyConsumer(x: Any): Unit = ??? 
val f: IntConsumer = anyConsumer -} \ No newline at end of file +} diff --git a/test/files/pos/sammy_refined.scala b/test/files/pos/sammy_refined.scala new file mode 100644 index 000000000000..22f34d5a587c --- /dev/null +++ b/test/files/pos/sammy_refined.scala @@ -0,0 +1,22 @@ +trait DepFn[-A] { + type Out + def apply(in: A): Out +} + +object DepFn { + type Aux[-A, B] = DepFn[A] { type Out = B } + type AuxF[F[_], A] = Aux[F[A], F[A]] { type B >: A } + val length: DepFn[String] { type Out = Int } = _.length + val upper: Aux[String, String] = _.toUpperCase + val reverse: AuxF[List, Int] = _.reverse +} + +class Outer { + // T here does not compile to a SAM in bytecode, + // because of the outer reference to the enclosing class. + trait T { def f(x: Int): Int } + val t1: T = x => x + val t2: T { type U = String } = x => x + val t3: T { type U } = x => x + val t4: (T { type U }) { type V } = x => x +} diff --git a/test/files/pos/sammy_single.scala b/test/files/pos/sammy_single.scala index 7a3d2729830b..c64e9c2b0812 100644 --- a/test/files/pos/sammy_single.scala +++ b/test/files/pos/sammy_single.scala @@ -6,4 +6,4 @@ object Test { trait T { def apply(x: s.type): s.type } val preservedResult: s.type = ((x => x): T)(s) -} \ No newline at end of file +} diff --git a/test/files/pos/sammy_twice.scala b/test/files/pos/sammy_twice.scala index c91f5b9fd206..088106e9c164 100644 --- a/test/files/pos/sammy_twice.scala +++ b/test/files/pos/sammy_twice.scala @@ -6,4 +6,4 @@ class C { def app[T, U](x: T)(f: F[T, U]): U = f(x) app(1)(x => List(x)) app(2)(x => List(x)) -} \ No newline at end of file +} diff --git a/test/files/pos/scoping3.scala b/test/files/pos/scoping3.scala index f6d531dbb237..55fd32d3bd89 100644 --- a/test/files/pos/scoping3.scala +++ b/test/files/pos/scoping3.scala @@ -16,6 +16,6 @@ object CI { type TreeNode <: ITreeNode with ITreeNodeExp } abstract class SimpleTreeDisplay extends TreeDisplay { self: TreeDisplayFinal => - def display() { this.getRoot().display() } + 
def display(): Unit = { this.getRoot().display() } } } diff --git a/test/files/pos/sd219.scala b/test/files/pos/sd219.scala index 3c3f4962f0b9..b4484626df05 100644 --- a/test/files/pos/sd219.scala +++ b/test/files/pos/sd219.scala @@ -8,4 +8,4 @@ trait CommonPrintUtils { trait CompilerProvider { val global: Global = ??? } -class AbstractPrinter extends CommonPrintUtils with CompilerProvider \ No newline at end of file +class AbstractPrinter extends CommonPrintUtils with CompilerProvider diff --git a/test/files/pos/sd467.scala b/test/files/pos/sd467.scala new file mode 100644 index 000000000000..f6193346fe5b --- /dev/null +++ b/test/files/pos/sd467.scala @@ -0,0 +1,63 @@ +import scala.annotation.tailrec + +class TestA { + @tailrec + final def loop0(i: Int): this.type = { + if(i == 0) this + else loop0(i-1) + } + + @tailrec + final def loop1(i: Int, self: this.type): Int = { + if(i == 0) 0 + else loop1(i-1, this) + } + + @tailrec + final def loop2(i: Int, self: this.type): this.type = { + if(i == 0) this + else loop2(i-1, this) + } +} + +object TestB { + object Done + + @tailrec + def loop0(i: Int): Done.type = { + if(i == 0) Done + else loop0(i-1) + } + + @tailrec + def loop1(i: Int, done: Done.type): Int = { + if(i == 0) 0 + else loop1(i-1, Done) + } + + @tailrec + def loop2(i: Int, done: Done.type): Done.type = { + if(i == 0) done + else loop2(i-1, done) + } +} + +object TestC { + @tailrec + def loop0(i: Int): 0 = { + if(i == 0) 0 + else loop0(i-1) + } + + @tailrec + def loop1(i: Int, zero: 0): Int = { + if(i == 0) 0 + else loop1(i-1, 0) + } + + @tailrec + def loop2(i: Int, zero: 0): 0 = { + if(i == 0) 0 + else loop2(i-1, 0) + } +} diff --git a/test/files/pos/setter-not-implicit.scala b/test/files/pos/setter-not-implicit.scala index c499f4a124a8..4cb6b5cd88bd 100644 --- a/test/files/pos/setter-not-implicit.scala +++ b/test/files/pos/setter-not-implicit.scala @@ -1,4 +1,5 @@ -// scalac: -feature -Xfatal-warnings + +//> using options -feature -Xfatal-warnings object 
O { implicit var x: Int = 0 } diff --git a/test/files/pos/sip23-aliasing.scala b/test/files/pos/sip23-aliasing.scala new file mode 100644 index 000000000000..d61052fb754d --- /dev/null +++ b/test/files/pos/sip23-aliasing.scala @@ -0,0 +1,36 @@ +object Test { + trait Foo0 { + type T0 + } + + object Foo0 { + type Aux[T] = Foo0 {type T0 = T} + implicit def apply[T](implicit v: ValueOf[T]): Aux[T] = new Foo0 { + type T0 = T + } + } + + object Bar { + type Aux[T] = Foo0 { type T0 = T } + } + + type Foo[T] = Foo0 { type T0 = T } + val Foo = Foo0 + + Foo[5] //OK + implicitly[Foo.Aux[5]] //OK! + implicitly[Foo[5]] //implicit not found error! + + + val three: 3 = 3 + type Three = three.type + Foo[Three] + implicitly[Foo.Aux[Three]] //works + implicitly[Foo[Three]] //implicit not found + + final object bar + type Bar = bar.type + Foo[Bar] + implicitly[Foo.Aux[Bar]] //works + implicitly[Foo[Bar]] //implicit not found +} diff --git a/test/files/pos/sip23-any.scala b/test/files/pos/sip23-any.scala new file mode 100644 index 000000000000..b91c8278b52a --- /dev/null +++ b/test/files/pos/sip23-any.scala @@ -0,0 +1,18 @@ +// See https://github.com/typelevel/scala/issues/139 + +import scala.collection.mutable + +object Test { + // Compiles, as expected + def pass(a: AnyRef): mutable.Buffer[_ <: a.type] = { + mutable.Buffer.empty[a.type] + } + + // Expect compile, got error + def error(a: Any): mutable.Buffer[_ <: a.type] = { + mutable.Buffer.empty[a.type] + } + + class Foo[T] + def error1(a: Any): Foo[_ <: a.type] = new Foo[a.type] +} diff --git a/test/files/pos/sip23-bounds.scala b/test/files/pos/sip23-bounds.scala new file mode 100644 index 000000000000..b44f93265dd1 --- /dev/null +++ b/test/files/pos/sip23-bounds.scala @@ -0,0 +1,7 @@ +object Test { + class Covar[-A] + + def foo[A, CC[X] <: Option[X]]: Covar[CC[_ <: A]] = ??? 
+ + val bar: Covar[Option[Int]] = foo +} diff --git a/test/files/pos/sip23-final.scala b/test/files/pos/sip23-final.scala new file mode 100644 index 000000000000..a92fe791681c --- /dev/null +++ b/test/files/pos/sip23-final.scala @@ -0,0 +1,8 @@ +object C { + final val x = "abc" + + implicit def convert(p: x.type): Int = 123 + + x: Int +} + diff --git a/test/files/pos/sip23-folding.scala b/test/files/pos/sip23-folding.scala new file mode 100644 index 000000000000..ca6b714b1fdc --- /dev/null +++ b/test/files/pos/sip23-folding.scala @@ -0,0 +1,25 @@ +object Test { + { val _ = 0; 0 }: 0 // compiles + + { 0; 0 }: 0 // compiles + + { val 0 = 0; 0 }: 0 // compiles + + { (); 0 }: 0 // compiles + + { val _ = (); 0 }: 0 // compiles + + { val () = (); 0 }: 0 // compiles + + ({ val _ = 0; 0 } + 0): 0 // compiles + + ({ val _ = (); 0 } + 0): 0 // compiles + + ({ val () = (); 0 } + 0): 0 // does not compile + + ({ (); 0 } + 0): 0 // does not compile + + ({ 0; 0 } + 0): 0 // does not compile + + ({ val 0 = 0; 0 } + 0): 0 // does not compile +} diff --git a/test/files/pos/sip23-named-default.scala b/test/files/pos/sip23-named-default.scala new file mode 100644 index 000000000000..281f672211f8 --- /dev/null +++ b/test/files/pos/sip23-named-default.scala @@ -0,0 +1,19 @@ +object Test { + case class Tag0[T <: Singleton](a: T) + Tag0("a").copy() + + case class Tag1[T](a: T) + Tag1("a").copy() + + def foo[T <: Singleton](a: T = "a"): T = a + val v0: "a" = foo() + + def bar(a: "a" = "a"): "a" = a + val v1: "a" = bar() + + def baz[T](a: T = "a"): T = a + val v2: "a" = baz() + + def id[T](a: T): T = a + val v3: "a" = id("a") +} diff --git a/test/files/pos/sip23-narrow-no-empty-refinements.scala b/test/files/pos/sip23-narrow-no-empty-refinements.scala new file mode 100644 index 000000000000..10980bef0e33 --- /dev/null +++ b/test/files/pos/sip23-narrow-no-empty-refinements.scala @@ -0,0 +1,162 @@ +object Test { + type Id[T] = T + + def one[T <: 1](t: T): T = t + final val o = one(1) + o: 1 
+ + def narrow[T <: Singleton](t: T): Id[T] = t + final val fn0 = narrow(23) + fn0: 23 + + val n0 = narrow(23) + n0: 23 + + def id[T](t: T): T = t + + final val fi0 = id(23) + fi0: Int + final val fi1 = id[23](23) + fi1: 23 + final val fi2 = id(23: Id[23]) + fi2: 23 + final val fi3 = id(narrow(23)) + fi3: 23 + + val i0 = id(23) + i0: Int + val i1 = id[23](23) + i1: Int + val i2 = id(23: Id[23]) + i2: 23 + val i3 = id(narrow(23)) + i3: 23 + + def opt[T](t: T): Option[T] = Some(t) + + final val fo0 = opt(23) + fo0: Option[Int] + final val fo1 = opt[23](23) + fo1: Option[23] + final val fo2 = opt(23: Id[23]) + fo2: Option[23] + final val fo3 = opt(narrow(23)) + fo3: Option[23] + + val o0 = opt(23) + o0: Option[Int] + val o1 = opt[23](23) + o1: Option[23] + val o2 = opt(23: Id[23]) + o2: Option[23] + val o3 = opt(narrow(23)) + o3: Option[23] + + sealed trait HList + final case class ::[+H, +T <: HList](h: H, t: T) extends HList { + def ::[HH](h: HH): HH :: H :: T = Test.::(h, this) + } + sealed trait HNil extends HList { + def ::[H](h: H): H :: HNil = Test.::(h, this) + } + object HNil extends HNil + + val l0 = 23 :: "foo" :: true :: HNil + l0: Int :: String :: Boolean :: HNil + + val l1 = narrow(23) :: narrow("foo") :: narrow(true) :: HNil + l1: 23 :: "foo" :: true :: HNil + + def bar[T](t: T): t.type = t + + final val b0 = bar(23) + b0: 23 + + trait Skidoo[T] { type T <: Boolean } + object Skidoo extends Skidoo0 { + implicit def twentyThree: Skidoo[23] { type T = true } = ??? + } + trait Skidoo0 { + implicit def default[T <: Int]: Skidoo[T] { type T = false } = ??? + } + + def skidoo1(i: Int)(implicit s: Skidoo[i.type]): s.T = ??? + skidoo1(23): true + skidoo1(13): false + skidoo1(narrow(23)): true + skidoo1(narrow(13)): false + + def skidoo2[T](t: T)(implicit s: Skidoo[T]): s.T = ??? + skidoo2(23): false + skidoo2(13): false + skidoo2(narrow(23)): true + skidoo2(narrow(13)): false + + def skidoo3[T <: Singleton](t: T)(implicit s: Skidoo[T]): s.T = ??? 
+ skidoo3(23): true + skidoo3(13): false + skidoo3(narrow(23)): true + skidoo3(narrow(13)): false + + implicit class NarrowSyntax[T <: Singleton](val t: T) extends AnyVal { + def narrow: Id[T] = t + } + + val ns0 = 23.narrow + ns0: 23 + val ns1 = 23L.narrow + ns1: 23L + val ns2 = 23.0F.narrow + ns2: 23F + val ns3 = 23.0.narrow + ns3: 23.0 + val ns4 = true.narrow + ns4: true + val ns5 = '*'.narrow + ns5: '*' + val ns6 = "foo".narrow + ns6: "foo" + + sealed trait Nat + sealed trait Succ[N <: Nat] extends Nat + sealed trait _0 extends Nat + object _0 extends _0 + type _1 = Succ[_0] + object _1 extends _1 + type _2 = Succ[_1] + object _2 extends _2 + type _3 = Succ[_2] + object _3 extends _3 + + object Nat { + implicit def zero(i: 0): _0 = _0 + implicit def one(i: 1): _1 = _1 + implicit def two(i: 2): _2 = _2 + implicit def three(i: 3): _3 = _3 + } + + trait Unroll[-N <: Nat] { + type Out <: HList + } + + object Unroll { + implicit def zero: Unroll[_0] { type Out = HNil } = ??? + implicit def succ[N <: Nat](implicit un: Unroll[N]): Unroll[Succ[N]] { type Out = Int :: un.Out } = ??? + } + + def unroll(n: Nat)(implicit u: Unroll[n.type]): u.Out = ??? 
+ + val u0 = unroll(0) + u0: HNil + val u1 = unroll(1) + u1: Int :: HNil + val u2 = unroll(2) + u2: Int :: Int :: HNil + val u3 = unroll(3) + u3: Int :: Int :: Int :: HNil + + type SInt = Int with Singleton + def narrowAliased[A <: SInt](x: A): Id[A] = x + val na = narrowAliased(5) + na: 5 +} diff --git a/test/files/pos/sip23-narrow.scala b/test/files/pos/sip23-narrow.scala new file mode 100644 index 000000000000..a1931441dbf0 --- /dev/null +++ b/test/files/pos/sip23-narrow.scala @@ -0,0 +1,165 @@ +object Test { + def one[T <: 1](t: T): T = t + final val o = one(1) + o: 1 + + def narrow[T <: Singleton](t: T): T {} = t + final val fn0 = narrow(23) + fn0: 23 + + val n0 = narrow(23) + n0: 23 + + def id[T](t: T): T = t + + final val fi0 = id(23) + fi0: Int + final val fi1 = id[23](23) + fi1: 23 + final val fi2 = id(23: 23 {}) + fi2: 23 + final val fi3 = id(narrow(23)) + fi3: 23 + + val i0 = id(23) + i0: Int + val i1 = id[23](23) + i1: Int + val i2 = id(23: 23 {}) + i2: 23 + val i3 = id(narrow(23)) + i3: 23 + + def opt[T](t: T): Option[T] = Some(t) + + final val fo0 = opt(23) + fo0: Option[Int] + final val fo1 = opt[23](23) + fo1: Option[23] + final val fo2 = opt(23: 23 {}) + fo2: Option[23] + final val fo3 = opt(narrow(23)) + fo3: Option[23] + + val o0 = opt(23) + o0: Option[Int] + val o1 = opt[23](23) + o1: Option[23] + val o2 = opt(23: 23 {}) + o2: Option[23] + val o3 = opt(narrow(23)) + o3: Option[23] + + sealed trait HList + final case class ::[+H, +T <: HList](h: H, t: T) extends HList { + def ::[HH](h: HH): HH :: H :: T = Test.::(h, this) + } + sealed trait HNil extends HList { + def ::[H](h: H): H :: HNil = Test.::(h, this) + } + object HNil extends HNil + + val l0 = 23 :: "foo" :: true :: HNil + l0: Int :: String :: Boolean :: HNil + + val l1 = narrow(23) :: narrow("foo") :: narrow(true) :: HNil + l1: 23 :: "foo" :: true :: HNil + + def bar[T](t: T): t.type = t + + final val b0 = bar(23) + b0: 23 + + trait Skidoo[T] { type T <: Boolean } + object Skidoo extends 
Skidoo0 { + // Ideally we would have, + // implicit def twentyThree: Skidoo[23] { type T = true } = ??? + // however this the empty refinement returned by narrow interferes + // with the commented example below. Using Id instead of and {} + // solves this problem. + implicit def twentyThree[T <: 23]: Skidoo[T] { type T = true } = ??? + } + trait Skidoo0 { + implicit def default[T <: Int]: Skidoo[T] { type T = false } = ??? + } + + def skidoo1(i: Int)(implicit s: Skidoo[i.type]): s.T = ??? + skidoo1(23): true + skidoo1(13): false + skidoo1(narrow(23)): true // This requires the <: 23 bound + skidoo1(narrow(13)): false + + def skidoo2[T](t: T)(implicit s: Skidoo[T]): s.T = ??? + skidoo2(23): false + skidoo2(13): false + skidoo2(narrow(23)): true + skidoo2(narrow(13)): false + + def skidoo3[T <: Singleton](t: T)(implicit s: Skidoo[T]): s.T = ??? + skidoo3(23): true + skidoo3(13): false + skidoo3(narrow(23)): true + skidoo3(narrow(13)): false + + implicit class NarrowSyntax[T <: Singleton](val t: T) extends AnyVal { + def narrow: T {} = t + } + + val ns0 = 23.narrow + ns0: 23 + val ns1 = 23L.narrow + ns1: 23L + val ns2 = 23.0F.narrow + ns2: 23F + val ns3 = 23.0.narrow + ns3: 23.0 + val ns4 = true.narrow + ns4: true + val ns5 = '*'.narrow + ns5: '*' + val ns6 = "foo".narrow + ns6: "foo" + + sealed trait Nat + sealed trait Succ[N <: Nat] extends Nat + sealed trait _0 extends Nat + object _0 extends _0 + type _1 = Succ[_0] + object _1 extends _1 + type _2 = Succ[_1] + object _2 extends _2 + type _3 = Succ[_2] + object _3 extends _3 + + object Nat { + implicit def zero(i: 0): _0 = _0 + implicit def one(i: 1): _1 = _1 + implicit def two(i: 2): _2 = _2 + implicit def three(i: 3): _3 = _3 + } + + trait Unroll[-N <: Nat] { + type Out <: HList + } + + object Unroll { + implicit def zero: Unroll[_0] { type Out = HNil } = ??? + implicit def succ[N <: Nat](implicit un: Unroll[N]): Unroll[Succ[N]] { type Out = Int :: un.Out } = ??? 
+ } + + def unroll(n: Nat)(implicit u: Unroll[n.type]): u.Out = ??? + + val u0 = unroll(0) + u0: HNil + val u1 = unroll(1) + u1: Int :: HNil + val u2 = unroll(2) + u2: Int :: Int :: HNil + val u3 = unroll(3) + u3: Int :: Int :: Int :: HNil + + type SInt = Int with Singleton + def narrowAliased[A <: SInt](x: A): A {} = x + val na = narrowAliased(5) + na: 5 +} diff --git a/test/files/pos/sip23-negative-literals.scala b/test/files/pos/sip23-negative-literals.scala new file mode 100644 index 000000000000..26ce7a783e23 --- /dev/null +++ b/test/files/pos/sip23-negative-literals.scala @@ -0,0 +1,9 @@ +object Test { + type ~~[A, B] + type nonNeg = 2 ~~ 2 + + type neg0 = -2 + type neg1 = -2 ~~ 2 + type neg2 = 2 ~~ -2 + type neg3 = -2 ~~ -2 +} diff --git a/test/files/pos/sip23-no-widen.scala b/test/files/pos/sip23-no-widen.scala new file mode 100644 index 000000000000..de7e25900980 --- /dev/null +++ b/test/files/pos/sip23-no-widen.scala @@ -0,0 +1,8 @@ +object Test { + final val a : 1 = 1 + final val fails : 2 = a + a //fail + final val works : 2 = a + 1 + + final val aok = 1 + final val also_works : 2 = aok + aok +} diff --git a/test/files/pos/sip23-numeric-lub.scala b/test/files/pos/sip23-numeric-lub.scala new file mode 100644 index 000000000000..7da2e02afa71 --- /dev/null +++ b/test/files/pos/sip23-numeric-lub.scala @@ -0,0 +1,3 @@ +class C { + def foo(x: Boolean) = if (x) 1 else 0 +} diff --git a/test/files/pos/sip23-override.scala b/test/files/pos/sip23-override.scala new file mode 100644 index 000000000000..0e1bf411cc48 --- /dev/null +++ b/test/files/pos/sip23-override.scala @@ -0,0 +1,29 @@ +trait Overridden { + val f0 = 4 + val f1: Int = 4 + val f2: 4 = 4 + + def f3 = 4 + def f4: Int = 4 + def f5: 4 = 4 +} + +class Overrider0 extends Overridden { + override val f0 = 4 + override val f1: Int = 4 + override val f2: 4 = 4 + + override def f3 = 4 + override def f4: Int = 4 + override def f5: 4 = 4 +} + +class Overrider1 extends Overridden { + override val f0 = 5 + 
override val f1: 5 = 5 + //override val f2: 5 = 5 + + override def f3 = 5 + override def f4: 5 = 5 + //override def f5: 5 = 5 +} diff --git a/test/files/pos/sip23-singleton-conv.scala b/test/files/pos/sip23-singleton-conv.scala new file mode 100644 index 000000000000..fabd5f4650e3 --- /dev/null +++ b/test/files/pos/sip23-singleton-conv.scala @@ -0,0 +1,22 @@ +object Test { + sealed trait Nat + sealed trait Succ[N <: Nat] extends Nat + sealed trait _0 extends Nat + object _0 extends _0 + + object Nat { + implicit def zero(i: 0): _0 = _0 + } + + trait Unroll[-N <: Nat] { + type Out + } + + object Unroll { + implicit def zero: Unroll[_0] { type Out = Int } = ??? + } + + def unroll(n: Nat)(implicit u: Unroll[n.type]): u.Out = ??? + val u0 = unroll(0) + u0: Int +} diff --git a/test/files/pos/sip23-singleton-sub.scala b/test/files/pos/sip23-singleton-sub.scala new file mode 100644 index 000000000000..16794f6f203d --- /dev/null +++ b/test/files/pos/sip23-singleton-sub.scala @@ -0,0 +1,17 @@ +object Test { + val a: Singleton = 1 + val b: Singleton = 1L + val c: Singleton = 1.0 + val d: Singleton = 1.0F + val e: Singleton = true + val f: Singleton = 'c' + val g: Singleton = "foo" + + implicitly[1 <:< Singleton] + implicitly[1L <:< Singleton] + implicitly[1.0 <:< Singleton] + implicitly[1.0F <:< Singleton] + implicitly[true <:< Singleton] + implicitly['c' <:< Singleton] + implicitly["foo" <:< Singleton] +} diff --git a/test/files/pos/sip23-singleton-view.scala b/test/files/pos/sip23-singleton-view.scala new file mode 100644 index 000000000000..735173cacb9d --- /dev/null +++ b/test/files/pos/sip23-singleton-view.scala @@ -0,0 +1,6 @@ +import language.implicitConversions + +class Test { + implicit def singletonToString(c: Singleton): String = "" + def foo(a: 1): String = a // implicit was being ruled out because Int(1).widen was not a subclass of Singletom +} diff --git a/test/files/pos/sip23-strings.scala b/test/files/pos/sip23-strings.scala new file mode 100644 index 
000000000000..91b260ea6657 --- /dev/null +++ b/test/files/pos/sip23-strings.scala @@ -0,0 +1,17 @@ +object Test { + val s0: """foo""" = """foo""" + type broken = +""" +foo +bar +""" + + val s1: broken = +""" +foo +bar +""" + + type escaped = "\nBar\n" + val s2: escaped = "\nBar\n" +} diff --git a/test/files/pos/sip23-valueof-alias.scala b/test/files/pos/sip23-valueof-alias.scala new file mode 100644 index 000000000000..a699627f2180 --- /dev/null +++ b/test/files/pos/sip23-valueof-alias.scala @@ -0,0 +1,15 @@ +object Test { + valueOf[1] + + type SOne = 1 + + valueOf[SOne] + + val one : 1 = 1 + + valueOf[one.type] + + type SOne1 = one.type + + valueOf[SOne1] +} diff --git a/test/files/pos/sip23-valueof-covariance.scala b/test/files/pos/sip23-valueof-covariance.scala new file mode 100644 index 000000000000..58fc48a4115f --- /dev/null +++ b/test/files/pos/sip23-valueof-covariance.scala @@ -0,0 +1,7 @@ +object Test { + trait Foo[+A] + implicit def foo[A <: Singleton](implicit v: ValueOf[A]): Foo[A] = new Foo[A] { } + val s = "" + implicitly[ValueOf[s.type]] // works + implicitly[Foo[s.type]] // doesn't work +} diff --git a/test/files/pos/sip23-valueof-this.scala b/test/files/pos/sip23-valueof-this.scala new file mode 100644 index 000000000000..48266d9f75f3 --- /dev/null +++ b/test/files/pos/sip23-valueof-this.scala @@ -0,0 +1,13 @@ +object Test1 { + valueOf[this.type] +} + +class Test2 { + valueOf[this.type] +} + +class Test3 { self => + valueOf[self.type] +} + +class A { outerSelf => class B { valueOf[outerSelf.type] } } diff --git a/test/files/pos/sip23-widen.scala b/test/files/pos/sip23-widen.scala new file mode 100644 index 000000000000..a77d8bd8376a --- /dev/null +++ b/test/files/pos/sip23-widen.scala @@ -0,0 +1,82 @@ +object Test { + val f0 = 4 + f0: Int + //f0: 4 + + final val f1: 4 = 4 + f1: Int + f1: 4 + + final val f1b = 4 + f1b: Int + f1b: 4 + + val f2 = () => 4 + f2: (() => Int) + //f2: (() => 4) + + final val f3 = () => 4 + f3: (() => Int) + //f3: (() => 4) 
+ + val f4: () => 4 = () => 4 + + def foo[T](f: () => T)(t: T): T = t + + val f5 = foo(() => 4)(4) + f5: Int + //f5: 4 + + val f6 = foo(() => 4)(5) + f6: Int + //f6: 4 + + def bar[T <: Singleton](f: () => T)(t: T): T = t + + final val f7 = bar(() => 4)(4) + f7: Int + f7: 4 + + // found 5, required 4 + //val f8 = bar(() => 4)(5) + + val f9 = () => (4, () => 5) + f9: (() => (Int, () => Int)) + //f9: (() => (4, () => 5)) + + val f10: () => (4, () => 5) = () => (4, () => 5) + + var f11 = 4 + f11: Int + //f11: 4 + f11 = 5 + + final var f12 = 4 + f12: Int + //f12: 4 + f12 = 5 + + final var f13: 4 = 4 + f13: Int + f13: 4 + //f13 = 5 + + final val one = 1 + final val compiles: 2 = one + 1 + + final val literalOne: 1 = 1 + final val alsoCompiles: 2 = literalOne + 1 + + final val recFive : 5 = recFive + 0 + + val annot0 = 1: @unchecked + annot0: Int + annot0: Int @unchecked + //annot0: 1 @unchecked + + final val annot1 = 1: @unchecked + annot1: Int + annot1: Int @unchecked + annot1: 1 + annot1: 1 @unchecked +} diff --git a/test/files/pos/skunky-expansion.scala b/test/files/pos/skunky-expansion.scala new file mode 100644 index 000000000000..15f84bb9234b --- /dev/null +++ b/test/files/pos/skunky-expansion.scala @@ -0,0 +1,31 @@ +//> using options -Werror -Wnonunit-statement +// +import scala.reflect.macros._ +import scala.reflect.api.TypeCreator + +abstract trait Encoder[A] extends scala.AnyRef; +object StringContextOps extends scala.AnyRef { + class StringOpsMacros(c: scala.reflect.macros.whitebox.Context) extends scala.AnyRef { + def sql_impl(argSeq: StringOpsMacros.this.c.universe.Tree*): AnyRef = { + val EncoderType: StringOpsMacros.this.c.universe.Type = StringOpsMacros.this.c.universe.typeOf[Encoder[_]](({ + val $u: StringOpsMacros.this.c.universe.type = StringOpsMacros.this.c.universe; + val $m: $u.Mirror = StringOpsMacros.this.c.universe.rootMirror; + $u.TypeTag.apply[Encoder[_]]($m, { + final class $typecreator1 extends TypeCreator { + def apply[U <: 
scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = { + val $u: U = $m$untyped.universe; + val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror]; + val symdef$EncoderType1: $u.Symbol = $u.internal.reificationSupport.newNestedSymbol($u.internal.reificationSupport.selectTerm($u.internal.reificationSupport.selectType($m.staticModule("StringContextOps").asModule.moduleClass, "StringOpsMacros"), "sql_impl"), $u.TermName.apply("EncoderType"), $u.NoPosition, $u.internal.reificationSupport.FlagsRepr.apply(549755813888L), false); + val symdef$_$11: $u.Symbol = $u.internal.reificationSupport.newNestedSymbol(symdef$EncoderType1, $u.TypeName.apply("_$1"), $u.NoPosition, $u.internal.reificationSupport.FlagsRepr.apply(34359738384L), false); + $u.internal.reificationSupport.setInfo[$u.Symbol](symdef$EncoderType1, $u.NoType); + $u.internal.reificationSupport.setInfo[$u.Symbol](symdef$_$11, $u.internal.reificationSupport.TypeBounds($m.staticClass("scala.Nothing").asType.toTypeConstructor, $m.staticClass("scala.Any").asType.toTypeConstructor)); + $u.internal.reificationSupport.ExistentialType(scala.collection.immutable.List.apply[$u.Symbol](symdef$_$11), $u.internal.reificationSupport.TypeRef($u.internal.reificationSupport.thisPrefix($m.EmptyPackageClass), $m.staticClass("Encoder"), scala.collection.immutable.List.apply[$u.Type]($u.internal.reificationSupport.TypeRef($u.NoPrefix, symdef$_$11, scala.collection.immutable.Nil)))) + } + }; + new $typecreator1() + }) + }: StringOpsMacros.this.c.universe.TypeTag[Encoder[_]])); + argSeq.head + } + } +} diff --git a/test/files/pos/skunky.scala b/test/files/pos/skunky.scala new file mode 100644 index 000000000000..98a8604b2f43 --- /dev/null +++ b/test/files/pos/skunky.scala @@ -0,0 +1,15 @@ +//> using options -Werror -Wnonunit-statement + +import scala.reflect.macros._ + +trait Encoder[A] + +object StringContextOps { + class StringOpsMacros(val c: whitebox.Context) { + import c.universe._ + def 
sql_impl(argSeq: Tree*): Tree = { + val EncoderType = typeOf[Encoder[_]] + argSeq.head + } + } +} diff --git a/test/files/pos/spec-annotations.scala b/test/files/pos/spec-annotations.scala index b23abf48e8cf..0357e070132c 100644 --- a/test/files/pos/spec-annotations.scala +++ b/test/files/pos/spec-annotations.scala @@ -1,7 +1,6 @@ class ann(i: Int) extends scala.annotation.Annotation // annotations on abstract types -abstract class C1[@annotation.elidable(0) +T, U, V[_]] abstract class C2[@deprecated @ann(1) T <: Number, V] diff --git a/test/files/pos/spec-arrays.scala b/test/files/pos/spec-arrays.scala index 883bc2fe62fc..10daaba18045 100644 --- a/test/files/pos/spec-arrays.scala +++ b/test/files/pos/spec-arrays.scala @@ -1,6 +1,6 @@ abstract class AbsArray[T] { def apply(idx: Int): T - def update(idx: Int, elem: T) + def update(idx: Int, elem: T): Unit def length: Int def applyByte(idx: Int): Byte = apply(idx).asInstanceOf[Byte] def updateByte(idx: Int, elem: Byte) = update(idx, elem.asInstanceOf[T]) @@ -44,8 +44,8 @@ class SpecArray[@specialized T](arr: Array[T]) extends AbsArray[T] { abstract class Test { def sum(): Int - def modify(i: Int) - def run() { + def modify(i: Int): Unit + def run(): Unit = { var s = 0 for (i <- 1 to 1000000) { s += sum() diff --git a/test/files/pos/spec-asseenfrom.scala b/test/files/pos/spec-asseenfrom.scala index ede579170956..5c412dd96924 100644 --- a/test/files/pos/spec-asseenfrom.scala +++ b/test/files/pos/spec-asseenfrom.scala @@ -14,7 +14,7 @@ class Automaton[@specialized(Double) W,State] { def finalStateWeights() = { val it = allStates.iterator; while(it.hasNext) { - finalWeight(it.next); + finalWeight(it.next()); } } */ diff --git a/test/files/pos/spec-constr-new.scala b/test/files/pos/spec-constr-new.scala index 7beff91d8d72..c6acc862a0ef 100644 --- a/test/files/pos/spec-constr-new.scala +++ b/test/files/pos/spec-constr-new.scala @@ -6,4 +6,4 @@ class SparseArray2[@specialized(Int) T:ClassTag](val maxSize: Int, initialLength 
// comment out to compile correctly data.length + 3; -} \ No newline at end of file +} diff --git a/test/files/pos/spec-constr-old.scala b/test/files/pos/spec-constr-old.scala index e908b65a415f..c78d4d80ba4e 100644 --- a/test/files/pos/spec-constr-old.scala +++ b/test/files/pos/spec-constr-old.scala @@ -1,3 +1,5 @@ +import scala.reflect.ClassManifest + class SparseArray2[@specialized(Int) T:ClassManifest](val maxSize: Int, initialLength:Int = 3) { private var data = new Array[T](initialLength); private var index = new Array[Int](initialLength); diff --git a/test/files/pos/spec-doubledef-new.scala b/test/files/pos/spec-doubledef-new.scala index 589ceb33b286..de438d6e9408 100644 --- a/test/files/pos/spec-doubledef-new.scala +++ b/test/files/pos/spec-doubledef-new.scala @@ -27,4 +27,4 @@ abstract class B[T, @specialized(scala.Int) U : TypeTag, @specialized(scala.Int) (u, v2) } } -} \ No newline at end of file +} diff --git a/test/files/pos/spec-fields-new.scala b/test/files/pos/spec-fields-new.scala index de75b4b748d0..2163e654ed0e 100644 --- a/test/files/pos/spec-fields-new.scala +++ b/test/files/pos/spec-fields-new.scala @@ -9,4 +9,4 @@ abstract class Foo[@specialized T: ClassTag, U <: Ordered[U]](x: T, size: Int) { def getZ = z def setZ(zz: T) = z = zz -} \ No newline at end of file +} diff --git a/test/files/pos/spec-fields-old.scala b/test/files/pos/spec-fields-old.scala index 26a8c4ffbd12..3b225fc5aeae 100644 --- a/test/files/pos/spec-fields-old.scala +++ b/test/files/pos/spec-fields-old.scala @@ -1,3 +1,5 @@ +import scala.reflect.ClassManifest + abstract class Foo[@specialized T: ClassManifest, U <: Ordered[U]](x: T, size: Int) { var y: T var z: T = x diff --git a/test/files/pos/spec-funs.scala b/test/files/pos/spec-funs.scala index f92ae9899d4e..a742d81aaf16 100644 --- a/test/files/pos/spec-funs.scala +++ b/test/files/pos/spec-funs.scala @@ -15,7 +15,7 @@ final class IntTest { } } - def run() { + def run(): Unit = { val xs = new Array[Int](10000) val f = new 
AbsFunction1[Int, Int] { def apply(x: Int): Int = x * x @@ -42,7 +42,7 @@ final class ClosureTest { } } - def run() { + def run(): Unit = { val xs = new Array[Int](10000) // val f = (x: Int) => x * x for (j <- 0 until niters) { diff --git a/test/files/pos/spec-maps.scala b/test/files/pos/spec-maps.scala index fe214c9580f8..d961110cda30 100644 --- a/test/files/pos/spec-maps.scala +++ b/test/files/pos/spec-maps.scala @@ -3,7 +3,7 @@ trait Fun1[@specialized +R, @specialized -T] { } object Main { - def mapA[@specialized B](xs: Array[B], f: Fun1[B, B]) { + def mapA[@specialized B](xs: Array[B], f: Fun1[B, B]): Unit = { for (i <- 0 until xs.length) xs(i) = f(xs(i)) } diff --git a/test/files/pos/spec-params-old.scala b/test/files/pos/spec-params-old.scala index 33a252120cc1..3bee436e2889 100644 --- a/test/files/pos/spec-params-old.scala +++ b/test/files/pos/spec-params-old.scala @@ -1,3 +1,5 @@ +import scala.reflect.ClassManifest + class Foo[@specialized A: ClassManifest] { // conflicting in bounds, expect a normalized member calling m diff --git a/test/files/pos/spec-partialmap.scala b/test/files/pos/spec-partialmap.scala index 09684e024208..1e944c777043 100644 --- a/test/files/pos/spec-partialmap.scala +++ b/test/files/pos/spec-partialmap.scala @@ -1,17 +1,12 @@ - // ticket #3378, overloaded specialized variants -import scala.collection.{Traversable,TraversableLike}; -import scala.collection.generic.CanBuildFrom; - -trait PartialMap[@specialized A,@specialized B] -extends PartialFunction[A,B] with Iterable[(A,B)] { +import scala.collection.{Iterable,IterableOps}; +trait PartialMap[@specialized A,@specialized B] extends PartialFunction[A,B] with Iterable[(A,B)] { // commenting out this declaration gives a different exception. /** Getter for all values for which the given key function returns true. 
*/ def apply(f : (A => Boolean)) : Iterator[B] = - for ((k,v) <- iterator; if f(k)) yield v; + for ((k,v) <- iterator; if f(k)) yield v // if this is commented, it compiles fine: - def apply[This <: Traversable[A], That](keys : TraversableLike[A,This]) - (implicit bf: CanBuildFrom[This, B, That]) : That = keys.map(apply); + def apply[This <: Iterable[A]](keys : IterableOps[A, Iterable, This]): Iterable[B] = keys.map(apply) } diff --git a/test/files/pos/spec-private.scala b/test/files/pos/spec-private.scala index 7d53bad95545..cd79170ebfb2 100644 --- a/test/files/pos/spec-private.scala +++ b/test/files/pos/spec-private.scala @@ -3,7 +3,7 @@ class Foo { def foo[@specialized(Int) T](x: T) = new Object { private final val myEdges = List(1, 2 , 3) - def boo { + def boo: Unit = { myEdges } } diff --git a/test/files/pos/spec-simple.scala b/test/files/pos/spec-simple.scala index 66a48d155ef2..19b660688f34 100644 --- a/test/files/pos/spec-simple.scala +++ b/test/files/pos/spec-simple.scala @@ -12,7 +12,7 @@ class Foo[@specialized T] { } class Test { - def test { + def test: Unit = { val a = new Foo[Int] val b = new a.Bar[Int] a.foo(10) diff --git a/test/files/pos/spec-sparsearray-new.scala b/test/files/pos/spec-sparsearray-new.scala index df31089fe2ee..9e9bde1b0efc 100644 --- a/test/files/pos/spec-sparsearray-new.scala +++ b/test/files/pos/spec-sparsearray-new.scala @@ -1,7 +1,7 @@ +import scala.collection.{MapFactory, mutable} import scala.reflect.{ClassTag, classTag} -import scala.collection.mutable.MapLike -class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] { +class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapOps[Int, T, collection.mutable.Map, SparseArray[T]] { override def get(x: Int) = { val ind = findOffset(x) if(ind < 0) None else Some(sys.error("ignore")) @@ -16,10 +16,11 @@ class SparseArray[@specialized(Int) 
T:ClassTag] extends collection.mutable.Map[I sys.error("impl doesn't matter") } - override def apply(i : Int) : T = { sys.error("ignore") } - override def update(i : Int, value : T) = sys.error("ignore") - override def empty = new SparseArray[T] - def -=(ind: Int) = sys.error("ignore") - def +=(kv: (Int,T)) = sys.error("ignore") - override final def iterator = sys.error("ignore") + def addOne(elem: (Int, T)): SparseArray.this.type = ??? + def iterator: Iterator[(Int, T)] = ??? + def subtractOne(elem: Int): SparseArray.this.type = ??? + + override protected[this] def fromSpecific(coll: IterableOnce[(Int, T)]): SparseArray[T] = ??? + override protected[this] def newSpecificBuilder: mutable.Builder[(Int, T), SparseArray[T]] = ??? + override def empty: SparseArray[T] = ??? } diff --git a/test/files/pos/spec-sparsearray-old.scala b/test/files/pos/spec-sparsearray-old.scala index e10dabd542ad..2f4c833eed9b 100644 --- a/test/files/pos/spec-sparsearray-old.scala +++ b/test/files/pos/spec-sparsearray-old.scala @@ -1,6 +1,6 @@ -import scala.collection.mutable.MapLike - -class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] { +import scala.reflect.ClassManifest +import scala.collection.{MapFactory, mutable} +class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapOps[Int,T,collection.mutable.Map,SparseArray[T]] { override def get(x: Int) = { val ind = findOffset(x) if(ind < 0) None else Some(sys.error("ignore")) @@ -15,10 +15,11 @@ class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable. 
sys.error("impl doesn't matter") } - override def apply(i : Int) : T = { sys.error("ignore") } - override def update(i : Int, value : T) = sys.error("ignore") - override def empty = new SparseArray[T] - def -=(ind: Int) = sys.error("ignore") - def +=(kv: (Int,T)) = sys.error("ignore") - override final def iterator = sys.error("ignore") + def addOne(elem: (Int, T)): SparseArray.this.type = ??? + def iterator: Iterator[(Int, T)] = ??? + def subtractOne(elem: Int): SparseArray.this.type = ??? + + override protected[this] def fromSpecific(coll: IterableOnce[(Int, T)]): SparseArray[T] = ??? + override protected[this] def newSpecificBuilder: mutable.Builder[(Int, T), SparseArray[T]] = ??? + override def empty: SparseArray[T] = ??? } diff --git a/test/files/pos/spec-super.scala b/test/files/pos/spec-super.scala index 67179e023021..3bb72314ef12 100644 --- a/test/files/pos/spec-super.scala +++ b/test/files/pos/spec-super.scala @@ -1,9 +1,7 @@ -import scala.collection.immutable._ -import scala.collection.mutable.ListBuffer -import scala.collection.generic._ +import scala.collection.BuildFrom -trait Base[+A] extends Traversable[A] { - def add[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Base[A], B, That]): That = { +trait Base[+A] extends Iterable[A] { + def add[B >: A, That](that: Iterable[B])(implicit bf: BuildFrom[Base[A], B, That]): That = { val b = bf(this) b ++= this b ++= that @@ -13,7 +11,7 @@ trait Base[+A] extends Traversable[A] { } abstract class Derived[@specialized +A] extends Base[A] { - override def add[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Base[A], B, That]): That = { + override def add[B >: A, That](that: Iterable[B])(implicit bf: BuildFrom[Base[A], B, That]): That = { val b = bf(this) super.add[B, That](that) } diff --git a/test/files/pos/spec-traits.scala b/test/files/pos/spec-traits.scala index 074f6c3d3c20..c8c8000d8dcd 100644 --- a/test/files/pos/spec-traits.scala +++ b/test/files/pos/spec-traits.scala @@ -11,7 
+11,7 @@ class Lazy { // issue 3307 class Bug3307 { - def f[Z](block: String => Z) { + def f[Z](block: String => Z): Unit = { block("abc") } @@ -34,7 +34,7 @@ class Bug3301 { } // issue 3299 object Failure { - def thunk() { + def thunk(): Unit = { for (i <- 1 to 2) { val Array(a, b) = Array(1,2) () @@ -46,7 +46,7 @@ object Failure { object AA { - def f(block: => Unit) {} + def f(block: => Unit): Unit = {} object BB { @@ -61,4 +61,4 @@ object AA } // issue 3325 -object O { def f[@specialized T] { for(k <- Nil: List[T]) { } } } +object O { def f[@specialized T]: Unit = { for(k <- Nil: List[T]) { } } } diff --git a/test/files/pos/spec-vector.scala b/test/files/pos/spec-vector.scala index 392949c6699d..a38d973b25cd 100644 --- a/test/files/pos/spec-vector.scala +++ b/test/files/pos/spec-vector.scala @@ -1,4 +1,4 @@ // ticket #3379, abstract overrides -trait Vector extends (Int=>Double) { +trait Vector extends (Int => Double) { override def apply(i: Int): Double } diff --git a/test/files/pos/spec.scala b/test/files/pos/spec.scala index cc060ffe842f..f548e156fe98 100644 --- a/test/files/pos/spec.scala +++ b/test/files/pos/spec.scala @@ -49,7 +49,7 @@ class Foo[@specialized(Int, AnyRef) A](val a: Array[A]) { // instantiation and selection object Test { - def main(arg: Array[String]) { + def main(arg: Array[String]): Unit = { val f = new Foo(new Array[String](5)) f.id("") diff --git a/test/files/pos/specializes-sym-crash.scala b/test/files/pos/specializes-sym-crash.scala index 7778ba277b0f..b3ca70679668 100644 --- a/test/files/pos/specializes-sym-crash.scala +++ b/test/files/pos/specializes-sym-crash.scala @@ -1,9 +1,24 @@ import scala.collection._ +trait IterableViewLike[+A, ++Coll, ++This <: IterableView[A, Coll] with IterableViewLike[A, Coll, This]] { + def viewToString: String = "" + protected[this] def viewIdentifier: String = "" + trait Transformed[+B] +} +trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]] +trait SeqView[+A, +Coll] 
extends SeqViewLike[A, Coll, SeqView[A, Coll]] +trait SeqViewLike[+A, ++Coll, ++This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]] + extends Seq[A] with SeqOps[A, Seq, Seq[A]] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] + + trait Foo[+A, - +Coll, - +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]] -extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] { ++Coll, ++This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]] +extends Seq[A] with SeqOps[A, Seq, Seq[A]] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] { self => trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] { diff --git a/test/files/pos/spurious-overload.scala b/test/files/pos/spurious-overload.scala index 9767a44eee65..aae4c96329a2 100644 --- a/test/files/pos/spurious-overload.scala +++ b/test/files/pos/spurious-overload.scala @@ -29,4 +29,4 @@ object Test extends App { val s: Susp[Int] = delay { println("evaluating..."); 3 } println("2 + s = " + (2 + s)) // implicit call to force() } -} \ No newline at end of file +} diff --git a/test/files/pos/sudoku.scala b/test/files/pos/sudoku.scala index 9435f504d6af..99aff7166a0c 100644 --- a/test/files/pos/sudoku.scala +++ b/test/files/pos/sudoku.scala @@ -1,3 +1,5 @@ +import scala.io.StdIn.readLine + object SudokuSolver extends App { // The board is represented by an array of strings (arrays of chars), // held in a global variable m. 
The program begins by reading 9 lines diff --git a/test/files/pos/super/Super_2.scala b/test/files/pos/super/Super_2.scala index 862e55a1a50a..486d6b700c17 100644 --- a/test/files/pos/super/Super_2.scala +++ b/test/files/pos/super/Super_2.scala @@ -1,6 +1,6 @@ object Test { val x: Super = null - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { } } diff --git a/test/files/pos/surrogates.scala b/test/files/pos/surrogates.scala new file mode 100644 index 000000000000..1b710ad901ae --- /dev/null +++ b/test/files/pos/surrogates.scala @@ -0,0 +1,28 @@ + +// allow supplementary chars in identifiers + +class 𐐀 { + def 𐐀 = 42 + + // regression check: anything goes in strings + def x = "𐐀" + def y = s"$𐐀" + def w = s" 𐐀" +} + +case class 𐐀𐐀(n: Int) { + def 𐐀𐐀 = n + def `𐐀𐐀1` = n + n +} + +// uncontroversially, orphan surrogates may be introduced +// via unicode escape. +class Construction { + def hi = '\ud801' + def lo = '\udc00' + def endhi = "abc\ud801" + def startlo = "\udc00xyz" + def reversed = "xyz\udc00\ud801abc" +} + +// was: error: illegal character '\ud801', '\udc00' diff --git a/test/files/pos/t0165.scala b/test/files/pos/t0165.scala index 76aef8524017..ca47eb655102 100644 --- a/test/files/pos/t0165.scala +++ b/test/files/pos/t0165.scala @@ -4,7 +4,7 @@ import scala.collection.mutable.LinkedHashMap trait Main { def asMany : ArrayResult = { object result extends LinkedHashMap[String,String] with ArrayResult { - def current = result + def current = this.result } result } diff --git a/test/files/pos/t0231.scala b/test/files/pos/t0231.scala index 17a2eb1c548a..0850d7a57cf4 100644 --- a/test/files/pos/t0231.scala +++ b/test/files/pos/t0231.scala @@ -1,5 +1,5 @@ class Foo { - def aaa { + def aaa: Unit = { println("a") } } diff --git a/test/files/pos/t0612/Ob.scala b/test/files/pos/t0612/Ob.scala index c7cbcfe42d50..d12e64963de8 100644 --- a/test/files/pos/t0612/Ob.scala +++ b/test/files/pos/t0612/Ob.scala @@ -1,5 +1,5 @@ package test object Ob { 
- protected[test] def f {} + protected[test] def f: Unit = {} } diff --git a/test/files/pos/t0625.scala b/test/files/pos/t0625.scala index 56145425998f..eea5afecc6e5 100644 --- a/test/files/pos/t0625.scala +++ b/test/files/pos/t0625.scala @@ -3,6 +3,5 @@ object Test { def main(args: Array[String]): Unit = { idMap(Some(5)) - idMap(Responder.constant(5)) } } diff --git a/test/files/pos/t0644.scala b/test/files/pos/t0644.scala index a92e2abb5356..e51ec7df5f1e 100644 --- a/test/files/pos/t0644.scala +++ b/test/files/pos/t0644.scala @@ -1,6 +1,6 @@ class A { def apply(): Int = 0 - def update(n: Int) {} + def update(n: Int): Unit = {} } class B extends A { diff --git a/test/files/pos/t0710.scala b/test/files/pos/t0710.scala index fb440bc79640..d550d63f9807 100644 --- a/test/files/pos/t0710.scala +++ b/test/files/pos/t0710.scala @@ -1,5 +1,5 @@ object t0710 { - def method { + def method: Unit = { sealed class Parent case object Child extends Parent val x: Parent = Child diff --git a/test/files/pos/t0786.scala b/test/files/pos/t0786.scala index f40cf7d2e1a7..4cd09f4a16a6 100644 --- a/test/files/pos/t0786.scala +++ b/test/files/pos/t0786.scala @@ -17,7 +17,7 @@ object ImplicitProblem { def depth[T <% Rep[T]](n: T) = n.eval - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(depth(nullval[M[Int]])) // (1) this works println(nullval[M[Int]].eval) // (2) this works diff --git a/test/files/pos/t0872.scala b/test/files/pos/t0872.scala index 8f4c1c4436c1..79df8e804229 100644 --- a/test/files/pos/t0872.scala +++ b/test/files/pos/t0872.scala @@ -1,5 +1,5 @@ object Main { - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { val fn = (a : Int, str : String) => "a: " + a + ", str: " + str implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null) println(fn(1)) diff --git a/test/files/pos/t10035.scala b/test/files/pos/t10035.scala index 00ad9797e6f2..274481faa028 100644 --- a/test/files/pos/t10035.scala +++ 
b/test/files/pos/t10035.scala @@ -4,7 +4,7 @@ trait Inner { class Outer(o: Set[Inner]) { def this() = this(Set(1).map{ - case k => new Inner{ + case k => new Inner { def f(): Outer = Outer.this } }) diff --git a/test/files/pos/t10052.scala b/test/files/pos/t10052.scala new file mode 100644 index 000000000000..ed21f91215ce --- /dev/null +++ b/test/files/pos/t10052.scala @@ -0,0 +1,36 @@ + +package foo { trait identifier } +package foo { package identifier.foo { } } +object Test13 { def identifier = 2 } + +package bar { trait identifier } +package bar { package identifier.bar { class X } } +object Test14 { def identifier = 2 } + +/* Was: +error: Error while emitting t10012.scala +assertion failed: + Java member module without member class: package foo - List(package foo) + while compiling: t10012.scala + during phase: jvm + library version: version 2.13.1 + compiler version: version 2.13.1 + reconstructed args: -d /tmp + + last tree to typer: TypeTree(class Int) + tree position: line 12 of t10012.scala + tree tpe: Int + symbol: (final abstract) class Int in package scala + symbol definition: final abstract class Int extends (a ClassSymbol) + symbol package: scala + symbol owners: class Int + call site: constructor Test13 in object Test13 in package + +== Source file context for tree position == + + 9 //object Test9 { trait identifier } + 10 //package identifier { package identifier.bar {} } + 11 package foo { package identifier.foo {} } + 12 object Test13 { def identifier = 2 } + 13 + */ diff --git a/test/files/pos/t1006.scala b/test/files/pos/t1006.scala index 5ebf799d6368..2163b2b74183 100644 --- a/test/files/pos/t1006.scala +++ b/test/files/pos/t1006.scala @@ -1,6 +1,6 @@ object Test extends App { -def test() { +def test(): Unit = { abstract class A[T] { def myVal: T diff --git a/test/files/pos/t10080.scala b/test/files/pos/t10080.scala new file mode 100644 index 000000000000..35fc62744ed9 --- /dev/null +++ b/test/files/pos/t10080.scala @@ -0,0 +1,3 @@ +class 
OC[T1: Ordering, T2: Ordering]() { + val ordering = implicitly[Ordering[((Int, T1), T2)]] +} diff --git a/test/files/pos/t10093.scala b/test/files/pos/t10093.scala index 30c3e60a086d..1f5da004680b 100644 --- a/test/files/pos/t10093.scala +++ b/test/files/pos/t10093.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings + +//> using options -Xfatal-warnings class A[@specialized(Int) T](val value: T) { trait B def useValue(x:T): Unit = () diff --git a/test/files/pos/t10117.scala b/test/files/pos/t10117.scala new file mode 100644 index 000000000000..46edda04d90f --- /dev/null +++ b/test/files/pos/t10117.scala @@ -0,0 +1,11 @@ +import scala.language.higherKinds + +case class Const[A, B](value: A) +sealed trait Foo[F[_], A] +final case class Bar[F[_]]() extends Foo[F, Unit] + +object Test { + def f[F[_], A](foo: Foo[F, A]): Unit = foo match { + case Bar() => Const[Unit, F[Unit]](()).value + } +} diff --git a/test/files/pos/t10154.scala b/test/files/pos/t10154.scala index 51616b71d6d6..40b9d27ed3e2 100644 --- a/test/files/pos/t10154.scala +++ b/test/files/pos/t10154.scala @@ -1,7 +1,7 @@ trait Bar2[T] object Test2 { - def wrap { + def wrap: Unit = { object Foo { implicit def fooBar: Bar2[Foo.type] = ??? 
} diff --git a/test/files/pos/t10159/record_0.scala b/test/files/pos/t10159/record_0.scala index ce992357ea24..38ef92bb9bf1 100644 --- a/test/files/pos/t10159/record_0.scala +++ b/test/files/pos/t10159/record_0.scala @@ -7,4 +7,4 @@ object Record extends Dynamic { import c.universe._ internal.setType(q"()", c.typecheck(tq"{type T = Int}", mode = c.TYPEmode).tpe) } -} \ No newline at end of file +} diff --git a/test/files/pos/t10159/test_1.scala b/test/files/pos/t10159/test_1.scala index ac9987ad95ed..c0d9ca889f14 100644 --- a/test/files/pos/t10159/test_1.scala +++ b/test/files/pos/t10159/test_1.scala @@ -2,4 +2,4 @@ object Test { type K = Record.bip.T implicit val lk: List[K] = 1 :: Nil val r = implicitly[List[K]] -} \ No newline at end of file +} diff --git a/test/files/pos/t10185.scala b/test/files/pos/t10185.scala index 673b1c1491d4..5334a1c0177e 100644 --- a/test/files/pos/t10185.scala +++ b/test/files/pos/t10185.scala @@ -1,4 +1,6 @@ -// scalac: -Xsource:2.13 + +//> using options -Xsource:2.13 +// sealed trait Foo[A, F[_ <: A]] case class Bar[A, F[_ <: A]]() extends Foo[A, F] diff --git a/test/files/pos/t10195.scala b/test/files/pos/t10195.scala index 07f92078e2e3..58e23de346c0 100644 --- a/test/files/pos/t10195.scala +++ b/test/files/pos/t10195.scala @@ -1,4 +1,6 @@ -// scalac: -Xsource:2.13 + +//> using options -Xsource:2.13 +// sealed trait Foo[F[_]] case class Bar[F[_]]() extends Foo[F] diff --git a/test/files/pos/t10195b.scala b/test/files/pos/t10195b.scala index 8e6122560447..0bf710f97e49 100644 --- a/test/files/pos/t10195b.scala +++ b/test/files/pos/t10195b.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 +// sealed trait Foo[F[_]] case class Bar[F[_]]() extends Foo[F] diff --git a/test/files/pos/t10197.scala b/test/files/pos/t10197.scala index de3c3ae9929b..6c69aa0d2405 100644 --- a/test/files/pos/t10197.scala +++ b/test/files/pos/t10197.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 + +//> using options -Xsource:2.13 
import scala.language.higherKinds final case class Getter[S, A](get: S => A) diff --git a/test/files/pos/t10208.scala b/test/files/pos/t10208.scala new file mode 100644 index 000000000000..4cd42ebee26d --- /dev/null +++ b/test/files/pos/t10208.scala @@ -0,0 +1,17 @@ +import scala.language.higherKinds + +trait ~>[A[_], B[_]] { + def apply[I](fa: A[I]): B[I] +} + +sealed trait GADTK[A[_], I] +final case class MemberK[A[_]](i: Int) extends GADTK[A, Int] + +object Test { + def doesNotCompile[A[_]]: (({ type λ[α] = GADTK[A, α] })#λ ~> ({ type λ[α] = GADTK[A, α] })#λ) = + new (({ type λ[α] = GADTK[A, α] })#λ ~> ({ type λ[α] = GADTK[A, α] })#λ) { + def apply[I](v: GADTK[A, I]): GADTK[A, I] = v match { + case MemberK(i) => MemberK(i) + } + } +} diff --git a/test/files/pos/t10208b.scala b/test/files/pos/t10208b.scala new file mode 100644 index 000000000000..f9242a71e8d2 --- /dev/null +++ b/test/files/pos/t10208b.scala @@ -0,0 +1,22 @@ +import scala.language.higherKinds + +class Base[M[_]] { def mint: M[Int] = ??? } +final case class Sub() extends Base[Option] + +object Test { + def test[M[_]](b: Base[M]): Option[Int] = { + bar: M[Any] + b match { + case Sub() => + bar: M[Any] // incorrect error: M does not take type parameters + b.mint // GADT refinement: M = Option + + // Fix: change GADT refinement for HK type params to assign M the type: + // [A]( >: Option[A] <: Option[A]), + // rather than: + // >: Option <: Option + } + } + + def bar[N[_]]: N[Any] = ??? 
+} diff --git a/test/files/pos/t10211.scala b/test/files/pos/t10211.scala new file mode 100644 index 000000000000..709e7cf41c42 --- /dev/null +++ b/test/files/pos/t10211.scala @@ -0,0 +1,12 @@ +trait QuitePrivate[@specialized(Int) K] { + protected[this] def hasK(k :K) :Boolean +} + +trait MoreOpen extends QuitePrivate[Int] { + override def hasK(k :Int) :Boolean +} + + +object Playground extends App { + def check(guy :MoreOpen) = guy.hasK(42) +} \ No newline at end of file diff --git a/test/files/pos/t10213.scala b/test/files/pos/t10213.scala index 2fcd9fb22847..44b1cf521049 100644 --- a/test/files/pos/t10213.scala +++ b/test/files/pos/t10213.scala @@ -1,4 +1,6 @@ -// scalac: -Xsource:2.13 + + +//> using options -Xsource:2.13 import scala.language.higherKinds final case class Coproduct[F[_], G[_], A](run: Either[F[A], G[A]]) diff --git a/test/files/pos/t10215.scala b/test/files/pos/t10215.scala new file mode 100644 index 000000000000..8c9895292fd0 --- /dev/null +++ b/test/files/pos/t10215.scala @@ -0,0 +1,7 @@ +import language.higherKinds + +class One[F[_]](val f: F[One[F]]) extends AnyVal +class Two[A]() +object One { + val _ = new One[Two](new Two) +} diff --git a/test/files/pos/t10238.scala b/test/files/pos/t10238.scala index 02ab8cd69744..bfb6f23044ad 100644 --- a/test/files/pos/t10238.scala +++ b/test/files/pos/t10238.scala @@ -1,4 +1,6 @@ -// scalac: -Xsource:2.13 + +//> using options -Xsource:2.13 + object Test { // Data types @@ -12,7 +14,7 @@ object Test { type MaybeMaybe[A] = MaybeT[Maybe, A] - // Typeclass + // Type class trait Monad[F[_]] diff --git a/test/files/pos/t10270/Macros_1.scala b/test/files/pos/t10270/Macros_1.scala index 0d9f51e2c260..056995d2497a 100644 --- a/test/files/pos/t10270/Macros_1.scala +++ b/test/files/pos/t10270/Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:imports import language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/pos/t10270/Main_2.scala 
b/test/files/pos/t10270/Main_2.scala index 58d21e8e0c00..a0b7c5aff01a 100644 --- a/test/files/pos/t10270/Main_2.scala +++ b/test/files/pos/t10270/Main_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:imports +//> using options -Xfatal-warnings -Ywarn-unused:imports object Main extends App { diff --git a/test/files/pos/t10272.scala b/test/files/pos/t10272.scala new file mode 100644 index 000000000000..6e830e0a8d7f --- /dev/null +++ b/test/files/pos/t10272.scala @@ -0,0 +1,14 @@ +object Test { + sealed trait AbstractProven { + type Proven[+A, TypeClass[_]] <: A + } + + val abstractProven: AbstractProven = new AbstractProven { + override type Proven[+A, TypeClass[_]] = A + } + + import abstractProven._ + def x(a: Any Proven Ordering): Unit = a match { + case i: Int => + } +} diff --git a/test/files/pos/t10288.scala b/test/files/pos/t10288.scala index 7c76a8965d24..4b2b32bbb281 100644 --- a/test/files/pos/t10288.scala +++ b/test/files/pos/t10288.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 + +//> using options -Xsource:2.13 trait Target trait Unrelated diff --git a/test/files/pos/t10296-before/UnusedMacro_1.scala b/test/files/pos/t10296-before/UnusedMacro_1.scala index 220ae4edcaad..8d08c39ce102 100644 --- a/test/files/pos/t10296-before/UnusedMacro_1.scala +++ b/test/files/pos/t10296-before/UnusedMacro_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Xlint:unused -Ywarn-macros:before import scala.reflect.macros.whitebox.Context diff --git a/test/files/pos/t10296-before/Unused_2.scala b/test/files/pos/t10296-before/Unused_2.scala index 9df8dc3e9381..5d00b5ba284e 100644 --- a/test/files/pos/t10296-before/Unused_2.scala +++ b/test/files/pos/t10296-before/Unused_2.scala @@ -1,5 +1,6 @@ -// scalac: -Xfatal-warnings -Xlint:unused -Ywarn-macros:before +//> using options -Xfatal-warnings -Xlint:unused -Ywarn-macros:before +// import scala.language.experimental.macros object Unused extends App { diff --git a/test/files/pos/t10296/UnusedMacro_1.scala 
b/test/files/pos/t10296/UnusedMacro_1.scala index f3b33c38d561..d3576ee731f0 100644 --- a/test/files/pos/t10296/UnusedMacro_1.scala +++ b/test/files/pos/t10296/UnusedMacro_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Xlint:unused import scala.reflect.macros.blackbox diff --git a/test/files/pos/t10296/Unused_2.scala b/test/files/pos/t10296/Unused_2.scala index 7f1e2ec869a2..adca67fbf0bf 100644 --- a/test/files/pos/t10296/Unused_2.scala +++ b/test/files/pos/t10296/Unused_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint:unused +//> using options -Xfatal-warnings -Xlint:unused import scala.language.experimental.macros diff --git a/test/files/pos/t10372.scala b/test/files/pos/t10372.scala index 68f19c27c039..1a22e89c8d76 100644 --- a/test/files/pos/t10372.scala +++ b/test/files/pos/t10372.scala @@ -1,4 +1,6 @@ -// scalac: -Xsource:2.13 +// +//> using options -Xsource:2.13 +// import scala.language.higherKinds import scala.language.implicitConversions diff --git a/test/files/pos/t10373.flags b/test/files/pos/t10373.flags deleted file mode 100644 index 85d8eb2ba295..000000000000 --- a/test/files/pos/t10373.flags +++ /dev/null @@ -1 +0,0 @@ --Xfatal-warnings diff --git a/test/files/pos/t10373.scala b/test/files/pos/t10373.scala index c588607d3d41..f913cfe64fb7 100644 --- a/test/files/pos/t10373.scala +++ b/test/files/pos/t10373.scala @@ -1,4 +1,5 @@ -abstract class Foo { +//> using options -Werror +sealed abstract class Foo { def bar(): Unit = this match { case Foo_1() => //do something case Foo_2() => //do something diff --git a/test/files/pos/t10394.scala b/test/files/pos/t10394.scala index abdfe65f4b11..51ce1062b8bf 100644 --- a/test/files/pos/t10394.scala +++ b/test/files/pos/t10394.scala @@ -1,5 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:patvars +//> using options -Xfatal-warnings -Ywarn-unused:patvars trait T { def f = for (i: Int <- List(42)) yield i } diff --git a/test/files/pos/t10418_bounds.scala b/test/files/pos/t10418_bounds.scala 
new file mode 100644 index 000000000000..9f7ba4f1cecc --- /dev/null +++ b/test/files/pos/t10418_bounds.scala @@ -0,0 +1,5 @@ +class Test { + def foo(c: java.util.Collection[String]): Unit = { + c.removeIf(x => true) + } +} diff --git a/test/files/pos/t10425.scala b/test/files/pos/t10425.scala new file mode 100644 index 000000000000..c00096e1973c --- /dev/null +++ b/test/files/pos/t10425.scala @@ -0,0 +1,19 @@ +class Poly { + class E +} + +object MyApp { + object r extends Poly { + implicit def conv(value: Any): E = sys.error("") + } + object s extends Poly { + implicit def conv(value: Any): E = sys.error("") + } + val b: r.E = sys.error("") + + // okay + s.conv(b): s.E + + // okay + println(b: s.E) +} diff --git a/test/files/pos/t10497.scala b/test/files/pos/t10497.scala new file mode 100644 index 000000000000..4da013d4935c --- /dev/null +++ b/test/files/pos/t10497.scala @@ -0,0 +1,8 @@ +import scala.annotation._ + +// also works with subsets of {@annotation.meta.field @annotation.meta.getter @annotation.meta.setter} +class baz(out: Foo => Int) extends StaticAnnotation + +class Foo { + @baz(out = _.value) val value: Int = 5 +} diff --git a/test/files/pos/t10502.scala b/test/files/pos/t10502.scala new file mode 100644 index 000000000000..0222e731211f --- /dev/null +++ b/test/files/pos/t10502.scala @@ -0,0 +1,36 @@ +//> using options -Xfatal-warnings + +final class Box[A](val unwrap: A) + +object Box { + def apply[A](x: A): Box[A] = new Box[A](x) + def unapply[A](x: Box[A]): Some[A] = Some(x.unwrap) +} + +object Perhaps { + def unapply[A](oa: Option[A]): Some[Option[A]] = Some(oa) +} + +class Test { + def test() = { + List(Option("hello")) match { + case Perhaps(Some(s)) :: _ => + case Perhaps(None ) :: _ => + case Nil => + } + } + + def justOption() = { + Option("hello") match { + case Perhaps(Some(s)) => + case Perhaps(None) => + } + } + + def boxTest() = { + Box(Option("hello")) match { + case Box(Some(s)) => + case Box(None) => + } + } +} diff --git 
a/test/files/pos/t10505.scala b/test/files/pos/t10505.scala new file mode 100644 index 000000000000..b6a08d61ad8a --- /dev/null +++ b/test/files/pos/t10505.scala @@ -0,0 +1,7 @@ +object Test { + def length[@specialized(Int) A](as: List[A], len: A => Int, acc: Int): Int = + as match { + case Nil => acc + case h :: t => length(t, len, acc + len(h)) + } +} diff --git a/test/files/pos/t10507/Macros_1.scala b/test/files/pos/t10507/Macros_1.scala new file mode 100644 index 000000000000..2483ba2e77df --- /dev/null +++ b/test/files/pos/t10507/Macros_1.scala @@ -0,0 +1,13 @@ +import scala.language.experimental.macros +import scala.reflect.macros.blackbox + +object Macros_1 { + def seq: List[Int] = macro impl + def impl(c: blackbox.Context): c.Tree = { + import c.universe._ + q"""{ + class A(val i: Int) { def this() = this(0) } + List(new A, new A(1)).map(_.i) + }""" + } +} diff --git a/test/files/pos/t10507/Test_2.scala b/test/files/pos/t10507/Test_2.scala new file mode 100644 index 000000000000..944b8bf46091 --- /dev/null +++ b/test/files/pos/t10507/Test_2.scala @@ -0,0 +1,3 @@ +object Test_2 { + Macros_1.seq +} diff --git a/test/files/pos/t10519.scala b/test/files/pos/t10519.scala new file mode 100644 index 000000000000..4c84b3f29092 --- /dev/null +++ b/test/files/pos/t10519.scala @@ -0,0 +1,6 @@ +object Test { + case class D[A]() + val xs = Seq(D[Int](), D[Boolean]()) + def g[Y](is: Seq[D[_ >: Y]]) = ??? 
+ g(xs) +} diff --git a/test/files/pos/t10528.scala b/test/files/pos/t10528.scala new file mode 100644 index 000000000000..cfdeebb3f8f4 --- /dev/null +++ b/test/files/pos/t10528.scala @@ -0,0 +1,35 @@ +object Test { + trait Holder[A] + trait NilHolder[A] extends Holder[A] + + trait Solve[A, H <: Holder[A]] { + type Output <: Holder[A] + } + type SolveAux[A, H <: Holder[A], O <: Holder[A]] = Solve[A, H] {type Output = O} + + implicit def nilSolve[A] = new Solve[A, NilHolder[A]] { + override type Output = NilHolder[A] + } + + trait WrapSolve[A, H <: Holder[A]] { + type Output <: Holder[A] + } + + implicit def wrapAux[A, H <: Holder[A], O <: Holder[A]](implicit one : SolveAux[A, H, O]) = + new WrapSolve[A, H] { + override type Output = O + } + + val wrapped = implicitly[WrapSolve[String, NilHolder[String]]] +} + +object Test2 { + class Inv[T] + class Foo[T, U <: Inv[T]] + + implicit def foo[T]: Foo[T, Inv[T]] = new Foo[T, Inv[T]] + + def bar[T, U <: Inv[T]](implicit foo: Foo[T, U]): Inv[T] = new Inv[T] + + val baz: Inv[Int] = bar +} diff --git a/test/files/pos/t10532/C.java b/test/files/pos/t10532/C.java new file mode 100644 index 000000000000..515d8bae362c --- /dev/null +++ b/test/files/pos/t10532/C.java @@ -0,0 +1,11 @@ + +public class C { + public long f(long x, Long y) { return x + y; } + public long f(Long x, Long y) { return x + y; } + + public long g(long x, String y) { return x + Long.parseLong(y); } + public long g(Long x, String y) { return x + Long.parseLong(y); } + + public long h(long x) { return x + 1; } + public long h(Long x) { return x + 1; } +} diff --git a/test/files/pos/t10532/D.scala b/test/files/pos/t10532/D.scala new file mode 100644 index 000000000000..6d2157f0b44a --- /dev/null +++ b/test/files/pos/t10532/D.scala @@ -0,0 +1,7 @@ + +import java.lang.{Long => JLong} + +class D { + def f(x: Long, y: JLong): Long = x + y + def f(x: JLong, y: JLong): Long = x + y +} diff --git a/test/files/pos/t10532/test.scala b/test/files/pos/t10532/test.scala 
new file mode 100644 index 000000000000..bb6602eabca5 --- /dev/null +++ b/test/files/pos/t10532/test.scala @@ -0,0 +1,17 @@ + +object Test extends App { + val c = new C + val d = new D + + println(c.f(42, 42)) + println(c.f(42L, 42L)) + + println(c.g(42, "42")) + println(c.g(42L, "42")) + + println(c.h(42)) + println(c.h(42L)) + + println(d.f(42, 42)) + println(d.f(42L, 42L)) +} diff --git a/test/files/pos/t10533.scala b/test/files/pos/t10533.scala new file mode 100644 index 000000000000..d84e87b0bc21 --- /dev/null +++ b/test/files/pos/t10533.scala @@ -0,0 +1,5 @@ +object Foo { + val b @ Bar(_) = Bar(1)(2)(3) +} + +case class Bar(a: Int)(b: Int)(c: Int) diff --git a/test/files/pos/t10536.scala b/test/files/pos/t10536.scala new file mode 100644 index 000000000000..4e87a5bf8440 --- /dev/null +++ b/test/files/pos/t10536.scala @@ -0,0 +1,9 @@ +trait A + +trait B[C <: B[C]] { + def ==(o: C)(implicit a: A): Boolean = ??? +} + +trait D[C <: B[C]] + +case class E[C <: B[C]](c: C) extends D[C] diff --git a/test/files/pos/t10561.scala b/test/files/pos/t10561.scala new file mode 100644 index 000000000000..a159d24b54c7 --- /dev/null +++ b/test/files/pos/t10561.scala @@ -0,0 +1,12 @@ + +class Parent { + private val field: Int = 3 +} + +class Child(n: Int) extends { + private val field = n +} with Parent { + class Inner { + def f = field + } +} diff --git a/test/files/pos/t10568/Impl.scala b/test/files/pos/t10568/Impl.scala index 09c0c8bb52c8..8940c76dbc1d 100644 --- a/test/files/pos/t10568/Impl.scala +++ b/test/files/pos/t10568/Impl.scala @@ -6,4 +6,4 @@ class Impl extends Converter.FactoryFactory { import Converter.FactoryFactory._ def method: String = getString + Converter.STRING -} \ No newline at end of file +} diff --git a/test/files/pos/t10569.scala b/test/files/pos/t10569.scala new file mode 100644 index 000000000000..b4f90a5b8ff8 --- /dev/null +++ b/test/files/pos/t10569.scala @@ -0,0 +1,6 @@ +object Test { + object Foo + Tuple1[Foo.type](Foo) match { + case Tuple1(foo: 
Singleton) => foo + } +} diff --git a/test/files/pos/t10589-case-implicit-param/cc_2.scala b/test/files/pos/t10589-case-implicit-param/cc_2.scala new file mode 100644 index 000000000000..48281f051bac --- /dev/null +++ b/test/files/pos/t10589-case-implicit-param/cc_2.scala @@ -0,0 +1,6 @@ +//> using options -Ymacro-annotations +trait T[A] + +@macid +case class CC[A: T](x: A) + diff --git a/test/files/pos/t10589-case-implicit-param/macros_1.scala b/test/files/pos/t10589-case-implicit-param/macros_1.scala new file mode 100644 index 000000000000..d3eeb8bf05ce --- /dev/null +++ b/test/files/pos/t10589-case-implicit-param/macros_1.scala @@ -0,0 +1,19 @@ +//> using options -Ymacro-annotations + +import scala.annotation.StaticAnnotation +import scala.language.experimental.macros +import scala.reflect.macros.whitebox.Context + +class macid extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = macro macidMacro.impl +} +object macidMacro { + def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + new Macros[c.type](c).macidMacroImpl(annottees.toList) + } +} +class Macros[C <: Context](val c: C) { + import c.universe._ + def macidMacroImpl(annottees: List[c.Expr[Any]]): c.Expr[Any] = + annottees(0) +} diff --git a/test/files/pos/t10623.scala b/test/files/pos/t10623.scala index 8d574849e319..93cc77a4abd9 100644 --- a/test/files/pos/t10623.scala +++ b/test/files/pos/t10623.scala @@ -1,6 +1,6 @@ -// scalac: -Xfatal-warnings -Xlint:unused -import language.higherKinds +//> using options -Xfatal-warnings -Xlint:unused +// object `package` { def refl[A]: A Is A = ??? 
diff --git a/test/files/pos/t10639/Identifiable_1.java b/test/files/pos/t10639/Identifiable_1.java new file mode 100644 index 000000000000..56fe89c73b22 --- /dev/null +++ b/test/files/pos/t10639/Identifiable_1.java @@ -0,0 +1,14 @@ + +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.toMap; + +public interface Identifiable_1 { + String name(); + + static & Identifiable_1> Map valuesByName(final E[] values) { + return Stream.of(values).collect(toMap(Identifiable_1::name, Function.identity())); + } +} diff --git a/test/files/pos/t10639/Size_1.java b/test/files/pos/t10639/Size_1.java new file mode 100644 index 000000000000..2141e41106b8 --- /dev/null +++ b/test/files/pos/t10639/Size_1.java @@ -0,0 +1,3 @@ +public enum Size_1 implements Identifiable_1 { + SMALL, MEDIUM, LARGE; +} diff --git a/test/files/pos/t10639/broken_2.scala b/test/files/pos/t10639/broken_2.scala new file mode 100644 index 000000000000..86739557d334 --- /dev/null +++ b/test/files/pos/t10639/broken_2.scala @@ -0,0 +1,4 @@ + +object Broken extends App { + println(Size_1.SMALL) +} diff --git a/test/files/pos/t10643.scala b/test/files/pos/t10643.scala index f59d305ffd6b..4636f064cb31 100644 --- a/test/files/pos/t10643.scala +++ b/test/files/pos/t10643.scala @@ -1,4 +1,6 @@ -// scalac: -Yrangepos + +// + trait AA trait BB trait Foo { diff --git a/test/files/pos/t10644/Objs_1.scala b/test/files/pos/t10644/Objs_1.scala index 804cfa435c18..7f8649e5599f 100644 --- a/test/files/pos/t10644/Objs_1.scala +++ b/test/files/pos/t10644/Objs_1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings case object A ; case object B object C { // inferred refinement type `Product with Serializable` of val `objs` has owner `C` diff --git a/test/files/pos/t10644/Test_2.scala b/test/files/pos/t10644/Test_2.scala index 97aab8bfef03..0e5ba627e2ea 100644 --- a/test/files/pos/t10644/Test_2.scala +++ 
b/test/files/pos/t10644/Test_2.scala @@ -1,7 +1,7 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings object Test { // Should not result in the spurious warning: // comparing non-null values of types Product with Serializable - // and A.type using `==' will always yield false + // and A.type using `==` will always yield false assert(C.objs.head == A) } diff --git a/test/files/pos/t10669.scala b/test/files/pos/t10669.scala new file mode 100644 index 000000000000..c80456e66f3d --- /dev/null +++ b/test/files/pos/t10669.scala @@ -0,0 +1,10 @@ +abstract class CharSet[P] { + type Type <: P +} + +object LetterOrDigit extends CharSet[Char] +object Digit extends CharSet[LetterOrDigit.Type] + +object t { + type D = Digit.Type +} \ No newline at end of file diff --git a/test/files/pos/t10680.scala b/test/files/pos/t10680.scala index 7a2c64bfb0e1..d1ee3b3a5459 100644 --- a/test/files/pos/t10680.scala +++ b/test/files/pos/t10680.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings object MyEnum extends Enumeration { val e1 = Value("e1") val e2 = Value("e2") diff --git a/test/files/pos/t10686.scala b/test/files/pos/t10686.scala new file mode 100644 index 000000000000..ff803e4b6b53 --- /dev/null +++ b/test/files/pos/t10686.scala @@ -0,0 +1,6 @@ +object Test { + def ltr[A](implicit ev: (Int, A) =:= (Int, String)) = null + def rtl[A](implicit ev: (Int, String) =:= (Int, A)) = null + ltr + rtl +} diff --git a/test/files/pos/t10714.scala b/test/files/pos/t10714.scala new file mode 100644 index 000000000000..b871ba7e8037 --- /dev/null +++ b/test/files/pos/t10714.scala @@ -0,0 +1,15 @@ +object Test { + class Foo { + type Out + } + object Foo { + implicit def foo: Foo { type Out = Bar } = ??? + } + + class Bar { + type Baz = Foo + def foo(implicit foo: Baz): foo.Out = ??? 
+ } + + (new Bar).foo.foo +} diff --git a/test/files/pos/t10714b.scala b/test/files/pos/t10714b.scala new file mode 100644 index 000000000000..2ec06aec2790 --- /dev/null +++ b/test/files/pos/t10714b.scala @@ -0,0 +1,14 @@ +object Test { + class Bar { + class Foo { + type Out + } + object Foo { + implicit def foo: Foo { type Out = Bar } = ??? + } + + def foo(implicit foo: Foo): foo.Out = ??? + } + + (new Bar).foo.foo +} diff --git a/test/files/pos/t10714c.scala b/test/files/pos/t10714c.scala new file mode 100644 index 000000000000..675d6c60d00a --- /dev/null +++ b/test/files/pos/t10714c.scala @@ -0,0 +1,16 @@ +object Test { + class Foo + trait Bar { + val foo: Foo + def baz(implicit quux: Quux[foo.type]): Unit = ??? + } + implicit def mkBar(foo0: Foo): Bar { val foo: foo0.type } = ??? + + trait Quux[T] + object Quux { + implicit def mkQuux[T]: Quux[T] = ??? + } + + (new Foo).baz +} + diff --git a/test/files/pos/t10714d.scala b/test/files/pos/t10714d.scala new file mode 100644 index 000000000000..3dd4cc0fb911 --- /dev/null +++ b/test/files/pos/t10714d.scala @@ -0,0 +1,19 @@ +object Test { + class Foo + trait Bar { + trait Baz + object Baz { + implicit val qb: Quux[Baz] = ??? + } + def baz(implicit quux: Quux[Baz]): Unit = ??? + } + implicit def mkBar(foo0: Foo): Bar = ??? + + trait Quux[T] + object Quux { + implicit def mkQuux[T]: Quux[T] = ??? + } + + (new Foo).baz +} + diff --git a/test/files/pos/t10722.scala b/test/files/pos/t10722.scala new file mode 100644 index 000000000000..dee9942a3338 --- /dev/null +++ b/test/files/pos/t10722.scala @@ -0,0 +1,13 @@ +import language.higherKinds + +object Test { + sealed trait Coin[+A, +B] + case class Heads[+A](face: A) extends Coin[A, Nothing] + case class Tails[+B](back: B) extends Coin[Nothing, B] + + def flip[F[_, _], G[x] <: F[x, String]](f: F[Int, String], g: G[Int]) = ??? + def flop[F[_], G <: F[String]](f: F[String], g: G) = ??? 
+ + flip(Tails("scala"): Coin[Int, String], Heads(50)) + flop(Option("scala"), None) +} diff --git a/test/files/pos/t10726.scala b/test/files/pos/t10726.scala new file mode 100644 index 000000000000..5238fe4db084 --- /dev/null +++ b/test/files/pos/t10726.scala @@ -0,0 +1,8 @@ +object Test { + + def eat(foods: List["food"]): Unit = println(s"ate ${foods.size} foods") + + eat("food" :: Nil) + eat(Nil.::("food")) + +} diff --git a/test/files/pos/t10736.scala b/test/files/pos/t10736.scala new file mode 100644 index 000000000000..ede74052dfa7 --- /dev/null +++ b/test/files/pos/t10736.scala @@ -0,0 +1,26 @@ +object ImplicitRegression { + + import scala.language.{implicitConversions, higherKinds} + + trait Map[K, +V] extends MapOps[K, V, Map, Map[K, V]] + + trait MapOps[K, +V, +CC[_, _], +C] + + trait BuildFrom[-From, -A, +C] + + object BuildFrom { + implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0], (K, V), CC[K, V]] = ??? + } + + trait MapDecorator[K, V] { + val foo: Map[K, V] + def zipByKeyWith[W, X, C1](that: Map[K, W])(f: (V, W) => X)(implicit bf: BuildFrom[foo.type, (K, X), C1]): C1 = ??? + } + + implicit def MapDecorator[K, V](map: Map[K, V]): MapDecorator[K, V] { val foo: map.type } = ??? 
+ + def test[A, B, C0](map1: Map[A, B], map2: Map[A, B], f: (B, B) => C0): Unit = { + val map3 = map1.zipByKeyWith(map2)(f)/*(BuildFrom.buildFromMapOps)*/ + map3: Map[A, C0] + } +} diff --git a/test/files/pos/t10740.scala b/test/files/pos/t10740.scala new file mode 100644 index 000000000000..5b409e659378 --- /dev/null +++ b/test/files/pos/t10740.scala @@ -0,0 +1,11 @@ +object Test { + case class Inv[A](x: A) + def diff1[A](i: Inv[A], j: Inv[_ <: A]) = 1 + def diff2[A](i: Inv[_ >: A], j: Inv[A]) = 2 + def diff3[A](i: Inv[_ >: A], j: Inv[_ <: A]) = 3 + val i = Inv(Option(42)) + val j = Inv(Some(42)) + diff1(i, j) + diff2(i, j) + diff3(i, j) +} diff --git a/test/files/pos/t10758/Macros_1.scala b/test/files/pos/t10758/Macros_1.scala new file mode 100644 index 000000000000..6a470f407a5d --- /dev/null +++ b/test/files/pos/t10758/Macros_1.scala @@ -0,0 +1,24 @@ +import scala.language.experimental.macros + +import scala.reflect.macros.whitebox.Context + +final class TwoFaceInt[T](val value : Int) { + def + [R](that : TwoFaceInt[R]) = ??? 
+} +object TwoFaceInt { + def apply[T <: Int, Out <: T](value : T) : TwoFaceInt[Out] = macro Builder.Macro.fromNumValue[T] +} + +object Builder { + final class Macro(val c: Context) { + def fromNumValue[T](value : c.Tree)(implicit t : c.WeakTypeTag[T]) : c.Tree = { + import c.universe._ + val tTpe = weakTypeOf[T] + val valueTpe = value match { + case Literal(Constant(t : Int)) => c.internal.constantType(Constant(t)) + case _ => tTpe + } + q"new TwoFaceInt[$valueTpe]($value)" + } + } +} diff --git a/test/files/pos/t10758/Main_2.scala b/test/files/pos/t10758/Main_2.scala new file mode 100644 index 000000000000..9939102992f0 --- /dev/null +++ b/test/files/pos/t10758/Main_2.scala @@ -0,0 +1,3 @@ +object Test { + TwoFaceInt(1) + TwoFaceInt(2) +} diff --git a/test/files/pos/t10762/t10762_1.scala b/test/files/pos/t10762/t10762_1.scala new file mode 100644 index 000000000000..d1006f69aaa7 --- /dev/null +++ b/test/files/pos/t10762/t10762_1.scala @@ -0,0 +1,12 @@ +import language.higherKinds + +trait Tycon[A] +trait MatcherFactory1X[TC1[_]] +trait Matcher[T]{ + class X { + def be = { + def inferTycon[TC1[_]](other: MatcherFactory1X[TC1]): MatcherFactory1X[TC1] = ??? + inferTycon(??? : MatcherFactory1X[Tycon]) + } + } +} diff --git a/test/files/pos/t10762/t10762_2.scala b/test/files/pos/t10762/t10762_2.scala new file mode 100644 index 000000000000..7f27c5818ef3 --- /dev/null +++ b/test/files/pos/t10762/t10762_2.scala @@ -0,0 +1,4 @@ +object Test { + val m: Matcher[_] = ??? 
+ val tpinfer = (new m.X).be +} diff --git a/test/files/pos/t10763.scala b/test/files/pos/t10763.scala index 6dae1ae0fed9..f4c0bbbf3c25 100644 --- a/test/files/pos/t10763.scala +++ b/test/files/pos/t10763.scala @@ -1,9 +1,11 @@ -// scalac: -Xfatal-warnings -Ywarn-unused + +//> using options -Werror -Wunused + class Test { def xsUnused = { val xs: List[Int] = List(0) - for (refute@1 <- xs) {} + for (refute@1 <- xs) println(refute) } def f() = for (Some(i: Int) <- List(Option(42))) println(i) } diff --git a/test/files/pos/t10768.scala b/test/files/pos/t10768.scala new file mode 100644 index 000000000000..e85bda4fbf68 --- /dev/null +++ b/test/files/pos/t10768.scala @@ -0,0 +1,6 @@ +object Test { + type Id[T] = T + def foo(x: Int): Id[x.type] = x + + lazy val result = foo(1) +} diff --git a/test/files/pos/t10779.scala b/test/files/pos/t10779.scala new file mode 100644 index 000000000000..067332ea50dd --- /dev/null +++ b/test/files/pos/t10779.scala @@ -0,0 +1,7 @@ +object Test { + final val x: 1 = 1 + final val y: 2 = 2 + x + 2: 3 //ok + 1 + y: 3 //ok + x + y: 3 //ok +} diff --git a/test/files/pos/t10792.scala b/test/files/pos/t10792.scala new file mode 100644 index 000000000000..300f1ccd1ed7 --- /dev/null +++ b/test/files/pos/t10792.scala @@ -0,0 +1,6 @@ +object Test { + type Id[T] = T + def foo(x: Int): Id[x.type] = x + + { lazy val result: 1 = foo(1) } +} diff --git a/test/files/pos/t10849.scala b/test/files/pos/t10849.scala new file mode 100644 index 000000000000..994bafca4d59 --- /dev/null +++ b/test/files/pos/t10849.scala @@ -0,0 +1,10 @@ +import language.higherKinds + +object Test { + class TcHk[F[_]] + trait Foo { type T } + implicit val tcHkOpt: TcHk[Option] = new TcHk + implicit def foo[F[_], A](implicit F: TcHk[F]): Foo { type T = F[A] } = + new Foo { type T = F[A] } + implicitly[Foo { type T = Option[Int] }] +} diff --git a/test/files/pos/t10907.scala b/test/files/pos/t10907.scala new file mode 100644 index 000000000000..0b8838f41839 --- /dev/null +++ 
b/test/files/pos/t10907.scala @@ -0,0 +1,3 @@ +object Test { + val x = Seq(Vector(2), List(3)) +} diff --git a/test/files/pos/t11015.scala b/test/files/pos/t11015.scala new file mode 100644 index 000000000000..c40e38e163f9 --- /dev/null +++ b/test/files/pos/t11015.scala @@ -0,0 +1,19 @@ +class Foo[A] + +object ParserInput { implicit def apply(string: String): ParserInput = ??? } +trait ParserInput + +// overloaded with default arg +class JsonParser(input: ParserInput, settings: String = null) { def this(input: ParserInput) = this(input, null) } + +class Test { + def append(a: Foo[String]): Unit = () + def append(a: Foo[String], as: Int*): Unit = () + + append(new Foo) + + // needs implicit conversion from companion of expected type + // the problem was that getParts in implicits did not consider OverloadedArgProto's underlying type; + // the implemented solution is to normalize the expected type of a view before calling inferView + new JsonParser("""{"key":1}{"key":2}""") +} diff --git a/test/files/pos/t11052.scala b/test/files/pos/t11052.scala new file mode 100644 index 000000000000..34f9cc2256ef --- /dev/null +++ b/test/files/pos/t11052.scala @@ -0,0 +1,8 @@ +object Test { + val x: Byte = Iterator.empty.next + // error: polymorphic expression cannot be instantiated to expected type; + // found : [T]()T + // required: Byte + // val x: Byte = Iterator.empty.next + // ^ +} diff --git a/test/files/pos/t11102.scala b/test/files/pos/t11102.scala new file mode 100644 index 000000000000..d842334cb569 --- /dev/null +++ b/test/files/pos/t11102.scala @@ -0,0 +1,117 @@ +object Test { + def f(x: ImmutableSeq) = x match { + case ImmutableCons(x, xs @ _*) => xs + } + def f(x: MutableSeq) = x match { + case MutableCons(x, xs @ _*) => xs + } + def f(x: CollectionSeq) = x match { + case CollectionCons(x, xs @ _*) => xs + } + def f(x: ScalaSeq) = x match { + case ScalaCons(x, xs @ _*) => xs + } + def f(x: DefaultSeq) = x match { + case DefaultCons(x, xs @ _*) => xs + } +} + +/** + 
* collection.immutable.Seq + */ +abstract class ImmutableSeq +extends collection.immutable.Seq[Int] + with UnimplementedSeq + +object ImmutableCons { + def unapplySeq(x: ImmutableCons) = + Some((x.first, x.more)) +} + +abstract class ImmutableCons +extends ImmutableSeq { + def first: Int + def more: collection.immutable.Seq[ImmutableCons] +} + +/** + * collection.mutable.Seq + */ +abstract class MutableSeq +extends collection.mutable.Seq[Int] +with UnimplementedSeq + +object MutableCons { + def unapplySeq(x: MutableCons) = + Some((x.first, x.more.toSeq)) // ! +} + +abstract class MutableCons +extends MutableSeq { + def first: Int + def more: collection.mutable.Seq[MutableCons] +} + +/** + * collection.Seq + */ +abstract class CollectionSeq +extends collection.Seq[Int] +with UnimplementedSeq + +object CollectionCons { + def unapplySeq(x: CollectionCons) = + Some((x.first, x.more.toSeq)) // ! +} + +abstract class CollectionCons +extends CollectionSeq { + def first: Int + def more: collection.Seq[CollectionCons] +} + +/** + * scala.Seq + */ +abstract class ScalaSeq +extends collection.Seq[Int] +with UnimplementedSeq + +object ScalaCons { + def unapplySeq(x: ScalaCons) = + Some((x.first, x.more)) +} + +abstract class ScalaCons +extends ScalaSeq { + def first: Int + def more: scala.Seq[ScalaCons] +} + +/** + * Seq + */ +abstract class DefaultSeq +extends Seq[Int] +with UnimplementedSeq + +object DefaultCons { + def unapplySeq(x: DefaultCons) = + Some((x.first, x.more)) +} + +abstract class DefaultCons +extends DefaultSeq { + def first: Int + def more: Seq[DefaultCons] +} + +/** + * Unimplemented sequence. + */ +trait UnimplementedSeq { + def iterator: Iterator[Int] = ??? + def apply(i: Int): Int = ??? + def length: Int = ??? + def update(idx: Int, elem: Int): Unit = ??? 
+} diff --git a/test/files/pos/t11103.scala b/test/files/pos/t11103.scala new file mode 100644 index 000000000000..2597c30423ed --- /dev/null +++ b/test/files/pos/t11103.scala @@ -0,0 +1,12 @@ +trait Outer[O] { + case class C[T <: O](x: T, y: Outer.this.type) +} + +object Foo extends Outer[String] + +class Tst { + val c = new Foo.C("a", Foo) + c match { + case Foo.C(a, o) => a + } +} diff --git a/test/files/pos/t11113.scala b/test/files/pos/t11113.scala new file mode 100644 index 000000000000..95c5c7acf58b --- /dev/null +++ b/test/files/pos/t11113.scala @@ -0,0 +1,4 @@ +object Foo { + implicit class IntOps(self: Int) { def apply[T] = self } + val s = 1234[Int] +} diff --git a/test/files/pos/t11136/Maps.java b/test/files/pos/t11136/Maps.java new file mode 100644 index 000000000000..335b002922c5 --- /dev/null +++ b/test/files/pos/t11136/Maps.java @@ -0,0 +1,14 @@ +interface Map { + default V getOrDefault(Object key, V defaultValue) { + return null; + } +} + +interface ConcurrentMap extends Map { + @Override + default V getOrDefault(Object key, V defaultValue) { + return null; + } +} + +// class ConcurrentMapWrapper implements Map, ConcurrentMap { } diff --git a/test/files/pos/t11136/Test.scala b/test/files/pos/t11136/Test.scala new file mode 100644 index 000000000000..7827ca5491fc --- /dev/null +++ b/test/files/pos/t11136/Test.scala @@ -0,0 +1 @@ +class ConcurrentMapWrapper[K, V] extends Map[K, V] with ConcurrentMap[K, V] diff --git a/test/files/pos/t11158/CharSeqJava.java b/test/files/pos/t11158/CharSeqJava.java new file mode 100644 index 000000000000..5abe16cd1372 --- /dev/null +++ b/test/files/pos/t11158/CharSeqJava.java @@ -0,0 +1,4 @@ + +public interface CharSeqJava { + int length(); +} diff --git a/test/files/pos/t11158/cs_1.scala b/test/files/pos/t11158/cs_1.scala new file mode 100644 index 000000000000..da6ce3f5109c --- /dev/null +++ b/test/files/pos/t11158/cs_1.scala @@ -0,0 +1,13 @@ + +// not computer science but the basis of the entire field, the character 
sequence +class cs extends CharSeqJava { + private var n = 42 + override def length: Int = n + def length_=(value: Int): Unit = n = value +} + +object Resetter extends App { + val cs = new cs + cs.length = 0 + assert(cs.length == 0) +} diff --git a/test/files/pos/t11158/sb_2.scala b/test/files/pos/t11158/sb_2.scala new file mode 100644 index 000000000000..246e32d42f70 --- /dev/null +++ b/test/files/pos/t11158/sb_2.scala @@ -0,0 +1,7 @@ + +// sbt is an acronym for StringBuilder test +class sbt { + val sb = new StringBuilder("There once was a man from Killarney / whose comments were nothing but blarney") + + def reset() = sb.length = 42 +} diff --git a/test/files/pos/t11169.scala b/test/files/pos/t11169.scala new file mode 100644 index 000000000000..77f4cce188d8 --- /dev/null +++ b/test/files/pos/t11169.scala @@ -0,0 +1,27 @@ +import scala.language.higherKinds + +object Test { + trait HKT[F[_]] + + trait A1[S[_]] { type T } // HKT param + trait A2 { type S ; type T } // non-HKT member + trait A3 { type S[_]; type T } // HKT member + + // "Aux"-style aliases for fixing member types: "ST" fixes S and T, "T" fixes just T (where applicable) + object A1 { type ST[ S[_], _T] = A1[S] { type T = _T } } + object A2 { type ST[_S , _T] = A2 { type S = _S ; type T = _T }; type T[_T] = A2 { type T = _T } } + object A3 { type ST[_S[_], _T] = A3 { type S[U] = _S[U]; type T = _T }; type T[_T] = A3 { type T = _T } } + + // HKT derivations for aliases above, always with wildcard `T` in rightmost position, for partial unification + implicit def a1[S[_]]: HKT[({ type F[x] = A1.ST[S, x] })#F] = ??? + implicit def a2[S ]: HKT[({ type F[x] = A2.ST[S, x] })#F] = ??? + implicit def a3[S[_]]: HKT[({ type F[x] = A3.ST[S, x] })#F] = ??? + implicit def a2t : HKT[A2. T ] = ??? + implicit def a3t : HKT[A3. T ] = ??? 
+ + implicitly[HKT[({ type F[x] = A1.ST[List, x] })#F]] // HKT-param + implicitly[HKT[({ type F[x] = A2.ST[Char, x] })#F]] // non-HKT member + implicitly[HKT[({ type F[x] = A3.ST[List, x] })#F]] // HKT-member + implicitly[HKT[A2.T]] + implicitly[HKT[A3.T]] +} diff --git a/test/files/pos/t11239.scala b/test/files/pos/t11239.scala new file mode 100644 index 000000000000..6bb7d4f19123 --- /dev/null +++ b/test/files/pos/t11239.scala @@ -0,0 +1,6 @@ +import scala.language.higherKinds + +trait Request[F[_]] +trait Context { type F[_] } +final case class AuthedRequest[F[_], A](authInfo: A, req: Request[F]) +final case class HttpRequestContext[C <: Context, Ctx](request: AuthedRequest[C#F, Ctx], context: Ctx) diff --git a/test/files/pos/t11252.scala b/test/files/pos/t11252.scala new file mode 100644 index 000000000000..e23a3e257501 --- /dev/null +++ b/test/files/pos/t11252.scala @@ -0,0 +1,13 @@ +final case class Ttl(duration: Int, other: Boolean) + +object Ttl { + def apply(duration: Int) = new Ttl(duration, false) + + def unapply(x: Ttl): Option[Int] = if (x eq null) None else Some(x.duration) +} + +object Test { + def main(args: Array[String]): Unit = { + Ttl(1) match { case Ttl(y) => println(y) } + } +} diff --git a/test/files/pos/t11273-case-class-access-boundary.scala b/test/files/pos/t11273-case-class-access-boundary.scala new file mode 100644 index 000000000000..397d563ccb52 --- /dev/null +++ b/test/files/pos/t11273-case-class-access-boundary.scala @@ -0,0 +1,13 @@ +package p1 { + private[p1] case class Foo(a: Int) + object Foo { + def m = 1 + } +} + +package p2 { + class Test { + p1.Foo.m + p1.Foo(42) + } +} diff --git a/test/files/pos/t11303.scala b/test/files/pos/t11303.scala new file mode 100644 index 000000000000..2998610eaab8 --- /dev/null +++ b/test/files/pos/t11303.scala @@ -0,0 +1,18 @@ +//> using options -Werror + +object Test { + sealed trait Gadt[F[_], O, R] + final case class Output[F[_], O](values: List[O]) extends Gadt[F, O, Unit] + + final case 
class Algebra[F[_], A]() + final case class Free[F[_], A](fa: F[A]) + + def values[F[_], O, R](gadt: Gadt[F, O, R]): List[O] = + gadt match { case o: Output[F, O] => o.values } + + def free[F[_], A](f: Free[({ type G[x] = Algebra[F, x] })#G, A]): Algebra[F, A] = f.fa + def pure[F[_], A]: Free[({ type G[x] = Algebra[F, x] })#G, A] = Free(Algebra()) + + val vs = values(Output[Option, String](List("GADT"))) + val a: Algebra[Option, Int] = free(pure) +} diff --git a/test/files/pos/t1131.scala b/test/files/pos/t1131.scala index 5ef980348daa..1b2a90457921 100644 --- a/test/files/pos/t1131.scala +++ b/test/files/pos/t1131.scala @@ -1,4 +1,4 @@ trait A { self: Any { def p: Any } => - def f(b: => Unit) {} + def f(b: => Unit): Unit = {} f { p } } diff --git a/test/files/pos/t1136.scala b/test/files/pos/t1136.scala index e505badc9cb0..92d603e69070 100644 --- a/test/files/pos/t1136.scala +++ b/test/files/pos/t1136.scala @@ -1,5 +1,5 @@ object test { - def foo(s: Int*) { + def foo(s: Int*): Unit = { s.toList match { case t: List[Int] => foo(t: _*) //case _ => // unreachable code diff --git a/test/files/pos/t11374.scala b/test/files/pos/t11374.scala new file mode 100644 index 000000000000..8a73403ae308 --- /dev/null +++ b/test/files/pos/t11374.scala @@ -0,0 +1,5 @@ + +class C { + val `_` = 42 + val Some(`_`) = Option(42) +} diff --git a/test/files/pos/t11406.scala b/test/files/pos/t11406.scala new file mode 100644 index 000000000000..93f3a851be80 --- /dev/null +++ b/test/files/pos/t11406.scala @@ -0,0 +1,33 @@ + +class Var[T](private var value: T) { + def get: T = value + def update(newValue: T) = value = newValue +} + +object Test extends App { + type UpdatePair[T] = (Var[T], T => T) + + def doUpdates[A](updateChain: List[UpdatePair[A]]) = { + updateChain.groupBy(_._1) foreach { + case (v: Var[t], ops) => + var current: t = v.get + // This is what I want to get rid of: + type Op = t => t + ops foreach { + case (_, op: Op) => current = op(current) + } + v() = current + } + } + 
def doUpdates2[A](updateChain: List[UpdatePair[A]]) = { + updateChain.groupBy(_._1) foreach { + case (v: Var[t], ops) => + var current: t = v.get + ops foreach { + case (_, op: Function1[`t`, `t`]) => current = op(current) + } + v() = current + } + } +} + diff --git a/test/files/pos/t11437.scala b/test/files/pos/t11437.scala new file mode 100644 index 000000000000..e1de81be62ef --- /dev/null +++ b/test/files/pos/t11437.scala @@ -0,0 +1,6 @@ + +trait T { + val adder0: Int => Int = _ + 3 // Works fine + var adder1: Int => Int = (_ + 3) // Works fine + var adder2: Int => Int = _ + 3 // was: Error +} diff --git a/test/files/pos/t11460.scala b/test/files/pos/t11460.scala new file mode 100644 index 000000000000..b8d17d9eb5e4 --- /dev/null +++ b/test/files/pos/t11460.scala @@ -0,0 +1,43 @@ +package ok1 { + case class test private (foo: Map[String, List[Int]], + bar: List[Int], + baz: Map[String, List[String]]) {} + + case object test { + def getInstance = apply(Map.empty, List.empty, Map.empty) + + def apply(foo: Map[String, List[Int]], + bar: List[Int], + baz: Map[String, List[String]]) = new test(foo, bar, baz) + } +} + +package ok2 { + case class test private (foo: Map[String, List[Int]], + bar: List[Int], + baz: Map[String, List[String]]) {} + + case object test { + def getInstance = apply(Map.empty) + + def apply(foo: Map[String, List[Int]] = Map.empty, + bar: List[Int] = List.empty, + baz: Map[String, List[String]] = Map.empty) = + new test(foo, bar, baz) + } +} + +package notok { + case class test private (foo: Map[String, List[Int]], + bar: List[Int], + baz: Map[String, List[String]]) {} + + case object test { + def getInstance = apply() + + def apply(foo: Map[String, List[Int]] = Map.empty, + bar: List[Int] = List.empty, + baz: Map[String, List[String]] = Map.empty) = + new test(foo, bar, baz) + } +} diff --git a/test/files/pos/t11469/Generic.java b/test/files/pos/t11469/Generic.java new file mode 100644 index 000000000000..479f49a65dc8 --- /dev/null +++ 
b/test/files/pos/t11469/Generic.java @@ -0,0 +1,4 @@ +package jkson; + +abstract class Erased { public abstract Object foo(); } +public abstract class Generic extends Erased { public T foo() { return null; } } diff --git a/test/files/pos/t11469/test.scala b/test/files/pos/t11469/test.scala new file mode 100644 index 000000000000..df8db87d63a1 --- /dev/null +++ b/test/files/pos/t11469/test.scala @@ -0,0 +1,4 @@ +import jkson.Generic + +abstract class ScalaGen[T] extends Generic[T] +abstract class ScalaMono extends Generic[Product] diff --git a/test/files/pos/t11469_2/Test_1.java b/test/files/pos/t11469_2/Test_1.java new file mode 100644 index 000000000000..bab00de09b95 --- /dev/null +++ b/test/files/pos/t11469_2/Test_1.java @@ -0,0 +1,11 @@ +import java.util.Optional; + +public class Test_1 { + public abstract static class A { + public void m(Optional a) { } + } + public abstract static class B extends A { + @Override + public void m(Optional a) { } + } +} diff --git a/test/files/pos/t11469_2/Test_2.scala b/test/files/pos/t11469_2/Test_2.scala new file mode 100644 index 000000000000..32e6382fbf06 --- /dev/null +++ b/test/files/pos/t11469_2/Test_2.scala @@ -0,0 +1,11 @@ +class A { + new Test_1.B() { } + + new Test_1.B() { + override def m(a: java.util.Optional[_ <: Object]): Unit = { } + } + + new Test_1.B() { + override def m(a: java.util.Optional[_]): Unit = { } + } +} \ No newline at end of file diff --git a/test/files/pos/t11469_joint/Test.java b/test/files/pos/t11469_joint/Test.java new file mode 100644 index 000000000000..02e0dfcfc18c --- /dev/null +++ b/test/files/pos/t11469_joint/Test.java @@ -0,0 +1,11 @@ +import java.util.Optional; + +public class Test{ + public abstract static class A { + public void m(Optional a) { } + } + public abstract static class B extends A { + @Override + public void m(Optional a) { } + } +} diff --git a/test/files/pos/t11469_joint/Test.scala b/test/files/pos/t11469_joint/Test.scala new file mode 100644 index 
000000000000..509d573f7b44 --- /dev/null +++ b/test/files/pos/t11469_joint/Test.scala @@ -0,0 +1,11 @@ +class A { + new Test.B() { } + + new Test.B() { + override def m(a: java.util.Optional[_ <: Object]): Unit = { } + } + + new Test.B() { + override def m(a: java.util.Optional[_]): Unit = { } + } +} diff --git a/test/files/pos/t11474/Outer_1.java b/test/files/pos/t11474/Outer_1.java new file mode 100644 index 000000000000..ab2b76178072 --- /dev/null +++ b/test/files/pos/t11474/Outer_1.java @@ -0,0 +1,19 @@ +public class Outer_1 { + public interface BaseBuilder { } + public abstract static class Container { } + public abstract static class Builder, B extends Builder> implements BaseBuilder { } + public abstract static class Builder1> extends Builder { } + public abstract class Builder2 extends Builder1 { } + + interface MyBase { + BaseBuilder newBuilder(); + } + + public static abstract class M1 implements MyBase { + public Builder2 newBuilder() { return null; } + } + + public static abstract class M2 implements MyBase { + public Builder newBuilder() { return null; } + } +} diff --git a/test/files/pos/t11474/Test_2.scala b/test/files/pos/t11474/Test_2.scala new file mode 100644 index 000000000000..027297f46436 --- /dev/null +++ b/test/files/pos/t11474/Test_2.scala @@ -0,0 +1,3 @@ +object Test_2 { + Seq(new Outer_1.M2 {}, new Outer_1.M1 {}) +} diff --git a/test/files/pos/t11511.scala b/test/files/pos/t11511.scala new file mode 100644 index 000000000000..15680c2e3797 --- /dev/null +++ b/test/files/pos/t11511.scala @@ -0,0 +1,12 @@ +object ORSet { + def empty[A]: ORSet[A] = ??? +} + +final class ORSet[A] { + def add(node: Long, element: A): ORSet[A] = ??? + def add(node: Int, element: A): ORSet[A] = ??? 
+} + +class Test { + ORSet.empty.add(42, "A") +} diff --git a/test/files/pos/t1152/S.scala b/test/files/pos/t1152/S.scala index 7f751c50909a..3d7f5eb89791 100644 --- a/test/files/pos/t1152/S.scala +++ b/test/files/pos/t1152/S.scala @@ -1,2 +1,2 @@ -class S2(fn:(J)=>Any) +class S2(fn: (J) => Any) object S { new S2(_.k) } diff --git a/test/files/pos/t11525.scala b/test/files/pos/t11525.scala new file mode 100644 index 000000000000..94f63b23e1e5 --- /dev/null +++ b/test/files/pos/t11525.scala @@ -0,0 +1,63 @@ +//> using options -Ystop-after:refchecks -Ydebug -uniqid +package java.lang + +/* This is a pretty random test that very indirectly tests `unique`ing of `ObjectTpeJavaRef` +It's minimize from scala-js, where CI chanced on a compilation order that would first +unique `TypeBounds(lo, ObjectTpe)`, and then `TypeBounds(lo, ObjectTpeJava)`, +which would result in a Java reference to Object being replaced by one that is used +to represent a Scala occurrence of a reference to Object, which is distinct from Any. +When Java code refers to Object, it's taken as the same thing as Any, at least when +it comes to =:= and `... <:< Object-in-java`. +*/ +import java.util.Iterator + +class Class[A](o: Object) + +class Comparable[A] { def compareTo(o: A): scala.Int = ??? } + +object System { + def currentTimeMillis(): scala.Long = ??? 
+ + def arraycopy(src: Object, srcPos: scala.Int, dest: Object, destPos: scala.Int, length: scala.Int): Unit = { + import scala.{Boolean, Double} + + def mismatch(): Nothing = + throw new ArrayStoreException("Incompatible array types") + + def copyPrim[@specialized T](src: Array[T], dest: Array[T]): Unit = { + var i = length-1 + while (i >= 0) { + dest(i+destPos) = src(i+srcPos) + i -= 1 + } + } + + def copyRef(src: Array[AnyRef], dest: Array[AnyRef]): Unit = { + val x = (src.length, dest.length) + + var i = length-1 + while (i >= 0) { + dest(i+destPos) = src(i+srcPos) + i -= 1 + } + } + + (src match { + case src: Array[Boolean] => + dest match { + case dest: Array[Boolean] => copyPrim(src, dest) + case _ => mismatch() + } + + }) + } + + def identityHashCode(x: Object): scala.Int = { + x.getClass + 1 + } +} + +trait Iterable[T] { + def iterator(): java.util.Iterator[T] +} diff --git a/test/files/pos/t11525a/A.scala b/test/files/pos/t11525a/A.scala new file mode 100644 index 000000000000..8baac2cc655f --- /dev/null +++ b/test/files/pos/t11525a/A.scala @@ -0,0 +1,16 @@ +package example + + +trait PartialFunction[-A, +B] extends Function1[A,B] { self => + def isDefinedAt(x: A): Boolean + def compose[CLASH](k: PartialFunction[CLASH, A]): PartialFunction[CLASH, B] = null +} + +trait Function1[-T1, +R] extends AnyRef { self => + def apply(v1: T1): R + def compose[A](g: Function1[A, T1]): Function1[A , R] = null +} + +abstract class AbstractPartialFunction[-T1, +CLASH] extends Function1[T1, CLASH] with PartialFunction[T1, CLASH] { self => + def apply(x: T1): CLASH = ??? 
+} diff --git a/test/files/pos/t11525a/Test.java b/test/files/pos/t11525a/Test.java new file mode 100644 index 000000000000..811b55ce38b0 --- /dev/null +++ b/test/files/pos/t11525a/Test.java @@ -0,0 +1,5 @@ +public class Test { + class D extends example.AbstractPartialFunction { + public boolean isDefinedAt(String s) { return false; } + }; +} \ No newline at end of file diff --git a/test/files/pos/t11525b/Test.java b/test/files/pos/t11525b/Test.java new file mode 100644 index 000000000000..acc4805ad402 --- /dev/null +++ b/test/files/pos/t11525b/Test.java @@ -0,0 +1,5 @@ +public class Test { + class D extends scala.runtime.AbstractPartialFunction { + public boolean isDefinedAt(String s) { return false; } + }; +} \ No newline at end of file diff --git a/test/files/pos/t11534.scala b/test/files/pos/t11534.scala new file mode 100644 index 000000000000..dd3808195b75 --- /dev/null +++ b/test/files/pos/t11534.scala @@ -0,0 +1,8 @@ +//> using options -Werror +object Test1 { + val g: scala.tools.nsc.Global = ??? + import g._ + def test(sym: Symbol) = sym.name match { + case _: TermName => + } +} diff --git a/test/files/pos/t11538.scala b/test/files/pos/t11538.scala index 815f0f996d9a..0c02d81241b7 100644 --- a/test/files/pos/t11538.scala +++ b/test/files/pos/t11538.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -deprecation -stop:refchecks +//> using options -Werror -deprecation -stop:refchecks package t11538 @deprecated("not for you", since = "just now") diff --git a/test/files/pos/t11558.scala b/test/files/pos/t11558.scala new file mode 100644 index 000000000000..5882952ab2c6 --- /dev/null +++ b/test/files/pos/t11558.scala @@ -0,0 +1,13 @@ +class Test { + trait F1[A, B] { def apply(a: A): B } + trait S[T] { def map[R](f: F1[_ >: T, _ <: R]): S[R] } + (??? : S[Int]).map(_.toString) // check that map's type param is inferred (should be String) +} + +object Test_9244 { + trait F1[T1, R] { def apply(a1: T1): R } + + def cycle[T](function: F1[? >: T, ? <: T]): Unit = ??? 
+ + def t9244 = cycle((_: String) => "") +} diff --git a/test/files/pos/t11575/Base.java b/test/files/pos/t11575/Base.java new file mode 100644 index 000000000000..e32464388e21 --- /dev/null +++ b/test/files/pos/t11575/Base.java @@ -0,0 +1,3 @@ +public class Base { + long[] counts; +} diff --git a/test/files/pos/t11575/Cover.java b/test/files/pos/t11575/Cover.java new file mode 100644 index 000000000000..52c0c49620d2 --- /dev/null +++ b/test/files/pos/t11575/Cover.java @@ -0,0 +1,3 @@ +public class Cover extends Base { + int[] counts; +} diff --git a/test/files/pos/t11575/LocalImpl.scala b/test/files/pos/t11575/LocalImpl.scala new file mode 100644 index 000000000000..2a63dd17948e --- /dev/null +++ b/test/files/pos/t11575/LocalImpl.scala @@ -0,0 +1 @@ +class LocalImpl extends Cover diff --git a/test/files/pos/t11575c/LocalImpl.scala b/test/files/pos/t11575c/LocalImpl.scala new file mode 100644 index 000000000000..0a4bef46e534 --- /dev/null +++ b/test/files/pos/t11575c/LocalImpl.scala @@ -0,0 +1,9 @@ +class Base { + private[this] var counts: Array[Long] = _ +} + +class Cover extends Base { + private[this] var counts: Array[Int] = _ +} + +class LocalImpl extends Cover diff --git a/test/files/pos/t11579.scala b/test/files/pos/t11579.scala new file mode 100644 index 000000000000..244e3fb8221b --- /dev/null +++ b/test/files/pos/t11579.scala @@ -0,0 +1,13 @@ +import scala.collection.generic.IsIterable +import scala.language.implicitConversions + +object Test { + class Ops[I] { + def method: Unit = () + } + + implicit def ToOps[Repr, L, R](aCol: Repr)(implicit isIterable: IsIterable[Repr]{type A = (L, R)}): Ops[isIterable.type] = + new Ops[isIterable.type] + + List(1 -> 2).method +} diff --git a/test/files/pos/t11603.scala b/test/files/pos/t11603.scala new file mode 100644 index 000000000000..8d943b1ae6fc --- /dev/null +++ b/test/files/pos/t11603.scala @@ -0,0 +1,6 @@ +//> using options -Werror +class C { + def m(x: true) = x match { + case true => println("the one true 
path") + } +} diff --git a/test/files/pos/t11640.scala b/test/files/pos/t11640.scala new file mode 100644 index 000000000000..7b432f3695f0 --- /dev/null +++ b/test/files/pos/t11640.scala @@ -0,0 +1,6 @@ +object Test { + trait T[S] + type Foo[S <: Foo[S]] = T[S] + + type X[A <: X[A]] = String +} diff --git a/test/files/pos/t11663/A_1.scala b/test/files/pos/t11663/A_1.scala new file mode 100644 index 000000000000..1b0a13ae6122 --- /dev/null +++ b/test/files/pos/t11663/A_1.scala @@ -0,0 +1,6 @@ +class A { + @inline final def m: Int = { + val (x, y) = (10, 20) + x + } +} diff --git a/test/files/pos/t11663/B_2.scala b/test/files/pos/t11663/B_2.scala new file mode 100644 index 000000000000..5c2513618f28 --- /dev/null +++ b/test/files/pos/t11663/B_2.scala @@ -0,0 +1,4 @@ +//> using options -opt:inline:** -Wopt:_ +class B { + def bar(c: A) = c.m +} diff --git a/test/files/pos/t11681.scala b/test/files/pos/t11681.scala new file mode 100644 index 000000000000..68638b12e4ad --- /dev/null +++ b/test/files/pos/t11681.scala @@ -0,0 +1,10 @@ +// +//> using options -Wunused:privates -Werror +// +package com + +package example { + private object Test { + def foo: String = "foo" + } +} diff --git a/test/files/pos/t11755.scala b/test/files/pos/t11755.scala new file mode 100644 index 000000000000..5985cdde032f --- /dev/null +++ b/test/files/pos/t11755.scala @@ -0,0 +1,15 @@ +trait Map[K] +class HashMap[K] extends Map[K] +class IdentityBox[+A] + +class IdentityHashMap[K](inner: HashMap[IdentityBox[K]]) extends Map[K] { + def this(initialMap: Map[_ <: K]) = this(new HashMap[IdentityBox[K]]()) + + def bla[K](inner: HashMap[IdentityBox[K]]): IdentityHashMap[K] = ??? + def bla[K](initialMap: Map[_ <: K]): IdentityHashMap[K] = bla(new HashMap[IdentityBox[K]]()) + + new IdentityHashMap(??? : HashMap[IdentityBox[K]]) + bla(??? :HashMap[IdentityBox[K]]) +} + +// isApplicable true : (inner: HashMap[IdentityBox[K]])IdentityHashMap[K] to List(HashMap[IdentityBox[K]]) for ? 
under List() diff --git a/test/files/pos/t11768.scala b/test/files/pos/t11768.scala new file mode 100644 index 000000000000..7d4907f3b1cb --- /dev/null +++ b/test/files/pos/t11768.scala @@ -0,0 +1,16 @@ +object A { + trait B[T] + implicit def c[T <: Singleton]: B[T] = ??? + implicit def d[T1: B, T2: B]: B[Tuple2[T1, T2]] = ??? + implicit def e[T: B]: B[Option[T]] = ??? + implicit def f[C[_] <: Iterable[_], T](implicit r: B[T]): B[C[T]] = ??? +} + +object G { + class H[T: A.B, V: A.B](t: Option[(V, T)]){ + implicitly[A.B[Option[(V, T)]]] + } + def h[T: A.B, V: A.B](t: Option[(V, T)]){ + implicitly[A.B[Option[(V, T)]]] + } +} diff --git a/test/files/pos/t11774.scala b/test/files/pos/t11774.scala new file mode 100644 index 000000000000..b88c975100cd --- /dev/null +++ b/test/files/pos/t11774.scala @@ -0,0 +1,3 @@ +class C[T](x: AnyRef, y: Boolean = false) { + def this(x: T) = this(x.asInstanceOf[AnyRef]) +} diff --git a/test/files/pos/t11787.scala b/test/files/pos/t11787.scala new file mode 100644 index 000000000000..134da214db5e --- /dev/null +++ b/test/files/pos/t11787.scala @@ -0,0 +1,10 @@ + +object syntax { + implicit class Ops1(x: String) { private[syntax] def bar: Int = 1 } + implicit class Ops2(x: String) { def bar: Int = 2 } +} + +object test { + import syntax._ + val result = "foo".bar +} diff --git a/test/files/pos/t11787b.scala b/test/files/pos/t11787b.scala new file mode 100644 index 000000000000..8c78f9292380 --- /dev/null +++ b/test/files/pos/t11787b.scala @@ -0,0 +1,4 @@ + +class C { + def f = (Array(1), Array("foo")).zipped +} diff --git a/test/files/pos/t11788/Bar.scala b/test/files/pos/t11788/Bar.scala new file mode 100644 index 000000000000..01c1838abe21 --- /dev/null +++ b/test/files/pos/t11788/Bar.scala @@ -0,0 +1,3 @@ +object Bar extends App { + println(new Foo().test()) +} diff --git a/test/files/pos/t11788/Foo.java b/test/files/pos/t11788/Foo.java new file mode 100644 index 000000000000..802929d7fc92 --- /dev/null +++ 
b/test/files/pos/t11788/Foo.java @@ -0,0 +1,7 @@ +public class Foo { + private String java; + + public java.lang.Integer test() { + return Integer.valueOf(42); + } +} diff --git a/test/files/pos/t11813.scala b/test/files/pos/t11813.scala index 88d96dbfe4e2..accc6b4e2377 100644 --- a/test/files/pos/t11813.scala +++ b/test/files/pos/t11813.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Ywarn-self-implicit +//> using options -Werror -Xlint:implicit-recursion // package warner diff --git a/test/files/pos/t11840/A_1.java b/test/files/pos/t11840/A_1.java new file mode 100644 index 000000000000..24b4e8b4eda2 --- /dev/null +++ b/test/files/pos/t11840/A_1.java @@ -0,0 +1,12 @@ +public class A_1 { + public static abstract class X {} + + public static abstract class Y { + public X appendFile(String s) { return null; } + } + + public static class Z extends Y { + @Override + public X appendFile(String s) { return null; } + } +} diff --git a/test/files/pos/t11840/B_2.scala b/test/files/pos/t11840/B_2.scala new file mode 100644 index 000000000000..263b171ffaba --- /dev/null +++ b/test/files/pos/t11840/B_2.scala @@ -0,0 +1 @@ +object B extends A_1.Z diff --git a/test/files/pos/t1185.scala b/test/files/pos/t1185.scala index de453ec8ddcf..5bbaa18a2cd5 100644 --- a/test/files/pos/t1185.scala +++ b/test/files/pos/t1185.scala @@ -7,7 +7,7 @@ class Test { } object Main{ - def main(args : Array[String]){ + def main(args : Array[String]): Unit ={ val fff=new Test() fff.foo() assert(1==fff.look) diff --git a/test/files/pos/t11856.scala b/test/files/pos/t11856.scala new file mode 100644 index 000000000000..69831172dc1d --- /dev/null +++ b/test/files/pos/t11856.scala @@ -0,0 +1,19 @@ +//> using options -Werror -Wunused:params + +class C { + def answer: 42 = 42 + object X + def g0(x: Int) = ??? 
+ def f0(x: Int) = () + def f1(x: Int) = throw new RuntimeException + def f2(x: Int) = 42 + def f3(x: Int): Option[Int] = None + def f4(x: Int) = classOf[Int] + def f5(x: Int) = answer + 27 + def f6(x: Int) = X + def f7(x: Int) = Y + def f8(x: Int): List[C] = Nil + //def z0(x: Int) = X.toString + //def z1(x: Int) = (42: Int) +} +object Y diff --git a/test/files/pos/t11895.scala b/test/files/pos/t11895.scala new file mode 100644 index 000000000000..3f58faf1b246 --- /dev/null +++ b/test/files/pos/t11895.scala @@ -0,0 +1,9 @@ +//> using options -Werror -deprecation +// +trait Bar { + trait Foo + val Foo: Foo +} +object SomeBar extends Bar { + object Foo extends Foo +} diff --git a/test/files/pos/t11908/C.scala b/test/files/pos/t11908/C.scala new file mode 100644 index 000000000000..b443d18fe3fb --- /dev/null +++ b/test/files/pos/t11908/C.scala @@ -0,0 +1,71 @@ +//> using jvm 16+ +object C { + + def useR0: Unit = { + val r0 = new R0 + } + + def useR1 = { + // constructor signature + val r1 = new R1(123, "hello") + + // accessors signature + val i: Int = r1.i + val s: String = r1.s + val j: Int = r1.i() + + // method + val s2: String = r1.someMethod() + + // supertype + val isRecord: java.lang.Record = r1 + + () + } + + def useR2 = { + // constructor signature + val r2 = new R2.R(123, "hello") + + // accessors signature + val i: Int = r2.i + val s: String = r2.s + + // method + val i2: Int = r2.getInt + + // supertype + val isIntLike: IntLike = r2 + val isRecord: java.lang.Record = r2 + + () + } + + def useR3 = { + // constructor signature + val r3 = new R3(123, 42L, "hi") + new R3("hi", 123) + + // accessors signature + val i: Int = r3.i + val l: Long = r3.l + val s: String = r3.s + + locally { + val i: Int = r3.i() + val l: Long = r3.l() + val s: String = r3.s() + } + + // method + val l2: Long = r3.l(43L, 44L) + + // supertype + val isRecord: java.lang.Record = r3 + } + + def useR4: Unit = { + val r4 = new R4(42) + val n: Int = r4.t + } +} diff --git 
a/test/files/pos/t11908/IntLike.scala b/test/files/pos/t11908/IntLike.scala new file mode 100644 index 000000000000..e1172f12d964 --- /dev/null +++ b/test/files/pos/t11908/IntLike.scala @@ -0,0 +1,4 @@ +//> using jvm 16+ +trait IntLike { + def getInt: Int +} diff --git a/test/files/pos/t11908/R1.java b/test/files/pos/t11908/R1.java new file mode 100644 index 000000000000..8880f0bc1cff --- /dev/null +++ b/test/files/pos/t11908/R1.java @@ -0,0 +1,11 @@ +// javaVersion: 16+ +record R1(int i, String s) { + + public String someMethod() { + return s + "!"; + } +} + +record R0() {} + +record R4(T t) {} diff --git a/test/files/pos/t11908/R2.java b/test/files/pos/t11908/R2.java new file mode 100644 index 000000000000..62bf5ff6c22c --- /dev/null +++ b/test/files/pos/t11908/R2.java @@ -0,0 +1,14 @@ +// javaVersion: 16+ +public class R2 { + final record R(int i, String s) implements IntLike { + public int getInt() { + return i; + } + + // Canonical constructor + public R(int i, java.lang.String s) { + this.i = i; + this.s = s.intern(); + } + } +} diff --git a/test/files/pos/t11908/R3.java b/test/files/pos/t11908/R3.java new file mode 100644 index 000000000000..03a06dfc6f37 --- /dev/null +++ b/test/files/pos/t11908/R3.java @@ -0,0 +1,23 @@ +// javaVersion: 16+ +public record R3(int i, long l, String s) { + + // User-specified accessor + public int i() { + return i + 1; // evil >:) + } + + // Not an accessor - too many parameters + public long l(long a1, long a2) { + return a1 + a2; + } + + // Secondary constructor + public R3(String s, int i) { + this(i, 42L, s); + } + + // Compact constructor + public R3 { + s = s.intern(); + } +} diff --git a/test/files/pos/t11908b/C_2.scala b/test/files/pos/t11908b/C_2.scala new file mode 100644 index 000000000000..b443d18fe3fb --- /dev/null +++ b/test/files/pos/t11908b/C_2.scala @@ -0,0 +1,71 @@ +//> using jvm 16+ +object C { + + def useR0: Unit = { + val r0 = new R0 + } + + def useR1 = { + // constructor signature + val r1 = new R1(123, 
"hello") + + // accessors signature + val i: Int = r1.i + val s: String = r1.s + val j: Int = r1.i() + + // method + val s2: String = r1.someMethod() + + // supertype + val isRecord: java.lang.Record = r1 + + () + } + + def useR2 = { + // constructor signature + val r2 = new R2.R(123, "hello") + + // accessors signature + val i: Int = r2.i + val s: String = r2.s + + // method + val i2: Int = r2.getInt + + // supertype + val isIntLike: IntLike = r2 + val isRecord: java.lang.Record = r2 + + () + } + + def useR3 = { + // constructor signature + val r3 = new R3(123, 42L, "hi") + new R3("hi", 123) + + // accessors signature + val i: Int = r3.i + val l: Long = r3.l + val s: String = r3.s + + locally { + val i: Int = r3.i() + val l: Long = r3.l() + val s: String = r3.s() + } + + // method + val l2: Long = r3.l(43L, 44L) + + // supertype + val isRecord: java.lang.Record = r3 + } + + def useR4: Unit = { + val r4 = new R4(42) + val n: Int = r4.t + } +} diff --git a/test/files/pos/t11908b/IntLike.scala b/test/files/pos/t11908b/IntLike.scala new file mode 100644 index 000000000000..e1172f12d964 --- /dev/null +++ b/test/files/pos/t11908b/IntLike.scala @@ -0,0 +1,4 @@ +//> using jvm 16+ +trait IntLike { + def getInt: Int +} diff --git a/test/files/pos/t11908b/R1.java b/test/files/pos/t11908b/R1.java new file mode 100644 index 000000000000..8880f0bc1cff --- /dev/null +++ b/test/files/pos/t11908b/R1.java @@ -0,0 +1,11 @@ +// javaVersion: 16+ +record R1(int i, String s) { + + public String someMethod() { + return s + "!"; + } +} + +record R0() {} + +record R4(T t) {} diff --git a/test/files/pos/t11908b/R2.java b/test/files/pos/t11908b/R2.java new file mode 100644 index 000000000000..62bf5ff6c22c --- /dev/null +++ b/test/files/pos/t11908b/R2.java @@ -0,0 +1,14 @@ +// javaVersion: 16+ +public class R2 { + final record R(int i, String s) implements IntLike { + public int getInt() { + return i; + } + + // Canonical constructor + public R(int i, java.lang.String s) { + this.i = i; + 
this.s = s.intern(); + } + } +} diff --git a/test/files/pos/t11908b/R3.java b/test/files/pos/t11908b/R3.java new file mode 100644 index 000000000000..03a06dfc6f37 --- /dev/null +++ b/test/files/pos/t11908b/R3.java @@ -0,0 +1,23 @@ +// javaVersion: 16+ +public record R3(int i, long l, String s) { + + // User-specified accessor + public int i() { + return i + 1; // evil >:) + } + + // Not an accessor - too many parameters + public long l(long a1, long a2) { + return a1 + a2; + } + + // Secondary constructor + public R3(String s, int i) { + this(i, 42L, s); + } + + // Compact constructor + public R3 { + s = s.intern(); + } +} diff --git a/test/files/pos/t11917/Z.scala b/test/files/pos/t11917/Z.scala index 1ebadbc9ae48..13b19b20ec8e 100644 --- a/test/files/pos/t11917/Z.scala +++ b/test/files/pos/t11917/Z.scala @@ -1,4 +1,5 @@ -// scalac: -Ypickle-java +//> using options -Ypickle-java + package bar -class Z +class Z \ No newline at end of file diff --git a/test/files/pos/t11921-depth.scala b/test/files/pos/t11921-depth.scala deleted file mode 100644 index 6b02294c87cd..000000000000 --- a/test/files/pos/t11921-depth.scala +++ /dev/null @@ -1,21 +0,0 @@ -// scalac: -Xfatal-warnings -Xsource:2.13 - -package p.q.test { - class K -} - -package test { - class NimicDeloc -} - -package m { - import p.q._ - - abstract class T { - def test = 1 - } - - class C extends T { - def m = test - } -} diff --git a/test/files/pos/t11921a.scala b/test/files/pos/t11921a.scala new file mode 100644 index 000000000000..5bee630e57c1 --- /dev/null +++ b/test/files/pos/t11921a.scala @@ -0,0 +1,18 @@ + +class C(x: Int) { + class D extends C(42) { + def f() = println(x) + } +} + +trait T { + val t: Int +} +trait U extends T { + val t: Int + import t._ +} +trait V { this: T => + val t: Int + import t._ +} diff --git a/test/files/pos/t11921b.scala b/test/files/pos/t11921b.scala index 354f50624e6f..420b4c4d2f14 100644 --- a/test/files/pos/t11921b.scala +++ b/test/files/pos/t11921b.scala @@ -1,4 +1,4 @@ 
-// scalac: -Xfatal-warnings -Xsource:2.13 -Wconf:msg=legacy-binding:s +//> using options -Werror -Wconf:msg=legacy-binding:s -Xsource:3 object test1 { @@ -8,22 +8,22 @@ object test1 { object Test { val x = 1 class D extends C { - println(x) // error + println(x) // error } def f() = new C { - println(x) // error + println(x) // error } } } object test2 { - def c(y: Float) = { + def c(y: Float): AnyRef { val y: Int } = { class D { val y = 2 } new D { - println(y) // error + println(y) // error } } } @@ -35,7 +35,7 @@ object test3 { } class E extends D { class F { - println(y) // error + println(y) // error } } } @@ -47,5 +47,5 @@ class C { val global = 42 } object D extends C { - println(global) // error + println(global) // OK, since global is defined in package (https://github.com/scala/scala/pull/10220/files#r1109773904) } diff --git a/test/files/pos/t11921c.scala b/test/files/pos/t11921c.scala index 9afe4335975f..d10aa603669a 100644 --- a/test/files/pos/t11921c.scala +++ b/test/files/pos/t11921c.scala @@ -1,4 +1,6 @@ -// scalac: -Xfatal-warnings -Xsource:2.13 +//> using options -Xsource:3 + +// test/scaladoc/resources/t5784.scala package test.templates { object `package` { diff --git a/test/files/pos/t11952.scala b/test/files/pos/t11952.scala index c3b1b4e1f0da..5da46f3e6c2b 100644 --- a/test/files/pos/t11952.scala +++ b/test/files/pos/t11952.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xlint -nowarn +//> using options -Werror -Xlint -nowarn // // nowarn should mean no warnings are emitted, // irrespective of other flags, and also no diff --git a/test/files/pos/t11964.scala b/test/files/pos/t11964.scala new file mode 100644 index 000000000000..86b51f6ae33a --- /dev/null +++ b/test/files/pos/t11964.scala @@ -0,0 +1,19 @@ +//> using options -Werror -Xlint + +object Hmm { + def zxc(b: Int*)(implicit x: Int = 3) = "" + b + x + def res = zxc(4) +} + +object Test { + def foo(a: Any, b: Any = null, c: Any = null)(cs: String*) = ??? 
+ def res = foo("", c = "")("X") +} + +object OP { + def f(a: Int, b: String*) = "first" + def res = f(b = "sl19", a = 28) // looks like the issue is only with single arg supplied to varargs. + def or = f(b = ("x"::"y"::Nil):_*, a = 42) // 2.13 syntax only + //def and = f(b = ("x"::"y"::Nil):_*) // broken under 2.13, which disallows default + varargs + def and = List(elems = ("x"::"y"::Nil):_*) +} diff --git a/test/files/pos/t11966.scala b/test/files/pos/t11966.scala new file mode 100644 index 000000000000..4802c8c602cf --- /dev/null +++ b/test/files/pos/t11966.scala @@ -0,0 +1,7 @@ +//> using options -Werror -deprecation +// +object Test { + val original = """\/ \/ /\""" + val minimal = """\1234\""" + val alternative = raw"""\1234\""" +} \ No newline at end of file diff --git a/test/files/pos/t11980.scala b/test/files/pos/t11980.scala new file mode 100644 index 000000000000..113f550f2e75 --- /dev/null +++ b/test/files/pos/t11980.scala @@ -0,0 +1,15 @@ +//scalac: -Wconf:cat=scala3-migration:s -Werror -Wunused:privates -Xsource:3 +// +object Domain { + def id(id: String): Domain = Domain(Some(id), None) + def name(name: String): Domain = Domain(None, Some(name)) + + // induces private copy and private copy defaults + def apply(id: String, name: String): Domain = Domain(Some(id), Some(name)) +} + +case class Domain private (id: Option[String], name: Option[String]) + +// t7707 +object O { O() ; def f(): Unit = O() } +case class O private (x: Int = 3) diff --git a/test/files/pos/t12006.scala b/test/files/pos/t12006.scala new file mode 100644 index 000000000000..013ba24a3e2f --- /dev/null +++ b/test/files/pos/t12006.scala @@ -0,0 +1,10 @@ +//> using options -Xsource:3 + +// see https://github.com/scala/bug/issues/12006 +// java.io.InputStream looks like a SAM (read method), +// but u.openStream returns InputStream so don't eta-expand. 
+class C1(s: => java.io.InputStream) +class D1(u: java.net.URL) extends C1(u.openStream) // ok + +class C2(s: java.io.InputStream) +class D2(u: java.net.URL) extends C2(u.openStream) // ok diff --git a/test/files/pos/t12028.scala b/test/files/pos/t12028.scala new file mode 100644 index 000000000000..ab4fa7827588 --- /dev/null +++ b/test/files/pos/t12028.scala @@ -0,0 +1,12 @@ +import scala.annotation.unchecked.uncheckedVariance + +object Test { + trait TypeClass[F[_]] { + def action[A](foo: F[A], bar: F[A]): F[A] + } + + class Syntax[F[+_], A](private val foo: F[A]) extends AnyVal { + def action[F1[+x] >: F[x] @uncheckedVariance](bar: F1[A])(implicit tc: TypeClass[F1]): F1[A] = + tc.action[A](foo, bar) + } +} diff --git a/test/files/pos/t12041.scala b/test/files/pos/t12041.scala new file mode 100644 index 000000000000..ec4b8904cf76 --- /dev/null +++ b/test/files/pos/t12041.scala @@ -0,0 +1,5 @@ +object Test { + def foo(xs: Int*) = new util.Random().shuffle(xs) + def dup[T](x: T)(y: x.type) = (x, y) + def bar(xs: Int*) = dup(xs)(xs) +} \ No newline at end of file diff --git a/test/files/pos/t12077.scala b/test/files/pos/t12077.scala new file mode 100644 index 000000000000..d6a88b19017e --- /dev/null +++ b/test/files/pos/t12077.scala @@ -0,0 +1,13 @@ +object Main { + type Foo[+A] <: A + + final class Bar { + def bar = ??? 
+ } + + class Ops[A](private val self: Foo[A]) { + def baz: A = self match { + case x: Bar => x.bar + } + } +} diff --git a/test/files/pos/t12122.scala b/test/files/pos/t12122.scala new file mode 100644 index 000000000000..7f236d0ba868 --- /dev/null +++ b/test/files/pos/t12122.scala @@ -0,0 +1,5 @@ + +class C { + def f(i: Int, s: String) = s * i + def g = (i: Int, s) => f(i, s) +} diff --git a/test/files/pos/t12127.scala b/test/files/pos/t12127.scala new file mode 100644 index 000000000000..c600d254ad5c --- /dev/null +++ b/test/files/pos/t12127.scala @@ -0,0 +1,10 @@ + +//> using options -Werror -Wunused + +class C { + def f(x: Any): "hi" = x match { case s @ "hi" => s } // was: Error while emitting (in backend) + def g(x: Any): String = x match { case s @ "hi" => s } + def h(x: Any): String = x match { case s: String => s } + + def check(x: Any): "bi" = x match { case s @ "hi" => "bi" } +} diff --git a/test/files/pos/t12159/H.java b/test/files/pos/t12159/H.java new file mode 100644 index 000000000000..3a15309f733e --- /dev/null +++ b/test/files/pos/t12159/H.java @@ -0,0 +1,19 @@ +// javaVersion: 17+ +package p; + +sealed public class H { +} + +final class K extends H { +} + +non-sealed class L extends H { +} + +sealed +class P extends H { +} + +final +class Q extends P { +} diff --git a/test/files/pos/t12159/I.java b/test/files/pos/t12159/I.java new file mode 100644 index 000000000000..f91c69dd7828 --- /dev/null +++ b/test/files/pos/t12159/I.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +package p; + +sealed interface I permits J { +} diff --git a/test/files/pos/t12159/J.java b/test/files/pos/t12159/J.java new file mode 100644 index 000000000000..5bd2c4c92374 --- /dev/null +++ b/test/files/pos/t12159/J.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +package p; + +sealed public class J implements I permits M { +} diff --git a/test/files/pos/t12159/M.java b/test/files/pos/t12159/M.java new file mode 100644 index 000000000000..245c79304d29 --- /dev/null +++ 
b/test/files/pos/t12159/M.java @@ -0,0 +1,9 @@ +// javaVersion: 17+ + +package p; + +public final class M extends J { +} + +final class N extends L { +} diff --git a/test/files/pos/t12159/s.scala b/test/files/pos/t12159/s.scala new file mode 100644 index 000000000000..1cfc877e074b --- /dev/null +++ b/test/files/pos/t12159/s.scala @@ -0,0 +1,6 @@ +//> using jvm 17+ + +package p + +class S extends L { +} diff --git a/test/files/pos/t12159b/H_1.java b/test/files/pos/t12159b/H_1.java new file mode 100644 index 000000000000..cb3ccb9749fc --- /dev/null +++ b/test/files/pos/t12159b/H_1.java @@ -0,0 +1,14 @@ +// javaVersion: 17+ +package p; + +sealed abstract public class H_1 { +} + +final class J extends H_1 { +} + +final class K extends H_1 { +} + +final class L extends H_1 { +} diff --git a/test/files/pos/t12159b/s_2.scala b/test/files/pos/t12159b/s_2.scala new file mode 100644 index 000000000000..07a865b66c93 --- /dev/null +++ b/test/files/pos/t12159b/s_2.scala @@ -0,0 +1,13 @@ +//> using jvm 17+ +//> using options -Werror +package p + +class C { + def f(h: H_1) = + h match { + case j: J => j.toString + case k: K => k.toString + case l: L => l.toString + } +} + diff --git a/test/files/pos/t12186.scala b/test/files/pos/t12186.scala new file mode 100644 index 000000000000..a1ca8e05dc68 --- /dev/null +++ b/test/files/pos/t12186.scala @@ -0,0 +1,29 @@ +//> using options -Werror + +// this is remodeling of the scala package object and scala.collection.immutable.{ List, ::, Nil } +// in order to: +// * avoid the scala package, which is auto-imported +// * avoid List, which is rewritten/fudged in the pattern matcher +package skala.collect { + sealed trait Xs[+A] + final case class Cons[+A](head: A, tail: Xs[A]) extends Xs[A] + final case object Done extends Xs[Nothing] + object Xs +} +package object skala { + type Cons[+A] = skala.collect.Cons[A] + type Xs[+A] = skala.collect.Xs[A] + val Cons = skala.collect.Cons + val Done: skala.collect.Done.type = skala.collect.Done + val 
Xs = skala.collect.Xs +} + +import skala._ + +class Test { + def test(xs: Xs[Int]): Boolean = xs match { + case Cons(_, _) => true + case _: Done.type => false + //case _: skala.collect.Done.type => false // done this way it already works + } +} diff --git a/test/files/pos/t12187.scala b/test/files/pos/t12187.scala new file mode 100644 index 000000000000..d89ce1b86c02 --- /dev/null +++ b/test/files/pos/t12187.scala @@ -0,0 +1,8 @@ +object Test { + trait Foo[S[_[_], _[_]]] extends Bar[S] { + def m[F[_]](x: S[({ type G[A] = Bar[S] })#G, F]): Unit + } + trait Bar[S[_[_], _[_]]] { + def m[F[_]](x: S[({ type G[A] = Bar[S] })#G, F]): Unit + } +} diff --git a/test/files/pos/t12194.scala b/test/files/pos/t12194.scala new file mode 100644 index 000000000000..f8a94e0e2525 --- /dev/null +++ b/test/files/pos/t12194.scala @@ -0,0 +1,21 @@ +object Test { + trait Trait[A] + class CaseA extends Trait[Int] + class CaseB extends Trait[Boolean] + + object Pattern { + def unapply(fa: CaseB): Option[CaseB] = None + } + + def foo[A](t: Trait[A]) = { + def bar(f: Trait[A]): Unit = {} + + t match { + case Pattern(_) => + } + + t match { + case a: CaseA => bar(new CaseA) + } + } +} diff --git a/test/files/pos/t12210.scala b/test/files/pos/t12210.scala new file mode 100644 index 000000000000..35d6cdbf8c87 --- /dev/null +++ b/test/files/pos/t12210.scala @@ -0,0 +1,20 @@ +trait SpecFun[@specialized T] { + type Res + def res: Res +} + +object Test { + def m[@specialized T](op: SpecFun[T]): op.Res = op.res +} + +trait ValuesVisitor[A] { + def visit(a: A): Unit + def visitArray(arr: Array[A]): Unit = ??? 
+} + +class OpArray[@specialized A] { + def traverse(from: Array[A], fn: ValuesVisitor[A]): fn.type = { + fn.visitArray(from) + fn + } +} diff --git a/test/files/pos/t12212.scala b/test/files/pos/t12212.scala new file mode 100644 index 000000000000..6cd8de28c2ab --- /dev/null +++ b/test/files/pos/t12212.scala @@ -0,0 +1,13 @@ +object Test { + trait Low[X] + trait Lower[V, X] extends Low[X] + trait Res[V] + trait High[L[X] <: Low[X]] + trait HighRes[L[X] <: Lower[V, X], V] extends Res[V] + trait Mid[X, Y] + + def m[L[X] <: Lower[V, X], V](high: High[L]): HighRes[L, V] = ??? + def m[X, Y](mid: Mid[X, Y]): Res[Y] = ??? + def ok[L[X] <: Lower[V, X], V](high :High[L]): HighRes[L, V] = m(high) + def wtf[L[X] <: Lower[V, X], V](high :High[L]): HighRes[L, V] = m[L, V](high) +} \ No newline at end of file diff --git a/test/files/pos/t12225.scala b/test/files/pos/t12225.scala new file mode 100644 index 000000000000..76a8b9a2ffb4 --- /dev/null +++ b/test/files/pos/t12225.scala @@ -0,0 +1,6 @@ +//> using options -Ydebug +object Test { + def foo(arr: Array[Int]): Unit = { + val Array(x, y) = arr + } +} diff --git a/test/files/pos/t12233.scala b/test/files/pos/t12233.scala new file mode 100644 index 000000000000..481b5258d2d5 --- /dev/null +++ b/test/files/pos/t12233.scala @@ -0,0 +1,12 @@ + +trait TypeClass[T] +class Hehe[T: TypeClass](i: Int, j: Int) { + def this(i: Int)(implicit j: Int) = this(i, j) +} + +/* was +test/files/pos/t12233.scala:4: error: too many arguments (found 2, expected 1) for constructor Hehe: (implicit evidence$1: TypeClass[T]): Hehe[T] + def this(i: Int)(implicit j: Int) = this(i, j) + ^ +1 error + */ diff --git a/test/files/pos/t12240.scala b/test/files/pos/t12240.scala new file mode 100644 index 000000000000..7f34ab67408d --- /dev/null +++ b/test/files/pos/t12240.scala @@ -0,0 +1,65 @@ +//> using options -Xfatal-warnings -Xlint:strict-unsealed-patmat +// + +object Test { + + //original reports + + def originalReported(v: Vector[String]) = v match { + 
case Vector() => "empty" + case Vector(_) => "one" + case Vector(_*) => "this pattern is irrefutable" + } + + def originalMinimized(v: Vector[String]) = v match { + case Vector(_*) => "this pattern is irrefutable" + } + + //broader applicability + + class IrrefutableNameBasedResult[Result](r: Result) { + def isEmpty: false = false + def get: Result = r + } + + object IrrefutableIdentityExtractor { + def unapply[A](a: A) = new IrrefutableNameBasedResult(a) + } + + object IrrefutableSeqExtractor { + def unapplySeq[A](a: A) = new IrrefutableNameBasedResult(List(a)) + } + + def nameBasedPatternIsExhaustive(x: Int) = x match { + case IrrefutableIdentityExtractor(_) => "exhaustive" + } + + def nameBasedSeqIsExhaustive(x: Int) = x match { + case IrrefutableSeqExtractor(_*) => "exhaustive" + } + + //status quo: + //should be in neg/t12240.scala but isn't exhaustive per + //per https://github.com/scala/bug/issues/12252 + + def reported(v: Vector[String]) = v match { + case Vector() => "empty" + case Vector(_) => "one" + case Vector(_, _, _*) => "this ought to be exhaustive" + case Vector(_*) => "scalac doesn't know was already exhaustive" + } + + //status quo: + //should be in neg/t12240.scala, but the unreachable code isn't reported + //per https://github.com/scala/bug/issues/12251 + def nameBasedPatternUnreachable(x: Int) = x match { + case IrrefutableIdentityExtractor(_) => "exhaustive" + case _ => "unreachable" + } + + def nameBasedSeqUnreachable(x: Int) = x match { + case IrrefutableSeqExtractor(_*) => "exhaustive" + case _ => "unreachable" + } + +} \ No newline at end of file diff --git a/test/files/pos/t12248.scala b/test/files/pos/t12248.scala new file mode 100644 index 000000000000..aed9f52d1705 --- /dev/null +++ b/test/files/pos/t12248.scala @@ -0,0 +1,9 @@ +trait A + +trait B { + def ==[T](o: T)(implicit a: A): Boolean = ??? 
+} + +case class C(b: B) + +// cf test/files/pos/t10536.scala diff --git a/test/files/pos/t12248b.scala b/test/files/pos/t12248b.scala new file mode 100644 index 000000000000..f56de67c2d8d --- /dev/null +++ b/test/files/pos/t12248b.scala @@ -0,0 +1,9 @@ +trait A + +trait B extends Any { + def ==[T](o: T)(implicit a: A): Boolean = ??? +} + +case class C(b: B) + +// cf test/files/pos/t10536.scala diff --git a/test/files/pos/t12248c.scala b/test/files/pos/t12248c.scala new file mode 100644 index 000000000000..a02f2642f11d --- /dev/null +++ b/test/files/pos/t12248c.scala @@ -0,0 +1,9 @@ +trait A + +class B(private val n: Int) extends AnyVal { + def ==[T](o: T)(implicit a: A): Boolean = ??? +} + +case class C(b: B) + +// cf test/files/pos/t10536.scala diff --git a/test/files/pos/t12249/A.scala b/test/files/pos/t12249/A.scala new file mode 100644 index 000000000000..dd3901812050 --- /dev/null +++ b/test/files/pos/t12249/A.scala @@ -0,0 +1,4 @@ +package mixintest.a +abstract class A { + protected val x: Int +} \ No newline at end of file diff --git a/test/files/pos/t12249/B.scala b/test/files/pos/t12249/B.scala new file mode 100644 index 000000000000..554d2c88cd9f --- /dev/null +++ b/test/files/pos/t12249/B.scala @@ -0,0 +1,5 @@ +package mixintest.b +import mixintest.a.A +trait B extends A { + println(x) +} \ No newline at end of file diff --git a/test/files/pos/t12249/C.scala b/test/files/pos/t12249/C.scala new file mode 100644 index 000000000000..2ebc19bec65a --- /dev/null +++ b/test/files/pos/t12249/C.scala @@ -0,0 +1,4 @@ +package mixintest.c +import mixintest.a.A +import mixintest.b.B +case class C(override protected val x: Int) extends A with B \ No newline at end of file diff --git a/test/files/pos/t12250.scala b/test/files/pos/t12250.scala new file mode 100644 index 000000000000..38add8ba16d6 --- /dev/null +++ b/test/files/pos/t12250.scala @@ -0,0 +1,11 @@ +//> using options -Werror +final case class Foo(value: String) + +object Foo { + def unapply(str: String): 
Option[Foo] = Some(Foo(str)) + + def extract(id: Foo): String = + id match { + case Foo(a) => a + } +} diff --git a/test/files/pos/t12250b.scala b/test/files/pos/t12250b.scala new file mode 100644 index 000000000000..cf84e9bea5a6 --- /dev/null +++ b/test/files/pos/t12250b.scala @@ -0,0 +1,23 @@ +//> using options -Werror + +sealed case class Sub1(str: String) +final case class Sup1(str: String) extends Sup0 + +final class Sub2 extends Sub1("") +sealed trait Sup0 { def str: String } + +// both of these unapplies are overloads of the synthetic unapply +// i.e. it isn't suppressed +object Sub1 { def unapply(x: Sub2): Some[String] = Some(x.str) } +object Sup1 { def unapply(x: Sup0): Some[String] = Some(x.str) } + +object Test { + // these seek the original unapplies and should be converted to use their constructors + def testSub1(x: Sub1) = x match { case Sub1(str) => str } + def testSup1(x: Sup1) = x match { case Sup1(str) => str } + + // these seek the user-defined alternative unapplies + // thus they shouldn't accidentally be converted to use their constructors + def testSub2(x: Sub2) = x match { case Sub1(str) => str } + def testSup0(x: Sup0) = x match { case Sup1(str) => str } +} diff --git a/test/files/pos/t12254.scala b/test/files/pos/t12254.scala new file mode 100644 index 000000000000..e9d5c084164c --- /dev/null +++ b/test/files/pos/t12254.scala @@ -0,0 +1,15 @@ +//> using options -Xfatal-warnings -Xlint:strict-unsealed-patmat +// + +object Test { + sealed trait Foo + final class Bar extends Foo + + object Bar { + def unapply(o: Bar): true = true + } + + def f(foo: Foo) = foo match { + case Bar() => println("Bar") + } +} \ No newline at end of file diff --git a/test/files/pos/t12277.scala b/test/files/pos/t12277.scala new file mode 100644 index 000000000000..8737de45ad7e --- /dev/null +++ b/test/files/pos/t12277.scala @@ -0,0 +1,12 @@ +//> using options -Xlint:strict-unsealed-patmat -Werror +sealed trait A +final case class B() extends A +final case class C() 
extends A + +object x extends App { + + def matcher[A1 <: A](a1: A1) = a1 match { + case x @ (_: B | _: C) => println("B") + } + +} diff --git a/test/files/pos/t12295.scala b/test/files/pos/t12295.scala new file mode 100644 index 000000000000..24150cb97103 --- /dev/null +++ b/test/files/pos/t12295.scala @@ -0,0 +1,33 @@ +object Test { + sealed trait Foo[+A] // sealed or not doesn't matter + case class ImplA[A](a: A) extends Foo[A] + case class ImplAny[A](a: Any) extends Foo[A] + + trait Bar[+G[_]] // must be covariant + + def err[F[_]](): Unit = { + val x: Foo[Foo[Bar[F]]] = ??? + + x match { + case ImplAny(ImplAny(_)) => ??? + case ImplAny(ImplA(_)) => ??? + case ImplA(_) => ??? + case ImplAny(_) => ??? + case _ => ??? + } + + x match { + case ImplA(ImplA(_)) => ??? + case ImplA(ImplAny(_)) => ??? + case ImplA(y) => y.toString + case ImplA(y) => y match { + case ImplA(_) => ??? + case _ => ??? + } + case ImplA(y) => y + case _ => ??? + } + + () + } +} diff --git a/test/files/pos/t12304.scala b/test/files/pos/t12304.scala new file mode 100644 index 000000000000..28160f2e7501 --- /dev/null +++ b/test/files/pos/t12304.scala @@ -0,0 +1,15 @@ +//> using options -Werror + +class Foo +class Test { + def t1: Unit = { + val m: Map[String, Map[String, Foo]] = Map("outer" -> Map("inner" -> new Foo)) + m.collect { case (_, foo: Foo) => "This should be type error" } + // no: + // class Foo isn't final + // Map is an unsealed trait + // so it's possible to define: + // class Bar extends Foo with Map[...] 
+ // so the compiler is right not to warn + } +} diff --git a/test/files/pos/t12312-hmm.scala b/test/files/pos/t12312-hmm.scala new file mode 100644 index 000000000000..16decd4f9325 --- /dev/null +++ b/test/files/pos/t12312-hmm.scala @@ -0,0 +1,45 @@ +package hmm + +// Taken from https://github.com/typelevel/kind-projector/blob/7ad46d6ca995976ae2ff18215dbb32cd7ad0dd7a/src/test/scala/hmm.scala +// As a regression test for the issue spotted in https://github.com/scala/community-build/pull/1400 + +class TC[A] + +object TC { + def apply[A]: Unit = () +} + +object test { + + sealed trait HList extends Product with Serializable + case class ::[+H, +T <: HList](head : H, tail : T) extends HList + sealed trait HNil extends HList + case object HNil extends HNil + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int 
:: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] +} diff --git a/test/files/pos/t12315.scala b/test/files/pos/t12315.scala deleted file mode 100644 index c94dc9f2be78..000000000000 --- a/test/files/pos/t12315.scala +++ /dev/null @@ -1,8 +0,0 @@ -// scalac: -Xfatal-warnings -Xlint -import annotation._ - -class Thingy[T] extends Iterator[T] with java.util.Iterator[T] { - @nowarn - def hasNext: Boolean = ??? - def next(): T = ??? -} diff --git a/test/files/pos/t12323.scala b/test/files/pos/t12323.scala new file mode 100644 index 000000000000..bd0d6cb6dbc5 --- /dev/null +++ b/test/files/pos/t12323.scala @@ -0,0 +1,26 @@ +object X { + for { + x <- 1 to 5 if true if true + } yield x + + for { + x <- 1 to 5 if true + if true + } yield x + + for { + x <- 1 to 5 + if true if true + } yield x + + for { + x <- 1 to 5 + if true ; if true + } yield x + + for { + x <- 1 to 5 + if true + if true + } yield x +} diff --git a/test/files/pos/t12326.scala b/test/files/pos/t12326.scala new file mode 100644 index 000000000000..464a63590de0 --- /dev/null +++ b/test/files/pos/t12326.scala @@ -0,0 +1,7 @@ +//> using options -Werror -Wunused:imports -Wconf:origin=scala.collection.mutable._:s,origin=scala.concurrent.ExecutionContext.Implicits._:s + +import scala.collection.mutable._ + +trait T { + import scala.concurrent.ExecutionContext.Implicits._ +} diff --git a/test/files/pos/t12326b.scala b/test/files/pos/t12326b.scala new file mode 100644 index 000000000000..1b6049edad3b --- /dev/null +++ b/test/files/pos/t12326b.scala @@ -0,0 +1,8 @@ +//> using options -Werror -Wunused:imports + +import annotation._ + +@nowarn("origin=scala.concurrent.ExecutionContext.Implicits._") +trait T { + import scala.concurrent.ExecutionContext.Implicits._ +} diff --git a/test/files/pos/t12331.scala b/test/files/pos/t12331.scala new file mode 100644 index 
000000000000..ca89465f475d --- /dev/null +++ b/test/files/pos/t12331.scala @@ -0,0 +1,8 @@ +//scalac: -Ymacro-annotations +//-Ymacro-annotations -Yvalidate-pos:_ -Vprint:_ -Vprint-pos + +import annotation._ + +trait T { val x: Int } + +@strictfp class C extends { val x = 0 } with T diff --git a/test/files/pos/t12349b/A.java b/test/files/pos/t12349b/A.java new file mode 100644 index 000000000000..aab1185d87ac --- /dev/null +++ b/test/files/pos/t12349b/A.java @@ -0,0 +1,7 @@ +package p; + +public class A { + public static class R { } + + /* package-protected */ R foo() { return null; } +} diff --git a/test/files/pos/t12349b/B.java b/test/files/pos/t12349b/B.java new file mode 100644 index 000000000000..735c91372a03 --- /dev/null +++ b/test/files/pos/t12349b/B.java @@ -0,0 +1,7 @@ +package q; + +public class B extends p.A { + public static class RR extends p.A.R { } + + /* package-protected */ RR foo() { return null; } +} diff --git a/test/files/pos/t12349b/Test.scala b/test/files/pos/t12349b/Test.scala new file mode 100644 index 000000000000..3f22fa033e08 --- /dev/null +++ b/test/files/pos/t12349b/Test.scala @@ -0,0 +1 @@ +class Test extends q.B diff --git a/test/files/pos/t1236.scala b/test/files/pos/t1236.scala index 5e221ce4117a..75a1befd263c 100644 --- a/test/files/pos/t1236.scala +++ b/test/files/pos/t1236.scala @@ -11,4 +11,4 @@ object T { def foo[F[_]](e: Empty[F]) = "world" val x = foo[List](ListEmpty) -} \ No newline at end of file +} diff --git a/test/files/pos/t1237.scala b/test/files/pos/t1237.scala index 7777372138d1..31ba2966aadc 100644 --- a/test/files/pos/t1237.scala +++ b/test/files/pos/t1237.scala @@ -1,5 +1,5 @@ class HelloWorld { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { object TypeBool; diff --git a/test/files/pos/t12392.scala b/test/files/pos/t12392.scala new file mode 100644 index 000000000000..339c8898c0ea --- /dev/null +++ b/test/files/pos/t12392.scala @@ -0,0 +1,15 @@ +//> using options -Werror +import 
scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + def deepIntersectionTypeMembers[U <: SingletonUniverse](targetType: U#Type): List[U#Type] = { + def go(tpe: U#Type): List[U#Type] = { + tpe match { + case r: U#RefinedTypeApi => r.parents.flatMap(t => deepIntersectionTypeMembers[U]((t.dealias): U#Type)) + case _ => List(tpe) + } + } + go(targetType).distinct + } +} diff --git a/test/files/pos/t12393/R1.java b/test/files/pos/t12393/R1.java new file mode 100644 index 000000000000..a4fd067f2bbe --- /dev/null +++ b/test/files/pos/t12393/R1.java @@ -0,0 +1,7 @@ +//> using jvm 9+ +public interface R1 { + + private void foo() { + return; + } +} diff --git a/test/files/pos/t12396/B_2.scala b/test/files/pos/t12396/B_2.scala index b61d88c9f292..7ef4203e8787 100644 --- a/test/files/pos/t12396/B_2.scala +++ b/test/files/pos/t12396/B_2.scala @@ -1,4 +1,4 @@ -// javaVersion: 21+ +//> using jvm 21+ class B { def bar = (new A_1).f(null) diff --git a/test/files/pos/t12398.scala b/test/files/pos/t12398.scala new file mode 100644 index 000000000000..2d2708d46cd7 --- /dev/null +++ b/test/files/pos/t12398.scala @@ -0,0 +1,11 @@ +//> using options -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + + def foo[U <: SingletonUniverse](u: U)(typ: u.Type): List[u.Annotation] = typ match { + case t: u.AnnotatedTypeApi => t.annotations // was: "The outer reference in this type test cannot be checked at run time." 
+ case _ => Nil + } +} diff --git a/test/files/pos/t12398b.scala b/test/files/pos/t12398b.scala new file mode 100644 index 000000000000..546992be6d82 --- /dev/null +++ b/test/files/pos/t12398b.scala @@ -0,0 +1,11 @@ +//> using options -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + + def foo[U <: SingletonUniverse](u: U)(typ: U#Type): List[U#Annotation] = typ match { + case t: U#AnnotatedTypeApi => t.annotations // as a comparison, this wasn't emitting a warning + case _ => Nil + } +} diff --git a/test/files/pos/t12407/A.java b/test/files/pos/t12407/A.java new file mode 100644 index 000000000000..fd2c83a43298 --- /dev/null +++ b/test/files/pos/t12407/A.java @@ -0,0 +1,10 @@ +public class A { + public interface I { + I[] getArray(); + } + + public interface J extends I { + @Override + J[] getArray(); + } +} diff --git a/test/files/pos/t12407/Test.scala b/test/files/pos/t12407/Test.scala new file mode 100644 index 000000000000..6ef6c534d423 --- /dev/null +++ b/test/files/pos/t12407/Test.scala @@ -0,0 +1 @@ +trait Test extends A.J diff --git a/test/files/pos/t12467.scala b/test/files/pos/t12467.scala new file mode 100644 index 000000000000..a0cb4f79dd4f --- /dev/null +++ b/test/files/pos/t12467.scala @@ -0,0 +1,15 @@ +object PagedResponse { + type Aux[Item0] = PagedResponse { type Item = Item0 } +} + +trait PagedResponse { + type Item + sealed trait NextPage + case class NoMorePages() extends NextPage +} + +object Test { + def foo[A](next: PagedResponse.Aux[A]#NextPage): Unit = next match { + case _: PagedResponse.Aux[A]#NoMorePages => ??? 
+ } +} diff --git a/test/files/pos/t12474/Nat.java b/test/files/pos/t12474/Nat.java new file mode 100644 index 000000000000..c9de3f590d84 --- /dev/null +++ b/test/files/pos/t12474/Nat.java @@ -0,0 +1,6 @@ +// javaVersion: 17+ + +public sealed interface Nat permits Nat.Zero, Nat.Succ { + public static final record Zero() implements Nat {} + public static final record Succ(Nat pred) implements Nat {} +} diff --git a/test/files/pos/t12474/s.scala b/test/files/pos/t12474/s.scala new file mode 100644 index 000000000000..53da15f42147 --- /dev/null +++ b/test/files/pos/t12474/s.scala @@ -0,0 +1,5 @@ +//> using jvm 17+ + +class S { + def j: Nat = new Nat.Zero +} diff --git a/test/files/pos/t12513c/c.scala b/test/files/pos/t12513c/c.scala new file mode 100644 index 000000000000..927c87347983 --- /dev/null +++ b/test/files/pos/t12513c/c.scala @@ -0,0 +1 @@ +package p { class C } diff --git a/test/files/pos/t12513c/xy.scala b/test/files/pos/t12513c/xy.scala new file mode 100644 index 000000000000..d46ea3f556a8 --- /dev/null +++ b/test/files/pos/t12513c/xy.scala @@ -0,0 +1,3 @@ +import p._ +package p { class X extends C } // not ambiguous (compiles without the import) +package q { class Y extends C } // requires the import diff --git a/test/files/pos/t12520.scala b/test/files/pos/t12520.scala new file mode 100644 index 000000000000..aaad5ff09691 --- /dev/null +++ b/test/files/pos/t12520.scala @@ -0,0 +1,16 @@ +class Outcome +trait TestSuite { + protected trait NoArgTest extends (() => Outcome) + protected def withFixture(test: NoArgTest): Outcome = test() +} + +trait TestSuiteMixin { this: TestSuite => + protected def withFixture(test: NoArgTest): Outcome +} + +trait TimeLimitedTests extends TestSuiteMixin { this: TestSuite => + abstract override def withFixture(test: NoArgTest): Outcome = super.withFixture(test) +} + +trait AnyFunSuiteLike extends TestSuite +abstract class Test[C] extends AnyFunSuiteLike with TimeLimitedTests diff --git a/test/files/pos/t12538.scala 
b/test/files/pos/t12538.scala new file mode 100644 index 000000000000..18a4c6a5164d --- /dev/null +++ b/test/files/pos/t12538.scala @@ -0,0 +1,6 @@ + +//> using jvm 17+ + +class C { + @java.lang.Deprecated(since = s"test") var i = 4 +} diff --git a/test/files/pos/t12554.scala b/test/files/pos/t12554.scala new file mode 100644 index 000000000000..c18b46f6261d --- /dev/null +++ b/test/files/pos/t12554.scala @@ -0,0 +1,7 @@ +//> using options -Yimports:java.lang,scala,scala.Predef,scala.util.chaining + +class C { + def f = 42.tap(println) +} + +// was: error: bad preamble import scala.util.chaining diff --git a/test/files/pos/t12554b/p_1.scala b/test/files/pos/t12554b/p_1.scala new file mode 100644 index 000000000000..73eeec1469f4 --- /dev/null +++ b/test/files/pos/t12554b/p_1.scala @@ -0,0 +1,18 @@ + +package p { + object X +} + +package object q { + object Y +} + +package q { + class C +} + +package object r { + object Z { + def greeting = "hello, world" + } +} diff --git a/test/files/pos/t12554b/s_2.scala b/test/files/pos/t12554b/s_2.scala new file mode 100644 index 000000000000..653d0442ccd8 --- /dev/null +++ b/test/files/pos/t12554b/s_2.scala @@ -0,0 +1,9 @@ + +//> using options -Yimports:java.lang,scala,scala.Predef,p,q,r.Z + +object Test extends App { + println(X) + println(Y) + println(new C) + println(greeting) +} diff --git a/test/files/pos/t12576/macro_1.scala b/test/files/pos/t12576/macro_1.scala new file mode 100644 index 000000000000..e1a3bb27e76a --- /dev/null +++ b/test/files/pos/t12576/macro_1.scala @@ -0,0 +1,28 @@ + +import language.experimental.macros + +import reflect.macros._ + +package object test { + def wrapper(expr: Any): List[Any] = macro TestMacros.wrapper_impl +} + +package test { + + object TestMacros { + + def wrapper_impl(c: blackbox.Context)(expr: c.Expr[Any]): c.Expr[List[Any]] = { + import c.universe._ + + val f = q"(x: Any) => List($expr)" + val g = c.typecheck(f) + c.universe.internal.changeOwner(expr.tree, 
c.internal.enclosingOwner, g.symbol) + val code = q"List.empty[Any].flatMap($g)" + + // see doc at macros.Universe#MacroInternalApi#changeOwner + //val code = q"List.empty[Any].flatMap(x => List($expr))" + + c.Expr[List[Any]](code) + } + } +} diff --git a/test/files/pos/t12576/usage_2.scala b/test/files/pos/t12576/usage_2.scala new file mode 100644 index 000000000000..721aecba9858 --- /dev/null +++ b/test/files/pos/t12576/usage_2.scala @@ -0,0 +1,25 @@ + +package usage + +import test._ + +case class A() +case class B(i: Int, s: String) +object B { + //the order matters in this case - if we swap params it works + def default(s: String = "defaultString", i: Int): B = new B(i, s) +} + +// `a` param is necessary to reproduce the issue, but order, in this case, doesn't matter +case class C(b: B, a: A) + +object Wrapped { + val theC = wrapper(B.default(i = 1)) + val expected = (List.empty[Any].flatMap[B]((z: Any) => List({val y = "hi"; val x = 42; B(x,y)}))): List[Any] +} + +object Test extends App { + println { + Wrapped.theC + } +} diff --git a/test/files/pos/t1260.scala b/test/files/pos/t1260.scala index 9cd860afd8a0..02f9e7e6b193 100644 --- a/test/files/pos/t1260.scala +++ b/test/files/pos/t1260.scala @@ -6,7 +6,7 @@ object Bar { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val f = Foo("1", "2") f match { case Foo(Bar(1), Bar(2)) => 1 diff --git a/test/files/pos/t12600.scala b/test/files/pos/t12600.scala new file mode 100644 index 000000000000..41e9b33e8620 --- /dev/null +++ b/test/files/pos/t12600.scala @@ -0,0 +1,6 @@ +//> using options -Werror -Wunused:_ +class Private { + private type Curry[A] = { type T[B] = Either[A, B] } + def m2[T[A]]: Unit = () + def f() = m2[Curry[Int]#T] +} diff --git a/test/files/pos/t12622.scala b/test/files/pos/t12622.scala new file mode 100644 index 000000000000..959097d2134e --- /dev/null +++ b/test/files/pos/t12622.scala @@ -0,0 +1,43 @@ +trait ScenarioParam { + type Builder <: Type +} + 
+trait ScenarioParamBuilder + +trait Type { + type Builder <: ScenarioParamBuilder +} + +trait Types[H <: ScenarioParam, T <: Type] extends Type { + type Builder = H#Builder with T#Builder +} + +trait Nil extends Type { + type Builder = ScenarioParamBuilder +} + +trait ScenarioTarget { + type FilterParam <: Type +} + +class P1 extends ScenarioParam +class P2 extends ScenarioParam + +object someTarget extends ScenarioTarget { + type FilterParam = Types[P1, Types[P2, Nil]] +} + +class WhereClauseBuilder1[T <: ScenarioTarget] { + type FilterBuilderType = T#FilterParam#Builder + def m1(f: FilterBuilderType => Any): Any = null + def m2(f: T#FilterParam#Builder => Any): Any = null +} + +object t { + (null: WhereClauseBuilder1[someTarget.type]).m1(x => null) + + val stabilizer: WhereClauseBuilder1[someTarget.type] = null + stabilizer.m1(x => null) + + (null: WhereClauseBuilder1[someTarget.type]).m2(x => null) +} \ No newline at end of file diff --git a/test/files/pos/t12623.scala b/test/files/pos/t12623.scala new file mode 100644 index 000000000000..a51be649dcec --- /dev/null +++ b/test/files/pos/t12623.scala @@ -0,0 +1,19 @@ + +trait MapI[C] { + def i: Int + def s: String + def copy(i: Int = this.i, s: String = this.s): C + def mapI(i: Int): C = copy(i) +} + +case class C(i: Int, s: String) extends MapI[C] + +/* +was: +t12623.scala:9: error: class C needs to be abstract. +Missing implementation for member of trait MapI: + def copy(i: Int, s: String): C = ??? 
+ +case class C(i: Int, s: String) extends MapI[C] + ^ + */ diff --git a/test/files/pos/t12645/Macro_1.scala b/test/files/pos/t12645/Macro_1.scala new file mode 100644 index 000000000000..870c84ef1549 --- /dev/null +++ b/test/files/pos/t12645/Macro_1.scala @@ -0,0 +1,20 @@ + +//> using options -Xsource:3 + +import language.experimental.macros +import scala.reflect.macros.whitebox.Context + +trait Greeter { + def greeting: Option[Any] = Some("Take me to your leader, Greeter.") +} + +class Welcomer { + def greeter: Greeter = macro Macros.impl +} + +object Macros { + def impl(c: Context) = { + import c.universe._ + q"""new Greeter { override def greeting = Some("hello, quoted world") }""" + } +} diff --git a/test/files/pos/t12645/Test_2.scala b/test/files/pos/t12645/Test_2.scala new file mode 100644 index 000000000000..ab71fab4b2b6 --- /dev/null +++ b/test/files/pos/t12645/Test_2.scala @@ -0,0 +1,8 @@ + +//> using options -Xsource:3 + +object Test extends App { + def f(s: String) = println(s) + val welcomer = new Welcomer + welcomer.greeter.greeting.foreach(f) +} diff --git a/test/files/pos/t12645b/Test_2.scala b/test/files/pos/t12645b/Test_2.scala new file mode 100644 index 000000000000..8b5fa24c10c4 --- /dev/null +++ b/test/files/pos/t12645b/Test_2.scala @@ -0,0 +1,6 @@ +//> using options -Xsource:3 + +object Test extends App { + Foo.ctx.quote(42).ast.id +} +// was: Test_2.scala:4: error: value id is not a member of Ast diff --git a/test/files/pos/t12645b/macro_1.scala b/test/files/pos/t12645b/macro_1.scala new file mode 100644 index 000000000000..888390bf8831 --- /dev/null +++ b/test/files/pos/t12645b/macro_1.scala @@ -0,0 +1,31 @@ + +//> using options -Xsource:3 + +import scala.language.experimental.macros +import scala.reflect.macros.whitebox.Context + +trait Ast +case class Foo(id: Int) extends Ast +object Foo { + val ctx = new Ctx {} +} + +trait Ctx { + trait Quoted[+A] { + def ast: Ast + } + final def quote[A](a: A): Quoted[A] = macro QuoteMacro.quoteImpl[A] 
+} + +class QuoteMacro(val c: Context) { + import c.universe.* + + def quoteImpl[A](a: c.Expr[A])(implicit t: WeakTypeTag[A]) = + c.untypecheck { + q""" + new ${c.prefix}.Quoted[$t] { + def ast = Foo(42) + } + """ + } +} diff --git a/test/files/pos/t12646.scala b/test/files/pos/t12646.scala new file mode 100644 index 000000000000..b0e3f66f5b9a --- /dev/null +++ b/test/files/pos/t12646.scala @@ -0,0 +1,11 @@ + +//> using options -Werror -Wunused:params + +trait T { + private var x: String = _ + + def y: String = { + if (x eq null) x = "hello, world" + x + } +} diff --git a/test/files/pos/t12647/Macro_1.scala b/test/files/pos/t12647/Macro_1.scala new file mode 100644 index 000000000000..cbec50a79d11 --- /dev/null +++ b/test/files/pos/t12647/Macro_1.scala @@ -0,0 +1,13 @@ + +//> using options -Xsource:3 + +import scala.reflect.macros.whitebox.Context + +trait Result + +object Macros { + def impl(c: Context) = { + import c.universe._ + q"""new Result { def value = "Was this the answer you sought?" }""" + } +} diff --git a/test/files/pos/t12647/Resolve_2.scala b/test/files/pos/t12647/Resolve_2.scala new file mode 100644 index 000000000000..c9f54c1040c2 --- /dev/null +++ b/test/files/pos/t12647/Resolve_2.scala @@ -0,0 +1,13 @@ + +//> using options -Xsource:3 + +import language.experimental.macros + +trait Resolver { + def resolve: Result = ??? 
+} + +class ValueResolver extends Resolver { + override def resolve: Result { def value: String } = valueResult + def valueResult: Result = macro Macros.impl +} diff --git a/test/files/pos/t12647/Test_3.scala b/test/files/pos/t12647/Test_3.scala new file mode 100644 index 000000000000..e2fc19f46853 --- /dev/null +++ b/test/files/pos/t12647/Test_3.scala @@ -0,0 +1,7 @@ + +//> using options -Xsource:3 + +object Test extends App { + val resolver = new ValueResolver + println(resolver.resolve.value) +} diff --git a/test/files/pos/t12647b/Macro_1.scala b/test/files/pos/t12647b/Macro_1.scala new file mode 100644 index 000000000000..e54621c987e4 --- /dev/null +++ b/test/files/pos/t12647b/Macro_1.scala @@ -0,0 +1,11 @@ + +import scala.reflect.macros.whitebox.Context + +trait Result + +object Macros { + def impl(c: Context): c.Tree = { + import c.universe._ + q"""new Result { def value = "Was this the answer you sought?" }""" + } +} diff --git a/test/files/pos/t12647b/Resolve_2.scala b/test/files/pos/t12647b/Resolve_2.scala new file mode 100644 index 000000000000..cbf1457b8635 --- /dev/null +++ b/test/files/pos/t12647b/Resolve_2.scala @@ -0,0 +1,11 @@ + +import language.experimental.macros + +abstract class Resolver { + def resolve: Result = ??? 
+} + +class ValueResolver extends Resolver { + override def resolve = valueResult + def valueResult: Result = macro Macros.impl +} diff --git a/test/files/pos/t12647b/Test_3.scala b/test/files/pos/t12647b/Test_3.scala new file mode 100644 index 000000000000..16149d9965bc --- /dev/null +++ b/test/files/pos/t12647b/Test_3.scala @@ -0,0 +1,5 @@ + +object Test extends App { + val resolver = new ValueResolver + println(resolver.resolve.value) +} diff --git a/test/files/pos/t12664.scala b/test/files/pos/t12664.scala new file mode 100644 index 000000000000..aadbae96156e --- /dev/null +++ b/test/files/pos/t12664.scala @@ -0,0 +1,38 @@ +//> using options -nowarn -Wconf:cat=lint-missing-interpolator:ws -Werror -Xlint -Xsource:3 + +/* +-nowarn and -Xlint are in contradiction. Which wins? +-nowarn is recognized by per-run reporting and by sink reporter (e.g., ConsoleReporter). +For per-run reporting, -nowarn means default wconf for deprecation must not be ws (summary, which would warn). +Instead, it is w or s depending on whether -deprecation is requested. +So from the perspective of per-run reporting, -deprecation means issue a diagnostic even if -nowarn. + +For the sink reporter, too, -nowarn means "don't summarize with warning count". +In addition, -nowarn means -maxwarns:0, so any warning is filtered by FilteringReporter. +(Normally, displayed warnings is capped by -maxwarns and then summarized as a count when done.) +So from the perspective of the sink reporter, -nowarn means filter out warnings and don't print count of warnings. +It doesn't consider -deprecation at all. +In addition, -nowarn subverts -Werror. + +In the test example, there are 2 lints, a non-lint, and a migration warning. +The wconf boosts a lint to ws summary, but -nowarn tells the sink not to print either a warning or a count. +Migration is boosted to e by default, but -nowarn says don't boost migration warnings. +A user-supplied wconf could boost migration despite -nowarn. 
+Other warnings are silenced by any:s installed by -nowarn. +*/ +trait T { + def g[A]: Option[A] +} + +class C extends T { + def f: Unit = 42 // suppressed other warning for expr, lint for parens + + override def g[A] = None // suppressed migration warning, not boosted to error under --no-warnings + + def oops = "$f" // summarized lint + + @deprecated("old stuff", since="1.0") + def old = 17 + + def stale = old +} diff --git a/test/files/pos/t12666.scala b/test/files/pos/t12666.scala new file mode 100644 index 000000000000..33d336b3224a --- /dev/null +++ b/test/files/pos/t12666.scala @@ -0,0 +1,32 @@ + +package foo { + + trait Baz[A] + object Baz { + implicit def instance[A]: Baz[A] = ??? + } + + package syntax { + object all { + implicit def ops1[A: Baz](a: A): BarOps1 = new BarOps1(a) + implicit def ops2[A: Baz](a: A): BarOps2 = new BarOps2(a) + } + + class BarOps1(val a: Any) extends AnyVal { + def bar(x: Int): String = ??? + } + + class BarOps2(val a: Any) extends AnyVal { + private[syntax] def bar(x: Int): String = ??? + } + } +} + +import foo.syntax.all._ + +object Main { + def main(args: Array[String]): Unit = { + val a = new Object + a.bar(42) + } +} diff --git a/test/files/pos/t12671.scala b/test/files/pos/t12671.scala new file mode 100644 index 000000000000..65dfa3dcc7ec --- /dev/null +++ b/test/files/pos/t12671.scala @@ -0,0 +1,30 @@ + +//> using options -Xsource:3 + +import scala.collection.{mutable, IterableOnce} +import scala.collection.immutable.{AbstractSet, Set, SetOps} + +final case class Foo[-T](components: IndexedSeq[Int]) + +sealed trait FooTrie[T] + extends AbstractSet[Foo[T]] + with SetOps[Foo[T], Set, FooTrie[T]] { + + override def fromSpecific( + coll: IterableOnce[Foo[T]] + ): FooTrie[T] = { + coll.iterator.foldLeft(empty)(_ incl _) // error here + } + + override def newSpecificBuilder + : mutable.Builder[Foo[T], FooTrie[T]] = ??? + + override def incl(elem: Foo[T]): FooTrie[T] = ??? 
+ + override def empty = FooTrie.empty[T] + //override def empty: FooTrie[T] = FooTrie.empty[T] +} + +object FooTrie { + def empty[T]: FooTrie[T] = ??? +} diff --git a/test/files/pos/t12712.scala b/test/files/pos/t12712.scala new file mode 100644 index 000000000000..59d8f4504819 --- /dev/null +++ b/test/files/pos/t12712.scala @@ -0,0 +1,16 @@ +//> using options -Werror +object T { + private sealed trait T + private object O extends T + private trait U extends T + private object P extends U + + private def t(t: T) = t match { + case O => () + case _: U => println("hai") + } + + def main(args: Array[String]): Unit = { + t(P) + } +} diff --git a/test/files/pos/t1272.scala b/test/files/pos/t1272.scala index d86a909ae501..916b783bbb3e 100644 --- a/test/files/pos/t1272.scala +++ b/test/files/pos/t1272.scala @@ -6,4 +6,4 @@ object ImplicitTest { def fn[T](implicit x : T) = 0 val x = fn[Array[Byte]] -} \ No newline at end of file +} diff --git a/test/files/pos/t12736/bar_2.scala b/test/files/pos/t12736/bar_2.scala new file mode 100644 index 000000000000..3e3bd2b25cf3 --- /dev/null +++ b/test/files/pos/t12736/bar_2.scala @@ -0,0 +1,3 @@ +package bar + +private object Foo diff --git a/test/files/pos/t12736/baz_2.scala b/test/files/pos/t12736/baz_2.scala new file mode 100644 index 000000000000..187c805bde2b --- /dev/null +++ b/test/files/pos/t12736/baz_2.scala @@ -0,0 +1,15 @@ +package bar +package baz + +import foo._ + +object Bar { + def test() = println(Foo) +} + +/* +baz_2.scala:8: error: object Foo in package foo cannot be accessed as a member of package foo from object Bar in package baz + def test() = println(Foo) + ^ +1 error +*/ diff --git a/test/files/pos/t12736/foo_1.scala b/test/files/pos/t12736/foo_1.scala new file mode 100644 index 000000000000..b7fd1b4355e5 --- /dev/null +++ b/test/files/pos/t12736/foo_1.scala @@ -0,0 +1,3 @@ +package foo + +private object Foo diff --git a/test/files/pos/t12740.scala b/test/files/pos/t12740.scala new file mode 100644 index 
000000000000..535b4219a38c --- /dev/null +++ b/test/files/pos/t12740.scala @@ -0,0 +1,17 @@ +class C(x: NoSuchElementException) + +// available in java.lang +package object K extends Cloneable + +package object XX extends Serializable +package object XY extends NoSuchElementException + +object XZ extends NoSuchElementException + +package object Y { + type NSE = java.util.NoSuchElementException +} +package Z { + import Y._ + class C extends NSE +} diff --git a/test/files/pos/t12740b/Y_1.scala b/test/files/pos/t12740b/Y_1.scala new file mode 100644 index 000000000000..2b655d288ad6 --- /dev/null +++ b/test/files/pos/t12740b/Y_1.scala @@ -0,0 +1,4 @@ + +package object Y { + type NSE = java.util.NoSuchElementException +} diff --git a/test/files/pos/t12740b/Z_2.scala b/test/files/pos/t12740b/Z_2.scala new file mode 100644 index 000000000000..e9cce1f03487 --- /dev/null +++ b/test/files/pos/t12740b/Z_2.scala @@ -0,0 +1,6 @@ +//> using options -Yimports:java.lang,scala,scala.Predef,Y + +package Z { + //import Y._ + class C extends NSE +} diff --git a/test/files/pos/t12787.scala b/test/files/pos/t12787.scala new file mode 100644 index 000000000000..3d1f1be6fea4 --- /dev/null +++ b/test/files/pos/t12787.scala @@ -0,0 +1,18 @@ + +//> using options -opt:inline: -Wopt -Werror +// skalac: -opt:inline: -Vopt:C -Wopt -Werror + +// > using scala 2.13.nightly +// > using options -opt:inline:, -Wopt + +import scala.collection.LinearSeq + +final class C { + val iterators: LinearSeq[Int] = List(42) + @inline def current: Int = iterators.head + val asString = current.toString +} +object Test extends App { + val c = new C + println(c.asString) +} diff --git a/test/files/pos/t12792.scala b/test/files/pos/t12792.scala new file mode 100644 index 000000000000..6a6855325700 --- /dev/null +++ b/test/files/pos/t12792.scala @@ -0,0 +1,103 @@ + +//> using options -Werror -Xlint + +import annotation._ + +object Foo { + final val w = 1 << (java.lang.Integer.SIZE - 1) + final val x = 1 << 
(java.lang.Integer.SIZE - 2) + final val y = 1 << (java.lang.Integer.SIZE - 3) + final val z = 1 << (java.lang.Integer.SIZE - 4) + final val c = 0xffffffff & ~w & ~x & ~y & ~z + + final val i = +42 // 42.unary_+ + final val j = -27 // literal -42 +} + +class Ann(value: Int) extends ConstantAnnotation +class Byt(value: Byte) extends ConstantAnnotation + +class Test { + import Foo._ + @Ann(w) def fw = 42 + @Ann(x) def fx = 42 + @Ann(c) def fc = 42 + @Ann(i) def fi = 42 + @Ann(j) def fj = 42 + @Byt(42) def byteMe = 42 +} + +class AnnL(value: Long) extends ConstantAnnotation +class AnnD(value: Double) extends ConstantAnnotation + +object i17446Types { + + final val myInt = 1 << 6 + + // toLong + final val char2Long: 99L = 'c'.toLong + final val int2Long: 0L = 0.toLong + final val long2Long: 0L = 0L.toLong + final val int2LongPropagated: 64L = myInt.toLong + + // toInt + final val char2Int: 99 = 'c'.toInt + final val int2Int: 0 = 0.toInt + final val long2Int: 0 = 0L.toInt + final val long2IntWrapped: -2147483648 = 2147483648L.toInt + final val int2IntPropagated: 64 = myInt.toInt + + // toChar + final val char2Char: 'c' = 'c'.toChar + final val int2Char: 'c' = 99.toChar + final val long2Char: 'c' = 99L.toChar + final val int2CharPropagated: '@' = myInt.toChar + + // chain everything + final val wow: 1.0 = 1.toChar.toInt.toLong.toFloat.toDouble +} +object i17446 { + + final val myInt = 1 << 6 + + // toLong + final val char2Long = 'c'.toLong + final val int2Long = 0.toLong + final val long2Long = 0L.toLong + final val int2LongPropagated = myInt.toLong + + // toInt + final val char2Int = 'c'.toInt + final val int2Int = 0.toInt + final val long2Int = 0L.toInt + final val long2IntWrapped = 2147483648L.toInt + final val int2IntPropagated = myInt.toInt + + // toChar + final val char2Char = 'c'.toChar + final val int2Char = 99.toChar + final val long2Char = 99L.toChar + final val int2CharPropagated = myInt.toChar + + // chain everything + final val wow = 
1.toChar.toInt.toLong.toFloat.toDouble +} +class i17446 { + import i17446._ + @Ann(char2Int) def a = 42 + @Ann(int2Int) def b = 42 + @Ann(long2Int) def c = 42 + @Ann(long2IntWrapped) def d = 42 + @Ann(int2IntPropagated) def e = 42 + @Ann(char2Char) def f = 42 + @Ann(int2Char) def g = 42 + @Ann(long2Char) def h = 42 + @Ann(int2CharPropagated) def i = 42 + + @AnnL(char2Long) def j = 42 + @AnnL(int2Long) def k = 42 + @AnnL(long2Long) def l = 42 + @AnnL(int2LongPropagated) def m = 42 + + @AnnD(wow) def n = 42 +} diff --git a/test/files/pos/t12800/JetBrains.java b/test/files/pos/t12800/JetBrains.java new file mode 100644 index 000000000000..1b8fbbe92538 --- /dev/null +++ b/test/files/pos/t12800/JetBrains.java @@ -0,0 +1,14 @@ + +public enum JetBrains { + APPLE { + @Override public String text() { + return "Cupertino tech company"; + } + }, + ORANGE { + @Override public String text() { + return "SoCal county"; + } + }; + public abstract String text(); +} diff --git a/test/files/pos/t12800/matcher_1.scala b/test/files/pos/t12800/matcher_1.scala new file mode 100644 index 000000000000..52912d2a97fa --- /dev/null +++ b/test/files/pos/t12800/matcher_1.scala @@ -0,0 +1,12 @@ + +//> using options -Werror -Xsource:3 + +import JetBrains.* + +class C { + def f(jb: JetBrains): Int = + jb match { + case APPLE => 42 + case ORANGE => 27 + } +} diff --git a/test/files/pos/t12812.scala b/test/files/pos/t12812.scala new file mode 100644 index 000000000000..e4b84fe79fbd --- /dev/null +++ b/test/files/pos/t12812.scala @@ -0,0 +1,8 @@ + +//> using options -Werror -Xsource:3 -language:postfixOps -Xlint + +class C { + def foo(max: Int) = (1 to max).map(1 to).foreach(r => println(r.mkString(","))) +} + +//java.lang.NullPointerException: Cannot invoke "scala.reflect.internal.Symbols$Symbol.owner()" because the return value of "scala.reflect.internal.Trees$Tree.symbol()" is null diff --git a/test/files/pos/t12814.scala b/test/files/pos/t12814.scala new file mode 100644 index 
000000000000..b2a7980ea4a3 --- /dev/null +++ b/test/files/pos/t12814.scala @@ -0,0 +1,98 @@ +// https://github.com/scala/bug/issues/12814#issuecomment-1822770100 +object t1 { + trait A[X] { type T = X } + object B extends A[String] + object C extends A[B.T] { + def f: C.T = "hai" + } +} + +// https://github.com/scala/bug/issues/12814 +object t2 { + sealed trait Common + sealed trait One extends Common + sealed trait Two extends Common + + + trait Module[C <: Common] { + val name: String + type Narrow = C + def narrow: PartialFunction[Common, C] + } + + object ModuleA extends Module[One] { + val name = "A" + val narrow: PartialFunction[Common, Narrow] = { + case cc: Narrow => cc + } + } + + object ModuleB extends Module[ModuleA.Narrow] { + val name = "B" + val narrow: PartialFunction[Common, Narrow] = { + case cc: Narrow => cc + } + } + + object ModuleC extends Module[Two] { + val name = "C" + val narrow: PartialFunction[Common, Narrow] = { + case cc: Narrow => cc + } + } + + object ModuleD extends Module[One with Two] { + val name = "D" + val narrow: PartialFunction[Common, Narrow] = { + case cc: Narrow => cc + } + } + + val one = new One {} + val two = new Two {} + val oneTwo = new One with Two {} + + Seq(ModuleA, ModuleB, ModuleC, ModuleD).foreach { module => + println(s"${module.name} at One = ${module.narrow.isDefinedAt(one)}") + println(s"${module.name} at Two = ${module.narrow.isDefinedAt(two)}") + println(s"${module.name} at OneTwo = ${module.narrow.isDefinedAt(oneTwo)}") + println("-" * 10) + } +} + +// https://github.com/scala/scala/pull/10457/files +object t3 { + sealed trait A + + sealed trait B extends A + + trait F[C] { + type T = C + } + + object O extends F[B] + + object P1 extends F[O.T] { + val f: PartialFunction[A, P1.T] = { + case x: P1.T => x + } + } + + object P2 extends F[O.T] { + val f: PartialFunction[A, P2.T] = x => x match { + case x: P2.T => x + } + } + + object P3 extends F[O.T] { + val f: Function1[A, P3.T] = { + case x: P3.T => x + } + 
} + + object P4 extends F[O.T] { + val f: Function1[A, P4.T] = x => x match { + case x: P4.T => x + } + } +} \ No newline at end of file diff --git a/test/files/pos/t12830.scala b/test/files/pos/t12830.scala new file mode 100644 index 000000000000..90e1a8a7f237 --- /dev/null +++ b/test/files/pos/t12830.scala @@ -0,0 +1,23 @@ +//> using options -Xsource:3 + +class C { + def i: Int = 42 +} +object D extends C { + override final val i = 27 +} +object Test { + def f: 27 = D.i +} + +/* +t12830.scala:10: error: type mismatch; + found : D.i.type (with underlying type Int) + required: 27 + def f: 27 = D.i + ^ +t12830.scala:7: error: under -Xsource:3, inferred Int instead of Int(27) + override final val i = 27 + ^ +2 errors +*/ diff --git a/test/files/pos/t12851/C_2.scala b/test/files/pos/t12851/C_2.scala new file mode 100644 index 000000000000..859df816094c --- /dev/null +++ b/test/files/pos/t12851/C_2.scala @@ -0,0 +1,2 @@ +//> using options -Werror +class C extends T2 diff --git a/test/files/pos/t12851/T_1.scala b/test/files/pos/t12851/T_1.scala new file mode 100644 index 000000000000..88c0c5d93297 --- /dev/null +++ b/test/files/pos/t12851/T_1.scala @@ -0,0 +1,7 @@ + +trait T1 { + def f: Int +} +trait T2 extends T1 { + def f() = 42 +} diff --git a/test/files/pos/t12851c/ScalaNumber.java b/test/files/pos/t12851c/ScalaNumber.java new file mode 100644 index 000000000000..5ed76ec3fb22 --- /dev/null +++ b/test/files/pos/t12851c/ScalaNumber.java @@ -0,0 +1,20 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.math; + +/** A marker class for Number types introduced by Scala + */ +public abstract class ScalaNumber extends java.lang.Number { + protected abstract boolean isWhole(); + public abstract Object underlying(); +} diff --git a/test/files/pos/t12851c/ScalaNumericConversions.scala b/test/files/pos/t12851c/ScalaNumericConversions.scala new file mode 100644 index 000000000000..c53f2f225aa8 --- /dev/null +++ b/test/files/pos/t12851c/ScalaNumericConversions.scala @@ -0,0 +1,30 @@ +//> using option -Werror +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package math + +/** A slightly more specific conversion trait for classes which + * extend ScalaNumber (which excludes value classes.) + */ +trait ScalaNumericConversions extends ScalaNumber with ScalaNumericAnyConversions { + def underlying: Object +} + +/** Conversions which present a consistent conversion interface + * across all the numeric types, suitable for use in value classes. + */ +trait ScalaNumericAnyConversions extends Any { + /** @return `'''true'''` if this number has no decimal component, `'''false'''` otherwise. 
*/ + def isWhole: Boolean +} diff --git a/test/files/pos/t12858/A.java b/test/files/pos/t12858/A.java new file mode 100644 index 000000000000..ea5091288874 --- /dev/null +++ b/test/files/pos/t12858/A.java @@ -0,0 +1,3 @@ +interface A { + int f(); +} diff --git a/test/files/pos/t12858/B.scala b/test/files/pos/t12858/B.scala new file mode 100644 index 000000000000..3611602cbf1b --- /dev/null +++ b/test/files/pos/t12858/B.scala @@ -0,0 +1,9 @@ +//> using options -Werror + +trait B1 extends A { def f: Int } +trait C1 { def f = 2 } +class T1 extends B1 with C1 + +trait B2 extends A { def f: Int = 1} +trait C2 { self: B2 => override def f = 2 } +class T2 extends B2 with C2 diff --git a/test/files/pos/t12883.scala b/test/files/pos/t12883.scala new file mode 100644 index 000000000000..007e2ae4bfea --- /dev/null +++ b/test/files/pos/t12883.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Xsource:3 + +case class C private (c: Int) { + def copy(c: Int = this.c): C = new C(c) +} + +object C { + def apply(c: Int) = new C(c) +} diff --git a/test/files/pos/t12898.scala b/test/files/pos/t12898.scala deleted file mode 100644 index 45d1ff9ab2d0..000000000000 --- a/test/files/pos/t12898.scala +++ /dev/null @@ -1,13 +0,0 @@ -// scalac: -deprecation -Werror - -import annotation.nowarn - -class T { - @nowarn("msg=unchecked since it is eliminated by erasure") - @nowarn("msg=Pair in object Predef") - def f(x: Any): Int = x match { - case l: List[Int] => l.head - case _ => - Pair(1, 2)._2 - } -} diff --git a/test/files/pos/t12899.scala b/test/files/pos/t12899.scala new file mode 100644 index 000000000000..9d674a198b1c --- /dev/null +++ b/test/files/pos/t12899.scala @@ -0,0 +1,18 @@ +object Kafi { + // OK in 2.12, fails in 2.13 with "missing parameter type for expanded function" + val c1: Cache[(Seq[String], Class[_]), String] = build { + case (sq, cs) => mk(sq, cs) + } + + // OK in both 2.12 and 2.13 + val c2: Cache[(Seq[String], Class[_]), String] = build( + key => mk(key._1, key._2) + ) 
+ + def mk(sq: Seq[String], cs: Class[_]): String = "" + + def build[K, V](): Cache[K, V] = null + def build[K, V](c: Cache[K, V]): Cache[K, V] = null + + trait Cache[K, V] { def load(k: K): V } +} diff --git a/test/files/pos/t12953-expandee/Client_2.scala b/test/files/pos/t12953-expandee/Client_2.scala new file mode 100644 index 000000000000..82fd527122b1 --- /dev/null +++ b/test/files/pos/t12953-expandee/Client_2.scala @@ -0,0 +1,12 @@ + +//> using options -Werror -Wunused:locals -Xlint:missing-interpolator -Wmacros:default + +import Macro.id + +object Test extends App { + println { + id { + println("hello, world of $unusedVariable") + } + } +} diff --git a/test/files/pos/t12953-expandee/Macro_1.scala b/test/files/pos/t12953-expandee/Macro_1.scala new file mode 100644 index 000000000000..921cca9ae4cb --- /dev/null +++ b/test/files/pos/t12953-expandee/Macro_1.scala @@ -0,0 +1,12 @@ + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object Macro { + def id[A](body: A): A = macro impl[A] + + def impl[A: c.WeakTypeTag](c: Context)(body: c.Expr[A]) = { + import c.universe._ + q"""val unusedVariable = "42".toInt; $body""" + } +} diff --git a/test/files/pos/t12953.scala b/test/files/pos/t12953.scala new file mode 100644 index 000000000000..a0b44003a9fb --- /dev/null +++ b/test/files/pos/t12953.scala @@ -0,0 +1,24 @@ + +//> using options -Werror -Wunused:privates + +package example + +import scala.reflect.macros.blackbox + +/*case*/ class A(x: Int) + +class MyMacro(val c: blackbox.Context) { + import c.universe._ + + def impl(tree: c.Tree): Tree = { + tree match { + case q"${a: A}" => + //reify(a).tree // uses $m in expansion + reify(()).tree + case _ => + c.abort(c.enclosingPosition, "err") + } + } + + private implicit def instance: Unliftable[A] = ??? 
+} diff --git a/test/files/pos/t12976.scala b/test/files/pos/t12976.scala new file mode 100644 index 000000000000..05d36de90a81 --- /dev/null +++ b/test/files/pos/t12976.scala @@ -0,0 +1,6 @@ + +//> using options -Xsource:3-cross + +trait T { + def f(c: Char) = raw"\u%04X".format(c.toInt) +} diff --git a/test/files/pos/t12985.scala b/test/files/pos/t12985.scala new file mode 100644 index 000000000000..e95bbdf503f3 --- /dev/null +++ b/test/files/pos/t12985.scala @@ -0,0 +1,8 @@ + +//> using options -Wconf:any:s -Werror -Xlint:cloneable + +class Base extends Cloneable + +object X extends Base + +class Y extends Base diff --git a/test/files/pos/t12987.scala b/test/files/pos/t12987.scala new file mode 100644 index 000000000000..e9a866f4abc9 --- /dev/null +++ b/test/files/pos/t12987.scala @@ -0,0 +1,30 @@ +object typeMember { + class Foo { + type FT + class I + def m(b: FT, o: Option[I]): Int = 0 + } + + object Test { + def f[T]: Foo { type FT = T } = ??? + def t = { + val b: Any = ??? + f.m(b, None) + } + } +} + +object typeParam { + class Foo[FT] { + class I + def m(b: FT, o: Option[I]): Int = 0 + } + + object Test { + def f[T]: Foo[T] = ??? + def t = { + val b: Any = ??? + f.m(b, None) + } + } +} diff --git a/test/files/pos/t12988.scala b/test/files/pos/t12988.scala new file mode 100644 index 000000000000..45234ed3a19b --- /dev/null +++ b/test/files/pos/t12988.scala @@ -0,0 +1,12 @@ +object example { + final case class Toto[@specialized(Int) A] (private val actualToString: String, a: A) { + @inline def description: String = actualToString + } + def toto[A](a: A): Toto[A] = Toto("", a) +} + +object Test extends App { + import example._ + + println(s"Hello World! 
${toto(1)}") +} diff --git a/test/files/pos/t13013.scala b/test/files/pos/t13013.scala new file mode 100644 index 000000000000..3a123b2ef8aa --- /dev/null +++ b/test/files/pos/t13013.scala @@ -0,0 +1,9 @@ +object Node { + trait Root { self: Node => + val root = this + } +} +trait Node { + def root: Node +} +final class RootNode extends Node with Node.Root diff --git a/test/files/pos/t13017/A.java b/test/files/pos/t13017/A.java new file mode 100644 index 000000000000..da6911a86790 --- /dev/null +++ b/test/files/pos/t13017/A.java @@ -0,0 +1,12 @@ +enum En { + @Deprecated Um +} + +@interface Ann { + En value(); +} + +public class A { + @Ann(En.Um) + public static void te() { return; } +} diff --git a/test/files/pos/t13017/Test.scala b/test/files/pos/t13017/Test.scala new file mode 100644 index 000000000000..8f939a5c4eb3 --- /dev/null +++ b/test/files/pos/t13017/Test.scala @@ -0,0 +1,5 @@ +//> using options -Werror -Ypickle-java -deprecation + +object Test { + def t = A.te() +} diff --git a/test/files/pos/t13025.scala b/test/files/pos/t13025.scala new file mode 100644 index 000000000000..6589629cad25 --- /dev/null +++ b/test/files/pos/t13025.scala @@ -0,0 +1,11 @@ +package p1 { + case class Foo[X] private (a: Int) + object Foo { + def apply[X](a: String): Foo[Any] = ??? 
+ } +} +package p2 { + class C { + def x = p1.Foo[Any](0) + } +} diff --git a/test/files/pos/t13041.scala b/test/files/pos/t13041.scala new file mode 100644 index 000000000000..c1bcce529368 --- /dev/null +++ b/test/files/pos/t13041.scala @@ -0,0 +1,26 @@ + +//> using options -Werror -Wunused:patvars -Yvalidate-pos:typer + +class C { + val m = Map( + "first" -> Map((true, 1), (false, 2), (true, 3)), + "second" -> Map((true, 1), (false, 2), (true, 3)) + ) + def f = + m.map { case (a, m1) => + for { + (status, lag) <- m1 if status + } yield (a, status, lag) + } + def g = + for { + (a, m1) <- m + (status, lag) <- m1 if status + } yield (a, status, lag) + def leading = + for { + _ <- List("42") + i = 1 + _ <- List("0", "27")(i) + } yield () +} diff --git a/test/files/pos/t13055.scala b/test/files/pos/t13055.scala new file mode 100644 index 000000000000..e759da628092 --- /dev/null +++ b/test/files/pos/t13055.scala @@ -0,0 +1,28 @@ +//> using options -Xsource:3 -Xsource-features:eta-expand-always + +//import org.scalacheck._, Prop._ + +object Main extends App { + class Prop + class Gen[A] + object Gen { + implicit def const[T](x: T): Gen[T] = ??? + } + + def forAll[T1, P](g: Gen[T1])(f: T1 => P)(implicit p: P => Prop): Prop = ??? + def forAll[A1, P](f: A1 => P)(implicit p: P => Prop): Prop = ??? 
+ + def what() = forAll { + (a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, + a8: Int, + a9: Int, + ) => false + } + +} + +/* + def what(): (((Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean) => Nothing) => Main.Prop = { + val eta$0$1: Main.Gen[(Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean] = Main.this.Gen.const[(Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean](((a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, a8: Int, a9: Int) => false)); + ((f: ((Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean) => Nothing) => Main.this.forAll[(Int, Int, Int, Int, Int, Int, Int, Int, Int) => Boolean, Nothing](eta$0$1)(f)(scala.Predef.$conforms[Nothing])) +*/ diff --git a/test/files/pos/t13060.scala b/test/files/pos/t13060.scala new file mode 100644 index 000000000000..0af5ba710166 --- /dev/null +++ b/test/files/pos/t13060.scala @@ -0,0 +1,26 @@ +class C { + def id[A](r: A): A = r + def bug(x: Int, e: Boolean): Unit = { + x match { + case 1 => id(()) + case 2 if e => + } + println() + } +} + +class D { + def foo(): Unit = () + def b: Boolean = false + + def bug(x: Int): Unit = { + (x: @annotation.switch) match { + case 2 if b => + foo() + case _ if b => + foo() + case _ => + foo() + } + } +} diff --git a/test/files/pos/t13066.scala b/test/files/pos/t13066.scala new file mode 100644 index 000000000000..43a1a8a01159 --- /dev/null +++ b/test/files/pos/t13066.scala @@ -0,0 +1,17 @@ + +//> using options -Werror + +package testsamepackageimport { + package p { + class C + } + + package p { + package q { + import p._ // no warn + class U { + def f = new C + } + } + } +} diff --git a/test/files/pos/t13089.scala b/test/files/pos/t13089.scala new file mode 100644 index 000000000000..b54db5d43f72 --- /dev/null +++ b/test/files/pos/t13089.scala @@ -0,0 +1,17 @@ +//> using options -Werror + +trait F + +class T { + def t1 = "" == Some("").getOrElse(None) // used to warn incorrectly, because a RefinementClassSymbol is unrelated to 
String + + def a: T with Serializable = null + def b: Serializable with T = null + def t2 = "" == a // no warn, the implementation bails on intersection types + def t3 = "" == b // no warn + + def t1(a: F, b: Product with F) = a == b // no warn + def t2(a: F, b: F with Product) = a == b // no warn + def t3(a: F with Product, b: F) = a == b // no warn + def t4(a: Product with F, b: F) = a == b // no warn +} diff --git a/test/files/pos/t1318.scala b/test/files/pos/t1318.scala index f3d2f7ab5aec..34e2bc8dd5b3 100644 --- a/test/files/pos/t1318.scala +++ b/test/files/pos/t1318.scala @@ -28,4 +28,4 @@ object C { def ab = List(A) ::: List(B) // the following compiles successfully: // def ab = List(A) ::: List[M](B) -} \ No newline at end of file +} diff --git a/test/files/pos/t1357.scala b/test/files/pos/t1357.scala index fcdecb3ad3c5..05f8d20ed602 100644 --- a/test/files/pos/t1357.scala +++ b/test/files/pos/t1357.scala @@ -18,4 +18,4 @@ object Main { } } } -} \ No newline at end of file +} diff --git a/test/files/pos/t1381-new.scala b/test/files/pos/t1381-new.scala index 57e0f31fba57..b0b300c6f6f9 100644 --- a/test/files/pos/t1381-new.scala +++ b/test/files/pos/t1381-new.scala @@ -28,4 +28,4 @@ abstract class EV[T](initval:T) extends PV[T](initval) with IV { override def d : ID[VT] = null // Comment this out to eliminate crash protected var indx = d.index(initval) -} \ No newline at end of file +} diff --git a/test/files/pos/t1391.scala b/test/files/pos/t1391.scala index 8133da54826a..5178ba987798 100644 --- a/test/files/pos/t1391.scala +++ b/test/files/pos/t1391.scala @@ -35,7 +35,7 @@ package sandbox } } - def foo () { + def foo (): Unit = { val t = new NAnB.nA println(t) } diff --git a/test/files/pos/t1439.scala b/test/files/pos/t1439.scala index b5d9170e4c60..df8fed01656a 100644 --- a/test/files/pos/t1439.scala +++ b/test/files/pos/t1439.scala @@ -1,6 +1,4 @@ -// scalac: -unchecked -Xfatal-warnings -language:higherKinds - -import language.higherKinds +//> using 
options -Werror // no unchecked warnings class View[C[A]] { } @@ -8,5 +6,6 @@ class View[C[A]] { } object Test { (null: Any) match { case v: View[_] => + case _ => } } diff --git a/test/files/pos/t1479.scala b/test/files/pos/t1479.scala new file mode 100644 index 000000000000..cd1756e1560f --- /dev/null +++ b/test/files/pos/t1479.scala @@ -0,0 +1,32 @@ +class A { + class C + class Inf[X<:C](x: X) + def f[X<:C](x: X) = () +} + +object Test extends A { + val c: C = new C + f(c) + new Inf(c) +} + +/* +snips $ ~/scala-2.13.0-M2/bin/scalac -d /tmp t1479.scala +t1479.scala:10: error: inferred type arguments [Test.C] do not conform to class Inf's type parameter bounds [X <: A.this.C] + new Inf(c) + ^ +t1479.scala:10: error: type mismatch; + found : Test.C + required: X + new Inf(c) + ^ +two errors found +snips $ ~/scala-2.13.0-M3/bin/scalac -d /tmp t1479.scala +t1479.scala:10: error: type mismatch; + found : Test.C + required: A.this.C + new Inf(c) + ^ +one error found +snips $ ~/scala-2.13.0-M5/bin/scalac -d /tmp t1479.scala +*/ diff --git a/test/files/pos/t1569.scala b/test/files/pos/t1569.scala index e5f9553268ff..a7200a6d1ebc 100644 --- a/test/files/pos/t1569.scala +++ b/test/files/pos/t1569.scala @@ -1,5 +1,5 @@ object Bug { class C { type T } - def foo(x: Int)(y: C)(z: y.T) {} + def foo(x: Int)(y: C)(z: y.T): Unit = {} foo(3)(new C { type T = String })("hello") -} \ No newline at end of file +} diff --git a/test/files/pos/t159.scala b/test/files/pos/t159.scala index 95c47da23ab0..4d67f8afffae 100644 --- a/test/files/pos/t159.scala +++ b/test/files/pos/t159.scala @@ -1,7 +1,7 @@ object foo { // the problem seems to appear only // if "val _" is in the body of a case - def cooked(ckd: StringBuilder) { + def cooked(ckd: StringBuilder): Unit = { 'a' match { case '-' => val _ = ckd.append( '_' ); @@ -12,7 +12,7 @@ object foo { } object foo1 { - def f() { + def f(): Unit = { 1 match { case 2 => val _ = 1; case 3 => val _ = 2; diff --git a/test/files/pos/t1614/foo.scala 
b/test/files/pos/t1614/foo.scala index e6cc41bafd67..bb8960af6c1a 100644 --- a/test/files/pos/t1614/foo.scala +++ b/test/files/pos/t1614/foo.scala @@ -1,12 +1,12 @@ // foo.scala trait Foo { - def foo(arg: List[_]) + def foo(arg: List[_]): Unit } trait FooImpl extends Foo { - def foo(arg: List[_]) {} + def foo(arg: List[_]): Unit = {} } trait AbstractOverrideFoo extends Foo { - abstract override def foo(arg: List[_]) { + abstract override def foo(arg: List[_]): Unit = { super.foo(arg) } } diff --git a/test/files/pos/t1642/JavaCallingScalaHashMap.java b/test/files/pos/t1642/JavaCallingScalaHashMap.java index 976e465ff7c4..7a15e952a07c 100644 --- a/test/files/pos/t1642/JavaCallingScalaHashMap.java +++ b/test/files/pos/t1642/JavaCallingScalaHashMap.java @@ -1,8 +1,8 @@ -import scala.collection.immutable.HashMap; +import scala.collection.immutable.HashMap$; import scala.collection.immutable.Map; public class JavaCallingScalaHashMap { public static void main( String[] args ) { - Map hashMap = new HashMap(); + Map hashMap = HashMap$.MODULE$.empty(); } } diff --git a/test/files/pos/t1675.scala b/test/files/pos/t1675.scala index dcf1bf5a08ce..8630890eed91 100644 --- a/test/files/pos/t1675.scala +++ b/test/files/pos/t1675.scala @@ -6,6 +6,6 @@ object Foo extends pack.Bar { } package pack { class Bar { - protected def test(s: String*) {} + protected def test(s: String*): Unit = {} } } diff --git a/test/files/pos/t1693.scala b/test/files/pos/t1693.scala index f3615f475632..881bf89a00d7 100644 --- a/test/files/pos/t1693.scala +++ b/test/files/pos/t1693.scala @@ -6,4 +6,4 @@ object Test { implicit def mkOtherOps(x: Foo) : OtherOps = new OtherOps(x) (new Foo).foo(1) -} \ No newline at end of file +} diff --git a/test/files/pos/t1748.scala b/test/files/pos/t1748.scala new file mode 100644 index 000000000000..66dc44c26d54 --- /dev/null +++ b/test/files/pos/t1748.scala @@ -0,0 +1,9 @@ +object Test { + class C[K,V] + class J { def f[K,V](x: C[_ >: K, _ <: V]): C[K,V] = null } + 
object o { def go() = (new J).f(new C) } + + class D[K,V] + def f[K,V](x: D[_ >: K, _ <: V]) = x + f(new D) +} diff --git a/test/files/pos/t1782/Test_1.scala b/test/files/pos/t1782/Test_1.scala index 6467a74c2974..47495c082929 100644 --- a/test/files/pos/t1782/Test_1.scala +++ b/test/files/pos/t1782/Test_1.scala @@ -1,6 +1,6 @@ @ImplementedBy(classOf[Provider]) trait Service { - def someMethod() + def someMethod(): Unit } class Provider diff --git a/test/files/pos/t1786-counter.scala b/test/files/pos/t1786-counter.scala index dd706073effc..d8e31fec5bff 100644 --- a/test/files/pos/t1786-counter.scala +++ b/test/files/pos/t1786-counter.scala @@ -35,4 +35,4 @@ t1786-counter.scala:10: error: class TupleShape needs to be abstract, since meth final class TupleShape[Level <: ShapeLevel, M <: Product, U <: Product, P <: Product](val shapes: Shape[_, _, _, _]*) extends ProductNodeShape[Level, Product, M, U, P] { ^ one error found -*/ \ No newline at end of file +*/ diff --git a/test/files/pos/t1786-cycle.scala b/test/files/pos/t1786-cycle.scala index 6cd8d4f94453..3d9cbff72ffa 100644 --- a/test/files/pos/t1786-cycle.scala +++ b/test/files/pos/t1786-cycle.scala @@ -54,4 +54,4 @@ Okay again after scala/bug#1786 was reverted. 
| | | | | caught scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving trait LongTraversableLike: while typing GenTraversableLike[Any, Repr] test/files/pos/t1786-cycle.scala:11: error: illegal cyclic reference involving trait LongTraversableLike trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenT -*/ \ No newline at end of file +*/ diff --git a/test/files/pos/t1803.scala b/test/files/pos/t1803.scala deleted file mode 100644 index 85b3f6315518..000000000000 --- a/test/files/pos/t1803.scala +++ /dev/null @@ -1,3 +0,0 @@ -// scalac: -Yinfer-argument-types -class A { def foo[A](a: A) = a } -class B extends A { override def foo[A](b) = b } diff --git a/test/files/pos/t1832.scala b/test/files/pos/t1832.scala index f3bb556e3210..7e435d70b583 100644 --- a/test/files/pos/t1832.scala +++ b/test/files/pos/t1832.scala @@ -5,4 +5,4 @@ trait Cloning { implicit def mkStar(i: Int) = new { def *(a: Foo): Foo = null } val pool = 4 * fn { case ghostSYMBOL: Int => ghostSYMBOL * 2 } -} \ No newline at end of file +} diff --git a/test/files/pos/t1843.scala b/test/files/pos/t1843.scala index cc73353b75fc..8504bf342cef 100644 --- a/test/files/pos/t1843.scala +++ b/test/files/pos/t1843.scala @@ -10,7 +10,7 @@ object Crash { case object IntegerUpdateType extends UpdateType[Integer] //However this method will cause a crash - def crash(updates: List[StateUpdate[_]]) { + def crash(updates: List[StateUpdate[_]]): Unit = { updates match { case Nil => case u::us => diff --git a/test/files/pos/t1858.scala b/test/files/pos/t1858.scala index c06e73e7e61e..99e88c7af52f 100644 --- a/test/files/pos/t1858.scala +++ b/test/files/pos/t1858.scala @@ -1,4 +1,4 @@ -import scala.collection.immutable.Stack +import scala.collection.mutable.Stack object Test { diff --git a/test/files/pos/t1896/D0.scala b/test/files/pos/t1896/D0.scala index 05febb24ff21..6b3150d96916 100644 --- a/test/files/pos/t1896/D0.scala +++ b/test/files/pos/t1896/D0.scala @@ -3,9 +3,9 @@ 
package p class X[T] trait A { - def m(s:X[_]) {} + def m(s:X[_]): Unit = {} } trait B extends A { - def f { super.m(null) } + def f: Unit = { super.m(null) } } diff --git a/test/files/pos/t1974.scala b/test/files/pos/t1974.scala index a0daa13c2124..3f4d41e7fbbf 100644 --- a/test/files/pos/t1974.scala +++ b/test/files/pos/t1974.scala @@ -17,4 +17,4 @@ object Works2 { def addToMap(c : Class[_], s : String) = map += ((c : Class[_]) -> s) def fetch(c : Class[_]) = map(c) -} \ No newline at end of file +} diff --git a/test/files/pos/t2018.scala b/test/files/pos/t2018.scala index 1736c394c9c7..198b4be42a2f 100644 --- a/test/files/pos/t2018.scala +++ b/test/files/pos/t2018.scala @@ -12,4 +12,4 @@ class A { if (c == p) p else c.b.getAncestor(c) } } -} \ No newline at end of file +} diff --git a/test/files/pos/t2030.scala b/test/files/pos/t2030.scala index 4a70cf662821..60260e5ae539 100644 --- a/test/files/pos/t2030.scala +++ b/test/files/pos/t2030.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3.0 +//> using options -Xsource:3.0 import scala.collection.immutable._ object Test extends App { diff --git a/test/files/pos/t2038.scala b/test/files/pos/t2038.scala index 17b1a702dd79..8c8ca44da3b3 100644 --- a/test/files/pos/t2038.scala +++ b/test/files/pos/t2038.scala @@ -2,4 +2,4 @@ class Test { List(Some(classOf[java.lang.Integer]), Some(classOf[Int])).map { case Some(f: Class[_]) => f.cast(???) 
} -} \ No newline at end of file +} diff --git a/test/files/pos/t2066-2.10-compat.scala b/test/files/pos/t2066-2.10-compat.scala deleted file mode 100644 index ff7d18e24d8c..000000000000 --- a/test/files/pos/t2066-2.10-compat.scala +++ /dev/null @@ -1,72 +0,0 @@ -// scalac: -Xsource:2.10 -import language._ -trait A1 { - def f[T[_]] = () -} - -trait B1 extends A1 { - override def f[T[+_]] = () -} - -trait C1 extends A1 { - override def f[T[-_]] = () -} - - -trait A2 { - def f[T[+_]] = () -} - -trait B2 extends A2 { - override def f[T[_]] = () // okay -} - -trait C2 extends A2 { - override def f[T[-_]] = () -} - - -trait A3 { - def f[T[-_]] = () -} - -trait B3 extends A3 { - override def f[T[_]] = () // okay -} - -trait C3 extends A3 { - override def f[T[-_]] = () -} - - -trait A4 { - def f[T[X[+_]]] = () -} - -trait B4 extends A4 { - override def f[T[X[_]]] = () -} - -trait A5 { - def f[T[X[-_]]] = () -} - -trait B5 extends A5 { - override def f[T[X[_]]] = () -} - - - -trait A6 { - def f[T[X[_]]] = () -} - -trait B6 extends A6 { - override def f[T[X[+_]]] = () // okay -} -trait C6 extends A6 { - override def f[T[X[_]]] = () // okay -} -trait D6 extends A6 { - override def f[T[X[-_]]] = () -} diff --git a/test/files/pos/t2082.scala b/test/files/pos/t2082.scala index 3a160612fe07..a7ee3789b994 100644 --- a/test/files/pos/t2082.scala +++ b/test/files/pos/t2082.scala @@ -30,7 +30,7 @@ object Main { (parents: List[PType], metaMapper: CMetaType, keyGetter: (PType) => FKType ): Map[Long, CType] = Map.empty - def callIt { + def callIt: Unit = { oneToOneJoin[TestRun, TestSubject, MetaTestSubject, MappedForeignKey[Long, TestRun, TestSubject]]( List(), TestSubject, (tr: TestRun) => tr.testSubject) diff --git a/test/files/pos/t2094.scala b/test/files/pos/t2094.scala index ff142117b2ee..6b6c4f077a69 100644 --- a/test/files/pos/t2094.scala +++ b/test/files/pos/t2094.scala @@ -28,4 +28,4 @@ case class UnknownValue() extends Value { case class KnownType(typ: Class[_]) extends Value 
{ def getType = Some(typ) -} \ No newline at end of file +} diff --git a/test/files/pos/t2127.scala b/test/files/pos/t2127.scala index d6244c14a2f5..88cc9e7081ab 100644 --- a/test/files/pos/t2127.scala +++ b/test/files/pos/t2127.scala @@ -2,4 +2,4 @@ class Foo private (val value : Int) abstract class Bar(val ctor : (Int) => Foo) -object Foo extends Bar(new Foo(_)) //<--- ILLEGAL ACCESS \ No newline at end of file +object Foo extends Bar(new Foo(_)) //<--- ILLEGAL ACCESS diff --git a/test/files/pos/t2183.scala b/test/files/pos/t2183.scala deleted file mode 100644 index 1243568b638f..000000000000 --- a/test/files/pos/t2183.scala +++ /dev/null @@ -1,5 +0,0 @@ -import scala.collection.mutable._ - -object Test { - val m = new HashSet[String] with SynchronizedSet[String] -} diff --git a/test/files/pos/t2187-2.scala b/test/files/pos/t2187-2.scala index 3f2742dd89cd..506cc496f50e 100644 --- a/test/files/pos/t2187-2.scala +++ b/test/files/pos/t2187-2.scala @@ -4,4 +4,4 @@ class Test { case Seq(x) => "x" case _ => "something else" } -} \ No newline at end of file +} diff --git a/test/files/pos/t2208_pos.scala b/test/files/pos/t2208_pos.scala index f1a521b14c65..dd6d686baf46 100644 --- a/test/files/pos/t2208_pos.scala +++ b/test/files/pos/t2208_pos.scala @@ -5,4 +5,4 @@ object Test { type Alias[X <: A] = B[X] val foo: B[A] = new Alias[A] // check that type aliases can be instantiated -} \ No newline at end of file +} diff --git a/test/files/pos/t2261.scala b/test/files/pos/t2261.scala index aac5c9e0fd30..06360d50010c 100644 --- a/test/files/pos/t2261.scala +++ b/test/files/pos/t2261.scala @@ -6,4 +6,4 @@ object Test { // the problem here was that somehow the type variable that was used to infer the type argument for List.apply // would accumulate several conflicting constraints // can't reproduce with -} \ No newline at end of file +} diff --git a/test/files/pos/t2293.scala b/test/files/pos/t2293.scala deleted file mode 100644 index baa44552c9b8..000000000000 --- 
a/test/files/pos/t2293.scala +++ /dev/null @@ -1,5 +0,0 @@ -import scala.collection.convert.ImplicitConversionsToJava._ - -object Test { - val m: java.util.Map[String,String] = collection.mutable.Map("1"->"2") -} diff --git a/test/files/pos/t2305.scala b/test/files/pos/t2305.scala index 6b66c5db131f..3338ab91192c 100644 --- a/test/files/pos/t2305.scala +++ b/test/files/pos/t2305.scala @@ -11,12 +11,12 @@ object Bind { object works { // this works fine: - def runbind(implicit bind: Bind[MySerializable]) {} + def runbind(implicit bind: Bind[MySerializable]): Unit = {} runbind } object breaks { - def runbind(implicit bind: Bind[ArrayList]) {} + def runbind(implicit bind: Bind[ArrayList]): Unit = {} runbind /*java.lang.AssertionError: assertion failed: java.io.Serializable at scala.Predef$.assert(Predef.scala:107) diff --git a/test/files/pos/t2399.scala b/test/files/pos/t2399.scala index 07882dd5490c..a99998a0a959 100644 --- a/test/files/pos/t2399.scala +++ b/test/files/pos/t2399.scala @@ -11,4 +11,4 @@ trait T[A, This >: Null <: That1[A] with T[A, This]] extends That2[A, This] { def loop(x: This, cnt: Int): Int = if (x.isEmpty) cnt else loop(x.next, cnt + 1) loop(self, 0) } -} \ No newline at end of file +} diff --git a/test/files/pos/t2409/t2409.scala b/test/files/pos/t2409/t2409.scala index 5775008fc43f..0412f7d82853 100644 --- a/test/files/pos/t2409/t2409.scala +++ b/test/files/pos/t2409/t2409.scala @@ -1 +1 @@ -object S { new J(null) } \ No newline at end of file +object S { new J(null) } diff --git a/test/files/pos/t2421_delitedsl.scala b/test/files/pos/t2421_delitedsl.scala index a05887023a13..3c889f668a1b 100644 --- a/test/files/pos/t2421_delitedsl.scala +++ b/test/files/pos/t2421_delitedsl.scala @@ -10,8 +10,7 @@ trait DeliteDSL { case class DeliteInt(x: Int) extends Forcible[Int] implicit val forcibleInt = Forcible.factory(DeliteInt(_: Int)) - import scala.collection.Traversable - class DeliteCollection[T](val xs: Traversable[T]) { + class 
DeliteCollection[T](val xs: Iterable[T]) { // must use existential in bound of P, instead of T itself, because we cannot both have: // Test.x below: DeliteCollection[T=Int] -> P=DeliteInt <: Forcible[T=Int], as T=Int <~< P=DeliteInt // Test.xAlready below: DeliteCollection[T=DeliteInt] -> P=DeliteInt <: Forcible[T=DeliteInt], as T=DeliteInt <~< P=DeliteInt @@ -34,4 +33,4 @@ trait DeliteDSL { val xAlready = new DeliteCollection(List(DeliteInt(1),DeliteInt(2),DeliteInt(3))).headProxy // inferred: val xAlready: DeliteInt = new DeliteCollection[DeliteInt](List.apply[DeliteInt](DeliteInt(1), DeliteInt(2), DeliteInt(3))).headProxy[DeliteInt](trivial[DeliteInt]); } -} \ No newline at end of file +} diff --git a/test/files/pos/t2421c.scala b/test/files/pos/t2421c.scala index 755e6a39f0c1..d212fb9036f9 100644 --- a/test/files/pos/t2421c.scala +++ b/test/files/pos/t2421c.scala @@ -14,4 +14,4 @@ object Test { implicit def b[X <: B](implicit mx: G[X]) = new F[X]() f -} \ No newline at end of file +} diff --git a/test/files/pos/t2429.scala b/test/files/pos/t2429.scala index 550681b6a25d..4cda3bde1c48 100644 --- a/test/files/pos/t2429.scala +++ b/test/files/pos/t2429.scala @@ -7,7 +7,7 @@ object Msg { implicit def fromSeq(s: Seq[T]): TSeq = sys.error("stub") } - def render { + def render: Unit = { val msgs: TSeq = (List[(Any, Any)]().flatMap { case (a, b) => { a match { diff --git a/test/files/pos/t2458/A.scala b/test/files/pos/t2458/A.scala new file mode 100644 index 000000000000..4b0710990695 --- /dev/null +++ b/test/files/pos/t2458/A.scala @@ -0,0 +1,10 @@ + +package p + +// was: test/files/pos/t2458/A.scala:4: warning: imported `BitSet` is permanently hidden by definition of class BitSet in package p + +import scala.collection.BitSet + +trait A { + val b: BitSet = new scala.collection.mutable.BitSet +} diff --git a/test/files/pos/t2458/B.scala b/test/files/pos/t2458/B.scala new file mode 100644 index 000000000000..2313ae59e621 --- /dev/null +++ b/test/files/pos/t2458/B.scala 
@@ -0,0 +1,4 @@ + +package p + +class BitSet diff --git a/test/files/pos/t247.scala b/test/files/pos/t247.scala index fdcafeb2c6cc..7059cd272b1a 100644 --- a/test/files/pos/t247.scala +++ b/test/files/pos/t247.scala @@ -2,6 +2,8 @@ class Order[t](less:(t,t) => Boolean,equal:(t,t) => Boolean) {} trait Map[A, B] extends scala.collection.Map[A, B] { val factory:MapFactory[A] + def -(key1: A, key2: A, keys: A*): Map[A, B] = null + def -(key: A): Map[A, B] = null } abstract class MapFactory[A] { def Empty[B]:Map[A,B]; diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala index 88da6aaac81c..15165c247c1e 100644 --- a/test/files/pos/t2484.scala +++ b/test/files/pos/t2484.scala @@ -13,7 +13,7 @@ class Admin extends javax.swing.JApplet { } // t2630.scala object Test { - def meh(xs: List[Any]) { + def meh(xs: List[Any]): Unit = { xs map { x => (new AnyRef {}) } } } diff --git a/test/files/pos/t2500.scala b/test/files/pos/t2500.scala index 76dea4cf8d19..d0ff99a93747 100644 --- a/test/files/pos/t2500.scala +++ b/test/files/pos/t2500.scala @@ -3,4 +3,4 @@ object Test { ((Map(1 -> "a", 2 -> "b"): collection.Map[Int, String]) map identity[(Int, String)]) : scala.collection.Map[Int,String] ((SortedMap(1 -> "a", 2 -> "b"): collection.SortedMap[Int, String]) map identity[(Int, String)]): scala.collection.SortedMap[Int,String] ((SortedSet(1, 2): collection.SortedSet[Int]) map identity[Int]): scala.collection.SortedSet[Int] -} \ No newline at end of file +} diff --git a/test/files/pos/t2509-5.scala b/test/files/pos/t2509-5.scala new file mode 100644 index 000000000000..ea067fc896be --- /dev/null +++ b/test/files/pos/t2509-5.scala @@ -0,0 +1,15 @@ +// See https://github.com/scala/scala3/issues/2974 +//> using options -Xsource:3 -Xsource-features:implicit-resolution + +trait Foo[-T] + +trait Bar[-T] extends Foo[T] + +object Test { + implicit val fa: Foo[Any] = ??? + implicit val ba: Bar[Int] = ??? 
+ + def test: Unit = { + implicitly[Foo[Int]] + } +} diff --git a/test/files/pos/t2509-6.scala b/test/files/pos/t2509-6.scala new file mode 100644 index 000000000000..5d21d986becf --- /dev/null +++ b/test/files/pos/t2509-6.scala @@ -0,0 +1,41 @@ +//> using options -Xsource:3 -Xsource-features:implicit-resolution +class A +class B extends A + +trait Y { + def value: String +} + +trait X[-T] { + def y(t: T): Y +} + +trait Z[-T] extends X[T] + +object XA extends X[A] { + def y(a: A) = new Y { def value = s"${a.getClass}: AValue" } +} + +object ZB extends Z[B] { + def y(b: B) = new Y { def value = s"${b.getClass}: BValue" } +} + +object Test { + implicit def f[T](t: T)(implicit x: X[T]): Y = x.y(t) + implicit val za: X[A] = XA + implicit val xb: Z[B] = ZB + + def main(argv: Array[String]): Unit = { + val a = new A + val b = new B + println("A: " + a.value) + println("B: " + b.value) + } +} + +/* +t2509-6.scala:31: error: value value is not a member of B + println("B: " + b.value) + ^ +one error found + */ diff --git a/test/files/pos/t2509-7a.scala b/test/files/pos/t2509-7a.scala new file mode 100644 index 000000000000..5927e67bddfc --- /dev/null +++ b/test/files/pos/t2509-7a.scala @@ -0,0 +1,33 @@ +//> using options -Xsource:3 -Xsource-features:implicit-resolution + +class Both[-A, +B] + +trait Factory[A] { + implicit def make: Both[A, A] = new Both[A, A] +} + +trait X +object X extends Factory[X] { + override implicit def make: Both[X, X] = super.make +} + +class Y extends X +object Y extends Factory[Y] { + // See test/files/neg/t2509-7b.scala ... discussion below + override implicit def make: Both[Y, Y] = super.make +} + +object Test { + def get(implicit ev: Both[Y, X]) = ev + + // There are two possible implicits here: X.make and Y.make, neither are + // subtype of each other, so who wins? 
+ // - Under the old scheme it's X.make because `isAsGood` sees that X.make is defined + // in X whereas Y.make is defined in Factory + // - Under the new scheme it's ambiguous because we replace contravariance by covariance + // in top-level type parameters so Y.make is treated as a subtype of X.make + // In both schemes we can get Y.make to win by uncommenting the override for make in Y + // (Y wins against X because `isDerived` also considers the subtyping relationships + // of companion classes) + get +} diff --git a/test/files/pos/t2591.scala b/test/files/pos/t2591.scala index d3c32ba4e9dd..47ae551bfd3e 100644 --- a/test/files/pos/t2591.scala +++ b/test/files/pos/t2591.scala @@ -12,4 +12,4 @@ object Test { import Implicits.imp (new A) : Int -} \ No newline at end of file +} diff --git a/test/files/pos/t2610.scala b/test/files/pos/t2610.scala index 8dd4cde66eb6..8a82b4a72f87 100644 --- a/test/files/pos/t2610.scala +++ b/test/files/pos/t2610.scala @@ -14,4 +14,4 @@ package object bbb { def bar = () aaa.foo // value foo is not a member of package mada.defects.tests.aaa } -*/ \ No newline at end of file +*/ diff --git a/test/files/pos/t2619.scala b/test/files/pos/t2619.scala index 565bc9572b71..283d93bf2b97 100644 --- a/test/files/pos/t2619.scala +++ b/test/files/pos/t2619.scala @@ -77,4 +77,4 @@ object ModuleBM extends ModuleType1 { ModuleBL, ModuleBE ) ::: List(ModuleBK) -} \ No newline at end of file +} diff --git a/test/files/pos/t2660.scala b/test/files/pos/t2660.scala index b1908b201ba9..d42dcc72b5d6 100644 --- a/test/files/pos/t2660.scala +++ b/test/files/pos/t2660.scala @@ -18,8 +18,8 @@ class A[T](x: T) { } object T { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { implicit def g2h(g: G): H = new H new A(new H, 23) } -} \ No newline at end of file +} diff --git a/test/files/pos/t2665.scala b/test/files/pos/t2665.scala index 3163e313266f..e46453534c0f 100644 --- a/test/files/pos/t2665.scala +++ b/test/files/pos/t2665.scala @@ 
-1,3 +1,3 @@ object Test { val x: Unit = Array("") -} \ No newline at end of file +} diff --git a/test/files/pos/t2667.scala b/test/files/pos/t2667.scala index b214cc7f3769..7f1f36f00bad 100644 --- a/test/files/pos/t2667.scala +++ b/test/files/pos/t2667.scala @@ -3,4 +3,4 @@ object A { def foo[T](x: T*): Int = 55 val x: Unit = foo(23, 23f) -} \ No newline at end of file +} diff --git a/test/files/pos/t267.scala b/test/files/pos/t267.scala index 7e5876eae99b..abb0038c2000 100644 --- a/test/files/pos/t267.scala +++ b/test/files/pos/t267.scala @@ -1,9 +1,9 @@ package expAbstractData /** A base class consisting of - * - a root trait (i.e. abstract class) `Exp' with an `eval' function - * - an abstract type `exp' bounded by `Exp' - * - a concrete instance class `Num' of `Exp' for numeric literals + * - a root trait (i.e. abstract class) `Exp` with an `eval` function + * - an abstract type `exp` bounded by `Exp` + * - a concrete instance class `Num` of `Exp` for numeric literals */ trait Base { type exp <: Exp @@ -23,7 +23,7 @@ object testBase extends App with Base { Console.println(term.eval) } -/** Data extension: An extension of `Base' with `Plus' expressions +/** Data extension: An extension of `Base` with `Plus` expressions */ trait BasePlus extends Base { class Plus(l: exp, r: exp) extends Exp { self: exp => @@ -33,7 +33,7 @@ trait BasePlus extends Base { } } -/** Operation extension: An extension of `Base' with 'show' methods. +/** Operation extension: An extension of `Base` with 'show' methods. */ trait Show extends Base { type exp <: Exp1 @@ -46,7 +46,7 @@ trait Show extends Base { } } -/** Operation extension: An extension of `BasePlus' with 'show' methods. +/** Operation extension: An extension of `BasePlus` with 'show' methods. 
*/ trait ShowPlus extends BasePlus with Show { class Plus1(l: exp, r: exp) extends Plus(l, r) with Exp1 { self: exp with Plus1 => diff --git a/test/files/pos/t2691.scala b/test/files/pos/t2691.scala index 5f0ddd122f87..0a18f2ea9442 100644 --- a/test/files/pos/t2691.scala +++ b/test/files/pos/t2691.scala @@ -4,13 +4,13 @@ object Breakdown { object Test { 42 match { case Breakdown("") => // needed to trigger bug - case Breakdown("", who) => println ("hello " + who) + case Breakdown("", who) => println("hello " + who) } } object Test2 { 42 match { case Breakdown("") => // needed to trigger bug case Breakdown("foo") => // needed to trigger bug - case Breakdown("", who) => println ("hello " + who) + case Breakdown("", who) => println("hello " + who) } } diff --git a/test/files/pos/t2693.scala b/test/files/pos/t2693.scala index 97732cf08129..5d4d0380c418 100644 --- a/test/files/pos/t2693.scala +++ b/test/files/pos/t2693.scala @@ -3,4 +3,4 @@ class A { def usetHk[T[_], A](ta: T[A]) = 0 usetHk(new T[Int]{}: T[Int]) usetHk(new T[Int]{}) // fails with: found: java.lang.Object with T[Int], required: ?T[ ?A ] -} \ No newline at end of file +} diff --git a/test/files/pos/t2712-1.scala b/test/files/pos/t2712-1.scala index 5a1585e7fdfb..0c3ac4242e19 100644 --- a/test/files/pos/t2712-1.scala +++ b/test/files/pos/t2712-1.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test // Original test case from, diff --git a/test/files/pos/t2712-2.scala b/test/files/pos/t2712-2.scala index 81f2e9ecfe92..39f22dd92a79 100644 --- a/test/files/pos/t2712-2.scala +++ b/test/files/pos/t2712-2.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test // See: https://github.com/milessabin/si2712fix-demo/issues/3 diff --git a/test/files/pos/t2712-3.scala b/test/files/pos/t2712-3.scala index c8b8666efd21..46445f9289f7 100644 --- a/test/files/pos/t2712-3.scala +++ b/test/files/pos/t2712-3.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test object Test1 { diff 
--git a/test/files/pos/t2712-4.scala b/test/files/pos/t2712-4.scala index 2814e3658cc2..3e2e5cddaedf 100644 --- a/test/files/pos/t2712-4.scala +++ b/test/files/pos/t2712-4.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test object Test1 { diff --git a/test/files/pos/t2712-5.scala b/test/files/pos/t2712-5.scala index ed9de2ca4a91..ed96d4c06fcc 100644 --- a/test/files/pos/t2712-5.scala +++ b/test/files/pos/t2712-5.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test import scala.language.higherKinds diff --git a/test/files/pos/t2712-6.scala b/test/files/pos/t2712-6.scala index c693ad7caa28..eefe769ad652 100644 --- a/test/files/pos/t2712-6.scala +++ b/test/files/pos/t2712-6.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test object Tags { diff --git a/test/files/pos/t2712-7.scala b/test/files/pos/t2712-7.scala index 558fe5450a79..d9c5243f132d 100644 --- a/test/files/pos/t2712-7.scala +++ b/test/files/pos/t2712-7.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package test // Cats Xor, Scalaz \/, scala.util.Either diff --git a/test/files/pos/t2741/2741_1.scala b/test/files/pos/t2741/2741_1.scala index d47ed3b6cbeb..d9d04f7ab0b9 100644 --- a/test/files/pos/t2741/2741_1.scala +++ b/test/files/pos/t2741/2741_1.scala @@ -6,4 +6,4 @@ trait MAs { val a: MA[Partial#Apply] = null // after compilation, the type is pickled as `MA[ [B] List[B] ]` } -object Scalaz extends MAs \ No newline at end of file +object Scalaz extends MAs diff --git a/test/files/pos/t2741/2741_2.scala b/test/files/pos/t2741/2741_2.scala index 41f6a64260e4..a9fd9d7d0ee1 100644 --- a/test/files/pos/t2741/2741_2.scala +++ b/test/files/pos/t2741/2741_2.scala @@ -2,4 +2,4 @@ object Test { import Scalaz._ Scalaz.a -} \ No newline at end of file +} diff --git a/test/files/pos/t2764/Use.scala b/test/files/pos/t2764/Use.scala index 8cf810270944..b0c108907071 100644 --- a/test/files/pos/t2764/Use.scala +++ b/test/files/pos/t2764/Use.scala @@ -2,5 +2,5 @@ 
package bippy class Use { @Ann(Enum.VALUE) - def foo {} + def foo: Unit = {} } diff --git a/test/files/pos/t2782.scala b/test/files/pos/t2782.scala index ab12aaf1fe4d..f66fec3ed53c 100644 --- a/test/files/pos/t2782.scala +++ b/test/files/pos/t2782.scala @@ -4,7 +4,7 @@ object Test { trait Foo[T] // Haven't managed to repro without using a CanBuild or CanBuildFrom implicit parameter - implicit def MapFoo[A, B, M[A, B] <: sc.Map[A,B]](implicit aFoo: Foo[A], bFoo: Foo[B], cb: sc.generic.CanBuild[(A, B), M[A, B]]) = new Foo[M[A,B]] {} + implicit def MapFoo[A, B, M[A, B] <: sc.Map[A,B]](implicit aFoo: Foo[A], bFoo: Foo[B], cb: sc.Factory[(A, B), M[A, B]]) = new Foo[M[A,B]] {} implicit object Tuple2IntIntFoo extends Foo[(Int, Int)] // no difference if this line is uncommented implicit def Tuple2Foo[A, B] = new Foo[(A, B)] {} // nor this one diff --git a/test/files/pos/t2795-new.scala b/test/files/pos/t2795-new.scala index a6a5fdb1273a..e307133e0910 100644 --- a/test/files/pos/t2795-new.scala +++ b/test/files/pos/t2795-new.scala @@ -16,4 +16,4 @@ trait Transform { self: Config => def processBlock(block: Array[T]): Unit = { var X = new Array[T](1) } -} \ No newline at end of file +} diff --git a/test/files/pos/t2795-old.scala b/test/files/pos/t2795-old.scala index 935cb1f44439..393def643774 100644 --- a/test/files/pos/t2795-old.scala +++ b/test/files/pos/t2795-old.scala @@ -1,5 +1,7 @@ package t1 +import scala.reflect.ClassManifest + trait Element[T] { } diff --git a/test/files/pos/t2799.scala b/test/files/pos/t2799.scala index 2c63b39b65bf..9cefaefc38b2 100644 --- a/test/files/pos/t2799.scala +++ b/test/files/pos/t2799.scala @@ -1,2 +1,4 @@ -// scalac: -deprecation -Xfatal-warnings -@deprecated("hi mom", "") case class Bob () +//> using options -Xlint -Werror + +@deprecated("hi mom", "") +case class Bob () diff --git a/test/files/pos/t283.scala b/test/files/pos/t283.scala new file mode 100644 index 000000000000..8691404db6ae --- /dev/null +++ b/test/files/pos/t283.scala 
@@ -0,0 +1,5 @@ + +import _root_._ // _root_.java._ is OK +object Test extends App { + println(java.util.Locale.getDefault().toString) // static call +} diff --git a/test/files/pos/t284-pos.scala b/test/files/pos/t284-pos.scala index 19673b7e4c39..40277e2db233 100644 --- a/test/files/pos/t284-pos.scala +++ b/test/files/pos/t284-pos.scala @@ -1,5 +1,5 @@ trait B[T] { def f1(a: T): Unit { } def f2(a: T): Unit - def f3(a: T) { } + def f3(a: T): Unit = { } } diff --git a/test/files/pos/t2910.scala b/test/files/pos/t2910.scala index d4d92fa76527..f9f2c87a98d0 100644 --- a/test/files/pos/t2910.scala +++ b/test/files/pos/t2910.scala @@ -1,15 +1,15 @@ object Test { - def test1 { + def test1: Unit = { C object C } - def test2 { + def test2: Unit = { println(s.length) lazy val s = "abc" } - def test3 { + def test3: Unit = { lazy val lazyBar = bar object bar { val foo = 12 @@ -17,7 +17,7 @@ object Test { lazy val lazyBar2 = bar } - def test4 { + def test4: Unit = { lazy val x = { x lazy val x = 12 @@ -25,9 +25,9 @@ object Test { } } - def test5 { + def test5: Unit = { lazy val f: Int = g Console.println("foo") lazy val g: Int = f } -} \ No newline at end of file +} diff --git a/test/files/pos/t2913.scala b/test/files/pos/t2913.scala index 11d8b92053ed..6366548c6208 100644 --- a/test/files/pos/t2913.scala +++ b/test/files/pos/t2913.scala @@ -44,7 +44,7 @@ object test1 { } } object Main { - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { val fn = (a : Int, str : String) => "a: " + a + ", str: " + str implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null) println(fn(1)) diff --git a/test/files/pos/t2939.scala b/test/files/pos/t2939.scala deleted file mode 100644 index 3be4d4d5611d..000000000000 --- a/test/files/pos/t2939.scala +++ /dev/null @@ -1,13 +0,0 @@ -import collection._ - -object Proxies { - class C1 extends MapProxy[Int,Int] { def self = Map[Int,Int]() } - class C2 extends mutable.MapProxy[Int,Int] { def self = 
mutable.Map[Int,Int]() } - class C3 extends immutable.MapProxy[Int,Int] { def self = immutable.Map[Int,Int]() } - - class C4 extends SetProxy[Int] { def self = Set[Int]() } - class C5 extends mutable.SetProxy[Int] { def self = mutable.Set[Int]() } - class C6 extends immutable.SetProxy[Int] { def self = immutable.Set[Int]() } - - class C7 extends SeqProxy[Int] { def self = Seq[Int]() } -} \ No newline at end of file diff --git a/test/files/pos/t2940/Error.scala b/test/files/pos/t2940/Error.scala index bf5a6bd0dfa7..92f08f5800f7 100644 --- a/test/files/pos/t2940/Error.scala +++ b/test/files/pos/t2940/Error.scala @@ -4,7 +4,7 @@ abstract class Error { object Test { trait Quux[T] extends Cycle[Quux[T]] - val x = new Quux[Int] { def doStuff() { } } + val x = new Quux[Int] { def doStuff(): Unit = { } } def main(args: Array[String]): Unit = { diff --git a/test/files/pos/t2945.scala b/test/files/pos/t2945.scala index 762bdb61e10e..0379e9ba1603 100644 --- a/test/files/pos/t2945.scala +++ b/test/files/pos/t2945.scala @@ -9,4 +9,4 @@ object Foo { s } } -} \ No newline at end of file +} diff --git a/test/files/pos/t2956/BeanDefinitionVisitor.java b/test/files/pos/t2956/BeanDefinitionVisitor.java deleted file mode 100644 index 2ff5daa25398..000000000000 --- a/test/files/pos/t2956/BeanDefinitionVisitor.java +++ /dev/null @@ -1,6 +0,0 @@ -import java.util.Map; -public class BeanDefinitionVisitor { - @SuppressWarnings("unchecked") - protected void visitMap(Map mapVal) { - } -} diff --git a/test/files/pos/t2956/t2956.scala b/test/files/pos/t2956/t2956.scala deleted file mode 100644 index 9b6ae8098fb8..000000000000 --- a/test/files/pos/t2956/t2956.scala +++ /dev/null @@ -1,7 +0,0 @@ -import scala.collection.convert.ImplicitConversionsToScala._ - -class Outer { - protected class Inner extends BeanDefinitionVisitor { - protected def visitMap(mapVal: Map[_, _]): Unit = () - } -} diff --git a/test/files/pos/t2994a.scala b/test/files/pos/t2994a.scala index cb4a389e2fc3..f1a4a4a12d30 
100644 --- a/test/files/pos/t2994a.scala +++ b/test/files/pos/t2994a.scala @@ -24,4 +24,4 @@ object Naturals { type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z] } -} \ No newline at end of file +} diff --git a/test/files/pos/t2994b.scala b/test/files/pos/t2994b.scala index c9d9cc812bb1..8b5eb9692419 100644 --- a/test/files/pos/t2994b.scala +++ b/test/files/pos/t2994b.scala @@ -4,4 +4,4 @@ object Test { type Apply[T] } trait Foo[V[_] <: Bar[V]] extends Bar[Baz[V]#Apply] -} \ No newline at end of file +} diff --git a/test/files/pos/t3076/C2.scala b/test/files/pos/t3076/C2.scala index d08f9ee81d9c..c8a6ea9e3e05 100644 --- a/test/files/pos/t3076/C2.scala +++ b/test/files/pos/t3076/C2.scala @@ -1,4 +1,4 @@ class C2 { - def m1() { new T { } } - def m2() { new T { } } + def m1(): Unit = { new T { } } + def m2(): Unit = { new T { } } } diff --git a/test/files/pos/t3079.scala b/test/files/pos/t3079.scala index fa732ea51628..b7bd63190114 100644 --- a/test/files/pos/t3079.scala +++ b/test/files/pos/t3079.scala @@ -14,4 +14,4 @@ object Coerce { // Providing the type of _ works around the problem. 
//def unwrap = (_: Identity[B]).value } -} \ No newline at end of file +} diff --git a/test/files/pos/t3106.scala b/test/files/pos/t3106.scala index 162e93366fb4..a9591d0aaf9f 100644 --- a/test/files/pos/t3106.scala +++ b/test/files/pos/t3106.scala @@ -4,4 +4,4 @@ object Sample { implicit def apply[A] (x:A): Sample[A] = { new Sample(p => p._1) } -} \ No newline at end of file +} diff --git a/test/files/pos/t3174.scala b/test/files/pos/t3174.scala index c3d90a49461e..8d9b2578d075 100644 --- a/test/files/pos/t3174.scala +++ b/test/files/pos/t3174.scala @@ -1,5 +1,5 @@ object test { - def method() { + def method(): Unit = { class Foo extends AnyRef { object Color { object Blue diff --git a/test/files/pos/t3177.scala b/test/files/pos/t3177.scala index 9f9528faeca4..9b850966db71 100644 --- a/test/files/pos/t3177.scala +++ b/test/files/pos/t3177.scala @@ -36,4 +36,4 @@ object Endo { } implicit def EndoFrom[A](e: Endo[A]): A => A = e.apply(_) -} \ No newline at end of file +} diff --git a/test/files/pos/t3218.scala b/test/files/pos/t3218.scala new file mode 100644 index 000000000000..cbf9277dfdbe --- /dev/null +++ b/test/files/pos/t3218.scala @@ -0,0 +1,41 @@ + +import java.util.{List => JList} +import scala.collection.mutable +import scala.collection.JavaConverters.asScalaBuffer +import scala.language.existentials +import scala.language.implicitConversions + +object Test extends App { + + def fromJava[T](li: JList[T]): List[T] = asScalaBuffer(li).toList + + implicit def `list asScalaBuffer`[A](l: JList[A]): mutable.Buffer[A] = asScalaBuffer(l) + + // implicit conversion - ok + def test1(jList:JList[_]) = jList.filter(_.isInstanceOf[java.lang.Object]) + + // explicit conversion - ok + def test2(jList:JList[_]) = { + val f = fromJava(jList).filter _ + f(_.isInstanceOf[java.lang.Object]) + } + + // implicit conversion - error + def test3(jList:JList[_]) = { + val f = jList.filter _ + f(_.isInstanceOf[java.lang.Object]) + } + + val ss: JList[String] = { + val res = new 
java.util.ArrayList[String] + res.add("hello, world") + res + } + + println {( + test1(ss), + test2(ss), + test3(ss), + )} + +} diff --git a/test/files/pos/t3218b.scala b/test/files/pos/t3218b.scala new file mode 100644 index 000000000000..d9c6b8e6ecea --- /dev/null +++ b/test/files/pos/t3218b.scala @@ -0,0 +1,35 @@ + +import language.implicitConversions + +trait T +trait U { + def u(x: Any) = x.toString * 2 +} +class C { + implicit def cv(t: T): U = new U {} + def f(t: T): Any => String = cv(t).u _ + def g(t: T): Any => String = t.u _ + def h(t: T) = t.u _ +} + +object Test extends App { + val c = new C + val t = new T {} + println {( + c.f(t)("f"), + c.g(t)("g"), + c.h(t)("h"), + )} +} + +/* +2.11, 2.12 say: +t3218b.scala:11: error: _ must follow method; cannot follow Any => String + def g(t: T): Any => String = t.u _ + ^ +t3218b.scala:12: error: missing argument list for method u in trait U +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `u _` or `u(_)` instead of `u`. 
+ def h(t: T) = t.u _ + ^ +*/ diff --git a/test/files/pos/t3234.scala b/test/files/pos/t3234.scala index 3430c3e751c9..1b1769aadff3 100644 --- a/test/files/pos/t3234.scala +++ b/test/files/pos/t3234.scala @@ -1,4 +1,6 @@ -// scalac: -opt:l:inline -opt-inline-from:** -opt-warnings -Xfatal-warnings + +//> using options -opt:inline:** -Wopt -Werror +// trait Trait1 { @inline final def foo2(n: Int) = n*n } diff --git a/test/files/pos/t3240.scala b/test/files/pos/t3240.scala index cf197a406d64..fdd86d8eae17 100644 --- a/test/files/pos/t3240.scala +++ b/test/files/pos/t3240.scala @@ -5,4 +5,4 @@ class A { a.getOrElse(defVal).asInstanceOf[t] } } -} \ No newline at end of file +} diff --git a/test/files/pos/t3249/a.scala b/test/files/pos/t3249/a.scala index 039446454983..fad6f8da6bf7 100644 --- a/test/files/pos/t3249/a.scala +++ b/test/files/pos/t3249/a.scala @@ -8,4 +8,4 @@ $ javac -cp .:$SCALA_HOME/lib/scala-library.jar -Xprint 'A$X$1' public class X$1 extends A implements scala.ScalaObject { public X$1(A null); } -*/ \ No newline at end of file +*/ diff --git a/test/files/pos/t3272.scala b/test/files/pos/t3272.scala index 8efd5ded2cda..cf54d6a848fe 100644 --- a/test/files/pos/t3272.scala +++ b/test/files/pos/t3272.scala @@ -1,8 +1,8 @@ trait A { trait C[+T] { - protected[this] def f(t: T) {} + protected[this] def f(t: T): Unit = {} } trait D[T] extends C[T] { - def g(t: T) { f(t) } + def g(t: T): Unit = { f(t) } } } diff --git a/test/files/pos/t3274.scala b/test/files/pos/t3274.scala index 15723184bb69..bd8b080c9a5c 100644 --- a/test/files/pos/t3274.scala +++ b/test/files/pos/t3274.scala @@ -5,5 +5,5 @@ trait A { this: B => } trait B extends A { - trait Y { def f {} } -} \ No newline at end of file + trait Y { def f: Unit = {} } +} diff --git a/test/files/pos/t3278.scala b/test/files/pos/t3278.scala index 788ec75d26b5..458070c5e7e7 100644 --- a/test/files/pos/t3278.scala +++ b/test/files/pos/t3278.scala @@ -1,15 +1,15 @@ class Foo class Test { - def update[B](x : B, b : 
Int) {} + def update[B](x : B, b : Int): Unit = {} def apply[B](x : B) = 1 } object Test { - def main(a : Array[String]) { + def main(a : Array[String]): Unit = { val a = new Test val f = new Foo a(f) = 1 //works a(f) = a(f) + 1 //works a(f) += 1 //error: reassignment to val } -} \ No newline at end of file +} diff --git a/test/files/pos/t3343.scala b/test/files/pos/t3343.scala index 6c34cdff0003..9d1bc9355c4f 100644 --- a/test/files/pos/t3343.scala +++ b/test/files/pos/t3343.scala @@ -12,4 +12,4 @@ object Test { def main(args: Array[String]): Unit = { } -} \ No newline at end of file +} diff --git a/test/files/pos/t3363-new.scala b/test/files/pos/t3363-new.scala index fef2bf8a72ed..d4d43198476f 100644 --- a/test/files/pos/t3363-new.scala +++ b/test/files/pos/t3363-new.scala @@ -13,7 +13,7 @@ object TestCase { def is(xs: List[T]) = List(xs) } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(Map(1 -> "2") is List(2)) } diff --git a/test/files/pos/t3363-old.scala b/test/files/pos/t3363-old.scala index c08cf2a6b69c..36d4cd933843 100644 --- a/test/files/pos/t3363-old.scala +++ b/test/files/pos/t3363-old.scala @@ -11,7 +11,7 @@ object TestCase { def is(xs: List[T]) = List(xs) } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(Map(1 -> "2") is List(2)) } diff --git a/test/files/pos/t3368.scala b/test/files/pos/t3368.scala index 1f4e6fb29295..ba9bf05117d9 100644 --- a/test/files/pos/t3368.scala +++ b/test/files/pos/t3368.scala @@ -1,5 +1,6 @@ -// scalac: -Ystop-after:parser +//> using options -Ystop-after:parser +// trait X { // error: in XML literal: name expected, but char '!' 
cannot start a name def x = diff --git a/test/files/pos/t3373.scala b/test/files/pos/t3373.scala index b4af3610bb9f..91768e201df1 100644 --- a/test/files/pos/t3373.scala +++ b/test/files/pos/t3373.scala @@ -8,4 +8,4 @@ object Test { implicit val orderEntries = new Ordering[Entry] { def compare(first: Entry, second: Entry) = extractTime(first) compare extractTime(second) } -} \ No newline at end of file +} diff --git a/test/files/pos/t3374.scala b/test/files/pos/t3374.scala index 4c0293181df5..c9bedcf69472 100644 --- a/test/files/pos/t3374.scala +++ b/test/files/pos/t3374.scala @@ -3,4 +3,4 @@ trait Parent { } trait Sub extends Parent { type Test[AS, HS[B <: AS]] = AS -} \ No newline at end of file +} diff --git a/test/files/pos/t3404/Derived.scala b/test/files/pos/t3404/Derived.scala index 16976fe3d510..b1a6c6b19ce6 100644 --- a/test/files/pos/t3404/Derived.scala +++ b/test/files/pos/t3404/Derived.scala @@ -1,3 +1,3 @@ class Derived extends Base { def foo(a: AnyRef) = classOf[String] -} \ No newline at end of file +} diff --git a/test/files/pos/t3411.scala b/test/files/pos/t3411.scala index b58e52db8da6..6d46be4e4415 100644 --- a/test/files/pos/t3411.scala +++ b/test/files/pos/t3411.scala @@ -1,7 +1,7 @@ object A { - def g(c: PartialFunction[Any,Unit]) {} + def g(c: PartialFunction[Any,Unit]): Unit = {} - def f { + def f: Unit = { lazy val x = 0 g { case `x` => } } diff --git a/test/files/pos/t3419/B_1.scala b/test/files/pos/t3419/B_1.scala index a8ec7edba4b9..f11701a86ecb 100644 --- a/test/files/pos/t3419/B_1.scala +++ b/test/files/pos/t3419/B_1.scala @@ -1,3 +1,3 @@ trait T[A,B] { type X[a <: A, b <: B] <: B -} \ No newline at end of file +} diff --git a/test/files/pos/t3419/C_2.scala b/test/files/pos/t3419/C_2.scala index da721d2c31ab..880c2838c088 100644 --- a/test/files/pos/t3419/C_2.scala +++ b/test/files/pos/t3419/C_2.scala @@ -1,3 +1,3 @@ object F { type S = T[Any,Int] {type X[N <: Int, Acc <: Int] = Acc} -} \ No newline at end of file +} diff --git 
a/test/files/pos/t3420.scala b/test/files/pos/t3420.scala index e8aaef8eece6..962a0ab437ff 100644 --- a/test/files/pos/t3420.scala +++ b/test/files/pos/t3420.scala @@ -1,4 +1,6 @@ -// scalac: -opt-warnings -opt:l:inline -opt-inline-from:** -Xfatal-warnings + +//> using options -opt:inline:** -Wopt -Werror +// class C { val cv = Map[Int, Int](1 -> 2) lazy val cl = Map[Int, Int](1 -> 2) diff --git a/test/files/pos/t3420b.scala b/test/files/pos/t3420b.scala new file mode 100644 index 000000000000..84471c995d08 --- /dev/null +++ b/test/files/pos/t3420b.scala @@ -0,0 +1,8 @@ + +//> using options --release 8 -opt:inline:** -Wopt -Werror +// +class C { + val cv = Map[Int, Int](1 -> 2) + lazy val cl = Map[Int, Int](1 -> 2) + def cd = Map[Int, Int](1 -> 2) +} diff --git a/test/files/pos/t3429/A.scala b/test/files/pos/t3429/A.scala index 80785db51d42..ea89af16818b 100644 --- a/test/files/pos/t3429/A.scala +++ b/test/files/pos/t3429/A.scala @@ -9,4 +9,4 @@ class A { // required: java.lang.Class // @Test(exc = classOf[Exception]) // ^ -// one error found \ No newline at end of file +// one error found diff --git a/test/files/pos/t3486/test.scala b/test/files/pos/t3486/test.scala index 544232b0d199..d4534e29f42e 100644 --- a/test/files/pos/t3486/test.scala +++ b/test/files/pos/t3486/test.scala @@ -3,4 +3,4 @@ trait Test[A] { def specified(a:A):A = a } -abstract class T2[A] extends Test[A] \ No newline at end of file +abstract class T2[A] extends Test[A] diff --git a/test/files/pos/t3494.scala b/test/files/pos/t3494.scala index 35a4bcde5db4..dd54b0f82faa 100644 --- a/test/files/pos/t3494.scala +++ b/test/files/pos/t3494.scala @@ -4,4 +4,4 @@ object Test { val x = "abc" f[x.type](x) -} \ No newline at end of file +} diff --git a/test/files/pos/t3495.scala b/test/files/pos/t3495.scala index 9d0980ace0a2..5dfd28328ce6 100644 --- a/test/files/pos/t3495.scala +++ b/test/files/pos/t3495.scala @@ -1,3 +1,7 @@ -// scalac: -Dsoot.class.path=bin:. 
+ +//> using options -Dsoot.class.path=bin:. +// +// option parsing broke on colon +// class Foo { } diff --git a/test/files/pos/t3498-new.scala b/test/files/pos/t3498-new.scala index eaf00cc351da..ccf2af9dee92 100644 --- a/test/files/pos/t3498-new.scala +++ b/test/files/pos/t3498-new.scala @@ -14,4 +14,4 @@ abstract class B extends A[ Array[Byte], Int ] { { new Array[U](N + state(N)) } -} \ No newline at end of file +} diff --git a/test/files/pos/t3498-old.scala b/test/files/pos/t3498-old.scala index bcc90ca64cb8..118a8d849fff 100644 --- a/test/files/pos/t3498-old.scala +++ b/test/files/pos/t3498-old.scala @@ -12,4 +12,4 @@ abstract class B extends A[ Array[Byte], Int ] { { new Array[U](N + state(N)) } -} \ No newline at end of file +} diff --git a/test/files/pos/t3521/a.scala b/test/files/pos/t3521/a.scala index 94bb451fc3d8..e9619b76e942 100644 --- a/test/files/pos/t3521/a.scala +++ b/test/files/pos/t3521/a.scala @@ -1,4 +1,4 @@ class Test { @DoubleValue(-0.05) var a = 0 -} \ No newline at end of file +} diff --git a/test/files/pos/t3534.scala b/test/files/pos/t3534.scala index d926d4e286e1..44d3aafb6633 100644 --- a/test/files/pos/t3534.scala +++ b/test/files/pos/t3534.scala @@ -3,4 +3,4 @@ object Test { case None :: bb :: cc => () case x => throw new Exception(x.filter(_.isDefined).mkString) } - } \ No newline at end of file + } diff --git a/test/files/pos/t3568.scala b/test/files/pos/t3568.scala index c8e3fcc4be1a..59b6753cd342 100644 --- a/test/files/pos/t3568.scala +++ b/test/files/pos/t3568.scala @@ -12,7 +12,7 @@ package object buffer { package buffer { object Main { // ArrayVec2 can be compiled, instantiated and used. 
- def main(args: Array[String]) { println(works) } + def main(args: Array[String]): Unit = { println(works) } } trait ElemType { type Element; type Component <: ElemType } @@ -20,7 +20,7 @@ package buffer { class Vec2 extends ElemType { type Element = Vec2; type Component = Float1 } abstract class BaseSeq[T <: ElemType, E] - extends IndexedSeq[E] with IndexedSeqOptimized[E, IndexedSeq[E]] { + extends IndexedSeq[E] with StrictOptimizedSeqOps[E, IndexedSeq, IndexedSeq[E]] { def length = 1 def apply(i: Int) :E } diff --git a/test/files/pos/t3570.scala b/test/files/pos/t3570.scala index 8921f83b2a6e..0e20905afae3 100644 --- a/test/files/pos/t3570.scala +++ b/test/files/pos/t3570.scala @@ -1,7 +1,7 @@ class test { object Break extends Throwable def break = throw Break - def block(x: => Unit) { + def block(x: => Unit): Unit = { try { x } catch { case e: Break.type => } } } diff --git a/test/files/pos/t3582.scala b/test/files/pos/t3582.scala index 0ac112efbf09..d13d69775969 100644 --- a/test/files/pos/t3582.scala +++ b/test/files/pos/t3582.scala @@ -1,6 +1,6 @@ trait C[A] object Test { - def ImplicitParamCA[CC[A], A](implicit ev: C[A]) {implicitly[C[A]]} // must use this exact syntax... + def ImplicitParamCA[CC[A], A](implicit ev: C[A]): Unit = {implicitly[C[A]]} // must use this exact syntax... 
// error: could not find implicit value for parameter e: C[A] } // [[syntax trees at end of typer]] diff --git a/test/files/pos/t3582b.scala b/test/files/pos/t3582b.scala index 8f0bfb9b2afd..f7778148e0b5 100644 --- a/test/files/pos/t3582b.scala +++ b/test/files/pos/t3582b.scala @@ -2,4 +2,4 @@ object ParamScoping { // scoping worked fine in the result type, but was wrong in body // reason: typedTypeDef needs new context, which was set up by typed1 but not by typedDefDef and typedClassDef def noOverlapFOwithHO[T, G[T]]: G[T] = null.asInstanceOf[G[T]] -} \ No newline at end of file +} diff --git a/test/files/pos/t3612.scala b/test/files/pos/t3612.scala index d3bcc373e3bf..a9d063998ca1 100644 --- a/test/files/pos/t3612.scala +++ b/test/files/pos/t3612.scala @@ -3,4 +3,4 @@ trait C class Outer { object O0 extends C {} object O extends C { self => } -} \ No newline at end of file +} diff --git a/test/files/pos/t3622/Test.scala b/test/files/pos/t3622/Test.scala index fb82c581f96d..d18953bbaca4 100644 --- a/test/files/pos/t3622/Test.scala +++ b/test/files/pos/t3622/Test.scala @@ -2,4 +2,4 @@ package test class Test extends MyAsyncTask { protected[test] def doInBackground1(args: Array[String]): String = "" -} \ No newline at end of file +} diff --git a/test/files/pos/t3631.scala b/test/files/pos/t3631.scala index bcf91619ee55..e723741307ed 100644 --- a/test/files/pos/t3631.scala +++ b/test/files/pos/t3631.scala @@ -1,3 +1,3 @@ case class X22(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) { } -case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { } \ No newline at end of file +case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: 
Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { } diff --git a/test/files/pos/t3636.scala b/test/files/pos/t3636.scala index 24d18c653d50..cd224a32f7a9 100644 --- a/test/files/pos/t3636.scala +++ b/test/files/pos/t3636.scala @@ -1,5 +1,5 @@ class CTxnLocal[ T ] { - def set( x: T )( implicit t: Txn ) {} + def set( x: T )( implicit t: Txn ): Unit = {} def get( implicit t: Txn ) : T = null.asInstanceOf[ T ] def initialValue( t: Txn ) : T = null.asInstanceOf[ T ] } @@ -32,7 +32,7 @@ object TxnLocal { set( v ) oldV } - def transform( f: T => T )( implicit tx: ProcTxn ) { + def transform( f: T => T )( implicit tx: ProcTxn ): Unit = { set( f( apply )) } } diff --git a/test/files/pos/t3642/t3642_2.scala b/test/files/pos/t3642/t3642_2.scala index 0892e508a5d4..954795fe2ade 100644 --- a/test/files/pos/t3642/t3642_2.scala +++ b/test/files/pos/t3642/t3642_2.scala @@ -1,3 +1,3 @@ object T { Tuppel_1.get -} \ No newline at end of file +} diff --git a/test/files/pos/t3664.scala b/test/files/pos/t3664.scala new file mode 100644 index 000000000000..0c0ea6fa17a5 --- /dev/null +++ b/test/files/pos/t3664.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Xlint -Xsource:3 + +import language.implicitConversions + +case class C(i: Int) +object C // no function parent + +// use conversion, don't warn about apply insertion +class Test { + implicit def cv(c: C.type): Function[Int, C] = C(_) + def f(xs: List[Int]): List[C] = xs.map(C) +} diff --git a/test/files/pos/t3671.scala b/test/files/pos/t3671.scala index 1ca9327bb7bf..afb3a539d19a 100644 --- a/test/files/pos/t3671.scala +++ b/test/files/pos/t3671.scala @@ -4,4 +4,4 @@ object Crash { case java.lang.Integer.MAX_VALUE => println("MAX_VALUE") case java.lang.Integer.MIN_VALUE => println("MIN_VALUE") } -} \ No newline at end of file +} diff --git a/test/files/pos/t3672.scala b/test/files/pos/t3672.scala 
index b2752ce21ff7..2c17a17ff8b1 100644 --- a/test/files/pos/t3672.scala +++ b/test/files/pos/t3672.scala @@ -1,4 +1,22 @@ object Test { - def foo(f: Int => Int) = () ; foo { implicit x : Int => x + 1 } - def bar(f: Int => Int) = () ; foo { x : Int => x + 1 } + def foo(f: Int => Int) = () + def test(): Unit = { + foo { x => x + 1 } + foo { implicit x => x + 1 } + foo { x: Int => x + 1 } + foo { implicit x: Int => x + 1 } + foo { _ => 42 } + foo { implicit _ => implicitly[Int] + 1 } // scala 2 deficit + foo { _: Int => 42 } + foo { implicit _: Int => implicitly[Int] + 1 } // scala 2 deficit + + foo(x => x + 1) + foo(implicit x => x + 1) + foo((x: Int) => x + 1) + foo(implicit (x: Int) => x + 1) // scala 3 + foo(_ => 42) + foo(implicit _ => implicitly[Int] + 1) // scala 2 deficit + foo((_: Int) => 42) + foo(implicit (_: Int) => implicitly[Int] + 1) // scala 3 + } } diff --git a/test/files/pos/t3688.scala b/test/files/pos/t3688.scala deleted file mode 100644 index 58464332d1c9..000000000000 --- a/test/files/pos/t3688.scala +++ /dev/null @@ -1,14 +0,0 @@ -import collection.mutable -import collection.convert.ImplicitConversionsToJava._ -import java.{util => ju} - -object Test { - - implicitly[mutable.Map[Int, String] => ju.Dictionary[Int, String]] - -} - -object Test2 { - def m[P <% ju.List[Int]](l: P) = 1 - m(List(1)) // bug: should compile -} diff --git a/test/files/pos/t3792.scala b/test/files/pos/t3792.scala index 10773c5f5b70..364d46317889 100644 --- a/test/files/pos/t3792.scala +++ b/test/files/pos/t3792.scala @@ -1,4 +1,4 @@ object Test { type Hui = Nil.type val n: Hui = Nil -} \ No newline at end of file +} diff --git a/test/files/pos/t3800.scala b/test/files/pos/t3800.scala index 61dbeafff33b..096502174b32 100644 --- a/test/files/pos/t3800.scala +++ b/test/files/pos/t3800.scala @@ -3,4 +3,4 @@ class meh extends annotation.StaticAnnotation class ALike[C] abstract class AFactory[CC[x] <: ALike[CC[x]]] { def aCompanion : AFactory[CC @meh] -} \ No newline at end of 
file +} diff --git a/test/files/pos/t3808.scala b/test/files/pos/t3808.scala index 294621803a0e..8162fce72f58 100644 --- a/test/files/pos/t3808.scala +++ b/test/files/pos/t3808.scala @@ -8,4 +8,4 @@ object Test { baz // () // commenting or uncommenting this line should not affect compilation (visibly) } -} \ No newline at end of file +} diff --git a/test/files/pos/t3833.scala b/test/files/pos/t3833.scala index c49e9339caac..2df658df1e33 100644 --- a/test/files/pos/t3833.scala +++ b/test/files/pos/t3833.scala @@ -9,7 +9,7 @@ object Main { }).asInstanceOf[T#AType] } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(mkArray[I](1)) //java.lang.ClassCastException: [I cannot be cast to [Ljava.lang.Object; } diff --git a/test/files/pos/t3859.scala b/test/files/pos/t3859.scala index 83d4c37b29f2..9922073016bf 100644 --- a/test/files/pos/t3859.scala +++ b/test/files/pos/t3859.scala @@ -1,4 +1,4 @@ class Test { def foo: Unit = bar(Array(): _*) def bar(values: AnyRef*): Unit = () -} \ No newline at end of file +} diff --git a/test/files/pos/t3862.scala b/test/files/pos/t3862.scala index a6dba84fea13..8ca4a0586120 100644 --- a/test/files/pos/t3862.scala +++ b/test/files/pos/t3862.scala @@ -1,8 +1,8 @@ object OverloadingShapeType { // comment out this, and the other alternative is chosen. 
- def blerg(f: String) {} + def blerg(f: String): Unit = {} - def blerg[M[X], T](l: M[T]) {} + def blerg[M[X], T](l: M[T]): Unit = {} blerg(List(1)) // error: type mismatch; found : List[Int] required: String -} \ No newline at end of file +} diff --git a/test/files/pos/t3864/scalaz_2.scala b/test/files/pos/t3864/scalaz_2.scala index a3f5b69617d4..a7a50de2c418 100644 --- a/test/files/pos/t3864/scalaz_2.scala +++ b/test/files/pos/t3864/scalaz_2.scala @@ -1 +1 @@ -object Scalaz extends Tuples \ No newline at end of file +object Scalaz extends Tuples diff --git a/test/files/pos/t3866.scala b/test/files/pos/t3866.scala index 5d366ccf13b6..f1f64edb9597 100644 --- a/test/files/pos/t3866.scala +++ b/test/files/pos/t3866.scala @@ -2,8 +2,8 @@ abstract class ImplicitRepeated { trait T[+A, +B] trait X - def f[N, R <: List[_]](elems: T[N, R]*) // alternative a) - def f[N, R <: List[_]](props: String, elems: T[N, R]*) // alternative b) + def f[N, R <: List[_]](elems: T[N, R]*): Unit // alternative a) + def f[N, R <: List[_]](props: String, elems: T[N, R]*): Unit // alternative b) // the following implicit causes "cannot be applied" errors implicit def xToRight(r: X): T[Nothing, X] = null @@ -14,4 +14,4 @@ abstract class ImplicitRepeated { f( 1, 2 ) // should be implicitly resolved to alternative a) // ImplicitRepeated.this.f[Int, Nothing]("A", ImplicitRepeated.this.anyToN[Int](1), ImplicitRepeated.this.anyToN[Int](2)); // ImplicitRepeated.this.f[Int, Nothing](ImplicitRepeated.this.anyToN[Int](1), ImplicitRepeated.this.anyToN[Int](2)) -} \ No newline at end of file +} diff --git a/test/files/pos/t3869.scala b/test/files/pos/t3869.scala index acd19abb24af..581c11c81b23 100644 --- a/test/files/pos/t3869.scala +++ b/test/files/pos/t3869.scala @@ -4,7 +4,7 @@ object Test { def f: Unit = try return finally while(true) () - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { f } } diff --git a/test/files/pos/t3880.scala b/test/files/pos/t3880.scala index 
cd5f3c0125c3..f778eb71a81a 100644 --- a/test/files/pos/t3880.scala +++ b/test/files/pos/t3880.scala @@ -13,4 +13,4 @@ abstract class C1[+B] extends Bar[B] { private[this] def f2(x: Bar[B]): Unit = x match { case x: C1[_] => f2(x) } -} \ No newline at end of file +} diff --git a/test/files/pos/t389.scala b/test/files/pos/t389.scala index 535bd4de8715..8a047ac041e1 100644 --- a/test/files/pos/t389.scala +++ b/test/files/pos/t389.scala @@ -3,5 +3,5 @@ object Test { def b = 'B def c = '+ //def d = '`\n` //error: unclosed character literal - def e = '\u0041 + //def e = '\u0041 //error: unclosed character literal } diff --git a/test/files/pos/t3890.scala b/test/files/pos/t3890.scala index bfc4754779c6..0c5f5dfe6f21 100644 --- a/test/files/pos/t3890.scala +++ b/test/files/pos/t3890.scala @@ -1,4 +1,4 @@ object Test { def g[S, T <: S](s: S)(t: T): Unit = println("") g("a")("a") // error: inferred type arguments [java.lang.String] do not conform to method g's type parameter bounds [T <: S] -} \ No newline at end of file +} diff --git a/test/files/pos/t3924.scala b/test/files/pos/t3924.scala index 35165baaf0d8..b11b67c565e1 100644 --- a/test/files/pos/t3924.scala +++ b/test/files/pos/t3924.scala @@ -1,6 +1,6 @@ object Test { class Hoe extends Serializable { def add(a: java.io.Serializable): Unit = println(a) - def someMethod() { add(this) } + def someMethod(): Unit = { add(this) } } } diff --git a/test/files/pos/t3927.scala b/test/files/pos/t3927.scala index f5869c55d5ac..5e0d581d4eab 100644 --- a/test/files/pos/t3927.scala +++ b/test/files/pos/t3927.scala @@ -1,5 +1,5 @@ object A { - def x { + def x: Unit = { implicit lazy val e: Equiv[Int] = sys.error("") implicitly[Equiv[Int]] } diff --git a/test/files/pos/t3946/Test_1.scala b/test/files/pos/t3946/Test_1.scala index 0cceff3aabe7..8d8c71a92451 100644 --- a/test/files/pos/t3946/Test_1.scala +++ b/test/files/pos/t3946/Test_1.scala @@ -1,6 +1,6 @@ package q { class B extends p.A { - override protected def f() { } + override 
protected def f(): Unit = { } } } diff --git a/test/files/pos/t3951/Coll_1.scala b/test/files/pos/t3951/Coll_1.scala index 556c8486888d..106b12a5568b 100644 --- a/test/files/pos/t3951/Coll_1.scala +++ b/test/files/pos/t3951/Coll_1.scala @@ -14,7 +14,7 @@ sealed trait DynamicDocument extends Document { class Coll extends StaticDocument -// similiar issue with annotations +// similar issue with annotations class ann[T] extends annotation.StaticAnnotation trait StatDoc extends Doc { diff --git a/test/files/pos/t3960.scala b/test/files/pos/t3960.scala index 509b79c43f29..6452a645ceee 100644 --- a/test/files/pos/t3960.scala +++ b/test/files/pos/t3960.scala @@ -1,4 +1,6 @@ -// scalac: -Ycheck:typer + +//> using options -Ycheck:typer +// class A { class C[x] val cs = new scala.collection.mutable.HashMap[C[_], Int] diff --git a/test/files/pos/t3972.scala b/test/files/pos/t3972.scala index 5dfc10fcef27..f1a977f26bbc 100644 --- a/test/files/pos/t3972.scala +++ b/test/files/pos/t3972.scala @@ -1,5 +1,5 @@ object CompilerCrash { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { args match { case Array("a", a @ _*) => { } // The code compiles fine if this line is commented out or "@ _*" is deleted or this line is swapped for the next line case Array("b") => { } // The code compiles fine if this line is commented out diff --git a/test/files/pos/t3986.scala b/test/files/pos/t3986.scala index 10c4eb435f49..635b00b1cb80 100644 --- a/test/files/pos/t3986.scala +++ b/test/files/pos/t3986.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { new { val x = "abc" } with AnyRef { } } } diff --git a/test/files/neg/t3995.scala b/test/files/pos/t3995.scala similarity index 95% rename from test/files/neg/t3995.scala rename to test/files/pos/t3995.scala index c79f2a5865ce..57b53738d44f 100644 --- a/test/files/neg/t3995.scala +++ b/test/files/pos/t3995.scala @@ -1,5 +1,8 @@ + +// +// class Lift { - def apply(f: 
F0) {} + def apply(f: F0): Unit = {} class F0 object F0 { diff --git a/test/files/pos/t3999/a_1.scala b/test/files/pos/t3999/a_1.scala index 25366ee9c470..54c1b86ce373 100644 --- a/test/files/pos/t3999/a_1.scala +++ b/test/files/pos/t3999/a_1.scala @@ -6,4 +6,4 @@ package object bar { class Val(b: Boolean) implicit def boolean2Val(b: Boolean) = new Val(b) implicit def boolean2Outside(b: Boolean) = new Outside -} \ No newline at end of file +} diff --git a/test/files/pos/t3999/b_2.scala b/test/files/pos/t3999/b_2.scala index 1af82c8c5b73..775b839d9531 100644 --- a/test/files/pos/t3999/b_2.scala +++ b/test/files/pos/t3999/b_2.scala @@ -4,4 +4,4 @@ package bar class A { val s: Val = false val o: Outside = false -} \ No newline at end of file +} diff --git a/test/files/pos/t3999b.scala b/test/files/pos/t3999b.scala index d3fe108479c3..0f3f7d642971 100644 --- a/test/files/pos/t3999b.scala +++ b/test/files/pos/t3999b.scala @@ -17,4 +17,4 @@ object Tester { val tt = new TT[FT] val r = tt.read("1.0") r.toString -} \ No newline at end of file +} diff --git a/test/files/pos/t4020.scala b/test/files/pos/t4020.scala index bc91cfc9e1e6..fb620e98c897 100644 --- a/test/files/pos/t4020.scala +++ b/test/files/pos/t4020.scala @@ -1,4 +1,6 @@ -// scalac: -Xfatal-warnings + +//> using options -Xfatal-warnings +// class A { sealed trait Foo } diff --git a/test/files/pos/t4036.scala b/test/files/pos/t4036.scala index b902a3153b61..06486df0fdc8 100644 --- a/test/files/pos/t4036.scala +++ b/test/files/pos/t4036.scala @@ -1,5 +1,5 @@ object Error { - def f { + def f: Unit = { case class X(b: Boolean = false) val r = X() } diff --git a/test/files/pos/t4043.scala b/test/files/pos/t4043.scala new file mode 100644 index 000000000000..c793333e66bf --- /dev/null +++ b/test/files/pos/t4043.scala @@ -0,0 +1,10 @@ +import scala.language.higherKinds +object t4043 { + trait GC[K[_ <: H0], H0] + + trait PA[H1] { + type Apply[A <: H1] = Any + } + + type a = GC[PA[Int]#Apply, Int] +} diff --git 
a/test/files/pos/t4063.scala b/test/files/pos/t4063.scala index 5e19c42edc14..f7033f686a0a 100644 --- a/test/files/pos/t4063.scala +++ b/test/files/pos/t4063.scala @@ -15,7 +15,7 @@ trait CMap[K, V] extends CIterableLike[(K, V), CMap[K, V]] with Parallelizable[P object Test { var x = 0 - def main() { + def main(): Unit = { val map: CMap[Int, CSet[Int]] = new CMap[Int, CSet[Int]] {} val set: CSet[Int] = new CSet[Int] {} diff --git a/test/files/pos/t4070b.scala b/test/files/pos/t4070b.scala index 36d03de80ce9..d6851b8cca26 100644 --- a/test/files/pos/t4070b.scala +++ b/test/files/pos/t4070b.scala @@ -32,4 +32,4 @@ package b { val func: Exp[B] val alloc: Exp[C[B]] } -} \ No newline at end of file +} diff --git a/test/files/pos/t4112.scala b/test/files/pos/t4112.scala index 311dad9e725c..ab0f36fdc4ab 100644 --- a/test/files/pos/t4112.scala +++ b/test/files/pos/t4112.scala @@ -5,7 +5,7 @@ import collection.immutable._ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val treemap = TreeMap(1 -> 2, 3 -> 4) ++ TreeMap(5 -> 6) (treemap: TreeMap[Int, Int]) } diff --git a/test/files/pos/t4205/1.scala b/test/files/pos/t4205/1.scala index 4dad7903763a..003763f6c701 100644 --- a/test/files/pos/t4205/1.scala +++ b/test/files/pos/t4205/1.scala @@ -1,3 +1,3 @@ trait A[OUT[_]] { null.asInstanceOf[B[OUT]].b1("") -} \ No newline at end of file +} diff --git a/test/files/pos/t4225.scala b/test/files/pos/t4225.scala new file mode 100644 index 000000000000..2ea3222ca36b --- /dev/null +++ b/test/files/pos/t4225.scala @@ -0,0 +1,90 @@ + +// +// +object Test { + class Foo { + class Bar + object Bar { + implicit def mkBar: Bar = new Bar + } + } + + object f extends Foo + + object ImplicitClass { + implicit class Ops[F <: Foo](val f0: F) { + def op0(implicit b: f0.Bar): f0.Bar = b + def op1(i: Int)(implicit b: f0.Bar): f0.Bar = b + def op2(i: Int = 23)(implicit b: f0.Bar): f0.Bar = b + def op3(i: Int)(j: Boolean)(implicit b: f0.Bar): f0.Bar = b + def 
op4[T](implicit b: f0.Bar): f0.Bar = b + def op5(i: => Int)(implicit b: f0.Bar): f0.Bar = b + } + + f.op0 + f.op1(23) + f.op2() + f.op3(23)(true) + f.op4[Int] + f.op5(23) + } + + object LazyImplicitConversion { + class Ops[F <: Foo](val f0: F) { + def op0(implicit b: f0.Bar): f0.Bar = b + def op1(i: Int)(implicit b: f0.Bar): f0.Bar = b + def op2(i: Int = 23)(implicit b: f0.Bar): f0.Bar = b + def op3(i: Int)(j: Boolean)(implicit b: f0.Bar): f0.Bar = b + def op4[T](implicit b: f0.Bar): f0.Bar = b + def op5(i: => Int)(implicit b: f0.Bar): f0.Bar = b + } + implicit def ops[F <: Foo](f: => F): Ops[F] = new Ops(f) + + f.op0 + f.op1(23) + f.op2() + f.op3(23)(true) + f.op4[Int] + f.op5(23) + } + + object RightAssociativeOps { + implicit class Ops[F <: Foo](val f0: F) { + def op0_:(i: Int)(implicit b: f0.Bar): f0.Bar = b + def op1_:(i: => Int)(implicit b: f0.Bar): f0.Bar = b + } + + object f extends Foo + + 23 op0_: f + 23 op1_: f + } + + object Blocks { + implicit class Ops[F <: Foo](val f0: F) { + def op0(implicit b: f0.Bar): f0.Bar = b + } + + { class Foo ; f }.op0 + + { object f1 extends Foo ; f1 }.op0 + + // The above expands to the following ... + val stab0 = + new Ops({ + object f1 extends Foo + f1 + }) + stab0.op0(stab0.f0.Bar.mkBar) + + { val f1 = new Foo ; f1 }.op0 + + // The above expands to the following ... 
+ val stab1 = + new Ops({ + val f1: Foo = new Foo + f1 + }) + stab1.op0(stab1.f0.Bar.mkBar) + } +} diff --git a/test/files/pos/t4225b.scala b/test/files/pos/t4225b.scala new file mode 100644 index 000000000000..a2786759a6ce --- /dev/null +++ b/test/files/pos/t4225b.scala @@ -0,0 +1,20 @@ + +// +// +class Foo { + class Bar + object Bar { + implicit def fromString(a: String) = new Bar + } + def andThen(b : Bar) = b + def andThen1(i : Int)(b : Bar) = b + def andThen2(b : Bar)(implicit dummy: DummyImplicit) = b + def andThen3[T](b: Bar) = b +} + +object Test { + (new Foo) andThen ("Bar") + (new Foo).andThen1(23)("Bar") + (new Foo) andThen2 ("Bar") + (new Foo) andThen3[Int]("Bar") +} diff --git a/test/files/pos/t4225c.scala b/test/files/pos/t4225c.scala new file mode 100644 index 000000000000..e21b2251ead8 --- /dev/null +++ b/test/files/pos/t4225c.scala @@ -0,0 +1,18 @@ + +// +// +trait A +trait B + +class Foo[A2, B2 <: A2] { + class Bar + object Bar { + implicit def fromString(a: String) = new Bar + } + def andThen(b : Bar) = b +} + +object Test { + def lub = if (true) (null: Foo[A, A]) else (null: Foo[B, B]) + (lub) andThen ("Bar") +} diff --git a/test/files/pos/t4257.scala b/test/files/pos/t4257.scala index 39f04a0a0a7a..fd150a150a6c 100644 --- a/test/files/pos/t4257.scala +++ b/test/files/pos/t4257.scala @@ -1,9 +1,9 @@ object Test { class SA[@specialized(Int) A] { - def o[U](f: ((Int, A) => Any)) {} + def o[U](f: ((Int, A) => Any)): Unit = {} - def o[U](f: A => Any) {} + def o[U](f: A => Any): Unit = {} } class X[@specialized(Int) B] { diff --git a/test/files/pos/t4266.scala b/test/files/pos/t4266.scala index 222f65e97089..9989ff4869c7 100644 --- a/test/files/pos/t4266.scala +++ b/test/files/pos/t4266.scala @@ -7,7 +7,7 @@ object Test { ] { def domain: D; - def checkKey(k1: A1) { + def checkKey(k1: A1): Unit = { domain._1.contains(k1) } } diff --git a/test/files/pos/t4273.scala b/test/files/pos/t4273.scala index a4d37174ad0d..1a711d62f118 100644 --- 
a/test/files/pos/t4273.scala +++ b/test/files/pos/t4273.scala @@ -1,8 +1,8 @@ class A { - implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = new ord.Ops(x) + implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = new ord.OrderingOps(x) class Bippy implicit val bippyOrdering = new Ordering[Bippy] { def compare(x: Bippy, y: Bippy) = util.Random.nextInt } (new Bippy) < (new Bippy) -} \ No newline at end of file +} diff --git a/test/files/pos/t430-feb09.scala b/test/files/pos/t430-feb09.scala index 1499f32b7a36..3365b48022ba 100644 --- a/test/files/pos/t430-feb09.scala +++ b/test/files/pos/t430-feb09.scala @@ -30,5 +30,5 @@ package f.scala { // Doesn't compile: type is not a member of package h.scala package h.scala { - case class H(s: String)(t: =>String) + case class H(s: String)(t: => String) } diff --git a/test/files/pos/t430.scala b/test/files/pos/t430.scala index e04e39bea81a..b2367a973801 100644 --- a/test/files/pos/t430.scala +++ b/test/files/pos/t430.scala @@ -1,5 +1,5 @@ object Test extends App { - def foo[T <% Ordered[T]](x: T){ Console.println(""+(x < x)+" "+(x <= x)) } + def foo[T <% Ordered[T]](x: T): Unit ={ Console.println(""+(x < x)+" "+(x <= x)) } def bar(x: Unit ): Unit = foo(x); def bar(x: Boolean): Unit = foo(x); def bar(x: Byte ): Unit = foo(x); diff --git a/test/files/pos/t4305.scala b/test/files/pos/t4305.scala index ba3eb65bc196..68ec7b712ecf 100644 --- a/test/files/pos/t4305.scala +++ b/test/files/pos/t4305.scala @@ -28,4 +28,4 @@ object T3 { x.getTypeParameters } } -} \ No newline at end of file +} diff --git a/test/files/pos/t4345.scala b/test/files/pos/t4345.scala index 59de1eadd3d0..b0131d5fa5f5 100644 --- a/test/files/pos/t4345.scala +++ b/test/files/pos/t4345.scala @@ -4,4 +4,4 @@ trait C1[+A, +CC[X]] { trait C2[+A, +CC[X]] extends C1[A, CC] { override protected[this] def f = super.f -} \ No newline at end of file +} diff --git a/test/files/pos/t4365/a_1.scala b/test/files/pos/t4365/a_1.scala index 
e7466e0d4813..2362ecdd4985 100644 --- a/test/files/pos/t4365/a_1.scala +++ b/test/files/pos/t4365/a_1.scala @@ -1,18 +1,3 @@ -import scala.collection._ - -trait SeqViewLike[+A, - +Coll, - +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, Nothing]] - extends Seq[A] with GenSeqViewLike[A, Coll, Nothing] -{ - - trait Transformed[+B] extends super[GenSeqViewLike].Transformed[B] - - abstract class AbstractTransformed[+B] extends Seq[B] with Transformed[B] { - def underlying: Coll = sys.error("") - } - - trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed - - protected def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed +trait SVL extends GSVL[Int, CBar] { + new Reversed { } } diff --git a/test/files/pos/t4365/b_1.scala b/test/files/pos/t4365/b_1.scala index 1158db6c322c..56e3afd76d4f 100644 --- a/test/files/pos/t4365/b_1.scala +++ b/test/files/pos/t4365/b_1.scala @@ -1,24 +1,14 @@ -import scala.collection._ - -trait GenSeqView0[+A, +Coll] - -trait GenSeqViewLike[+A, - +Coll, - +This <: GenSeqView0[A, Coll] with GenSeqViewLike[A, Coll, Nothing]] -extends GenSeq[A] { -self => - - trait Transformed[+B] { - def length: Int = 0 - def apply(idx: Int): B = sys.error("") - } - - trait Reversed extends Transformed[A] { - def iterator: Iterator[A] = createReversedIterator - - private def createReversedIterator: Iterator[A] = { - self.foreach(_ => ()) - null - } +trait Bar0[+A] +trait Bar1[+This] +class CBar extends Bar0[Int] with Bar1[CBar] { } + +trait GSVL[+A, +This <: Bar0[A] with Bar1[This]] { + // There has to be a method in Foo + trait Foo { def f = ??? } + + // There has to be a private method with a closure in Reversed, + // and it has to be a trait. + trait Reversed extends Foo { + private def g = { List(1) map (_ + 1) ; ??? 
} } } diff --git a/test/files/pos/t4457_1.scala b/test/files/pos/t4457_1.scala index 32edd6cfdc3c..7192d97f4676 100644 --- a/test/files/pos/t4457_1.scala +++ b/test/files/pos/t4457_1.scala @@ -16,11 +16,11 @@ object ImplicitConvAmbiguity2 { def bFunc[T](e1: N[T]) = {} - def typeMe1 { + def typeMe1: Unit = { val x = aFunc(4F) bFunc(x) } - def typeMe2 { + def typeMe2: Unit = { bFunc(aFunc(4F)) } } diff --git a/test/files/pos/t4494.scala b/test/files/pos/t4494.scala index ef6e45fbecb5..5e08e35d87bc 100644 --- a/test/files/pos/t4494.scala +++ b/test/files/pos/t4494.scala @@ -1,4 +1,6 @@ -// scalac: -Yrangepos + +// +// object A { List(1) } diff --git a/test/files/pos/t4501.scala b/test/files/pos/t4501.scala index 40628f1a4bea..6ac36e1e9b53 100644 --- a/test/files/pos/t4501.scala +++ b/test/files/pos/t4501.scala @@ -3,7 +3,8 @@ import scala.collection.mutable.ListBuffer class A { def foo[T](a:T, b:T):T = a - def f1 = foo(ListBuffer(), List()) + // f1 no longer compiles with 2.13 collections, it produces an invalid lub; added to run/invalid-lubs.scala + // def f1 = foo(ListBuffer(), List()) def f2 = foo(ListBuffer(), ListBuffer()) def f3 = foo(List(), List()) @@ -11,4 +12,4 @@ class A { // def f1 : scala.collection.Seq[scala.Nothing] = { /* compiled code */ } // def f2 : scala.collection.mutable.ListBuffer[scala.Nothing] = { /* compiled code */ } // def f3 : scala.collection.immutable.List[scala.Nothing] = { /* compiled code */ } -} \ No newline at end of file +} diff --git a/test/files/pos/t4502.scala b/test/files/pos/t4502.scala index ed7d3d055714..53e4abc39773 100644 --- a/test/files/pos/t4502.scala +++ b/test/files/pos/t4502.scala @@ -1,9 +1,9 @@ class T { - def send(o: Any, d: Int = 10) { } + def send(o: Any, d: Int = 10): Unit = { } - def c(f: => Any) { } + def c(f: => Any): Unit = { } - def f() { + def f(): Unit = { var a = this a.send( c(a.send(())) diff --git a/test/files/pos/t4545.scala b/test/files/pos/t4545.scala index 8c7a3236c4b6..b2b67fa8f6cc 100644 --- 
a/test/files/pos/t4545.scala +++ b/test/files/pos/t4545.scala @@ -1,7 +1,7 @@ object Test { - def f[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](table: Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Unit) { + def f[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](table: Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Unit): Unit = { } - def g[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](table: Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Unit) { + def g[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](table: Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Unit): Unit = { } def g20 = f( diff --git a/test/files/pos/t4547.scala b/test/files/pos/t4547.scala index 01f229bbf67f..b771c6967116 100644 --- a/test/files/pos/t4547.scala +++ b/test/files/pos/t4547.scala @@ -1,4 +1,4 @@ object Main { def g: BigInt = 5 + BigInt(4) // since we're looking for an implicit that converts an int into something that has a + method that takes a BigInt, BigInt should be in the implicit scope def g2 = 5 + BigInt(4) -} \ No newline at end of file +} diff --git a/test/files/pos/t4553.scala b/test/files/pos/t4553.scala index 4eefe57b2bef..e9bef4099428 100644 --- a/test/files/pos/t4553.scala +++ b/test/files/pos/t4553.scala @@ -1,5 +1,5 @@ trait VectorLike[+T, +V[A] <: Vector[A]] { - def +[S, VResult[S] >: V[S]](v: VResult[S]) + def +[S, VResult[S] >: V[S]](v: VResult[S]): Unit } trait Vector[+T] extends VectorLike[T, Vector] diff --git a/test/files/pos/t4559.scala b/test/files/pos/t4559.scala new file mode 100644 index 000000000000..2ae36bf3fcb1 --- /dev/null +++ 
b/test/files/pos/t4559.scala @@ -0,0 +1,38 @@ + +package mypkg { + + trait M { type C <: M } + trait F extends M { type C = F } + + private[mypkg] abstract class Private[E <: M] { + type Sub <: Private[E#C] + final val sub: Sub = null.asInstanceOf[Sub] + + def foo: E + } + + trait Public[E <: M] extends Private[E] { + type Sub = Public[E#C] + } +} + +package p { + + import mypkg._ + + object Test { + val seq = null.asInstanceOf[Public[F]] + seq.sub.foo + } +} + +/* private[mypkg] is permitted but unqualified private is disallowed: + * +test/files/pos/t4559.scala:18: error: private class Private escapes its defining scope as part of type mypkg.Private[E] + trait Public[E <: M] extends Private[E] { + ^ +test/files/pos/t4559.scala:29: error: value sub is not a member of mypkg.Public[mypkg.F] + seq.sub.foo + ^ +two errors found +*/ diff --git a/test/files/pos/t4603/S.scala b/test/files/pos/t4603/S.scala index c7d809d9f769..c1364202bd30 100644 --- a/test/files/pos/t4603/S.scala +++ b/test/files/pos/t4603/S.scala @@ -2,7 +2,7 @@ class S extends J[AnyRef] object Test { - def main(args:Array[String]) { + def main(args:Array[String]): Unit = { J.f(classOf[S]) } } diff --git a/test/files/pos/t464.scala b/test/files/pos/t464.scala index 45521335c38b..9afa5b80434c 100644 --- a/test/files/pos/t464.scala +++ b/test/files/pos/t464.scala @@ -1,5 +1,5 @@ class A { - protected[this] def f() {} + protected[this] def f(): Unit = {} } class B extends A { f() @@ -10,4 +10,4 @@ class C extends A { } class D extends C { override protected def f() = super.f() -} \ No newline at end of file +} diff --git a/test/files/pos/t4649.scala b/test/files/pos/t4649.scala index 4f811d108549..a00121f2eedf 100644 --- a/test/files/pos/t4649.scala +++ b/test/files/pos/t4649.scala @@ -1,7 +1,10 @@ -// scalac: -Xfatal-warnings + +//> using options -Xfatal-warnings +// object Test { // @annotation.tailrec - def lazyFilter[E](s: Stream[E], p: E => Boolean): Stream[E] = s match { + def lazyFilter[E](s: 
LazyList[E], p: E => Boolean): LazyList[E] = s match { case h #:: t => if (p(h)) h #:: lazyFilter(t, p) else lazyFilter(t, p) + case _ => LazyList.empty[E] } } diff --git a/test/files/pos/t4692.scala b/test/files/pos/t4692.scala index 409daf225783..2ed98a6eeaef 100644 --- a/test/files/pos/t4692.scala +++ b/test/files/pos/t4692.scala @@ -24,4 +24,4 @@ class TypeAliasVsImplicitTest { // // val xs: MyListOfInt = error("") // toFor(xs : xs.type) -// } \ No newline at end of file +// } diff --git a/test/files/pos/t4716.scala b/test/files/pos/t4716.scala index ec29e8d2cbf3..045aad15e608 100644 --- a/test/files/pos/t4716.scala +++ b/test/files/pos/t4716.scala @@ -1,10 +1,6 @@ - - - - -trait Bug2[@specialized(Int) +A] extends TraversableOnce[A] { - def ++[B >: A](that: TraversableOnce[B]) = { - lazy val it = that.toIterator +trait Bug2[@specialized(Int) +A] extends IterableOnce[A] { + def ++[B >: A](that: IterableOnce[B]) = { + lazy val it = that.iterator it } } diff --git a/test/files/pos/t4717.scala b/test/files/pos/t4717.scala index ed35a8ad8742..8cfe6230a940 100644 --- a/test/files/pos/t4717.scala +++ b/test/files/pos/t4717.scala @@ -1,22 +1,13 @@ +trait Bug1[@specialized(Boolean) A] extends IterableOnce[A] { - - - - - - -trait Bug1[@specialized(Boolean) A] extends TraversableOnce[A] { - - def ++[B >: A](that: TraversableOnce[B]): Iterator[B] = new Iterator[B] { - lazy val it = that.toIterator + def ++[B >: A](that: IterableOnce[B]): Iterator[B] = new Iterator[B] { + lazy val it = that.iterator def hasNext = it.hasNext - def next = it.next + def next = it.next() } } - - trait WorksFine[@specialized(Boolean) A] { class SubBounds[B >: A] extends Bounds[B] { lazy val it = ??? @@ -24,12 +15,9 @@ trait WorksFine[@specialized(Boolean) A] { def x[B >: A]: Unit = new SubBounds[B] } - trait Bounds[@specialized(Boolean) A] { // okay without `>: A` def x[B >: A]: Unit = new Bounds[B] { lazy val it = ??? 
// def or val okay } } - - diff --git a/test/files/pos/t4731.scala b/test/files/pos/t4731.scala deleted file mode 100644 index d457543c1f4c..000000000000 --- a/test/files/pos/t4731.scala +++ /dev/null @@ -1,14 +0,0 @@ -import java.util.Comparator - -trait Trait1[T] { def foo(arg: Comparator[T]): Unit } - -trait Trait2[T] extends Trait1[T] { def foo(arg: Comparator[String]): Int = 0 } - -class Class1 extends Trait2[String] { } - -object Test { - def main(args: Array[String]): Unit = { - val c = new Class1 - c.foo(Ordering[String]) - } -} diff --git a/test/files/pos/t4737/S_2.scala b/test/files/pos/t4737/S_2.scala index 859846655785..dc89d13168c8 100644 --- a/test/files/pos/t4737/S_2.scala +++ b/test/files/pos/t4737/S_2.scala @@ -3,7 +3,7 @@ package s import j.J_1 class ScalaSubClass extends J_1 { - override def method(javaInnerClass: J_1#JavaInnerClass) { + override def method(javaInnerClass: J_1#JavaInnerClass): Unit = { println("world") } } diff --git a/test/files/pos/t4744/Bar.scala b/test/files/pos/t4744/Bar.scala index 01182d0a9a9f..4d20c49eb561 100644 --- a/test/files/pos/t4744/Bar.scala +++ b/test/files/pos/t4744/Bar.scala @@ -1,2 +1,2 @@ -// scalac: -Ybreak-cycles +//> using options -Ybreak-cycles class Bar { val quux = new Foo[java.lang.Integer]() } diff --git a/test/files/pos/t4775/JavaClass.java b/test/files/pos/t4775/JavaClass.java new file mode 100644 index 000000000000..bef2a1ccb170 --- /dev/null +++ b/test/files/pos/t4775/JavaClass.java @@ -0,0 +1,25 @@ +public class JavaClass { + public static class Element { + + } + + public static int foo(Element a, Class b, boolean c, Class... d) { + return 1; + } + + public static int foo(Element a, Class b, boolean c) { + return 2; + } + + public static int foo(Element a, Class... b) { + return 3; + } + + public static int foo(Element a, boolean b, Class... 
c) { + return 4; + } + + static { + foo(new Element(), Element.class, false); + } +} diff --git a/test/files/pos/t4775/Test.scala b/test/files/pos/t4775/Test.scala new file mode 100644 index 000000000000..970c3544e13d --- /dev/null +++ b/test/files/pos/t4775/Test.scala @@ -0,0 +1,4 @@ +class Test { + import JavaClass._ + foo(new Element, classOf[Element], false) +} diff --git a/test/files/pos/t4812.scala b/test/files/pos/t4812.scala index 2a807ab05eec..da223677078e 100644 --- a/test/files/pos/t4812.scala +++ b/test/files/pos/t4812.scala @@ -1,4 +1,4 @@ trait Test1 { - def m1(sym: Symbol = 'TestSym) - def m2(s: String = "TestString") + def m1(sym: Symbol = 'TestSym): Unit + def m2(s: String = "TestString"): Unit } diff --git a/test/files/pos/t4840.scala b/test/files/pos/t4840.scala index 9cd95db67baa..921da5e8ca68 100644 --- a/test/files/pos/t4840.scala +++ b/test/files/pos/t4840.scala @@ -1,4 +1,6 @@ -// scalac: -opt:l:inline -opt-inline-from:** + +//> using options -opt:inline:** -Wopt +// class Crashy { def g(): Option[Any] = None diff --git a/test/files/pos/t4853.scala b/test/files/pos/t4853.scala index c91f2d6b0595..f227ef36e63c 100644 --- a/test/files/pos/t4853.scala +++ b/test/files/pos/t4853.scala @@ -1,5 +1,5 @@ object Animal { - def main(args: Array[String]) { new Animal[Awake].goToSleep } + def main(args: Array[String]): Unit = { new Animal[Awake].goToSleep } } class Animal[A <: AwakeOrAsleep] { diff --git a/test/files/pos/t4911.scala b/test/files/pos/t4911.scala index 68131328e129..efae95df97d0 100644 --- a/test/files/pos/t4911.scala +++ b/test/files/pos/t4911.scala @@ -1,16 +1,16 @@ -// scalac: -unchecked -Xfatal-warnings -// +//> using options -Xfatal-warnings +// import language._ object Test { class Foo[T](val x: T) ; object Foo { def unapply[T](x: Foo[T]) = Some(x.x) } def f1[T](x: Foo[T]) = x match { case Foo(y) => y } - def f2[M[_], T](x: M[T]) = x match { case Foo(y) => y } + def f2[M[_], T](x: M[T]) = x match { case Foo(y) => y case _ => throw 
new MatchError(x) } case class Bar[T](x: T) - def f3[T](x: Bar[T]) = x match { case Bar(y) => y } - def f4[M[_], T](x: M[T]) = x match { case Bar(y) => y } + def f3[T](x: Bar[T]) = x match { case Bar(y) => y case _ => throw new MatchError(x) } + def f4[M[_], T](x: M[T]) = x match { case Bar(y) => y case _ => throw new MatchError(x) } } // // ./b.scala:4: warning: non variable type-argument T in type pattern Test.Foo[T] is unchecked since it is eliminated by erasure diff --git a/test/files/pos/t4940.scala b/test/files/pos/t4940.scala new file mode 100644 index 000000000000..b6a59a5bdd08 --- /dev/null +++ b/test/files/pos/t4940.scala @@ -0,0 +1,39 @@ +//> using options -Werror -Xlint +class C { + val f: PartialFunction[String, Int] = (x: String) => x match { case "x" => 3 } + val f2: PartialFunction[String, Int] = (x: String) => x match { case "x" => x.toString.toInt } + + val g: PartialFunction[X, Int] = (x: X) => x match { case X(i) => i } + val g2: PartialFunction[X, Int] = (x: Y) => x match { case X(i) => i } + //val g3: PartialFunction[Y, Int] = (x: X) => x match { case X(i) => i } + + val m: PartialFunction[Double, Int] = (x: Double) => x match { case 3.14 => 3 } +} + +class D { + val f: PartialFunction[String, Int] = _ match { case "x" => 3 } + + val g: PartialFunction[X, Int] = _ match { case X(i) => i } + + val m: PartialFunction[Double, Int] = _ match { case 3.14 => 3 } +} + +class E { + val f: PartialFunction[String, Int] = x => x.toInt + + val g: PartialFunction[X, Int] = x => x.x + + val m: PartialFunction[Double, Long] = d => d.round +} + +trait Y +case class X(x: Int) extends Y + +class ActuallyOK { + val map = Map(42 -> "foo") + def k = List(27).collect { + map.get(_) match { + case Some(i) => i + } + } +} diff --git a/test/files/pos/t4947.scala b/test/files/pos/t4947.scala new file mode 100644 index 000000000000..4bc7911dd494 --- /dev/null +++ b/test/files/pos/t4947.scala @@ -0,0 +1,31 @@ +class DependentImplicitTezt { + trait Bridge + + class Outer 
{ + class Inner extends Bridge + + object Inner { + implicit def fromOther(b: Bridge): Inner = throw new Error("todo") + } + + def run(x: Inner) = throw new Error("todo") + } + + val o1 = new Outer + val o2 = new Outer + val i1 = new o1.Inner + val i2 = new o2.Inner + + def doesntCompile: Unit = { + o1.run(i2) // should compile + } + + def workaround1: Unit = { + o1.run(i2: Bridge) // ok + } + + def workaround2: Unit = { + import o1.Inner.fromOther + o1.run(i2) // ok + } +} diff --git a/test/files/pos/t4975.scala b/test/files/pos/t4975.scala index 12d889c0d5c6..97ed9369ea8c 100644 --- a/test/files/pos/t4975.scala +++ b/test/files/pos/t4975.scala @@ -1,7 +1,7 @@ object ImplicitScope { class A[T] - def foo { + def foo: Unit = { trait B object B { implicit def ab = new A[B] diff --git a/test/files/pos/t5020.scala b/test/files/pos/t5020.scala index 06f7723f9f7f..28e674bf0eb4 100644 --- a/test/files/pos/t5020.scala +++ b/test/files/pos/t5020.scala @@ -16,4 +16,4 @@ package b { trait GenericCons[L, M[_ >: L], T <: GenericList[L, M]] extends GenericList[L, M] { type Transformed[N[MMB >: L]] = GenericCons[L, N, T#Transformed[N]] } -} \ No newline at end of file +} diff --git a/test/files/pos/t5022.scala b/test/files/pos/t5022.scala index b9a085fb35a5..5db71c6562af 100644 --- a/test/files/pos/t5022.scala +++ b/test/files/pos/t5022.scala @@ -1,5 +1,5 @@ class ForSomeVsUnapply { - def test { + def test: Unit = { def makeWrap: Wrap = ??? 
def useRep[e](rep: (e, X[e])) = () diff --git a/test/files/pos/t5029.scala b/test/files/pos/t5029.scala index fe14e12b9c8f..d423d1ba1244 100644 --- a/test/files/pos/t5029.scala +++ b/test/files/pos/t5029.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { - (Vector(): Seq[_]) match { case List() => true; case Nil => false } + (Vector(): Seq[_]) match { case List() => true; case Nil => false; case x => throw new MatchError(x) } } diff --git a/test/files/pos/t5033.scala b/test/files/pos/t5033.scala index c4c33348526d..3aa9fce5f403 100644 --- a/test/files/pos/t5033.scala +++ b/test/files/pos/t5033.scala @@ -12,4 +12,4 @@ trait PipExtractor { trait LaserGuidedPipExtractor extends PipExtractor { def extract(f: Fruit)(g: Eater): g.Food[f.Seed] -} \ No newline at end of file +} diff --git a/test/files/neg/t5044.scala b/test/files/pos/t5044.scala similarity index 85% rename from test/files/neg/t5044.scala rename to test/files/pos/t5044.scala index 2663ec1bbb58..8ea8ef67f88b 100644 --- a/test/files/neg/t5044.scala +++ b/test/files/pos/t5044.scala @@ -2,7 +2,7 @@ class T { def foo[T](id: T) = 0 def m(a: Int) = 0 - def f { + def f: Unit = { val a = foo(id = 1) val id = m(a) } diff --git a/test/files/pos/t5075.scala b/test/files/pos/t5075.scala new file mode 100644 index 000000000000..5ec881dc3d06 --- /dev/null +++ b/test/files/pos/t5075.scala @@ -0,0 +1,27 @@ +// Derived from Scalaz - http://scalaz.googlecode.com/svn/continuous/latest/browse.sxr/scalaz/PartialApplys.scala.html + +trait PartialApply1Of2[T[_, _], A] { type Apply[B] = T[A, B] } + +trait HKT[D[_]] +trait HKTBounded[C[X] <: Traversable[X], D[_]] +trait Cov[+T] + +class Test { + // exercise type constructor inference in different ways + implicit def m[D[_]](t: HKT[D]): Int = 1 + def mCov[D[_]](t: Cov[HKT[D]]): Any = ??? + def mBounded[C[X] <: Traversable[X], D[_]](t: Cov[HKTBounded[C, D]]): Any = ??? + + val param: HKT[PartialApply1Of2[Tuple2, Int]#Apply] = ??? 
+ m[PartialApply1Of2[Tuple2, Int]#Apply](param): Int // Already compiled + m(param) // Compiles now + param: Int // Compiles now + + val paramCov: Cov[HKT[PartialApply1Of2[Tuple2, Int]#Apply]] = ??? + mCov[PartialApply1Of2[Tuple2, Int]#Apply](paramCov) + mCov(paramCov) + + val paramBounded: Cov[HKTBounded[Traversable, PartialApply1Of2[Tuple2, Int]#Apply]] = ??? + mBounded[Traversable, PartialApply1Of2[Tuple2, Int]#Apply](paramBounded) + mBounded(paramBounded) +} diff --git a/test/files/neg/t5091.scala b/test/files/pos/t5091.scala similarity index 98% rename from test/files/neg/t5091.scala rename to test/files/pos/t5091.scala index 217e83f66d87..b5879ab6c60e 100644 --- a/test/files/neg/t5091.scala +++ b/test/files/pos/t5091.scala @@ -7,5 +7,4 @@ object RecursiveValueNeedsType { val xxx = foo(param = null) val param = bar(xxx) } - } diff --git a/test/files/pos/t5099.scala b/test/files/pos/t5099.scala index 178151259f6d..b41697e572dd 100644 --- a/test/files/pos/t5099.scala +++ b/test/files/pos/t5099.scala @@ -11,4 +11,4 @@ class LazyValVsFunctionType[a] { */ // _x: a // ok } -} \ No newline at end of file +} diff --git a/test/files/pos/t5120.scala b/test/files/pos/t5120.scala index 40540b8a7dde..a525e0bc89d3 100644 --- a/test/files/pos/t5120.scala +++ b/test/files/pos/t5120.scala @@ -22,5 +22,5 @@ class Test { } } - def foo[T](x: ScopedKey[T], v: ScopedKey[T]) {} + def foo[T](x: ScopedKey[T], v: ScopedKey[T]): Unit = {} } diff --git a/test/files/pos/t5127.scala b/test/files/pos/t5127.scala index c56202530261..c2f3b923f159 100644 --- a/test/files/pos/t5127.scala +++ b/test/files/pos/t5127.scala @@ -2,7 +2,7 @@ package foo { trait Abstract1[C <: Abstract2[C]] trait Abstract2[C <: Abstract2[C]] extends Abstract1[C] class Parametrized1[T] extends Abstract1[Parametrized2[T]] { - def bar(a: AnyRef) { a match { case d: Parametrized1[_] => println("ok") } } + def bar(a: AnyRef): Unit = { a match { case d: Parametrized1[_] => println("ok") } } } class Parametrized2[T] extends 
Parametrized1[T] with Abstract2[Parametrized2[T]] } diff --git a/test/files/pos/t5137.scala b/test/files/pos/t5137.scala index bb72cf378f37..d5b6036df878 100644 --- a/test/files/pos/t5137.scala +++ b/test/files/pos/t5137.scala @@ -14,4 +14,4 @@ object Test { (1 * (List[BigInt]().map({ case x => x }).sum)) -} \ No newline at end of file +} diff --git a/test/files/pos/t5154.scala b/test/files/pos/t5154.scala deleted file mode 100644 index 2629308f0066..000000000000 --- a/test/files/pos/t5154.scala +++ /dev/null @@ -1,9 +0,0 @@ - -trait Z { - // extra space made the pattern OK - def f = {{3}} match { case {{3}} => } - - // lack of space: error: illegal start of simple pattern - def g = {{3}} match { case {{3}} => } -} - diff --git a/test/files/pos/t5156.scala b/test/files/pos/t5156.scala index 52412ad4c1e5..129e97a5224a 100644 --- a/test/files/pos/t5156.scala +++ b/test/files/pos/t5156.scala @@ -8,7 +8,7 @@ object HList { implicit def hlistOps[L <: HList](l : L) = new { def ::[H](h : H) : H :: L = HCons(h, l) - def last(implicit last : Last[L]) {} + def last(implicit last : Last[L]): Unit = {} } class Last[L <: HList] diff --git a/test/files/pos/t516.scala b/test/files/pos/t516.scala deleted file mode 100644 index 5561b7610c3d..000000000000 --- a/test/files/pos/t516.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.collection.mutable._; -import scala.collection.script._; - -class Members; - -object subscriber extends Subscriber[Message[String] with Undoable, Members] { - def notify(pub: Members, event: Message[String] with Undoable): Unit = - (event: Message[String]) match { - case Include(l, elem) => Console.println("ADD: " + elem); - case Remove(l, elem) => Console.println("REM: " + elem); - //case i : Include[HasTree] with Undoable => - //case r : Remove [HasTree] with Undoable => - } - } diff --git a/test/files/pos/t5165b/TestAnnotation_1.java b/test/files/pos/t5165b/TestAnnotation_1.java index 02eb3f9d4c86..c561f194dae5 100644 --- 
a/test/files/pos/t5165b/TestAnnotation_1.java +++ b/test/files/pos/t5165b/TestAnnotation_1.java @@ -1,3 +1,4 @@ +// scalac: -Werror import java.lang.annotation.*; @Retention(RetentionPolicy.RUNTIME) diff --git a/test/files/pos/t5165b/TestObject_3.scala b/test/files/pos/t5165b/TestObject_3.scala index b7082983646e..68f85d53a47e 100644 --- a/test/files/pos/t5165b/TestObject_3.scala +++ b/test/files/pos/t5165b/TestObject_3.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings - +//> using options -Werror object TestObject extends TestTrait diff --git a/test/files/pos/t5165b/TestTrait_2.scala b/test/files/pos/t5165b/TestTrait_2.scala index 99ff458d3e4a..94a7e4b92624 100644 --- a/test/files/pos/t5165b/TestTrait_2.scala +++ b/test/files/pos/t5165b/TestTrait_2.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings - +//> using options -Werror @TestAnnotation_1(one=TestAnnotation_1.TestEnumOne.A, two=TestAnnotation_1.TestEnumTwo.C, strVal="something") trait TestTrait diff --git a/test/files/pos/t5175.scala b/test/files/pos/t5175.scala index 2e500381827d..b3da323650d9 100644 --- a/test/files/pos/t5175.scala +++ b/test/files/pos/t5175.scala @@ -1,8 +1,9 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { def ==(p: Phase): Int = 0 - def foo { + def foo: Unit = { ==(new Phase()) } } diff --git a/test/files/pos/t5183.scala b/test/files/pos/t5183.scala index 783b8c28dcfd..94e58ffd643c 100644 --- a/test/files/pos/t5183.scala +++ b/test/files/pos/t5183.scala @@ -31,4 +31,4 @@ object Example { def tag[U](arr: Array[Int]):Array[Int @@ U] = arr.asInstanceOf[Array[Int @@ U]] tag[User](Array(3, 4, 5)).map(_.toString) -} \ No newline at end of file +} diff --git a/test/files/pos/t5223.scala b/test/files/pos/t5223.scala index d81daa990798..bfd1e153c397 100644 --- a/test/files/pos/t5223.scala +++ b/test/files/pos/t5223.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Foo extends App { reify{def printf(format: String, args: Any*): String = 
null } reify{def printf(format: String, args: Any*): String = ("abc": @deprecated)} -} \ No newline at end of file +} diff --git a/test/files/pos/t5245.scala b/test/files/pos/t5245.scala index 763be9ec263f..45b54a67b5ff 100644 --- a/test/files/pos/t5245.scala +++ b/test/files/pos/t5245.scala @@ -1,3 +1,3 @@ object Foo { def bar = { var x = (); def foo() = x } -} \ No newline at end of file +} diff --git a/test/files/pos/t5259.scala b/test/files/pos/t5259.scala index d33c4dd6a759..40c508f7d8c1 100644 --- a/test/files/pos/t5259.scala +++ b/test/files/pos/t5259.scala @@ -1,6 +1,6 @@ class A[T] class B { - def m(a: A[this.type] = new A[this.type]) { } + def m(a: A[this.type] = new A[this.type]): Unit = { } } class C { @@ -15,7 +15,7 @@ object Test { val stableB = new B stableB.m() - def f { + def f: Unit = { println((new C).foo(0)) } } diff --git a/test/files/pos/t5265a.scala b/test/files/pos/t5265a.scala new file mode 100644 index 000000000000..b8c465d85e13 --- /dev/null +++ b/test/files/pos/t5265a.scala @@ -0,0 +1,32 @@ +//> using options -Werror -Wconf:cat=other-implicit-type:s +trait T[A] + +class C[A: T] + +trait Missing { + implicit val tsMissing = new T[String] {} // warn val in trait + def f = new C[String] +} +trait Local { + def f = { + implicit val tsLocal = new T[String] {} // nowarn because local + new C[String] + } +} +trait Parent { + def t: T[String] +} +trait Child extends Parent { + implicit val tsChild = new T[String] {} // warn because inferred from RHS + def f = new C[String] + implicit private[this] val pChild = 42 // also warn +} +class D { + implicit val tsD = new T[String] {} // warn val in class + def f = new C[String] + implicit private[this] val y = 42 // also warn +} +class X extends Missing +trait Z { + val z = 42 +} diff --git a/test/files/pos/t531.scala b/test/files/pos/t531.scala index d53539f34f8a..d18a5c8860fa 100644 --- a/test/files/pos/t531.scala +++ b/test/files/pos/t531.scala @@ -8,4 +8,4 @@ object Test extends App { }} () } -} \ 
No newline at end of file +} diff --git a/test/files/pos/t5313.scala b/test/files/pos/t5313.scala index e77b73ca4c91..605e868793b5 100644 --- a/test/files/pos/t5313.scala +++ b/test/files/pos/t5313.scala @@ -15,7 +15,7 @@ object DepBug { val b = a.mkB } - def useDep(d : Dep) { + def useDep(d : Dep): Unit = { import d._ a.m(b) // OK } diff --git a/test/files/pos/t532.scala b/test/files/pos/t532.scala index 7c33637b5d36..9604c8afc65a 100644 --- a/test/files/pos/t532.scala +++ b/test/files/pos/t532.scala @@ -8,4 +8,4 @@ object Test extends App { }} () } -} \ No newline at end of file +} diff --git a/test/files/pos/t5330.scala b/test/files/pos/t5330.scala index 813acd4b832f..24aab7733b82 100644 --- a/test/files/pos/t5330.scala +++ b/test/files/pos/t5330.scala @@ -1,9 +1,9 @@ trait FM[A] { - def map(f: A => Any) + def map(f: A => Any): Unit } trait M[A] extends FM[A] { - def map(f: A => Any) + def map(f: A => Any): Unit } trait N[A] extends FM[A] diff --git a/test/files/pos/t5340.scala b/test/files/pos/t5340.scala new file mode 100644 index 000000000000..76ac862f11e8 --- /dev/null +++ b/test/files/pos/t5340.scala @@ -0,0 +1,18 @@ +class Poly { + class E + object E { + implicit def conv(value: Any): E = sys.error("") + } +} + +object MyApp { + val r: Poly = sys.error("") + val s: Poly = sys.error("") + val b: r.E = sys.error("") + + // okay + s.E.conv(b): s.E + + // okay + println(b: s.E) +} diff --git a/test/files/pos/t5365-nonStrict.scala b/test/files/pos/t5365-nonStrict.scala new file mode 100644 index 000000000000..c71ff102b058 --- /dev/null +++ b/test/files/pos/t5365-nonStrict.scala @@ -0,0 +1,42 @@ +//> using options -Werror -Xnon-strict-patmat-analysis +// +// copy of neg/t5365.scala, which under -Xnon-strict-patmat-analysis gives no warnings +class C { + def nonExhautiveIfWeAssumeGuardsTrueOrFalse(x: Option[Int]): Int = x match { + case Some(n) if n % 2 == 0 => n + } + + def nonExhautiveIfWeAssumeGuardsFalse(x: Option[Int]): Int = x match { + case Some(n) if n % 
2 == 0 => n + case None => 0 + } + + def inverseGuards(x: Option[Int]): Int = x match { + case Some(n) if n > 0 => n + case Some(n) if n <= 0 => ??? + case None => 0 + } + + def extractor(x: Option[Int]) = x match { + case Some(Extractor(_)) => + } + def repeatedExtractor(x: Option[Int]) = x match { + case Some(RepeatedExtractor(_)) => + } + def extractorStrict(x: Option[Int]) = x match { + case Some(Extractor(_)) => + case None => + } + def repeatedExtractorStrict(x: Option[Int]) = x match { + case Some(RepeatedExtractor(_)) => + case None => + } +} + +object Extractor { + def unapply(a: Any): Option[Any] = None +} + +object RepeatedExtractor { + def unapplySeq(a: Any): Option[Seq[Any]] = None +} diff --git a/test/files/pos/t5390.scala b/test/files/pos/t5390.scala index 36febb6a589c..d12bcf789b87 100644 --- a/test/files/pos/t5390.scala +++ b/test/files/pos/t5390.scala @@ -3,9 +3,9 @@ class A { } object X { - def foo { + def foo: Unit = { val a = new A val b = new a.B[c.type]("") // not a forward reference val c = "" } -} \ No newline at end of file +} diff --git a/test/files/pos/t5399.scala b/test/files/pos/t5399.scala index ebae7dbd9eaa..0e7cce3c1776 100644 --- a/test/files/pos/t5399.scala +++ b/test/files/pos/t5399.scala @@ -21,7 +21,7 @@ class Foo { val scalaHome: Setting[Option[String]] = null val scalaVersion: Setting[String] = null - def testPatternMatch(s: Setting[_]) { + def testPatternMatch(s: Setting[_]): Unit = { s.key match { case ScopedKey1(scalaHome.key | scalaVersion.key) => () } diff --git a/test/files/pos/t5399a.scala b/test/files/pos/t5399a.scala index 4ebd85ad03a8..c40cef4f961b 100644 --- a/test/files/pos/t5399a.scala +++ b/test/files/pos/t5399a.scala @@ -11,7 +11,7 @@ class Foo { val scalaHome: Setting[Option[String]] = null val scalaVersion: Setting[String] = null - def testPatternMatch(s: Setting[_]) { + def testPatternMatch(s: Setting[_]): Unit = { s.key match { case ScopedKey1(scalaHome.key | scalaVersion.key) => () } diff --git 
a/test/files/pos/t5413.scala b/test/files/pos/t5413.scala index 47af514a140d..521a9c552676 100644 --- a/test/files/pos/t5413.scala +++ b/test/files/pos/t5413.scala @@ -1,9 +1,11 @@ +/* scalac: -Xsource:3.0 */ + object Fail { - def nom (guard : => Boolean) (something : => Unit) { } - def main(args: Array[String]) { + def nom (guard : => Boolean) (something : => Unit): Unit = { } + def main(args: Array[String]): Unit = { nom { val i = 0 (i != 3) - }() + }(()) } } diff --git a/test/files/pos/t5444.scala b/test/files/pos/t5444.scala index df6b2ce4f809..d40c01328f14 100644 --- a/test/files/pos/t5444.scala +++ b/test/files/pos/t5444.scala @@ -13,7 +13,7 @@ class Test { class Bippy extends T with U { def z() = x() + x1() } new Bippy } - def b() { + def b(): Unit = { trait T { def y() = 3 trait T2 { @@ -29,7 +29,7 @@ class Test { class Bippy extends T with U { def z() = y() + y1() + (1 to (new T2 { }).yy()).map(_ + 1).sum } (new Bippy).z() } - def c() { + def c(): Unit = { trait T { def z() = 5 } diff --git a/test/files/pos/t5542.scala b/test/files/pos/t5542.scala index c437ee4bc218..816d371dcf33 100644 --- a/test/files/pos/t5542.scala +++ b/test/files/pos/t5542.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // class Test { Option(3) match { case Some(n) => n; case None => 0 } diff --git a/test/files/pos/t5546.scala b/test/files/pos/t5546.scala index 4b0b0589b6c2..8269bf18f277 100644 --- a/test/files/pos/t5546.scala +++ b/test/files/pos/t5546.scala @@ -1 +1 @@ -class A { def foo: Class[_ <: A] = getClass } \ No newline at end of file +class A { def foo: Class[_ <: A] = getClass } diff --git a/test/files/pos/t5559.scala b/test/files/pos/t5559.scala new file mode 100644 index 000000000000..a7fb41c2afc0 --- /dev/null +++ b/test/files/pos/t5559.scala @@ -0,0 +1,48 @@ +import language.implicitConversions + +object Test { + def f[T](x1: Set[T]) = () => new { + def apply(x2: Set[_ <: T]) = List(x1, x2) + } + + + class Matcher[X] 
+ object Problem2 { + def allOf[X](x: Matcher[_ >: X], y: Matcher[_ >: X]) = new Matcher[X] + def allOf[X](x: Matcher[_ >: X]*) = new Matcher[X] + } + + def equalTo[X](x: X) = new Matcher[X] + val a = equalTo("g") + val b = Problem2.allOf(a) + val c = Problem2.allOf(a,a) + + + class JObserver[T] + class JSubscriber[T] extends JObserver[T] + class Converted + implicit def convertSubscriber[T](s: JSubscriber[_ >: T]): Converted = ??? + implicit def convertObserver[T](s: JObserver[_ >: T]): Converted = ??? + val jSubscriber: JSubscriber[_ >: Int] = ??? + val conv: Converted = jSubscriber + + + sealed trait Foo[A] + case class Bar[A](f: A) extends Foo[A] + object Extractor { + def unapply[A](f: Bar[_ >: A]): Option[A] = ??? + } + + type X = Int + val t: Foo[X] = ??? + t match { + case Extractor(f) => f + case _ => ??? + } + + + class F[+T] + class I[T] + def f1[U](f: I[F[U]]) = f + def f2[U](f: I[F[_ <: U]]) = f1(f) +} diff --git a/test/files/pos/t5579.scala b/test/files/pos/t5579.scala new file mode 100644 index 000000000000..18dcf3f6bb5c --- /dev/null +++ b/test/files/pos/t5579.scala @@ -0,0 +1,13 @@ +object Test { + class Result[+A] + case class Success[A](x: A) extends Result[A] + class Apply[A] + object Apply { + def apply[A](f: Int => A): Apply[A] = new Apply[A] + } + + def foo = Apply(i => i match { + case 1 => Success(Some(1)) + case _ => Success(None) + }) +} diff --git a/test/files/pos/t5604/ReplConfig.scala b/test/files/pos/t5604/ReplConfig.scala deleted file mode 100644 index 8c589eba6068..000000000000 --- a/test/files/pos/t5604/ReplConfig.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2011 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import util.Exceptional.unwrap -import util.stackTraceString - -trait ReplConfig { - lazy val replProps = new ReplProps - - class TapMaker[T](x: T) { - def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) - def tapDebug(msg: => 
String): T = tap(x => repldbg(parens(x))) - def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) - def tap[U](f: T => U): T = { - f(x) - x - } - } - - private def parens(x: Any) = "(" + x + ")" - private def echo(msg: => String) = - try Console println msg - catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) } - - private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg) - private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg) - private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg) - - private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = { - case t => - repldbg(label + ": " + unwrap(t)) - repltrace(stackTraceString(unwrap(t))) - alt - } - private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = - substituteAndLog("" + alt, alt)(body) - private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = { - try body - catch logAndDiscard(label, alt) - } - private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = - substituteAndLog(label, ())(body) - - def isReplTrace: Boolean = replProps.trace - def isReplDebug: Boolean = replProps.debug || isReplTrace - def isReplInfo: Boolean = replProps.info || isReplDebug - def isReplPower: Boolean = replProps.power -} diff --git a/test/files/pos/t5604/ReplReporter.scala b/test/files/pos/t5604/ReplReporter.scala deleted file mode 100644 index 130af990ad14..000000000000 --- a/test/files/pos/t5604/ReplReporter.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2002-2011 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import reporters._ -import IMain._ - -class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) { - override def printMessage(msg: String) { - // Avoiding deadlock if the compiler starts logging before - // the lazy 
val is complete. - if (intp.isInitializeComplete) { - if (intp.totalSilence) { - if (isReplTrace) - super.printMessage("[silent] " + msg) - } - else super.printMessage(msg) - } - else Console.println("[init] " + msg) - } - - override def displayPrompt() { - if (intp.totalSilence) () - else super.displayPrompt() - } -} diff --git a/test/files/pos/t5606.scala b/test/files/pos/t5606.scala index 2545271e32d8..8daffaf1e783 100644 --- a/test/files/pos/t5606.scala +++ b/test/files/pos/t5606.scala @@ -1,9 +1,9 @@ +// was: _ taken as ident of type param, now a fresh name +case class CaseTest[_](someData: String) +// was: _ already defined, now a fresh name +case class CaseTest2[_, _](someData: String) - - - - - - -case class CaseTest[_](someData:String) +class C { + def f[_](x: Int) = ??? +} diff --git a/test/files/pos/t5626.scala b/test/files/pos/t5626.scala index c501dfbe60c6..77a5ad229121 100644 --- a/test/files/pos/t5626.scala +++ b/test/files/pos/t5626.scala @@ -1,3 +1,4 @@ +//> using options -feature -Wconf:cat=feature-reflective-calls:s -Werror class C { val blob = { new { case class Foo() } @@ -6,7 +7,11 @@ class C { class Inner { case class Foo() } new Inner } + val z: Any { def x: X.type } = new { def x = X } val foo = blob.Foo() val bar = blub.Foo() + val baz = z.x() } + +case class X() diff --git a/test/files/pos/t5638/Among.java b/test/files/pos/t5638/Among.java new file mode 100644 index 000000000000..8ef56809b75c --- /dev/null +++ b/test/files/pos/t5638/Among.java @@ -0,0 +1,5 @@ +public class Among { + +/** class body */ + +}; \ No newline at end of file diff --git a/test/files/pos/t5638/Usage.scala b/test/files/pos/t5638/Usage.scala new file mode 100644 index 000000000000..9422073eda87 --- /dev/null +++ b/test/files/pos/t5638/Usage.scala @@ -0,0 +1,3 @@ +object Usage { + def among: Among = new Among +} diff --git a/test/files/pos/t5639/A_1.scala b/test/files/pos/t5639/A_1.scala index 4bbfcf908968..c5da10eae4f9 100644 --- a/test/files/pos/t5639/A_1.scala +++ 
b/test/files/pos/t5639/A_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xsource:2.12 import Implicits._ class Baz diff --git a/test/files/pos/t5639/A_2.scala b/test/files/pos/t5639/A_2.scala index 3659c20fecab..08ffe7e6d5b6 100644 --- a/test/files/pos/t5639/A_2.scala +++ b/test/files/pos/t5639/A_2.scala @@ -1,4 +1,3 @@ -// scalac: -Xsource:2.12 import Implicits._ class Baz @@ -10,3 +9,12 @@ object Test { object Implicits { implicit val Baz: Int = 0 } + +/* + * under -Xsource:2.11 + * + A_2.scala:6: error: could not find implicit value for parameter e: Int + implicitly[Int] + ^ +one error found + */ diff --git a/test/files/pos/t5683.scala b/test/files/pos/t5683.scala index cd340f805765..05ab03579274 100644 --- a/test/files/pos/t5683.scala +++ b/test/files/pos/t5683.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification object Test { trait NT[X] trait W[W, A] extends NT[Int] diff --git a/test/files/pos/t5692a/Macros_1.scala b/test/files/pos/t5692a/Macros_1.scala index f7c90dd3c0eb..a94daf7a4f30 100644 --- a/test/files/pos/t5692a/Macros_1.scala +++ b/test/files/pos/t5692a/Macros_1.scala @@ -1,8 +1,7 @@ -import scala.language.experimental.macros - import scala.reflect.macros.blackbox.Context +import language.experimental.macros object Macros { def impl[T](c: Context) = { import c.universe._; c.Expr[Unit](q"()") } - def foo[T] = macro impl[T] + def foo[T]: Unit = macro impl[T] } diff --git a/test/files/pos/t5692a/Test_2.scala b/test/files/pos/t5692a/Test_2.scala index 08d510cc6faa..72ecd95762a8 100644 --- a/test/files/pos/t5692a/Test_2.scala +++ b/test/files/pos/t5692a/Test_2.scala @@ -1,3 +1,3 @@ class Test { def x = Macros.foo -} \ No newline at end of file +} diff --git a/test/files/pos/t5692b/Macros_1.scala b/test/files/pos/t5692b/Macros_1.scala index b04a6a1e3c2b..cf02484e3aa6 100644 --- a/test/files/pos/t5692b/Macros_1.scala +++ b/test/files/pos/t5692b/Macros_1.scala @@ -1,8 +1,7 @@ -import scala.language.experimental.macros - import scala.reflect.macros.blackbox.Context 
+import language.experimental.macros object Macros { def impl[T, U](c: Context) = { import c.universe._; c.Expr[Unit](q"()") } - def foo[T, U] = macro impl[T, U] + def foo[T, U]: Unit = macro impl[T, U] } diff --git a/test/files/pos/t5692b/Test_2.scala b/test/files/pos/t5692b/Test_2.scala index 08d510cc6faa..72ecd95762a8 100644 --- a/test/files/pos/t5692b/Test_2.scala +++ b/test/files/pos/t5692b/Test_2.scala @@ -1,3 +1,3 @@ class Test { def x = Macros.foo -} \ No newline at end of file +} diff --git a/test/files/pos/t5692c.scala b/test/files/pos/t5692c.scala index fa5f0b2dcde6..f9a9c4aa552e 100644 --- a/test/files/pos/t5692c.scala +++ b/test/files/pos/t5692c.scala @@ -1,4 +1,4 @@ class C { def foo[T: scala.reflect.ClassTag](xs: T*): Array[T] = ??? foo() -} \ No newline at end of file +} diff --git a/test/files/pos/t5702-pos-infix-star.scala b/test/files/pos/t5702-pos-infix-star.scala index 756bcdd8de0a..9b2dfafcf653 100644 --- a/test/files/pos/t5702-pos-infix-star.scala +++ b/test/files/pos/t5702-pos-infix-star.scala @@ -4,7 +4,7 @@ object Test { type Star = * case class P(a: Int, b: Star) // alias still required - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val v = new *(6,7) val x * y = v printf("%d,%d\n",x,y) diff --git a/test/files/pos/t5703/Impl.scala b/test/files/pos/t5703/Impl.scala index ee22d8fb4bb0..f0120ef0b9a9 100644 --- a/test/files/pos/t5703/Impl.scala +++ b/test/files/pos/t5703/Impl.scala @@ -1,3 +1,3 @@ class Implementation extends Base[Object] { def func(params: Array[Object]): Unit = {} -} \ No newline at end of file +} diff --git a/test/files/pos/t5706.scala b/test/files/pos/t5706.scala index eb58e8412229..8ba3e40d9713 100644 --- a/test/files/pos/t5706.scala +++ b/test/files/pos/t5706.scala @@ -1,10 +1,10 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.{Context => BlackboxContext} import scala.reflect.macros.whitebox.{Context => WhiteboxContext} +import language.experimental.macros 
class Logger { - def error1(message: String) = macro Impls.error1 - def error2(message: String) = macro Impls.error2 + def error1(message: String): Unit = macro Impls.error1 + def error2(message: String): Unit = macro Impls.error2 } object Impls { diff --git a/test/files/pos/t5720-ownerous.scala b/test/files/pos/t5720-ownerous.scala index ad4d4c171d29..e171ce9c2a7e 100644 --- a/test/files/pos/t5720-ownerous.scala +++ b/test/files/pos/t5720-ownerous.scala @@ -40,7 +40,7 @@ class C { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val c = new C println(c.model.currentUser) println(c.model.message) diff --git a/test/files/pos/t5726.scala b/test/files/pos/t5726.scala index b28ebd86741a..1ef14ac790cb 100644 --- a/test/files/pos/t5726.scala +++ b/test/files/pos/t5726.scala @@ -2,13 +2,13 @@ import scala.language.dynamics class DynamicTest extends Dynamic { def selectDynamic(name: String) = s"value of $name" - def updateDynamic(name: String)(value: Any) { + def updateDynamic(name: String)(value: Any): Unit = { println(s"You have just updated property '$name' with value: $value") } } object MyApp extends App { - def testing() { + def testing(): Unit = { val test = new DynamicTest test.firstName = "John" } diff --git a/test/files/pos/t5727.scala b/test/files/pos/t5727.scala index e091d827b48e..2c6c0f3056a9 100644 --- a/test/files/pos/t5727.scala +++ b/test/files/pos/t5727.scala @@ -26,6 +26,6 @@ object Test { def rep[T](p: => Base[T]): Base[T] = null // whatever - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { } } diff --git a/test/files/pos/t5738.scala b/test/files/pos/t5738.scala index b8755ed66e61..ec2b08e0160f 100644 --- a/test/files/pos/t5738.scala +++ b/test/files/pos/t5738.scala @@ -5,4 +5,4 @@ object Test extends App { reify(a.toString + b) reify(a + b.toString) } -} \ No newline at end of file +} diff --git a/test/files/pos/t5742.scala b/test/files/pos/t5742.scala index 
3d3125b5d3b8..1cd73e0cb3b9 100644 --- a/test/files/pos/t5742.scala +++ b/test/files/pos/t5742.scala @@ -5,4 +5,4 @@ object Test extends App { val x1 = a val x2 = reify(a) } -} \ No newline at end of file +} diff --git a/test/files/pos/t5744/Macros_1.scala b/test/files/pos/t5744/Macros_1.scala index 6e2bf4825bd6..b7bcd2f50b69 100644 --- a/test/files/pos/t5744/Macros_1.scala +++ b/test/files/pos/t5744/Macros_1.scala @@ -2,8 +2,8 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { - def foo[U: Numeric](x: U) = macro foo_impl[U] - def bar[U: Numeric : Equiv, Y <% String](x: U)(implicit s: String) = macro bar_impl[U, Y] + def foo[U: Numeric](x: U): Unit = macro foo_impl[U] + def bar[U: Numeric : Equiv, Y <% String](x: U)(implicit s: String): Unit = macro bar_impl[U, Y] def foo_impl[U](c: Context)(x: c.Expr[U])(numeric: c.Expr[Numeric[U]]) = { import c.universe._ @@ -19,4 +19,4 @@ object Macros { val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusLen)) c.Expr[Unit](body) } -} \ No newline at end of file +} diff --git a/test/files/pos/t5744/Test_2.scala b/test/files/pos/t5744/Test_2.scala index 64b57e603280..dc3de03e42eb 100644 --- a/test/files/pos/t5744/Test_2.scala +++ b/test/files/pos/t5744/Test_2.scala @@ -3,4 +3,4 @@ object Test extends App { foo(42) implicit val s = "" bar(43) -} \ No newline at end of file +} diff --git a/test/files/pos/t5756.scala b/test/files/pos/t5756.scala index 45960fa8bd62..411f5b05df8e 100644 --- a/test/files/pos/t5756.scala +++ b/test/files/pos/t5756.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Test extends App { def tagme[T: TypeTag](x: T) = typeTag[T] val foo = tagme{object Bar; Bar} -} \ No newline at end of file +} diff --git a/test/files/pos/t5760-pkgobj-warn/stalepkg_1.scala b/test/files/pos/t5760-pkgobj-warn/stalepkg_1.scala index ed4b731bb06b..a0256f633bfa 100644 --- 
a/test/files/pos/t5760-pkgobj-warn/stalepkg_1.scala +++ b/test/files/pos/t5760-pkgobj-warn/stalepkg_1.scala @@ -5,7 +5,7 @@ package object stalepkg { package stalepkg { object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { } } } diff --git a/test/files/pos/t5760-pkgobj-warn/stalepkg_2.scala b/test/files/pos/t5760-pkgobj-warn/stalepkg_2.scala index 9abcdbab17d3..924bf35fa982 100644 --- a/test/files/pos/t5760-pkgobj-warn/stalepkg_2.scala +++ b/test/files/pos/t5760-pkgobj-warn/stalepkg_2.scala @@ -5,7 +5,7 @@ package object stalepkg { package stalepkg { class Foo object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { } } } diff --git a/test/files/pos/t5769.scala b/test/files/pos/t5769.scala index fdc46b65e917..d7ec23a56524 100644 --- a/test/files/pos/t5769.scala +++ b/test/files/pos/t5769.scala @@ -6,4 +6,4 @@ class A { def f1 = classTag[Array[Int]] def f2 = classTag[AI] -} \ No newline at end of file +} diff --git a/test/files/pos/t5777.scala b/test/files/pos/t5777.scala index 24cea3616346..0d703b5fcce0 100644 --- a/test/files/pos/t5777.scala +++ b/test/files/pos/t5777.scala @@ -23,7 +23,7 @@ object MyApp { val r1 = new Poly[BigInt.type](BigInt) (null.asInstanceOf[BigInt.E] : r1.ring.E) - // Oddly, -Xprint:typer reports that r and r1 have the same inferred type. + // Oddly, -Vprint:typer reports that r and r1 have the same inferred type. 
// // private[this] val r: Poly[BigInt.type] = new Poly[BigInt.type](BigInt); // def r: Poly[BigInt.type] = MyApp.this.r; diff --git a/test/files/pos/t5779-numeq-warn.scala b/test/files/pos/t5779-numeq-warn.scala index 76ef2970fd53..c480ed0b3e6c 100644 --- a/test/files/pos/t5779-numeq-warn.scala +++ b/test/files/pos/t5779-numeq-warn.scala @@ -1,11 +1,11 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val d: Double = (BigInt(1) << 64).toDouble val f: Float = d.toFloat val n: java.lang.Number = d.toFloat assert (d == f) // ok - assert (d == n) // was: comparing values of types Double and Number using `==' will always yield false + assert (d == n) // was: comparing values of types Double and Number using `==` will always yield false assert (n == d) // was: Number and Double are unrelated: they will most likely never compare equal assert (f == n) assert (n == f) diff --git a/test/files/pos/t5796.scala b/test/files/pos/t5796.scala index d05350c53513..72ca89f7a4e0 100644 --- a/test/files/pos/t5796.scala +++ b/test/files/pos/t5796.scala @@ -1,5 +1,5 @@ object Bug { - def foo() { + def foo(): Unit = { val v = { lazy val s = 0 s diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala index c5235b34e7a6..ef41fd0db6e8 100644 --- a/test/files/pos/t5809.scala +++ b/test/files/pos/t5809.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Xlint -Xfatal-warnings +// package object foo { implicit class EnrichedInt(foo: Int) { def bar = ??? 
diff --git a/test/files/pos/t5818.scala b/test/files/pos/t5818.scala index dba5479e9823..cdea6899b8e7 100644 --- a/test/files/pos/t5818.scala +++ b/test/files/pos/t5818.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 +// abstract class Abstract { type TypeMember val member: TypeMember diff --git a/test/files/pos/t5829.scala b/test/files/pos/t5829.scala index 236045ed117f..84b450ab31f7 100644 --- a/test/files/pos/t5829.scala +++ b/test/files/pos/t5829.scala @@ -15,4 +15,4 @@ object Test extends App { def select: Select = ??? def ident: Ident = ??? List(select, ident) -} \ No newline at end of file +} diff --git a/test/files/pos/t5862.scala b/test/files/pos/t5862.scala index e3006ddc3f7a..013f387c38e2 100644 --- a/test/files/pos/t5862.scala +++ b/test/files/pos/t5862.scala @@ -17,7 +17,7 @@ abstract class TaggedMapper[A, K, V] extends Serializable { } -/** Type-class for sending types across the Hadoop wire. */ +/** Type class for sending types across the Hadoop wire. */ trait WireFormat[A] class MapReduceJob { diff --git a/test/files/pos/t5886.scala b/test/files/pos/t5886.scala index 066187322de8..59245cab226d 100644 --- a/test/files/pos/t5886.scala +++ b/test/files/pos/t5886.scala @@ -5,7 +5,7 @@ object A { f0(this.getClass) // ok f1(this.getClass) - f2(this.getClass) // ok + f2(() => this.getClass()) // ok // a.scala:7: error: type mismatch; // found : Class[_ <: A.type] diff --git a/test/files/pos/t5887.scala b/test/files/pos/t5887.scala new file mode 100644 index 000000000000..5952255acb4f --- /dev/null +++ b/test/files/pos/t5887.scala @@ -0,0 +1,26 @@ + +trait TheOldCollegeTry { + def f = try (1) + 1 finally () + + def g = try { 1 } + 1 finally () + + type H[A] = PartialFunction[Throwable, A] + + val myh: H[Int] = { case _: NullPointerException => ??? ; case t => 42 } + def h = try ??? catch myh finally () + + // a little weird + def pf: H[Nothing] = try { case _: Throwable => ??? 
} finally () + + // but not weirder than + def tf = try (t: Throwable) => throw t finally () + + // old jedi mind trick +//badcatch.scala:14: error: recursive value catchExpr1 needs type +// try {} catch catchExpr1 +// ^ + def badcatch = { + def catchExpr1: PartialFunction[Throwable, Any] = ??? + try {} catch catchExpr1 + } +} diff --git a/test/files/pos/t5892.scala b/test/files/pos/t5892.scala index 241e59860abd..26dd1f73da9a 100644 --- a/test/files/pos/t5892.scala +++ b/test/files/pos/t5892.scala @@ -1,5 +1,5 @@ class foo(a: String) extends annotation.StaticAnnotation object o { implicit def i2s(i: Int) = "" - @foo(1: String) def blerg { } + @foo(1: String) def blerg: Unit = { } } diff --git a/test/files/pos/t5897.scala b/test/files/pos/t5897.scala index 2e4afe61ada9..61599eff22e9 100644 --- a/test/files/pos/t5897.scala +++ b/test/files/pos/t5897.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// // no warning here // (strangely, if there's an unreachable code warning *anywhere in this compilation unit*, // the non-sensical warning goes away under -Xfatal-warnings) diff --git a/test/files/pos/t5899.scala b/test/files/pos/t5899.scala index 92bb85186cd8..b94f845605a6 100644 --- a/test/files/pos/t5899.scala +++ b/test/files/pos/t5899.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// import scala.tools.nsc._ trait Foo { diff --git a/test/files/pos/t5930.scala b/test/files/pos/t5930.scala index 845ac9a3bfe2..0abbcaf97091 100644 --- a/test/files/pos/t5930.scala +++ b/test/files/pos/t5930.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-dead-code -Xfatal-warnings +//> using options -Ywarn-dead-code -Xfatal-warnings +// // should not warn about dead code (`matchEnd(throw new MatchError)`) class Test { 0 match { case x: Int => } diff --git a/test/files/pos/t5932.scala b/test/files/pos/t5932.scala index 78a29e2d6079..904ef3708c76 100644 --- a/test/files/pos/t5932.scala +++ b/test/files/pos/t5932.scala 
@@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class A case object B extends A diff --git a/test/files/pos/t5946.scala b/test/files/pos/t5946.scala new file mode 100644 index 000000000000..b8019d4c94dc --- /dev/null +++ b/test/files/pos/t5946.scala @@ -0,0 +1,9 @@ +// +// +object TestDep { + class Ops(val g: scala.reflect.api.JavaUniverse) { + def op[T: g.TypeTag] = () + } + implicit def Ops(g: scala.reflect.api.JavaUniverse): Ops = new Ops(g) + scala.reflect.runtime.universe.op[Int] +} diff --git a/test/files/pos/t5953.scala b/test/files/pos/t5953.scala index 7ba035ec3bca..84e2243d1d6d 100644 --- a/test/files/pos/t5953.scala +++ b/test/files/pos/t5953.scala @@ -1,16 +1,19 @@ -import scala.collection.{ mutable, immutable, generic, GenTraversableOnce } +trait CBF[-F, -A, +C] +trait GenTraversable[+A] +trait Traversable[+A] extends GenTraversable[A] +trait Vector[A] extends Traversable[A] +object Vector { + implicit def cbf[A]: CBF[Vector[_], A, Vector[A]] = ??? +} package object foo { - @inline implicit class TravOps[A, CC[A] <: GenTraversableOnce[A]](val coll: CC[A]) extends AnyVal { - def build[CC2[X]](implicit cbf: generic.CanBuildFrom[Nothing, A, CC2[A]]): CC2[A] = { - cbf() ++= coll.toIterator result - } + @inline implicit class TravOps[A, CC[A] <: GenTraversable[A]](val coll: CC[A]) extends AnyVal { + def build[CC2[X]](implicit cbf: CBF[Nothing, A, CC2[A]]): CC2[A] = ??? 
} } package foo { object Test { - def f1[T](xs: Traversable[T]) = xs.to[immutable.Vector] - def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector] + def f2[T](xs: Traversable[T]) = xs.build[Vector] } } diff --git a/test/files/pos/t5954c/A_1.scala b/test/files/pos/t5954c/A_1.scala index f7d2b98074b3..f61fb978a569 100644 --- a/test/files/pos/t5954c/A_1.scala +++ b/test/files/pos/t5954c/A_1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror package object A { // these used to should be prevented by the implementation restriction // but are now allowed diff --git a/test/files/pos/t5954c/B_2.scala b/test/files/pos/t5954c/B_2.scala index f7d2b98074b3..f61fb978a569 100644 --- a/test/files/pos/t5954c/B_2.scala +++ b/test/files/pos/t5954c/B_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror package object A { // these used to should be prevented by the implementation restriction // but are now allowed diff --git a/test/files/pos/t5954d/A_1.scala b/test/files/pos/t5954d/A_1.scala index ea096f9c8877..cfd32660649a 100644 --- a/test/files/pos/t5954d/A_1.scala +++ b/test/files/pos/t5954d/A_1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xdev +//> using options -Werror -Xdev package p { package object base { class B diff --git a/test/files/pos/t5954d/B_2.scala b/test/files/pos/t5954d/B_2.scala index 6ee13ab07dfe..d89c7ceef104 100644 --- a/test/files/pos/t5954d/B_2.scala +++ b/test/files/pos/t5954d/B_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -Xdev +//> using options -Werror -Xdev package p { trait T { class B diff --git a/test/files/pos/t5958.scala b/test/files/pos/t5958.scala index 3b910f3633f4..3e078d2aaea9 100644 --- a/test/files/pos/t5958.scala +++ b/test/files/pos/t5958.scala @@ -12,4 +12,4 @@ class Test { val u = this newComponent(u): u.Component // ok } -} \ No newline at end of file +} diff --git a/test/files/pos/t5968.scala b/test/files/pos/t5968.scala index da5f05da5685..ac59997ee294 100644 --- 
a/test/files/pos/t5968.scala +++ b/test/files/pos/t5968.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object X { def f(e: Either[Int, X.type]) = e match { case Left(i) => i diff --git a/test/files/pos/t6008.scala b/test/files/pos/t6008.scala index 737941095c01..447d0a0ce714 100644 --- a/test/files/pos/t6008.scala +++ b/test/files/pos/t6008.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// // none of these should complain about exhaustivity class Test { // It would fail on the following inputs: (_, false), (_, true) diff --git a/test/files/pos/t6014.scala b/test/files/pos/t6014.scala index 46e03bb55268..26e258a27ff8 100644 --- a/test/files/pos/t6014.scala +++ b/test/files/pos/t6014.scala @@ -10,4 +10,4 @@ object Test { // def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails // ^ // one error found -} \ No newline at end of file +} diff --git a/test/files/pos/t6022.scala b/test/files/pos/t6022.scala index 07d34271c23a..a28bb7dabdab 100644 --- a/test/files/pos/t6022.scala +++ b/test/files/pos/t6022.scala @@ -1,8 +1,10 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class Test { (null: Any) match { case x: AnyRef if false => case list: Option[_] => case product: Product => // change Product to String and it's all good + case x => throw new MatchError(x) } } diff --git a/test/files/pos/t6040.scala b/test/files/pos/t6040.scala index 9c00ecd8e16a..7a5b2daedb90 100644 --- a/test/files/pos/t6040.scala +++ b/test/files/pos/t6040.scala @@ -1,3 +1,3 @@ import language.dynamics -class X extends Dynamic \ No newline at end of file +class X extends Dynamic diff --git a/test/files/pos/t6047.scala b/test/files/pos/t6047.scala index 0960c8b02515..2f35c303c9bd 100644 --- a/test/files/pos/t6047.scala +++ b/test/files/pos/t6047.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import 
scala.reflect.macros.blackbox.Context import java.io.InputStream diff --git a/test/files/pos/t6082.scala b/test/files/pos/t6082.scala new file mode 100644 index 000000000000..b9c96b38f957 --- /dev/null +++ b/test/files/pos/t6082.scala @@ -0,0 +1,2 @@ +class annot(notValue: String) extends annotation.ConstantAnnotation +@annot("") class C diff --git a/test/files/pos/t6089b.scala b/test/files/pos/t6089b.scala index 03a0c1e1c9e9..a228f4898f61 100644 --- a/test/files/pos/t6089b.scala +++ b/test/files/pos/t6089b.scala @@ -15,4 +15,4 @@ class BKTree { } object BKTreeEmpty extends BKTree -case class BKTreeNode[A](v: A) extends BKTree \ No newline at end of file +case class BKTreeNode[A](v: A) extends BKTree diff --git a/test/files/pos/t6091.scala b/test/files/pos/t6091.scala index ecc32bf64b3a..8ce694e643c1 100644 --- a/test/files/pos/t6091.scala +++ b/test/files/pos/t6091.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Xlint +//> using options -Xfatal-warnings -Xlint +// object Foo { def eq(x: Int) = x } class X { def ==(other: String) = other.nonEmpty } diff --git a/test/files/pos/t611.scala b/test/files/pos/t611.scala index 40ad28db4b19..de4e8992eded 100644 --- a/test/files/pos/t611.scala +++ b/test/files/pos/t611.scala @@ -19,7 +19,7 @@ case class StringField(value: String) extends Field { } object Test { - def main (args: scala.Array[String]) { + def main (args: scala.Array[String]): Unit = { Console.println(List(new StringField ("bar"), new IntField(8))) } } diff --git a/test/files/pos/t6123-explaintypes-implicits.scala b/test/files/pos/t6123-explaintypes-implicits.scala index 2268e3f3ca6c..38ba0cae3328 100644 --- a/test/files/pos/t6123-explaintypes-implicits.scala +++ b/test/files/pos/t6123-explaintypes-implicits.scala @@ -1,4 +1,5 @@ -// scalac: -explaintypes +//> using options -explaintypes +// object ImplicitBugReport { trait Exp[+T] trait CanBuildExp[-Elem, +To] extends (Exp[Elem] => To) @@ -8,7 +9,7 @@ object ImplicitBugReport { implicit def 
canBuildExpTrav[T, ExpT <: Exp[T]](implicit c: CanBuildExp[T, ExpT]): CanBuildExp[Traversable[T], TraversableExp[T, ExpT]] = ??? def toExpTempl[T, That](t: T)(implicit c: CanBuildExp[T, That]): That = ??? - def testBug() { + def testBug(): Unit = { val a1 = toExpTempl(Seq(1, 2, 3, 5)) } } diff --git a/test/files/pos/t6124.scala b/test/files/pos/t6124.scala new file mode 100644 index 000000000000..1359803277a6 --- /dev/null +++ b/test/files/pos/t6124.scala @@ -0,0 +1,11 @@ + +trait T { + def i: Int = 1_024 + def j: Long = 1_024L * 1_024 + //def k = 1'024 + + def f = 3_14e-2 + def d = 3_14E-2_1 + + def z = 0 +} diff --git a/test/files/pos/t6145.scala b/test/files/pos/t6145.scala index 28334d4420b4..4161a24b5698 100644 --- a/test/files/pos/t6145.scala +++ b/test/files/pos/t6145.scala @@ -8,4 +8,4 @@ object Test { searchClass } } -} \ No newline at end of file +} diff --git a/test/files/pos/t6146.scala b/test/files/pos/t6146.scala index 9f20a6b28e73..9c7d37b5fef2 100644 --- a/test/files/pos/t6146.scala +++ b/test/files/pos/t6146.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// // No unreachable or exhaustiveness warnings, please. // diff --git a/test/files/pos/t6159.scala b/test/files/pos/t6159.scala new file mode 100644 index 000000000000..af68e1ca3c52 --- /dev/null +++ b/test/files/pos/t6159.scala @@ -0,0 +1,11 @@ +//> using options -Werror +trait A { + sealed abstract class X + private class X1 extends X with X2 { } + private trait X2 extends X + sealed trait X3 extends X + + def f(x: X) = x match { + case _: X1 => 0 + } +} diff --git a/test/files/pos/t6162-inheritance.scala b/test/files/pos/t6162-inheritance.scala index 50cc1079fa9f..e0662833836f 100644 --- a/test/files/pos/t6162-inheritance.scala +++ b/test/files/pos/t6162-inheritance.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// package scala.t6126 // Don't warn about inheritance in the same file. 
diff --git a/test/files/pos/t6169/skinnable.scala b/test/files/pos/t6169/skinnable.scala index 3ba273452605..08204f29d8ee 100644 --- a/test/files/pos/t6169/skinnable.scala +++ b/test/files/pos/t6169/skinnable.scala @@ -11,4 +11,4 @@ trait TestWildcardBoundInference { def skinCheckInference = delegate.skinProperty skinCheckInference: ObjectProperty[Skin[_ <: Skinnable]] -} \ No newline at end of file +} diff --git a/test/files/pos/t6169/t6169.scala b/test/files/pos/t6169/t6169.scala index e97e1c8afc9c..d35808c5d154 100644 --- a/test/files/pos/t6169/t6169.scala +++ b/test/files/pos/t6169/t6169.scala @@ -4,4 +4,4 @@ class Test { def stringy: Exist[_ <: String] = (new Exist[String]).foo def fbounded: (ExistF[t] forSome {type t <: ExistF[t] }) = (new MyExist).foo def indir: ExistIndir[_ <: String, _ <: String] = (new ExistIndir[String, String]).foo -} \ No newline at end of file +} diff --git a/test/files/pos/t6184.scala b/test/files/pos/t6184.scala index 83a1306acaf5..386399963e30 100644 --- a/test/files/pos/t6184.scala +++ b/test/files/pos/t6184.scala @@ -4,4 +4,4 @@ trait Foo[TroubleSome] { this match { case e: Foo[_]#T => ??? 
} -} \ No newline at end of file +} diff --git a/test/files/pos/t6201.scala b/test/files/pos/t6201.scala index d4e5bce03a74..9d6667b6fdde 100644 --- a/test/files/pos/t6201.scala +++ b/test/files/pos/t6201.scala @@ -16,4 +16,4 @@ class Test { implicit def toFoo2(s: Elem) = new Foo2() def is: Unit = { (elem) } -} \ No newline at end of file +} diff --git a/test/files/pos/t6204-a.scala b/test/files/pos/t6204-a.scala index bd8d5c437eb6..dbc1144b42d9 100644 --- a/test/files/pos/t6204-a.scala +++ b/test/files/pos/t6204-a.scala @@ -1,9 +1,9 @@ import scala.reflect.runtime.universe._ object Bish { - def m { + def m: Unit = { object Bash { typeOf[Option[_]] } } -} \ No newline at end of file +} diff --git a/test/files/pos/t6204-b.scala b/test/files/pos/t6204-b.scala index 86094d1a1904..62f2b5d5c533 100644 --- a/test/files/pos/t6204-b.scala +++ b/test/files/pos/t6204-b.scala @@ -1,10 +1,10 @@ import scala.reflect.runtime.universe._ object Bosh { - def Besh { + def Besh: Unit = { new { val t = typeOf[Option[_]] val x = t } } -} \ No newline at end of file +} diff --git a/test/files/pos/t6205.scala b/test/files/pos/t6205.scala index 02d924fe85cf..52078bd5f46f 100644 --- a/test/files/pos/t6205.scala +++ b/test/files/pos/t6205.scala @@ -15,4 +15,4 @@ class Test2 { = backing.map(x => x match {case Holder(k: A[kt]) => (k: A[kt])} ) -} \ No newline at end of file +} diff --git a/test/files/pos/t6210.scala b/test/files/pos/t6210.scala index 56108f8fcfd1..44ae23511dcb 100644 --- a/test/files/pos/t6210.scala +++ b/test/files/pos/t6210.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// abstract sealed trait AST abstract sealed trait AExpr extends AST case class AAssign(name: String, v: AExpr) extends AExpr diff --git a/test/files/pos/t6217.scala b/test/files/pos/t6217.scala index 45b19c6138c9..8695bf90b2ea 100644 --- a/test/files/pos/t6217.scala +++ b/test/files/pos/t6217.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options 
-Xfatal-warnings package p { package _root_ { package scala { diff --git a/test/files/pos/t6221.scala b/test/files/pos/t6221.scala index 34f02859f38f..bb2f1dde78ff 100644 --- a/test/files/pos/t6221.scala +++ b/test/files/pos/t6221.scala @@ -11,7 +11,7 @@ object Test { implicit def otherFuncToMyFunc[A, B](f: OtherFunc[A, B]): MyFunc[A, B] = new MyFunc // = new MyFunc[A,Nothing](); - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val col = new MyCollection[Int] // Doesn't compile: error: missing parameter type for expanded function ((x$1) => x$1.toString) @@ -30,4 +30,4 @@ object Test { println(col.map(new OtherFunc)) // scala.this.Predef.println(col.map[Nothing](Test.this.otherFuncToMyFunc[Any, Nothing](new OtherFunc[Any,Nothing]()))) } -} \ No newline at end of file +} diff --git a/test/files/pos/t6245/Foo.scala b/test/files/pos/t6245/Foo.scala index f5f997fbff85..ea7a99e12acf 100644 --- a/test/files/pos/t6245/Foo.scala +++ b/test/files/pos/t6245/Foo.scala @@ -2,7 +2,7 @@ import t1.Vis abstract class Foo extends t1.Base { trait Nested { - def crash() { + def crash(): Unit = { inner } } diff --git a/test/files/pos/t6260.scala b/test/files/pos/t6260.scala index 1bab9c021fda..b09668f825ea 100644 --- a/test/files/pos/t6260.scala +++ b/test/files/pos/t6260.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:inline +//> using options -Ydelambdafy:inline +// class Box[X](val x: X) extends AnyVal { def map[Y](f: X => Y): Box[Y] = ((bx: Box[X]) => new Box(f(bx.x)))(this) @@ -8,7 +9,7 @@ object Test { def map2[X, Y](self: Box[X], f: X => Y): Box[Y] = ((bx: Box[X]) => new Box(f(bx.x)))(self) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val f = (x: Int) => x + 1 val g = (x: String) => x + x diff --git a/test/files/neg/t6263.scala b/test/files/pos/t6263.scala similarity index 100% rename from test/files/neg/t6263.scala rename to test/files/pos/t6263.scala diff --git a/test/files/pos/t6275.scala b/test/files/pos/t6275.scala 
index cd59c5d5ee5b..1ba9fac14880 100644 --- a/test/files/pos/t6275.scala +++ b/test/files/pos/t6275.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed trait A[T] final class B[T] extends A[T] diff --git a/test/files/pos/t6278-synth-def.scala b/test/files/pos/t6278-synth-def.scala index b8b660fbe30a..37eee8deb6b8 100644 --- a/test/files/pos/t6278-synth-def.scala +++ b/test/files/pos/t6278-synth-def.scala @@ -4,9 +4,9 @@ package t6278 import language.implicitConversions object test { - def ok() { + def ok(): Unit = { class Foo(val i: Int) { - def foo[A](body: =>A): A = body + def foo[A](body: => A): A = body } implicit def toFoo(i: Int): Foo = new Foo(i) @@ -14,9 +14,9 @@ object test { k foo println("k?") val j = 2 } - def nope() { + def nope(): Unit = { implicit class Foo(val i: Int) { - def foo[A](body: =>A): A = body + def foo[A](body: => A): A = body } val k = 1 @@ -24,7 +24,7 @@ object test { //lazy val j = 2 } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { ok(); nope() } } diff --git a/test/files/pos/t6386.scala b/test/files/pos/t6386.scala index 85098a78f069..4031ae26725e 100644 --- a/test/files/pos/t6386.scala +++ b/test/files/pos/t6386.scala @@ -2,4 +2,4 @@ import scala.reflect.runtime.universe._ object Test extends App { reify(manifest[Some[_]]) -} \ No newline at end of file +} diff --git a/test/files/pos/t6450.scala b/test/files/pos/t6450.scala new file mode 100644 index 000000000000..39aa6307f9f4 --- /dev/null +++ b/test/files/pos/t6450.scala @@ -0,0 +1,55 @@ +//> using options -Werror +object ExhaustivityWarnBugReportMinimal { + //sealed is needed for the warning + sealed trait FoundNode[T]/*presence of parameters is irrelevant*/ + // This also causes a warning: + // sealed abstract class FoundNode[T]/*presence of parameters is irrelevant*/ + case class FoundFilter[T](/*presence of parameters is irrelevant*/) extends FoundNode[T] + case class FoundTypeCase[T](/*presence of 
parameters is irrelevant*/) extends FoundNode[T] + val f: Some[_] = ??? + f match { + case x: Some[t] => //no warning + } + //With these variants, no warnings: + //val v: (Some[Int], FoundNode[_]) = (???, ???) + //val v: (Some[AnyRef], FoundNode[_]) = (???, ???) + //val v: (Some[String], FoundNode[_]) = (???, ???) + + val v: (Some[_], FoundNode[_]) = (???, ???) + //Warning here: + v match { // was: "It would fail on the following inputs: (_, FoundFilter()), (_, FoundTypeCase())" + case (x: Some[t], _: FoundNode[_]) => + } + v match { // was: "It would fail on the following input: (_, _)" + case (x: Some[t], _) => + } + + v match { // was: "It would fail on the following input: (_, _)" + case (x: Some[_], _) => + } + case class Foo[T]() + + val vp: (Foo[_], FoundNode[_]) = (???, ???) + vp match { // was: "It would fail on the following input: (_, _)" + case (x: Foo[_], _) => + } + + //No warning here: + v match { + case (Some(y), _) => + } + + v match { + case (x, _) => + } + + val v2: (Some[_], Int) = (???, ???) + v2 match { + case (x: Some[t], _) => + } + + val v3: (Option[_], FoundNode[_]) = (???, ???) 
+ v match { + case (x: Option[_], _) => + } +} diff --git a/test/files/pos/t6479.scala b/test/files/pos/t6479.scala index c463bc5ab0e6..e4a4ff6011ae 100644 --- a/test/files/pos/t6479.scala +++ b/test/files/pos/t6479.scala @@ -1,7 +1,7 @@ object TailrecAfterTryCatch { @annotation.tailrec - final def good1() { + final def good1(): Unit = { 1 match { case 2 => { try { @@ -15,7 +15,7 @@ object TailrecAfterTryCatch { } @annotation.tailrec - final def good2() { + final def good2(): Unit = { //1 match { // case 2 => { try { @@ -29,7 +29,7 @@ object TailrecAfterTryCatch { } @annotation.tailrec - final def good3() { + final def good3(): Unit = { val 1 = 2 try { return @@ -40,7 +40,7 @@ object TailrecAfterTryCatch { } @annotation.tailrec - final def bad() { + final def bad(): Unit = { 1 match { case 2 => { try { @@ -53,4 +53,4 @@ object TailrecAfterTryCatch { } } -} \ No newline at end of file +} diff --git a/test/files/pos/t6482.scala b/test/files/pos/t6482.scala index 24ea38e519a4..8efbc57333f0 100644 --- a/test/files/pos/t6482.scala +++ b/test/files/pos/t6482.scala @@ -1,4 +1,4 @@ -final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal { +final class IterableOnceOps[+A](val collection: Iterable[A]) extends AnyVal { def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = if (collection.isEmpty) None else Some(collection.reduceLeft[B](op)) } diff --git a/test/files/pos/t6485a/Macros_1.scala b/test/files/pos/t6485a/Macros_1.scala index 570c9877096f..cc7dc3d3e7e2 100644 --- a/test/files/pos/t6485a/Macros_1.scala +++ b/test/files/pos/t6485a/Macros_1.scala @@ -2,4 +2,4 @@ import scala.reflect.macros.blackbox.Context object Macros { def crash(c: Context): c.Expr[Unit] = c.universe.reify(()) -} \ No newline at end of file +} diff --git a/test/files/pos/t6485a/Test_2.scala b/test/files/pos/t6485a/Test_2.scala index 54e260ac744d..5c7db8a948f3 100644 --- a/test/files/pos/t6485a/Test_2.scala +++ b/test/files/pos/t6485a/Test_2.scala @@ -1,5 +1,5 @@ 
import scala.language.experimental.macros final class Ops[T](val x: T) extends AnyVal { - def f = macro Macros.crash + def f: Unit = macro Macros.crash } diff --git a/test/files/pos/t6485b/Test.scala b/test/files/pos/t6485b/Test.scala index 3b81c6f8abb9..f693139e1672 100644 --- a/test/files/pos/t6485b/Test.scala +++ b/test/files/pos/t6485b/Test.scala @@ -2,9 +2,9 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context final class Ops[T](val x: T) extends AnyVal { - def f = macro Macros.crash + def f: Unit = macro Macros.crash } object Macros { def crash(c: Context): c.Expr[Unit] = c.universe.reify(()) -} \ No newline at end of file +} diff --git a/test/files/pos/t6516.scala b/test/files/pos/t6516.scala index 2980d83eb676..ac03239ea132 100644 --- a/test/files/pos/t6516.scala +++ b/test/files/pos/t6516.scala @@ -1,19 +1,19 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context -import scala.collection.TraversableLike +import scala.collection.IterableOps // This one compiles object Test { - type Alias[T, CC[_]] = Context { type PrefixType = TraversableLike[T, CC[T]] } - def f() = macro f_impl + type Alias[T, CC[_]] = Context { type PrefixType = IterableOps[T, CC, CC[T]] } + def f(): Nothing = macro f_impl def f_impl(c: Alias[Int, List])() = ??? } // This one doesn't object Test2 { type Ctx = scala.reflect.macros.blackbox.Context - type Alias[T, CC[_]] = Ctx { type PrefixType = TraversableLike[T, CC[T]] } + type Alias[T, CC[_]] = Ctx { type PrefixType = IterableOps[T, CC, CC[T]] } - def f() = macro f_impl + def f(): Nothing = macro f_impl def f_impl(c: Alias[Int, List])() = ??? 
} diff --git a/test/files/pos/t6537.scala b/test/files/pos/t6537.scala index ce159f000d02..0d5712b54526 100644 --- a/test/files/pos/t6537.scala +++ b/test/files/pos/t6537.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// package tester object PatMatWarning { diff --git a/test/files/pos/t6551.scala b/test/files/pos/t6551.scala index 8bb396a19f4d..ada4bea44ddb 100644 --- a/test/files/pos/t6551.scala +++ b/test/files/pos/t6551.scala @@ -1,7 +1,7 @@ import scala.language.dynamics object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { class Lenser[T] extends Dynamic { def selectDynamic(propName: String) = ??? } diff --git a/test/files/pos/t6562.scala b/test/files/pos/t6562.scala index eec7aa5199f0..600234a297ea 100644 --- a/test/files/pos/t6562.scala +++ b/test/files/pos/t6562.scala @@ -1,13 +1,13 @@ class Test { @inline - def foo { + def foo: Unit = { def it = new {} (_: Any) => it } @inline - private def bar { + private def bar: Unit = { def it = new {} (_: Any) => it } diff --git a/test/files/pos/t6574.scala b/test/files/pos/t6574.scala index 28b62f205cad..53bd0336f274 100644 --- a/test/files/pos/t6574.scala +++ b/test/files/pos/t6574.scala @@ -1,18 +1,27 @@ class Bad[X, Y](val v: Int) extends AnyVal { def vv = v - @annotation.tailrec final def foo[Z](a: Int)(b: String) { + @annotation.tailrec final def foo[Z](a: Int)(b: String): Unit = { this.foo[Z](a)(b) } - @annotation.tailrec final def differentReceiver { + @annotation.tailrec final def differentReceiver: Unit = { {(); new Bad[X, Y](0)}.differentReceiver } - @annotation.tailrec final def dependent[Z](a: Int)(b: String): b.type = { + // The original test case fails with the new is/asInstanceOf semantics + // introduced along with along with SIP-23 implementation because the method has a + // singleton result type which cannot be erased correctly. 
+ // See: neg/sip23-tailrec-singleton.scala + //@annotation.tailrec final def dependent[Z](a: Int)(b: String): b.type = { + // this.dependent[Z](a)(b) + //} + + // Replacement test case + @annotation.tailrec final def dependent[Z](a: Int)(b: String): Option[b.type] = { this.dependent[Z](a)(b) } - @annotation.tailrec final def differentTypeArgs { + @annotation.tailrec final def differentTypeArgs: Unit = { {(); new Bad[String, Unit](0)}.differentTypeArgs } } diff --git a/test/files/pos/t6600.scala b/test/files/pos/t6600.scala index 1e8137894cd6..c0cc6677d3db 100644 --- a/test/files/pos/t6600.scala +++ b/test/files/pos/t6600.scala @@ -1,8 +1,8 @@ final class Natural extends scala.math.ScalaNumber with scala.math.ScalaNumericConversions { - def intValue(): Int = 0 - def longValue(): Long = 0L - def floatValue(): Float = 0.0F - def doubleValue(): Double = 0.0D - def isWhole(): Boolean = false - def underlying() = this + def intValue: Int = 0 + def longValue: Long = 0L + def floatValue: Float = 0.0F + def doubleValue: Double = 0.0D + def isWhole: Boolean = false + def underlying = this } diff --git a/test/files/pos/t6601/PrivateValueClass_1.scala b/test/files/pos/t6601/PrivateValueClass_1.scala index 85c368713784..dc0137420e51 100644 --- a/test/files/pos/t6601/PrivateValueClass_1.scala +++ b/test/files/pos/t6601/PrivateValueClass_1.scala @@ -1 +1 @@ -class V private (val a: Any) extends AnyVal \ No newline at end of file +class V private (val a: Any) extends AnyVal diff --git a/test/files/pos/t6610.scala b/test/files/pos/t6610.scala new file mode 100644 index 000000000000..92ff3b697380 --- /dev/null +++ b/test/files/pos/t6610.scala @@ -0,0 +1,9 @@ +object Test { + trait A[T[C], @specialized(Int) C] { + def f(x: T[C]): T[C] + } + + trait B[T[C], @specialized(Int) C] extends A[T, C] { + def f(x: T[C]) = x + } +} diff --git a/test/files/pos/t6624.scala b/test/files/pos/t6624.scala index 44554c59c724..43bc565cb545 100644 --- a/test/files/pos/t6624.scala +++ 
b/test/files/pos/t6624.scala @@ -25,4 +25,4 @@ object Test { klist match { case KCons(_) => } -} \ No newline at end of file +} diff --git a/test/files/pos/t6648.scala b/test/files/pos/t6648.scala index b8f24870cc70..0accb0f06aa4 100644 --- a/test/files/pos/t6648.scala +++ b/test/files/pos/t6648.scala @@ -2,7 +2,7 @@ abstract class Node extends NodeSeq trait NodeSeq extends Seq[Node] object NodeSeq { implicit def seqToNodeSeq(ns: Seq[Node]): NodeSeq = ??? - def foo[B, That](f: Seq[B])(implicit bf: scala.collection.generic.CanBuildFrom[Seq[Int], B, That]): That = ??? + def foo[B, That](f: Seq[B])(implicit bf: scala.collection.BuildFrom[Seq[Int], B, That]): That = ??? } class Transformer { diff --git a/test/files/pos/t6675.scala b/test/files/pos/t6675.scala index d0d863292037..bd9624ff004c 100644 --- a/test/files/pos/t6675.scala +++ b/test/files/pos/t6675.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using options -deprecation -Xfatal-warnings +// object LeftOrRight { def unapply[A](value: Either[A, A]): Option[A] = value match { case scala.Left(x) => Some(x) @@ -9,13 +10,16 @@ object LeftOrRight { object Test { (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(pair @ (a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)" + case _ => sys.error("LeftOrRight.unapply is (non-explicitly) irrefutable") } (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)" + case _ => sys.error("LeftOrRight.unapply is (non-explicitly) irrefutable") } (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a, b) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)" + case _ => sys.error("LeftOrRight.unapply is (non-explicitly) irrefutable") } } diff --git a/test/files/pos/t6712.scala 
b/test/files/pos/t6712.scala index 3c96eb14fb9d..f70d1a8732c9 100644 --- a/test/files/pos/t6712.scala +++ b/test/files/pos/t6712.scala @@ -1,5 +1,5 @@ class H { object O - def foo() { object O } + def foo(): Unit = { object O } } diff --git a/test/files/pos/t6734.scala b/test/files/pos/t6734.scala index 88932cd2cc5e..8dbef2e62820 100644 --- a/test/files/pos/t6734.scala +++ b/test/files/pos/t6734.scala @@ -6,7 +6,7 @@ package object p package p { import scala.concurrent.Future - case class C private[p] (value: Future[Int]) // private to avoid rewriting C.apply to new C + case class C protected[p] (value: Future[Int]) // protected to avoid rewriting C.apply to new C } package client { diff --git a/test/files/pos/t6771.scala b/test/files/pos/t6771.scala index e5668e6883b6..5e34b4baff40 100644 --- a/test/files/pos/t6771.scala +++ b/test/files/pos/t6771.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { type Id[X] = X val a: Id[Option[Int]] = None diff --git a/test/files/pos/t6891.scala b/test/files/pos/t6891.scala index 01ab44bd6d35..9884bf3bda1d 100644 --- a/test/files/pos/t6891.scala +++ b/test/files/pos/t6891.scala @@ -1,4 +1,5 @@ -// scalac: -Ycheck:extmethods -Xfatal-warnings +//> using options -Ycheck:extmethods -Xfatal-warnings +// object O { implicit class Foo[A](val value: String) extends AnyVal { def bippy() = { @@ -6,14 +7,30 @@ object O { () } + // The original test cases fail with the new is/asInstanceOf semantics + // introduced along with SIP-23 implementation because the method has a + // singleton typed argument which cannot be erased correctly. 
+ // See: neg/sip23-tailrec-value-class.scala + //def boppy() = { + // @annotation.tailrec def loop(x: value.type): Unit = loop(x) + // () + //} + + //def beppy[C](c: => C) = { + // () => c + // @annotation.tailrec def loop(x: value.type): Unit = loop(x) + // () => c + // () + //} + def boppy() = { - @annotation.tailrec def loop(x: value.type): Unit = loop(x) + @annotation.tailrec def loop(x: Option[value.type]): Unit = loop(x) () } def beppy[C](c: => C) = { () => c - @annotation.tailrec def loop(x: value.type): Unit = loop(x) + @annotation.tailrec def loop(x: Option[value.type]): Unit = loop(x) () => c () } diff --git a/test/files/neg/t6895b.scala b/test/files/pos/t6895.scala similarity index 100% rename from test/files/neg/t6895b.scala rename to test/files/pos/t6895.scala diff --git a/test/files/pos/t6895b-2.scala b/test/files/pos/t6895b-2.scala deleted file mode 100644 index 079f3904842c..000000000000 --- a/test/files/pos/t6895b-2.scala +++ /dev/null @@ -1,40 +0,0 @@ -// scalac: -Xsource:2.13 -trait Foo[F[_]] -trait Bar[F[_], A] - -trait Or[A, B] - -class Test { - implicit def orFoo[A]: Foo[({type L[X] = Or[A, X]})#L] = ??? - implicit def barFoo[F[_]](implicit f: Foo[F]): Foo[({type L[X] = Bar[F, X]})#L] = ??? - - // Now we can define a couple of type aliases: - type StringOr[X] = Or[String, X] - type BarStringOr[X] = Bar[StringOr, X] - - // ok - implicitly[Foo[BarStringOr]] - barFoo[StringOr](null) : Foo[BarStringOr] - barFoo(null) : Foo[BarStringOr] - - // nok - implicitly[Foo[({type L[X] = Bar[StringOr, X]})#L]] - // Let's write the application explicitly, and then - // compile with just this line enabled and -explaintypes. - barFoo(null) : Foo[({type L[X] = Bar[StringOr, X]})#L] - - // Foo[[X]Bar[F,X]] <: Foo[[X]Bar[[X]Or[String,X],X]]? - // Bar[[X]Or[String,X],X] <: Bar[F,X]? - // F[_] <: Or[String,_]? 
- // false - // false - // false - - // Note that the type annotation above is typechecked as - // Foo[[X]Bar[[X]Or[String,X],X]], ie the type alias `L` - // is eta expanded. - // - // This is done so that it does not escape its defining scope. - // However, one this is done, higher kinded inference - // no longer is able to unify F with `StringOr` (scala/bug#2712) -} diff --git a/test/files/pos/t6895b.scala b/test/files/pos/t6895b.scala deleted file mode 100644 index 735dba55690c..000000000000 --- a/test/files/pos/t6895b.scala +++ /dev/null @@ -1,40 +0,0 @@ -// scalac: -Ypartial-unification -trait Foo[F[_]] -trait Bar[F[_], A] - -trait Or[A, B] - -class Test { - implicit def orFoo[A]: Foo[({type L[X] = Or[A, X]})#L] = ??? - implicit def barFoo[F[_]](implicit f: Foo[F]): Foo[({type L[X] = Bar[F, X]})#L] = ??? - - // Now we can define a couple of type aliases: - type StringOr[X] = Or[String, X] - type BarStringOr[X] = Bar[StringOr, X] - - // ok - implicitly[Foo[BarStringOr]] - barFoo[StringOr](null) : Foo[BarStringOr] - barFoo(null) : Foo[BarStringOr] - - // nok - implicitly[Foo[({type L[X] = Bar[StringOr, X]})#L]] - // Let's write the application explicitly, and then - // compile with just this line enabled and -explaintypes. - barFoo(null) : Foo[({type L[X] = Bar[StringOr, X]})#L] - - // Foo[[X]Bar[F,X]] <: Foo[[X]Bar[[X]Or[String,X],X]]? - // Bar[[X]Or[String,X],X] <: Bar[F,X]? - // F[_] <: Or[String,_]? - // false - // false - // false - - // Note that the type annotation above is typechecked as - // Foo[[X]Bar[[X]Or[String,X],X]], ie the type alias `L` - // is eta expanded. - // - // This is done so that it does not escape its defining scope. 
- // However, one this is done, higher kinded inference - // no longer is able to unify F with `StringOr` (scala/bug#2712) -} diff --git a/test/files/pos/t6896.scala b/test/files/pos/t6896.scala index 76e357da1049..404d343e0c78 100644 --- a/test/files/pos/t6896.scala +++ b/test/files/pos/t6896.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object TooManyMains { def main(args: Array[String]): Unit = { println("Hello, World!") diff --git a/test/files/pos/t6925.scala b/test/files/pos/t6925.scala index 862a6e9d0e72..c6f418519911 100644 --- a/test/files/pos/t6925.scala +++ b/test/files/pos/t6925.scala @@ -6,4 +6,4 @@ class Test { // check that this type checks: List(1).flatMap(n => Set(1).collect { case w => w }) -} \ No newline at end of file +} diff --git a/test/files/pos/t6925b.scala b/test/files/pos/t6925b.scala index 7e4fdd2d9b2f..4264f0d9a9a9 100644 --- a/test/files/pos/t6925b.scala +++ b/test/files/pos/t6925b.scala @@ -15,4 +15,4 @@ object Test { lazy val lx = { println("hello"); 3 } def test1(x: Int = lx) = ??? def test2(x: Int = lx match { case 0 => 1; case 3 => 4 }) = ??? 
-} \ No newline at end of file +} diff --git a/test/files/pos/t6942/Bar.java b/test/files/pos/t6942/Bar.java index 592f62efb43c..708b0403ccd9 100644 --- a/test/files/pos/t6942/Bar.java +++ b/test/files/pos/t6942/Bar.java @@ -1,3 +1,5 @@ +// scalac: -nowarn +// package foo; public enum Bar { @@ -232,4 +234,4 @@ public enum Bar { ZAMBIA /*("EU")*/, ZIMBABWE /*("EU")*/; -} \ No newline at end of file +} diff --git a/test/files/pos/t6942/t6942.scala b/test/files/pos/t6942/t6942.scala index b6fa968cc670..9a7440e455dc 100644 --- a/test/files/pos/t6942/t6942.scala +++ b/test/files/pos/t6942/t6942.scala @@ -1,4 +1,5 @@ -// scalac: -nowarn +//> using options -nowarn +// // not a peep out of the pattern matcher's unreachability analysis // its budget should suffice for these simple matches (they do have a large search space) class Test { diff --git a/test/files/pos/t6963c.scala b/test/files/pos/t6963c.scala index 4d1ba4bb0b1f..7e71581a75fd 100644 --- a/test/files/pos/t6963c.scala +++ b/test/files/pos/t6963c.scala @@ -1,4 +1,6 @@ -// scalac: -Xmigration:2.9 -Xfatal-warnings +//> using options -Werror -Xmigration:2.9 +// +import collection.Seq object Test { def f1(x: Any) = x.isInstanceOf[Seq[_]] def f2(x: Any) = x match { @@ -11,7 +13,7 @@ object Test { case _ => false } - def f4(x: Any) = x.isInstanceOf[Traversable[_]] + def f4(x: Any) = x.isInstanceOf[Iterable[_]] def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match { case (Some(_: Seq[_]), Nil, _) => 1 diff --git a/test/files/pos/t6966.scala b/test/files/pos/t6966.scala index 23adc6d0d2b4..a43d7c50197a 100644 --- a/test/files/pos/t6966.scala +++ b/test/files/pos/t6966.scala @@ -14,4 +14,4 @@ object CacheIvy { sealed trait HList sealed trait HNil extends HList object HNil extends HNil -final class HCons[H, T <: HList](head : H, tail : T) extends HList \ No newline at end of file +final class HCons[H, T <: HList](head : H, tail : T) extends HList diff --git a/test/files/pos/t6978/S.scala b/test/files/pos/t6978/S.scala 
index d620c02e7bd2..dad2d950a9a8 100644 --- a/test/files/pos/t6978/S.scala +++ b/test/files/pos/t6978/S.scala @@ -1,5 +1,4 @@ -// scalac: -Xlint -Xfatal-warnings - +//> using options -Xlint -Xfatal-warnings trait X { def f: Int } object Test extends J with X with App { diff --git a/test/files/pos/t6994.scala b/test/files/pos/t6994.scala index dd8132374ec9..143c75d329df 100644 --- a/test/files/pos/t6994.scala +++ b/test/files/pos/t6994.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { object NF { def unapply(t: Throwable): Option[Throwable] = None diff --git a/test/files/pos/t7011.scala b/test/files/pos/t7011.scala index 5f49b029d02e..ff2b15e1bb2b 100644 --- a/test/files/pos/t7011.scala +++ b/test/files/pos/t7011.scala @@ -1,8 +1,9 @@ -// scalac: -Ydebug -Xfatal-warnings +//> using options -Ydebug -Xfatal-warnings +// object bar { - def foo { + def foo: Unit = { lazy val x = 42 - {()=>x} + def f = {() => x} } } diff --git a/test/files/pos/t704.scala b/test/files/pos/t704.scala index e77a54a58ee7..aedd8c03afdc 100644 --- a/test/files/pos/t704.scala +++ b/test/files/pos/t704.scala @@ -16,7 +16,7 @@ trait E extends D { } class C extends E {} object Go extends D { - def main(args : Array[String]) { + def main(args : Array[String]): Unit = { new C().f() new C().f() } diff --git a/test/files/pos/t7046-2/Macros_1.scala b/test/files/pos/t7046-2/Macros_1.scala index 07c0c61281df..534db8bde266 100644 --- a/test/files/pos/t7046-2/Macros_1.scala +++ b/test/files/pos/t7046-2/Macros_1.scala @@ -1,7 +1,7 @@ package p1 import scala.reflect.macros.blackbox._ -import language.experimental._ +import language.experimental.macros object Macro { def impl(c: Context): c.Tree = { diff --git a/test/files/pos/t7088.scala b/test/files/pos/t7088.scala index de9d1b7040c6..5bf7230dc370 100644 --- a/test/files/pos/t7088.scala +++ b/test/files/pos/t7088.scala @@ -2,7 +2,7 @@ object Example extends App { type Tag[X] = {type Tag = X} type 
TaggedArray[T] = Array[T] with Tag[Any] - def method[T: reflect.ClassTag](a: TaggedArray[T], value: T) {a.update(0, value)} + def method[T: reflect.ClassTag](a: TaggedArray[T], value: T): Unit = {a.update(0, value)} method(Array(1, 2).asInstanceOf[TaggedArray[Int]], 1) } diff --git a/test/files/pos/t7126.scala b/test/files/pos/t7126.scala index 6720511e08a7..edac56d28d8c 100644 --- a/test/files/pos/t7126.scala +++ b/test/files/pos/t7126.scala @@ -8,4 +8,4 @@ object Test { // okay foo(???): Option[Any] def foo[CC[U]](t : CC[Any]): Option[CC[Any]] = None -} \ No newline at end of file +} diff --git a/test/files/pos/t7183.scala b/test/files/pos/t7183.scala index 2efc51684ea7..488924f7d4cc 100644 --- a/test/files/pos/t7183.scala +++ b/test/files/pos/t7183.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class A object A { def unapply(a: A): Some[A] = Some(a) // Change return type to Option[A] and the warning is gone diff --git a/test/files/pos/t7190.scala b/test/files/pos/t7190.scala index f7ccded1b44c..449e5c83f183 100644 --- a/test/files/pos/t7190.scala +++ b/test/files/pos/t7190.scala @@ -23,4 +23,4 @@ object B { ord.splice.zero.asInstanceOf[Int] } } -} \ No newline at end of file +} diff --git a/test/files/pos/t7200b.scala b/test/files/pos/t7200b.scala index 59be898fd03c..81500ddc1383 100644 --- a/test/files/pos/t7200b.scala +++ b/test/files/pos/t7200b.scala @@ -47,4 +47,4 @@ object O extends Foo { // 5) [A#11336 <: #3.this.T#7068]( f#12505: A#11336)A#11336 // // This *does* match the method in the super class, and type inference -// chooses the correct type argument. \ No newline at end of file +// chooses the correct type argument. 
diff --git a/test/files/pos/t7212.scala b/test/files/pos/t7212.scala new file mode 100644 index 000000000000..ea68ac90eab9 --- /dev/null +++ b/test/files/pos/t7212.scala @@ -0,0 +1,25 @@ +//> using options -Xsource:3 -Xsource-features:infer-override + +class A { + def f: Option[String] = Some("hello, world") + def remove(): Unit = () +} +class B extends A { + override def f = None + override def remove() = ??? +} +class C extends B { + override def f: Option[String] = Some("goodbye, cruel world") + override def remove(): Unit = println("removed! (not really)") +} + +trait T { def f: Object } +class K extends T { def f = "" } +object K { + val k = new K + val s: Any = k.f +} + +trait U extends T { def f = "" } +trait V { var v: Any } +trait W extends V { var v = "" } diff --git a/test/files/pos/t7212b/JavaClient.java b/test/files/pos/t7212b/JavaClient.java new file mode 100644 index 000000000000..8fb0650e9bd5 --- /dev/null +++ b/test/files/pos/t7212b/JavaClient.java @@ -0,0 +1,6 @@ + +public class JavaClient extends ScalaThing { + public void remove() { + throw new UnsupportedOperationException(); + } +} diff --git a/test/files/pos/t7212b/JavaThing.java b/test/files/pos/t7212b/JavaThing.java new file mode 100644 index 000000000000..4c3f6cab0b47 --- /dev/null +++ b/test/files/pos/t7212b/JavaThing.java @@ -0,0 +1,4 @@ + +public interface JavaThing { + default void remove() { throw new UnsupportedOperationException(); } +} diff --git a/test/files/pos/t7212b/ScalaThing.scala b/test/files/pos/t7212b/ScalaThing.scala new file mode 100644 index 000000000000..35df80065f48 --- /dev/null +++ b/test/files/pos/t7212b/ScalaThing.scala @@ -0,0 +1,5 @@ +//> using options -Xsource:3 -Xsource-features:infer-override + +class ScalaThing extends JavaThing { + override def remove() = ??? 
+} diff --git a/test/files/pos/t7226.scala b/test/files/pos/t7226.scala index 06f0c95dc47b..1b7e2549c37f 100644 --- a/test/files/pos/t7226.scala +++ b/test/files/pos/t7226.scala @@ -2,11 +2,11 @@ trait HK { type Rep[X] // okay - def unzip2[A, B](ps: Rep[List[(A, B)]]) + def unzip2[A, B](ps: Rep[List[(A, B)]]): Unit unzip2(null.asInstanceOf[Rep[List[(Int, String)]]]) // okay - def unzipHK[A, B, C[_]](ps: Rep[C[(A, B)]]) + def unzipHK[A, B, C[_]](ps: Rep[C[(A, B)]]): Unit unzipHK(null.asInstanceOf[Rep[List[(Int, String)]]]) def unzipHKRet0[A, C[_]](ps: C[A]): C[Int] diff --git a/test/files/pos/t7232/Test.scala b/test/files/pos/t7232/Test.scala index 175db0af4300..798bb11235e3 100644 --- a/test/files/pos/t7232/Test.scala +++ b/test/files/pos/t7232/Test.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test { import pack._ Foo.okay().size() diff --git a/test/files/pos/t7232b/Test.scala b/test/files/pos/t7232b/Test.scala index 33cc6587ae2c..6675688722cc 100644 --- a/test/files/pos/t7232b/Test.scala +++ b/test/files/pos/t7232b/Test.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test { import pack._ diff --git a/test/files/pos/t7232c/Test.scala b/test/files/pos/t7232c/Test.scala index 0fe46e78dd88..48ce66340185 100644 --- a/test/files/pos/t7232c/Test.scala +++ b/test/files/pos/t7232c/Test.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test { import pack._ Foo.innerList().isInnerList() diff --git a/test/files/pos/t7232d/Test.scala b/test/files/pos/t7232d/Test.scala index 042e0396d761..af3b5f05ded4 100644 --- a/test/files/pos/t7232d/Test.scala +++ b/test/files/pos/t7232d/Test.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test { import pack._ Foo.mapEntry().getKey() diff --git a/test/files/pos/t7232e/Foo.java b/test/files/pos/t7232e/Foo.java new file mode 100644 index 000000000000..4d735183b5d7 --- /dev/null +++ 
b/test/files/pos/t7232e/Foo.java @@ -0,0 +1,10 @@ +package pack; + +import java.util.List; // does shadow +import java.awt.*; // doesn't shadow, keep looking +import java.util.function.Supplier; // irrelevant, keep looking + +public class Foo { + // should be java.util.List. + public static List list() { throw new Error(); } +} diff --git a/test/files/pos/t7232e/List.java b/test/files/pos/t7232e/List.java new file mode 100644 index 000000000000..243409e61578 --- /dev/null +++ b/test/files/pos/t7232e/List.java @@ -0,0 +1,5 @@ +package pack; + +public class List { + public void packList() {} +} diff --git a/test/files/pos/t7232e/Test.scala b/test/files/pos/t7232e/Test.scala new file mode 100644 index 000000000000..aad5788927c3 --- /dev/null +++ b/test/files/pos/t7232e/Test.scala @@ -0,0 +1,7 @@ +//> using options -Werror +object Test { + import pack._ + + // java.util.List not java.awt.List or pack.List + Foo.list().listIterator() +} diff --git a/test/files/pos/t7232f/C.java b/test/files/pos/t7232f/C.java new file mode 100644 index 000000000000..8ece2f6f7424 --- /dev/null +++ b/test/files/pos/t7232f/C.java @@ -0,0 +1,8 @@ + +import p.D; + +import java.util.function.Supplier; + +public class C { + public final D d = new D(); +} diff --git a/test/files/pos/t7232f/D.java b/test/files/pos/t7232f/D.java new file mode 100644 index 000000000000..e42b89ee2570 --- /dev/null +++ b/test/files/pos/t7232f/D.java @@ -0,0 +1,4 @@ + +public class D { + public void notok() { } +} diff --git a/test/files/pos/t7232f/Test.scala b/test/files/pos/t7232f/Test.scala new file mode 100644 index 000000000000..7235cb2d18b0 --- /dev/null +++ b/test/files/pos/t7232f/Test.scala @@ -0,0 +1,8 @@ + +object Test { + def main(args: Array[String]): Unit = { + val c = new C() + val x = c.d + x.ok() + } +} diff --git a/test/files/pos/t7232f/q.scala b/test/files/pos/t7232f/q.scala new file mode 100644 index 000000000000..afd07cfb3beb --- /dev/null +++ b/test/files/pos/t7232f/q.scala @@ -0,0 +1,6 @@ + 
+package p + +class D { + def ok() = () +} diff --git a/test/files/pos/t7232g/Foo.java b/test/files/pos/t7232g/Foo.java new file mode 100644 index 000000000000..6bd19e1a0c16 --- /dev/null +++ b/test/files/pos/t7232g/Foo.java @@ -0,0 +1,8 @@ +package pack; + +import java.util.*; + +public class Foo { + // should be java.util.List obviously. + public static List list() { throw new Error(); } +} diff --git a/test/files/pos/t7232g/Test.scala b/test/files/pos/t7232g/Test.scala new file mode 100644 index 000000000000..1f73d9edff61 --- /dev/null +++ b/test/files/pos/t7232g/Test.scala @@ -0,0 +1,6 @@ +//> using options -Werror +object Test { + import pack._ + + Foo.list().listIterator() +} diff --git a/test/files/pos/t7285a.scala b/test/files/pos/t7285a.scala index 830807fce2a3..1f1c1e1c652a 100644 --- a/test/files/pos/t7285a.scala +++ b/test/files/pos/t7285a.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed abstract class Base object Test { @@ -24,7 +25,7 @@ object Test1 { case object Up extends Base { } - (d1: Base, d2: Base) => + def f = (d1: Base, d2: Base) => (d1, d2) match { case (Up, Up) | (Down, Down) => false case (Down, Up) => true @@ -43,7 +44,7 @@ object Test2 { case object Up extends Base { } - (d1: Base, d2: Base) => + def f = (d1: Base, d2: Base) => (d1) match { case Up | Down => false } @@ -56,7 +57,7 @@ object Test3 { object Base { case object Down extends Base - (d1: Base, d2: Base) => + def f = (d1: Base, d2: Base) => (d1, d2) match { case (Down, Down) => false } @@ -75,7 +76,7 @@ object Test4 { } import Test4.Base._ - (d1: Base, d2: Base) => + def f = (d1: Base, d2: Base) => (d1, d2) match { case (Up, Up) | (Down, Down) => false case (Down, Test4.Base.Up) => true diff --git a/test/files/pos/t7315.scala b/test/files/pos/t7315.scala index 82a7e727c20a..bb59cf5b14a3 100644 --- a/test/files/pos/t7315.scala +++ b/test/files/pos/t7315.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation -Xfatal-warnings +//> using 
options -deprecation -Xfatal-warnings +// package scala.pack @deprecatedInheritance diff --git a/test/files/pos/t7329.scala b/test/files/pos/t7329.scala index 76bf1fb9f5bd..143ea3cd13c5 100644 --- a/test/files/pos/t7329.scala +++ b/test/files/pos/t7329.scala @@ -1 +1 @@ -class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B)) \ No newline at end of file +class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B)) diff --git a/test/files/pos/t7369.scala b/test/files/pos/t7369.scala index 3f0d3b60b856..8234eeb9d2e8 100644 --- a/test/files/pos/t7369.scala +++ b/test/files/pos/t7369.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { val X, Y = true (null: Tuple1[Boolean]) match { diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala index b38687c8b3cc..a6e2f91ac1d7 100644 --- a/test/files/pos/t7377/Macro_1.scala +++ b/test/files/pos/t7377/Macro_1.scala @@ -1,4 +1,4 @@ -import language.experimental._ +import language.experimental.macros import scala.reflect.macros.blackbox.Context object M { diff --git a/test/files/pos/t7398/Iterator.java b/test/files/pos/t7398/Iterator.java new file mode 100644 index 000000000000..75b5a8b303b7 --- /dev/null +++ b/test/files/pos/t7398/Iterator.java @@ -0,0 +1,10 @@ +public interface Iterator { + boolean hasNext(); + E next(); + default void remove() { + throw new UnsupportedOperationException("remove"); + } + default void forEachRemaining(java.util.function.Consumer action) { + throw new UnsupportedOperationException("forEachRemaining"); + } +} diff --git a/test/files/pos/t7398/Test.scala b/test/files/pos/t7398/Test.scala new file mode 100644 index 000000000000..2068acaa6dc7 --- /dev/null +++ b/test/files/pos/t7398/Test.scala @@ -0,0 +1,5 @@ +class Test extends Iterator[String] { + def hasNext = true + def next() = "" + def test = this.remove() +} diff --git 
a/test/files/pos/t7427.scala b/test/files/pos/t7427.scala index 73ae2b05c937..2050702e4adc 100644 --- a/test/files/pos/t7427.scala +++ b/test/files/pos/t7427.scala @@ -1,4 +1,5 @@ -// scalac: -Ydebug +//> using options -Ydebug +// // Compiles with no options // Compiles with -Ydebug -Ydisable-unreachable-prevention // Crashes with -Ydebug diff --git a/test/files/pos/t7433.scala b/test/files/pos/t7433.scala index 674b49fe5269..16015b178a41 100644 --- a/test/files/pos/t7433.scala +++ b/test/files/pos/t7433.scala @@ -1,6 +1,7 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { - def foo() { + def foo(): Unit = { try { for (i <- 1 until 5) return } catch { diff --git a/test/files/pos/t7459a.scala b/test/files/pos/t7459a.scala index 5107715e0628..ab8a518b0a7a 100644 --- a/test/files/pos/t7459a.scala +++ b/test/files/pos/t7459a.scala @@ -1,7 +1,7 @@ trait SpecialException extends Throwable object Test { - def run() { + def run(): Unit = { try { ??? } catch { @@ -15,4 +15,4 @@ object Test { // case e => // } } -} \ No newline at end of file +} diff --git a/test/files/pos/t7459b.scala b/test/files/pos/t7459b.scala index a4b4fd07a907..2c4837ae46b9 100644 --- a/test/files/pos/t7459b.scala +++ b/test/files/pos/t7459b.scala @@ -9,4 +9,4 @@ class Test { () => f ??? 
} -} \ No newline at end of file +} diff --git a/test/files/pos/t7461/Macros_1.scala b/test/files/pos/t7461/Macros_1.scala index ca84d75624e2..e7e1e2d5e281 100644 --- a/test/files/pos/t7461/Macros_1.scala +++ b/test/files/pos/t7461/Macros_1.scala @@ -9,5 +9,5 @@ object Macros { c.Expr[Unit](q"()") } - def foo = macro impl -} \ No newline at end of file + def foo: Unit = macro impl +} diff --git a/test/files/pos/t7461/Test_2.scala b/test/files/pos/t7461/Test_2.scala index 3839659c9a0d..866e3dc6fc25 100644 --- a/test/files/pos/t7461/Test_2.scala +++ b/test/files/pos/t7461/Test_2.scala @@ -1,3 +1,3 @@ class C { def foo = Macros.foo -} \ No newline at end of file +} diff --git a/test/files/pos/t7481.scala b/test/files/pos/t7481.scala new file mode 100644 index 000000000000..c419d1f20eba --- /dev/null +++ b/test/files/pos/t7481.scala @@ -0,0 +1,10 @@ + +//> using options -no-specialization + +object Test { + // val fixesCompile = Array(1, 2, 3) + + def foo: Any = new Array[Byte](0) + + def f[@specialized(Int) A](a: A): A = a +} diff --git a/test/files/pos/t7505.scala b/test/files/pos/t7505.scala index 3e1e6ab8b40c..bc8c7fad6151 100644 --- a/test/files/pos/t7505.scala +++ b/test/files/pos/t7505.scala @@ -13,4 +13,4 @@ case class ContextProperty(value: Any) { // The underlying error was: type mismatch; // found : Boolean(true) // required: AnyRef -// def toInt: Int = value match { \ No newline at end of file +// def toInt: Int = value match { diff --git a/test/files/pos/t7517.scala b/test/files/pos/t7517.scala index 7ce4c6b13eaf..df4f40130450 100644 --- a/test/files/pos/t7517.scala +++ b/test/files/pos/t7517.scala @@ -19,4 +19,4 @@ object Box { val a1: Box[Split] = Box.split[L,B](box) val a2: Box[ ({ type l[A[x]] = L[ (A ∙ B)#l ] })#l ] = Box.split(box) } -} \ No newline at end of file +} diff --git a/test/files/pos/t7532/B_2.scala b/test/files/pos/t7532/B_2.scala index ee7ce7751f7b..40bc0615a655 100644 --- a/test/files/pos/t7532/B_2.scala +++ 
b/test/files/pos/t7532/B_2.scala @@ -2,4 +2,4 @@ object Test { val r = new R new r.attr() // Was: error while loading attr, class file '.../t7532-pos.obj/R$attr.class' has location not matching its contents: contains class new R.attr1 -} \ No newline at end of file +} diff --git a/test/files/pos/t7532b/A_1.scala b/test/files/pos/t7532b/A_1.scala index e8f95406095a..586465ee6e54 100644 --- a/test/files/pos/t7532b/A_1.scala +++ b/test/files/pos/t7532b/A_1.scala @@ -4,4 +4,4 @@ class R { class `@` } -class `@` \ No newline at end of file +class `@` diff --git a/test/files/pos/t7532b/B_2.scala b/test/files/pos/t7532b/B_2.scala index 1555a5daa7a4..c4f15daf5c61 100644 --- a/test/files/pos/t7532b/B_2.scala +++ b/test/files/pos/t7532b/B_2.scala @@ -5,4 +5,4 @@ object Test { new r.attr() new r.`@` new `@` -} \ No newline at end of file +} diff --git a/test/files/pos/t7551/T.scala b/test/files/pos/t7551/T.scala deleted file mode 100644 index ca9a32e0a410..000000000000 --- a/test/files/pos/t7551/T.scala +++ /dev/null @@ -1,10 +0,0 @@ -// scalac: -Xfatal-warnings -package p - -@A(subInterface = classOf[T.S]) -trait T { -} - -object T { - private[p] trait S extends T { } -} diff --git a/test/files/pos/t7551/T_1.scala b/test/files/pos/t7551/T_1.scala new file mode 100644 index 000000000000..86e3d0d0ac2c --- /dev/null +++ b/test/files/pos/t7551/T_1.scala @@ -0,0 +1,10 @@ +//> using options -Werror +package p + +@A(subInterface = classOf[T.S]) +trait T { +} + +object T { + private[p] trait S extends T { } +} diff --git a/test/files/pos/t7551/Test.scala b/test/files/pos/t7551/Test.scala deleted file mode 100644 index 6031bc9f5225..000000000000 --- a/test/files/pos/t7551/Test.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Xfatal-warnings -package p - -object Foo { - def bar(t: T) { } -} diff --git a/test/files/pos/t7551/Test_2.scala b/test/files/pos/t7551/Test_2.scala new file mode 100644 index 000000000000..a8b0e508c497 --- /dev/null +++ b/test/files/pos/t7551/Test_2.scala @@ 
-0,0 +1,6 @@ +//> using options -Werror +package p + +object Foo { + def bar(t: T): Unit = { } +} diff --git a/test/files/pos/t757.scala b/test/files/pos/t757.scala index fd7624cee75e..602f3cd4dc48 100644 --- a/test/files/pos/t757.scala +++ b/test/files/pos/t757.scala @@ -1,6 +1,6 @@ package foo { object C { - def foo { + def foo: Unit = { Console.println("foo") } } diff --git a/test/files/pos/t7591/Demo.scala b/test/files/pos/t7591/Demo.scala index 696d53585bc5..90fee0e02dcd 100644 --- a/test/files/pos/t7591/Demo.scala +++ b/test/files/pos/t7591/Demo.scala @@ -3,7 +3,11 @@ * @author Paul Phillips */ -import scala.tools.cmd._ +package scala.demo + +import scala.tools.partest.nest.{ Instance, Interpolation, Meta, Spec } + +import scala.language.postfixOps /** A sample command specification for illustrative purposes. * First take advantage of the meta-options: @@ -47,7 +51,7 @@ object DemoSpec extends DemoSpec with Property { type ThisCommandLine = SpecCommandLine def creator(args: List[String]) = new SpecCommandLine(args) { - override def errorFn(msg: String) = { println("Error: " + msg) ; sys.exit(0) } + override def errorFn(msg: String) = { throw new Error("Error: " + msg) } } } diff --git a/src/compiler/scala/tools/cmd/Property.scala b/test/files/pos/t7591/Property.scala similarity index 84% rename from src/compiler/scala/tools/cmd/Property.scala rename to test/files/pos/t7591/Property.scala index 0a1ffff7e8e9..65f02fd7449c 100644 --- a/src/compiler/scala/tools/cmd/Property.scala +++ b/test/files/pos/t7591/Property.scala @@ -1,7 +1,7 @@ /* * Scala (https://www.scala-lang.org) * - * Copyright EPFL and Lightbend, Inc. + * Copyright EPFL and Lightbend, Inc. dba Akka * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). @@ -10,13 +10,14 @@ * additional information regarding copyright ownership. 
*/ -package scala.tools -package cmd +package scala.demo -import nsc.io._ +import scala.sys.process.Parser.tokenize +import scala.tools.nsc.io._ +import scala.tools.partest.nest.{Reference, toOpt} +import scala.util.chaining._ import java.util.Properties import java.io.FileInputStream -import scala.sys.SystemProperties /** Contains logic for translating a property key/value pair into * equivalent command line arguments. The default settings will @@ -43,14 +44,14 @@ class PropertyMapper(reference: Reference) extends (((String, String)) => List[S if (isUnaryOption(key) && isTrue(value)) List(opt) else if (isBinaryOption(key)) List(opt, value) - else returning(Nil)(_ => onError(key, value)) + else Nil.tap(_ => onError(key, value)) } def isTrue(value: String) = List("yes", "on", "true") contains value.toLowerCase def apply(kv: (String, String)): List[String] = { val (k, v) = kv - if (isPassThrough(k)) toArgs(v) + if (isPassThrough(k)) tokenize(v) else propNameToOptionName(k) match { case Some(optName) => translate(optName, v) case _ => Nil @@ -63,10 +64,10 @@ trait Property extends Reference { override def propertyArgs: List[String] = systemPropertiesToOptions def loadProperties(file: File): Properties = - returning(new Properties)(_ load new FileInputStream(file.path)) + new Properties().tap(_ load new FileInputStream(file.path)) def systemPropertiesToOptions: List[String] = - propertiesToOptions(new SystemProperties().toList) + propertiesToOptions(System.getProperties) def propertiesToOptions(file: File): List[String] = propertiesToOptions(loadProperties(file)) diff --git a/test/files/pos/t7596/B_2.scala b/test/files/pos/t7596/B_2.scala index 977e5c8bd1e5..f33c3c32fa0c 100644 --- a/test/files/pos/t7596/B_2.scala +++ b/test/files/pos/t7596/B_2.scala @@ -16,4 +16,4 @@ object Test { // Sites.type <: Config.driver.Table? // Driver.this.type = Config.driver.type? 
// false -// false \ No newline at end of file +// false diff --git a/test/files/pos/t7596c/B_2.scala b/test/files/pos/t7596c/B_2.scala index 33da68c1ff11..790182d4a879 100644 --- a/test/files/pos/t7596c/B_2.scala +++ b/test/files/pos/t7596c/B_2.scala @@ -6,4 +6,4 @@ object Test { // This variation worked by avoiding referring to the // overloaded term `Config.driver` in the parent type of -// Sites \ No newline at end of file +// Sites diff --git a/test/files/pos/t7606.scala b/test/files/pos/t7606.scala new file mode 100644 index 000000000000..7c19f94f19b3 --- /dev/null +++ b/test/files/pos/t7606.scala @@ -0,0 +1,20 @@ +import scala.language.{ dynamics, implicitConversions } + +class ID(s: String) + +class DynamicFields extends scala.Dynamic { + def updateDynamic(name: ID)(value: Any): Unit = ??? +} + +object BugUpdateDynamic { + implicit def string2id(s: String): ID = new ID(s) + + def explicitly(): Unit = { + val o = new DynamicFields + o.updateDynamic("foo")("bar") + } + def bug(): Unit = { + val o = new DynamicFields + o.foo = "bar" + } +} diff --git a/test/files/pos/t7612.scala b/test/files/pos/t7612.scala new file mode 100644 index 000000000000..56b4c001a6ec --- /dev/null +++ b/test/files/pos/t7612.scala @@ -0,0 +1,18 @@ +object Test { + trait Fili[A] + trait Kili[M] { + def fili: Fili[M] + } + + trait A extends Kili[A] { + def fili: Fili[A] + } + + trait Ori[M] extends Kili[M] { + val ori: Fili[M] + def fili: ori.type + } + trait B extends Ori[B] + + def foo(a: A, b: B) = if (true) a else b +} diff --git a/test/files/pos/t7649.scala b/test/files/pos/t7649.scala index 4bcb6068c0fa..d31c150bc40f 100644 --- a/test/files/pos/t7649.scala +++ b/test/files/pos/t7649.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos +// +// object Test { val c: scala.reflect.macros.blackbox.Context = ??? 
import c.universe._ diff --git a/test/files/pos/t767.scala b/test/files/pos/t767.scala index 0c4067f02221..c3da64f1e02b 100644 --- a/test/files/pos/t767.scala +++ b/test/files/pos/t767.scala @@ -3,7 +3,7 @@ abstract class AbsCell { val init: T private var value: T = init def get: T = value - def set (x: T) { value = x } + def set (x: T): Unit = { value = x } class Node { val foo = 1 @@ -11,7 +11,7 @@ abstract class AbsCell { } object inner { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val cell = new AbsCell { val init = new Node() } cell.set(new cell.type#T()) // nullpointer exception } diff --git a/test/files/pos/t7683-stop-after-parser/sample_2.scala b/test/files/pos/t7683-stop-after-parser/sample_2.scala index f7a84923593f..bc08be642a68 100644 --- a/test/files/pos/t7683-stop-after-parser/sample_2.scala +++ b/test/files/pos/t7683-stop-after-parser/sample_2.scala @@ -1,5 +1,4 @@ -// scalac: -Xplugin:. -Xplugin-require:timebomb -Ystop-after:parser - +//> using options -Xplugin:. 
-Xplugin-require:timebomb -Ystop-after:parser package sample // just a sample that is compiled with the explosive plugin disabled diff --git a/test/files/pos/t7690.scala b/test/files/pos/t7690.scala index e8911a93e8de..4d88c334844a 100644 --- a/test/files/pos/t7690.scala +++ b/test/files/pos/t7690.scala @@ -14,4 +14,4 @@ object C { import A._ "asdf" == i.toString } -} \ No newline at end of file +} diff --git a/test/files/pos/t7707.scala b/test/files/pos/t7707.scala new file mode 100644 index 000000000000..fee40c2862af --- /dev/null +++ b/test/files/pos/t7707.scala @@ -0,0 +1,12 @@ +//> using options -Werror -Xlint + +// uses apply default, ctor default is unused +object O { O() ; def f(): Unit = O() } +case class O private (x: Int = 3) + +object Q { Q[Int]() ; def f(): Unit = Q[Int]() } +case class Q[A] private (x: Int = 3) + +// normal usage +object P { new P() ; def f(): Unit = new P() } +class P private (x: Int = 3) diff --git a/test/files/pos/t7745.scala b/test/files/pos/t7745.scala new file mode 100644 index 000000000000..d1b0ed9b2c8f --- /dev/null +++ b/test/files/pos/t7745.scala @@ -0,0 +1,38 @@ + +package bug + +import scala.language.implicitConversions + +class Base[T] + +class BaseOps[T] { + type OpT[U] = Op[T, U] // Fails below + //type OpT[U] = List[U] // OK + //trait OpT[U] extends Op[T, U] // OK + + def op(tgt: OpTarget[OpT]) = tgt +} + +object Base { + implicit def baseOps[T](b: Base[T]): BaseOps[T] = new BaseOps[T] +} + +class Op[A, B] + +class OpTarget[TC[_]] + +object OpTarget { + implicit def apply[TC[_]](a: Any): OpTarget[TC] = new OpTarget[TC] +} + +object TestBase { + val baseOps = new BaseOps[String] + baseOps.op(23) // OK in all cases + + val base = new Base[String] + base.op(23) // In the failing case: + // found : Int(23) + // required: shapeless.OpTarget[[U]shapeless.Op[String,U]] + // base.op(23) + // ^ +} diff --git a/test/files/pos/t7750.scala b/test/files/pos/t7750.scala index a819a6f06e14..b0e509072f3d 100644 --- 
a/test/files/pos/t7750.scala +++ b/test/files/pos/t7750.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -feature +//> using options -Xfatal-warnings -feature +// trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] trait Growable[T] trait Sizing diff --git a/test/files/pos/t7753.scala b/test/files/pos/t7753.scala index 93ad23f11448..80e2f1c66742 100644 --- a/test/files/pos/t7753.scala +++ b/test/files/pos/t7753.scala @@ -8,7 +8,7 @@ trait SI { } object Test { - def test { + def test: Unit = { def indirect(si: SI)(v: si.instance.Out) = v val foo: Foo { type Out = Int } = ??? diff --git a/test/files/pos/t7776.scala b/test/files/pos/t7776.scala index a36497a7a181..627d20dd63fe 100644 --- a/test/files/pos/t7776.scala +++ b/test/files/pos/t7776.scala @@ -17,4 +17,4 @@ object MacroErasure { import c.universe._ c.Expr(q"$f[${tt.tpe}]($x)") } -} \ No newline at end of file +} diff --git a/test/files/pos/t7782.scala b/test/files/pos/t7782.scala index 037bdad673a6..9b98f6ac6829 100644 --- a/test/files/pos/t7782.scala +++ b/test/files/pos/t7782.scala @@ -15,7 +15,7 @@ object O { } abstract class C[E] { - def foo[BB](f: BB) + def foo[BB](f: BB): Unit def test[B](f: B): Any = foo(f) // error: no type parameters for method foo: ( f: BB)scala.this.Unit exist so that it can be applied to arguments (B&1) // --- because --- diff --git a/test/files/pos/t7782b.scala b/test/files/pos/t7782b.scala index 09da4a5c5be6..5b1979ec1fc0 100644 --- a/test/files/pos/t7782b.scala +++ b/test/files/pos/t7782b.scala @@ -15,7 +15,7 @@ object O { } abstract class C[E] { - def foo[BB](f: BB) + def foo[BB](f: BB): Unit def test[B](f: B): Any = foo(f) // error: no type parameters for method foo: ( f: BB)scala.this.Unit exist so that it can be applied to arguments (B&1) // --- because --- diff --git a/test/files/pos/t7788.scala b/test/files/pos/t7788.scala index 81eada962b68..c97cb34cee74 100644 --- a/test/files/pos/t7788.scala +++ b/test/files/pos/t7788.scala @@ -5,4 +5,4 @@ class Test { 
def conforms(x: Int, y: Int) = x < y def foo[A](implicit ev: Int => A) = ??? foo[Int] -} \ No newline at end of file +} diff --git a/test/files/pos/t7864.scala b/test/files/pos/t7864.scala index 42b04187d063..10f1b926e1b1 100644 --- a/test/files/pos/t7864.scala +++ b/test/files/pos/t7864.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint +//> using options -Xlint +// object Test { val f = 0; ({ toString; (x: Any) => x})("$f ") diff --git a/test/files/pos/t7891.scala b/test/files/pos/t7891.scala new file mode 100644 index 000000000000..7d7e29e93a98 --- /dev/null +++ b/test/files/pos/t7891.scala @@ -0,0 +1,3 @@ +class B { self => + def this(x: Any) { this() ; new Object { self } } +} diff --git a/test/files/pos/t7983.scala b/test/files/pos/t7983.scala index a583e538c539..bae9f3333fa4 100644 --- a/test/files/pos/t7983.scala +++ b/test/files/pos/t7983.scala @@ -20,7 +20,7 @@ class DivergenceTest { u2: Shape2[_ <: Level, M2, U2] ): Shape2[Level, (M1,M2), (U1,U2)] = ??? - def foo { + def foo: Unit = { class Coffees extends ColumnBase[Int] def map1[F, T](f: F)(implicit shape: Shape2[_ <: Flat, F, T]) = ??? 
diff --git a/test/files/pos/t7987/Macro_1.scala b/test/files/pos/t7987/Macro_1.scala index 81f717b9c4ca..acf5f601c773 100644 --- a/test/files/pos/t7987/Macro_1.scala +++ b/test/files/pos/t7987/Macro_1.scala @@ -1,4 +1,4 @@ -import scala.language.experimental._ +import scala.language.experimental.macros object Macro { def apply[A](a: A): A = macro impl[A] diff --git a/test/files/pos/t8001/Macros_1.scala b/test/files/pos/t8001/Macros_1.scala index aee595979679..077082a9c243 100644 --- a/test/files/pos/t8001/Macros_1.scala +++ b/test/files/pos/t8001/Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/pos/t8001/Test_2.scala b/test/files/pos/t8001/Test_2.scala index e4a131b4ea23..9db1f4636be6 100644 --- a/test/files/pos/t8001/Test_2.scala +++ b/test/files/pos/t8001/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test extends App { Macros.foo (): Unit diff --git a/test/files/pos/t8011.scala b/test/files/pos/t8011.scala index 76bd7acd1434..451590d77e71 100644 --- a/test/files/pos/t8011.scala +++ b/test/files/pos/t8011.scala @@ -5,4 +5,4 @@ class ThingOps1(val x: String) extends AnyVal { } } -trait X[B] { def foo(a: B): Any } \ No newline at end of file +trait X[B] { def foo(a: B): Any } diff --git a/test/files/pos/t8013/inpervolated_2.scala b/test/files/pos/t8013/inpervolated_2.scala index 757d9af839dc..90e571b42c8c 100644 --- a/test/files/pos/t8013/inpervolated_2.scala +++ b/test/files/pos/t8013/inpervolated_2.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings -Xlint:missing-interpolator /* * scalac: -Xfatal-warnings -Xlint */ diff --git a/test/files/pos/t8013/inpervolator_1.scala b/test/files/pos/t8013/inpervolator_1.scala index 04959a18e2ae..612e1d727df8 100644 --- a/test/files/pos/t8013/inpervolator_1.scala +++ b/test/files/pos/t8013/inpervolator_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings 
-Xlint:missing-interpolator package t8013 diff --git a/test/files/pos/t8040.scala b/test/files/pos/t8040.scala index 65cb992980e8..f55ce3c166ea 100644 --- a/test/files/pos/t8040.scala +++ b/test/files/pos/t8040.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings -Ywarn-unused:params +//> using options -Xfatal-warnings -Ywarn-unused:params +// object Test { implicit class C(val sc: StringContext) { // no warn unused sc diff --git a/test/files/pos/t8054.scala b/test/files/pos/t8054.scala index a7bb44b1eda3..66a55e8b5380 100644 --- a/test/files/pos/t8054.scala +++ b/test/files/pos/t8054.scala @@ -18,7 +18,7 @@ object D2 extends D { object Hello { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { // 2.10.3 - ok // 2.11.0-M7 - type mismatch; found : Seq[DB1.MANIFEST.Entry] // required: Seq[DB1.MANIFEST.Entry] diff --git a/test/files/pos/t8064/Client_2.scala b/test/files/pos/t8064/Client_2.scala index 64ce75cbdee6..4b4f6f199219 100644 --- a/test/files/pos/t8064/Client_2.scala +++ b/test/files/pos/t8064/Client_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos +// object Test { Macro { def s = "" diff --git a/test/files/pos/t8064/Macro_1.scala b/test/files/pos/t8064/Macro_1.scala index c8de31aad5c9..3faf1bda0a46 100644 --- a/test/files/pos/t8064/Macro_1.scala +++ b/test/files/pos/t8064/Macro_1.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos +// import language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/pos/t8064b/Client_2.scala b/test/files/pos/t8064b/Client_2.scala index 6eac7dc4184b..052c14860eb6 100644 --- a/test/files/pos/t8064b/Client_2.scala +++ b/test/files/pos/t8064b/Client_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos +// object Test { Macro { "".reverse diff --git a/test/files/pos/t8064b/Macro_1.scala b/test/files/pos/t8064b/Macro_1.scala index 941f7ffd2465..e803a072392c 100644 --- a/test/files/pos/t8064b/Macro_1.scala +++ b/test/files/pos/t8064b/Macro_1.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos 
+// import language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/pos/t8111.scala b/test/files/pos/t8111.scala index 09463ce697a6..e222ea8ab4a1 100644 --- a/test/files/pos/t8111.scala +++ b/test/files/pos/t8111.scala @@ -1,6 +1,6 @@ trait T { - def crashy(ma: Any) { + def crashy(ma: Any): Unit = { // okay val f1 = (u: Unit) => ma foo(f1)() diff --git a/test/files/pos/t8121/A_1.scala b/test/files/pos/t8121/A_1.scala new file mode 100644 index 000000000000..6d3fafca8d8b --- /dev/null +++ b/test/files/pos/t8121/A_1.scala @@ -0,0 +1,2 @@ +package a +object Foo diff --git a/test/files/pos/t8121/B_2.scala b/test/files/pos/t8121/B_2.scala new file mode 100644 index 000000000000..660dfd6791e3 --- /dev/null +++ b/test/files/pos/t8121/B_2.scala @@ -0,0 +1,4 @@ +//> using options -Werror +package b +import a.Foo +class Foo diff --git a/test/files/pos/t8157-2.10.scala b/test/files/pos/t8157-2.10.scala deleted file mode 100644 index c8ce34be79f0..000000000000 --- a/test/files/pos/t8157-2.10.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Xsource:2.10 -object Test { // PolyTYped function default arg unicity check, - // fails in 2.11, authorized under -Xsource:2.10 - def foo(printer: Any, question: => String, show: Boolean = false)(op: => Any): Any = ??? - def foo[T](question: => String, show: Boolean)(op: => Any = ()): Any = ??? 
-} diff --git a/test/files/pos/t8170.scala b/test/files/pos/t8170.scala index b65f4b85720f..1991da72f425 100644 --- a/test/files/pos/t8170.scala +++ b/test/files/pos/t8170.scala @@ -24,4 +24,4 @@ result = {AbstractNoArgsTypeRef@3237}"F#24451" tp = {PolyType@3235}"[F#14824 <: O#7703.X#7793]F#14824" tparams = (0) = {AbstractTypeSymbol@3247}"type F#24451" -*/ \ No newline at end of file +*/ diff --git a/test/files/pos/t8176.scala b/test/files/pos/t8176.scala new file mode 100644 index 000000000000..8a53a3d79fe5 --- /dev/null +++ b/test/files/pos/t8176.scala @@ -0,0 +1,5 @@ +object Test { + implicit lazy val ambiguousPostfixOps1: language.postfixOps.type = language.postfixOps + implicit lazy val ambiguousPostfixOps2: language.postfixOps.type = language.postfixOps + List(1, 2, 3) tail +} \ No newline at end of file diff --git a/test/files/pos/t8177b.scala b/test/files/pos/t8177b.scala index b7ed9342a36f..82f41b61530b 100644 --- a/test/files/pos/t8177b.scala +++ b/test/files/pos/t8177b.scala @@ -10,4 +10,4 @@ object ThingHolder { type Alias[AIn] = Thing { type A = AIn } } // but the symbol in the outer type ref wasn't co-evolved (so it still referred to the { type A = AIn } underlying the old prefix) // coEvolveSym used to only look at prefixes that were directly RefinedTypes, but they could also be SingleTypes with an underlying RefinedType class View[AIn](val in: ThingHolder.Alias[AIn]) { def f(p: in.A): in.A = p } -class SubView extends View[Int](IntThing) { override def f(p: in.A): in.A = p } \ No newline at end of file +class SubView extends View[Int](IntThing) { override def f(p: in.A): in.A = p } diff --git a/test/files/pos/t8177g.scala b/test/files/pos/t8177g.scala index bb66d3202190..237257b69dbf 100644 --- a/test/files/pos/t8177g.scala +++ b/test/files/pos/t8177g.scala @@ -8,4 +8,4 @@ class AA[T] { class B extends AA[Int] { override def foo: B.this.x.A = super.foo -} \ No newline at end of file +} diff --git a/test/files/pos/t8187.scala 
b/test/files/pos/t8187.scala index 99b10c626034..84b8cd0f4c7b 100644 --- a/test/files/pos/t8187.scala +++ b/test/files/pos/t8187.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Test extends App { val tyn: TypeName = (??? : TypeSymbol).name val ten: TermName = (??? : TermSymbol).name -} \ No newline at end of file +} diff --git a/test/files/pos/t8209a/Macros_1.scala b/test/files/pos/t8209a/Macros_1.scala index 17014b47440f..5d7852cb7add 100644 --- a/test/files/pos/t8209a/Macros_1.scala +++ b/test/files/pos/t8209a/Macros_1.scala @@ -14,4 +14,4 @@ object Macros { } def foo: A = macro impl -} \ No newline at end of file +} diff --git a/test/files/pos/t8209a/Test_2.scala b/test/files/pos/t8209a/Test_2.scala index e19d572f55ef..bedef776ff87 100644 --- a/test/files/pos/t8209a/Test_2.scala +++ b/test/files/pos/t8209a/Test_2.scala @@ -1,4 +1,4 @@ object Test extends App { val a: A = Macros.foo val b: B = Macros.foo -} \ No newline at end of file +} diff --git a/test/files/pos/t8209b/Macros_1.scala b/test/files/pos/t8209b/Macros_1.scala index 705f7d630c91..fa521d38db00 100644 --- a/test/files/pos/t8209b/Macros_1.scala +++ b/test/files/pos/t8209b/Macros_1.scala @@ -14,4 +14,4 @@ object Macros { } def foo: A = macro impl -} \ No newline at end of file +} diff --git a/test/files/pos/t8209b/Test_2.scala b/test/files/pos/t8209b/Test_2.scala index e19d572f55ef..bedef776ff87 100644 --- a/test/files/pos/t8209b/Test_2.scala +++ b/test/files/pos/t8209b/Test_2.scala @@ -1,4 +1,4 @@ object Test extends App { val a: A = Macros.foo val b: B = Macros.foo -} \ No newline at end of file +} diff --git a/test/files/pos/t8223.scala b/test/files/pos/t8223.scala index 52d6b0098eeb..7f80d8de9d05 100644 --- a/test/files/pos/t8223.scala +++ b/test/files/pos/t8223.scala @@ -17,9 +17,9 @@ package object p { type EnvAlias[W <: HasA] = ViewEnv[W#A] type SubAlias[W <: HasA] = ViewEnv[W#A]#SubView - def f0[R](xs: R)(implicit tc: Indexable[R]): ViewEnv[tc.A]#SubView = new 
ViewEnv[tc.A]() get - def f1[R](xs: R)(implicit tc: Indexable[R]): EnvAlias[tc.type]#SubView = new ViewEnv[tc.A]() get - def f2[R](xs: R)(implicit tc: Indexable[R]): SubAlias[tc.type] = new ViewEnv[tc.A]() get + def f0[R](xs: R)(implicit tc: Indexable[R]): ViewEnv[tc.A]#SubView = new ViewEnv[tc.A]().get + def f1[R](xs: R)(implicit tc: Indexable[R]): EnvAlias[tc.type]#SubView = new ViewEnv[tc.A]().get + def f2[R](xs: R)(implicit tc: Indexable[R]): SubAlias[tc.type] = new ViewEnv[tc.A]().get def g0 = f0(Array(1)) has 2 // ok def g1 = f1(Array(1)) has 2 // ok diff --git a/test/files/pos/t8252.scala b/test/files/pos/t8252.scala new file mode 100644 index 000000000000..e0f33c63fde7 --- /dev/null +++ b/test/files/pos/t8252.scala @@ -0,0 +1,129 @@ +// https://github.com/scala/bug/issues/8252 +object t1 { + import scala.language.higherKinds + type Id[A] = A + def foo[F[_], G[_], A](a: F[G[A]]): F[G[A]] = { println("what?! " + a); a } + def oops(): Unit = { + foo[Id, Id, Int](1) + } + oops() + def expected(): Unit = { + val kaboom = foo[Id, Id, Int](1) + } +} + +// https://github.com/scala/bug/issues/8252#issuecomment-534822175 +object t2 { + trait Foo { type X } + trait HL extends Foo { override type X } + trait HC[H <: Foo, T <: HL] extends HL { override type X = H#X with T#X } + trait HN extends HL { override type X = Any } + class A extends Foo { trait X } ; class B extends Foo { trait X } + class Test { + def test: Unit = { + val bad = new HC[A, HC[B, HN]] {} + val xx: bad.X = ??? + } + } +} + +// https://github.com/scala/bug/issues/8252#issuecomment-347417206 +object t3 { + import scala.language.reflectiveCalls + + trait Schema + + class Thing[S] { + def apply[T](col: S => T): T = ??? 
+ } + + type :+:[H, T <: Schema] = H with T + type End = Schema + + class Test { + + def test = { + new Thing[{val foo: String} :+: End].apply(_.foo) + + new Thing[{val foo: String} :+: {val bar: Int} :+: End].apply(x => x.foo) + } + } + + // https://github.com/scala/bug/issues/8252#issuecomment-347456209 + trait Example[T] { + type Out + + def apply[A](fn: Out => A): A = ??? + } + + object Example { + def apply[A](implicit inst: Example[A]): Aux[A, inst.Out] = inst + + type Aux[T, Out0] = Example[T] {type Out = Out0} + + implicit def forall[T]: Aux[T, T] = new Example[T] { + type Out = T + } + } + + Example[ {val foo: Int} :+: {val bar: String} :+: {val baz: Boolean} :+: {val buzz: Double} :+: {val booze: Float} :+: End].apply(_.foo) + + // https://github.com/scala/bug/issues/8252#issuecomment-347562144 + new Example[Unit] { + type Out = {val foo: Int} :+: {val bar: String} :+: {val baz: Boolean} :+: {val buzz: Double} :+: {val booze: Float} :+: End + }.apply(_.foo) + + // https://github.com/scala/bug/issues/8252#issuecomment-347562502 + new Example[Unit] { + type Out = {val foo: Int} :+: End + }.apply(_.foo) +} + +// https://github.com/scala/bug/issues/8252#issuecomment-347565398 +object t4 { + import scala.language.reflectiveCalls + + object Example1 { + + trait Schema + + type :+:[H, T <: Schema] = H with T + type End = Schema + + class Thing[S] { + type Out = S + + def apply[T](fn: Out => T): T = ??? + } + + new Thing[ {val foo: String} :+: {val bar: Int} :+: {val baz: Boolean} :+: {val booze: Double} :+: End + ].apply(x => x.foo) + + val foo = new Thing[ {val foo: String} :+: {val bar: Int} :+: {val baz: Boolean} :+: {val booze: Double} :+: End + ] + + foo.apply(x => x.foo) + } + + object Example2 { + + trait Schema + + type :+:[H, T <: Schema] = H with T + type End = Schema + + class Thing[S] + + implicit class ThingOps[S](self: Thing[S]) { + type Out = S + + def apply[T](fn: Out => T): T = ??? 
+ } + + val foo = new Thing[ {val foo: String} :+: {val bar: Int} :+: {val baz: Boolean} :+: {val booze: Double} :+: End + ] + + foo.apply(x => x.foo) + + } +} \ No newline at end of file diff --git a/test/files/pos/t8299.scala b/test/files/pos/t8299.scala new file mode 100644 index 000000000000..fd83b58f7f29 --- /dev/null +++ b/test/files/pos/t8299.scala @@ -0,0 +1,18 @@ +object Test extends App { + class C + + implicit class CompatibleC(c: C) { + def foo(x: Int) = ??? + } + + val c: C = ??? + println(c.foo _) + + object B { + def f(x: Int) = x + def f(x: String) = x + } + + val fun: Int => Int = B.f _ + val funs: String => String = B.f _ +} diff --git a/test/files/pos/t8300-conversions-a.scala b/test/files/pos/t8300-conversions-a.scala index 248a8b73b28e..1a24da7502af 100644 --- a/test/files/pos/t8300-conversions-a.scala +++ b/test/files/pos/t8300-conversions-a.scala @@ -20,4 +20,4 @@ object Test extends App { val sym: Symbol = ??? sym.asFreeType -} \ No newline at end of file +} diff --git a/test/files/pos/t8300-conversions-b.scala b/test/files/pos/t8300-conversions-b.scala index 0524ee36834f..a571dbea9040 100644 --- a/test/files/pos/t8300-conversions-b.scala +++ b/test/files/pos/t8300-conversions-b.scala @@ -20,4 +20,4 @@ object Test extends App { val sym: Symbol = ??? sym.asFreeType -} \ No newline at end of file +} diff --git a/test/files/pos/t8300-overloading.scala b/test/files/pos/t8300-overloading.scala index ae9699ab8613..2eeba0a66ca7 100644 --- a/test/files/pos/t8300-overloading.scala +++ b/test/files/pos/t8300-overloading.scala @@ -13,4 +13,4 @@ object Test extends App { def foo(name: Name) = ??? def foo(name: TermName) = ??? -} \ No newline at end of file +} diff --git a/test/files/pos/t8300-patmat-a.scala b/test/files/pos/t8300-patmat-a.scala index 4421c0a15eb4..ab3a3c960526 100644 --- a/test/files/pos/t8300-patmat-a.scala +++ b/test/files/pos/t8300-patmat-a.scala @@ -17,4 +17,4 @@ object Test extends App { case ScalaName => ??? 
} } -} \ No newline at end of file +} diff --git a/test/files/pos/t8300-patmat-b.scala b/test/files/pos/t8300-patmat-b.scala index c01aeb912d44..0acad4406956 100644 --- a/test/files/pos/t8300-patmat-b.scala +++ b/test/files/pos/t8300-patmat-b.scala @@ -17,4 +17,4 @@ object Test extends App { case ScalaName => ??? } } -} \ No newline at end of file +} diff --git a/test/files/pos/t8352/Macros_1.scala b/test/files/pos/t8352/Macros_1.scala index f5c8ce578f12..c1c63e57bea3 100644 --- a/test/files/pos/t8352/Macros_1.scala +++ b/test/files/pos/t8352/Macros_1.scala @@ -4,4 +4,4 @@ import scala.language.experimental.macros object Macros { def impl(c: Context)(x: c.Expr[Boolean]): c.Expr[Boolean] = x def foo(x: Boolean): Boolean = macro impl -} \ No newline at end of file +} diff --git a/test/files/pos/t8352/Test_2.scala b/test/files/pos/t8352/Test_2.scala index b5bfe92ffb2b..1f84c3ca1e91 100644 --- a/test/files/pos/t8352/Test_2.scala +++ b/test/files/pos/t8352/Test_2.scala @@ -1,5 +1,5 @@ object Test extends App { - def expectUnit() { + def expectUnit(): Unit = { Macros.foo(true) } -} \ No newline at end of file +} diff --git a/test/files/pos/t8363.scala b/test/files/pos/t8363.scala index 0b42a9c4d206..b076d5a6a461 100644 --- a/test/files/pos/t8363.scala +++ b/test/files/pos/t8363.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:method +//> using options -Ydelambdafy:method +// class C(a: Any) class Test { def foo: Any = { diff --git a/test/files/pos/t8365.scala b/test/files/pos/t8365.scala new file mode 100644 index 000000000000..5f7547790e5f --- /dev/null +++ b/test/files/pos/t8365.scala @@ -0,0 +1,10 @@ + +class A { + def f = + this match { + case this => + } + val this = this +} + + diff --git a/test/files/pos/t8367.scala b/test/files/pos/t8367.scala index 0367b97e44ef..6c62bae474ee 100644 --- a/test/files/pos/t8367.scala +++ b/test/files/pos/t8367.scala @@ -8,4 +8,4 @@ package java.lang class Throwable // class CloneNotSupportedException -// class InterruptedException \ 
No newline at end of file +// class InterruptedException diff --git a/test/files/pos/t8369a.scala b/test/files/pos/t8369a.scala index 0596fdaf7402..13046007fbad 100644 --- a/test/files/pos/t8369a.scala +++ b/test/files/pos/t8369a.scala @@ -2,4 +2,4 @@ object Bug { trait Sys[S] def test[S <: Sys[S]] = read[S]() def read[S <: Sys[S]](baz: Any = 0): Some[S] = ??? -} \ No newline at end of file +} diff --git a/test/files/pos/t8369b.scala b/test/files/pos/t8369b.scala index 8145911db194..3194463fb794 100644 --- a/test/files/pos/t8369b.scala +++ b/test/files/pos/t8369b.scala @@ -15,4 +15,4 @@ object Bug { def read[S <: Sys[S]](baz: Baz[S#Tx] = NoBaz): Bar[S] = ??? } trait Bar[S <: Sys[S]] -} \ No newline at end of file +} diff --git a/test/files/pos/t8410.scala b/test/files/pos/t8410.scala index ada06474f64b..8b63b936617d 100644 --- a/test/files/pos/t8410.scala +++ b/test/files/pos/t8410.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:inline -opt-inline-from:** -Xfatal-warnings -deprecation:false -opt-warnings:none +//> using options -opt:inline:** -Wopt:none -Werror -deprecation:false object Test extends App { @deprecated("","") def f = 42 diff --git a/test/files/pos/t8460.scala b/test/files/pos/t8460.scala index 10d2ed432ce6..cd1e6c392cf4 100644 --- a/test/files/pos/t8460.scala +++ b/test/files/pos/t8460.scala @@ -9,7 +9,7 @@ trait UFunc { object UFunc { class UImpl[A, B, C] - implicit def implicitDoubleUTag[Tag, V, VR](implicit conv: V=>Double, impl: UImpl[Tag, Double, VR]):UImpl[Tag, V, VR] = ??? + implicit def implicitDoubleUTag[Tag, V, VR](implicit conv: V => Double, impl: UImpl[Tag, Double, VR]):UImpl[Tag, V, VR] = ??? } diff --git a/test/files/pos/t8493.scala b/test/files/pos/t8493.scala new file mode 100644 index 000000000000..a5e63a1bede4 --- /dev/null +++ b/test/files/pos/t8493.scala @@ -0,0 +1,25 @@ +object Test { + trait Foo { + def foo: this.type + } + + case class Impl() extends Foo { + def foo = ??? 
+ def bar: Unit = () + } + + object Foo { + def foo(f: Foo): f.type = f.foo + } + + def work(f: Impl): Unit = + Foo.foo(f).bar + + def bug(f: Int => Impl): Unit = + Foo.foo(f(1)).bar + + def workaround(f: Int => Impl): Unit = { + val tmp = f(1) + Foo.foo(tmp).bar + } +} diff --git a/test/files/pos/t851.scala b/test/files/pos/t851.scala index afe4b79d3c06..70f18673c72a 100644 --- a/test/files/pos/t851.scala +++ b/test/files/pos/t851.scala @@ -22,7 +22,7 @@ object test { """bad """ } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Console.println(ok1 + ok2 + ok3 + ok4 + bad) } } diff --git a/test/files/pos/t8522.scala b/test/files/pos/t8522.scala new file mode 100644 index 000000000000..f6fb5ef7845d --- /dev/null +++ b/test/files/pos/t8522.scala @@ -0,0 +1,21 @@ +trait IterateeT[F[_]] +trait StepT[F[_]] + +class Test { + def iterateeT[F[_]](s: F[StepT[F]]): IterateeT[F] = ??? + + def fail[M[+_]]: IterateeT[M] = { + val back: M[StepT[M]] = ??? + iterateeT(back) // fails after SI-2066 fix + } + + def okay1[M[_]]: IterateeT[M] = { + val back: M[StepT[M]] = ??? + iterateeT(back) // okay if M is invariant + } + + def okay2[M[_]]: IterateeT[M] = { + val back: M[StepT[M]] = ??? 
+ iterateeT[M](back) // okay if type args are explicit + } +} diff --git a/test/files/pos/t8523.scala b/test/files/pos/t8523.scala index 96b835aea9c4..b70a4cdc2ee6 100644 --- a/test/files/pos/t8523.scala +++ b/test/files/pos/t8523.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-dead-code -Xfatal-warnings +//> using options -Ywarn-dead-code -Xfatal-warnings +// import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/pos/t8546.scala b/test/files/pos/t8546.scala index 8fc4506e2180..da374f427f05 100644 --- a/test/files/pos/t8546.scala +++ b/test/files/pos/t8546.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// package test class F1() { diff --git a/test/files/pos/t8578.scala b/test/files/pos/t8578.scala index 0e070c1d2a20..be5be85156bb 100644 --- a/test/files/pos/t8578.scala +++ b/test/files/pos/t8578.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:method +//> using options -Ydelambdafy:method +// class DuplicateClassName { () => { {() => ()} diff --git a/test/files/pos/t8583.scala b/test/files/pos/t8583.scala new file mode 100644 index 000000000000..258981ceefce --- /dev/null +++ b/test/files/pos/t8583.scala @@ -0,0 +1,13 @@ +import language.implicitConversions + +class t8583 { + + case class A( value: Double ) { + def *( o: A ) = A( value * o.value ) + } + + implicit def doubleToA( d: Double ) = A( d ) + implicit def listToA( in: List[A] ): A = in.head + + val result: A = List( A( 1 ) ) map { 2.0 * _ } //this line causes the compiler to crash +} diff --git a/test/files/pos/t8596.scala b/test/files/pos/t8596.scala index 1bed37636484..72a63c3b3b04 100644 --- a/test/files/pos/t8596.scala +++ b/test/files/pos/t8596.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos +// +// class TypeTreeObjects { class Container { def typeParamAndDefaultArg[C](name: String = ""): String = "" diff --git a/test/files/pos/t8602.scala b/test/files/pos/t8602.scala new file mode 100644 index 
000000000000..b8502b646d88 --- /dev/null +++ b/test/files/pos/t8602.scala @@ -0,0 +1,10 @@ +//> using options -language:higherKinds +// +object Test { + case class Foo[CC[_], D <: CC[Int]](d: D, cc: CC[Int]) + Foo(Nil, List(1, 2, 3)) + + class H[F[_]] + def g[F[_], T, FT <: F[T]](h: H[F]) = 1 + g(new H[Set]) +} diff --git a/test/files/pos/t8617.scala b/test/files/pos/t8617.scala index 1f536086cb15..42ba325f5f05 100644 --- a/test/files/pos/t8617.scala +++ b/test/files/pos/t8617.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos +// +// object Test { def foo[A] = implicitly[OptManifest[A]] // was "unpositioned tree" under -Yrangepos diff --git a/test/files/pos/t8664.scala b/test/files/pos/t8664.scala new file mode 100644 index 000000000000..991646445e00 --- /dev/null +++ b/test/files/pos/t8664.scala @@ -0,0 +1,4 @@ +object Test extends App { + Ordering[Symbol] + Seq('b, 'c, 'a).sorted +} diff --git a/test/files/pos/t8719/Macros_1.scala b/test/files/pos/t8719/Macros_1.scala index 152c92f25483..7dcc38e631df 100644 --- a/test/files/pos/t8719/Macros_1.scala +++ b/test/files/pos/t8719/Macros_1.scala @@ -18,4 +18,4 @@ object Macros { } def typecheck(code: String): Option[String] = macro typecheck_impl -} \ No newline at end of file +} diff --git a/test/files/pos/t8719/Test_2.scala b/test/files/pos/t8719/Test_2.scala index 997eb2f23652..9a051f3954df 100644 --- a/test/files/pos/t8719/Test_2.scala +++ b/test/files/pos/t8719/Test_2.scala @@ -7,4 +7,4 @@ object Bar { object Test extends App { println(Macros.typecheck("val Foo(x, y, z) = Foo(1, 'a')")) println(Macros.typecheck("val Bar(x, y, z) = Foo(1, 'a')")) -} \ No newline at end of file +} diff --git a/test/files/pos/t8736-b.scala b/test/files/pos/t8736-b.scala index 903292d23298..1c96b7fd30fa 100644 --- a/test/files/pos/t8736-b.scala +++ b/test/files/pos/t8736-b.scala @@ -1,4 +1,4 @@ -// scalac: -feature -language:_ -Xfatal-warnings +//> using options -feature -language:_ -Xfatal-warnings // showing that all are set class X { 
def hk[M[_]] = ??? diff --git a/test/files/pos/t8736.scala b/test/files/pos/t8736.scala index 46c0cdfd000e..5768f5f95333 100644 --- a/test/files/pos/t8736.scala +++ b/test/files/pos/t8736.scala @@ -1,4 +1,4 @@ -// scalac: -feature -language:implicitConversions -language:higherKinds -language:-implicitConversions -Xfatal-warnings +//> using options -feature -language:implicitConversions -language:higherKinds -language:-implicitConversions -Xfatal-warnings // showing that multiple settings are respected, and explicit enablement has precedence class X { def hk[M[_]] = ??? diff --git a/test/files/pos/t8781/Test_2.scala b/test/files/pos/t8781/Test_2.scala index bd86664ac4e8..c67c7daab081 100644 --- a/test/files/pos/t8781/Test_2.scala +++ b/test/files/pos/t8781/Test_2.scala @@ -1,4 +1,5 @@ -// scalac: -Ymacro-expand:discard -Ystop-after:typer +//> using options -Ymacro-expand:discard -Ystop-after:typer +// object Test { implicit class RichT(t: T) { def augmented = "" } diff --git a/test/files/pos/t8828.scala b/test/files/pos/t8828.scala index ea242656bcd8..5015a785157b 100644 --- a/test/files/pos/t8828.scala +++ b/test/files/pos/t8828.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint:inaccessible -Xfatal-warnings +//> using options -Xlint:inaccessible -Xfatal-warnings +// package outer diff --git a/test/files/pos/t8841.scala b/test/files/pos/t8841.scala new file mode 100644 index 000000000000..d822bca39640 --- /dev/null +++ b/test/files/pos/t8841.scala @@ -0,0 +1,9 @@ +class Cell(val ambiguousName: Option[String]) + +class Test { + def wrap(f: Any): Nothing = ??? 
+ wrap { + val c = new Cell(ambiguousName = Some("bla")) + val ambiguousName = c.ambiguousName + } +} diff --git a/test/files/pos/t8852/Interface.java b/test/files/pos/t8852/Interface.java new file mode 100644 index 000000000000..7b35f3b12f1e --- /dev/null +++ b/test/files/pos/t8852/Interface.java @@ -0,0 +1,5 @@ +public interface Interface { + public static int staticMethod() { + return 42; + } +} diff --git a/test/files/pos/t8852/Test.scala b/test/files/pos/t8852/Test.scala new file mode 100644 index 000000000000..acd36ec2a5a0 --- /dev/null +++ b/test/files/pos/t8852/Test.scala @@ -0,0 +1,5 @@ +object Test { + val x: Int = Interface.staticMethod() +} + +class C extends Interface // expect no errors about unimplemented members. diff --git a/test/files/pos/t8855b.scala b/test/files/pos/t8855b.scala new file mode 100644 index 000000000000..c53bb415a8c2 --- /dev/null +++ b/test/files/pos/t8855b.scala @@ -0,0 +1,17 @@ + +trait T { + final val x = 42 + def f = { + import x._ + val g: Int => Int = + + g(1) + } +} +/* + * +test/files/neg/t8855.scala:5: error: stable identifier required, but 42 found. 
+ import x._ + ^ +Exception in thread "main" java.lang.NullPointerException + at scala.tools.nsc.typechecker.Contexts.registerImport(Contexts.scala:93) + */ diff --git a/test/files/pos/t8861.scala b/test/files/pos/t8861.scala index 5f1c6161a4ae..a560bba1be96 100644 --- a/test/files/pos/t8861.scala +++ b/test/files/pos/t8861.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint:infer-any -Xfatal-warnings +//> using options -Xlint:infer-any -Xfatal-warnings +// trait Test { type R = PartialFunction[Any, Unit] diff --git a/test/files/pos/t8882.scala b/test/files/pos/t8882.scala new file mode 100644 index 000000000000..cdb70bd32ea0 --- /dev/null +++ b/test/files/pos/t8882.scala @@ -0,0 +1,10 @@ +import language.higherKinds + +object Test { + class A[T[_]] + object B extends A[List] + object C extends A[Option] + + val values1 = Seq[Any](B, C) + val values2 = Seq(B, C) +} diff --git a/test/files/pos/t8893.scala b/test/files/pos/t8893.scala index b87c8bdd3cff..c4d53e758370 100644 --- a/test/files/pos/t8893.scala +++ b/test/files/pos/t8893.scala @@ -2,7 +2,7 @@ object Test { def a(): Option[String] = Some("a") - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { a() match { case Some(b1) => a() match { diff --git a/test/files/pos/t8894.scala b/test/files/pos/t8894.scala index 3b26f1ae7e15..b4b6b30b5bb7 100644 --- a/test/files/pos/t8894.scala +++ b/test/files/pos/t8894.scala @@ -9,4 +9,4 @@ object CC extends { class Test { val cc = new CC(23, "foo") val CC(i, s) = cc -} \ No newline at end of file +} diff --git a/test/files/pos/t8934a/Test_2.scala b/test/files/pos/t8934a/Test_2.scala index ecc922db0859..6b25ad28c0ba 100644 --- a/test/files/pos/t8934a/Test_2.scala +++ b/test/files/pos/t8934a/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Ystop-after:typer -Ymacro-expand:discard -nowarn +//> using options -Ystop-after:typer -Ymacro-expand:discard -nowarn object Test { "" match { case Unapply(a, b) => diff --git a/test/files/pos/t8947/Client_2.scala 
b/test/files/pos/t8947/Client_2.scala index 1a5082a2f910..29e819f194d2 100644 --- a/test/files/pos/t8947/Client_2.scala +++ b/test/files/pos/t8947/Client_2.scala @@ -1 +1 @@ -object Test { X.extractor } \ No newline at end of file +object Test { X.extractor } diff --git a/test/files/pos/t8954/t1.scala b/test/files/pos/t8954/t1.scala index 1f015a7d48d0..ccd5cb71b280 100644 --- a/test/files/pos/t8954/t1.scala +++ b/test/files/pos/t8954/t1.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -Xfatal-warnings -deprecation package scala.foo // 1. a class about to be made final diff --git a/test/files/pos/t8954/t2.scala b/test/files/pos/t8954/t2.scala index 899ad407015d..5002a62a096b 100644 --- a/test/files/pos/t8954/t2.scala +++ b/test/files/pos/t8954/t2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -deprecation +//> using options -Xfatal-warnings -deprecation package scala.foo // 1.2 deprecated children should be fine... diff --git a/test/files/pos/t8965.scala b/test/files/pos/t8965.scala index 84f7bc479053..729777049d10 100644 --- a/test/files/pos/t8965.scala +++ b/test/files/pos/t8965.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// class A { def f(x: Any with AnyRef, y: Any with AnyRef) = x eq y // a.scala:2: warning: Any and Any are unrelated: they will most likely never compare equal diff --git a/test/files/pos/t8999.scala b/test/files/pos/t8999.scala index ad2f99b906ba..23b3870b812d 100644 --- a/test/files/pos/t8999.scala +++ b/test/files/pos/t8999.scala @@ -1,4 +1,5 @@ -// scalac: -nowarn +//> using options -nowarn +// object Types { abstract sealed class Type diff --git a/test/files/neg/t900.scala b/test/files/pos/t900.scala similarity index 100% rename from test/files/neg/t900.scala rename to test/files/pos/t900.scala diff --git a/test/files/pos/t9014.scala b/test/files/pos/t9014.scala new file mode 100644 index 000000000000..8af97634c488 --- /dev/null +++ b/test/files/pos/t9014.scala 
@@ -0,0 +1,6 @@ +object Test { + def spec[@specialized(Byte, Short, Int, Long) T : Integral](t: T) = { + def inner(default: T = t): T = t + inner() + } +} diff --git a/test/files/pos/t9020.scala b/test/files/pos/t9020.scala index b43e7516ae6c..a882772581af 100644 --- a/test/files/pos/t9020.scala +++ b/test/files/pos/t9020.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-value-discard -Xfatal-warnings +//> using options -Ywarn-value-discard -Xfatal-warnings +// trait ValueDiscard[@specialized U] { def u: U } diff --git a/test/files/pos/t9034.scala b/test/files/pos/t9034.scala new file mode 100644 index 000000000000..4dd27d039a44 --- /dev/null +++ b/test/files/pos/t9034.scala @@ -0,0 +1,9 @@ +object Test { + abstract class Foo { + protected val foo: Boolean + } + + def foo[@specialized T](t: T) = new Foo { + protected val foo: Boolean = true + } +} diff --git a/test/files/pos/t9050.scala b/test/files/pos/t9050.scala index b1ab09f901c8..acb9222a0926 100644 --- a/test/files/pos/t9050.scala +++ b/test/files/pos/t9050.scala @@ -1,5 +1,5 @@ final class Mu[F](val value: Any) extends AnyVal { - def cata(f: F) { + def cata(f: F): Unit = { // crash ((y: Mu[F]) => y.cata(f)) // crash diff --git a/test/files/pos/t9061.scala b/test/files/pos/t9061.scala new file mode 100644 index 000000000000..a48057e33014 --- /dev/null +++ b/test/files/pos/t9061.scala @@ -0,0 +1,30 @@ +object original { + trait Bar[T] { + val value: T + } + + object Bar { + def apply[T](t: => T): Bar[T] = new Bar[T] { + val value = t + } + } + + trait Foo[A] { def foo(a: A): Unit } + object Foo { + implicit val intFoo = new Foo[Int] { def foo(x: Int) = () } + } + + object Demo { + Bar[Foo[Int]]({ + object Blah { + lazy val blah: Foo[Int] = Foo.intFoo + } + Blah.blah + }).value.foo _ + } +} + +class Reduced { + def byName(a: => Any) = ??? 
+ byName({ object Blah; Blah }).toString _ +} diff --git a/test/files/pos/t9107.scala b/test/files/pos/t9107.scala new file mode 100644 index 000000000000..827971d05444 --- /dev/null +++ b/test/files/pos/t9107.scala @@ -0,0 +1,12 @@ +//> using options -Werror -deprecation + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +class qqq(count: Int) { + def macroTransform(annottees: Any*): Any = macro qqq.qqqImpl +} + +object qqq { + def qqqImpl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = ??? +} diff --git a/test/files/pos/t9111-inliner-workaround/Test_1.scala b/test/files/pos/t9111-inliner-workaround/Test_1.scala index cd477ddb4161..d8124076edc8 100644 --- a/test/files/pos/t9111-inliner-workaround/Test_1.scala +++ b/test/files/pos/t9111-inliner-workaround/Test_1.scala @@ -1,4 +1,5 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** -Wopt +// object Test extends App { println(new A_1.Inner()) diff --git a/test/files/pos/t9125.scala b/test/files/pos/t9125.scala new file mode 100644 index 000000000000..f1d3e67f618f --- /dev/null +++ b/test/files/pos/t9125.scala @@ -0,0 +1,14 @@ + +package p { + class C +} + +package q { + package p { + class K { + import _root_.{p => pp} + def f() = new pp.C + def g() = new _root_.p.C + } + } +} diff --git a/test/files/pos/t9151.scala b/test/files/pos/t9151.scala new file mode 100644 index 000000000000..daf95ee10f6d --- /dev/null +++ b/test/files/pos/t9151.scala @@ -0,0 +1,15 @@ +class t9151 { + abstract class Foo extends Ordered[Foo] + + val seq: Seq[Int] = null + val set: Set[Int] = null + val map: Map[Int, Int] = null + val values: Map[Int, Set[Foo]] = null + + map ++ set.map(_ -> "") + + values ++ seq.groupBy(_ / 2).toSeq.map({ + case (key, group) => + key -> (values(key) ++ group.map(_ => "")) + }) +} diff --git a/test/files/pos/t9178b.scala b/test/files/pos/t9178b.scala index a1d3837ed650..cbeaed4f17fe 100644 --- a/test/files/pos/t9178b.scala +++ 
b/test/files/pos/t9178b.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental abstract class Test{ val writeInput: java.io.OutputStream => Unit def getOutputStream(): java.io.OutputStream diff --git a/test/files/pos/t9211.scala b/test/files/pos/t9211.scala new file mode 100644 index 000000000000..5250effd8dea --- /dev/null +++ b/test/files/pos/t9211.scala @@ -0,0 +1,9 @@ +//> using options -Werror -Xlint + +trait T[A] +class C extends T[Any] + +class Test { + def f[A](t: T[A]) = () + def g() = f(new C) +} diff --git a/test/files/pos/t9220.scala b/test/files/pos/t9220.scala index 7e4f11ba2829..660791b6f940 100644 --- a/test/files/pos/t9220.scala +++ b/test/files/pos/t9220.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test { trait Command object Command { diff --git a/test/files/pos/t9227.scala b/test/files/pos/t9227.scala new file mode 100644 index 000000000000..ec921355ac37 --- /dev/null +++ b/test/files/pos/t9227.scala @@ -0,0 +1,6 @@ +trait TC[M[_], @specialized(Int) A] + +object Test { + def f2[M[_], @specialized(Int) A](implicit ev: TC[M, A]): M[A] = ??? + def f1[M[_], @specialized(Int) A](implicit ev: TC[M, A]): M[A] = f2[M, A](ev) +} diff --git a/test/files/pos/t9285.scala b/test/files/pos/t9285.scala index 2fedc05f5bbf..03e3f0e1691b 100644 --- a/test/files/pos/t9285.scala +++ b/test/files/pos/t9285.scala @@ -1,2 +1,3 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// case class C(placeholder: Unit) diff --git a/test/files/pos/t9337.scala b/test/files/pos/t9337.scala new file mode 100644 index 000000000000..2ae661c47470 --- /dev/null +++ b/test/files/pos/t9337.scala @@ -0,0 +1,13 @@ +object Test { + trait TypePreservingFn[T[X <: T[X]]] + trait Validator[T, This <: Validator[T,This]] + + trait Foo[T] { + type V[This <: Validator[T, This]] = Validator[T, This] + val f: TypePreservingFn[V] = ??? + } + + class Bar[T] extends Foo[T] { + val g: TypePreservingFn[V] = ??? 
+ } +} diff --git a/test/files/pos/t9352.scala b/test/files/pos/t9352.scala new file mode 100644 index 000000000000..6cc1d1a2aa6f --- /dev/null +++ b/test/files/pos/t9352.scala @@ -0,0 +1,15 @@ +object RefinementTest { + trait A[T] + trait B[T] extends A[T] + + def takesPartial + (first: PartialFunction[Any, Unit]) + (second: () => Unit): Unit = ??? + + def foo[T](a: A[T]) { + a match { + case b: B[t] => + takesPartial({ case _ => })(() => implicitly[t =:= T]) + } + } +} diff --git a/test/files/pos/t9356/Test_3.scala b/test/files/pos/t9356/Test_3.scala index fa1b76c9e1eb..b7bfe0289061 100644 --- a/test/files/pos/t9356/Test_3.scala +++ b/test/files/pos/t9356/Test_3.scala @@ -1,3 +1,3 @@ class Foo1 extends Foo -class Foo2 extends Foo \ No newline at end of file +class Foo2 extends Foo diff --git a/test/files/pos/t9368.scala b/test/files/pos/t9368.scala new file mode 100644 index 000000000000..87da8e20be46 --- /dev/null +++ b/test/files/pos/t9368.scala @@ -0,0 +1,18 @@ +case class Event[T](sender: T) + +class Foo(bar: String) { + self => + + def this() { + this("baz") + + new { Event(self) } // crashes compiler + } +} + +class Bar(x: Int) { + def this() = { + this(1) + new { x } + } +} diff --git a/test/files/pos/t9369.scala b/test/files/pos/t9369.scala index e83b8ba3eac4..15ecc7103216 100644 --- a/test/files/pos/t9369.scala +++ b/test/files/pos/t9369.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings -unchecked +//> using options -Xfatal-warnings // object Test { diff --git a/test/files/pos/t9370/sample_2.scala b/test/files/pos/t9370/sample_2.scala index 3149d56c2942..70ae0156aafc 100644 --- a/test/files/pos/t9370/sample_2.scala +++ b/test/files/pos/t9370/sample_2.scala @@ -1,5 +1,6 @@ -// scalac: -Xplugin:/tmp -Xplugin:. -Xplugin-require:timebomb -Ystop-after:parser +//> using options -Xplugin:/tmp-fake -Xplugin:. 
-Xplugin-require:timebomb -Ystop-after:parser +// package sample // just a sample that is compiled with the explosive plugin disabled diff --git a/test/files/pos/t9397.scala b/test/files/pos/t9397.scala index 3dbc6591d3f6..2a849516be12 100644 --- a/test/files/pos/t9397.scala +++ b/test/files/pos/t9397.scala @@ -4,7 +4,7 @@ import scala.reflect.runtime.universe._ object Foo { - def bar[T: TypeTag]() { + def bar[T: TypeTag](): Unit = { } import foo._ diff --git a/test/files/pos/t9399.scala b/test/files/pos/t9399.scala index 572dc1423790..537717ed1df4 100644 --- a/test/files/pos/t9399.scala +++ b/test/files/pos/t9399.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed abstract class TA sealed abstract class TB extends TA case object A extends TA diff --git a/test/files/pos/t9411a.scala b/test/files/pos/t9411a.scala index b97b07c92715..b28d5e17871b 100644 --- a/test/files/pos/t9411a.scala +++ b/test/files/pos/t9411a.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object OhNoes { sealed trait F diff --git a/test/files/pos/t9411b.scala b/test/files/pos/t9411b.scala index 738b80db5c8a..77714d038e16 100644 --- a/test/files/pos/t9411b.scala +++ b/test/files/pos/t9411b.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object OhNoes { sealed trait F diff --git a/test/files/pos/t9414.scala b/test/files/pos/t9414.scala new file mode 100644 index 000000000000..d9c5eba385f5 --- /dev/null +++ b/test/files/pos/t9414.scala @@ -0,0 +1,78 @@ +import annotation._ + +class C { + def foo = { + class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + class Child extends Parent { + def notBar = 42 + } + } +} + +class D { + def foo = { + class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + class Child extends Parent + class GrandChild extends Child { + def notBar = 42 + } + } +} + +object E { + 
sealed class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + final class Child extends Parent { + def notBar = 42 + } +} + +class F { + def foo = { + class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + class K { + class Child extends Parent { + def notBar = 42 + } + class GrandChild extends Child { + def neitherBar = 42 + } + } + } +} + +class G { + sealed class Parent { + @tailrec def bar: Int = { + println("here we go again") + bar + } + } + def foo = { + class Child extends Parent { + def notBar = 42 + } + class GrandChild extends Child { + def neitherBar = 42 + } + } +} diff --git a/test/files/pos/t9449.scala b/test/files/pos/t9449.scala index 3b86dc80a0c0..7f1967cf8cfc 100644 --- a/test/files/pos/t9449.scala +++ b/test/files/pos/t9449.scala @@ -16,4 +16,4 @@ object Test { // found : Int => Int // required: II } -} \ No newline at end of file +} diff --git a/test/files/pos/t9451.scala b/test/files/pos/t9451.scala new file mode 100644 index 000000000000..15b00d9d459e --- /dev/null +++ b/test/files/pos/t9451.scala @@ -0,0 +1,10 @@ +import scala.language.higherKinds +object t9451 { + implicit def impl[I[_]]: { + type F[X] = { type Self = I[X] } + } = new { + type F[X] = { type Self = I[X] } + } + + implicitly[{type F[X] = { type Self = Iterable[X] }}] +} diff --git a/test/files/pos/t9490.scala b/test/files/pos/t9490.scala new file mode 100644 index 000000000000..a7975257ec4c --- /dev/null +++ b/test/files/pos/t9490.scala @@ -0,0 +1,27 @@ +//> using options -Werror -Xlint:inaccessible + +package ws { + private[ws] trait Foo + private[ws] object Test { + class Bar { + def apply(f: Foo) = ??? + } + } +} + +package p { + private[p] class D + sealed trait T { def f(d: D): Unit } + final class C extends T { def f(d: D) = () } +} + +/* was: +t9490.scala:7: warning: method apply in class Bar references private[ws] trait Foo. +Classes which cannot access Foo may be unable to override apply. 
+ def apply(f: Foo) = ??? + ^ +t9490.scala:14: warning: method f in trait T references private[p] class D. +Classes which cannot access D may be unable to provide a concrete implementation of f. + sealed trait T { def f(d: D): Unit } + ^ + */ diff --git a/test/files/pos/t9552/t9552a.scala b/test/files/pos/t9552/t9552a.scala new file mode 100644 index 000000000000..7cd244c096f7 --- /dev/null +++ b/test/files/pos/t9552/t9552a.scala @@ -0,0 +1,4 @@ + +package object p { + type Foo = Int +} diff --git a/test/files/pos/t9552/t9552b.scala b/test/files/pos/t9552/t9552b.scala new file mode 100644 index 000000000000..29c7af827f15 --- /dev/null +++ b/test/files/pos/t9552/t9552b.scala @@ -0,0 +1,11 @@ + +package p { + object Dingo { type Foo = String } + import Dingo._ + //import Dingo.Foo // works + trait Bippy { + //import Dingo._ // works + def z: Foo + z: String + } +} diff --git a/test/files/pos/t9568.scala b/test/files/pos/t9568.scala new file mode 100644 index 000000000000..9c02a733aded --- /dev/null +++ b/test/files/pos/t9568.scala @@ -0,0 +1,20 @@ +object Cyclic { + class Node[T]() { + type Self = T + } + + val nodeA = new Node[Int]() + val nodeB = new NodeB(a => a) + val nodeC = new NodeC(a => a) + /* + val nodeB = new NodeB(a => a + 1) + val nodeC = new NodeC(a => a + 1) + */ + val nodeD = new NodeD((b, c) => b + c) + + class NodeB[T](fun: Function[nodeA.Self, T]) extends Node[T] + + class NodeC[T](fun: Function[nodeA.Self, T]) extends Node[T] + + class NodeD[T](fun: Function2[nodeB.Self, nodeC.Self, T]) extends Node[T] +} diff --git a/test/files/pos/t9630/t9630a.scala b/test/files/pos/t9630/t9630a.scala index 6629bd350f06..c76ecd2ff217 100644 --- a/test/files/pos/t9630/t9630a.scala +++ b/test/files/pos/t9630/t9630a.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings sealed trait Base final case class Base_1(sameName: Some[Any]) extends Base diff --git a/test/files/pos/t9630/t9630b.scala b/test/files/pos/t9630/t9630b.scala index 84d6382bdc39..980615abf6a4 100644 --- 
a/test/files/pos/t9630/t9630b.scala +++ b/test/files/pos/t9630/t9630b.scala @@ -1,5 +1,4 @@ -// scalac: -Xfatal-warnings - +//> using options -Werror class Test { def test(b: Base): Unit = b match { case Base_1(Some(_)) => diff --git a/test/files/pos/t9657.scala b/test/files/pos/t9657.scala new file mode 100644 index 000000000000..27aa7587a5e4 --- /dev/null +++ b/test/files/pos/t9657.scala @@ -0,0 +1,21 @@ +//> using options -Werror +sealed trait PowerSource +case object Petrol extends PowerSource +case object Pedal extends PowerSource + +sealed abstract class Vehicle { type A <: PowerSource } +case object Bicycle extends Vehicle { type A = Pedal.type } +case class Bus(fuel: Int) extends Vehicle { type A = Petrol.type } +case class Car(fuel: Int) extends Vehicle { type A = Petrol.type } + +object Test { + def refuel[P <: Petrol.type](vehicle: Vehicle { type A = P }): Vehicle = vehicle match { + case Car(_) => Car(100) + case Bus(_) => Bus(100) // was: "unreachable code" warning + } + + def main(args: Array[String]): Unit = { + println(refuel(Car(100))) + println(refuel(Bus(5))) + } +} diff --git a/test/files/pos/t9717.scala b/test/files/pos/t9717.scala new file mode 100644 index 000000000000..fe3b72f9bb3e --- /dev/null +++ b/test/files/pos/t9717.scala @@ -0,0 +1,29 @@ +//> using options -Yno-predef + +import scala.Predef.implicitly + +class A(val a: Int) +class B(implicit F: Int) extends A( implicitly[Int] ) +class C(implicit F: Int) extends A( { + val v = implicitly[Int] + v +}) + + +class A1(val a: Int)(implicit val F: Int) +class B1(implicit F: Int) extends A1(implicitly[Int]) +class C1(implicit F: Int) extends A1({ + val v = implicitly[Int] + v +}) +class D1 extends A1({ + implicit val v: Int = 1; implicitly[Int] +})(0) + +class D(x: Int) { + import D.f + def this() = { this(D.f); implicitly[Int] } // D.f in scope after self constr call +} +object D { + implicit def f: Int = 1 +} diff --git a/test/files/pos/t9717_2.scala b/test/files/pos/t9717_2.scala new file 
mode 100644 index 000000000000..e1bc9d462d1a --- /dev/null +++ b/test/files/pos/t9717_2.scala @@ -0,0 +1,8 @@ +class Ann(implicit val i: Int) + +abstract class Bob(implicit i: Int) extends Ann { + def foo: Int + def dee(): Bob = new Bob { + def foo = 23 + } +} diff --git a/test/files/pos/t9725.scala b/test/files/pos/t9725.scala new file mode 100644 index 000000000000..db613005e880 --- /dev/null +++ b/test/files/pos/t9725.scala @@ -0,0 +1,14 @@ +trait Foo1[-X] { def bar[Y <: X](y: Y) = y } // typechecks + +// A variant of Foo1 encoding the type parameter Y using a dependent method type. +// error: contravariant type X occurs in covariant position in type <: X of type Y +trait Foo2[-X] { def bar(x: { type Y <: X })(y: x.Y) = y } + +trait Foo3[+X] { def bar[Y >: X](y: Y) = y } // typechecks + +// A variant of Foo3 using a dependent method type. +// error: covariant type X occurs in contravariant position in type >: X of type Y +trait Foo4[+X] { def bar(x: { type Y >: X })(y: x.Y) = y } + +// error: covariant type X occurs in contravariant position in type >: X of type Y +trait Foo9[+X] { def bar(x: { type Y >: X }): Any } diff --git a/test/files/pos/t9745.scala b/test/files/pos/t9745.scala index 6b6443e4eb77..3ed5359c8223 100644 --- a/test/files/pos/t9745.scala +++ b/test/files/pos/t9745.scala @@ -11,4 +11,4 @@ class D { class Convo { def f(i: Int)(z: Any): Int = ??? 
val g = (x: Int, y: Int) => f(42)(x, y) -} \ No newline at end of file +} diff --git a/test/files/pos/t9855.scala b/test/files/pos/t9855.scala index b6ac3e2432cd..9d4e27cc3c00 100644 --- a/test/files/pos/t9855.scala +++ b/test/files/pos/t9855.scala @@ -1,5 +1,5 @@ class C { - def xx(verb: String, a: Array[Int]) { + def xx(verb: String, a: Array[Int]): Unit = { val reYYYY = """(\d\d\d\d)""".r verb match { case "time" if a.isEmpty => diff --git a/test/files/pos/t9931.scala b/test/files/pos/t9931.scala new file mode 100644 index 000000000000..777e2137b8c4 --- /dev/null +++ b/test/files/pos/t9931.scala @@ -0,0 +1,17 @@ + +trait Compute[A] { + type Start + val start: Compute[Start] +} + +object Test { + def main(args: Array[String]): Unit = foo(new Compute[Unit] { type Start = Unit; val start = this }) + def foo[A](c: Compute[A]): Unit = + c match { + case c: Compute[A] => + new Compute[A] { + type Start = c.Start + val start = c.start + } + } +} diff --git a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala index f719972a17fe..42af173eb623 100644 --- a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala +++ b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala @@ -1,16 +1,13 @@ -import scala.collection.generic.GenericTraversableTemplate -import scala.collection.Iterable - -class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) { +class IterableOps[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])) { def unzip: (CC[A1], CC[A2]) = sys.error("foo") } object Test { - implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) + implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])) = new IterableOps[CC, A1, A2](tuple) val t = (List(1, 2, 3), List(6, 5, 4)) - tupleOfIterableWrapper(t) unzip + 
tupleOfIterableWrapper(t).unzip } diff --git a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala index 19243505b433..ee7484eb7b3c 100644 --- a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala +++ b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala @@ -1,18 +1,17 @@ -import scala.collection.generic.GenericTraversableTemplate import scala.collection.Iterable -class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) { +class IterableOps[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])) { def unzip: (CC[A1], CC[A2]) = sys.error("foo") } object Test { - implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) + implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B], A1, A2](tuple: (CC[A1], Iterable[A2])) = new IterableOps[CC, A1, A2](tuple) val t = (List(1, 2, 3), List(6, 5, 4)) - tupleOfIterableWrapper(t) unzip + tupleOfIterableWrapper(t).unzip - t unzip + t.unzip } diff --git a/test/files/pos/tcpoly_infer_ticket1864.scala b/test/files/pos/tcpoly_infer_ticket1864.scala index 77d20234d586..70cfac06291b 100644 --- a/test/files/pos/tcpoly_infer_ticket1864.scala +++ b/test/files/pos/tcpoly_infer_ticket1864.scala @@ -32,7 +32,7 @@ object App { } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { implicit def richBuffer[T, B[U] <: Buffer[U]](buffer: B[T]): RichBuffer[T, B] = new RichBuffer[T, B](buffer) diff --git a/test/files/pos/tcpoly_infer_ticket474.scala b/test/files/pos/tcpoly_infer_ticket474.scala index 8c9be4d5c4ef..9012deb2b627 100644 --- a/test/files/pos/tcpoly_infer_ticket474.scala +++ b/test/files/pos/tcpoly_infer_ticket474.scala @@ -1,5 +1,5 @@ trait Builder[C[_], T] { - def +=(x: T) + def +=(x: T): Unit def finalise: C[T] } @@ -24,4 +24,4 @@ object Test { } val l: List[Int] = foo(8) -} \ No newline at 
end of file +} diff --git a/test/files/pos/tcpoly_infer_ticket716.scala b/test/files/pos/tcpoly_infer_ticket716.scala index 24e8f663bc3c..df3b2996a72b 100644 --- a/test/files/pos/tcpoly_infer_ticket716.scala +++ b/test/files/pos/tcpoly_infer_ticket716.scala @@ -1,18 +1,18 @@ trait Functor[F[_]] { - def fmap[A,B](fun: A=>B, arg:F[A]): F[B] + def fmap[A, B](fun: A => B, arg: F[A]): F[B] } object Functor{ implicit val ListFunctor: Functor[List] = new Functor[List] { def fmap[A, B](f: A => B, arg: List[A]):List[B] = arg map f } - final class OOFunctor[F[_],A](arg:F[A])(implicit ftr: Functor[F]) { - def fmap[B](fun: A=>B):F[B] = ftr.fmap(fun,arg) + final class OOFunctor[F[_], A](arg: F[A])(implicit ftr: Functor[F]) { + def fmap[B](fun: A => B): F[B] = ftr.fmap(fun,arg) } //breaks if uncommented - implicit def lifttoOO[F[_],A](arg:F[A])(implicit ftr: Functor[F]) = new OOFunctor[F,A](arg)(ftr) + implicit def lifttoOO[F[_], A](arg: F[A])(implicit ftr: Functor[F]) = new OOFunctor[F, A](arg)(ftr) //works if uncommented //implicit def liftListtoOO[A](arg:List[A]):OOFunctor[List,A] = new OOFunctor[List,A](arg) @@ -20,7 +20,7 @@ object Functor{ object GeneralLiftingDemo extends App { import Functor._ - val l = List(1,2,3) - val res = l fmap( 1+) // TODO: should not need explicit call to lifttoOO + val l = List(1, 2, 3) + val res = l fmap (1 + _) // TODO: should not need explicit call to lifttoOO println("OO : " + res ) } diff --git a/test/files/pos/tcpoly_seq.scala b/test/files/pos/tcpoly_seq.scala index 48b3e1ce5210..3afe89e76050 100644 --- a/test/files/pos/tcpoly_seq.scala +++ b/test/files/pos/tcpoly_seq.scala @@ -60,7 +60,7 @@ trait HOSeq { * * @param x the element to append. */ - def += (x: A) { + def += (x: A): Unit = { if (exported) copy if (start.isEmpty) { last = new HOSeq.this.:: (x, Nil) @@ -81,13 +81,13 @@ trait HOSeq { /** Clears the buffer contents. 
*/ - def clear { + def clear: Unit = { start = Nil exported = false } /** Copy contents of this buffer */ - private def copy { + private def copy: Unit = { var cursor = start val limit = last.tail clear @@ -153,8 +153,8 @@ trait HOSeq { Subseq <: Seq[T] - def map[K](f: T=>K): MapResult[K] - def filter(f: T=>Boolean): FilterResult + def map[K](f: T => K): MapResult[K] + def filter(f: T => Boolean): FilterResult def subseq(from: Int, to: Int): Subseq def flatMap[S <: Seq[K], K](f: T => S): S#Concat // legal? def concat(others: Seq[T]): Concat diff --git a/test/files/pos/tcpoly_seq_typealias.scala b/test/files/pos/tcpoly_seq_typealias.scala index fb48126ce69b..8d2f6e7c381c 100644 --- a/test/files/pos/tcpoly_seq_typealias.scala +++ b/test/files/pos/tcpoly_seq_typealias.scala @@ -62,7 +62,7 @@ trait HOSeq { * * @param x the element to append. */ - def += (x: A) { + def += (x: A): Unit = { if (exported) copy if (start.isEmpty) { last = new HOSeq.this.:: (x, Nil) @@ -83,13 +83,13 @@ trait HOSeq { /** Clears the buffer contents. 
*/ - def clear { + def clear: Unit = { start = Nil exported = false } /** Copy contents of this buffer */ - private def copy { + private def copy: Unit = { var cursor = start val limit = last.tail clear diff --git a/test/files/pos/tcpoly_ticket2096.scala b/test/files/pos/tcpoly_ticket2096.scala index d92589c92915..d2387b36bd24 100644 --- a/test/files/pos/tcpoly_ticket2096.scala +++ b/test/files/pos/tcpoly_ticket2096.scala @@ -27,4 +27,4 @@ class MBraceSequitor[A] extends MBraceSeq[MSequitor,A] { val empty : MSequitor[A] = new MSequitor[A]( ) override def nest( a : A ) = empty.nest( a ) override def flatten[T <: MSequitor[MSequitor[A]]]( bsq : T ): MSequitor[A] = empty.flatten( bsq ) -} \ No newline at end of file +} diff --git a/test/files/pos/tcpoly_typeapp.scala b/test/files/pos/tcpoly_typeapp.scala index 4cb1da4f74cd..b131b7288ea8 100644 --- a/test/files/pos/tcpoly_typeapp.scala +++ b/test/files/pos/tcpoly_typeapp.scala @@ -1,4 +1,4 @@ abstract class x { type t[m[x] <: Bound[x], Bound[x]] - val x: t[scala.collection.mutable.MutableList, Iterable] + val x: t[scala.collection.mutable.Stack, Iterable] } diff --git a/test/files/pos/thistype.scala b/test/files/pos/thistype.scala index ac315f332369..265c821ad6ee 100644 --- a/test/files/pos/thistype.scala +++ b/test/files/pos/thistype.scala @@ -5,10 +5,10 @@ object Test { } class MouseCtl extends Ctl { - def mouseDown(x: Int, y: Int) { Console.println("mouse down") } + def mouseDown(x: Int, y: Int): Unit = { Console.println("mouse down") } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { new MouseCtl().enable.mouseDown(1, 2) } diff --git a/test/files/pos/trailing-commas.scala b/test/files/pos/trailing-commas.scala index b9401fe49dd9..37a1d52950f1 100644 --- a/test/files/pos/trailing-commas.scala +++ b/test/files/pos/trailing-commas.scala @@ -36,7 +36,7 @@ trait Params { )(implicit ev0: Ev0, ev1: Ev1, - ) + ): Unit } trait ClassParams { @@ -84,7 +84,7 @@ trait FunTypeParamClause { def f[ A, 
B, - ] + ]: Unit } trait SimpleType { @@ -137,7 +137,7 @@ trait ImportSelectors { } trait Bindings { - def g(f: (Int, String) => Boolean) + def g(f: (Int, String) => Boolean): Unit g(( foo, diff --git a/test/files/pos/trait_fields_inherit_double_def.scala b/test/files/pos/trait_fields_inherit_double_def.scala index 8703d6312c1d..1c6acd92a69d 100644 --- a/test/files/pos/trait_fields_inherit_double_def.scala +++ b/test/files/pos/trait_fields_inherit_double_def.scala @@ -17,4 +17,4 @@ trait IterableSplitter extends DelegatedSignalling { var signalDelegate: Signalling = ??? } -class SUB extends IterableSplitter \ No newline at end of file +class SUB extends IterableSplitter diff --git a/test/files/pos/trait_fields_nested_private_object.scala b/test/files/pos/trait_fields_nested_private_object.scala index 8efc1cb3fa3f..40215b252e62 100644 --- a/test/files/pos/trait_fields_nested_private_object.scala +++ b/test/files/pos/trait_fields_nested_private_object.scala @@ -5,4 +5,4 @@ trait NestedObj { class C extends NestedObj { def O = ??? -} \ No newline at end of file +} diff --git a/test/files/pos/trait_fields_nested_public_object.scala b/test/files/pos/trait_fields_nested_public_object.scala index 016487fb8ab3..37c42b8ba900 100644 --- a/test/files/pos/trait_fields_nested_public_object.scala +++ b/test/files/pos/trait_fields_nested_public_object.scala @@ -2,4 +2,4 @@ trait NestedObj { object O { println("NO") } } -class C extends NestedObj \ No newline at end of file +class C extends NestedObj diff --git a/test/files/pos/trait_fields_owners.scala b/test/files/pos/trait_fields_owners.scala index 6aa5572171d4..be38fdf4317a 100644 --- a/test/files/pos/trait_fields_owners.scala +++ b/test/files/pos/trait_fields_owners.scala @@ -16,4 +16,4 @@ trait V { def scalaPropOrNone(name: String): Option[String] = ??? 
} -object O extends V \ No newline at end of file +object O extends V diff --git a/test/files/pos/trait_fields_private_this.scala b/test/files/pos/trait_fields_private_this.scala index 8065cc89e6a1..2f5fc4440210 100644 --- a/test/files/pos/trait_fields_private_this.scala +++ b/test/files/pos/trait_fields_private_this.scala @@ -2,4 +2,4 @@ trait Chars { private[this] val char2uescapeArray: String = ??? } -object Chars extends Chars \ No newline at end of file +object Chars extends Chars diff --git a/test/files/pos/typetags.scala b/test/files/pos/typetags.scala index 239a9b32ece9..684ea1018658 100644 --- a/test/files/pos/typetags.scala +++ b/test/files/pos/typetags.scala @@ -13,4 +13,4 @@ // } // } -object Test extends App \ No newline at end of file +object Test extends App diff --git a/test/files/pos/u000a.scala b/test/files/pos/u000a.scala new file mode 100644 index 000000000000..bee073acece0 --- /dev/null +++ b/test/files/pos/u000a.scala @@ -0,0 +1,18 @@ +// Unicode newline in a single-line comment? +// Compiler will expect code on the line. +// Here the code is valid. 
+ +/* \n object foo */ +// \n object foo +/* \12 object foo */ +// \12 object foo +/* \u000a object foo */ +// object foo \u000a +// \u000a object foo +/* \n foo */ +// \n foo +/* \12 foo */ +// \12 foo +/* \u000a foo */ +// foo \u000a +// \u000a foo diff --git a/test/files/pos/unary-unquoted.scala b/test/files/pos/unary-unquoted.scala new file mode 100644 index 000000000000..5506f5736f09 --- /dev/null +++ b/test/files/pos/unary-unquoted.scala @@ -0,0 +1,7 @@ + +object Test { + def +[T](x: T): String = "x" + `+`[Int](6): String // Parser can treat + as identifier when backquoted and followed by a type argument + `+`(6): String // Parser can treat + as identifier when backquoted and followed by a value argument + +(6): Int // Parser prioritizes + as unary when possible +} diff --git a/test/files/pos/unchecked-a.scala b/test/files/pos/unchecked-a.scala index ccafbf13969e..d208f98df25e 100644 --- a/test/files/pos/unchecked-a.scala +++ b/test/files/pos/unchecked-a.scala @@ -1,13 +1,10 @@ -// scalac: -unchecked -Xfatal-warnings +//> using options -Xfatal-warnings // trait Y trait Z extends Y class X[+A <: Y] object Test { - def f1(x: X[_ <: Y]) = x match { - case _: X[Any] => // looks a little funny; `Any` is outside the bounds for `A` - } def f2(x: X[_ <: Y]) = x match { case _: X[Y] => // looks better, let's allow this (too) } diff --git a/test/files/pos/unsafe.scala b/test/files/pos/unsafe.scala index 97d769791f77..cbdf7ed218a7 100644 --- a/test/files/pos/unsafe.scala +++ b/test/files/pos/unsafe.scala @@ -1,5 +1,5 @@ -// scalac: --release:8 -Yrelease:sun.misc +//> using options --release:8 -Yrelease:sun.misc import sun.misc.Unsafe diff --git a/test/files/pos/userdefined_apply.scala b/test/files/pos/userdefined_apply.scala index e29f9f514168..d1e335756a02 100644 --- a/test/files/pos/userdefined_apply.scala +++ b/test/files/pos/userdefined_apply.scala @@ -1,27 +1,27 @@ // NOTE: the companion inherits a public apply method from Function1! 
-case class NeedsCompanion private (x: Int) +case class NeedsCompanion protected (x: Int) object ClashNoSig { // ok private def apply(x: Int) = if (x > 0) new ClashNoSig(x) else ??? } -case class ClashNoSig private (x: Int) +case class ClashNoSig protected (x: Int) object Clash { private def apply(x: Int) = if (x > 0) new Clash(x) else ??? } -case class Clash private (x: Int) +case class Clash protected (x: Int) object ClashSig { private def apply(x: Int): ClashSig = if (x > 0) new ClashSig(x) else ??? } -case class ClashSig private (x: Int) +case class ClashSig protected (x: Int) object ClashOverload { private def apply(x: Int): ClashOverload = if (x > 0) new ClashOverload(x) else apply("") def apply(x: String): ClashOverload = ??? } -case class ClashOverload private (x: Int) +case class ClashOverload protected (x: Int) object NoClashSig { private def apply(x: Boolean): NoClashSig = if (x) NoClashSig(1) else ??? @@ -33,7 +33,7 @@ object NoClashOverload { private def apply(x: Boolean): NoClashOverload = if (x) NoClashOverload(1) else apply("") def apply(x: String): NoClashOverload = ??? } -case class NoClashOverload private (x: Int) +case class NoClashOverload protected (x: Int) @@ -43,7 +43,7 @@ class BaseNCP[T] { } object NoClashPoly extends BaseNCP[Boolean] -case class NoClashPoly private(x: Int) +case class NoClashPoly protected(x: Int) class BaseCP[T] { @@ -51,4 +51,4 @@ class BaseCP[T] { def apply(x: T): ClashPoly = if (???) ClashPoly(1) else ??? 
} object ClashPoly extends BaseCP[Int] -case class ClashPoly private(x: Int) +case class ClashPoly protected(x: Int) diff --git a/test/files/pos/value-class-override-no-spec.scala b/test/files/pos/value-class-override-no-spec.scala index 368101df4885..842418aefcd3 100644 --- a/test/files/pos/value-class-override-no-spec.scala +++ b/test/files/pos/value-class-override-no-spec.scala @@ -1,4 +1,5 @@ -// scalac: -no-specialization +//> using options -no-specialization +// // There are two versions of this tests: one with and one without specialization. // The bug was only exposed *without* specialization. trait T extends Any { diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala index 8b8c414b47b0..2f60b213e44d 100644 --- a/test/files/pos/varargs-future.scala +++ b/test/files/pos/varargs-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // class Test { diff --git a/test/files/pos/variance-alias.scala b/test/files/pos/variance-alias.scala new file mode 100644 index 000000000000..94477cea2572 --- /dev/null +++ b/test/files/pos/variance-alias.scala @@ -0,0 +1,6 @@ +trait Tc[F[_]] +object X { + type Id[+A] = A + val a: Tc[({type L[+X] = X})#L] = new Tc[({type L[+X] = X})#L] {} // ok, therefore the following should be too: + val b: Tc[Id] = new Tc[Id] {} // ok +} diff --git a/test/files/pos/variance-holes.scala b/test/files/pos/variance-holes.scala new file mode 100644 index 000000000000..cdcbfa2a225f --- /dev/null +++ b/test/files/pos/variance-holes.scala @@ -0,0 +1,61 @@ +import scala.annotation.unchecked.uncheckedVariance + +object Test { + type Lower1[+A] >: List[A] + + class Lower2[F[+_]] { + type G[+x] >: F[x] + } + + class Lower3[F[+_, +_]](v: F[Int, Int]) { + def asWiden[F2[+x, +y] >: F[x, y]]: F2[Int, Int] = v + } + + trait Refined1[+A] { + def foo: { type T <: A } + } + + trait Refined2[+A] { + def foo(x: { type T >: A }): Unit + } + + class Refined3[+A] { + generic[{ type T >: A } => Int] + } + + 
class Refined4[+A] { + generic[{ type T <: A } => Int] + } + + class RefinedUpper1[+A, x <: { type T <: A }] + class RefinedUpper2[+A, x <: { type T[_ <: A] }] + trait RefinedLower[+A, x <: { type T[_ >: A] }] + + class PrivateThis1[+A] { + private[this] object Foo { var x: A = _ } + } + + class PrivateThis2[-A] { + private[this] val x: Set[A] = Set.empty + private[this] var y: Set[A] = Set.empty + + class Escape { + println(x) + println(y) + } + } + + def generic[A]: Unit = () + + trait UncheckedHKT { + type F[+_, -_] + } + + object UncheckedHKT { + def impl1[G[_, _]]: UncheckedHKT { type F[+A, -B] = G[A @uncheckedVariance, B @uncheckedVariance] } = + new UncheckedHKT { type F[+A, -B] = G[A @uncheckedVariance, B @uncheckedVariance] } + + def impl2[G[_, _]]: UncheckedHKT { type F[+A, -B] = G[A, B] @uncheckedVariance } = + new UncheckedHKT { type F[+A, -B] = G[A, B] @uncheckedVariance } + } +} \ No newline at end of file diff --git a/test/files/pos/variant-placeholders-future.scala b/test/files/pos/variant-placeholders-future.scala index 383db8420f85..74dcc177c039 100644 --- a/test/files/pos/variant-placeholders-future.scala +++ b/test/files/pos/variant-placeholders-future.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +//> using options -Xsource:3 // object Test { type `-_` = Int diff --git a/test/files/pos/viewtest1.scala b/test/files/pos/viewtest1.scala index c681ae706948..38945ad2f9b4 100644 --- a/test/files/pos/viewtest1.scala +++ b/test/files/pos/viewtest1.scala @@ -32,7 +32,7 @@ abstract class Tree[+a <% Ordered[a]] { object Test { import O.view - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { var t: Tree[String] = Empty for (s <- args) { t = t insert s diff --git a/test/files/pos/viewtest2.scala b/test/files/pos/viewtest2.scala index 5c8678099d69..04881dd0d9fc 100644 --- a/test/files/pos/viewtest2.scala +++ b/test/files/pos/viewtest2.scala @@ -4,8 +4,8 @@ package test */ trait Ordered[+a] { - /** Result of comparing `this' with 
operand `that'. - * returns `x' where + /** Result of comparing `this` with operand `that`. + * returns `x` where * x < 0 iff this < that * x == 0 iff this == that * x > 0 iff this > that @@ -89,7 +89,7 @@ object Test { if (s.length() == 0) List() else s.charAt(0) :: toCharList(s.substring(1)) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { { var t: Tree[String] = Empty for (s <- args) { diff --git a/test/files/pos/virtpatmat_alts_subst.scala b/test/files/pos/virtpatmat_alts_subst.scala index b03bbe3b0390..e27c52f9c772 100644 --- a/test/files/pos/virtpatmat_alts_subst.scala +++ b/test/files/pos/virtpatmat_alts_subst.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental case class Foo(s: String) { def appliedType(tycon: Any) = tycon match { diff --git a/test/files/pos/virtpatmat_anonfun_for.scala b/test/files/pos/virtpatmat_anonfun_for.scala index 8623cd97bafa..73b54b18b0f7 100644 --- a/test/files/pos/virtpatmat_anonfun_for.scala +++ b/test/files/pos/virtpatmat_anonfun_for.scala @@ -5,4 +5,4 @@ trait Foo { case _ => println(tvs) }) } -} \ No newline at end of file +} diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala index 489950d090da..8ec931fe78fa 100644 --- a/test/files/pos/virtpatmat_binding_opt.scala +++ b/test/files/pos/virtpatmat_binding_opt.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental class Test { def combine = this match { case that if that eq this => this // just return this diff --git a/test/files/pos/virtpatmat_castbinder.scala b/test/files/pos/virtpatmat_castbinder.scala index 82a97faed59b..ffe74eda71e9 100644 --- a/test/files/pos/virtpatmat_castbinder.scala +++ b/test/files/pos/virtpatmat_castbinder.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental class IntMap[+V] case class Bin[+T](m: IntMap[T]) extends IntMap[T] case class Tip[+T](x: T) extends IntMap[T] diff --git a/test/files/pos/virtpatmat_exhaust_big.scala b/test/files/pos/virtpatmat_exhaust_big.scala index 
985093354083..7bb887980581 100644 --- a/test/files/pos/virtpatmat_exhaust_big.scala +++ b/test/files/pos/virtpatmat_exhaust_big.scala @@ -30,4 +30,4 @@ object Test { Z10 | Z11() | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19() => } -} \ No newline at end of file +} diff --git a/test/files/pos/virtpatmat_exhaust_unchecked.scala b/test/files/pos/virtpatmat_exhaust_unchecked.scala index d97dc04eea8d..fb0a5efb0529 100644 --- a/test/files/pos/virtpatmat_exhaust_unchecked.scala +++ b/test/files/pos/virtpatmat_exhaust_unchecked.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// sealed trait Option {} case class Choice(a: Option, b: Option) extends Option; case class Some(x: Boolean) extends Option; diff --git a/test/files/pos/virtpatmat_exist1.scala b/test/files/pos/virtpatmat_exist1.scala index a8c1322e61e8..f3ac809a7268 100644 --- a/test/files/pos/virtpatmat_exist1.scala +++ b/test/files/pos/virtpatmat_exist1.scala @@ -1,11 +1,27 @@ -// scalac: -Xexperimental -import annotation.unchecked.{ uncheckedVariance=> uV } +import annotation.unchecked.{uncheckedVariance => uV} +import scala.collection.{IterableFactory, StrictOptimizedIterableOps, mutable} import scala.collection.immutable.{ListMap, ListSet} -import scala.collection.mutable.{HashMap, HashSet} +import scala.collection.mutable.{AbstractSet, HashMap, HashSet, Set, SetOps} + +// Stub of HashSet, but not final, so we can extend from it (in Test below) +class HS[A] + extends AbstractSet[A] + with SetOps[A, HS, HS[A]] + with StrictOptimizedIterableOps[A, HS, HS[A]] + with collection.IterableFactoryDefaults[A, HS] + with Serializable { + override def iterableFactory: IterableFactory[HS] = ??? + def get(elem: A): Option[A] = ??? + def contains(elem: A): Boolean = ??? + def addOne(elem: A): HS.this.type = ??? + def clear(): Unit = ??? + def iterator: Iterator[A] = ??? + def subtractOne(elem: A): HS.this.type = ??? 
+} object Test { class HashMapCollision1[A, +B](var hash: Int, var kvs: ListMap[A, B @uV]) extends HashMap[A, B @uV] - class HashSetCollision1[A](var hash: Int, var ks: ListSet[A]) extends HashSet[A] + class HashSetCollision1[A](var hash: Int, var ks: ListSet[A]) extends HS[A] def splitArray[T](ad: Array[Iterable[T]]): Any = ad(0) match { diff --git a/test/files/pos/virtpatmat_exist2.scala b/test/files/pos/virtpatmat_exist2.scala index e9859f4d49ec..f6ebb3ee2f84 100644 --- a/test/files/pos/virtpatmat_exist2.scala +++ b/test/files/pos/virtpatmat_exist2.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental class ParseResult[+T] case class MemoEntry[+T](var r: Either[Nothing,ParseResult[_]]) diff --git a/test/files/pos/virtpatmat_exist3.scala b/test/files/pos/virtpatmat_exist3.scala index 09ce4883f360..6a6d428b1a09 100644 --- a/test/files/pos/virtpatmat_exist3.scala +++ b/test/files/pos/virtpatmat_exist3.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental class ReferenceQueue[T] { def wrapper(jref: ReferenceQueue[_]): ReferenceQueue[T] = jref match { diff --git a/test/files/pos/virtpatmat_exist4.scala b/test/files/pos/virtpatmat_exist4.scala index a04d0e3229b8..728006276350 100644 --- a/test/files/pos/virtpatmat_exist4.scala +++ b/test/files/pos/virtpatmat_exist4.scala @@ -32,4 +32,4 @@ object Test2 { def tvarString(bounds: List[AnyRef]) = { bounds collect { case x: JClass => x } } -} \ No newline at end of file +} diff --git a/test/files/pos/virtpatmat_exist_uncurry.scala b/test/files/pos/virtpatmat_exist_uncurry.scala index e017da634329..727922b31c0c 100644 --- a/test/files/pos/virtpatmat_exist_uncurry.scala +++ b/test/files/pos/virtpatmat_exist_uncurry.scala @@ -3,4 +3,4 @@ object Test { def collect[U](f: PartialFunction[Leaf[_], U]): List[U] def leaves: List[Leaf[T]] = collect { case l: Leaf[T] => l } } -} \ No newline at end of file +} diff --git a/test/files/pos/virtpatmat_gadt_array.scala b/test/files/pos/virtpatmat_gadt_array.scala index b1c6befb0e4e..de0367df05c9 
100644 --- a/test/files/pos/virtpatmat_gadt_array.scala +++ b/test/files/pos/virtpatmat_gadt_array.scala @@ -1,9 +1,8 @@ -// scalac: -Xexperimental -import scala.collection.mutable._ +import scala.collection.ArrayOps object Test { def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = xs match { case x: Array[AnyRef] => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]] - case null => null + case null => null.asInstanceOf[ArrayOps[T]] // `ArrayOps` is AnyVal } // def genericArrayOps[T >: Nothing <: Any](xs: Array[T]): scala.collection.mutable.ArrayOps[T] // = OptionMatching.runOrElse(xs)(((x1: Array[T]) => @@ -12,5 +11,5 @@ object Test { // (OptionMatching.guard(null.==(x1), x1.asInstanceOf[Array[T]]).flatMap(((x3: Array[T]) => // OptionMatching.one(null))): Option[scala.collection.mutable.ArrayOps[T]])): Option[scala.collection.mutable.ArrayOps[T]]).orElse((OptionMatching.zero: Option[scala.collection.mutable.ArrayOps[T]])))) - def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs) + def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps(xs) } diff --git a/test/files/pos/virtpatmat_infer_single_1.scala b/test/files/pos/virtpatmat_infer_single_1.scala index a12a67a7e30b..0e2a7817b8b4 100644 --- a/test/files/pos/virtpatmat_infer_single_1.scala +++ b/test/files/pos/virtpatmat_infer_single_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental case class TypeBounds(a: Type, b: Type) class Type { def bounds: TypeBounds = bounds match { diff --git a/test/files/pos/virtpatmat_instof_valuetype.scala b/test/files/pos/virtpatmat_instof_valuetype.scala index 01bc2f138f7c..46bcf7aa1c99 100644 --- a/test/files/pos/virtpatmat_instof_valuetype.scala +++ b/test/files/pos/virtpatmat_instof_valuetype.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental case class Data(private val t: Option[String] = None, only: Boolean = false) { def add(other: Data) = { other match { diff --git a/test/files/pos/virtpatmat_obj_in_case.scala 
b/test/files/pos/virtpatmat_obj_in_case.scala index 67f9ae2015d6..63bb98dd541f 100644 --- a/test/files/pos/virtpatmat_obj_in_case.scala +++ b/test/files/pos/virtpatmat_obj_in_case.scala @@ -1,4 +1,3 @@ -// scalac: -Xexperimental class ObjInCase { 0 match { case _ => object o diff --git a/test/files/pos/virtpatmat_partialfun_nsdnho.scala b/test/files/pos/virtpatmat_partialfun_nsdnho.scala index f79e82813c41..46536480730a 100644 --- a/test/files/pos/virtpatmat_partialfun_nsdnho.scala +++ b/test/files/pos/virtpatmat_partialfun_nsdnho.scala @@ -15,4 +15,10 @@ class Test { // at scala.tools.nsc.typechecker.SuperAccessors$SuperAccTransformer.hostForAccessorOf(SuperAccessors.scala:474) // at scala.tools.nsc.typechecker.SuperAccessors$SuperAccTransformer.needsProtectedAccessor(SuperAccessors.scala:457) val c: (Int => (Any => Any)) = { m => { case _ => m.toInt } } -} \ No newline at end of file + + + // Again, but using function literal + val a2: (Map[Int, Int] => (Any => Any)) = { m => { _ => m - 1} } + val b2: (Int => (Any => Any)) = { m => { _ => m } } + val c2: (Int => (Any => Any)) = { m => { _ => m.toInt } } +} diff --git a/test/files/pos/virtpatmat_reach_const.scala b/test/files/pos/virtpatmat_reach_const.scala index b55b7cb22935..562f7e146101 100644 --- a/test/files/pos/virtpatmat_reach_const.scala +++ b/test/files/pos/virtpatmat_reach_const.scala @@ -8,4 +8,4 @@ object Test { case (a :: Nil, b :: Nil) => case _ => } -} \ No newline at end of file +} diff --git a/test/files/pos/warn-unused-params-not-implicits.scala b/test/files/pos/warn-unused-params-not-implicits.scala index c2e4f39a0a7c..f8fa369d2ea5 100644 --- a/test/files/pos/warn-unused-params-not-implicits.scala +++ b/test/files/pos/warn-unused-params-not-implicits.scala @@ -1,4 +1,5 @@ -// scalac: -Ywarn-unused:params,-implicits -Xfatal-warnings +//> using options -Ywarn-unused:params,-implicits -Xfatal-warnings +// trait InterFace { /** Call something. 
*/ diff --git a/test/files/pos/widen-existential.scala b/test/files/pos/widen-existential.scala index d7fa3cc1d893..7c10d60e1aaa 100644 --- a/test/files/pos/widen-existential.scala +++ b/test/files/pos/widen-existential.scala @@ -4,4 +4,4 @@ class A { val g = classOf[List[_]] List(g, g) } -} \ No newline at end of file +} diff --git a/test/files/pos/xlint1.scala b/test/files/pos/xlint1.scala index ff8c8515f92b..4ac7706ba3c1 100644 --- a/test/files/pos/xlint1.scala +++ b/test/files/pos/xlint1.scala @@ -1,4 +1,5 @@ -// scalac: -Xlint -Xfatal-warnings +//> using options -Xlint -Xfatal-warnings +// package object foo { implicit class Bar[T](val x: T) extends AnyVal { def bippy = 1 diff --git a/test/files/pos/xml-attributes.scala b/test/files/pos/xml-attributes.scala new file mode 100644 index 000000000000..dd50696fa01d --- /dev/null +++ b/test/files/pos/xml-attributes.scala @@ -0,0 +1,10 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + + + +} diff --git a/test/files/pos/xml-comments.scala b/test/files/pos/xml-comments.scala new file mode 100644 index 000000000000..a38dbe330acb --- /dev/null +++ b/test/files/pos/xml-comments.scala @@ -0,0 +1,10 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + + + +} diff --git a/test/files/pos/xml-entityref.scala b/test/files/pos/xml-entityref.scala new file mode 100644 index 000000000000..1240dd5d7098 --- /dev/null +++ b/test/files/pos/xml-entityref.scala @@ -0,0 +1,9 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + & " ' < > + +} diff --git a/test/files/pos/xml-interpolation.scala b/test/files/pos/xml-interpolation.scala new file mode 100644 index 000000000000..08a2081f3045 --- /dev/null +++ b/test/files/pos/xml-interpolation.scala @@ -0,0 +1,13 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + {bar} + { bar } + {{ bar }} + {{ 3 }} + {{3}} + +} diff --git 
a/test/files/pos/xml-match.scala b/test/files/pos/xml-match.scala new file mode 100644 index 000000000000..ddc984aca46b --- /dev/null +++ b/test/files/pos/xml-match.scala @@ -0,0 +1,20 @@ +//> using options -Ystop-after:parser +// +object foo { + def bar(e: Elem) = e match { + case { _* } => + case { _ } => + case {{3}} => {{3}} + case {{3}} => {{3}} + case { x } if x.toString.toInt < 4 => + { x.toString.toInt + 1 } + } + def bar(n: Node) = n match { + case { _* } => + case { _ } => + case {{3}} => {{3}} + case {{3}} => {{3}} + case { x } if x.toString.toInt < 4 => + { x.toString.toInt + 1 } + } +} diff --git a/test/files/pos/xml-nodebuffer.scala b/test/files/pos/xml-nodebuffer.scala new file mode 100644 index 000000000000..0aa3d97ada5d --- /dev/null +++ b/test/files/pos/xml-nodebuffer.scala @@ -0,0 +1,11 @@ +//> using options -Ystop-after:parser +// + +import scala.xml.NodeBuffer + +object foo { + // SI-9027 + // xml-nodebuffer.scala:10: error: ';' expected but '.' found. + val nodeBuffer: NodeBuffer = + nodeBuffer.foreach(println) +} diff --git a/test/files/pos/xml-ns-empty.scala b/test/files/pos/xml-ns-empty.scala new file mode 100644 index 000000000000..c74aa70dc4ab --- /dev/null +++ b/test/files/pos/xml-ns-empty.scala @@ -0,0 +1,7 @@ +//> using options -Ystop-after:parser +// +object foo { + val n = + + n.namespace == null +} diff --git a/test/files/pos/xml-ns-text.scala b/test/files/pos/xml-ns-text.scala new file mode 100644 index 000000000000..6eed98e900b2 --- /dev/null +++ b/test/files/pos/xml-ns-text.scala @@ -0,0 +1,7 @@ +//> using options -Ystop-after:parser +// +object foo { + val xml = + + ; +} diff --git a/test/files/pos/xml-ns.scala b/test/files/pos/xml-ns.scala new file mode 100644 index 000000000000..092b06d894e9 --- /dev/null +++ b/test/files/pos/xml-ns.scala @@ -0,0 +1,9 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + {bar} + +} diff --git a/test/files/pos/xml-pcdata.scala 
b/test/files/pos/xml-pcdata.scala new file mode 100644 index 000000000000..f657e2d4166f --- /dev/null +++ b/test/files/pos/xml-pcdata.scala @@ -0,0 +1,13 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + + + + val pcdata = + + +} diff --git a/test/files/pos/xml-procinstr.scala b/test/files/pos/xml-procinstr.scala new file mode 100644 index 000000000000..254de36885be --- /dev/null +++ b/test/files/pos/xml-procinstr.scala @@ -0,0 +1,10 @@ +//> using options -Ystop-after:parser +// +object foo { + val bar = "baz" + val xml = + + + + +} diff --git a/test/files/pos/xml-quasiquote.scala b/test/files/pos/xml-quasiquote.scala new file mode 100644 index 000000000000..624a03cd74e9 --- /dev/null +++ b/test/files/pos/xml-quasiquote.scala @@ -0,0 +1,8 @@ +//> using options -Ystop-after:typer +// +import reflect.runtime.universe._ + +object foo { + val ns1 = + q"" +} diff --git a/test/files/pos/xml-xmlns.scala b/test/files/pos/xml-xmlns.scala new file mode 100644 index 000000000000..58c552283698 --- /dev/null +++ b/test/files/pos/xml-xmlns.scala @@ -0,0 +1,7 @@ +//> using options -Ystop-after:parser +// +object foo { + val html = + + +} diff --git a/test/files/pos/yimports-deep.scala b/test/files/pos/yimports-deep.scala new file mode 100644 index 000000000000..b1d0890251bb --- /dev/null +++ b/test/files/pos/yimports-deep.scala @@ -0,0 +1,12 @@ + +// as usual, the special import must be at package level + +package p { + package q { + object X { + import Predef.{identity=>_} + def f() = 1 -> 2 + def g() = 42 ensuring (_ > 0) + } + } +} diff --git a/test/files/pos/yimports-order-b.scala b/test/files/pos/yimports-order-b.scala new file mode 100644 index 000000000000..7905fc25a45a --- /dev/null +++ b/test/files/pos/yimports-order-b.scala @@ -0,0 +1,17 @@ + +package top { + package middle { + import bottom.B.b + class C { + def p() = println("hello, world") + def f = b // comment me to see Predef exclusion + } + } +} + +package bottom { + 
import Predef.{Set => _} + object B { + def b = Set(42) + } +} diff --git a/test/files/pos/yimports-pkgobj/C_2.scala b/test/files/pos/yimports-pkgobj/C_2.scala new file mode 100644 index 000000000000..7f8c8bdaa3ed --- /dev/null +++ b/test/files/pos/yimports-pkgobj/C_2.scala @@ -0,0 +1,8 @@ +//> using options -Yimports:scala,scala.Predef,hello.world +// +import hello.world.{Numb => _, _} // no effect, world isPackage + +class C { + val v: Numb = 42 + def greet() = println("hello, world!") +} diff --git a/test/files/pos/yimports-pkgobj/minidef_1.scala b/test/files/pos/yimports-pkgobj/minidef_1.scala new file mode 100644 index 000000000000..c0107f57d958 --- /dev/null +++ b/test/files/pos/yimports-pkgobj/minidef_1.scala @@ -0,0 +1,7 @@ +//> using options -Yimports:scala +// +package hello + +package object world { + type Numb = Int +} diff --git a/test/files/pos/z1730.scala b/test/files/pos/z1730.scala index 1e74aa48d2f5..35adc7834684 100644 --- a/test/files/pos/z1730.scala +++ b/test/files/pos/z1730.scala @@ -1,4 +1,5 @@ -// scalac: -Ycheck:all +//> using options -Ycheck:_ +// // /scala/trac/z1730/a.scala // Wed May 23 07:41:25 PDT 2012 @@ -8,7 +9,7 @@ class X[R] { class Boo { implicit def toX[R](v: R) : X[R] = null - def goo2 { + def goo2: Unit = { 3.xx(34) } } diff --git a/test/files/positions/Scaladoc2.scala b/test/files/positions/Scaladoc2.scala index e52263d86c6c..1b3ba709d695 100644 --- a/test/files/positions/Scaladoc2.scala +++ b/test/files/positions/Scaladoc2.scala @@ -1,16 +1,16 @@ object Scaladoc2 { - def f { + def f: Unit = { /** * Foo */ - def g {} + def g: Unit = {} /* * Blah blah */ - def h{} + def h: Unit ={} h } - def h {} + def h: Unit = {} } diff --git a/test/files/positions/Scaladoc3.scala b/test/files/positions/Scaladoc3.scala index c331b7e396c1..8ebb50f4a490 100644 --- a/test/files/positions/Scaladoc3.scala +++ b/test/files/positions/Scaladoc3.scala @@ -4,5 +4,5 @@ object Scaladoc3 { */ import scala.collection.mutable.ArrayBuffer - def f {} + def 
f: Unit = {} } diff --git a/test/files/positions/Scaladoc4.scala b/test/files/positions/Scaladoc4.scala index 133cde1c855a..b02466dee0a9 100644 --- a/test/files/positions/Scaladoc4.scala +++ b/test/files/positions/Scaladoc4.scala @@ -4,5 +4,5 @@ object Scaladoc4 { */ 2+2 - def f {} + def f: Unit = {} } diff --git a/test/files/positions/Scaladoc6.scala b/test/files/positions/Scaladoc6.scala index 8beda625ae4b..7177db37c83d 100644 --- a/test/files/positions/Scaladoc6.scala +++ b/test/files/positions/Scaladoc6.scala @@ -6,5 +6,5 @@ object Scaladoc6 { val i = 23 } - def f {} + def f: Unit = {} } diff --git a/test/files/positions/SyntheticNonSynthetic2.scala b/test/files/positions/SyntheticNonSynthetic2.scala index ba53186f845f..5bbb5c461c94 100644 --- a/test/files/positions/SyntheticNonSynthetic2.scala +++ b/test/files/positions/SyntheticNonSynthetic2.scala @@ -1,3 +1,3 @@ object SyntheticNonSynthetic2 { - def foo[A >: Exception] (a : A) {} + def foo[A >: Exception] (a : A): Unit = {} } diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check index 94a3d64d68dc..8fc5a7dbaa03 100644 --- a/test/files/presentation/callcc-interpreter.check +++ b/test/files/presentation/callcc-interpreter.check @@ -3,7 +3,11 @@ reload: CallccInterpreter.scala askTypeCompletion at CallccInterpreter.scala(51,34) ================================================================================ [response] askTypeCompletion at (51,34) -retrieved 66 members +retrieved 70 members +[inaccessible] private[this] val self: callccInterpreter.type +[inaccessible] private[this] val self: callccInterpreter.type +[inaccessible] private[this] val self: callccInterpreter.type +[inaccessible] private[this] val self: callccInterpreter.type abstract trait Term extends AnyRef abstract trait Value extends AnyRef case class Add extends callccInterpreter.Term with Product with Serializable @@ -18,19 +22,19 @@ case class Var extends callccInterpreter.Term with 
Product with Serializable case object Wrong def +(other: String): String def ->[B](y: B): (callccInterpreter.type, B) -def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] -def apply(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[callccInterpreter.Value] +def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with callccInterpreter.Value with java.io.Serializable] +def apply(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[callccInterpreter.Value] def callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A]): callccInterpreter.M[A] def ensuring(cond: Boolean): callccInterpreter.type -def ensuring(cond: Boolean,msg: => Any): callccInterpreter.type +def ensuring(cond: Boolean, msg: => Any): callccInterpreter.type def ensuring(cond: callccInterpreter.type => Boolean): callccInterpreter.type -def ensuring(cond: callccInterpreter.type => Boolean,msg: => Any): callccInterpreter.type -def equals(x$1: Any): Boolean +def ensuring(cond: callccInterpreter.type => Boolean, msg: => Any): callccInterpreter.type +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def id[A]: A => A -def interp(t: callccInterpreter.Term,e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value] -def lookup(x: callccInterpreter.Name,e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value] +def interp(t: callccInterpreter.Term, e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value] +def lookup(x: callccInterpreter.Name, e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value] def main(args: Array[String]): Unit def showM(m: 
callccInterpreter.M[callccInterpreter.Value]): String def test(t: callccInterpreter.Term): String @@ -38,7 +42,7 @@ def toString(): String def unitM[A](a: A): callccInterpreter.M[A] def →[B](y: B): (callccInterpreter.type, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -49,7 +53,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit object Add object App object Ccc @@ -90,8 +94,8 @@ def showM(m: callccInterpreter.M[callccInterpreter.Value]): String = m.in.apply( askType at CallccInterpreter.scala(50,30) ================================================================================ [response] askTypeAt (50,30) -def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match { - case (_1: callccInterpreter.Value, _2: callccInterpreter.Value)(callccInterpreter.Value, callccInterpreter.Value)((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n))) +def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with callccInterpreter.Value with java.io.Serializable] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match { + case (_1: callccInterpreter.Value, _2: callccInterpreter.Value): (callccInterpreter.Value, callccInterpreter.Value)((n: Int): callccInterpreter.Num((m @ _)), (n: Int): callccInterpreter.Num((n @ _))) => 
this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n))) case _ => callccInterpreter.this.unitM[callccInterpreter.Wrong.type](callccInterpreter.this.Wrong) } ================================================================================ diff --git a/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala b/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala index d498fe0b1744..2c86cd583ab5 100644 --- a/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala +++ b/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala @@ -82,9 +82,9 @@ object callccInterpreter { val term1 = App(Con(1), Con(2)) val term2 = Add(Con(1), Ccc("k", Add(Con(2), App(Var("k"), Con(4))))) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(test(term0)) println(test(term1)) println(test(term2)) } -} \ No newline at end of file +} diff --git a/test/files/presentation/completion-implicit-chained.check b/test/files/presentation/completion-implicit-chained.check index c583b7877c36..52e87567cdf7 100644 --- a/test/files/presentation/completion-implicit-chained.check +++ b/test/files/presentation/completion-implicit-chained.check @@ -6,12 +6,12 @@ askTypeCompletion at Completions.scala(11,16) retrieved 22 members [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit -def equals(x$1: Any): Boolean +def equals(x$1: Object): Boolean def hashCode(): Int def map(x: Int => Int)(implicit a: DummyImplicit): test.O.type def toString(): String final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -22,6 +22,6 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): 
Unit private[this] val prefix123: Int ================================================================================ diff --git a/test/files/presentation/completion-implicit-chained/Test.scala b/test/files/presentation/completion-implicit-chained/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/completion-implicit-chained/Test.scala +++ b/test/files/presentation/completion-implicit-chained/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala index 08c6ebf059ca..761bcd9c96d3 100644 --- a/test/files/presentation/doc/doc.scala +++ b/test/files/presentation/doc/doc.scala @@ -1,3 +1,4 @@ +//> using options -Xlint -Werror import scala.reflect.internal.util.{ BatchSourceFile, SourceFile } import scala.tools.nsc.doc import scala.tools.nsc.doc.base._ @@ -44,6 +45,7 @@ object Test extends InteractiveTest { val global: this.type = this + @annotation.nowarn override lazy val analyzer = new { val global: outer.type = outer } with doc.ScaladocAnalyzer with InteractiveAnalyzer { @@ -57,8 +59,6 @@ object Test extends InteractiveTest { def warnNoLink = false def findExternalLink(sym: Symbol, name: String) = None - override def forScaladoc = true - def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = { val docResponse = new Response[(String, String, Position)] askDocComment(sym, source, sym.owner, fragments, docResponse) @@ -73,7 +73,7 @@ object Test extends InteractiveTest { } } - override def runDefaultTests() { + override def runDefaultTests(): Unit = { import compiler._ def findSource(name: String) = sourceFiles.find(_.file.name == name).get @@ -90,7 +90,7 @@ object Test extends InteractiveTest { println("Couldn't reload") case Some(_) => val parseResponse = new 
Response[Tree] - askParsedEntered(batch, true, parseResponse) + askParsedEntered(batch, keepLoaded = true, parseResponse) parseResponse.get.swap.toOption match { case None => println("Couldn't parse") @@ -110,7 +110,7 @@ object Test extends InteractiveTest { case Some(comment) => import comment._ def cnt(bodies: Iterable[Body]) = bodies.size - val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see) + val actual = cnt(example) + cnt(version.toList) + cnt(since.toList) + cnt(todo) + cnt(note) + cnt(see) if (actual != i) println(s"Got docComment with $actual tags instead of $i, file text:\n$newText") } @@ -132,6 +132,7 @@ object Test extends InteractiveTest { case s: Seq[_] => s exists (existsText(_, text)) case p: Product => p.productIterator exists (existsText(_, text)) case c: Comment => existsText(c.body, text) + case x => throw new MatchError(x) } val (derived, base) = compiler.ask { () => val derived = compiler.rootMirror.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived")) diff --git a/test/files/presentation/doc/src/Class.scala b/test/files/presentation/doc/src/Class.scala index a974bd6f5cfc..c5d0b357e169 100644 --- a/test/files/presentation/doc/src/Class.scala +++ b/test/files/presentation/doc/src/Class.scala @@ -1 +1 @@ -object Class \ No newline at end of file +object Class diff --git a/test/files/presentation/dollar-completion.check b/test/files/presentation/dollar-completion.check new file mode 100644 index 000000000000..95e5fddaf92d --- /dev/null +++ b/test/files/presentation/dollar-completion.check @@ -0,0 +1,47 @@ +reload: Completions.scala + +askScopeCompletion at Completions.scala(5,2) +================================================================================ +[response] askScopeCompletion at (5,2) +retrieved 16 members +abstract trait T extends AnyRef +case class C1 extends Product with Serializable +class C2 extends AnyRef +def (x: Int): test.C1 +def canEqual(x$1: Any): Boolean +def 
copy(x: Int): test.C1 +def productArity: Int +def productElement(x$1: Int): Any +object C1 +override def equals(x$1: Any): Boolean +override def hashCode(): Int +override def productElementName(x$1: Int): String +override def productIterator: Iterator[Any] +override def productPrefix: String +override def toString(): String +private[this] val x: Int +================================================================================ + +askScopeCompletion at Completions.scala(12,2) +================================================================================ +[response] askScopeCompletion at (12,2) +retrieved 4 members +abstract trait T extends AnyRef +case class C1 extends Product with Serializable +class C2 extends AnyRef +object C1 +================================================================================ + +askScopeCompletion at Completions.scala(21,2) +================================================================================ +[response] askScopeCompletion at (21,2) +retrieved 8 members +abstract trait T extends AnyRef +case class C1 extends Product with Serializable +class C2 extends AnyRef +def $: Int +def $var: Int +def (): test.C2 +def dollar$: Int +object C1 +================================================================================ diff --git a/test/files/presentation/dollar-completion/Test.scala b/test/files/presentation/dollar-completion/Test.scala new file mode 100644 index 000000000000..14a6aa835064 --- /dev/null +++ b/test/files/presentation/dollar-completion/Test.scala @@ -0,0 +1,3 @@ +import scala.tools.nsc.interactive.tests.InteractiveTest + +object Test extends InteractiveTest diff --git a/test/files/presentation/dollar-completion/src/Completions.scala b/test/files/presentation/dollar-completion/src/Completions.scala new file mode 100644 index 000000000000..e82a0bdd859a --- /dev/null +++ b/test/files/presentation/dollar-completion/src/Completions.scala @@ -0,0 +1,22 @@ +package test + +case class C1(x: Int) { + // Filter out 
`def copy$default$1: Int` + /*_*/ +} + +trait T { + println("hello") + + // Filter out `$init$` + /*_*/ +} + +class C2 { + def `$` = 1 + def `dollar$` = 2 + def `$var` = 3 + + // Include explicit dollar methods + /*_*/ +} diff --git a/test/files/presentation/forgotten-ask.scala b/test/files/presentation/forgotten-ask.scala index 358dd75e9827..0ed7c810277c 100644 --- a/test/files/presentation/forgotten-ask.scala +++ b/test/files/presentation/forgotten-ask.scala @@ -13,7 +13,7 @@ object Test extends InteractiveTest { final val Timeout = 5000 //ms - override def main(args: Array[String]) { + override def main(args: Array[String]): Unit = { val item1 = askItem() compiler.askShutdown() @@ -30,4 +30,4 @@ object Test extends InteractiveTest { case _ => } } -} \ No newline at end of file +} diff --git a/test/files/presentation/higher-order-completion.check b/test/files/presentation/higher-order-completion.check new file mode 100644 index 000000000000..2a963af4ff13 --- /dev/null +++ b/test/files/presentation/higher-order-completion.check @@ -0,0 +1,153 @@ +reload: Completions.scala + +askTypeCompletion at Completions.scala(12,14) +================================================================================ +[response] askTypeCompletion at (12,14) +retrieved 35 members +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] protected[package lang] def clone(): Object +[inaccessible] protected[package lang] def finalize(): Unit +def +(other: String): String +def ->[B](y: B): (test.Foo, B) +def Bar: Double +def Baz: Double +def ensuring(cond: Boolean): test.Foo +def ensuring(cond: Boolean, msg: => Any): test.Foo +def ensuring(cond: test.Foo => Boolean): test.Foo +def ensuring(cond: test.Foo => Boolean, msg: => Any): test.Foo +def equals(x$1: Object): Boolean +def formatted(fmtstr: String): String +def hashCode(): Int 
+def toString(): String +def →[B](y: B): (test.Foo, B) +final def !=(x$1: Any): Boolean +final def ## : Int +final def ==(x$1: Any): Boolean +final def asInstanceOf[T0]: T0 +final def eq(x$1: AnyRef): Boolean +final def isInstanceOf[T0]: Boolean +final def ne(x$1: AnyRef): Boolean +final def notify(): Unit +final def notifyAll(): Unit +final def synchronized[T0](x$1: T0): T0 +final def wait(): Unit +final def wait(x$1: Long): Unit +final def wait(x$1: Long, x$2: Int): Unit +================================================================================ + +askTypeCompletion at Completions.scala(15,13) +================================================================================ +[response] askTypeCompletion at (15,13) +retrieved 35 members +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] protected[package lang] def clone(): Object +[inaccessible] protected[package lang] def finalize(): Unit +def +(other: String): String +def ->[B](y: B): (test.Foo, B) +def Bar: Double +def Baz: Double +def ensuring(cond: Boolean): test.Foo +def ensuring(cond: Boolean, msg: => Any): test.Foo +def ensuring(cond: test.Foo => Boolean): test.Foo +def ensuring(cond: test.Foo => Boolean, msg: => Any): test.Foo +def equals(x$1: Object): Boolean +def formatted(fmtstr: String): String +def hashCode(): Int +def toString(): String +def →[B](y: B): (test.Foo, B) +final def !=(x$1: Any): Boolean +final def ## : Int +final def ==(x$1: Any): Boolean +final def asInstanceOf[T0]: T0 +final def eq(x$1: AnyRef): Boolean +final def isInstanceOf[T0]: Boolean +final def ne(x$1: AnyRef): Boolean +final def notify(): Unit +final def notifyAll(): Unit +final def synchronized[T0](x$1: T0): T0 +final def wait(): Unit +final def wait(x$1: Long): Unit +final def wait(x$1: Long, x$2: Int): Unit 
+================================================================================ + +askTypeCompletion at Completions.scala(18,17) +================================================================================ +[response] askTypeCompletion at (18,17) +retrieved 35 members +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] protected[package lang] def clone(): Object +[inaccessible] protected[package lang] def finalize(): Unit +def +(other: String): String +def ->[B](y: B): (test.Foo, B) +def Bar: Double +def Baz: Double +def ensuring(cond: Boolean): test.Foo +def ensuring(cond: Boolean, msg: => Any): test.Foo +def ensuring(cond: test.Foo => Boolean): test.Foo +def ensuring(cond: test.Foo => Boolean, msg: => Any): test.Foo +def equals(x$1: Object): Boolean +def formatted(fmtstr: String): String +def hashCode(): Int +def toString(): String +def →[B](y: B): (test.Foo, B) +final def !=(x$1: Any): Boolean +final def ## : Int +final def ==(x$1: Any): Boolean +final def asInstanceOf[T0]: T0 +final def eq(x$1: AnyRef): Boolean +final def isInstanceOf[T0]: Boolean +final def ne(x$1: AnyRef): Boolean +final def notify(): Unit +final def notifyAll(): Unit +final def synchronized[T0](x$1: T0): T0 +final def wait(): Unit +final def wait(x$1: Long): Unit +final def wait(x$1: Long, x$2: Int): Unit +================================================================================ + +askTypeCompletion at Completions.scala(21,24) +================================================================================ +[response] askTypeCompletion at (21,24) +retrieved 35 members +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] private[this] val self: test.Foo +[inaccessible] protected[package lang] def clone(): Object 
+[inaccessible] protected[package lang] def finalize(): Unit +def +(other: String): String +def ->[B](y: B): (test.Foo, B) +def Bar: Double +def Baz: Double +def ensuring(cond: Boolean): test.Foo +def ensuring(cond: Boolean, msg: => Any): test.Foo +def ensuring(cond: test.Foo => Boolean): test.Foo +def ensuring(cond: test.Foo => Boolean, msg: => Any): test.Foo +def equals(x$1: Object): Boolean +def formatted(fmtstr: String): String +def hashCode(): Int +def toString(): String +def →[B](y: B): (test.Foo, B) +final def !=(x$1: Any): Boolean +final def ## : Int +final def ==(x$1: Any): Boolean +final def asInstanceOf[T0]: T0 +final def eq(x$1: AnyRef): Boolean +final def isInstanceOf[T0]: Boolean +final def ne(x$1: AnyRef): Boolean +final def notify(): Unit +final def notifyAll(): Unit +final def synchronized[T0](x$1: T0): T0 +final def wait(): Unit +final def wait(x$1: Long): Unit +final def wait(x$1: Long, x$2: Int): Unit +================================================================================ diff --git a/test/files/presentation/higher-order-completion/Test.scala b/test/files/presentation/higher-order-completion/Test.scala new file mode 100644 index 000000000000..14a6aa835064 --- /dev/null +++ b/test/files/presentation/higher-order-completion/Test.scala @@ -0,0 +1,3 @@ +import scala.tools.nsc.interactive.tests.InteractiveTest + +object Test extends InteractiveTest diff --git a/test/files/presentation/higher-order-completion/src/Completions.scala b/test/files/presentation/higher-order-completion/src/Completions.scala new file mode 100644 index 000000000000..afb423d3a1af --- /dev/null +++ b/test/files/presentation/higher-order-completion/src/Completions.scala @@ -0,0 +1,22 @@ +package test + +/* check that members are visible when completing inside an Apply with insufficient args */ + +class Foo { + def Bar: Double = 2.0 + def Baz: Double = 1.0 +} + +class Completion1 { + def singleArg(f: Foo => Double): Nothing = ??? 
+ singleArg(_./*!*/) + + def multiArg(f: Foo => Double, s: String): Nothing = ??? + multiArg(_./*!*/) // importantly we want to see Bar and Baz as completions here + + def multiArgFull(f: Foo => Double, s: String): Nothing = ??? + multiArgFull(_./*!*/,???) + + def multiArgWithDefault(f: Foo => Double, s: String = "hello"): Nothing = ??? + multiArgWithDefault(_./*!*/) +} diff --git a/test/files/presentation/hyperlinks-macro/Runner.scala b/test/files/presentation/hyperlinks-macro/Runner.scala index c2f89bdb17ca..694065de2325 100644 --- a/test/files/presentation/hyperlinks-macro/Runner.scala +++ b/test/files/presentation/hyperlinks-macro/Runner.scala @@ -1,7 +1,7 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { - override def runDefaultTests() { + override def runDefaultTests(): Unit = { sourceFiles foreach (src => askLoadedTyped(src).get) super.runDefaultTests() } diff --git a/test/files/presentation/hyperlinks/Runner.scala b/test/files/presentation/hyperlinks/Runner.scala index b78e13c23a2d..b787b80f87ca 100644 --- a/test/files/presentation/hyperlinks/Runner.scala +++ b/test/files/presentation/hyperlinks/Runner.scala @@ -1,11 +1,11 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { - override def runDefaultTests() { + override def runDefaultTests(): Unit = { // make sure typer is done.. the virtual pattern matcher might translate // some trees and mess up positions. But we'll catch it red handed! 
sourceFiles foreach (src => askLoadedTyped(src).get) super.runDefaultTests() } -} \ No newline at end of file +} diff --git a/test/files/presentation/hyperlinks/src/NameDefaultTests.scala b/test/files/presentation/hyperlinks/src/NameDefaultTests.scala index 340d223d2ea7..fadaecb74cac 100644 --- a/test/files/presentation/hyperlinks/src/NameDefaultTests.scala +++ b/test/files/presentation/hyperlinks/src/NameDefaultTests.scala @@ -7,7 +7,7 @@ class NameDefaults { implicit val l = 42 - def bar { + def bar: Unit = { println() val someOtherInt = 10 diff --git a/test/files/presentation/hyperlinks/src/PatMatTests.scala b/test/files/presentation/hyperlinks/src/PatMatTests.scala index 718410635735..82b773d3e809 100644 --- a/test/files/presentation/hyperlinks/src/PatMatTests.scala +++ b/test/files/presentation/hyperlinks/src/PatMatTests.scala @@ -7,7 +7,7 @@ case class CaseTwo(str: String) extends BaseType class PatMatTests { - def foo(x: BaseType) { + def foo(x: BaseType): Unit = { x match { case CaseOne/*#*/(10, first :: second :: Nil) => val tmp = 23 @@ -19,10 +19,10 @@ class PatMatTests { } } - def multipleAssign() { + def multipleAssign(): Unit = { val (x, y) = ("abc", "def") println(x/*#*/, y/*#*/) } -} \ No newline at end of file +} diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check index 79bfde5343cd..c57099342e29 100644 --- a/test/files/presentation/ide-bug-1000349.check +++ b/test/files/presentation/ide-bug-1000349.check @@ -3,21 +3,25 @@ reload: CompletionOnEmptyArgMethod.scala askTypeCompletion at CompletionOnEmptyArgMethod.scala(2,17) ================================================================================ [response] askTypeCompletion at (2,17) -retrieved 30 members +retrieved 34 members +[inaccessible] private[this] val self: Foo +[inaccessible] private[this] val self: Foo +[inaccessible] private[this] val self: Foo +[inaccessible] private[this] val self: Foo def +(other: String): String def 
->[B](y: B): (Foo, B) def ensuring(cond: Boolean): Foo -def ensuring(cond: Boolean,msg: => Any): Foo +def ensuring(cond: Boolean, msg: => Any): Foo def ensuring(cond: Foo => Boolean): Foo -def ensuring(cond: Foo => Boolean,msg: => Any): Foo -def equals(x$1: Any): Boolean +def ensuring(cond: Foo => Boolean, msg: => Any): Foo +def equals(x$1: Object): Boolean def foo: Foo def formatted(fmtstr: String): String def hashCode(): Int def toString(): String def →[B](y: B): (Foo, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -28,7 +32,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit protected[package lang] def clone(): Object protected[package lang] def finalize(): Unit ================================================================================ diff --git a/test/files/presentation/ide-bug-1000349/Runner.scala b/test/files/presentation/ide-bug-1000349/Runner.scala index 1ef3cf902531..1c03e3d5badf 100644 --- a/test/files/presentation/ide-bug-1000349/Runner.scala +++ b/test/files/presentation/ide-bug-1000349/Runner.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests._ -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala b/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala index a3d8e8f06715..c6b8d0925d1e 100644 --- a/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala +++ b/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala @@ -4,4 +4,4 @@ object Foo { class Foo { def foo = this -} \ No newline at end of file +} diff --git 
a/test/files/presentation/ide-bug-1000469/Runner.scala b/test/files/presentation/ide-bug-1000469/Runner.scala index 1ef3cf902531..1c03e3d5badf 100644 --- a/test/files/presentation/ide-bug-1000469/Runner.scala +++ b/test/files/presentation/ide-bug-1000469/Runner.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests._ -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala b/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala index 02e836ef0d7d..13e9e2fa5ad1 100644 --- a/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala +++ b/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala @@ -2,4 +2,4 @@ package scala class EventHandler { @transient private val foo = 2 -} \ No newline at end of file +} diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check index 4fb7f1828534..d9a785ea7476 100644 --- a/test/files/presentation/ide-bug-1000475.check +++ b/test/files/presentation/ide-bug-1000475.check @@ -3,22 +3,26 @@ reload: Foo.scala askTypeCompletion at Foo.scala(3,7) ================================================================================ [response] askTypeCompletion at (3,7) -retrieved 29 members +retrieved 33 members +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (Object, B) def ensuring(cond: Boolean): Object -def ensuring(cond: Boolean,msg: => Any): Object +def ensuring(cond: Boolean, msg: => Any): Object def ensuring(cond: Object => Boolean): Object -def ensuring(cond: Object => Boolean,msg: => Any): Object -def equals(x$1: 
Any): Boolean +def ensuring(cond: Object => Boolean, msg: => Any): Object +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def toString(): String def →[B](y: B): (Object, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -29,28 +33,32 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit ================================================================================ askTypeCompletion at Foo.scala(6,10) ================================================================================ [response] askTypeCompletion at (6,10) -retrieved 29 members +retrieved 33 members +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (Object, B) def ensuring(cond: Boolean): Object -def ensuring(cond: Boolean,msg: => Any): Object +def ensuring(cond: Boolean, msg: => Any): Object def ensuring(cond: Object => Boolean): Object -def ensuring(cond: Object => Boolean,msg: => Any): Object -def equals(x$1: Any): Boolean +def ensuring(cond: Object => Boolean, msg: => Any): Object +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def toString(): String def →[B](y: B): (Object, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -61,28 +69,32 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 
final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit ================================================================================ askTypeCompletion at Foo.scala(7,7) ================================================================================ [response] askTypeCompletion at (7,7) -retrieved 29 members +retrieved 33 members +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object +[inaccessible] private[this] val self: Object [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (Object, B) def ensuring(cond: Boolean): Object -def ensuring(cond: Boolean,msg: => Any): Object +def ensuring(cond: Boolean, msg: => Any): Object def ensuring(cond: Object => Boolean): Object -def ensuring(cond: Object => Boolean,msg: => Any): Object -def equals(x$1: Any): Boolean +def ensuring(cond: Object => Boolean, msg: => Any): Object +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def toString(): String def →[B](y: B): (Object, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -93,5 +105,5 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit ================================================================================ diff --git a/test/files/presentation/ide-bug-1000475/Runner.scala b/test/files/presentation/ide-bug-1000475/Runner.scala index 1ef3cf902531..1c03e3d5badf 100644 --- a/test/files/presentation/ide-bug-1000475/Runner.scala +++ 
b/test/files/presentation/ide-bug-1000475/Runner.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests._ -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/ide-bug-1000475/src/Foo.scala b/test/files/presentation/ide-bug-1000475/src/Foo.scala index 5dd6b7d00d3f..17c7bb6880e7 100644 --- a/test/files/presentation/ide-bug-1000475/src/Foo.scala +++ b/test/files/presentation/ide-bug-1000475/src/Foo.scala @@ -6,4 +6,4 @@ class Foo { m(1).toS/*!*/ m(1)./*!*/ println() -} \ No newline at end of file +} diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check index 12eafcd6de1d..5569188c1252 100644 --- a/test/files/presentation/ide-bug-1000531.check +++ b/test/files/presentation/ide-bug-1000531.check @@ -1,18 +1,22 @@ reload: CrashOnLoad.scala, TestIterable.java -askTypeCompletion at CrashOnLoad.scala(6,11) +askTypeCompletion at CrashOnLoad.scala(9,11) ================================================================================ -[response] askTypeCompletion at (6,11) -retrieved 30 members +[response] askTypeCompletion at (9,11) +retrieved 34 members +[inaccessible] private[this] val self: other.TestIterator[Nothing] +[inaccessible] private[this] val self: other.TestIterator[Nothing] +[inaccessible] private[this] val self: other.TestIterator[Nothing] +[inaccessible] private[this] val self: other.TestIterator[Nothing] [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (other.TestIterator[Nothing], B) def ensuring(cond: Boolean): other.TestIterator[Nothing] -def ensuring(cond: Boolean,msg: => Any): other.TestIterator[Nothing] +def ensuring(cond: Boolean, msg: => Any): other.TestIterator[Nothing] def ensuring(cond: other.TestIterator[Nothing] => Boolean): other.TestIterator[Nothing] -def ensuring(cond: 
other.TestIterator[Nothing] => Boolean,msg: => Any): other.TestIterator[Nothing] -def equals(x$1: Any): Boolean +def ensuring(cond: other.TestIterator[Nothing] => Boolean, msg: => Any): other.TestIterator[Nothing] +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hasNext: Boolean def hashCode(): Int @@ -20,7 +24,7 @@ def next: T def toString(): String def →[B](y: B): (other.TestIterator[Nothing], B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -31,5 +35,5 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit ================================================================================ diff --git a/test/files/presentation/ide-bug-1000531/Runner.scala b/test/files/presentation/ide-bug-1000531/Runner.scala index 1ef3cf902531..1c03e3d5badf 100644 --- a/test/files/presentation/ide-bug-1000531/Runner.scala +++ b/test/files/presentation/ide-bug-1000531/Runner.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests._ -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala index 25e0a9580f10..1a4065e2eef2 100644 --- a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala +++ b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala @@ -1,6 +1,9 @@ /** When this files is opened within the IDE, a typing error is reported. */ class A[B] extends TestIterable[B] { - import collection.convert.ImplicitConversionsToScala._ + //import collection.convert.ImplicitConversionsToScala._ + implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = ??? 
+ implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = ??? + def iterator: other.TestIterator[Nothing] = ??? iterator./*!*/ diff --git a/test/files/presentation/ide-t1000567/Runner.scala b/test/files/presentation/ide-t1000567/Runner.scala index 51b43caa7b6d..a23593dd25cc 100644 --- a/test/files/presentation/ide-t1000567/Runner.scala +++ b/test/files/presentation/ide-t1000567/Runner.scala @@ -5,8 +5,8 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { override def runDefaultTests(): Unit = { - val a = sourceFiles.find(_.file.name == "a.scala").head - val b = sourceFiles.find(_.file.name == "b.scala").head + val a = sourceFiles.find(_.file.name == "a.scala").get + val b = sourceFiles.find(_.file.name == "b.scala").get askLoadedTyped(a).get askLoadedTyped(b).get super.runDefaultTests() diff --git a/test/files/presentation/ide-t1000609/Runner.scala b/test/files/presentation/ide-t1000609/Runner.scala index 1ef3cf902531..1c03e3d5badf 100644 --- a/test/files/presentation/ide-t1000609/Runner.scala +++ b/test/files/presentation/ide-t1000609/Runner.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests._ -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala b/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala index d4bedaf9eeac..ea09ab14d5a3 100644 --- a/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala +++ b/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala @@ -5,4 +5,4 @@ class Foo { object Test { val a = new Foo a.foo() /*#*/ -} \ No newline at end of file +} diff --git a/test/files/presentation/ide-t1000976.check b/test/files/presentation/ide-t1000976.check index a70d6a028189..d58f86d6c63f 100644 --- a/test/files/presentation/ide-t1000976.check +++ b/test/files/presentation/ide-t1000976.check @@ -1,2 +1 @@ -not found: object c -not 
found: type C +Test OK \ No newline at end of file diff --git a/test/files/presentation/ide-t1000976/Test.scala b/test/files/presentation/ide-t1000976/Test.scala index 8a1584cea0c9..7a989e82381a 100644 --- a/test/files/presentation/ide-t1000976/Test.scala +++ b/test/files/presentation/ide-t1000976/Test.scala @@ -1,9 +1,11 @@ -// scalac: -sourcepath src import scala.tools.nsc.interactive.tests.InteractiveTest import scala.reflect.internal.util.SourceFile import scala.tools.nsc.interactive.Response object Test extends InteractiveTest { + + override def argsString = "-sourcepath src" + override def execute(): Unit = { loadSourceAndWaitUntilTypechecked("A.scala") val sourceB = loadSourceAndWaitUntilTypechecked("B.scala") @@ -11,7 +13,7 @@ object Test extends InteractiveTest { } private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = { - val sourceFile = sourceFiles.find(_.file.name == sourceName).head + val sourceFile = sourceFiles.find(_.file.name == sourceName).get compiler.askToDoFirst(sourceFile) val res = new Response[Unit] compiler.askReload(List(sourceFile), res) diff --git a/test/files/presentation/ide-t1001388/Test.scala b/test/files/presentation/ide-t1001388/Test.scala index f6079cf0b2cd..071dd20e7ed0 100644 --- a/test/files/presentation/ide-t1001388/Test.scala +++ b/test/files/presentation/ide-t1001388/Test.scala @@ -9,7 +9,7 @@ object Test extends InteractiveTest { } private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = { - val sourceFile = sourceFiles.find(_.file.name == sourceName).head + val sourceFile = sourceFiles.find(_.file.name == sourceName).get askLoadedTyped(sourceFile).get /* The response to `askLoadedType` may return before `interactive.Global.waitLoadedType` * fully executes. 
Because this test expects `waitLoadedType` is fully executed before diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check index 3bd3d8af41e8..4fe2b05ebeb6 100644 --- a/test/files/presentation/implicit-member.check +++ b/test/files/presentation/implicit-member.check @@ -3,21 +3,25 @@ reload: ImplicitMember.scala askTypeCompletion at ImplicitMember.scala(7,7) ================================================================================ [response] askTypeCompletion at (7,7) -retrieved 32 members +retrieved 36 members +[inaccessible] private[this] val self: Implicit.type +[inaccessible] private[this] val self: Implicit.type +[inaccessible] private[this] val self: Implicit.type +[inaccessible] private[this] val self: Implicit.type def +(other: String): String def ->[B](y: B): (Implicit.type, B) def ensuring(cond: Boolean): Implicit.type -def ensuring(cond: Boolean,msg: => Any): Implicit.type +def ensuring(cond: Boolean, msg: => Any): Implicit.type def ensuring(cond: Implicit.type => Boolean): Implicit.type -def ensuring(cond: Implicit.type => Boolean,msg: => Any): Implicit.type -def equals(x$1: Any): Boolean +def ensuring(cond: Implicit.type => Boolean, msg: => Any): Implicit.type +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def toString(): String def →[B](y: B): (Implicit.type, B) final class AppliedImplicit[A] extends AnyRef final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -28,7 +32,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit implicit def AppliedImplicit[A](x: A): Implicit.AppliedImplicit[A] private[this] val x: Implicit.type protected[package lang] def clone(): Object diff 
--git a/test/files/presentation/implicit-member/src/ImplicitMember.scala b/test/files/presentation/implicit-member/src/ImplicitMember.scala index a547b65a8e52..4a2c9bb24671 100644 --- a/test/files/presentation/implicit-member/src/ImplicitMember.scala +++ b/test/files/presentation/implicit-member/src/ImplicitMember.scala @@ -5,4 +5,4 @@ object Implicit { implicit def AppliedImplicit[A](x: A): AppliedImplicit[A] = new AppliedImplicit(x) this./*!*/x -} \ No newline at end of file +} diff --git a/test/files/presentation/infix-completion.check b/test/files/presentation/infix-completion.check index eaa3a686bdda..ad4aa0cd6c79 100644 --- a/test/files/presentation/infix-completion.check +++ b/test/files/presentation/infix-completion.check @@ -1,25 +1,192 @@ reload: Snippet.scala -askTypeCompletion at Snippet.scala(1,38) +askTypeCompletion at Snippet.scala(1,34) ================================================================================ -[response] askTypeCompletion at (1,38) -retrieved 30 members +[response] askTypeCompletion at (1,34) +#partest !java15+ +retrieved 207 members +#partest java15+ +retrieved 209 members +#partest +[inaccessible] private[this] val self: Int +[inaccessible] private[this] val self: Int +[inaccessible] private[this] val self: Int +[inaccessible] private[this] val self: Int +[inaccessible] protected def num: Fractional[Double] +[inaccessible] protected def ord: Ordering[Double] +[inaccessible] protected def unifiedPrimitiveEquals(x: Any): Boolean +[inaccessible] protected def unifiedPrimitiveHashcode: Int [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit -def +(other: ): One.type -def ->[B](y: B): (One.type, B) -def ensuring(cond: Boolean): One.type -def ensuring(cond: Boolean,msg: => Any): One.type -def ensuring(cond: One.type => Boolean): One.type -def ensuring(cond: One.type => Boolean,msg: => Any): One.type +def !=(x: Byte): Boolean +def !=(x: Char): Boolean +def !=(x: 
Double): Boolean +def !=(x: Float): Boolean +def !=(x: Int): Boolean +def !=(x: Long): Boolean +def !=(x: Short): Boolean +def %(x: Byte): Int +def %(x: Char): Int +def %(x: Double): Double +def %(x: Float): Float +def %(x: Int): Int +def %(x: Long): Long +def %(x: Short): Int +def &(x: Byte): Int +def &(x: Char): Int +def &(x: Int): Int +def &(x: Long): Long +def &(x: Short): Int +def *(x: Byte): Int +def *(x: Char): Int +def *(x: Double): Double +def *(x: Float): Float +def *(x: Int): Int +def *(x: Long): Long +def *(x: Short): Int +def +(x: Byte): Int +def +(x: Char): Int +def +(x: Double): Double +def +(x: Float): Float +def +(x: Int): Int +def +(x: Long): Long +def +(x: Short): Int +def +(x: String): String +def -(x: Byte): Int +def -(x: Char): Int +def -(x: Double): Double +def -(x: Float): Float +def -(x: Int): Int +def -(x: Long): Long +def -(x: Short): Int +def ->[B](y: B): (Int, B) +def /(x: Byte): Int +def /(x: Char): Int +def /(x: Double): Double +def /(x: Float): Float +def /(x: Int): Int +def /(x: Long): Long +def /(x: Short): Int +def <(x: Byte): Boolean +def <(x: Char): Boolean +def <(x: Double): Boolean +def <(x: Float): Boolean +def <(x: Int): Boolean +def <(x: Long): Boolean +def <(x: Short): Boolean +def <<(x: Int): Int +def <<(x: Long): Int +def <=(x: Byte): Boolean +def <=(x: Char): Boolean +def <=(x: Double): Boolean +def <=(x: Float): Boolean +def <=(x: Int): Boolean +def <=(x: Long): Boolean +def <=(x: Short): Boolean +def ==(x: Byte): Boolean +def ==(x: Char): Boolean +def ==(x: Double): Boolean +def ==(x: Float): Boolean +def ==(x: Int): Boolean +def ==(x: Long): Boolean +def ==(x: Short): Boolean +def >(x: Byte): Boolean +def >(x: Char): Boolean +def >(x: Double): Boolean +def >(x: Float): Boolean +def >(x: Int): Boolean +def >(x: Long): Boolean +def >(x: Short): Boolean +def >=(x: Byte): Boolean +def >=(x: Char): Boolean +def >=(x: Double): Boolean +def >=(x: Float): Boolean +def >=(x: Int): Boolean +def >=(x: Long): Boolean +def >=(x: 
Short): Boolean +def >>(x: Int): Int +def >>(x: Long): Int +def >>>(x: Int): Int +def >>>(x: Long): Int +def ^(x: Byte): Int +def ^(x: Char): Int +def ^(x: Int): Int +def ^(x: Long): Long +def ^(x: Short): Int +def byteValue(): Byte +def ceil: Double +def compare(y: Double): Int +def compare(y: Float): Int +def compare(y: Int): Int +def compare(y: Long): Int +def compareTo(that: Double): Int +def compareTo(that: Float): Int +def compareTo(that: Int): Int +def compareTo(that: Long): Int +def compareTo(x$1: Double): Int +def compareTo(x$1: Float): Int +def compareTo(x$1: Integer): Int +def compareTo(x$1: Long): Int +#partest java15+ +def describeConstable(): java.util.Optional[Double] +#partest +def doubleValue(): Double +def ensuring(cond: Boolean): Int +def ensuring(cond: Boolean, msg: => Any): Int +def ensuring(cond: Int => Boolean): Int +def ensuring(cond: Int => Boolean, msg: => Any): Int def equals(x$1: Any): Boolean +def floatValue(): Float +def floor: Double def formatted(fmtstr: String): String def hashCode(): Int +def intValue(): Int +def isFinite: Boolean +def isInfinite(): Boolean +def isInfinity: Boolean +def isNaN(): Boolean +def isNegInfinity: Boolean +def isPosInfinity: Boolean +def isValidLong: Boolean +def longValue(): Long +#partest java15+ +def resolveConstantDesc(x$1: java.lang.invoke.MethodHandles.Lookup): Double +#partest +def round: Long +def shortValue(): Short +def sign: Double +def to(end: Int): scala.collection.immutable.Range.Inclusive +def to(end: Int, step: Int): scala.collection.immutable.Range.Inclusive +def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long] +def to(end: Long, step: Long): scala.collection.immutable.NumericRange.Inclusive[Long] +def toBinaryString: String +def toByte: Byte +def toChar: Char +def toDegrees: Double +def toDouble: Double +def toFloat: Float +def toHexString: String +def toInt: Int +def toLong: Long +def toOctalString: String +def toRadians: Double +def toShort: Short def toString(): 
String -def youCompleteMe(other: One.type): Unit -def →[B](y: B): (One.type, B) +def unary_+ : Int +def unary_- : Int +def unary_~ : Int +def until(end: Int): scala.collection.immutable.Range +def until(end: Int, step: Int): scala.collection.immutable.Range +def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long] +def until(end: Long, step: Long): scala.collection.immutable.NumericRange.Exclusive[Long] +def |(x: Byte): Int +def |(x: Char): Int +def |(x: Int): Int +def |(x: Long): Long +def |(x: Short): Int +def →[B](y: B): (Int, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -30,5 +197,21 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit +override def abs: Double +override def isValidByte: Boolean +override def isValidChar: Boolean +override def isValidInt: Boolean +override def isValidShort: Boolean +override def isWhole: Boolean +override def max(that: Double): Double +override def max(that: Float): Float +override def max(that: Int): Int +override def max(that: Long): Long +override def min(that: Double): Double +override def min(that: Float): Float +override def min(that: Int): Int +override def min(that: Long): Long +override def signum: Int +private[this] val self: Double ================================================================================ diff --git a/test/files/presentation/infix-completion/src/Snippet.scala b/test/files/presentation/infix-completion/src/Snippet.scala index 75b07c11a324..7e03c486ba8e 100644 --- a/test/files/presentation/infix-completion/src/Snippet.scala +++ b/test/files/presentation/infix-completion/src/Snippet.scala @@ -1,5 +1 @@ -object Snippet{val x = 123; One + One./*!*/} -object One { - def +(other: One) = this - 
def youCompleteMe(other: One.type) = () -} +object Snippet{val x = 123; 1 + 1./*!*/} diff --git a/test/files/presentation/infix-completion2.check b/test/files/presentation/infix-completion2.check index b410fe39f1f9..ad4aa0cd6c79 100644 --- a/test/files/presentation/infix-completion2.check +++ b/test/files/presentation/infix-completion2.check @@ -1,25 +1,192 @@ reload: Snippet.scala -askTypeCompletion at Snippet.scala(1,46) +askTypeCompletion at Snippet.scala(1,34) ================================================================================ -[response] askTypeCompletion at (1,46) -retrieved 30 members +[response] askTypeCompletion at (1,34) +#partest !java15+ +retrieved 207 members +#partest java15+ +retrieved 209 members +#partest +[inaccessible] private[this] val self: Int +[inaccessible] private[this] val self: Int +[inaccessible] private[this] val self: Int +[inaccessible] private[this] val self: Int +[inaccessible] protected def num: Fractional[Double] +[inaccessible] protected def ord: Ordering[Double] +[inaccessible] protected def unifiedPrimitiveEquals(x: Any): Boolean +[inaccessible] protected def unifiedPrimitiveHashcode: Int [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit -def +(other: ): One.type -def ->[B](y: B): (Snippet.x.type, B) -def ensuring(cond: Boolean): Snippet.x.type -def ensuring(cond: Boolean,msg: => Any): Snippet.x.type -def ensuring(cond: Snippet.x.type => Boolean): Snippet.x.type -def ensuring(cond: Snippet.x.type => Boolean,msg: => Any): Snippet.x.type +def !=(x: Byte): Boolean +def !=(x: Char): Boolean +def !=(x: Double): Boolean +def !=(x: Float): Boolean +def !=(x: Int): Boolean +def !=(x: Long): Boolean +def !=(x: Short): Boolean +def %(x: Byte): Int +def %(x: Char): Int +def %(x: Double): Double +def %(x: Float): Float +def %(x: Int): Int +def %(x: Long): Long +def %(x: Short): Int +def &(x: Byte): Int +def &(x: Char): Int +def &(x: Int): Int +def &(x: Long): 
Long +def &(x: Short): Int +def *(x: Byte): Int +def *(x: Char): Int +def *(x: Double): Double +def *(x: Float): Float +def *(x: Int): Int +def *(x: Long): Long +def *(x: Short): Int +def +(x: Byte): Int +def +(x: Char): Int +def +(x: Double): Double +def +(x: Float): Float +def +(x: Int): Int +def +(x: Long): Long +def +(x: Short): Int +def +(x: String): String +def -(x: Byte): Int +def -(x: Char): Int +def -(x: Double): Double +def -(x: Float): Float +def -(x: Int): Int +def -(x: Long): Long +def -(x: Short): Int +def ->[B](y: B): (Int, B) +def /(x: Byte): Int +def /(x: Char): Int +def /(x: Double): Double +def /(x: Float): Float +def /(x: Int): Int +def /(x: Long): Long +def /(x: Short): Int +def <(x: Byte): Boolean +def <(x: Char): Boolean +def <(x: Double): Boolean +def <(x: Float): Boolean +def <(x: Int): Boolean +def <(x: Long): Boolean +def <(x: Short): Boolean +def <<(x: Int): Int +def <<(x: Long): Int +def <=(x: Byte): Boolean +def <=(x: Char): Boolean +def <=(x: Double): Boolean +def <=(x: Float): Boolean +def <=(x: Int): Boolean +def <=(x: Long): Boolean +def <=(x: Short): Boolean +def ==(x: Byte): Boolean +def ==(x: Char): Boolean +def ==(x: Double): Boolean +def ==(x: Float): Boolean +def ==(x: Int): Boolean +def ==(x: Long): Boolean +def ==(x: Short): Boolean +def >(x: Byte): Boolean +def >(x: Char): Boolean +def >(x: Double): Boolean +def >(x: Float): Boolean +def >(x: Int): Boolean +def >(x: Long): Boolean +def >(x: Short): Boolean +def >=(x: Byte): Boolean +def >=(x: Char): Boolean +def >=(x: Double): Boolean +def >=(x: Float): Boolean +def >=(x: Int): Boolean +def >=(x: Long): Boolean +def >=(x: Short): Boolean +def >>(x: Int): Int +def >>(x: Long): Int +def >>>(x: Int): Int +def >>>(x: Long): Int +def ^(x: Byte): Int +def ^(x: Char): Int +def ^(x: Int): Int +def ^(x: Long): Long +def ^(x: Short): Int +def byteValue(): Byte +def ceil: Double +def compare(y: Double): Int +def compare(y: Float): Int +def compare(y: Int): Int +def compare(y: Long): 
Int +def compareTo(that: Double): Int +def compareTo(that: Float): Int +def compareTo(that: Int): Int +def compareTo(that: Long): Int +def compareTo(x$1: Double): Int +def compareTo(x$1: Float): Int +def compareTo(x$1: Integer): Int +def compareTo(x$1: Long): Int +#partest java15+ +def describeConstable(): java.util.Optional[Double] +#partest +def doubleValue(): Double +def ensuring(cond: Boolean): Int +def ensuring(cond: Boolean, msg: => Any): Int +def ensuring(cond: Int => Boolean): Int +def ensuring(cond: Int => Boolean, msg: => Any): Int def equals(x$1: Any): Boolean +def floatValue(): Float +def floor: Double def formatted(fmtstr: String): String def hashCode(): Int +def intValue(): Int +def isFinite: Boolean +def isInfinite(): Boolean +def isInfinity: Boolean +def isNaN(): Boolean +def isNegInfinity: Boolean +def isPosInfinity: Boolean +def isValidLong: Boolean +def longValue(): Long +#partest java15+ +def resolveConstantDesc(x$1: java.lang.invoke.MethodHandles.Lookup): Double +#partest +def round: Long +def shortValue(): Short +def sign: Double +def to(end: Int): scala.collection.immutable.Range.Inclusive +def to(end: Int, step: Int): scala.collection.immutable.Range.Inclusive +def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long] +def to(end: Long, step: Long): scala.collection.immutable.NumericRange.Inclusive[Long] +def toBinaryString: String +def toByte: Byte +def toChar: Char +def toDegrees: Double +def toDouble: Double +def toFloat: Float +def toHexString: String +def toInt: Int +def toLong: Long +def toOctalString: String +def toRadians: Double +def toShort: Short def toString(): String -def youCompleteMe(other: One.type): Unit -def →[B](y: B): (Snippet.x.type, B) +def unary_+ : Int +def unary_- : Int +def unary_~ : Int +def until(end: Int): scala.collection.immutable.Range +def until(end: Int, step: Int): scala.collection.immutable.Range +def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long] +def until(end: 
Long, step: Long): scala.collection.immutable.NumericRange.Exclusive[Long] +def |(x: Byte): Int +def |(x: Char): Int +def |(x: Int): Int +def |(x: Long): Long +def |(x: Short): Int +def →[B](y: B): (Int, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -30,5 +197,21 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit +override def abs: Double +override def isValidByte: Boolean +override def isValidChar: Boolean +override def isValidInt: Boolean +override def isValidShort: Boolean +override def isWhole: Boolean +override def max(that: Double): Double +override def max(that: Float): Float +override def max(that: Int): Int +override def max(that: Long): Long +override def min(that: Double): Double +override def min(that: Float): Float +override def min(that: Int): Int +override def min(that: Long): Long +override def signum: Int +private[this] val self: Double ================================================================================ diff --git a/test/files/presentation/infix-completion2/src/Snippet.scala b/test/files/presentation/infix-completion2/src/Snippet.scala index 9ffac983b3d6..4eb8c24a2e55 100644 --- a/test/files/presentation/infix-completion2/src/Snippet.scala +++ b/test/files/presentation/infix-completion2/src/Snippet.scala @@ -1,5 +1 @@ -object Snippet{val x: One.type = 123; One + x./*!*/} -object One { - def +(other: One) = this - def youCompleteMe(other: One.type) = () -} +object Snippet{val x = 123; 1 + x./*!*/} diff --git a/test/files/presentation/package-object-issues.check b/test/files/presentation/package-object-issues.check new file mode 100644 index 000000000000..c3f750f0b7f8 --- /dev/null +++ b/test/files/presentation/package-object-issues.check @@ -0,0 +1,42 
@@ +reload: Main.scala + +askTypeCompletion at Main.scala(7,6) +================================================================================ +[response] askTypeCompletion at (7,6) +def +(other: String): String +def ->[B](y: B): (concurrent.ExecutionException, B) +def ensuring(cond: Boolean): concurrent.ExecutionException +def ensuring(cond: Boolean, msg: => Any): concurrent.ExecutionException +def ensuring(cond: concurrent.ExecutionException => Boolean): concurrent.ExecutionException +def ensuring(cond: concurrent.ExecutionException => Boolean, msg: => Any): concurrent.ExecutionException +def equals(x$1: Object): Boolean +def fillInStackTrace(): Throwable +def formatted(fmtstr: String): String +def getCause(): Throwable +def getLocalizedMessage(): String +def getMessage(): String +def getStackTrace(): Array[StackTraceElement] +def hashCode(): Int +def initCause(x$1: Throwable): Throwable +def printStackTrace(): Unit +def printStackTrace(x$1: java.io.PrintStream): Unit +def printStackTrace(x$1: java.io.PrintWriter): Unit +def setStackTrace(x$1: Array[StackTraceElement]): Unit +def toString(): String +def →[B](y: B): (concurrent.ExecutionException, B) +final def !=(x$1: Any): Boolean +final def ## : Int +final def ==(x$1: Any): Boolean +final def addSuppressed(x$1: Throwable): Unit +final def asInstanceOf[T0]: T0 +final def eq(x$1: AnyRef): Boolean +final def getSuppressed(): Array[Throwable] +final def isInstanceOf[T0]: Boolean +final def ne(x$1: AnyRef): Boolean +final def notify(): Unit +final def notifyAll(): Unit +final def synchronized[T0](x$1: T0): T0 +final def wait(): Unit +final def wait(x$1: Long): Unit +final def wait(x$1: Long, x$2: Int): Unit +================================================================================ diff --git a/test/files/presentation/package-object-issues/Test.scala b/test/files/presentation/package-object-issues/Test.scala new file mode 100644 index 000000000000..75c2533dd923 --- /dev/null +++ 
b/test/files/presentation/package-object-issues/Test.scala @@ -0,0 +1,8 @@ +import scala.tools.nsc.interactive.tests.InteractiveTest + +object Test extends InteractiveTest { + + override protected def filterOutLines(line: String) = + line.contains("inaccessible") || line.contains("retrieved ") + +} diff --git a/test/files/presentation/package-object-issues/src/Main.scala b/test/files/presentation/package-object-issues/src/Main.scala new file mode 100644 index 000000000000..8c0f481c0ac0 --- /dev/null +++ b/test/files/presentation/package-object-issues/src/Main.scala @@ -0,0 +1,10 @@ +package scala.concurrent + +import scala.concurrent.ExecutionException + +object Main extends App { + def foo(n: ExecutionException, k: Int): Unit = { + n./*!*/ + k + } +} diff --git a/test/files/presentation/package-object-type.check b/test/files/presentation/package-object-type.check new file mode 100644 index 000000000000..c3f750f0b7f8 --- /dev/null +++ b/test/files/presentation/package-object-type.check @@ -0,0 +1,42 @@ +reload: Main.scala + +askTypeCompletion at Main.scala(7,6) +================================================================================ +[response] askTypeCompletion at (7,6) +def +(other: String): String +def ->[B](y: B): (concurrent.ExecutionException, B) +def ensuring(cond: Boolean): concurrent.ExecutionException +def ensuring(cond: Boolean, msg: => Any): concurrent.ExecutionException +def ensuring(cond: concurrent.ExecutionException => Boolean): concurrent.ExecutionException +def ensuring(cond: concurrent.ExecutionException => Boolean, msg: => Any): concurrent.ExecutionException +def equals(x$1: Object): Boolean +def fillInStackTrace(): Throwable +def formatted(fmtstr: String): String +def getCause(): Throwable +def getLocalizedMessage(): String +def getMessage(): String +def getStackTrace(): Array[StackTraceElement] +def hashCode(): Int +def initCause(x$1: Throwable): Throwable +def printStackTrace(): Unit +def printStackTrace(x$1: java.io.PrintStream): 
Unit +def printStackTrace(x$1: java.io.PrintWriter): Unit +def setStackTrace(x$1: Array[StackTraceElement]): Unit +def toString(): String +def →[B](y: B): (concurrent.ExecutionException, B) +final def !=(x$1: Any): Boolean +final def ## : Int +final def ==(x$1: Any): Boolean +final def addSuppressed(x$1: Throwable): Unit +final def asInstanceOf[T0]: T0 +final def eq(x$1: AnyRef): Boolean +final def getSuppressed(): Array[Throwable] +final def isInstanceOf[T0]: Boolean +final def ne(x$1: AnyRef): Boolean +final def notify(): Unit +final def notifyAll(): Unit +final def synchronized[T0](x$1: T0): T0 +final def wait(): Unit +final def wait(x$1: Long): Unit +final def wait(x$1: Long, x$2: Int): Unit +================================================================================ diff --git a/test/files/presentation/package-object-type/Test.scala b/test/files/presentation/package-object-type/Test.scala new file mode 100644 index 000000000000..b06e25d8dd1c --- /dev/null +++ b/test/files/presentation/package-object-type/Test.scala @@ -0,0 +1,9 @@ +import scala.tools.nsc.interactive.tests.InteractiveTest +import scala.tools.nsc.util + +object Test extends InteractiveTest { + + override protected def filterOutLines(line: String) = + line.contains("inaccessible") || line.contains("retrieved ") + +} diff --git a/test/files/presentation/package-object-type/src/Main.scala b/test/files/presentation/package-object-type/src/Main.scala new file mode 100644 index 000000000000..493984f55654 --- /dev/null +++ b/test/files/presentation/package-object-type/src/Main.scala @@ -0,0 +1,10 @@ +package example + +import scala.concurrent.ExecutionException + +object Main extends App { + def foo(n: ExecutionException, k: Int): Unit = { + n./*!*/ + k + } +} diff --git a/test/files/presentation/parse-invariants/Test.scala b/test/files/presentation/parse-invariants/Test.scala index 29b51a3f3fbe..6237993f3a79 100644 --- a/test/files/presentation/parse-invariants/Test.scala +++ 
b/test/files/presentation/parse-invariants/Test.scala @@ -6,7 +6,7 @@ object Test extends InteractiveTest { override def execute(): Unit = { def test(fileName: String): Unit = { - val sf = sourceFiles.find(_.file.name == fileName).head + val sf = sourceFiles.find(_.file.name == fileName).get noNewSymbols(sf) uniqueParseTree(sf) unattributedParseTree(sf) @@ -20,7 +20,7 @@ object Test extends InteractiveTest { /** * Asking for a parseTree should not enter any new symbols. */ - private def noNewSymbols(sf: SourceFile) { + private def noNewSymbols(sf: SourceFile): Unit = { def nextId() = compiler.NoSymbol.newTermSymbol(compiler.TermName("dummy"), compiler.NoPosition, compiler.NoFlags).id val id = nextId() println("parseTree") @@ -36,7 +36,7 @@ object Test extends InteractiveTest { /** * Asking twice for a parseTree on the same source should always return a new tree */ - private def uniqueParseTree(sf: SourceFile) { + private def uniqueParseTree(sf: SourceFile): Unit = { val parseTree1 = compiler.parseTree(sf) val parseTree2 = compiler.parseTree(sf) if (parseTree1 != parseTree2) { @@ -49,7 +49,7 @@ object Test extends InteractiveTest { /** * A parseTree should never contain any symbols or types */ - private def unattributedParseTree(sf: SourceFile) { + private def unattributedParseTree(sf: SourceFile): Unit = { if (noSymbolsOrTypes(compiler.parseTree(sf))) { reporter.println("Unattributed OK") } else { @@ -60,7 +60,7 @@ object Test extends InteractiveTest { /** * Once you have obtained a parseTree it should never change */ - private def neverModifyParseTree(sf: SourceFile) { + private def neverModifyParseTree(sf: SourceFile): Unit = { val parsedTree = compiler.parseTree(sf) loadSourceAndWaitUntilTypechecked(sf) if (noSymbolsOrTypes(parsedTree)) { @@ -73,7 +73,7 @@ object Test extends InteractiveTest { /** * Should always return a parse tree */ - private def shouldAlwaysReturnParseTree(sf: SourceFile) { + private def shouldAlwaysReturnParseTree(sf: SourceFile): Unit = { 
loadSourceAndWaitUntilTypechecked(sf) if (noSymbolsOrTypes(compiler.parseTree(sf))) { reporter.println("AlwaysParseTree OK") @@ -109,4 +109,4 @@ object Test extends InteractiveTest { } } -} \ No newline at end of file +} diff --git a/test/files/presentation/partial-fun/Runner.scala b/test/files/presentation/partial-fun/Runner.scala index 3edd5bb5b0d7..0ffd5359e83e 100644 --- a/test/files/presentation/partial-fun/Runner.scala +++ b/test/files/presentation/partial-fun/Runner.scala @@ -1,7 +1,7 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { - override def runDefaultTests() { + override def runDefaultTests(): Unit = { sourceFiles foreach (src => askLoadedTyped(src).get) super.runDefaultTests() diff --git a/test/files/presentation/partial-fun/src/PartialFun.scala b/test/files/presentation/partial-fun/src/PartialFun.scala index 4657898ed1ff..4c62ec980410 100644 --- a/test/files/presentation/partial-fun/src/PartialFun.scala +++ b/test/files/presentation/partial-fun/src/PartialFun.scala @@ -1,5 +1,5 @@ class A { - def foo { + def foo: Unit = { val x: PartialFunction[Int, Int] = ({ case 0 => 0 }) } } diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check index 220bdf33b263..b02cf1cec8e9 100644 --- a/test/files/presentation/ping-pong.check +++ b/test/files/presentation/ping-pong.check @@ -1,25 +1,29 @@ reload: PingPong.scala -askTypeCompletion at PingPong.scala(10,23) +askTypeCompletion at PingPong.scala(10,31) ================================================================================ -[response] askTypeCompletion at (10,23) -retrieved 32 members +[response] askTypeCompletion at (10,31) +retrieved 36 members [inaccessible] private[this] val ping: Ping +[inaccessible] private[this] val self: Pong +[inaccessible] private[this] val self: Pong +[inaccessible] private[this] val self: Pong +[inaccessible] private[this] val self: Pong [inaccessible] protected[package lang] def clone(): 
Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (Pong, B) def ensuring(cond: Boolean): Pong -def ensuring(cond: Boolean,msg: => Any): Pong +def ensuring(cond: Boolean, msg: => Any): Pong def ensuring(cond: Pong => Boolean): Pong -def ensuring(cond: Pong => Boolean,msg: => Any): Pong -def equals(x$1: Any): Boolean +def ensuring(cond: Pong => Boolean, msg: => Any): Pong +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def poke(): Unit def →[B](y: B): (Pong, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -30,24 +34,28 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit override def toString(): String private[this] val name: String ================================================================================ -askTypeCompletion at PingPong.scala(19,20) +askTypeCompletion at PingPong.scala(19,28) ================================================================================ -[response] askTypeCompletion at (19,20) -retrieved 33 members +[response] askTypeCompletion at (19,28) +retrieved 37 members +[inaccessible] private[this] val self: Ping +[inaccessible] private[this] val self: Ping +[inaccessible] private[this] val self: Ping +[inaccessible] private[this] val self: Ping [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (Ping, B) def ensuring(cond: Boolean): Ping -def ensuring(cond: Boolean,msg: => Any): Ping +def ensuring(cond: Boolean, msg: => Any): Ping def ensuring(cond: Ping => Boolean): Ping -def ensuring(cond: Ping => Boolean,msg: => Any): Ping -def 
equals(x$1: Any): Boolean +def ensuring(cond: Ping => Boolean, msg: => Any): Ping +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def loop: Unit @@ -55,7 +63,7 @@ def name: String def poke: Unit def →[B](y: B): (Ping, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -66,20 +74,20 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit override def toString(): String private[this] val pong: Pong ================================================================================ -askType at PingPong.scala(8,10) +askType at PingPong.scala(8,18) ================================================================================ -[response] askTypeAt (8,10) +[response] askTypeAt (8,18) def loop: Unit = Ping.this.poke() ================================================================================ -askType at PingPong.scala(10,10) +askType at PingPong.scala(10,18) ================================================================================ -[response] askTypeAt (10,10) +[response] askTypeAt (10,18) def poke: Unit = Ping.this.pong.poke() ================================================================================ diff --git a/test/files/presentation/ping-pong/src/PingPong.scala b/test/files/presentation/ping-pong/src/PingPong.scala index 94f52c465801..7cad49e53766 100644 --- a/test/files/presentation/ping-pong/src/PingPong.scala +++ b/test/files/presentation/ping-pong/src/PingPong.scala @@ -5,9 +5,9 @@ class Ping { def name = "ping" - def loop/*?*/ { poke() } + def loop: Unit =/*?*/ { poke() } - def poke/*?*/ { pong./*!*/poke() } + def poke: Unit =/*?*/ { pong./*!*/poke() } override def toString = name } @@ -16,7 +16,7 @@ class Pong(ping: Ping) { 
val name/*?*/ = "pong" - def poke() { ping./*!*/poke() } + def poke(): Unit = { ping./*!*/poke() } override def toString = name -} \ No newline at end of file +} diff --git a/test/files/presentation/private-case-class-members/Test.scala b/test/files/presentation/private-case-class-members/Test.scala index e64c8238ead7..f8ee835053d9 100644 --- a/test/files/presentation/private-case-class-members/Test.scala +++ b/test/files/presentation/private-case-class-members/Test.scala @@ -9,7 +9,7 @@ object Test extends InteractiveTest { } private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = { - val sourceFile = sourceFiles.find(_.file.name == sourceName).head + val sourceFile = sourceFiles.find(_.file.name == sourceName).get compiler.askToDoFirst(sourceFile) val res = new Response[Unit] compiler.askReload(List(sourceFile), res) diff --git a/test/files/presentation/random/src/Random.scala b/test/files/presentation/random/src/Random.scala index af76a28f471d..d4f7c2d5b348 100644 --- a/test/files/presentation/random/src/Random.scala +++ b/test/files/presentation/random/src/Random.scala @@ -14,7 +14,7 @@ import java.util.Random */ object randomclient { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val filter/*?*/ = try { Integer.parseInt(args(0)/*?*/) match { case 1 => x: Int => x % 2 != 0 @@ -63,8 +63,8 @@ object randomserver { } catch { case e: IOException => - System.err.println("Could not listen on port: 9999."); - System.exit(-1) + System.err.println("Could not listen on port: 9999.") + throw e } } @@ -103,4 +103,4 @@ case class ServerThread(socket: Socket) extends Thread("ServerThread") { } } -} \ No newline at end of file +} diff --git a/test/files/presentation/recursive-ask/RecursiveAsk.scala b/test/files/presentation/recursive-ask/RecursiveAsk.scala index b0e29b3fd38d..b930f09c1e4d 100644 --- a/test/files/presentation/recursive-ask/RecursiveAsk.scala +++ b/test/files/presentation/recursive-ask/RecursiveAsk.scala @@ 
-3,11 +3,11 @@ import scala.tools.nsc.interactive.tests._ object Test extends InteractiveTest { override def execute(): Unit = recursiveAskForResponse() - def recursiveAskForResponse() { + def recursiveAskForResponse(): Unit = { val res0 = compiler.askForResponse( () => { println("[ outer] askForResponse") val res = compiler.askForResponse( () => { println("[nested] askForResponse") }) - println (res.get(5000) match { + println(res.get(5000) match { case Some(_) => "passed" case None => "timeout" }) diff --git a/test/files/presentation/scope-completion-1/Test.scala b/test/files/presentation/scope-completion-1/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/scope-completion-1/Test.scala +++ b/test/files/presentation/scope-completion-1/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/scope-completion-2/Test.scala b/test/files/presentation/scope-completion-2/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/scope-completion-2/Test.scala +++ b/test/files/presentation/scope-completion-2/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/scope-completion-3/Test.scala b/test/files/presentation/scope-completion-3/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/scope-completion-3/Test.scala +++ b/test/files/presentation/scope-completion-3/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/scope-completion-4/Test.scala 
b/test/files/presentation/scope-completion-4/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/scope-completion-4/Test.scala +++ b/test/files/presentation/scope-completion-4/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/scope-completion-4/src/Completions.scala b/test/files/presentation/scope-completion-4/src/Completions.scala index d11315720ad4..ec81a8da5bc1 100644 --- a/test/files/presentation/scope-completion-4/src/Completions.scala +++ b/test/files/presentation/scope-completion-4/src/Completions.scala @@ -4,11 +4,11 @@ package test class Completion1 { - def f { + def f: Unit = { - def ff { + def ff: Unit = { - def fff { + def fff: Unit = { /*_*/ } @@ -25,7 +25,7 @@ class Completion1 { class fc { - def fcf { + def fcf: Unit = { /*_*/ } @@ -53,7 +53,7 @@ class Completion1 { /*_*/ - def ccf { + def ccf: Unit = { /*_*/ } @@ -62,7 +62,7 @@ class Completion1 { /*_*/ - def cf { + def cf: Unit = { class cfc { /*_*/ @@ -70,7 +70,7 @@ class Completion1 { /*_*/ - def cff { + def cff: Unit = { /*_*/ } diff --git a/test/files/presentation/scope-completion-import/Test.scala b/test/files/presentation/scope-completion-import/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/scope-completion-import/Test.scala +++ b/test/files/presentation/scope-completion-import/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/scope-completion-import/src/Completions.scala b/test/files/presentation/scope-completion-import/src/Completions.scala index d30aa0b4e379..4b891ed49299 100644 --- a/test/files/presentation/scope-completion-import/src/Completions.scala +++ 
b/test/files/presentation/scope-completion-import/src/Completions.scala @@ -45,7 +45,7 @@ class Foo_1 { import O._ - def bar { + def bar: Unit = { /*_*/ } } @@ -55,7 +55,7 @@ class Foo_2 { val o = O import o._ - def bar { + def bar: Unit = { /*_*/ } } @@ -65,7 +65,7 @@ class Foo_3 { val c = new C import c._ - def bar { + def bar: Unit = { /*_*/ } } diff --git a/test/files/presentation/t1207/Test.scala b/test/files/presentation/t1207/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/t1207/Test.scala +++ b/test/files/presentation/t1207/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/t12308.check b/test/files/presentation/t12308.check index 80792e4a7f27..51b1da10bf8c 100644 --- a/test/files/presentation/t12308.check +++ b/test/files/presentation/t12308.check @@ -1,11 +1,11 @@ reload: Foo.scala askLoadedTyped 1 -Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1,List()) askLoadedTyped 2 -Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1,List()) reload: Foo.scala askLoadedTyped 3 -Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch 
or finally is equivalent to putting its body in a block; no exceptions are handled.,1,List()) targeted 1 askType at Foo.scala(2,37) @@ -25,7 +25,7 @@ askType at Foo.scala(4,37) [response] askTypeAt (4,37) 1 ================================================================================ -Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1,List()) reload: Foo.scala targeted 2 - doesn't handle nowarn correctly @@ -46,5 +46,5 @@ askType at Foo.scala(4,37) [response] askTypeAt (4,37) 1 ================================================================================ -Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) -Problem(RangePosition(t12308/src/Foo.scala, 109, 109, 114),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1,List()) +Problem(RangePosition(t12308/src/Foo.scala, 109, 109, 114),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1,List()) diff --git a/test/files/presentation/t12308/Test.scala b/test/files/presentation/t12308/Test.scala index fe767587654c..102b06170d69 100644 --- a/test/files/presentation/t12308/Test.scala +++ b/test/files/presentation/t12308/Test.scala @@ -6,7 +6,7 @@ object Test extends InteractiveTest { println(compiler.unitOfFile.values.flatMap(_.problems).mkString("", "\n", "")) } - override def runDefaultTests() { + override def runDefaultTests(): Unit = { val run = 
compiler.currentRun println("askLoadedTyped 1") @@ -19,7 +19,7 @@ object Test extends InteractiveTest { ws() assert(run eq compiler.currentRun) - askReload(sourceFiles) // new run, new tree, type checking again + askReload(sourceFiles.toIndexedSeq) // new run, new tree, type checking again println("askLoadedTyped 3") sourceFiles foreach (src => askLoadedTyped(src).get) ws() @@ -32,7 +32,7 @@ object Test extends InteractiveTest { assert(run1 eq compiler.currentRun) ws() - askReload(sourceFiles) + askReload(sourceFiles.toIndexedSeq) // what happens here: diff --git a/test/files/presentation/t13083.check b/test/files/presentation/t13083.check new file mode 100644 index 000000000000..d4e3e7cc2a30 --- /dev/null +++ b/test/files/presentation/t13083.check @@ -0,0 +1,7 @@ +reload: CompleteLocalImport.scala + +askTypeCompletion at CompleteLocalImport.scala(2,14) +================================================================================ +[response] askTypeCompletion at (2,14) +retrieved 14 members +================================================================================ diff --git a/test/files/presentation/t13083/Runner.scala b/test/files/presentation/t13083/Runner.scala new file mode 100644 index 000000000000..13e63ea4ed7e --- /dev/null +++ b/test/files/presentation/t13083/Runner.scala @@ -0,0 +1,5 @@ +import scala.tools.nsc.interactive.tests._ + +object Test extends InteractiveTest { + override protected def filterOutLines(line: String) = line.contains("package") +} diff --git a/test/files/presentation/t13083/src/CompleteLocalImport.scala b/test/files/presentation/t13083/src/CompleteLocalImport.scala new file mode 100644 index 000000000000..6e5d3df40b76 --- /dev/null +++ b/test/files/presentation/t13083/src/CompleteLocalImport.scala @@ -0,0 +1,3 @@ +object Autocompletewrapper { + import java./*!*/ +} diff --git a/test/files/presentation/t4287/Test.scala b/test/files/presentation/t4287/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- 
a/test/files/presentation/t4287/Test.scala +++ b/test/files/presentation/t4287/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/t4287b/Test.scala b/test/files/presentation/t4287b/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/t4287b/Test.scala +++ b/test/files/presentation/t4287b/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/t4287b/src/Foo.scala b/test/files/presentation/t4287b/src/Foo.scala index 47c676e2a288..ac494790028b 100644 --- a/test/files/presentation/t4287b/src/Foo.scala +++ b/test/files/presentation/t4287b/src/Foo.scala @@ -12,4 +12,4 @@ class C(i: Int) extends { } with Greeting { val name = "avc" println(i/*#*/) -} \ No newline at end of file +} diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check index 0f24d9626b7e..81bbf4f386b7 100644 --- a/test/files/presentation/t5708.check +++ b/test/files/presentation/t5708.check @@ -3,26 +3,30 @@ reload: Completions.scala askTypeCompletion at Completions.scala(17,9) ================================================================================ [response] askTypeCompletion at (17,9) -retrieved 37 members +retrieved 41 members [inaccessible] private def privateM: String [inaccessible] private[this] val privateV: String [inaccessible] private[this] val protectedV: String +[inaccessible] private[this] val self: test.Compat.type +[inaccessible] private[this] val self: test.Compat.type +[inaccessible] private[this] val self: test.Compat.type +[inaccessible] private[this] val self: test.Compat.type [inaccessible] protected def protectedValM: String [inaccessible] protected[package lang] def clone(): Object 
[inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (test.Compat.type, B) def ensuring(cond: Boolean): test.Compat.type -def ensuring(cond: Boolean,msg: => Any): test.Compat.type +def ensuring(cond: Boolean, msg: => Any): test.Compat.type def ensuring(cond: test.Compat.type => Boolean): test.Compat.type -def ensuring(cond: test.Compat.type => Boolean,msg: => Any): test.Compat.type -def equals(x$1: Any): Boolean +def ensuring(cond: test.Compat.type => Boolean, msg: => Any): test.Compat.type +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def toString(): String def →[B](y: B): (test.Compat.type, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -33,7 +37,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit final private[this] val CONST_STRING: String("constant") lazy val foo: Int private[package test] def pkgPrivateM: String diff --git a/test/files/presentation/t5708/Test.scala b/test/files/presentation/t5708/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/t5708/Test.scala +++ b/test/files/presentation/t5708/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/t7548.check b/test/files/presentation/t7548.check index 5bfb0d27feb0..11d9d96b8003 100644 --- a/test/files/presentation/t7548.check +++ b/test/files/presentation/t7548.check @@ -1 +1 @@ -(x: Int)Unit +(x: Int): Unit diff --git a/test/files/presentation/t7548/Test.scala b/test/files/presentation/t7548/Test.scala index 
94a60480563a..45f3ed49afd9 100644 --- a/test/files/presentation/t7548/Test.scala +++ b/test/files/presentation/t7548/Test.scala @@ -1,11 +1,11 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { - override protected def loadSources() { /* don't parse or typecheck sources */ } + override protected def loadSources(): Unit = { /* don't parse or typecheck sources */ } import compiler._ - override def runDefaultTests() { + override def runDefaultTests(): Unit = { val res = new Response[Tree] val pos = compiler.rangePos(sourceFiles.head, 102,102,102) compiler.askTypeAt(pos, res) diff --git a/test/files/presentation/t7548/src/Foo.scala b/test/files/presentation/t7548/src/Foo.scala index cc997f6e5f0a..0e61163e92c4 100644 --- a/test/files/presentation/t7548/src/Foo.scala +++ b/test/files/presentation/t7548/src/Foo.scala @@ -4,4 +4,4 @@ object Foo { def foo(x: Int, y: String) = {} foo(2) -} \ No newline at end of file +} diff --git a/test/files/presentation/t7548b.check b/test/files/presentation/t7548b.check index 35445fedf6a6..2faeff7335d8 100644 --- a/test/files/presentation/t7548b.check +++ b/test/files/presentation/t7548b.check @@ -1 +1 @@ -Foo.this.I2BI(Foo.this.I).+: (other: Foo.BI.type)Unit +Foo.this.I2BI(Foo.this.I).+: (other: Foo.BI.type): Unit diff --git a/test/files/presentation/t7548b/Test.scala b/test/files/presentation/t7548b/Test.scala index 0c022df83966..15baa220f6d9 100644 --- a/test/files/presentation/t7548b/Test.scala +++ b/test/files/presentation/t7548b/Test.scala @@ -1,11 +1,11 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { - override protected def loadSources() { /* don't parse or typecheck sources */ } + override protected def loadSources(): Unit = { /* don't parse or typecheck sources */ } import compiler._ - override def runDefaultTests() { + override def runDefaultTests(): Unit = { val res = new Response[Tree] val pos = 
compiler.rangePos(sourceFiles.head, 191, 191, 191) // + compiler.askTypeAt(pos, res) diff --git a/test/files/presentation/t7678/Runner.scala b/test/files/presentation/t7678/Runner.scala index 42001813c00c..7fbe989e9706 100644 --- a/test/files/presentation/t7678/Runner.scala +++ b/test/files/presentation/t7678/Runner.scala @@ -5,15 +5,15 @@ object Test extends InteractiveTest { import compiler._, definitions._ - override def runDefaultTests() { - def resolveTypeTagHyperlink() { + override def runDefaultTests(): Unit = { + def resolveTypeTagHyperlink(): Unit = { val sym = compiler.askForResponse(() => compiler.currentRun.runDefinitions.TypeTagClass).get.swap.getOrElse(???) val r = new Response[Position] compiler.askLinkPos(sym, new BatchSourceFile("", source), r) r.get } - def checkTypeTagSymbolConsistent() { + def checkTypeTagSymbolConsistent(): Unit = { compiler.askForResponse { () => { val runDefinitions = currentRun.runDefinitions diff --git a/test/files/presentation/t7915.check b/test/files/presentation/t7915.check index 0849aaa82b1b..85b7d0482919 100644 --- a/test/files/presentation/t7915.check +++ b/test/files/presentation/t7915.check @@ -17,7 +17,7 @@ askHyperlinkPos for `Bar` at (8,11) Foo.scala askHyperlinkPos for `baz` at (8,22) Foo.scala ================================================================================ -[response] found askHyperlinkPos for `baz` at (2,31) Foo.scala +[response] found askHyperlinkPos for `baz` at (2,39) Foo.scala ================================================================================ askHyperlinkPos for `Bar` at (9,11) Foo.scala @@ -27,5 +27,5 @@ askHyperlinkPos for `Bar` at (9,11) Foo.scala askHyperlinkPos for `baz` at (9,22) Foo.scala ================================================================================ -[response] found askHyperlinkPos for `baz` at (2,31) Foo.scala +[response] found askHyperlinkPos for `baz` at (2,39) Foo.scala 
================================================================================ diff --git a/test/files/presentation/t7915/Test.scala b/test/files/presentation/t7915/Test.scala index c2f89bdb17ca..694065de2325 100644 --- a/test/files/presentation/t7915/Test.scala +++ b/test/files/presentation/t7915/Test.scala @@ -1,7 +1,7 @@ import scala.tools.nsc.interactive.tests.InteractiveTest object Test extends InteractiveTest { - override def runDefaultTests() { + override def runDefaultTests(): Unit = { sourceFiles foreach (src => askLoadedTyped(src).get) super.runDefaultTests() } diff --git a/test/files/presentation/t7915/src/Foo.scala b/test/files/presentation/t7915/src/Foo.scala index 5c9ca36a6e46..aa6a85384e18 100644 --- a/test/files/presentation/t7915/src/Foo.scala +++ b/test/files/presentation/t7915/src/Foo.scala @@ -1,9 +1,9 @@ class Bar { - def bar(b: Int = 2) {}; def baz[X](b: Int = 2) {} + def bar(b: Int = 2): Unit = {}; def baz[X](b: Int = 2): Unit = {} } class Foo { - def foo() { + def foo(): Unit = { new Bar/*#*/().bar/*#*/() new Bar/*#*/().baz/*#*/[Any]() new Bar/*#*/().baz/*#*/() diff --git a/test/files/presentation/t8085.check b/test/files/presentation/t8085.check index fd1aae4c1afb..0e85de45f915 100644 --- a/test/files/presentation/t8085.check +++ b/test/files/presentation/t8085.check @@ -1,3 +1,2 @@ reload: NodeScalaSuite.scala -value rich is not a member of String -not found: type Foo +Test OK diff --git a/test/files/presentation/t8085/Test.scala b/test/files/presentation/t8085/Test.scala index 6ec7c11b9c39..9b0b45fb0588 100644 --- a/test/files/presentation/t8085/Test.scala +++ b/test/files/presentation/t8085/Test.scala @@ -1,17 +1,18 @@ -// scalac: -sourcepath src import scala.tools.nsc.interactive.tests.InteractiveTest import scala.reflect.internal.util.SourceFile import scala.tools.nsc.interactive.Response object Test extends InteractiveTest { + override def argsString = "-sourcepath src" + override def execute(): Unit = { val src = 
loadSourceAndWaitUntilTypechecked("NodeScalaSuite.scala") checkErrors(src) } private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = { - val sourceFile = sourceFiles.find(_.file.name == sourceName).head + val sourceFile = sourceFiles.find(_.file.name == sourceName).get askReload(List(sourceFile)).get askLoadedTyped(sourceFile).get sourceFile diff --git a/test/files/presentation/t8085b.check b/test/files/presentation/t8085b.check index fd1aae4c1afb..0e85de45f915 100644 --- a/test/files/presentation/t8085b.check +++ b/test/files/presentation/t8085b.check @@ -1,3 +1,2 @@ reload: NodeScalaSuite.scala -value rich is not a member of String -not found: type Foo +Test OK diff --git a/test/files/presentation/t8085b/Test.scala b/test/files/presentation/t8085b/Test.scala index 6ec7c11b9c39..9b0b45fb0588 100644 --- a/test/files/presentation/t8085b/Test.scala +++ b/test/files/presentation/t8085b/Test.scala @@ -1,17 +1,18 @@ -// scalac: -sourcepath src import scala.tools.nsc.interactive.tests.InteractiveTest import scala.reflect.internal.util.SourceFile import scala.tools.nsc.interactive.Response object Test extends InteractiveTest { + override def argsString = "-sourcepath src" + override def execute(): Unit = { val src = loadSourceAndWaitUntilTypechecked("NodeScalaSuite.scala") checkErrors(src) } private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = { - val sourceFile = sourceFiles.find(_.file.name == sourceName).head + val sourceFile = sourceFiles.find(_.file.name == sourceName).get askReload(List(sourceFile)).get askLoadedTyped(sourceFile).get sourceFile diff --git a/test/files/presentation/t8459/Test.scala b/test/files/presentation/t8459/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/t8459/Test.scala +++ b/test/files/presentation/t8459/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object 
Test extends InteractiveTest diff --git a/test/files/presentation/t8934/Runner.scala b/test/files/presentation/t8934/Runner.scala index 944f4583913e..385a18bacb65 100644 --- a/test/files/presentation/t8934/Runner.scala +++ b/test/files/presentation/t8934/Runner.scala @@ -10,7 +10,7 @@ object Test extends InteractiveTest { } private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = { - val sourceFile = sourceFiles.find(_.file.name == sourceName).head + val sourceFile = sourceFiles.find(_.file.name == sourceName).get askReload(List(sourceFile)).get askLoadedTyped(sourceFile).get sourceFile diff --git a/test/files/presentation/t8941/Runner.scala b/test/files/presentation/t8941/Runner.scala index 0a8923a583f9..14a6aa835064 100644 --- a/test/files/presentation/t8941/Runner.scala +++ b/test/files/presentation/t8941/Runner.scala @@ -1,11 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest { - override def runDefaultTests() { - // make sure typer is done.. the virtual pattern matcher might translate - // some trees and mess up positions. But we'll catch it red handed! 
- // sourceFiles foreach (src => askLoadedTyped(src).get) - super.runDefaultTests() - } - -} +object Test extends InteractiveTest diff --git a/test/files/presentation/t8941/src/Source.scala b/test/files/presentation/t8941/src/Source.scala index 7438cccb03d3..85a430e85f51 100644 --- a/test/files/presentation/t8941/src/Source.scala +++ b/test/files/presentation/t8941/src/Source.scala @@ -1,7 +1,7 @@ object Foo { implicit class MatCreator(val ctx: StringContext) extends AnyVal { def m(args: Any*): Unit = { - ctx.checkLengths(args) + ctx.s(args: _*) } ???/*?*/ } diff --git a/test/files/presentation/t8941b.check b/test/files/presentation/t8941b.check deleted file mode 100644 index a41525b0881f..000000000000 --- a/test/files/presentation/t8941b.check +++ /dev/null @@ -1,3 +0,0 @@ -IdempotencyTest.scala:7: warning: imported `Reporter` is permanently hidden by definition of trait Reporter in package core -import reporters.Reporter - ^ diff --git a/test/files/presentation/t8941b/IdempotencyTest.scala b/test/files/presentation/t8941b/IdempotencyTest.scala index 36870dfae575..f2b6fff86fdc 100644 --- a/test/files/presentation/t8941b/IdempotencyTest.scala +++ b/test/files/presentation/t8941b/IdempotencyTest.scala @@ -16,14 +16,14 @@ abstract class IdempotencyTest { self => private object Break extends scala.util.control.ControlThrowable - private val compilerReporter: reporters.Reporter = new InteractiveReporter { + private val compilerReporter: Reporter = new InteractiveReporter { override def compiler = self.compiler } object compiler extends Global(settings, compilerReporter) { - override def checkForMoreWork(pos: Position) { + override def checkForMoreWork(pos: Position): Unit = { } - override def signalDone(context: Context, old: Tree, result: Tree) { + override def signalDone(context: Context, old: Tree, result: Tree): Unit = { // println("signalDone: " + old.toString.take(50).replaceAll("\n", "\\n")) if (!interrupted && lockedCount == 0 && interruptsEnabled && 
shouldInterrupt(result)) { interrupted = true @@ -35,7 +35,7 @@ abstract class IdempotencyTest { self => } // we're driving manually using our own thread, disable the check here. - override def assertCorrectThread() {} + override def assertCorrectThread(): Unit = {} } import compiler._ @@ -53,12 +53,12 @@ abstract class IdempotencyTest { self => private val source: SourceFile = newSourceFile(code) private def markerPosition: Position = source.position(code.indexOf("/*?*/")) - def assertNoProblems() { + def assertNoProblems(): Unit = { val problems = getUnit(source).get.problems assert(problems.isEmpty, problems.mkString("\n")) } - def show() { + def show(): Unit = { reloadSource(source) try { typedTree(source, true) @@ -69,5 +69,5 @@ abstract class IdempotencyTest { self => assertNoProblems() } - def main(args: Array[String]) { show() } + def main(args: Array[String]): Unit = { show() } } diff --git a/test/files/presentation/t8941b/Test.scala b/test/files/presentation/t8941b/Test.scala index 7269a1428689..d44cfd609b7f 100644 --- a/test/files/presentation/t8941b/Test.scala +++ b/test/files/presentation/t8941b/Test.scala @@ -3,7 +3,7 @@ import scala.tools.nsc.interactive.tests.core.IdempotencyTest // At the time of writing this test, removing any part of `enterExistingSym` // leads to a failure. 
object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { test(""" object Foo { def term { @@ -43,7 +43,7 @@ object Test { """) } - def test(code0: String) { + def test(code0: String): Unit = { val t = new IdempotencyTest { def code = code0 } diff --git a/test/files/presentation/t9238.scala b/test/files/presentation/t9238.scala index 5995f670a01c..a56d57ea3059 100644 --- a/test/files/presentation/t9238.scala +++ b/test/files/presentation/t9238.scala @@ -4,7 +4,7 @@ import scala.tools.nsc.reporters.ConsoleReporter object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val settings = new Settings settings.usejavacp.value = true val reporter = new ConsoleReporter(settings) diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check index b77887f8f7a8..eb40acef3b3b 100644 --- a/test/files/presentation/visibility.check +++ b/test/files/presentation/visibility.check @@ -3,15 +3,19 @@ reload: Completions.scala askTypeCompletion at Completions.scala(14,12) ================================================================================ [response] askTypeCompletion at (14,12) -retrieved 35 members +retrieved 39 members [inaccessible] private[this] def secretPrivateThis(): Unit +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo def +(other: String): String def ->[B](y: B): (accessibility.Foo, B) def ensuring(cond: Boolean): accessibility.Foo -def ensuring(cond: Boolean,msg: => Any): accessibility.Foo +def ensuring(cond: Boolean, msg: => Any): accessibility.Foo def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo -def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo -def equals(x$1: Any): Boolean +def ensuring(cond: accessibility.Foo => Boolean, msg: => 
Any): accessibility.Foo +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def secretPublic(): Unit @@ -19,7 +23,7 @@ def someTests(other: accessibility.Foo): Unit def toString(): String def →[B](y: B): (accessibility.Foo, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -30,7 +34,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit private def secretPrivate(): Unit protected def secretProtected(): Unit protected[package accessibility] def secretProtectedInPackage(): Unit @@ -41,14 +45,18 @@ protected[package lang] def finalize(): Unit askTypeCompletion at Completions.scala(16,11) ================================================================================ [response] askTypeCompletion at (16,11) -retrieved 35 members +retrieved 39 members +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo def +(other: String): String def ->[B](y: B): (accessibility.Foo, B) def ensuring(cond: Boolean): accessibility.Foo -def ensuring(cond: Boolean,msg: => Any): accessibility.Foo +def ensuring(cond: Boolean, msg: => Any): accessibility.Foo def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo -def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo -def equals(x$1: Any): Boolean +def ensuring(cond: accessibility.Foo => Boolean, msg: => Any): accessibility.Foo +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def secretPublic(): Unit @@ -56,7 +64,7 @@ def someTests(other: accessibility.Foo): Unit def 
toString(): String def →[B](y: B): (accessibility.Foo, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -67,7 +75,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit private def secretPrivate(): Unit private[this] def secretPrivateThis(): Unit protected def secretProtected(): Unit @@ -79,14 +87,18 @@ protected[package lang] def finalize(): Unit askTypeCompletion at Completions.scala(22,11) ================================================================================ [response] askTypeCompletion at (22,11) -retrieved 34 members +retrieved 38 members +[inaccessible] private[this] val self: accessibility.AccessibilityChecks +[inaccessible] private[this] val self: accessibility.AccessibilityChecks +[inaccessible] private[this] val self: accessibility.AccessibilityChecks +[inaccessible] private[this] val self: accessibility.AccessibilityChecks def +(other: String): String def ->[B](y: B): (accessibility.AccessibilityChecks, B) def ensuring(cond: Boolean): accessibility.AccessibilityChecks -def ensuring(cond: Boolean,msg: => Any): accessibility.AccessibilityChecks +def ensuring(cond: Boolean, msg: => Any): accessibility.AccessibilityChecks def ensuring(cond: accessibility.AccessibilityChecks => Boolean): accessibility.AccessibilityChecks -def ensuring(cond: accessibility.AccessibilityChecks => Boolean,msg: => Any): accessibility.AccessibilityChecks -def equals(x$1: Any): Boolean +def ensuring(cond: accessibility.AccessibilityChecks => Boolean, msg: => Any): accessibility.AccessibilityChecks +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def secretPublic(): Unit @@ -95,7 +107,7 @@ def someTests: Unit def toString(): String def →[B](y: B): 
(accessibility.AccessibilityChecks, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -106,7 +118,7 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit protected def secretProtected(): Unit protected[package accessibility] def secretProtectedInPackage(): Unit protected[package lang] def clone(): Object @@ -116,19 +128,23 @@ protected[package lang] def finalize(): Unit askTypeCompletion at Completions.scala(28,10) ================================================================================ [response] askTypeCompletion at (28,10) -retrieved 35 members +retrieved 39 members [inaccessible] private def secretPrivate(): Unit [inaccessible] private[this] def secretPrivateThis(): Unit +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo [inaccessible] protected def secretProtected(): Unit [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit def +(other: String): String def ->[B](y: B): (accessibility.Foo, B) def ensuring(cond: Boolean): accessibility.Foo -def ensuring(cond: Boolean,msg: => Any): accessibility.Foo +def ensuring(cond: Boolean, msg: => Any): accessibility.Foo def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo -def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo -def equals(x$1: Any): Boolean +def ensuring(cond: accessibility.Foo => Boolean, msg: => Any): accessibility.Foo +def equals(x$1: Object): Boolean def formatted(fmtstr: String): String def hashCode(): Int def secretPublic(): Unit @@ 
-136,7 +152,7 @@ def someTests(other: accessibility.Foo): Unit def toString(): String def →[B](y: B): (accessibility.Foo, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -147,16 +163,20 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit protected[package accessibility] def secretProtectedInPackage(): Unit ================================================================================ askTypeCompletion at Completions.scala(37,8) ================================================================================ [response] askTypeCompletion at (37,8) -retrieved 35 members +retrieved 39 members [inaccessible] private def secretPrivate(): Unit [inaccessible] private[this] def secretPrivateThis(): Unit +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo +[inaccessible] private[this] val self: accessibility.Foo [inaccessible] protected def secretProtected(): Unit [inaccessible] protected[package accessibility] def secretProtectedInPackage(): Unit [inaccessible] protected[package lang] def clone(): Object @@ -164,10 +184,10 @@ retrieved 35 members def +(other: String): String def ->[B](y: B): (accessibility.Foo, B) def ensuring(cond: Boolean): accessibility.Foo -def ensuring(cond: Boolean,msg: => Any): accessibility.Foo +def ensuring(cond: Boolean, msg: => Any): accessibility.Foo def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo -def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo -def equals(x$1: Any): Boolean +def ensuring(cond: accessibility.Foo => Boolean, msg: => Any): accessibility.Foo +def equals(x$1: Object): Boolean 
def formatted(fmtstr: String): String def hashCode(): Int def secretPublic(): Unit @@ -175,7 +195,7 @@ def someTests(other: accessibility.Foo): Unit def toString(): String def →[B](y: B): (accessibility.Foo, B) final def !=(x$1: Any): Boolean -final def ##(): Int +final def ## : Int final def ==(x$1: Any): Boolean final def asInstanceOf[T0]: T0 final def eq(x$1: AnyRef): Boolean @@ -186,5 +206,5 @@ final def notifyAll(): Unit final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit -final def wait(x$1: Long,x$2: Int): Unit +final def wait(x$1: Long, x$2: Int): Unit ================================================================================ diff --git a/test/files/presentation/visibility/Test.scala b/test/files/presentation/visibility/Test.scala index bec1131c4cc1..14a6aa835064 100644 --- a/test/files/presentation/visibility/Test.scala +++ b/test/files/presentation/visibility/Test.scala @@ -1,3 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest diff --git a/test/files/presentation/visibility/src/Completions.scala b/test/files/presentation/visibility/src/Completions.scala index 69ec3959ad63..c66f0d8666ca 100644 --- a/test/files/presentation/visibility/src/Completions.scala +++ b/test/files/presentation/visibility/src/Completions.scala @@ -37,4 +37,4 @@ package other { o./*!*/ // should only match secretPublic } } -} \ No newline at end of file +} diff --git a/test/files/res/t5167/t5167_1.scala b/test/files/res/t5167/t5167_1.scala index ed282435076e..474f912446ed 100644 --- a/test/files/res/t5167/t5167_1.scala +++ b/test/files/res/t5167/t5167_1.scala @@ -1,7 +1,7 @@ package compilerbug trait SadTrait { - def buggyMethod[T](argWithDefault1: Int = 0)(argWithDefault2: String = "default") { + def buggyMethod[T](argWithDefault1: Int = 0)(argWithDefault2: String = "default"): Unit = { for (i <- 0 to 1) { val x = 
argWithDefault1 val y = argWithDefault2 diff --git a/test/files/res/t5167/t5167_2.scala b/test/files/res/t5167/t5167_2.scala index 5aa56efe75e3..2f8f1c03a4be 100644 --- a/test/files/res/t5167/t5167_2.scala +++ b/test/files/res/t5167/t5167_2.scala @@ -1,7 +1,7 @@ package compilerbug class TestClass { - def repro() { + def repro(): Unit = { SadObject.buggyMethod[Int]()() } } diff --git a/test/files/res/t597.check b/test/files/res/t597.check index 6cf64f734be2..4cea293a5d83 100644 --- a/test/files/res/t597.check +++ b/test/files/res/t597.check @@ -1,4 +1,6 @@ -nsc> -nsc> +nsc> warning: 2 deprecations (since 2.13.2); re-run with -deprecation for details + +nsc> warning: 1 deprecation (since 2.13.2); re-run with -deprecation for details + nsc> diff --git a/test/files/res/t687.check b/test/files/res/t687.check index 5f72c98636a9..d3da69b37c16 100644 --- a/test/files/res/t687.check +++ b/test/files/res/t687.check @@ -3,6 +3,6 @@ nsc> nsc> t687/QueryB.scala:3: error: name clash between defined and inherited member: def equals(x$1: Any): Boolean in class Any and override def equals(o: Object): Boolean at line 3 -have same type after erasure: (x$1: Object)Boolean - override def equals(o : Object) = false; +have same type after erasure: (x$1: Object): Boolean + override def equals(o : Object) = false ^ diff --git a/test/files/res/t687/QueryA.scala b/test/files/res/t687/QueryA.scala index 72365c7e7f64..939a03b6dc54 100644 --- a/test/files/res/t687/QueryA.scala +++ b/test/files/res/t687/QueryA.scala @@ -1,4 +1,4 @@ -package t687; +package t687 trait Query { - override def equals(o : Any) = false; + override def equals(o : Any) = false } diff --git a/test/files/res/t687/QueryB.scala b/test/files/res/t687/QueryB.scala index 8f6f2d9ebf26..9fc9e457da87 100644 --- a/test/files/res/t687/QueryB.scala +++ b/test/files/res/t687/QueryB.scala @@ -1,4 +1,4 @@ -package t687; +package t687 trait Query { - override def equals(o : Object) = false; + override def equals(o : Object) = false } 
diff --git a/test/files/res/t722/ScanBased.scala b/test/files/res/t722/ScanBased.scala index 8e55b800ac8d..98cd08cd7e1b 100644 --- a/test/files/res/t722/ScanBased.scala +++ b/test/files/res/t722/ScanBased.scala @@ -1,10 +1,10 @@ package t722; trait ScanBased extends Parser { trait AdjacentLink extends Link { - override def foo() = super.foo; + override def foo() = super.foo(); } trait WhitespaceLink extends AdjacentLink { - override def foo() = super.foo; + override def foo() = super.foo(); } } diff --git a/test/files/res/t743.check b/test/files/res/t743.check index 6cf64f734be2..0f4aa7526148 100644 --- a/test/files/res/t743.check +++ b/test/files/res/t743.check @@ -1,4 +1,5 @@ -nsc> +nsc> warning: 1 deprecation (since 2.13.2); re-run with -deprecation for details + nsc> nsc> diff --git a/test/files/res/t831.check b/test/files/res/t831.check index 6cf64f734be2..c96021985fea 100644 --- a/test/files/res/t831.check +++ b/test/files/res/t831.check @@ -1,4 +1,5 @@ -nsc> +nsc> warning: 3 deprecations (since 2.13.2); re-run with -deprecation for details + nsc> nsc> diff --git a/test/files/res/t9170.check b/test/files/res/t9170.check index 6d40b6ba8df8..e5795a222ba5 100644 --- a/test/files/res/t9170.check +++ b/test/files/res/t9170.check @@ -2,6 +2,6 @@ nsc> t9170/A.scala:3: error: double definition: def f[A](a: => A): Int at line 2 and def f[A](a: => Either[Exception,A]): Int at line 3 -have same type after erasure: (a: Function0)Int +have same type after erasure: (a: Function0): Int def f[A](a: => Either[Exception, A]) = 2 ^ diff --git a/test/files/run/BoxUnboxTest.check b/test/files/run/BoxUnboxTest.check new file mode 100644 index 000000000000..7fe58da60d1e --- /dev/null +++ b/test/files/run/BoxUnboxTest.check @@ -0,0 +1,21 @@ +BoxUnboxTest.scala:55: warning: comparing values of types Int and Null using `==` will always yield false + val n6 = null.asInstanceOf[Int] == null + ^ +BoxUnboxTest.scala:59: warning: comparing values of types Int and Null using `!=` will 
always yield true + val n8 = null.asInstanceOf[Int] != null + ^ +BoxUnboxTest.scala:65: warning: comparing values of types Int and Null using `==` will always yield false + val n10 = mp.get(0) == null // scala/bug#602 + ^ +BoxUnboxTest.scala:114: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true + assert(eff() == b); chk() + ^ +BoxUnboxTest.scala:116: warning: comparing values of types scala.runtime.BoxedUnit and scala.runtime.BoxedUnit using `==` will always yield true + assert(boxing(eff()) == b); chk() + ^ +BoxUnboxTest.scala:129: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true + assert(n1 == b) + ^ +BoxUnboxTest.scala:131: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true + val n2 = null.asInstanceOf[Unit] == b + ^ diff --git a/test/junit/scala/lang/primitives/BoxUnboxTest.scala b/test/files/run/BoxUnboxTest.scala similarity index 83% rename from test/junit/scala/lang/primitives/BoxUnboxTest.scala rename to test/files/run/BoxUnboxTest.scala index 461e960d7ee0..90a8a5a45907 100644 --- a/test/junit/scala/lang/primitives/BoxUnboxTest.scala +++ b/test/files/run/BoxUnboxTest.scala @@ -1,25 +1,19 @@ -package scala.lang.primitives +import org.junit.Assert.{ assertThrows => _, _ } +import scala.tools.testkit.AssertUtil._ -import org.junit.Assert._ -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 +class VCI(val x: Int) extends AnyVal { override def toString = "" + x } -import scala.annotation.nowarn -import scala.tools.testing.RunTesting +object Test { -object BoxUnboxTest { - class VCI(val x: Int) extends AnyVal { override def toString = "" + x } -} - -@RunWith(classOf[JUnit4]) -@nowarn("msg=comparing values") -class BoxUnboxTest extends RunTesting { + def main(args: Array[String]): Unit = { + boxUnboxInt() + numericConversions() + boxUnboxBoolean() + boxUnboxUnit() + 
t9671() + } - @Test def boxUnboxInt(): Unit = { - import scala.tools.testing.AssertUtil._ - import org.junit.Assert._ def genericNull[T] = null.asInstanceOf[T] // allowed, see scala/bug#4437, point 2 @@ -79,10 +73,7 @@ class BoxUnboxTest extends RunTesting { assertEquals(n12, 0) } - @Test def numericConversions(): Unit = { - import scala.tools.testing.AssertUtil._ - import org.junit.Assert._ val i1 = 1L.asInstanceOf[Int] assertEquals(i1, 1) @@ -92,20 +83,16 @@ class BoxUnboxTest extends RunTesting { } } - @Test def boxUnboxBoolean(): Unit = { val n1 = Option(null.asInstanceOf[Boolean]) assertEquals(n1, Some(false)) } - @Test def boxUnboxUnit(): Unit = { // should not use assertEquals in this test: it takes two Object parameters. normally, Unit does // not conform to Object, but for Java-defined methods scalac makes an exception and treats them // as Any. passing a Unit as Any makes the compiler go through another layer of boxing, so it // can hide some bugs (where we actually have a null, but the compiler makes it a ()). 
- import scala.tools.testing.AssertUtil._ - import org.junit.Assert._ var v = 0 def eff() = { v = 1 } @@ -113,13 +100,30 @@ class BoxUnboxTest extends RunTesting { val b = runtime.BoxedUnit.UNIT + def boxing(x: Unit): runtime.BoxedUnit = { + val k = this.getClass.getClassLoader.loadClass("scala.Unit$") + val u = k.getDeclaredField("MODULE$").get(null) + k.getDeclaredMethods.find(_.getName == "box").get.invoke(u, x.asInstanceOf[Object]).asInstanceOf[runtime.BoxedUnit] + } + def unboxing(x: Object): Unit = { + val k = this.getClass.getClassLoader.loadClass("scala.Unit$") + val u = k.getDeclaredField("MODULE$").get(null) + k.getDeclaredMethods.find(_.getName == "unbox").get.invoke(u, x).asInstanceOf[Unit] + } + assert(eff() == b); chk() - assert(Unit.box(eff()) == b); chk() + //assert(Unit.box(eff()) == b); chk() + assert(boxing(eff()) == b); chk() assert(().asInstanceOf[Object] == b) - Unit.unbox({eff(); b}); chk() - Unit.unbox({eff(); null}); chk() - assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() + //Unit.unbox({eff(); b}); chk() + //Unit.unbox({eff(); null}); chk() + //assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() + unboxing({eff(); b}); chk() + unboxing({eff(); null}); chk() + assertThrows[ClassCastException]( + try unboxing({eff(); ""}) catch { case t: java.lang.reflect.InvocationTargetException => throw t.getCause } + ); chk() val n1 = null.asInstanceOf[Unit] assert(n1 == b) @@ -132,9 +136,7 @@ class BoxUnboxTest extends RunTesting { assertEquals(n3, "()") } - @Test def t9671(): Unit = { - import scala.lang.primitives.BoxUnboxTest.VCI def f1(a: Any) = "" + a def f2(a: AnyVal) = "" + a diff --git a/test/files/run/Course-2002-01.scala b/test/files/run/Course-2002-01.scala index 3426f26eed19..b489d3cca18c 100644 --- a/test/files/run/Course-2002-01.scala +++ b/test/files/run/Course-2002-01.scala @@ -188,19 +188,19 @@ object M4 { else pascal(c - 1, l - 1) + pascal(c, l - 1); Console.print(pascal(0,0)); - Console.println; + 
Console.println() Console.print(pascal(0,1)); Console.print(' '); Console.print(pascal(1,1)); - Console.println; + Console.println() Console.print(pascal(0,2)); Console.print(' '); Console.print(pascal(1,2)); Console.print(' '); Console.print(pascal(2,2)); - Console.println; + Console.println() Console.print(pascal(0,3)); Console.print(' '); @@ -209,7 +209,7 @@ object M4 { Console.print(pascal(2,3)); Console.print(' '); Console.print(pascal(3,3)); - Console.println; + Console.println() Console.print(pascal(0,4)); Console.print(' '); @@ -220,7 +220,7 @@ object M4 { Console.print(pascal(3,4)); Console.print(' '); Console.print(pascal(4,4)); - Console.println; + Console.println() } //############################################################################ diff --git a/test/files/run/Course-2002-02.scala b/test/files/run/Course-2002-02.scala index b8650108ed89..0d5d6c437369 100644 --- a/test/files/run/Course-2002-02.scala +++ b/test/files/run/Course-2002-02.scala @@ -8,7 +8,7 @@ object M0 { Console.println(gcd(14,21)) Console.println(factorial(5)) - Console.println + Console.println() } //############################################################################ @@ -38,7 +38,7 @@ object M1 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } //############################################################################ @@ -65,7 +65,7 @@ object M2 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } //############################################################################ @@ -85,7 +85,7 @@ object M3 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } 
//############################################################################ @@ -108,7 +108,7 @@ object M4 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } //############################################################################ @@ -129,7 +129,7 @@ object M5 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } //############################################################################ @@ -149,7 +149,7 @@ object M6 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } //############################################################################ @@ -172,7 +172,7 @@ object M7 { Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) - Console.println + Console.println() } //############################################################################ @@ -188,7 +188,7 @@ object M8 { Console.println("pi = " + productPi(20)) Console.println("pi = " + pi) - Console.println + Console.println() } //############################################################################ @@ -223,7 +223,7 @@ object M9 { Console.println("pi = " + sumPi(20)) Console.println("pi = " + productPi(20)) Console.println("pi = " + pi) - Console.println + Console.println() } //############################################################################ @@ -244,7 +244,7 @@ object MA { def sqrt(x: Double) = fixedPoint(y => (y + x / y) / 2)(1.0) Console.println("sqrt(2) = " + sqrt(2)) - Console.println + Console.println() } //############################################################################ @@ -266,7 +266,7 @@ object MB { def sqrt(x: Double) = 
fixedPoint(averageDamp(y => x/y))(1.0); Console.println("sqrt(2) = " + sqrt(2)) - Console.println + Console.println() } //############################################################################ @@ -294,13 +294,13 @@ object MC { "1 + 2 + .. + 5 = " + sum(x => x)(1, 5)); Console.println( "1 * 2 * .. * 5 = " + product(x => x)(1, 5)); - Console.println; + Console.println() Console.println( "1^2 + 2^2 + .. + 5^2 = " + sum(x => x*x)(1, 5)); Console.println( "1^2 * 2^2 * .. * 5^2 = " + product(x => x*x)(1, 5)); - Console.println; + Console.println() Console.println( "factorial(0) = " + factorial(0)) @@ -314,7 +314,7 @@ object MC { "factorial(4) = " + factorial(4)) Console.println( "factorial(5) = " + factorial(5)) - Console.println + Console.println() } //############################################################################ @@ -339,13 +339,13 @@ object MD { "1 + 2 + .. + 5 = " + sum(x => x)(1, 5)) Console.println( "1 * 2 * .. * 5 = " + product(x => x)(1, 5)) - Console.println; + Console.println() Console.println( "1^2 + 2^2 + .. + 5^2 = " + sum(x => x*x)(1, 5)) Console.println( "1^2 * 2^2 * .. * 5^2 = " + product(x => x*x)(1, 5)) - Console.println; + Console.println() Console.println( "factorial(0) = " + factorial(0)) @@ -359,7 +359,7 @@ object MD { "factorial(4) = " + factorial(4)) Console.println( "factorial(5) = " + factorial(5)) - Console.println + Console.println() } //############################################################################ @@ -382,13 +382,13 @@ object ME { "1 + 2 + .. + 5 = " + sum(x => x)(1, 5)) Console.println( "1 * 2 * .. * 5 = " + product(x => x)(1, 5)) - Console.println; + Console.println() Console.println( "1^2 + 2^2 + .. + 5^2 = " + sum(x => x*x)(1, 5)) Console.println( "1^2 * 2^2 * .. 
* 5^2 = " + product(x => x*x)(1, 5)) - Console.println; + Console.println() Console.println( "factorial(0) = " + factorial(0)) @@ -402,7 +402,7 @@ object ME { "factorial(4) = " + factorial(4)) Console.println( "factorial(5) = " + factorial(5)) - Console.println + Console.println() } //############################################################################ @@ -464,7 +464,7 @@ object MH { Console.println("power(0,6) = " + power(0,6)) Console.println("power(0,7) = " + power(0,7)) Console.println("power(0,8) = " + power(0,8)) - Console.println + Console.println() Console.println("power(1,0) = " + power(1,0)) Console.println("power(1,1) = " + power(1,1)) @@ -475,7 +475,7 @@ object MH { Console.println("power(1,6) = " + power(1,6)) Console.println("power(1,7) = " + power(1,7)) Console.println("power(1,8) = " + power(1,8)) - Console.println + Console.println() Console.println("power(2,0) = " + power(2,0)) Console.println("power(2,1) = " + power(2,1)) @@ -486,7 +486,7 @@ object MH { Console.println("power(2,6) = " + power(2,6)) Console.println("power(2,7) = " + power(2,7)) Console.println("power(2,8) = " + power(2,8)) - Console.println + Console.println() Console.println("power(3,0) = " + power(3,0)) Console.println("power(3,1) = " + power(3,1)) @@ -497,7 +497,7 @@ object MH { Console.println("power(3,6) = " + power(3,6)) Console.println("power(3,7) = " + power(3,7)) Console.println("power(3,8) = " + power(3,8)) - Console.println + Console.println() Console.println("power(4,0) = " + power(4,0)) Console.println("power(4,1) = " + power(4,1)) @@ -508,7 +508,7 @@ object MH { Console.println("power(4,6) = " + power(4,6)) Console.println("power(4,7) = " + power(4,7)) Console.println("power(4,8) = " + power(4,8)) - Console.println + Console.println() Console.println("power(5,0) = " + power(5,0)) Console.println("power(5,1) = " + power(5,1)) @@ -519,7 +519,7 @@ object MH { Console.println("power(5,6) = " + power(5,6)) Console.println("power(5,7) = " + power(5,7)) 
Console.println("power(5,8) = " + power(5,8)) - Console.println + Console.println() } //############################################################################ diff --git a/test/files/run/Course-2002-03.scala b/test/files/run/Course-2002-03.scala index c3b357e610b6..d1e4fdaa32a6 100644 --- a/test/files/run/Course-2002-03.scala +++ b/test/files/run/Course-2002-03.scala @@ -15,7 +15,7 @@ object M0 { r.denom * s.denom); def makeString(r: Rational) = - r.numer + "/" + r.denom; + s"${r.numer}/${r.denom}" val x = new Rational(1, 2); val y = new Rational(1, 3); @@ -23,7 +23,7 @@ object M0 { Console.println(x.denom); Console.println(makeString(x)); Console.println(makeString(addRational(x,y))); - Console.println; + Console.println() } //############################################################################ @@ -40,7 +40,7 @@ object M1 { new Rational( numer * r.numer, denom * r.denom); - override def toString() = numer + "/" + denom; + override def toString() = s"$numer/$denom" } val x = new Rational(1, 3); @@ -50,7 +50,7 @@ object M1 { Console.println(y); Console.println(z); Console.println(x.add(y).mul(z)); - Console.println; + Console.println() } //############################################################################ @@ -77,7 +77,7 @@ object M2 { new Rational( numer * r.denom, denom * r.numer); - override def toString() = numer + "/" + denom; + override def toString() = s"$numer/$denom" } val x = new Rational(1, 3); @@ -87,7 +87,7 @@ object M2 { Console.println(y); Console.println(z); Console.println(x.add(y).mul(z)); - Console.println; + Console.println() } //############################################################################ @@ -100,7 +100,7 @@ object M3 { def less(that: Rational) = this.numer * that.denom < that.numer * this.denom; def max(that: Rational) = if (this.less(that)) that else this; - override def toString() = numer + "/" + denom; + override def toString() = s"$numer/$denom" } val x = new Rational(66, 42); @@ -109,7 +109,7 @@ 
object M3 { Console.println(y); Console.println(x.max(y)); Console.println(y.max(x)); - Console.println; + Console.println() } //############################################################################ @@ -136,13 +136,13 @@ object M4 { new Rational( numer * r.denom, denom * r.numer); - override def toString() = numer + "/" + denom; + override def toString() = s"$numer/$denom" } val x = new Rational(1, 2); val y = new Rational(1, 3); Console.println(x * x + y * y); - Console.println; + Console.println() } //############################################################################ @@ -174,7 +174,7 @@ object M5 { Console.println(x contains 1); Console.println(x contains 2); Console.println(x contains 3); - Console.println; + Console.println() } //############################################################################ @@ -272,7 +272,7 @@ object M8 { new NonEmpty(f(elem), lset, rset) } - def foreach(f: Int => Unit) { + def foreach(f: Int => Unit): Unit = { left.foreach(f); f(elem); right.foreach(f); @@ -287,7 +287,7 @@ object M8 { if (f(elem)) accu.incl(elem) else accu)); } - def test = { + def test() = { val set0: IntSet = new Empty; val set1: IntSet = new Empty incl 1; val set2: IntSet = new Empty incl 1 incl 2; @@ -301,27 +301,27 @@ object M8 { Console.println("set2 = " + set2); Console.println("set3 = " + (set3.toString())); Console.println("set4 = " + set4); - Console.println; + Console.println() Console.println("set2 contains the following elements:"); set2.foreach(Console.println); - Console.println; + Console.println() Console.println("set3 contains the following elements:"); - set3 foreach Console.println; - Console.println; + set3 foreach Console.println + Console.println() Console.println("set4 contains the following elements:"); set4.print(); - Console.println; + Console.println() Console.println("2 <- set2: " + (set2 contains 2)); Console.println("3 <- set2: " + set2.contains(3)); - Console.println; + Console.println() Console.println("setx = " 
+ setx); Console.println("setx * 2 = " + (setx.map(x => 2 * x))); - Console.println; + Console.println() Console.println("setx = " + setx); Console.println("sety = " + sety); @@ -331,7 +331,7 @@ object M8 { Console.println("sety > 0 = " + (sety.filter(x => x > 0))); Console.println("setx & sety = " + (setx.intersect2(sety))); Console.println("sety & setx = " + (sety.intersect2(setx))); - Console.println; + Console.println() } } @@ -363,18 +363,18 @@ object M9 { override def toString() = asString; } - def test = { + def test() = { Console.println(new Rational(2,2).asString); Console.println(new Rational(2,2).toString()); Console.println(new Rational(2,2)); - Console.println; + Console.println() } } //############################################################################ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { M0; M1; M2; @@ -383,8 +383,8 @@ object Test { M5; M6; M7; - M8.test; - M9.test; + M8.test() + M9.test() () } } diff --git a/test/files/run/Course-2002-04.scala b/test/files/run/Course-2002-04.scala index 368e29db453d..3d27b21ef847 100644 --- a/test/files/run/Course-2002-04.scala +++ b/test/files/run/Course-2002-04.scala @@ -17,7 +17,7 @@ object M0 { } } - def test = { + def test() = { val isort: List[Int] => List[Int] = quicksort[Int]((x,y) => x < y); val list0 = List(6,3,1,8,7,1,2,5,8,4,3,4,8); val list1 = quicksort[Int]((x,y) => x < y)(list0); @@ -34,7 +34,7 @@ object M0 { Console.println("list4 = " + list4); Console.println("list5 = " + list5); Console.println("list6 = " + list6); - Console.println; + Console.println() } } @@ -92,7 +92,7 @@ object M1 { array2list(array); } - def test = { + def test() = { val list0 = List(); val list1 = List(0); val list2 = List(0,1); @@ -116,7 +116,7 @@ object M1 { Console.println("list8: " + list8 + " -> " + isort(list8)); Console.println("list9: " + list9 + " -> " + isort(list9)); Console.println("listA: " + listA + " -> " + isort(listA)); - Console.println; + 
Console.println() } } @@ -132,14 +132,14 @@ object M2 { horner(x, coefs.tail) * x + coefs.head } - def test = { + def test() = { val poly = List(9.0,5.0,7.0,5.0); Console.println("f(x) = 5x^3+7x^2+5x+9"); Console.println("f(0) = " + horner(0, poly)); Console.println("f(1) = " + horner(1, poly)); Console.println("f(2) = " + horner(2, poly)); Console.println("f(3) = " + horner(3, poly)); - Console.println; + Console.println() } } @@ -172,49 +172,47 @@ object M3 { m1.map(row => matrixTimesVector(columns, row)) } - def test = { + def test() = { val v1 = List(2.0,3.0,4.0); val v2 = List(6.0,7.0,8.0); def id = List(List(1.0,0.0,0.0),List(0.0,1.0,0.0),List(0.0,0.0,1.0)); def m1 = List(List(2.0,0.0,0.0),List(0.0,2.0,0.0),List(0.0,0.0,2.0)); def m2 = List(List(1.0,2.0,3.0),List(4.0,5.0,6.0),List(7.0,8.0,9.0)); - def v = List(2.0,3.0,4.0); - Console.println("v1 = " + v1); Console.println("v2 = " + v2); - Console.println; + Console.println() Console.println("id = " + id); Console.println("m1 = " + m1); Console.println("m2 = " + m2); - Console.println; + Console.println() Console.println("v1 * v1 = " + dotproduct(v1,v1)); Console.println("v1 * v2 = " + dotproduct(v1,v2)); Console.println("v2 * v1 = " + dotproduct(v2,v1)); Console.println("v1 * v2 = " + dotproduct(v1,v2)); - Console.println; + Console.println() Console.println("id * v1 = " + matrixTimesVector(id,v1)); Console.println("m1 * v1 = " + matrixTimesVector(m1,v1)); Console.println("m2 * v1 = " + matrixTimesVector(m2,v1)); - Console.println; + Console.println() Console.println("trn(id) = " + transpose(id)); Console.println("trn(m1) = " + transpose(m1)); Console.println("trn(m2) = " + transpose(m2)); - Console.println; + Console.println() Console.println("List(v1) * id = " + matrixTimesMatrix(List(v1),id)); Console.println("List(v1) * m1 = " + matrixTimesMatrix(List(v1),m1)); Console.println("List(v1) * m2 = " + matrixTimesMatrix(List(v1),m2)); - Console.println; + Console.println() Console.println("id * List(v1) = " + 
matrixTimesMatrix(id,List(v1))); Console.println("m1 * List(v1) = " + matrixTimesMatrix(m1,List(v1))); Console.println("m2 * List(v1) = " + matrixTimesMatrix(m2,List(v1))); - Console.println; + Console.println() Console.println("id * id = " + matrixTimesMatrix(id,id)); Console.println("id * m1 = " + matrixTimesMatrix(id,m1)); @@ -225,7 +223,7 @@ object M3 { Console.println("m1 * m2 = " + matrixTimesMatrix(m1,m2)); Console.println("m2 * m1 = " + matrixTimesMatrix(m2,m1)); Console.println("m2 * m2 = " + matrixTimesMatrix(m2,m2)); - Console.println; + Console.println() } } @@ -233,10 +231,10 @@ object M3 { object Test { def main(args: Array[String]): Unit = { - M0.test; - M1.test; - M2.test; - M3.test; + M0.test() + M1.test() + M2.test() + M3.test() () } } diff --git a/test/files/run/Course-2002-05.scala b/test/files/run/Course-2002-05.scala index 80317bc757ca..d1d0a251be22 100644 --- a/test/files/run/Course-2002-05.scala +++ b/test/files/run/Course-2002-05.scala @@ -25,24 +25,24 @@ object M0 { } } - def test = { + def test() = { Console.println(partition[Int](List(1,2,3,4,5,6,7,8), (x => x < 0))); Console.println(partition[Int](List(1,2,3,4,5,6,7,8), (x => x < 5))); Console.println(partition[Int](List(1,2,3,4,5,6,7,8), (x => x < 9))); - Console.println; + Console.println() Console.println(partition[Int](List(8,7,6,5,4,3,2,1), (x => x < 0))); Console.println(partition[Int](List(8,7,6,5,4,3,2,1), (x => x < 5))); Console.println(partition[Int](List(8,7,6,5,4,3,2,1), (x => x < 9))); - Console.println; + Console.println() Console.println(partition[Int](List(7,2,1,5,4,3,8,6), (x => x < 0))); Console.println(partition[Int](List(7,2,1,5,4,3,8,6), (x => x < 5))); Console.println(partition[Int](List(7,2,1,5,4,3,8,6), (x => x < 9))); - Console.println; + Console.println() Console.println(quicksort[Int]((x,y) => x < y)(List(7,2,1,5,4,3,8,6))); - Console.println; + Console.println() } } @@ -65,24 +65,24 @@ object M1 { } } - def test = { + def test() = { 
Console.println(partition[Int](List(1,2,3,4,5,6,7,8), (x => x < 0))); Console.println(partition[Int](List(1,2,3,4,5,6,7,8), (x => x < 5))); Console.println(partition[Int](List(1,2,3,4,5,6,7,8), (x => x < 9))); - Console.println; + Console.println() Console.println(partition[Int](List(8,7,6,5,4,3,2,1), (x => x < 0))); Console.println(partition[Int](List(8,7,6,5,4,3,2,1), (x => x < 5))); Console.println(partition[Int](List(8,7,6,5,4,3,2,1), (x => x < 9))); - Console.println; + Console.println() Console.println(partition[Int](List(7,2,1,5,4,3,8,6), (x => x < 0))); Console.println(partition[Int](List(7,2,1,5,4,3,8,6), (x => x < 5))); Console.println(partition[Int](List(7,2,1,5,4,3,8,6), (x => x < 9))); - Console.println; + Console.println() Console.println(quicksort[Int]((x,y) => x < y)(List(7,2,1,5,4,3,8,6))); - Console.println; + Console.println() } } @@ -100,13 +100,13 @@ object M2 { } } - def test = { + def test() = { Console.println(powerset(List())); Console.println(powerset(List(1))); Console.println(powerset(List(1,2))); Console.println(powerset(List(1,2,3))); Console.println(powerset(List(1,2,3,4))); - Console.println; + Console.println() } } @@ -144,12 +144,12 @@ object M3 { placeQueens(n) } - def test { + def test(): Unit = { Console.println("queens(1) = " + queens(1)); Console.println("queens(2) = " + queens(2)); Console.println("queens(3) = " + queens(3)); Console.println("queens(4) = " + queens(4)); - Console.println; + Console.println() } } @@ -189,24 +189,24 @@ object M4 { placeQueens(n); } - def test { + def test(): Unit = { Console.println("queens(1) = " + queens(1)); Console.println("queens(2) = " + queens(2)); Console.println("queens(3) = " + queens(3)); Console.println("queens(4) = " + queens(4)); - Console.println; + Console.println() } } //############################################################################ object Test { - def main(args: Array[String]) { - M0.test; - M1.test; - M2.test; - M3.test; - M4.test; + def main(args: 
Array[String]): Unit = { + M0.test() + M1.test() + M2.test() + M3.test() + M4.test() () } } diff --git a/test/files/run/Course-2002-06.scala b/test/files/run/Course-2002-06.scala index 908a934041b8..e82729682bb0 100644 --- a/test/files/run/Course-2002-06.scala +++ b/test/files/run/Course-2002-06.scala @@ -70,10 +70,10 @@ abstract class Graphics(_width: Double, _height: Double) { } /** updates the contents of the output device*/ - def repaint = (); + def repaint() = (); /** Add the last touch to the picture.*/ - def close : Unit; + def close(): Unit; } //############################################################################ @@ -122,7 +122,7 @@ class PostScript (filename: String, _width: Double, _height: Double) } def plotLine(x1: Double, y1: Double, x2: Double, y2: Double): Unit = { - Console.println(round(x1) + " " + round(y1) + " m " + + Console.println(round(x1).toString + " " + round(y1) + " m " + round(x2) + " " + round(y2) + " l"); } @@ -132,12 +132,12 @@ class PostScript (filename: String, _width: Double, _height: Double) Console.println("%%BoundingBox: 0 0 " + mm2ps(psWidth) + " " + mm2ps(psHeight)); Console.println("%%EndComments\n"); Console.println("/m {moveto} bind def\n/l {lineto} bind def\n"); - Console.println(mm2ps(line_thickness) + " setlinewidth\nnewpath"); + Console.println(mm2ps(line_thickness).toString + " setlinewidth\nnewpath"); /** Terminate the PS document and close the file stream. */ - def close : Unit = { + def close(): Unit = { Console.println("stroke\nshowpage\n%%EOF"); - Console.flush; + Console.flush() } } @@ -224,10 +224,10 @@ object M0 { new Vector(0.0, 0.0), new Vector(0.0, 2.0/3.0), new Vector(1.0, 0.0))); - canvas.repaint + canvas.repaint() } - def test = { + def test() = { val psfile = "-"; val canvas: Graphics = new PostScript(psfile, 2, 2); @@ -245,7 +245,7 @@ object M0 { threeHouses(identFrame); // Don't forget to close the canvas! 
- canvas.close + canvas.close() } } @@ -253,7 +253,7 @@ object M0 { object Test { def main(args: Array[String]): Unit = { - M0.test; + M0.test() () } } diff --git a/test/files/run/Course-2002-07.scala b/test/files/run/Course-2002-07.scala index db6e1d8e0449..5a43d2f9097a 100644 --- a/test/files/run/Course-2002-07.scala +++ b/test/files/run/Course-2002-07.scala @@ -49,7 +49,7 @@ object M0 { else sys.error("unknown expression") } - def test = { + def test() = { Console.println(" 0 = " + eval(new Number(0))); Console.println(" 1 = " + eval(new Number(1))); Console.println(" 0 + 1 = " + @@ -58,7 +58,7 @@ object M0 { eval(new Sum(new Number(1),new Number(2)))); Console.println("2 + 3 + 4 = " + eval(new Sum(new Sum(new Number(2),new Number(3)),new Number(4)))); - Console.println; + Console.println() } } @@ -77,7 +77,7 @@ object M1 { def eval: Int = e1.eval + e2.eval; } - def test = { + def test() = { Console.println(" 0 = " + new Number(0).eval); Console.println(" 1 = " + new Number(1).eval); Console.println(" 0 + 1 = " + @@ -86,7 +86,7 @@ object M1 { new Sum(new Number(1),new Number(2)).eval); Console.println("2 + 3 + 4 = " + new Sum(new Sum(new Number(2),new Number(3)),new Number(4)).eval); - Console.println; + Console.println() } } @@ -94,7 +94,7 @@ object M1 { object M2 { - trait Expr; + sealed trait Expr; case class Number(n: Int) extends Expr; case class Sum(e1: Expr, e2: Expr) extends Expr; @@ -103,14 +103,14 @@ object M2 { case Sum(e1, e2) => eval(e1) + eval(e2) } - def test = { + def test() = { Console.println(" 0 = " + eval(Number(0))); Console.println(" 1 = " + eval(Number(1))); Console.println(" 0 + 1 = " + eval(Sum(Number(0),Number(1)))); Console.println(" 1 + 2 = " + eval(Sum(Number(1),Number(2)))); Console.println("2 + 3 + 4 = " + eval(Sum(Sum(Number(2),Number(3)), Number(4)))); - Console.println; + Console.println() } } @@ -118,7 +118,7 @@ object M2 { object M3 { - trait Expr { + sealed trait Expr { def eval: Int = this match { case Number(n) => n case 
Sum(e1, e2) => e1.eval + e2.eval @@ -127,14 +127,14 @@ object M3 { case class Number(n: Int) extends Expr; case class Sum(e1: Expr, e2: Expr) extends Expr; - def test = { + def test() = { Console.println(" 0 = " + Number(0).eval); Console.println(" 1 = " + Number(1).eval); Console.println(" 0 + 1 = " + Sum(Number(0),Number(1)).eval); Console.println(" 1 + 2 = " + Sum(Number(1),Number(2)).eval); Console.println("2 + 3 + 4 = " + Sum(Sum(Number(2),Number(3)), Number(4)).eval); - Console.println; + Console.println() } } @@ -152,7 +152,7 @@ object M4 { Console.println(concat(xss).toString + " = concat(" + xss + ")"); // !!! .toString } - def test = { + def test() = { test_concat(List()); test_concat(List(List())); test_concat(List(List(),List())); @@ -172,7 +172,7 @@ object M4 { test_concat(List(List[Int](),List(1),List(2,3,4,5,6))); // !!! [int] test_concat(List(List[Int](),List[Int](),List(1,2,3,4,5,6))); // !!! [int] test_concat(List(List(1,2),List(3,4),List(5,6))); - Console.println; + Console.println() } } @@ -191,7 +191,7 @@ object M5 { Console.println(zipFun(xs,ys).toString + " = zipFun(" + xs + "," + ys + ")"); // !!! .toString } - def test = { + def test() = { test_zipFun(List(),List()); test_zipFun(List(),List('a','b','c')); test_zipFun(List(1,2,3),List()); @@ -206,7 +206,7 @@ object M5 { test_zipFun(List(1,2,3),List('a','b','c')); - Console.println; + Console.println() } } @@ -225,7 +225,7 @@ object M6 { Console.println(zipFun(xs,ys).toString + " = zipFun(" + xs + "," + ys + ")"); // !!! .toString } - def test = { + def test() = { test_zipFun(List(),List()); test_zipFun(List(),List('a','b','c')); test_zipFun(List(1,2,3),List()); @@ -240,7 +240,7 @@ object M6 { test_zipFun(List(1,2,3),List('a','b','c')); - Console.println; + Console.println() } } @@ -258,7 +258,7 @@ object M7 { Console.println(heads(xss).toString + " = heads(" + xss + ")"); // !!! 
.toString } - def test = { + def test() = { test_heads(List()); test_heads(List(List())); test_heads(List(List(),List())); @@ -283,7 +283,7 @@ object M7 { test_heads(List(List(1,2),List(3,4),List(5,6))); - Console.println; + Console.println() } } @@ -304,7 +304,7 @@ object M8 { } - def test = { + def test() = { test_heads(List()); test_heads(List(List())); test_heads(List(List(),List())); @@ -329,7 +329,7 @@ object M8 { test_heads(List(List(1,2),List(3,4),List(5,6))); - Console.println; + Console.println() } } @@ -338,7 +338,7 @@ object M8 { object M9 { - trait Expr { + sealed trait Expr { def derive(v: Var): Expr = this match { case Number(_) => Number(0) case Var(name) => if (name == v.name) Number(1) else Number(0) @@ -359,13 +359,13 @@ object M9 { override def toString = "Prod(" + e1 + ", " + e2 + ")"; // !!! remove ! } - def test = { + def test() = { val x = Var("x"); val f0 = Prod(x, x); val f1 = f0 derive x; Console.println("f (x) = " + f0); Console.println("f'(x) = " + f1); - Console.println; + Console.println() } } @@ -379,7 +379,7 @@ object MA { case (k1,v1) :: xs1 => if (k1 == k) v1 else lookup(xs1, k) } - trait Expr { + sealed trait Expr { def + (that: Expr) = Sum(this, that); def * (that: Expr) = Prod(this, that); def derive(v: Var): Expr = this match { @@ -425,7 +425,7 @@ object MA { loop } - def test = { + def test() = { val x = Var("x"); val f0 = x * x; @@ -440,7 +440,7 @@ object MA { Console.println("g (3) = " + evalvars(List(("x",3)))(g0)); Console.println("g'(3) = " + evalvars(List(("x",3)))(g1)); - Console.println; + Console.println() } } @@ -506,7 +506,7 @@ object MB { case _ => List() } - private def +< (that: Expr): Boolean = (this + 0 @@ -625,11 +625,11 @@ object MB { } } - def test = { + def test() = { val _1 = Lit(1); val _2 = Lit(2); val _3 = Lit(3); - val _4 = Lit(4); + @annotation.unused val _4 = Lit(4); val _5 = Lit(5); val x = Var("x"); @@ -651,7 +651,7 @@ object MB { Console.println("tf(x) = " + tf); Console.println("tg(x) = " + tg); 
Console.println("th(x) = " + th); - Console.println; + Console.println() val f4 = (x+ _3)*(_2+x)*x*(x+ _1) + (x+ _5)*(x*(x+ _2)+x+ _1) + (x^2) + x; val f3 = f4.derive(x); @@ -664,9 +664,9 @@ object MB { Console.println("f2(x) = " + f2); Console.println("f1(x) = " + f1); Console.println("f0(x) = " + f0); - Console.println; + Console.println() - def check(n: String, f: Expr, x: Int, e: Int) { + def check(n: String, f: Expr, x: Int, e: Int): Unit = { val a: Int = f.evaluate(List(("x",x))); val s: String = if (a == e) "ok" else "KO(" + e + ")"; Console.println(n + "(" + x + ") = " + a + " " + s); @@ -677,44 +677,44 @@ object MB { check("f4", f4, 2, 203); check("f4", f4, 3, 524); check("f4", f4, 4, 1121); - Console.println; + Console.println() check("f3", f3, 0, 23); check("f3", f3, 1, 88); check("f3", f3, 2, 219); check("f3", f3, 3, 440); - Console.println; + Console.println() check("f2", f2, 0, 40); check("f2", f2, 1, 94); check("f2", f2, 2, 172); - Console.println; + Console.println() check("f1", f1, 0, 42); check("f1", f1, 1, 66); - Console.println; + Console.println() check("f0", f0, 0, 24); - Console.println; + Console.println() } } //############################################################################ object Test { - def main(args: Array[String]) { - M0.test; - M1.test; - M2.test; - M3.test; - M4.test; - M5.test; + def main(args: Array[String]): Unit = { + M0.test() + M1.test() + M2.test() + M3.test() + M4.test() + M5.test() // !!! 
M6.test; - M7.test; - M8.test; - M9.test; - MA.test; - MB.test; + M7.test() + M8.test() + M9.test() + MA.test() + MB.test() () } } diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala index 1d0e02262df7..4031cf14d15e 100644 --- a/test/files/run/Course-2002-08.scala +++ b/test/files/run/Course-2002-08.scala @@ -9,14 +9,14 @@ object M0 { var x: String = "abc"; var count = 111; - def test = { + def test() = { Console.println("x = " + x); Console.println("count = " + count); x = "hello"; count = count + 1; Console.println("x = " + x); Console.println("count = " + count); - Console.println; + Console.println() } } @@ -36,7 +36,7 @@ object M1 { } else sys.error("insufficient funds"); } - def test0 = { + def test0() = { val account = new BankAccount(); Console.print("account deposit 50 -> "); Console.println((account deposit 50).toString()); // !!! .toString @@ -45,28 +45,28 @@ object M1 { Console.print("account withdraw 20 -> "); Console.println(account withdraw 20); Console.print("account withdraw 15 -> "); - Console.println; + Console.println() } - def test1 = { + def test1() = { val x = new BankAccount(); - val y = new BankAccount(); + @annotation.unused val y = new BankAccount(); Console.print("x deposit 30 -> "); Console.println((x deposit 30).toString()); // !!! .toString Console.print("y withdraw 20 -> "); - Console.println; + Console.println() } - def test2 = { + def test2() = { val x = new BankAccount(); - val y = new BankAccount(); + @annotation.unused val y = new BankAccount(); Console.print("x deposit 30 -> "); Console.println((x deposit 30).toString()); // !!! 
.toString Console.print("x withdraw 20 -> "); Console.println(x withdraw 20); } - def test3 = { + def test3() = { val x = new BankAccount(); val y = x; Console.print("x deposit 30 -> "); @@ -75,11 +75,11 @@ object M1 { Console.println(y withdraw 20); } - def test = { - test0; Console.println; - test1; Console.println; - test2; Console.println; - test3; Console.println; + def test() = { + test0(); Console.println() + test1(); Console.println() + test2(); Console.println() + test3(); Console.println() } } @@ -101,12 +101,12 @@ object M2 { r } - def test = { + def test() = { Console.println("2^0 = " + power(2,0)); Console.println("2^1 = " + power(2,1)); Console.println("2^2 = " + power(2,2)); Console.println("2^3 = " + power(2,3)); - Console.println; + Console.println() } } @@ -121,12 +121,12 @@ object M3 { r } - def test = { + def test() = { Console.println("2^0 = " + power(2,0)); Console.println("2^1 = " + power(2,1)); Console.println("2^2 = " + power(2,2)); Console.println("2^3 = " + power(2,3)); - Console.println; + Console.println() } } @@ -134,11 +134,11 @@ object M3 { object M4 { - def test = { - for (i <- range(1, 4)) { Console.print(i + " ") }; - Console.println; + def test() = { + for (i <- range(1, 4)) { Console.print(s"$i ") }; + Console.println() Console.println(for (i <- range(1, 4)) yield i); - Console.println; + Console.println() } } @@ -179,7 +179,7 @@ object M5 { agenda = insert(agenda, curtime + delay) } - private def next: Unit = agenda match { + private def next(): Unit = agenda match { case List() => () case (time, action) :: ag1 => { agenda = ag1; @@ -188,9 +188,9 @@ object M5 { } } - def run: Unit = { + def run(): Unit = { afterDelay(0){() => Console.println("*** simulation started ***"); } - while (!agenda.isEmpty) { next } + while (!agenda.isEmpty) { next() } } } @@ -263,7 +263,7 @@ object M5 { val AndGateDelay = 3; val OrGateDelay = 5; - def invert = { + def invert() = { val ain = new Wire(); val cout = new Wire(); inverter(ain, cout); @@ 
-272,9 +272,9 @@ object M5 { def test(a: Int) = { ain setSignal (if (a == 0) false else true); - run; + run() Console.println("!" + a + " = " + result); - Console.println; + Console.println() } probe("out ", cout); @@ -283,7 +283,7 @@ object M5 { test(1); } - def and = { + def and() = { val ain = new Wire(); val bin = new Wire(); val cout = new Wire(); @@ -294,13 +294,13 @@ object M5 { def test(a: Int, b: Int) = { ain setSignal (if (a == 0) false else true); bin setSignal (if (b == 0) false else true); - run; - Console.println(a + " & " + b + " = " + result); - Console.println; + run() + Console.println(a.toString + " & " + b + " = " + result); + Console.println() } probe("out ", cout); - Console.println; + Console.println() test(0,0); test(0,1); @@ -308,7 +308,7 @@ object M5 { test(1,1); } - def or = { + def or() = { val ain = new Wire(); val bin = new Wire(); val cout = new Wire(); @@ -319,13 +319,13 @@ object M5 { def test(a: Int, b: Int) = { ain setSignal (if (a == 0) false else true); bin setSignal (if (b == 0) false else true); - run; - Console.println(a + " | " + b + " = " + result); - Console.println; + run() + Console.println(a.toString + " | " + b + " = " + result); + Console.println() } probe("out ", cout); - Console.println; + Console.println() test(0,0); test(0,1); @@ -333,7 +333,7 @@ object M5 { test(1,1); } - def half = { + def half() = { val ain = new Wire(); val bin = new Wire(); val sout = new Wire(); @@ -347,14 +347,14 @@ object M5 { def test(a: Int, b: Int) = { ain setSignal (if (a == 0) false else true); bin setSignal (if (b == 0) false else true); - run; - Console.println(a + " + " + b + " = " + result); - Console.println; + run() + Console.println(a.toString + " + " + b + " = " + result); + Console.println() } probe("sum ", sout); probe("carry", cout); - Console.println; + Console.println() test(0,0); test(0,1); @@ -362,7 +362,7 @@ object M5 { test(1,1); } - def full = { + def full() = { val ain = new Wire(); val bin = new Wire(); val cin = 
new Wire(); @@ -378,14 +378,14 @@ object M5 { ain setSignal (if (a == 0) false else true); bin setSignal (if (b == 0) false else true); cin setSignal (if (c == 0) false else true); - run; - Console.println(a + " + " + b + " + " + c + " = " + result); - Console.println; + run() + Console.println(a.toString + " + " + b + " + " + c + " = " + result); + Console.println() } probe("sum ", sout); probe("carry", cout); - Console.println; + Console.println() test(0,0,0); test(0,0,1); @@ -398,13 +398,13 @@ object M5 { } } - def test = { - val sim = new Test(); - sim.invert; - sim.and; - sim.or; - sim.half; - sim.full; + def test() = { + val sim = new Test() + sim.invert() + sim.and() + sim.or() + sim.half() + sim.full() } } @@ -429,7 +429,7 @@ class Simulator() { agenda = insert(agenda, curtime + delay) } - def next: Unit = agenda match { + def next(): Unit = agenda match { case List() => () case (time, action) :: rest => { agenda = rest; @@ -440,9 +440,9 @@ class Simulator() { protected def currentTime: Int = curtime; - def run = { + def run() = { afterDelay(0){() => Console.println("*** simulation started ***"); } - while (!agenda.isEmpty) { next } + while (!agenda.isEmpty) { next() } } } @@ -556,7 +556,7 @@ class Main() extends CircuitSimulator() { val AndGateDelay = 3; val OrGateDelay = 5; - def main = { + def main() = { val n = 3; val outNum = 1 << n; @@ -571,15 +571,15 @@ class Main() extends CircuitSimulator() { for ((x,o) <- range(0,outNum) zip out) { probe("out" + x, o) } in.setSignal(true); - run; + run() ctrl(0).setSignal(true); - run; + run() ctrl(1).setSignal(true); - run; + run() ctrl(2).setSignal(true); - run; + run() ctrl(0).setSignal(false); - run; + run() } } @@ -587,13 +587,13 @@ class Main() extends CircuitSimulator() { object Test { def main(args: Array[String]): Unit = { - M0.test; - M1.test; - M2.test; - M3.test; - M4.test; - M5.test; - new Main().main; + M0.test() + M1.test() + M2.test() + M3.test() + M4.test() + M5.test() + new Main().main() () } } 
diff --git a/test/files/run/Course-2002-09.scala b/test/files/run/Course-2002-09.scala index 704f2ec0aa5a..9e26b51e827d 100644 --- a/test/files/run/Course-2002-09.scala +++ b/test/files/run/Course-2002-09.scala @@ -3,23 +3,23 @@ //############################################################################ trait Constraint { - def newValue: Unit; - def dropValue: Unit + def newValue(): Unit; + def dropValue(): Unit } object NoConstraint extends Constraint { - def newValue: Unit = sys.error("NoConstraint.newValue"); - def dropValue: Unit = sys.error("NoConstraint.dropValue"); + def newValue(): Unit = sys.error("NoConstraint.newValue"); + def dropValue(): Unit = sys.error("NoConstraint.dropValue"); } class Adder(a1: Quantity,a2: Quantity,sum: Quantity) extends Constraint { - def newValue = (a1.getValue, a2.getValue, sum.getValue) match { + def newValue() = (a1.getValue, a2.getValue, sum.getValue) match { case (Some(x1), Some(x2), _ ) => sum.setValue(x1 + x2, this) case (Some(x1), _ , Some(r)) => a2.setValue(r - x1, this) case (_ , Some(x2), Some(r)) => a1.setValue(r - x2, this) case _ => } - def dropValue: Unit = { + def dropValue(): Unit = { a1.forgetValue(this); a2.forgetValue(this); sum.forgetValue(this); } a1 connect this; @@ -29,7 +29,7 @@ class Adder(a1: Quantity,a2: Quantity,sum: Quantity) extends Constraint { class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity) extends Constraint { - def newValue = (m1.getValue, m2.getValue, prod.getValue) match { + def newValue() = (m1.getValue, m2.getValue, prod.getValue) match { case (Some(0d), _ , _ ) => prod.setValue(0, this); case (_ , Some(0d), _ ) => prod.setValue(0, this); case (Some(x1), Some(x2), _ ) => prod.setValue(x1 * x2, this) @@ -37,7 +37,7 @@ class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity) case (_, Some(x2), Some(r)) => m1.setValue(r / x2, this) case _ => } - def dropValue: Unit = { + def dropValue(): Unit = { m1.forgetValue(this); m2.forgetValue(this); prod.forgetValue(this); } m1 
connect this; @@ -46,13 +46,13 @@ class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity) } class Squarer(square: Quantity, root: Quantity) extends Constraint { - def newValue: Unit = (square.getValue, root.getValue) match { + def newValue(): Unit = (square.getValue, root.getValue) match { case (Some(x), _ )if (x < 0) => sys.error("Square of negative number") case (Some(x), _ ) => root.setValue(Math.sqrt(x), this) case (_ , Some(x)) => square.setValue(x*x, this) case _ => } - def dropValue: Unit = { + def dropValue(): Unit = { square.forgetValue(this); root.forgetValue(this); } square connect this; @@ -60,11 +60,11 @@ class Squarer(square: Quantity, root: Quantity) extends Constraint { } class Eq(a: Quantity, b: Quantity) extends Constraint { - def newValue = ((a.getValue, b.getValue): @unchecked) match { + def newValue() = ((a.getValue, b.getValue): @unchecked) match { case (Some(x), _ ) => b.setValue(x, this); case (_ , Some(y)) => a.setValue(y, this); } - def dropValue { + def dropValue(): Unit = { a.forgetValue(this); b.forgetValue(this); } a connect this; @@ -72,16 +72,16 @@ class Eq(a: Quantity, b: Quantity) extends Constraint { } class Constant(q: Quantity, v: Double) extends Constraint { - def newValue: Unit = sys.error("Constant.newValue"); - def dropValue: Unit = sys.error("Constant.dropValue"); + def newValue(): Unit = sys.error("Constant.newValue"); + def dropValue(): Unit = sys.error("Constant.dropValue"); q connect this; q.setValue(v, this); } class Probe(name: String, q: Quantity) extends Constraint { - def newValue: Unit = printProbe(q.getValue); - def dropValue: Unit = printProbe(None); - private def printProbe(v: Option[Double]) { + def newValue(): Unit = printProbe(q.getValue); + def dropValue(): Unit = printProbe(None); + private def printProbe(v: Option[Double]): Unit = { val vstr = v match { case Some(x) => x.toString() case None => "?" 
@@ -104,7 +104,7 @@ class Quantity() { case None => informant = setter; value = Some(v); for (c <- constraints; if !(c == informant)) { - c.newValue; + c.newValue() } } def setValue(v: Double): Unit = setValue(v, NoConstraint); @@ -112,15 +112,15 @@ class Quantity() { def forgetValue(retractor: Constraint): Unit = { if (retractor == informant) { value = None; - for (c <- constraints; if !(c == informant)) c.dropValue; + for (c <- constraints if !(c == informant)) c.dropValue() } } - def forgetValue: Unit = forgetValue(NoConstraint); + def forgetValue(): Unit = forgetValue(NoConstraint); def connect(c: Constraint) = { constraints = c :: constraints; value match { - case Some(_) => c.newValue + case Some(_) => c.newValue() case None => } } @@ -179,26 +179,26 @@ object M0 { new Constant(y, 32); } - def test = { + def test() = { val c = new Quantity(); new Probe("c", c); val f = new Quantity(); new Probe("f", f); CFconverter(c, f); c.setValue(0); - c.forgetValue; - Console.println; + c.forgetValue() + Console.println() c.setValue(100); - c.forgetValue; - Console.println; + c.forgetValue() + Console.println() f.setValue(32); - f.forgetValue; - Console.println; + f.forgetValue() + Console.println() f.setValue(212); - f.forgetValue; - Console.println; + f.forgetValue() + Console.println() } } @@ -221,16 +221,16 @@ object M1 { def show_c2f(c: Quantity, f: Quantity, v: Int) = { c.setValue(v); Console.println(c.str + " Celsius -> " + f.str + " Fahrenheits"); - c.forgetValue; + c.forgetValue() } def show_f2c(c: Quantity, f: Quantity, v: Int) = { f.setValue(v); Console.println(f.str + " Fahrenheits -> " + c.str + " Celsius"); - f.forgetValue; + f.forgetValue() } - def test = { + def test() = { val c = new Quantity(); val f = new Quantity(); CFconverter(c, f); @@ -239,7 +239,7 @@ object M1 { show_c2f(c, f, 100); show_f2c(c, f, 32); show_f2c(c, f, 212); - Console.println; + Console.println() } } @@ -261,10 +261,10 @@ object M2 { def show(x: Option[Int], y: Option[Int], z: 
Option[Int]) = { Console.print("a = " +set(a,x)+ ", b = " +set(b,y)+ ", c = " +set(c,z)); Console.println(" => " + a.str + " * " + b.str + " = " + c.str); - a.forgetValue; b.forgetValue; c.forgetValue; + a.forgetValue(); b.forgetValue(); c.forgetValue(); } - def test = { + def test() = { show(None , None , None ); show(Some(2), None , None ); show(None , Some(3), None ); @@ -273,7 +273,7 @@ object M2 { show(Some(2), None , Some(6)); show(None , Some(3), Some(6)); show(Some(2), Some(3), Some(6)); - Console.println; + Console.println() show(Some(0), None , None ); show(None , Some(0), None ); @@ -286,7 +286,7 @@ object M2 { show(Some(0), Some(7), Some(0)); show(Some(7), Some(0), Some(0)); show(Some(0), Some(0), Some(0)); - Console.println; + Console.println() } } @@ -295,7 +295,7 @@ object M2 { object M3 { - def test = { + def test() = { val a = new Quantity(); val b = new Quantity(); val c = new Quantity(); @@ -303,28 +303,28 @@ object M3 { a.setValue(3); b.setValue(4); Console.println("a = 3, b = 4 => c = " + c.str); - a.forgetValue; b.forgetValue; + a.forgetValue(); b.forgetValue(); a.setValue(3); c.setValue(5); Console.println("a = 3, c = 5 => b = " + b.str); - a.forgetValue; c.forgetValue; + a.forgetValue(); c.forgetValue(); b.setValue(4); c.setValue(5); Console.println("b = 4, c = 5 => a = " + a.str); - b.forgetValue; c.forgetValue; + b.forgetValue(); c.forgetValue(); - Console.println; + Console.println() } } //############################################################################ object Test { - def main(args: Array[String]) { - M0.test; - M1.test; - M2.test; - M3.test; + def main(args: Array[String]): Unit = { + M0.test() + M1.test() + M2.test() + M3.test() () } } diff --git a/test/files/run/Course-2002-10.scala b/test/files/run/Course-2002-10.scala index 4cfa1deb0462..d87ae350db6f 100644 --- a/test/files/run/Course-2002-10.scala +++ b/test/files/run/Course-2002-10.scala @@ -6,16 +6,16 @@ import math.{Pi, log} object M0 { - def addStream (s1: 
Stream[Int], s2: Stream[Int]): Stream[Int] = - Stream.cons(s1.head + s2.head, addStream(s1.tail, s2.tail)); + def addLazyList (s1: LazyList[Int], s2: LazyList[Int]): LazyList[Int] = + LazyList.cons(s1.head + s2.head, addLazyList(s1.tail, s2.tail)); - val fib: Stream[Int] = - Stream.cons(0, Stream.cons(1, addStream(this.fib, this.fib.tail))); + val fib: LazyList[Int] = + LazyList.cons(0, LazyList.cons(1, addLazyList(this.fib, this.fib.tail))); - def test = { + def test() = { var i = 0; fib.take(20).foreach(n => {Console.println("fib("+i+") = "+n); i=i+1}); - Console.println; + Console.println() } } @@ -23,36 +23,36 @@ object M0 { object M1 { - def scale(x: Double, s: Stream[Double]): Stream[Double] = + def scale(x: Double, s: LazyList[Double]): LazyList[Double] = s map { e: Double => e*x } - def partialSums(s: Stream[Double]): Stream[Double] = - Stream.cons(s.head, partialSums(s.tail) map (x => x + s.head)); + def partialSums(s: LazyList[Double]): LazyList[Double] = + LazyList.cons(s.head, partialSums(s.tail) map (x => x + s.head)); - def euler(s: Stream[Double]): Stream[Double] = { + def euler(s: LazyList[Double]): LazyList[Double] = { val nm1 = s apply 0; val n = s apply 1; val np1 = s apply 2; - Stream.cons(np1 - ((np1 - n)*(np1 - n) / (nm1 - 2*n + np1)),euler(s.tail)) + LazyList.cons(np1 - ((np1 - n)*(np1 - n) / (nm1 - 2*n + np1)),euler(s.tail)) }; - def better(s: Stream[Double], transform: Stream[Double] => Stream[Double]) - : Stream[Stream[Double]] = - Stream.cons(s, better(transform(s), transform)); + def better(s: LazyList[Double], transform: LazyList[Double] => LazyList[Double]) + : LazyList[LazyList[Double]] = + LazyList.cons(s, better(transform(s), transform)); - def veryGood(s: Stream[Double], transform: Stream[Double] => Stream[Double]) - : Stream[Double] = + def veryGood(s: LazyList[Double], transform: LazyList[Double] => LazyList[Double]) + : LazyList[Double] = better(s, transform) map (x => x.head); - def lnSummands(n: Double): Stream[Double] = - 
Stream.cons(1.0 / n, lnSummands(n + 1.0) map { x: Double => -x }) + def lnSummands(n: Double): LazyList[Double] = + LazyList.cons(1.0 / n, lnSummands(n + 1.0) map { x: Double => -x }) var ln0 = partialSums(lnSummands(1.0)); var ln1 = euler(ln0); var ln2 = veryGood(ln0, euler); - def piSummands(n: Double): Stream[Double] = - Stream.cons(1.0 / n, piSummands(n + 2.0) map { x: Double => -x }) + def piSummands(n: Double): LazyList[Double] = + LazyList.cons(1.0 / n, piSummands(n + 2.0) map { x: Double => -x }) var pi0 = scale(4.0, partialSums(piSummands(1.0))); var pi1 = euler(pi0); @@ -63,7 +63,7 @@ object M1 { else pad(s + " ", n - 1); def str(d: Double) = { val s = d.toString(); pad(s, 18 - s.length()) }; - def test = { + def test() = { var i = 0; while (i < 10) { Console.print("pi("+i+") = "); @@ -76,7 +76,7 @@ object M1 { Console.print(str(Pi) + ", "); Console.print(str(Pi) + ", "); Console.print(str(Pi) + "\n"); - Console.println; + Console.println() i = 0; while (i < 10) { Console.print("ln("+i+") = "); @@ -89,7 +89,7 @@ object M1 { Console.print(str(log(2)) + ", "); Console.print(str(log(2)) + ", "); Console.print(str(log(2)) + "\n"); - Console.println; + Console.println() } } @@ -100,24 +100,24 @@ object M2 { class IntIterator(start: Int) extends Iterator[Int] { var current: Int = start; def hasNext = true; - def next = { current = current + 1; current - 1 }; + def next() = { current = current + 1; current - 1 }; } class PrimeIterator() extends Iterator[Int] { var current: Iterator[Int] = new IntIterator(2); def hasNext = true; - def next = { - val p = current.next; + def next() = { + val p = current.next(); current = current filter { x => !((x % p) == 0) }; p } } - def test = { + def test() = { val i = (new PrimeIterator()).take(30); Console.print("prime numbers:"); - while (i.hasNext) { Console.print(" " + i.next); } - Console.println; + while (i.hasNext) { Console.print(" " + i.next()); } + Console.println() } } @@ -125,9 +125,9 @@ object M2 { object Test { 
def main(args: Array[String]): Unit = { - M0.test; - M1.test; - M2.test; + M0.test() + M1.test() + M2.test() () } } diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala index a596a33873d3..bb0d001198ab 100644 --- a/test/files/run/Course-2002-13.scala +++ b/test/files/run/Course-2002-13.scala @@ -17,10 +17,10 @@ class Tokenizer(s: String, delimiters: String) extends Iterator[String] { i < s.length() } - def next: String = + def next(): String = if (hasNext) { val start = i; - var ch = s.charAt(i); i = i + 1; + val ch = s.charAt(i); i = i + 1; if (isDelimiter(ch)) ch.toString() else { while (i < s.length() && @@ -123,36 +123,36 @@ object Programs { lhs.toString() + " :- " + rhs.mkString("", ",", "") + "."; } - def list2stream[a](xs: List[a]): Stream[a] = xs match { - case List() => Stream.empty - case x :: xs1 => Stream.cons(x, list2stream(xs1)) + def list2stream[a](xs: List[a]): LazyList[a] = xs match { + case List() => LazyList.empty + case x :: xs1 => LazyList.cons(x, list2stream(xs1)) } - def option2stream[a](xo: Option[a]): Stream[a] = xo match { - case None => Stream.empty - case Some(x) => Stream.cons(x, Stream.empty) + def option2stream[a](xo: Option[a]): LazyList[a] = xo match { + case None => LazyList.empty + case Some(x) => LazyList.cons(x, LazyList.empty) } - def solve(query: List[Term], clauses: List[Clause]): Stream[Subst] = { + def solve(query: List[Term], clauses: List[Clause]): LazyList[Subst] = { - def solve2(query: List[Term], s: Subst): Stream[Subst] = query match { + def solve2(query: List[Term], s: Subst): LazyList[Subst] = query match { case List() => - Stream.cons(s, Stream.empty) + LazyList.cons(s, LazyList.empty) case Con("not", qs) :: query1 => - if (solve1(qs, s).isEmpty) Stream.cons(s, Stream.empty) - else Stream.empty + if (solve1(qs, s).isEmpty) LazyList.cons(s, LazyList.empty) + else LazyList.empty case q :: query1 => for (clause <- list2stream(clauses); s1 <- tryClause(clause.newInstance, q, s); s2 <- 
solve1(query1, s1)) yield s2 } - def solve1(query: List[Term], s: Subst): Stream[Subst] = { + def solve1(query: List[Term], s: Subst): LazyList[Subst] = { val ss = solve2(query, s); if (debug) Console.println("solved " + query + " = " + ss); ss } - def tryClause(c: Clause, q: Term, s: Subst): Stream[Subst] = { + def tryClause(c: Clause, q: Term, s: Subst): LazyList[Subst] = { if (debug) Console.println("trying " + c); for (s1 <- option2stream(unify(q, c.lhs, s)); s2 <- solve1(c.rhs, s1)) yield s2; } @@ -166,30 +166,30 @@ import Programs._; class Parser(s: String) { val it = new Tokenizer(s, "(),.?"); - var token: String = it.next; + var token: String = it.next(); def syntaxError(msg: String): Unit = sys.error(msg + ", but " + token + " found"); def rep[a](p: => a): List[a] = { val t = p; - if (token == ",") { token = it.next; t :: rep(p) } else List(t) + if (token == ",") { token = it.next(); t :: rep(p) } else List(t) } def constructor: Term = { val a = token; - token = it.next; + token = it.next(); Con(a, if (token equals "(") { - token = it.next; + token = it.next(); val ts: List[Term] = if (token equals ")") List() else rep(term); - if (token equals ")") token = it.next else syntaxError("`)' expected"); + if (token equals ")") token = it.next() else syntaxError("`)` expected"); ts } else List()) } def term: Term = { val ch = token.charAt(0); - if ('A' <= ch && ch <= 'Z') { val a = token; token = it.next; Var(a) } + if ('A' <= ch && ch <= 'Z') { val a = token; token = it.next(); Var(a) } else if (it.isDelimiter(ch)) { syntaxError("term expected"); null } else constructor } @@ -197,14 +197,14 @@ class Parser(s: String) { def line: Clause = { val result = if (token equals "?") { - token = it.next; + token = it.next(); Clause(NoTerm, rep(constructor)); } else { Clause( constructor, - if (token equals ":-") { token = it.next; rep(constructor) } else List()) + if (token equals ":-") { token = it.next(); rep(constructor) } else List()) } - if (token equals ".") token 
= it.next else syntaxError("`.' expected"); + if (token equals ".") token = it.next() else syntaxError("`.` expected"); result } @@ -215,7 +215,7 @@ object Prolog { def processor: String => Unit = { var program: List[Clause] = List(); - var solutions: Stream[Subst] = Stream.empty; + var solutions: LazyList[Subst] = LazyList.empty; var tvs: List[String] = List(); { input => new Parser(input).all foreach { c => @@ -268,7 +268,7 @@ object Test { "?phrase(S,V,A,D,N).\n" + "?more.\n" ); - Console.println; + Console.println() Prolog.process( "sujet(jean).\n" + @@ -287,7 +287,7 @@ object Test { "?phrase(S,V,A,D,N).\n" + "?more.\n" ); - Console.println; + Console.println() Prolog.process( "sujet(jean).\n" + @@ -313,7 +313,7 @@ object Test { "?phrase(jean,mange,le,cons(grand,nil),cheval).\n" + "?phrase(jean,mange,le,cons(grand,nil),table).\n" ); - Console.println; + Console.println() () } diff --git a/test/files/run/InferOverloadedPartialFunction.scala b/test/files/run/InferOverloadedPartialFunction.scala new file mode 100644 index 000000000000..8a87609a19f5 --- /dev/null +++ b/test/files/run/InferOverloadedPartialFunction.scala @@ -0,0 +1,90 @@ +class MySeq[T](val i: Int) { + def map1[U](f: T => U): MySeq[U] = new MySeq[U](10) + def map2[U](f: T => U): MySeq[U] = new MySeq[U](20) +} + +class MyMap[A, B](i: Int) extends MySeq[(A, B)](i) { + def map1[C](f: (A, B) => C): MySeq[C] = new MySeq[C](11) + def map1[C, D](f: (A, B) => (C, D)): MyMap[C, D] = new MyMap[C, D](12) + def map1[C, D](f: ((A, B)) => (C, D)): MyMap[C, D] = new MyMap[C, D](13) + def map3[U](f: PartialFunction[(A, B), U]): MySeq[U] = new MySeq[U](30) + + def foo(f: Function2[Int, Int, Int]): Unit = () + def foo[R](pf: PartialFunction[(A, B), R]): MySeq[R] = new MySeq[R](100) +} + +object Test extends App { + val m = new MyMap[Int, String](0) + + // These ones already worked because they are not overloaded: + m.map2 { case (k, v) => k - 1 } + m.map3 { case (k, v) => k - 1 } + + // These already worked because 
preSelectOverloaded eliminated the non-applicable overload: + // (They still pick the same overload; no previously legal code changes semantics) + assert(m.map1(t => t._1).i == 10) + assert(m.map1((kInFunction, vInFunction) => kInFunction - 1).i == 11) + val r1 = m.map1(t => (t._1, 42.0)) + val r1t: MyMap[Int, Double] = r1 + assert(r1.i == 13) + + // These worked because they are not case literals (and the argument types are known for overload resolution): + assert(m.map1({ case (k, v) => k - 1 }: PartialFunction[(Int, String), Int]).i == 10) + assert(m.map2({ case (k, v) => k - 1 }: PartialFunction[(Int, String), Int]).i == 20) + + // These ones did not work before, now always picks tupled version over Function2 version: + assert(m.map1 { case (k, v) => k }.i == 10) + val r2 = m.map1 { case (k, v) => (k, k*10) } + val r2t: MyMap[Int, Int] = r2 + assert(r2.i == 13) + val r3 = m.foo { case (k, v) => k - 1 } + val r3t: MySeq[Int] = r3 + + // Used to be ambiguous but overload resolution now favors PartialFunction + def h[R](pf: Function2[Int, String, R]): Int = 1 + def h[R](pf: PartialFunction[(Double, Double), R]): Int = 2 + assert(h { case (a: Double, b: Double) => 42: Int } == 2) + + val xs = new SortedMap + assert(xs.collectF { kv => 1 } == 0) + assert(xs.collectF { case (k, v) => 1 } == 0) + assert(xs.collectF { case (k, v) => (1, 1) } == 2) + assert(xs.collect { case (k, v) => 1 } == 0) + assert(xs.collect { case (k, v) => (1, 1) } == 1) + + val ys = new SortedMapMixed + assert(ys.collect { kv => 1 } == 0) + assert(ys.collect { kv => (1, 1) } == 0) + assert(ys.collect { case (k, v) => 1 } == 1) // could be 0 with the extra work in https://github.com/scala/scala/pull/5975/commits/3c95dac0dcbb0c8eb4686264026ad9c86b2022de + assert(ys.collect { case (k, v) => (1, 1) } == 2) +} + +class SortedMap { + def collect[B](pf: PartialFunction[(String, Int), B]): Int = 0 + def collect[K2 : Ordering, V2](pf: PartialFunction[(String, Int), (K2, V2)]): Int = 1 + def
collectF[B](pf: Function1[(String, Int), B]): Int = if(pf.isInstanceOf[PartialFunction[_, _]]) 1 else 0 + def collectF[K2 : Ordering, V2](pf: Function1[(String, Int), (K2, V2)]): Int = if(pf.isInstanceOf[PartialFunction[_, _]]) 3 else 2 +} + +class SortedMapMixed { + type PF[-A, +B] = PartialFunction[A, B] + def collect[B](pf: Function1[(String, Int), B]): Int = if(pf.isInstanceOf[PartialFunction[_, _]]) 1 else 0 + def collect[K2 : Ordering, V2](pf: PF[(String, Int), (K2, V2)]): Int = 2 +} + +// Test case for https://github.com/scala/bug/issues/10608: +class IM[T] { + def map(f: T => Int): Int = 1 + def map(f: T => String): String = "a" + def map2(f: T => String): String = "a" +} +abstract class BTypes { trait Foo } +class CC[BT <: BTypes](val bt: BT) { + import bt.Foo + val im1: IM[String] = new IM[String] + val im2: IM[Foo] = new IM[Foo] + + im1.map { case x => "" } // OK + im2.map { case x => "" } // Did not compile + im2.map2 { case x => "" } // OK +} diff --git a/test/files/run/Meter.check b/test/files/run/Meter.check index c79c51a2947e..bf901bea0182 100644 --- a/test/files/run/Meter.check +++ b/test/files/run/Meter.check @@ -1,4 +1,4 @@ -Meter.scala:72: warning: a.Meter and Int are unrelated: they will never compare equal +Meter.scala:74: warning: a.Meter and Int are unrelated: they will never compare equal println("x == 1: "+(x == 1)) ^ 2.0 diff --git a/test/files/run/Meter.scala b/test/files/run/Meter.scala index a10ad31b4af5..7ef9d247e1ba 100644 --- a/test/files/run/Meter.scala +++ b/test/files/run/Meter.scala @@ -1,3 +1,5 @@ +import annotation.unused + package a { abstract class BoxingConversions[Boxed, Unboxed] { def box(x: Unboxed): Boxed @@ -7,11 +9,11 @@ package a { class Meter(val underlying: Double) extends AnyVal with _root_.b.Printable { def + (other: Meter): Meter = new Meter(this.underlying + other.underlying) - def / (other: Meter)(implicit dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying + def / (other: 
Meter)(implicit @unused dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying def / (factor: Double): Meter = new Meter(this.underlying / factor) def < (other: Meter): Boolean = this.underlying < other.underlying def toFoot: Foot = new Foot(this.underlying * 0.3048) - override def print = { Console.print(">>>"); super.print; proprint } + override def print() = { Console.print(">>>"); super.print(); proprint() } override def toString: String = underlying.toString+"m" } @@ -21,7 +23,7 @@ package a { def apply(x: Double): Meter = new Meter(x) - implicit val boxings = new BoxingConversions[Meter, Double] { + implicit val boxings: BoxingConversions[Meter, Double] = new BoxingConversions[Meter, Double] { def box(x: Double) = new Meter(x) def unbox(m: Meter) = m.underlying } @@ -33,7 +35,7 @@ package a { override def toString = unbox.toString+"ft" } object Foot { - implicit val boxings = new BoxingConversions[Foot, Double] { + implicit val boxings: BoxingConversions[Foot, Double] = new BoxingConversions[Foot, Double] { def box(x: Double) = new Foot(x) def unbox(m: Foot) = m.unbox } @@ -42,8 +44,8 @@ package a { } package b { trait Printable extends Any { - def print: Unit = Console.print(this) - protected def proprint = Console.print("<<<") + def print(): Unit = Console.print(this) + protected def proprint() = Console.print("<<<") } } import a._ @@ -53,11 +55,11 @@ object Test extends App { { val x: Meter = new Meter(1) val a: Object = x.asInstanceOf[Object] - val y: Meter = a.asInstanceOf[Meter] + @unused val y: Meter = a.asInstanceOf[Meter] val u: Double = 1 val b: Object = u.asInstanceOf[Object] - val v: Double = b.asInstanceOf[Double] + @unused val v: Double = b.asInstanceOf[Double] } val x = new Meter(1) @@ -79,9 +81,9 @@ object Test extends App { { println("testing native arrays") val arr = Array(x, y + x) - println(arr.deep) - def foo[T <: Printable](x: Array[T]) { - for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) } + 
println(scala.tools.partest.Util.prettyArray(arr)) + def foo[T <: Printable](x: Array[T]): Unit = { + for (i <- 0 until x.length) { x(i).print(); println(" "+x(i)) } } val m = arr(0) println(m) diff --git a/test/files/run/MeterCaseClass.check b/test/files/run/MeterCaseClass.check index 75b775bc1134..0eeb0b5668cf 100644 --- a/test/files/run/MeterCaseClass.check +++ b/test/files/run/MeterCaseClass.check @@ -1,4 +1,4 @@ -MeterCaseClass.scala:69: warning: comparing values of types a.Meter and Int using `==` will always yield false +MeterCaseClass.scala:71: warning: comparing values of types a.Meter and Int using `==` will always yield false println("x == 1: "+(x == 1)) ^ 2.0 diff --git a/test/files/run/MeterCaseClass.scala b/test/files/run/MeterCaseClass.scala index 39d95c2af562..1f40dc5e7dbc 100644 --- a/test/files/run/MeterCaseClass.scala +++ b/test/files/run/MeterCaseClass.scala @@ -1,3 +1,5 @@ +import scala.tools.partest.Util.ArrayDeep + package a { abstract class BoxingConversions[Boxed, Unboxed] { def box(x: Unboxed): Boxed @@ -7,18 +9,18 @@ package a { case class Meter(underlying: Double) extends AnyVal with _root_.b.Printable { def + (other: Meter): Meter = new Meter(this.underlying + other.underlying) - def / (other: Meter)(implicit dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying + def / (other: Meter)(implicit @annotation.unused dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying def / (factor: Double): Meter = new Meter(this.underlying / factor) def < (other: Meter): Boolean = this.underlying < other.underlying def toFoot: Foot = new Foot(this.underlying * 0.3048) - override def print = { Console.print(">>>"); super.print; proprint } + override def print() = { Console.print(">>>"); super.print(); proprint() } } object Meter extends (Double => Meter) { private[a] trait MeterArg - implicit val boxings = new BoxingConversions[Meter, Double] { + implicit val boxings: BoxingConversions[Meter, Double] = new 
BoxingConversions[Meter, Double] { def box(x: Double) = new Meter(x) def unbox(m: Meter) = m.underlying } @@ -30,7 +32,7 @@ package a { override def toString = unbox.toString+"ft" } object Foot { - implicit val boxings = new BoxingConversions[Foot, Double] { + implicit val boxings: BoxingConversions[Foot, Double] = new BoxingConversions[Foot, Double] { def box(x: Double) = new Foot(x) def unbox(m: Foot) = m.unbox } @@ -39,8 +41,8 @@ package a { } package b { trait Printable extends Any { - def print: Unit = Console.print(this) - protected def proprint = Console.print("<<<") + def print(): Unit = Console.print(this) + protected def proprint() = Console.print("<<<") } } import a._ @@ -50,11 +52,11 @@ object Test extends App { { val x: Meter = new Meter(1) val a: Object = x.asInstanceOf[Object] - val y: Meter = a.asInstanceOf[Meter] + @annotation.unused val y: Meter = a.asInstanceOf[Meter] val u: Double = 1 val b: Object = u.asInstanceOf[Object] - val v: Double = b.asInstanceOf[Double] + @annotation.unused val v: Double = b.asInstanceOf[Double] } val x = new Meter(1) @@ -77,8 +79,8 @@ object Test extends App { { println("testing native arrays") val arr = Array(x, y + x) println(arr.deep) - def foo[T <: Printable](x: Array[T]) { - for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) } + def foo[T <: Printable](x: Array[T]): Unit = { + for (i <- 0 until x.length) { x(i).print(); println(" "+x(i)) } } val m = arr(0) println(m) diff --git a/test/files/run/MutableListTest.scala b/test/files/run/MutableListTest.scala deleted file mode 100644 index 322a368b1615..000000000000 --- a/test/files/run/MutableListTest.scala +++ /dev/null @@ -1,126 +0,0 @@ - - - -import scala.collection.mutable.MutableList - - - -class ExtList[T] extends MutableList[T] { - def printState { - println("Length: " + len) - println("Last elem: " + last0.elem) - println("First elem: " + first0.elem) - println("After first elem: " + first0.next.elem) - println("After first: " + first0.next) - 
println("Last: " + last0) - } -} - -object Test { - - def main(args: Array[String]) { - testEmpty - testAddElement - testAddFewElements - testAddMoreElements - testTraversables - } - - def testEmpty { - val mlist = new MutableList[Int] - assert(mlist.isEmpty) - assert(mlist.get(0) == None) - } - - def testAddElement { - val mlist = new MutableList[Int] - mlist += 17 - assert(mlist.nonEmpty) - assert(mlist.length == 1) - assert(mlist.head == 17) - assert(mlist.last == 17) - assert(mlist(0) == 17) - assert(mlist.tail.isEmpty) - assert(mlist.tail.length == 0) - mlist(0) = 101 - assert(mlist(0) == 101) - assert(mlist.toList == List(101)) - assert(mlist.tail.get(0) == None) - assert((mlist.tail += 19).head == 19) - assert(mlist.tail.length == 0) - } - - def testAddFewElements { - val mlist = new MutableList[Int] - for (i <- 0 until 2) mlist += i -// mlist.printState - for (i <- 0 until 2) assert(mlist(i) == i) - assert(mlist.length == 2) - assert(mlist.nonEmpty) - assert(mlist.tail.length == 1) - assert(mlist.tail.tail.length == 0) - assert(mlist.tail.tail.isEmpty) - } - - def testAddMoreElements { - val mlist = new MutableList[Int] - for (i <- 0 until 10) mlist += i * i - assert(mlist.nonEmpty) - assert(mlist.length == 10) - for (i <- 0 until 10) assert(mlist(i) == i * i) - assert(mlist(5) == 5 * 5) - assert(mlist(9) == 9 * 9) - var sometail = mlist - for (i <- 0 until 10) { - assert(sometail.head == i * i) - sometail = sometail.tail - } - mlist(5) = -25 - assert(mlist(5) == -25) - mlist(0) = -1 - assert(mlist(0) == -1) - mlist(9) = -81 - assert(mlist(9) == -81) - assert(mlist(5) == -25) - assert(mlist(0) == -1) - assert(mlist.last == -81) - mlist.clear - assert(mlist.isEmpty) - mlist += 1001 - assert(mlist.head == 1001) - mlist += 9999 - assert(mlist.tail.head == 9999) - assert(mlist.last == 9999) - } - - def testTraversables { - val mlist = new MutableList[Int] - for (i <- 0 until 10) mlist += i * i - var lst = mlist.drop(5) - for (i <- 0 until 5) assert(lst(i) == (i 
+ 5) * (i + 5)) - lst = lst.take(3) - for (i <- 0 until 3) assert(lst(i) == (i + 5) * (i + 5)) - lst += 129 - assert(lst(3) == 129) - assert(lst.last == 129) - assert(lst.length == 4) - lst += 7 - assert(lst.init.last == 129) - assert(lst.length == 5) - lst.clear - assert(lst.length == 0) - for (i <- 0 until 5) lst += i - assert(lst.reduceLeft(_ + _) == 10) - } - -} - - - - - - - - - - diff --git a/test/files/run/OrderingTest.scala b/test/files/run/OrderingTest.scala index 8af18aaba5da..994c607d2fd1 100644 --- a/test/files/run/OrderingTest.scala +++ b/test/files/run/OrderingTest.scala @@ -1,4 +1,9 @@ +import scala.collection.immutable.SortedSet +import scala.math.Ordering.Double.TotalOrdering +import scala.math.Ordering.Implicits._ + object Test extends App { + def test[T](t1 : T, t2 : T)(implicit ord : Ordering[T]) = { val cmp = ord.compare(t1, t2); val cmp2 = ord.compare(t2, t1); @@ -15,7 +20,7 @@ object Test extends App { test(t2, t2); } - assert(Ordering[String].compare("australopithecus", "brontausaurus") < 0) + assert(Ordering[String].compare("australopithecus", "brontosaurus") < 0) // assert(Ordering[Unit].compare((), ()) == 0) testAll("bar", "foo"); @@ -24,8 +29,10 @@ object Test extends App { testAll(1, 2); testAll(1.0, 2.0); testAll(None, Some(1)); - testAll[Iterable[Int]](List(1), List(1, 2)); - testAll[Iterable[Int]](List(1, 2), List(2)); + testAll[Seq[Int]](List(1), List(1, 2)); + testAll[Seq[Int]](List(1, 2), List(2)); + testAll[SortedSet[Int]](SortedSet(1), SortedSet(1, 2)) + testAll[SortedSet[Int]](SortedSet(1, 2), SortedSet(2)) testAll((1, "bar"), (1, "foo")) testAll((1, "foo"), (2, "bar")) diff --git a/test/files/run/QueueTest.scala b/test/files/run/QueueTest.scala index 859ce2071ab1..8fd09f18b2c5 100644 --- a/test/files/run/QueueTest.scala +++ b/test/files/run/QueueTest.scala @@ -1,31 +1,16 @@ - - import scala.collection.mutable.Queue - - - -class ExtQueue[T] extends Queue[T] { - def printState { - println("-------------------") - 
println("Length: " + len) - println("First: " + first0) - println("First elem: " + first0.elem) - println("After first: " + first0.next) - } -} - object Test { - def main(args: Array[String]) { - testEmpty - testEnqueue - testTwoEnqueues - testFewEnqueues - testMoreEnqueues + def main(args: Array[String]): Unit = { + testEmpty() + testEnqueue() + testTwoEnqueues() + testFewEnqueues() + testMoreEnqueues() } - def testEmpty { + def testEmpty(): Unit = { val queue = new Queue[Int] assert(queue.isEmpty) @@ -34,7 +19,7 @@ object Test { assert(queue.dequeueFirst(_ > 500) == None) assert(queue.dequeueAll(_ > 500).isEmpty) - queue.clear + queue.clear() assert(queue.isEmpty) assert(queue.size == 0) assert(queue.length == 0) @@ -42,7 +27,7 @@ object Test { assert(queue.dequeueAll(_ > 500).isEmpty) } - def testEnqueue { + def testEnqueue(): Unit = { val queue = new Queue[Int] queue.enqueue(10) @@ -54,7 +39,7 @@ object Test { assert(queue.init.isEmpty) assert(queue.tail.isEmpty) - queue.clear + queue.clear() assert(queue.isEmpty) assert(queue.length == 0) @@ -64,7 +49,7 @@ object Test { assert(queue.head == 11) assert(queue.front == 11) - val deq = queue.dequeue + val deq = queue.dequeue() assert(deq == 11) assert(queue.isEmpty) assert(queue.length == 0) @@ -79,8 +64,8 @@ object Test { assert(queue.isEmpty && queue.length == 0) } - def testTwoEnqueues { - val queue = new ExtQueue[Int] + def testTwoEnqueues(): Unit = { + val queue = new Queue[Int] queue.enqueue(30) queue.enqueue(40) @@ -88,7 +73,6 @@ object Test { assert(queue.size == 2) assert(queue.nonEmpty) assert(queue.front == 30) -// queue.printState val all = queue.dequeueAll(_ > 20) assert(all.size == 2) @@ -98,8 +82,8 @@ object Test { assert(queue.isEmpty) } - def testFewEnqueues { - val queue = new ExtQueue[Int] + def testFewEnqueues(): Unit = { + val queue = new Queue[Int] queue.enqueue(10) queue.enqueue(20) @@ -108,35 +92,27 @@ object Test { assert(queue.head == 10) assert(queue.last == 20) assert(queue.front == 10) 
-// queue.printState - val ten = queue.dequeue + val ten = queue.dequeue() assert(ten == 10) assert(queue.length == 1) -// queue.printState queue.enqueue(30) -// queue.printState val gt25 = queue.dequeueFirst(_ > 25) assert(gt25 == Some(30)) assert(queue.nonEmpty) assert(queue.length == 1) assert(queue.head == 20) assert(queue.front == 20) -// queue.printState queue.enqueue(30) -// queue.printState val lt25 = queue.dequeueFirst(_ < 25) assert(lt25 == Some(20)) assert(queue.nonEmpty) assert(queue.length == 1) -// queue.printState queue.enqueue(40) -// queue.printState val all = queue.dequeueAll(_ > 20) -// queue.printState assert(all.size == 2) assert(all.contains(30)) assert(all.contains(40)) @@ -145,19 +121,14 @@ object Test { queue.enqueue(50) queue.enqueue(60) -// queue.printState val allgt55 = queue.dequeueAll(_ > 55) -// println(allgt55) -// queue.printState assert(allgt55.size == 1) assert(allgt55.contains(60)) assert(queue.length == 1) queue.enqueue(70) queue.enqueue(80) -// queue.printState val alllt75 = queue.dequeueAll(_ < 75) -// queue.printState assert(alllt75.size == 2) assert(alllt75.contains(70)) assert(alllt75.contains(50)) @@ -167,12 +138,12 @@ object Test { assert(queue.front == 80) } - def testMoreEnqueues { - val queue = new ExtQueue[Int] + def testMoreEnqueues(): Unit = { + val queue = new Queue[Int] for (i <- 0 until 10) queue.enqueue(i * 2) for (i <- 0 until 10) { - val top = queue.dequeue + val top = queue.dequeue() assert(top == i * 2) assert(queue.length == 10 - i - 1) } @@ -183,15 +154,12 @@ object Test { assert(queue.length == 10) assert(queue.nonEmpty) - //queue.printState val gt5 = queue.dequeueAll(_ > 4) - //queue.printState - //println(gt5) assert(gt5.size == 7) assert(queue.length == 3) assert(queue.nonEmpty) - queue.clear + queue.clear() assert(queue.length == 0) assert(queue.isEmpty) @@ -217,14 +185,11 @@ object Test { assert(queue.length == 10) val foddgt25 = queue.dequeueFirst(num => num > 25 && num % 2 == 1) - assert(foddgt25 
== Some(49)) + assert(foddgt25 == Some(49), foddgt25) assert(queue.length == 9) assert(queue.nonEmpty) - //queue.printState val lt30 = queue.dequeueAll(_ < 30) - //println(lt30) - //queue.printState assert(lt30.size == 6) assert(queue.length == 3) @@ -258,7 +223,7 @@ object Test { assert(queue.head == 0) assert(queue.last == 3) - queue.dequeue + queue.dequeue() assert(queue.head == 3) queue.enqueue(9) @@ -267,7 +232,7 @@ object Test { assert(queue.length == 1) assert(queue.head == 9) - queue.clear + queue.clear() for (i <- -100 until 100) queue.enqueue(i * i + i % 7 + 5) assert(queue.length == 200) diff --git a/test/files/run/ReverseSeqView.scala b/test/files/run/ReverseSeqView.scala index 2004791bffee..b290f82358d6 100644 --- a/test/files/run/ReverseSeqView.scala +++ b/test/files/run/ReverseSeqView.scala @@ -1,25 +1,5 @@ - - - - - - object Test extends App { - - val lstv = List(1, 2, 3).view - val lstvr = lstv.reverse + val lstv = List(1, 2, 3).view // SeqView + val lstvr = lstv.reverse // Can reverse a SeqView, but get a plain View which can no longer be reversed assert(lstvr.iterator.toList == List(3, 2, 1)) - assert(lstvr.reverse == List(1, 2, 3)) - assert(lstvr.reverseIterator.toList == List(1, 2, 3)) - assert(lstvr.reverseMap(_ + 1) == List(2, 3, 4)) - } - - - - - - - - - diff --git a/test/files/run/SD-290.scala b/test/files/run/SD-290.scala index 0af9cb7cfa4f..bd8c171db9eb 100644 --- a/test/files/run/SD-290.scala +++ b/test/files/run/SD-290.scala @@ -9,10 +9,10 @@ object p1 { object D } package object p2 { - class B + @annotation.nowarn class B object B - class C extends java.io.Serializable + @annotation.nowarn class C extends java.io.Serializable object C type D = DD @@ -24,7 +24,7 @@ class DD extends java.io.Serializable object Test { def main(args: Array[String]): Unit = { - // This is the behaviour that was intended and was unchanged by this commmit. + // This is the behaviour that was intended and was unchanged by this commit. 
assert(!(p1.B : Object).isInstanceOf[scala.Serializable]) assert(p1.C.isInstanceOf[scala.Serializable]) assert(!(p1.D: Object).isInstanceOf[scala.Serializable]) diff --git a/test/files/run/StringConcat.check b/test/files/run/StringConcat.check new file mode 100644 index 000000000000..10eaa9a20d1b Binary files /dev/null and b/test/files/run/StringConcat.check differ diff --git a/test/files/run/StringConcat.scala b/test/files/run/StringConcat.scala new file mode 100644 index 000000000000..95c8026b6258 --- /dev/null +++ b/test/files/run/StringConcat.scala @@ -0,0 +1,86 @@ +//> using javaOpt -Xss128M + +import scala.tools.partest.ReplTest + +// ReplTest so that the long concatenation is compiled at test-run-time with the larger `Xss`. +// Tests are always compiled in the partest VM. +object Test extends ReplTest { + def code = + """// This should generally obey 15.18.1. of the JLS (String Concatenation Operator +) + |def concatenatingVariousTypes(): String = { + | val str: String = "some string" + | val sb: StringBuffer = new StringBuffer("some stringbuffer") + | val cs: CharSequence = java.nio.CharBuffer.allocate(50).append("charsequence") + | val i: Int = 123456789 + | val s: Short = 345 + | val b: Byte = 12 + | val z: Boolean = true + | val f: Float = 3.14f + | val j: Long = 98762147483647L + | val d: Double = 3.1415d + | + | "String " + str + "\n" + + | "StringBuffer " + sb + "\n" + + | "CharSequence " + cs + "\n" + + | "Int " + i + "\n" + + | "Short " + s + "\n" + + | "Byte " + b + "\n" + + | "Boolean " + z + "\n" + + | "Float " + f + "\n" + + | "Long " + j + "\n" + + | "Double " + d + "\n" + |} + |// The characters `\u0001` and `\u0002` play a special role in `StringConcatFactory` + |def concatenationInvolvingSpecialCharacters(): String = { + | val s1 = "Qux" + | val s2 = "Quux" + | + | s"Foo \u0001 $s1 Bar \u0002 $s2 Baz" + |} + |// Concatenation involving more than 200 elements + |def largeConcatenation(): String = { + | val s00 = "s00" + | val s01 = "s01" + | 
val s02 = "s02" + | val s03 = "s03" + | val s04 = "s04" + | val s05 = "s05" + | val s06 = "s06" + | val s07 = "s07" + | val s08 = "s08" + | + | // 24 rows follow + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + 
s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + |} + |println("----------") + |println(concatenatingVariousTypes()) + |println("----------") + |println(concatenationInvolvingSpecialCharacters()) + |println("----------") + |println(largeConcatenation()) + |println("----------") + |""".stripMargin +} diff --git a/test/files/run/StubErrorHK.scala b/test/files/run/StubErrorHK.scala index 7ee8c6d6a5c6..e311524a191c 100644 --- a/test/files/run/StubErrorHK.scala +++ b/test/files/run/StubErrorHK.scala @@ -12,7 +12,7 @@ object Test extends scala.tools.partest.StubErrorMessageTest { def userCode = """ package stuberrors object C extends App { - println(new B) + println(new B[Nothing]) } """ diff --git a/test/files/run/StubErrorReturnTypePolyFunction.check b/test/files/run/StubErrorReturnTypePolyFunction.check index 77fc691aa58d..b8b89e850da0 100644 --- a/test/files/run/StubErrorReturnTypePolyFunction.check +++ b/test/files/run/StubErrorReturnTypePolyFunction.check @@ -1,9 +1,9 @@ -error: newSource1.scala:13: type arguments [stuberrors.D] do not conform to method foo's type parameter bounds [T <: stuberrors.A] - b.foo[D] - ^ error: newSource1.scala:13: Symbol 
'type stuberrors.A' is missing from the classpath. This symbol is required by 'class stuberrors.D'. Make sure that type A is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. A full rebuild may help if 'D.class' was compiled against an incompatible version of stuberrors. b.foo[D] ^ +error: newSource1.scala:13: type arguments [stuberrors.D] do not conform to method foo's type parameter bounds [T <: stuberrors.A] + b.foo[D] + ^ diff --git a/test/files/run/StubErrorTypeDef.check b/test/files/run/StubErrorTypeDef.check index a25e30fab93d..a8185c21c7b2 100644 --- a/test/files/run/StubErrorTypeDef.check +++ b/test/files/run/StubErrorTypeDef.check @@ -1,16 +1,18 @@ error: newSource1.scala:4: Symbol 'type stuberrors.A' is missing from the classpath. +This symbol is required by 'class stuberrors.E'. +Make sure that type A is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. +A full rebuild may help if 'E.class' was compiled against an incompatible version of stuberrors. + new B { type D = E } + ^ +error: newSource1.scala:4: Symbol 'type stuberrors.A' is missing from the classpath. This symbol is required by 'type stuberrors.B.D'. Make sure that type A is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. A full rebuild may help if 'B.class' was compiled against an incompatible version of stuberrors. new B { type D = E } ^ -error: newSource1.scala:4: overriding type D in class B with bounds <: stuberrors.A; - type D has incompatible type +error: newSource1.scala:4: incompatible type in overriding +type D <: stuberrors.A (defined in class B); + found : stuberrors.E + required: <: stuberrors.A new B { type D = E } ^ -error: newSource1.scala:4: Symbol 'type stuberrors.A' is missing from the classpath. -This symbol is required by 'class stuberrors.E'. -Make sure that type A is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. 
-A full rebuild may help if 'E.class' was compiled against an incompatible version of stuberrors. - new B { type D = E } - ^ diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala index 7c185b0e0995..ad3840b2bae1 100644 --- a/test/files/run/SymbolsTest.scala +++ b/test/files/run/SymbolsTest.scala @@ -2,46 +2,46 @@ import scala.language.reflectiveCalls class Slazz { - val s1 = 'myFirstSymbol - val s2 = 'mySecondSymbol - def s3 = 'myThirdSymbol + val s1 = Symbol("myFirstSymbol") + val s2 = Symbol("mySecondSymbol") + def s3 = Symbol("myThirdSymbol") var s4: Symbol = null - s4 = 'myFourthSymbol + s4 = Symbol("myFourthSymbol") } class Base { - val basesymbol = 'symbase + val basesymbol = Symbol("symbase") } class Sub extends Base { - val subsymbol = 'symsub + val subsymbol = Symbol("symsub") } trait Signs { - val ind = 'indication - val trace = 'trace + val ind = Symbol("indication") + val trace = Symbol("trace") } trait Lazy1 { lazy val v1 = "lazy v1" - lazy val s1 = 'lazySymbol1 + lazy val s1 = Symbol("lazySymbol1") } trait Lazy2 { lazy val v2 = "lazy v2" - lazy val s2 = 'lazySymbol2 + lazy val s2 = Symbol("lazySymbol2") } trait Lazy3 { lazy val v3 = "lazy v3" - lazy val s3 = 'lazySymbol3 + lazy val s3 = Symbol("lazySymbol3") } object SingletonOfLazyness { - lazy val lazysym = 'lazySymbol - lazy val another = 'another - lazy val lastone = 'lastone + lazy val lazysym = Symbol("lazySymbol") + lazy val another = Symbol("another") + lazy val lastone = Symbol("lastone") } /* @@ -49,102 +49,102 @@ object SingletonOfLazyness { */ object Test { class Inner { - val simba = 'smba + val simba = Symbol("smba") var mfs: Symbol = null mfs = Symbol("mfsa") } object InnerObject { - val o1 = 'aaa - val o2 = 'ddd + val o1 = Symbol("aaa") + val o2 = Symbol("ddd") } - def aSymbol = 'myFirstSymbol - val anotherSymbol = 'mySecondSymbol - - def main(args: Array[String]) { - testLiterals - testForLoop - testInnerClasses - testInnerObjects - testWithHashMaps - 
testLists - testAnonymous - testNestedObject - testInheritance - testTraits - testLazyTraits - testLazyObjects + def aSymbol = Symbol("myFirstSymbol") + val anotherSymbol = Symbol("mySecondSymbol") + + def main(args: Array[String]): Unit = { + testLiterals() + testForLoop() + testInnerClasses() + testInnerObjects() + testWithHashMaps() + testLists() + testAnonymous() + testNestedObject() + testInheritance() + testTraits() + testLazyTraits() + testLazyObjects() } - def testLiterals { + def testLiterals(): Unit = { val scl = new Slazz assert(scl.s1 == aSymbol) assert(scl.s2 == anotherSymbol) - assert(scl.s3 == 'myThirdSymbol) + assert(scl.s3 == Symbol("myThirdSymbol")) assert(scl.s4 == Symbol.apply("myFourthSymbol")) assert(scl.s1 == Symbol("myFirstSymbol")) } - def testForLoop { + def testForLoop(): Unit = { for (i <- 0 until 100) List("Val" + i) } - def testInnerClasses { + def testInnerClasses(): Unit = { val innerPower = new Inner - assert(innerPower.simba == 'smba) - assert(innerPower.mfs == 'mfsa) + assert(innerPower.simba == Symbol("smba")) + assert(innerPower.mfs == Symbol("mfsa")) } - def testInnerObjects { - assert(InnerObject.o1 == 'aaa) - assert(InnerObject.o2 == 'ddd) + def testInnerObjects(): Unit = { + assert(InnerObject.o1 == Symbol("aaa")) + assert(InnerObject.o2 == Symbol("ddd")) } - def testWithHashMaps { + def testWithHashMaps(): Unit = { val map = new collection.mutable.HashMap[Symbol, Symbol] - map.put(InnerObject.o1, 'smba) - map.put(InnerObject.o2, 'mfsa) + map.put(InnerObject.o1, Symbol("smba")) + map.put(InnerObject.o2, Symbol("mfsa")) map.put(Symbol("WeirdKey" + 1), Symbol("Weird" + "Val" + 1)) - assert(map('aaa) == 'smba) - assert(map('ddd) == 'mfsa) - assert(map('WeirdKey1) == Symbol("WeirdVal1")) + assert(map(Symbol("aaa")) == Symbol("smba")) + assert(map(Symbol("ddd")) == Symbol("mfsa")) + assert(map(Symbol("WeirdKey1")) == Symbol("WeirdVal1")) - map.clear + map.clear() for (i <- 0 until 100) map.put(Symbol("symKey" + i), 
Symbol("symVal" + i)) assert(map(Symbol("symKey15")) == Symbol("symVal15")) - assert(map('symKey22) == 'symVal22) - assert(map('symKey73) == 'symVal73) - assert(map('symKey56) == 'symVal56) - assert(map('symKey91) == 'symVal91) + assert(map(Symbol("symKey22")) == Symbol("symVal22")) + assert(map(Symbol("symKey73")) == Symbol("symVal73")) + assert(map(Symbol("symKey56")) == Symbol("symVal56")) + assert(map(Symbol("symKey91")) == Symbol("symVal91")) } - def testLists { + def testLists(): Unit = { var lst: List[Symbol] = Nil for (i <- 0 until 100) lst ::= Symbol("lsym" + (99 - i)) - assert(lst(0) == 'lsym0) - assert(lst(10) == 'lsym10) - assert(lst(30) == 'lsym30) - assert(lst(40) == 'lsym40) - assert(lst(65) == 'lsym65) - assert(lst(90) == 'lsym90) + assert(lst(0) == Symbol("lsym0")) + assert(lst(10) == Symbol("lsym10")) + assert(lst(30) == Symbol("lsym30")) + assert(lst(40) == Symbol("lsym40")) + assert(lst(65) == Symbol("lsym65")) + assert(lst(90) == Symbol("lsym90")) } - def testAnonymous { // TODO complaints classdef can't be found for some reason, runs fine in my case + def testAnonymous(): Unit = { // TODO complaints classdef can't be found for some reason, runs fine in my case // val anon = () => { - // val simba = 'smba + // val simba = Symbol("smba") // simba // } // val an2 = () => { // object nested { - // val m = 'mfsa + // val m = Symbol("mfsa") // } // nested.m // } // val an3 = () => { // object nested { // val f = () => { - // 'layered + // Symbol("layered") // } // def gets = f() // } @@ -153,94 +153,94 @@ object Test { // val inner = new Inner // assert(anon() == inner.simba) // assert(anon().toString == "'smba") - // assert(an2() == 'mfsa) + // assert(an2() == Symbol("mfsa")) // assert(an3() == Symbol("layered" + "")) } - def testNestedObject { + def testNestedObject(): Unit = { object nested { - def sign = 'sign - def insignia = 'insignia + def sign = Symbol("sign") + def insignia = Symbol("insignia") } - assert(nested.sign == 'sign) - 
assert(nested.insignia == 'insignia) - assert(('insignia).toString == "'insignia") + assert(nested.sign == Symbol("sign")) + assert(nested.insignia == Symbol("insignia")) + assert((Symbol("insignia")).toString == "Symbol(insignia)") } - def testInheritance { + def testInheritance(): Unit = { val base = new Base val sub = new Sub - assert(base.basesymbol == 'symbase) - assert(sub.subsymbol == 'symsub) - assert(sub.basesymbol == 'symbase) + assert(base.basesymbol == Symbol("symbase")) + assert(sub.subsymbol == Symbol("symsub")) + assert(sub.basesymbol == Symbol("symbase")) val anon = new Sub { - def subsubsymbol = 'symsubsub + def subsubsymbol = Symbol("symsubsub") } - assert(anon.subsubsymbol == 'symsubsub) - assert(anon.subsymbol == 'symsub) - assert(anon.basesymbol == 'symbase) + assert(anon.subsubsymbol == Symbol("symsubsub")) + assert(anon.subsymbol == Symbol("symsub")) + assert(anon.basesymbol == Symbol("symbase")) object nested extends Sub { - def objsymbol = 'symobj + def objsymbol = Symbol("symobj") } - assert(nested.objsymbol == 'symobj) - assert(nested.subsymbol == 'symsub) - assert(nested.basesymbol == 'symbase) - assert(('symbase).toString == "'symbase") + assert(nested.objsymbol == Symbol("symobj")) + assert(nested.subsymbol == Symbol("symsub")) + assert(nested.basesymbol == Symbol("symbase")) + assert((Symbol("symbase")).toString == "Symbol(symbase)") } - def testTraits { + def testTraits(): Unit = { val fromTrait = new AnyRef with Signs { - def traitsymbol = 'traitSymbol + def traitsymbol = Symbol("traitSymbol") } - assert(fromTrait.traitsymbol == 'traitSymbol) - assert(fromTrait.ind == 'indication) - assert(fromTrait.trace == 'trace) - assert(('trace).toString == "'trace") + assert(fromTrait.traitsymbol == Symbol("traitSymbol")) + assert(fromTrait.ind == Symbol("indication")) + assert(fromTrait.trace == Symbol("trace")) + assert((Symbol("trace")).toString == "Symbol(trace)") trait Compl { - val s1 = 's1 - def s2 = 's2 + val s1 = Symbol("s1") + def s2 
= Symbol("s2") object inner { - val s3 = 's3 - val s4 = 's4 + val s3 = Symbol("s3") + val s4 = Symbol("s4") } } val compl = new Sub with Signs with Compl - assert(compl.s1 == 's1) - assert(compl.s2 == 's2) - assert(compl.inner.s3 == 's3) - assert(compl.inner.s4 == 's4) - assert(compl.ind == 'indication) - assert(compl.trace == 'trace) - assert(compl.subsymbol == 'symsub) - assert(compl.basesymbol == 'symbase) + assert(compl.s1 == Symbol("s1")) + assert(compl.s2 == Symbol("s2")) + assert(compl.inner.s3 == Symbol("s3")) + assert(compl.inner.s4 == Symbol("s4")) + assert(compl.ind == Symbol("indication")) + assert(compl.trace == Symbol("trace")) + assert(compl.subsymbol == Symbol("symsub")) + assert(compl.basesymbol == Symbol("symbase")) object Local extends Signs with Compl { - val s5 = 's5 - def s6 = 's6 + val s5 = Symbol("s5") + def s6 = Symbol("s6") object inner2 { - val s7 = 's7 - def s8 = 's8 + val s7 = Symbol("s7") + def s8 = Symbol("s8") } } - assert(Local.s5 == 's5) - assert(Local.s6 == 's6) - assert(Local.inner2.s7 == 's7) - assert(Local.inner2.s8 == 's8) - assert(Local.inner.s3 == 's3) - assert(Local.inner.s4 == 's4) - assert(Local.s1 == 's1) - assert(Local.s2 == 's2) - assert(Local.trace == 'trace) - assert(Local.ind == 'indication) - assert(('s8).toString == "'s8") + assert(Local.s5 == Symbol("s5")) + assert(Local.s6 == Symbol("s6")) + assert(Local.inner2.s7 == Symbol("s7")) + assert(Local.inner2.s8 == Symbol("s8")) + assert(Local.inner.s3 == Symbol("s3")) + assert(Local.inner.s4 == Symbol("s4")) + assert(Local.s1 == Symbol("s1")) + assert(Local.s2 == Symbol("s2")) + assert(Local.trace == Symbol("trace")) + assert(Local.ind == Symbol("indication")) + assert((Symbol("s8")).toString == "Symbol(s8)") } - def testLazyTraits { + def testLazyTraits(): Unit = { val l1 = new AnyRef with Lazy1 val l2 = new AnyRef with Lazy2 val l3 = new AnyRef with Lazy3 @@ -248,35 +248,22 @@ object Test { l1.v1 l2.v2 l3.v3 - assert((l1.s1).toString == "'lazySymbol1") + 
assert((l1.s1).toString == "Symbol(lazySymbol1)") assert(l2.s2 == Symbol("lazySymbol" + 2)) - assert(l3.s3 == 'lazySymbol3) + assert(l3.s3 == Symbol("lazySymbol3")) } - def testLazyObjects { - assert(SingletonOfLazyness.lazysym == 'lazySymbol) + def testLazyObjects(): Unit = { + assert(SingletonOfLazyness.lazysym == Symbol("lazySymbol")) assert(SingletonOfLazyness.another == Symbol("ano" + "ther")) - assert((SingletonOfLazyness.lastone).toString == "'lastone") + assert((SingletonOfLazyness.lastone).toString == "Symbol(lastone)") object nested { - lazy val sym1 = 'snested1 - lazy val sym2 = 'snested2 + lazy val sym1 = Symbol("snested1") + lazy val sym2 = Symbol("snested2") } - assert(nested.sym1 == 'snested1) + assert(nested.sym1 == Symbol("snested1")) assert(nested.sym2 == Symbol("snested" + "2")) } - } - - - - - - - - - - - - diff --git a/test/files/run/UnrolledBuffer.scala b/test/files/run/UnrolledBuffer.scala index 62a1f7d083fb..8007711ec6c6 100644 --- a/test/files/run/UnrolledBuffer.scala +++ b/test/files/run/UnrolledBuffer.scala @@ -1,14 +1,7 @@ - - - - import collection.mutable.UnrolledBuffer - - object Test { - - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val u1 = new UnrolledBuffer[Int] assert(u1.isEmpty) assert(u1.size == 0) @@ -21,7 +14,7 @@ object Test { assert(u1.nonEmpty) assert(u1.size == 3) - u1.clear + u1.clear() assert(u1.isEmpty) assert(u1.size == 0) @@ -40,8 +33,8 @@ object Test { val u2 = u1 map { x => (x - 1) / 2 } assert(u2 == UnrolledBuffer(0, 1, 2, 3, 4)) - u1.clear - u2.clear + u1.clear() + u2.clear() assert(u1.size == 0) assert(u2.size == 0) @@ -107,7 +100,7 @@ object Test { assertCorrect(u1) } - def assertCorrect(u1: UnrolledBuffer[Int]) { + def assertCorrect(u1: UnrolledBuffer[Int]): Unit = { val sz = u1.size val store = new Array[Int](sz) for (i <- 0 until sz) { @@ -121,5 +114,4 @@ object Test { assert((u1 map { x => x }) == u1) assert(u1.iterator.toSeq.size == u1.size) } - } diff --git 
a/test/files/run/WeakHashSetTest.scala b/test/files/run/WeakHashSetTest.scala index 8072aa9b8463..f27eb7875300 100644 --- a/test/files/run/WeakHashSetTest.scala +++ b/test/files/run/WeakHashSetTest.scala @@ -1,17 +1,17 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val test = scala.reflect.internal.util.WeakHashSetTest - test.checkEmpty - test.checkPlusEquals - test.checkPlusEqualsCollisions - test.checkRehashing - test.checkRehashCollisions - test.checkFindOrUpdate - test.checkMinusEquals - test.checkMinusEqualsCollisions - test.checkClear - test.checkIterator - test.checkIteratorCollisions + test.checkEmpty() + test.checkPlusEquals() + test.checkPlusEqualsCollisions() + test.checkRehashing() + test.checkRehashCollisions() + test.checkFindOrUpdate() + test.checkMinusEquals() + test.checkMinusEqualsCollisions() + test.checkClear() + test.checkIterator() + test.checkIteratorCollisions() // This test is commented out because it relies on gc behavior which isn't reliable enough in an automated environment // test.checkRemoveUnreferencedObjects @@ -22,65 +22,65 @@ object Test { // it uses the package private "diagnostics" method package scala.reflect.internal.util { - object WeakHashSetTest { - // a class guaranteed to provide hash collisions - case class Collider(x : String) extends Comparable[Collider] with Serializable { - override def hashCode = 0 - def compareTo(y : Collider) = this.x compareTo y.x - } + // a class guaranteed to provide hash collisions + case class Collider(x : String) extends Comparable[Collider] with Serializable { + override def hashCode = 0 + def compareTo(y : Collider) = this.x compareTo y.x + } + object WeakHashSetTest { // basic emptiness check - def checkEmpty { + def checkEmpty(): Unit = { val hs = new WeakHashSet[String]() assert(hs.size == 0) - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // make sure += works - def checkPlusEquals { + def checkPlusEquals(): Unit = { val 
hs = new WeakHashSet[String]() val elements = List("hello", "goodbye") elements foreach (hs += _) assert(hs.size == 2) assert(hs contains "hello") assert(hs contains "goodbye") - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // make sure += works when there are collisions - def checkPlusEqualsCollisions { + def checkPlusEqualsCollisions(): Unit = { val hs = new WeakHashSet[Collider]() val elements = List("hello", "goodbye") map Collider elements foreach (hs += _) assert(hs.size == 2) assert(hs contains Collider("hello")) assert(hs contains Collider("goodbye")) - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // add a large number of elements to force rehashing and then validate - def checkRehashing { + def checkRehashing(): Unit = { val size = 200 val hs = new WeakHashSet[String]() val elements = (0 until size).toList map ("a" + _) elements foreach (hs += _) elements foreach {i => assert(hs contains i)} - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // make sure rehashing works properly when the set is rehashed - def checkRehashCollisions { + def checkRehashCollisions(): Unit = { val size = 200 val hs = new WeakHashSet[Collider]() val elements = (0 until size).toList map {x => Collider("a" + x)} elements foreach (hs += _) elements foreach {i => assert(hs contains i)} - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // test that unreferenced objects are removed // not run in an automated environment because gc behavior can't be relied on - def checkRemoveUnreferencedObjects { + def checkRemoveUnreferencedObjects(): Unit = { val size = 200 val hs = new WeakHashSet[Collider]() val elements = (0 until size).toList map {x => Collider("a" + x)} @@ -97,11 +97,11 @@ package scala.reflect.internal.util { for (i <- 0 until size) { assert(!(hs contains Collider("b" + i))) } - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // make sure findOrUpdate returns the originally entered element - 
def checkFindOrUpdate { + def checkFindOrUpdate(): Unit = { val size = 200 val hs = new WeakHashSet[Collider]() val elements = (0 until size).toList map {x => Collider("a" + x)} @@ -111,11 +111,11 @@ package scala.reflect.internal.util { // original put in assert(hs findEntryOrUpdate(Collider("a" + i)) eq elements(i)) } - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // check -= functionality - def checkMinusEquals { + def checkMinusEquals(): Unit = { val hs = new WeakHashSet[String]() val elements = List("hello", "goodbye") elements foreach (hs += _) @@ -123,11 +123,11 @@ package scala.reflect.internal.util { assert(hs.size == 1) assert(hs contains "hello") assert(!(hs contains "goodbye")) - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // check -= when there are collisions - def checkMinusEqualsCollisions { + def checkMinusEqualsCollisions(): Unit = { val hs = new WeakHashSet[Collider] val elements = List(Collider("hello"), Collider("goodbye")) elements foreach (hs += _) @@ -138,11 +138,11 @@ package scala.reflect.internal.util { hs -= Collider("hello") assert(hs.size == 0) assert(!(hs contains Collider("hello"))) - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // check that the clear method actually cleans everything - def checkClear { + def checkClear(): Unit = { val size = 200 val hs = new WeakHashSet[String]() val elements = (0 until size).toList map ("a" + _) @@ -150,25 +150,25 @@ package scala.reflect.internal.util { hs.clear() assert(hs.size == 0) elements foreach {i => assert(!(hs contains i))} - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // check that the iterator covers all the contents - def checkIterator { + def checkIterator(): Unit = { val hs = new WeakHashSet[String]() val elements = (0 until 20).toList map ("a" + _) elements foreach (hs += _) assert(elements.iterator.toList.sorted == elements.sorted) - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } // check 
that the iterator covers all the contents even when there is a collision - def checkIteratorCollisions { + def checkIteratorCollisions(): Unit = { val hs = new WeakHashSet[Collider] val elements = (0 until 20).toList map {x => Collider("a" + x)} elements foreach (hs += _) assert(elements.iterator.toList.sorted == elements.sorted) - hs.diagnostics.fullyValidate + hs.diagnostics.fullyValidate() } } } diff --git a/test/files/run/absoverride.scala b/test/files/run/absoverride.scala index a3c03df0d7f4..0301498935a0 100644 --- a/test/files/run/absoverride.scala +++ b/test/files/run/absoverride.scala @@ -5,7 +5,7 @@ abstract class AbsIterator { } trait RichIterator extends AbsIterator { - def foreach(f: T => Unit) { + def foreach(f: T => Unit): Unit = { while (hasNext) f(next) } } @@ -33,7 +33,7 @@ trait LoggedIterator extends AbsIterator { class Iter2(s: String) extends StringIterator(s) with SyncIterator with LoggedIterator; object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { class Iter extends StringIterator(args(0)) with RichIterator with SyncIterator with LoggedIterator val iter = new Iter iter foreach Console.println diff --git a/test/files/run/abstype_implicits.scala b/test/files/run/abstype_implicits.scala index b3bcb20f5f6c..7283f6fb4cac 100644 --- a/test/files/run/abstype_implicits.scala +++ b/test/files/run/abstype_implicits.scala @@ -1,5 +1,5 @@ -// scalac: -Xsource:2.13 -import scala.language.higherKinds +//> using options -Xsource:2.13 +// trait Functor[F[_]] diff --git a/test/files/run/abstypetags_core.scala b/test/files/run/abstypetags_core.scala index 2692fec035b6..4c98a65c0d87 100644 --- a/test/files/run/abstypetags_core.scala +++ b/test/files/run/abstypetags_core.scala @@ -31,4 +31,4 @@ object Test extends App { println(implicitly[WeakTypeTag[Null]]) println(implicitly[WeakTypeTag[Nothing]] eq WeakTypeTag.Nothing) println(implicitly[WeakTypeTag[Nothing]]) -} \ No newline at end of file +} diff --git 
a/test/files/run/abstypetags_serialize.scala b/test/files/run/abstypetags_serialize.scala index 6ec97105fea1..5d0805f9332c 100644 --- a/test/files/run/abstypetags_serialize.scala +++ b/test/files/run/abstypetags_serialize.scala @@ -1,4 +1,3 @@ -import scala.language.higherKinds import java.io._ import scala.reflect.runtime.universe._ import scala.reflect.runtime.{universe => ru} diff --git a/test/files/run/adding-growing-set.scala b/test/files/run/adding-growing-set.scala index ab94b893b20a..50361b116472 100644 --- a/test/files/run/adding-growing-set.scala +++ b/test/files/run/adding-growing-set.scala @@ -5,7 +5,7 @@ object Test { def main(args: Array[String]): Unit = { val a = new Array[Long](1000000) (1 to 10000) foreach (i => a(i) = i) - val s = collection.mutable.Set(a: _*) + val s = collection.mutable.Set(collection.immutable.ArraySeq.unsafeWrapArray(a): _*) assert(s.sum > 0) } } diff --git a/test/files/run/amp.scala b/test/files/run/amp.scala index a1924ef63701..1276a361c7b4 100644 --- a/test/files/run/amp.scala +++ b/test/files/run/amp.scala @@ -2,12 +2,12 @@ object Test extends App { def foo() = { def f: Int = 1 - val x = f _ + val x = () => f x } def bar(g: => Int) = { - g _ + () => g } Console.println((bar{ Console.println("g called"); 42 })()) diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index 3cfbda651639..e06ac600610f 100644 --- a/test/files/run/analyzerPlugins.check +++ b/test/files/run/analyzerPlugins.check @@ -18,20 +18,20 @@ canAdaptAnnotations(Trees$TypeTree, ?) [7] canAdaptAnnotations(Trees$Typed, ?) 
[3] canAdaptAnnotations(Trees$Typed, Any) [1] canAdaptAnnotations(Trees$Typed, Int) [1] -lub(List(Int @testAnn, Int)) [1] +lub(List(1 @testAnn, 2)) [1] pluginsPt(?, Trees$Annotated) [6] -pluginsPt(?, Trees$Apply) [11] +pluginsPt(?, Trees$Apply) [17] pluginsPt(?, Trees$ApplyImplicitView) [2] pluginsPt(?, Trees$Block) [4] pluginsPt(?, Trees$ClassDef) [2] pluginsPt(?, Trees$DefDef) [14] -pluginsPt(?, Trees$Ident) [43] +pluginsPt(?, Trees$Ident) [44] pluginsPt(?, Trees$If) [2] pluginsPt(?, Trees$Literal) [16] pluginsPt(?, Trees$New) [6] pluginsPt(?, Trees$PackageDef) [1] pluginsPt(?, Trees$Return) [1] -pluginsPt(?, Trees$Select) [45] +pluginsPt(?, Trees$Select) [42] pluginsPt(?, Trees$Super) [2] pluginsPt(?, Trees$This) [13] pluginsPt(?, Trees$TypeApply) [3] @@ -61,7 +61,6 @@ pluginsPt(String, Trees$Ident) [3] pluginsPt(String, Trees$Literal) [1] pluginsPt(String, Trees$Select) [1] pluginsPt(Unit, Trees$Assign) [1] -pluginsPt(testAnn, Trees$Apply) [6] pluginsTypeSig(, Trees$Template) [2] pluginsTypeSig(class A, Trees$ClassDef) [1] pluginsTypeSig(class testAnn, Trees$ClassDef) [1] @@ -91,18 +90,19 @@ pluginsTypeSigAccessor(value x) [1] pluginsTypeSigAccessor(value y) [1] pluginsTypeSigAccessor(variable count) [2] pluginsTyped( <: Int, Trees$TypeBoundsTree) [2] -pluginsTyped(()Double, Trees$Select) [6] -pluginsTyped(()Object, Trees$Select) [1] -pluginsTyped(()String, Trees$Ident) [1] -pluginsTyped(()String, Trees$TypeApply) [1] -pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1] -pluginsTyped(()testAnn, Trees$Select) [12] -pluginsTyped((str: String)A (param: Double)A, Trees$Select) [1] -pluginsTyped((x$1: Any)Boolean (x: Double)Boolean (x: Float)Boolean (x: Long)Boolean (x: Int)Boolean (x: Char)Boolean (x: Short)Boolean (x: Byte)Boolean, Trees$Select) [1] -pluginsTyped((x$1: Int)Unit, Trees$Select) [1] -pluginsTyped((x: Double)Double (x: Float)Float (x: Long)Long (x: Int)Int (x: Char)Int (x: Short)Int (x: Byte)Int (x: String)String, Trees$Select) [1] 
-pluginsTyped((x: String)scala.collection.immutable.StringOps, Trees$Select) [2] -pluginsTyped((xs: Array[Any])scala.collection.mutable.WrappedArray[Any], Trees$TypeApply) [1] +pluginsTyped("huhu", Trees$Block) [1] +pluginsTyped((): Double, Trees$Select) [3] +pluginsTyped((): List, Trees$TypeApply) [1] +pluginsTyped((): Object, Trees$Select) [1] +pluginsTyped((): String, Trees$Ident) [1] +pluginsTyped((): scala.annotation.Annotation, Trees$Select) [1] +pluginsTyped((): testAnn, Trees$Select) [12] +pluginsTyped((str: String): A (param: Double): A, Trees$Select) [1] +pluginsTyped((x$1: Any): Boolean (x: Double): Boolean (x: Float): Boolean (x: Long): Boolean (x: Int): Boolean (x: Char): Boolean (x: Short): Boolean (x: Byte): Boolean, Trees$Select) [1] +pluginsTyped((x$1: Int): Unit, Trees$Select) [1] +pluginsTyped((x: Double): Double (x: Float): Float (x: Long): Long (x: Int): Int (x: Char): Int (x: Short): Int (x: Byte): Int (x: String): String, Trees$Select) [1] +pluginsTyped((x: String): scala.collection.StringOps, Trees$Select) [2] +pluginsTyped((xs: Array[Any]): scala.collection.immutable.ArraySeq[Any], Trees$TypeApply) [1] pluginsTyped(.type, Trees$Ident) [1] pluginsTyped(, Trees$Select) [1] pluginsTyped(, Trees$ClassDef) [2] @@ -110,11 +110,6 @@ pluginsTyped(, Trees$DefDef) [14] pluginsTyped(, Trees$PackageDef) [1] pluginsTyped(, Trees$TypeDef) [1] pluginsTyped(, Trees$ValDef) [13] -pluginsTyped(=> Boolean @testAnn, Trees$Select) [1] -pluginsTyped(=> Double, Trees$Select) [1] -pluginsTyped(=> Int, Trees$Select) [5] -pluginsTyped(=> Int, Trees$TypeApply) [1] -pluginsTyped(=> String @testAnn, Trees$Select) [1] pluginsTyped(A, Trees$Apply) [1] pluginsTyped(A, Trees$Ident) [2] pluginsTyped(A, Trees$This) [1] @@ -124,14 +119,14 @@ pluginsTyped(A.this.type, Trees$This) [11] pluginsTyped(Any, Trees$TypeTree) [1] pluginsTyped(AnyRef, Trees$Select) [4] pluginsTyped(Array[Any], Trees$ArrayValue) [1] -pluginsTyped(Boolean @testAnn, Trees$Select) [1] +pluginsTyped(Boolean 
@testAnn, Trees$Select) [2] pluginsTyped(Boolean @testAnn, Trees$TypeTree) [2] pluginsTyped(Boolean(false), Trees$Literal) [1] pluginsTyped(Boolean, Trees$Apply) [1] pluginsTyped(Boolean, Trees$Select) [2] pluginsTyped(Char('c'), Trees$Literal) [2] pluginsTyped(Double, Trees$Apply) [3] -pluginsTyped(Double, Trees$Select) [4] +pluginsTyped(Double, Trees$Select) [5] pluginsTyped(Int @testAnn, Trees$TypeTree) [2] pluginsTyped(Int @testAnn, Trees$Typed) [2] pluginsTyped(Int(0), Trees$Literal) [2] @@ -141,43 +136,49 @@ pluginsTyped(Int(2), Trees$Literal) [1] pluginsTyped(Int, Trees$Apply) [1] pluginsTyped(Int, Trees$Ident) [1] pluginsTyped(Int, Trees$If) [1] -pluginsTyped(Int, Trees$Select) [10] +pluginsTyped(Int, Trees$Select) [15] +pluginsTyped(Int, Trees$TypeApply) [1] pluginsTyped(Int, Trees$TypeTree) [8] +pluginsTyped(List, Trees$Apply) [1] +pluginsTyped(List, Trees$TypeTree) [1] +pluginsTyped(List.type, Trees$Select) [1] pluginsTyped(List[Any], Trees$Apply) [1] pluginsTyped(List[Any], Trees$Select) [1] pluginsTyped(List[Any], Trees$TypeTree) [2] pluginsTyped(Nothing, Trees$Return) [1] pluginsTyped(Object, Trees$Apply) [1] pluginsTyped(String @testAnn, Trees$Ident) [1] -pluginsTyped(String @testAnn, Trees$Select) [1] +pluginsTyped(String @testAnn, Trees$Select) [2] pluginsTyped(String @testAnn, Trees$TypeTree) [3] pluginsTyped(String(""), Trees$Literal) [2] pluginsTyped(String("huhu"), Trees$Literal) [1] pluginsTyped(String("str") @testAnn, Trees$Typed) [1] pluginsTyped(String("str"), Trees$Literal) [1] pluginsTyped(String("two"), Trees$Literal) [2] -pluginsTyped(String, Trees$Apply) [2] -pluginsTyped(String, Trees$Block) [2] +pluginsTyped(String, Trees$Apply) [1] +pluginsTyped(String, Trees$Block) [1] pluginsTyped(String, Trees$Select) [4] -pluginsTyped(String, Trees$TypeTree) [5] +pluginsTyped(String, Trees$TypeTree) [4] pluginsTyped(Unit, Trees$Apply) [2] pluginsTyped(Unit, Trees$Assign) [1] pluginsTyped(Unit, Trees$Block) [4] pluginsTyped(Unit, Trees$If) [1] 
pluginsTyped(Unit, Trees$Literal) [5] pluginsTyped(Unit, Trees$TypeTree) [1] -pluginsTyped([A](xs: A*)List[A], Trees$Select) [1] -pluginsTyped([T <: Int]=> Int, Trees$Select) [1] -pluginsTyped([T0]()T0, Trees$Select) [1] -pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1] +pluginsTyped([A](elems: A*): List[A], Trees$Select) [1] +pluginsTyped([T <: Int]Int, Trees$Select) [1] +pluginsTyped([T0](): T0, Trees$Select) [1] +pluginsTyped([T](xs: Array[T]): scala.collection.immutable.ArraySeq[T], Trees$Select) [1] pluginsTyped(annotation.type, Trees$Select) [4] +pluginsTyped(collection.immutable.List.type, Trees$Select) [1] pluginsTyped(math.type, Trees$Select) [9] pluginsTyped(scala.annotation.Annotation, Trees$Apply) [1] pluginsTyped(scala.annotation.TypeConstraint, Trees$Select) [4] pluginsTyped(scala.annotation.TypeConstraint, Trees$TypeTree) [2] -pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2] -pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2] -pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1] +pluginsTyped(scala.collection.StringOps, Trees$ApplyImplicitView) [2] +pluginsTyped(scala.collection.immutable.ArraySeq[Any], Trees$Apply) [1] +pluginsTyped(scala.package.type, Trees$Select) [1] +pluginsTyped(scala.type, Trees$Ident) [1] pluginsTyped(str.type, Trees$Ident) [3] pluginsTyped(testAnn, Trees$Apply) [6] pluginsTyped(testAnn, Trees$Ident) [6] @@ -185,5 +186,4 @@ pluginsTyped(testAnn, Trees$New) [6] pluginsTyped(testAnn, Trees$This) [1] pluginsTyped(testAnn, Trees$TypeTree) [1] pluginsTyped(testAnn.super.type, Trees$Super) [1] -pluginsTyped(type, Trees$Select) [1] pluginsTypedReturn(return f, String) [1] diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala index 4b297ff2206e..e5720649b7db 100644 --- a/test/files/run/analyzerPlugins.scala +++ b/test/files/run/analyzerPlugins.scala @@ -2,8 +2,7 @@ import 
scala.tools.partest._ import scala.tools.nsc._ object Test extends DirectTest { - - override def extraSettings: String = "-usejavacp" + override def extraSettings: String = s"${super.extraSettings} '-Wconf:cat=deprecation&msg=early initializers:s'" def code = """ class testAnn extends annotation.TypeConstraint @@ -22,10 +21,10 @@ object Test extends DirectTest { var count = 0 - math.random // some statement + math.random() // some statement def method: String = { - math.random + math.random() val f = inferField def nested(): String = { @@ -36,16 +35,16 @@ object Test extends DirectTest { nested() } - def this(str: String) { + def this(str: String) = { this(str.toDouble) - math.random + math.random() count += 1 } } """.trim - def show() { + def show(): Unit = { val global = newCompiler() import global._ import analyzer._ @@ -116,7 +115,7 @@ object Test extends DirectTest { addAnalyzerPlugin(analyzerPlugin) compileString(global)(code) - val res = output.groupBy(identity).mapValues(_.size).map { case (k,v) => s"$k [$v]" }.toList.sorted + val res = output.groupBy(identity).view.mapValues(_.size).map { case (k,v) => s"$k [$v]" }.toList.sorted println(res.mkString("\n")) } diff --git a/test/files/run/annot-infix-tostr.check b/test/files/run/annot-infix-tostr.check new file mode 100644 index 000000000000..2ff6d74fb48e --- /dev/null +++ b/test/files/run/annot-infix-tostr.check @@ -0,0 +1,20 @@ + +scala> class ann extends annotation.StaticAnnotation +class ann + +scala> def a: Int => (Int @ann) = ??? +def a: Int => Int @ann + +scala> def b: Int => Int @ann = ??? +def b: Int => Int @ann + +scala> def c: (Int => Int) @ann = ??? +def c: (Int => Int) @ann + +scala> def d: Int => (Int => Int) @ann = ??? +def d: Int => ((Int => Int) @ann) + +scala> def e: (Int => Int => Int) @ann = ??? 
+def e: (Int => (Int => Int)) @ann + +scala> :quit diff --git a/test/files/run/annot-infix-tostr.scala b/test/files/run/annot-infix-tostr.scala new file mode 100644 index 000000000000..1b5f4a41bd4e --- /dev/null +++ b/test/files/run/annot-infix-tostr.scala @@ -0,0 +1,12 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ + |class ann extends annotation.StaticAnnotation + |def a: Int => (Int @ann) = ??? + |def b: Int => Int @ann = ??? + |def c: (Int => Int) @ann = ??? + |def d: Int => (Int => Int) @ann = ??? + |def e: (Int => Int => Int) @ann = ??? + |""".stripMargin +} diff --git a/test/files/run/annotatedRetyping.scala b/test/files/run/annotatedRetyping.scala index 9b9ebd5a1ebd..865b6aad1c71 100644 --- a/test/files/run/annotatedRetyping.scala +++ b/test/files/run/annotatedRetyping.scala @@ -1,10 +1,7 @@ import scala.tools.partest._ -import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" - def code = """ class testAnn extends annotation.Annotation @@ -28,7 +25,7 @@ object Test extends DirectTest { // bug was that typedAnnotated assigned a type to the Annotated tree. The second type check would consider // the tree as alreadyTyped, which is not cool, the Annotated needs to be transformed into a Typed tree. 
- def show() { + def show(): Unit = { val global = newCompiler() import global._ import analyzer._ @@ -39,7 +36,7 @@ object Test extends DirectTest { override def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = { defTree match { case impl: Template => - templates += typer.context.owner -> (impl, typer) + templates += typer.context.owner -> ((impl, typer)) case dd: DefDef if dd.symbol.isPrimaryConstructor && templates.contains(dd.symbol.owner) => val (impl, templTyper) = templates(dd.symbol.owner) diff --git a/test/files/run/anyval-box-types.scala b/test/files/run/anyval-box-types.scala index cd219aed556d..933005d0aab8 100644 --- a/test/files/run/anyval-box-types.scala +++ b/test/files/run/anyval-box-types.scala @@ -1,4 +1,5 @@ -// scalac: -Xmaxwarns 0 -opt:l:method +//> using options -Xmaxwarns 0 -opt:local +// object Test extends App { val one: java.lang.Integer = 1 diff --git a/test/files/run/applydynamic_sip.scala b/test/files/run/applydynamic_sip.scala index c33da81b3843..cdc16ac7f5ac 100644 --- a/test/files/run/applydynamic_sip.scala +++ b/test/files/run/applydynamic_sip.scala @@ -1,4 +1,5 @@ -// scalac: -Yrangepos:false -language:dynamics +//> using options -Yrangepos:false -language:dynamics +// object Test extends App { object stubUpdate { def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")")) @@ -10,9 +11,9 @@ object Test extends App { def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")")) } class MyDynamic extends Dynamic { - def applyDynamic[T](n: String)(as: Any*) = {println("qual.applyDynamic("+ n +")"+ as.toList.mkString("(",", ", ")")); stub} - def applyDynamicNamed[T](n: String)(as: (String, Any)*) = {println("qual.applyDynamicNamed("+ n +")"+ as.toList.mkString("(",", ", ")")); stub} - def selectDynamic[T](n: String) = {println("qual.selectDynamic("+ n +")"); stub} + def applyDynamic[A](n: String)(as: Any*) = {println("qual.applyDynamic("+ n +")"+ as.toList.mkString("(",", ", ")")); 
stub} + def applyDynamicNamed[A](n: String)(as: (String, Any)*) = {println("qual.applyDynamicNamed("+ n +")"+ as.toList.mkString("(",", ", ")")); stub} + def selectDynamic[A](n: String) = {println("qual.selectDynamic("+ n +")"); stub} def updateDynamic(n: String)(x: Any): Unit = {println("qual.updateDynamic("+ n +")("+ x +")")} } val qual = new MyDynamic diff --git a/test/files/run/argfile.check b/test/files/run/argfile.check new file mode 100644 index 000000000000..bb8621865db7 --- /dev/null +++ b/test/files/run/argfile.check @@ -0,0 +1 @@ +Compiler arguments written to: argfile-run.obj/print-args.txt diff --git a/test/files/run/argfile.scala b/test/files/run/argfile.scala new file mode 100644 index 000000000000..5fdb17fc988e --- /dev/null +++ b/test/files/run/argfile.scala @@ -0,0 +1,40 @@ + +import java.nio.file.Files + +import scala.jdk.CollectionConverters._ +import scala.reflect.internal.util._ +import scala.tools.nsc.{CompilerCommand, Settings} +import scala.tools.partest.DirectTest +import scala.util.chaining._ + +object Test extends DirectTest { + var count = 0 + lazy val argfile = testOutput.jfile.toPath().resolve("print-args.txt") + lazy val goodarg = testOutput.jfile.toPath().resolve("print-args2.txt") + override def extraSettings = + if (count == 0) s"""${super.extraSettings} -Xsource:3 -Vprint-args $argfile "-Wconf:cat=unused-nowarn&msg=does not suppress&site=C:s"""" + else s"@$goodarg" + + // Use CompilerCommand for expanding the args file. 
+ override def newSettings(args: List[String]) = (new Settings).tap { s => + val cc = new CompilerCommand(args, s) + assert(cc.ok) + assert(cc.files.isEmpty) + } + def code = + sm""" + |import annotation.* + |@nowarn + |final class C { + | def f: Int = "42".toInt + |} + """ + def show() = { + assert(compile()) + // drop "-Vprint-args .../print-args.txt newSource1.scala" + val args = Files.readAllLines(argfile).asScala.toList.dropRight(3) + Files.write(goodarg, args.asJava) + count += 1 + assert(compile()) + } +} diff --git a/test/files/run/array-charSeq.check b/test/files/run/array-charSeq.check index 051fd72a3c08..f1f374f63ec5 100644 --- a/test/files/run/array-charSeq.check +++ b/test/files/run/array-charSeq.check @@ -1,4 +1,3 @@ -warning: two deprecations (since 2.12.0); re-run with -deprecation for details [check 'abcdefghi'] len = 9 sub(0, 9) == 'abcdefghi' diff --git a/test/files/run/array-charSeq.scala b/test/files/run/array-charSeq.scala index 86bfb829a69e..a334cd6aaeee 100644 --- a/test/files/run/array-charSeq.scala +++ b/test/files/run/array-charSeq.scala @@ -1,12 +1,12 @@ + object Test { val arr = Array[Char]('a' to 'i': _*) - var xs: CharSequence = ArrayCharSequence(arr) + var xs: CharSequence = new runtime.ArrayCharSequence(arr, 0, arr.length) val hash = xs.hashCode - def check(chars: CharSequence) { + def check(chars: CharSequence): Unit = { println("\n[check '" + chars + "'] len = " + chars.length) chars match { - case x: Predef.ArrayCharSequence => assert(x.__arrayOfChars eq arr, ((x.__arrayOfChars, arr))) case x: runtime.ArrayCharSequence => assert(x.xs eq arr, ((x.xs, arr))) case x => assert(false, x) } diff --git a/test/files/run/array-cleanup-optimation-specialized.scala b/test/files/run/array-cleanup-optimation-specialized.scala index 5db397752df3..1b161505736e 100644 --- a/test/files/run/array-cleanup-optimation-specialized.scala +++ b/test/files/run/array-cleanup-optimation-specialized.scala @@ -4,9 +4,11 @@ object Test { def main(args: 
Array[String]): Unit = { assert(apply[String]("") == classOf[Array[String]]) assert(apply[Double](1d) == classOf[Array[Double]]) - } - def apply[@specialized(Double) C: ClassTag](c: C): Class[_] = { - Array(c).getClass + assert(applyBound[Double](1d) == classOf[Array[Double]]) } + + def apply[@specialized(Double) C: ClassTag](c: C): Class[_] = Array(c).getClass + + def applyBound[D <: Double: ClassTag](d: D): Class[_] = Array.apply[D](d).getClass } diff --git a/test/files/run/arrayclone-new.scala b/test/files/run/arrayclone-new.scala index 506e4f527cba..a8053b618a1c 100644 --- a/test/files/run/arrayclone-new.scala +++ b/test/files/run/arrayclone-new.scala @@ -1,4 +1,4 @@ -import scala.reflect.{ClassTag, classTag} +import scala.reflect.ClassTag object Test extends App{ BooleanArrayClone; @@ -105,4 +105,4 @@ object PolymorphicArrayClone{ val y : Array[Int] = mangled.it; // make sure it's unboxed testIt(mangled.it, 0, 1); -} \ No newline at end of file +} diff --git a/test/files/run/arraycopy.scala b/test/files/run/arraycopy.scala index bb06200dc7cf..77a8809a13e8 100644 --- a/test/files/run/arraycopy.scala +++ b/test/files/run/arraycopy.scala @@ -1,7 +1,7 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val a = new Array[Int](10) val b = new Array[Any](10) for (i <- 0 until 10) b(i) = i diff --git a/test/files/run/arrays.scala b/test/files/run/arrays.scala index c8bf80ea60ff..1c950959f774 100644 --- a/test/files/run/arrays.scala +++ b/test/files/run/arrays.scala @@ -100,7 +100,7 @@ object Test { var checks: Int = 0; - def check(test0: Boolean, actual: Any, expected: Any) { + def check(test0: Boolean, actual: Any, expected: Any): Unit = { val test1: Boolean = actual == expected; if (!test0 || !test1) { val s0 = if (test0) "ok" else "KO"; @@ -112,27 +112,27 @@ object Test { checks += 1 } - def check_Ta[T <: Any ](xs: Array[T], l: Int, x0: T, c: Check[T]) { + def check_Ta[T <: Any ](xs: Array[T], l: Int, x0: T, c: Check[T]): 
Unit = { check(xs.length == l, xs.length, l); check(xs(0) == x0, xs(0), x0); c(xs); } - def check_Tv[T <: AnyVal ](xs: Array[T], l: Int, x0: T, c: Check[T]) { + def check_Tv[T <: AnyVal ](xs: Array[T], l: Int, x0: T, c: Check[T]): Unit = { check(xs.length == l, xs.length, l); check(xs(0) == x0, xs(0), x0); check_Ta(xs, l, x0, c); c(xs); } - def check_Tr[T <: AnyRef ](xs: Array[T], l: Int, x0: T, c: Check[T]) { + def check_Tr[T <: AnyRef ](xs: Array[T], l: Int, x0: T, c: Check[T]): Unit = { check(xs.length == l, xs.length, l); check(xs(0) == x0, xs(0), x0); check_Ta(xs, l, x0, c); c(xs); } - def check_To[T <: Object ](xs: Array[T], l: Int, x0: T, c: Check[T]) { + def check_To[T <: Object ](xs: Array[T], l: Int, x0: T, c: Check[T]): Unit = { check(xs.length == l, xs.length, l); check(xs(0) == x0, xs(0), x0); check_Ta(xs, l, x0, c); @@ -140,7 +140,7 @@ object Test { c(xs); } - def check_Tm[T <: Map ](xs: Array[T], l: Int, x0: T, c: Check[T]) { + def check_Tm[T <: Map ](xs: Array[T], l: Int, x0: T, c: Check[T]): Unit = { check(xs.length == l, xs.length, l) check(xs(0) == x0, xs(0), x0) check_Ta(xs, l, x0, c) @@ -149,7 +149,7 @@ object Test { c(xs) } - def check_Tn[T <: Strings](xs: Array[T], l: Int, x0: T, c: Check[T]) { + def check_Tn[T <: Strings](xs: Array[T], l: Int, x0: T, c: Check[T]): Unit = { check(xs.length == l, xs.length, l) check(xs(0) == x0, xs(0), x0) check_Ta(xs, l, x0, c) @@ -158,7 +158,7 @@ object Test { c(xs) } - def checkT2368() { + def checkT2368(): Unit = { val arr = Array(1, 2, 3) arr(0) += 1 assert(arr(0) == 2) @@ -206,7 +206,7 @@ object Test { val a2: Int = 0; val a3: Null = null; val a4: String = "a-z"; - val a5: Symbol = 'token; + val a5: Symbol = Symbol("token"); val a6: HashMap = new HashMap(); val a7: TreeMap = scala.collection.immutable.TreeMap.empty[Int, Any]; val a8: Strings = List("a", "z"); @@ -293,7 +293,7 @@ object Test { def fcheck(xs: Array[Float ]): Unit = { check(xs.length == 3, xs.length, 3); check(xs(0) == f0, xs(0), f0); - 
check(xs(1) == f1, xs(1), f1: Float); // !!! : Float + check(xs(1) == f1, xs(1), f1.toFloat); // !!! : Float check(xs(2) == f2, xs(2), f2); } @@ -331,7 +331,7 @@ object Test { check(xs(2) == m2, xs(2), m2); } - def ncheck(xs: Array[Strings]) { + def ncheck(xs: Array[Strings]): Unit = { check(xs.length == 3, xs.length, 3) check(xs(0) == n0, xs(0), n0) check(xs(1) == n1, xs(1), n1) @@ -341,13 +341,13 @@ object Test { //########################################################################## // Miscellaneous checks - def checkZip { + def checkZip(): Unit = { val zipped = Array("a", "b", "c").zip(Array(1, 2)) val expected = Array(("a",1), ("b",2)) check(zipped sameElements expected, zipped.toList, expected.toList) } - def checkConcat { // ticket #713 + def checkConcat(): Unit = { // ticket #713 val x1 = Array.concat(Array(1, 2), Array(3, 4)) val y1 = Array(1, 2, 3, 4) check(x1 sameElements y1, x1.toList, y1.toList) @@ -363,7 +363,7 @@ object Test { val carray: Array[Char ] = Array(c0, c1, c2); val iarray: Array[Int ] = Array(i0, i1, i2); val larray: Array[Long ] = Array(l0, l1, l2); - val farray: Array[Float ] = Array(f0, f1, f2); + val farray: Array[Float ] = Array(f0, f1.toFloat, f2); val darray: Array[Double ] = Array(d0, d1, d2); val rarray: Array[AnyRef ] = Array(r0, r1, r2, r4, r4, r5); val oarray: Array[Object ] = Array(o0, o1, o2, o4, o4, o5); @@ -921,8 +921,8 @@ object Test { //###################################################################### - checkZip - checkConcat + checkZip() + checkConcat() checkT2368() //###################################################################### diff --git a/test/files/run/arrayview.scala b/test/files/run/arrayview.scala index 97e840f5e946..3d84f0edbe7c 100644 --- a/test/files/run/arrayview.scala +++ b/test/files/run/arrayview.scala @@ -2,7 +2,7 @@ object Test { def f = (1 to 100).toArray.view def main(args: Array[String]): Unit = { - val xs = (f filter (_ < 50)).reverse.filter(_ % 2 == 0).map(_ / 2).flatMap(x => 
Array(1, x)) + val xs = (f filter (_ < 50)).filter(_ % 2 == 0).map(_ / 2).flatMap(x => Array(1, x)) assert(xs.size == 48) val ys = xs.toArray assert(ys.size == 48) diff --git a/test/files/run/badout.check b/test/files/run/badout.check new file mode 100644 index 000000000000..60a91649c2a5 --- /dev/null +++ b/test/files/run/badout.check @@ -0,0 +1 @@ +error: Output dir does not exist: badout-run.obj/bogus diff --git a/test/files/run/badout.scala b/test/files/run/badout.scala new file mode 100644 index 000000000000..43637933ae02 --- /dev/null +++ b/test/files/run/badout.scala @@ -0,0 +1,13 @@ + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + override def code = "" + + private def bogusTemp = s"${testOutput.toString}/bogus" // no need to obfuscate -${System.currentTimeMillis} + + override def extraSettings = s"${super.extraSettings} -Ygen-asmp $bogusTemp" + + override def show() = assert(!compile()) +} diff --git a/test/files/run/bcodeInlinerMixed.check b/test/files/run/bcodeInlinerMixed.check deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/test/files/run/bcodeInlinerMixed/A_1.java b/test/files/run/bcodeInlinerMixed/A_1.java index 44d7d88eeb03..0b1c71909f55 100644 --- a/test/files/run/bcodeInlinerMixed/A_1.java +++ b/test/files/run/bcodeInlinerMixed/A_1.java @@ -1,3 +1,4 @@ public class A_1 { - public static final int bar() { return 100; } + // non-trivial method to avoid it from being inlined + public static final int bar() { return Integer.valueOf("123") + Integer.valueOf("321"); } } diff --git a/test/files/run/bcodeInlinerMixed/B_1.scala b/test/files/run/bcodeInlinerMixed/B_1.scala index 1628dc24fe5c..c80ef4b69067 100644 --- a/test/files/run/bcodeInlinerMixed/B_1.scala +++ b/test/files/run/bcodeInlinerMixed/B_1.scala @@ -1,4 +1,5 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** +// // Since 1.0.18, partest does mixed compilation only in two stages // 1. 
scalac *.scala *.java // 2. javac *.java @@ -6,8 +7,8 @@ // Before it used to do a third stage // 3. scalc *.scala // -// Because he inliner doesn't has access to the bytecode of `bar`, it cannot verify whether the -// invocation of `bar` can be safely copied to a differnet place, so `flop` is not inlined to `B.g` +// Because the inliner doesn't have access to the bytecode of `bar`, it cannot verify whether the +// invocation of `bar` can be safely copied to a different place, so `flop` is not inlined to `B.g` // or `C.h`. class B { diff --git a/test/files/run/bcodeInlinerMixed/Test_2.scala b/test/files/run/bcodeInlinerMixed/Test_2.scala index 00451fa6c5aa..91a005185c11 100644 --- a/test/files/run/bcodeInlinerMixed/Test_2.scala +++ b/test/files/run/bcodeInlinerMixed/Test_2.scala @@ -1,5 +1,7 @@ -// scalac: -opt:l:inline -opt-inline-from:** -import scala.tools.partest.{BytecodeTest, ASMConverters} +//> using options -opt:inline:** +// +import scala.tools.partest.BytecodeTest +import scala.tools.testkit.ASMConverters import ASMConverters._ class D { @@ -10,7 +12,7 @@ class D { } object Test extends BytecodeTest { - def show: Unit = { + def show(): Unit = { val gIns = instructionsFromMethod(getMethod(loadClassNode("B"), "g")) val hIns = instructionsFromMethod(getMethod(loadClassNode("C"), "h")) for (i <- List(gIns, hIns)) { diff --git a/test/files/run/bigDecimalCache.scala b/test/files/run/bigDecimalCache.scala index c0c709a50f7d..d5c295eea983 100644 --- a/test/files/run/bigDecimalCache.scala +++ b/test/files/run/bigDecimalCache.scala @@ -1,6 +1,6 @@ object Test { def main(args: Array[String]): Unit = { - val bd5a = BigDecimal(5) + @annotation.unused val bd5a = BigDecimal(5) val mc = java.math.MathContext.DECIMAL32 val bd5b = BigDecimal(5,mc) diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check index bdb5b50da4f6..82cb98551abd 100644 --- a/test/files/run/bitsets.check +++ b/test/files/run/bitsets.check @@ -1,4 +1,3 @@ -warning: three deprecations 
(since 2.12.0); re-run with -deprecation for details ms0 = BitSet(2) ms1 = BitSet(2) ms2 = BitSet(2) @@ -61,7 +60,7 @@ ia2 = List(2) ia3 = List() i2_m0 = List(1010101010101010101010101) -i2_m2 = List(ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, 1) +i2_m2 = List(ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, 1, 0, 0, 0) i2_m0c = true i2_m1c = true i2_m2c = true @@ -83,3 +82,5 @@ i2_r1 = true i2_r2 = true i2_r3 = true +125 + diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala index 5d492207494a..1cb9abea4a9a 100644 --- a/test/files/run/bitsets.scala +++ b/test/files/run/bitsets.scala @@ -38,7 +38,7 @@ object TestMutable { Console.println("mi0 = " + ms0.toImmutable) Console.println("mi1 = " + ms1.toImmutable) Console.println("mi2 = " + ms2.toImmutable) - Console.println + Console.println() val N = 257 val gen = 3 @@ -81,19 +81,19 @@ object TestMutable2 { println("m2_i1 = " + (t1 == b1)) println("m2_i2 = " + (t2 == b2)) println("m2_i3 = " + (t3 == b3)) - println("m2_f0 = " + (t0.from(42) == b0.from(42))) - println("m2_f1 = " + (t1.from(42) == b1.from(42))) - println("m2_f2 = " + (t2.from(42) == b2.from(42))) - println("m2_f3 = " + (t3.from(42) == b3.from(42))) - println("m2_t0 = " + (t0.to(195) == b0.to(195))) - println("m2_t1 = " + (t1.to(195) == b1.to(195))) - println("m2_t2 = " + (t2.to(195) == b2.to(195))) - println("m2_t3 = " + (t3.to(195) == b3.to(195))) + println("m2_f0 = " + (t0.rangeFrom(42) == b0.rangeFrom(42))) + println("m2_f1 = " + (t1.rangeFrom(42) == b1.rangeFrom(42))) + println("m2_f2 = " + (t2.rangeFrom(42) == b2.rangeFrom(42))) + println("m2_f3 = " + (t3.rangeFrom(42) == b3.rangeFrom(42))) + println("m2_t0 = " + (t0.rangeTo(195) == b0.rangeTo(195))) + println("m2_t1 = " + (t1.rangeTo(195) == b1.rangeTo(195))) + println("m2_t2 = " + (t2.rangeTo(195) == b2.rangeTo(195))) + println("m2_t3 = " + (t3.rangeTo(195) == b3.rangeTo(195))) println("m2_r0 = " + (t0.range(43,194) == 
b0.range(43,194))) println("m2_r1 = " + (t1.range(43,194) == b1.range(43,194))) println("m2_r2 = " + (t2.range(43,194) == b2.range(43,194))) println("m2_r3 = " + (t3.range(43,194) == b3.range(43,194))) - println + println() } object TestMutable3 { @@ -162,7 +162,7 @@ object TestImmutable { Console.println("ia1 = " + is1.toList) Console.println("ia2 = " + is2.toList) Console.println("ia3 = " + is3.toList) - Console.println + Console.println() } object TestImmutable2 { @@ -191,19 +191,25 @@ object TestImmutable2 { println("i2_i1 = " + (t1 == b1)) println("i2_i2 = " + (t2 == b2)) println("i2_i3 = " + (t3 == b3)) - println("i2_f0 = " + (t0.from(42) == b0.from(42))) - println("i2_f1 = " + (t1.from(42) == b1.from(42))) - println("i2_f2 = " + (t2.from(42) == b2.from(42))) - println("i2_f3 = " + (t3.from(42) == b3.from(42))) - println("i2_t0 = " + (t0.to(195) == b0.to(195))) - println("i2_t1 = " + (t1.to(195) == b1.to(195))) - println("i2_t2 = " + (t2.to(195) == b2.to(195))) - println("i2_t3 = " + (t3.to(195) == b3.to(195))) + println("i2_f0 = " + (t0.rangeFrom(42) == b0.rangeFrom(42))) + println("i2_f1 = " + (t1.rangeFrom(42) == b1.rangeFrom(42))) + println("i2_f2 = " + (t2.rangeFrom(42) == b2.rangeFrom(42))) + println("i2_f3 = " + (t3.rangeFrom(42) == b3.rangeFrom(42))) + println("i2_t0 = " + (t0.rangeTo(195) == b0.rangeTo(195))) + println("i2_t1 = " + (t1.rangeTo(195) == b1.rangeTo(195))) + println("i2_t2 = " + (t2.rangeTo(195) == b2.rangeTo(195))) + println("i2_t3 = " + (t3.rangeTo(195) == b3.rangeTo(195))) println("i2_r0 = " + (t0.range(77,194) == b0.range(77,194))) println("i2_r1 = " + (t1.range(77,194) == b1.range(77,194))) println("i2_r2 = " + (t2.range(77,194) == b2.range(77,194))) println("i2_r3 = " + (t3.range(77,194) == b3.range(77,194))) - println + println() +} + +object TestImmutable3 { + import scala.collection.immutable.BitSet + BitSet(125).filter{ xi => println(xi); true } // scala/bug#11380 + println() } object Test extends App { @@ -213,6 +219,7 @@ 
object Test extends App { // TestMutable4 TestImmutable TestImmutable2 + TestImmutable3 } //############################################################################ diff --git a/test/files/run/blame_eye_triple_eee-double.check b/test/files/run/blame_eye_triple_eee-double.check index 53eac99ecd18..ebe949834aed 100644 --- a/test/files/run/blame_eye_triple_eee-double.check +++ b/test/files/run/blame_eye_triple_eee-double.check @@ -1,3 +1,6 @@ +blame_eye_triple_eee-double.scala:49: warning: unreachable code + case _ => println("NaN matching was good") + ^ if (NaN == NaN) is good if (x == x) is good if (x == NaN) is good diff --git a/test/files/run/blame_eye_triple_eee-float.check b/test/files/run/blame_eye_triple_eee-float.check index 53eac99ecd18..6ba063a13990 100644 --- a/test/files/run/blame_eye_triple_eee-float.check +++ b/test/files/run/blame_eye_triple_eee-float.check @@ -1,3 +1,6 @@ +blame_eye_triple_eee-float.scala:49: warning: unreachable code + case _ => println("NaN matching was good") + ^ if (NaN == NaN) is good if (x == x) is good if (x == NaN) is good diff --git a/test/files/run/blank.scala b/test/files/run/blank.scala new file mode 100644 index 000000000000..9bb437490070 --- /dev/null +++ b/test/files/run/blank.scala @@ -0,0 +1,11 @@ +//> using jvm 11+ +// +// skalac: --release:8 +// trivial manual test for partest --realeasy, which sets --release:8. +// under --realeasy, skip this test because of the javaVersion, irrespective of JDK in use. +// otherwise, this test passes trivially on JDK11+ and is skipped on lesser JDKs. +// note that explicit --release:8 asks to compile against JDK8 but only run on the requested version. 
+ +object Test extends App { + assert("".isBlank) // String#isBlank was added in JDK11 +} diff --git a/test/files/run/boolexprs.scala b/test/files/run/boolexprs.scala index b9b4faea9cfa..56cabc5c5b16 100644 --- a/test/files/run/boolexprs.scala +++ b/test/files/run/boolexprs.scala @@ -33,7 +33,7 @@ object Test2 { // Test code object Test { - def check_success(name: String, closure: => Int, expected: Int) { + def check_success(name: String, closure: => Int, expected: Int): Unit = { Console.print("test " + name); try { val actual: Int = closure; @@ -46,13 +46,13 @@ object Test { case exception: Throwable => Console.print(" raised exception " + exception); } - Console.println; + Console.println() } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { check_success("Test1", Test1.run, 1); check_success("Test2", Test2.run, 0); - Console.println; + Console.println() } } diff --git a/test/files/run/boolord.scala b/test/files/run/boolord.scala index 05d06ffea98f..7a827ffc39c0 100644 --- a/test/files/run/boolord.scala +++ b/test/files/run/boolord.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Console.println("false < false = " + (false < false)) Console.println("false < true = " + (false < true)) Console.println("true < false = " + (true < false)) diff --git a/test/files/run/breakout.check b/test/files/run/breakout.check deleted file mode 100644 index 7971496d1f0c..000000000000 --- a/test/files/run/breakout.check +++ /dev/null @@ -1 +0,0 @@ -2, 3, 4 diff --git a/test/files/run/breakout.scala b/test/files/run/breakout.scala deleted file mode 100644 index 8081405bd19b..000000000000 --- a/test/files/run/breakout.scala +++ /dev/null @@ -1,9 +0,0 @@ -import scala.collection.generic._ -import scala.collection._ -import scala.collection.mutable._ - -object Test extends App { - val l = List(1, 2, 3) - val a: Array[Int] = l.map(_ + 1)(breakOut) - println(a.mkString(", ")) -} diff --git 
a/test/files/run/bridges.javaopts b/test/files/run/bridges.javaopts deleted file mode 100644 index 3a63111bf2fd..000000000000 --- a/test/files/run/bridges.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xss128M diff --git a/test/files/run/bridges.scala b/test/files/run/bridges.scala index eb036bd781dc..dfe7efad8b74 100644 --- a/test/files/run/bridges.scala +++ b/test/files/run/bridges.scala @@ -1,3 +1,5 @@ +//> using javaOpt -Xss128M + //############################################################################ // Test bridge methods //############################################################################ @@ -11,7 +13,7 @@ object Help { val max: Int = 4; var next: Int = 0; var vars: Array[String] = new Array[String](max); - def init: Unit = { + def init(): Unit = { var i = 0; while (i < max) { vars(i) = null; i = i + 1; } next = 0; @@ -23,14 +25,18 @@ object Help { while (i < max) { if (vars(i) != null) b = false; i = i + 1; } b; } - def print: Unit = { + def print(): Unit = { var i = 0; while (i < max) { if (i > 0) Console.print(", "); Console.print(vars(i)); i = i + 1; } } - def foo = { vars(next) = "foo"; next = next + 1; } - def bar = { vars(next) = "bar"; next = next + 1; } - def mix = { vars(next) = "mix"; next = next + 1; } - def sub = { vars(next) = "sub"; next = next + 1; } + @annotation.nowarn("cat=lint-nullary-unit") + def foo: Unit = { vars(next) = "foo"; next = next + 1; () } + @annotation.nowarn("cat=lint-nullary-unit") + def bar: Unit = { vars(next) = "bar"; next = next + 1; () } + @annotation.nowarn("cat=lint-nullary-unit") + def mix: Unit = { vars(next) = "mix"; next = next + 1; () } + @annotation.nowarn("cat=lint-nullary-unit") + def sub: Unit = { vars(next) = "sub"; next = next + 1; () } } import Help.foo; @@ -3579,18 +3585,18 @@ object Test { var errors: Int = 0; def test(name: String, test: => Any, count: Int, value: String) = { try { - Help.init; + Help.init() test; if (!Help.check(count, value)) { Console.print(name + " failed: "); - 
Help.print; - Console.println; + Help.print() + Console.println() errors = errors + 1; } } catch { case exception: Throwable => { Console.print(name + " raised exception " + exception); - Console.println; + Console.println() errors = errors + 1; } } @@ -7115,8 +7121,8 @@ object Test { // */test("S_TZIfwFooXIfwBarYIf", new S_TZIfwFooXIfwBarYIf[D], 4, "mix"); if (errors > 0) { - Console.println; - Console.println(errors + " error" + (if (errors > 1) "s" else "")); + Console.println() + Console.println(s"$errors error" + (if (errors > 1) "s" else "")); } } } diff --git a/test/files/run/bugs.check b/test/files/run/bugs.check index 261c74ad15e8..951c06133ec7 100644 --- a/test/files/run/bugs.check +++ b/test/files/run/bugs.check @@ -84,12 +84,6 @@ hello <<< bug 328 >>> bug 328 -<<< bug 396 -A -B -C ->>> bug 396 - <<< bug 399 a >>> bug 399 diff --git a/test/files/run/bugs.scala b/test/files/run/bugs.scala index 02849b581789..1ab3ec0aae1a 100644 --- a/test/files/run/bugs.scala +++ b/test/files/run/bugs.scala @@ -1,3 +1,8 @@ +//> using options -Werror -Xlint:deprecation +// + +import annotation.unused + //############################################################################ // Bugs //############################################################################ @@ -7,7 +12,7 @@ object Bug98Test { object MyCase { def name = "mycase" } - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { println(MyCase.name) } } @@ -31,8 +36,8 @@ object Bug120Test { def print[A](str: String, res: A): A = { println(str); res } - def test(args: Array[String]) { - val c = new Bug120C(1) + def test(args: Array[String]): Unit = { + @unused val c = new Bug120C(1) () } } @@ -44,7 +49,7 @@ object Bug135Test { import scala.collection.immutable.TreeMap - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { val myMap:TreeMap[Int, String] = new TreeMap val map1 = myMap + ((42, "The answer")) println(map1.get(42)) @@ -65,17 +70,17 @@ trait Bug142Bar2 { type 
Inner; def foo: Inner; foo; } trait Bug142Bar3 { class Inner; def foo: Inner = {Console.println("ok"); null}; } trait Bug142Bar4 { class Inner; def foo: Inner; foo; } -object Bug142Test1 extends Bug142Foo1 with Bug142Bar1 { def test(args: Array[String]) {} } -object Bug142Test2 extends Bug142Foo2 with Bug142Bar2 { def test(args: Array[String]) {} } -object Bug142Test3 extends Bug142Foo3 with Bug142Bar3 { def test(args: Array[String]) {} } -object Bug142Test4 extends Bug142Foo4 with Bug142Bar4 { def test(args: Array[String]) {} } -object Bug142Test5 extends Bug142Foo1 with Bug142Bar1 { def test(args: Array[String]) {} } -object Bug142Test6 extends Bug142Foo2 with Bug142Bar2 { def test(args: Array[String]) {} } -object Bug142Test7 extends Bug142Foo3 with Bug142Bar3 { def test(args: Array[String]) {} } -object Bug142Test8 extends Bug142Foo4 with Bug142Bar4 { def test(args: Array[String]) {} } +object Bug142Test1 extends Bug142Foo1 with Bug142Bar1 { def test(args: Array[String]): Unit = {} } +object Bug142Test2 extends Bug142Foo2 with Bug142Bar2 { def test(args: Array[String]): Unit = {} } +object Bug142Test3 extends Bug142Foo3 with Bug142Bar3 { def test(args: Array[String]): Unit = {} } +object Bug142Test4 extends Bug142Foo4 with Bug142Bar4 { def test(args: Array[String]): Unit = {} } +object Bug142Test5 extends Bug142Foo1 with Bug142Bar1 { def test(args: Array[String]): Unit = {} } +object Bug142Test6 extends Bug142Foo2 with Bug142Bar2 { def test(args: Array[String]): Unit = {} } +object Bug142Test7 extends Bug142Foo3 with Bug142Bar3 { def test(args: Array[String]): Unit = {} } +object Bug142Test8 extends Bug142Foo4 with Bug142Bar4 { def test(args: Array[String]): Unit = {} } object Bug142Test { - def test(args:Array[String]) { + def test(args:Array[String]): Unit = { Bug142Test1; Bug142Test2; Bug142Test3; @@ -93,7 +98,7 @@ object Bug142Test { object Bug166Test { import scala.collection.mutable.HashMap - def test(args: Array[String]) { + def test(args: 
Array[String]): Unit = { val m: HashMap[String,String] = new HashMap[String, String] m.update("foo","bar") } @@ -110,7 +115,7 @@ class Bug167Node(bar:Int) { } object Bug167Test { - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { if (new Bug167Node(0).foo != 1) println("bug 167"); } } @@ -124,7 +129,7 @@ class Bug168Foo { } object Bug168Test { - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { (new Bug168Foo).foo () } @@ -152,7 +157,7 @@ class Bug174Foo[X] { } object Bug174Test { - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { (new Bug174Foo[Int]).inner.test () } @@ -200,12 +205,12 @@ object Bug199Test { // Bug 213 trait Bug213Foo { - def testAll: Unit; + def testAll(): Unit; def testAllRef: String; } class Bug213Bar extends Bug213Foo { - def testAll = (().asInstanceOf[Nothing] : Nothing); + def testAll() = (().asInstanceOf[Nothing] : Nothing); def testAllRef = ("".asInstanceOf[Null] : Null); } @@ -213,7 +218,7 @@ object Bug213Test { def test(args: Array[String]): Unit = { val foo: Bug213Foo = new Bug213Bar; try { - foo.testAll; + foo.testAll() } catch { case e: ClassCastException => Console.println("Cannot cast unit to Nothing"); @@ -245,7 +250,7 @@ object Bug217Test { object Bug222Test { def test(args:Array[String]): Unit = { - val array: Array[String] = new Array(16); + @unused val array: Array[String] = new Array(16); () } } @@ -271,9 +276,9 @@ object Bug226Test { def id[a](xs: Array[a]): Array[a] = xs; - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { var xs = new Array[Int](1); - class X { xs }; + @unused class X { xs }; xs = id(xs); id(xs); () @@ -294,7 +299,7 @@ object Bug233Test { // Bug 250 object Bug250Test { - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { if (true) null; () } @@ -315,7 +320,7 @@ object Bug257Test { f2(x); } - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { f(sayhello())(sayhi()) } 
} @@ -332,7 +337,7 @@ abstract class Bug266AFoo { object Bug266ATest extends Bug266AFoo { type T = String; - class I1 extends I0 { def f(x: String) { Console.println("hello") } } + class I1 extends I0 { def f(x: String): Unit = { Console.println("hello") } } def test(args: Array[String]): Unit = { new I1; () } } @@ -353,7 +358,7 @@ abstract class Bug266BA1 extends Bug266BA { trait Bug266BB extends Bug266BA { type t = Int; class P1 extends Bug266BB.this.P { - def f(x: Int) { Console.println(x + 1) } + def f(x: Int): Unit = { Console.println(x + 1) } } def mkP = new P1; val in = 3; @@ -367,7 +372,7 @@ object Bug266BTest { // main object Bug266Test { - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { Bug266ATest.test(args); Bug266BTest.test(args); } @@ -378,7 +383,7 @@ object Bug266Test { class Bug316MyIterator extends Iterator[Int] { def hasNext = false - def next = 42 + def next() = 42 } object Bug316Test { @@ -390,33 +395,10 @@ object Bug316Test { // Bug 328 object Bug328Test { - def test0(f: Function1[Int,String]) {} + def test0(f: Function1[Int,String]): Unit = {} def test(args: Array[String]): Unit = test0(args); } -//############################################################################ -// Bug 396 - -trait Bug396A { - class I { - def run = Console.println("A"); - } -} -trait Bug396B extends Bug396A { - class I extends super.I { - override def run = { super.run; Console.println("B"); } - } -} -trait Bug396C extends Bug396A { - trait I extends super.I { - override def run = { super.run; Console.println("C"); } - } -} -object Bug396Test extends Bug396B with Bug396C { - class I2 extends super[Bug396B].I with super[Bug396C].I; - def test(args: Array[String]): Unit = (new I2).run -} - //############################################################################ // Bug 399 @@ -429,7 +411,7 @@ object Bug399Test { (new G).f } - def test(args: Array[String]) { + def test(args: Array[String]): Unit = { Console.println(f("a")); } } @@ 
-439,21 +421,21 @@ object Bug399Test { object Test { var errors: Int = 0 - def test(bug: Int, test: => Unit) { + def test(bug: Int, test: => Unit): Unit = { Console.println("<<< bug " + bug) try { test; } catch { case exception: Throwable => Console.print("Exception in thread \"" + Thread.currentThread + "\" " + exception); - Console.println; + Console.println(); errors += 1 } Console.println(">>> bug " + bug) - Console.println + Console.println() } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { test( 98, Bug98Test.test(args)); test(120, Bug120Test.test(args)); @@ -476,12 +458,11 @@ object Test { test(266, Bug266Test.test(args)); test(316, Bug316Test.test(args)); test(328, Bug328Test.test(args)); - test(396, Bug396Test.test(args)); test(399, Bug399Test.test(args)); if (errors > 0) { - Console.println; - Console.println(errors + " error" + (if (errors > 1) "s" else "")); + Console.println(); + Console.println(s"$errors error" + (if (errors > 1) "s" else "")); } } } diff --git a/test/files/run/byname-implicits-17.scala b/test/files/run/byname-implicits-17.scala new file mode 100644 index 000000000000..676f7fe82c8d --- /dev/null +++ b/test/files/run/byname-implicits-17.scala @@ -0,0 +1,76 @@ +trait Generic[T] { + type Repr + def to(t: T): Repr + def from(r: Repr): T +} + +object Generic { + type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 } +} + +object ListInstances { + type LRepr[T] = Either[::[T], Either[Nil.type, Unit]] + type CRepr[T] = (T, (List[T], Unit)) + type NRepr = Unit + + implicit def genList[T]: Generic.Aux[List[T], LRepr[T]] = new Generic[List[T]] { + type Repr = LRepr[T] + def to(t: List[T]): Repr = t match { + case hd :: tl => Left(::(hd, tl)) + case n@Nil => Right(Left(Nil)) + } + def from(r: Repr): List[T] = (r: @unchecked) match { + case Left(c) => c + case Right(Left(n)) => n + } + } + + implicit def genCons[T]: Generic.Aux[::[T], CRepr[T]] = new Generic[::[T]] { + type Repr = CRepr[T] + def to(t: ::[T]): Repr = 
(t.head, (t.tail, ())) + def from(r: Repr): ::[T] = ::(r._1, r._2._1) + } + + implicit def genNil: Generic.Aux[Nil.type, NRepr] = new Generic[Nil.type] { + type Repr = NRepr + def to(t: Nil.type): Repr = () + def from(r: Repr): Nil.type = Nil + } +} + +trait Show[T] { + def show(t: T): String +} + +object Show { + def apply[T](implicit st: => Show[T]): Show[T] = st + + implicit def showUnit: Show[Unit] = new Show[Unit] { + def show(u: Unit): String = "()" + } + + implicit def showInt: Show[Int] = new Show[Int] { + def show(i: Int): String = i.toString + } + + implicit def showPair[T, U](implicit st: Show[T], su: Show[U]): Show[(T, U)] = new Show[(T, U)] { + def show(t: (T, U)): String = s"(${st.show(t._1)}, ${su.show(t._2)}" + } + + implicit def showEither[T, U](implicit st: Show[T], su: Show[U]): Show[Either[T, U]] = new Show[Either[T, U]] { + def show(t: Either[T, U]): String = t match { + case Left(t) => s"Left(${st.show(t)})" + case Right(u) => s"Right(${su.show(u)})" + } + } + + implicit def showGen[T, R](implicit gen: Generic.Aux[T, R], sr: => Show[R]): Show[T] = new Show[T] { + def show(t: T) = sr.show(gen.to(t)) + } +} + +object Test extends App { + import ListInstances._ + val sl = Show[List[Int]] + assert(sl.show(List(1, 2, 3)) == "Left((1, (Left((2, (Left((3, (Right(Left(())), ()), ()), ())") +} diff --git a/test/files/run/byname-implicits-28.scala b/test/files/run/byname-implicits-28.scala new file mode 100644 index 000000000000..722952380fc3 --- /dev/null +++ b/test/files/run/byname-implicits-28.scala @@ -0,0 +1,173 @@ +object Loop0 { + trait Link0 { + def next0: Link0 + } + object Link0 { + implicit def mkLink0(implicit l0: => Link0): Link0 = + new Link0 { lazy val next0 = l0 } + } + + def check: Boolean = { + val loop = implicitly[Link0] + loop eq loop.next0 + } +} + +object Loop1 { + trait Link0 { + def next0: Link0 + def next1: Link1 + } + object Link0 { + implicit def mkLink0(implicit l0: => Link0, l1: => Link1): Link0 = + new Link0 { lazy val 
next0 = l0 ; lazy val next1 = l1 } + } + + trait Link1 { + def next0: Link0 + def next1: Link1 + } + object Link1 { + implicit def mkLink1(implicit l0: => Link0, l1: => Link1): Link1 = + new Link1 { lazy val next0 = l0 ; lazy val next1 = l1 } + } + + def check: Boolean = { + val loop = implicitly[Link0] + loop eq loop.next0 + loop.next1 eq loop.next1.next1 + } +} + +object Loop2 { + trait Link0 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + } + object Link0 { + implicit def mkLink0(implicit l0: => Link0, l1: => Link1, l2: => Link2): Link0 = + new Link0 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 } + } + + trait Link1 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + } + object Link1 { + implicit def mkLink1(implicit l0: => Link0, l1: => Link1, l2: => Link2): Link1 = + new Link1 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 } + } + + trait Link2 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + } + object Link2 { + implicit def mkLink2(implicit l0: => Link0, l1: => Link1, l2: => Link2): Link2 = + new Link2 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 } + } + + def check: Boolean = { + val loop = implicitly[Link0] + loop eq loop.next0 + loop.next1 eq loop.next1.next1 + loop.next2 eq loop.next2.next2 + } +} + +object Loop3 { + trait Link0 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + def next3: Link3 + } + object Link0 { + implicit def mkLink0(implicit l0: => Link0, l1: => Link1, l2: => Link2, l3: => Link3): Link0 = + new Link0 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 ; lazy val next3 = l3 } + } + + trait Link1 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + def next3: Link3 + } + object Link1 { + implicit def mkLink1(implicit l0: => Link0, l1: => Link1, l2: => Link2, l3: => Link3): Link1 = + new Link1 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 ; lazy val next3 = l3 } + } + + trait Link2 { + def 
next0: Link0 + def next1: Link1 + def next2: Link2 + def next3: Link3 + } + object Link2 { + implicit def mkLink2(implicit l0: => Link0, l1: => Link1, l2: => Link2, l3: => Link3): Link2 = + new Link2 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 ; lazy val next3 = l3 } + } + + trait Link3 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + def next3: Link3 + } + object Link3 { + implicit def mkLink3(implicit l0: => Link0, l1: => Link1, l2: => Link2, l3: => Link3): Link3 = + new Link3 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 ; lazy val next3 = l3 } + } + + def check: Boolean = { + val loop = implicitly[Link0] + loop eq loop.next0 + loop.next1 eq loop.next1.next1 + loop.next2 eq loop.next2.next2 + loop.next3 eq loop.next3.next3 + } +} + +object Loop2b { + trait Link0 { + def next0: Link0 + def next1: Link1 + def next2: Link2 + } + object Link0 { + implicit def mkLink0(implicit l0: => Link0, l1: => Link1, l2: => Link2): Link0 = + new Link0 { lazy val next0 = l0 ; lazy val next1 = l1 ; lazy val next2 = l2 } + } + + trait Link1 { + def next1: Link1 + def next2: Link2 + } + object Link1 { + implicit def mkLink1(implicit l1: => Link1, l2: => Link2): Link1 = + new Link1 { lazy val next1 = l1 ; lazy val next2 = l2 } + } + + trait Link2 { + def next2: Link2 + } + object Link2 { + implicit def mkLink2(implicit l2: => Link2): Link2 = + new Link2 { lazy val next2 = l2 } + } + + def check: Boolean = { + val loop = implicitly[Link0] + loop eq loop.next0 + loop.next1 eq loop.next1.next1 + loop.next2 eq loop.next2.next2 + } +} + +object Test extends App { + assert(Loop0.check && Loop1.check && Loop2.check && Loop3.check && Loop2b.check) +} diff --git a/test/files/run/byname-implicits-29.scala b/test/files/run/byname-implicits-29.scala new file mode 100644 index 000000000000..244a5876fa75 --- /dev/null +++ b/test/files/run/byname-implicits-29.scala @@ -0,0 +1,16 @@ +object Test { + class Foo(val bar: Bar) + class Bar(baz0: => 
Baz) { + lazy val baz = baz0 + } + class Baz(val foo: Foo) + + implicit def foo(implicit bar: Bar): Foo = new Foo(bar) + implicit def bar(implicit baz: => Baz): Bar = new Bar(baz) + implicit def baz(implicit foo: Foo): Baz = new Baz(foo) + + def main(args: Array[String]): Unit = { + val foo = implicitly[Foo] + assert(foo.bar.baz.foo eq foo) + } +} diff --git a/test/files/run/byname-implicits-30.scala b/test/files/run/byname-implicits-30.scala new file mode 100644 index 000000000000..de105eef0391 --- /dev/null +++ b/test/files/run/byname-implicits-30.scala @@ -0,0 +1,17 @@ +object Test { + class Loop[T, U](self0: => Loop[T, U], swap0: => Loop[U, T]) { + lazy val self: Loop[T, U] = self0 + lazy val swap: Loop[U, T] = swap0 + } + + object Loop { + implicit def mkLoop[T, U](implicit tu: => Loop[T, U], ut: => Loop[U, T]): Loop[T, U] = new Loop(tu, ut) + } + + def main(args: Array[String]): Unit = { + val loop = implicitly[Loop[Int, String]] + assert(loop.self eq loop) + assert(loop.swap.self eq loop.swap) + assert(loop.swap.swap eq loop) + } +} diff --git a/test/files/run/byname-implicits-4.scala b/test/files/run/byname-implicits-4.scala new file mode 100644 index 000000000000..6e223adba49d --- /dev/null +++ b/test/files/run/byname-implicits-4.scala @@ -0,0 +1,10 @@ +trait Foo { def next: Foo } +object Foo { + implicit def foo(implicit rec: => Foo): Foo = new Foo { def next = rec } +} + +object Test extends App { + val foo = implicitly[Foo] + assert(foo eq foo.next) +} + diff --git a/test/files/run/byname-implicits-5.scala b/test/files/run/byname-implicits-5.scala new file mode 100644 index 000000000000..767d049a650e --- /dev/null +++ b/test/files/run/byname-implicits-5.scala @@ -0,0 +1,14 @@ + +object Test extends App { + var x = 0 + + def foo(implicit y0: => Int): Int = { + x = 0 + val y = y0 // side effect + y*x + } + + implicit lazy val bar: Int = { x = 1 ; 23 } + + assert(foo == 23) +} diff --git a/test/files/run/byname-implicits-6.scala 
b/test/files/run/byname-implicits-6.scala new file mode 100644 index 000000000000..9ee2594abcd8 --- /dev/null +++ b/test/files/run/byname-implicits-6.scala @@ -0,0 +1,131 @@ +/* + * Demo of using by name implicits to resolve (hidden) divergence issues when + * traversing recursive generic structures. + * + * See https://stackoverflow.com/questions/25923974 + */ +sealed trait HList +object HList { + implicit class Syntax[L <: HList](l: L) { + def ::[U](u: U): U :: L = new ::(u, l) + } +} + +sealed trait HNil extends HList +object HNil extends HNil +case class ::[+H, +T <: HList](head : H, tail : T) extends HList + +trait Generic[T] { + type Repr + def to(t: T): Repr + def from(r: Repr): T +} + +object Generic { + type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 } +} + +trait DeepHLister[R] { + type Out + def apply(r: R): Out +} + +object DeepHLister extends DeepHLister0 { + def apply[R](implicit dh: DeepHLister[R]): Aux[R, dh.Out] = dh + + implicit def consDeepHLister[H, OutH <: HList, T <: HList, OutT <: HList](implicit + dhh: DeepHLister.Aux[H, OutH], + dht: DeepHLister.Aux[T, OutT] + ): Aux[H :: T, OutH :: OutT] = new DeepHLister[H :: T] { + type Out = OutH :: OutT + def apply(r: H :: T) = dhh(r.head) :: dht(r.tail) + } + + implicit object hnilDeepHLister extends DeepHLister[HNil] { + type Out = HNil + def apply(r: HNil) = HNil + } +} + +trait DeepHLister0 extends DeepHLister1 { + implicit def genDeepHLister[T, R <: HList, OutR <: HList](implicit + gen: Generic.Aux[T, R], + dhr: => DeepHLister.Aux[R, OutR] + ): Aux[T, OutR] = new DeepHLister[T] { + type Out = OutR + def apply(r: T) = dhr(gen.to(r)) + } +} + +trait DeepHLister1 { + type Aux[R, Out0] = DeepHLister[R] { type Out = Out0 } + + implicit def default[T]: Aux[T, T] = new DeepHLister[T] { + type Out = T + def apply(r: T): T = r + } +} + +object Test extends App { +} + +object DeepHListerDemo extends App { + case class A(x: Int, y: String) + object A { + type ARepr = Int :: String :: HNil + implicit val 
aGen: Generic.Aux[A, ARepr] = new Generic[A] { + type Repr = ARepr + def to(t: A): Repr = t.x :: t.y :: HNil + def from(r: Repr): A = A(r.head, r.tail.head) + } + } + + case class B(x: A, y: A) + object B { + type BRepr = A :: A :: HNil + implicit val bGen: Generic.Aux[B, BRepr] = new Generic[B] { + type Repr = BRepr + def to(t: B): Repr = t.x :: t.y :: HNil + def from(r: Repr): B = B(r.head, r.tail.head) + } + } + + case class C(b: B, a: A) + object C { + type CRepr = B :: A :: HNil + implicit val cGen: Generic.Aux[C, CRepr] = new Generic[C] { + type Repr = CRepr + def to(t: C): Repr = t.b :: t.a :: HNil + def from(r: Repr): C = C(r.head, r.tail.head) + } + } + + case class D(a: A, b: B) + object D { + type DRepr = A :: B :: HNil + implicit val dGen: Generic.Aux[D, DRepr] = new Generic[D] { + type Repr = DRepr + def to(t: D): Repr = t.a :: t.b :: HNil + def from(r: Repr): D = D(r.head, r.tail.head) + } + } + + def typed[T](t : => T): Unit = {} + + type ARepr = Int :: String :: HNil + type BRepr = ARepr :: ARepr :: HNil + type CRepr = BRepr :: ARepr :: HNil + type DRepr = ARepr :: BRepr :: HNil + + val adhl = DeepHLister[A :: HNil] + typed[DeepHLister.Aux[A :: HNil, ARepr :: HNil]](adhl) + + val bdhl = DeepHLister[B :: HNil] + typed[DeepHLister.Aux[B :: HNil, BRepr :: HNil]](bdhl) + + val cdhl = DeepHLister[C :: HNil] + typed[DeepHLister.Aux[C :: HNil, CRepr :: HNil]](cdhl) + + val ddhl = DeepHLister[D :: HNil] + typed[DeepHLister.Aux[D :: HNil, DRepr :: HNil]](ddhl) +} diff --git a/test/files/run/byname.check b/test/files/run/byname.check index 7e49eedec111..d558f5ccb853 100644 --- a/test/files/run/byname.check +++ b/test/files/run/byname.check @@ -12,9 +12,6 @@ test varargs r completed properly test all completed properly test all r completed properly test all s completed properly -test c00 completed properly -test c00 r completed properly -test c00 rr completed properly test cbb completed properly test cbb r completed properly test cbb rr completed properly diff 
--git a/test/files/run/byname.scala b/test/files/run/byname.scala index 132555234892..ba0e1c0dab2b 100644 --- a/test/files/run/byname.scala +++ b/test/files/run/byname.scala @@ -1,6 +1,6 @@ object Test extends App { -def test[A](name: String, expect: A, actual: => A) { +def test[A](name: String, expect: A, actual: => A): Unit = { if (expect != actual) throw new AssertionError("test " + name + " failed") else println("test " + name + " completed properly") } @@ -8,7 +8,7 @@ def test[A](name: String, expect: A, actual: => A) { def testNoBraces = 1 test("no braces", 1, testNoBraces) -val testNoBracesR = testNoBraces _ +val testNoBracesR = () => testNoBraces test("no braces r", 1, testNoBracesR()) def testPlain(x: String, y: String): String = x + y @@ -29,7 +29,7 @@ test("old by name s", 3, testOldByNameS(2)) def testRegThenByName(x: Int, y: => Int): Int = x + y test("reg then by name", 7, testRegThenByName(3, 2 * 2)) -val testRegThenByNameS: (Int, =>Int) => Int = testRegThenByName _ +val testRegThenByNameS: (Int, => Int) => Int = testRegThenByName _ test("reg then by name s", 8, testRegThenByNameS(2, 12 / 2)) def testVarargs(x: Int*) = x.reduceLeft((x: Int, y: Int) => x + y) @@ -44,21 +44,11 @@ test("all", 5, testAll(1, 2, 22, 23)) val testAllR = testAll _ test("all r", 7, testAllR(2, 3, Seq(34, 35))) -val testAllS: (Int, =>Int, Int*) => Int = testAll _ -test("all s", 8, testAllS(1, 5, 78, 89)) +val testAllS: (Int, =>Int, Seq[Int]) => Int = testAll _ +test("all s", 8, testAllS(1, 5, Seq(78, 89))) // test currying -def testC00()(): Int = 1 -test("c00", 1, testC00()()) - -val testC00R = testC00 _ -test("c00 r", 1, testC00R()()) - -val testC00RR = testC00() _ -test("c00 rr", 1, testC00RR()) - - def testCBB(a: => Int)(b: => Int) = a + b test("cbb", 3, testCBB(1)(2)) @@ -75,10 +65,8 @@ test("cvv", 3, testCVV(1, 2)("", 8)) val testCVVR = testCVV _ test("cvv r", 3, testCVVR(Seq(1))("", Seq(8, 9))) -val testCVVRS: (String, Int*) => Int = testCVV(2, 3) -test("cvv rs", 4, 
testCVVRS("", 5, 6)) +val testCVVRS: (String, Seq[Int]) => Int = testCVV(2, 3) +test("cvv rs", 4, testCVVRS("", Seq(5, 6))) println("$") - -// vim: set ts=4 sw=4 et: } diff --git a/test/files/run/bytecodecs.scala b/test/files/run/bytecodecs.scala index 837be0edd974..b8caaa038528 100644 --- a/test/files/run/bytecodecs.scala +++ b/test/files/run/bytecodecs.scala @@ -1,29 +1,30 @@ +import scala.tools.partest.Util.ArrayDeep import scala.reflect.internal.pickling.ByteCodecs._ object Test { - def test8to7(xs: Array[Byte]) { + def test8to7(xs: Array[Byte]): Unit = { val ys = encode8to7(xs) decode7to8(ys, ys.length) assert(ys.take(xs.length).deep == xs.deep, "test8to7("+xs.deep+") failed, result = "+ys.take(xs.length).deep) } - def testAll(xs: Array[Byte]) { + def testAll(xs: Array[Byte]): Unit = { val ys = encode(xs) decode(ys) assert(ys.take(xs.length).deep == xs.deep, "testAll("+xs.deep+") failed, result = "+ys.take(xs.length).deep) } - def test(inputs: Array[Byte]*) { + def test(inputs: Array[Byte]*): Unit = { for (input <- inputs) { test8to7(input) testAll(input) } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { test( Array(1, 2, 3), Array(1, 2, 3, 4, 5, 6, 7), @@ -31,7 +32,7 @@ object Test { Array(1, 3, -1, -128, 0, 0, -128, 1, 2, 3)) val rand = new scala.util.Random() for (i <- 1 until 5000) { - var xs = new Array[Byte](i) + val xs = new Array[Byte](i) rand.nextBytes(xs) test(xs) } diff --git a/test/files/run/case-class-23-redefined-unapply.check b/test/files/run/case-class-23-redefined-unapply.check new file mode 100644 index 000000000000..99a45a1c9126 --- /dev/null +++ b/test/files/run/case-class-23-redefined-unapply.check @@ -0,0 +1 @@ +(1,3) diff --git a/test/files/run/case-class-23-redefined-unapply.scala b/test/files/run/case-class-23-redefined-unapply.scala new file mode 100644 index 000000000000..205c9abd3b93 --- /dev/null +++ b/test/files/run/case-class-23-redefined-unapply.scala @@ -0,0 +1,35 @@ +case class TwentyThree( + _1: 
Int, + _2: Int, + _3: Int, + _4: Int, + _5: Int, + _6: Int, + _7: Int, + _8: Int, + _9: Int, + _10: Int, + _11: Int, + _12: Int, + _13: Int, + _14: Int, + _15: Int, + _16: Int, + _17: Int, + _18: Int, + _19: Int, + _20: Int, + _21: Int, + _22: Int, + _23: Int +) + +object TwentyThree { + def unapply(x: TwentyThree): Some[(Int, Int, Int)] = Some(x._1, x._2, x._3) +} + +object Test extends App { + val x = new TwentyThree(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) + val TwentyThree(a, 2, b) = x + println((a, b)) +} diff --git a/test/files/run/caseClassEquality.scala b/test/files/run/caseClassEquality.scala index c11d7ad0d19c..448d50bbedb4 100644 --- a/test/files/run/caseClassEquality.scala +++ b/test/files/run/caseClassEquality.scala @@ -1,9 +1,7 @@ object Test { abstract class A1 case class C1(x: Int) extends A1 - class C2(x: Int) extends C1(x) { - override def productPrefix = "Shazbot!" - } + class C2(x: Int) extends C1(x) class C3(x: Int) extends C1(x) { override def canEqual(other: Any) = other.isInstanceOf[C3] override def equals(other: Any) = other match { diff --git a/test/files/run/caseClassHash.check b/test/files/run/caseClassHash.check index b5a6f08e9980..cf37fdb19f48 100644 --- a/test/files/run/caseClassHash.check +++ b/test/files/run/caseClassHash.check @@ -1,9 +1,9 @@ Foo(true,-1,-1,d,-5,-10,500.0,500.0,List(),5.0) Foo(true,-1,-1,d,-5,-10,500.0,500.0,List(),5) -1383698062 -1383698062 +930449446 +930449446 true -## method 1: 1383698062 -## method 2: 1383698062 - Murmur 1: 1383698062 - Murmur 2: 1383698062 +## method 1: 930449446 +## method 2: 930449446 + Murmur 1: 930449446 + Murmur 2: 930449446 diff --git a/test/files/run/caseClassHash.scala b/test/files/run/caseClassHash.scala index c5cb09c355d6..af7c7b999ac5 100644 --- a/test/files/run/caseClassHash.scala +++ b/test/files/run/caseClassHash.scala @@ -5,14 +5,14 @@ object Test { def main(args: Array[String]): Unit = { val foo1 = mkFoo[Double](5.0d) - val foo2 = 
mkFoo[Long](5l) + val foo2 = mkFoo[Long](5L) List(foo1, foo2, foo1.##, foo2.##, foo1 == foo2) foreach println println("## method 1: " + foo1.##) println("## method 2: " + foo2.##) - println(" Murmur 1: " + scala.util.hashing.MurmurHash3.productHash(foo1)) - println(" Murmur 2: " + scala.util.hashing.MurmurHash3.productHash(foo2)) + println(" Murmur 1: " + scala.util.hashing.MurmurHash3.caseClassHash(foo1)) + println(" Murmur 2: " + scala.util.hashing.MurmurHash3.caseClassHash(foo2)) } } diff --git a/test/files/run/case_class_equals_fields_sort.scala b/test/files/run/case_class_equals_fields_sort.scala new file mode 100644 index 000000000000..86f0a9a128ae --- /dev/null +++ b/test/files/run/case_class_equals_fields_sort.scala @@ -0,0 +1,9 @@ +object Test { + def main(args: Array[String]): Unit = { + class X { override def equals(x: Any) = throw new Exception("shouldn't be called") } + case class C(x: X, i: Int) + val x = new X + + C(x, 1) == C(x, 2) + } +} diff --git a/test/files/run/caseclasses.scala b/test/files/run/caseclasses.scala index 10c0916dc061..fd482ecef801 100644 --- a/test/files/run/caseclasses.scala +++ b/test/files/run/caseclasses.scala @@ -27,6 +27,7 @@ object Test extends App { (f(2): AnyRef) match { case Foo(1) => Console.println("OK") case Bar() => Console.println("NO") + case x => throw new MatchError(x) } try { Bar() productElement 3 diff --git a/test/files/run/checked.check b/test/files/run/checked.check index a71a51ee4a8a..338d222a00ca 100644 --- a/test/files/run/checked.check +++ b/test/files/run/checked.check @@ -1,7 +1,7 @@ sum = 12 -[OK] Caught UFE: Uninitialized field: checked.scala: 43 +[OK] Caught UFE: Uninitialized field: checked.scala: 44 2 -[OK] Caught UFE: Uninitialized field: checked.scala: 74 +[OK] Caught UFE: Uninitialized field: checked.scala: 75 x = 10 y = 11 lz1 = 1 diff --git a/test/files/run/checked.scala b/test/files/run/checked.scala index 62522da34690..a7352e859832 100644 --- a/test/files/run/checked.scala +++ 
b/test/files/run/checked.scala @@ -1,5 +1,6 @@ -// scalac: -Xcheckinit -nowarn -/* Test checked initializers. Needs to be run with -Xexperimental and -checkinit +//> using options -Xcheckinit -nowarn +// +/* Test checked initializers. Needs to be run with -checkinit */ // 0 inherited fields diff --git a/test/files/run/checkinit.check b/test/files/run/checkinit.check index 19941473ac0c..5c3adaeee5d3 100644 --- a/test/files/run/checkinit.check +++ b/test/files/run/checkinit.check @@ -1,2 +1,2 @@ -Uninitialized field: checkinit.scala: 27 -Uninitialized field: checkinit.scala: 31 +Uninitialized field: checkinit.scala: 28 +Uninitialized field: checkinit.scala: 32 diff --git a/test/files/run/checkinit.scala b/test/files/run/checkinit.scala index e1da600c20ba..7d13bdb05bda 100644 --- a/test/files/run/checkinit.scala +++ b/test/files/run/checkinit.scala @@ -1,4 +1,5 @@ -// scalac: -Xcheckinit +//> using options -Xcheckinit +// class C(val x: AnyRef, val y: AnyRef) class D(val x: AnyRef, val y: AnyRef) { val z: AnyRef = "" diff --git a/test/files/run/class-symbol-contravariant.check b/test/files/run/class-symbol-contravariant.check index 5166fce96a95..0d5cc6a142bc 100644 --- a/test/files/run/class-symbol-contravariant.check +++ b/test/files/run/class-symbol-contravariant.check @@ -5,28 +5,28 @@ import scala.tools.nsc._, intp.global._, definitions._ Try :help or completions for vals._ and power._ scala> val u = rootMirror.universe -u: $r.intp.global.type = +val u: $r.intp.global.type = scala> import u._, scala.reflect.internal.Flags import u._ import scala.reflect.internal.Flags scala> class C -defined class C +class C scala> val sym = u.typeOf[C].typeSymbol -sym: u.Symbol = class C +val sym: u.Symbol = class C scala> sym.isContravariant -res0: Boolean = false +val res0: Boolean = false scala> sym setFlag Flags.INCONSTRUCTOR -res1: sym.type = class C +val res1: sym.type = class C scala> sym.isClassLocalToConstructor -res2: Boolean = true +val res2: Boolean = true scala> 
sym.isContravariant // was true -res3: Boolean = false +val res3: Boolean = false scala> :quit diff --git a/test/files/run/class-symbol-contravariant.scala b/test/files/run/class-symbol-contravariant.scala index 6a84944e3bbc..57d963ce2a23 100644 --- a/test/files/run/class-symbol-contravariant.scala +++ b/test/files/run/class-symbol-contravariant.scala @@ -12,4 +12,4 @@ object Test extends ReplTest { |sym.isClassLocalToConstructor |sym.isContravariant // was true |""".stripMargin.trim -} \ No newline at end of file +} diff --git a/test/files/run/classOfObjectType.scala b/test/files/run/classOfObjectType.scala new file mode 100644 index 000000000000..2df7d614f709 --- /dev/null +++ b/test/files/run/classOfObjectType.scala @@ -0,0 +1,7 @@ + +object Test { + object Bar + def main(args: Array[String]): Unit = { + assert(Bar.getClass == classOf[Bar.type] ) + } +} \ No newline at end of file diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala index 9a47a067f9d5..167f4698e66b 100644 --- a/test/files/run/classfile-format-51.scala +++ b/test/files/run/classfile-format-51.scala @@ -1,8 +1,9 @@ import java.io.{File, FileOutputStream} -import scala.tools.partest._ +import scala.tools.partest.DirectTest +import scala.tools.partest.nest.StreamCapture import scala.tools.asm -import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes} +import asm.{ClassWriter, Handle, Opcodes} import Opcodes._ // This test ensures that we can read JDK 7 (classfile format 51) files, including those @@ -16,9 +17,9 @@ import Opcodes._ // verify. 
So the test includes a version check that short-circuits the whole test // on JDK 6 object Test extends DirectTest { - override def extraSettings: String = "-opt:l:inline -opt-inline-from:** -usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = s"-opt:inline:** -usejavacp -cp ${testOutput.path}" - def generateClass() { + def generateClass(): Unit = { val invokerClassName = "DynamicInvoker" val bootstrapMethodName = "bootstrap" val bootStrapMethodType = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;" @@ -104,22 +105,8 @@ object Driver { println(invoker.test()) } """ - - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - try { - // this test is only valid under JDK 1.7+ - testUnderJavaAtLeast("1.7") { - generateClass() - compile() - () - } otherwise { - () - } - } - finally - System.setErr(prevErr) + override def show(): Unit = StreamCapture.redirErr { + generateClass() + compile() } } diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala index b64837a360ec..efd9ee88d767 100644 --- a/test/files/run/classfile-format-52.scala +++ b/test/files/run/classfile-format-52.scala @@ -1,8 +1,9 @@ import java.io.{File, FileOutputStream} -import scala.tools.partest._ +import scala.tools.partest.DirectTest +import scala.tools.partest.nest.StreamCapture import scala.tools.asm -import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes} +import asm.{ClassWriter, Opcodes} import Opcodes._ // This test ensures that we can read JDK 8 (classfile format 52) files, including those @@ -13,16 +14,16 @@ import Opcodes._ // By its nature the test can only work on JDK 8+ because under JDK 7- the // interface won't verify. 
object Test extends DirectTest { - override def extraSettings: String = "-opt:l:inline -opt-inline-from:** -usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = s"-opt:inline:** -usejavacp -cp ${testOutput.path}" - def generateInterface() { + def generateInterface(): Unit = { val interfaceName = "HasDefaultMethod" val methodType = "()Ljava/lang/String;" val cw = new ClassWriter(0) cw.visit(52, ACC_PUBLIC+ACC_ABSTRACT+ACC_INTERFACE, interfaceName, null, "java/lang/Object", null) - def createMethod(flags:Int, name: String) { + def createMethod(flags:Int, name: String): Unit = { val method = cw.visitMethod(flags, name, methodType, null, null) method.visitCode() method.visitLdcInsn(s"hello from $name") @@ -54,23 +55,10 @@ class Driver extends HasDefaultMethod { } """ - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - try { - // this test is only valid under JDK 1.8+ - testUnderJavaAtLeast("1.8") { - generateInterface() - compile() - Class.forName("Driver").getDeclaredConstructor().newInstance() - () - } otherwise { - println("hello from publicMethod") - println("hello from staticMethod") - } - } - finally - System.setErr(prevErr) + override def show(): Unit = StreamCapture.redirErr { + generateInterface() + compile() + Class.forName("Driver").getDeclaredConstructor().newInstance() + () } } diff --git a/test/files/run/classmanifests_new_alias.scala b/test/files/run/classmanifests_new_alias.scala index 777bd5dd6d45..2a6ae7a4046f 100644 --- a/test/files/run/classmanifests_new_alias.scala +++ b/test/files/run/classmanifests_new_alias.scala @@ -1,6 +1,7 @@ @deprecated("Suppress warnings", since="2.11") object Test extends App { + import scala.reflect.ClassManifest type CM[T] = ClassManifest[T] println(implicitly[CM[Int]]) println(implicitly[CM[Int]] eq Manifest.Int) diff --git a/test/files/run/classmanifests_new_core.scala 
b/test/files/run/classmanifests_new_core.scala index 0a9c58e8e106..23a5964fdf40 100644 --- a/test/files/run/classmanifests_new_core.scala +++ b/test/files/run/classmanifests_new_core.scala @@ -1,5 +1,7 @@ + @deprecated("Suppress warnings", since="2.11") object Test extends App { - println(classManifest[Int]) - println(classManifest[Int] eq Manifest.Int) + import scala.reflect.ClassManifest + println(implicitly[ClassManifest[Int]]) + println(implicitly[ClassManifest[Int]] eq Manifest.Int) } diff --git a/test/files/run/classof.scala b/test/files/run/classof.scala index 257829e9768f..9c6b263463e3 100644 --- a/test/files/run/classof.scala +++ b/test/files/run/classof.scala @@ -2,7 +2,7 @@ class SomeClass object Test { def main(args: Array[String]): Unit = { - val cls: Predef.Class[SomeClass] = classOf[SomeClass] + @annotation.unused val cls: Predef.Class[SomeClass] = classOf[SomeClass] println("Value types:") println(classOf[Unit]) println(classOf[Boolean]) diff --git a/test/files/run/classtags_contextbound.scala b/test/files/run/classtags_contextbound.scala index 2f12792154c3..f4a706ed3afd 100644 --- a/test/files/run/classtags_contextbound.scala +++ b/test/files/run/classtags_contextbound.scala @@ -1,7 +1,7 @@ -import scala.reflect.{ClassTag, classTag} +import scala.reflect.ClassTag object Test extends App { def mkArray[T: ClassTag] = Array[T]() def foo[T: ClassTag] = mkArray[T] println(foo[Int].getClass) -} \ No newline at end of file +} diff --git a/test/files/run/classtags_core.scala b/test/files/run/classtags_core.scala index 0e174d824396..fc93b020f0b8 100644 --- a/test/files/run/classtags_core.scala +++ b/test/files/run/classtags_core.scala @@ -1,4 +1,4 @@ -import scala.reflect.{ClassTag, classTag} +import scala.reflect.ClassTag object Test extends App { println(implicitly[ClassTag[Byte]] eq ClassTag.Byte) @@ -31,4 +31,4 @@ object Test extends App { println(implicitly[ClassTag[Null]]) println(implicitly[ClassTag[Nothing]] eq ClassTag.Nothing) 
println(implicitly[ClassTag[Nothing]]) -} \ No newline at end of file +} diff --git a/test/files/run/classtags_multi.scala b/test/files/run/classtags_multi.scala index b4b47bcf0e87..0a054b16cdd1 100644 --- a/test/files/run/classtags_multi.scala +++ b/test/files/run/classtags_multi.scala @@ -1,4 +1,4 @@ -import scala.reflect.{ClassTag, classTag} +import scala.reflect.classTag object Test extends App { println(classTag[Int]) @@ -6,4 +6,4 @@ object Test extends App { println(classTag[Array[Array[Int]]]) println(classTag[Array[Array[Array[Int]]]]) println(classTag[Array[Array[Array[Array[Int]]]]]) -} \ No newline at end of file +} diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check deleted file mode 100644 index 5e43d25f7e84..000000000000 --- a/test/files/run/collection-conversions.check +++ /dev/null @@ -1,126 +0,0 @@ --- Testing iterator --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing Vector --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing List --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing Buffer --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - 
:[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing ParVector --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing ParArray --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing Set --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing SetView --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK --- Testing BufferView --- - :[Direct] Vector : OK - :[Copy] Vector : OK - :[Direct] Buffer : OK - :[Copy] Buffer : OK - :[Direct] GenSeq : OK - :[Copy] GenSeq : OK - :[Copy] Seq : OK - :[Direct] Stream : OK - :[Copy] Stream : OK - :[Direct] Array : OK - :[Copy] Array : OK - :[Copy] ParVector: OK - :[Copy] ParArray : OK \ No newline at end of file diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala deleted file mode 100644 index cd05f68e2626..000000000000 --- a/test/files/run/collection-conversions.scala +++ 
/dev/null @@ -1,64 +0,0 @@ -import collection._ -import mutable.Buffer -import parallel.immutable.ParVector -import parallel.mutable.ParArray -import reflect.ClassTag - -object Test { - - def printResult[A,B](msg: String, obj: A, expected: B)(implicit tag: ClassTag[A], tag2: ClassTag[B]) = { - print(" :" + msg +": ") - val isArray = obj match { - case x: Array[Int] => true - case _ => false - } - val expectedEquals = - if(isArray) obj.asInstanceOf[Array[Int]].toSeq == expected.asInstanceOf[Array[Int]].toSeq - else obj == expected - val tagEquals = tag == tag2 - if(expectedEquals && tagEquals) print("OK") - else print("FAILED") - if(!expectedEquals) print(", " + obj + " != " + expected) - if(!tagEquals) print(", " + tag + " != " + tag2) - println("") - } - - val testVector = Vector(1,2,3) - val testBuffer = Buffer(1,2,3) - val testGenSeq = GenSeq(1,2,3) - val testSeq = Seq(1,2,3) - val testStream = Stream(1,2,3) - val testArray = Array(1,2,3) - val testParVector = ParVector(1,2,3) - val testParArray = ParArray(1,2,3) - - def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = { - val tmp = col - println("-- Testing " + name + " ---") - printResult("[Direct] Vector ", col.toVector, testVector) - printResult("[Copy] Vector ", col.to[Vector], testVector) - printResult("[Direct] Buffer ", col.toBuffer, testBuffer) - printResult("[Copy] Buffer ", col.to[Buffer], testBuffer) - printResult("[Direct] GenSeq ", col.toSeq, testGenSeq) - printResult("[Copy] GenSeq ", col.to[GenSeq], testGenSeq) - printResult("[Copy] Seq ", col.to[Seq], testSeq) - printResult("[Direct] Stream ", col.toStream, testStream) - printResult("[Copy] Stream ", col.to[Stream], testStream) - printResult("[Direct] Array ", col.toArray, testArray) - printResult("[Copy] Array ", col.to[Array], testArray) - printResult("[Copy] ParVector", col.to[ParVector], testParVector) - printResult("[Copy] ParArray ", col.to[ParArray], testParArray) - } - - def main(args: Array[String]): Unit 
= { - testConversion("iterator", (1 to 3).iterator) - testConversion("Vector", Vector(1,2,3)) - testConversion("List", List(1,2,3)) - testConversion("Buffer", Buffer(1,2,3)) - testConversion("ParVector", ParVector(1,2,3)) - testConversion("ParArray", ParArray(1,2,3)) - testConversion("Set", Set(1,2,3)) - testConversion("SetView", Set(1,2,3).view) - testConversion("BufferView", Buffer(1,2,3).view) - } -} diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check index 54442c33afb5..cd28d1a0f885 100644 --- a/test/files/run/collection-stacks.check +++ b/test/files/run/collection-stacks.check @@ -1,17 +1,9 @@ -warning: one deprecation (since 2.11.0) -warning: two deprecations (since 2.12.0) -warning: three deprecations in total; re-run with -deprecation for details -3-2-1: true 3-2-1: true apply 3: true -3: true -1: true 1: true top 3: true -3: true pop -2-1: true 3: true 2-1: true diff --git a/test/files/run/collection-stacks.scala b/test/files/run/collection-stacks.scala index be9fbbf1aed9..86058d982f5f 100644 --- a/test/files/run/collection-stacks.scala +++ b/test/files/run/collection-stacks.scala @@ -1,4 +1,4 @@ -import scala.collection.{ immutable, mutable } +import scala.collection.mutable object Test extends App { def mutableStack[T](xs: T*): mutable.Stack[T] = { @@ -7,32 +7,21 @@ object Test extends App { s } - def immutableStack[T](xs: T*): immutable.Stack[T] = { - immutable.Stack.empty[T] pushAll xs - } - - def check[T](expected: T, got: T) { - println(got + ": " + (expected == got)) + def check[T](expected: T, got: T): Unit = { + println(s"$got: ${expected == got}") } // check #957 - check("3-2-1", immutableStack(1, 2, 3).iterator.mkString("-")) check("3-2-1", mutableStack(1, 2, 3).iterator.mkString("-")) println("apply") - check(3, immutableStack(1, 2, 3).apply(0)) check(3, mutableStack(1, 2, 3).apply(0)) - check(1, immutableStack(1, 2, 3).apply(2)) check(1, mutableStack(1, 2, 3).apply(2)) println("top") - check(3, 
immutableStack(1, 2, 3).top) check(3, mutableStack(1, 2, 3).top) println("pop") - check("2-1", immutableStack(1, 2, 3).pop.mkString("-")) check(3, mutableStack(1, 2, 3).pop()) check("2-1", { val s = mutableStack(1, 2, 3); s.pop(); s.toList.mkString("-") }) } - -// vim: set ts=2 sw=2 et: diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala index 8cbce0b7efe1..b9b8255644d3 100644 --- a/test/files/run/collections.scala +++ b/test/files/run/collections.scala @@ -1,5 +1,4 @@ import scala.collection._ -import scala.compat.Platform.currentTime import scala.language.postfixOps object Test extends App { @@ -8,17 +7,17 @@ object Test extends App { def sum[A](xs: Iterable[Int]) = xs.foldLeft(0)((x, y) => x + y) - def time(op: => Unit) { - val start = currentTime + def time(op: => Unit): Unit = { + val start = System.currentTimeMillis() op - if (printTime) println(" time = "+(currentTime - start)+"ms") + if (printTime) println(" time = "+(System.currentTimeMillis() - start)+"ms") } def test(msg: String, s0: collection.immutable.Set[Int], iters: Int) = { println("***** "+msg+":") var s = s0 s = s + 2 - s = s + (3, 4000, 10000) + s = s + 3 + 4000 + 10000 println("test1: "+sum(s)) time { s = s ++ (List.range(0, iters) map (2*)) @@ -35,8 +34,8 @@ object Test extends App { def test(msg: String, s0: collection.mutable.Set[Int], iters: Int) = { println("***** "+msg+":") var s = s0 - s = s + 2 - s = s + (3, 4000, 10000) + s = s.clone() += 2 + s = s.clone.addAll(List(3, 4000, 10000)) println("test1: "+sum(s)) time { s = s ++ (List.range(0, iters) map (2*)) @@ -54,7 +53,7 @@ object Test extends App { println("***** "+msg+":") var s = s0 s = s + (2 -> 2) - s = s + (3 -> 3, 4000 -> 4000, 10000 -> 10000) + s = s + (3 -> 3) + (4000 -> 4000) + (10000 -> 10000) println("test1: "+sum(s map (_._2))) time { s = s ++ (List.range(0, iters) map (x => x * 2 -> x * 2)) @@ -88,8 +87,8 @@ object Test extends App { def test(msg: String, s0: collection.mutable.Map[Int, Int], 
iters: Int) = { println("***** "+msg+":") var s = s0 - s = s + (2 -> 2) - s = s + (3 -> 3, 4000 -> 4000, 10000 -> 10000) + s = s.clone() += (2 -> 2) + s = s.clone().addAll(List(3 -> 3, 4000 -> 4000, 10000 -> 10000)) println("test1: "+sum(s map (_._2))) time { s = s ++ (List.range(0, iters) map (x => x * 2 -> x * 2)) diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check index 8ba66c0bea74..256ed93a4ac8 100644 --- a/test/files/run/colltest.check +++ b/test/files/run/colltest.check @@ -1,9 +1,5 @@ -warning: two deprecations (since 2.11.0); re-run with -deprecation for details true false true false -true -false -succeeded for 10 iterations. succeeded for 10 iterations. diff --git a/test/files/run/colltest.scala b/test/files/run/colltest.scala index 703e94a3c7a7..d27e0989f031 100644 --- a/test/files/run/colltest.scala +++ b/test/files/run/colltest.scala @@ -24,7 +24,7 @@ class TestSet(s0: Set[Int], s1: Set[Int]) { case 6 => "add" case 7 => "size" } - def checkSubSet(pre: String, s0: Set[Int], s1: Set[Int]) { + def checkSubSet(pre: String, s0: Set[Int], s1: Set[Int]): Unit = { for (e <- s0.iterator) if (!(s1 contains e)) { assert(false, pre+" element: "+e+"\n S0 = "+s0+"\n S1 = "+s1) @@ -46,12 +46,8 @@ class TestSet(s0: Set[Int], s1: Set[Int]) { Console.println("succeeded for "+Iterations+" iterations.") } object Test extends App { - def t3954 { + def t3954(): Unit = { import scala.collection.mutable - import scala.collection.immutable - val result = new mutable.ImmutableSetAdaptor(immutable.ListSet.empty[Int]) - println(result.add(1)) - println(result.add(1)) val result2 = new mutable.HashSet[Int] println(result2.add(1)) println(result2.add(1)) @@ -59,8 +55,7 @@ object Test extends App { println(result3.add(1)) println(result3.add(1)) } - t3954 + t3954() new TestSet(HashSet.empty, new LinkedHashSet) - new TestSet(new ImmutableSetAdaptor(collection.immutable.Set.empty[Int]), new LinkedHashSet) } diff --git a/test/files/run/colltest1.check 
b/test/files/run/colltest1.check index 5ec6286d9ef2..ecaff56a0ce5 100644 --- a/test/files/run/colltest1.check +++ b/test/files/run/colltest1.check @@ -12,12 +12,12 @@ new test starting with Stream() 10: Stream(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) 9: Stream(2, 3, 4, 5, 6, 7, 8, 9, 10) 1 -Stream(1, ?) -new test starting with WrappedArray() -10: ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) -9: ArrayBuffer(2, 3, 4, 5, 6, 7, 8, 9, 10) +Stream(1, ) +new test starting with ArraySeq() +10: ArraySeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) +9: ArraySeq(2, 3, 4, 5, 6, 7, 8, 9, 10) 1 -ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) +ArraySeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) new test starting with ArrayBuffer() 10: ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) 9: ArrayBuffer(2, 3, 4, 5, 6, 7, 8, 9, 10) diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala index db7f87a8a70e..ae5e6d8be18e 100644 --- a/test/files/run/colltest1.scala +++ b/test/files/run/colltest1.scala @@ -4,14 +4,15 @@ import scala.collection._ import scala.language.postfixOps +@deprecated("Tests deprecated API", since="2.13.0") object Test extends App { - def orderedTraversableTest(empty: Traversable[Int]) { + def orderedTraversableTest(empty: Traversable[Int]): Unit = { println("new test starting with "+empty) assert(empty.isEmpty) val ten = empty ++ List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) - println(ten.size+": "+ten) - println(ten.tail.size+": "+ten.tail) + println(s"${ten.size}: $ten") + println(s"${ten.tail.size}: ${ten.tail}") assert(ten == empty ++ (1 to 10)) assert(ten.size == 10) assert(ten forall (_ <= 10)) @@ -33,7 +34,7 @@ object Test extends App { val (o, e) = ten.partition(_ % 2 == 0) assert(o.size == e.size) val gs = ten groupBy (x => x / 4) - val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList.sorted + val vs1 = (for (k <- gs.keysIterator; v <- gs(k).iterator) yield v).toList.sorted val vs2 = gs.values.toList.flatten.sorted // val vs2 = gs.values.toList flatMap (xs => 
xs) assert(ten.head == 1) @@ -43,23 +44,22 @@ object Test extends App { assert(ten.last == 10) assert(List(ten.head) ++ ten.tail == ten) assert(ten.init ++ List(ten.last) == ten, ten.init) - assert(vs1 == vs2, vs1+"!="+vs2) + assert(vs1 == vs2, s"$vs1!=$vs2") assert(vs1 == ten) assert((ten take 5) == firstFive) assert((ten drop 5) == secondFive) - assert(ten slice (3, 3) isEmpty) - assert((ten slice (3, 6)) == List(4, 5, 6), ten slice (3, 6)) + assert(ten.slice(3, 3).isEmpty) + assert((ten.slice(3, 6)) == List(4, 5, 6), ten.slice(3, 6)) assert((ten takeWhile (_ <= 5)) == firstFive) assert((ten dropWhile (_ <= 5)) == secondFive) - assert((ten span (_ <= 5)) == (firstFive, secondFive)) - assert((ten splitAt 5) == (firstFive, secondFive), ten splitAt 5) + assert(ten.span(_ <= 5) == ((firstFive, secondFive))) + assert(ten.splitAt(5) == ((firstFive, secondFive)), ten.splitAt(5)) val buf = new mutable.ArrayBuffer[Int] - firstFive copyToBuffer buf - secondFive copyToBuffer buf - assert(buf.result == ten, buf.result) + buf ++= firstFive + buf ++= secondFive + assert(buf == ten, buf) assert(ten.toArray.size == 10) assert(ten.toArray.toSeq == ten, ten.toArray.toSeq) - assert(ten.toIterable == ten) assert(ten.toList == ten) assert(ten.toSeq == ten) assert(ten.toStream == ten) @@ -67,16 +67,16 @@ object Test extends App { assert(ten.mkString("[", "; ", "]") endsWith "[1; 2; 3; 4; 5; 6; 7; 8; 9; 10]") } - def orderedIterableTest(empty: Iterable[Int]) { + def orderedIterableTest(empty: Iterable[Int]): Unit = { orderedTraversableTest(empty) val six = empty ++ List(1, 2, 3, 4, 5, 6) assert(six.iterator.toStream == six) assert(six.takeRight(4) == List(3, 4, 5, 6), six.takeRight(4)) assert(six.dropRight(3) == List(1, 2, 3)) - assert(six sameElements (1 to 6)) + assert(six.iterator.sameElements(1 to 6)) } - def sequenceTest(empty: Seq[Int]) { + def sequenceTest(empty: Seq[Int]): Unit = { orderedIterableTest(empty) val ten = empty ++ (1 to 10) println(ten) @@ -122,7 +122,7 @@ object 
Test extends App { assert(ten contains 1) assert(ten contains 10) assert(!(ten contains 0)) - assert((empty ++ (1 to 7) union empty ++ (3 to 10)) == List(1, 2, 3, 4, 5, 6, 7, 3, 4, 5, 6, 7, 8, 9, 10)) + assert((empty ++ (1 to 7) ++ empty ++ (3 to 10)) == List(1, 2, 3, 4, 5, 6, 7, 3, 4, 5, 6, 7, 8, 9, 10)) assert((ten diff ten).isEmpty) assert((ten diff List()) == ten) assert((ten diff (ten filter (_ % 2 == 0))) == (ten filterNot (_ % 2 == 0))) @@ -137,9 +137,9 @@ object Test extends App { assert(ten.sortWith(_ > _) == ten.reverse) } - def setTest(empty: => Set[String]) { + def setTest(empty: => Set[String]): Unit = { var s = empty + "A" + "B" + "C" - s += ("D", "E", "F") + s ++= List("D", "E", "F") s ++= List("G", "H", "I") s ++= ('J' to 'Z') map (_.toString) assert(s forall (s contains)) @@ -147,7 +147,7 @@ object Test extends App { assert(!(s contains "0")) s = s + "0" assert(s contains "0") - s = s - "X" + s = s.diff(Set("X")) assert(!(s contains "X")) assert(empty.isEmpty) assert(!s.isEmpty) @@ -156,12 +156,12 @@ object Test extends App { assert(!s.isEmpty) val s1 = s intersect empty assert(s1 == empty, s1) - def abc = empty + ("a", "b", "c") - def bc = empty + ("b", "c") + def abc = empty ++ Set("a", "b", "c") + def bc = empty ++ Set("b", "c") assert(bc subsetOf abc) } - def rangeTest(r: Range) { + def rangeTest(r: Range): Unit = { val ten = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) assert(r == ten) assert(r.toList == ten) @@ -173,7 +173,7 @@ object Test extends App { def mapTest(empty: => Map[String, String]) = { var m = empty + ("A" -> "A") + ("B" -> "B") + ("C" -> "C") - m += (("D" -> "D"), ("E" -> "E"), ("F" -> "F")) + m ++= List(("D" -> "D"), ("E" -> "E"), ("F" -> "F")) m ++= List(("G" -> "G"), ("H" -> "H"), ("I" -> "I")) m ++= ('J' to 'Z') map (x => (x.toString -> x.toString)) println(m.toList.sorted) @@ -184,10 +184,11 @@ object Test extends App { assert(m.getOrElse("7", "@") == "@") assert(m.keySet.size == 26) assert(m.size == 26) - assert(m.keySet == Set() 
++ m.keysIterator) - assert(m.keySet == m.keysIterator.toList.toSet, m.keySet.toList+"!="+m.keysIterator.toList.toSet) + assert(m.keySet == Set() ++ m.keysIterator.to(LazyList)) + assert(m.keySet == m.keysIterator.toList.toSet, s"${m.keySet.toList}!=${m.keysIterator.toList.toSet}") val m1 = empty ++ m - val mm = m -- m.keySet.toList + val ks = m.keySet + val mm = m.view.filterKeys(k => !ks(k)) assert(mm.isEmpty, mm) def m3 = empty ++ m1 assert(m1 == m3) @@ -199,7 +200,7 @@ object Test extends App { def mutableMapTest(empty: => mutable.Map[String, String]) = { mapTest(empty) val m1 = empty ++ (('A' to 'Z') map (_.toString) map (x => (x, x))) - val m2 = m1 retain ((k, v) => k == "N") + val m2 = m1 filterInPlace ((k, v) => k == "N") assert(m2.size == 1, m2) } diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala index 940907665820..ac636fa23589 100644 --- a/test/files/run/compiler-asSeenFrom.scala +++ b/test/files/run/compiler-asSeenFrom.scala @@ -1,56 +1,4 @@ -/* - * filter: inliner warning; re-run with - */ -import scala.tools.nsc._ -import scala.tools.partest.DirectTest -import scala.collection.{ mutable, immutable, generic } -import scala.language.{postfixOps, implicitConversions} -import scala.reflect.runtime.{universe => ru} - -// necessary to avoid bincompat with scala-partest compiled against the old compiler -abstract class CompilerTest extends DirectTest { - def check(source: String, unit: global.CompilationUnit): Unit - - lazy val global: Global = newCompiler() - lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *) - import global._ - import definitions.{ compilerTypeFromTag } - - override def extraSettings = "-feature -usejavacp -d " + testOutput.path - - def show() = (sources, units).zipped foreach check - - // Override at least one of these... 
- def code = "" - def sources: List[String] = List(code) - - // Utility functions - class MkType(sym: Symbol) { - def apply[M](implicit t: ru.TypeTag[M]): Type = - if (sym eq NoSymbol) NoType - else appliedType(sym, compilerTypeFromTag(t)) - } - implicit def mkMkType(sym: Symbol) = new MkType(sym) - - def allMembers(root: Symbol): List[Symbol] = { - def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = { - val latest = roots flatMap (_.info.members) filterNot (seen contains _) - if (latest.isEmpty) seen.toList.sortWith(_ isLess _) - else loop(seen ++ latest, latest) - } - loop(Set(), List(root)) - } - - class SymsInPackage(pkgName: String) { - def pkg = rootMirror.getPackage(pkgName) - def classes = allMembers(pkg) filter (_.isClass) - def modules = allMembers(pkg) filter (_.isModule) - def symbols = classes ++ terms filterNot (_ eq NoSymbol) - def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor) - def tparams = classes flatMap (_.info.typeParams) - def tpes = symbols map (_.tpe) distinct - } -} +import scala.tools.partest.CompilerTest /** It's too messy but it's better than not having it. 
*/ @@ -86,24 +34,22 @@ package ll { """ object syms extends SymsInPackage("ll") { - def isPossibleEnclosure(encl: Symbol, sym: Symbol) = sym.enclClassChain drop 1 exists (_ isSubClass encl) + def isPossibleEnclosure(encl: Symbol, sym: Symbol) = sym.enclClassChain.drop(1).exists(_ isSubClass encl) def isInterestingPrefix(pre: Type) = pre.typeConstructor.typeParams.nonEmpty && pre.members.exists(_.isType) - def asSeenPrefixes = tpes map (_.finalResultType) distinct - def typeRefPrefixes = asSeenPrefixes filter isInterestingPrefix + def asSeenPrefixes = tpes.map(_.finalResultType).distinct + def typeRefPrefixes = asSeenPrefixes.filter(isInterestingPrefix) - def nestsIn(outer: Symbol) = classes filter (c => c.enclClassChain drop 1 exists(_ isSubClass outer)) + def nestsIn(outer: Symbol) = classes.filter(_.enclClassChain.drop(1).exists(_ isSubClass outer)) def typeRefs(targs: List[Type]) = ( - for (p <- typeRefPrefixes ; c <- classes filter (isPossibleEnclosure(p.typeSymbol, _)) ; a <- targs) yield + for (p <- typeRefPrefixes ; c <- classes.filter(isPossibleEnclosure(p.typeSymbol, _)) ; a <- targs) yield typeRef(p, c, List(a)) ) val wfmt = "%-" + 25 + "s" def to_s(x: Any): String = wfmt.format(x.toString.replaceAll("""\bll\.""", "")) - def fmt(args: Any*): String = { - (args map to_s mkString " ").replaceAll("""\s+$""", "") - } + def fmt(args: Any*): String = args.map(to_s).mkString(" ").replaceAll("""\s+$""", "") def fname(sym: Symbol) = { val p = "" + sym.owner.name val x = if (sym.owner.isPackageClass || sym.owner.isModuleClass || sym.owner.isTerm) "." 
else "#" @@ -112,7 +58,7 @@ package ll { def permuteAsSeenFrom(targs: List[Type]) = ( for { - tp <- typeRefs(targs filterNot (_ eq NoType)) + tp <- typeRefs(targs.filterNot(_ eq NoType)) prefix <- asSeenPrefixes if tp.prefix != prefix site <- classes @@ -120,7 +66,7 @@ package ll { if tp != seen if !seen.isInstanceOf[ExistentialType] } - yield ((site, tp, prefix, seen)) + yield (site, tp, prefix, seen) ) def block(label: Any)(lines: List[String]): List[String] = { @@ -148,10 +94,13 @@ package ll { def pretty(xs: List[_]) = if (xs.isEmpty) "" else xs.mkString("\n ", "\n ", "\n") - def signaturesIn(info: Type): List[String] = ( - info.members.toList - filterNot (s => s.isType || s.owner == ObjectClass || s.owner == AnyClass || s.isConstructor) - map (_.defString) + def signaturesIn(sym: Symbol): List[String] = ( + if (sym.owner == ObjectClass || sym.owner == AnyClass) Nil + else { + sym.tpe.members.toList + .filterNot(s => s.isType || s.owner == ObjectClass || s.owner == AnyClass || s.isConstructor) + .map(_.defString) + } ) def check(source: String, unit: global.CompilationUnit) = { @@ -161,10 +110,10 @@ package ll { val typeArgs = List[Type](IntClass.tpe, ListClass[Int]) ++ tparams.map(_.tpe) permute(typeArgs) foreach println } - for (x <- classes ++ terms) { - afterEachPhase(signaturesIn(x.tpe)) collect { + for (sym <- classes ++ terms) { + afterEachPhase(signaturesIn(sym)) collect { case (ph, sigs) if sigs.nonEmpty => - println(sigs.mkString(x + " { // after " + ph + "\n ", "\n ", "\n}\n")) + println(sigs.mkString(s"$sym { // after $ph\n ", "\n ", "\n}\n")) } } } diff --git a/test/files/run/concurrent-map-conversions.scala b/test/files/run/concurrent-map-conversions.scala deleted file mode 100644 index 1179764e37eb..000000000000 --- a/test/files/run/concurrent-map-conversions.scala +++ /dev/null @@ -1,29 +0,0 @@ - -object Test extends App { - - def needPackageConcurrentMap(map: collection.concurrent.Map[Int, Int]) { - } - def needJavaConcurrent(map: 
java.util.concurrent.ConcurrentMap[Int, Int]) { - } - - def testConversions() { - import collection.convert.ImplicitConversions._ - val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int] - val ctrie = new collection.concurrent.TrieMap[Int, Int] - - needPackageConcurrentMap(skiplist) - needJavaConcurrent(ctrie) - } - - def testConverters() { - import collection.JavaConverters._ - val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int] - val ctrie = new collection.concurrent.TrieMap[Int, Int] - - needPackageConcurrentMap(skiplist.asScala) - needJavaConcurrent(ctrie.asJava) - } - - testConversions() - testConverters() -} diff --git a/test/files/run/constAnnArgs.check b/test/files/run/constAnnArgs.check new file mode 100644 index 000000000000..1d77c80782ab --- /dev/null +++ b/test/files/run/constAnnArgs.check @@ -0,0 +1,21 @@ + +scala> @deprecated(message = "x", since = "y") def f = 1; f + ^ + warning: method f is deprecated (since y): x +def f: Int +val res0: Int = 1 + +scala> :pa -raw << JUMP! +// Entering paste mode (JUMP! to finish) + +package scala { class deprecated(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation } +JUMP! +// Exiting paste mode... now compiling with scalac. + +scala> @deprecated(message = "x", since = "y") def g = 1; g + ^ + warning: method g is deprecated (since y): x +def g: Int +val res1: Int = 1 + +scala> :quit diff --git a/test/files/run/constAnnArgs.scala b/test/files/run/constAnnArgs.scala new file mode 100644 index 000000000000..42df32c6934c --- /dev/null +++ b/test/files/run/constAnnArgs.scala @@ -0,0 +1,12 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def extraSettings = "-deprecation" + def code = + """@deprecated(message = "x", since = "y") def f = 1; f + |:pa -raw << JUMP! + |package scala { class deprecated(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation } + |JUMP! 
+ |@deprecated(message = "x", since = "y") def g = 1; g + |""".stripMargin +} diff --git a/test/files/run/constant-optimization.scala b/test/files/run/constant-optimization.scala index 5d13272f3b1b..56855fbc1c1c 100644 --- a/test/files/run/constant-optimization.scala +++ b/test/files/run/constant-optimization.scala @@ -1,20 +1,20 @@ object Test extends App { - def testBothReachable() { - val i = util.Random.nextInt + def testBothReachable(): Unit = { + val i = util.Random.nextInt() val x = if (i % 2 == 0) null else "good" val y = if (x == null) "good" else x + "" println(s"testBothReachable: $y") } - def testOneReachable() { + def testOneReachable(): Unit = { val i = 1 val x = if (i != 1) null else "good" val y = if (x == null) "good" else x + "" println(s"testOneReachable: $y") } - def testAllReachable() { - val i = util.Random.nextInt + def testAllReachable(): Unit = { + val i = util.Random.nextInt() val y = (i % 2) match { case 0 => "good" case 1 => "good" @@ -23,8 +23,8 @@ object Test extends App { println(s"testAllReachable: $y") } - def testOneUnreachable() { - val i = util.Random.nextInt + def testOneUnreachable(): Unit = { + val i = util.Random.nextInt() val x = if (i % 2 == 0) { 1 } else { @@ -38,8 +38,8 @@ object Test extends App { println(s"testOneUnreachable: $y") } - def testDefaultUnreachable() { - val i = util.Random.nextInt + def testDefaultUnreachable(): Unit = { + val i = util.Random.nextInt() val x = if (i % 2 == 0) { 1 } else { diff --git a/test/files/run/constant-type.check b/test/files/run/constant-type.check index 9df13533548b..0009305537c5 100644 --- a/test/files/run/constant-type.check +++ b/test/files/run/constant-type.check @@ -5,7 +5,7 @@ import scala.tools.nsc._, intp.global._, definitions._ Try :help or completions for vals._ and power._ scala> val s = transformedType(StringClass.toType).asInstanceOf[Type] -s: $r.intp.global.Type = String +val s: $r.intp.global.Type = String scala> { 
println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) } Class[String](classOf[java.lang.String]) diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check index 58cdeb7df0a0..7827cee72491 100644 --- a/test/files/run/constrained-types.check +++ b/test/files/run/constrained-types.check @@ -1,6 +1,6 @@ scala> class Annot(obj: Any) extends annotation.Annotation with annotation.TypeConstraint -defined class Annot +class Annot scala> @@ -9,21 +9,21 @@ scala> class A { val y: Int @Annot(x) = 10 override def toString = "an A" } -defined class A +class A scala> scala> val a = new A -a: A = an A +val a: A = an A scala> val y = a.y // should rewrite "this.x" to "a.x" -y: Int @Annot(a.x) = 10 +val y: Int @Annot(a.x) = 10 scala> var a2 = new A -a2: A = an A +var a2: A = an A scala> val y2 = a2.y // should drop the annotation -y2: Int = 10 +val y2: Int = 10 scala> @@ -31,12 +31,12 @@ scala> object Stuff { val x = "hello" val y : Int @Annot(x) = 10 } -defined object Stuff +object Stuff scala> scala> val y = Stuff.y // should rewrite the annotation -y: Int @Annot(Stuff.x) = 10 +val y: Int @Annot(Stuff.x) = 10 scala> @@ -44,50 +44,50 @@ scala> class B { val y: Int @Annot(Stuff.x) = 10 override def toString = "a B" } -defined class B +class B scala> scala> val b = new B -b: B = a B +val b: B = a B scala> val y = b.y // should keep the annotation -y: Int @Annot(Stuff.x) = 10 +val y: Int @Annot(Stuff.x) = 10 scala> def m(x: String): String @Annot(x) = x -m: (x: String)String @Annot(x) +def m(x: String): String @Annot(x) scala> scala> val three = "three" -three: String = three +val three: String = three scala> val three2 = m(three:three.type) // should change x to three -three2: String @Annot(three) = three +val three2: String @Annot(three) = three scala> var four = "four" -four: String = four +var four: String = four scala> val four2 = m(four) // should have an existential bound -warning: one feature warning; for details, enable 
`:setting -feature' or `:replay -feature' -four2: String @Annot(x) forSome { val x: String } = four +warning: 1 feature warning; for details, enable `:setting -feature` or `:replay -feature` +val four2: String @Annot(x) forSome { val x: String } = four scala> val four3 = four2 // should have the same type as four2 -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -four3: String @Annot(x) forSome { val x: String } = four +warning: 1 feature warning; for details, enable `:setting -feature` or `:replay -feature` +val four3: String @Annot(x) forSome { val x: String } = four scala> val stuff = m("stuff") // should not crash -stuff: String @Annot("stuff") = stuff +val stuff: String @Annot("stuff") = stuff scala> scala> class peer extends annotation.Annotation // should not crash -defined class peer +class peer scala> scala> class NPE[T <: NPE[T] @peer] // should not crash -defined class NPE +class NPE scala> @@ -96,8 +96,8 @@ scala> def m = { val y : String @Annot(x) = x y } // x should not escape the local scope with a narrow type -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -m: String @Annot(x) forSome { val x: String } +warning: 1 feature warning; for details, enable `:setting -feature` or `:replay -feature` +def m: String @Annot(x) forSome { val x: String } scala> @@ -110,40 +110,36 @@ scala> def n(y: String) = { } m("stuff".stripMargin) } // x should be existentially bound -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -n: (y: String)String @Annot(x) forSome { val x: String } +warning: 1 feature warning; for details, enable `:setting -feature` or `:replay -feature` +def n(y: String): String @Annot(x) forSome { val x: String } scala> scala> class rep extends annotation.Annotation { } -defined class rep +class rep scala> scala> object A { val x = "hello" : String @ rep } -defined object A +object A warning: previously defined class A 
is not a companion to object A. Companions must be defined together; you may wish to use :paste mode for this. scala> scala> val y = a.x // should drop the annotation -y: String = hello +val y: String = hello scala> scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message -:12: error: not found: value e - val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:12: error: not found: value f - val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + error: not found: value e ^ -:12: error: not found: value g - val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + error: not found: value f ^ -:12: error: not found: value h - val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + error: not found: value g ^ + error: not found: value h scala> :quit diff --git a/test/files/run/constrained-types.scala b/test/files/run/constrained-types.scala index 7ec8f93d381b..f27445a7bfd4 100644 --- a/test/files/run/constrained-types.scala +++ b/test/files/run/constrained-types.scala @@ -75,7 +75,6 @@ val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message """ override def transformSettings(s: Settings): Settings = { - s.Xexperimental.value = true s.deprecation.value = true // when running that compiler, give it a scala-library to the classpath s.classpath.value = sys.props("java.class.path") diff --git a/test/files/run/contrib674.check b/test/files/run/contrib674.check index 7fcac8e06d44..491bb717fbb0 100644 --- a/test/files/run/contrib674.check +++ b/test/files/run/contrib674.check @@ -1,6 +1,3 @@ -contrib674.scala:15: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected - 1 - ^ -contrib674.scala:15: warning: a pure expression does nothing in statement position +contrib674.scala:15: warning: discarded pure expression does nothing 1 ^ diff --git a/test/files/run/contrib674.scala 
b/test/files/run/contrib674.scala index bb9dad3686a9..e19932eabb66 100644 --- a/test/files/run/contrib674.scala +++ b/test/files/run/contrib674.scala @@ -15,5 +15,5 @@ object Test extends App { 1 } - bad + bad() } diff --git a/test/files/run/ctor-order.scala b/test/files/run/ctor-order.scala index 5f5871691af0..158eb320238f 100644 --- a/test/files/run/ctor-order.scala +++ b/test/files/run/ctor-order.scala @@ -1,3 +1,4 @@ +//> using options -Xmaxwarns 0 /** Test that constructor operations are reordered correctly. */ class Outer { diff --git a/test/files/run/ctries-new/DumbHash.scala b/test/files/run/ctries-new/DumbHash.scala deleted file mode 100644 index 8ef325b67c86..000000000000 --- a/test/files/run/ctries-new/DumbHash.scala +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - -class DumbHash(val i: Int) { - override def equals(other: Any) = other match { - case that: DumbHash => that.i == this.i - case _ => false - } - override def hashCode = i % 5 - override def toString = "DH(%s)".format(i) -} diff --git a/test/files/run/ctries-new/Wrap.scala b/test/files/run/ctries-new/Wrap.scala deleted file mode 100644 index 7b645c1612a7..000000000000 --- a/test/files/run/ctries-new/Wrap.scala +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - -case class Wrap(i: Int) { - override def hashCode = i * 0x9e3775cd -} diff --git a/test/files/run/ctries-new/concmap.scala b/test/files/run/ctries-new/concmap.scala deleted file mode 100644 index 76916564a72f..000000000000 --- a/test/files/run/ctries-new/concmap.scala +++ /dev/null @@ -1,188 +0,0 @@ - - - -import collection.concurrent.TrieMap - - -object ConcurrentMapSpec extends Spec { - - val initsz = 500 - val secondsz = 750 - - def test() { - "support put" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None) - for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i)) - } - - "support put if absent" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new 
Wrap(i), i) - for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i)) - for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i)) - for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None) - for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i)) - } - - "support remove if mapped to a specific value" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), i) - for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false) - for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true) - for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false) - } - - "support replace if mapped to a specific value" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), i) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false) - for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false) - } - - "support replace if present" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), i) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i)) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i)) - for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None) - } - - def assertEqual(a: Any, b: Any) = { - if (a != b) println(a, b) - assert(a == b) - } - - "support replace if mapped to a specific value, using several threads" in { - val ct = new TrieMap[Wrap, Int] - val sz = 55000 - for (i <- 0 until sz) ct.update(new Wrap(i), i) - - class Updater(index: Int, offs: Int) extends Thread { - override def run() { - var repeats = 0 - for (i <- 0 until sz) { - val j = (offs + i) % sz - var k = Int.MaxValue - do { - if (k 
!= Int.MaxValue) repeats += 1 - k = ct.lookup(new Wrap(j)) - } while (!ct.replace(new Wrap(j), k, -k)) - } - //println("Thread %d repeats: %d".format(index, repeats)) - } - } - - val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - - for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i) - - val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i) - threads2.foreach(_.start()) - threads2.foreach(_.join()) - - for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i) - } - - "support put if absent, several threads" in { - val ct = new TrieMap[Wrap, Int] - val sz = 110000 - - class Updater(offs: Int) extends Thread { - override def run() { - for (i <- 0 until sz) { - val j = (offs + i) % sz - ct.putIfAbsent(new Wrap(j), j) - assert(ct.lookup(new Wrap(j)) == j) - } - } - } - - val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - - for (i <- 0 until sz) assert(ct(new Wrap(i)) == i) - } - - "support remove if mapped to a specific value, several threads" in { - val ct = new TrieMap[Wrap, Int] - val sz = 55000 - for (i <- 0 until sz) ct.update(new Wrap(i), i) - - class Remover(offs: Int) extends Thread { - override def run() { - for (i <- 0 until sz) { - val j = (offs + i) % sz - ct.remove(new Wrap(j), j) - assert(ct.get(new Wrap(j)) == None) - } - } - } - - val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - - for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None) - } - - "have all or none of the elements depending on the oddity" in { - val ct = new TrieMap[Wrap, Int] - val sz = 65000 - for (i <- 0 until sz) ct(new Wrap(i)) = i - - class Modifier(index: Int, offs: Int) extends Thread { - override def run() { - for (j <- 0 until sz) { - val i = (offs + j) % sz - var success = false - do { - if (ct.contains(new Wrap(i))) { - success = 
ct.remove(new Wrap(i)) != None - } else { - success = ct.putIfAbsent(new Wrap(i), i) == None - } - } while (!success) - } - } - } - - def modify(n: Int) = { - val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - } - - modify(16) - for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i)) - modify(15) - for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None) - } - - "compute size correctly" in { - val ct = new TrieMap[Wrap, Int] - val sz = 36450 - for (i <- 0 until sz) ct(new Wrap(i)) = i - - assertEqual(ct.size, sz) - assertEqual(ct.size, sz) - } - - "compute size correctly in parallel" in { - val ct = new TrieMap[Wrap, Int] - val sz = 36450 - for (i <- 0 until sz) ct(new Wrap(i)) = i - val pct = ct.par - - assertEqual(pct.size, sz) - assertEqual(pct.size, sz) - } - - } - -} diff --git a/test/files/run/ctries-new/iterator.scala b/test/files/run/ctries-new/iterator.scala deleted file mode 100644 index bb1175e61bf7..000000000000 --- a/test/files/run/ctries-new/iterator.scala +++ /dev/null @@ -1,275 +0,0 @@ -import collection._ -import collection.concurrent.TrieMap - -object IteratorSpec extends Spec { - - def test() { - "work for an empty trie" in { - val ct = new TrieMap - val it = ct.iterator - - it.hasNext shouldEqual (false) - evaluating { it.next() }.shouldProduce [NoSuchElementException] - } - - def nonEmptyIteratorCheck(sz: Int) { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - - val it = ct.iterator - val tracker = mutable.Map[Wrap, Int]() - for (i <- 0 until sz) { - assert(it.hasNext == true) - tracker += it.next - } - - it.hasNext shouldEqual (false) - evaluating { it.next() }.shouldProduce [NoSuchElementException] - tracker.size shouldEqual (sz) - tracker shouldEqual (ct) - } - - "work for a 1 element trie" in { - nonEmptyIteratorCheck(1) - } - - "work for a 2 element trie" in { - nonEmptyIteratorCheck(2) - } - - "work for a 3 element 
trie" in { - nonEmptyIteratorCheck(3) - } - - "work for a 5 element trie" in { - nonEmptyIteratorCheck(5) - } - - "work for a 10 element trie" in { - nonEmptyIteratorCheck(10) - } - - "work for a 20 element trie" in { - nonEmptyIteratorCheck(20) - } - - "work for a 50 element trie" in { - nonEmptyIteratorCheck(50) - } - - "work for a 100 element trie" in { - nonEmptyIteratorCheck(100) - } - - "work for a 1k element trie" in { - nonEmptyIteratorCheck(1000) - } - - "work for a 5k element trie" in { - nonEmptyIteratorCheck(5000) - } - - "work for a 75k element trie" in { - nonEmptyIteratorCheck(75000) - } - - "work for a 250k element trie" in { - nonEmptyIteratorCheck(500000) - } - - def nonEmptyCollideCheck(sz: Int) { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until sz) ct.put(new DumbHash(i), i) - - val it = ct.iterator - val tracker = mutable.Map[DumbHash, Int]() - for (i <- 0 until sz) { - assert(it.hasNext == true) - tracker += it.next - } - - it.hasNext shouldEqual (false) - evaluating { it.next() }.shouldProduce [NoSuchElementException] - tracker.size shouldEqual (sz) - tracker shouldEqual (ct) - } - - "work for colliding hashcodes, 2 element trie" in { - nonEmptyCollideCheck(2) - } - - "work for colliding hashcodes, 3 element trie" in { - nonEmptyCollideCheck(3) - } - - "work for colliding hashcodes, 5 element trie" in { - nonEmptyCollideCheck(5) - } - - "work for colliding hashcodes, 10 element trie" in { - nonEmptyCollideCheck(10) - } - - "work for colliding hashcodes, 100 element trie" in { - nonEmptyCollideCheck(100) - } - - "work for colliding hashcodes, 500 element trie" in { - nonEmptyCollideCheck(500) - } - - "work for colliding hashcodes, 5k element trie" in { - nonEmptyCollideCheck(5000) - } - - def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) { - if (a != b) { - println(a.size + " vs " + b.size) - } - assert(a == b) - } - - "be consistent when taken with concurrent modifications" in { - val sz = 25000 - val W = 15 - val S = 5 - val 
checks = 5 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - - class Modifier extends Thread { - override def run() { - for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match { - case Some(_) => ct.remove(new Wrap(i)) - case None => - } - } - } - - def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) { - class Iter extends Thread { - override def run() { - val snap = ct.readOnlySnapshot() - val initial = mutable.Map[Wrap, Int]() - for (kv <- snap) initial += kv - - for (i <- 0 until checks) { - assertEqual(snap.iterator.toMap, initial) - } - } - } - - val iter = new Iter - iter.start() - iter.join() - } - - val threads = for (_ <- 0 until W) yield new Modifier - threads.foreach(_.start()) - for (_ <- 0 until S) consistentIteration(ct, checks) - threads.foreach(_.join()) - } - - "be consistent with a concurrent removal with a well defined order" in { - val sz = 150000 - val sgroupsize = 10 - val sgroupnum = 5 - val removerslowdown = 50 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - - class Remover extends Thread { - override def run() { - for (i <- 0 until sz) { - assert(ct.remove(new Wrap(i)) == Some(i)) - for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate - } - } - } - - def consistentIteration(it: Iterator[(Wrap, Int)]) = { - class Iter extends Thread { - override def run() { - val elems = it.toBuffer - if (elems.nonEmpty) { - val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i - assert(elems.forall(_._1.i >= minelem)) - } - } - } - new Iter - } - - val remover = new Remover - remover.start() - for (_ <- 0 until sgroupnum) { - val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator) - iters.foreach(_.start()) - iters.foreach(_.join()) - } - remover.join() - } - - "be consistent with a concurrent insertion with a well defined order" in { - val sz = 150000 - val sgroupsize = 10 - val sgroupnum = 10 - val inserterslowdown = 50 - val 
ct = new TrieMap[Wrap, Int] - - class Inserter extends Thread { - override def run() { - for (i <- 0 until sz) { - assert(ct.put(new Wrap(i), i) == None) - for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate - } - } - } - - def consistentIteration(it: Iterator[(Wrap, Int)]) = { - class Iter extends Thread { - override def run() { - val elems = it.toSeq - if (elems.nonEmpty) { - val maxelem = elems.maxBy((x: (Wrap, Int)) => x._1.i)._1.i - assert(elems.forall(_._1.i <= maxelem)) - } - } - } - new Iter - } - - val inserter = new Inserter - inserter.start() - for (_ <- 0 until sgroupnum) { - val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator) - iters.foreach(_.start()) - iters.foreach(_.join()) - } - inserter.join() - } - - "work on a yet unevaluated snapshot" in { - val sz = 50000 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.update(new Wrap(i), i) - - val snap = ct.snapshot() - val it = snap.iterator - - while (it.hasNext) it.next() - } - - "be duplicated" in { - val sz = 50 - val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*) - val it = ct.splitter - for (_ <- 0 until (sz / 2)) it.next() - val dupit = it.dup - - it.toList shouldEqual dupit.toList - } - - } - -} diff --git a/test/files/run/ctries-new/lnode.scala b/test/files/run/ctries-new/lnode.scala deleted file mode 100644 index 4cc97050e54b..000000000000 --- a/test/files/run/ctries-new/lnode.scala +++ /dev/null @@ -1,61 +0,0 @@ - - - -import collection.concurrent.TrieMap - - -object LNodeSpec extends Spec { - - val initsz = 1500 - val secondsz = 1750 - - def test() { - "accept elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - } - - "lookup elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - for (i <- 0 until initsz) assert(ct.get(new 
DumbHash(i)) == Some(i)) - for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None) - } - - "remove elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - for (i <- 0 until initsz) { - val remelem = ct.remove(new DumbHash(i)) - assert(remelem == Some(i), "removing " + i + " yields " + remelem) - } - for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None) - } - - "put elements with the same hash codes if absent" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.put(new DumbHash(i), i) - for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i) - for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i)) - for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None) - for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i) - } - - "replace elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) - for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i) - for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i)) - for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i) - for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true) - } - - "remove elements with the same hash codes if mapped to a specific value" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) - for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true) - } - - } - -} diff --git a/test/files/run/ctries-new/main.scala b/test/files/run/ctries-new/main.scala deleted file mode 100644 index 34f3ec2ccf9a..000000000000 --- a/test/files/run/ctries-new/main.scala +++ /dev/null @@ -1,50 +0,0 @@ -import scala.reflect.{ClassTag, classTag} - - - - - - - - -object Test { - - def main(args: 
Array[String]) { - ConcurrentMapSpec.test() - IteratorSpec.test() - LNodeSpec.test() - SnapshotSpec.test() - } - -} - - -trait Spec { - - implicit def implicitously = scala.language.implicitConversions - implicit def reflectively = scala.language.reflectiveCalls - - implicit def str2ops(s: String) = new { - def in[U](body: =>U) { - // just execute body - body - } - } - - implicit def any2ops(a: Any) = new { - def shouldEqual(other: Any) = assert(a == other) - } - - def evaluating[U](body: =>U) = new { - def shouldProduce[T <: Throwable: ClassTag]() = { - var produced = false - try body - catch { - case e: Throwable => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true - } finally { - assert(produced, "Did not produce exception of type: " + implicitly[ClassTag[T]]) - } - } - } - -} diff --git a/test/files/run/ctries-new/snapshot.scala b/test/files/run/ctries-new/snapshot.scala deleted file mode 100644 index 57155d49c619..000000000000 --- a/test/files/run/ctries-new/snapshot.scala +++ /dev/null @@ -1,267 +0,0 @@ - - - - -import collection._ -import collection.concurrent.TrieMap - - - -object SnapshotSpec extends Spec { - - def test() { - "support snapshots" in { - val ctn = new TrieMap - ctn.snapshot() - ctn.readOnlySnapshot() - - val ct = new TrieMap[Int, Int] - for (i <- 0 until 100) ct.put(i, i) - ct.snapshot() - ct.readOnlySnapshot() - } - - "empty 2 quiescent snapshots in isolation" in { - val sz = 4000 - - class Worker(trie: TrieMap[Wrap, Int]) extends Thread { - override def run() { - for (i <- 0 until sz) { - assert(trie.remove(new Wrap(i)) == Some(i)) - for (j <- 0 until sz) - if (j <= i) assert(trie.get(new Wrap(j)) == None) - else assert(trie.get(new Wrap(j)) == Some(j)) - } - } - } - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - val snapt = ct.snapshot() - - val original = new Worker(ct) - val snapshot = new Worker(snapt) - original.start() - snapshot.start() - original.join() - snapshot.join() - - 
for (i <- 0 until sz) { - assert(ct.get(new Wrap(i)) == None) - assert(snapt.get(new Wrap(i)) == None) - } - } - - def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) { - @volatile var e: Exception = null - - // reads possible entries once and stores them - // then reads all these N more times to check if the - // state stayed the same - class Reader(trie: Map[Wrap, Int]) extends Thread { - setName("Reader " + name) - - override def run() = - try check() - catch { - case ex: Exception => e = ex - } - - def check() { - val initial = mutable.Map[Wrap, Int]() - for (i <- 0 until sz) trie.get(new Wrap(i)) match { - case Some(i) => initial.put(new Wrap(i), i) - case None => // do nothing - } - - for (k <- 0 until N) { - for (i <- 0 until sz) { - val tres = trie.get(new Wrap(i)) - val ires = initial.get(new Wrap(i)) - if (tres != ires) println(i, "initially: " + ires, "traversal %d: %s".format(k, tres)) - assert(tres == ires) - } - } - } - } - - val reader = new Reader(readonly) - reader.start() - reader.join() - - if (e ne null) { - e.printStackTrace() - throw e - } - } - - // traverses the trie `rep` times and modifies each entry - class Modifier(trie: TrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread { - setName("Modifier %d".format(index)) - - override def run() { - for (k <- 0 until rep) { - for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match { - case Some(_) => trie.remove(new Wrap(i)) - case None => // do nothing - } - } - } - } - - // removes all the elements from the trie - class Remover(trie: TrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread { - setName("Remover %d".format(index)) - - override def run() { - for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz)) - } - } - - "have a consistent quiescent read-only snapshot" in { - val sz = 10000 - val N = 100 - val W = 10 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val 
readonly = ct.readOnlySnapshot() - val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) - - threads.foreach(_.start()) - consistentReadOnly("qm", readonly, sz, N) - threads.foreach(_.join()) - } - - // now, we check non-quiescent snapshots, as these permit situations - // where a thread is caught in the middle of the update when a snapshot is taken - - "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in { - val sz = 1250 - val W = 100 - val S = 5000 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz) - - threads.foreach(_.start()) - for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5) - threads.foreach(_.join()) - } - - "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in { - val sz = 1000 - val N = 7000 - val W = 10 - val S = 7000 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) - - threads.foreach(_.start()) - for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5) - threads.foreach(_.join()) - } - - def consistentNonReadOnly(name: String, trie: TrieMap[Wrap, Int], sz: Int, N: Int) { - @volatile var e: Exception = null - - // reads possible entries once and stores them - // then reads all these N more times to check if the - // state stayed the same - class Worker extends Thread { - setName("Worker " + name) - - override def run() = - try check() - catch { - case ex: Exception => e = ex - } - - def check() { - val initial = mutable.Map[Wrap, Int]() - for (i <- 0 until sz) trie.get(new Wrap(i)) match { - case Some(i) => initial.put(new Wrap(i), i) - case None => // do nothing - } - - for (k <- 0 until N) { - // modify - for ((key, value) <- initial) { - val oldv = if (k % 2 == 0) value else -value - val newv = -oldv - trie.replace(key, 
oldv, newv) - } - - // check - for (i <- 0 until sz) if (initial.contains(new Wrap(i))) { - val expected = if (k % 2 == 0) -i else i - //println(trie.get(new Wrap(i))) - assert(trie.get(new Wrap(i)) == Some(expected)) - } else { - assert(trie.get(new Wrap(i)) == None) - } - } - } - } - - val worker = new Worker - worker.start() - worker.join() - - if (e ne null) { - e.printStackTrace() - throw e - } - } - - "have a consistent non-quiescent snapshot, concurrent with modifications" in { - val sz = 9000 - val N = 1000 - val W = 10 - val S = 400 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) - - threads.foreach(_.start()) - for (i <- 0 until S) { - consistentReadOnly("non-qm", ct.snapshot(), sz, 5) - consistentNonReadOnly("non-qsnap", ct.snapshot(), sz, 5) - } - threads.foreach(_.join()) - } - - "work when many concurrent snapshots are taken, concurrent with modifications" in { - val sz = 12000 - val W = 10 - val S = 10 - val modifytimes = 1200 - val snaptimes = 600 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - - class Snapshooter extends Thread { - setName("Snapshooter") - override def run() { - for (k <- 0 until snaptimes) { - val snap = ct.snapshot() - for (i <- 0 until sz) snap.remove(new Wrap(i)) - for (i <- 0 until sz) assert(!snap.contains(new Wrap(i))) - } - } - } - - val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz) - val shooters = for (i <- 0 until S) yield new Snapshooter - val threads = mods ++ shooters - threads.foreach(_.start()) - threads.foreach(_.join()) - } - - } - -} diff --git a/test/files/run/ctries-old/DumbHash.scala b/test/files/run/ctries-old/DumbHash.scala deleted file mode 100644 index 8ef325b67c86..000000000000 --- a/test/files/run/ctries-old/DumbHash.scala +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - -class DumbHash(val i: Int) { - override def equals(other: Any) = other match { - 
case that: DumbHash => that.i == this.i - case _ => false - } - override def hashCode = i % 5 - override def toString = "DH(%s)".format(i) -} diff --git a/test/files/run/ctries-old/Wrap.scala b/test/files/run/ctries-old/Wrap.scala deleted file mode 100644 index 7b645c1612a7..000000000000 --- a/test/files/run/ctries-old/Wrap.scala +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - -case class Wrap(i: Int) { - override def hashCode = i * 0x9e3775cd -} diff --git a/test/files/run/ctries-old/concmap.scala b/test/files/run/ctries-old/concmap.scala deleted file mode 100644 index affc6fe294ff..000000000000 --- a/test/files/run/ctries-old/concmap.scala +++ /dev/null @@ -1,189 +0,0 @@ - - - -import collection.concurrent.TrieMap -import Test.Spec - - -object ConcurrentMapSpec extends Spec { - - val initsz = 500 - val secondsz = 750 - - def test() { - "support put" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None) - for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i)) - } - - "support put if absent" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), i) - for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i)) - for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i)) - for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None) - for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i)) - } - - "support remove if mapped to a specific value" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), i) - for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false) - for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true) - for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false) - } - - "support replace if mapped to a specific value" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), 
i) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false) - for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false) - } - - "support replace if present" in { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until initsz) ct.update(new Wrap(i), i) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i)) - for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i)) - for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None) - } - - def assertEqual(a: Any, b: Any) = { - if (a != b) println(a, b) - assert(a == b) - } - - "support replace if mapped to a specific value, using several threads" in { - val ct = new TrieMap[Wrap, Int] - val sz = 55000 - for (i <- 0 until sz) ct.update(new Wrap(i), i) - - class Updater(index: Int, offs: Int) extends Thread { - override def run() { - var repeats = 0 - for (i <- 0 until sz) { - val j = (offs + i) % sz - var k = Int.MaxValue - do { - if (k != Int.MaxValue) repeats += 1 - k = ct.lookup(new Wrap(j)) - } while (!ct.replace(new Wrap(j), k, -k)) - } - //println("Thread %d repeats: %d".format(index, repeats)) - } - } - - val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - - for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i) - - val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i) - threads2.foreach(_.start()) - threads2.foreach(_.join()) - - for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i) - } - - "support put if absent, several threads" in { - val ct = new TrieMap[Wrap, Int] - val sz = 110000 - - class Updater(offs: Int) extends Thread { - override def run() { - for (i <- 0 until sz) { - val j = (offs + i) % sz - ct.putIfAbsent(new Wrap(j), j) - assert(ct.lookup(new Wrap(j)) 
== j) - } - } - } - - val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - - for (i <- 0 until sz) assert(ct(new Wrap(i)) == i) - } - - "support remove if mapped to a specific value, several threads" in { - val ct = new TrieMap[Wrap, Int] - val sz = 55000 - for (i <- 0 until sz) ct.update(new Wrap(i), i) - - class Remover(offs: Int) extends Thread { - override def run() { - for (i <- 0 until sz) { - val j = (offs + i) % sz - ct.remove(new Wrap(j), j) - assert(ct.get(new Wrap(j)) == None) - } - } - } - - val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - - for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None) - } - - "have all or none of the elements depending on the oddity" in { - val ct = new TrieMap[Wrap, Int] - val sz = 65000 - for (i <- 0 until sz) ct(new Wrap(i)) = i - - class Modifier(index: Int, offs: Int) extends Thread { - override def run() { - for (j <- 0 until sz) { - val i = (offs + j) % sz - var success = false - do { - if (ct.contains(new Wrap(i))) { - success = ct.remove(new Wrap(i)) != None - } else { - success = ct.putIfAbsent(new Wrap(i), i) == None - } - } while (!success) - } - } - } - - def modify(n: Int) = { - val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i) - threads.foreach(_.start()) - threads.foreach(_.join()) - } - - modify(16) - for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i)) - modify(15) - for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None) - } - - "compute size correctly" in { - val ct = new TrieMap[Wrap, Int] - val sz = 36450 - for (i <- 0 until sz) ct(new Wrap(i)) = i - - assertEqual(ct.size, sz) - assertEqual(ct.size, sz) - } - - "compute size correctly in parallel" in { - val ct = new TrieMap[Wrap, Int] - val sz = 36450 - for (i <- 0 until sz) ct(new Wrap(i)) = i - val pct = ct.par - - assertEqual(pct.size, sz) - assertEqual(pct.size, 
sz) - } - - } - -} diff --git a/test/files/run/ctries-old/iterator.scala b/test/files/run/ctries-old/iterator.scala deleted file mode 100644 index 127f6c9f06d8..000000000000 --- a/test/files/run/ctries-old/iterator.scala +++ /dev/null @@ -1,290 +0,0 @@ - - - - -import collection._ -import collection.concurrent.TrieMap - -import Test.Spec - - -object IteratorSpec extends Spec { - - def test() { - "work for an empty trie" in { - val ct = new TrieMap - val it = ct.iterator - - it.hasNext shouldEqual (false) - evaluating { it.next() }.shouldProduce [NoSuchElementException] - } - - def nonEmptyIteratorCheck(sz: Int) { - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - - val it = ct.iterator - val tracker = mutable.Map[Wrap, Int]() - for (i <- 0 until sz) { - assert(it.hasNext == true) - tracker += it.next - } - - it.hasNext shouldEqual (false) - evaluating { it.next() }.shouldProduce [NoSuchElementException] - tracker.size shouldEqual (sz) - tracker shouldEqual (ct) - } - - "work for a 1 element trie" in { - nonEmptyIteratorCheck(1) - } - - "work for a 2 element trie" in { - nonEmptyIteratorCheck(2) - } - - "work for a 3 element trie" in { - nonEmptyIteratorCheck(3) - } - - "work for a 5 element trie" in { - nonEmptyIteratorCheck(5) - } - - "work for a 10 element trie" in { - nonEmptyIteratorCheck(10) - } - - "work for a 20 element trie" in { - nonEmptyIteratorCheck(20) - } - - "work for a 50 element trie" in { - nonEmptyIteratorCheck(50) - } - - "work for a 100 element trie" in { - nonEmptyIteratorCheck(100) - } - - "work for a 1k element trie" in { - nonEmptyIteratorCheck(1000) - } - - "work for a 5k element trie" in { - nonEmptyIteratorCheck(5000) - } - - "work for a 75k element trie" in { - nonEmptyIteratorCheck(75000) - } - - "work for a 250k element trie" in { - nonEmptyIteratorCheck(500000) - } - - def nonEmptyCollideCheck(sz: Int) { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until sz) ct.put(new DumbHash(i), i) - - val it 
= ct.iterator - val tracker = mutable.Map[DumbHash, Int]() - for (i <- 0 until sz) { - assert(it.hasNext == true) - tracker += it.next - } - - it.hasNext shouldEqual (false) - evaluating { it.next() }.shouldProduce [NoSuchElementException] - tracker.size shouldEqual (sz) - tracker shouldEqual (ct) - } - - "work for colliding hashcodes, 2 element trie" in { - nonEmptyCollideCheck(2) - } - - "work for colliding hashcodes, 3 element trie" in { - nonEmptyCollideCheck(3) - } - - "work for colliding hashcodes, 5 element trie" in { - nonEmptyCollideCheck(5) - } - - "work for colliding hashcodes, 10 element trie" in { - nonEmptyCollideCheck(10) - } - - "work for colliding hashcodes, 100 element trie" in { - nonEmptyCollideCheck(100) - } - - "work for colliding hashcodes, 500 element trie" in { - nonEmptyCollideCheck(500) - } - - "work for colliding hashcodes, 5k element trie" in { - nonEmptyCollideCheck(5000) - } - - def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) { - if (a != b) { - println(a.size + " vs " + b.size) - // println(a) - // println(b) - // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i)) - // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i)) - } - assert(a == b) - } - - "be consistent when taken with concurrent modifications" in { - val sz = 25000 - val W = 15 - val S = 5 - val checks = 5 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - - class Modifier extends Thread { - override def run() { - for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match { - case Some(_) => ct.remove(new Wrap(i)) - case None => - } - } - } - - def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) { - class Iter extends Thread { - override def run() { - val snap = ct.readOnlySnapshot() - val initial = mutable.Map[Wrap, Int]() - for (kv <- snap) initial += kv - - for (i <- 0 until checks) { - assertEqual(snap.iterator.toMap, initial) - } - } - } - - val iter = new Iter - iter.start() - iter.join() - } - - val threads = for (_ <- 
0 until W) yield new Modifier - threads.foreach(_.start()) - for (_ <- 0 until S) consistentIteration(ct, checks) - threads.foreach(_.join()) - } - - "be consistent with a concurrent removal with a well defined order" in { - val sz = 150000 - val sgroupsize = 10 - val sgroupnum = 5 - val removerslowdown = 50 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - - class Remover extends Thread { - override def run() { - for (i <- 0 until sz) { - assert(ct.remove(new Wrap(i)) == Some(i)) - for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate - } - //println("done removing") - } - } - - def consistentIteration(it: Iterator[(Wrap, Int)]) = { - class Iter extends Thread { - override def run() { - val elems = it.toBuffer - if (elems.nonEmpty) { - val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i - assert(elems.forall(_._1.i >= minelem)) - } - } - } - new Iter - } - - val remover = new Remover - remover.start() - for (_ <- 0 until sgroupnum) { - val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator) - iters.foreach(_.start()) - iters.foreach(_.join()) - } - //println("done with iterators") - remover.join() - } - - "be consistent with a concurrent insertion with a well defined order" in { - val sz = 150000 - val sgroupsize = 10 - val sgroupnum = 10 - val inserterslowdown = 50 - val ct = new TrieMap[Wrap, Int] - - class Inserter extends Thread { - override def run() { - for (i <- 0 until sz) { - assert(ct.put(new Wrap(i), i) == None) - for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate - } - //println("done inserting") - } - } - - def consistentIteration(it: Iterator[(Wrap, Int)]) = { - class Iter extends Thread { - override def run() { - val elems = it.toSeq - if (elems.nonEmpty) { - val maxelem = elems.maxBy((x: (Wrap, Int)) => x._1.i)._1.i - assert(elems.forall(_._1.i <= maxelem)) - } - } - } - new Iter - } - - val inserter = new Inserter - inserter.start() - for 
(_ <- 0 until sgroupnum) { - val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator) - iters.foreach(_.start()) - iters.foreach(_.join()) - } - //println("done with iterators") - inserter.join() - } - - "work on a yet unevaluated snapshot" in { - val sz = 50000 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.update(new Wrap(i), i) - - val snap = ct.snapshot() - val it = snap.iterator - - while (it.hasNext) it.next() - } - - "be duplicated" in { - val sz = 50 - val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*) - val it = ct.splitter - for (_ <- 0 until (sz / 2)) it.next() - val dupit = it.dup - - it.toList shouldEqual dupit.toList - } - - } - -} diff --git a/test/files/run/ctries-old/lnode.scala b/test/files/run/ctries-old/lnode.scala deleted file mode 100644 index f9eb9ce87789..000000000000 --- a/test/files/run/ctries-old/lnode.scala +++ /dev/null @@ -1,62 +0,0 @@ - - - -import collection.concurrent.TrieMap - -import Test.Spec - -object LNodeSpec extends Spec { - - val initsz = 1500 - val secondsz = 1750 - - def test() { - "accept elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - } - - "lookup elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i)) - for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None) - } - - "remove elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - for (i <- 0 until initsz) { - val remelem = ct.remove(new DumbHash(i)) - assert(remelem == Some(i), "removing " + i + " yields " + remelem) - } - for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None) - } - - "put elements with the same hash codes if absent" in { - val ct = new 
TrieMap[DumbHash, Int] - for (i <- 0 until initsz) ct.put(new DumbHash(i), i) - for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i) - for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i)) - for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None) - for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i) - } - - "replace elements with the same hash codes" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) - for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i) - for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i)) - for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i) - for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true) - } - - "remove elements with the same hash codes if mapped to a specific value" in { - val ct = new TrieMap[DumbHash, Int] - for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) - for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true) - } - - } - -} diff --git a/test/files/run/ctries-old/main.scala b/test/files/run/ctries-old/main.scala deleted file mode 100644 index 77161fed2fbe..000000000000 --- a/test/files/run/ctries-old/main.scala +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - -@deprecated("Suppress warnings", since="2.11") -object Test { - - def main(args: Array[String]) { - ConcurrentMapSpec.test() - IteratorSpec.test() - LNodeSpec.test() - SnapshotSpec.test() - } - - - -trait Spec { - - implicit def implicitously = scala.language.implicitConversions - implicit def reflectively = scala.language.reflectiveCalls - - implicit def str2ops(s: String) = new { - def in[U](body: =>U) { - // just execute body - body - } - } - - implicit def any2ops(a: Any) = new { - def shouldEqual(other: Any) = assert(a == other) - } - - def evaluating[U](body: =>U) = new { - def shouldProduce[T <: Throwable: 
ClassManifest]() = { - var produced = false - try body - catch { - case e: Throwable => if (e.getClass == implicitly[ClassManifest[T]].runtimeClass) produced = true - } finally { - assert(produced, "Did not produce exception of type: " + implicitly[ClassManifest[T]]) - } - } - } - -} -} diff --git a/test/files/run/ctries-old/snapshot.scala b/test/files/run/ctries-old/snapshot.scala deleted file mode 100644 index dfc2034e5614..000000000000 --- a/test/files/run/ctries-old/snapshot.scala +++ /dev/null @@ -1,268 +0,0 @@ - - - - -import collection._ -import collection.concurrent.TrieMap - -import Test.Spec - - -object SnapshotSpec extends Spec { - - def test() { - "support snapshots" in { - val ctn = new TrieMap - ctn.snapshot() - ctn.readOnlySnapshot() - - val ct = new TrieMap[Int, Int] - for (i <- 0 until 100) ct.put(i, i) - ct.snapshot() - ct.readOnlySnapshot() - } - - "empty 2 quiescent snapshots in isolation" in { - val sz = 4000 - - class Worker(trie: TrieMap[Wrap, Int]) extends Thread { - override def run() { - for (i <- 0 until sz) { - assert(trie.remove(new Wrap(i)) == Some(i)) - for (j <- 0 until sz) - if (j <= i) assert(trie.get(new Wrap(j)) == None) - else assert(trie.get(new Wrap(j)) == Some(j)) - } - } - } - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct.put(new Wrap(i), i) - val snapt = ct.snapshot() - - val original = new Worker(ct) - val snapshot = new Worker(snapt) - original.start() - snapshot.start() - original.join() - snapshot.join() - - for (i <- 0 until sz) { - assert(ct.get(new Wrap(i)) == None) - assert(snapt.get(new Wrap(i)) == None) - } - } - - def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) { - @volatile var e: Exception = null - - // reads possible entries once and stores them - // then reads all these N more times to check if the - // state stayed the same - class Reader(trie: Map[Wrap, Int]) extends Thread { - setName("Reader " + name) - - override def run() = - try check() - catch { - case 
ex: Exception => e = ex - } - - def check() { - val initial = mutable.Map[Wrap, Int]() - for (i <- 0 until sz) trie.get(new Wrap(i)) match { - case Some(i) => initial.put(new Wrap(i), i) - case None => // do nothing - } - - for (k <- 0 until N) { - for (i <- 0 until sz) { - val tres = trie.get(new Wrap(i)) - val ires = initial.get(new Wrap(i)) - if (tres != ires) println(i, "initially: " + ires, "traversal %d: %s".format(k, tres)) - assert(tres == ires) - } - } - } - } - - val reader = new Reader(readonly) - reader.start() - reader.join() - - if (e ne null) { - e.printStackTrace() - throw e - } - } - - // traverses the trie `rep` times and modifies each entry - class Modifier(trie: TrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread { - setName("Modifier %d".format(index)) - - override def run() { - for (k <- 0 until rep) { - for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match { - case Some(_) => trie.remove(new Wrap(i)) - case None => // do nothing - } - } - } - } - - // removes all the elements from the trie - class Remover(trie: TrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread { - setName("Remover %d".format(index)) - - override def run() { - for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz)) - } - } - - "have a consistent quiescent read-only snapshot" in { - val sz = 10000 - val N = 100 - val W = 10 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val readonly = ct.readOnlySnapshot() - val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) - - threads.foreach(_.start()) - consistentReadOnly("qm", readonly, sz, N) - threads.foreach(_.join()) - } - - // now, we check non-quiescent snapshots, as these permit situations - // where a thread is caught in the middle of the update when a snapshot is taken - - "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in { - val sz = 1250 - val W = 100 - val S = 5000 - - val 
ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz) - - threads.foreach(_.start()) - for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5) - threads.foreach(_.join()) - } - - "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in { - val sz = 1000 - val N = 7000 - val W = 10 - val S = 7000 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) - - threads.foreach(_.start()) - for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5) - threads.foreach(_.join()) - } - - def consistentNonReadOnly(name: String, trie: TrieMap[Wrap, Int], sz: Int, N: Int) { - @volatile var e: Exception = null - - // reads possible entries once and stores them - // then reads all these N more times to check if the - // state stayed the same - class Worker extends Thread { - setName("Worker " + name) - - override def run() = - try check() - catch { - case ex: Exception => e = ex - } - - def check() { - val initial = mutable.Map[Wrap, Int]() - for (i <- 0 until sz) trie.get(new Wrap(i)) match { - case Some(i) => initial.put(new Wrap(i), i) - case None => // do nothing - } - - for (k <- 0 until N) { - // modify - for ((key, value) <- initial) { - val oldv = if (k % 2 == 0) value else -value - val newv = -oldv - trie.replace(key, oldv, newv) - } - - // check - for (i <- 0 until sz) if (initial.contains(new Wrap(i))) { - val expected = if (k % 2 == 0) -i else i - //println(trie.get(new Wrap(i))) - assert(trie.get(new Wrap(i)) == Some(expected)) - } else { - assert(trie.get(new Wrap(i)) == None) - } - } - } - } - - val worker = new Worker - worker.start() - worker.join() - - if (e ne null) { - e.printStackTrace() - throw e - } - } - - "have a consistent non-quiescent snapshot, concurrent with modifications" in { - val sz = 9000 - val N 
= 1000 - val W = 10 - val S = 400 - - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) - - threads.foreach(_.start()) - for (i <- 0 until S) { - consistentReadOnly("non-qm", ct.snapshot(), sz, 5) - consistentNonReadOnly("non-qsnap", ct.snapshot(), sz, 5) - } - threads.foreach(_.join()) - } - - "work when many concurrent snapshots are taken, concurrent with modifications" in { - val sz = 12000 - val W = 10 - val S = 10 - val modifytimes = 1200 - val snaptimes = 600 - val ct = new TrieMap[Wrap, Int] - for (i <- 0 until sz) ct(new Wrap(i)) = i - - class Snapshooter extends Thread { - setName("Snapshooter") - override def run() { - for (k <- 0 until snaptimes) { - val snap = ct.snapshot() - for (i <- 0 until sz) snap.remove(new Wrap(i)) - for (i <- 0 until sz) assert(!snap.contains(new Wrap(i))) - } - } - } - - val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz) - val shooters = for (i <- 0 until S) yield new Snapshooter - val threads = mods ++ shooters - threads.foreach(_.start()) - threads.foreach(_.join()) - } - - } - -} diff --git a/test/files/run/dead-code-elimination.scala b/test/files/run/dead-code-elimination.scala index 2291b22f7aba..33afaade4348 100644 --- a/test/files/run/dead-code-elimination.scala +++ b/test/files/run/dead-code-elimination.scala @@ -22,12 +22,12 @@ final class A { class B { def f() = 1 to 10 foreach (_ => f3) } - def f = (new B).f() + def f() = (new B).f() } object Test { def main(args: Array[String]): Unit = { // force the loading of B - (new A).f + (new A).f() } } diff --git a/test/files/run/deadlock.scala b/test/files/run/deadlock.scala new file mode 100644 index 000000000000..f3dc67244e03 --- /dev/null +++ b/test/files/run/deadlock.scala @@ -0,0 +1,18 @@ +object Test extends App { + val createPredef = new Runnable { + def run = { + val _ = Predef; + } + } + val createVector = new Runnable { + def run = { + val _ = 
scala.collection.immutable.Vector; + } + } + val t1 = new Thread(createPredef) + val t2 = new Thread(createVector) + t1.start() + t2.start() + t1.join() + t2.join() +} diff --git a/test/files/run/debug-type-error.check b/test/files/run/debug-type-error.check new file mode 100644 index 000000000000..a201aa6756b1 --- /dev/null +++ b/test/files/run/debug-type-error.check @@ -0,0 +1,5 @@ +newSource1.scala:3: error: object dummy is not a member of package org + val a: org.dummy.Dummy = ??? + ^ +java.lang.Exception: Stack trace +java.lang.Exception: Stack trace diff --git a/test/files/run/debug-type-error.scala b/test/files/run/debug-type-error.scala new file mode 100644 index 000000000000..2652815d6bdc --- /dev/null +++ b/test/files/run/debug-type-error.scala @@ -0,0 +1,25 @@ +//> using filter (\s*)at(.*) + +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vdebug-type-error" + + def code: String = "" + + def noSuchType: String = """ +object Example +{ + val a: org.dummy.Dummy = ??? 
+} + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(noSuchType) + } +} diff --git a/test/files/run/deeps.scala b/test/files/run/deeps.scala index 1546112ed5dc..2d68df2e6931 100644 --- a/test/files/run/deeps.scala +++ b/test/files/run/deeps.scala @@ -1,3 +1,5 @@ +import scala.tools.partest.Util.ArrayDeep + //############################################################################ // deepEquals / deep.toString //############################################################################ @@ -6,41 +8,41 @@ // need to revisit array equality object Test { - def testEquals1 { + def testEquals1(): Unit = { println(Array(1) == Array(1)) println(Array(1) equals Array(1)) println(Array(1).deep == Array(1).deep) - println + println() } - def testEquals2 { + def testEquals2(): Unit = { println(Array(Array(1), Array(2)) == Array(Array(1), Array(2))) println(Array(Array(1), Array(2)) equals Array(Array(1), Array(2))) println(Array(Array(1), Array(2)).deep equals Array(Array(1), Array(2)).deep) - println + println() } - def testEquals3 { + def testEquals3(): Unit = { val a1 = Array(1) val b1 = Array(1) val a2 = Array(a1, b1) val b2 = Array(a1, b1) val a3 = Array(a2, b2) val b3 = Array(a2, b2) - def test[T](x: Array[T], y: Array[T]) { + def test[T](x: Array[T], y: Array[T]): Unit = { println("x=" + x.deep.toString) println("y=" + y.deep.toString) println(x == y) println(x equals y) println(x.deep == y.deep) - println + println() } test(a1, b1) test(a2, b2) test(a3, b3) } - def testEquals4 { + def testEquals4(): Unit = { println("boo:and:foo".split(':') == "boo:and:foo".split(':')) println("boo:and:foo".split(':') equals "boo:and:foo".split(':')) println("boo:and:foo".split(':').deep == "boo:and:foo".split(':').deep) @@ -52,18 +54,18 @@ object Test { println(xs.toArray.deep == ys.toArray.deep) } - def testToString1 { + def testToString1(): Unit = { def sweep(s: String) = ( 
s.replaceAll("D@[0-9a-fA-F]+", "D@0000000") .replaceAll("Z@[0-9a-fA-F]+", "Z@0000000") .replaceAll(";@[0-9a-fA-F]+", ";@0000000") ) - def test[T](a: Array[T]) { + def test[T](a: Array[T]): Unit = { println(sweep(a.deep.toString)) println(a.deep.toString) println(a.deep.mkString("[", ";", "]")) println(a.deep.mkString(";")) - println + println() } val ba1 = Array(true, false) @@ -88,14 +90,14 @@ object Test { test(sa3) } - def testToString2 { + def testToString2(): Unit = { println(Array(Array(true, false), Array(false)).deep.mkString("[", "; ", "]")) println(Array(Array('1', '2'), Array('3')).deep.mkString("[", "; ", "]")) println(Array(Array(1, 2), Array(3)).deep.mkString("[", "; ", "]")) - println + println() } - def testToString3 { + def testToString3(): Unit = { println("boo:and:foo".split(':').deep.toString) val xs = new java.util.ArrayList[String](); xs.add("a") @@ -103,12 +105,12 @@ object Test { } def main(args: Array[String]): Unit = { - println("testEquals1") ; testEquals1 - println("testEquals2") ; testEquals2 - println("testEquals3") ; testEquals3 - println("testEquals4") ; testEquals4 - testToString1 - testToString2 - testToString3 + println("testEquals1") ; testEquals1() + println("testEquals2") ; testEquals2() + println("testEquals3") ; testEquals3() + println("testEquals4") ; testEquals4() + testToString1() + testToString2() + testToString3() } } diff --git a/test/files/run/defaults-serizaliable-no-forwarders.flags b/test/files/run/defaults-serizaliable-no-forwarders.flags deleted file mode 100644 index ea6a956ea942..000000000000 --- a/test/files/run/defaults-serizaliable-no-forwarders.flags +++ /dev/null @@ -1 +0,0 @@ --Xmixin-force-forwarders:false diff --git a/test/files/run/defaults-serizaliable-no-forwarders.scala b/test/files/run/defaults-serizaliable-no-forwarders.scala index b2762cf443bc..0c77afda79c6 100644 --- a/test/files/run/defaults-serizaliable-no-forwarders.scala +++ b/test/files/run/defaults-serizaliable-no-forwarders.scala @@ -1,3 
+1,5 @@ +//> using options -Xmixin-force-forwarders:false + import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream} trait T1 extends Serializable { diff --git a/test/files/run/delambdafy-dependent-on-param-subst-2.scala b/test/files/run/delambdafy-dependent-on-param-subst-2.scala index 7b6fc597e8e8..4f7b2dc6e7b4 100644 --- a/test/files/run/delambdafy-dependent-on-param-subst-2.scala +++ b/test/files/run/delambdafy-dependent-on-param-subst-2.scala @@ -6,7 +6,7 @@ class C class A { class C } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val a = new A // class O extends M[a.C] { def m(x: a.C) = true } diff --git a/test/files/run/delambdafy-dependent-on-param-subst.scala b/test/files/run/delambdafy-dependent-on-param-subst.scala index a934138f2aa6..77985f6dd2d4 100644 --- a/test/files/run/delambdafy-dependent-on-param-subst.scala +++ b/test/files/run/delambdafy-dependent-on-param-subst.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:method +//> using options -Ydelambdafy:method +// trait M[-X] { def m(x: X): Boolean } @@ -7,7 +8,7 @@ class C class A { class C } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val a = new A // class O extends M[a.C] { def m(x: a.C) = true } diff --git a/test/files/run/delambdafy-nested-by-name.scala b/test/files/run/delambdafy-nested-by-name.scala index 4498b3308d3a..37aa86a0477c 100644 --- a/test/files/run/delambdafy-nested-by-name.scala +++ b/test/files/run/delambdafy-nested-by-name.scala @@ -5,7 +5,7 @@ object Test { def meth1(arg1: => String) = arg1 def meth2(arg2: => String) = meth1({println("hello"); arg2}) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(meth2("world")) } -} \ No newline at end of file +} diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check index 1733e8eaac96..03ba48222827 100644 --- 
a/test/files/run/delambdafy_t6028.check +++ b/test/files/run/delambdafy_t6028.check @@ -56,4 +56,4 @@ package { } } -warning: one feature warning; re-run with -feature for details +warning: 1 feature warning; re-run with -feature for details diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala index ca39195310a1..5e1ed7d294f8 100644 --- a/test/files/run/delambdafy_t6028.scala +++ b/test/files/run/delambdafy_t6028.scala @@ -1,9 +1,8 @@ import scala.tools.partest._ -import java.io.{Console => _, _} object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Ydelambdafy:method -Xprint:lambdalift -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Ydelambdafy:method -Vprint:lambdalift" override def code = """class T(classParam: String) { | val field: String = "" @@ -13,9 +12,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala index 8d4976e9892f..93839ecf950c 100644 --- a/test/files/run/delambdafy_t6555.scala +++ b/test/files/run/delambdafy_t6555.scala @@ -1,15 +1,10 @@ -import scala.tools.partest._ -import java.io.{Console => _, _} +import scala.tools.partest.DirectTest object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:method -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:specialize -Ydelambdafy:method" override def code = "class Foo { val f = (param: String) => param } " - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_byname_inline.check b/test/files/run/delambdafy_uncurry_byname_inline.check index d96a995f4479..1f011d7bfabb 100644 --- 
a/test/files/run/delambdafy_uncurry_byname_inline.check +++ b/test/files/run/delambdafy_uncurry_byname_inline.check @@ -7,14 +7,14 @@ package { }; def bar(x: () => Int): Int = x.apply(); def foo(): Int = Foo.this.bar({ - @SerialVersionUID(value = 0) final class $anonfun extends scala.runtime.AbstractFunction0[Int] with Serializable { - def (): <$anon: () => Int> = { + @SerialVersionUID(value = 0) final class $anonfun extends scala.runtime.AbstractFunction0[1] with java.io.Serializable { + def (): <$anon: () => 1> = { $anonfun.super.(); () }; final def apply(): Int = 1 }; - (new <$anon: () => Int>(): () => Int) + (new <$anon: () => 1>(): () => Int) }) } } diff --git a/test/files/run/delambdafy_uncurry_byname_inline.scala b/test/files/run/delambdafy_uncurry_byname_inline.scala index 8f480fa80488..6e3507960d39 100644 --- a/test/files/run/delambdafy_uncurry_byname_inline.scala +++ b/test/files/run/delambdafy_uncurry_byname_inline.scala @@ -1,9 +1,8 @@ -import scala.tools.partest._ -import java.io.{Console => _, _} +import scala.tools.partest.DirectTest object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:inline -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:uncurry -Ydelambdafy:inline" override def code = """class Foo { | def bar(x: => Int) = x @@ -12,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala index 0ccc1f2e92bf..ccef6d1cd3dc 100644 --- a/test/files/run/delambdafy_uncurry_byname_method.scala +++ b/test/files/run/delambdafy_uncurry_byname_method.scala @@ -1,9 +1,8 @@ -import scala.tools.partest._ -import java.io.{Console => _, _} +import scala.tools.partest.DirectTest object Test extends DirectTest { - 
override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry" override def code = """class Foo { | def bar(x: => String) = x @@ -12,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_inline.check b/test/files/run/delambdafy_uncurry_inline.check index 5521cc4a2cc3..47aa7b1e7dab 100644 --- a/test/files/run/delambdafy_uncurry_inline.check +++ b/test/files/run/delambdafy_uncurry_inline.check @@ -7,7 +7,7 @@ package { }; def bar(): Unit = { val f: Int => Int = { - @SerialVersionUID(value = 0) final class $anonfun extends scala.runtime.AbstractFunction1[Int,Int] with Serializable { + @SerialVersionUID(value = 0) final class $anonfun extends scala.runtime.AbstractFunction1[Int,Int] with java.io.Serializable { def (): <$anon: Int => Int> = { $anonfun.super.(); () diff --git a/test/files/run/delambdafy_uncurry_inline.scala b/test/files/run/delambdafy_uncurry_inline.scala index b42b65f5bbda..4187909a1508 100644 --- a/test/files/run/delambdafy_uncurry_inline.scala +++ b/test/files/run/delambdafy_uncurry_inline.scala @@ -1,9 +1,8 @@ import scala.tools.partest._ -import java.io.{Console => _, _} object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:inline -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:uncurry -Ydelambdafy:inline" override def code = """class Foo { | def bar = { @@ -12,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git 
a/test/files/run/delambdafy_uncurry_method.scala b/test/files/run/delambdafy_uncurry_method.scala index a988fb2ee7bf..849ed872f9c6 100644 --- a/test/files/run/delambdafy_uncurry_method.scala +++ b/test/files/run/delambdafy_uncurry_method.scala @@ -1,9 +1,8 @@ import scala.tools.partest._ -import java.io.{Console => _, _} object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry" override def code = """class Foo { | def bar = { @@ -12,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check index a9f994fefaf4..9d9c828a034c 100644 --- a/test/files/run/delay-bad.check +++ b/test/files/run/delay-bad.check @@ -1,10 +1,3 @@ -delay-bad.scala:53: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - f(new C { 5 }) - ^ -delay-bad.scala:73: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - f(new { val x = 5 } with E() { 5 }) - ^ -warning: one deprecation (since 2.11.0); re-run with -deprecation for details // new C { } diff --git a/test/files/run/delay-bad.scala b/test/files/run/delay-bad.scala index 43acc1ea3d3b..a743285791e3 100644 --- a/test/files/run/delay-bad.scala +++ b/test/files/run/delay-bad.scala @@ -1,3 +1,4 @@ +//> using options -Xmaxwarns 0 trait A extends DelayedInit { print("-A") @@ -44,7 +45,7 @@ class E() extends D() { object Test { def p(msg: String) = println("\n\n// " + msg) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val f: A => Unit = _ => () p("new C { }") diff 
--git a/test/files/run/delay-good.check b/test/files/run/delay-good.check index ed35b9225ffb..8eb04c7cff2e 100644 --- a/test/files/run/delay-good.check +++ b/test/files/run/delay-good.check @@ -1,9 +1,3 @@ -delay-good.scala:53: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - f(new C { 5 }) - ^ -delay-good.scala:73: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - f(new { val x = 5 } with E() { 5 }) - ^ // new C { } diff --git a/test/files/run/delay-good.scala b/test/files/run/delay-good.scala index 2e4487b92ca5..5787db7b4b5f 100644 --- a/test/files/run/delay-good.scala +++ b/test/files/run/delay-good.scala @@ -1,3 +1,4 @@ +//> using options -Xmaxwarns 0 trait A { print("-A") @@ -44,7 +45,7 @@ class E() extends D() { object Test { def p(msg: String) = println("\n\n// " + msg) - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val f: A => Unit = _.postConstructionCode p("new C { }") diff --git a/test/files/run/deprecate-early-type-defs.check b/test/files/run/deprecate-early-type-defs.check index 9b1449ffcf5e..a2534f1d499b 100644 --- a/test/files/run/deprecate-early-type-defs.check +++ b/test/files/run/deprecate-early-type-defs.check @@ -1,3 +1,6 @@ -deprecate-early-type-defs.scala:2: warning: early type members are deprecated. Move them to the regular body: the semantics are the same. +deprecate-early-type-defs.scala:3: warning: early initializers are deprecated; they will be replaced by trait parameters in 3.0, see the migration guide on avoiding var/val in traits. 
+object Test extends { type T = Int } with App + ^ +deprecate-early-type-defs.scala:3: warning: early type members are deprecated: move them to the regular body; the semantics are the same object Test extends { type T = Int } with App ^ diff --git a/test/files/run/deprecate-early-type-defs.scala b/test/files/run/deprecate-early-type-defs.scala index 813eaf9415e2..cc0a32bf18a8 100644 --- a/test/files/run/deprecate-early-type-defs.scala +++ b/test/files/run/deprecate-early-type-defs.scala @@ -1,2 +1,3 @@ -// scalac: -deprecation +//> using options -deprecation +// object Test extends { type T = Int } with App diff --git a/test/files/run/disable-assertions.scala b/test/files/run/disable-assertions.scala index d329da9f76b4..e00efd00e71e 100644 --- a/test/files/run/disable-assertions.scala +++ b/test/files/run/disable-assertions.scala @@ -1,4 +1,5 @@ -// scalac: -Xdisable-assertions +//> using options -Xdisable-assertions +// object Elided { import annotation._, elidable._ diff --git a/test/files/run/dotty-i11332b.scala b/test/files/run/dotty-i11332b.scala index 2627029541fa..cf6374fc56ea 100644 --- a/test/files/run/dotty-i11332b.scala +++ b/test/files/run/dotty-i11332b.scala @@ -1,5 +1,5 @@ -// javaVersion: 11+ -// scalac: -release:11 +//> using jvm 11+ +//> using options -release:11 import java.lang.invoke._, MethodType.methodType diff --git a/test/files/run/dotty-t12348.scala b/test/files/run/dotty-t12348.scala index b655da3012dc..93cc39c7bd51 100644 --- a/test/files/run/dotty-t12348.scala +++ b/test/files/run/dotty-t12348.scala @@ -1,5 +1,5 @@ -// javaVersion: 11+ -// scalac: -release:11 +//> using jvm 11+ +//> using options -release:11 import java.lang.invoke._ import scala.runtime.IntRef diff --git a/test/files/run/duplicate-meth.scala b/test/files/run/duplicate-meth.scala index 40c0d3d67655..4e0df9ac7bbf 100644 --- a/test/files/run/duplicate-meth.scala +++ b/test/files/run/duplicate-meth.scala @@ -1,21 +1,22 @@ +import annotation.unused trait Base { - private val 
secure_# = 10l + @unused private val secure_# = 10L } class TestUser extends Base { def clsMeth(x: Int) = x - private def foo(x: Int) = x + @unused private def foo(x: Int) = x } object TestUser extends TestUser { def objMeth = "a" - private def foo(x: Int) = x + @unused private def foo(x: Int) = x } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { TestUser.objMeth // no-op, just check that it passes verification println("verified!") diff --git a/test/files/run/dynamic-applyDynamic.check b/test/files/run/dynamic-applyDynamic.check index ae6996f8fc27..0631de014b1c 100644 --- a/test/files/run/dynamic-applyDynamic.check +++ b/test/files/run/dynamic-applyDynamic.check @@ -1,14 +1,14 @@ [[syntax trees at end of typer]] // newSource1.scala [0:67]package [0:0] { - [0:67]object X extends [9:67][67]scala.AnyRef { - [67]def (): [9]X.type = [67]{ - [67][67][67]X.super.(); + [0:67]object X extends [9:67][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); [21] def d: [21]D = [21][21]X.this.d; - [37:49][37:38][37:38][37]X.this.d.applyDynamic(<39:45>"method")([46:48]10); - [56:61]<56:57><56:57>[56]X.this.d.applyDynamic(<56:57>"apply")([58:60]10) + [37:49][37:45][37:38][37]X.this.d.applyDynamic(<39:45>"method")([46:48]10); + [56:61][56:57]<56:57>[56]X.this.d.applyDynamic(<56:57>"apply")([58:60]10) } } diff --git a/test/files/run/dynamic-applyDynamic.scala b/test/files/run/dynamic-applyDynamic.scala index 3ce59713ded1..d6c6e8190ca6 100644 --- a/test/files/run/dynamic-applyDynamic.scala +++ b/test/files/run/dynamic-applyDynamic.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" + s"-usejavacp -Vprint-pos -Vprint:typer -Ystop-after:typer -cp 
${testOutput.path}" override def code = """ object X { @@ -13,14 +13,10 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics class D extends Dynamic { def applyDynamic(name: String)(value: Any) = ??? -} \ No newline at end of file +} diff --git a/test/files/run/dynamic-applyDynamicNamed.check b/test/files/run/dynamic-applyDynamicNamed.check index c4e050ba17a4..20fb5e870477 100644 --- a/test/files/run/dynamic-applyDynamicNamed.check +++ b/test/files/run/dynamic-applyDynamicNamed.check @@ -1,14 +1,14 @@ [[syntax trees at end of typer]] // newSource1.scala [0:97]package [0:0] { - [0:97]object X extends [9:97][97]scala.AnyRef { - [97]def (): [9]X.type = [97]{ - [97][97][97]X.super.(); + [0:97]object X extends [9:97][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); [21] def d: [21]D = [21][21]X.this.d; - [37:70][37:38][37:38][37]X.this.d.applyDynamicNamed(<39:43>"meth")([44:55][44][44]scala.Tuple2.apply[[44]String, [44]Int]([44:50]"value1", [53:55]10), [57:69][57][57]scala.Tuple2.apply[[57]String, [57]Int]([57:63]"value2", [66:69]100)); - [77:91]<77:78><77:78>[77]X.this.d.applyDynamicNamed(<77:78>"apply")([79:90][79][79]scala.Tuple2.apply[[79]String, [79]Int]([79:85]"value1", [88:90]10)) + [37:70][37:43][37:38][37]X.this.d.applyDynamicNamed(<39:43>"meth")([44:55][44][44]scala.Tuple2.apply[[44]String, [44]Int]([44:50]"value1", [53:55]10), [57:69][57][57]scala.Tuple2.apply[[57]String, [57]Int]([57:63]"value2", [66:69]100)); + [77:91][77:78]<77:78>[77]X.this.d.applyDynamicNamed(<77:78>"apply")([79:90][79][79]scala.Tuple2.apply[[79]String, [79]Int]([79:85]"value1", [88:90]10)) } } diff --git a/test/files/run/dynamic-applyDynamicNamed.scala b/test/files/run/dynamic-applyDynamicNamed.scala index 500f44dc06d5..f48003acc735 100644 --- 
a/test/files/run/dynamic-applyDynamicNamed.scala +++ b/test/files/run/dynamic-applyDynamicNamed.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" + s"-usejavacp -Vprint-pos -Vprint:typer -Ystop-after:typer -cp ${testOutput.path}" override def code = """ object X { @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-selectDynamic.check b/test/files/run/dynamic-selectDynamic.check index 9635ca4e6f74..82cd656e6602 100644 --- a/test/files/run/dynamic-selectDynamic.check +++ b/test/files/run/dynamic-selectDynamic.check @@ -1,13 +1,13 @@ [[syntax trees at end of typer]] // newSource1.scala [0:50]package [0:0] { - [0:50]object X extends [9:50][50]scala.AnyRef { - [50]def (): [9]X.type = [50]{ - [50][50][50]X.super.(); + [0:50]object X extends [9:50][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); [21] def d: [21]D = [21][21]X.this.d; - [37:38][37:38][37]X.this.d.selectDynamic(<39:44>"field") + [37:44][37:38][37]X.this.d.selectDynamic(<39:44>"field") } } diff --git a/test/files/run/dynamic-selectDynamic.scala b/test/files/run/dynamic-selectDynamic.scala index 937529a505f4..061dd5055810 100644 --- a/test/files/run/dynamic-selectDynamic.scala +++ b/test/files/run/dynamic-selectDynamic.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" + s"-usejavacp -Vprint-pos -Vprint:typer -Ystop-after:typer 
-cp ${testOutput.path}" override def code = """ object X { @@ -12,11 +12,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-updateDynamic.check b/test/files/run/dynamic-updateDynamic.check index 154fea34cb69..5180f3e7bfd8 100644 --- a/test/files/run/dynamic-updateDynamic.check +++ b/test/files/run/dynamic-updateDynamic.check @@ -1,14 +1,14 @@ [[syntax trees at end of typer]] // newSource1.scala [0:69]package [0:0] { - [0:69]object X extends [9:69][69]scala.AnyRef { - [69]def (): [9]X.type = [69]{ - [69][69][69]X.super.(); + [0:69]object X extends [9:69][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); [21] def d: [21]D = [21][21]X.this.d; - [37:49][37:38][37:38][37]X.this.d.updateDynamic(<39:44>"field")([47:49]10); - [56:57][56:57][56]X.this.d.selectDynamic(<58:63>"field") + [37:49][37:44][37:38][37]X.this.d.updateDynamic(<39:44>"field")([47:49]10); + [56:63][56:57][56]X.this.d.selectDynamic(<58:63>"field") } } diff --git a/test/files/run/dynamic-updateDynamic.scala b/test/files/run/dynamic-updateDynamic.scala index 32fc530e7b0d..c44d7704e89c 100644 --- a/test/files/run/dynamic-updateDynamic.scala +++ b/test/files/run/dynamic-updateDynamic.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" + s"-usejavacp -Vprint-pos -Vprint:typer -Ystop-after:typer -cp ${testOutput.path}" override def code = """ object X { @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() 
} import language.dynamics diff --git a/test/files/run/elidable-opt.scala b/test/files/run/elidable-opt.scala index b5d7626c6e8b..efed09bd5af6 100644 --- a/test/files/run/elidable-opt.scala +++ b/test/files/run/elidable-opt.scala @@ -1,32 +1,33 @@ -// scalac: -Xelide-below 900 +//> using options -Xelide-below 900 -deprecation -Xmaxwarns 0 +// import annotation._ import elidable._ trait T { - @elidable(FINEST) def f1() - @elidable(SEVERE) def f2() + @elidable(FINEST) def f1(): Unit = ??? + @elidable(SEVERE) def f2(): Unit = ??? @elidable(FINEST) def f3() = assert(false, "Should have been elided.") - def f4() + def f4(): Unit } class C extends T { - def f1() = println("Good for me, I was not elided. C.f1") - def f2() = println("Good for me, I was not elided. C.f2") - @elidable(FINEST) def f4() = assert(false, "Should have been elided.") + override def f1() = println("Good for me, I was not elided. C.f1") + override def f2() = println("Good for me, I was not elided. C.f2") + @elidable(FINEST) def f4(): Unit = assert(false, "Should have been elided.") } object O { @elidable(FINEST) def f1() = assert(false, "Should have been elided.") @elidable(INFO) def f2() = assert(false, "Should have been elided.") @elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3") - @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).") + @elidable(INFO) def f4: Unit = assert(false, "Should have been elided (no parens).") } object Test { @elidable(FINEST) def f1() = assert(false, "Should have been elided.") @elidable(INFO) def f2() = assert(false, "Should have been elided.") @elidable(SEVERE) def f3() = println("Good for me, I was not elided. 
Test.f3") - @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).") + @elidable(INFO) def f4: Unit = assert(false, "Should have been elided (no parens).") @elidable(FINEST) def f5() = {} @elidable(FINEST) def f6() = true @@ -34,7 +35,7 @@ object Test { @elidable(FINEST) def f8() = 1:Short @elidable(FINEST) def f9() = 1:Char @elidable(FINEST) def fa() = 1 - @elidable(FINEST) def fb() = 1l + @elidable(FINEST) def fb() = 1L @elidable(FINEST) def fc() = 1.0f @elidable(FINEST) def fd() = 1.0 @elidable(FINEST) def fe() = "s" diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala index f48379900238..1daa6a8acff3 100644 --- a/test/files/run/elidable.scala +++ b/test/files/run/elidable.scala @@ -1,4 +1,5 @@ -// scalac: -Xelide-below WARNING +//> using options -Xelide-below WARNING -deprecation +// import annotation._ import elidable._ @@ -10,30 +11,30 @@ object Fail { import Fail.fail trait T { - @elidable(FINEST) def f1() - @elidable(SEVERE) def f2() + @elidable(FINEST) def f1(): Unit = ??? + @elidable(SEVERE) def f2(): Unit = ??? @elidable(FINEST) def f3() = fail("Should have been elided.") - def f4() + def f4(): Unit } class C extends T { - def f1() = println("Good for me, I was not elided. C.f1") - def f2() = println("Good for me, I was not elided. C.f2") - @elidable(FINEST) def f4() = fail("Should have been elided.") + override def f1() = println("Good for me, I was not elided. C.f1") + override def f2() = println("Good for me, I was not elided. C.f2") + @elidable(FINEST) def f4(): Unit = fail("Should have been elided.") } object O { @elidable(FINEST) def f1() = fail("Should have been elided.") @elidable(INFO) def f2() = fail("Should have been elided.") @elidable(SEVERE) def f3() = println("Good for me, I was not elided. 
O.f3") - @elidable(INFO) def f4 = fail("Should have been elided (no parens).") + @elidable(INFO) def f4: Unit = fail("Should have been elided (no parens).") } object Test { @elidable(FINEST) def f1() = fail("Should have been elided.") @elidable(INFO) def f2() = fail("Should have been elided.") @elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3") - @elidable(INFO) def f4 = fail("Should have been elided (no parens).") + @elidable(INFO) def f4: Unit = fail("Should have been elided (no parens).") @elidable(FINEST) def f5() = {} @elidable(FINEST) def f6() = true @@ -41,7 +42,7 @@ object Test { @elidable(FINEST) def f8() = 1:Short @elidable(FINEST) def f9() = 1:Char @elidable(FINEST) def fa() = 1 - @elidable(FINEST) def fb() = 1l + @elidable(FINEST) def fb() = 1L @elidable(FINEST) def fc() = 1.0f @elidable(FINEST) def fd() = 1.0 @elidable(FINEST) def fe() = { fail("Should have been elided to empty string.") ; "hello, world" } diff --git a/test/files/run/enrich-gentraversable.check b/test/files/run/enrich-gentraversable.check index 94c66e36921d..ab6c12442e31 100644 --- a/test/files/run/enrich-gentraversable.check +++ b/test/files/run/enrich-gentraversable.check @@ -1,8 +1,8 @@ List(2, 4) -Array(2, 4) -HW -Vector(72, 108, 108, 32, 114, 108, 100) List(2, 4) Array(2, 4) HW -Vector(72, 108, 108, 32, 114, 108, 100) +ArraySeq(72, 108, 108, 32, 114, 108, 100) +Map(bar -> 2) +TreeMap(bar -> 2) +Map(bar -> 2) diff --git a/test/files/run/enrich-gentraversable.scala b/test/files/run/enrich-gentraversable.scala index 36412e650ebf..2b7ec4dc476e 100644 --- a/test/files/run/enrich-gentraversable.scala +++ b/test/files/run/enrich-gentraversable.scala @@ -1,24 +1,30 @@ +import scala.tools.partest.Util.ArrayDeep import scala.language.implicitConversions -import scala.language.postfixOps object Test extends App { - import scala.collection.{GenTraversableOnce,GenTraversableLike} - import scala.collection.generic._ - - def typed[T](t : => T) {} - def 
testTraversableLike = { - class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) /* extends AnyVal */ { - final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = - r.flatMap(f(_).toSeq) + import scala.collection.generic.IsIterable + import scala.collection.{BuildFrom, Iterable, IterableOps, View} + import scala.collection.immutable.TreeMap + + def typed[T](t : => T): Unit = {} + def testIterableOps() = { + class FilterMapImpl[A, Repr](r: Repr, it: IterableOps[A, Iterable, _]) { + final def filterMap[B, That](f: A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = + bf.fromSpecific(r)(it.flatMap(f(_))) } - implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapImpl[fr.A,Repr] = - new FilterMapImpl[fr.A, Repr](fr.conversion(r)) + implicit def filterMap[Repr](r: Repr)(implicit fr: IsIterable[Repr]): FilterMapImpl[fr.A, Repr] = + new FilterMapImpl[fr.A, Repr](r, fr(r)) val l = List(1, 2, 3, 4, 5) val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None) typed[List[Int]](fml) println(fml) + val lv = l.view + val fmlv = lv.filterMap(i => if (i % 2 == 0) Some(i) else None) + typed[View[Int]](fmlv) + println(fmlv.toList) + val a = Array(1, 2, 3, 4, 5) val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None) typed[Array[Int]](fma) @@ -29,42 +35,26 @@ object Test extends App { typed[String](fms1) println(fms1) - val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None) + val fms2 = s.filterMap(c => if(c % 2 == 0) Some(c.toInt) else None) typed[IndexedSeq[Int]](fms2) println(fms2) - } - def testTraversableOnce = { - class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) /* extends AnyVal */ { - final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = { - val b = cbf() - for(e <- r.seq) f(e) foreach (b +=) - b.result - } - } - implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): 
FilterMapImpl[fr.A,Repr] = - new FilterMapImpl[fr.A, Repr](fr.conversion(r)) + val m = Map(1 -> "foo", 2 -> "bar") + val fmm = m.filterMap { case (k, v) => if (k % 2 == 0) Some(v -> k) else None } + typed[Map[String, Int]](fmm) + println(fmm) - val l = List(1, 2, 3, 4, 5) - val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None) - typed[List[Int]](fml) - println(fml) + val tm = TreeMap(1 -> "foo", 2 -> "bar") + val tmm = tm.filterMap { case (k, v) => if (k % 2 == 0) Some(v -> k) else None } + typed[TreeMap[String, Int]](tmm) + println(tmm) - val a = Array(1, 2, 3, 4, 5) - val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None) - typed[Array[Int]](fma) - println(fma.deep) + val mv = m.view + val fmmv = mv.filterMap { case (k, v) => if (k % 2 == 0) Some(v -> k) else None } + typed[View[(String, Int)]](fmmv) + println(fmmv.toMap) - val s = "Hello World" - val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None) - typed[String](fms1) - println(fms1) - - val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None) - typed[IndexedSeq[Int]](fms2) - println(fms2) } - testTraversableLike - testTraversableOnce + testIterableOps() } diff --git a/test/files/run/enums.scala b/test/files/run/enums.scala index 3aad7ec3209c..257f9898ec2b 100644 --- a/test/files/run/enums.scala +++ b/test/files/run/enums.scala @@ -85,7 +85,7 @@ object Test5 { val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value } - def run { + def run(): Unit = { val s1 = D1.ValueSet(D1.North, D1.East) val s2 = D2.North + D2.East println(s1) @@ -96,7 +96,8 @@ object Test5 { println(s2.toBitMask.map(_.toBinaryString).toList) println(D1.ValueSet.fromBitMask(s1.toBitMask)) println(D2.ValueSet.fromBitMask(s2.toBitMask)) - println(WeekDays.values.range(WeekDays.Tue, WeekDays.Sat)) + val r: WeekDays.ValueSet = WeekDays.values.range(WeekDays.Tue, WeekDays.Sat) + println(r) } } @@ -117,8 +118,9 @@ object SerializationTest { prime } - def run { - serialize(new B()) + def run(): Unit = { + /* This is 
no longer possible with the proxy-based serialization for collections: */ + //serialize(new B()) serialize(new A()) } } @@ -143,7 +145,7 @@ object Test { exception.printStackTrace(); } } - Console.println; + Console.println() } def main(args: Array[String]): Unit = { @@ -151,10 +153,10 @@ object Test { check_success("Test2", Test2.run, 5); check_success("Test3", Test3.run, 1); check_success("Test4", Test4.run, 0); - Console.println; - Test5.run; - Console.println; - SerializationTest.run; + Console.println() + Test5.run() + Console.println() + SerializationTest.run() } } diff --git a/test/files/run/equality.scala b/test/files/run/equality.scala index 2af73691d824..3c8ce2943baf 100644 --- a/test/files/run/equality.scala +++ b/test/files/run/equality.scala @@ -3,31 +3,31 @@ object Test { def hash(x: Any): Int = x.## // forces upcast to Any - def makeFromInt(x: Int) = List( + def makeFromInt(x: Int) = List[Any]( x.toByte, x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x) ) ::: ( if (x < 0) Nil else List(x.toChar) ) - def makeFromDouble(x: Double) = List( + def makeFromDouble(x: Double) = List[Any]( x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x.toInt), BigDecimal(x) ) def main(args: Array[String]): Unit = { var xs = makeFromInt(5) for (x <- xs ; y <- xs) { - assert(x == y, x + " == " + y) + assert(x == y, s"$x == $y") assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y)) } xs = makeFromInt(-5) for (x <- xs ; y <- xs) { - assert(x == y, x + " == " + y) + assert(x == y, s"$x == $y") assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y)) } xs = makeFromDouble(500.0) for (x <- xs ; y <- xs) { - assert(x == y, x + " == " + y) + assert(x == y, s"$x == $y") assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y)) } diff --git a/test/files/run/eta-dependent.check b/test/files/run/eta-dependent.check new file mode 100644 index 000000000000..4d21f8dd08c4 --- /dev/null +++ b/test/files/run/eta-dependent.check 
@@ -0,0 +1,48 @@ + +scala> object defs { + val a = "obj" + def aa: a.type = a + def s = this + def f(x: Int): a.type = a + def g(x: Int)(y: x.type) = 0 + def h(x: a.type)(y: a.type) = 0 +} +object defs + +scala> import defs._ +import defs._ + +scala> val f1 = f _ +val f1: Int => defs.a.type = + +scala> val f2: Int => a.type = f +val f2: Int => defs.a.type = + +scala> val f3: Int => Object = f +val f3: Int => Object = + +scala> val g1 = g(10) _ +val g1: Int(10) => Int = + +scala> val g2: 10 => Int = g1 +val g2: 10 => Int = + +scala> val g3: 11 => Int = g(11) +val g3: 11 => Int = + +scala> val g4: Int => Int = g(11) // mismatch + ^ + error: type mismatch; + found : Int(11) => Int + required: Int => Int + +scala> val h1 = s.h(aa) _ +val h1: defs.a.type => Int = + +scala> val h2: a.type => Int = h1 +val h2: defs.a.type => Int = + +scala> val h3: a.type => Int = s.h(aa) +val h3: defs.a.type => Int = + +scala> :quit diff --git a/test/files/run/eta-dependent.scala b/test/files/run/eta-dependent.scala new file mode 100644 index 000000000000..1ceb5957cfd3 --- /dev/null +++ b/test/files/run/eta-dependent.scala @@ -0,0 +1,72 @@ +object NoMoreNeg { + def foo(x: AnyRef): x.type = x + val x: AnyRef => Any = foo +} + +object t12641 { + def f(sb: StringBuilder) = Option("").foreach(sb.append) +} + +object t12641a { + trait A { + def foo(s: String): this.type + def foo(s: Int): this.type + } + trait T { + val a1: A + val o: Option[String] + + def t(a2: A): Unit = { + o.foreach(a1.foo) + o.foreach(a2.foo) + + val f2: String => a2.type = a2.foo + val f3: String => A = a2.foo + } + } +} + +object t12641b { + trait A { + def foo(s: String): this.type + } + trait T { + val a1: A + val o: Option[String] + + def t(a2: A): Unit = { + o.foreach(a1.foo) + o.foreach(a2.foo) + + val f1 = a2.foo _ + val f2: String => a2.type = a2.foo + val f3: String => A = a2.foo + } + } +} + +import scala.tools.partest._ + +object Test extends ReplTest with Lambdaless { + def code = """ +object defs { + val a 
= "obj" + def aa: a.type = a + def s = this + def f(x: Int): a.type = a + def g(x: Int)(y: x.type) = 0 + def h(x: a.type)(y: a.type) = 0 +} +import defs._ +val f1 = f _ +val f2: Int => a.type = f +val f3: Int => Object = f +val g1 = g(10) _ +val g2: 10 => Int = g1 +val g3: 11 => Int = g(11) +val g4: Int => Int = g(11) // mismatch +val h1 = s.h(aa) _ +val h2: a.type => Int = h1 +val h3: a.type => Int = s.h(aa) +""".trim +} diff --git a/test/files/run/eta-expand-star.scala b/test/files/run/eta-expand-star.scala index 7717c4bc91ec..160b115e0d4f 100644 --- a/test/files/run/eta-expand-star.scala +++ b/test/files/run/eta-expand-star.scala @@ -3,6 +3,6 @@ object Test { def g[T] = f[T] _ def main(args: Array[String]): Unit = { - println(g("hello" +: args)) + println(g("hello" +: args.toIndexedSeq)) } } diff --git a/test/files/run/exceptions-2.scala b/test/files/run/exceptions-2.scala index 8d755c380969..e9f401bf3f47 100644 --- a/test/files/run/exceptions-2.scala +++ b/test/files/run/exceptions-2.scala @@ -20,14 +20,14 @@ object NoExcep { case Leaf(_) => b; } - def method2 = try { + def method2() = try { Console.println("Hello, world"); } catch { case _: Error => Console.println("File error"); case t: Throwable => Console.println("Unknown error"); } - def method3 = try { + def method3() = try { try { Console.println("method3"); } catch { @@ -39,7 +39,7 @@ object NoExcep { case t: Exception => Console.println("Unknown error"); } - def method4 = try { + def method4() = try { Console.println(".."); } catch { case _: Throwable => sys.error(".."); @@ -47,7 +47,7 @@ object NoExcep { } object Test { - def nested1: Unit = try { + def nested1(): Unit = try { try { sys.error("nnnnoooo"); } finally { @@ -57,7 +57,7 @@ object Test { Console.println("Outermost finally"); } - def nested2 = try { + def nested2() = try { try { sys.error("nnnnoooo"); } finally { @@ -68,7 +68,7 @@ object Test { Console.println("Outermost finally"); } - def mixed = + def mixed() = try { if (10 > 0) throw 
Leaf(10); @@ -80,7 +80,7 @@ object Test { Console.println("Finally!"); } - def method2: Unit = { + def method2(): Unit = { try { if (10 > 0) throw Leaf(10); @@ -99,7 +99,7 @@ object Test { } } - def method3: Unit = try { + def method3(): Unit = try { try { val a: Leaf = null; println(a.x); @@ -111,7 +111,7 @@ object Test { Console.println("Caught an NPE"); } - def withValue1: Unit = { + def withValue1(): Unit = { val x = try { 10 } finally { @@ -120,7 +120,7 @@ object Test { Console.println(x); } - def withValue2: Unit = { + def withValue2(): Unit = { val x = try { null } finally { @@ -129,7 +129,7 @@ object Test { Console.println(x); } - def tryFinallyTry: Unit = { + def tryFinallyTry(): Unit = { try { () } finally { @@ -141,14 +141,14 @@ object Test { } } - def valInFinally: Unit = + def valInFinally(): Unit = try { } finally { val fin = "Abc"; Console.println(fin); } - def tryAndValInFinally: Unit = + def tryAndValInFinally(): Unit = try { } finally { val fin = "Abc"; @@ -157,7 +157,7 @@ object Test { } catch { case _: Throwable => () } } - def returnInBody: Unit = try { + def returnInBody(): Unit = try { try { Console.println("Normal execution..."); return @@ -169,7 +169,7 @@ object Test { Console.println("Outer finally"); } - def returnInBodySynch: Unit = try { + def returnInBodySynch(): Unit = try { synchronized { try { Console.println("Synchronized normal execution..."); @@ -184,7 +184,7 @@ object Test { } - def returnInBodyAndInFinally: Unit = try { + def returnInBodyAndInFinally(): Unit = try { try { Console.println("Normal execution..."); return @@ -198,7 +198,7 @@ object Test { return } - def returnInBodyAndInFinally2: Unit = try { + def returnInBodyAndInFinally2(): Unit = try { try { Console.println("Normal execution..."); return @@ -217,7 +217,7 @@ object Test { } /** bug #1020, no crash at compile time */ - def tryCatchInFinally: Unit = { + def tryCatchInFinally(): Unit = { try { Console.println("Try") } catch { @@ -233,7 +233,7 @@ object Test { } } - 
def tryThrowFinally: Unit = { + def tryThrowFinally(): Unit = { try { print("A") throw new Exception @@ -279,7 +279,7 @@ object Test { } /* Tests that class Issue passes verification. */ - def whileInFinally = { + def whileInFinally() = { new Issue } @@ -287,63 +287,63 @@ object Test { def main(args: Array[String]): Unit = { Console.println("nested1: "); - execute(nested1); + execute(nested1()); Console.println("nested2: "); - execute(nested2); + execute(nested2()); Console.println("mixed: "); - execute(mixed); + execute(mixed()); Console.println("withValue1:"); - execute(withValue1); + execute(withValue1()); Console.println("withValue2:"); - execute(withValue2); + execute(withValue2()); Console.println("method2:"); - execute(method2); + execute(method2()); Console.println("method3:"); - execute(method3); + execute(method3()); Console.println("tryFinallyTry:"); - execute(tryFinallyTry); + execute(tryFinallyTry()); Console.println("valInFinally:"); - execute(valInFinally); + execute(valInFinally()); Console.println("tryAndValInFinally"); - execute(tryAndValInFinally); + execute(tryAndValInFinally()); Console.println("================="); Console.println("NoExcep.method2:"); - execute(NoExcep.method2); + execute(NoExcep.method2()); Console.println("NoExcep.method3:"); - execute(NoExcep.method3); + execute(NoExcep.method3()); Console.println("NoExcep.method4:"); - execute(NoExcep.method4); + execute(NoExcep.method4()); Console.println("Return inside body:"); - execute(returnInBody); + execute(returnInBody()); Console.println("Return inside synchronized body:"); - execute(returnInBodySynch); + execute(returnInBodySynch()); Console.println("Return inside body and return in finally:"); - execute(returnInBodyAndInFinally); + execute(returnInBodyAndInFinally()); Console.println("Return inside body and return in finally inside finally:"); - execute(returnInBodyAndInFinally2); + execute(returnInBodyAndInFinally2()); Console.println("Throw in catch and finally:"); - 
execute(tryThrowFinally); + execute(tryThrowFinally()); Console.println("Return with finally clause that cleans the stack") - returnWithFinallyClean + returnWithFinallyClean: Unit - whileInFinally + whileInFinally() } } diff --git a/test/files/run/exceptions-nest.scala b/test/files/run/exceptions-nest.scala index 432d600d1320..03b181a77f14 100644 --- a/test/files/run/exceptions-nest.scala +++ b/test/files/run/exceptions-nest.scala @@ -112,7 +112,7 @@ object Test extends App { } def test8 = { - var x = 1 + val x = 1 try { throw new NullPointerException } catch { @@ -133,7 +133,7 @@ object Test extends App { catch { case e if (x10 == 1) => 1 } } - def test11 { + def test11: Unit = { try { () } catch { case e: Throwable => () } } diff --git a/test/files/run/exceptions.scala b/test/files/run/exceptions.scala index f0fe76946b5c..63ef2ed61b57 100644 --- a/test/files/run/exceptions.scala +++ b/test/files/run/exceptions.scala @@ -22,8 +22,8 @@ object exceptions { if (value == "\u0000") value = "\\u0000"; Console.print(": " + what + " = " + value); if (!success) Console.print(" != " + expected); - Console.println; - Console.flush; + Console.println() + Console.flush() } def test: Unit = { diff --git a/test/files/run/existential-rangepos.check b/test/files/run/existential-rangepos.check index 984baeaaf8e3..39efe241688b 100644 --- a/test/files/run/existential-rangepos.check +++ b/test/files/run/existential-rangepos.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [0:76]package [0:0] { - [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][76]scala.AnyRef { - [76]def (): [20]A[T] = [76]{ - [76][76][76]A.super.(); + [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][20]scala.AnyRef { + [20]def (): [20]A[T] = [20]{ + [20][20][20]A.super.(); [20]() }; [24:51]private[this] val foo: [28]Set[_ <: T] = [47:51]null; diff --git a/test/files/run/existential-rangepos.scala b/test/files/run/existential-rangepos.scala index 7d2b0810d342..e9493b8d175f 
100644 --- a/test/files/run/existential-rangepos.scala +++ b/test/files/run/existential-rangepos.scala @@ -1,7 +1,7 @@ import scala.tools.partest._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Yrangepos -Xprint:patmat -Xprint-pos -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:patmat -Vprint-pos" override def code = """ abstract class A[T] { @@ -9,5 +9,5 @@ abstract class A[T] { val bar: Set[_ <: T] }""".trim - override def show(): Unit = Console.withErr(System.out)(compile()) + override def show(): Unit = compile() } diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala index 2984d81e6009..aec118ef68c5 100644 --- a/test/files/run/existentials-in-compiler.scala +++ b/test/files/run/existentials-in-compiler.scala @@ -1,9 +1,7 @@ /* * filter: inliner warnings; re-run with */ -import scala.tools.nsc._ import scala.tools.partest.CompilerTest -import scala.collection.{ mutable, immutable, generic } object Test extends CompilerTest { import global._ diff --git a/test/files/run/existentials.scala b/test/files/run/existentials.scala index bdd6fb93ee30..5cc4eee64a67 100644 --- a/test/files/run/existentials.scala +++ b/test/files/run/existentials.scala @@ -4,7 +4,7 @@ import scala.language.reflectiveCalls class Foo { class Line { case class Cell[T](var x: T) - def f[T](x: Any): Cell[t1] forSome { type t1 } = x match { case y: Cell[t] => y } + def f[T](x: Any): Cell[t1] forSome { type t1 } = (x: @unchecked) match { case y: Cell[t] => y } var x: Cell[T] forSome { type T } = new Cell(1) println({ x = new Cell("abc"); x }) @@ -14,7 +14,7 @@ class Foo { class FooW { class Line { case class Cell[T](var x: T) - def f[T](x: Any): Cell[ _ ] = x match { case y: Cell[t] => y } + def f[T](x: Any): Cell[ _ ] = (x: @unchecked) match { case y: Cell[t] => y } var x: Cell[_] = new Cell(1) println({ x = new Cell("abc"); x }) @@ -41,7 +41,7 @@ object LUB { object 
Bug1189 { case class Cell[T](x: T) type U = Cell[T1] forSome { type T1 } - def f(x: Any): U = x match { case y: Cell[_] => y } + def f(x: Any): U = (x: @unchecked) match { case y: Cell[_] => y } var x: U = Cell(1) println(x) diff --git a/test/files/run/existentials3-new.scala b/test/files/run/existentials3-new.scala index 5dfd7fb3945a..5420ae110543 100644 --- a/test/files/run/existentials3-new.scala +++ b/test/files/run/existentials3-new.scala @@ -77,6 +77,6 @@ object Misc { def f1 = { trait A extends Seq[U forSome { type U <: Bippy }] ; abstract class B extends A ; trait C extends B ; (null: C) } def f2 = f1.head.bippy } - def g1 = o1.f1 _ - def g2 = o1.f2 _ + def g1 = () => o1.f1 + def g2 = () => o1.f2 } diff --git a/test/files/run/existentials3-old.check b/test/files/run/existentials3-old.check index 36a458daccf4..88cbb8c346af 100644 --- a/test/files/run/existentials3-old.check +++ b/test/files/run/existentials3-old.check @@ -1,5 +1,5 @@ -_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object -_ <: Object with Test$ToS with scala.Product with scala.Serializable +_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.Product with java.io.Serializable] with java.io.Serializable with java.lang.Object +_ <: Object with Test$ToS with scala.Product with java.io.Serializable Object with Test$ToS Object with Test$ToS Object with Test$ToS @@ -7,10 +7,10 @@ scala.Function0[Object with Test$ToS] scala.Function0[Object with Test$ToS] _ <: Object with _ <: Object with Object with Test$ToS _ <: Object with _ <: Object with _ <: Object with Test$ToS -scala.collection.immutable.List[Object with scala.collection.Seq[Int]] -scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int]] -_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object -_ <: Object 
with Test$ToS with scala.Product with scala.Serializable +scala.collection.immutable.List[Object with scala.collection.immutable.Seq[Int]] +scala.collection.immutable.List[Object with scala.collection.immutable.Seq[_ <: Int]] +_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.Product with java.io.Serializable] with java.io.Serializable with java.lang.Object +_ <: Object with Test$ToS with scala.Product with java.io.Serializable Object with Test$ToS Object with Test$ToS Object with Test$ToS @@ -18,5 +18,5 @@ scala.Function0[Object with Test$ToS] scala.Function0[Object with Test$ToS] _ <: Object with _ <: Object with Object with Test$ToS _ <: Object with _ <: Object with _ <: Object with Test$ToS -scala.collection.immutable.List[Object with scala.collection.Seq[Int]] -scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int]] +scala.collection.immutable.List[Object with scala.collection.immutable.Seq[Int]] +scala.collection.immutable.List[Object with scala.collection.immutable.Seq[_ <: Int]] diff --git a/test/files/run/existentials3-old.scala b/test/files/run/existentials3-old.scala index c021c0e71e1f..bee1968755d4 100644 --- a/test/files/run/existentials3-old.scala +++ b/test/files/run/existentials3-old.scala @@ -70,6 +70,6 @@ object Misc { def f1 = { trait A extends Seq[U forSome { type U <: Bippy }] ; abstract class B extends A ; trait C extends B ; (null: C) } def f2 = f1.head.bippy } - def g1 = o1.f1 _ - def g2 = o1.f2 _ + def g1 = () => o1.f1 + def g2 = () => o1.f2 } diff --git a/test/files/run/extend-global.scala b/test/files/run/extend-global.scala new file mode 100644 index 000000000000..fb04dc40b54c --- /dev/null +++ b/test/files/run/extend-global.scala @@ -0,0 +1,8 @@ +import scala.tools.nsc._ + +// extending Global is a pretty thorough test case of bridge generation +// make sure we don't cause VerifyErrors when messing with symbol pairs +object Test extends Global(new Settings) { + def main(args: Array[String]): 
Unit = { + } +} diff --git a/test/files/run/f-interpolator-unit.scala b/test/files/run/f-interpolator-unit.scala new file mode 100644 index 000000000000..68be88da51a2 --- /dev/null +++ b/test/files/run/f-interpolator-unit.scala @@ -0,0 +1,273 @@ +import java.text.DecimalFormat + +import scala.language.implicitConversions + +// extracted from the junit test, which requires bootstrapping + +object StringContextTestUtils { + private val decimalSeparator: Char = new DecimalFormat().getDecimalFormatSymbols().getDecimalSeparator() + private val numberPattern = """(\d+)\.(\d+.*)""".r + + implicit class StringContextOps(val sc: StringContext) extends AnyVal { + // Use this String interpolator to avoid problems with a locale-dependent decimal mark. + def locally(numbers: String*): String = { + val numbersWithCorrectLocale = numbers.map(applyProperLocale) + sc.s(numbersWithCorrectLocale: _*) + } + + // Handles cases like locally"3.14" - it's prettier than locally"${"3.14"}". + def locally(): String = sc.parts.map(applyProperLocale).mkString + + private def applyProperLocale(number: String): String = { + val numberPattern(intPart, fractionalPartAndSuffix) = number + s"$intPart$decimalSeparator$fractionalPartAndSuffix" + } + } +} + +object Test extends App { + + import StringContext._ + import StringContextTestUtils.StringContextOps + + final val tester = "hello" + final val number = "42" // strings only, alas + + def assertEquals(s0: String, s1: String, i: Int = -1) = assert(s0 == s1, s"$s0 == $s1${if (i >= 0) " at " + i.toString else ""}") + + def noEscape() = { + val s = "string" + val res = processEscapes(s) + assertEquals(s, res) + } + def tabbed() = { + val s = """a\tb""" + val res = processEscapes(s) + assertEquals("a\tb", res) + } + def quoted() = { + val s = """hello, \"world\"""" + val res = processEscapes(s) + assertEquals("""hello, "world"""", res) + } + def t5856(): Unit = { + class X { + override def toString = "Test" + + def thistle(): Unit = { + 
assertEquals("Test", s"$this") + assertEquals("TestTest", s"$this$this") + assertEquals("Test$", s"$this$$") + assertEquals("Test.##", s"$this.##") + assertEquals("Test.toString", s"$this.toString") + assertEquals("Test=THIS", s"$this=THIS") + } + } + new X().thistle() // this'll be good + } + + def t6631_baseline() = assertEquals("\f\r\n\t", s"""\f\r\n\t""") + + // verifying that the standard interpolators can be supplanted + def antiHijack_?() = { + object AllYourStringsAreBelongToMe { case class StringContext(args: Any*) { def s(args: Any*) = "!!!!" } } + import AllYourStringsAreBelongToMe._ + //assertEquals("????", s"????") + assertEquals("!!!!", s"????") // OK to hijack core interpolator ids + } + + def fIf() = { + val res = f"${if (true) 2.5 else 2.5}%.2f" + val expected = locally"2.50" + assertEquals(expected, res) + } + + def fIfNot() = { + val res = f"${if (false) 2.5 else 3.5}%.2f" + val expected = locally"3.50" + assertEquals(expected, res) + } + + def fHeteroArgs() = { + val res = f"${3.14}%.2f rounds to ${3}%d" + val expected = locally"${"3.14"} rounds to 3" + assertEquals(expected, res) + } + + @annotation.nowarn + def `f interpolator baseline`(): Unit = { + + // ignore spurious warning scala/bug#11946 + type ignore = annotation.unused + @ignore def ignore: ignore = ??? 
// ignore that ignore looks unused + + @ignore implicit def stringToBoolean(s: String): Boolean = java.lang.Boolean.parseBoolean(s) + @ignore implicit def stringToChar(s: String): Char = s(0) + @ignore implicit def str2fmt(s: String): java.util.Formattable = new java.util.Formattable { + def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("%s", s) + } + + val b_true = true + val b_false = false + + val i = 42 + + val f_zero = 0.0 + val f_zero_- = -0.0 + + val s = "Scala" + + val fff = new java.util.Formattable { + def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4") + } + import java.util.{Calendar, Locale} + val c = Calendar.getInstance(Locale.US) + c.set(2012, Calendar.MAY, 26) + @ignore implicit def strToDate(x: String): Calendar = c + + val ss = List[(String, String)] ( + // 'b' / 'B' (category: general) + // ----------------------------- + f"${b_false}%b" -> "false", + f"${b_true}%b" -> "true", + + f"${null}%b" -> "false", + f"${false}%b" -> "false", + f"${true}%b" -> "true", + f"${true && false}%b" -> "false", + f"${java.lang.Boolean.valueOf(false)}%b" -> "false", + f"${java.lang.Boolean.valueOf(true)}%b" -> "true", + + f"${null}%B" -> "FALSE", + f"${false}%B" -> "FALSE", + f"${true}%B" -> "TRUE", + f"${java.lang.Boolean.valueOf(false)}%B" -> "FALSE", + f"${java.lang.Boolean.valueOf(true)}%B" -> "TRUE", + + f"${"true"}%b" -> "true", + f"${"false"}%b"-> "false", + + // 'h' | 'H' (category: general) + // ----------------------------- + f"${null}%h" -> "null", + f"${f_zero}%h" -> "0", + f"${f_zero_-}%h" -> "80000000", + f"${s}%h" -> "4c01926", + + f"${null}%H" -> "NULL", + f"${s}%H" -> "4C01926", + + // 's' | 'S' (category: general) + // ----------------------------- + f"${null}%s" -> "null", + f"${null}%S" -> "NULL", + f"${s}%s" -> "Scala", + f"${s}%S" -> "SCALA", + f"${5}" -> "5", + f"${i}" -> "42", + f"${Symbol("foo")}" -> "Symbol(foo)", + + f"${Thread.State.NEW}" -> "NEW", + + // 'c' | 'C' (category: character) + 
// ------------------------------- + f"${120:Char}%c" -> "x", + f"${120:Byte}%c" -> "x", + f"${120:Short}%c" -> "x", + f"${120:Int}%c" -> "x", + f"${java.lang.Character.valueOf('x')}%c" -> "x", + f"${java.lang.Byte.valueOf(120:Byte)}%c" -> "x", + f"${java.lang.Short.valueOf(120:Short)}%c" -> "x", + f"${java.lang.Integer.valueOf(120)}%c" -> "x", + + f"${'x' : java.lang.Character}%c" -> "x", + f"${(120:Byte) : java.lang.Byte}%c" -> "x", + f"${(120:Short) : java.lang.Short}%c" -> "x", + f"${120 : java.lang.Integer}%c" -> "x", + + f"${"Scala"}%c" -> "S", + + // 'd' | 'o' | 'x' | 'X' (category: integral) + // ------------------------------------------ + f"${120:Byte}%d" -> "120", + f"${120:Short}%d" -> "120", + f"${120:Int}%d" -> "120", + f"${120:Long}%d" -> "120", + f"${60 * 2}%d" -> "120", + f"${java.lang.Byte.valueOf(120:Byte)}%d" -> "120", + f"${java.lang.Short.valueOf(120:Short)}%d" -> "120", + f"${java.lang.Integer.valueOf(120)}%d" -> "120", + f"${java.lang.Long.valueOf(120)}%d" -> "120", + f"${120 : java.lang.Integer}%d" -> "120", + f"${120 : java.lang.Long}%d" -> "120", + f"${BigInt(120)}%d" -> "120", + + f"${new java.math.BigInteger("120")}%d" -> "120", + + f"${4}%#10X" -> " 0X4", + + f"She is ${fff}%#s feet tall." -> "She is 4 feet tall.", + + f"Just want to say ${"hello, world"}%#s..." 
-> "Just want to say hello, world...", + + { @ignore implicit val strToShort = (s: String) => java.lang.Short.parseShort(s) ; f"${"120"}%d" } -> "120", + { @ignore implicit val strToInt = (s: String) => 42 ; f"${"120"}%d" } -> "42", + + // 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' (category: floating point) + // ------------------------------------------------------------------ + f"${3.4f}%e" -> locally"3.400000e+00", + f"${3.4}%e" -> locally"3.400000e+00", + f"${3.4f : java.lang.Float}%e" -> locally"3.400000e+00", + f"${3.4 : java.lang.Double}%e" -> locally"3.400000e+00", + + f"${BigDecimal(3.4)}%e" -> locally"3.400000e+00", + + f"${new java.math.BigDecimal(3.4)}%e" -> locally"3.400000e+00", + + f"${3}%e" -> locally"3.000000e+00", + f"${3L}%e" -> locally"3.000000e+00", + + // 't' | 'T' (category: date/time) + // ------------------------------- + f"${c}%TD" -> "05/26/12", + f"${c.getTime}%TD" -> "05/26/12", + f"${c.getTime.getTime}%TD" -> "05/26/12", + f"""${"1234"}%TD""" -> "05/26/12", + + // literals and arg indexes + f"%%" -> "%", + f" mind%n------%nmatter" -> + """| mind + |------ + |matter""".stripMargin.linesIterator.mkString(System.lineSeparator), + f"${i}%d % "42 42 9", + f"${7}%d % "7 7 9", + f"${7}%d %2$$d ${9}%d" -> "7 9 9", + + f"${null}%d % "null FALSE", + + f"${5: Any}" -> "5", + f"${5}%s% "55", + f"${3.14}%s,% locally"3.14,${"3.140000"}", + + f"${"hello"}%-10s" -> "hello ", + (f"$tester%-10s$number%3s": "hello 42") -> "hello 42", + f"z" -> "z" + ) + + for (((f, s), i) <- ss.zipWithIndex) assertEquals(s, f, i) + } + + noEscape() + tabbed() + quoted() + t5856() + t6631_baseline() + antiHijack_?() + fIf() + fIfNot() + fHeteroArgs() + `f interpolator baseline`() + + assertEquals("hell", f"$tester%.4s") +} diff --git a/test/files/run/fail-non-value-types.check b/test/files/run/fail-non-value-types.check index 714dce2c507b..6169621f1ad7 100644 --- a/test/files/run/fail-non-value-types.check +++ b/test/files/run/fail-non-value-types.check @@ -1,3 +1,3 @@ -[B, 
That](f: A => B)(implicit cbf: ImaginaryCanBuildFrom[CompletelyIndependentList.this.Repr,B,That])That -[B, That](f: Int => B)(implicit cbf: ImaginaryCanBuildFrom[CompletelyIndependentList[Int]#Repr,B,That])That -()CompletelyIndependentList[A] +[B, That](f: A => B)(implicit cbf: ImaginaryCanBuildFrom[CompletelyIndependentList.this.Repr,B,That]): That +[B, That](f: Int => B)(implicit cbf: ImaginaryCanBuildFrom[CompletelyIndependentList[Int]#Repr,B,That]): That +(): CompletelyIndependentList[A] diff --git a/test/files/run/fail-non-value-types.scala b/test/files/run/fail-non-value-types.scala index d9a69e17c248..d140558c9916 100644 --- a/test/files/run/fail-non-value-types.scala +++ b/test/files/run/fail-non-value-types.scala @@ -10,7 +10,7 @@ class CompletelyIndependentList[+A] { object Test { var failed = false def expectFailure[T](body: => T): Boolean = { - try { val res = body ; failed = true ; println(res + " failed to fail.") ; false } + try { val res = body ; failed = true ; println(s"$res failed to fail.") ; false } catch { case _: AssertionError => true } } @@ -35,6 +35,6 @@ object Test { println(map.info) println(map.infoIn(cil)) println(distinct.info) - if (failed) sys.exit(1) + assert(!failed) } } diff --git a/test/files/run/finally.check b/test/files/run/finally.check index b0f2293d1171..916cb1d22264 100644 --- a/test/files/run/finally.check +++ b/test/files/run/finally.check @@ -10,7 +10,7 @@ in finally Running throwCatch java.lang.Exception in finally -COUGHT: java.lang.Exception +CAUGHT: java.lang.Exception ---------------------------------------- Running retBody in finally diff --git a/test/files/run/finally.scala b/test/files/run/finally.scala index 467c9e5868dc..1307869472fb 100644 --- a/test/files/run/finally.scala +++ b/test/files/run/finally.scala @@ -3,16 +3,16 @@ object Test extends App { // test that finally is not covered by any exception handlers. 
- def throwCatchFinally { + def throwCatchFinally(): Unit = { try { - bar + bar() } catch { case e: Throwable => println(e) } } // test that finally is not covered by any exception handlers. - def bar { + def bar(): Unit = { try { println("hi") } @@ -26,7 +26,7 @@ object Test extends App { } // return in catch (finally is executed) - def retCatch { + def retCatch(): Unit = { try { throw new Exception } catch { @@ -37,7 +37,7 @@ object Test extends App { } // throw in catch (finally is executed, exception propagated) - def throwCatch { + def throwCatch(): Unit = { try { throw new Exception } catch { @@ -48,7 +48,7 @@ object Test extends App { } // return inside body (finally is executed) - def retBody { + def retBody(): Unit = { try { return } catch { @@ -59,7 +59,7 @@ object Test extends App { } // throw inside body (finally and catch are executed) - def throwBody { + def throwBody(): Unit = { try { throw new Exception } catch { @@ -69,7 +69,7 @@ object Test extends App { } // return inside finally (each finally is executed once) - def retFinally { + def retFinally(): Unit = { try { try println("body") finally { @@ -81,7 +81,7 @@ object Test extends App { // throw inside finally (finally is executed once, exception is propagated) - def throwFinally { + def throwFinally(): Unit = { try { try println("body") finally { @@ -94,7 +94,7 @@ object Test extends App { } // nested finally blocks with return value - def nestedFinallyBlocks: Int = + def nestedFinallyBlocks(): Int = try { try { return 10 @@ -106,22 +106,22 @@ object Test extends App { println("in finally 2") } - def test[A](m: => A, name: String) { + def test[A](m: => A, name: String): Unit = { println("Running %s".format(name)) try { m } catch { - case e: Throwable => println("COUGHT: " + e) + case e: Throwable => println("CAUGHT: " + e) } println("-" * 40) } - test(throwCatchFinally, "throwCatchFinally") - test(retCatch, "retCatch") - test(throwCatch, "throwCatch") - test(retBody, "retBody") - test(throwBody, 
"throwBody") - test(retFinally, "retFinally") - test(throwFinally, "throwFinally") - test(nestedFinallyBlocks, "nestedFinallyBlocks") + test(throwCatchFinally(), "throwCatchFinally") + test(retCatch(), "retCatch") + test(throwCatch(), "throwCatch") + test(retBody(), "retBody") + test(throwBody(), "throwBody") + test(retFinally(), "retFinally") + test(throwFinally(), "throwFinally") + test(nestedFinallyBlocks(), "nestedFinallyBlocks") } diff --git a/test/files/run/finalvar.check b/test/files/run/finalvar.check deleted file mode 100644 index 249629397246..000000000000 --- a/test/files/run/finalvar.check +++ /dev/null @@ -1,6 +0,0 @@ -(2,2,2,2,1) -(2,2,2,2) -(2,2,2,2,1001) -(2,2,2,2) -2 -10 diff --git a/test/files/run/finalvar.scala b/test/files/run/finalvar.scala deleted file mode 100644 index d08ae65f23e0..000000000000 --- a/test/files/run/finalvar.scala +++ /dev/null @@ -1,38 +0,0 @@ -// scalac: -Yoverride-vars -opt:l:inline -opt-inline-from:** -object Final { - class X(final var x: Int) { } - def f = new X(0).x += 1 -} - -class A { - var x = 1 - def y0 = x - def y1 = this.x - def y2 = (this: A).x -} - -class B extends A { - override def x = 2 - def z = super.x -} - -object Test { - def main(args: Array[String]): Unit = { - Final.f - val a = new B - println((a.x, a.y0, a.y1, a.y2, a.z)) - val a0: A = a - println((a0.x, a0.y0, a0.y1, a0.y2)) - a.x = 1001 - println((a.x, a.y0, a.y1, a.y2, a.z)) - println((a0.x, a0.y0, a0.y1, a0.y2)) - - val d = new D - println(d.w) - d.ten - println(d.w) - } -} - -class C { var w = 1 ; def ten = this.w = 10 } -class D extends C { override var w = 2 } diff --git a/test/files/run/fors.scala b/test/files/run/fors.scala index c778df3e242b..003304d9f147 100644 --- a/test/files/run/fors.scala +++ b/test/files/run/fors.scala @@ -6,7 +6,7 @@ object Test extends App { val xs = List(1, 2, 3) - val ys = List('a, 'b, 'c) + val ys = List(Symbol("a"), Symbol("b"), Symbol("c")) def it = 0 until 10 @@ -14,71 +14,71 @@ object Test extends App { 
/////////////////// old syntax /////////////////// - def testOld { + def testOld(): Unit = { println("\ntestOld") // lists - for (x <- xs) print(x + " "); println + for (x <- xs) print(s"$x "); println() for (x <- xs; - if x % 2 == 0) print(x + " "); println + if x % 2 == 0) print(s"$x "); println() for {x <- xs - if x % 2 == 0} print(x + " "); println + if x % 2 == 0} print(s"$x "); println() var n = 0 for (_ <- xs) n += 1; println(n) - for ((x, y) <- xs zip ys) print(x + " "); println - for (p @ (x, y) <- xs zip ys) print(p._1 + " "); println + for ((x, y) <- xs zip ys) print(s"$x "); println() + for (p @ (x, y) <- xs zip ys) print(s"${p._1} "); println() // iterators - for (x <- it) print(x + " "); println + for (x <- it) print(s"$x "); println() for (x <- it; - if x % 2 == 0) print(x + " "); println + if x % 2 == 0) print(s"$x "); println() for {x <- it - if x % 2 == 0} print(x + " "); println + if x % 2 == 0} print(s"$x "); println() // arrays - for (x <- ar) print(x + " "); println + for (x <- ar) print(s"$x "); println() for (x <- ar; - if x.toInt > 97) print(x + " "); println + if x.toInt > 97) print(s"$x "); println() for {x <- ar - if x.toInt > 97} print(x + " "); println + if x.toInt > 97} print(s"$x "); println() } /////////////////// new syntax /////////////////// - def testNew { + def testNew(): Unit = { println("\ntestNew") // lists var n = 0 for (_ <- xs) n += 1; println(n) - for ((x, y) <- xs zip ys) print(x + " "); println - for (p @ (x, y) <- xs zip ys) print(p._1 + " "); println + for ((x, y) <- xs zip ys) print(s"$x "); println() + for (p @ (x, y) <- xs zip ys) print(s"${p._1} "); println() // iterators - for (x <- it) print(x + " "); println - for (x <- it if x % 2 == 0) print(x + " "); println - for (x <- it; if x % 2 == 0) print(x + " "); println + for (x <- it) print(s"$x "); println() + for (x <- it if x % 2 == 0) print(s"$x "); println() + for (x <- it; if x % 2 == 0) print(s"$x "); println() for (x <- it; - if x % 2 == 0) print(x + " "); 
println + if x % 2 == 0) print(s"$x "); println() for (x <- it - if x % 2 == 0) print(x + " "); println + if x % 2 == 0) print(s"$x "); println() for {x <- it - if x % 2 == 0} print(x + " "); println + if x % 2 == 0} print(s"$x "); println() for (x <- it; y = 2 - if x % y == 0) print(x + " "); println + if x % y == 0) print(s"$x "); println() for {x <- it y = 2 - if x % y == 0} print(x + " "); println + if x % y == 0} print(s"$x "); println() // arrays - for (x <- ar) print(x + " "); println + for (x <- ar) print(s"$x "); println() } //////////////////////////////////////////////////// - testOld - testNew + testOld() + testNew() } diff --git a/test/files/run/forvaleq.scala b/test/files/run/forvaleq.scala index 8c1824a7699d..f9c81caf5cc7 100644 --- a/test/files/run/forvaleq.scala +++ b/test/files/run/forvaleq.scala @@ -1,6 +1,5 @@ // test "foo = expr" clauses in for comprehensions -import scala.collection.immutable.Queue import scala.{List=>L} object Test { @@ -87,5 +86,5 @@ object Test { println("called " + count + " times") } - def main(args: Array[String]) {} + def main(args: Array[String]): Unit = {} } diff --git a/test/files/run/freetypes_false_alarm1.scala b/test/files/run/freetypes_false_alarm1.scala index 8d6797f792e3..e9396f3f0d7b 100644 --- a/test/files/run/freetypes_false_alarm1.scala +++ b/test/files/run/freetypes_false_alarm1.scala @@ -7,4 +7,4 @@ object Test extends App { val tpe: ru.Type = ru.typeOf[List[Int]] println(tpe) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/freetypes_false_alarm2.check b/test/files/run/freetypes_false_alarm2.check deleted file mode 100644 index 02e4a84d62c4..000000000000 --- a/test/files/run/freetypes_false_alarm2.check +++ /dev/null @@ -1 +0,0 @@ -false \ No newline at end of file diff --git a/test/files/run/freetypes_false_alarm2.scala b/test/files/run/freetypes_false_alarm2.scala index a517f7396b68..7e21e5467d47 100644 --- a/test/files/run/freetypes_false_alarm2.scala +++ 
b/test/files/run/freetypes_false_alarm2.scala @@ -1,9 +1,8 @@ import scala.reflect.runtime.universe._ import scala.reflect.runtime.{universe => ru} -import scala.tools.reflect.Eval import internal._ object Test extends App { val tpe = typeOf[ru.Type] - println(isFreeType(tpe.typeSymbol)) -} \ No newline at end of file + assert(!isFreeType(tpe.typeSymbol)) +} diff --git a/test/files/run/future-flatmap-exec-count.check b/test/files/run/future-flatmap-exec-count.check index 4831db09eea0..dd9dce64ed78 100644 --- a/test/files/run/future-flatmap-exec-count.check +++ b/test/files/run/future-flatmap-exec-count.check @@ -1,4 +1,3 @@ -warning: one deprecation (since 2.12.0); re-run with -deprecation for details mapping execute() flatmapping diff --git a/test/files/run/future-flatmap-exec-count.scala b/test/files/run/future-flatmap-exec-count.scala index 86c37be938d1..cd5a53555044 100644 --- a/test/files/run/future-flatmap-exec-count.scala +++ b/test/files/run/future-flatmap-exec-count.scala @@ -1,8 +1,8 @@ import scala.concurrent._ -import java.util.concurrent.atomic.AtomicInteger +@deprecated("Tests deprecated API", since="2.12") object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { test() } diff --git a/test/files/run/gadts.scala b/test/files/run/gadts.scala index 57c7fc8af03a..d3ac0b17ddd0 100644 --- a/test/files/run/gadts.scala +++ b/test/files/run/gadts.scala @@ -1,4 +1,4 @@ -abstract class Term[T] +sealed abstract class Term[T] case class Lit(x: Int) extends Term[Int] case class Succ(t: Term[Int]) extends Term[Int] case class IsZero(t: Term[Int]) extends Term[Boolean] diff --git a/test/files/run/getClassTest-valueClass.scala b/test/files/run/getClassTest-valueClass.scala index 05a116dfff99..4e5717da1cbc 100644 --- a/test/files/run/getClassTest-valueClass.scala +++ b/test/files/run/getClassTest-valueClass.scala @@ -3,6 +3,7 @@ class V(val x: Int) extends AnyVal object Test { def main(args: Array[String]) = { val v = new V(2) + 
@annotation.unused val s: Any = 2 println(2.getClass) println(v.getClass) diff --git a/test/files/run/groupby.scala b/test/files/run/groupby.scala index a751e65e80e2..9a33ae28968b 100644 --- a/test/files/run/groupby.scala +++ b/test/files/run/groupby.scala @@ -4,7 +4,7 @@ // Fixes #3422 object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val arr = Array.range(0,10) val map = arr groupBy (_%2) val v1 = map(0) diff --git a/test/files/run/hashCodeDistribution.scala b/test/files/run/hashCodeDistribution.scala index 5be9d1db6dd4..284f3d977c49 100644 --- a/test/files/run/hashCodeDistribution.scala +++ b/test/files/run/hashCodeDistribution.scala @@ -14,4 +14,4 @@ object Test { assert(collisionRate < 5, "Collision rate too high: %d / 1000".format(collisionRate)) // println("collisionRate = %d / 1000".format(collisionRate)) } -} \ No newline at end of file +} diff --git a/test/files/run/hashCodeStatics.scala b/test/files/run/hashCodeStatics.scala index bff62cce1848..2d45693c817c 100644 --- a/test/files/run/hashCodeStatics.scala +++ b/test/files/run/hashCodeStatics.scala @@ -1,16 +1,14 @@ // This only tests direct access to the methods in Statics, // not the whole scheme. 
-object Test -{ - import java.{ lang => jl } +object Test { import scala.runtime.Statics.anyHash def allSame[T](xs: List[T]) = assert(xs.distinct.size == 1, "failed: " + xs) - def mkNumbers(x: Int): List[Number] = - List(x.toByte, x.toShort, x, x.toLong, x.toFloat, x.toDouble) + def mkNumbers(x: Int): List[Any] = + List[Any](x.toByte, x.toShort, x, x.toLong, x.toFloat, x.toDouble) - def testLDF(x: Long) = allSame(List[Number](x, x.toDouble, x.toFloat) map anyHash) + def testLDF(x: Long) = allSame(List[Any](x, x.toDouble, x.toFloat) map anyHash) def main(args: Array[String]): Unit = { List(Byte.MinValue, -1, 0, 1, Byte.MaxValue) foreach { n => diff --git a/test/files/run/hashset.check b/test/files/run/hashset.check deleted file mode 100644 index 9542a1ff48b5..000000000000 --- a/test/files/run/hashset.check +++ /dev/null @@ -1,26 +0,0 @@ -*** HashSet primitives -0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true -20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false -0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19 - -*** HashSet Strings with null -null true -0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true -20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false -0,1,10,11,12,13,14,15,16,17,18,19,2,3,4,5,6,7,8,9,null -null false -0 false,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true - -*** ParHashSet primitives -0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 
true,6 true,7 true,8 true,9 true -20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false -0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19 - -*** ParHashSet Strings with null -null true -0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true -20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false -0,1,10,11,12,13,14,15,16,17,18,19,2,3,4,5,6,7,8,9,null -null false -0 false,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true - diff --git a/test/files/run/hashset.scala b/test/files/run/hashset.scala deleted file mode 100644 index a4d49c142e96..000000000000 --- a/test/files/run/hashset.scala +++ /dev/null @@ -1,48 +0,0 @@ -import scala.collection.generic.{Growable, Shrinkable} -import scala.collection.GenSet -import scala.collection.mutable.FlatHashTable -import scala.collection.mutable.HashSet -import scala.collection.parallel.mutable.ParHashSet - -object Test extends App { - test(new Creator{ - def create[A] = new HashSet[A] - def hashSetType = "HashSet" - }) - - test(new Creator{ - def create[A] = new ParHashSet[A] - def hashSetType = "ParHashSet" - }) - - - def test(creator : Creator) { - println("*** " + creator.hashSetType + " primitives") - val h1 = creator.create[Int] - for (i <- 0 until 20) h1 += i - println((for (i <- 0 until 20) yield i + " " + (h1 contains i)).toList.sorted mkString(",")) - println((for (i <- 20 until 40) yield i + " " + (h1 contains i)).toList.sorted mkString(",")) - println(h1.toList.sorted mkString ",") - println - - println("*** " + creator.hashSetType + " Strings with null") - val h2 = 
creator.create[String] - h2 += null - for (i <- 0 until 20) h2 += "" + i - println("null " + (h2 contains null)) - println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(",")) - println((for (i <- 20 until 40) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(",")) - println((h2.toList map {x => "" + x}).sorted mkString ",") - - h2 -= null - h2 -= "" + 0 - println("null " + (h2 contains null)) - println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(",")) - println - } - - trait Creator { - def create[A] : GenSet[A] with Cloneable with FlatHashTable[A] with Growable[A] with Shrinkable[A] - def hashSetType : String - } -} \ No newline at end of file diff --git a/test/files/run/hashsetremove.check b/test/files/run/hashsetremove.check deleted file mode 100644 index 8de9826895ce..000000000000 --- a/test/files/run/hashsetremove.check +++ /dev/null @@ -1,6 +0,0 @@ -remove 0 should be false, was false -contains 1 should be true, was true -remove 1 should be true, was true -contains 1 should be false, was false -remove 1 should be false, was false -contains 1 should be false, was false diff --git a/test/files/run/hashsetremove.scala b/test/files/run/hashsetremove.scala index 7b82a9909b9d..193cdd7718d6 100644 --- a/test/files/run/hashsetremove.scala +++ b/test/files/run/hashsetremove.scala @@ -4,10 +4,10 @@ import scala.collection.mutable.HashSet object Test extends App { val h = new HashSet[Int] h += 1 - println(s"remove 0 should be false, was ${h remove 0}") - println(s"contains 1 should be true, was ${h contains 1}") - println(s"remove 1 should be true, was ${h remove 1}") - println(s"contains 1 should be false, was ${h contains 1}") - println(s"remove 1 should be false, was ${h remove 1}") - println(s"contains 1 should be false, was ${h contains 1}") - } \ No newline at end of file + assert(!h.remove(0)) + assert(h(1)) + assert(h.remove(1)) + assert(!h(1)) + assert(!h.remove(1)) + 
assert(!h(1)) + } diff --git a/test/files/run/hk-typevar-unification.scala b/test/files/run/hk-typevar-unification.scala index 3d3a5258abe0..81d6ab4e0081 100644 --- a/test/files/run/hk-typevar-unification.scala +++ b/test/files/run/hk-typevar-unification.scala @@ -1,5 +1,5 @@ -// scalac: -Xsource:2.13 -import scala.language.higherKinds +//> using options -Xsource:2.13 +// trait Forall[F[_]] { def instantiate[A]: F[A] diff --git a/test/files/run/huge-string.check b/test/files/run/huge-string.check new file mode 100644 index 000000000000..8dc07a5a6cbf --- /dev/null +++ b/test/files/run/huge-string.check @@ -0,0 +1,3 @@ +error: Error while emitting C +UTF8 string too large +error: Method s in class C has a bad String constant of length 393210 diff --git a/test/files/run/huge-string.scala b/test/files/run/huge-string.scala new file mode 100644 index 000000000000..f60db3c376c4 --- /dev/null +++ b/test/files/run/huge-string.scala @@ -0,0 +1,13 @@ +//> abusing options -Vdebug -Xverify + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + def genStr = "ohohoh" * 0xFFFF + def code = s""" + class C { + def s = "$genStr" + } + """ + def show() = assert(!compile()) +} diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala index c113a183a679..09128f499e43 100644 --- a/test/files/run/icode-reader-dead-code.scala +++ b/test/files/run/icode-reader-dead-code.scala @@ -4,7 +4,7 @@ import scala.tools.asm.{ClassWriter, Opcodes, ClassReader} import scala.tools.asm.tree.{InsnNode, ClassNode} import scala.tools.nsc.backend.jvm.AsmUtils import scala.tools.partest.DirectTest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Test that ClassReader does not crash if the bytecode of a method has unreachable code. 
@@ -30,13 +30,13 @@ object Test extends DirectTest { |} """.stripMargin - compileString(newCompiler(s"-usejavacp", "-cp", testOutput.path))(aCode) + compileString(newCompiler("-cp", testOutput.path))(aCode) addDeadCode() // If inlining fails, the compiler will issue an inliner warning that is not present in the // check file - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-opt:l:inline", "-opt-inline-from:**"))(bCode) + compileString(newCompiler("-cp", testOutput.path, "-opt:inline:**"))(bCode) } def readClass(file: String) = { @@ -57,15 +57,15 @@ object Test extends DirectTest { os.close() } - def addDeadCode() { + def addDeadCode(): Unit = { val file = (testOutput / "p" / "A.class").path val cnode = readClass(file) - val method = cnode.methods.asScala.find(_.name == "f").head + val method = cnode.methods.asScala.find(_.name == "f").get AsmUtils.traceMethod(method) val insns = method.instructions - val it = insns.iterator() + val it = insns.iterator while (it.hasNext) { val in = it.next() if (in.getOpcode == Opcodes.IRETURN) { diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check index 38c2fb932625..78cc54bfa694 100644 --- a/test/files/run/idempotency-case-classes.check +++ b/test/files/run/idempotency-case-classes.check @@ -18,12 +18,18 @@ C(2,3) def productElement(x$1: Int): Any = x$1 match { case 0 => C.this.x case 1 => C.this.y - case _ => throw new IndexOutOfBoundsException(x$1.toString()) + case _ => scala.runtime.Statics.ioobe[Any](x$1) }; override def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](C.this); def canEqual(x$1: Any): Boolean = x$1.$isInstanceOf[C](); + override def productElementName(x$1: Int): String = x$1 match { + case 0 => "x" + case 1 => "y" + case _ => scala.runtime.Statics.ioobe[String](x$1) + }; override def hashCode(): Int = { var acc: Int = -889275714; + acc = scala.runtime.Statics.mix(acc, 67); acc = 
scala.runtime.Statics.mix(acc, x); acc = scala.runtime.Statics.mix(acc, y); scala.runtime.Statics.finalizeHash(acc, 2) @@ -34,17 +40,17 @@ C(2,3) case _ => false }.&&({ val C$1: C = x$1.asInstanceOf[C]; - C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y)).&&(C$1.canEqual(C.this)) + C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y).&&(C$1.canEqual(C.this))) })) }; - object C extends scala.runtime.AbstractFunction2[Int,Int,C] with Serializable { + object C extends scala.runtime.AbstractFunction2[Int,Int,C] with java.io.Serializable { def (): C.type = { C.super.(); () }; final override def toString(): String = "C"; case def apply(x: Int, y: Int): C = new C(x, y); - case def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null)) + case def unapply(x$0: C): Option[(Int, Int)] = if (x$0.eq(null)) scala.None else Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y)) diff --git a/test/files/run/idempotency-case-classes.scala b/test/files/run/idempotency-case-classes.scala index 1342e3a48f41..8cd92f1a6236 100644 --- a/test/files/run/idempotency-case-classes.scala +++ b/test/files/run/idempotency-case-classes.scala @@ -19,4 +19,4 @@ object Test extends App { // this is the current behaviour, rather than the desired behavior; see scala/bug#5467 case _: ToolBoxError => println("error!") } -} \ No newline at end of file +} diff --git a/test/files/run/idempotency-labels.scala b/test/files/run/idempotency-labels.scala index 084c93d3c62e..85c5b61110fe 100644 --- a/test/files/run/idempotency-labels.scala +++ b/test/files/run/idempotency-labels.scala @@ -19,4 +19,4 @@ object Test extends App { } catch { case _: ToolBoxError => println("error!") } -} \ No newline at end of file +} diff --git a/test/files/run/idempotency-lazy-vals.scala b/test/files/run/idempotency-lazy-vals.scala index 99e7eb237f5b..236e1516137c 100644 --- a/test/files/run/idempotency-lazy-vals.scala +++ b/test/files/run/idempotency-lazy-vals.scala @@ -7,7 +7,7 @@ object Test extends App { val lazee = reify { class C { 
lazy val x = 2 - implicit lazy val y = 3 + implicit lazy val y: Int = 3 } val c = new C() import c._ @@ -24,4 +24,4 @@ object Test extends App { // this is the current behaviour, rather than the desired behavior; see scala/bug#5466 case _: ToolBoxError => println("error!") } -} \ No newline at end of file +} diff --git a/test/files/run/idempotency-this.check b/test/files/run/idempotency-this.check index 88b8288adfe6..72bbe8909f7f 100644 --- a/test/files/run/idempotency-this.check +++ b/test/files/run/idempotency-this.check @@ -1,4 +1,4 @@ List() -List.apply[String]("") -Apply(TypeApply(Select(Ident(scala.collection.immutable.List), TermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), TypeName("String"))))), List(Literal(Constant("")))) +`package`.List.apply[String]("") +Apply(TypeApply(Select(Select(Ident(scala.package), TermName("List")), TermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), TypeName("String"))))), List(Literal(Constant("")))) List() diff --git a/test/files/run/idempotency-this.scala b/test/files/run/idempotency-this.scala index da41e2c1ce93..d29ea968f517 100644 --- a/test/files/run/idempotency-this.scala +++ b/test/files/run/idempotency-this.scala @@ -19,4 +19,4 @@ object Test extends App { // this is the current behaviour, rather than the desired behavior; see scala/bug#5705 case _: ToolBoxError => println("error!") } -} \ No newline at end of file +} diff --git a/test/files/run/identifierCase.check b/test/files/run/identifierCase.check new file mode 100644 index 000000000000..186740cfbe05 --- /dev/null +++ b/test/files/run/identifierCase.check @@ -0,0 +1,10 @@ +identifierCase.scala:22: warning: patterns after a variable pattern cannot match (SLS 8.1.1) + case ªpple => "not so constant" + ^ +identifierCase.scala:23: warning: unreachable code due to variable pattern 'ªpple' on line 22 + case ʰelper => "unreachable" + ^ +identifierCase.scala:23: warning: unreachable code + case ʰelper => "unreachable" + 
^ +not so constant diff --git a/test/files/run/identifierCase.scala b/test/files/run/identifierCase.scala new file mode 100644 index 000000000000..5df95d004cf4 --- /dev/null +++ b/test/files/run/identifierCase.scala @@ -0,0 +1,42 @@ +object Test { + + val $reserved = "$reserved" + val Džul = "Džul" + val ǂnûm = "ǂnûm" + val ⅰ_ⅲ = "ⅰ_ⅲ" + val Ⅰ_Ⅲ = "Ⅰ_Ⅲ" + val ↁelerious = "ↁelerious" + val ǃqhàà = "ǃqhàà" + val ʹthatsaletter = "ʹthatsaletter" + + def main(args: Array[String]): Unit = { + val s = "foo" match { + case $reserved => "constant" + case Džul => "constant" + case ǂnûm => "constant" + case ⅰ_ⅲ => "constant" + case Ⅰ_Ⅲ => "constant" + case ↁelerious => "constant" + case ǃqhàà => "constant" + case ʹthatsaletter => "constant" + case ªpple => "not so constant" + case ʰelper => "unreachable" + } + println(s) + } + + //all non-op characters can follow a "normal" leading letter + //if any of the second characters below weren't letters or numbers, this wouldn't compile + //they are not numbers since they are used as leading characters above + + val a$reserved = "a$" + val aDžul = "aDžul" + val aǂnûm = "aǂnûm" + val aⅰ_ⅲ = "aⅰ_ⅲ" + val aⅠ_Ⅲ = "aⅠ_Ⅲ" + val aↁelerious = "aↁelerious" + val aǃqhàà = "aǃqhàà" + val aʹthatsaletter = "aʹthatsaletter" + val anªpple = "anªpple" + val aʰelper = "aʰelper" +} \ No newline at end of file diff --git a/test/files/run/imain.scala b/test/files/run/imain.scala index c164fb53effb..9e5294e1320c 100644 --- a/test/files/run/imain.scala +++ b/test/files/run/imain.scala @@ -1,15 +1,19 @@ +import java.io.OutputStream + +import scala.tools.nsc.interpreter.shell.ReplReporterImpl + object Test { import scala.tools.nsc._ import interpreter._ import java.io.PrintWriter - class NullOutputStream extends OutputStream { def write(b: Int) { } } + class NullOutputStream extends OutputStream { def write(b: Int): Unit = { } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val settings = new Settings settings.classpath.value = 
System.getProperty("java.class.path") - val intp = new IMain(settings, new PrintWriter(new NullOutputStream)) + val intp = new IMain(settings, new ReplReporterImpl(settings, new PrintWriter(new NullOutputStream))) intp.interpret("def x0 = 123") intp.interpret("val x1 = x0 * 2") println(intp.valueOfTerm("x1")) diff --git a/test/files/run/impconvtimes.scala b/test/files/run/impconvtimes.scala index 477a16a8903d..d377685a9c77 100644 --- a/test/files/run/impconvtimes.scala +++ b/test/files/run/impconvtimes.scala @@ -9,7 +9,7 @@ object Test { def *(newUnit: Unit) = Measure(scalar, newUnit) } - implicit def double2Measure(scalar: Double) = + implicit def double2Measure(scalar: Double): Measure = Measure(scalar, NoUnit) diff --git a/test/files/run/implicit-caching.scala b/test/files/run/implicit-caching.scala index 1e8e17ddd615..0029b992b673 100644 --- a/test/files/run/implicit-caching.scala +++ b/test/files/run/implicit-caching.scala @@ -5,8 +5,7 @@ trait FooSub[T] extends Foo[T] { } object FooSub { - implicit def fooSub[T](implicit ft: Bar[T]): FooSub[T] = - new FooSub[T] {} + implicit def fooSub[T: Bar]: FooSub[T] = new FooSub[T] {} } trait Bar[T] diff --git a/test/files/run/implicit-class-implicit-param-with-default.scala b/test/files/run/implicit-class-implicit-param-with-default.scala index 9c8919f529e8..28e36db93935 100644 --- a/test/files/run/implicit-class-implicit-param-with-default.scala +++ b/test/files/run/implicit-class-implicit-param-with-default.scala @@ -1,7 +1,7 @@ object Test { implicit class C(self: String)(implicit val foo: String = "default") - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println("".foo) println(C("").foo) println(new C("").foo) diff --git a/test/files/run/implicits.scala b/test/files/run/implicits.scala index 5681a9d484dc..fbc8536c041b 100644 --- a/test/files/run/implicits.scala +++ b/test/files/run/implicits.scala @@ -2,7 +2,7 @@ import scala.language.implicitConversions object A { object B { - implicit 
def int2string(x: Int) = "["+x.toString+"]" + implicit def int2string(x: Int): String = "["+x.toString+"]" } } diff --git a/test/files/run/imports.scala b/test/files/run/imports.scala index 4bdbef9f95f2..5becd83d96d5 100644 --- a/test/files/run/imports.scala +++ b/test/files/run/imports.scala @@ -7,11 +7,11 @@ object checker { def check(location: String, what: String, value: Any): Unit = { Console.print("In " + location + ", " + what + ".toString() returns "); - Console.flush; + Console.flush() val string: String = if (value == null) "null" else value.toString(); val test = if (string == location) "ok" else "KO"; Console.println(string + " -> " + test); - Console.flush; + Console.flush() } } @@ -31,7 +31,7 @@ class C_ico() { check("C_ico", "v_ico ", v_ico); check("C_ico", "field ", field); check("C_ico", "method", method); - Console.println; + Console.println() } object o_ico { @@ -58,7 +58,7 @@ class C_ioc() { check("C_ioc", "v_ioc ", v_ioc); check("C_ioc", "field ", field); check("C_ioc", "method", method); - Console.println; + Console.println() } //############################################################################ @@ -79,7 +79,7 @@ class C_oic() { check("C_oic", "v_oic ", v_oic); check("C_oic", "field ", field); check("C_oic", "method", method); - Console.println; + Console.println() } //############################################################################ diff --git a/test/files/run/indy-meth-refs-b.scala b/test/files/run/indy-meth-refs-b.scala deleted file mode 100644 index 148d29e1011b..000000000000 --- a/test/files/run/indy-meth-refs-b.scala +++ /dev/null @@ -1,8 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -object Test { - def min0[A](less: (A, A) => Boolean, xs: List[A]): Option[A] = None - - def min(xs: List[Int]) = min0((x: Int, y: Int) => x < y, xs) - - def main(args: Array[String]): Unit = min(List()) -} diff --git a/test/files/run/indy-meth-refs-c.scala b/test/files/run/indy-meth-refs-c.scala deleted file mode 100644 index 
bce150b77f9b..000000000000 --- a/test/files/run/indy-meth-refs-c.scala +++ /dev/null @@ -1,12 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -object Test { - def main(args: Array[String]): Unit = { - val str = "" - val foo = new Foo() - use(foo.bar(str)) - } - - class Foo { def bar(x: Object) = Symbol("ok") } - - def use(x: => Any) = () -} diff --git a/test/files/run/indy-meth-refs-d.scala b/test/files/run/indy-meth-refs-d.scala deleted file mode 100644 index 6c99c8c86e71..000000000000 --- a/test/files/run/indy-meth-refs-d.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -object Test { - class Foo { def bar() = () } - def main(args: Array[String]): Unit = - Option(new Foo()).foreach(_.bar()) -} diff --git a/test/files/run/indy-meth-refs-e.scala b/test/files/run/indy-meth-refs-e.scala deleted file mode 100644 index 64e25457dcd4..000000000000 --- a/test/files/run/indy-meth-refs-e.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -object Test { - def main(args: Array[String]): Unit = { - List(Option("a")).map(_.map(_.toUpperCase)) - } -} diff --git a/test/files/run/indy-meth-refs-f.scala b/test/files/run/indy-meth-refs-f.scala deleted file mode 100644 index 90a20b4e445d..000000000000 --- a/test/files/run/indy-meth-refs-f.scala +++ /dev/null @@ -1,122 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -object Test { - def anyA(f: Any => Any) = () - def anyB(f: Any => Boolean) = () - def anyI(f: Any => Int) = () - - def objA(f: Object => Any) = () - def objB(f: Object => Boolean) = () - def objI(f: Object => Int) = () - - def strS(f: String => String) = () - - def arrII(f: Array[Int] => Int) = () - def arrSI(f: Array[String] => Int) = () - def arrSS(f: Array[String] => String) = () - - def boolB(f: Boolean => Boolean) = () - - def intI(f: Int => Int) = () - def intB(f: Int => Boolean) = () - - class StrVC(val x: String) extends AnyVal { - def unwrap = x - def bang = new StrVC(x + "!") - } - class BoolVC(val x: Boolean) extends AnyVal { 
- def unwrap = x - def not = new BoolVC(!x) - } - class IntVC(val x: Int) extends AnyVal { - def unwrap = x - def inc = new IntVC(x + 1) - def isZero = x == 0 - def isZeroVC = new BoolVC(x == 0) - } - - def mkStrVC(x: String): StrVC = new StrVC(x) - def mkBoolVC(x: Boolean): BoolVC = new BoolVC(x) - def mkIntVC(x: Int): IntVC = new IntVC(x) - - def strVC1(f: StrVC => String) = () - def strVC2(f: StrVC => StrVC) = () - def strVC3(f: String => StrVC) = () - - def boolVC1(f: BoolVC => Boolean) = () - def boolVC2(f: BoolVC => BoolVC) = () - def boolVC3(f: Boolean => BoolVC) = () - - def intVC1(f: IntVC => Int) = () - def intVC2(f: IntVC => IntVC) = () - def intVC3(f: Int => IntVC) = () - def intVC4(f: IntVC => Boolean) = () - - def vcs1(f: IntVC => BoolVC) = () - - def main(args: Array[String]): Unit = { - anyB("" == _) - anyB("" != _) - anyB(_.isInstanceOf[String]) - anyA(_.asInstanceOf[String]) - anyI(_.##) - - objB("" eq _) - objB("" ne _) - objB("" == _) - objB("" != _) - objB(_.isInstanceOf[String]) - objA(_.asInstanceOf[String]) - objA("".synchronized(_)) - - strS("" + _) - - arrII(_.length) - arrII(xs => xs(0)) - arrSI(_.length) - arrSS(xs => xs(0)) - - //boolB(true eq _) // the result type of an implicit conversion must be more specific than AnyRef ¯\_(ツ)_/¯ - //boolB(true ne _) - boolB(!_) - boolB(true || _) - boolB(true && _) - boolB(true | _) - boolB(true & _) - boolB(true ^ _) - - //intB(1 eq _) - //intB(1 ne _) - intI(1 + _) - intI(1 - _) - intI(1 * _) - intI(1 / _) - intI(1 % _) - intB(1 < _) - intB(1 <= _) - intB(1 > _) - intB(1 >= _) - intI(1 ^ _) - intI(1 & _) - intI(1 << _) - intI(1 >>> _) - intI(1 >> _) - intI(_.toInt) - intI(i => -i) - intI(i => ~i) - - strVC1(_.unwrap) - strVC2(_.bang) - strVC3(mkStrVC(_)) - - boolVC1(_.unwrap) - boolVC2(_.not) - boolVC3(mkBoolVC(_)) - - intVC1(_.unwrap) - intVC2(_.inc) - intVC3(mkIntVC(_)) - intVC4(_.isZero) - - vcs1(_.isZeroVC) - } -} diff --git a/test/files/run/indy-meth-refs-g.scala 
b/test/files/run/indy-meth-refs-g.scala deleted file mode 100644 index 2d66dad23e0f..000000000000 --- a/test/files/run/indy-meth-refs-g.scala +++ /dev/null @@ -1,16 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -import java.io.File - -import scala.collection.mutable - -object Test { - val fqnsToFiles = mutable.HashMap[String, (File, Boolean)]() - - def main(args: Array[String]): Unit = test() - - def test() = { - val fqn = "bob" - val newResult = Option((new File("f"), true)) - newResult.foreach(res => fqnsToFiles.put(fqn, res)) - } -} diff --git a/test/files/run/indy-meth-refs-h.scala b/test/files/run/indy-meth-refs-h.scala deleted file mode 100644 index 988eb9da889e..000000000000 --- a/test/files/run/indy-meth-refs-h.scala +++ /dev/null @@ -1,13 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -trait Entity { - def name: String - def announce = { - def msg = s"I am $name" - None.getOrElse(msg) - } -} - -object Test extends Entity { - def name = "Test" - def main(args: Array[String]): Unit = assert(announce == "I am Test") -} diff --git a/test/files/run/indy-meth-refs-i.scala b/test/files/run/indy-meth-refs-i.scala deleted file mode 100644 index 25778894d170..000000000000 --- a/test/files/run/indy-meth-refs-i.scala +++ /dev/null @@ -1,17 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -class C { - def foo = 0 -} - -class D extends C { - override def foo = 1 - def bar = () => super.foo -} - -object Test { - def main(args: Array[String]): Unit = { - val d = new D - val obtained = d.bar.apply() - assert(obtained == 0, obtained) - } -} diff --git a/test/files/run/indy-meth-refs-j/I.java b/test/files/run/indy-meth-refs-j/I.java deleted file mode 100644 index 5aa5e84b85bc..000000000000 --- a/test/files/run/indy-meth-refs-j/I.java +++ /dev/null @@ -1,5 +0,0 @@ -package demo; - -public @interface I { - String value(); -} diff --git a/test/files/run/indy-meth-refs-j/test.scala b/test/files/run/indy-meth-refs-j/test.scala deleted file mode 100644 index 962046991744..000000000000 --- 
a/test/files/run/indy-meth-refs-j/test.scala +++ /dev/null @@ -1,6 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -object Test { - def main(args: Array[String]): Unit = { - val I_value: demo.I => String = x => x.value() - } -} diff --git a/test/files/run/indy-meth-refs.scala b/test/files/run/indy-meth-refs.scala deleted file mode 100644 index 110be78ebc7a..000000000000 --- a/test/files/run/indy-meth-refs.scala +++ /dev/null @@ -1,10 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -case object Test { - def f0(f: Function0[String]) = () - def f1(f: Function1[Any, String]) = () - - def main(args: Array[String]): Unit = { - f0(() => toString()) - f1(_.toString()) - } -} diff --git a/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala b/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala index 49610031412e..e5e8b4c1b32e 100644 --- a/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala +++ b/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(Macro.classNameOf(classOf[C])) } class C(val x: Int) extends AnyVal diff --git a/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala b/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala index a284e28725f8..b51a72006b29 100644 --- a/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala +++ b/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(Macro.methodTypeOf({def x(a: Int): String = ???})) println(Macro.methodTypeOf({def x(): C = ???})) } diff --git a/test/files/run/indy-via-macro-reflector/Test_2.scala b/test/files/run/indy-via-macro-reflector/Test_2.scala index 6e51340afa43..d0ffefb2c0ec 100644 --- a/test/files/run/indy-via-macro-reflector/Test_2.scala +++ b/test/files/run/indy-via-macro-reflector/Test_2.scala @@ -1,5 +1,5 @@ object Test { - def 
main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(new C().foo(null, 0)) println(Macro.reflectorConstructor("dynamic")) println(Macro.reflectorTrait("dynamic")) diff --git a/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala b/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala index 77c2b522c714..37eb41e06ab4 100644 --- a/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala +++ b/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val s = "foo!bar" assert(Macro.matcher("foo.bar", s).matches == true) } diff --git a/test/files/run/indy-via-macro/Test_2.scala b/test/files/run/indy-via-macro/Test_2.scala index 830947a46b93..b5b61000101d 100644 --- a/test/files/run/indy-via-macro/Test_2.scala +++ b/test/files/run/indy-via-macro/Test_2.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { assert(Macro.compilePattern("foo.bar").matcher("foo!bar").matches) } -} \ No newline at end of file +} diff --git a/test/files/run/indyLambdaKinds.check b/test/files/run/indyLambdaKinds.check index 17c7eb8e52b8..6476d0dbc91b 100644 --- a/test/files/run/indyLambdaKinds.check +++ b/test/files/run/indyLambdaKinds.check @@ -1,16 +1,50 @@ -Inline into Main$.t1a: inlined A_1.a. Before: 7 ins, inlined: 3 ins. -Inline into Main$.t1b: inlined A_1.a. Before: 11 ins, inlined: 3 ins. -Inline into Main$.t2a: inlined A_1.b. Before: 7 ins, inlined: 3 ins. -Inline into Main$.t2b: inlined A_1.b. Before: 10 ins, inlined: 3 ins. -Inline into Main$.t3a: inlined A_1.c. Before: 7 ins, inlined: 3 ins. -Inline into Main$.t3b: inlined A_1.c. Before: 10 ins, inlined: 3 ins. -Inline into Main$.t4a: failed A_1.d. 
A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. -Inline into Main$.t4b: failed A_1.d. A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. -Inline into Main$.t5a: failed A_1.e. 
A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$1(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. -Inline into Main$.t5b: failed A_1.e. A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$1(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. -Inline into Main$.t6a: failed A_1.f. 
A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$2(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. -Inline into Main$.t6b: failed A_1.f. A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$2(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. -warning: 6 optimizer warnings; re-run enabling -opt-warnings for details, or try -help +Inlining into Main$.main + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 119 ins, after: 136 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 136 ins, after: 153 ins. 
+ inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 153 ins, after: 170 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 170 ins, after: 187 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 187 ins, after: 204 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 204 ins, after: 221 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 221 ins, after: 238 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 238 ins, after: 255 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 255 ins, after: 272 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 272 ins, after: 289 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 289 ins, after: 306 ins. + inlined scala/Predef$.println (the callee is a forwarder or alias method). Before: 306 ins, after: 323 ins. + inlined A_1.m1 (the callee is a small trivial method). Before: 323 ins, after: 337 ins. + inlined A_1.m1 (the callee is a small trivial method). Before: 337 ins, after: 351 ins. + inlined A_1.m1 (the callee is a small trivial method). Before: 351 ins, after: 365 ins. + inlined A_1.m1 (the callee is a small trivial method). Before: 365 ins, after: 379 ins. +Inlining into Main$.t1a: inlined A_1.a (the callsite is annotated `@inline`). Before: 7 ins, after: 14 ins. +Inlining into Main$.t2a: inlined A_1.b (the callsite is annotated `@inline`). Before: 7 ins, after: 14 ins. +Inlining into Main$.t3a: inlined A_1.c (the callsite is annotated `@inline`). Before: 7 ins, after: 14 ins. +#partest java24+ +Inlining into Main$.t4a: failed A_1.d (the callsite is annotated `@inline`). 
A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t4b: failed A_1.d (the callsite is annotated `@inline`). A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t5a: failed A_1.e (the callsite is annotated `@inline`). 
A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$0(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t5b: failed A_1.e (the callsite is annotated `@inline`). A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$0(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t6a: failed A_1.f (the callsite is annotated `@inline`). 
A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$0(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t6b: failed A_1.f (the callsite is annotated `@inline`). A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$0(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. +#partest !java24+ +Inlining into Main$.t4a: failed A_1.d (the callsite is annotated `@inline`). 
A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t4b: failed A_1.d (the callsite is annotated `@inline`). A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t5a: failed A_1.e (the callsite is annotated `@inline`). 
A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$1(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t5b: failed A_1.e (the callsite is annotated `@inline`). A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$1(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t6a: failed A_1.f (the callsite is annotated `@inline`). 
A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$2(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. +Inlining into Main$.t6b: failed A_1.f (the callsite is annotated `@inline`). A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$2(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. +#partest +Inlining into Main$.t1b + inlined A_1.a (the callsite is annotated `@inline`). Before: 11 ins, after: 18 ins. 
+ rewrote invocations of closure allocated in Main$.t1b with body m1: INVOKEINTERFACE java/util/function/BiFunction.apply (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object; (itf) + inlined A_1.m1 (the callee is a small trivial method). Before: 25 ins, after: 30 ins. +Inlining into Main$.t2b + inlined A_1.b (the callsite is annotated `@inline`). Before: 10 ins, after: 17 ins. + rewrote invocations of closure allocated in Main$.t2b with body m2: INVOKEINTERFACE java/util/function/Function.apply (Ljava/lang/Object;)Ljava/lang/Object; (itf) + inlined A_1.m2 (the callee is a small trivial method). Before: 21 ins, after: 25 ins. +Inlining into Main$.t3b + inlined A_1.c (the callsite is annotated `@inline`). Before: 10 ins, after: 17 ins. + rewrote invocations of closure allocated in Main$.t3b with body : INVOKEINTERFACE java/util/function/Function.apply (Ljava/lang/Object;)Ljava/lang/Object; (itf) +warning: 6 optimizer warnings; re-run enabling -Wopt for details, or try -help m1 m1 m2 diff --git a/test/files/run/indyLambdaKinds/Test_2.scala b/test/files/run/indyLambdaKinds/Test_2.scala index d876dd5fd72b..4480d9c817af 100644 --- a/test/files/run/indyLambdaKinds/Test_2.scala +++ b/test/files/run/indyLambdaKinds/Test_2.scala @@ -3,7 +3,7 @@ import reflect.internal.util._ object Test extends DirectTest { - override def extraSettings: String = s"-usejavacp -cp ${testOutput.path} -opt:l:inline -opt-inline-from:** -Yopt-log-inline _ -d ${testOutput.path}" + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path} -opt:inline:** -Vinline _" override def code = """object Main { @noinline def t1a(a: A_1) = a.a(): @inline @@ -49,7 +49,6 @@ object Test extends DirectTest { override def show(): Unit = { compile() - ScalaClassLoader(getClass.getClassLoader) run ("Main", Nil) - + ScalaClassLoader(getClass.getClassLoader).run("Main", Nil) } } diff --git a/test/files/run/indylambda-boxing/test.scala b/test/files/run/indylambda-boxing/test.scala index 
82f8d2f49786..74691ff2edd0 100644 --- a/test/files/run/indylambda-boxing/test.scala +++ b/test/files/run/indylambda-boxing/test.scala @@ -10,7 +10,7 @@ class Test { def test7 = {val vc = new Capture; (i: Int) => vc } def test8 = {val c = 42; (s: String) => (s, c)} // not adapted - def test9 = {val c = 42; (s: String) => ()} + def test9 = {@annotation.unused val c = 42; (s: String) => ()} def test10 = {(s: List[String]) => ()} } diff --git a/test/files/run/inferred-structural-3.check b/test/files/run/inferred-structural-3.check new file mode 100644 index 000000000000..6f270aa5313b --- /dev/null +++ b/test/files/run/inferred-structural-3.check @@ -0,0 +1,32 @@ + +scala> trait A { def f: AnyRef } // refinement dropped +trait A + +scala> def a = Option(new { def g = 1 }) // refinement dropped +def a: Option[AnyRef] + +scala> def b: Option[{ def g: Int }] = Option(new { def g = 1 }) +def b: Option[AnyRef{def g: Int}] + +scala> def c(p: { def i: Int }): Int = 0 +def c(p: AnyRef{def i: Int}): Int + +scala> def d = new A { def f: A = this } // refinement of existing method is kept, in Scala 3 too +def d: A{def f: A} + +scala> def e = new A { def f: AnyRef = new AnyRef } // no refinement in 2.13 eihter +def e: A + +scala> def f = new A { def f = new AnyRef } // no refinement in 2.13 either +def f: A + +scala> def g = new A { def f = this } // inferred type of `f` is AnyRef because of infer-override +def g: A + +scala> def h = new AnyRef { type T = String } // TODO: dropped in Scala 3; figure out the rules Scala 3 uses and approximate them +def h: AnyRef{type T = String} + +scala> def i = new AnyRef { val x = 2 } // dropped +def i: AnyRef + +scala> :quit diff --git a/test/files/run/inferred-structural-3.scala b/test/files/run/inferred-structural-3.scala new file mode 100644 index 000000000000..03196dda6c99 --- /dev/null +++ b/test/files/run/inferred-structural-3.scala @@ -0,0 +1,17 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def 
extraSettings = "-Xsource:3 -Xsource-features:no-infer-structural,infer-override" + def code = + """trait A { def f: AnyRef } // refinement dropped + |def a = Option(new { def g = 1 }) // refinement dropped + |def b: Option[{ def g: Int }] = Option(new { def g = 1 }) + |def c(p: { def i: Int }): Int = 0 + |def d = new A { def f: A = this } // refinement of existing method is kept, in Scala 3 too + |def e = new A { def f: AnyRef = new AnyRef } // no refinement in 2.13 eihter + |def f = new A { def f = new AnyRef } // no refinement in 2.13 either + |def g = new A { def f = this } // inferred type of `f` is AnyRef because of infer-override + |def h = new AnyRef { type T = String } // TODO: dropped in Scala 3; figure out the rules Scala 3 uses and approximate them + |def i = new AnyRef { val x = 2 } // dropped + |""".stripMargin +} diff --git a/test/files/run/inferred-type-constructors-hou.check b/test/files/run/inferred-type-constructors-hou.check index bb4e32912f4b..2931b3a721ce 100644 --- a/test/files/run/inferred-type-constructors-hou.check +++ b/test/files/run/inferred-type-constructors-hou.check @@ -1,4 +1,3 @@ -warning: two feature warnings; re-run with -feature for details p.Iterable[Int] p.Set[Int] p.Seq[Int] @@ -50,7 +49,7 @@ warning: two feature warnings; re-run with -feature for details scala.collection.immutable.Set[Int] Seq[Int] Array[Int] - scala.collection.AbstractSet[Int] + scala.collection.immutable.AbstractSet[Int] Comparable[String] scala.collection.immutable.LinearSeq[Int] - Iterable[Int] + scala.collection.immutable.Iterable[Int] diff --git a/test/files/run/inferred-type-constructors-hou.scala b/test/files/run/inferred-type-constructors-hou.scala index f59df0606490..722287716cf7 100644 --- a/test/files/run/inferred-type-constructors-hou.scala +++ b/test/files/run/inferred-type-constructors-hou.scala @@ -1,4 +1,3 @@ -// scalac: -Ypartial-unification package p { trait TCon[+CC[X]] { def fPublic: CC[Int] = ??? 
@@ -38,7 +37,7 @@ object Test { def extract[A, CC[X]](xs: CC[A]): CC[A] = xs def whatis[T: TypeTag](x: T): Unit = { val tpe = typeOf[T] - val access = tpe.typeSymbol.asInstanceOf[scala.reflect.internal.HasFlags].accessString.replaceAllLiterally("package ", "") + val access = tpe.typeSymbol.asInstanceOf[scala.reflect.internal.HasFlags].accessString.replace("package ", "") println(f"$access%15s $tpe") } @@ -120,7 +119,7 @@ object Test { whatis(extract(Array[Int]())) whatis(extract(scala.collection.immutable.BitSet(1))) whatis(extract("abc")) - whatis(extract(if (true) Stream(1) else List(1))) + whatis(extract(if (true) LazyList(1) else List(1))) whatis(extract(if (true) Seq(1) else Set(1))) } } diff --git a/test/files/run/inferred-type-constructors.check b/test/files/run/inferred-type-constructors.check index 07057387689e..8b296d27fc13 100644 --- a/test/files/run/inferred-type-constructors.check +++ b/test/files/run/inferred-type-constructors.check @@ -1,4 +1,3 @@ -warning: two feature warnings; re-run with -feature for details p.Iterable[Int] p.Set[Int] p.Seq[Int] @@ -47,10 +46,11 @@ warning: two feature warnings; re-run with -feature for details List[Nothing] scala.collection.immutable.Vector[Nothing] scala.collection.immutable.Iterable[(Int, Int)] + scala.collection.immutable.Map[Int,Int] scala.collection.immutable.Set[Int] Seq[Int] Array[Int] - scala.collection.AbstractSet[Int] + scala.collection.immutable.AbstractSet[Int] Comparable[String] scala.collection.immutable.LinearSeq[Int] - Iterable[Int] + scala.collection.immutable.Iterable[Int] diff --git a/test/files/run/inferred-type-constructors.scala b/test/files/run/inferred-type-constructors.scala index 79a8653f686b..74582ddefee9 100644 --- a/test/files/run/inferred-type-constructors.scala +++ b/test/files/run/inferred-type-constructors.scala @@ -37,7 +37,7 @@ object Test { def extract[A, CC[X]](xs: CC[A]): CC[A] = xs def whatis[T: TypeTag](x: T): Unit = { val tpe = typeOf[T] - val access = 
tpe.typeSymbol.asInstanceOf[scala.reflect.internal.HasFlags].accessString.replaceAllLiterally("package ", "") + val access = tpe.typeSymbol.asInstanceOf[scala.reflect.internal.HasFlags].accessString.replace("package ", "") println(f"$access%15s $tpe") } @@ -113,13 +113,14 @@ object Test { whatis(extract(Nil)) whatis(extract(Vector())) + whatis(extract(scala.collection.immutable.Iterable[(Int, Int)]())) whatis(extract(Map[Int,Int]())) whatis(extract(Set[Int]())) whatis(extract(Seq[Int]())) whatis(extract(Array[Int]())) whatis(extract(scala.collection.immutable.BitSet(1))) whatis(extract("abc")) - whatis(extract(if (true) Stream(1) else List(1))) + whatis(extract(if (true) LazyList(1) else List(1))) whatis(extract(if (true) Seq(1) else Set(1))) } } diff --git a/test/files/run/infiniteloop.check b/test/files/run/infiniteloop.check index 6f8cf6e4d9cf..18d89b22bd93 100644 --- a/test/files/run/infiniteloop.check +++ b/test/files/run/infiniteloop.check @@ -1 +1,2 @@ -Stream(512, 256, 128, 64, 32, 16, 8, 4, 2, 1) +LazyList() +List(512, 256, 128, 64, 32, 16, 8, 4, 2, 1) diff --git a/test/files/run/infiniteloop.scala b/test/files/run/infiniteloop.scala index 06926cec1ec0..2765c4a6f8db 100644 --- a/test/files/run/infiniteloop.scala +++ b/test/files/run/infiniteloop.scala @@ -1,13 +1,14 @@ /** Tests the optimiser (not to loop on 'reverse'). 
*/ object Test extends App { - def foo { - val s3 = Stream.range(1, 1000) //100000 (ticket #153: Stackoverflow) + def foo(): Unit = { + val s3 = LazyList.range(1, 1000) //100000 (ticket #153: Stackoverflow) // ticket #153 def powers(x: Int) = if ((x&(x-1)) == 0) Some(x) else None println(s3.flatMap(powers).reverse) + println(s3.flatMap(powers).reverse.toList) } - foo + foo() } diff --git a/test/files/run/infix-rangepos.scala b/test/files/run/infix-rangepos.scala index 8d2a16a0b536..5221ef503ea0 100644 --- a/test/files/run/infix-rangepos.scala +++ b/test/files/run/infix-rangepos.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ object Test extends CompilerTest { import global._ - override def extraSettings = super.extraSettings + " -Yrangepos" + override def sources = List( "class C1 { def t = List(1).map ( x => x ) }", "class C2 { def t = List(1).map { x => x } }", diff --git a/test/files/run/infixPostfixAttachments.check b/test/files/run/infixPostfixAttachments.check index d3b4c75c820f..248f5bfc6202 100644 --- a/test/files/run/infixPostfixAttachments.check +++ b/test/files/run/infixPostfixAttachments.check @@ -1,8 +1,17 @@ +newSource1.scala:15: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method d, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. [quickfixable] + def t6 = this d + ^ +newSource1.scala:16: warning: Auto-application to `()` is deprecated. Supply the empty argument list `()` explicitly to invoke method d, +or remove the empty argument list from its definition (Java-defined methods are exempt). +In Scala 3, an unapplied method like this will be eta-expanded into a function. 
[quickfixable] + def t7 = this.d + ^ t1 this.a(0) List(InfixAttachment) -t2 this.b(scala.Tuple2.apply[Int, Int](1, 2)).b(scala.Tuple2.apply[Int, Int](1, 2)).c(1, 2) List(InfixAttachment) +t2 this.b(scala.Tuple2.apply[Int, Int](1, 2)).b(scala.Tuple2.apply[Int, Int](1, 2)).c(1, 2) List(InfixAttachment, MultiargInfixAttachment) t2 this.b(scala.Tuple2.apply[Int, Int](1, 2)).b(scala.Tuple2.apply[Int, Int](1, 2)) List(InfixAttachment) t2 this.b(scala.Tuple2.apply[Int, Int](1, 2)) List(InfixAttachment) -t4 this.d() List(InfixAttachment) t6 this.d() List(AutoApplicationAttachment, InfixAttachment) t6 this.d List(PostfixAttachment) t7 this.d() List(AutoApplicationAttachment) diff --git a/test/files/run/infixPostfixAttachments.scala b/test/files/run/infixPostfixAttachments.scala index 217e443b4d08..14ef3226d9e5 100644 --- a/test/files/run/infixPostfixAttachments.scala +++ b/test/files/run/infixPostfixAttachments.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ object Test extends CompilerTest { import global._ - override def extraSettings = super.extraSettings + " -Yrangepos -Ystop-after:typer" + override def extraSettings = super.extraSettings + " -Ystop-after:typer -deprecation" override def code = """class C { @@ -17,7 +17,7 @@ object Test extends CompilerTest { | def t1 = this a 0 | def t2 = this b (1, 2) b ((1, 2)) c (1, 2) | def t3 = this.b(1, 2).b((1, 2)).c(1, 2) - | def t4 = this d () + | // def t4 = this d () // not allowed in 2.13 | def t5 = this.d() | def t6 = this d | def t7 = this.d diff --git a/test/files/run/inline-stack-map-frames/A_1.scala b/test/files/run/inline-stack-map-frames/A_1.scala new file mode 100644 index 000000000000..477835051395 --- /dev/null +++ b/test/files/run/inline-stack-map-frames/A_1.scala @@ -0,0 +1,7 @@ +//> using options -opt:inline:** +class A { + @noinline final def b: B = null + @inline final def a: A = b +} + +class B extends A diff --git a/test/files/run/inline-stack-map-frames/Test_2.scala 
b/test/files/run/inline-stack-map-frames/Test_2.scala new file mode 100644 index 000000000000..13ca041f45a2 --- /dev/null +++ b/test/files/run/inline-stack-map-frames/Test_2.scala @@ -0,0 +1,17 @@ +//> using options -opt:inline:** +class C { + def t(a: A): AnyRef = { + // a.a is inlined, resulting in a.b, which has return type B + var foo: AnyRef = if (hashCode == 0) a.a else this + if (foo == null) + foo = this + // at the merge point, the stack map frame calculation needs the LUB of (B, C), + // so the ClassBType for C needs to be cached + foo + } +} +object Test { + def main(args: Array[String]): Unit = { + new C + } +} diff --git a/test/files/run/inliner-infer.scala b/test/files/run/inliner-infer.scala index e41d6ae5c761..20024a5e0c89 100644 --- a/test/files/run/inliner-infer.scala +++ b/test/files/run/inliner-infer.scala @@ -1,6 +1,6 @@ -/** Test that the inliner is not inferring that `xs' is +/** Test that the inliner is not inferring that `xs` is * always Nil, removing the call to isEmpty. */ object Test extends App { diff --git a/test/files/run/inner-obj-auto.scala b/test/files/run/inner-obj-auto.scala index 00ea5119cc60..bac5dedc8944 100644 --- a/test/files/run/inner-obj-auto.scala +++ b/test/files/run/inner-obj-auto.scala @@ -1,7 +1,8 @@ - +//> using options -Xmaxwarns 0 /* ================================================================================ Automatically generated on 2011-05-11. Do Not Edit (unless you have to). + (I had to.) 
(2-level nesting) ================================================================================ */ @@ -19,7 +20,7 @@ class Class2_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -29,10 +30,10 @@ class Class2_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class1_2).run } + def run: Unit = { (new Class1_2).run } } @@ -48,7 +49,7 @@ object Object3_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -58,10 +59,10 @@ object Object3_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class1_2).run } // trigger + def run: Unit = { (new Class1_2).run } // trigger } @@ -77,7 +78,7 @@ trait Trait4_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -87,10 +88,10 @@ trait Trait4_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class1_2).run } + def run: Unit = { (new Class1_2).run } } @@ -106,7 +107,7 @@ class Class6_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -116,10 +117,10 @@ class Class6_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object5_2.run } + def run: Unit = { Object5_2.run } } @@ -135,7 +136,7 @@ object Object7_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -145,10 +146,10 @@ object Object7_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object5_2.run } // trigger + def run: Unit = { Object5_2.run } // trigger } @@ -164,7 +165,7 @@ 
trait Trait8_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -174,10 +175,10 @@ trait Trait8_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object5_2.run } + def run: Unit = { Object5_2.run } } @@ -193,7 +194,7 @@ class Class10_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -203,10 +204,10 @@ class Class10_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait9_2 {}).run } + def run: Unit = { (new Trait9_2 {}).run } } @@ -222,7 +223,7 @@ object Object11_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -232,10 +233,10 @@ object Object11_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait9_2 {}).run } // trigger + def run: Unit = { (new Trait9_2 {}).run } // trigger } @@ -251,7 +252,7 @@ trait Trait12_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -261,16 +262,16 @@ trait Trait12_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait9_2 {}).run } + def run: Unit = { (new Trait9_2 {}).run } } class Class14_1 { - def method13_2 { + def method13_2: Unit = { var ObjCounter = 0 object Obj { ObjCounter += 1} @@ -280,7 +281,7 @@ class Class14_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -293,13 +294,13 @@ class Class14_1 { runTest // trigger } - def run { method13_2 } + def run: Unit = { method13_2 } } object Object15_1 { - def method13_2 { + def method13_2: Unit = { var ObjCounter = 0 
object Obj { ObjCounter += 1} @@ -309,7 +310,7 @@ object Object15_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -322,13 +323,13 @@ object Object15_1 { runTest // trigger } - def run { method13_2 } // trigger + def run: Unit = { method13_2 } // trigger } trait Trait16_1 { - def method13_2 { + def method13_2: Unit = { var ObjCounter = 0 object Obj { ObjCounter += 1} @@ -338,7 +339,7 @@ trait Trait16_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -351,13 +352,13 @@ trait Trait16_1 { runTest // trigger } - def run { method13_2 } + def run: Unit = { method13_2 } } class Class18_1 { - private def method17_2 { + private def method17_2: Unit = { var ObjCounter = 0 object Obj { ObjCounter += 1} @@ -367,7 +368,7 @@ class Class18_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -380,13 +381,13 @@ class Class18_1 { runTest // trigger } - def run { method17_2 } + def run: Unit = { method17_2 } } object Object19_1 { - private def method17_2 { + private def method17_2: Unit = { var ObjCounter = 0 object Obj { ObjCounter += 1} @@ -396,7 +397,7 @@ object Object19_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -409,13 +410,13 @@ object Object19_1 { runTest // trigger } - def run { method17_2 } // trigger + def run: Unit = { method17_2 } // trigger } trait Trait20_1 { - private def method17_2 { + private def method17_2: Unit = { var ObjCounter = 0 object Obj { ObjCounter += 1} @@ -425,7 +426,7 @@ trait Trait20_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple 
instances: " + ObjCounter) @@ -438,7 +439,7 @@ trait Trait20_1 { runTest // trigger } - def run { method17_2 } + def run: Unit = { method17_2 } } @@ -454,7 +455,7 @@ class Class22_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -467,7 +468,7 @@ class Class22_1 { runTest // trigger } - def run { fun21_2() } + def run: Unit = { fun21_2() } } @@ -483,7 +484,7 @@ object Object23_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -496,7 +497,7 @@ object Object23_1 { runTest // trigger } - def run { fun21_2() } // trigger + def run: Unit = { fun21_2() } // trigger } @@ -512,7 +513,7 @@ trait Trait24_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -525,7 +526,7 @@ trait Trait24_1 { runTest // trigger } - def run { fun21_2() } + def run: Unit = { fun21_2() } } @@ -542,7 +543,7 @@ class Class26_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -556,7 +557,7 @@ class Class26_1 { } } - def run { (new Class25_2) } + def run: Unit = { (new Class25_2) } } @@ -573,7 +574,7 @@ object Object27_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -587,7 +588,7 @@ object Object27_1 { } } - def run { (new Class25_2) } // trigger + def run: Unit = { (new Class25_2) } // trigger } @@ -604,7 +605,7 @@ trait Trait28_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -618,7 +619,7 @@ trait Trait28_1 { } } - def run { (new Class25_2) } + def run: 
Unit = { (new Class25_2) } } @@ -635,7 +636,7 @@ class Class30_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -649,7 +650,7 @@ class Class30_1 { } } - def run { (new Trait29_2 {}) } + def run: Unit = { (new Trait29_2 {}) } } @@ -666,7 +667,7 @@ object Object31_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -680,7 +681,7 @@ object Object31_1 { } } - def run { (new Trait29_2 {}) } // trigger + def run: Unit = { (new Trait29_2 {}) } // trigger } @@ -697,7 +698,7 @@ trait Trait32_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -711,7 +712,7 @@ trait Trait32_1 { } } - def run { (new Trait29_2 {}) } + def run: Unit = { (new Trait29_2 {}) } } @@ -727,7 +728,7 @@ class Class34_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -740,7 +741,7 @@ class Class34_1 { runTest // trigger } - def run { lzvalue33_2 } + def run: Unit = { lzvalue33_2 } } @@ -756,7 +757,7 @@ object Object35_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -769,7 +770,7 @@ object Object35_1 { runTest // trigger } - def run { lzvalue33_2 } // trigger + def run: Unit = { lzvalue33_2 } // trigger } @@ -785,7 +786,7 @@ trait Trait36_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -798,7 +799,7 @@ trait Trait36_1 { runTest // trigger } - def run { lzvalue33_2 } + def run: Unit = { lzvalue33_2 } } @@ -814,7 +815,7 @@ class Class38_1 { x == Obj } - def 
runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -827,7 +828,7 @@ class Class38_1 { runTest // trigger } - def run { value37_2 } + def run: Unit = { value37_2 } } @@ -843,7 +844,7 @@ object Object39_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -856,7 +857,7 @@ object Object39_1 { runTest // trigger } - def run { value37_2 } // trigger + def run: Unit = { value37_2 } // trigger } @@ -872,7 +873,7 @@ trait Trait40_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -885,7 +886,7 @@ trait Trait40_1 { runTest // trigger } - def run { value37_2 } + def run: Unit = { value37_2 } } @@ -901,7 +902,7 @@ class Class42_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -911,10 +912,10 @@ class Class42_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class41_2).run } + def run: Unit = { (new Class41_2).run } } @@ -930,7 +931,7 @@ object Object43_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -940,10 +941,10 @@ object Object43_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class41_2).run } // trigger + def run: Unit = { (new Class41_2).run } // trigger } @@ -959,7 +960,7 @@ trait Trait44_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -969,10 +970,10 @@ trait Trait44_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class41_2).run } + def run: Unit = { (new 
Class41_2).run } } @@ -988,7 +989,7 @@ class Class46_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -998,10 +999,10 @@ class Class46_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object45_2.run } + def run: Unit = { Object45_2.run } } @@ -1017,7 +1018,7 @@ object Object47_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1027,10 +1028,10 @@ object Object47_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object45_2.run } // trigger + def run: Unit = { Object45_2.run } // trigger } @@ -1046,7 +1047,7 @@ trait Trait48_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1056,10 +1057,10 @@ trait Trait48_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object45_2.run } + def run: Unit = { Object45_2.run } } @@ -1075,7 +1076,7 @@ class Class50_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1085,10 +1086,10 @@ class Class50_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait49_2 {}).run } + def run: Unit = { (new Trait49_2 {}).run } } @@ -1104,7 +1105,7 @@ object Object51_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1114,10 +1115,10 @@ object Object51_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait49_2 {}).run } // trigger + def run: Unit = { (new Trait49_2 {}).run } // trigger } @@ -1133,7 +1134,7 @@ trait 
Trait52_1 { x == Obj } - def runTest { + def runTest: Unit = { try { assert(singleThreadedAccess(Obj)) assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1143,10 +1144,10 @@ trait Trait52_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait49_2 {}).run } + def run: Unit = { (new Trait49_2 {}).run } } @@ -1157,7 +1158,7 @@ class Class54_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1166,7 +1167,7 @@ class Class54_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1176,10 +1177,10 @@ class Class54_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class53_2).run } + def run: Unit = { (new Class53_2).run } } @@ -1190,7 +1191,7 @@ object Object55_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1199,7 +1200,7 @@ object Object55_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1209,10 +1210,10 @@ object Object55_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class53_2).run } // trigger + def run: Unit = { (new Class53_2).run } // trigger } @@ -1223,7 +1224,7 @@ trait Trait56_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1232,7 +1233,7 @@ trait Trait56_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1242,10 
+1243,10 @@ trait Trait56_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Class53_2).run } + def run: Unit = { (new Class53_2).run } } @@ -1256,7 +1257,7 @@ class Class58_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1265,7 +1266,7 @@ class Class58_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1275,10 +1276,10 @@ class Class58_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object57_2.run } + def run: Unit = { Object57_2.run } } @@ -1289,7 +1290,7 @@ object Object59_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1298,7 +1299,7 @@ object Object59_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1308,10 +1309,10 @@ object Object59_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object57_2.run } // trigger + def run: Unit = { Object57_2.run } // trigger } @@ -1322,7 +1323,7 @@ trait Trait60_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1331,7 +1332,7 @@ trait Trait60_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1341,10 +1342,10 @@ trait Trait60_1 { } } - def run { runTest } // trigger + def run: Unit = { runTest } // trigger } - def run { Object57_2.run } + def run: Unit = { 
Object57_2.run } } @@ -1355,7 +1356,7 @@ class Class62_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1364,7 +1365,7 @@ class Class62_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1374,10 +1375,10 @@ class Class62_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait61_2 {}).run } + def run: Unit = { (new Trait61_2 {}).run } } @@ -1388,7 +1389,7 @@ object Object63_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1397,7 +1398,7 @@ object Object63_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1407,10 +1408,10 @@ object Object63_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait61_2 {}).run } // trigger + def run: Unit = { (new Trait61_2 {}).run } // trigger } @@ -1421,7 +1422,7 @@ trait Trait64_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1430,7 +1431,7 @@ trait Trait64_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1440,21 +1441,21 @@ trait Trait64_1 { } } - def run { runTest } + def run: Unit = { runTest } } - def run { (new Trait61_2 {}).run } + def run: Unit = { (new Trait61_2 {}).run } } class Class66_1 { - def method65_2 { + def method65_2: Unit = { @volatile var ObjCounter = 0 object Obj { ObjCounter += 1} - def 
multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1463,7 +1464,7 @@ class Class66_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1476,18 +1477,18 @@ class Class66_1 { runTest // trigger } - def run { method65_2 } + def run: Unit = { method65_2 } } object Object67_1 { - def method65_2 { + def method65_2: Unit = { @volatile var ObjCounter = 0 object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1496,7 +1497,7 @@ object Object67_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1509,18 +1510,18 @@ object Object67_1 { runTest // trigger } - def run { method65_2 } // trigger + def run: Unit = { method65_2 } // trigger } trait Trait68_1 { - def method65_2 { + def method65_2: Unit = { @volatile var ObjCounter = 0 object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1529,7 +1530,7 @@ trait Trait68_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1542,18 +1543,18 @@ trait Trait68_1 { runTest // trigger } - def run { method65_2 } + def run: Unit = { method65_2 } } class Class70_1 { - private def method69_2 { + private def method69_2: Unit = { @volatile var ObjCounter = 0 object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1562,7 +1563,7 @@ class 
Class70_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1575,18 +1576,18 @@ class Class70_1 { runTest // trigger } - def run { method69_2 } + def run: Unit = { method69_2 } } object Object71_1 { - private def method69_2 { + private def method69_2: Unit = { @volatile var ObjCounter = 0 object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1595,7 +1596,7 @@ object Object71_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1608,18 +1609,18 @@ object Object71_1 { runTest // trigger } - def run { method69_2 } // trigger + def run: Unit = { method69_2 } // trigger } trait Trait72_1 { - private def method69_2 { + private def method69_2: Unit = { @volatile var ObjCounter = 0 object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1628,7 +1629,7 @@ trait Trait72_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1641,7 +1642,7 @@ trait Trait72_1 { runTest // trigger } - def run { method69_2 } + def run: Unit = { method69_2 } } @@ -1652,7 +1653,7 @@ class Class74_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1661,7 +1662,7 @@ class Class74_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1674,7 +1675,7 @@ class Class74_1 { 
runTest // trigger } - def run { fun73_2() } + def run: Unit = { fun73_2() } } @@ -1685,7 +1686,7 @@ object Object75_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1694,7 +1695,7 @@ object Object75_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1707,7 +1708,7 @@ object Object75_1 { runTest // trigger } - def run { fun73_2() } // trigger + def run: Unit = { fun73_2() } // trigger } @@ -1718,7 +1719,7 @@ trait Trait76_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1727,7 +1728,7 @@ trait Trait76_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1740,7 +1741,7 @@ trait Trait76_1 { runTest // trigger } - def run { fun73_2() } + def run: Unit = { fun73_2() } } @@ -1752,7 +1753,7 @@ class Class78_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1761,7 +1762,7 @@ class Class78_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1775,7 +1776,7 @@ class Class78_1 { } } - def run { (new Class77_2) } + def run: Unit = { (new Class77_2) } } @@ -1787,7 +1788,7 @@ object Object79_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1796,7 +1797,7 @@ object Object79_1 { threads foreach (_.join()) 
} - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1810,7 +1811,7 @@ object Object79_1 { } } - def run { (new Class77_2) } // trigger + def run: Unit = { (new Class77_2) } // trigger } @@ -1822,7 +1823,7 @@ trait Trait80_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1831,7 +1832,7 @@ trait Trait80_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1845,7 +1846,7 @@ trait Trait80_1 { } } - def run { (new Class77_2) } + def run: Unit = { (new Class77_2) } } @@ -1857,7 +1858,7 @@ class Class82_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1866,7 +1867,7 @@ class Class82_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1880,7 +1881,7 @@ class Class82_1 { } } - def run { (new Trait81_2 {}) } + def run: Unit = { (new Trait81_2 {}) } } @@ -1892,7 +1893,7 @@ object Object83_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1901,7 +1902,7 @@ object Object83_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1915,7 +1916,7 @@ object Object83_1 { } } - def run { (new Trait81_2 {}) } // trigger + def run: Unit = { (new Trait81_2 {}) } // trigger } @@ -1927,7 +1928,7 @@ trait Trait84_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() 
{ + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1936,7 +1937,7 @@ trait Trait84_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1950,7 +1951,7 @@ trait Trait84_1 { } } - def run { (new Trait81_2 {}) } + def run: Unit = { (new Trait81_2 {}) } } @@ -1961,7 +1962,7 @@ class Class90_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -1970,7 +1971,7 @@ class Class90_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -1983,7 +1984,7 @@ class Class90_1 { runTest // trigger } - def run { value89_2 } + def run: Unit = { value89_2 } } @@ -1994,7 +1995,7 @@ trait Trait92_1 { object Obj { ObjCounter += 1} - def multiThreadedAccess() { + def multiThreadedAccess(): Unit = { val threads = for (i <- 1 to 5) yield new Thread(new Runnable { def run = Obj }) @@ -2003,7 +2004,7 @@ trait Trait92_1 { threads foreach (_.join()) } - def runTest { + def runTest: Unit = { try { multiThreadedAccess() assert(ObjCounter == 1, "multiple instances: " + ObjCounter) @@ -2016,12 +2017,12 @@ trait Trait92_1 { runTest // trigger } - def run { value89_2 } + def run: Unit = { value89_2 } } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { (new Class2_1).run Object3_1.run (new Trait4_1 {}).run diff --git a/test/files/run/interop_classtags_are_classmanifests.scala b/test/files/run/interop_classtags_are_classmanifests.scala index 62d85c3ce3b2..ef59354cf98d 100644 --- a/test/files/run/interop_classtags_are_classmanifests.scala +++ b/test/files/run/interop_classtags_are_classmanifests.scala @@ -1,9 +1,9 @@ -import 
scala.reflect.ClassTag @deprecated("Suppress warnings", since="2.11") object Test extends App { + import scala.reflect.{ClassManifest, ClassTag} def classTagIsClassManifest[T: ClassTag] = { - println(classManifest[T]) + println(implicitly[ClassManifest[T]]) } classTagIsClassManifest[Int] diff --git a/test/files/run/interop_manifests_are_abstypetags.scala b/test/files/run/interop_manifests_are_abstypetags.scala index f2c2723106bb..e864aec1099e 100644 --- a/test/files/run/interop_manifests_are_abstypetags.scala +++ b/test/files/run/interop_manifests_are_abstypetags.scala @@ -8,4 +8,4 @@ object Test extends App { manifestIsWeakTypeTag[Int] manifestIsWeakTypeTag[String] manifestIsWeakTypeTag[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/run/interop_manifests_are_classtags.scala b/test/files/run/interop_manifests_are_classtags.scala index 705038ece7cb..a4dd8f5bee6e 100644 --- a/test/files/run/interop_manifests_are_classtags.scala +++ b/test/files/run/interop_manifests_are_classtags.scala @@ -1,7 +1,7 @@ -import scala.reflect.{ClassTag, classTag} @deprecated("Suppress warnings", since="2.11") object Test extends App { + import scala.reflect.{ClassManifest, classTag} def classManifestIsClassTag[T: ClassManifest] = { println(classTag[T]) println(Array[T]().toList) diff --git a/test/files/run/interop_manifests_are_typetags.scala b/test/files/run/interop_manifests_are_typetags.scala index 294d3c22deb1..a228e701706e 100644 --- a/test/files/run/interop_manifests_are_typetags.scala +++ b/test/files/run/interop_manifests_are_typetags.scala @@ -8,4 +8,4 @@ object Test extends App { manifestIsTypeTag[Int] manifestIsTypeTag[String] manifestIsTypeTag[Array[Int]] -} \ No newline at end of file +} diff --git a/test/files/run/interop_typetags_are_manifests.scala b/test/files/run/interop_typetags_are_manifests.scala index 2dc3ff110b42..bc7f0840c37b 100644 --- a/test/files/run/interop_typetags_are_manifests.scala +++ 
b/test/files/run/interop_typetags_are_manifests.scala @@ -1,7 +1,7 @@ -// scalac: -Yrangepos:false +//> using options -Yrangepos:false +// import scala.reflect.runtime.universe._ import scala.reflect.ClassTag -import internal._ object Test extends App { def typeTagIsManifest[T: TypeTag : ClassTag] = { diff --git a/test/files/run/interpolation-repl.check b/test/files/run/interpolation-repl.check new file mode 100644 index 000000000000..c6e246c806b1 --- /dev/null +++ b/test/files/run/interpolation-repl.check @@ -0,0 +1,12 @@ + +scala> raw"\"" +val res0: String = \" + +scala> raw"\" // this used to be a comment, but after scala/pull#8830 it's part of the string! " +val res1: String = "\" // this used to be a comment, but after scala/pull#8830 it's part of the string! " + +scala> raw"\" // this used to compile, now it's unclosed + ^ + error: unclosed string literal; note that `\"` no longer closes single-quoted interpolated string literals since 2.13.6, you can use a triple-quoted string instead + +scala> :quit diff --git a/test/files/run/interpolation-repl.scala b/test/files/run/interpolation-repl.scala new file mode 100644 index 000000000000..ba84178ce92c --- /dev/null +++ b/test/files/run/interpolation-repl.scala @@ -0,0 +1,9 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ +raw"\"" +raw"\" // this used to be a comment, but after scala/pull#8830 it's part of the string! " +raw"\" // this used to compile, now it's unclosed +""" +} diff --git a/test/files/run/interpolation.check b/test/files/run/interpolation.check index 997abb449726..2ab952f46f75 100644 --- a/test/files/run/interpolation.check +++ b/test/files/run/interpolation.check @@ -30,3 +30,9 @@ Best price: 13.35 0 00 +"everybody loves escaped quotes" is a common sentiment. 
+hi"$" +hi"$" +hi"$" +hi"$" +hi"$" diff --git a/test/files/run/interpolation.scala b/test/files/run/interpolation.scala index 14d981934895..4dc85e9f1f56 100644 --- a/test/files/run/interpolation.scala +++ b/test/files/run/interpolation.scala @@ -29,4 +29,12 @@ object Test extends App { println(f"") println(f"${0}") println(f"${0}${0}") + + println(s"$"everybody loves escaped quotes$" is a common sentiment.") + println(f"hi$"$$$"") + println(raw"hi$"$$$"") + + println(s"""hi$"$$$"""") + println(f"""hi$"$$$"""") + println(raw"""hi$"$$$"""") } diff --git a/test/files/run/invalid-lubs.check b/test/files/run/invalid-lubs.check new file mode 100644 index 000000000000..ada908f00b8d --- /dev/null +++ b/test/files/run/invalid-lubs.check @@ -0,0 +1,17 @@ + +scala> def foo(a: Boolean, b: List[Any], c: collection.mutable.ListBuffer[Any]) = if (a) b else c +def foo(a: Boolean, b: List[Any], c: scala.collection.mutable.ListBuffer[Any]): scala.collection.AbstractSeq[Any] with scala.collection.StrictOptimizedSeqOps[Any,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]},scala.collection.AbstractSeq[Any] with scala.collection.StrictOptimizedSeqOps[Any,[_]scala.collection.AbstractSeq[_] 
with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Any] with scala.collection.StrictOptimizedSeqOps[Any,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Any] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]}] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.StrictOptimizedSeqOps[A,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable]} + +scala> List(List[Any](), collection.mutable.ListBuffer[Any]()) +val res0: List[scala.collection.AbstractSeq[Any] with scala.collection.StrictOptimizedSeqOps[Any,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with 
scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]},scala.collection.AbstractSeq[Any] with scala.collection.StrictOptimizedSeqOps[Any,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Any] with scala.collection.StrictOptimizedSeqOps[Any,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Any] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]}] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.StrictOptimizedSeqOps[A,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable]}] = List(List(), ListBuffer()) + +scala> List(List(), collection.mutable.ListBuffer()) +val res1: List[scala.collection.AbstractSeq[Nothing] with scala.collection.StrictOptimizedSeqOps[Nothing,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with 
scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]},scala.collection.AbstractSeq[Nothing] with scala.collection.StrictOptimizedSeqOps[Nothing,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Nothing] with scala.collection.StrictOptimizedSeqOps[Nothing,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Nothing] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]}] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.StrictOptimizedSeqOps[A,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable]}] = List(List(), ListBuffer()) + +scala> List(List(), Vector()) +val res2: List[scala.collection.immutable.AbstractSeq[Nothing] with 
scala.collection.immutable.StrictOptimizedSeqOps[Nothing,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.immutable.StrictOptimizedSeqOps[_,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.immutable.StrictOptimizedSeqOps[_,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[_] with scala.collection.immutable.StrictOptimizedSeqOps[_,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.immutable.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]},scala.collection.immutable.AbstractSeq[Nothing] with scala.collection.immutable.StrictOptimizedSeqOps[Nothing,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.immutable.StrictOptimizedSeqOps[_,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[Nothing] with scala.collection.immutable.StrictOptimizedSeqOps[Nothing,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[Nothing] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.immutable.AbstractSeq[A] with 
scala.collection.generic.DefaultSerializable]}] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.immutable.AbstractSeq[A] with scala.collection.immutable.StrictOptimizedSeqOps[A,[_]scala.collection.immutable.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.immutable.AbstractSeq[A] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable]}] = List(List(), Vector()) + +scala> List(collection.mutable.Queue(), List()) +val res3: List[scala.collection.AbstractSeq[Nothing] with scala.collection.StrictOptimizedSeqOps[Nothing,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]},scala.collection.AbstractSeq[Nothing] with scala.collection.StrictOptimizedSeqOps[Nothing,[_]scala.collection.AbstractSeq[_] with scala.collection.StrictOptimizedSeqOps[_,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Nothing] with 
scala.collection.StrictOptimizedSeqOps[Nothing,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[Nothing] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable]}] with scala.collection.generic.DefaultSerializable{def iterableFactory: scala.collection.SeqFactory[[A]scala.collection.AbstractSeq[A] with scala.collection.StrictOptimizedSeqOps[A,[_]scala.collection.AbstractSeq[_] with scala.collection.generic.DefaultSerializable,scala.collection.AbstractSeq[A] with scala.collection.generic.DefaultSerializable] with scala.collection.generic.DefaultSerializable]}] = List(Queue(), List()) + +scala> :quit diff --git a/test/files/run/invalid-lubs.scala b/test/files/run/invalid-lubs.scala new file mode 100644 index 000000000000..b5862d70f729 --- /dev/null +++ b/test/files/run/invalid-lubs.scala @@ -0,0 +1,12 @@ +import scala.tools.partest.ReplTest +// These LUBs are no longer invalid. 
+// ReplTest because some errors shadow others +object Test extends ReplTest { + def code = + """def foo(a: Boolean, b: List[Any], c: collection.mutable.ListBuffer[Any]) = if (a) b else c + |List(List[Any](), collection.mutable.ListBuffer[Any]()) + |List(List(), collection.mutable.ListBuffer()) + |List(List(), Vector()) + |List(collection.mutable.Queue(), List()) + """.stripMargin +} diff --git a/test/files/run/iq.check b/test/files/run/iq.check index 311bf83ed429..a05257dde5d8 100644 --- a/test/files/run/iq.check +++ b/test/files/run/iq.check @@ -6,6 +6,7 @@ qc: Queue(42, 0) Head: 42 q5: Queue(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) q5[5]: 5 +q5alt: Queue(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) q5 == q5c: true q5c == q5: true q8: Queue(2, 3, 4, 5, 6, 7, 8, 9, 10, 11) diff --git a/test/files/run/iq.scala b/test/files/run/iq.scala index 9929f0e1a0fa..137d4a984517 100644 --- a/test/files/run/iq.scala +++ b/test/files/run/iq.scala @@ -5,7 +5,7 @@ import scala.collection.immutable.Queue object iq { - def main { + def main(): Unit = { /* Create an empty queue. */ val q: Queue[Int] = Queue.empty @@ -16,7 +16,7 @@ object iq { Console.println("Empty") } - /* Test enqueing. */ + /* Test enqueueing. */ val q2 = q.enqueue(42).enqueue(0) val qa = q :+ 42 :+ 0 assert(q2 == qa) @@ -51,8 +51,8 @@ object iq { q3 } - /* Test sequence enqueing. */ - val q5: Queue[Any] = q4.enqueue(List(1,2,3,4,5,6,7,8,9)) + /* Test sequence enqueueing. */ + val q5: Queue[Any] = q4.enqueueAll(List(1,2,3,4,5,6,7,8,9)) /* Test toString. 
* Expected: q5: Queue(0,1,2,3,4,5,6,7,8,9) */ @@ -62,7 +62,11 @@ object iq { */ Console.println("q5[5]: " + q5(5)) - val q5c: Queue[Int] = Queue.empty.enqueue(List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)) + val q5alt: Queue[Any] = q4.enqueueAll(collection.Iterable(1,2,3,4,5,6,7,8,9)) + Console.println("q5alt: " + q5alt) + assert(q5alt.sameElements(q5)) + + val q5c: Queue[Int] = Queue.empty.enqueueAll(List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)) /* Testing == * Expected: q5 == q9: true @@ -91,7 +95,7 @@ object iq { */ Console.print("Elements: "); q6.iterator.foreach(e => Console.print(" "+ e + " ")) - Console.println; + Console.println() /* Testing mkString * Expected: String: <1-2-3-4-5-6-7-8-9> @@ -111,7 +115,7 @@ object iq { } object Test { - def main(args: Array[String]) { - iq.main + def main(args: Array[String]): Unit = { + iq.main() } } diff --git a/test/files/run/is-valid-num.scala b/test/files/run/is-valid-num.scala index c003c091e6a3..5e0f17405c2e 100644 --- a/test/files/run/is-valid-num.scala +++ b/test/files/run/is-valid-num.scala @@ -1,6 +1,5 @@ -/* - * filter: re-run with -deprecation - */ +//> using options -Xlint -Werror +@annotation.nowarn("cat=deprecation&msg=isWhole") object Test { def x = BigInt("10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000") def y = BigDecimal("" + (Short.MaxValue + 1) + ".0") @@ -11,7 +10,7 @@ object Test { def l2 = Int.MinValue.toLong - 1 def main(args: Array[String]): Unit = { -// assert(x.isWhole, x) + assert(x.isWhole, x) assert(!x.isValidDouble, x) assert(!x.isValidFloat, x) assert(!x.isValidLong, x) @@ -49,9 +48,9 @@ object Test { testNaNs() } - def testBigInts() { + def testBigInts(): Unit = { def biExp2(e: Int) = BigInt(1) << e - def checkBigInt2(bi: BigInt) { checkBigInt(-bi); checkBigInt(bi) } + def checkBigInt2(bi: BigInt): Unit = { checkBigInt(-bi); checkBigInt(bi) } val pf = 24 val pd = 53 @@ -136,7 +135,7 @@ object Test { checkBigInt2(biExp2(1024)) } - def testNonWholeDoubles() { + def 
testNonWholeDoubles(): Unit = { checkNonWholeDouble(0.5) checkNonWholeDouble(-math.E) checkNonWholeDouble((1L << 51).toDouble + 0.5) @@ -145,7 +144,7 @@ object Test { checkNonWholeDouble(Double.NegativeInfinity) } - def testNaNs() { + def testNaNs(): Unit = { assert(!Double.NaN.isWhole, Double.NaN) // assert(!Double.NaN.isValidDouble, Double.NaN) // assert(!Double.NaN.isValidFloat, Double.NaN) @@ -165,13 +164,13 @@ object Test { assert(!Float.NaN.isValidByte, Float.NaN) } - def checkNonWholeDouble(d: Double) { + def checkNonWholeDouble(d: Double): Unit = { val f = d.toFloat val isFloat = f == d if (!d.isInfinity) { val bd = BigDecimal(new java.math.BigDecimal(d)) -// assert(!bd.isWhole, bd) + assert(!bd.isWhole, bd) assert(bd.isExactDouble, bd) assert(bd.isExactFloat == isFloat, bd) assert(!bd.isValidLong, bd) @@ -202,7 +201,7 @@ object Test { } } - def checkBigInt(bi: BigInt) { + def checkBigInt(bi: BigInt): Unit = { val bd = BigDecimal(bi, java.math.MathContext.UNLIMITED) val isByte = bi >= Byte.MinValue && bi <= Byte.MaxValue val isShort = bi >= Short.MinValue && bi <= Short.MaxValue @@ -221,7 +220,7 @@ object Test { assert(bd.isValidShort == isShort, bd) assert(bd.isValidByte == isByte, bd) -// assert(bi.isWhole, bi) + assert(bi.isWhole, bi) assert(bi.isValidDouble == isDouble, bi) assert(bi.isValidFloat == isFloat, bi) assert(bi.isValidLong == isLong, bi) diff --git a/test/files/run/iterableonce-deprecations.check b/test/files/run/iterableonce-deprecations.check new file mode 100644 index 000000000000..218a76bb1ead --- /dev/null +++ b/test/files/run/iterableonce-deprecations.check @@ -0,0 +1,30 @@ +iterableonce-deprecations.scala:10: warning: method to in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(factory) instead + i.to(List) + ^ +iterableonce-deprecations.scala:12: warning: method toBuffer in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(ArrayBuffer) instead + i.toBuffer + ^ 
+iterableonce-deprecations.scala:13: warning: method toArray in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.toArray + i.toArray + ^ +iterableonce-deprecations.scala:14: warning: method toList in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(List) instead + i.toList + ^ +iterableonce-deprecations.scala:15: warning: method toSet in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(Set) instead + i.toSet + ^ +iterableonce-deprecations.scala:16: warning: method toIterable in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(Iterable) instead + i.toIterable + ^ +iterableonce-deprecations.scala:17: warning: method toSeq in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(Seq) instead + i.toSeq + ^ +iterableonce-deprecations.scala:18: warning: method toStream in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(LazyList) instead + i.toStream + ^ +iterableonce-deprecations.scala:19: warning: method toVector in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(Vector) instead + i.toVector + ^ +iterableonce-deprecations.scala:23: warning: method toMap in class IterableOnceExtensionMethods is deprecated (since 2.13.0): Use .iterator.to(Map) instead + ti.toMap + ^ diff --git a/test/files/run/iterableonce-deprecations.scala b/test/files/run/iterableonce-deprecations.scala new file mode 100644 index 000000000000..5a8108368c23 --- /dev/null +++ b/test/files/run/iterableonce-deprecations.scala @@ -0,0 +1,25 @@ +//> using options -deprecation +// +// +// +// collections-strawman issue #467 +object Test { + def main(args: Array[String]): Unit = { + val i: IterableOnce[Int] = List(1, 2, 3, 4, 5) + + i.to(List) + + i.toBuffer + i.toArray + i.toList + i.toSet + i.toIterable + i.toSeq + i.toStream + i.toVector + + val ti: IterableOnce[(Int, Int)] = 
List((1,2), (3,4), (5,6)) + + ti.toMap + } +} diff --git a/test/files/run/iterables.check b/test/files/run/iterables.check index aac90b70a909..b7b0b61a85c8 100644 --- a/test/files/run/iterables.check +++ b/test/files/run/iterables.check @@ -1,5 +1,7 @@ false 0,1,2,3,4,5,6,7,8,9 5,6,7,8,9 +0,2,4,6,8 + 0,2,4,6,8 1,3,5,7,9 diff --git a/test/files/run/iterables.scala b/test/files/run/iterables.scala index ad30f4731673..bc5492fb1d5d 100644 --- a/test/files/run/iterables.scala +++ b/test/files/run/iterables.scala @@ -1,13 +1,17 @@ +import scala.collection.StrictOptimizedIterableOps object Test extends App { class Test(n: Int) extends Iterable[Int] { private var i = 0 def iterator = new Iterator[Int] { def hasNext = i < n - def next = + def next() = if (hasNext) { val v = i; i += 1; v } else throw new IndexOutOfBoundsException("empty iterator") } } + + class TestStrict(n: Int) extends Test(n) with StrictOptimizedIterableOps[Int, Iterable, Iterable[Int]] + { val x = new Test(10) println(x.isEmpty) @@ -20,7 +24,13 @@ object Test extends App { { val x = new Test(10) val y = x.partition(_ % 2 == 0) - println(y._1.mkString(",")) - println(y._2.mkString(",")) + println(y._1.mkString(",")) // evens + println(y._2.mkString(",")) // empty, creates two iterators + } + { + val x = new TestStrict(10) + val y = x.partition(_ % 2 == 0) + println(y._1.mkString(",")) // evens + println(y._2.mkString(",")) // odds } } diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala index cd82856a5bbe..a0db917cd87c 100644 --- a/test/files/run/iterator-from.scala +++ b/test/files/run/iterator-from.scala @@ -1,49 +1,47 @@ /* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps - * filter: optimizer warning + * filter: inliner warnings */ import scala.util.{Random => R} import scala.collection._ -import scala.math.Ordered object Test extends App { val maxLength = 25 val maxKey = 50 val maxValue = 50 - def testSet[A <% 
Ordered[A]](set: SortedSet[A], list: List[A]) { + def testSet[A](set: SortedSet[A], list: List[A])(implicit o: Ordering[A]): Unit = { val distinctSorted = list.distinct.sorted - assertEquals("Set size wasn't the same as list sze", set.size, distinctSorted.size) + assertEquals("Set size wasn't the same as list size", set.size, distinctSorted.size) for(key <- distinctSorted) { val clazz = set.getClass val iteratorFrom = (set iteratorFrom key).toList - check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set from key).iterator.toList) - check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (_ < key)) - check(clazz, list, s"set iteratorFrom $key", s"set keysIterator from $key", iteratorFrom, (set keysIteratorFrom key).toList) + check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set rangeFrom key).iterator.toList) + check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (o.lt(_, key))) } } - def testMap[A <% Ordered[A], B](map: SortedMap[A, B], list: List[(A, B)]) { + def testMap[A, B](map: SortedMap[A, B], list: List[(A, B)])(implicit o: Ordering[A]): Unit = { val distinctSorted = distinctByKey(list).sortBy(_._1) - assertEquals("Map size wasn't the same as list sze", map.size, distinctSorted.size) + assertEquals("Map size wasn't the same as list size", map.size, distinctSorted.size) for(keyValue <- distinctSorted) { val key = keyValue._1 val clazz = map.getClass val iteratorFrom = (map iteratorFrom key).toList - check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, (map from key).iterator.toList) - check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (_._1 < key)) + check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, 
(map rangeFrom key).iterator.toList) + check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (x => o.lt(x._1, key))) check(clazz, list, s"map iteratorFrom $key map (_._1)", s"map keysIteratorFrom $key", iteratorFrom map (_._1), (map keysIteratorFrom key).toList) check(clazz, list, s"map iteratorFrom $key map (_._2)", s"map valuesIteratorFrom $key", iteratorFrom map (_._2), (map valuesIteratorFrom key).toList) } } - def check[A](clazz: Class[_], list: List[_], m1: String, m2: String, l1: List[A], l2: List[A]) { + def check[A](clazz: Class[_], list: List[_], m1: String, m2: String, l1: List[A], l2: List[A]): Unit = { assertEquals(s"$clazz: `$m1` didn't match `$m2` on list $list", l1, l2) } - def assertEquals[A](msg: String, x: A, y: A) { + def assertEquals[A](msg: String, x: A, y: A): Unit = { assert(x == y, s"$msg\n1: $x\n2: $y") } @@ -65,7 +63,7 @@ object Test extends App { val treeMap = immutable.TreeMap(keyValues:_*) testMap(treeMap, keyValues) - testMap(treeMap.filterKeys(_ % 2 == 0), keyValues filter (_._1 % 2 == 0)) - testMap(treeMap mapValues (_ + 1), keyValues map {case (k,v) => (k, v + 1)}) + testMap(treeMap.view.filterKeys(_ % 2 == 0).to(SortedMap), keyValues filter (_._1 % 2 == 0)) + testMap(treeMap.view.mapValues(_ + 1).to(SortedMap), keyValues map {case (k,v) => (k, v + 1)}) } } diff --git a/test/files/run/iterator3444.scala b/test/files/run/iterator3444.scala index 1d0713addc4e..43037a440cf1 100644 --- a/test/files/run/iterator3444.scala +++ b/test/files/run/iterator3444.scala @@ -3,14 +3,14 @@ // ticked #3444 object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val it = (1 to 12).toSeq.iterator - assert(it.next == 1) + assert(it.next() == 1) assert(it.take(2).toList == List(2, 3)) val jt = (4 to 12).toSeq.iterator - assert(jt.next == 4) + assert(jt.next() == 4) assert(jt.drop(5).toList == List(10, 11, 12)) val kt = (1 until 10).toSeq.iterator 
diff --git a/test/files/run/junitForwarders/C_1.scala b/test/files/run/junitForwarders/C_1.scala index 919f3118c157..00b8d458ec56 100644 --- a/test/files/run/junitForwarders/C_1.scala +++ b/test/files/run/junitForwarders/C_1.scala @@ -1,15 +1,15 @@ trait T { - @org.junit.Test def foo = 0 + @org.junitlike.Test def foo = 0 } class C extends T object Test extends App { def check(c: Class[_], e: String) = { - val s = c.getDeclaredMethods.sortBy(_.getName).map(m => s"${m.getName} - ${m.getDeclaredAnnotations.map(ann => "@" + ann.annotationType().getName) mkString(", ")}").mkString(";") + val s = c.getDeclaredMethods.sortBy(_.getName).map(m => s"${m.getName} - ${m.getDeclaredAnnotations.mkString(", ")}").mkString(";") assert(s == e, s"found: $s\nexpected: $e") } - check(classOf[C], "foo - @org.junit.Test") + check(classOf[C], "foo - @org.junitlike.Test()") // scala/scala-dev#213, scala/scala#5570: `foo$` should not have the @Test annotation - check(classOf[T], "$init$ - ;foo - @org.junit.Test;foo$ - ") + check(classOf[T], "$init$ - ;foo - @org.junitlike.Test();foo$ - ") } diff --git a/test/files/run/junitForwarders/Test.java b/test/files/run/junitForwarders/Test.java index 57c4d5b544d8..651cf8ea1840 100644 --- a/test/files/run/junitForwarders/Test.java +++ b/test/files/run/junitForwarders/Test.java @@ -1,4 +1,4 @@ -package org.junit; +package org.junitlike; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/test/files/run/kmpSliceSearch.scala b/test/files/run/kmpSliceSearch.scala index 7a0bfc67838a..e9d0c032d0a0 100644 --- a/test/files/run/kmpSliceSearch.scala +++ b/test/files/run/kmpSliceSearch.scala @@ -1,6 +1,4 @@ -// scalac: -Ydelambdafy:inline object Test { - import scala.collection.SeqLike def slowSearch[A](xs: Seq[A], ys: Seq[A], start: Int = 0): Int = { if (xs startsWith ys) start else if (xs.isEmpty) -1 @@ -11,28 +9,28 @@ object Test { if (i<0) i else xs.length - ys.length - i } - def main(args: Array[String]) { + def 
main(args: Array[String]): Unit = { val rng = new scala.util.Random(java.lang.Integer.parseInt("kmp",36)) // Make sure we agree with naive implementation for (h <- Array(2,5,1000)) { for (i <- 0 to 100) { for (j <- 0 to 10) { - val xs = (0 to j).map(_ => (rng.nextInt & 0x7FFFFFFF) % h) + val xs = (0 to j).map(_ => (rng.nextInt() & 0x7FFFFFFF) % h) val xsa = xs.toArray val xsv = Vector() ++ xs val xsl = xs.toList - val xss = Vector[Seq[Int]](xs,xsa,xsv,xsl) + val xss = Vector[Seq[Int]](xs,xsa.toIndexedSeq,xsv,xsl) for (k <- 0 to 5) { - val ys = (0 to k).map(_ => (rng.nextInt & 0x7FFFFFFF) % h) + val ys = (0 to k).map(_ => (rng.nextInt() & 0x7FFFFFFF) % h) val ysa = ys.toArray val ysv = Vector() ++ ys val ysl = ys.toList - val yss = Vector[Seq[Int]](ys,ysa,ysv,ysl) + val yss = Vector[Seq[Int]](ys,ysa.toIndexedSeq,ysv,ysl) val fwd_slow = slowSearch(xs,ys) val bkw_slow = bkwSlowSearch(xs,ys) - val fwd_fast = xss.flatMap(xs => yss.map(ys => SeqLike.indexOf(xs,0,xs.length,ys,0,ys.length,0))) - val bkw_fast = xss.flatMap(xs => yss.map(ys => SeqLike.lastIndexOf(xs,0,xs.length,ys,0,ys.length,xs.length))) + val fwd_fast = xss.flatMap(xs => yss.map(ys => xs.indexOfSlice(ys))) + val bkw_fast = xss.flatMap(xs => yss.map(ys => xs.lastIndexOfSlice(ys))) assert(fwd_fast.forall(_ == fwd_slow)) assert(bkw_fast.forall(_ == bkw_slow)) } @@ -42,20 +40,20 @@ object Test { // Check performance^Wcorrectness of common small test cases val haystacks = List[Seq[Int]]( - Array(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15), + Array(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15).toIndexedSeq, Vector(99,2,99,99,2,99,99,99,2,99,99,99,99,2), List(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1), 1 to 15 ) val needles = List[Seq[Int]]( - Array(7,8,9,10), + Array(7,8,9,10).toIndexedSeq, Vector(99,99,99), List(1,1,1,1,1,2), 5 to 9 ) (haystacks zip needles) foreach { case (hay, nee) => - println(hay.indexOfSlice(nee,2) + " " + hay.lastIndexOfSlice(nee,13)) + println(s"${hay.indexOfSlice(nee,2)} ${hay.lastIndexOfSlice(nee,13)}") } } } 
diff --git a/test/files/run/lambda-serialization-gc.javaopts b/test/files/run/lambda-serialization-gc.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/lambda-serialization-gc.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/lambda-serialization-gc.scala b/test/files/run/lambda-serialization-gc.scala index cc61436e32e3..2195e027a356 100644 --- a/test/files/run/lambda-serialization-gc.scala +++ b/test/files/run/lambda-serialization-gc.scala @@ -1,3 +1,5 @@ +//> using javaOpt -Xmx512m + import java.io._ import java.net.URLClassLoader @@ -23,7 +25,7 @@ object Test { def deserializedInThrowawayClassloader = { val throwawayLoader: java.net.URLClassLoader = new java.net.URLClassLoader(loader.getURLs, ClassLoader.getSystemClassLoader) { val maxMemory = Runtime.getRuntime.maxMemory() - val junk = new Array[Byte]((maxMemory / 2).toInt) + @annotation.unused val junk = new Array[Byte]((maxMemory / 2).toInt) } val clazz = throwawayLoader.loadClass("C") assert(clazz != loaderCClass) diff --git a/test/files/run/lambda-serialization-meth-ref.scala b/test/files/run/lambda-serialization-meth-ref.scala deleted file mode 100644 index f5fc228e89ed..000000000000 --- a/test/files/run/lambda-serialization-meth-ref.scala +++ /dev/null @@ -1,37 +0,0 @@ -// scalac: -Ydelambdafy:method-ref -import java.io.{ByteArrayInputStream, ByteArrayOutputStream} -import java.io.{ObjectInputStream, ObjectOutputStream} - -object Test { - def main(args: Array[String]): Unit = { - roundTripMethRef() - roundTripMethRef_F0() - } - - // lambda targeting a method reference, not a SAM or FunctionN (should behave the same way) - def roundTripMethRef(): Unit = { - val lambda: String => String = (s: String) => s.toUpperCase - val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[String => String] - val reconstituted2 = serializeDeserialize(reconstituted1).asInstanceOf[String => String] - 
assert(reconstituted1.apply("yo") == "YO") - assert(reconstituted1.getClass == reconstituted2.getClass) - } - - def name = "Test" - - def roundTripMethRef_F0(): Unit = { - val lambda: () => String = () => Test.name - val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[() => String] - val reconstituted2 = serializeDeserialize(reconstituted1).asInstanceOf[() => String] - assert(reconstituted1.apply() == "Test") - assert(reconstituted1.getClass == reconstituted2.getClass) - } - - def serializeDeserialize[T <: AnyRef](obj: T) = { - val buffer = new ByteArrayOutputStream - val out = new ObjectOutputStream(buffer) - out.writeObject(obj) - val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) - in.readObject.asInstanceOf[T] - } -} diff --git a/test/files/run/lambda-serialization-security.scala b/test/files/run/lambda-serialization-security.scala index 08e235b1cb9b..0bdec3079ce6 100644 --- a/test/files/run/lambda-serialization-security.scala +++ b/test/files/run/lambda-serialization-security.scala @@ -13,14 +13,14 @@ object Test { val lambda = (p: Param) => ("a", p, c) val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[Object => Any] val p = new Param - assert(reconstituted1.apply(p) == ("a", p, c)) + assert(reconstituted1.apply(p) == (("a", p, c))) val reconstituted2 = serializeDeserialize(lambda).asInstanceOf[Object => Any] assert(reconstituted1.getClass == reconstituted2.getClass) val reconstituted3 = serializeDeserialize(reconstituted1) - assert(reconstituted3.apply(p) == ("a", p, c)) + assert(reconstituted3.apply(p) == (("a", p, c))) - val specializedLambda = (p: Int) => List(p, c).length + val specializedLambda = (p: Int) => List[Any](p, c).length assert(serializeDeserialize(specializedLambda).apply(42) == 2) assert(serializeDeserialize(serializeDeserialize(specializedLambda)).apply(42) == 2) } diff --git a/test/files/run/lambda-serialization.scala b/test/files/run/lambda-serialization.scala index 
1134eb0aa95d..f4ef4eb90913 100644 --- a/test/files/run/lambda-serialization.scala +++ b/test/files/run/lambda-serialization.scala @@ -21,7 +21,7 @@ class C extends java.io.Serializable { () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => () ) - private def foo(): Unit = { + @annotation.unused private def foo(): Unit = { assert(false, "should not be called!!!") } } @@ -39,18 +39,8 @@ object Test { } def fakeLambdaFailsToDeserialize(): Unit = { - val fake = new SerializedLambda( - /* capturingClass = */ classOf[C], - /* functionalInterfaceClass = */ classOf[FakeSam].getName, - /* functionalInterfaceMethodName = */ "apply", - /* functionalInterfaceMethodSignature = */ "()V", - /* implMethodKind = */ MethodHandleInfo.REF_invokeVirtual, - /* implClass = */ classOf[C].getName, - /* implMethodName = */ "foo", - /* implMethodSignature = */ "()V", - /* instantiatedMethodType = */ "()V", - /* capturedArgs = */ Array(new C), - ) + val fake = new SerializedLambda(classOf[C], classOf[FakeSam].getName, "apply", "()V", + MethodHandleInfo.REF_invokeVirtual, classOf[C].getName, "foo", "()V", "()V", Array(new C)) try { serializeDeserialize(fake).asInstanceOf[FakeSam].apply() assert(false) diff --git a/test/files/run/large_class.scala b/test/files/run/large_class.scala index aa486ef8f7ed..e422f653a2da 100644 --- a/test/files/run/large_class.scala +++ b/test/files/run/large_class.scala @@ -1,9 +1,7 @@ import scala.tools.partest._ -import java.io.{Console => _, _} // a cold run of partest takes about 15s for this test on my laptop object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -d " + testOutput.path def s(n: Int) = "\""+n+"\"" @@ -19,9 +17,5 @@ object Test extends DirectTest { s(n+60000)+")") mkString ";"} 
|}""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/large_code.scala b/test/files/run/large_code.scala index f9d7f8c95bd0..c3b0beac7876 100644 --- a/test/files/run/large_code.scala +++ b/test/files/run/large_code.scala @@ -1,9 +1,7 @@ import scala.tools.partest._ -import java.io.{Console => _, _} // a cold run of partest takes about 15s for this test on my laptop object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -d " + testOutput.path // test that we hit the code size limit and error out gracefully // 5958 is the magic number (2^16/11 -- each `a(1,2,3,4,5,6)` is 11 bytes of bytecode) @@ -16,9 +14,5 @@ object Test extends DirectTest { | } |}""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/lazy-concurrent.scala b/test/files/run/lazy-concurrent.scala index d09fc4cd0663..8ee606a2fe8c 100644 --- a/test/files/run/lazy-concurrent.scala +++ b/test/files/run/lazy-concurrent.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { class Singleton { val field = () println("Initializing singleton.") diff --git a/test/files/run/lazy-exprs.scala b/test/files/run/lazy-exprs.scala index 204c4b564bc6..21c28b9f73f6 100644 --- a/test/files/run/lazy-exprs.scala +++ b/test/files/run/lazy-exprs.scala @@ -1,6 +1,6 @@ object TestExpressions { - def patmatchScrut { + def patmatchScrut(): Unit = { lazy val z1: Option[String] = { println("forced "); Some("lazy z1") } val res = z1 match { @@ -14,7 +14,7 @@ object TestExpressions { println("failed") } - def patmatchCase { + def patmatchCase(): Unit = { val t: Option[String] = Some("test") val res = t match { case Some(msg) => @@ -31,7 +31,7 @@ object TestExpressions { } - def patmatchPat { + def patmatchPat(): 
Unit = { lazy val Z1 = { println("forced "); "lazy Z1" } print("lazy val in case: ") val t: Option[String] = Some("lazy Z1") @@ -44,7 +44,7 @@ object TestExpressions { } } - def ifcond { + def ifcond(): Unit = { lazy val z1 = { println("forced "); "lazy z1" } print("lazy val in if condition: ") if (z1 == "lazy z1") @@ -56,7 +56,7 @@ object TestExpressions { lazy val LazyField = { println("forced LazyField"); "LazyField" } - def testPatMatchField { + def testPatMatchField(): Unit = { print("lazy val in pattern: ") val t: Option[String] = Some("LazyField") t match { @@ -69,7 +69,7 @@ object TestExpressions { } lazy val (x, y) = ({print("x"); "x"}, {print("y"); "y"}) - def testPatLazyVal { + def testPatLazyVal(): Unit = { println("lazy val with patterns:") print("x and y: ") println("(" + x + ", " + y + ")") @@ -78,18 +78,18 @@ object TestExpressions { println("(" + x1 + ", " + y1 + ")") } - def test { - patmatchScrut - patmatchCase - patmatchPat - ifcond - testPatMatchField - testPatLazyVal + def test(): Unit = { + patmatchScrut() + patmatchCase() + patmatchPat() + ifcond() + testPatMatchField() + testPatLazyVal() } } object Test extends App { - TestExpressions.test + TestExpressions.test() } diff --git a/test/files/run/lazy-locals-2.scala b/test/files/run/lazy-locals-2.scala index d6c33cffcb85..7faa4d9bf45b 100644 --- a/test/files/run/lazy-locals-2.scala +++ b/test/files/run/lazy-locals-2.scala @@ -13,11 +13,11 @@ import Logs._ class C { def getInt : Int = { log("getInt"); 1 } def getString: String = { log("getString"); "s" } - def getUnit : Unit = { log("getUnit") } + def getUnit(): Unit = { log("getUnit") } lazy val t1 = getInt lazy val t2 = getString - lazy val t3 = getUnit + lazy val t3 = getUnit() checkLog("") def m1 = { @@ -28,8 +28,8 @@ class C { lazy val t1 = getString t1 + t1 } - def m3 = { - lazy val t1 = getUnit + def m3() = { + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("") @@ -44,7 +44,7 @@ class C { t1 + t1 } val vl3 = { - lazy val t1 = 
getUnit + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("getInt:getString:getUnit:():()") @@ -59,7 +59,7 @@ class C { t1 + t1 } var vr3 = { - lazy val t1 = getUnit + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("getInt:getString:getUnit:():()") @@ -74,7 +74,7 @@ class C { t1 + t1 } lazy val lvl3 = { - lazy val t1 = getUnit + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("") @@ -83,7 +83,7 @@ class C { { lazy val t1 = getInt lazy val t2 = getString - lazy val t3 = getUnit + lazy val t3 = getUnit() log(t1 + t1) log(t2 + t2) @@ -100,7 +100,7 @@ class C { log(m1); log(m1) log(m2); log(m2) - log(m3); log(m3) + log(m3()); log(m3()) checkLog("getInt:2:getInt:2:getString:ss:getString:ss:getUnit:():():():getUnit:():():()") log(vl1); log(vl1) @@ -123,11 +123,11 @@ class C { trait T { def getInt : Int = { log("getInt"); 1 } def getString: String = { log("getString"); "s" } - def getUnit : Unit = { log("getUnit") } + def getUnit(): Unit = { log("getUnit") } lazy val t1 = getInt lazy val t2 = getString - lazy val t3 = getUnit + lazy val t3 = getUnit() checkLog("") def m1 = { @@ -138,8 +138,8 @@ trait T { lazy val t1 = getString t1 + t1 } - def m3 = { - lazy val t1 = getUnit + def m3() = { + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("") @@ -154,7 +154,7 @@ trait T { t1 + t1 } val vl3 = { - lazy val t1 = getUnit + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("getInt:getString:getUnit:():()") @@ -169,7 +169,7 @@ trait T { t1 + t1 } var vr3 = { - lazy val t1 = getUnit + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("getInt:getString:getUnit:():()") @@ -184,7 +184,7 @@ trait T { t1 + t1 } lazy val lvl3 = { - lazy val t1 = getUnit + lazy val t1 = getUnit() log(t1); log(t1) } checkLog("") @@ -193,7 +193,7 @@ trait T { { lazy val t1 = getInt lazy val t2 = getString - lazy val t3 = getUnit + lazy val t3 = getUnit() log(t1 + t1) log(t2 + t2) @@ -210,7 +210,7 @@ trait T { log(m1); log(m1) log(m2); log(m2) - log(m3); log(m3) + log(m3()); log(m3()) 
checkLog("getInt:2:getInt:2:getString:ss:getString:ss:getUnit:():():():getUnit:():():()") log(vl1); log(vl1) @@ -281,8 +281,8 @@ object Test { lzyComputeMethods == expComputeMethods, s"wrong lzycompute methods. expected:\n$expComputeMethods\nfound:\n$lzyComputeMethods") - val fields = c.getClass.getDeclaredFields.toList.sortBy(_.getName).map(_.toString) - val expFields = List( + val fields: List[String] = c.getClass.getDeclaredFields.toList.sortBy(_.getName).map(_.toString) + val expFields = List[String]( "private volatile byte C.bitmap$0", "private int C.lvl1", "private java.lang.String C.lvl2", @@ -305,9 +305,22 @@ object Test { d.run() val dFields = d.getClass.getDeclaredFields.toList.sortBy(_.getName).map(_.toString) - assert( - dFields == expFields.map(_.replaceAll(" C.", " D.")), - s"wrong fields. expected:\n$expFields\nfound:\n$fields") + val expDFields = List[String]( + "private volatile byte D.bitmap$0", + "private int D.lvl1", + "private java.lang.String D.lvl2", + "private scala.runtime.BoxedUnit D.lvl3", + "private int D.t1", + "private java.lang.String D.t2", + "private scala.runtime.BoxedUnit D.t3", + "private int D.vl1", + "private java.lang.String D.vl2", + "private scala.runtime.BoxedUnit D.vl3", + "private int D.vr1", + "private java.lang.String D.vr2", + "private scala.runtime.BoxedUnit D.vr3") + assert(dFields == expDFields, + s"wrong fields. 
expected:\n$expDFields\nfound:\n$dFields") val d1 = new D1 diff --git a/test/files/run/lazy-locals.check b/test/files/run/lazy-locals.check index 0a3a85ead682..d1cc754f2cf6 100644 --- a/test/files/run/lazy-locals.check +++ b/test/files/run/lazy-locals.check @@ -1,6 +1,3 @@ -lazy-locals.scala:153: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - { - ^ forced lazy val q q = 10 forced lazy val t diff --git a/test/files/run/lazy-locals.scala b/test/files/run/lazy-locals.scala index b28b28e35d9d..de4fff901bbb 100644 --- a/test/files/run/lazy-locals.scala +++ b/test/files/run/lazy-locals.scala @@ -1,4 +1,4 @@ - +//> using options -Xmaxwarns 0 object Test extends App { lazy val w = 10 @@ -130,15 +130,15 @@ object Test extends App { t } - def testRecVal { + def testRecVal: Unit = { lazy val twos: List[Int] = 2 :: twos - lazy val ones: Stream[Int] = Stream.cons(1, ones) + lazy val ones: LazyList[Int] = LazyList.cons(1, ones) println("First 5 elements of ones: " + ones.take(5).toList) } // should compile without error - def testMutualRecVal { + def testMutualRecVal: Unit = { lazy val odd: Int = 1 + even lazy val even: Int = 1 + odd diff --git a/test/files/run/lazy-traits.scala b/test/files/run/lazy-traits.scala index 38207672dba7..bd07398f2bd9 100644 --- a/test/files/run/lazy-traits.scala +++ b/test/files/run/lazy-traits.scala @@ -110,7 +110,7 @@ class OverflownLazyFields extends Object with A { } trait PrivateLazy { - private lazy val str = "z1" + @annotation.unused private lazy val str = "z1" } /** Test successful compilation. 
*/ @@ -155,7 +155,7 @@ class MixedUnitLazy extends UnitLazy with UnitLazyT { object Test extends App { - def test(name: String, v: A) { + def test(name: String, v: A): Unit = { println(name + " test:") println(v) println(v) diff --git a/test/files/run/lazy_local_labels.scala b/test/files/run/lazy_local_labels.scala index f4a1cdf68988..db77c3de1e1d 100644 --- a/test/files/run/lazy_local_labels.scala +++ b/test/files/run/lazy_local_labels.scala @@ -1,6 +1,6 @@ // should print HI nine times to indicate the lazy val has been re-initialized on every iteration object Test extends App { - def fooDo: Unit = { + def fooDo(): Unit = { var i = 3 do { lazy val x = { println("HI"); 1 } @@ -8,7 +8,7 @@ object Test extends App { } while(i > 0) } - def fooWhile: Unit = { + def fooWhile(): Unit = { var i = 3 while(i > 0) { lazy val x = { println("HI"); 1 } @@ -22,7 +22,7 @@ object Test extends App { } - fooWhile - fooDo + fooWhile() + fooDo() fooTail(3) } diff --git a/test/files/run/lisp.check b/test/files/run/lisp.check index 64053f26d0b3..29db0101a26b 100644 --- a/test/files/run/lisp.check +++ b/test/files/run/lisp.check @@ -11,7 +11,7 @@ faculty(10) = 3628800 faculty(10) = 3628800 foobar = ("a" "bc" "def" "z") -List('lambda, List('x), List('+, List('*, 'x, 'x), 1)) +List(Symbol(lambda), List(Symbol(x)), List(Symbol(+), List(Symbol(*), Symbol(x), Symbol(x)), 1)) (lambda (x) (+ (* x x) 1)) ( '(1 2 3)) = (1 2 3) diff --git a/test/files/run/lisp.scala b/test/files/run/lisp.scala index 162c7d259997..ad0817571ba3 100644 --- a/test/files/run/lisp.scala +++ b/test/files/run/lisp.scala @@ -12,7 +12,7 @@ class LispTokenizer(s: String) extends Iterator[String] { while (i < s.length() && s.charAt(i) <= ' ') i += 1 i < s.length() } - def next: String = + def next(): String = if (hasNext) { val start = i if (isDelimiter(s charAt i)) i += 1 @@ -66,7 +66,7 @@ object LispCaseClasses extends Lisp { case class STR(x: String) extends Data { override def toString() = "\"" + x + "\""; } - case class 
FUN(f: List[Data] => Data) extends Data { + case class FUN(f: PartialFunction[List[Data], Data]) extends Data { override def toString() = ""; } @@ -245,10 +245,10 @@ object LispCaseClasses extends Lisp { else SYM(token) } def parseList: Data = { - val token = it.next; + val token = it.next(); if (token == ")") NIL() else CONS(parse(token), parseList) } - parse(it.next) + parse(it.next()) } def lisp2string(d: Data): String = d.toString(); @@ -263,7 +263,7 @@ object LispAny extends Lisp { type Data = Any; - case class Lambda(f: List[Data] => Data); + case class Lambda(f: PartialFunction[List[Data], Data]); var curexp: Data = null; var trace: Boolean = false; @@ -303,17 +303,17 @@ object LispAny extends Lisp { def asBoolean(x: Data): Boolean = x != 0 def normalize(x: Data): Data = x match { - case 'and :: x :: y :: Nil => - normalize('if :: x :: y :: 0 :: Nil) - case 'or :: x :: y :: Nil => - normalize('if :: x :: 1 :: y :: Nil) - case 'def :: (name :: args) :: body :: expr :: Nil => - normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil) - case 'cond :: ('else :: expr :: Nil) :: rest => + case Symbol("and") :: x :: y :: Nil => + normalize(Symbol("if") :: x :: y :: 0 :: Nil) + case Symbol("or") :: x :: y :: Nil => + normalize(Symbol("if") :: x :: 1 :: y :: Nil) + case Symbol("def") :: (name :: args) :: body :: expr :: Nil => + normalize(Symbol("def") :: name :: (Symbol("lambda") :: args :: body :: Nil) :: expr :: Nil) + case Symbol("cond") :: (Symbol("else") :: expr :: Nil) :: rest => normalize(expr); - case 'cond :: (test :: expr :: Nil) :: rest => - normalize('if :: test :: expr :: ('cond :: rest) :: Nil) - case 'cond :: 'else :: expr :: Nil => + case Symbol("cond") :: (test :: expr :: Nil) :: rest => + normalize(Symbol("if") :: test :: expr :: (Symbol("cond") :: rest) :: Nil) + case Symbol("cond") :: Symbol("else") :: expr :: Nil => normalize(expr) case h :: t => normalize(h) :: asList(normalize(t)) @@ -342,15 +342,15 @@ object LispAny extends 
Lisp { def eval1(x: Data, env: Environment): Data = x match { case Symbol(name) => env lookup name - case 'def :: Symbol(name) :: y :: z :: Nil => + case Symbol("def") :: Symbol(name) :: y :: z :: Nil => eval(z, env.extendRec(name, (env1 => eval(y, env1)))) - case 'val :: Symbol(name) :: y :: z :: Nil => + case Symbol("val") :: Symbol(name) :: y :: z :: Nil => eval(z, env.extend(name, eval(y, env))) - case 'lambda :: params :: y :: Nil => + case Symbol("lambda") :: params :: y :: Nil => mkLambda(params, y, env) - case 'if :: c :: y :: z :: Nil => + case Symbol("if") :: c :: y :: z :: Nil => if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env) - case 'quote :: y :: Nil => + case Symbol("quote") :: y :: Nil => y case y :: z => apply(eval(y, env), z map (x => eval(x, env))) @@ -437,10 +437,10 @@ object LispAny extends Lisp { else Symbol(token) } def parseList: List[Data] = { - val token = it.next; + val token = it.next(); if (token == ")") Nil else parse(token) :: parseList } - parse(it.next) + parse(it.next()) } } @@ -453,18 +453,18 @@ class LispUser(lisp: Lisp) { def evaluate(s: String) = lisp2string(lisp.evaluate(s)); - def run = { + def run() = { Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]); Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))"))); - Console.println; + Console.println() Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))")); Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))")); Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))")); Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))")); Console.println("(null? '()) = " + evaluate("(null? 
(quote()))")); - Console.println; + Console.println() Console.println("faculty(10) = " + evaluate( "(def (faculty n) " + @@ -500,7 +500,7 @@ class LispUser(lisp: Lisp) { "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " + "(val v4 (foo 6) " + "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))")); - Console.println; + Console.println() } } @@ -508,9 +508,9 @@ class LispUser(lisp: Lisp) { // Main object Test { - def main(args: Array[String]) { - new LispUser(LispCaseClasses).run; - new LispUser(LispAny).run; + def main(args: Array[String]): Unit = { + new LispUser(LispCaseClasses).run() + new LispUser(LispAny).run() () } } diff --git a/test/files/run/list-apply-eval.scala b/test/files/run/list-apply-eval.scala index 6e012cdcd6ec..2078f15a86a4 100644 --- a/test/files/run/list-apply-eval.scala +++ b/test/files/run/list-apply-eval.scala @@ -4,9 +4,9 @@ object Test { counter += 1 counter.toString } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { //List.apply is subject to an optimisation in cleanup - //ensure that the arguments are evaluated in the currect order + //ensure that the arguments are evaluated in the correct order // Rewritten to: // val myList: List = new collection.immutable.::(Test.this.next(), new collection.immutable.::(Test.this.next(), new collection.immutable.::(Test.this.next(), scala.collection.immutable.Nil))); val myList = List(next, next, next) diff --git a/test/files/run/list_map.scala b/test/files/run/list_map.scala index fba3aae22878..59acf09c6886 100644 --- a/test/files/run/list_map.scala +++ b/test/files/run/list_map.scala @@ -1,7 +1,7 @@ import collection.immutable.ListMap object Test { - def testImmutableMinus() { + def testImmutableMinus(): Unit = { val empty = ListMap.empty[Int, Int] val m0 = ListMap(1 -> 1, 2 -> 2) @@ -20,7 +20,7 @@ object Test { assert ((empty - 1) eq empty) } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { testImmutableMinus() } } diff --git 
a/test/files/run/lists-run.check b/test/files/run/lists-run.check new file mode 100644 index 000000000000..163e64900075 --- /dev/null +++ b/test/files/run/lists-run.check @@ -0,0 +1,4 @@ +lists-run.scala:177: warning: match may not be exhaustive. +It would fail on the following input: List((x: String forSome x not in "foo")) + def show(xs: List[String]) = xs match { + ^ diff --git a/test/files/run/lists-run.scala b/test/files/run/lists-run.scala index 05767b900545..0760ac980fd2 100644 --- a/test/files/run/lists-run.scala +++ b/test/files/run/lists-run.scala @@ -5,8 +5,8 @@ import scala.language.postfixOps object Test { - def main(args: Array[String]) { - Test_multiset.run() // multiset operations: union, intersect, diff + def main(args: Array[String]): Unit = { + Test_multiset.run() // multiset operations: :::, intersect, diff Test1.run() //count, exists, filter, .. Test2.run() //#468 Test3.run() //#1691 @@ -16,13 +16,13 @@ object Test { } object Test_multiset { - def run() { + def run(): Unit = { def isSubListOf[A](thiz: List[A], that: List[A]): Boolean = thiz forall (that contains _) val xs = List(1, 1, 2) val ys = List(1, 2, 2, 3) - assert(List(1, 1, 2, 1, 2, 2, 3) == (xs union ys), "xs_union_ys") - assert(List(1, 2, 2, 3, 1, 1, 2) == (ys union xs), "ys_union_xs") + assert(List(1, 1, 2, 1, 2, 2, 3) == (xs ::: ys), "xs_:::_ys") + assert(List(1, 2, 2, 3, 1, 1, 2) == (ys ::: xs), "ys_:::_xs") assert(List(1, 2) == (xs intersect ys), "xs_intersect_ys") assert(List(1, 2) == (ys intersect xs), "ys_intersect_xs") assert(List(1) == (xs diff ys), "xs_diff_ys") @@ -30,8 +30,8 @@ object Test_multiset { assert(isSubListOf(xs filterNot (ys contains), xs diff ys), "xs_subset_ys") val zs = List(0, 1, 1, 2, 2, 2) - assert(List(0, 1, 1, 2, 2, 2, 1, 2, 2, 3) == (zs union ys), "zs_union_ys") - assert(List(1, 2, 2, 3, 0, 1, 1, 2, 2, 2) == (ys union zs), "ys_union_zs") + assert(List(0, 1, 1, 2, 2, 2, 1, 2, 2, 3) == (zs ::: ys), "zs_:::_ys") + assert(List(1, 2, 2, 3, 0, 1, 1, 2, 2, 2) 
== (ys ::: zs), "ys_:::_zs") assert(List(1, 2, 2) == (zs intersect ys), "zs_intersect_ys") assert(List(1, 2, 2) == (ys intersect zs), "ys_intersect_zs") assert(List(0, 1, 2) == (zs diff ys), "zs_diff_ys") @@ -39,8 +39,8 @@ object Test_multiset { assert(isSubListOf(zs filterNot (ys contains), zs diff ys), "xs_subset_ys") val ws = List(2) - assert(List(2, 1, 2, 2, 3) == (ws union ys), "ws_union_ys") - assert(List(1, 2, 2, 3, 2) == (ys union ws), "ys_union_ws") + assert(List(2, 1, 2, 2, 3) == (ws ::: ys), "ws_:::_ys") + assert(List(1, 2, 2, 3, 2) == (ys ::: ws), "ys_:::_ws") assert(List(2) == (ws intersect ys), "ws_intersect_ys") assert(List(2) == (ys intersect ws), "ys_intersect_ws") assert(List() == (ws diff ys), "ws_diff_ys") @@ -48,8 +48,8 @@ object Test_multiset { assert(isSubListOf(ws filterNot (ys contains), ws diff ys), "ws_subset_ys") val vs = List(3, 2, 2, 1) - assert(List(1, 1, 2, 3, 2, 2, 1) == (xs union vs), "xs_union_vs") - assert(List(3, 2, 2, 1, 1, 1, 2) == (vs union xs), "vs_union_xs") + assert(List(1, 1, 2, 3, 2, 2, 1) == (xs ::: vs), "xs_:::_vs") + assert(List(3, 2, 2, 1, 1, 1, 2) == (vs ::: xs), "vs_:::_xs") assert(List(1, 2) == (xs intersect vs), "xs_intersect_vs") assert(List(2, 1) == (vs intersect xs), "vs_intersect_xs") assert(List(1) == (xs diff vs), "xs_diff_vs") @@ -80,12 +80,12 @@ min cardinality(ys, e))) } object Test1 { - def run() { + def run(): Unit = { val xs1 = List(1, 2, 3) val xs2 = List('a', 'b') val xs3 = List(List(1, 2), List(4, 5)) val xs4 = List(2, 4, 6, 8) - val xs5 = List(List(3, 4), List(3), List(4, 5)) + @annotation.unused val xs5 = List(List(3, 4), List(3), List(4, 5)) { val n1 = xs1 count { e => e % 2 != 0 } @@ -132,9 +132,9 @@ object Test1 { } object Test2 { - def run() { + def run(): Unit = { val xs1 = List(1, 2, 3) - val xs2 = List(0) + @annotation.unused val xs2 = List(0) val ys1 = xs1 ::: List(4) assert(List(1, 2, 3, 4) == ys1, "check_:::") @@ -150,7 +150,7 @@ object Test2 { } object Test3 { - def run() { + def 
run(): Unit = { try { List.range(1, 10, 0) } catch { @@ -162,7 +162,7 @@ object Test3 { } object Test4 { - def run() { + def run(): Unit = { assert(List(1,2,3).endsWith(List(2,3))) assert(!List(1,2,3).endsWith(List(1,3))) assert(List(1,2,3).endsWith(List())) @@ -179,7 +179,7 @@ object Test5 { case List(x) => x.toString case Nil => "Nil" } - def run() { + def run(): Unit = { assert(show(List()) == "Nil") assert(show(List("a")) == "a") assert(show(List("foo", "b")) == "List(b)") diff --git a/test/files/run/literal-type-varargs.scala b/test/files/run/literal-type-varargs.scala new file mode 100644 index 000000000000..3b78af727dc1 --- /dev/null +++ b/test/files/run/literal-type-varargs.scala @@ -0,0 +1,4 @@ +object Test extends App { + val x = List.apply[1](1) + assert(x == List(1)) +} diff --git a/test/files/run/literals-parsing.check b/test/files/run/literals-parsing.check new file mode 100644 index 000000000000..e1b3cac77718 --- /dev/null +++ b/test/files/run/literals-parsing.check @@ -0,0 +1,14 @@ +[[syntax trees at end of parser]] // newSource1.scala +[0:161]package [0:0] { + [0:161]abstract trait T extends [8:161][8]scala.AnyRef { + [8]def $init$() = [8]{ + [8]() + }; + [16:34]def i: [23:26]Int = [29:34]1024; + [41:69]def j: [48:52]Long = [55:69][55:63][55:61]1024L.$times([64:69]1024); + [99:114]def f = [107:114]3.14; + [121:138]def d = [129:138]3.14E-19; + [146:155]def z = [154:155]0 + } +} + diff --git a/test/files/run/literals-parsing.scala b/test/files/run/literals-parsing.scala new file mode 100644 index 000000000000..dde2de6023b2 --- /dev/null +++ b/test/files/run/literals-parsing.scala @@ -0,0 +1,23 @@ +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + override def extraSettings: String = + s"-usejavacp -Vprint-pos -Vprint:parser -Ystop-after:parser -cp ${testOutput.path}" + + // test/files/pos/t6124.scala + override def code = """ + trait T { + def i: Int = 1_024 + def j: Long = 1_024L * 1_024 + //def k = 1'024 + + def f = 
3_14e-2 + def d = 3_14E-2_1 + + def z = 0 + } + """.trim + + override def show(): Unit = compile() +} diff --git a/test/files/run/literals.check b/test/files/run/literals.check index dccfec7afc1e..207239e62d64 100644 --- a/test/files/run/literals.check +++ b/test/files/run/literals.check @@ -1,12 +1,12 @@ -literals.scala:35: warning: Octal escape literals are deprecated, use \u0061 instead. - check_success("\"\\141\\142\" == \"ab\"", "\141\142", "ab") - ^ -literals.scala:35: warning: Octal escape literals are deprecated, use \u0062 instead. - check_success("\"\\141\\142\" == \"ab\"", "\141\142", "ab") - ^ -literals.scala:38: warning: Octal escape literals are deprecated, use \u0000 instead. - "\0x61\0x62".getBytes(io.Codec.UTF8.charSet) sameElements Array[Byte](0, 120, 54, 49, 0, 120, 54, 50), - ^ -literals.scala:38: warning: Octal escape literals are deprecated, use \u0000 instead. - "\0x61\0x62".getBytes(io.Codec.UTF8.charSet) sameElements Array[Byte](0, 120, 54, 49, 0, 120, 54, 50), - ^ +literals.scala:137: warning: Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead [quickfixable] + check_success("1l == 1L", 1l, 1L) + ^ +literals.scala:138: warning: Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead [quickfixable] + check_success("1L == 1l", 1L, 1l) + ^ +literals.scala:139: warning: Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead [quickfixable] + check_success("1.asInstanceOf[Long] == 1l", 1.asInstanceOf[Long], 1l) + ^ +literals.scala:186: warning: Lowercase el for long is not recommended because it is easy to confuse with numeral 1; use uppercase L instead [quickfixable] + check_success("1l.asInstanceOf[Double] == 1.0", 1l.asInstanceOf[Double], 1.0) + ^ diff --git a/test/files/run/literals.scala b/test/files/run/literals.scala index 0d9566480b48..06e6cacf4fb1 100644 --- 
a/test/files/run/literals.scala +++ b/test/files/run/literals.scala @@ -1,4 +1,5 @@ -// scalac: -deprecation +//> using options -deprecation +// //############################################################################ // Literals //############################################################################ @@ -7,15 +8,7 @@ object Test { - /* I add a couple of Unicode identifier tests here "temporarily" */ - - def \u03b1\u03c1\u03b5\u03c4\u03b7 = "alpha rho epsilon tau eta" - - case class GGG(i: Int) { - def \u03b1\u03b1(that: GGG) = i + that.i - } - - def check_success[A](name: String, closure: => A, expected: A) { + def check_success[A](name: String, closure: => A, expected: A): Unit = { val res: Option[String] = try { val actual: A = closure @@ -27,17 +20,21 @@ object Test { for (e <- res) println(s"test $name $e") } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { // char + + //unicode escapes escape in char literals check_success("'\\u0024' == '$'", '\u0024', '$') check_success("'\\u005f' == '_'", '\u005f', '_') - check_success("65.asInstanceOf[Char] == 'A'", 65.asInstanceOf[Char], 'A') - check_success("\"\\141\\142\" == \"ab\"", "\141\142", "ab") - //check_success("\"\\0x61\\0x62\".trim() == \"x61\\0x62\"", "\0x61\0x62".substring(1), "x61\0x62") - check_success(""""\0x61\0x62".getBytes == Array(0, 120, ...)""", - "\0x61\0x62".getBytes(io.Codec.UTF8.charSet) sameElements Array[Byte](0, 120, 54, 49, 0, 120, 54, 50), - true) + //unicode escapes escape in interpolations + check_success("""s"\\u0024" == "$"""", s"\u0024", "$") + check_success("s\"\"\"\\u0024\"\"\" == \"$\"", s"""\u0024""", "$") + + + //Int#asInstanceOf[Char] gets the char at the codepoint + check_success("65.asInstanceOf[Char] == 'A'", 65.asInstanceOf[Char], 'A') + // boolean check_success("(65 : Byte) == 'A'", (65: Byte) == 'A', true) // contrib #176 @@ -62,6 +59,80 @@ object Test { check_success("0x80000000 == -2147483648", 0x80000000, -2147483648) 
check_success("0xffffffff == -1", 0xffffffff, -1) + check_success("0b_0000 == 0x0", 0b_0000, 0x0) + check_success("0b_0001 == 0x1", 0b_0001, 0x1) + check_success("0b_0010 == 0x2", 0b_0010, 0x2) + check_success("0b_0011 == 0x3", 0b_0011, 0x3) + check_success("0b_0100 == 0x4", 0b_0100, 0x4) + check_success("0b_0101 == 0x5", 0b_0101, 0x5) + check_success("0b_0110 == 0x6", 0b_0110, 0x6) + check_success("0b_0111 == 0x7", 0b_0111, 0x7) + check_success("0b_1000 == 0x8", 0b_1000, 0x8) + check_success("0b_1001 == 0x9", 0b_1001, 0x9) + check_success("0b_1010 == 0xa", 0b_1010, 0xa) + check_success("0b_1011 == 0xb", 0b_1011, 0xb) + check_success("0b_1100 == 0xc", 0b_1100, 0xc) + check_success("0b_1101 == 0xd", 0b_1101, 0xd) + check_success("0b_1110 == 0xe", 0b_1110, 0xe) + check_success("0b_1111 == 0xf", 0b_1111, 0xf) + + check_success("0B_1000 == 0x8", 0B_1000, 0x8) + + assert(0b0001_0000 == 16) + assert(0b0010_0000 == 32) + assert(0b0100_0000 == 64) + assert(0b1000_0000 == 128) + + assert(0b0001_0000_0000 == 256) + assert(0b0010_0000_0000 == 512) + assert(0b0100_0000_0000 == 1024) + assert(0b1000_0000_0000 == 2048) + + assert(0b0001_0000_0000_0000 == 4096) + assert(0b0010_0000_0000_0000 == 8192) + assert(0b0100_0000_0000_0000 == 16384) + assert(0b1000_0000_0000_0000 == 32768) + + assert(0b0001__0000_0000_0000_0000 == 65536) + assert(0b0010__0000_0000_0000_0000 == 131072) + assert(0b0100__0000_0000_0000_0000 == 262144) + assert(0b1000__0000_0000_0000_0000 == 524288) + + assert(0b0001_0000__0000_0000_0000_0000 == 1048576) + assert(0b0010_0000__0000_0000_0000_0000 == 2097152) + assert(0b0100_0000__0000_0000_0000_0000 == 4194304) + assert(0b1000_0000__0000_0000_0000_0000 == 8388608) + + assert(0b0001_0000_0000__0000_0000_0000_0000 == 16777216) + assert(0b0010_0000_0000__0000_0000_0000_0000 == 33554432) + assert(0b0100_0000_0000__0000_0000_0000_0000 == 67108864) + assert(0b1000_0000_0000__0000_0000_0000_0000 == 134217728) + + assert(0b0001_0000_0000_0000__0000_0000_0000_0000 == 
268435456) + assert(0b0010_0000_0000_0000__0000_0000_0000_0000 == 536870912) + assert(0b0100_0000_0000_0000__0000_0000_0000_0000 == 1073741824) + assert(0b1000_0000_0000_0000__0000_0000_0000_0000L == 2147483648L) + + assert(0b1000_0000_0000_0000__0000_0000_0000_0000 == -2147483648) // Signed ! + assert(0b1111_1111_1111_1111__1111_1111_1111_1111 == -1) + + // Randomly generated using https://numbergenerator.org/random-32-bit-binary-number#!numbers=10&length=32&addfilters= + // Converted to signed decimal using https://onlinetoolz.net/unsigned-signed#base=2&bits=32 + assert(0b0110_1000_1100_0101_0010_1100_0100_0011 == 1757752387) + assert(0b1111_0101_0100_1011_0101_1000_0011_0110 == -179611594) + assert(0b0000_0011_0000_1010_1010_0011_0000_0000 == 51028736) + assert(0b0101_0010_1111_1001_0100_0101_1101_1011 == 1392068059) + assert(0b1001_0000_1111_1001_1011_1101_1100_1111 == -1862681137) + + assert(0B0000_0111_1110_1100_0111_1100_1000_0010 == 132938882) + assert(0B0000_1011_0111_1011_0001_1010_1010_1000 == 192617128) + assert(0B1100_1100_1000_1010_1111_0111_0100_1101 == -863307955) + assert(0B1000_0000_0001_0010_0001_1001_0101_1110 == -2146297506) + assert(0B1110_0000_0110_1100_0111_0110_1100_1111 == -529762609) + + assert(0b0010_1001_0101_1001__1010_0100_1000_1010__1001_1000_0011_0111__1100_1011_0111_0101L == 2979593543648529269L) + assert(0b1101_1110_0100_1000__0010_1101_1010_0010__0111_1000_1111_1001__1010_1001_0101_1000L == -2429641823128802984L) + // long check_success("1l == 1L", 1l, 1L) check_success("1L == 1l", 1L, 1l) @@ -75,7 +146,7 @@ object Test { 0xffffffffffffffffL, -1L) // see JLS at address: - // http://java.sun.com/docs/books/jls/second_edition/html/lexical.doc.html#230798 + // https://java.sun.com/docs/books/jls/second_edition/html/lexical.doc.html#230798 // float check_success("1e1f == 10.0f", 1e1f, 10.0f) @@ -93,7 +164,7 @@ object Test { 1.0000001f) check_success("3.4028235E38f == Float.MaxValue", 3.4028235E38f, Float.MaxValue) 
check_success("1.asInstanceOf[Float] == 1.0", 1.asInstanceOf[Float], 1.0f) - check_success("1l.asInstanceOf[Float] == 1.0", 1l.asInstanceOf[Float], 1.0f) + check_success("1L.asInstanceOf[Float] == 1.0", 1L.asInstanceOf[Float], 1.0f) // double check_success("1e1 == 10.0", 1e1, 10.0) @@ -115,10 +186,6 @@ object Test { check_success("1l.asInstanceOf[Double] == 1.0", 1l.asInstanceOf[Double], 1.0) check_success("\"\".length()", "\u001a".length(), 1) - - val ggg = GGG(1) \u03b1\u03b1 GGG(2) - check_success("ggg == 3", ggg, 3) - } } diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check deleted file mode 100644 index 5c4bb29d3838..000000000000 --- a/test/files/run/lub-visibility.check +++ /dev/null @@ -1,9 +0,0 @@ - -scala> // should infer List[scala.collection.immutable.Seq[Nothing]] - -scala> // but reverted that for scala/bug#5534. - -scala> val x = List(List(), Vector()) -x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with Serializable] = List(List(), Vector()) - -scala> :quit diff --git a/test/files/run/lub-visibility.scala b/test/files/run/lub-visibility.scala deleted file mode 100644 index b1606d58019b..000000000000 --- a/test/files/run/lub-visibility.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.tools.partest.ReplTest -object Test extends ReplTest { - def code = """ - |// should infer List[scala.collection.immutable.Seq[Nothing]] - |// but reverted that for scala/bug#5534. 
- |val x = List(List(), Vector()) - """.stripMargin -} diff --git a/test/files/run/macro-abort-fresh/Macros_1.scala b/test/files/run/macro-abort-fresh/Macros_1.scala index 91f1dfb9238d..f97460a47427 100644 --- a/test/files/run/macro-abort-fresh/Macros_1.scala +++ b/test/files/run/macro-abort-fresh/Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -12,5 +12,5 @@ object Impls { } object Macros { - def foo = macro Impls.impl + def foo: Nothing = macro Impls.impl } diff --git a/test/files/run/macro-abort-fresh/Test_2.scala b/test/files/run/macro-abort-fresh/Test_2.scala index a6b9272e6dc5..9a34c62e0f13 100644 --- a/test/files/run/macro-abort-fresh/Test_2.scala +++ b/test/files/run/macro-abort-fresh/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-auto-duplicate/Macros_1.scala b/test/files/run/macro-auto-duplicate/Macros_1.scala index 2c910e6af7ef..96bb6fda3de5 100644 --- a/test/files/run/macro-auto-duplicate/Macros_1.scala +++ b/test/files/run/macro-auto-duplicate/Macros_1.scala @@ -8,10 +8,10 @@ object Macros { def defAndUseX(rhs: Tree) = { Block(List(ValDef(NoMods, newTermName("x"), TypeTree(), rhs)), x) } - val xi4 = defAndUseX(Literal(Constant(4))) - val xs2 = defAndUseX(Literal(Constant("2"))) - c.Expr[String](Apply(Select(xi4, newTermName("$plus")), List(xs2))) + val xs2 = defAndUseX(Literal(Constant("4"))) + val xi4 = defAndUseX(Literal(Constant(2))) + c.Expr[String](Apply(Select(xs2, newTermName("$plus")), List(xi4))) } - def foo = macro impl -} \ No newline at end of file + def foo: String = macro impl +} diff --git a/test/files/run/macro-auto-duplicate/Test_2.scala b/test/files/run/macro-auto-duplicate/Test_2.scala index f697da60200f..cff569bd81b1 100644 --- 
a/test/files/run/macro-auto-duplicate/Test_2.scala +++ b/test/files/run/macro-auto-duplicate/Test_2.scala @@ -1,3 +1,3 @@ object Test extends App { println(Macros.foo) -} \ No newline at end of file +} diff --git a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala index 18b78d1deef1..827d78bd00c4 100644 --- a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala +++ b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-basic-ma-md-mi/Macros_2.scala b/test/files/run/macro-basic-ma-md-mi/Macros_2.scala index 9f2fa1e802a8..ec74bfcda8c0 100644 --- a/test/files/run/macro-basic-ma-md-mi/Macros_2.scala +++ b/test/files/run/macro-basic-ma-md-mi/Macros_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { object Shmacros { def foo(x: Int): Int = macro Impls.foo diff --git a/test/files/run/macro-basic-ma-md-mi/Test_3.scala b/test/files/run/macro-basic-ma-md-mi/Test_3.scala index 71bbc55f8e12..011eb1f57453 100644 --- a/test/files/run/macro-basic-ma-md-mi/Test_3.scala +++ b/test/files/run/macro-basic-ma-md-mi/Test_3.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros.Shmacros._ println(foo(2) + Macros.bar(2) * new Macros().quux(4)) diff --git a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala index 4f9a195d31d6..1a2c3dc2891d 100644 --- a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala +++ b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-basic-ma-mdmi/Test_2.scala 
b/test/files/run/macro-basic-ma-mdmi/Test_2.scala index 71bbc55f8e12..011eb1f57453 100644 --- a/test/files/run/macro-basic-ma-mdmi/Test_2.scala +++ b/test/files/run/macro-basic-ma-mdmi/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros.Shmacros._ println(foo(2) + Macros.bar(2) * new Macros().quux(4)) diff --git a/test/files/run/macro-basic-mamd-mi/Impls_1.scala b/test/files/run/macro-basic-mamd-mi/Impls_1.scala index 07ad0479c4d0..6f2e9ac206b1 100644 --- a/test/files/run/macro-basic-mamd-mi/Impls_1.scala +++ b/test/files/run/macro-basic-mamd-mi/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala b/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala index d21a2e0ca772..fda54bb5c06a 100644 --- a/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala +++ b/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { object Shmacros { def foo(x: Int): Int = macro Impls.foo diff --git a/test/files/run/macro-blackbox-materialization/Macros_1.scala b/test/files/run/macro-blackbox-materialization/Macros_1.scala index ea8d1bed1496..cb1186e39706 100644 --- a/test/files/run/macro-blackbox-materialization/Macros_1.scala +++ b/test/files/run/macro-blackbox-materialization/Macros_1.scala @@ -13,4 +13,4 @@ object Macros { import c.universe._ reify(C[T](c.literal(weakTypeOf[T].toString).splice)) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-blackbox-materialization/Test_2.scala b/test/files/run/macro-blackbox-materialization/Test_2.scala index 001ff9aea8f4..370ae23b80c8 100644 --- a/test/files/run/macro-blackbox-materialization/Test_2.scala +++ b/test/files/run/macro-blackbox-materialization/Test_2.scala @@ -2,4 +2,4 @@ object Test extends 
App { println(implicitly[C[Int]]) println(implicitly[C[String]]) println(implicitly[C[Nothing]]) -} \ No newline at end of file +} diff --git a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala index 699b183a41ff..dc76a9c23bd3 100644 --- a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala +++ b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.language.experimental.macros import scala.reflect.macros.blackbox.{Context => BlackboxContext} import scala.reflect.macros.whitebox.{Context => WhiteboxContext} diff --git a/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala b/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala index 544550a53790..95b43dd41f5d 100644 --- a/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala +++ b/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.language.experimental.macros object Macros { diff --git a/test/files/run/macro-bundle-context-alias/Macros_1.scala b/test/files/run/macro-bundle-context-alias/Macros_1.scala index 354c5e0d9208..d7e03072553b 100644 --- a/test/files/run/macro-bundle-context-alias/Macros_1.scala +++ b/test/files/run/macro-bundle-context-alias/Macros_1.scala @@ -35,4 +35,4 @@ class C { def whitebox: C = macro Module.WhiteboxBundle.impl def refinedWhitebox: C = macro Module.RefinedWhiteboxBundle.impl override def toString = "C" -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-context-alias/Test_2.scala b/test/files/run/macro-bundle-context-alias/Test_2.scala index de499cc11170..408c4b8df4b5 100644 --- a/test/files/run/macro-bundle-context-alias/Test_2.scala +++ b/test/files/run/macro-bundle-context-alias/Test_2.scala @@ -3,4 +3,4 @@ object Test extends App { println(new C().refinedBlackbox) println(new C().whitebox) println(new C().refinedWhitebox) -} \ No newline 
at end of file +} diff --git a/test/files/run/macro-bundle-context-refinement/Macros_1.scala b/test/files/run/macro-bundle-context-refinement/Macros_1.scala index d3a5d179c60d..970370f10c73 100644 --- a/test/files/run/macro-bundle-context-refinement/Macros_1.scala +++ b/test/files/run/macro-bundle-context-refinement/Macros_1.scala @@ -16,4 +16,4 @@ class C { def blackbox: C = macro BlackboxBundle.impl def whitebox: C = macro WhiteboxBundle.impl override def toString = "C" -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-context-refinement/Test_2.scala b/test/files/run/macro-bundle-context-refinement/Test_2.scala index 43d641adeb56..81871036c28f 100644 --- a/test/files/run/macro-bundle-context-refinement/Test_2.scala +++ b/test/files/run/macro-bundle-context-refinement/Test_2.scala @@ -1,4 +1,4 @@ object Test extends App { println(new C().blackbox) println(new C().whitebox) -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-repl.check b/test/files/run/macro-bundle-repl.check index ce5c980f6ea5..32ba587bb9ed 100644 --- a/test/files/run/macro-bundle-repl.check +++ b/test/files/run/macro-bundle-repl.check @@ -6,16 +6,16 @@ scala> import scala.reflect.macros.blackbox.Context import scala.reflect.macros.blackbox.Context scala> class Bar(val c: Context) { def impl = { import c.universe._; c.Expr[Unit](q"()") } };def bar: Unit = macro Bar.impl -defined class Bar -defined term macro bar: Unit +class Bar +def bar: Unit scala> bar scala> class Foo(val c: Context) { def impl = { import c.universe._; c.Expr[Unit](q"()") } } -defined class Foo +class Foo scala> def foo: Unit = macro Foo.impl -defined term macro foo: Unit +def foo: Unit scala> foo diff --git a/test/files/run/macro-bundle-static/Impls_Macros_1.scala b/test/files/run/macro-bundle-static/Impls_Macros_1.scala index 0142e5d94547..bc0250294fb8 100644 --- a/test/files/run/macro-bundle-static/Impls_Macros_1.scala +++ 
b/test/files/run/macro-bundle-static/Impls_Macros_1.scala @@ -5,13 +5,13 @@ object Enclosing { class Impl(val c: Context) { def mono = { import c.universe._; c.Expr[Unit](q"()") } def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") } - def weird = macro mono + def weird: Unit = macro mono } } object Macros { - def mono = macro Enclosing.Impl.mono - def poly[T] = macro Enclosing.Impl.poly[T] + def mono: Unit = macro Enclosing.Impl.mono + def poly[T]: String = macro Enclosing.Impl.poly[T] } package pkg { @@ -19,12 +19,12 @@ package pkg { class Impl(val c: Context) { def mono = { import c.universe._; c.Expr[Boolean](q"true") } def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") } - def weird = macro mono + def weird: Boolean = macro mono } } object Macros { - def mono = macro Enclosing.Impl.mono - def poly[T] = macro Enclosing.Impl.poly[T] + def mono: Boolean = macro Enclosing.Impl.mono + def poly[T]: String = macro Enclosing.Impl.poly[T] } -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-static/Test_2.scala b/test/files/run/macro-bundle-static/Test_2.scala index e35260cdce69..cf0aa733070e 100644 --- a/test/files/run/macro-bundle-static/Test_2.scala +++ b/test/files/run/macro-bundle-static/Test_2.scala @@ -5,4 +5,4 @@ object Test extends App { println(pkg.Macros.mono) println(pkg.Macros.poly[Int]) println(new pkg.Enclosing.Impl(???).weird) -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala b/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala index 33f6b01c2075..e714c7c8262a 100644 --- a/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala +++ b/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala @@ -1,26 +1,26 @@ -// scalac: -language:experimental.macros +import language.experimental.macros import scala.reflect.macros.blackbox.Context class Impl(val c: 
Context) { def mono = { import c.universe._; c.Expr[Unit](q"()") } def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") } - def weird = macro mono + def weird: Unit = macro mono } object Macros { - def mono = macro Impl.mono - def poly[T] = macro Impl.poly[T] + def mono: Unit = macro Impl.mono + def poly[T]: String = macro Impl.poly[T] } package pkg { class Impl(val c: Context) { def mono = { import c.universe._; c.Expr[Boolean](q"true") } def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") } - def weird = macro mono + def weird: Boolean = macro mono } object Macros { - def mono = macro Impl.mono - def poly[T] = macro Impl.poly[T] + def mono: Boolean = macro Impl.mono + def poly[T]: String = macro Impl.poly[T] } } diff --git a/test/files/run/macro-bundle-toplevel/Test_2.scala b/test/files/run/macro-bundle-toplevel/Test_2.scala index b3a0c36a50ca..061a18363ddc 100644 --- a/test/files/run/macro-bundle-toplevel/Test_2.scala +++ b/test/files/run/macro-bundle-toplevel/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println(Macros.mono) println(Macros.poly[Int]) diff --git a/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala b/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala index 5e1b11895d2b..b8edefe2d6f6 100644 --- a/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala +++ b/test/files/run/macro-bundle-whitebox-decl/Impls_Macros_1.scala @@ -4,23 +4,23 @@ import scala.reflect.macros.whitebox.Context class Impl(val c: Context) { def mono = { import c.universe._; c.Expr[Unit](q"()") } def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") } - def weird = macro mono + def weird: Unit = macro mono } object Macros { - def mono = macro Impl.mono - def poly[T] = macro Impl.poly[T] + def mono: Unit = macro Impl.mono + def poly[T]: 
String = macro Impl.poly[T] } package pkg { class Impl(val c: Context) { def mono = { import c.universe._; c.Expr[Boolean](q"true") } def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") } - def weird = macro mono + def weird: Boolean = macro mono } object Macros { - def mono = macro Impl.mono - def poly[T] = macro Impl.poly[T] + def mono: Boolean = macro Impl.mono + def poly[T]: String = macro Impl.poly[T] } -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-whitebox-decl/Test_2.scala b/test/files/run/macro-bundle-whitebox-decl/Test_2.scala index 195fb4926212..061a18363ddc 100644 --- a/test/files/run/macro-bundle-whitebox-decl/Test_2.scala +++ b/test/files/run/macro-bundle-whitebox-decl/Test_2.scala @@ -5,4 +5,4 @@ object Test extends App { println(pkg.Macros.mono) println(pkg.Macros.poly[Int]) println(new pkg.Impl(???).weird) -} \ No newline at end of file +} diff --git a/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala b/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala index de1863418ee9..e58594697dcd 100644 --- a/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala +++ b/test/files/run/macro-bundle-whitebox-use-raw/Macros_1.scala @@ -43,7 +43,7 @@ class FundepMaterializationBundle(val c: Context) { } def mkFrom() = { - if (fields.length == 0) Literal(Constant(Unit)) + if (fields.length == 0) Literal(Constant(())) else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim)))) } diff --git a/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala b/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala index 3a8170025184..4b15dc4965ad 100644 --- a/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala +++ b/test/files/run/macro-bundle-whitebox-use-raw/Test_2.scala @@ -5,7 +5,7 @@ object Test extends App { def foo[C, L](c: C)(implicit iso: 
FundepMaterialization[C, L]): L = iso.to(c) locally { val equiv = foo(Foo(23, "foo", true)) - def typed[T](t: => T) {} + def typed[T](t: => T): Unit = {} typed[(Int, String, Boolean)](equiv) println(equiv) } @@ -15,5 +15,6 @@ object Test extends App { 42 match { case ExtractorMacro(x) => println(x) + case x => throw new MatchError(x) } } diff --git a/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala b/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala index de1863418ee9..e58594697dcd 100644 --- a/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala +++ b/test/files/run/macro-bundle-whitebox-use-refined/Macros_1.scala @@ -43,7 +43,7 @@ class FundepMaterializationBundle(val c: Context) { } def mkFrom() = { - if (fields.length == 0) Literal(Constant(Unit)) + if (fields.length == 0) Literal(Constant(())) else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim)))) } diff --git a/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala b/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala index 3a8170025184..4b15dc4965ad 100644 --- a/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala +++ b/test/files/run/macro-bundle-whitebox-use-refined/Test_2.scala @@ -5,7 +5,7 @@ object Test extends App { def foo[C, L](c: C)(implicit iso: FundepMaterialization[C, L]): L = iso.to(c) locally { val equiv = foo(Foo(23, "foo", true)) - def typed[T](t: => T) {} + def typed[T](t: => T): Unit = {} typed[(Int, String, Boolean)](equiv) println(equiv) } @@ -15,5 +15,6 @@ object Test extends App { 42 match { case ExtractorMacro(x) => println(x) + case x => throw new MatchError(x) } } diff --git a/test/files/run/macro-def-path-dependent/Dummy.scala b/test/files/run/macro-def-path-dependent/Dummy.scala index 8bd441b74e46..0f0f07040ceb 100644 --- a/test/files/run/macro-def-path-dependent/Dummy.scala +++ b/test/files/run/macro-def-path-dependent/Dummy.scala 
@@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println("it works") } diff --git a/test/files/run/macro-def-path-dependent/Test_1.scala b/test/files/run/macro-def-path-dependent/Test_1.scala index 5b72882bbfe0..ffa41fb9d88e 100644 --- a/test/files/run/macro-def-path-dependent/Test_1.scala +++ b/test/files/run/macro-def-path-dependent/Test_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros // NOTE: blocked by scala/bug#8049 // package test1 diff --git a/test/files/run/macro-def-path-dependent/Test_2.scala b/test/files/run/macro-def-path-dependent/Test_2.scala index ea4fb917efc5..6556992fd9ad 100644 --- a/test/files/run/macro-def-path-dependent/Test_2.scala +++ b/test/files/run/macro-def-path-dependent/Test_2.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros package test2 +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context trait Exprs { diff --git a/test/files/run/macro-def-path-dependent/Test_3.scala b/test/files/run/macro-def-path-dependent/Test_3.scala index cfb3d6c508a1..18142286ce9b 100644 --- a/test/files/run/macro-def-path-dependent/Test_3.scala +++ b/test/files/run/macro-def-path-dependent/Test_3.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros package test3 +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context trait Exprs { diff --git a/test/files/run/macro-def-path-dependent/Test_4.scala b/test/files/run/macro-def-path-dependent/Test_4.scala index 349f554b03f2..71d17e795613 100644 --- a/test/files/run/macro-def-path-dependent/Test_4.scala +++ b/test/files/run/macro-def-path-dependent/Test_4.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros package test4 +import scala.language.experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context import scala.reflect.api.Universe diff --git a/test/files/run/macro-def-path-dependent/Test_5.scala 
b/test/files/run/macro-def-path-dependent/Test_5.scala index 9a2ab153c94a..f556a01d3bf4 100644 --- a/test/files/run/macro-def-path-dependent/Test_5.scala +++ b/test/files/run/macro-def-path-dependent/Test_5.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros package test56 import scala.reflect.runtime.universe._ diff --git a/test/files/run/macro-def-path-dependent/Test_6.scala b/test/files/run/macro-def-path-dependent/Test_6.scala index a555b840c796..44223d76eea8 100644 --- a/test/files/run/macro-def-path-dependent/Test_6.scala +++ b/test/files/run/macro-def-path-dependent/Test_6.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros package test56 +import scala.language.experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context import scala.reflect.api.Universe diff --git a/test/files/run/macro-divergence-spurious/Test_2.scala b/test/files/run/macro-divergence-spurious/Test_2.scala index dcc4593335cb..e06175f306bc 100644 --- a/test/files/run/macro-divergence-spurious/Test_2.scala +++ b/test/files/run/macro-divergence-spurious/Test_2.scala @@ -1,3 +1,3 @@ object Test extends App { println(implicitly[Complex[Foo]]) -} \ No newline at end of file +} diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check index 4d5eb78c0faa..136e30c8a8d3 100644 --- a/test/files/run/macro-duplicate.check +++ b/test/files/run/macro-duplicate.check @@ -1,3 +1,3 @@ -Test_2.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +Test_2.scala:5: warning: a pure expression does nothing in statement position Macros.foo ^ diff --git a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala index 70e73eb03fbb..25cdad211f92 100644 --- a/test/files/run/macro-duplicate/Impls_Macros_1.scala +++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: 
-language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { @@ -26,5 +26,5 @@ object Macros { c.Expr[Unit](Block(cdef1 :: Nil, Literal(Constant(())))) } - def foo = macro impl + def foo: Unit = macro impl } diff --git a/test/files/run/macro-duplicate/Test_2.scala b/test/files/run/macro-duplicate/Test_2.scala index 6f173f8b03b4..fa943d692e65 100644 --- a/test/files/run/macro-duplicate/Test_2.scala +++ b/test/files/run/macro-duplicate/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.concurrent._ import ExecutionContext.Implicits.global diff --git a/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala b/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala index 26ed64d8c387..610e1fb47d1d 100644 --- a/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala +++ b/test/files/run/macro-enclosingowner-detectvar/Macros_1.scala @@ -11,4 +11,4 @@ object Macros { } def foo: Int = macro impl -} \ No newline at end of file +} diff --git a/test/files/run/macro-enclosingowner-detectvar/Test_2.scala b/test/files/run/macro-enclosingowner-detectvar/Test_2.scala index 58521d94291d..990020093f6d 100644 --- a/test/files/run/macro-enclosingowner-detectvar/Test_2.scala +++ b/test/files/run/macro-enclosingowner-detectvar/Test_2.scala @@ -20,4 +20,4 @@ object Test extends App { lazy val d3: Int = Macros.foo lazy val d4: Int = Predef.identity(Predef.identity(Macros.foo)) d1; d2; d3; d4 -} \ No newline at end of file +} diff --git a/test/files/run/macro-enclosingowner-sbt/Macros_1.scala b/test/files/run/macro-enclosingowner-sbt/Macros_1.scala index a98a984861ac..4500d4e5e4f8 100644 --- a/test/files/run/macro-enclosingowner-sbt/Macros_1.scala +++ b/test/files/run/macro-enclosingowner-sbt/Macros_1.scala @@ -11,4 +11,4 @@ object Macros { } def foo: Int = macro impl -} \ No newline at end of file +} diff --git a/test/files/run/macro-enclosingowner-sbt/Test_2.scala 
b/test/files/run/macro-enclosingowner-sbt/Test_2.scala index 58521d94291d..990020093f6d 100644 --- a/test/files/run/macro-enclosingowner-sbt/Test_2.scala +++ b/test/files/run/macro-enclosingowner-sbt/Test_2.scala @@ -20,4 +20,4 @@ object Test extends App { lazy val d3: Int = Macros.foo lazy val d4: Int = Predef.identity(Predef.identity(Macros.foo)) d1; d2; d3; d4 -} \ No newline at end of file +} diff --git a/test/files/run/macro-enclosures/Impls_Macros_1.scala b/test/files/run/macro-enclosures/Impls_Macros_1.scala index 1a6adf90525b..b7e72b38e4ff 100644 --- a/test/files/run/macro-enclosures/Impls_Macros_1.scala +++ b/test/files/run/macro-enclosures/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { diff --git a/test/files/run/macro-enclosures/Test_2.scala b/test/files/run/macro-enclosures/Test_2.scala index b3927e8d3f2d..c0dbf997cfc1 100644 --- a/test/files/run/macro-enclosures/Test_2.scala +++ b/test/files/run/macro-enclosures/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { test.Test.test } diff --git a/test/files/run/macro-expand-implicit-argument/Macros_1.scala b/test/files/run/macro-expand-implicit-argument/Macros_1.scala index 0bcf5b503b16..87717b0adbbb 100644 --- a/test/files/run/macro-expand-implicit-argument/Macros_1.scala +++ b/test/files/run/macro-expand-implicit-argument/Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import annotation.tailrec import scala.math.{min, max} import scala.{specialized => spec} @@ -20,7 +19,7 @@ object Macros { * * "As seen on scala-internals!" 
*/ - def array[A](as:A*)(implicit ct: ClassTag[A]) = macro arrayMacro[A] + def array[A](as:A*)(implicit ct: ClassTag[A]): Array[A] = macro arrayMacro[A] /** * Takes in something like: diff --git a/test/files/run/macro-expand-implicit-argument/Test_2.scala b/test/files/run/macro-expand-implicit-argument/Test_2.scala index e98c4d84a9bf..545117868f0a 100644 --- a/test/files/run/macro-expand-implicit-argument/Test_2.scala +++ b/test/files/run/macro-expand-implicit-argument/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros._ println(array(1, 2, 3).toList) diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala index 3cbafa9b3537..f63067cfc71b 100644 --- a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala +++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala index faa1175869de..a126bd813d46 100644 --- a/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala +++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { - implicit val x = 42 + implicit val x: Int = 42 def foo(implicit x: Int): Unit = macro Impls.foo foo } diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala index 01696c2e9359..d72db872e891 100644 --- a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala +++ 
b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala index 73b5db248e64..3c9edf59f167 100644 --- a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala +++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { import scala.language.implicitConversions implicit def foo(x: String): Option[Int] = macro Impls.foo diff --git a/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala index e32ece28d2e8..ffabb9563ab7 100644 --- a/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala +++ b/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala index 45785eac5ac1..7a902d9bb1cb 100644 --- a/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala +++ b/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { implicit def foo: Int = macro Impls.foo def bar(implicit x: Int) = println(x) diff --git a/test/files/run/macro-expand-implicit-macro-is-view.check b/test/files/run/macro-expand-implicit-macro-is-view.check index 0cfbf08886fc..abacfceb8aae 100644 --- a/test/files/run/macro-expand-implicit-macro-is-view.check 
+++ b/test/files/run/macro-expand-implicit-macro-is-view.check @@ -1 +1,5 @@ +Macros_Test_2.scala:11: warning: view bounds are deprecated; use an implicit parameter instead. + example: instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)` + def bar[T <% Option[Int]](x: T) = println(x) + ^ 2 diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala index aeceee5a5b55..85857b8a17c6 100644 --- a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala +++ b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala @@ -1,3 +1,4 @@ +//> using options -deprecation import scala.reflect.macros.blackbox.Context object Impls { @@ -6,4 +7,4 @@ object Impls { val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, TermName("toInt")))) c.Expr[Option[Int]](body) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala index 0d99f32d7ed1..db8742b67eb1 100644 --- a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala +++ b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala @@ -1,3 +1,4 @@ +//> using options -deprecation object Macros { import scala.language.experimental.macros diff --git a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala index 63221fc044f8..3e816c88e721 100644 --- a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala +++ b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala b/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala index 13e815799c21..553b239fd95c 
100644 --- a/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala +++ b/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def foo(x: Int)(y: Int): Unit = macro Impls.foo foo(40)(2) diff --git a/test/files/run/macro-expand-nullary-generic/Impls_1.scala b/test/files/run/macro-expand-nullary-generic/Impls_1.scala index 6c203abc4d0e..f3e2f3712547 100644 --- a/test/files/run/macro-expand-nullary-generic/Impls_1.scala +++ b/test/files/run/macro-expand-nullary-generic/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala b/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala index 9d68667cd8fd..d3016c7d1601 100644 --- a/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala +++ b/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo1[T]: Unit = macro Impls.fooNullary[T] def foo2[T](): Unit = macro Impls.fooEmpty[T] diff --git a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala index a2d89f52cad3..72e9147eb50b 100644 --- a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala +++ b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala b/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala index 67c430635956..534528b56936 100644 --- 
a/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala +++ b/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo1: Unit = macro Impls.fooNullary def foo2(): Unit = macro Impls.fooEmpty diff --git a/test/files/run/macro-expand-overload/Impls_1.scala b/test/files/run/macro-expand-overload/Impls_1.scala index f478e112b3d0..0d1673d20edf 100644 --- a/test/files/run/macro-expand-overload/Impls_1.scala +++ b/test/files/run/macro-expand-overload/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-overload/Macros_Test_2.scala b/test/files/run/macro-expand-overload/Macros_Test_2.scala index 95871a659100..4a3f1eb9bf7a 100644 --- a/test/files/run/macro-expand-overload/Macros_Test_2.scala +++ b/test/files/run/macro-expand-overload/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(x: String): Unit = macro Impls.fooObjectString def foo(x: Int): Unit = macro Impls.fooObjectInt diff --git a/test/files/run/macro-expand-override/Impls_1.scala b/test/files/run/macro-expand-override/Impls_1.scala index 3ca09592bc3d..7d37f50a19cf 100644 --- a/test/files/run/macro-expand-override/Impls_1.scala +++ b/test/files/run/macro-expand-override/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-override/Macros_Test_2.scala b/test/files/run/macro-expand-override/Macros_Test_2.scala index a1f4109c41e3..04960c06f360 100644 --- a/test/files/run/macro-expand-override/Macros_Test_2.scala +++ b/test/files/run/macro-expand-override/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import 
scala.language.experimental.macros class B { def foo(x: String): Unit = macro Impls.fooBString def foo(x: Int): Unit = macro Impls.fooBInt diff --git a/test/files/run/macro-expand-recursive/Impls_1.scala b/test/files/run/macro-expand-recursive/Impls_1.scala index 0ddd2116e9e8..087a5dc45cf1 100644 --- a/test/files/run/macro-expand-recursive/Impls_1.scala +++ b/test/files/run/macro-expand-recursive/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-recursive/Macros_Test_2.scala b/test/files/run/macro-expand-recursive/Macros_Test_2.scala index e5257383cdea..2d37177a0d6d 100644 --- a/test/files/run/macro-expand-recursive/Macros_Test_2.scala +++ b/test/files/run/macro-expand-recursive/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo: Unit = macro Impls.foo def fooFoo: Unit = macro Impls.fooFoo diff --git a/test/files/run/macro-expand-tparams-bounds/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala index ca9d935dd9b4..95aaa1c3d71b 100644 --- a/test/files/run/macro-expand-tparams-bounds/Impls_1.scala +++ b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls1 { diff --git a/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala index f6779a41eac5..25295723375e 100644 --- a/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala +++ b/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros1 { def foo[U <: String]: Unit = macro Impls1.foo[U] } diff --git a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala 
b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala index fc16e495fc8b..958bc4b98777 100644 --- a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala +++ b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala index 4ca1ddf6b117..4cb48ff8a24f 100644 --- a/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala +++ b/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def foo[U]: Unit = macro Impls.foo[U] foo[Int] diff --git a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala index b829208401ba..0bf2a1cda62e 100644 --- a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala +++ b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala index 74015bd72575..3d737429ac29 100644 --- a/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala +++ b/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def foo[U](x: U): Unit = macro Impls.foo[U] foo(42) diff --git a/test/files/run/macro-expand-tparams-prefix/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala index 9f178dfb32bb..067d7c216095 100644 --- 
a/test/files/run/macro-expand-tparams-prefix/Impls_1.scala +++ b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context @@ -34,7 +34,7 @@ object Impls345 { object Macros4 { class D[T] { class C[U] { - def foo[V] = macro Impls345.foo[T, U, V] + def foo[V]: Unit = macro Impls345.foo[T, U, V] } } } diff --git a/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala index f13513593c83..a84df1c52e2a 100644 --- a/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala +++ b/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros1 { class C[T] { def foo[U](x: U): Unit = macro Impls1.foo[U] diff --git a/test/files/run/macro-expand-unapply-a.check b/test/files/run/macro-expand-unapply-a.check index 7c2976e51e03..6ff2f5529339 100644 --- a/test/files/run/macro-expand-unapply-a.check +++ b/test/files/run/macro-expand-unapply-a.check @@ -1,2 +1,10 @@ +Test_2.scala:4: warning: match may not be exhaustive. +It would fail on the following inputs: List(_), Nil + List(1, 2) match { case UnapplyMacro(x, y) => println((x, y)) } + ^ +Test_2.scala:5: warning: match may not be exhaustive. 
+It would fail on the following inputs: List(_), Nil + List(1, 2, 3) match { case UnapplyMacro(x, y, z) => println((x, y, z)) } + ^ (1,2) (1,2,3) diff --git a/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala b/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala index 34f89ccdc634..8097d10bb073 100644 --- a/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala +++ b/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala @@ -1,8 +1,8 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.whitebox.Context object Helper { - def unapplySeq[T](x: List[T]): Option[Seq[T]] = List.unapplySeq[T](x) + def unapplySeq[T](x: List[T]): Option[Seq[T]] = Some(x) } object Macros { diff --git a/test/files/run/macro-expand-unapply-a/Test_2.scala b/test/files/run/macro-expand-unapply-a/Test_2.scala index 878647900ed4..b401561caa18 100644 --- a/test/files/run/macro-expand-unapply-a/Test_2.scala +++ b/test/files/run/macro-expand-unapply-a/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import Macros._ object Test extends App { diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check index 2709b57038d6..b957f84c415f 100644 --- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check +++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check @@ -1,4 +1,4 @@ reflective compilation has failed: -no `: _*' annotation allowed here -(such annotations are only allowed in arguments to *-parameters) +Sequence argument type annotation `: _*` cannot be used here: +the single parameter has type Any which is not a repeated parameter type diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala index 7b4520d6673d..3adbc77f0019 100644 --- 
a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala +++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala index 22baa17ed837..801e39c09fbc 100644 --- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala +++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(xs: Int*): Unit = macro Impls.foo } diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala index d7bca5eb40fb..892cd8333f74 100644 --- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala +++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala @@ -1,10 +1,9 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { def foo(c: Context)(xs: c.Expr[Int]*) = { import c.universe._ - val stripped_xs = xs map (_.tree) toList match { + val stripped_xs = xs.map(_.tree).toList match { case List(Typed(stripped, Ident(wildstar))) if wildstar == typeNames.WILDCARD_STAR => List(stripped) case _ => ??? 
} diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala index e45b40b14a2f..58a90a0ccd0c 100644 --- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala +++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(xs: Int*): Unit = macro Impls.foo } diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala index 8178b73a6852..60e9aef81054 100644 --- a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala +++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala index e45b40b14a2f..58a90a0ccd0c 100644 --- a/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala +++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(xs: Int*): Unit = macro Impls.foo } diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala index 7b4520d6673d..3adbc77f0019 100644 --- a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala +++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: 
-language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala index 18c4802623a6..274e64942b9f 100644 --- a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala +++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo(xs: Int*): Unit = macro Impls.foo } diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs.check b/test/files/run/macro-expand-varargs-implicit-over-varargs.check index 2c174a8a994d..5cd2a15a3c69 100644 --- a/test/files/run/macro-expand-varargs-implicit-over-varargs.check +++ b/test/files/run/macro-expand-varargs-implicit-over-varargs.check @@ -1 +1 @@ -WrappedArray(1, 2, 3, 4, 5) +ArraySeq(1, 2, 3, 4, 5) diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala index 8178b73a6852..60e9aef81054 100644 --- a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala +++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala index 18c4802623a6..274e64942b9f 100644 --- a/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala +++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { 
def foo(xs: Int*): Unit = macro Impls.foo } diff --git a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala index 7d4853871f8c..99ff8b82d847 100644 --- a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala +++ b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala @@ -1,6 +1,6 @@ -// scalac: -language:experimental.macros -import scala.reflect.runtime.universe._ +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +import scala.reflect.runtime.universe._ object Impls { def foo_targs[T, U: c.WeakTypeTag](c: Context = null)(x: c.Expr[Int] = null) = { @@ -17,5 +17,5 @@ object Impls { } class Macros[T] { - def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U] + def foo_targs[U](x: Int): Unit = macro Impls.foo_targs[T, U] } diff --git a/test/files/run/macro-impl-default-params/Test_2.scala b/test/files/run/macro-impl-default-params/Test_2.scala index e7582c7d5624..7ea89b6208b0 100644 --- a/test/files/run/macro-impl-default-params/Test_2.scala +++ b/test/files/run/macro-impl-default-params/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println("foo_targs:") new Macros[Int]().foo_targs[String](42) diff --git a/test/files/run/macro-impl-relaxed/Macros_1.scala b/test/files/run/macro-impl-relaxed/Macros_1.scala index 420eb2a39997..5eb1b2e98679 100644 --- a/test/files/run/macro-impl-relaxed/Macros_1.scala +++ b/test/files/run/macro-impl-relaxed/Macros_1.scala @@ -11,4 +11,4 @@ object Macros { def fooTU(x: Int): Int = macro implTU def fooUT(x: Int): Int = macro implUT def fooTT(x: Int): Int = macro implTT -} \ No newline at end of file +} diff --git a/test/files/run/macro-impl-relaxed/Test_2.scala b/test/files/run/macro-impl-relaxed/Test_2.scala index 2eaeef0fd041..e7d97204c3c7 100644 --- a/test/files/run/macro-impl-relaxed/Test_2.scala +++ b/test/files/run/macro-impl-relaxed/Test_2.scala @@ -3,4 +3,4 
@@ object Test extends App { println(Macros.fooTU(2)) println(Macros.fooUT(2)) println(Macros.fooTT(2)) -} \ No newline at end of file +} diff --git a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala index 54d6e244693c..121c7c042e8f 100644 --- a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala +++ b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -9,5 +9,5 @@ object Impls { } object Macros { - def foo(x: Int) = macro Impls.foo + def foo(x: Int): Unit = macro Impls.foo } diff --git a/test/files/run/macro-impl-rename-context/Test_2.scala b/test/files/run/macro-impl-rename-context/Test_2.scala index b4e65e5003c6..74f60329a3c5 100644 --- a/test/files/run/macro-impl-rename-context/Test_2.scala +++ b/test/files/run/macro-impl-rename-context/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println("foo") Macros.foo(42) diff --git a/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala index 4b9f2a9fdfcd..705defb18fe8 100644 --- a/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala +++ b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala b/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala index 819904ae695f..d2234f21e4d0 100644 --- a/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala +++ b/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros 
object Macros { def foo: Unit = macro Impls.foo[String] } diff --git a/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala b/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala index e44ca638926a..c3866a069919 100644 --- a/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala +++ b/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala b/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala index 4ca1ddf6b117..4cb48ff8a24f 100644 --- a/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala +++ b/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def foo[U]: Unit = macro Impls.foo[U] foo[Int] diff --git a/test/files/run/macro-implicit-decorator/Macros_1.scala b/test/files/run/macro-implicit-decorator/Macros_1.scala index 2dad66c3d8ab..e3dd5c891c73 100644 --- a/test/files/run/macro-implicit-decorator/Macros_1.scala +++ b/test/files/run/macro-implicit-decorator/Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.whitebox trait Derivation[A] diff --git a/test/files/run/macro-implicit-decorator/Test_2.scala b/test/files/run/macro-implicit-decorator/Test_2.scala index 3066c11d460c..88c60d91b09f 100644 --- a/test/files/run/macro-implicit-decorator/Test_2.scala +++ b/test/files/run/macro-implicit-decorator/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros // https://github.com/scala/bug/issues/10398 class CustomClass @@ -6,9 +5,9 @@ class CustomClass trait MyTC[A] object MyTC { - implicit val forInt = new MyTC[Int] {} - 
implicit def forList[A](implicit a: Derivation[MyTC[A]]) = new MyTC[List[A]] {} - implicit def forCustomClass(implicit a: Derivation[MyTC[List[Boolean]]]) = new MyTC[CustomClass] {} + implicit val forInt: MyTC[Int] = new MyTC[Int] {} + implicit def forList[A](implicit a: Derivation[MyTC[A]]): MyTC[List[A]] = new MyTC[List[A]] {} + implicit def forCustomClass(implicit a: Derivation[MyTC[List[Boolean]]]): MyTC[CustomClass] = new MyTC[CustomClass] {} } object Test extends App { diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala index c8b9db5d335d..b9590cbb126b 100644 --- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala +++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala index a6b9272e6dc5..9a34c62e0f13 100644 --- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala +++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala index 5d0160feb40b..765fea7a64cb 100644 --- a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala +++ b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import 
scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -10,5 +10,5 @@ object Impls { } object Macros { - def foo = macro Impls.foo + def foo: Int = macro Impls.foo } diff --git a/test/files/run/macro-invalidret-nontypeable/Test_2.scala b/test/files/run/macro-invalidret-nontypeable/Test_2.scala index 42a85b303fbb..b1ad7ca5596d 100644 --- a/test/files/run/macro-invalidret-nontypeable/Test_2.scala +++ b/test/files/run/macro-invalidret-nontypeable/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala index b5b38ff7d34c..8cd528155502 100644 --- a/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala +++ b/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -6,5 +6,5 @@ object Impls { } object Macros { - def foo(x: Int) = macro Impls.foo + def foo(x: Int): Int = macro Impls.foo } diff --git a/test/files/run/macro-invalidusage-badret/Test_2.scala b/test/files/run/macro-invalidusage-badret/Test_2.scala index c00b33b9c614..fc71353f5421 100644 --- a/test/files/run/macro-invalidusage-badret/Test_2.scala +++ b/test/files/run/macro-invalidusage-badret/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala index f094a6929fa6..f468034957d3 100644 --- 
a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala +++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -10,5 +10,5 @@ object Impls { } object Macros { - def foo[T](x: T) = macro Impls.foo[T] + def foo[T](x: T): Unit = macro Impls.foo[T] } diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala index a6b9272e6dc5..9a34c62e0f13 100644 --- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala +++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala index 85ac49e75524..ea33e9bcfefb 100644 --- a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala +++ b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -11,5 +11,5 @@ object Impls { } object Macros { - def foo(x: Int)(y: Int) = macro Impls.foo + def foo(x: Int)(y: Int): Unit = macro Impls.foo } diff --git a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala index d37143b2296c..75b8c139d41d 100644 --- a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala +++ 
b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-nonrangepos-args/Macros_1.scala b/test/files/run/macro-nonrangepos-args/Macros_1.scala new file mode 100644 index 000000000000..97b938613c5d --- /dev/null +++ b/test/files/run/macro-nonrangepos-args/Macros_1.scala @@ -0,0 +1,10 @@ +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object Macros { + def impl(c: Context)(x: c.Tree): c.Tree = { + import c.universe._ + Literal(Constant(s"Line: ${x.pos.line}. Width: ${x.pos.end - x.pos.start}.")) + } + def pos(x: Any): String = macro impl +} diff --git a/test/files/run/macro-nonrangepos-args/Test_2.scala b/test/files/run/macro-nonrangepos-args/Test_2.scala new file mode 100644 index 000000000000..8cc5c6ad52d6 --- /dev/null +++ b/test/files/run/macro-nonrangepos-args/Test_2.scala @@ -0,0 +1,7 @@ +//> using options -Yrangepos:false +object Test extends App { + val num = 42 + val pos = Macros.pos(num + 17) + val text = "num + 17" + assert(pos == s"Line: 4. 
Width: ${text.length}.", pos) // position of binary op is always a range +} diff --git a/test/files/run/macro-openmacros.check b/test/files/run/macro-openmacros.check index a3d8ead09728..d923d03544b0 100644 --- a/test/files/run/macro-openmacros.check +++ b/test/files/run/macro-openmacros.check @@ -1,3 +1,3 @@ -List(MacroContext(foo@source-Test_2.scala,line-3,offset=93 +0)) -List(MacroContext(foo@source-Test_2.scala,line-3,offset=93 +1), MacroContext(foo@source-Test_2.scala,line-3,offset=93 +0)) -List(MacroContext(foo@source-Test_2.scala,line-3,offset=93 +2), MacroContext(foo@source-Test_2.scala,line-3,offset=93 +1), MacroContext(foo@source-Test_2.scala,line-3,offset=93 +0)) +List(MacroContext(foo@source-Test_2.scala,line-3,offset=70 +0)) +List(MacroContext(foo@source-Test_2.scala,line-3,offset=70 +1), MacroContext(foo@source-Test_2.scala,line-3,offset=70 +0)) +List(MacroContext(foo@source-Test_2.scala,line-3,offset=70 +2), MacroContext(foo@source-Test_2.scala,line-3,offset=70 +1), MacroContext(foo@source-Test_2.scala,line-3,offset=70 +0)) diff --git a/test/files/run/macro-openmacros/Impls_Macros_1.scala b/test/files/run/macro-openmacros/Impls_Macros_1.scala index 9ad4541f818f..2e2069c89411 100644 --- a/test/files/run/macro-openmacros/Impls_Macros_1.scala +++ b/test/files/run/macro-openmacros/Impls_Macros_1.scala @@ -1,5 +1,7 @@ -// scalac: -Yrangepos:false -language:experimental.macros +//> using options -Yrangepos:false +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +import scala.util.Properties.isWin object Macros { def impl(c: Context): c.Expr[Unit] = { @@ -9,7 +11,6 @@ object Macros { def normalizePaths(s: String) = { val base = (dir.getCanonicalPath + java.io.File.separator).replace('\\', '/') var regex = """\Q%s\E""" format base - val isWin = System.getProperty("os.name", "") startsWith "Windows" if (isWin) regex = "(?i)" + regex s.replace('\\', '/').replaceAll(regex, "") } @@ -22,5 +23,5 @@ object Macros { } } - def 
foo = macro impl + def foo: Unit = macro impl } diff --git a/test/files/run/macro-openmacros/Test_2.scala b/test/files/run/macro-openmacros/Test_2.scala index 090733984c57..976a6c0011e3 100644 --- a/test/files/run/macro-openmacros/Test_2.scala +++ b/test/files/run/macro-openmacros/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:false -language:experimental.macros +//> using options -Yrangepos:false object Test extends App { Macros.foo } diff --git a/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala b/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala index b623d8820ac6..aef45976a9c2 100644 --- a/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala +++ b/test/files/run/macro-parse-position-malformed/Impls_Macros_1.scala @@ -15,4 +15,4 @@ object Macros { c.Expr[String](Literal(Constant(out))) } def foo(): String = macro impl -} \ No newline at end of file +} diff --git a/test/files/run/macro-parse-position.check b/test/files/run/macro-parse-position.check index 3da0320696d2..feceb9fac88f 100644 --- a/test/files/run/macro-parse-position.check +++ b/test/files/run/macro-parse-position.check @@ -1,5 +1,5 @@ false -source-,line-1,offset=4 +RangePosition(, 0, 4, 7) 8 foo bar diff --git a/test/files/run/macro-parse-position/Impls_Macros_1.scala b/test/files/run/macro-parse-position/Impls_Macros_1.scala index a17f0cdbfa63..34a0e6ad2d70 100644 --- a/test/files/run/macro-parse-position/Impls_Macros_1.scala +++ b/test/files/run/macro-parse-position/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:false +//> using options -Yrangepos:false import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-parse-position/Test_2.scala b/test/files/run/macro-parse-position/Test_2.scala index db0bac2510ff..fcf7a1ef9210 100644 --- a/test/files/run/macro-parse-position/Test_2.scala +++ b/test/files/run/macro-parse-position/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: 
-Yrangepos:false +//> using options -Yrangepos:false object Test extends App { println(Macros.foo) } diff --git a/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala b/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala index f80d2bbfe738..98b021631e48 100644 --- a/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala +++ b/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { @@ -6,5 +6,5 @@ object Macros { def foo(c: Context)(x: c.Expr[Any]) = x } - def foo(x: Any) = macro Impls.foo + def foo(x: Any): Any = macro Impls.foo } diff --git a/test/files/run/macro-quasiinvalidbody-c/Test_2.scala b/test/files/run/macro-quasiinvalidbody-c/Test_2.scala index 544c0334504c..cdc8e1d9e47e 100644 --- a/test/files/run/macro-quasiinvalidbody-c/Test_2.scala +++ b/test/files/run/macro-quasiinvalidbody-c/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros._ println(foo(42)) diff --git a/test/files/run/macro-quasiquotes/Macros_1.scala b/test/files/run/macro-quasiquotes/Macros_1.scala index 764542a8709e..58cb46bb7940 100644 --- a/test/files/run/macro-quasiquotes/Macros_1.scala +++ b/test/files/run/macro-quasiquotes/Macros_1.scala @@ -12,4 +12,4 @@ object Macros { def m1: Unit = macro Impls.impl1 def m2: Unit = macro Impls.impl2 def m3: Int = macro Impls.impl3 -} \ No newline at end of file +} diff --git a/test/files/run/macro-range/Common_1.scala b/test/files/run/macro-range/Common_1.scala index fe58cb54e86e..24c106ff0870 100644 --- a/test/files/run/macro-range/Common_1.scala +++ b/test/files/run/macro-range/Common_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context abstract class RangeDefault { @@ -41,7 +40,7 @@ abstract class Utils { } def makeWhile(lname: TermName, cond: 
Tree, body: Tree): Tree = { val continu = Apply(Ident(lname), Nil) - val rhs = If(cond, Block(List(body), continu), Literal(Constant())) + val rhs = If(cond, Block(List(body), continu), Literal(Constant(()))) LabelDef(lname, Nil, rhs) } def makeBinop(left: Tree, op: String, right: Tree): Tree = diff --git a/test/files/run/macro-range/Expansion_Impossible_2.scala b/test/files/run/macro-range/Expansion_Impossible_2.scala index c2d32cedb615..ed5f0f5e7575 100644 --- a/test/files/run/macro-range/Expansion_Impossible_2.scala +++ b/test/files/run/macro-range/Expansion_Impossible_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-range/Expansion_Possible_3.scala b/test/files/run/macro-range/Expansion_Possible_3.scala index f27e80a7caf2..4f3faf20b67a 100644 --- a/test/files/run/macro-range/Expansion_Possible_3.scala +++ b/test/files/run/macro-range/Expansion_Possible_3.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros class Range(val from: Int, val to: Int) extends RangeDefault { override def foreach(f: Int => Unit): Unit = macro Impls.foreach } diff --git a/test/files/run/macro-rangepos-args.check b/test/files/run/macro-rangepos-args.check deleted file mode 100644 index 98dab80fb76d..000000000000 --- a/test/files/run/macro-rangepos-args.check +++ /dev/null @@ -1 +0,0 @@ -Line: 4. Width: 5. 
diff --git a/test/files/run/macro-rangepos-args/Macros_1.scala b/test/files/run/macro-rangepos-args/Macros_1.scala index 41a88375e761..97b938613c5d 100644 --- a/test/files/run/macro-rangepos-args/Macros_1.scala +++ b/test/files/run/macro-rangepos-args/Macros_1.scala @@ -1,4 +1,3 @@ -// scalac: -Yrangepos import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-rangepos-args/Test_2.scala b/test/files/run/macro-rangepos-args/Test_2.scala index 9772030ef755..0e6c5834d18c 100644 --- a/test/files/run/macro-rangepos-args/Test_2.scala +++ b/test/files/run/macro-rangepos-args/Test_2.scala @@ -1,5 +1,6 @@ -// scalac: -Yrangepos object Test extends App { val x = 2 - println(Macros.pos(x + 2)) + val pos = Macros.pos(x + 2 + "42".toString) + val text = """x + 2 + "42".toString""" + assert(pos == s"Line: 3. Width: ${text.length}.", pos) } diff --git a/test/files/run/macro-rangepos-subpatterns/Macros_1.scala b/test/files/run/macro-rangepos-subpatterns/Macros_1.scala index 9f0b7a5dc537..988dfb744053 100644 --- a/test/files/run/macro-rangepos-subpatterns/Macros_1.scala +++ b/test/files/run/macro-rangepos-subpatterns/Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos +// import scala.reflect.macros.whitebox.Context import language.experimental.macros diff --git a/test/files/run/macro-rangepos-subpatterns/Test_2.scala b/test/files/run/macro-rangepos-subpatterns/Test_2.scala index df152c4d2b41..c9b1982c9db2 100644 --- a/test/files/run/macro-rangepos-subpatterns/Test_2.scala +++ b/test/files/run/macro-rangepos-subpatterns/Test_2.scala @@ -1,6 +1,7 @@ -// scalac: -Yrangepos +// object Test extends App { 42 match { case Extractor(a) => println(a) + case x => throw new MatchError(x) } } diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala index 349594c4ed37..1b964837ffc6 100644 --- 
a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala +++ b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -10,5 +10,5 @@ object Impls { } object Macros { - def foo(x: Int) = macro Impls.foo + def foo(x: Int): Int = macro Impls.foo } diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala index e5616c0d7f73..267d1bc7b0ed 100644 --- a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala +++ b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala index 89a818d99c17..e62db783b2eb 100644 --- a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala +++ b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala @@ -6,4 +6,4 @@ object Impls { val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))) c.Expr[Int](body) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala b/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala index 73c2d0592122..c51f1a10c1ba 100644 --- a/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala +++ b/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Test extends App { println(implicitly[WeakTypeTag[Int]]) println(implicitly[WeakTypeTag[List[Int]]]) -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala 
b/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala index 4ba2231d9a55..19070eec8307 100644 --- a/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala +++ b/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[WeakTypeTag[List[T]]]) } fooNoTypeTag[Int] -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala b/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala index 70ca615e1f9b..c32dff18aa68 100644 --- a/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala +++ b/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[WeakTypeTag[List[T]]]) } fooTypeTag[Int] -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala b/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala index ecae4110a8be..af5b924f5fa5 100644 --- a/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala +++ b/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[WeakTypeTag[List[T]]]) } fooTypeTag[Int] -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-basic/Macros_1.scala b/test/files/run/macro-reify-basic/Macros_1.scala index 9f91dc1379ef..9c2fb941899b 100644 --- a/test/files/run/macro-reify-basic/Macros_1.scala +++ b/test/files/run/macro-reify-basic/Macros_1.scala @@ -1,8 +1,8 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { - def foo(s: String) = macro Impls.foo + def foo(s: String): Unit = macro Impls.foo object Impls { def foo(c: Context)(s: c.Expr[String]) = c.universe.reify { diff --git a/test/files/run/macro-reify-basic/Test_2.scala b/test/files/run/macro-reify-basic/Test_2.scala index 
ce2bab33416e..62b52734adf0 100644 --- a/test/files/run/macro-reify-basic/Test_2.scala +++ b/test/files/run/macro-reify-basic/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { Macros.foo("world") } diff --git a/test/files/run/macro-reify-chained1/Impls_Macros_1.scala b/test/files/run/macro-reify-chained1/Impls_Macros_1.scala index 7f877b2729d4..ac975d2d7a15 100644 --- a/test/files/run/macro-reify-chained1/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-chained1/Impls_Macros_1.scala @@ -44,4 +44,4 @@ class Queryable[T]{ } object Queryable{ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-chained1/Test_2.scala b/test/files/run/macro-reify-chained1/Test_2.scala index 2adb07b03528..9e8310bc0fed 100644 --- a/test/files/run/macro-reify-chained1/Test_2.scala +++ b/test/files/run/macro-reify-chained1/Test_2.scala @@ -6,4 +6,4 @@ object Test extends App{ val q : Queryable[Any] = new Queryable[Any] q.map(x => x).map(x => x) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-chained2/Impls_Macros_1.scala b/test/files/run/macro-reify-chained2/Impls_Macros_1.scala index 965b1910446f..2c5a5202e467 100644 --- a/test/files/run/macro-reify-chained2/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-chained2/Impls_Macros_1.scala @@ -44,4 +44,4 @@ class Queryable[T]{ } object Queryable{ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-chained2/Test_2.scala b/test/files/run/macro-reify-chained2/Test_2.scala index 2adb07b03528..9e8310bc0fed 100644 --- a/test/files/run/macro-reify-chained2/Test_2.scala +++ b/test/files/run/macro-reify-chained2/Test_2.scala @@ -6,4 +6,4 @@ object Test extends App{ val q : Queryable[Any] = new Queryable[Any] q.map(x => x).map(x => x) } -} \ No newline at end of file +} diff --git 
a/test/files/run/macro-reify-freevars/Macros_1.scala b/test/files/run/macro-reify-freevars/Macros_1.scala index 789033d74762..db775688bf3e 100644 --- a/test/files/run/macro-reify-freevars/Macros_1.scala +++ b/test/files/run/macro-reify-freevars/Macros_1.scala @@ -1,5 +1,5 @@ -// scalac: -language:experimental.macros package scala.collection.slick +import scala.language.experimental.macros object QueryableMacros{ def map[T:c.WeakTypeTag, S:c.WeakTypeTag] diff --git a/test/files/run/macro-reify-freevars/Test_2.scala b/test/files/run/macro-reify-freevars/Test_2.scala index 9201d9d3a6c0..e2417c32de47 100644 --- a/test/files/run/macro-reify-freevars/Test_2.scala +++ b/test/files/run/macro-reify-freevars/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala index 7f877b2729d4..ac975d2d7a15 100644 --- a/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-nested-a1/Impls_Macros_1.scala @@ -44,4 +44,4 @@ class Queryable[T]{ } object Queryable{ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-a1/Test_2.scala b/test/files/run/macro-reify-nested-a1/Test_2.scala index b99c4c55e4b2..e47570f23f88 100644 --- a/test/files/run/macro-reify-nested-a1/Test_2.scala +++ b/test/files/run/macro-reify-nested-a1/Test_2.scala @@ -1,9 +1,9 @@ object Test extends App{ val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1)) + q.map(e1 => q.map(e2 => e1)) locally { val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1)) + q.map(e1 => q.map(e2 => e1)) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala 
b/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala index 965b1910446f..2c5a5202e467 100644 --- a/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-nested-a2/Impls_Macros_1.scala @@ -44,4 +44,4 @@ class Queryable[T]{ } object Queryable{ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-a2/Test_2.scala b/test/files/run/macro-reify-nested-a2/Test_2.scala index b99c4c55e4b2..e47570f23f88 100644 --- a/test/files/run/macro-reify-nested-a2/Test_2.scala +++ b/test/files/run/macro-reify-nested-a2/Test_2.scala @@ -1,9 +1,9 @@ object Test extends App{ val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1)) + q.map(e1 => q.map(e2 => e1)) locally { val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1)) + q.map(e1 => q.map(e2 => e1)) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala index 7f877b2729d4..ac975d2d7a15 100644 --- a/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-nested-b1/Impls_Macros_1.scala @@ -44,4 +44,4 @@ class Queryable[T]{ } object Queryable{ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-b1/Test_2.scala b/test/files/run/macro-reify-nested-b1/Test_2.scala index b1990363492a..5609d7ca1b38 100644 --- a/test/files/run/macro-reify-nested-b1/Test_2.scala +++ b/test/files/run/macro-reify-nested-b1/Test_2.scala @@ -1,9 +1,9 @@ object Test extends App{ val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1).map(e2=>e1)) + q.map(e1 => q.map(e2 => e1).map(e2 => e1)) locally { val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1).map(e2=>e1)) + q.map(e1 => q.map(e2 => e1).map(e2 => e1)) } 
-} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala index 965b1910446f..2c5a5202e467 100644 --- a/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-nested-b2/Impls_Macros_1.scala @@ -44,4 +44,4 @@ class Queryable[T]{ } object Queryable{ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-nested-b2/Test_2.scala b/test/files/run/macro-reify-nested-b2/Test_2.scala index b1990363492a..5609d7ca1b38 100644 --- a/test/files/run/macro-reify-nested-b2/Test_2.scala +++ b/test/files/run/macro-reify-nested-b2/Test_2.scala @@ -1,9 +1,9 @@ object Test extends App{ val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1).map(e2=>e1)) + q.map(e1 => q.map(e2 => e1).map(e2 => e1)) locally { val q : Queryable[Any] = new Queryable[Any] - q.map(e1 => q.map(e2=>e1).map(e2=>e1)) + q.map(e1 => q.map(e2 => e1).map(e2 => e1)) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala b/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala index cab3efc75538..38ec6f022e1d 100644 --- a/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala +++ b/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-reify-ref-to-packageless/Test_2.scala b/test/files/run/macro-reify-ref-to-packageless/Test_2.scala index ab4f7702eaf2..35ac80bc8c7a 100644 --- a/test/files/run/macro-reify-ref-to-packageless/Test_2.scala +++ b/test/files/run/macro-reify-ref-to-packageless/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def foo: Int = macro Impls.foo 
println(foo) diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala index 82b29a3817c0..c4755a697cfa 100644 --- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -10,5 +10,5 @@ object Impls { } object Macros { - def foo(x: Int) = macro Impls.foo + def foo(x: Int): Int = macro Impls.foo } diff --git a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala index 7cf7a5de37e6..dbc17e7c158a 100644 --- a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala +++ b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => cm} diff --git a/test/files/run/macro-reify-staticXXX/Macros_1.scala b/test/files/run/macro-reify-staticXXX/Macros_1.scala index 12a5d8714b14..61f381d814eb 100644 --- a/test/files/run/macro-reify-staticXXX/Macros_1.scala +++ b/test/files/run/macro-reify-staticXXX/Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object B { override def toString = "object" } @@ -27,7 +27,7 @@ object packageless { } } - def test = macro impl + def test: Any = macro impl } package packageful { @@ -44,6 +44,6 @@ package packageful { } } - def test = macro impl + def test: Any = macro impl } } diff --git a/test/files/run/macro-reify-staticXXX/Test_2.scala b/test/files/run/macro-reify-staticXXX/Test_2.scala index 332f954e5b2e..6e8cc360804c 100644 --- 
a/test/files/run/macro-reify-staticXXX/Test_2.scala +++ b/test/files/run/macro-reify-staticXXX/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { packageless.test packageful.Test.test diff --git a/test/files/run/macro-reify-tagful-a/Macros_1.scala b/test/files/run/macro-reify-tagful-a/Macros_1.scala index 8c4c253db438..af625434bb57 100644 --- a/test/files/run/macro-reify-tagful-a/Macros_1.scala +++ b/test/files/run/macro-reify-tagful-a/Macros_1.scala @@ -1,9 +1,9 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context object Macros { - def foo[T](s: T) = macro Impls.foo[T] + def foo[T](s: T): List[T] = macro Impls.foo[T] object Impls { def foo[T: c.WeakTypeTag](c: Context)(s: c.Expr[T]) = c.universe.reify { diff --git a/test/files/run/macro-reify-tagful-a/Test_2.scala b/test/files/run/macro-reify-tagful-a/Test_2.scala index e1b0f8456b19..910da1bbb1cc 100644 --- a/test/files/run/macro-reify-tagful-a/Test_2.scala +++ b/test/files/run/macro-reify-tagful-a/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { val list: List[String] = Macros.foo("hello world") println(list) diff --git a/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala b/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala index d4150250114a..18065d800e23 100644 --- a/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala @@ -1,8 +1,8 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { - def foo[T](s: T) = macro Impls.foo[T] + def foo[T](s: T): List[T] = macro Impls.foo[T] object Impls { def foo[T](c: Context)(s: c.Expr[T]) = c.universe.reify { diff --git a/test/files/run/macro-reify-tagless-a/Test_2.scala 
b/test/files/run/macro-reify-tagless-a/Test_2.scala index 4e6b14553516..afb418a7550e 100644 --- a/test/files/run/macro-reify-tagless-a/Test_2.scala +++ b/test/files/run/macro-reify-tagless-a/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { //val list: List[String] = Macros.foo("hello world") //println(list) diff --git a/test/files/run/macro-reify-type.check b/test/files/run/macro-reify-type.check index ea5e70e10dc8..ab711f1da025 100644 --- a/test/files/run/macro-reify-type.check +++ b/test/files/run/macro-reify-type.check @@ -1 +1 @@ -[B, That](f: Int => B)(implicit bf: scala.collection.generic.CanBuildFrom[List[Int],B,That])That \ No newline at end of file +[B](f: Int => B): List[B] diff --git a/test/files/run/macro-reify-type/Macros_1.scala b/test/files/run/macro-reify-type/Macros_1.scala index 2f8dd2346b33..1d60068d7695 100644 --- a/test/files/run/macro-reify-type/Macros_1.scala +++ b/test/files/run/macro-reify-type/Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context import scala.reflect.runtime.{universe => ru} diff --git a/test/files/run/macro-reify-type/Test_2.scala b/test/files/run/macro-reify-type/Test_2.scala index 04010423bf8f..99a23c67c2ec 100644 --- a/test/files/run/macro-reify-type/Test_2.scala +++ b/test/files/run/macro-reify-type/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import StaticReflect._ object Test extends App { diff --git a/test/files/run/macro-reify-typetag-notypeparams/Test.scala b/test/files/run/macro-reify-typetag-notypeparams/Test.scala index be9feac4caf1..29732ceb7e08 100644 --- a/test/files/run/macro-reify-typetag-notypeparams/Test.scala +++ b/test/files/run/macro-reify-typetag-notypeparams/Test.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Test extends App { println(implicitly[TypeTag[Int]]) 
println(implicitly[TypeTag[List[Int]]]) -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala b/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala index be0ce33e4113..ec68f9d1092c 100644 --- a/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala +++ b/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala @@ -6,4 +6,4 @@ object Test extends App { println(implicitly[TypeTag[List[T]]]) } fooTypeTag[Int] -} \ No newline at end of file +} diff --git a/test/files/run/macro-reify-unreify.check b/test/files/run/macro-reify-unreify.check index 55d61e6068b4..926d7ce1d150 100644 --- a/test/files/run/macro-reify-unreify.check +++ b/test/files/run/macro-reify-unreify.check @@ -1 +1 @@ -hello world = Expr[String("hello world")]("hello world") +hello world = Expr[String("hello world")]("hello ".$plus("world")) diff --git a/test/files/run/macro-reify-unreify/Macros_1.scala b/test/files/run/macro-reify-unreify/Macros_1.scala index 0005bf378549..dc681abcc37a 100644 --- a/test/files/run/macro-reify-unreify/Macros_1.scala +++ b/test/files/run/macro-reify-unreify/Macros_1.scala @@ -1,8 +1,8 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { - def foo(s: String) = macro Impls.foo + def foo(s: String): Unit = macro Impls.foo object Impls { def foo(c: Context)(s: c.Expr[String]) = { diff --git a/test/files/run/macro-reify-unreify/Test_2.scala b/test/files/run/macro-reify-unreify/Test_2.scala index ce2bab33416e..62b52734adf0 100644 --- a/test/files/run/macro-reify-unreify/Test_2.scala +++ b/test/files/run/macro-reify-unreify/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { Macros.foo("world") } diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check index 7e1cb6158334..a2786377ad87 100644 --- 
a/test/files/run/macro-repl-basic.check +++ b/test/files/run/macro-repl-basic.check @@ -1,6 +1,6 @@ -scala> import language.experimental.macros -import language.experimental.macros +scala> import scala.language.experimental.macros +import scala.language.experimental.macros scala> import scala.reflect.macros.blackbox.Context import scala.reflect.macros.blackbox.Context @@ -26,7 +26,49 @@ scala> object Impls { c.Expr[Int](body) } } -defined object Impls +object Impls + +scala> object Macros { + def bar(x: Int): Int = macro Impls.bar +} + def bar(x: Int): Int = macro Impls.bar + ^ +On line 2: error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. + +scala> :replay -Yrepl-class-based:false +replay> import scala.language.experimental.macros +import scala.language.experimental.macros + +replay> import scala.reflect.macros.blackbox.Context +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +replay> object Impls { + def foo(c: Context)(x: c.Expr[Int]) = { + import c.universe._ + val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))) + c.Expr[Int](body) + } + + def bar(c: Context)(x: c.Expr[Int]) = { + import c.universe._ + val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))) + c.Expr[Int](body) + } + + def quux(c: Context)(x: c.Expr[Int]) = { + import c.universe._ + val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3)))) + c.Expr[Int](body) + } +} +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context +object Impls + scala> object Macros { object Shmacros { @@ -36,8 +78,8 @@ scala> object Macros { }; class Macros { def quux(x: Int): Int = macro Impls.quux } -defined object Macros -defined class Macros +object Macros +class Macros scala> diff --git 
a/test/files/run/macro-repl-basic.scala b/test/files/run/macro-repl-basic.scala index 84ea1e2dea65..edd9afcd8aaf 100644 --- a/test/files/run/macro-repl-basic.scala +++ b/test/files/run/macro-repl-basic.scala @@ -2,13 +2,8 @@ import scala.tools.nsc.Settings import scala.tools.partest.ReplTest object Test extends ReplTest { - override def transformSettings(s: Settings) = { - s.Yreplclassbased.value = false // macros are object-based only - s - } - def code = """ - |import language.experimental.macros + |import scala.language.experimental.macros |import scala.reflect.macros.blackbox.Context | |object Impls { @@ -31,6 +26,10 @@ object Test extends ReplTest { | } |} |object Macros { + | def bar(x: Int): Int = macro Impls.bar + |} + |:replay -Yrepl-class-based:false + |object Macros { | object Shmacros { | def foo(x: Int): Int = macro Impls.foo | } diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check index a307ffb26ee3..bca5f2cbabd5 100644 --- a/test/files/run/macro-repl-dontexpand.check +++ b/test/files/run/macro-repl-dontexpand.check @@ -1,14 +1,14 @@ scala> def bar1(c: scala.reflect.macros.blackbox.Context) = ??? -bar1: (c: scala.reflect.macros.blackbox.Context)Nothing +def bar1(c: scala.reflect.macros.blackbox.Context): Nothing -scala> def foo1 = macro bar1 -defined term macro foo1: Nothing +scala> def foo1: Nothing = macro bar1 +def foo1: Nothing scala> def bar2(c: scala.reflect.macros.whitebox.Context) = ??? 
-bar2: (c: scala.reflect.macros.whitebox.Context)Nothing +def bar2(c: scala.reflect.macros.whitebox.Context): Nothing -scala> def foo2 = macro bar2 -defined term macro foo2: Nothing +scala> def foo2: Nothing = macro bar2 +def foo2: Nothing scala> :quit diff --git a/test/files/run/macro-repl-dontexpand.scala b/test/files/run/macro-repl-dontexpand.scala index dc2bc5e25f25..2e884da0275c 100644 --- a/test/files/run/macro-repl-dontexpand.scala +++ b/test/files/run/macro-repl-dontexpand.scala @@ -10,8 +10,8 @@ object Test extends ReplTest { override def extraSettings = "-language:experimental.macros" def code = """ |def bar1(c: scala.reflect.macros.blackbox.Context) = ??? - |def foo1 = macro bar1 + |def foo1: Nothing = macro bar1 |def bar2(c: scala.reflect.macros.whitebox.Context) = ??? - |def foo2 = macro bar2 + |def foo2: Nothing = macro bar2 |""".stripMargin } diff --git a/test/files/run/macro-settings/Impls_Macros_1.scala b/test/files/run/macro-settings/Impls_Macros_1.scala index 094d09706d6d..bf49af63c5df 100644 --- a/test/files/run/macro-settings/Impls_Macros_1.scala +++ b/test/files/run/macro-settings/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros -Xmacro-settings:hello=1 +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -11,5 +11,5 @@ object Impls { } object Macros { - def foo = macro Impls.impl + def foo: Unit = macro Impls.impl } diff --git a/test/files/run/macro-settings/Test_2.scala b/test/files/run/macro-settings/Test_2.scala index 8fb9520b650b..fc891cfc1dd6 100644 --- a/test/files/run/macro-settings/Test_2.scala +++ b/test/files/run/macro-settings/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros -Xmacro-settings:hello=1 +//> using options -Xmacro-settings:hello=1 object Test extends App { Macros.foo } diff --git a/test/files/run/macro-sip19-revised.check b/test/files/run/macro-sip19-revised.check index ade06c8d9c98..86c3d819b038 100644 --- 
a/test/files/run/macro-sip19-revised.check +++ b/test/files/run/macro-sip19-revised.check @@ -1,5 +1,5 @@ -hey, i've been called from SourceLocation1(null,Test_2.scala,12,292) -hey, i've been called from SourceLocation1(SourceLocation1(null,Test_2.scala,12,292),Test_2.scala,9,263) -hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,12,292),Test_2.scala,9,263),Test_2.scala,9,263) -hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,12,292),Test_2.scala,9,263),Test_2.scala,9,263),Test_2.scala,7,221) +hey, i've been called from SourceLocation1(null,Test_2.scala,11,251) +hey, i've been called from SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222) +hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222) +hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222),Test_2.scala,6,180) 2 diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala index d1435814e1e8..71a14f213de7 100644 --- a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala +++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.whitebox.Context object Macros { diff --git a/test/files/run/macro-sip19-revised/Test_2.scala b/test/files/run/macro-sip19-revised/Test_2.scala index f5d73677503e..d9a4d7d4fc61 100644 --- a/test/files/run/macro-sip19-revised/Test_2.scala +++ b/test/files/run/macro-sip19-revised/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import Macros._ object Test extends App { diff --git a/test/files/run/macro-sip19.check b/test/files/run/macro-sip19.check index 
075b688c81a6..07cfd8c1e1e3 100644 --- a/test/files/run/macro-sip19.check +++ b/test/files/run/macro-sip19.check @@ -1,5 +1,5 @@ -hey, i've been called from SourceLocation(Test_2.scala,16,407) -hey, i've been called from SourceLocation(Test_2.scala,12,372) -hey, i've been called from SourceLocation(Test_2.scala,12,372) -hey, i've been called from SourceLocation(Test_2.scala,10,326) +hey, i've been called from SourceLocation(Test_2.scala,15,366) +hey, i've been called from SourceLocation(Test_2.scala,11,331) +hey, i've been called from SourceLocation(Test_2.scala,11,331) +hey, i've been called from SourceLocation(Test_2.scala,9,285) 2 diff --git a/test/files/run/macro-sip19/Impls_Macros_1.scala b/test/files/run/macro-sip19/Impls_Macros_1.scala index e22ed06d1412..c661239f6487 100644 --- a/test/files/run/macro-sip19/Impls_Macros_1.scala +++ b/test/files/run/macro-sip19/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.whitebox.Context object Macros { diff --git a/test/files/run/macro-sip19/Test_2.scala b/test/files/run/macro-sip19/Test_2.scala index 8225bf393516..32326e635203 100644 --- a/test/files/run/macro-sip19/Test_2.scala +++ b/test/files/run/macro-sip19/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import Macros._ object Test extends App { diff --git a/test/files/run/macro-subpatterns/Test_2.scala b/test/files/run/macro-subpatterns/Test_2.scala index dc6e6688774d..adc1653deb26 100644 --- a/test/files/run/macro-subpatterns/Test_2.scala +++ b/test/files/run/macro-subpatterns/Test_2.scala @@ -1,5 +1,6 @@ object Test extends App { 42 match { case Extractor(a @ Extractor(b @ Extractor(c))) => println(a); println(b); println(c) + case x => throw new MatchError(x) } } diff --git a/test/files/run/macro-system-properties.check b/test/files/run/macro-system-properties.check index e20a5bbdfb65..3ce06e7990be 100644 --- 
a/test/files/run/macro-system-properties.check +++ b/test/files/run/macro-system-properties.check @@ -1,6 +1,6 @@ -scala> import scala.language.experimental._, scala.reflect.macros.blackbox.Context -import scala.language.experimental._ +scala> import scala.language.experimental.macros, scala.reflect.macros.blackbox.Context +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context scala> object GrabContext { @@ -9,12 +9,12 @@ scala> object GrabContext { def impl(c: Context)() = { import c.universe._; System.getProperties.put("lastContext", c); c.Expr[Unit](q"()") } def grab(): Unit = macro impl } -defined object GrabContext +object GrabContext scala> object Test { class C(implicit a: Any) { GrabContext.grab } } -defined object Test +object Test scala> object Test { class C(implicit a: Any) { GrabContext.grab } } -defined object Test +object Test scala> :quit diff --git a/test/files/run/macro-system-properties.scala b/test/files/run/macro-system-properties.scala index 0d8602a20bd0..88e2e34f947f 100644 --- a/test/files/run/macro-system-properties.scala +++ b/test/files/run/macro-system-properties.scala @@ -8,7 +8,7 @@ object Test extends ReplTest { } def code = """ - import scala.language.experimental._, scala.reflect.macros.blackbox.Context + import scala.language.experimental.macros, scala.reflect.macros.blackbox.Context object GrabContext { def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context]) // System.properties lets you stash true globals (unlike statics which are classloader scoped) diff --git a/test/files/run/macro-term-declared-in-annotation/Impls_1.scala b/test/files/run/macro-term-declared-in-annotation/Impls_1.scala index 6c27be7e13ac..6855f583c91f 100644 --- a/test/files/run/macro-term-declared-in-annotation/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-annotation/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import 
scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-annotation/Macros_2.scala b/test/files/run/macro-term-declared-in-annotation/Macros_2.scala index c42cc5bad8f4..81a6f9a3be2e 100644 --- a/test/files/run/macro-term-declared-in-annotation/Macros_2.scala +++ b/test/files/run/macro-term-declared-in-annotation/Macros_2.scala @@ -1,9 +1,9 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros class foo(val bar: String) extends annotation.StaticAnnotation object Api { // foo in ann must have a different name // otherwise, we get bitten by https://github.com/scala/bug/issues/5544 - @foo({def fooInAnn = macro Impls.foo; fooInAnn}) + @foo({def fooInAnn: String = macro Impls.foo; fooInAnn}) def foo = println("it works") } diff --git a/test/files/run/macro-term-declared-in-annotation/Test_3.scala b/test/files/run/macro-term-declared-in-annotation/Test_3.scala index 265d69a79bd3..285f8959e00a 100644 --- a/test/files/run/macro-term-declared-in-annotation/Test_3.scala +++ b/test/files/run/macro-term-declared-in-annotation/Test_3.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { Api.foo } diff --git a/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala b/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala index d16b78e7fe76..4bb5879d9a9b 100644 --- a/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala @@ -1,4 
+1,5 @@ -// scalac: -language:experimental.macros +//> using options -language:experimental.macros +//import scala.language.experimental.macros import scala.language.reflectiveCalls object Test extends App { diff --git a/test/files/run/macro-term-declared-in-block/Impls_1.scala b/test/files/run/macro-term-declared-in-block/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-block/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-block/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala index b2fb5e41838d..706229539072 100644 --- a/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { { def foo: Unit = macro Impls.foo diff --git a/test/files/run/macro-term-declared-in-class-class/Impls_1.scala b/test/files/run/macro-term-declared-in-class-class/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-class-class/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-class-class/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala index 33eae20e8122..475fc57f377b 100644 --- a/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros 
class Macros { class Macros { def foo: Unit = macro Impls.foo diff --git a/test/files/run/macro-term-declared-in-class-object/Impls_1.scala b/test/files/run/macro-term-declared-in-class-object/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-class-object/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-class-object/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala index b043bfd4f16d..48ce943c1ffe 100644 --- a/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros class Macros { object Macros { def foo: Unit = macro Impls.foo diff --git a/test/files/run/macro-term-declared-in-class/Impls_1.scala b/test/files/run/macro-term-declared-in-class/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-class/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-class/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala index 0c8e75763692..c093e4d168f0 100644 --- a/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros class Macros { def foo: Unit = macro Impls.foo } diff --git a/test/files/run/macro-term-declared-in-default-param/Impls_1.scala 
b/test/files/run/macro-term-declared-in-default-param/Impls_1.scala index f51d57630cbd..37eeb6a678a5 100644 --- a/test/files/run/macro-term-declared-in-default-param/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-default-param/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala index 8cd831e8e548..ab87dd94920b 100644 --- a/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def foo(bar: String = { def foo: String = macro Impls.foo; foo }) = println(bar) diff --git a/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala b/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala index cd874131f540..1ffe6ccbbe99 100644 --- a/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala +++ b/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Impls { @@ -15,6 +15,6 @@ object Macros { implicit def foo(x: String): Foo = new Foo(x) class Foo(val x: String) { - def toOptionOfInt = macro Impls.toOptionOfInt + def toOptionOfInt: Option[Int] = macro Impls.toOptionOfInt } } diff --git a/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala b/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala index 1fe1c1ff431d..1b470cb8c420 100644 --- a/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala +++ 
b/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros._ println("2".toOptionOfInt) diff --git a/test/files/run/macro-term-declared-in-method/Impls_1.scala b/test/files/run/macro-term-declared-in-method/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-method/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-method/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala index 59a8d237c28f..0cbda02b89b1 100644 --- a/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Test extends App { def bar() = { def foo: Unit = macro Impls.foo diff --git a/test/files/run/macro-term-declared-in-object-class/Impls_1.scala b/test/files/run/macro-term-declared-in-object-class/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-object-class/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-object-class/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala index 2d10ed62b84e..1a5ac90e75e9 100644 --- a/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import 
scala.language.experimental.macros object Macros { class Macros { def foo: Unit = macro Impls.foo diff --git a/test/files/run/macro-term-declared-in-object-object/Impls_1.scala b/test/files/run/macro-term-declared-in-object-object/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-object-object/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-object-object/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala index 66d3ba6a66f9..a8806362cbe8 100644 --- a/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { object Macros { def foo: Unit = macro Impls.foo diff --git a/test/files/run/macro-term-declared-in-object/Impls_1.scala b/test/files/run/macro-term-declared-in-object/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-object/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-object/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala index 13b966031524..057dff202c09 100644 --- a/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros object Macros { def foo: Unit = macro Impls.foo } diff --git 
a/test/files/run/macro-term-declared-in-package-object/Impls_1.scala b/test/files/run/macro-term-declared-in-package-object/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-package-object/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-package-object/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala index 1ea509867ca6..cf874ea1c2cc 100644 --- a/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros package object Macros { def foo: Unit = macro Impls.foo } diff --git a/test/files/run/macro-term-declared-in-refinement/Impls_1.scala b/test/files/run/macro-term-declared-in-refinement/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-refinement/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-refinement/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala index d9c444679a68..a00059ec44de 100644 --- a/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala @@ -1,4 +1,5 @@ -// scalac: -language:experimental.macros +//> using options -language:experimental.macros +//import scala.language.experimental.macros import scala.language.reflectiveCalls class Base diff --git 
a/test/files/run/macro-term-declared-in-trait/Impls_1.scala b/test/files/run/macro-term-declared-in-trait/Impls_1.scala index 8815878212dc..c98b8db40e12 100644 --- a/test/files/run/macro-term-declared-in-trait/Impls_1.scala +++ b/test/files/run/macro-term-declared-in-trait/Impls_1.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.macros.blackbox.Context object Impls { diff --git a/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala index 197428d75fa8..45d7405e7468 100644 --- a/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala +++ b/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros trait Base { def foo: Unit = macro Impls.foo } diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check index 07404cf3fb40..b937087183e4 100644 --- a/test/files/run/macro-typecheck-implicitsdisabled.check +++ b/test/files/run/macro-typecheck-implicitsdisabled.check @@ -1,2 +1,3 @@ scala.Predef.ArrowAssoc[Int](1).->[Int](2) scala.reflect.macros.TypecheckException: value -> is not a member of Int +did you mean >>? 
diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala index b7b0aba0fdf3..cccbd8ddc8e3 100644 --- a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala +++ b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { @@ -10,7 +10,7 @@ object Macros { c.Expr[String](Literal(Constant(ttree1.toString))) } - def foo_with_implicits_enabled = macro impl_with_implicits_enabled + def foo_with_implicits_enabled: String = macro impl_with_implicits_enabled def impl_with_implicits_disabled(c: Context) = { import c.universe._ @@ -25,5 +25,5 @@ object Macros { } } - def foo_with_implicits_disabled = macro impl_with_implicits_disabled + def foo_with_implicits_disabled: String = macro impl_with_implicits_disabled } diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala b/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala index 3d01c1001bd9..9763465c0561 100644 --- a/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala +++ b/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println(Macros.foo_with_implicits_enabled) println(Macros.foo_with_implicits_disabled) diff --git a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala index b6849e5416b2..285957302838 100644 --- a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala +++ b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { @@ -11,7 +11,7 
@@ object Macros { c.Expr[String](Literal(Constant(ttree1.toString))) } - def foo_with_macros_enabled = macro impl_with_macros_enabled + def foo_with_macros_enabled: String = macro impl_with_macros_enabled def impl_with_macros_disabled(c: Context) = { import c.universe._ @@ -28,5 +28,5 @@ object Macros { c.Expr[String](Literal(Constant(ttree2.toString))) } - def foo_with_macros_disabled = macro impl_with_macros_disabled + def foo_with_macros_disabled: String = macro impl_with_macros_disabled } diff --git a/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala b/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala index 447240b92f5e..76cd2a5549c2 100644 --- a/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala +++ b/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println(Macros.foo_with_macros_enabled) println(Macros.foo_with_macros_disabled) diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala index 0ce78f4de639..b0d65185cf42 100644 --- a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala +++ b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { @@ -11,7 +11,7 @@ object Macros { c.Expr[String](Literal(Constant(ttree1.toString))) } - def foo_with_macros_enabled = macro impl_with_macros_enabled + def foo_with_macros_enabled: String = macro impl_with_macros_enabled def impl_with_macros_disabled(c: Context) = { import c.universe._ @@ -28,5 +28,5 @@ object Macros { c.Expr[String](Literal(Constant(ttree2.toString))) } - def foo_with_macros_disabled = macro impl_with_macros_disabled + def foo_with_macros_disabled: String = macro impl_with_macros_disabled } diff --git 
a/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala b/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala index 447240b92f5e..76cd2a5549c2 100644 --- a/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala +++ b/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { println(Macros.foo_with_macros_enabled) println(Macros.foo_with_macros_disabled) diff --git a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala index 9dd2dbaee67f..dce4d5b7db0c 100644 --- a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala +++ b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macro-undetparams-consfromsls/Test_2.scala b/test/files/run/macro-undetparams-consfromsls/Test_2.scala index 9c83f3951805..a23ea9d9dc79 100644 --- a/test/files/run/macro-undetparams-consfromsls/Test_2.scala +++ b/test/files/run/macro-undetparams-consfromsls/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { import Macros._ val xs = cons(1, nil) diff --git a/test/files/run/macro-undetparams-implicitval/Test.scala b/test/files/run/macro-undetparams-implicitval/Test.scala index 109ffd947a66..c584e45b2bfd 100644 --- a/test/files/run/macro-undetparams-implicitval/Test.scala +++ b/test/files/run/macro-undetparams-implicitval/Test.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros import scala.reflect.runtime.universe._ object Test extends App { diff --git a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala index 3b7b6cb95d77..3ffbd33dbc05 
100644 --- a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala +++ b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala @@ -1,4 +1,4 @@ -// scalac: -language:experimental.macros +import scala.language.experimental.macros import scala.reflect.runtime.universe._ import scala.reflect.macros.blackbox.Context @@ -8,5 +8,5 @@ object Macros { reify { println(c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[T]].toString))).splice) } } - def foo[T](foo: T) = macro impl[T] + def foo[T](foo: T): Unit = macro impl[T] } diff --git a/test/files/run/macro-undetparams-macroitself/Test_2.scala b/test/files/run/macro-undetparams-macroitself/Test_2.scala index bd9a9ccbc46f..90c1ae951ffa 100644 --- a/test/files/run/macro-undetparams-macroitself/Test_2.scala +++ b/test/files/run/macro-undetparams-macroitself/Test_2.scala @@ -1,4 +1,3 @@ -// scalac: -language:experimental.macros object Test extends App { Macros.foo(42) Macros.foo("42") diff --git a/test/files/run/macro-vampire-false-warning/Macros_1.scala b/test/files/run/macro-vampire-false-warning/Macros_1.scala index 43b10597e048..169506de2e5e 100644 --- a/test/files/run/macro-vampire-false-warning/Macros_1.scala +++ b/test/files/run/macro-vampire-false-warning/Macros_1.scala @@ -1,5 +1,5 @@ -// scalac: -Xfatal-warnings -// As per http://meta.plasm.us/posts/2013/08/31/feeding-our-vampires/ +// As per https://meta.plasm.us/posts/2013/08/31/feeding-our-vampires/ +//> using options -Werror import scala.annotation.StaticAnnotation import scala.reflect.macros.whitebox.Context @@ -19,7 +19,7 @@ object Macros { import c.universe._ import Flag._ // val kvps = xs.toList map { case q"${_}(${Literal(Constant(name: String))}).->[${_}]($value)" => name -> value } - val kvps = xs.map(_.tree).toList map { case Apply(TypeApply(Select(Apply(_, List(Literal(Constant(name: String)))), _), _), List(value)) => name -> value } + val kvps = xs.map(_.tree).toList map { case Apply(TypeApply(Select(Apply(_, 
List(Literal(Constant(name: String)))), _), _), List(value)) => name -> value case x => throw new MatchError(x) } // val fields = kvps map { case (k, v) => q"@body($v) def ${TermName(k)} = macro Macros.selFieldImpl" } val fields = kvps map { case (k, v) => DefDef( Modifiers(MACRO, typeNames.EMPTY, List(Apply(Select(New(Ident(TypeName("body"))), termNames.CONSTRUCTOR), List(v)))), diff --git a/test/files/run/macro-vampire-false-warning/Test_2.scala b/test/files/run/macro-vampire-false-warning/Test_2.scala index 9eda3062ffcd..a9dd906bbae5 100644 --- a/test/files/run/macro-vampire-false-warning/Test_2.scala +++ b/test/files/run/macro-vampire-false-warning/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test extends App { val foo = mkObject("x" -> "2", "y" -> 3) println(foo.x) diff --git a/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala b/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala index a2e925bb3ad9..aaf27c2896a5 100644 --- a/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala +++ b/test/files/run/macro-whitebox-dynamic-materialization/Macros_1.scala @@ -22,4 +22,4 @@ object Macros { c.abort(c.enclosingPosition, "I don't like classes that contain integers") q"new Foo[$tpe]{ override def toString = ${tpe.toString} }" } -} \ No newline at end of file +} diff --git a/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala b/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala index bf19209ab7e8..abb5229bd0d9 100644 --- a/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala +++ b/test/files/run/macro-whitebox-dynamic-materialization/Test_2.scala @@ -1,4 +1,4 @@ object Test extends App { println(implicitly[Foo[C1]]) println(implicitly[Foo[C2]]) -} \ No newline at end of file +} diff --git a/test/files/run/macro-whitebox-extractor/Test_2.scala b/test/files/run/macro-whitebox-extractor/Test_2.scala index 41be6f976713..c0b95e070347 
100644 --- a/test/files/run/macro-whitebox-extractor/Test_2.scala +++ b/test/files/run/macro-whitebox-extractor/Test_2.scala @@ -1,5 +1,6 @@ object Test extends App { 42 match { case Extractor(x) => println(x) + case x => throw new MatchError(x) } } diff --git a/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala b/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala index 5e89e6b2f8f5..0c512ace830d 100644 --- a/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala +++ b/test/files/run/macro-whitebox-fundep-materialization/Macros_1.scala @@ -24,7 +24,7 @@ object Iso { } def mkFrom() = { - if (fields.length == 0) Literal(Constant(Unit)) + if (fields.length == 0) Literal(Constant(())) else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim)))) } diff --git a/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala b/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala index 40ca1d549c5b..8b60943cfd8d 100644 --- a/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala +++ b/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala @@ -5,8 +5,8 @@ object Test extends App { { val equiv = foo(Foo(23, "foo", true)) - def typed[T](t: => T) {} + def typed[T](t: => T): Unit = {} typed[(Int, String, Boolean)](equiv) println(equiv) } -} \ No newline at end of file +} diff --git a/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala b/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala index 45fdb79c3045..b574582064c2 100644 --- a/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala +++ b/test/files/run/macro-whitebox-structural/Impls_Macros_1.scala @@ -13,4 +13,4 @@ object Macros { } def foo: Any = macro impl -} \ No newline at end of file +} diff --git a/test/files/run/macro-whitebox-structural/Test_2.scala b/test/files/run/macro-whitebox-structural/Test_2.scala index 
ea6a817e346e..e02b4feb427a 100644 --- a/test/files/run/macro-whitebox-structural/Test_2.scala +++ b/test/files/run/macro-whitebox-structural/Test_2.scala @@ -2,4 +2,4 @@ import Macros._ object Test extends App { println(Macros.foo.x) -} \ No newline at end of file +} diff --git a/test/files/run/macroPlugins-enterStats.scala b/test/files/run/macroPlugins-enterStats.scala index 917233e990c8..346f1fe01e82 100644 --- a/test/files/run/macroPlugins-enterStats.scala +++ b/test/files/run/macroPlugins-enterStats.scala @@ -1,8 +1,10 @@ +//> using options -deprecation +// import scala.tools.partest._ import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Xprint:typer" + override def extraSettings: String = "-usejavacp -Vprint:typer" def code = """ class C { @@ -11,7 +13,7 @@ object Test extends DirectTest { } """.trim - def show() { + def show(): Unit = { val global = newCompiler() import global._ import analyzer._ @@ -22,7 +24,7 @@ object Test extends DirectTest { def logEnterStat(pluginName: String, stat: Tree): Unit = log(s"$pluginName:enterStat($stat)") def deriveStat(pluginName: String, typer: Typer, stat: Tree): List[Tree] = stat match { case DefDef(mods, name, Nil, Nil, TypeTree(), body) => - val derived = DefDef(NoMods, TermName(name + pluginName), Nil, Nil, TypeTree(), Ident(TermName("$qmark$qmark$qmark"))) + val derived = DefDef(NoMods, TermName(name.toString + pluginName), Nil, Nil, TypeTree(), Ident(TermName("$qmark$qmark$qmark"))) newNamer(typer.context).enterSym(derived) List(derived) case _ => diff --git a/test/files/run/macroPlugins-isBlackbox/Macros_2.scala b/test/files/run/macroPlugins-isBlackbox/Macros_2.scala index a90dd702dfe9..93cc1869de32 100644 --- a/test/files/run/macroPlugins-isBlackbox/Macros_2.scala +++ b/test/files/run/macroPlugins-isBlackbox/Macros_2.scala @@ -8,4 +8,4 @@ object Macros { } def foo: Any = macro impl -} \ No newline at end of file +} diff --git 
a/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala b/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala index b78a18ea6a76..e3ea856d28a6 100644 --- a/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala +++ b/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala @@ -18,4 +18,4 @@ class Plugin(val global: Global) extends NscPlugin { Some(false) } } -} \ No newline at end of file +} diff --git a/test/files/run/macroPlugins-isBlackbox/Test_3.scala b/test/files/run/macroPlugins-isBlackbox/Test_3.scala index 68eb05dc8eb5..d3397c023ddf 100644 --- a/test/files/run/macroPlugins-isBlackbox/Test_3.scala +++ b/test/files/run/macroPlugins-isBlackbox/Test_3.scala @@ -1,4 +1,4 @@ -// scalac: -Xplugin:. +//> using options -Xplugin:. object Test extends App { val x: Int = Macros.foo } diff --git a/test/files/run/macroPlugins-macroArgs.check b/test/files/run/macroPlugins-macroArgs.check index 1191247b6d9a..a68f8069b63d 100644 --- a/test/files/run/macroPlugins-macroArgs.check +++ b/test/files/run/macroPlugins-macroArgs.check @@ -1,2 +1,2 @@ -1 -2 +hijacked 1 +hijacked 2 diff --git a/test/files/run/macroPlugins-macroArgs/Plugin_1.scala b/test/files/run/macroPlugins-macroArgs/Plugin_1.scala index d67ac4420484..c2ca09d2a25d 100644 --- a/test/files/run/macroPlugins-macroArgs/Plugin_1.scala +++ b/test/files/run/macroPlugins-macroArgs/Plugin_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xplugin:. package macroArgs import scala.tools.nsc.Global diff --git a/test/files/run/macroPlugins-macroArgs/Test_3.scala b/test/files/run/macroPlugins-macroArgs/Test_3.scala index 84c3fbf9b5d3..37440cdcccc6 100644 --- a/test/files/run/macroPlugins-macroArgs/Test_3.scala +++ b/test/files/run/macroPlugins-macroArgs/Test_3.scala @@ -1,3 +1,4 @@ +//> using options -Xplugin:. 
object Test extends App { Macros.foo("1") Macros.foo("2") diff --git a/test/files/run/macroPlugins-macroExpand.check b/test/files/run/macroPlugins-macroExpand.check index b6f843618909..6f685c2af4aa 100644 --- a/test/files/run/macroPlugins-macroExpand.check +++ b/test/files/run/macroPlugins-macroExpand.check @@ -1,2 +1,2 @@ -impl1 -impl2 +expanded into println("impl1") +expanded into println("impl2") diff --git a/test/files/run/macroPlugins-macroExpand/Macros_2.scala b/test/files/run/macroPlugins-macroExpand/Macros_2.scala index 5c3a5a80a31a..bd22bb7504b4 100644 --- a/test/files/run/macroPlugins-macroExpand/Macros_2.scala +++ b/test/files/run/macroPlugins-macroExpand/Macros_2.scala @@ -1,4 +1,3 @@ -// scalac: -Yrangepos:false import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macroPlugins-macroExpand/Plugin_1.scala b/test/files/run/macroPlugins-macroExpand/Plugin_1.scala index b22c5d527605..bbccd32ededf 100644 --- a/test/files/run/macroPlugins-macroExpand/Plugin_1.scala +++ b/test/files/run/macroPlugins-macroExpand/Plugin_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xplugin:. -Yrangepos:false package macroExpand import scala.tools.nsc.Global @@ -19,7 +18,7 @@ class Plugin(val global: Global) extends NscPlugin { object expander extends DefMacroExpander(typer, expandee, mode, pt) { override def onSuccess(expanded: Tree) = { val message = s"expanded into ${expanded.toString}" - typer.typed(q"println($message)") + this.typer.typed(q"println($message)") } } Some(expander(expandee)) diff --git a/test/files/run/macroPlugins-macroExpand/Test_3.scala b/test/files/run/macroPlugins-macroExpand/Test_3.scala index 360d9bbaa0f6..5351095330c8 100644 --- a/test/files/run/macroPlugins-macroExpand/Test_3.scala +++ b/test/files/run/macroPlugins-macroExpand/Test_3.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:false +//> using options -Xplugin:. 
object Test extends App { Macros.foo1 Macros.foo2 diff --git a/test/files/run/macroPlugins-macroRuntime.check b/test/files/run/macroPlugins-macroRuntime.check index 1191247b6d9a..af16d1ac36e2 100644 --- a/test/files/run/macroPlugins-macroRuntime.check +++ b/test/files/run/macroPlugins-macroRuntime.check @@ -1,2 +1,2 @@ -1 -2 +hijacked +hijacked diff --git a/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala b/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala index f4f472e0def9..aefc62491be0 100644 --- a/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala +++ b/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xplugin:. package macroRuntime import scala.tools.nsc.Global diff --git a/test/files/run/macroPlugins-macroRuntime/Test_3.scala b/test/files/run/macroPlugins-macroRuntime/Test_3.scala index 84c3fbf9b5d3..37440cdcccc6 100644 --- a/test/files/run/macroPlugins-macroRuntime/Test_3.scala +++ b/test/files/run/macroPlugins-macroRuntime/Test_3.scala @@ -1,3 +1,4 @@ +//> using options -Xplugin:. 
object Test extends App { Macros.foo("1") Macros.foo("2") diff --git a/test/files/run/macroPlugins-namerHooks.check b/test/files/run/macroPlugins-namerHooks.check index 4409f196f0bf..41670ba66ad1 100644 --- a/test/files/run/macroPlugins-namerHooks.check +++ b/test/files/run/macroPlugins-namerHooks.check @@ -18,9 +18,11 @@ enterStat(super.()) enterSym( def copy$default$1 = x) enterSym( def copy$default$2 = y) enterSym( var acc: Int = -889275714) +enterSym(acc = scala.runtime.Statics.mix(acc, 67)) enterSym(acc = scala.runtime.Statics.mix(acc, x)) enterSym(acc = scala.runtime.Statics.mix(acc, y)) enterStat( var acc: Int = -889275714) +enterStat(acc = scala.runtime.Statics.mix(acc, 67)) enterStat(acc = scala.runtime.Statics.mix(acc, x)) enterStat(acc = scala.runtime.Statics.mix(acc, y)) enterSym( val C$1: C = x$1.asInstanceOf[C]) @@ -28,7 +30,7 @@ enterStat( val C$1: C = x$1.asInstanceOf[C]) enterSym(def () = { super.(); () }) enterSym(final override def toString() = "C") enterSym(case def apply(x: Int, y: Int): C = new C(x, y)) -enterSym(case def unapply(x$0: C): _root_.scala.Option[scala.Tuple2[Int, Int]] = if (x$0.==(null)) scala.None else Some(scala.Tuple2(x$0.x, x$0.y))) +enterSym(case def unapply(x$0: C): _root_.scala.Option[scala.Tuple2[Int, Int]] = if (x$0.eq(null)) scala.None else Some(scala.Tuple2(x$0.x, x$0.y))) enterStat(def () = { super.(); () }) enterStat(final override def toString() = "C") enterSym(def () = { super.(); () }) @@ -37,6 +39,8 @@ enterSym(super.()) enterStat(super.()) enterSym(case val x1: Int = x$1) enterStat(case val x1: Int = x$1) +enterSym(case val x1: Int = x$1) +enterStat(case val x1: Int = x$1) enterSym(case val x1: Any = x$1) enterSym(case5(){ if (x1.isInstanceOf[C]) matchEnd4(true) else case6() }) enterSym(case6(){ matchEnd4(false) }) diff --git a/test/files/run/macroPlugins-namerHooks.scala b/test/files/run/macroPlugins-namerHooks.scala index 302429b19e40..89ee7756867a 100644 --- a/test/files/run/macroPlugins-namerHooks.scala +++ 
b/test/files/run/macroPlugins-namerHooks.scala @@ -2,13 +2,12 @@ import scala.tools.partest._ import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" def code = """ case class C(x: Int, y: Int) """.trim - def show() { + def show(): Unit = { val global = newCompiler() import global._ import analyzer._ diff --git a/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala b/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala index 7dafd5bfac91..d92c4a257cfb 100644 --- a/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala +++ b/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xplugin:. -Yrangepos:false +//> using options -Xplugin:. -Yrangepos:false import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context diff --git a/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala b/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala index b9445dd9d30a..e9df77f49580 100644 --- a/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala +++ b/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:false +//> using options -Yrangepos:false package typedMacroBody import scala.tools.nsc.Global diff --git a/test/files/run/macroPlugins-typedMacroBody/Test_3.scala b/test/files/run/macroPlugins-typedMacroBody/Test_3.scala index 360d9bbaa0f6..7ababf77324f 100644 --- a/test/files/run/macroPlugins-typedMacroBody/Test_3.scala +++ b/test/files/run/macroPlugins-typedMacroBody/Test_3.scala @@ -1,4 +1,4 @@ -// scalac: -Yrangepos:false +//> using options -Yrangepos:false object Test extends App { Macros.foo1 Macros.foo2 diff --git a/test/files/run/manifests-new.scala b/test/files/run/manifests-new.scala index 8b42e3ca7303..502c2d1bdd19 100644 --- a/test/files/run/manifests-new.scala +++ b/test/files/run/manifests-new.scala @@ -1,6 +1,6 @@ -import scala.language.{ higherKinds, postfixOps } +import 
scala.language.postfixOps import scala.reflect.runtime.universe._ object Test @@ -39,6 +39,7 @@ object Test case CO => showsCovariance && !showsContravariance && !showsInvariance case IN => showsInvariance && !showsCovariance && !showsContravariance case CONTRA => showsContravariance && !showsCovariance && !showsInvariance + case x => throw new MatchError(x) } } diff --git a/test/files/run/manifests-old.scala b/test/files/run/manifests-old.scala index d8b1e751d4c1..4aa6e97dacb4 100644 --- a/test/files/run/manifests-old.scala +++ b/test/files/run/manifests-old.scala @@ -1,4 +1,4 @@ -import scala.language.{ higherKinds, postfixOps } +import scala.language.postfixOps @deprecated("Suppress warnings", since="2.11") object Test @@ -37,6 +37,7 @@ object Test case CO => showsCovariance && !showsContravariance && !showsInvariance case IN => showsInvariance && !showsCovariance && !showsContravariance case CONTRA => showsContravariance && !showsCovariance && !showsInvariance + case x => throw new MatchError(x) } } diff --git a/test/files/run/manifests-undeprecated-in-2.10.0.scala b/test/files/run/manifests-undeprecated-in-2.10.0.scala index 38f095c83b50..6cea26e304ab 100644 --- a/test/files/run/manifests-undeprecated-in-2.10.0.scala +++ b/test/files/run/manifests-undeprecated-in-2.10.0.scala @@ -1,4 +1,5 @@ -// scalac: -Xfatal-warnings +//> using options -Xfatal-warnings +// object Test extends App { def m1a: scala.reflect.Manifest[Int] = scala.reflect.Manifest.Int def m2a: scala.reflect.OptManifest[Int] = ??? 
diff --git a/test/files/run/mapConserve.scala b/test/files/run/mapConserve.scala index 7b6ebe3b2d9b..d1e3136d9f25 100644 --- a/test/files/run/mapConserve.scala +++ b/test/files/run/mapConserve.scala @@ -1,8 +1,6 @@ /* - * filter: optimizer warning + * filter: inliner warning */ -import scala.annotation.tailrec -import scala.collection.mutable.ListBuffer object Test { val maxListLength = 7 // up to 16, but larger is slower @@ -13,11 +11,11 @@ object Test { for (i <- 0 until 250000) xs = "X" :: xs - val lowers = xs.mapConserve(_.toLowerCase) + @annotation.unused val lowers = xs.mapConserve(_.toLowerCase) assert(xs.mapConserve(x => x) eq xs) } - def checkBehaviourUnchanged(input: List[_], oldOutput: List[_], newOutput: List[_]) { + def checkBehaviourUnchanged(input: List[_], oldOutput: List[_], newOutput: List[_]): Unit = { if (oldOutput eq input) assert(newOutput eq oldOutput) else { @@ -30,7 +28,7 @@ object Test { var callCount = 0 val lastHexDigit: Function1[BigInt, AnyRef] = { x: BigInt => callCount+=1; if (x < 16) x else x % 16 } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { for (length <- 0 to maxListLength; bitmap <- 0 until (1 << length); data = List.range(0, length) map { x: Int => diff --git a/test/files/run/mapValues.scala b/test/files/run/mapValues.scala index d3266bd18fef..ff6f93346b49 100644 --- a/test/files/run/mapValues.scala +++ b/test/files/run/mapValues.scala @@ -1,6 +1,6 @@ object Test { val m = Map(1 -> 1, 2 -> 2) - val mv = (m mapValues identity) - 1 + val mv = (m.view mapValues identity).toMap - 1 def main(args: Array[String]): Unit = { assert(mv.size == 1) diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala deleted file mode 100644 index e287b0eb097a..000000000000 --- a/test/files/run/map_java_conversions.scala +++ /dev/null @@ -1,55 +0,0 @@ -import collection.convert.ImplicitConversionsToScala._ -import collection.JavaConverters._ - -object Test { - - def main(args: 
Array[String]) { - test(new java.util.HashMap[String, String]) - test(new java.util.Properties) - testConcMap - } - - def testConcMap { - import collection.convert.ImplicitConversionsToScala._ - - val concMap = new java.util.concurrent.ConcurrentHashMap[String, String] - - test(concMap) - val cmap = mapAsScalaConcurrentMap(concMap) - cmap.putIfAbsent("absentKey", "absentValue") - cmap.put("somekey", "somevalue") - assert(cmap.remove("somekey", "somevalue") == true) - assert(cmap.replace("absentKey", "newAbsentValue") == Some("absentValue")) - assert(cmap.replace("absentKey", "newAbsentValue", ".......") == true) - } - - def test(m: collection.mutable.Map[String, String]) { - m.clear - assert(m.size == 0) - - m.put("key", "value") - assert(m.size == 1) - - assert(m.put("key", "anotherValue") == Some("value")) - assert(m.put("key2", "value2") == None) - assert(m.size == 2) - - m += (("key3", "value3")) - assert(m.size == 3) - - m -= "key2" - assert(m.size == 2) - assert(m.nonEmpty) - assert(m.remove("key") == Some("anotherValue")) - - m.clear - for (i <- 0 until 10) m += (("key" + i, "value" + i)) - for ((k, v) <- m) assert(k.startsWith("key")) - } -} - - - - - - diff --git a/test/files/run/map_test.scala b/test/files/run/map_test.scala index b76dfb457776..00d2bf1f38b4 100644 --- a/test/files/run/map_test.scala +++ b/test/files/run/map_test.scala @@ -5,17 +5,17 @@ object Test extends App { test2() println("OK") - def test1() { + def test1(): Unit = { val myMap: TreeMap[Int, String] = new TreeMap test_map(myMap) } - def test2() { + def test2(): Unit = { val myMap: ListMap[Int, String] = new ListMap test_map(myMap) } - def test_map(myMap: Map[Int, String]) { + def test_map(myMap: Map[Int, String]): Unit = { val map1 = myMap.updated(42,"The answer") val map2 = map1.updated(17,"A small random number") val map3 = map2.updated(666,"A bigger random number") @@ -30,7 +30,7 @@ object Test extends App { i = 0 while(i < 4712) { if (map.isDefinedAt(i)) - print(i + "->" + map(i) 
+ " "); + print(s"$i->${map(i)} ") i += 1 } println("") diff --git a/test/files/run/matcharraytail.check b/test/files/run/matcharraytail.check index f2844d41a994..7e4858334795 100644 --- a/test/files/run/matcharraytail.check +++ b/test/files/run/matcharraytail.check @@ -1,2 +1,2 @@ Array(foo, bar, baz) -Vector(bar, baz) +ArraySeq(bar, baz) diff --git a/test/files/run/matcharraytail.scala b/test/files/run/matcharraytail.scala index 24a8dd12b2e4..7980734b0d8d 100644 --- a/test/files/run/matcharraytail.scala +++ b/test/files/run/matcharraytail.scala @@ -1,3 +1,5 @@ +import scala.tools.partest.Util.ArrayDeep + object Test extends App{ Array("foo", "bar", "baz") match { case x@Array("foo", bar @_*) => println(x.deep.toString); println(bar.toString); diff --git a/test/files/run/matchemptyarray.scala b/test/files/run/matchemptyarray.scala index 1fa53b48297a..a6d45387630a 100644 --- a/test/files/run/matchemptyarray.scala +++ b/test/files/run/matchemptyarray.scala @@ -1,5 +1,8 @@ +import scala.tools.partest.Util.ArrayDeep + object Test extends App{ Array[String]() match { case x@Array() => println(x.deep.toString()); + case x => throw new MatchError(x) } } diff --git a/test/files/run/matchonseq.scala b/test/files/run/matchonseq.scala index f6f320245a91..8567ddcaaffd 100644 --- a/test/files/run/matchonseq.scala +++ b/test/files/run/matchonseq.scala @@ -1,8 +1,10 @@ object Test extends App { Vector(1,2,3) match { case head +: tail => println("It worked! head=" + head) + case _ => println("Go back and use your noggin'") } Vector(1,2,3) match { case init :+ last => println("It worked! last=" + last) + case _ => println("Ha ha. You didn't last long..") } } diff --git a/test/files/run/matchonstream.check b/test/files/run/matchonstream.check index 3dc3aa5164a7..7a44e93b2628 100644 --- a/test/files/run/matchonstream.check +++ b/test/files/run/matchonstream.check @@ -1 +1 @@ -It worked! +It worked! 
(class: LazyList) diff --git a/test/files/run/matchonstream.scala b/test/files/run/matchonstream.scala index c9bfefc216ad..fdb14faf1e96 100644 --- a/test/files/run/matchonstream.scala +++ b/test/files/run/matchonstream.scala @@ -1,3 +1,3 @@ object Test extends App{ - Stream.from(1) match { case Stream(1, 2, x @_*) => println("It worked!") } + LazyList.from(1) match { case LazyList(1, 2, x @_*) => println(s"It worked! (class: ${x.getClass.getSimpleName})") } } diff --git a/test/files/run/maxerrs.scala b/test/files/run/maxerrs.scala index f73aa66ac284..4bb2f8f3baf3 100644 --- a/test/files/run/maxerrs.scala +++ b/test/files/run/maxerrs.scala @@ -1,7 +1,8 @@ import scala.tools.partest._ -import scala.tools.nsc.{Global, Settings} -import scala.tools.nsc.reporters.Reporter +import scala.tools.nsc.Settings +import scala.tools.nsc.reporters.FilteringReporter +import scala.reflect.internal.util.CodeAction /** Test that compiler enforces maxerrs when given a plain Reporter. */ object Test extends DirectTest { @@ -14,33 +15,40 @@ object Test extends DirectTest { } """.trim - override def extraSettings = "-usejavacp" - + var store0: UnfilteredStoreReporter = _ // a reporter that ignores all limits - lazy val store = new UnfilteredStoreReporter + def store = store0 final val limit = 3 override def show(): Unit = { compile() - assert(store.infos.size == limit) + assert(store.infos.size == limit, s"${store.infos.size} should be $limit") } override def newSettings(args: List[String]) = { val s = super.newSettings(args) s.maxerrs.value = limit s } - override def reporter(s: Settings) = store + override def reporter(s: Settings) = + if (store0 ne null) store0 + else { + store0 = new UnfilteredStoreReporter(s) + store0 + } } -class UnfilteredStoreReporter extends Reporter { +class UnfilteredStoreReporter(s: Settings) extends FilteringReporter { import scala.tools.nsc.reporters.StoreReporter._ import scala.collection.mutable import scala.reflect.internal.util.Position val infos = new 
mutable.LinkedHashSet[Info] - override def info0(pos: Position, msg: String, severity: Severity, force: Boolean) = infos += Info(pos, msg, severity) + override def settings: Settings = s + + override def doReport(pos: Position, msg: String, severity: Severity, actions: List[CodeAction]): Unit = + infos += Info(pos, msg, severity, actions) override def reset(): Unit = { super.reset() diff --git a/test/files/run/memberpos.scala b/test/files/run/memberpos.scala index f2b79c0ec1f2..49397374b57b 100644 --- a/test/files/run/memberpos.scala +++ b/test/files/run/memberpos.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ // Simple sanity test for -Yshow-member-pos. object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Ystop-after:parser -Yshow-member-pos \"\" -d " + testOutput.path + override def extraSettings: String = """-usejavacp -Ystop-after:parser -Yshow-member-pos "" """ override def show() = compile() override def code = """ class A(val a: Int = 1) { diff --git a/test/files/run/mirror_symbolof_x.scala b/test/files/run/mirror_symbolof_x.scala index 8fec301f5694..b72f48c1b5bf 100644 --- a/test/files/run/mirror_symbolof_x.scala +++ b/test/files/run/mirror_symbolof_x.scala @@ -1,4 +1,3 @@ -import scala.reflect.runtime.universe._ import scala.reflect.runtime.{universe => ru} import scala.reflect.runtime.{currentMirror => cm} import scala.reflect.api.Mirror diff --git a/test/files/run/misc.check b/test/files/run/misc.check index 075dfeff2f66..25301c7cf308 100644 --- a/test/files/run/misc.check +++ b/test/files/run/misc.check @@ -2,7 +2,7 @@ misc.scala:46: warning: a pure expression does nothing in statement position; mu 42; ^ misc.scala:47: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses - 42l; + 42L; ^ misc.scala:48: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 23.5f; diff --git 
a/test/files/run/misc.scala b/test/files/run/misc.scala index 2ae76bd9687a..8e50b1fb779a 100644 --- a/test/files/run/misc.scala +++ b/test/files/run/misc.scala @@ -9,17 +9,17 @@ object Test { Console.print("### fib("); Console.print(n); Console.print(") = "); - Console.flush; + Console.flush() val v = fib(n); Console.print(v); - Console.println; - Console.flush; + Console.println() + Console.flush() v } - def id[X](x: X): X = x; + def id[T](x: T): T = x; - def apply[X](f: X => X, x: X): X = f(x); + def apply[T](f: T => T, x: T): T = f(x); def id_obj(x: AnyRef): AnyRef = x; @@ -44,7 +44,7 @@ object Test { def foobar = { 42; - 42l; + 42L; 23.5f; 23.5; "Hello"; @@ -57,7 +57,7 @@ object Test { Console.print("### "); Console.println(17); Console.println("### Bye"); - Console.println; + Console.println() val x = 13; x; // !!! why are DefDef replaced by Block(Tree[0])? we should use Empty! @@ -99,10 +99,10 @@ object Test { val mySub = new MySubclass(); Console.println(mySub); myObj.test(); - Console.println; + Console.println() Console.println(apply_any(id_any, "identity").toString()); - Console.println; + Console.println() }; foobar; @@ -135,13 +135,13 @@ case class Y(y: Int, z: Int) extends X(y + z) { Console.println("A.a = " + a.getA); Console.println("B.a = " + b.getA); Console.println("B.b = " + b.getB); - Console.println; + Console.println() Console.println("X.a = " + x.getX); Console.println("Y.a = " + y.getX); Console.println("Y.b = " + y.getY); Console.println("Y.b = " + y.y); - Console.println; + Console.println() } //############################################################################ @@ -168,10 +168,10 @@ val x: X = new X(); val y: X = new Y(); x.foo; -Console.println; +Console.println() y.foo; -Console.println; +Console.println() } //############################################################################ @@ -210,7 +210,7 @@ class O(a: Int) { Console.println(new O(1).foo) } -Console.println; +Console.println() case class Bar(); @@ -227,7 +227,7 @@ 
Console.println; //############################################################################ - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { } //############################################################################ diff --git a/test/files/run/mixin-signatures.scala b/test/files/run/mixin-signatures.scala index 0f6d0844d8fa..48e345e8c645 100644 --- a/test/files/run/mixin-signatures.scala +++ b/test/files/run/mixin-signatures.scala @@ -83,8 +83,8 @@ object Test { // scala.tools.nsc.symtab.Flags.flagsToString(flags) } - def show(clazz: Class[_]) { - print(clazz + " {") + def show(clazz: Class[_]): Unit = { + print(clazz.toString + " {") clazz.getMethods.sortBy(x => (x.getName, x.isBridge, x.toString)) filter (_.getName.length == 1) foreach { m => print("\n " + m + flagsString(m)) if ("" + m != "" + m.toGenericString) { @@ -94,8 +94,8 @@ object Test { println("\n}") println("") } - def show(x: AnyRef) { show(x.getClass) } - def show(x: String) { show(Class.forName(x)) } + def show(x: AnyRef): Unit = { show(x.getClass) } + def show(x: String): Unit = { show(Class.forName(x)) } def main(args: Array[String]): Unit = { List(bar1, bar2, bar3, bar4, bar5) foreach show diff --git a/test/files/run/mixins.scala b/test/files/run/mixins.scala index 23aec6b52488..75d7f98b16a8 100644 --- a/test/files/run/mixins.scala +++ b/test/files/run/mixins.scala @@ -68,7 +68,7 @@ object Test3 { } def test() = { - val c = new C(); + @annotation.unused val c = new C(); } } diff --git a/test/files/run/module-serialization-proxy-class-unload.scala b/test/files/run/module-serialization-proxy-class-unload.scala new file mode 100644 index 000000000000..eb0d4daf8a40 --- /dev/null +++ b/test/files/run/module-serialization-proxy-class-unload.scala @@ -0,0 +1,26 @@ +//> using options -Ddummy=run-in-forked-process +object Module { + val data = new Array[Byte](32 * 1024 * 1024) +} + +object Test { + private val readResolve = 
classOf[scala.runtime.ModuleSerializationProxy].getDeclaredMethod("readResolve") + readResolve.setAccessible(true) + + val testClassesDir = System.getProperty("partest.output") + def main(args: Array[String]): Unit = { + for (i <- 1 to 256) { + // This would "java.lang.OutOfMemoryError: Java heap space" if ModuleSerializationProxy + // prevented class unloading. + deserializeDynamicLoadedClass() + } + } + + def deserializeDynamicLoadedClass(): Unit = { + val loader = new java.net.URLClassLoader(Array(new java.io.File(testClassesDir).toURI.toURL), ClassLoader.getSystemClassLoader) + val moduleClass = loader.loadClass("Module$") + assert(moduleClass ne Module.getClass) + val result = readResolve.invoke(new scala.runtime.ModuleSerializationProxy(moduleClass)) + assert(result.getClass == moduleClass) + } +} diff --git a/test/files/run/module-static.check b/test/files/run/module-static.check new file mode 100644 index 000000000000..dc7c8a8ed319 --- /dev/null +++ b/test/files/run/module-static.check @@ -0,0 +1,3 @@ +c +t +o1 diff --git a/test/files/run/module-static.scala b/test/files/run/module-static.scala new file mode 100644 index 000000000000..69ea186939ad --- /dev/null +++ b/test/files/run/module-static.scala @@ -0,0 +1,26 @@ + +class C { + val c1 = "c" + println("c") +} +trait T { + final val t1: String = "t" + println(t1) +} +object Test extends C with T { + val o1: String = "o1" + println("o1") + + def main(args: Array[String]): Unit = { + import java.lang.reflect.{Field, Modifier} + def checkStaticFinal(name: String, isFinal: Boolean): Field = { + val f = Test.getClass.getDeclaredField(name) + assert(Modifier.isStatic(f.getModifiers)) + assert(Modifier.isFinal(f.getModifiers) == isFinal) + f + } + checkStaticFinal("MODULE$", isFinal = true) + checkStaticFinal("t1", isFinal = false) + checkStaticFinal("o1", isFinal = true) + } +} diff --git a/test/files/run/multi-array.scala b/test/files/run/multi-array.scala index 36e21ae539de..a26cb05c1ac9 100644 --- 
a/test/files/run/multi-array.scala +++ b/test/files/run/multi-array.scala @@ -1,3 +1,5 @@ +import scala.tools.partest.Util.ArrayDeep + object Test extends App { val a = Array(1, 2, 3) println(a.deep.toString) diff --git a/test/files/run/multiLineOps.scala b/test/files/run/multiLineOps.scala new file mode 100644 index 000000000000..bcb218a3bba1 --- /dev/null +++ b/test/files/run/multiLineOps.scala @@ -0,0 +1,15 @@ +//> using options -Xsource:3 -Xsource-features:leading-infix + +// was: without backticks, "not found: value +" (but parsed here as +a * 6, where backticks fool the lexer) +// now: + is taken as "solo" infix op + +object Test extends App { + val a = 7 + val x = 1 + + + `a` + * + 6 + + assert(x == 1 + 7 * 6, x) // was: 1, now: successor(42) +} diff --git a/test/files/run/mutable-treeset.scala b/test/files/run/mutable-treeset.scala index 100ab39965be..a83d569ab650 100644 --- a/test/files/run/mutable-treeset.scala +++ b/test/files/run/mutable-treeset.scala @@ -22,8 +22,8 @@ object Test extends App { val subsubnonlist = almostmin :: almostmax :: subnonlist val subsubsorted = distinctSubsublist.sorted - def testSize { - def check(set : TreeSet[Int], list: List[Int]) { + def testSize(): Unit = { + def check(set : TreeSet[Int], list: List[Int]): Unit = { assert(set.size == list.size, s"$set had size ${set.size} while $list had size ${list.size}") } @@ -32,17 +32,17 @@ object Test extends App { check(set, distinct) check(set.clone, distinct) - val subset = set from (min + 1) until max + val subset = set rangeFrom (min + 1) rangeUntil max check(subset, distinctSublist) check(subset.clone, distinctSublist) - val subsubset = subset from (almostmin + 1) until almostmax + val subsubset = subset rangeFrom (almostmin + 1) rangeUntil almostmax check(subsubset, distinctSubsublist) check(subsubset.clone, distinctSubsublist) } - def testContains { - def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) { + def testContains(): Unit = { + def check(set : 
TreeSet[Int], list: List[Int], nonlist: List[Int]): Unit = { assert(list forall set.apply, s"$set did not contain all elements of $list using apply") assert(list forall set.contains, s"$set did not contain all elements of $list using contains") assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply") @@ -53,17 +53,17 @@ object Test extends App { check(set, list, nonlist) check(set.clone, list, nonlist) - val subset = set from (min + 1) until max + val subset = set rangeFrom (min + 1) rangeUntil max check(subset, sublist, subnonlist) check(subset.clone, sublist, subnonlist) - val subsubset = subset from (almostmin + 1) until almostmax + val subsubset = subset rangeFrom (almostmin + 1) rangeUntil almostmax check(subsubset, subsublist, subsubnonlist) check(subsubset.clone, subsublist, subsubnonlist) } - def testAdd { - def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) { + def testAdd(): Unit = { + def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]): Unit = { var builtList = List[Int]() for (x <- list) { set += x @@ -77,9 +77,9 @@ object Test extends App { val set = TreeSet[Int]() val clone = set.clone - val subset = set.clone from (min + 1) until max + val subset = set.clone rangeFrom (min + 1) rangeUntil max val subclone = subset.clone - val subsubset = subset.clone from (almostmin + 1) until almostmax + val subsubset = subset.clone rangeFrom (almostmin + 1) rangeUntil almostmax val subsubclone = subsubset.clone check(set, list, nonlist) @@ -92,8 +92,8 @@ object Test extends App { check(subsubclone, list, subsubnonlist) } - def testRemove { - def check(set: TreeSet[Int], sorted: List[Int]) { + def testRemove(): Unit = { + def check(set: TreeSet[Int], sorted: List[Int]): Unit = { var builtList = sorted for (x <- list) { set remove x @@ -104,9 +104,9 @@ object Test extends App { } val set = TreeSet(list:_*) val clone = set.clone - val subset = set.clone from (min + 1) until max + val subset = set.clone 
rangeFrom (min + 1) rangeUntil max val subclone = subset.clone - val subsubset = subset.clone from (almostmin + 1) until almostmax + val subsubset = subset.clone rangeFrom (almostmin + 1) rangeUntil almostmax val subsubclone = subsubset.clone check(set, sorted) @@ -119,8 +119,8 @@ object Test extends App { check(subsubclone, subsubsorted) } - def testIterator { - def check(set: TreeSet[Int], list: List[Int]) { + def testIterator(): Unit = { + def check(set: TreeSet[Int], list: List[Int]): Unit = { val it = set.iterator.toList assert(it == list, s"$it did not equal $list") } @@ -128,18 +128,18 @@ object Test extends App { check(set, sorted) check(set.clone, sorted) - val subset = set from (min + 1) until max + val subset = set rangeFrom (min + 1) rangeUntil max check(subset, subsorted) check(subset.clone, subsorted) - val subsubset = subset from (almostmin + 1) until almostmax + val subsubset = subset rangeFrom (almostmin + 1) rangeUntil almostmax check(subsubset, subsubsorted) check(subsubset.clone, subsubsorted) } - testSize - testContains - testAdd - testRemove - testIterator + testSize() + testContains() + testAdd() + testRemove() + testIterator() } diff --git a/test/files/run/name-based-patmat.check b/test/files/run/name-based-patmat.check index 3d5fc40ed7f4..332a956367f0 100644 --- a/test/files/run/name-based-patmat.check +++ b/test/files/run/name-based-patmat.check @@ -1,3 +1,12 @@ +name-based-patmat.scala:71: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. + case Foo(5, 10, 15, 20, _*) => 1 // wildcard does not align with repeated param + ^ +name-based-patmat.scala:72: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. 
+ case Foo(5, 10, 15, _*) => 2 // wildcard does not align with repeated param + ^ +name-based-patmat.scala:74: warning: unreachable code + case Foo(5, 10) => 4 // should warn unreachable + ^ `catdog only` has 11 chars `catdog only, no product` has 23 chars catdog diff --git a/test/files/run/name-based-patmat.scala b/test/files/run/name-based-patmat.scala index 8e2094010032..50693595b24f 100644 --- a/test/files/run/name-based-patmat.scala +++ b/test/files/run/name-based-patmat.scala @@ -1,3 +1,4 @@ +//> using options -Xlint:stars-align final class MiniSome[T](val get: T) extends AnyVal { def isEmpty = false } package p0 { @@ -67,8 +68,8 @@ package p3 { object Bar { def f(x: Foo) = x match { - case Foo(5, 10, 15, 20, _*) => 1 - case Foo(5, 10, 15, _*) => 2 + case Foo(5, 10, 15, 20, _*) => 1 // wildcard does not align with repeated param + case Foo(5, 10, 15, _*) => 2 // wildcard does not align with repeated param case Foo(5, 10, _*) => 3 case Foo(5, 10) => 4 // should warn unreachable case _ => 5 diff --git a/test/files/run/names-defaults-nest.scala b/test/files/run/names-defaults-nest.scala index 2849bdfc5072..66b9091bca13 100644 --- a/test/files/run/names-defaults-nest.scala +++ b/test/files/run/names-defaults-nest.scala @@ -1,5 +1,5 @@ object Test { - def multinest = { + def multinest() = { def baz = {bar()} def bar(x: String = "a"): Any = { def bar(x: String = "b") = x @@ -8,7 +8,7 @@ object Test { bar$default$1(0) assert(baz == "ba", baz) } - def main(args: Array[String]) { - multinest + def main(args: Array[String]): Unit = { + multinest() } } diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check index 7c2a778264d6..982fff64dcd3 100644 --- a/test/files/run/names-defaults.check +++ b/test/files/run/names-defaults.check @@ -1,12 +1,42 @@ -names-defaults.scala:269: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected +names-defaults.scala:318: warning: value q 
has an inferred structural type: Test.Test3207_1.p.Inner{def g: Int} + members that can be accessed with a reflective call: def g: Int + val q = new p.Inner() { + ^ +names-defaults.scala:325: warning: value inner has an inferred structural type: this.Inner{def g: Int} + members that can be accessed with a reflective call: def g: Int + val inner = new Inner() { + ^ +names-defaults.scala:324: warning: value p has an inferred structural type: Test.P3207[Int]{val inner: this.Inner{def g: Int}} + members that can be accessed with a reflective call: val inner: this.Inner{def g: Int} + val p = new P3207[Int] { + ^ +names-defaults.scala:359: warning: the parameter name y is deprecated: use b instead + deprNam1(y = 10, a = 1) + ^ +names-defaults.scala:360: warning: the parameter name x is deprecated: use a instead + deprNam1(b = 2, x = 10) + ^ +names-defaults.scala:369: warning: the parameter name s is deprecated: use x instead + println(deprNam2.f(s = "dlf")) + ^ +names-defaults.scala:371: warning: the parameter name x is deprecated: use s instead + println(deprNam2.g(x = "sljkfd")) + ^ +names-defaults.scala:35: warning: local var var2 in value is never used + var var2 = 0 + ^ +names-defaults.scala:279: warning: local val u in method foo is never used + class A2489 { def foo(): Unit = { def bar(a: Int = 1) = a; bar(); val u = 0 } } + ^ +names-defaults.scala:280: warning: local val v in method foo is never used + class A2489x2 { def foo(): Unit = { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } + ^ +names-defaults.scala:280: warning: local val u in method foo is never used + class A2489x2 { def foo(): Unit = { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } + ^ +names-defaults.scala:269: warning: discarded pure expression does nothing spawn(b = { val ttt = 1; ttt }, a = 0) ^ -names-defaults.scala:269: warning: a pure expression does nothing in statement position - spawn(b = { val ttt = 1; ttt }, a = 0) - ^ -warning: four 
deprecations -warning: one deprecation (since 2.12.4) -warning: 5 deprecations in total; re-run with -deprecation for details 1: @ get: $ get: 2 diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala index 7f42776b6e39..136d656f7a5d 100644 --- a/test/files/run/names-defaults.scala +++ b/test/files/run/names-defaults.scala @@ -1,5 +1,5 @@ - -import scala.language.{ higherKinds, existentials } +//> using options -Xlint +import scala.language.existentials object Test extends App { def get[T](x: T) = { println("get: "+ x); x } @@ -24,12 +24,12 @@ object Test extends App { // anonymous functions { - def doMod(f: Int => Unit) { f(20) } + def doMod(f: Int => Unit): Unit = { f(20) } var var1 = 0 doMod(var1 = _) println(var1) - synchronized(var1 = 30) + synchronized { var1 = 30 } println(var1) var var2 = 0 @@ -119,7 +119,7 @@ object Test extends App { // vararg - def test5(a: Int, b: Int)(c: Int, d: String*) = a +", "+ d.toList + def test5(a: Int, b: Int)(c: Int, d: String*) = s"$a, ${d.toList}" println(test5(b = 1, a = 2)(3, "4", "4", "4")) println(test5(b = 1, a = 2)(c = 29)) @@ -265,7 +265,7 @@ object Test extends App { // #2290 def spawn(a: Int, b: => Unit) = { () } - def t { + def t(): Unit = { spawn(b = { val ttt = 1; ttt }, a = 0) } @@ -276,8 +276,8 @@ object Test extends App { case class A2390[T](x: Int) { def copy(a: Int)(b: Int = 0) = 0 } // #2489 - class A2489 { def foo { def bar(a: Int = 1) = a; bar(); val u = 0 } } - class A2489x2 { def foo { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } + class A2489 { def foo(): Unit = { def bar(a: Int = 1) = a; bar(); val u = 0 } } + class A2489x2 { def foo(): Unit = { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } // a bug reported on the mailing lists, related to #2489 class Test2489 { @@ -355,15 +355,15 @@ object Test extends App { (new DBLAH()) // deprecated names - def deprNam1(@deprecatedName('x) a: Int, @deprecatedName('y) b: Int) = a + b + 
def deprNam1(@deprecatedName("x") a: Int, @deprecatedName("y") b: Int) = a + b deprNam1(y = 10, a = 1) deprNam1(b = 2, x = 10) object deprNam2 { - def f(@deprecatedName('s) x: String) = 1 + def f(@deprecatedName("s") x: String) = 1 def f(s: Object) = 2 - def g(@deprecatedName('x) s: Object) = 3 + def g(@deprecatedName("x") s: Object) = 3 def g(s: String) = 4 } println(deprNam2.f(s = "dlf")) @@ -406,13 +406,13 @@ object Test extends App { println(f8177(a = 1, 1)) // DEFINITIONS - def test1(a: Int, b: String) = println(a +": "+ b) - def test2(u: Int, v: Int)(k: String, l: Int) = println(l +": "+ k +", "+ (u + v)) + def test1(a: Int, b: String) = println(s"$a: $b") + def test2(u: Int, v: Int)(k: String, l: Int) = println(l.toString +": "+ k +", "+ (u + v)) - def test3[T1, T2](a: Int, b: T1)(c: String, d: T2) = println(a +": "+ c +", "+ b +", "+ d) + def test3[T1, T2](a: Int, b: T1)(c: String, d: T2) = println(a.toString +": "+ c +", "+ b +", "+ d) def test4(a: Int) = { - def inner(b: Int = a, c: String) = println(b +": "+ c) + def inner(b: Int = a, c: String) = println(b.toString +": "+ c) inner(c = "/") } def test5(argName: Unit) = println("test5") @@ -425,13 +425,13 @@ object Test extends App { class Base { - def test1[T1, T2](a: Int = 100, b: T1)(c: T2, d: String = a +": "+ b)(e: T2 = c, f: Int) = - println(a +": "+ d +", "+ b +", "+ c +", "+ e +", "+ f) + def test1[T1, T2](a: Int = 100, b: T1)(c: T2, d: String = a.toString +": "+ b)(e: T2 = c, f: Int) = + println(a.toString +": "+ d +", "+ b +", "+ c +", "+ e +", "+ f) } class Sub1 extends Base { override def test1[U1, U2](b: Int, a: U1)(m: U2, r: String = "overridden")(o: U2, f: Int = 555) = - println(b +": "+ r +", "+ a +", "+ m +", "+ o +", "+ f) + println(b.toString +": "+ r +", "+ a +", "+ m +", "+ o +", "+ f) } @@ -480,13 +480,13 @@ class A2 { // using names / defaults in self constructor call. // overloading resolution: calling A3("string") picks the second, method with default is always less specific. 
class A3(x: String, y: Int = 10) { - def this(a: Object) { + def this(a: Object) = { this(y = 10, x = a.toString()) println(x) } } class A4(x: String, y: Int = 11) { - def this(b: Double, sep: String) { + def this(b: Double, sep: String) = { this(sep + b + sep) println(y) } diff --git a/test/files/run/newTags.scala b/test/files/run/newTags.scala index c5199d4e55d8..43c01256ee74 100644 --- a/test/files/run/newTags.scala +++ b/test/files/run/newTags.scala @@ -8,4 +8,4 @@ object Test extends App { println(implicitly[ApiUniverse#TypeTag[T]]) } foo[Map[String, String]] -} \ No newline at end of file +} diff --git a/test/files/run/no-pickle-skolems/Test_2.scala b/test/files/run/no-pickle-skolems/Test_2.scala index da55ad9df0cc..4414acd3417a 100644 --- a/test/files/run/no-pickle-skolems/Test_2.scala +++ b/test/files/run/no-pickle-skolems/Test_2.scala @@ -12,13 +12,13 @@ object Test { var seen: Set[Symbol] = Set() def id(s: Symbol): Int = s.asInstanceOf[{ def id: Int }].id - def check(s: Symbol) { + def check(s: Symbol): Unit = { if (!seen(s)) { seen += s if (s.name.toString == name) buf ::= s } } - def loop(t: Type) { + def loop(t: Type): Unit = { t match { case TypeRef(pre, sym, args) => loop(pre) ; check(sym) ; args foreach loop case PolyType(tparams, restpe) => tparams foreach { tp => check(tp) ; check(tp.owner) ; loop(tp.info) } ; loop(restpe) @@ -28,7 +28,7 @@ object Test { } loop(m) - buf.reverse.distinct map (s => s.name + "#" + id(s)) + buf.reverse.distinct map (s => s"${s.name}#${id(s)}") } def main(args: Array[String]): Unit = { diff --git a/test/files/run/noInlineUnknownIndy.check b/test/files/run/noInlineUnknownIndy.check index 7cc6d1b67521..3bfdad6ad791 100644 --- a/test/files/run/noInlineUnknownIndy.check +++ b/test/files/run/noInlineUnknownIndy.check @@ -1,5 +1,6 @@ newSource1.scala:1: warning: A_1::test()Ljava/lang/String; could not be inlined: -Failed to check if A_1::test()Ljava/lang/String; can be safely inlined to T without causing an 
IllegalAccessError. Checking instruction INVOKEDYNAMIC m()LA_1$Fun; [ +Failed to check if A_1::test()Ljava/lang/String; can be safely inlined to T without causing an IllegalAccessError. +Checking failed for instruction INVOKEDYNAMIC m()LA_1$Fun; [ // handle kind 0x6 : INVOKESTATIC not/java/lang/SomeLambdaMetafactory.notAMetaFactoryMethod(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite; // arguments: @@ -7,7 +8,7 @@ Failed to check if A_1::test()Ljava/lang/String; can be safely inlined to T with // handle kind 0x6 : INVOKESTATIC A_1.lambda$test$0(Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; - ] failed: + ]: The callee contains an InvokeDynamic instruction with an unknown bootstrap method (not a LambdaMetaFactory). class T { def foo = A_1.test } ^ diff --git a/test/files/run/noInlineUnknownIndy/Test.scala b/test/files/run/noInlineUnknownIndy/Test.scala index 89529bf9230e..8579e6aca238 100644 --- a/test/files/run/noInlineUnknownIndy/Test.scala +++ b/test/files/run/noInlineUnknownIndy/Test.scala @@ -1,33 +1,33 @@ import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.tools.asm.tree.{ClassNode, InvokeDynamicInsnNode} import scala.tools.asm.{Handle, Opcodes} import scala.tools.partest.BytecodeTest.modifyClassFile -import scala.tools.partest._ +import scala.tools.partest.DirectTest object Test extends DirectTest { - def code = ??? 
+ override def code = "class T { def foo = A_1.test }" - def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path, "-opt:l:inline", "-opt-inline-from:**", "-Yopt-inline-heuristics:everything", "-opt-warnings:_"))(code) + override def extraSettings = { + val classpath = List(sys.props("partest.lib"), testOutput.path).mkString(sys.props("path.separator")) + s"-cp $classpath -opt:inline:** -Yopt-inline-heuristics:everything -Wopt:_" } - def show(): Unit = { + override def show(): Unit = { val unknownBootstrapMethod = new Handle( Opcodes.H_INVOKESTATIC, "not/java/lang/SomeLambdaMetafactory", "notAMetaFactoryMethod", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;", /* itf = */ false) - modifyClassFile(new File(testOutput.toFile, "A_1.class"))((cn: ClassNode) => { - val testMethod = cn.methods.iterator.asScala.find(_.name == "test").head + modifyClassFile(new File(testOutput.jfile, "A_1.class"))((cn: ClassNode) => { + val testMethod = cn.methods.iterator.asScala.find(_.name == "test").get val indy = testMethod.instructions.iterator.asScala.collect({ case i: InvokeDynamicInsnNode => i }).next() indy.bsm = unknownBootstrapMethod cn }) - compileCode("class T { def foo = A_1.test }") + compile() } } diff --git a/test/files/run/nonfatal.scala b/test/files/run/nonfatal.scala new file mode 100644 index 000000000000..60e2becd8eaa --- /dev/null +++ b/test/files/run/nonfatal.scala @@ -0,0 +1,33 @@ + +//> using options -opt:none +// +import scala.tools.partest.BytecodeTest +import scala.tools.testkit.ASMConverters.instructionsFromMethod +import org.junit.Assert.assertEquals + +object Test extends BytecodeTest { + def show(): Unit = { + val classNode = loadClassNode("C") + val f = getMethod(classNode, "f") + val g = getMethod(classNode, "g") + 
assertEquals(instructionsFromMethod(f), instructionsFromMethod(g)) + //sameBytecode(f, g) // prints + new C().run() // should not crash + } +} + +class C { + import scala.util.control.NonFatal + def x = 42 + def f = try x catch { case NonFatal(e) => e.printStackTrace(); -1 } + def g = try x catch { case e: Throwable if NonFatal(e) => e.printStackTrace(); -1 } + + def run(): Unit = { + val any: Any = 42 + + any match { + case NonFatal(e) => ??? + case _ => + } + } +} diff --git a/test/files/run/nonlocalreturn.check b/test/files/run/nonlocalreturn.check deleted file mode 100644 index aeb2d5e2398d..000000000000 --- a/test/files/run/nonlocalreturn.check +++ /dev/null @@ -1 +0,0 @@ -Some(1) diff --git a/test/files/run/nonlocalreturn.scala b/test/files/run/nonlocalreturn.scala index 3c1e7420eda8..3d6b18507ada 100644 --- a/test/files/run/nonlocalreturn.scala +++ b/test/files/run/nonlocalreturn.scala @@ -1,3 +1,4 @@ +//> using options -Xlint:-nonlocal-return object Test { def wrap[K](body: => K): K = body @@ -5,9 +6,7 @@ object Test { wrap({ return Some(1) ; None }) } - def main(args: Array[String]) { - println(f()) - } + def main(args: Array[String]): Unit = assert(f() == Some(1)) } // java.lang.ClassCastException: scala.Some cannot be cast to scala.None$ // at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5) diff --git a/test/files/run/nothingTypeDce.scala b/test/files/run/nothingTypeDce.scala index dc7d0493d0ab..e39efaf53b61 100644 --- a/test/files/run/nothingTypeDce.scala +++ b/test/files/run/nothingTypeDce.scala @@ -1,4 +1,5 @@ -// scalac: -opt:unreachable-code +//> using options -opt:unreachable-code +// // See comment in BCodeBodyBuilder // -opt:unreachable-code diff --git a/test/files/run/nothingTypeNoOpt.scala b/test/files/run/nothingTypeNoOpt.scala index bb8e858e46ed..efa6e41cfae1 100644 --- a/test/files/run/nothingTypeNoOpt.scala +++ b/test/files/run/nothingTypeNoOpt.scala @@ -1,4 +1,5 @@ -// scalac: -opt:l:none +//> using options -opt:none +// // See comment in 
BCodeBodyBuilder // -target:jvm-1.6 -opt:l:none diff --git a/test/files/run/nowarn.scala b/test/files/run/nowarn.scala new file mode 100644 index 000000000000..e49ce061090a --- /dev/null +++ b/test/files/run/nowarn.scala @@ -0,0 +1,18 @@ + +import scala.tools.nsc.{Driver, Global} +import scala.reflect.internal.util.NoPosition + +// ensure that nowarn is respected by the default reporter + +class Driven extends Driver { + override protected def processSettingsHook(): Boolean = { + settings.nowarn.value = true + true + } + protected def newCompiler(): Global = Global(settings, reporter) + override protected def doCompile(compiler: Global): Unit = reporter.warning(NoPosition, s"I can't do anything for ${reporter.getClass}.") + def run(): Unit = process(Array("file.scala")) +} +object Test { + def main(args: Array[String]): Unit = new Driven().run() +} diff --git a/test/files/run/null-and-intersect.scala b/test/files/run/null-and-intersect.scala index 7266dabe6df4..04552635551d 100644 --- a/test/files/run/null-and-intersect.scala +++ b/test/files/run/null-and-intersect.scala @@ -1,9 +1,10 @@ object Test { + trait Immutable trait Immortal class Bippy extends Immutable with Immortal class Boppy extends Immutable - def f[T](x: Traversable[T]) = x match { + def f[T](x: Iterable[T]) = x match { case _: Map[_, _] => 3 case _: Seq[_] => 2 case _: Iterable[_] => 1 diff --git a/test/files/run/null-hash.scala b/test/files/run/null-hash.scala index 9b1f28b083c1..f74bee77d576 100644 --- a/test/files/run/null-hash.scala +++ b/test/files/run/null-hash.scala @@ -1,5 +1,5 @@ object Test { - def f1 = List(5, 10, null: String).## + def f1 = List[Any](5, 10, null: String).## def f2(x: Any) = x.## def f3 = ((55, "abc", null: List[Int])).## diff --git a/test/files/run/number-parsing.scala b/test/files/run/number-parsing.scala index 5627ee900627..605ead72d453 100644 --- a/test/files/run/number-parsing.scala +++ b/test/files/run/number-parsing.scala @@ -8,24 +8,25 @@ object Test { 
assert((MinusZero: scala.Float) == (PlusZero: scala.Float)) assert(!(MinusZero equals PlusZero)) - List( - -5f.max(2) , - -5f max 2 , - -5.max(2) , - -5 max 2 - ) foreach (num => assert(num == 2)) + assert(List[AnyVal]( + -5f.max(2), + -5f max 2, + -5.max(2), + -5 max 2, + ).forall(_ == 2)) } case class Foo(val x: Double) { def unary_- : Foo = Foo(-x) def +(other: Foo): Foo = Foo(x + other.x) } - def objTests = { + def objTests() = { assert(-Foo(5.0) + Foo(10.0) == Foo(5.0)) assert(-Foo(5.0).+(Foo(10.0)) == Foo(-15.0)) } def main(args: Array[String]): Unit = { numTests() + objTests() } } diff --git a/test/files/run/numeric-range.scala b/test/files/run/numeric-range.scala index 4645db6ef026..9cf8e605e856 100644 --- a/test/files/run/numeric-range.scala +++ b/test/files/run/numeric-range.scala @@ -3,7 +3,7 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val r = 'a' to 'z' for (i <- -2 to (r.length + 2)) { assert(r.take(i) == r.toList.take(i), (i, r.take(i))) diff --git a/test/files/run/optimizer-array-load.scala b/test/files/run/optimizer-array-load.scala index a4d76f73853a..c09b47f274f9 100644 --- a/test/files/run/optimizer-array-load.scala +++ b/test/files/run/optimizer-array-load.scala @@ -5,7 +5,7 @@ object Test { while (x<=5) { println(x) - val a = ar(x) + @annotation.unused val a = ar(x) x+=1 } } diff --git a/test/files/run/outertest.check b/test/files/run/outertest.check new file mode 100644 index 000000000000..a89412606485 --- /dev/null +++ b/test/files/run/outertest.check @@ -0,0 +1 @@ +warning: 1 deprecation (since 2.13.0); re-run with -deprecation for details diff --git a/test/files/run/outertest.scala b/test/files/run/outertest.scala index fa0443f66948..3d9d3125105b 100644 --- a/test/files/run/outertest.scala +++ b/test/files/run/outertest.scala @@ -43,7 +43,7 @@ object Test extends App { assert((new b.L).bar eq b) assert((new c.M).bar eq c) - def checkOuterFields[C: ClassTag](expected: Int) { + def 
checkOuterFields[C: ClassTag](expected: Int): Unit = { val cls = implicitly[ClassTag[C]].runtimeClass val outerFields = cls.getDeclaredFields().filter(_.getName.contains("$outer")) assert(outerFields.size == expected, outerFields.map(_.getName)) diff --git a/test/files/run/overloads.scala b/test/files/run/overloads.scala index e84fef021365..593d1f884593 100644 --- a/test/files/run/overloads.scala +++ b/test/files/run/overloads.scala @@ -40,8 +40,8 @@ object overloads { if (value == "\u0000") value = "\\u0000"; Console.print(": " + what + " = " + value); if (!success) Console.print(" != " + expected); - Console.println; - Console.flush; + Console.println() + Console.flush() } def - = 0; @@ -52,7 +52,7 @@ object overloads { def --(c: Char) = c; def --(i: Int) = i; - def test: Unit = { + def test(): Unit = { check("-('a')", -('a'), -97); check("-(97)", -(97), -97); @@ -87,7 +87,7 @@ object overloads { object Test { def main(args: Array[String]): Unit = { - overloads.test; + overloads.test() } } diff --git a/test/files/run/package-object-stale-decl.scala b/test/files/run/package-object-stale-decl.scala new file mode 100644 index 000000000000..bbf1ba7cda16 --- /dev/null +++ b/test/files/run/package-object-stale-decl.scala @@ -0,0 +1,40 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + class V1 { + def pkg = "package object b extends B" + def B = "package b; class B { def stale = 42 }" + def A = "package b; class A { stale }" + } + class V2 extends V1 { + override def B = "package b; class B { }" + } + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + val v1 = new V1 + val v2 = new V2 + compiles(v1.A, v1.B, v1.pkg)() + delete(testOutput / "b" / "A.class") + compiles(v2.B, v2.A)(Some("not found: value stale")) + } + + def compiles(codes: String*)(expectedError: Option[String] = None) = { + val global = 
newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + val reporterOutput = storeReporter.infos.map(x => x.pos.showError(x.msg)).mkString("\n") + expectedError match { + case None => + assert(!global.reporter.hasErrors, reporterOutput) + case Some(text) => + assert(global.reporter.hasErrors, "expected compile failure, got success") + assert(reporterOutput.contains(text), reporterOutput) + } + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-toolbox.scala b/test/files/run/package-object-toolbox.scala new file mode 100644 index 000000000000..d84a7e3c2668 --- /dev/null +++ b/test/files/run/package-object-toolbox.scala @@ -0,0 +1,40 @@ +import java.io.File +import java.net.URLClassLoader + +import scala.reflect.io.Path +import scala.reflect.runtime.{ universe => ru } +import scala.tools.partest._ +import scala.tools.reflect.ToolBox + +import org.junit.Assert._ + +object Test extends StoreReporterDirectTest { + val cp = List(sys.props("partest.lib"), testOutput.path) + override def extraSettings = s"-cp ${cp.mkString(File.pathSeparator)}" + + def show(): Unit = { + compiles("package object pkg { def foo = 1 }") + val loader = new URLClassLoader(cp.map(new File(_).toURI.toURL).toArray) + val mirror = ru.runtimeMirror(loader) + + val toolbox = mirror.mkToolBox() + val result1 = toolbox.eval(toolbox.parse("pkg.foo")) + assertEquals(1, result1) + + val obj = toolbox.eval(toolbox.parse("pkg.`package`")) + val pkg = mirror.staticPackage("pkg") + val sym = pkg.info.decl(ru.TermName("foo")).asMethod + val meth = mirror.reflect(obj).reflectMethod(sym) + val res2 = meth.apply() + assertEquals(1, res2) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => 
assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala b/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala new file mode 100644 index 000000000000..9d467f714044 --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala @@ -0,0 +1,25 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def A = "package b; class A" + def pkg = "package object b extends A" + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + compiles(A, pkg) + delete(testOutput / "b" / "A.class") + compiles(A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala b/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala new file mode 100644 index 000000000000..123de8d847b1 --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala @@ -0,0 +1,26 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def A = "package b; class A" + def pkg = "package object b extends A" + def M = "package b; class M" + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + compiles(A, pkg, M) + delete(testOutput / "b" / "A.class") + compiles(M, A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources 
newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor.scala b/test/files/run/package-object-with-inner-class-in-ancestor.scala new file mode 100644 index 000000000000..03e1c561de0d --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor.scala @@ -0,0 +1,33 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + class V1 { + def O = "package b; object O { def o = \"\" }" + def A = "package b; class A { class C { O.o } }" + def pkg = "package object b extends A" + } + class V2 extends V1 { + override def O = "package b; object O { def o = 42 }" + } + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + val v1 = new V1 + compiles(v1.O, v1.A, v1.pkg) + delete(testOutput / "b" / "A.class", testOutput / "b" / "A$C.class") + val v2 = new V2 + compiles(v2.O, v2.A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/parmap-ops.scala b/test/files/run/parmap-ops.scala deleted file mode 100644 index 4274460c9d35..000000000000 --- a/test/files/run/parmap-ops.scala +++ /dev/null @@ -1,48 +0,0 @@ -import collection._ - -object Test { - - def main(args: Array[String]) { - val gm: GenMap[Int, Int] = GenMap(0 -> 0, 1 -> 1).par - - // ops - assert(gm.isDefinedAt(1)) - assert(gm.contains(1)) - assert(gm.getOrElse(1, 2) == 1) - assert(gm.getOrElse(2, 3) == 3) - assert(gm.keysIterator.toSet == Set(0, 1)) - 
assert(gm.valuesIterator.toSet == Set(0, 1)) - assert(gm.keySet == Set(0, 1)) - assert(gm.keys.toSet == Set(0, 1)) - assert(gm.values.toSet == Set(0, 1)) - try { - gm.default(-1) - assert(false) - } catch { - case e: NoSuchElementException => // ok - } - - assert(gm.filterKeys(_ % 2 == 0)(0) == 0) - assert(gm.filterKeys(_ % 2 == 0).get(1) == None) - assert(gm.mapValues(_ + 1)(0) == 1) - - // with defaults - val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1) - val dm = pm.withDefault(x => -x) - assert(dm(0) == 0) - assert(dm(1) == 1) - assert(dm(2) == -2) - assert(dm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2)) - dm.put(3, 3) - assert(dm(3) == 3) - assert(pm(3) == 3) - assert(dm(4) == -4) - - val imdm = parallel.immutable.ParMap(0 -> 0, 1 -> 1).withDefault(x => -x) - assert(imdm(0) == 0) - assert(imdm(1) == 1) - assert(imdm(2) == -2) - assert(imdm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2)) - } - -} diff --git a/test/files/run/partialfun.check b/test/files/run/partialfun.check index d4e9f494cd6f..72e9f19f2288 100644 --- a/test/files/run/partialfun.check +++ b/test/files/run/partialfun.check @@ -4,3 +4,41 @@ 0:isDefinedAt 1:isDefinedAt 2:apply + +false +true +Vector(1, 2, 3, 4, 5) +Vector(1, 2, 3, 4, 5) + +testing function literal syntax with methods overloaded for Function1 and PartialFunction +base case: a method that takes a Function1, so no overloading +fn only +fn only +fn only + +base case: a method that takes a PartialFunction, so no overloading +pf only +pf only +pf only + +test case: a method that is overloaded for Function1 and PartialFunction +fn wins +pf wins +pf wins + +testing eta-expansion with methods overloaded for Function1 and PartialFunction +base case: a method that takes a Function1, so no overloading +fn only +fn only +fn only +fn only + +base case: a method that takes a PartialFunction, so no overloading +pf only +pf only +pf only + +test case: a method that is overloaded for Function1 and PartialFunction +fn wins +fn 
wins +fn wins diff --git a/test/files/run/partialfun.scala b/test/files/run/partialfun.scala index 71c7d3e61c1c..b5fac5a1231c 100644 --- a/test/files/run/partialfun.scala +++ b/test/files/run/partialfun.scala @@ -1,20 +1,19 @@ import collection._ -import collection.generic._ object Test { - def collectIDA[A, B, Repr, That](_this: TraversableLike[A, Repr])(pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { + def collectIDA[A, B, CC[_], Repr, That](_this: IterableOps[A, CC, Repr])(pf: PartialFunction[A, B])(implicit bf: BuildFrom[Repr, B, That]): That = { val repr: Repr = _this.asInstanceOf[Repr] - val b = bf(repr) + val b = bf.newBuilder(repr) _this foreach { x => if (pf isDefinedAt x) b += pf(x) } - b.result + b.result() } - def collectRW[A, B, Repr, That](_this: TraversableLike[A, Repr])(pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { + def collectRW[A, B, CC[_], Repr, That](_this: IterableOps[A, CC, Repr])(pf: PartialFunction[A, B])(implicit bf: BuildFrom[Repr, B, That]): That = { val repr: Repr = _this.asInstanceOf[Repr] - val b = bf(repr) + val b = bf.newBuilder(repr) val f = pf runWith { b += _ } _this foreach f - b.result + b.result() } var cnt = 0 @@ -40,7 +39,7 @@ object Test { case Ex2(result) => result } - def collectTest() { + def collectTest(): Unit = { val xs = 1 to 100 resetCnt() @@ -61,17 +60,17 @@ object Test { println(cntRW) } - def orElseTest() { + def orElseTest(): Unit = { val pf0 = new PartialFunction[Unit, Unit] { - def apply(u: Unit) { println("0:apply") } + def apply(u: Unit): Unit = { println("0:apply") } def isDefinedAt(u: Unit) = { println("0:isDefinedAt"); false } } val pf1 = new PartialFunction[Unit, Unit] { - def apply(u: Unit) { println("1:apply") } + def apply(u: Unit): Unit = { println("1:apply") } def isDefinedAt(u: Unit) = { println("1:isDefinedAt"); false } } val pf2 = new PartialFunction[Unit, Unit] { - def apply(u: Unit) { println("2:apply") } + def apply(u: Unit): Unit 
= { println("2:apply") } def isDefinedAt(u: Unit) = { println("2:isDefinedAt"); true } } @@ -79,8 +78,75 @@ object Test { chained(()) } + def fromFunctionLiteralTest(): Unit = { + def isEven(n: Int): Boolean = PartialFunction.cond(n)(_ % 2 == 0) + println(isEven(1)) + println(isEven(2)) + println((1 to 5).map(_.toString)) + println((1 to 5).collect(_.toString)) + } + + def takeFunction1(fn: String => String) = println("fn only") + def takePartialFunction(pf: PartialFunction[String, String]) = println("pf only") + def takeFunctionLike(fn: String => String) = println("fn wins") + def takeFunctionLike(pf: PartialFunction[String, String]) = println("pf wins") + + def testOverloadingWithFunction1(): Unit = { + println("testing function literal syntax with methods overloaded for Function1 and PartialFunction") + println("base case: a method that takes a Function1, so no overloading") + takeFunction1(_.reverse) + takeFunction1 { case s => s.reverse } + takeFunction1 { case s: String => s.reverse } + println() + + println("base case: a method that takes a PartialFunction, so no overloading") + takePartialFunction(_.reverse) + takePartialFunction { case s => s.reverse } + takePartialFunction { case s: String => s.reverse } + println() + + println("test case: a method that is overloaded for Function1 and PartialFunction") + takeFunctionLike(_.reverse) + takeFunctionLike { case s => s.reverse } + takeFunctionLike { case s: String => s.reverse } + } + + def reverse(s: String): String = s.reverse + + def testEtaExpansion(): Unit = { + println("testing eta-expansion with methods overloaded for Function1 and PartialFunction") + println("base case: a method that takes a Function1, so no overloading") + takeFunction1(x => reverse(x)) + takeFunction1(reverse(_)) + takeFunction1(reverse _) + takeFunction1(reverse) + println() + + println("base case: a method that takes a PartialFunction, so no overloading") + takePartialFunction(x => reverse(x)) + takePartialFunction(reverse(_)) + 
takePartialFunction(reverse _) + //takePartialFunction(reverse) // can't pass a method to a method that takes a PartialFunction + println() + + println("test case: a method that is overloaded for Function1 and PartialFunction") + takeFunctionLike(x => reverse(x)) + takeFunctionLike(reverse(_)) + takeFunctionLike(reverse _) + //takeFunctionLike(reverse) // can't pass a method to a method overloaded to take a Function1 or a PartialFunction + } + def main(args: Array[String]): Unit = { collectTest() orElseTest() + println() + + fromFunctionLiteralTest() + println() + + testOverloadingWithFunction1() + println() + + testEtaExpansion() } } diff --git a/test/files/run/patmat-behavior-2.scala b/test/files/run/patmat-behavior-2.scala index 04c57a94918a..b509ae5930bf 100644 --- a/test/files/run/patmat-behavior-2.scala +++ b/test/files/run/patmat-behavior-2.scala @@ -1,3 +1,5 @@ +//scalac: -Xmaxwarns 0 -Xlint:-stars-align +// case class Foo(x: Int, ys: Int*) { // We write our own toString because of scala/bug#7735 override def toString = (x +: ys).mkString("Foo(", ", ", ")") diff --git a/test/files/run/patmat-behavior.check b/test/files/run/patmat-behavior.check index 273a1434fbd3..ec81fbb143e2 100644 --- a/test/files/run/patmat-behavior.check +++ b/test/files/run/patmat-behavior.check @@ -1,90 +1,234 @@ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ +patmat-behavior.scala:43: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gb1[A](x: C[A]) = x match { case E00() => ??? 
; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:44: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gb2[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:45: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gb3[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:46: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gb4[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:47: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gb5[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:48: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gb6[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:50: warning: match may not be exhaustive. 
+It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gc1[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:51: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gc2[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:52: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gc3[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:53: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gc4[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:54: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gc5[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:55: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gc6[A](x: C[A]) = x match { case F00() => ??? 
; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:57: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gd1[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:58: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gd2[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:59: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gd3[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:60: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gd4[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:61: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gd5[A, B <: C[A]](x: B) = x match { case F00() => ??? 
; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:62: warning: match may not be exhaustive. +It would fail on the following inputs: C00(), C01(_), C10(_), C11(_, _), C20(_, _), C21(_, _, _) + def gd6[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:68: warning: match may not be exhaustive. +It would fail on the following input: C00() + def gb1[A](x: C00[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:69: warning: match may not be exhaustive. +It would fail on the following input: C10(_) + def gb2[A](x: C10[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:70: warning: match may not be exhaustive. +It would fail on the following input: C20(_, _) + def gb3[A](x: C20[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:71: warning: match may not be exhaustive. +It would fail on the following input: C01(_) + def gb4[A](x: C01[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:72: warning: match may not be exhaustive. +It would fail on the following input: C11(_, _) + def gb5[A](x: C11[A]) = x match { case E00() => ??? 
; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:73: warning: match may not be exhaustive. +It would fail on the following input: C21(_, _, _) + def gb6[A](x: C21[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:75: warning: match may not be exhaustive. +It would fail on the following input: C00() + def gc1[A](x: C00[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:76: warning: match may not be exhaustive. +It would fail on the following input: C10(_) + def gc2[A](x: C10[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:77: warning: match may not be exhaustive. +It would fail on the following input: C20(_, _) + def gc3[A](x: C20[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:78: warning: match may not be exhaustive. +It would fail on the following input: C01(_) + def gc4[A](x: C01[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:79: warning: match may not be exhaustive. +It would fail on the following input: C11(_, _) + def gc5[A](x: C11[A]) = x match { case F00() => ??? 
; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:80: warning: match may not be exhaustive. +It would fail on the following input: C21(_, _, _) + def gc6[A](x: C21[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:82: warning: match may not be exhaustive. +It would fail on the following input: C00() + def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:83: warning: match may not be exhaustive. +It would fail on the following input: C10(_) + def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:84: warning: match may not be exhaustive. +It would fail on the following input: C20(_, _) + def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:85: warning: match may not be exhaustive. +It would fail on the following input: C01(_) + def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:86: warning: match may not be exhaustive. +It would fail on the following input: C11(_, _) + def gd5[A](x: C11[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } + ^ +patmat-behavior.scala:87: warning: match may not be exhaustive. +It would fail on the following input: C21(_, _, _) + def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } + ^ diff --git a/test/files/run/patmat-exprs.check b/test/files/run/patmat-exprs.check deleted file mode 100644 index b6df9385faa0..000000000000 --- a/test/files/run/patmat-exprs.check +++ /dev/null @@ -1 +0,0 @@ -((5 + 10) + 300) diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala index d18df9c7148f..faa2b2b75d7b 100644 --- a/test/files/run/patmat-exprs.scala +++ b/test/files/run/patmat-exprs.scala @@ -1,3 +1,5 @@ +//> using options -Werror -Xlint +// import scala.language.{ implicitConversions } import runtime.ScalaRunTime @@ -29,7 +31,7 @@ object Test { } def main(args: Array[String]): Unit = { - println((5: Expr[Int]) + 10 + 15 * 20) + assert("((5 + 10) + 300)" == ((5: Expr[Int]) + 10 + 15 * 20).toString) } } @@ -95,7 +97,7 @@ trait Pattern { val i1 = a.iterator val i2 = b.iterator while (i1.hasNext && i2.hasNext) - if (!similar(i1.next, i2.next)) + if (!similar(i1.next(), i2.next())) return false; true; } @@ -154,7 +156,7 @@ trait Pattern { if (f.isDefinedAt(this)) (f(this) :: a) else a } - def leaves: List[Leaf[T]] = collect { case l: Leaf[T] => l } + def leaves: List[Leaf[T]] = collect { case l: Leaf[T @unchecked] => l } def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) def - (other: Expr[T])(implicit n: NumericOps[T]) = Sub(this, other) @@ -210,22 +212,22 @@ trait Pattern { private def reduce(implicit num: NumericOps[T]): Expr[T] = { this match { case Add(Seq(Neg(x), Neg(y), Neg(z))) => Neg(Add(List(x, y, z))) - case Add(Seq(Mul(x, y), z)) if (x == z) => Mul(x, 
Add(List(y, One[T]))) - case Add(Seq(Mul(x, y), z)) if (y == z) => Mul(y, Add(List(z, One[T]))) + case Add(Seq(Mul(x, y), z)) if (x == z) => Mul(x, Add(List(y, One[T]()))) + case Add(Seq(Mul(x, y), z)) if (y == z) => Mul(y, Add(List(z, One[T]()))) case Add(Seq(Mul(x, y), Mul(u, w))) if (x == u) => Mul(x, Add(List(y, w))) case Add(Seq(Mul(x, y), Mul(u, w))) if (y == w) => Mul(y, Add(List(x, u))) case Add(Seq(Add(x), Add(y))) => Add(x.toList ::: y.toList).simplify case Add(Seq(Add(x), y)) => Add(y :: x.toList).simplify case Add(Seq(x, Add(y))) => Add(x :: y.toList).simplify case Add(x) => { - val noZeros = x.filter(_ != Zero[T]) + val noZeros = x.filter(_ != Zero[T]()) val noOnes = noZeros.map { case y: One[_] => Const(num.one); case y => y } val constant = num.sum(noOnes.collect { case c: Const[T] => c.value }) val rest = noOnes.filter(x => !x.isInstanceOf[Const[_]]).toList val reduced = reduceComponents(rest) val args = if (num.similar(constant, num.zero)) reduced else reduced ::: Const(constant) :: Nil args.size match { - case 0 => Zero[T] + case 0 => Zero[T]() case 1 => args.head case 2 => Add2(args(0), args(1)) case 3 => Add3(args(0), args(1), args(2)) @@ -234,20 +236,20 @@ trait Pattern { } case Sub(x: Zero[_], y) => Neg(y) case Sub(x, y: Zero[_]) => x - case Sub(x, y) if x == y => Zero[T] - case Sub(Mul(x, y), z) if (x == z) => Mul(x, Sub(y, One[T])) - case Sub(Mul(x, y), z) if (y == z) => Mul(y, Sub(z, One[T])) + case Sub(x, y) if x == y => Zero[T]() + case Sub(Mul(x, y), z) if (x == z) => Mul(x, Sub(y, One[T]())) + case Sub(Mul(x, y), z) if (y == z) => Mul(y, Sub(z, One[T]())) case Sub(Mul(x, y), Mul(u, w)) if (x == u) => Mul(x, Sub(y, w)) case Sub(Mul(x, y), Mul(u, w)) if (y == w) => Mul(y, Sub(x, u)) - case Mul(x: Zero[_], y) => Zero[T] - case Mul(x, y: Zero[_]) => Zero[T] + case Mul(x: Zero[_], y) => Zero[T]() + case Mul(x, y: Zero[_]) => Zero[T]() case Mul(x: One[_], y) => y case Mul(x, y: One[_]) => x case Mul(Neg(x: One[_]), y) => Neg(y) case Mul(x, 
Neg(y: One[_])) => Neg(x) case Mul(x, y) if (x == y) => Sqr(x) - case Div(x: Zero[_], y) => Zero[T] // warning: possibly extends domain + case Div(x: Zero[_], y) => Zero[T]() // warning: possibly extends domain case Div(x, y: One[_]) => x case Div(Sqr(x), y) if x == y => x case Div(Mul(x, y), z) if (x == z) => y @@ -261,12 +263,12 @@ trait Pattern { case Div(x: One[_], y) => Inv(y) case Div(x, Sqr(y)) if x == y => Inv(y) case Div(Mul(x, y), Sqr(Mul(u, w))) if x == u && y == w => Inv(Mul(x, y)) - case Div(x, y) if x == y => One[T] + case Div(x, y) if x == y => One[T]() case Mul(Neg(a), Neg(b)) => Mul(a, b) case Div(Neg(a), Neg(b)) => Div(a, b) - case Neg(x: Zero[_]) => Zero[T] + case Neg(x: Zero[_]) => Zero[T]() case Neg(x: One[_]) => Const(num.neg(num.one)) case Sub(Const(x), Const(y)) => const(num.sub(x, y)) case Mul(Const(x), Const(y)) => const(num.mul(x, y)) @@ -279,8 +281,8 @@ trait Pattern { case Mul(Mul(Const(y), z), Const(x)) => Mul(const(num.mul(x, y)), z) case Mul(Mul(y, Const(z)), Const(x)) => Mul(const(num.mul(x, z)), y) - case Const(x) if x == num.one => One[T] - case Const(x) if x == num.zero => Zero[T] + case Const(x) if x == num.one => One[T]() + case Const(x) if x == num.zero => Zero[T]() case Sub(x, Neg(y)) => Add(List(x, y)) case Sub(Neg(x), y) => Neg(Add(List(x, y))) @@ -299,7 +301,7 @@ trait Pattern { private def optimizeWith(f: Expr[T] => Expr[T]): Expr[T] = { f(mapArgs(EndoFunction[Expr[_]]( - a => a match { case x: Expr[T] => x.optimizeWith(f) } + a => a match { case x: Expr[T @unchecked] => x.optimizeWith(f) } ))) } @@ -338,26 +340,26 @@ trait Pattern { trait NonZero[T] extends Expr[T] case class Const[T](value: T)(implicit num: NumericOps[T]) extends Leaf[T] with NonZero[T] { - def derivative(variable: Var[T]) = Zero[T] + def derivative(variable: Var[T]) = Zero[T]() def eval(f: Any => Any) = value override def toString = value.toString } case class Zero[T]()(implicit num: NumericOps[T]) extends Leaf[T] { - def derivative(variable: Var[T]) = 
Zero[T] + def derivative(variable: Var[T]) = Zero[T]() def eval(f: Any => Any) = num.zero override def toString = "0" } case class One[T]()(implicit num: NumericOps[T]) extends Leaf[T] { - def derivative(variable: Var[T]) = Zero[T] + def derivative(variable: Var[T]) = Zero[T]() def eval(f: Any => Any) = num.one override def toString = "1" } abstract class Var[T](implicit num: NumericOps[T]) extends Leaf[T] { - def derivative(variable: Var[T]) = if (variable == this) One[T] else Zero[T] + def derivative(variable: Var[T]) = if (variable == this) One[T]() else Zero[T]() def eval(f: Any => Any) = f(this).asInstanceOf[T] } @@ -462,7 +464,7 @@ trait Pattern { def derivative(v: Var[T]) = Mul(Mul(Const(num.two), expr), expr.derivative(v)) def eval(f: Any => Any) = num.sqr(expr.eval(f)) def mapArgs(f: EndoFunction[Expr[_]]) = Sqr(f(expr)) - override def toString = expr + " ^ 2" + override def toString = expr.toString + " ^ 2" override lazy val hashCode = ScalaRunTime._hashCode(this); } @@ -510,9 +512,7 @@ trait Pattern { override lazy val hashCode = ScalaRunTime._hashCode(this); } - - abstract class Compare[T](left: Expr[T], right: Expr[T], cmp: (T, T) => Boolean)(implicit num: NumericOps[T]) - extends Expr[Boolean] { + abstract class Compare[T: NumericOps](left: Expr[T], right: Expr[T], cmp: (T, T) => Boolean) extends Expr[Boolean] { def derivative(v: Var[Boolean]) = throw new IllegalStateException("Derivative of Boolean not allowed") def eval(f: Any => Any) = cmp(left.eval(f), right.eval(f)) val args = List(left, right) @@ -563,7 +563,7 @@ trait Pattern { object Expr { /** Creates a constant expression */ def const[T](value: T)(implicit num: NumericOps[T]): Leaf[T] = - if (num.zero == value) Zero[T] + if (num.zero == value) Zero[T]() else Const(value) implicit def double2Constant[T](d: Double)(implicit num: NumericOps[T]): Leaf[T] = diff --git a/test/files/run/patmat-finally.scala b/test/files/run/patmat-finally.scala index 19cf1dd635e1..78be86fef977 100644 --- 
a/test/files/run/patmat-finally.scala +++ b/test/files/run/patmat-finally.scala @@ -1,6 +1,6 @@ /** Test pattern matching and finally, see scala/bug#5929. */ object Test extends App { - def bar(s1: Object, s2: Object) { + def bar(s1: Object, s2: Object): Unit = { s1 match { case _ => } diff --git a/test/files/run/patmat-mix-case-extractor.scala b/test/files/run/patmat-mix-case-extractor.scala index 964e6f743c67..fac2c0a6320f 100644 --- a/test/files/run/patmat-mix-case-extractor.scala +++ b/test/files/run/patmat-mix-case-extractor.scala @@ -1,3 +1,4 @@ +//> using options -Xmaxwarns 0 trait CaseClass trait ProdCaseClass extends CaseClass { def x: Int } trait SeqCaseClass extends CaseClass { def xs: Seq[Int] } @@ -10,7 +11,7 @@ case class CaseClass4(x: Int, xs: Int*) extends ProdCaseClass with SeqCaseClass object Extractor1 { def unapply(x: CaseClass): Boolean = false } object Extractor2 { def unapplySeq(x: SeqCaseClass): Option[Seq[Int]] = Some(x.xs) } object Extractor3 { def unapply(x: ProdCaseClass): Option[Int] = Some(x.x) } -object Extractor4 { def unapplySeq(x: ProdCaseClass with SeqCaseClass): Option[(Int, Seq[Int])] = Some(x.x, x.xs) } +object Extractor4 { def unapplySeq(x: ProdCaseClass with SeqCaseClass): Option[(Int, Seq[Int])] = Some((x.x, x.xs)) } class A { def f1(x: Any) = x match { @@ -35,7 +36,7 @@ class A { case CaseClass4(x, xs @ _*) => x + xs.sum case _ => -2 } - def run() { + def run(): Unit = { List( f1(CaseClass1()), f1(CaseClass2(1, 2, 3)), @@ -51,7 +52,7 @@ class A { object Test { def main(args: Array[String]): Unit = { - (new A).run + (new A).run() } } diff --git a/test/files/run/patmat-no-inline-isEmpty.check b/test/files/run/patmat-no-inline-isEmpty.check new file mode 100644 index 000000000000..60ed79c25c04 --- /dev/null +++ b/test/files/run/patmat-no-inline-isEmpty.check @@ -0,0 +1,36 @@ +[[syntax trees at end of patmat]] // newSource1.scala +package { + object A extends scala.AnyRef { + def (): A.type = { + A.super.(); + () + }; + def 
unapplySeq(a: Int): Wrap = new Wrap(a) + }; + class T extends scala.AnyRef { + def (): T = { + T.super.(); + () + }; + def t: Any = { + case val x1: Int(2) = 2; + case5(){ + val o7: Wrap = A.unapplySeq(x1); + if (o7.isEmpty.unary_!) + { + val xs: Seq[Int] = o7.get.toSeq; + matchEnd4(xs) + } + else + case6() + }; + case6(){ + matchEnd4("other") + }; + matchEnd4(x: Any){ + x + } + } + } +} + diff --git a/test/files/run/patmat-no-inline-isEmpty.scala b/test/files/run/patmat-no-inline-isEmpty.scala new file mode 100644 index 000000000000..52fb76d1ccf4 --- /dev/null +++ b/test/files/run/patmat-no-inline-isEmpty.scala @@ -0,0 +1,31 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + def depCode = + """class Wrap(private val a: Int) extends AnyVal { + | def isEmpty: false = { println("confirm seq isEmpty method doesn't get elided"); false } + | def get = this + | def lengthCompare(len: Int) = Integer.compare(1, len) + | def apply(i: Int) = if (i == 0) a else Nil(i) + | def drop(n: Int): scala.Seq[Int] = if (n == 0) toSeq else Nil + | def toSeq: scala.Seq[Int] = List(a) + |} + """.stripMargin + + override def code = + """object A { + | def unapplySeq(a: Int) = new Wrap(a) + |} + |class T { + | def t: Any = 2 match { + | case A(xs @ _*) => xs + | case _ => "other" + | } + |} + """.stripMargin + + def show(): Unit = { + compileString(newCompiler())(depCode) + compileString(newCompiler("-cp", testOutput.path, "-Vprint:patmat"))(code) + } +} diff --git a/test/files/run/patmat-no-inline-unapply.check b/test/files/run/patmat-no-inline-unapply.check new file mode 100644 index 000000000000..6b0b6eb2425b --- /dev/null +++ b/test/files/run/patmat-no-inline-unapply.check @@ -0,0 +1,25 @@ +[[syntax trees at end of patmat]] // newSource1.scala +package { + class T extends scala.AnyRef { + def (): T = { + T.super.(); + () + }; + def t: Any = { + case val x1: Int(2) = 2; + case5(){ + if (A.unapply(x1)) + matchEnd4("ok") + else + case6() + }; + case6(){ + 
matchEnd4("other") + }; + matchEnd4(x: Any){ + x + } + } + } +} + diff --git a/test/files/run/patmat-no-inline-unapply.scala b/test/files/run/patmat-no-inline-unapply.scala new file mode 100644 index 000000000000..1ce9994c30d2 --- /dev/null +++ b/test/files/run/patmat-no-inline-unapply.scala @@ -0,0 +1,23 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + def depCode = + """object A { + | def unapply(a: Int): true = true + |} + """.stripMargin + + override def code = + """class T { + | def t: Any = 2 match { + | case A() => "ok" + | case _ => "other" + | } + |} + """.stripMargin + + def show(): Unit = { + compileString(newCompiler())(depCode) + compileString(newCompiler("-cp", testOutput.path, "-Vprint:patmat"))(code) + } +} diff --git a/test/files/run/patmat-origtp-switch.check b/test/files/run/patmat-origtp-switch.check index 84a92e1c6ab3..2bebf4044e6d 100644 --- a/test/files/run/patmat-origtp-switch.check +++ b/test/files/run/patmat-origtp-switch.check @@ -2,7 +2,7 @@ package {.type} { class C extends scala.AnyRef { def (): C = { - C.super{C.super.type}.{()Object}(){Object}; + C.super{C.super.type}.{(): Object}(){Object}; (){Unit} }{Unit}; def foo[A](a: A, b: A with C, i: Int): A = { @@ -10,7 +10,7 @@ package {.type} { x1{Int} match { case 0{Int(0)} => a{A} case 1{Int(1)} => b{A with C} - case _{Int} => throw new MatchError{MatchError}{(obj: Any)MatchError}(x1{Int}){MatchError}{Nothing} + case _{Int} => throw new MatchError{MatchError}{(obj: Any): MatchError}(x1{Int}){MatchError}{Nothing} }{A} }{A} } diff --git a/test/files/run/patmat-origtp-switch.scala b/test/files/run/patmat-origtp-switch.scala index bf7eb62a9123..c890ee13601c 100644 --- a/test/files/run/patmat-origtp-switch.scala +++ b/test/files/run/patmat-origtp-switch.scala @@ -1,9 +1,8 @@ import scala.tools.partest._ -import java.io.{Console => _, _} object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Xprint:patmat -Xprint-types -d " + testOutput.path 
+ override def extraSettings: String = "-usejavacp -Vprint:patmat -Vprint-types" override def code = """class C { def foo[A](a: A, b: A with C, i: Int) = i match { @@ -13,9 +12,5 @@ object Test extends DirectTest { } """ - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/patmat-seq.check b/test/files/run/patmat-seq.check new file mode 100644 index 000000000000..3521edc2682a --- /dev/null +++ b/test/files/run/patmat-seq.check @@ -0,0 +1,405 @@ +newSource1.scala:32: warning: unreachable code + case A(x, y) => (x, y) // wrapper.get.apply(0/1) + ^ +[[syntax trees at end of patmat]] // newSource1.scala +package { + object A extends scala.AnyRef { + def (): A.type = { + A.super.(); + () + }; + def unapplySeq(a: Int): collection.SeqFactory.UnapplySeqWrapper[Int] = new collection.SeqFactory.UnapplySeqWrapper[Int](scala.collection.mutable.ArrayBuffer.apply[Int](1, 2, 3)) + }; + object B extends scala.AnyRef { + def (): B.type = { + B.super.(); + () + }; + def unapplySeq(a: Int): Some[scala.collection.immutable.ArraySeq[Int]] = new Some[scala.collection.immutable.ArraySeq[Int]](scala.collection.immutable.ArraySeq.apply[Int](1, 2, 3)((ClassTag.Int: scala.reflect.ClassTag[Int]))) + }; + class Foo[T] extends scala.AnyRef { + def (): Foo[T] = { + Foo.super.(); + () + }; + def isEmpty: Boolean = false; + def get: Foo[T] = this; + def apply(i: Int): T = scala.Predef.???; + def length: Int = 2; + def drop(n: Int): Seq[T] = scala.Predef.???; + def toSeq: Seq[T] = scala.Predef.??? 
+ }; + object C extends scala.AnyRef { + def (): C.type = { + C.super.(); + () + }; + def unapplySeq(a: Int): Foo[Int] = new Foo[Int]() + }; + object D extends scala.AnyRef { + def (): D.type = { + D.super.(); + () + }; + def unapplySeq[T](a: Int): Some[Foo[T]] = new Some[Foo[T]](new Foo[T]()) + }; + object E extends scala.AnyRef { + def (): E.type = { + E.super.(); + () + }; + def unapplySeq(a: Int): Array.UnapplySeqWrapper[Int] = new Array.UnapplySeqWrapper[Int](scala.Array.apply(1, 2, 3)) + }; + object F extends scala.AnyRef { + def (): F.type = { + F.super.(); + () + }; + def unapplySeq(a: Int): Some[String] = new Some[String]("123") + }; + class T extends scala.AnyRef { + def (): T = { + T.super.(); + () + }; + def t: Any = { + case val x1: Int(2) = 2; + case16(){ + val o18: collection.SeqFactory.UnapplySeqWrapper[Int] = A.unapplySeq(x1); + if (o18.isEmpty.unary_!) + { + val xs: Seq[Int] = o18.get.toSeq; + matchEnd15(xs) + } + else + case17() + }; + case17(){ + val o20: collection.SeqFactory.UnapplySeqWrapper[Int] = A.unapplySeq(x1); + if (o20.isEmpty.unary_!.&&(o20.get.!=(null).&&(o20.get.lengthCompare(2).==(0)))) + { + val x: Int = o20.get.apply(0); + val y: Int = o20.get.apply(1); + matchEnd15(new (Int, Int)(x, y)) + } + else + case19() + }; + case19(){ + val o22: collection.SeqFactory.UnapplySeqWrapper[Int] = A.unapplySeq(x1); + if (o22.isEmpty.unary_!.&&(o22.get.!=(null).&&(o22.get.lengthCompare(1).>=(0)))) + { + val x: Int = o22.get.apply(0); + val xs: Seq[Int] = o22.get.drop(1); + matchEnd15(new (Int, Seq[Int])(x, xs)) + } + else + case21() + }; + case21(){ + val o24: Some[scala.collection.immutable.ArraySeq[Int]] = B.unapplySeq(x1); + if (o24.isEmpty.unary_!) 
+ { + val xs: Seq[Int] = o24.get; + matchEnd15(xs) + } + else + case23() + }; + case23(){ + val o26: Some[scala.collection.immutable.ArraySeq[Int]] = B.unapplySeq(x1); + if (o26.isEmpty.unary_!.&&(o26.get.!=(null).&&(o26.get.lengthCompare(2).==(0)))) + { + val x: Int = o26.get.apply(0); + val y: Int = o26.get.apply(1); + matchEnd15(new (Int, Int)(x, y)) + } + else + case25() + }; + case25(){ + val o28: Some[scala.collection.immutable.ArraySeq[Int]] = B.unapplySeq(x1); + if (o28.isEmpty.unary_!.&&(o28.get.!=(null).&&(o28.get.lengthCompare(1).>=(0)))) + { + val x: Int = o28.get.apply(0); + val xs: Seq[Int] = o28.get.drop(1); + matchEnd15(new (Int, Seq[Int])(x, xs)) + } + else + case27() + }; + case27(){ + val o30: Foo[Int] = C.unapplySeq(x1); + if (o30.isEmpty.unary_!) + { + val xs: Seq[Int] = o30.get.toSeq; + matchEnd15(xs) + } + else + case29() + }; + case29(){ + val o32: Foo[Int] = C.unapplySeq(x1); + if (o32.isEmpty.unary_!.&&(o32.get.!=(null).&&(scala.math.`package`.signum(o32.get.length.-(2)).==(0)))) + { + val x: Int = o32.get.apply(0); + val y: Int = o32.get.apply(1); + matchEnd15(new (Int, Int)(x, y)) + } + else + case31() + }; + case31(){ + val o34: Foo[Int] = C.unapplySeq(x1); + if (o34.isEmpty.unary_!.&&(o34.get.!=(null).&&(scala.math.`package`.signum(o34.get.length.-(1)).>=(0)))) + { + val x: Int = o34.get.apply(0); + val xs: Seq[Int] = o34.get.drop(1); + matchEnd15(new (Int, Seq[Int])(x, xs)) + } + else + case33() + }; + case33(){ + val o36: Some[Foo[Nothing]] = D.unapplySeq[Nothing](x1); + if (o36.isEmpty.unary_!) 
+ { + val xs: Seq[Nothing] = o36.get.toSeq; + matchEnd15(xs) + } + else + case35() + }; + case35(){ + val o38: Some[Foo[Nothing]] = D.unapplySeq[Nothing](x1); + if (o38.isEmpty.unary_!.&&(o38.get.!=(null).&&(scala.math.`package`.signum(o38.get.length.-(2)).==(0)))) + { + val x: Nothing = o38.get.apply(0); + val y: Nothing = o38.get.apply(1); + matchEnd15(new (Nothing, Nothing)(x, y)) + } + else + case37() + }; + case37(){ + val o40: Some[Foo[Nothing]] = D.unapplySeq[Nothing](x1); + if (o40.isEmpty.unary_!.&&(o40.get.!=(null).&&(scala.math.`package`.signum(o40.get.length.-(1)).>=(0)))) + { + val x: Nothing = o40.get.apply(0); + val xs: Seq[Nothing] = o40.get.drop(1); + matchEnd15(new (Nothing, Seq[Nothing])(x, xs)) + } + else + case39() + }; + case39(){ + matchEnd15(throw new MatchError(2)) + }; + matchEnd15(x: Any){ + x + } + } + } +} + +[[syntax trees at end of posterasure]] // newSource1.scala +package { + object A extends Object { + def (): A.type = { + A.super.(); + () + }; + def unapplySeq(a: Int): scala.collection.SeqOps = scala.collection.mutable.ArrayBuffer.apply(scala.runtime.ScalaRunTime.wrapIntArray(Array[Int]{1, 2, 3})).$asInstanceOf[scala.collection.SeqOps]() + }; + object B extends Object { + def (): B.type = { + B.super.(); + () + }; + def unapplySeq(a: Int): Some = new Some(scala.collection.immutable.ArraySeq.apply(scala.runtime.ScalaRunTime.wrapIntArray(Array[Int]{1, 2, 3}), (ClassTag.Int(): scala.reflect.ClassTag))) + }; + class Foo extends Object { + def (): Foo = { + Foo.super.(); + () + }; + def isEmpty(): Boolean = false; + def get(): Foo = this; + def apply(i: Int): Object = scala.Predef.???(); + def length(): Int = 2; + def drop(n: Int): Seq = scala.Predef.???(); + def toSeq(): Seq = scala.Predef.???() + }; + object C extends Object { + def (): C.type = { + C.super.(); + () + }; + def unapplySeq(a: Int): Foo = new Foo() + }; + object D extends Object { + def (): D.type = { + D.super.(); + () + }; + def unapplySeq(a: Int): Some = new Some(new 
Foo()) + }; + object E extends Object { + def (): E.type = { + E.super.(); + () + }; + def unapplySeq(a: Int): Object = scala.Array.apply(1, scala.runtime.ScalaRunTime.wrapIntArray(Array[Int]{2, 3})) + }; + object F extends Object { + def (): F.type = { + F.super.(); + () + }; + def unapplySeq(a: Int): Some = new Some("123") + }; + class T extends Object { + def (): T = { + T.super.(); + () + }; + def t(): Object = { + case val x1: Int = 2; + case16(){ + val o18: scala.collection.SeqOps = A.unapplySeq(x1); + if (scala.collection.SeqFactory.UnapplySeqWrapper.isEmpty$extension(o18).unary_!()) + { + val xs: Seq = scala.collection.SeqFactory.UnapplySeqWrapper.toSeq$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o18)); + matchEnd15(xs) + } + else + case17() + }; + case17(){ + val o20: scala.collection.SeqOps = A.unapplySeq(x1); + if (scala.collection.SeqFactory.UnapplySeqWrapper.isEmpty$extension(o20).unary_!().&&(new collection.SeqFactory.UnapplySeqWrapper(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o20)).!=(null).&&(scala.collection.SeqFactory.UnapplySeqWrapper.lengthCompare$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o20), 2).==(0)))) + { + val x: Int = unbox(scala.collection.SeqFactory.UnapplySeqWrapper.apply$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o20), 0)); + val y: Int = unbox(scala.collection.SeqFactory.UnapplySeqWrapper.apply$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o20), 1)); + matchEnd15(new Tuple2$mcII$sp(x, y)) + } + else + case19() + }; + case19(){ + val o22: scala.collection.SeqOps = A.unapplySeq(x1); + if (scala.collection.SeqFactory.UnapplySeqWrapper.isEmpty$extension(o22).unary_!().&&(new 
collection.SeqFactory.UnapplySeqWrapper(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o22)).!=(null).&&(scala.collection.SeqFactory.UnapplySeqWrapper.lengthCompare$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o22), 1).>=(0)))) + { + val x: Int = unbox(scala.collection.SeqFactory.UnapplySeqWrapper.apply$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o22), 0)); + val xs: Seq = scala.collection.SeqFactory.UnapplySeqWrapper.drop$extension(scala.collection.SeqFactory.UnapplySeqWrapper.get$extension(o22), 1); + matchEnd15(new Tuple2(scala.Int.box(x), xs)) + } + else + case21() + }; + case21(){ + val o24: Some = B.unapplySeq(x1); + if (o24.isEmpty().unary_!()) + { + val xs: Seq = o24.get().$asInstanceOf[Seq](); + matchEnd15(xs) + } + else + case23() + }; + case23(){ + val o26: Some = B.unapplySeq(x1); + if (o26.isEmpty().unary_!().&&(o26.get().!=(null).&&(o26.get().$asInstanceOf[collection.IndexedSeqOps]().lengthCompare(2).==(0)))) + { + val x: Int = unbox(o26.get().$asInstanceOf[collection.immutable.ArraySeq]().apply(0)); + val y: Int = unbox(o26.get().$asInstanceOf[collection.immutable.ArraySeq]().apply(1)); + matchEnd15(new Tuple2$mcII$sp(x, y)) + } + else + case25() + }; + case25(){ + val o28: Some = B.unapplySeq(x1); + if (o28.isEmpty().unary_!().&&(o28.get().!=(null).&&(o28.get().$asInstanceOf[collection.IndexedSeqOps]().lengthCompare(1).>=(0)))) + { + val x: Int = unbox(o28.get().$asInstanceOf[collection.immutable.ArraySeq]().apply(0)); + val xs: Seq = o28.get().$asInstanceOf[collection.immutable.ArraySeq]().drop(1); + matchEnd15(new Tuple2(scala.Int.box(x), xs)) + } + else + case27() + }; + case27(){ + val o30: Foo = C.unapplySeq(x1); + if (o30.isEmpty().unary_!()) + { + val xs: Seq = o30.get().toSeq(); + matchEnd15(xs) + } + else + case29() + }; + case29(){ + val o32: Foo = C.unapplySeq(x1); + if 
(o32.isEmpty().unary_!().&&(o32.get().!=(null).&&(scala.math.`package`.signum(o32.get().length().-(2)).==(0)))) + { + val x: Int = unbox(o32.get().apply(0)); + val y: Int = unbox(o32.get().apply(1)); + matchEnd15(new Tuple2$mcII$sp(x, y)) + } + else + case31() + }; + case31(){ + val o34: Foo = C.unapplySeq(x1); + if (o34.isEmpty().unary_!().&&(o34.get().!=(null).&&(scala.math.`package`.signum(o34.get().length().-(1)).>=(0)))) + { + val x: Int = unbox(o34.get().apply(0)); + val xs: Seq = o34.get().drop(1); + matchEnd15(new Tuple2(scala.Int.box(x), xs)) + } + else + case33() + }; + case33(){ + val o36: Some = D.unapplySeq(x1); + if (o36.isEmpty().unary_!()) + { + val xs: Seq = o36.get().$asInstanceOf[Foo]().toSeq(); + matchEnd15(xs) + } + else + case35() + }; + case35(){ + val o38: Some = D.unapplySeq(x1); + if (o38.isEmpty().unary_!().&&(o38.get().!=(null).&&(scala.math.`package`.signum(o38.get().$asInstanceOf[Foo]().length().-(2)).==(0)))) + { + val x: Nothing = o38.get().$asInstanceOf[Foo]().apply(0).$asInstanceOf[Nothing](); + val y: Nothing = o38.get().$asInstanceOf[Foo]().apply(1).$asInstanceOf[Nothing](); + matchEnd15(new Tuple2(x, y)) + } + else + case37() + }; + case37(){ + val o40: Some = D.unapplySeq(x1); + if (o40.isEmpty().unary_!().&&(o40.get().!=(null).&&(scala.math.`package`.signum(o40.get().$asInstanceOf[Foo]().length().-(1)).>=(0)))) + { + val x: Nothing = o40.get().$asInstanceOf[Foo]().apply(0).$asInstanceOf[Nothing](); + val xs: Seq = o40.get().$asInstanceOf[Foo]().drop(1); + matchEnd15(new Tuple2(x, xs)) + } + else + case39() + }; + case39(){ + matchEnd15(throw new MatchError(scala.Int.box(2))) + }; + matchEnd15(x: Object){ + x + } + } + } +} + diff --git a/test/files/run/patmat-seq.scala b/test/files/run/patmat-seq.scala new file mode 100644 index 000000000000..874656ab6d66 --- /dev/null +++ b/test/files/run/patmat-seq.scala @@ -0,0 +1,55 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + + override def extraSettings: String 
= "-usejavacp -Vprint:patmat,posterasure -Ystop-after:posterasure" + + override def code = + """object A { + | def unapplySeq(a: Int) = new collection.SeqFactory.UnapplySeqWrapper(collection.mutable.ArrayBuffer(1,2,3)) + |} + |object B { + | // this works: Some and immutable collections support the required interface. + | // in reality, immutable collections also use the UnapplySeqWrapper value class, which is eliminated by erasure + | def unapplySeq(a: Int) = Some(collection.immutable.ArraySeq(1,2,3)) + |} + |class Foo[T] { + | def isEmpty = false + | def get = this + | def apply(i: Int): T = ??? + | def length = 2 // if there's no lengthCompare, the translation uses length + | def drop(n: Int): Seq[T] = ??? + | def toSeq: Seq[T] = ??? + |} + |object C { + | def unapplySeq(a: Int) = new Foo[Int] + |} + |object D { + | def unapplySeq[T](a: Int) = Some(new Foo[T]) + |} + |object E { + | def unapplySeq(a: Int) = new Array.UnapplySeqWrapper(Array(1,2,3)) + |} + |object F { + | def unapplySeq(a: Int) = Some("123") + |} + |class T { + | def t: Any = 2 match { + | case A(xs @ _*) => xs // wrapper.get.toSeq + | case A(x, y) => (x, y) // wrapper.get.apply(0/1) + | case A(x, xs @ _*) => (x, xs) // wrapper.get.drop(1) + | case B(xs @ _*) => xs + | case B(x, y) => (x, y) + | case B(x, xs @ _*) => (x, xs) + | case C(xs @ _*) => xs + | case C(x, y) => (x, y) + | case C(x, xs @ _*) => (x, xs) + | case D(xs @ _*) => xs + | case D(x, y) => (x, y) + | case D(x, xs @ _*) => (x, xs) + | } + |} + """.stripMargin + + override def show(): Unit = compile() +} diff --git a/test/files/run/patmatnew.check b/test/files/run/patmatnew.check index 72f4289b9211..cc42f919a9f1 100644 --- a/test/files/run/patmatnew.check +++ b/test/files/run/patmatnew.check @@ -1,21 +1,27 @@ -patmatnew.scala:670: warning: This catches all Throwables. If this is really intended, use `case e : Throwable` to clear this warning. +patmatnew.scala:673: warning: This catches all Throwables. 
If this is really intended, use `case e : Throwable` to clear this warning. case e => { ^ -patmatnew.scala:489: warning: unreachable code - case _ if false => - ^ -patmatnew.scala:351: warning: a pure expression does nothing in statement position +patmatnew.scala:354: warning: a pure expression does nothing in statement position case 1 => "OK" ^ -patmatnew.scala:352: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected - case 2 => assert(false); "KO" - ^ -patmatnew.scala:352: warning: a pure expression does nothing in statement position +patmatnew.scala:355: warning: discarded pure expression does nothing case 2 => assert(false); "KO" ^ -patmatnew.scala:353: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected - case 3 => assert(false); "KO" - ^ -patmatnew.scala:353: warning: a pure expression does nothing in statement position +patmatnew.scala:356: warning: discarded pure expression does nothing case 3 => assert(false); "KO" ^ +patmatnew.scala:178: warning: match may not be exhaustive. +It would fail on the following inputs: List(_), Nil + def doMatch2(xs: List[String]): List[String] = xs match { + ^ +patmatnew.scala:280: warning: match may not be exhaustive. +It would fail on the following inputs: List(_), Nil + def doMatch1(xs: List[Char]) = xs match { + ^ +patmatnew.scala:283: warning: match may not be exhaustive. 
+It would fail on the following inputs: List(_), Nil + def doMatch2(xs: List[Char]) = xs match { + ^ +patmatnew.scala:492: warning: unreachable code + case _ if false => + ^ diff --git a/test/files/run/patmatnew.scala b/test/files/run/patmatnew.scala index 2647d97836af..53addeb8ae27 100644 --- a/test/files/run/patmatnew.scala +++ b/test/files/run/patmatnew.scala @@ -1,9 +1,11 @@ +//> using options -deprecation +// import scala.language.{ postfixOps } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { ApplyFromJcl.run() Bug1093.run() Bug1094.run() @@ -31,20 +33,21 @@ object Test { TestSequence07.run() TestSequence08.run() TestSimpleIntSwitch.run() - TestStream.run() + TestLazyList.run() TestUnbox.run() Ticket11.run() Ticket2.run() Ticket346.run() Ticket37.run() Ticket44.run() + NullMatch.run() } - def assertEquals(a: Any, b: Any) { assert(a == b) } - def assertEquals(msg: String, a: Any, b: Any) { assert(a == b, msg) } + def assertEquals(a: Any, b: Any): Unit = { assert(a == b) } + def assertEquals(msg: String, a: Any, b: Any): Unit = { assert(a == b, msg) } object SimpleUnapply { - def run() { // from sortedmap, old version + def run(): Unit = { // from sortedmap, old version List((1, 2)).head match { case kv@(key, _) => kv.toString + " " + key.toString } @@ -54,8 +57,8 @@ object Test { object SeqUnapply { case class SFB(i: Int, xs: List[Int]) - def run() { - List(1, 2) match { + def run(): Unit = { + (List(1, 2): @unchecked) match { case List(1) => assert(false, "wrong case") case List(1, 2, xs@_*) => assert(xs.isEmpty, "not empty") case Nil => assert(false, "wrong case") @@ -68,7 +71,7 @@ object Test { } object ApplyFromJcl { - def run() { + def run(): Unit = { val p = (1, 2) Some(2) match { case Some(p._2) => @@ -78,7 +81,7 @@ object Test { } object TestSimpleIntSwitch { - def run() { + def run(): Unit = { assertEquals("s1", 1, 1 match { case 3 => 3 case 2 => 2 @@ -112,7 +115,7 @@ object Test { } val foo1 = new Foo(1) val 
foo2 = new Foo(2) - def run() { + def run(): Unit = { val res = (foo1.Bar(2): Any) match { case foo1.Bar(2) => true @@ -141,7 +144,7 @@ object Test { // multiple guards for same pattern object TestGuards extends Shmeez { val tree: Tree = Beez(2) - def run() { + def run(): Unit = { val res = tree match { case Beez(x) if x == 3 => false case Beez(x) if x == 2 => true @@ -158,7 +161,7 @@ object Test { // test EqualsPatternClass in combination with MixTypes opt, bug #1276 object TestEqualsPatternOpt { val NoContext = new Object - def run() { + def run(): Unit = { assertEquals(1, ((NoContext: Any) match { case that: AnyRef if this eq that => 0 case NoContext => 1 @@ -175,7 +178,7 @@ object Test { def doMatch2(xs: List[String]): List[String] = xs match { case List(_, rest@_*) => rest.toList } - def run() { + def run(): Unit = { val list1 = List() assertEquals(doMatch(list1), "ok") val list2 = List("1", "2", "3") @@ -190,15 +193,15 @@ object Test { def doMatch(l: Seq[String]): String = l match { case Seq(_*) => "ok" } - def run() { + def run(): Unit = { val list1 = List() assertEquals(doMatch(list1), "ok") val list2 = List("1", "2", "3") assertEquals(doMatch(list2), "ok") val array3 = Array[String]() - assertEquals(doMatch(array3), "ok") + assertEquals(doMatch(array3.toIndexedSeq), "ok") val array4 = Array[String]("ga", "gu") - assertEquals(doMatch(array4), "ok") + assertEquals(doMatch(array4.toIndexedSeq), "ok") } } @@ -208,7 +211,7 @@ object Test { case List(_, _, _, _*) => "ok" case _ => "not ok" } - def run() { + def run(): Unit = { val list1 = List() assertEquals(doMatch(list1), "not ok") val list2 = List("1", "2", "3") @@ -222,7 +225,7 @@ object Test { object TestSequence04 { case class Foo(i: Int, chars: Char*) - def run() { + def run(): Unit = { val a = Foo(0, 'a') match { case Foo(i, c, chars@_*) => c case _ => null @@ -244,7 +247,7 @@ object Test { case class Foo() extends Con case class Bar(xs: Con*) extends Con - def run() { + def run(): Unit = { val res = 
(Bar(Foo()): Con) match { case Bar(xs@_*) => xs // this should be optimized away to a pattern Bar(xs) case _ => Nil @@ -264,7 +267,7 @@ object Test { case A(A(1)) => 2 } - def run() { + def run(): Unit = { assertEquals(doMatch(A(null), 1), 0) assertEquals(doMatch(A(1), 2), 1) assertEquals(doMatch(A(A(1)), 2), 2) @@ -289,7 +292,7 @@ object Test { case Seq(x, y, z@_*) => z.toList } - def run() { + def run(): Unit = { assertEquals(List('a', 'b'), doMatch1(List('a', 'b', 'c', 'd'))) assertEquals(List('c', 'd'), doMatch2(List('a', 'b', 'c', 'd'))) assertEquals(List('a', 'b'), doMatch3(List('a', 'b', 'c', 'd'))) @@ -299,7 +302,7 @@ object Test { // backquoted identifiers in pattern object TestSequence08 { - def run() { + def run(): Unit = { val xs = List(2, 3) val ys = List(1, 2, 3) match { case x :: `xs` => xs @@ -309,17 +312,17 @@ object Test { } } - // unapply for Streams - object TestStream { - def sum(stream: Stream[Int]): Int = - stream match { - case Stream.Empty => 0 - case Stream.cons(hd, tl) => hd + sum(tl) + // unapply for LazyLists + object TestLazyList { + def sum(lazyList: LazyList[Int]): Int = + lazyList match { + case ll if ll.isEmpty => 0 + case LazyList.cons(hd, tl) => hd + sum(tl) } - val str: Stream[Int] = List(1, 2, 3).iterator.toStream + val str: LazyList[Int] = List(1, 2, 3).to(LazyList) - def run() { assertEquals(sum(str), 6) } + def run(): Unit = { assertEquals(sum(str), 6) } } // bug#1163 order of temps must be preserved @@ -341,11 +344,11 @@ object Test { case n :: ls => flips((l take n reverse) ::: (l drop n)) + 1 } - def run() { assertEquals("both", (Var("x"), Var("y")), f) } + def run(): Unit = { assertEquals("both", (Var("x"), Var("y")), f()) } } object TestUnbox { - def run() { + def run(): Unit = { val xyz: (Int, String, Boolean) = (1, "abc", true) xyz._1 match { case 1 => "OK" @@ -368,7 +371,7 @@ object Test { else Some(p.father) } - def run() { + def run(): Unit = { val p1 = new Person("p1", null) val p2 = new Person("p2", p1) 
assertEquals((p2.name, p1.name), p2 match { @@ -389,7 +392,7 @@ object Test { class Foo(j: Int) { case class Bar(i: Int) } - def run() { + def run(): Unit = { "baz" match { case Foo1(x) => Foo1.p(x) @@ -453,7 +456,7 @@ object Test { case Get(y) if y > 4 => // y gets a wildcard type for some reason?! hack } } - def run() { + def run(): Unit = { assert(!(new Buffer).ps.isDefinedAt(42)) } } @@ -471,7 +474,7 @@ object Test { case Get(xs) => // the argDummy should have proper arg.tpe (Int in this case) } } - def run() { + def run(): Unit = { assert(!(new Buffer).jp.isDefinedAt(40)) assert(!(new Buffer).jp.isDefinedAt(42)) } @@ -482,7 +485,7 @@ object Test { case x :: xs if xs.forall { y => y.hashCode() > 0 } => 1 } - def run() { + def run(): Unit = { val s: PartialFunction[Any, Any] = { case List(4 :: xs) => 1 case List(5 :: xs) => 1 @@ -527,7 +530,7 @@ object Test { } } - def run() { + def run(): Unit = { method1(); method2(); } @@ -544,7 +547,7 @@ object Test { case (EQ, 1) => "1" case (EQ, 2) => "2" } - def run() { + def run(): Unit = { val x = (EQ, 0); assertEquals("0", analyze(x)); // should print "0" val y = (EQ, 1); @@ -580,7 +583,7 @@ object Test { case _ => "n.a." 
} - def run() { + def run(): Unit = { // make up some class that has a size class MyNode extends SizeImpl assertEquals("!size 42", info(new MyNode)) @@ -596,13 +599,13 @@ object Test { case a: AnyRef if runtime.ScalaRunTime.isArray(a) => "Array" case _ => v.toString } - def run() { assertEquals("Array", foo(Array(0))) } + def run(): Unit = { assertEquals("Array", foo(Array(0))) } } // bug#1093 (contribution #460) object Bug1093 { - def run() { + def run(): Unit = { assert((Some(3): @unchecked) match { case Some(1 | 2) => false case Some(3) => true @@ -619,7 +622,7 @@ object Test { X("a", "b") match { case X(p, ps@_*) => foo(ps: _*) } - def run() { assertEquals("Foo", bar) } + def run(): Unit = { assertEquals("Foo", bar) } } // #2 @@ -634,7 +637,7 @@ object Test { } object Ticket2 { - def run() { + def run(): Unit = { val o1 = new Outer_2; val o2 = new Outer_2; val x: Any = o1.Foo(1, 2); val y: Any = o2.Foo(1, 2) assert(x != y, "equals test returns true (but should not)") assert(x match { @@ -657,7 +660,7 @@ object Test { class MyException2 extends MyException1 with SpecialException object Ticket11 { - def run() { + def run(): Unit = { Array[Throwable](new Exception("abc"), new MyException1, new MyException2).foreach { e => @@ -678,9 +681,9 @@ object Test { // #37 object Ticket37 { - def foo() {} + def foo(): Unit = {} val (a, b) = { foo(); (2, 3) } - def run() { assertEquals(this.a, 2) } + def run(): Unit = { assertEquals(this.a, 2) } } // #44 @@ -699,11 +702,11 @@ object Test { } } object Ticket44 { - def run() { assert(Y.toString ne null) /*instantiate Y*/ } + def run(): Unit = { assert(Y.toString ne null) /*instantiate Y*/ } } object Ticket211 { - def run() { + def run(): Unit = { (Some(123): Option[Int]) match { case (x: Option[a]) if false => {}; case (y: Option[b]) => {}; @@ -755,11 +758,83 @@ object Test { case _ => false } - def run() { + def run(): Unit = { assert(empty(new L(Nil))) assert(singleton(new L(List(1)))) } } // end Ticket346 + // scala/bug#4364 
+ object NullMatch { + object XArray { + def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = + if (x eq null) sys.error("Unexpected null!") + else Some(x.toIndexedSeq) + } + + object YArray { + def unapply(xs: Array[Int]): Boolean = + if (xs eq null) sys.error("Unexpected null!") + else true + } + + object Animal { + def unapply(x: AnyRef): Option[AnyRef] = + if (x.toString == "Animal") Some(x) + else None + } + + def nullMatch[A](xs: Array[A]): Boolean = xs match { + case Array(xs @_*) => false + case _ => true + } + + def nullMatch2[A](xs: Array[A]): Boolean = xs match { + case XArray(xs @_*) => false + case _ => true + } + + def nullMatch3[A](xs: Array[A]): Boolean = xs match { + case XArray(xs @_*) if 1 == 1 => false + case _ => true + } + + def nullMatch4(xs: Array[Int]): Boolean = xs match { + case YArray() => false + case _ => true + } + + def nullMatch5(x: AnyRef): Boolean = x match { + case Animal(x) => false + case _ => true + } + + def t8787nullMatch() = { + val r = """\d+""".r + val s: String = null + val x = s match { case r() => 1 ; case _ => 2 } + 2 == x + } + + def t8787nullMatcher() = { + val r = """(\d+):(\d+)""".r + val s = "1:2 3:4 5:6" + val z = ((r findAllMatchIn s).toList :+ null) flatMap { + case r(x, y) => Some((x.toInt, y.toInt)) + case _ => None + } + List((1,2),(3,4),(5,6)) == z + } + + def run(): Unit = { + assert(nullMatch(null)) + assert(nullMatch2(null)) + assert(nullMatch3(null)) + assert(nullMatch4(null)) + assert(nullMatch5(null)) + assert(t8787nullMatch()) + assert(t8787nullMatcher()) + } + } } diff --git a/test/files/run/pc-conversions.scala b/test/files/run/pc-conversions.scala deleted file mode 100644 index d4ae305aa75a..000000000000 --- a/test/files/run/pc-conversions.scala +++ /dev/null @@ -1,94 +0,0 @@ -/* - * filter: inliner warning; re-run with - */ - -import collection._ - - -// test conversions between collections -object Test { - - def main(args: Array[String]) { - testConversions - } - - def testConversions { - // 
seq conversions - assertSeq(parallel.mutable.ParArray(1, 2, 3)) - assertSeq(parallel.mutable.ParHashMap(1 -> 2, 2 -> 3)) - assertSeq(parallel.mutable.ParHashSet(1, 2, 3)) - assertSeq(parallel.immutable.ParRange(1, 50, 1, false)) - assertSeq(parallel.immutable.ParHashMap(1 -> 2, 2 -> 4)) - assertSeq(parallel.immutable.ParHashSet(1, 2, 3)) - - // par conversions - assertPar(Array(1, 2, 3)) - assertPar(mutable.ArrayBuffer(1, 2, 3)) - assertPar(mutable.ArraySeq(1, 2, 3)) - assertPar(mutable.WrappedArray.make[Int](Array(1, 2, 3))) - assertPar(mutable.HashMap(1 -> 1, 2 -> 2)) - assertPar(mutable.HashSet(1, 2, 3)) - assertPar(immutable.Range(1, 50, 1)) - assertPar(immutable.HashMap(1 -> 1, 2 -> 2)) - assertPar(immutable.HashSet(1, 2, 3)) - - // par.to* and to*.par tests - assertToPar(List(1 -> 1, 2 -> 2, 3 -> 3)) - assertToPar(Stream(1 -> 1, 2 -> 2)) - assertToPar(Array(1 -> 1, 2 -> 2)) - assertToPar(mutable.PriorityQueue(1 -> 1, 2 -> 2, 3 -> 3)) - assertToPar(mutable.ArrayBuffer(1 -> 1, 2 -> 2)) - assertToPar(mutable.ArraySeq(1 -> 3)) - assertToPar(mutable.WrappedArray.make[(Int, Int)](Array(1 -> 3))) - assertToPar(mutable.HashMap(1 -> 3)) - assertToPar(mutable.HashSet(1 -> 3)) - assertToPar(immutable.HashMap(1 -> 3)) - assertToPar(immutable.HashSet(1 -> 3)) - assertToPar(parallel.mutable.ParArray(1 -> 1, 2 -> 2, 3 -> 3)) - assertToPar(parallel.mutable.ParHashMap(1 -> 2)) - assertToPar(parallel.mutable.ParHashSet(1 -> 2)) - assertToPar(parallel.immutable.ParHashMap(1 -> 2)) - assertToPar(parallel.immutable.ParHashSet(1 -> 3)) - - assertToParWoMap(immutable.Range(1, 10, 2)) - - // seq and par again conversions) - assertSeqPar(parallel.mutable.ParArray(1, 2, 3)) - } - - def assertSeqPar[T](pc: parallel.ParIterable[T]) = pc.seq.par == pc - - def assertSeq[T](pc: parallel.ParIterable[T]) = assert(pc.seq == pc) - - def assertPar[T, P <: Parallel](xs: GenIterable[T]) = assert(xs == xs.par) - - def assertToPar[K, V](xs: GenTraversable[(K, V)]) { - xs match { - case _: Seq[_] => 
- assert(xs.toIterable.par == xs) - assert(xs.par.toIterable == xs) - case _ => - } - - assert(xs.toSeq.par == xs.toSeq) - assert(xs.par.toSeq == xs.toSeq) - - assert(xs.toSet.par == xs.toSet) - assert(xs.par.toSet == xs.toSet) - - assert(xs.toMap.par == xs.toMap) - assert(xs.par.toMap == xs.toMap) - } - - def assertToParWoMap[T](xs: GenSeq[T]) { - assert(xs.toIterable.par == xs.toIterable) - assert(xs.par.toIterable == xs.toIterable) - - assert(xs.toSeq.par == xs.toSeq) - assert(xs.par.toSeq == xs.toSeq) - - assert(xs.toSet.par == xs.toSet) - assert(xs.par.toSet == xs.toSet) - } - -} diff --git a/test/files/run/pf-catch.scala b/test/files/run/pf-catch.scala index 33982d055745..90b3e818432b 100644 --- a/test/files/run/pf-catch.scala +++ b/test/files/run/pf-catch.scala @@ -1,7 +1,6 @@ -import scala.language.{ postfixOps } object Test { - def shortName(x: AnyRef) = x.getClass.getName split '.' last + def shortName(x: AnyRef) = x.getClass.getName.split('.').last type Handler[+T] = PartialFunction[Throwable, T] val standardHandler: Handler[String] = { @@ -9,17 +8,16 @@ object Test { case x: java.lang.IllegalArgumentException => shortName(x) } - def fn[T: Handler](body: => T): T = { + def fn[T: Handler](body: => T): T = try body catch implicitly[Handler[T]] - } - def f1 = { + def f1() = { implicit val myHandler = standardHandler println(fn(Nil.head)) println(fn(null.toString)) } - def f2 = { + def f2() = { implicit val myHandler: Handler[String] = standardHandler orElse { case x => "DEBUG: " + shortName(x) } @@ -28,9 +26,9 @@ object Test { } def main(args: Array[String]): Unit = { - try f1 + try f1() catch { case x: Throwable => println(shortName(x) + " slipped by.") } - f2 + f2() } } diff --git a/test/files/run/position-val-def.check b/test/files/run/position-val-def.check index b0ce48239ba9..e9d120e50abc 100644 --- a/test/files/run/position-val-def.check +++ b/test/files/run/position-val-def.check @@ -1,30 +1,113 @@ -val x = 0 -[0:9]val x = [8:9]0 - -var x = 0 
-[0:9]var x = [8:9]0 - -val x, y = 0 -[NoPosition]{ - [4]val x = [11]0; - [7:12]val y = [11:12]0; - [NoPosition]() -} - -var x, y = 0 -[NoPosition]{ - [4]var x = [11]0; - [7:12]var y = [11:12]0; - [NoPosition]() -} - -val (x, y) = 0 -[NoPosition]{ - <0:14> private[this] val x$1 = <4:14>[13:14][13:14]0: @[13]scala.unchecked match { - <4:10>case <4:10>[4]scala.Tuple2(<5:6>(x @ [5]_), <8:9>(y @ [8]_)) => <4:10><4:10>scala.Tuple2(<4:10>x, <4:10>y) - }; - [5:6]val x = [5]x$1._1; - [8:9]val y = [8]x$1._2; - [NoPosition]() -} - +newSource8.scala:1: warning: Pattern definition introduces Unit-valued member of C7; consider wrapping it in `locally { ... }`. +class C7 { val Some(_) = Option(42) } + ^ +class C0 { val x = 42 } +[15:16] [11:21] [NoPosition] -> private[this] val x: Int = _ +[15] [15] [15] -> def x(): Int = C0.this.x +[15:16] [19:21] -> C0.this.x = 42 +-- +class C1 { var x = 42 } +[15:16] [11:21] [NoPosition] -> private[this] var x: Int = _ +[15] [15] [15] -> def x(): Int = C1.this.x +[15] [15] [15] -> def x_=(x$1: Int): Unit = C1.this.x = x$1 +[15] [15] -> C1.this.x = x$1 +[15:16] [19:21] -> C1.this.x = 42 +-- +class C2 { val x, y = 42 } +[15:16] <11:24> [NoPosition] -> private[this] val x: Int = _ +[15] [15] [15] -> def x(): Int = C2.this.x +[18:19] <11:24> [NoPosition] -> private[this] val y: Int = _ +[18] [18] [18] -> def y(): Int = C2.this.y +[15:16] [22] -> C2.this.x = 42 +[18:19] [22:24] -> C2.this.y = 42 +-- +class C3 { var x, y = 42 } +[15:16] <11:24> [NoPosition] -> private[this] var x: Int = _ +[15] [15] [15] -> def x(): Int = C3.this.x +[15] [15] [15] -> def x_=(x$1: Int): Unit = C3.this.x = x$1 +[15] [15] -> C3.this.x = x$1 +[18:19] <11:24> [NoPosition] -> private[this] var y: Int = _ +[18] [18] [18] -> def y(): Int = C3.this.y +[18] [18] [18] -> def y_=(x$1: Int): Unit = C3.this.y = x$1 +[18] [18] -> C3.this.y = x$1 +[15:16] [22] -> C3.this.x = 42 +[18:19] [22:24] -> C3.this.y = 42 +-- +class C4 { val (x, y) = (42, 27) } +<15:32> <15:32> [NoPosition] 
-> private[this] val x$1: Tuple2 = _ +[16:17] <11:32> [NoPosition] -> private[this] val x: Int = _ +[16] [16] [16] -> def x(): Int = C4.this.x +[19:20] <11:32> [NoPosition] -> private[this] val y: Int = _ +[19] [19] [19] -> def y(): Int = C4.this.y +<15:32> [24:32] -> C4.this.x$1 = {... +[24:32] [24:32] [24:32] -> case val x1: Tuple2 = (new Tuple2$mcII$sp(42, 27): Tuple2) +[24:32] -> new Tuple2$mcII$sp(42, 27) + [25:27] -> 42 + [29:31] -> 27 +<15:21> -> x1.ne(null) + <15:21> -> null +<16:17> <16:17> <16:17> -> val x: Int = x1._1$mcI$sp() +<19:20> <19:20> <19:20> -> val y: Int = x1._2$mcI$sp() +<15:21> -> new Tuple2$mcII$sp(x, y) + <16:17> -> x + <19:20> -> y +[16:17] [16] -> C4.this.x = C4.this.x$1._1$mcI$sp() +[19:20] [19] -> C4.this.y = C4.this.x$1._2$mcI$sp() +-- +class C5 { val (x, y), (w, z) = (42, 27) } +<15:40> <15:40> [NoPosition] -> private[this] val x$1: Tuple2 = _ +[16:17] <11:40> [NoPosition] -> private[this] val x: Int = _ +[16] [16] [16] -> def x(): Int = C5.this.x +[19:20] <11:40> [NoPosition] -> private[this] val y: Int = _ +[19] [19] [19] -> def y(): Int = C5.this.y +<23:40> <23:40> [NoPosition] -> private[this] val x$2: Tuple2 = _ +[24:25] <11:40> [NoPosition] -> private[this] val w: Int = _ +[24] [24] [24] -> def w(): Int = C5.this.w +[27:28] <11:40> [NoPosition] -> private[this] val z: Int = _ +[27] [27] [27] -> def z(): Int = C5.this.z +<15:40> [32] -> C5.this.x$1 = {... +[32] [32] [32] -> case val x1: Tuple2 = (new Tuple2$mcII$sp(42, 27): Tuple2) +<15:21> -> x1.ne(null) + <15:21> -> null +<16:17> <16:17> <16:17> -> val x: Int = x1._1$mcI$sp() +<19:20> <19:20> <19:20> -> val y: Int = x1._2$mcI$sp() +<15:21> -> new Tuple2$mcII$sp(x, y) + <16:17> -> x + <19:20> -> y +[16:17] [16] -> C5.this.x = C5.this.x$1._1$mcI$sp() +[19:20] [19] -> C5.this.y = C5.this.x$1._2$mcI$sp() +<23:40> [32:40] -> C5.this.x$2 = {... 
+[32:40] [32:40] [32:40] -> case val x1: Tuple2 = (new Tuple2$mcII$sp(42, 27): Tuple2) +[32:40] -> new Tuple2$mcII$sp(42, 27) + [33:35] -> 42 + [37:39] -> 27 +<23:29> -> x1.ne(null) + <23:29> -> null +<24:25> <24:25> <24:25> -> val w: Int = x1._1$mcI$sp() +<27:28> <27:28> <27:28> -> val z: Int = x1._2$mcI$sp() +<23:29> -> new Tuple2$mcII$sp(w, z) + <24:25> -> w + <27:28> -> z +[24:25] [24] -> C5.this.w = C5.this.x$2._1$mcI$sp() +[27:28] [27] -> C5.this.z = C5.this.x$2._2$mcI$sp() +-- +class C6 { val x, y, z: String = "hello, worlds" } +[15:16] <11:48> [NoPosition] -> private[this] val x: String = _ +[15] [15] [15] -> def x(): String = C6.this.x +[18:19] <11:48> [NoPosition] -> private[this] val y: String = _ +[18] [18] [18] -> def y(): String = C6.this.y +[21:22] <11:48> [NoPosition] -> private[this] val z: String = _ +[21] [21] [21] -> def z(): String = C6.this.z +[15:16] [33] -> C6.this.x = "hello, worlds" +[18:19] [33] -> C6.this.y = "hello, worlds" +[21:22] [33:48] -> C6.this.z = "hello, worlds" +-- +class C7 { val Some(_) = Option(42) } +[11:35] [11:35] [NoPosition] -> private[this] val x$1: scala.runtime.BoxedUnit = _ +[11:35] [25:35] -> C7.this.x$1 = {... 
+[25:35] [25:35] [25:35] -> case val x1: Option = (scala.Option.apply(scala.Int.box(42)): Option) +[25:35] -> scala.Option.apply(scala.Int.box(42)) + [32:34] -> scala.Int.box(42) +[32:34] -> scala.Int.box(42) + [32:34] -> 42 +-- diff --git a/test/files/run/position-val-def.scala b/test/files/run/position-val-def.scala index 62cb54acf80d..2b0da2598dbd 100644 --- a/test/files/run/position-val-def.scala +++ b/test/files/run/position-val-def.scala @@ -1,26 +1,61 @@ -import scala.reflect.runtime.universe._ -import scala.reflect.runtime.{universe => ru} -import scala.reflect.runtime.{currentMirror => cm} -import scala.tools.reflect.ToolBox +//> using options -Xsource:3-cross -object Test { - val toolbox = cm.mkToolBox(options = "-Yrangepos") +import scala.reflect.internal.util.StringContextStripMarginOps +import scala.tools.partest.CompilerTest +import java.util.concurrent.atomic.AtomicInteger - def main(args: Array[String]) { - def test(expr: String) { - val t = toolbox.parse(expr) - println(expr) - println(show(t, printPositions = true)) - println() +object Test extends CompilerTest { + import global.{show as tshow, *} + + val counter = new AtomicInteger + + override def sources = + sm""" + val x = 42 + var x = 42 + val x, y = 42 + var x, y = 42 + val (x, y) = (42, 27) + val (x, y), (w, z) = (42, 27) + val x, y, z: String = "hello, worlds" + val Some(_) = Option(42) + """.linesIterator.map(_.trim).filter(_.nonEmpty) + .map(s => s"class C${counter.getAndIncrement} { $s }") + .toList + + def check(source: String, unit: CompilationUnit): Unit = { + println(source) + //println("--") + //println(tshow(unit.body)) + //println("--") + unit.body.foreach { + case t: ValOrDefDef if !t.symbol.isConstructor && !t.symbol.isParameter => + println(f"${tshow(t.namePos)}%-8s${tshow(t.pos)}%-8s${tshow(t.rhs.pos)}%-14s -> ${tshow(t).clipped}") + case t: Assign => + println(f"${tshow(t.pos)}%-8s${tshow(t.rhs.pos)}%-22s -> ${tshow(t).clipped}") + case t @ treeInfo.Application(fun, _, 
argss) + if !t.pos.isZeroExtent + && argss.exists(_.nonEmpty) + && !fun.symbol.isLabel + && fun.symbol.owner != definitions.MatchErrorClass + && !treeInfo.isSuperConstrCall(t) + => + println(f"${tshow(t.pos)}%-30s -> ${tshow(t).clipped}") + for (args <- argss; arg <- args) + println(f" ${tshow(arg.pos)}%-28s -> ${tshow(arg).clipped}") + case _ => } - val tests = """ - val x = 0 - var x = 0 - val x, y = 0 - var x, y = 0 - val (x, y) = 0 - """ - val exprs = tests.split("\\n").map(_.trim).filterNot(_.isEmpty) - exprs foreach test + println("--") + } + implicit class Clippy(val s: String) extends AnyVal { + def clipped = { + val it = s.linesIterator + val t = it.next() + if (it.hasNext) s"$t..." else t + } + } + implicit class Positional(val pos: Position) extends AnyVal { + def isZeroExtent = + !pos.isRange || pos.start == pos.end } } diff --git a/test/files/run/pr7593.scala b/test/files/run/pr7593.scala index eac03abf8673..5e01d3c37aa6 100644 --- a/test/files/run/pr7593.scala +++ b/test/files/run/pr7593.scala @@ -1,7 +1,7 @@ object Test { def main(args: Array[String]): Unit = { def foo = synchronized { "bar" } - val eta = foo _ - println(eta()) + val bar = () => foo + println(bar()) } } diff --git a/test/files/run/predef-cycle.scala b/test/files/run/predef-cycle.scala index ab147688bc01..64b352bc4300 100644 --- a/test/files/run/predef-cycle.scala +++ b/test/files/run/predef-cycle.scala @@ -1,11 +1,11 @@ class Force { val t1 = new Thread { - override def run() { + override def run(): Unit = { scala.`package` } } val t2 = new Thread { - override def run() { + override def run(): Unit = { scala.Predef } } @@ -16,7 +16,7 @@ class Force { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { new Force() } } @@ -68,4 +68,4 @@ object Test { at scala.package$.(package.scala:46) at scala.package$.(package.scala) at Force$$anon$1.run(predef-cycle.scala:4) - */ \ No newline at end of file + */ diff --git a/test/files/run/preinits.check 
b/test/files/run/preinits.check index e97a14b77f5c..954cbdf99e5a 100644 --- a/test/files/run/preinits.check +++ b/test/files/run/preinits.check @@ -4,6 +4,7 @@ trait B extends { override val x = 1 } with A { println("B") } preinits.scala:3: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized. trait C extends { override val x = 2 } with A ^ +warning: 2 deprecations (since 2.13.0); re-run with -deprecation for details A B 2 diff --git a/test/files/run/primitive-sigs-2-new.scala b/test/files/run/primitive-sigs-2-new.scala index 4da3d23ffe3e..00718a318fff 100644 --- a/test/files/run/primitive-sigs-2-new.scala +++ b/test/files/run/primitive-sigs-2-new.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:inline +//> using options -Ydelambdafy:inline +// import scala.language.{ postfixOps } import scala.reflect.{ClassTag, classTag} @@ -29,7 +30,7 @@ object Test { def main(args: Array[String]): Unit = { println(c2.getGenericInterfaces.map(_.toString).sorted mkString " ") println(c1m ++ c2m sorted) - println(new C f) + println(new C().f()) c3m.sorted foreach println } } diff --git a/test/files/run/primitive-sigs-2-old.scala b/test/files/run/primitive-sigs-2-old.scala index 5d5963814f2f..a3145e81fd25 100644 --- a/test/files/run/primitive-sigs-2-old.scala +++ b/test/files/run/primitive-sigs-2-old.scala @@ -1,4 +1,5 @@ -// scalac: -Ydelambdafy:inline +//> using options -Ydelambdafy:inline +// import scala.language.{ postfixOps } import java.{ lang => jl } @@ -19,7 +20,7 @@ class Arr { // instead of the more appealing version from the past // public T[] Arr.arr4(T[][],scala.reflect.Manifest) // - // because java inflict's its reference-only generic-arrays on us. + // because Java inflicts its reference-only generic-arrays on us. 
// def arr4[T: Manifest](xss: Array[Array[T]]): Array[T] = xss map (_.head) } @@ -36,7 +37,7 @@ object Test { def main(args: Array[String]): Unit = { println(c2.getGenericInterfaces.map(_.toString).sorted mkString " ") println(c1m ++ c2m sorted) - println(new C f) + println(new C().f()) c3m.sorted foreach println } } diff --git a/test/files/run/print-args.check b/test/files/run/print-args.check new file mode 100644 index 000000000000..60f43da7cdd3 --- /dev/null +++ b/test/files/run/print-args.check @@ -0,0 +1,6 @@ +Compiler arguments written to: print-args-run.obj/print-args.txt +newSource1.scala:3: error: type mismatch; + found : String("42") + required: Int + def f: Int = "42" + ^ diff --git a/test/files/run/print-args.scala b/test/files/run/print-args.scala new file mode 100644 index 000000000000..88070f35c046 --- /dev/null +++ b/test/files/run/print-args.scala @@ -0,0 +1,33 @@ +import java.nio.file.Files + +import org.junit.Assert.assertFalse +import scala.jdk.CollectionConverters._ +import scala.reflect.internal.util._ +import scala.tools.partest.DirectTest +import scala.tools.testkit.AssertUtil.assertSameElements + +import org.junit.Assert._ + +object Test extends DirectTest { + lazy val argfile = testOutput.jfile.toPath().resolve("print-args.txt") + override def extraSettings = s"${super.extraSettings} -Vprint-args ${argfile}" + def expected = + sm""" + |-usejavacp + |-d + |${testOutput.jfile.toPath()} + |-Vprint-args + |${testOutput.jfile.toPath().resolve(argfile)} + |newSource1.scala + """.trim + def code = + sm""" + |class C { + | def f: Int = "42" + |} + """ + def show() = { + assertFalse(compile()) + assertSameElements(expected.linesIterator, Files.readAllLines(argfile).asScala) + } +} diff --git a/test/files/run/productElementName.check b/test/files/run/productElementName.check new file mode 100644 index 000000000000..6bad7ad80814 --- /dev/null +++ b/test/files/run/productElementName.check @@ -0,0 +1,14 @@ +User(name=Susan, age=42) +ユーザ(名前=Susan, 
年齢=42) +U$er(na$me=Susan, a$ge=42) +type(for=Susan, if=42) +contains spaces(first param=Susan, second param=42) +Symbols(::=Susan, ||=42) +MultipleParamLists(a=Susan, b=42) +AuxiliaryConstructor(a=Susan, b=42) +OverloadedApply(a=Susan, b=123) +DefinesProductElementName(foo=Susan, foo=42) +InheritsProductElementName(a=Susan, b=42) +InheritsProductElementName_Override(overridden=Susan, overridden=42) +InheritsProductElementName_Override_SelfType(a=Susan, b=42) +PrivateMembers(a=10, b=20, c=30, d=40, e=50, f=60) diff --git a/test/files/run/productElementName.scala b/test/files/run/productElementName.scala new file mode 100644 index 000000000000..9755dacac826 --- /dev/null +++ b/test/files/run/productElementName.scala @@ -0,0 +1,107 @@ +//> using options -Xsource:3 -Xsource-features:leading-infix + +import scala.tools.testkit.AssertUtil.assertThrown +import scala.util.chaining.* +import org.junit.Assert.assertEquals + +case class User(name: String, age: Int) + +case class ユーザ(名前: String, 年齢: Int) + +case class U$er(na$me: String, a$ge: Int) + +case class `type`(`for`: String, `if`: Int) + +case class `contains spaces`(`first param`: String, `second param`: Int) + +case class Symbols(:: : String, || : Int) + +case class MultipleParamLists(a: String, b: Int)(c: Boolean) + +case class AuxiliaryConstructor(a: String, b: Int) { + def this(x: String) = this(x, 123) +} + +case class OverloadedApply(a: String, b: Int) +object OverloadedApply { + def apply(x: String): OverloadedApply = new OverloadedApply(x, 123) +} + +case class DefinesProductElementName(a: String, b: Int) { + override def productElementName(n: Int): String = "foo" +} + +trait A { + def productElementName(n: Int): String = "overridden" +} +case class InheritsProductElementName(a: String, b: Int) extends A + +trait B extends Product { + override def productElementName(n: Int): String = "overridden" +} +case class InheritsProductElementName_Override(a: String, b: Int) extends B + +trait C { self: Product => + 
override def productElementName(n: Int): String = "overridden" +} +case class InheritsProductElementName_Override_SelfType(a: String, b: Int) extends C + +case class PrivateMembers(a: Int, private val b: Int, c: Int, private val d: Int, e: Int, private val f: Int) + +case class ImplicitParameter[A: Ordering](a: String, b: Int)(c: A) + +case object CaseObject + +object Test extends App { + def verify(p: Product, checkName: Boolean = true): Unit = { + val iterated = p.productElementNames.zip(p.productIterator) + .map { case (name, value) => s"$name=$value" } + .mkString(p.productPrefix + "(", ", ", ")") + val indexed = (0 until p.productArity) + .map(i => s"${p.productElementName(i)}=${p.productElement(i)}") + .mkString(p.productPrefix + "(", ", ", ")") + assertEquals(iterated, indexed) + if (checkName) assertThrown[IndexOutOfBoundsException](_ => true)(p.productElementName(p.productArity + 1)) + println(iterated) + } + + verify(User("Susan", 42)) + verify(ユーザ("Susan", 42)) + verify(U$er("Susan", 42)) + verify(`type`("Susan", 42)) + verify(`contains spaces`("Susan", 42)) + verify(Symbols("Susan", 42)) + verify(MultipleParamLists("Susan", 42)(true)) + verify(AuxiliaryConstructor("Susan", 42)) + verify(OverloadedApply("Susan")) + verify(DefinesProductElementName("Susan", 42), checkName = false) + + // uses the synthetic, not the one defined in the trait + verify(InheritsProductElementName("Susan", 42)) + + // uses the override defined in the trait + verify(InheritsProductElementName_Override("Susan", 42), checkName = false) + + // uses the synthetic, not the one defined in the trait + verify(InheritsProductElementName_Override_SelfType("Susan", 42)) + + verify(PrivateMembers(10, 20, 30, 40, 50, 60)) + + // message check and probe for characteristic stack frames + def check(t: Throwable)(msg: String)(ms: String*): Boolean = + (t.getMessage == msg).tap(if (_) () else println(s"expected [$msg], got [${t.getMessage}]")) + && + ms.forall(m => t.getStackTrace.exists(f => m 
== s"${f.getClassName}.${f.getMethodName}")) + + //java.lang.IndexOutOfBoundsException: 99 + assertThrown[IndexOutOfBoundsException](check(_)("99")("scala.runtime.Statics.ioobe", "ImplicitParameter.productElementName")) { + ImplicitParameter("foo", 123)(42).productElementName(99) + } + assertThrown[IndexOutOfBoundsException](_ => true) { + ImplicitParameter("foo", 123)(42).productElementName(2) + } + //java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 [sic] + assertThrown[IndexOutOfBoundsException](check(_)(s"99 is out of bounds (min 0, max -1)")("scala.Product.productElementName", "CaseObject$.productElementName")) { + CaseObject.productElementName(99) + } +} diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index 373f63e5b259..436a02643597 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/run/programmatic-main.scala b/test/files/run/programmatic-main.scala index 542ac2781e47..e4d81624cb9d 100644 --- a/test/files/run/programmatic-main.scala +++ 
b/test/files/run/programmatic-main.scala @@ -10,7 +10,7 @@ object Test { def main(args: Array[String]): Unit = { Console.withErr(Console.out) { - Main process (baseargs ++ "-Xpluginsdir /does/not/exist/foo/quux -Xshow-phases".split(' ')) + Main process (baseargs ++ "-Xpluginsdir /does/not/exist/foo/quux -Vphases".split(' ')) } } } diff --git a/test/files/run/proxy.check b/test/files/run/proxy.check index c40b3db7c21a..0b4e6dce377b 100644 --- a/test/files/run/proxy.check +++ b/test/files/run/proxy.check @@ -1,3 +1,4 @@ +warning: 2 deprecations (since 2.13.0); re-run with -deprecation for details false true false diff --git a/test/files/run/pure-args-byname-noinline.scala b/test/files/run/pure-args-byname-noinline.scala index 35b1dfea01d2..7c4897d1df32 100644 --- a/test/files/run/pure-args-byname-noinline.scala +++ b/test/files/run/pure-args-byname-noinline.scala @@ -1,23 +1,23 @@ object Test { //Were affected by scala/bug#6306 - def f[A](a: =>A) = println(a.toString) - def f1[A <: AnyVal](a: =>A) = println(a.toString) - def f1a[A <: AnyVal](a: =>A) = println(a.##) - def f2[A <: AnyRef](a: =>A) = println(a.toString) - def f2a[A <: String](a: =>A) = println(a.toString) + def f[A](a: => A) = println(a.toString) + def f1[A <: AnyVal](a: => A) = println(a.toString) + def f1a[A <: AnyVal](a: => A) = println(a.##) + def f2[A <: AnyRef](a: => A) = println(a.toString) + def f2a[A <: String](a: => A) = println(a.toString) //Works - def f3[A](a: =>Seq[A]) = println(a.toString) + def f3[A](a: => Seq[A]) = println(a.toString) def foo() = println(2) def client(f: () => Unit) = {f(); f()} - def attempt2() { + def attempt2(): Unit = { val bar: () => Unit = foo _ //The code causing scala/bug#6306 was supposed to optimize code like this: client(() => bar ()) //to: client(bar) } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { attempt2() f3(Seq(1)) f3(Seq()) diff --git a/test/files/run/pure-warning-post-macro/Macro_1.scala 
b/test/files/run/pure-warning-post-macro/Macro_1.scala index 9f9c400fced1..a21569107e1a 100644 --- a/test/files/run/pure-warning-post-macro/Macro_1.scala +++ b/test/files/run/pure-warning-post-macro/Macro_1.scala @@ -1,4 +1,3 @@ -// scalac: -Xfatal-warnings import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context @@ -9,6 +8,7 @@ object Macro { block match { case Block(stats, expr) => q"_root_.scala.List.apply[${weakTypeOf[T]}](..${stats :+ expr})" + case x => throw new MatchError(x) } } } diff --git a/test/files/run/pure-warning-post-macro/test_2.scala b/test/files/run/pure-warning-post-macro/test_2.scala index 3defb7cfcb85..4b4934c08392 100644 --- a/test/files/run/pure-warning-post-macro/test_2.scala +++ b/test/files/run/pure-warning-post-macro/test_2.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +//> using options -Werror object Test { def main(args: Array[String]): Unit = { // We don't want a "pure expression discarded" warning here as the macro will diff --git a/test/files/run/range-unit.scala b/test/files/run/range-unit.scala index d8ebc00e9c95..730a1d7c1767 100644 --- a/test/files/run/range-unit.scala +++ b/test/files/run/range-unit.scala @@ -31,7 +31,7 @@ object Test { catch { case e: IllegalArgumentException => List("---\n " + e) } } - def runGroup(label: String, f: (Int, Int, Int) => Range) { + def runGroup(label: String, f: (Int, Int, Int) => Range): Unit = { println(">>> " + label + " <<<\n") for (start <- endpoints) { val s = "%-7s %-7s %-7s %s".format("start", "end", "step", "length/first/last") diff --git a/test/files/run/range.scala b/test/files/run/range.scala index aedb0295c6af..61c6735b77a0 100644 --- a/test/files/run/range.scala +++ b/test/files/run/range.scala @@ -4,7 +4,7 @@ object Test { def rangeForeach(range : Range) = { val buffer = new scala.collection.mutable.ListBuffer[Int]; range.foreach(buffer += _); - assert(buffer.toList == range.iterator.toList, buffer.toList+"/"+range.iterator.toList) + 
assert(buffer.toList == range.iterator.toList, buffer.toList.toString + "/" + range.iterator.toList) } def boundaryTests() = { @@ -54,14 +54,12 @@ object Test { def main(args: Array[String]): Unit = { implicit val imp1 = Numeric.BigDecimalAsIfIntegral - implicit val imp2 = Numeric.DoubleAsIfIntegral val _grs = List[GR[_]]( GR(BigDecimal(5.0)), GR(BigDecimal(0.25)), // scala/bug#9348 GR(BigInt(5)), GR(5L), - GR(5.0d), GR(2.toByte) ) val grs = _grs ::: (_grs map (_.negated)) diff --git a/test/files/run/red-black-tree-serial-new.check b/test/files/run/red-black-tree-serial-new.check deleted file mode 100644 index 9b36b507dbba..000000000000 --- a/test/files/run/red-black-tree-serial-new.check +++ /dev/null @@ -1,10 +0,0 @@ - original - Map(key0 -> value:0, key1 -> value:1, key2 -> value:2, key3 -> value:3, key4 -> value:4, key5 -> value:5, key6 -> value:6, key7 -> value:7, key8 -> value:8, key9 -> value:9) - original class - class scala.collection.immutable.TreeMap - binary - ACED00057372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E547265654D617024547265654D617050726F7879B1481B8B7DA251FC030000787077040000000A7372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A02000078707400046B65793074000776616C75653A307400046B65793174000776616C75653A317400046B65793274000776616C75653A327400046B65793374000776616C75653A337400046B65793474000776616C75653A347400046B65793574000776616C75653A357400046B65793674000776616C75653A367400046B65793774000776616C75653A377400046B65793874000776616C75653A387400046B65793974000776616C75653A3978 - recovered - Map(key0 -> value:0, key1 -> value:1, key2 -> value:2, key3 -> value:3, key4 -> value:4, key5 -> value:5, key6 -> value:6, key7 -> value:7, key8 -> value:8, key9 -> value:9) - recovered class - class scala.collection.immutable.TreeMap - original - TreeSet(key0, key1, key2, key3, key4, key5, key6, key7, key8, key9) - original class - class scala.collection.immutable.TreeSet - binary - 
ACED00057372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E54726565536574245472656553657450726F78798A8EF7F0C326F866030000787077040000000A7372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A02000078707400046B6579307400046B6579317400046B6579327400046B6579337400046B6579347400046B6579357400046B6579367400046B6579377400046B6579387400046B65793978 - recovered - TreeSet(key0, key1, key2, key3, key4, key5, key6, key7, key8, key9) - recovered class - class scala.collection.immutable.TreeSet diff --git a/test/files/run/red-black-tree-serial-new.javaopts b/test/files/run/red-black-tree-serial-new.javaopts deleted file mode 100644 index 3b28d3671f30..000000000000 --- a/test/files/run/red-black-tree-serial-new.javaopts +++ /dev/null @@ -1,2 +0,0 @@ --Dscala.collection.immutable.TreeSet.newSerialisation=true --Dscala.collection.immutable.TreeMap.newSerialisation=true \ No newline at end of file diff --git a/test/files/run/red-black-tree-serial-new/NewTreeSerialFormat.scala b/test/files/run/red-black-tree-serial-new/NewTreeSerialFormat.scala deleted file mode 100644 index f23236d0e9bf..000000000000 --- a/test/files/run/red-black-tree-serial-new/NewTreeSerialFormat.scala +++ /dev/null @@ -1,59 +0,0 @@ -import scala.collection.immutable.{TreeSet, TreeMap} - -object Test extends App { - def write(o: AnyRef): Array[Byte] = { - val ba = new java.io.ByteArrayOutputStream(512) - val out = new java.io.ObjectOutputStream(ba) - out.writeObject(o) - out.close() - ba.toByteArray() - } - - def read(buffer: Array[Byte]): AnyRef = { - val in = - new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer)) - in.readObject() - } - - def parse(str: String) : Array[Byte] = { - val b = Array.newBuilder[Byte] - var i = 0 - while (i < str.length) { - b += java.lang.Integer.parseInt(str.substring(i, i+2), 16).toByte - i += 2 - } - b.result() - } - - def testRoundTrip(data: AnyRef) = { - println(s" original - $data") - println(s" original class - 
${data.getClass}") - val bytes = write(data) - println(s" binary - ${bytes.map(b => f"$b%02X").mkString}") - val recovered = read(bytes) - println(s" recovered - $recovered") - println(s" recovered class - ${recovered.getClass}") - assert(data == recovered) - } - def testParse(data:AnyRef, bin: String): Unit = { - val recovered = read(parse(bin)) - assert(data == recovered) - } - - - val data = List.tabulate(10)(i => s"key$i" -> s"value:$i") - val treeMap: TreeMap[String, String] = (TreeMap.newBuilder[String, String] ++= data).result() - val treeSet: TreeSet[String] = (TreeSet.newBuilder[String] ++= data.map(_._1)).result() - - testRoundTrip(treeMap) - testRoundTrip(treeSet) - - //legacy format - testParse(treeMap, "ACED0005737200227363616C612E636F6C6C656374696F6E2E696D6D757461626C652E547265654D6170416E108B62F3AFC20200024C00086F72646572696E677400154C7363616C612F6D6174682F4F72646572696E673B4C00047472656574002E4C7363616C612F636F6C6C656374696F6E2F696D6D757461626C652F526564426C61636B5472656524547265653B78707372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A0200007870737200317363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524426C61636B54726565CD1C6708A7A754010200007872002C7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524547265656BA824B21C96EC32020005490005636F756E744C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00046C65667471007E00024C0005726967687471007E00024C000576616C756571007E000878700000000A7400046B6579337371007E0006000000037400046B6579317371007E0006000000017400046B657930707074000776616C75653A307371007E0006000000017400046B657932707074000776616C75653A3274000776616C75653A317371007E0006000000067400046B6579357371007E0006000000017400046B657934707074000776616C75653A347372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524526564547265655A6F5AFFBDC0180C0200007871007E0007000000047400046B6579377371007E0006000000017400046B657936707074000776616C75653A
367371007E0006000000027400046B657938707371007E0019000000017400046B657939707074000776616C75653A3974000776616C75653A3874000776616C75653A3774000776616C75653A3574000776616C75653A33") - testParse(treeSet, "ACED0005737200227363616C612E636F6C6C656374696F6E2E696D6D757461626C652E54726565536574B117552038DB580B0200024C00086F72646572696E677400154C7363616C612F6D6174682F4F72646572696E673B4C00047472656574002E4C7363616C612F636F6C6C656374696F6E2F696D6D757461626C652F526564426C61636B5472656524547265653B78707372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A0200007870737200317363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524426C61636B54726565CD1C6708A7A754010200007872002C7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524547265656BA824B21C96EC32020005490005636F756E744C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00046C65667471007E00024C0005726967687471007E00024C000576616C756571007E000878700000000A7400046B6579337371007E0006000000037400046B6579317371007E0006000000017400046B6579307070737200177363616C612E72756E74696D652E426F786564556E697474A67D471DECCB9A02000078707371007E0006000000017400046B657932707071007E001071007E00107371007E0006000000067400046B6579357371007E0006000000017400046B657934707071007E00107372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524526564547265655A6F5AFFBDC0180C0200007871007E0007000000047400046B6579377371007E0006000000017400046B657936707071007E00107371007E0006000000027400046B657938707371007E0017000000017400046B657939707071007E001071007E001071007E001071007E001071007E0010") - - //new format - testParse(treeMap, 
"ACED00057372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E547265654D617024547265654D617050726F7879B1481B8B7DA251FC030000787077040000000A7372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A02000078707400046B65793074000776616C75653A307400046B65793174000776616C75653A317400046B65793274000776616C75653A327400046B65793374000776616C75653A337400046B65793474000776616C75653A347400046B65793574000776616C75653A357400046B65793674000776616C75653A367400046B65793774000776616C75653A377400046B65793874000776616C75653A387400046B65793974000776616C75653A3978") - testParse(treeSet, "ACED00057372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E54726565536574245472656553657450726F78798A8EF7F0C326F866030000787077040000000A7372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A02000078707400046B6579307400046B6579317400046B6579327400046B6579337400046B6579347400046B6579357400046B6579367400046B6579377400046B6579387400046B65793978") - -} \ No newline at end of file diff --git a/test/files/run/red-black-tree-serial.check b/test/files/run/red-black-tree-serial.check deleted file mode 100644 index 22f0b8ce3cc1..000000000000 --- a/test/files/run/red-black-tree-serial.check +++ /dev/null @@ -1,10 +0,0 @@ - original - Map(key0 -> value:0, key1 -> value:1, key2 -> value:2, key3 -> value:3, key4 -> value:4, key5 -> value:5, key6 -> value:6, key7 -> value:7, key8 -> value:8, key9 -> value:9) - original class - class scala.collection.immutable.TreeMap - binary - 
ACED0005737200227363616C612E636F6C6C656374696F6E2E696D6D757461626C652E547265654D6170416E108B62F3AFC20300024C00086F72646572696E677400154C7363616C612F6D6174682F4F72646572696E673B4C0004747265657400314C7363616C612F636F6C6C656374696F6E2F696D6D757461626C652F4E6577526564426C61636B5472656524547265653B78707372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A0200007870737200317363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524426C61636B54726565CD1C6708A7A754010200007872002C7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524547265656BA824B21C96EC32020005490005636F756E744C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00046C65667471007E00084C0005726967687471007E00084C000576616C756571007E000878700000000A7400046B6579337371007E0006000000037400046B6579317371007E0006000000017400046B657930707074000776616C75653A307371007E0006000000017400046B657932707074000776616C75653A3274000776616C75653A317371007E0006000000067400046B6579357371007E0006000000017400046B657934707074000776616C75653A347372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524526564547265655A6F5AFFBDC0180C0200007871007E0007000000047400046B6579377371007E0006000000017400046B657936707074000776616C75653A367371007E0006000000027400046B657938707371007E0019000000017400046B657939707074000776616C75653A3974000776616C75653A3874000776616C75653A3774000776616C75653A3574000776616C75653A3378 - recovered - Map(key0 -> value:0, key1 -> value:1, key2 -> value:2, key3 -> value:3, key4 -> value:4, key5 -> value:5, key6 -> value:6, key7 -> value:7, key8 -> value:8, key9 -> value:9) - recovered class - class scala.collection.immutable.TreeMap - original - TreeSet(key0, key1, key2, key3, key4, key5, key6, key7, key8, key9) - original class - class scala.collection.immutable.TreeSet - binary - 
ACED0005737200227363616C612E636F6C6C656374696F6E2E696D6D757461626C652E54726565536574B117552038DB580B0300024C00086F72646572696E677400154C7363616C612F6D6174682F4F72646572696E673B4C0004747265657400314C7363616C612F636F6C6C656374696F6E2F696D6D757461626C652F4E6577526564426C61636B5472656524547265653B78707372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A0200007870737200317363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524426C61636B54726565CD1C6708A7A754010200007872002C7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524547265656BA824B21C96EC32020005490005636F756E744C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00046C65667471007E00084C0005726967687471007E00084C000576616C756571007E000878700000000A7400046B6579337371007E0006000000037400046B6579317371007E0006000000017400046B6579307070737200177363616C612E72756E74696D652E426F786564556E697474A67D471DECCB9A02000078707371007E0006000000017400046B657932707071007E001071007E00107371007E0006000000067400046B6579357371007E0006000000017400046B657934707071007E00107372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524526564547265655A6F5AFFBDC0180C0200007871007E0007000000047400046B6579377371007E0006000000017400046B657936707071007E00107371007E0006000000027400046B657938707371007E0017000000017400046B657939707071007E001071007E001071007E001071007E001071007E001078 - recovered - TreeSet(key0, key1, key2, key3, key4, key5, key6, key7, key8, key9) - recovered class - class scala.collection.immutable.TreeSet diff --git a/test/files/run/red-black-tree-serial/TreeSerialFormat.scala b/test/files/run/red-black-tree-serial/TreeSerialFormat.scala deleted file mode 100644 index 689e0452d2f5..000000000000 --- a/test/files/run/red-black-tree-serial/TreeSerialFormat.scala +++ /dev/null @@ -1,60 +0,0 @@ -import scala.collection.immutable.{TreeSet, TreeMap} - -object Test extends App { - - def write(o: AnyRef): Array[Byte] = { - val ba = 
new java.io.ByteArrayOutputStream(512) - val out = new java.io.ObjectOutputStream(ba) - out.writeObject(o) - out.close() - ba.toByteArray() - } - - def read(buffer: Array[Byte]): AnyRef = { - val in = - new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer)) - in.readObject() - } - - def parse(str: String) : Array[Byte] = { - val b = Array.newBuilder[Byte] - var i = 0 - while (i < str.length) { - b += java.lang.Integer.parseInt(str.substring(i, i+2), 16).toByte - i += 2 - } - b.result() - } - - def testRoundTrip(data: AnyRef) = { - println(s" original - $data") - println(s" original class - ${data.getClass}") - val bytes = write(data) - println(s" binary - ${bytes.map(b => f"$b%02X").mkString}") - val recovered = read(bytes) - println(s" recovered - $recovered") - println(s" recovered class - ${recovered.getClass}") - assert(data == recovered) - } - def testParse(data:AnyRef, bin: String): Unit = { - val recovered = read(parse(bin)) - assert(data == recovered) - } - - - val data = List.tabulate(10)(i => s"key$i" -> s"value:$i") - val treeMap: TreeMap[String, String] = (TreeMap.newBuilder[String, String] ++= data).result() - val treeSet: TreeSet[String] = (TreeSet.newBuilder[String] ++= data.map(_._1)).result() - - testRoundTrip(treeMap) - testRoundTrip(treeSet) - - //legacy format - testParse(treeMap, 
"ACED0005737200227363616C612E636F6C6C656374696F6E2E696D6D757461626C652E547265654D6170416E108B62F3AFC20200024C00086F72646572696E677400154C7363616C612F6D6174682F4F72646572696E673B4C00047472656574002E4C7363616C612F636F6C6C656374696F6E2F696D6D757461626C652F526564426C61636B5472656524547265653B78707372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A0200007870737200317363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524426C61636B54726565CD1C6708A7A754010200007872002C7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524547265656BA824B21C96EC32020005490005636F756E744C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00046C65667471007E00024C0005726967687471007E00024C000576616C756571007E000878700000000A7400046B6579337371007E0006000000037400046B6579317371007E0006000000017400046B657930707074000776616C75653A307371007E0006000000017400046B657932707074000776616C75653A3274000776616C75653A317371007E0006000000067400046B6579357371007E0006000000017400046B657934707074000776616C75653A347372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524526564547265655A6F5AFFBDC0180C0200007871007E0007000000047400046B6579377371007E0006000000017400046B657936707074000776616C75653A367371007E0006000000027400046B657938707371007E0019000000017400046B657939707074000776616C75653A3974000776616C75653A3874000776616C75653A3774000776616C75653A3574000776616C75653A33") - testParse(treeSet, 
"ACED0005737200227363616C612E636F6C6C656374696F6E2E696D6D757461626C652E54726565536574B117552038DB580B0200024C00086F72646572696E677400154C7363616C612F6D6174682F4F72646572696E673B4C00047472656574002E4C7363616C612F636F6C6C656374696F6E2F696D6D757461626C652F526564426C61636B5472656524547265653B78707372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A0200007870737200317363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524426C61636B54726565CD1C6708A7A754010200007872002C7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524547265656BA824B21C96EC32020005490005636F756E744C00036B65797400124C6A6176612F6C616E672F4F626A6563743B4C00046C65667471007E00024C0005726967687471007E00024C000576616C756571007E000878700000000A7400046B6579337371007E0006000000037400046B6579317371007E0006000000017400046B6579307070737200177363616C612E72756E74696D652E426F786564556E697474A67D471DECCB9A02000078707371007E0006000000017400046B657932707071007E001071007E00107371007E0006000000067400046B6579357371007E0006000000017400046B657934707071007E00107372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E526564426C61636B5472656524526564547265655A6F5AFFBDC0180C0200007871007E0007000000047400046B6579377371007E0006000000017400046B657936707071007E00107371007E0006000000027400046B657938707371007E0017000000017400046B657939707071007E001071007E001071007E001071007E001071007E0010") - - //new format - testParse(treeMap, 
"ACED00057372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E547265654D617024547265654D617050726F7879B1481B8B7DA251FC030000787077040000000A7372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A02000078707400046B65793074000776616C75653A307400046B65793174000776616C75653A317400046B65793274000776616C75653A327400046B65793374000776616C75653A337400046B65793474000776616C75653A347400046B65793574000776616C75653A357400046B65793674000776616C75653A367400046B65793774000776616C75653A377400046B65793874000776616C75653A387400046B65793974000776616C75653A3978") - testParse(treeSet, "ACED00057372002F7363616C612E636F6C6C656374696F6E2E696D6D757461626C652E54726565536574245472656553657450726F78798A8EF7F0C326F866030000787077040000000A7372001B7363616C612E6D6174682E4F72646572696E6724537472696E6724DD981A719E75481A02000078707400046B6579307400046B6579317400046B6579327400046B6579337400046B6579347400046B6579357400046B6579367400046B6579377400046B6579387400046B65793978") - -} \ No newline at end of file diff --git a/test/files/run/reflect-java-param-names/J_1.java b/test/files/run/reflect-java-param-names/J_1.java index 61e2a765a148..e19f6e46a16c 100644 --- a/test/files/run/reflect-java-param-names/J_1.java +++ b/test/files/run/reflect-java-param-names/J_1.java @@ -1,8 +1,6 @@ -/* - * javac: -parameters - */ +//> using javacOpt -parameters public class J_1 { public J_1(int i, int j) {} public void inst(int i, J j) {} public static void statik(int i, J j) {} -} \ No newline at end of file +} diff --git a/test/files/run/reflect-java-param-names/Test_2.scala b/test/files/run/reflect-java-param-names/Test_2.scala index ffb0debe688a..ac8ce956d108 100644 --- a/test/files/run/reflect-java-param-names/Test_2.scala +++ b/test/files/run/reflect-java-param-names/Test_2.scala @@ -6,11 +6,11 @@ object Test extends App { val inst = j_1.info.decl(TermName("inst")) val statik = j_1.companion.info.decl(TermName("statik")) - def check(info: Type) { + def check(info: Type): Unit = { 
assert(info.paramLists.head.map(_.name) == List(TermName("i"), TermName("j")), info) } check(constr.info) check(inst.info) check(statik.info) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-allmirrors-tostring.check b/test/files/run/reflection-allmirrors-tostring.check index 3003cce6c04c..1dea2f41d007 100644 --- a/test/files/run/reflection-allmirrors-tostring.check +++ b/test/files/run/reflection-allmirrors-tostring.check @@ -7,7 +7,7 @@ method mirror for def m1: Int (bound to an instance of C) method mirror for def m2(): Int (bound to an instance of C) method mirror for def m3[T >: String <: Int]: T (bound to an instance of C) method mirror for def m4[A[_], B <: A[Int]](x: A[B])(implicit y: Int): Nothing (bound to an instance of C) -method mirror for def m5(x: => Int,y: Int*): String (bound to an instance of C) +method mirror for def m5(x: => Int, y: Int*): String (bound to an instance of C) class mirror for C.C (bound to an instance of C) module mirror for C.M (bound to an instance of C) constructor mirror for def (): C (bound to null) diff --git a/test/files/run/reflection-allmirrors-tostring.scala b/test/files/run/reflection-allmirrors-tostring.scala index f0614e9a9895..4c4ad360746f 100644 --- a/test/files/run/reflection-allmirrors-tostring.scala +++ b/test/files/run/reflection-allmirrors-tostring.scala @@ -1,4 +1,3 @@ -import scala.language.higherKinds import scala.reflect.runtime.universe._ class C { diff --git a/test/files/run/reflection-companion.scala b/test/files/run/reflection-companion.scala index 0f62dead120c..63929091c9c2 100644 --- a/test/files/run/reflection-companion.scala +++ b/test/files/run/reflection-companion.scala @@ -13,4 +13,4 @@ object Test extends App { println(showRaw(symbolOf[T].companion, printKinds = true)) println(showRaw(cm.staticPackage("scala").moduleClass.companion, printKinds = true)) println(showRaw(cm.staticPackage("scala").companion, printKinds = true)) -} \ No newline at end of file +} diff --git 
a/test/files/run/reflection-companiontype.scala b/test/files/run/reflection-companiontype.scala index 0f63457670a5..1111bbd06cec 100644 --- a/test/files/run/reflection-companiontype.scala +++ b/test/files/run/reflection-companiontype.scala @@ -19,4 +19,4 @@ object Test extends App { println(showRaw(typeOf[T].companion, printKinds = true)) println(showRaw(cm.staticPackage("scala").moduleClass.asType.toType.companion, printKinds = true)) println(showRaw(cm.staticPackage("scala").info.companion, printKinds = true)) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-constructormirror-inner-badpath.scala b/test/files/run/reflection-constructormirror-inner-badpath.scala index e7c06b32ae35..159aa1d518e9 100644 --- a/test/files/run/reflection-constructormirror-inner-badpath.scala +++ b/test/files/run/reflection-constructormirror-inner-badpath.scala @@ -29,4 +29,4 @@ class Foo{ object Test extends App{ val foo = new Foo println( foo.foo ) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-constructormirror-inner-good.scala b/test/files/run/reflection-constructormirror-inner-good.scala index c09da5b300da..c57a8d4b760b 100644 --- a/test/files/run/reflection-constructormirror-inner-good.scala +++ b/test/files/run/reflection-constructormirror-inner-good.scala @@ -23,4 +23,4 @@ class Foo{ object Test extends App{ val foo = new Foo println( foo.foo ) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-constructormirror-nested-badpath.scala b/test/files/run/reflection-constructormirror-nested-badpath.scala index cf0de77e10bf..c5beb95c6b93 100644 --- a/test/files/run/reflection-constructormirror-nested-badpath.scala +++ b/test/files/run/reflection-constructormirror-nested-badpath.scala @@ -29,4 +29,4 @@ object Test extends App{ ) val foo = new Foo println( foo.foo ) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-constructormirror-nested-good.scala 
b/test/files/run/reflection-constructormirror-nested-good.scala index 363b7204613b..7a5947a2b0d9 100644 --- a/test/files/run/reflection-constructormirror-nested-good.scala +++ b/test/files/run/reflection-constructormirror-nested-good.scala @@ -23,4 +23,4 @@ object Test extends App{ ) val foo = new Foo println( foo.foo ) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-constructormirror-toplevel-badpath.scala b/test/files/run/reflection-constructormirror-toplevel-badpath.scala index eda4aa05310b..3fc9f3f557db 100644 --- a/test/files/run/reflection-constructormirror-toplevel-badpath.scala +++ b/test/files/run/reflection-constructormirror-toplevel-badpath.scala @@ -30,4 +30,4 @@ class Foo{ object Test extends App{ val foo = new Foo println( foo.foo ) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-constructormirror-toplevel-good.scala b/test/files/run/reflection-constructormirror-toplevel-good.scala index 9842d0169542..de9149fb447a 100644 --- a/test/files/run/reflection-constructormirror-toplevel-good.scala +++ b/test/files/run/reflection-constructormirror-toplevel-good.scala @@ -24,4 +24,4 @@ class Foo{ object Test extends App{ val foo = new Foo println( foo.foo ) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check index b995e0cfb124..379b3875d797 100644 --- a/test/files/run/reflection-equality.check +++ b/test/files/run/reflection-equality.check @@ -2,7 +2,7 @@ scala> class X { def methodIntIntInt(x: Int, y: Int) = x+y } -defined class X +class X scala> @@ -13,37 +13,37 @@ scala> import scala.reflect.runtime.{ currentMirror => cm } import scala.reflect.runtime.{currentMirror=>cm} scala> def im: InstanceMirror = cm.reflect(new X) -im: reflect.runtime.universe.InstanceMirror +def im: reflect.runtime.universe.InstanceMirror scala> val cs: ClassSymbol = im.symbol -cs: reflect.runtime.universe.ClassSymbol = class X +val cs: 
reflect.runtime.universe.ClassSymbol = class X scala> val ts: Type = cs.info -ts: reflect.runtime.universe.Type = +val ts: reflect.runtime.universe.Type = AnyRef { def (): X - def methodIntIntInt(x: Int,y: Int): Int + def methodIntIntInt(x: Int, y: Int): Int } scala> val ms: MethodSymbol = ts.decl(TermName("methodIntIntInt")).asMethod -ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt +val ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt scala> val MethodType( _, t1 ) = ms.info -t1: reflect.runtime.universe.Type = Int +val t1: reflect.runtime.universe.Type = Int scala> val t2 = typeOf[scala.Int] -t2: reflect.runtime.universe.Type = Int +val t2: reflect.runtime.universe.Type = Int scala> t1 == t2 -res0: Boolean = false +val res0: Boolean = false scala> t1 =:= t2 -res1: Boolean = true +val res1: Boolean = true scala> t1 <:< t2 -res2: Boolean = true +val res2: Boolean = true scala> t2 <:< t1 -res3: Boolean = true +val res3: Boolean = true scala> :quit diff --git a/test/files/run/reflection-fancy-java-classes/Test_2.scala b/test/files/run/reflection-fancy-java-classes/Test_2.scala index bd131a1c6e78..19ef51aa7c99 100644 --- a/test/files/run/reflection-fancy-java-classes/Test_2.scala +++ b/test/files/run/reflection-fancy-java-classes/Test_2.scala @@ -19,4 +19,4 @@ object Test extends App { val sfancy = cm.classSymbol(jfancy) println(sfancy) println(sfancy.owner) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-fieldsymbol-navigation.scala b/test/files/run/reflection-fieldsymbol-navigation.scala index 33dc18a7e33f..924d97f5e947 100644 --- a/test/files/run/reflection-fieldsymbol-navigation.scala +++ b/test/files/run/reflection-fieldsymbol-navigation.scala @@ -12,4 +12,4 @@ object Test extends App { println(x.accessed.asTerm.isVar) println(x.getter) println(x.setter) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-idtc.scala b/test/files/run/reflection-idtc.scala index 
f9eae612f0c2..c2b0347c73d9 100644 --- a/test/files/run/reflection-idtc.scala +++ b/test/files/run/reflection-idtc.scala @@ -13,4 +13,4 @@ object Test extends App { println(idTC2) println(appliedType(idTC2, List(typeOf[Int]))) println(appliedType(idTC2, List(typeOf[Int])).dealias) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-implicit.scala b/test/files/run/reflection-implicit.scala index a6e939322ad5..1e104d1e5809 100644 --- a/test/files/run/reflection-implicit.scala +++ b/test/files/run/reflection-implicit.scala @@ -3,7 +3,7 @@ import scala.language.implicitConversions import scala.reflect.runtime.universe._ class C { - implicit val v = new C + implicit val v: C = new C implicit def d(x: C)(implicit c: C): Int = ??? implicit class X(val x: Int) } diff --git a/test/files/run/reflection-java-crtp/Main_2.scala b/test/files/run/reflection-java-crtp/Main_2.scala index b9347869e439..b9361131023c 100644 --- a/test/files/run/reflection-java-crtp/Main_2.scala +++ b/test/files/run/reflection-java-crtp/Main_2.scala @@ -1,8 +1,8 @@ object Test extends App { import scala.reflect.runtime.universe._ - val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass + val `enum` = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass // make sure that the E's in Enum> are represented by the same symbol - val e1 = enum.typeParams(0).asType + val e1 = `enum`.typeParams(0).asType val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.info println(e1, e2, e1 eq e2) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check index 2a74b2b6f865..88a692e26e73 100644 --- a/test/files/run/reflection-magicsymbols-invoke.check +++ b/test/files/run/reflection-magicsymbols-invoke.check @@ -2,15 +2,15 @@ Any it's important to print the list of Any's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test 
-method !=: (x$1: Any)Boolean -method ##: ()Int -method ==: (x$1: Any)Boolean -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[_] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toString: ()String +method !=: (x$1: Any): Boolean +method ##: Int +method ==: (x$1: Any): Boolean +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[_] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toString: (): String testing Any.!=: false testing Any.##: 50 testing Any.==: true @@ -26,37 +26,37 @@ testing Any.toString: 2 AnyVal it's important to print the list of AnyVal's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor AnyVal: ()AnyVal -method getClass: ()Class[_ <: AnyVal] +constructor AnyVal: (): AnyVal +method getClass: (): Class[_ <: AnyVal] testing AnyVal.: class scala.ScalaReflectionException: unsupported symbol constructor AnyVal when invoking bytecodeless method mirror for def (): AnyVal (bound to null) testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member of class Integer, you provided method scala.AnyVal.getClass ============ AnyRef it's important to print the list of AnyRef's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Object: ()Object -method !=: (x$1: Any)Boolean -method ##: ()Int -method ==: (x$1: Any)Boolean -method asInstanceOf: [T0]=> T0 -method clone: ()Object -method eq: (x$1: AnyRef)Boolean -method equals: (x$1: Any)Boolean -method finalize: ()Unit -method getClass: ()Class[_] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method ne: (x$1: AnyRef)Boolean -method notify: ()Unit -method notifyAll: ()Unit -method synchronized: [T0](x$1: T0)T0 -method toString: ()String +constructor Object: (): Object +method !=: (x$1: Any): Boolean +method ##: Int +method ==: 
(x$1: Any): Boolean +method asInstanceOf: [T0]T0 +method clone: (): Object +method eq: (x$1: AnyRef): Boolean +method equals: (x$1: Object): Boolean +method finalize: (): Unit +method getClass: (): Class[_] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method ne: (x$1: AnyRef): Boolean +method notify: (): Unit +method notifyAll: (): Unit +method synchronized: [T0](x$1: T0): T0 +method toString: (): String #partest java20+ -method wait0: (x$1: Long)Unit +method wait0: (x$1: Long): Unit #partest -method wait: ()Unit -method wait: (x$1: Long)Unit -method wait: (x$1: Long, x$2: Int)Unit +method wait: (): Unit +method wait: (x$1: Long): Unit +method wait: (x$1: Long, x$2: Int): Unit testing Object.!=: false testing Object.##: 50 testing Object.==: true @@ -81,29 +81,29 @@ TODO: also test AnyRef.wait overloads Array it's important to print the list of Array's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Array: (_length: Int)Array[T] -method !=: (x$1: Any)Boolean -method ##: ()Int -method ==: (x$1: Any)Boolean -method apply: (i: Int)T -method asInstanceOf: [T0]=> T0 -method clone: ()Array[T] -method eq: (x$1: AnyRef)Boolean -method equals: (x$1: Any)Boolean -method finalize: ()Unit -method getClass: ()Class[_] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method length: => Int -method ne: (x$1: AnyRef)Boolean -method notify: ()Unit -method notifyAll: ()Unit -method synchronized: [T0](x$1: T0)T0 -method toString: ()String -method update: (i: Int, x: T)Unit -method wait: ()Unit -method wait: (x$1: Long)Unit -method wait: (x$1: Long, x$2: Int)Unit +constructor Array: (_length: Int): Array[T] +method !=: (x$1: Any): Boolean +method ##: Int +method ==: (x$1: Any): Boolean +method apply: (i: Int): T +method asInstanceOf: [T0]T0 +method clone: (): Array[T] +method eq: (x$1: AnyRef): Boolean +method equals: (x$1: Object): Boolean +method finalize: (): Unit +method getClass: 
(): Class[_] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method length: Int +method ne: (x$1: AnyRef): Boolean +method notify: (): Unit +method notifyAll: (): Unit +method synchronized: [T0](x$1: T0): T0 +method toString: (): String +method update: (i: Int, x: T): Unit +method wait: (): Unit +method wait: (x$1: Long): Unit +method wait: (x$1: Long, x$2: Int): Unit value _length: Int testing Array.length: 2 testing Array.apply: 1 diff --git a/test/files/run/reflection-magicsymbols-invoke.scala b/test/files/run/reflection-magicsymbols-invoke.scala index df7d293514fe..aae8defd796e 100644 --- a/test/files/run/reflection-magicsymbols-invoke.scala +++ b/test/files/run/reflection-magicsymbols-invoke.scala @@ -9,8 +9,8 @@ package scala { } object Test extends App { - def key(sym: Symbol) = sym + ": " + sym.info - def test(tpe: Type, receiver: Any, method: String, args: Any*) { + def key(sym: Symbol) = s"$sym: ${sym.info}" + def test(tpe: Type, receiver: Any, method: String, args: Any*): Unit = { def wrap[T](op: => T) = try { var result = op.asInstanceOf[AnyRef] @@ -20,7 +20,7 @@ object Test extends App { } catch { case ex: Throwable => val realex = scala.ExceptionUtils.unwrapThrowable(ex) - println(realex.getClass + ": " + realex.getMessage) + println(s"${realex.getClass}: ${realex.getMessage}") } print(s"testing ${tpe.typeSymbol.name}.$method: ") wrap { diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check index 750abfeaa11d..0b0431cb3de3 100644 --- a/test/files/run/reflection-magicsymbols-repl.check +++ b/test/files/run/reflection-magicsymbols-repl.check @@ -12,15 +12,14 @@ scala> class A { def foo7(x: Nothing) = ??? def foo8(x: Singleton) = ??? 
} -defined class A +class A scala> def test(n: Int): Unit = { - val sig = typeOf[A] member TermName("foo" + n) info + val sig = typeOf[A].member(TermName("foo" + n)).info val x = sig.asInstanceOf[MethodType].params.head println(x.info) } -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -test: (n: Int)Unit +def test(n: Int): Unit scala> for (i <- 1 to 8) test(i) Int* diff --git a/test/files/run/reflection-magicsymbols-repl.scala b/test/files/run/reflection-magicsymbols-repl.scala index c006e85b3a29..a9f1fa02bd80 100644 --- a/test/files/run/reflection-magicsymbols-repl.scala +++ b/test/files/run/reflection-magicsymbols-repl.scala @@ -14,7 +14,7 @@ object Test extends ReplTest { | def foo8(x: Singleton) = ??? |} |def test(n: Int): Unit = { - | val sig = typeOf[A] member TermName("foo" + n) info + | val sig = typeOf[A].member(TermName("foo" + n)).info | val x = sig.asInstanceOf[MethodType].params.head | println(x.info) |} diff --git a/test/files/run/reflection-mem-glbs.javaopts b/test/files/run/reflection-mem-glbs.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/reflection-mem-glbs.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/reflection-mem-glbs.scala b/test/files/run/reflection-mem-glbs.scala index 3f29a914bc06..fe3c9a83e8ff 100644 --- a/test/files/run/reflection-mem-glbs.scala +++ b/test/files/run/reflection-mem-glbs.scala @@ -1,3 +1,5 @@ +//> using javaOpt -Xmx512m + import scala.tools.partest.MemoryTest trait A { type T <: A } @@ -6,8 +8,8 @@ trait B { type T <: B } object Test extends MemoryTest { override def maxDelta = 10 override def calcsPerIter = 50000 - override def calc() { + override def calc(): Unit = { import scala.reflect.runtime.universe._ glb(List(typeOf[A], typeOf[B])) } -} \ No newline at end of file +} diff --git a/test/files/run/reflection-mem-tags.javaopts b/test/files/run/reflection-mem-tags.javaopts 
deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/reflection-mem-tags.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/reflection-mem-tags.scala b/test/files/run/reflection-mem-tags.scala index 8815e7dcd8a0..dbd434b4422c 100644 --- a/test/files/run/reflection-mem-tags.scala +++ b/test/files/run/reflection-mem-tags.scala @@ -1,3 +1,5 @@ +//> using javaOpt -Xmx512m + import scala.tools.partest.MemoryTest trait A { type T <: A } @@ -6,7 +8,7 @@ trait B { type T <: B } object Test extends MemoryTest { override def maxDelta = 10 override def calcsPerIter = 100000 - override def calc() { + override def calc(): Unit = { import scala.reflect.runtime.universe._ def foo = { class A { def x = 2; def y: A = new A } @@ -14,4 +16,4 @@ object Test extends MemoryTest { } foo } -} \ No newline at end of file +} diff --git a/test/files/run/reflection-methodsymbol-params.scala b/test/files/run/reflection-methodsymbol-params.scala index bc1289a62516..d8d76b2db173 100644 --- a/test/files/run/reflection-methodsymbol-params.scala +++ b/test/files/run/reflection-methodsymbol-params.scala @@ -21,4 +21,4 @@ object Test extends App { println(typeOf[C].member(TermName("y2")).asMethod.paramLists) println(typeOf[C].member(TermName("y3")).asMethod.paramLists) println(typeOf[C].member(TermName("y4")).asMethod.paramLists) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-methodsymbol-returntype.scala b/test/files/run/reflection-methodsymbol-returntype.scala index 74a9e5dac03d..7cf5aa0c1667 100644 --- a/test/files/run/reflection-methodsymbol-returntype.scala +++ b/test/files/run/reflection-methodsymbol-returntype.scala @@ -21,4 +21,4 @@ object Test extends App { println(typeOf[C].member(TermName("y2")).asMethod.returnType) println(typeOf[C].member(TermName("y3")).asMethod.returnType) println(typeOf[C].member(TermName("y4")).asMethod.returnType) -} \ No newline at end of file +} diff --git 
a/test/files/run/reflection-methodsymbol-typeparams.scala b/test/files/run/reflection-methodsymbol-typeparams.scala index 56d37ebeaa95..51b52b24c37b 100644 --- a/test/files/run/reflection-methodsymbol-typeparams.scala +++ b/test/files/run/reflection-methodsymbol-typeparams.scala @@ -21,4 +21,4 @@ object Test extends App { println(typeOf[C].member(TermName("y2")).asMethod.typeParams) println(typeOf[C].member(TermName("y3")).asMethod.typeParams) println(typeOf[C].member(TermName("y4")).asMethod.typeParams) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-modulemirror-inner-badpath.scala b/test/files/run/reflection-modulemirror-inner-badpath.scala index de200c548b39..6ecb4234f79e 100644 --- a/test/files/run/reflection-modulemirror-inner-badpath.scala +++ b/test/files/run/reflection-modulemirror-inner-badpath.scala @@ -21,4 +21,4 @@ class Foo{ object Test extends App{ val foo = new Foo println(foo.foo) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-modulemirror-inner-good.scala b/test/files/run/reflection-modulemirror-inner-good.scala index b5540dbe1ce2..ec66c916015e 100644 --- a/test/files/run/reflection-modulemirror-inner-good.scala +++ b/test/files/run/reflection-modulemirror-inner-good.scala @@ -20,4 +20,4 @@ class Foo{ object Test extends App{ val foo = new Foo println(foo.foo) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-modulemirror-nested-good.scala b/test/files/run/reflection-modulemirror-nested-good.scala index 8390bbaac76b..cc123dbec163 100644 --- a/test/files/run/reflection-modulemirror-nested-good.scala +++ b/test/files/run/reflection-modulemirror-nested-good.scala @@ -21,4 +21,4 @@ object Test extends App{ object R { override def toString = "R" } val foo = new Foo println(foo.foo) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-modulemirror-toplevel-badpath.scala b/test/files/run/reflection-modulemirror-toplevel-badpath.scala index 
8b2994cabbfa..9fc69599fb4b 100644 --- a/test/files/run/reflection-modulemirror-toplevel-badpath.scala +++ b/test/files/run/reflection-modulemirror-toplevel-badpath.scala @@ -23,4 +23,4 @@ class Foo{ object Test extends App{ val foo = new Foo println(foo.foo) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-modulemirror-toplevel-good.scala b/test/files/run/reflection-modulemirror-toplevel-good.scala index 0663136a22f9..6ca1071dd75e 100644 --- a/test/files/run/reflection-modulemirror-toplevel-good.scala +++ b/test/files/run/reflection-modulemirror-toplevel-good.scala @@ -17,4 +17,4 @@ class Foo{ object Test extends App{ val foo = new Foo println(foo.foo) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-package-name-conflict.check b/test/files/run/reflection-package-name-conflict.check new file mode 100644 index 000000000000..441e2e9c6609 --- /dev/null +++ b/test/files/run/reflection-package-name-conflict.check @@ -0,0 +1,4 @@ +class c +class c +class c +class c diff --git a/test/files/run/reflection-package-name-conflict/A_1.scala b/test/files/run/reflection-package-name-conflict/A_1.scala new file mode 100644 index 000000000000..76975abc2962 --- /dev/null +++ b/test/files/run/reflection-package-name-conflict/A_1.scala @@ -0,0 +1,8 @@ +package a { + object `package` { + val b2 = 42 + } + package b1 { + class c + } +} diff --git a/test/files/run/reflection-package-name-conflict/A_2.scala b/test/files/run/reflection-package-name-conflict/A_2.scala new file mode 100644 index 000000000000..989f85f9feaa --- /dev/null +++ b/test/files/run/reflection-package-name-conflict/A_2.scala @@ -0,0 +1,9 @@ +package a { + object `package` { + val b1 = 42 + } + + package b2 { + class c + } +} diff --git a/test/files/run/reflection-package-name-conflict/Test.scala b/test/files/run/reflection-package-name-conflict/Test.scala new file mode 100644 index 000000000000..4f35a011417b --- /dev/null +++ 
b/test/files/run/reflection-package-name-conflict/Test.scala @@ -0,0 +1,10 @@ +import reflect.runtime.universe._ + +object Test { + def main(args: Array[String]): Unit = { + for (clsName <- List("a.b1.c", "a.b2.c")) { + println(rootMirror.classSymbol(Class.forName("a.b1.c"))) + println(rootMirror.classSymbol(Class.forName("a.b2.c"))) + } + } +} diff --git a/test/files/run/reflection-repl-classes.check b/test/files/run/reflection-repl-classes.check index df889b9da6b1..9da0211515fb 100644 --- a/test/files/run/reflection-repl-classes.check +++ b/test/files/run/reflection-repl-classes.check @@ -1,13 +1,13 @@ scala> class A -defined class A +class A scala> scala> class B { def foo(x: A) = 1 } -defined class B +class B scala> @@ -18,7 +18,7 @@ scala> object defs { val method = im.symbol.info.member(u.TermName("foo")).asMethod val mm = im.reflectMethod(method) } -defined object defs +object defs scala> import defs._ import defs._ @@ -26,6 +26,6 @@ import defs._ scala> scala> mm(new A) -res0: Any = 1 +val res0: Any = 1 scala> :quit diff --git a/test/files/run/reflection-repl-elementary.check b/test/files/run/reflection-repl-elementary.check index 9420c3ed1fb8..e7dc158e4a73 100644 --- a/test/files/run/reflection-repl-elementary.check +++ b/test/files/run/reflection-repl-elementary.check @@ -1,5 +1,5 @@ scala> scala.reflect.runtime.universe.typeOf[List[Nothing]] -res0: reflect.runtime.universe.Type = List[Nothing] +val res0: reflect.runtime.universe.Type = List[Nothing] scala> :quit diff --git a/test/files/run/reflection-sanitychecks.scala b/test/files/run/reflection-sanitychecks.scala index 3f4873bbee2d..47dfcdcc261d 100644 --- a/test/files/run/reflection-sanitychecks.scala +++ b/test/files/run/reflection-sanitychecks.scala @@ -46,4 +46,4 @@ object Test extends App { test(typeOf[C]) test(typeOf[D]) test(typeOf[E]) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-scala-annotations.check b/test/files/run/reflection-scala-annotations.check index 
1a5806455ee8..e4fdb89b757e 100644 --- a/test/files/run/reflection-scala-annotations.check +++ b/test/files/run/reflection-scala-annotations.check @@ -1,7 +1,2 @@ -reflection-scala-annotations.scala:5: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class jann(x: Int, y: Array[Int]) extends ClassfileAnnotation - ^ -new sann(1, scala.collection.immutable.List.apply[Int](1, 2)) -new jann(y = Array(1, 2), x = 2) +new sann(1, scala.`package`.List.apply[Int](1, 2)) +new jann(x = 2, y = Array(1, 2)) diff --git a/test/files/run/reflection-scala-annotations.scala b/test/files/run/reflection-scala-annotations.scala index f6a6895ee06b..10676ef208e5 100644 --- a/test/files/run/reflection-scala-annotations.scala +++ b/test/files/run/reflection-scala-annotations.scala @@ -2,7 +2,7 @@ import scala.reflect.runtime.universe._ import scala.annotation._ class sann(x: Int, y: List[Int]) extends StaticAnnotation -class jann(x: Int, y: Array[Int]) extends ClassfileAnnotation +class jann(x: Int, y: Array[Int]) extends ConstantAnnotation @sann(1, List(1, 2)) class S diff --git a/test/files/run/reflection-sorted-decls.scala b/test/files/run/reflection-sorted-decls.scala index 8dcb0f3ec6cd..9befd942cfe4 100644 --- a/test/files/run/reflection-sorted-decls.scala +++ b/test/files/run/reflection-sorted-decls.scala @@ -1,5 +1,5 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { class Foo(val a: Int, val b: Int, val c: Int) import scala.reflect.runtime.{currentMirror => cm} val decls = cm.classSymbol(classOf[Foo]).info.decls diff --git a/test/files/run/reflection-sorted-members.scala b/test/files/run/reflection-sorted-members.scala index fa028c99c641..7a09f9d296b0 100644 --- a/test/files/run/reflection-sorted-members.scala +++ b/test/files/run/reflection-sorted-members.scala @@ -1,5 +1,5 @@ object Test { - def 
main(args: Array[String]) { + def main(args: Array[String]): Unit = { trait T1 { def a: Int; def c: Int } trait T2 { def a: Int; def b: Int } class Bar(val x: Int) diff --git a/test/files/run/reflection-sync-potpourri.scala b/test/files/run/reflection-sync-potpourri.scala index f65131f18a3b..0af48bb20e56 100644 --- a/test/files/run/reflection-sync-potpourri.scala +++ b/test/files/run/reflection-sync-potpourri.scala @@ -29,4 +29,4 @@ object Test extends App { } }) threads foreach (_.start) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-sync-subtypes.check b/test/files/run/reflection-sync-subtypes.check deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/test/files/run/reflection-sync-subtypes.scala b/test/files/run/reflection-sync-subtypes.scala index 7f75a464ac3f..d5e6a864e46c 100644 --- a/test/files/run/reflection-sync-subtypes.scala +++ b/test/files/run/reflection-sync-subtypes.scala @@ -17,4 +17,4 @@ object Test extends App { } }) threads foreach (_.start) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-tags.scala b/test/files/run/reflection-tags.scala index 3d7c7b2a0a0b..54b561b574c1 100644 --- a/test/files/run/reflection-tags.scala +++ b/test/files/run/reflection-tags.scala @@ -18,4 +18,4 @@ object Test extends App { case _ => false })) println(outliers) -} \ No newline at end of file +} diff --git a/test/files/run/reflection-valueclasses-derived.scala b/test/files/run/reflection-valueclasses-derived.scala index 8d25e2929c69..8c03c2aab1ab 100644 --- a/test/files/run/reflection-valueclasses-derived.scala +++ b/test/files/run/reflection-valueclasses-derived.scala @@ -9,4 +9,4 @@ object Test extends App { println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("foo")).asMethod)(2)) println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("getClass")).asMethod)()) println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("toString")).asMethod)()) -} \ No 
newline at end of file +} diff --git a/test/files/run/reflection-valueclasses-magic.check b/test/files/run/reflection-valueclasses-magic.check index 2fa09dae690a..11fd9f3cd09a 100644 --- a/test/files/run/reflection-valueclasses-magic.check +++ b/test/files/run/reflection-valueclasses-magic.check @@ -2,125 +2,125 @@ Byte it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Byte: ()Byte -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean -method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Int -method %: (x: Char)Int -method %: (x: Double)Double -method %: (x: Float)Float -method %: (x: Int)Int -method %: (x: Long)Long -method %: (x: Short)Int -method &: (x: Byte)Int -method &: (x: Char)Int -method &: (x: Int)Int -method &: (x: Long)Long -method &: (x: Short)Int -method *: (x: Byte)Int -method *: (x: Char)Int -method *: (x: Double)Double -method *: (x: Float)Float -method *: (x: Int)Int -method *: (x: Long)Long -method *: (x: Short)Int -method +: (x: Byte)Int -method +: (x: Char)Int -method +: (x: Double)Double -method +: (x: Float)Float -method +: (x: Int)Int -method +: (x: Long)Long -method +: (x: Short)Int -method +: (x: String)String -method -: (x: Byte)Int -method -: (x: Char)Int -method -: (x: Double)Double -method -: (x: Float)Float -method -: (x: Int)Int -method -: (x: Long)Long -method -: (x: Short)Int -method /: (x: Byte)Int -method /: (x: Char)Int -method /: (x: Double)Double -method /: (x: Float)Float -method /: (x: Int)Int -method /: (x: Long)Long -method /: (x: Short)Int -method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <<: 
(x: Int)Int -method <<: (x: Long)Int -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean -method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean -method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method >>: (x: Int)Int -method >>: (x: Long)Int -method >>>: (x: Int)Int -method >>>: (x: Long)Int -method ^: (x: Byte)Int -method ^: (x: Char)Int -method ^: (x: Int)Int -method ^: (x: Long)Long -method ^: (x: Short)Int -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Byte] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Int -method unary_-: => Int -method unary_~: => Int -method |: (x: Byte)Int -method |: (x: Char)Int -method |: (x: Int)Int -method |: (x: Long)Long -method |: (x: Short)Int +constructor Byte: (): Byte +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method !=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Int +method %: (x: 
Char): Int +method %: (x: Double): Double +method %: (x: Float): Float +method %: (x: Int): Int +method %: (x: Long): Long +method %: (x: Short): Int +method &: (x: Byte): Int +method &: (x: Char): Int +method &: (x: Int): Int +method &: (x: Long): Long +method &: (x: Short): Int +method *: (x: Byte): Int +method *: (x: Char): Int +method *: (x: Double): Double +method *: (x: Float): Float +method *: (x: Int): Int +method *: (x: Long): Long +method *: (x: Short): Int +method +: (x: Byte): Int +method +: (x: Char): Int +method +: (x: Double): Double +method +: (x: Float): Float +method +: (x: Int): Int +method +: (x: Long): Long +method +: (x: Short): Int +method +: (x: String): String +method -: (x: Byte): Int +method -: (x: Char): Int +method -: (x: Double): Double +method -: (x: Float): Float +method -: (x: Int): Int +method -: (x: Long): Long +method -: (x: Short): Int +method /: (x: Byte): Int +method /: (x: Char): Int +method /: (x: Double): Double +method /: (x: Float): Float +method /: (x: Int): Int +method /: (x: Long): Long +method /: (x: Short): Int +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <<: (x: Int): Int +method <<: (x: Long): Int +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean +method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method ==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean 
+method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method >>: (x: Int): Int +method >>: (x: Long): Int +method >>>: (x: Int): Int +method >>>: (x: Long): Int +method ^: (x: Byte): Int +method ^: (x: Char): Int +method ^: (x: Int): Int +method ^: (x: Long): Long +method ^: (x: Short): Int +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Byte] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Int +method unary_-: Int +method unary_~: Int +method |: (x: Byte): Int +method |: (x: Char): Int +method |: (x: Int): Int +method |: (x: Long): Long +method |: (x: Short): Int testing Byte.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2 testing Byte.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2 testing Byte.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>  @@ -210,125 +210,125 @@ testing Byte.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Dou Short it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Short: ()Short -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean -method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Int -method %: (x: Char)Int -method %: (x: Double)Double -method %: (x: Float)Float -method %: (x: Int)Int -method %: (x: 
Long)Long -method %: (x: Short)Int -method &: (x: Byte)Int -method &: (x: Char)Int -method &: (x: Int)Int -method &: (x: Long)Long -method &: (x: Short)Int -method *: (x: Byte)Int -method *: (x: Char)Int -method *: (x: Double)Double -method *: (x: Float)Float -method *: (x: Int)Int -method *: (x: Long)Long -method *: (x: Short)Int -method +: (x: Byte)Int -method +: (x: Char)Int -method +: (x: Double)Double -method +: (x: Float)Float -method +: (x: Int)Int -method +: (x: Long)Long -method +: (x: Short)Int -method +: (x: String)String -method -: (x: Byte)Int -method -: (x: Char)Int -method -: (x: Double)Double -method -: (x: Float)Float -method -: (x: Int)Int -method -: (x: Long)Long -method -: (x: Short)Int -method /: (x: Byte)Int -method /: (x: Char)Int -method /: (x: Double)Double -method /: (x: Float)Float -method /: (x: Int)Int -method /: (x: Long)Long -method /: (x: Short)Int -method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <<: (x: Int)Int -method <<: (x: Long)Int -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean -method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean -method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method >>: 
(x: Int)Int -method >>: (x: Long)Int -method >>>: (x: Int)Int -method >>>: (x: Long)Int -method ^: (x: Byte)Int -method ^: (x: Char)Int -method ^: (x: Int)Int -method ^: (x: Long)Long -method ^: (x: Short)Int -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Short] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Int -method unary_-: => Int -method unary_~: => Int -method |: (x: Byte)Int -method |: (x: Char)Int -method |: (x: Int)Int -method |: (x: Long)Long -method |: (x: Short)Int +constructor Short: (): Short +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method !=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Int +method %: (x: Char): Int +method %: (x: Double): Double +method %: (x: Float): Float +method %: (x: Int): Int +method %: (x: Long): Long +method %: (x: Short): Int +method &: (x: Byte): Int +method &: (x: Char): Int +method &: (x: Int): Int +method &: (x: Long): Long +method &: (x: Short): Int +method *: (x: Byte): Int +method *: (x: Char): Int +method *: (x: Double): Double +method *: (x: Float): Float +method *: (x: Int): Int +method *: (x: Long): Long +method *: (x: Short): Int +method +: (x: Byte): Int +method +: (x: Char): Int +method +: (x: Double): Double +method +: (x: Float): Float +method +: (x: Int): Int +method +: (x: Long): Long +method +: (x: Short): Int +method +: (x: String): String +method -: (x: Byte): Int +method -: (x: Char): Int +method -: (x: Double): Double +method -: (x: Float): Float +method -: (x: Int): Int +method -: (x: Long): Long +method -: (x: Short): Int +method /: 
(x: Byte): Int +method /: (x: Char): Int +method /: (x: Double): Double +method /: (x: Float): Float +method /: (x: Int): Int +method /: (x: Long): Long +method /: (x: Short): Int +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <<: (x: Int): Int +method <<: (x: Long): Int +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean +method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method ==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean +method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method >>: (x: Int): Int +method >>: (x: Long): Int +method >>>: (x: Int): Int +method >>>: (x: Long): Int +method ^: (x: Byte): Int +method ^: (x: Char): Int +method ^: (x: Int): Int +method ^: (x: Long): Long +method ^: (x: Short): Int +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Short] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Int +method unary_-: Int +method unary_~: Int 
+method |: (x: Byte): Int +method |: (x: Char): Int +method |: (x: Int): Int +method |: (x: Long): Long +method |: (x: Short): Int testing Short.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2 testing Short.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2 testing Short.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>  @@ -418,125 +418,125 @@ testing Short.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Do Char it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Char: ()Char -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean -method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Int -method %: (x: Char)Int -method %: (x: Double)Double -method %: (x: Float)Float -method %: (x: Int)Int -method %: (x: Long)Long -method %: (x: Short)Int -method &: (x: Byte)Int -method &: (x: Char)Int -method &: (x: Int)Int -method &: (x: Long)Long -method &: (x: Short)Int -method *: (x: Byte)Int -method *: (x: Char)Int -method *: (x: Double)Double -method *: (x: Float)Float -method *: (x: Int)Int -method *: (x: Long)Long -method *: (x: Short)Int -method +: (x: Byte)Int -method +: (x: Char)Int -method +: (x: Double)Double -method +: (x: Float)Float -method +: (x: Int)Int -method +: (x: Long)Long -method +: (x: Short)Int -method +: (x: String)String -method -: (x: Byte)Int -method -: (x: Char)Int -method -: (x: Double)Double -method -: (x: Float)Float -method -: (x: Int)Int -method -: (x: Long)Long -method -: (x: Short)Int -method /: (x: Byte)Int -method /: (x: Char)Int -method /: (x: Double)Double -method /: (x: Float)Float -method /: (x: Int)Int -method /: (x: Long)Long -method /: (x: Short)Int 
-method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <<: (x: Int)Int -method <<: (x: Long)Int -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean -method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean -method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method >>: (x: Int)Int -method >>: (x: Long)Int -method >>>: (x: Int)Int -method >>>: (x: Long)Int -method ^: (x: Byte)Int -method ^: (x: Char)Int -method ^: (x: Int)Int -method ^: (x: Long)Long -method ^: (x: Short)Int -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Char] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Int -method unary_-: => Int -method unary_~: => Int -method |: (x: Byte)Int -method |: (x: Char)Int -method |: (x: Int)Int -method |: (x: Long)Long -method |: (x: Short)Int +constructor Char: (): Char +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method 
!=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Int +method %: (x: Char): Int +method %: (x: Double): Double +method %: (x: Float): Float +method %: (x: Int): Int +method %: (x: Long): Long +method %: (x: Short): Int +method &: (x: Byte): Int +method &: (x: Char): Int +method &: (x: Int): Int +method &: (x: Long): Long +method &: (x: Short): Int +method *: (x: Byte): Int +method *: (x: Char): Int +method *: (x: Double): Double +method *: (x: Float): Float +method *: (x: Int): Int +method *: (x: Long): Long +method *: (x: Short): Int +method +: (x: Byte): Int +method +: (x: Char): Int +method +: (x: Double): Double +method +: (x: Float): Float +method +: (x: Int): Int +method +: (x: Long): Long +method +: (x: Short): Int +method +: (x: String): String +method -: (x: Byte): Int +method -: (x: Char): Int +method -: (x: Double): Double +method -: (x: Float): Float +method -: (x: Int): Int +method -: (x: Long): Long +method -: (x: Short): Int +method /: (x: Byte): Int +method /: (x: Char): Int +method /: (x: Double): Double +method /: (x: Float): Float +method /: (x: Int): Int +method /: (x: Long): Long +method /: (x: Short): Int +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <<: (x: Int): Int +method <<: (x: Long): Int +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean +method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method 
==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean +method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method >>: (x: Int): Int +method >>: (x: Long): Int +method >>>: (x: Int): Int +method >>>: (x: Long): Int +method ^: (x: Byte): Int +method ^: (x: Char): Int +method ^: (x: Int): Int +method ^: (x: Long): Long +method ^: (x: Short): Int +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Char] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Int +method unary_-: Int +method unary_~: Int +method |: (x: Byte): Int +method |: (x: Char): Int +method |: (x: Int): Int +method |: (x: Long): Long +method |: (x: Short): Int testing Char.toByte() with receiver =  and args = List(): [class java.lang.Byte] =======> 2 testing Char.toShort() with receiver =  and args = List(): [class java.lang.Short] =======> 2 testing Char.toChar() with receiver =  and args = List(): [class java.lang.Character] =======>  @@ -626,125 +626,125 @@ testing Char.%(Double) with receiver =  and args = List(2.0 class java.lang.Dou Int it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Int: ()Int -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean 
-method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Int -method %: (x: Char)Int -method %: (x: Double)Double -method %: (x: Float)Float -method %: (x: Int)Int -method %: (x: Long)Long -method %: (x: Short)Int -method &: (x: Byte)Int -method &: (x: Char)Int -method &: (x: Int)Int -method &: (x: Long)Long -method &: (x: Short)Int -method *: (x: Byte)Int -method *: (x: Char)Int -method *: (x: Double)Double -method *: (x: Float)Float -method *: (x: Int)Int -method *: (x: Long)Long -method *: (x: Short)Int -method +: (x: Byte)Int -method +: (x: Char)Int -method +: (x: Double)Double -method +: (x: Float)Float -method +: (x: Int)Int -method +: (x: Long)Long -method +: (x: Short)Int -method +: (x: String)String -method -: (x: Byte)Int -method -: (x: Char)Int -method -: (x: Double)Double -method -: (x: Float)Float -method -: (x: Int)Int -method -: (x: Long)Long -method -: (x: Short)Int -method /: (x: Byte)Int -method /: (x: Char)Int -method /: (x: Double)Double -method /: (x: Float)Float -method /: (x: Int)Int -method /: (x: Long)Long -method /: (x: Short)Int -method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <<: (x: Int)Int -method <<: (x: Long)Int -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean -method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean -method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: 
Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method >>: (x: Int)Int -method >>: (x: Long)Int -method >>>: (x: Int)Int -method >>>: (x: Long)Int -method ^: (x: Byte)Int -method ^: (x: Char)Int -method ^: (x: Int)Int -method ^: (x: Long)Long -method ^: (x: Short)Int -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Int] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Int -method unary_-: => Int -method unary_~: => Int -method |: (x: Byte)Int -method |: (x: Char)Int -method |: (x: Int)Int -method |: (x: Long)Long -method |: (x: Short)Int +constructor Int: (): Int +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method !=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Int +method %: (x: Char): Int +method %: (x: Double): Double +method %: (x: Float): Float +method %: (x: Int): Int +method %: (x: Long): Long +method %: (x: Short): Int +method &: (x: Byte): Int +method &: (x: Char): Int +method &: (x: Int): Int +method &: (x: Long): Long +method &: (x: Short): Int +method *: (x: Byte): Int +method *: (x: Char): Int +method *: (x: Double): Double +method *: (x: Float): Float +method *: (x: Int): Int +method *: (x: Long): Long +method *: (x: Short): Int +method +: (x: Byte): Int +method +: (x: Char): Int +method +: (x: Double): Double +method +: (x: Float): Float +method +: (x: Int): Int +method +: (x: Long): Long +method +: (x: Short): Int +method +: (x: String): String +method 
-: (x: Byte): Int +method -: (x: Char): Int +method -: (x: Double): Double +method -: (x: Float): Float +method -: (x: Int): Int +method -: (x: Long): Long +method -: (x: Short): Int +method /: (x: Byte): Int +method /: (x: Char): Int +method /: (x: Double): Double +method /: (x: Float): Float +method /: (x: Int): Int +method /: (x: Long): Long +method /: (x: Short): Int +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <<: (x: Int): Int +method <<: (x: Long): Int +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean +method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method ==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean +method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method >>: (x: Int): Int +method >>: (x: Long): Int +method >>>: (x: Int): Int +method >>>: (x: Long): Int +method ^: (x: Byte): Int +method ^: (x: Char): Int +method ^: (x: Int): Int +method ^: (x: Long): Long +method ^: (x: Short): Int +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Int] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method 
toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Int +method unary_-: Int +method unary_~: Int +method |: (x: Byte): Int +method |: (x: Char): Int +method |: (x: Int): Int +method |: (x: Long): Long +method |: (x: Short): Int testing Int.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2 testing Int.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2 testing Int.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>  @@ -834,125 +834,125 @@ testing Int.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Doub Long it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Long: ()Long -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean -method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Long -method %: (x: Char)Long -method %: (x: Double)Double -method %: (x: Float)Float -method %: (x: Int)Long -method %: (x: Long)Long -method %: (x: Short)Long -method &: (x: Byte)Long -method &: (x: Char)Long -method &: (x: Int)Long -method &: (x: Long)Long -method &: (x: Short)Long -method *: (x: Byte)Long -method *: (x: Char)Long -method *: (x: Double)Double -method *: (x: Float)Float -method *: (x: Int)Long -method *: (x: Long)Long -method *: (x: Short)Long -method +: (x: Byte)Long -method +: (x: Char)Long -method +: (x: Double)Double -method +: (x: Float)Float -method +: (x: Int)Long -method +: (x: Long)Long -method +: (x: Short)Long -method +: (x: String)String -method -: (x: Byte)Long -method -: (x: Char)Long -method -: (x: Double)Double -method -: (x: Float)Float -method -: (x: Int)Long -method -: (x: Long)Long 
-method -: (x: Short)Long -method /: (x: Byte)Long -method /: (x: Char)Long -method /: (x: Double)Double -method /: (x: Float)Float -method /: (x: Int)Long -method /: (x: Long)Long -method /: (x: Short)Long -method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <<: (x: Int)Long -method <<: (x: Long)Long -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean -method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean -method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method >>: (x: Int)Long -method >>: (x: Long)Long -method >>>: (x: Int)Long -method >>>: (x: Long)Long -method ^: (x: Byte)Long -method ^: (x: Char)Long -method ^: (x: Int)Long -method ^: (x: Long)Long -method ^: (x: Short)Long -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Long] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Long -method unary_-: => Long -method unary_~: => Long -method |: (x: Byte)Long 
-method |: (x: Char)Long -method |: (x: Int)Long -method |: (x: Long)Long -method |: (x: Short)Long +constructor Long: (): Long +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method !=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Long +method %: (x: Char): Long +method %: (x: Double): Double +method %: (x: Float): Float +method %: (x: Int): Long +method %: (x: Long): Long +method %: (x: Short): Long +method &: (x: Byte): Long +method &: (x: Char): Long +method &: (x: Int): Long +method &: (x: Long): Long +method &: (x: Short): Long +method *: (x: Byte): Long +method *: (x: Char): Long +method *: (x: Double): Double +method *: (x: Float): Float +method *: (x: Int): Long +method *: (x: Long): Long +method *: (x: Short): Long +method +: (x: Byte): Long +method +: (x: Char): Long +method +: (x: Double): Double +method +: (x: Float): Float +method +: (x: Int): Long +method +: (x: Long): Long +method +: (x: Short): Long +method +: (x: String): String +method -: (x: Byte): Long +method -: (x: Char): Long +method -: (x: Double): Double +method -: (x: Float): Float +method -: (x: Int): Long +method -: (x: Long): Long +method -: (x: Short): Long +method /: (x: Byte): Long +method /: (x: Char): Long +method /: (x: Double): Double +method /: (x: Float): Float +method /: (x: Int): Long +method /: (x: Long): Long +method /: (x: Short): Long +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <<: (x: Int): Long +method <<: (x: Long): Long +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean 
+method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method ==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean +method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method >>: (x: Int): Long +method >>: (x: Long): Long +method >>>: (x: Int): Long +method >>>: (x: Long): Long +method ^: (x: Byte): Long +method ^: (x: Char): Long +method ^: (x: Int): Long +method ^: (x: Long): Long +method ^: (x: Short): Long +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Long] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Long +method unary_-: Long +method unary_~: Long +method |: (x: Byte): Long +method |: (x: Char): Long +method |: (x: Int): Long +method |: (x: Long): Long +method |: (x: Short): Long testing Long.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2 testing Long.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2 testing Long.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>  @@ -1042,103 +1042,103 @@ testing Long.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Dou Float it's important to print the list of Byte's members if some of them change (possibly, adding 
and/or removing magic symbols), we must update this test -constructor Float: ()Float -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean -method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Float -method %: (x: Char)Float -method %: (x: Double)Double -method %: (x: Float)Float -method %: (x: Int)Float -method %: (x: Long)Float -method %: (x: Short)Float -method *: (x: Byte)Float -method *: (x: Char)Float -method *: (x: Double)Double -method *: (x: Float)Float -method *: (x: Int)Float -method *: (x: Long)Float -method *: (x: Short)Float -method +: (x: Byte)Float -method +: (x: Char)Float -method +: (x: Double)Double -method +: (x: Float)Float -method +: (x: Int)Float -method +: (x: Long)Float -method +: (x: Short)Float -method +: (x: String)String -method -: (x: Byte)Float -method -: (x: Char)Float -method -: (x: Double)Double -method -: (x: Float)Float -method -: (x: Int)Float -method -: (x: Long)Float -method -: (x: Short)Float -method /: (x: Byte)Float -method /: (x: Char)Float -method /: (x: Double)Double -method /: (x: Float)Float -method /: (x: Int)Float -method /: (x: Long)Float -method /: (x: Short)Float -method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean -method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean 
-method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Float] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Float -method unary_-: => Float +constructor Float: (): Float +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method !=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Float +method %: (x: Char): Float +method %: (x: Double): Double +method %: (x: Float): Float +method %: (x: Int): Float +method %: (x: Long): Float +method %: (x: Short): Float +method *: (x: Byte): Float +method *: (x: Char): Float +method *: (x: Double): Double +method *: (x: Float): Float +method *: (x: Int): Float +method *: (x: Long): Float +method *: (x: Short): Float +method +: (x: Byte): Float +method +: (x: Char): Float +method +: (x: Double): Double +method +: (x: Float): Float +method +: (x: Int): Float +method +: (x: Long): Float +method +: (x: Short): Float +method +: (x: String): String +method -: (x: Byte): Float +method -: (x: Char): Float +method -: (x: Double): Double +method -: (x: Float): Float +method -: (x: Int): Float +method -: (x: Long): Float +method -: (x: Short): Float +method /: (x: Byte): Float +method /: (x: Char): Float +method /: (x: Double): Double +method /: (x: 
Float): Float +method /: (x: Int): Float +method /: (x: Long): Float +method /: (x: Short): Float +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean +method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method ==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean +method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Float] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Float +method unary_-: Float testing Float.toByte() with receiver = 2.0 and args = List(): [class java.lang.Byte] =======> 2 testing Float.toShort() with receiver = 2.0 and args = List(): [class java.lang.Short] =======> 2 testing Float.toChar() with receiver = 2.0 and args = List(): [class java.lang.Character] =======>  @@ -1228,103 +1228,103 @@ testing Float.%(Double) with receiver = 2.0 and args = List(2.0 class 
java.lang. Double it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Double: ()Double -method !=: (x$1: Any)Boolean -method !=: (x: Byte)Boolean -method !=: (x: Char)Boolean -method !=: (x: Double)Boolean -method !=: (x: Float)Boolean -method !=: (x: Int)Boolean -method !=: (x: Long)Boolean -method !=: (x: Short)Boolean -method ##: ()Int -method %: (x: Byte)Double -method %: (x: Char)Double -method %: (x: Double)Double -method %: (x: Float)Double -method %: (x: Int)Double -method %: (x: Long)Double -method %: (x: Short)Double -method *: (x: Byte)Double -method *: (x: Char)Double -method *: (x: Double)Double -method *: (x: Float)Double -method *: (x: Int)Double -method *: (x: Long)Double -method *: (x: Short)Double -method +: (x: Byte)Double -method +: (x: Char)Double -method +: (x: Double)Double -method +: (x: Float)Double -method +: (x: Int)Double -method +: (x: Long)Double -method +: (x: Short)Double -method +: (x: String)String -method -: (x: Byte)Double -method -: (x: Char)Double -method -: (x: Double)Double -method -: (x: Float)Double -method -: (x: Int)Double -method -: (x: Long)Double -method -: (x: Short)Double -method /: (x: Byte)Double -method /: (x: Char)Double -method /: (x: Double)Double -method /: (x: Float)Double -method /: (x: Int)Double -method /: (x: Long)Double -method /: (x: Short)Double -method <: (x: Byte)Boolean -method <: (x: Char)Boolean -method <: (x: Double)Boolean -method <: (x: Float)Boolean -method <: (x: Int)Boolean -method <: (x: Long)Boolean -method <: (x: Short)Boolean -method <=: (x: Byte)Boolean -method <=: (x: Char)Boolean -method <=: (x: Double)Boolean -method <=: (x: Float)Boolean -method <=: (x: Int)Boolean -method <=: (x: Long)Boolean -method <=: (x: Short)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Byte)Boolean -method ==: (x: Char)Boolean -method ==: (x: Double)Boolean -method ==: (x: Float)Boolean 
-method ==: (x: Int)Boolean -method ==: (x: Long)Boolean -method ==: (x: Short)Boolean -method >: (x: Byte)Boolean -method >: (x: Char)Boolean -method >: (x: Double)Boolean -method >: (x: Float)Boolean -method >: (x: Int)Boolean -method >: (x: Long)Boolean -method >: (x: Short)Boolean -method >=: (x: Byte)Boolean -method >=: (x: Char)Boolean -method >=: (x: Double)Boolean -method >=: (x: Float)Boolean -method >=: (x: Int)Boolean -method >=: (x: Long)Boolean -method >=: (x: Short)Boolean -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Double] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toByte: => Byte -method toChar: => Char -method toDouble: => Double -method toFloat: => Float -method toInt: => Int -method toLong: => Long -method toShort: => Short -method toString: ()String -method unary_+: => Double -method unary_-: => Double +constructor Double: (): Double +method !=: (x$1: Any): Boolean +method !=: (x: Byte): Boolean +method !=: (x: Char): Boolean +method !=: (x: Double): Boolean +method !=: (x: Float): Boolean +method !=: (x: Int): Boolean +method !=: (x: Long): Boolean +method !=: (x: Short): Boolean +method ##: Int +method %: (x: Byte): Double +method %: (x: Char): Double +method %: (x: Double): Double +method %: (x: Float): Double +method %: (x: Int): Double +method %: (x: Long): Double +method %: (x: Short): Double +method *: (x: Byte): Double +method *: (x: Char): Double +method *: (x: Double): Double +method *: (x: Float): Double +method *: (x: Int): Double +method *: (x: Long): Double +method *: (x: Short): Double +method +: (x: Byte): Double +method +: (x: Char): Double +method +: (x: Double): Double +method +: (x: Float): Double +method +: (x: Int): Double +method +: (x: Long): Double +method +: (x: Short): Double +method +: (x: String): String +method -: (x: Byte): Double +method -: (x: Char): Double +method -: (x: Double): Double +method -: (x: Float): Double +method -: (x: Int): 
Double +method -: (x: Long): Double +method -: (x: Short): Double +method /: (x: Byte): Double +method /: (x: Char): Double +method /: (x: Double): Double +method /: (x: Float): Double +method /: (x: Int): Double +method /: (x: Long): Double +method /: (x: Short): Double +method <: (x: Byte): Boolean +method <: (x: Char): Boolean +method <: (x: Double): Boolean +method <: (x: Float): Boolean +method <: (x: Int): Boolean +method <: (x: Long): Boolean +method <: (x: Short): Boolean +method <=: (x: Byte): Boolean +method <=: (x: Char): Boolean +method <=: (x: Double): Boolean +method <=: (x: Float): Boolean +method <=: (x: Int): Boolean +method <=: (x: Long): Boolean +method <=: (x: Short): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Byte): Boolean +method ==: (x: Char): Boolean +method ==: (x: Double): Boolean +method ==: (x: Float): Boolean +method ==: (x: Int): Boolean +method ==: (x: Long): Boolean +method ==: (x: Short): Boolean +method >: (x: Byte): Boolean +method >: (x: Char): Boolean +method >: (x: Double): Boolean +method >: (x: Float): Boolean +method >: (x: Int): Boolean +method >: (x: Long): Boolean +method >: (x: Short): Boolean +method >=: (x: Byte): Boolean +method >=: (x: Char): Boolean +method >=: (x: Double): Boolean +method >=: (x: Float): Boolean +method >=: (x: Int): Boolean +method >=: (x: Long): Boolean +method >=: (x: Short): Boolean +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Double] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toByte: Byte +method toChar: Char +method toDouble: Double +method toFloat: Float +method toInt: Int +method toLong: Long +method toShort: Short +method toString: (): String +method unary_+: Double +method unary_-: Double testing Double.toByte() with receiver = 2.0 and args = List(): [class java.lang.Byte] =======> 2 testing Double.toShort() with receiver = 2.0 and args = List(): [class java.lang.Short] =======> 2 testing 
Double.toChar() with receiver = 2.0 and args = List(): [class java.lang.Character] =======>  @@ -1414,24 +1414,24 @@ testing Double.%(Double) with receiver = 2.0 and args = List(2.0 class java.lang Boolean it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Boolean: ()Boolean -method !=: (x$1: Any)Boolean -method !=: (x: Boolean)Boolean -method ##: ()Int -method &&: (x: Boolean)Boolean -method &: (x: Boolean)Boolean -method ==: (x$1: Any)Boolean -method ==: (x: Boolean)Boolean -method ^: (x: Boolean)Boolean -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Boolean] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toString: ()String -method unary_!: => Boolean -method |: (x: Boolean)Boolean -method ||: (x: Boolean)Boolean +constructor Boolean: (): Boolean +method !=: (x$1: Any): Boolean +method !=: (x: Boolean): Boolean +method ##: Int +method &&: (x: Boolean): Boolean +method &: (x: Boolean): Boolean +method ==: (x$1: Any): Boolean +method ==: (x: Boolean): Boolean +method ^: (x: Boolean): Boolean +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Boolean] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toString: (): String +method unary_!: Boolean +method |: (x: Boolean): Boolean +method ||: (x: Boolean): Boolean testing Boolean.unary_!() with receiver = true and args = List(): [class java.lang.Boolean] =======> false testing Boolean.==(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> true testing Boolean.!=(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> false @@ -1444,13 +1444,13 @@ testing Boolean.^(Boolean) with receiver = true and args = List(true class java. 
Unit it's important to print the list of Byte's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Unit: ()Unit -method !=: (x$1: Any)Boolean -method ##: ()Int -method ==: (x$1: Any)Boolean -method asInstanceOf: [T0]=> T0 -method equals: (x$1: Any)Boolean -method getClass: ()Class[Unit] -method hashCode: ()Int -method isInstanceOf: [T0]=> Boolean -method toString: ()String +constructor Unit: (): Unit +method !=: (x$1: Any): Boolean +method ##: Int +method ==: (x$1: Any): Boolean +method asInstanceOf: [T0]T0 +method equals: (x$1: Any): Boolean +method getClass: (): Class[Unit] +method hashCode: (): Int +method isInstanceOf: [T0]Boolean +method toString: (): String diff --git a/test/files/run/reflection-valueclasses-magic.scala b/test/files/run/reflection-valueclasses-magic.scala index 366b5fe27097..07cd824283e6 100644 --- a/test/files/run/reflection-valueclasses-magic.scala +++ b/test/files/run/reflection-valueclasses-magic.scala @@ -1,3 +1,5 @@ +//> using options -deprecation +// import scala.reflect.runtime.universe._ import scala.reflect.runtime.universe.definitions._ import scala.reflect.runtime.{currentMirror => cm} @@ -14,8 +16,9 @@ object Test extends App { sym match { // initialize parameter symbols case meth: MethodSymbol => meth.paramLists.flatten.map(_.info) + case _ => throw new MatchError(sym) } - sym + ": " + sym.info + s"$sym: ${sym.info}" } def convert(value: Any, tpe: Type) = { @@ -32,7 +35,7 @@ object Test extends App { else throw new Exception(s"not supported: value = $value, tpe = $tpe") } - def test[T: ClassTag](tpe: Type, receiver: T, method: String, args: Any*) { + def test[T: ClassTag](tpe: Type, receiver: T, method: String, args: Any*): Unit = { def wrap[T](op: => T) = try { var result = op.asInstanceOf[AnyRef] @@ -42,24 +45,24 @@ object Test extends App { } catch { case ex: Throwable => val realex = scala.ExceptionUtils.unwrapThrowable(ex) - println(realex.getClass + ": " + 
realex.getMessage) + println(s"${realex.getClass}: ${realex.getMessage}") } val meth = tpe.decl(TermName(method).encodedName.toTermName) val testees = if (meth.isMethod) List(meth.asMethod) else meth.asTerm.alternatives.map(_.asMethod) testees foreach (testee => { val convertedArgs = args.zipWithIndex.map { case (arg, i) => convert(arg, testee.paramLists.flatten.apply(i).info) } - print(s"testing ${tpe.typeSymbol.name}.$method(${testee.paramLists.flatten.map(_.info).mkString(','.toString)}) with receiver = $receiver and args = ${convertedArgs.map(arg => arg + ' '.toString + arg.getClass).toList}: ") + print(s"testing ${tpe.typeSymbol.name}.$method(${testee.paramLists.flatten.map(_.info).mkString(','.toString)}) with receiver = $receiver and args = ${convertedArgs.map(arg => s"$arg ${arg.getClass}").toList}: ") wrap(cm.reflect(receiver).reflectMethod(testee)(convertedArgs: _*)) }) } - def header(tpe: Type) { + def header(tpe: Type): Unit = { println(s"============\n$tpe") println("it's important to print the list of Byte's members") println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test") tpe.members.toList.sortBy(key).foreach(sym => println(key(sym))) } - def testNumeric[T: ClassTag](tpe: Type, value: T) { + def testNumeric[T: ClassTag](tpe: Type, value: T): Unit = { header(tpe) List("toByte", "toShort", "toChar", "toInt", "toLong", "toFloat", "toDouble") foreach (meth => test(tpe, value, meth)) test(tpe, value, "==", 2) @@ -75,7 +78,7 @@ object Test extends App { test(tpe, value, "%", 2) } - def testIntegral[T: ClassTag](tpe: Type, value: T) { + def testIntegral[T: ClassTag](tpe: Type, value: T): Unit = { testNumeric(tpe, value) test(tpe, value, "unary_~") test(tpe, value, "unary_+") @@ -88,7 +91,7 @@ object Test extends App { test(tpe, value, "^", 2) } - def testBoolean() { + def testBoolean(): Unit = { header(typeOf[Boolean]) test(typeOf[Boolean], true, "unary_!") test(typeOf[Boolean], true, "==", true) @@ 
-100,7 +103,7 @@ object Test extends App { test(typeOf[Boolean], true, "^", true) } - def testUnit() { + def testUnit(): Unit = { header(typeOf[Unit]) } @@ -113,4 +116,4 @@ object Test extends App { testNumeric(typeOf[Double], 2.toDouble) testBoolean() testUnit() -} \ No newline at end of file +} diff --git a/test/files/run/reflection-valueclasses-standard.scala b/test/files/run/reflection-valueclasses-standard.scala index b6b5a2ede239..74719dcf55f9 100644 --- a/test/files/run/reflection-valueclasses-standard.scala +++ b/test/files/run/reflection-valueclasses-standard.scala @@ -18,4 +18,4 @@ object Test extends App { test('2') test(true) test(()) -} \ No newline at end of file +} diff --git a/test/files/run/reflinit.scala b/test/files/run/reflinit.scala index 6d3ba3a4dfa4..979dfb380800 100644 --- a/test/files/run/reflinit.scala +++ b/test/files/run/reflinit.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Test extends App { val tt2 = typeOf[List[Int]] println(tt2) -} \ No newline at end of file +} diff --git a/test/files/run/reify-aliases.scala b/test/files/run/reify-aliases.scala index 45b1a34e241b..31141f3ac4e2 100644 --- a/test/files/run/reify-aliases.scala +++ b/test/files/run/reify-aliases.scala @@ -2,4 +2,4 @@ import scala.reflect.runtime.universe._ object Test extends App { println(showRaw(typeOf[String])) -} \ No newline at end of file +} diff --git a/test/files/run/reify-each-node-type.scala b/test/files/run/reify-each-node-type.scala index ac6fe82d3c33..2489c1539241 100644 --- a/test/files/run/reify-each-node-type.scala +++ b/test/files/run/reify-each-node-type.scala @@ -45,10 +45,10 @@ object s { act(reify { val x: Int = 0 /* ValDef */ }) act(reify { val x = 0 /* TypeTree */ }) act(reify { if (true) () /* If */ }) - act(reify { def f { } /* DefDef */ }) + act(reify { def f: Unit = { } /* DefDef */ }) act(reify { def m = super.q /* Super */ }) act(reify { trait A /* ClassDef Template */ }) - act(reify { def f(x: Any) { } /* EmptyTree 
*/ }) + act(reify { def f(x: Any): Unit = { } /* EmptyTree */ }) act(reify { null: D with E /* CompoundTypeTree */ }) act(reify { type T = Int /* TypeDef */ }) act(reify { type CC[T <: D] = C[T] /* TypeBoundsTree */ }) diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check index 32ed87635688..678f70d82c0e 100644 --- a/test/files/run/reify-repl-fail-gracefully.check +++ b/test/files/run/reify-repl-fail-gracefully.check @@ -8,8 +8,7 @@ import scala.reflect.runtime.universe._ scala> scala> reify -:16: error: too few argument lists for macro invocation - reify ^ + error: too few argument lists for macro invocation scala> :quit diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check index 71841ff83b46..43b86532bfed 100644 --- a/test/files/run/reify_ann1a.check +++ b/test/files/run/reify_ann1a.check @@ -1,28 +1,28 @@ { - @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T] extends AnyRef { - @new ann(List.apply("3a")) @new ann(List.apply("3b")) private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _; - def (@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = { + @new ann(`package`.List.apply("1a")) @new ann(`package`.List.apply("1b")) class C[@new ann(`package`.List.apply("2a")) @new ann(`package`.List.apply("2b")) T] extends AnyRef { + @new ann(`package`.List.apply("3a")) @new ann(`package`.List.apply("3b")) private[this] val x: T @ann(`package`.List.apply("4a")) @ann(`package`.List.apply("4b")) = _; + def (@new ann(`package`.List.apply("3a")) @new ann(`package`.List.apply("3b")) x: T @ann(`package`.List.apply("4a")) @ann(`package`.List.apply("4b"))) = { super.(); () }; - @new ann(List.apply("5a")) @new ann(List.apply("5b")) def f(x: Int @ann(List.apply("6a")) @ann(List.apply("6b"))) = { - @new ann(List.apply("7a")) @new ann(List.apply("7b")) val r = 
x.$plus(3): @ann(List.apply("8a")): @ann(List.apply("8b")); - val s = (4: Int @ann(List.apply("9a")) @ann(List.apply("9b"))); + @new ann(`package`.List.apply("5a")) @new ann(`package`.List.apply("5b")) def f(x: Int @ann(`package`.List.apply("6a")) @ann(`package`.List.apply("6b"))) = { + @new ann(`package`.List.apply("7a")) @new ann(`package`.List.apply("7b")) val r = x.$plus(3): @ann(`package`.List.apply("8a")): @ann(`package`.List.apply("8b")); + val s = (4: Int @ann(`package`.List.apply("9a")) @ann(`package`.List.apply("9b"))); r.$plus(s) } }; () } { - @ann(List.apply[String]("1a")) @ann(List.apply[String]("1b")) class C[@ann(List.apply[String]("2a")) @ann(List.apply[String]("2b")) T] extends AnyRef { - @ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) private[this] val x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a")) = _; - def (@ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a"))): C[T] = { + @ann(`package`.List.apply[String]("1a")) @ann(`package`.List.apply[String]("1b")) class C[@ann(`package`.List.apply[String]("2a")) @ann(`package`.List.apply[String]("2b")) T] extends AnyRef { + @ann(`package`.List.apply[String]("3a")) @ann(`package`.List.apply[String]("3b")) private[this] val x: T @ann(`package`.List.apply[String]("4b")) @ann(`package`.List.apply[String]("4a")) = _; + def (@ann(`package`.List.apply[String]("3a")) @ann(`package`.List.apply[String]("3b")) x: T @ann(`package`.List.apply[String]("4b")) @ann(`package`.List.apply[String]("4a"))): C[T] = { C.super.(); () }; - @ann(List.apply[String]("5a")) @ann(List.apply[String]("5b")) def f(x: Int @ann(List.apply[String]("6b")) @ann(List.apply[String]("6a"))): Int = { - @ann(List.apply[String]("7a")) @ann(List.apply[String]("7b")) val r: Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a")) = ((x.+(3): Int @ann(List.apply[String]("8a"))): Int @ann(List.apply[String]("8b")) 
@ann(List.apply[String]("8a"))); - val s: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a")) = (4: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a"))); + @ann(`package`.List.apply[String]("5a")) @ann(`package`.List.apply[String]("5b")) def f(x: Int @ann(`package`.List.apply[String]("6b")) @ann(`package`.List.apply[String]("6a"))): Int = { + @ann(`package`.List.apply[String]("7a")) @ann(`package`.List.apply[String]("7b")) val r: Int @ann(`package`.List.apply[String]("8b")) @ann(`package`.List.apply[String]("8a")) = ((x.+(3): Int @ann(`package`.List.apply[String]("8a"))): Int @ann(`package`.List.apply[String]("8b")) @ann(`package`.List.apply[String]("8a"))); + val s: Int @ann(`package`.List.apply[String]("9b")) @ann(`package`.List.apply[String]("9a")) = (4: Int @ann(`package`.List.apply[String]("9b")) @ann(`package`.List.apply[String]("9a"))); r.+(s) } }; diff --git a/test/files/run/reify_ann1a.scala b/test/files/run/reify_ann1a.scala index e3ff9e532dd0..1078514eaddd 100644 --- a/test/files/run/reify_ann1a.scala +++ b/test/files/run/reify_ann1a.scala @@ -25,4 +25,4 @@ object Test extends App { // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check index 2750e72ba177..439863c82228 100644 --- a/test/files/run/reify_ann1b.check +++ b/test/files/run/reify_ann1b.check @@ -1,13 +1,3 @@ -reify_ann1b.scala:6: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class ann0(bar: String) extends annotation.ClassfileAnnotation - ^ -reify_ann1b.scala:7: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. 
-class ann1(bar: String) extends annotation.ClassfileAnnotation - ^ { @new ann0(bar = "1a") @new ann1(bar = "1b") class C[@new ann0(bar = "2a") @new ann1(bar = "2b") T] extends AnyRef { @new ann0(bar = "3a") @new ann1(bar = "3b") private[this] val x: T @ann0(bar = "4a") @ann1(bar = "4b") = _; diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala index f1710d6fc4c5..f94187bc65d3 100644 --- a/test/files/run/reify_ann1b.scala +++ b/test/files/run/reify_ann1b.scala @@ -3,8 +3,8 @@ import scala.reflect.runtime.{universe => ru} import scala.reflect.runtime.{currentMirror => cm} import scala.tools.reflect.ToolBox -class ann0(bar: String) extends annotation.ClassfileAnnotation -class ann1(bar: String) extends annotation.ClassfileAnnotation +class ann0(bar: String) extends annotation.ConstantAnnotation +class ann1(bar: String) extends annotation.ConstantAnnotation object Test extends App { // test 1: reify @@ -26,4 +26,4 @@ object Test extends App { // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_ann2a.check b/test/files/run/reify_ann2a.check index a26fa42045ec..aeb7ad051803 100644 --- a/test/files/run/reify_ann2a.check +++ b/test/files/run/reify_ann2a.check @@ -6,15 +6,15 @@ () } }; - @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T] extends AnyRef { - @new ann(List.apply("3a")) @new ann(List.apply("3b")) private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _; - def (@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = { + @new ann(`package`.List.apply("1a")) @new ann(`package`.List.apply("1b")) class C[@new ann(`package`.List.apply("2a")) @new ann(`package`.List.apply("2b")) T] extends AnyRef { + @new ann(`package`.List.apply("3a")) @new ann(`package`.List.apply("3b")) private[this] val x: T @ann(`package`.List.apply("4a")) 
@ann(`package`.List.apply("4b")) = _; + def (@new ann(`package`.List.apply("3a")) @new ann(`package`.List.apply("3b")) x: T @ann(`package`.List.apply("4a")) @ann(`package`.List.apply("4b"))) = { super.(); () }; - @new ann(List.apply("5a")) @new ann(List.apply("5b")) def f(x: Int @ann(List.apply("6a")) @ann(List.apply("6b"))) = { - @new ann(List.apply("7a")) @new ann(List.apply("7b")) val r = x.$plus(3): @ann(List.apply("8a")): @ann(List.apply("8b")); - val s = (4: Int @ann(List.apply("9a")) @ann(List.apply("9b"))); + @new ann(`package`.List.apply("5a")) @new ann(`package`.List.apply("5b")) def f(x: Int @ann(`package`.List.apply("6a")) @ann(`package`.List.apply("6b"))) = { + @new ann(`package`.List.apply("7a")) @new ann(`package`.List.apply("7b")) val r = x.$plus(3): @ann(`package`.List.apply("8a")): @ann(`package`.List.apply("8b")); + val s = (4: Int @ann(`package`.List.apply("9a")) @ann(`package`.List.apply("9b"))); r.$plus(s) } }; @@ -28,15 +28,15 @@ () } }; - @ann(List.apply[String]("1a")) @ann(List.apply[String]("1b")) class C[@ann(List.apply[String]("2a")) @ann(List.apply[String]("2b")) T] extends AnyRef { - @ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) private[this] val x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a")) = _; - def (@ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a"))): C[T] = { + @ann(`package`.List.apply[String]("1a")) @ann(`package`.List.apply[String]("1b")) class C[@ann(`package`.List.apply[String]("2a")) @ann(`package`.List.apply[String]("2b")) T] extends AnyRef { + @ann(`package`.List.apply[String]("3a")) @ann(`package`.List.apply[String]("3b")) private[this] val x: T @ann(`package`.List.apply[String]("4b")) @ann(`package`.List.apply[String]("4a")) = _; + def (@ann(`package`.List.apply[String]("3a")) @ann(`package`.List.apply[String]("3b")) x: T @ann(`package`.List.apply[String]("4b")) @ann(`package`.List.apply[String]("4a"))): 
C[T] = { C.super.(); () }; - @ann(List.apply[String]("5a")) @ann(List.apply[String]("5b")) def f(x: Int @ann(List.apply[String]("6b")) @ann(List.apply[String]("6a"))): Int = { - @ann(List.apply[String]("7a")) @ann(List.apply[String]("7b")) val r: Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a")) = ((x.+(3): Int @ann(List.apply[String]("8a"))): Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a"))); - val s: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a")) = (4: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a"))); + @ann(`package`.List.apply[String]("5a")) @ann(`package`.List.apply[String]("5b")) def f(x: Int @ann(`package`.List.apply[String]("6b")) @ann(`package`.List.apply[String]("6a"))): Int = { + @ann(`package`.List.apply[String]("7a")) @ann(`package`.List.apply[String]("7b")) val r: Int @ann(`package`.List.apply[String]("8b")) @ann(`package`.List.apply[String]("8a")) = ((x.+(3): Int @ann(`package`.List.apply[String]("8a"))): Int @ann(`package`.List.apply[String]("8b")) @ann(`package`.List.apply[String]("8a"))); + val s: Int @ann(`package`.List.apply[String]("9b")) @ann(`package`.List.apply[String]("9a")) = (4: Int @ann(`package`.List.apply[String]("9b")) @ann(`package`.List.apply[String]("9a"))); r.+(s) } }; diff --git a/test/files/run/reify_ann2a.scala b/test/files/run/reify_ann2a.scala index 515fba015ffb..7ae2dc48d1b1 100644 --- a/test/files/run/reify_ann2a.scala +++ b/test/files/run/reify_ann2a.scala @@ -25,4 +25,4 @@ object Test extends App { // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_ann2b.check b/test/files/run/reify_ann2b.check new file mode 100644 index 000000000000..6ccd0ef2c841 --- /dev/null +++ b/test/files/run/reify_ann2b.check @@ -0,0 +1,44 @@ +{ + class ann extends ConstantAnnotation { + private[this] val bar: Predef.String = _; + def (bar: Predef.String) = { + super.(); + () + } + }; + @new ann(bar = "1a") @new 
ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T] extends AnyRef { + @new ann(bar = "3a") @new ann(bar = "3b") private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _; + def (@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = { + super.(); + () + }; + @new ann(bar = "5a") @new ann(bar = "5b") def f(x: Int @ann(bar = "6a") @ann(bar = "6b")) = { + @new ann(bar = "7a") @new ann(bar = "7b") val r = x.$plus(3): @ann(bar = "8a"): @ann(bar = "8b"); + val s = (4: Int @ann(bar = "9a") @ann(bar = "9b")); + r.$plus(s) + } + }; + () +} +{ + class ann extends scala.annotation.Annotation with scala.annotation.ConstantAnnotation { + private[this] val bar: String = _; + def (bar: String): ann = { + ann.super.(); + () + } + }; + @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends AnyRef { + @ann(bar = "3a") @ann(bar = "3b") private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _; + def (@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = { + C.super.(); + () + }; + @ann(bar = "5a") @ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = { + @ann(bar = "7a") @ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.+(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a")); + val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a")); + r.+(s) + } + }; + () +} diff --git a/test/files/neg/reify_ann2b.scala b/test/files/run/reify_ann2b.scala similarity index 88% rename from test/files/neg/reify_ann2b.scala rename to test/files/run/reify_ann2b.scala index 72d8c611cb44..6c505a983d15 100644 --- a/test/files/neg/reify_ann2b.scala +++ b/test/files/run/reify_ann2b.scala @@ -6,7 +6,7 @@ import scala.tools.reflect.ToolBox object Test extends App { // test 1: reify val tree = reify{ - class ann(bar: String) extends annotation.ClassfileAnnotation + class ann(bar: String) extends 
annotation.ConstantAnnotation @ann(bar="1a") @ann(bar="1b") class C[@ann(bar="2a") @ann(bar="2b") T](@ann(bar="3a") @ann(bar="3b") x: T @ann(bar="4a") @ann(bar="4b")) { @ann(bar="5a") @ann(bar="5b") def f(x: Int @ann(bar="6a") @ann(bar="6b")) = { @@ -20,9 +20,9 @@ object Test extends App { // test 2: import and typecheck val toolbox = cm.mkToolBox() - val ttree = toolbox.typeCheck(tree) + val ttree = toolbox.typecheck(tree) println(ttree.toString) // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_ann3.scala b/test/files/run/reify_ann3.scala index 7098e928a73b..be9f9c8e9a85 100644 --- a/test/files/run/reify_ann3.scala +++ b/test/files/run/reify_ann3.scala @@ -19,4 +19,4 @@ object Test extends App { // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_ann4.scala b/test/files/run/reify_ann4.scala index f6426213dfae..6ba4b8c21516 100644 --- a/test/files/run/reify_ann4.scala +++ b/test/files/run/reify_ann4.scala @@ -23,4 +23,4 @@ object Test extends App { // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_ann5.check b/test/files/run/reify_ann5.check index 1ec0457e542c..93a8cffa27ae 100644 --- a/test/files/run/reify_ann5.check +++ b/test/files/run/reify_ann5.check @@ -1,6 +1,6 @@ { class C extends AnyRef { - @new inline @beanGetter() @new BeanProperty() val x: Int = _; + @new BeanProperty() @new inline @beanGetter() @new BeanProperty() val x: Int = _; def (x: Int) = { super.(); () @@ -10,13 +10,13 @@ } { class C extends AnyRef { - @scala.beans.BeanProperty private[this] val x: Int = _; + @scala.beans.BeanProperty @scala.beans.BeanProperty private[this] val x: Int = _; def x: Int = C.this.x; def (x: Int): C = { C.super.(); () }; - @inline @scala.annotation.meta.beanGetter def getX(): Int = C.this.x + @scala.beans.BeanProperty @inline @scala.annotation.meta.beanGetter 
@scala.beans.BeanProperty def getX(): Int = C.this.x }; () } diff --git a/test/files/run/reify_ann5.scala b/test/files/run/reify_ann5.scala index 5e2f058a39bb..3cc355249108 100644 --- a/test/files/run/reify_ann5.scala +++ b/test/files/run/reify_ann5.scala @@ -20,4 +20,4 @@ object Test extends App { // test 3: import and compile toolbox.eval(tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_anonymous.scala b/test/files/run/reify_anonymous.scala index d743014dfd52..f2e9a5023031 100644 --- a/test/files/run/reify_anonymous.scala +++ b/test/files/run/reify_anonymous.scala @@ -5,4 +5,4 @@ object Test extends App { reify { println(new {def x = 2; def y = x * x}.y) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check index bda0f06e755a..cd4be46db675 100644 --- a/test/files/run/reify_classfileann_a.check +++ b/test/files/run/reify_classfileann_a.check @@ -1,10 +1,5 @@ -reify_classfileann_a.scala:6: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. 
-class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation - ^ { - @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef { + @new Ann(bar = "1", quux = Array("2", "3"), baz = new SuppressWarnings(value = Array("hups"))) class C extends AnyRef { def () = { super.(); () @@ -13,7 +8,7 @@ class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends a () } { - @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends AnyRef { + @Ann(bar = "1", quux = ["2", "3"], baz = SuppressWarnings(value = ["hups"])) class C extends AnyRef { def (): C = { C.super.(); () diff --git a/test/files/run/reify_classfileann_a.scala b/test/files/run/reify_classfileann_a.scala deleted file mode 100644 index 9ae12bff820b..000000000000 --- a/test/files/run/reify_classfileann_a.scala +++ /dev/null @@ -1,22 +0,0 @@ -import scala.reflect.runtime.universe._ -import scala.reflect.runtime.{universe => ru} -import scala.reflect.runtime.{currentMirror => cm} -import scala.tools.reflect.ToolBox - -class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation - -object Test extends App { - // test 1: reify - val tree = reify{ - @ann(bar="1", quux=Array("2", "3"), baz = new ann(bar = "4")) class C - }.tree - println(tree.toString) - - // test 2: import and typecheck - val toolbox = cm.mkToolBox() - val ttree = toolbox.typecheck(tree) - println(ttree.toString) - - // test 3: import and compile - toolbox.eval(tree) -} \ No newline at end of file diff --git a/test/files/run/reify_classfileann_a/Ann.java b/test/files/run/reify_classfileann_a/Ann.java new file mode 100644 index 000000000000..5162f7a08f52 --- /dev/null +++ b/test/files/run/reify_classfileann_a/Ann.java @@ -0,0 +1,5 @@ +public @interface Ann { + public String bar(); + public String[] quux() default {}; + public SuppressWarnings baz() default @SuppressWarnings({}); +} diff --git 
a/test/files/run/reify_classfileann_a/Test.scala b/test/files/run/reify_classfileann_a/Test.scala new file mode 100644 index 000000000000..7c2364efddc2 --- /dev/null +++ b/test/files/run/reify_classfileann_a/Test.scala @@ -0,0 +1,20 @@ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{universe => ru} +import scala.reflect.runtime.{currentMirror => cm} +import scala.tools.reflect.ToolBox + +object Test extends App { + // test 1: reify + val tree = reify{ + @Ann(bar="1", quux=Array("2", "3"), baz = new SuppressWarnings(Array("hups"))) class C + }.tree + println(tree.toString) + + // test 2: import and typecheck + val toolbox = cm.mkToolBox() + val ttree = toolbox.typecheck(tree) + println(ttree.toString) + + // test 3: import and compile + toolbox.eval(tree) +} diff --git a/test/files/run/reify_classfileann_b.check b/test/files/run/reify_classfileann_b.check index e9d8809e5372..f1fe8f8faccf 100644 --- a/test/files/run/reify_classfileann_b.check +++ b/test/files/run/reify_classfileann_b.check @@ -1,15 +1,10 @@ -reify_classfileann_b.scala:6: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. 
-class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation - ^ { class C extends AnyRef { def () = { super.(); () }; - def x: Int = 2: @ann(bar = "1",quux = Array("2", "3"),baz = new ann(bar = "4")) + def x: Int = 2: @Ann(bar = "1",quux = Array("2", "3"),baz = new SuppressWarnings(value = Array("hups"))) }; () } @@ -19,7 +14,7 @@ class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends a C.super.(); () }; - def x: Int = (2: Int(2) @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4"))) + def x: Int = (2: Int(2) @Ann(bar = "1", quux = ["2", "3"], baz = SuppressWarnings(value = ["hups"]))) }; () } diff --git a/test/files/run/reify_classfileann_b.scala b/test/files/run/reify_classfileann_b.scala deleted file mode 100644 index a0cb8f0b49b1..000000000000 --- a/test/files/run/reify_classfileann_b.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.reflect.runtime.universe._ -import scala.reflect.runtime.{universe => ru} -import scala.reflect.runtime.{currentMirror => cm} -import scala.tools.reflect.ToolBox - -class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation - -object Test extends App { - // test 1: reify - val tree = reify{ - class C { - def x: Int = { - 2: @ann(bar="1", quux=Array("2", "3"), baz = new ann(bar = "4")) - } - } - }.tree - println(tree.toString) - - // test 2: import and typecheck - val toolbox = cm.mkToolBox() - val ttree = toolbox.typecheck(tree) - println(ttree.toString) - - // test 3: import and compile - toolbox.eval(tree) -} \ No newline at end of file diff --git a/test/files/run/reify_classfileann_b/Ann.java b/test/files/run/reify_classfileann_b/Ann.java new file mode 100644 index 000000000000..5162f7a08f52 --- /dev/null +++ b/test/files/run/reify_classfileann_b/Ann.java @@ -0,0 +1,5 @@ +public @interface Ann { + public String bar(); + public String[] quux() default {}; + public SuppressWarnings baz() default 
@SuppressWarnings({}); +} diff --git a/test/files/run/reify_classfileann_b/Test.scala b/test/files/run/reify_classfileann_b/Test.scala new file mode 100644 index 000000000000..dfea43798826 --- /dev/null +++ b/test/files/run/reify_classfileann_b/Test.scala @@ -0,0 +1,24 @@ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{universe => ru} +import scala.reflect.runtime.{currentMirror => cm} +import scala.tools.reflect.ToolBox + +object Test extends App { + // test 1: reify + val tree = reify{ + class C { + def x: Int = { + 2: @Ann(bar="1", quux=Array("2", "3"), baz = new SuppressWarnings(Array("hups"))) + } + } + }.tree + println(tree.toString) + + // test 2: import and typecheck + val toolbox = cm.mkToolBox() + val ttree = toolbox.typecheck(tree) + println(ttree.toString) + + // test 3: import and compile + toolbox.eval(tree) +} diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala index af24a4b1e487..0652cff8c0bb 100644 --- a/test/files/run/reify_closure1.scala +++ b/test/files/run/reify_closure1.scala @@ -16,4 +16,4 @@ object Test extends App { println(foo(List(1, 2, 3))(10)) println(foo(List(1, 2, 3, 4))(10)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala index 7a2cdb5e17bc..fa66ef6972dd 100644 --- a/test/files/run/reify_closure2a.scala +++ b/test/files/run/reify_closure2a.scala @@ -16,4 +16,4 @@ object Test extends App { println(foo(1)(10)) println(foo(2)(10)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala index cb17c89501eb..945416c0502d 100644 --- a/test/files/run/reify_closure3a.scala +++ b/test/files/run/reify_closure3a.scala @@ -18,4 +18,4 @@ object Test extends App { println(foo(1)(10)) println(foo(2)(10)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala index 
23436e0763e0..6e7cce7de9f2 100644 --- a/test/files/run/reify_closure4a.scala +++ b/test/files/run/reify_closure4a.scala @@ -18,4 +18,4 @@ object Test extends App { println(foo(1)(10)) println(foo(2)(10)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala index 6b5089a4e58b..f8a39ee2eee0 100644 --- a/test/files/run/reify_closure5a.scala +++ b/test/files/run/reify_closure5a.scala @@ -18,4 +18,4 @@ object Test extends App { println(fun1(10)) var fun2 = foo(List(1, 2, 3, 4)) println(fun2(10)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala index cba035132dfa..beaf17e6cbe3 100644 --- a/test/files/run/reify_closure6.scala +++ b/test/files/run/reify_closure6.scala @@ -26,4 +26,4 @@ object Test extends App { val fun2 = foo(List(1, 2, 3, 4)) println("second invocation = " + fun2(10)) println("q after second invocation = " + q) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala index 2a7ce25e8866..7c0ff0b284f0 100644 --- a/test/files/run/reify_closure7.scala +++ b/test/files/run/reify_closure7.scala @@ -30,4 +30,4 @@ object Test extends App { println("first invocation = " + fun1(10)) val fun2 = foo(List(1, 2, 3, 4)) println("second invocation = " + fun2(10)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure8a.scala b/test/files/run/reify_closure8a.scala index f303a7511c10..150db0f5caea 100644 --- a/test/files/run/reify_closure8a.scala +++ b/test/files/run/reify_closure8a.scala @@ -12,4 +12,4 @@ object Test extends App { val dyn = toolbox.eval(new Foo(10).fun.tree) val foo = dyn.asInstanceOf[Int] println(foo) -} \ No newline at end of file +} diff --git a/test/files/run/reify_closure8b.scala b/test/files/run/reify_closure8b.scala index c693cb490e90..b5322896f30f 100644 --- a/test/files/run/reify_closure8b.scala +++ 
b/test/files/run/reify_closure8b.scala @@ -18,4 +18,4 @@ object Test extends App { case ex: Throwable => println(ex) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_closures10.scala b/test/files/run/reify_closures10.scala index a2740c83626f..cd25453b84c9 100644 --- a/test/files/run/reify_closures10.scala +++ b/test/files/run/reify_closures10.scala @@ -10,4 +10,4 @@ object Test extends App { val toolbox = cm.mkToolBox() println(toolbox.eval(code.tree)) -} \ No newline at end of file +} diff --git a/test/files/run/reify_complex.scala b/test/files/run/reify_complex.scala index 4abec3900e3e..885a6323bc26 100644 --- a/test/files/run/reify_complex.scala +++ b/test/files/run/reify_complex.scala @@ -22,4 +22,4 @@ object Test extends App { val x = new Complex(2, 1); val y = new Complex(1, 3) println(x + y) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_copypaste1.javaopts b/test/files/run/reify_copypaste1.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/reify_copypaste1.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala index cf813182aec0..d11dcc563043 100644 --- a/test/files/run/reify_copypaste1.scala +++ b/test/files/run/reify_copypaste1.scala @@ -1,3 +1,5 @@ +//> using javaOpt -Dneeds.forked.jvm + import scala.reflect.runtime._ import scala.reflect.runtime.universe._ import scala.reflect.runtime.universe.definitions._ @@ -16,4 +18,4 @@ object Test extends App { output.reset() toolBox.eval(Block(stats, expr)) stdout.println(output.toString) -} \ No newline at end of file +} diff --git a/test/files/run/reify_copypaste2.scala b/test/files/run/reify_copypaste2.scala index 12d08cf244b8..e64a729dfad0 100644 --- a/test/files/run/reify_copypaste2.scala +++ b/test/files/run/reify_copypaste2.scala @@ -7,4 +7,4 @@ object Test extends App { val x = 2 val outer = 
reify{reify{x}} println(outer.tree) -} \ No newline at end of file +} diff --git a/test/files/run/reify_for1.scala b/test/files/run/reify_for1.scala index e1f5347572c8..e0d7e41bcd04 100644 --- a/test/files/run/reify_for1.scala +++ b/test/files/run/reify_for1.scala @@ -7,4 +7,4 @@ object Test extends App { val sumOfSquares2 = (1 to 100).filter(_ % 3 == 0).map(Math.pow(_, 2)).sum assert(sumOfSquares1 == sumOfSquares2) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_fors_newpatmat.scala b/test/files/run/reify_fors_newpatmat.scala index 6bee9538a8e4..5387590053ff 100644 --- a/test/files/run/reify_fors_newpatmat.scala +++ b/test/files/run/reify_fors_newpatmat.scala @@ -82,7 +82,7 @@ object Test extends App { print("Persons over 20:") olderThan20(persons) foreach { x => print(" " + x) } - println + println() import Numeric._ @@ -90,7 +90,7 @@ object Test extends App { print("findNums(15) =") findNums(15) foreach { x => print(" " + x) } - println + println() val xs = List(3.5, 5.0, 4.5) println("average(" + xs + ") = " + sum(xs) / xs.length) @@ -98,4 +98,4 @@ object Test extends App { val ys = List(2.0, 1.0, 3.0) println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys)) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_fors_oldpatmat.scala b/test/files/run/reify_fors_oldpatmat.scala index 6bee9538a8e4..5387590053ff 100644 --- a/test/files/run/reify_fors_oldpatmat.scala +++ b/test/files/run/reify_fors_oldpatmat.scala @@ -82,7 +82,7 @@ object Test extends App { print("Persons over 20:") olderThan20(persons) foreach { x => print(" " + x) } - println + println() import Numeric._ @@ -90,7 +90,7 @@ object Test extends App { print("findNums(15) =") findNums(15) foreach { x => print(" " + x) } - println + println() val xs = List(3.5, 5.0, 4.5) println("average(" + xs + ") = " + sum(xs) / xs.length) @@ -98,4 +98,4 @@ object Test extends App { val ys = List(2.0, 1.0, 3.0) println("scalProd(" + xs + ", " + ys +") = " + 
scalProd(xs, ys)) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_generic.scala b/test/files/run/reify_generic.scala index 7baffac6a30c..35a46f686015 100644 --- a/test/files/run/reify_generic.scala +++ b/test/files/run/reify_generic.scala @@ -6,4 +6,4 @@ object Test extends App { val product = List(1, 2, 3).head * List[Any](4, 2, 0).head.asInstanceOf[Int] println(product) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_generic2.scala b/test/files/run/reify_generic2.scala index 36ab61e077b6..0582f6aaa55d 100644 --- a/test/files/run/reify_generic2.scala +++ b/test/files/run/reify_generic2.scala @@ -7,4 +7,4 @@ object Test extends App { val product = List(new C, new C).length * List[C](new C, new C).length println(product) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_getter.scala b/test/files/run/reify_getter.scala index cb04ddffdea1..054baf10883d 100644 --- a/test/files/run/reify_getter.scala +++ b/test/files/run/reify_getter.scala @@ -15,4 +15,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_implicits-old.scala b/test/files/run/reify_implicits-old.scala index a4e90488e5a8..efc70797b61b 100644 --- a/test/files/run/reify_implicits-old.scala +++ b/test/files/run/reify_implicits-old.scala @@ -3,7 +3,9 @@ import scala.language.{ implicitConversions, reflectiveCalls } import scala.reflect.runtime.universe._ import scala.tools.reflect.Eval +@deprecated("","") object Test extends App { + import scala.reflect.ClassManifest reify { implicit def arrayWrapper[A : ClassManifest](x: Array[A]) = new { diff --git a/test/files/run/reify_inheritance.scala b/test/files/run/reify_inheritance.scala index c73266443874..bbd0a46e48a0 100644 --- a/test/files/run/reify_inheritance.scala +++ b/test/files/run/reify_inheritance.scala @@ -14,4 +14,4 @@ object 
Test extends App { println(new D().y * new C().x) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_inner1.scala b/test/files/run/reify_inner1.scala index 8da338ee4aca..9238bbb737cc 100644 --- a/test/files/run/reify_inner1.scala +++ b/test/files/run/reify_inner1.scala @@ -13,4 +13,4 @@ object Test extends App { val inner = new outer.D() println(inner.x) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_inner2.scala b/test/files/run/reify_inner2.scala index f82eff8f0307..6616bae72e0f 100644 --- a/test/files/run/reify_inner2.scala +++ b/test/files/run/reify_inner2.scala @@ -13,4 +13,4 @@ object Test extends App { val inner = outer.D println(inner.x) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_inner3.scala b/test/files/run/reify_inner3.scala index 72f8d9a38af5..859b63f9436f 100644 --- a/test/files/run/reify_inner3.scala +++ b/test/files/run/reify_inner3.scala @@ -13,4 +13,4 @@ object Test extends App { val inner = new outer.D println(inner.x) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_inner4.scala b/test/files/run/reify_inner4.scala index ecbbf149a467..e2fc921c55c2 100644 --- a/test/files/run/reify_inner4.scala +++ b/test/files/run/reify_inner4.scala @@ -13,4 +13,4 @@ object Test extends App { val inner = outer.D println(inner.x) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_magicsymbols.check b/test/files/run/reify_magicsymbols.check index c9d892d79328..84926be41be0 100644 --- a/test/files/run/reify_magicsymbols.check +++ b/test/files/run/reify_magicsymbols.check @@ -9,5 +9,4 @@ List[AnyRef] List[Null] List[Nothing] AnyRef{def foo(x: Int): Int} -Int* => Unit (=> Int) => Unit diff --git a/test/files/run/reify_magicsymbols.scala b/test/files/run/reify_magicsymbols.scala index 256ecbea3379..c85e0be76c85 100644 --- a/test/files/run/reify_magicsymbols.scala +++ b/test/files/run/reify_magicsymbols.scala @@ -12,6 +12,6 @@ object Test 
extends App { println(typeOf[List[Null]]) println(typeOf[List[Nothing]]) println(typeOf[{def foo(x: Int): Int}]) - println(typeOf[(Int*) => Unit]) + //println(typeOf[(Int*) => Unit]) println(typeOf[(=> Int) => Unit]) -} \ No newline at end of file +} diff --git a/test/files/run/reify_maps_newpatmat.scala b/test/files/run/reify_maps_newpatmat.scala index b538355b0353..57c9d560ae45 100644 --- a/test/files/run/reify_maps_newpatmat.scala +++ b/test/files/run/reify_maps_newpatmat.scala @@ -17,4 +17,4 @@ object Test extends App { } ) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_maps_oldpatmat.scala b/test/files/run/reify_maps_oldpatmat.scala index b538355b0353..57c9d560ae45 100644 --- a/test/files/run/reify_maps_oldpatmat.scala +++ b/test/files/run/reify_maps_oldpatmat.scala @@ -17,4 +17,4 @@ object Test extends App { } ) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala b/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala index 76f935ecd270..3ca47c436024 100644 --- a/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala +++ b/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala @@ -15,4 +15,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala index e7c5cb71c1bd..a8b45b3a46ae 100644 --- a/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala +++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala @@ -13,4 +13,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala 
b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala index 770fcccd1557..7378774ba22e 100644 --- a/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala +++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala @@ -18,4 +18,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala index 32e7e9003b77..f2fb53400390 100644 --- a/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala +++ b/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala @@ -15,4 +15,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_nested_inner_refers_to_global.scala b/test/files/run/reify_nested_inner_refers_to_global.scala index 877222f5bfba..e9845732995e 100644 --- a/test/files/run/reify_nested_inner_refers_to_global.scala +++ b/test/files/run/reify_nested_inner_refers_to_global.scala @@ -14,4 +14,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_nested_inner_refers_to_local.scala b/test/files/run/reify_nested_inner_refers_to_local.scala index 703474e07e1b..802071ffbe07 100644 --- a/test/files/run/reify_nested_inner_refers_to_local.scala +++ b/test/files/run/reify_nested_inner_refers_to_local.scala @@ -14,4 +14,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_nested_outer_refers_to_global.scala b/test/files/run/reify_nested_outer_refers_to_global.scala index 
e40c569ce6f2..374f8a536d38 100644 --- a/test/files/run/reify_nested_outer_refers_to_global.scala +++ b/test/files/run/reify_nested_outer_refers_to_global.scala @@ -16,4 +16,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_nested_outer_refers_to_local.scala b/test/files/run/reify_nested_outer_refers_to_local.scala index 12147c51dab4..2d6fd64174b1 100644 --- a/test/files/run/reify_nested_outer_refers_to_local.scala +++ b/test/files/run/reify_nested_outer_refers_to_local.scala @@ -16,4 +16,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_01.scala b/test/files/run/reify_newimpl_01.scala index e4b46e428f1e..20167da16fae 100644 --- a/test/files/run/reify_newimpl_01.scala +++ b/test/files/run/reify_newimpl_01.scala @@ -10,4 +10,4 @@ object Test extends App { } println(code.eval) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_02.scala b/test/files/run/reify_newimpl_02.scala index fa7ee17acfa2..4d72c780992a 100644 --- a/test/files/run/reify_newimpl_02.scala +++ b/test/files/run/reify_newimpl_02.scala @@ -10,4 +10,4 @@ object Test extends App { } println(code.eval) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_03.scala b/test/files/run/reify_newimpl_03.scala index 8d6542552841..2fbd89ccb2d5 100644 --- a/test/files/run/reify_newimpl_03.scala +++ b/test/files/run/reify_newimpl_03.scala @@ -10,4 +10,4 @@ object Test extends App { } println(code.eval) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_04.scala b/test/files/run/reify_newimpl_04.scala index 21341ed1024c..c75207b30d2b 100644 --- a/test/files/run/reify_newimpl_04.scala +++ b/test/files/run/reify_newimpl_04.scala @@ 
-10,4 +10,4 @@ object Test extends App { } println(code.eval) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_05.scala b/test/files/run/reify_newimpl_05.scala index 635eba382702..758a296e3ffe 100644 --- a/test/files/run/reify_newimpl_05.scala +++ b/test/files/run/reify_newimpl_05.scala @@ -11,4 +11,4 @@ object Test extends App { } println(code.eval) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_06.scala b/test/files/run/reify_newimpl_06.scala index 0bf37da8c6f3..49022e962685 100644 --- a/test/files/run/reify_newimpl_06.scala +++ b/test/files/run/reify_newimpl_06.scala @@ -10,4 +10,4 @@ object Test extends App { } println(new C(2).code.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_11.scala b/test/files/run/reify_newimpl_11.scala index e8ca66441867..9cc15747dbc7 100644 --- a/test/files/run/reify_newimpl_11.scala +++ b/test/files/run/reify_newimpl_11.scala @@ -16,4 +16,4 @@ object Test extends App { case ex: Throwable => println(ex) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_12.scala b/test/files/run/reify_newimpl_12.scala index 246d7b4d4c5f..739f6ff6792a 100644 --- a/test/files/run/reify_newimpl_12.scala +++ b/test/files/run/reify_newimpl_12.scala @@ -11,4 +11,4 @@ object Test extends App { } new C[Int] -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_13.scala b/test/files/run/reify_newimpl_13.scala index 1b2b8cb529ca..5aa14aa553a0 100644 --- a/test/files/run/reify_newimpl_13.scala +++ b/test/files/run/reify_newimpl_13.scala @@ -18,4 +18,4 @@ object Test extends App { println(ex) } } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_14.scala b/test/files/run/reify_newimpl_14.scala index 284e87acd379..547df10c0868 100644 --- a/test/files/run/reify_newimpl_14.scala +++ b/test/files/run/reify_newimpl_14.scala @@ -13,4 +13,4 @@ object Test extends App { new C[Int] } -} \ No newline at end 
of file +} diff --git a/test/files/run/reify_newimpl_15.scala b/test/files/run/reify_newimpl_15.scala index cb66e8549e6e..504cdf7c7034 100644 --- a/test/files/run/reify_newimpl_15.scala +++ b/test/files/run/reify_newimpl_15.scala @@ -12,4 +12,4 @@ object Test extends App { } new C -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_18.scala b/test/files/run/reify_newimpl_18.scala index 8456fd1b8ffd..7d6401c62f67 100644 --- a/test/files/run/reify_newimpl_18.scala +++ b/test/files/run/reify_newimpl_18.scala @@ -12,4 +12,4 @@ object Test extends App { } new C[Int] -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_19.scala b/test/files/run/reify_newimpl_19.scala index ba2d39cfdb84..eca455279f90 100644 --- a/test/files/run/reify_newimpl_19.scala +++ b/test/files/run/reify_newimpl_19.scala @@ -17,4 +17,4 @@ object Test extends App { case ex: Throwable => println(ex) } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_20.scala b/test/files/run/reify_newimpl_20.scala index f8ddb53a2208..b5c6499023ca 100644 --- a/test/files/run/reify_newimpl_20.scala +++ b/test/files/run/reify_newimpl_20.scala @@ -13,4 +13,4 @@ object Test extends App { } new C { type T = String } // this "mistake" is made for a reason! 
-} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_21.scala b/test/files/run/reify_newimpl_21.scala index 97261b21ed49..ff78d9bb7909 100644 --- a/test/files/run/reify_newimpl_21.scala +++ b/test/files/run/reify_newimpl_21.scala @@ -17,4 +17,4 @@ object Test extends App { } println((new D).code.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check index b2f4d5624e1b..7b3394bf383f 100644 --- a/test/files/run/reify_newimpl_22.check +++ b/test/files/run/reify_newimpl_22.check @@ -15,9 +15,9 @@ scala> { } println(code.eval) } -:19: free term: Ident(TermName("x")) defined by res0 in :18:7 val code = reify { ^ +On line 3: free term: Ident(TermName("x")) defined by res0 in :2:7 2 scala> :quit diff --git a/test/files/run/reify_newimpl_22.scala b/test/files/run/reify_newimpl_22.scala index 8512620a16eb..beaf13e558c8 100644 --- a/test/files/run/reify_newimpl_22.scala +++ b/test/files/run/reify_newimpl_22.scala @@ -14,4 +14,4 @@ import scala.tools.reflect.Eval println(code.eval) } """ -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check index abf314b26a3b..d9c6db0e8d6f 100644 --- a/test/files/run/reify_newimpl_23.check +++ b/test/files/run/reify_newimpl_23.check @@ -8,15 +8,15 @@ import scala.tools.reflect.ToolBox scala> import scala.tools.reflect.Eval import scala.tools.reflect.Eval -scala> def foo[T]{ +scala> def foo[T] = { val code = reify { List[T]() } println(code.eval) } -:17: free type: Ident(TypeName("T")) defined by foo in :16:9 val code = reify { ^ -foo: [T]=> Unit +On line 2: free type: Ident(TypeName("T")) defined by foo in :1:9 +def foo[T]: Unit scala> :quit diff --git a/test/files/run/reify_newimpl_23.scala b/test/files/run/reify_newimpl_23.scala index d4c2a68ce65d..f05cbc5628d8 100644 --- a/test/files/run/reify_newimpl_23.scala +++ b/test/files/run/reify_newimpl_23.scala @@ -6,11 +6,11 @@ 
object Test extends ReplTest { import scala.reflect.runtime.universe._ import scala.tools.reflect.ToolBox import scala.tools.reflect.Eval -def foo[T]{ +def foo[T] = { val code = reify { List[T]() } println(code.eval) } """ -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check index d446caa91a03..c7ee65707c5a 100644 --- a/test/files/run/reify_newimpl_25.check +++ b/test/files/run/reify_newimpl_25.check @@ -5,9 +5,9 @@ scala> { val tt = implicitly[TypeTag[x.type]] println(tt) } -:15: free term: Ident(TermName("x")) defined by res0 in :14:7 val tt = implicitly[TypeTag[x.type]] ^ +On line 4: free term: Ident(TermName("x")) defined by res0 in :3:7 TypeTag[x.type] scala> :quit diff --git a/test/files/run/reify_newimpl_25.scala b/test/files/run/reify_newimpl_25.scala index 01cc04b59f3d..5a999656bbf1 100644 --- a/test/files/run/reify_newimpl_25.scala +++ b/test/files/run/reify_newimpl_25.scala @@ -10,4 +10,4 @@ object Test extends ReplTest { println(tt) } """ -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check index eb2b8309a085..0c8cd0eaa050 100644 --- a/test/files/run/reify_newimpl_26.check +++ b/test/files/run/reify_newimpl_26.check @@ -1,13 +1,13 @@ -scala> def foo[T]{ +scala> def foo[T] = { import scala.reflect.runtime.universe._ val tt = implicitly[WeakTypeTag[List[T]]] println(tt) } -:13: free type: Ident(TypeName("T")) defined by foo in :11:9 val tt = implicitly[WeakTypeTag[List[T]]] ^ -foo: [T]=> Unit +On line 3: free type: Ident(TypeName("T")) defined by foo in :1:9 +def foo[T]: Unit scala> foo[Int] WeakTypeTag[List[T]] diff --git a/test/files/run/reify_newimpl_26.scala b/test/files/run/reify_newimpl_26.scala index af74d60e8b12..f1f158e9ef7d 100644 --- a/test/files/run/reify_newimpl_26.scala +++ b/test/files/run/reify_newimpl_26.scala @@ -3,11 +3,11 @@ import scala.tools.partest.ReplTest object Test extends ReplTest { 
override def extraSettings = "-Xlog-free-types" def code = """ -def foo[T]{ +def foo[T] = { import scala.reflect.runtime.universe._ val tt = implicitly[WeakTypeTag[List[T]]] println(tt) } foo[Int] """ -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_27.scala b/test/files/run/reify_newimpl_27.scala index db9ada36e432..b9e95f5bddcd 100644 --- a/test/files/run/reify_newimpl_27.scala +++ b/test/files/run/reify_newimpl_27.scala @@ -12,4 +12,4 @@ object Test extends App { } C -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_29.scala b/test/files/run/reify_newimpl_29.scala index 033c360b8c9e..ad4419bb67e6 100644 --- a/test/files/run/reify_newimpl_29.scala +++ b/test/files/run/reify_newimpl_29.scala @@ -12,4 +12,4 @@ object Test extends App { } new C -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_31.scala b/test/files/run/reify_newimpl_31.scala index 2e20aa0f62ce..55661753490d 100644 --- a/test/files/run/reify_newimpl_31.scala +++ b/test/files/run/reify_newimpl_31.scala @@ -12,4 +12,4 @@ object Test extends App { } C -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_33.scala b/test/files/run/reify_newimpl_33.scala index 98bb2e510297..523285445ffa 100644 --- a/test/files/run/reify_newimpl_33.scala +++ b/test/files/run/reify_newimpl_33.scala @@ -13,4 +13,4 @@ object Test extends App { } C -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_35.check b/test/files/run/reify_newimpl_35.check index 4f9b43dbbfa1..4e02e78b9d43 100644 --- a/test/files/run/reify_newimpl_35.check +++ b/test/files/run/reify_newimpl_35.check @@ -3,7 +3,7 @@ scala> import scala.reflect.runtime.universe._ import scala.reflect.runtime.universe._ scala> def foo[T: TypeTag] = reify{List[T]()} -foo: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Expr[List[T]] +def foo[T](implicit evidence$1: reflect.runtime.universe.TypeTag[T]): 
reflect.runtime.universe.Expr[List[T]] scala> println(foo) Expr[List[Nothing]](Nil) diff --git a/test/files/run/reify_newimpl_35.scala b/test/files/run/reify_newimpl_35.scala index f2ebf5181be8..aad6ce5a52b8 100644 --- a/test/files/run/reify_newimpl_35.scala +++ b/test/files/run/reify_newimpl_35.scala @@ -1,5 +1,6 @@ import scala.tools.partest.ReplTest +// Show that reify sees rewrite to Nil in typer object Test extends ReplTest { override def extraSettings = "-Xlog-free-types" def code = """ diff --git a/test/files/run/reify_newimpl_36.scala b/test/files/run/reify_newimpl_36.scala index 490e645cf79f..df4799803ffb 100644 --- a/test/files/run/reify_newimpl_36.scala +++ b/test/files/run/reify_newimpl_36.scala @@ -12,4 +12,4 @@ object Test extends App { println(code2.eval) } } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_37.scala b/test/files/run/reify_newimpl_37.scala index 7c4d4af3ddd3..c8ceba340703 100644 --- a/test/files/run/reify_newimpl_37.scala +++ b/test/files/run/reify_newimpl_37.scala @@ -13,4 +13,4 @@ object Test extends App { println(code3.eval) } } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_38.scala b/test/files/run/reify_newimpl_38.scala index fd898b9ab285..b3160f0183f8 100644 --- a/test/files/run/reify_newimpl_38.scala +++ b/test/files/run/reify_newimpl_38.scala @@ -12,4 +12,4 @@ object Test extends App { println(code2.eval) } } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_39.scala b/test/files/run/reify_newimpl_39.scala index 885c738275b8..0568eb482476 100644 --- a/test/files/run/reify_newimpl_39.scala +++ b/test/files/run/reify_newimpl_39.scala @@ -13,4 +13,4 @@ object Test extends App { println(code3.eval) } } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_40.scala b/test/files/run/reify_newimpl_40.scala index 018bf720f366..0409f71ad9c9 100644 --- a/test/files/run/reify_newimpl_40.scala +++ 
b/test/files/run/reify_newimpl_40.scala @@ -13,4 +13,4 @@ object Test extends App { println(code3.eval) } } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_41.scala b/test/files/run/reify_newimpl_41.scala index 9bb79fb2b9dc..5eb5e5780f33 100644 --- a/test/files/run/reify_newimpl_41.scala +++ b/test/files/run/reify_newimpl_41.scala @@ -15,4 +15,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_42.scala b/test/files/run/reify_newimpl_42.scala index bd7deadea6c1..669932e422b2 100644 --- a/test/files/run/reify_newimpl_42.scala +++ b/test/files/run/reify_newimpl_42.scala @@ -14,4 +14,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_43.scala b/test/files/run/reify_newimpl_43.scala index 88ea22432216..823464015f50 100644 --- a/test/files/run/reify_newimpl_43.scala +++ b/test/files/run/reify_newimpl_43.scala @@ -13,4 +13,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_44.scala b/test/files/run/reify_newimpl_44.scala index 88ea22432216..823464015f50 100644 --- a/test/files/run/reify_newimpl_44.scala +++ b/test/files/run/reify_newimpl_44.scala @@ -13,4 +13,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_45.scala b/test/files/run/reify_newimpl_45.scala index fd8011f46878..148c3650aaad 100644 --- a/test/files/run/reify_newimpl_45.scala +++ b/test/files/run/reify_newimpl_45.scala @@ -13,4 +13,4 @@ object Test extends App { } new C[String] -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_47.scala b/test/files/run/reify_newimpl_47.scala index 8740132f6a1a..b78189399932 100644 --- a/test/files/run/reify_newimpl_47.scala +++ b/test/files/run/reify_newimpl_47.scala @@ -14,4 +14,4 @@ object Test extends App { } println(code.eval) -} \ No newline 
at end of file +} diff --git a/test/files/run/reify_newimpl_48.scala b/test/files/run/reify_newimpl_48.scala index 9899bc09a045..5ad7eb961169 100644 --- a/test/files/run/reify_newimpl_48.scala +++ b/test/files/run/reify_newimpl_48.scala @@ -19,4 +19,4 @@ object Test extends App { } println(code.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_49.scala b/test/files/run/reify_newimpl_49.scala index 2222bd69d551..957c5bbde76a 100644 --- a/test/files/run/reify_newimpl_49.scala +++ b/test/files/run/reify_newimpl_49.scala @@ -13,4 +13,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_50.scala b/test/files/run/reify_newimpl_50.scala index 279cb161a028..64d52eb50bf3 100644 --- a/test/files/run/reify_newimpl_50.scala +++ b/test/files/run/reify_newimpl_50.scala @@ -12,4 +12,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_51.scala b/test/files/run/reify_newimpl_51.scala index f823bf4033cd..011d9a6cebc8 100644 --- a/test/files/run/reify_newimpl_51.scala +++ b/test/files/run/reify_newimpl_51.scala @@ -15,4 +15,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_newimpl_52.scala b/test/files/run/reify_newimpl_52.scala index f01199e830c8..8b473ffcf797 100644 --- a/test/files/run/reify_newimpl_52.scala +++ b/test/files/run/reify_newimpl_52.scala @@ -15,4 +15,4 @@ object Test extends App { } code.eval } -} \ No newline at end of file +} diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala index 099a353e8988..d2185f1cff54 100644 --- a/test/files/run/reify_printf.scala +++ b/test/files/run/reify_printf.scala @@ -43,8 +43,8 @@ object Test extends App { val Literal(Constant(s_format: String)) = format val paramsStack = scala.collection.mutable.Stack(params: _*) val parsed = s_format.split("(?<=%[\\w%])|(?=%[\\w%])") map { 
- case "%d" => createTempValDef( paramsStack.pop, typeOf[Int] ) - case "%s" => createTempValDef( paramsStack.pop, typeOf[String] ) + case "%d" => createTempValDef( paramsStack.pop(), typeOf[Int] ) + case "%s" => createTempValDef( paramsStack.pop(), typeOf[String] ) case "%%" => { (None:Option[Tree], Literal(Constant("%"))) } diff --git a/test/files/run/reify_properties.scala b/test/files/run/reify_properties.scala index 01a9b12a9295..1041482670fc 100644 --- a/test/files/run/reify_properties.scala +++ b/test/files/run/reify_properties.scala @@ -13,7 +13,7 @@ object Test extends App { /** The setter function, defaults to identity. */ private var getter: T => T = identity[T] - /** Retrive the value held in this property. */ + /** Retrieve the value held in this property. */ def apply(): T = getter(value) /** Update the value held in this property, through the setter. */ diff --git a/test/files/run/reify_renamed_term_basic.scala b/test/files/run/reify_renamed_term_basic.scala index cd76def39509..563ce1e8cf83 100644 --- a/test/files/run/reify_renamed_term_basic.scala +++ b/test/files/run/reify_renamed_term_basic.scala @@ -17,4 +17,4 @@ object Test extends App { ) println(expr.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_renamed_term_local_to_reifee.scala b/test/files/run/reify_renamed_term_local_to_reifee.scala index 1860316a5bb7..44725ff435a0 100644 --- a/test/files/run/reify_renamed_term_local_to_reifee.scala +++ b/test/files/run/reify_renamed_term_local_to_reifee.scala @@ -17,4 +17,4 @@ object Test extends App { } println(expr.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_renamed_term_overloaded_method.scala b/test/files/run/reify_renamed_term_overloaded_method.scala index 3ef442d20398..a93c8f60fd89 100644 --- a/test/files/run/reify_renamed_term_overloaded_method.scala +++ b/test/files/run/reify_renamed_term_overloaded_method.scala @@ -14,4 +14,4 @@ object Test extends App { } println(expr.eval) -} \ No newline 
at end of file +} diff --git a/test/files/run/reify_renamed_term_t5841.scala b/test/files/run/reify_renamed_term_t5841.scala index ef18d650bf39..551d5afe59b1 100644 --- a/test/files/run/reify_renamed_term_t5841.scala +++ b/test/files/run/reify_renamed_term_t5841.scala @@ -4,4 +4,4 @@ import scala.tools.reflect.Eval object Test extends App { println(reify{ru}.eval.getClass) -} \ No newline at end of file +} diff --git a/test/files/run/reify_renamed_type_basic.scala b/test/files/run/reify_renamed_type_basic.scala index 23729e5c5423..1666c9687cbe 100644 --- a/test/files/run/reify_renamed_type_basic.scala +++ b/test/files/run/reify_renamed_type_basic.scala @@ -13,4 +13,4 @@ object Test extends App { } println(expr.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_renamed_type_local_to_reifee.scala b/test/files/run/reify_renamed_type_local_to_reifee.scala index ed1bad239e6e..56dfa881cc6f 100644 --- a/test/files/run/reify_renamed_type_local_to_reifee.scala +++ b/test/files/run/reify_renamed_type_local_to_reifee.scala @@ -21,4 +21,4 @@ object Test extends App { } println(expr.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_renamed_type_spliceable.scala b/test/files/run/reify_renamed_type_spliceable.scala index 9c2cff519932..73de3cc160a9 100644 --- a/test/files/run/reify_renamed_type_spliceable.scala +++ b/test/files/run/reify_renamed_type_spliceable.scala @@ -18,4 +18,4 @@ object Test extends App { }) println(expr.eval) -} \ No newline at end of file +} diff --git a/test/files/run/reify_sort.scala b/test/files/run/reify_sort.scala index 17e3976c09f3..867f33c3368b 100644 --- a/test/files/run/reify_sort.scala +++ b/test/files/run/reify_sort.scala @@ -7,13 +7,13 @@ object Test extends App { * visible in their scope (including local variables or * arguments of enclosing methods). 
*/ - def sort(a: Array[Int]) { + def sort(a: Array[Int]): Unit = { - def swap(i: Int, j: Int) { + def swap(i: Int, j: Int): Unit = { val t = a(i); a(i) = a(j); a(j) = t } - def sort1(l: Int, r: Int) { + def sort1(l: Int, r: Int): Unit = { val pivot = a((l + r) / 2) var i = l var j = r @@ -34,7 +34,7 @@ object Test extends App { sort1(0, a.length - 1) } - def println(ar: Array[Int]) { + def println(ar: Array[Int]): Unit = { def print1 = { def iter(i: Int): String = ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "") @@ -48,4 +48,4 @@ object Test extends App { sort(ar) println(ar) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_sort1.scala b/test/files/run/reify_sort1.scala index 6fb3cc5895a0..27bf1ba28e9e 100644 --- a/test/files/run/reify_sort1.scala +++ b/test/files/run/reify_sort1.scala @@ -18,4 +18,4 @@ object Test extends App { println(xs) println(sort(xs)) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/reify_typerefs_1a.scala b/test/files/run/reify_typerefs_1a.scala index 2e961f171de6..a4ea4f92344f 100644 --- a/test/files/run/reify_typerefs_1a.scala +++ b/test/files/run/reify_typerefs_1a.scala @@ -15,4 +15,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_typerefs_1b.scala b/test/files/run/reify_typerefs_1b.scala index 88bb864820e5..ecc8d2b9b40a 100644 --- a/test/files/run/reify_typerefs_1b.scala +++ b/test/files/run/reify_typerefs_1b.scala @@ -15,4 +15,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_typerefs_2a.scala b/test/files/run/reify_typerefs_2a.scala index 3a1db1d80f2f..3a3fcc41e510 100644 --- a/test/files/run/reify_typerefs_2a.scala +++ b/test/files/run/reify_typerefs_2a.scala @@ -17,4 +17,4 @@ 
object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_typerefs_2b.scala b/test/files/run/reify_typerefs_2b.scala index 50082aa8d298..26c44d03c1e5 100644 --- a/test/files/run/reify_typerefs_2b.scala +++ b/test/files/run/reify_typerefs_2b.scala @@ -17,4 +17,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_typerefs_3a.scala b/test/files/run/reify_typerefs_3a.scala index 682d6f01ac47..217dab3936da 100644 --- a/test/files/run/reify_typerefs_3a.scala +++ b/test/files/run/reify_typerefs_3a.scala @@ -17,4 +17,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_typerefs_3b.scala b/test/files/run/reify_typerefs_3b.scala index c85072f55fff..2967c129f68e 100644 --- a/test/files/run/reify_typerefs_3b.scala +++ b/test/files/run/reify_typerefs_3b.scala @@ -17,4 +17,4 @@ object Test extends App { val toolbox = cm.mkToolBox() val evaluated = toolbox.eval(code.tree) println("evaluated = " + evaluated) -} \ No newline at end of file +} diff --git a/test/files/run/reify_varargs.scala b/test/files/run/reify_varargs.scala index 1cbc7c9473c5..d157149b467e 100644 --- a/test/files/run/reify_varargs.scala +++ b/test/files/run/reify_varargs.scala @@ -8,4 +8,4 @@ object Test extends App { "Hoth", "the fifth of August", "a disturbance in the Force") println("Message="+msg) }.eval -} \ No newline at end of file +} diff --git a/test/files/run/repl-always-use-instance.check b/test/files/run/repl-always-use-instance.check index 5b1f9786a7a7..d234c90a2557 100644 --- a/test/files/run/repl-always-use-instance.check +++ 
b/test/files/run/repl-always-use-instance.check @@ -1,14 +1,14 @@ scala> case class Name(value: String) -defined class Name +class Name scala> val x = Name("foo") -x: Name = Name(foo) +val x: Name = Name(foo) scala> val y = Name("bar") -y: Name = Name(bar) +val y: Name = Name(bar) scala> val z = Name(x.value + y.value) -z: Name = Name(foobar) +val z: Name = Name(foobar) scala> :quit diff --git a/test/files/run/repl-any-error.check b/test/files/run/repl-any-error.check new file mode 100644 index 000000000000..586dbdcf4795 --- /dev/null +++ b/test/files/run/repl-any-error.check @@ -0,0 +1,5 @@ + +scala> 42 +val res0: Int = 42 + +scala> :quit diff --git a/test/files/run/repl-any-error.scala b/test/files/run/repl-any-error.scala new file mode 100644 index 000000000000..73e74076942f --- /dev/null +++ b/test/files/run/repl-any-error.scala @@ -0,0 +1,9 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def extraSettings = "-Wconf:any:error" + + def code = """ +42 + """.trim +} diff --git a/test/files/run/repl-assign.check b/test/files/run/repl-assign.check index a9e0a2204a6d..247fbbd8252d 100644 --- a/test/files/run/repl-assign.check +++ b/test/files/run/repl-assign.check @@ -1,14 +1,14 @@ -scala> var x = 10 -x: Int = 10 +scala> var x = 42 +var x: Int = 42 -scala> var y = 11 -y: Int = 11 +scala> x = 17 +// mutated x -scala> x = 12 -x: Int = 12 +scala> x += 10 +// mutated x -scala> y = 13 -y: Int = 13 +scala> x +val res0: Int = 27 scala> :quit diff --git a/test/files/run/repl-assign.scala b/test/files/run/repl-assign.scala index ee3c1649d8b9..7c12d3d2b4d7 100644 --- a/test/files/run/repl-assign.scala +++ b/test/files/run/repl-assign.scala @@ -1,10 +1,4 @@ -import scala.tools.partest.ReplTest -object Test extends ReplTest { - def code = """ -var x = 10 -var y = 11 -x = 12 -y = 13 - """ -} \ No newline at end of file +import scala.tools.partest.SessionTest + +object Test extends SessionTest diff --git a/test/files/run/repl-backticks.scala 
b/test/files/run/repl-backticks.scala index e40a8bc66225..c2427389c0c2 100644 --- a/test/files/run/repl-backticks.scala +++ b/test/files/run/repl-backticks.scala @@ -1,4 +1,5 @@ -import scala.tools.nsc._ +import scala.tools.nsc._ +import scala.tools.nsc.interpreter.shell.ReplReporterImpl object Test { val testCode = """ @@ -8,10 +9,10 @@ object Test { `yield` """ - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val settings = new Settings() settings.classpath.value = System.getProperty("java.class.path") - val repl = new interpreter.IMain(settings) + val repl = new interpreter.IMain(settings, new ReplReporterImpl(settings)) repl.interpret(testCode) } } diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check index bdf8842bb0b1..ab6fd1639c7f 100644 --- a/test/files/run/repl-bare-expr.check +++ b/test/files/run/repl-bare-expr.check @@ -1,48 +1,42 @@ scala> 2 ; 3 -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 2 ;; ^ -res0: Int = 3 + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res0: Int = 3 scala> { 2 ; 3 } -:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses - { 2 ; 3 } ^ -res1: Int = 3 + warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses +val res1: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { 1 + 2 + 3 } ; bippy+88+11 -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may 
require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -defined object Cow -defined class Moo -bippy: Int -res2: Int = 105 + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +object Cow +class Moo +def bippy: Int +val res2: Int = 105 scala> scala> object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy * bippy * bippy -defined object Bovine -defined class Ruminant -res3: Int = 216 +object Bovine +class Ruminant +val res3: Int = 216 scala> Bovine.x = List(Ruminant(5), Cow, new Moo) -Bovine.x: List[Any] = List(Ruminant(5), Cow, Moooooo) +// mutated Bovine.x scala> Bovine.x -res4: List[Any] = List(Ruminant(5), Cow, Moooooo) +val res4: List[Any] = List(Ruminant(5), Cow, Moooooo) scala> :quit diff --git a/test/files/run/repl-bare-expr.scala b/test/files/run/repl-bare-expr.scala index df9849fa6d86..ea56f6a319ad 100644 --- a/test/files/run/repl-bare-expr.scala +++ b/test/files/run/repl-bare-expr.scala @@ -13,4 +13,4 @@ object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy 
* Bovine.x = List(Ruminant(5), Cow, new Moo) Bovine.x """ -} \ No newline at end of file +} diff --git a/test/files/run/repl-class-based-escaping-reads.check b/test/files/run/repl-class-based-escaping-reads.check index 5fab18ffb30f..7acfbe8565c5 100644 --- a/test/files/run/repl-class-based-escaping-reads.check +++ b/test/files/run/repl-class-based-escaping-reads.check @@ -1,8 +1,8 @@ scala> type anotherint = Int -defined type alias anotherint +type anotherint scala> val four: anotherint = 4 -four: anotherint = 4 +val four: anotherint = 4 scala> :quit diff --git a/test/files/run/repl-class-based-implicit-import.check b/test/files/run/repl-class-based-implicit-import.check index a165d264f2f4..85d97a78c1fd 100644 --- a/test/files/run/repl-class-based-implicit-import.check +++ b/test/files/run/repl-class-based-implicit-import.check @@ -1,9 +1,9 @@ scala> def showInt(implicit x: Int) = println(x) -showInt: (implicit x: Int)Unit +def showInt(implicit x: Int): Unit -scala> object IntHolder { implicit val myInt = 5 } -defined object IntHolder +scala> object IntHolder { implicit val myInt: Int = 5 } +object IntHolder scala> import IntHolder.myInt import IntHolder.myInt @@ -13,9 +13,9 @@ scala> showInt scala> class A; showInt 5 -defined class A +class A scala> class B { showInt } -defined class B +class B scala> :quit diff --git a/test/files/run/repl-class-based-implicit-import.scala b/test/files/run/repl-class-based-implicit-import.scala index 9153c8e08f7f..133c99969f66 100644 --- a/test/files/run/repl-class-based-implicit-import.scala +++ b/test/files/run/repl-class-based-implicit-import.scala @@ -11,7 +11,7 @@ object Test extends ReplTest { def code = """ |def showInt(implicit x: Int) = println(x) - |object IntHolder { implicit val myInt = 5 } + |object IntHolder { implicit val myInt: Int = 5 } |import IntHolder.myInt |showInt |class A; showInt diff --git a/test/files/run/repl-class-based-outer-pointers.check b/test/files/run/repl-class-based-outer-pointers.check index 
54bc714ab079..2e5c9044e832 100644 --- a/test/files/run/repl-class-based-outer-pointers.check +++ b/test/files/run/repl-class-based-outer-pointers.check @@ -4,13 +4,12 @@ scala> sealed abstract class Value; object Value { final case class Str(value: String) extends Value final case class Bool(value: Boolean) extends Value } -defined class Value -defined object Value +class Value +object Value scala> class C { final case class Num(value: Double) } // here it should still warn -:11: warning: The outer reference in this type test cannot be checked at run time. - class C { final case class Num(value: Double) } // here it should still warn ^ -defined class C + warning: The outer reference in this type test cannot be checked at run time. +class C scala> :quit diff --git a/test/files/run/repl-class-based-term-macros.check b/test/files/run/repl-class-based-term-macros.check index b7b9c94b3399..39b80c1c94c1 100644 --- a/test/files/run/repl-class-based-term-macros.check +++ b/test/files/run/repl-class-based-term-macros.check @@ -14,94 +14,86 @@ import scala.reflect.macros.{blackbox, whitebox} scala> scala> def implBBC(c: blackbox.Context) = c.universe.reify(()) -implBBC: (c: scala.reflect.macros.blackbox.Context)c.universe.Expr[Unit] +def implBBC(c: scala.reflect.macros.blackbox.Context): c.universe.Expr[Unit] scala> def implWBC(c: whitebox.Context) = c.universe.reify(()) -implWBC: (c: scala.reflect.macros.whitebox.Context)c.universe.Expr[Unit] +def implWBC(c: scala.reflect.macros.whitebox.Context): c.universe.Expr[Unit] scala> def implRBBC(c: reflect.macros.blackbox.Context) = c.universe.reify(()) -implRBBC: (c: scala.reflect.macros.blackbox.Context)c.universe.Expr[Unit] +def implRBBC(c: scala.reflect.macros.blackbox.Context): c.universe.Expr[Unit] scala> def implRWBC(c: reflect.macros.whitebox.Context) = c.universe.reify(()) -implRWBC: (c: scala.reflect.macros.whitebox.Context)c.universe.Expr[Unit] +def implRWBC(c: scala.reflect.macros.whitebox.Context): 
c.universe.Expr[Unit] scala> def implSRBBC(c: scala.reflect.macros.blackbox.Context) = c.universe.reify(()) -implSRBBC: (c: scala.reflect.macros.blackbox.Context)c.universe.Expr[Unit] +def implSRBBC(c: scala.reflect.macros.blackbox.Context): c.universe.Expr[Unit] scala> def implSRWBC(c: scala.reflect.macros.whitebox.Context) = c.universe.reify(()) -implSRWBC: (c: scala.reflect.macros.whitebox.Context)c.universe.Expr[Unit] +def implSRWBC(c: scala.reflect.macros.whitebox.Context): c.universe.Expr[Unit] scala> def implRSRBBC(c: _root_.scala.reflect.macros.blackbox.Context) = c.universe.reify(()) -implRSRBBC: (c: scala.reflect.macros.blackbox.Context)c.universe.Expr[Unit] +def implRSRBBC(c: scala.reflect.macros.blackbox.Context): c.universe.Expr[Unit] scala> def implRSRWBC(c: _root_.scala.reflect.macros.whitebox.Context) = c.universe.reify(()) -implRSRWBC: (c: scala.reflect.macros.whitebox.Context)c.universe.Expr[Unit] +def implRSRWBC(c: scala.reflect.macros.whitebox.Context): c.universe.Expr[Unit] scala> scala> def fooBBC: Unit = macro implBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooBBC: Unit = macro implBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def fooWBC: Unit = macro implWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooWBC: Unit = macro implWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. 
scala> def fooRBBC: Unit = macro implRBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooRBBC: Unit = macro implRBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def fooRWBC: Unit = macro implRWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooRWBC: Unit = macro implRWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def fooSRBBC: Unit = macro implSRBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooSRBBC: Unit = macro implSRBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def fooSRWBC: Unit = macro implSRWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooSRWBC: Unit = macro implSRWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. 
scala> def fooRSRBBC: Unit = macro implRSRBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooRSRBBC: Unit = macro implRSRBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def fooRSRWBC: Unit = macro implRSRWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def fooRSRWBC: Unit = macro implRSRWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> @@ -135,73 +127,65 @@ scala> object MacrosModule { def implRSRBBC(c: _root_.scala.reflect.macros.blackbox.Context) = c.universe.reify(()) def implRSRWBC(c: _root_.scala.reflect.macros.whitebox.Context) = c.universe.reify(()) } -defined object MacrosModule +object MacrosModule scala> scala> def barBBC: Unit = macro MacrosModule.implBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barBBC: Unit = macro MacrosModule.implBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def barWBC: Unit = macro MacrosModule.implWBC -:16: error: macro implementation reference has wrong shape. 
required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barWBC: Unit = macro MacrosModule.implWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def barRBBC: Unit = macro MacrosModule.implRBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barRBBC: Unit = macro MacrosModule.implRBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def barRWBC: Unit = macro MacrosModule.implRWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barRWBC: Unit = macro MacrosModule.implRWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def barSRBBC: Unit = macro MacrosModule.implSRBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barSRBBC: Unit = macro MacrosModule.implSRBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. 
scala> def barSRWBC: Unit = macro MacrosModule.implSRWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barSRWBC: Unit = macro MacrosModule.implSRWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def barRSRBBC: Unit = macro MacrosModule.implRSRBBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barRSRBBC: Unit = macro MacrosModule.implRSRBBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def barRSRWBC: Unit = macro MacrosModule.implRSRWBC -:16: error: macro implementation reference has wrong shape. required: -macro [].[[]] or -macro [].[[]] -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def barRSRWBC: Unit = macro MacrosModule.implRSRWBC ^ + error: macro implementation reference has wrong shape. required: + macro [].[[]] or + macro [].[[]] + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. 
scala> @@ -226,78 +210,70 @@ scala> scala> scala> class MacroBundleBBC(val c: blackbox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleBBC +class MacroBundleBBC scala> class MacroBundleWBC(val c: whitebox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleWBC +class MacroBundleWBC scala> class MacroBundleRBBC(val c: reflect.macros.blackbox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleRBBC +class MacroBundleRBBC scala> class MacroBundleRWBC(val c: reflect.macros.whitebox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleRWBC +class MacroBundleRWBC scala> class MacroBundleSRBBC(val c: scala.reflect.macros.blackbox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleSRBBC +class MacroBundleSRBBC scala> class MacroBundleSRWBC(val c: scala.reflect.macros.whitebox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleSRWBC +class MacroBundleSRWBC scala> class MacroBundleRSRBBC(val c: _root_.scala.reflect.macros.blackbox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleRSRBBC +class MacroBundleRSRBBC scala> class MacroBundleRSRWBC(val c: _root_.scala.reflect.macros.whitebox.Context) { def impl = c.universe.reify(()) } -defined class MacroBundleRSRWBC +class MacroBundleRSRWBC scala> scala> def bazBBC: Unit = macro MacroBundleBBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def bazBBC: Unit = macro MacroBundleBBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazWBC: Unit = macro MacroBundleWBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. 
- def bazWBC: Unit = macro MacroBundleWBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazRBBC: Unit = macro MacroBundleRBBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def bazRBBC: Unit = macro MacroBundleRBBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazRWBC: Unit = macro MacroBundleRWBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def bazRWBC: Unit = macro MacroBundleRWBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazSRBBC: Unit = macro MacroBundleSRBBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def bazSRBBC: Unit = macro MacroBundleSRBBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazSRWBC: Unit = macro MacroBundleSRWBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def bazSRWBC: Unit = macro MacroBundleSRWBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazRSRBBC: Unit = macro MacroBundleRSRBBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. 
- def bazRSRBBC: Unit = macro MacroBundleRSRBBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> def bazRSRWBC: Unit = macro MacroBundleRSRWBC.impl -:16: error: macro bundles must be static -note: macro definition is not supported in the REPL when using -Yrepl-classbased. - def bazRSRWBC: Unit = macro MacroBundleRSRWBC.impl ^ + error: macro bundles must be static + note: macro definition is not supported in the REPL when using -Yrepl-classbased, run :replay -Yrepl-class-based:false. scala> // diff --git a/test/files/run/repl-class-based-value-class.check b/test/files/run/repl-class-based-value-class.check index 8ceceb2ca94d..b21736e735af 100644 --- a/test/files/run/repl-class-based-value-class.check +++ b/test/files/run/repl-class-based-value-class.check @@ -1,20 +1,29 @@ scala> class Meter(val value: Int) extends AnyVal -defined class Meter +class Meter scala> val x = new Meter(1) -x: Meter = Meter@XXXXXXXX +val x: Meter = Meter@XXXXXXXX scala> object T { class Meter(val value: Int) extends AnyVal } -defined object T +object T scala> val y = new T.Meter(2) -y: T.Meter = T$Meter@XXXXXXXX +val y: T.Meter = T$Meter@XXXXXXXX scala> object S { object T { class Meter(val value: Int) extends AnyVal } } -defined object S +object S scala> val z = new S.T.Meter(3) -z: S.T.Meter = S$T$Meter@XXXXXXXX +val z: S.T.Meter = S$T$Meter@XXXXXXXX + +scala> case class Foo(x: Int) extends AnyVal +class Foo + +scala> val a = Foo(1) +val a: Foo = Foo(1) + +scala> val b = new Foo(1) +val b: Foo = Foo(1) scala> :quit diff --git a/test/files/run/repl-class-based-value-class.scala b/test/files/run/repl-class-based-value-class.scala index 8754ed0b63c6..0792f8d7335a 100644 --- a/test/files/run/repl-class-based-value-class.scala +++ b/test/files/run/repl-class-based-value-class.scala @@ -15,5 +15,8 @@ object Test extends ReplTest with Hashless { |val y = new T.Meter(2) 
|object S { object T { class Meter(val value: Int) extends AnyVal } } |val z = new S.T.Meter(3) + |case class Foo(x: Int) extends AnyVal + |val a = Foo(1) + |val b = new Foo(1) |""".stripMargin } diff --git a/test/files/run/repl-classbased.check b/test/files/run/repl-classbased.check index e11fc170e5b8..0b8f6d22fd5a 100644 --- a/test/files/run/repl-classbased.check +++ b/test/files/run/repl-classbased.check @@ -1,23 +1,23 @@ scala> case class K(s: String) -defined class K +class K scala> class C { implicit val k: K = K("OK?"); override def toString = s"C($k)" } -defined class C +class C scala> val c = new C -c: C = C(K(OK?)) +val c: C = C(K(OK?)) scala> import c.k import c.k scala> implicitly[K] -res0: K = K(OK?) +val res0: K = K(OK?) scala> val k = 42 -k: Int = 42 +val k: Int = 42 scala> k // was K(OK?) -res1: Int = 42 +val res1: Int = 42 scala> :quit diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 55bfec241250..f881b5ecaa07 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -1,14 +1,14 @@ scala> :type List[1, 2, 3] -:1: error: identifier expected but integer literal found. 
- List[1, 2, 3] - ^ + ^ + error: wrong number of type parameters for method apply: [A](elems: A*): List[A] in trait IterableFactory + scala> :type List(1, 2, 3) List[Int] scala> :type def foo[T](x: T) = List(x) -[T](x: T)List[T] +[T](x: T): List[T] scala> :type val bar = List(Set(1)) List[scala.collection.immutable.Set[Int]] @@ -17,10 +17,10 @@ scala> :type lazy val bar = Set(Set(1)) scala.collection.immutable.Set[scala.collection.immutable.Set[Int]] scala> :type def f[T >: Null, U <: String](x: T, y: U) = Set(x, y) -[T >: Null, U <: String](x: T, y: U)scala.collection.immutable.Set[Any] +[T >: Null, U <: String](x: T, y: U): scala.collection.immutable.Set[Any] scala> :type def x = 1 ; def bar[T >: Null <: AnyRef](xyz: T) = 5 -=> Int [T >: Null <: AnyRef](xyz: T)Int +Int [T >: Null <: AnyRef](xyz: T): Int scala> @@ -34,23 +34,27 @@ scala> :type lazy val f = 5 Int scala> :type protected lazy val f = 5 -:5: error: lazy value f cannot be accessed in object $iw - Access to protected lazy value f not permitted because - enclosing object $eval in package $line13 is not a subclass of - object $iw where target is defined - lazy val $result = f - ^ + + lazy val $result = $line16.$read.INSTANCE.$iw.f + ^ +:4: error: lazy value f cannot be accessed as a member of INSTANCE.$iw from object $eval in package + Access to protected lazy value f not permitted because + enclosing object $eval in package $line16 is not a subclass of + class $iw where target is defined + +(To diagnose errors in synthetic code, try adding `// show` to the end of your input.) 
+ scala> :type def f = 5 -=> Int +Int scala> :type def f() = 5 -()Int +(): Int scala> scala> :type def g[T](xs: Set[_ <: T]) = Some(xs.head) -[T](xs: Set[_ <: T])Some[T] +[T](xs: Set[_ <: T]): Some[T] scala> @@ -75,7 +79,7 @@ TypeRef( ) TypeRef( TypeSymbol( - sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable + sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with LinearSeqOps[A,List,List[A]] with StrictOptimizedLinearSeqOps[A,List,List[A]] with StrictOptimizedSeqOps[A,List,List[A]] with IterableFactoryDefaults[A,List] with DefaultSerializable ) args = List( @@ -87,7 +91,7 @@ TypeRef( scala> :type -v def f[T >: Null, U <: String](x: T, y: U) = Set(x, y) // Type signature -[T >: Null, U <: String](x: T, y: U)scala.collection.immutable.Set[Any] +[T >: Null, U <: String](x: T, y: U): scala.collection.immutable.Set[Any] // Internal Type structure PolyType( @@ -96,7 +100,7 @@ PolyType( params = List(TermSymbol(x: T), TermSymbol(y: U)) resultType = TypeRef( TypeSymbol( - abstract trait Set[A] extends Iterable[A] with Set[A] with GenericSetTemplate[A,scala.collection.immutable.Set] with SetLike[A,scala.collection.immutable.Set[A]] with Parallelizable[A,scala.collection.parallel.immutable.ParSet[A]] + abstract trait Set[A] extends Iterable[A] with Set[A] with SetOps[A,scala.collection.immutable.Set,scala.collection.immutable.Set[A]] with IterableFactoryDefaults[A,scala.collection.immutable.Set] ) args = List(TypeRef(TypeSymbol(abstract class Any extends ))) @@ -106,7 +110,7 @@ PolyType( scala> :type -v def x = 1 ; def bar[T >: Null <: AnyRef](xyz: T) = 5 // Type signature -=> Int [T >: Null <: AnyRef](xyz: T)Int +Int [T >: Null <: AnyRef](xyz: T): Int // Internal Type structure OverloadedType( @@ -137,12 +141,13 @@ TypeRef( TypeRef(TypeSymbol(final abstract class Int extends AnyVal)) TypeRef( TypeSymbol( - abstract trait 
Iterator[+A] extends TraversableOnce[A] + abstract trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A,Iterator,Iterator[A]] + ) args = List( TypeRef( TypeSymbol( - sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable + sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with LinearSeqOps[A,List,List[A]] with StrictOptimizedLinearSeqOps[A,List,List[A]] with StrictOptimizedSeqOps[A,List,List[A]] with IterableFactoryDefaults[A,List] with DefaultSerializable ) args = List( @@ -158,7 +163,7 @@ TypeRef( scala> :type -v def f[T <: AnyVal] = List[T]().combinations _ // Type signature -[T <: AnyVal]=> Int => Iterator[List[T]] +[T <: AnyVal]Int => Iterator[List[T]] // Internal Type structure PolyType( @@ -170,12 +175,13 @@ PolyType( TypeRef(TypeSymbol(final abstract class Int extends AnyVal)) TypeRef( TypeSymbol( - abstract trait Iterator[+A] extends TraversableOnce[A] + abstract trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A,Iterator,Iterator[A]] + ) args = List( TypeRef( TypeSymbol( - sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable + sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with LinearSeqOps[A,List,List[A]] with StrictOptimizedLinearSeqOps[A,List,List[A]] with StrictOptimizedSeqOps[A,List,List[A]] with IterableFactoryDefaults[A,List] with DefaultSerializable ) args = List(TypeParamTypeRef(TypeParam(T <: AnyVal))) @@ -189,7 +195,7 @@ PolyType( scala> :type -v def f[T, U >: T](x: T, y: List[U]) = x :: y // Type signature -[T, U >: T](x: T, y: List[U])List[U] +[T, U >: T](x: T, y: List[U]): List[U] // Internal Type structure PolyType( @@ -198,7 +204,7 @@ PolyType( params = List(TermSymbol(x: T), TermSymbol(y: List[U])) resultType = TypeRef( 
TypeSymbol( - sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable + sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with LinearSeqOps[A,List,List[A]] with StrictOptimizedLinearSeqOps[A,List,List[A]] with StrictOptimizedSeqOps[A,List,List[A]] with IterableFactoryDefaults[A,List] with DefaultSerializable ) args = List(TypeParamTypeRef(TypeParam(U >: T))) diff --git a/test/files/run/repl-completions.check b/test/files/run/repl-completions.check index b86ac0312418..224c7b7e3155 100644 --- a/test/files/run/repl-completions.check +++ b/test/files/run/repl-completions.check @@ -2,13 +2,14 @@ scala> // completions! scala> object O { def x_y_x = 1; def x_y_z = 2; def getFooBarZot = 3} -defined object O +object O scala> :completions O.x [completions] O.x_y_x [completions] O.x_y_z scala> :completions O.x_y_x +[completions] O.x_y_x scala> :completions O.x_y_a @@ -21,15 +22,12 @@ scala> :completions x_y_ scala> :completions x_y_a -scala> :completions fBZ -[completions] getFooBarZot - scala> :completions object O2 { val x = O. [completions] object O2 { val x = O.getFooBarZot [completions] object O2 { val x = O.x_y_x [completions] object O2 { val x = O.x_y_z scala> :completions :completion -[completions] :completions +[completions] ::completions scala> :quit diff --git a/test/files/run/repl-completions.scala b/test/files/run/repl-completions.scala index 6217efb8e4a3..9e585f91255a 100644 --- a/test/files/run/repl-completions.scala +++ b/test/files/run/repl-completions.scala @@ -10,7 +10,6 @@ object Test extends ReplTest { |import O._ |:completions x_y_ |:completions x_y_a - |:completions fBZ |:completions object O2 { val x = O. 
|:completions :completion |""".stripMargin diff --git a/test/files/run/repl-deadlock.check b/test/files/run/repl-deadlock.check index 970b9121cd96..0e9643d1558d 100644 --- a/test/files/run/repl-deadlock.check +++ b/test/files/run/repl-deadlock.check @@ -11,6 +11,6 @@ import scala.concurrent.ExecutionContext.Implicits.global scala> scala> Await.result(Future(42), 1.second) // progression to not deadlocking -res0: Int = 42 +val res0: Int = 42 scala> :quit diff --git a/test/files/run/repl-deadlock.scala b/test/files/run/repl-deadlock.scala index 238af813c2ba..2a51da215067 100644 --- a/test/files/run/repl-deadlock.scala +++ b/test/files/run/repl-deadlock.scala @@ -1,12 +1,6 @@ -import scala.tools.nsc.Settings import scala.tools.partest.ReplTest object Test extends ReplTest { - override def transformSettings(s: Settings) = { - s.Yreplclassbased.value = true // TODO: drop when it's default true, to assert it works out the box - s - } - def code = """ |import scala.concurrent._ |import scala.concurrent.duration._ diff --git a/test/files/run/repl-errors.check b/test/files/run/repl-errors.check new file mode 100644 index 000000000000..12cc4c32e6db --- /dev/null +++ b/test/files/run/repl-errors.check @@ -0,0 +1,14 @@ + +scala> '\060' + ^ + error: octal escape literals are unsupported: use \u0030 instead + +scala> def foo() { } + ^ + warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `foo`'s return type [quickfixable] +def foo(): Unit + +scala> @annotation.nowarn def sshhh() { } +def sshhh(): Unit + +scala> :quit diff --git a/test/files/run/repl-errors.scala b/test/files/run/repl-errors.scala new file mode 100644 index 000000000000..cb7f2150465c --- /dev/null +++ b/test/files/run/repl-errors.scala @@ -0,0 +1,11 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def extraSettings = "-deprecation" + + def code = """ +'\060' +def foo() { } +@annotation.nowarn def sshhh() { } + """.trim +} diff --git 
a/test/files/run/repl-inline.check b/test/files/run/repl-inline.check index c6b363a86a74..1a57df043842 100644 --- a/test/files/run/repl-inline.check +++ b/test/files/run/repl-inline.check @@ -1,10 +1,10 @@ -callerOfCaller: String -g: String -h: String -g: String -h: String -callerOfCaller: String -g: String -h: String -g: String -h: String +def callerOfCaller: String +def g: String +def h: String +def g: String +def h: String +def callerOfCaller: String +def g: String +def h: String +def g: String +def h: String diff --git a/test/files/run/repl-inline.scala b/test/files/run/repl-inline.scala index 5c727bd841a0..6f499a81734e 100644 --- a/test/files/run/repl-inline.scala +++ b/test/files/run/repl-inline.scala @@ -1,24 +1,25 @@ import scala.tools.nsc._ +import scala.tools.nsc.interpreter.shell.ReplReporterImpl object Test { val testCode = """ def callerOfCaller = Thread.currentThread.getStackTrace.drop(2).head.getMethodName -def g = callerOfCaller -def h = g +@noinline def g = callerOfCaller +@noinline def h = g assert(h == "g", h) @inline def g = callerOfCaller -def h = g +@noinline def h = g assert(h == "h", h) """ - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { def test(f: Settings => Unit): Unit = { val settings = new Settings() - settings.processArgumentString("-opt:l:inline -opt-inline-from:** -opt-warnings") + settings.processArgumentString("-opt:inline:** -Wopt") f(settings) settings.usejavacp.value = true - val repl = new interpreter.IMain(settings) + val repl = new interpreter.IMain(settings, new ReplReporterImpl(settings)) testCode.linesIterator.foreach(repl.interpret(_)) } test(_ => ()) diff --git a/test/files/run/repl-kind.check b/test/files/run/repl-kind.check index 6f591eb86d58..099619c2fc21 100644 --- a/test/files/run/repl-kind.check +++ b/test/files/run/repl-kind.check @@ -13,23 +13,23 @@ Either's kind is F[+A1,+A2] * -(+)-> * -(+)-> * This is a type constructor: a 1st-order-kinded type. 
-scala> :k -v scala.collection.generic.ImmutableSortedMapFactory -scala.collection.generic.ImmutableSortedMapFactory's kind is X[CC[A,B] <: scala.collection.immutable.SortedMap[A,B] with scala.collection.SortedMapLike[A,B,CC[A,B]]] -(* -> * -> *(scala.collection.immutable.SortedMap[A,B] with scala.collection.SortedMapLike[A,B,CC[A,B]])) -> * +scala> :k -v scala.collection.SortedMapOps +scala.collection.SortedMapOps's kind is X[K,+V,+CC[X,Y] <: [X, Y]scala.collection.Map[X,Y] with scala.collection.SortedMapOps[X, Y, CC, _],+C <: scala.collection.SortedMapOps[K,V,CC,C]] +* -> * -(+)-> (* -> * -> *([X, Y]scala.collection.Map[X,Y] with scala.collection.SortedMapOps[X, Y, CC, _])) -(+)-> *(scala.collection.SortedMapOps[K,V,CC,C]) -(+)-> * This is a type constructor that takes type constructor(s): a higher-kinded type. -scala> :kind -v Predef.Pair -Pair's kind is F[+A1,+A2] +scala> :kind -v Tuple2 +Tuple2's kind is F[+A1,+A2] * -(+)-> * -(+)-> * This is a type constructor: a 1st-order-kinded type. -scala> :kind -v Predef.Pair[Int, Int] -Pair[Int,Int]'s kind is A +scala> :kind -v Tuple2[Int, Int] +(Int, Int)'s kind is A * This is a proper type. scala> trait Functor[F[_]] {} -defined trait Functor +trait Functor scala> :kind -v Functor Functor's kind is X[F[A]] @@ -37,7 +37,7 @@ Functor's kind is X[F[A]] This is a type constructor that takes type constructor(s): a higher-kinded type. 
scala> object Bar { class Bop } -defined object Bar +object Bar scala> import Bar.Bop import Bar.Bop @@ -46,7 +46,7 @@ scala> :kind Bop Bar.Bop's kind is A scala> type IntTuple[+A] = (Int, A) -defined type alias IntTuple +type IntTuple scala> :kind IntTuple IntTuple's kind is F[+A] @@ -55,16 +55,16 @@ scala> :kind ({type l[A] = Either[Int, A]})#l scala.util.Either[Int,?]'s kind is F[+A] scala> trait Nat[-F1[_], +F2[_]] {} -defined trait Nat +trait Nat scala> type ~>[-F1[_], +F2[_]] = Nat[F1, F2] -defined type alias $tilde$greater +type $tilde$greater scala> :kind ({type l[F[-_]] = F ~> List})#l Nat[?,[+A]List[A]]'s kind is X[-F[A]] scala> :kind 5 -: error: type 5 was not found +5's kind is A scala> :kind Nonexisting : error: type Nonexisting was not found diff --git a/test/files/run/repl-kind.scala b/test/files/run/repl-kind.scala index 7f7c2ef1f3e0..e83fa19b19fa 100644 --- a/test/files/run/repl-kind.scala +++ b/test/files/run/repl-kind.scala @@ -8,9 +8,9 @@ object Test extends ReplTest { :kind scala.Option :k (Int, Int) => Int :k -v Either -:k -v scala.collection.generic.ImmutableSortedMapFactory -:kind -v Predef.Pair -:kind -v Predef.Pair[Int, Int] +:k -v scala.collection.SortedMapOps +:kind -v Tuple2 +:kind -v Tuple2[Int, Int] trait Functor[F[_]] {} :kind -v Functor object Bar { class Bop } diff --git a/test/files/run/repl-no-imports-no-predef-classbased.check b/test/files/run/repl-no-imports-no-predef-classbased.check index a796600061c4..3646429d69b5 100644 --- a/test/files/run/repl-no-imports-no-predef-classbased.check +++ b/test/files/run/repl-no-imports-no-predef-classbased.check @@ -1,23 +1,23 @@ scala> case class K(s: java.lang.String) -defined class K +class K scala> class C { implicit val k: K = K("OK?"); override def toString = "C(" + k.toString + ")" } -defined class C +class C scala> val c = new C -c: C = C(K(OK?)) +val c: C = C(K(OK?)) scala> import c.k import c.k scala> scala.Predef.implicitly[K] -res0: K = K(OK?) +val res0: K = K(OK?) 
scala> val k = 42 -k: Int = 42 +val k: Int = 42 scala> k // was K(OK?) -res1: Int = 42 +val res1: Int = 42 scala> :quit diff --git a/test/files/run/repl-no-imports-no-predef-power.check b/test/files/run/repl-no-imports-no-predef-power.check index a1b8060ff212..52140e1b5c2c 100644 --- a/test/files/run/repl-no-imports-no-predef-power.check +++ b/test/files/run/repl-no-imports-no-predef-power.check @@ -7,23 +7,22 @@ Try :help or completions for vals._ and power._ scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." -warning: one deprecation (since 2.11.0); for details, enable `:setting -deprecation' or `:replay -deprecation' -res0: $r.global.noSelfType.type = private val _ = _ +warning: 1 deprecation (since 2.11.0); for details, enable `:setting -deprecation` or `:replay -deprecation` +val res0: $r.global.noSelfType.type = private val _ = _ scala> val tp = ArrayClass[scala.util.Random] // magic with tags -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -tp: $r.global.Type = Array[scala.util.Random] +val tp: $r.global.Type = Array[scala.util.Random] scala> tp.memberType(Array_apply) // evidence -res1: $r.global.Type = (i: Int)scala.util.Random +val res1: $r.global.Type = (i: Int): scala.util.Random scala> val m = LIT(10) // treedsl -m: $r.treedsl.global.Literal = 10 +val m: $r.treedsl.global.Literal = 10 scala> typed(m).tpe // typed is in scope -res2: $r.treedsl.global.Type = Int(10) +val res2: $r.treedsl.global.Type = Int(10) scala> """escaping is hard, m'kah""" -res3: String = escaping is hard, m'kah +val res3: String = escaping is hard, m'kah scala> :quit diff --git a/test/files/run/repl-no-imports-no-predef.check b/test/files/run/repl-no-imports-no-predef.check index c3dc93541b8d..d732a8e5e654 100644 --- a/test/files/run/repl-no-imports-no-predef.check +++ b/test/files/run/repl-no-imports-no-predef.check @@ -1,208 +1,185 @@ scala> 
1 -res0: Int = 1 +val res0: Int = 1 scala> 1.0 -res1: Double = 1.0 +val res1: Double = 1.0 scala> () scala> "abc" -res3: String = abc +val res3: String = abc scala> (1, 2) -res4: (Int, Int) = (1,2) +val res4: (Int, Int) = (1,2) scala> scala> { import scala.Predef.ArrowAssoc; 1 -> 2 } -res5: (Int, Int) = (1,2) - -scala> { import scala.Predef.ArrowAssoc; 1 → 2 } -res6: (Int, Int) = (1,2) +val res5: (Int, Int) = (1,2) scala> 1 -> 2 -:12: error: value -> is not a member of Int - 1 -> 2 - ^ - -scala> 1 → 2 -:12: error: value → is not a member of Int - 1 → 2 ^ + error: value -> is not a member of Int + did you mean >>? scala> scala> val answer = 42 -answer: Int = 42 +val answer: Int = 42 scala> { import scala.StringContext; s"answer: $answer" } -res9: String = answer: 42 +val res7: String = answer: 42 scala> s"answer: $answer" -:13: error: not found: value StringContext - s"answer: $answer" ^ + error: not found: value StringContext scala> scala> "abc" + true -res11: String = abctrue +val res9: String = abctrue scala> -scala> { import scala.Predef.any2stringadd; true + "abc" } -res12: String = trueabc - scala> true + "abc" -:12: error: value + is not a member of Boolean - true + "abc" ^ + error: value + is not a member of Boolean scala> scala> var x = 10 -x: Int = 10 +var x: Int = 10 scala> var y = 11 -y: Int = 11 +var y: Int = 11 scala> x = 12 -x: Int = 12 +// mutated x scala> y = 13 -y: Int = 13 +// mutated y scala> scala> 2 ; 3 -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 2 ;; ^ -res14: Int = 3 + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res11: Int = 3 scala> { 2 ; 3 } -:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses - { 2 ; 3 } ^ -res15: Int = 3 + warning: a pure expression does nothing in statement position; multiline expressions 
might require enclosing parentheses +val res12: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { 1 + 2 + 3 } ; bippy+88+11 -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -defined object Cow -defined class Moo -bippy: Int -res16: Int = 105 + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +object Cow +class Moo +def bippy: Int +val res13: Int = 105 scala> scala> object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy -defined object Bovine -defined class Ruminant -res17: Int = 216 +object Bovine +class Ruminant +val res14: Int = 216 scala> Bovine.x = scala.List(Ruminant(5), Cow, new Moo) -Bovine.x: List[Any] 
= List(Ruminant(5), Cow, Moooooo) +// mutated Bovine.x scala> Bovine.x -res18: List[Any] = List(Ruminant(5), Cow, Moooooo) +val res15: List[Any] = List(Ruminant(5), Cow, Moooooo) scala> scala> (2) -res19: Int = 2 +val res16: Int = 2 scala> (2 + 2) -res20: Int = 4 +val res17: Int = 4 scala> ((2 + 2)) -res21: Int = 4 +val res18: Int = 4 scala> ((2 + 2)) -res22: Int = 4 +val res19: Int = 4 scala> ( (2 + 2)) -res23: Int = 4 +val res20: Int = 4 scala> ( (2 + 2 ) ) -res24: Int = 4 +val res21: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; ( (2 + 2 ) ) ;; ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; ( (2 + 2 ) ) ;; + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -res25: Int = 5 + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res22: Int = 5 scala> (((2 + 2)), ((2 + 2))) -res26: (Int, Int) = (4,4) +val res23: (Int, Int) = (4,4) scala> (((2 + 2)), ((2 + 2)), 2) -res27: (Int, Int, Int) = (4,4,2) +val res24: (Int, Int, Int) = (4,4,2) scala> (((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString) -res28: String = 4423 +val res25: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ; ((2 + 2)) ;; ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ; ((2 + 2)) ;; + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -res29: (Int, Int, Int) = (1,2,3) + warning: a pure expression does nothing in statement position; multiline 
expressions may require enclosing parentheses +val res26: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: scala.Int) => x + 1 ; () => ((5)) -:12: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ; (x: scala.Int) => x + 1 ;; ^ -res30: () => Int = + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + ^ + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res27: () => Int = scala> scala> () => 5 -res31: () => Int = +val res28: () => Int = scala> 55 ; () => 5 -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ;; ^ -res32: () => Int = + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res29: () => Int = scala> () => { class X ; new X } -res33: () => AnyRef = +val res30: () => AnyRef = scala> scala> def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z -foo: (x: Int)(y: Int)(z: Int)Int +def foo(x: Int)(y: Int)(z: Int): Int scala> foo(5)(10)(15)+foo(5)(10)(15) -res34: Int = 60 +val res31: Int = 60 scala> scala> scala.List(1) ++ scala.List('a') -res35: List[AnyVal] = List(1, a) +val res32: List[AnyVal] = List(1, a) scala> @@ -211,26 +188,22 @@ scala> :paste < EOF class C { def c = 42 } EOF - -// Exiting paste mode, now interpreting. - -defined class C +// Exiting paste mode... now interpreting. +class C scala> new C().c -res36: Int = 42 +val res33: Int = 42 scala> :paste <| EOF // Entering paste mode (EOF to finish) class D { def d = 42 } EOF - -// Exiting paste mode, now interpreting. - -defined class D +// Exiting paste mode... now interpreting. 
+class D scala> new D().d -res37: Int = 42 +val res34: Int = 42 scala> @@ -247,34 +220,32 @@ object Dingus private val x = 55 } EOF - -// Exiting paste mode, now interpreting. - -defined class Dingus -defined object Dingus +// Exiting paste mode... now interpreting. +class Dingus +object Dingus scala> val x = (new Dingus).y -x: Int = 110 +val x: Int = 110 scala> scala> val x1 = 1 -x1: Int = 1 +val x1: Int = 1 scala> val x2 = 2 -x2: Int = 2 +val x2: Int = 2 scala> val x3 = 3 -x3: Int = 3 +val x3: Int = 3 scala> case class BippyBungus() -defined class BippyBungus +class BippyBungus scala> x1 + x2 + x3 -res38: Int = 6 +val res35: Int = 6 scala> :reset -Resetting interpreter state. +Resetting REPL state. Forgetting this session history: 1 @@ -283,11 +254,9 @@ Forgetting this session history: "abc" (1, 2) { import scala.Predef.ArrowAssoc; 1 -> 2 } -{ import scala.Predef.ArrowAssoc; 1 → 2 } val answer = 42 { import scala.StringContext; s"answer: $answer" } "abc" + true -{ import scala.Predef.any2stringadd; true + "abc" } var x = 10 var y = 11 x = 12 @@ -333,28 +302,24 @@ Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, C Forgetting defined types: BippyBungus, C, D, Dingus, Moo, Ruminant scala> x1 + x2 + x3 -:12: error: not found: value x1 - x1 + x2 + x3 ^ -:12: error: not found: value x2 - x1 + x2 + x3 + error: not found: value x1 ^ -:12: error: not found: value x3 - x1 + x2 + x3 + error: not found: value x2 ^ + error: not found: value x3 scala> val x1 = 4 -x1: Int = 4 +val x1: Int = 4 scala> new BippyBungus -:12: error: not found: type BippyBungus - new BippyBungus ^ + error: not found: type BippyBungus scala> class BippyBungus() { def f = 5 } -defined class BippyBungus +class BippyBungus scala> { new BippyBungus ; x1 } -res2: Int = 4 +val res2: Int = 4 scala> :quit diff --git a/test/files/run/repl-no-imports-no-predef.scala b/test/files/run/repl-no-imports-no-predef.scala index 71504f277ee1..94c120f4b038 100644 --- 
a/test/files/run/repl-no-imports-no-predef.scala +++ b/test/files/run/repl-no-imports-no-predef.scala @@ -16,9 +16,7 @@ object Test extends ReplTest with Lambdaless { (1, 2) { import scala.Predef.ArrowAssoc; 1 -> 2 } -{ import scala.Predef.ArrowAssoc; 1 → 2 } 1 -> 2 -1 → 2 val answer = 42 { import scala.StringContext; s"answer: $answer" } @@ -26,7 +24,6 @@ s"answer: $answer" "abc" + true -{ import scala.Predef.any2stringadd; true + "abc" } true + "abc" var x = 10 diff --git a/test/files/run/repl-no-uescape.check b/test/files/run/repl-no-uescape.check index 01eeafaa701f..2525a74abf2f 100644 --- a/test/files/run/repl-no-uescape.check +++ b/test/files/run/repl-no-uescape.check @@ -1,5 +1,5 @@ scala> object A -defined object A +object A scala> :quit diff --git a/test/files/run/repl-no-uescape.scala b/test/files/run/repl-no-uescape.scala index 1865109ebf09..05a9f7bdcc1e 100644 --- a/test/files/run/repl-no-uescape.scala +++ b/test/files/run/repl-no-uescape.scala @@ -1,5 +1,4 @@ import scala.tools.partest.ReplTest -import scala.tools.nsc.Settings /* scala> object A @@ -21,10 +20,6 @@ scala> 42 + "\u001B[1m\u001B[34mres0\u001B[0m: \u001B[1m\u001B[32mInt\u001B[0m = " + scala.runtime.ScalaRunTime.replStringOf(res0, 1000) */ object Test extends ReplTest { - override def transformSettings(settings: Settings): Settings = { - settings.nouescape.value = true - settings - } def code = """ object A """ diff --git a/test/files/run/repl-out-dir.check b/test/files/run/repl-out-dir.check index 6fd85f5bbab2..2dcc617958b0 100644 --- a/test/files/run/repl-out-dir.check +++ b/test/files/run/repl-out-dir.check @@ -1,11 +1,13 @@ scala> case class Bippy(x: Int) -defined class Bippy +class Bippy scala> val x = Bippy(1) -x: Bippy = Bippy(1) +val x: Bippy = Bippy(1) -scala> $intp.showDirectory +scala> $intp.reporter.withoutUnwrapping { + println($intp.showDirectory) +} repl-out-dir-run.obj $line1 $eval$.class @@ -13,35 +15,32 @@ repl-out-dir-run.obj $line2 $eval$.class $eval.class - 
$read$$iw$$iw$.class - $read$$iw$.class + $read$$iw.class $read$.class $read.class $line3 $eval$.class $eval.class - $read$$iw$$iw$.class - $read$$iw$$iw$Bippy$.class - $read$$iw$$iw$Bippy.class - $read$$iw$.class + $read$$iw$Bippy$.class + $read$$iw$Bippy.class + $read$$iw.class $read$.class $read.class $line4 $eval$.class $eval.class - $read$$iw$$iw$.class - $read$$iw$.class + $read$$iw.class $read$.class $read.class $line5 $eval$.class $eval.class - $read$$iw$$iw$.class - $read$$iw$.class + $read$$iw.class $read$.class $read.class $repl_$init.class Test$.class Test.class + scala> :quit diff --git a/test/files/run/repl-out-dir.scala b/test/files/run/repl-out-dir.scala index 33c823aa2d78..8d94b8ba5920 100644 --- a/test/files/run/repl-out-dir.scala +++ b/test/files/run/repl-out-dir.scala @@ -1,13 +1,14 @@ import scala.tools.partest.ReplTest -import scala.tools.nsc.Settings object Test extends ReplTest { override def extraSettings = s"-Yrepl-outdir ${testOutput.path}" - def code = s""" + def code = """ case class Bippy(x: Int) val x = Bippy(1) -$$intp.showDirectory +$intp.reporter.withoutUnwrapping { + println($intp.showDirectory) +} """ } diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check index b9871cfba17f..7b394a389867 100644 --- a/test/files/run/repl-parens.check +++ b/test/files/run/repl-parens.check @@ -1,82 +1,78 @@ scala> (2) -res0: Int = 2 +val res0: Int = 2 scala> (2 + 2) -res1: Int = 4 +val res1: Int = 4 scala> ((2 + 2)) -res2: Int = 4 +val res2: Int = 4 scala> ((2 + 2)) -res3: Int = 4 +val res3: Int = 4 scala> ( (2 + 2)) -res4: Int = 4 +val res4: Int = 4 scala> ( (2 + 2 ) ) -res5: Int = 4 +val res5: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; ( (2 + 2 ) ) ;; ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 5 ; ( (2 + 2 ) 
) ;; + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -res6: Int = 5 + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res6: Int = 5 scala> (((2 + 2)), ((2 + 2))) -res7: (Int, Int) = (4,4) +val res7: (Int, Int) = (4,4) scala> (((2 + 2)), ((2 + 2)), 2) -res8: (Int, Int, Int) = (4,4,2) +val res8: (Int, Int, Int) = (4,4,2) scala> (((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3)).mkString) -res9: String = 4423 +val res9: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ; ((2 + 2)) ;; ^ -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ; ((2 + 2)) ;; + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses ^ -res10: (Int, Int, Int) = (1,2,3) + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res10: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ; (x: Int) => x + 1 ;; ^ -res11: () => Int = + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + ^ + warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res11: () => Int = scala> scala> () => 5 -res12: () => Int = +val res12: () => Int = scala> 55 ; () => 5 -:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 55 ;; ^ -res13: () => Int = + warning: a pure 
expression does nothing in statement position; multiline expressions may require enclosing parentheses +val res13: () => Int = scala> () => { class X ; new X } -res14: () => AnyRef = +val res14: () => AnyRef = scala> scala> def foo(x: Int)(y: Int)(z: Int) = x+y+z -foo: (x: Int)(y: Int)(z: Int)Int +def foo(x: Int)(y: Int)(z: Int): Int scala> foo(5)(10)(15)+foo(5)(10)(15) -res15: Int = 60 +val res15: Int = 60 scala> scala> List(1) ++ List('a') -res16: List[AnyVal] = List(1, a) +val res16: List[AnyVal] = List(1, a) scala> :quit diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check index 4c589df41a58..c95a0249fd29 100644 --- a/test/files/run/repl-paste-2.check +++ b/test/files/run/repl-paste-2.check @@ -30,30 +30,30 @@ res10: Int = 12 // Replaying 8 commands from transcript. scala> 999l -res0: Long = 999 +warning: 1 deprecation (since 2.13.0); for details, enable `:setting -deprecation` or `:replay -deprecation` +val res0: Long = 999 scala> val res5 = { 123 } -res5: Int = 123 +val res5: Int = 123 scala> val res6 = { 567 } -res6: Int = 567 +val res6: Int = 567 scala> res5 + res6 -res1: Int = 690 +val res1: Int = 690 scala> val x = dingus -:11: error: not found: value dingus - val x = dingus ^ + error: not found: value dingus scala> val x = "dingus" -x: String = dingus +val x: String = dingus scala> x.length -res2: Int = 6 +val res2: Int = 6 scala> x.length + res5 -res3: Int = 129 +val res3: Int = 129 scala> :quit diff --git a/test/files/run/repl-paste-2.scala b/test/files/run/repl-paste-2.scala index 65f9b25175a2..9a8f07ee2b39 100644 --- a/test/files/run/repl-paste-2.scala +++ b/test/files/run/repl-paste-2.scala @@ -28,4 +28,4 @@ res9: Int = 6 scala> x.length + res5 res10: Int = 12 """ -} \ No newline at end of file +} diff --git a/test/files/run/repl-paste-3.check b/test/files/run/repl-paste-3.check index 603fcfa1740a..6b9a381b2648 100644 --- a/test/files/run/repl-paste-3.check +++ b/test/files/run/repl-paste-3.check @@ -3,6 +3,6 @@ scala> 
println(3) 3 scala> List(1,2) -res1: List[Int] = List(1, 2) +val res1: List[Int] = List(1, 2) scala> :quit diff --git a/test/files/run/repl-paste-3.scala b/test/files/run/repl-paste-3.scala index 3f26799ccb39..b9b3db5b63f9 100644 --- a/test/files/run/repl-paste-3.scala +++ b/test/files/run/repl-paste-3.scala @@ -5,4 +5,4 @@ object Test extends ReplTest { println(3) List(1,2) """ -} \ No newline at end of file +} diff --git a/test/files/run/repl-paste-4.scala b/test/files/run/repl-paste-4.scala index 4f8d1d208bc7..0e6941274179 100644 --- a/test/files/run/repl-paste-4.scala +++ b/test/files/run/repl-paste-4.scala @@ -6,11 +6,12 @@ object Test extends SessionTest { s"""| |scala> :paste $pastie |Pasting file $pastie... - |defined class Foo - |defined object Foo + |// Exiting paste mode... now interpreting. + |class Foo + |object Foo | |scala> Foo(new Foo) - |res0: Int = 7 + |val res0: Int = 7 | |scala> :quit""" def pastie = testPath changeExtension "pastie" diff --git a/test/files/run/repl-paste-5.check b/test/files/run/repl-paste-5.check index 8b97b8888d7f..4459c72baedf 100644 --- a/test/files/run/repl-paste-5.check +++ b/test/files/run/repl-paste-5.check @@ -4,25 +4,21 @@ scala> :paste < EOF class C { def c = 42 } EOF - -// Exiting paste mode, now interpreting. - -defined class C +// Exiting paste mode... now interpreting. +class C scala> new C().c -res0: Int = 42 +val res0: Int = 42 scala> :paste <| EOF // Entering paste mode (EOF to finish) |class D { def d = 42 } EOF - -// Exiting paste mode, now interpreting. - -defined class D +// Exiting paste mode... now interpreting. 
+class D scala> new D().d -res1: Int = 42 +val res1: Int = 42 scala> :quit diff --git a/test/files/run/repl-paste-6.check b/test/files/run/repl-paste-6.check old mode 100755 new mode 100644 index efcea9274fa1..5033f7e55b9a --- a/test/files/run/repl-paste-6.check +++ b/test/files/run/repl-paste-6.check @@ -5,13 +5,11 @@ scala> :paste < EOF case class C(i: Int) val c = C(42) EOF - -// Exiting paste mode, now interpreting. - -defined class C -c: C = C(42) +// Exiting paste mode... now interpreting. +class C +val c: C = C(42) scala> val d: C = c // shew -d: C = C(42) +val d: C = C(42) scala> :quit diff --git a/test/files/run/repl-paste-b.check b/test/files/run/repl-paste-b.check index 2e205d48d65f..cc3a3db405c8 100644 --- a/test/files/run/repl-paste-b.check +++ b/test/files/run/repl-paste-b.check @@ -4,10 +4,8 @@ scala> :paste < EOF object X EOF - -// Exiting paste mode, now interpreting. - -defined object X +// Exiting paste mode... now interpreting. +object X scala> assert(X.getClass.getName.contains("line")) diff --git a/test/files/run/repl-paste-error.check b/test/files/run/repl-paste-error.check new file mode 100644 index 000000000000..265b69e83d5d --- /dev/null +++ b/test/files/run/repl-paste-error.check @@ -0,0 +1,21 @@ + +scala> :paste < END +// Entering paste mode (END to finish) + +def f(): Unit = { + "hello" + 42 +} +END +// Exiting paste mode... now interpreting. 
+ + "hello" + ^ +:2: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + + 42 + ^ +:3: warning: discarded pure expression does nothing +def f(): Unit + +scala> :quit diff --git a/test/files/run/repl-paste-error.scala b/test/files/run/repl-paste-error.scala new file mode 100644 index 000000000000..0b557d5c438f --- /dev/null +++ b/test/files/run/repl-paste-error.scala @@ -0,0 +1,14 @@ + +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + + def code = """ + |:paste < END + |def f(): Unit = { + | "hello" + | 42 + |} + |END + """.stripMargin.trim +} diff --git a/test/files/run/repl-paste-parse.check b/test/files/run/repl-paste-parse.check deleted file mode 100755 index e7871c1dc755..000000000000 --- a/test/files/run/repl-paste-parse.check +++ /dev/null @@ -1,7 +0,0 @@ - -scala> - -scala> repl-paste-parse.script:1: error: illegal start of simple pattern -val case = 9 - ^ -:quit diff --git a/test/files/run/repl-paste-parse.scala b/test/files/run/repl-paste-parse.scala deleted file mode 100644 index 9f10c9abe68b..000000000000 --- a/test/files/run/repl-paste-parse.scala +++ /dev/null @@ -1,19 +0,0 @@ -import scala.tools.partest.ReplTest -import scala.tools.nsc.Settings -import scala.tools.nsc.GenericRunnerSettings -import scala.tools.nsc.settings.MutableSettings - -object Test extends ReplTest { - def scriptPath = testPath.changeExtension("script") - override def transformSettings(s: Settings) = s match { - case m: MutableSettings => - val t = new GenericRunnerSettings(s.errorFn) - m copyInto t - t processArgumentString s"-usejavacp -i $scriptPath" - t - case _ => s - } - - def code = "" -} - diff --git a/test/files/run/repl-paste-parse.script b/test/files/run/repl-paste-parse.script deleted file mode 100644 index 903f6e7b0c07..000000000000 --- a/test/files/run/repl-paste-parse.script +++ /dev/null @@ -1 +0,0 @@ -val case = 9 diff --git a/test/files/run/repl-paste-raw-b.scala 
b/test/files/run/repl-paste-raw-b.scala index fbbfb06e4248..60c5c2ea0bc0 100644 --- a/test/files/run/repl-paste-raw-b.scala +++ b/test/files/run/repl-paste-raw-b.scala @@ -6,12 +6,13 @@ object Test extends SessionTest { s"""| |scala> :paste $pastie |Pasting file $pastie... + |// Exiting paste mode... now compiling with scalac. | |scala> val favoriteThing = brown_paper.Gift(true) - |favoriteThing: brown_paper.Gift = Gift(true) + |val favoriteThing: brown_paper.Gift = Gift(true) | |scala> favoriteThing.hasString - |res0: Boolean = true + |val res0: Boolean = true | |scala> :quit""" def pastie = testPath changeExtension "pastie" diff --git a/test/files/run/repl-paste-raw-c.scala b/test/files/run/repl-paste-raw-c.scala index b4a1505d971c..d0859ae26644 100644 --- a/test/files/run/repl-paste-raw-c.scala +++ b/test/files/run/repl-paste-raw-c.scala @@ -6,9 +6,11 @@ object Test extends SessionTest { s"""| |scala> :paste -raw $pastie |Pasting file $pastie... - |$pastie:3: error: expected class or object definition + |// Exiting paste mode... now compiling with scalac. + | |val nope = 42 |^ + |$pastie:3: error: expected class or object definition |There were compilation errors! | |scala> :quit""" diff --git a/test/files/run/repl-paste-raw.scala b/test/files/run/repl-paste-raw.scala index fbbfb06e4248..60c5c2ea0bc0 100644 --- a/test/files/run/repl-paste-raw.scala +++ b/test/files/run/repl-paste-raw.scala @@ -6,12 +6,13 @@ object Test extends SessionTest { s"""| |scala> :paste $pastie |Pasting file $pastie... + |// Exiting paste mode... now compiling with scalac. 
| |scala> val favoriteThing = brown_paper.Gift(true) - |favoriteThing: brown_paper.Gift = Gift(true) + |val favoriteThing: brown_paper.Gift = Gift(true) | |scala> favoriteThing.hasString - |res0: Boolean = true + |val res0: Boolean = true | |scala> :quit""" def pastie = testPath changeExtension "pastie" diff --git a/test/files/run/repl-paste.check b/test/files/run/repl-paste.check index 3bcfd33c9430..a70bd90f2a15 100644 --- a/test/files/run/repl-paste.check +++ b/test/files/run/repl-paste.check @@ -14,11 +14,9 @@ object Dingus } val x = (new Dingus).y - -// Exiting paste mode, now interpreting. - -defined class Dingus -defined object Dingus -x: Int = 110 +// Exiting paste mode... now interpreting. +class Dingus +object Dingus +val x: Int = 110 scala> :quit diff --git a/test/files/run/repl-paste.scala b/test/files/run/repl-paste.scala index 5495505353b9..378a927a3a9a 100644 --- a/test/files/run/repl-paste.scala +++ b/test/files/run/repl-paste.scala @@ -16,4 +16,4 @@ object Dingus val x = (new Dingus).y """ ) -} \ No newline at end of file +} diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check index a1b8060ff212..52140e1b5c2c 100644 --- a/test/files/run/repl-power.check +++ b/test/files/run/repl-power.check @@ -7,23 +7,22 @@ Try :help or completions for vals._ and power._ scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." 
-warning: one deprecation (since 2.11.0); for details, enable `:setting -deprecation' or `:replay -deprecation' -res0: $r.global.noSelfType.type = private val _ = _ +warning: 1 deprecation (since 2.11.0); for details, enable `:setting -deprecation` or `:replay -deprecation` +val res0: $r.global.noSelfType.type = private val _ = _ scala> val tp = ArrayClass[scala.util.Random] // magic with tags -warning: one feature warning; for details, enable `:setting -feature' or `:replay -feature' -tp: $r.global.Type = Array[scala.util.Random] +val tp: $r.global.Type = Array[scala.util.Random] scala> tp.memberType(Array_apply) // evidence -res1: $r.global.Type = (i: Int)scala.util.Random +val res1: $r.global.Type = (i: Int): scala.util.Random scala> val m = LIT(10) // treedsl -m: $r.treedsl.global.Literal = 10 +val m: $r.treedsl.global.Literal = 10 scala> typed(m).tpe // typed is in scope -res2: $r.treedsl.global.Type = Int(10) +val res2: $r.treedsl.global.Type = Int(10) scala> """escaping is hard, m'kah""" -res3: String = escaping is hard, m'kah +val res3: String = escaping is hard, m'kah scala> :quit diff --git a/test/files/run/repl-previous-result.check b/test/files/run/repl-previous-result.check new file mode 100644 index 000000000000..31cb8461c3cb --- /dev/null +++ b/test/files/run/repl-previous-result.check @@ -0,0 +1,8 @@ + +scala> "foobar" +val res0: String = foobar + +scala> .size +val res1: Int = 6 + +scala> :quit diff --git a/test/files/run/repl-previous-result.scala b/test/files/run/repl-previous-result.scala new file mode 100644 index 000000000000..4caacac332e8 --- /dev/null +++ b/test/files/run/repl-previous-result.scala @@ -0,0 +1,8 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def code = """ + |"foobar" + |.size + """.stripMargin.trim +} diff --git a/test/files/run/repl-release.check b/test/files/run/repl-release.check new file mode 100644 index 000000000000..4b245b2dd5da --- /dev/null +++ 
b/test/files/run/repl-release.check @@ -0,0 +1,42 @@ + +scala> def callerOfCaller = Thread.currentThread.getStackTrace.drop(2).head.getMethodName +def callerOfCaller: String + +scala> @noinline def g = callerOfCaller +def g: String + +scala> @noinline def h = g +def h: String + +scala> assert(h == "g", h) + +scala> @inline def g = callerOfCaller +def g: String + +scala> @noinline def h = g +def h: String + +scala> assert(h == "h", h) + +scala> :quit + +scala> def callerOfCaller = Thread.currentThread.getStackTrace.drop(2).head.getMethodName +def callerOfCaller: String + +scala> @noinline def g = callerOfCaller +def g: String + +scala> @noinline def h = g +def h: String + +scala> assert(h == "g", h) + +scala> @inline def g = callerOfCaller +def g: String + +scala> @noinline def h = g +def h: String + +scala> assert(h == "h", h) + +scala> :quit diff --git a/test/files/run/repl-release.scala b/test/files/run/repl-release.scala new file mode 100644 index 000000000000..82ab1807678e --- /dev/null +++ b/test/files/run/repl-release.scala @@ -0,0 +1,33 @@ +import scala.tools.partest.ReplTest +import scala.tools.nsc._ +import scala.tools.nsc.Settings +import scala.tools.nsc.interpreter.shell.ReplReporterImpl + +// cf run/repl-inline.scala +object Test extends ReplTest { + + var count = 0 + + override def transformSettings(s: Settings) = { + s.processArguments("-release:8" :: "-opt:inline:**" :: "-Wopt" :: Nil, processAll = true) + s.Yreplclassbased.value = count > 0 + count += 1 + s + } + + override def code = + """ +def callerOfCaller = Thread.currentThread.getStackTrace.drop(2).head.getMethodName +@noinline def g = callerOfCaller +@noinline def h = g +assert(h == "g", h) +@inline def g = callerOfCaller +@noinline def h = g +assert(h == "h", h) + """ + + override def show() = { + super.show() + super.show() + } +} diff --git a/test/files/run/repl-replay.check b/test/files/run/repl-replay.check new file mode 100644 index 000000000000..e8059184c308 --- /dev/null +++ 
b/test/files/run/repl-replay.check @@ -0,0 +1,14 @@ + +scala> locally { val x = 42 ; "$x" } +val res0: String = $x + +scala> :replay -Xlint +replay> locally { val x = 42 ; "$x" } + ^ + warning: possible missing interpolator: detected interpolated identifier `$x` + ^ + warning: local val x in value res0 is never used +val res0: String = $x + + +scala> :quit diff --git a/test/files/run/repl-replay.scala b/test/files/run/repl-replay.scala new file mode 100644 index 000000000000..9b2a1d635101 --- /dev/null +++ b/test/files/run/repl-replay.scala @@ -0,0 +1,9 @@ + +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def code = """ + |locally { val x = 42 ; "$x" } + |:replay -Xlint + """.stripMargin.trim +} diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check index cf4d9a149e29..d9541c01cc01 100644 --- a/test/files/run/repl-reset.check +++ b/test/files/run/repl-reset.check @@ -1,21 +1,21 @@ scala> val x1 = 1 -x1: Int = 1 +val x1: Int = 1 scala> val x2 = 2 -x2: Int = 2 +val x2: Int = 2 scala> val x3 = 3 -x3: Int = 3 +val x3: Int = 3 scala> case class BippyBungus() -defined class BippyBungus +class BippyBungus scala> x1 + x2 + x3 -res0: Int = 6 +val res0: Int = 6 scala> :reset -Resetting interpreter state. +Resetting REPL state. 
Forgetting this session history: val x1 = 1 @@ -28,28 +28,24 @@ Forgetting all expression results and named terms: $intp, BippyBungus, x1, x2, x Forgetting defined types: BippyBungus scala> x1 + x2 + x3 -:12: error: not found: value x1 - x1 + x2 + x3 ^ -:12: error: not found: value x2 - x1 + x2 + x3 + error: not found: value x1 ^ -:12: error: not found: value x3 - x1 + x2 + x3 + error: not found: value x2 ^ + error: not found: value x3 scala> val x1 = 4 -x1: Int = 4 +val x1: Int = 4 scala> new BippyBungus -:12: error: not found: type BippyBungus - new BippyBungus ^ + error: not found: type BippyBungus scala> class BippyBungus() { def f = 5 } -defined class BippyBungus +class BippyBungus scala> { new BippyBungus ; x1 } -res2: Int = 4 +val res2: Int = 4 scala> :quit diff --git a/test/files/run/repl-save.scala b/test/files/run/repl-save.scala index 25a8de6bde00..42074d2f3401 100644 --- a/test/files/run/repl-save.scala +++ b/test/files/run/repl-save.scala @@ -4,13 +4,13 @@ object Test extends SessionTest { override def session = s"""| |scala> val i = 7 - |i: Int = 7 + |val i: Int = 7 | |scala> val j = 8 - |j: Int = 8 + |val j: Int = 8 | |scala> i * j - |res0: Int = 56 + |val res0: Int = 56 | |scala> :save $saveto | @@ -19,6 +19,6 @@ s"""| override def stripMargins: Boolean = true override def show() = { checkSession() - Console print saveto.toFile.slurp + Console print saveto.toFile.slurp() } } diff --git a/test/files/run/repl-serialization.check b/test/files/run/repl-serialization.check index ecabc85e02b6..c2b1f539be11 100644 --- a/test/files/run/repl-serialization.check +++ b/test/files/run/repl-serialization.check @@ -1,23 +1,23 @@ == evaluating lines -extract: AnyRef => Unit = +val extract: AnyRef => Unit = evaluating x -x: Int = 0 -getX: ()Int -defined class U -y: Int = +val x: Int = 0 +def getX(): Int +class U +lazy val y: Int // unevaluated evaluating z evaluating zz -defined class D -z: Int = 0 -zz: Int = 0 -defined object O -defined class TestClass -t: 
TestClass = TestClass +class D +val z: Int = 0 +val zz: Int = 0 +object O +class TestClass +val t: TestClass = TestClass import t._ -defined class A -defined type alias AA +class A +type AA constructing U -u: U = U +val u: U = U == evaluating lambda evaluating y evaluating O diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala index d439c92e8ea3..9312ce90400b 100644 --- a/test/files/run/repl-serialization.scala +++ b/test/files/run/repl-serialization.scala @@ -3,11 +3,11 @@ import java.io._ import scala.reflect.io.AbstractFile import scala.tools.nsc.Settings import scala.tools.nsc.interpreter.IMain -import scala.tools.nsc.util._ import scala.reflect.internal.util.AbstractFileClassLoader +import scala.tools.nsc.interpreter.shell.ReplReporterImpl object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { run() } @@ -40,7 +40,7 @@ object Test { |extract(() => new AA(x + getX() + y + z + zz + O.apply + u.x)) """.stripMargin - imain = IMain(settings) + imain = new IMain(settings, new ReplReporterImpl(settings)) println("== evaluating lines") imain.directBind("extract", "(AnyRef => Unit)", extract) code.linesIterator.foreach(imain.interpret) diff --git a/test/files/run/repl-suspended-warnings.check b/test/files/run/repl-suspended-warnings.check new file mode 100644 index 000000000000..e4b913831cac --- /dev/null +++ b/test/files/run/repl-suspended-warnings.check @@ -0,0 +1,23 @@ + +scala> @annotation.nowarn def f { } +def f: Unit + +scala> def f { } + ^ + error: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `f`'s return type [quickfixable] + Applicable -Wconf / @nowarn filters for this fatal warning: msg=, cat=deprecation, version=2.13.0 + +scala> @annotation.nowarn def f { } +def f: Unit + +scala> class C { def match = 42 } + ^ + error: identifier expected but 'match' found. 
+ +scala> class C { def `match` = 42 } +class C + +scala> class C { def `match` = 42 } +class C + +scala> :quit diff --git a/test/files/run/repl-suspended-warnings.scala b/test/files/run/repl-suspended-warnings.scala new file mode 100644 index 000000000000..60da04d98cf2 --- /dev/null +++ b/test/files/run/repl-suspended-warnings.scala @@ -0,0 +1,13 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def extraSettings = "-Wconf:any:e" + def code = + """@annotation.nowarn def f { } + |def f { } + |@annotation.nowarn def f { } + |class C { def match = 42 } + |class C { def `match` = 42 } + |class C { def `match` = 42 } + |""".stripMargin +} diff --git a/test/files/run/repl-term-macros.check b/test/files/run/repl-term-macros.check index b02061116770..f00b026996d4 100644 --- a/test/files/run/repl-term-macros.check +++ b/test/files/run/repl-term-macros.check @@ -8,30 +8,30 @@ import language.experimental.macros scala> scala> def impl1(c: Context) = { import c.universe._; c.Expr[Unit](q"()") } -impl1: (c: scala.reflect.macros.blackbox.Context)c.Expr[Unit] +def impl1(c: scala.reflect.macros.blackbox.Context): c.Expr[Unit] scala> def foo1: Unit = macro impl1 -defined term macro foo1: Unit +def foo1: Unit scala> foo1 scala> scala> def impl2(c: Context)() = { import c.universe._; c.Expr[Unit](q"()") } -impl2: (c: scala.reflect.macros.blackbox.Context)()c.Expr[Unit] +def impl2(c: scala.reflect.macros.blackbox.Context)(): c.Expr[Unit] scala> def foo2(): Unit = macro impl2 -defined term macro foo2: ()Unit +def foo2(): Unit scala> foo2() scala> scala> def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") } -impl3: (c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int])(y: c.Expr[Int])c.Expr[Unit] +def impl3(c: scala.reflect.macros.blackbox.Context)(x: c.Expr[Int])(y: c.Expr[Int]): c.Expr[Unit] scala> def foo3(x: Int)(y: Int): Unit = macro impl3 -defined term macro foo3: (x: Int)(y: Int)Unit +def 
foo3(x: Int)(y: Int): Unit scala> foo3(2)(3) diff --git a/test/files/run/repl-term-macros.scala b/test/files/run/repl-term-macros.scala index 9ac5279fc535..674a53ea4cb9 100644 --- a/test/files/run/repl-term-macros.scala +++ b/test/files/run/repl-term-macros.scala @@ -23,4 +23,4 @@ def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = { import c.universe._; c def foo3(x: Int)(y: Int): Unit = macro impl3 foo3(2)(3) """ -} \ No newline at end of file +} diff --git a/test/files/run/repl-trace-elided-more.check b/test/files/run/repl-trace-elided-more.check new file mode 100644 index 000000000000..4a10f7d9d36d --- /dev/null +++ b/test/files/run/repl-trace-elided-more.check @@ -0,0 +1,25 @@ + +scala> val e = new AssertionError("oops") +val e: AssertionError = java.lang.AssertionError: oops + +scala> e.addSuppressed(new java.io.IOException("bad io")) + +scala> throw e +java.lang.AssertionError: oops + ... ??? elided + Suppressed: java.io.IOException: bad io + ... ??? elided and ??? more + +scala> val f = new Exception("f"); val g = new Exception("g") +val f: Exception = java.lang.Exception: f +val g: Exception = java.lang.Exception: g + +scala> f.addSuppressed(g) + +scala> throw f +java.lang.Exception: f + ... ??? elided + Suppressed: java.lang.Exception: g + ... ??? 
more + +scala> :quit diff --git a/test/files/run/repl-trace-elided-more.scala b/test/files/run/repl-trace-elided-more.scala new file mode 100644 index 000000000000..95a0ed5d997c --- /dev/null +++ b/test/files/run/repl-trace-elided-more.scala @@ -0,0 +1,5 @@ +import scala.tools.partest.{SessionTest, StackCleaner} + +// scala/bug#11945 print stack trace elided count correctly +// +object Test extends SessionTest with StackCleaner diff --git a/test/files/run/repl-transcript.check b/test/files/run/repl-transcript.check index d0b455cbf623..ece2bbf47304 100644 --- a/test/files/run/repl-transcript.check +++ b/test/files/run/repl-transcript.check @@ -18,19 +18,19 @@ scala> res6.sum + res5 // Replaying 5 commands from transcript. scala> class Bippity -defined class Bippity +class Bippity scala> def f = new Bippity -f: Bippity +def f: Bippity scala> val res5 = { 123 } -res5: Int = 123 +val res5: Int = 123 scala> val res6 = { 1 to 100 map (_ + 1) } -res6: scala.collection.immutable.IndexedSeq[Int] = Vector(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101) +val res6: IndexedSeq[Int] = Vector(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101) scala> res6.sum + res5 -res0: Int = 5273 +val res0: Int = 5273 scala> :quit diff --git a/test/files/run/repl-trim-stack-trace.check 
b/test/files/run/repl-trim-stack-trace.check index 65e282ae4aea..ee27e0c4cec9 100644 --- a/test/files/run/repl-trim-stack-trace.check +++ b/test/files/run/repl-trim-stack-trace.check @@ -1,28 +1,35 @@ -Welcome to Scala -Type in expressions for evaluation. Or try :help. scala> def f = throw new Exception("Uh-oh") -f: Nothing +def f: Nothing scala> f java.lang.Exception: Uh-oh - at .f(:XX) + at f(:XX) ... ??? elided scala> def f = throw new Exception("") -f: Nothing +def f: Nothing scala> f java.lang.Exception: - at .f(:XX) + at f(:XX) ... ??? elided scala> def f = throw new Exception -f: Nothing +def f: Nothing scala> f java.lang.Exception - at .f(:XX) + at f(:XX) + ... ??? elided + +scala> null.asInstanceOf +#partest !java15+ +java.lang.NullPointerException +#partest java15+ +java.lang.NullPointerException: Cannot throw exception because the return value of "res3()" is null +#partest + at .lzycompute(:8) ... ??? elided scala> :quit diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala index c36a4d9a9666..9a39181726e4 100644 --- a/test/files/run/repl-trim-stack-trace.scala +++ b/test/files/run/repl-trim-stack-trace.scala @@ -1,15 +1,5 @@ +import scala.tools.partest.{SessionTest, StackCleaner} -import scala.tools.partest.{SessionTest, Welcoming} - -// scala/bug#7740 -object Test extends SessionTest with Welcoming { - // normalize the "elided" lines because the frame count depends on test context - lazy val elided = """(\s+\.{3} )\d+( elided)""".r - lazy val frame = """(\s+\Qat .f(:\E)\d+(\))""".r - override def normalize(line: String) = line match { - case elided(ellipsis, suffix) => s"$ellipsis???$suffix" - case frame(prefix, suffix) => s"${prefix}XX${suffix}" - case s => s - } - override def expected = super.expected map normalize -} +// scala/bug#7740 pretty print stack traces +// +object Test extends SessionTest with StackCleaner diff --git a/test/files/run/repl-type-verbose.check 
b/test/files/run/repl-type-verbose.check index 10a9c1217c76..79e056b4084c 100644 --- a/test/files/run/repl-type-verbose.check +++ b/test/files/run/repl-type-verbose.check @@ -3,7 +3,7 @@ scala> // verbose! scala> :type -v def f = 5 // Type signature -=> Int +Int // Internal Type structure NullaryMethodType( @@ -12,7 +12,7 @@ NullaryMethodType( scala> :type -v def f() = 5 // Type signature -()Int +(): Int // Internal Type structure NullaryMethodType( @@ -23,7 +23,7 @@ NullaryMethodType( scala> :type -v def f[T] = 5 // Type signature -[T]=> Int +[T]Int // Internal Type structure PolyType( @@ -35,7 +35,7 @@ PolyType( scala> :type -v def f[T >: Null] = 5 // Type signature -[T >: Null]=> Int +[T >: Null]Int // Internal Type structure PolyType( @@ -47,7 +47,7 @@ PolyType( scala> :type -v def f[T <: String] = 5 // Type signature -[T <: String]=> Int +[T <: String]Int // Internal Type structure PolyType( @@ -59,7 +59,7 @@ PolyType( scala> :type -v def f[T]() = 5 // Type signature -[T]()Int +[T](): Int // Internal Type structure PolyType( @@ -73,7 +73,7 @@ PolyType( scala> :type -v def f[T, U]() = 5 // Type signature -[T, U]()Int +[T, U](): Int // Internal Type structure PolyType( @@ -87,7 +87,7 @@ PolyType( scala> :type -v def f[T, U]()() = 5 // Type signature -[T, U]()()Int +[T, U]()(): Int // Internal Type structure PolyType( @@ -103,7 +103,7 @@ PolyType( scala> :type -v def f[T, U <: T] = 5 // Type signature -[T, U <: T]=> Int +[T, U <: T]Int // Internal Type structure PolyType( @@ -115,7 +115,7 @@ PolyType( scala> :type -v def f[T, U <: T](x: T)(y: U) = 5 // Type signature -[T, U <: T](x: T)(y: U)Int +[T, U <: T](x: T)(y: U): Int // Internal Type structure PolyType( @@ -133,7 +133,7 @@ PolyType( scala> :type -v def f[T: Ordering] = 5 // Type signature -[T](implicit evidence$1: Ordering[T])Int +[T](implicit evidence$1: Ordering[T]): Int // Internal Type structure PolyType( @@ -148,7 +148,7 @@ PolyType( scala> :type -v def f[T: Ordering] = implicitly[Ordering[T]] // Type 
signature -[T](implicit evidence$1: Ordering[T])Ordering[T] +[T](implicit evidence$1: Ordering[T]): Ordering[T] // Internal Type structure PolyType( @@ -171,7 +171,7 @@ PolyType( scala> :type -v def f[T <: { type Bippy = List[Int] ; def g(): Bippy }] = 5 // Type signature -[T <: AnyRef{type Bippy = List[Int]; def g(): this.Bippy}]=> Int +[T <: AnyRef{type Bippy = List[Int]; def g(): this.Bippy}]Int // Internal Type structure PolyType( diff --git a/test/files/run/repl-type.check b/test/files/run/repl-type.check new file mode 100644 index 000000000000..2bc7d6ef9cc1 --- /dev/null +++ b/test/files/run/repl-type.check @@ -0,0 +1,38 @@ + +scala> :type 42 +Int + +scala> val x: 23 = 23 +val x: 23 = 23 + +scala> :type x +23 + +scala> :type (23: 23) +Int + +scala> val y = x +val y: Int = 23 + +scala> :type y +Int + +scala> final val z = x +val z: 23 = 23 + +scala> :type z +23 + +scala> def xx: 23 = 23 +def xx: 23 + +scala> :type xx +23 + +scala> final val yy = xx +val yy: 23 = 23 + +scala> :type yy +23 + +scala> :quit diff --git a/test/files/run/repl-type.scala b/test/files/run/repl-type.scala new file mode 100644 index 000000000000..7ed1e4f1ab64 --- /dev/null +++ b/test/files/run/repl-type.scala @@ -0,0 +1,20 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def extraSettings = "-feature -language:_" + + def code = """ +:type 42 +val x: 23 = 23 +:type x +:type (23: 23) +val y = x +:type y +final val z = x +:type z +def xx: 23 = 23 +:type xx +final val yy = xx +:type yy + """.trim +} diff --git a/test/files/run/retclosure.check b/test/files/run/retclosure.check deleted file mode 100644 index 94c4971e4a42..000000000000 --- a/test/files/run/retclosure.check +++ /dev/null @@ -1 +0,0 @@ -check failed: some problem diff --git a/test/files/run/retclosure.scala b/test/files/run/retclosure.scala index d354cb35867b..46e35074141f 100644 --- a/test/files/run/retclosure.scala +++ b/test/files/run/retclosure.scala @@ -1,11 +1,11 @@ /* Test return 
expressions inside closures. - * + * scalac: -Xlint:-nonlocal-return * See bug#834 */ object Test { def response: String = { def check: Option[String] = { - val closure: String=>Nothing = + val closure: String => Nothing = p => return Some("some problem") // should return from check closure("whatever") @@ -17,7 +17,5 @@ object Test { } } - def main(args: Array[String]) { - Console.println(response) - } + def main(args: Array[String]): Unit = assert(response == "check failed: some problem") } diff --git a/test/files/run/richs.check b/test/files/run/richs.check deleted file mode 100644 index d2f4badc9209..000000000000 --- a/test/files/run/richs.check +++ /dev/null @@ -1,79 +0,0 @@ -warning: two deprecations (since 2.11.0); re-run with -deprecation for details - -RichCharTest1: -true -true -true -true - -RichIntTest: -10 -11 -12 -13 -0 -0 -10000 -10 -20 -10001 -ffffffff - -RichStringTest1: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc - |xyz -s5: abc - #xyz - -RichStringTest2: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc - |xyz -s5: abc - #xyz - -RichStringTest3: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc - |xyz -s5: abc - #xyz - -RichStringTest4: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc -xyz -s5: abc - #xyz - -RichStringTest5: -s1: abc - xyz -s2: abc - xyz -s3: abc - xyz -s4: abc - |xyz -s5: abc -xyz -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) diff --git a/test/files/run/richs.scala b/test/files/run/richs.scala deleted file mode 100644 index 0cadff88f478..000000000000 --- a/test/files/run/richs.scala +++ /dev/null @@ -1,139 +0,0 @@ -trait RichTest { - val s1 = """abc""" - val s2 = """abc\txyz\n""" - val s3 = """abc - xyz""" - val s4 = """abc - |xyz""" - val s5 = """abc - #xyz""" - def getObjectName: String = { - val cn = this.getClass().getName() - cn.substring(0, cn.length-1) - } - def length[A](it: Iterator[A]) = it.toList.length - def length[A](it: Iterable[A]) = 
it.toList.length - def run: Unit -} -object RichCharTest1 extends RichTest { - def run { - println("\n" + getObjectName + ":") - println('\40'.isWhitespace) - println('\011'.isWhitespace) - println('1'.asDigit == 1) - println('A'.asDigit == 10) - } -} -// object RichCharTest2 extends RichTest { -// case class C(s: String) { -// private val it = s.iterator -// private var c: Char = _ -// def ch(): Char = c -// def nextch(): Unit = { c = if (it.hasNext) it.next else ';' } -// def err(msg: String) = println(msg) -// nextch() -// } -// def run { -// println("\n" + getObjectName + ":") -// val c1 = C("x4A;") -// val s1 = xml.Utility.parseCharRef(c1.ch, c1.nextch, c1.err) -// val c2 = C("74;") -// val s2 = xml.Utility.parseCharRef(c2.ch, c2.nextch, c2.err) -// println(s1 == s2) -// } -// } -object RichIntTest extends RichTest { - private val n = 10 - private val m = -2 - def run { - println("\n" + getObjectName + ":") - println(length(0 until n)) - println(length(0 to n)) - println(length(m until n)) - println(length(m to n)) - println(length(n until m)) - println(length(n to m)) - - println(16.toBinaryString) // should be "10000" - println(16.toHexString) // should be "10" - println(16.toOctalString) // should be "20" - - println(65537.toHexString) // should be "10001" - println((-1).toHexString) // should be "ffffffff" - } -} -object RichStringTest1 extends RichTest { - def run { - println("\n" + getObjectName + ":") - println("s1: " + s1) - println("s2: " + s2) - println("s3: " + s3) - println("s4: " + s4) - println("s5: " + s5) - } -} -object RichStringTest2 extends RichTest { - def run { - println("\n" + getObjectName + ":") - Console.print("s1: "); s1.linesIterator foreach println - Console.print("s2: "); s2.linesIterator foreach println - Console.print("s3: "); s3.linesIterator foreach println - Console.print("s4: "); s4.linesIterator foreach println - Console.print("s5: "); s5.linesIterator foreach println - } -} -object RichStringTest3 extends RichTest { - def 
run { - println("\n" + getObjectName + ":") - println("s1: " + s1.stripLineEnd) - println("s2: " + s2.stripLineEnd) - println("s3: " + s3.stripLineEnd) - println("s4: " + s4.stripLineEnd) - println("s5: " + s5.stripLineEnd) - } -} -object RichStringTest4 extends RichTest { - def run { - println("\n" + getObjectName + ":") - println("s1: " + s1.stripMargin) - println("s2: " + s2.stripMargin) - println("s3: " + s3.stripMargin) - println("s4: " + s4.stripMargin) - println("s5: " + s5.stripMargin) - } -} -object RichStringTest5 extends RichTest { - def run { - println("\n" + getObjectName + ":") - println("s1: " + s3.stripMargin('#')) - println("s2: " + s3.stripMargin('#')) - println("s3: " + s3.stripMargin('#')) - println("s4: " + s4.stripMargin('#')) - println("s5: " + s5.stripMargin('#')) - } -} -object RichStringTest6 extends RichTest { - def run { - println("a:b:c:d".split(':').toList) - println("a.b.c.d".split('.').toList) - println("a$b$c$d".split('$').toList) - println("a^b^c^d".split('^').toList) - println("a\\b\\c\\d".split('\\').toList) - println("a:b:c.d".split(Array(':', '.')).toList) - println("a:b.c$d".split(Array(':', '.', '$')).toList) - } -} -/** xxx */ -object Test { - def main(args: Array[String]) { - RichCharTest1.run - //RichCharTest2.run - RichIntTest.run - RichStringTest1.run - RichStringTest2.run - RichStringTest3.run - RichStringTest4.run - RichStringTest5.run - RichStringTest6.run - } -} diff --git a/test/files/run/runtime-richChar.scala b/test/files/run/runtime-richChar.scala index dceb70e74d6c..72e25a3a833d 100644 --- a/test/files/run/runtime-richChar.scala +++ b/test/files/run/runtime-richChar.scala @@ -1,5 +1,5 @@ object Test extends App { - def testSeq(name: String, expected: Seq[Char], got: Seq[Char]) { + def testSeq(name: String, expected: Seq[Char], got: Seq[Char]): Unit = { if (expected.toList == got.toList) println(name + " ok") else diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala index 
468a80fc0c87..937d77117a14 100644 --- a/test/files/run/runtime.scala +++ b/test/files/run/runtime.scala @@ -15,7 +15,7 @@ object Test0Test { i = i + 1; } Console.print("]"); - Console.println; + Console.println() } def test(args: Array[String]): Unit = { @@ -24,7 +24,7 @@ object Test0Test { val ss: Array[Short ] = Array(3, 4, 5); val cs: Array[Char ] = Array('a', 'b', 'c'); val is: Array[Int ] = Array(6, 7, 8); - val ls: Array[Long ] = Array(9l, 10l, 11l); + val ls: Array[Long ] = Array(9L, 10L, 11L); val fs: Array[Float ] = Array(12.0f, 13.0f); val ds: Array[Double ] = Array(14.0d, 15.0d); val os: Array[AnyRef ] = Array("string"); @@ -51,7 +51,7 @@ package test1.bar { class PrintStream() { def println(): Unit = { - Console.println; + Console.println() } } @@ -60,20 +60,20 @@ package test1.bar { object Test1Test { def test(args: Array[String]): Unit = { - {Console.print(10)}; Console.println; + {Console.print(10)}; Console.println(); // {System.out.print(11); java}.lang.System.out.println(); // {System.out.print(12); java.lang}.System.out.println(); // {System.out.print(13); java.lang.System}.out.println(); - {Console.print(14); Console}.println; - {Console.print(15); (() => Console.println):(() => Unit)} apply (); - {Console.print(16); Console.println}; + {Console.print(14); Console}.println(); + {Console.print(15); (() => Console.println()):(() => Unit)}.apply(); + {Console.print(16); Console.println()}; {Console.print(20)}; test1.bar.System.out.println(); // {System.out.print(21); test1}.bar.System.out.println(); // {System.out.print(22); test1.bar}.System.out.println(); {Console.print(23); test1.bar.System}.out.println(); {Console.print(24); test1.bar.System.out}.println(); - {Console.print(25); test1.bar.System.out.println _ : (() => Unit)} apply (); + {Console.print(25); test1.bar.System.out.println _ : (() => Unit)}.apply(); {Console.print(26); test1.bar.System.out.println()}; } @@ -85,23 +85,23 @@ object Test1Test { package test2 { class A { - def run = 
Console.println("A"); + def run() = Console.println("A"); } trait M0 extends A { - override def run = { super.run; Console.println("M0"); } + override def run() = { super.run(); Console.println("M0"); } } class M1 extends M0 { - override def run = { super.run; Console.println("M1"); } + override def run() = { super.run(); Console.println("M1"); } } trait N0 extends A { - override def run = { super.run; Console.println("N0"); } + override def run() = { super.run(); Console.println("N0"); } } class N1 extends N0 { - override def run = { super.run; Console.println("N1"); } + override def run() = { super.run(); Console.println("N1"); } } object M0N0 extends M0 with N0; @@ -113,10 +113,10 @@ package test2 { object Test2Test { def test(args: Array[String]): Unit = { - test2.M0N0.run; Console.println; - test2.N0M0.run; Console.println; - test2.M1N0.run; Console.println; - test2.N1M0.run; Console.println; + test2.M0N0.run(); Console.println() + test2.N0M0.run(); Console.println() + test2.M1N0.run(); Console.println() + test2.N1M0.run(); Console.println() } } @@ -174,12 +174,12 @@ object Test { case exception: Throwable => { //val name: String = Thread.currentThread().getName(); Console.print("Exception in thread \"" + name + "\" " + exception); - Console.println; + Console.println() errors = errors + 1; } } Console.println(">>> " + name); - Console.println; + Console.println() } def main(args: Array[String]): Unit = { @@ -190,8 +190,8 @@ object Test { test("Test3" , Test3Test.test(args)); if (errors > 0) { - Console.println; - Console.println(errors + " error" + (if (errors > 1) "s" else "")); + Console.println() + Console.println(s"$errors error" + (if (errors > 1) "s" else "")); } } } diff --git a/test/files/run/runtimeEval1.scala b/test/files/run/runtimeEval1.scala index 9497b52918dd..84861c6c8f2d 100644 --- a/test/files/run/runtimeEval1.scala +++ b/test/files/run/runtimeEval1.scala @@ -6,4 +6,4 @@ object Test extends App { val x = 2 println(x) }.eval -} \ No newline at 
end of file +} diff --git a/test/files/run/runtimeEval2.scala b/test/files/run/runtimeEval2.scala index 513b8201292d..a566487c5b8a 100644 --- a/test/files/run/runtimeEval2.scala +++ b/test/files/run/runtimeEval2.scala @@ -8,4 +8,4 @@ object Test extends App { val x = 2 val outer = reify{reify{x}} println(outer.eval.eval) -} \ No newline at end of file +} diff --git a/test/files/run/sammy_after_implicit_view.scala b/test/files/run/sammy_after_implicit_view.scala index aa9158d0af7d..4d3d5a37cfd0 100644 --- a/test/files/run/sammy_after_implicit_view.scala +++ b/test/files/run/sammy_after_implicit_view.scala @@ -9,7 +9,7 @@ object Test extends App { def implicitSam() = { import language.implicitConversions var ok = true - implicit def fun2sam(fun: Int => String): MySam = { ok = false; new MySam { def apply(x: Int) = fun(x) } } + @annotation.unused implicit def fun2sam(fun: Int => String): MySam = { ok = false; new MySam { def apply(x: Int) = fun(x) } } val className = (((x: Int) => x.toString): MySam).getClass.toString assert(ok, "implicit conversion not called") assert(!(className contains AnonFunClass), className) diff --git a/test/files/run/sammy_cbn.scala b/test/files/run/sammy_cbn.scala index b84b2fd8e531..f6124441c91f 100644 --- a/test/files/run/sammy_cbn.scala +++ b/test/files/run/sammy_cbn.scala @@ -1,7 +1,7 @@ trait F0[T] { def apply(): T } object Test extends App { - def delay[T](v: => T) = (v _): F0[T] + def delay[T](v: => T) = (() => v): F0[T] // should not fail with ClassCastException: $$Lambda$6279/897871870 cannot be cast to F0 // (also, should not say boe!) 
diff --git a/test/files/run/sammy_java8.scala b/test/files/run/sammy_java8.scala deleted file mode 100644 index db9df7f5febf..000000000000 --- a/test/files/run/sammy_java8.scala +++ /dev/null @@ -1,34 +0,0 @@ -import scala.tools.partest._ - -// java8 version of sammy_poly.scala -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - global.settings.Xexperimental.value = true - - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(samSource) ++ - compilationUnits(global)(useSamSource) - } - - private def samSource = """ -// trait F[T, U] { def apply(x: T): U } -public interface F { - U apply(T t); - default void yadayada() { - throw new UnsupportedOperationException("yadayada"); - } -} - """ - - private def useSamSource = """ -class T { - def app[T, U](x: T)(f: F[T, U]): U = f(x) - app(1)(x => List(x)) -} - """ - - // We're only checking we can compile it. - def check(source: String, unit: global.CompilationUnit): Unit = () -} diff --git a/test/files/run/sammy_return.scala b/test/files/run/sammy_return.scala index e959619dd112..f9a6d2d9fd8c 100644 --- a/test/files/run/sammy_return.scala +++ b/test/files/run/sammy_return.scala @@ -11,4 +11,4 @@ class TO[A](x: A) { object Test extends App { assert(new TO("a").collectFirst(new PF[String, String]).get == "a") -} \ No newline at end of file +} diff --git a/test/files/run/sammy_seriazable.scala b/test/files/run/sammy_seriazable.scala index 458b99238a23..30d8efa6bd04 100644 --- a/test/files/run/sammy_seriazable.scala +++ b/test/files/run/sammy_seriazable.scala @@ -4,11 +4,11 @@ trait NotSerializableInterface { def apply(a: Any): Any } abstract class NotSerializableClass { def apply(a: Any): Any } // SAM type that supports lambdas-as-invoke-dynamic trait IsSerializableInterface extends java.io.Serializable { def apply(a: Any): Any } -// SAM type that still requires lambdas-as-anonhmous-classes +// SAM type that still requires 
lambdas-as-anonymous-classes abstract class IsSerializableClass extends java.io.Serializable { def apply(a: Any): Any } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val nsi: NotSerializableInterface = x => x val nsc: NotSerializableClass = x => x diff --git a/test/files/run/sammy_vararg_cbn.check b/test/files/run/sammy_vararg_cbn.check index 1cff0f067c46..9d495850f927 100644 --- a/test/files/run/sammy_vararg_cbn.check +++ b/test/files/run/sammy_vararg_cbn.check @@ -1 +1 @@ -WrappedArray(1) +ArraySeq(1) diff --git a/test/files/run/sbt-icode-interface.check b/test/files/run/sbt-icode-interface.check index bea9b6fc21a4..3087c07e3cf8 100644 --- a/test/files/run/sbt-icode-interface.check +++ b/test/files/run/sbt-icode-interface.check @@ -1 +1 @@ -warning: one deprecation (since 2.12.0); re-run with -deprecation for details +warning: 1 deprecation (since 2.12.0); re-run with -deprecation for details diff --git a/test/files/run/sbt-icode-interface.scala b/test/files/run/sbt-icode-interface.scala index 7cd2de5c00f7..1b7bd5a6acad 100644 --- a/test/files/run/sbt-icode-interface.scala +++ b/test/files/run/sbt-icode-interface.scala @@ -8,22 +8,22 @@ object Test extends DirectTest { object O """.trim - def show() { - val global = newCompiler("-usejavacp") + def show(): Unit = { + val global = newCompiler() import global._ val r = new Run r.compileSources(newSourceFile(code) :: Nil) val results = collection.mutable.Buffer[(Boolean, String)]() - // Nailing down defacto compiler API from SBT's usage + // Nailing down de facto compiler API from SBT's usage // https://github.com/sbt/sbt/blob/adb41611cf73260938274915d8462d924df200c8/compile/interface/src/main/scala/xsbt/Analyzer.scala#L29-L41 def isTopLevelModule(sym: Symbol) = sym.isTopLevel && sym.isModule for (unit <- currentRun.units if !unit.isJava) { val sourceFile = unit.source.file.file for (iclass <- unit.icode) { val sym = iclass.symbol - def addGenerated(separatorRequired: 
Boolean) { + def addGenerated(separatorRequired: Boolean): Unit = { results += (separatorRequired -> sym.fullName) } if (sym.isModuleClass && !sym.isImplClass) { diff --git a/test/files/run/scalapInvokedynamic.scala b/test/files/run/scalapInvokedynamic.scala index 670cf26662fb..003c1701fb63 100644 --- a/test/files/run/scalapInvokedynamic.scala +++ b/test/files/run/scalapInvokedynamic.scala @@ -8,4 +8,4 @@ class C { object Test extends App { val testClassesDir = System.getProperty("partest.output") scala.tools.scalap.Main.main(Array("-cp", testClassesDir, "C")) -} \ No newline at end of file +} diff --git a/test/files/run/scan.scala b/test/files/run/scan.scala index 47e0a7d976d3..b52dde0afb7b 100644 --- a/test/files/run/scan.scala +++ b/test/files/run/scan.scala @@ -1,23 +1,18 @@ - - - - object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val lst = List(1, 2, 3, 4, 5) assert(lst.scanLeft(0)(_ + _) == List(0, 1, 3, 6, 10, 15)) - assert(lst.scanRight(0)(_ + _) == List(15, 14, 12, 9, 5, 0)) + assert(lst.scanRight(0)(_ + _) == List(15, 14, 12, 9, 5, 0), "List scanRight") val emp = List[Int]() assert(emp.scanLeft(0)(_ + _) == List(0)) assert(emp.scanRight(0)(_ + _) == List(0)) - val stream = Stream(1, 2, 3, 4, 5) - assert(stream.scanLeft(0)(_ + _) == Stream(0, 1, 3, 6, 10, 15)) + val stream = LazyList(1, 2, 3, 4, 5) + assert(stream.scanLeft(0)(_ + _) == LazyList(0, 1, 3, 6, 10, 15)) - assert(Stream.from(1).scanLeft(0)(_ + _).take(5) == Stream(0, 1, 3, 6, 10)) + assert(LazyList.from(1).scanLeft(0)(_ + _).take(5) == LazyList(0, 1, 3, 6, 10)) } - -} \ No newline at end of file +} diff --git a/test/files/run/sd167.check b/test/files/run/sd167.check index 587be6b4c3f9..51c8fccd08ae 100644 --- a/test/files/run/sd167.check +++ b/test/files/run/sd167.check @@ -1 +1,5 @@ +sd167.scala:6: warning: match may not be exhaustive. 
+It would fail on the following input: Some(_) + Some(1) match { case x"${a}" => } // used to convert to `case Some(a) =>` and omit side effects + ^ x diff --git a/test/files/run/sd167.scala b/test/files/run/sd167.scala index 5095e772ad78..04c0b940621f 100644 --- a/test/files/run/sd167.scala +++ b/test/files/run/sd167.scala @@ -2,7 +2,7 @@ object Test { implicit class ToExtractor(val s: StringContext) { def x = {println("x"); Some } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { Some(1) match { case x"${a}" => } // used to convert to `case Some(a) =>` and omit side effects } } diff --git a/test/files/run/sd187.check b/test/files/run/sd187.check index 2257aa0f0ccd..ff192d9bf60e 100644 --- a/test/files/run/sd187.check +++ b/test/files/run/sd187.check @@ -1,23 +1,23 @@ [[syntax trees at end of patmat]] // newSource1.scala -[7]package [7] { - [7]class C extends [9][2302]scala.AnyRef { - [2302]def (): [9]C = [2302]{ - [2302][2302][2302]C.super.(); +[1:2302]package [1:1] { + [1:2302]class C extends [9:2302][9]scala.AnyRef { + [9]def (): [9]C = [9]{ + [9][9][9]C.super.(); [9]() }; - [107]def commonSubPattern([124]x: [127]): [107]AnyVal = [205]{ - [205] var rc6: [205]Boolean = [205]false; - [205] var x3: [205]String = [205][205][205]null.asInstanceOf[[205]String]; - [205]{ - [205]case val x1: [205]Any = [205]x; - [205]case8(){ - [313]if ([313][313]x1.isInstanceOf[[313]Option[_]]) - [325][325]matchEnd7([325]()) + [103:904]def commonSubPattern([124:130]x: [127:130]): [107]AnyVal = [206:220]{ + [206:220] var rc6: [211]Boolean = [211]false; + [206:220] var x3: [211]String = [211][211][211]null.asInstanceOf[[211]String]; + [206:220]{ + [206:220]case val x1: [211]Any = [206:220]([206:207]x: [211]); + [206:220]case8(){ + [312:324]if ([313][313]x1.isInstanceOf[[313]Option[_]]) + [325:327][325]matchEnd7([325:327]()) else [313][313]case9() }; - [205]case9(){ - [412]if ([412][412]x1.isInstanceOf[[412]String]) + [206:220]case9(){ + [412:421]if 
([412][412]x1.isInstanceOf[[412]String]) [412]{ [412][412]rc6 = [412]true; [412][412]x3 = [412]([412][412]x1.asInstanceOf[[412]String]: [412]String); @@ -29,32 +29,32 @@ else [412][412]case10() }; - [205]case10(){ - [612]if ([612][612]rc6.&&([627][627]x3.==([630]"6"))) + [206:220]case10(){ + [612:621]if ([612][612]rc6.&&([627][627]x3.==([630]"6"))) [712][712]matchEnd7([712][712]x3.hashCode()) else [612][612]case11() }; - [205]case11(){ - [205][205]matchEnd7([205]throw [205][205][205]new [205]MatchError([205]x1)) + [206:220]case11(){ + [206:220][206:220]matchEnd7([206:220]throw [206:220][206:220][206:220]new [206:220]MatchError([206:220]x1)) }; - [205]matchEnd7(x: [NoPosition]AnyVal){ - [205]x + [206:220]matchEnd7(x: [NoPosition]AnyVal){ + [206:220]x } } }; - [1007]def extractor([1017]x: [1020]): [1007]Any = [1027]{ - [1027]case val x1: [1027]Any = [1027]x; - [1027]case6(){ - [1120]if ([1120][1120]x1.isInstanceOf[[1120]Product2[T1,T2]]) - [1120]{ - [1120] val x2: [1120]Product2[T1,T2] = [1120]([1120][1120]x1.asInstanceOf[[1120]Product2[T1,T2]]: [1120]Product2[T1,T2]); - [1112]{ - [1112] val o8: [1112]Option[Product2[T1,T2]] = [1112][1112][1112]scala.Product2.unapply[[1112]T1, [1112]T2]([1112]x2); - [1112]if ([1112]o8.isEmpty.unary_!) - [1112]{ - [1121]val a: [1121]Any = [1121]o8.get._1; - [1210][1210]matchEnd5([1210]a) + [1003:1306]def extractor([1017:1023]x: [1020:1023]): [1007]Any = [1028:1041]{ + [1028:1041]case val x1: [1032]Any = [1028:1041]([1028:1029]x: [1032]); + [1028:1041]case6(){ + [1112:1126]if ([1120][1120]x1.isInstanceOf[[1120]Product2[T1,T2]]) + [1112:1126]{ + [1112:1126] val x2: [1120]Product2[T1,T2] = [1120]([1120][1120]x1.asInstanceOf[[1120]Product2[T1,T2]]: [1120]Product2[T1,T2]); + [1112:1120]{ + [1112:1120] val o8: [1112]Option[Product2[T1,T2]] = [1112:1120][1112:1120][1112:1120]scala.Product2.unapply[[1112]T1, [1112]T2]([1112]x2); + [1112:1211]if ([1112]o8.isEmpty.unary_!) 
+ [1112:1211]{ + [1121:1122]val a: [1121]Any = [1121]o8.get._1; + [1210:1211][1210]matchEnd5([1210:1211]a) } else [1112][1112]case7() @@ -63,35 +63,35 @@ else [1120][1120]case7() }; - [1027]case7(){ - [1027][1027]matchEnd5([1027]throw [1027][1027][1027]new [1027]MatchError([1027]x1)) + [1028:1041]case7(){ + [1028:1041][1028:1041]matchEnd5([1028:1041]throw [1028:1041][1028:1041][1028:1041]new [1028:1041]MatchError([1028:1041]x1)) }; - [1027]matchEnd5(x: [NoPosition]Any){ - [1027]x + [1028:1041]matchEnd5(x: [NoPosition]Any){ + [1028:1041]x } }; - [1407]def swatch: [1407]String = [1505]try { - [1607][1607][1607]C.this.toString() + [1403:2204]def swatch: [1407]String = [1505:2106]try { + [1607:1615][1607:1615][1607]C.this.toString() } catch { - [1505]case [1505](ex6 @ [1505]_) => [1505]{ - [1812] val x4: [1812]Throwable = [1812]ex6; - [1505]case9(){ - [1812]if ([1812][1812]x4.ne([1812]null)) - [1812]{ - [1812] val x5: [1812]Throwable = [1812]x4; - [1812]if ([1915][1915][1912]"".isEmpty()) - [2014][2014]matchEnd8([2014][2014]x5.toString()) + [1812:2022]case [1812](ex6 @ [1812]_) => [1812:1824]{ + [1812:1824] val x4: [1812]Throwable = [1812]ex6; + [1812:1824]case9(){ + [1812:1824]if ([1812][1812]x4.ne([1812]null)) + [1812:2022]{ + [1812:1824] val x5: [1812]Throwable = [1812]x4; + [1812:2022]if ([1912:1922][1912:1922][1912:1914]"".isEmpty()) + [2012:2022][2014]matchEnd8([2014][2014]x5.toString()) else [1812][1812]case10() } else [1812][1812]case10() }; - [1505]case10(){ - [1505][1505]matchEnd8([1505]throw [1505]ex6) + [1812]case10(){ + [1812][1812]matchEnd8([1812]throw [1812]ex6) }; - [1505]matchEnd8(x: [NoPosition]String){ - [1505]x + [1812]matchEnd8(x: [NoPosition]String){ + [1812]x } } } diff --git a/test/files/run/sd187.scala b/test/files/run/sd187.scala index 91d4d56cde74..be475a15e0c3 100644 --- a/test/files/run/sd187.scala +++ b/test/files/run/sd187.scala @@ -1,15 +1,14 @@ import scala.tools.partest._ -import java.io.{Console => _, _} object Test extends DirectTest 
{ - override def extraSettings: String = "-usejavacp -Xprint-pos -Xprint:patmat -Ystop-after:patmat -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint-pos -Vprint:patmat -Ystop-after:patmat" override def code = """ |class C { // | def commonSubPattern(x: Any) = { // - | x match { // + | (x: @unchecked) match { // | case _: Option[_] => // | case s: String if s == "4" => // | s.hashCode // @@ -17,7 +16,7 @@ object Test extends DirectTest { | s.hashCode // | } // | } // - | def extractor(x: Any) = x match { // + | def extractor(x: Any) = (x: @unchecked) match { // | case Product2(a, b) => // | a // | } // @@ -33,10 +32,5 @@ object Test extends DirectTest { |} |""".stripMargin - - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } -} \ No newline at end of file + override def show(): Unit = compile() +} diff --git a/test/files/run/sd275-java/Test.scala b/test/files/run/sd275-java/Test.scala index 84187527d29f..48adedcd6326 100644 --- a/test/files/run/sd275-java/Test.scala +++ b/test/files/run/sd275-java/Test.scala @@ -2,18 +2,7 @@ import scala.tools.partest._ import java.io.File object Test extends StoreReporterDirectTest { - def code = ??? 
- - def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) - } - - def show(): Unit = { - deletePackage("p1/p2/p3") - deletePackage("p1/p2") - - compileCode(""" + def code = """ package sample class Test { @@ -23,11 +12,21 @@ class Test { def test = new Inner().foo } - """) + """ + + override def extraSettings = { + val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + s"-cp $classpath" + } + + def show(): Unit = { + deletePackage("p1/p2/p3") + deletePackage("p1/p2") + compile() assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) } - def deletePackage(name: String) { + def deletePackage(name: String): Unit = { val directory = new File(testOutput.path, name) for (f <- directory.listFiles()) { assert(f.getName.endsWith(".class")) diff --git a/test/files/run/sd275.scala b/test/files/run/sd275.scala index 8cdee3ae155e..b150b59afebe 100644 --- a/test/files/run/sd275.scala +++ b/test/files/run/sd275.scala @@ -2,15 +2,7 @@ import scala.tools.partest._ import java.io.File object Test extends StoreReporterDirectTest { - def code = ??? 
- - def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) - } - - def show(): Unit = { - compileCode(""" + def code = """ package sample { class A1 { @@ -29,12 +21,19 @@ package p1 { } } } - """) + """ + + override def extraSettings = { + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + s"-cp $classpath" + } + + def show(): Unit = { + compile() assert(filteredInfos.isEmpty, filteredInfos) deletePackage("p1/p2/p3") deletePackage("p1/p2") - - compileCode(""" + compileString(newCompiler())(""" package sample class Test { @@ -48,7 +47,7 @@ class Test { assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) // Included a MissingRequirementError before. } - def deletePackage(name: String) { + def deletePackage(name: String): Unit = { val directory = new File(testOutput.path, name) for (f <- directory.listFiles()) { assert(f.getName.endsWith(".class")) diff --git a/test/files/run/sd336.scala b/test/files/run/sd336.scala index 799455d45cf0..b1bd0a021e3f 100644 --- a/test/files/run/sd336.scala +++ b/test/files/run/sd336.scala @@ -1,3 +1,4 @@ +//AbstractMethodError with missing bridge for path-dependent type object Test { final def main(args: Array[String]): Unit = { val f: A => Any = { a => @@ -7,7 +8,7 @@ object Test { f(new A(new B)) } - def foo[A, B](f: A => B, a: A): B = f(a) + def foo[X, Y](f: X => Y, x: X): Y = f(x) } class A(val b: B) diff --git a/test/files/run/sd455.check b/test/files/run/sd455.check new file mode 100644 index 000000000000..a3de8ced7009 --- /dev/null +++ b/test/files/run/sd455.check @@ -0,0 +1,3 @@ +true +false +default diff --git a/test/files/run/sd455.scala b/test/files/run/sd455.scala new file mode 100644 index 000000000000..25cd05052a59 --- /dev/null +++ b/test/files/run/sd455.scala @@ -0,0 +1,10 @@ +object Const { final val VAL = 1 ; final val VAR = 2 
} +import Const._ +object Test { + def test(i: Int) = i match { case v @ (VAR | VAL) => v == VAR case _ => "default" } + def main(args: Array[String]): Unit = { + println(test(VAR)) + println(test(VAL)) + println(test(-1)) + } +} diff --git a/test/files/run/sd760a.scala b/test/files/run/sd760a.scala deleted file mode 100644 index 5db397752df3..000000000000 --- a/test/files/run/sd760a.scala +++ /dev/null @@ -1,12 +0,0 @@ -import scala.reflect.ClassTag - -object Test { - def main(args: Array[String]): Unit = { - assert(apply[String]("") == classOf[Array[String]]) - assert(apply[Double](1d) == classOf[Array[Double]]) - } - - def apply[@specialized(Double) C: ClassTag](c: C): Class[_] = { - Array(c).getClass - } -} diff --git a/test/files/run/sd760b.scala b/test/files/run/sd760b.scala deleted file mode 100644 index fae0e9cf8a6d..000000000000 --- a/test/files/run/sd760b.scala +++ /dev/null @@ -1,11 +0,0 @@ -import scala.reflect.ClassTag - -object Test { - def main(args: Array[String]): Unit = { - assert(apply[Double](1d) == classOf[Array[Double]]) - } - - def apply[D <: Double: ClassTag](d: D): Class[_] = { - Array.apply[D](d).getClass - } -} diff --git a/test/files/run/sd884.check b/test/files/run/sd884.check new file mode 100644 index 000000000000..d64c0b9305bd --- /dev/null +++ b/test/files/run/sd884.check @@ -0,0 +1,186 @@ + +scala> import annotation._, scala.util.chaining._ +import annotation._ +import scala.util.chaining._ + +scala> class ann(x: Int = 1, y: Int = 2) extends Annotation +class ann + +scala> class naa(x: Int = 1, y: Int = 2) extends Annotation { + def this(s: String) = this(1, 2) +} +class naa + +scala> class mul(x: Int = 1, y: Int = 2)(z: Int = 3, zz: Int = 4) extends Annotation +class mul + +scala> class kon(x: Int = 1, y: Int = 2) extends ConstantAnnotation +class kon + +scala> class rann(x: Int = 1.tap(println), y: Int) extends Annotation +class rann + +scala> class C { + val a = 1 + val b = 2 + + @ann(y = b, x = a) def m1 = 1 + + @ann(x = a) def 
m2 = 1 + @ann(y = b) def m3 = 1 + + @naa(a, b) def m4 = 1 + @naa(y = b, x = a) def m5 = 1 + @naa("") def m6 = 1 + + // warn, only first argument list is kept + @mul(a, b)(a, b) def m7 = 1 + @mul(y = b)(a, b) def m8 = 1 + @mul(y = b, x = a)(zz = b) def m9 = 1 + @mul(y = b)(zz = b) def m10 = 1 + + @kon(y = 22) def m11 = 1 + @kon(11) def m12 = 1 +} + @mul(a, b)(a, b) def m7 = 1 + ^ +On line 15: warning: Implementation limitation: multiple argument lists on annotations are + currently not supported; ignoring arguments List(C.this.a, C.this.b) + @mul(y = b)(a, b) def m8 = 1 + ^ +On line 16: warning: Implementation limitation: multiple argument lists on annotations are + currently not supported; ignoring arguments List(C.this.a, C.this.b) + @mul(y = b, x = a)(zz = b) def m9 = 1 + ^ +On line 17: warning: Implementation limitation: multiple argument lists on annotations are + currently not supported; ignoring arguments List(3, C.this.b) + @mul(y = b)(zz = b) def m10 = 1 + ^ +On line 18: warning: Implementation limitation: multiple argument lists on annotations are + currently not supported; ignoring arguments List(3, C.this.b) +class C + +scala> :power +Power mode enabled. :phase is at typer. 
+import scala.tools.nsc._, intp.global._, definitions._ +Try :help or completions for vals._ and power._ + +scala> println(typeOf[C].members.toList.filter(_.name.startsWith("m")).sortBy(_.name).map(_.annotations.head).mkString("\n")) +ann(C.this.a, C.this.b) +mul(1, C.this.b) +kon(y = 22) +kon(x = 11) +ann(C.this.a, 2) +ann(1, C.this.b) +naa(C.this.a, C.this.b) +naa(C.this.a, C.this.b) +naa("") +mul(C.this.a, C.this.b) +mul(1, C.this.b) +mul(C.this.a, C.this.b) + +scala> val i6 = typeOf[C].member(TermName("m6")).annotations.head +val i6: $r.intp.global.AnnotationInfo = naa("") + +scala> i6.constructorSymbol(global.typer.typed).paramss +val res1: List[List[$r.intp.global.Symbol]] = List(List(value s)) + +scala> val i11 = typeOf[C].member(TermName("m11")).annotations.head +val i11: $r.intp.global.AnnotationInfo = kon(y = 22) + +scala> i11.assocs +val res2: List[($r.intp.global.Name, $r.intp.global.ClassfileAnnotArg)] = List((y,22)) + +scala> i11.assocsWithDefaults +val res3: List[($r.intp.global.Name, $r.intp.global.ClassfileAnnotArg)] = List((x,1), (y,22)) + +scala> val i3 = typeOf[C].member(TermName("m3")).annotations.head +val i3: $r.intp.global.AnnotationInfo = ann(1, C.this.b) + +scala> i3.args.map(_.tpe) +val res4: List[$r.intp.global.Type] = List(Int(1) @scala.annotation.meta.defaultArg, Int) + +scala> i3.args.map(i3.argIsDefault) +val res5: List[Boolean] = List(true, false) + +scala> // ordinary named/default args when using annotation class in executed code + +scala> new rann(y = 2.tap(println)); () // prints 2, then the default 1 +2 +1 + +scala> @rann(y = {new rann(y = 2.tap(println)); 2}) class r1 +class r1 + +scala> println(typeOf[r1].typeSymbol.annotations.head.args) +List(scala.util.`package`.chaining.scalaUtilChainingOps[Int](1).tap[Unit](((x: Any) => scala.Predef.println(x))), { + { + val x$1: Int = scala.util.`package`.chaining.scalaUtilChainingOps[Int](2).tap[Unit](((x: Any) => scala.Predef.println(x))); + val x$2: Int = 
$line17.$read.INSTANCE.$iw.rann.$default$1; + new $line17.$read.INSTANCE.$iw.rann(x$2, x$1) + }; + 2 +}) + +scala> // subclassing + +scala> class sub1(z: Int = 3) extends ann(11, z) +class sub1 + +scala> class sub2(z: Int = 3) extends ann(y = z) +class sub2 + +scala> class suk(z: Int = 3) extends kon(y = 22) +class suk + +scala> class sum(z: Int) extends mul(11, 22)(z) +class sum + +scala> println(typeOf[sub1].typeSymbol.annotations) +List(scala.annotation.meta.superArg("x", 11), scala.annotation.meta.superFwdArg("y", "z")) + +scala> println(typeOf[sub2].typeSymbol.annotations) +List(scala.annotation.meta.superArg("x", 1), scala.annotation.meta.superFwdArg("y", "z")) + +scala> println(typeOf[suk].typeSymbol.annotations) +List(scala.annotation.meta.superArg("y", 22)) + +scala> println(typeOf[sum].typeSymbol.annotations) // none +List() + +scala> class D { + val a = 1 + + @sub1() def m1 = 1 + @sub1(a) def m2 = 1 + @sub2 def m3 = 1 + @sub2(33) def m4 = 1 + + @suk() def k1 = 1 + @suk(33) def k2 = 1 +} +class D + +scala> val ms = typeOf[D].members.toList.filter(_.name.startsWith("m")).sortBy(_.name).map(_.annotations.head) +val ms: List[$r.intp.global.AnnotationInfo] = List(sub1(3), sub1(D.this.a), sub2(3), sub2(33)) + +scala> ms.foreach(m => {println(m.args); println(m.argsForSuper(typeOf[ann].typeSymbol)) }) +List(3) +List(11, 3) +List(D.this.a) +List(11, D.this.a) +List(3) +List(1, 3) +List(33) +List(1, 33) + +scala> val ks = typeOf[D].members.toList.filter(_.name.startsWith("k")).sortBy(_.name).map(_.annotations.head) +val ks: List[$r.intp.global.AnnotationInfo] = List(suk, suk(z = 33)) + +scala> ks.foreach(k => {println(k.assocs); println(k.assocsForSuper(typeOf[kon].typeSymbol)) }) +List() +List((y,22)) +List((z,33)) +List((y,22)) + +scala> :quit diff --git a/test/files/run/sd884.scala b/test/files/run/sd884.scala new file mode 100644 index 000000000000..ec45116e7440 --- /dev/null +++ b/test/files/run/sd884.scala @@ -0,0 +1,74 @@ +import 
scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def code = + """import annotation._, scala.util.chaining._ + |class ann(x: Int = 1, y: Int = 2) extends Annotation + |class naa(x: Int = 1, y: Int = 2) extends Annotation { + | def this(s: String) = this(1, 2) + |} + |class mul(x: Int = 1, y: Int = 2)(z: Int = 3, zz: Int = 4) extends Annotation + |class kon(x: Int = 1, y: Int = 2) extends ConstantAnnotation + |class rann(x: Int = 1.tap(println), y: Int) extends Annotation + |class C { + | val a = 1 + | val b = 2 + | + | @ann(y = b, x = a) def m1 = 1 + | + | @ann(x = a) def m2 = 1 + | @ann(y = b) def m3 = 1 + | + | @naa(a, b) def m4 = 1 + | @naa(y = b, x = a) def m5 = 1 + | @naa("") def m6 = 1 + | + | // warn, only first argument list is kept + | @mul(a, b)(a, b) def m7 = 1 + | @mul(y = b)(a, b) def m8 = 1 + | @mul(y = b, x = a)(zz = b) def m9 = 1 + | @mul(y = b)(zz = b) def m10 = 1 + | + | @kon(y = 22) def m11 = 1 + | @kon(11) def m12 = 1 + |} + |:power + |println(typeOf[C].members.toList.filter(_.name.startsWith("m")).sortBy(_.name).map(_.annotations.head).mkString("\n")) + |val i6 = typeOf[C].member(TermName("m6")).annotations.head + |i6.constructorSymbol(global.typer.typed).paramss + |val i11 = typeOf[C].member(TermName("m11")).annotations.head + |i11.assocs + |i11.assocsWithDefaults + |val i3 = typeOf[C].member(TermName("m3")).annotations.head + |i3.args.map(_.tpe) + |i3.args.map(i3.argIsDefault) + |// ordinary named/default args when using annotation class in executed code + |new rann(y = 2.tap(println)); () // prints 2, then the default 1 + |@rann(y = {new rann(y = 2.tap(println)); 2}) class r1 + |println(typeOf[r1].typeSymbol.annotations.head.args) + |// subclassing + |class sub1(z: Int = 3) extends ann(11, z) + |class sub2(z: Int = 3) extends ann(y = z) + |class suk(z: Int = 3) extends kon(y = 22) + |class sum(z: Int) extends mul(11, 22)(z) + |println(typeOf[sub1].typeSymbol.annotations) + |println(typeOf[sub2].typeSymbol.annotations) 
+ |println(typeOf[suk].typeSymbol.annotations) + |println(typeOf[sum].typeSymbol.annotations) // none + |class D { + | val a = 1 + | + | @sub1() def m1 = 1 + | @sub1(a) def m2 = 1 + | @sub2 def m3 = 1 + | @sub2(33) def m4 = 1 + | + | @suk() def k1 = 1 + | @suk(33) def k2 = 1 + |} + |val ms = typeOf[D].members.toList.filter(_.name.startsWith("m")).sortBy(_.name).map(_.annotations.head) + |ms.foreach(m => {println(m.args); println(m.argsForSuper(typeOf[ann].typeSymbol)) }) + |val ks = typeOf[D].members.toList.filter(_.name.startsWith("k")).sortBy(_.name).map(_.annotations.head) + |ks.foreach(k => {println(k.assocs); println(k.assocsForSuper(typeOf[kon].typeSymbol)) }) + |""".stripMargin +} diff --git a/test/files/run/sd884b.check b/test/files/run/sd884b.check new file mode 100644 index 000000000000..f3369404f848 --- /dev/null +++ b/test/files/run/sd884b.check @@ -0,0 +1,54 @@ + +scala> class B { + @ann(x = 11) def m1 = 1 + @ann(y = 22) def m2 = 1 + + @kon(x = 11) def k1 = 1 + @kon(y = 22) def k2 = 1 +} +class B + +scala> :power +Power mode enabled. :phase is at typer. 
+import scala.tools.nsc._, intp.global._, definitions._ +Try :help or completions for vals._ and power._ + +scala> def t(tp: Type) = { + val ms = tp.members.toList.filter(_.name.startsWith("m")).sortBy(_.name) + for (m <- ms) { + val i = m.annotations.head + println(i) + println(i.args.map(_.tpe)) + println(i.args.map(i.argIsDefault)) + } + val ks = tp.members.toList.filter(_.name.startsWith("k")).sortBy(_.name) + ks.foreach(k => println(k.annotations.head)) + ks.foreach(k => println(k.annotations.head.assocsWithDefaults)) +} +def t(tp: $r.intp.global.Type): Unit + +scala> t(typeOf[A]) +ann(11, T.i) +List(Int, Int @scala.annotation.meta.defaultArg) +List(false, true) +ann(1, 22) +List(Int(1) @scala.annotation.meta.defaultArg, Int) +List(true, false) +kon(x = 11) +kon(y = 22) +List((x,11), (y,2)) +List((x,1), (y,22)) + +scala> t(typeOf[B]) +ann(11, T.i) +List(Int(11), Int @scala.annotation.meta.defaultArg) +List(false, true) +ann(1, 22) +List(Int @scala.annotation.meta.defaultArg, Int(22)) +List(true, false) +kon(x = 11) +kon(y = 22) +List((x,11), (y,2)) +List((x,1), (y,22)) + +scala> :quit diff --git a/test/files/run/sd884b/A.scala b/test/files/run/sd884b/A.scala new file mode 100644 index 000000000000..77524c80ff98 --- /dev/null +++ b/test/files/run/sd884b/A.scala @@ -0,0 +1,12 @@ +class ann(x: Int = 1, y: Int = T.i) extends annotation.StaticAnnotation +class kon(x: Int = 1, y: Int = 2) extends annotation.ConstantAnnotation + +object T { def i = 0 } + +class A { + @ann(x = 11) def m1 = 1 + @ann(y = 22) def m2 = 1 + + @kon(x = 11) def k1 = 1 + @kon(y = 22) def k2 = 1 +} diff --git a/test/files/run/sd884b/Test_1.scala b/test/files/run/sd884b/Test_1.scala new file mode 100644 index 000000000000..84e619eae845 --- /dev/null +++ b/test/files/run/sd884b/Test_1.scala @@ -0,0 +1,28 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def code = + """class B { + | @ann(x = 11) def m1 = 1 + | @ann(y = 22) def m2 = 1 + | + | @kon(x = 11) def k1 
= 1 + | @kon(y = 22) def k2 = 1 + |} + |:power + |def t(tp: Type) = { + | val ms = tp.members.toList.filter(_.name.startsWith("m")).sortBy(_.name) + | for (m <- ms) { + | val i = m.annotations.head + | println(i) + | println(i.args.map(_.tpe)) + | println(i.args.map(i.argIsDefault)) + | } + | val ks = tp.members.toList.filter(_.name.startsWith("k")).sortBy(_.name) + | ks.foreach(k => println(k.annotations.head)) + | ks.foreach(k => println(k.annotations.head.assocsWithDefaults)) + |} + |t(typeOf[A]) + |t(typeOf[B]) + |""".stripMargin +} diff --git a/test/files/run/search.check b/test/files/run/search.check deleted file mode 100644 index e0c55043e3ec..000000000000 --- a/test/files/run/search.check +++ /dev/null @@ -1,6 +0,0 @@ -Found(2) -Found(4) -InsertionPoint(10) -Found(2) -Found(4) -InsertionPoint(10) diff --git a/test/files/run/search.scala b/test/files/run/search.scala deleted file mode 100644 index ed7fed54a707..000000000000 --- a/test/files/run/search.scala +++ /dev/null @@ -1,14 +0,0 @@ -object Test extends App { - import scala.collection.{LinearSeq, IndexedSeq} - import scala.collection.Searching.search - - val ls = LinearSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) - println(ls.search(3)) - println(ls.search(5, 3, 8)) - println(ls.search(12)) - - val is = IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) - println(is.search(3)) - println(is.search(5, 3, 8)) - println(is.search(12)) -} diff --git a/test/files/run/sequenceComparisons.scala b/test/files/run/sequenceComparisons.scala index 613b37f45ab6..183b7257412a 100644 --- a/test/files/run/sequenceComparisons.scala +++ b/test/files/run/sequenceComparisons.scala @@ -1,5 +1,6 @@ +//> using options -deprecation import scala.collection.{ mutable, immutable } -import collection.{ Seq, Traversable } +import collection.Seq object Test { // TODO: @@ -13,7 +14,6 @@ object Test { mutable.ArrayBuffer(_: _*), // mutable.ArrayStack(_: _*), mutable.Buffer(_: _*), - mutable.LinearSeq(_: _*), // null on Nil // 
mutable.LinkedList(_: _*), mutable.ListBuffer(_: _*), @@ -22,12 +22,11 @@ object Test { // mutable.Queue(_: _*), immutable.Seq(_: _*), mutable.Seq(_: _*), - immutable.Stack(_: _*), // mutable.Stack(_: _*), immutable.IndexedSeq(_: _*), // was Vector //mutable.Vector(_: _*), immutable.List(_: _*), - immutable.Stream(_: _*) + immutable.LazyList(_: _*) ) abstract class Data[T] { @@ -89,7 +88,7 @@ object Test { val failures = new mutable.ListBuffer[String] var testCount = 0 - def assertOne(op1: Any, op2: Any, res: Boolean, str: String) { + def assertOne(op1: Any, op2: Any, res: Boolean, str: String): Unit = { testCount += 1 val resStr = str.format(op1, op2) // println(resStr) @@ -113,7 +112,7 @@ object Test { } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { runSeqs() assert(failures.isEmpty, failures mkString "\n") diff --git a/test/files/run/serialize-stream.check b/test/files/run/serialize-stream.check index e2a9f57aa776..6a8cc16cc694 100644 --- a/test/files/run/serialize-stream.check +++ b/test/files/run/serialize-stream.check @@ -1,6 +1,13 @@ -Stream(1, ?) +warning: 3 deprecations (since 2.13.0); re-run with -deprecation for details +Stream(1, ) List(1, 2, 3) -Stream(1, ?) 
+Stream(1, ) List(1) Stream() List() +LazyList() +List(1, 2, 3) +LazyList() +List(1) +LazyList() +List() diff --git a/test/files/run/serialize-stream.scala b/test/files/run/serialize-stream.scala index 3ab9f2df3407..a8fb008f0788 100644 --- a/test/files/run/serialize-stream.scala +++ b/test/files/run/serialize-stream.scala @@ -1,7 +1,7 @@ object Test { - def ser[T](s: Stream[T]) { + def ser[T](s: Seq[T]): Unit = { val bos = new java.io.ByteArrayOutputStream() val oos = new java.io.ObjectOutputStream(bos) oos.writeObject(s) @@ -12,9 +12,13 @@ object Test { println(obj.asInstanceOf[Seq[T]].toList) } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { ser(Stream(1, 2, 3)) ser(Stream(1)) ser(Stream()) + + ser(LazyList(1, 2, 3)) + ser(LazyList(1)) + ser(LazyList()) } } diff --git a/test/files/run/settings-parse.check b/test/files/run/settings-parse.check index 03c363b55186..10401a66435b 100644 --- a/test/files/run/settings-parse.check +++ b/test/files/run/settings-parse.check @@ -12,55 +12,46 @@ 3) List(-cp, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 4) List(-cp, , , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 5) List(-cp, , -deprecation, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 6) List(, -cp, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 7) List(-cp, , -deprecation, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 8) List(-cp, , , -deprecation, foo.scala) ==> Settings { -deprecation = true - -Wconf = 
List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 9) List(-cp, , -deprecation, , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 10) List(-cp, , -deprecation, foo.scala, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 11) List(, -cp, , -deprecation, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } @@ -82,115 +73,96 @@ 16) List(-cp, , foo.scala, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 17) List(-cp, , , foo.scala, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 18) List(-cp, , foo.scala, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 19) List(-cp, , foo.scala, -deprecation, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 20) List(, -cp, , foo.scala, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 21) List(-deprecation, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 22) List(, -deprecation, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 23) List(-deprecation, -cp, , ) ==> Settings { -deprecation 
= true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 24) List(-deprecation, , -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 25) List(-deprecation, -cp, , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 26) List(, -deprecation, -cp, , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 27) List(-deprecation, -cp, , , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 28) List(-deprecation, -cp, , foo.scala, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 29) List(-deprecation, , -cp, , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 30) List(-deprecation, foo.scala, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 31) List(, -deprecation, foo.scala, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 32) List(-deprecation, , foo.scala, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 33) List(-deprecation, foo.scala, -cp, , ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 34) List(-deprecation, foo.scala, , -cp, ) ==> 
Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } @@ -212,61 +184,51 @@ 39) List(foo.scala, -cp, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 40) List(, foo.scala, -cp, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 41) List(foo.scala, -cp, , , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 42) List(foo.scala, -cp, , -deprecation, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 43) List(foo.scala, , -cp, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 44) List(foo.scala, -deprecation, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 45) List(, foo.scala, -deprecation, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 46) List(foo.scala, , -deprecation, -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 47) List(foo.scala, -deprecation, -cp, , ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = "" } 48) List(foo.scala, -deprecation, , -cp, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath 
= "" } @@ -284,55 +246,46 @@ 3) List(-cp, /tmp:/bippy, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 4) List(-cp, /tmp:/bippy, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 5) List(-cp, /tmp:/bippy, -deprecation, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 6) List(, -cp, /tmp:/bippy, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 7) List(-cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 8) List(-cp, /tmp:/bippy, , -deprecation, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 9) List(-cp, /tmp:/bippy, -deprecation, , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 10) List(-cp, /tmp:/bippy, -deprecation, foo.scala, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 11) List(, -cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } @@ -354,115 +307,96 @@ 16) List(-cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, 
cat=optimizer:ws) -classpath = /tmp:/bippy } 17) List(-cp, /tmp:/bippy, , foo.scala, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 18) List(-cp, /tmp:/bippy, foo.scala, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 19) List(-cp, /tmp:/bippy, foo.scala, -deprecation, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 20) List(, -cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 21) List(-deprecation, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 22) List(, -deprecation, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 23) List(-deprecation, -cp, /tmp:/bippy, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 24) List(-deprecation, , -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 25) List(-deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 26) List(, -deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, 
cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 27) List(-deprecation, -cp, /tmp:/bippy, , foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 28) List(-deprecation, -cp, /tmp:/bippy, foo.scala, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 29) List(-deprecation, , -cp, /tmp:/bippy, foo.scala) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 30) List(-deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 31) List(, -deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 32) List(-deprecation, , foo.scala, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 33) List(-deprecation, foo.scala, -cp, /tmp:/bippy, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 34) List(-deprecation, foo.scala, , -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } @@ -484,61 +418,51 @@ 39) List(foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 40) List(, foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings { 
-deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 41) List(foo.scala, -cp, /tmp:/bippy, , -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 42) List(foo.scala, -cp, /tmp:/bippy, -deprecation, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 43) List(foo.scala, , -cp, /tmp:/bippy, -deprecation) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 44) List(foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 45) List(, foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 46) List(foo.scala, , -deprecation, -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 47) List(foo.scala, -deprecation, -cp, /tmp:/bippy, ) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } 48) List(foo.scala, -deprecation, , -cp, /tmp:/bippy) ==> Settings { -deprecation = true - -Wconf = List(cat=deprecation:w, cat=deprecation:ws, cat=feature:ws, cat=optimizer:ws) -classpath = /tmp:/bippy } diff --git a/test/files/run/settings-parse.scala b/test/files/run/settings-parse.scala index 0697e932faa9..13c84ccc03e6 100644 --- a/test/files/run/settings-parse.scala +++ b/test/files/run/settings-parse.scala @@ -3,7 +3,7 
@@ import scala.tools.nsc.settings.MutableSettings object Test { val tokens = "" :: "-deprecation" :: "foo.scala" :: Nil - val permutations0 = tokens.toSet.subsets.flatMap(_.toList.permutations).toList.distinct + val permutations0 = tokens.toSet.subsets().flatMap(_.toList.permutations).toList.distinct def runWithCp(cp: String) = { val permutations = permutations0.flatMap(s => ("-cp CPTOKEN" :: s).permutations) diff --git a/test/files/run/shortClass.scala b/test/files/run/shortClass.scala index b7bb0168963a..c1c041308a45 100644 --- a/test/files/run/shortClass.scala +++ b/test/files/run/shortClass.scala @@ -17,8 +17,8 @@ object Test { def main(args: Array[String]): Unit = { val f = new Foo - val instances = List(f, new f.Bar, f.Bar, new Foo with DingDongBippy, new f.Bar with DingDongBippy) - instances map (_.getClass.getName) foreach println - instances map shortClassOfInstance foreach println + val instances = List[AnyRef](f, new f.Bar, f.Bar, new Foo with DingDongBippy, new f.Bar with DingDongBippy) + instances.map(_.getClass.getName).foreach(println) + instances.map(shortClassOfInstance).foreach(println) } } diff --git a/test/files/run/showdecl/Macros_1.scala b/test/files/run/showdecl/Macros_1.scala index 89b8e8d3c20d..235b6caa5253 100644 --- a/test/files/run/showdecl/Macros_1.scala +++ b/test/files/run/showdecl/Macros_1.scala @@ -27,4 +27,4 @@ object Macros { } def foo: Any = macro impl -} \ No newline at end of file +} diff --git a/test/files/run/showraw_aliases.scala b/test/files/run/showraw_aliases.scala index 56bd13707d66..b78a72b2e587 100644 --- a/test/files/run/showraw_aliases.scala +++ b/test/files/run/showraw_aliases.scala @@ -14,4 +14,4 @@ object Test extends App { def stabilize(s: String) = stabilizePositions(stabilizeIds(s)) println(stabilize(showRaw(ttree))) println(stabilize(showRaw(ttree, printIds = true))) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_mods.scala b/test/files/run/showraw_mods.scala index 
a10e4821dc9a..77d80f57f6eb 100644 --- a/test/files/run/showraw_mods.scala +++ b/test/files/run/showraw_mods.scala @@ -3,4 +3,4 @@ import scala.reflect.runtime.universe._ object Test extends App { val tree = reify{trait C { private[this] val x = 2; var y = x; lazy val z = y }} println(showRaw(tree.tree)) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_nosymbol.scala b/test/files/run/showraw_nosymbol.scala index fbdc1591c9eb..d7f3f7c250c7 100644 --- a/test/files/run/showraw_nosymbol.scala +++ b/test/files/run/showraw_nosymbol.scala @@ -2,4 +2,4 @@ import scala.reflect.runtime.universe._ object Test extends App { println(showRaw(NoSymbol)) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check index d8cb1fde026f..d2cea249acc9 100644 --- a/test/files/run/showraw_tree.check +++ b/test/files/run/showraw_tree.check @@ -1,2 +1,2 @@ -Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List()) +Apply(Select(New(AppliedTypeTree(Select(Ident(scala.collection.immutable.ArraySeq), TypeName("ofRef")), List(Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List(Apply(Apply(Select(Ident(scala.Array), TermName("apply")), List()), List(Select(Ident(scala.Predef), TermName("implicitly")))))) Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List()) diff --git a/test/files/run/showraw_tree.scala b/test/files/run/showraw_tree.scala index 3624a24d6a03..c508f466df25 100644 --- a/test/files/run/showraw_tree.scala +++ b/test/files/run/showraw_tree.scala @@ -1,8 +1,8 @@ import scala.reflect.runtime.universe._ object Test extends App { - val tree1 = reify(new 
collection.immutable.HashMap[String, String]) + val tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) println(showRaw(tree1.tree)) println(showRaw(tree2.tree)) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check index d7a7aa5959ad..572c86d0a22c 100644 --- a/test/files/run/showraw_tree_ids.check +++ b/test/files/run/showraw_tree_ids.check @@ -1,2 +1,2 @@ -Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#), List(Select(Ident(scala.Predef#), TypeName("String")), Select(Ident(scala.Predef#), TypeName("String"))))), termNames.CONSTRUCTOR), List()) +Apply(Select(New(AppliedTypeTree(Select(Ident(scala.collection.immutable.ArraySeq#), TypeName("ofRef")), List(Select(Ident(scala.Predef#), TypeName("String"))))), termNames.CONSTRUCTOR), List(Apply(Apply(Select(Ident(scala.Array#), TermName("apply")), List()), List(Select(Ident(scala.Predef#), TermName("implicitly")))))) Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#), List(Select(Ident(scala.Predef#), TypeName("String")), Select(Ident(scala.Predef#), TypeName("String"))))), termNames.CONSTRUCTOR), List()) diff --git a/test/files/run/showraw_tree_ids.scala b/test/files/run/showraw_tree_ids.scala index ea9a3cd4c22d..c829f7f5d8c2 100644 --- a/test/files/run/showraw_tree_ids.scala +++ b/test/files/run/showraw_tree_ids.scala @@ -1,9 +1,9 @@ import scala.reflect.runtime.universe._ object Test extends App { - val tree1 = reify(new collection.immutable.HashMap[String, String]) + val tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#") println(stabilize(showRaw(tree1.tree, printIds = true))) println(stabilize(showRaw(tree2.tree, printIds = true))) -} \ No newline at end of file +} 
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check index 85939b02f03e..218743591c82 100644 --- a/test/files/run/showraw_tree_kinds.check +++ b/test/files/run/showraw_tree_kinds.check @@ -1,2 +1,2 @@ -Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), TypeName("String")), Select(Ident(scala.Predef#MOD), TypeName("String"))))), termNames.CONSTRUCTOR), List()) +Apply(Select(New(AppliedTypeTree(Select(Ident(scala.collection.immutable.ArraySeq#MOD), TypeName("ofRef")), List(Select(Ident(scala.Predef#MOD), TypeName("String"))))), termNames.CONSTRUCTOR), List(Apply(Apply(Select(Ident(scala.Array#MOD), TermName("apply")), List()), List(Select(Ident(scala.Predef#MOD), TermName("implicitly")))))) Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), TypeName("String")), Select(Ident(scala.Predef#MOD), TypeName("String"))))), termNames.CONSTRUCTOR), List()) diff --git a/test/files/run/showraw_tree_kinds.scala b/test/files/run/showraw_tree_kinds.scala index 0ca5a387da6b..506028c0d026 100644 --- a/test/files/run/showraw_tree_kinds.scala +++ b/test/files/run/showraw_tree_kinds.scala @@ -1,8 +1,8 @@ import scala.reflect.runtime.universe._ object Test extends App { - val tree1 = reify(new collection.immutable.HashMap[String, String]) + val tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) println(showRaw(tree1.tree, printKinds = true)) println(showRaw(tree2.tree, printKinds = true)) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check index e122148040bf..7f8d6539ff38 100644 --- a/test/files/run/showraw_tree_types_ids.check +++ b/test/files/run/showraw_tree_types_ids.check @@ -1,9 +1,22 @@ 
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#), TypeName("String")#)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#), TypeName("String")#)))))), termNames.CONSTRUCTOR#), List()) -[1] TypeRef(ThisType(scala.collection.immutable#), scala.collection.immutable.HashMap#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()), TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()))) -[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#), scala.collection.immutable.HashMap#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()), TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List())))) -[3] TypeRef(ThisType(scala.collection.immutable#), scala.collection.immutable.HashMap#, List()) -[4] TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()) -[5] SingleType(ThisType(scala#), scala.Predef#) +Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Select[3](Ident[4](scala.collection.immutable.ArraySeq#), scala.collection.immutable.ArraySeq.ofRef#), List(TypeTree[5]().setOriginal(Select[5](Ident[6](scala.Predef#), TypeName("String")#)))))), termNames.CONSTRUCTOR#), List(Apply[7](Apply[8](TypeApply[9](Select[10](Ident[11](scala.Array#), TermName("apply")#), List(TypeTree[5]())), List()), List(Apply[12](TypeApply[13](Select[14](Ident[6](scala.Predef#), TermName("implicitly")#), List(TypeTree[12]())), List(Typed[12](Apply[12](TypeApply[15](Select[16](Ident[17](scala.reflect.ClassTag#), TermName("apply")#), List(TypeTree[5]())), List(Literal[18](Constant(String)))), TypeTree[12]()))))))) +[1] TypeRef(SingleType(ThisType(scala.collection.immutable#), scala.collection.immutable.ArraySeq#), scala.collection.immutable.ArraySeq.ofRef#, 
List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()))) +[2] MethodType(List(TermName("unsafeArray")#), TypeRef(SingleType(ThisType(scala.collection.immutable#), scala.collection.immutable.ArraySeq#), scala.collection.immutable.ArraySeq.ofRef#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List())))) +[3] TypeRef(SingleType(ThisType(scala.collection.immutable#), scala.collection.immutable.ArraySeq#), scala.collection.immutable.ArraySeq.ofRef#, List()) +[4] SingleType(ThisType(scala.collection.immutable#), scala.collection.immutable.ArraySeq#) +[5] TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()) +[6] SingleType(ThisType(scala#), scala.Predef#) +[7] TypeRef(ThisType(scala#), scala.Array#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()))) +[8] MethodType(List(TermName("evidence$5")#), TypeRef(ThisType(scala#), scala.Array#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List())))) +[9] MethodType(List(TermName("xs")#), MethodType(List(TermName("evidence$5")#), TypeRef(ThisType(scala#), scala.Array#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()))))) +[10] PolyType(List(TypeName("T")#), MethodType(List(TermName("xs")#), MethodType(List(TermName("evidence$5")#), TypeRef(ThisType(scala#), scala.Array#, List(TypeRef(NoPrefix, TypeName("T")#, List())))))) +[11] SingleType(ThisType(scala#), scala.Array#) +[12] TypeRef(ThisType(scala.reflect#), scala.reflect.ClassTag#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()))) +[13] MethodType(List(TermName("e")#), TypeRef(ThisType(scala.reflect#), scala.reflect.ClassTag#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List())))) +[14] PolyType(List(TypeName("T")#), MethodType(List(TermName("e")#), TypeRef(NoPrefix, TypeName("T")#, List()))) +[15] 
MethodType(List(TermName("runtimeClass1")#), TypeRef(ThisType(scala.reflect#), scala.reflect.ClassTag#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List())))) +[16] PolyType(List(TypeName("T")#), MethodType(List(TermName("runtimeClass1")#), TypeRef(ThisType(scala.reflect#), scala.reflect.ClassTag#, List(TypeRef(NoPrefix, TypeName("T")#, List()))))) +[17] SingleType(ThisType(scala.reflect#), scala.reflect.ClassTag#) +[18] FoldableConstantType(Constant(TypeRef(ThisType(java.lang#), java.lang.String#, List()))) Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.mutable.HashMap#), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#), TypeName("String")#)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#), TypeName("String")#)))))), termNames.CONSTRUCTOR#), List()) [1] TypeRef(ThisType(scala.collection.mutable#), scala.collection.mutable.HashMap#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()), TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()))) [2] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#), scala.collection.mutable.HashMap#, List(TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List()), TypeRef(SingleType(ThisType(scala#), scala.Predef#), TypeName("String")#, List())))) diff --git a/test/files/run/showraw_tree_types_ids.scala b/test/files/run/showraw_tree_types_ids.scala index 883af0110ad7..e314cad948c5 100644 --- a/test/files/run/showraw_tree_types_ids.scala +++ b/test/files/run/showraw_tree_types_ids.scala @@ -3,9 +3,9 @@ import scala.tools.reflect.ToolBox object Test extends App { val tb = runtimeMirror(getClass.getClassLoader).mkToolBox() - val tree1 = reify(new collection.immutable.HashMap[String, String]) + val tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) 
def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#") println(stabilize(showRaw(tb.typecheck(tree1.tree), printIds = true, printTypes = true))) println(stabilize(showRaw(tb.typecheck(tree2.tree), printIds = true, printTypes = true))) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check index 4934ed41dcb6..3c348052b157 100644 --- a/test/files/run/showraw_tree_types_typed.check +++ b/test/files/run/showraw_tree_types_typed.check @@ -1,9 +1,22 @@ -Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List()) -[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))) -[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))) -[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List()) -[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()) -[5] SingleType(ThisType(scala), scala.Predef) +Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Select[3](Ident[4](scala.collection.immutable.ArraySeq), scala.collection.immutable.ArraySeq.ofRef), List(TypeTree[5]().setOriginal(Select[5](Ident[6](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List(Apply[7](Apply[8](TypeApply[9](Select[10](Ident[11](scala.Array), TermName("apply")), 
List(TypeTree[5]())), List()), List(Apply[12](TypeApply[13](Select[14](Ident[6](scala.Predef), TermName("implicitly")), List(TypeTree[12]())), List(Typed[12](Apply[12](TypeApply[15](Select[16](Ident[17](scala.reflect.ClassTag), TermName("apply")), List(TypeTree[5]())), List(Literal[18](Constant(String)))), TypeTree[12]()))))))) +[1] TypeRef(SingleType(ThisType(scala.collection.immutable), scala.collection.immutable.ArraySeq), scala.collection.immutable.ArraySeq.ofRef, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))) +[2] MethodType(List(TermName("unsafeArray")), TypeRef(SingleType(ThisType(scala.collection.immutable), scala.collection.immutable.ArraySeq), scala.collection.immutable.ArraySeq.ofRef, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))) +[3] TypeRef(SingleType(ThisType(scala.collection.immutable), scala.collection.immutable.ArraySeq), scala.collection.immutable.ArraySeq.ofRef, List()) +[4] SingleType(ThisType(scala.collection.immutable), scala.collection.immutable.ArraySeq) +[5] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()) +[6] SingleType(ThisType(scala), scala.Predef) +[7] TypeRef(ThisType(scala), scala.Array, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))) +[8] MethodType(List(TermName("evidence$5")), TypeRef(ThisType(scala), scala.Array, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))) +[9] MethodType(List(TermName("xs")), MethodType(List(TermName("evidence$5")), TypeRef(ThisType(scala), scala.Array, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))) +[10] PolyType(List(TypeName("T")), MethodType(List(TermName("xs")), MethodType(List(TermName("evidence$5")), TypeRef(ThisType(scala), scala.Array, List(TypeRef(NoPrefix, TypeName("T"), List())))))) +[11] SingleType(ThisType(scala), scala.Array) +[12] TypeRef(ThisType(scala.reflect), 
scala.reflect.ClassTag, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))) +[13] MethodType(List(TermName("e")), TypeRef(ThisType(scala.reflect), scala.reflect.ClassTag, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))) +[14] PolyType(List(TypeName("T")), MethodType(List(TermName("e")), TypeRef(NoPrefix, TypeName("T"), List()))) +[15] MethodType(List(TermName("runtimeClass1")), TypeRef(ThisType(scala.reflect), scala.reflect.ClassTag, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))) +[16] PolyType(List(TypeName("T")), MethodType(List(TermName("runtimeClass1")), TypeRef(ThisType(scala.reflect), scala.reflect.ClassTag, List(TypeRef(NoPrefix, TypeName("T"), List()))))) +[17] SingleType(ThisType(scala.reflect), scala.reflect.ClassTag) +[18] FoldableConstantType(Constant(TypeRef(ThisType(java.lang), java.lang.String, List()))) Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List()) [1] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))) [2] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))) diff --git a/test/files/run/showraw_tree_types_typed.scala b/test/files/run/showraw_tree_types_typed.scala index 3dd696c77e7c..91b24930e837 100644 --- a/test/files/run/showraw_tree_types_typed.scala +++ 
b/test/files/run/showraw_tree_types_typed.scala @@ -3,8 +3,8 @@ import scala.tools.reflect.ToolBox object Test extends App { val tb = runtimeMirror(getClass.getClassLoader).mkToolBox() - val tree1 = reify(new collection.immutable.HashMap[String, String]) + val tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) println(showRaw(tb.typecheck(tree1.tree), printTypes = true)) println(showRaw(tb.typecheck(tree2.tree), printTypes = true)) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check index d8cb1fde026f..d2cea249acc9 100644 --- a/test/files/run/showraw_tree_types_untyped.check +++ b/test/files/run/showraw_tree_types_untyped.check @@ -1,2 +1,2 @@ -Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List()) +Apply(Select(New(AppliedTypeTree(Select(Ident(scala.collection.immutable.ArraySeq), TypeName("ofRef")), List(Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List(Apply(Apply(Select(Ident(scala.Array), TermName("apply")), List()), List(Select(Ident(scala.Predef), TermName("implicitly")))))) Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), termNames.CONSTRUCTOR), List()) diff --git a/test/files/run/showraw_tree_types_untyped.scala b/test/files/run/showraw_tree_types_untyped.scala index 4df2eb66b22c..0066fce34258 100644 --- a/test/files/run/showraw_tree_types_untyped.scala +++ b/test/files/run/showraw_tree_types_untyped.scala @@ -1,8 +1,8 @@ import scala.reflect.runtime.universe._ object Test extends App { - val tree1 = reify(new collection.immutable.HashMap[String, String]) + val 
tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) println(showRaw(tree1.tree, printTypes = true)) println(showRaw(tree2.tree, printTypes = true)) -} \ No newline at end of file +} diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check index b94d568a7579..7ec845950b39 100644 --- a/test/files/run/showraw_tree_ultimate.check +++ b/test/files/run/showraw_tree_ultimate.check @@ -1,9 +1,22 @@ -Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap##CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef##MOD), TypeName("String")##TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef##MOD), TypeName("String")##TPE)))))), termNames.CONSTRUCTOR##PCTOR), List()) -[1] TypeRef(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.HashMap##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()), TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()))) -[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.HashMap##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()), TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List())))) -[3] TypeRef(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.HashMap##CLS, List()) -[4] TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()) -[5] SingleType(ThisType(scala##PKC), scala.Predef##MOD) +Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Select[3](Ident[4](scala.collection.immutable.ArraySeq##MOD), scala.collection.immutable.ArraySeq.ofRef##CLS), List(TypeTree[5]().setOriginal(Select[5](Ident[6](scala.Predef##MOD), 
TypeName("String")##TPE)))))), termNames.CONSTRUCTOR##PCTOR), List(Apply[7](Apply[8](TypeApply[9](Select[10](Ident[11](scala.Array##MOD), TermName("apply")##METH), List(TypeTree[5]())), List()), List(Apply[12](TypeApply[13](Select[14](Ident[6](scala.Predef##MOD), TermName("implicitly")##METH), List(TypeTree[12]())), List(Typed[12](Apply[12](TypeApply[15](Select[16](Ident[17](scala.reflect.ClassTag##MOD), TermName("apply")##METH), List(TypeTree[5]())), List(Literal[18](Constant(String)))), TypeTree[12]()))))))) +[1] TypeRef(SingleType(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.ArraySeq##MOD), scala.collection.immutable.ArraySeq.ofRef##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()))) +[2] MethodType(List(TermName("unsafeArray")##VAL), TypeRef(SingleType(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.ArraySeq##MOD), scala.collection.immutable.ArraySeq.ofRef##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List())))) +[3] TypeRef(SingleType(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.ArraySeq##MOD), scala.collection.immutable.ArraySeq.ofRef##CLS, List()) +[4] SingleType(ThisType(scala.collection.immutable##PKC), scala.collection.immutable.ArraySeq##MOD) +[5] TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()) +[6] SingleType(ThisType(scala##PKC), scala.Predef##MOD) +[7] TypeRef(ThisType(scala##PKC), scala.Array##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()))) +[8] MethodType(List(TermName("evidence$5")##VAL), TypeRef(ThisType(scala##PKC), scala.Array##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List())))) +[9] MethodType(List(TermName("xs")##VAL), MethodType(List(TermName("evidence$5")##VAL), TypeRef(ThisType(scala##PKC), scala.Array##CLS, 
List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()))))) +[10] PolyType(List(TypeName("T")##TPE), MethodType(List(TermName("xs")##VAL), MethodType(List(TermName("evidence$5")##VAL), TypeRef(ThisType(scala##PKC), scala.Array##CLS, List(TypeRef(NoPrefix, TypeName("T")##TPE, List())))))) +[11] SingleType(ThisType(scala##PKC), scala.Array##MOD) +[12] TypeRef(ThisType(scala.reflect##PKC), scala.reflect.ClassTag##TRT, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()))) +[13] MethodType(List(TermName("e")##VAL), TypeRef(ThisType(scala.reflect##PKC), scala.reflect.ClassTag##TRT, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List())))) +[14] PolyType(List(TypeName("T")##TPE), MethodType(List(TermName("e")##VAL), TypeRef(NoPrefix, TypeName("T")##TPE, List()))) +[15] MethodType(List(TermName("runtimeClass1")##VAL), TypeRef(ThisType(scala.reflect##PKC), scala.reflect.ClassTag##TRT, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List())))) +[16] PolyType(List(TypeName("T")##TPE), MethodType(List(TermName("runtimeClass1")##VAL), TypeRef(ThisType(scala.reflect##PKC), scala.reflect.ClassTag##TRT, List(TypeRef(NoPrefix, TypeName("T")##TPE, List()))))) +[17] SingleType(ThisType(scala.reflect##PKC), scala.reflect.ClassTag##MOD) +[18] FoldableConstantType(Constant(TypeRef(ThisType(java.lang##PKC), java.lang.String##CLS, List()))) Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.mutable.HashMap##CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef##MOD), TypeName("String")##TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef##MOD), TypeName("String")##TPE)))))), termNames.CONSTRUCTOR##CTOR), List()) [1] TypeRef(ThisType(scala.collection.mutable##PKC), scala.collection.mutable.HashMap##CLS, 
List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()), TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()))) [2] MethodType(List(), TypeRef(ThisType(scala.collection.mutable##PKC), scala.collection.mutable.HashMap##CLS, List(TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List()), TypeRef(SingleType(ThisType(scala##PKC), scala.Predef##MOD), TypeName("String")##TPE, List())))) diff --git a/test/files/run/showraw_tree_ultimate.scala b/test/files/run/showraw_tree_ultimate.scala index e0d36e6bb75e..091fb4a0f52d 100644 --- a/test/files/run/showraw_tree_ultimate.scala +++ b/test/files/run/showraw_tree_ultimate.scala @@ -3,9 +3,9 @@ import scala.tools.reflect.ToolBox object Test extends App { val tb = runtimeMirror(getClass.getClassLoader).mkToolBox() - val tree1 = reify(new collection.immutable.HashMap[String, String]) + val tree1 = reify(new collection.immutable.ArraySeq.ofRef[String](Array())) val tree2 = reify(new collection.mutable.HashMap[String, String]) def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#") println(stabilize(showRaw(tb.typecheck(tree1.tree), printIds = true, printKinds = true, printTypes = true))) println(stabilize(showRaw(tb.typecheck(tree2.tree), printIds = true, printKinds = true, printTypes = true))) -} \ No newline at end of file +} diff --git a/test/files/run/shutdownhooks.javaopts b/test/files/run/shutdownhooks.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/shutdownhooks.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/shutdownhooks.scala b/test/files/run/shutdownhooks.scala index 7fe5d129d6d3..df076fd0a118 100644 --- a/test/files/run/shutdownhooks.scala +++ b/test/files/run/shutdownhooks.scala @@ -1,12 +1,16 @@ +//> using javaOpt -Dneeds.forked.jvm + object Test { scala.sys.addShutdownHook { - 
Thread.sleep(1000) + // sleep is added here so main#shutdown happens before this hook. + // Thread.sleep(1000) was not enough according to https://github.com/scala/bug/issues/11536 + Thread.sleep(3000) println("Test#shutdown.") } def daemon() = { val t = new Thread { - override def run() { + override def run(): Unit = { Thread.sleep(10000) println("Hallelujah!") // should not see this } @@ -18,7 +22,7 @@ object Test { def nonDaemon() = { val t = new Thread { - override def run() { + override def run(): Unit = { Thread.sleep(100) println("Fooblitzky!") } diff --git a/test/files/run/sip23-cast-1.check b/test/files/run/sip23-cast-1.check new file mode 100644 index 000000000000..e79ed5fa8d21 --- /dev/null +++ b/test/files/run/sip23-cast-1.check @@ -0,0 +1,36 @@ +sip23-cast-1.scala:14: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 0.asInstanceOf[0] + ^ +sip23-cast-1.scala:15: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 1.asInstanceOf[0] + ^ +sip23-cast-1.scala:19: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 0L.asInstanceOf[0L] + ^ +sip23-cast-1.scala:20: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 1L.asInstanceOf[0L] + ^ +sip23-cast-1.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 0.0.asInstanceOf[0.0] + ^ +sip23-cast-1.scala:25: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 1.0.asInstanceOf[0.0] + ^ +sip23-cast-1.scala:29: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 0.0F.asInstanceOf[0.0F] + ^ +sip23-cast-1.scala:30: warning: a pure 
expression does nothing in statement position; multiline expressions may require enclosing parentheses + 1.0F.asInstanceOf[0.0F] + ^ +sip23-cast-1.scala:34: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + true.asInstanceOf[true] + ^ +sip23-cast-1.scala:35: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + true.asInstanceOf[false] + ^ +sip23-cast-1.scala:39: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 'f'.asInstanceOf['f'] + ^ +sip23-cast-1.scala:40: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 'f'.asInstanceOf['b'] + ^ diff --git a/test/files/run/sip23-cast-1.scala b/test/files/run/sip23-cast-1.scala new file mode 100644 index 000000000000..d7da02cae6b3 --- /dev/null +++ b/test/files/run/sip23-cast-1.scala @@ -0,0 +1,90 @@ +object Test extends App { + trait Global + final val global0 = new Global {} + final val global1 = new Global {} + + // asInstanceOf should do the minimum required to satisfy the verifier, in particular + // it should not assert equalities. + // isInstanceOf should check equalities. 
+ + // asInstanceOf + global0.asInstanceOf[global0.type] + global0.asInstanceOf[global1.type] + + 0.asInstanceOf[0] + 1.asInstanceOf[0] + null.asInstanceOf[0] + null.asInstanceOf[1] + + 0L.asInstanceOf[0L] + 1L.asInstanceOf[0L] + null.asInstanceOf[0L] + null.asInstanceOf[1L] + + 0.0.asInstanceOf[0.0] + 1.0.asInstanceOf[0.0] + null.asInstanceOf[0.0] + null.asInstanceOf[1.0] + + 0.0F.asInstanceOf[0.0F] + 1.0F.asInstanceOf[0.0F] + null.asInstanceOf[0.0F] + null.asInstanceOf[1.0F] + + true.asInstanceOf[true] + true.asInstanceOf[false] + null.asInstanceOf[true] + null.asInstanceOf[false] + + 'f'.asInstanceOf['f'] + 'f'.asInstanceOf['b'] + null.asInstanceOf['f'] + null.asInstanceOf['\u0000'] + + // Spec requires null to be an inhabitant of all subtypes of AnyRef, + // non-null singleton types included. + "foo".asInstanceOf["foo"] + "foo".asInstanceOf["bar"] + null.asInstanceOf["foo"] + + // isInstanceOf + assert(global0.isInstanceOf[global0.type]) + assert(!global0.isInstanceOf[global1.type]) + + assert(0.isInstanceOf[0]) + assert(!1.isInstanceOf[0]) + assert(!null.isInstanceOf[0]) + assert(!null.isInstanceOf[1]) + + assert(0L.isInstanceOf[0L]) + assert(!1L.isInstanceOf[0L]) + assert(!null.isInstanceOf[0L]) + assert(!null.isInstanceOf[1L]) + + assert(0.0.isInstanceOf[0.0]) + assert(!1.0.isInstanceOf[0.0]) + assert(!null.isInstanceOf[0.0]) + assert(!null.isInstanceOf[1.0]) + + assert(0.0F.isInstanceOf[0.0F]) + assert(!1.0F.isInstanceOf[0.0F]) + assert(!null.isInstanceOf[0.0F]) + assert(!null.isInstanceOf[1.0F]) + + assert(true.isInstanceOf[true]) + assert(!true.isInstanceOf[false]) + assert(!null.isInstanceOf[true]) + assert(!null.isInstanceOf[false]) + + assert('f'.isInstanceOf['f']) + assert(!'f'.isInstanceOf['b']) + assert(!null.isInstanceOf['f']) + assert(!null.isInstanceOf['\u0000']) + + // Despite the spec the implementation doesn't treat null as an + // inhabitant of subtypes of AnyRef when doing an isInstanceOf + // test. 
+ assert("foo".isInstanceOf["foo"]) + assert(!"foo".isInstanceOf["bar"]) + assert(!null.isInstanceOf["foo"]) +} diff --git a/test/files/run/sip23-implicit-resolution.scala b/test/files/run/sip23-implicit-resolution.scala new file mode 100644 index 000000000000..beef7ae73e9f --- /dev/null +++ b/test/files/run/sip23-implicit-resolution.scala @@ -0,0 +1,17 @@ +object Test extends App { + trait Assoc[K] { type V ; val v: V } + + def mkAssoc[K, V0](k: K, v0: V0): Assoc[k.type] { type V = V0 } = new Assoc[k.type] {type V = V0 ; val v = v0} + def lookup[K](k: K)(implicit a: Assoc[k.type]): a.V = a.v + + implicit def firstAssoc: Assoc[1] { type V = String } = mkAssoc(1, "Panda!") + implicit def secondAssoc: Assoc[2] { type V = String } = mkAssoc(2, "Kitty!") + + implicit def ageAssoc: Assoc["Age"] { type V = Int } = mkAssoc("Age", 3) + implicit def nmAssoc: Assoc["Name"] { type V = String } = mkAssoc("Name", "Jane") + + assert(lookup(1) == "Panda!") + assert(lookup(2) == "Kitty!") + assert(lookup("Age") == 3) + assert(lookup("Name") == "Jane") +} diff --git a/test/files/run/sip23-initialization0.scala b/test/files/run/sip23-initialization0.scala new file mode 100644 index 000000000000..02a2064c2234 --- /dev/null +++ b/test/files/run/sip23-initialization0.scala @@ -0,0 +1,13 @@ +class TestClazz { + def bar(x: "Hi"): x.type = x + + final val y = "Hi" + + val z0 = bar(y) + assert(z0 == y) + + final val z1 = bar(y) + assert(z1 == y) +} + +object Test extends TestClazz with App diff --git a/test/files/run/sip23-initialization1.scala b/test/files/run/sip23-initialization1.scala new file mode 100644 index 000000000000..a330d5181183 --- /dev/null +++ b/test/files/run/sip23-initialization1.scala @@ -0,0 +1,11 @@ +object Test extends App { + def bar(x: "Hi"): x.type = x + + final val y = "Hi" + + val z0 = bar(y) + assert(z0 == y) + + final val z1 = bar(y) + assert(z1 == y) +} diff --git a/test/files/run/sip23-macro-eval/Macros_1.scala b/test/files/run/sip23-macro-eval/Macros_1.scala 
new file mode 100644 index 000000000000..933cd5dcfd5f --- /dev/null +++ b/test/files/run/sip23-macro-eval/Macros_1.scala @@ -0,0 +1,15 @@ +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object Macros { + def impl(c: Context): c.Expr[Any] = { + import c.universe._; + + val res0 = c.eval(c.Expr[Any](q"implicitly[ValueOf[1]]")) + val res = res0.asInstanceOf[ValueOf[1]].value + + c.Expr[Any](q"$res") + } + + def eval: Any = macro impl +} diff --git a/test/files/run/sip23-macro-eval/Test_2.scala b/test/files/run/sip23-macro-eval/Test_2.scala new file mode 100644 index 000000000000..2b4c354d05fb --- /dev/null +++ b/test/files/run/sip23-macro-eval/Test_2.scala @@ -0,0 +1,4 @@ +object Test extends App { + val res = Macros.eval + assert(res == 1) +} diff --git a/test/files/run/sip23-no-constant-folding.check b/test/files/run/sip23-no-constant-folding.check new file mode 100644 index 000000000000..c6a6bd717378 --- /dev/null +++ b/test/files/run/sip23-no-constant-folding.check @@ -0,0 +1,2 @@ +got 42 +panda diff --git a/test/files/run/sip23-no-constant-folding.scala b/test/files/run/sip23-no-constant-folding.scala new file mode 100644 index 000000000000..5b83fb888004 --- /dev/null +++ b/test/files/run/sip23-no-constant-folding.scala @@ -0,0 +1,15 @@ +object Test extends App { + + def noisyId[A](x: A): x.type = { + println(s"got $x") + x + } + + def testNoConstantFolding: 23 = { + println("panda") + 23 + } + + assert(noisyId(42) == 42) + assert(testNoConstantFolding == 23) +} diff --git a/test/files/run/sip23-patterns.scala b/test/files/run/sip23-patterns.scala new file mode 100644 index 000000000000..517fc079b390 --- /dev/null +++ b/test/files/run/sip23-patterns.scala @@ -0,0 +1,16 @@ +object Test extends App { + assert((0: Any) match { case _: 0 => true; case _ => false }) + assert((0: Any) match { case _: 1 => false; case _ => true }) + assert((null: Any) match { case _: 0 => false; case _ => true }) + assert((null: Any) match { 
case _: 1 => false; case _ => true }) + + assert(("foo": Any) match { case _: "foo" => true; case _ => false }) + assert(("foo": Any) match { case _: "bar" => false; case _ => true }) + assert((null: Any) match { case _: "foo" => false; case _ => true }) + + object Foo + object Bar + assert((Foo: Any) match { case _: Foo.type => true; case _ => false }) + assert((Foo: Any) match { case _: Bar.type => false; case _ => true }) + assert((null: Any) match { case _: Foo.type => false; case _ => true }) +} diff --git a/test/files/run/sip23-rangepos.scala b/test/files/run/sip23-rangepos.scala new file mode 100644 index 000000000000..a3dad706310c --- /dev/null +++ b/test/files/run/sip23-rangepos.scala @@ -0,0 +1,6 @@ +// +// +object Test extends App { + val foo: "foo" = "foo" + val baz: -23 = -23 +} diff --git a/test/files/run/sip23-rec-constant.check b/test/files/run/sip23-rec-constant.check new file mode 100644 index 000000000000..7ed6ff82de6b --- /dev/null +++ b/test/files/run/sip23-rec-constant.check @@ -0,0 +1 @@ +5 diff --git a/test/files/run/sip23-rec-constant.scala b/test/files/run/sip23-rec-constant.scala new file mode 100644 index 000000000000..2f10725cb8c1 --- /dev/null +++ b/test/files/run/sip23-rec-constant.scala @@ -0,0 +1,5 @@ +object Test extends App { + final val recFive : 5 = recFive + 0 // vertigo inducing but apparently works in dotty + println(recFive) +} + diff --git a/test/files/run/sip23-singleton-isas.check b/test/files/run/sip23-singleton-isas.check new file mode 100644 index 000000000000..474194c6806a --- /dev/null +++ b/test/files/run/sip23-singleton-isas.check @@ -0,0 +1,36 @@ +sip23-singleton-isas.scala:8: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert(foo.isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:9: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert((foo: String).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:14: warning: fruitless 
type test: every non-null value will be a Singleton dynamically + assert(bar.isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:15: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert((bar: String).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:20: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert(baz.isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:21: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert((baz: String).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:25: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert("foo".isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:26: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert(("foo": String).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:32: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert((y: (x.type with y.type)).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:33: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert((y: (x.type with Int)).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:35: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert((y: A).isInstanceOf[Singleton]) + ^ +sip23-singleton-isas.scala:36: warning: fruitless type test: every non-null value will be a Singleton dynamically + assert(!(null: String).isInstanceOf[Singleton]) + ^ diff --git a/test/files/run/sip23-singleton-isas.scala b/test/files/run/sip23-singleton-isas.scala new file mode 100644 index 000000000000..dc99241b4df9 --- /dev/null +++ b/test/files/run/sip23-singleton-isas.scala @@ -0,0 +1,37 @@ +object Test extends App { + + // expr.isInstanceOf[Singleton] is true iff the expression has a singleton type + // However, expr.asInstanceOf[Singleton] is 
erased to expr.asInstanceOf[Any] so it never throws + // as discussed in https://docs.scala-lang.org/sips/minutes/2017-12-06-sip-minutes.html + + val foo: String = "foo" + assert(foo.isInstanceOf[Singleton]) + assert((foo: String).isInstanceOf[Singleton]) + foo.asInstanceOf[Singleton] + (foo: String).asInstanceOf[Singleton] + + val bar: "foo" = "foo" + assert(bar.isInstanceOf[Singleton]) + assert((bar: String).isInstanceOf[Singleton]) + bar.asInstanceOf[Singleton] + (bar: String).asInstanceOf[Singleton] + + final val baz = "foo" + assert(baz.isInstanceOf[Singleton]) + assert((baz: String).isInstanceOf[Singleton]) + baz.asInstanceOf[Singleton] + (baz: String).asInstanceOf[Singleton] + + assert("foo".isInstanceOf[Singleton]) + assert(("foo": String).isInstanceOf[Singleton]) + "foo".asInstanceOf[Singleton] + ("foo": String).asInstanceOf[Singleton] + + val x = 1 + val y: x.type = x + assert((y: (x.type with y.type)).isInstanceOf[Singleton]) + assert((y: (x.type with Int)).isInstanceOf[Singleton]) + type A = x.type + assert((y: A).isInstanceOf[Singleton]) + assert(!(null: String).isInstanceOf[Singleton]) +} diff --git a/test/files/run/sip23-toolbox-eval.scala b/test/files/run/sip23-toolbox-eval.scala new file mode 100644 index 000000000000..f331da66792d --- /dev/null +++ b/test/files/run/sip23-toolbox-eval.scala @@ -0,0 +1,7 @@ +import scala.tools.reflect.ToolBox +import scala.reflect.runtime.{ universe => ru } + +object Test extends App { + val tb = scala.reflect.runtime.currentMirror.mkToolBox() + tb.eval(ru.reify(implicitly[ValueOf[1]]).tree) +} diff --git a/test/files/run/sip23-type-equality.scala b/test/files/run/sip23-type-equality.scala new file mode 100644 index 000000000000..765ff6c2c67f --- /dev/null +++ b/test/files/run/sip23-type-equality.scala @@ -0,0 +1,8 @@ +object Test extends App { + final val x = 1 + val y: x.type = 1 + + implicitly[x.type =:= y.type] + implicitly[1 =:= y.type] + implicitly[1 =:= x.type] +} diff --git 
a/test/files/run/sip23-valueof.scala b/test/files/run/sip23-valueof.scala new file mode 100644 index 000000000000..684257c3b001 --- /dev/null +++ b/test/files/run/sip23-valueof.scala @@ -0,0 +1,26 @@ +object Test extends App { + object Foo + val foo = "foo" + + implicitly[ValueOf[1]] + implicitly[ValueOf[1L]] + implicitly[ValueOf[1.0]] + implicitly[ValueOf[1.0F]] + implicitly[ValueOf[true]] + implicitly[ValueOf['f']] + implicitly[ValueOf["foo"]] + implicitly[ValueOf[Unit]] + implicitly[ValueOf[Foo.type]] + implicitly[ValueOf[foo.type]] + + assert((valueOf[1]: 1) == 1) + assert((valueOf[1L]: 1L) == 1L) + assert((valueOf[1.0]: 1.0) == 1.0) + assert((valueOf[1.0F]: 1.0F) == 1.0F) + assert((valueOf[true]: true) == true) + assert((valueOf['f']: 'f') == 'f') + assert((valueOf["foo"]: "foo") == "foo") + assert((valueOf[Unit]: Unit) == ((): Any)) + assert((valueOf[Foo.type]: Foo.type) eq Foo) + assert((valueOf[foo.type]: foo.type) eq foo) +} diff --git a/test/files/run/sip23-widen2.scala b/test/files/run/sip23-widen2.scala new file mode 100644 index 000000000000..a637cbf7f76c --- /dev/null +++ b/test/files/run/sip23-widen2.scala @@ -0,0 +1,20 @@ +object Test extends App { + class F[T] + object F { + implicit def fi: F[Int] = new F[Int] + implicit def fu: F[Unit] = new F[Unit] + } + case class Widened[T](value: Boolean) + object Widened { + implicit def notWidened[T <: Singleton]: Widened[T] = Widened(false) + implicit def widened[T]: Widened[T] = Widened(true) + } + + def widened[T](t: T)(implicit w: Widened[T], @annotation.unused f: F[T]): Boolean = w.value + + def boundedWidened[T <: Singleton](t: T)(implicit w: Widened[T]): Boolean = w.value + + assert(widened(23)) + assert(!boundedWidened(23)) + assert(widened(())) +} diff --git a/test/files/run/slices.scala b/test/files/run/slices.scala index 107b8e658a8c..6bd7b1a41ffe 100644 --- a/test/files/run/slices.scala +++ b/test/files/run/slices.scala @@ -1,4 +1,4 @@ - +import scala.tools.partest.Util.ArrayDeep import 
scala.language.postfixOps object Test extends App { @@ -9,21 +9,21 @@ object Test extends App { println(List(1, 2, 3, 4).slice(-1, 1)) println(List(1, 2, 3, 4).slice(1, -1)) println(List(1, 2, 3, 4).slice(-2, 2)) - println + println() println(List(1, 2, 3, 4) take 3) println(List(1, 2, 3) take 3) println(List(1, 2) take 3) println((List(): List[Int]) take 3) println(List[Nothing]() take 3) - println + println() println(List(1, 2, 3, 4) drop 3) println(List(1, 2, 3) drop 3) println(List(1, 2) drop 3) println((List(): List[Int]) drop 3) println(List[Nothing]() drop 3) - println + println() // arrays println(Array(1, 2, 3, 4).slice(1, 2).deep) @@ -31,19 +31,19 @@ object Test extends App { println(Array(1, 2, 3, 4).slice(-1, 1).deep) println(Array(1, 2, 3, 4).slice(1, -1).deep) println(Array(1, 2, 3, 4).slice(-2, 2).deep) - println + println() println(Array(1, 2, 3, 4) take 3 deep) println(Array(1, 2, 3) take 3 deep) println(Array(1, 2) take 3 deep) println((Array(): Array[Int]) take 3 deep) // println(Array[Nothing]() take 3) // contrib #757 - println + println() println(Array(1, 2, 3, 4) drop 3 deep) println(Array(1, 2, 3) drop 3 deep) println(Array(1, 2) drop 3 deep) println((Array(): Array[Int]) drop 3 deep) // println(Array[Nothing]() drop 3) - println + println() } diff --git a/test/files/run/small-seq-apply.check b/test/files/run/small-seq-apply.check new file mode 100644 index 000000000000..09b781bbf5b4 --- /dev/null +++ b/test/files/run/small-seq-apply.check @@ -0,0 +1 @@ +cost of tracking allocations - cost of Object = 0 diff --git a/test/files/run/small-seq-apply.scala b/test/files/run/small-seq-apply.scala new file mode 100644 index 000000000000..fe10afcca021 --- /dev/null +++ b/test/files/run/small-seq-apply.scala @@ -0,0 +1,47 @@ + +object Sizes { + def list: Int = 24 + def listBuffer: Int = 32 + + def refArray(length:Int): Int = (16 + (length+1) * 4) /8 * 8 + def wrappedRefArray(length:Int): Int = refArray(length) + 16 + def wrappedRefArrayIterator: Int 
= 24 +} + +object Test extends scala.tools.testkit.AllocationTest { + def main(args: Array[String]): Unit = { + smallSeqAllocation + //largeSeqAllocation + } + def smallSeqAllocation: Unit = { + exactAllocates(Sizes.list * 1, "collection seq size 1")(Seq("0")) + exactAllocates(Sizes.list * 2, "collection seq size 2")(Seq("0", "1")) + exactAllocates(Sizes.list * 3, "collection seq size 3")(Seq("0", "1", "")) + exactAllocates(Sizes.list * 4, "collection seq size 4")(Seq("0", "1", "2", "3")) + exactAllocates(Sizes.list * 5, "collection seq size 5")(Seq("0", "1", "2", "3", "4")) + exactAllocates(Sizes.list * 6, "collection seq size 6")(Seq("0", "1", "2", "3", "4", "5")) + exactAllocates(Sizes.list * 7, "collection seq size 7")(Seq("0", "1", "2", "3", "4", "5", "6")) + } + def largeSeqAllocation: Unit = { + exactAllocates(Sizes.list * 10 + Sizes.wrappedRefArray(10) + Sizes.listBuffer + 16, "collection seq size 10")( + Seq("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")) + exactAllocates(Sizes.list * 20 + Sizes.wrappedRefArray(20) + Sizes.listBuffer + 16, "collection seq size 20")( + Seq("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19")) + } +} + +/* +restored partest (cf junit) to test outside scala.collection package + +cost of tracking allocations - cost of Object = 0 +java.lang.AssertionError: allocating min = 88 allowed = 24 -- collection seq size 1 + result = List(0) (class scala.collection.immutable.$colon$colon) + allocation 88 (1000 times) + + at org.junit.Assert.fail(Assert.java:89) + at scala.tools.testkit.AllocationTest.failTest(AllocationTest.scala:111) + at scala.tools.testkit.AllocationTest.exactAllocates(AllocationTest.scala:102) + at scala.tools.testkit.AllocationTest.exactAllocates$(AllocationTest.scala:100) + at Test$.exactAllocates(small-seq-apply.scala:11) + at Test$.smallSeqAllocation(small-seq-apply.scala:17) + */ diff --git a/test/files/run/smallseq.check b/test/files/run/smallseq.check 
new file mode 100644 index 000000000000..4daff0a79eb9 --- /dev/null +++ b/test/files/run/smallseq.check @@ -0,0 +1,15 @@ +[[syntax trees at end of cleanup]] // newSource1.scala +package { + class C extends Object { + def f0(): Seq = scala.collection.immutable.Nil.$asInstanceOf[Seq](); + def f1(): Seq = scala.collection.immutable.Nil.$asInstanceOf[Seq](); + def g(): Seq = scala.collection.immutable.Nil.$asInstanceOf[Seq](); + def h(): Seq = scala.collection.immutable.Nil.$asInstanceOf[Seq](); + def nil(): scala.collection.immutable.Nil.type = scala.collection.immutable.Nil; + def (): C = { + C.super.(); + () + } + } +} + diff --git a/test/files/run/smallseq.scala b/test/files/run/smallseq.scala new file mode 100644 index 000000000000..85f5736991f8 --- /dev/null +++ b/test/files/run/smallseq.scala @@ -0,0 +1,24 @@ + +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vprint:cleanup" + override def code = """ + class C { + def f0 = Seq(Nil: _*) + def f1 = Seq.apply[String](Nil: _*) + + def g = Seq.apply[String](scala.collection.immutable.Nil: _*) + + def h = scala.collection.immutable.Seq.apply[String](scala.collection.immutable.Nil: _*) + + def nil = Nil + } + """ + override def show() = assert(compile()) +} + +/* +was: +def f0(): Seq = scala.`package`.Seq().apply(scala.`package`.Nil()).$asInstanceOf[Seq](); + */ diff --git a/test/files/run/source3Xrun.scala b/test/files/run/source3Xrun.scala new file mode 100644 index 000000000000..3ed48a23c156 --- /dev/null +++ b/test/files/run/source3Xrun.scala @@ -0,0 +1,71 @@ +//> using options -Xsource:3 -Xsource-features:_ + +// StringContext hygiene +class SC1 { + class Impl(parts: Any*) { + def s(args: Any*) = "hello, old world" + } + object StringContext { + def apply(parts: Any*) = new Impl(parts: _*) + } + def name = "Scala3" + def test = s"hello, $name" +} + +class SC2 { + import SC2.* + class Impl(parts: Any*) { + def x(args: Any*) = "hello, old world" } 
+ object StringContext { + def apply(parts: Any*) = new Impl(parts: _*) + } + def name = "Scala3" + def test = x"hello, $name" +} +object SC2 { + implicit class x(val sc: StringContext) extends AnyVal { + def x(args: Any*) = "hello, world" + } +} + +object UnicodeEscapes { + def inTripleQuoted = """\u0041""" + def inRawInterpolation = raw"\u0041" + def inRawTripleQuoted = raw"""\u0041""" +} + +object InfixNewline extends App { + class K { def x(y: Int) = 0 } + + def x(a: Int) = 1 + + def ok = { + (new K) + `x` (42) + } +} + +case class CaseCompanionMods private (x: Int) +object CaseCompanionMods { def i = CaseCompanionMods(1) } + +object Test extends App { + locally { + assert(new SC1().test == "hello, Scala3") + assert(new SC2().test == "hello, world") + } + + locally { + val asList = List('\\', 'u', '0', '0', '4', '1') + assert(asList == UnicodeEscapes.inTripleQuoted.toList) + assert(asList == UnicodeEscapes.inRawInterpolation.toList) + assert(asList == UnicodeEscapes.inRawTripleQuoted.toList) + } + + locally { + assert(InfixNewline.ok == 0) + } + + locally { + CaseCompanionMods.i + } +} diff --git a/test/files/run/source3run.scala b/test/files/run/source3run.scala new file mode 100644 index 000000000000..a2f9c6847c83 --- /dev/null +++ b/test/files/run/source3run.scala @@ -0,0 +1,78 @@ +//> using options -Wconf:cat=scala3-migration:s -Xsource:3 + +// StringContext hygiene +class SC1 { + class Impl(parts: Any*) { + def s(args: Any*) = "hello, old world" + } + object StringContext { + def apply(parts: Any*) = new Impl(parts: _*) + } + def name = "Scala3" + def test = s"hello, $name" +} + +class SC2 { + import SC2.* + class Impl(parts: Any*) { + def x(args: Any*) = "hello, old world" } + object StringContext { + def apply(parts: Any*) = new Impl(parts: _*) + } + def name = "Scala3" + def test = x"hello, $name" +} +object SC2 { + implicit class x(val sc: StringContext) extends AnyVal { + def x(args: Any*) = "hello, world" + } +} + +object UnicodeEscapes { + def 
inTripleQuoted = """\u0041""" + def inRawInterpolation = raw"\u0041" + def inRawTripleQuoted = raw"""\u0041""" +} + +object InfixNewline extends App { + class K { def x(y: Int) = 0 } + + def x(a: Int) = 1 + + def ok = { + (new K) + `x` (42) + } +} + +case class CaseCompanionMods private (x: Int) +object CaseCompanionMods { def i = CaseCompanionMods(1) } + +trait InferredBase { def f: Object } +object InferredSub extends InferredBase { def f = "a" } + +object Test extends App { + locally { + assert(new SC1().test == "hello, old world") + assert(new SC2().test == "hello, old world") + } + + locally { + val asList = List('A') + assert(asList == UnicodeEscapes.inTripleQuoted.toList) + assert(asList == UnicodeEscapes.inRawInterpolation.toList) + assert(asList == UnicodeEscapes.inRawTripleQuoted.toList) + } + + locally { + assert(InfixNewline.ok == 1) + } + + locally { + CaseCompanionMods.i.copy(x = CaseCompanionMods(2).x) + } + + locally { + assert(InferredSub.f.toUpperCase == "A") + } +} diff --git a/test/files/run/spec-nlreturn.check b/test/files/run/spec-nlreturn.check deleted file mode 100644 index 26cff0736032..000000000000 --- a/test/files/run/spec-nlreturn.check +++ /dev/null @@ -1,2 +0,0 @@ -scala.runtime.NonLocalReturnControl$mcI$sp -16 diff --git a/test/files/run/spec-nlreturn.scala b/test/files/run/spec-nlreturn.scala index 5ab1747856d8..d9238a67863a 100644 --- a/test/files/run/spec-nlreturn.scala +++ b/test/files/run/spec-nlreturn.scala @@ -1,17 +1,13 @@ - +//scalac: -Xlint:-nonlocal-return object Test { def f(): Int = { - try { - val g = (1 to 10 map { i => return 16 ; i }).sum - g - } - catch { case x: runtime.NonLocalReturnControl[_] => - println(x.getClass.getName) - x.value.asInstanceOf[Int] + try (1 to 10).map { i => return 16 ; i }.sum + catch { + case x: runtime.NonLocalReturnControl[_] => + assert(x.getClass.getName == "scala.runtime.NonLocalReturnControl$mcI$sp") + x.value.asInstanceOf[Int] } } - def main(args: Array[String]): Unit = { - 
println(f()) - } + def main(args: Array[String]): Unit = assert(f() == 16) } diff --git a/test/files/run/specialize-functional-interface/T.scala b/test/files/run/specialize-functional-interface/T.scala new file mode 100644 index 000000000000..4f758a3cfe31 --- /dev/null +++ b/test/files/run/specialize-functional-interface/T.scala @@ -0,0 +1 @@ +trait T[@specialized(Int) A] { def t(a: A): A } \ No newline at end of file diff --git a/test/files/run/specialize-functional-interface/Test_1.java b/test/files/run/specialize-functional-interface/Test_1.java new file mode 100644 index 000000000000..97c0657a15f1 --- /dev/null +++ b/test/files/run/specialize-functional-interface/Test_1.java @@ -0,0 +1,8 @@ +class Test_1 { + public T doubler() { + return new T$mcI$sp() { + public int t$mcI$sp(int i) { return i * 2; } + public Object t(Object i) { return (int) i * 2; } + }; + } +} diff --git a/test/files/run/specialize-functional-interface/Test_2.scala b/test/files/run/specialize-functional-interface/Test_2.scala new file mode 100644 index 000000000000..02e22062a58f --- /dev/null +++ b/test/files/run/specialize-functional-interface/Test_2.scala @@ -0,0 +1,4 @@ +object Test extends App { + val result = new Test_1().doubler().asInstanceOf[T[Int]].t(1) + assert(result == 2) +} diff --git a/test/files/run/splain-tree.check b/test/files/run/splain-tree.check new file mode 100644 index 000000000000..ce4973924d5d --- /dev/null +++ b/test/files/run/splain-tree.check @@ -0,0 +1,94 @@ +newSource1.scala:28: error: implicit error; +!I e: tpes.I1 +i1a invalid because +!I p: tpes.I2 +――i2 invalid because + !I p: tpes.I3 +――――i3a invalid because + !I p: tpes.I4 +――――――i4 invalid because + !I p: tpes.I5 +――――――――i5 invalid because + !I p: tpes.I6 +――――――――――i6a invalid because + !I p: tpes.I7 +――――――――――――i7 invalid because + !I p: tpes.I8 +――――――――――――――i8 invalid because + !I p: tpes.I9 +――――――――――i6b invalid because + !I p: tpes.I8 +――――――――――――i8 invalid because + !I p: tpes.I9 +――――i3b 
invalid because + !I p: tpes.I4 +――――――i4 invalid because + !I p: tpes.I5 +――――――――i5 invalid because + !I p: tpes.I6 +――――――――――i6a invalid because + !I p: tpes.I7 +――――――――――――i7 invalid because + !I p: tpes.I8 +――――――――――――――i8 invalid because + !I p: tpes.I9 +――――――――――i6b invalid because + !I p: tpes.I8 +――――――――――――i8 invalid because + !I p: tpes.I9 +i1b invalid because +!I p: tpes.I6 +――i6a invalid because + !I p: tpes.I7 +――――i7 invalid because + !I p: tpes.I8 +――――――i8 invalid because + !I p: tpes.I9 +――i6b invalid because + !I p: tpes.I8 +――――i8 invalid because + !I p: tpes.I9 + implicitly[I1] + ^ +newSource1.scala:28: error: implicit error; +!I e: tpes.I1 +i1a invalid because +!I p: tpes.I2 +⋮ +――i3a invalid because + !I p: tpes.I4 + ⋮ +――――i6a invalid because + !I p: tpes.I7 + ⋮ +――――――――i8 invalid because + !I p: tpes.I9 +――――i6b invalid because + !I p: tpes.I8 +――――――i8 invalid because + !I p: tpes.I9 +――i3b invalid because + !I p: tpes.I4 + ⋮ +――――i6a invalid because + !I p: tpes.I7 + ⋮ +――――――――i8 invalid because + !I p: tpes.I9 +――――i6b invalid because + !I p: tpes.I8 +――――――i8 invalid because + !I p: tpes.I9 +i1b invalid because +!I p: tpes.I6 +――i6a invalid because + !I p: tpes.I7 + ⋮ +――――――i8 invalid because + !I p: tpes.I9 +――i6b invalid because + !I p: tpes.I8 +――――i8 invalid because + !I p: tpes.I9 + implicitly[I1] + ^ \ No newline at end of file diff --git a/test/files/run/splain-tree.scala b/test/files/run/splain-tree.scala new file mode 100644 index 000000000000..56f9ff7a3f16 --- /dev/null +++ b/test/files/run/splain-tree.scala @@ -0,0 +1,51 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vimplicits" + + def code: String = "" + + def verboseTree: String = """ +object tpes +{ + trait I1 + trait I2 + trait I3 + trait I4 + trait I5 + trait I6 + trait I7 + trait I8 + trait I9 +} +import tpes._ + +object Tree +{ + implicit def i8(implicit p: I9): I8 = ??? 
+ implicit def i7(implicit p: I8): I7 = ??? + implicit def i6a(implicit p: I7): I6 = ??? + implicit def i6b(implicit p: I8): I6 = ??? + implicit def i5(implicit p: I6): I5 = ??? + implicit def i4(implicit p: I5): I4 = ??? + implicit def i3a(implicit p: I4): I3 = ??? + implicit def i3b(implicit p: I4): I3 = ??? + implicit def i2(implicit p: I3): I2 = ??? + implicit def i1a(implicit p: I2): I1 = ??? + implicit def i1b(implicit p: I6): I1 = ??? + implicitly[I1] +} + """ + + def show(): Unit = { + val global = newCompiler() + val globalVerbose = newCompiler("-Vimplicits-verbose-tree") + + def run(code: String): Unit = { + compileString(globalVerbose)(code.trim) + compileString(global)(code.trim) + } + + run(verboseTree) + } +} diff --git a/test/files/run/splain-truncrefined.check b/test/files/run/splain-truncrefined.check new file mode 100644 index 000000000000..bf112963fd65 --- /dev/null +++ b/test/files/run/splain-truncrefined.check @@ -0,0 +1,4 @@ +newSource1.scala:7: error: type mismatch; + TruncRefined.D|TruncRefined.C {...} + f(new D { type X = C; type Y = D }) + ^ diff --git a/test/files/run/splain-truncrefined.scala b/test/files/run/splain-truncrefined.scala new file mode 100644 index 000000000000..2be99a6350bb --- /dev/null +++ b/test/files/run/splain-truncrefined.scala @@ -0,0 +1,28 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vimplicits -Vtype-diffs -Vimplicits-max-refined 5" + + def code: String = "" + + def truncrefined: String = """ +object TruncRefined +{ + class C + trait D + type CAux[A] = C { type X = C; type Y = D } + def f(arg1: CAux[D]) = ??? 
+ f(new D { type X = C; type Y = D }) +} + + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(truncrefined) + } +} diff --git a/test/files/run/splain.check b/test/files/run/splain.check new file mode 100644 index 000000000000..9c41024605b2 --- /dev/null +++ b/test/files/run/splain.check @@ -0,0 +1,145 @@ +newSource1.scala:13: error: implicit error; +!I e: ImplicitChain.II +ImplicitChain.g invalid because +!I impPar3: ImplicitChain.I1 +――ImplicitChain.i1 invalid because + !I impPar7: ImplicitChain.I3 + implicitly[II] + ^ +newSource1.scala:6: error: type mismatch; + FoundReq.L|FoundReq.R + f(new L) + ^ +newSource1.scala:7: error: implicit error; +!I e: Bounds.F[Bounds.Arg] +Bounds.g invalid because +nonconformant bounds; +[Bounds.Arg, scala.Nothing] +[A <: Bounds.Base, B] + implicitly[F[Arg]] + ^ +newSource1.scala:4: error: implicit error; +!I ec: scala.concurrent.ExecutionContext + Cannot find an implicit ExecutionContext. You might add + an (implicit ec: ExecutionContext) parameter to your method. + + The ExecutionContext is used to configure how and on which + thread pools asynchronous tasks (such as Futures) will run, + so the specific ExecutionContext that is selected is important. 
+ + If your application does not define an ExecutionContext elsewhere, + consider using Scala's global ExecutionContext by defining + the following: + + implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global + + long + ^ +newSource1.scala:10: error: implicit error; +!I e: java.lang.String +f invalid because +!I impPar4: + List[ + ( + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName + ) + :::: + (InfixBreak.Short :::: InfixBreak.Short) :::: + ( + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName + ) + :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName + ] + (No implicit view available from Int => InfixBreak.T2.) + + implicitly[String] + ^ +newSource1.scala:11: error: implicit error; +!I e: + DeepHole.C1[ + DeepHole.T3[ + DeepHole.T1[List[java.lang.String], ?] + , + DeepHole.T2[DeepHole.Id, DeepHole.C4, ?] + , + ? 
+ ] + ] + implicitly[C1[T3]] + ^ +newSource1.scala:9: error: implicit error; +!I e: Aux.F.Aux[Aux.C, Aux.D] +Aux.f invalid because +!I impPar10: Aux.C + implicitly[F.Aux[C, D]] + ^ +newSource1.scala:11: error: type mismatch; + Refined.A with Refined.B with Refined.E|Refined.C with Refined.F| {type X = scala.Int|java.lang.String; type Y = java.lang.String; type Z = |java.lang.String} + f(x) + ^ +newSource1.scala:25: error: type mismatch; + C.X.Y.T|B.X.Y.T + f(x: C.X.Y.T) + ^ +newSource1.scala:6: error: type mismatch; + scala.Int|(=> Foo.A) => Foo.B + f(1: Int) + ^ +newSource1.scala:3: error: type mismatch; + java.lang.String|Tuple1[java.lang.String] + val a: Tuple1[String] = "Tuple1": String + ^ +newSource1.scala:7: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:8: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:6: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:5: error: implicit error; +!I ev: scala.math.Ordering[java.lang.Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: java.lang.Object => java.lang.Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +Ordering.comparatorToOrdering invalid because +!I cmp: java.util.Comparator[java.lang.Object] + ms.map(_ => o) + ^ +newSource1.scala:9: error: implicit error; +!I e: List[a.TypeA] + (No implicit view available from Int => a.TypeA.) + + implicitly[List[TypeA]] + ^ +newSource1.scala:10: error: implicit error; +!I e: Seq[a.b.TypeB] + (No implicit view available from Int => a.b.TypeB.) 
+ + implicitly[Seq[TypeB]] + ^ +newSource1.scala:11: error: implicit error; +!I e: Iterable[a.b.c.TypeC] + implicitly[Traversable[TypeC]] + ^ +newSource1.scala:12: error: implicit error; +!I e: Iterator[a.b.c.d.TypeD] + implicitly[Iterator[TypeD]] + ^ diff --git a/test/files/run/splain.scala b/test/files/run/splain.scala new file mode 100644 index 000000000000..c291c3338bbd --- /dev/null +++ b/test/files/run/splain.scala @@ -0,0 +1,255 @@ +import scala.tools.partest._ + +object Test +extends DirectTest +{ + override def extraSettings: String = "-usejavacp -Vimplicits -Vtype-diffs" + + def code: String = "" + + def chain: String = """ +object ImplicitChain +{ + trait I1 + trait I2 + trait I3 + trait I4 + trait II + implicit def i1(implicit impPar7: I3): I1 = ??? + implicit def i2a(implicit impPar8: I3): I2 = ??? + implicit def i2b(implicit impPar8: I3): I2 = ??? + implicit def i4(implicit impPar9: I2): I4 = ??? + implicit def g(implicit impPar3: I1, impPar1: I4): II = ??? + implicitly[II] +} + """ + + def foundReq: String = """ +object FoundReq +{ + class L + type R + def f(r: R): Int = ??? + f(new L) +} + """ + + final val foundReqSingleAbstractMethod = """ +object FoundReq extends App { + def f(x: AnyVal, f: Runnable) = 1 + def f(x: Double, f: Runnable) = 2 + + f(3.0, () => println("work")) +} + """ + + def bounds: String = """ +object Bounds +{ + trait Base + trait Arg + trait F[A] + implicit def g[A <: Base, B]: F[A] = ??? + implicitly[F[Arg]] +} + """ + + def longAnnotationMessage: String = """ +object Long +{ + def long(implicit ec: concurrent.ExecutionContext): Unit = ??? + long +} + """ + + def longInfix: String = """ +object InfixBreak +{ + type ::::[A, B] + trait VeryLongTypeName + trait Short + type T1 = VeryLongTypeName :::: VeryLongTypeName :::: VeryLongTypeName :::: + VeryLongTypeName + type T2 = T1 :::: (Short :::: Short) :::: T1 :::: T1 + implicit def f(implicit impPar4: List[T2]): String = ??? 
+ implicitly[String] +} + """ + + def deeplyNestedHole: String = """ +object DeepHole +{ + trait C1[F[_]] + trait C2[F[_], G[_], A] + trait C3[A, B] + trait C4[A] + type Id[A] = A + type T1[X] = C3[List[String], X] + type T2[Y] = C2[Id, C4, Y] + type T3[Z] = C2[T1, T2, Z] + implicitly[C1[T3]] +} + """ + + def auxType: String = """ +object Aux +{ + trait C + trait D + trait F + object F { type Aux[A, B] = F { type X = A; type Y = B } } + implicit def f[A, B](implicit impPar10: C): F { type X = A; type Y = B } = + ??? + implicitly[F.Aux[C, D]] +} + """ + + def refined: String = """ +object Refined +{ + trait A + trait B + trait C + trait D + trait E + trait F + def f(a: A with B with C { type Y = String; type X = String; type Z = String }): Unit = ??? + val x: B with E with A with F { type X = Int; type Y = String } = ??? + f(x) +} + """ + + def disambiguateQualified: String = """ +object A +{ + object B + { + object X + { + object Y + { + type T + } + } + } + object C + { + object X + { + object Y + { + type T + } + } + } + def f(a: B.X.Y.T): Unit = () + val x: C.X.Y.T = ??? 
+ f(x: C.X.Y.T) +} + """ + + def bynameParam: String = """ +object Foo +{ + type A + type B + def f(g: (=> A) => B): Unit = () + f(1: Int) +} + """ + + def tuple1: String = """ +object Tup1 +{ + val a: Tuple1[String] = "Tuple1": String +} + """ + + def singleType: String = """ +object SingleImp +{ + class ***[A, B] + val a = 1 + val b = 2 + + implicitly[a.type *** b.type] +} + """ + + def singleTypeInFunction: String = """ +object SingleImp +{ + class ***[A, B] + def fn(): Unit = { + val a = 1 + val b = 2 + + implicitly[a.type *** b.type] + } +} + """ + + def singleTypeWithFreeSymbol: String = """ +object SingleImp +{ + class ***[A, B] + def fn[A, B](a: A, b: B) = { + + implicitly[a.type *** b.type] + } +} + """ + + def parameterAnnotation: String = """ + import collection.{mutable => m, immutable => i} + object Test { + val o = new Object + val ms = m.SortedSet(1,2,3) + ms.map(_ => o) + } + """ + + def shorthandTypes: String = """ +object a { + type TypeA + object b { + type TypeB + object c { + type TypeC + object d { + type TypeD + implicitly[List[TypeA]] + implicitly[Seq[TypeB]] + implicitly[Traversable[TypeC]] + implicitly[Iterator[TypeD]] + } + } + } +} +""" + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(chain) + run(foundReq) + run(foundReqSingleAbstractMethod) + run(bounds) + run(longAnnotationMessage) + run(longInfix) + run(deeplyNestedHole) + run(auxType) + run(refined) + run(disambiguateQualified) + run(bynameParam) + run(tuple1) + run(singleType) + run(singleTypeInFunction) + run(singleTypeWithFreeSymbol) + run(parameterAnnotation) + run(shorthandTypes) + } +} diff --git a/test/files/run/static-module-method.scala b/test/files/run/static-module-method.scala index a8691300defc..3e3b3cc07ae6 100644 --- a/test/files/run/static-module-method.scala +++ b/test/files/run/static-module-method.scala @@ -8,7 +8,7 @@ object Test { def map(x: String, f: String => String) = f(x) - 
def main(args: Array[String]) { + def main(args: Array[String]): Unit = { println(map("hello", Test.moduleMethod)) } -} \ No newline at end of file +} diff --git a/test/files/run/staticQualifier.check b/test/files/run/staticQualifier.check new file mode 100644 index 000000000000..fe81901643b0 --- /dev/null +++ b/test/files/run/staticQualifier.check @@ -0,0 +1,3 @@ +42 +hai +42 diff --git a/test/files/run/staticQualifier/A_1.scala b/test/files/run/staticQualifier/A_1.scala new file mode 100644 index 000000000000..d056596542f5 --- /dev/null +++ b/test/files/run/staticQualifier/A_1.scala @@ -0,0 +1,21 @@ +import scala.language.experimental.macros +import scala.reflect.macros.blackbox.Context + +object A { + def foo(x: Int): Int = macro foo_impl + + def foo_impl(c: Context)(x: c.Expr[Int]): c.Tree = { + val g = c.universe.asInstanceOf[scala.tools.nsc.Global] + import g._ + import scala.tools.nsc.symtab.Flags._ + + val t = x.tree.asInstanceOf[Tree] match { + case s @ Select(_, n) if n.toString == "f2" => + val field = s.symbol.accessed + field.setFlag(STATIC).resetFlag(PRIVATE | LOCAL) + s.setSymbol(field) + } + + t.asInstanceOf[c.Tree] + } +} diff --git a/test/files/run/staticQualifier/Test_2.scala b/test/files/run/staticQualifier/Test_2.scala new file mode 100644 index 000000000000..bff989413cdb --- /dev/null +++ b/test/files/run/staticQualifier/Test_2.scala @@ -0,0 +1,24 @@ +import scala.tools.partest.BytecodeTest +import scala.tools.testkit.ASMConverters._ +import scala.tools.asm.Opcodes._ + +object Test extends BytecodeTest { + val f2 = 42 + + def getT: Test.type = { + println("hai") + Test + } + + def show(): Unit = { + println(A.foo(Test.f2)) + println(A.foo(getT.f2)) + + val ins = instructionsFromMethod(getMethod(loadClassNode("Test$"), "show")) + val gs = ins.count { + case Field(GETSTATIC, "Test$", "f2", _) => true + case _ => false + } + assert(gs == 2) + } +} diff --git a/test/files/run/stream-gc.check b/test/files/run/stream-gc.check new file mode 100644 
index 000000000000..202f49c8ebab --- /dev/null +++ b/test/files/run/stream-gc.check @@ -0,0 +1 @@ +warning: 6 deprecations (since 2.13.0); re-run with -deprecation for details diff --git a/test/files/run/stream-gc.scala b/test/files/run/stream-gc.scala new file mode 100644 index 000000000000..decacf669fa8 --- /dev/null +++ b/test/files/run/stream-gc.scala @@ -0,0 +1,12 @@ +//> using javaOpt -Xmx5M -Xms5M + +import scala.collection.immutable._ + +object Test extends App { + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).find(_ => false) + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collect { case x if false => x } + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).iterator.foreach(_ => ()) +} diff --git a/test/files/run/stream-stack-overflow-filter-map.scala b/test/files/run/stream-stack-overflow-filter-map.scala index f3a9dd49cb10..c93ab4b8e470 100644 --- a/test/files/run/stream-stack-overflow-filter-map.scala +++ b/test/files/run/stream-stack-overflow-filter-map.scala @@ -1,29 +1,26 @@ -import collection.generic.{FilterMonadic, CanBuildFrom} +import collection.{IterableOps, BuildFrom} object Test extends App { - def mapSucc[Repr, That](s: FilterMonadic[Int, Repr])(implicit cbf: CanBuildFrom[Repr, Int, That]) = s map (_ + 1) - def flatMapId[T, Repr, That](s: FilterMonadic[T, Repr])(implicit cbf: CanBuildFrom[Repr, T, That]) = s flatMap (Seq(_)) - - def testStreamPred(s: Stream[Int])(p: Int => Boolean) { + def testStreamPred(s: LazyList[Int])(p: Int => Boolean): Unit = { val res1 = s withFilter p val res2 = s filter p val expected = s.toSeq filter p - val fMapped1 = flatMapId(res1) - val fMapped2 = flatMapId(res2) + val fMapped1 = res1.flatMap(Seq(_)) + val fMapped2 = 
res2.flatMap(Seq(_)) assert(fMapped1 == fMapped2) assert(fMapped1.toSeq == expected) - val mapped1 = mapSucc(res1) - val mapped2 = mapSucc(res2) + val mapped1 = res1 map (_ + 1) + val mapped2 = res2 map (_ + 1) assert(mapped1 == mapped2) assert(mapped1.toSeq == (expected map (_ + 1))) assert((res1 map identity).toSeq == res2.toSeq) } - def testStream(s: Stream[Int]) { + def testStream(s: LazyList[Int]): Unit = { testStreamPred(s)(_ => false) testStreamPred(s)(_ => true) testStreamPred(s)(_ % 2 == 0) @@ -32,12 +29,12 @@ object Test extends App { //Reduced version of the test case - either invocation used to cause a stack //overflow before commit 80b3f433e5536d086806fa108ccdfacf10719cc2. - val resFMap = (1 to 10000).toStream withFilter (_ => false) flatMap (Seq(_)) - val resMap = (1 to 10000).toStream withFilter (_ => false) map (_ + 1) + val resFMap = (1 to 10000).to(LazyList) withFilter (_ => false) flatMap (Seq(_)) + val resMap = (1 to 10000).to(LazyList) withFilter (_ => false) map (_ + 1) //Complete test case for withFilter + map/flatMap, as requested by @axel22. for (j <- (0 to 3) :+ 10000) { - val stream = (1 to j).toStream + val stream = (1 to j).to(LazyList) assert(stream.toSeq == (1 to j).toSeq) testStream(stream) } diff --git a/test/files/run/streamWithFilter.scala b/test/files/run/streamWithFilter.scala index cb919d4f5568..806e65d950c4 100644 --- a/test/files/run/streamWithFilter.scala +++ b/test/files/run/streamWithFilter.scala @@ -1,5 +1,5 @@ object Test { - val nums = Stream.from(1) + val nums = LazyList.from(1) def isFizz(x: Int) = x % 3 == 0 def isBuzz(x: Int) = x % 5 == 0 // next line will run forever if withFilter isn't doing its thing. 
diff --git a/test/files/run/stream_flatmap_odds.check b/test/files/run/stream_flatmap_odds.check index 2b945e7c6492..be33ef6e8f44 100644 --- a/test/files/run/stream_flatmap_odds.check +++ b/test/files/run/stream_flatmap_odds.check @@ -1 +1 @@ -Stream(1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83) +LazyList(1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83) diff --git a/test/files/run/stream_flatmap_odds.scala b/test/files/run/stream_flatmap_odds.scala index 1935253595c5..80ba1a749d4b 100644 --- a/test/files/run/stream_flatmap_odds.scala +++ b/test/files/run/stream_flatmap_odds.scala @@ -1,4 +1,4 @@ object Test extends App { - lazy val odds: Stream[Int] = Stream(1) append ( odds flatMap {x => Stream(x + 2)} ) + lazy val odds: LazyList[Int] = LazyList(1) lazyAppendedAll ( odds flatMap {x => LazyList(x + 2)} ) Console println (odds take 42).force } diff --git a/test/files/run/stream_length.scala b/test/files/run/stream_length.scala index 89dca36eca38..e9792f1b7854 100644 --- a/test/files/run/stream_length.scala +++ b/test/files/run/stream_length.scala @@ -1,15 +1,13 @@ - - object Test { - def walk(depth: Int, bias: String): Stream[String] = { + def walk(depth: Int, bias: String): LazyList[String] = { if (depth == 0) - Stream(bias) + LazyList(bias) else { - (Stream.iterate(1, 99)(_+1).map((x: Int) => walk(depth-1, bias + x))).flatten + (LazyList.iterate(1, 99)(_+1).map((x: Int) => walk(depth-1, bias + x))).flatten } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { if (scala.tools.partest.utils.Properties.isAvian) { println("!!!TEST SKIPPED!!!") println("See scala/bug#7600 for further information.") diff --git a/test/files/run/streams.check b/test/files/run/streams.check index db6d2eebab87..c7d5e0632d99 100644 --- 
a/test/files/run/streams.check +++ b/test/files/run/streams.check @@ -1,26 +1,26 @@ -Stream() -Stream() +LazyList() +LazyList() true true true Array(1) -Stream(1, ?) -Stream(1, ?) -Stream() -Stream() -Stream(1) -Stream() +LazyList() +LazyList() +LazyList() +LazyList() +LazyList(1) +LazyList() true true true true Array(1, 2) -Stream(2) -Stream() -Stream(1, 2) -Stream() +LazyList() +LazyList() +LazyList(1, 2) +LazyList() true true true @@ -30,8 +30,8 @@ true 999 512 100000 -Stream(100001, ?) -Stream(100001, ?) +LazyList() +LazyList() true true 705082704 diff --git a/test/files/run/streams.scala b/test/files/run/streams.scala index 350e103eab0b..6966c9290e60 100644 --- a/test/files/run/streams.scala +++ b/test/files/run/streams.scala @@ -1,13 +1,15 @@ +import scala.tools.partest.Util.ArrayDeep + object Test extends App { - val s0: Stream[Int] = Stream.empty + val s0: LazyList[Int] = LazyList.empty println(s0.take(1)) println(s0.takeWhile(_ > 0)) println(s0.lengthCompare(-5) > 0) println(s0.lengthCompare(0) == 0) println(s0.lengthCompare(5) < 0) - println + println() - val s1 = Stream.cons(1, Stream.empty) + val s1 = LazyList.cons(1, LazyList.empty) println(s1.toArray.deep) println(s1.take(1)) println(s1.take(2)) @@ -19,9 +21,9 @@ object Test extends App { println(s1.lengthCompare(0) > 0) println(s1.lengthCompare(1) == 0) println(s1.lengthCompare(5) < 0) - println + println() - val s2 = s1.append(Stream.cons(2, Stream.empty)) + val s2 = s1.lazyAppendedAll(LazyList.cons(2, LazyList.empty)) println(s2.toArray.deep) println(s2.drop(1)) println(s2.drop(2)) @@ -32,9 +34,9 @@ object Test extends App { println(s2.lengthCompare(1) > 0) println(s2.lengthCompare(2) == 0) println(s2.lengthCompare(5) < 0) - println + println() - val s3 = Stream.range(1, 1000) //100000 (ticket #153: Stackoverflow) + val s3 = LazyList.range(1, 1000) //100000 (ticket #153: Stackoverflow) println(s3.length) // ticket #153 @@ -46,21 +48,21 @@ object Test extends App { val size = 100000 // test tail 
recursive methods - println(Stream.from(1).take(size).last) - println(Stream.from(1).drop(size)) - println(Stream.from(1).filter(_ > size).take(5)) - println(Stream.from(1).take(size).forall(_ >= 0)) - println(Stream.from(1).exists(_ > size)) - Stream.from(1).take(size).foreach( x => () ) - println(Stream.from(1).take(size).foldLeft(0)(_ + _)) + println(LazyList.from(1).take(size).last) + println(LazyList.from(1).drop(size)) + println(LazyList.from(1).filter(_ > size).take(5)) + println(LazyList.from(1).take(size).forall(_ >= 0)) + println(LazyList.from(1).exists(_ > size)) + LazyList.from(1).take(size).foreach( x => () ) + println(LazyList.from(1).take(size).foldLeft(0)(_ + _)) val arr = new Array[Int](size) - Stream.from(1).take(size).copyToArray(arr, 0) + LazyList.from(1).take(size).copyToArray(arr, 0) - println + println() // ticket #6415 lazy val x = { println("evaluated"); 1 } - val s4 = 0 #:: x #:: Stream.empty + val s4 = 0 #:: x #:: LazyList.empty println(s4.isDefinedAt(0)) } diff --git a/test/files/run/string-extractor.scala b/test/files/run/string-extractor.scala index c0fe911ff36c..e1d86d16f532 100644 --- a/test/files/run/string-extractor.scala +++ b/test/files/run/string-extractor.scala @@ -6,20 +6,22 @@ final class StringExtract(val s: String) extends AnyVal { def apply(idx: Int): Char = s charAt idx def head: Char = s charAt 0 def tail: String = s drop 1 - def drop(n: Int): StringExtract = new StringExtract(s drop n) + def drop(n: Int): Seq[Char] = toSeq.drop(n) + def toSeq: Seq[Char] = s.toSeq override def toString = s } final class ThreeStringExtract(val s: String) extends AnyVal { - def isEmpty = (s eq null) || (s == "") - def get: (List[Int], Double, ThreeStringExtract) = ((s.length :: Nil, s.length.toDouble, this)) - def length = s.length - def lengthCompare(n: Int) = s.length compare n - def apply(idx: Int): Char = s charAt idx - def head: Char = s charAt 0 - def tail: String = s drop 1 - def drop(n: Int): ThreeStringExtract = new 
ThreeStringExtract(s drop n) + def isEmpty = (s eq null) || (s == "") + def get: (List[Int], Double, Seq[Char]) = ((s.length :: Nil, s.length.toDouble, toSeq)) + def length = s.length + def lengthCompare(n: Int) = s.length compare n + def apply(idx: Int): Char = s charAt idx + def head: Char = s charAt 0 + def tail: String = s drop 1 + def drop(n: Int): Seq[Char] = toSeq.drop(n) + def toSeq: Seq[Char] = s.toSeq override def toString = s } diff --git a/test/files/run/string-switch-defaults-null.check b/test/files/run/string-switch-defaults-null.check new file mode 100644 index 000000000000..4bbcfcf56827 --- /dev/null +++ b/test/files/run/string-switch-defaults-null.check @@ -0,0 +1,2 @@ +2 +-1 diff --git a/test/files/run/string-switch-defaults-null.scala b/test/files/run/string-switch-defaults-null.scala new file mode 100644 index 000000000000..9fc4ce235a2d --- /dev/null +++ b/test/files/run/string-switch-defaults-null.scala @@ -0,0 +1,16 @@ +import annotation.switch + +object Test { + def test(s: String): Int = { + (s : @switch) match { + case "1" => 0 + case null => -1 + case _ => s.toInt + } + } + + def main(args: Array[String]): Unit = { + println(test("2")) + println(test(null)) + } +} diff --git a/test/files/run/string-switch-pos.check b/test/files/run/string-switch-pos.check new file mode 100644 index 000000000000..59a35068b477 --- /dev/null +++ b/test/files/run/string-switch-pos.check @@ -0,0 +1,76 @@ +[[syntax trees at end of patmat]] // newSource1.scala +[0:216]package [0:0] { + [0:216]class Switch extends [13:216][13]scala.AnyRef { + [13]def (): [13]Switch = [13]{ + [13][13][13]Switch.super.(); + [13]() + }; + [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ + [56:57]case val x1: [56]String = [56:57]s; + [56:57][56:57]x1 match { + [56:57]case [75:81]"AaAa" => [93:94]1 + [56:57]case [104:110]"asdf" => [122:123]2 + [133:181]case [133:139]"BbBb" => [133:181]if ([143:147]cond) + [151:152]3 + else + [180:181]4 + [56:57]case 
[56:57]([191:197]"CcCc"| [200:205]"Cc2") => [209:210]5 + [56:57]case [56:57]_ => [56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1) + } + } + } +} + +[[syntax trees at end of cleanup]] // newSource1.scala +[0:216]package [0:0] { + [0:216]class Switch extends [13:216][13:216]Object { + [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ + [56:57]case val x1: [56]String = [56:57]s; + [56:57]{ + [56:139][56:57]if ([56][56]x1.eq([56]null)) + [56]0 + else + [56][56]x1.hashCode() match { + [56:57]case [56]67506 => [56:57]if ([56][56][56]"Cc2".equals([56]x1)) + [56][56]case4() + else + [56][56]defaultCase1() + [56:81]case [56]2031744 => [75:81]if ([75][75][75]"AaAa".equals([75]x1)) + [75:94][75]matchEnd1([93:94]1) + else + [56][56]defaultCase1() + [56:139]case [56]2062528 => [133:139]if ([133][133][133]"BbBb".equals([133]x1)) + [133:181][133]matchEnd1([133:181]if ([143:147]cond) + [151:152]3 + else + [180:181]4) + else + [56][56]defaultCase1() + [56:57]case [56]2093312 => [56:57]if ([56][56][56]"CcCc".equals([56]x1)) + [56][56]case4() + else + [56][56]defaultCase1() + [56:110]case [56]3003444 => [104:110]if ([104][104][104]"asdf".equals([104]x1)) + [104:123][104]matchEnd1([122:123]2) + else + [56][56]defaultCase1() + [56]case [56]_ => [56][56]defaultCase1() + }; + [56]case4(){ + [56][56]matchEnd1([209:210]5) + }; + [56]defaultCase1(){ + [56][56]matchEnd1([56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1)) + }; + [56]matchEnd1(x$1: [NoPosition]Int){ + [56]x$1 + } + } + }; + [13]def (): [13]Switch = [13]{ + [13][13][13]Switch.super.(); + [13]() + } + } +} + diff --git a/test/files/run/string-switch-pos.scala b/test/files/run/string-switch-pos.scala new file mode 100644 index 000000000000..b8d8c7ad1a9f --- /dev/null +++ b/test/files/run/string-switch-pos.scala @@ -0,0 +1,19 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -stop:cleanup 
-Vprint:patmat,cleanup -Vprint-pos" + + override def code = + """class Switch { + | def switch(s: String, cond: Boolean) = s match { + | case "AaAa" => 1 + | case "asdf" => 2 + | case "BbBb" if cond => 3 + | case "BbBb" => 4 + | case "CcCc" | "Cc2" => 5 + | } + |} + """.stripMargin.trim + + override def show(): Unit = compile() +} diff --git a/test/files/run/string-switch.check b/test/files/run/string-switch.check new file mode 100644 index 000000000000..7ab6b33ec0ae --- /dev/null +++ b/test/files/run/string-switch.check @@ -0,0 +1,29 @@ +fido Success(dog) +garfield Success(cat) +wanda Success(fish) +henry Success(horse) +felix Failure(scala.MatchError: felix (of class java.lang.String)) +deuteronomy Success(cat) +===== +AaAa 2031744 Success(1) +BBBB 2031744 Success(2) +BBAa 2031744 Failure(scala.MatchError: BBAa (of class java.lang.String)) +cCCc 3015872 Success(3) +ddDd 3077408 Success(4) +EEee 2125120 Failure(scala.MatchError: EEee (of class java.lang.String)) +===== +A Success(()) +X Failure(scala.MatchError: X (of class java.lang.String)) +===== + Success(3) +null Success(2) +7 Failure(scala.MatchError: 7 (of class java.lang.String)) +===== +pig Success(1) +dog Success(2) +===== +Ea 2236 Success(1) +FB 2236 Success(2) +cC 3136 Success(3) +xx 3840 Success(4) +null 0 Success(4) diff --git a/test/files/run/string-switch.scala b/test/files/run/string-switch.scala new file mode 100644 index 000000000000..fb1d5f9ffa7a --- /dev/null +++ b/test/files/run/string-switch.scala @@ -0,0 +1,69 @@ +//> using options -Werror +import annotation.switch +import util.Try + +object Test extends App { + + def species(name: String) = (name.toLowerCase : @switch) match { + case "fido" => "dog" + case "garfield" | "deuteronomy" => "cat" + case "wanda" => "fish" + case "henry" => "horse" + } + List("fido", "garfield", "wanda", "henry", "felix", "deuteronomy").foreach { n => println(s"$n ${Try(species(n))}") } + + println("=====") + + def collide(in: String) = (in : @switch) match { + 
case "AaAa" => 1 + case "BBBB" => 2 + case "cCCc" => 3 + case x if x == "ddDd" => 4 + } + List("AaAa", "BBBB", "BBAa", "cCCc", "ddDd", "EEee").foreach { s => + println(s"$s ${s.##} ${Try(collide(s))}") + } + + println("=====") + + def unitary(in: String) = (in : @switch) match { + case "A" => + case x => throw new MatchError(x) + } + List("A","X").foreach { s => + println(s"$s ${Try(unitary(s))}") + } + + println("=====") + + def nullFun(in: String) = (in : @switch) match { + case "1" => 1 + case null => 2 + case "" => 3 + } + List("", null, "7").foreach { s => + println(s"$s ${Try(nullFun(s))}") + } + + println("=====") + + def default(in: String) = (in : @switch) match { + case "pig" => 1 + case _ => 2 + } + List("pig","dog").foreach { s => + println(s"$s ${Try(default(s))}") + } + + println("=====") + + def onceOnly(in: Iterator[String]) = (in.next() : @switch) match { + case "Ea" => 1 + case "FB" => 2 //collision with above + case "cC" => 3 + case _ => 4 + } + List("Ea", "FB", "cC", "xx", null).foreach { s => + println(s"$s ${s.##} ${Try(onceOnly(Iterator(s)))}") + } +} diff --git a/test/files/run/stringbuilder.scala b/test/files/run/stringbuilder.scala index a98f9cf37dd7..213db32af4db 100644 --- a/test/files/run/stringbuilder.scala +++ b/test/files/run/stringbuilder.scala @@ -1,43 +1,40 @@ - -import scala.language.reflectiveCalls - object Test extends App { val str = "ABCDEFGHIJKLMABCDEFGHIJKLM" val surrogateStr = "an old Turkic letter: \uD803\uDC22" - type SB = { - def indexOf(str: String): Int - def indexOf(str: String, fromIndex: Int): Int - def lastIndexOf(str: String): Int - def lastIndexOf(str: String, fromIndex: Int): Int - } - import scala.collection.mutable.{ StringBuilder => ScalaStringBuilder } import java.lang.{ StringBuilder => JavaStringBuilder } val sbScala = new ScalaStringBuilder() append str val sbJava = new JavaStringBuilder() append str - val sbs: List[SB] = List[SB](sbScala, sbJava) - def sameAnswers(f: (SB) => Int) = assert(f(sbScala) == 
f(sbJava)) + def sameAnswers(s: String, i: Int = -1, l: Boolean = false) = { + if (l) { + if (i == -1) assert(sbScala.lastIndexOf(s) == sbJava.lastIndexOf(s)) + else assert(sbScala.lastIndexOf(s, i) == sbJava.lastIndexOf(s, i)) + } else { + if (i == -1) assert(sbScala.indexOf(s) == sbJava.indexOf(s), s"$s -- $sbScala -- $sbJava") + else assert(sbScala.indexOf(s, i) == sbJava.indexOf(s, i)) + } + } - sameAnswers(_.indexOf("")) - sameAnswers(_.indexOf("G")) - sameAnswers(_.indexOf("ABC")) - sameAnswers(_.indexOf("KLM")) - sameAnswers(_.indexOf("QZV")) - sameAnswers(_.indexOf("LMABC")) - sameAnswers(_.lastIndexOf("")) - sameAnswers(_.lastIndexOf("M")) - sameAnswers(_.lastIndexOf("ABCDEFG")) - sameAnswers(_.lastIndexOf("KLM")) - sameAnswers(_.lastIndexOf("QZV")) - sameAnswers(_.lastIndexOf("GHI", 22)) - sameAnswers(_.lastIndexOf("KLM", 22)) + sameAnswers("") + sameAnswers("G") + sameAnswers("ABC") + sameAnswers("KLM") + sameAnswers("QZV") + sameAnswers("LMABC") + sameAnswers("", l = true) + sameAnswers("M", l = true) + sameAnswers("ABCDEFG", l = true) + sameAnswers("KLM", l = true) + sameAnswers("QZV", l = true) + sameAnswers("GHI", 22, l = true) + sameAnswers("KLM", 22, l = true) // testing that the "reverse" implementation avoids reversing surrogate pairs - val jsb = new JavaStringBuilder(surrogateStr).reverse - val ssb = new ScalaStringBuilder(surrogateStr).reverseContents + val jsb = new JavaStringBuilder(surrogateStr).reverse() + val ssb = new ScalaStringBuilder(surrogateStr).reverseInPlace() assert(jsb.toString == ssb.toString) } diff --git a/test/files/run/structural.scala b/test/files/run/structural.scala index 7da104ca6eb0..7941fb939500 100644 --- a/test/files/run/structural.scala +++ b/test/files/run/structural.scala @@ -37,7 +37,7 @@ object test1 { def r: Array[Unit] = Array((), ()) def s: Array[String] = Array("one", "two") def t: Array[Tata] = Array(t1, t2) - def u[T](f: T=>T, v:T): T = f(v) + def u[T](f: T => T, v: T): T = f(v) var v = 4 var w = 11 val x 
= t1 @@ -64,12 +64,12 @@ object test1 { def r: Array[Unit] def s: Array[String] def t: Array[Tata] - def u[T](f: T=>T, v:T): T + def u[T](f: T => T, v: T): T var v: Int val y: Tata } - def l (r: rt) { + def l (r: rt): Unit = { println(" 1. " + r.c) println(" 2. " + r.a + 1) println(" 3. " + r.d(o1)) @@ -88,7 +88,7 @@ object test1 { println("16. " + r.m(Array("one", "two"))) println("17. " + r.n(Array(t1, t2))) println("18. " + (r.o(0) + 1)) - println("19. " + (r.p(0).hashCode() > 0)) + println("19. " + (r.p(0).toString == "ohone")) println("20. " + r.q(0)) println("21. " + r.r(0)) println("22. " + r.m(r.s)) @@ -102,15 +102,15 @@ object test1 { println("30. " + r.e(r.x)) // static error }*/ - def mb(r: Object { def e[T](x: T): T }) { + def mb(r: Object { def e[T](x: T): T }): Unit = { println("31. " + r.e[Int](4)) // while this is ok } - def m1(r: Object { def z(x: Tata): Unit }) { + def m1(r: Object { def z(x: Tata): Unit }): Unit = { println("32. " + r.z(new Titi)) // while this is ok } - def m2[T](r: Object { def e(x: Tata): T; val x: Tata }) { + def m2[T](r: Object { def e(x: Tata): T; val x: Tata }): Unit = { println("33. " + r.e(r.x)) // and this too } @@ -118,7 +118,7 @@ object test1 { def e(x: T): T = x } - def m3[T](r: Rec3[T], x: T) { + def m3[T](r: Rec3[T], x: T): Unit = { println("33. " + r.e(x)) // and this too } @@ -132,24 +132,24 @@ object test1 { } object test2 { - class C extends { def f() { println("1") } } + class C extends { def f(): Unit = { println("1") } } val x1 = new C x1.f() - abstract class D extends { def f() } - val x2 = new D { def f() { println("2") } } + abstract class D extends { def f(): Unit } + val x2 = new D { def f(): Unit = { println("2") } } x2.f() - val x3 = new { def f() { println("3") } } - def run(x: { def f() }) { x.f() } + val x3 = new { def f(): Unit = { println("3") } } + def run(x: { def f(): Unit }): Unit = { x.f() } run(x3) - type T = { def f() } - val x4 = new AnyRef { def f() { println("4") } } // ok! 
+ type T = { def f(): Unit } + val x4 = new AnyRef { def f(): Unit = { println("4") } } // ok! //val x4 = new T { def f() { println("4") } } // error! (bug #1241) x4.f() - val x5: T = new { def f() { println("5") } } + val x5: T = new { def f(): Unit = { println("5") } } x5.f() } diff --git a/test/files/run/substSymRefinementOwner.check b/test/files/run/substSymRefinementOwner.check new file mode 100644 index 000000000000..d0768e38b9bc --- /dev/null +++ b/test/files/run/substSymRefinementOwner.check @@ -0,0 +1,31 @@ + +scala> :power +Power mode enabled. :phase is at typer. +import scala.tools.nsc._, intp.global._, definitions._ +Try :help or completions for vals._ and power._ + +scala> class C { + def f = new { + def g = new { + def h = 1 + } + } +} +class C + +scala> val f = typeOf[C].decl(TermName("f")) +val f: $r.intp.global.Symbol = method f + +scala> val g = f.tpe.resultType.decls.head +val g: $r.intp.global.Symbol = method g + +scala> g.ownerChain.take(4) +val res0: List[$r.intp.global.Symbol] = List(method g, , method f, class C) + +scala> g.tpe.resultType.typeSymbol +val res1: $r.intp.global.Symbol = + +scala> g.tpe.resultType.typeSymbol.ownerChain.take(4) +val res2: List[$r.intp.global.Symbol] = List(, method g, , method f) + +scala> :quit diff --git a/test/files/run/substSymRefinementOwner.scala b/test/files/run/substSymRefinementOwner.scala new file mode 100644 index 000000000000..d7077c2d2f72 --- /dev/null +++ b/test/files/run/substSymRefinementOwner.scala @@ -0,0 +1,19 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = + """:power + |class C { + | def f = new { + | def g = new { + | def h = 1 + | } + | } + |} + |val f = typeOf[C].decl(TermName("f")) + |val g = f.tpe.resultType.decls.head + |g.ownerChain.take(4) + |g.tpe.resultType.typeSymbol + |g.tpe.resultType.typeSymbol.ownerChain.take(4) + |""".stripMargin +} diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala index 
c31acf9d7bc3..a8e0fb5375cd 100644 --- a/test/files/run/synchronized.scala +++ b/test/files/run/synchronized.scala @@ -1,7 +1,4 @@ -// scalac: -opt:l:inline -opt-inline-from:** -/* - * filter: optimizer warnings; - */ +//> using options -opt:inline:** -Wopt:none import java.lang.Thread.holdsLock import scala.collection.mutable.StringBuilder @@ -327,9 +324,7 @@ class C2 extends T object O2 extends T object Test extends App { - def check(name: String, result: Boolean) { - println("%-10s %s" format (name +":", if (result) "OK" else "FAILED")) - } + def check(name: String, result: Boolean): Unit = println("%-10s %s".format(name +":", if (result) "OK" else "FAILED")) val c1 = new C1 check("c1.f1", c1.f1) diff --git a/test/files/run/t0005.scala b/test/files/run/t0005.scala index 9c86e8c5597f..38c24745fe16 100644 --- a/test/files/run/t0005.scala +++ b/test/files/run/t0005.scala @@ -11,7 +11,7 @@ object B1 { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { import B1.B2.q val res = 5 match { case q(x) => x } println(res) diff --git a/test/files/run/t0017.scala b/test/files/run/t0017.scala index 245cbb7e42e4..49231cc66a5e 100644 --- a/test/files/run/t0017.scala +++ b/test/files/run/t0017.scala @@ -1,17 +1,18 @@ -object Test extends App { - -def transpose[A](arr: Array[Array[A]]) = { - for (i <- Array.range(0, arr(0).length)) yield - for (row <- arr) yield row(i) -} +import scala.collection.mutable +import scala.tools.partest.Util.ArrayDeep -var my_arr = Array(Array(1,2),Array(3,4)) +object Test extends App { + def transpose[A](arr: Array[Array[A]]) = { + for (i <- Array.range(0, arr(0).length)) yield + for (row <- arr.toIndexedSeq) yield row(i) + } -for (i <- Array.range(0, my_arr(0).length)) yield - for (row <- my_arr) yield row(i) + var my_arr = Array(Array(1, 2), Array(3, 4)) -val transposed = transpose(my_arr) + for (i <- Array.range(0, my_arr(0).length)) yield + for (row <- my_arr) yield row(i) -println(transposed.deep.toString) 
+ val transposed = transpose(my_arr) + println(transposed.deep.toString) } diff --git a/test/files/run/t0048.scala b/test/files/run/t0048.scala index 9ee3453cd9bc..c1fcc6ddcb89 100644 --- a/test/files/run/t0048.scala +++ b/test/files/run/t0048.scala @@ -5,7 +5,7 @@ object A1 { } object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val q = new A1.A2.X val res = 5 match { case q(x) => x } println(res) diff --git a/test/files/run/t0325.scala b/test/files/run/t0325.scala index a126a3a20408..35b97a97f3b0 100644 --- a/test/files/run/t0325.scala +++ b/test/files/run/t0325.scala @@ -16,7 +16,7 @@ case class RS(self: String) { object Test { def expect = List("a","b") - def test(f: => Array[String], which: String) { + def test(f: => Array[String], which: String): Unit = { try { val ret = f.toList if (ret != expect) @@ -28,16 +28,16 @@ object Test { } } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val badChars = "?*{+([\\^.$" for (c <- badChars) test(("a"+c+"b").split(c),"RichString split('"+ c + "')") - println + println() for (c <- badChars) test(RS("a"+c+"b").split(c),"RS split('"+ c + "')") - println + println() val badCases = List( ']' -> "x]", '&' -> "&&",'\\' -> "\\x", '[' -> "[x", @@ -45,7 +45,7 @@ object Test { ) for ((c,str) <- badCases) test(("a"+c+"b").split(str.toArray),"RichString split(\""+ str + "\")") - println + println() for ((c,str) <- badCases) test(RS("a"+c+"b").split(str.toArray),"RS split(\""+ str + "\")") diff --git a/test/files/run/t0421-new.scala b/test/files/run/t0421-new.scala index 8df5aa1992e9..2dc9ada35625 100644 --- a/test/files/run/t0421-new.scala +++ b/test/files/run/t0421-new.scala @@ -1,3 +1,5 @@ +import scala.tools.partest.Util.ArrayDeep + import scala.reflect.{ClassTag, classTag} // ticket #421 @@ -29,4 +31,4 @@ object Test extends App { println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deep.mkString("[", ",", "]")) println(matmul(Array(Array(4)), 
Array(Array(6, 8))).deep.mkString("[", ",", "]")) -} \ No newline at end of file +} diff --git a/test/files/run/t0421-old.scala b/test/files/run/t0421-old.scala index dde89bc5421a..28e708a824f3 100644 --- a/test/files/run/t0421-old.scala +++ b/test/files/run/t0421-old.scala @@ -1,7 +1,10 @@ + // ticket #421 @deprecated("Suppress warnings", since="2.11") object Test extends App { + import scala.reflect.ClassManifest + import scala.tools.partest.Util.ArrayDeep def transpose[A: ClassManifest](xss: Array[Array[A]]) = { for (i <- Array.range(0, xss(0).length)) yield diff --git a/test/files/run/t0432.scala b/test/files/run/t0432.scala index b860a0874f11..10f64e31da95 100644 --- a/test/files/run/t0432.scala +++ b/test/files/run/t0432.scala @@ -12,7 +12,7 @@ object Test { val s = new StringValue("hei") - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { m(s) } } diff --git a/test/files/run/t0528.scala b/test/files/run/t0528.scala index 68a997517300..5a94d87a921b 100644 --- a/test/files/run/t0528.scala +++ b/test/files/run/t0528.scala @@ -1,5 +1,6 @@ - +import scala.tools.partest.Util.ArrayDeep import scala.language.{ existentials } + trait Sequ[A] { def toArray: Array[T forSome {type T <: A}] } diff --git a/test/files/run/t0677-new.scala b/test/files/run/t0677-new.scala index 15c8b4aa194c..e1e2cefc9a47 100644 --- a/test/files/run/t0677-new.scala +++ b/test/files/run/t0677-new.scala @@ -7,4 +7,4 @@ object Test extends App { val x = new X[String] x.a(1)(2) = "hello" assert(x.a(1)(2) == "hello") -} \ No newline at end of file +} diff --git a/test/files/run/t0677-old.scala b/test/files/run/t0677-old.scala index 8d4c3ee06056..90a759c378a5 100644 --- a/test/files/run/t0677-old.scala +++ b/test/files/run/t0677-old.scala @@ -1,7 +1,7 @@ - @deprecated("Suppress warnings", since="2.11") object Test extends App { + import scala.reflect.ClassManifest class X[T: ClassManifest] { val a = Array.ofDim[T](3, 4) } diff --git a/test/files/run/t0911.check 
b/test/files/run/t0911.check new file mode 100644 index 000000000000..a89412606485 --- /dev/null +++ b/test/files/run/t0911.check @@ -0,0 +1 @@ +warning: 1 deprecation (since 2.13.0); re-run with -deprecation for details diff --git a/test/files/run/t10009.scala b/test/files/run/t10009.scala index 2a318752f11a..63be312ac8d3 100644 --- a/test/files/run/t10009.scala +++ b/test/files/run/t10009.scala @@ -3,7 +3,7 @@ import scala.reflect.runtime.universe._ import scala.tools.reflect.ToolBox object Test { - def test(code: String, log: Boolean = false) { + def test(code: String, log: Boolean = false): Unit = { val tb = currentMirror.mkToolBox() val tree = tb.parse(code) val typed = tb.typecheck(tree) @@ -25,4 +25,4 @@ object Test { test("{ class a { protected val x = 42 }; new a { x } }") // failed test("{ class a { protected[a] val x = 42 }; new a }") // failed } -} \ No newline at end of file +} diff --git a/test/files/run/t10016.check b/test/files/run/t10016.check new file mode 100644 index 000000000000..7457fcc9b22c --- /dev/null +++ b/test/files/run/t10016.check @@ -0,0 +1,8 @@ + +scala> def existWith(x: (List[T] forSome {type T}) with Int {def xxx: Int}) = ??? +def existWith(x: List[_] with Int{def xxx: Int}): Nothing + +scala> def existKeepsAnnot(x: (List[T] forSome {type T})@SerialVersionUID(1L) with Int {def xxx: Int}) = ??? 
+def existKeepsAnnot(x: List[Any] @SerialVersionUID(value = 1L) with Int{def xxx: Int}): Nothing + +scala> :quit diff --git a/test/files/run/t10016.scala b/test/files/run/t10016.scala new file mode 100644 index 000000000000..113046527a04 --- /dev/null +++ b/test/files/run/t10016.scala @@ -0,0 +1,11 @@ +import scala.tools.partest.ReplTest + +// check that we don't lose the annotation on the existential type nested in an intersection type +// it's okay that List[_] is represented as List[Any] -- they are equivalent due to variance (existential extrapolation) +// (The above comment should not be construed as an endorsement of rewrapping as a great way to implement a bunch of different type "proxies") +object Test extends ReplTest { + def code = """ + |def existWith(x: (List[T] forSome {type T}) with Int {def xxx: Int}) = ??? + |def existKeepsAnnot(x: (List[T] forSome {type T})@SerialVersionUID(1L) with Int {def xxx: Int}) = ??? + """.stripMargin +} diff --git a/test/files/run/t10042/Checks_0.scala b/test/files/run/t10042/Checks_0.scala index 4efa3b77af91..e42787c36ff0 100644 --- a/test/files/run/t10042/Checks_0.scala +++ b/test/files/run/t10042/Checks_0.scala @@ -44,4 +44,4 @@ object CheckMacro { new Checks[c.universe.type](c.universe).check(symbolOf[T].asClass) Literal(Constant(())) } -} \ No newline at end of file +} diff --git a/test/files/run/t10042/Test_1.scala b/test/files/run/t10042/Test_1.scala index c6d9f5ae9240..f2afd0011a6b 100644 --- a/test/files/run/t10042/Test_1.scala +++ b/test/files/run/t10042/Test_1.scala @@ -8,4 +8,4 @@ object Test extends App { val checks = new Checks[universe.type](universe) checks.check(symbolOf[Subject_0].asClass) checks.check(symbolOf[Subject_1].asClass) -} \ No newline at end of file +} diff --git a/test/files/run/t10067/Test.scala b/test/files/run/t10067/Test.scala index 0508369e28ca..8e02bf32d139 100644 --- a/test/files/run/t10067/Test.scala +++ b/test/files/run/t10067/Test.scala @@ -1,4 +1,3 @@ -// scalac: -unchecked object 
Test { def main(args: Array[String]): Unit = { //get inner class as some instance of super type @@ -15,6 +14,7 @@ object Test { //this will fail with java.lang.NoSuchMethodError icObj match { case ic: ocStable.InnerClass => ; + case x => throw new MatchError(x) } } } diff --git a/test/files/run/t10069.scala b/test/files/run/t10069.scala index 4e70b7e814a4..f122f8fb95fe 100644 --- a/test/files/run/t10069.scala +++ b/test/files/run/t10069.scala @@ -25,7 +25,7 @@ object Test { check(foo2(new Array[Int](1))) check(foo3(new Array[String](1))) } - def check(f: => Any) { + def check(f: => Any): Unit = { try {f ; sys.error("no exception thrown") } catch { case Expected => diff --git a/test/files/run/t10075.scala b/test/files/run/t10075.scala index e7564c5c8b65..a56ea1d952fc 100644 --- a/test/files/run/t10075.scala +++ b/test/files/run/t10075.scala @@ -22,7 +22,7 @@ class SerializableBecauseTransient extends Serializable with SerializableActuall var notSerializedR: NotSerializable = new NotSerializable } -// Indirectly check that the @transient annotation on `notSerialized` made it to the underyling field in bytecode. +// Indirectly check that the @transient annotation on `notSerialized` made it to the underlying field in bytecode. 
// If it doesn't, `writeObject` will fail to serialize the field `notSerialized`, because `NotSerializable` is not serializable object Test { def main(args: Array[String]): Unit = { diff --git a/test/files/run/t10075b.check b/test/files/run/t10075b.check index dc64e95ac7aa..c1801c3c7761 100644 --- a/test/files/run/t10075b.check +++ b/test/files/run/t10075b.check @@ -45,9 +45,9 @@ @RetainedAnnotation() public int TMix.lzyValGetterAnnotation() private int TMix.lzyValGetterAnnotation$lzycompute() @RetainedAnnotation() public int TMix.method() -@RetainedAnnotation() private final int TMix.valFieldAnnotation +@RetainedAnnotation() private int TMix.valFieldAnnotation public int TMix.valFieldAnnotation() - private final int TMix.valGetterAnnotation + private int TMix.valGetterAnnotation @RetainedAnnotation() public int TMix.valGetterAnnotation() @RetainedAnnotation() private int TMix.varFieldAnnotation public int TMix.varFieldAnnotation() diff --git a/test/files/run/t10075b/Test_2.scala b/test/files/run/t10075b/Test_2.scala index 89ba2bd488b8..7577b3444c6a 100644 --- a/test/files/run/t10075b/Test_2.scala +++ b/test/files/run/t10075b/Test_2.scala @@ -53,4 +53,4 @@ object Test extends App { flatMap(cls => cls.getDeclaredFields ++ cls.getDeclaredMethods)). sortBy(x => (x.getDeclaringClass.getName, x.getName, x.toString)). 
foreach(x => println(x.getAnnotations.toList.mkString(" ") + " " + x)) -} \ No newline at end of file +} diff --git a/test/files/run/t10094.check b/test/files/run/t10094.check new file mode 100644 index 000000000000..45b983be36b7 --- /dev/null +++ b/test/files/run/t10094.check @@ -0,0 +1 @@ +hi diff --git a/test/files/run/t10094.scala b/test/files/run/t10094.scala new file mode 100644 index 000000000000..74f507e447d8 --- /dev/null +++ b/test/files/run/t10094.scala @@ -0,0 +1,11 @@ +trait T[@specialized(Int) S] { + def initialValue: S + var value: S = initialValue +} + +final class C[@specialized(Int) S](val initialValue: S) extends T[S] + +object Test { + def main(args: Array[String]): Unit = + println(new C("hi").initialValue) +} diff --git a/test/files/run/t10097.check b/test/files/run/t10097.check deleted file mode 100644 index d1938a94e3e1..000000000000 --- a/test/files/run/t10097.check +++ /dev/null @@ -1,3 +0,0 @@ -t10097.scala:3: warning: case classes should have a non-implicit parameter list; adapting to 'case class C()(...)' -case class C(implicit c: Int) - ^ diff --git a/test/files/run/t10097.scala b/test/files/run/t10097.scala deleted file mode 100644 index ad900dee4924..000000000000 --- a/test/files/run/t10097.scala +++ /dev/null @@ -1,7 +0,0 @@ -// scalac: -deprecation - -case class C(implicit c: Int) - -object Test extends App { - assert(C()(42).productArity == 0) -} diff --git a/test/files/run/t10171/Test.scala b/test/files/run/t10171/Test.scala index 37a2cfc67f92..8185ac99dc77 100644 --- a/test/files/run/t10171/Test.scala +++ b/test/files/run/t10171/Test.scala @@ -4,11 +4,13 @@ import java.io.File object Test extends StoreReporterDirectTest { def code = ??? 
- def compileCode(code: String) = { + override def extraSettings = { val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + s"-cp $classpath" } + def compileCode(code: String) = compileString(newCompiler())(code) + def library = """ package a { package b { diff --git a/test/files/run/t10203.check b/test/files/run/t10203.check new file mode 100644 index 000000000000..c97fe36a70b7 --- /dev/null +++ b/test/files/run/t10203.check @@ -0,0 +1,14 @@ +[[syntax trees at end of typer]] // newSource1.scala +[0:88]package [0:0] { + [0:88]object X extends [9:88][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); + [9]() + }; + [17:24][17:18][17:18]D.selectDynamic[[17]Nothing](<19:24>"aaaaa"); + [31:42][31:42][31:32]D.selectDynamic[[39:42]](<33:38>"sssss"); + [50:60][50:57][50:51][50:51]D.applyDynamic[[50]Int](<52:57>"ddddd")([58:59]1); + [67:82][67:78][67:78][67:68]D.applyDynamic[[75:78]](<69:74>"fffff")([80:81]1) + } +} + diff --git a/test/files/run/t10203.scala b/test/files/run/t10203.scala new file mode 100644 index 000000000000..c4d02f3a88b2 --- /dev/null +++ b/test/files/run/t10203.scala @@ -0,0 +1,24 @@ +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + override def extraSettings: String = + s"-usejavacp -Vprint-pos -Vprint:typer -Ystop-after:typer -cp ${testOutput.path}" + + override def code = """ + object X { + D.aaaaa + D.sssss[Int] + D.ddddd(1) + D.fffff[Int](1) + } + """.trim + + override def show(): Unit = compile() +} + +import language.dynamics +object D extends Dynamic { + def selectDynamic[T](nme: String): String = ??? + def applyDynamic[T](name: String)(value: T) = ??? 
+} diff --git a/test/files/run/t10231/A_1.java b/test/files/run/t10231/A_1.java index 5cc2ed36061e..1f37b878f297 100644 --- a/test/files/run/t10231/A_1.java +++ b/test/files/run/t10231/A_1.java @@ -1,6 +1,4 @@ -/* - * javac: -parameters - */ +//> using javacOpt -parameters public class A_1 { public class Inner { public int x; diff --git a/test/files/run/t10240.check b/test/files/run/t10240.check new file mode 100644 index 000000000000..68646df19745 --- /dev/null +++ b/test/files/run/t10240.check @@ -0,0 +1,33 @@ + +List apply 1 +[0:12][0:10]List.apply([11:12]1) +List apply 1 + +List apply[Int] 2 +[0:17][0:15][0:10]List.apply[[11:14]Int]([16:17]2) +List apply[Int] 2 +List apply[Int] + +List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) +[0:72][0:63][0:52]List.apply[List[Int]](List(1), List(2)).mapConserve[[53:62][53:57]List[[58:61]Any]]([65:71](([65:66]x) => [70:71]x)) +List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x) +List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] +List apply[List[Int]] + +1 ->[Int] 2 +[0:11][0:9][0:4]1.$minus$greater[[5:8]Int]([10:11]2) +1 ->[Int] 2 +1 ->[Int] + +new A() op [Int, String ] 42 +[0:36][0:32][0:10]new A().op[[13:16]Int, [20:26]String]([34:36]42) +new A() op [Int, String ] 42 +new A() op [Int, String ] + +42 ::[Int] Nil +[0:14]{ + [0:2]final val rassoc$1 = [0:2]42; + [3:14]<3:14><3:14>Nil.$colon$colon[[6:9]Int]([0]rassoc$1) +} +42 ::[Int] Nil +::[Int] Nil diff --git a/test/files/run/t10240.scala b/test/files/run/t10240.scala new file mode 100644 index 000000000000..e4d28baae2ba --- /dev/null +++ b/test/files/run/t10240.scala @@ -0,0 +1,35 @@ +object Test extends App { + import scala.reflect.internal.util.StringContextStripMarginOps + import scala.reflect.runtime._ + import scala.reflect.runtime.universe._ + import scala.tools.reflect.ToolBox + + val mirror = universe.runtimeMirror(universe.getClass.getClassLoader) + val toolbox = mirror.mkToolBox() + def showParsed(code: 
String) = { + val parsed = toolbox.parse(code) + def codeOf(pos: Position) = code.substring(pos.start, pos.end) + val recovered = codeOf(parsed.pos) + val pieces = parsed.collect { + case tree @ TypeApply(fun, args) => codeOf(tree.pos) + } + val display = + if (pieces.isEmpty) recovered + else + sm"""|$recovered + |${pieces.mkString("\n")}""" + println { + sm"""| + |$code + |${show(parsed, printPositions = true)} + |$display""" + } + } + showParsed("List apply 1") + showParsed("List apply[Int] 2") + showParsed("List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x)") + showParsed("1 ->[Int] 2") + //def op[A, B](i: Int): Int = 2*i + showParsed("new A() op [Int, String ] 42") + showParsed("42 ::[Int] Nil") +} diff --git a/test/files/run/t10277.check b/test/files/run/t10277.check new file mode 100644 index 000000000000..30952fec3ee9 --- /dev/null +++ b/test/files/run/t10277.check @@ -0,0 +1,4 @@ +put$mcJ$sp +fillRange$mcJ$sp +fillRange$mcJ$sp$ +fillRange$mcJ$sp diff --git a/test/files/run/t10277.scala b/test/files/run/t10277.scala new file mode 100644 index 000000000000..2a7262cad931 --- /dev/null +++ b/test/files/run/t10277.scala @@ -0,0 +1,28 @@ +trait Column {} + +trait TypedColumn[@specialized(Long, Double) T] extends Column { + def put(idx: Int, value: T): Unit + + def fillRange(start: Int, len: Int, value: T): Unit = { + var idx = start + val end = start + len + while (idx < end) { + put(idx, value) + idx += 1 + } + } +} + +final class LongColumn extends TypedColumn[Long] { + override def put(idx: Int, value: Long): Unit = { + val frames = Thread.currentThread().getStackTrace.toList.drop(1).takeWhile(_.getMethodName != "main") + println(frames.map(_.getMethodName).mkString("\n")) + } +} + +object Test { + def main(args: Array[String]): Unit = { + val c = new LongColumn + c.fillRange(0, 1, 10L) + } +} diff --git a/test/files/run/t10277b.check b/test/files/run/t10277b.check new file mode 100644 index 000000000000..8ef8f1daed7f --- /dev/null +++ 
b/test/files/run/t10277b.check @@ -0,0 +1,4 @@ +g$mcI$sp +g$mcI$sp$ +g$mcI$sp +f$mcI$sp diff --git a/test/files/run/t10277b.scala b/test/files/run/t10277b.scala new file mode 100644 index 000000000000..c37bb81b040f --- /dev/null +++ b/test/files/run/t10277b.scala @@ -0,0 +1,20 @@ +trait A[@specialized(Int) T] { + def f(x: T): Unit +} + +trait B[@specialized(Int) T] { + def g(x: T): Unit = { + val frames = Thread.currentThread().getStackTrace.toList.drop(1).takeWhile(_.getMethodName != "main") + println(frames.map(_.getMethodName).mkString("\n")) + } +} + +class C[@specialized(Int) T] extends A[T] with B[T] { + def f(x: T): Unit = g(x) +} + +object Test { + def main(args: Array[String]): Unit = { + new C[Int].f(0) + } +} diff --git a/test/files/run/t10283.scala b/test/files/run/t10283.scala index b68f5ac0dd17..8a432fdec202 100644 --- a/test/files/run/t10283.scala +++ b/test/files/run/t10283.scala @@ -1,4 +1,5 @@ -// scalac: -Xsource:2.13 +//> using options -Xsource:2.13 +// trait OpacityTypes { type T def orderingT: Ordering[T] diff --git a/test/files/run/t10284.check b/test/files/run/t10284.check index a17790161c8a..039e5431960a 100644 --- a/test/files/run/t10284.check +++ b/test/files/run/t10284.check @@ -1 +1 @@ -res0: Int = 42 +val res0: Int = 42 diff --git a/test/files/run/t10284.scala b/test/files/run/t10284.scala index 7498de434eba..03ca072d89f2 100644 --- a/test/files/run/t10284.scala +++ b/test/files/run/t10284.scala @@ -2,12 +2,10 @@ // run repl -i script -e expression // The Runner normally requires -howtorun:repl to pass -e to REPL. 
-import scala.tools.partest.{ReplTest, Welcoming} +import scala.tools.partest.ReplTest import scala.tools.nsc.{GenericRunnerSettings, Settings} -// Welcoming just fakes ReplTest into not stripping leading lines of output -// since REPL doesn't emit a header for -e -object Test extends ReplTest with Welcoming { +object Test extends ReplTest { def code = "" def script = testPath.changeExtension("script") diff --git a/test/files/run/t10344.check b/test/files/run/t10344.check new file mode 100644 index 000000000000..8ec903059973 --- /dev/null +++ b/test/files/run/t10344.check @@ -0,0 +1,14 @@ +[[syntax trees at end of typer]] // newSource1.scala +package { + object t10344 extends scala.AnyRef { + def (): t10344.type = { + t10344.super.(); + () + }; + def unwrap[F[_] >: [_]Nothing <: [_]Any](f: F[Unit] => Unit): Unit = (); + private[this] val f: (=> Unit) => Unit = ((x$1: => Unit) => ()); + def f: (=> Unit) => Unit = t10344.this.f; + t10344.this.unwrap[](t10344.this.f) + } +} + diff --git a/test/files/run/t10344.scala b/test/files/run/t10344.scala new file mode 100644 index 000000000000..fbbc8a871c88 --- /dev/null +++ b/test/files/run/t10344.scala @@ -0,0 +1,17 @@ +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + override def extraSettings: String = + s"-usejavacp -Vprint:typer -Ystop-after:typer" + + override def code = """ +object t10344 { + def unwrap[F[_]](f: F[Unit] => Unit): Unit = () + val f: (=> Unit) => Unit = { _ => () } + unwrap(f) +} + """ + + override def show(): Unit = compile() +} diff --git a/test/files/run/t10363.scala b/test/files/run/t10363.scala new file mode 100644 index 000000000000..7d1462c9ced8 --- /dev/null +++ b/test/files/run/t10363.scala @@ -0,0 +1,31 @@ +trait Foo[A, B] +object Foo { + type Bar[A] = Foo[A, _] +} + +trait Base[M[_]] { + def method(in: M[_]): Unit +} + +class Concrete extends Base[Foo.Bar] { + def method(in: Foo.Bar[_]): Unit = {} +} + +trait Template[M[_]] { + def toBeImplemented: Base[M] + def 
mark[A]: M[A] + + def method2(): Unit = { + toBeImplemented.method(mark[Nothing]) + } +} + +class Impl extends Template[Foo.Bar] { + def toBeImplemented: Base[Foo.Bar] = new Concrete + def mark[A]: Foo.Bar[A] = new Foo[A, Nothing] {} +} + +object Test { + def main(args: Array[String]): Unit = + (new Impl).method2() +} diff --git a/test/files/run/t10439.scala b/test/files/run/t10439.scala index 998d367ca0e8..a96a79095a04 100644 --- a/test/files/run/t10439.scala +++ b/test/files/run/t10439.scala @@ -1,4 +1,5 @@ -// scalac: -Xcheckinit +//> using options -Xcheckinit +// object Test { private var s: String = _ diff --git a/test/files/run/t10450/A.java b/test/files/run/t10450/A.java index 74b08ea117a4..efef3d1a53fb 100644 --- a/test/files/run/t10450/A.java +++ b/test/files/run/t10450/A.java @@ -1,6 +1,4 @@ -/* - * filter: unchecked - */ +//> using filter unchecked package a; class B> { @@ -12,8 +10,8 @@ public T setConnectTimeout(int connectTimeout) { return (T) this; } - public T setFailedAttempts(int slaveFailedAttempts) { - this.failedAttempts = slaveFailedAttempts; + public T setFailedAttempts(int failedAttempts) { + this.failedAttempts = failedAttempts; return (T) this; } } diff --git a/test/files/run/t10471.scala b/test/files/run/t10471.scala index df98544f651c..82ea43418e58 100644 --- a/test/files/run/t10471.scala +++ b/test/files/run/t10471.scala @@ -1,7 +1,7 @@ import scala.tools.partest._ object Test extends StoreReporterDirectTest { - override def extraSettings: String = "-usejavacp -Xprint:typer -Ystop-after:typer" + override def extraSettings: String = "-usejavacp -Vprint:typer -Ystop-after:typer" override def code = """@scala.annotation.meta.field class blort extends scala.annotation.StaticAnnotation diff --git a/test/files/run/t10477.scala b/test/files/run/t10477.scala new file mode 100644 index 000000000000..68d20d88e033 --- /dev/null +++ b/test/files/run/t10477.scala @@ -0,0 +1,31 @@ +abstract class BasicBackend { + type F + val f: F +} + +class 
DistributedBackend extends BasicBackend { + type F = FImpl + val f: F = new FImpl + class FImpl +} + +trait BasicProfile { + type Backend <: BasicBackend + val backend: Backend + trait SimpleQL { + val f: backend.F = backend.f + } +} + +trait DistributedProfile extends BasicProfile { _: DistributedDriver => + type Backend = DistributedBackend + val backend: Backend = new DistributedBackend + class SimpleQlImpl extends SimpleQL + new SimpleQlImpl +} + +class DistributedDriver extends DistributedProfile + +object Test extends App { + new DistributedDriver() +} diff --git a/test/files/run/t10513.scala b/test/files/run/t10513.scala index b4788e04b2cc..3e8f3b427696 100644 --- a/test/files/run/t10513.scala +++ b/test/files/run/t10513.scala @@ -13,12 +13,12 @@ import ExecutionContext.Implicits.global */ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val arrSz = 50 * 10000 val numFutures = 4000 val rng = new Random() - val longStandingPromise = Promise[Nothing] + val longStandingPromise = Promise[Nothing]() val futures = List.tabulate(numFutures) { i => val arr = new Array[Int](arrSz) diff --git a/test/files/run/t10522.scala b/test/files/run/t10522.scala index 7e801a580834..218b293c2018 100644 --- a/test/files/run/t10522.scala +++ b/test/files/run/t10522.scala @@ -35,4 +35,4 @@ object Test { println(fs2()) println(fs2()) } -} \ No newline at end of file +} diff --git a/test/files/run/t10539.scala b/test/files/run/t10539.scala new file mode 100644 index 000000000000..fe120eee956e --- /dev/null +++ b/test/files/run/t10539.scala @@ -0,0 +1,12 @@ +//> using options -Xdev +// +class A { + def ==(a: A) = "LOL" +} +case class B(a: A) + +object Test extends App { + B(new A) == B(new A) +} + +// was: java.lang.ClassCastException: class java.lang.String cannot be cast to class java.lang.Boolean diff --git a/test/files/run/t10545.scala b/test/files/run/t10545.scala new file mode 100644 index 000000000000..a0c6460a19da --- /dev/null +++ 
b/test/files/run/t10545.scala @@ -0,0 +1,14 @@ + +class D[T] + +class C[F[_]](val i: Int) +object C { + def apply[F[_]](implicit cf: C[F]): Int = cf.i + + implicit def c0[F[_]]: C[F] = new C[F](0) + implicit def c1: C[D] = new C[D](1) +} + +object Test extends App { + assert(C[D] == 1) // Works in Dotty ... +} diff --git a/test/files/run/t10551.scala b/test/files/run/t10551.scala index 4ae52c6e207b..84412d725275 100644 --- a/test/files/run/t10551.scala +++ b/test/files/run/t10551.scala @@ -11,7 +11,7 @@ object Test extends App { def check[A](cls: Class[A])(implicit tag: reflect.ClassTag[A]): Unit = { val suffix = if (cls != tag.runtimeClass) " != " + tag.runtimeClass else "" - println(cls + suffix) + println(cls.toString + suffix) } check(classOf[Id[Int]]) @@ -50,4 +50,4 @@ object Test extends App { check(classOf[Bixt[_]]) check(classOf[Bixty]) -} \ No newline at end of file +} diff --git a/test/files/run/t10552/Test_2.scala b/test/files/run/t10552/Test_2.scala index 189719afa0a7..333dd2c45845 100644 --- a/test/files/run/t10552/Test_2.scala +++ b/test/files/run/t10552/Test_2.scala @@ -1,14 +1,10 @@ import scala.tools.partest._ +import scala.tools.testkit.AssertUtil.assertThrows object Test extends DirectTest { override def extraSettings: String = s"-usejavacp -cp ${testOutput.path} -Ystop-after:typer" def code = "class C { A.f }" - def show(): Unit = try { - compile() - throw new Error("Expected OutOfMemoryError") - } catch { - case e: OutOfMemoryError if e.getMessage == "OOM" => - } + def show(): Unit = assertThrows[OutOfMemoryError](compile(), _ == "OOM") } diff --git a/test/files/run/t10611.scala b/test/files/run/t10611.scala index 869e8f58cada..a8ecdb3072aa 100644 --- a/test/files/run/t10611.scala +++ b/test/files/run/t10611.scala @@ -4,4 +4,4 @@ object Test extends App { /* crashes at runtime with NoSuchMethodError */ this.##.## -} \ No newline at end of file +} diff --git a/test/files/run/t10641.scala b/test/files/run/t10641.scala new file mode 100644 index 
000000000000..102a9dbbc725 --- /dev/null +++ b/test/files/run/t10641.scala @@ -0,0 +1,49 @@ + +import scala.tools.partest.DirectTest + +import java.net.URI +import java.nio.charset.StandardCharsets.UTF_8 +import javax.tools._ + +import scala.jdk.CollectionConverters._ + +object Test extends DirectTest { + def jcode = + """ +public class J_0 { + public int _() { return 42; } + public String funkyJavaNameFactory() { return "funk"; } + public int underscore() { return _(); } +} + """.trim() + def code = + """ +class S_1 { + val j = new J_0 + import j.{`_` => u, funkyJavaNameFactory => f} // was: Wildcard import must be in last position + val x = u + def y = f +} + """.trim() + + override def extraSettings = s"-usejavacp -classpath ${testOutput.path}" + + def show() = + // (use of '_' as an identifier might not be supported in releases after Java SE 8) + testUnderJavaAtLeast("9") { + () + } otherwise { + //val jfile = testOutput.jfile.toPath.resolve("J_0.java").tap(Files.writeString(_, jcode, UTF_8) + val javac = ToolProvider.getSystemJavaCompiler() + val fm = javac.getStandardFileManager(null, null, UTF_8) // null diagnostics, locale + val opts = List("-d", testOutput.path, "-nowarn").asJava + val uri = URI.create("string:///J_0.java") + val kind = JavaFileObject.Kind.SOURCE + val unit = new SimpleJavaFileObject(uri, kind) { override def getCharContent(ignore: Boolean) = jcode } + val units = List(unit).asJava + val task = javac.getTask(null, fm, null, opts, null, units) // null out, diagnostics, classes + + assert(task.call()) + assert(compile()) + } +} diff --git a/test/files/run/t10646.scala b/test/files/run/t10646.scala index fd63afe4b382..4746a343b1f5 100644 --- a/test/files/run/t10646.scala +++ b/test/files/run/t10646.scala @@ -7,7 +7,7 @@ object Test extends App { it.head it.last - val that = Array(A("baz"), A('fff)) + val that = Array(A("baz"), A(Symbol("fff"))) that.head that.last } diff --git a/test/files/run/t10650.check b/test/files/run/t10650.check 
deleted file mode 100644 index f011cd849116..000000000000 --- a/test/files/run/t10650.check +++ /dev/null @@ -1,5 +0,0 @@ -name: a; isNamePresent: true; isSynthetic: false -name: _; isNamePresent: true; isSynthetic: false -name: ***; isNamePresent: true; isSynthetic: false -name: unary_!; isNamePresent: true; isSynthetic: false -name: ABC; isNamePresent: true; isSynthetic: false diff --git a/test/files/run/t10650/Test.scala b/test/files/run/t10650/Test.scala deleted file mode 100644 index a32e8d4df5a9..000000000000 --- a/test/files/run/t10650/Test.scala +++ /dev/null @@ -1,18 +0,0 @@ -class Foo -object Foo { - def bar(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) = null -} - -object Test extends App { - val methodParams = classOf[Foo].getDeclaredMethods.head.getParameters - - def printParams(params: Array[java.lang.reflect.Parameter]) = { - params.foreach { param => - println(s"name: ${param.getName}; isNamePresent: ${param.isNamePresent}; isSynthetic: ${param.isSynthetic}") - } - } - - printParams(methodParams) - - Foo.bar(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0) -} diff --git a/test/files/run/t10655.check b/test/files/run/t10655.check new file mode 100644 index 000000000000..fa5d8614183d --- /dev/null +++ b/test/files/run/t10655.check @@ -0,0 +1,26 @@ + +scala> :paste -java <| EOF +// Entering paste mode (EOF to finish) + + |package p; + |public class C { + | public int c() { + | return 42; + | } + | public String toString() { + | return "hi, C"; + | } + |} +EOF +// Exiting paste mode... now compiling with javac. 
+ +scala> new p.C +val res0: p.C = hi, C + +scala> class D extends p.C +class D + +scala> new D().c() +val res1: Int = 42 + +scala> :quit diff --git a/test/files/run/t10655.scala b/test/files/run/t10655.scala new file mode 100644 index 000000000000..f9905038aa16 --- /dev/null +++ b/test/files/run/t10655.scala @@ -0,0 +1,18 @@ + +object Test extends scala.tools.partest.ReplTest { + def code = """:paste -java <| EOF + |package p; + |public class C { + | public int c() { + | return 42; + | } + | public String toString() { + | return "hi, C"; + | } + |} +EOF +new p.C +class D extends p.C +new D().c() + """ +} diff --git a/test/files/run/t10692.scala b/test/files/run/t10692.scala index ae91a14adf0b..de41e5d562d9 100644 --- a/test/files/run/t10692.scala +++ b/test/files/run/t10692.scala @@ -1,4 +1,5 @@ -// scalac: -Xcheckinit +//> using options -Xcheckinit +// trait T { private var s: String = _ def getS: String = { diff --git a/test/files/run/t10699/A_1.java b/test/files/run/t10699/A_1.java index 7e16862e1ec2..4fa02d8d7371 100644 --- a/test/files/run/t10699/A_1.java +++ b/test/files/run/t10699/A_1.java @@ -1,7 +1,5 @@ -/* - * javac: -parameters - */ +//> using javacOpt -parameters public class A_1 { public T identity_inst(T t, T other) { return t; } public static T identity_static(T t, T other) { return t; } -} \ No newline at end of file +} diff --git a/test/files/run/t10699/Test_2.scala b/test/files/run/t10699/Test_2.scala index 842b30d41c13..37b2ebe7e5e9 100644 --- a/test/files/run/t10699/Test_2.scala +++ b/test/files/run/t10699/Test_2.scala @@ -4,4 +4,4 @@ object Test extends App { val other = "other" assert(a_1.identity_inst(other = other, t = t) == t) assert(A_1.identity_static(other = other, t = t) == t) -} \ No newline at end of file +} diff --git a/test/files/run/t107.scala b/test/files/run/t107.scala index ab1b289882a6..2cd4c98208e4 100644 --- a/test/files/run/t107.scala +++ b/test/files/run/t107.scala @@ -5,4 +5,4 @@ object Test { hash += bytes(0) 
Console.println(hash) } -} \ No newline at end of file +} diff --git a/test/files/run/t1074.check b/test/files/run/t1074.check index ccf1cb1551cc..9e12a97e44d0 100644 --- a/test/files/run/t1074.check +++ b/test/files/run/t1074.check @@ -1,3 +1,3 @@ -q0 = Set(kl, jk, cd, fg, ef, gh, a, de, hj, b, lm, mn) -q1 = Set() 0 -q2 = Set() 0 +q0 = List(a, b, cd, de, ef, fg, gh, hj, jk, kl, lm, mn) +q1 = HashSet() 0 +q2 = HashSet() 0 diff --git a/test/files/run/t1074.scala b/test/files/run/t1074.scala index a95f9eedbc9c..21fc45df8bdd 100644 --- a/test/files/run/t1074.scala +++ b/test/files/run/t1074.scala @@ -3,11 +3,10 @@ object Test { def main(args : Array[String]) : Unit = { var words = "a" :: "b" :: "cd" :: "de" :: "fg" :: "ef" :: "gh" :: "jk" :: "hj" :: "kl" :: "lm" :: "mn" :: Nil - val q0:Set[String] = - new HashSet[String]() ++ words + val q0:Set[String] = HashSet[String]() ++ words val q1 = q0.filter(w => false) val q2 = q1.filter(w => false) - Console.println("q0 = " + q0) + Console.println("q0 = " + q0.toList.sorted) Console.println("q1 = " + q1+" "+q1.size) Console.println("q2 = " + q2+" "+q2.size) } diff --git a/test/files/run/t10751.check b/test/files/run/t10751.check new file mode 100644 index 000000000000..0142b6896a14 --- /dev/null +++ b/test/files/run/t10751.check @@ -0,0 +1,37 @@ +[[syntax trees at end of typer]] // newSource1.scala +[0:201]package [0:0] { + [0:201]object Test extends [12:201][12]scala.AnyRef { + [12]def (): [12]Test.type = [12]{ + [12][12][12]Test.super.(); + [12]() + }; + [20:43]private[this] val n: [38]Int = [42:43]1; + [51:60]{ + [53:60]<53:60><53:60>C.foo_:[[53]Nothing]([51]1) + }; + [67:81]{ + [69:81]<69:81><69:81>C.foo_:[[75:78]]([67]1) + }; + [89:98]{ + [89:98]<91:98><91:98>C.foo_:[[91]Nothing]([89:90][89]Test.this.n) + }; + [105:119]{ + [105:119]<107:119><107:119>C.foo_:[[113:116]]([105:106][105]Test.this.n) + }; + [127:136]{ + [129:136]<129:136><129:136>C.bar_:[[129]Nothing]([127]1) + }; + [143:157]{ + 
[145:157]<145:157><145:157>C.bar_:[[151:154]]([143]1) + }; + [165:174]{ + [165:166]final val rassoc$7: [165]Int = [165:166][165]Test.this.n; + [167:174]<167:174><167:174>C.bar_:[[167]Nothing]([165]rassoc$7) + }; + [181:195]{ + [181:182]final val rassoc$8: [181]Int = [181:182][181]Test.this.n; + [183:195]<183:195><183:195>C.bar_:[[189:192]]([181]rassoc$8) + } + } +} + diff --git a/test/files/run/t10751.scala b/test/files/run/t10751.scala new file mode 100644 index 000000000000..1c019b4e78a9 --- /dev/null +++ b/test/files/run/t10751.scala @@ -0,0 +1,33 @@ +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + override def extraSettings: String = + s"-usejavacp -Vprint-pos -Vprint:typer -Ystop-after:typer -cp ${testOutput.path}" + + override def code = """ + object Test { + private[this] val n = 1 + + 1 foo_: C + 1 foo_:[Any] C + + n foo_: C + n foo_:[Any] C + + 1 bar_: C + 1 bar_:[Any] C + + n bar_: C + n bar_:[Any] C + } + """.trim + + override def show(): Unit = compile() +} + +class C { + def foo_:[Dummy](i: => Int): Int = i + def bar_:[Dummy](i: Int): Int = i +} +object C extends C diff --git a/test/files/run/t10768.check b/test/files/run/t10768.check new file mode 100644 index 000000000000..7e264571dd54 --- /dev/null +++ b/test/files/run/t10768.check @@ -0,0 +1,11 @@ + +scala> type Id[T] = T +type Id + +scala> def foo(x: Int): Id[x.type] = x +def foo(x: Int): Id[x.type] + +scala> foo(1) +val res0: Id[Int(1)] = 1 + +scala> :quit diff --git a/test/files/run/t10768.scala b/test/files/run/t10768.scala new file mode 100644 index 000000000000..2ac4bd87be83 --- /dev/null +++ b/test/files/run/t10768.scala @@ -0,0 +1,10 @@ +import scala.tools.nsc.interpreter._ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ + |type Id[T] = T + |def foo(x: Int): Id[x.type] = x + |foo(1) + |""".stripMargin +} diff --git a/test/files/run/t10783.scala b/test/files/run/t10783.scala index 160cbb6867bf..010963de8f74 100644 --- 
a/test/files/run/t10783.scala +++ b/test/files/run/t10783.scala @@ -28,4 +28,4 @@ object Test { assert(X.baz == 43) assert(X.bip == 43) } -} \ No newline at end of file +} diff --git a/test/files/run/t10788.check b/test/files/run/t10788.check new file mode 100644 index 000000000000..a08c1d171e1b --- /dev/null +++ b/test/files/run/t10788.check @@ -0,0 +1 @@ +effect diff --git a/test/files/run/t10788.scala b/test/files/run/t10788.scala new file mode 100644 index 000000000000..1816ea8c6a9d --- /dev/null +++ b/test/files/run/t10788.scala @@ -0,0 +1,16 @@ +object Test { + class Box[T](t: T) { + def foo: T = { + println("effect") + t + } + } + + object Box { + def apply(x: String): Box[x.type] = new Box[x.type](x) + } + + def main(args: Array[String]): Unit = { + val bar = Box("foo").foo + } +} diff --git a/test/files/run/t10788b.check b/test/files/run/t10788b.check new file mode 100644 index 000000000000..37fd1b0c31a1 --- /dev/null +++ b/test/files/run/t10788b.check @@ -0,0 +1,9 @@ +effect +effect +effect +oh boy +ohai +oh boy +ohai +boo +effect42 diff --git a/test/files/run/t10788b.scala b/test/files/run/t10788b.scala new file mode 100644 index 000000000000..d690fb549eb9 --- /dev/null +++ b/test/files/run/t10788b.scala @@ -0,0 +1,55 @@ +object Test { + abstract class Box { + type T + val t: T + def foo: T = { + println("effect") + t + } + def fooEff(): T = { + println("effect") + t + } + + def fooEffPoly[U >: T <: T](): U = { + println("effect") + t + } + + } + + object Box { + def apply(x: String): Box { type T = x.type } = new Box { + type T = x.type + val t = x + } + } + + class BoxParam[T](val t: T) { + println("ohai") + def foo: T = { + println("boo") + t + } + } + + object BoxParam { + def apply(x: String): BoxParam[x.type] = { println("oh boy"); new BoxParam[x.type](x) } + } + + + class C { + println("effect42") + final val const = 42 + } + + def main(args: Array[String]): Unit = { + val bar = Box("foo").foo + Box("foo").fooEff() + Box("foo").fooEffPoly() + 
BoxParam("foo").t + BoxParam("foo").foo + + val x = new C().const + } +} diff --git a/test/files/run/t10819.scala b/test/files/run/t10819.scala new file mode 100644 index 000000000000..fa6fd60909c4 --- /dev/null +++ b/test/files/run/t10819.scala @@ -0,0 +1,23 @@ +import scala.tools.partest._ + +object Test extends StoreReporterDirectTest { + override def extraSettings: String = "-usejavacp -Vprint:typer -Ystop-after:typer" + + override def code = + """class C { + | def id[T <: AnyVal](x: T): T = x + | id(if (1 == 1) 1 else 2) // should infer id[Int], not id[AnyVal] + |} + | + """.stripMargin + + def show(): Unit = { + val baos = new java.io.ByteArrayOutputStream() + Console.withOut(baos)(Console.withErr(baos)(compile())) + val out = baos.toString("UTF-8") + + val inferredType = out.linesIterator.filter(_.contains("C.this.id[")).map(_.trim).toList + assert(inferredType.length == 1) + assert(inferredType.forall(_.startsWith("C.this.id[Int]")), inferredType) + } +} diff --git a/test/files/run/t10853.scala b/test/files/run/t10853.scala new file mode 100644 index 000000000000..1870e608c59f --- /dev/null +++ b/test/files/run/t10853.scala @@ -0,0 +1,12 @@ +trait F[T1, R] { def apply(funArg: T1): R } + +trait SetOps[A, +C] extends F[A, Boolean] { final def apply(setEl: A): Boolean = false } + +class AbstractSet[A] extends SetOps[A, AbstractSet[A]] +class AbstractSet2[A] extends AbstractSet[A] with SetOps[A, AbstractSet2[A]] + +object Test { + def main(args: Array[String]): Unit = { + new AbstractSet2[String] + } +} diff --git a/test/files/run/t10943.check b/test/files/run/t10943.check new file mode 100644 index 000000000000..8cc79d76a587 --- /dev/null +++ b/test/files/run/t10943.check @@ -0,0 +1,21 @@ + +scala> class C extends Dynamic + ^ + error: extension of type scala.Dynamic needs to be enabled + by making the implicit value scala.language.dynamics visible. 
+ This can be achieved by adding the import clause 'import scala.language.dynamics' + or by setting the compiler option -language:dynamics. + See the Scaladoc for value scala.language.dynamics for a discussion + why the feature needs to be explicitly enabled. + +scala> class C extends Dynamic + ^ + error: extension of type scala.Dynamic needs to be enabled + by making the implicit value scala.language.dynamics visible. + +scala> class C extends Dynamic + ^ + error: extension of type scala.Dynamic needs to be enabled + by making the implicit value scala.language.dynamics visible. + +scala> :quit diff --git a/test/files/run/t10943.scala b/test/files/run/t10943.scala new file mode 100644 index 000000000000..c0695f171cd1 --- /dev/null +++ b/test/files/run/t10943.scala @@ -0,0 +1,10 @@ + +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ + |class C extends Dynamic + |class C extends Dynamic + |class C extends Dynamic + """.stripMargin +} diff --git a/test/files/run/t10956.check b/test/files/run/t10956.check index 1bec9ae8f2ae..993bd6983c24 100644 --- a/test/files/run/t10956.check +++ b/test/files/run/t10956.check @@ -1,15 +1,10 @@ -scala> $intp.isettings.maxPrintString = 0 -$intp.isettings.maxPrintString: Int = 0 - scala> :paste < EOF // Entering paste mode (EOF to finish) import 
java.awt.AWTError,java.awt.Dialog,java.awt.KeyEventDispatcher,java.awt.Robot,java.awt.AWTEvent,java.awt.Dimension,java.awt.KeyEventPostProcessor,java.awt.ScrollPane,java.awt.AWTEventMulticaster,java.awt.DisplayMode,java.awt.KeyboardFocusManager,java.awt.ScrollPaneAdjustable,java.awt.AWTException,java.awt.Event,java.awt.Label,java.awt.Scrollbar,java.awt.AWTKeyStroke,java.awt.EventQueue,java.awt.LayoutManager,java.awt.SecondaryLoop,java.awt.AWTPermission,java.awt.FileDialog,java.awt.LayoutManager2,java.awt.Shape,java.awt.ActiveEvent,java.awt.FlowLayout,java.awt.LinearGradientPaint,java.awt.SplashScreen,java.awt.Adjustable,java.awt.FocusTraversalPolicy,java.awt.List,java.awt.Stroke,java.awt.AlphaComposite,java.awt.Font,java.awt.MediaTracker,java.awt.SystemColor,java.awt.BasicStroke,java.awt.FontFormatException,java.awt.Menu,java.awt.SystemTray,java.awt.BorderLayout,java.awt.FontMetrics,java.awt.MenuBar,java.awt.TextArea,java.awt.BufferCapabilities,java.awt.Frame,java.awt.MenuComponent,java.awt.TextComponent,java.awt.Button,java.awt.GradientPaint,java.awt.MenuContainer,java.awt.TextField,java.awt.Canvas,java.awt.Graphics,java.awt.MenuItem,java.awt.TexturePaint,java.awt.CardLayout,java.awt.Graphics2D,java.awt.MenuShortcut,java.awt.Toolkit,java.awt.Checkbox,java.awt.GraphicsConfigTemplate,java.awt.MouseInfo,java.awt.Transparency,java.awt.CheckboxGroup,java.awt.GraphicsConfiguration,java.awt.MultipleGradientPaint,java.awt.TrayIcon,java.awt.CheckboxMenuItem,java.awt.GraphicsDevice,java.awt.PageAttributes,java.awt.Window,java.awt.Choice,java.awt.GraphicsEnvironment,java.awt.Paint,java.awt.color,java.awt.Color,java.awt.GridBagConstraints,java.awt.PaintContext,java.awt.datatransfer,java.awt.Component,java.awt.GridBagLayout,java.awt.Panel,java.awt.dnd,java.awt.ComponentOrientation,java.awt.GridBagLayoutInfo,java.awt.Point,java.awt.event,java.awt.Composite,java.awt.GridLayout,java.awt.PointerInfo,java.awt.font,java.awt.CompositeContext,java.awt.HeadlessException,java.awt.Polygon
,java.awt.geom,java.awt.Container,java.awt.IllegalComponentStateException,java.awt.PopupMenu,java.awt.im,java.awt.ContainerOrderFocusTraversalPolicy,java.awt.Image,java.awt.PrintGraphics,java.awt.image,java.awt.Cursor,java.awt.ImageCapabilities,java.awt.PrintJob,java.awt.peer,java.awt.DefaultFocusTraversalPolicy,java.awt.Insets,java.awt.RadialGradientPaint,java.awt.print,java.awt.DefaultKeyboardFocusManager,java.awt.ItemSelectable,java.awt.Rectangle,java.awt.Desktop,java.awt.JobAttributes,java.awt.RenderingHints EOF - -// Exiting paste mode, now interpreting. - +// Exiting paste mode... now interpreting. import java.awt.AWTError import java.awt.Dialog import java.awt.KeyEventDispatcher diff --git a/test/files/run/t10956.scala b/test/files/run/t10956.scala index 94d95de6546c..88377dde4ee6 100644 --- a/test/files/run/t10956.scala +++ b/test/files/run/t10956.scala @@ -10,7 +10,6 @@ import scala.tools.nsc.Settings object Test extends ReplTest { def code = """ -$intp.isettings.maxPrintString = 0 :paste < EOF import 
java.awt.AWTError,java.awt.Dialog,java.awt.KeyEventDispatcher,java.awt.Robot,java.awt.AWTEvent,java.awt.Dimension,java.awt.KeyEventPostProcessor,java.awt.ScrollPane,java.awt.AWTEventMulticaster,java.awt.DisplayMode,java.awt.KeyboardFocusManager,java.awt.ScrollPaneAdjustable,java.awt.AWTException,java.awt.Event,java.awt.Label,java.awt.Scrollbar,java.awt.AWTKeyStroke,java.awt.EventQueue,java.awt.LayoutManager,java.awt.SecondaryLoop,java.awt.AWTPermission,java.awt.FileDialog,java.awt.LayoutManager2,java.awt.Shape,java.awt.ActiveEvent,java.awt.FlowLayout,java.awt.LinearGradientPaint,java.awt.SplashScreen,java.awt.Adjustable,java.awt.FocusTraversalPolicy,java.awt.List,java.awt.Stroke,java.awt.AlphaComposite,java.awt.Font,java.awt.MediaTracker,java.awt.SystemColor,java.awt.BasicStroke,java.awt.FontFormatException,java.awt.Menu,java.awt.SystemTray,java.awt.BorderLayout,java.awt.FontMetrics,java.awt.MenuBar,java.awt.TextArea,java.awt.BufferCapabilities,java.awt.Frame,java.awt.MenuComponent,java.awt.TextComponent,java.awt.Button,java.awt.GradientPaint,java.awt.MenuContainer,java.awt.TextField,java.awt.Canvas,java.awt.Graphics,java.awt.MenuItem,java.awt.TexturePaint,java.awt.CardLayout,java.awt.Graphics2D,java.awt.MenuShortcut,java.awt.Toolkit,java.awt.Checkbox,java.awt.GraphicsConfigTemplate,java.awt.MouseInfo,java.awt.Transparency,java.awt.CheckboxGroup,java.awt.GraphicsConfiguration,java.awt.MultipleGradientPaint,java.awt.TrayIcon,java.awt.CheckboxMenuItem,java.awt.GraphicsDevice,java.awt.PageAttributes,java.awt.Window,java.awt.Choice,java.awt.GraphicsEnvironment,java.awt.Paint,java.awt.color,java.awt.Color,java.awt.GridBagConstraints,java.awt.PaintContext,java.awt.datatransfer,java.awt.Component,java.awt.GridBagLayout,java.awt.Panel,java.awt.dnd,java.awt.ComponentOrientation,java.awt.GridBagLayoutInfo,java.awt.Point,java.awt.event,java.awt.Composite,java.awt.GridLayout,java.awt.PointerInfo,java.awt.font,java.awt.CompositeContext,java.awt.HeadlessException,java.awt.Polygon
,java.awt.geom,java.awt.Container,java.awt.IllegalComponentStateException,java.awt.PopupMenu,java.awt.im,java.awt.ContainerOrderFocusTraversalPolicy,java.awt.Image,java.awt.PrintGraphics,java.awt.image,java.awt.Cursor,java.awt.ImageCapabilities,java.awt.PrintJob,java.awt.peer,java.awt.DefaultFocusTraversalPolicy,java.awt.Insets,java.awt.RadialGradientPaint,java.awt.print,java.awt.DefaultKeyboardFocusManager,java.awt.ItemSelectable,java.awt.Rectangle,java.awt.Desktop,java.awt.JobAttributes,java.awt.RenderingHints EOF diff --git a/test/files/run/t11042.check b/test/files/run/t11042.check new file mode 100644 index 000000000000..e157b03a85dd --- /dev/null +++ b/test/files/run/t11042.check @@ -0,0 +1,6 @@ +t11042.scala:7: warning: Passing an explicit array value to a Scala varargs method is deprecated (since 2.13.0) and will result in a defensive copy; Use the more efficient non-copying ArraySeq.unsafeWrapArray or an explicit toIndexedSeq call + println(g(Array(1, 2, 3): _*)) + ^ +false +true +true diff --git a/test/files/run/t11042.scala b/test/files/run/t11042.scala new file mode 100644 index 000000000000..cdb7fca04e7f --- /dev/null +++ b/test/files/run/t11042.scala @@ -0,0 +1,9 @@ +//> using options -Xlint:deprecation +object Test extends App { + def f(xs: Array[Int]): Boolean = xs.isInstanceOf[scala.collection.immutable.Seq[_]] + def g(xs: Int*): Boolean = xs.isInstanceOf[scala.collection.immutable.Seq[_]] + + println(f(Array(1, 2, 3))) + println(g(Array(1, 2, 3): _*)) + println(g(1, 2, 3)) +} diff --git a/test/files/run/t11064.check b/test/files/run/t11064.check new file mode 100644 index 000000000000..f71d3fd21323 --- /dev/null +++ b/test/files/run/t11064.check @@ -0,0 +1,16 @@ + +scala> // should say "unbound wildcard type" not "not found: type _$1" + +scala> type T = _ + ^ + error: unbound wildcard type + +scala> class C extends _ + ^ + error: unbound wildcard type + +scala> trait F[x <: _] + ^ + error: unbound wildcard type + +scala> :quit diff --git 
a/test/files/run/t11064.scala b/test/files/run/t11064.scala new file mode 100644 index 000000000000..aa7b70f937a6 --- /dev/null +++ b/test/files/run/t11064.scala @@ -0,0 +1,9 @@ +object Test extends tools.partest.ReplTest { + override def code = + """ + | // should say "unbound wildcard type" not "not found: type _$1" + | type T = _ + | class C extends _ + | trait F[x <: _] + """.stripMargin +} \ No newline at end of file diff --git a/test/files/run/t11109/JaVarArgs.java b/test/files/run/t11109/JaVarArgs.java deleted file mode 100644 index cecccf97551f..000000000000 --- a/test/files/run/t11109/JaVarArgs.java +++ /dev/null @@ -1,9 +0,0 @@ -// filter: Note: -package t11109; - -import java.io.*; - -public class JaVarArgs { - public void serialize(T... ts) {} - public void universalize(T... ts) {} -} \ No newline at end of file diff --git a/test/files/run/t11109/Test.scala b/test/files/run/t11109/Test.scala deleted file mode 100644 index be0ad9acdd35..000000000000 --- a/test/files/run/t11109/Test.scala +++ /dev/null @@ -1,7 +0,0 @@ -import t11109._ - -object Test extends App { - val jva = new JaVarArgs - jva.serialize("asdf") - jva.universalize(Universal) -} \ No newline at end of file diff --git a/test/files/run/t11109/Universal.scala b/test/files/run/t11109/Universal.scala deleted file mode 100644 index e551fab8d0d0..000000000000 --- a/test/files/run/t11109/Universal.scala +++ /dev/null @@ -1,4 +0,0 @@ -package t11109 - -trait Universal extends Any -object Universal extends Universal \ No newline at end of file diff --git a/test/files/run/t11255/A_1.scala b/test/files/run/t11255/A_1.scala index 3e33c3971aa8..2c7eef1fa0d2 100644 --- a/test/files/run/t11255/A_1.scala +++ b/test/files/run/t11255/A_1.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** class K(val f: Int => Int) extends Serializable class A { @inline final def f = new K(x => x + 1) diff --git a/test/files/run/t11255/Test_2.scala 
b/test/files/run/t11255/Test_2.scala index ec5dcaa60361..eec0b877a672 100644 --- a/test/files/run/t11255/Test_2.scala +++ b/test/files/run/t11255/Test_2.scala @@ -1,4 +1,4 @@ -// scalac: -opt:l:inline -opt-inline-from:** +//> using options -opt:inline:** object Test { def serializeDeserialize(obj: Object): Object = { import java.io._ diff --git a/test/files/run/t11272.javaopts b/test/files/run/t11272.javaopts deleted file mode 100644 index 88ac6a3f37f4..000000000000 --- a/test/files/run/t11272.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx196m diff --git a/test/files/run/t11272.scala b/test/files/run/t11272.scala deleted file mode 100644 index 24a14daef4ca..000000000000 --- a/test/files/run/t11272.scala +++ /dev/null @@ -1,12 +0,0 @@ - -object Test { - def main(args: Array[String]): Unit = { - test() - } - def test() = { - val iter = Iterator(128*1024*1024, 128*1024*1024).flatMap(new Array[Byte](_)) - while (iter.hasNext) { - iter.next() - } - } -} diff --git a/test/files/run/t11385.scala b/test/files/run/t11385.scala new file mode 100644 index 000000000000..dd2221bdaabd --- /dev/null +++ b/test/files/run/t11385.scala @@ -0,0 +1,31 @@ + +import scala.tools.partest.DirectTest + +import java.nio.file.Files.{createDirectories, createTempDirectory} +import scala.tools.testkit.ReleasablePath._ +import scala.util.Using + +// an unfortunately-named resource dir on the classpath +// +object Test extends DirectTest { + + def code = "package acme { class C }" + + def show() = assert { + Using.resource(createTempDirectory("t11385")) { tmp => + val pkg = createDirectories(tmp.resolve("acme").resolve("C").resolve("sub")) + compile("-classpath", tmp.toString) + } + } +} + +/* Was: +error: Error while emitting newSource1.scala +assertion failed: + Java member module without member class: package sub - List(package sub) + while compiling: newSource1.scala + during phase: jvm + library version: version 2.13.2-20191220-000408-7bfe4c0 + compiler version: version 
2.13.2-20191220-000408-7bfe4c0 + reconstructed args: -usejavacp -classpath /var/folders/2_/xb149z895wb5f1y632xp2bw40000gq/T/t113851788670762590621498 -d t11385-run.obj + */ diff --git a/test/files/run/t11397.scala b/test/files/run/t11397.scala new file mode 100644 index 000000000000..0732270fe4d1 --- /dev/null +++ b/test/files/run/t11397.scala @@ -0,0 +1,41 @@ + +trait MyAny { + type TSomething + type This <: MyAny +} + +class MyBool extends MyAny { + type TSomething = DummyImplicit + type This = MyBool +} + +object Test extends App { + val i = new MyBool + val j = new MyBool + var call = 0 + + final class MyMatch[MV <: MyAny](val matchVal : MV) { + def myCase[MC](pattern : Boolean)(block : => Unit)( + implicit patternBld : matchVal.TSomething + ) : MyMatch[MV] = { + call -= 1 + block + this + } + } + + def myMatch[MV <: MyAny](matchValue : MV) : MyMatch[matchValue.This] = { + assert(call == 0) + call += 1 + new MyMatch[matchValue.This](matchValue.asInstanceOf[matchValue.This]) + } + + myMatch(i) + .myCase(true) { + //val x = 42 + myMatch(j) + .myCase(true) { + + } + } +} diff --git a/test/files/run/t11402.check b/test/files/run/t11402.check index ba381609869f..7a11246f48cc 100644 --- a/test/files/run/t11402.check +++ b/test/files/run/t11402.check @@ -1,14 +1,12 @@ -scala> import scala.concurrent.duration._; val t = 1 second -:11: warning: postfix operator second should be enabled -by making the implicit value scala.language.postfixOps visible. -This can be achieved by adding the import clause 'import scala.language.postfixOps' -or by setting the compiler option -language:postfixOps. -See the Scaladoc for value scala.language.postfixOps for a discussion -why the feature should be explicitly enabled. 
- import scala.concurrent.duration._; val t = 1 second - ^ -import scala.concurrent.duration._ -t: scala.concurrent.duration.FiniteDuration = 1 second +scala> def f = { + val x = 'abc + val y = x.toString + y +} + val x = 'abc + ^ +On line 2: warning: symbol literal is deprecated; use Symbol("abc") instead [quickfixable] +def f: String scala> :quit diff --git a/test/files/run/t11402.scala b/test/files/run/t11402.scala index 077079dc0a4e..43fd16094a98 100644 --- a/test/files/run/t11402.scala +++ b/test/files/run/t11402.scala @@ -1,12 +1,14 @@ + import scala.tools.nsc.Settings import scala.tools.partest.ReplTest +import scala.util.chaining._ object Test extends ReplTest { - override def transformSettings(ss: Settings) = { - ss.feature.value = true - ss - } - - // From zinc's InteractiveConsoleInterfaceSpecification "should evaluate postfix op with a warning" - override def code = "import scala.concurrent.duration._; val t = 1 second" + override def transformSettings(ss: Settings) = ss.tap(_.deprecation.value = true) + override def code = + """|def f = { + | val x = 'abc + | val y = x.toString + | y + |}""".stripMargin } diff --git a/test/files/run/t11465.check b/test/files/run/t11465.check new file mode 100644 index 000000000000..03fdfdd2eb9d --- /dev/null +++ b/test/files/run/t11465.check @@ -0,0 +1,6 @@ +F +Z +55 +F +Z +55 diff --git a/test/files/run/t11465.scala b/test/files/run/t11465.scala new file mode 100644 index 000000000000..7acdb24ab477 --- /dev/null +++ b/test/files/run/t11465.scala @@ -0,0 +1,5 @@ +object Test extends App { + def f = { println("F") ; 1 to 10 } + println { f.foldLeft{ println("Z"); 0 }(_ + _) } + println { (f.foldLeft{ println("Z"); 0 } _) (_ + _) } +} \ No newline at end of file diff --git a/test/files/run/t11476.check b/test/files/run/t11476.check new file mode 100644 index 000000000000..82929b4aa96a --- /dev/null +++ b/test/files/run/t11476.check @@ -0,0 +1,4 @@ +t11476.scala:5: warning: match may not be exhaustive. 
+It would fail on the following input: Foo(_) + val x = Foo(1) match { case Foo(a) => a } + ^ diff --git a/test/files/run/t11476.scala b/test/files/run/t11476.scala new file mode 100644 index 000000000000..aa15286c34a1 --- /dev/null +++ b/test/files/run/t11476.scala @@ -0,0 +1,7 @@ +case class Foo(a: Int*) { def lengthCompare(i: Int) = 1 } + +object Test { + def main(args: Array[String]): Unit = { + val x = Foo(1) match { case Foo(a) => a } + } +} diff --git a/test/files/run/t11476b.check b/test/files/run/t11476b.check new file mode 100644 index 000000000000..f29363bd0d35 --- /dev/null +++ b/test/files/run/t11476b.check @@ -0,0 +1,4 @@ +t11476b.scala:11: warning: match may not be exhaustive. +It would fail on the following input: Foo(_) + val x = Foo(1) match { case FooExtractor(a) => a } + ^ diff --git a/test/files/run/t11476b.scala b/test/files/run/t11476b.scala new file mode 100644 index 000000000000..46f99eba31ae --- /dev/null +++ b/test/files/run/t11476b.scala @@ -0,0 +1,13 @@ +case class Foo(a: Int*) { def lengthCompare(i: Int) = 1 } +object FooExtractor { + def unapplySeq(x$0: Foo): Option[Seq[Int]] = if (x$0.==(null)) + None + else + Some[Seq[Int]](x$0.a); +} + +object Test { + def main(args: Array[String]): Unit = { + val x = Foo(1) match { case FooExtractor(a) => a } + } +} diff --git a/test/files/run/t11534b.scala b/test/files/run/t11534b.scala new file mode 100644 index 000000000000..75e835bed9a3 --- /dev/null +++ b/test/files/run/t11534b.scala @@ -0,0 +1,24 @@ +object Test { + case class O(i: Int) { + class A + class B extends A { + def bOuter = O.this + } + trait C { + def cOuter = O.this + } + class D extends o2.B with C + } + val o1 = new O(1); + val o2 = new O(2); + def pat1(a: Test.o1.C) = a match { + case b: Test.o1.B => + assert(b.bOuter eq Test.o1, + s"expected ${o1} as outer of value conforming to pattern `b: Test.o1.B`, but got ${b.bOuter}") + case _ => + + } + def main(args: Array[String]): Unit = { + pat1(new o1.D) + } +} diff --git 
a/test/files/run/t11534c.scala b/test/files/run/t11534c.scala new file mode 100644 index 000000000000..1563a6dfac44 --- /dev/null +++ b/test/files/run/t11534c.scala @@ -0,0 +1,135 @@ +//> using options -unchecked +import scala.util.Try + +object Test { + class O(val i: Int) { + class A { + val aOuter = i + } + + class B1 extends A { + val b1Outer = i + } + } + class M(i: Int) extends O(i) { + class B2 extends m2.A { + val b2Outer = i + } + + def pat1(a: M.this.A) = a match { + case b: M.this.B1 => // can elide outer check, (a : m1.A) && (a : O#B1) implies (a : m1.B1) + assertOuter(m1.i, b.b1Outer) + true + case _ => + false + } + def pat2(a: m2.A) = a match { + case b: M.this.B2 => // needs runtime outer check + assertOuter(m1.i, b.b2Outer) + true + case _ => + false + } + def pat3(a: M.this.B1) = a match { + case b: M.this.A => // can elide outer check, (a : m1.B1) && (a : O#A) implies (a : m1.B1) + assertOuter(m1.i, b.aOuter) + true + case _ => + false + } + def pat4(a: M.this.B2) = a match { + case b: m2.A => // can elide outer check, (a : m1.B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + } + + val m1 = new M(1); + val m2 = new M(2); + + def pat1(a: m1.A) = a match { + case b: m1.B1 => // can elide outer check, (a : m1.A) && (a : O#B1) implies (a : m1.B1) + assertOuter(m1.i, b.b1Outer) + true + case _ => + false + } + def pat2(a: m2.A) = a match { + case b: m1.B2 => // needs runtime outer check + assertOuter(m1.i, b.b2Outer) + true + case _ => + false + } + def pat3(a: m1.B1) = a match { + case b: m1.A => // can elide outer check, (a : m1.B1) && (a : O#A) implies (a : m1.B1) + assertOuter(m1.i, b.aOuter) + true + case _ => + false + } + def pat4(a: m1.B2) = a match { + case b: m2.A => // can elide outer check, (a : m1.B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + + def pat5(a: M#B2) = a match { + case b: m2.A => // can elide outer check, (a : A#B2) implies (a : m2.A) + 
assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + + trait ScalaProvider { def loader: Int } + type ScalaProvider2 = { def loaderLibraryOnly: Int } + import scala.language.reflectiveCalls + + def cb1400(provider: ScalaProvider) = try { + provider match { + case p: ScalaProvider2 @unchecked => p.loaderLibraryOnly + } + } catch { + case _: NoSuchMethodException => provider.loader + } + + def assertOuter(expected: Int, actual: Int): Unit = { + if (expected != actual) throw WrongOuter(expected, actual) + } + case class WrongOuter(expected: Int, actual: Int) extends RuntimeException(s"expected: $expected, actual: $actual") + + def main(args: Array[String]): Unit = { + assert(pat1(new m1.B1)) + assert(m1.pat1(new m1.B1)) + assert(Try(pat1((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + assert(Try(m1.pat1((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + + assert(!pat2(new m2.B2)) + assert(!m1.pat2(new m2.B2)) + assert(pat2(new m1.B2)) + assert(m1.pat2(new m1.B2)) + + assert(pat3(new m1.B1)) + assert(m1.pat3(new m1.B1)) + assert(Try(pat3((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + assert(Try(m1.pat3((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + + assert(pat4(new m1.B2)) + assert(m1.pat4(new m1.B2)) + assert(pat4((new m2.B2).asInstanceOf[m1.B2])) + assert(m1.pat4((new m2.B2).asInstanceOf[m1.B2])) + + assert(pat5(new m1.B2)) + assert(pat5(new m2.B2)) + + class SP1 extends ScalaProvider { def loader = 1 } + class SP2 extends ScalaProvider { def loader = 1; def loaderLibraryOnly = 2 } + assert(cb1400(new SP1()) == 1) + assert(cb1400(new SP2()) == 2) + } +} diff --git a/test/files/run/t11564.check b/test/files/run/t11564.check index 776d1f332e0f..777665adfb6a 100644 --- a/test/files/run/t11564.check +++ b/test/files/run/t11564.check @@ -1,13 +1,13 @@ -scala> :power -Power mode enabled. :phase is at typer. 
-import scala.tools.nsc._, intp.global._, definitions._ -Try :help or completions for vals._ and power._ +scala> val x = ""; s"$x" +val x: String = "" +val res0: String = "" -scala> class C { object o } -defined class C +scala> val s"$y" = "" +val y: String = "" -scala> typeOf[C].member(newTermName("o")).debugFlagString -res0: String = +scala> val x = ""; s"$x" +val x: String = "" +val res1: String = "" scala> :quit diff --git a/test/files/run/t11564.scala b/test/files/run/t11564.scala index 0753248bcd56..b7684bf09201 100644 --- a/test/files/run/t11564.scala +++ b/test/files/run/t11564.scala @@ -14,8 +14,8 @@ import scala.tools.partest.ReplTest object Test extends ReplTest { def code = """ -:power -class C { object o } -typeOf[C].member(newTermName("o")).debugFlagString +val x = ""; s"$x" +val s"$y" = "" +val x = ""; s"$x" """ } diff --git a/test/files/run/t11609.check b/test/files/run/t11609.check new file mode 100644 index 000000000000..02687084a22c --- /dev/null +++ b/test/files/run/t11609.check @@ -0,0 +1,24 @@ +new Foo 0 +new Foo 1 +new Bar 0 +new Foo 2 +new Foo 3 +new Bar 0 +new Bar 1 +sara +frank +new Foo 4 +new Bar 0 +new Bar 1 +new Foo 5 +new Foo 6 +new Bar 0 +new Foo 7 +new Foo 8 +new Bar 0 +new Bar 1 +sara +frank +new Foo 9 +new Bar 0 +new Bar 1 diff --git a/test/files/run/t11609.scala b/test/files/run/t11609.scala new file mode 100644 index 000000000000..9018dcc3ffab --- /dev/null +++ b/test/files/run/t11609.scala @@ -0,0 +1,41 @@ +class Foo { + println(s"new Foo ${Foo.c}"); Foo.c += 1 + + class Bar { + println(s"new Bar ${Bar.c}"); Bar.c += 1 + } + + object Bar { + var c = 0 + implicit def brr: Bar = new Bar + } + def m(implicit b: Bar): Foo = Foo.this +} +object Foo { + var c = 0 +} + +object Test { + (new Foo).m(null) + def a = (new Foo).m(null) + + (new Foo).m + def b = (new Foo).m + + (new Foo).m(null).m(null) + def c = (new Foo).m(null).m(null) + + (new Foo).m.m + def d = (new Foo).m.m + + { println("sara"); { println("frank"); new Foo}.m}.m + def 
e = { println("sara"); { println("frank"); new Foo}.m}.m + + def main(args: Array[String]): Unit = { + a + b + c + d + e + } +} diff --git a/test/files/run/t1167.scala b/test/files/run/t1167.scala index 0374e20a7e02..d1cb83be67f8 100644 --- a/test/files/run/t1167.scala +++ b/test/files/run/t1167.scala @@ -1,10 +1,10 @@ -// scalac: -Ydelambdafy:inline +//> using options -Ydelambdafy:inline /** Tests for compatible InnerClasses attribute between trait and * impl classes, as well as anonymous classes. */ trait Test1 { - def testFunc(i: Int): Unit = (i: Int) => i + 5 + def testFunc(i: Int) = (i: Int) => i + 5 } /* getName diff --git a/test/files/run/t11736.scala b/test/files/run/t11736.scala new file mode 100644 index 000000000000..ef7749e9a930 --- /dev/null +++ b/test/files/run/t11736.scala @@ -0,0 +1,50 @@ +trait Foo1 { + override def toString = "FU" + trait Bar1 { assert(Foo1.this.toString == "FU") } + class Bar2 { assert(Foo1.this.toString == "FU") } +} + +class Foo2 { + override def toString = "FU" + trait Bar1 { assert(Foo2.this.toString == "FU") } + class Bar2 { assert(Foo2.this.toString == "FU") } +} + +trait Baz1 extends Foo1 +class Baz2 extends Foo1 + +trait Baz3 extends Foo2 +class Baz4 extends Foo2 + +object Biz1 extends Baz1 { + new Bar1 {} + new Bar2 {} + new Bar2 +} + +object Biz2 extends Baz2 { + new Bar1 {} + new Bar2 {} + new Bar2 +} + +object Biz3 extends Baz3 { + new Bar1 {} + new Bar2 {} + new Bar2 +} + +object Biz4 extends Baz4 { + new Bar1 {} + new Bar2 {} + new Bar2 +} + +object Test { + def main(args: Array[String]): Unit = { + Biz1 + Biz2 + Biz3 + Biz4 + } +} diff --git a/test/files/run/t11746.scala b/test/files/run/t11746.scala new file mode 100644 index 000000000000..5d3669b26956 --- /dev/null +++ b/test/files/run/t11746.scala @@ -0,0 +1,23 @@ + +import scala.tools.partest.DirectTest +import scala.tools.nsc.reporters.{Reporter, StoreReporter} +import scala.tools.nsc.Settings +import scala.tools.nsc.util.stringFromStream + +object Test 
extends DirectTest { + + override def extraSettings = "-usejavacp -opt:_ -opt-inline-from:** -Vinline _" + + override def reporter(settings: Settings): Reporter = new StoreReporter(settings) + + override def code = "class C { def f = locally(42) }" + + override def show() = { + val res = stringFromStream { os => + Console.withOut(os) { + assert(compile()) + } + } + assert(res.startsWith("Inlining into C.f")) + } +} diff --git a/test/files/run/t11756.scala b/test/files/run/t11756.scala new file mode 100644 index 000000000000..79736e396856 --- /dev/null +++ b/test/files/run/t11756.scala @@ -0,0 +1,47 @@ +trait MyAny { + type TSomething + type This <: MyAny +} + +class MyBool extends MyAny { + type TSomething = DummyImplicit + type This = MyBool +} + +object Test extends App { + val i = new MyBool + var call = 0 + var callList : List[Int] = List() + + final class MyMatch[MV <: MyAny](val matchVal : MV) { + def myCase[MC](pattern : Boolean)(block : => Unit)( + implicit patternBld : matchVal.TSomething + ) : MyMatch[MV] = { + call -= 1 + block + this + } + } + + def myMatch[MV <: MyAny](matchValue : MV) : MyMatch[matchValue.This] = { + callList = callList :+ call + call += 1 + new MyMatch[matchValue.This](matchValue.asInstanceOf[matchValue.This]) + } + + myMatch(i) + .myCase(true) { + myMatch(i) + .myCase(true) { + + } + } + .myCase(true) { + myMatch(i) + .myCase(true) { + + } + } + + assert(callList == List(0, 0, -1)) +} diff --git a/test/files/run/t11802-pluginsdir/ploogin.scala b/test/files/run/t11802-pluginsdir/ploogin.scala index d48c042e7626..dbbbf59d996d 100644 --- a/test/files/run/t11802-pluginsdir/ploogin.scala +++ b/test/files/run/t11802-pluginsdir/ploogin.scala @@ -3,8 +3,6 @@ package t11802 import scala.tools.nsc.{Global, Phase} import scala.tools.nsc.plugins.{Plugin, PluginComponent} -import scala.reflect.io.Path -import scala.reflect.io.File /** A test plugin. 
*/ abstract class Ploogin(val global: Global, val name: String = "ploogin") extends Plugin { @@ -21,9 +19,7 @@ abstract class Ploogin(val global: Global, val name: String = "ploogin") extends def newPhase(prev: Phase) = new TestPhase(prev) class TestPhase(prev: Phase) extends StdPhase(prev) { override def description = TestComponent.this.description - def apply(unit: CompilationUnit) { - if (settings.isDeveloper) inform(s"My phase name is $phaseName") - } + def apply(unit: CompilationUnit): Unit = if (settings.developer.value) inform(s"My phase name is $phaseName") } } } diff --git a/test/files/run/t11802-pluginsdir/t11802.scala b/test/files/run/t11802-pluginsdir/t11802.scala index 0fb1a5de8f34..a5b974224021 100644 --- a/test/files/run/t11802-pluginsdir/t11802.scala +++ b/test/files/run/t11802-pluginsdir/t11802.scala @@ -57,4 +57,3 @@ object Test extends DirectTest { compile("-Xpluginsdir", dir.toString, "-Xplugin-require:ploogin1_1") } } - diff --git a/test/files/run/t11815.scala b/test/files/run/t11815.scala new file mode 100644 index 000000000000..3743e1d93e86 --- /dev/null +++ b/test/files/run/t11815.scala @@ -0,0 +1,39 @@ + +// check writing a jar with manifest. 
+ +import scala.reflect.io.{AbstractFile} +import scala.tools.nsc._, backend.jvm.ClassfileWriters, reporters.StoreReporter, io.Jar +import scala.tools.partest.DirectTest +import scala.util.chaining._ + +class JarTest(global: Global) { + val cfws = new ClassfileWriters { + val postProcessor: scala.tools.nsc.backend.jvm.PostProcessor = null + } + def test(path: String): Unit = { + //val cfw = cfws.ClassfileWriter.apply(global) + val f = AbstractFile.getFile(path) + val fw = cfws.FileWriter(global, f, Some("XMain")) + fw.writeFile("foo.class", "hello".getBytes) + fw.writeFile("bar.class", "world".getBytes) + fw.close() + + new Jar(path).mainClass match { + case Some(null) | None => println("Missing main-class") + case Some("XMain") => + case Some(_) => println("Wrong main-class") + } + } +} + +object Test extends DirectTest { + def code = "" + def show() = { + val jarpath = (testOutput / "xmain.jar").path + val settings = new Settings().tap(_.processArgumentString(s"-Xmain-class XMain -d $jarpath")) + val storer = new StoreReporter(settings) + val global = Global(settings, storer) + new JarTest(global).test(jarpath) + assert(storer.infos.isEmpty) + } +} diff --git a/test/files/run/t11838.check b/test/files/run/t11838.check new file mode 100644 index 000000000000..89c47f6aec7d --- /dev/null +++ b/test/files/run/t11838.check @@ -0,0 +1,21 @@ + +scala> :load t11838.script +val args: Array[String] = Array() +Loading t11838.script... + +// Detected repl transcript. Paste more, or ctrl-D to finish. + +// Replaying 1 commands from transcript. + +scala> val m = status match { + case 42 => "ok" + case 27 => { + println("oops") + "oops" + } + + } + ^ +t11838.script:6: error: '}' expected but eof found. 
+ +scala> :quit diff --git a/test/files/run/t11838.scala b/test/files/run/t11838.scala new file mode 100644 index 000000000000..b9408b5190c8 --- /dev/null +++ b/test/files/run/t11838.scala @@ -0,0 +1,11 @@ + +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def code = s":load $script" + def script = testPath.changeExtension("script") +} + +// was: